diff --git a/.circleci/config.yml b/.circleci/config.yml index 7d2b09e4464..3e8884daf03 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,54 +1,73 @@ +version: 2.1 + aliases: - &opam_env TERM: dumb OPAMYES: true - - &docker_opam + - &run_on_tags + filters: + tags: + only: /.*/ + +executors: + linux-opam: docker: - - image: ocaml/opam:debian-8_ocaml-4.05.0 + - image: ocaml/opam2:debian-8 environment: <<: *opam_env working_directory: ~/flow - - - &docker_node + linux-node: docker: - image: circleci/node:8 working_directory: ~/flow + mac: + macos: + xcode: "9.0" + environment: + <<: *opam_env + working_directory: ~/flow + curl: + docker: + - image: appropriate/curl:latest + working_directory: /flow + flow-website: + docker: + - image: flowtype/flow-website:latest + working_directory: ~/flow + awscli: + docker: + - image: xueshanf/awscli:latest + working_directory: ~/flow - # Saves the currently-installed opam version to a file, which we include - # in cache keys. - - &opam_version - name: Calculate opam version - command: opam --version > .circleci/opamversion - - - &opam_deps - name: Install deps from opam - command: | - eval $(opam config env) - opam pin add -n flowtype-ci . | cat - opam depext flowtype-ci | cat - opam install --deps-only flowtype-ci | cat - - - &restore_opam_cache - keys: - - opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_05_0-{{ checksum "opam" }} - - opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_05_0 - - opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }} +commands: + restore-opam-cache: + steps: + - run: + name: Calculate opam version + command: opam --version > .circleci/opamversion + - restore_cache: + keys: + - opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_07_1-{{ checksum "opam" }} - - &save_opam_cache - key: opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_05_0-{{ checksum "opam" }} - paths: - - ~/.opam + save-opam-cache: + steps: + - save_cache: + key: opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_07_1-{{ checksum "opam" }} + paths: + - ~/.opam - - &run_on_tags - filters: - tags: - only: /.*/ + create-opam-switch: + steps: + - run: + name: Install deps from opam + command: | + eval $(opam env) + opam switch create . 4.07.1 --deps-only | cat -version: 2 jobs: checkout: - <<: *docker_node + executor: linux-node steps: - checkout - run: @@ -62,41 +81,50 @@ jobs: paths: . 
build_linux: - <<: *docker_opam + executor: linux-opam steps: - attach_workspace: at: ~/flow - run: # TODO: move this to a custom docker image + # installs `zip` and `m4` but also has to fix some apt issues: + # https://discuss.circleci.com/t/failed-to-fetch-jessie-updates/29246 name: Install deps - command: sudo apt-get update && sudo apt-get install zip - - run: *opam_version - - restore_cache: *restore_opam_cache + command: | + sudo rm /etc/apt/sources.list + echo "deb http://archive.debian.org/debian/ jessie main contrib non-free" | sudo tee -a /etc/apt/sources.list + echo "deb http://archive.debian.org/debian/ jessie-backports main contrib non-free" | sudo tee -a /etc/apt/sources.list + echo "deb-src http://archive.debian.org/debian/ jessie main contrib non-free" | sudo tee -a /etc/apt/sources.list + echo "deb-src http://archive.debian.org/debian/ jessie-backports main contrib non-free" | sudo tee -a /etc/apt/sources.list + echo "Acquire::Check-Valid-Until false;" | sudo tee -a /etc/apt/apt.conf.d/10-nocheckvalid + echo 'Package: *\nPin: origin "archive.debian.org"\nPin-Priority: 500' | sudo tee -a /etc/apt/preferences.d/10-archive-pin + sudo apt-get update && sudo apt-get --yes install zip m4 + - restore-opam-cache - run: name: Update opam repo command: | opam remote list -s | grep upstream >/dev/null || \ opam remote add upstream https://opam.ocaml.org | cat opam update | cat - - run: *opam_deps + - create-opam-switch - run: name: Install extra deps from opam - command: opam install js_of_ocaml.3.1.0 | cat - - save_cache: *save_opam_cache + command: opam install js_of_ocaml.3.4.0 | cat + - save-opam-cache - run: name: Build flow command: | - opam config exec -- make bin/flow dist/flow.zip + opam exec -- make bin/flow dist/flow.zip mkdir -p bin/linux && cp bin/flow bin/linux/flow - run: name: Build libflowparser - command: opam config exec -- make -C src/parser dist/libflowparser.zip + command: opam exec -- make -C src/parser dist/libflowparser.zip - run: name: Build flow.js - command: opam config exec -- make js + command: opam exec -- make js - run: name: Build flow_parser.js - command: opam config exec -- make -C src/parser js + command: opam exec -- make -C src/parser js - run: name: Create artifacts command: | @@ -125,14 +153,14 @@ jobs: destination: flow_parser.js build_macos: - macos: - xcode: "9.0" - environment: - <<: *opam_env - working_directory: ~/flow + executor: mac steps: - attach_workspace: at: ~/flow + # https://github.com/Homebrew/brew/issues/5513 + - run: + name: Fix homebrew python + command: brew update # https://github.com/Homebrew/homebrew-core/issues/26358 - run: name: Fix homebrew python @@ -140,28 +168,27 @@ jobs: - run: name: Install opam command: command -v opam || brew install opam aspcud - - run: *opam_version - - restore_cache: *restore_opam_cache + - restore-opam-cache - run: name: Install ocaml - command: opam init --comp 4.05.0 -yn | cat + command: opam init --comp 4.07.1 -yn | cat - save_cache: - key: opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_05_0 + key: opam-cache-{{ arch }}-opam_{{ checksum ".circleci/opamversion" }}-ocaml_4_07_1 paths: - ~/.opam - run: name: Update opam repo command: opam update | cat - - run: *opam_deps - - save_cache: *save_opam_cache + - create-opam-switch + - save-opam-cache - run: name: Build flow command: | - opam config exec -- make bin/flow dist/flow.zip + opam exec -- make bin/flow dist/flow.zip mkdir -p bin/macos && cp bin/flow bin/macos/flow - run: name: Build 
libflowparser - command: opam config exec -- make -C src/parser dist/libflowparser.zip + command: opam exec -- make -C src/parser dist/libflowparser.zip - run: name: Create artifacts command: | @@ -181,7 +208,9 @@ jobs: destination: libflowparser-osx.zip runtests_linux: - <<: *docker_node + executor: linux-node + environment: + FLOW_RUNTESTS_PARALLELISM: 8 steps: - attach_workspace: at: ~/flow @@ -190,9 +219,7 @@ jobs: command: ./runtests.sh bin/linux/flow | cat runtests_macos: - macos: - xcode: "9.0" - working_directory: ~/flow + executor: mac steps: - attach_workspace: at: ~/flow @@ -201,7 +228,7 @@ jobs: command: ./runtests.sh bin/macos/flow | cat tool_test_linux: - <<: *docker_node + executor: linux-node steps: - attach_workspace: at: ~/flow @@ -213,9 +240,7 @@ jobs: command: ./tool test -p 4 --bin bin/linux/flow | cat # Limit parallelism tool_test_macos: - macos: - xcode: "9.0" - working_directory: ~/flow + executor: mac steps: - attach_workspace: at: ~/flow @@ -227,7 +252,7 @@ jobs: command: ./tool test --bin bin/macos/flow | cat npm_pack: - <<: *docker_node + executor: linux-node steps: - attach_workspace: at: ~/flow @@ -244,20 +269,36 @@ jobs: cp dist/libflowparser-linux64.zip packages/flow-parser-bin/dist/release/libflowparser-linux64.zip cp dist/libflowparser-osx.zip packages/flow-parser-bin/dist/release/libflowparser-osx.zip make dist/npm-flow-parser-bin.tgz + - run: + name: Pack flow-remove-types and flow-node + command: | + rm -rf packages/flow-node + cp -r packages/flow-remove-types/ packages/flow-node/ + sed -i '0,/flow-remove-types/s//flow-node/' packages/flow-node/package.json + make dist/npm-flow-remove-types.tgz + make dist/npm-flow-node.tgz - persist_to_workspace: root: . paths: - dist/npm-flow-parser.tgz - dist/npm-flow-parser-bin.tgz + - dist/npm-flow-node.tgz + - dist/npm-flow-remove-types.tgz - store_artifacts: path: dist/npm-flow-parser.tgz destination: npm-flow-parser.tgz - store_artifacts: path: dist/npm-flow-parser-bin.tgz destination: npm-flow-parser-bin.tgz + - store_artifacts: + path: dist/npm-flow-node.tgz + destination: npm-flow-node.tgz + - store_artifacts: + path: dist/npm-flow-remove-types.tgz + destination: npm-flow-remove-types.tgz npm_deploy: - <<: *docker_node + executor: linux-node steps: - attach_workspace: at: ~/flow @@ -266,9 +307,7 @@ jobs: command: .circleci/deploy_npm.sh github_linux: - docker: - - image: appropriate/curl:latest - working_directory: /flow + executor: curl steps: - attach_workspace: at: /flow @@ -280,9 +319,7 @@ jobs: command: .circleci/github_upload.sh dist/libflowparser-linux64.zip "libflowparser-linux64-$CIRCLE_TAG.zip" github_macos: - docker: - - image: appropriate/curl:latest - working_directory: /flow + executor: curl steps: - attach_workspace: at: /flow @@ -294,9 +331,7 @@ jobs: command: .circleci/github_upload.sh dist/libflowparser-osx.zip "libflowparser-osx-$CIRCLE_TAG.zip" website_deploy: - docker: - - image: flowtype/flow-website:latest - working_directory: ~/flow + executor: flow-website steps: - attach_workspace: at: ~/flow @@ -333,9 +368,7 @@ jobs: command: bundle exec s3_website push --config-dir=website/ --site=dist/flow.org try_flow_deploy: - docker: - - image: xueshanf/awscli:latest - working_directory: ~/flow + executor: awscli steps: - attach_workspace: at: ~/flow @@ -351,7 +384,6 @@ jobs: command: aws s3 cp ~/try_flow s3://${S3_BUCKET}/static/${CIRCLE_TAG}/ --recursive workflows: - version: 2 build_and_test: jobs: - checkout: diff --git a/.circleci/deploy_npm.sh b/.circleci/deploy_npm.sh index 
7dc54a70603..1aacf712f27 100755 --- a/.circleci/deploy_npm.sh +++ b/.circleci/deploy_npm.sh @@ -15,4 +15,10 @@ npm publish ./dist/npm-flow-parser-bin.tgz; echo "Publishing flow-parser"; npm publish ./dist/npm-flow-parser.tgz; +echo "Publishing flow-remove-types"; +npm publish ./dist/npm-flow-remove-types.tgz; + +echo "Publishing flow-node"; +npm publish ./dist/npm-flow-node.tgz; + if [ -f ~/.npmrc.bak ]; then mv ~/.npmrc.bak ~/.npmrc; fi diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000000..7f58871a829 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: "Bug report" +about: You want to report a reproducible bug or regression in Flow. +labels: bug, needs triage +--- + + + +Flow version: + +## Expected behavior + +## Actual behavior + + +* Link to Try-Flow or Github repo: diff --git a/.github/ISSUE_TEMPLATE/discussion.md b/.github/ISSUE_TEMPLATE/discussion.md new file mode 100644 index 00000000000..ad735b86abc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/discussion.md @@ -0,0 +1,16 @@ +--- +name: "Discussion" +about: You want to propose a change to Flow. +labels: "discussion" +--- + + + +## Proposal + + +## Use case diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/.github/ISSUE_TEMPLATE/documentation.md new file mode 100644 index 00000000000..94940d9f8d3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation.md @@ -0,0 +1,11 @@ +--- +name: "Documentation" +about: You want to report missing or incorrect documentation. +labels: documentation +--- + + + + diff --git a/.github/ISSUE_TEMPLATE/libdef.md b/.github/ISSUE_TEMPLATE/libdef.md new file mode 100644 index 00000000000..4eeb7199c12 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/libdef.md @@ -0,0 +1,19 @@ +--- +name: "Library Definitions" +about: You want to report an issue with Flow's library definitions. 
+labels: "Library definitions" +--- + + + + +## Missing/Incorrect APIs + + +## Relevant documentation diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 00000000000..a3150a0a141 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,12 @@ +--- +name: "Question" +about: Please direct all questions to stackoverflow.com +labels: question +--- + + + + +PLEASE DIRECT YOUR QUESTION TO STACKOVERFLOW.COM diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000000..13839bf1038 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,6 @@ + diff --git a/.gitignore b/.gitignore index 459c413b5a3..ee3f7f27d71 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *TAGS _build +/_opam /dist /_obuild /.ocp @@ -18,3 +19,7 @@ flow.docdir *~ /hack/utils/get_build_id.gen.c /flowlib.rc +.merlin + +# This is an alias of flow-remove-types that we generate during publishing +/packages/flow-node diff --git a/.merlin b/.merlin index 2ab4b2616e5..220effe61e1 100644 --- a/.merlin +++ b/.merlin @@ -3,4 +3,15 @@ S src/** B _build/** -PKG sedlex oUnit lwt lwt.ppx js_of_ocaml +PKG compiler-libs +PKG dtoa +PKG js_of_ocaml +PKG lwt +PKG lwt_ppx +PKG lwt.log +PKG lwt.unix +PKG ocaml-migrate-parsetree +PKG oUnit +PKG ppx_gen_rec +PKG sedlex +PKG wtf8 diff --git a/.ocamlformat b/.ocamlformat new file mode 100644 index 00000000000..81dd85a366d --- /dev/null +++ b/.ocamlformat @@ -0,0 +1,24 @@ +# -*- conf -*- + +break-cases = all +break-fun-decl = smart +break-infix = fit-or-vertical +break-separators = after-and-docked +break-sequences = true +break-string-literals = never +cases-exp-indent = 2 +disambiguate-non-breaking-match = true +exp-grouping = preserve +field-space = tight-decl +if-then-else = k-r +indicate-nested-or-patterns = unsafe-no +leading-nested-match-parens = true +let-open = short +margin = 100 +parens-tuple-patterns = always +sequence-blank-line = preserve-one +sequence-style = terminator +space-around-records = true +space-around-variants = true +type-decl = sparse +wrap-fun-args = false diff --git a/00_config.ocp b/00_config.ocp deleted file mode 100644 index 480018fdd2b..00000000000 --- a/00_config.ocp +++ /dev/null @@ -1,7 +0,0 @@ - -have_lz4 = false -debug = false -(* -ccopt += [ "-IC:\path\to\lz4\lib" ] -cclib += [ "-LC:\path\to\lz4\lib" ] -*) \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 0a45f9bd5f0..d1abc700d28 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,3 +1,77 @@ # Code of Conduct -Facebook has adopted a Code of Conduct that we expect project participants to adhere to. Please [read the full text](https://code.facebook.com/codeofconduct) so that you can understand what actions will and will not be tolerated. +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5cf2d266f7f..145cdfcc4d2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,10 @@ -We require contributors to sign our Contributor License Agreement. In order for us to review and merge your code, please sign up at https://code.facebook.com/cla. If you have any questions, please drop us a line at cla@fb.com. +We require contributors to sign our Contributor License Agreement. In order for us to review and merge your code, please sign up at https://code.facebook.com/cla. If you have any questions, please drop us a line at cla@fb.com. 
-You are also expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), so please read that if you are a new contributor. +You are also expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), so please read that if you are a new contributor. + +--- + +If you intend to contribute something that is NOT a library definition or documentation change, +please open a "discussion" issue first to ensure that your work aligns with the Flow team's goals. Thanks! diff --git a/Changelog.md b/Changelog.md index f2b1a81a9b2..a53f2b21df2 100644 --- a/Changelog.md +++ b/Changelog.md @@ -1,3 +1,800 @@ +### 0.108.0 + +Notable bug fixes: + +* Batch coverage info now persists through rechecks. +* When a file with @preventMunge is in a cycle with a file that does not, the un-munged file no longer has exported munged properties checked for annotations. +* Fixed a bug where autocomplete would not be triggered in a file with Windows-style line endings. +* Improved error messages for uses of `$ObjMap`, `$ObjMapi`, `$TupleMap`, and `$Call` with incorrect arity. + +Misc: + +* The deprecated `flow ide` command and associated machinery have been removed. + +Library definitions: + +* Added the missing parts of the Pointer Lock spec to libdefs. +* Thanks to @lyleunderwood for adding scrolling support to libdefs. +* Thanks to @goodmind for adding a definition for `undefined` to the prelude. + +### 0.107.0 +New Features: + +* Implement type refinements for property accesses through brackets (#7597, thanks @goodmind). + +Notable bug fixes: + +* Fix several issues with autocomplete that prevented optional properties, type aliases to utility types, types with default type arguments, and several other edge cases from being autocompleted. +* Fix Not_expects_bounds crash with `%checks` (#7863). + +Misc: + +* Make the LSP `textDocument/documentHighlight` request serviceable while Flow is in the middle of a recheck. +* Fix minor off-by-one error which caused some parts of traces to be pruned when using the `--traces` flag. +* A minor improvement in error messages when `undefined` is involved. +* Reduce memory usage slightly when abstract locations are enabled. +* Prevent log spew when `flow lsp` is started while the server is initializing. +* Improve completeness of sighashing. We recently observed an incremental bug caused by incomplete sighashing. This speculatively addresses similar potentially problematic cases. + +Library Definitions: + +* Add webkitGetAsEntry to DataTransferItem. +* Switch several properties to optional in Notification and NotificationOptions (#8032, thanks @pauldijou). +* Map/Set fix symbols (#7560, thanks @goodmind). +* Fix Array#reduce and Array#reduceRight (#7902, thanks @goodmind). + +### 0.106.3 + +We found and fixed a bug introduced in 0.105.0. Some internal code was using a hashing function and assumed collisions were far less likely than they proved to be. This could lead to random nonsensical errors which would then disappear, usually involving missing object properties. This likely only affected extremely large projects. + +### 0.106.2 + +Fixed the stack overflow reported by [#8037](https://github.com/facebook/flow/issues/8037). Thanks [@lukeapage](https://github.com/lukeapage) for isolating the repro! + +### 0.106.1 + +Forgot to cherry-pick `[rollouts]` (an experimental new .flowconfig section) into v0.106.0 + +### 0.106.0 + +Likely to cause new Flow errors: +* We're starting to make changes to how Flow models object spreads. 
For more [see this announcement](https://medium.com/flow-type/coming-soon-changes-to-object-spreads-73204aef84e1) +* Updated parsing of the experimental nullish coalescing `??` operator. It now has a lower precedence than `||` and `&&`, and parentheses are required to nest it with them. +* Flow wasn't typechecking the properties of certain obscure JSX usage (namespaced identifiers, member expression with @jsx / @csx), so would miss type errors in their expressions (e.g. `` now errors) + +Notable bug fixes: +* Fixed a bug where merge or check jobs would crash when a parse error was added to a file in a cycle. The crash was silent but unintended. +* Types-first no longer ignores the `munge_underscores` flowconfig option + +Misc: +* Various libdef updates. Thanks for all the PRs! + +Parser: +* Improved error messages for missing semicolon +* Comments are now correctly attached to `break` statements and array patterns +* `libflowparser` now supports `esproposal_nullish_coalescing` as an option + +### 0.105.2 + +v0.105.0 started running the Flow server in a cgroup on Linux distros that support [cgroup v2](https://www.kernel.org/doc/Documentation/cgroup-v2.txt). However, some versions of `systemd`, which manages cgroups, contain a bug that caused the Flow server to fail to start. This release avoids using `cgroup` on these systems. (#8012) + +### 0.105.1 + +This was an npm-only release to fix a packaging issue. No updated binaries were published. + +### 0.105.0 + +Likely to cause new Flow errors: + +* Types for `FileReader` properties and methods are now more precise (e.g., some parameters typed + `any` are now typed `ProgressEvent`; some properties now have `null` added to their types). Thanks, @nwoltman! + +* The value type parameter `V` of `$ReadOnlyMap` and `$ReadOnlyWeakMap` is now covariant. Thanks, @goodmind! + +* Types for the `vm` module in node.js are now more precise. Thanks, @goodmind! + +* The deprecated `$Enum<...>` utility type has now been deleted. Use `$Keys<...>` instead. + +* Indexing tuples with floats is no longer allowed. + +New Features: + +* Added support for `React.Profiler` (React v16.9+). Thanks, @bvaughn! + +* Added a `--types` flag to `flow graph dep-graph` to output only "type" dependencies: the subset of + imports that the types of a module's exports depends on. (Without the flag, we output "code" + dependencies: the set of all imports of a module.) + +* Preliminary support for automatically inserting annotations on a module's exports through + LSP. Thanks to @akuhlens (summer intern with the Flow team)! + +* Preliminary support for definite assignment checking of class instance properties. Thanks to + @pzp1997 (summer intern with the Flow team)! + +* Added an option to `.flowconfig` for exact-by-default objects. + +Perf fixes: + +* Fixed a non-termination issue with a recursive use of mapped types. +* Fixed an exponential-blowup issue with a combined use of spreads and unions. +* Fixed an exponential-blowup issue with recursive use of array spreads. + +Misc: + +* Fixed LSP init to say codeLens is not supported. +* Fixed lots of cases of bad error positioning, unblocking improvements to error suppressions and + error streaming. Thanks to @mvcccccc (summer intern with the Flow team)! + +Parser: + +* Improved a bunch of "unexpected" parse errors, providing what was expected in the error message. +* Fixed a bug in parsing of params in function types. 
+
+### 0.104.0
+
+Likely to cause new Flow errors:
+* Fixed the definition of `Function.prototype.apply` to only accept array-like objects, not any iterable.
+* Improved error positioning, which may cause previously-suppressed errors to become unsuppressed.
+
+New Features:
+* `non-array-spread` lint rule: Fires when a non-array iterable is spread. This is useful for modeling the `loose: true` mode of `@babel/plugin-transform-spread`, where such code causes a runtime error.
+
+Notable Bug Fixes:
+* Fixed a performance regression when computing dependencies in large projects
+* Fixed built-in library definitions that needed to be explicitly inexact to pass the `implicit-inexact-object` linter.
+* Improved libdefs for many browser APIs (e.g. Media Streams, MIDI, Permissions, Workers) (#7737, #7805, #7806, thanks @goodmind!)
+
+Misc:
+* Various improvements to the types-first signature generator
+* Improved the name of the server master process in `ps`
+* Improved the output of `flow check --profile`
+
+`flow-remove-types`:
+* Remove opaque types and `declare export`
+
+Parser:
+* Fixed a bug allowing `await` to be a parameter in async functions
+* Several improvements to the experimental comment attachment algorithm
+
+
+### 0.103.0
+
+New Features:
+* Added `--types` flag to `flow cycle`; when given, only type dependencies are used to compute cycles
+
+Notable bug fixes:
+* Fixed a bug when destructuring unions gave spurious errors [[example]](https://flow.org/try/#0GYVwdgxgLglg9mABMAFAbwA6ILyIAwC+AXIpiWCALYBGApgE4A+AzlPTGAOYECUpBAKFCRYCRJ3QZipDOSp16BRIxklW7Lr35A)
+* Updated for-in/for-of head expressions to be evaluated in the correct scope [[example]](https://flow.org/try/#0MYewdgzgLgBAHgLhgQQE6oIYE8A8YCuAtgEYCmqAfDALwwDaAugNwBQAZiKjABSiSxwYINvACUMAN4sY8VgF8gA)
+
+Performance:
+* Improved the calculation of a file's dependents, reducing work during rechecks
+
+Library definitions:
+* Added support for Array#flatMap (thanks @goodmind)
+* Replaced usages of `Object` and `Function` (which are aliases for `any`) in library definitions with `any`
+* Removed some usages of `any` from library definitions
+* Updated Function#apply to accept strictly two arguments (thanks @goodmind)
+* Added SpeechSynthesis definitions (thanks @goodmind)
+
+Misc:
+* Updated/added HTML spec URLs in comments (thanks @kevinSuttle)
+* Fixed parsing of anonymous class implements clause (thanks @goodmind)
+* Added support for printing mixins and implements (thanks @goodmind)
+
+### 0.102.0
+
+Likely to cause new Flow errors:
+* Function components with no arguments get a sealed empty object type as props.
+* Moved `MixedElement` export into the module declaration, so it will now need to be qualified as `React.MixedElement`.
+
+Notable bug fixes:
+* Fixed error positioning around utility types (e.g. `$ObjMap`).
+* Omit reporting error stack traces to end users over LSP.
+* Fixed bug where Flow would crash when variable has same name as a type (fixes #7825)
+
+Misc:
+* Refactored coverage computation to use the typed AST. This enables coverage results over more locations than earlier.
+* Improved server and monitor error logging.
+* In typing object types as react components, account for the `defaultProps` property and make them compatible with `React.AbstractComponent`.
+* Optimized the way module exports are populated to prevent recursion limiter exceptions.
+* Improved error messages for invalid `BigInt`s. (thanks, @goodmind!)
+* Hovering over an imported type alias returns its definition. (thanks, @vicapow!)
+* Fixed semver comparison to allow for suffixes such as `rc`. + +Libdefs: +* Remove `Object` type (equivalent to `any`) from `WeakSet` and `$ReadOnlyWeakSet`. (thanks, @goodmind!) +* Add methods to Node HTTP ServerResponse type definition. (thanks, @chrislloyd!) +* Add definitions for the Web Animations API. (thanks, @goodmind!) + +### 0.101.1 + +Notable bug fixes: +* Fixed a bug with suppressions in the experimental types-first mode. + +### 0.101.0 + +Likely to cause new Flow errors: + * `$Keys` now produces a more precise type, which may find errors where incompatible strings were passed to something expecting the `$Keys` of some object. + +New Features: + * We released a new implicit-inexact-object lint to detect when an inexact object is used without explicitly adding `...` to the + end of the props list. See [here](https://medium.com/flow-type/on-the-roadmap-exact-objects-by-default-16b72933c5cf) for context. + * Function type parameters may now use default arguments. This is not yet supported by babel. + +Notable bug fixes: + * Fixed a bug with ranges returned by autocomplete + * Fixed a bug where errors with bad locations reported over the LSP could cause the editor to clear all errors. + +Misc: + * `React.memo` and `React.lazy` now both allow you to specify an instance type via `React.AbstractComponent`. + * Various performance improvements to union types. + * Various libdef fixes and improvements. + * Various improvements to error positioning. + * The recursion limit is now configurable in the .flowconfig via `recursion_limit`. Most projects will not need to override this value. + +Parser: + * Forbid private fields named `#constructor` + * Fix duplicate private class field validation for getters/setters + * Fix parsing of private getters and setters + * Function type parameters may now use default arguments. + +### 0.100.0 + +Likely to cause new Flow errors: + +* The `React$ElementType` annotation, which was previously unsafe, is now strict. Before you could create an element given a component with this type using arbitrary props. To annotate any component that accepts some given props, use `React$ComponentType` instead. [Try Flow example](https://flow.org/try/#0JYWwDg9gTgLgBAKjgQwM5wEoFNkGN4BmUEIcARFDvmQNwBQdMAnmFnAArFjoC8cA3gB84AO2QgsALjioYUYCIDmcQQF96dAgFcR+YBBFwA4pSwwsUABRguqaZwjcAlALpw4lGFqiGAPGAA+AAksABtQiAAaOAASfhtHVAA6MQlVAEJfAHpA+lUGbV0YfUMdVGQCLABhA1koLXxLGvADLBEYaWw8GBiAUVCsCXaAFRYsF343OCysuF6oYihpKuRDEQh4XFq5BvhVuDDBtr30CAI4ADEIgHc4ADcABiSARgenh6nfZsgRY7hrgAWyHMdwsPDITAgWjI-1WMHBAIsWBhWQCeQYZQq1W29UaJiwZgsTg0hT0BhkWJqIjquyaJB+x06VB631aIzGvgc3ACEymMzmC2g0mGiNQbGQlDgMER-2ISjgCW46U+rN+7X+QJBYIhUJh1zhCKRKLRUz5swA8gBpYWi8WS6VsRXoaXAuAAA3xhKgboOAA9WPhUCr6Wz4KksOCAOrQUIAE2NdHydHKlSpNLxpnMUGJQA) +* The `React$ComponentType` annotation is now strict when used with refs. Before, it was possible to pass a `ref` having any type when creating an element from a component using this type. If you need to describe components that accept refs, use the `React$AbstractComponent` type instead. 
[Try Flow example](https://flow.org/try/#0JYWwDg9gTgLgBAKjgQwM5wEoFNkGN4BmUEIcARFDvmQNwBQdMAnmFnAArFjoC8cA3gB84AO2QgsALjioYUYCIDmcQQF96dXABs06AOKUsMLFDhYAHsZEATdNjwwAdAGESkEVhEwAPJwjcAPgE6ODhKGxMACgBKYNDQyhgAVygROG8wAIAJLC0tCAAaOAASfhgAC2BURzAuarEJVQBCbwB6TPpQ1RC4HpAYgW7uzQgRWTgAQWl7fBc3Uc8YABUWLF86oL4DLCMTDVbWuABRKGIoaQADSgILuCq4ZlZrFHQLkGBzLGtbtDgIAjgADF8gB3OAANwADI4AIyQ+F0bwTUTiLA8MgAdWgWmsZDCWAIPH41zgPCCJIAZBT8QRHP1oqo4K0AgxcKNxgAhaZUJwTABGsigDlc4AWXnW-lQRW2uygmzgMuMUH2hwA8gBpIpXAm3e6PL4vFBwC6Kky6sYwZAiXBYREclESdFYqA4vHXIkksk0uBUml0mKM5lAA) +* The `$Enum` built-in type annotation is now deprecated. Please use the semantically equivalent `$Keys` type instead. +* Destructuring patterns could previously include missing properties if the resulting binding was unused. This is now an error even when unused. [Try Flow example](https://flow.org/try/#0C4TwDgpgBA8lC8UDeUwC4oGdgCcCWAdgOZQC+A3AFAD01UAojjgPY4YAqAFhDtHplALNULSDlBQABgEdJUQlFCQpMSQBooAQwHMAZlABiAG2YB3KADcADADoAjFdtXKugK4EAxsDzMCUXQAUSGAa0qQYMACUyKRAA) + +New Features: + +* You can now use the built-in type `React$MixedElement` as the sound superclass of all React elements. This is a type alias for `React$Element`. + +Misc: + +* Add `decode` method to `HTMLImageElement` (thanks, @vicapow!) + +Parser: + +* Handle NonOctalDecimalIntegerLiteral +* Remove U+180e (Mongolian vowel separator) from list of valid whitespace code points +* Remove support for legacy octal literals with numeric separators +* Remove support for legacy octal bigints +* Fix various issues related to automatic semicolon insertion (ASI) for class properties + +### 0.99.1 + +Notable bug fixes: + +* Fix bug where well-formed-exports errors were reported for unchecked files + +### 0.99.0 + +Likely to cause new Flow errors: + +* The statics of function types used to be `any` but are now typed as an empty object. +* Recursive calls of named function expressions were previously unchecked, but are now checked. +* `$call` property syntax, deprecated in Flow v0.75, has finally been removed. + +Notable bug fixes: + +* Fix an issue where Flow would not catch certain errors involving React function components with unannotated props. +* Fix React synthetic mouse events for drag, wheel, pointer events to give the specific native event type. (Thanks, @Kiwka!) + +Misc: + +* Improved performance of starting a server from a saved state. + +Parser: + +* Fix parsing of function types inside tuples inside arrow function return types. + +### 0.98.1 + +Notable bug fixes: + +* Do not report bad module uses in unchecked files + +### 0.98.0 + +Likely to cause new Flow errors: + +* Infer `void` before typechecking starts for functions without a `return` statement, lessening the impact of a union typechecking bug (#7322). +* Fix a bug which prevented Flow from asking for required type annotations. +* Turn the `deprecated-utility` lint on by default. +* Two related changes to type refinements to fix unsoundness: + * `mixed` refined to an array produces a read-only array. + * `mixed` refined to an object produces a read-only object. + +New Features: + +* Add the ability to exclude paths included by a previous pattern in a `.flowconfig` (#7317). + +Notable bug fixes: + +* Fix a bug that led IDEs to report all code as uncovered (#7654). +* Fix the `untyped-import` lint rule so that `export [type] * from` triggers it. +* Flow now recognizes refinements against negative number literals. + +Misc: + +* Exclude `deprecated-utility` and `dynamic-export` lints when applying all=setting rules (#7473). 
+* Improve client/server version mismatch behavior so that the newest of the two is preserved, rather than the client version.
+* Preserve exactness of the input type when using `$ObjMap` or `$ObjMapi` (#7642).
+* Minor changes to metadata in the results of `flow type-at-pos --json`.
+* Batch `DidOpen` notifications from the IDE in order to make checking in IDE lazy mode more efficient.
+* When `flow lsp` automatically starts a server, it prefers the lazy mode set in a `.flowconfig` to the lazy mode passed on the CLI.
+* Allow lints to be explicitly set to their defaults (normally redundant lint settings are disallowed).
+* Fix spurious missing annotation errors when the `this` type is used incorrectly.
+* Fix a bug that made `React.Element` behave differently than `React$Element`.
+* Fix an edge case where object property assignments were typechecked incorrectly (#7618).
+* Fix an unsoundness with addition or logical operators when combined with generics (#6671, #7070).
+* Fix an issue which allowed read-only arrays to be written to if the index was of type `any`.
+* Fix a bug which stymied typechecking after try/catch blocks (#7530).
+
+Libdefs:
+
+* Add `document.elementsFromPoint()` (#7540).
+* Add `ConstantSourceNode` (#7543).
+* Remove `React.Suspense` `maxDuration` attribute (#7613).
+
+### 0.97.0
+
+Likely to cause new Flow errors:
+
+* Refining a variable of type `mixed` with `instanceof A` produces type `A` instead of `empty`, which was produced before.
+* Types imported in a `declare module` are no longer automatically exported from that module as well.
+
+New Features:
+
+* #7518 Adds support for LSP function parameter completion (thanks @vicapow)
+
+Notable bug fixes:
+
+* Return a better error message when `flow coverage` is passed an invalid input path.
+* Fixed a bug in which Flow crashed on very long directory paths.
+* Fixed type-at-pos results when reporting the type of a callable object.
+
+Misc:
+
+* Improvements in AST utilities: The differ got improved support in several kinds of type annotations (literals, generic identifiers, `typeof`, tuples and interface types). The mapper got support for qualified identifiers.
+* Introduces a resizable array data structure that is used in union-find.
+* Improved error messages around callable and indexer.
+* Type-at-pos now shows results without evaluating type destructors like object spread, `$Diff`, etc. This should lead to more compact results.
+* Various refactorings in the internal type language and environment.
+
+Library definition improvements:
+
+* Updates in Node definitions. The stream definitions were updated to the latest version, and the readline.createInterface definitions were also updated.
+
+Parser:
+
+* #7471 Adds support for parsing of BigInt (Arbitrary precision integers) (thanks @goodmind)
+
+### 0.96.1
+
+* Object literals with spreads can be described by object types with spreads in the signature verifier/builder.
+
+### 0.96.0
+
+Likely to cause new Flow errors:
+
+* Recently the `Object` and `Function` types changed their meaning from "any function type" to "any
+ type." Accordingly, various `Object` and `Function` annotations that made sense before this
+ change have been updated in various library definitions.
+
+* Various other PRs making improvements in library definitions have been merged in this
+ release. These include core definitions like `Date` and `Object` as well as other DOM and Node
+ definitions.
+
+* We now issue an error when a value that is clearly not a type could be exported as such.
+ +* We now issue an error when a function is imported as a type. + +Notable bug fixes: + +* Some commands are not expected to update server state. But if such a command is cancelled and we + run a recheck before rerunning the command, not updating the server state would make it seem like + that recheck never happened (and lead to spurious future rechecks). This has now been fixed. + +* Fixed node_modules filter for lint warnings, which didn't work on Windows, and didn't respect the + node_resolver_dirnames config option. + +Misc: + +* Results of `batch-coverage` in lazy mode can be misleading, as it does not account for the fact + that the currently checked files might not be the ones the user is querying for. Running + `batch-coverage` in lazy mode is now disallowed. + +* Fixed an issue with `flow lsp` where logs would not be flushed. + +### 0.95.2 + +* The inferred statics object type of `React.createClass({})` will contain `defaultProps: void`, instead of `defaultProps: {||}` (unsealed empty object). +* Bug fix in internal cache mechanism + +### 0.95.1 + +* Added an overload for `JSON.stringify` allowing `mixed` input, which returns `string | void`. Without this, you can't call `JSON.stringify` on a `mixed` value at all, because while Flow does allow refining `mixed` to "not void" (e.g. `x === undefined ? undefined : JSON.stringify(x)`), it does not support refining `mixed` to "not a function" (e.g. imagine you could do `x === undefined || typeof x == 'function' ? undefined : JSON.stringify(x)`). This rolls back some of the more restrictive behavior introduced in v0.95.0, but is still more restrictive and more accurate than in <= v0.94.0. + +### 0.95.0 + +Likely to cause new Flow errors: +* Disallow `undefined` and functions in `JSON.stringify`: `JSON.stringify(undefined)` returns `undefined` instead of `string`. Rather than make it always return `string | void`, or use overloads to return `void` on those inputs, we instead disallow those inputs since they are rarely the intended behavior. (#7447) + +New features: +* `flow batch-coverage`: A new command to compute aggregate coverage for directories and file lists. Instead of producing the coverage at each location, it sums them and reports the per-file percentage and the aggregate percentage. + +Bug fixes: +* Fixed incorrect reporting of signature verification lint errors in unchecked files + +Other improvements: +* #7459 Add type for Symbol.prototype.description (thanks @dnalborczyk) +* #7452 Add types for String.prototype.trimStart/trimEnd (thanks @dnalborczyk) +* #7500 The "kind" of an autocomplete result is now reported over the Language Server Protocol, improving the autocomplete UI (thanks @vicapow) + +### 0.94.0 + +Bug fixes: +* Fixed `dynamic-exports` lint's spurious errors on exported classes and functions +* Handle package.json files that are valid JSON but invalid packages + +Performance: +* Reduce memory usage by filtering suppressed lint errors before formatting the errors for printing +* Quicker responses to cancellation requests + +Many libdef fixes and other improvements from the open source community: +* #3209 Fix autocomplete for generic type aliases (thanks @vkurchatkin!) +* #6750 Remove shadowed generics in `Proxy$traps` (thanks @talbenari1!) +* #6000 Document async function return type (thanks @callumlocke!) +* #7448 Tweaks to built-in http module (thanks @STRML!) +* #4570 Update types for Web Audio API (thanks @fand!) +* #5836 Fix examples in libdefs/creation page (thanks @tomasz-sodzawiczny!) 
+ +Additional lib def improvements: +* Make `current` write-only in `React.Ref` - allowing union types for ref +* Add `setMediaKeys` API to definition of `HTMLMediaElement` +* Make type parameter to `http$Agent` covariant + +### 0.93.0 + +Likely to cause new Flow errors: + +* Removed a constraint involving `any` types and React proptypes for efficiency. This may result in some errors no longer being reported. + +New Features: + +* A new lint (`dynamic-export`) which when enabled will warn when most dynamic types are exported from a file. +* Flow now distinguishes between `any` and `empty` when computing line coverage. `empty` types are colored blue and `any` types red when using the `--color` option. Note that this may cause new expressions to be considered uncovered. + +Notable bug fixes: + +* Fixed a non-termination condition during `this`-substitution. +* Fixed an issue where `inexact-spread` lint errors could appear in the wrong position. + +Many, many libdef fixes and improvements! Many thanks to the open source community for these, and to @nmote and @jbrown215 for reviewing and merging so many of these! + +* #4388 add missing `InputEvent(Listener|Handler|Types|)` (thanks @keithamus!) +* #4664 Fix `IntersectionObserver` constructor definition (thanks @apostolos!) +* #4858 Make `ServiceWorkerMessageEvent` extend `ExtendableEvent` (thanks @keyiiiii!) +* #4772 add indexer property to string lib def (thanks @zacharygolba!) +* #5529 Add `module.builtinModules` to core libdef (thanks @simenB!) +* #5574 fix return parameter for `writable.setDefaultEncoding()` (thanks @dnalborczyk!) +* #5578 add `util.callbackify` to node type def (thanks @dnalborczyk!) +* #5628 Add lib declaration for `BroadcastChannel` (thanks @schmatz!) +* #5866 Add definition for `timingSafeEqual()` (thanks @rolftimmermans!) +* #5988 add `destroy` method to streams (thanks @hiikezoe!) +* #6091 Fix static declarations for `XMLHttpRequest` (thanks @robin-pham!) +* #6339 Fix parent of `AnimationEvent` (thanks @ngyikp!) +* #6367 Add types for `Object.getOwnPropertyDescriptors` (thanks @disnet!) +* #6471 Actualize node's `EventEmitter` API definition (thanks @antongolub!) +* #6535 add `Element.prototype.toggleAttribute` (thanks @keithamus!) +* #6614 Add `TransitionEvent` to dom libdef (thanks @koddsson!) +* #6785 Allow specifying encoding as string in options field of `appendFile`, `appendFileSync` (thanks @cappslock!) +* #6963 Add Audio declaration (thanks @vldvel!) +* #7011 Use more specific type for `navigator.serviceWorker` (thanks @dhui!) +* #7097 Add type definitions for message events (thanks @wachino!) +* #7122 Support for `Uint8Array` (thanks @cakoose!) +* #7144 Updated URL modules definitions for Node.js 10 (thanks @MrFranke!) +* #7146 Fix type definition of Node.js `dns.lookup()` (thanks @shuhei!) +* #7215 fix `https` interfaces (thanks @cakoose!) +* #7225 make `createContextualFragment` return a `DocumentFragment` (thanks @wincent!) +* #7342 add Document.queryCommandSupported (thanks @Eazymov!) +* #7358 Add `oncontextmenu` to `HTMLElement` (thanks @jasonLaster!) +* #7363 Add `MediaDeviceInfo` declaration (thanks @ea167!) +* #7367 Add `userSelect` to CSS declaration (thanks @shubhodeep9!) +* #7368 Fix `fs.promises.readFile` being incorreclty overloaded (thanks @Macil!) +* #7381 add `EventSource` to dom libdef. Likely to cause new errors (thanks @SlIdE42!) +* #7386 fix `XDomainRequest` in bom libdef. Likely to cause new errors (thanks @Mouvedia!) 
+* #7387 Added optional `displayName` property to `React$Context` (thanks @bvaughn!) +* #7405 Basic support for `typeof x === 'symbol'` (thanks @mroch!) +* #7420, #7430 and #7440 Various React improvements (thanks @threepointone!) +* #7423 make `useRef` type non-nullable (thanks @trysound!) +* #7445 add `Stream` type to Node thanks (thanks @goodmind!) + +Misc: + +* Updated our website and GitHub issue template to make it easier for open source users to contribute to Flow! +* Various improvements to the AST, including the differ, typed AST and the AST mapper + +### 0.92.1 + +Notable bug fixes: +* Fixed a bug introduced in v0.92.0 which could cause the server to crash when using an IDE. +* Fixed `module.system.haste.name_reducers` option for Windows (#7419 - thanks [jamesisaac](https://github.com/jamesisaac)) + +### 0.92.0 + +Likely to cause new Flow errors: + +Some fixes to tagged template literals will surface a new set of pre-existing errors! + +New Features: + +This release culminates months of hard work on quality of life improvements for IDE support. +Expect your requests to be faster, and your requests to take a bit less time. + +* Several Flow commands can run in parallel now, i.e. you can still get type definitions while rechecking. +Big props to @glevi for this massive QoL improvement! +* Alongside this change, @glevi also released a new lazy-check mode that leverages `watchman` to reduce the number of checked files. +Learn about it [in the docs](https://flow.org/en/docs/lang/lazy-modes/#toc-using-watchman-lazy-mode). + +Notable bug fixes: + +Thank you to the opensource community for these fixes! + +* #7354 Fix MouseEvent type definitions +* #7262 Update types for WeakMap +* #7100 Add missing crypto.randomFillSync and crypto.randomFill methods +* #7356 Add definitions for new debug hook useDebugValue +* #7352 Rename React hook useImperativeMethods -> useImperativeHandle +* #5665 Fix arity of clearInterval, clearTimeout +* `React.memo` now accepts any kind of `Component` + +### 0.91.0 + +Likely to cause new Flow errors: + +* Better positioning for React error messages. This may move already existing (and suppressed) errors to new positions. + +New Features: + +* The `--lazy-mode` flag was added to all commands that may autostart a server + +Notable bug fixes: + +* Fix a crash when using private class fields (fixes https://github.com/facebook/flow/issues/7355) + +Misc: + +* Various additions and typo fixes on flow.org docs and README (thanks @fschindler, @dominicfraser, @keithamus, and @fterh) +* Misc OCaml tweaks (thanks @rvantonder) +* Huge reduction (~15%) in total memory usage (thanks @nmote) +* Huge reduction in error collation time (thanks @panagosg7) + +## 0.90.0 + +Likely to cause new Flow errors: +* Removed unsafe rule allowing Date instances to be used as a number +* Changed $Shape<> types to reject null and void as subtypes +* Removed unsafe refinement from mixed to a function type using typeof + +Pull Requests: +* #7290 Add support for Path2D constructor arguments (thanks @zpao!) +* #7221 use package that works with 7.x in babel doc (thanks @rob2d!) +* #7231 Improve type of 'mkdir' and 'mkdirSync' (thanks @mrtnzlml!) +* #7278 fix: update toc links in hoc pages (thanks @evenchange4!) 
+ +Notable bug fixes: +* Fixed issue where errors involving $Shape<> types were positioned poorly + +Parser: +* Fixed decoding of html entities at beginning of JSX children +* Fixed offset calculation in estree output to account for multibyte characters + +### 0.89.0 + +Likely to cause new Flow errors: +* Big revamp to React typing with the goal of adding support for `React.forwardRef` and better typing higher-order components. [Docs are available here](https://flow.org/en/docs/react/hoc). + +New Features: +* New `deprecated-utility` lint complains about deprecated Flow types. To start off, `$Supertype` and `$Subtype` are now deprecated. They were unsound and can usually be replaced using shapes or bounded generics. +* [`React.AbstractComponent`](https://flow.org/en/docs/react/types/#toc-react-abstractcomponent) makes it [way easier to type React higher-order components](https://flow.org/en/docs/react/hoc). +* [`React.Config`](https://flow.org/en/docs/react/types/#toc-react-config) is also intended to help type React higher-order components. + +Notable bug fixes: +* `flow coverage --color` handles multi-byte characters better +* `flow coverage` now supports `--strip-root` + +Misc: +* We've deleted `flow gen-flow-files` due to bitrot. We do plan on building a better version in the future. +* Various libdef updates. Thanks for all the PRs! +* `{}` now consistently represents an unsealed object. You can read more in the [documentation](https://flow.org/en/docs/types/objects/#toc-unsealed-objects). [example](https://flow.org/try/#0GYVwdgxgLglg9mABFApgZygCgN7DnALkTBAFsAjFAJwF8BKbGgKCdQx3oG5EB6HxAHJxE1KnCqIAhlEQAOWUwgIMiAB5Fc+IiQrUaiALyJG3PogCiVMROlzZQA) + +### 0.88.0 + +Likely to cause new Flow errors: + +* Made `Function` and `Object` types be aliases for `any`. They were always unsafe types, just like `any`, but they had peculiar behavior. This change revealed places where they were handled improperly within Flow, and ended up surfacing type errors that were previously missed. + +New Features: + +* Added the experimental Watchman lazy mode (`flow start --lazy-mode watchman`). This improves the lazy mode experience for repositories which use Mercurial and Watchman. We will document it more when/if it proves itself. +* Added `flow config check` which validates the `.flowconfig`. + +Misc: + +* Made miscellaneous improvements to the AST differ, which improves the output of global rename. +* Made `.flowconfig` parsing less strict (in particular, if the `--ignore-version` flag is passed, do not fatal on unrecognized config options). +* Performed a code cleanup in type normalization that caused some types in `type-at-pos` to be displayed differently. +* Removed redundant information in stored ASTs resulting in a modest reduction in memory usage. +* Flow assigns long string literals type `string`, rather than the singleton type of that literal. Now, this fact is surfaced in error messages. +* Fixed stack overflows: + * When checking a large number of files. + * When a large number of errors are present. + +Libdefs: + +* Added `React.Suspense`. +* Removed `React.useMutationEffect` hook. + + +### 0.87.0 + +Likely to cause new Flow errors: +* Fixed an error in the `React.createRef` definition. Refs are for any type of value, +not just React elements. + +New Features: +* Added support for subcommands to the CLI and a `flow config find` command that +finds the .flowconfig governing path. + +Notable bug fixes: +* Fixed file_sig for deep destructured requires. 
+* Fixed a stack overflow in the `flow ls --json` command. +* Fixed a crash when the module reference prefix is used on an untyped module. + +Misc: +* Removed the `flow port` command. +* Various improvements to the AST differ. +* Made progress in shifting from concrete to abstracting locations, by + - functorizing a number of modules over their use of locations, and + - updating the core typechecking logic to operate solely on structures containing + abstract locations. +* Removed the redundant Expression.Member.computed field from the AST. +* Allow configuring path to node in runtests.sh. +* Refactored `JSX.frag_closingElement` of the AST to not be an option. +* Fixed error localization in `import type` and `import typeof` with default. +* Added the location of function signatures to the AST. +* Removed prototype members when autocompleting JSX props. +* Cleaned-up handling of the mixed case from LookupT. +* Optimized the case of enumerations when used as keys. + +Library definition changes: +* Made geolocation `PositionOptions` optional. +* Added flow definitions for React hooks. +* Added a `calculateChangedBits` parameter to the `React.createContext` definition. +* Added `React.ConcurrentMode` and `React.StrictMode` definitions. +* Added `React.lazy` and `React.memo` definitions. + +Parser: +* Refactored flowconfig option parsing into a list of parsers. + +### 0.86.0 + +Likely to cause new Flow errors: +* New errors may arise from generic definitions which lack the necessary annotations. Flow does not infer generic types, and the types it used to infer in their place occasionally masked errors. The types it now infers are still not generic, but will mask fewer errors. +* Fixed bug in union type checking which caused Flow to miss some errors when used in conjunction with generics and certain [utility types](https://flow.org/en/docs/types/utilities/). +* Improvements to constraints involving `any` may result in new errors. + +New Features: +* `type-at-pos` can reconstruct spread objects when evaluation of the spread is not possible. +* `type-at-pos` now supports implicit instantiation with `_`. +* Added core types `$ReadOnlyMap`, `$ReadOnlyWeakMap`, `$ReadOnlySet`, and `$ReadOnlyWeakSet`. These types and their mutable counterparts mostly follow the pattern of `$ReadOnlyArray` and `Array`: the read-only parent class provides non-mutating methods, and the usual mutable class extends it. **Unlike `$ReadOnlyArray`, the new types are _invariant_ in their type parameters.** +* Added the `React.StrictMode` type. +* Added the [`flowtest` package](https://github.com/facebook/flow/tree/master/packages/flowtest). `flowtest` is a CLI tool for running tests on Flow types. +* Added the `sharedmemory.heap_size` option. + +Notable bug fixes: +* Restructured file signatures to support overloading in exports. +* Allow named exports in exact `module.exports` objects. +* Fixed forward references for `declare function`. +* Various bug fixes to the type normalizer and to `type-at-pos`. +* `flow suggest` no longer outputs `<>`. +* Imported symbols should no longer appear `Remote` in `type-at-pos`. + +Misc: +* Improved formatting in JavaScript output. +* Improved the way the element type of an array is determined. +* Various improvements to the AST differ. +* `WeakMap` keys must now be objects. +* Improved profiling for error collation and formatting. 
+* When a union lower bound flows into a union upper bound, if both unions are enums, we use the underlying set representation to handle this in O(n log n) time instead of quadratic time. +* The type normalizer properly represents utility types instead of treating them as generics. +* The type normalizer provides more accurate information about generics. +* Flowing a string lower bound into a union upper bound occurs in O(log n) instead of O(n) when the union is an enum. +* CJS modules' namespace objects are now treated as covariant. +* The type normalizer reconstructs literal types more precisely when `preserve_inferred_literal_types` is set. This does not change the behavior of `type-at-pos`. +* Deleted the `experimental.cancelable_rechecks` option. + +Parser: +* Removed the deprecated `expression` field from `Function` nodes in the AST. +* Enabled some tail call optimizations in `flow_parser.js` which should cause it to stack overflow in fewer cases. +* The layout generator no longer prints empty statements as `{}` instead of `;` in pretty mode. +* Allow anonymous function parameter types inside generics inside arrow function return types. For example, we disallow `var x = (): (string) => number => 123` because the first `=>` is ambiguous. However, `var x = (): T<(string) => number> => 123` is not ambiguous and we no longer disallow it. + +### 0.85.0 + +Likely to cause new Flow errors: + +* Fixed an issue that caused missing annotations errors to be suppressed. + + Please [read the full post](https://medium.com/flow-type/asking-for-required-annotations-64d4f9c1edf8) + for more information on what changed and tips on dealing with the new errors. + +### 0.84.0 + +Likely to cause new Flow errors: + +* Earlier, type constraints between `any` and other types would be dropped. Instead, they are now + propagated. In some cases, this unblocks further constraint solving and helps find more errors. +* When a variable is equality-checked with a literal, the variable's type is refined. Earlier, if + the variable's type was incompatible with the literal's type, it would silently be refined to + `empty`, whereas now this is an error. + +New Features: + +* Added support for wildcard (`_`) type arguments to function / constructor calls. This is + especially useful when some type arguments are sufficient to pin down the type of the result; the + others can simply be `_`. + +Notable bug fixes: + +* Fixed a case that would crash `get-def` and `find-refs` +* Fixed a bug with unreachability analysis for ternary expressions +* Fixed a bug with refinements merging at the end of switch statements + +Misc: + +* Fixed various AST printing / layout bugs +* Made various improvements to the AST differ +* Refactored parts of `get-def` and `find-refs` to reuse code +* Made progress on abstracting locations in the core type inference engine +* Made progress on module signature verification and generation +* Merged PRs that improve type declarations: e.g., `getBoundingClient` returns a `DOMRect` instead of a `ClientRect`. 
+ +Parser: + +* Support for `...` to indicate inexactness in object types + +### 0.83.0 + +Likely to cause new Flow errors: +* Fixed a bug where type precision was lost at module boundaries + +Notable bug fixes: +* Prevented an exponential blowup when union types flow into "maybe" (`?T`) or "optional" (essentially `T | void`) types +* Allowed `{p:T}` to be a subtype of `$Shape<{+p:T}>` + +Misc: +* Fixed exception when using --traces +* Changed `--verbose` to not log while loading flowlibs by default; pass `--verbose-flowlibs` to override +* Added ability for LSP clients to cancel previously-sent commands +* Improved location of diagnostics via LSP while typing +* Fixed LSP textDocument/definition response when there are no results (prevents bogus jump-to-definition inside comments, for example) +* Limited number of errors returned via LSP to 200, to improve Nuclide and Visual Studio performance +* Fixed an exception when attempting to focus a file that Flow ignored + +Library definition changes: +* Added `ResizeObserver` declarations +* Added missing `net$Server.listening` property +* Added `process.cpuUsage()` + +Parser: +* Fixed `export default async function` to be a `FunctionDeclaration` not a `FunctionExpression` +* Made instance properties named 'constructor' a syntax error + ### 0.82.0 Likely to cause new Flow errors: diff --git a/Makefile b/Makefile index a44f2c92a10..abed52c3781 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -# Copyright (c) 2013-present, Facebook, Inc. +# Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. ################################################################################ @@ -8,8 +8,13 @@ EXTRA_INCLUDE_PATHS= EXTRA_LIB_PATHS= EXTRA_LIBS= -INTERNAL_MODULES=hack/stubs src/stubs +INTERNAL_MODULES=\ + hack/stubs/logging\ + hack/stubs/logging/common\ + src/stubs INTERNAL_NATIVE_C_FILES= +INTERNAL_BUILD_FLAGS= +INTERNAL_FLAGS= ifeq ($(OS), Windows_NT) UNAME_S=Windows @@ -67,31 +72,41 @@ endif MODULES=\ src/commands\ src/commands/config\ + src/commands/options\ src/common\ src/common/audit\ src/common/build_id\ src/common/errors\ + src/common/exit_status\ src/common/lints\ + src/common/logging_utils\ src/common/lwt\ src/common/modulename\ src/common/monad\ src/common/profiling\ + src/common/semver\ src/common/span\ src/common/tarjan\ src/common/transaction\ src/common/ty\ src/common/utils\ + src/common/utils/loc_utils\ src/common/xx\ src/flowlib\ + src/lsp\ src/monitor\ src/monitor/connections\ src/monitor/logger\ + src/monitor/rpc\ + src/monitor/status\ src/monitor/utils\ src/parser\ src/parser_utils\ + src/parser_utils/aloc\ src/parser_utils/output\ src/parser_utils/output/printers\ src/parsing\ + src/procs\ src/server\ src/server/command_handler\ src/server/env\ @@ -105,21 +120,27 @@ MODULES=\ src/server/server_files\ src/server/server_utils\ src/server/shmem\ + src/server/watchman_expression_terms\ src/services/autocomplete\ + src/services/get_def\ src/services/inference\ src/services/inference/module\ src/services/flowFileGen\ - src/services/port\ src/services/saved_state\ src/services/type_info\ src/state/heaps/context\ src/state/heaps/module\ src/state/heaps/parsing\ + src/state/heaps/parsing/exceptions\ src/state/locals/module\ + src/state/readers\ src/third-party/lz4\ src/third-party/ocaml-sourcemaps/src\ src/third-party/ocaml-vlq/src\ src/typing\ + src/typing/coverage_response\ + src/typing/errors\ + src/typing/polarity\ hack/dfind\ hack/find\ hack/globals\ @@ -130,11 +151,22 @@ MODULES=\ hack/socket\ 
hack/third-party/avl\ hack/third-party/core\ - hack/utils\ + hack/utils/cgroup\ + hack/utils/core\ + hack/utils/buffered_line_reader\ hack/utils/build_mode/prod\ hack/utils/collections\ hack/utils/disk\ + hack/utils/file_content\ + hack/utils/file_url\ hack/utils/hh_json\ + hack/utils/http_lite\ + hack/utils/jsonrpc\ + hack/utils/lsp\ + hack/utils/marshal_tools\ + hack/utils/opaque_digest\ + hack/utils/procfs\ + hack/utils/string\ hack/utils/sys\ hack/watchman\ $(INOTIFY)\ @@ -145,10 +177,10 @@ NATIVE_C_FILES=\ $(INOTIFY_STUBS)\ $(FSNOTIFY_STUBS)\ src/common/xx/xx_stubs.c\ + src/services/saved_state/saved_state_compression_stubs.c\ hack/heap/hh_assert.c\ hack/heap/hh_shared.c\ - hack/heap/hh_shared_sqlite.c\ - hack/utils/get_build_id.c\ + hack/utils/core/get_build_id.c\ hack/utils/sys/files.c\ hack/utils/sys/gc_profiling.c\ hack/utils/sys/getrusage.c\ @@ -169,7 +201,8 @@ FINDLIB_PACKAGES=\ lwt_ppx\ unix\ str\ - bigarray + bigarray\ + ppx_let NATIVE_LIBRARIES=\ pthread\ @@ -182,8 +215,21 @@ COPIED_PRELUDE=\ $(foreach lib,$(wildcard prelude/*.js),_build/$(lib)) JS_STUBS=\ + +dtoa/dtoa_stubs.js\ $(wildcard js/*.js) +OUNIT_TESTS=\ + src/commands/config/__tests__/command_config_tests.native\ + src/common/lwt/__tests__/lwt_tests.native\ + src/common/ty/__tests__/ty_tests.native\ + src/common/utils/__tests__/common_utils_tests.native\ + src/common/semver/__tests__/semver_tests.native\ + src/parser/__tests__/parser_tests.native\ + src/parser_utils/__tests__/parser_utils_tests.native\ + src/parser_utils/output/__tests__/parser_utils_output_tests.native\ + src/parser_utils/output/printers/__tests__/parser_utils_output_printers_tests.native\ + src/server/find_refs/__tests__/find_refs_tests.native + # src/typing/__tests__/typing_tests.native ################################################################################ # Rules # @@ -193,12 +239,13 @@ NATIVE_C_DIRS=$(patsubst %/,%,$(sort $(dir $(NATIVE_C_FILES)))) ALL_HEADER_FILES=$(addprefix _build/,$(shell find $(NATIVE_C_DIRS) -name '*.h')) ALL_HEADER_FILES+=_build/src/third-party/lz4/xxhash.c NATIVE_OBJECT_FILES=$(patsubst %.c,%.o,$(NATIVE_C_FILES)) -NATIVE_OBJECT_FILES+=hack/utils/get_build_id.gen.o +NATIVE_OBJECT_FILES+=hack/utils/core/get_build_id.gen.o BUILT_C_DIRS=$(addprefix _build/,$(NATIVE_C_DIRS)) BUILT_C_FILES=$(addprefix _build/,$(NATIVE_C_FILES)) BUILT_OBJECT_FILES=$(addprefix _build/,$(NATIVE_OBJECT_FILES)) +BUILT_OUNIT_TESTS=$(addprefix _build/,$(OUNIT_TESTS)) -CC_FLAGS=-DNO_SQLITE3 -DNO_HHVM +CC_FLAGS=-DNO_SQLITE3 CC_FLAGS += $(EXTRA_CC_FLAGS) CC_OPTS=$(foreach flag, $(CC_FLAGS), -ccopt $(flag)) INCLUDE_OPTS=$(foreach dir,$(MODULES),-I $(dir)) @@ -222,35 +269,35 @@ all-homebrew: export OPAMROOT="$(shell mktemp -d 2> /dev/null || mktemp -d -t opam)"; \ export OPAMYES="1"; \ export FLOW_RELEASE="1"; \ - opam init --no-setup && \ - opam pin add -n flowtype . && \ - opam config exec -- opam install flowtype --deps-only && \ - opam config exec -- make + opam init --bare --no-setup --disable-sandboxing && \ + rm -rf _opam && \ + opam switch create . --deps-only && \ + opam exec -- make clean: ocamlbuild -clean rm -rf bin - rm -f hack/utils/get_build_id.gen.c + rm -f hack/utils/core/get_build_id.gen.c rm -f flow.odocl -build-flow: _build/scripts/ppx_gen_flowlibs.native $(BUILT_OBJECT_FILES) $(COPIED_FLOWLIB) $(COPIED_PRELUDE) +build-flow: _build/scripts/ppx_gen_flowlibs.exe $(BUILT_OBJECT_FILES) $(COPIED_FLOWLIB) $(COPIED_PRELUDE) $(INTERNAL_BUILD_FLAGS) # Both lwt and lwt_ppx provide ppx stuff. 
Fixed in lwt 4.0.0 # https://github.com/ocsigen/lwt/issues/453 export OCAMLFIND_IGNORE_DUPS_IN="$(shell ocamlfind query lwt)"; \ - $(OCB) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ + $(OCB) $(INTERNAL_FLAGS) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ -lflags "$(LINKER_FLAGS)" \ $(RELEASE_TAGS) \ src/flow.native -build-flow-debug: _build/scripts/ppx_gen_flowlibs.native $(BUILT_OBJECT_FILES) $(COPIED_FLOWLIB) $(COPIED_PRELUDE) - $(OCB) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ +build-flow-debug: _build/scripts/ppx_gen_flowlibs.exe $(BUILT_OBJECT_FILES) $(COPIED_FLOWLIB) $(COPIED_PRELUDE) $(INTERNAL_BUILD_FLAGS) + $(OCB) $(INTERNAL_FLAGS) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ -lflags -custom -lflags "$(LINKER_FLAGS)" \ src/flow.d.byte mkdir -p bin cp _build/src/flow.d.byte bin/flow$(EXE) testgen: build-flow - $(OCB) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ + $(OCB) $(INTERNAL_FLAGS) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ -lflags "$(LINKER_FLAGS)" \ $(RELEASE_TAGS) \ testgen/flowtestgen.native @@ -269,10 +316,10 @@ $(BUILT_C_FILES): _build/%.c: %.c $(BUILT_OBJECT_FILES): %.o: %.c $(ALL_HEADER_FILES) cd $(dir $@) && ocamlopt $(EXTRA_INCLUDE_OPTS) $(CC_OPTS) -c $(notdir $<) -hack/utils/get_build_id.gen.c: FORCE scripts/script_utils.ml scripts/gen_build_id.ml +hack/utils/core/get_build_id.gen.c: FORCE scripts/script_utils.ml scripts/gen_build_id.ml ocaml -safe-string -I scripts -w -3 unix.cma scripts/gen_build_id.ml $@ -_build/hack/utils/get_build_id.gen.c: FORCE scripts/script_utils.ml scripts/gen_build_id.ml +_build/hack/utils/core/get_build_id.gen.c: FORCE scripts/script_utils.ml scripts/gen_build_id.ml ocaml -safe-string -I scripts -w -3 unix.cma scripts/gen_build_id.ml $@ $(COPIED_FLOWLIB): _build/%.js: %.js @@ -285,13 +332,46 @@ $(COPIED_PRELUDE): _build/%.js: %.js cp $< $@ rm -rf _build/src/prelude -_build/scripts/ppx_gen_flowlibs.native: scripts/ppx_gen_flowlibs.ml - $(OCB) -I scripts scripts/ppx_gen_flowlibs.native +_build/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.cmxa: scripts/script_utils.ml scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.ml + $(OCB) -I scripts -tag linkall -pkg unix scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.cmxa + +_build/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.cmxa: scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.ml + $(OCB) -I scripts -tag linkall -pkg unix scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.cmxa + +_build/scripts/ppx_gen_flowlibs.exe: _build/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.cmxa _build/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.cmxa + ocamlfind ocamlopt -linkpkg -linkall \ + -package ocaml-migrate-parsetree,unix \ + -I _build/scripts/ppx_gen_flowlibs \ + _build/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.cmxa \ + _build/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.cmxa \ + -o "$@" bin/flow$(EXE): build-flow mkdir -p $(@D) cp _build/src/flow.native $@ +$(BUILT_OUNIT_TESTS): $(BUILT_OBJECT_FILES) FORCE + export OCAMLFIND_IGNORE_DUPS_IN="$(shell ocamlfind query lwt)"; \ + $(OCB) $(INTERNAL_FLAGS) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ + -I $(patsubst _build/%,%,$(@D)) \ + -lflags "$(LINKER_FLAGS)" \ + $(patsubst _build/%,%,$@) + +.PHONY: build-ounit-tests +build-ounit-tests: $(BUILT_OBJECT_FILES) FORCE + export OCAMLFIND_IGNORE_DUPS_IN="$(shell ocamlfind query lwt)"; \ + $(OCB) $(INTERNAL_FLAGS) $(INCLUDE_OPTS) $(FINDLIB_OPTS) \ + $(foreach dir,$(dir $(OUNIT_TESTS)),-I $(dir)) \ + -lflags "$(LINKER_FLAGS)" \ + $(OUNIT_TESTS) + +.PHONY: ounit-tests +ounit-tests: build-ounit-tests + @for cmd in $(BUILT_OUNIT_TESTS); do \ + echo "Running $$cmd:"; \ + 
"$$cmd"; \ + done + do-test: ./runtests.sh bin/flow$(EXE) bin/flow$(EXE) check packages/flow-dev-tools @@ -307,7 +387,7 @@ test-tool: bin/flow$(EXE) test: bin/flow$(EXE) ${MAKE} do-test -js: _build/scripts/ppx_gen_flowlibs.native $(BUILT_OBJECT_FILES) $(COPIED_FLOWLIB) +js: _build/scripts/ppx_gen_flowlibs.exe $(BUILT_OBJECT_FILES) $(COPIED_FLOWLIB) mkdir -p bin # NOTE: temporarily disabling warning 31 because # hack/third-party/core/result.ml and the opam `result` module both define @@ -356,6 +436,9 @@ FORCE: .PHONY: all js build-flow build-flow-debug FORCE +# Don't run in parallel because of https://github.com/ocaml/ocamlbuild/issues/300 +.NOTPARALLEL: + # This rule runs if any .ml or .mli file has been touched. It recursively calls # ocamldep to figure out all the modules that we use to build src/flow.ml flow.odocl: $(shell find . -name "*.ml" -o -name "*.mli") diff --git a/README.md b/README.md index 8bc2e6b1215..5601e67200e 100644 --- a/README.md +++ b/README.md @@ -1,177 +1,82 @@ -# Flow [![Build Status](https://circleci.com/gh/facebook/flow/tree/master.svg?style=shield)](https://circleci.com/gh/facebook/flow/tree/master) [![Windows Build Status](https://ci.appveyor.com/api/projects/status/thyvx6i5nixtoocm/branch/master?svg=true)](https://ci.appveyor.com/project/Facebook/flow/branch/master) +# Flow [![Build Status](https://circleci.com/gh/facebook/flow/tree/master.svg?style=shield)](https://circleci.com/gh/facebook/flow/tree/master) [![Windows Build Status](https://ci.appveyor.com/api/projects/status/thyvx6i5nixtoocm/branch/master?svg=true)](https://ci.appveyor.com/project/Facebook/flow/branch/master) [![Join the chat at https://discordapp.com/invite/8ezwRUK](https://img.shields.io/discord/539606376339734558.svg?label=discord&logo=discord&logoColor=white)](https://discordapp.com/invite/8ezwRUK) Flow is a static typechecker for JavaScript. To find out more about Flow, check out [flow.org](https://flow.org/). For a background on the project, please read [this overview](https://flow.org/en/docs/lang/). +## Contents + +- [Requirements](#requirements) +- [Using Flow](#using-flow) +- [Using Flow's parser from JavaScript](#using-flows-parser-from-javascript) +- [Building Flow from source](#building-flow-from-source) +- [Join the Flow community](#join-the-flow-community) +- [License](#license) + + ## Requirements Flow works with: -* Mac OS X +* macOS * Linux (64-bit) * Windows (64-bit, Windows 10 recommended) There are [binary distributions](https://github.com/facebook/flow/releases) for each of these platforms and you can also build it from source on any of them as well. -## Installing Flow - -Flow is simple to install: all you need is the `flow` binary on your PATH and you're good to go. - - -### Installing Flow Per Project - -The recommended way to install Flow is via the [`flow-bin`](https://www.npmjs.com/package/flow-bin) `npm` package. Adding `flow-bin` to your project's `package.json`: - -- provides a smoother upgrade experience, since the correct version of Flow is automatically used based on the revision you check out -- installs Flow as part of your existing `npm install` workflow -- lets you use different versions of Flow on different projects - -``` -npm install --save-dev flow-bin -node_modules/.bin/flow -``` - -### Installing Flow Globally - -Although not recommended, you can also install Flow globally (for example, perhaps you don't use `npm` or `package.json`). 
- -The best way to install globally is via `flow-bin`: - -``` -npm install -g flow-bin -flow # make sure `npm bin -g` is on your path -``` - -On Mac OS X, you can install Flow via the [Homebrew](http://brew.sh/) package manager: - -``` -brew update -brew install flow -``` - -You can also build and install Flow via the OCaml [OPAM](https://opam.ocaml.org) package manager. Since Flow has some non-OCaml dependencies, you need to use the [`depext`](https://opam.ocaml.org/doc/FAQ.html#Somepackagefailduringcompilationcomplainingaboutmissingdependenciesquotm4quotquotlibgtkquotetc) package like so: - -``` -opam install depext -opam depext --install flowtype -``` - -If you don't have a new enough version of OCaml to compile Flow, you can also use OPAM to bootstrap a modern version. Install OPAM via the [binary packages](http://opam.ocaml.org/doc/Install.html#InstallOPAMin2minutes) for your operating system and run: - -``` -opam init --comp=4.05.0 -opam install flowtype -eval `opam config env` -flow --help -``` - +## Using Flow -## Getting started +Check out the [installation instructions](https://flow.org/en/docs/install/), and then [how to get started](https://flow.org/en/docs/usage/). -Getting started with flow is super easy. - -- Initialize Flow by running the following command in the root of your project -``` -flow init -``` - -- Add the following to the top of all the files you want to typecheck -``` javascript -/* @flow */ -``` - -- Run and see the magic happen -``` -flow check -``` - -More thorough documentation and many examples can be found at [flow.org](https://flow.org/). - -## Building Flow - -Flow is written in OCaml (OCaml 4.05.0 or higher is required). You can install OCaml on Mac OS X and Linux by following the instructions at [ocaml.org](https://ocaml.org/docs/install.html). - -For example, on Ubuntu 16.04 and similar systems: - -``` -sudo apt-get install opam -opam init --comp 4.05.0 -``` - -On OS X, using the [brew package manager](http://brew.sh/): - -``` -brew install opam -opam init --comp 4.05.0 -``` - -Then, restart your shell and install these additional libraries: - -``` -opam update -opam pin add flowtype . -n -opam install --deps-only flowtype -``` - -Once you have these dependencies, building Flow just requires running - -``` -make -``` +## Using Flow's parser from JavaScript -This produces a `bin` folder containing the `flow` binary. +While Flow is written in OCaml, its parser is available as a compiled-to-JavaScript module published to npm, named [flow-parser](https://www.npmjs.com/package/flow-parser). **Most end users of Flow +will not need to use this parser directly**, but JavaScript packages which make use of parsing +Flow-typed JavaScript can use this to generate Flow's syntax tree with annotated types attached. -In order to make the flow.js file, you first need to install js_of_ocaml: +## Building Flow from source -``` -opam install -y js_of_ocaml -``` +Flow is written in OCaml (OCaml 4.07.1 is required). -After that, making flow.js is easy: +1. Install [`opam`](http://opam.ocaml.org): -``` -make js -``` + - Mac: `brew install opam` + - Debian: `sudo apt-get install opam` + - Other Linux: see [opam docs](http://opam.ocaml.org/doc/Install.html) + - Windows: see [OCaml for Windows docs](https://fdopen.github.io/opam-repository-mingw/installation/) -The new `flow.js` file will also live in the `bin` folder. +2. Initialize `opam`: -*Note: at this time, the OCaml dependency prevents us from adding Flow to [npm](http://npmjs.org). 
Try [flow-bin](https://www.npmjs.org/package/flow-bin) if you need a npm binary wrapper.* + ```sh + opam init + ``` -Flow can also compile its parser to JavaScript. [Read how here](src/parser/README.md). +3. Install OCaml and Flow's dependencies: -## Building Flow on Windows + ```sh + # from within this git checkout + opam switch create . --deps-only -y + ``` -This is a little more complicated. Here is a process that works, though it probably can be simplified. +4. Build the `flow` binary: -The general idea is that we build in Cygwin, targeting mingw. This gives us a binary that works even outside of Cygwin. + ```sh + eval $(opam env) + make + ``` -### Install Cygwin -1. Install Cygwin 64bit from https://cygwin.com/install.html -2. In powershell, run `iex ((new-object net.webclient).DownloadString("https://raw.githubusercontent.com/ocaml/ocaml-ci-scripts/master/appveyor-install.ps1"))` which will likely run a cygwin setup installer with a bunch of cygwin packages and stuff. This helps make sure that every package that opam needs is available. + This produces the `bin/flow` binary. -### Install Opam -1. Open the cygwin64 terminal -2. Download opam with `curl -fsSL -o opam64.tar.xz https://github.com/fdopen/opam-repository-mingw/releases/download/0.0.0.1/opam64.tar.xz` -3. `tar -xf opam64.tar.xz` -4. `cd opam64` -5. Install opam `./install.sh` -6. Initialize opam to point to a mingw fork: `opam init -a default "https://github.com/fdopen/opam-repository-mingw.git" --comp "4.05.0+mingw64c" --switch "4.05.0+mingw64c"` -7. Make sure opam stuff is in your path: ```eval `opam config env` ``` +5. Build `flow.js` (optional): -### Install Flow -1. Clone flow: `git clone https://github.com/facebook/flow.git` -2. `cd flow` -3. Tell opam to use this directory as the flowtype project: `opam pin add flowtype . -n` -4. Install system dependencies `opam depext -u flowtype` -5. Install Flow's dependencies `opam install flowtype --deps-only` -7. Finally, build Flow: `make all` + ```sh + opam install -y js_of_ocaml.3.4.0 + make js + ``` -## Using Flow's parser from JavaScript + This produces `bin/flow.js`. -While Flow is written in OCaml, its parser is available as a compiled-to-JavaScript module published to npm, named [flow-parser](https://www.npmjs.com/package/flow-parser). **Most end users of Flow -will not need to use this parser directly** (and should install [flow-bin](https://www.npmjs.org/package/flow-bin) from npm above), but JavaScript packages which make use of parsing -Flow-typed JavaScript can use this to generate Flow's syntax tree with annotated types attached. + The Flow parser can also be compiled to JavaScript. [Read how here](src/parser/README.md). ## Running the tests @@ -185,6 +90,7 @@ For example: `bash runtests.sh bin/flow class | grep -v 'SKIP'` ## Join the Flow community * Website: [https://flow.org](https://flow.org/) +* Discord: https://discord.gg/8ezwRUK * irc: #flowtype on Freenode * Twitter: follow [@flowtype](https://twitter.com/flowtype) and [#flowtype](https://twitter.com/hashtag/flowtype) to keep up with the latest Flow news. 
* Stack Overflow: Ask a question with the [flowtype tag](http://stackoverflow.com/questions/tagged/flowtype) diff --git a/_tags b/_tags index e5487cd8418..8249ad1c155 100644 --- a/_tags +++ b/_tags @@ -1,12 +1,13 @@ -true: package(core_kernel), package(ppx_deriving), package(dtoa), package(wtf8) +true: package(core_kernel), package(ppx_deriving), package(ppx_deriving.eq), package(dtoa), package(wtf8), package(visitors.runtime) +: package(ocaml-migrate-parsetree) <**/*.ml*>: ocaml, warn_A, warn(-4-6-29-35-44-48-50), warn_error_A, safe_string -: unsafe_string +<**/__tests__/**>: package(oUnit) : warn(-27-34) : warn(-41) : warn(-3-27) -: ppx(scripts/ppx_gen_flowlibs.native lib/ prelude/) +: ppx(scripts/ppx_gen_flowlibs.exe --as-ppx -flowlib lib/ -prelude prelude/) : warn(-39) -: package(visitors.ppx) +: package(visitors.ppx), package(ppx_deriving.show) <**/node_modules/**>: -traverse <_obuild>: -traverse : -traverse diff --git a/appveyor.yml b/appveyor.yml index 028c84eda52..cc8de4d7000 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -4,10 +4,11 @@ os: Visual Studio 2015 platform: x64 environment: matrix: - - OPAM_SWITCH: 4.05.0+mingw64c + - OPAM_SWITCH: 4.07.1+mingw64c FORK_USER: ocaml FORK_BRANCH: master CYG_ROOT: C:\cygwin64 + PACKAGE: flowtype-ci TESTS: false INSTALL: false DEPOPTS: false @@ -41,4 +42,4 @@ deploy: secure: f1YgYz9csZ8QokY6+aXF10toZtXlFxicbm7uTcylqq+ILoRE6y5GUaf1TfoUalx5 artifact: /.*\.zip/ on: - appveyor_repo_tag: true + APPVEYOR_REPO_TAG: true diff --git a/dune b/dune new file mode 100644 index 00000000000..77e2e07dd28 --- /dev/null +++ b/dune @@ -0,0 +1,6 @@ +(env + (_ + (flags (:standard -w @a-4-6-20-29-35-41-42-44-45-48-50 \ -strict-sequence)) + (c_flags -DNO_HHVM -DNO_SQLITE3) + ) +) diff --git a/dune-project b/dune-project new file mode 100644 index 00000000000..6aae99ad17a --- /dev/null +++ b/dune-project @@ -0,0 +1 @@ +(lang dune 1.9) diff --git a/dune-workspace b/dune-workspace new file mode 100644 index 00000000000..f6a3b140f7f --- /dev/null +++ b/dune-workspace @@ -0,0 +1,15 @@ +(lang dune 1.9) + +; hacky solution to make hack/heap able to find Flow's vendored lz4.h. +; for the linker to find the implementation, src/flow.exe depends on the lz4 +; dune library even though it doesn't directly use it. 
+(env + (_ + (env-vars + (EXTRA_INCLUDE_PATHS src/third-party/lz4) + (EXTRA_LIB_PATHS "") + (EXTRA_NATIVE_LIBRARIES "") + (EXTRA_LINK_OPTS "") + ) + ) +) diff --git a/hack/.ocamlformat b/hack/.ocamlformat new file mode 100644 index 00000000000..737b96fe082 --- /dev/null +++ b/hack/.ocamlformat @@ -0,0 +1,23 @@ +# -*- conf -*- + +break-cases = all +break-fun-decl = smart +break-infix = fit-or-vertical +break-separators = after-and-docked +break-sequences = true +break-string-literals = never +cases-exp-indent = 2 +disambiguate-non-breaking-match = true +exp-grouping = preserve +field-space = tight-decl +if-then-else = k-r +indicate-nested-or-patterns = unsafe-no +leading-nested-match-parens = true +let-open = short +parens-tuple-patterns = always +sequence-blank-line = preserve-one +sequence-style = terminator +space-around-records = true +space-around-variants = true +type-decl = sparse +wrap-fun-args = false diff --git a/hack/_tags b/hack/_tags index 7ae3dfc1155..34445693435 100644 --- a/hack/_tags +++ b/hack/_tags @@ -9,11 +9,12 @@ true: package(unix),package(str),package(bigarray),package(ppx_deriving.std) : warn(-27-34) : warn(-27-45) : warn(-27) +: package(ocaml-migrate-parsetree) : -traverse : warn(-27) : warn(-41-45) : warn(-27) : warn(-32-41) : warn(-27) -not <{dfind,find,globals,heap,injection,procs,search,socket,third-party,utils,watchman,fsevents,fsnotify_darwin,fsnotify_linux,fsnotify_win,stubs}/**/*>: package(core_kernel), package(visitors.ppx), package(visitors.runtime), package(pcre) - +not <{dfind,find,heap,injection,socket,third-party,fsevents,fsnotify_darwin,fsnotify_linux,fsnotify_win,stubs}/**/*>: package(core_kernel), package(visitors.ppx), package(visitors.runtime), package(pcre), package(lwt), package(lwt.unix), package(lwt_ppx), package(lwt_log) +<{utils,watchman,search,procs,globals}/**/*>: -package(pcre) diff --git a/hack/common/common.ml b/hack/common/common.ml index ec459c79126..46ab4ae43f4 100644 --- a/hack/common/common.ml +++ b/hack/common/common.ml @@ -1,84 +1,111 @@ module List = struct include Core_kernel.List - let rec fold_left_env env l ~init ~f = match l with - | [] -> env, init + let rec fold_left_env env l ~init ~f = + match l with + | [] -> (env, init) | x :: xs -> - let env, init = f env init x in + let (env, init) = f env init x in fold_left_env env xs ~init ~f let rev_map_env env xs ~f = let f2 env init x = - let env, x = f env x in - env, x :: init + let (env, x) = f env x in + (env, x :: init) in fold_left_env env xs ~init:[] ~f:f2 let map_env env xs ~f = let rec aux env xs counter = match xs with - | [] -> env, [] + | [] -> (env, []) | [y1] -> - let env, z1 = f env y1 in - env, [z1] + let (env, z1) = f env y1 in + (env, [z1]) | [y1; y2] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - env, [z1; z2] + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + (env, [z1; z2]) | [y1; y2; y3] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - env, [z1; z2; z3] + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + (env, [z1; z2; z3]) | [y1; y2; y3; y4] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - let env, z4 = f env y4 in - env, [z1; z2; z3; z4] + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + let (env, z4) = f env y4 in + (env, [z1; z2; z3; z4]) | [y1; y2; y3; y4; y5] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - let env, z4 = f env 
y4 in - let env, z5 = f env y5 in - env, [z1; z2; z3; z4; z5] - | y1::y2::y3::y4::y5::ys -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - let env, z4 = f env y4 in - let env, z5 = f env y5 in - let env, zs = - if counter > 1000 - then - let env, zs = rev_map_env env ys ~f in - env, rev zs + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + let (env, z4) = f env y4 in + let (env, z5) = f env y5 in + (env, [z1; z2; z3; z4; z5]) + | y1 :: y2 :: y3 :: y4 :: y5 :: ys -> + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + let (env, z4) = f env y4 in + let (env, z5) = f env y5 in + let (env, zs) = + if counter > 1000 then + let (env, zs) = rev_map_env env ys ~f in + (env, rev zs) else aux env ys (counter + 1) in - env, z1::z2::z3::z4::z5::zs + (env, z1 :: z2 :: z3 :: z4 :: z5 :: zs) in aux env xs 0 - let rec map2_env env l1 l2 ~f = match l1, l2 with - | [], [] -> env, [] - | [], _ | _, [] -> raise @@ Invalid_argument "map2_env" - | x1 :: rl1, x2 :: rl2 -> - let env, x = f env x1 x2 in - let env, rl = map2_env env rl1 rl2 ~f in - env, x :: rl + let rec map2_env env l1 l2 ~f = + match (l1, l2) with + | ([], []) -> (env, []) + | ([], _) + | (_, []) -> + raise @@ Invalid_argument "map2_env" + | (x1 :: rl1, x2 :: rl2) -> + let (env, x) = f env x1 x2 in + let (env, rl) = map2_env env rl1 rl2 ~f in + (env, x :: rl) let filter_map_env env xs ~f = - let env, l = rev_map_env env xs ~f in - env, rev_filter_map l ~f:(fun x -> x) + let (env, l) = rev_map_env env xs ~f in + (env, rev_filter_map l ~f:(fun x -> x)) let rec replicate ~num x = match num with | 0 -> [] | n when n < 0 -> - raise @@ Invalid_argument ( - Printf.sprintf "List.replicate was called with %d argument" n) + raise + @@ Invalid_argument + (Printf.sprintf "List.replicate was called with %d argument" n) | _ -> x :: replicate ~num:(num - 1) x +end + +module Option = struct + include Core_kernel.Option + + let pp : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit + = + fun pp_x fmt x_opt -> + match x_opt with + | None -> Format.pp_print_string fmt "None" + | Some x -> + Format.pp_print_string fmt "(Some "; + pp_x fmt x; + Format.pp_print_string fmt ")" + + let show : (Format.formatter -> 'a -> unit) -> 'a t -> string = + (fun pp_x x_opt -> Format.asprintf "%a" (pp pp_x) x_opt) + + let if_none x_opt ~f = + match x_opt with + | Some x -> Some x + | None -> f () + let ( >>! ) = if_none end diff --git a/hack/common/dune b/hack/common/dune new file mode 100644 index 00000000000..3669859010d --- /dev/null +++ b/hack/common/dune @@ -0,0 +1,5 @@ +(library + (name common) + (wrapped false) + (libraries + core_kernel)) diff --git a/hack/dfind/dfindAddFile.ml b/hack/dfind/dfindAddFile.ml index 8c9901ad092..dbc3789771b 100644 --- a/hack/dfind/dfindAddFile.ml +++ b/hack/dfind/dfindAddFile.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,6 @@ * *) - (*****************************************************************************) (* Adds a new file or directory to the environment *) (*****************************************************************************) @@ -30,18 +29,18 @@ open DfindMaybe * As opposed to: * file1 * file2 -*) + *) let get_files path dir_handle = let paths = ref SSet.empty in try while true do let file = Unix.readdir dir_handle in - if file = "." || file = ".." - then () + if file = "." || file = ".." 
then + () else let path = Filename.concat path file in - paths := SSet.add path !paths; + paths := SSet.add path !paths done; assert false with _ -> !paths @@ -49,9 +48,10 @@ let get_files path dir_handle = (* Gets rid of the '/' or '\' at the end of a directory name *) let normalize path = let size = String.length path in - if Char.escaped path.[size - 1] = Filename.dir_sep - then String.sub path 0 (size - 1) - else path + if Char.escaped path.[size - 1] = Filename.dir_sep then + String.sub path 0 (size - 1) + else + path (*****************************************************************************) (* The entry point @@ -70,25 +70,27 @@ let normalize path = *) (*****************************************************************************) -module ISet = Set.Make (struct type t = int let compare = compare end) +module ISet = Set.Make (struct + type t = int + + let compare = compare +end) (* This used to be an environment variable, but it is too complicated * for now. Hardcoding! Yay! -*) -let blacklist = List.map ~f:Str.regexp [ - ".*/wiki/images/.*"; - ".*/\\.git"; - ".*/\\.svn"; - ".*/\\.hg"; -] + *) +let blacklist = + List.map + ~f:Str.regexp + [".*/wiki/images/.*"; ".*/\\.git"; ".*/\\.svn"; ".*/\\.hg"] let is_blacklisted path = try - List.iter blacklist begin fun re -> - if Str.string_match re path 0 - then raise Exit - else () - end; + List.iter blacklist (fun re -> + if Str.string_match re path 0 then + raise Exit + else + ()); false with Exit -> true @@ -100,48 +102,55 @@ let rec add_file links env path = | _ -> return () and add_watch links env path = - call (add_fsnotify_watch env) path >>= function + call (add_fsnotify_watch env) path + >>= function | None -> return () | Some _watch -> add_file links env path -and add_fsnotify_watch env path = - return (Fsnotify.add_watch env.fsnotify path) +and add_fsnotify_watch env path = return (Fsnotify.add_watch env.fsnotify path) and add_new_file links env path = - let time = Time.get() in + let time = Time.get () in env.files <- TimeFiles.add (time, path) env.files; env.new_files <- SSet.add path env.new_files; - call (wrap Unix.lstat) path >>= fun ({ Unix.st_kind = kind; _ } as st) -> - if ISet.mem st.Unix.st_ino links then return () else - let links = ISet.add st.Unix.st_ino links in - match kind with - | Unix.S_LNK when ISet.mem st.Unix.st_ino links -> - return () - | Unix.S_LNK -> - return () - (* TODO add an option to support symlinks *) -(* call (wrap Unix.readlink) path >>= add_file links env *) - | Unix.S_DIR -> - call (add_watch links env) path >>= fun () -> - call (wrap Unix.opendir) path >>= fun dir_handle -> + call (wrap Unix.lstat) path + >>= fun ({ Unix.st_kind = kind; _ } as st) -> + if ISet.mem st.Unix.st_ino links then + return () + else + let links = ISet.add st.Unix.st_ino links in + match kind with + | Unix.S_LNK when ISet.mem st.Unix.st_ino links -> return () + | Unix.S_LNK -> return () + (* TODO add an option to support symlinks *) + (* call (wrap Unix.readlink) path >>= add_file links env *) + | Unix.S_DIR -> + call (add_watch links env) path + >>= fun () -> + call (wrap Unix.opendir) path + >>= fun dir_handle -> let files = get_files path dir_handle in SSet.iter (fun x -> ignore (add_file links env x)) files; (try Unix.closedir dir_handle with _ -> ()); let prev_files = - try SMap.find_unsafe path env.dirs - with Not_found -> SSet.empty in + (try SMap.find_unsafe path env.dirs with Not_found -> SSet.empty) + in let prev_files = SSet.union files prev_files in - let files = SSet.fold begin fun file all_files -> - 
try - let sub_dir = SMap.find_unsafe file env.dirs in - SSet.union sub_dir all_files - with Not_found -> - SSet.add file all_files - end files prev_files in + let files = + SSet.fold + begin + fun file all_files -> + try + let sub_dir = SMap.find_unsafe file env.dirs in + SSet.union sub_dir all_files + with Not_found -> SSet.add file all_files + end + files + prev_files + in env.dirs <- SMap.add path files env.dirs; return () - | _ -> return () - + | _ -> return () (* This is the only thing we want to expose *) let path env x = ignore (add_file ISet.empty env x) diff --git a/hack/dfind/dfindAddFile.mli b/hack/dfind/dfindAddFile.mli index f4cb86713dd..b625be68549 100644 --- a/hack/dfind/dfindAddFile.mli +++ b/hack/dfind/dfindAddFile.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,15 +7,14 @@ * *) - (*****************************************************************************) (* Adds a new file or directory to the environment *) (*****************************************************************************) -val path: DfindEnv.t -> string -> unit +val path : DfindEnv.t -> string -> unit (*****************************************************************************) (* Find all the files in a directory *) (*****************************************************************************) -val get_files: string -> Unix.dir_handle -> SSet.t +val get_files : string -> Unix.dir_handle -> SSet.t diff --git a/hack/dfind/dfindEnv.ml b/hack/dfind/dfindEnv.ml index 777cbeb1d05..c7d5fe9f01d 100644 --- a/hack/dfind/dfindEnv.ml +++ b/hack/dfind/dfindEnv.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,6 @@ * *) - (*****************************************************************************) (* The environment shared by everyone *) (*****************************************************************************) @@ -16,8 +15,12 @@ module Time = struct type t = int let counter = ref 0 - let get () = incr counter; !counter - let compare = (-) + + let get () = + incr counter; + !counter + + let compare = ( - ) (* The beginning of times *) let bot = 0 @@ -25,30 +28,31 @@ module Time = struct let to_string x = string_of_int x end -module TimeFiles = MonoidAvl.Make(struct +module TimeFiles = MonoidAvl.Make (struct (* Timestamp + filename *) type elt = Time.t * string + let compare (_, x) (_, y) = String.compare x y type monoelt = Time.t + let neutral = Time.bot + let make = fst + let compose = max end) type t = { - (* The fsnotify environment, we use this for interacting with fsnotify *) - fsnotify : Fsnotify.env ; - - (* The set of files with their timestamp *) - mutable files : TimeFiles.t ; - - (* The set of new files (files created during an event) *) - mutable new_files : SSet.t ; - - (* The directories (and the files they contain) *) - mutable dirs : SSet.t SMap.t ; - } + (* The fsnotify environment, we use this for interacting with fsnotify *) + fsnotify: Fsnotify.env; + (* The set of files with their timestamp *) + mutable files: TimeFiles.t; + (* The set of new files (files created during an event) *) + mutable new_files: SSet.t; + (* The directories (and the files they contain) *) + mutable dirs: SSet.t SMap.t; +} (*****************************************************************************) (* Building the original environment, this call is called only once @@ -59,8 +63,8 @@ type t = { let make roots = let fsnotify = Fsnotify.init roots in { - fsnotify = fsnotify ; - files = TimeFiles.empty ; - 
new_files = SSet.empty ; - dirs = SMap.empty ; + fsnotify; + files = TimeFiles.empty; + new_files = SSet.empty; + dirs = SMap.empty; } diff --git a/hack/dfind/dfindEnv.mli b/hack/dfind/dfindEnv.mli index 9d4f39395f6..4027da45f82 100644 --- a/hack/dfind/dfindEnv.mli +++ b/hack/dfind/dfindEnv.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -17,36 +17,33 @@ * where I checked the time stamp and now etc ... * So we maintain our own clock. It is incremented by one on every event. *) -module Time: sig +module Time : sig type t - val get: unit -> t - val compare: t -> t -> int + val get : unit -> t + + val compare : t -> t -> int (* The beginning of times *) - val bot: t + val bot : t - val to_string: t -> string + val to_string : t -> string end (* Our fancy Avl (cf monoidAvl.ml) *) -module TimeFiles: MonoidAvl.S -with type elt = Time.t * string -with type monoelt = Time.t +module TimeFiles : + MonoidAvl.S with type elt = Time.t * string with type monoelt = Time.t type t = { - (* The fsnotify environment, we use this for interacting with fsnotify *) - fsnotify : Fsnotify.env ; - - (* The set of files with their timestamp *) - mutable files : TimeFiles.t ; - - (* The set of new files (files created during an event) *) - mutable new_files : SSet.t ; - - (* The directories (and the files they contain) *) - mutable dirs : SSet.t SMap.t ; - } + (* The fsnotify environment, we use this for interacting with fsnotify *) + fsnotify: Fsnotify.env; + (* The set of files with their timestamp *) + mutable files: TimeFiles.t; + (* The set of new files (files created during an event) *) + mutable new_files: SSet.t; + (* The directories (and the files they contain) *) + mutable dirs: SSet.t SMap.t; +} (*****************************************************************************) (* Building the original environment, this call is called only once @@ -54,4 +51,4 @@ type t = { *) (*****************************************************************************) -val make: string list -> t +val make : string list -> t diff --git a/hack/dfind/dfindLib.ml b/hack/dfind/dfindLib.ml index cfedb3bcd10..24a2c9f60e2 100644 --- a/hack/dfind/dfindLib.ml +++ b/hack/dfind/dfindLib.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -7,48 +7,58 @@ * *) - module type MARSHAL_TOOLS = sig type 'a result + type fd - val return: 'a -> 'a result - val (>>=): 'a result -> ('a -> 'b result) -> 'b result + val return : 'a -> 'a result + + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val descr_of_in_channel : 'a Daemon.in_channel -> fd - val descr_of_in_channel: 'a Daemon.in_channel -> fd - val descr_of_out_channel: 'a Daemon.out_channel -> fd + val descr_of_out_channel : 'a Daemon.out_channel -> fd - val to_fd_with_preamble: + val to_fd_with_preamble : ?timeout:Timeout.t -> ?flags:Marshal.extern_flags list -> fd -> 'a -> int result - val from_fd_with_preamble: ?timeout:Timeout.t -> fd -> 'a result + + val from_fd_with_preamble : ?timeout:Timeout.t -> fd -> 'a result end -module DFindLibFunctor (Marshal_tools: MARSHAL_TOOLS): sig +module DFindLibFunctor (Marshal_tools : MARSHAL_TOOLS) : sig type t + val init : - (Unix.file_descr * Unix.file_descr * Unix.file_descr) -> - (string * Path.t list) - -> t + Unix.file_descr * Unix.file_descr * Unix.file_descr -> + string * Path.t list -> + t + val wait_until_ready : t -> unit Marshal_tools.result + val pid : t -> int + val get_changes : ?timeout:Timeout.t -> t -> SSet.t Marshal_tools.result + val stop : t -> unit end = struct - let (>>=) = Marshal_tools.(>>=) + let ( >>= ) = Marshal_tools.( >>= ) type t = { infd: Marshal_tools.fd; outfd: Marshal_tools.fd; - daemon_handle: (DfindServer.msg, unit) Daemon.handle + daemon_handle: (DfindServer.msg, unit) Daemon.handle; } let init log_fds (scuba_table, roots) = - let name = Printf.sprintf "file watching process for server %d" (Unix.getpid ()) in - let {Daemon.channels = (ic, oc); _;} as daemon_handle = + let name = + Printf.sprintf "file watching process for server %d" (Unix.getpid ()) + in + let ({ Daemon.channels = (ic, oc); _ } as daemon_handle) = Daemon.spawn ~name log_fds DfindServer.entry_point (scuba_table, roots) in { @@ -61,7 +71,9 @@ end = struct let wait_until_ready handle = Marshal_tools.from_fd_with_preamble handle.infd - >>= (fun msg -> assert (msg = DfindServer.Ready); Marshal_tools.return ()) + >>= fun msg -> + assert (msg = DfindServer.Ready); + Marshal_tools.return () let request_changes ?timeout handle = Marshal_tools.to_fd_with_preamble handle.outfd () @@ -71,34 +83,34 @@ end = struct let rec loop acc = request_changes ?timeout daemon >>= (function - | DfindServer.Updates s -> Marshal_tools.return s - | DfindServer.Ready -> assert false - ) - >>= (fun diff -> - if SSet.is_empty diff - then Marshal_tools.return acc - else begin - let acc = SSet.union diff acc in - loop acc - end - ) - in loop SSet.empty + | DfindServer.Updates s -> Marshal_tools.return s + | DfindServer.Ready -> assert false) + >>= fun diff -> + if SSet.is_empty diff then + Marshal_tools.return acc + else + let acc = SSet.union diff acc in + loop acc + in + loop SSet.empty let stop handle = Daemon.kill handle.daemon_handle end -module RegularMarshalTools: MARSHAL_TOOLS - with type 'a result = 'a and type fd = Unix.file_descr = -struct +module RegularMarshalTools : + MARSHAL_TOOLS with type 'a result = 'a and type fd = Unix.file_descr = struct include Marshal_tools type 'a result = 'a + type fd = Unix.file_descr let return x = x - let (>>=) x f = f x + + let ( >>= ) x f = f x let descr_of_in_channel = Daemon.descr_of_in_channel + let descr_of_out_channel = Daemon.descr_of_out_channel end diff --git a/hack/dfind/dfindLib.mli b/hack/dfind/dfindLib.mli index 1cab8a67055..f047c85f73a 100644 --- a/hack/dfind/dfindLib.mli +++ 
b/hack/dfind/dfindLib.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,41 +10,54 @@ type t val init : - (Unix.file_descr * Unix.file_descr * Unix.file_descr) -> - (string * Path.t list) - -> t + Unix.file_descr * Unix.file_descr * Unix.file_descr -> + string * Path.t list -> + t + val wait_until_ready : t -> unit + val pid : t -> int + val get_changes : ?timeout:Timeout.t -> t -> SSet.t + val stop : t -> unit module type MARSHAL_TOOLS = sig type 'a result + type fd - val return: 'a -> 'a result - val (>>=): 'a result -> ('a -> 'b result) -> 'b result + val return : 'a -> 'a result + + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val descr_of_in_channel : 'a Daemon.in_channel -> fd - val descr_of_in_channel: 'a Daemon.in_channel -> fd - val descr_of_out_channel: 'a Daemon.out_channel -> fd + val descr_of_out_channel : 'a Daemon.out_channel -> fd - val to_fd_with_preamble: + val to_fd_with_preamble : ?timeout:Timeout.t -> ?flags:Marshal.extern_flags list -> fd -> 'a -> int result - val from_fd_with_preamble: ?timeout:Timeout.t -> fd -> 'a result + + val from_fd_with_preamble : ?timeout:Timeout.t -> fd -> 'a result end -module DFindLibFunctor : functor (Marshal_tools: MARSHAL_TOOLS) -> sig +module DFindLibFunctor (Marshal_tools : MARSHAL_TOOLS) : sig type t + val init : - (Unix.file_descr * Unix.file_descr * Unix.file_descr) -> - (string * Path.t list) - -> t + Unix.file_descr * Unix.file_descr * Unix.file_descr -> + string * Path.t list -> + t + val wait_until_ready : t -> unit Marshal_tools.result + val pid : t -> int + val get_changes : ?timeout:Timeout.t -> t -> SSet.t Marshal_tools.result - val stop: t -> unit + + val stop : t -> unit end diff --git a/hack/dfind/dfindLibLwt.ml b/hack/dfind/dfindLibLwt.ml index 1d9c52bfe5a..842a2f4ee57 100644 --- a/hack/dfind/dfindLibLwt.ml +++ b/hack/dfind/dfindLibLwt.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -7,26 +7,38 @@ * *) -module MarshalToolsLwt: DfindLib.MARSHAL_TOOLS - with type 'a result = 'a Lwt.t and type fd = Lwt_unix.file_descr = -struct +module MarshalToolsLwt : + DfindLib.MARSHAL_TOOLS + with type 'a result = 'a Lwt.t + and type fd = Lwt_unix.file_descr = struct type 'a result = 'a Lwt.t + type fd = Lwt_unix.file_descr let return = Lwt.return - let (>>=) = Lwt.(>>=) - let descr_of_in_channel ic = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true (Daemon.descr_of_in_channel ic) - let descr_of_out_channel oc = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true (Daemon.descr_of_out_channel oc) + let ( >>= ) = Lwt.( >>= ) + + let descr_of_in_channel ic = + Lwt_unix.of_unix_file_descr + ~blocking:false + ~set_flags:true + (Daemon.descr_of_in_channel ic) + + let descr_of_out_channel oc = + Lwt_unix.of_unix_file_descr + ~blocking:false + ~set_flags:true + (Daemon.descr_of_out_channel oc) let to_fd_with_preamble ?timeout ?flags fd v = - if timeout <> None - then raise (Invalid_argument "Use lwt timeouts directly"); + if timeout <> None then + raise (Invalid_argument "Use lwt timeouts directly"); Marshal_tools_lwt.to_fd_with_preamble ?flags fd v let from_fd_with_preamble ?timeout fd = - if timeout <> None - then raise (Invalid_argument "Use lwt timeouts directly"); + if timeout <> None then + raise (Invalid_argument "Use lwt timeouts directly"); Marshal_tools_lwt.from_fd_with_preamble fd end diff --git a/hack/dfind/dfindLibLwt.mli b/hack/dfind/dfindLibLwt.mli index 499d5e0b16c..77d87fb5a61 100644 --- a/hack/dfind/dfindLibLwt.mli +++ b/hack/dfind/dfindLibLwt.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,10 +10,14 @@ type t val init : - (Unix.file_descr * Unix.file_descr * Unix.file_descr) -> - (string * Path.t list) - -> t + Unix.file_descr * Unix.file_descr * Unix.file_descr -> + string * Path.t list -> + t + val wait_until_ready : t -> unit Lwt.t + val pid : t -> int + val get_changes : t -> SSet.t Lwt.t + val stop : t -> unit diff --git a/hack/dfind/dfindMaybe.ml b/hack/dfind/dfindMaybe.ml index 0e43ea8f628..cf350b0ed4f 100644 --- a/hack/dfind/dfindMaybe.ml +++ b/hack/dfind/dfindMaybe.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,6 @@ * *) - (*****************************************************************************) (* A modified maybe monad * Most of the time, I prefer to use exceptions, I like things to blow up @@ -16,15 +15,16 @@ * to blow-up, we want to carry-on whatever happens. * So this monad never fails, it logs very nasty errors, for example, it will * log the fact that a watch couldn't be created, when the file still exists. -*) + *) (*****************************************************************************) let log = ref stderr + let set_log oc = log := oc type 'a t = 'a option -let (>>=) x f = +let ( >>= ) x f = match x with | None -> None | Some x -> f x @@ -32,22 +32,23 @@ let (>>=) x f = let return x = Some x let handle_file_exn path = function - | Fsnotify.Error (_, Unix.ENOENT) -> () - (* The file got deleted in the mean time ... we don't care *) + | Fsnotify.Error (_, Unix.ENOENT) -> + () (* The file got deleted in the mean time ... we don't care *) | Fsnotify.Error (reason, _) -> - (* This is bad ... *) - Printf.fprintf !log - "Error: could not add watch to %s [%s]\n" path reason + (* This is bad ... 
*) + Printf.fprintf !log "Error: could not add watch to %s [%s]\n" path reason | _ when Sys.file_exists path -> - (* Logging this makes the system very noisy. There are too many - * cases where a file has been removed etc ... - *) - () + (* Logging this makes the system very noisy. There are too many + * cases where a file has been removed etc ... + *) + () | _ -> () (* Calls (f path), never fails, logs the nasty exceptions *) let call f path = try f path - with e -> handle_file_exn path e; None + with e -> + handle_file_exn path e; + None -let wrap f = fun x -> return (f x) +let wrap f x = return (f x) diff --git a/hack/dfind/dfindMaybe.mli b/hack/dfind/dfindMaybe.mli index d0a049f15cb..486ad81b243 100644 --- a/hack/dfind/dfindMaybe.mli +++ b/hack/dfind/dfindMaybe.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,6 @@ * *) - (*****************************************************************************) (* A modified maybe monad * Most of the time, I prefer to use exceptions, I like things to blow up @@ -16,7 +15,7 @@ * to blow-up, we want to carry-on whatever happens. * So this monad never fails, it logs very nasty errors, for example, it will * log the fact that a watch couldn't be created, when the file still exists. -*) + *) (*****************************************************************************) type 'a t @@ -24,9 +23,11 @@ type 'a t (* Called at the initialization of the server (cf server.ml) *) val set_log : out_channel -> unit -val (>>=) : 'a t -> ('a -> 'b t) -> 'b t -val return : 'a -> 'a t +val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t + +val return : 'a -> 'a t (* Calls (f path), never fails, logs the nasty exceptions *) -val call : (string -> 'a t) -> string -> 'a t -val wrap : ('a -> 'b) -> ('a -> 'b t) +val call : (string -> 'a t) -> string -> 'a t + +val wrap : ('a -> 'b) -> 'a -> 'b t diff --git a/hack/dfind/dfindServer.ml b/hack/dfind/dfindServer.ml index eac5caea420..8c8cfc1c498 100644 --- a/hack/dfind/dfindServer.ml +++ b/hack/dfind/dfindServer.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,6 @@ * *) - (*****************************************************************************) (* Code relative to the client/server communication *) (*****************************************************************************) @@ -23,31 +22,30 @@ type msg = (* Processing an fsnotify event *) (*****************************************************************************) -let (process_fsnotify_event: - DfindEnv.t -> SSet.t -> Fsnotify.event - -> SSet.t) = fun env dirty event -> - let { Fsnotify.path; wpath; } = event in - +let (process_fsnotify_event : DfindEnv.t -> SSet.t -> Fsnotify.event -> SSet.t) + = + fun env dirty event -> + let { Fsnotify.path; wpath } = event in (* Tell everybody that this file has changed *) let dirty = SSet.add path dirty in (* Is it a directory? 
Be conservative, everything we know about this * directory is now "dirty" *) let dirty = - if SMap.mem path env.dirs - then SSet.union dirty (SMap.find_unsafe path env.dirs) - else begin + if SMap.mem path env.dirs then + SSet.union dirty (SMap.find_unsafe path env.dirs) + else let dir_content = - try SMap.find_unsafe wpath env.dirs - with Not_found -> SSet.empty + (try SMap.find_unsafe wpath env.dirs with Not_found -> SSet.empty) in env.dirs <- SMap.add wpath (SSet.add path dir_content) env.dirs; dirty - end in env.new_files <- SSet.empty; + (* Add the file, plus all of the sub elements if it is a directory *) DfindAddFile.path env path; + (* Add everything new we found in this directory * (empty when it's a regular file) *) @@ -73,8 +71,7 @@ let run_daemon (scuba_table, roots) (ic, oc) = let message_in_callback () = let () = Marshal_tools.from_fd_with_preamble infd in let count = SSet.cardinal !acc in - if count > 0 - then Hh_logger.log "Sending %d file updates\n%!" count; + if count > 0 then Hh_logger.log "Sending %d file updates\n%!" count; Marshal_tools.to_fd_with_preamble outfd (Updates !acc) |> ignore; acc := SSet.empty in @@ -84,5 +81,4 @@ let run_daemon (scuba_table, roots) (ic, oc) = Fsnotify.select env.fsnotify ~read_fdl ~timeout fsnotify_callback done -let entry_point = - Daemon.register_entry_point "dfind" run_daemon +let entry_point = Daemon.register_entry_point "dfind" run_daemon diff --git a/hack/dfind/dfindServer.mli b/hack/dfind/dfindServer.mli index 367a0649d0e..82fee68a7c5 100644 --- a/hack/dfind/dfindServer.mli +++ b/hack/dfind/dfindServer.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -11,4 +11,4 @@ type msg = | Ready | Updates of SSet.t -val entry_point: ((string * Path.t list), unit, msg) Daemon.entry +val entry_point : (string * Path.t list, unit, msg) Daemon.entry diff --git a/hack/dfind/dune b/hack/dfind/dune new file mode 100644 index 00000000000..d9b843e2c69 --- /dev/null +++ b/hack/dfind/dune @@ -0,0 +1,26 @@ +(library + (name dfind) + (wrapped false) + (modules + :standard \ + dfindLibLwt) + (libraries + avl + collections + fsnotify + logging_common + marshal_tools + sys_utils)) + +(library + (name dfind_lwt) + (wrapped false) + (modules dfindLibLwt) + (libraries + dfind + lwt + lwt.unix + marshal_tools_lwt + sys_utils + ) +) diff --git a/hack/find/dune b/hack/find/dune new file mode 100644 index 00000000000..08c674b42de --- /dev/null +++ b/hack/find/dune @@ -0,0 +1,5 @@ +(library + (name find) + (wrapped false) + (libraries + sys_utils)) diff --git a/hack/find/find.ml b/hack/find/find.ml index 4117964fa16..83e0e3431ba 100644 --- a/hack/find/find.ml +++ b/hack/find/find.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -14,30 +14,42 @@ open Hh_core let lstat_kind file = - let open Unix in - try Some (lstat file).st_kind - with Unix_error (ENOENT, _, _) -> - prerr_endline ("File not found: "^file); - None + Unix.( + try Some (lstat file).st_kind + with Unix_error (ENOENT, _, _) -> + prerr_endline ("File not found: " ^ file); + None) -let fold_files (type t) - ?max_depth ?(filter=(fun _ -> true)) ?(file_only = false) - (paths: Path.t list) (action: string -> t -> t) (init: t) = +let fold_files + (type t) + ?max_depth + ?(filter = (fun _ -> true)) + ?(file_only = false) + (paths : Path.t list) + (action : string -> t -> t) + (init : t) = let rec fold depth acc dir = - let acc = if not file_only && filter dir then action dir acc else acc in + let acc = + if (not file_only) && filter dir then + action dir acc + else + acc + in if max_depth = Some depth then acc else let files = Sys.readdir dir in Array.fold_left (fun acc file -> - let open Unix in - let file = Filename.concat dir file in - match lstat_kind file with - | Some S_REG when filter file -> action file acc - | Some S_DIR -> fold (depth+1) acc file - | _ -> acc) - acc files in + Unix.( + let file = Filename.concat dir file in + match lstat_kind file with + | Some S_REG when filter file -> action file acc + | Some S_DIR -> fold (depth + 1) acc file + | _ -> acc)) + acc + files + in let paths = List.map paths Path.to_string in List.fold_left paths ~init ~f:(fold 0) @@ -60,7 +72,7 @@ type stack = let max_files = 1000 -let make_next_files ?name:_ ?(filter = fun _ -> true) ?(others=[]) root = +let make_next_files ?name:_ ?(filter = (fun _ -> true)) ?(others = []) root = let rec process sz acc files dir stack = if sz >= max_files then (acc, Dir (files, dir, stack)) @@ -68,22 +80,29 @@ let make_next_files ?name:_ ?(filter = fun _ -> true) ?(others=[]) root = match files with | [] -> process_stack sz acc stack | file :: files -> - let file = if dir = "" then file else Filename.concat dir file in - let open Unix in - match lstat_kind file with + let file = + if dir = "" then + file + else + Filename.concat dir file + in + Unix.( + (match lstat_kind file with | Some S_REG when filter file -> - process (sz+1) (file :: acc) files dir stack + process (sz + 1) (file :: acc) files dir stack | Some S_DIR -> - let dirfiles = Array.to_list @@ Sys.readdir file in - process sz acc dirfiles file (Dir (files, dir, stack)) - | _ -> process sz acc files dir stack + let dirfiles = Array.to_list @@ Sys.readdir file in + process sz acc dirfiles file (Dir (files, dir, stack)) + | _ -> process sz acc files dir stack)) and process_stack sz acc = function | Nil -> (acc, Nil) - | Dir (files, dir, stack) -> process sz acc files dir stack in + | Dir (files, dir, stack) -> process sz acc files dir stack + in let state = - ref (Dir (Path.to_string root :: - List.map ~f:Path.to_string others, "", Nil)) in + ref + (Dir (Path.to_string root :: List.map ~f:Path.to_string others, "", Nil)) + in fun () -> - let res, st = process_stack 0 [] !state in + let (res, st) = process_stack 0 [] !state in state := st; res diff --git a/hack/find/find.mli b/hack/find/find.mli index 5fcdb9431b3..f7face0ef1e 100644 --- a/hack/find/find.mli +++ b/hack/find/find.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -7,19 +7,28 @@ * *) -val make_next_files: - ?name: string -> - ?filter:(string -> bool) -> ?others: Path.t list -> Path.t -> - (unit -> string list) +val make_next_files : + ?name:string -> + ?filter:(string -> bool) -> + ?others:Path.t list -> + Path.t -> + unit -> + string list -val find: - ?max_depth:int -> ?filter:(string -> bool) -> ?file_only:bool -> - Path.t list -> string list +val find : + ?max_depth:int -> + ?filter:(string -> bool) -> + ?file_only:bool -> + Path.t list -> + string list -val find_with_name: - ?max_depth:int -> ?file_only:bool -> - Path.t list -> string -> string list +val find_with_name : + ?max_depth:int -> ?file_only:bool -> Path.t list -> string -> string list -val iter_files: - ?max_depth:int -> ?filter:(string -> bool) -> ?file_only:bool -> - Path.t list -> (string -> unit) -> unit +val iter_files : + ?max_depth:int -> + ?filter:(string -> bool) -> + ?file_only:bool -> + Path.t list -> + (string -> unit) -> + unit diff --git a/hack/fsevents/dune b/hack/fsevents/dune new file mode 100644 index 00000000000..6282ea3c6a9 --- /dev/null +++ b/hack/fsevents/dune @@ -0,0 +1,9 @@ +(library + (name fsevents) + (wrapped false) + (c_names + fsevents_stubs) + (c_library_flags + -framework CoreServices + -framework CoreFoundation) + (libraries)) diff --git a/hack/fsevents/fsevents.ml b/hack/fsevents/fsevents.ml index 40f014ecffa..85750130b1f 100644 --- a/hack/fsevents/fsevents.ml +++ b/hack/fsevents/fsevents.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,13 +8,18 @@ *) type env + type event = string * string + external init : unit -> env = "stub_fsevents_init" + external add_watch : env -> string -> string = "stub_fsevents_add_watch" + external get_event_fd : env -> Unix.file_descr = "stub_fsevents_get_event_fd" + external read_events : env -> event list = "stub_fsevents_read_events" (* glevi is lazy and didn't implement removing watches since hh_server never * actually does that at the moment external rm_watch : env -> string -> string = "stub_fsevents_rm_watch" -*) + *) diff --git a/hack/fsevents/fsevents.mli b/hack/fsevents/fsevents.mli index 9ef6b37e96e..f562947232b 100644 --- a/hack/fsevents/fsevents.mli +++ b/hack/fsevents/fsevents.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -8,10 +8,15 @@ *) type env + type event = string * string + val init : unit -> env + val add_watch : env -> string -> string -val get_event_fd : env -> Unix.file_descr + +val get_event_fd : env -> Unix.file_descr + val read_events : env -> event list (* Currently unimplemented diff --git a/hack/fsnotify/dune b/hack/fsnotify/dune new file mode 100644 index 00000000000..a0b5689e14f --- /dev/null +++ b/hack/fsnotify/dune @@ -0,0 +1,22 @@ +(* -*- tuareg -*- *) + +let () = + (* https://github.com/ocaml/ocaml/blob/36c163248d77e7df0803c1e9893ad01948846081/asmcomp/x86_proc.ml#L40-L59 *) + let system = List.assoc "system" Jbuild_plugin.V1.ocamlc_config in + let fsnotify_impl = match system with + | "macosx" -> Some "fsnotify_Darwin" + | "linux" -> Some "fsnotify_Linux" + | "cygwin" + | "mingw64" + | "win64" -> Some "fsnotify_Windows" + | _ -> None + in + match fsnotify_impl with + | None -> Jbuild_plugin.V1.send "" + | Some fsnotify_impl -> Printf.ksprintf Jbuild_plugin.V1.send "\ + +(library + (name fsnotify) + (wrapped false) + (libraries %s)) +" fsnotify_impl diff --git a/hack/fsnotify_darwin/dune b/hack/fsnotify_darwin/dune new file mode 100644 index 00000000000..b1cd7cf43f8 --- /dev/null +++ b/hack/fsnotify_darwin/dune @@ -0,0 +1,6 @@ +(library + (name fsnotify_Darwin) + (wrapped false) + (libraries + fsevents + utils_core)) diff --git a/hack/fsnotify_darwin/fsnotify.ml b/hack/fsnotify_darwin/fsnotify.ml index ac95968b8a3..799e5ad56e5 100644 --- a/hack/fsnotify_darwin/fsnotify.ml +++ b/hack/fsnotify_darwin/fsnotify.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,68 +8,70 @@ *) open Hh_core +module SSet = Set.Make (String) -module SSet = Set.Make(String) exception Error of string * Unix.error type watch = string + type env = { - fsevents : Fsevents.env; - mutable wpaths : SSet.t; + fsevents: Fsevents.env; + mutable wpaths: SSet.t; } type event = { - path : string; (* The full path for the file/directory that changed *) - wpath : string; (* The watched path that triggered this event *) + path: string; + (* The full path for the file/directory that changed *) + wpath: string; (* The watched path that triggered this event *) } (* Returns None if we're already watching that path and Some watch otherwise *) let add_watch env path = (* FSEvents is watching the root directory. 
You don't actually need to tell it * to watch subdirectories and files too *) - if SSet.mem path env.wpaths - then None - else begin + if SSet.mem path env.wpaths then + None + else ( env.wpaths <- SSet.add path env.wpaths; Some path - end + ) let init roots = - let env = { - fsevents = Fsevents.init (); - wpaths = SSet.empty; - } in - List.iter roots begin fun root -> - try ignore (Fsevents.add_watch env.fsevents root) - with Unix.Unix_error (Unix.ENOENT, _, _) -> - prerr_endline ("Not watching root \"" ^ root ^ "\": file not found.") - end; + let env = { fsevents = Fsevents.init (); wpaths = SSet.empty } in + List.iter roots (fun root -> + try ignore (Fsevents.add_watch env.fsevents root) + with Unix.Unix_error (Unix.ENOENT, _, _) -> + prerr_endline ("Not watching root \"" ^ root ^ "\": file not found.")); env let read env = - List.map - (Fsevents.read_events env.fsevents) - (fun (path, wpath) -> {path; wpath;}) + List.map (Fsevents.read_events env.fsevents) (fun (path, wpath) -> + { path; wpath }) + +module FDMap = Map.Make (struct + type t = Unix.file_descr + + let compare = compare +end) -module FDMap = Map.Make( - struct type t = Unix.file_descr let compare = compare end -) type fd_select = Unix.file_descr * (unit -> unit) + let make_callback fdmap (fd, callback) = FDMap.add fd callback fdmap -let invoke_callback fdmap fd = - let callback = try - (FDMap.find fd fdmap) - with _ -> assert false in + +let invoke_callback fdmap fd = + let callback = (try FDMap.find fd fdmap with _ -> assert false) in callback () -let select env ?(read_fdl=[]) ?(write_fdl=[]) ~timeout callback = +let select env ?(read_fdl = []) ?(write_fdl = []) ~timeout callback = let callback () = callback (Unix.handle_unix_error read env) in let read_fdl = (Fsevents.get_event_fd env.fsevents, callback) :: read_fdl in let read_callbacks = - List.fold_left ~f:make_callback ~init:FDMap.empty read_fdl in + List.fold_left ~f:make_callback ~init:FDMap.empty read_fdl + in let write_callbacks = - List.fold_left ~f:make_callback ~init:FDMap.empty write_fdl in - let read_ready, write_ready, _ = + List.fold_left ~f:make_callback ~init:FDMap.empty write_fdl + in + let (read_ready, write_ready, _) = Unix.select (List.map read_fdl fst) (List.map write_fdl fst) [] timeout in List.iter write_ready (invoke_callback write_callbacks); diff --git a/hack/fsnotify_darwin/fsnotify.mli b/hack/fsnotify_darwin/fsnotify.mli index 373a323b387..b4954fa0ea4 100644 --- a/hack/fsnotify_darwin/fsnotify.mli +++ b/hack/fsnotify_darwin/fsnotify.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -16,8 +16,9 @@ type env type watch type event = { - path : string; (* The full path for the file/directory that changed *) - wpath : string; (* The watched path that triggered this event *) + path: string; + (* The full path for the file/directory that changed *) + wpath: string; (* The watched path that triggered this event *) } val init : string list -> env @@ -27,15 +28,18 @@ val add_watch : env -> string -> watch option (* A file descriptor and what to do when it is selected *) type fd_select = Unix.file_descr * (unit -> unit) + val select : (* The fsevents context *) env -> - (* Additional file descriptor to select for reading *) - ?read_fdl:(fd_select list) -> - (* Additional file descriptor to select for writing *) - ?write_fdl:(fd_select list) -> - (* Timeout...like Unix.select *) - timeout:float -> - (* The callback for file system events *) - (event list -> unit) -> + ?read_fdl: + (* Additional file descriptor to select for reading *) + fd_select list -> + ?write_fdl: + (* Additional file descriptor to select for writing *) + fd_select list -> + timeout:(* Timeout...like Unix.select *) + float -> + ((* The callback for file system events *) + event list -> unit) -> unit diff --git a/hack/fsnotify_linux/dune b/hack/fsnotify_linux/dune new file mode 100644 index 00000000000..85007e278e9 --- /dev/null +++ b/hack/fsnotify_linux/dune @@ -0,0 +1,6 @@ +(library + (name fsnotify_Linux) + (wrapped false) + (libraries + inotify + utils_core)) diff --git a/hack/fsnotify_linux/fsnotify.ml b/hack/fsnotify_linux/fsnotify.ml index bf28db7a0a1..c8155c4829a 100644 --- a/hack/fsnotify_linux/fsnotify.ml +++ b/hack/fsnotify_linux/fsnotify.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,14 +10,14 @@ open Hh_core exception Error of string * Unix.error + let wrap f () = - try - f () - with + try f () with | Unix.Unix_error (err, func, msg) -> - let reason = - Printf.sprintf "%s: %s: %s" func msg (Unix.error_message err) in - raise (Error (reason, err)) + let reason = + Printf.sprintf "%s: %s: %s" func msg (Unix.error_message err) + in + raise (Error (reason, err)) | e -> raise e type watch = Inotify.watch @@ -28,39 +28,50 @@ type watch = Inotify.watch * the file changes (by sending an event to a pipe, cf env.inotify). * We need to be able to compare watches because there could be multiple * paths that lead to the same watch (because of symlinks). -*) -module WMap = Map.Make(struct type t = watch let compare = compare end) + *) +module WMap = Map.Make (struct + type t = watch + + let compare = compare +end) type env = { - fd : Unix.file_descr; - mutable wpaths : string WMap.t; + fd: Unix.file_descr; + mutable wpaths: string WMap.t; } type event = { - path : string; (* The full path for the file/directory that changed *) - wpath : string; (* The watched path that triggered this event *) + path: string; + (* The full path for the file/directory that changed *) + wpath: string; (* The watched path that triggered this event *) } -let init _roots = { - fd = wrap (Inotify.create) (); - wpaths = WMap.empty; -} +let init _roots = { fd = wrap Inotify.create (); wpaths = WMap.empty } -let select_events = Inotify.( - [ S_Create; S_Delete; S_Delete_self; - S_Modify; S_Move_self; S_Moved_from; - S_Moved_to; S_Attrib; - ]) +let select_events = + Inotify. 
+ [ + S_Create; + S_Delete; + S_Delete_self; + S_Modify; + S_Move_self; + S_Moved_from; + S_Moved_to; + S_Attrib; + ] (* Returns None if we're already watching that path and Some watch otherwise *) let add_watch env path = - let watch = wrap (fun () -> Inotify.add_watch env.fd path select_events) () in - if WMap.mem watch env.wpaths && WMap.find watch env.wpaths = path - then None - else begin + let watch = + wrap (fun () -> Inotify.add_watch env.fd path select_events) () + in + if WMap.mem watch env.wpaths && WMap.find watch env.wpaths = path then + None + else ( env.wpaths <- WMap.add watch path env.wpaths; Some watch - end + ) let check_event_type = function | Inotify.Access @@ -76,41 +87,50 @@ let check_event_type = function | Inotify.Open | Inotify.Ignored | Inotify.Modify - | Inotify.Isdir -> () + | Inotify.Isdir -> + () | Inotify.Q_overflow -> - Printf.printf "INOTIFY OVERFLOW!!!\n"; - exit 5 + Printf.printf "INOTIFY OVERFLOW!!!\n"; + exit 5 | Inotify.Unmount -> - Printf.printf "UNMOUNT EVENT!!!\n"; - exit 5 + Printf.printf "UNMOUNT EVENT!!!\n"; + exit 5 let process_event env events event = match event with - | _, _, _, None -> events - | watch, type_list, _, Some filename -> - List.iter type_list check_event_type; - let wpath = try WMap.find watch env.wpaths with _ -> assert false in - let path = Filename.concat wpath filename in - { path; wpath; }::events + | (_, _, _, None) -> events + | (watch, type_list, _, Some filename) -> + List.iter type_list check_event_type; + let wpath = (try WMap.find watch env.wpaths with _ -> assert false) in + let path = Filename.concat wpath filename in + { path; wpath } :: events let read env = let inotify_events = wrap (fun () -> Inotify.read env.fd) () in List.fold_left inotify_events ~f:(process_event env) ~init:[] -module FDMap = Map.Make( - struct type t = Unix.file_descr let compare = compare end -) +module FDMap = Map.Make (struct + type t = Unix.file_descr + + let compare = compare +end) + type fd_select = Unix.file_descr * (unit -> unit) + let make_callback fdmap (fd, callback) = FDMap.add fd callback fdmap -let invoke_callback fdmap fd = (FDMap.find fd fdmap) () -let select env ?(read_fdl=[]) ?(write_fdl=[]) ~timeout callback = + +let invoke_callback fdmap fd = (FDMap.find fd fdmap) () + +let select env ?(read_fdl = []) ?(write_fdl = []) ~timeout callback = let callback () = callback (Unix.handle_unix_error read env) in let read_fdl = (env.fd, callback) :: read_fdl in let read_callbacks = - List.fold_left read_fdl ~f:make_callback ~init:FDMap.empty in + List.fold_left read_fdl ~f:make_callback ~init:FDMap.empty + in let write_callbacks = - List.fold_left write_fdl ~f:make_callback ~init:FDMap.empty in - let read_ready, write_ready, _ = + List.fold_left write_fdl ~f:make_callback ~init:FDMap.empty + in + let (read_ready, write_ready, _) = Unix.select (List.map read_fdl fst) (List.map write_fdl fst) [] timeout in List.iter write_ready (invoke_callback write_callbacks); @@ -119,11 +139,16 @@ let select env ?(read_fdl=[]) ?(write_fdl=[]) ~timeout callback = (********** DEBUGGING ****************) (* Can be useful to see what the event actually is, for debugging *) let _string_of inotify_ev = - let wd, mask, cookie, s = inotify_ev in + let (wd, mask, cookie, s) = inotify_ev in let mask = String.concat ":" (List.map mask Inotify.string_of_event) in - let s = match s with - | Some s -> s - | None -> "\"\"" in + let s = + match s with + | Some s -> s + | None -> "\"\"" + in Printf.sprintf "wd [%u] mask[%s] cookie[%ld] %s" - (Inotify.int_of_watch 
wd) mask cookie s + (Inotify.int_of_watch wd) + mask + cookie + s diff --git a/hack/fsnotify_linux/fsnotify.mli b/hack/fsnotify_linux/fsnotify.mli index 373a323b387..b4954fa0ea4 100644 --- a/hack/fsnotify_linux/fsnotify.mli +++ b/hack/fsnotify_linux/fsnotify.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -16,8 +16,9 @@ type env type watch type event = { - path : string; (* The full path for the file/directory that changed *) - wpath : string; (* The watched path that triggered this event *) + path: string; + (* The full path for the file/directory that changed *) + wpath: string; (* The watched path that triggered this event *) } val init : string list -> env @@ -27,15 +28,18 @@ val add_watch : env -> string -> watch option (* A file descriptor and what to do when it is selected *) type fd_select = Unix.file_descr * (unit -> unit) + val select : (* The fsevents context *) env -> - (* Additional file descriptor to select for reading *) - ?read_fdl:(fd_select list) -> - (* Additional file descriptor to select for writing *) - ?write_fdl:(fd_select list) -> - (* Timeout...like Unix.select *) - timeout:float -> - (* The callback for file system events *) - (event list -> unit) -> + ?read_fdl: + (* Additional file descriptor to select for reading *) + fd_select list -> + ?write_fdl: + (* Additional file descriptor to select for writing *) + fd_select list -> + timeout:(* Timeout...like Unix.select *) + float -> + ((* The callback for file system events *) + event list -> unit) -> unit diff --git a/hack/fsnotify_win/dune b/hack/fsnotify_win/dune new file mode 100644 index 00000000000..0b7d3c18ee9 --- /dev/null +++ b/hack/fsnotify_win/dune @@ -0,0 +1,7 @@ +(library + (name fsnotify_Windows) + (wrapped false) + (c_names + fsnotify_stubs) + (libraries + utils_core)) diff --git a/hack/fsnotify_win/fsnotify.ml b/hack/fsnotify_win/fsnotify.ml index ff1edb83663..e074bb2e92c 100644 --- a/hack/fsnotify_win/fsnotify.ml +++ b/hack/fsnotify_win/fsnotify.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -30,7 +30,7 @@ type fsenv (* Abstract data type for a watching thread. *) type watcher_id -module SSet = Set.Make(String) +module SSet = Set.Make (String) type env = { fsenv: fsenv; @@ -40,14 +40,14 @@ type env = { } type event = { - path: string; (* The full path for the file/directory that changed *) + path: string; + (* The full path for the file/directory that changed *) wpath: string; (* The watched path that triggered this event *) } (** Stubs *) -external raw_init: - Unix.file_descr -> fsenv = "caml_fsnotify_init" +external raw_init : Unix.file_descr -> fsenv = "caml_fsnotify_init" (* [raw_add_watch out_fd dir] creates a thread that monitor [dir] and push a single charactes into the pipe 'out_fd' whenever a events is @@ -55,23 +55,21 @@ external raw_init: The return value is an opaque `watcher_id`, currently it contains the corresponding thread id. 
*) -external raw_add_watch: fsenv -> string -> watcher_id = "caml_fsnotify_add_watch" - -external raw_read_events: - fsenv -> event list = "caml_fsnotify_read_events" +external raw_add_watch : fsenv -> string -> watcher_id + = "caml_fsnotify_add_watch" +external raw_read_events : fsenv -> event list = "caml_fsnotify_read_events" (** Init *) let init roots = - let in_fd, out_fd = Unix.pipe () in + let (in_fd, out_fd) = Unix.pipe () in Unix.set_close_on_exec in_fd; Unix.set_close_on_exec out_fd; let fsenv = raw_init out_fd in let watchers = List.map roots ~f:(raw_add_watch fsenv) in { fsenv; fd = in_fd; watchers; wpaths = SSet.empty } - (** Faked add_watch, as for `fsnotify_darwin`. *) (* Returns None if we're already watching that path and Some watch otherwise *) @@ -80,47 +78,53 @@ let add_watch env path = * the whole directory. No need to register every files in it. *) if SSet.mem path env.wpaths then None - else begin + else ( env.wpaths <- SSet.add path env.wpaths; Some () - end + ) (** Select *) -module FDMap = Map.Make(struct - type t = Unix.file_descr - let compare = compare - end) +module FDMap = Map.Make (struct + type t = Unix.file_descr + + let compare = compare +end) + type fd_select = Unix.file_descr * (unit -> unit) + let make_callback fdmap (fd, callback) = FDMap.add fd callback fdmap -let invoke_callback fdmap fd = - let callback = - try FDMap.find fd fdmap - with _ -> assert false in + +let invoke_callback fdmap fd = + let callback = (try FDMap.find fd fdmap with _ -> assert false) in callback () let read_events env = (* read pop only one char from pipe, in order never to block. *) - ignore (Unix.read env.fd " " 0 1 : int); + let buf = Bytes.create 1 in + ignore (Unix.read env.fd buf 0 1 : int); + (* prefix the root path *) - List.map (raw_read_events env.fsenv) - ~f:(fun ev -> { ev with path = Filename.concat ev.wpath ev.path }) + List.map (raw_read_events env.fsenv) ~f:(fun ev -> + { ev with path = Filename.concat ev.wpath ev.path }) -let select env ?(read_fdl=[]) ?(write_fdl=[]) ~timeout callback = +let select env ?(read_fdl = []) ?(write_fdl = []) ~timeout callback = let callback () = callback (read_events env) in let read_fdl = (env.fd, callback) :: read_fdl in let read_callbacks = - List.fold_left ~f:make_callback ~init:FDMap.empty read_fdl in + List.fold_left ~f:make_callback ~init:FDMap.empty read_fdl + in let write_callbacks = - List.fold_left ~f:make_callback ~init:FDMap.empty write_fdl in - let read_ready, write_ready, _ = + List.fold_left ~f:make_callback ~init:FDMap.empty write_fdl + in + let (read_ready, write_ready, _) = Unix.select (List.map read_fdl fst) (List.map write_fdl fst) [] timeout in List.iter write_ready (invoke_callback write_callbacks); List.iter read_ready (invoke_callback read_callbacks) - (** Unused, for compatibility with `fsnotify_linux/fsnotify.mli` only. *) type watch = unit + exception Error of string * Unix.error diff --git a/hack/fsnotify_win/fsnotify.mli b/hack/fsnotify_win/fsnotify.mli index 373a323b387..2e3d110b737 100644 --- a/hack/fsnotify_win/fsnotify.mli +++ b/hack/fsnotify_win/fsnotify.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -16,8 +16,9 @@ type env type watch type event = { - path : string; (* The full path for the file/directory that changed *) - wpath : string; (* The watched path that triggered this event *) + path: string; + (* The full path for the file/directory that changed *) + wpath: string; (* The watched path that triggered this event *) } val init : string list -> env @@ -27,6 +28,7 @@ val add_watch : env -> string -> watch option (* A file descriptor and what to do when it is selected *) type fd_select = Unix.file_descr * (unit -> unit) + val select : (* The fsevents context *) env -> @@ -39,3 +41,4 @@ val select : (* The callback for file system events *) (event list -> unit) -> unit +[@@ocamlformat "disable=true"] diff --git a/hack/heap/config/discover.ml b/hack/heap/config/discover.ml new file mode 100644 index 00000000000..dcc6ad3a736 --- /dev/null +++ b/hack/heap/config/discover.ml @@ -0,0 +1,43 @@ +(** This is a dune configurator: +https://jbuilder.readthedocs.io/en/latest/configurator.html *) + +module C = Configurator.V1 + +(* cmake should have prepared some information for us in the env: + EXTRA_INCLUDE_PATHS + EXTRA_LIB_PATHS + EXTRA_NATIVE_LIBRARIES + EXTRA_LINK_OPTS +*) +let query_env s = + match Sys.getenv s with + | "" -> [] + | s -> String.split_on_char ' ' s + | exception Not_found -> [] + +let abs = + let current_dir = Sys.getcwd () in + (* we are in ./src/heap/config, locate . *) + let root_dir = Filename.(dirname @@ dirname @@ dirname current_dir) in + fun s -> + if Filename.is_relative s then + Filename.concat root_dir s + else + s + +let process_env () = + let includes = + query_env "EXTRA_INCLUDE_PATHS" |> List.map (fun s -> "-I" ^ abs s) + in + let dirs = query_env "EXTRA_LIB_PATHS" |> List.map (fun s -> "-L" ^ abs s) in + let names = + query_env "EXTRA_NATIVE_LIBRARIES" |> List.map (fun s -> "-l" ^ s) + in + let opaque_opts = query_env "EXTRA_LINK_OPTS" in + (includes, dirs @ names @ opaque_opts) + +let () = + C.main ~name:"heap" (fun (_ : C.t) -> + let (cflags, cldflags) = process_env () in + C.Flags.write_sexp "c_flags.sexp" cflags; + C.Flags.write_sexp "c_library_flags.sexp" cldflags) diff --git a/hack/heap/config/dune b/hack/heap/config/dune new file mode 100644 index 00000000000..62bffd02e26 --- /dev/null +++ b/hack/heap/config/dune @@ -0,0 +1,14 @@ +(executable + (name discover) + (libraries + imported_core + dune.configurator)) + +(rule + (targets c_flags.sexp c_library_flags.sexp) + (deps + (env_var EXTRA_INCLUDE_PATHS) + (env_var EXTRA_LIB_PATHS) + (env_var EXTRA_NATIVE_LIBRARIES) + (env_var EXTRA_LINK_OPTS)) + (action (run ./discover.exe))) diff --git a/hack/heap/dumbsqlite.ml b/hack/heap/dumbsqlite.ml deleted file mode 100644 index 64803a2e800..00000000000 --- a/hack/heap/dumbsqlite.ml +++ /dev/null @@ -1,62 +0,0 @@ -type sqlerr = int - -type sqlerr_t = { sqlerr : sqlerr } - -type classified_error = - | SqliteOk - | Row - | Done - | SqliteErr of int - | WrapperErr of int - -let classify_sqlerr {sqlerr} = - match sqlerr with - | 0 -> SqliteOk - | 100 -> Row - | 101 -> Done - | _ -> ( - if sqlerr > 0 then SqliteErr sqlerr - else WrapperErr sqlerr - ) - -external caml_dumb_sqlite_open : int -> string -> bool -> sqlerr = "caml_dumb_sqlite_open" -let caml_dumb_sqlite_open ~index ~path ~readonly = - {sqlerr = caml_dumb_sqlite_open index path readonly} - -external caml_dumb_sqlite_close : int -> sqlerr = "caml_dumb_sqlite_close" -let caml_dumb_sqlite_close ~index = - {sqlerr = caml_dumb_sqlite_close index} - -external caml_dumb_sqlite_prepare : int -> int -> string 
-> sqlerr = "caml_dumb_sqlite_prepare" -let caml_dumb_sqlite_prepare ~index ~s_index ~sql = - {sqlerr = caml_dumb_sqlite_prepare index s_index sql} - -external caml_dumb_sqlite_reset : int -> sqlerr = "caml_dumb_sqlite_reset" -let caml_dumb_sqlite_reset ~s_index = - {sqlerr = caml_dumb_sqlite_reset s_index} - -external caml_dumb_sqlite_step : int -> sqlerr = "caml_dumb_sqlite_step" -let caml_dumb_sqlite_step ~s_index = - {sqlerr = caml_dumb_sqlite_step s_index} - -external caml_dumb_sqlite_finalize : int -> sqlerr = "caml_dumb_sqlite_finalize" -let caml_dumb_sqlite_finalize ~s_index = - {sqlerr = caml_dumb_sqlite_finalize s_index} - -external caml_dumb_sqlite_bind_int64 : int -> int -> Int64.t -> sqlerr = "caml_dumb_sqlite_bind_int64" -let caml_dumb_sqlite_bind_int64 ~s_index ~param ~value = - {sqlerr = caml_dumb_sqlite_bind_int64 s_index param value} - -external caml_dumb_sqlite_bind_text : int -> int -> string -> sqlerr = "caml_dumb_sqlite_bind_text" -let caml_dumb_sqlite_bind_text ~s_index ~param ~value = - {sqlerr = caml_dumb_sqlite_bind_text s_index param value} - -external caml_dumb_sqlite_column_int64 : int -> int -> (sqlerr * Int64.t) = "caml_dumb_sqlite_column_int64" -let caml_dumb_sqlite_column_int64 ~s_index ~column = - let (sqlerr, res) = caml_dumb_sqlite_column_int64 s_index column in - ({sqlerr = sqlerr}, res) - -external caml_dumb_sqlite_column_text : int -> int -> (sqlerr * string) = "caml_dumb_sqlite_column_text" -let caml_dumb_sqlite_column_text ~s_index ~column = - let (sqlerr, res) = caml_dumb_sqlite_column_text s_index column in - ({sqlerr = sqlerr}, res) diff --git a/hack/heap/dune b/hack/heap/dune new file mode 100644 index 00000000000..15dc47026fa --- /dev/null +++ b/hack/heap/dune @@ -0,0 +1,52 @@ +(library + (name heap_libc) + (wrapped false) + (modules) + (c_names + hh_assert + hh_shared) + (c_flags (:standard + (:include config/c_flags.sexp))) + (c_library_flags (:standard + (:include config/c_library_flags.sexp))) + (libraries + utils_core)) + +(library + (name heap_ident) + (wrapped false) + (modules + ident) + (libraries + collections + heap_libc)) + +(library + (name heap_shared_mem) + (wrapped false) + (modules + prefix + sharedMem + value) + (libraries + heap_libc + logging_common + utils_core + worker_cancel)) + +(library + (name worker_cancel) + (wrapped false) + (modules + workerCancel) + (libraries + heap_libc + utils_core)) + +(library + (name heap_global_storage) + (wrapped false) + (modules + globalStorage) + (libraries + heap_libc)) diff --git a/hack/heap/globalStorage.ml b/hack/heap/globalStorage.ml index 836ee7c0b83..81cd0ae197d 100644 --- a/hack/heap/globalStorage.ml +++ b/hack/heap/globalStorage.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -17,34 +17,44 @@ *) (*****************************************************************************) -module Make: - functor(Value:sig type t end) -> sig - - (* "store v" stores the value v in the global storage. - * Can only be called by the master. - * 'hh_shared_init' must have been called prior to the first call. - * The store must be empty. - *) - val store: Value.t -> unit - - (* "load()" returns the value stored in the global storage. - * Can be called by any process (master or workers), "store" must have - * been called by the master before the call. - *) - val load: unit -> Value.t - - (* "clear()" empties the global storage. - * Can only be called by the master. 
- *) - val clear: unit -> unit - - end = functor(Value: sig type t end) -> struct - - external hh_shared_store : string -> unit = "hh_shared_store" - external hh_shared_load : unit -> string = "hh_shared_load" - external hh_shared_clear : unit -> unit = "hh_shared_clear" - - let store (x: Value.t) = hh_shared_store (Marshal.to_string x []) - let load () = (Marshal.from_string (hh_shared_load()) 0 : Value.t) - let clear () = hh_shared_clear() -end +module Make : functor + (Value : sig + type t + end) + -> sig + (* "store v" stores the value v in the global storage. + * Can only be called by the master. + * 'hh_shared_init' must have been called prior to the first call. + * The store must be empty. + *) + val store : Value.t -> unit + + (* "load()" returns the value stored in the global storage. + * Can be called by any process (master or workers), "store" must have + * been called by the master before the call. + *) + val load : unit -> Value.t + + (* "clear()" empties the global storage. + * Can only be called by the master. + *) + val clear : unit -> unit +end = +functor + (Value : sig + type t + end) + -> + struct + external hh_shared_store : string -> unit = "hh_shared_store" + + external hh_shared_load : unit -> string = "hh_shared_load" + + external hh_shared_clear : unit -> unit = "hh_shared_clear" + + let store (x : Value.t) = hh_shared_store (Marshal.to_string x []) + + let load () = (Marshal.from_string (hh_shared_load ()) 0 : Value.t) + + let clear () = hh_shared_clear () + end diff --git a/hack/heap/hh_shared.c b/hack/heap/hh_shared.c index 77301cc63ef..076f7c9c9f9 100644 --- a/hack/heap/hh_shared.c +++ b/hack/heap/hh_shared.c @@ -77,6 +77,10 @@ */ /*****************************************************************************/ +/* For printing uint64_t + * http://jhshi.me/2014/07/11/print-uint64-t-properly-in-c/index.html */ +#define __STDC_FORMAT_MACROS + /* define CAML_NAME_SPACE to ensure all the caml imports are prefixed with * 'caml_' */ #define CAML_NAME_SPACE @@ -102,12 +106,13 @@ #include #include #include -#include #include #include #endif +#include #include +#include #include #ifndef NO_SQLITE3 @@ -124,7 +129,6 @@ static sqlite3_stmt *g_get_dep_select_stmt = NULL; #include "hh_assert.h" -#include "hh_shared_sqlite.h" #define UNUSED(x) \ ((void)(x)) @@ -149,7 +153,7 @@ static sqlite3_stmt *g_get_dep_select_stmt = NULL; #endif -#define HASHTBL_WRITE_IN_PROGRESS ((char*)1) +#define HASHTBL_WRITE_IN_PROGRESS ((heap_entry_t*)1) /**************************************************************************** * Quoting the linux manpage: memfd_create() creates an anonymous file @@ -174,7 +178,7 @@ static sqlite3_stmt *g_get_dep_select_stmt = NULL; #elif defined(__aarch64__) #define SYS_memfd_create 385 #else - #error "hh_shared.c requires a architecture that supports memfd_create" + #error "hh_shared.c requires an architecture that supports memfd_create" #endif #endif @@ -233,35 +237,14 @@ static size_t bindings_size_b; static uint64_t hashtbl_size; static size_t hashtbl_size_b; +/* Used for worker-local data */ +static size_t locals_size_b; + typedef enum { KIND_STRING = 1, KIND_SERIALIZED = !KIND_STRING } storage_kind; -// Every heap entry starts with a 64-bit header with the following layout: -// -// 6 3 3 3 0 0 -// 3 3 2 1 1 0 -// +----------------------------------+-+-----------------------------------+-+ -// |11111111 11111111 11111111 1111111|0| 11111111 11111111 11111111 1111111|1| -// +----------------------------------+-+-----------------------------------+-+ -// | | | 
| -// | | | * 0 tag -// | | | -// | | * 31-1 uncompressed size (0 if uncompressed) -// | | -// | * 32 kind (0 = serialized, 1 = string) -// | -// * 63-33 size of heap entry -// -// The tag bit is always 1 and is used to differentiate headers from pointers -// during garbage collection (see hh_collect). -typedef uint64_t hh_header_t; - -#define Entry_size(x) ((x) >> 33) -#define Entry_kind(x) (((x) >> 32) & 1) -#define Entry_uncompressed_size(x) (((x) >> 1) & 0x7FFFFFFF) - typedef struct { // Size of the BLOB in bytes. size_t size; @@ -273,8 +256,10 @@ typedef struct { /* Too lazy to use getconf */ #define CACHE_LINE_SIZE (1 << 6) -#define CACHE_MASK (~(CACHE_LINE_SIZE - 1)) -#define ALIGNED(x) (((x) + CACHE_LINE_SIZE - 1) & CACHE_MASK) + +#define __ALIGN_MASK(x,mask) (((x)+(mask))&~(mask)) +#define ALIGN(x,a) __ALIGN_MASK(x,(typeof(x))(a)-1) +#define CACHE_ALIGN(x) ALIGN(x,CACHE_LINE_SIZE) /* Fix the location of our shared memory so we can save and restore the * hashtable easily */ @@ -300,10 +285,48 @@ extern const char* const BuildInfo_kRevision; /* Types */ /*****************************************************************************/ +/* Per-worker data which can be quickly updated non-atomically. Will be placed + * in cache-aligned array in the first few pages of shared memory, indexed by + * worker id. */ +typedef struct { + uint64_t counter; +} local_t; + +// Every heap entry starts with a 64-bit header with the following layout: +// +// 6 3 3 3 0 0 +// 3 3 2 1 1 0 +// +----------------------------------+-+-----------------------------------+-+ +// |11111111 11111111 11111111 1111111|0| 11111111 11111111 11111111 1111111|1| +// +----------------------------------+-+-----------------------------------+-+ +// | | | | +// | | | * 0 tag +// | | | +// | | * 31-1 uncompressed size (0 if uncompressed) +// | | +// | * 32 kind (0 = serialized, 1 = string) +// | +// * 63-33 size of heap entry +// +// The tag bit is always 1 and is used to differentiate headers from pointers +// during garbage collection (see hh_collect). +typedef uint64_t hh_header_t; + +#define Entry_size(x) ((x) >> 33) +#define Entry_kind(x) (((x) >> 32) & 1) +#define Entry_uncompressed_size(x) (((x) >> 1) & 0x7FFFFFFF) +#define Heap_entry_total_size(header) sizeof(heap_entry_t) + Entry_size(header) + +/* Shared memory structures. hh_shared.h typedefs this to heap_entry_t. */ +typedef struct { + hh_header_t header; + char data[]; +} heap_entry_t; + /* Cells of the Hashtable */ typedef struct { uint64_t hash; - char* addr; + heap_entry_t* addr; } helt_t; /*****************************************************************************/ @@ -435,25 +458,41 @@ static uint64_t* deptbl_bindings = NULL; /* The hashtable containing the shared values. */ static helt_t* hashtbl = NULL; -static uint64_t* hcounter = NULL; // the number of slots taken in the table +/* The number of nonempty slots in the hashtable. A nonempty slot has a + * non-zero hash. We never clear hashes so this monotonically increases */ +static uint64_t* hcounter = NULL; +/* The number of nonempty filled slots in the hashtable. A nonempty filled slot + * has a non-zero hash AND a non-null addr. It increments when we write data + * into a slot with addr==NULL and decrements when we clear data from a slot */ +static uint64_t* hcounter_filled = NULL; /* A counter increasing globally across all forks. */ static uintptr_t* counter = NULL; +/* Each process reserves a range of values at a time from the shared counter. 
+ * Should be a power of two for more efficient modulo calculation. */ +#define COUNTER_RANGE 2048 + /* Logging level for shared memory statistics * 0 = nothing * 1 = log totals, averages, min, max bytes marshalled and unmarshalled */ static size_t* log_level = NULL; +static double* sample_rate = NULL; + static size_t* workers_should_exit = NULL; static size_t* allow_removes = NULL; static size_t* allow_dependency_table_reads = NULL; +/* Worker-local storage is cache line aligned. */ +static char* locals; +#define LOCAL(id) ((local_t *)(locals + id * CACHE_ALIGN(sizeof(local_t)))) + /* This should only be used before forking */ -static uintptr_t early_counter = 1; +static uintptr_t early_counter = 0; /* The top of the heap */ static char** heap = NULL; @@ -462,13 +501,17 @@ static char** heap = NULL; static pid_t* master_pid = NULL; static pid_t my_pid = 0; +static size_t num_workers; + +/* This is a process-local value. The master process is 0, workers are numbered + * starting at 1. This is an offset into the worker local values in the heap. */ +static size_t worker_id; + static size_t allow_hashtable_writes_by_current_process = 1; static size_t worker_can_exit = 1; static char *db_filename = NULL; -#define FILE_INFO_ON_DISK_PATH "FILE_INFO_ON_DISK_PATH" - /* Where the heap started (bottom) */ static char* heap_init = NULL; /* Where the heap will end (top) */ @@ -482,42 +525,34 @@ static size_t used_heap_size(void) { static long removed_count = 0; +/* Expose so we can display diagnostics */ +CAMLprim value hh_used_heap_size(void) { + return Val_long(used_heap_size()); +} + /* Part of the heap not reachable from hashtable entries. Can be reclaimed with * hh_collect. */ -static size_t get_wasted_heap_size(void) { +CAMLprim value hh_wasted_heap_size(void) { assert(wasted_heap_size != NULL); - return *wasted_heap_size; + return Val_long(*wasted_heap_size); } -/* Expose so we can display diagnostics */ -CAMLprim value hh_heap_size(void) { - CAMLparam0(); - CAMLreturn(Val_long(used_heap_size())); +CAMLprim value hh_log_level(void) { + return Val_long(*log_level); } -CAMLprim value hh_log_level(void) { +CAMLprim value hh_sample_rate(void) { CAMLparam0(); - CAMLreturn(Val_long(*log_level)); + CAMLreturn(caml_copy_double(*sample_rate)); } CAMLprim value hh_hash_used_slots(void) { CAMLparam0(); - uint64_t filled_slots = 0; - uint64_t nonempty_slots = 0; - uintptr_t i = 0; - for (i = 0; i < hashtbl_size; ++i) { - if (hashtbl[i].hash != 0) { - nonempty_slots++; - } - if (hashtbl[i].addr == NULL) { - continue; - } - filled_slots++; - } - assert(nonempty_slots == *hcounter); - value connector = caml_alloc_tuple(2); - Field(connector, 0) = Val_long(filled_slots); - Field(connector, 1) = Val_long(nonempty_slots); + CAMLlocal1(connector); + + connector = caml_alloc_tuple(2); + Field(connector, 0) = Val_long(*hcounter_filled); + Field(connector, 1) = Val_long(*hcounter); CAMLreturn(connector); } @@ -836,21 +871,31 @@ static void define_globals(char * shared_mem_init) { assert (CACHE_LINE_SIZE >= sizeof(size_t)); log_level = (size_t*)(mem + 5*CACHE_LINE_SIZE); + assert (CACHE_LINE_SIZE >= sizeof(double)); + sample_rate = (double*)(mem + 6*CACHE_LINE_SIZE); + assert (CACHE_LINE_SIZE >= sizeof(size_t)); - workers_should_exit = (size_t*)(mem + 6*CACHE_LINE_SIZE); + workers_should_exit = (size_t*)(mem + 7*CACHE_LINE_SIZE); assert (CACHE_LINE_SIZE >= sizeof(size_t)); - wasted_heap_size = (size_t*)(mem + 7*CACHE_LINE_SIZE); + wasted_heap_size = (size_t*)(mem + 8*CACHE_LINE_SIZE); assert (CACHE_LINE_SIZE >= 
sizeof(size_t)); - allow_removes = (size_t*)(mem + 8*CACHE_LINE_SIZE); + allow_removes = (size_t*)(mem + 9*CACHE_LINE_SIZE); assert (CACHE_LINE_SIZE >= sizeof(size_t)); - allow_dependency_table_reads = (size_t*)(mem + 9*CACHE_LINE_SIZE); + allow_dependency_table_reads = (size_t*)(mem + 10*CACHE_LINE_SIZE); + + assert (CACHE_LINE_SIZE >= sizeof(size_t)); + hcounter_filled = (size_t*)(mem + 11*CACHE_LINE_SIZE); mem += page_size; // Just checking that the page is large enough. - assert(page_size > 10*CACHE_LINE_SIZE + (int)sizeof(int)); + assert(page_size > 12*CACHE_LINE_SIZE + (int)sizeof(int)); + + assert (CACHE_LINE_SIZE >= sizeof(local_t)); + locals = mem; + mem += locals_size_b; /* File name we get in hh_load_dep_table_sqlite needs to be smaller than * page_size - it should be since page_size is quite big for a string @@ -895,22 +940,32 @@ static void define_globals(char * shared_mem_init) { static size_t get_shared_mem_size(void) { size_t page_size = getpagesize(); return (global_size_b + dep_size_b + bindings_size_b + hashtbl_size_b + - heap_size + 2 * page_size); + heap_size + 2 * page_size + locals_size_b); } -static void init_shared_globals(size_t config_log_level) { +static void init_shared_globals( + size_t config_log_level, + double config_sample_rate +) { // Initial size is zero for global storage is zero global_storage[0] = 0; // Initialize the number of element in the table *hcounter = 0; + *hcounter_filled = 0; *dcounter = 0; - *counter = early_counter + 1; + // Ensure the global counter starts on a COUNTER_RANGE boundary + *counter = ALIGN(early_counter + 1, COUNTER_RANGE); *log_level = config_log_level; + *sample_rate = config_sample_rate; *workers_should_exit = 0; *wasted_heap_size = 0; *allow_removes = 1; *allow_dependency_table_reads = 1; + for (uint64_t i = 0; i <= num_workers; i++) { + LOCAL(i)->counter = 0; + } + // Initialize top heap pointers *heap = heap_init; @@ -923,7 +978,10 @@ static void set_sizes( uint64_t config_global_size, uint64_t config_heap_size, uint64_t config_dep_table_pow, - uint64_t config_hash_table_pow) { + uint64_t config_hash_table_pow, + uint64_t config_num_workers) { + + size_t page_size = getpagesize(); global_size_b = config_global_size; heap_size = config_heap_size; @@ -934,6 +992,11 @@ static void set_sizes( hashtbl_size = 1ul << config_hash_table_pow; hashtbl_size_b = hashtbl_size * sizeof(hashtbl[0]); + // We will allocate a cache line for the master process and each worker + // process, then pad that out to the nearest page. + num_workers = config_num_workers; + locals_size_b = ALIGN((1 + num_workers) * CACHE_LINE_SIZE, page_size); + shared_mem_size = get_shared_mem_size(); } @@ -943,9 +1006,10 @@ static void set_sizes( CAMLprim value hh_shared_init( value config_val, - value shm_dir_val + value shm_dir_val, + value num_workers_val ) { - CAMLparam2(config_val, shm_dir_val); + CAMLparam3(config_val, shm_dir_val, num_workers_val); CAMLlocal5( connector, config_global_size_val, @@ -963,7 +1027,8 @@ CAMLprim value hh_shared_init( Long_val(config_global_size_val), Long_val(config_heap_size_val), Long_val(config_dep_table_pow_val), - Long_val(config_hash_table_pow_val) + Long_val(config_hash_table_pow_val), + Long_val(num_workers_val) ); // None -> NULL @@ -990,7 +1055,9 @@ CAMLprim value hh_shared_init( my_pid = *master_pid; #endif - init_shared_globals(Long_val(Field(config_val, 6))); + init_shared_globals( + Long_val(Field(config_val, 6)), + Double_val(Field(config_val, 7))); // Checking that we did the maths correctly. 
assert(*heap + heap_size == shared_mem + shared_mem_size); @@ -1006,26 +1073,29 @@ CAMLprim value hh_shared_init( sigaction(SIGSEGV, &sigact, NULL); #endif - connector = caml_alloc_tuple(5); + connector = caml_alloc_tuple(6); Field(connector, 0) = Val_handle(memfd); Field(connector, 1) = config_global_size_val; Field(connector, 2) = config_heap_size_val; Field(connector, 3) = config_dep_table_pow_val; Field(connector, 4) = config_hash_table_pow_val; + Field(connector, 5) = num_workers_val; CAMLreturn(connector); } /* Must be called by every worker before any operation is performed */ -value hh_connect(value connector, value is_master) { - CAMLparam2(connector, is_master); +value hh_connect(value connector, value worker_id_val) { + CAMLparam2(connector, worker_id_val); memfd = Handle_val(Field(connector, 0)); set_sizes( Long_val(Field(connector, 1)), Long_val(Field(connector, 2)), Long_val(Field(connector, 3)), - Long_val(Field(connector, 4)) + Long_val(Field(connector, 4)), + Long_val(Field(connector, 5)) ); + worker_id = Long_val(worker_id_val); #ifdef _WIN32 my_pid = 1; // Trick #else @@ -1034,10 +1104,6 @@ value hh_connect(value connector, value is_master) { char *shared_mem_init = memfd_map(shared_mem_size); define_globals(shared_mem_init); - if (Bool_val(is_master)) { - *master_pid = my_pid; - } - CAMLreturn(Val_unit); } @@ -1059,7 +1125,12 @@ CAMLprim value hh_counter_next(void) { uintptr_t v = 0; if (counter) { - v = __sync_fetch_and_add(counter, 1); + v = LOCAL(worker_id)->counter; + if (v % COUNTER_RANGE == 0) { + v = __atomic_fetch_add(counter, COUNTER_RANGE, __ATOMIC_RELAXED); + } + ++v; + LOCAL(worker_id)->counter = v; } else { v = ++early_counter; } @@ -1205,6 +1276,13 @@ void hh_shared_clear(void) { /*****************************************************************************/ static void raise_dep_table_full(void) { + fprintf( + stderr, + "dcounter: %"PRIu64" dep_size: %"PRIu64" \n", + *dcounter, + dep_size + ); + static value *exn = NULL; if (!exn) exn = caml_named_value("dep_table_full"); caml_raise_constant(*exn); @@ -1393,6 +1471,12 @@ void hh_add_dep(value ocaml_dep) { CAMLreturn0; } +void kill_dep_used_slots(void) { + CAMLparam0(); + memset(deptbl, 0, dep_size_b); + memset(deptbl_bindings, 0, bindings_size_b); +} + CAMLprim value hh_dep_used_slots(void) { CAMLparam0(); uint64_t count = 0; @@ -1462,21 +1546,6 @@ CAMLprim value hh_get_dep(value ocaml_key) { CAMLreturn(result); } -/*****************************************************************************/ -/* Must be called after the hack server is done initializing. - * We keep the original size of the heap to estimate how often we should - * garbage collect. - */ -/*****************************************************************************/ -void hh_call_after_init(void) { - CAMLparam0(); - if (2 * used_heap_size() >= heap_size) { - caml_failwith("Heap init size is too close to max heap size; " - "GC will never get triggered!"); - } - CAMLreturn0; -} - value hh_check_heap_overflow(void) { if (*heap >= shared_mem + shared_mem_size) { return Val_bool(1); @@ -1494,30 +1563,14 @@ value hh_check_heap_overflow(void) { */ /*****************************************************************************/ -static int should_collect(int aggressive) { - float space_overhead = aggressive ? 
1.2 : 2.0; - size_t used = used_heap_size(); - size_t reachable = used - get_wasted_heap_size(); - return used >= (size_t)(space_overhead * reachable); -} - -CAMLprim value hh_should_collect(value aggressive_val) { - return Val_bool(should_collect(Bool_val(aggressive_val))); -} - -CAMLprim value hh_collect(value aggressive_val) { +CAMLprim value hh_collect(void) { // NOTE: explicitly do NOT call CAMLparam or any of the other functions/macros // defined in caml/memory.h . // This function takes a boolean and returns unit. // Those are both immediates in the OCaml runtime. - int aggressive = Bool_val(aggressive_val); assert_master(); assert_allow_removes(); - if (!should_collect(aggressive)) { - return Val_unit; - } - // Step 1: Walk the hashtbl entries, which are the roots of our marking pass. for (size_t i = 0; i < hashtbl_size; i++) { @@ -1537,12 +1590,15 @@ CAMLprim value hh_collect(value aggressive_val) { // be. Then, after moving the heap entry, we can follow the pointer to // restore our original header and update the addr field to our relocated // address. + // + // This is all super unsafe and only works because we constrain the size of + // an hh_header_t struct to the size of a pointer. // Location of the addr field (8 bytes) in the hashtable - char **hashtbl_addr = &hashtbl[i].addr; + char **hashtbl_addr = (char **)&hashtbl[i].addr; // Location of the header (8 bytes) in the heap - char *heap_addr = hashtbl[i].addr - sizeof(hh_header_t); + char *heap_addr = (char *)hashtbl[i].addr; // Swap hh_header_t header = *(hh_header_t *)heap_addr; @@ -1567,18 +1623,17 @@ CAMLprim value hh_collect(value aggressive_val) { // entry was not marked in the first pass and should be collected. Don't // move dest pointer, but advance src pointer to next heap entry. header = *(hh_header_t *)src; - aligned_size = ALIGNED(Entry_size(header) + sizeof(hh_header_t)); + aligned_size = CACHE_ALIGN(Heap_entry_total_size(header)); } else { // If the lsb is 0, this is a pointer to the addr field of the hashtable // element, which holds the header bytes. This entry is live. char *hashtbl_addr = *(char **)src; header = *(hh_header_t *)hashtbl_addr; - aligned_size = ALIGNED(Entry_size(header) + sizeof(hh_header_t)); + aligned_size = CACHE_ALIGN(Heap_entry_total_size(header)); // Fix the hashtbl addr field to point to our new location and restore the // heap header data temporarily stored in the addr field bits. - char *new_addr = dest + sizeof(hh_header_t); - *(uintptr_t *)hashtbl_addr = (uintptr_t)new_addr; + *(uintptr_t *)hashtbl_addr = (uintptr_t)dest; *(hh_header_t *)src = header; // Move the entry as far to the left as possible. @@ -1610,25 +1665,20 @@ static void raise_heap_full(void) { } /*****************************************************************************/ -/* Allocates in the shared heap. - * The chunks are cache aligned. - * The word before the chunk address contains the size of the chunk in bytes. - * The function returns a pointer to the data (the size can be accessed by - * looking at the address: chunk - sizeof(hh_header_t)). - */ +/* Allocates in the shared heap. The chunks are cache aligned. 
*/ /*****************************************************************************/ -static char* hh_alloc(hh_header_t header) { +static heap_entry_t* hh_alloc(hh_header_t header) { // the size of this allocation needs to be kept in sync with wasted_heap_size // modification in hh_remove - size_t slot_size = ALIGNED(Entry_size(header) + sizeof(hh_header_t)); - char* chunk = __sync_fetch_and_add(heap, (char*)slot_size); + size_t slot_size = CACHE_ALIGN(Heap_entry_total_size(header)); + char *chunk = __sync_fetch_and_add(heap, (char*) slot_size); if (chunk + slot_size > heap_max) { raise_heap_full(); } memfd_reserve(chunk, slot_size); - *((hh_header_t*)chunk) = header; - return (chunk + sizeof(hh_header_t)); + ((heap_entry_t *)chunk)->header = header; + return (heap_entry_t *)chunk; } /*****************************************************************************/ @@ -1637,7 +1687,7 @@ static char* hh_alloc(hh_header_t header) { * the allocated chunk. */ /*****************************************************************************/ -static char* hh_store_ocaml( +static heap_entry_t* hh_store_ocaml( value data, /*out*/size_t *alloc_size, /*out*/size_t *orig_size @@ -1694,8 +1744,8 @@ static char* hh_store_ocaml( | uncompressed_size << 1 | 1; - char* addr = hh_alloc(header); - memcpy(addr, + heap_entry_t* addr = hh_alloc(header); + memcpy(&addr->data, uncompressed_size ? compressed_data : value, size); @@ -1718,6 +1768,10 @@ static uint64_t get_hash(value key) { return *((uint64_t*)String_val(key)); } +CAMLprim value get_hash_ocaml(value key) { + return caml_copy_int64(*((uint64_t*)String_val(key))); +} + /*****************************************************************************/ /* Writes the data in one of the slots of the hashtable. There might be * concurrent writers, when that happens, the first writer wins. @@ -1745,6 +1799,7 @@ static value write_at(unsigned int slot, value data) { hashtbl[slot].addr = hh_store_ocaml(data, &alloc_size, &orig_size); Field(result, 0) = Val_long(alloc_size); Field(result, 1) = Val_long(orig_size); + __sync_fetch_and_add(hcounter_filled, 1); } else { Field(result, 0) = Min_long; Field(result, 1) = Min_long; @@ -1915,18 +1970,15 @@ CAMLprim value hh_mem_status(value key) { } /*****************************************************************************/ -/* Deserializes the value pointed by src. */ -/* The src is an OCaml style pointer, */ -/* meaning that it points right behind the header */ +/* Deserializes the value pointed to by elt. 
*/ /*****************************************************************************/ -CAMLprim value hh_deserialize(char *src) { +CAMLprim value hh_deserialize(heap_entry_t *elt) { CAMLparam0(); CAMLlocal1(result); - hh_header_t header = - *(hh_header_t*)(src - sizeof(hh_header_t)); - size_t size = Entry_size(header); - size_t uncompressed_size_exp = Entry_uncompressed_size(header); - char *data = src; + size_t size = Entry_size(elt->header); + size_t uncompressed_size_exp = Entry_uncompressed_size(elt->header); + char *src = elt->data; + char *data = elt->data; if (uncompressed_size_exp) { data = malloc(uncompressed_size_exp); size_t uncompressed_size = LZ4_decompress_safe( @@ -1938,14 +1990,14 @@ CAMLprim value hh_deserialize(char *src) { size = uncompressed_size; } - if (Entry_kind(header) == KIND_STRING) { + if (Entry_kind(elt->header) == KIND_STRING) { result = caml_alloc_string(size); memcpy(String_val(result), data, size); } else { result = caml_input_value_from_block(data, size); } - if (uncompressed_size_exp) { + if (data != src) { free(data); } CAMLreturn(result); @@ -1975,10 +2027,7 @@ CAMLprim value hh_get_size(value key) { unsigned int slot = find_slot(key); assert(hashtbl[slot].hash == get_hash(key)); - hh_header_t header = - *(hh_header_t*)(hashtbl[slot].addr - sizeof(hh_header_t)); - - CAMLreturn(Long_val(Entry_size(header))); + CAMLreturn(Long_val(Entry_size(hashtbl[slot].addr->header))); } /*****************************************************************************/ @@ -1997,6 +2046,8 @@ void hh_move(value key1, value key2) { assert(hashtbl[slot1].hash == get_hash(key1)); assert(hashtbl[slot2].addr == NULL); // We are taking up a previously empty slot. Let's increment the counter. + // hcounter_filled doesn't change, since slot1 becomes empty and slot2 becomes + // filled. if (hashtbl[slot2].hash == 0) { __sync_fetch_and_add(hcounter, 1); } @@ -2017,11 +2068,231 @@ void hh_remove(value key) { assert_allow_removes(); assert(hashtbl[slot].hash == get_hash(key)); // see hh_alloc for the source of this size - hh_header_t *header = (hh_header_t *)hashtbl[slot].addr - 1; - size_t slot_size = ALIGNED(Entry_size(*header) + sizeof(hh_header_t)); + size_t slot_size = + CACHE_ALIGN(Heap_entry_total_size(hashtbl[slot].addr->header)); __sync_fetch_and_add(wasted_heap_size, slot_size); hashtbl[slot].addr = NULL; removed_count += 1; + __sync_fetch_and_sub(hcounter_filled, 1); +} + +size_t deptbl_entry_count_for_slot(size_t slot) { + assert(slot < dep_size); + + size_t count = 0; + deptbl_entry_t slotval = deptbl[slot]; + + if (slotval.raw != 0 && slotval.s.key.tag == TAG_KEY) { + while (slotval.s.next.tag == TAG_NEXT) { + assert(slotval.s.next.num < dep_size); + slotval = deptbl[slotval.s.next.num]; + count++; + } + + // The final "next" in the list is always a value, not a next pointer. + count++; + } + + return count; +} + +/*****************************************************************************/ +/* Saved State as binary */ +/*****************************************************************************/ + +// TODO: MAGIC_CONSTANT +// use the same format as what's in the hashtable, but compact +// Question: is it better to deserialize into hashtbl or an OCaml Hashtable or +// into SQLite? 
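/* Editor's sketch (not part of the patch): the Entry_* macros and the header
 * built in hh_store_ocaml above imply the following round trip. This is a
 * self-contained illustration under those assumptions; pack_header and the
 * values in main are made up for the example. */
#include <assert.h>
#include <stdint.h>

#define Entry_size(x) ((x) >> 33)
#define Entry_kind(x) (((x) >> 32) & 1)
#define Entry_uncompressed_size(x) (((x) >> 1) & 0x7FFFFFFF)

/* Bits 63-33: entry size; bit 32: kind; bits 31-1: uncompressed size
 * (0 if the payload was not compressed); bit 0: tag, always 1 so headers can
 * be told apart from pointers during hh_collect. */
static uint64_t pack_header(uint64_t size, uint64_t kind, uint64_t uncompressed) {
  return (size << 33) | (kind << 32) | (uncompressed << 1) | 1;
}

int main(void) {
  uint64_t h = pack_header(4096, 1 /* KIND_STRING */, 123);
  assert(Entry_size(h) == 4096);
  assert(Entry_kind(h) == 1);
  assert(Entry_uncompressed_size(h) == 123);
  assert((h & 1) == 1); /* tag bit survives, so GC never mistakes it for a pointer */
  return 0;
}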
+size_t hh_save_dep_table_blob_helper(const char* const out_filename) { + struct timeval start_t = { 0 }; + gettimeofday(&start_t, NULL); + + // Allocate space for all the values + FILE* dep_table_blob_file = fopen(out_filename, "wb+"); + + // TODO: T38685427 - write MAGIC_CONSTANT + // TODO: write the format version + size_t slot = 0; + size_t count = 0; + size_t count_of_values = 0; + size_t prev_count = 0; + tagged_uint_t *values = NULL; + size_t iter = 0; + size_t edges_added = 0; + size_t new_rows_count = 0; + for (slot = 0; slot < dep_size; ++slot) { + count_of_values = deptbl_entry_count_for_slot(slot); + // 1 key + count = count_of_values + 1; + if (count_of_values == 0) { + continue; + } + else if (count > prev_count) { + // No need to allocate new space if we can just reuse the old one + values = realloc(values, count * sizeof(uint32_t)); + prev_count = count; + } + + assert(values != NULL); + + iter = 0; + + deptbl_entry_t slotval = deptbl[slot]; + if (slotval.raw != 0 && slotval.s.key.tag == TAG_KEY) { + // This is the head of a linked list aka KEY VERTEX + values[iter] = slotval.s.key; + iter++; + + // Then combine each value to VALUE VERTEX + while (slotval.s.next.tag == TAG_NEXT) { + assert(slotval.s.next.num < dep_size); + slotval = deptbl[slotval.s.next.num]; + values[iter] = slotval.s.key; + values[iter].tag = TAG_NEXT; + iter++; + } + + // The final "next" in the list is always a value, not a next pointer. + // NOTE: the tag will be !TAG_NEXT + values[iter] = slotval.s.next; + iter++; + + new_rows_count += 1; + + fwrite(values, sizeof(uint32_t), iter, dep_table_blob_file); + } + + edges_added += iter; + } + + if (values != NULL) { + free(values); + } + + fprintf(stderr, "Wrote %lu new rows\n", new_rows_count); + fclose(dep_table_blob_file); + + log_duration("Finished writing the file", start_t); + + return edges_added; +} + +void hh_load_dep_table_blob_helper(const char* const in_filename) { + struct timeval start_t = { 0 }; + gettimeofday(&start_t, NULL); + + // Allocate space for all the values + FILE* dep_table_blob_file = fopen(in_filename, "rb"); + assert(dep_table_blob_file != NULL); + + // TODO: this is an arbitrary buffer size; do something better? 
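/* Editor's aside (standalone, not part of this function): the read loop below
 * decodes the framing the saver above produced -- one key, then its values,
 * where every value except the last carries TAG_NEXT. The same framing,
 * restated as a tiny self-contained parser; demo_tagged_t, DEMO_TAG_NEXT,
 * demo_decode and print_edge are hypothetical names for illustration, and the
 * real tagged_uint_t layout is defined elsewhere in the dep table code. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { uint32_t num : 31; uint32_t tag : 1; } demo_tagged_t;
#define DEMO_TAG_NEXT 1

/* A key starts each record; values follow until one without the "next" tag. */
static void demo_decode(const demo_tagged_t *buf, size_t len,
                        void (*edge)(uint32_t key, uint32_t value)) {
  int expecting_key = 1;
  uint32_t key = 0;
  for (size_t i = 0; i < len; i++) {
    if (expecting_key) {
      key = buf[i].num;
      expecting_key = 0;
    } else {
      edge(key, buf[i].num);
      if (buf[i].tag != DEMO_TAG_NEXT)
        expecting_key = 1; /* record done; next element is a new key */
    }
  }
}

static void print_edge(uint32_t key, uint32_t value) {
  printf("%u -> %u\n", key, value);
}

int main(void) {
  /* One record: key 7 with dependents 1, 2, 3; only the last value drops the tag. */
  demo_tagged_t buf[] = { {7, 0}, {1, DEMO_TAG_NEXT}, {2, DEMO_TAG_NEXT}, {3, 0} };
  demo_decode(buf, sizeof(buf) / sizeof(buf[0]), print_edge);
  return 0;
}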
+ size_t buffer_size = 1000; + tagged_uint_t buffer[buffer_size]; + + // TODO: read MAGIC_CONSTANT + // TODO: read the format version + uint16_t is_key = 1; + tagged_uint_t slot; + tagged_uint_t key; + tagged_uint_t value; + size_t keys_count = 0; + size_t values_count = 0; + + // The number of bytes read from the file stream + size_t count; + + fprintf( + stderr, + "Start; dcounter: %"PRIu64" dep_size: %"PRIu64" \n", + *dcounter, + dep_size + ); + + do { + count = fread( + buffer, + sizeof(tagged_uint_t), + buffer_size, + dep_table_blob_file); + + if (count <= 0) { + assert(!ferror(dep_table_blob_file)); + } + else { + for (int i = 0; i < count; i++) { + slot = buffer[i]; + + if (is_key) { + is_key = 0; + keys_count++; + key = slot; + } + else { + value = slot; + values_count++; + + add_dep(key.num, value.num); + + if (value.tag != TAG_NEXT) { + is_key = 1; + } + } + } + } + } while (!feof(dep_table_blob_file)); + + fclose(dep_table_blob_file); + + fprintf( + stderr, + "End; dcounter: %"PRIu64" dep_size: %"PRIu64" \n", + *dcounter, + dep_size + ); + fprintf(stderr, "Read %lu keys and %lu values\n", keys_count, values_count); + + log_duration("Finished reading the file", start_t); +} + +/* + * Assumption: When we save the dependency table using this function, + * we do a fresh load, meaning that there was NO saved state loaded. + * From a loaded saved state, we call hh_update_dep_table_sqlite instead. + */ +CAMLprim value hh_save_dep_table_blob( + value out_filename, + value build_revision +) { + CAMLparam2(out_filename, build_revision); + char *out_filename_raw = String_val(out_filename); + + // TODO: use build_revision + size_t edges_added = + hh_save_dep_table_blob_helper(out_filename_raw); + CAMLreturn(Val_long(edges_added)); +} + +CAMLprim value hh_load_dep_table_blob( + value in_filename, + value ignore_hh_version +) { + CAMLparam1(in_filename); + struct timeval tv = { 0 }; + struct timeval tv2 = { 0 }; + gettimeofday(&tv, NULL); + + char *in_filename_raw = String_val(in_filename); + + // TODO: T38685889 - use ignore_hh_version + assert(ignore_hh_version); + + hh_load_dep_table_blob_helper(in_filename_raw); + + tv2 = log_duration("Loading the dependency blob file", tv); + int secs = tv2.tv_sec - tv.tv_sec; + // Reporting only seconds, ignore milli seconds + CAMLreturn(Val_long(secs)); } /*****************************************************************************/ @@ -2032,11 +2303,16 @@ void hh_remove(value key) { // not at the end of saving the state. 
void hh_cleanup_sqlite(void) { CAMLparam0(); + + // Reset the SQLite database file name size_t page_size = getpagesize(); memset(db_filename, 0, page_size); CAMLreturn0; } +#define ARRAY_SIZE(array) \ + (sizeof(array) / sizeof((array)[0])) + #define Val_none Val_int(0) value Val_some(value v) @@ -2053,63 +2329,58 @@ value Val_some(value v) #ifndef NO_SQLITE3 // ------------------------ START OF SQLITE3 SECTION -------------------------- -CAMLprim value hh_removed_count(value ml_unit) { - CAMLparam1(ml_unit); - UNUSED(ml_unit); - return Val_long(removed_count); -} -CAMLprim value get_file_info_on_disk( - value ml_unit +void assert_sql_with_line( + sqlite3 *db, + int result, + int correct_result, + int line_number ) { - CAMLparam1(ml_unit); - UNUSED(ml_unit); - const char *var = getenv(FILE_INFO_ON_DISK_PATH); - assert(var); - _Bool nonempty = strlen(var) > 0; - value ml_bool = Val_bool(nonempty); - CAMLreturn(ml_bool); -} + if (result == correct_result) { + return; + } -CAMLprim value set_file_info_on_disk_path( - value ml_str -) { - CAMLparam1(ml_str); - assert(Tag_val(ml_str) == String_tag); - const char *str = String_val(ml_str); - setenv(FILE_INFO_ON_DISK_PATH, str, 1); - CAMLreturn(Val_unit); -} + fprintf( + stderr, + "SQL assertion failure: Line: %d -> Expected: %d, Got: %d\n%s%s", + line_number, + correct_result, + result, + db == NULL ? "" : sqlite3_errmsg(db), + db == NULL ? "" : "\n"); + static value *exn = NULL; + if (!exn) { + exn = caml_named_value("sql_assertion_failure"); + } + caml_raise_with_arg(*exn, Val_long(result)); +} + +const char *create_tables_sql[] = { + "CREATE TABLE IF NOT EXISTS HEADER(" \ + " MAGIC_CONSTANT INTEGER PRIMARY KEY NOT NULL," \ + " BUILDINFO TEXT NOT NULL" \ + ");", + "CREATE TABLE IF NOT EXISTS DEPTABLE(" \ + " KEY_VERTEX INTEGER PRIMARY KEY NOT NULL," \ + " VALUE_VERTEX BLOB NOT NULL" \ + ");", +}; -CAMLprim value get_file_info_on_disk_path( - value ml_unit -) { - CAMLparam1(ml_unit); - const char *str = getenv(FILE_INFO_ON_DISK_PATH); - assert(str); - CAMLreturn(caml_copy_string(str)); +void make_all_tables(sqlite3 *db) { + assert(db); + for (int i = 0; i < ARRAY_SIZE(create_tables_sql); ++i) { + assert_sql( + db, + sqlite3_exec(db, create_tables_sql[i], NULL, 0, NULL), + SQLITE_OK); + } + return; } -CAMLprim value open_file_info_db( - value ml_unit -) { +CAMLprim value hh_removed_count(value ml_unit) { CAMLparam1(ml_unit); UNUSED(ml_unit); - const char *file_info_on_disk_path = getenv(FILE_INFO_ON_DISK_PATH); - assert(file_info_on_disk_path); - assert(strlen(file_info_on_disk_path) > 0); - if (g_db) { - CAMLreturn(Val_unit); - } - assert_sql( - sqlite3_open_v2( - file_info_on_disk_path, - &g_db, - SQLITE_OPEN_READONLY, - NULL - ), - SQLITE_OK); - CAMLreturn(Val_unit); + return Val_long(removed_count); } // Expects the database to be open @@ -2118,21 +2389,27 @@ static void write_sqlite_header(sqlite3 *db, const char* const buildInfo) { sqlite3_stmt *insert_stmt = NULL; const char *sql = \ "INSERT OR REPLACE INTO HEADER (MAGIC_CONSTANT, BUILDINFO) VALUES (?,?)"; - assert_sql(sqlite3_prepare_v2(db, sql, -1, &insert_stmt, NULL), SQLITE_OK); - assert_sql(sqlite3_bind_int64(insert_stmt, 1, MAGIC_CONSTANT), SQLITE_OK); - assert_sql(sqlite3_bind_text(insert_stmt, 2, - buildInfo, -1, - SQLITE_TRANSIENT), + assert_sql( + db, + sqlite3_prepare_v2(db, sql, -1, &insert_stmt, NULL), + SQLITE_OK); + assert_sql(db, sqlite3_bind_int64(insert_stmt, 1, MAGIC_CONSTANT), SQLITE_OK); + assert_sql( + db, + sqlite3_bind_text(insert_stmt, 2, buildInfo, -1, 
SQLITE_TRANSIENT), SQLITE_OK); - assert_sql(sqlite3_step(insert_stmt), SQLITE_DONE); - assert_sql(sqlite3_finalize(insert_stmt), SQLITE_OK); + assert_sql(db, sqlite3_step(insert_stmt), SQLITE_DONE); + assert_sql(db, sqlite3_finalize(insert_stmt), SQLITE_OK); } // Expects the database to be open static void verify_sqlite_header(sqlite3 *db, int ignore_hh_version) { sqlite3_stmt *select_stmt = NULL; const char *sql = "SELECT * FROM HEADER;"; - assert_sql(sqlite3_prepare_v2(db, sql, -1, &select_stmt, NULL), SQLITE_OK); + assert_sql( + db, + sqlite3_prepare_v2(db, sql, -1, &select_stmt, NULL), + SQLITE_OK); if (sqlite3_step(select_stmt) == SQLITE_ROW) { // Columns are 0 indexed @@ -2142,27 +2419,7 @@ static void verify_sqlite_header(sqlite3 *db, int ignore_hh_version) { BuildInfo_kRevision) == 0); } } - assert_sql(sqlite3_finalize(select_stmt), SQLITE_OK); -} - -size_t deptbl_entry_count_for_slot(size_t slot) { - assert(slot < dep_size); - - size_t count = 0; - deptbl_entry_t slotval = deptbl[slot]; - - if (slotval.raw != 0 && slotval.s.key.tag == TAG_KEY) { - while (slotval.s.next.tag == TAG_NEXT) { - assert(slotval.s.next.num < dep_size); - slotval = deptbl[slotval.s.next.num]; - count++; - } - - // The final "next" in the list is always a value, not a next pointer. - count++; - } - - return count; + assert_sql(db, sqlite3_finalize(select_stmt), SQLITE_OK); } static sqlite3 * connect_and_create_dep_table_helper( @@ -2173,7 +2430,7 @@ static sqlite3 * connect_and_create_dep_table_helper( sqlite3 *db_out = NULL; // sqlite3_open creates the db - assert_sql(sqlite3_open(out_filename, &db_out), SQLITE_OK); + assert_sql(NULL, sqlite3_open(out_filename, &db_out), SQLITE_OK); make_all_tables(db_out); return db_out; @@ -2206,32 +2463,59 @@ query_result_t get_dep_sqlite_blob_with_duration( return result; } +static void hh_swap_in_db(sqlite3 *db_out) { + if (g_get_dep_select_stmt != NULL) { + assert_sql( + db_out, + sqlite3_clear_bindings(g_get_dep_select_stmt), + SQLITE_OK); + assert_sql(db_out, sqlite3_reset(g_get_dep_select_stmt), SQLITE_OK); + assert_sql(db_out, sqlite3_finalize(g_get_dep_select_stmt), SQLITE_OK); + g_get_dep_select_stmt = NULL; + } + + if (g_db != NULL) { + sqlite3_close_v2(g_db); + g_db = NULL; + } + + g_db = db_out; + + kill_dep_used_slots(); +} + // Add all the entries in the in-memory deptable // into the connected database. This adds edges only, so the // resulting deptable may contain more edges than truly represented // in the code-base (after incremental changes), but never misses // any (modulo bugs). 
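+// When is_update is false (fresh save) the helper writes every in-memory row
+// as-is; when it is true, the existing VALUE_VERTEX blob for each key is
+// fetched first and merged, so previously stored edges are kept. If
+// replace_state_after_saving is set, the freshly written database is swapped
+// in as the live connection via hh_swap_in_db instead of being closed.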
-static size_t hh_update_dep_table_helper( +static size_t hh_save_dep_table_helper( sqlite3* const db_out, - const char* const build_info -) { + const char* const build_info, + const size_t replace_state_after_saving, + int is_update) { struct timeval start_t = { 0 }; gettimeofday(&start_t, NULL); // Create header for verification write_sqlite_header(db_out, build_info); // Hand-off the data to the OS for writing and continue, // don't wait for it to complete - assert_sql(sqlite3_exec(db_out, "PRAGMA synchronous = OFF", NULL, 0, NULL), + assert_sql( + db_out, + sqlite3_exec(db_out, "PRAGMA synchronous = OFF", NULL, 0, NULL), SQLITE_OK); // Store the rollback journal in memory assert_sql( + db_out, sqlite3_exec(db_out, "PRAGMA journal_mode = MEMORY", NULL, 0, NULL), SQLITE_OK); // Use one transaction for all the insertions - assert_sql(sqlite3_exec(db_out, "BEGIN TRANSACTION", NULL, 0, NULL), + assert_sql( + db_out, + sqlite3_exec(db_out, "BEGIN TRANSACTION", NULL, 0, NULL), SQLITE_OK); - // Create entries on the table + // Create entries in the table size_t slot = 0; size_t count = 0; size_t prev_count = 0; @@ -2241,12 +2525,16 @@ static size_t hh_update_dep_table_helper( sqlite3_stmt *select_dep_stmt = NULL; const char * sql = "INSERT OR REPLACE INTO DEPTABLE (KEY_VERTEX, VALUE_VERTEX) VALUES (?,?)"; - assert_sql(sqlite3_prepare_v2(db_out, sql, -1, &insert_stmt, NULL), + assert_sql( + db_out, + sqlite3_prepare_v2(db_out, sql, -1, &insert_stmt, NULL), SQLITE_OK); size_t existing_rows_lookup_duration = 0L; size_t existing_rows_updated_count = 0; size_t edges_added = 0; size_t new_rows_count = 0; + query_result_t existing = { 0 }; + for (slot = 0; slot < dep_size; ++slot) { count = deptbl_entry_count_for_slot(slot); if (count == 0) { @@ -2254,12 +2542,14 @@ static size_t hh_update_dep_table_helper( } deptbl_entry_t slotval = deptbl[slot]; - query_result_t existing = - get_dep_sqlite_blob_with_duration( - db_out, - slotval.s.key.num, - &select_dep_stmt, - &existing_rows_lookup_duration); + if (is_update) { + existing = get_dep_sqlite_blob_with_duration( + db_out, + slotval.s.key.num, + &select_dep_stmt, + &existing_rows_lookup_duration); + } + // Make sure we don't have malformed output assert(existing.size % sizeof(uint32_t) == 0); size_t existing_count = existing.size / sizeof(uint32_t); @@ -2273,7 +2563,9 @@ static size_t hh_update_dep_table_helper( if (slotval.raw != 0 && slotval.s.key.tag == TAG_KEY) { // This is the head of a linked list aka KEY VERTEX - assert_sql(sqlite3_bind_int(insert_stmt, 1, slotval.s.key.num), + assert_sql( + db_out, + sqlite3_bind_int(insert_stmt, 1, slotval.s.key.num), SQLITE_OK); // Then combine each value to VALUE VERTEX @@ -2300,12 +2592,13 @@ static size_t hh_update_dep_table_helper( new_rows_count += 1; } assert_sql( + db_out, sqlite3_bind_blob(insert_stmt, 2, values, iter * sizeof(uint32_t), SQLITE_TRANSIENT), SQLITE_OK); - assert_sql(sqlite3_step(insert_stmt), SQLITE_DONE); - assert_sql(sqlite3_clear_bindings(insert_stmt), SQLITE_OK); - assert_sql(sqlite3_reset(insert_stmt), SQLITE_OK); + assert_sql(db_out, sqlite3_step(insert_stmt), SQLITE_DONE); + assert_sql(db_out, sqlite3_clear_bindings(insert_stmt), SQLITE_OK); + assert_sql(db_out, sqlite3_reset(insert_stmt), SQLITE_OK); } edges_added += iter - existing_count; } @@ -2314,23 +2607,44 @@ static size_t hh_update_dep_table_helper( free(values); } - assert_sql(sqlite3_finalize(insert_stmt), SQLITE_OK); - assert_sql(sqlite3_exec(db_out, "END TRANSACTION", NULL, 0, NULL), SQLITE_OK); + assert_sql(db_out, 
sqlite3_finalize(insert_stmt), SQLITE_OK); + assert_sql( + db_out, + sqlite3_exec(db_out, "END TRANSACTION", NULL, 0, NULL), + SQLITE_OK); start_t = log_duration("Finished SQL Transaction", start_t); fprintf(stderr, "Lookup of existing rows took %lu us\n", existing_rows_lookup_duration); fprintf(stderr, "Wrote %lu new rows\n", new_rows_count); fprintf(stderr, "Updated %lu existing rows\n", existing_rows_updated_count); - destroy_prepared_stmt(&select_dep_stmt); - assert_sql(sqlite3_close(db_out), SQLITE_OK); - log_duration("Finished closing SQL connection", start_t); + + if (replace_state_after_saving) { + hh_swap_in_db(db_out); + } else { + destroy_prepared_stmt(&select_dep_stmt); + assert_sql(NULL, sqlite3_close(db_out), SQLITE_OK); + log_duration("Finished closing SQL connection", start_t); + } + return edges_added; } +static void set_db_filename(const char* const out_filename) { + size_t filename_len = strlen(out_filename); + + /* Since we save the filename on the heap, and have allocated only + * getpagesize() space + */ + assert(filename_len < getpagesize()); + + memcpy(db_filename, out_filename, filename_len); + db_filename[filename_len] = '\0'; +} + static size_t hh_save_dep_table_helper_sqlite( const char* const out_filename, - const char* const build_info -) { + const char* const build_info, + const size_t replace_state_after_saving) { // This can only happen in the master assert_master(); @@ -2339,9 +2653,19 @@ static size_t hh_save_dep_table_helper_sqlite( gettimeofday(&tv, NULL); sqlite3 *db_out = connect_and_create_dep_table_helper(out_filename); - size_t edges_added = hh_update_dep_table_helper(db_out, build_info); + size_t edges_added = hh_save_dep_table_helper( + db_out, + build_info, + replace_state_after_saving, + 0); // is_update == false + + if (replace_state_after_saving) { + set_db_filename(out_filename); + } + tv2 = log_duration("Writing dependency file with sqlite", tv); UNUSED(tv2); + return edges_added; } @@ -2352,23 +2676,28 @@ static size_t hh_save_dep_table_helper_sqlite( */ CAMLprim value hh_save_dep_table_sqlite( value out_filename, - value build_revision -) { - CAMLparam2(out_filename, build_revision); + value build_revision, + value replace_state_after_saving) { + CAMLparam3(out_filename, build_revision, replace_state_after_saving); char *out_filename_raw = String_val(out_filename); char *build_revision_raw = String_val(build_revision); - size_t edges_added = - hh_save_dep_table_helper_sqlite(out_filename_raw, build_revision_raw); + size_t replace_state_after_saving_raw = Bool_val(replace_state_after_saving); + size_t edges_added = hh_save_dep_table_helper_sqlite( + out_filename_raw, + build_revision_raw, + replace_state_after_saving_raw); + CAMLreturn(Val_long(edges_added)); } CAMLprim value hh_update_dep_table_sqlite( value out_filename, - value build_revision -) { - CAMLparam2(out_filename, build_revision); + value build_revision, + value replace_state_after_saving) { + CAMLparam3(out_filename, build_revision, replace_state_after_saving); char *out_filename_raw = String_val(out_filename); char *build_revision_raw = String_val(build_revision); + size_t replace_state_after_saving_raw = Bool_val(replace_state_after_saving); sqlite3 *db_out = NULL; // This can only happen in the master @@ -2378,44 +2707,20 @@ CAMLprim value hh_update_dep_table_sqlite( struct timeval tv2 = { 0 }; gettimeofday(&tv, NULL); - assert_sql(sqlite3_open(out_filename_raw, &db_out), SQLITE_OK); - size_t edges_added = hh_update_dep_table_helper(db_out, build_revision_raw); - 
UNUSED(log_duration("Updated dependency file with sqlite", tv)); - CAMLreturn(Val_long(edges_added)); -} + assert_sql(NULL, sqlite3_open(out_filename_raw, &db_out), SQLITE_OK); -CAMLprim value hh_save_file_info_init( - value ml_path -) { - CAMLparam1(ml_path); - const char *path = String_val(ml_path); - hhfi_init_db(path); - make_all_tables(hhfi_get_db()); - CAMLreturn(Val_unit); -} + size_t edges_added = hh_save_dep_table_helper( + db_out, + build_revision_raw, + replace_state_after_saving_raw, + 1); // is_update == true -CAMLprim value hh_save_file_info_free( - value ml_unit -) { - CAMLparam1(ml_unit); - UNUSED(ml_unit); - hhfi_free_db(); - CAMLreturn(Val_unit); -} + if (replace_state_after_saving_raw) { + set_db_filename(out_filename_raw); + } -CAMLprim value hh_save_file_info_sqlite( - value ml_hash, - value ml_name, - value ml_kind, - value ml_filespec -) { - CAMLparam4(ml_hash, ml_name, ml_kind, ml_filespec); - assert_master(); - const char *name = String_val(ml_name); - int64_t kind = Int_val(ml_kind); - const char *filespec = String_val(ml_filespec); - hhfi_insert_row(hhfi_get_db(), get_hash(ml_hash), name, kind, filespec); - CAMLreturn(Val_unit); + UNUSED(log_duration("Updated dependency file with sqlite", tv)); + CAMLreturn(Val_long(edges_added)); } CAMLprim value hh_get_loaded_dep_table_filename() { @@ -2445,18 +2750,12 @@ CAMLprim value hh_load_dep_table_sqlite( assert_master(); const char *filename = String_val(in_filename); - size_t filename_len = strlen(filename); - - /* Since we save the filename on the heap, and have allocated only - * getpagesize() space - */ - assert(filename_len < getpagesize()); - - memcpy(db_filename, filename, filename_len); - db_filename[filename_len] = '\0'; + set_db_filename(filename); // SQLITE_OPEN_READONLY makes sure that we throw if the db doesn't exist - assert_sql(sqlite3_open_v2(db_filename, &g_db, SQLITE_OPEN_READONLY, NULL), + assert_sql( + g_db, + sqlite3_open_v2(db_filename, &g_db, SQLITE_OPEN_READONLY, NULL), SQLITE_OK); // Verify the header @@ -2474,9 +2773,9 @@ void destroy_prepared_stmt(sqlite3_stmt ** stmt) { if (*stmt == NULL) { return; } - assert_sql(sqlite3_clear_bindings(*stmt), SQLITE_OK); - assert_sql(sqlite3_reset(*stmt), SQLITE_OK); - assert_sql(sqlite3_finalize(*stmt), SQLITE_OK); + assert_sql(g_db, sqlite3_clear_bindings(*stmt), SQLITE_OK); + assert_sql(g_db, sqlite3_reset(*stmt), SQLITE_OK); + assert_sql(g_db, sqlite3_finalize(*stmt), SQLITE_OK); *stmt = NULL; } @@ -2503,15 +2802,17 @@ query_result_t get_dep_sqlite_blob( if (*select_stmt == NULL) { const char *sql = "SELECT VALUE_VERTEX FROM DEPTABLE WHERE KEY_VERTEX=?;"; - assert_sql(sqlite3_prepare_v2(db, sql, -1, select_stmt, NULL), + assert_sql( + db, + sqlite3_prepare_v2(db, sql, -1, select_stmt, NULL), SQLITE_OK); assert(*select_stmt != NULL); } else { - assert_sql(sqlite3_clear_bindings(*select_stmt), SQLITE_OK); - assert_sql(sqlite3_reset(*select_stmt), SQLITE_OK); + assert_sql(db, sqlite3_clear_bindings(*select_stmt), SQLITE_OK); + assert_sql(db, sqlite3_reset(*select_stmt), SQLITE_OK); } - assert_sql(sqlite3_bind_int(*select_stmt, 1, key), SQLITE_OK); + assert_sql(db, sqlite3_bind_int(*select_stmt, 1, key), SQLITE_OK); int err_num = sqlite3_step(*select_stmt); // err_num is SQLITE_ROW if there is a row to look at, @@ -2534,7 +2835,7 @@ query_result_t get_dep_sqlite_blob( // Remaining cases are SQLITE_BUSY, SQLITE_ERROR, or SQLITE_MISUSE. // The first should never happen since we are reading here. // Regardless, something went wrong in sqlite3_step, lets crash. 
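+    // Note that assert_sql does not abort the process: assert_sql_with_line
+    // raises the registered OCaml exception "sql_assertion_failure"
+    // (Sql_assertion_failure in sharedMem.ml) with the SQLite result code.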
- assert_sql(err_num, SQLITE_ROW); + assert_sql(db, err_num, SQLITE_ROW); } // Unreachable. assert(0); @@ -2565,7 +2866,9 @@ CAMLprim value hh_get_dep_sqlite(value ocaml_key) { // since we are not connected yet, soo.. try to connect assert_not_master(); // SQLITE_OPEN_READONLY makes sure that we throw if the db doesn't exist - assert_sql(sqlite3_open_v2(db_filename, &g_db, SQLITE_OPEN_READONLY, NULL), + assert_sql( + g_db, + sqlite3_open_v2(db_filename, &g_db, SQLITE_OPEN_READONLY, NULL), SQLITE_OK); assert(g_db != NULL); } @@ -2573,6 +2876,9 @@ CAMLprim value hh_get_dep_sqlite(value ocaml_key) { uint32_t *values = NULL; // The caller is required to pass a 32-bit node ID. const uint64_t key64 = Long_val(ocaml_key); + + sqlite3_stmt *insert_stmt = NULL; + query_result_t query_result = get_dep_sqlite_blob(g_db, key64, &g_get_dep_select_stmt); // Make sure we don't have malformed output @@ -2604,7 +2910,8 @@ CAMLprim value hh_get_loaded_dep_table_filename() { CAMLprim value hh_save_dep_table_sqlite( value out_filename, - value build_revision + value build_revision, + value replace_state_after_saving ) { CAMLparam0(); CAMLreturn(Val_long(0)); @@ -2612,17 +2919,8 @@ CAMLprim value hh_save_dep_table_sqlite( CAMLprim value hh_update_dep_table_sqlite( value out_filename, - value build_revision -) { - CAMLparam0(); - CAMLreturn(Val_long(0)); -} - -CAMLprim value hh_save_file_info_sqlite( - value out_filename, - value ml_name, - value ml_kind, - value ml_filespec + value build_revision, + value replace_state_after_saving ) { CAMLparam0(); CAMLreturn(Val_long(0)); @@ -2641,51 +2939,6 @@ CAMLprim value hh_get_dep_sqlite(value ocaml_key) { CAMLreturn(Val_int(0)); } -CAMLprim value set_file_info_on_disk(value ml_str) { - CAMLparam1(ml_str); - UNUSED(ml_str); - CAMLreturn(Val_long(0)); -} - -CAMLprim value get_file_info_on_disk(value ml_str) { - CAMLparam1(ml_str); - UNUSED(ml_str); - CAMLreturn(Val_long(0)); -} - -CAMLprim value get_file_info_on_disk_path(value ml_str) { - CAMLparam1(ml_str); - UNUSED(ml_str); - CAMLreturn(caml_copy_string("")); -} - -CAMLprim value set_file_info_on_disk_path(value ml_str) { - CAMLparam1(ml_str); - UNUSED(ml_str); - CAMLreturn(Val_unit); -} - -CAMLprim value open_file_info_db( - value ml_unit -) { - UNUSED(ml_unit); - return Val_unit; -} - -CAMLprim value hh_save_file_info_init( - value ml_path -) { - UNUSED(ml_path); - return Val_unit; -} - -CAMLprim value hh_save_file_info_free( - value ml_unit -) { - UNUSED(ml_unit); - return Val_unit; -} - CAMLprim value hh_removed_count(value ml_unit) { CAMLparam1(ml_unit); UNUSED(ml_unit); diff --git a/hack/heap/hh_shared.h b/hack/heap/hh_shared.h index 519b9a393d5..521b255de14 100644 --- a/hack/heap/hh_shared.h +++ b/hack/heap/hh_shared.h @@ -1,6 +1,20 @@ #ifndef HH_SHARED_H #define HH_SHARED_H +#ifndef NO_SQLITE3 +#include + +#define assert_sql(db, x, y) (assert_sql_with_line((db), (x), (y), __LINE__)) + +void assert_sql_with_line( + sqlite3 *db, + int result, + int correct_result, + int line_number); + +void make_all_tables(sqlite3 *db); +#endif // NO_SQLITE3 + #define CAML_NAME_SPACE #include @@ -9,20 +23,22 @@ /*****************************************************************************/ /* Initializes the shared heap. */ /* Must be called by the master BEFORE forking the workers! */ -CAMLprim value hh_shared_init( value config_val, value shm_dir_val); -/* Must be called after the program is done initializing. We keep the original - * size of the heap to estimate how often we should garbage collect. 
- */ -void hh_call_after_init(void); +CAMLprim value hh_shared_init( + value config_val, + value shm_dir_val, + value num_workers_val +); value hh_check_heap_overflow(void); /* Must be called by every worker before any operation is performed. */ -value hh_connect(value connector, value is_master); +value hh_connect(value connector, value worker_id_val); /*****************************************************************************/ /* Heap diagnostics. */ /*****************************************************************************/ -CAMLprim value hh_heap_size(void); +CAMLprim value hh_used_heap_size(void); +CAMLprim value hh_wasted_heap_size(void); CAMLprim value hh_log_level(void); +CAMLprim value hh_sample_rate(void); CAMLprim value hh_hash_used_slots(void); CAMLprim value hh_hash_slots(void); @@ -51,14 +67,11 @@ void hh_shared_clear(void); /*****************************************************************************/ /* Garbage collection. */ /*****************************************************************************/ -CAMLprim value hh_should_collect(value aggressive_val); -CAMLprim value hh_collect(value aggressive_val); +CAMLprim value hh_collect(void); /*****************************************************************************/ /* Deserialization. */ /*****************************************************************************/ -/* Deserializes the value pointed by src. */ -CAMLprim value hh_deserialize(char *src); /* Returns the value associated to a given key, and deserialize it. */ /* The key MUST be present. */ CAMLprim value hh_get_and_deserialize(value key); @@ -106,7 +119,13 @@ void hh_cleanup_sqlite(void); /* Dependency table. */ CAMLprim value hh_save_dep_table_sqlite( value out_filename, - value build_revision + value build_revision, + value replace_state_after_saving +); +CAMLprim value hh_update_dep_table_sqlite( + value out_filename, + value build_revision, + value replace_state_after_saving ); CAMLprim value hh_load_dep_table_sqlite( value in_filename, @@ -114,14 +133,4 @@ CAMLprim value hh_load_dep_table_sqlite( ); CAMLprim value hh_get_dep_sqlite(value ocaml_key); -/* File information. */ -CAMLprim value hh_save_file_info_init(value ml_path); -CAMLprim value hh_save_file_info_free(value ml_unit); -CAMLprim value hh_save_file_info_sqlite( - value ml_hash, - value ml_name, - value ml_kind, - value ml_filespec -); - #endif diff --git a/hack/heap/hh_shared_sqlite.c b/hack/heap/hh_shared_sqlite.c deleted file mode 100644 index d68113ec915..00000000000 --- a/hack/heap/hh_shared_sqlite.c +++ /dev/null @@ -1,163 +0,0 @@ -/** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * - */ -#ifndef NO_SQLITE3 - -#include "hh_shared_sqlite.h" - -#define CAML_NAME_SPACE -#include -#include -#include - -#include - -#include -#include -#include -#include - -#include "hh_assert.h" - -#define ARRAY_SIZE(array) \ - (sizeof(array) / sizeof((array)[0])) - -#define UNUSED(x) \ - ((void)(x)) - -#define UNUSED2(a, b) \ - (UNUSED(a), UNUSED(b)) - - -const char *create_tables_sql[] = { - "CREATE TABLE IF NOT EXISTS HEADER(" \ - " MAGIC_CONSTANT INTEGER PRIMARY KEY NOT NULL," \ - " BUILDINFO TEXT NOT NULL" \ - ");", - "CREATE TABLE IF NOT EXISTS NAME_INFO(" \ - " HASH INTEGER PRIMARY KEY NOT NULL," \ - " NAME TEXT NOT NULL," \ - " NKIND INTEGER NOT NULL," \ - " FILESPEC TEXT NOT NULL" \ - ");", - "CREATE TABLE IF NOT EXISTS DEPTABLE(" \ - " KEY_VERTEX INTEGER PRIMARY KEY NOT NULL," \ - " VALUE_VERTEX BLOB NOT NULL" \ - ");", -}; - -void make_all_tables(sqlite3 *db) { - assert(db); - for (int i = 0; i < ARRAY_SIZE(create_tables_sql); ++i) { - assert_sql(sqlite3_exec(db, create_tables_sql[i], NULL, 0, NULL), - SQLITE_OK); - } - return; -} - -void assert_sql_with_line( - int result, - int correct_result, - int line_number -) { - if (result == correct_result) return; - fprintf(stderr, - "SQL assertion failure: Line: %d -> Expected: %d, Got: %d\n", - line_number, - correct_result, - result); - static value *exn = NULL; - if (!exn) exn = caml_named_value("sql_assertion_failure"); - caml_raise_with_arg(*exn, Val_long(result)); -} - -static const char *hhfi_insert_row_sql = \ - "INSERT INTO NAME_INFO (HASH, NAME, NKIND, FILESPEC) VALUES (?, ?, ?, ?);"; - -// insert a row into the name_info table -void hhfi_insert_row( - sqlite3_ptr db, - int64_t hash, - const char *name, - int64_t kind, - const char *filespec -) { - assert(db); - assert(name); - assert(filespec); - const char *sql = hhfi_insert_row_sql; - sqlite3_stmt *stmt = NULL; - assert_sql(sqlite3_prepare_v2(db, sql, -1, &stmt, NULL), SQLITE_OK); - assert_sql(sqlite3_bind_int64(stmt, 1, hash), SQLITE_OK); - assert_sql(sqlite3_bind_text(stmt, 2, name, -1, SQLITE_TRANSIENT), - SQLITE_OK); - assert_sql(sqlite3_bind_int64(stmt, 3, kind), SQLITE_OK); - assert_sql(sqlite3_bind_text(stmt, 4, filespec, -1, SQLITE_TRANSIENT), - SQLITE_OK); - assert_sql(sqlite3_step(stmt), SQLITE_DONE); - assert_sql(sqlite3_finalize(stmt), SQLITE_OK); - return; -} - -static char *copy_malloc(const char *s) { - char *d = malloc(1 + strlen(s)); - assert(d); - return strcpy(d, s); -} - -static sqlite3_ptr hhfi_db = NULL; - -static const char *hhfi_get_filespec_sql = \ - "SELECT FILESPEC FROM NAME_INFO WHERE (HASH = (?));"; - -char *hhfi_get_filespec( - sqlite3_ptr db, - int64_t hash -) { - assert(db); - const char *sql = hhfi_get_filespec_sql; - sqlite3_stmt *stmt = NULL; - assert_sql(sqlite3_prepare_v2(db, sql, -1, &stmt, NULL), SQLITE_OK); - assert_sql(sqlite3_bind_int64(stmt, 1, hash), SQLITE_OK); - int sqlerr = sqlite3_step(stmt); - char *out = NULL; - if (sqlerr == SQLITE_DONE) { - // do nothing - } else if (sqlerr == SQLITE_ROW) { - // sqlite returns const unsigned char* - out = copy_malloc((char *) sqlite3_column_text(stmt, 0)); - // make sure there are no more rows - assert_sql(sqlite3_step(stmt), SQLITE_DONE); - } else { - // unexpected sqlite status - assert(0); - } - sqlite3_finalize(stmt); - return out; -} - -void hhfi_init_db(const char *path) { - assert(hhfi_db == NULL); - assert_sql(sqlite3_open(path, &hhfi_db), SQLITE_OK); - assert_sql(sqlite3_exec(hhfi_db, "BEGIN TRANSACTION;", 0, 0, 0), SQLITE_OK); - return; -} - -void hhfi_free_db(void) { - 
assert(hhfi_db != NULL); - assert_sql(sqlite3_exec(hhfi_db, "END TRANSACTION;", 0, 0, 0), SQLITE_OK); - assert_sql(sqlite3_close(hhfi_db), SQLITE_OK); - return; -} - -sqlite3_ptr hhfi_get_db(void) { - assert(hhfi_db != NULL); - return hhfi_db; -} - -#endif /* NO_SQLITE3 */ diff --git a/hack/heap/hh_shared_sqlite.h b/hack/heap/hh_shared_sqlite.h deleted file mode 100644 index 2f6cdaacde0..00000000000 --- a/hack/heap/hh_shared_sqlite.h +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - */ - -#ifndef HH_SHARED_SQLITE_H -#define HH_SHARED_SQLITE_H - -#ifndef NO_SQLITE3 - -#include - -#include - -typedef sqlite3 *sqlite3_ptr; - -#define assert_sql(x, y) (assert_sql_with_line((x), (y), __LINE__)) - -void assert_sql_with_line( - int result, - int correct_result, - int line_number); - -void make_all_tables(sqlite3 *db); - -void hhfi_insert_row( - sqlite3_ptr db, - int64_t hash, - const char *name, - int64_t kind, - const char *filespec -); - -char *hhfi_get_filespec( - sqlite3_ptr db, - int64_t hash -); - -void hhfi_init_db(const char *path); -void hhfi_free_db(void); -sqlite3_ptr hhfi_get_db(void); - -#endif /* NO_SQLITE3 */ -#endif /* HH_SHARED_SQLITE_H */ diff --git a/hack/heap/ident.ml b/hack/heap/ident.ml index 71c4779d2d0..2dd5004725c 100644 --- a/hack/heap/ident.ml +++ b/hack/heap/ident.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -14,30 +14,28 @@ type t = int let compare x y = x - y let track_names = ref false + let trace = ref IMap.empty let tmp () = let res = hh_counter_next () in - if !track_names then begin - trace := IMap.add res ("__tmp"^string_of_int res) !trace ; - end; + if !track_names then + trace := IMap.add res ("__tmp" ^ string_of_int res) !trace; res let to_string x = - try IMap.find_unsafe x !trace - with Not_found -> "v"^string_of_int x + (try IMap.find_unsafe x !trace with Not_found -> "v" ^ string_of_int x) -let debug ?normalize:(f=fun x->x) x = +let debug ?normalize:(f = (fun x -> x)) x = let normalized_x = string_of_int (f x) in - try IMap.find_unsafe x !trace^"["^normalized_x^"]" - with Not_found -> "tvar_"^normalized_x + try IMap.find_unsafe x !trace ^ "[" ^ normalized_x ^ "]" + with Not_found -> "tvar_" ^ normalized_x let get_name x = - assert (!track_names); + assert !track_names; IMap.find_unsafe x !trace -let set_name x y = - trace := IMap.add x y !trace +let set_name x y = trace := IMap.add x y !trace let make x = let res = hh_counter_next () in diff --git a/hack/heap/ident.mli b/hack/heap/ident.mli index 5cf07399d8c..a1929990ef4 100644 --- a/hack/heap/ident.mli +++ b/hack/heap/ident.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* diff --git a/hack/heap/lib_dumbsqlite.c b/hack/heap/lib_dumbsqlite.c deleted file mode 100644 index 1157bfd2858..00000000000 --- a/hack/heap/lib_dumbsqlite.c +++ /dev/null @@ -1,475 +0,0 @@ -#ifdef NO_SQLITE3 -// nothing -#else -#include -#endif -#include -#include - -#define CAML_NAME_SPACE -#include -#include -#include - -#define SQLITE_DBS 100 -#define SQLITE_STMTS 1000 - -#define ERR_SLOT_TAKEN -100 -#define ERR_INDEX_OUT_OF_RANGE -101 -#define ERR_NULL_PTR -102 -#define ERR_INTERNAL -103 -#define ERR_DB_NOT_PRESENT -104 -#define ERR_STMT_INDEX_OUT_OF_RANGE -105 -#define ERR_STMT_SLOT_TAKEN -106 -#define ERR_STMT_NOT_PRESENT -107 -#define ERR_DB_ALLOC_FAILED -108 -#define ERR_PARAM_OUT_OF_RANGE -109 -#define ERR_COLUMN_OUT_OF_RANGE -110 -#define ERR_GOT_NULL_TEXT_COLUMN -111 -#define ERR_WOULD_CLOBBER -112 -#define ERR_NO_SQLITE3 -113 - -#ifdef NO_SQLITE3 -// nothing -#else -sqlite3 *dumb_sqlite_dbs[SQLITE_DBS] = { 0 }; -sqlite3_stmt *dumb_sqlite_stmts[SQLITE_STMTS] = { 0 }; -#endif - - -#ifdef NO_SQLITE3 -int dumb_sqlite_open(int index, const char *path, int readonly) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_open(int index, const char *path, int readonly) -{ - if (index < 0) - return ERR_INDEX_OUT_OF_RANGE; - if (index >= SQLITE_DBS) - return ERR_INDEX_OUT_OF_RANGE; - if (path == NULL) - return ERR_NULL_PTR; - if (dumb_sqlite_dbs[index] != NULL) - return ERR_SLOT_TAKEN; - int sqlerr = ERR_INTERNAL; - if (readonly) { - sqlerr = sqlite3_open_v2( - path, - &(dumb_sqlite_dbs[index]), - SQLITE_OPEN_READONLY, - NULL - ); - } else { - sqlerr = sqlite3_open( - path, - &(dumb_sqlite_dbs[index]) - ); - } - if (dumb_sqlite_dbs[index] == NULL) { - return ERR_DB_ALLOC_FAILED; - } - return sqlerr; -} -#endif //NO_SQLITE3 - - -CAMLprim value caml_dumb_sqlite_open( - value ml_index, value ml_path, value ml_readonly -) { - CAMLparam3(ml_index, ml_path, ml_readonly); - CAMLreturn( - Val_int( - dumb_sqlite_open( - Int_val(ml_index), - String_val(ml_path), - Bool_val(ml_readonly) - ) - ) - ); -} - - -#ifdef NO_SQLITE3 -int dumb_sqlite_close(int index) { - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_close(int index) { - if (index < 0) - return ERR_INDEX_OUT_OF_RANGE; - if (index >= SQLITE_DBS) - return ERR_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_dbs[index] == NULL) - return ERR_DB_NOT_PRESENT; - int sqlerr = sqlite3_close(dumb_sqlite_dbs[index]); - if (sqlerr == SQLITE_OK) { - dumb_sqlite_dbs[index] = NULL; - } - return sqlerr; -} -#endif //NO_SQLITE3 - - -CAMLprim value caml_dumb_sqlite_close( - value ml_index -) { - CAMLparam1(ml_index); - CAMLreturn( - Val_int( - dumb_sqlite_close( - Int_val(ml_index) - ) - ) - ); -} - -#ifdef NO_SQLITE3 -int dumb_sqlite_prepare(int index, int s_index, const char *sql) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_prepare(int index, int s_index, const char *sql) -{ - if (index < 0) - return ERR_INDEX_OUT_OF_RANGE; - if (index >= SQLITE_DBS) - return ERR_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_dbs[index] == NULL) - return ERR_DB_NOT_PRESENT; - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] != NULL) - return ERR_STMT_SLOT_TAKEN; - if (sql == NULL) - return ERR_NULL_PTR; - int sqlerr = sqlite3_prepare_v2( - dumb_sqlite_dbs[index], - sql, - -1, - &(dumb_sqlite_stmts[s_index]), - NULL - ); - return sqlerr; -} -#endif - - -CAMLprim value caml_dumb_sqlite_prepare( - value ml_index, value ml_s_index, value ml_sql -) { - CAMLparam3(ml_index, 
ml_s_index, ml_sql); - CAMLreturn( - Val_int( - dumb_sqlite_prepare( - Int_val(ml_index), - Int_val(ml_s_index), - String_val(ml_sql) - ) - ) - ); -} - -#ifdef NO_SQLITE3 -int dumb_sqlite_reset(int s_index) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_reset(int s_index) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - int sqlerr = sqlite3_reset( - dumb_sqlite_stmts[s_index] - ); - return sqlerr; -} -#endif - - -CAMLprim value caml_dumb_sqlite_reset( - value ml_s_index -) { - CAMLparam1(ml_s_index); - CAMLreturn( - Val_int( - dumb_sqlite_reset( - Int_val(ml_s_index) - ) - ) - ); -} - - -#ifdef NO_SQLITE3 -int dumb_sqlite_finalize(int s_index) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_finalize(int s_index) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - int sqlerr = sqlite3_finalize( - dumb_sqlite_stmts[s_index] - ); - if (sqlerr == SQLITE_OK) { - dumb_sqlite_stmts[s_index] = NULL; - } - return sqlerr; -} -#endif - - -CAMLprim value caml_dumb_sqlite_finalize( - value ml_s_index -) { - CAMLparam1(ml_s_index); - CAMLreturn( - Val_int( - dumb_sqlite_finalize( - Int_val(ml_s_index) - ) - ) - ); -} - - -#ifdef NO_SQLITE3 -int dumb_sqlite_step(int s_index) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_step(int s_index) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - int sqlerr = sqlite3_step( - dumb_sqlite_stmts[s_index] - ); - return sqlerr; -} -#endif - - -CAMLprim value caml_dumb_sqlite_step( - value ml_s_index -) { - CAMLparam1(ml_s_index); - CAMLreturn( - Val_int( - dumb_sqlite_step( - Int_val(ml_s_index) - ) - ) - ); -} - - -#ifdef NO_SQLITE3 -int dumb_sqlite_bind_int64(int s_index, int param, int64_t value_) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_bind_int64(int s_index, int param, int64_t value_) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - if (param < 1) - return ERR_PARAM_OUT_OF_RANGE; - int sqlerr = sqlite3_bind_int64( - dumb_sqlite_stmts[s_index], - param, - value_ - ); - return sqlerr; -} -#endif - - -CAMLprim value caml_dumb_sqlite_bind_int64( - value ml_s_index, value ml_param, value ml_value -) { - CAMLparam3(ml_s_index, ml_param, ml_value); - CAMLreturn( - Val_int( - dumb_sqlite_bind_int64( - Int64_val(ml_s_index), - Int_val(ml_param), - Long_val(ml_value) - ) - ) - ); -} - - -#ifdef NO_SQLITE3 -int dumb_sqlite_bind_text(int s_index, int param, const char *value_) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_bind_text(int s_index, int param, const char *value_) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - if (param < 1) - return ERR_PARAM_OUT_OF_RANGE; - int sqlerr = sqlite3_bind_text( - dumb_sqlite_stmts[s_index], - param, - value_, - -1, - SQLITE_TRANSIENT - ); - return sqlerr; -} -#endif - - -CAMLprim value caml_dumb_sqlite_bind_text( - value ml_s_index, value 
ml_param, value ml_value -) -{ - CAMLparam3(ml_s_index, ml_param, ml_value); - CAMLreturn( - Val_int( - dumb_sqlite_bind_text( - Int_val(ml_s_index), - Int_val(ml_param), - String_val(ml_value) - ) - ) - ); -} - -#ifdef NO_SQLITE3 -int dumb_sqlite_column_int64(int s_index, int column, /*out*/ int64_t *out_value) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_column_int64(int s_index, int column, /*out*/ int64_t *out_value) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - if (column < 0) - return ERR_COLUMN_OUT_OF_RANGE; - if (out_value == NULL) - return ERR_NULL_PTR; - int64_t res = sqlite3_column_int64( - dumb_sqlite_stmts[s_index], - column - ); - *out_value = res; - return SQLITE_OK; -} -#endif - - -CAMLprim value caml_dumb_sqlite_column_int64( - value ml_s_index, value ml_column -) { - CAMLparam2(ml_s_index, ml_column); - CAMLlocal3(ml_pair, ml_first, ml_second); - int64_t out = 0; - ml_first = - Val_int( - dumb_sqlite_column_int64( - Int_val(ml_s_index), - Int_val(ml_column), - &out - ) - ); - ml_second = caml_copy_int64(out); - ml_pair = caml_alloc_tuple(2); - Store_field(ml_pair, 0, ml_first); - Store_field(ml_pair, 1, ml_second); - CAMLreturn(ml_pair); -} - -#ifdef NO_SQLITE3 -int dumb_sqlite_column_text(int s_index, int column, /*out*/ char **out_value) -{ - return ERR_NO_SQLITE3; -} -#else -int dumb_sqlite_column_text(int s_index, int column, /*out*/ char **out_value) -{ - if (s_index < 0) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (s_index >= SQLITE_DBS) - return ERR_STMT_INDEX_OUT_OF_RANGE; - if (dumb_sqlite_stmts[s_index] == NULL) - return ERR_STMT_NOT_PRESENT; - if (column < 0) - return ERR_COLUMN_OUT_OF_RANGE; - if (out_value == NULL) - return ERR_NULL_PTR; - if (*out_value != NULL) - return ERR_WOULD_CLOBBER; - char *res = (char *) sqlite3_column_text( - dumb_sqlite_stmts[s_index], - column - ); - *out_value = res; - return SQLITE_OK; -} -#endif - - -CAMLprim value caml_dumb_sqlite_column_text( - value ml_s_index, value ml_column -) { - CAMLparam2(ml_s_index, ml_column); - CAMLlocal3(ml_pair, ml_first, ml_second); - char *out = NULL; - ml_first = - Val_int( - dumb_sqlite_column_text( - Int_val(ml_s_index), - Int_val(ml_column), - &out - ) - ); - if (out == NULL) { - out = ""; - } - ml_second = caml_copy_string(out); - ml_pair = caml_alloc_tuple(2); - Store_field(ml_pair, 0, ml_first); - Store_field(ml_pair, 1, ml_second); - CAMLreturn(ml_pair); -} diff --git a/hack/heap/prefix.ml b/hack/heap/prefix.ml index 71c0383d4e2..088bfe43fd0 100644 --- a/hack/heap/prefix.ml +++ b/hack/heap/prefix.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,14 +7,13 @@ * *) - (*****************************************************************************) (* The prefix is used to guarantee that we are not mixing different kind of * keys in the heap. * It just creates a new prefix every time its called. * The $ at the end of the prefix ensures that we don't have ambiguities if a key * happens to start with a digit. 
-*) + *) (*****************************************************************************) type t = string @@ -25,8 +24,7 @@ let make = incr prefix_count; string_of_int !prefix_count ^ "$" -let make_key prefix k = - prefix ^ k +let make_key prefix k = prefix ^ k let remove prefix k = let prefix_size = String.length prefix in diff --git a/hack/heap/prefix.mli b/hack/heap/prefix.mli index 4d650f9655e..e435d7eda45 100644 --- a/hack/heap/prefix.mli +++ b/hack/heap/prefix.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,20 +7,19 @@ * *) - (*****************************************************************************) (* The prefix is used to guarantee that we are not mixing different kind of * keys in the heap. * It just creates a new prefix every time its called. -*) + *) (*****************************************************************************) type t (* Better make the type abstract *) -val make: unit -> t +val make : unit -> t (* Given a prefix and a key make me a prefixed key *) -val make_key: t -> string -> string +val make_key : t -> string -> string (* Removes the prefix from a key *) -val remove: t -> string -> string +val remove : t -> string -> string diff --git a/hack/heap/sharedMem.ml b/hack/heap/sharedMem.ml index ccd1a2e56b4..73ca4b23c88 100644 --- a/hack/heap/sharedMem.ml +++ b/hack/heap/sharedMem.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -12,13 +12,14 @@ open Hh_core (* Don't change the ordering of this record without updating hh_shared_init in * hh_shared.c, which indexes into config objects *) type config = { - global_size : int; - heap_size : int; - dep_table_pow : int; - hash_table_pow : int; - shm_dirs : string list; - shm_min_avail : int; - log_level : int; + global_size: int; + heap_size: int; + dep_table_pow: int; + hash_table_pow: int; + shm_dirs: string list; + shm_min_avail: int; + log_level: int; + sample_rate: float; } (* Allocated in C only. 
*) @@ -28,40 +29,33 @@ type handle = private { h_heap_size: int; } -(* note: types are in the same kind as classes *) -let int_of_kind kind = match kind with - | `ConstantK -> 0 - | `ClassK -> 1 - | `FuncK -> 2 - -let kind_of_int x = match x with - | 0 -> `ConstantK - | 1 -> `ClassK - | 2 -> `FuncK - | _ when x < 0 -> failwith "kind_of_int: attempted to convert from negative int" - | _ -> assert (x > 0); failwith "kind_of_int: int too large, no corresponding kind" -let _kind_of_int = kind_of_int - - exception Out_of_shared_memory + exception Hash_table_full + exception Dep_table_full + exception Heap_full + exception Revision_length_is_zero + exception Sql_assertion_failure of int + exception Failed_anonymous_memfd_init + exception Less_than_minimum_available of int + exception Failed_to_use_shm_dir of string + exception C_assertion_failure of string + let () = Callback.register_exception "out_of_shared_memory" Out_of_shared_memory; Callback.register_exception "hash_table_full" Hash_table_full; Callback.register_exception "dep_table_full" Dep_table_full; Callback.register_exception "heap_full" Heap_full; Callback.register_exception "revision_length_is_zero" Revision_length_is_zero; - Callback.register_exception - "sql_assertion_failure" - (Sql_assertion_failure 0); + Callback.register_exception "sql_assertion_failure" (Sql_assertion_failure 0); Callback.register_exception "failed_anonymous_memfd_init" Failed_anonymous_memfd_init; @@ -69,112 +63,103 @@ let () = "less_than_minimum_available" (Less_than_minimum_available 0); Callback.register_exception - "c_assertion_failure" (C_assertion_failure "dummy string") + "c_assertion_failure" + (C_assertion_failure "dummy string") (*****************************************************************************) (* Initializes the shared memory. Must be called before forking. 
*) (*****************************************************************************) external hh_shared_init : - config:config -> shm_dir:string option -> handle = "hh_shared_init" + config:config -> shm_dir:string option -> num_workers:int -> handle + = "hh_shared_init" -let anonymous_init config = - hh_shared_init - ~config - ~shm_dir: None +let anonymous_init config ~num_workers = + hh_shared_init ~config ~shm_dir:None ~num_workers -let rec shm_dir_init config = function -| [] -> - Hh_logger.log - "We've run out of filesystems to use for shared memory"; +let rec shm_dir_init config ~num_workers = function + | [] -> + Hh_logger.log "We've run out of filesystems to use for shared memory"; raise Out_of_shared_memory -| shm_dir::shm_dirs -> + | shm_dir :: shm_dirs -> let shm_min_avail = config.shm_min_avail in - begin try + begin (* For some reason statvfs is segfaulting when the directory doesn't * exist, instead of returning -1 and an errno *) - if not (Sys.file_exists shm_dir) - then raise (Failed_to_use_shm_dir "shm_dir does not exist"); - hh_shared_init - ~config - ~shm_dir:(Some shm_dir) - with - | Less_than_minimum_available avail -> - EventLogger.(log_if_initialized (fun () -> - sharedmem_less_than_minimum_available - ~shm_dir - ~shm_min_avail - ~avail - )); - if !Utils.debug - then Hh_logger.log - "Filesystem %s only has %d bytes available, \ - which is less than the minimum %d bytes" - shm_dir - avail - config.shm_min_avail; - shm_dir_init config shm_dirs - | Unix.Unix_error (e, fn, arg) -> + try + if not (Sys.file_exists shm_dir) then + raise (Failed_to_use_shm_dir "shm_dir does not exist"); + hh_shared_init ~config ~shm_dir:(Some shm_dir) ~num_workers + with + | Less_than_minimum_available avail -> + EventLogger.( + log_if_initialized (fun () -> + sharedmem_less_than_minimum_available + ~shm_dir + ~shm_min_avail + ~avail)); + if !Utils.debug then + Hh_logger.log + "Filesystem %s only has %d bytes available, which is less than the minimum %d bytes" + shm_dir + avail + config.shm_min_avail; + shm_dir_init config ~num_workers shm_dirs + | Unix.Unix_error (e, fn, arg) -> let fn_string = - if fn = "" - then "" - else Utils.spf " thrown by %s(%s)" fn arg in + if fn = "" then + "" + else + Utils.spf " thrown by %s(%s)" fn arg + in let reason = - Utils.spf "Unix error%s: %s" fn_string (Unix.error_message e) in - EventLogger.(log_if_initialized (fun () -> - sharedmem_failed_to_use_shm_dir ~shm_dir ~reason - )); - if !Utils.debug - then Hh_logger.log - "Failed to use shm dir `%s`: %s" - shm_dir - reason; - shm_dir_init config shm_dirs - | Failed_to_use_shm_dir reason -> - EventLogger.(log_if_initialized (fun () -> - sharedmem_failed_to_use_shm_dir ~shm_dir ~reason - )); - if !Utils.debug - then Hh_logger.log - "Failed to use shm dir `%s`: %s" - shm_dir - reason; - shm_dir_init config shm_dirs + Utils.spf "Unix error%s: %s" fn_string (Unix.error_message e) + in + EventLogger.( + log_if_initialized (fun () -> + sharedmem_failed_to_use_shm_dir ~shm_dir ~reason)); + if !Utils.debug then + Hh_logger.log "Failed to use shm dir `%s`: %s" shm_dir reason; + shm_dir_init config ~num_workers shm_dirs + | Failed_to_use_shm_dir reason -> + EventLogger.( + log_if_initialized (fun () -> + sharedmem_failed_to_use_shm_dir ~shm_dir ~reason)); + if !Utils.debug then + Hh_logger.log "Failed to use shm dir `%s`: %s" shm_dir reason; + shm_dir_init config ~num_workers shm_dirs end -let init config = - try anonymous_init config +let init config ~num_workers = + try anonymous_init config ~num_workers with 
Failed_anonymous_memfd_init -> - EventLogger.(log_if_initialized (fun () -> - sharedmem_failed_anonymous_memfd_init () - )); - if !Utils.debug - then Hh_logger.log "Failed to use anonymous memfd init"; - shm_dir_init config config.shm_dirs + EventLogger.( + log_if_initialized (fun () -> sharedmem_failed_anonymous_memfd_init ())); + if !Utils.debug then Hh_logger.log "Failed to use anonymous memfd init"; + shm_dir_init config ~num_workers config.shm_dirs external allow_removes : bool -> unit = "hh_allow_removes" external allow_hashtable_writes_by_current_process : bool -> unit = "hh_allow_hashtable_writes_by_current_process" -external connect : handle -> is_master:bool -> unit = "hh_connect" +external connect : handle -> worker_id:int -> unit = "hh_connect" (*****************************************************************************) (* The shared memory garbage collector. It must be called every time we * free data (cf hh_shared.c for the underlying C implementation). *) (*****************************************************************************) -external hh_should_collect: bool -> bool = "hh_should_collect" [@@noalloc] - -external hh_collect: bool -> unit = "hh_collect" [@@noalloc] +external hh_collect : unit -> unit = "hh_collect" [@@noalloc] (*****************************************************************************) (* Serializes the dependency table and writes it to a file *) (*****************************************************************************) -external loaded_dep_table_filename_c: unit -> string = "hh_get_loaded_dep_table_filename" +external loaded_dep_table_filename_c : unit -> string + = "hh_get_loaded_dep_table_filename" -external get_in_memory_dep_table_entry_count: unit -> int = - "hh_get_in_memory_dep_table_entry_count" +external get_in_memory_dep_table_entry_count : unit -> int + = "hh_get_in_memory_dep_table_entry_count" let loaded_dep_table_filename () = let fn = loaded_dep_table_filename_c () in @@ -183,63 +168,82 @@ let loaded_dep_table_filename () = else Some fn -(** Returns number of dependency edges added. *) -external save_dep_table_sqlite_c: string -> string -> int = "hh_save_dep_table_sqlite" - -(** Returns number of dependency edges added. *) -external update_dep_table_sqlite_c: string -> string -> int ="hh_update_dep_table_sqlite" - -let save_dep_table_sqlite : string -> string -> int = fun fn build_revision -> - if (loaded_dep_table_filename ()) <> None then - failwith "save_dep_table_sqlite not supported when server is loaded from a saved state"; - Hh_logger.log "Dumping a saved state deptable."; - save_dep_table_sqlite_c fn build_revision - -let update_dep_table_sqlite : string -> string -> int = fun fn build_revision -> +external save_dep_table_blob_c : string -> string -> int + = "hh_save_dep_table_blob" + +(* Returns number of dependency edges added. *) +external save_dep_table_sqlite_c : string -> string -> bool -> int + = "hh_save_dep_table_sqlite" + +(* Returns number of dependency edges added. 
*) +external update_dep_table_sqlite_c : string -> string -> bool -> int + = "hh_update_dep_table_sqlite" + +let save_dep_table_sqlite : string -> string -> bool -> int = + fun fn build_revision replace_state_after_saving -> + if loaded_dep_table_filename () <> None then + failwith + "save_dep_table_sqlite not supported when server is loaded from a saved state; use update_dep_table_sqlite"; + Hh_logger.log "Dumping a saved state deptable into a SQLite DB."; + save_dep_table_sqlite_c fn build_revision replace_state_after_saving + +let save_dep_table_blob : string -> string -> bool -> int = + fun fn build_revision _replace_state_after_saving -> + if loaded_dep_table_filename () <> None then + failwith + "save_dep_table_blob not supported when the server is loaded from a saved state; use update_dep_table_sqlite"; + Hh_logger.log "Dumping a saved state deptable as a blob."; + + (* TODO: use replace_state_after_saving? *) + save_dep_table_blob_c fn build_revision + +let update_dep_table_sqlite : string -> string -> bool -> int = + fun fn build_revision replace_state_after_saving -> Hh_logger.log "Updating given saved state deptable."; - update_dep_table_sqlite_c fn build_revision + update_dep_table_sqlite_c fn build_revision replace_state_after_saving (*****************************************************************************) -(* Serializes the dependency table and writes it to a file *) +(* Loads the dependency table by reading from a file *) (*****************************************************************************) -external hh_save_file_info_sqlite: string -> string -> int -> string -> unit = - "hh_save_file_info_sqlite" -let save_file_info_sqlite ~hash ~name kind filespec = - hh_save_file_info_sqlite hash name (int_of_kind kind) filespec - -external hh_save_file_info_init : string -> unit = - "hh_save_file_info_init" -let save_file_info_init path = hh_save_file_info_init path -external hh_save_file_info_free : unit -> unit = - "hh_save_file_info_free" -let save_file_info_free = hh_save_file_info_free +external load_dep_table_blob_c : string -> bool -> int + = "hh_load_dep_table_blob" -(*****************************************************************************) -(* Loads the dependency table by reading from a file *) -(*****************************************************************************) +external load_dep_table_sqlite_c : string -> bool -> int + = "hh_load_dep_table_sqlite" -external load_dep_table_sqlite_c: string -> bool -> int = "hh_load_dep_table_sqlite" +let load_dep_table_blob : string -> bool -> int = + (fun fn ignore_hh_version -> load_dep_table_blob_c fn ignore_hh_version) -let load_dep_table_sqlite : string -> bool -> int = fun fn ignore_hh_version -> - load_dep_table_sqlite_c fn ignore_hh_version +let load_dep_table_sqlite : string -> bool -> int = + (fun fn ignore_hh_version -> load_dep_table_sqlite_c fn ignore_hh_version) (*****************************************************************************) (* Cleans up the artifacts generated by SQLite *) (*****************************************************************************) -external cleanup_sqlite: unit -> unit = "hh_cleanup_sqlite" +external cleanup_sqlite : unit -> unit = "hh_cleanup_sqlite" (*****************************************************************************) (* The size of the dynamically allocated shared memory section *) (*****************************************************************************) -external heap_size: unit -> int = "hh_heap_size" +external heap_size : unit -> int = 
"hh_used_heap_size" [@@noalloc] + +(*****************************************************************************) +(* Part of the heap not reachable from hashtable entries. *) +(*****************************************************************************) +external wasted_heap_size : unit -> int = "hh_wasted_heap_size" [@@noalloc] (*****************************************************************************) (* The logging level for shared memory statistics *) (* 0 = nothing *) (* 1 = log totals, averages, min, max bytes marshalled and unmarshalled *) (*****************************************************************************) -external hh_log_level : unit -> int = "hh_log_level" +external hh_log_level : unit -> int = "hh_log_level" [@@noalloc] + +(*****************************************************************************) +(* The sample rate for shared memory statistics *) +(*****************************************************************************) +external hh_sample_rate : unit -> float = "hh_sample_rate" (*****************************************************************************) (* The number of used slots in our hashtable *) @@ -261,6 +265,13 @@ external dep_used_slots : unit -> int = "hh_dep_used_slots" (*****************************************************************************) external dep_slots : unit -> int = "hh_dep_slots" +(*****************************************************************************) +(* Gets the hash of a string *) +(*****************************************************************************) +external get_hash : string -> int64 = "get_hash_ocaml" + +let get_hash s = get_hash (Digest.string s) + (*****************************************************************************) (* Must be called after the initialization of the hack server is over. * (cf serverInit.ml). 
*) @@ -268,98 +279,62 @@ external dep_slots : unit -> int = "hh_dep_slots" external hh_removed_count : unit -> int = "hh_removed_count" -external hh_init_done: unit -> unit = "hh_call_after_init" - -external hh_check_heap_overflow: unit -> bool = "hh_check_heap_overflow" - -external get_file_info_on_disk : unit -> bool = "get_file_info_on_disk" - -external get_file_info_on_disk_path : unit -> string = - "get_file_info_on_disk_path" +external hh_check_heap_overflow : unit -> bool = "hh_check_heap_overflow" -external set_file_info_on_disk_path : string -> unit = - "set_file_info_on_disk_path" - -external open_file_info_db : unit -> unit = "open_file_info_db" - -let init_done () = - hh_init_done (); - EventLogger.sharedmem_init_done (heap_size ()) +let init_done () = EventLogger.sharedmem_init_done (heap_size ()) type table_stats = { - nonempty_slots : int; - used_slots : int; - slots : int; + nonempty_slots: int; + used_slots: int; + slots: int; } let dep_stats () = let used = dep_used_slots () in - { - nonempty_slots = used; - used_slots = used; - slots = dep_slots (); - } + { nonempty_slots = used; used_slots = used; slots = dep_slots () } let hash_stats () = - let used_slots, nonempty_slots = hash_used_slots () in - { - nonempty_slots; - used_slots; - slots = hash_slots (); - } - -let should_collect (effort : [ `gentle | `aggressive ]) = - hh_should_collect (effort = `aggressive) - -let collect (effort : [ `gentle | `aggressive ]) = + let (used_slots, nonempty_slots) = hash_used_slots () in + { nonempty_slots; used_slots; slots = hash_slots () } + +let should_collect (effort : [ `gentle | `aggressive | `always_TEST ]) = + let overhead = + match effort with + | `always_TEST -> 1.0 + | `aggressive -> 1.2 + | `gentle -> 2.0 + in + let used = heap_size () in + let wasted = wasted_heap_size () in + let reachable = used - wasted in + used >= truncate (float reachable *. overhead) + +let collect (effort : [ `gentle | `aggressive | `always_TEST ]) = let old_size = heap_size () in Stats.update_max_heap_size old_size; let start_t = Unix.gettimeofday () in (* The wrapper is used to run the function in a worker instead of master. *) - hh_collect (effort = `aggressive); + if should_collect effort then hh_collect (); let new_size = heap_size () in let time_taken = Unix.gettimeofday () -. 
start_t in - if old_size <> new_size then begin + if old_size <> new_size then ( Hh_logger.log "Sharedmem GC: %d bytes before; %d bytes after; in %f seconds" - old_size new_size time_taken; + old_size + new_size + time_taken; EventLogger.sharedmem_gc_ran effort old_size new_size time_taken - end + ) let is_heap_overflow () = hh_check_heap_overflow () (*****************************************************************************) (* Compute size of values in the garbage-collected heap *) (*****************************************************************************) -module HeapSize = struct - - let rec traverse ((visited:ISet.t), acc) r = - if Obj.is_block r then begin - let p:int = Obj.magic r in - if ISet.mem p visited - then (visited,acc) - else begin - let visited' = ISet.add p visited in - let n = Obj.size r in - let acc' = acc + 1 + n in - if Obj.tag r < Obj.no_scan_tag - then traverse_fields (visited', acc') r n - else (visited', acc') - end - end else (visited, acc) - - and traverse_fields acc r i = - let i = i - 1 in - if i < 0 then acc - else traverse_fields (traverse acc (Obj.field r i)) r i - - (* Return size in bytes that o occupies in GC heap *) - let size r = - let (_, w) = traverse (ISet.empty, 0) r in - w * (Sys.word_size / 8) -end -let value_size = HeapSize.size +let value_size r = + let w = Obj.reachable_words r in + w * (Sys.word_size / 8) (*****************************************************************************) (* Module returning the MD5 of the key. It's because the code in C land @@ -369,7 +344,6 @@ let value_size = HeapSize.size (*****************************************************************************) module type Key = sig - (* The type of keys that OCaml-land callers try to insert *) type userkey @@ -383,28 +357,34 @@ module type Key = sig type md5 (* Creation/conversion primitives *) - val make : Prefix.t -> userkey -> t + val make : Prefix.t -> userkey -> t + val make_old : Prefix.t -> userkey -> old - val to_old : t -> old + val to_old : t -> old val new_from_old : old -> t (* Md5 primitives *) - val md5 : t -> md5 + val md5 : t -> md5 + val md5_old : old -> md5 + val string_of_md5 : md5 -> string end module KeyFunctor (UserKeyType : sig type t + val to_string : t -> string end) : Key with type userkey = UserKeyType.t = struct - type userkey = UserKeyType.t - type t = string - type old = string - type md5 = string + + type t = string + + type old = string + + type md5 = string (* The prefix we use for old keys. 
The prefix guarantees that we never * mix old and new data, because a key can never start with the prefix @@ -413,59 +393,77 @@ end) : Key with type userkey = UserKeyType.t = struct let old_prefix = "old_" let make prefix x = Prefix.make_key prefix (UserKeyType.to_string x) + let make_old prefix x = - old_prefix^Prefix.make_key prefix (UserKeyType.to_string x) + old_prefix ^ Prefix.make_key prefix (UserKeyType.to_string x) - let to_old x = old_prefix^x + let to_old x = old_prefix ^ x let new_from_old x = let module S = String in S.sub x (S.length old_prefix) (S.length x - S.length old_prefix) let md5 = Digest.string + let md5_old = Digest.string let string_of_md5 x = x end +module type Raw = functor (Key : Key) (Value : Value.Type) -> sig + val add : Key.md5 -> Value.t -> unit + + val mem : Key.md5 -> bool + + val get : Key.md5 -> Value.t + + val remove : Key.md5 -> unit + + val move : Key.md5 -> Key.md5 -> unit +end + (*****************************************************************************) -(* Raw interface to shared memory (cf hh_shared.c for the underlying +(* Immediate access to shared memory (cf hh_shared.c for the underlying * representation). *) (*****************************************************************************) -module Raw (Key: Key) (Value:Value.Type): sig - val add : Key.md5 -> Value.t -> unit - val mem : Key.md5 -> bool - val get : Key.md5 -> Value.t +module Immediate (Key : Key) (Value : Value.Type) : sig + val add : Key.md5 -> Value.t -> unit + + val mem : Key.md5 -> bool + + val get : Key.md5 -> Value.t + val remove : Key.md5 -> unit - val move : Key.md5 -> Key.md5 -> unit - module LocalChanges : sig - val has_local_changes : unit -> bool - val push_stack : unit -> unit - val pop_stack : unit -> unit - val revert : Key.md5 -> unit - val commit : Key.md5 -> unit - val revert_all : unit -> unit - val commit_all : unit -> unit - end + val move : Key.md5 -> Key.md5 -> unit end = struct (* Returns the number of bytes allocated in the heap, or a negative number * if no new memory was allocated *) - external hh_add : Key.md5 -> Value.t -> int * int = "hh_add" - external hh_mem : Key.md5 -> bool = "hh_mem" - external hh_mem_status : Key.md5 -> int = "hh_mem_status" - external hh_get_size : Key.md5 -> int = "hh_get_size" - external hh_get_and_deserialize: Key.md5 -> Value.t = "hh_get_and_deserialize" - external hh_remove : Key.md5 -> unit = "hh_remove" - external hh_move : Key.md5 -> Key.md5 -> unit = "hh_move" + external hh_add : Key.md5 -> Value.t -> int * int = "hh_add" + + external hh_mem : Key.md5 -> bool = "hh_mem" - let hh_mem_status x = WorkerCancel.with_worker_exit (fun () -> hh_mem_status x) + external hh_mem_status : Key.md5 -> int = "hh_mem_status" + + external hh_get_size : Key.md5 -> int = "hh_get_size" + + external hh_get_and_deserialize : Key.md5 -> Value.t + = "hh_get_and_deserialize" + + external hh_remove : Key.md5 -> unit = "hh_remove" + + external hh_move : Key.md5 -> Key.md5 -> unit = "hh_move" + + let hh_mem_status x = + WorkerCancel.with_worker_exit (fun () -> hh_mem_status x) let _ = hh_mem_status let hh_mem x = WorkerCancel.with_worker_exit (fun () -> hh_mem x) + let hh_add x y = WorkerCancel.with_worker_exit (fun () -> hh_add x y) + let hh_get_and_deserialize x = WorkerCancel.with_worker_exit (fun () -> hh_get_and_deserialize x) @@ -474,29 +472,147 @@ end = struct let original = float original in let saved = original -. compressed in let ratio = compressed /. 
original in - Measure.sample (Value.description - ^ " (bytes serialized into shared heap)") compressed; - Measure.sample ("ALL bytes serialized into shared heap") compressed; - Measure.sample (Value.description - ^ " (bytes saved in shared heap due to compression)") saved; - Measure.sample ("ALL bytes saved in shared heap due to compression") saved; - Measure.sample (Value.description - ^ " (shared heap compression ratio)") ratio; - Measure.sample ("ALL bytes shared heap compression ratio") ratio + Measure.sample + (Value.description ^ " (bytes serialized into shared heap)") + compressed; + Measure.sample "ALL bytes serialized into shared heap" compressed; + Measure.sample + (Value.description ^ " (bytes saved in shared heap due to compression)") + saved; + Measure.sample "ALL bytes saved in shared heap due to compression" saved; + Measure.sample + (Value.description ^ " (shared heap compression ratio)") + ratio; + Measure.sample "ALL bytes shared heap compression ratio" ratio let log_deserialize l r = let sharedheap = float l in - let localheap = float (value_size r) in - begin - Measure.sample (Value.description - ^ " (bytes deserialized from shared heap)") sharedheap; - Measure.sample ("ALL bytes deserialized from shared heap") sharedheap; - Measure.sample (Value.description - ^ " (bytes allocated for deserialized value)") localheap; - Measure.sample ("ALL bytes allocated for deserialized value") localheap + Measure.sample + (Value.description ^ " (bytes deserialized from shared heap)") + sharedheap; + Measure.sample "ALL bytes deserialized from shared heap" sharedheap; + + if hh_log_level () > 1 then ( + (* value_size is a bit expensive to call this often, so only run with log levels >= 2 *) + let localheap = float (value_size r) in + Measure.sample + (Value.description ^ " (bytes allocated for deserialized value)") + localheap; + Measure.sample "ALL bytes allocated for deserialized value" localheap + ) + + let add key value = + let (compressed_size, original_size) = hh_add key value in + if hh_log_level () > 0 && compressed_size > 0 then + log_serialize compressed_size original_size + + let mem key = hh_mem key + + let get key = + let v = hh_get_and_deserialize key in + if hh_log_level () > 0 then log_deserialize (hh_get_size key) (Obj.repr v); + v + + let remove key = hh_remove key + + let move from_key to_key = hh_move from_key to_key +end + +module ProfiledImmediate : functor (Key : Key) (Value : Value.Type) -> sig + include module type of Immediate (Key) (Value) +end = +functor + (Key : Key) + (Value : Value.Type) + -> + struct + module ProfiledValue = struct + (** Tagging a value as Raw (the 99.9999% case) only increases its marshalled + size by 1 byte, and does not change its unmarshalled memory + representation provided Value.t is a record type containing at least one + non-float member. 
*) + type t = + | Raw of Value.t + | Profiled of { + entry: Value.t; + write_time: float; + } + + let prefix = Value.prefix + + let description = Value.description end - (** + module Immediate = Immediate (Key) (ProfiledValue) + + let add x y = + let sample_rate = hh_sample_rate () in + let entry = + if hh_log_level () <> 0 && Random.float 1.0 < sample_rate then + ProfiledValue.Profiled + { entry = y; write_time = Unix.gettimeofday () } + else + ProfiledValue.Raw y + in + Immediate.add x entry + + let get x = + match Immediate.get x with + | ProfiledValue.Raw y -> y + | ProfiledValue.Profiled { entry; write_time } -> + EventLogger.( + log_if_initialized + @@ fun () -> + sharedmem_access_sample + ~heap_name:Value.description + ~key:(Key.string_of_md5 x) + ~write_time); + entry + + let mem = Immediate.mem + + let remove = Immediate.remove + + let move = Immediate.move + end + +(*****************************************************************************) +(* Direct access to shared memory, but with a layer of local changes that allow + * us to decide whether or not to commit specific values. + *) +(*****************************************************************************) +module WithLocalChanges : functor + (Raw : Raw) + (Key : Key) + (Value : Value.Type) + -> sig + include module type of Raw (Key) (Value) + + module LocalChanges : sig + val has_local_changes : unit -> bool + + val push_stack : unit -> unit + + val pop_stack : unit -> unit + + val revert : Key.md5 -> unit + + val commit : Key.md5 -> unit + + val revert_all : unit -> unit + + val commit_all : unit -> unit + end +end = +functor + (Raw : Raw) + (Key : Key) + (Value : Value.Type) + -> + struct + module Raw = Raw (Key) (Value) + + (** * Represents a set of local changes to the view of the shared memory heap * WITHOUT materializing to the changes in the actual heap. This allows us to * make speculative changes to the view of the world that can be reverted @@ -511,180 +627,180 @@ end = struct * Since changes are kept local to the process, this is NOT compatible with * the parallelism provided by MultiWorker.ml *) - module LocalChanges = struct - - type action = - (* The value does not exist in the current stack. When committed this - * action will invoke remove on the previous stack. + module LocalChanges = struct + type action = + (* The value does not exist in the current stack. When committed this + * action will invoke remove on the previous stack. + *) + | Remove + (* The value is added to a previously empty slot. When committed this + * action will invoke add on the previous stack. + *) + | Add of Value.t + (* The value is replacing a value already associated with a key in the + * previous stack. When committed this action will invoke remove then + * add on the previous stack. + *) + | Replace of Value.t + + type t = { + current: (Key.md5, action) Hashtbl.t; + prev: t option; + } + + let stack : t option ref = ref None + + let has_local_changes () = Option.is_some !stack + + let rec mem stack_opt key = + match stack_opt with + | None -> Raw.mem key + | Some stack -> + (try Hashtbl.find stack.current key <> Remove + with Not_found -> mem stack.prev key) + + let rec get stack_opt key = + match stack_opt with + | None -> Raw.get key + | Some stack -> + (try + match Hashtbl.find stack.current key with + | Remove -> failwith "Trying to get a non-existent value" + | Replace value + | Add value -> + value + with Not_found -> get stack.prev key) + + (* + * For remove/add it is best to think of them in terms of a state machine. 
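
A minimal usage sketch of the changeset stack described above, assuming a heap module Heap produced by the NoCache functor defined later in this file; run_checks is a hypothetical validation step, not something introduced by this change:

    (* Speculate on a write, then either publish it or throw it away.
     * Nothing reaches the shared heap until commit_all. *)
    let speculative_add key value =
      Heap.LocalChanges.push_stack ();
      Heap.add key value;              (* visible only through the local view *)
      (if run_checks () then           (* hypothetical check *)
         Heap.LocalChanges.commit_all ()
       else
         Heap.LocalChanges.revert_all ());
      Heap.LocalChanges.pop_stack ()

Because the changeset is per-process, this pattern is only safe outside of MultiWorker parallelism, as the comment above notes.
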
+ * A key can be in the following states: + * + * Remove: + * Local changeset removes a key from the previous stack + * Replace: + * Local changeset replaces value of a key in previous stack + * Add: + * Local changeset associates a value with a key. The key is not + * present in the previous stacks + * Empty: + * No local changes and key is not present in previous stack + * Filled: + * No local changes and key has an associated value in previous stack + * *Error*: + * This means an exception will occur *) - | Remove - (* The value is added to a previously empty slot. When committed this - * action will invoke add on the previous stack. + (* + * Transitions table: + * Remove -> *Error* + * Replace -> Remove + * Add -> Empty + * Empty -> *Error* + * Filled -> Remove *) - | Add of Value.t - (* The value is replacing a value already associated with a key in the - * previous stack. When committed this action will invoke remove then - * add on the previous stack. + let remove stack_opt key = + match stack_opt with + | None -> Raw.remove key + | Some stack -> + (try + match Hashtbl.find stack.current key with + | Remove -> failwith "Trying to remove a non-existent value" + | Replace _ -> Hashtbl.replace stack.current key Remove + | Add _ -> Hashtbl.remove stack.current key + with Not_found -> + if mem stack.prev key then + Hashtbl.replace stack.current key Remove + else + failwith "Trying to remove a non-existent value") + + (* + * Transitions table: + * Remove -> Replace + * Replace -> Replace + * Add -> Add + * Empty -> Add + * Filled -> Replace *) - | Replace of Value.t - - type t = { - current : (Key.md5, action) Hashtbl.t; - prev : t option; - } - - let stack: t option ref = ref None - - let has_local_changes () = Option.is_some (!stack) - - let rec mem stack_opt key = - match stack_opt with - | None -> hh_mem key - | Some stack -> - try Hashtbl.find stack.current key <> Remove - with Not_found -> mem stack.prev key - - let rec get stack_opt key = - match stack_opt with - | None -> - let v = hh_get_and_deserialize key in - if hh_log_level() > 0 - then (log_deserialize (hh_get_size key) (Obj.repr v)); - v - | Some stack -> - try match Hashtbl.find stack.current key with - | Remove -> failwith "Trying to get a non-existent value" - | Replace value - | Add value -> value - with Not_found -> - get stack.prev key + let add stack_opt key value = + match stack_opt with + | None -> Raw.add key value + | Some stack -> + (try + match Hashtbl.find stack.current key with + | Remove + | Replace _ -> + Hashtbl.replace stack.current key (Replace value) + | Add _ -> Hashtbl.replace stack.current key (Add value) + with Not_found -> + if mem stack.prev key then + Hashtbl.replace stack.current key (Replace value) + else + Hashtbl.replace stack.current key (Add value)) + + let move stack_opt from_key to_key = + match stack_opt with + | None -> Raw.move from_key to_key + | Some _stack -> + assert (mem stack_opt from_key); + assert (not @@ mem stack_opt to_key); + let value = get stack_opt from_key in + remove stack_opt from_key; + add stack_opt to_key value + + let commit_action changeset key elem = + match elem with + | Remove -> remove changeset key + | Add value -> add changeset key value + | Replace value -> + remove changeset key; + add changeset key value + + (* Public API **) + let push_stack () = + stack := Some { current = Hashtbl.create 128; prev = !stack } + + let pop_stack () = + match !stack with + | None -> + failwith "There are no active local change stacks. Nothing to pop!" 
+ | Some { prev; _ } -> stack := prev + + let revert key = + match !stack with + | None -> () + | Some changeset -> Hashtbl.remove changeset.current key + + let commit key = + match !stack with + | None -> () + | Some changeset -> + (try + commit_action changeset.prev key + @@ Hashtbl.find changeset.current key + with Not_found -> ()) + + let revert_all () = + match !stack with + | None -> () + | Some changeset -> Hashtbl.clear changeset.current + + let commit_all () = + match !stack with + | None -> () + | Some changeset -> + Hashtbl.iter (commit_action changeset.prev) changeset.current + end - (** - * For remove/add it is best to think of them in terms of a state machine. - * A key can be in the following states: - * - * Remove: - * Local changeset removes a key from the previous stack - * Replace: - * Local changeset replaces value of a key in previous stack - * Add: - * Local changeset associates a value with a key. The key is not - * present in the previous stacks - * Empty: - * No local changes and key is not present in previous stack - * Filled: - * No local changes and key has an associated value in previous stack - * *Error*: - * This means an exception will occur - **) - (** - * Transitions table: - * Remove -> *Error* - * Replace -> Remove - * Add -> Empty - * Empty -> *Error* - * Filled -> Remove - *) - let remove stack_opt key = - match stack_opt with - | None -> hh_remove key - | Some stack -> - try match Hashtbl.find stack.current key with - | Remove -> failwith "Trying to remove a non-existent value" - | Replace _ -> Hashtbl.replace stack.current key Remove - | Add _ -> Hashtbl.remove stack.current key - with Not_found -> - if mem stack.prev key then - Hashtbl.replace stack.current key Remove - else - failwith "Trying to remove a non-existent value" + let add key value = LocalChanges.(add !stack key value) - (** - * Transitions table: - * Remove -> Replace - * Replace -> Replace - * Add -> Add - * Empty -> Add - * Filled -> Replace - *) - let add stack_opt key value = - match stack_opt with - | None -> - let compressed_size, original_size = hh_add key value in - if hh_log_level() > 0 && compressed_size > 0 - then log_serialize compressed_size original_size - | Some stack -> - try match Hashtbl.find stack.current key with - | Remove - | Replace _ -> Hashtbl.replace stack.current key (Replace value) - | Add _ -> Hashtbl.replace stack.current key (Add value) - with Not_found -> - if mem stack.prev key then - Hashtbl.replace stack.current key (Replace value) - else - Hashtbl.replace stack.current key (Add value) - - let move stack_opt from_key to_key = - match stack_opt with - | None -> hh_move from_key to_key - | Some _stack -> - assert (mem stack_opt from_key); - assert (not @@ mem stack_opt to_key); - let value = get stack_opt from_key in - remove stack_opt from_key; - add stack_opt to_key value - - let commit_action changeset key elem = - match elem with - | Remove -> remove changeset key - | Add value -> add changeset key value - | Replace value -> - remove changeset key; - add changeset key value - - (** Public API **) - let push_stack () = - stack := Some ({ current = Hashtbl.create 128; prev = !stack; }) + let mem key = LocalChanges.(mem !stack key) - let pop_stack () = - match !stack with - | None -> - failwith "There are no active local change stacks. Nothing to pop!" 
- | Some { prev; _ } -> stack := prev - - let revert key = - match !stack with - | None -> () - | Some changeset -> Hashtbl.remove changeset.current key - - let commit key = - match !stack with - | None -> () - | Some changeset -> - try - commit_action - changeset.prev key @@ Hashtbl.find changeset.current key - with Not_found -> () + let get key = LocalChanges.(get !stack key) - let revert_all () = - match !stack with - | None -> () - | Some changeset -> Hashtbl.clear changeset.current + let remove key = LocalChanges.(remove !stack key) - let commit_all () = - match !stack with - | None -> () - | Some changeset -> - Hashtbl.iter (commit_action changeset.prev) changeset.current + let move from_key to_key = LocalChanges.(move !stack from_key to_key) end - let add key value = LocalChanges.(add !stack key value) - let mem key = LocalChanges.(mem !stack key) - let get key = LocalChanges.(get !stack key) - let remove key = LocalChanges.(remove !stack key) - let move from_key to_key = LocalChanges.(move !stack from_key to_key) -end - (*****************************************************************************) (* Module used to access "new" values (as opposed to old ones). * There are several cases where we need to compare the old and the new @@ -697,97 +813,112 @@ end *) (*****************************************************************************) -module New : functor (Key : Key) -> functor(Value: Value.Type) -> sig - +module New : functor (Raw : Raw) (Key : Key) (Value : Value.Type) -> sig (* Adds a binding to the table, the table is left unchanged if the * key was already bound. *) - val add : Key.t -> Value.t -> unit + val add : Key.t -> Value.t -> unit + + val get : Key.t -> Value.t option - val get : Key.t -> Value.t option val find_unsafe : Key.t -> Value.t - val remove : Key.t -> unit - val mem : Key.t -> bool + + val remove : Key.t -> unit + + val mem : Key.t -> bool (* Binds the key to the old one. * If 'mykey' is bound to 'myvalue', oldifying 'mykey' makes 'mykey' * accessible to the "Old" module, in other words: "Old.mem mykey" returns * true and "New.mem mykey" returns false after oldifying. 
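
A minimal sketch of the oldify/revive lifecycle, going through the NoCache batch wrappers defined below; Heap, key and value are placeholders assumed for illustration:

    let age_out key value =
      Heap.add key value;
      assert (Heap.mem key);
      Heap.oldify_batch (Heap.KeySet.singleton key);
      (* After oldifying, only the "old" view can see the binding. *)
      assert (not (Heap.mem key));
      assert (Heap.mem_old key);
      assert (Heap.get_old key = Some value);
      (* revive_batch moves the binding back to the "new" side. *)
      Heap.revive_batch (Heap.KeySet.singleton key);
      assert (Heap.mem key && not (Heap.mem_old key))
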
*) - val oldify : Key.t -> unit - - module Raw: module type of Raw (Key) (Value) - -end = functor (Key : Key) -> functor (Value : Value.Type) -> struct - - module Raw = Raw (Key) (Value) - - let add key value = Raw.add (Key.md5 key) value - let mem key = Raw.mem (Key.md5 key) - - let get key = - let key = Key.md5 key in - if Raw.mem key - then Some (Raw.get key) - else None + val oldify : Key.t -> unit + + module WithLocalChanges : module type of WithLocalChanges (Raw) (Key) (Value) +end = +functor + (Raw : Raw) + (Key : Key) + (Value : Value.Type) + -> + struct + module WithLocalChanges = WithLocalChanges (Raw) (Key) (Value) + + let add key value = WithLocalChanges.add (Key.md5 key) value + + let mem key = WithLocalChanges.mem (Key.md5 key) + + let get key = + let key = Key.md5 key in + if WithLocalChanges.mem key then + Some (WithLocalChanges.get key) + else + None + + let find_unsafe key = + match get key with + | None -> raise Not_found + | Some x -> x + + let remove key = + let key = Key.md5 key in + if WithLocalChanges.mem key then ( + WithLocalChanges.remove key; + assert (not (WithLocalChanges.mem key)) + ) else + () + + let oldify key = + if mem key then + let old_key = Key.to_old key in + WithLocalChanges.move (Key.md5 key) (Key.md5_old old_key) + else + () + end - let find_unsafe key = - match get key with - | None -> raise Not_found - | Some x -> x +(* Same as new, but for old values *) +module Old : functor + (Raw : Raw) + (Key : Key) + (Value : Value.Type) + (WithLocalChanges : module type of WithLocalChanges (Raw) (Key) (Value)) + -> sig + val get : Key.old -> Value.t option - let remove key = - let key = Key.md5 key in - if Raw.mem key - then begin - Raw.remove key; - assert (not (Raw.mem key)); - end - else () - - let oldify key = - if mem key - then - let old_key = Key.to_old key in - Raw.move (Key.md5 key) (Key.md5_old old_key) - else () -end + val remove : Key.old -> unit -(* Same as new, but for old values *) -module Old : functor (Key : Key) -> functor (Value : Value.Type) -> - functor (Raw : module type of Raw (Key) (Value)) -> sig + val mem : Key.old -> bool - val get : Key.old -> Value.t option - val remove : Key.old -> unit - val mem : Key.old -> bool (* Takes an old value and moves it back to a "new" one *) - val revive : Key.old -> unit + val revive : Key.old -> unit +end = +functor + (Raw : Raw) + (Key : Key) + (Value : Value.Type) + (WithLocalChanges : module type of WithLocalChanges (Raw) (Key) (Value)) + -> + struct + let get key = + let key = Key.md5_old key in + if WithLocalChanges.mem key then + Some (WithLocalChanges.get key) + else + None -end = functor (Key : Key) -> functor (Value: Value.Type) -> - functor (Raw : module type of Raw (Key) (Value)) -> struct + let mem key = WithLocalChanges.mem (Key.md5_old key) - let get key = - let key = Key.md5_old key in - if Raw.mem key - then Some (Raw.get key) - else None - - let mem key = Raw.mem (Key.md5_old key) - - let remove key = - if mem key - then Raw.remove (Key.md5_old key) - - let revive key = - if mem key - then - let new_key = Key.new_from_old key in - let new_key = Key.md5 new_key in - let old_key = Key.md5_old key in - if Raw.mem new_key - then Raw.remove new_key; - Raw.move old_key new_key -end + let remove key = if mem key then WithLocalChanges.remove (Key.md5_old key) + + let revive key = + if mem key then ( + let new_key = Key.new_from_old key in + let new_key = Key.md5 new_key in + let old_key = Key.md5_old key in + if WithLocalChanges.mem new_key then WithLocalChanges.remove new_key; + 
WithLocalChanges.move old_key new_key + ) + end (*****************************************************************************) (* The signatures of what we are actually going to expose to the user *) @@ -795,38 +926,61 @@ end module type NoCache = sig type key + type t + module KeySet : Set.S with type elt = key + module KeyMap : MyMap.S with type key = key - val add : key -> t -> unit - val get : key -> t option - val get_old : key -> t option - val get_old_batch : KeySet.t -> t option KeyMap.t + val add : key -> t -> unit + + val get : key -> t option + + val get_old : key -> t option + + val get_old_batch : KeySet.t -> t option KeyMap.t + val remove_old_batch : KeySet.t -> unit - val find_unsafe : key -> t - val get_batch : KeySet.t -> t option KeyMap.t - val remove_batch : KeySet.t -> unit - val string_of_key : key -> string - val mem : key -> bool - val mem_old : key -> bool - val oldify_batch : KeySet.t -> unit - val revive_batch : KeySet.t -> unit + + val find_unsafe : key -> t + + val get_batch : KeySet.t -> t option KeyMap.t + + val remove_batch : KeySet.t -> unit + + val string_of_key : key -> string + + val mem : key -> bool + + val mem_old : key -> bool + + val oldify_batch : KeySet.t -> unit + + val revive_batch : KeySet.t -> unit module LocalChanges : sig val has_local_changes : unit -> bool + val push_stack : unit -> unit + val pop_stack : unit -> unit + val revert_batch : KeySet.t -> unit + val commit_batch : KeySet.t -> unit + val revert_all : unit -> unit + val commit_all : unit -> unit end end module type WithCache = sig include NoCache - val write_through : key -> t -> unit + + val write_around : key -> t -> unit + val get_no_cache : key -> t option end @@ -836,7 +990,9 @@ end module type UserKeyType = sig type t + val to_string : t -> string + val compare : t -> t -> int end @@ -844,95 +1000,121 @@ end (* A functor returning an implementation of the S module without caching. 
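
Since NoCache now takes its Raw backend as the first functor argument, wiring up a table looks roughly like the sketch below. The module names, prefix and description string are hypothetical; Immediate is used as the backend here only because it satisfies the Raw signature declared earlier in this file:

    module StringValue = struct
      type t = string
      let prefix = Prefix.make ()      (* assumed to mint a fresh prefix *)
      let description = "example_string_heap"
    end

    module StringKey = struct
      type t = string
      let to_string x = x
      let compare = String.compare
    end

    module StringHeap = NoCache (Immediate) (StringKey) (StringValue)

    let demo () =
      StringHeap.add "some_key" "some_value";
      assert (StringHeap.get "some_key" = Some "some_value")
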
*) (*****************************************************************************) -module NoCache (UserKeyType : UserKeyType) (Value : Value.Type) = struct - +module NoCache (Raw : Raw) (UserKeyType : UserKeyType) (Value : Value.Type) = +struct module Key = KeyFunctor (UserKeyType) - module New = New (Key) (Value) - module Old = Old (Key) (Value) (New.Raw) + module New = New (Raw) (Key) (Value) + module Old = Old (Raw) (Key) (Value) (New.WithLocalChanges) module KeySet = Set.Make (UserKeyType) module KeyMap = MyMap.Make (UserKeyType) type key = UserKeyType.t + type t = Value.t let string_of_key key = - key |> Key.make Value.prefix |> Key.md5 |> Key.string_of_md5;; + key |> Key.make Value.prefix |> Key.md5 |> Key.string_of_md5 let add x y = New.add (Key.make Value.prefix x) y + let find_unsafe x = New.find_unsafe (Key.make Value.prefix x) - let get x = - try Some (find_unsafe x) with Not_found -> None + let get x = (try Some (find_unsafe x) with Not_found -> None) let get_old x = let key = Key.make_old Value.prefix x in Old.get key let get_old_batch xs = - KeySet.fold begin fun str_key acc -> - let key = Key.make_old Value.prefix str_key in - KeyMap.add str_key (Old.get key) acc - end xs KeyMap.empty + KeySet.fold + begin + fun str_key acc -> + let key = Key.make_old Value.prefix str_key in + KeyMap.add str_key (Old.get key) acc + end + xs + KeyMap.empty let remove_batch xs = - KeySet.iter begin fun str_key -> - let key = Key.make Value.prefix str_key in - New.remove key - end xs + KeySet.iter + begin + fun str_key -> + let key = Key.make Value.prefix str_key in + New.remove key + end + xs let oldify_batch xs = - KeySet.iter begin fun str_key -> - let key = Key.make Value.prefix str_key in - if New.mem key - then - New.oldify key - else - let key = Key.make_old Value.prefix str_key in - Old.remove key - end xs + KeySet.iter + begin + fun str_key -> + let key = Key.make Value.prefix str_key in + if New.mem key then + New.oldify key + else + let key = Key.make_old Value.prefix str_key in + Old.remove key + end + xs let revive_batch xs = - KeySet.iter begin fun str_key -> - let old_key = Key.make_old Value.prefix str_key in - if Old.mem old_key - then - Old.revive old_key - else - let key = Key.make Value.prefix str_key in - New.remove key - end xs + KeySet.iter + begin + fun str_key -> + let old_key = Key.make_old Value.prefix str_key in + if Old.mem old_key then + Old.revive old_key + else + let key = Key.make Value.prefix str_key in + New.remove key + end + xs let get_batch xs = - KeySet.fold begin fun str_key acc -> - let key = Key.make Value.prefix str_key in - match New.get key with - | None -> KeyMap.add str_key None acc - | Some data -> KeyMap.add str_key (Some data) acc - end xs KeyMap.empty + KeySet.fold + begin + fun str_key acc -> + let key = Key.make Value.prefix str_key in + match New.get key with + | None -> KeyMap.add str_key None acc + | Some data -> KeyMap.add str_key (Some data) acc + end + xs + KeyMap.empty let mem x = New.mem (Key.make Value.prefix x) let mem_old x = Old.mem (Key.make_old Value.prefix x) let remove_old_batch xs = - KeySet.iter begin fun str_key -> - let key = Key.make_old Value.prefix str_key in - Old.remove key - end xs + KeySet.iter + begin + fun str_key -> + let key = Key.make_old Value.prefix str_key in + Old.remove key + end + xs module LocalChanges = struct - include New.Raw.LocalChanges + include New.WithLocalChanges.LocalChanges + let revert_batch keys = - KeySet.iter begin fun str_key -> - let key = Key.make Value.prefix str_key in - revert 
(Key.md5 key) - end keys + KeySet.iter + begin + fun str_key -> + let key = Key.make Value.prefix str_key in + revert (Key.md5 key) + end + keys let commit_batch keys = - KeySet.iter begin fun str_key -> - let key = Key.make Value.prefix str_key in - commit (Key.md5 key) - end keys + KeySet.iter + begin + fun str_key -> + let key = Key.make Value.prefix str_key in + commit (Key.md5 key) + end + keys end end @@ -941,13 +1123,11 @@ end (*****************************************************************************) module type ConfigType = sig - -(* The type of object we want to keep in cache *) + (* The type of object we want to keep in cache *) type value -(* The capacity of the cache *) + (* The capacity of the cache *) val capacity : int - end (*****************************************************************************) @@ -957,14 +1137,19 @@ end module type CacheType = sig type key + type value - val add: key -> value -> unit - val get: key -> value option - val remove: key -> unit - val clear: unit -> unit + val add : key -> value -> unit + + val get : key -> value option + + val remove : key -> unit + + val clear : unit -> unit val string_of_key : key -> string + val get_size : unit -> int end @@ -972,130 +1157,135 @@ end (* Cache keeping the objects the most frequently used. *) (*****************************************************************************) -module FreqCache (Key : sig type t end) (Config:ConfigType) : +module FreqCache (Key : sig + type t +end) +(Config : ConfigType) : CacheType with type key := Key.t and type value := Config.value = struct - type value = Config.value - let string_of_key _key = + let string_of_key _key = failwith "FreqCache does not support 'string_of_key'" -(* The cache itself *) - let (cache: (Key.t, int ref * value) Hashtbl.t) - = Hashtbl.create (2 * Config.capacity) + (* The cache itself *) + let (cache : (Key.t, int ref * value) Hashtbl.t) = + Hashtbl.create (2 * Config.capacity) + let size = ref 0 - let get_size () = - !size - let clear() = + let get_size () = !size + + let clear () = Hashtbl.clear cache; size := 0 -(* The collection function is called when we reach twice original - * capacity in size. When the collection is triggered, we only keep - * the most frequently used objects. - * So before collection: size = 2 * capacity - * After collection: size = capacity (with the most frequently used objects) - *) - let collect() = - if !size < 2 * Config.capacity then () else - let l = ref [] in - Hashtbl.iter begin fun key (freq, v) -> - l := (key, !freq, v) :: !l - end cache; - Hashtbl.clear cache; - l := List.sort (fun (_, x, _) (_, y, _) -> y - x) !l; - let i = ref 0 in - while !i < Config.capacity do - match !l with - | [] -> i := Config.capacity - | (k, _freq, v) :: rl -> + (* The collection function is called when we reach twice original + * capacity in size. When the collection is triggered, we only keep + * the most frequently used objects. 
+ * So before collection: size = 2 * capacity + * After collection: size = capacity (with the most frequently used objects) + *) + let collect () = + if !size < 2 * Config.capacity then + () + else + let l = ref [] in + Hashtbl.iter + begin + fun key (freq, v) -> l := (key, !freq, v) :: !l + end + cache; + Hashtbl.clear cache; + l := List.sort (fun (_, x, _) (_, y, _) -> y - x) !l; + let i = ref 0 in + while !i < Config.capacity do + match !l with + | [] -> i := Config.capacity + | (k, _freq, v) :: rl -> Hashtbl.replace cache k (ref 0, v); l := rl; - incr i; - done; - size := Config.capacity; - () + incr i + done; + size := Config.capacity; + () let add x y = - collect(); + collect (); try - let freq, y' = Hashtbl.find cache x in + let (freq, y') = Hashtbl.find cache x in incr freq; - if y' == y - then () - else Hashtbl.replace cache x (freq, y) + if y' == y then + () + else + Hashtbl.replace cache x (freq, y) with Not_found -> incr size; - let elt = ref 0, y in + let elt = (ref 0, y) in Hashtbl.replace cache x elt; () let find x = - let freq, value = Hashtbl.find cache x in + let (freq, value) = Hashtbl.find cache x in incr freq; value - let get x = try Some (find x) with Not_found -> None + let get x = (try Some (find x) with Not_found -> None) let remove x = - if Hashtbl.mem cache x - then decr size; + if Hashtbl.mem cache x then decr size; Hashtbl.remove cache x - end (*****************************************************************************) (* An ordered cache keeps the most recently used objects *) (*****************************************************************************) -module OrderedCache (Key : sig type t end) (Config:ConfigType): +module OrderedCache (Key : sig + type t +end) +(Config : ConfigType) : CacheType with type key := Key.t and type value := Config.value = struct - let string_of_key _key = failwith "OrderedCache does not support 'string_of_key'" - let (cache: (Key.t, Config.value) Hashtbl.t) = + let (cache : (Key.t, Config.value) Hashtbl.t) = Hashtbl.create Config.capacity - let queue = Queue.create() + let queue = Queue.create () + let size = ref 0 - let get_size () = - !size - let clear() = + let get_size () = !size + + let clear () = Hashtbl.clear cache; size := 0; Queue.clear queue; () let add x y = - if !size >= Config.capacity - then begin + ( if !size >= Config.capacity then (* Remove oldest element - if it's still around. *) let elt = Queue.pop queue in - if Hashtbl.mem cache elt - then begin + if Hashtbl.mem cache elt then ( decr size; Hashtbl.remove cache elt - end; - end; + ) ); + (* Add the new element, but bump the size only if it's a new addition. 
*) Queue.push x queue; - if not (Hashtbl.mem cache x) - then incr size; + if not (Hashtbl.mem cache x) then incr size; Hashtbl.replace cache x y let find x = Hashtbl.find cache x - let get x = try Some (find x) with Not_found -> None + + let get x = (try Some (find x) with Not_found -> None) let remove x = try - if Hashtbl.mem cache x - then decr size; - Hashtbl.remove cache x; + if Hashtbl.mem cache x then decr size; + Hashtbl.remove cache x with Not_found -> () - end (*****************************************************************************) @@ -1105,21 +1295,24 @@ end (*****************************************************************************) let invalidate_callback_list = ref [] + let invalidate_caches () = - List.iter !invalidate_callback_list begin fun callback -> callback() end + List.iter !invalidate_callback_list (fun callback -> callback ()) module LocalCache (UserKeyType : UserKeyType) (Value : Value.Type) = struct - type key = UserKeyType.t + type value = Value.t module ConfValue = struct type value = Value.t + let capacity = 1000 end (* Young values cache *) module L1 = OrderedCache (UserKeyType) (ConfValue) + (* Frequent values cache *) module L2 = FreqCache (UserKeyType) (ConfValue) @@ -1134,11 +1327,10 @@ module LocalCache (UserKeyType : UserKeyType) (Value : Value.Type) = struct match L1.get x with | None -> (match L2.get x with - | None -> None - | Some v as result -> - L1.add x v; - result - ) + | None -> None + | Some v as result -> + L1.add x v; + result) | Some v as result -> L2.add x v; result @@ -1148,15 +1340,17 @@ module LocalCache (UserKeyType : UserKeyType) (Value : Value.Type) = struct L2.remove x let clear () = - L1.clear(); - L2.clear() + L1.clear (); + L2.clear () let () = - invalidate_callback_list := begin fun () -> - L1.clear(); - L2.clear() - end :: !invalidate_callback_list - + invalidate_callback_list := + begin + fun () -> + L1.clear (); + L2.clear () + end + :: !invalidate_callback_list end (*****************************************************************************) @@ -1165,20 +1359,19 @@ end * much time. The caches keep a deserialized version of the types. *) (*****************************************************************************) -module WithCache (UserKeyType : UserKeyType) (Value:Value.Type) = struct - - module Direct = NoCache (UserKeyType) (Value) +module WithCache (Raw : Raw) (UserKeyType : UserKeyType) (Value : Value.Type) = +struct + module Direct = NoCache (Raw) (UserKeyType) (Value) type key = Direct.key + type t = Direct.t module KeySet = Direct.KeySet module KeyMap = Direct.KeyMap - module Cache = LocalCache (UserKeyType) (Value) - let string_of_key key = - Direct.string_of_key key + let string_of_key key = Direct.string_of_key key let add x y = Direct.add x y; @@ -1186,24 +1379,35 @@ module WithCache (UserKeyType : UserKeyType) (Value:Value.Type) = struct let get_no_cache = Direct.get - let write_through x y = + let write_around x y = (* Note that we do not need to do any cache invalidation here because * Direct.add is a no-op if the key already exists. *) Direct.add x y let log_hit_rate ~hit = - Measure.sample (Value.description ^ " (cache hit rate)") (if hit then 1. else 0.); - Measure.sample ("(ALL cache hit rate)") (if hit then 1. else 0.) + Measure.sample + (Value.description ^ " (cache hit rate)") + ( if hit then + 1. + else + 0. ); + Measure.sample + "(ALL cache hit rate)" + ( if hit then + 1. + else + 0. 
) let get x = match Cache.get x with | None -> - let result = (match Direct.get x with - | None -> None - | Some v as result -> - Cache.add x v; - result - ) in + let result = + match Direct.get x with + | None -> None + | Some v as result -> + Cache.add x v; + result + in if hh_log_level () > 0 then log_hit_rate ~hit:false; result | Some _ as result -> @@ -1212,7 +1416,9 @@ module WithCache (UserKeyType : UserKeyType) (Value:Value.Type) = struct (* We don't cache old objects, they are not accessed often enough. *) let get_old = Direct.get_old + let get_old_batch = Direct.get_old_batch + let mem_old = Direct.mem_old let find_unsafe x = @@ -1226,9 +1432,12 @@ module WithCache (UserKeyType : UserKeyType) (Value:Value.Type) = struct | Some _ -> true let get_batch keys = - KeySet.fold begin fun key acc -> - KeyMap.add key (get key) acc - end keys KeyMap.empty + KeySet.fold + begin + fun key acc -> KeyMap.add key (get key) acc + end + keys + KeyMap.empty let oldify_batch keys = Direct.oldify_batch keys; @@ -1243,14 +1452,14 @@ module WithCache (UserKeyType : UserKeyType) (Value:Value.Type) = struct KeySet.iter Cache.remove xs let () = - invalidate_callback_list := begin fun () -> - Cache.clear() - end :: !invalidate_callback_list + invalidate_callback_list := + begin + fun () -> Cache.clear () + end :: !invalidate_callback_list let remove_old_batch = Direct.remove_old_batch module LocalChanges = struct - let push_stack () = Direct.LocalChanges.push_stack (); Cache.clear () @@ -1275,7 +1484,6 @@ module WithCache (UserKeyType : UserKeyType) (Value:Value.Type) = struct Direct.LocalChanges.commit_all (); Cache.clear () - let has_local_changes () = - Direct.LocalChanges.has_local_changes () + let has_local_changes () = Direct.LocalChanges.has_local_changes () end end diff --git a/hack/heap/test/test_hh_fileinfo.cpp b/hack/heap/test/test_hh_fileinfo.cpp deleted file mode 100644 index 397611f10a9..00000000000 --- a/hack/heap/test/test_hh_fileinfo.cpp +++ /dev/null @@ -1,22 +0,0 @@ -#include -#include -#include "../hh_fileinfo.h" - -#define CREATE_SQL \ - "CREATE TABLE IF NOT EXISTS NAME_INFO(" \ - " HASH INTEGER PRIMARY KEY NOT NULL," \ - " NAME TEXT NOT NULL," \ - " NKIND INTEGER NOT NULL," \ - " FILESPEC TEXT NOT NULL" \ - ");" - -TEST(HhFileInfoTest, RoundTrip) { - sqlite3 *db = nullptr; - ASSERT_EQ(sqlite3_open(":memory:", &db), SQLITE_OK); - ASSERT_EQ(sqlite3_exec(db, CREATE_SQL, nullptr, nullptr, nullptr), SQLITE_OK); - hhfi_insert_row(db, 1, "name", 2, "filespec"); - auto res = hhfi_get_filespec(db, 1); - ASSERT_STREQ(res, "filespec"); - free(res); - ASSERT_EQ(sqlite3_close(db), SQLITE_OK); -} diff --git a/hack/heap/value.ml b/hack/heap/value.ml index a5131e0d08d..72b356a6b8d 100644 --- a/hack/heap/value.ml +++ b/hack/heap/value.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,19 +7,19 @@ * *) - (*****************************************************************************) (* Very simple module used to make sure we don't mix keys of different * type in the heap (cf shared.ml). * Because we have to "create" a new module every time, we have to make a new * prefix (cf prefix.ml). Since the prefixes are always different (for each * call to make), we are sure that they are not colliding. 
-*) + *) (*****************************************************************************) module type Type = sig type t - val prefix: Prefix.t - val description: string - val use_sqlite_fallback : unit -> bool + + val prefix : Prefix.t + + val description : string end diff --git a/hack/heap/value.mli b/hack/heap/value.mli index a5131e0d08d..72b356a6b8d 100644 --- a/hack/heap/value.mli +++ b/hack/heap/value.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,19 +7,19 @@ * *) - (*****************************************************************************) (* Very simple module used to make sure we don't mix keys of different * type in the heap (cf shared.ml). * Because we have to "create" a new module every time, we have to make a new * prefix (cf prefix.ml). Since the prefixes are always different (for each * call to make), we are sure that they are not colliding. -*) + *) (*****************************************************************************) module type Type = sig type t - val prefix: Prefix.t - val description: string - val use_sqlite_fallback : unit -> bool + + val prefix : Prefix.t + + val description : string end diff --git a/hack/heap/workerCancel.ml b/hack/heap/workerCancel.ml index cce486ba70b..6d3742ee4db 100644 --- a/hack/heap/workerCancel.ml +++ b/hack/heap/workerCancel.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,27 +8,34 @@ *) exception Worker_should_exit + let () = Callback.register_exception "worker_should_exit" Worker_should_exit external stop_workers : unit -> unit = "hh_stop_workers" + external resume_workers : unit -> unit = "hh_resume_workers" + external check_should_exit : unit -> unit = "hh_check_should_exit" + external set_can_worker_stop : bool -> unit = "hh_set_can_worker_stop" let on_worker_cancelled = ref (fun () -> ()) + let set_on_worker_cancelled f = on_worker_cancelled := f let with_no_cancellations f = Utils.try_finally - ~f:begin fun () -> - set_can_worker_stop false; - f () - end - ~finally:(fun () -> set_can_worker_stop true) + ~f: + begin + fun () -> + set_can_worker_stop false; + f () + end + ~finally:(fun () -> set_can_worker_stop true) let with_worker_exit f = - try f () with - | Worker_should_exit -> + try f () + with Worker_should_exit -> !on_worker_cancelled (); exit 0 diff --git a/hack/heap/workerCancel.mli b/hack/heap/workerCancel.mli index 25adf7649db..d65229403c1 100644 --- a/hack/heap/workerCancel.mli +++ b/hack/heap/workerCancel.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -7,10 +7,14 @@ * *) -val stop_workers: unit -> unit -val resume_workers: unit -> unit -val check_should_exit: unit -> unit +val stop_workers : unit -> unit -val set_on_worker_cancelled: (unit -> unit) -> unit -val with_no_cancellations: (unit -> 'a) -> 'a -val with_worker_exit: (unit -> 'a) -> 'a +val resume_workers : unit -> unit + +val check_should_exit : unit -> unit + +val set_on_worker_cancelled : (unit -> unit) -> unit + +val with_no_cancellations : (unit -> 'a) -> 'a + +val with_worker_exit : (unit -> 'a) -> 'a diff --git a/hack/hhi/.merlin b/hack/hhi/.merlin deleted file mode 100644 index f7232e1629c..00000000000 --- a/hack/hhi/.merlin +++ /dev/null @@ -1,2 +0,0 @@ -REC -FLG -ppx "../../../../buck-out/bin/hphp/hack/src/ppx/ppx_gen_hhi/ppx_gen_hhi ~/fbsource/fbcode/buck-out/gen/hphp/hack/hhi/hhi_lib/hhi_lib__srcs" diff --git a/hack/hhi/hhi.ml b/hack/hhi/hhi.ml deleted file mode 100644 index fd268d38ca0..00000000000 --- a/hack/hhi/hhi.ml +++ /dev/null @@ -1,59 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(* OCaml handles the value restriction much better than SML. <3 *) -let root = ref None - -(* Compiler embeds the hhi contents directly into the source *) -let hhi_contents = [%hhi_contents] - -let get_raw_hhi_contents () = hhi_contents - -let write_hhi dir (filename, contents) = - let file = Path.(concat dir filename |> to_string) in - (* Make sure the subdirectory exists; this structure is nested *) - Sys_utils.mkdir_p (Filename.dirname file); - Sys_utils.write_file ~file contents - -let extract_hhis () = - let tmpdir = Path.make (Tmp.temp_dir GlobalConfig.tmp_dir "hhi") in - Array.iter (write_hhi tmpdir) hhi_contents; - tmpdir - -(* Touch functionality for all hhis below root *) -let touch_root r = - let filter file = Filename.check_suffix file ".hhi" in - Find.iter_files ~filter [ r ] (Sys_utils.try_touch ~follow_symlinks:true) - -let touch () = - match !root with - | Some r -> touch_root r - | _ -> () - -(* Entry points to actually extract the files and set up the hhi path. - * - * We want this to be idempotent so that later code can check if a given file - * came from the hhi unarchive directory or not, to provide better error - * messages. *) -let get_hhi_root () = - match !root with - | Some r -> r - | None -> begin - let r = extract_hhis () in - root := Some r; - Relative_path.set_path_prefix Relative_path.Hhi r; - r - end - -let set_hhi_root_for_unit_test dir = - (* no need to call realpath() on this; we never extract the hhi files for our - * unit tests, so this is just a dummy value and does not need to be a real - * path*) - root := Some dir; - Relative_path.set_path_prefix Relative_path.Hhi dir diff --git a/hack/hhi/hhi.mli b/hack/hhi/hhi.mli deleted file mode 100644 index b4d0207c387..00000000000 --- a/hack/hhi/hhi.mli +++ /dev/null @@ -1,16 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * - *) - -val get_hhi_root : unit -> Path.t - -val set_hhi_root_for_unit_test : Path.t -> unit - -val get_raw_hhi_contents : unit -> (string * string) array - -val touch : unit -> unit diff --git a/hack/injection/default_injector/dune b/hack/injection/default_injector/dune new file mode 100644 index 00000000000..60fb7c864e9 --- /dev/null +++ b/hack/injection/default_injector/dune @@ -0,0 +1,3 @@ +(library + (name default_injector_config) + (implements injector_config)) diff --git a/hack/injection/default_injector/injector_config.ml b/hack/injection/default_injector/injector_config.ml index c67d4493add..5cbfe207976 100644 --- a/hack/injection/default_injector/injector_config.ml +++ b/hack/injection/default_injector/injector_config.ml @@ -1,2 +1 @@ -let use_error_tracing = false let use_test_stubbing = false diff --git a/hack/injection/default_injector/injector_config.mli b/hack/injection/default_injector/injector_config.mli deleted file mode 100644 index f5d93bdfdf9..00000000000 --- a/hack/injection/default_injector/injector_config.mli +++ /dev/null @@ -1,18 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(** This file provides only the interface, so injector configuration - * can be retreived without depending on the *actual* implementation - * file. This is because we want libraries to be able to refer to the config, - * but the actual injector to be chosen by the binary being built. - * - * Note: Buck doesn't currently have a build rule to only build .mli files - * into .cmi, so you need to compile against this file directly. *) -val use_error_tracing: bool -val use_test_stubbing: bool diff --git a/hack/injection/dune b/hack/injection/dune new file mode 100644 index 00000000000..48b807f0473 --- /dev/null +++ b/hack/injection/dune @@ -0,0 +1,4 @@ +(library + (name injector_config) + (wrapped false) + (virtual_modules injector_config)) diff --git a/hack/injection/injector_config.mli b/hack/injection/injector_config.mli index f5d93bdfdf9..a3240e791f9 100644 --- a/hack/injection/injector_config.mli +++ b/hack/injection/injector_config.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,12 +7,11 @@ * *) -(** This file provides only the interface, so injector configuration +(* This file provides only the interface, so injector configuration * can be retreived without depending on the *actual* implementation * file. This is because we want libraries to be able to refer to the config, * but the actual injector to be chosen by the binary being built. * * Note: Buck doesn't currently have a build rule to only build .mli files * into .cmi, so you need to compile against this file directly. *) -val use_error_tracing: bool -val use_test_stubbing: bool +val use_test_stubbing : bool diff --git a/hack/libancillary/dune b/hack/libancillary/dune new file mode 100644 index 00000000000..8e010958072 --- /dev/null +++ b/hack/libancillary/dune @@ -0,0 +1,8 @@ +(library + (name libancillary) + (wrapped false) + (c_names + libancillary-stubs) + (libraries + libancillary_c) + (c_flags (:standard -I%{env:CMAKE_SOURCE_DIR=xxx}))) diff --git a/hack/libancillary/libancillary.ml b/hack/libancillary/libancillary.ml index a0ea468ac0d..5a16448bad5 100644 --- a/hack/libancillary/libancillary.ml +++ b/hack/libancillary/libancillary.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. 
* All rights reserved. * @@ -7,24 +7,23 @@ * *) -let int_to_fd (i: int) : Unix.file_descr = Obj.magic i +let int_to_fd (i : int) : Unix.file_descr = Obj.magic i exception Receiving_Fd_Exception external ancil_send_fd : - Unix.file_descr (** The fd of the socket to send the payload over *) -> - Unix.file_descr (** The file descriptor you want to send *) -> - int (** Returns 0 for success, -1 on failure. *) = "stub_ancil_send_fd" + Unix.file_descr (* The fd of the socket to send the payload over *) -> + Unix.file_descr (* The file descriptor you want to send *) -> + int (* Returns 0 for success, -1 on failure. *) = "stub_ancil_send_fd" external ancil_recv_fd_ : - Unix.file_descr (** The fd of the socket to receive the payload over *) -> - int (** The fd received *) = "stub_ancil_recv_fd" + Unix.file_descr (* The fd of the socket to receive the payload over *) -> + int (* The fd received *) = "stub_ancil_recv_fd" (** Receives a file descriptor from socket_fd. Throws exception on error. *) let ancil_recv_fd socket_fd = let result = ancil_recv_fd_ socket_fd in - if result = -1 - then - raise Receiving_Fd_Exception + if result = -1 then + raise Receiving_Fd_Exception else int_to_fd result diff --git a/hack/libancillary/libancillary.mli b/hack/libancillary/libancillary.mli index 508e6f92ced..6748e310b13 100644 --- a/hack/libancillary/libancillary.mli +++ b/hack/libancillary/libancillary.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -9,11 +9,13 @@ exception Receiving_Fd_Exception -val ancil_send_fd: +val ancil_send_fd : Unix.file_descr (** The fd of the socket to send the payload over *) -> Unix.file_descr (** The file descriptor you want to send *) -> - int (** Returns 0 for success, -1 on failure. *) + int +(** Returns 0 for success, -1 on failure. *) -val ancil_recv_fd: +val ancil_recv_fd : Unix.file_descr (** The fd of the socket to receive the payload over *) -> - Unix.file_descr (** The fd received *) + Unix.file_descr +(** The fd received *) diff --git a/hack/monitor/informant_sig.ml b/hack/monitor/informant_sig.ml deleted file mode 100644 index 60ad17eec4f..00000000000 --- a/hack/monitor/informant_sig.ml +++ /dev/null @@ -1,48 +0,0 @@ -(** - * Copyright (c) 2016, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - - -type report = - (** Nothing to see here. *) - | Move_along - (** Kill the server (if one is running) and start a new one. *) - | Restart_server of ServerMonitorUtils.target_mini_state option - -type server_state = - | Server_not_yet_started - | Server_alive - | Server_dead - -(** The informant collects information to tell the monitor when to - * intelligently kill and restart the server daemon. - * - * For example: An informant may want to watch the repo state and tell - * the monitor to restart the server when a significant change in - * repo revision occurs since a fresh initialization could be faster - * than an incremental type check. *) -module type S = sig - type t - type init_env - val init : init_env -> t - (* Same as init, except it preserves internal Revision_map cache. - * This is used when server decides to restart itself due to rebase - we don't - * want Informant to then restart the server again. 
Reinitializing will discard - * the pending queue of state changes, and issue new query for base revision, - * in order to "synchronize" base revision understanding between server and - * monitor. *) - val reinit : t -> unit - val report : t -> server_state -> report - (** - * Returns true if the informant is actually running and will - * manage server lifetime. - *) - val is_managing : t -> bool - val should_start_first_server : t -> bool - val should_ignore_hh_version : init_env -> bool -end diff --git a/hack/monitor/monitorConnection.ml b/hack/monitor/monitorConnection.ml deleted file mode 100644 index 74a9f816177..00000000000 --- a/hack/monitor/monitorConnection.ml +++ /dev/null @@ -1,293 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -open ServerMonitorUtils - -let server_exists lock_file = not (Lock.check lock_file) - -let from_channel_without_buffering tic = - Marshal_tools.from_fd_with_preamble (Timeout.descr_of_in_channel tic) - -let wait_on_server_restart ic = - try - while true do - let _ = Timeout.input_char ic in - () - done - with - | End_of_file - | Sys_error _ -> - (* Server has exited and hung up on us *) - () - -let send_version oc = - Marshal_tools.to_fd_with_preamble (Unix.descr_of_out_channel oc) - Build_id.build_revision |> ignore; - (** For backwards-compatibility, newline has always followed the version *) - let _ = Unix.write (Unix.descr_of_out_channel oc) "\n" 0 1 in - () - -let send_server_handoff_rpc handoff_options oc = - Marshal_tools.to_fd_with_preamble (Unix.descr_of_out_channel oc) - (MonitorRpc.HANDOFF_TO_SERVER handoff_options) - |> ignore - -let send_shutdown_rpc oc = - Marshal_tools.to_fd_with_preamble (Unix.descr_of_out_channel oc) - MonitorRpc.SHUT_DOWN - |> ignore - -let establish_connection ~timeout config = - let sock_name = Socket.get_path config.socket_file in - let sockaddr = - if Sys.win32 then - let ic = open_in_bin sock_name in - let port = input_binary_int ic in - close_in ic; - Unix.(ADDR_INET (inet_addr_loopback, port)) - else - Unix.ADDR_UNIX sock_name in - try Ok (Timeout.open_connection ~timeout sockaddr) with - | Unix.Unix_error (Unix.ECONNREFUSED, _, _) - | Unix.Unix_error (Unix.ENOENT, _, _) -> - if not (server_exists config.lock_file) then Error Server_missing - else Error Monitor_socket_not_ready - -let get_cstate config (ic, oc) = - try - send_version oc; - let cstate : connection_state = from_channel_without_buffering ic in - Ok (ic, oc, cstate) - with _ -> - Timeout.shutdown_connection ic; - Timeout.close_in_noerr ic; - if not (server_exists config.lock_file) then Error Server_missing - else Error Monitor_connection_failure - -let verify_cstate ic cstate = - match cstate with - | Connection_ok -> Ok () - | Build_id_mismatch_ex mismatch_info -> - (* The server is out of date and is going to exit. Subsequent calls - * to connect on the Unix Domain Socket might succeed, connecting to - * the server that is about to die, and eventually we will be hung - * up on while trying to read from our end. - * - * To avoid that fate, when we know the server is about to exit, we - * wait for the connection to be closed, signaling that the server - * has exited and the OS has cleaned up after it, then we try again. 
- * - * See also: ServerMonitor.client_out_of_date - *) - wait_on_server_restart ic; - Timeout.close_in_noerr ic; - Error (Build_id_mismatched (Some mismatch_info)) - | Build_id_mismatch -> - (* The server no longer ever sends this message, as of July 2017 *) - failwith "Ancient version of server sent old Build_id_mismatch" - -(** Consume sequence of Prehandoff messages. *) -let rec consume_prehandoff_messages ic oc = - let module PH = Prehandoff in - let m: PH.msg = from_channel_without_buffering ic in - match m with - | PH.Sentinel -> Ok (ic, oc) - | PH.Server_dormant_connections_limit_reached -> - Printf.eprintf @@ "Connections limit on dormant server reached."^^ - " Be patient waiting for a server to be started."; - Error Server_dormant - | PH.Server_not_alive_dormant _ -> - Printf.eprintf "Waiting for a server to be started...%s\n%!" - ClientMessages.waiting_for_server_to_be_started_doc; - consume_prehandoff_messages ic oc - | PH.Server_died_config_change -> - Printf.eprintf ("Last server exited due to config change. Please re-run client" ^^ - " to force discovery of the correct version of the client."); - Error Server_died - | PH.Server_died {PH.status; PH.was_oom} -> - (match was_oom, status with - | true, _ -> - Printf.eprintf "Last server killed by OOM Manager.\n%!"; - | false, Unix.WEXITED exit_code -> - Printf.eprintf "Last server exited with code: %d.\n%!" exit_code - | false, Unix.WSIGNALED signal -> - Printf.eprintf "Last server killed by signal: %d.\n%!" signal - | false, Unix.WSTOPPED signal -> - Printf.eprintf "Last server stopped by signal: %d.\n%!" signal); - (** Monitor will exit now that it has provided a client with a reason - * for the last server dying. Wait for the Monitor to exit. *) - wait_on_server_restart ic; - Error Server_died - -let connect_to_monitor ~timeout config = - let open Core_result in - Timeout.with_timeout - ~timeout - ~on_timeout:(fun _ -> - (** - * Monitor should always readily accept connections. In theory, this will - * only timeout if the Monitor is being very heavily DDOS'd, or the Monitor - * has wedged itself (a bug). - * - * The DDOS occurs when the Monitor's new connections (arriving on - * the socket) queue grows faster than they are being processed. This can - * happen in two scenarios: - * 1) Malicious DDOSer fills up new connection queue (incoming - * connections on the socket) quicker than the queue is being - * consumed. - * 2) New client connections to the monitor are being created by the - * retry logic in hh_client faster than those cancelled connections - * (cancelled due to the timeout above) are being discarded by the - * monitor. This could happen from thousands of hh_clients being - * used to parallelize a job. This is effectively an inadvertent DDOS. - * In detail, suppose the timeout above is set to 1 ssecond and that - * 1000 thousand hh_client have timed out at the line above. Then these - * 1000 clients will cancel the connection and retry. But the Monitor's - * connection queue still has these dead/canceled connections waiting - * to be processed. Suppose it takes the monitor longer than 1 - * millisecond to handle and discard a dead connection. Then the - * 1000 retrying hh_clients will again add another 1000 dead - * connections during retrying even tho the monitor has discarded - * fewer than 1000 dead connections. Thus, no progress will be made - * on clearing out dead connections and all new connection attempts - * will time out. 
- * - * We ameliorate this by having the timeout be quite large - * (many seconds) and by not auto-retrying connections to the Monitor. - * *) - HackEventLogger.client_connect_to_monitor_timeout (); - if not (server_exists config.lock_file) then Error Server_missing - else Error ServerMonitorUtils.Monitor_establish_connection_timeout - ) - ~do_:begin fun timeout -> - establish_connection ~timeout config >>= fun (ic, oc) -> - get_cstate config (ic, oc) - end - -let connect_and_shut_down config = - let open Core_result in - connect_to_monitor ~timeout:3 config >>= fun (ic, oc, cstate) -> - verify_cstate ic cstate >>= fun () -> - send_shutdown_rpc oc; - Timeout.with_timeout - ~timeout:3 - ~on_timeout:(fun () -> - if not (server_exists config.lock_file) then Error Server_missing - else Ok ServerMonitorUtils.SHUTDOWN_UNVERIFIED - ) - ~do_:begin fun _ -> - wait_on_server_restart ic; - Ok ServerMonitorUtils.SHUTDOWN_VERIFIED - end - -let connect_once ~timeout config handoff_options = - (***************************************************************************) - (* CONNECTION HANDSHAKES *) - (* Explains what connect_once does+returns, and how callers use the result.*) - (***************************************************************************) - (* 1. OPEN SOCKET. After this point we have a working stdin/stdout to the *) - (* process. Implemented in establish_connection. *) - (* | catch EConnRefused/ENoEnt/Timeout 1s when lockfile present -> *) - (* Error Monitor_socket_not_ready. *) - (* This is unexpected! But can happen if you manage to catch the *) - (* monitor in the short timeframe after it has grabbed its lock but *) - (* before it has started listening in on its socket. *) - (* -> "hh_client check/ide" -> retry from step 1, up to 800 times. *) - (* The number 800 is hard-coded in 9 places through the codebase. *) - (* -> "hh_client start" -> print "replacing unresponsive server" *) - (* kill_server; start_server; exit. *) - (* | catch Timeout s when lockfile present -> *) - (* Error Monitor_establish_connection_timeout *) - (* This is unexpected! after all the monitor is always responsive, *) - (* and indeed start_server waits until responsive before returning. *) - (* But this can happen during a DDOS. *) - (* -> "hh_client check/ide" -> Its retry attempts are passed to the *) - (* monitor connection attempt already. So in this timeout all *) - (* the retries have already been consumed. Just exit. *) - (* -> "hh_client start" -> print "replacing unresponsive server" *) - (* kill_server; start_server; exit. *) - (* | catch EConnRefused/ENoEnt/Timeout when lockfile absent -> *) - (* Error Server_missing. *) - (* -> "hh_client ide" -> raise Exit_with IDE_no_server. *) - (* -> "hh_client check" -> start_server; retry step 1, up to 800x. *) - (* -> "hh_client start" -> start_server; exit. *) - (* | catch other exception -> unhandled. *) - (* *) - (* 2. SEND VERSION; READ VERSION; CHECK VERSIONS. After this point we can *) - (* safely marshal OCaml types back and forth. Implemented in get_cstate *) - (* and verify_cstate. *) - (* | catch any exception when lockfile present -> *) - (* close_connection; Error Monitor_connection_failure. *) - (* This is unexpected! *) - (* -> "hh_client check/ide" -> retry from step 1, up to 800 times. *) - (* -> "hh_client start" -> print "replacing unresponsive server" *) - (* kill_server; start_server; exit. *) - (* | catch any exception when lockfile absent -> *) - (* close_connection; Error Server_missing. 
*) - (* -> "hh_client ide" -> raise Exit_with IDE_no_server *) - (* -> "hh_client check" -> start_server; retry step 1, up to 800x. *) - (* -> "hh_client start" -> start_server; exit. *) - (* | if version numbers differ -> *) - (* Error Build_mismatch. *) - (* -> "hh_client ide" -> raise Exit_with IDE_no_server. *) - (* -> "hh_client check" -> close_log_tailer; retry from step 1. *) - (* -> "hh_client start" -> start_server; exit. *) - (* *) - (* 3. SEND HANDOFF; READ RESPONSE. After this point we have a working *) - (* connection to a server who we believe is ready to handle our messages. *) - (* Handoff is the stage of the protocol when we're speaking to the monitor *) - (* rather than directly to the server process itself. Implemented in *) - (* send_server_handoff_rpc and consume_prehandoff_message. *) - (* | response Server_name_not_found -> *) - (* raise Exit_with Server_name_not_found. *) - (* | response Server_not_alive_dormant -> *) - (* print "Waiting for server to start"; retry step 5, unlimited times. *) - (* | response Server_dormant_connections_limit_reached -> *) - (* Error Server_dormant. *) - (* -> "hh_client ide" -> raise Exit_with IDE_no_server. *) - (* -> "hh_client start" -> print "Server already exists but is *) - (* dormant"; exit. *) - (* -> "hh_client check" -> print "No server running, and connection *) - (* limit reached for waiting on the next server to be started. *) - (* Please wait patiently." raise Exit_with No_server_running. *) - (* | response Server_died -> *) - (* print "Last killed by OOM / signal / stopped by signal / exited"; *) - (* wait for server to close; Error Server_died. *) - (* -> "hh_client ide" -> raise Exit_with IDE_no_server. *) - (* -> "hh_client start" -> start_server. *) - (* -> "hh_client check" -> retry from step 1, up to 800 times. *) - (* | catch any exception -> unhandled. *) - (* *) - (* The following two steps aren't implemented inside connect_once but are *) - (* typically done by callers after connect_once has succeeded... *) - (* *) - (* 4. READ "HELLO" FROM SERVER. After this point we have evidence that the *) - (* server is ready to handle our messages. We basically gobble whatever *) - (* the server sends until it finally sends a line with just "hello". *) - (* Implemented in wait_for_server_hello. *) - (* | read anything other than "hello" -> retry from step 4, up to 800x. *) - (* | catch Timeout 1s -> retry from step 4, up to 800 times. *) - (* | catch exception EndOfFile/Sys_error -> *) - (* raise ServerHungUp. *) - (* -> "hh_client ide/check" -> program exit, code=No_server_running. *) - (* -> clientStart never actually bothers to do step 4. *) - (* | catch other exception -> unhandled. *) - (* *) - (* 5. SEND CONNECTION TYPE; READ RESPONSE. After this point we have *) - (* evidence that the server is able to handle our connection. The *) - (* connection type indicates Persistent vs Non-persistent. *) - (* | response Denied_due_to_existing_persistent_connection. *) - (* -> "hh_client lsp" -> raise Lsp.Error_server_start. *) - (* | catch any exception -> unhandled. 
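To make the five-step handshake described above easier to follow, a hypothetical caller of connect_once might look like the sketch below; the timeout value and the per-error messages are illustrative only, not part of this diff.

(* Hypothetical caller, assuming the connect_once defined in this file and the
 * connection_error constructors from ServerMonitorUtils. *)
let connect_or_explain config handoff_options =
  match connect_once ~timeout:30 config handoff_options with
  | Ok (_ic, _oc) ->
    Hh_logger.log "Monitor handed the connection off to a server."
  | Error ServerMonitorUtils.Server_missing ->
    Hh_logger.log "No monitor is running (lock file is free)."
  | Error (ServerMonitorUtils.Build_id_mismatched _) ->
    Hh_logger.log "Monitor is running a different build than this client."
  | Error _ ->
    Hh_logger.log "Could not connect to the monitor."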
*) - (***************************************************************************) - let open Core_result in - connect_to_monitor ~timeout config >>= fun (ic, oc, cstate) -> - verify_cstate ic cstate >>= fun () -> - send_server_handoff_rpc handoff_options oc; - consume_prehandoff_messages ic oc diff --git a/hack/monitor/monitorConnection.mli b/hack/monitor/monitorConnection.mli deleted file mode 100644 index 939490a8c26..00000000000 --- a/hack/monitor/monitorConnection.mli +++ /dev/null @@ -1,20 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -val server_exists : string -> bool - -val connect_once: - timeout:int -> - ServerMonitorUtils.monitor_config -> - MonitorRpc.handoff_options -> - (Timeout.in_channel * out_channel, ServerMonitorUtils.connection_error) result - -val connect_and_shut_down: ServerMonitorUtils.monitor_config -> - (ServerMonitorUtils.shutdown_result, ServerMonitorUtils.connection_error) - result diff --git a/hack/monitor/monitorRpc.ml b/hack/monitor/monitorRpc.ml deleted file mode 100644 index e62014b97c1..00000000000 --- a/hack/monitor/monitorRpc.ml +++ /dev/null @@ -1,23 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type handoff_options = { - (** If server is dormant because it is waiting for Informant to start one, - * set this to true to start a server anyway. *) - force_dormant_start : bool; - (* There can be multiple named channels between server and monitor in order - * to prioritize some requests over others. Connecting code needs to specify - * which channel it wants to use. *) - pipe_name : string; -} - -type command = - | HANDOFF_TO_SERVER of handoff_options - (** Shut down all servers and then the monitor. *) - | SHUT_DOWN diff --git a/hack/monitor/prehandoff.ml b/hack/monitor/prehandoff.ml deleted file mode 100644 index 8d49e9e5a9e..00000000000 --- a/hack/monitor/prehandoff.ml +++ /dev/null @@ -1,29 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type exit_status = { - status : Unix.process_status; - was_oom: bool; -} - -type msg = - (** Last of the prehandoff messages. *) - | Sentinel - (** The monitor keeps a queue of connections that will need to be passed - * onto the next server instance. This queue has a size limit that has been - * reached. *) - | Server_dormant_connections_limit_reached - (** Monitor is running but has no server - i.e. dormant. Connection has been - * placed on a queue to be sent to the next started server. *) - | Server_not_alive_dormant of string - (** Server process died. Connect another client to start another one. *) - | Server_died of exit_status - (** Server died from a config change, and the Monitor didn't automatically - * start a new one because a version change in the config file. *) - | Server_died_config_change diff --git a/hack/monitor/serverMonitor.ml b/hack/monitor/serverMonitor.ml deleted file mode 100644 index 7b58a8618ff..00000000000 --- a/hack/monitor/serverMonitor.ml +++ /dev/null @@ -1,648 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -(** - * The server monitor is the parent process for a server. It - * listens to a socket for client connections and passes the connections - * to the server and serves the following objectives: - * - * 1) Readily accepts client connections - * 2) Confirms a Build ID match (killing itself and the server quickly - * on mismatch) - * 3) Hands the client connection to the daemon server - * 4) Tracks when the server process crashes or OOMs and echos - * its fate to the next client. -*) - -open Hh_core -open ServerProcess -open ServerMonitorUtils - - -module Sent_fds_collector = struct - - (** - This module exists to fix an issue with libancillary (passing a file descriptor - to another process with sendmsg over Unix Domain Sockets) and certain operating - systems. It allows us to delay closing of a File Descriptor inside the Monitor - until it is safe to do so. - - Normally: - Monitor sends client FD to Server process, and immediately closes the FD. - This is fine even if the Server is busy and hasn't "recv_fd" the FD yet - because this doesn't really "close" the file. The kernel still considers - it to be open by the receiving process. If the server closes the FD - then reads on the client will get an EOF. If the client closes the FD - then reads on the server will get an EOF. - - Mac OS X: - EOF isn't showing up correctly on file descriptors passed between - processes. - When the Monitor closes the FD after sending it to the Server (and - before the Server receives it), the kernel thinks it is the last open - descriptor on the file and actually closes it. After the server - receieves the FD, it gets an EOF when reading from it (which it shouldn't - because the client is still there; aside: oddly enough, writing to it - succeeds instead of getting EPIPE). The server then closes that FD after - reading the EOF. Normally (as noted above) the client would read an - EOF after this. But (this is the bug) this EOF never shows up and the - client blocks forever on "select" instead. - - To get around this problem, we want to close the FD in the monitor only - after the server has received it. Unfortunately, we don't actually - have a way to reliably detect that it has been received. So we just delay - closing by 2 seconds. - - Note: It's not safe to detect the receiving by reading the - Hello message from the server (since it could/would be consumed - here instead of by the Client) nor by "select" (by a race condition - with the client, the select might miss the Hello, and could prevent - an EOF from being read by the server). - *) - - module Fd_scheduler = Scheduler.Make(struct - type t = (** Unix.time *) float - end) - - let cleanup_fd fd = - if Sys_utils.is_apple_os () then - (** Close it 2 seconds later. *) - let trigger = Unix.gettimeofday () +. 
2.0 in - Fd_scheduler.wait_for_fun - ~once:true - ~priority:1 - (fun time -> time >= trigger) - (fun x -> - let () = Printf.eprintf "Closing client fd\n" in - let () = Unix.close fd in x) - else - Unix.close fd - - let collect_garbage () = - if Sys_utils.is_apple_os () then - ignore (Fd_scheduler.wait_and_run_ready (Unix.gettimeofday ())) - else - () -end;; - - -exception Malformed_build_id -exception Send_fd_failure of int - -module Make_monitor (SC : ServerMonitorUtils.Server_config) -(Informant : Informant_sig.S) = struct - - type env = { - informant: Informant.t; - server: ServerProcess.server_process; - server_start_options: SC.server_start_options; - (** How many times have we tried to relaunch it? *) - retries: int; - max_purgatory_clients: int; - (** Version of this running server, as specified in the config file. *) - current_version: string option; - (** After sending a Server_not_alive_dormant during Prehandoff, - * clients are put here waiting for a server to come alive, at - * which point they get pushed through the rest of prehandoff and - * then sent to the living server. - * - * String is the server name it wants to connect to. *) - purgatory_clients : (MonitorRpc.handoff_options * Unix.file_descr) Queue.t; - (** Whether to ignore hh version mismatches *) - ignore_hh_version : bool; - } - - type t = env * ServerMonitorUtils.monitor_config * Unix.file_descr - - let fd_to_int (x: Unix.file_descr) : int = Obj.magic x - - let msg_to_channel fd msg = - (* This FD will be passed to a server process, so avoid using Ocaml's - * channels which have built-in buffering. Even though we are only writing - * to the FD here, it seems using Ocaml's channels also causes read - * buffering to happen here, so the server process doesn't get what was - * meant for it. *) - Marshal_tools.to_fd_with_preamble fd msg - |> ignore - - let setup_handler_for_signals handler signals = - List.iter signals begin fun signal -> - Sys_utils.set_signal signal (Sys.Signal_handle handler) - end - - let setup_autokill_server_on_exit process = - try - setup_handler_for_signals begin fun _ -> - Hh_logger.log "Got an exit signal. Killing server and exiting."; - SC.kill_server process; - Exit_status.exit Exit_status.Interrupted - end [Sys.sigint; Sys.sigquit; Sys.sigterm; Sys.sighup]; - with - | _ -> - Hh_logger.log "Failed to set signal handler" - - let sleep_and_check socket = - let ready_socket_l, _, _ = Unix.select [socket] [] [] (1.0) in - ready_socket_l <> [] - - let start_server ?target_mini_state ~informant_managed options exit_status = - let server_process = SC.start_server - ?target_mini_state - ~prior_exit_status:exit_status - ~informant_managed options in - setup_autokill_server_on_exit server_process; - Alive server_process - - let maybe_start_first_server options informant = - if Informant.should_start_first_server informant then begin - Hh_logger.log "Starting first server"; - HackEventLogger.starting_first_server (); - start_server ~informant_managed:(Informant.is_managing informant) - options None - end - else begin - Hh_logger.log ("Not starting first server. 
" ^^ - "Starting will be triggered by informant later."); - Not_yet_started - end - - let kill_server_with_check = function - | Alive server -> - SC.kill_server server - | _ -> () - - let wait_for_server_exit_with_check server kill_signal_time = - match server with - | Alive server -> - SC.wait_for_server_exit server kill_signal_time - | _ -> () - - let kill_server_and_wait_for_exit env = - kill_server_with_check env.server; - let kill_signal_time = Unix.gettimeofday () in - wait_for_server_exit_with_check env.server kill_signal_time - - (** Reads current hhconfig contents from disk and returns true if the - * version specified in there matches our currently running version. *) - let is_config_version_matching env = - let filename = Relative_path.from_root Config_file.file_path_relative_to_repo_root in - let contents = Sys_utils.cat (Relative_path.to_absolute filename) in - let config = Config_file.parse_contents contents in - let new_version = SMap.get "version" config in - match env.current_version, new_version with - | None, None -> true - | None, Some _ - | Some _, None -> - false - | Some cv, Some nv -> - String.equal cv nv - - (** Actually starts a new server. *) - let start_new_server ?target_mini_state env exit_status = - let informant_managed = Informant.is_managing env.informant in - let new_server = start_server ?target_mini_state - ~informant_managed env.server_start_options exit_status in - { env with - server = new_server; - retries = env.retries + 1; - } - - (** Kill the server (if it's running) and restart it - maybe. Obeying the rules - * of state transitions. See docs on the ServerProcess.server_process ADT for - * state transitions. *) - let kill_and_maybe_restart_server ?target_mini_state env exit_status = - kill_server_and_wait_for_exit env; - let version_matches = is_config_version_matching env in - match env.server, version_matches with - | Died_config_changed, _ -> - (** Now we can start a new instance safely. - * See diagram on ServerProcess.server_process docs. *) - start_new_server ?target_mini_state env exit_status - | Not_yet_started, false - | Alive _, false - | Informant_killed, false - | Died_unexpectedly _, false -> - (** Can't start server instance. State goes to Died_config_changed - * See diagram on ServerProcess.server_process docs. *) - Hh_logger.log "Avoiding starting a new server because version in config no longer matches."; - { env with server = Died_config_changed } - | Not_yet_started, true - | Alive _, true - | Informant_killed, true - | Died_unexpectedly _, true -> - (** Start new server instance because config matches. - * See diagram on ServerProcess.server_process docs. *) - start_new_server ?target_mini_state env exit_status - - let read_version fd = - let client_build_id: string = Marshal_tools.from_fd_with_preamble fd in - let newline_byte = Bytes.create 1 in - let _ = Unix.read fd newline_byte 0 1 in - if newline_byte <> "\n" then - (Hh_logger.log "Did not find newline character after version"; - raise Malformed_build_id); - client_build_id - - let rec handle_monitor_rpc env client_fd = - let cmd : MonitorRpc.command = - Marshal_tools.from_fd_with_preamble client_fd in - match cmd with - | MonitorRpc.HANDOFF_TO_SERVER handoff_options -> - client_prehandoff ~is_purgatory_client:false env handoff_options client_fd - | MonitorRpc.SHUT_DOWN -> - Hh_logger.log "Got shutdown RPC. 
Shutting down."; - let kill_signal_time = Unix.gettimeofday () in - kill_server_with_check env.server; - wait_for_server_exit_with_check env.server kill_signal_time; - Exit_status.(exit No_error) - - and hand_off_client_connection server_fd client_fd = - let status = Libancillary.ancil_send_fd server_fd client_fd in - if (status <> 0) then - (Hh_logger.log "Failed to handoff FD to server."; - raise (Send_fd_failure status)) - else - Sent_fds_collector.cleanup_fd client_fd - - (** Sends the client connection FD to the server process then closes the - * FD. *) - and hand_off_client_connection_with_retries server_fd retries client_fd = - let _, ready_l, _ = Unix.select [] [server_fd] [] (0.5) in - if ready_l <> [] then - try hand_off_client_connection server_fd client_fd - with - | e -> - if retries > 0 then - (Hh_logger.log "Retrying FD handoff"; - hand_off_client_connection_with_retries - server_fd (retries - 1) client_fd) - else - (Hh_logger.log "No more retries. Ignoring request."; - HackEventLogger.send_fd_failure e; - Unix.close client_fd;) - else if retries > 0 then - (Hh_logger.log "server socket not yet ready. Retrying."; - hand_off_client_connection_with_retries - server_fd (retries - 1) client_fd) - else begin - Hh_logger.log - "server socket not yet ready. No more retries. Ignoring request."; - Unix.close client_fd - end - - (** Does not return. *) - and client_out_of_date_ client_fd mismatch_info = - msg_to_channel client_fd (Build_id_mismatch_ex mismatch_info); - HackEventLogger.out_of_date () - - (** Kills servers, sends build ID mismatch message to client, and exits. - * - * Does not return. Exits after waiting for server processes to exit. So - * the client can wait for socket closure as indication that both the monitor - * and server have exited. - *) - and client_out_of_date env client_fd mismatch_info = - Hh_logger.log "Client out of date. Killing server."; - kill_server_with_check env.server; - let kill_signal_time = Unix.gettimeofday () in - (** If we detect out of date client, should always kill server and exit - * monitor, even if messaging to channel or event logger fails. *) - (try client_out_of_date_ client_fd mismatch_info with - | e -> Hh_logger.log - "Handling client_out_of_date threw with: %s" (Printexc.to_string e)); - wait_for_server_exit_with_check env.server kill_signal_time; - Exit_status.exit Exit_status.Build_id_mismatch - - (** Send (possibly empty) sequences of messages before handing off to - * server. *) - and client_prehandoff ~is_purgatory_client env handoff_options client_fd = - let module PH = Prehandoff in - match env.server with - | Alive server -> - let server_fd = snd @@ List.find_exn server.out_fds - ~f:(fun x -> fst x = handoff_options.MonitorRpc.pipe_name) in - let since_last_request = - (Unix.time ()) -. !(server.last_request_handoff) in - (** TODO: Send this to client so it is visible. *) - Hh_logger.log "Got %s request for typechecker. Prior request %.1f seconds ago" - handoff_options.MonitorRpc.pipe_name since_last_request; - msg_to_channel client_fd PH.Sentinel; - hand_off_client_connection_with_retries server_fd 8 client_fd; - HackEventLogger.client_connection_sent (); - server.last_request_handoff := Unix.time (); - { env with server = (Alive server) } - | Died_unexpectedly (status, was_oom) -> - (** Server has died; notify the client *) - msg_to_channel client_fd (PH.Server_died {PH.status; PH.was_oom}); - (** Next client to connect starts a new server. 
*) - Exit_status.exit Exit_status.No_error - | Died_config_changed -> - if not is_purgatory_client then - let env = kill_and_maybe_restart_server env None in - (** Assert that the restart succeeded, and then push prehandoff through again. *) - begin match env.server with - | Alive _ -> - (** Server restarted. We want to re-run prehandoff, which will - * actually do the prehandoff this time. *) - client_prehandoff ~is_purgatory_client env handoff_options client_fd - | Died_unexpectedly _ - | Died_config_changed - | Not_yet_started - | Informant_killed -> - Hh_logger.log ("Unreachable state. Server should be alive after trying a restart" ^^ - " from Died_config_changed state"); - failwith "Failed starting server transitioning off Died_config_changed state" - end - else - (msg_to_channel client_fd PH.Server_died_config_change; - env) - | Not_yet_started - | Informant_killed -> - let env = - if handoff_options.MonitorRpc.force_dormant_start then begin - msg_to_channel client_fd (PH.Server_not_alive_dormant - "Warning - starting a server by force-dormant-start option..."); - kill_and_maybe_restart_server env None - end else begin - msg_to_channel client_fd (PH.Server_not_alive_dormant - "Server killed by informant. Waiting for next server..."); - env - end - in - if (Queue.length env.purgatory_clients) >= env.max_purgatory_clients then - let () = msg_to_channel - client_fd PH.Server_dormant_connections_limit_reached in - env - else - let () = Queue.add (handoff_options, client_fd) - env.purgatory_clients in - env - - and ack_and_handoff_client env client_fd = - try - let client_version = read_version client_fd in - if (not env.ignore_hh_version) && client_version <> Build_id.build_revision - then - client_out_of_date env client_fd ServerMonitorUtils.current_build_info - else ( - msg_to_channel client_fd Connection_ok; - handle_monitor_rpc env client_fd - ) - with - | Malformed_build_id as e -> - HackEventLogger.malformed_build_id (); - Hh_logger.log "Malformed Build ID"; - raise e - - and push_purgatory_clients env = - (** We create a queue and transfer all the purgatory clients to it before - * processing to avoid repeatedly retrying the same client even after - * an EBADF. Control flow is easier this way than trying to manage an - * immutable env in the face of exceptions. *) - let clients = Queue.create () in - Queue.transfer env.purgatory_clients clients; - let env = Queue.fold begin - fun env (handoff_options, client_fd) -> - try client_prehandoff ~is_purgatory_client:true env handoff_options client_fd with - | Unix.Unix_error(Unix.EPIPE, _, _) - | Unix.Unix_error(Unix.EBADF, _, _) -> - Hh_logger.log "Purgatory client disconnected. Dropping."; - env - end env clients in - env - - and maybe_push_purgatory_clients env = - match env.server, Queue.length env.purgatory_clients with - | Alive _, 0 -> - env - | Died_config_changed, _ -> - (** These clients are waiting for a server to be started. But this Monitor - * is waiting for a new client to connect (which confirms to us that we - * are running the correct version of the Monitor). So let them know - * that they might want to do something. *) - push_purgatory_clients env - | Alive _, _ -> - push_purgatory_clients env - | Not_yet_started, _ | Informant_killed, _ | Died_unexpectedly _, _-> - env - - (** Kill command from client is handled by server server, so the monitor - * needs to check liveness of the server process to know whether - * to stop itself. 
*) - let update_status_ (env: env) monitor_config = - let env = match env.server with - | Alive process -> - let pid, proc_stat = SC.wait_pid process in - (match pid, proc_stat with - | 0, _ -> - (* "pid=0" means the pid we waited for (i.e. process) hasn't yet died/stopped *) - env - | _, _ -> - (* "pid<>0" means the pid has died or received a stop signal *) - let oom_code = Exit_status.(exit_code Out_of_shared_memory) in - let was_oom = match proc_stat with - | Unix.WEXITED code when code = oom_code -> true - | _ -> Sys_utils.check_dmesg_for_oom process.pid "hh_server" in - SC.on_server_exit monitor_config; - ServerProcessTools.check_exit_status proc_stat process monitor_config; - { env with server = Died_unexpectedly (proc_stat, was_oom) }) - | _ -> env - in - let exit_status, server_state = match env.server with - | Alive _ -> - None, Informant_sig.Server_alive - | Died_unexpectedly ((Unix.WEXITED c), _) -> - Some c, Informant_sig.Server_dead - | Not_yet_started -> - None, Informant_sig.Server_not_yet_started - | Died_unexpectedly ((Unix.WSIGNALED _| Unix.WSTOPPED _), _) - | Died_config_changed - | Informant_killed -> - None, Informant_sig.Server_dead in - env, exit_status, server_state - - let update_status env monitor_config = - let env, exit_status, server_state = update_status_ env monitor_config in - let informant_report = Informant.report env.informant server_state in - let is_watchman_fresh_instance = match exit_status with - | Some c when c = Exit_status.(exit_code Watchman_fresh_instance) -> true - | _ -> false in - let is_watchman_failed = match exit_status with - | Some c when c = Exit_status.(exit_code Watchman_failed) -> true - | _ -> false in - let is_config_changed = match exit_status with - | Some c when c = Exit_status.(exit_code Hhconfig_changed) -> true - | _ -> false in - let is_file_heap_stale = match exit_status with - | Some c when c = Exit_status.(exit_code File_heap_stale) -> true - | _ -> false in - let is_sql_assertion_failure = match exit_status with - | Some c - when c = Exit_status.(exit_code Sql_assertion_failure) || - c = Exit_status.(exit_code Sql_cantopen) || - c = Exit_status.(exit_code Sql_corrupt) || - c = Exit_status.(exit_code Sql_misuse) -> true - | _ -> false in - let is_big_rebase = match exit_status with - | Some c when c = Exit_status.(exit_code Big_rebase_detected) -> true - | _ -> false in - let max_watchman_retries = 3 in - let max_sql_retries = 3 in - if (is_watchman_failed || is_watchman_fresh_instance) - && (env.retries < max_watchman_retries) then begin - Hh_logger.log "Watchman died. Restarting hh_server (attempt: %d)" - (env.retries + 1); - kill_and_maybe_restart_server env exit_status - end - else match informant_report with - | Informant_sig.Restart_server target_mini_state -> - Hh_logger.log "Informant directed server restart. Restarting server."; - HackEventLogger.informant_induced_restart (); - kill_and_maybe_restart_server ?target_mini_state env exit_status - | Informant_sig.Move_along -> - if is_config_changed then begin - Hh_logger.log "hh_server died from hh config change. Restarting"; - kill_and_maybe_restart_server env exit_status - end else if is_file_heap_stale then begin - Hh_logger.log - "Several large rebases caused FileHeap to be stale. Restarting"; - kill_and_maybe_restart_server env exit_status - end else if is_big_rebase then begin - (* Server detected rebase sooner than monitor. If we keep going, - * monitor will eventually discover the same rebase and restart the - * server again for no reason. 
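The restart policy above repeats the same exit-code test several times; purely as an illustration (not part of this diff), that pattern could be captured by a small helper such as:

(* Hypothetical helper over the pattern used in update_status above. *)
let exited_with exit_status expected =
  match exit_status with
  | Some code when code = Exit_status.exit_code expected -> true
  | _ -> false

(* e.g. exited_with exit_status Exit_status.Watchman_failed *)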
Reinitializing informant to bring it to - * the same understanding of current revision as server. *) - Informant.reinit env.informant; - Hh_logger.log - "Server exited because of big rebase. Restarting"; - kill_and_maybe_restart_server env exit_status - end else if is_sql_assertion_failure - && (env.retries < max_sql_retries) then begin - Hh_logger.log - "Sql failed. Restarting hh_server in fresh mode (attempt: %d)" - (env.retries + 1); - kill_and_maybe_restart_server env exit_status - end - else - env - - let rec check_and_run_loop ?(consecutive_throws=0) env monitor_config - (socket: Unix.file_descr) = - let env, consecutive_throws = - try check_and_run_loop_ env monitor_config socket, 0 with - | Unix.Unix_error (Unix.ECHILD, _, _) -> - let stack = Printexc.get_backtrace () in - ignore (Hh_logger.log - "check_and_run_loop_ threw with Unix.ECHILD. Exiting. - %s" stack); - Exit_status.exit Exit_status.No_server_running - | Exit_status.Exit_with _ as e -> raise e - | e -> - let stack = Printexc.get_backtrace () in - if consecutive_throws > 500 then begin - Hh_logger.log "Too many consecutive exceptions."; - Hh_logger.log "Probably an uncaught exception rethrown each retry. Exiting. %s" stack; - HackEventLogger.uncaught_exception e; - Exit_status.exit Exit_status.Uncaught_exception - end; - Hh_logger.log "check_and_run_loop_ threw with exception: %s - %s" - (Printexc.to_string e) stack; - env, consecutive_throws + 1 - in - check_and_run_loop ~consecutive_throws env monitor_config socket - - and check_and_run_loop_ env monitor_config - (socket: Unix.file_descr) = - let lock_file = monitor_config.lock_file in - if not (Lock.grab lock_file) then - (Hh_logger.log "Lost lock; terminating.\n%!"; - HackEventLogger.lock_stolen lock_file; - Exit_status.(exit Lock_stolen)); - let env = maybe_push_purgatory_clients env in - let () = Sent_fds_collector.collect_garbage () in - let has_client = sleep_and_check socket in - let env = update_status env monitor_config in - if (not has_client) then - env - else - try - let fd, _ = Unix.accept socket in - try - HackEventLogger.accepted_client_fd (fd_to_int fd); - ack_and_handoff_client env fd - with - | Exit_status.Exit_with _ as e -> raise e - | e -> - (HackEventLogger.ack_and_handoff_exception e; - Hh_logger.log - "Handling client connection failed. Ignoring connection attempt."; - Unix.close fd; - env) - with - | Exit_status.Exit_with _ as e -> raise e - | e -> - (HackEventLogger.accepting_on_socket_exception e; - Hh_logger.log - "Accepting on socket failed. Ignoring client connection attempt."; - env) - - let check_and_run_loop_once (env, monitor_config, socket) = - let env = check_and_run_loop_ env monitor_config socket in - env, monitor_config, socket - - let start_monitor ?current_version ~waiting_client ~max_purgatory_clients - server_start_options informant_init_env - monitor_config = - let socket = Socket.init_unix_socket monitor_config.socket_file in - (* If the client started the server, it opened an FD before forking, so it - * can be notified when the monitor socket is ready. The FD number was - * passed in program args. *) - Option.iter waiting_client begin fun fd -> - let oc = Unix.out_channel_of_descr fd in - try - output_string oc (ServerMonitorUtils.ready^"\n"); - close_out oc; - with - | Sys_error _ - | Unix.Unix_error _ as e -> - Printf.eprintf "Caught exception while waking client: %s\n%!" 
- (Printexc.to_string e) - end; - (** It is essential that we initiate the Informant before the server if we - * want to give the opportunity for the Informant to truly take - * ownership over the lifetime of the server. - * - * This is because start_server won't actually start a server if it sees - * a hg update sentinel file indicating an hg update is in-progress. - * Starting the informant first ensures that its Watchman watch is started - * before we check for the hgupdate sentinel file - this is required - * for the informant to properly observe an update is complete without - * hitting race conditions. *) - let informant = Informant.init informant_init_env in - let server_process = maybe_start_first_server - server_start_options informant in - let env = { - informant; - max_purgatory_clients; - current_version; - purgatory_clients = Queue.create (); - server = server_process; - server_start_options; - retries = 0; - ignore_hh_version = Informant.should_ignore_hh_version informant_init_env; - } in - env, monitor_config, socket - - let start_monitoring ?current_version ~waiting_client ~max_purgatory_clients - server_start_options informant_init_env - monitor_config = - let env, monitor_config, socket = start_monitor - ?current_version ~waiting_client ~max_purgatory_clients - server_start_options informant_init_env monitor_config in - check_and_run_loop env monitor_config socket -end diff --git a/hack/monitor/serverMonitor.mli b/hack/monitor/serverMonitor.mli deleted file mode 100644 index 02867b24cf5..00000000000 --- a/hack/monitor/serverMonitor.mli +++ /dev/null @@ -1,39 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -module Make_monitor : - functor (SC : ServerMonitorUtils.Server_config) -> - functor (Informant : Informant_sig.S) -> - sig - type t - - (** Start a monitor without running the check loop. Useful for testing. *) - val start_monitor: - ?current_version:string -> - waiting_client:Unix.file_descr option -> - max_purgatory_clients:int -> - SC.server_start_options -> - Informant.init_env -> - ServerMonitorUtils.monitor_config -> - t - - (** Run the check loop once. Useful for testing. *) - val check_and_run_loop_once : t -> t - - (** Start the monitor and repeatedly run the check and run loop. - * Does not return. *) - val start_monitoring: - ?current_version:string -> - waiting_client:Unix.file_descr option -> - max_purgatory_clients:int -> - SC.server_start_options -> - Informant.init_env -> - ServerMonitorUtils.monitor_config -> - 'a - end diff --git a/hack/monitor/serverMonitorUtils.ml b/hack/monitor/serverMonitorUtils.ml deleted file mode 100644 index 3e3bff31575..00000000000 --- a/hack/monitor/serverMonitorUtils.ml +++ /dev/null @@ -1,140 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type monitor_config = - { - (** The socket file on which the monitor is listening for connections. *) - socket_file: string; - (** This lock is held when a monitor is alive. 
*) - lock_file: string; - (** The path to the server log file *) - server_log_file: string; - (** The path to the monitor log file *) - monitor_log_file: string; - } - -(** In an Informant-directed restart, Watchman provided a new - * mergebase, a new clock, and a list of files changed w.r.t. - * that mergebase. - * - * A new server instance can "resume" from that new mergebase - * given that it handles the list of files changed w.r.t. that - * new mergebase, and just starts a watchman subscription - * beginning with that clock. - *) -type watchman_mergebase = { - (** Watchman says current repo mergebase is this. *) - mergebase_svn_rev : int; - (** ... plus these files changed to represent its current state *) - files_changed : SSet.t; - (** ...as of this clock *) - watchman_clock : string; -} - -(** Informant-induced restart may specify the mini saved state - * we should load from. *) -type target_mini_state = { - mini_state_everstore_handle : string; - (** The SVN revision to which the above handle corresponds to. *) - target_svn_rev : int; - watchman_mergebase : watchman_mergebase option; -} - -let watchman_mergebase_to_string { mergebase_svn_rev; files_changed; watchman_clock; } = - Printf.sprintf - "watchman_mergebase (mergebase_svn_rev: %d; files_changed count: %d; watchman_clock: %s)" - mergebase_svn_rev - (SSet.cardinal files_changed) - watchman_clock - -module type Server_config = sig - - type server_start_options - - (** Start the server. Optionally takes in the exit code of the previously - * running server that exited. *) - val start_server : - ?target_mini_state:target_mini_state -> - informant_managed:bool -> - prior_exit_status:(int option) -> - server_start_options -> - ServerProcess.process_data - - val kill_server : ServerProcess.process_data -> unit - - val wait_for_server_exit : ServerProcess.process_data -> - float (** Kill signal time *) -> - unit - - val wait_pid : ServerProcess.process_data -> int * Unix.process_status - - (** Callback to run when server exits *) - val on_server_exit : monitor_config -> unit -end - -type build_mismatch_info = - { - existing_version: string; - existing_build_commit_time: string; - existing_argv: string list; - existing_launch_time: float; - } - -let current_build_info = - { - existing_version = Build_id.build_revision; - existing_build_commit_time = Build_id.get_build_commit_time_string (); - existing_argv = Array.to_list Sys.argv; - existing_launch_time = Unix.gettimeofday (); - } - -type connection_error = - (** - * This should be rare. The monitor rapidly accepts connections and does - * the version ID check very quickly. Only under very heavy load will that - * sequence time out. *) - | Monitor_establish_connection_timeout - | Server_missing - (** There is a brief period of time after the Monitor has grabbed its - * liveness lock and before it starts listening in on the socket - * (which can only happen after the socket file is created). During that - * period, either the socket file doesn't exist yet, or socket connections - * are refused. *) - | Monitor_socket_not_ready - | Server_died - (** Server dormant and can't join the (now full) queue of connections - * waiting for the next server. 
*) - | Server_dormant - | Build_id_mismatched of build_mismatch_info option - | Monitor_connection_failure - -type connection_state = - | Connection_ok - (* Build_is_mismatch is never used, but it can't be removed, because *) - (* the sequence of constructors here is part of the binary protocol *) - (* we want to support between mismatched versions of client_server. *) - | Build_id_mismatch - (* Build_id_mismatch_ex *is* used. *) - | Build_id_mismatch_ex of build_mismatch_info - -(** Result of a shutdown monitor RPC. *) -type shutdown_result = - (** Request sent and channel hung up, indicating the process has exited. *) - | SHUTDOWN_VERIFIED - (** Request sent, but channel hasn't hung up. *) - | SHUTDOWN_UNVERIFIED - -(* Message we send to the --waiting-client *) -let ready = "ready" - -let exit_if_parent_dead () = -(** Cross-platform compatible way; parent PID becomes 1 when parent dies. *) - if Unix.getppid() = 1 then - (Hh_logger.log "Server's parent has died; exiting.\n"; - Exit_status.exit Exit_status.Lost_parent_monitor); diff --git a/hack/monitor/serverProcess.ml b/hack/monitor/serverProcess.ml deleted file mode 100644 index 4f583797b70..00000000000 --- a/hack/monitor/serverProcess.ml +++ /dev/null @@ -1,86 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -type process_data = - { - (** Process ID. *) - pid : int; - start_t : float; - (** Get occasional updates about status/busyness from typechecker here. *) - in_fd: Unix.file_descr; - (** Send client's File Descriptors to the typechecker over this. *) - out_fds : (string * Unix.file_descr) list; - last_request_handoff : float ref; - } - -type server_process = - | Not_yet_started - | Alive of process_data - | Informant_killed - (** When the server crashes, we want to track that it has crashed and report - * that crash info to the next hh_client that connects. We keep that info - * here. *) - | Died_unexpectedly of Unix.process_status * bool - (** - * The problem we need to solve is this: when the Monitor wants to start - * a new Server instance, it might not be safe to do so because we might - * end up running the a version of the Server not meant for this state of - * the repo (as specified in the .hhconfig file). - * - * Monitor might want to start a Server because the last one died (crashed, - * or exited due to hhconfig change), or the Informant has decided to start - * a new instance on a better saved state. Because the .hhconfig file has - * indicated a version change is occuring and the logic to parse this version - * change and locate the binaries for that version are not inside server - * code, we need to force that logic to be exercised on the user side, and - * only start a new server after we've confirmed that it has been exercised - * (which is confirmed by a new client connecting and succeeding the build_id - * handshake; alternatively, failing that handshake and then this Monitor - * correctly exits). - * - * So, whenever we want to start a new Server, the Monitor must check the - * hhconfig file's version number. If it doesn't match the version at the - * time of this Monitor's startup, we enter this state Died_config_changed. - * - * Only a new client connection after entering this state can transition - * use away from this state. 
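To summarize the removed ServerProcess.server_process states discussed above, a hypothetical helper that renders them for logging could look like this; the wording of each message is illustrative.

(* Hypothetical pretty-printer over the removed server_process type. *)
let describe_server_process (p : ServerProcess.server_process) : string =
  match p with
  | ServerProcess.Not_yet_started -> "not yet started"
  | ServerProcess.Alive data ->
    Printf.sprintf "alive (pid %d)" data.ServerProcess.pid
  | ServerProcess.Informant_killed -> "killed by the informant"
  | ServerProcess.Died_config_changed ->
    "dead: .hhconfig version changed; waiting for a new client"
  | ServerProcess.Died_unexpectedly (_proc_status, was_oom) ->
    if was_oom then "died unexpectedly (OOM)" else "died unexpectedly"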
- * - * NB: Monitor could be mid-way handling a new client connection when it - * processes the next Informant decision. In which case, we can't guarantee - * that the client that connected did in fact exercise the version lookup - * logic; so the only safe thing to do is enter the Died_config_changed - * state until the *next* client that connects. - * - * These transitions are centralized in ServerMonitor.kill_and_maybe_restart_server - * Don't do them elsewhere or you will screw it up. - * - * State transition looks sort of like: - * - * - * [ Died_config_changed ] - * ^ \ - * / \ new client connection - * maybe / \ connection triggers - * restart but / \ maybe_restart which actually - * config not mat/ching \ does start one this time - * / \ - * / \ - * [ Any server state ]-------------> [ new server instance ] - * restart - * and - * config - * matches - * - * - * Why don't we just exit the Monitor automatically, instead of keeping it - * around with a server in this Died_config_changed state? We want Nuclide - * to know that things are dandy and it should retry connecting without - * the user having to click anything. - *) - | Died_config_changed diff --git a/hack/monitor/serverProcessTools.ml b/hack/monitor/serverProcessTools.ml deleted file mode 100644 index ef135cd9f7f..00000000000 --- a/hack/monitor/serverProcessTools.ml +++ /dev/null @@ -1,30 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -open ServerProcess -open ServerMonitorUtils - -let check_exit_status proc_stat process monitor_config = - match proc_stat with - | Unix.WEXITED 0 -> () - | _ -> - let exit_kind, exit_code = Exit_status.unpack proc_stat in - Hh_logger.log "typechecker %s with exit code %d\n" exit_kind exit_code; - let is_oom = match proc_stat with - | Unix.WEXITED i when i = (Exit_status.exit_code Exit_status.Worker_oomed) -> true - | _ -> false - in - let is_oom = is_oom || try - Sys_utils.check_dmesg_for_oom process.pid "hh_server" with _ -> false in - let time_taken = Unix.time () -. process.start_t in - HackEventLogger.bad_exit - time_taken proc_stat - (monitor_config.server_log_file, - monitor_config.monitor_log_file) - ~is_oom diff --git a/hack/procs/bucket.ml b/hack/procs/bucket.ml index 883e492824a..5e59fcc4b77 100644 --- a/hack/procs/bucket.ml +++ b/hack/procs/bucket.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,6 +7,8 @@ * *) +open Core_kernel + (****************************************************************************) (* Moduling Making buckets. 
* When we parallelize, we need to create "buckets" of tasks for the @@ -22,8 +24,7 @@ type 'a bucket = | Wait | Done -type 'a next = - unit -> 'a bucket +type 'a next = unit -> 'a bucket let max_size_ref = ref 500 @@ -32,9 +33,10 @@ let max_size () = !max_size_ref let set_max_bucket_size x = max_size_ref := x let calculate_bucket_size ~num_jobs ~num_workers ~max_size = - if num_jobs < num_workers * max_size - then max 1 (1 + (num_jobs / num_workers)) - else max_size + if num_jobs < num_workers * max_size then + max 1 (1 + (num_jobs / num_workers)) + else + max_size let make_ progress_fn bucket_size jobs = let i = ref 0 in @@ -46,10 +48,14 @@ let make_ progress_fn bucket_size jobs = Array.to_list result let make_list ~num_workers ?progress_fn ?max_size jobs = - let progress_fn = Option.value ~default:(fun ~total:_ ~start:_ ~length:_ -> ()) progress_fn in + let progress_fn = + Option.value ~default:(fun ~total:_ ~start:_ ~length:_ -> ()) progress_fn + in let max_size = Option.value max_size ~default:!max_size_ref in let jobs = Array.of_list jobs in - let bucket_size = calculate_bucket_size ~num_jobs:(Array.length jobs) ~num_workers ~max_size in + let bucket_size = + calculate_bucket_size ~num_jobs:(Array.length jobs) ~num_workers ~max_size + in make_ (progress_fn ~total:(Array.length jobs)) bucket_size jobs let of_list = function @@ -59,16 +65,20 @@ let of_list = function let make ~num_workers ?progress_fn ?max_size jobs = let max_size = Option.value max_size ~default:!max_size_ref in let maker = make_list ~num_workers ?progress_fn ~max_size jobs in - fun () -> of_list (maker ()) + (fun () -> of_list (maker ())) -type 'a of_n = { work: 'a; bucket: int; total: int } +type 'a of_n = { + work: 'a; + bucket: int; + total: int; +} let make_n_buckets ~buckets ~split = let next_bucket = ref 0 in fun () -> let current = !next_bucket in incr next_bucket; - if (current < buckets) then + if current < buckets then Job { work = split ~bucket:current; bucket = current; total = buckets } else Done diff --git a/hack/procs/bucket.mli b/hack/procs/bucket.mli index d6f6c944f5a..8eda085824b 100644 --- a/hack/procs/bucket.mli +++ b/hack/procs/bucket.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * @@ -15,14 +15,14 @@ type 'a bucket = | Wait | Done - - -type 'a next = - unit -> 'a bucket +type 'a next = unit -> 'a bucket val set_max_bucket_size : int -> unit + val max_size : unit -> int +val calculate_bucket_size : + num_jobs:int -> num_workers:int -> max_size:int -> int (** Given a number of jobs, number of workers, and a maximum bucket size, will calculate the optimal bucket size to get the work done as quickly as possible. @@ -30,7 +30,6 @@ val max_size : unit -> int Specifically, if the number of jobs is less than the number of workers times the maximum bucket size, smaller bucket sizes will be returned in order to utilize as many workers as possible. *) -val calculate_bucket_size : num_jobs:int -> num_workers:int -> max_size:int -> int (* Makes a bucket out of a list, without regard for number of workers or the size of the list. *) @@ -43,15 +42,18 @@ val make : 'a list -> 'a list next -type 'a of_n = { work: 'a; bucket: int; total: int } +type 'a of_n = { + work: 'a; + bucket: int; + total: int; +} +val make_n_buckets : buckets:int -> split:(bucket:int -> 'a) -> 'a of_n next (** * Make n buckets (where n = "buckets"). * * The "split" function provides the workload for the k'th bucket. 
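As a usage illustration of the Bucket interface shown in this hunk, the following hypothetical snippet splits ten items into three fixed buckets and drains them; the item list and the printing are not part of the diff.

(* Hypothetical driver for Bucket.make_n_buckets: three buckets over 0..9. *)
let drain_three_buckets () =
  let next =
    Bucket.make_n_buckets ~buckets:3 ~split:(fun ~bucket ->
        List.filter (fun i -> i mod 3 = bucket) [0; 1; 2; 3; 4; 5; 6; 7; 8; 9])
  in
  let rec drain acc =
    match next () with
    | Bucket.Job { Bucket.work; bucket; total } ->
      Printf.printf "bucket %d of %d carries %d items\n" (bucket + 1) total
        (List.length work);
      drain (work :: acc)
    (* make_n_buckets never yields Wait, but handle it for totality. *)
    | Bucket.Wait -> drain acc
    | Bucket.Done -> List.rev acc
  in
  drain []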
*) -val make_n_buckets : buckets:int -> split:(bucket:int -> 'a) -> - 'a of_n next (* Specialized version to split into lists only. *) val make_list : @@ -59,4 +61,5 @@ val make_list : ?progress_fn:(total:int -> start:int -> length:int -> unit) -> ?max_size:int -> 'a list -> - (unit -> 'a list) + unit -> + 'a list diff --git a/hack/procs/dune b/hack/procs/dune new file mode 100644 index 00000000000..f021cfe80d2 --- /dev/null +++ b/hack/procs/dune @@ -0,0 +1,26 @@ +(library + (name procs_bucket) + (wrapped false) + (modules + bucket) + (libraries + core_kernel + imported_core)) + +(library + (name procs_procs) + (wrapped false) + (modules + mem_profile + multiThreadedCall + multiWorker + worker + workerController) + (libraries + core_kernel + heap_shared_mem + marshal_tools + procs_bucket + procfs + sys_utils + worker_cancel)) diff --git a/hack/procs/mem_profile.ml b/hack/procs/mem_profile.ml index 4ec5666966f..1eda45f193c 100644 --- a/hack/procs/mem_profile.ml +++ b/hack/procs/mem_profile.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * @@ -10,4 +10,5 @@ (* See src/facebook/profile/statMemProfMemProfile.ml for the implementation we use for statmemprof. *) let start () = () + let stop () = () diff --git a/hack/procs/mem_profile.mli b/hack/procs/mem_profile.mli index 478756e8439..592e3e5e5c0 100644 --- a/hack/procs/mem_profile.mli +++ b/hack/procs/mem_profile.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * @@ -7,5 +7,6 @@ * *) -val start: unit -> unit -val stop: unit -> unit +val start : unit -> unit + +val stop : unit -> unit diff --git a/hack/procs/multiThreadedCall.ml b/hack/procs/multiThreadedCall.ml index e7646fe1f34..e5802b2c169 100644 --- a/hack/procs/multiThreadedCall.ml +++ b/hack/procs/multiThreadedCall.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,32 +7,39 @@ * *) -open Hh_core +module Hh_bucket = Bucket +open Core_kernel -exception Coalesced_failures of (WorkerController.worker_failure list) +exception Coalesced_failures of WorkerController.worker_failure list let coalesced_failures_to_string failures = let failure_strings = - List.map failures ~f:WorkerController.failure_to_string in - Printf.sprintf "Coalesced_failures[%s]" (String.concat ", " failure_strings) + List.map failures ~f:WorkerController.failure_to_string + in + Printf.sprintf + "Coalesced_failures[%s]" + (String.concat ~sep:", " failure_strings) -let () = Printexc.register_printer @@ function +let () = + Caml.Printexc.register_printer + @@ function | Coalesced_failures failures -> Some (coalesced_failures_to_string failures) | _ -> None -type interrupt_result = Cancel | Continue +type interrupt_result = + | Cancel + | Continue type 'env interrupt_handler = 'env -> 'env * interrupt_result type 'env interrupt_config = { - env : 'env; - handlers : 'env -> (Unix.file_descr * 'env interrupt_handler) list; + env: 'env; + handlers: 'env -> (Unix.file_descr * 'env interrupt_handler) list; } -let no_interrupt env = { - handlers = (fun _ -> []); - env; -} +type worker_id = int + +let no_interrupt env = { handlers = (fun _ -> []); env } (* Integer that increases with every invocation of multi_threaded_call, used to * distinguish worker handles that belong to current job vs those that are still @@ -45,166 +52,199 @@ let call_id = ref 0 * them is to log and exit. Setting on_exception handler allows you to do it * before any caller has a chance to catch the exception and attempt to handle * it. 
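Tying the interrupt types in this hunk together, a hypothetical interrupt configuration that cancels the in-flight call as soon as a given descriptor becomes readable could be written as follows; the function name is illustrative.

(* Hypothetical interrupt_config built from the types above: one watched fd,
 * whose readiness requests cancellation of the multi-worker call. *)
let cancel_when_readable (fd : Unix.file_descr) env =
  let handler env = (env, Cancel) in
  { env; handlers = (fun _env -> [(fd, handler)]) }

Passing no_interrupt env instead, as defined above, registers no handlers and so leaves the call uninterruptible.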
*) -let nested_exception: (exn * Utils.callstack) option ref = ref None -let on_exception_ref = ref (fun (e,stack) -> nested_exception := Some (e, stack)) +let nested_exception : (exn * Utils.callstack) option ref = ref None -let multi_threaded_call - (type a) (type b) (type c) (type d) - workers - (job: c -> a -> b) - (merge: b -> c -> c) - (neutral: c) - (next: a Bucket.next) - ?(on_cancelled : (unit -> a list) option) - (interrupt: d interrupt_config) = +let on_exception_ref = + ref (fun (e, stack) -> nested_exception := Some (e, stack)) +let multi_threaded_call + (type a b c d) + workers + (job : worker_id * c -> a -> b) + (merge : worker_id * b -> c -> c) + (neutral : c) + (next : a Hh_bucket.next) + ?(on_cancelled : (unit -> a list) option) + (interrupt : d interrupt_config) = incr call_id; let call_id = !call_id in - (* Split workers into those that are free, and those that are still doing * previous jobs. *) - let workers, handles = List.fold workers - ~init:([], []) - ~f:begin fun (workers, handles) worker -> - (* Note than now some handles have mismatched types. We need to remember - * to check their get_call_id against this multi_threaded_call call_id - * before trusting the types. *) - match WorkerController.get_handle_UNSAFE worker with - | None -> worker::workers, handles - | Some handle -> workers, handle::handles - end + let (workers, handles) = + List.fold workers ~init:([], []) ~f:(fun (workers, handles) worker -> + (* Note than now some handles have mismatched types. We need to remember + * to check their get_call_id against this multi_threaded_call call_id + * before trusting the types. *) + match WorkerController.get_handle_UNSAFE worker with + | None -> (worker :: workers, handles) + | Some handle -> (workers, handle :: handles)) in - - let is_current h = (call_id = (WorkerController.get_call_id h)) in - + let is_current h = call_id = WorkerController.get_call_id h in (* merge accumulator, leaving environment and interrupt handlers untouched *) - let merge x (y1, y2, y3) = merge x y1, y2, y3 in - + let merge x (y1, y2, y3) = (merge x y1, y2, y3) in (* interrupt handlers are irrelevant after job is done *) - let unpack_result (acc, env, _handlers) = acc, env in - + let unpack_result (acc, env, _handlers) = (acc, env) in let handler_fds (_, _, handlers) = List.map handlers ~f:fst in - - let rec add_pending acc = match next () with - | Bucket.Done -> acc - | Bucket.Job a -> add_pending (a::acc) - | Bucket.Wait -> + let rec add_pending acc = + match next () with + | Hh_bucket.Done -> acc + | Hh_bucket.Job a -> add_pending (a :: acc) + | Hh_bucket.Wait -> (* There's not really a good solution to generically getting the pending work items when attempting to cancel a job that's in the Wait state, so we depend on those jobs to determine their own state in the [on_cancelled] handler. 
*) failwith "cancelling jobs with Wait not supported" in - (* When a job is cancelled, return all the jobs that were not started OR were * cancelled in the middle (so you better hope they are idempotent).*) let check_cancel handles ready_fds (acc, env, handlers) = - let env, decision, handlers = List.fold handlers - ~init:(env, Continue, handlers) - ~f:begin fun (env, decision, handlers) (fd, handler) -> - if decision = Cancel || not @@ List.mem ready_fds fd - then env, decision, handlers else - let env, decision = handler env in - (* Re-raise the exception even if handler have caught and ignored it *) - Option.iter !nested_exception ~f:(fun (x, _stack) -> raise x); - (* running a handler could have changed the handlers, - * so need to regenerate them based on new environment *) - let handlers = interrupt.handlers env in - env, decision, handlers - end + let (env, decision, handlers) = + List.fold + handlers + ~init:(env, Continue, handlers) + ~f:(fun (env, decision, handlers) (fd, handler) -> + if decision = Cancel || (not @@ List.mem ~equal:( = ) ready_fds fd) + then + (env, decision, handlers) + else + let (env, decision) = handler env in + (* Re-raise the exception even if handler have caught and ignored it *) + Option.iter !nested_exception ~f:(fun (x, _stack) -> raise x); + + (* running a handler could have changed the handlers, + * so need to regenerate them based on new environment *) + let handlers = interrupt.handlers env in + (env, decision, handlers)) in - let res = acc, env, handlers in - if decision = Cancel then begin + let res = (acc, env, handlers) in + if decision = Cancel then ( WorkerController.cancel handles; - let unfinished = match on_cancelled with + let unfinished = + match on_cancelled with | Some f -> f () | None -> let unfinished = List.map handles ~f:WorkerController.get_job in add_pending unfinished in - res, Some unfinished - end else res, None in - + (res, Some unfinished) + ) else + (res, None) + in let rec dispatch workers handles acc = (* 'worker' represents available workers. *) (* 'handles' represents pendings jobs. *) (* 'acc' are the accumulated results. *) match workers with - | None when (not @@ List.exists handles ~f:is_current) -> + | None when not @@ List.exists handles ~f:is_current -> (* No more handles at this recursion level *) - unpack_result acc, [] + (unpack_result acc, []) | None (* No more jobs to start *) | Some [] -> - (* No worker available: wait for some workers to finish. *) - collect [] handles acc + (* No worker available: wait for some workers to finish. *) + collect [] handles acc | Some (worker :: workers) -> - (* At least one worker is available... *) - match next () with - | Bucket.Wait -> collect (worker :: workers) handles acc - | Bucket.Done -> - (* ... but no more job to be distributed, let's collect results. *) - dispatch None handles acc - | Bucket.Job bucket -> - (* ... send a job to the worker.*) - let handle = - WorkerController.call ~call_id worker - (fun xl -> job neutral xl) - bucket in - dispatch (Some workers) (handle :: handles) acc + (* At least one worker is available... *) + (match next () with + | Hh_bucket.Wait -> collect (worker :: workers) handles acc + | Hh_bucket.Done -> + (* ... but no more job to be distributed, let's collect results. *) + dispatch None handles acc + | Hh_bucket.Job bucket -> + (* ... 
send a job to the worker.*) + let worker_id = WorkerController.worker_id worker in + let handle = + WorkerController.call + ~call_id + worker + (fun xl -> job (worker_id, neutral) xl) + bucket + in + dispatch (Some workers) (handle :: handles) acc) and collect workers handles acc = let { WorkerController.readys; waiters; ready_fds } = - WorkerController.select handles (handler_fds acc) in + WorkerController.select handles (handler_fds acc) + in let workers = List.map ~f:WorkerController.get_worker readys @ workers in (* Collect the results. *) - let acc, failures = - (** Fold the results of all the finished workers. Also, coalesce the exit + let (acc, failures) = + (* Fold the results of all the finished workers. Also, coalesce the exit * statuses for all the failed workers. *) List.fold_left - ~f:begin fun (acc, failures) h -> - try - let res = WorkerController.get_result h in - (* Results for handles from other calls are cached by get_result - * and will be retrieved later, so we ignore them here *) - let acc = if is_current h then merge res acc else acc in - acc, failures - with - | WorkerController.Worker_failed (_, failure) -> - acc, (failure :: failures) - end + ~f: + begin + fun (acc, failures) h -> + try + let res = WorkerController.get_result h in + (* Results for handles from other calls are cached by get_result + * and will be retrieved later, so we ignore them here *) + let acc = + if is_current h then + let worker_id = + WorkerController.get_worker h |> WorkerController.worker_id + in + merge (worker_id, res) acc + else + acc + in + (acc, failures) + with WorkerController.Worker_failed (_, failure) -> + (acc, failure :: failures) + end ~init:(acc, []) - readys in - if (failures <> []) then - (** If any single worker failed, we stop fanning out more jobs. *) + readys + in + if failures <> [] then + (* If any single worker failed, we stop fanning out more jobs. *) raise (Coalesced_failures failures) else - match check_cancel waiters ready_fds acc with - | acc, Some unfinished -> unpack_result acc, unfinished - | acc, None -> - (* And continue.. *) - dispatch (Some workers) waiters acc in + match check_cancel waiters ready_fds acc with + | (acc, Some unfinished) -> (unpack_result acc, unfinished) + | (acc, None) -> + (* And continue.. 
*) + dispatch (Some workers) waiters acc + in try let () = nested_exception := None in - dispatch (Some workers) handles (neutral, interrupt.env, interrupt.handlers interrupt.env) + dispatch + (Some workers) + handles + (neutral, interrupt.env, interrupt.handlers interrupt.env) with e -> let stack = Utils.Callstack (Printexc.get_backtrace ()) in !on_exception_ref (e, stack); raise e +let call_with_worker_id workers job merge neutral next = + let ((res, ()), unfinished) = + multi_threaded_call workers job merge neutral next (no_interrupt ()) + in + assert (unfinished = []); + res + let call workers job merge neutral next = - let (res, ()), unfinished = - multi_threaded_call workers job merge neutral next (no_interrupt ()) in + let job (_id, a) b = job a b in + let merge (_id, a) b = merge a b in + let ((res, ()), unfinished) = + multi_threaded_call workers job merge neutral next (no_interrupt ()) + in assert (unfinished = []); res -let call_with_interrupt workers job merge neutral next ?on_cancelled interrupt = +let call_with_interrupt workers job merge neutral next ?on_cancelled interrupt + = SharedMem.allow_removes false; + (* Interrupting of nested jobs is not implemented *) - assert (List.for_all workers - ~f:(fun x -> Option.is_none @@ WorkerController.get_handle_UNSAFE x) - ); - let (res, interrupt_env), unfinished = - multi_threaded_call workers job merge neutral next ?on_cancelled interrupt in + assert ( + List.for_all workers ~f:(fun x -> + Option.is_none @@ WorkerController.get_handle_UNSAFE x) ); + let job (_id, a) b = job a b in + let merge (_id, a) b = merge a b in + let ((res, interrupt_env), unfinished) = + multi_threaded_call workers job merge neutral next ?on_cancelled interrupt + in SharedMem.allow_removes true; - res, interrupt_env, unfinished + (res, interrupt_env, unfinished) let on_exception f = on_exception_ref := f diff --git a/hack/procs/multiThreadedCall.mli b/hack/procs/multiThreadedCall.mli index 4a871484578..3b7e810c9fb 100644 --- a/hack/procs/multiThreadedCall.mli +++ b/hack/procs/multiThreadedCall.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,6 +7,7 @@ * *) +exception Coalesced_failures of WorkerController.worker_failure list (** If a worker process fails, this is raised. * * Note: When one worker process fails, the remaining in-progress workers are checked @@ -16,23 +17,25 @@ * No further buckets are distributed to workers. * * Still-in-progress workers are left to their own accord. *) -exception Coalesced_failures of (WorkerController.worker_failure list) -val coalesced_failures_to_string: +val coalesced_failures_to_string : WorkerController.worker_failure list -> string -type interrupt_result = Cancel | Continue +type interrupt_result = + | Cancel + | Continue type 'env interrupt_handler = 'env -> 'env * interrupt_result type 'env interrupt_config = { - env : 'env; - handlers : 'env -> (Unix.file_descr * 'env interrupt_handler) list; + env: 'env; + handlers: 'env -> (Unix.file_descr * 'env interrupt_handler) list; } +type worker_id = int + val no_interrupt : 'a -> 'a interrupt_config -(** Can raise Coalesced_failures exception. *) val call : WorkerController.worker list -> ('c -> 'a -> 'b) -> @@ -40,17 +43,31 @@ val call : 'c -> 'a Bucket.next -> 'c +(** Can raise Coalesced_failures exception. *) + +val call_with_worker_id : + WorkerController.worker list -> + (worker_id * 'c -> 'a -> 'b) -> + (worker_id * 'b -> 'c -> 'c) -> + 'c -> + 'a Bucket.next -> + 'c +(** Invokes merge with a unique worker id. 
+ Can raise Coalesced_failures exception. *) val call_with_interrupt : WorkerController.worker list -> ('c -> 'a -> 'b) -> - ('b -> 'c -> 'c) -> 'c -> + ('b -> 'c -> 'c) -> + 'c -> 'a Bucket.next -> - (* [on_cancelled] should be specified if your [next] function ever returns + ?on_cancelled: + ((* [on_cancelled] should be specified if your [next] function ever returns [Bucket.Wait], and it should return the list of all jobs that haven't finished or started yet. *) - ?on_cancelled:(unit -> 'a list) -> + unit -> + 'a list) -> 'd interrupt_config -> 'c * 'd * 'a list -val on_exception : ((exn * Utils.callstack) -> unit) -> unit +val on_exception : (exn * Utils.callstack -> unit) -> unit diff --git a/hack/procs/multiWorker.ml b/hack/procs/multiWorker.ml index fee3d283e85..442c4cdf089 100644 --- a/hack/procs/multiWorker.ml +++ b/hack/procs/multiWorker.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,76 +7,95 @@ * *) -open Hh_core +module Hh_bucket = Bucket +open Core_kernel (* Hide the worker type from our users *) type worker = WorkerController.worker type 'a interrupt_config = 'a MultiThreadedCall.interrupt_config -let single_threaded_call job merge neutral next = - let x = ref (next()) in +let single_threaded_call_with_worker_id job merge neutral next = + let x = ref (next ()) in let acc = ref neutral in (* This is a just a sanity check that the job is serializable and so * that the same code will work both in single threaded and parallel * mode. *) let _ = Marshal.to_string job [Marshal.Closures] in - while !x <> Bucket.Done do + while !x <> Hh_bucket.Done do match !x with - | Bucket.Wait -> - (* this state should never be reached in single threaded mode, since + | Hh_bucket.Wait -> + (* this state should never be reached in single threaded mode, since there is no hope for ever getting out of this state *) - failwith "stuck!" - | Bucket.Job l -> - let res = job neutral l in - acc := merge res !acc; - x := next() - | Bucket.Done -> () + failwith "stuck!" 
+ | Hh_bucket.Job l -> + let res = job (0, neutral) l in + acc := merge (0, res) !acc; + x := next () + | Hh_bucket.Done -> () done; !acc +let single_threaded_call job merge neutral next = + let job (_worker_id, a) b = job a b in + let merge (_worker_id, a) b = merge a b in + single_threaded_call_with_worker_id job merge neutral next + module type CALLER = sig type 'a result - val return: 'a -> 'a result + val return : 'a -> 'a result - val multi_threaded_call: + val multi_threaded_call : WorkerController.worker list -> - ('c -> 'a -> 'b) -> - ('b -> 'c -> 'c) -> + (WorkerController.worker_id * 'c -> 'a -> 'b) -> + (WorkerController.worker_id * 'b -> 'c -> 'c) -> 'c -> - 'a Bucket.next -> + 'a Hh_bucket.next -> 'c result end -module CallFunctor(Caller: CALLER): sig - val call: +module CallFunctor (Caller : CALLER) : sig + val call : WorkerController.worker list option -> - job:('c -> 'a -> 'b) -> - merge:('b -> 'c -> 'c) -> neutral:'c -> - next:'a Bucket.next -> + job:(WorkerController.worker_id * 'c -> 'a -> 'b) -> + merge:(WorkerController.worker_id * 'b -> 'c -> 'c) -> + neutral:'c -> + next:'a Hh_bucket.next -> 'c Caller.result end = struct let call workers ~job ~merge ~neutral ~next = match workers with - | None -> - Caller.return (single_threaded_call job merge neutral next) - | Some workers -> Caller.multi_threaded_call workers job merge neutral next + | None -> + Caller.return + (single_threaded_call_with_worker_id job merge neutral next) + | Some workers -> Caller.multi_threaded_call workers job merge neutral next end -module Call = CallFunctor(struct +module Call = CallFunctor (struct type 'a result = 'a + let return x = x - let multi_threaded_call = MultiThreadedCall.call + + let multi_threaded_call = MultiThreadedCall.call_with_worker_id end) -let call = Call.call +let call_with_worker_id = Call.call + +let call workers ~job ~merge ~neutral ~next = + let job (_worker_id, a) b = job a b in + let merge (_worker_id, a) b = merge a b in + Call.call workers ~job ~merge ~neutral ~next (* If we ever want this in MultiWorkerLwt then move this into CallFunctor *) -let call_with_interrupt ?on_cancelled workers ~job ~merge ~neutral ~next ~interrupt = +let call_with_interrupt + ?on_cancelled workers ~job ~merge ~neutral ~next ~interrupt = match workers with - | None -> single_threaded_call job merge neutral next, interrupt.MultiThreadedCall.env, [] + | None -> + ( single_threaded_call job merge neutral next, + interrupt.MultiThreadedCall.env, + [] ) | Some workers -> MultiThreadedCall.call_with_interrupt ?on_cancelled @@ -88,8 +107,11 @@ let call_with_interrupt ?on_cancelled workers ~job ~merge ~neutral ~next ~interr interrupt let next ?progress_fn ?max_size workers = - Bucket.make - ~num_workers: (match workers with Some w -> List.length w | None -> 1) + Hh_bucket.make + ~num_workers: + (match workers with + | Some w -> List.length w + | None -> 1) ?progress_fn ?max_size diff --git a/hack/procs/multiWorker.mli b/hack/procs/multiWorker.mli index ae4fda6bc32..ade1da20de1 100644 --- a/hack/procs/multiWorker.mli +++ b/hack/procs/multiWorker.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,6 +7,9 @@ * *) +module Hh_bucket = Bucket +open Core_kernel + (* The protocol for a next function is to return a list of elements. * It will be called repeatedly until it returns an empty list. 
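As a concrete reading of the protocol described above, here is a hand-rolled next function; this is only a sketch, assuming the Bucket.Job/Bucket.Wait/Bucket.Done constructors and the thunk type used throughout this diff, and the bucket size of 10 is arbitrary.

let chunked_next (items : 'a list) : 'a list Bucket.next =
  let remaining = ref items in
  fun () ->
    match !remaining with
    | [] -> Bucket.Done
    | l ->
      (* hand out at most 10 elements per bucket; never returns Bucket.Wait *)
      let rec split n = function
        | x :: rest when n > 0 ->
          let (taken, rest) = split (n - 1) rest in
          (x :: taken, rest)
        | rest -> ([], rest)
      in
      let (bucket, rest) = split 10 l in
      remaining := rest;
      Bucket.Job bucket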
*) @@ -14,23 +17,24 @@ module type CALLER = sig type 'a result - val return: 'a -> 'a result + val return : 'a -> 'a result - val multi_threaded_call: + val multi_threaded_call : WorkerController.worker list -> - ('c -> 'a -> 'b) -> - ('b -> 'c -> 'c) -> + (WorkerController.worker_id * 'c -> 'a -> 'b) -> + (WorkerController.worker_id * 'b -> 'c -> 'c) -> 'c -> - 'a Bucket.next -> + 'a Hh_bucket.next -> 'c result end -module CallFunctor : functor (Caller: CALLER) -> sig - val call: +module CallFunctor (Caller : CALLER) : sig + val call : WorkerController.worker list option -> - job:('c -> 'a -> 'b) -> - merge:('b -> 'c -> 'c) -> neutral:'c -> - next:'a Bucket.next -> + job:(WorkerController.worker_id * 'c -> 'a -> 'b) -> + merge:(WorkerController.worker_id * 'b -> 'c -> 'c) -> + neutral:'c -> + next:'a Hh_bucket.next -> 'c Caller.result end @@ -42,38 +46,52 @@ type 'a interrupt_config = 'a MultiThreadedCall.interrupt_config val next : ?progress_fn:(total:int -> start:int -> length:int -> unit) -> - ?max_size: int -> + ?max_size:int -> worker list option -> 'a list -> - 'a list Bucket.next + 'a list Hh_bucket.next -(** Can raise MultiThreadedCall.Coalesced_failures unless in single-threaded mode. *) +(* Can raise MultiThreadedCall.Coalesced_failures unless in single-threaded mode. *) val call : worker list option -> job:('c -> 'a -> 'b) -> - merge:('b -> 'c -> 'c) -> neutral:'c -> - next:'a Bucket.next -> + merge:('b -> 'c -> 'c) -> + neutral:'c -> + next:'a Hh_bucket.next -> + 'c + +(* Can raise MultiThreadedCall.Coalesced_failures unless in single-threaded mode. *) +val call_with_worker_id : + worker list option -> + job:(WorkerController.worker_id * 'c -> 'a -> 'b) -> + merge:(WorkerController.worker_id * 'b -> 'c -> 'c) -> + neutral:'c -> + next:'a Hh_bucket.next -> 'c val call_with_interrupt : - (* [on_cancelled] should be specified if your [next] function ever returns - [Bucket.Wait], and it should return the list of all jobs that haven't + ?on_cancelled: + ((* [on_cancelled] should be specified if your [next] function ever returns + [Hh_bucket.Wait], and it should return the list of all jobs that haven't finished or started yet. *) - ?on_cancelled:(unit -> 'a list) -> + unit -> + 'a list) -> worker list option -> job:('c -> 'a -> 'b) -> - merge:('b -> 'c -> 'c) -> neutral:'c -> - next:'a Bucket.next -> + merge:('b -> 'c -> 'c) -> + neutral:'c -> + next:'a Hh_bucket.next -> interrupt:'d interrupt_config -> 'c * 'd * 'a list (* Creates a pool of workers. *) -val make: - (** See docs in WorkerController.worker for call_wrapper. *) - ?call_wrapper: WorkerController.call_wrapper -> - saved_state : 'a -> - entry : 'a WorkerController.entry -> - nbr_procs : int -> - gc_control : Gc.control -> - heap_handle : SharedMem.handle -> - worker list +val make : + ?call_wrapper: + (* See docs in WorkerController.worker for call_wrapper. *) + WorkerController.call_wrapper -> + saved_state:'a -> + entry:'a WorkerController.entry -> + nbr_procs:int -> + gc_control:Gc.control -> + heap_handle:SharedMem.handle -> + worker list diff --git a/hack/procs/multiWorkerLwt.ml b/hack/procs/multiWorkerLwt.ml deleted file mode 100644 index 6371fec2716..00000000000 --- a/hack/procs/multiWorkerLwt.ml +++ /dev/null @@ -1,125 +0,0 @@ -(** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
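A usage sketch against the MultiWorker interface above, with the worker pool assumed to come from make: job folds over one bucket of paths and merge adds the per-bucket totals. call_with_worker_id has the same shape, except that job and merge also receive the worker's id.

let total_length workers (paths : string list) : int =
  MultiWorker.call
    workers
    ~job:(fun acc bucket ->
      List.fold_left (fun acc p -> acc + String.length p) acc bucket)
    ~merge:( + )
    ~neutral:0
    ~next:(MultiWorker.next workers paths)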
- * - *) - -let report_canceled_callback = ref (fun ~total:_ ~finished:_ -> ()) -let set_report_canceled_callback callback = report_canceled_callback := callback -let report_canceled ~total ~finished = (!report_canceled_callback) ~total ~finished - -include MultiWorker.CallFunctor (struct - type 'a result = 'a Lwt.t - - let return = Lwt.return - - let multi_threaded_call - (type a) (type b) (type c) - workers - (job: c -> a -> b) - (merge: b -> c -> c) - (neutral: c) - (next: a Bucket.next) = - - let acc = ref neutral in - - let merge_with_acc = - (* Why do we need a lock? Well, we don't really know what is inside the merge function, and if - * something makes Lwt yield then we could end up with a race condition. At the moment, the - * merge function doesn't use Lwt, but it might in the future. Locking and unlocking is cheap, - * so I'm pre-emptively adding this lock *) - let merge_mutex = Lwt_mutex.create () in - fun result -> - Lwt_mutex.with_lock merge_mutex (fun () -> - acc := merge result !acc; - Lwt.return_unit - ) - in - - (* Our next() function may give us a job, say there are no more jobs left, or tell us to - * try again later. This signal is to wake up any workers who were told "try again later" - *) - let wait_signal = Lwt_condition.create () in - - (* Returns None if there will never be any more jobs *) - let rec get_job () = - match next () with - | Bucket.Job bucket -> Lwt.return (Some bucket) - | Bucket.Done -> Lwt.return None - | Bucket.Wait -> - let%lwt () = Lwt_condition.wait wait_signal in - get_job () - in - - let rec run_worker worker = - let idle_start_wall_time = Unix.gettimeofday () in - let%lwt bucket = get_job () in - match bucket with - | None -> Lwt.return idle_start_wall_time - | Some bucket -> - Measure.sample "worker_idle" (Unix.gettimeofday () -. idle_start_wall_time); - let%lwt result = WorkerControllerLwt.call worker (fun xl -> job neutral xl) bucket in - let%lwt () = merge_with_acc result in - (* Wait means "ask again after a worker has finished and has merged its result". So now that - * we've merged our response, let's wake any other workers which are waiting for work *) - Lwt_condition.broadcast wait_signal (); - run_worker worker - in - - let%lwt () = - let worker_threads = List.map run_worker workers in - try%lwt - let%lwt idle_start_times = LwtUtils.all worker_threads in - let idle_end_wall_time = Unix.gettimeofday () in - List.iter (fun idle_start_wall_time -> - Measure.sample "worker_idle" (idle_end_wall_time -. idle_start_wall_time); - ) idle_start_times; - Lwt.return_unit - with Lwt.Canceled -> - let total = List.length worker_threads in - let finished = ref 0 in - let worker_threads = List.map (fun thread -> - (let%lwt _ = thread in Lwt.return_unit) [%lwt.finally - incr finished; - report_canceled ~total ~finished:(!finished); - Lwt.return_unit - ] - ) worker_threads in - (* For most exceptions, we want to propagate the exception as soon as one worker throws. - * However, for Canceled we want to wait for all the workers to process the Canceled. - * Lwt.join will wait for every thread to finish or fail *) - (Lwt.join worker_threads) [%lwt.finally WorkerCancel.resume_workers (); Lwt.return_unit] - in - - Lwt.return (!acc) -end) - -exception MultiWorkersBusy - -(* Currently, MultiWorker calls may not be interleaved, which can happen with - * Lwt. Keep track of whether we have a call in flight and raise an exception if - * we do when another comes in. 
*) -let is_busy = ref false - -let call workers ~job ~merge ~neutral ~next = - if !is_busy then - raise MultiWorkersBusy - else begin - is_busy := true; - (call workers ~job ~merge ~neutral ~next) [%lwt.finally is_busy := false; Lwt.return_unit] - end - -(* A separate abstract type from MultiWorker.worker forces users to always use MultiWorkerLwt *) -type worker = WorkerController.worker - -let next ?progress_fn ?max_size workers = - Bucket.make - ~num_workers: (match workers with Some w -> List.length w | None -> 1) - ?progress_fn - ?max_size - -(* Wrap WorkerController.make to abstract out the worker type *) -let make = WorkerController.make diff --git a/hack/procs/multiWorkerLwt.mli b/hack/procs/multiWorkerLwt.mli deleted file mode 100644 index e62375944ed..00000000000 --- a/hack/procs/multiWorkerLwt.mli +++ /dev/null @@ -1,37 +0,0 @@ -(** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type worker - -val call : - worker list option -> - job:('c -> 'a -> 'b) -> - merge:('b -> 'c -> 'c) -> neutral:'c -> - next:'a Bucket.next -> - 'c Lwt.t - -val next : - ?progress_fn:(total:int -> start:int -> length:int -> unit) -> - ?max_size: int -> - worker list option -> - 'a list -> - 'a list Bucket.next - -(* Creates a pool of workers. *) -val make: - (** See docs in WorkerController.worker for call_wrapper. *) - ?call_wrapper: WorkerController.call_wrapper -> - saved_state : 'a -> - entry : 'a WorkerController.entry -> - nbr_procs : int -> - gc_control : Gc.control -> - heap_handle : SharedMem.handle -> - worker list - -val set_report_canceled_callback: (total:int -> finished:int -> unit) -> unit diff --git a/hack/procs/worker.ml b/hack/procs/worker.ml index e2bc71924b1..d34f9eb436a 100644 --- a/hack/procs/worker.ml +++ b/hack/procs/worker.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -19,10 +19,10 @@ *****************************************************************************) type request = Request of (serializer -> unit) + and serializer = { send: 'a. 'a -> unit } -type slave_job_status = - | Slave_terminated of Unix.process_status +type slave_job_status = Slave_terminated of Unix.process_status let on_slave_cancelled parent_outfd = (* The cancelling controller will ignore result of cancelled job anyway (see @@ -45,25 +45,25 @@ let slave_main ic oc = let start_minor_collections = ref 0 in let start_major_collections = ref 0 in let start_wall_time = ref 0. in - + let start_proc_fs_status = ref None in let infd = Daemon.descr_of_in_channel ic in let outfd = Daemon.descr_of_out_channel oc in - let send_result data = Mem_profile.stop (); let tm = Unix.times () in let end_user_time = tm.Unix.tms_utime +. tm.Unix.tms_cutime in let end_system_time = tm.Unix.tms_stime +. tm.Unix.tms_cstime in - let { Gc. 
- minor_words = end_minor_words; + let { + Gc.minor_words = end_minor_words; promoted_words = end_promoted_words; major_words = end_major_words; minor_collections = end_minor_collections; major_collections = end_major_collections; _; - } = Gc.quick_stat () in - - let major_time, minor_time = Sys_utils.get_gc_time () in + } = + Gc.quick_stat () + in + let (major_time, minor_time) = Sys_utils.get_gc_time () in Measure.sample "worker_gc_major_wall_time" major_time; Measure.sample "worker_gc_minor_wall_time" minor_time; @@ -71,49 +71,76 @@ let slave_main ic oc = Measure.sample "worker_system_time" (end_system_time -. !start_system_time); Measure.sample "worker_wall_time" (Unix.gettimeofday () -. !start_wall_time); - Measure.track_distribution "minor_words" ~bucket_size:(float (100 * 1024 * 1024)); + Measure.track_distribution + "minor_words" + ~bucket_size:(float (100 * 1024 * 1024)); Measure.sample "minor_words" (end_minor_words -. !start_minor_words); - Measure.track_distribution "promoted_words" ~bucket_size:(float (25 * 1024 * 1024)); - Measure.sample "promoted_words" (end_promoted_words -. !start_promoted_words); + Measure.track_distribution + "promoted_words" + ~bucket_size:(float (25 * 1024 * 1024)); + Measure.sample + "promoted_words" + (end_promoted_words -. !start_promoted_words); - Measure.track_distribution "major_words" ~bucket_size:(float (50 * 1024 * 1024)); + Measure.track_distribution + "major_words" + ~bucket_size:(float (50 * 1024 * 1024)); Measure.sample "major_words" (end_major_words -. !start_major_words); - Measure.sample "minor_collections" (float (end_minor_collections - !start_minor_collections)); - Measure.sample "major_collections" (float (end_major_collections - !start_major_collections)); + Measure.sample + "minor_collections" + (float (end_minor_collections - !start_minor_collections)); + Measure.sample + "major_collections" + (float (end_major_collections - !start_major_collections)); + + begin + match + (!start_proc_fs_status, ProcFS.status_for_pid (Unix.getpid ())) + with + | ( Some { ProcFS.rss_total = start; _ }, + Ok { ProcFS.rss_total = total; rss_hwm = hwm; _ } ) -> + Measure.sample "worker_rss_start" (float start); + Measure.sample "worker_rss_delta" (float (total - start)); + Measure.sample "worker_rss_hwm_delta" (float (hwm - start)) + | _ -> () + end; (* If we got so far, just let it finish "naturally" *) WorkerCancel.set_on_worker_cancelled (fun () -> ()); - let len = Measure.time "worker_send_response" (fun () -> - Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd data - ) in - if len > 30 * 1024 * 1024 (* 30 MB *) then begin - Hh_logger.log "WARNING: you are sending quite a lot of data (%d bytes), \ - which may have an adverse performance impact. If you are sending \ - closures, double-check to ensure that they have not captured large - values in their environment." len; - Printf.eprintf "%s" (Printexc.raw_backtrace_to_string - (Printexc.get_callstack 100)); - end; + let len = + Measure.time "worker_send_response" (fun () -> + Marshal_tools.to_fd_with_preamble + ~flags:[Marshal.Closures] + outfd + data) + in + if len > 30 * 1024 * 1024 (* 30 MB *) then ( + Hh_logger.log + "WARNING: you are sending quite a lot of data (%d bytes), which may have an adverse performance impact. If you are sending closures, double-check to ensure that they have not captured large + values in their environment." 
+ len; + Printf.eprintf + "%s" + (Printexc.raw_backtrace_to_string (Printexc.get_callstack 100)) + ); Measure.sample "worker_response_len" (float len); let stats = Measure.serialize (Measure.pop_global ()) in - let _ = Marshal_tools.to_fd_with_preamble outfd stats in () in - try Measure.push_global (); - let Request do_process = Measure.time "worker_read_request" (fun () -> - Marshal_tools.from_fd_with_preamble infd - ) in + let (Request do_process) = + Measure.time "worker_read_request" (fun () -> + Marshal_tools.from_fd_with_preamble infd) + in WorkerCancel.set_on_worker_cancelled (fun () -> on_slave_cancelled outfd); let tm = Unix.times () in let gc = Gc.quick_stat () in - Sys_utils.start_gc_profiling (); start_user_time := tm.Unix.tms_utime +. tm.Unix.tms_cutime; @@ -124,36 +151,37 @@ let slave_main ic oc = start_minor_collections := gc.Gc.minor_collections; start_major_collections := gc.Gc.major_collections; start_wall_time := Unix.gettimeofday (); + start_proc_fs_status := + ProcFS.status_for_pid (Unix.getpid ()) |> Core_kernel.Result.ok; Mem_profile.start (); do_process { send = send_result }; exit 0 with - | End_of_file -> - exit 1 - | SharedMem.Out_of_shared_memory -> - Exit_status.(exit Out_of_shared_memory) - | SharedMem.Hash_table_full -> - Exit_status.(exit Hash_table_full) - | SharedMem.Heap_full -> - Exit_status.(exit Heap_full) + | End_of_file -> exit 1 + | SharedMem.Out_of_shared_memory -> Exit_status.(exit Out_of_shared_memory) + | SharedMem.Hash_table_full -> Exit_status.(exit Hash_table_full) + | SharedMem.Heap_full -> Exit_status.(exit Heap_full) | SharedMem.Sql_assertion_failure err_num -> - let exit_code = match err_num with - | 11 -> Exit_status.Sql_corrupt - | 14 -> Exit_status.Sql_cantopen - | 21 -> Exit_status.Sql_misuse - | _ -> Exit_status.Sql_assertion_failure - in - Exit_status.exit exit_code + let exit_code = + match err_num with + | 11 -> Exit_status.Sql_corrupt + | 14 -> Exit_status.Sql_cantopen + | 21 -> Exit_status.Sql_misuse + | _ -> Exit_status.Sql_assertion_failure + in + Exit_status.exit exit_code | e -> - let e_str = Printexc.to_string e in - let pid = Unix.getpid () in - Printf.printf "Worker slave %d exception: %s\n%!" pid e_str; - EventLogger.log_if_initialized (fun () -> - EventLogger.worker_exception e_str - ); - Printf.printf "Worker slave %d Potential backtrace:\n%!" pid; - Printexc.print_backtrace stdout; - exit 2 + let e_backtrace = Printexc.get_backtrace () in + let e_str = Printexc.to_string e in + let pid = Unix.getpid () in + Printf.printf "Worker slave %d exception: %s\n%!" pid e_str; + EventLogger.log_if_initialized (fun () -> + EventLogger.worker_exception e_str); + Printf.printf + "Worker slave %d Potential backtrace:\n%s\n%!" + pid + e_backtrace; + exit 2 let win32_worker_main restore (state, _controller_fd) (ic, oc) = restore state; @@ -161,15 +189,13 @@ let win32_worker_main restore (state, _controller_fd) (ic, oc) = let maybe_send_status_to_controller fd status = match fd with - | None -> - () + | None -> () | Some fd -> let to_controller fd msg = ignore (Marshal_tools.to_fd_with_preamble fd msg : int) in - match status with - | Unix.WEXITED 0 -> - () + (match status with + | Unix.WEXITED 0 -> () | Unix.WEXITED 1 -> (* 1 is an expected exit code. On unix systems, when the master process exits, the pipe * becomes readable. We fork a worker slave, which reads 0 bytes and exits with code 1. 
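The exit-code-1 convention above rests on an ordinary property of pipes; the following stand-alone sketch shows that mechanism with plain Unix calls (hypothetical code, not tied to the worker itself).

let watch_parent_via_pipe () =
  let (r, w) = Unix.pipe () in
  match Unix.fork () with
  | 0 ->
    (* child: the only remaining open copy of [w] is the parent's *)
    Unix.close w;
    let n = Unix.read r (Bytes.create 1) 0 1 in
    (* once the parent exits, [w] closes and the read returns 0 bytes *)
    if n = 0 then exit 1 else exit 0
  | _pid ->
    (* parent: keep [w] open for its whole lifetime *)
    Unix.close r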
@@ -180,11 +206,19 @@ let maybe_send_status_to_controller fd status = Timeout.with_timeout ~timeout:10 ~on_timeout:(fun _ -> - Hh_logger.log "Timed out sending status to controller" - ) - ~do_:(fun _ -> - to_controller fd (Slave_terminated status) - ) + Hh_logger.log "Timed out sending status to controller") + ~do_:(fun _ -> to_controller fd (Slave_terminated status))) + +(* On Unix each job runs in a forked process. The first thing these jobs do is + * deserialize a marshaled closure which is the job. + * + * The marshaled representation of a closure includes a MD5 digest of the code + * segment and an offset. The digest is lazily computed, but if it has not been + * computed before the fork, then each forked process will need to compute it. + * + * To avoid this, we deserialize a dummy closure before forking, so that we only + * need to calculate the digest once per worker instead of once per job. *) +let dummy_closure () = () (** * On Windows, the Worker is a process and runs the job directly. See above. @@ -211,37 +245,41 @@ let maybe_send_status_to_controller fd status = *) let unix_worker_main restore (state, controller_fd) (ic, oc) = restore state; + + (* see dummy_closure above *) + ignore Marshal.(from_bytes (to_bytes dummy_closure [Closures]) 0); + let in_fd = Daemon.descr_of_in_channel ic in if !Utils.profile then Utils.log := prerr_endline; try while true do (* Wait for an incoming job : is there something to read? But we don't read it yet. It will be read by the forked slave. *) - let readyl, _, _ = Unix.select [in_fd] [] [] (-1.0) in + let (readyl, _, _) = Unix.select [in_fd] [] [] (-1.0) in if readyl = [] then exit 0; + (* We fork a slave for every incoming request. And let it die after one request. This is the quickest GC. *) - match Fork.fork() with + match Fork.fork () with | 0 -> slave_main ic oc | pid -> - (* Wait for the slave termination... *) - let status = snd (Sys_utils.waitpid_non_intr [] pid) in - let () = maybe_send_status_to_controller controller_fd status in - match status with - | Unix.WEXITED 0 -> () - | Unix.WEXITED 1 -> - raise End_of_file - | Unix.WEXITED code -> - Printf.printf "Worker exited (code: %d)\n" code; - flush stdout; - Pervasives.exit code - | Unix.WSIGNALED x -> - let sig_str = PrintSignal.string_of_signal x in - Printf.printf "Worker interrupted with signal: %s\n" sig_str; - exit 2 - | Unix.WSTOPPED x -> - Printf.printf "Worker stopped with signal: %d\n" x; - exit 3 + (* Wait for the slave termination... *) + let status = snd (Sys_utils.waitpid_non_intr [] pid) in + let () = maybe_send_status_to_controller controller_fd status in + (match status with + | Unix.WEXITED 0 -> () + | Unix.WEXITED 1 -> raise End_of_file + | Unix.WEXITED code -> + Printf.printf "Worker exited (code: %d)\n" code; + flush stdout; + Pervasives.exit code + | Unix.WSIGNALED x -> + let sig_str = PrintSignal.string_of_signal x in + Printf.printf "Worker interrupted with signal: %s\n" sig_str; + exit 2 + | Unix.WSTOPPED x -> + Printf.printf "Worker stopped with signal: %d\n" x; + exit 3) done; assert false with End_of_file -> exit 0 diff --git a/hack/procs/worker.mli b/hack/procs/worker.mli index a9b665872c8..68561d5ee10 100644 --- a/hack/procs/worker.mli +++ b/hack/procs/worker.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,19 +8,19 @@ *) type request = Request of (serializer -> unit) + and serializer = { send: 'a. 
'a -> unit } -type slave_job_status = - | Slave_terminated of Unix.process_status +type slave_job_status = Slave_terminated of Unix.process_status -val win32_worker_main: +val win32_worker_main : ('a -> 'b) -> - ('a * Unix.file_descr option) -> - request Daemon.in_channel * 'c Daemon.out_channel - -> 'd + 'a * Unix.file_descr option -> + request Daemon.in_channel * 'c Daemon.out_channel -> + 'd -val unix_worker_main: +val unix_worker_main : ('a -> 'b) -> - ('a * Unix.file_descr option) -> - request Daemon.in_channel * 'c Daemon.out_channel - -> 'd + 'a * Unix.file_descr option -> + request Daemon.in_channel * 'c Daemon.out_channel -> + 'd diff --git a/hack/procs/workerController.ml b/hack/procs/workerController.ml index e7bead69de6..1190491b0ef 100644 --- a/hack/procs/workerController.ml +++ b/hack/procs/workerController.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,7 @@ * *) -open Hh_core +open Core_kernel open Worker (***************************************************************************** @@ -33,10 +33,14 @@ open Worker *****************************************************************************) type process_id = int + +type worker_id = int + type worker_failure = (* Worker killed by Out Of Memory. *) | Worker_oomed | Worker_quit of Unix.process_status + exception Worker_failed of (process_id * worker_failure) exception Worker_busy @@ -52,18 +56,24 @@ let status_string = function | Unix.WSIGNALED i -> Printf.sprintf "WSIGNALED %d" i | Unix.WSTOPPED i -> Printf.sprintf "WSTOPPED %d" i -let failure_to_string f = match f with +let failure_to_string f = + match f with | Worker_oomed -> "Worker_oomed" | Worker_quit s -> Printf.sprintf "(Worker_quit %s)" (status_string s) -let () = Printexc.register_printer @@ function - | Worker_failed_to_send_job Other_send_job_failure exn -> - Some (Printf.sprintf "Other_send_job_failure: %s" (Printexc.to_string exn)) - | Worker_failed_to_send_job Worker_already_exited status -> +let () = + Caml.Printexc.register_printer + @@ function + | Worker_failed_to_send_job (Other_send_job_failure exn) -> + Some (Printf.sprintf "Other_send_job_failure: %s" (Exn.to_string exn)) + | Worker_failed_to_send_job (Worker_already_exited status) -> Some (Printf.sprintf "Worker_already_exited: %s" (status_string status)) | Worker_failed (id, failure) -> - Some (Printf.sprintf "Worker_failed (process_id = %d): %s" id - (failure_to_string failure)) + Some + (Printf.sprintf + "Worker_failed (process_id = %d): %s" + id + (failure_to_string failure)) | _ -> None (* Should we 'prespawn' the worker ? *) @@ -73,6 +83,7 @@ let use_prespawned = not Sys.win32 let max_workers = 1000 type void (* an empty type *) + type call_wrapper = { wrap: 'x 'b. ('x -> 'b) -> 'x -> 'b } (***************************************************************************** @@ -81,11 +92,14 @@ type call_wrapper = { wrap: 'x 'b. ('x -> 'b) -> 'x -> 'b } *****************************************************************************) type worker = { - id: int; (* Simple id for the worker. This is not the worker pid: on - Windows, we spawn a new worker for each job. *) - - - (** The call wrapper will wrap any workload sent to the worker (via "call" + (* Simple id for the worker. This is not the worker pid: on Windows, we spawn + * a new worker for each job. + * + * This is also an offset into the shared heap segment, used to access + * worker-local data. As such, the numbering is important. The IDs must be + * dense and start at 1. 
(0 is the master process offset.) *) + id: int; + (* The call wrapper will wrap any workload sent to the worker (via "call" * below) before invoking the workload. * * That is, when calling the worker with workload `f x`, it will be wrapped @@ -95,24 +109,18 @@ type worker = { * workers. For example, this can be useful to handle exceptions uniformly * across workers regardless what workload is called on them. *) call_wrapper: call_wrapper option; - - (** On Unix, Worker Master sends status messages over this fd to this + (* On Unix, Worker Master sends status messages over this fd to this * Controller. On Windows, it doesn't send anything, so don't try to read from * it (it should be set to None). *) controller_fd: Unix.file_descr option; - (* Sanity check: is the worker still available ? *) mutable killed: bool; - (* Sanity check: is the worker currently busy ? *) mutable busy: bool; - (* If the worker is currently busy, handle of the job it's execuing *) mutable handle: 'a 'b. ('a, 'b) handle option; - (* On Unix, a reference to the 'prespawned' worker. *) prespawned: (void, request) Daemon.handle option; - (* On Windows, a function to spawn a slave. *) spawn: unit -> (void, request) Daemon.handle; } @@ -123,7 +131,6 @@ type worker = { * the result of the job when the task is done (cf multiWorker.ml). * *****************************************************************************) - and ('a, 'b) handle = ('a, 'b) delayed ref (* Integer represents job the handle belongs to. @@ -136,26 +143,23 @@ and 'b worker_handle = | Canceled | Failed of exn -(** The Controller's slave has a Worker. The Worker is itself a single process +(* The Controller's slave has a Worker. The Worker is itself a single process * on Windows. On Unix, the slave is itself a Worker Master process, and Worker * Slave. *) and 'a slave = { - - worker: worker; (* The associated worker *) - - slave_pid: int; (* The actual slave pid *) + worker: worker; + (* The associated worker *) + slave_pid: int; + (* The actual slave pid *) (* The file descriptor we might pass to select in order to wait for the slave to finish its job. *) infd: Unix.file_descr; - (* A blocking function that returns the job result. 
*) result: unit -> 'a; - (* A blocking function that waits for job cancellation (see Worker.cancel) * to finish *) wait_for_cancel: unit -> unit; - } let worker_id w = w.id @@ -176,34 +180,41 @@ let mark_free w = w.handle <- None (* If the worker isn't prespawned, spawn the worker *) -let spawn w = match w.prespawned with -| None -> w.spawn () -| Some handle -> handle +let spawn w = + match w.prespawned with + | None -> w.spawn () + | Some handle -> handle (* If the worker isn't prespawned, close the worker *) let close w h = if w.prespawned = None then Daemon.close h (* If there is a call_wrapper, apply it and create the Request *) -let wrap_request w f x = match w.call_wrapper with +let wrap_request w f x = + match w.call_wrapper with | Some { wrap } -> Request (fun { send } -> send (wrap f x)) | None -> Request (fun { send } -> send (f x)) -type 'a entry_state = 'a * Gc.control * SharedMem.handle -type 'a entry = ('a entry_state * (Unix.file_descr option), request, void) Daemon.entry +type 'a entry_state = 'a * Gc.control * SharedMem.handle * int + +type 'a entry = + ('a entry_state * Unix.file_descr option, request, void) Daemon.entry let entry_counter = ref 0 + let register_entry_point ~restore = incr entry_counter; - let restore (st, gc_control, heap_handle) = - restore st; - SharedMem.connect heap_handle ~is_master:false; - Gc.set gc_control in + let restore (st, gc_control, heap_handle, worker_id) = + restore st ~worker_id; + SharedMem.connect heap_handle ~worker_id; + Gc.set gc_control + in let name = Printf.sprintf "slave_%d" !entry_counter in Daemon.register_entry_point name - (if Sys.win32 - then win32_worker_main restore - else unix_worker_main restore) + ( if Sys.win32 then + win32_worker_main restore + else + unix_worker_main restore ) (************************************************************************** * Creates a pool of workers. @@ -216,51 +227,62 @@ let workers = ref [] let make_one ?call_wrapper controller_fd spawn id = if id >= max_workers then failwith "Too many workers"; - let prespawned = if not use_prespawned then None else Some (spawn ()) in - let worker = { - call_wrapper; - controller_fd; - id; - busy = false; - handle = None; - killed = false; - prespawned; - spawn } + let prespawned = + if not use_prespawned then + None + else + Some (spawn ()) + in + let worker = + { + call_wrapper; + controller_fd; + id; + busy = false; + handle = None; + killed = false; + prespawned; + spawn; + } in workers := worker :: !workers; worker -(** Make a few workers. When workload is given to a worker (via "call" below), +(* Make a few workers. When workload is given to a worker (via "call" below), * the workload is wrapped in the calL_wrapper. *) -let make ?call_wrapper ~saved_state ~entry ~nbr_procs ~gc_control ~heap_handle = +let make ?call_wrapper ~saved_state ~entry ~nbr_procs ~gc_control ~heap_handle + = let setup_controller_fd () = if use_prespawned then - let parent_fd, child_fd = Unix.pipe () in - (** parent_fd is only used in this process. Don't leak it to children. + let (parent_fd, child_fd) = Unix.pipe () in + (* parent_fd is only used in this process. Don't leak it to children. * This will auto-close parent_fd in children created with Daemon.spawn * since Daemon.spawn uses exec. *) let () = Unix.set_close_on_exec parent_fd in - Some parent_fd, Some child_fd + (Some parent_fd, Some child_fd) else - (** We don't use the side channel on Windows. *) - None, None + (* We don't use the side channel on Windows. 
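Wiring the pieces above together, a pool-construction sketch: the GC control and shared-memory handle are assumed to be supplied by the caller, the saved state is unit, and the restore callback does nothing beyond noting its worker_id.

let entry =
  WorkerController.register_entry_point
    ~restore:(fun (() : unit) ~worker_id ->
      (* per-worker re-initialization would go here *)
      ignore worker_id)

let make_pool ~gc_control ~heap_handle nbr_procs =
  WorkerController.make
    ?call_wrapper:None
    ~saved_state:()
    ~entry
    ~nbr_procs
    ~gc_control
    ~heap_handle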
*) + (None, None) in - let spawn name child_fd () = + let spawn worker_id name child_fd () = Unix.clear_close_on_exec heap_handle.SharedMem.h_fd; - (** Daemon.spawn runs exec after forking. We explicitly *do* want to "leak" + + (* Daemon.spawn runs exec after forking. We explicitly *do* want to "leak" * child_fd to this one spawned process because it will be using that FD to * send messages back up to us. Close_on_exec is probably already false, but * we force it again to be false here just in case. *) Option.iter child_fd ~f:Unix.clear_close_on_exec; - let state = (saved_state, gc_control, heap_handle) in + let state = (saved_state, gc_control, heap_handle, worker_id) in let handle = Daemon.spawn ~name (Daemon.null_fd (), Unix.stdout, Unix.stderr) entry - (state, child_fd) in + (state, child_fd) + in Unix.set_close_on_exec heap_handle.SharedMem.h_fd; - (** This process no longer needs child_fd after its spawned the child. + + (* This process no longer needs child_fd after its spawned the child. * Messages are read using controller_fd. *) Option.iter child_fd ~f:Unix.close; handle @@ -268,85 +290,90 @@ let make ?call_wrapper ~saved_state ~entry ~nbr_procs ~gc_control ~heap_handle = let made_workers = ref [] in let pid = Unix.getpid () in for n = 1 to nbr_procs do - let controller_fd, child_fd = setup_controller_fd () in - let name = Printf.sprintf "worker process %d/%d for server %d" n nbr_procs pid in - made_workers := make_one ?call_wrapper controller_fd (spawn name child_fd) n :: !made_workers + let (controller_fd, child_fd) = setup_controller_fd () in + let name = + Printf.sprintf "worker process %d/%d for server %d" n nbr_procs pid + in + made_workers := + make_one ?call_wrapper controller_fd (spawn n name child_fd) n + :: !made_workers done; !made_workers - (************************************************************************** * Send a job to a worker * **************************************************************************) -let call ?(call_id=0) w (type a) (type b) (f : a -> b) (x : a) : (a, b) handle = - if is_killed w then Printf.ksprintf failwith "killed worker (%d)" (worker_id w); +let call ?(call_id = 0) w (type a b) (f : a -> b) (x : a) : (a, b) handle = + if is_killed w then + Printf.ksprintf failwith "killed worker (%d)" (worker_id w); mark_busy w; (* Spawn the slave, if not prespawned. *) - let { Daemon.pid = slave_pid; channels = (inc, outc) } as h = spawn w in - + let ({ Daemon.pid = slave_pid; channels = (inc, outc) } as h) = spawn w in let infd = Daemon.descr_of_in_channel inc in let outfd = Daemon.descr_of_out_channel outc in - let worker_failed pid_stat controller_fd = - (** If we have a controller fd, we read the true pid status + (* If we have a controller fd, we read the true pid status * over that channel instead of using the one returned from the * Worker Master. 
*) - let pid_stat = match controller_fd with - | None -> - snd (pid_stat) + let pid_stat = + match controller_fd with + | None -> snd pid_stat | Some fd -> Timeout.with_timeout ~timeout:3 - ~on_timeout:(fun _ -> snd (pid_stat)) + ~on_timeout:(fun _ -> snd pid_stat) ~do_:(fun _ -> try - let Slave_terminated status = Marshal_tools.from_fd_with_preamble fd in + let (Slave_terminated status) = + Marshal_tools.from_fd_with_preamble fd + in status - with - | End_of_file -> - snd (pid_stat) - ) + with End_of_file -> snd pid_stat) in match pid_stat with | Unix.WEXITED i when i = Exit_status.(exit_code Out_of_shared_memory) -> raise SharedMem.Out_of_shared_memory - | Unix.WEXITED i -> + | Unix.WEXITED i -> Printf.eprintf "Subprocess(%d): fail %d" slave_pid i; raise (Worker_failed (slave_pid, Worker_quit (Unix.WEXITED i))) - | Unix.WSTOPPED i -> + | Unix.WSTOPPED i -> raise (Worker_failed (slave_pid, Worker_quit (Unix.WSTOPPED i))) - | Unix.WSIGNALED i -> + | Unix.WSIGNALED i -> raise (Worker_failed (slave_pid, Worker_quit (Unix.WSIGNALED i))) in - - (** Checks if the worker master has exited. *) - let with_exit_status_check ?(block_on_waitpid=false) slave_pid f = - let wait_flags = if block_on_waitpid then [] else [Unix.WNOHANG] in + (* Checks if the worker master has exited. *) + let with_exit_status_check ?(block_on_waitpid = false) slave_pid f = + let wait_flags = + if block_on_waitpid then + [] + else + [Unix.WNOHANG] + in let pid_stat = Unix.waitpid wait_flags slave_pid in match pid_stat with - | 0, _ -> - f () - | _, Unix.WEXITED 0 -> - (** This will never actually happen. Worker Master only exits if this - * Controller process has exited. *) - failwith "Worker Master exited 0 unexpectedly" - | _ -> - worker_failed pid_stat w.controller_fd + | (0, _) -> f () + | (_, Unix.WEXITED 0) -> + (* This will never actually happen. Worker Master only exits if this + * Controller process has exited. *) + failwith "Worker Master exited 0 unexpectedly" + | _ -> worker_failed pid_stat w.controller_fd in (* Prepare ourself to read answer from the slave. *) - let get_result_with_status_check ?(block_on_waitpid=false) () : b = - with_exit_status_check ~block_on_waitpid slave_pid begin fun () -> - let data : b = Marshal_tools.from_fd_with_preamble infd in - let stats : Measure.record_data = Marshal_tools.from_fd_with_preamble infd in - close w h; - Measure.merge (Measure.deserialize stats); - data - end in + let get_result_with_status_check ?(block_on_waitpid = false) () : b = + with_exit_status_check ~block_on_waitpid slave_pid (fun () -> + let data : b = Marshal_tools.from_fd_with_preamble infd in + let stats : Measure.record_data = + Marshal_tools.from_fd_with_preamble infd + in + close w h; + Measure.merge (Measure.deserialize stats); + data) + in let result () : b = - (** + (* * We run the "with_exit_status_check" twice (first time non-blockingly). * This is because of a race condition. * @@ -368,42 +395,41 @@ let call ?(call_id=0) w (type a) (type b) (f : a -> b) (x : a) : (a, b) handle = * non-interruptible waitpid that we expect it to be at. Eventually, it will also * fail accordingly, since its slave has failed. 
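A plain-Unix rendering of the two-phase check described above, with a hypothetical read_result standing in for reading the marshalled response: poll the child non-blockingly first, and only after an unexpected end-of-file do a blocking waitpid, so a racing crash surfaces as the child's real exit status.

exception Child_failed of Unix.process_status

let result_or_raise ~read_result child_pid =
  let check ~block =
    let flags = if block then [] else [Unix.WNOHANG] in
    match Unix.waitpid flags child_pid with
    | (0, _) -> () (* child still running *)
    | (_, status) -> raise (Child_failed status)
  in
  check ~block:false;
  try read_result () with
  | End_of_file ->
    (* the EOF raced with the child dying: block until we know why *)
    check ~block:true;
    raise End_of_file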
*) - try get_result_with_status_check () with - | End_of_file -> + try get_result_with_status_check () + with End_of_file -> get_result_with_status_check ~block_on_waitpid:true () in let wait_for_cancel () : unit = - with_exit_status_check slave_pid begin fun () -> - (* Depending on whether we manage to kill the slave before it starts writing - * results back, this will return either actual results, or "anything" - * (written by interrupt signal that exited). The types don't match, but we - * ignore both of them anyway. *) - let _ : 'c = Marshal_tools.from_fd_with_preamble infd in - let _ : 'c = Marshal_tools.from_fd_with_preamble infd in - () - end + with_exit_status_check slave_pid (fun () -> + (* Depending on whether we manage to kill the slave before it starts writing + * results back, this will return either actual results, or "anything" + * (written by interrupt signal that exited). The types don't match, but we + * ignore both of them anyway. *) + let (_ : 'c) = Marshal_tools.from_fd_with_preamble infd in + let (_ : 'c) = Marshal_tools.from_fd_with_preamble infd in + ()) in let slave = { result; slave_pid; infd; worker = w; wait_for_cancel } in let request = wrap_request w f x in - (* Send the job to the slave. *) let () = - try Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd request |> ignore - with - | e -> begin - match Unix.waitpid [Unix.WNOHANG] slave_pid with - | 0, _ -> - raise (Worker_failed_to_send_job (Other_send_job_failure e)) - | _, status -> - raise (Worker_failed_to_send_job (Worker_already_exited status)) - end + try + Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd request + |> ignore + with e -> + begin + match Unix.waitpid [Unix.WNOHANG] slave_pid with + | (0, _) -> + raise (Worker_failed_to_send_job (Other_send_job_failure e)) + | (_, status) -> + raise (Worker_failed_to_send_job (Worker_already_exited status)) + end in (* And returned the 'handle'. *) let handle = ref ((x, call_id), Processing slave) in w.handle <- Obj.magic (Some handle); handle - (************************************************************************** * Read results from a handle. * This might block if the worker hasn't finished yet. 
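A single-job usage sketch of the controller interface being defined here: send one closure to a worker (assumed to come from make) and block on its answer. get_result waits for the reply and re-raises the recorded failure if the slave died.

let double_on (w : WorkerController.worker) (n : int) : int =
  let handle = WorkerController.call w (fun x -> x * 2) n in
  WorkerController.get_result handle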
@@ -414,16 +440,16 @@ let with_worker_exn (handle : ('a, 'b) handle) slave f = try f () with | Worker_failed (pid, status) as exn -> mark_free slave.worker; - handle := fst !handle, Failed exn; - begin match status with - | Worker_quit (Unix.WSIGNALED -7) -> - raise (Worker_failed (pid, Worker_oomed)) - | _ -> - raise exn + handle := (fst !handle, Failed exn); + begin + match status with + | Worker_quit (Unix.WSIGNALED -7) -> + raise (Worker_failed (pid, Worker_oomed)) + | _ -> raise exn end | exn -> mark_free slave.worker; - handle := fst !handle, Failed exn; + handle := (fst !handle, Failed exn); raise exn let get_result d = @@ -432,12 +458,11 @@ let get_result d = | Failed exn -> raise exn | Canceled -> raise End_of_file | Processing s -> - with_worker_exn d s begin fun () -> - let res = s.result () in - mark_free s.worker; - d := fst !d, Cached (res, s.worker); - res - end + with_worker_exn d s (fun () -> + let res = s.result () in + mark_free s.worker; + d := (fst !d, Cached (res, s.worker)); + res) (***************************************************************************** * Our polling primitive on workers @@ -452,38 +477,43 @@ type ('a, 'b) selected = { } let get_processing ds = - List.rev_filter_map - ds - ~f:(fun d -> match snd !d with Processing p -> Some p | _ -> None) + List.rev_filter_map ds ~f:(fun d -> + match snd !d with + | Processing p -> Some p + | _ -> None) let select ds additional_fds = let processing = get_processing ds in - let fds = List.map ~f:(fun {infd; _} -> infd) processing in - let ready_fds, _, _ = + let fds = List.map ~f:(fun { infd; _ } -> infd) processing in + let (ready_fds, _, _) = if fds = [] || List.length processing <> List.length ds then - [], [], [] + ([], [], []) else - Sys_utils.select_non_intr (fds @ additional_fds) [] [] ~-.1. in + Sys_utils.select_non_intr (fds @ additional_fds) [] [] (-1.) 
+ in let additional_ready_fds = - List.filter ~f:(List.mem ready_fds) additional_fds in + List.filter ~f:(List.mem ~equal:( = ) ready_fds) additional_fds + in List.fold_right ~f:(fun d acc -> match snd !d with - | Cached _ | Canceled | Failed _ -> - { acc with readys = d :: acc.readys } - | Processing s when List.mem ready_fds s.infd -> - { acc with readys = d :: acc.readys } - | Processing _ -> - { acc with waiters = d :: acc.waiters}) - ~init:{ readys = [] ; waiters = []; ready_fds = additional_ready_fds } + | Cached _ + | Canceled + | Failed _ -> + { acc with readys = d :: acc.readys } + | Processing s when List.mem ~equal:( = ) ready_fds s.infd -> + { acc with readys = d :: acc.readys } + | Processing _ -> { acc with waiters = d :: acc.waiters }) + ~init:{ readys = []; waiters = []; ready_fds = additional_ready_fds } ds let get_worker h = match snd !h with - | Processing {worker; _} -> worker + | Processing { worker; _ } -> worker | Cached (_, worker) -> worker | Canceled - | Failed _ -> invalid_arg "Worker.get_worker" + | Failed _ -> + invalid_arg "Worker.get_worker" let get_job h = fst (fst !h) @@ -494,22 +524,20 @@ let get_call_id h = snd (fst !h) **************************************************************************) let kill w = - if not (is_killed w) then begin + if not (is_killed w) then ( w.killed <- true; Option.iter ~f:Daemon.kill w.prespawned - end + ) -let killall () = - List.iter ~f:kill !workers +let killall () = List.iter ~f:kill !workers let wait_for_cancel d = match snd !d with | Processing s -> - with_worker_exn d s begin fun () -> - s.wait_for_cancel (); - mark_free s.worker; - d := fst !d, Canceled - end + with_worker_exn d s (fun () -> + s.wait_for_cancel (); + mark_free s.worker; + d := (fst !d, Canceled)) | _ -> () let cancel handles = diff --git a/hack/procs/workerController.mli b/hack/procs/workerController.mli index 8cea22d791f..5dd591d130b 100644 --- a/hack/procs/workerController.mli +++ b/hack/procs/workerController.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,6 +7,8 @@ * *) +open Core_kernel + (*****************************************************************************) (* Module building workers. * A worker is a subprocess executing an arbitrary function. @@ -17,12 +19,17 @@ (*****************************************************************************) type process_id = int + +type worker_id = int + type worker_failure = (* Worker killed by Out Of Memory. *) | Worker_oomed | Worker_quit of Unix.process_status + exception Worker_failed of (process_id * worker_failure) -(** Raise this exception when sending work to a worker that is already busy. + +(* Raise this exception when sending work to a worker that is already busy. * We should never be doing that, and this is an assertion error. *) exception Worker_busy @@ -36,6 +43,7 @@ exception Worker_failed_to_send_job of send_job_failure (* The type of a worker visible to the outside world *) type worker + (*****************************************************************************) (* The handle is what we get back when we start a job. It's a "future" * (sometimes called a "promise"). 
The scheduler uses the handle to retrieve @@ -43,55 +51,65 @@ type worker *) (*****************************************************************************) type ('a, 'b) handle + (* An empty type *) type void + (* Get the worker's id *) -val worker_id: worker -> int +val worker_id : worker -> worker_id + (* Has the worker been killed *) -val is_killed: worker -> bool +val is_killed : worker -> bool + (* Mark the worker as busy. Throw if it is already busy *) -val mark_busy: worker -> unit +val mark_busy : worker -> unit + (* If the worker is busy, what is it doing. Note that calling this is not * type safe: 'a and 'b are free type variables, and they depend on what is the * job being executed by worker. *) -val get_handle_UNSAFE: worker -> ('a, 'b) handle option +val get_handle_UNSAFE : worker -> ('a, 'b) handle option + (* Mark the worker as free *) -val mark_free: worker -> unit +val mark_free : worker -> unit + (* If the worker isn't prespawned, spawn the worker *) -val spawn: worker -> (void, Worker.request) Daemon.handle +val spawn : worker -> (void, Worker.request) Daemon.handle + (* If the worker isn't prespawned, close the worker *) -val close: worker -> (void, Worker.request) Daemon.handle -> unit +val close : worker -> (void, Worker.request) Daemon.handle -> unit + (* If there is a call_wrapper, apply it and create the Request *) -val wrap_request: worker -> ('x -> 'b) -> 'x -> Worker.request +val wrap_request : worker -> ('x -> 'b) -> 'x -> Worker.request type call_wrapper = { wrap: 'x 'b. ('x -> 'b) -> 'x -> 'b } type 'a entry -val register_entry_point: - restore:('a -> unit) -> 'a entry + +val register_entry_point : restore:('a -> worker_id:int -> unit) -> 'a entry (* Creates a pool of workers. *) -val make: - (** See docs in WorkerController.worker for call_wrapper. *) - ?call_wrapper: call_wrapper -> - saved_state : 'a -> - entry : 'a entry -> - nbr_procs : int -> - gc_control : Gc.control -> - heap_handle : SharedMem.handle -> - worker list +val make : + ?call_wrapper: + (* See docs in WorkerController.worker for call_wrapper. *) + call_wrapper -> + saved_state:'a -> + entry:'a entry -> + nbr_procs:int -> + gc_control:Gc.control -> + heap_handle:SharedMem.handle -> + worker list (* Call in a sub-process (CAREFUL, GLOBALS ARE COPIED) *) -val call: ?call_id:int -> worker -> ('a -> 'b) -> 'a -> ('a, 'b) handle +val call : ?call_id:int -> worker -> ('a -> 'b) -> 'a -> ('a, 'b) handle (* See MultiThreadedCall.call_id *) -val get_call_id: ('a, 'b) handle -> int +val get_call_id : ('a, 'b) handle -> int (* Retrieves the job that the worker is currently processing *) -val get_job: ('a, 'b) handle -> 'a +val get_job : ('a, 'b) handle -> 'a (* Retrieves the result (once the worker is done) hangs otherwise *) -val get_result: ('a, 'b) handle -> 'b +val get_result : ('a, 'b) handle -> 'b (* Selects among multiple handles those which are ready. *) type ('a, 'b) selected = { @@ -100,12 +118,13 @@ type ('a, 'b) selected = { (* Additional (non worker) ready fds that we selected on. 
*) ready_fds: Unix.file_descr list; } -val select: ('a, 'b) handle list -> Unix.file_descr list -> ('a, 'b) selected + +val select : ('a, 'b) handle list -> Unix.file_descr list -> ('a, 'b) selected (* Returns the worker which produces this handle *) -val get_worker: ('a, 'b) handle -> worker +val get_worker : ('a, 'b) handle -> worker (* Killall the workers *) -val killall: unit -> unit +val killall : unit -> unit val cancel : ('a, 'b) handle list -> unit diff --git a/hack/procs/workerControllerLwt.ml b/hack/procs/workerControllerLwt.ml deleted file mode 100644 index 66a841a01c6..00000000000 --- a/hack/procs/workerControllerLwt.ml +++ /dev/null @@ -1,114 +0,0 @@ -(** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -open WorkerController - -(* This is basically an lwt thread that writes a job to the worker, waits for the response, and - * then returns the result. - * - * The main complication is that I, glevi, found a perf regression when I used Marshal_tools_lwt - * to send the job to the worker. Here's my hypothesis: - * - * 1. On a machine with many CPUs (like 56) we create 56 threads to send a job to each worker. - * 2. Lwt attempts to write the jobs to the workers in parallel. - * 3. Each worker spends more time between getting the first byte and last byte - * 4. Something something this leads to more context switches for the worker - * 5. The worker spends more time on a job - * - * This is reinforced by the observation that the regression only happens as the number of workers - * grows. - * - * By switching from Marshal_tools_lwt.to_fd_with_preamble to Marshal_tools.to_fd_with_preamble, - * the issue seems to have disappeared. Reading from the worker didn't seem to trigger a perf issue - * in my testing, but there's really nothing more urgent than reading a response from a finished - * worker, so reading in a blocking manner is fine. - *) -let call w (type a) (type b) (f : a -> b) (x : a) : b Lwt.t = - if is_killed w - then Printf.ksprintf failwith "killed worker (%d)" (worker_id w); - mark_busy w; - - (* Spawn the slave, if not prespawned. *) - let { Daemon.pid = slave_pid; channels = (inc, outc) } as h = spawn w in - - let infd = Daemon.descr_of_in_channel inc in - let outfd = Daemon.descr_of_out_channel outc in - let infd_lwt = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true infd in - let outfd_lwt = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true outfd in - - let request = wrap_request w f x in - - (* Send the job *) - ( - let%lwt () = - try%lwt - (* Wait in an lwt-friendly manner for the worker to be writable (should be instant) *) - let%lwt () = Lwt_unix.wait_write outfd_lwt in - (* Write in a lwt-unfriendly, blocking manner to the worker *) - let _ = Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd request in - Lwt.return_unit - with exn -> - Hh_logger.error ~exn "Failed to read response from work #%d" (worker_id w); - (* Failed to send the job to the worker. Is it because the worker is dead or is it - * something else? 
*) - let%lwt pid, status = Lwt_unix.waitpid [Unix.WNOHANG] slave_pid in - match pid with - | 0 -> raise (Worker_failed_to_send_job (Other_send_job_failure exn)) - | _ -> raise (Worker_failed_to_send_job (Worker_already_exited status)) - in - (* Get the job's result *) - let%lwt res = - try%lwt - (* Wait in an lwt-friendly manner for the worker to finish the job *) - let%lwt () = Lwt_unix.wait_read infd_lwt in - (* Read in a lwt-unfriendly, blocking manner from the worker *) - (* Due to https://github.com/ocsigen/lwt/issues/564, annotation cannot go on let%let node *) - let data : b = Marshal_tools.from_fd_with_preamble infd in - let stats : Measure.record_data = Marshal_tools.from_fd_with_preamble infd in - Lwt.return (data, stats) - with - | Lwt.Canceled as exn -> - (* Worker is handling a job but we're cancelling *) - - (* Each worker might call this but that's ok *) - WorkerCancel.stop_workers (); - (* Wait for the worker to finish cancelling *) - let%lwt () = Lwt_unix.wait_read infd_lwt in - (* Read the junk from the pipe *) - let _ = Marshal_tools.from_fd_with_preamble infd in - let _ = Marshal_tools.from_fd_with_preamble infd in - raise exn - | exn -> - let%lwt pid, status = Lwt_unix.waitpid [Unix.WNOHANG] slave_pid in - begin match pid, status with - | 0, _ | _, Unix.WEXITED 0 -> - (* The slave is still running or exited normally. It's odd that we failed to read - * the response, so just raise that exception *) - raise exn - | _, Unix.WEXITED i when i = Exit_status.(exit_code Out_of_shared_memory) -> - raise SharedMem.Out_of_shared_memory - | _, Unix.WEXITED i -> - let () = Printf.eprintf "Subprocess(%d): fail %d" slave_pid i in - raise (Worker_failed (slave_pid, Worker_quit (Unix.WEXITED i))) - | _, Unix.WSTOPPED i -> - let () = Printf.eprintf "Subprocess(%d): stopped %d" slave_pid i in - raise (Worker_failed (slave_pid, Worker_quit (Unix.WSTOPPED i))) - | _, Unix.WSIGNALED i -> - let () = Printf.eprintf "Subprocess(%d): signaled %d" slave_pid i in - raise (Worker_failed (slave_pid, Worker_quit (Unix.WSIGNALED i))) - end - in - close w h; - Measure.merge (Measure.deserialize (snd res)); - Lwt.return (fst res) - ) [%lwt.finally - (* No matter what, always mark worker as free when we're done *) - mark_free w; - Lwt.return_unit - ] diff --git a/hack/procs/workerControllerLwt.mli b/hack/procs/workerControllerLwt.mli deleted file mode 100644 index 130d86521e8..00000000000 --- a/hack/procs/workerControllerLwt.mli +++ /dev/null @@ -1,11 +0,0 @@ -(** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(* Call in a sub-process *) -val call: WorkerController.worker -> ('a -> 'b) -> 'a -> 'b Lwt.t diff --git a/hack/socket/dune b/hack/socket/dune new file mode 100644 index 00000000000..5f10d49f167 --- /dev/null +++ b/hack/socket/dune @@ -0,0 +1,8 @@ +(library + (name socket) + (wrapped false) + (modules + socket) + (libraries + opaque_digest + sys_utils)) diff --git a/hack/socket/socket.ml b/hack/socket/socket.ml index bd2b12835c0..b4c5261c2aa 100644 --- a/hack/socket/socket.ml +++ b/hack/socket/socket.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -10,28 +10,29 @@ (* Initializes the unix domain socket *) let unix_socket sock_name = try - Sys_utils.with_umask 0o111 begin fun () -> - Sys_utils.mkdir_no_fail (Filename.dirname sock_name); - if Sys.file_exists sock_name then Sys.remove sock_name; - let domain, addr = - if Sys.win32 then - Unix.(PF_INET, Unix.ADDR_INET (inet_addr_loopback, 0)) - else - Unix.(PF_UNIX, Unix.ADDR_UNIX sock_name) in - let sock = Unix.socket domain Unix.SOCK_STREAM 0 in - let () = Unix.set_close_on_exec sock in - let () = Unix.setsockopt sock Unix.SO_REUSEADDR true in - let () = Unix.bind sock addr in - let () = Unix.listen sock 10 in - let () = - match Unix.getsockname sock with - | Unix.ADDR_UNIX _ -> () - | Unix.ADDR_INET (_, port) -> + Sys_utils.with_umask 0o111 (fun () -> + Sys_utils.mkdir_no_fail (Filename.dirname sock_name); + if Sys.file_exists sock_name then Sys.remove sock_name; + let (domain, addr) = + if Sys.win32 then + Unix.(PF_INET, Unix.ADDR_INET (inet_addr_loopback, 0)) + else + Unix.(PF_UNIX, Unix.ADDR_UNIX sock_name) + in + let sock = Unix.socket domain Unix.SOCK_STREAM 0 in + let () = Unix.set_close_on_exec sock in + let () = Unix.setsockopt sock Unix.SO_REUSEADDR true in + let () = Unix.bind sock addr in + let () = Unix.listen sock 10 in + let () = + match Unix.getsockname sock with + | Unix.ADDR_UNIX _ -> () + | Unix.ADDR_INET (_, port) -> let oc = open_out_bin sock_name in output_binary_int oc port; - close_out oc in - sock - end + close_out oc + in + sock) with Unix.Unix_error (err, _, _) -> Printf.eprintf "%s\n" (Unix.error_message err); Exit_status.(exit Socket_error) @@ -40,42 +41,48 @@ let unix_socket sock_name = * address. This appears to be 104 chars on mac os x and 108 chars on my * centos box. *) let max_addr_length = 103 + let min_name_length = 17 let get_path path = (* Path will resolve the realpath, in case two processes are referring to the * same socket using different paths (like with symlinks *) let path = path |> Path.make |> Path.to_string in - let dir = (Filename.dirname path)^"/" in + let dir = Filename.dirname path ^ "/" in let filename = Filename.basename path in let root_part = Filename.chop_extension filename in let root_length = String.length root_part in let extension_length = String.length filename - root_length in let extension = String.sub filename root_length extension_length in - (* It's possible that the directory path is too long. If so, let's give up and * use /tmp/ *) let dir = - if String.length dir > max_addr_length - min_name_length - then Filename.get_temp_dir_name () - else dir in + if String.length dir > max_addr_length - min_name_length then + Filename.get_temp_dir_name () + else + dir + in let max_root_part_length = - max_addr_length - (String.length dir) - extension_length in + max_addr_length - String.length dir - extension_length + in let root_part = - if root_length > max_root_part_length - then begin + if root_length > max_root_part_length then let len = String.length root_part in let prefix = String.sub root_part 0 5 in let suffix = String.sub root_part (len - 5) 5 in let digest = OpaqueDigest.to_hex (OpaqueDigest.string root_part) in (* 5 char prefix + 5 char suffix + 2 periods *) let max_digest_length = max_root_part_length - 12 in - let digest_part = if String.length digest > max_digest_length - then String.sub digest 0 max_digest_length - else digest in + let digest_part = + if String.length digest > max_digest_length then + String.sub digest 0 max_digest_length + else + digest + in prefix ^ "." ^ digest_part ^ "." 
^ suffix - end else root_part in + else + root_part + in Filename.concat dir (Printf.sprintf "%s%s" root_part extension) -let init_unix_socket socket_file = - unix_socket (get_path socket_file) +let init_unix_socket socket_file = unix_socket (get_path socket_file) diff --git a/hack/stubs/eventLogger.ml b/hack/stubs/eventLogger.ml deleted file mode 100644 index 7463cb21a47..00000000000 --- a/hack/stubs/eventLogger.ml +++ /dev/null @@ -1,48 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type init_settings = { - scuba_table_name : string; - (** File descriptors for the logger daemon's stdout and stderr. *) - log_out : Unix.file_descr; - log_err : Unix.file_descr; -} - -type init_mode = - (** Sends everything to /dev/null. *) - | Event_logger_fake - | Event_logger_real of init_settings - -let init ?exit_on_parent_exit:_ ?log_pid:_ ?init_id:_ _ _ = () -let disable_logging _ = () -let log _ = () -let logger_pid () = None -let set_init_type _ = () -let log_if_initialized _ = () -let master_exception _ _ = () -let worker_exception _ = () -let sharedmem_gc_ran _ _ _ _ = () -let sharedmem_init_done _ = () -let sharedmem_failed_anonymous_memfd_init _ = () -let sharedmem_failed_to_use_shm_dir ~shm_dir:_ ~reason:_ = () -let sharedmem_less_than_minimum_available - ~shm_dir:_ - ~shm_min_avail:_ - ~avail:_ = () -let find_done ~time_taken:_ ~name:_ = () -let log_gc_stats () = () -let flush _ = () -let watchman_error _ = () -let watchman_warning _ = () -let watchman_died_caught _ = () -let watchman_uncaught_failure _ = () -let watchman_connection_reestablished _ = () -let watchman_connection_reestablishment_failed _ = () -let watchman_timeout _ = () -let dfind_ready _ _ = () diff --git a/hack/stubs/logging/common/dune b/hack/stubs/logging/common/dune new file mode 100644 index 00000000000..482f93233dd --- /dev/null +++ b/hack/stubs/logging/common/dune @@ -0,0 +1,3 @@ +(library + (name logging_common_stubs) + (wrapped false)) diff --git a/hack/stubs/logging/common/eventLogger.ml b/hack/stubs/logging/common/eventLogger.ml new file mode 100644 index 00000000000..568e566d425 --- /dev/null +++ b/hack/stubs/logging/common/eventLogger.ml @@ -0,0 +1,72 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type init_settings = { + scuba_table_name: string; + (* File descriptors for the logger daemon's stdout and stderr. *) + log_out: Unix.file_descr; + log_err: Unix.file_descr; +} + +type init_mode = + (* Sends everything to /dev/null. 
*) + | Event_logger_fake + | Event_logger_real of init_settings + +let init ?exit_on_parent_exit:_ ?log_pid:_ ?init_id:_ _ _ = () + +let disable_logging _ = () + +let log _ = () + +let logger_pid () = None + +let set_init_type _ = () + +let log_if_initialized _ = () + +let master_exception _ ~stack:_ = () + +let worker_exception _ = () + +let sharedmem_gc_ran _ _ _ _ = () + +let sharedmem_init_done _ = () + +let sharedmem_failed_anonymous_memfd_init _ = () + +let sharedmem_failed_to_use_shm_dir ~shm_dir:_ ~reason:_ = () + +let sharedmem_less_than_minimum_available ~shm_dir:_ ~shm_min_avail:_ ~avail:_ + = + () + +let sharedmem_access_sample ~heap_name:_ ~key:_ ~write_time:_ = () + +let find_done ~time_taken:_ ~name:_ = () + +let log_gc_stats () = () + +let flush _ = () + +let watchman_error _ = () + +let watchman_warning _ = () + +let watchman_died_caught _ = () + +let watchman_uncaught_failure _ = () + +let watchman_connection_reestablished _ = () + +let watchman_connection_reestablishment_failed _ = () + +let watchman_timeout _ = () + +let dfind_ready _ _ = () diff --git a/hack/stubs/logging/common/scuba.ml b/hack/stubs/logging/common/scuba.ml new file mode 100644 index 00000000000..8bc3a598bde --- /dev/null +++ b/hack/stubs/logging/common/scuba.ml @@ -0,0 +1,24 @@ +(** + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +module Table = struct + let of_name _ = () +end + +let new_sample _ _ = () + +let add_int _ _ _ = () + +let add_normal _ _ _ = () + +let add_denorm _ _ _ = () + +let add_normvector _ _ _ = () + +let add_tags _ _ _ = () diff --git a/hack/third-party/avl/dune b/hack/third-party/avl/dune new file mode 100644 index 00000000000..ad42b974efb --- /dev/null +++ b/hack/third-party/avl/dune @@ -0,0 +1,4 @@ +(library + (name avl) + (wrapped false) + (flags (:standard -w @a-4-27))) diff --git a/hack/third-party/avl/monoidAvl.ml b/hack/third-party/avl/monoidAvl.ml index fa3f03e097c..4f78b27266d 100644 --- a/hack/third-party/avl/monoidAvl.ml +++ b/hack/third-party/avl/monoidAvl.ml @@ -33,320 +33,400 @@ * node we know how old the youngest element in a node is. * If the youngest element is too old, we can cut the branch. 
* (cf function walk in monoidAvl.ml) -*) - - -module type MonoidOrderedType = - sig - type elt - val compare: elt -> elt -> int - - type monoelt - val neutral: monoelt - (* This better be associative *) - val compose: monoelt -> monoelt -> monoelt - val make: elt -> monoelt - end - -module type S = - sig - type elt - type monoelt - - type t - - val empty: t - val is_empty: t -> bool - val mem: elt -> t -> bool - val add: elt -> t -> t - val singleton: elt -> t - val remove: elt -> t -> t - val union: t -> t -> t - val inter: t -> t -> t - val diff: t -> t -> t - val compare: t -> t -> int - val equal: t -> t -> bool - val subset: t -> t -> bool - val iter: (elt -> unit) -> t -> unit - val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a - val for_all: (elt -> bool) -> t -> bool - val exists: (elt -> bool) -> t -> bool - val filter: (elt -> bool) -> t -> t - val partition: (elt -> bool) -> t -> t * t - val cardinal: t -> int - val elements: t -> elt list - val min_elt: t -> elt - val max_elt: t -> elt - val choose: t -> elt - val split: elt -> t -> t * bool * t - val walk: (monoelt -> bool) -> (elt -> unit) -> t -> unit - end - -module Make(Ord: MonoidOrderedType) = struct + *) + +module type MonoidOrderedType = sig + type elt + + val compare : elt -> elt -> int + + type monoelt + + val neutral : monoelt + + (* This better be associative *) + val compose : monoelt -> monoelt -> monoelt + + val make : elt -> monoelt +end + +module type S = sig + type elt + + type monoelt + + type t + + val empty : t + + val is_empty : t -> bool + + val mem : elt -> t -> bool + + val add : elt -> t -> t + + val singleton : elt -> t + + val remove : elt -> t -> t + + val union : t -> t -> t + + val inter : t -> t -> t + + val diff : t -> t -> t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val subset : t -> t -> bool + + val iter : (elt -> unit) -> t -> unit + + val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a + + val for_all : (elt -> bool) -> t -> bool + + val exists : (elt -> bool) -> t -> bool + + val filter : (elt -> bool) -> t -> t + + val partition : (elt -> bool) -> t -> t * t + + val cardinal : t -> int + + val elements : t -> elt list + + val min_elt : t -> elt + + val max_elt : t -> elt + + val choose : t -> elt + + val split : elt -> t -> t * bool * t + + val walk : (monoelt -> bool) -> (elt -> unit) -> t -> unit +end + +module Make (Ord : MonoidOrderedType) = struct type elt = Ord.elt + type monoelt = Ord.monoelt - type t = Empty | Node of monoelt * t * elt * t * int + + type t = + | Empty + | Node of monoelt * t * elt * t * int let height = function - Empty -> 0 - | Node(_, _, _, _, h) -> h + | Empty -> 0 + | Node (_, _, _, _, h) -> h let get_monoelt = function | Empty -> Ord.neutral | Node (x, _, _, _, _) -> x let create l v r = - let hl = match l with Empty -> 0 | Node(_, _,_,_,h) -> h in - let hr = match r with Empty -> 0 | Node(_, _,_,_,h) -> h in + let hl = + match l with + | Empty -> 0 + | Node (_, _, _, _, h) -> h + in + let hr = + match r with + | Empty -> 0 + | Node (_, _, _, _, h) -> h + in let monoelt = Ord.make v in let monoelt = Ord.compose (get_monoelt l) monoelt in let monoelt = Ord.compose monoelt (get_monoelt r) in - Node(monoelt, l, v, r, (if hl >= hr then hl + 1 else hr + 1)) + Node + ( monoelt, + l, + v, + r, + if hl >= hr then + hl + 1 + else + hr + 1 ) let bal l v r = - let hl = match l with Empty -> 0 | Node(_, _,_,_,h) -> h in - let hr = match r with Empty -> 0 | Node(_, _,_,_,h) -> h in - if hl > hr + 2 then begin + let hl = + match l with + | Empty -> 0 + | 
Node (_, _, _, _, h) -> h + in + let hr = + match r with + | Empty -> 0 + | Node (_, _, _, _, h) -> h + in + if hl > hr + 2 then match l with - Empty -> invalid_arg "Set.bal" - | Node(_, ll, lv, lr, _) -> - if height ll >= height lr then - create ll lv (create lr v r) - else begin - match lr with - Empty -> invalid_arg "Set.bal" - | Node(_, lrl, lrv, lrr, _)-> - create (create ll lv lrl) lrv (create lrr v r) - end - end else if hr > hl + 2 then begin + | Empty -> invalid_arg "Set.bal" + | Node (_, ll, lv, lr, _) -> + if height ll >= height lr then + create ll lv (create lr v r) + else ( + match lr with + | Empty -> invalid_arg "Set.bal" + | Node (_, lrl, lrv, lrr, _) -> + create (create ll lv lrl) lrv (create lrr v r) + ) + else if hr > hl + 2 then match r with - Empty -> invalid_arg "Set.bal" - | Node(_, rl, rv, rr, _) -> - if height rr >= height rl then - create (create l v rl) rv rr - else begin - match rl with - Empty -> invalid_arg "Set.bal" - | Node(_, rll, rlv, rlr, _) -> - create (create l v rll) rlv (create rlr rv rr) - end - end else + | Empty -> invalid_arg "Set.bal" + | Node (_, rl, rv, rr, _) -> + if height rr >= height rl then + create (create l v rl) rv rr + else ( + match rl with + | Empty -> invalid_arg "Set.bal" + | Node (_, rll, rlv, rlr, _) -> + create (create l v rll) rlv (create rlr rv rr) + ) + else create l v r - let rec add x = function - Empty -> Node(Ord.make x, Empty, x, Empty, 1) - | Node(_, l, v, r, h) -> - let c = Ord.compare x v in - if c = 0 then create l x r else - if c < 0 then bal (add x l) v r else bal l v (add x r) + | Empty -> Node (Ord.make x, Empty, x, Empty, 1) + | Node (_, l, v, r, _) -> + let c = Ord.compare x v in + if c = 0 then + create l x r + else if c < 0 then + bal (add x l) v r + else + bal l v (add x r) let rec join l v r = match (l, r) with - (Empty, _) -> add v r + | (Empty, _) -> add v r | (_, Empty) -> add v l - | (Node(_, ll, lv, lr, lh), Node(_, rl, rv, rr, rh)) -> - if lh > rh + 2 then bal ll lv (join lr v r) else - if rh > lh + 2 then bal (join l v rl) rv rr else + | (Node (_, ll, lv, lr, lh), Node (_, rl, rv, rr, rh)) -> + if lh > rh + 2 then + bal ll lv (join lr v r) + else if rh > lh + 2 then + bal (join l v rl) rv rr + else create l v r - let rec min_elt = function - Empty -> raise Not_found - | Node(_, Empty, v, r, _) -> v - | Node(_, l, v, r, _) -> min_elt l + | Empty -> raise Not_found + | Node (_, Empty, v, _, _) -> v + | Node (_, l, _, _, _) -> min_elt l let rec max_elt = function - Empty -> raise Not_found - | Node(_, l, v, Empty, _) -> v - | Node(_, l, v, r, _) -> max_elt r - + | Empty -> raise Not_found + | Node (_, _, v, Empty, _) -> v + | Node (_, _, _, r, _) -> max_elt r let rec remove_min_elt = function - Empty -> invalid_arg "Set.remove_min_elt" - | Node(_, Empty, v, r, _) -> r - | Node(_, l, v, r, _) -> bal (remove_min_elt l) v r - + | Empty -> invalid_arg "Set.remove_min_elt" + | Node (_, Empty, _, r, _) -> r + | Node (_, l, v, r, _) -> bal (remove_min_elt l) v r let merge t1 t2 = match (t1, t2) with - (Empty, t) -> t + | (Empty, t) -> t | (t, Empty) -> t | (_, _) -> bal t1 (min_elt t2) (remove_min_elt t2) - let concat t1 t2 = match (t1, t2) with - (Empty, t) -> t + | (Empty, t) -> t | (t, Empty) -> t | (_, _) -> join t1 (min_elt t2) (remove_min_elt t2) - let rec split x = function - Empty -> - (Empty, false, Empty) - | Node(_, l, v, r, _) -> - let c = Ord.compare x v in - if c = 0 then (l, true, r) - else if c < 0 then - let (ll, pres, rl) = split x l in (ll, pres, join rl v r) - else - let (lr, pres, rr) = 
split x r in (join l v lr, pres, rr) - + | Empty -> (Empty, false, Empty) + | Node (_, l, v, r, _) -> + let c = Ord.compare x v in + if c = 0 then + (l, true, r) + else if c < 0 then + let (ll, pres, rl) = split x l in + (ll, pres, join rl v r) + else + let (lr, pres, rr) = split x r in + (join l v lr, pres, rr) let empty = Empty - let is_empty = function Empty -> true | _ -> false + let is_empty = function + | Empty -> true + | _ -> false let rec mem x = function - Empty -> false - | Node(_, l, v, r, _) -> - let c = Ord.compare x v in - c = 0 || mem x (if c < 0 then l else r) - - let singleton x = Node(Ord.make x, Empty, x, Empty, 1) + | Empty -> false + | Node (_, l, v, r, _) -> + let c = Ord.compare x v in + c = 0 + || mem + x + ( if c < 0 then + l + else + r ) + + let singleton x = Node (Ord.make x, Empty, x, Empty, 1) let rec remove x = function - Empty -> Empty - | Node(_, l, v, r, _) -> - let c = Ord.compare x v in - if c = 0 then merge l r else - if c < 0 then bal (remove x l) v r else bal l v (remove x r) + | Empty -> Empty + | Node (_, l, v, r, _) -> + let c = Ord.compare x v in + if c = 0 then + merge l r + else if c < 0 then + bal (remove x l) v r + else + bal l v (remove x r) let rec union s1 s2 = match (s1, s2) with - (Empty, t2) -> t2 + | (Empty, t2) -> t2 | (t1, Empty) -> t1 - | (Node(_, l1, v1, r1, h1), Node(_, l2, v2, r2, h2)) -> - if h1 >= h2 then - if h2 = 1 then add v2 s1 else begin - let (l2, _, r2) = split v1 s2 in - join (union l1 l2) v1 (union r1 r2) - end + | (Node (_, l1, v1, r1, h1), Node (_, l2, v2, r2, h2)) -> + if h1 >= h2 then + if h2 = 1 then + add v2 s1 else - if h1 = 1 then add v1 s2 else begin - let (l1, _, r1) = split v2 s1 in - join (union l1 l2) v2 (union r1 r2) - end + let (l2, _, r2) = split v1 s2 in + join (union l1 l2) v1 (union r1 r2) + else if h1 = 1 then + add v1 s2 + else + let (l1, _, r1) = split v2 s1 in + join (union l1 l2) v2 (union r1 r2) let rec inter s1 s2 = match (s1, s2) with - (Empty, t2) -> Empty - | (t1, Empty) -> Empty - | (Node(_, l1, v1, r1, _), t2) -> - match split v1 t2 with - (l2, false, r2) -> - concat (inter l1 l2) (inter r1 r2) - | (l2, true, r2) -> - join (inter l1 l2) v1 (inter r1 r2) + | (Empty, _) -> Empty + | (_, Empty) -> Empty + | (Node (_, l1, v1, r1, _), t2) -> + (match split v1 t2 with + | (l2, false, r2) -> concat (inter l1 l2) (inter r1 r2) + | (l2, true, r2) -> join (inter l1 l2) v1 (inter r1 r2)) let rec diff s1 s2 = match (s1, s2) with - (Empty, t2) -> Empty + | (Empty, _) -> Empty | (t1, Empty) -> t1 - | (Node(_, l1, v1, r1, _), t2) -> - match split v1 t2 with - (l2, false, r2) -> - join (diff l1 l2) v1 (diff r1 r2) - | (l2, true, r2) -> - concat (diff l1 l2) (diff r1 r2) + | (Node (_, l1, v1, r1, _), t2) -> + (match split v1 t2 with + | (l2, false, r2) -> join (diff l1 l2) v1 (diff r1 r2) + | (l2, true, r2) -> concat (diff l1 l2) (diff r1 r2)) - type enumeration = End | More of elt * t * enumeration + type enumeration = + | End + | More of elt * t * enumeration let rec cons_enum s e = match s with - Empty -> e - | Node(_, l, v, r, _) -> cons_enum l (More(v, r, e)) + | Empty -> e + | Node (_, l, v, r, _) -> cons_enum l (More (v, r, e)) let rec compare_aux e1 e2 = match (e1, e2) with - (End, End) -> 0 - | (End, _) -> -1 + | (End, End) -> 0 + | (End, _) -> -1 | (_, End) -> 1 - | (More(v1, r1, e1), More(v2, r2, e2)) -> - let c = Ord.compare v1 v2 in - if c <> 0 - then c - else compare_aux (cons_enum r1 e1) (cons_enum r2 e2) + | (More (v1, r1, e1), More (v2, r2, e2)) -> + let c = Ord.compare v1 v2 in + if c <> 
0 then + c + else + compare_aux (cons_enum r1 e1) (cons_enum r2 e2) - let compare s1 s2 = - compare_aux (cons_enum s1 End) (cons_enum s2 End) + let compare s1 s2 = compare_aux (cons_enum s1 End) (cons_enum s2 End) - let equal s1 s2 = - compare s1 s2 = 0 + let equal s1 s2 = compare s1 s2 = 0 let rec subset s1 s2 = match (s1, s2) with - Empty, _ -> - true - | _, Empty -> - false - | Node (_, l1, v1, r1, _), (Node (_, l2, v2, r2, _) as t2) -> - let c = Ord.compare v1 v2 in - if c = 0 then - subset l1 l2 && subset r1 r2 - else if c < 0 then - let node = create l1 v1 Empty in - subset node l2 && subset r1 t2 - else - let node = create Empty v1 r1 in - subset node r2 && subset l1 t2 + | (Empty, _) -> true + | (_, Empty) -> false + | (Node (_, l1, v1, r1, _), (Node (_, l2, v2, r2, _) as t2)) -> + let c = Ord.compare v1 v2 in + if c = 0 then + subset l1 l2 && subset r1 r2 + else if c < 0 then + let node = create l1 v1 Empty in + subset node l2 && subset r1 t2 + else + let node = create Empty v1 r1 in + subset node r2 && subset l1 t2 let rec iter f = function - Empty -> () - | Node(_, l, v, r, _) -> iter f l; f v; iter f r + | Empty -> () + | Node (_, l, v, r, _) -> + iter f l; + f v; + iter f r let rec fold f s accu = match s with - Empty -> accu - | Node(_, l, v, r, _) -> fold f r (f v (fold f l accu)) + | Empty -> accu + | Node (_, l, v, r, _) -> fold f r (f v (fold f l accu)) let rec for_all p = function - Empty -> true - | Node(_, l, v, r, _) -> p v && for_all p l && for_all p r + | Empty -> true + | Node (_, l, v, r, _) -> p v && for_all p l && for_all p r let rec exists p = function - Empty -> false - | Node(_, l, v, r, _) -> p v || exists p l || exists p r + | Empty -> false + | Node (_, l, v, r, _) -> p v || exists p l || exists p r let filter p s = let rec filt accu = function | Empty -> accu - | Node(_, l, v, r, _) -> - filt (filt (if p v then add v accu else accu) l) r in + | Node (_, l, v, r, _) -> + filt + (filt + ( if p v then + add v accu + else + accu ) + l) + r + in filt Empty s let partition p s = - let rec part (t, f as accu) = function + let rec part ((t, f) as accu) = function | Empty -> accu - | Node(_, l, v, r, _) -> - part (part (if p v then (add v t, f) else (t, add v f)) l) r in + | Node (_, l, v, r, _) -> + part + (part + ( if p v then + (add v t, f) + else + (t, add v f) ) + l) + r + in part (Empty, Empty) s let rec cardinal = function - Empty -> 0 - | Node(_, l, v, r, _) -> cardinal l + 1 + cardinal r + | Empty -> 0 + | Node (_, l, _, r, _) -> cardinal l + 1 + cardinal r let rec elements_aux accu = function - Empty -> accu - | Node(_, l, v, r, _) -> elements_aux (v :: elements_aux accu r) l + | Empty -> accu + | Node (_, l, v, r, _) -> elements_aux (v :: elements_aux accu r) l - let elements s = - elements_aux [] s + let elements s = elements_aux [] s let choose = min_elt let rec walk cut_branch work t = match t with | Empty -> () - | Node (elt, _, _, _, _) when cut_branch elt -> + | Node (elt, _, _, _, _) when cut_branch elt -> () + | Node (_, l, key, r, _) -> + walk cut_branch work l; + if cut_branch (Ord.make key) then () - | Node (elt, l, key, r, _) -> - walk cut_branch work l; - if cut_branch (Ord.make key) - then () - else work key; - walk cut_branch work r - + else + work key; + walk cut_branch work r end - diff --git a/hack/third-party/avl/monoidAvl.mli b/hack/third-party/avl/monoidAvl.mli index 64eb4c30a63..704af59623b 100644 --- a/hack/third-party/avl/monoidAvl.mli +++ b/hack/third-party/avl/monoidAvl.mli @@ -33,51 +33,79 @@ * node we know how old the 
youngest element in a node is. * If the youngest element is too old, we can cut the branch. * (cf function walk in monoidAvl.ml) -*) - -module type MonoidOrderedType = - sig - type elt - val compare : elt -> elt -> int - - type monoelt - val neutral: monoelt - val compose: monoelt -> monoelt -> monoelt - val make: elt -> monoelt - end - -module type S = - sig - type elt - type monoelt - type t - val empty: t - val is_empty: t -> bool - val mem: elt -> t -> bool - val add: elt -> t -> t - val singleton: elt -> t - val remove: elt -> t -> t - val union: t -> t -> t - val inter: t -> t -> t - val diff: t -> t -> t - val compare: t -> t -> int - val equal: t -> t -> bool - val subset: t -> t -> bool - val iter: (elt -> unit) -> t -> unit - val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a - val for_all: (elt -> bool) -> t -> bool - val exists: (elt -> bool) -> t -> bool - val filter: (elt -> bool) -> t -> t - val partition: (elt -> bool) -> t -> t * t - val cardinal: t -> int - val elements: t -> elt list - val min_elt: t -> elt - val max_elt: t -> elt - val choose: t -> elt - val split: elt -> t -> t * bool * t - val walk: (monoelt -> bool) -> (elt -> unit) -> t -> unit - end - -module Make (Ord : MonoidOrderedType) : S -with type elt = Ord.elt -with type monoelt = Ord.monoelt + *) + +module type MonoidOrderedType = sig + type elt + + val compare : elt -> elt -> int + + type monoelt + + val neutral : monoelt + + val compose : monoelt -> monoelt -> monoelt + + val make : elt -> monoelt +end + +module type S = sig + type elt + + type monoelt + + type t + + val empty : t + + val is_empty : t -> bool + + val mem : elt -> t -> bool + + val add : elt -> t -> t + + val singleton : elt -> t + + val remove : elt -> t -> t + + val union : t -> t -> t + + val inter : t -> t -> t + + val diff : t -> t -> t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val subset : t -> t -> bool + + val iter : (elt -> unit) -> t -> unit + + val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a + + val for_all : (elt -> bool) -> t -> bool + + val exists : (elt -> bool) -> t -> bool + + val filter : (elt -> bool) -> t -> t + + val partition : (elt -> bool) -> t -> t * t + + val cardinal : t -> int + + val elements : t -> elt list + + val min_elt : t -> elt + + val max_elt : t -> elt + + val choose : t -> elt + + val split : elt -> t -> t * bool * t + + val walk : (monoelt -> bool) -> (elt -> unit) -> t -> unit +end + +module Make (Ord : MonoidOrderedType) : + S with type elt = Ord.elt with type monoelt = Ord.monoelt diff --git a/hack/third-party/core/commutative_group.ml b/hack/third-party/core/commutative_group.ml index aea49007153..43744423ce9 100644 --- a/hack/third-party/core/commutative_group.ml +++ b/hack/third-party/core/commutative_group.ml @@ -9,9 +9,11 @@ *) module type S = sig - type t (* an element of the group *) + type t (* an element of the group *) val zero : t - val (+) : t -> t -> t - val (-) : t -> t -> t + + val ( + ) : t -> t -> t + + val ( - ) : t -> t -> t end diff --git a/hack/third-party/core/container.ml b/hack/third-party/core/container.ml index 11d547f060a..1aa25d38ea1 100644 --- a/hack/third-party/core/container.ml +++ b/hack/third-party/core/container.ml @@ -1,19 +1,30 @@ -let fold_count fold t ~f = fold t ~init:0 ~f:(fun n a -> if f a then n + 1 else n) +let fold_count fold t ~f = + fold t ~init:0 ~f:(fun n a -> + if f a then + n + 1 + else + n) -let fold_sum (type a) (module M : Commutative_group.S with type t = a) fold t ~f = - fold t ~init:M.zero ~f:(fun n a -> M.(+) n (f a)) -;; 
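(* An illustrative instantiation of MonoidAvl, given as a sketch rather than
 * code from this patch: elements are (mtime, path) pairs ordered by path,
 * and every subtree is annotated with the newest mtime it contains, so
 * [walk] can prune any branch whose newest entry is older than a cutoff,
 * which is the pruning the header comment above describes. The names
 * [FileSet] and [files_changed_since] are assumptions made for the
 * example. *)
module FileSet = MonoidAvl.Make (struct
  type elt = float * string (* (mtime, path) *)

  let compare (_, p1) (_, p2) = String.compare p1 p2

  type monoelt = float (* newest mtime anywhere in the subtree *)

  let neutral = neg_infinity

  let compose = max

  let make (mtime, _) = mtime
end)

let files_changed_since cutoff set =
  let acc = ref [] in
  (* Skip a whole subtree when even its newest element predates the cutoff;
   * otherwise visit each element that is itself recent enough. *)
  FileSet.walk
    (fun newest -> newest < cutoff)
    (fun (_, path) -> acc := path :: !acc)
    set;
  List.rev !acc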
+let fold_sum + (type a) (module M : Commutative_group.S with type t = a) fold t ~f = + fold t ~init:M.zero ~f:(fun n a -> M.( + ) n (f a)) let fold_min fold t ~cmp = fold t ~init:None ~f:(fun acc elt -> - match acc with - | None -> Some elt - | Some min -> if cmp min elt > 0 then Some elt else acc) -;; + match acc with + | None -> Some elt + | Some min -> + if cmp min elt > 0 then + Some elt + else + acc) let fold_max fold t ~cmp = fold t ~init:None ~f:(fun acc elt -> - match acc with - | None -> Some elt - | Some max -> if cmp max elt < 0 then Some elt else acc) -;; + match acc with + | None -> Some elt + | Some max -> + if cmp max elt < 0 then + Some elt + else + acc) diff --git a/hack/third-party/core/core_list.ml b/hack/third-party/core/core_list.ml index 2e2db7e5d0b..ccb84237418 100644 --- a/hack/third-party/core/core_list.ml +++ b/hack/third-party/core/core_list.ml @@ -10,17 +10,31 @@ end include T +module Or_unequal_lengths = struct + type 'a t = + | Ok of 'a + | Unequal_lengths +end + let of_list t = t -let range ?(stride=1) ?(start=`inclusive) ?(stop=`exclusive) start_i stop_i = - if stride = 0 then - invalid_arg "Core_list.range: stride must be non-zero"; +let range + ?(stride = 1) ?(start = `inclusive) ?(stop = `exclusive) start_i stop_i = + if stride = 0 then invalid_arg "Core_list.range: stride must be non-zero"; + (* Generate the range from the last element, so that we do not need to rev it *) let rec loop last counter accum = - if counter <= 0 then accum - else loop (last - stride) (counter - 1) (last :: accum) + if counter <= 0 then + accum + else + loop (last - stride) (counter - 1) (last :: accum) + in + let stride_sign = + if stride > 0 then + 1 + else + -1 in - let stride_sign = if stride > 0 then 1 else -1 in let start = match start with | `inclusive -> start_i @@ -33,144 +47,176 @@ let range ?(stride=1) ?(start=`inclusive) ?(stop=`exclusive) start_i stop_i = in let num_elts = (stop - start + stride - stride_sign) / stride in loop (start + (stride * (num_elts - 1))) num_elts [] -;; (* Standard functions *) let length = List.length + let hd_exn = List.hd + let tl_exn = List.tl let hd t = match t with | [] -> None | x :: _ -> Some x -;; let tl t = match t with | [] -> None | _ :: t' -> Some t' -;; let nth t n = - if n < 0 then None else - let rec nth_aux t n = - match t with - | [] -> None - | a :: t -> if n = 0 then Some a else nth_aux t (n-1) - in nth_aux t n -;; + if n < 0 then + None + else + let rec nth_aux t n = + match t with + | [] -> None + | a :: t -> + if n = 0 then + Some a + else + nth_aux t (n - 1) + in + nth_aux t n let nth_exn t n = match nth t n with | None -> - invalid_argf "List.nth_exn %d called on list of length %d" - n (length t) () + invalid_argf "List.nth_exn %d called on list of length %d" n (length t) () | Some a -> a -;; let rev_append = List.rev_append let rev = function - | [] | [_] as res -> res + | ([] | [_]) as res -> res | x :: y :: rest -> rev_append rest [y; x] let unordered_append l1 l2 = - match l1, l2 with - | [], l | l, [] -> l - | _ -> List.rev_append l1 l2 + match (l1, l2) with + | ([], l) + | (l, []) -> + l + | _ -> List.rev_append l1 l2 let rev_map t ~f = List.rev_map t ~f exception Length_mismatch of string * int * int -let check_length2 name l1 l2 = +let check_length2_exn name l1 l2 = let n1 = length l1 in let n2 = length l2 in if n1 <> n2 then raise (invalid_argf "length mismatch in %s: %d <> %d " name n1 n2 ()) -;; + +let check_length2 l1 l2 ~f = + if length l1 <> length l2 then + Or_unequal_lengths.Unequal_lengths + else + 
Or_unequal_lengths.Ok (f l1 l2) let check_length3 name l1 l2 l3 = let n1 = length l1 in let n2 = length l2 in let n3 = length l3 in if n1 <> n2 || n2 <> n3 then - raise (invalid_argf "length mismatch in %s: %d <> %d || %d <> %d" - name n1 n2 n2 n3 ()) -;; + raise + (invalid_argf + "length mismatch in %s: %d <> %d || %d <> %d" + name + n1 + n2 + n2 + n3 + ()) + +let iter2 l1 l2 ~f = check_length2 l1 l2 ~f:(List.iter2 ~f) let iter2_exn l1 l2 ~f = - check_length2 "iter2_exn" l1 l2; - List.iter2 l1 l2 ~f; -;; + check_length2_exn "iter2_exn" l1 l2; + List.iter2 l1 l2 ~f -let rev_map2_exn l1 l2 ~f = - check_length2 "rev_map2_exn" l1 l2; - List.rev_map2 l1 l2 ~f; -;; +let rev_map2 l1 l2 ~f = check_length2 l1 l2 ~f:(List.rev_map2 ~f) + +let rev_map2_exn l1 l2 ~f = + check_length2_exn "rev_map2_exn" l1 l2; + List.rev_map2 l1 l2 ~f + +let fold2 l1 l2 ~init ~f = check_length2 l1 l2 ~f:(List.fold_left2 ~init ~f) let fold2_exn l1 l2 ~init ~f = - check_length2 "fold2_exn" l1 l2; - List.fold_left2 l1 l2 ~init ~f; -;; + check_length2_exn "fold2_exn" l1 l2; + List.fold_left2 l1 l2 ~init ~f + +let for_all2 l1 l2 ~f = check_length2 l1 l2 ~f:(List.for_all2 ~f) let for_all2_exn l1 l2 ~f = - check_length2 "for_all2_exn" l1 l2; - List.for_all2 l1 l2 ~f; -;; + check_length2_exn "for_all2_exn" l1 l2; + List.for_all2 l1 l2 ~f + +let exists2 l1 l2 ~f = check_length2 l1 l2 ~f:(List.exists2 ~f) let exists2_exn l1 l2 ~f = - check_length2 "exists2_exn" l1 l2; - List.exists2 l1 l2 ~f; -;; + check_length2_exn "exists2_exn" l1 l2; + List.exists2 l1 l2 ~f -let mem ?(equal = (=)) t a = List.exists t ~f:(equal a) +let mem ?(equal = ( = )) t a = List.exists t ~f:(equal a) (* This is a copy of the standard library assq function. *) let rec assq x = function - [] -> raise Not_found - | (a,b)::l -> if a == x then b else assq x l + | [] -> raise Not_found + | (a, b) :: l -> + if a == x then + b + else + assq x l (* This is a copy of the code from the standard library, with an extra eta-expansion to avoid creating partial closures (showed up for List.filter in profiling). *) let rev_filter t ~f = let rec find ~f accu = function | [] -> accu - | x :: l -> if f x then find ~f (x :: accu) l else find ~f accu l + | x :: l -> + if f x then + find ~f (x :: accu) l + else + find ~f accu l in find ~f [] t -;; let filter t ~f = rev (rev_filter t ~f) let sort = List.sort + let stable_sort = List.stable_sort + let fast_sort = List.fast_sort (* 4.02 forgot to add sort_uniq to ListLabels, but it was added in 4.03: https://github.com/ocaml/ocaml/commit/512d128918544ae1da0c808e811f3a7f177524d2 *) -let sort_uniq ~(cmp:'a -> 'a -> int) (lst:'a list) = +let sort_uniq ~(cmp : 'a -> 'a -> int) (lst : 'a list) = UnlabeledList.sort_uniq cmp lst let find_map t ~f = let rec loop = function | [] -> None | x :: l -> - match f x with - | None -> loop l - | Some _ as r -> r + (match f x with + | None -> loop l + | Some _ as r -> r) in loop t -;; let find t ~f = let rec loop = function | [] -> None - | x :: l -> if f x then Some x else loop l + | x :: l -> + if f x then + Some x + else + loop l in loop t -;; let find_exn t ~f = List.find t ~f @@ -178,20 +224,28 @@ let findi t ~f = let rec loop i t = match t with | [] -> None - | x :: l -> if f i x then Some (i, x) else loop (i + 1) l + | x :: l -> + if f i x then + Some (i, x) + else + loop (i + 1) l in loop 0 t -;; (** changing the order of arguments on some standard [List] functions. 
*) let exists t ~f = List.exists t ~f + let for_all t ~f = List.for_all t ~f + let iter t ~f = List.iter t ~f (** For the container interface. *) let fold t ~init ~f = List.fold_left t ~f ~init + let fold_left = fold + let to_array = Hack_caml.Array.of_list + let to_list t = t (** Tail recursive versions of standard [List] module *) @@ -215,17 +269,23 @@ let rec count_append l1 l2 count = match l2 with | [] -> l1 | _ -> - match l1 with - | [] -> l2 - | [x1] -> x1 :: l2 - | [x1; x2] -> x1 :: x2 :: l2 - | [x1; x2; x3] -> x1 :: x2 :: x3 :: l2 + (match l1 with + | [] -> l2 + | [x1] -> x1 :: l2 + | [x1; x2] -> x1 :: x2 :: l2 + | [x1; x2; x3] -> x1 :: x2 :: x3 :: l2 | [x1; x2; x3; x4] -> x1 :: x2 :: x3 :: x4 :: l2 | x1 :: x2 :: x3 :: x4 :: x5 :: tl -> - x1 :: x2 :: x3 :: x4 :: x5 :: - (if count > 1000 - then slow_append tl l2 - else count_append tl l2 (count + 1)) + x1 + :: x2 + :: x3 + :: x4 + :: x5 + :: + ( if count > 1000 then + slow_append tl l2 + else + count_append tl l2 (count + 1) )) let append l1 l2 = count_append l1 l2 0 @@ -258,14 +318,20 @@ let rec count_map ~f l ctr = let f3 = f x3 in let f4 = f x4 in let f5 = f x5 in - f1 :: f2 :: f3 :: f4 :: f5 :: - (if ctr > 1000 - then map_slow ~f tl - else count_map ~f tl (ctr + 1)) + f1 + :: f2 + :: f3 + :: f4 + :: f5 + :: + ( if ctr > 1000 then + map_slow ~f tl + else + count_map ~f tl (ctr + 1) ) let map l ~f = count_map ~f l 0 -let (>>|) l f = map l ~f +let ( >>| ) l f = map l ~f let map2_exn l1 l2 ~f = List.rev (rev_map2_exn l1 l2 ~f) @@ -278,7 +344,6 @@ let rev_map3_exn l1 l2 l3 ~f = | _ -> assert false in loop l1 l2 l3 [] -;; let map3_exn l1 l2 l3 ~f = List.rev (rev_map3_exn l1 l2 l3 ~f) @@ -287,8 +352,7 @@ let rec rev_map_append l1 l2 ~f = | [] -> l2 | h :: t -> rev_map_append ~f t (f h :: l2) -let fold_right l ~f ~init = - fold ~f:(fun a b -> f b a) ~init (List.rev l) +let fold_right l ~f ~init = fold ~f:(fun a b -> f b a) ~init (List.rev l) let unzip list = let rec loop list l1 l2 = @@ -300,7 +364,7 @@ let unzip list = let zip_exn l1 l2 = map2_exn ~f:(fun a b -> (a, b)) l1 l2 -let zip l1 l2 = try Some (zip_exn l1 l2) with _ -> None +let zip l1 l2 = (try Some (zip_exn l1 l2) with _ -> None) (** Additional list operations *) @@ -313,22 +377,28 @@ let rev_mapi l ~f = let mapi l ~f = List.rev (rev_mapi l ~f) - let iteri l ~f = - ignore (fold l ~init:0 ~f:(fun i x -> let () = f i x in i + 1)) -;; + ignore + (fold l ~init:0 ~f:(fun i x -> + let () = f i x in + i + 1)) let foldi t ~f ~init = snd (fold t ~init:(0, init) ~f:(fun (i, acc) v -> (i + 1, f i acc v))) -;; let filteri l ~f = - List.rev (foldi l - ~f:(fun pos acc x -> - if f pos x then x :: acc else acc) - ~init:[]) - -let reduce l ~f = match l with + List.rev + (foldi + l + ~f:(fun pos acc x -> + if f pos x then + x :: acc + else + acc) + ~init:[]) + +let reduce l ~f = + match l with | [] -> None | hd :: tl -> Some (fold ~init:hd ~f tl) @@ -340,13 +410,15 @@ let reduce_exn l ~f = let groupi l ~break = let groups = foldi l ~init:[] ~f:(fun i acc x -> - match acc with - | [] -> [[x]] - | current_group :: tl -> - if break i (hd_exn current_group) x then - [x] :: current_group :: tl (* start new group *) - else - (x :: current_group) :: tl) (* extend current group *) + match acc with + | [] -> [[x]] + | current_group :: tl -> + if break i (hd_exn current_group) x then + [x] :: current_group :: tl + (* start new group *) + else + (x :: current_group) :: tl) + (* extend current group *) in match groups with | [] -> [] @@ -370,17 +442,16 @@ let concat_mapi l ~f = let merge l1 l2 ~cmp = let 
rec loop acc l1 l2 = - match l1,l2 with - | [], l2 -> rev_append acc l2 - | l1, [] -> rev_append acc l1 - | h1 :: t1, h2 :: t2 -> - if cmp h1 h2 <= 0 - then loop (h1 :: acc) t1 l2 - else loop (h2 :: acc) l1 t2 + match (l1, l2) with + | ([], l2) -> rev_append acc l2 + | (l1, []) -> rev_append acc l1 + | (h1 :: t1, h2 :: t2) -> + if cmp h1 h2 <= 0 then + loop (h1 :: acc) t1 l2 + else + loop (h2 :: acc) l1 t2 in loop [] l1 l2 -;; - include struct (* We are explicit about what we import from the general Monad functor so that @@ -388,29 +459,42 @@ include struct *) module Monad = Monad.Make (struct type 'a t = 'a list + let bind x f = concat_map x ~f + let map = `Custom map + let return x = [x] end) + open Monad module Monad_infix = Monad_infix + let ignore = ignore + let join = join + let bind = bind - let (>>=) = bind + + let ( >>= ) = bind + let return = return + let all = all + let all_ignore = all_ignore end (** returns final element of list *) -let rec last_exn list = match list with +let rec last_exn list = + match list with | [x] -> x | _ :: tl -> last_exn tl | [] -> raise (Invalid_argument "Core_list.last") (** optionally returns final element of list *) -let rec last list = match list with +let rec last list = + match list with | [x] -> Some x | _ :: tl -> last tl | [] -> None @@ -422,27 +506,32 @@ let find_consecutive_duplicate t ~equal = let rec loop a1 t = match t with | [] -> None - | a2 :: t -> if equal a1 a2 then Some (a1, a2) else loop a2 t + | a2 :: t -> + if equal a1 a2 then + Some (a1, a2) + else + loop a2 t in loop a1 t -;; (* returns list without adjacent duplicates *) let remove_consecutive_duplicates list ~equal = - let rec loop list accum = match list with + let rec loop list accum = + match list with | [] -> accum - | hd :: [] -> hd :: accum + | [hd] -> hd :: accum | hd1 :: hd2 :: tl -> - if equal hd1 hd2 - then loop (hd2 :: tl) accum - else loop (hd2 :: tl) (hd1 :: accum) + if equal hd1 hd2 then + loop (hd2 :: tl) accum + else + loop (hd2 :: tl) (hd1 :: accum) in rev (loop list []) (** returns sorted version of list with duplicates removed *) -let dedup ?(compare=Pervasives.compare) list = +let dedup ?(compare = Pervasives.compare) list = match list with - | [] -> [] (* performance hack *) + | [] -> [] (* performance hack *) | _ -> let equal x x' = compare x x' = 0 in let sorted = List.sort ~cmp:compare list in @@ -450,55 +539,64 @@ let dedup ?(compare=Pervasives.compare) list = let contains_dup ?compare lst = length (dedup ?compare lst) <> length lst -let find_a_dup ?(compare=Pervasives.compare) l = +let find_a_dup ?(compare = Pervasives.compare) l = let sorted = List.sort ~cmp:compare l in - let rec loop l = match l with - [] | [_] -> None + let rec loop l = + match l with + | [] + | [_] -> + None | hd1 :: hd2 :: tl -> - if compare hd1 hd2 = 0 then Some (hd1) else loop (hd2 :: tl) + if compare hd1 hd2 = 0 then + Some hd1 + else + loop (hd2 :: tl) in loop sorted let count t ~f = Container.fold_count fold t ~f + let sum m t ~f = Container.fold_sum m fold t ~f + let min_elt t ~cmp = Container.fold_min fold t ~cmp + let max_elt t ~cmp = Container.fold_max fold t ~cmp let init n ~f = if n < 0 then invalid_argf "List.init %d" n (); let rec loop i accum = assert (i >= 0); - if i = 0 then accum - else loop (i-1) (f (i-1) :: accum) + if i = 0 then + accum + else + loop (i - 1) (f (i - 1) :: accum) in loop n [] -;; let rev_filter_map l ~f = let rec loop l accum = match l with | [] -> accum | hd :: tl -> - match f hd with + (match f hd with | Some x -> loop tl (x :: accum) - | 
None -> loop tl accum + | None -> loop tl accum) in loop l [] -;; -let filter_map l ~f = List.rev (rev_filter_map l ~f) +let filter_map (l : 'a list) ~(f : 'a -> 'b option) : 'b list = + List.rev (rev_filter_map l ~f) let rev_filter_mapi l ~f = let rec loop i l accum = match l with | [] -> accum | hd :: tl -> - match f i hd with + (match f i hd with | Some x -> loop (i + 1) tl (x :: accum) - | None -> loop (i + 1) tl accum + | None -> loop (i + 1) tl accum) in loop 0 l [] -;; let filter_mapi l ~f = List.rev (rev_filter_mapi l ~f) @@ -509,45 +607,46 @@ let partition_map t ~f = match t with | [] -> (rev fst, rev snd) | x :: t -> - match f x with + (match f x with | `Fst y -> loop t (y :: fst) snd - | `Snd y -> loop t fst (y :: snd) + | `Snd y -> loop t fst (y :: snd)) in loop t [] [] -;; let partition_tf t ~f = - let f x = if f x then `Fst x else `Snd x in + let f x = + if f x then + `Fst x + else + `Snd x + in partition_map t ~f -;; module Assoc = struct - type ('a, 'b) t = ('a * 'b) list - let find t ?(equal=Poly.equal) key = + let find t ?(equal = Poly.equal) key = match find t ~f:(fun (key', _) -> equal key key') with | None -> None | Some x -> Some (snd x) - let find_exn t ?(equal=Poly.equal) key = + let find_exn t ?(equal = Poly.equal) key = match find t key ~equal with | None -> raise Not_found | Some value -> value - let mem t ?(equal=Poly.equal) key = (find t ~equal key) <> None + let mem t ?(equal = Poly.equal) key = find t ~equal key <> None - let remove t ?(equal=Poly.equal) key = + let remove t ?(equal = Poly.equal) key = filter t ~f:(fun (key', _) -> not (equal key key')) - let add t ?(equal=Poly.equal) key value = + let add t ?(equal = Poly.equal) key value = (* the remove doesn't change the map semantics, but keeps the list small *) (key, value) :: remove t ~equal key let inverse t = map t ~f:(fun (x, y) -> (y, x)) let map t ~f = List.map t ~f:(fun (key, value) -> (key, f value)) - end let sub l ~pos ~len = @@ -555,18 +654,15 @@ let sub l ~pos ~len = possibility of overflow. 
*) if pos < 0 || len < 0 || pos > length l - len then invalid_arg "List.sub"; List.rev - (foldi l ~init:[] - ~f:(fun i acc el -> - if i >= pos && i < (pos + len) - then el :: acc - else acc - ) - ) -;; + (foldi l ~init:[] ~f:(fun i acc el -> + if i >= pos && i < pos + len then + el :: acc + else + acc)) (*let slice a start stop =*) - (*Ordered_collection_common.slice ~length_fun:length ~sub_fun:sub*) - (*a start stop*) +(*Ordered_collection_common.slice ~length_fun:length ~sub_fun:sub*) +(*a start stop*) let split_n t_orig n = if n <= 0 then @@ -583,6 +679,7 @@ let split_n t_orig n = loop n t_orig [] let take t n = fst (split_n t n) + let drop t n = snd (split_n t n) let split_while xs ~f = @@ -591,20 +688,20 @@ let split_while xs ~f = | t -> (rev acc, t) in loop [] xs -;; let take_while t ~f = fst (split_while t ~f) + let drop_while t ~f = snd (split_while t ~f) let cartesian_product list1 list2 = - if list2 = [] then [] else - let rec loop l1 l2 accum = match l1 with + if list2 = [] then + [] + else + let rec loop l1 l2 accum = + match l1 with | [] -> accum - | (hd :: tl) -> - loop tl l2 - (List.rev_append - (map ~f:(fun x -> (hd,x)) l2) - accum) + | hd :: tl -> + loop tl l2 (List.rev_append (map ~f:(fun x -> (hd, x)) l2) accum) in List.rev (loop list1 list2 []) @@ -614,24 +711,30 @@ let concat_no_order l = fold l ~init:[] ~f:(fun acc l -> rev_append l acc) let cons x l = x :: l -let is_empty l = match l with [] -> true | _ -> false +let is_empty l = + match l with + | [] -> true + | _ -> false let is_sorted l ~compare = let rec loop l = match l with - | [] | [_] -> true - | x1 :: ((x2 :: _) as rest) -> - compare x1 x2 <= 0 && loop rest - in loop l + | [] + | [_] -> + true + | x1 :: (x2 :: _ as rest) -> compare x1 x2 <= 0 && loop rest + in + loop l let is_sorted_strictly l ~compare = let rec loop l = match l with - | [] | [_] -> true - | x1 :: ((x2 :: _) as rest) -> - compare x1 x2 < 0 && loop rest - in loop l -;; + | [] + | [_] -> + true + | x1 :: (x2 :: _ as rest) -> compare x1 x2 < 0 && loop rest + in + loop l module Infix = struct let ( @ ) = append @@ -639,39 +742,42 @@ end let compare a b ~cmp = let rec loop a b = - match a, b with - | [], [] -> 0 - | [], _ -> -1 - | _ , [] -> 1 - | x :: xs, y :: ys -> + match (a, b) with + | ([], []) -> 0 + | ([], _) -> -1 + | (_, []) -> 1 + | (x :: xs, y :: ys) -> let n = cmp x y in - if n = 0 then loop xs ys - else n + if n = 0 then + loop xs ys + else + n in loop a b -;; let equal t1 t2 ~equal = let rec loop t1 t2 = - match t1, t2 with - | [], [] -> true - | x1 :: t1, x2 :: t2 -> equal x1 x2 && loop t1 t2 + match (t1, t2) with + | ([], []) -> true + | (x1 :: t1, x2 :: t2) -> equal x1 x2 && loop t1 t2 | _ -> false in loop t1 t2 -;; let transpose = let rec transpose_aux t rev_columns = - match partition_map t ~f:(function [] -> `Snd () | x :: xs -> `Fst (x, xs)) with + match + partition_map t ~f:(function + | [] -> `Snd () + | x :: xs -> `Fst (x, xs)) + with | (_ :: _, _ :: _) -> None | ([], _) -> Some (rev_append rev_columns []) | (heads_and_tails, []) -> let (column, trimmed_rows) = unzip heads_and_tails in transpose_aux trimmed_rows (column :: rev_columns) in - fun t -> - transpose_aux t [] + (fun t -> transpose_aux t []) exception Transpose_got_lists_of_different_lengths of int list @@ -679,7 +785,8 @@ let transpose_exn l = match transpose l with | Some l -> l | None -> - raise (Transpose_got_lists_of_different_lengths (List.map l ~f:List.length)) + raise + (Transpose_got_lists_of_different_lengths (List.map l ~f:List.length)) let intersperse 
t ~sep = match t with @@ -690,5 +797,5 @@ let rec replicate ~num x = match num with | 0 -> [] | n when n < 0 -> - invalid_argf "List.replicate was called with %d argument" n () + invalid_argf "List.replicate was called with %d argument" n () | _ -> x :: replicate ~num:(num - 1) x diff --git a/hack/third-party/core/core_printf.ml b/hack/third-party/core/core_printf.ml index 9cb7d168a6c..682daa31843 100644 --- a/hack/third-party/core/core_printf.ml +++ b/hack/third-party/core/core_printf.ml @@ -2,6 +2,13 @@ include Printf (** failwith, invalid_arg, and exit accepting printf's format. *) -let failwithf fmt = ksprintf (fun s () -> failwith s) fmt -let invalid_argf fmt = ksprintf (fun s () -> invalid_arg s) fmt -let exitf fmt = ksprintf (fun s () -> Printf.eprintf "%s\n%!" s; exit 1) fmt +let failwithf fmt = ksprintf (fun s () -> failwith s) fmt + +let invalid_argf fmt = ksprintf (fun s () -> invalid_arg s) fmt + +let exitf fmt = + ksprintf + (fun s () -> + Printf.eprintf "%s\n%!" s; + exit 1) + fmt diff --git a/hack/third-party/core/core_result.ml b/hack/third-party/core/core_result.ml index 0bc17c216de..9a1f3c758cf 100644 --- a/hack/third-party/core/core_result.ml +++ b/hack/third-party/core/core_result.ml @@ -1,8 +1,8 @@ module Stable = struct module V1 = struct type ('a, 'b) t = ('a, 'b) Pervasives.result = - | Ok of 'a - | Error of 'b + | Ok of 'a + | Error of 'b end end @@ -11,13 +11,15 @@ include Stable.V1 type ('a, 'b) _t = ('a, 'b) t include Monad.Make2 (struct - type ('a, 'b) t = ('a,'b) _t + type ('a, 'b) t = ('a, 'b) _t - let bind x f = match x with + let bind x f = + match x with | Error _ as x -> x | Ok x -> f x - let map x ~f = match x with + let map x ~f = + match x with | Error _ as x -> x | Ok x -> Ok (f x) @@ -26,16 +28,19 @@ include Monad.Make2 (struct let return x = Ok x end) -let fail x = Error x;; +let fail x = Error x + let failf format = Printf.ksprintf fail format (* This definition shadows the version created by the functor application above, but it is much more efficient. 
*) -let map t ~f = match t with +let map t ~f = + match t with | Ok x -> Ok (f x) | Error _ as x -> x -let map_error t ~f = match t with +let map_error t ~f = + match t with | Ok _ as x -> x | Error x -> Error (f x) @@ -60,11 +65,13 @@ let of_option opt ~error = | Some x -> Ok x | None -> Error error -let iter v ~f = match v with +let iter v ~f = + match v with | Ok x -> f x | Error _ -> () -let iter_error v ~f = match v with +let iter_error v ~f = + match v with | Ok _ -> () | Error x -> f x @@ -73,13 +80,12 @@ let ok_fst = function | Error x -> `Snd x let ok_if_true bool ~error = - if bool - then Ok () - else Error error + if bool then + Ok () + else + Error error -let try_with f = - try Ok (f ()) - with exn -> Error exn +let try_with f = (try Ok (f ()) with exn -> Error exn) let ok_unit = Ok () @@ -92,18 +98,19 @@ let ok_or_failwith = function | Error str -> failwith str module Export = struct - type ('ok, 'err) _result = - ('ok, 'err) t = - | Ok of 'ok - | Error of 'err + type ('ok, 'err) _result = ('ok, 'err) t = + | Ok of 'ok + | Error of 'err + + let is_error = is_error - let is_error = is_error - let is_ok = is_ok + let is_ok = is_ok end let combine t1 t2 ~ok ~err = - match t1, t2 with - | Ok _, Error e | Error e, Ok _ -> Error e - | Ok ok1 , Ok ok2 -> Ok (ok ok1 ok2 ) - | Error err1, Error err2 -> Error (err err1 err2) -;; + match (t1, t2) with + | (Ok _, Error e) + | (Error e, Ok _) -> + Error e + | (Ok ok1, Ok ok2) -> Ok (ok ok1 ok2) + | (Error err1, Error err2) -> Error (err err1 err2) diff --git a/hack/third-party/core/core_result.mli b/hack/third-party/core/core_result.mli index 7cfc449dee6..cc95e7e9e60 100644 --- a/hack/third-party/core/core_result.mli +++ b/hack/third-party/core/core_result.mli @@ -13,63 +13,66 @@ type ('ok, 'err) t = ('ok, 'err) Pervasives.result = | Ok of 'ok | Error of 'err -include Monad.S2 with type ('a,'err) t := ('a,'err) t - +include Monad.S2 with type ('a, 'err) t := ('a, 'err) t val fail : 'err -> (_, 'err) t -(** e.g. [failf "Couldn't find bloogle %s" (Bloogle.to_string b)] *) val failf : ('a, unit, string, (_, string) t) format4 -> 'a +(** e.g. [failf "Couldn't find bloogle %s" (Bloogle.to_string b)] *) + +val is_ok : (_, _) t -> bool -val is_ok : (_, _) t -> bool val is_error : (_, _) t -> bool -val ok : ('ok, _ ) t -> 'ok option -val error : (_ , 'err) t -> 'err option +val ok : ('ok, _) t -> 'ok option + +val error : (_, 'err) t -> 'err option val of_option : 'ok option -> error:'err -> ('ok, 'err) t -val iter : ('ok, _ ) t -> f:('ok -> unit) -> unit -val iter_error : (_ , 'err) t -> f:('err -> unit) -> unit +val iter : ('ok, _) t -> f:('ok -> unit) -> unit + +val iter_error : (_, 'err) t -> f:('err -> unit) -> unit -val map : ('ok, 'err) t -> f:('ok -> 'c) -> ('c , 'err) t -val map_error : ('ok, 'err) t -> f:('err -> 'c) -> ('ok, 'c ) t +val map : ('ok, 'err) t -> f:('ok -> 'c) -> ('c, 'err) t + +val map_error : ('ok, 'err) t -> f:('err -> 'c) -> ('ok, 'c) t (* Returns Ok if both are Ok and Error otherwise. *) -val combine - : ('ok1, 'err) t - -> ('ok2, 'err) t - -> ok: ('ok1 -> 'ok2 -> 'ok3) - -> err:('err -> 'err -> 'err) - -> ('ok3, 'err) t +val combine : + ('ok1, 'err) t -> + ('ok2, 'err) t -> + ok:('ok1 -> 'ok2 -> 'ok3) -> + err:('err -> 'err -> 'err) -> + ('ok3, 'err) t +val ok_fst : ('ok, 'err) t -> [ `Fst of 'ok | `Snd of 'err ] (** [ok_fst] is useful with [List.partition_map]. 
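(* A small usage sketch for the Core_result helpers above: try_with captures
 * exceptions, map_error rewrites the error side, and combine merges two
 * results with caller-supplied ok/err functions. The parsing functions are
 * made-up examples, not part of this library. *)
let _parse_port (s : string) : (int, string) Core_result.t =
  Core_result.try_with (fun () -> int_of_string s)
  |> Core_result.map_error ~f:(fun _exn -> "not an integer: " ^ s)

let _parse_host_port host port : (string * int, string) Core_result.t =
  Core_result.combine
    (if host = "" then Error "empty host" else Ok host)
    (_parse_port port)
    ~ok:(fun h p -> (h, p))
    ~err:(fun e1 e2 -> e1 ^ "; " ^ e2)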
Continuing the above example: {[ let rics, errors = List.partition_map ~f:Core_result.ok_fst (List.map ~f:ric_of_ticker ["AA"; "F"; "CSCO"; "AAPL"]) ]} *) -val ok_fst : ('ok, 'err) t -> [ `Fst of 'ok | `Snd of 'err ] (* [ok_if_true] returns [Ok ()] if [bool] is true, and [Error error] if it is false *) -val ok_if_true : bool -> error : 'err -> (unit, 'err) t +val ok_if_true : bool -> error:'err -> (unit, 'err) t val try_with : (unit -> 'a) -> ('a, exn) t -(** [ok_exn t] returns [x] if [t = Ok x], and raises [exn] if [t = Error exn] *) val ok_exn : ('ok, exn) t -> 'ok +(** [ok_exn t] returns [x] if [t = Ok x], and raises [exn] if [t = Error exn] *) (* raises Failure in the Error case *) val ok_or_failwith : ('ok, string) t -> 'ok -(** [ok_unit = Ok ()], used to avoid allocation as a performance hack *) val ok_unit : (unit, _) t +(** [ok_unit = Ok ()], used to avoid allocation as a performance hack *) module Export : sig - type ('ok, 'err) _result = - ('ok, 'err) t = - | Ok of 'ok - | Error of 'err + type ('ok, 'err) _result = ('ok, 'err) t = + | Ok of 'ok + | Error of 'err + + val is_ok : (_, _) t -> bool - val is_ok : (_, _) t -> bool val is_error : (_, _) t -> bool end diff --git a/hack/third-party/core/dune b/hack/third-party/core/dune new file mode 100644 index 00000000000..cb63e2688fe --- /dev/null +++ b/hack/third-party/core/dune @@ -0,0 +1,3 @@ +(library + (name imported_core) + (wrapped false)) diff --git a/hack/third-party/core/fn.ml b/hack/third-party/core/fn.ml index 2f8f0d456ed..e1dfb3b638b 100644 --- a/hack/third-party/core/fn.ml +++ b/hack/third-party/core/fn.ml @@ -1,23 +1,31 @@ -let const c = (); fun _ -> c +let const c = + (); + (fun _ -> c) -let non f = (); fun x -> not (f x) +let non f = + (); + (fun x -> not (f x)) let forever f = let rec forever () = f (); forever () in - try forever () - with e -> e + (try forever () with e -> e) (* The typical use case for these functions is to pass in functional arguments and get functions as a result. For this reason, we tell the compiler where to insert breakpoints in the argument-passing scheme. *) -let compose f g = (); fun x -> f (g x) +let compose f g = + (); + (fun x -> f (g x)) -let flip f = (); fun x y -> f y x +let flip f = + (); + (fun x y -> f y x) let rec apply_n_times ~n f x = - if n <= 0 - then x - else apply_n_times ~n:(n - 1) f (f x) + if n <= 0 then + x + else + apply_n_times ~n:(n - 1) f (f x) diff --git a/hack/third-party/core/hack_caml.ml b/hack/third-party/core/hack_caml.ml index bef6886ec44..7d5b5524cf8 100644 --- a/hack/third-party/core/hack_caml.ml +++ b/hack/third-party/core/hack_caml.ml @@ -10,6 +10,7 @@ module Buffer = Buffer module Callback = Callback module Char = Char module Complex = Complex + (* module Digest = Digest *) module Filename = Filename module Format = Format @@ -35,7 +36,6 @@ module Queue = Queue module Random = Random module Scanf = Scanf module Set = Set -module Sort = Sort module Stack = Stack module StdLabels = StdLabels module Stream = Stream @@ -43,5 +43,4 @@ module String = String module StringLabels = StringLabels module Sys = Sys module Weak = Weak - include Pervasives diff --git a/hack/third-party/core/monad.ml b/hack/third-party/core/monad.ml index 676a34e94e7..26f96d1df8f 100644 --- a/hack/third-party/core/monad.ml +++ b/hack/third-party/core/monad.ml @@ -1,6 +1,8 @@ module type Basic = sig type 'a t + val bind : 'a t -> ('a -> 'b t) -> 'b t + val return : 'a -> 'a t (* The [map] argument to [Monad.Make] says how to implement the monad's [map] function. 
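(* A short illustration of the combinators in fn.ml; assumes the module is
 * addressed as Fn, matching the (wrapped false) file name. *)
let _fn_examples () =
  let is_positive x = x > 0 in
  let add1 x = x + 1 in
  let double x = x * 2 in
  (* non negates a predicate *)
  assert (List.filter (Fn.non is_positive) [-1; 2; -3] = [-1; -3]);
  (* compose f g applies g first, then f *)
  assert (Fn.compose double add1 3 = 8);
  (* flip swaps the first two arguments *)
  assert (Fn.flip ( - ) 1 10 = 9);
  (* apply_n_times iterates a function n times *)
  assert (Fn.apply_n_times ~n:3 add1 0 = 3)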
@@ -10,53 +12,52 @@ module type Basic = sig Some other functions returned by [Monad.Make] are defined in terms of [map], so passing in a more efficient [map] will improve their efficiency as well. *) - val map : [ `Define_using_bind - | `Custom of ('a t -> f:('a -> 'b) -> 'b t) - ] + val map : [ `Define_using_bind | `Custom of 'a t -> f:('a -> 'b) -> 'b t ] end module type Infix = sig type 'a t + val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t (** [t >>= f] returns a computation that sequences the computations represented by two monad elements. The resulting computation first does [t] to yield a value [v], and then runs the computation returned by [f v]. *) - val (>>=) : 'a t -> ('a -> 'b t) -> 'b t + val ( >>| ) : 'a t -> ('a -> 'b) -> 'b t (** [t >>| f] is [t >>= (fun a -> return (f a))]. *) - val (>>|) : 'a t -> ('a -> 'b) -> 'b t - end module type S = sig + include + Infix (** A monad is an abstraction of the concept of sequencing of computations. A value of type 'a monad represents a computation that returns a value of type 'a. *) - include Infix module Monad_infix : Infix with type 'a t := 'a t - (** [bind t f] = [t >>= f] *) val bind : 'a t -> ('a -> 'b t) -> 'b t + (** [bind t f] = [t >>= f] *) - (** [return v] returns the (trivial) computation that returns v. *) val return : 'a -> 'a t + (** [return v] returns the (trivial) computation that returns v. *) - (** [map t ~f] is t >>| f. *) val map : 'a t -> f:('a -> 'b) -> 'b t + (** [map t ~f] is t >>| f. *) - (** [join t] is [t >>= (fun t' -> t')]. *) val join : 'a t t -> 'a t + (** [join t] is [t >>= (fun t' -> t')]. *) - (** [ignore t] = map t ~f:(fun _ -> ()). *) val ignore : 'a t -> unit t + (** [ignore t] = map t ~f:(fun _ -> ()). *) val all : 'a t list -> 'a list t + val all_ignore : unit t list -> unit t end module Make (M : Basic) : S with type 'a t := 'a M.t = struct + let bind = M.bind - let bind = M.bind let return = M.return let map_via_bind ma ~f = M.bind ma (fun a -> M.return (f a)) @@ -67,29 +68,27 @@ module Make (M : Basic) : S with type 'a t := 'a M.t = struct | `Custom x -> x module Monad_infix = struct + let ( >>= ) = bind - let (>>=) = bind - - let (>>|) t f = map t ~f + let ( >>| ) t f = map t ~f end include Monad_infix - let join t = t >>= fun t' -> t' + let join t = t >>= (fun t' -> t') let ignore t = map t ~f:(fun _ -> ()) let all = let rec loop vs = function | [] -> return (List.rev vs) - | t :: ts -> t >>= fun v -> loop (v :: vs) ts + | t :: ts -> t >>= (fun v -> loop (v :: vs) ts) in - fun ts -> loop [] ts + (fun ts -> loop [] ts) let rec all_ignore = function | [] -> return () - | t :: ts -> t >>= fun () -> all_ignore ts - + | t :: ts -> t >>= (fun () -> all_ignore ts) end (** @@ -100,10 +99,12 @@ end *) module type Basic2 = sig type ('a, 'd) t + val bind : ('a, 'd) t -> ('a -> ('b, 'd) t) -> ('b, 'd) t - val map : [ `Define_using_bind - | `Custom of (('a, 'd) t -> f:('a -> 'b) -> ('b, 'd) t) - ] + + val map : + [ `Define_using_bind | `Custom of ('a, 'd) t -> f:('a -> 'b) -> ('b, 'd) t ] + val return : 'a -> ('a, _) t end @@ -111,8 +112,10 @@ end passed through. *) module type Infix2 = sig type ('a, 'd) t - val (>>=) : ('a, 'd) t -> ('a -> ('b, 'd) t) -> ('b, 'd) t - val (>>|) : ('a, 'd) t -> ('a -> 'b) -> ('b, 'd) t + + val ( >>= ) : ('a, 'd) t -> ('a -> ('b, 'd) t) -> ('b, 'd) t + + val ( >>| ) : ('a, 'd) t -> ('a -> 'b) -> ('b, 'd) t end (** The same as S except the monad type has two arguments. 
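(* A sketch of how a client uses Monad.Make: supply return/bind (and either a
 * custom map or `Define_using_bind) and the functor derives the infix
 * operators, join, all, etc. The option-based instance here is purely for
 * illustration. *)
module Opt_monad_example = struct
  type 'a t = 'a option

  include Monad.Make (struct
    type nonrec 'a t = 'a t

    let return x = Some x

    let bind m f =
      match m with
      | None -> None
      | Some x -> f x

    let map = `Define_using_bind
  end)
end

let _monad_example () =
  let open Opt_monad_example.Monad_infix in
  assert ((Some 1 >>= fun x -> Some (x + 1)) = Some 2);
  assert ((Some 2 >>| string_of_int) = Some "2");
  assert (Opt_monad_example.all [Some 1; Some 2] = Some [1; 2]);
  assert (Opt_monad_example.all [Some 1; None] = None)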
The second is always just @@ -137,31 +140,43 @@ module type S2 = sig val all_ignore : (unit, 'd) t list -> (unit, 'd) t end -module Check_S2_refines_S (X : S) : (S2 with type ('a, 'd) t = 'a X.t) = -struct +module Check_S2_refines_S (X : S) : S2 with type ('a, 'd) t = 'a X.t = struct type ('a, 'd) t = 'a X.t + include struct open X - let (>>=) = (>>=) - let (>>|) = (>>|) - let bind = bind - let return = return - let map = map - let join = join - let ignore = ignore - let all = all + + let ( >>= ) = ( >>= ) + + let ( >>| ) = ( >>| ) + + let bind = bind + + let return = return + + let map = map + + let join = join + + let ignore = ignore + + let all = all + let all_ignore = all_ignore end + module Monad_infix = struct open X.Monad_infix - let (>>=) = (>>=) - let (>>|) = (>>|) + + let ( >>= ) = ( >>= ) + + let ( >>| ) = ( >>| ) end end module Make2 (M : Basic2) : S2 with type ('a, 'd) t := ('a, 'd) M.t = struct + let bind = M.bind - let bind = M.bind let return = M.return let map_via_bind ma ~f = M.bind ma (fun a -> M.return (f a)) @@ -172,27 +187,25 @@ module Make2 (M : Basic2) : S2 with type ('a, 'd) t := ('a, 'd) M.t = struct | `Custom x -> x module Monad_infix = struct + let ( >>= ) = bind - let (>>=) = bind - - let (>>|) t f = map t ~f + let ( >>| ) t f = map t ~f end include Monad_infix - let join t = t >>= fun t' -> t' + let join t = t >>= (fun t' -> t') let ignore t = map t ~f:(fun _ -> ()) let all = let rec loop vs = function | [] -> return (List.rev vs) - | t :: ts -> t >>= fun v -> loop (v :: vs) ts + | t :: ts -> t >>= (fun v -> loop (v :: vs) ts) in - fun ts -> loop [] ts + (fun ts -> loop [] ts) let rec all_ignore = function | [] -> return () - | t :: ts -> t >>= fun () -> all_ignore ts - + | t :: ts -> t >>= (fun () -> all_ignore ts) end diff --git a/hack/third-party/core/option.ml b/hack/third-party/core/option.ml index 5992485cb57..db3a16cfb52 100644 --- a/hack/third-party/core/option.ml +++ b/hack/third-party/core/option.ml @@ -1,25 +1,28 @@ type 'a t = 'a option -let is_none = function None -> true | _ -> false +let is_none = function + | None -> true + | _ -> false -let is_some = function Some _ -> true | _ -> false +let is_some = function + | Some _ -> true + | _ -> false let value_map o ~default ~f = match o with | Some x -> f x - | None -> default + | None -> default let iter o ~f = match o with | None -> () | Some a -> f a -;; let invariant f t = iter t ~f let map2 o1 o2 ~f = - match o1, o2 with - | Some a1, Some a2 -> Some (f a1 a2) + match (o1, o2) with + | (Some a1, Some a2) -> Some (f a1 a2) | _ -> None let call x ~f = @@ -31,62 +34,55 @@ let value t ~default = match t with | None -> default | Some x -> x -;; let value_exn ?here ?error ?message t = (* this function has been modified from the original form to remove *) (* the dependency on Ppx and the Error module: we don't handle the *) (* 'here' parameter, and can't combine 'error' with 'message'. 
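(* Typical calls into the Option helpers above; a usage sketch only. *)
let _option_examples () =
  assert (Option.value_map (Some 3) ~default:"none" ~f:string_of_int = "3");
  assert (Option.value_map None ~default:"none" ~f:string_of_int = "none");
  (* map2 only produces a value when both inputs are present *)
  assert (Option.map2 (Some 1) (Some 2) ~f:( + ) = Some 3);
  assert (Option.map2 (Some 1) None ~f:( + ) = None);
  (* value supplies a fallback for None *)
  assert (Option.value None ~default:0 = 0)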
*) - match t, here, error, message with - | Some x, _, _, _ -> x - | _, _, Some e, _ -> raise e - | _, _, _, Some m -> failwith m - | _, _, _, _ -> failwith "Option.value_exn None" -;; + match (t, here, error, message) with + | (Some x, _, _, _) -> x + | (_, _, Some e, _) -> raise e + | (_, _, _, Some m) -> failwith m + | (_, _, _, _) -> failwith "Option.value_exn None" let to_array t = match t with | None -> [||] | Some x -> [|x|] -;; let to_list t = match t with | None -> [] | Some x -> [x] -;; let min_elt t ~cmp:_ = t + let max_elt t ~cmp:_ = t + let sum (type a) (module M : Commutative_group.S with type t = a) t ~f = match t with | None -> M.zero | Some x -> f x -;; let for_all t ~f = match t with | None -> true | Some x -> f x -;; let exists t ~f = match t with | None -> false | Some x -> f x -;; let mem t a ~equal = match t with | None -> false | Some a' -> equal a a' -;; let length t = match t with | None -> 0 | Some _ -> 1 -;; let is_empty = is_none @@ -94,37 +90,41 @@ let fold t ~init ~f = match t with | None -> init | Some x -> f init x -;; let count t ~f = match t with | None -> 0 - | Some a -> if f a then 1 else 0 -;; + | Some a -> + if f a then + 1 + else + 0 let find t ~f = match t with | None -> None - | Some x -> if f x then Some x else None -;; + | Some x -> + if f x then + Some x + else + None let find_map t ~f = match t with | None -> None | Some a -> f a -;; let equal f t t' = - match t, t' with - | None, None -> true - | Some x, Some x' -> f x x' + match (t, t') with + | (None, None) -> true + | (Some x, Some x') -> f x x' | _ -> false let some x = Some x let both x y = - match x,y with - | Some a, Some b -> Some (a,b) + match (x, y) with + | (Some a, Some b) -> Some (a, b) | _ -> None let first_some x y = @@ -132,31 +132,48 @@ let first_some x y = | Some _ -> x | None -> y -let some_if cond x = if cond then Some x else None +let some_if cond x = + if cond then + Some x + else + None let merge a b ~f = - match a, b with - | None, x | x, None -> x - | Some a, Some b -> Some (f a b) + match (a, b) with + | (None, x) + | (x, None) -> + x + | (Some a, Some b) -> Some (f a b) let filter t ~f = match t with | Some v as o when f v -> o | _ -> None -let try_with f = - try Some (f ()) - with _ -> None +let compare ~cmp v1 v2 = + let tag_to_int = function + | Some _ -> 1 + | None -> 0 + in + match (v1, v2) with + | (Some v1, Some v2) -> cmp v1 v2 + | (None, None) -> 0 + | _ -> compare (tag_to_int v1) (tag_to_int v2) + +let try_with f = (try Some (f ()) with _ -> None) include Monad.Make (struct type 'a t = 'a option + let return x = Some x + let map t ~f = match t with | None -> None | Some a -> Some (f a) - ;; + let map = `Custom map + let bind o f = (* signature of bind has been modified from the original to fit *) (* with what's expected here. 
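(* A sketch of the newly added Option.compare (None sorts before Some) and of
 * try_with, which turns an exception-raising thunk into an option. *)
let _option_compare_example () =
  let sorted =
    List.sort (Option.compare ~cmp:Pervasives.compare) [Some 2; None; Some 1]
  in
  assert (sorted = [None; Some 1; Some 2]);
  assert (Option.try_with (fun () -> int_of_string "42") = Some 42);
  assert (Option.try_with (fun () -> int_of_string "nope") = None)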
*) diff --git a/hack/third-party/core/polymorphic_compare.ml b/hack/third-party/core/polymorphic_compare.ml index 9fe948c7f52..f28f1cf6654 100644 --- a/hack/third-party/core/polymorphic_compare.ml +++ b/hack/third-party/core/polymorphic_compare.ml @@ -1,13 +1,23 @@ -external compare : 'a -> 'a -> int = "%compare" +external compare : 'a -> 'a -> int = "%compare" + external ascending : 'a -> 'a -> int = "%compare" + let descending x y = compare y x -let (<) = (<) -let (<=) = (<=) -let (>) = (>) -let (>=) = (>=) -let (=) = (=) -let (<>) = (<>) -let equal = (=) +let ( < ) = ( < ) + +let ( <= ) = ( <= ) + +let ( > ) = ( > ) + +let ( >= ) = ( >= ) + +let ( = ) = ( = ) + +let ( <> ) = ( <> ) + +let equal = ( = ) + let min = min + let max = max diff --git a/hack/third-party/core/polymorphic_compare.mli b/hack/third-party/core/polymorphic_compare.mli index bb2409db225..1eb796e5a35 100644 --- a/hack/third-party/core/polymorphic_compare.mli +++ b/hack/third-party/core/polymorphic_compare.mli @@ -1,20 +1,29 @@ (* For use in combination with [No_polymorphic_compare]. *) -val compare : 'a -> 'a -> int +val compare : 'a -> 'a -> int +val ascending : 'a -> 'a -> int (** [ascending] is identical to [compare]. [descending x y = ascending y x]. These are intended to be mnemonic when used like [List.sort ~cmp:ascending] and [List.sort ~cmp:descending], since they cause the list to be sorted in ascending or descending order, respectively. *) -val ascending : 'a -> 'a -> int + val descending : 'a -> 'a -> int -val (<) : 'a -> 'a -> bool -val (<=) : 'a -> 'a -> bool -val (>) : 'a -> 'a -> bool -val (>=) : 'a -> 'a -> bool -val (=) : 'a -> 'a -> bool -val (<>) : 'a -> 'a -> bool -val equal : 'a -> 'a -> bool -val min : 'a -> 'a -> 'a -val max : 'a -> 'a -> 'a +val ( < ) : 'a -> 'a -> bool + +val ( <= ) : 'a -> 'a -> bool + +val ( > ) : 'a -> 'a -> bool + +val ( >= ) : 'a -> 'a -> bool + +val ( = ) : 'a -> 'a -> bool + +val ( <> ) : 'a -> 'a -> bool + +val equal : 'a -> 'a -> bool + +val min : 'a -> 'a -> 'a + +val max : 'a -> 'a -> 'a diff --git a/hack/third-party/inotify/dune b/hack/third-party/inotify/dune new file mode 100644 index 00000000000..1b1c1f480e6 --- /dev/null +++ b/hack/third-party/inotify/dune @@ -0,0 +1,5 @@ +(library + (name inotify) + (wrapped false) + (c_names + inotify_stubs)) diff --git a/hack/third-party/inotify/inotify.ml b/hack/third-party/inotify/inotify.ml index 473a634c8d0..f933cd87b8f 100644 --- a/hack/third-party/inotify/inotify.ml +++ b/hack/third-party/inotify/inotify.ml @@ -15,74 +15,79 @@ *) type selector = -| S_Access -| S_Attrib -| S_Close_write -| S_Close_nowrite -| S_Create -| S_Delete -| S_Delete_self -| S_Modify -| S_Move_self -| S_Moved_from -| S_Moved_to -| S_Open -| S_Dont_follow -| S_Mask_add -| S_Oneshot -| S_Onlydir -(* convenience *) -| S_Move -| S_Close -| S_All + | S_Access + | S_Attrib + | S_Close_write + | S_Close_nowrite + | S_Create + | S_Delete + | S_Delete_self + | S_Modify + | S_Move_self + | S_Moved_from + | S_Moved_to + | S_Open + | S_Dont_follow + | S_Mask_add + | S_Oneshot + | S_Onlydir + (* convenience *) + | S_Move + | S_Close + | S_All type event_kind = -| Access -| Attrib -| Close_write -| Close_nowrite -| Create -| Delete -| Delete_self -| Modify -| Move_self -| Moved_from -| Moved_to -| Open -| Ignored -| Isdir -| Q_overflow -| Unmount + | Access + | Attrib + | Close_write + | Close_nowrite + | Create + | Delete + | Delete_self + | Modify + | Move_self + | Moved_from + | Moved_to + | Open + | Ignored + | Isdir + | Q_overflow + | Unmount let 
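(* As the documentation in polymorphic_compare.mli suggests, ascending and
 * descending read naturally as sort comparators; a small sketch using the
 * stdlib List.sort. *)
let _sort_examples () =
  assert (List.sort Polymorphic_compare.ascending [3; 1; 2] = [1; 2; 3]);
  assert (List.sort Polymorphic_compare.descending [3; 1; 2] = [3; 2; 1])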
string_of_event_kind = function -| Access -> "ACCESS" -| Attrib -> "ATTRIB" -| Close_write -> "CLOSE_WRITE" -| Close_nowrite -> "CLOSE_NOWRITE" -| Create -> "CREATE" -| Delete -> "DELETE" -| Delete_self -> "DELETE_SELF" -| Modify -> "MODIFY" -| Move_self -> "MOVE_SELF" -| Moved_from -> "MOVED_FROM" -| Moved_to -> "MOVED_TO" -| Open -> "OPEN" -| Ignored -> "IGNORED" -| Isdir -> "ISDIR" -| Q_overflow -> "Q_OVERFLOW" -| Unmount -> "UNMOUNT" + | Access -> "ACCESS" + | Attrib -> "ATTRIB" + | Close_write -> "CLOSE_WRITE" + | Close_nowrite -> "CLOSE_NOWRITE" + | Create -> "CREATE" + | Delete -> "DELETE" + | Delete_self -> "DELETE_SELF" + | Modify -> "MODIFY" + | Move_self -> "MOVE_SELF" + | Moved_from -> "MOVED_FROM" + | Moved_to -> "MOVED_TO" + | Open -> "OPEN" + | Ignored -> "IGNORED" + | Isdir -> "ISDIR" + | Q_overflow -> "Q_OVERFLOW" + | Unmount -> "UNMOUNT" type watch = int + type event = watch * event_kind list * int32 * string option external create : unit -> Unix.file_descr = "caml_inotify_init" + external add_watch : Unix.file_descr -> string -> selector list -> watch = "caml_inotify_add_watch" + external rm_watch : Unix.file_descr -> watch -> unit = "caml_inotify_rm_watch" -external convert : Bytes.t -> (watch * event_kind list * int32 * int) +external convert : Bytes.t -> watch * event_kind list * int32 * int = "caml_inotify_convert" + external struct_size : unit -> int = "caml_inotify_struct_size" + external name_max : unit -> int = "caml_inotify_name_max" let int_of_watch watch = watch @@ -90,12 +95,14 @@ let int_of_watch watch = watch let watch_of_int watch = watch let string_of_event (watch, events, cookie, name) = - Printf.sprintf "watch=%d cookie=%ld events=%s%s" - watch cookie - (String.concat "|" (List.map string_of_event_kind events)) - (match name with - | None -> "" - | Some name' -> Printf.sprintf " %S" name') + Printf.sprintf + "watch=%d cookie=%ld events=%s%s" + watch + cookie + (String.concat "|" (List.map string_of_event_kind events)) + (match name with + | None -> "" + | Some name' -> Printf.sprintf " %S" name') let read fd = (* Turns out that reading from blocking descriptors always requires a buffer @@ -109,23 +116,28 @@ let read fd = sizeof(struct inotify_event) + NAME_MAX + 1 *) let event_size = struct_size () in - - let buf_size = event_size + (name_max ()) + 1 in + let buf_size = event_size + name_max () + 1 in let buf = Bytes.create buf_size in let bytes_read = Unix.read fd buf 0 buf_size in - let read_c_string pos = - Bytes.sub_string buf pos ((Bytes.index_from buf pos '\x00') - pos) + Bytes.sub_string buf pos (Bytes.index_from buf pos '\x00' - pos) in - let rec read_one pos rest = - if bytes_read < pos + event_size then rest + if bytes_read < pos + event_size then + rest else - let watch, mask, cookie, len = convert (Bytes.sub buf pos event_size) in - if bytes_read < pos + event_size + len then rest + let (watch, mask, cookie, len) = + convert (Bytes.sub buf pos event_size) + in + if bytes_read < pos + event_size + len then + rest else - let name = if len > 0 then Some (read_c_string (pos + event_size)) else None in + let name = + if len > 0 then + Some (read_c_string (pos + event_size)) + else + None + in read_one (pos + event_size + len) ((watch, mask, cookie, name) :: rest) in - List.rev (read_one 0 []) diff --git a/hack/third-party/inotify/inotify.mli b/hack/third-party/inotify/inotify.mli index 71bed250da3..5c5baf2eeee 100644 --- a/hack/third-party/inotify/inotify.mli +++ b/hack/third-party/inotify/inotify.mli @@ -20,90 +20,91 @@ (** Type of event masks. 
*) type selector = -| S_Access -| S_Attrib -| S_Close_write -| S_Close_nowrite -| S_Create -| S_Delete -| S_Delete_self -| S_Modify -| S_Move_self -| S_Moved_from -| S_Moved_to -| S_Open -| S_Dont_follow -| S_Mask_add -| S_Oneshot -| S_Onlydir -| S_Move -| S_Close -| S_All + | S_Access + | S_Attrib + | S_Close_write + | S_Close_nowrite + | S_Create + | S_Delete + | S_Delete_self + | S_Modify + | S_Move_self + | S_Moved_from + | S_Moved_to + | S_Open + | S_Dont_follow + | S_Mask_add + | S_Oneshot + | S_Onlydir + | S_Move + | S_Close + | S_All (** Type of observed events. *) type event_kind = -| Access -| Attrib -| Close_write -| Close_nowrite -| Create -| Delete -| Delete_self -| Modify -| Move_self -| Moved_from -| Moved_to -| Open -| Ignored -| Isdir -| Q_overflow -| Unmount + | Access + | Attrib + | Close_write + | Close_nowrite + | Create + | Delete + | Delete_self + | Modify + | Move_self + | Moved_from + | Moved_to + | Open + | Ignored + | Isdir + | Q_overflow + | Unmount -(** Type of watch descriptors. *) type watch +(** Type of watch descriptors. *) +type event = watch * event_kind list * int32 * string option (** Type of received events, corresponding to [struct inotify_event]. In event [wd, kinds, cookie, path], [wd] corresponds to [inotify_event.wd], [kinds] corresponds to the bits set in [inotify_event.mask], [cookie] corresponds to [inotify_event.cookie], [path] is [Some filename] if [inotify_event.len > 0] and [None] otherwise. *) -type event = watch * event_kind list * int32 * string option +val int_of_watch : watch -> int (** [int_of_watch wd] returns the underlying integer representation of watch descriptor [wd]. *) -val int_of_watch : watch -> int (**/**) (* [watch_of_int i] is the {!watch} corresponding to the integer [i]. It violates the construction privacy of the {!watch} type but is useful when using {!event} as a network portable type. *) -val watch_of_int : int -> watch +val watch_of_int : int -> watch (**/**) +val string_of_event_kind : event_kind -> string (** [string_of_event_kind ek] returns the string representation of event kind [ek], e.g. [string_of_event_kind Move_self] ≡ ["MOVE_SELF"]. *) -val string_of_event_kind : event_kind -> string +val string_of_event : event -> string (** [string_of_event event] returns the string representation of event [ev], e.g. [string_of_event] *) -val string_of_event : event -> string +val create : unit -> Unix.file_descr (** [create ()] returns a fresh inotify file descriptor or raises [Unix.Unix_error(errno, "inotify_init", "")]. *) -val create : unit -> Unix.file_descr +val add_watch : Unix.file_descr -> string -> selector list -> watch (** [add_watch fd path events] starts observing events from [events] for path [path] at inotify file descriptor [fd] and returns a fresh watch descriptor, or raises [Unix.Unix_error(errno, "inotify_add_watch", path)]. *) -val add_watch : Unix.file_descr -> string -> selector list -> watch +val rm_watch : Unix.file_descr -> watch -> unit (** [rm_watch fd watch] stops observing events corresponding to watch descriptor [watch] at inotify file descriptor [fd], or raises [Unix.Unix_error(errno, "inotify_rm_watch", path)]. *) -val rm_watch : Unix.file_descr -> watch -> unit +val read : Unix.file_descr -> event list (** [read fd] requests a list of events for inotify file descriptor [fd]. 
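(* A minimal watch-loop sketch over the inotify bindings documented above;
 * error handling and rm_watch cleanup are omitted, and the directory path is
 * only an example. *)
let _print_next_events dir =
  let fd = Inotify.create () in
  let _wd =
    Inotify.add_watch fd dir [Inotify.S_Create; Inotify.S_Delete; Inotify.S_Modify]
  in
  (* read blocks until at least one event is available *)
  let events = Inotify.read fd in
  List.iter (fun ev -> print_endline (Inotify.string_of_event ev)) events;
  Unix.close fd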
Each event will include the watch descriptor, which can be used to determine the path that caused it, and [Moved_to] and [Moved_from] events will include a cookie that allows @@ -112,4 +113,3 @@ val rm_watch : Unix.file_descr -> watch -> unit If {!read} is not called often enough, the kernel event buffer may overflow, in which case the event kind list will consist of [[Q_overflow]]. Such an event would be associated with a watch descriptor [-1], never returned from {!add_watch}. *) -val read : Unix.file_descr -> event list diff --git a/hack/third-party/libancillary/dune b/hack/third-party/libancillary/dune new file mode 100644 index 00000000000..839892cff50 --- /dev/null +++ b/hack/third-party/libancillary/dune @@ -0,0 +1,7 @@ +(library + (name libancillary_c) + (wrapped false) + (c_names + fd_recv + fd_send) + (c_flags (:standard -I%{env:CMAKE_SOURCE_DIR=xxx}))) diff --git a/hack/utils/buffered_line_reader.ml b/hack/utils/buffered_line_reader.ml deleted file mode 100644 index c646261885e..00000000000 --- a/hack/utils/buffered_line_reader.ml +++ /dev/null @@ -1,190 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -open Hh_core - -(** Our Unix systems only allow reading 64KB chunks at a time. - * Trying to read more than 64KB results in only 64KB being read. *) -let chunk_size = 65536 - -module Regular_reader: Buffered_line_reader_sig.READER - with type 'a result = 'a and type fd = Unix.file_descr = -struct - type 'a result = 'a - type fd = Unix.file_descr - - let return x = x - let fail exn = raise exn - let (>>=) a f = f a - - let read fd ~buffer ~offset ~size = Unix.read fd buffer offset size - - let is_readable fd = - let readable, _, _ = Unix.select [fd] [] [] 0.0 in - readable <> [] - - let open_devnull () = Unix.openfile "/dev/null" [Unix.O_RDONLY] 0o440 -end - -module Functor (Reader: Buffered_line_reader_sig.READER) - : Buffered_line_reader_sig.S with type 'a result = 'a Reader.result and type fd = Reader.fd = -struct - let (>>=) = Reader.(>>=) - - type fd = Reader.fd - type 'a result = 'a Reader.result - - type t = { - fd: Reader.fd; - (** The bytes left after the last content that haven't been consumed yet. *) - unconsumed_buffer: string option ref; - } - - let set_buffer r b = - r.unconsumed_buffer := b - - (** A non-throwing version of String.index. *) - let index s c = - try begin - let i = String.index s c in - `First_appearance i - end - with - | Not_found -> `No_appearance - - let trim_trailing_cr = function - | "" -> "" - | s -> - let len = String.length s in - if s.[len - 1] = '\r' then String.sub s 0 (len - 1) else s - - let merge_chunks last_chunk chunks_rev = - let chunks_rev = last_chunk :: chunks_rev in - let chunks = List.rev chunks_rev in - String.concat "" chunks - - (** This function reads a line, delimited by LF (unix) or CRLF (internet)... - * Recursively read a chunk from the file descriptor until we see a newline - * character, building up the chunks accumulator along the way. - * Any remaining bytes not consumed (bytes past the newline) are placed in the - * reader's unconsumed_buffer which will be consumed on the next - * call to get_next_line or get_next_bytes. 
*) - let rec read_line chunks r = - let b = Bytes.create chunk_size in - - Reader.read r.fd ~buffer:b ~offset:0 ~size:chunk_size >>= (fun bytes_read -> - if bytes_read == 0 then raise End_of_file; - let b = Bytes.sub_string b 0 bytes_read in - match index b '\n' with - | `No_appearance -> - read_line (b :: chunks) r - | `First_appearance i -> - let tail = String.sub b 0 i in - let result = merge_chunks tail chunks in - let () = if (i + 1) < bytes_read - then - (** We read some bytes beyond the first newline character. *) - let length = bytes_read - (i + 1) in - (** We skip the newline character. *) - let remainder = String.sub b (i + 1) length in - set_buffer r (Some remainder) - else - (** We didn't read any bytes beyond the first newline character. *) - set_buffer r None - in - Reader.return @@ trim_trailing_cr result - ) - - let get_next_line r = - match !(r.unconsumed_buffer) with - | None -> read_line [] r - | Some remainder -> begin - match index remainder '\n' with - | `No_appearance -> - let () = set_buffer r None in - read_line [remainder] r - | `First_appearance i -> - let result = String.sub remainder 0 i in - let () = if (i + 1) < (String.length remainder) - then - (** There are some bytes left beyond the first newline character. *) - let length = (String.length remainder) - (i + 1) in - let remainder = String.sub remainder (i + 1) length in - set_buffer r (Some remainder) - else - (** No bytes beyond the first newline character. *) - set_buffer r None - in - Reader.return @@ trim_trailing_cr result - end - - let rec read_bytes r size chunks = - let bytes_desired = min chunk_size size in - let b = Bytes.create bytes_desired in - Reader.read r.fd ~buffer:b ~offset:0 ~size:bytes_desired >>= (fun bytes_read -> - if bytes_read == 0 then raise End_of_file; - if bytes_read < size then - let b = Bytes.sub_string b 0 bytes_read in - read_bytes r (size - bytes_read) (b :: chunks) - else - let () = set_buffer r None in - (* `unsafe_to_string` is acceptable here because `merge_chunks` - immediately makes a copy via `concat` *) - Reader.return @@ merge_chunks (Bytes.unsafe_to_string b) chunks - ) - - let get_next_bytes r size = - assert (size > 0); - match !(r.unconsumed_buffer) with - | None -> read_bytes r size [] - | Some remainder -> begin - let remainder_length = String.length remainder in - if remainder_length < size then - let () = set_buffer r None in - read_bytes r (size - remainder_length) [remainder] - else if remainder_length = size then - let () = set_buffer r None in - Reader.return @@ remainder - else - let extra = String.sub remainder size (remainder_length - size) in - let () = set_buffer r (Some extra) in - Reader.return @@ String.sub remainder 0 size - end - - let has_buffered_content r = !(r.unconsumed_buffer) <> None - - - let is_readable r = - has_buffered_content r || Reader.is_readable r.fd - - let create fd = { - fd = fd; - unconsumed_buffer = ref None; - } - - let null_reader_ref = ref None - - let get_null_reader () = - match !null_reader_ref with - | Some x -> Reader.return x - | None -> - Reader.open_devnull () - >>= (fun fd -> - let null_reader = { - fd; - unconsumed_buffer = ref None; - } in - null_reader_ref := Some null_reader; - Reader.return null_reader - ) - - let get_fd r = r.fd -end - -include Functor (Regular_reader) diff --git a/hack/utils/buffered_line_reader.mli b/hack/utils/buffered_line_reader.mli deleted file mode 100644 index 618d34d6d75..00000000000 --- a/hack/utils/buffered_line_reader.mli +++ /dev/null @@ -1,13 +0,0 @@ -(** - * Copyright (c) 
2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -include Buffered_line_reader_sig.S with type 'a result = 'a and type fd = Unix.file_descr - -module Functor : functor (Reader: Buffered_line_reader_sig.READER) - -> Buffered_line_reader_sig.S with type 'a result = 'a Reader.result and type fd = Reader.fd diff --git a/hack/utils/buffered_line_reader/buffered_line_reader.ml b/hack/utils/buffered_line_reader/buffered_line_reader.ml new file mode 100644 index 00000000000..0ce48f87bcd --- /dev/null +++ b/hack/utils/buffered_line_reader/buffered_line_reader.ml @@ -0,0 +1,185 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Hh_core + +(* Our Unix systems only allow reading 64KB chunks at a time. + * Trying to read more than 64KB results in only 64KB being read. *) +let chunk_size = 65536 + +module Regular_reader : + Buffered_line_reader_sig.READER + with type 'a result = 'a + and type fd = Unix.file_descr = struct + type 'a result = 'a + + type fd = Unix.file_descr + + let return x = x + + let fail exn = raise exn + + let ( >>= ) a f = f a + + let read fd ~buffer ~offset ~size = Unix.read fd buffer offset size + + let is_readable fd = + let (readable, _, _) = Unix.select [fd] [] [] 0.0 in + readable <> [] + + let open_devnull () = Unix.openfile "/dev/null" [Unix.O_RDONLY] 0o440 +end + +module Functor (Reader : Buffered_line_reader_sig.READER) : + Buffered_line_reader_sig.S + with type 'a result = 'a Reader.result + and type fd = Reader.fd = struct + let ( >>= ) = Reader.( >>= ) + + type fd = Reader.fd + + type 'a result = 'a Reader.result + + type t = { + fd: Reader.fd; + (* The bytes left after the last content that haven't been consumed yet. *) + unconsumed_buffer: string option ref; + } + + let set_buffer r b = r.unconsumed_buffer := b + + (** A non-throwing version of String.index. *) + let index s c = + try + let i = String.index s c in + `First_appearance i + with Not_found -> `No_appearance + + let trim_trailing_cr = function + | "" -> "" + | s -> + let len = String.length s in + if s.[len - 1] = '\r' then + String.sub s 0 (len - 1) + else + s + + let merge_chunks last_chunk chunks_rev = + let chunks_rev = last_chunk :: chunks_rev in + let chunks = List.rev chunks_rev in + String.concat "" chunks + + (* This function reads a line, delimited by LF (unix) or CRLF (internet)... + * Recursively read a chunk from the file descriptor until we see a newline + * character, building up the chunks accumulator along the way. + * Any remaining bytes not consumed (bytes past the newline) are placed in the + * reader's unconsumed_buffer which will be consumed on the next + * call to get_next_line or get_next_bytes. *) + let rec read_line chunks r = + let b = Bytes.create chunk_size in + Reader.read r.fd ~buffer:b ~offset:0 ~size:chunk_size + >>= fun bytes_read -> + if bytes_read == 0 then raise End_of_file; + let b = Bytes.sub_string b 0 bytes_read in + match index b '\n' with + | `No_appearance -> read_line (b :: chunks) r + | `First_appearance i -> + let tail = String.sub b 0 i in + let result = merge_chunks tail chunks in + let () = + if i + 1 < bytes_read then + (* We read some bytes beyond the first newline character. 
*) + let length = bytes_read - (i + 1) in + (* We skip the newline character. *) + let remainder = String.sub b (i + 1) length in + set_buffer r (Some remainder) + else + (* We didn't read any bytes beyond the first newline character. *) + set_buffer r None + in + Reader.return @@ trim_trailing_cr result + + let get_next_line r = + match !(r.unconsumed_buffer) with + | None -> read_line [] r + | Some remainder -> + begin + match index remainder '\n' with + | `No_appearance -> + let () = set_buffer r None in + read_line [remainder] r + | `First_appearance i -> + let result = String.sub remainder 0 i in + let () = + if i + 1 < String.length remainder then + (* There are some bytes left beyond the first newline character. *) + let length = String.length remainder - (i + 1) in + let remainder = String.sub remainder (i + 1) length in + set_buffer r (Some remainder) + else + (* No bytes beyond the first newline character. *) + set_buffer r None + in + Reader.return @@ trim_trailing_cr result + end + + let rec read_bytes r size chunks = + let bytes_desired = min chunk_size size in + let b = Bytes.create bytes_desired in + Reader.read r.fd ~buffer:b ~offset:0 ~size:bytes_desired + >>= fun bytes_read -> + if bytes_read == 0 then raise End_of_file; + if bytes_read < size then + let b = Bytes.sub_string b 0 bytes_read in + read_bytes r (size - bytes_read) (b :: chunks) + else + let () = set_buffer r None in + (* `unsafe_to_string` is acceptable here because `merge_chunks` + immediately makes a copy via `concat` *) + Reader.return @@ merge_chunks (Bytes.unsafe_to_string b) chunks + + let get_next_bytes r size = + assert (size > 0); + match !(r.unconsumed_buffer) with + | None -> read_bytes r size [] + | Some remainder -> + let remainder_length = String.length remainder in + if remainder_length < size then + let () = set_buffer r None in + read_bytes r (size - remainder_length) [remainder] + else if remainder_length = size then + let () = set_buffer r None in + Reader.return @@ remainder + else + let extra = String.sub remainder size (remainder_length - size) in + let () = set_buffer r (Some extra) in + Reader.return @@ String.sub remainder 0 size + + let has_buffered_content r = !(r.unconsumed_buffer) <> None + + let is_readable r = has_buffered_content r || Reader.is_readable r.fd + + let create fd = { fd; unconsumed_buffer = ref None } + + let null_reader_ref = ref None + + let get_null_reader () = + match !null_reader_ref with + | Some x -> Reader.return x + | None -> + Reader.open_devnull () + >>= fun fd -> + let null_reader = { fd; unconsumed_buffer = ref None } in + null_reader_ref := Some null_reader; + Reader.return null_reader + + let get_fd r = r.fd +end + +include Functor (Regular_reader) diff --git a/hack/utils/buffered_line_reader/buffered_line_reader.mli b/hack/utils/buffered_line_reader/buffered_line_reader.mli new file mode 100644 index 00000000000..8d5dcf52bd3 --- /dev/null +++ b/hack/utils/buffered_line_reader/buffered_line_reader.mli @@ -0,0 +1,18 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
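(* A usage sketch for the reader defined above: write two newline-terminated
 * messages into a pipe and read them back line by line. *)
let _read_two_lines () =
  let (read_end, write_end) = Unix.pipe () in
  let payload = Bytes.of_string "hello\nworld\n" in
  let _written = Unix.write write_end payload 0 (Bytes.length payload) in
  let reader = Buffered_line_reader.create read_end in
  let first = Buffered_line_reader.get_next_line reader in
  (* "world\n" was read in the same 64KB chunk, so it is now sitting in the
   * unconsumed buffer rather than in the file descriptor *)
  assert (Buffered_line_reader.has_buffered_content reader);
  let second = Buffered_line_reader.get_next_line reader in
  assert ((first, second) = ("hello", "world"));
  Unix.close read_end;
  Unix.close write_end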
+ * + *) + +include + Buffered_line_reader_sig.S + with type 'a result = 'a + and type fd = Unix.file_descr + +module Functor (Reader : Buffered_line_reader_sig.READER) : + Buffered_line_reader_sig.S + with type 'a result = 'a Reader.result + and type fd = Reader.fd diff --git a/hack/utils/buffered_line_reader/buffered_line_reader_lwt.ml b/hack/utils/buffered_line_reader/buffered_line_reader_lwt.ml new file mode 100644 index 00000000000..06e02983298 --- /dev/null +++ b/hack/utils/buffered_line_reader/buffered_line_reader_lwt.ml @@ -0,0 +1,31 @@ +(* + * Copyright (c) 2018, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +module Regular_reader : + Buffered_line_reader_sig.READER + with type 'a result = 'a Lwt.t + and type fd = Lwt_unix.file_descr = struct + type 'a result = 'a Lwt.t + + type fd = Lwt_unix.file_descr + + let return = Lwt.return + + let fail = Lwt.fail + + let ( >>= ) = Lwt.( >>= ) + + let read fd ~buffer ~offset ~size = Lwt_unix.read fd buffer offset size + + let is_readable = Lwt_unix.readable + + let open_devnull () = Lwt_unix.openfile "/dev/null" [Unix.O_RDONLY] 0o440 +end + +include Buffered_line_reader.Functor (Regular_reader) diff --git a/hack/utils/buffered_line_reader/buffered_line_reader_lwt.mli b/hack/utils/buffered_line_reader/buffered_line_reader_lwt.mli new file mode 100644 index 00000000000..1f9d3974168 --- /dev/null +++ b/hack/utils/buffered_line_reader/buffered_line_reader_lwt.mli @@ -0,0 +1,13 @@ +(* + * Copyright (c) 2018, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +include + Buffered_line_reader_sig.S + with type 'a result = 'a Lwt.t + and type fd = Lwt_unix.file_descr diff --git a/hack/utils/buffered_line_reader/buffered_line_reader_sig.ml b/hack/utils/buffered_line_reader/buffered_line_reader_sig.ml new file mode 100644 index 00000000000..e6cb76344b6 --- /dev/null +++ b/hack/utils/buffered_line_reader/buffered_line_reader_sig.ml @@ -0,0 +1,69 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(** + * This module is needed because Unix.select doesn't play well with + * input_line on Ocaml channels.. i.e., when a buffered read into an + * Ocaml channel consumes two complete lines from the file descriptor, the next + * select will say there is nothing to read when in fact there is + * something in the channel. This wouldn't be a problem if Ocaml channel's API + * supported a "has buffered content" call, so you could check if the + * buffer contains something as well as doing a Unix select to know for real if + * there is content coming. + * + * The "has_buffered_content" method below does exactly that. + * + * is_readable is a friendly wrapper around "has_buffered_content" and + * non-blocking Unix.select. + *) + +module type S = sig + type 'a result + + type fd + + type t + + val create : fd -> t + + val get_null_reader : unit -> t result + + val has_buffered_content : t -> bool + + val is_readable : t -> bool + (** + * Returns true if and only if there is content to be read (does not know if + * the incoming content is newline-terminated. So we can't actually know + * if get_next_line will be non-blocking. 
+ *) + + val get_fd : t -> fd + + val get_next_line : t -> string result + + val get_next_bytes : t -> int -> string result +end + +module type READER = sig + type 'a result + + type fd + + val return : 'a -> 'a result + + val fail : exn -> 'a result + + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val read : fd -> buffer:bytes -> offset:int -> size:int -> int result + + val is_readable : fd -> bool + + val open_devnull : unit -> fd result +end diff --git a/hack/utils/buffered_line_reader/buffered_line_reader_sig.mli b/hack/utils/buffered_line_reader/buffered_line_reader_sig.mli new file mode 100644 index 00000000000..902bee5a4a8 --- /dev/null +++ b/hack/utils/buffered_line_reader/buffered_line_reader_sig.mli @@ -0,0 +1,54 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +module type S = sig + type 'a result + + type fd + + type t + + val create : fd -> t + + val get_null_reader : unit -> t result + + val has_buffered_content : t -> bool + + val is_readable : t -> bool + + val get_fd : t -> fd + + val get_next_line : t -> string result + + val get_next_bytes : t -> int -> string result +end + +module type READER = sig + type 'a result + + type fd + + val return : 'a -> 'a result + + val fail : exn -> 'a result + + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val read : fd -> buffer:bytes -> offset:int -> size:int -> int result + + val is_readable : fd -> bool + + val open_devnull : unit -> fd result +end diff --git a/hack/utils/buffered_line_reader/dune b/hack/utils/buffered_line_reader/dune new file mode 100644 index 00000000000..2211afb4dda --- /dev/null +++ b/hack/utils/buffered_line_reader/dune @@ -0,0 +1,18 @@ +(library + (name buffered_line_reader) + (wrapped false) + (modules + buffered_line_reader + buffered_line_reader_sig) + (libraries + utils_core)) + +(library + (name buffered_line_reader_lwt) + (wrapped false) + (modules + buffered_line_reader_lwt) + (libraries + buffered_line_reader + lwt + lwt.unix)) diff --git a/hack/utils/buffered_line_reader_lwt.ml b/hack/utils/buffered_line_reader_lwt.ml deleted file mode 100644 index d6586430929..00000000000 --- a/hack/utils/buffered_line_reader_lwt.ml +++ /dev/null @@ -1,27 +0,0 @@ -(** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
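(* The Lwt flavour exposes the same signature with ['a result = 'a Lwt.t], so
 * callers simply bind on the returned promises; a sketch only. *)
let _read_one_line_lwt (fd : Lwt_unix.file_descr) : string Lwt.t =
  let reader = Buffered_line_reader_lwt.create fd in
  Lwt.bind
    (Buffered_line_reader_lwt.get_next_line reader)
    (fun line -> Lwt.return ("got: " ^ line))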
- * -*) - -module Regular_reader : Buffered_line_reader_sig.READER - with type 'a result = 'a Lwt.t and type fd = Lwt_unix.file_descr = -struct - type 'a result = 'a Lwt.t - type fd = Lwt_unix.file_descr - - let return = Lwt.return - let fail = Lwt.fail - let (>>=) = Lwt.(>>=) - - let read fd ~buffer ~offset ~size = Lwt_unix.read fd buffer offset size - - let is_readable = Lwt_unix.readable - - let open_devnull () = Lwt_unix.openfile "/dev/null" [Unix.O_RDONLY] 0o440 -end - -include Buffered_line_reader.Functor(Regular_reader) diff --git a/hack/utils/buffered_line_reader_lwt.mli b/hack/utils/buffered_line_reader_lwt.mli deleted file mode 100644 index f2311d3ece3..00000000000 --- a/hack/utils/buffered_line_reader_lwt.mli +++ /dev/null @@ -1,10 +0,0 @@ -(** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -include Buffered_line_reader_sig.S with type 'a result = 'a Lwt.t and type fd = Lwt_unix.file_descr diff --git a/hack/utils/buffered_line_reader_sig.ml b/hack/utils/buffered_line_reader_sig.ml deleted file mode 100644 index deb98b1dc54..00000000000 --- a/hack/utils/buffered_line_reader_sig.ml +++ /dev/null @@ -1,62 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -(** - * This module is needed because Unix.select doesn't play well with - * input_line on Ocaml channels.. i.e., when a buffered read into an - * Ocaml channel consumes two complete lines from the file descriptor, the next - * select will say there is nothing to read when in fact there is - * something in the channel. This wouldn't be a problem if Ocaml channel's API - * supported a "has buffered content" call, so you could check if the - * buffer contains something as well as doing a Unix select to know for real if - * there is content coming. - * - * The "has_buffered_content" method below does exactly that. - * - * is_readable is a friendly wrapper around "has_buffered_content" and - * non-blocking Unix.select. - *) - -module type S = sig - type 'a result - type fd - type t - - val create: fd -> t - - val get_null_reader: unit -> t result - - val has_buffered_content: t -> bool - - (** - * Returns true if and only if there is content to be read (does not know if - * the incoming content is newline-terminated. So we can't actually know - * if get_next_line will be non-blocking. - *) - val is_readable : t -> bool - - val get_fd: t -> fd - - val get_next_line: t -> string result - - val get_next_bytes: t -> int -> string result -end - -module type READER = sig - type 'a result - type fd - - val return: 'a -> 'a result - val fail: exn -> 'a result - val (>>=): 'a result -> ('a -> 'b result) -> 'b result - - val read: fd -> buffer:bytes -> offset:int -> size:int -> int result - val is_readable: fd -> bool - val open_devnull: unit -> fd result -end diff --git a/hack/utils/build_id.ml b/hack/utils/build_id.ml deleted file mode 100644 index 46ebe389437..00000000000 --- a/hack/utils/build_id.ml +++ /dev/null @@ -1,33 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * - *) - -external get_build_revision : unit -> string = "hh_get_build_revision" -external get_build_commit_time : unit -> int = "hh_get_build_commit_time" -external get_build_commit_time_string : unit -> string = - "hh_get_build_commit_time_string" -external get_build_major : unit -> int = "hh_get_build_major" -external get_build_minor : unit -> int = "hh_get_build_minor" - -let build_revision = get_build_revision () - -let build_id_ohai = build_revision ^ " " ^ get_build_commit_time_string () - -let build_commit_time = get_build_commit_time () - -let build_major_version = get_build_major () -let build_minor_version = get_build_minor () - -(* Monotonically increasing identifier that can be used when we introduce - * backward incompatible changes in hh_client commands, and to signal - * new capabilities to clients. - * v1 (hvvm 3.15, 11 May 2016) - persistent connection introduced - * v4 (hvvm 3.18, 7 Nov 2016) - persistent connection stable - * v5 (hvvm 3.23, 17 Nov 2017) - 'hh_client lsp' stable - *) -let build_api_version = 5 diff --git a/hack/utils/build_mode/dev/build_mode.ml b/hack/utils/build_mode/dev/build_mode.ml index b4a2dd94cb2..e1502939415 100644 --- a/hack/utils/build_mode/dev/build_mode.ml +++ b/hack/utils/build_mode/dev/build_mode.ml @@ -7,4 +7,4 @@ * *) - let dev = true +let dev = true diff --git a/hack/utils/build_mode/dev/dune b/hack/utils/build_mode/dev/dune new file mode 100644 index 00000000000..0e08745f91c --- /dev/null +++ b/hack/utils/build_mode/dev/dune @@ -0,0 +1,5 @@ +(library + (name build_mode_dev) + (wrapped false) + (modules build_mode) +) diff --git a/hack/utils/build_mode/dune b/hack/utils/build_mode/dune new file mode 100644 index 00000000000..daff7fe773e --- /dev/null +++ b/hack/utils/build_mode/dune @@ -0,0 +1,6 @@ +(library + (name build_mode) + (wrapped false) + (modules) + (libraries build_mode_prod) +) diff --git a/hack/utils/build_mode/prod/build_mode.ml b/hack/utils/build_mode/prod/build_mode.ml index e4b78d2d82c..e02a78cd794 100644 --- a/hack/utils/build_mode/prod/build_mode.ml +++ b/hack/utils/build_mode/prod/build_mode.ml @@ -7,4 +7,4 @@ * *) - let dev = false +let dev = false diff --git a/hack/utils/build_mode/prod/dune b/hack/utils/build_mode/prod/dune new file mode 100644 index 00000000000..acd71f16c03 --- /dev/null +++ b/hack/utils/build_mode/prod/dune @@ -0,0 +1,5 @@ +(library + (name build_mode_prod) + (wrapped false) + (modules build_mode) +) diff --git a/hack/utils/cgroup/cGroup.ml b/hack/utils/cgroup/cGroup.ml new file mode 100644 index 00000000000..dfc918e2e37 --- /dev/null +++ b/hack/utils/cgroup/cGroup.ml @@ -0,0 +1,167 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Core_kernel +open Result.Monad_infix + +let spf = Printf.sprintf + +(* Little helper module to help memoize things. 
Probably could be pulled out into its own module + * at some point *) +module Memoize : sig + val forever : f:(unit -> 'a Lwt.t) -> unit -> 'a Lwt.t + + val until : seconds:float -> f:(unit -> 'a Lwt.t) -> unit -> 'a Lwt.t +end = struct + let forever ~f = + let memoized_result = ref None in + fun () -> + match !memoized_result with + | None -> + let%lwt result = f () in + memoized_result := Some result; + Lwt.return result + | Some result -> Lwt.return result + + let until ~seconds ~f = + let memoized_result = ref None in + let fetch () = + let%lwt result = f () in + memoized_result := Some (Unix.gettimeofday () +. seconds, result); + Lwt.return result + in + fun () -> + match !memoized_result with + | Some (good_until, result) when Unix.gettimeofday () < good_until -> + Lwt.return result + | _ -> fetch () +end + +(* Like Sys_utils.cat but with lwt. Probably also could be moved to its own module *) +let cat file = + (* Some low level system operations cannot be canceled and will just ignore cancellation. If we + * wrap this code in `Lwt.protected` we get a thread that is cancellable while the underlying + * operations proceed as normal. *) + Lwt.protected + (try%lwt + let%lwt ic = + Lwt_io.open_file + ~flags:[Unix.O_RDONLY; Unix.O_NONBLOCK] + ~mode:Lwt_io.Input + ~perm:0o666 + file + in + let%lwt contents = Lwt_io.read ic in + let%lwt () = Lwt_io.close ic in + Lwt.return_ok contents + with + | Unix.Unix_error (Unix.ENOENT, _, _) -> + Lwt.return_error "File doesn't exist" + | Unix.Unix_error (Unix.EBADF, _, _) -> + Lwt.return_error "Lost fd for the file") + +(* I've never seen cgroup mounted elsewhere, so it's probably fine to hardcode this for now *) +let cgroup_dir = "/sys/fs/cgroup" + +let assert_is_using_cgroup_v2 = + Memoize.forever ~f:(fun () -> + if%lwt Lwt_unix.file_exists cgroup_dir then + (* /sys/fs/cgroup/memory exists for cgroup v1 but not v2. It's an easy way to tell the + * difference between versions *) + if%lwt Lwt_unix.file_exists (spf "%s/memory" cgroup_dir) then + Lwt.return_error + (spf "cgroup v1 is mounted at %s. We need v2" cgroup_dir) + else + Lwt.return_ok () + else + Lwt.return_error (spf "%s doesn't exist" cgroup_dir)) + +(* I don't really expect us to switch cgroups often, but let's only cache for 5 seconds *) +let get_cgroup_name = + Memoize.until ~seconds:5.0 ~f:(fun () -> + ProcFS.first_cgroup_for_pid (Unix.getpid ()) |> Lwt.return) + +type stats = { + total: int; + (* The total physical memory for the cgroup *) + total_swap: int; + (* The total amount of anonymous memory paged out to swap *) + + (* anon, file, and shmem are disjoint. 
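(* A sketch of the internal Memoize helpers above: [until] caches an Lwt
 * computation for a fixed number of seconds, [forever] caches it for the
 * lifetime of the process. Both examples reuse the [cat] helper defined in
 * this file; the /proc paths are only examples. *)
let _cached_uptime : unit -> (string, string) result Lwt.t =
  Memoize.until ~seconds:5.0 ~f:(fun () -> cat "/proc/uptime")

let _cmdline_once : unit -> (string, string) result Lwt.t =
  Memoize.forever ~f:(fun () -> cat "/proc/self/cmdline")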
If you add in the memory that the kernel uses, they should + * sum roughly to `total` *) + anon: int; + (* The amount of physical anonymous memory not used for shared memory *) + shmem: int; + (* The amount of physical anonymous memory being used as shared memory *) + file: int; (* The amount of physical memory which is not anonymous *) +} + +(* Some cgroup files contain only a single integer *) +let read_single_number_file path = + let%lwt contents_result = cat path in + Lwt.return + ( contents_result + >>= fun contents -> + try Ok (contents |> String.strip |> int_of_string) + with Failure _ -> Error "Failed to parse memory.current" ) + +let parse_stat stat_contents = + let stats = + String.split stat_contents ~on:'\n' + |> List.fold_left ~init:SMap.empty ~f:(fun stats line -> + match String.split line ~on:' ' with + | [key; raw_stat] -> + int_of_string_opt raw_stat + |> Option.value_map ~default:stats ~f:(fun stat -> + SMap.add key stat stats) + | _ -> stats) + in + let get key = + match SMap.get key stats with + | Some stat -> Ok stat + | None -> Error (spf "Failed to find %S in memory.stat" key) + in + get "anon" + >>= fun anon -> + get "file" + >>= fun file -> + get "shmem" + >>| fun shmem -> + (* In `memory.stat` the `file` stat includes `shmem` *) + (anon, file - shmem, shmem) + +let get_stats_for_cgroup (cgroup_name : string) : (stats, string) result Lwt.t + = + (* cgroup_name starts with a /, like /my_cgroup *) + let dir = spf "%s%s" cgroup_dir cgroup_name in + let%lwt total_result = + read_single_number_file (Filename.concat dir "memory.current") + and total_swap_result = + read_single_number_file (Filename.concat dir "memory.swap.current") + and stat_contents_result = cat (Filename.concat dir "memory.stat") in + Lwt.return + ( total_result + >>= fun total -> + total_swap_result + >>= fun total_swap -> + stat_contents_result + >>= fun stat_contents -> + parse_stat stat_contents + >>= (fun (anon, file, shmem) -> Ok { total; total_swap; anon; file; shmem }) + ) + +(* Like Result's >>= but for when you're dealing with result threads *) +let ( >>% ) + (type a b c) (thread : (a, b) result Lwt.t) (f : a -> (c, b) result Lwt.t) + : (c, b) result Lwt.t = + match%lwt thread with + | Ok value -> f value + | Error e -> Lwt.return_error e + +let get_stats () = + assert_is_using_cgroup_v2 () >>% get_cgroup_name >>% get_stats_for_cgroup diff --git a/hack/utils/cgroup/cGroup.mli b/hack/utils/cgroup/cGroup.mli new file mode 100644 index 00000000000..d13f8909a6a --- /dev/null +++ b/hack/utils/cgroup/cGroup.mli @@ -0,0 +1,16 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type stats = { + total: int; + total_swap: int; + anon: int; + shmem: int; + file: int; +} + +val get_stats : unit -> (stats, string) result Lwt.t diff --git a/hack/utils/cgroup/dune b/hack/utils/cgroup/dune new file mode 100644 index 00000000000..c33fffc0f09 --- /dev/null +++ b/hack/utils/cgroup/dune @@ -0,0 +1,10 @@ +(library + (name cgroup) + (wrapped false) + (libraries + core_kernel + lwt + lwt.unix + procfs + ) + (preprocess (pps lwt_ppx))) diff --git a/hack/utils/cli_args.ml b/hack/utils/cli_args.ml new file mode 100644 index 00000000000..44771baa53a --- /dev/null +++ b/hack/utils/cli_args.ml @@ -0,0 +1,262 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. 
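(* A sketch of consuming CGroup.get_stats from an Lwt caller; the caller needs
 * lwt_ppx for the let%lwt syntax, as this library does. *)
let _log_cgroup_memory () : unit Lwt.t =
  let%lwt stats_result = CGroup.get_stats () in
  match stats_result with
  | Error msg -> Lwt_io.eprintlf "cgroup stats unavailable: %s" msg
  | Ok { CGroup.total; total_swap; anon; shmem; file } ->
    Lwt_io.printlf
      "total=%d swap=%d anon=%d shmem=%d file=%d"
      total
      total_swap
      anon
      shmem
      file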
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Core_kernel +include Cli_args_sig.Types + +let files_to_check_range_to_json (range : files_to_check_range) : Hh_json.json + = + let range_properties = + match range.to_prefix_excl with + | Some to_prefix_excl -> + [ + ( "to_prefix_excl", + Hh_json.JSON_String (Relative_path.suffix to_prefix_excl) ); + ] + | None -> [] + in + let range_properties = + match range.from_prefix_incl with + | Some from_prefix_incl -> + let from_prefix_incl = + ( "from_prefix_incl", + Hh_json.JSON_String (Relative_path.suffix from_prefix_incl) ) + in + from_prefix_incl :: range_properties + | None -> range_properties + in + Hh_json.JSON_Object range_properties + +let files_to_check_spec_to_json (files_to_check_spec : files_to_check_spec) : + Hh_json.json = + match files_to_check_spec with + | Range (range : files_to_check_range) -> files_to_check_range_to_json range + | Prefix (prefix : Relative_path.t) -> + Hh_json.JSON_String (Relative_path.suffix prefix) + +let get_save_state_spec_json (spec : save_state_spec_info) : string = + let files_to_check_spec_list = + List.map ~f:files_to_check_spec_to_json spec.files_to_check + in + let (properties : (string * Hh_json.json) list) = + [ + ("gen_with_errors", Hh_json.JSON_Bool spec.gen_with_errors); + ("files_to_check", Hh_json.JSON_Array files_to_check_spec_list); + ("filename", Hh_json.JSON_String spec.filename); + ] + in + Hh_json.json_to_string ~pretty:true (Hh_json.JSON_Object properties) + +let save_state_spec_json_example = + { + files_to_check = + [ + Prefix (Relative_path.from_root "/some/path/prefix1"); + Range + { + from_prefix_incl = + Some (Relative_path.from_root "/from/path/prefix1"); + to_prefix_excl = Some (Relative_path.from_root "/to/path/prefix1"); + }; + Range + { + from_prefix_incl = + Some (Relative_path.from_root "/from/path/prefix2"); + to_prefix_excl = Some (Relative_path.from_root "/to/path/prefix2"); + }; + Range + { + from_prefix_incl = Some (Relative_path.from_root "/from/path/only"); + to_prefix_excl = None; + }; + Range + { + from_prefix_incl = None; + to_prefix_excl = Some (Relative_path.from_root "/to/path/only"); + }; + ]; + filename = "/some/dir/some_filename"; + gen_with_errors = true; + } + +let save_state_spec_json_descr = + Printf.sprintf + {|A JSON specification of how and what to save, e.g.: +%s +|} + (get_save_state_spec_json save_state_spec_json_example) + +(* TODO: gen examples *) +let saved_state_json_descr = + {|A JSON specification for how to initialize with a saved state. 
+ +Saved state JSON looks like this: +{ + "state": , + "corresponding_base_revision" : , + "deptable": , + "changes": [array of files changed since that saved state] +} + +For example: +{ + "deptable": "/home/unixname/saved-states/ss1.sql", + "state": "/home/unixname/saved-states/ss1", + "changes": [], + "prechecked_changes": [], + "corresponding_base_revision": "-1" +} + +You can put this saved state JSON into a file and pass this JSON as the argument: +{ + "from_file": "/home/unixname/saved-states/ss1.json" +} + +Alternatively, you can pass this JSON as the argument, with the saved state JSON embedded: +{ + "data_dump": + { + "deptable": "/home/unixname/saved-states/ss1.sql", + "state": "/home/unixname/saved-states/ss1", + "changes": [], + "prechecked_changes": [], + "corresponding_base_revision": "-1" + } +} +|} + +let get_path (key : string) json_obj : Relative_path.t option = + let value = Hh_json.Access.get_string key json_obj in + match value with + | Ok ((value : string), _keytrace) -> Some (Relative_path.from_root value) + | Error _ -> None + +let get_spec (spec_json : Hh_json.json) : files_to_check_spec = + try Prefix (Hh_json.get_string_exn spec_json |> Relative_path.from_root) + with _ -> + let from_prefix_incl = get_path "from_prefix_incl" (spec_json, []) in + let to_prefix_excl = get_path "to_prefix_excl" (spec_json, []) in + Range { from_prefix_incl; to_prefix_excl } + +let parse_save_state_json ((json : Hh_json.json), _keytrace) = + Hh_json.Access.( + let files_to_check = + Option.value + ~default:[] + (Hh_json.(get_field_opt (get_array "files_to_check")) json) + in + let files_to_check = List.map files_to_check ~f:get_spec in + let json = return json in + json + >>= get_string "filename" + >>= fun (filename, _filename_keytrace) -> + json + >>= get_bool "gen_with_errors" + >>= fun (gen_with_errors, _gen_with_errors_keytrace) -> + return { files_to_check; filename; gen_with_errors }) + +let get_save_state_spec (v : string option) : + (save_state_spec_info option, string) result = + match v with + | None -> Ok None + | Some blob -> + Hh_json.Access.( + let json = Hh_json.json_of_string blob in + let json = return json in + let parsed_spec_result = json >>= parse_save_state_json in + (match parsed_spec_result with + | Ok ((parsed_spec : save_state_spec_info), _keytrace) -> + Hh_logger.log "Parsed save state spec, everything's good"; + Ok (Some parsed_spec) + | Error spec_failure -> + let message = + Printf.sprintf + "Parsing failed:\n%s\nSee input: %s\n" + (access_failure_to_string spec_failure) + blob + in + Error message)) + +let parse_saved_state_json (json, _keytrace) = + let array_to_path_list = + List.map ~f:(fun file -> + Hh_json.get_string_exn file |> Relative_path.from_root) + in + let prechecked_changes = + Hh_json.(get_field_opt (Access.get_array "prechecked_changes")) json + in + let prechecked_changes = Option.value ~default:[] prechecked_changes in + let json = Hh_json.Access.return json in + Hh_json.Access.( + json + >>= get_string "state" + >>= fun (state, _state_keytrace) -> + json + >>= get_string "corresponding_base_revision" + >>= fun (for_base_rev, _for_base_rev_keytrace) -> + json + >>= get_string "deptable" + >>= fun (deptable, _deptable_keytrace) -> + json + >>= get_array "changes" + >>= fun (changes, _) -> + let naming_changes = + match json >>= get_val "naming_changes" with + | Ok (Hh_json.JSON_Array files, _) -> array_to_path_list files + | _ -> [] + in + let prechecked_changes = array_to_path_list prechecked_changes in + let changes = 
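(* as with [prechecked_changes] and [naming_changes], each entry is a path string resolved against the root *)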
array_to_path_list changes in + return + { + saved_state_fn = state; + corresponding_base_revision = for_base_rev; + deptable_fn = deptable; + prechecked_changes; + changes; + naming_changes; + }) + +let get_saved_state_spec (v : string option) : + (saved_state_target_info option, string) result = + match v with + | None -> Ok None + | Some blob -> + let json = Hh_json.json_of_string blob in + let json = Hh_json.Access.return json in + Hh_json.Access.( + let data_dump_parse_result = + json >>= get_obj "data_dump" >>= parse_saved_state_json + in + let from_file_parse_result = + json + >>= get_string "from_file" + >>= fun (filename, _filename_keytrace) -> + let contents = Sys_utils.cat filename in + let json = Hh_json.json_of_string contents in + Hh_json.Access.return json >>= parse_saved_state_json + in + (match (data_dump_parse_result, from_file_parse_result) with + | (Ok (parsed_data_dump, _), Ok (_parsed_from_file, _)) -> + Hh_logger.log + "Warning - %s" + ( "Parsed saved state target from both JSON blob data dump" + ^ " and from contents of file." ); + Hh_logger.log "Preferring data dump result"; + Ok (Some parsed_data_dump) + | (Ok (parsed_data_dump, _), Error _) -> Ok (Some parsed_data_dump) + | (Error _, Ok (parsed_from_file, _)) -> Ok (Some parsed_from_file) + | (Error data_dump_failure, Error from_file_failure) -> + let message = + Printf.sprintf + "Parsing failed:\n data dump failure: %s\n from_file failure: %s\nSee input: %s\n" + (access_failure_to_string data_dump_failure) + (access_failure_to_string from_file_failure) + blob + in + Error message)) diff --git a/hack/utils/cli_args.mli b/hack/utils/cli_args.mli new file mode 100644 index 00000000000..399e80f71fe --- /dev/null +++ b/hack/utils/cli_args.mli @@ -0,0 +1,9 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +include Cli_args_sig.S diff --git a/hack/utils/cli_args_sig.ml b/hack/utils/cli_args_sig.ml new file mode 100644 index 00000000000..5071eb7ae3c --- /dev/null +++ b/hack/utils/cli_args_sig.ml @@ -0,0 +1,55 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +module Types = struct + type saved_state_target_info = { + changes: Relative_path.t list; + naming_changes: Relative_path.t list; + corresponding_base_revision: string; + deptable_fn: string; + prechecked_changes: Relative_path.t list; + saved_state_fn: string; + } + + (* The idea of a file range necessarily means that the hypothetical list + of them is sorted in some way. It is valid to have None as either endpoint + because that simply makes it open-ended. For example, a range of files + { None - "/some/path" } includes all files with path less than /some/path *) + type files_to_check_range = { + from_prefix_incl: Relative_path.t option; + to_prefix_excl: Relative_path.t option; + } + + type files_to_check_spec = + | Range of files_to_check_range + | Prefix of Relative_path.t + + type save_state_spec_info = { + files_to_check: files_to_check_spec list; + (* The base name of the file into which we should save the naming table. *) + filename: string; + (* Indicates whether we should generate a state in the presence of errors. 
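In the JSON form this is the "gen_with_errors" boolean written by [get_save_state_spec_json] and read back by [get_save_state_spec].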
*) + gen_with_errors: bool; + } +end + +module type S = sig + include module type of Types + + val save_state_spec_json_descr : string + + val get_save_state_spec : + string option -> (save_state_spec_info option, string) result + + val get_save_state_spec_json : save_state_spec_info -> string + + val saved_state_json_descr : string + + val get_saved_state_spec : + string option -> (saved_state_target_info option, string) result +end diff --git a/hack/utils/collections/cSet.ml b/hack/utils/collections/cSet.ml index 8fb52e0bce7..3928dc79e1f 100644 --- a/hack/utils/collections/cSet.ml +++ b/hack/utils/collections/cSet.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * diff --git a/hack/utils/collections/dune b/hack/utils/collections/dune new file mode 100644 index 00000000000..48422c91f5d --- /dev/null +++ b/hack/utils/collections/dune @@ -0,0 +1,5 @@ +(library + (name collections) + (wrapped false) + (libraries + core_kernel)) diff --git a/hack/utils/collections/hashSet.ml b/hack/utils/collections/hashSet.ml index 54467f39a94..e0898741ec8 100644 --- a/hack/utils/collections/hashSet.ml +++ b/hack/utils/collections/hashSet.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,11 +10,19 @@ type 'a t = ('a, unit) Hashtbl.t let create size = Hashtbl.create size + let clear set = Hashtbl.clear set + let copy set = Hashtbl.copy set + let add set x = Hashtbl.replace set x () + let mem set x = Hashtbl.mem set x + let remove set x = Hashtbl.remove set x + let iter f set = Hashtbl.iter (fun k _ -> f k) set + let fold f set acc = Hashtbl.fold (fun k _ acc -> f k acc) set acc + let length set = Hashtbl.length set diff --git a/hack/utils/collections/hashSet.mli b/hack/utils/collections/hashSet.mli index be0934d1c63..78aefdbd7d8 100644 --- a/hack/utils/collections/hashSet.mli +++ b/hack/utils/collections/hashSet.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -11,12 +11,20 @@ * ignore the actual values inside the HashTable. *) type 'a t -val create: int -> 'a t -val clear: 'a t -> unit -val copy: 'a t -> 'a t -val add: 'a t -> 'a -> unit -val mem: 'a t -> 'a -> bool -val remove: 'a t -> 'a -> unit -val iter: ('a -> unit) -> 'a t -> unit -val fold: ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b -val length: 'a t -> int +val create : int -> 'a t + +val clear : 'a t -> unit + +val copy : 'a t -> 'a t + +val add : 'a t -> 'a -> unit + +val mem : 'a t -> 'a -> bool + +val remove : 'a t -> 'a -> unit + +val iter : ('a -> unit) -> 'a t -> unit + +val fold : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b + +val length : 'a t -> int diff --git a/hack/utils/collections/iMap.ml b/hack/utils/collections/iMap.ml index 04775f4ae04..563ea37879b 100644 --- a/hack/utils/collections/iMap.ml +++ b/hack/utils/collections/iMap.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,4 +10,6 @@ include MyMap.Make (IntKey) let pp : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit = - fun pp_data -> make_pp Format.pp_print_int pp_data + (fun pp_data -> make_pp Format.pp_print_int pp_data) + +let show pp_data x = Format.asprintf "%a" (pp pp_data) x diff --git a/hack/utils/collections/iSet.ml b/hack/utils/collections/iSet.ml index 9f842b2018f..9426883ba72 100644 --- a/hack/utils/collections/iSet.ml +++ b/hack/utils/collections/iSet.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -8,9 +8,26 @@ *) include Set.Make (IntKey) -let to_string iset = - "{" ^ (String.concat "," (List.map string_of_int (elements iset))) ^ "}" -(* temporary implementations to placate deriving show *) -let show = to_string -let pp : Format.formatter -> t -> unit = fun _ x -> Printf.printf "%s\n" (show x) +let pp fmt iset = + Format.fprintf fmt "@[<2>{"; + let elements = elements iset in + (match elements with + | [] -> () + | _ -> Format.fprintf fmt " "); + ignore + (List.fold_left + (fun sep s -> + if sep then Format.fprintf fmt ";@ "; + Format.pp_print_int fmt s; + true) + false + elements); + (match elements with + | [] -> () + | _ -> Format.fprintf fmt " "); + Format.fprintf fmt "@,}@]" + +let show iset = Format.asprintf "%a" pp iset + +let to_string = show diff --git a/hack/utils/collections/immQueue.ml b/hack/utils/collections/immQueue.ml index f0d38160c91..449cc57eb63 100644 --- a/hack/utils/collections/immQueue.ml +++ b/hack/utils/collections/immQueue.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * @@ -15,51 +15,49 @@ type 'a t = { exception Empty -let empty = { - incoming = []; - outgoing = []; - length = 0; -} +let empty = { incoming = []; outgoing = []; length = 0 } let length t = t.length let is_empty t = length t = 0 -let push t x = - { t with - incoming = x :: t.incoming; - length = t.length + 1; - } +let push t x = { t with incoming = x :: t.incoming; length = t.length + 1 } + +let prepare_for_read t = + match t.outgoing with + | [] -> { t with incoming = []; outgoing = List.rev t.incoming } + | _ -> t let pop t = - let t = match t.outgoing with - | [] -> { t with incoming = []; outgoing = List.rev t.incoming } - | _ -> t - in + let t = prepare_for_read t in match t.outgoing with - | [] -> (None, t) - | hd::tl -> (Some hd, { t with outgoing = tl; length = t.length - 1 }) + | [] -> (None, t) + | hd :: tl -> (Some hd, { t with outgoing = tl; length = t.length - 1 }) + +let peek t = + let t = prepare_for_read t in + match t.outgoing with + | [] -> (None, t) + | hd :: _ -> (Some hd, t) let pop_unsafe t = match pop t with - | (Some x, t) -> (x, t) - | (None, _) -> raise Empty + | (Some x, t) -> (x, t) + | (None, _) -> raise Empty + +let exists t ~f = List.exists f t.outgoing || List.exists f t.incoming let iter t ~f = List.iter f t.outgoing; List.iter f (List.rev t.incoming) -let from_list x = - { incoming = []; - outgoing = x; - length = List.length x; - } +let from_list x = { incoming = []; outgoing = x; length = List.length x } -let to_list x = - x.outgoing @ (List.rev x.incoming) +let to_list x = x.outgoing @ List.rev x.incoming let concat t = - { incoming = []; + { + incoming = []; outgoing = List.map to_list t |> List.concat; - length = List.map (fun u -> u.length) t |> List.fold_left (+) 0; + length = List.map (fun u -> u.length) t |> List.fold_left ( + ) 0; } diff --git a/hack/utils/collections/immQueue.mli b/hack/utils/collections/immQueue.mli index b452fbe8340..3146b0d5f76 100644 --- a/hack/utils/collections/immQueue.mli +++ b/hack/utils/collections/immQueue.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. 
* @@ -16,25 +16,29 @@ type 'a t exception Empty -val empty: 'a t +val empty : 'a t + +val push : 'a t -> 'a -> 'a t -val push: 'a t -> 'a -> 'a t +val pop : 'a t -> 'a option * 'a t -val pop: 'a t -> ('a option * 'a t) +val peek : 'a t -> 'a option * 'a t (* Raises Empty if the queue is empty *) -val pop_unsafe: 'a t -> ('a * 'a t) +val pop_unsafe : 'a t -> 'a * 'a t + +val is_empty : 'a t -> bool -val is_empty: 'a t -> bool +val length : 'a t -> int -val length: 'a t -> int +val exists : 'a t -> f:('a -> bool) -> bool -val iter: 'a t -> f:('a -> unit) -> unit +val iter : 'a t -> f:('a -> unit) -> unit (* from_list: the head of the list is the first one to be popped *) -val from_list: 'a list -> 'a t +val from_list : 'a list -> 'a t (* to_list: the head of the list is the first one to be popped *) -val to_list: 'a t -> 'a list +val to_list : 'a t -> 'a list -val concat: 'a t list -> 'a t +val concat : 'a t list -> 'a t diff --git a/hack/utils/collections/intKey.ml b/hack/utils/collections/intKey.ml index 470a20cd4bc..9865c8ec975 100644 --- a/hack/utils/collections/intKey.ml +++ b/hack/utils/collections/intKey.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,4 +8,5 @@ *) type t = int -let compare = (-) + +let compare = ( - ) diff --git a/hack/utils/collections/lSMap.ml b/hack/utils/collections/lSMap.ml index ca4d84b6107..0e6afbb7285 100644 --- a/hack/utils/collections/lSMap.ml +++ b/hack/utils/collections/lSMap.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,4 +10,4 @@ include MyMap.Make (LowerStringKey) let pp : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit = - fun pp_data -> make_pp Format.pp_print_string pp_data + (fun pp_data -> make_pp Format.pp_print_string pp_data) diff --git a/hack/utils/collections/lazy_string_table.ml b/hack/utils/collections/lazy_string_table.ml new file mode 100644 index 00000000000..9317d983e0f --- /dev/null +++ b/hack/utils/collections/lazy_string_table.ml @@ -0,0 +1,99 @@ +(* + * Copyright (c) 2018, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Core_kernel + +type 'a t = { + tbl: ('a * bool) String.Table.t; + mutable seq: (string * 'a) Sequence.t; + is_canonical: 'a -> bool; + merge: earlier:'a -> later:'a -> 'a; +} + +let make ~is_canonical ~merge seq = + let tbl = String.Table.create () in + { tbl; seq; is_canonical; merge } + +type 'a advance_result = + | Complete + (** The cache's [seq] has been exhausted, and its [tbl] now contains the + complete mapping of all elements. *) + | Skipped + (** The cache's [seq] emitted some non-canonical element, which may or may not + have replaced a previously emitted element stored in the [tbl]. *) + | Yield of string * 'a + (** The cache's [seq] emitted this canonical element (along with its ID). This + element may be immediately used without traversing the rest of the + sequence (since canonical elements cannot be replaced or updated as we + traverse the rest of the sequence). *) + +(** Fetch the next value from the cache's [seq]. Update its [tbl] by storing the + new value, ignoring the new value, or merging the new value with an existing + value as necessary. 
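    Callers never use this directly: [get] and [mem] below call it in a loop
    until the key they want is yielded, becomes available in [tbl], or the
    sequence is exhausted, and [to_seq] calls it until the sequence is
    exhausted. End to end, a table is used roughly like the following sketch
    (the sequence, the values, and the [merge] policy are illustrative only):

    {[
      let tbl =
        Lazy_string_table.make
          ~is_canonical:(fun _ -> false)
          ~merge:(fun ~earlier ~later:_ -> earlier)
          (Sequence.of_list [("a", 1); ("b", 2); ("a", 3)])
      in
      (* keeps the first binding for "a", because [merge] prefers ~earlier *)
      let first_a = Lazy_string_table.get tbl "a" in
      let has_b = Lazy_string_table.mem tbl "b" in
      ignore (first_a, has_b)
    ]}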
*) +let advance t = + match Sequence.next t.seq with + | None -> Complete + | Some ((id, v), rest) -> + t.seq <- rest; + let (extant_value, extant_value_is_canonical) = + match Hashtbl.find t.tbl id with + | None -> (None, false) + | Some (v, canonical) -> (Some v, canonical) + in + if extant_value_is_canonical then + Skipped + else + let replace_with v = + let canonical = t.is_canonical v in + Hashtbl.set t.tbl id (v, canonical); + if canonical then + Yield (id, v) + else + Skipped + in + (match extant_value with + | None -> replace_with v + | Some extant_value -> + let v = t.merge ~earlier:extant_value ~later:v in + if phys_equal v extant_value then + Skipped + else + replace_with v) + +let rec get t id = + match Hashtbl.find t.tbl id with + | Some (v, true) -> Some v + | (None | Some (_, false)) as result -> + (match advance t with + | Complete -> Option.map result fst + | Yield (id', v) when id' = id -> Some v + | Skipped + | Yield _ -> + get t id) + +let rec mem t id = + if Hashtbl.mem t.tbl id then + true + else + match advance t with + | Complete -> false + | Yield (id', _) when id' = id -> true + | Skipped + | Yield _ -> + mem t id + +let rec to_seq t = + match advance t with + | Skipped + | Yield _ -> + to_seq t + | Complete -> + Hashtbl.to_alist t.tbl + |> Sequence.of_list + |> Sequence.map ~f:(Tuple.T2.map_snd ~f:fst) diff --git a/hack/utils/collections/lazy_string_table.mli b/hack/utils/collections/lazy_string_table.mli new file mode 100644 index 00000000000..0f3c8e766bd --- /dev/null +++ b/hack/utils/collections/lazy_string_table.mli @@ -0,0 +1,80 @@ +(* + * Copyright (c) 2018, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Core_kernel + +type 'a t +(** [Lazy_string_table.t] provides a memoization cache for any + [(string * 'a) Sequence.t] where: + + + It is desirable to look up elements in the sequence by their [string] key + + Conflicts between multiple elements with the same key can be resolved by + a [merge] function + + The cache will advance the provided sequence only as needed, and the + user-provided [is_canonical] function helps to enable this (see {!make} for + more details). The cache is guaranteed to traverse the input sequence only + once (so it will behave correctly if the input sequence depends on mutable + state). + + Originally written for caches of class members, where we want to lazily + parse ancestor classes only as necessary, and our implementation of [merge] + provides the logic for member overriding. *) + +val make : + is_canonical:('a -> bool) -> + merge:(earlier:'a -> later:'a -> 'a) -> + (string * 'a) Sequence.t -> + 'a t +(** Create a new [Lazy_string_table.t] memoizing the given sequence. + + A good implementation of [merge] is necessary for correctness, since [merge] + is used when a key-value pair emitted later in the sequence has the same key + as a pair emitted earlier in the sequence. [merge] is provided with the + value emitted earlier in the sequence (or, potentially, the output of a + prior invocation of [merge]) and the value emitted later in the sequence. It + may return one or the other, or create a new value to be stored instead of + either. + + A good implementation of [is_canonical] is not necessary for correctness, + but it is necessary for performance. 
The contract of [is_canonical] is as + follows: [is_canonical v] must return [true] only when [merge v u] would + return [v] for all possible values of [u] that could be emitted later in the + sequence. Canonicity is what allows the cache to return results from {!get} + and {!mem} without traversing the entire sequence (i.e., the trivial + implementation [fun _ -> false] for [is_canonical] is always correct, but + will always force the cache to traverse the entire sequence on the first + lookup). *) + +val get : 'a t -> string -> 'a option +(** Return the value associated with the given key. If the value is canonical + and was already emitted by the input sequence, or if the input sequence has + been exhausted, this function is guaranteed to complete in constant time. + Otherwise, its worst-case runtime is proportional to the length of the input + sequence (provided that [is_canonical] and [merge] complete in constant + time, and ignoring any time required to compute elements of the sequence). + + Guaranteed not to advance the input sequence if the sequence has previously + emitted a canonical value for the given key. *) + +val mem : 'a t -> string -> bool +(** Return [true] if a value associated with the given key exists. If a value + associated with this key was already emitted by the input sequence, or if + the input sequence has been exhausted, this function is guaranteed to + complete in constant time. Otherwise, its worst-case runtime is proportional + to the length of the input sequence (provided that [is_canonical] and + [merge] complete in constant time, and ignoring any time required to compute + elements of the sequence). + + Guaranteed not to advance the input sequence if the sequence has previously + emitted any value for the given key. *) + +val to_seq : 'a t -> (string * 'a) Sequence.t +(** Eagerly exhaust the input sequence, then return a sequence iterating over + all values stored in the cache, in undefined order. *) diff --git a/hack/utils/collections/lowerStringKey.ml b/hack/utils/collections/lowerStringKey.ml index b3c05340f17..209de290fe4 100644 --- a/hack/utils/collections/lowerStringKey.ml +++ b/hack/utils/collections/lowerStringKey.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * @@ -9,7 +9,5 @@ type t = string -let compare (x: t) (y: t) = - (String.compare - (String.lowercase_ascii x) - (String.lowercase_ascii y)) +let compare (x : t) (y : t) = + String.compare (String.lowercase_ascii x) (String.lowercase_ascii y) diff --git a/hack/utils/collections/myMap.ml b/hack/utils/collections/myMap.ml index 97e883348a5..6861c41826a 100644 --- a/hack/utils/collections/myMap.ml +++ b/hack/utils/collections/myMap.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -8,11 +8,11 @@ *) module type S = MyMap_sig.S -module Make(Ord: Map.OrderedType) : S with type key = Ord.t = struct - include Map.Make(Ord) - let get x t = - try Some (find x t) with Not_found -> None +module Make (Ord : Map.OrderedType) : S with type key = Ord.t = struct + include Map.Make (Ord) + + let get x t = (try Some (find x t) with Not_found -> None) let has_key x m = try @@ -23,130 +23,174 @@ module Make(Ord: Map.OrderedType) : S with type key = Ord.t = struct let find_unsafe = find let union ?combine x y = - let combine = match combine with + let combine = + match combine with | None -> (fun _ fst _ -> Some fst) | Some f -> f in union combine x y - let rec fold_left_env env l ~init ~f = match l with - | [] -> env, init + let rec fold_left_env env l ~init ~f = + match l with + | [] -> (env, init) | x :: xs -> - let env, init = f env init x in + let (env, init) = f env init x in fold_left_env env xs ~init ~f let merge_env env s1 s2 ~combine = - let env, map = fold_left_env env ~init:empty ~f:( - fun env map (key, v2) -> - let v1opt = get key s1 in - let env, vopt = combine env key v1opt (Some v2) in - let map = match vopt with - | None -> map - | Some v -> add key v map in - env, map - ) (bindings s2) in - fold_left_env env ~init:map ~f:( - fun env map (key, v1) -> + let (env, map) = + fold_left_env + env + ~init:empty + ~f:(fun env map (key, v2) -> + let v1opt = get key s1 in + let (env, vopt) = combine env key v1opt (Some v2) in + let map = + match vopt with + | None -> map + | Some v -> add key v map + in + (env, map)) + (bindings s2) + in + fold_left_env + env + ~init:map + ~f:(fun env map (key, v1) -> let v2opt = get key s2 in match v2opt with | None -> - let env, vopt = combine env key (Some v1) None in - let map = match vopt with - | None -> map - | Some v -> add key v map in - env, map - | Some _ -> env, map - ) (bindings s1) + let (env, vopt) = combine env key (Some v1) None in + let map = + match vopt with + | None -> map + | Some v -> add key v map + in + (env, map) + | Some _ -> (env, map)) + (bindings s1) let union_env env s1 s2 ~combine = let f env key o1 o2 = - match o1, o2 with - | None, None -> env, None - | Some v, None - | None, Some v -> env, Some v - | Some v1, Some v2 -> combine env key v1 v2 in + match (o1, o2) with + | (None, None) -> (env, None) + | (Some v, None) + | (None, Some v) -> + (env, Some v) + | (Some v1, Some v2) -> combine env key v1 v2 + in merge_env env s1 s2 ~combine:f - let compare x y = compare Pervasives.compare x y + let compare ?(cmp = Pervasives.compare) x y = compare cmp x y + let equal x y = compare x y = 0 let keys m = fold (fun k _ acc -> k :: acc) m [] + let ordered_keys m = List.map fst (bindings m) let values m = fold (fun _ v acc -> v :: acc) m [] - let elements m = fold (fun k v acc -> (k,v)::acc) m [] + + let elements m = fold (fun k v acc -> (k, v) :: acc) m [] let fold_env env f m init = fold (fun key v (env, acc) -> f env key v acc) m (env, init) let map_env f env m = - fold_env env ( - fun env key v map -> - let env, v = f env key v in - env, add key v map - ) m empty + fold_env + env + (fun env key v map -> + let (env, v) = f env key v in + (env, add key v map)) + m + empty - let choose x = - try Some (choose x) with Not_found -> None + let choose x = (try Some (choose x) with Not_found -> None) - let max_binding x = - try Some (max_binding x) with Not_found -> None + let max_binding x = (try Some (max_binding x) with Not_found -> None) let from_keys keys ~f = - List.fold_left begin fun acc key -> - add key (f key) 
acc - end empty keys + List.fold_left begin + fun acc key -> add key (f key) acc + end empty keys let of_list elts = - List.fold_left begin fun acc (key, value) -> - add key value acc - end empty elts + List.fold_left + begin + fun acc (key, value) -> add key value acc + end + empty + elts + + let of_function domain f = + List.fold_left begin + fun acc key -> add key (f key) acc + end empty domain let add ?combine key new_value map = match combine with | None -> add key new_value map - | Some combine -> begin - match get key map with - | None -> add key new_value map - | Some old_value -> add key (combine old_value new_value) map - end + | Some combine -> + begin + match get key map with + | None -> add key new_value map + | Some old_value -> add key (combine old_value new_value) map + end let ident_map f map = - let map_, changed = fold (fun key item (map_, changed) -> - let item_ = f item in - add key item_ map_, changed || item_ != item - ) map (empty, false) in - if changed then map_ else map + let (map_, changed) = + fold + (fun key item (map_, changed) -> + let item_ = f item in + (add key item_ map_, changed || item_ != item)) + map + (empty, false) + in + if changed then + map_ + else + map let ident_map_key ?combine f map = - let map_, changed = fold (fun key item (map_, changed) -> - let new_key = f key in - add ?combine new_key item map_, changed || new_key != key - ) map (empty, false) in - if changed then map_ else map + let (map_, changed) = + fold + (fun key item (map_, changed) -> + let new_key = f key in + (add ?combine new_key item map_, changed || new_key != key)) + map + (empty, false) + in + if changed then + map_ + else + map let for_all2 ~f m1 m2 = let key_bool_map = - merge (fun k v1opt v2opt -> Some (f k v1opt v2opt)) m1 m2 in + merge (fun k v1opt v2opt -> Some (f k v1opt v2opt)) m1 m2 + in for_all (fun _k b -> b) key_bool_map let make_pp pp_key pp_data fmt x = Format.fprintf fmt "@[{"; let bindings = bindings x in - (match bindings with [] -> () | _ -> Format.fprintf fmt " "); + (match bindings with + | [] -> () + | _ -> Format.fprintf fmt " "); ignore (List.fold_left - (fun sep (key, data) -> - if sep then Format.fprintf fmt ";@ "; - Format.fprintf fmt "@["; - pp_key fmt key; - Format.fprintf fmt " ->@ "; - pp_data fmt data; - Format.fprintf fmt "@]"; - true) - false - bindings); - (match bindings with [] -> () | _ -> Format.fprintf fmt " "); + (fun sep (key, data) -> + if sep then Format.fprintf fmt ";@ "; + Format.fprintf fmt "@["; + pp_key fmt key; + Format.fprintf fmt " ->@ "; + pp_data fmt data; + Format.fprintf fmt "@]"; + true) + false + bindings); + (match bindings with + | [] -> () + | _ -> Format.fprintf fmt " "); Format.fprintf fmt "}@]" - end diff --git a/hack/utils/collections/myMap.mli b/hack/utils/collections/myMap.mli index 8bbdea0cdc8..865732a918e 100644 --- a/hack/utils/collections/myMap.mli +++ b/hack/utils/collections/myMap.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,5 +8,5 @@ *) module type S = MyMap_sig.S -module Make (Ord : Map.OrderedType) : S with type key = Ord.t +module Make (Ord : Map.OrderedType) : S with type key = Ord.t diff --git a/hack/utils/collections/myMap_sig.ml b/hack/utils/collections/myMap_sig.ml index b8480129abc..8201ca6f40f 100644 --- a/hack/utils/collections/myMap_sig.ml +++ b/hack/utils/collections/myMap_sig.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -10,35 +10,68 @@ module type S = sig include Map.S - val add: ?combine: ('a -> 'a -> 'a) -> key -> 'a -> 'a t -> 'a t - val get: key -> 'a t -> 'a option - val has_key: key -> 'a t -> bool - val find_unsafe: key -> 'a t -> 'a - val union: ?combine:(key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t - val union_env: 'a -> 'b t -> 'b t -> + val add : ?combine:('a -> 'a -> 'a) -> key -> 'a -> 'a t -> 'a t + + val get : key -> 'a t -> 'a option + + val has_key : key -> 'a t -> bool + + val find_unsafe : key -> 'a t -> 'a + + val union : ?combine:(key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t + + val union_env : + 'a -> + 'b t -> + 'b t -> combine:('a -> key -> 'b -> 'b -> 'a * 'b option) -> 'a * 'b t - val merge_env: 'a -> 'b t -> 'c t -> + + val merge_env : + 'a -> + 'b t -> + 'c t -> combine:('a -> key -> 'b option -> 'c option -> 'a * 'd option) -> 'a * 'd t - val compare: 'a t -> 'a t -> int - val equal: 'a t -> 'a t -> bool - val keys: 'a t -> key list - val ordered_keys: 'a t -> key list - val values: 'a t -> 'a list - val elements: 'a t -> (key * 'a) list - val fold_env: + + val compare : ?cmp:('a -> 'a -> int) -> 'a t -> 'a t -> int + + val equal : 'a t -> 'a t -> bool + + val keys : 'a t -> key list + + val ordered_keys : 'a t -> key list + + val values : 'a t -> 'a list + + val elements : 'a t -> (key * 'a) list + + val fold_env : 'a -> ('a -> key -> 'b -> 'c -> 'a * 'c) -> 'b t -> 'c -> 'a * 'c - val map_env: ('c -> key -> 'a -> 'c * 'b) -> 'c -> 'a t -> 'c * 'b t - val choose: 'a t -> (key * 'a) option - val max_binding: 'a t -> (key * 'a) option - val from_keys: key list -> f:(key -> 'a) -> 'a t - val of_list: (key * 'a) list -> 'a t - val ident_map: ('a -> 'a) -> 'a t -> 'a t - val ident_map_key: ?combine: ('a -> 'a -> 'a) -> (key -> key) -> 'a t -> 'a t - val for_all2: f:(key -> 'a option -> 'b option -> bool) -> 'a t -> 'b t -> bool - val make_pp: + + val map_env : ('c -> key -> 'a -> 'c * 'b) -> 'c -> 'a t -> 'c * 'b t + + val choose : 'a t -> (key * 'a) option + + val max_binding : 'a t -> (key * 'a) option + + val from_keys : key list -> f:(key -> 'a) -> 'a t + + val of_list : (key * 'a) list -> 'a t + + val of_function : key list -> (key -> 'a) -> 'a t + + val ident_map : ('a -> 'a) -> 'a t -> 'a t + + val ident_map_key : ?combine:('a -> 'a -> 'a) -> (key -> key) -> 'a t -> 'a t + + val for_all2 : + f:(key -> 'a option -> 'b option -> bool) -> 'a t -> 'b t -> bool + + val make_pp : (Format.formatter -> key -> unit) -> (Format.formatter -> 'a -> unit) -> - Format.formatter -> 'a t -> unit + Format.formatter -> + 'a t -> + unit end diff --git a/hack/utils/collections/priorityQueue.ml b/hack/utils/collections/priorityQueue.ml index fb6fa5bac14..86aeece0dd5 100644 --- a/hack/utils/collections/priorityQueue.ml +++ b/hack/utils/collections/priorityQueue.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. 
* @@ -7,41 +7,39 @@ * *) -module Make(Ord: Set.OrderedType) = struct +module Make (Ord : Set.OrderedType) = struct type elt = Ord.t + type t = { mutable __queue: elt option array; mutable size: int; } - let rec make_empty n = { - __queue = Array.make n None; - size = 0; - } + let rec make_empty n = { __queue = Array.make n None; size = 0 } and is_empty t = t.size = 0 and pop t = if t.size = 0 then failwith "Popping from an empty priority queue"; - let v = Array.get t.__queue 0 in + let v = t.__queue.(0) in t.size <- t.size - 1; - if t.size <> 0 then begin - let last = Array.get t.__queue t.size in + if t.size <> 0 then ( + let last = t.__queue.(t.size) in t.__queue.(t.size) <- None; - __bubble_down t.__queue t.size last 0; - end; + __bubble_down t.__queue t.size last 0 + ); match v with - | None -> failwith "Attempting to return a null value" - | Some v -> v + | None -> failwith "Attempting to return a null value" + | Some v -> v and push t element = - if Array.length t.__queue = t.size then begin - let new_queue = Array.make ((Array.length t.__queue) * 2 + 1) None in + if Array.length t.__queue = t.size then ( + let new_queue = Array.make ((Array.length t.__queue * 2) + 1) None in Array.blit t.__queue 0 new_queue 0 (Array.length t.__queue); - t.__queue <- new_queue; - end; + t.__queue <- new_queue + ); t.__queue.(t.size) <- Some element; __bubble_up t.__queue t.size; @@ -56,50 +54,50 @@ module Make(Ord: Set.OrderedType) = struct and __bubble_up arr index = if index = 0 then (); let pindex = (index - 1) / 2 in - match Array.get arr index, Array.get arr pindex with - | None, _ | _, None -> - failwith "Unexpected null index found when calling __bubble_up" - | Some e, Some p -> - if Ord.compare e p < 0 then begin - __swap arr index pindex; - __bubble_up arr pindex - end; + match (arr.(index), arr.(pindex)) with + | (None, _) + | (_, None) -> + failwith "Unexpected null index found when calling __bubble_up" + | (Some e, Some p) -> + if Ord.compare e p < 0 then ( + __swap arr index pindex; + __bubble_up arr pindex + ) and __bubble_down arr size value index = - let right_child_index = index * 2 + 2 in + let right_child_index = (index * 2) + 2 in let left_child_index = right_child_index - 1 in - - if right_child_index < size then begin - match - Array.get arr right_child_index, - Array.get arr left_child_index, - value with - | None, _, _ | _, None, _ | _, _, None -> - failwith "Unexpected null index found when calling __bubble_down" - | Some r, Some l, Some v -> - let smaller_child, smaller_child_index = - if Ord.compare r l < 0 - then r, right_child_index - else l, left_child_index - in - - if Ord.compare v smaller_child <= 0 then - arr.(index) <- value - else begin - arr.(index) <- Array.get arr smaller_child_index; - __bubble_down arr size value smaller_child_index - end; - end else if left_child_index < size then begin - match Array.get arr left_child_index, value with - | None, _ | _, None -> - failwith "Unexpected null index found when calling __bubble_down" - | Some l, Some v -> - if Ord.compare v l <= 0 then - arr.(index) <- value - else begin - arr.(index) <- Array.get arr left_child_index; - arr.(left_child_index) <- value - end - end else arr.(index) <- value; - + if right_child_index < size then + match (arr.(right_child_index), arr.(left_child_index), value) with + | (None, _, _) + | (_, None, _) + | (_, _, None) -> + failwith "Unexpected null index found when calling __bubble_down" + | (Some r, Some l, Some v) -> + let (smaller_child, smaller_child_index) = + if Ord.compare r l < 0 
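(* r < l, i.e. the right child is the smaller one *)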
then + (r, right_child_index) + else + (l, left_child_index) + in + if Ord.compare v smaller_child <= 0 then + arr.(index) <- value + else ( + arr.(index) <- arr.(smaller_child_index); + __bubble_down arr size value smaller_child_index + ) + else if left_child_index < size then + match (arr.(left_child_index), value) with + | (None, _) + | (_, None) -> + failwith "Unexpected null index found when calling __bubble_down" + | (Some l, Some v) -> + if Ord.compare v l <= 0 then + arr.(index) <- value + else ( + arr.(index) <- arr.(left_child_index); + arr.(left_child_index) <- value + ) + else + arr.(index) <- value end diff --git a/hack/utils/collections/reordered_argument_collections.ml b/hack/utils/collections/reordered_argument_collections.ml index 45f6257b96a..fe46416fe35 100644 --- a/hack/utils/collections/reordered_argument_collections.ml +++ b/hack/utils/collections/reordered_argument_collections.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -9,33 +9,87 @@ module Reordered_argument_map (S : MyMap.S) = struct include S + let add m ~key ~data = add key data m + let filter m ~f = filter f m + let fold m ~init ~f = fold f m init + let get m k = get k m + let find_unsafe m k = find_unsafe k m + let iter m ~f = iter f m + let map m ~f = map f m + let mapi m ~f = mapi f m + let mem m v = mem v m + let remove m v = remove v m + let exists m ~f = exists f m + let merge m1 m2 ~f = merge f m1 m2 + let filter m ~f = filter m f + let partition m ~f = partition f m end module Reordered_argument_set (S : Set.S) = struct include S + let add s v = add v s + let filter s ~f = filter f s + let fold s ~init ~f = fold f s init + let iter s ~f = iter f s + let mem s v = mem v s + let remove s v = remove v s + let exists s ~f = exists f s + let of_list l = List.fold_left add S.empty l + + let make_pp pp fmt x = + Format.fprintf fmt "@[{"; + let elts = elements x in + (match elts with + | [] -> () + | _ -> Format.fprintf fmt " "); + ignore + (List.fold_left + (fun sep elt -> + if sep then Format.fprintf fmt ";@ "; + let () = pp fmt elt in + true) + false + elts); + (match elts with + | [] -> () + | _ -> Format.fprintf fmt " "); + Format.fprintf fmt "}@]" end -module SSet = Reordered_argument_set(SSet) -module SMap = Reordered_argument_map(SMap) +module SSet = struct + include Reordered_argument_set (SSet) + + let pp = SSet.pp + + let show = SSet.show +end + +module SMap = struct + include Reordered_argument_map (SMap) + + let pp = SMap.pp + + let show = SMap.show +end diff --git a/hack/utils/collections/sMap.ml b/hack/utils/collections/sMap.ml index 5c9868f6dae..2cd960803d5 100644 --- a/hack/utils/collections/sMap.ml +++ b/hack/utils/collections/sMap.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,4 +10,6 @@ include MyMap.Make (StringKey) let pp : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit = - fun pp_data -> make_pp Format.pp_print_string pp_data + (fun pp_data -> make_pp (fun fmt s -> Format.fprintf fmt "%S" s) pp_data) + +let show pp_data x = Format.asprintf "%a" (pp pp_data) x diff --git a/hack/utils/collections/sSet.ml b/hack/utils/collections/sSet.ml index cb2fe302c03..1175b7b98c4 100644 --- a/hack/utils/collections/sSet.ml +++ b/hack/utils/collections/sSet.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -8,17 +8,26 @@ *) include Set.Make (StringKey) -let to_string sset = - "{" ^ (String.concat "," (elements sset)) ^ "}" let pp fmt sset = Format.fprintf fmt "@[<2>{"; + let elements = elements sset in + (match elements with + | [] -> () + | _ -> Format.fprintf fmt " "); ignore (List.fold_left - (fun sep s -> - if sep then Format.fprintf fmt ";@ "; - Format.fprintf fmt "%S" s; - true) - false - (elements sset)); + (fun sep s -> + if sep then Format.fprintf fmt ";@ "; + Format.fprintf fmt "%S" s; + true) + false + elements); + (match elements with + | [] -> () + | _ -> Format.fprintf fmt " "); Format.fprintf fmt "@,}@]" + +let show sset = Format.asprintf "%a" pp sset + +let to_string = show diff --git a/hack/utils/collections/stringKey.ml b/hack/utils/collections/stringKey.ml index 4fb275eac13..2ffcd9737e8 100644 --- a/hack/utils/collections/stringKey.ml +++ b/hack/utils/collections/stringKey.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,5 +8,7 @@ *) type t = string -let compare (x: t) (y: t) = String.compare x y + +let compare (x : t) (y : t) = String.compare x y + let to_string x = x diff --git a/hack/utils/compiler_id.ml b/hack/utils/compiler_id.ml deleted file mode 100644 index 72c9bb6865e..00000000000 --- a/hack/utils/compiler_id.ml +++ /dev/null @@ -1,10 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -external get_compiler_id : unit -> string = "hh_get_compiler_id" diff --git a/hack/utils/compiler_id_impl.c b/hack/utils/compiler_id_impl.c deleted file mode 100644 index 0a10f7a8cf1..00000000000 --- a/hack/utils/compiler_id_impl.c +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Copyright (c) 2014, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - */ -#define CAML_NAME_SPACE -#include -#include -#include - -#include "hphp/util/embedded-data.h" - -#define BUF_SIZE 64 - -static const char section_name[] = "build_id"; -static const char default_id[] = "hackc-unknown-version"; - -#define STRINGIFY_HELPER(x) #x -#define STRINGIFY_VALUE(x) STRINGIFY_HELPER(x) - -value hh_get_compiler_id(void) { - CAMLparam0(); -#ifdef HACKC_COMPILER_ID - const char* const buf = STRINGIFY_VALUE(HACKC_COMPILER_ID); - const ssize_t len = strlen(buf); -#else - char buf[BUF_SIZE]; - const ssize_t len = hphp_read_embedded_data(section_name, buf, BUF_SIZE); -#endif - value result; - - if (len < 0) { - result = caml_alloc_string(strlen(default_id)); - memcpy(String_val(result), default_id, strlen(default_id)); - CAMLreturn(result); - } else { - result = caml_alloc_string(len); - memcpy(String_val(result), buf, len); - CAMLreturn(result); - } -} diff --git a/hack/utils/core/build_id.ml b/hack/utils/core/build_id.ml new file mode 100644 index 00000000000..853e5a76950 --- /dev/null +++ b/hack/utils/core/build_id.ml @@ -0,0 +1,32 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
+ * + *) + +external get_build_revision : unit -> string = "hh_get_build_revision" + +external get_build_commit_time : unit -> int = "hh_get_build_commit_time" + +external get_build_commit_time_string : unit -> string + = "hh_get_build_commit_time_string" + +external get_build_mode : unit -> string = "hh_get_build_mode" + +let build_revision = get_build_revision () + +let build_commit_time = get_build_commit_time () + +let build_commit_time_string = get_build_commit_time_string () + +let build_mode = get_build_mode () + +let is_build_optimized = + String_utils.string_starts_with build_mode "dbgo" + || String_utils.string_starts_with build_mode "opt" + || build_mode = "" + +(* fail open if we don't know build mode *) diff --git a/hack/utils/core/build_id.mli b/hack/utils/core/build_id.mli new file mode 100644 index 00000000000..9a7f1e141fd --- /dev/null +++ b/hack/utils/core/build_id.mli @@ -0,0 +1,18 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +val build_revision : string + +val build_commit_time : int + +val build_commit_time_string : string + +val build_mode : string + +val is_build_optimized : bool diff --git a/hack/utils/core/config/dune b/hack/utils/core/config/dune new file mode 100644 index 00000000000..d6e5a19088c --- /dev/null +++ b/hack/utils/core/config/dune @@ -0,0 +1,5 @@ +(rule + (targets build-timestamp-opt) + (action + (with-stdout-to build-timestamp-opt + (bash "echo -DHH_BUILD_TIMESTAMP=$(date +%s)ul")))) diff --git a/hack/utils/core/dune b/hack/utils/core/dune new file mode 100644 index 00000000000..639479c2bdb --- /dev/null +++ b/hack/utils/core/dune @@ -0,0 +1,18 @@ +(copy_files ../../../scripts/get_build_id_gen.c) + +(library + (name utils_core) + (wrapped false) + (c_names + get_build_id + get_build_id_gen) + (c_flags (:standard + (:include config/build-timestamp-opt))) + (libraries + string + imported_core + hh_json + str + unix) + (preprocess + (pps lwt_ppx ppx_deriving.std ppx_deriving.enum))) diff --git a/hack/utils/core/exception.ml b/hack/utils/core/exception.ml new file mode 100644 index 00000000000..05aaa2ae797 --- /dev/null +++ b/hack/utils/core/exception.ml @@ -0,0 +1,48 @@ +(* + * Copyright (c) 2018-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t = { + exn: exn; + backtrace: Printexc.raw_backtrace; +} + +(* In ocaml, backtraces (the path that the exception bubbled up after being thrown) are stored as + * global state and NOT with the exception itself. This means the only safe place to ever read the + * backtrace is immediately after the exception is caught in the `with` block of a `try...with`. + * + * Proper use of this module is something like + * + * try + * ... + * with exn -> + * let e = Exception.wrap exn in (* DO THIS FIRST!!! 
*) + * my_fun e; (* If this code throws internally it will overwrite the global backtrace *) + * Exception.reraise e + *) +let wrap exn = + let backtrace = Printexc.get_raw_backtrace () in + { exn; backtrace } + +let reraise { exn; backtrace } = Printexc.raise_with_backtrace exn backtrace + +let get_ctor_string { exn; backtrace = _ } = Printexc.to_string exn + +let get_backtrace_string { exn = _; backtrace } = + Printexc.raw_backtrace_to_string backtrace + +let to_string t = + let ctor = get_ctor_string t in + let bt = get_backtrace_string t in + if bt = "" then + ctor + else + ctor ^ "\n" ^ bt + +let get_current_callstack_string n = + Printexc.get_callstack n |> Printexc.raw_backtrace_to_string + +let record_backtrace = Printexc.record_backtrace diff --git a/hack/utils/core/exception.mli b/hack/utils/core/exception.mli new file mode 100644 index 00000000000..10a8d9ad1f2 --- /dev/null +++ b/hack/utils/core/exception.mli @@ -0,0 +1,22 @@ +(* + * Copyright (c) 2018-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t + +val wrap : exn -> t + +val reraise : t -> 'a + +val to_string : t -> string + +val get_ctor_string : t -> string + +val get_backtrace_string : t -> string + +val get_current_callstack_string : int -> string + +val record_backtrace : bool -> unit diff --git a/hack/utils/core/exit_status.ml b/hack/utils/core/exit_status.ml new file mode 100644 index 00000000000..28526b38184 --- /dev/null +++ b/hack/utils/core/exit_status.ml @@ -0,0 +1,232 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type t = + | No_error + | Build_error + | Build_terminated + | Checkpoint_error + | Input_error + | Kill_error + | No_server_running_should_retry + | Server_hung_up_should_retry + | Server_hung_up_should_abort + | Out_of_time + | Out_of_retries + | Server_already_exists + | Type_error + | Build_id_mismatch + | Monitor_connection_failure + | Unused_server + | Lock_stolen + | Lost_parent_monitor + | Interrupted + | Worker_oomed + | Worker_busy + (* An uncaught Not_found exception in the worker. *) + | Worker_not_found_exception + | Worker_failed_to_send_job + | Socket_error + | Missing_hhi + | Dfind_died + | Dfind_unresponsive + | EventLogger_Timeout + | EventLogger_restart_out_of_retries + | EventLogger_broken_pipe + | CantRunAI + | Watchman_failed + (* It is faster to exit the server (and have the Monitor restart the server) + * on a Watchman fresh instance than to compute the files that have been + * deleted and do an incremental check. 
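 *
 * In practice this is presumably triggered by something along the lines of
 *
 *   raise (Exit_status.Exit_with Exit_status.Watchman_fresh_instance)
 *
 * near the Watchman subscription, with a top-level handler such as
 *
 *   try main () with Exit_status.Exit_with s -> Exit_status.exit s
 *
 * mapping it to the numeric code below ([main] is an illustrative name;
 * [Exit_with], [exit] and [exit_code] are the real pieces of this module).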
+ *) + | Watchman_fresh_instance + | Watchman_invalid_result + | File_provider_stale + | Hhconfig_deleted + | Hhconfig_changed + | Server_shutting_down + | IDE_malformed_request + | IDE_no_server + | IDE_out_of_retries + | Nfs_root + | IDE_init_failure + | IDE_typechecker_died + | Redecl_heap_overflow + | Out_of_shared_memory + | Shared_mem_assertion_failure + | Hash_table_full + | IDE_new_client_connected + | Lazy_decl_bug + | Decl_heap_elems_bug + | Parser_heap_build_error + | Heap_full + | Sql_assertion_failure + | Local_type_env_stale + | Sql_cantopen + | Sql_corrupt + | Sql_misuse + | Uncaught_exception + | Decl_not_found + | Big_rebase_detected + | Failed_to_load_should_retry + | Failed_to_load_should_abort + +exception Exit_with of t + +let exit_code = function + | Interrupted -> -6 + | No_error -> 0 + | Build_terminated -> 1 (* used in clientBuild *) + | Kill_error -> 1 (* used in clientStop/Start/Restart *) + | Server_shutting_down -> 1 (* used in server *) + | Build_error -> 2 (* used in clientBuild *) + | Type_error -> 2 (* used in clientCheck *) + | Uncaught_exception -> 2 (* used in server and clientIde *) + | Hhconfig_changed -> 4 + | Unused_server -> 5 + | No_server_running_should_retry -> + 6 (* gen by clientConnect, read by find_hh.sh *) + | Server_hung_up_should_retry -> + 6 (* gen by clientConnect, read by find_hh.sh *) + | Out_of_time -> 7 + | Out_of_retries -> 7 + | Checkpoint_error -> 8 + | Build_id_mismatch -> 9 + | Monitor_connection_failure -> 9 + | Input_error -> 10 + | Lock_stolen -> 11 + | Lost_parent_monitor -> 12 + | Shared_mem_assertion_failure -> 14 + | Out_of_shared_memory -> 15 + | Hash_table_full -> 16 + | Heap_full -> 17 + | Worker_oomed -> 30 + | Worker_busy -> 31 + | Worker_not_found_exception -> 32 + | Worker_failed_to_send_job -> 33 + | Server_already_exists -> 77 + | Missing_hhi -> 97 + | Socket_error -> 98 + | Dfind_died -> 99 + | Dfind_unresponsive -> 100 + | EventLogger_Timeout -> 101 + | CantRunAI -> 102 + | Watchman_failed -> 103 + | Hhconfig_deleted -> 104 + | EventLogger_broken_pipe -> 106 + | Redecl_heap_overflow -> 107 + | EventLogger_restart_out_of_retries -> 108 + | Watchman_fresh_instance -> 109 + | Watchman_invalid_result -> 110 + | Big_rebase_detected -> 111 + | IDE_malformed_request -> 201 + | IDE_no_server -> 202 + | IDE_out_of_retries -> 203 + | Nfs_root -> 204 + | IDE_init_failure -> 205 + | IDE_typechecker_died -> 206 + | IDE_new_client_connected -> 207 + | Lazy_decl_bug -> 208 + | Decl_heap_elems_bug -> 209 + | Parser_heap_build_error -> 210 + | File_provider_stale -> 211 + | Sql_assertion_failure -> 212 + | Local_type_env_stale -> 213 + | Sql_cantopen -> 214 + | Sql_corrupt -> 215 + | Sql_misuse -> 216 + | Decl_not_found -> 217 + | Failed_to_load_should_retry -> + 218 (* gen by serverInit, read by serverMonitor+clientConnect *) + | Failed_to_load_should_abort -> + 219 (* gen by serverInit, read by serverMonitor+clientConnect *) + | Server_hung_up_should_abort -> + (* generated by clientConnect, read by find_hh.sh *) + 220 + +let exit t = + let ec = exit_code t in + Pervasives.exit ec + +let to_string = function + | No_error -> "Ok" + | Build_error -> "Build_error" + | Build_terminated -> "Build_terminated" + | Checkpoint_error -> "Checkpoint_error" + | Input_error -> "Input_error" + | Kill_error -> "Kill_error" + | No_server_running_should_retry -> "No_server_running_should_retry" + | Server_hung_up_should_retry -> "Server_hung_up_should_retry" + | Server_hung_up_should_abort -> "Server_hung_up_should_abort" + | Out_of_time -> 
"Out_of_time" + | Out_of_retries -> "Out_of_retries" + | Server_already_exists -> "Server_already_exists" + | Server_shutting_down -> "Server_shutting_down" + | Type_error -> "Type_error" + | Build_id_mismatch -> "Build_id_mismatch" + | Monitor_connection_failure -> "Monitor_connection_failure" + | Unused_server -> "Unused_server" + | Lock_stolen -> "Lock_stolen" + | Lost_parent_monitor -> "Lost_parent_monitor" + | Interrupted -> "Interrupted" + | Worker_oomed -> "Worker_oomed" + | Worker_busy -> "Worker_busy" + | Worker_not_found_exception -> "Worker_not_found_exception" + | Worker_failed_to_send_job -> "Worker_failed_to_send_job" + | Socket_error -> "Socket_error" + | Missing_hhi -> "Missing_hhi" + | Dfind_died -> "Dfind_died" + | Dfind_unresponsive -> "Dfind_unresponsive" + | EventLogger_Timeout -> "EventLogger_Timeout" + | EventLogger_restart_out_of_retries -> "EventLogger_restart_out_of_retries" + | EventLogger_broken_pipe -> "EventLogger_broken_pipe" + | CantRunAI -> "CantRunAI" + | Watchman_failed -> "Watchman_failed" + | Watchman_fresh_instance -> "Watchman_fresh_instance" + | Watchman_invalid_result -> "Watchman_invalid_result" + | Hhconfig_deleted -> "Hhconfig_deleted" + | Hhconfig_changed -> "Hhconfig_changed" + | IDE_malformed_request -> "IDE_malformed_request" + | IDE_no_server -> "IDE_no_server" + | IDE_out_of_retries -> "IDE_out_of_retries" + | Nfs_root -> "Nfs_root" + | IDE_init_failure -> "IDE_init_failure" + | IDE_typechecker_died -> "IDE_typechecker_died" + | Redecl_heap_overflow -> "Redecl_heap_overflow" + | Shared_mem_assertion_failure -> "Shared_mem_assertion_failure" + | Out_of_shared_memory -> "Out_of_shared_memory" + | Hash_table_full -> "Hash_table_full" + | IDE_new_client_connected -> "IDE_new_client_connected" + | Lazy_decl_bug -> "Lazy_decl_bug" + | Decl_heap_elems_bug -> "Decl_heap_elems_bug" + | Parser_heap_build_error -> "Parser_heap_build_error" + | Heap_full -> "Heap_full" + | File_provider_stale -> "File_provider_stale" + | Sql_assertion_failure -> "Sql_assertion_failure" + | Local_type_env_stale -> "Local_type_env_stale" + | Sql_cantopen -> "Sql_cantopen" + | Sql_corrupt -> "Sql_corrupt" + | Sql_misuse -> "Sql_misuse" + | Uncaught_exception -> "Uncaught_exception" + | Decl_not_found -> "Decl_not_found" + | Big_rebase_detected -> "Big_rebase_detected" + | Failed_to_load_should_retry -> "Failed_to_load_should_retry" + | Failed_to_load_should_abort -> "Failed_to_load_should_abort" + +let unpack = function + | Unix.WEXITED n -> ("exit", n) + | Unix.WSIGNALED n -> + (* + * Ocaml signal numbers are mapped from System signal numbers. + * They are negative. + * See caml_convert_signal_number byterun/signals.c in Ocaml system source code + * to convert from Ocaml number to System number + *) + ("signaled", n) + | Unix.WSTOPPED n -> ("stopped", n) diff --git a/hack/utils/core/exit_status.mli b/hack/utils/core/exit_status.mli new file mode 100644 index 00000000000..0ff020ace66 --- /dev/null +++ b/hack/utils/core/exit_status.mli @@ -0,0 +1,89 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
*) + +type t = + | No_error + | Build_error + | Build_terminated + | Checkpoint_error + | Input_error + | Kill_error + | No_server_running_should_retry + | Server_hung_up_should_retry + | Server_hung_up_should_abort + | Out_of_time + | Out_of_retries + | Server_already_exists + | Type_error + | Build_id_mismatch + | Monitor_connection_failure + | Unused_server + | Lock_stolen + | Lost_parent_monitor + | Interrupted + | Worker_oomed + | Worker_busy + | Worker_not_found_exception + | Worker_failed_to_send_job + | Socket_error + | Missing_hhi + | Dfind_died + | Dfind_unresponsive + | EventLogger_Timeout + | EventLogger_restart_out_of_retries + | EventLogger_broken_pipe + | CantRunAI + | Watchman_failed + | Watchman_fresh_instance + | Watchman_invalid_result + | File_provider_stale + | Hhconfig_deleted + | Hhconfig_changed + | Server_shutting_down + | IDE_malformed_request + | IDE_no_server + | IDE_out_of_retries + | Nfs_root + | IDE_init_failure + | IDE_typechecker_died + | Redecl_heap_overflow + | Out_of_shared_memory + | Shared_mem_assertion_failure + | Hash_table_full + | IDE_new_client_connected + | Lazy_decl_bug + | Decl_heap_elems_bug + | Parser_heap_build_error + | Heap_full + | Sql_assertion_failure + | Local_type_env_stale + | Sql_cantopen + | Sql_corrupt + | Sql_misuse + | Uncaught_exception + | Decl_not_found + | Big_rebase_detected + | Failed_to_load_should_retry + | Failed_to_load_should_abort + +exception Exit_with of t + +val exit_code : t -> int + +val exit : t -> 'a + +val to_string : t -> string + +val unpack : Unix.process_status -> string * int diff --git a/hack/utils/core/get_build_id.c b/hack/utils/core/get_build_id.c new file mode 100644 index 00000000000..88140205c5d --- /dev/null +++ b/hack/utils/core/get_build_id.c @@ -0,0 +1,82 @@ +/** + * Copyright (c) 2014, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + */ +#define CAML_NAME_SPACE +#include +#include + +#include +#include +#include +#include +#include + +extern const char* const BuildInfo_kRevision; +extern const uint64_t BuildInfo_kRevisionCommitTimeUnix; +extern const char* const BuildInfo_kBuildMode; + +/** + * Export the constants provided by Facebook's build system to ocaml-land, since + * their FFI only allows you to call functions, not reference variables. Doing + * it this way makes sense for Facebook internally since our build system has + * machinery for providing these two constants automatically (and no machinery + * for doing codegen in a consistent way to build an ocaml file with them) but + * is very roundabout for external users who have to have CMake codegen these + * constants anyways. Sorry about that. 
 + */ +value hh_get_build_revision(void) { + CAMLparam0(); + CAMLlocal1(result); + + const char* const buf = BuildInfo_kRevision; + const size_t len = strlen(buf); + result = caml_alloc_string(len); + + memcpy(String_val(result), buf, len); + + CAMLreturn(result); +} + +static struct tm *get_built_timestamp(void) { + unsigned long timestamp = BuildInfo_kRevisionCommitTimeUnix; +#ifdef HH_BUILD_TIMESTAMP + if (timestamp == 0) { + timestamp = HH_BUILD_TIMESTAMP; + } +#endif + // A previous version used localtime_r, which is not available on Windows + return localtime((time_t*)&timestamp); +} + +value hh_get_build_commit_time_string(void) { + CAMLparam0(); + CAMLlocal1(result); + + char timestamp_string[25]; + struct tm *timestamp = get_built_timestamp(); + strftime(timestamp_string, sizeof(timestamp_string), "%c", timestamp); + + result = caml_copy_string(timestamp_string); + CAMLreturn(result); +} + +value hh_get_build_commit_time(void) { + return Val_long(BuildInfo_kRevisionCommitTimeUnix); +} + +value hh_get_build_mode(void) { + CAMLparam0(); + CAMLlocal1(result); + + const size_t len = strlen(BuildInfo_kBuildMode); + result = caml_alloc_string(len); + + memcpy(String_val(result), BuildInfo_kBuildMode, len); + + CAMLreturn(result); +} diff --git a/hack/utils/core/hh_core.ml b/hack/utils/core/hh_core.ml new file mode 100644 index 00000000000..403e03d9530 --- /dev/null +++ b/hack/utils/core/hh_core.ml @@ -0,0 +1,107 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +module List = struct + include Core_list + + let rec fold_left_env env l ~init ~f = + match l with + | [] -> (env, init) + | x :: xs -> + let (env, init) = f env init x in + fold_left_env env xs ~init ~f + + let rev_map_env env xs ~f = + let f2 env init x = + let (env, x) = f env x in + (env, x :: init) + in + fold_left_env env xs ~init:[] ~f:f2 + + let map_env env xs ~f = + let rec aux env xs counter = + match xs with + | [] -> (env, []) + | [y1] -> + let (env, z1) = f env y1 in + (env, [z1]) + | [y1; y2] -> + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + (env, [z1; z2]) + | [y1; y2; y3] -> + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + (env, [z1; z2; z3]) + | [y1; y2; y3; y4] -> + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + let (env, z4) = f env y4 in + (env, [z1; z2; z3; z4]) + | [y1; y2; y3; y4; y5] -> + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + let (env, z4) = f env y4 in + let (env, z5) = f env y5 in + (env, [z1; z2; z3; z4; z5]) + | y1 :: y2 :: y3 :: y4 :: y5 :: ys -> + let (env, z1) = f env y1 in + let (env, z2) = f env y2 in + let (env, z3) = f env y3 in + let (env, z4) = f env y4 in + let (env, z5) = f env y5 in + let (env, zs) = + if counter > 1000 then + let (env, zs) = rev_map_env env ys ~f in + (env, List.rev zs) + else + aux env ys (counter + 1) + in + (env, z1 :: z2 :: z3 :: z4 :: z5 :: zs) + in + aux env xs 0 + + let rec map2_env env l1 l2 ~f = + match (l1, l2) with + | ([], []) -> (env, []) + | ([], _) + | (_, []) -> + raise @@ Invalid_argument "map2_env" + | (x1 :: rl1, x2 :: rl2) -> + let (env, x) = f env x1 x2 in + let (env, rl) = map2_env env rl1 rl2 ~f in + (env, x :: rl) + + let rec map3_env env l1 l2 l3 ~f = + if length l1 <> length l2 || length l2 <> length l3 then + raise @@ Invalid_argument 
"map3_env" + else + match (l1, l2, l3) with + | ([], [], []) -> (env, []) + | ([], _, _) + | (_, [], _) + | (_, _, []) -> + raise @@ Invalid_argument "map3_env" + | (x1 :: rl1, x2 :: rl2, x3 :: rl3) -> + let (env, x) = f env x1 x2 x3 in + let (env, rl) = map3_env env rl1 rl2 rl3 ~f in + (env, x :: rl) + + let filter_map_env env xs ~f = + let (env, l) = rev_map_env env xs ~f in + (env, rev_filter_map l ~f:(fun x -> x)) + + let for_all2 = List.for_all2 + + let same_length_and_for_all2 ~f l1 l2 = + List.length l1 = List.length l2 && for_all2 f l1 l2 +end diff --git a/hack/utils/core/hh_core.mli b/hack/utils/core/hh_core.mli new file mode 100644 index 00000000000..34a2d14466a --- /dev/null +++ b/hack/utils/core/hh_core.mli @@ -0,0 +1,44 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +module List : sig + include module type of Core_list + + val fold_left_env : + 'a -> 'b list -> init:'c -> f:('a -> 'c -> 'b -> 'a * 'c) -> 'a * 'c + + val rev_map_env : 'a -> 'b list -> f:('a -> 'b -> 'a * 'c) -> 'a * 'c list + + val map_env : 'a -> 'b list -> f:('a -> 'b -> 'a * 'c) -> 'a * 'c list + + val map2_env : + 'a -> 'b list -> 'c list -> f:('a -> 'b -> 'c -> 'a * 'd) -> 'a * 'd list + + val map3_env : + 'a -> + 'b list -> + 'c list -> + 'd list -> + f:('a -> 'b -> 'c -> 'd -> 'a * 'e) -> + 'a * 'e list + + val filter_map_env : + 'a -> 'b list -> f:('a -> 'b -> 'a * 'c option) -> 'a * 'c list + + val for_all2 : f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool + + val same_length_and_for_all2 : + f:('a -> 'b -> bool) -> 'a list -> 'b list -> bool +end diff --git a/hack/utils/core/hh_logger.ml b/hack/utils/core/hh_logger.ml new file mode 100644 index 00000000000..31c9e1297b1 --- /dev/null +++ b/hack/utils/core/hh_logger.ml @@ -0,0 +1,157 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +let timestamp_string () = + Unix.( + let tm = localtime (time ()) in + let ms = int_of_float (gettimeofday () *. 1000.) mod 1000 in + let year = tm.tm_year + 1900 in + Printf.sprintf + "[%d-%02d-%02d %02d:%02d:%02d.%03d]" + year + (tm.tm_mon + 1) + tm.tm_mday + tm.tm_hour + tm.tm_min + tm.tm_sec + ms) + +(* We might want to log to both stderr and a file. Shelling out to tee isn't cross-platform. + * We could dup2 stderr to a pipe and have a child process write to both original stderr and the + * file, but that's kind of overkill. 
This is good enough *) +let dupe_log : (string * out_channel) option ref = ref None + +let set_log filename fd = dupe_log := Some (filename, fd) + +let get_log_name () = Option.map !dupe_log ~f:fst + +let id : string option ref = ref None + +let set_id passed_id = id := Some passed_id + +let id_string () = + match !id with + | None -> "" + | Some id -> Printf.sprintf "[%s] " id + +let print_with_newline ?exn fmt = + let print_raw ?exn s = + let exn_str = + Option.value_map exn ~default:"" ~f:(fun exn -> + let bt = + String_utils.indent 8 + @@ String.trim + @@ Exception.get_backtrace_string exn + in + let bt = + if bt = "" then + "" + else + "\n Backtrace:\n" ^ bt + in + Printf.sprintf + "\n Exception: %s%s" + (Exception.get_ctor_string exn) + bt) + in + let time = timestamp_string () in + let id_str = id_string () in + begin + match !dupe_log with + | None -> () + | Some (_, dupe_log_oc) -> + Printf.fprintf dupe_log_oc "%s %s%s%s\n%!" time id_str s exn_str + end; + Printf.eprintf "%s %s%s%s\n%!" time id_str s exn_str + in + Printf.ksprintf (print_raw ?exn) fmt + +let print_duration name t = + let t2 = Unix.gettimeofday () in + print_with_newline "%s: %f" name (t2 -. t); + t2 + +let exc ?(prefix : string = "") ~(stack : string) (e : exn) : unit = + print_with_newline "%s%s\n%s" prefix (Printexc.to_string e) stack + +module Level : sig + type t = + | Off + | Fatal + | Error + | Warn + | Info + | Debug + + val min_level : unit -> t + + val set_min_level : t -> unit + + val passes_min_level : t -> bool + + val log : + t -> + ?exn:Exception.t -> + ('a, unit, string, string, string, unit) format6 -> + 'a + + val log_duration : t -> string -> float -> float +end = struct + type t = + | Off + | Fatal + | Error + | Warn + | Info + | Debug + + let int_of_level = function + | Off -> 6 + | Fatal -> 5 + | Error -> 4 + | Warn -> 3 + | Info -> 2 + | Debug -> 1 + + let min_level_ref = ref Info + + let min_level () = !min_level_ref + + let set_min_level level = min_level_ref := level + + let passes_min_level level = + int_of_level level >= int_of_level !min_level_ref + + let log level ?exn fmt = + if passes_min_level level then + print_with_newline ?exn fmt + else + Printf.ifprintf () fmt + + let log_duration level fmt t = + if passes_min_level level then + print_duration fmt t + else + t +end + +(* Default log instructions to INFO level *) +let log ?(lvl = Level.Info) fmt = Level.log lvl fmt + +let log_duration fmt t = Level.log_duration Level.Info fmt t + +let fatal ?exn fmt = Level.log Level.Fatal ?exn fmt + +let error ?exn fmt = Level.log Level.Error ?exn fmt + +let warn ?exn fmt = Level.log Level.Warn ?exn fmt + +let info ?exn fmt = Level.log Level.Info ?exn fmt + +let debug ?exn fmt = Level.log Level.Debug ?exn fmt diff --git a/hack/utils/core/hh_logger.mli b/hack/utils/core/hh_logger.mli new file mode 100644 index 00000000000..4d5ac14a182 --- /dev/null +++ b/hack/utils/core/hh_logger.mli @@ -0,0 +1,63 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
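As a usage sketch for the logger above (the label and worker count are invented; set_min_level, info and log_duration are the functions just defined):

  let () =
    Hh_logger.Level.set_min_level Hh_logger.Level.Debug;
    Hh_logger.info "typechecker started with %d workers" 4;
    let start = Unix.gettimeofday () in
    (* ... do some work ... *)
    let (_ : float) = Hh_logger.log_duration "typecheck" start in
    ()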
+ * + *) + +val timestamp_string : unit -> string + +val dupe_log : (string * out_channel) option ref + +val set_log : string -> out_channel -> unit + +val get_log_name : unit -> string option + +val set_id : string -> unit + +val print_with_newline : + ?exn:Exception.t -> ('a, unit, string, unit) format4 -> 'a + +val print_duration : string -> float -> float + +val exc : ?prefix:string -> stack:string -> exn -> unit + +module Level : sig + type t = + | Off + | Fatal + | Error + | Warn + | Info + | Debug + + val min_level : unit -> t + + val set_min_level : t -> unit + + val passes_min_level : t -> bool + + val log_duration : t -> string -> float -> float +end + +val log : + ?lvl:Level.t -> ('a, unit, string, string, string, unit) format6 -> 'a + +val log_duration : string -> float -> float + +val fatal : + ?exn:Exception.t -> ('a, unit, string, string, string, unit) format6 -> 'a + +val error : + ?exn:Exception.t -> ('a, unit, string, string, string, unit) format6 -> 'a + +val warn : + ?exn:Exception.t -> ('a, unit, string, string, string, unit) format6 -> 'a + +val info : + ?exn:Exception.t -> ('a, unit, string, string, string, unit) format6 -> 'a + +val debug : + ?exn:Exception.t -> ('a, unit, string, string, string, unit) format6 -> 'a diff --git a/hack/utils/core/local_id.ml b/hack/utils/core/local_id.ml new file mode 100644 index 00000000000..b5cc5e661ae --- /dev/null +++ b/hack/utils/core/local_id.ml @@ -0,0 +1,41 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +module S = struct + type t = int * string + + let compare = Pervasives.compare +end + +include S + +let ctr = ref 1 + +let next () = + incr ctr; + !ctr + +let to_string x = snd x + +let pp fmt x = Format.pp_print_string fmt (to_string x) + +let to_int x = fst x + +let get_name x = to_string x + +let make_scoped x = (next (), x) + +let make_unscoped x = (0, x) + +let tmp () = + let res = next () in + (res, "__tmp" ^ string_of_int res) + +module Set = Set.Make (S) +module Map = MyMap.Make (S) diff --git a/hack/utils/core/local_id.mli b/hack/utils/core/local_id.mli new file mode 100644 index 00000000000..0a9cb9482ee --- /dev/null +++ b/hack/utils/core/local_id.mli @@ -0,0 +1,43 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(** Used to represent local variables in the named AST. *) + +module S : sig + type t + + val compare : t -> t -> int +end + +type t = S.t + +val pp : Format.formatter -> t -> unit + +val compare : t -> t -> int + +val to_string : t -> string + +val to_int : t -> int + +val get_name : t -> string + +val make_scoped : string -> t +(** Make an id for a scoped variable. Return a fresh id every time. +This is used to enforce that two locals with the same name but with +different scopes have different ids. *) + +val make_unscoped : string -> t +(** Make an id for an unscoped variable. Two calls with the same input + * string will return the same id. 
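Concretely, the scoped/unscoped distinction described above behaves roughly as follows (a sketch; the variable name is arbitrary):

  let a = Local_id.make_unscoped "$x"
  let b = Local_id.make_unscoped "$x" (* same id for the same string *)
  let c = Local_id.make_scoped "$x"
  let d = Local_id.make_scoped "$x" (* fresh id on every call *)

  let () =
    assert (Local_id.compare a b = 0);
    assert (Local_id.compare c d <> 0);
    assert (Local_id.get_name c = "$x")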
*) + +val tmp : unit -> t + +module Set : module type of Set.Make (S) + +module Map : module type of MyMap.Make (S) diff --git a/hack/utils/core/measure.ml b/hack/utils/core/measure.ml new file mode 100644 index 00000000000..953da634f19 --- /dev/null +++ b/hack/utils/core/measure.ml @@ -0,0 +1,358 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(** + * The Measure module is primarily useful for debugging. It's particularly + * useful for gathering stats about something that happens a lot. Let's say you + * have some code like this + * + * let number_bunnies = count_bunnies () in + * + * If you want to debug how many bunnies are being counted, you could do + * something like + * + * let number_bunnies = count_bunnies () in + * Utils.prerr_endlinef "Num bunnies: %d" number_bunnies; + * + * but what if this code is called 1000 times? Then you end up with log spew. + * Using the Measure module helps with this. You can now do + * + * let number_bunnies = count_bunnies () in + * Measure.sample "num_bunnies" number_bunnies; + * + * and then later you do + * + * Measure.print_stats (); + * + * which will print the number of samples, the total, the average, the + * variance, the max and the min. + * + * Measure can keep track of the distribution of measurements if you give it a + * bucket size. Before we collect our measurements, call + * + * Measure.track_distribution "num_bunnies" ~bucket_size:10 = + * ...do logging + * Measure.print_distribution (); + * + * And this will print how many samples fall in the 0-9 bucket, how many fall + * into the 10-19 bucket, etc + * + * A common use case is timing, and there's an easy helper method. Let's say we + * wanted to see how long our code takes + * + * let number_bunnies = Measure.time "count_bunnies_time" (fun () -> + * count_bunnies () + * ) in + * + * now when we call print_stats we'll see how fast count_bunnies is and how + * much total time we spend counting bunnies. + * + * Measurements are stored in a stateful way in a record. You can either use a + * global record or a local record. + * + * Using a global record: + * Measure.sample "num_bunnies" number_bunnies; + * Measure.print_stats (); + * + * You can push and pop the global record. This is useful if you want to reset + * some counters without throwing away that data + * + * Measure.push_global (); + * ...measure stuff + * let record = Measure.pop_global () in + * Measure.print_stats ~record (); + * + * Using a local record: + * let record = Measure.create () in + * Measure.sample ~record "num_bunnies" number_bunnies; + * Measure.print_stats ~record (); + * + * A record does not store the individual measurements, just the aggregate + * stats, which are updated online. Records can be serialized in order to be + * sent across pipes. 
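The serialize-and-send pattern mentioned above looks roughly like this; the transport between processes is elided, and the sample names and values are invented:

  (* worker side: collect into a local record and ship the aggregate *)
  let collect_worker_stats () =
    let record = Measure.create () in
    Measure.sample ~record "files_checked" 120.0;
    Measure.sample ~record "files_checked" 80.0;
    Measure.serialize record

  (* master side: fold a worker's data into a local running total *)
  let totals = Measure.create ()

  let absorb (data : Measure.record_data) =
    Measure.merge ~record:totals ~from:(Measure.deserialize data);
    Measure.print_stats ~record:totals ()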
+ *) + +module List = Hh_core.List + +module FloatMap = MyMap.Make (struct + type t = float + + let compare = compare +end) + +type distribution = { + bucket_size: float; + buckets: float FloatMap.t; +} + +type record_entry = { + count: float; + mean: float; + variance_sum: float; + max: float; + min: float; + distribution: distribution option; +} + +type record_data = record_entry SMap.t + +type record = record_data ref + +(* Creates a new empty record *) +let create () = ref SMap.empty + +let global : record list ref = ref [create ()] + +let push_global () = global := create () :: !global + +let pop_global () = + match !global with + | ret :: globals -> + global := globals; + ret + | _ -> failwith "Measure.pop_global called with empty stack" + +let serialize record = !record + +let deserialize data = ref data + +let new_entry = + { + count = 0.0; + mean = 0.0; + variance_sum = 0.0; + max = min_float; + min = max_float; + distribution = None; + } + +let new_distribution ~bucket_size = + Some { bucket_size; buckets = FloatMap.empty } + +let get_record = function + | Some record -> record + | None -> + (match List.hd !global with + | Some record -> record + | None -> + failwith + ( "No global record available! " + ^ "Did you forget to call Measure.push_global?" )) + +(* Measure can track how the values are distributed by creating buckets and + * keeping track of how many samples fall into each buckets. It will not track + * distribution by default, so call this function to turn it on *) +let track_distribution ?record name ~bucket_size = + let record = get_record record in + let entry = + match SMap.get name !record with + | None -> new_entry + | Some entry -> entry + in + let entry = { entry with distribution = new_distribution ~bucket_size } in + record := SMap.add name entry !record + +let round_down ~bucket_size value = bucket_size *. floor (value /. bucket_size) + +let update_distribution ~weight value = function + | None -> None + | Some { bucket_size; buckets } -> + let bucket = round_down ~bucket_size value in + let bucket_count = + match FloatMap.get bucket buckets with + | None -> weight + | Some count -> count +. weight + in + let buckets = FloatMap.add bucket bucket_count buckets in + Some { bucket_size; buckets } + +let sample ?record ?(weight = 1.0) name value = + let record = get_record record in + let { + count = old_count; + mean = old_mean; + variance_sum; + max; + min; + distribution; + } = + match SMap.get name !record with + | None -> new_entry + | Some entry -> entry + in + (* Add 1 * weight to the count *) + let count = old_count +. weight in + let mean = old_mean +. (weight *. (value -. old_mean) /. count) in + (* Knuth's online variance approximation algorithm, updated for weights. + * Weighted version from http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf *) + let variance_sum = + variance_sum +. (weight *. (value -. old_mean) *. (value -. mean)) + in + let max = Pervasives.max max value in + let min = Pervasives.min min value in + let distribution = update_distribution ~weight value distribution in + let entry = { count; mean; variance_sum; max; min; distribution } in + record := SMap.add name entry !record + +let delete ?record name = + let record = get_record record in + record := SMap.remove name !record + +let merge_entries name from into = + match (from, into) with + | (None, into) -> into + | (from, None) -> from + | (Some from, into) when from.count = 0. -> into + | (from, Some into) when into.count = 0. 
-> from + | (Some from, Some into) -> + let count = from.count +. into.count in + (* Using this algorithm to combine the variance sums + * https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm + *) + (* d = meanB - meanA *) + let delta = from.mean -. into.mean in + (* mean = meanA + delta * (countB/count) *) + let mean = into.mean +. (delta *. from.count /. count) in + (* VarSum = VarSumA + VarSumB + delta * delta * countA * countB / count *) + let variance_sum = + from.variance_sum + +. into.variance_sum + +. (delta *. delta *. into.count *. from.count /. count) + in + let max = Pervasives.max from.max into.max in + let min = Pervasives.min from.min into.min in + let distribution = + match (from.distribution, into.distribution) with + | (None, into) -> into + | (from, None) -> from + | (Some { bucket_size = from; _ }, Some { bucket_size = into; _ }) + when from <> into -> + Printf.kprintf + failwith + "Merging buckets for %s failed: bucket sizes %f, %f" + name + from + into + | (Some { bucket_size; buckets = from }, Some { buckets = into; _ }) -> + let buckets = + FloatMap.merge + (fun _bucket from_count into_count -> + match (from_count, into_count) with + | (None, into) -> into + | (from, None) -> from + | (Some from_count, Some into_count) -> + Some (from_count +. into_count)) + from + into + in + Some { bucket_size; buckets } + in + Some { count; mean; variance_sum; max; min; distribution } + +(* Merges all the samples from "from" into "record". If "record" is omitted + * then it uses the global record *) +let merge ?record ~from = + let into = get_record record in + into := SMap.merge merge_entries !from !into + +let time (type a) ?record name (f : unit -> a) : a = + let record = get_record record in + let start_time = Unix.gettimeofday () in + let ret = f () in + let end_time = Unix.gettimeofday () in + sample ~record name (end_time -. start_time); + ret + +let get_helper f ?record name = + let record = get_record record in + match SMap.get name !record with + | None -> None + | Some entry -> Some (f entry) + +let get_sum = get_helper (fun { count; mean; _ } -> count *. mean) + +let get_mean = get_helper (fun { mean; _ } -> mean) + +let get_count = get_helper (fun { count; _ } -> count) + +let get_max = get_helper (fun { max; _ } -> max) + +let pretty_num f = + if f > 1000000000.0 then + Printf.sprintf "%.3fG" (f /. 1000000000.0) + else if f > 1000000.0 then + Printf.sprintf "%.3fM" (f /. 1000000.0) + else if f > 1000.0 then + Printf.sprintf "%.3fK" (f /. 1000.0) + else if f = floor f then + Printf.sprintf "%d" (int_of_float f) + else + Printf.sprintf "%f" f + +let print_entry_stats ?record ?print_raw name = + let print_raw = Option.value print_raw ~default:prerr_endline in + let record = get_record record in + let prefix = Printf.sprintf "%s stats --" name in + match SMap.get name !record with + | None + | Some { count = 0.0; _ } -> + Printf.ksprintf print_raw "%s NO DATA" prefix + | Some { count; mean; variance_sum; max; min; distribution = _ } -> + let total = count *. mean in + let std_dev = sqrt (variance_sum /. 
count) in + Printf.ksprintf + print_raw + "%s samples: %s, total: %s, avg: %s, stddev: %s, max: %s, min: %s)" + prefix + (pretty_num count) + (pretty_num total) + (pretty_num mean) + (pretty_num std_dev) + (pretty_num max) + (pretty_num min) + +let print_stats ?record ?print_raw () = + let record = get_record record in + SMap.iter (fun name _ -> print_entry_stats ~record ?print_raw name) !record + +let rec print_buckets ~low ~high ~bucket_size buckets = + if low <= high then ( + let count = + match FloatMap.get low buckets with + | None -> 0.0 + | Some count -> count + in + Printf.eprintf "[%s: %s] " (pretty_num low) (pretty_num count); + let low = low +. bucket_size in + print_buckets ~low ~high ~bucket_size buckets + ) + +let print_entry_distribution ?record name = + let record = get_record record in + Printf.eprintf "%s distribution -- " name; + match SMap.get name !record with + | None + | Some { count = 0.0; _ } -> + prerr_endline "NO DATA" + | Some { distribution = None; _ } -> + prerr_endline "NO DATA (did you forget to call track_distribution?)" + | Some { max; min; distribution = Some { bucket_size; buckets }; _ } -> + let low = round_down ~bucket_size min in + let high = round_down ~bucket_size max in + print_buckets ~low ~high ~bucket_size buckets; + prerr_newline () + +let print_distributions ?record () = + let record = get_record record in + SMap.iter + (fun name { distribution; _ } -> + match distribution with + | None -> () + | Some _ -> print_entry_distribution ~record name) + !record diff --git a/hack/utils/core/measure.mli b/hack/utils/core/measure.mli new file mode 100644 index 00000000000..a7537b33eb0 --- /dev/null +++ b/hack/utils/core/measure.mli @@ -0,0 +1,49 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type record + +type record_data + +val create : unit -> record + +val push_global : unit -> unit + +val pop_global : unit -> record + +val serialize : record -> record_data + +val deserialize : record_data -> record + +val track_distribution : ?record:record -> string -> bucket_size:float -> unit + +val sample : ?record:record -> ?weight:float -> string -> float -> unit + +val time : ?record:record -> string -> (unit -> 'a) -> 'a + +val delete : ?record:record -> string -> unit + +val merge : ?record:record -> from:record -> unit + +val get_sum : ?record:record -> string -> float option + +val get_mean : ?record:record -> string -> float option + +val get_count : ?record:record -> string -> float option + +val get_max : ?record:record -> string -> float option + +val print_entry_stats : + ?record:record -> ?print_raw:(string -> unit) -> string -> unit + +val print_stats : ?record:record -> ?print_raw:(string -> unit) -> unit -> unit + +val print_entry_distribution : ?record:record -> string -> unit + +val print_distributions : ?record:record -> unit -> unit diff --git a/hack/utils/core/prim_defs.ml b/hack/utils/core/prim_defs.ml new file mode 100644 index 00000000000..84e193c49e1 --- /dev/null +++ b/hack/utils/core/prim_defs.ml @@ -0,0 +1,30 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + ** + * + * "Primitive" definitions; fighting the dependency web, this module is a leaf + * on the dependency tree. 
It may only depend on external libraries and not on + * a single module inside the repository. + * + *) + +type comment = + | CmtLine of string + | CmtBlock of string + | CmtMarkup of string +[@@deriving show] + +let is_line_comment = function + | CmtLine _ -> true + | _ -> false + +let string_of_comment = function + | CmtLine s + | CmtBlock s + | CmtMarkup s -> + s diff --git a/hack/utils/prim_defs.mli b/hack/utils/core/prim_defs.mli similarity index 77% rename from hack/utils/prim_defs.mli rename to hack/utils/core/prim_defs.mli index 09a3876ad01..8801dfc9c13 100644 --- a/hack/utils/prim_defs.mli +++ b/hack/utils/core/prim_defs.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -14,9 +14,11 @@ *) type comment = - | CmtLine of string - | CmtBlock of string - | CmtMarkup of string [@@deriving show] + | CmtLine of string + | CmtBlock of string + | CmtMarkup of string +[@@deriving show] val is_line_comment : comment -> bool -val string_of_comment :comment -> string + +val string_of_comment : comment -> string diff --git a/hack/utils/core/random_id.ml b/hack/utils/core/random_id.ml new file mode 100644 index 00000000000..5186c797a77 --- /dev/null +++ b/hack/utils/core/random_id.ml @@ -0,0 +1,31 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +let initialized = ref false + +(* Do not use / in random ids as they appear in filenames. *) +let alphanumeric_alphabet = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + +let short_string_with_alphabet alphabet = + (* If we haven't seeded random then do it now *) + if not !initialized then ( + initialized := true; + Random.self_init () + ); + let r = ref ((Random.bits () lsl 30) lor Random.bits ()) in + let cs = ref [] in + while !r > 0 do + let c = alphabet.[!r mod String.length alphabet] in + cs := String.make 1 c :: !cs; + r := !r lsr 6 + done; + String.concat "" !cs + +let short_string () = short_string_with_alphabet alphanumeric_alphabet diff --git a/hack/utils/core/random_id.mli b/hack/utils/core/random_id.mli new file mode 100644 index 00000000000..339bfea4196 --- /dev/null +++ b/hack/utils/core/random_id.mli @@ -0,0 +1,22 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +val initialized : bool ref + +val alphanumeric_alphabet : string + +val short_string_with_alphabet : string -> string + +val short_string : unit -> string diff --git a/hack/utils/core/stats.ml b/hack/utils/core/stats.ml new file mode 100644 index 00000000000..a881f9a3109 --- /dev/null +++ b/hack/utils/core/stats.ml @@ -0,0 +1,44 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* Not all stats are worth logging for every user. 
Things like the initial heap + * size are pretty deterministic if you know the input (i.e. the files being + * checked). In fact, it's *only* useful information if you know the input. + * This file is for storing these types of stats: Things that would be useful + * for a benchmark script to know, so it can say "for these inputs, under these + * conditions, here's how hh_server behaves". + *) +type t = { + mutable init_parsing_heap_size: int; + mutable init_heap_size: int; + mutable max_heap_size: int; + gc_stat: Gc.stat; +} + +let stats : t = + { + init_parsing_heap_size = 0; + init_heap_size = 0; + max_heap_size = 0; + gc_stat = Gc.quick_stat (); + } + +let get_stats () = { stats with gc_stat = Gc.quick_stat () } + +let update_max_heap_size x = stats.max_heap_size <- max stats.max_heap_size x + +let to_json stats = + Hh_json.JSON_Object + [ + ("init_parsing_heap_size", Hh_json.int_ stats.init_parsing_heap_size); + ("init_shared_heap_size", Hh_json.int_ stats.init_heap_size); + ("max_shared_heap_size", Hh_json.int_ stats.max_heap_size); + ("master_heap_words", Hh_json.int_ stats.gc_stat.Gc.heap_words); + ("master_top_heap_words", Hh_json.int_ stats.gc_stat.Gc.top_heap_words); + ] diff --git a/hack/utils/core/stats.mli b/hack/utils/core/stats.mli new file mode 100644 index 00000000000..4c107c47374 --- /dev/null +++ b/hack/utils/core/stats.mli @@ -0,0 +1,29 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +type t = { + mutable init_parsing_heap_size: int; + mutable init_heap_size: int; + mutable max_heap_size: int; + gc_stat: Gc.stat; +} + +val stats : t + +val get_stats : unit -> t + +val update_max_heap_size : int -> unit + +val to_json : t -> Hh_json.json diff --git a/hack/utils/core/utils.ml b/hack/utils/core/utils.ml new file mode 100644 index 00000000000..ed728c3e680 --- /dev/null +++ b/hack/utils/core/utils.ml @@ -0,0 +1,318 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
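For the Stats module above, a benchmark harness might dump a snapshot along these lines (the heap figure is invented, and Hh_json.json_to_string is assumed to be the usual printer in this codebase):

  let () =
    Stats.update_max_heap_size 1_500_000;
    let snapshot = Stats.get_stats () in
    print_endline (Hh_json.json_to_string (Stats.to_json snapshot))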
+ * + *) + +open Hh_core + +(** Callstack is simply a typed way to indicate that a string is a callstack *) +type callstack = Callstack of string + +let () = Random.self_init () + +let debug = ref false + +let profile = ref false + +let log = ref (fun (_ : string) -> ()) + +let d s = + if !debug then ( + print_string s; + flush stdout + ) + +let dn s = + if !debug then ( + print_string s; + print_newline (); + flush stdout + ) + +module Map = struct end + +let spf = Printf.sprintf + +let print_endlinef fmt = Printf.ksprintf print_endline fmt + +let prerr_endlinef fmt = Printf.ksprintf prerr_endline fmt + +let opt f env = function + | None -> (env, None) + | Some x -> + let (env, x) = f env x in + (env, Some x) + +let opt_fold f env = function + | None -> env + | Some x -> f env x + +let singleton_if cond x = + if cond then + [x] + else + [] + +let smap_inter m1 m2 = + SMap.fold + (fun x y acc -> + if SMap.mem x m2 then + SMap.add x y acc + else + acc) + m1 + SMap.empty + +let imap_inter m1 m2 = + IMap.fold + (fun x y acc -> + if IMap.mem x m2 then + IMap.add x y acc + else + acc) + m1 + IMap.empty + +let smap_inter_list = function + | [] -> SMap.empty + | x :: rl -> List.fold_left rl ~f:smap_inter ~init:x + +let imap_inter_list = function + | [] -> IMap.empty + | x :: rl -> List.fold_left rl ~f:imap_inter ~init:x + +let rec wfold_left2 f env l1 l2 = + match (l1, l2) with + | ([], _) + | (_, []) -> + env + | (x1 :: rl1, x2 :: rl2) -> + let env = f env x1 x2 in + wfold_left2 f env rl1 rl2 + +let sl l = List.fold_right l ~f:( ^ ) ~init:"" + +let maybe f env = function + | None -> () + | Some x -> f env x + +(* Since OCaml usually runs w/o backtraces enabled, the note makes errors + * easier to debug. *) +let unsafe_opt_note note = function + | None -> raise (Invalid_argument note) + | Some x -> x + +let unsafe_opt x = unsafe_opt_note "unsafe_opt got None" x + +let inter_list = function + | [] -> SSet.empty + | x :: rl -> List.fold_left rl ~f:SSet.inter ~init:x + +let rec list_last f1 f2 = function + | [] -> () + | [x] -> f2 x + | x :: rl -> + f1 x; + list_last f1 f2 rl + +let is_prefix_dir dir fn = + let prefix = dir ^ Filename.dir_sep in + String.length fn > String.length prefix + && String.sub fn 0 (String.length prefix) = prefix + +let try_with_channel + (oc : out_channel) (f1 : out_channel -> 'a) (f2 : exn -> 'a) : 'a = + try + let result = f1 oc in + close_out oc; + result + with e -> + close_out oc; + f2 e + +let try_with_stack (f : unit -> 'a) : ('a, exn * callstack) result = + try Ok (f ()) + with exn -> + let stack = Callstack (Printexc.get_backtrace ()) in + Error (exn, stack) + +let iter_n_acc n f acc = + let acc = ref acc in + for i = 1 to n - 1 do + acc := fst (f !acc) + done; + f !acc + +let map_of_list list = + List.fold_left ~f:(fun m (k, v) -> SMap.add k v m) ~init:SMap.empty list + +let set_of_list l = List.fold_right l ~f:SSet.add ~init:SSet.empty + +(* \A\B\C -> A\B\C *) +let strip_ns s = + if String.length s == 0 || s.[0] <> '\\' then + s + else + String.sub s 1 (String.length s - 1) + +let strip_xhp_ns s = + if String.length s == 0 || s.[0] <> ':' then + s + else + String.sub s 1 (String.length s - 1) + +let strip_both_ns s = s |> strip_ns |> strip_xhp_ns + +(* A\B\C -> \A\B\C *) +let add_ns s = + if String.length s = 0 || s.[0] <> '\\' then + "\\" ^ s + else + s + +(* \A\B\C -> C *) +let strip_all_ns s = + try + let base_name_start = String.rindex s '\\' + 1 in + String.sub s base_name_start (String.length s - base_name_start) + with Not_found -> s + +(* "\\A\\B\\C" -> 
("\\A\\B\\" * "C") *) +let split_ns_from_name (s : string) : string * string = + try + let base_name_start = String.rindex s '\\' + 1 in + let name_part = + String.sub s base_name_start (String.length s - base_name_start) + in + let namespace_part = String.sub s 0 base_name_start in + (namespace_part, name_part) + with Not_found -> ("\\", s) + +let double_colon = Str.regexp_string "::" + +(* Expands a namespace using the namespace map, a list of (string, string) tuples + * Ensures the beginning backslash is present + * + * "Str\\join" -> "\\HH\\Lib\\Str\\join" (when "Str", "HH\\Lib\\Str" is present in map) + * "HH\\Lib\\Str\\Join" -> "\\HH\\Lib\\Str\\join" + * "\\HH\\Lib\\Str\\Join" -> "\\HH\\Lib\\Str\\join" + * "just_plain_func" -> "\\just_plain_func" + *) +let expand_namespace (ns_map : (string * string) list) (s : string) : string = + let (raw_ns, name) = split_ns_from_name s in + (* Might need left backslash *) + let ns = add_ns raw_ns in + let matching_alias = + List.find ns_map (fun (alias, _) -> + let fixup = add_ns alias ^ "\\" in + fixup = ns) + in + match matching_alias with + | None -> add_ns s + | Some (_, expanded) -> add_ns (expanded ^ "\\" ^ name) + +(* + * "A::B" -> Some "A" * "B" + * "::B" "A::" "Abc" -> None + *) +let split_class_from_method (s : string) : (string * string) option = + try + let i = Str.search_forward double_colon s 0 in + let len = String.length s in + let class_part = String.sub s 0 i in + Printf.printf "Class part is [%s]\n" class_part; + let meth_part = String.sub s (i + 2) (len - i - 2) in + Printf.printf "Meth part is [%s]\n" meth_part; + if class_part = "" || meth_part = "" then + None + else + Some (class_part, meth_part) + with _ -> None + +(*****************************************************************************) +(* Same as List.iter2, except that we only iterate as far as the shortest + * of both lists. + *) +(*****************************************************************************) + +let rec iter2_shortest f l1 l2 = + match (l1, l2) with + | ([], _) + | (_, []) -> + () + | (x1 :: rl1, x2 :: rl2) -> + f x1 x2; + iter2_shortest f rl1 rl2 + +let fold_fun_list acc fl = List.fold_left fl ~f:( |> ) ~init:acc + +let compose f g x = f (g x) + +module With_complete_flag = struct + type 'a t = { + is_complete: bool; + value: 'a; + } +end + +let try_finally ~f ~(finally : unit -> unit) = + let res = + try f () + with e -> + finally (); + raise e + in + finally (); + res + +let with_context ~enter ~exit ~do_ = + enter (); + let result = + try do_ () + with e -> + let stack = Printexc.get_raw_backtrace () in + exit (); + Printexc.raise_with_backtrace e stack + in + exit (); + result + +(* We run with exception backtraces turned off for performance reasons. But for + * some kinds of catastrophic exceptions, which we never recover from (so the + * performance doesn't matter) we do want the backtrace. "assert false" is one + * of such conditions. 
+ *) +let assert_false_log_backtrace msg = + Printf.eprintf "assert false with backtrace:\n"; + Option.iter msg ~f:(Printf.eprintf "%s\n"); + Printf.eprintf + "%s" + (Printexc.raw_backtrace_to_string (Printexc.get_callstack 100)); + assert false + +(* Returns the largest element in arr strictly less than `bound` *) +let infimum (arr : 'a array) (bound : 'b) (compare : 'a -> 'b -> int) : + int option = + let rec binary_search low high = + if low = high then + Some low + else if low > high then + None + else + let mid = (low + high + 1) / 2 in + let test = arr.(mid) in + if compare test bound < 0 then + binary_search mid high + else + binary_search low (mid - 1) + in + binary_search 0 (Array.length arr - 1) + +let unwrap_snd (a, b_opt) = + match b_opt with + | None -> None + | Some b -> Some (a, b) diff --git a/hack/utils/core/utils.mli b/hack/utils/core/utils.mli new file mode 100644 index 00000000000..2e7b87afffd --- /dev/null +++ b/hack/utils/core/utils.mli @@ -0,0 +1,118 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +type callstack = Callstack of string + +val debug : bool ref + +val profile : bool ref + +val log : (string -> unit) ref + +val d : string -> unit + +val dn : string -> unit + +module Map : sig end + +val spf : ('a, unit, string) format -> 'a + +val print_endlinef : ('a, unit, string, unit) format4 -> 'a + +val prerr_endlinef : ('a, unit, string, unit) format4 -> 'a + +val opt : ('a -> 'b -> 'a * 'c) -> 'a -> 'b option -> 'a * 'c option + +val opt_fold : ('a -> 'b -> 'a) -> 'a -> 'b option -> 'a + +val singleton_if : bool -> 'a -> 'a list + +val smap_inter : 'a SMap.t -> 'b SMap.t -> 'a SMap.t + +val imap_inter : 'a IMap.t -> 'b IMap.t -> 'a IMap.t + +val smap_inter_list : 'a SMap.t list -> 'a SMap.t + +val imap_inter_list : 'a IMap.t list -> 'a IMap.t + +val wfold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b list -> 'c list -> 'a + +val sl : string list -> string + +val maybe : ('a -> 'b -> unit) -> 'a -> 'b option -> unit + +val unsafe_opt_note : string -> 'a option -> 'a + +val unsafe_opt : 'a option -> 'a + +val inter_list : SSet.t list -> SSet.t + +val list_last : ('a -> 'b) -> ('a -> unit) -> 'a list -> unit + +val is_prefix_dir : string -> string -> bool + +val try_with_channel : out_channel -> (out_channel -> 'a) -> (exn -> 'a) -> 'a + +val try_with_stack : (unit -> 'a) -> ('a, exn * callstack) result + +val iter_n_acc : int -> ('a -> 'a * 'b) -> 'a -> 'a * 'b + +val map_of_list : (SMap.key * 'a) list -> 'a SMap.t + +val set_of_list : SSet.elt list -> SSet.t + +(* Strip NS removes only the leading backslash *) +val strip_ns : string -> string + +(* Strip XHP removes only the leading colon *) +val strip_xhp_ns : string -> string + +(* Strip Both removes either leading backslash and colon, or both *) +val strip_both_ns : string -> string + +(* Strip All removes all backslash-based namespaces, but does nothing to XHP *) +val strip_all_ns : string -> string + +val add_ns : string -> string + +val split_ns_from_name : string -> string * string + +val expand_namespace : (string * string) list -> 
string -> string + +val split_class_from_method : string -> (string * string) option + +val iter2_shortest : ('a -> 'b -> 'c) -> 'a list -> 'b list -> unit + +val fold_fun_list : 'a -> ('a -> 'a) list -> 'a + +val compose : ('a -> 'b) -> ('c -> 'a) -> 'c -> 'b + +module With_complete_flag : sig + type 'a t = { + is_complete: bool; + value: 'a; + } +end + +val try_finally : f:(unit -> 'a) -> finally:(unit -> unit) -> 'a + +val with_context : + enter:(unit -> 'a) -> exit:(unit -> 'b) -> do_:(unit -> 'c) -> 'c + +val assert_false_log_backtrace : string option -> 'a + +val infimum : 'a array -> 'b -> ('a -> 'b -> int) -> int option + +val unwrap_snd : 'a * 'b option -> ('a * 'b) option diff --git a/hack/utils/cpu_cycles.ml b/hack/utils/cpu_cycles.ml index e2d6cc79a2c..aa7944fb11a 100644 --- a/hack/utils/cpu_cycles.ml +++ b/hack/utils/cpu_cycles.ml @@ -5,4 +5,5 @@ * process is interrupted or if you get scheduled onto a different * processor. * *) -external cpu_cycles : unit -> int = "ocaml_cpu_cycles" "ocaml_cpu_cycles" "noalloc";; +external cpu_cycles : unit -> int + = "ocaml_cpu_cycles" "ocaml_cpu_cycles" "noalloc" diff --git a/hack/utils/cpu_cycles.mli b/hack/utils/cpu_cycles.mli new file mode 100644 index 00000000000..d054ae8bb1d --- /dev/null +++ b/hack/utils/cpu_cycles.mli @@ -0,0 +1,16 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +external cpu_cycles : unit -> int = "ocaml_cpu_cycles" "ocaml_cpu_cycles" diff --git a/hack/utils/disk/disk.ml b/hack/utils/disk/disk.ml index 155134dc563..1f6785ee779 100644 --- a/hack/utils/disk/disk.ml +++ b/hack/utils/disk/disk.ml @@ -1,4 +1,4 @@ -include (val (if Injector_config.use_test_stubbing - then (module TestDisk : Disk_sig.S) - else (module RealDisk : Disk_sig.S) -)) +include ( val if Injector_config.use_test_stubbing then + (module TestDisk : Disk_sig.S) + else + (module RealDisk : Disk_sig.S) ) diff --git a/hack/utils/disk/disk_sig.ml b/hack/utils/disk/disk_sig.ml index 73943347fdf..b8641769ccd 100644 --- a/hack/utils/disk/disk_sig.ml +++ b/hack/utils/disk/disk_sig.ml @@ -1,24 +1,38 @@ module Types = struct exception NotADirectory of string + exception No_such_file_or_directory of string + exception Rename_target_already_exists of string + exception Rename_target_dir_not_empty of string end module type S = sig include module type of Types + val cat : string -> string + val write_file : file:string -> contents:string -> unit + val file_exists : string -> bool + val mkdir_p : string -> unit - (** Delete the given path - if it is a directory, delete recurisvely. *) + + (* Delete the given path - if it is a directory, delete recurisvely. *) val rm_dir_tree : string -> unit + val is_directory : string -> bool + val getcwd : unit -> string + val chdir : string -> unit + val mkdir : string -> int -> unit - (** Return the names of all files present in the given directory. *) + + (* Return the names of all files present in the given directory. *) val readdir : string -> string array - (** Rename from old path to new path. *) + + (* Rename from old path to new path. 
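Putting the signature above together, a caller might do something like this (paths invented; in tests this resolves to TestDisk, otherwise RealDisk):

  let () =
    Disk.mkdir_p "/tmp/demo/a";
    Disk.write_file ~file:"/tmp/demo/a/old.txt" ~contents:"hello";
    Disk.rename "/tmp/demo/a/old.txt" "/tmp/demo/a/new.txt";
    assert (Disk.cat "/tmp/demo/a/new.txt" = "hello");
    Disk.rm_dir_tree "/tmp/demo"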
*) val rename : string -> string -> unit end diff --git a/hack/utils/disk/dune b/hack/utils/disk/dune new file mode 100644 index 00000000000..dc3a22fd2d2 --- /dev/null +++ b/hack/utils/disk/dune @@ -0,0 +1,6 @@ +(library + (name disk) + (wrapped false) + (libraries + injector_config + unix)) diff --git a/hack/utils/disk/realDisk.ml b/hack/utils/disk/realDisk.ml index bceff6b508a..5bb427cceff 100644 --- a/hack/utils/disk/realDisk.ml +++ b/hack/utils/disk/realDisk.ml @@ -2,95 +2,102 @@ include Disk_sig.Types let cat (filename : string) : string = let ic = open_in_bin filename in - let len = - try - in_channel_length ic - with Sys_error _ -> 0 in + let len = (try in_channel_length ic with Sys_error _ -> 0) in (* in_channel_length returns 0 for non-regular files; try reading it using a fixed-sized buffer if it appears to be empty. NOTE: JaneStreet's Core Sys module defines a function is_file which does a proper check on whether the file exists and is regular. *) - if len > 0 then - begin + if len > 0 then ( let buf = Buffer.create len in Buffer.add_channel buf ic len; close_in ic; Buffer.contents buf - end - else begin - let len = 1024 in (* for Buffer, that's the initial size of the internal byte sequence *) + ) else + let len = 1024 in + (* for Buffer, that's the initial size of the internal byte sequence *) let buf = Buffer.create len in let bytes = Bytes.create len in let rec read_bytes () : unit = try let n = input ic bytes 0 len in - if n = 0 then () - else begin - Buffer.add_subbytes buf bytes 0 n; (* 0 is offset *) - read_bytes() - end - with End_of_file -> () in - read_bytes(); + if n = 0 then + () + else ( + Buffer.add_subbytes buf bytes 0 n; + + (* 0 is offset *) + read_bytes () + ) + with End_of_file -> () + in + read_bytes (); close_in ic; Buffer.contents buf - end let is_file_not_exist_error ~file ~err_msg = let msg = Printf.sprintf "%s: No such file or directory" file in msg = err_msg let write_file ~file ~contents = - let chan = try open_out file with - | Sys_error err_msg when (is_file_not_exist_error ~file ~err_msg) -> - raise (No_such_file_or_directory file) + let chan = + try open_out file + with Sys_error err_msg when is_file_not_exist_error ~file ~err_msg -> + raise (No_such_file_or_directory file) in - (output_string chan contents; close_out chan) + output_string chan contents; + close_out chan let rec mkdir_p = function | "" -> failwith "Unexpected empty directory, should never happen" | d when not (Sys.file_exists d) -> mkdir_p (Filename.dirname d); - Unix.mkdir d 0o777; + Unix.mkdir d 0o777 | d when Sys.is_directory d -> () | d -> raise (NotADirectory d) let rec rm_dir_tree path = - try begin + try let stats = Unix.lstat path in match stats.Unix.st_kind with | Unix.S_DIR -> let contents = Sys.readdir path in - List.iter (fun name -> + List.iter + (fun name -> let name = Filename.concat path name in rm_dir_tree name) - (Array.to_list contents) ; + (Array.to_list contents); Unix.rmdir path - | Unix.S_LNK | Unix.S_REG | Unix.S_CHR | Unix.S_BLK | Unix.S_FIFO + | Unix.S_LNK + | Unix.S_REG + | Unix.S_CHR + | Unix.S_BLK + | Unix.S_FIFO | Unix.S_SOCK -> Unix.unlink path - end with - (** Path has been deleted out from under us - can ignore it. *) - | Sys_error(s) when s = Printf.sprintf "%s: No such file or directory" path -> - () - | Unix.Unix_error(Unix.ENOENT, _, _) -> + with + (* Path has been deleted out from under us - can ignore it. 
*) + | Sys_error s when s = Printf.sprintf "%s: No such file or directory" path -> () + | Unix.Unix_error (Unix.ENOENT, _, _) -> () -let is_directory x = - try Sys.is_directory x with - | Sys_error _ -> - false +let is_directory x = (try Sys.is_directory x with Sys_error _ -> false) let file_exists = Sys.file_exists + let getcwd = Sys.getcwd + let chdir = Sys.chdir + let mkdir = Unix.mkdir + let readdir = Sys.readdir + let rename old target = if not (file_exists old) then raise (No_such_file_or_directory old) else if not (file_exists (Filename.dirname target)) then raise (No_such_file_or_directory (Filename.dirname target)) else - try Sys.rename old target with - | Sys_error s when s = "Directory not empty" -> + try Sys.rename old target + with Sys_error s when s = "Directory not empty" -> raise (Rename_target_dir_not_empty target) diff --git a/hack/utils/disk/testDisk.ml b/hack/utils/disk/testDisk.ml index 19b261f4fdb..aaac8de10cc 100644 --- a/hack/utils/disk/testDisk.ml +++ b/hack/utils/disk/testDisk.ml @@ -1,15 +1,13 @@ include Disk_sig.Types - module Hashtbl_base = Hashtbl + module Hashtbl = struct include Hashtbl_base - let find_opt t x = try Some (find t x) with - | Not_found -> - None - let empty t = (length t) = 0 -end;; + let find_opt t x = (try Some (find t x) with Not_found -> None) + let empty t = length t = 0 +end type file = | Actual_file_with_contents of string @@ -17,13 +15,14 @@ type file = module Helpers = struct exception Relative_parent_not_supported + exception Is_not_actual_file - exception Cannot_overwrite_existing_directory_with_actual_file + exception Cannot_overwrite_existing_directory_with_actual_file end (** Directory for "/" *) -let root = Hashtbl.create 10;; +let root = Hashtbl.create 10 (** We avoid using Unix.getcwd () in TestDisk because: * 1) Getting global state from this clean test environment is gross @@ -33,82 +32,72 @@ let root = Hashtbl.create 10;; let cwd = ref "/fake/initial_cwd" let getcwd () = !cwd + let chdir s = cwd := s -let rec mkdir_p path root = match Filename.dirname path with - | "." -> - mkdir_p (getcwd ()) root - | "/" -> - root +let rec mkdir_p path root = + match Filename.dirname path with + | "." -> mkdir_p (getcwd ()) root + | "/" -> root | parent -> let parent = mkdir_p parent root in - if (Filename.basename path) = "." then + if Filename.basename path = "." then parent - else if (Filename.basename path) = ".." then + else if Filename.basename path = ".." then raise Helpers.Relative_parent_not_supported - else begin + else ( match Hashtbl.find_opt parent (Filename.basename path) with | None -> let dir = Hashtbl.create 10 in let () = Hashtbl.add parent (Filename.basename path) (Directory dir) in dir - | Some (Directory table) -> - table - | Some (Actual_file_with_contents _) -> - raise (NotADirectory path) - end - -and get_dir path root = match Filename.dirname path with - | "." -> - get_dir (getcwd ()) root - | "/" -> - root + | Some (Directory table) -> table + | Some (Actual_file_with_contents _) -> raise (NotADirectory path) + ) + +and get_dir path root = + match Filename.dirname path with + | "." -> get_dir (getcwd ()) root + | "/" -> root | parent -> let parent = get_dir parent root in - if (Filename.basename path) = "." then + if Filename.basename path = "." then parent - else if (Filename.basename path) = ".." then + else if Filename.basename path = ".." 
then raise Helpers.Relative_parent_not_supported - else begin + else ( match Hashtbl.find_opt parent (Filename.basename path) with - | None -> - raise (No_such_file_or_directory path) - | Some (Directory table) -> - table - | Some (Actual_file_with_contents _) -> - raise (NotADirectory path) - end + | None -> raise (No_such_file_or_directory path) + | Some (Directory table) -> table + | Some (Actual_file_with_contents _) -> raise (NotADirectory path) + ) (** Returns file at path (may be an actual file or a directory). *) and get_file path root = let parent = get_dir (Filename.dirname path) root in let basename = Filename.basename path in if basename = "." then - (Directory parent) - else begin - try Hashtbl.find parent basename with - | Not_found -> - raise (No_such_file_or_directory path) - end + Directory parent + else + try Hashtbl.find parent basename + with Not_found -> raise (No_such_file_or_directory path) (** Initialize creation of CWD. *) let () = ignore (mkdir_p "." root) let get x = match get_file x root with - | Actual_file_with_contents contents -> - contents - | Directory _ -> - raise Helpers.Is_not_actual_file - + | Actual_file_with_contents contents -> contents + | Directory _ -> raise Helpers.Is_not_actual_file (** Set the contents "y" for file "x". Has an option to create all parent * directories automatically. *) -let set ?(create_parent_dirs=true) x y = - let parent = if create_parent_dirs then - mkdir_p (Filename.dirname x) root - else - get_dir (Filename.dirname x) root +let set ?(create_parent_dirs = true) x y = + let parent = + if create_parent_dirs then + mkdir_p (Filename.dirname x) root + else + get_dir (Filename.dirname x) root in match Hashtbl.find_opt parent (Filename.basename x) with | None -> @@ -121,16 +110,11 @@ let set ?(create_parent_dirs=true) x y = raise Helpers.Cannot_overwrite_existing_directory_with_actual_file let is_directory x = - try begin + try match get_file x root with - | Directory _ -> - true - | Actual_file_with_contents _ -> - false - end - with - | No_such_file_or_directory _ -> - false + | Directory _ -> true + | Actual_file_with_contents _ -> false + with No_such_file_or_directory _ -> false let cat = get @@ -140,9 +124,7 @@ let file_exists x = | Actual_file_with_contents _ | Directory _ -> true - with - | No_such_file_or_directory _ -> - false + with No_such_file_or_directory _ -> false let write_file ~file ~contents = set ~create_parent_dirs:false file contents @@ -155,17 +137,17 @@ let mkdir_p path = ignore (mkdir_p path root) let rm_dir_tree path = if path = "/" then Hashtbl.clear root - else try - let dir = get_dir (Filename.dirname path) root in - Hashtbl.remove dir (Filename.basename path) - with - | No_such_file_or_directory _ -> - (** File already doesn't exist; ignore. *) - () - -let readdir x = match get_file x root with - | Actual_file_with_contents _ -> - raise (NotADirectory x) + else + try + let dir = get_dir (Filename.dirname path) root in + Hashtbl.remove dir (Filename.basename path) + with No_such_file_or_directory _ -> + (* File already doesn't exist; ignore. *) + () + +let readdir x = + match get_file x root with + | Actual_file_with_contents _ -> raise (NotADirectory x) | Directory directory -> let names = Hashtbl.fold (fun k _v acc -> k :: acc) directory [] in Array.of_list names @@ -178,15 +160,17 @@ let rename old target = else let old_parent = get_dir (Filename.dirname old) root in let old_file = get_file old root in - (** What if the last character in target is a "/"? What to do? 
*) + (* What if the last character in target is a "/"? What to do? *) let target_parent = get_dir (Filename.dirname target) root in - match old_file, Hashtbl.find_opt target_parent (Filename.basename target) with - | Directory _, Some (Directory target_files) when not (Hashtbl.empty target_files) -> + match + (old_file, Hashtbl.find_opt target_parent (Filename.basename target)) + with + | (Directory _, Some (Directory target_files)) + when not (Hashtbl.empty target_files) -> raise (Rename_target_dir_not_empty target) - | Directory _, Some (Directory _) - | _, None -> - (** Rename one directory to the other. *) + | (Directory _, Some (Directory _)) + | (_, None) -> + (* Rename one directory to the other. *) Hashtbl.replace target_parent (Filename.basename target) old_file; - Hashtbl.remove old_parent (Filename.basename old); - | _, _ -> - failwith "Not sure what to do here" + Hashtbl.remove old_parent (Filename.basename old) + | (_, _) -> failwith "Not sure what to do here" diff --git a/hack/utils/exit_status.ml b/hack/utils/exit_status.ml deleted file mode 100644 index 742a3132562..00000000000 --- a/hack/utils/exit_status.ml +++ /dev/null @@ -1,215 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type t = - | No_error - | Build_error - | Build_terminated - | Checkpoint_error - | Input_error - | Kill_error - | No_server_running - | Out_of_time - | Out_of_retries - | Server_already_exists - | Type_error - | Build_id_mismatch - | Monitor_connection_failure - | Unused_server - | Lock_stolen - | Lost_parent_monitor - | Interrupted - | Worker_oomed - | Worker_busy - (** An uncaught Not_found exception in the worker. *) - | Worker_not_found_exception - | Worker_failed_to_send_job - | Socket_error - | Missing_hhi - | Dfind_died - | Dfind_unresponsive - | EventLogger_Timeout - | EventLogger_restart_out_of_retries - | EventLogger_broken_pipe - | CantRunAI - | Watchman_failed - (** It is faster to exit the server (and have the Monitor restart the server) - * on a Watchman fresh instance than to compute the files that have been - * deleted and do an incremental check. 
- *) - | Watchman_fresh_instance - | Watchman_invalid_result - | File_heap_stale - | Hhconfig_deleted - | Hhconfig_changed - | Server_shutting_down - | IDE_malformed_request - | IDE_no_server - | IDE_out_of_retries - | Nfs_root - | IDE_init_failure - | IDE_typechecker_died - | Redecl_heap_overflow - | Out_of_shared_memory - | Shared_mem_assertion_failure - | Hash_table_full - | IDE_new_client_connected - | Lazy_decl_bug - | Decl_heap_elems_bug - | Parser_heap_build_error - | Heap_full - | Sql_assertion_failure - | Local_type_env_stale - | Sql_cantopen - | Sql_corrupt - | Sql_misuse - | Uncaught_exception - | Decl_not_found - | Big_rebase_detected - -exception Exit_with of t - -let exit_code = function - | Interrupted -> -6 - | No_error -> 0 - | Build_terminated -> 1 (* used in clientBuild *) - | Kill_error -> 1 (* used in clientStop/Start/Restart *) - | Server_shutting_down -> 1 (* used in server *) - | Build_error -> 2 (* used in clientBuild *) - | Type_error -> 2 (* used in clientCheck *) - | Uncaught_exception -> 2 (* used in server and clientIde *) - | Hhconfig_changed -> 4 - | Unused_server -> 5 - | No_server_running -> 6 - | Out_of_time -> 7 - | Out_of_retries -> 7 - | Checkpoint_error -> 8 - | Build_id_mismatch -> 9 - | Monitor_connection_failure -> 9 - | Input_error -> 10 - | Lock_stolen -> 11 - | Lost_parent_monitor -> 12 - | Shared_mem_assertion_failure -> 14 - | Out_of_shared_memory -> 15 - | Hash_table_full -> 16 - | Heap_full -> 17 - | Worker_oomed -> 30 - | Worker_busy -> 31 - | Worker_not_found_exception -> 32 - | Worker_failed_to_send_job -> 33 - | Server_already_exists -> 77 - | Missing_hhi -> 97 - | Socket_error -> 98 - | Dfind_died -> 99 - | Dfind_unresponsive -> 100 - | EventLogger_Timeout -> 101 - | CantRunAI -> 102 - | Watchman_failed -> 103 - | Hhconfig_deleted -> 104 - | EventLogger_broken_pipe -> 106 - | Redecl_heap_overflow -> 107 - | EventLogger_restart_out_of_retries -> 108 - | Watchman_fresh_instance -> 109 - | Watchman_invalid_result -> 110 - | Big_rebase_detected -> 111 - | IDE_malformed_request -> 201 - | IDE_no_server -> 202 - | IDE_out_of_retries -> 203 - | Nfs_root -> 204 - | IDE_init_failure -> 205 - | IDE_typechecker_died -> 206 - | IDE_new_client_connected -> 207 - | Lazy_decl_bug -> 208 - | Decl_heap_elems_bug -> 209 - | Parser_heap_build_error -> 210 - | File_heap_stale -> 211 - | Sql_assertion_failure -> 212 - | Local_type_env_stale -> 213 - | Sql_cantopen -> 214 - | Sql_corrupt -> 215 - | Sql_misuse -> 216 - | Decl_not_found -> 217 - -let exit t = - let ec = exit_code t in - Pervasives.exit ec - -let to_string = function - | No_error -> "Ok" - | Build_error -> "Build_error" - | Build_terminated -> "Build_terminated" - | Checkpoint_error -> "Checkpoint_error" - | Input_error -> "Input_error" - | Kill_error -> "Kill_error" - | No_server_running -> "No_server_running" - | Out_of_time -> "Out_of_time" - | Out_of_retries -> "Out_of_retries" - | Server_already_exists -> "Server_already_exists" - | Server_shutting_down -> "Server_shutting_down" - | Type_error -> "Type_error" - | Build_id_mismatch -> "Build_id_mismatch" - | Monitor_connection_failure -> "Monitor_connection_failure" - | Unused_server -> "Unused_server" - | Lock_stolen -> "Lock_stolen" - | Lost_parent_monitor -> "Lost_parent_monitor" - | Interrupted -> "Interrupted" - | Worker_oomed -> "Worker_oomed" - | Worker_busy -> "Worker_busy" - | Worker_not_found_exception -> "Worker_not_found_exception" - | Worker_failed_to_send_job -> "Worker_failed_to_send_job" - | Socket_error -> "Socket_error" - 
| Missing_hhi -> "Missing_hhi" - | Dfind_died -> "Dfind_died" - | Dfind_unresponsive -> "Dfind_unresponsive" - | EventLogger_Timeout -> "EventLogger_Timeout" - | EventLogger_restart_out_of_retries -> "EventLogger_restart_out_of_retries" - | EventLogger_broken_pipe -> "EventLogger_broken_pipe" - | CantRunAI -> "CantRunAI" - | Watchman_failed -> "Watchman_failed" - | Watchman_fresh_instance -> "Watchman_fresh_instance" - | Watchman_invalid_result -> "Watchman_invalid_result" - | Hhconfig_deleted -> "Hhconfig_deleted" - | Hhconfig_changed -> "Hhconfig_changed" - | IDE_malformed_request -> "IDE_malformed_request" - | IDE_no_server -> "IDE_no_server" - | IDE_out_of_retries -> "IDE_out_of_retries" - | Nfs_root -> "Nfs_root" - | IDE_init_failure -> "IDE_init_failure" - | IDE_typechecker_died -> "IDE_typechecker_died" - | Redecl_heap_overflow -> "Redecl_heap_overflow" - | Shared_mem_assertion_failure -> "Shared_mem_assertion_failure" - | Out_of_shared_memory -> "Out_of_shared_memory" - | Hash_table_full -> "Hash_table_full" - | IDE_new_client_connected -> "IDE_new_client_connected" - | Lazy_decl_bug -> "Lazy_decl_bug" - | Decl_heap_elems_bug -> "Decl_heap_elems_bug" - | Parser_heap_build_error -> "Parser_heap_build_error" - | Heap_full -> "Heap_full" - | File_heap_stale -> "File_heap_stale" - | Sql_assertion_failure -> "Sql_assertion_failure" - | Local_type_env_stale -> "Local_type_env_stale" - | Sql_cantopen -> "Sql_cantopen" - | Sql_corrupt -> "Sql_corrupt" - | Sql_misuse -> "Sql_misuse" - | Uncaught_exception -> "Uncaught_exception" - | Decl_not_found -> "Decl_not_found" - | Big_rebase_detected -> "Big_rebase_detected" - - -let unpack = function - | Unix.WEXITED n -> "exit", n - | Unix.WSIGNALED n -> - (** - * Ocaml signal numbers are mapped from System signal numbers. - * They are negative. - * See caml_convert_signal_number byterun/signals.c in Ocaml system source code - * to convert from Ocaml number to System number - *) - "signaled", n - | Unix.WSTOPPED n -> "stopped", n diff --git a/hack/utils/file_content.ml b/hack/utils/file_content.ml deleted file mode 100644 index 433bd35135c..00000000000 --- a/hack/utils/file_content.ml +++ /dev/null @@ -1,146 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) -open Hh_core - -type position = { - line : int; (* 1-based *) - column : int; (* 1-based *) -} - -type range = { - st : position; - ed : position; -} - -type text_edit = { - range : range option; - text : string; -} - -(* UTF-8 encoding character lengths. - * - * NOTE: at the moment, edit commands are the only place where we count - * UTF-8 encoded characters as opposed to ASCII bytes - in all of the other - * places (column numbers in errors, positions in IDE commands) we still use the - * latter. - * - * We make an exception here because that's the way Nuclide counts characters, - * and the consequences of mishandling it are much more dire than in other - * places - we'll not only fail the current single request, but diverge - * the synchronized state forever. 
- *) -let get_char_length c = - let c = Char.code c in - if c lsr 7 = 0b0 then 1 - else if c lsr 5 = 0b110 then 2 - else if c lsr 4 = 0b1110 then 3 - else if c lsr 3 = 0b11110 then 4 - else raise (Failure (Printf.sprintf "Invalid UTF-8 leading byte: %d" c)) - -let is_target t line column = t.line = line && t.column = column - -let get_char content offset = - (* sentinel newline to make things easier *) - if offset = String.length content then '\n' - else content.[offset] - -let rec get_offsets content queries line column offset acc = - match acc with - | (Some _, Some _) -> acc - | (None, r2) when is_target (fst queries) line column -> - get_offsets content queries line column offset (Some offset, r2) - | (Some _ as r1, None) when is_target (snd queries) line column -> - get_offsets content queries line column offset (r1, Some offset) - | acc -> - let line, column, offset = match get_char content offset with - | '\n' -> line + 1, 1, offset + 1 - | c -> line, column + 1, offset + (get_char_length c) - in - get_offsets content queries line column offset acc - -let invalid_position p = - raise (Failure (Printf.sprintf - "Invalid position: {line: %d; column: %d}" p.line p.column)) - -(* this returns 0-based offsets *) -let get_offsets - (content : string) - (queries : position * position) - : (int * int) = - match get_offsets content queries 1 1 0 (None, None) with - | Some r1, Some r2 -> r1, r2 - | None, _ -> invalid_position (fst queries) - | _, None -> invalid_position (snd queries) - -(* This returns a 0-based offset. If you need to get two offsets, use - `get_offsets` instead. *) -let get_offset (content : string) (position : position) : int = - fst (get_offsets content (position, position)) - - -(* This takes 0-based offsets and returns 1-based positions. *) -(* It gives the position of the character *immediately after* this offset, *) -(* e.g. "offset_to_position s 0" gives the 1-based position {line=1,col=1}. *) -(* It sounds confusing but is natural when you work with half-open ranges! *) -(* It is okay to ask for the position of the offset of the end of the file. *) -(* In case of multi-byte characters, if you give an offset inside a character,*) -(* it still gives the position immediately after. 
*) -let offset_to_position (content: string) (offset: int) : position = - let rec helper ~(line: int) ~(column: int) ~(index: int) = - if index >= offset then - {line; column;} - else - let c = get_char content index in - let clen = get_char_length c in - if c = '\n' then - helper (line + 1) 1 (index + clen) - else - helper line (column + 1) (index + clen) - in - if offset > String.length content then - raise (Failure (Printf.sprintf "Invalid offset: %d" offset)) - else - helper ~line:1 ~column:1 ~index:0 - - -let apply_edit = fun content {range; text} -> - match range with - | None -> text - | Some {st; ed} -> - let start_offset, end_offset = get_offsets content (st, ed) in - let prefix = Str.string_before content start_offset in - let suffix = Str.string_after content end_offset in - prefix ^ text ^ suffix - -let print_edit b edit = - let range = match edit.range with - | None -> "None" - | Some range -> Printf.sprintf "%d:%d - %d:%d" - range.st.line range.st.column range.ed.line range.ed.column - in - Printf.bprintf b "range = %s\n text = \n%s\n" range edit.text - -let edit_file content (edits: text_edit list) : (string, string * Utils.callstack) result = - try - Ok (List.fold ~init:content ~f:apply_edit edits) - with e -> - let stack = Printexc.get_backtrace () in - let b = Buffer.create 1024 in - Printf.bprintf b "Invalid edit: %s\n" (Printexc.to_string e); - Printf.bprintf b "Original content:\n%s\n" content; - Printf.bprintf b "Edits:\n"; - List.iter edits ~f:(print_edit b); - Error (Buffer.contents b, Utils.Callstack stack) - -let edit_file_unsafe fc edits = - match edit_file fc edits with - | Ok r -> r - | Error (e, _stack) -> - Printf.eprintf "%s" e; - failwith e diff --git a/hack/utils/file_content.mli b/hack/utils/file_content.mli deleted file mode 100644 index ba0e19d863f..00000000000 --- a/hack/utils/file_content.mli +++ /dev/null @@ -1,37 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type position = { - line : int; (* 1-based *) - column : int; (* 1-based *) -} - -type range = { - st : position; - ed : position; -} - -type text_edit = { - range : range option; - text : string; -} - -val edit_file : string -> text_edit list -> (string, string * Utils.callstack) result - -val edit_file_unsafe : string -> text_edit list -> string - -(* NOTE: If you need two offsets, use `get_offsets` below instead. *) -val get_offset : string -> position -> int - -(* May raise Invalid_argument "out of bounds" if out of bounds *) -val get_offsets : string -> position * position -> int * int - -val offset_to_position : string -> int -> position - -val get_char : string -> int -> char diff --git a/hack/utils/file_content/dune b/hack/utils/file_content/dune new file mode 100644 index 00000000000..0fc7a44dfe2 --- /dev/null +++ b/hack/utils/file_content/dune @@ -0,0 +1,5 @@ +(library + (name file_content) + (wrapped false) + (libraries + utils_core)) diff --git a/hack/utils/file_content/file_content.ml b/hack/utils/file_content/file_content.ml new file mode 100644 index 00000000000..3f0c4425cf6 --- /dev/null +++ b/hack/utils/file_content/file_content.ml @@ -0,0 +1,161 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
+ * + *) +open Hh_core + +type position = { + line: int; + (* 1-based *) + column: int; (* 1-based *) +} + +type range = { + st: position; + ed: position; +} + +type text_edit = { + range: range option; + text: string; +} + +(* UTF-8 encoding character lengths. + * + * NOTE: at the moment, edit commands are the only place where we count + * UTF-8 encoded characters as opposed to ASCII bytes - in all of the other + * places (column numbers in errors, positions in IDE commands) we still use the + * latter. + * + * We make an exception here because that's the way Nuclide counts characters, + * and the consequences of mishandling it are much more dire than in other + * places - we'll not only fail the current single request, but diverge + * the synchronized state forever. + *) +let get_char_length c = + let c = Char.code c in + if c lsr 7 = 0b0 then + 1 + else if c lsr 5 = 0b110 then + 2 + else if c lsr 4 = 0b1110 then + 3 + else if c lsr 3 = 0b11110 then + 4 + else + raise (Failure (Printf.sprintf "Invalid UTF-8 leading byte: %d" c)) + +let is_target t line column = t.line = line && t.column = column + +let get_char content offset = + (* sentinel newline to make things easier *) + if offset = String.length content then + '\n' + else + content.[offset] + +let rec get_offsets content queries line column offset acc = + match acc with + | (Some _, Some _) -> acc + | (None, r2) when is_target (fst queries) line column -> + get_offsets content queries line column offset (Some offset, r2) + | ((Some _ as r1), None) when is_target (snd queries) line column -> + get_offsets content queries line column offset (r1, Some offset) + | acc -> + let (line, column, offset) = + match get_char content offset with + | '\n' -> (line + 1, 1, offset + 1) + | c -> (line, column + 1, offset + get_char_length c) + in + get_offsets content queries line column offset acc + +let invalid_position p = + raise + (Failure + (Printf.sprintf + "Invalid position: {line: %d; column: %d}" + p.line + p.column)) + +(* this returns 0-based offsets *) +let get_offsets (content : string) (queries : position * position) : int * int + = + match get_offsets content queries 1 1 0 (None, None) with + | (Some r1, Some r2) -> (r1, r2) + | (None, _) -> invalid_position (fst queries) + | (_, None) -> invalid_position (snd queries) + +(* This returns a 0-based offset. If you need to get two offsets, use + `get_offsets` instead. *) +let get_offset (content : string) (position : position) : int = + fst (get_offsets content (position, position)) + +(* This takes 0-based offsets and returns 1-based positions. *) +(* It gives the position of the character *immediately after* this offset, *) +(* e.g. "offset_to_position s 0" gives the 1-based position {line=1,col=1}. *) +(* It sounds confusing but is natural when you work with half-open ranges! *) +(* It is okay to ask for the position of the offset of the end of the file. *) +(* In case of multi-byte characters, if you give an offset inside a character,*) +(* it still gives the position immediately after. 
*) +let offset_to_position (content : string) (offset : int) : position = + let rec helper ~(line : int) ~(column : int) ~(index : int) = + if index >= offset then + { line; column } + else + let c = get_char content index in + let clen = get_char_length c in + if c = '\n' then + helper (line + 1) 1 (index + clen) + else + helper line (column + 1) (index + clen) + in + if offset > String.length content then + raise (Failure (Printf.sprintf "Invalid offset: %d" offset)) + else + helper ~line:1 ~column:1 ~index:0 + +let apply_edit content { range; text } = + match range with + | None -> text + | Some { st; ed } -> + let (start_offset, end_offset) = get_offsets content (st, ed) in + let prefix = Str.string_before content start_offset in + let suffix = Str.string_after content end_offset in + prefix ^ text ^ suffix + +let print_edit b edit = + let range = + match edit.range with + | None -> "None" + | Some range -> + Printf.sprintf + "%d:%d - %d:%d" + range.st.line + range.st.column + range.ed.line + range.ed.column + in + Printf.bprintf b "range = %s\n text = \n%s\n" range edit.text + +let edit_file content (edits : text_edit list) : + (string, string * Utils.callstack) result = + try Ok (List.fold ~init:content ~f:apply_edit edits) + with e -> + let stack = Printexc.get_backtrace () in + let b = Buffer.create 1024 in + Printf.bprintf b "Invalid edit: %s\n" (Printexc.to_string e); + Printf.bprintf b "Original content:\n%s\n" content; + Printf.bprintf b "Edits:\n"; + List.iter edits ~f:(print_edit b); + Error (Buffer.contents b, Utils.Callstack stack) + +let edit_file_unsafe fc edits = + match edit_file fc edits with + | Ok r -> r + | Error (e, _stack) -> + Printf.eprintf "%s" e; + failwith e diff --git a/hack/utils/file_content/file_content.mli b/hack/utils/file_content/file_content.mli new file mode 100644 index 00000000000..5a08ae326ad --- /dev/null +++ b/hack/utils/file_content/file_content.mli @@ -0,0 +1,39 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type position = { + line: int; + (* 1-based *) + column: int; (* 1-based *) +} + +type range = { + st: position; + ed: position; +} + +type text_edit = { + range: range option; + text: string; +} + +val edit_file : + string -> text_edit list -> (string, string * Utils.callstack) result + +val edit_file_unsafe : string -> text_edit list -> string + +(* NOTE: If you need two offsets, use `get_offsets` below instead. *) +val get_offset : string -> position -> int + +(* May raise Invalid_argument "out of bounds" if out of bounds *) +val get_offsets : string -> position * position -> int * int + +val offset_to_position : string -> int -> position + +val get_char : string -> int -> char diff --git a/hack/utils/file_pos_large.ml b/hack/utils/file_pos_large.ml index d95ad3cfead..128168c2b15 100644 --- a/hack/utils/file_pos_large.ml +++ b/hack/utils/file_pos_large.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -8,46 +8,35 @@ *) type t = { - pos_lnum : int; - pos_bol : int; - pos_cnum : int; + pos_lnum: int; + pos_bol: int; + pos_cnum: int; } -let pp fmt pos = begin +let pp fmt pos = Format.pp_print_int fmt pos.pos_lnum; Format.pp_print_string fmt ":"; - Format.pp_print_int fmt (pos.pos_cnum - pos.pos_bol + 1); -end + Format.pp_print_int fmt (pos.pos_cnum - pos.pos_bol + 1) let compare = Pervasives.compare -let dummy = { - pos_lnum = 0; - pos_bol = 0; - pos_cnum = -1; -} +let dummy = { pos_lnum = 0; pos_bol = 0; pos_cnum = -1 } -let is_dummy t = (t = dummy) +let is_dummy t = t = dummy -let beg_of_file = { - pos_lnum = 1; - pos_bol = 0; - pos_cnum = 0; -} +let beg_of_file = { pos_lnum = 1; pos_bol = 0; pos_cnum = 0 } (* constructors *) -let of_line_column_offset ~line ~column ~offset = { - pos_lnum = line; - pos_bol = offset - column; - pos_cnum = offset; -} +let of_line_column_offset ~line ~column ~offset = + { pos_lnum = line; pos_bol = offset - column; pos_cnum = offset } -let of_lexing_pos lp = { - pos_lnum = lp.Lexing.pos_lnum; - pos_bol = lp.Lexing.pos_bol; - pos_cnum = lp.Lexing.pos_cnum; -} +let of_lexing_pos lp = + { + pos_lnum = lp.Lexing.pos_lnum; + pos_bol = lp.Lexing.pos_bol; + pos_cnum = lp.Lexing.pos_cnum; + } let of_lnum_bol_cnum ~pos_lnum ~pos_bol ~pos_cnum = { pos_lnum; pos_bol; pos_cnum } @@ -55,29 +44,30 @@ let of_lnum_bol_cnum ~pos_lnum ~pos_bol ~pos_cnum = (* accessors *) let offset t = t.pos_cnum + let line t = t.pos_lnum + let column t = t.pos_cnum - t.pos_bol + let beg_of_line t = t.pos_bol let set_column c p = - { pos_lnum = p.pos_lnum; - pos_bol = p.pos_bol; - pos_cnum = p.pos_bol + c; - } + { pos_lnum = p.pos_lnum; pos_bol = p.pos_bol; pos_cnum = p.pos_bol + c } -let line_beg t = t.pos_lnum, t.pos_bol +let line_beg t = (t.pos_lnum, t.pos_bol) -let line_column t = t.pos_lnum, t.pos_cnum - t.pos_bol +let line_column t = (t.pos_lnum, t.pos_cnum - t.pos_bol) -let line_column_beg t = t.pos_lnum, t.pos_cnum - t.pos_bol, t.pos_bol +let line_column_beg t = (t.pos_lnum, t.pos_cnum - t.pos_bol, t.pos_bol) -let line_column_offset t = t.pos_lnum, t.pos_cnum - t.pos_bol, t.pos_cnum +let line_column_offset t = (t.pos_lnum, t.pos_cnum - t.pos_bol, t.pos_cnum) -let line_beg_offset t = t.pos_lnum, t.pos_bol, t.pos_cnum +let line_beg_offset t = (t.pos_lnum, t.pos_bol, t.pos_cnum) -let to_lexing_pos pos_fname t = { - Lexing.pos_fname; - Lexing.pos_lnum = t.pos_lnum; - Lexing.pos_bol = t.pos_bol; - Lexing.pos_cnum = t.pos_cnum; -} +let to_lexing_pos pos_fname t = + { + Lexing.pos_fname; + Lexing.pos_lnum = t.pos_lnum; + Lexing.pos_bol = t.pos_bol; + Lexing.pos_cnum = t.pos_cnum; + } diff --git a/hack/utils/file_pos_large.mli b/hack/utils/file_pos_large.mli new file mode 100644 index 00000000000..17850242c96 --- /dev/null +++ b/hack/utils/file_pos_large.mli @@ -0,0 +1,58 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
*) + +type t = { + pos_lnum: int; + pos_bol: int; + pos_cnum: int; +} + +val pp : Format.formatter -> t -> unit + +val compare : 'a -> 'a -> int + +val dummy : t + +val is_dummy : t -> bool + +val beg_of_file : t + +val of_line_column_offset : line:int -> column:int -> offset:int -> t + +val of_lexing_pos : Lexing.position -> t + +val of_lnum_bol_cnum : pos_lnum:int -> pos_bol:int -> pos_cnum:int -> t + +val offset : t -> int + +val line : t -> int + +val column : t -> int + +val beg_of_line : t -> int + +val set_column : int -> t -> t + +val line_beg : t -> int * int + +val line_column : t -> int * int + +val line_column_beg : t -> int * int * int + +val line_column_offset : t -> int * int * int + +val line_beg_offset : t -> int * int * int + +val to_lexing_pos : string -> t -> Lexing.position diff --git a/hack/utils/file_pos_sig.ml b/hack/utils/file_pos_sig.ml deleted file mode 100644 index e0732a1a7c4..00000000000 --- a/hack/utils/file_pos_sig.ml +++ /dev/null @@ -1,53 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -module type S = -sig -type t - -val pp : Format.formatter -> t -> unit - -(* compatible with Pervasives.compare *) -val compare : t -> t -> int - -(* compatible with Lexing.dummy_pos - and is always smaller than any valid position *) -val dummy : t - -val is_dummy : t -> bool - -(* line 1, column 0, offset 0 *) -val beg_of_file : t - -val of_line_column_offset : line:int -> column:int -> offset:int -> t - -val of_lnum_bol_cnum : pos_lnum:int -> pos_bol:int -> pos_cnum:int -> t - -val of_lexing_pos : Lexing.position -> t - -val offset : t -> int - -val line : t -> int - -val column : t -> int - -val beg_of_line : t -> int - -val line_beg : t -> int * int - -val line_column : t -> int * int - -val line_column_beg : t -> int * int * int - -val line_column_offset : t -> int * int * int - -val line_beg_offset : t -> int * int * int - -val to_lexing_pos : string -> t -> Lexing.position -end diff --git a/hack/utils/file_pos_small.ml b/hack/utils/file_pos_small.ml index 304fae1b582..0626a0efae0 100644 --- a/hack/utils/file_pos_small.ml +++ b/hack/utils/file_pos_small.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -7,6 +7,7 @@ * *) +type t = int (** * Three values packed into one 64-bit integer: * @@ -25,52 +26,64 @@ * * *) -type t = int let column_bits = 9 + let line_bits = 24 + let bol_bits = 30 -let mask bits = 1 lsl bits - 1 -let mask_by bits x = x land (mask bits) +let mask bits = (1 lsl bits) - 1 + +let mask_by bits x = x land mask bits let max_column = mask column_bits + let max_line = mask line_bits + let max_bol = mask bol_bits let dummy = -1 -let is_dummy t = (t = dummy) +let is_dummy t = t = dummy -let beg_of_line (pos:t) = - if is_dummy pos then 0 else mask_by bol_bits (pos lsr (line_bits + column_bits)) +let beg_of_line (pos : t) = + if is_dummy pos then + 0 + else + mask_by bol_bits (pos lsr (line_bits + column_bits)) -let line (pos:t) = - if is_dummy pos then 0 else mask_by line_bits (pos lsr column_bits) +let line (pos : t) = + if is_dummy pos then + 0 + else + mask_by line_bits (pos lsr column_bits) -let column (pos:t) = - if is_dummy pos then -1 else mask_by column_bits pos +let column (pos : t) = + if is_dummy pos then + -1 + else + mask_by column_bits pos let bol_line_col_unchecked bol line col = - if col < 0 - then dummy + if col < 0 then + dummy else - bol lsl (column_bits + line_bits) + (line lsl column_bits) + col + (bol lsl (column_bits + line_bits)) + (line lsl column_bits) + col let bol_line_col bol line col = - if col > max_column || line > max_line || bol > max_bol - then None - else Some (bol_line_col_unchecked bol line col) + if col > max_column || line > max_line || bol > max_bol then + None + else + Some (bol_line_col_unchecked bol line col) -let pp fmt pos = begin +let pp fmt pos = Format.pp_print_int fmt (line pos); Format.pp_print_string fmt ":"; - Format.pp_print_int fmt (column pos + 1); -end + Format.pp_print_int fmt (column pos + 1) let compare = Pervasives.compare - let beg_of_file = bol_line_col_unchecked 0 1 0 (* constructors *) @@ -79,7 +92,9 @@ let of_line_column_offset ~line ~column ~offset = bol_line_col (offset - column) line column let of_lexing_pos lp = - bol_line_col lp.Lexing.pos_bol lp.Lexing.pos_lnum + bol_line_col + lp.Lexing.pos_bol + lp.Lexing.pos_lnum (lp.Lexing.pos_cnum - lp.Lexing.pos_bol) let of_lnum_bol_cnum ~pos_lnum ~pos_bol ~pos_cnum = @@ -87,25 +102,24 @@ let of_lnum_bol_cnum ~pos_lnum ~pos_bol ~pos_cnum = (* accessors *) -let offset t = - beg_of_line t + column t +let offset t = beg_of_line t + column t -let line_beg t = line t, beg_of_line t +let line_beg t = (line t, beg_of_line t) -let line_column t = line t, column t +let line_column t = (line t, column t) -let line_column_beg t = line t, column t, beg_of_line t +let line_column_beg t = (line t, column t, beg_of_line t) -let line_column_offset t = line t, column t, offset t +let line_column_offset t = (line t, column t, offset t) -let line_beg_offset t = line t, beg_of_line t, offset t +let line_beg_offset t = (line t, beg_of_line t, offset t) -let set_column c p = - bol_line_col_unchecked (beg_of_line p) (line p) c +let set_column c p = bol_line_col_unchecked (beg_of_line p) (line p) c -let to_lexing_pos pos_fname t = { - Lexing.pos_fname; - Lexing.pos_lnum = line t; - Lexing.pos_bol = beg_of_line t; - Lexing.pos_cnum = offset t; -} +let to_lexing_pos pos_fname t = + { + Lexing.pos_fname; + Lexing.pos_lnum = line t; + Lexing.pos_bol = beg_of_line t; + Lexing.pos_cnum = offset t; + } diff --git a/hack/utils/file_pos_small.mli b/hack/utils/file_pos_small.mli new file mode 100644 index 00000000000..7ff467c78ab --- /dev/null +++ b/hack/utils/file_pos_small.mli @@ -0,0 +1,75 @@ 
+(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +type t = int + +val column_bits : int + +val line_bits : int + +val bol_bits : int + +val mask : int -> int + +val mask_by : int -> int -> int + +val max_column : int + +val max_line : int + +val max_bol : int + +val dummy : int + +val is_dummy : int -> bool + +val beg_of_line : t -> int + +val line : t -> int + +val column : t -> int + +val bol_line_col_unchecked : int -> int -> int -> int + +val bol_line_col : int -> int -> int -> int option + +val pp : Format.formatter -> t -> unit + +val compare : 'a -> 'a -> int + +val beg_of_file : int + +val of_line_column_offset : line:int -> column:int -> offset:int -> int option + +val of_lexing_pos : Lexing.position -> int option + +val of_lnum_bol_cnum : + pos_lnum:int -> pos_bol:int -> pos_cnum:int -> int option + +val offset : t -> int + +val line_beg : t -> int * int + +val line_column : t -> int * int + +val line_column_beg : t -> int * int * int + +val line_column_offset : t -> int * int * int + +val line_beg_offset : t -> int * int * int + +val set_column : int -> t -> int + +val to_lexing_pos : string -> t -> Lexing.position diff --git a/hack/utils/file_url/dune b/hack/utils/file_url/dune new file mode 100644 index 00000000000..22dc6b2d913 --- /dev/null +++ b/hack/utils/file_url/dune @@ -0,0 +1,6 @@ +(library + (name file_url) + (wrapped false) + (libraries + string + utils_core)) diff --git a/hack/utils/file_url.ml b/hack/utils/file_url/file_url.ml similarity index 76% rename from hack/utils/file_url.ml rename to hack/utils/file_url/file_url.ml index 52393853e16..e5459704ebc 100644 --- a/hack/utils/file_url.ml +++ b/hack/utils/file_url/file_url.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * @@ -14,39 +14,37 @@ (* https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/ *) (* https://msdn.microsoft.com/en-us/library/windows/desktop/ff819129(v=vs.85).aspx *) - (* unescape: given a string with %-escapes like "foo%4Abar", unescapes it. *) (* Throws if there are incorrect %-escapes (not followed by two hex digits) *) (* and for %-escapes that are outside 7-bit printable ascii. *) let percent_re = Str.regexp {|%\([0-9a-fA-F]?[0-9a-fA-F]?\)|} -let unescape (s: string) : string = +let unescape (s : string) : string = let subst _ = let hex = Str.matched_group 1 s in if String.length hex <> 2 then failwith ("incorrect %-escape in " ^ s); let code = int_of_string ("0x" ^ hex) in if code < 32 || code > 127 then failwith ("only 7bit ascii allowed in " ^ s); - (String.make 1 (Char.chr code)) + String.make 1 (Char.chr code) in Str.global_substitute percent_re subst s - (* escape: given a string, uses %-escaping for all but the safe chars. *) (* Throws if asked to escape something outside 7-bit printable ascii. 
*) -let escape ~(safe_chars: string) (s:string) : string = - let buf = Buffer.create ((String.length s) * 2) in - let f (c: char) : unit = +let escape ~(safe_chars : string) (s : string) : string = + let buf = Buffer.create (String.length s * 2) in + let f (c : char) : unit = if String.contains safe_chars c then Buffer.add_char buf c else let code = Char.code c in - if code < 32 || code > 127 then failwith ("only 7bit ascii allowed in " ^ s); + if code < 32 || code > 127 then + failwith ("only 7bit ascii allowed in " ^ s); Buffer.add_string buf (Printf.sprintf "%%%02x" code) in String.iter f s; Buffer.contents buf - (* parse: turns a file url into an absolute path. It will turn a unix-style *) (* url "file://localhost/path" into "/path", and will turn a dos-style url *) (* "file://localhost/C|/path" into "C:/path". It doesn't try to turn forward *) @@ -57,28 +55,32 @@ let escape ~(safe_chars: string) (s:string) : string = (* ascii. This function doesn't attempt to validate the escaping of the url: *) (* doesn't complain if the uri has %-encoding where it wasn't needed, nor if *) (* the uri fails to %-encode where it should. *) -let dos_url_re = Str.regexp {|^\([a-zA-Z]\)[:|]\([/\].*\)$|} (* e.g. c:\ or z|/ *) +let dos_url_re = + (* e.g. c:\ or z|/ *) + Str.regexp {|^\([a-zA-Z]\)[:|]\([/\].*\)$|} let url_re = Str.regexp {|^file://\([^/?#]*\)/\([^?#]*\)\(.*\)$|} -let parse (uri: string) : string = - if not (Str.string_match url_re uri 0) then failwith ("not a file url - " ^ uri); +let parse (uri : string) : string = + if not (Str.string_match url_re uri 0) then + failwith ("not a file url - " ^ uri); let host = Str.matched_group 1 uri in let path = Str.matched_group 2 uri in let query_fragment = Str.matched_group 3 uri in - let path = unescape path in (* this uses regexp internally *) + let path = unescape path in + (* this uses regexp internally *) if host <> "" && host <> "localhost" then failwith ("not localhost - " ^ uri); - if query_fragment <> "" then failwith ("file url can't have query/fragment - " ^ uri); - if Str.string_match dos_url_re path 0 then begin + if query_fragment <> "" then + failwith ("file url can't have query/fragment - " ^ uri); + if Str.string_match dos_url_re path 0 then let drive_letter = Str.matched_group 1 path in let rest = Str.matched_group 2 path in drive_letter ^ ":" ^ rest - end else if String.length path > 0 && path.[0] = '/' then + else if String.length path > 0 && path.[0] = '/' then failwith ("UNC file urls not supported - " ^ uri) else "/" ^ path - (* create: turns an absolute path into a file uri. 
The absolute path must be *) (* either unix-style absolute path "/path" or dos-style "c:\path" (in which *) (* case it treats both forward- and back-slashes as path separators, and *) @@ -99,14 +101,18 @@ let backslash_re = Str.regexp {|\\|} (* matches a single backslash *) let path_safe_chars = "/-._~!$&'()*+,;=:@0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" -let create (path: string) : string = - let absolute_path = if Str.string_match dos_re path 0 then - let drive_letter = Str.matched_group 1 path in - let rest = Str.matched_group 2 path in - Printf.sprintf "%s:%s" drive_letter (Str.global_replace backslash_re "/" rest) - else if String_utils.string_starts_with path "/" then - String_utils.lstrip path "/" - else - failwith ("Not an absolute filepath - " ^ path) +let create (path : string) : string = + let absolute_path = + if Str.string_match dos_re path 0 then + let drive_letter = Str.matched_group 1 path in + let rest = Str.matched_group 2 path in + Printf.sprintf + "%s:%s" + drive_letter + (Str.global_replace backslash_re "/" rest) + else if String_utils.string_starts_with path "/" then + String_utils.lstrip path "/" + else + failwith ("Not an absolute filepath - " ^ path) in - "file:///" ^ (escape path_safe_chars absolute_path) + "file:///" ^ escape path_safe_chars absolute_path diff --git a/hack/utils/file_url.mli b/hack/utils/file_url/file_url.mli similarity index 76% rename from hack/utils/file_url.mli rename to hack/utils/file_url/file_url.mli index fc58c809528..b81f7890cdf 100644 --- a/hack/utils/file_url.mli +++ b/hack/utils/file_url/file_url.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * @@ -7,5 +7,6 @@ * *) -val parse: string -> string -val create: string -> string +val parse : string -> string + +val create : string -> string diff --git a/hack/utils/findUtils.ml b/hack/utils/findUtils.ml index f8ce3684d61..a5b25185486 100644 --- a/hack/utils/findUtils.ml +++ b/hack/utils/findUtils.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -13,39 +13,48 @@ open Hh_core (* The file extensions we are interested in *) (*****************************************************************************) -let extensions = [ - ".php" ; (* normal php file *) - ".phpt" ; (* our php template or test files *) - ".hack" ; (* open source hack: bikeshed entry *) - ".hck" ; (* open source hack: bikeshed entry *) - ".hh" ; (* open source hack: biekshed entry *) - ".hhi" ; (* interface files only visible to the type checker *) - ".xhp" ; (* XHP extensions *) -] +let extensions = + [ + ".php"; + (* normal php file *) + ".phpt"; + (* our php template or test files *) + ".hack"; + (* open source hack: bikeshed entry *) + ".hck"; + (* open source hack: bikeshed entry *) + ".hh"; + (* open source hack: biekshed entry *) + ".hhi"; + (* interface files only visible to the type checker *) + ".xhp"; + (* XHP extensions *) + + ] let is_dot_file path = let filename = Filename.basename path in String.length filename > 0 && filename.[0] = '.' let is_php path = - not (is_dot_file path) && - List.exists extensions (Filename.check_suffix path) + (not (is_dot_file path)) + && List.exists extensions (Filename.check_suffix path) -(** Returns whether one of the ancestral directories of path has the given +(* Returns whether one of the ancestral directories of path has the given * name. 
*) let rec has_ancestor path ancestor_name = let dirname = Filename.dirname path in if dirname = path then - (** Terminal condition *) + (* Terminal condition *) false - else if (Filename.basename dirname) = ancestor_name then + else if Filename.basename dirname = ancestor_name then true else has_ancestor dirname ancestor_name let file_filter f = (* Filter the relative path *) - let f = Relative_path.strip_root_if_possible f in - (is_php f && not (FilesToIgnore.should_ignore f)) + let f = Relative_path.strip_root_if_possible f |> Option.value ~default:f in + is_php f && not (FilesToIgnore.should_ignore f) let path_filter f = Relative_path.suffix f |> file_filter diff --git a/hack/utils/findUtils.mli b/hack/utils/findUtils.mli index c830a616aee..465003c0892 100644 --- a/hack/utils/findUtils.mli +++ b/hack/utils/findUtils.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,8 +7,12 @@ * *) -val is_dot_file: string -> bool -val is_php: string -> bool -val has_ancestor: string -> string -> bool -val file_filter: string -> bool -val path_filter: Relative_path.t -> bool +val is_dot_file : string -> bool + +val is_php : string -> bool + +val has_ancestor : string -> string -> bool + +val file_filter : string -> bool + +val path_filter : Relative_path.t -> bool diff --git a/hack/utils/get_build_id.c b/hack/utils/get_build_id.c deleted file mode 100644 index f0a634e0e9e..00000000000 --- a/hack/utils/get_build_id.c +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Copyright (c) 2014, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - */ -#define CAML_NAME_SPACE -#include -#include - -#include -#include -#include - -#include - -#ifdef NO_HHVM -#define HHVM_VERSION_MAJOR 0 -#define HHVM_VERSION_MINOR 0 -#else -#include "hphp/runtime/version.h" -#endif - -extern const char* const BuildInfo_kRevision; -extern const uint64_t BuildInfo_kRevisionCommitTimeUnix; - -#define STRINGIFY_HELPER(x) #x -#define STRINGIFY_VALUE(x) STRINGIFY_HELPER(x) - -/** - * Export the constants provided by Facebook's build system to ocaml-land, since - * their FFI only allows you to call functions, not reference variables. Doing - * it this way makes sense for Facebook internally since our build system has - * machinery for providing these two constants automatically (and no machinery - * for doing codegen in a consistent way to build an ocaml file with them) but - * is very roundabout for external users who have to have CMake codegen these - * constants anyways. Sorry about that. 
- */ -value hh_get_build_revision(void) { - CAMLparam0(); - CAMLlocal1(result); - -#ifdef HH_BUILD_ID - const char* const buf = - STRINGIFY_VALUE(HH_BUILD_ID) "-" HHVM_VERSION_C_STRING_LITERALS; -#else - const char* const buf = BuildInfo_kRevision; -#endif - const size_t len = strlen(buf); - result = caml_alloc_string(len); - - memcpy(String_val(result), buf, len); - - CAMLreturn(result); -} - -value hh_get_build_commit_time_string(void) { - CAMLparam0(); - CAMLlocal1(result); - - char s[25]; - unsigned long timestamp = BuildInfo_kRevisionCommitTimeUnix; -#ifdef HH_BUILD_TIMESTAMP - if (timestamp == 0) { - timestamp = HH_BUILD_TIMESTAMP; - } -#endif - - // A previous version used localtime_r, which is not available on Windows - struct tm *p = localtime((time_t*)×tamp); - strftime(s, sizeof(s), "%c", p); - - result = caml_copy_string(s); - CAMLreturn(result); -} - -value hh_get_build_major(void) { - return Val_long(HHVM_VERSION_MAJOR); -} - -value hh_get_build_minor(void) { - return Val_long(HHVM_VERSION_MINOR); -} - -value hh_get_build_commit_time(void) { - return Val_long(BuildInfo_kRevisionCommitTimeUnix); -} diff --git a/hack/utils/hh_core.ml b/hack/utils/hh_core.ml deleted file mode 100644 index 404f0513c32..00000000000 --- a/hack/utils/hh_core.ml +++ /dev/null @@ -1,100 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -module List = struct - include Core_list - - let rec fold_left_env env l ~init ~f = match l with - | [] -> env, init - | x :: xs -> - let env, init = f env init x in - fold_left_env env xs ~init ~f - - let rev_map_env env xs ~f = - let f2 env init x = - let env, x = f env x in - env, x :: init - in - fold_left_env env xs ~init:[] ~f:f2 - - let map_env env xs ~f = - let rec aux env xs counter = - match xs with - | [] -> env, [] - | [y1] -> - let env, z1 = f env y1 in - env, [z1] - | [y1; y2] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - env, [z1; z2] - | [y1; y2; y3] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - env, [z1; z2; z3] - | [y1; y2; y3; y4] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - let env, z4 = f env y4 in - env, [z1; z2; z3; z4] - | [y1; y2; y3; y4; y5] -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - let env, z4 = f env y4 in - let env, z5 = f env y5 in - env, [z1; z2; z3; z4; z5] - | y1::y2::y3::y4::y5::ys -> - let env, z1 = f env y1 in - let env, z2 = f env y2 in - let env, z3 = f env y3 in - let env, z4 = f env y4 in - let env, z5 = f env y5 in - let env, zs = - if counter > 1000 - then - let env, zs = rev_map_env env ys ~f in - env, List.rev zs - else - aux env ys (counter + 1) - in - env, z1::z2::z3::z4::z5::zs - in - aux env xs 0 - - let rec map2_env env l1 l2 ~f = match l1, l2 with - | [], [] -> env, [] - | [], _ | _, [] -> raise @@ Invalid_argument "map2_env" - | x1 :: rl1, x2 :: rl2 -> - let env, x = f env x1 x2 in - let env, rl = map2_env env rl1 rl2 ~f in - env, x :: rl - - let rec map3_env env l1 l2 l3 ~f = - if length l1 <> length l2 || length l2 <> length l3 - then raise @@ Invalid_argument "map3_env" - else match l1, l2, l3 with - | [], [], [] -> env, [] - | [], _, _ | _, [], _ | _, _, [] -> raise @@ Invalid_argument "map3_env" - | x1 :: rl1, x2 :: rl2, x3 :: rl3 -> - let env, x = f env x1 x2 x3 in - let env, rl = map3_env env rl1 
rl2 rl3 ~f in - env, x :: rl - - let filter_map_env env xs ~f = - let env, l = rev_map_env env xs ~f in - env, rev_filter_map l ~f:(fun x -> x) - - let for_all2 = List.for_all2 - - let same_length_and_for_all2 ~f l1 l2 = - (List.length l1 = List.length l2) && for_all2 f l1 l2 -end diff --git a/hack/utils/hh_json/dune b/hack/utils/hh_json/dune new file mode 100644 index 00000000000..fc561e182d2 --- /dev/null +++ b/hack/utils/hh_json/dune @@ -0,0 +1,6 @@ +(library + (name hh_json) + (wrapped false) + (libraries + collections + imported_core)) diff --git a/hack/utils/hh_json/hh_json.ml b/hack/utils/hh_json/hh_json.ml index 25d73b7dc89..bff74207335 100644 --- a/hack/utils/hh_json/hh_json.ml +++ b/hack/utils/hh_json/hh_json.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -45,14 +45,22 @@ type json = | JSON_Bool of bool | JSON_Null -let is_digit = function '0' .. '9' -> true | _ -> false +let is_digit = function + | '0' .. '9' -> true + | _ -> false -let is_whitespace = function ' ' | '\n' | '\r' | '\t' -> true | _ -> false +let is_whitespace = function + | ' ' + | '\n' + | '\r' + | '\t' -> + true + | _ -> false type env = { - allow_trailing_comma : bool; - data : string; - mutable pos : int; + allow_trailing_comma: bool; + data: string; + mutable pos: int; } exception Syntax_error of string @@ -61,7 +69,7 @@ exception Syntax_error of string cleaner *) -let peek env = String.get env.data env.pos +let peek env = env.data.[env.pos] let has_more env = String.length env.data > env.pos @@ -70,7 +78,8 @@ let syntax_error env msg = if has_more env then Printf.sprintf "%s at char[%d]=%c" msg env.pos env.data.[env.pos] else - Printf.sprintf "%s after the last character" msg in + Printf.sprintf "%s after the last character" msg + in raise (Syntax_error err_msg) (* skip all blank and new line characters *) @@ -80,7 +89,7 @@ let skip_blank_chars env = done let create_env strict s = - let env = { allow_trailing_comma=not strict; data=s; pos=0 } in + let env = { allow_trailing_comma = not strict; data = s; pos = 0 } in skip_blank_chars env; env @@ -95,12 +104,10 @@ let eat_ws env c = let eat env c = skip_blank_chars env; let c' = peek env in - if c' = c then - begin - env.pos <- env.pos + 1; - skip_blank_chars env - end - else + if c' = c then ( + env.pos <- env.pos + 1; + skip_blank_chars env + ) else let err_msg = Printf.sprintf "eat: expected %c, saw %c" c c' in syntax_error env err_msg @@ -109,8 +116,7 @@ let match_substring_at s offset ss = if String.length s - offset >= ss_len then try for i = 0 to ss_len - 1 do - if s.[i + offset] <> ss.[i] then - raise Exit + if s.[i + offset] <> ss.[i] then raise Exit done; true with Exit -> false @@ -119,33 +125,44 @@ let match_substring_at s offset ss = let js_literal env s js = skip_blank_chars env; - if match_substring_at env.data env.pos s then - begin env.pos <- env.pos + String.length s; js end - else - let err_msg = - Printf.sprintf "expected '%s'" s in + if match_substring_at env.data env.pos s then ( + env.pos <- env.pos + String.length s; + js + ) else + let err_msg = Printf.sprintf "expected '%s'" s in syntax_error env err_msg -let js_true env = js_literal env "true" (JSON_Bool(true)) -let js_false env = js_literal env "false" (JSON_Bool(false)) -let js_null env = js_literal env "null" JSON_Null +let js_true env = js_literal env "true" (JSON_Bool true) + +let js_false env = js_literal env "false" (JSON_Bool false) + +let js_null env = js_literal env "null" JSON_Null -let buf_eat buf env c = (eat env c; 
Buffer.add_char buf c) -let buf_eat_all buf env c = (eat_ws env c; Buffer.add_char buf c) +let buf_eat buf env c = + eat env c; + Buffer.add_char buf c + +let buf_eat_all buf env c = + eat_ws env c; + Buffer.add_char buf c let char_code env = let rec char_code_ (acc : int) env len = - if len = 0 then acc - else begin + if len = 0 then + acc + else ( env.pos <- env.pos + 1; let c = Char.lowercase_ascii (peek env) in let i = - if '0' <= c && c <= '9' then (Char.code c) - (Char.code '0') - else if 'a' <= c && c <= 'f' then 10 + (Char.code c) - (Char.code 'a') - else syntax_error env "expected hexadecimal digit" + if '0' <= c && c <= '9' then + Char.code c - Char.code '0' + else if 'a' <= c && c <= 'f' then + 10 + Char.code c - Char.code 'a' + else + syntax_error env "expected hexadecimal digit" in - char_code_ (16*acc + i) env (len-1) - end + char_code_ ((16 * acc) + i) env (len - 1) + ) in char_code_ 0 env 4 @@ -156,30 +173,32 @@ let js_string env = match c with | '"' -> JSON_String (Buffer.contents buf) | '\\' -> - env.pos <- env.pos + 1; - let c' = peek env in - let c' = match c' with - | 'n' -> '\n' - | 'r' -> '\r' - | 't' -> '\t' - | 'u' -> - let code = char_code env in - Char.chr code - | x -> x in - - env.pos <- env.pos + 1; - Buffer.add_char buf c'; - loop env + env.pos <- env.pos + 1; + let c' = peek env in + let c' = + match c' with + | 'n' -> '\n' + | 'r' -> '\r' + | 't' -> '\t' + | 'u' -> + let code = char_code env in + Char.chr code + | x -> x + in + env.pos <- env.pos + 1; + Buffer.add_char buf c'; + loop env | _ -> - buf_eat_all buf env c; - loop env + buf_eat_all buf env c; + loop env in (match peek env with | '"' -> env.pos <- env.pos + 1 - | _ -> syntax_error env "expected '\"' character"); - if peek env = '"' then - begin eat env '"'; JSON_String("") end - else + | _ -> syntax_error env "expected '\"' character"); + if peek env = '"' then ( + eat env '"'; + JSON_String "" + ) else let res = loop env in eat env '"'; res @@ -187,45 +206,49 @@ let js_string env = let rec buf_eat_digits buf env = if has_more env then let c = peek env in - if is_digit c then - begin buf_eat buf env c; buf_eat_digits buf env end - else - () (* encountered a non-digit char, stop *) + if is_digit c then ( + buf_eat buf env c; + buf_eat_digits buf env + ) else + () + (* encountered a non-digit char, stop *) else - () (* end of string, stop *) + (* end of string, stop *) + () let buf_eat_exp buf env = let c = peek env in - if c = 'e' || c = 'E' then - begin - buf_eat buf env c; - let sign = peek env in - if sign = '+' || sign = '-' then - buf_eat buf env sign; - buf_eat_digits buf env; - end + if c = 'e' || c = 'E' then ( + buf_eat buf env c; + let sign = peek env in + if sign = '+' || sign = '-' then buf_eat buf env sign; + buf_eat_digits buf env + ) let js_number env = let buf = Buffer.create 32 in let c = peek env in - if c = '-' then - buf_eat buf env '-'; - buf_eat_digits buf env; (* ['-'] digits *) + if c = '-' then buf_eat buf env '-'; + buf_eat_digits buf env; + + (* ['-'] digits *) let c = peek env in - if c = '.' then (* ['.' digits ] *) - begin - buf_eat buf env '.'; - buf_eat_digits buf env; - end; - buf_eat_exp buf env; (* [exp digits] *) - JSON_Number(Buffer.contents buf) + if c = '.' then ( + (* ['.' 
digits ] *) + buf_eat buf env '.'; + buf_eat_digits buf env + ); + buf_eat_exp buf env; + + (* [exp digits] *) + JSON_Number (Buffer.contents buf) (* The recursive rules *) let rec js_value env = let js_value_syntax_error () = let err_msg = "expected '{[\"0123456789' or {t,f,n}" in - syntax_error env err_msg in - + syntax_error env err_msg + in if not (has_more env) then js_value_syntax_error () else @@ -237,28 +260,29 @@ let rec js_value env = | 't' -> js_true env | 'f' -> js_false env | 'n' -> js_null env - | _ -> js_value_syntax_error () + | _ -> js_value_syntax_error () + and js_object env = let rec loop members = let p = js_pair env in if peek env <> ',' then - JSON_Object(List.rev (p::members)) - else - begin - eat env ','; - if peek env = '}' then - if env.allow_trailing_comma then - JSON_Object(List.rev (p::members)) - else - syntax_error env "Hh_json.object: trailing comma" + JSON_Object (List.rev (p :: members)) + else ( + eat env ','; + if peek env = '}' then + if env.allow_trailing_comma then + JSON_Object (List.rev (p :: members)) else - loop (p::members) - end + syntax_error env "Hh_json.object: trailing comma" + else + loop (p :: members) + ) in eat env '{'; - if peek env = '}' then - begin eat env '}'; JSON_Object([]) end - else + if peek env = '}' then ( + eat env '}'; + JSON_Object [] + ) else let res = loop [] in eat env '}'; res @@ -267,25 +291,26 @@ and js_array env = let rec elements accum = let v = js_value env in if peek env <> ',' then - JSON_Array(List.rev (v::accum)) - else - begin - eat env ','; - if peek env = ']' then - if env.allow_trailing_comma then - JSON_Array(List.rev (v::accum)) - else - syntax_error env "Hh_json.array: trailing comma" + JSON_Array (List.rev (v :: accum)) + else ( + eat env ','; + if peek env = ']' then + if env.allow_trailing_comma then + JSON_Array (List.rev (v :: accum)) else - elements (v::accum) - end + syntax_error env "Hh_json.array: trailing comma" + else + elements (v :: accum) + ) in eat env '['; - if peek env = ']' then - begin eat env ']'; JSON_Array([]) end - else + if peek env = ']' then ( + eat env ']'; + JSON_Array [] + ) else let res = elements [] in - begin eat env ']'; res end + eat env ']'; + res and js_pair env = skip_blank_chars env; @@ -294,45 +319,55 @@ and js_pair env = eat env ':'; let v = js_value env in match k with - | JSON_String s -> (s,v) + | JSON_String s -> (s, v) | _ -> syntax_error env "Hh_json.js_pair: expected a JSON String" let string_of_file filename = let ic = open_in filename in let buf = Buffer.create 5096 in let rec loop () = - match try Some(input_line ic) with _ -> None with + match (try Some (input_line ic) with _ -> None) with | None -> Buffer.contents buf | Some l -> - begin - Buffer.add_string buf l; - Buffer.add_char buf '\n'; - loop (); - end + Buffer.add_string buf l; + Buffer.add_char buf '\n'; + loop () in loop () - (* Writing JSON *) +let sort_object obj_entries = + List.sort ~cmp:(fun (k1, _) (k2, _) -> Pervasives.compare k1 k2) obj_entries + module type Output_stream_intf = sig type t - val add_char: t -> char -> unit - val add_string: t -> string -> unit - val add_substring: t -> string -> int -> int -> unit + + val add_char : t -> char -> unit + + val add_string : t -> string -> unit + + val add_substring : t -> string -> int -> int -> unit end module Buffer_stream : Output_stream_intf with type t = Buffer.t = struct type t = Buffer.t + let add_char b c = Buffer.add_char b c + let add_string b s = Buffer.add_string b s + let add_substring b s ofs len = Buffer.add_substring b s 
ofs len end -module Channel_stream : Output_stream_intf with type t = Pervasives.out_channel = struct +module Channel_stream : + Output_stream_intf with type t = Pervasives.out_channel = struct type t = Pervasives.out_channel + let add_char b c = Pervasives.output_char b c + let add_string b s = Pervasives.output_string b s + let add_substring b s ofs len = Pervasives.output_substring b s ofs len end @@ -349,12 +384,12 @@ module Make_streamer (Out : Output_stream_intf) = struct let concat ~lb ~rb ~sep ~concat_elt buf l = Out.add_string buf lb; (match l with - | [] -> () - | elt :: elts -> - concat_elt buf elt; - List.iter elts begin fun e -> - Out.add_string buf sep; concat_elt buf e - end); + | [] -> () + | elt :: elts -> + concat_elt buf elt; + List.iter elts (fun e -> + Out.add_string buf sep; + concat_elt buf e)); Out.add_string buf rb let escape b s = @@ -368,11 +403,11 @@ module Make_streamer (Out : Output_stream_intf) = struct for i = 0 to String.length s - 1 do match s.[i] with | '\\' -> add_escaped i "\\\\" - | '"' -> add_escaped i "\\\"" + | '"' -> add_escaped i "\\\"" | '\n' -> add_escaped i "\\n" | '\r' -> add_escaped i "\\r" | '\t' -> add_escaped i "\\t" - | '\x00'..'\x1f' as c -> + | '\x00' .. '\x1f' as c -> let code = Char.code c in add_escaped i (Printf.sprintf "\\u%04x" code) | _ -> () @@ -380,91 +415,138 @@ module Make_streamer (Out : Output_stream_intf) = struct Out.add_substring b s !pos (String.length s - !pos); Out.add_char b '"' - let rec add_json (buf:Out.t) (json:json): unit = + let rec add_json ~sort_keys (buf : Out.t) (json : json) : unit = match json with | JSON_Object l -> - concat ~lb:"{" ~rb:"}" ~sep:"," ~concat_elt:add_assoc buf l + (* Make the pretty output deterministic by sorting the keys *) + let l = + if sort_keys then + sort_object l + else + l + in + concat ~lb:"{" ~rb:"}" ~sep:"," ~concat_elt:(add_assoc ~sort_keys) buf l | JSON_Array l -> - concat ~lb:"[" ~rb:"]" ~sep:"," ~concat_elt:add_json buf l + concat ~lb:"[" ~rb:"]" ~sep:"," ~concat_elt:(add_json ~sort_keys) buf l | JSON_String s -> escape buf s | JSON_Number n -> Out.add_string buf n - | JSON_Bool b -> Out.add_string buf (if b then "true" else "false") + | JSON_Bool b -> + Out.add_string + buf + ( if b then + "true" + else + "false" ) | JSON_Null -> Out.add_string buf "null" - and add_assoc (buf:Out.t) (k,v) = + and add_assoc ~sort_keys (buf : Out.t) (k, v) = escape buf k; Out.add_char buf ':'; - add_json buf v + add_json ~sort_keys buf v end module Out_buffer = Make_streamer (Buffer_stream) module Out_channel = Make_streamer (Channel_stream) -let rec json_to_string ?(pretty=false) (json:json): string = - if pretty - then json_to_multiline json +let rec json_to_string ?(sort_keys = false) ?(pretty = false) (json : json) : + string = + if pretty then + json_to_multiline ~sort_keys json else - let buf = Buffer.create 1024 in (* need a better estimate! *) - Out_buffer.add_json buf json; + let buf = Buffer.create 1024 in + (* need a better estimate! 
*) + Out_buffer.add_json ~sort_keys buf json; Buffer.contents buf -and json_to_multiline json = +and json_to_multiline ?(sort_keys = false) json = let rec loop indent json = - let single = json_to_string json in - if String.length single < 80 then single else - match json with - | JSON_Array l -> + let single = json_to_string ~sort_keys json in + if String.length single < 80 then + single + else + match json with + | JSON_Array l -> let nl = List.map l (loop (indent ^ " ")) in - "[\n" ^ indent ^ " " ^ (String.concat (",\n" ^ indent ^ " ") nl) ^ - "\n" ^ indent ^ "]" - | JSON_Object l -> - let nl = - List.map l - (fun (k, v) -> - indent ^ " " ^ (json_to_string (JSON_String k)) ^ ":" ^ - (loop (indent ^ " ") v)) - in - "{\n" ^ (String.concat ",\n" nl) ^ "\n" ^ indent ^ "}" - | _ -> single + "[\n" + ^ indent + ^ " " + ^ String.concat (",\n" ^ indent ^ " ") nl + ^ "\n" + ^ indent + ^ "]" + | JSON_Object l -> + (* Make the pretty output deterministic by sorting the keys *) + let l = + if sort_keys then + sort_object l + else + l + in + let nl = + List.map l (fun (k, v) -> + indent + ^ " " + ^ json_to_string ~sort_keys (JSON_String k) + ^ ":" + ^ loop (indent ^ " ") v) + in + "{\n" ^ String.concat ",\n" nl ^ "\n" ^ indent ^ "}" + | _ -> single in loop "" json -let json_to_output oc (json:json): unit = - Out_channel.add_json oc json +let json_to_output oc (json : json) : unit = + Out_channel.add_json ~sort_keys:false oc json -let rec json_to_multiline_output oc (json:json): unit = - let json_assoc_to_output oc (k,v) : unit = +let rec json_to_multiline_output oc (json : json) : unit = + let json_assoc_to_output oc (k, v) : unit = Out_channel.escape oc k; output_string oc ":"; json_to_multiline_output oc v in match json with | JSON_Object l -> - Out_channel.concat ~lb:"{" ~rb:"}" ~sep:",\n" ~concat_elt:json_assoc_to_output oc l + Out_channel.concat + ~lb:"{" + ~rb:"}" + ~sep:",\n" + ~concat_elt:json_assoc_to_output + oc + l | JSON_Array l -> - Out_channel.concat ~lb:"[" ~rb:"]" ~sep:",\n" ~concat_elt:json_to_multiline_output oc l - | JSON_String s -> - Out_channel.escape oc s - | JSON_Number n -> - output_string oc n + Out_channel.concat + ~lb:"[" + ~rb:"]" + ~sep:",\n" + ~concat_elt:json_to_multiline_output + oc + l + | JSON_String s -> Out_channel.escape oc s + | JSON_Number n -> output_string oc n | JSON_Bool b -> - output_string oc (if b then "true" else "false") - | JSON_Null -> - output_string oc "null" - -let output_json_endline ~pretty (oc: out_channel) (json:json) = - if pretty then output_string oc (json_to_multiline json) - else json_to_output oc json; + output_string + oc + ( if b then + "true" + else + "false" ) + | JSON_Null -> output_string oc "null" + +let output_json_endline ~pretty (oc : out_channel) (json : json) = + if pretty then + output_string oc (json_to_multiline json) + else + json_to_output oc json; output_char oc '\n'; flush oc -let print_json_endline ?(pretty=false) (json:json) = +let print_json_endline ?(pretty = false) (json : json) = output_json_endline ~pretty stdout json -let prerr_json_endline ?(pretty=false) (json:json) = +let prerr_json_endline ?(pretty = false) (json : json) = output_json_endline ~pretty stderr json -let json_of_string ?(strict=true) s = +let json_of_string ?(strict = true) s = let lb = create_env strict s in js_value lb @@ -477,7 +559,12 @@ let float_ n = let s = string_of_float n in (* ocaml strings can end in '.', which isn't allowed in json *) let len = String.length s in - let s = if String.get s (len - 1) = '.' 
then String.sub s 0 (len - 1) else s in + let s = + if s.[len - 1] = '.' then + String.sub s 0 (len - 1) + else + s + in JSON_Number s let string_ s = JSON_String s @@ -538,23 +625,33 @@ module type Access = sig | Missing_key_error of string * keytrace | Wrong_type_error of keytrace * json_type - type 'a m = (('a * keytrace), access_failure) result + type 'a m = ('a * keytrace, access_failure) result val keytrace_to_string : keytrace -> string + val access_failure_to_string : access_failure -> string val return : 'a -> 'a m - val (>>=) : 'a m -> (('a * keytrace) -> 'b m) -> 'b m + val ( >>= ) : 'a m -> ('a * keytrace -> 'b m) -> 'b m + val counit_with : (access_failure -> 'a) -> 'a m -> 'a + val to_option : 'a m -> 'a option + val get_obj : string -> json * keytrace -> json m + val get_bool : string -> json * keytrace -> bool m + val get_string : string -> json * keytrace -> string m + val get_number : string -> json * keytrace -> string m + val get_number_int : string -> json * keytrace -> int m - val get_array: string -> json * keytrace -> (json list) m - val get_val: string -> json * keytrace -> json m + + val get_array : string -> json * keytrace -> json list m + + val get_val : string -> json * keytrace -> json m end module Access = struct @@ -565,12 +662,14 @@ module Access = struct | Missing_key_error of string * keytrace | Wrong_type_error of keytrace * json_type - type 'a m = (('a * keytrace), access_failure) result + type 'a m = ('a * keytrace, access_failure) result let keytrace_to_string x = - if x = [] then "" else - let res = List.rev x |> String.concat "." in - " (at field `" ^ res ^ "`)" + if x = [] then + "" + else + let res = List.rev x |> String.concat "." in + " (at field `" ^ res ^ "`)" let access_failure_to_string = function | Not_an_object x -> @@ -578,20 +677,22 @@ module Access = struct | Missing_key_error (x, y) -> Printf.sprintf "Missing key: %s%s" x (keytrace_to_string y) | Wrong_type_error (x, y) -> - Printf.sprintf "Value expected to be %s%s" - (json_type_to_string y) (keytrace_to_string x) + Printf.sprintf + "Value expected to be %s%s" + (json_type_to_string y) + (keytrace_to_string x) let return v = Ok (v, []) - let (>>=) m f = match m with + let ( >>= ) m f = + match m with | Error _ as x -> x | Ok x -> f x - let counit_with f m = match m with - | Ok (v, _) -> - v - | Error e -> - f e + let counit_with f m = + match m with + | Ok (v, _) -> v + | Error e -> f e let to_option = function | Ok (v, _) -> Some v @@ -599,29 +700,28 @@ module Access = struct let catch_type_error exp f (v, keytrace) = try Ok (f v, keytrace) with - | Failure msg when (String.equal "int_of_string" msg) -> - Error (Wrong_type_error (keytrace, exp)) - | Assert_failure _ -> - Error (Wrong_type_error (keytrace, exp)) + | Failure msg when String.equal "int_of_string" msg -> + Error (Wrong_type_error (keytrace, exp)) + | Assert_failure _ -> Error (Wrong_type_error (keytrace, exp)) let get_val k (v, keytrace) = - try begin + try let obj = get_object_exn v in - let candidate = List.fold_left obj ~init:None ~f:(fun opt (key, json) -> - if opt <> None then opt - else if key = k then (Some json) - else None - ) in + let candidate = + List.fold_left obj ~init:None ~f:(fun opt (key, json) -> + if opt <> None then + opt + else if key = k then + Some json + else + None) + in match candidate with | None -> Error (Missing_key_error (k, keytrace)) - | Some obj -> - Ok (obj, k :: keytrace) - end with - | Assert_failure _ -> - Error (Not_an_object (keytrace)) + | Some obj -> Ok (obj, k :: keytrace) + 
with Assert_failure _ -> Error (Not_an_object keytrace) - let make_object_json v = - JSON_Object (get_object_exn v) + let make_object_json v = JSON_Object (get_object_exn v) let get_obj k (v, keytrace) = get_val k (v, keytrace) >>= catch_type_error Object_t make_object_json @@ -642,42 +742,47 @@ module Access = struct get_val k (v, keytrace) >>= catch_type_error Array_t get_array_exn end -let ( >=@ ) : int -> int option -> bool = fun lhs rhs -> +let ( >=@ ) : int -> int option -> bool = + fun lhs rhs -> match rhs with | None -> false | Some rhs -> lhs >= rhs -let ( <=@ ) : int -> int option -> bool = fun lhs rhs -> +let ( <=@ ) : int -> int option -> bool = + fun lhs rhs -> match rhs with | None -> false | Some rhs -> lhs <= rhs let json_truncate - ?(max_string_length:int option) - ?(max_child_count:int option) - ?(max_depth:int option) - ?(max_total_count:int option) - ?(has_changed:bool ref option) - (json: json) - : json = + ?(max_string_length : int option) + ?(max_child_count : int option) + ?(max_depth : int option) + ?(max_total_count : int option) + ?(has_changed : bool ref option) + (json : json) : json = let total_count = ref 0 in - let mark_changed () = match has_changed with + let mark_changed () = + match has_changed with | None -> () | Some r -> r := true in - let rec truncate_children ~child_count children ~f = match children with | [] -> [] - | _ when !total_count >=@ max_total_count -> mark_changed (); [] - | _ when child_count >=@ max_child_count -> mark_changed (); [] + | _ when !total_count >=@ max_total_count -> + mark_changed (); + [] + | _ when child_count >=@ max_child_count -> + mark_changed (); + [] | c :: rest -> incr total_count; - let c' = f c in (* because of mutable variable, it's important to do this first *) - c' :: (truncate_children (child_count + 1) rest f) + let c' = f c in + (* because of mutable variable, it's important to do this first *) + c' :: truncate_children (child_count + 1) rest f in - - let rec truncate ~(depth: int) (json: json) : json = + let rec truncate ~(depth : int) (json : json) : json = match json with | JSON_Object [] | JSON_Array [] @@ -687,35 +792,45 @@ let json_truncate json | JSON_Object props -> let f (k, v) = (k, truncate (depth + 1) v) in - if depth >=@ max_depth then begin mark_changed (); JSON_Object [] end - else JSON_Object (truncate_children ~child_count:0 props ~f) + if depth >=@ max_depth then ( + mark_changed (); + JSON_Object [] + ) else + JSON_Object (truncate_children ~child_count:0 props ~f) | JSON_Array values -> let f v = truncate (depth + 1) v in - if depth >=@ max_depth then begin mark_changed (); JSON_Array [] end - else JSON_Array (truncate_children ~child_count:0 values ~f) - | JSON_String s -> begin + if depth >=@ max_depth then ( + mark_changed (); + JSON_Array [] + ) else + JSON_Array (truncate_children ~child_count:0 values ~f) + | JSON_String s -> + begin match max_string_length with | None -> json | Some max_string_length -> - if String.length s <= max_string_length then JSON_String s - else begin mark_changed (); JSON_String ((String.sub s 0 max_string_length) ^ "...") end + if String.length s <= max_string_length then + JSON_String s + else ( + mark_changed (); + JSON_String (String.sub s 0 max_string_length ^ "...") + ) end in truncate ~depth:0 json - let json_truncate_string - ?(max_string_length:int option) - ?(max_child_count:int option) - ?(max_depth:int option) - ?(max_total_count:int option) - ?(allowed_total_length:int option) + ?(max_string_length : int option) + ?(max_child_count : int 
option) + ?(max_depth : int option) + ?(max_total_count : int option) + ?(allowed_total_length : int option) ?(if_reformat_multiline = true) - (s: string) - : string = + (s : string) : string = if String.length s <=@ allowed_total_length then - s (* fast zero-allocation path for the commonest case *) - else begin + s + (* fast zero-allocation path for the commonest case *) + else let has_changed = ref false in let json = json_of_string s in let truncated_json = @@ -725,22 +840,22 @@ let json_truncate_string ?max_depth ?max_total_count ~has_changed - json in + json + in if not !has_changed then - s (* moderately fast fewer-string-allocating for another common case *) + s + (* moderately fast fewer-string-allocating for another common case *) + else if if_reformat_multiline then + json_to_multiline truncated_json else - if if_reformat_multiline then json_to_multiline truncated_json - else json_to_string truncated_json - end + json_to_string truncated_json let get_field accessor on_failure json = - let open Access in - let on_failure af = on_failure (access_failure_to_string af) in - counit_with on_failure (return json >>= accessor) + Access.( + let on_failure af = on_failure (access_failure_to_string af) in + counit_with on_failure (return json >>= accessor)) -let get_field_opt accessor json = - let open Access in - to_option (return json >>= accessor) +let get_field_opt accessor json = Access.(to_option (return json >>= accessor)) module JsonKey = struct type t = json @@ -749,42 +864,48 @@ module JsonKey = struct (* Numbers are compared by string value, so "1" and "1.0" and "1.00" are *) (* all different; this way we don't worry about different floating point *) (* semantics between ocaml and json. *) - let rec compare (x: t) (y: t) = - match x, y with - | JSON_Null, JSON_Null -> 0 - | JSON_Null, _ -> -1 - | _, JSON_Null -> 1 - | JSON_Bool false, JSON_Bool false -> 0 - | JSON_Bool false, JSON_Bool true -> -1 - | JSON_Bool true, JSON_Bool false -> 1 - | JSON_Bool true, JSON_Bool true -> 0 - | JSON_Bool _, _ -> -1 - | _, JSON_Bool _ -> 1 - | JSON_Number x, JSON_Number y -> String.compare x y - | JSON_Number _, _ -> -1 - | _, JSON_Number _ -> 1 - | JSON_String x, JSON_String y -> String.compare x y - | JSON_String _, _ -> -1 - | _, JSON_String _ -> 1 - | JSON_Array (x::xs), JSON_Array (y::ys) -> + let rec compare (x : t) (y : t) = + match (x, y) with + | (JSON_Null, JSON_Null) -> 0 + | (JSON_Null, _) -> -1 + | (_, JSON_Null) -> 1 + | (JSON_Bool false, JSON_Bool false) -> 0 + | (JSON_Bool false, JSON_Bool true) -> -1 + | (JSON_Bool true, JSON_Bool false) -> 1 + | (JSON_Bool true, JSON_Bool true) -> 0 + | (JSON_Bool _, _) -> -1 + | (_, JSON_Bool _) -> 1 + | (JSON_Number x, JSON_Number y) -> String.compare x y + | (JSON_Number _, _) -> -1 + | (_, JSON_Number _) -> 1 + | (JSON_String x, JSON_String y) -> String.compare x y + | (JSON_String _, _) -> -1 + | (_, JSON_String _) -> 1 + | (JSON_Array (x :: xs), JSON_Array (y :: ys)) -> let r = compare x y in - if r <> 0 then r else compare (JSON_Array xs) (JSON_Array ys) - | JSON_Array [], JSON_Array [] -> 0 - | JSON_Array [], JSON_Array _ -> -1 - | JSON_Array _, JSON_Array [] -> 1 - | JSON_Array _, _ -> -1 - | _, JSON_Array _ -> 1 - | JSON_Object ((kx,vx)::xs), JSON_Object ((ky,vy)::ys) -> + if r <> 0 then + r + else + compare (JSON_Array xs) (JSON_Array ys) + | (JSON_Array [], JSON_Array []) -> 0 + | (JSON_Array [], JSON_Array _) -> -1 + | (JSON_Array _, JSON_Array []) -> 1 + | (JSON_Array _, _) -> -1 + | (_, JSON_Array _) -> 1 + | (JSON_Object ((kx, 
vx) :: xs), JSON_Object ((ky, vy) :: ys)) -> let r = String.compare kx ky in - if r <> 0 then r else - let r = compare vx vy in - if r <> 0 then r else - compare (JSON_Object xs) (JSON_Object ys) - | JSON_Object [], JSON_Object [] -> 0 - | JSON_Object [], JSON_Object _ -> -1 - | JSON_Object _, JSON_Object [] -> 1 + if r <> 0 then + r + else + let r = compare vx vy in + if r <> 0 then + r + else + compare (JSON_Object xs) (JSON_Object ys) + | (JSON_Object [], JSON_Object []) -> 0 + | (JSON_Object [], JSON_Object _) -> -1 + | (JSON_Object _, JSON_Object []) -> 1 end module JSet = Set.Make (JsonKey) - module JMap = MyMap.Make (JsonKey) diff --git a/hack/utils/hh_json/hh_json.mli b/hack/utils/hh_json/hh_json.mli index 016f1981491..dbfeb22a39d 100644 --- a/hack/utils/hh_json/hh_json.mli +++ b/hack/utils/hh_json/hh_json.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,12 +7,12 @@ * *) -(** +(* * Hh_json parsing and pretty printing library. *) type json = - JSON_Object of (string * json) list + | JSON_Object of (string * json) list | JSON_Array of json list | JSON_String of string | JSON_Number of string @@ -21,39 +21,64 @@ type json = exception Syntax_error of string -val json_to_string : ?pretty:bool -> json -> string -val json_to_multiline : json -> string -val json_to_output: out_channel -> json -> unit -val json_to_multiline_output: out_channel -> json -> unit +val json_to_string : ?sort_keys:bool -> ?pretty:bool -> json -> string + +val json_to_multiline : ?sort_keys:bool -> json -> string + +val json_to_output : out_channel -> json -> unit + +val json_to_multiline_output : out_channel -> json -> unit + val json_of_string : ?strict:bool -> string -> json + val json_of_file : ?strict:bool -> string -> json -val json_truncate : ?max_string_length:int -> ?max_child_count:int - -> ?max_depth:int -> ?max_total_count:int - -> ?has_changed:bool ref - -> json -> json -val json_truncate_string : ?max_string_length:int -> ?max_child_count:int - -> ?max_depth:int -> ?max_total_count:int - -> ?allowed_total_length:int -> ?if_reformat_multiline:bool - -> string -> string + +val json_truncate : + ?max_string_length:int -> + ?max_child_count:int -> + ?max_depth:int -> + ?max_total_count:int -> + ?has_changed:bool ref -> + json -> + json + +val json_truncate_string : + ?max_string_length:int -> + ?max_child_count:int -> + ?max_depth:int -> + ?max_total_count:int -> + ?allowed_total_length:int -> + ?if_reformat_multiline:bool -> + string -> + string val print_json_endline : ?pretty:bool -> json -> unit + val prerr_json_endline : ?pretty:bool -> json -> unit val get_object_exn : json -> (string * json) list + val get_array_exn : json -> json list + val get_string_exn : json -> string + val get_number_exn : json -> string + val get_number_int_exn : json -> int + val get_bool_exn : json -> bool val opt_string_to_json : string option -> json + val opt_int_to_json : int option -> json val int_ : int -> json + val float_ : float -> json + val string_ : string -> json -(** Types and functions for monadic API for traversing a JSON object. *) +(* Types and functions for monadic API for traversing a JSON object. *) type json_type = | Object_t @@ -63,7 +88,7 @@ type json_type = | Integer_t | Bool_t -(** +(* * This module gives monadic recursive access to values within objects by key. * It uses Pervasives.result to manage control flow in the monad when an error * is encountered. 
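As a reading aid for the monadic Access interface documented here, a minimal usage sketch; the config value and the "server"/"port" keys are invented for illustration, and only combinators declared in this interface are used:

  let port_of_config (config : Hh_json.json) : int option =
    Hh_json.Access.(
      (* chain getters with >>= and collapse the result monad to an option *)
      to_option (return config >>= get_obj "server" >>= get_number_int "port"))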
It also tracks the backtrace of the keys accessed to give @@ -133,32 +158,33 @@ module type Access = sig type keytrace = string list type access_failure = - (** You can't access keys on a non-object JSON thing. *) + (* You can't access keys on a non-object JSON thing. *) | Not_an_object of keytrace - (** The key is missing. *) + (* The key is missing. *) | Missing_key_error of string * keytrace - (** The key has the wrong type. *) + (* The key has the wrong type. *) | Wrong_type_error of keytrace * json_type - (** Our type for the result monad. It isn't just the json because it tracks + (* Our type for the result monad. It isn't just the json because it tracks * a history of the keys traversed to arrive at the current point. This helps * produce more informative error states. *) - type 'a m = (('a * keytrace), access_failure) result + type 'a m = ('a * keytrace, access_failure) result val keytrace_to_string : keytrace -> string + val access_failure_to_string : access_failure -> string val return : 'a -> 'a m - val (>>=) : 'a m -> (('a * keytrace) -> 'b m) -> 'b m + val ( >>= ) : 'a m -> ('a * keytrace -> 'b m) -> 'b m - (** This is a comonad, but we need a little help to deal with failure *) + (* This is a comonad, but we need a little help to deal with failure *) val counit_with : (access_failure -> 'a) -> 'a m -> 'a - (** From the Error monad to the Option monad. Error states go to None. *) + (* From the Error monad to the Option monad. Error states go to None. *) val to_option : 'a m -> 'a option - (** + (* * The following getters operate on a JSON_Object by accessing keys on it, * and asserting the returned value has the given expected type (types * are asserted by which getter you choose to use). @@ -172,20 +198,27 @@ module type Access = sig * *) val get_obj : string -> json * keytrace -> json m + val get_bool : string -> json * keytrace -> bool m + val get_string : string -> json * keytrace -> string m + val get_number : string -> json * keytrace -> string m + val get_number_int : string -> json * keytrace -> int m - val get_array: string -> json * keytrace -> (json list) m - val get_val: string -> json * keytrace -> json m (* any expected type *) + val get_array : string -> json * keytrace -> json list m + val get_val : string -> json * keytrace -> json m (* any expected type *) end module Access : Access -val get_field : (json * Access.keytrace -> 'a Access.m) -> (string -> 'a) -> json -> 'a -val get_field_opt : (json * Access.keytrace -> 'a Access.m) -> json -> 'a option +val get_field : + (json * Access.keytrace -> 'a Access.m) -> (string -> 'a) -> json -> 'a + +val get_field_opt : + (json * Access.keytrace -> 'a Access.m) -> json -> 'a option module JsonKey : Set.OrderedType with type t = json diff --git a/hack/utils/hh_json/hh_json_helpers.ml b/hack/utils/hh_json/hh_json_helpers.ml index b25fcc20231..253ddb581cf 100644 --- a/hack/utils/hh_json/hh_json_helpers.ml +++ b/hack/utils/hh_json/hh_json_helpers.ml @@ -1,16 +1,16 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * - **) + * *) open Hh_json (************************************************************************) -(** Helpers for parsing & printing **) +(* Helpers for parsing & printing *) (************************************************************************) module Jget = struct @@ -39,118 +39,138 @@ module Jget = struct accessors will throw. 
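The Jget accessors changed in this hunk come in three families: the _opt ones return None on absence, the _d ones take a default, and the _exn ones raise. A small hedged sketch; the msg value and the "id"/"method" keys are invented for illustration:

  let describe (msg : Hh_json.json option) : string =
    let open Hh_json_helpers in
    let method_ = Jget.string_d msg "method" ~default:"<unknown>" in
    match Jget.int_opt msg "id" with
    | Some id -> Printf.sprintf "%s (id %d)" method_ id
    | None -> method_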
*) - let get_opt hhjson_getter json key = match json with + let get_opt hhjson_getter json key = + match json with | None -> None - | Some json -> match hhjson_getter key (json, []) with + | Some json -> + (match hhjson_getter key (json, []) with | Ok (r, _keytrace) -> Some r - | _ -> None + | _ -> None) - let get_exn opt_getter json key = match opt_getter json key with + let get_exn opt_getter json key = + match opt_getter json key with | None -> raise (Parse key) | Some v -> v - let int_string_opt (s: string option) : int option = match s with + let int_string_opt (s : string option) : int option = + match s with | None -> None | Some s -> - try Some (int_of_string s) - with Failure _ -> raise (Parse ("not an int: " ^ s)) + (try Some (int_of_string s) + with Failure _ -> raise (Parse ("not an int: " ^ s))) - let float_string_opt (s: string option) : float option = match s with + let float_string_opt (s : string option) : float option = + match s with | None -> None | Some s -> - try Some (float_of_string s) - with Failure _ -> raise (Parse ("not a float: " ^ s)) + (try Some (float_of_string s) + with Failure _ -> raise (Parse ("not a float: " ^ s))) - let list_opt (l: 'a list option) : 'a option list option = match l with + let list_opt (l : 'a list option) : 'a option list option = + match l with | None -> None | Some x -> Some (List.map (fun a -> Some a) x) (* Accessors which return None on absence *) let string_opt = get_opt Access.get_string + let bool_opt = get_opt Access.get_bool + let obj_opt = get_opt Access.get_obj + let val_opt = get_opt Access.get_val + let int_opt json key = get_opt Access.get_number json key |> int_string_opt - let float_opt json key = get_opt Access.get_number json key |> float_string_opt + + let float_opt json key = + get_opt Access.get_number json key |> float_string_opt + let array_opt json key = get_opt Access.get_array json key |> list_opt + (* array_opt lifts all the array's members into the "json option" monad *) (* Accessors which return a supplied default on absence *) let string_d json key ~default = Option.value (string_opt json key) ~default + let bool_d json key ~default = Option.value (bool_opt json key) ~default + let int_d json key ~default = Option.value (int_opt json key) ~default + let float_d json key ~default = Option.value (float_opt json key) ~default + let array_d json key ~default = Option.value (array_opt json key) ~default (* Accessors which throw "Error.Parse key" on absence *) let bool_exn = get_exn bool_opt + let string_exn = get_exn string_opt + let val_exn = get_exn val_opt + let int_exn = get_exn int_opt + let float_exn = get_exn float_opt + + let array_exn = get_exn array_opt + let obj_exn json key = Some (get_exn obj_opt json key) + (* obj_exn lifts the result into the "json option" monad *) end module Jprint = struct (* object_opt is like Hh_json.JSON_Object constructor except it takes key * (value option): if a value is None, then it omits this member. 
*) - let object_opt (keyvalues : (string * (json option)) list) : json = + let object_opt (keyvalues : (string * json option) list) : json = let rec filter keyvalues = match keyvalues with | [] -> [] | (_key, None) :: rest -> filter rest - | (key, Some value) :: rest -> (key, value) :: (filter rest) + | (key, Some value) :: rest -> (key, value) :: filter rest in JSON_Object (filter keyvalues) (* Convenience function to convert string list to JSON_Array *) - let string_array (l: string list) : json = - JSON_Array (List.map string_ l) + let string_array (l : string list) : json = JSON_Array (List.map string_ l) end - - (* Some ad-hoc JSON processing helpers. *) module AdhocJsonHelpers = struct - let try_get_val key json = let obj = Hh_json.get_object_exn json in Core_list.Assoc.find obj key let get_string_val key ?default json = let v = try_get_val key json in - match v, default with - | Some v, _ -> Hh_json.get_string_exn v - | None, Some def -> def - | None, None -> raise Not_found + match (v, default) with + | (Some v, _) -> Hh_json.get_string_exn v + | (None, Some def) -> def + | (None, None) -> raise Not_found let get_number_val key ?default json = let v = try_get_val key json in - match v, default with - | Some v, _ -> Hh_json.get_number_exn v - | None, Some def -> def - | None, None -> raise Not_found + match (v, default) with + | (Some v, _) -> Hh_json.get_number_exn v + | (None, Some def) -> def + | (None, None) -> raise Not_found let get_bool_val key ?default json = let v = try_get_val key json in - match v, default with - | Some v, _ -> Hh_json.get_bool_exn v - | None, Some def -> def - | None, None -> raise Not_found + match (v, default) with + | (Some v, _) -> Hh_json.get_bool_exn v + | (None, Some def) -> def + | (None, None) -> raise Not_found let get_array_val key ?default json = let v = try_get_val key json in - match v, default with - | Some v, _ -> Hh_json.get_array_exn v - | None, Some def -> def - | None, None -> raise Not_found + match (v, default) with + | (Some v, _) -> Hh_json.get_array_exn v + | (None, Some def) -> def + | (None, None) -> raise Not_found let strlist args = - Hh_json.JSON_Array begin - List.map (fun arg -> Hh_json.JSON_String arg) args - end + Hh_json.JSON_Array (List.map (fun arg -> Hh_json.JSON_String arg) args) (* Useful for building an array like [ "suffix", [".txt", ".js", ".php" ]] *) let assoc_strlist name args = @@ -159,8 +179,5 @@ module AdhocJsonHelpers = struct (* Prepend a string to a JSON array of strings. pred stands for predicate, * because that's how they are typically represented in watchman. See e.g. * https://facebook.github.io/watchman/docs/expr/allof.html *) - let pred name args = - let open Hh_json in - JSON_Array (JSON_String name :: args) - + let pred name args = Hh_json.(JSON_Array (JSON_String name :: args)) end diff --git a/hack/utils/hh_logger.ml b/hack/utils/hh_logger.ml deleted file mode 100644 index 7e089662c8f..00000000000 --- a/hack/utils/hh_logger.ml +++ /dev/null @@ -1,112 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -let timestamp_string () = - let open Unix in - let tm = localtime (time ()) in - let ms = int_of_float (gettimeofday () *. 1000.) 
mod 1000 in - let year = tm.tm_year + 1900 in - Printf.sprintf "[%d-%02d-%02d %02d:%02d:%02d.%03d]" - year (tm.tm_mon + 1) tm.tm_mday tm.tm_hour tm.tm_min tm.tm_sec ms - -(* We might want to log to both stderr and a file. Shelling out to tee isn't cross-platform. - * We could dup2 stderr to a pipe and have a child process write to both original stderr and the - * file, but that's kind of overkill. This is good enough *) -let dupe_log: (string * out_channel) option ref = ref None -let set_log filename fd = - dupe_log := Some (filename, fd) -let get_log_name () = Option.map !dupe_log ~f:fst - -let print_with_newline = - let print_raw ?exn s = - let time = timestamp_string () in - let exn_str = Option.value_map exn ~default:"" ~f:(fun exn -> - let bt = String_utils.indent 8 @@ String.trim @@ Printexc.get_backtrace () in - let bt = if bt = "" then "" else ("\n Backtrace:\n" ^ bt) in - Printf.sprintf "\n Exception: %s%s" (Printexc.to_string exn) bt - ) in - begin match !dupe_log with - | None -> () - | Some (_, dupe_log_oc) -> Printf.fprintf dupe_log_oc "%s %s%s\n%!" time s exn_str end; - Printf.eprintf "%s %s%s\n%!" time s exn_str - in - fun ?exn fmt -> Printf.ksprintf (print_raw ?exn) fmt - -let print_duration name t = - let t2 = Unix.gettimeofday () in - print_with_newline "%s: %f" name (t2 -. t); - t2 - -let exc_with_dodgy_backtrace ?(prefix: string = "") (e: exn) : unit = - (* TODO - delete this function and use call normal Hh_logger functions with ~exn *) - print_with_newline "%s%s" prefix (Printexc.to_string e); - Printexc.print_backtrace stderr; - () - -let exc ?(prefix: string = "") ~(stack: string) (e: exn) : unit = - print_with_newline "%s%s\n%s" prefix (Printexc.to_string e) stack - -module Level : sig - type t = - | Off - | Fatal - | Error - | Warn - | Info - | Debug - val min_level : unit -> t - val set_min_level : t -> unit - val passes_min_level: t -> bool - val log : t -> ?exn:exn -> ('a, unit, string, string, string, unit) format6 -> 'a - val log_duration : t -> string -> float -> float -end = struct - type t = - | Off - | Fatal - | Error - | Warn - | Info - | Debug - - let int_of_level = function - | Off -> 6 - | Fatal -> 5 - | Error -> 4 - | Warn -> 3 - | Info -> 2 - | Debug -> 1 - - let min_level_ref = ref Info - let min_level () = !min_level_ref - let set_min_level level = min_level_ref := level - - let passes_min_level level = - int_of_level level >= int_of_level !min_level_ref - - let log level ?exn fmt = - if passes_min_level level - then print_with_newline ?exn fmt - else Printf.ifprintf () fmt - - let log_duration level fmt t = - if passes_min_level level - then print_duration fmt t - else t - -end - -(* Default log instructions to INFO level *) -let log ?(lvl=Level.Info) fmt = Level.log lvl fmt -let log_duration fmt t = Level.log_duration Level.Info fmt t - -let fatal ?exn fmt = Level.log Level.Fatal ?exn fmt -let error ?exn fmt = Level.log Level.Error ?exn fmt -let warn ?exn fmt = Level.log Level.Warn ?exn fmt -let info ?exn fmt = Level.log Level.Info ?exn fmt -let debug ?exn fmt = Level.log Level.Debug ?exn fmt diff --git a/hack/utils/http_lite.ml b/hack/utils/http_lite.ml deleted file mode 100644 index 12b793e404c..00000000000 --- a/hack/utils/http_lite.ml +++ /dev/null @@ -1,93 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * -*) - -open Hh_core - -(* This is a lightweight library for reading and writing messages in the HTTP - format, with headers and body. So far it only supports the small set of - features needed by the Language Server Protocol. It follows the internet - robustness principle of being as permissive as possible in what it expects, - i.e. no validation beyond what it essentially needs. *) - -exception Malformed of string - -(** read_headers: reads LF/CRLF-terminated lines until it gets an empty line *) -let read_headers (reader: Buffered_line_reader.t) : string list = - let rec read_internal acc = - try - match Buffered_line_reader.get_next_line reader with - | "" -> acc - | line -> read_internal (line::acc) - with Unix.Unix_error _ -> - raise (Malformed "Can't read next header") - in - List.rev (read_internal []) - -(** parse_headers_to_lowercase_map: turns list of "Key: Value" string headers - * into a map, with keys normalized to lower-case. HTTP actually allows - * multiple headers of the same key, but we prefer the simplicity of - * returning just a string map so we only take the last header for - * a given key. Note: if any header isn't in Key:Value format, we ignore it. *) -let parse_headers_to_lowercase_map (headers: string list) : string SMap.t = - let rec parse_internal acc = function - | [] -> acc - | line :: rest -> - begin match Str.bounded_split (Str.regexp ":") line 2 with - | [k; v] -> let k', v' = String.lowercase_ascii k, String.trim v in - parse_internal (SMap.add k' v' acc) rest - | _ -> parse_internal acc rest - end - in - parse_internal SMap.empty headers - -(** parse_charset: given a Content-Type value like "mime/type; charset=foo" - * it returns the "foo" bit of it, if present. - * https://www.w3.org/Protocols/rfc1341/4_Content-Type.html - * Note: RFC822 allows comments in this string, but we don't handle them. - *) -let parse_charset (header_value: string) : string option = - (* charset_value: if given a param string "charset=b" then it returns Some b *) - let charset_value param = match Str.bounded_split (Str.regexp "=") param 2 with - | [k; v] when (String.trim k) = "charset" -> Some (String.trim v) - | _ -> None - in - match Str.split (Str.regexp ";") header_value with - | _mime_type :: params -> List.find_map params ~f:charset_value - | _ -> None - -(** read_message_utf8: reads an http-style message "Headers...Body" - * The headers must include at least Content-Length (to know how long is - * the body). If they also include Content-Type, then the charset must be utf-8 - * or absent. Errors in these respects produce a Malformed exception. - * The content of all other headers are ignored. - * This function returns an OCaml string, which is a sequence of 8bit bytes, - * so it's up to the caller to handle any unicode characters and their - * encoding. 
*) -let read_message_utf8 (reader: Buffered_line_reader.t) : string = - let headers = read_headers reader |> parse_headers_to_lowercase_map in - let len = - try SMap.find "content-length" headers |> int_of_string - with _ -> raise (Malformed "Missing Content-Length") in - let charset = - try SMap.find "content-type" headers |> parse_charset - with _ -> None in - let body = Buffered_line_reader.get_next_bytes reader len in - if charset <> Some "utf-8" && charset <> None then - raise (Malformed ("Charset not utf-8")); - body - -(** write_message: writes "Content-Length:...body" *) -let write_message (outchan: out_channel) (body: string) : unit = - (* Without this, Windows will change the \r\n to \r\r\n *) - Pervasives.set_binary_mode_out outchan true; - - Printf.fprintf outchan "Content-Length: %n\r\n" (String.length body); - Printf.fprintf outchan "\r\n"; - Printf.fprintf outchan "%s" body; - flush outchan; diff --git a/hack/utils/http_lite/dune b/hack/utils/http_lite/dune new file mode 100644 index 00000000000..454211997f0 --- /dev/null +++ b/hack/utils/http_lite/dune @@ -0,0 +1,6 @@ +(library + (name http_lite) + (wrapped false) + (libraries + buffered_line_reader + utils_core)) diff --git a/hack/utils/http_lite/http_lite.ml b/hack/utils/http_lite/http_lite.ml new file mode 100644 index 00000000000..359a404fed7 --- /dev/null +++ b/hack/utils/http_lite/http_lite.ml @@ -0,0 +1,96 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Hh_core + +(* This is a lightweight library for reading and writing messages in the HTTP + format, with headers and body. So far it only supports the small set of + features needed by the Language Server Protocol. It follows the internet + robustness principle of being as permissive as possible in what it expects, + i.e. no validation beyond what it essentially needs. *) + +exception Malformed of string + +(** read_headers: reads LF/CRLF-terminated lines until it gets an empty line *) +let read_headers (reader : Buffered_line_reader.t) : string list = + let rec read_internal acc = + try + match Buffered_line_reader.get_next_line reader with + | "" -> acc + | line -> read_internal (line :: acc) + with Unix.Unix_error _ -> raise (Malformed "Can't read next header") + in + List.rev (read_internal []) + +(** parse_headers_to_lowercase_map: turns list of "Key: Value" string headers + * into a map, with keys normalized to lower-case. HTTP actually allows + * multiple headers of the same key, but we prefer the simplicity of + * returning just a string map so we only take the last header for + * a given key. Note: if any header isn't in Key:Value format, we ignore it. *) +let parse_headers_to_lowercase_map (headers : string list) : string SMap.t = + let rec parse_internal acc = function + | [] -> acc + | line :: rest -> + begin + match Str.bounded_split (Str.regexp ":") line 2 with + | [k; v] -> + let (k', v') = (String.lowercase_ascii k, String.trim v) in + parse_internal (SMap.add k' v' acc) rest + | _ -> parse_internal acc rest + end + in + parse_internal SMap.empty headers + +(** parse_charset: given a Content-Type value like "mime/type; charset=foo" + * it returns the "foo" bit of it, if present. + * https://www.w3.org/Protocols/rfc1341/4_Content-Type.html + * Note: RFC822 allows comments in this string, but we don't handle them. 
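Concretely, for the parse_charset implemented just below, a typical LSP-style Content-Type value resolves to its charset parameter. A small illustrative check; the header strings are invented:

  let () =
    assert (
      Http_lite.parse_charset "application/vscode-jsonrpc; charset=utf-8"
      = Some "utf-8" );
    (* a value with no charset parameter yields None *)
    assert (Http_lite.parse_charset "application/json" = None)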
+ *) +let parse_charset (header_value : string) : string option = + (* charset_value: if given a param string "charset=b" then it returns Some b *) + let charset_value param = + match Str.bounded_split (Str.regexp "=") param 2 with + | [k; v] when String.trim k = "charset" -> Some (String.trim v) + | _ -> None + in + match Str.split (Str.regexp ";") header_value with + | _mime_type :: params -> List.find_map params ~f:charset_value + | _ -> None + +(** read_message_utf8: reads an http-style message "Headers...Body" + * The headers must include at least Content-Length (to know how long is + * the body). If they also include Content-Type, then the charset must be utf-8 + * or absent. Errors in these respects produce a Malformed exception. + * The content of all other headers are ignored. + * This function returns an OCaml string, which is a sequence of 8bit bytes, + * so it's up to the caller to handle any unicode characters and their + * encoding. *) +let read_message_utf8 (reader : Buffered_line_reader.t) : string = + let headers = read_headers reader |> parse_headers_to_lowercase_map in + let len = + try SMap.find "content-length" headers |> int_of_string + with _ -> raise (Malformed "Missing Content-Length") + in + let charset = + (try SMap.find "content-type" headers |> parse_charset with _ -> None) + in + let body = Buffered_line_reader.get_next_bytes reader len in + if charset <> Some "utf-8" && charset <> None then + raise (Malformed "Charset not utf-8"); + body + +(** write_message: writes "Content-Length:...body" *) +let write_message (outchan : out_channel) (body : string) : unit = + (* Without this, Windows will change the \r\n to \r\r\n *) + Pervasives.set_binary_mode_out outchan true; + + Printf.fprintf outchan "Content-Length: %n\r\n" (String.length body); + Printf.fprintf outchan "\r\n"; + Printf.fprintf outchan "%s" body; + flush outchan diff --git a/hack/utils/http_lite/http_lite.mli b/hack/utils/http_lite/http_lite.mli new file mode 100644 index 00000000000..0f19b8f6b35 --- /dev/null +++ b/hack/utils/http_lite/http_lite.mli @@ -0,0 +1,26 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
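For reference, read_message_utf8 and write_message above both speak the LSP-style Content-Length framing: a header line, a blank line, then exactly that many bytes of body. A hand-rolled frame for comparison; this helper is illustrative and not part of the module:

  let frame_by_hand (body : string) : string =
    (* header, blank line, then the payload bytes *)
    Printf.sprintf "Content-Length: %d\r\n\r\n%s" (String.length body) body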
*) + +exception Malformed of string + +val read_headers : Buffered_line_reader.t -> string list + +val parse_headers_to_lowercase_map : string list -> string SMap.t + +val parse_charset : string -> string option + +val read_message_utf8 : Buffered_line_reader.t -> string + +val write_message : out_channel -> string -> unit diff --git a/hack/utils/jsonrpc.ml b/hack/utils/jsonrpc.ml deleted file mode 100644 index 37484e39c8d..00000000000 --- a/hack/utils/jsonrpc.ml +++ /dev/null @@ -1,325 +0,0 @@ -(* Wrapper for handling JSON-RPC *) -(* Spec: http://www.jsonrpc.org/specification *) -(* Practical readbable guide: https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#base-protocol-json-structures *) - -open Hh_core -module J = Hh_json_helpers.AdhocJsonHelpers - -type writer = Hh_json.json -> unit -type kind = Request | Notification | Response - -let kind_to_string (kind: kind) : string = - match kind with - | Request -> "Request" - | Notification -> "Notification" - | Response -> "Response" - -type message = { - json : Hh_json.json; (* the json payload *) - timestamp : float; (* time this message arrived at stdin *) - (* Following fields are decompositions of 'json'... *) - kind : kind; - method_ : string; (* mandatory for request+notification; empty otherwise *) - id : Hh_json.json option; (* mandatory for request+response *) - params : Hh_json.json option; (* optional for request+notification *) - result : Hh_json.json option; (* optional for response *) - error: Hh_json.json option; (* optional for response *) -} - -let message_to_short_string (c: message) : string = - let open Hh_json in - let disposition = match c.kind, c.result, c.error with - | Response, Some _, None -> "[result]" - | Response, None, Some _ -> "[error]" - | _, _, _ -> "" in - let method_ = match c.method_ with - | "" -> "" - | s -> Printf.sprintf "method=%s," s in - let id = match c.id with - | Some (JSON_String s) -> Printf.sprintf "id=\"%s\"" s - | Some (JSON_Number n) -> Printf.sprintf "id=#%s" n - | Some json -> Printf.sprintf "id=%s" (json_to_string json) - | None -> "id=[None]" - in - Printf.sprintf "{%s%s,%s%s}" (kind_to_string c.kind) disposition method_ id - -let parse_message ~(json: Hh_json.json) ~(timestamp: float) : message = - let id = J.try_get_val "id" json in - let method_opt = J.try_get_val "method" json - |> Option.map ~f:Hh_json.get_string_exn in - let method_ = Option.value method_opt ~default:"" in (* is easier to consume *) - let params = J.try_get_val "params" json in - let result = J.try_get_val "result" json in - let error = J.try_get_val "error" json in - (* Following categorization mostly mirrors that of VSCode except that *) - (* VSCode allows number+string+null ID for response, but we allow any ID. *) - let kind = match id, method_opt, result, error with - | Some _id, Some _method, _, _ -> Request - | None, Some _method, _, _ -> Notification - | _, _, Some _result, _ -> Response - | _, _, _, Some _error -> Response - | _ -> raise (Hh_json.Syntax_error "Not JsonRPC") - in - { json; timestamp; id; method_; params; result; error; kind; } - - -(***************************************************************) -(* Internal queue functions that run in the daemon process. 
*) -(***************************************************************) - -type queue = { - daemon_in_fd : Unix.file_descr; (* fd used by main process to read messages from queue *) - messages : queue_message Queue.t; -} - -and timestamped_json = { - tj_json: Hh_json.json; - tj_timestamp: float; -} - -and queue_message = - | Timestamped_json of timestamped_json - | Fatal_exception of Marshal_tools.remote_exception_data - | Recoverable_exception of Marshal_tools.remote_exception_data - -and daemon_operation = - | Read - | Write - - -(* Try to read a message from the daemon's stdin, which is where all of the - editor messages can be read from. May throw if the message is malformed. *) -let internal_read_message (reader : Buffered_line_reader.t) : timestamped_json = - let message = reader |> Http_lite.read_message_utf8 in - let tj_json = Hh_json.json_of_string message in - let tj_timestamp = Unix.gettimeofday () - in - { tj_json; tj_timestamp; } - - -(* Reads messages from the editor on stdin, parses them, and sends them to the - main process. - This runs in a different process because we also timestamp the messages, so - we need to read them as soon as they come in. That is, we can't wait for any - server computation to finish if we want to get an accurate timestamp. *) -let internal_run_daemon' (oc : queue_message Daemon.out_channel) : unit = - let out_fd = Daemon.descr_of_out_channel oc in - let reader = Buffered_line_reader.create Unix.stdin in - let messages_to_send = Queue.create () in - - let rec loop () = - let operation = - if Buffered_line_reader.has_buffered_content reader - then Read - else begin - let read_fds = [Unix.stdin] in - let has_messages_to_send = not (Queue.is_empty messages_to_send) in - let write_fds = - if has_messages_to_send - then [out_fd] - else [] - in - - (* Note that if there are no queued messages, this will always block - until we're ready to read, rather than returning `Write`, even if - stdout is capable of being written to. Furthermore, we will never - need to queue a message to be written until we have read - something. *) - let readable_fds, _, _ = Unix.select read_fds write_fds [] (-1.0) in - let ready_for_read = not (List.is_empty readable_fds) in - if ready_for_read - then Read - else Write - end - in - - let should_continue = match operation with - | Read -> begin - try - let timestamped_json = internal_read_message reader in - Queue.push timestamped_json messages_to_send; - true - with e -> - let message = Printexc.to_string e in - let stack = Printexc.get_backtrace () in - let edata = { Marshal_tools.message; stack; } in - let (should_continue, marshal) = match e with - | Hh_json.Syntax_error _ -> true, Recoverable_exception edata - | _ -> false, Fatal_exception edata - in - Marshal_tools.to_fd_with_preamble out_fd marshal |> ignore; - should_continue - end - | Write -> - assert (not (Queue.is_empty messages_to_send)); - let timestamped_json = Queue.pop messages_to_send in - (* We can assume that the entire write will succeed, since otherwise - Marshal_tools.to_fd_with_preamble will throw an exception. *) - Marshal_tools.to_fd_with_preamble out_fd (Timestamped_json timestamped_json) |> ignore; - true - in - if should_continue then loop () - in - loop () - -(* Main function for the daemon process. 
*) -let internal_run_daemon - (_dummy_param : unit) - (_ic, (oc : queue_message Daemon.out_channel)) = - Printexc.record_backtrace true; - try - internal_run_daemon' oc - with e -> - (* An exception that's gotten here is not simply a parse error, but - something else, so we should terminate the daemon at this point. *) - let message = Printexc.to_string e in - let stack = Printexc.get_backtrace () in - try - let out_fd = Daemon.descr_of_out_channel oc in - Marshal_tools.to_fd_with_preamble out_fd - (Fatal_exception { Marshal_tools.message; stack; }) - |> ignore - with _ -> - (* There may be a broken pipe, for example. We should just give up on - reporting the error. *) - () - - -let internal_entry_point : (unit, unit, queue_message) Daemon.entry = - Daemon.register_entry_point "Jsonrpc" internal_run_daemon - - -(************************************************) -(* Queue functions that run in the main process *) -(************************************************) - -let make_queue () : queue = - let handle = Daemon.spawn - ~channel_mode:`pipe - (* We don't technically need to inherit stdout or stderr, but this might be - useful in the event that we throw an unexpected exception in the daemon. - It's also useful for print-statement debugging of the daemon. *) - (Unix.stdin, Unix.stdout, Unix.stderr) - internal_entry_point - () - in - let (ic, _) = handle.Daemon.channels in - { - daemon_in_fd = Daemon.descr_of_in_channel ic; - messages = Queue.create (); - } - -let get_read_fd (queue : queue) : Unix.file_descr = - queue.daemon_in_fd - -(* Read a message into the queue, and return the just-read message. *) -let read_single_message_into_queue_blocking (message_queue : queue) = - let message = - try Marshal_tools.from_fd_with_preamble message_queue.daemon_in_fd - with End_of_file as e -> - (* This is different from when the client hangs up. It handles the case - that the daemon process exited: for example, if it was killed. *) - let message = Printexc.to_string e in - let stack = Printexc.get_backtrace () in - Fatal_exception { Marshal_tools.message; stack; } - in - - Queue.push message message_queue.messages; - message - -let rec read_messages_into_queue_nonblocking (message_queue : queue) : unit = - let readable_fds, _, _ = Unix.select [message_queue.daemon_in_fd] [] [] 0.0 in - if not (List.is_empty readable_fds) then begin - (* We're expecting this not to block because we just checked `Unix.select` - to make sure that there's something there. *) - let message = read_single_message_into_queue_blocking message_queue in - - (* Now read any more messages that might be queued up. Only try to read more - messages if the daemon is still available to read from. Otherwise, we may - infinite loop as a result of `Unix.select` returning that a file - descriptor is available to read on. *) - match message with - | Fatal_exception _ -> () - | _ -> read_messages_into_queue_nonblocking message_queue; - end - -let has_message (queue : queue) : bool = - read_messages_into_queue_nonblocking queue; - not (Queue.is_empty queue.messages) - -let get_message (queue : queue) = - (* Read one in a blocking manner to ensure that we have one. *) - if Queue.is_empty queue.messages - then ignore (read_single_message_into_queue_blocking queue); - (* Then read any others that got queued up so that we can see the maximum - number of messages at once for invalidation purposes. 
*) - read_messages_into_queue_nonblocking queue; - - let item = Queue.pop queue.messages in - match item with - | Timestamped_json {tj_json; tj_timestamp;} -> `Message (parse_message tj_json tj_timestamp) - | Fatal_exception data -> `Fatal_exception data - | Recoverable_exception data -> `Recoverable_exception data - - -(************************************************) -(* Output functions for respond+notify *) -(************************************************) - -let last_sent_ref : Hh_json.json option ref = ref None - -let clear_last_sent () : unit = - last_sent_ref := None - -let last_sent () : Hh_json.json option = - !last_sent_ref - -(* respond: sends either a Response or an Error message, according - to whether the json has an error-code or not. *) -let respond - (writer: writer) - (in_response_to: message) - (result_or_error: Hh_json.json) - : unit = - let open Hh_json in - let is_error = match result_or_error with - | JSON_Object _ -> - J.try_get_val "code" result_or_error - |> Option.is_some - | _ -> false in - let response = JSON_Object ( - ["jsonrpc", JSON_String "2.0"] - @ - ["id", Option.value in_response_to.id ~default:JSON_Null] - @ - (if is_error then ["error", result_or_error] else ["result", result_or_error]) - ) - in - last_sent_ref := Some response; - writer response - - -(* notify: sends a Notify message *) -let notify (writer: writer) (method_: string) (params: Hh_json.json) - : unit = - let open Hh_json in - let message = JSON_Object [ - "jsonrpc", JSON_String "2.0"; - "method", JSON_String method_; - "params", params; - ] - in - last_sent_ref := Some message; - writer message - - -(************************************************) -(* Output functions for request *) -(************************************************) - - -let requests_counter: IMap.key ref = ref 0 - -let get_next_request_id () : int = - incr requests_counter; - !requests_counter diff --git a/hack/utils/jsonrpc.mli b/hack/utils/jsonrpc.mli deleted file mode 100644 index b67b4f09e85..00000000000 --- a/hack/utils/jsonrpc.mli +++ /dev/null @@ -1,48 +0,0 @@ -(** - * Copyright (c) 2017, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type writer = Hh_json.json -> unit -type kind = Request | Notification | Response -val kind_to_string : kind -> string - -type message = { - json : Hh_json.json; (* the json payload *) - timestamp : float; (* time this message arrived at stdin *) - (* Following fields are decompositions of 'json'... *) - kind : kind; - method_ : string; (* mandatory for request+notification; empty otherwise *) - id : Hh_json.json option; (* mandatory for request+response *) - params : Hh_json.json option; (* optional for request+notification *) - result : Hh_json.json option; (* optional for response *) - error: Hh_json.json option; (* optional for response *) -} - -val parse_message : json:Hh_json.json -> timestamp:float -> message -val message_to_short_string : message -> string - -type queue -val make_queue : unit -> queue (* must call Daemon.entry_point at start of your main *) -val get_read_fd : queue -> Unix.file_descr (* can be used for 'select' *) -val has_message : queue -> bool -val get_message : queue -> [> -| `Message of message -| `Fatal_exception of Marshal_tools.remote_exception_data -| `Recoverable_exception of Marshal_tools.remote_exception_data ] - -(* 'respond to_this with_that' is for replying to a JsonRPC request. 
It will send either *) -(* a response or an error depending on whether 'with_that' has an error id in it. *) -val respond : writer -> message -> Hh_json.json -> unit -(* notify/request are for initiating JsonRPC messages *) -val notify : writer -> string -> Hh_json.json -> unit -val get_next_request_id : unit -> int - -(* For logging purposes, you can get a copy of which JsonRPC message was last *) -(* sent by this module - be it a response, notification, request or cancellation *) -val last_sent : unit -> Hh_json.json option -val clear_last_sent : unit -> unit diff --git a/hack/utils/jsonrpc/dune b/hack/utils/jsonrpc/dune new file mode 100644 index 00000000000..145aeeef68b --- /dev/null +++ b/hack/utils/jsonrpc/dune @@ -0,0 +1,12 @@ +(library + (name jsonrpc) + (wrapped false) + (libraries + buffered_line_reader + hh_json + http_lite + marshal_tools + marshal_tools_lwt + sys_utils) + (preprocess + (pps lwt_ppx ppx_deriving.std ppx_deriving.enum))) diff --git a/hack/utils/jsonrpc/jsonrpc.ml b/hack/utils/jsonrpc/jsonrpc.ml new file mode 100644 index 00000000000..b3f147f7ec5 --- /dev/null +++ b/hack/utils/jsonrpc/jsonrpc.ml @@ -0,0 +1,368 @@ +(* Wrapper for handling JSON-RPC *) +(* Spec: http://www.jsonrpc.org/specification *) +(* Practical readbable guide: https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#base-protocol-json-structures *) + +open Hh_core +module J = Hh_json_helpers.AdhocJsonHelpers + +type writer = Hh_json.json -> unit + +type kind = + | Request + | Notification + | Response + +let kind_to_string (kind : kind) : string = + match kind with + | Request -> "Request" + | Notification -> "Notification" + | Response -> "Response" + +type message = { + json: Hh_json.json; + (* the json payload *) + timestamp: float; + (* time this message arrived at stdin *) + (* Following fields are decompositions of 'json'... *) + kind: kind; + method_: string; + (* mandatory for request+notification; empty otherwise *) + id: Hh_json.json option; + (* mandatory for request+response *) + params: Hh_json.json option; + (* optional for request+notification *) + result: Hh_json.json option; + (* optional for response *) + error: Hh_json.json option; (* optional for response *) +} + +let message_to_short_string (c : message) : string = + Hh_json.( + let disposition = + match (c.kind, c.result, c.error) with + | (Response, Some _, None) -> "[result]" + | (Response, None, Some _) -> "[error]" + | (_, _, _) -> "" + in + let method_ = + match c.method_ with + | "" -> "" + | s -> Printf.sprintf "method=%s," s + in + let id = + match c.id with + | Some (JSON_String s) -> Printf.sprintf "id=\"%s\"" s + | Some (JSON_Number n) -> Printf.sprintf "id=#%s" n + | Some json -> Printf.sprintf "id=%s" (json_to_string json) + | None -> "id=[None]" + in + Printf.sprintf "{%s%s,%s%s}" (kind_to_string c.kind) disposition method_ id) + +let parse_message ~(json : Hh_json.json) ~(timestamp : float) : message = + let id = J.try_get_val "id" json in + let method_opt = + J.try_get_val "method" json |> Option.map ~f:Hh_json.get_string_exn + in + let method_ = Option.value method_opt ~default:"" in + (* is easier to consume *) + let params = J.try_get_val "params" json in + let result = J.try_get_val "result" json in + let error = J.try_get_val "error" json in + (* Following categorization mostly mirrors that of VSCode except that *) + (* VSCode allows number+string+null ID for response, but we allow any ID. 
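To make the request/notification/response categorization concrete, a hedged sketch of classifying one payload; the JSON literal is invented, and the module is assumed to be consumed as Jsonrpc, matching the interface this file replaces:

  let () =
    let json =
      Hh_json.json_of_string {|{"jsonrpc":"2.0","id":1,"method":"shutdown"}|}
    in
    let msg = Jsonrpc.parse_message ~json ~timestamp:(Unix.gettimeofday ()) in
    (* id + method => Request; method only => Notification;
       result or error => Response *)
    assert (msg.Jsonrpc.kind = Jsonrpc.Request)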
*) + let kind = + match (id, method_opt, result, error) with + | (Some _id, Some _method, _, _) -> Request + | (None, Some _method, _, _) -> Notification + | (_, _, Some _result, _) -> Response + | (_, _, _, Some _error) -> Response + | _ -> raise (Hh_json.Syntax_error "Not JsonRPC") + in + { json; timestamp; id; method_; params; result; error; kind } + +(***************************************************************) +(* Internal queue functions that run in the daemon process. *) +(***************************************************************) + +type queue = { + daemon_in_fd: Unix.file_descr; + (* fd used by main process to read messages from queue *) + messages: queue_message Queue.t; +} + +and timestamped_json = { + tj_json: Hh_json.json; + tj_timestamp: float; +} + +and queue_message = + | Timestamped_json of timestamped_json + | Fatal_exception of Marshal_tools.remote_exception_data + | Recoverable_exception of Marshal_tools.remote_exception_data + +and daemon_operation = + | Read + | Write + +(* Try to read a message from the daemon's stdin, which is where all of the + editor messages can be read from. May throw if the message is malformed. *) +let internal_read_message (reader : Buffered_line_reader.t) : timestamped_json + = + let message = reader |> Http_lite.read_message_utf8 in + let tj_json = Hh_json.json_of_string message in + let tj_timestamp = Unix.gettimeofday () in + { tj_json; tj_timestamp } + +(* Reads messages from the editor on stdin, parses them, and sends them to the + main process. + This runs in a different process because we also timestamp the messages, so + we need to read them as soon as they come in. That is, we can't wait for any + server computation to finish if we want to get an accurate timestamp. *) +let internal_run_daemon' (oc : queue_message Daemon.out_channel) : unit = + let out_fd = Daemon.descr_of_out_channel oc in + let reader = Buffered_line_reader.create Unix.stdin in + let messages_to_send = Queue.create () in + let rec loop () = + let operation = + if Buffered_line_reader.has_buffered_content reader then + Read + else + let read_fds = [Unix.stdin] in + let has_messages_to_send = not (Queue.is_empty messages_to_send) in + let write_fds = + if has_messages_to_send then + [out_fd] + else + [] + in + (* Note that if there are no queued messages, this will always block + until we're ready to read, rather than returning `Write`, even if + stdout is capable of being written to. Furthermore, we will never + need to queue a message to be written until we have read + something. 
*) + let (readable_fds, _, _) = Unix.select read_fds write_fds [] (-1.0) in + let ready_for_read = not (List.is_empty readable_fds) in + if ready_for_read then + Read + else + Write + in + let should_continue = + match operation with + | Read -> + begin + try + let timestamped_json = internal_read_message reader in + Queue.push timestamped_json messages_to_send; + true + with e -> + let message = Printexc.to_string e in + let stack = Printexc.get_backtrace () in + let edata = { Marshal_tools.message; stack } in + let (should_continue, marshal) = + match e with + | Hh_json.Syntax_error _ -> (true, Recoverable_exception edata) + | _ -> (false, Fatal_exception edata) + in + Marshal_tools.to_fd_with_preamble out_fd marshal |> ignore; + should_continue + end + | Write -> + assert (not (Queue.is_empty messages_to_send)); + let timestamped_json = Queue.pop messages_to_send in + (* We can assume that the entire write will succeed, since otherwise + Marshal_tools.to_fd_with_preamble will throw an exception. *) + Marshal_tools.to_fd_with_preamble + out_fd + (Timestamped_json timestamped_json) + |> ignore; + true + in + if should_continue then loop () + in + loop () + +(* Main function for the daemon process. *) +let internal_run_daemon + (_dummy_param : unit) (_ic, (oc : queue_message Daemon.out_channel)) = + Printexc.record_backtrace true; + try internal_run_daemon' oc + with e -> + (* An exception that's gotten here is not simply a parse error, but + something else, so we should terminate the daemon at this point. *) + let message = Printexc.to_string e in + let stack = Printexc.get_backtrace () in + (try + let out_fd = Daemon.descr_of_out_channel oc in + Marshal_tools.to_fd_with_preamble + out_fd + (Fatal_exception { Marshal_tools.message; stack }) + |> ignore + with _ -> + (* There may be a broken pipe, for example. We should just give up on + reporting the error. *) + ()) + +let internal_entry_point : (unit, unit, queue_message) Daemon.entry = + Daemon.register_entry_point "Jsonrpc" internal_run_daemon + +(************************************************) +(* Queue functions that run in the main process *) +(************************************************) + +let make_queue () : queue = + let handle = + Daemon.spawn + ~channel_mode:`pipe + (* We don't technically need to inherit stdout or stderr, but this might be + useful in the event that we throw an unexpected exception in the daemon. + It's also useful for print-statement debugging of the daemon. *) + (Unix.stdin, Unix.stdout, Unix.stderr) + internal_entry_point + () + in + let (ic, _) = handle.Daemon.channels in + { daemon_in_fd = Daemon.descr_of_in_channel ic; messages = Queue.create () } + +let get_read_fd (queue : queue) : Unix.file_descr = queue.daemon_in_fd + +(* Read a message into the queue, and return the just-read message. *) +let read_single_message_into_queue_wait (message_queue : queue) : + queue_message Lwt.t = + let%lwt message = + try%lwt + let%lwt message = + Marshal_tools_lwt.from_fd_with_preamble + (Lwt_unix.of_unix_file_descr message_queue.daemon_in_fd) + in + Lwt.return message + with End_of_file as e -> + (* This is different from when the client hangs up. It handles the case + that the daemon process exited: for example, if it was killed. 
*) + let message = Printexc.to_string e in + let stack = Printexc.get_backtrace () in + Lwt.return (Fatal_exception { Marshal_tools.message; stack }) + in + Queue.push message message_queue.messages; + Lwt.return message + +let rec read_messages_into_queue_no_wait (message_queue : queue) : unit Lwt.t = + let is_readable = + Lwt_unix.readable (Lwt_unix.of_unix_file_descr message_queue.daemon_in_fd) + in + let%lwt () = + if is_readable then + (* We're expecting this not to block because we just checked + to make sure that there's something there. *) + let%lwt message = read_single_message_into_queue_wait message_queue in + (* Now read any more messages that might be queued up. Only try to read more + messages if the daemon is still available to read from. Otherwise, we may + infinite loop as a result of `Unix.select` returning that a file + descriptor is available to read on. *) + match message with + | Fatal_exception _ -> Lwt.return_unit + | _ -> + let%lwt () = read_messages_into_queue_no_wait message_queue in + Lwt.return_unit + else + Lwt.return_unit + in + Lwt.return_unit + +let has_message (queue : queue) : bool = + let is_readable = + Lwt_unix.readable (Lwt_unix.of_unix_file_descr queue.daemon_in_fd) + in + is_readable || not (Queue.is_empty queue.messages) + +let get_message (queue : queue) = + (* Read one in a blocking manner to ensure that we have one. *) + let%lwt () = + if Queue.is_empty queue.messages then + let%lwt (_message : queue_message) = + read_single_message_into_queue_wait queue + in + Lwt.return_unit + else + Lwt.return_unit + in + (* Then read any others that got queued up so that we can see the maximum + number of messages at once for invalidation purposes. *) + let%lwt () = read_messages_into_queue_no_wait queue in + let item = Queue.pop queue.messages in + match item with + | Timestamped_json { tj_json; tj_timestamp } -> + Lwt.return (`Message (parse_message tj_json tj_timestamp)) + | Fatal_exception data -> Lwt.return (`Fatal_exception data) + | Recoverable_exception data -> Lwt.return (`Recoverable_exception data) + +(************************************************) +(* Output functions for respond+notify *) +(************************************************) + +let last_sent_ref : Hh_json.json option ref = ref None + +let clear_last_sent () : unit = last_sent_ref := None + +let last_sent () : Hh_json.json option = !last_sent_ref + +(* respond: sends either a Response or an Error message, according + to whether the json has an error-code or not. 
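For example (hypothetical payloads, purely illustrative of the dispatch below):

    respond writer msg (JSON_Object [("code", JSON_Number "-32601"); ("message", JSON_String "not found")])
      (* has a "code" member, so it is sent as {"jsonrpc":"2.0", "id":..., "error":{...}} *)

    respond writer msg (JSON_Object [("capabilities", JSON_Object [])])
      (* no "code" member, so it is sent as {"jsonrpc":"2.0", "id":..., "result":{...}} *)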
*) +let respond + (writer : writer) + ?(powered_by : string option) + (in_response_to : message) + (result_or_error : Hh_json.json) : unit = + Hh_json.( + let is_error = + match result_or_error with + | JSON_Object _ -> J.try_get_val "code" result_or_error |> Option.is_some + | _ -> false + in + let response = + JSON_Object + ( [("jsonrpc", JSON_String "2.0")] + @ [("id", Option.value in_response_to.id ~default:JSON_Null)] + @ ( if is_error then + [("error", result_or_error)] + else + [("result", result_or_error)] ) + @ + match powered_by with + | Some powered_by -> [("powered_by", JSON_String powered_by)] + | None -> [] ) + in + last_sent_ref := Some response; + writer response) + +(* notify: sends a Notify message *) +let notify + (writer : writer) + ?(powered_by : string option) + (method_ : string) + (params : Hh_json.json) : unit = + Hh_json.( + let message = + JSON_Object + ( [ + ("jsonrpc", JSON_String "2.0"); + ("method", JSON_String method_); + ("params", params); + ] + @ + match powered_by with + | Some powered_by -> [("powered_by", JSON_String powered_by)] + | None -> [] ) + in + last_sent_ref := Some message; + writer message) + +(************************************************) +(* Output functions for request *) +(************************************************) + +let requests_counter : IMap.key ref = ref 0 + +let get_next_request_id () : int = + incr requests_counter; + !requests_counter diff --git a/hack/utils/jsonrpc/jsonrpc.mli b/hack/utils/jsonrpc/jsonrpc.mli new file mode 100644 index 00000000000..2a305b872d7 --- /dev/null +++ b/hack/utils/jsonrpc/jsonrpc.mli @@ -0,0 +1,73 @@ +(* + * Copyright (c) 2017, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type writer = Hh_json.json -> unit + +type kind = + | Request + | Notification + | Response + +val kind_to_string : kind -> string + +type message = { + json: Hh_json.json; + (* the json payload *) + timestamp: float; + (* time this message arrived at stdin *) + (* Following fields are decompositions of 'json'... *) + kind: kind; + method_: string; + (* mandatory for request+notification; empty otherwise *) + id: Hh_json.json option; + (* mandatory for request+response *) + params: Hh_json.json option; + (* optional for request+notification *) + result: Hh_json.json option; + (* optional for response *) + error: Hh_json.json option; (* optional for response *) +} + +val parse_message : json:Hh_json.json -> timestamp:float -> message + +val message_to_short_string : message -> string + +type queue + +(* must call Daemon.entry_point at start of your main *) +val make_queue : unit -> queue + +val get_read_fd : queue -> Unix.file_descr (* can be used for 'select' *) + +val has_message : queue -> bool + +val get_message : + queue -> + [> `Message of message + | `Fatal_exception of Marshal_tools.remote_exception_data + | `Recoverable_exception of Marshal_tools.remote_exception_data + ] + Lwt.t + +(* 'respond to_this with_that' is for replying to a JsonRPC request. It will send either *) +(* a response or an error depending on whether 'with_that' has an error id in it. *) +(* [powered_by] is our own non-standard extension to JsonRPC, which lets the +client know which back-end served the request. 
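As a usage sketch of this interface (illustrative only: `to_stdout`, `handle`, `serve` and the `"hh_server"` tag are hypothetical, `lwt_ppx` is assumed for `let%lwt`, and a real LSP client would additionally expect Content-Length framing on the output side):

    let to_stdout (json : Hh_json.json) : unit =
      print_endline (Hh_json.json_to_string json)

    let handle (msg : Jsonrpc.message) : unit =
      match msg.Jsonrpc.kind with
      | Jsonrpc.Request ->
        (* reply to the request, tagging which back-end served it *)
        Jsonrpc.respond to_stdout ~powered_by:"hh_server" msg (Hh_json.JSON_Object [])
      | Jsonrpc.Notification | Jsonrpc.Response -> ()

    let rec serve (queue : Jsonrpc.queue) : unit Lwt.t =
      (* get_message waits until at least one message is available *)
      let%lwt item = Jsonrpc.get_message queue in
      (match item with
      | `Message msg -> handle msg
      | `Recoverable_exception _ -> ()
      | `Fatal_exception _ -> failwith "Jsonrpc daemon died");
      serve queue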
*) +val respond : writer -> ?powered_by:string -> message -> Hh_json.json -> unit + +(* notify/request are for initiating JsonRPC messages *) +val notify : writer -> ?powered_by:string -> string -> Hh_json.json -> unit + +val get_next_request_id : unit -> int + +(* For logging purposes, you can get a copy of which JsonRPC message was last *) +(* sent by this module - be it a response, notification, request or cancellation *) +val last_sent : unit -> Hh_json.json option + +val clear_last_sent : unit -> unit diff --git a/hack/utils/line_break_map.ml b/hack/utils/line_break_map.ml index 86206294ed3..359be55b6e5 100644 --- a/hack/utils/line_break_map.ml +++ b/hack/utils/line_break_map.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * @@ -10,11 +10,16 @@ type t = int array [@@deriving show] let last_offset = ref 0 + let curr_index = ref 0 -let make text = +let reset_global_state () = last_offset := 0; - curr_index := 0; + curr_index := 0 + +let make text = + reset_global_state (); + (* Clever Tricks Warning * --------------------- * We prepend 0, so as to make the invariant hold that there is always a @@ -29,54 +34,68 @@ let make text = let newline_list = let result = ref [] in for i = 1 to len do - let prev = text.[i-1] in - if prev = '\r' && text.[i] != '\n' || prev = '\n' - then result := i :: !result; + let prev = text.[i - 1] in + if (prev = '\r' && text.[i] != '\n') || prev = '\n' then + result := i :: !result done; (match !result with - | (r :: _) as rs when r <> len -> result := len :: rs - | _ -> () - ); + | r :: _ as rs when r <> len -> result := len :: rs + | _ -> ()); 0 :: List.rev !result in Array.of_list newline_list let offset_to_file_pos_triple bolmap offset = let len = Array.length bolmap in + if !curr_index >= len then curr_index := len - 1; let rec forward_search i = let offset_at_i = Array.unsafe_get bolmap i in - if offset < offset_at_i then i - 1 else - (if i+1 >= len then len - 1 else forward_search (i+1)) in + if offset < offset_at_i then + i - 1 + else if i + 1 >= len then + len - 1 + else + forward_search (i + 1) + in let rec backward_search i = let offset_at_i = Array.unsafe_get bolmap i in - if offset >= offset_at_i then i else - (if i = 0 then 0 else backward_search (i-1)) in - let index = if !last_offset < offset && !curr_index <> len - 1 then - forward_search (!curr_index + 1) else if !last_offset > offset then - backward_search !curr_index else !curr_index in - let line_start = Array.get bolmap index in + if offset >= offset_at_i then + i + else if i = 0 then + 0 + else + backward_search (i - 1) + in + let index = + if !last_offset < offset && !curr_index <> len - 1 then + forward_search (!curr_index + 1) + else if !last_offset > offset then + backward_search !curr_index + else + !curr_index + in + let line_start = bolmap.(index) in curr_index := index; last_offset := offset; - index + 1, line_start, offset + (index + 1, line_start, offset) let offset_to_position bolmap offset = - let index, line_start, offset = - offset_to_file_pos_triple bolmap offset - in + let (index, line_start, offset) = offset_to_file_pos_triple bolmap offset in (index, offset - line_start + 1) -let position_to_offset ?(existing = false) - bolmap (line, column) = +let position_to_offset ?(existing = false) bolmap (line, column) = let len = Array.length bolmap in let file_line = line in - - let line_start = Array.get bolmap (file_line - 1) in + (* Treat all file_line errors the same: Not_found *) + let line_start = (try bolmap.(file_line - 1) with _ 
-> raise Not_found) in let offset = line_start + column - 1 in + if + (not existing) + || (offset >= 0 && offset <= bolmap.(min (len - 1) file_line)) + then + offset + else + raise Not_found - if not existing - || offset >= 0 && offset <= Array.get bolmap (min (len-1) file_line) - then offset - else raise Not_found - -let offset_to_line_start_offset bolmap offset = - offset - snd (offset_to_position bolmap offset) + 1 +let offset_to_line_start_offset bolmap offset = + offset - snd (offset_to_position bolmap offset) + 1 diff --git a/hack/utils/line_break_map.mli b/hack/utils/line_break_map.mli index 614b0ef67a9..3b5b11066a1 100644 --- a/hack/utils/line_break_map.mli +++ b/hack/utils/line_break_map.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * @@ -14,10 +14,12 @@ *) type t [@@deriving show] +val reset_global_state : unit -> unit + (* Creates a line break map from/for the given string. *) val make : string -> t -val offset_to_file_pos_triple: t -> int -> int * int * int +val offset_to_file_pos_triple : t -> int -> int * int * int (* Take a zero-based offset, produce a one-based (line, column) pair. * @@ -26,7 +28,7 @@ val offset_to_file_pos_triple: t -> int -> int * int * int * an offset x where x < -l defaults to offset 0, i.e. offsets only wrap around\ * once. *) -val offset_to_position: t -> int -> int * int +val offset_to_position : t -> int -> int * int (* Take a one-based (line, column) pair, produce a zero-based offset. * diff --git a/hack/utils/local_id.ml b/hack/utils/local_id.ml deleted file mode 100644 index fd0c4eb967b..00000000000 --- a/hack/utils/local_id.ml +++ /dev/null @@ -1,43 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -module S = struct - type t = int * string - let compare x y = fst x - fst y -end - -include S - -let ctr = ref 1 - -let next () = - incr ctr; - !ctr - -let to_string x = snd x - -let pp fmt x = Format.pp_print_string fmt (to_string x) - -let to_int x = fst x - -let get_name x = to_string x - -let make x = (next (), x) - -(* `make` always returns a positive value. By multiplying the hash by -1 we - * ensure that the value returned by `get` never overlaps with those returned - * by `make` *) -let get x = (-(Hashtbl.hash x), x) - -let tmp () = - let res = next () in - (res, ("__tmp"^string_of_int res)) - -module Set = Set.Make(S) -module Map = MyMap.Make(S) diff --git a/hack/utils/local_id.mli b/hack/utils/local_id.mli deleted file mode 100644 index b79e767d396..00000000000 --- a/hack/utils/local_id.mli +++ /dev/null @@ -1,41 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(* Used to represent local variables in the named AST. *) - -module S : sig - type t - val compare : t -> t -> int -end - -type t = S.t - -val pp : Format.formatter -> t -> unit - -val compare : t -> t -> int - -val to_string : t -> string - -val to_int : t -> int - -val get_name : t -> string - -(* Returns a fresh id every time. *) -val make : string -> t - -(* Returns the same id every time for a given string argument. Used for - * function / method parameters. - * The ids returned here are guaranteed not to overlap with those returned by - * Local_id.make, which is used for naming local variables. 
*) -val get : string -> t - -val tmp : unit -> t - -module Set : module type of Set.Make(S) -module Map : module type of MyMap.Make(S) diff --git a/hack/utils/logging/common/dune b/hack/utils/logging/common/dune new file mode 100644 index 00000000000..575764e56f5 --- /dev/null +++ b/hack/utils/logging/common/dune @@ -0,0 +1,33 @@ +(* -*- tuareg -*- *) + +let library_entry name suffix = + Printf.sprintf +"(library + (name %s) + (wrapped false) + (modules) + (libraries %s_%s))" name name suffix + +let fb_entry name = + library_entry name "fb" + +let stubs_entry name = + library_entry name "stubs" + +let entry is_fb name = + if is_fb then + fb_entry name + else + stubs_entry name + +let () = + (* test presence of fb subfolder *) + let current_dir = Sys.getcwd () in + (* we are in src/utils/logging/common, locate src/facebook *) + let src_dir = Filename.(dirname @@ dirname @@ dirname current_dir) in + let fb_dir = Filename.concat src_dir "facebook" in + (* locate src/facebook/dune *) + let fb_dune = Filename.concat fb_dir "dune" in + let is_fb = Sys.file_exists fb_dune in + let logging_common = entry is_fb "logging_common" in + Jbuild_plugin.V1.send logging_common diff --git a/hack/utils/lsp.ml b/hack/utils/lsp.ml deleted file mode 100644 index f6885d6dcd6..00000000000 --- a/hack/utils/lsp.ml +++ /dev/null @@ -1,1024 +0,0 @@ -(** - * Copyright (c) 2016, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -(** - * This file is an OCaml representation of the Language Server Protocol - * https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md - * based on the current v3. - * - * Changes to make it more natural in OCaml: - * - We don't represent the common base types of Requests/Errors/Notifications - * because base types don't naturally mix with abstract data types, and - * because code for these things is done more naturally at the JSON layer - * - We avoid option types where we can. The idea is to follow the internet - * "robustness" rule of being liberal in what we accept, conservative in - * what we emit: if we're parsing a message and it lacks a field, and if - * the spec tells us how to interpret absence, then we do that interpretation - * at the JSON->LSP parsing level (so long as the interpretation is lossless). - * On the emitting side, we might as well emit all fields. - * - For every request, like Initialize or workspace/Symbol, we've invented - * "Initialize.response = (Initialize.result, Initialize.error) Result" - * or "Symbol.response = (Symbol.result, Error.error) Result" to show - * the two possible return types from this request. Note that each different - * request can have its own custom error type, although most don't. - * - Most datatypes go in modules since there are so many name-clashes in - * the protocol and OCaml doesn't like name-clashes. Only exceptions are - * the really primitive types like location and documentUri. - * The few places where we still had to rename fields to avoid OCaml name - * clashes I've noted in the comments with the word "wire" to indicate the - * over-the-wire form of the name. - * - Names have been translated from jsonConvention into ocaml convention - * only where necessary, e.g. because ocaml uses lowercase for types. - * - The spec has space for extra fields like "experimental". It obviously - * doesn't make sense to encode them in a type system. I've omitted them - * entirely. 
-*) - -type lsp_id = - | NumberId of int - | StringId of string - -type documentUri = string - -(* A position is between two characters like an 'insert' cursor in a editor *) -type position = { - line: int; (* line position in a document [zero-based] *) - character: int; (* character offset on a line in a document [zero-based] *) -} - -(* A range is comparable to a selection in an editor *) -type range = { - start: position; (* the range's start position *) - end_: position; (* the range's end position [exclusive] *) -} - -(* Represents a location inside a resource, such as a line inside a text file *) -module Location = struct - type t = { - uri: documentUri; - range: range; - } -end - -(* Represents a location inside a resource which also wants to display a - friendly name to the user. *) -module DefinitionLocation = struct - type t = { - location: Location.t; - title: string option; - } -end - -(* markedString can be used to render human readable text. It is either a - * markdown string or a code-block that provides a language and a code snippet. - * Note that markdown strings will be sanitized by the client - including - * escaping html *) -type markedString = - | MarkedString of string - | MarkedCode of string * string (* lang, value *) - -(* Represents a reference to a command. Provides a title which will be used to - * represent a command in the UI. Commands are identitifed using a string - * identifier and the protocol currently doesn't specify a set of well known - * commands. So executing a command requires some tool extension code. *) -module Command = struct - type t = { - title: string; (* title of the command, like `save` *) - command: string; (* the identifier of the actual command handler *) - arguments: Hh_json.json list; (* wire: it can be omitted *) - } -end - -(* A textual edit applicable to a text document. If n textEdits are applied - to a text document all text edits describe changes to the initial document - version. Execution wise text edits should applied from the bottom to the - top of the text document. Overlapping text edits are not supported. *) -module TextEdit = struct - type t = { - range: range; (* to insert text, use a range where start = end *) - newText: string; (* for delete operations, use an empty string *) - } -end - -(* Text documents are identified using a URI. *) -module TextDocumentIdentifier = struct - type t = { - uri: documentUri; (* the text document's URI *) - } -end - -(* An identifier to denote a specific version of a text document. *) -module VersionedTextDocumentIdentifier = struct - type t = { - uri: documentUri; (* the text document's URI *) - version: int; (* the version number of this document *) - } -end - -(* Describes textual changes on a single text document. The text document is - referred to as a VersionedTextDocumentIdentifier to allow clients to check - the text document version before an edit is applied. *) -module TextDocumentEdit = struct - type t = { - textDocument: VersionedTextDocumentIdentifier.t; - edits: TextEdit.t list; - } -end - -(* A workspace edit represents changes to many resources managed in the - workspace. A workspace edit consists of a mapping from a URI to an - array of TextEdits to be applied to the document with that URI. *) -module WorkspaceEdit = struct - type t = { - changes: TextEdit.t list SMap.t; (* holds changes to existing docs *) - } -end - -(* An item to transfer a text document from the client to the server. The - version number strictly increases after each change, including undo/redo. 
*) -module TextDocumentItem = struct - type t = { - uri: documentUri; (* the text document's URI *) - languageId: string; (* the text document's language identifier *) - version: int; (* the version of the document *) - text: string; (* the content of the opened text document *) - } -end - -(* A parameter literal used in requests to pass a text document and a position - inside that document. *) -module TextDocumentPositionParams = struct - type t = { - textDocument: TextDocumentIdentifier.t; (* the text document *) - position: position; (* the position inside the text document *) - } -end - -(* A document filter denotes a document through properties like language, - schema or pattern. E.g. language:"typescript",scheme:"file" - or language:"json",pattern:"**/package.json" *) -module DocumentFilter = struct - type t = { - language: string option; (* a language id, like "typescript" *) - scheme: string option; (* a uri scheme, like "file" or "untitled" *) - pattern: string option; (* a glob pattern, like "*.{ts,js}" *) - } -end - -(* A document selector is the combination of one or many document filters. *) -module DocumentSelector = struct - type t = DocumentFilter.t list -end - - -(* Represents information about programming constructs like variables etc. *) -module SymbolInformation = struct - type t = { - name: string; - kind: symbolKind; - location: Location.t; (* the span of the symbol including its contents *) - containerName: string option; (* the symbol containing this symbol *) - } - - and symbolKind = - | File (* 1 *) - | Module (* 2 *) - | Namespace (* 3 *) - | Package (* 4 *) - | Class (* 5 *) - | Method (* 6 *) - | Property (* 7 *) - | Field (* 8 *) - | Constructor (* 9 *) - | Enum (* 10 *) - | Interface (* 11 *) - | Function (* 12 *) - | Variable (* 13 *) - | Constant (* 14 *) - | String (* 15 *) - | Number (* 16 *) - | Boolean (* 17 *) - | Array (* 18 *) -end - - -(* For showing messages (not diagnostics) in the user interface. *) -module MessageType = struct - type t = - | ErrorMessage (* 1 *) - | WarningMessage (* 2 *) - | InfoMessage (* 3 *) - | LogMessage (* 4 *) -end - - -(* Cancellation notification, method="$/cancelRequest" *) -module CancelRequest = struct - type params = cancelParams - - and cancelParams = { - id: lsp_id; (* the request id to cancel *) - } -end - -(* Initialize request, method="initialize" *) -module Initialize = struct - type params = { - processId: int option; (* pid of parent process *) - rootPath: string option; (* deprecated *) - rootUri: documentUri option; (* the root URI of the workspace *) - initializationOptions: initializationOptions; - client_capabilities: client_capabilities; (* "capabilities" over wire *) - trace: trace; (* the initial trace setting, default="off" *) - } - - and result = { - server_capabilities: server_capabilities; (* "capabilities" over wire *) - } - - and errorData = { - retry: bool; (* should client retry the initialize request *) - } - - and trace = - | Off - | Messages - | Verbose - - (* Following initialization options are unfortunately a mix of Hack - * and Flow. We should find a way to separate them. - * Anyway, they're all optional in the source json, but we pick - * a default if necessary while parsing. 
*) - and initializationOptions = { - useTextEditAutocomplete: bool; (* only supported for Hack so far *) - liveSyntaxErrors: bool; (* implicitly true for Hack; supported in Flow *) - } - - and client_capabilities = { - workspace: workspaceClientCapabilities; - textDocument: textDocumentClientCapabilities; - window: windowClientCapabilities; - telemetry: telemetryClientCapabilities; - (* omitted: experimental *) - } - - and workspaceClientCapabilities = { - applyEdit: bool; (* client supports appling batch edits *) - workspaceEdit: workspaceEdit; - (* omitted: dynamic-registration fields *) - } - - and workspaceEdit = { - documentChanges: bool; (* client supports versioned doc changes *) - } - - and textDocumentClientCapabilities = { - synchronization: synchronization; - completion: completion; (* textDocument/completion *) - (* omitted: dynamic-registration fields *) - } - - (* synchronization capabilities say what messages the client is capable - * of sending, should be be so asked by the server. - * We use the "can_" prefix for OCaml naming reasons; it's absent in LSP *) - and synchronization = { - can_willSave: bool; (* client can send textDocument/willSave *) - can_willSaveWaitUntil: bool; (* textDoc.../willSaveWaitUntil *) - can_didSave: bool; (* textDocument/didSave *) - } - - and completion = { - completionItem: completionItem; - } - - and completionItem = { - snippetSupport: bool; (* client can do snippets as insert text *) - } - - and windowClientCapabilities = { - status: bool; (* Nuclide-specific: client supports window/showStatusRequest *) - progress: bool; (* Nuclide-specific: client supports window/progress *) - actionRequired: bool; (* Nuclide-specific: client supports window/actionRequired *) - } - - and telemetryClientCapabilities = { - connectionStatus: bool; (* Nuclide-specific: client supports telemetry/connectionStatus *) - } - - (* What capabilities the server provides *) - and server_capabilities = { - textDocumentSync: textDocumentSyncOptions; (* how to sync *) - hoverProvider: bool; - completionProvider: completionOptions option; - signatureHelpProvider: signatureHelpOptions option; - definitionProvider: bool; - referencesProvider: bool; - documentHighlightProvider: bool; - documentSymbolProvider: bool; (* ie. document outline *) - workspaceSymbolProvider: bool; (* ie. find-symbol-in-project *) - codeActionProvider: bool; - codeLensProvider: codeLensOptions option; - documentFormattingProvider: bool; - documentRangeFormattingProvider: bool; - documentOnTypeFormattingProvider: documentOnTypeFormattingOptions option; - renameProvider: bool; - documentLinkProvider: documentLinkOptions option; - executeCommandProvider: executeCommandOptions option; - typeCoverageProvider: bool; (* Nuclide-specific feature *) - rageProvider: bool; - (* omitted: experimental *) - } - - and completionOptions = { - resolveProvider: bool; (* server resolves extra info on demand *) - completion_triggerCharacters: string list; (* wire "triggerCharacters" *) - } - - and signatureHelpOptions = { - sighelp_triggerCharacters: string list; (* wire "triggerCharacters" *) - } - - and codeLensOptions = { - codelens_resolveProvider: bool; (* wire "resolveProvider" *) - } - - and documentOnTypeFormattingOptions = { - firstTriggerCharacter: string; (* e.g. 
"}" *) - moreTriggerCharacter: string list; - } - - and documentLinkOptions = { - doclink_resolveProvider: bool; (* wire "resolveProvider" *) - } - - and executeCommandOptions = { - commands: string list; (* the commands to be executed on the server *) - } - - (* text document sync options say what messages the server requests the - * client to send. We use the "want_" prefix for OCaml naming reasons; - * this prefix is absent in LSP. *) - and textDocumentSyncOptions = { - want_openClose: bool; (* textDocument/didOpen+didClose *) - want_change: textDocumentSyncKind; - want_willSave: bool; (* textDocument/willSave *) - want_willSaveWaitUntil: bool; (* textDoc.../willSaveWaitUntil *) - want_didSave: saveOptions option; (* textDocument/didSave *) - } - - and textDocumentSyncKind = - | NoSync (* 0 *) (* docs should not be synced at all. Wire "None" *) - | FullSync (* 1 *) (* synced by always sending full content. Wire "Full" *) - | IncrementalSync (* 2 *) (* full only on open. Wire "Incremental" *) - - and saveOptions = { - includeText: bool; (* the client should include content on save *) - } -end - -(* Shutdown request, method="shutdown" *) -module Shutdown = struct -end - -(* Exit notification, method="exit" *) -module Exit = struct -end - -(* Rage request, method="telemetry/rage" *) -module Rage = struct - type result = rageItem list - - and rageItem = { - title: string option; - data: string; - } -end - - -(* Hover request, method="textDocument/hover" *) -module Hover = struct - type params = TextDocumentPositionParams.t - - and result = hoverResult option - - and hoverResult = { - contents: markedString list; (* wire: either a single one or an array *) - range: range option; - } -end - -(* PublishDiagnostics notification, method="textDocument/PublishDiagnostics" *) -module PublishDiagnostics = struct - type params = publishDiagnosticsParams - - and publishDiagnosticsParams = { - uri: documentUri; - diagnostics: diagnostic list; - } - - and diagnostic = { - range: range; (* the range at which the message applies *) - severity: diagnosticSeverity option; (* if omitted, client decides *) - code: diagnosticCode; (* the diagnostic's code. *) - source: string option; (* human-readable string, eg. 
typescript/lint *) - message: string; (* the diagnostic's message *) - relatedInformation: diagnosticRelatedInformation list; - relatedLocations: relatedLocation list; (* legacy FB extension *) - } - - and diagnosticCode = - | IntCode of int - | StringCode of string - | NoCode - - and diagnosticSeverity = - | Error (* 1 *) - | Warning (* 2 *) - | Information (* 3 *) - | Hint (* 4 *) - - and diagnosticRelatedInformation = { - relatedLocation: Location.t; (* wire: just "location" *) - relatedMessage: string; (* wire: just "message" *) - } - - (* legacy FB extension *) - and relatedLocation = diagnosticRelatedInformation -end - -(* DidOpenTextDocument notification, method="textDocument/didOpen" *) -module DidOpen = struct - type params = didOpenTextDocumentParams - - and didOpenTextDocumentParams = { - textDocument: TextDocumentItem.t; (* the document that was opened *) - } -end - -(* DidCloseTextDocument notification, method="textDocument/didClose" *) -module DidClose = struct - type params = didCloseTextDocumentParams - - and didCloseTextDocumentParams = { - textDocument: TextDocumentIdentifier.t; (* the doc that was closed *) - } -end - -(* DidSaveTextDocument notification, method="textDocument/didSave" *) -module DidSave = struct - type params = didSaveTextDocumentParams - - and didSaveTextDocumentParams = { - textDocument: TextDocumentIdentifier.t; (* the doc that was saved *) - text: string option; (* content when saved; depends on includeText *) - } -end - -(* DidChangeTextDocument notification, method="textDocument/didChange" *) -module DidChange = struct - type params = didChangeTextDocumentParams - - and didChangeTextDocumentParams = { - textDocument: VersionedTextDocumentIdentifier.t; - contentChanges: textDocumentContentChangeEvent list; - } - - and textDocumentContentChangeEvent = { - range: range option; (* the range of the document that changed *) - rangeLength: int option; (* the length that got replaced *) - text: string; (* the new text of the range/document *) - } -end - -(* Goto Definition request, method="textDocument/definition" *) -module Definition = struct - type params = TextDocumentPositionParams.t - - and result = DefinitionLocation.t list (* wire: either a single one or an array *) -end - -(* Completion request, method="textDocument/completion" *) -module Completion = struct - type params = completionParams - - and completionParams = { - loc: TextDocumentPositionParams.t; - context: completionContext option; - } - - and completionContext = { - triggerKind: completionTriggerKind; - } - - and completionTriggerKind = - | Invoked (* 1 *) - | TriggerCharacter (* 2 *) - | TriggerForIncompleteCompletions (* 3 *) - - and result = completionList (* wire: can also be 'completionItem list' *) - - and completionList = { - isIncomplete: bool; (* further typing should result in recomputing *) - items: completionItem list; - } - - and completionItem = { - label: string; (* the label in the UI *) - kind: completionItemKind option; (* tells editor which icon to use *) - detail: string option; (* human-readable string like type/symbol info *) - inlineDetail: string option; (* nuclide-specific, right column *) - itemType: string option; (* nuclide-specific, left column *) - documentation: string option; (* human-readable doc-comment *) - sortText: string option; (* used for sorting; if absent, uses label *) - filterText: string option; (* used for filtering; if absent, uses label *) - insertText: string option; (* used for inserting; if absent, uses label *) - insertTextFormat: 
insertTextFormat option; - textEdits: TextEdit.t list; (* wire: split into hd and tl *) - command: Command.t option; (* if present, is executed after completion *) - data: Hh_json.json option; - } - - and completionItemKind = - | Text (* 1 *) - | Method (* 2 *) - | Function (* 3 *) - | Constructor (* 4 *) - | Field (* 5 *) - | Variable (* 6 *) - | Class (* 7 *) - | Interface (* 8 *) - | Module (* 9 *) - | Property (* 10 *) - | Unit (* 11 *) - | Value (* 12 *) - | Enum (* 13 *) - | Keyword (* 14 *) - | Snippet (* 15 *) - | Color (* 16 *) - | File (* 17 *) - | Reference (* 18 *) - - (** Keep this in sync with `int_of_completionItemKind`. *) - and insertTextFormat = - | PlainText (* 1 *) (* the insertText/textEdits are just plain strings *) - | SnippetFormat (* 2 *) (* wire: just "Snippet" *) - -(** Once we get better PPX support we can use [@@deriving enum]. - Keep in sync with completionItemKind_of_int_opt. *) - let int_of_completionItemKind = function - | Text -> 1 - | Method -> 2 - | Function -> 3 - | Constructor -> 4 - | Field -> 5 - | Variable -> 6 - | Class -> 7 - | Interface -> 8 - | Module -> 9 - | Property -> 10 - | Unit -> 11 - | Value -> 12 - | Enum -> 13 - | Keyword -> 14 - | Snippet -> 15 - | Color -> 16 - | File -> 17 - | Reference -> 18 - -(** Once we get better PPX support we can use [@@deriving enum]. - Keep in sync with int_of_completionItemKind. *) - let completionItemKind_of_int_opt = function - | 1 -> Some Text - | 2 -> Some Method - | 3 -> Some Function - | 4 -> Some Constructor - | 5 -> Some Field - | 6 -> Some Variable - | 7 -> Some Class - | 8 -> Some Interface - | 9 -> Some Module - | 10 -> Some Property - | 11 -> Some Unit - | 12 -> Some Value - | 13 -> Some Enum - | 14 -> Some Keyword - | 15 -> Some Snippet - | 16 -> Some Color - | 17 -> Some File - | 18 -> Some Reference - | _ -> None - -(** Once we get better PPX support we can use [@@deriving enum]. - Keep in sync with insertFormat_of_int_opt. *) - let int_of_insertFormat = function - | PlainText -> 1 - | SnippetFormat -> 2 - -(** Once we get better PPX support we can use [@@deriving enum]. - Keep in sync with int_of_insertFormat. 
*) - let insertFormat_of_int_opt = function - | 1 -> Some PlainText - | 2 -> Some SnippetFormat - | _ -> None -end - - -(* Completion Item Resolve request, method="completionItem/resolve" *) -module CompletionItemResolve = struct - type params = Completion.completionItem - - and result = Completion.completionItem -end - - -(* Workspace Symbols request, method="workspace/symbol" *) -module WorkspaceSymbol = struct - type params = workspaceSymbolParams - - and result = SymbolInformation.t list - - and workspaceSymbolParams = { - query: string; (* a non-empty query string *) - } -end - - -(* Document Symbols request, method="textDocument/documentSymbols" *) -module DocumentSymbol = struct - type params = documentSymbolParams - - and result = SymbolInformation.t list - - and documentSymbolParams = { - textDocument: TextDocumentIdentifier.t; - } -end - - -(* Find References request, method="textDocument/references" *) -module FindReferences = struct - type params = referenceParams - - and result = Location.t list - - and referenceParams = { - loc: TextDocumentPositionParams.t; (* wire: loc's members are part of referenceParams *) - context: referenceContext; - } - - and referenceContext = { - includeDeclaration: bool; (* include declaration of current symbol *) - includeIndirectReferences: bool; - } -end - - -(* Document Highlights request, method="textDocument/documentHighlight" *) -module DocumentHighlight = struct - type params = TextDocumentPositionParams.t - - and result = documentHighlight list - - and documentHighlight = { - range: range; (* the range this highlight applies to *) - kind: documentHighlightKind option; - } - - and documentHighlightKind = - | Text (* 1 *) (* a textual occurrence *) - | Read (* 2 *) (* read-access of a symbol, like reading a variable *) - | Write (* 3 *) (* write-access of a symbol, like writing a variable *) -end - - -(* Type Coverage request, method="textDocument/typeCoverage" *) -(* THIS IS A NUCLIDE-SPECIFIC EXTENSION TO LSP. 
*) -module TypeCoverage = struct - type params = typeCoverageParams - - and result = { - coveredPercent: int; - uncoveredRanges: uncoveredRange list; - defaultMessage: string; - } - - and typeCoverageParams = { - textDocument: TextDocumentIdentifier.t; - } - - and uncoveredRange = { - range: range; - message: string option; - } -end - - -(* Document Formatting request, method="textDocument/formatting" *) -module DocumentFormatting = struct - type params = documentFormattingParams - - and result = TextEdit.t list - - and documentFormattingParams = { - textDocument: TextDocumentIdentifier.t; - options: formattingOptions; - } - - and formattingOptions = { - tabSize: int; (* size of a tab in spaces *) - insertSpaces: bool; (* prefer spaces over tabs *) - (* omitted: signature for further properties *) - } -end - - -(* Document Range Formatting request, method="textDocument/rangeFormatting" *) -module DocumentRangeFormatting = struct - type params = documentRangeFormattingParams - - and result = TextEdit.t list - - and documentRangeFormattingParams = { - textDocument: TextDocumentIdentifier.t; - range: range; - options: DocumentFormatting.formattingOptions; - } -end - - -(* Document On Type Formatting req., method="textDocument/onTypeFormatting" *) -module DocumentOnTypeFormatting = struct - type params = documentOnTypeFormattingParams - - and result = TextEdit.t list - - and documentOnTypeFormattingParams = { - textDocument: TextDocumentIdentifier.t; - position: position; (* the position at which this request was sent *) - ch: string; (* the character that has been typed *) - options: DocumentFormatting.formattingOptions; - } -end - - -(* Document Signature Help request, method="textDocument/signatureHelp" *) -module SignatureHelp = struct - type params = TextDocumentPositionParams.t - - and result = t option - - and t = { - signatures: signature_information list; - activeSignature: int; - activeParameter: int; - } - - and signature_information = { - siginfo_label: string; - siginfo_documentation: string option; - parameters: parameter_information list; - } - - and parameter_information = { - parinfo_label: string; - parinfo_documentation: string option; - } -end - -(* Workspace Rename request, method="textDocument/rename" *) -module Rename = struct - type params = renameParams - - and result = WorkspaceEdit.t - - and renameParams = { - textDocument: TextDocumentIdentifier.t; - position: position; - newName: string; - } -end - - -(* LogMessage notification, method="window/logMessage" *) -module LogMessage = struct - type params = logMessageParams - - and logMessageParams = { - type_: MessageType.t; - message: string; - } -end - - -(* ShowMessage notification, method="window/showMessage" *) -module ShowMessage = struct - type params = showMessageParams - - and showMessageParams = { - type_: MessageType.t; - message: string; - } -end - - -(* ShowMessage request, method="window/showMessageRequest" *) -module ShowMessageRequest = struct - type t = Present of {id: lsp_id;} | Absent - - and params = showMessageRequestParams - - and result = messageActionItem option - - and showMessageRequestParams = { - type_: MessageType.t; - message: string; - actions: messageActionItem list; - } - - and messageActionItem = { - title: string; - } -end - - -(* ShowStatus request, method="window/showStatus" *) -module ShowStatus = struct - type params = showStatusParams - - and result = ShowMessageRequest.messageActionItem option - - and showStatusParams = { - request: ShowMessageRequest.showMessageRequestParams; - 
progress: int option; - total: int option; - shortMessage: string option; - } -end - - -(* Progress notification, method="window/progress" *) -module Progress = struct - type t = Present of {id: int; label: string;} | Absent - - and params = progressParams - - and progressParams = { - (* LSP progress notifications have a lifetime that starts with their 1st *) - (* window/progress update message and ends with an update message with *) - (* label = None. They use an ID number (not JsonRPC id) to associate *) - (* multiple messages to a single lifetime stream. *) - id: int; - label: string option; - } -end - - -(* ActionRequired notification, method="window/actionRequired" *) -module ActionRequired = struct - type t = Present of {id: int; label: string;} | Absent - - and params = actionRequiredParams - - and actionRequiredParams = { - (* See progressParams.id for an explanation of this field. *) - id: int; - label: string option; - } -end - - -(* ConnectionStatus notification, method="telemetry/connectionStatus" *) -module ConnectionStatus = struct - type params = connectionStatusParams - - and connectionStatusParams = { - isConnected: bool; - } -end - - -(* Module for dynamic view, method="workspace/toggleTypeCoverage" *) -module ToggleTypeCoverage = struct - type params = toggleTypeCoverageParams - and toggleTypeCoverageParams = { - toggle: bool; - } -end - -(* ErrorResponse *) -module Error = struct - type t = {code: int; message: string; data: Hh_json.json option} - - (* Legacy: some code uses exceptions instead of Error.t. *) - (* Be careful with that since if you unmarshal one then you can't pattern-match it. *) - - (* Defined by JSON-RPC. *) - exception Parse of string (* -32700 *) - exception InvalidRequest of string (* -32600 *) - exception MethodNotFound of string (* -32601 *) - exception InvalidParams of string (* -32602 *) - exception InternalError of string (* -32603 *) - exception ServerErrorStart of string * Initialize.errorData (* -32099 *) - exception ServerErrorEnd of string (* -32000 *) - exception ServerNotInitialized of string (* -32002 *) - exception Unknown of string (* -32001 *) - - (* Defined by the protocol. 
*) - exception RequestCancelled of string (* -32800 *) -end - - -(** - * Here are gathered-up ADTs for all the messages we handle -*) - -type lsp_request = - | InitializeRequest of Initialize.params - | ShutdownRequest - | HoverRequest of Hover.params - | DefinitionRequest of Definition.params - | CompletionRequest of Completion.params - | CompletionItemResolveRequest of CompletionItemResolve.params - | WorkspaceSymbolRequest of WorkspaceSymbol.params - | DocumentSymbolRequest of DocumentSymbol.params - | FindReferencesRequest of FindReferences.params - | DocumentHighlightRequest of DocumentHighlight.params - | TypeCoverageRequest of TypeCoverage.params - | DocumentFormattingRequest of DocumentFormatting.params - | DocumentRangeFormattingRequest of DocumentRangeFormatting.params - | DocumentOnTypeFormattingRequest of DocumentOnTypeFormatting.params - | ShowMessageRequestRequest of ShowMessageRequest.params - | ShowStatusRequest of ShowStatus.params - | RageRequest - | RenameRequest of Rename.params - | UnknownRequest of string * Hh_json.json option - -type lsp_result = - | InitializeResult of Initialize.result - | ShutdownResult - | HoverResult of Hover.result - | DefinitionResult of Definition.result - | CompletionResult of Completion.result - | CompletionItemResolveResult of CompletionItemResolve.result - | WorkspaceSymbolResult of WorkspaceSymbol.result - | DocumentSymbolResult of DocumentSymbol.result - | FindReferencesResult of FindReferences.result - | DocumentHighlightResult of DocumentHighlight.result - | TypeCoverageResult of TypeCoverage.result - | DocumentFormattingResult of DocumentFormatting.result - | DocumentRangeFormattingResult of DocumentRangeFormatting.result - | DocumentOnTypeFormattingResult of DocumentOnTypeFormatting.result - | ShowMessageRequestResult of ShowMessageRequest.result - | ShowStatusResult of ShowStatus.result - | RageResult of Rage.result - | RenameResult of Rename.result - | ErrorResult of Error.t * string (* the string is a stacktrace *) - -type lsp_notification = - | ExitNotification - | CancelRequestNotification of CancelRequest.params - | PublishDiagnosticsNotification of PublishDiagnostics.params - | DidOpenNotification of DidOpen.params - | DidCloseNotification of DidClose.params - | DidSaveNotification of DidSave.params - | DidChangeNotification of DidChange.params - | LogMessageNotification of LogMessage.params - | TelemetryNotification of LogMessage.params (* LSP allows 'any' but we only send these *) - | ShowMessageNotification of ShowMessage.params - | ProgressNotification of Progress.params - | ActionRequiredNotification of ActionRequired.params - | ConnectionStatusNotification of ConnectionStatus.params - | UnknownNotification of string * Hh_json.json option - -type lsp_message = - | RequestMessage of lsp_id * lsp_request - | ResponseMessage of lsp_id * lsp_result - | NotificationMessage of lsp_notification - -type 'a lsp_handler = 'a lsp_result_handler * 'a lsp_error_handler - -and 'a lsp_error_handler = (Error.t * string) -> 'a -> 'a - -and 'a lsp_result_handler = - | ShowMessageHandler of (ShowMessageRequest.result -> 'a -> 'a) - | ShowStatusHandler of (ShowStatus.result -> 'a -> 'a) - -module IdKey = struct - type t = lsp_id - - let compare (x: t) (y:t) = - match x, y with - | NumberId x, NumberId y -> x - y - | NumberId _, StringId _ -> -1 - | StringId x, StringId y -> String.compare x y - | StringId _, NumberId _ -> 1 -end - -module IdSet = Set.Make (IdKey) -module IdMap = MyMap.Make (IdKey) diff --git a/hack/utils/lsp/dune 
b/hack/utils/lsp/dune new file mode 100644 index 00000000000..d3d06e5653f --- /dev/null +++ b/hack/utils/lsp/dune @@ -0,0 +1,11 @@ +(library + (name lsp) + (wrapped false) + (libraries + file_content + file_url + hh_json + jsonrpc + utils_core) + (preprocess + (pps lwt_ppx ppx_deriving.std ppx_deriving.enum))) diff --git a/hack/utils/lsp/lsp.ml b/hack/utils/lsp/lsp.ml new file mode 100644 index 00000000000..0acafd491b4 --- /dev/null +++ b/hack/utils/lsp/lsp.ml @@ -0,0 +1,1303 @@ +(* + * Copyright (c) 2016, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(** + * This file is an OCaml representation of the Language Server Protocol + * https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md + * based on the current v3. + * + * Changes to make it more natural in OCaml: + * - We don't represent the common base types of Requests/Errors/Notifications + * because base types don't naturally mix with abstract data types, and + * because code for these things is done more naturally at the JSON layer + * - We avoid option types where we can. The idea is to follow the internet + * "robustness" rule of being liberal in what we accept, conservative in + * what we emit: if we're parsing a message and it lacks a field, and if + * the spec tells us how to interpret absence, then we do that interpretation + * at the JSON->LSP parsing level (so long as the interpretation is lossless). + * On the emitting side, we might as well emit all fields. + * - For every request, like Initialize or workspace/Symbol, we've invented + * "Initialize.response = (Initialize.result, Initialize.error) Result" + * or "Symbol.response = (Symbol.result, Error.error) Result" to show + * the two possible return types from this request. Note that each different + * request can have its own custom error type, although most don't. + * - Most datatypes go in modules since there are so many name-clashes in + * the protocol and OCaml doesn't like name-clashes. Only exceptions are + * the really primitive types like location and documentUri. + * The few places where we still had to rename fields to avoid OCaml name + * clashes I've noted in the comments with the word "wire" to indicate the + * over-the-wire form of the name. + * - Names have been translated from jsonConvention into ocaml convention + * only where necessary, e.g. because ocaml uses lowercase for types. + * - The spec has space for extra fields like "experimental". It obviously + * doesn't make sense to encode them in a type system. I've omitted them + * entirely. +*) + +type lsp_id = + | NumberId of int + | StringId of string + +type documentUri = string + +(* A position is between two characters like an 'insert' cursor in a editor *) +type position = { + line: int; + (* line position in a document [zero-based] *) + character: int; (* character offset on a line in a document [zero-based] *) +} + +(* A range is comparable to a selection in an editor *) +type range = { + start: position; + (* the range's start position *) + end_: position; (* the range's end position [exclusive] *) +} + +(* Represents a location inside a resource, such as a line inside a text file *) +module Location = struct + type t = { + uri: documentUri; + range: range; + } +end + +(* Represents a location inside a resource which also wants to display a + friendly name to the user. 
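As a concrete illustration of the position/range conventions above (a hypothetical value, not part of this change): coordinates are zero-based and the end position is exclusive, so the first three characters of a document's first line are:

    let _first_three_chars : range =
      { start = { line = 0; character = 0 };
        end_ = { line = 0; character = 3 } }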
*) +module DefinitionLocation = struct + type t = { + location: Location.t; + title: string option; + } +end + +(* markedString can be used to render human readable text. It is either a + * markdown string or a code-block that provides a language and a code snippet. + * Note that markdown strings will be sanitized by the client - including + * escaping html *) +type markedString = + | MarkedString of string + (* lang, value *) + | MarkedCode of string * string + +(* Represents a reference to a command. Provides a title which will be used to + * represent a command in the UI. Commands are identitifed using a string + * identifier and the protocol currently doesn't specify a set of well known + * commands. So executing a command requires some tool extension code. *) +module Command = struct + type t = { + title: string; + (* title of the command, like `save` *) + command: string; + (* the identifier of the actual command handler *) + arguments: Hh_json.json list; (* wire: it can be omitted *) + } +end + +(* A textual edit applicable to a text document. If n textEdits are applied + to a text document all text edits describe changes to the initial document + version. Execution wise text edits should applied from the bottom to the + top of the text document. Overlapping text edits are not supported. *) +module TextEdit = struct + type t = { + range: range; + (* to insert text, use a range where start = end *) + newText: string; (* for delete operations, use an empty string *) + } +end + +(* Text documents are identified using a URI. *) +module TextDocumentIdentifier = struct + type t = { uri: documentUri (* the text document's URI *) } +end + +(* An identifier to denote a specific version of a text document. *) +module VersionedTextDocumentIdentifier = struct + type t = { + uri: documentUri; + (* the text document's URI *) + version: int; (* the version number of this document *) + } +end + +(* Describes textual changes on a single text document. The text document is + referred to as a VersionedTextDocumentIdentifier to allow clients to check + the text document version before an edit is applied. *) +module TextDocumentEdit = struct + type t = { + textDocument: VersionedTextDocumentIdentifier.t; + edits: TextEdit.t list; + } +end + +(* A workspace edit represents changes to many resources managed in the + workspace. A workspace edit consists of a mapping from a URI to an + array of TextEdits to be applied to the document with that URI. *) +module WorkspaceEdit = struct + type t = { + changes: TextEdit.t list SMap.t; (* holds changes to existing docs *) + } +end + +(* An item to transfer a text document from the client to the server. The + version number strictly increases after each change, including undo/redo. *) +module TextDocumentItem = struct + type t = { + uri: documentUri; + (* the text document's URI *) + languageId: string; + (* the text document's language identifier *) + version: int; + (* the version of the document *) + text: string; (* the content of the opened text document *) + } +end + +(** + * A code lens represents a command that should be shown along with + * source text, like the number of references, a way to run tests, etc. + * + * A code lens is _unresolved_ when no command is associated to it. For performance + * reasons the creation of a code lens and resolving should be done in two stages. 
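For instance, a resolved lens (one that already carries its command) could look like the following hypothetical value; the title and command identifier are made up for illustration:

    let _resolved_lens : CodeLens.t =
      {
        CodeLens.range =
          { start = { line = 9; character = 0 }; end_ = { line = 9; character = 0 } };
        command =
          { Command.title = "3 references";
            command = "myServer.showReferences";
            arguments = [] };
        data = None;
      }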
+ *) +module CodeLens = struct + type t = { + range: range; + command: Command.t; + data: Hh_json.json option; + } +end + +(* A parameter literal used in requests to pass a text document and a position + inside that document. *) +module TextDocumentPositionParams = struct + type t = { + textDocument: TextDocumentIdentifier.t; + (* the text document *) + position: position; (* the position inside the text document *) + } +end + +(* A document filter denotes a document through properties like language, + schema or pattern. E.g. language:"typescript",scheme:"file" + or language:"json",pattern:"**/package.json" *) +module DocumentFilter = struct + type t = { + language: string option; + (* a language id, like "typescript" *) + scheme: string option; + (* a uri scheme, like "file" or "untitled" *) + pattern: string option; (* a glob pattern, like "*.{ts,js}" *) + } +end + +(* A document selector is the combination of one or many document filters. *) +module DocumentSelector = struct + type t = DocumentFilter.t list +end + +(* Represents information about programming constructs like variables etc. *) +module SymbolInformation = struct + (* These numbers should match + * https://microsoft.github.io/language-server-protocol/specification#textDocument_documentSymbol + *) + type symbolKind = + | File [@value 1] + | Module [@value 2] + | Namespace [@value 3] + | Package [@value 4] + | Class [@value 5] + | Method [@value 6] + | Property [@value 7] + | Field [@value 8] + | Constructor [@value 9] + | Enum [@value 10] + | Interface [@value 11] + | Function [@value 12] + | Variable [@value 13] + | Constant [@value 14] + | String [@value 15] + | Number [@value 16] + | Boolean [@value 17] + | Array [@value 18] + | Object [@value 19] + | Key [@value 20] + | Null [@value 21] + | EnumMember [@value 22] + | Struct [@value 23] + [@@deriving enum] + + type t = { + name: string; + kind: symbolKind; + location: Location.t; + (* the span of the symbol including its contents *) + containerName: string option; (* the symbol containing this symbol *) + } +end + +(* For showing messages (not diagnostics) in the user interface. *) +module MessageType = struct + type t = + | ErrorMessage [@value 1] + | WarningMessage [@value 2] + | InfoMessage [@value 3] + | LogMessage [@value 4] + [@@deriving enum] +end + +module CodeActionKind = struct + (* The kind of a code action. + * Kinds are a hierarchical list of identifiers separated by `.`, e.g. + * `"refactor.extract.function"`. + * The set of kinds is open and client needs to announce the kinds it supports + * to the server during initialization. + * CodeActionKind.t uses a pair to represent a non-empty list and provides utility + * functions for creation, membership, printing. + * Module CodeAction below also references this module as Kind. + *) + type t = string * string list + + (* is x of kind k? *) + let is_kind : t -> t -> bool = + let rec is_prefix_of ks xs = + match (ks, xs) with + | ([], _) -> true + | (k :: ks, x :: xs) when String.equal k x -> is_prefix_of ks xs + | (_, _) -> false + in + (fun (k, ks) (x, xs) -> String.equal k x && is_prefix_of ks xs) + + (* does `ks` contain kind `k` *) + let contains_kind k ks = List.exists (is_kind k) ks + + (* does an optional list of kinds `ks` contain kind `k` *) + let contains_kind_opt ~default k ks = + match ks with + | Some ks -> contains_kind k ks + | None -> default + + (* Create a kind from a string that follows the spec *) + let kind_of_string : string -> t = + fun s -> + match String.split_on_char '.' 
s with
+ | [] -> failwith "split_on_char does not return an empty list"
+ | k :: ks -> (k, ks)
+
+ (* Create the equivalent string that the spec would have required *)
+ let string_of_kind : t -> string =
+ (fun (k, ks) -> String.concat "." (k :: ks))
+
+ (* Create a new sub-kind of an existing kind *)
+ let sub_kind : t -> string -> t =
+ let cons_to_end (ss : string list) (s : string) =
+ Core_list.(fold_right ss ~f:cons ~init:[s])
+ in
+ (fun (k, ks) s -> (k, cons_to_end ks s))
+
+ (* Some of the constants defined by the spec *)
+ let quickfix = kind_of_string "quickfix"
+
+ (* Document-wide code actions *)
+ let source = kind_of_string "source"
+end
+
+(* Cancellation notification, method="$/cancelRequest" *)
+module CancelRequest = struct
+ type params = cancelParams
+
+ and cancelParams = { id: lsp_id (* the request id to cancel *) }
+end
+
+(* Initialize request, method="initialize" *)
+module Initialize = struct
+ type textDocumentSyncKind =
+ (* docs should not be synced at all. Wire "None" *)
+ | NoSync [@value 0]
+ (* synced by always sending full content. Wire "Full" *)
+ | FullSync [@value 1]
+ | IncrementalSync [@value 2]
+ [@@deriving enum]
+
+ type params = {
+ processId: int option;
+ (* pid of parent process *)
+ rootPath: string option;
+ (* deprecated *)
+ rootUri: documentUri option;
+ (* the root URI of the workspace *)
+ initializationOptions: initializationOptions;
+ client_capabilities: client_capabilities;
+ (* "capabilities" over wire *)
+ trace: trace; (* the initial trace setting, default="off" *)
+ }
+
+ and result = {
+ server_capabilities: server_capabilities; (* "capabilities" over wire *)
+ }
+
+ and errorData = {
+ retry: bool; (* should client retry the initialize request *)
+ }
+
+ and trace =
+ | Off
+ | Messages
+ | Verbose
+
+ (* The following initialization options are unfortunately a mix of Hack
+ * and Flow. We should find a way to separate them.
+ * Anyway, they're all optional in the source json, but we pick
+ * a default if necessary while parsing. *)
+ and initializationOptions = {
+ useTextEditAutocomplete: bool;
+ (* only supported for Hack so far *)
+ liveSyntaxErrors: bool;
+ (* implicitly true for Hack; supported in Flow *)
+ namingTableSavedStatePath: string option;
+ (* only supported for Hack *)
+ sendServerStatusEvents: bool; (* only supported for Hack *)
+ }
+
+ and client_capabilities = {
+ workspace: workspaceClientCapabilities;
+ textDocument: textDocumentClientCapabilities;
+ window: windowClientCapabilities;
+ telemetry: telemetryClientCapabilities; (* omitted: experimental *)
+ }
+
+ and workspaceClientCapabilities = {
+ applyEdit: bool;
+ (* client supports applying batch edits *)
+ workspaceEdit: workspaceEdit;
+ didChangeWatchedFiles: dynamicRegistration;
+ (* omitted: other dynamic-registration fields *)
+ }
+
+ and dynamicRegistration = {
+ dynamicRegistration: bool;
+ (* client supports dynamic registration for this capability *)
+ }
+
+ and workspaceEdit = {
+ documentChanges: bool; (* client supports versioned doc changes *)
+ }
+
+ and textDocumentClientCapabilities = {
+ synchronization: synchronization;
+ completion: completion;
+ (* textDocument/completion *)
+ codeAction: codeAction; (* omitted: dynamic-registration fields *)
+ }
+
+ (* synchronization capabilities say what messages the client is capable
+ * of sending, should it be so asked by the server.
+ * We use the "can_" prefix for OCaml naming reasons; it's absent in LSP *)
+ and synchronization = {
+ can_willSave: bool;
+ (* client can send textDocument/willSave *)
+ can_willSaveWaitUntil: bool;
+ (* textDoc.../willSaveWaitUntil *)
+ can_didSave: bool; (* textDocument/didSave *)
+ }
+
+ and completion = { completionItem: completionItem }
+
+ and completionItem = {
+ snippetSupport: bool; (* client can do snippets as insert text *)
+ }
+
+ and codeAction = {
+ (* Whether code action supports dynamic registration. *)
+ codeAction_dynamicRegistration: bool;
+ (* wire: dynamicRegistration *)
+ (* The client supports code action literals as a valid
+ * response to the `textDocument/codeAction` request. *)
+ codeActionLiteralSupport: codeActionliteralSupport option;
+ }
+
+ and codeActionliteralSupport = {
+ (* The code action kind values the client supports. When this
+ * property exists the client also guarantees that it will
+ * handle values outside its set gracefully and fall back
+ * to a default value when unknown. *)
+ codeAction_valueSet: CodeActionKind.t list; (* wire: valueSet *)
+ }
+
+ and windowClientCapabilities = {
+ status: bool;
+ (* Nuclide-specific: client supports window/showStatusRequest *)
+ progress: bool;
+ (* Nuclide-specific: client supports window/progress *)
+ actionRequired: bool;
+ (* Nuclide-specific: client supports window/actionRequired *)
+ }
+
+ and telemetryClientCapabilities = {
+ connectionStatus: bool;
+ (* Nuclide-specific: client supports telemetry/connectionStatus *)
+ }
+
+ (* What capabilities the server provides *)
+ and server_capabilities = {
+ textDocumentSync: textDocumentSyncOptions;
+ (* how to sync *)
+ hoverProvider: bool;
+ completionProvider: completionOptions option;
+ signatureHelpProvider: signatureHelpOptions option;
+ definitionProvider: bool;
+ typeDefinitionProvider: bool;
+ referencesProvider: bool;
+ documentHighlightProvider: bool;
+ documentSymbolProvider: bool;
+ (* i.e. document outline *)
+ workspaceSymbolProvider: bool;
+ (* i.e. find-symbol-in-project *)
+ codeActionProvider: bool;
+ codeLensProvider: codeLensOptions option;
+ documentFormattingProvider: bool;
+ documentRangeFormattingProvider: bool;
+ documentOnTypeFormattingProvider: documentOnTypeFormattingOptions option;
+ renameProvider: bool;
+ documentLinkProvider: documentLinkOptions option;
+ executeCommandProvider: executeCommandOptions option;
+ typeCoverageProvider: bool;
+ (* Nuclide-specific feature *)
+ rageProvider: bool; (* omitted: experimental *)
+ }
+
+ and completionOptions = {
+ resolveProvider: bool;
+ (* server resolves extra info on demand *)
+ completion_triggerCharacters: string list; (* wire "triggerCharacters" *)
+ }
+
+ and signatureHelpOptions = {
+ sighelp_triggerCharacters: string list; (* wire "triggerCharacters" *)
+ }
+
+ and codeLensOptions = {
+ codelens_resolveProvider: bool; (* wire "resolveProvider" *)
+ }
+
+ and documentOnTypeFormattingOptions = {
+ firstTriggerCharacter: string;
+ (* e.g. "}" *)
+ moreTriggerCharacter: string list;
+ }
+
+ and documentLinkOptions = {
+ doclink_resolveProvider: bool; (* wire "resolveProvider" *)
+ }
+
+ and executeCommandOptions = {
+ commands: string list; (* the commands to be executed on the server *)
+ }
+
+ (* text document sync options say what messages the server requests the
+ * client to send. We use the "want_" prefix for OCaml naming reasons;
+ * this prefix is absent in LSP.
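For a concrete sense of how the CodeActionKind helpers defined earlier interact with a client's codeAction_valueSet, here is a small sketch (not part of this patch; names and the chosen default are illustrative):

let refactor = CodeActionKind.kind_of_string "refactor"

(* "refactor.extract" is a sub-kind of "refactor"... *)
let refactor_extract = CodeActionKind.sub_kind refactor "extract"

let () = assert (CodeActionKind.string_of_kind refactor_extract = "refactor.extract")

(* ...so it still counts as being of kind "refactor". *)
let () = assert (CodeActionKind.is_kind refactor refactor_extract)

(* Checking an advertised value set, where None means the client never sent
   codeActionLiteralSupport at all, so we fall back to a default answer. *)
let client_accepts_quickfix (value_set : CodeActionKind.t list option) : bool =
  CodeActionKind.contains_kind_opt ~default:false CodeActionKind.quickfix value_set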
*) + and textDocumentSyncOptions = { + want_openClose: bool; + (* textDocument/didOpen+didClose *) + want_change: textDocumentSyncKind; + want_willSave: bool; + (* textDocument/willSave *) + want_willSaveWaitUntil: bool; + (* textDoc.../willSaveWaitUntil *) + want_didSave: saveOptions option; (* textDocument/didSave *) + } + + (* full only on open. Wire "Incremental" *) + and saveOptions = { + includeText: bool; (* the client should include content on save *) + } +end + +(* Shutdown request, method="shutdown" *) +module Shutdown = struct end + +(* Exit notification, method="exit" *) +module Exit = struct end + +(* Rage request, method="telemetry/rage" *) +module Rage = struct + type result = rageItem list + + and rageItem = { + title: string option; + data: string; + } +end + +(* Code Lens resolve request, method="codeLens/resolve" *) +module CodeLensResolve = struct + type params = CodeLens.t + + and result = CodeLens.t +end + +(* Hover request, method="textDocument/hover" *) +module Hover = struct + type params = TextDocumentPositionParams.t + + and result = hoverResult option + + and hoverResult = { + contents: markedString list; + (* wire: either a single one or an array *) + range: range option; + } +end + +(* PublishDiagnostics notification, method="textDocument/PublishDiagnostics" *) +module PublishDiagnostics = struct + type diagnosticSeverity = + | Error [@value 1] + | Warning [@value 2] + | Information [@value 3] + | Hint [@value 4] + [@@deriving enum] + + type params = publishDiagnosticsParams + + and publishDiagnosticsParams = { + uri: documentUri; + diagnostics: diagnostic list; + } + + and diagnostic = { + range: range; + (* the range at which the message applies *) + severity: diagnosticSeverity option; + (* if omitted, client decides *) + code: diagnosticCode; + (* the diagnostic's code. *) + source: string option; + (* human-readable string, eg. 
typescript/lint *) + message: string; + (* the diagnostic's message *) + relatedInformation: diagnosticRelatedInformation list; + relatedLocations: relatedLocation list; (* legacy FB extension *) + } + + and diagnosticCode = + | IntCode of int + | StringCode of string + | NoCode + + and diagnosticRelatedInformation = { + relatedLocation: Location.t; + (* wire: just "location" *) + relatedMessage: string; (* wire: just "message" *) + } + + (* legacy FB extension *) + and relatedLocation = diagnosticRelatedInformation +end + +(* DidOpenTextDocument notification, method="textDocument/didOpen" *) +module DidOpen = struct + type params = didOpenTextDocumentParams + + and didOpenTextDocumentParams = { + textDocument: TextDocumentItem.t; (* the document that was opened *) + } +end + +(* DidCloseTextDocument notification, method="textDocument/didClose" *) +module DidClose = struct + type params = didCloseTextDocumentParams + + and didCloseTextDocumentParams = { + textDocument: TextDocumentIdentifier.t; (* the doc that was closed *) + } +end + +(* DidSaveTextDocument notification, method="textDocument/didSave" *) +module DidSave = struct + type params = didSaveTextDocumentParams + + and didSaveTextDocumentParams = { + textDocument: TextDocumentIdentifier.t; + (* the doc that was saved *) + text: string option; (* content when saved; depends on includeText *) + } +end + +(* DidChangeTextDocument notification, method="textDocument/didChange" *) +module DidChange = struct + type params = didChangeTextDocumentParams + + and didChangeTextDocumentParams = { + textDocument: VersionedTextDocumentIdentifier.t; + contentChanges: textDocumentContentChangeEvent list; + } + + and textDocumentContentChangeEvent = { + range: range option; + (* the range of the document that changed *) + rangeLength: int option; + (* the length that got replaced *) + text: string; (* the new text of the range/document *) + } +end + +(* Watched files changed notification, method="workspace/didChangeWatchedFiles" *) +module DidChangeWatchedFiles = struct + type registerOptions = { watchers: fileSystemWatcher list } + + and fileSystemWatcher = { globPattern: string } + + type fileChangeType = + | Created [@value 1] + | Updated [@value 2] + | Deleted [@value 3] + [@@deriving enum] + + type params = { changes: fileEvent list } + + and fileEvent = { + uri: documentUri; + type_: fileChangeType; + } +end + +(* Goto Definition request, method="textDocument/definition" *) +module Definition = struct + type params = TextDocumentPositionParams.t + + and result = DefinitionLocation.t list + + (* wire: either a single one or an array *) +end + +(* Goto TypeDefinition request, method="textDocument/typeDefinition" *) +module TypeDefinition = struct + type params = TextDocumentPositionParams.t + + and result = DefinitionLocation.t list +end + +module CodeAction = struct + (* A code action represents a change that can be performed in code, e.g. to fix a problem or + to refactor code. *) + type t = { + (* A short, human-readable, title for this code action. *) + title: string; + (* The kind of the code action. Used to filter code actions. *) + kind: CodeActionKind.t; + (* The diagnostics that this code action resolves. *) + diagnostics: PublishDiagnostics.diagnostic list; + (* A CodeAction must set either `edit` and/or a `command`. + If both are supplied, the `edit` is applied first, then the `command` is executed. 
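A sketch of the two shapes a DidChange.textDocumentContentChangeEvent above can take (not part of this patch; per LSP, omitting the range means the text is the full new document):

(* Full-document sync: no range, text is the entire new contents. *)
let full_change (new_contents : string) : DidChange.textDocumentContentChangeEvent =
  { DidChange.range = None; rangeLength = None; text = new_contents }

(* Incremental sync: the given range is replaced by the given text;
   rangeLength is optional on the wire. *)
let incremental_change (r : range) (replacement : string) :
    DidChange.textDocumentContentChangeEvent =
  { DidChange.range = Some r; rangeLength = None; text = replacement }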
*) + action: edit_and_or_command; + } + + and edit_and_or_command = + | EditOnly of WorkspaceEdit.t + | CommandOnly of Command.t + | BothEditThenCommand of (WorkspaceEdit.t * Command.t) + + type result = command_or_action list + + and command_or_action = + | Command of Command.t + | Action of t +end + +(* Code Action Request, method="textDocument/codeAction" *) +module CodeActionRequest = struct + type params = { + (* The document in which the command was invoked. *) + textDocument: TextDocumentIdentifier.t; + (* The range for which the command was invoked. *) + range: range; + (* Context carrying additional information. *) + context: codeActionContext; + } + + (* Contains additional diagnostic information about the context in which + a code action is run. *) + and codeActionContext = { + diagnostics: PublishDiagnostics.diagnostic list; + only: CodeActionKind.t list option; + } +end + +(* Completion request, method="textDocument/completion" *) +module Completion = struct + (* These numbers should match + * https://microsoft.github.io/language-server-protocol/specification#textDocument_completion + *) + type completionItemKind = + | Text [@value 1] + | Method [@value 2] + | Function [@value 3] + | Constructor [@value 4] + | Field [@value 5] + | Variable [@value 6] + | Class [@value 7] + | Interface [@value 8] + | Module [@value 9] + | Property [@value 10] + | Unit [@value 11] + | Value [@value 12] + | Enum [@value 13] + | Keyword [@value 14] + | Snippet [@value 15] + | Color [@value 16] + | File [@value 17] + | Reference [@value 18] + | Folder [@value 19] + | EnumMember [@value 20] + | Constant [@value 21] + | Struct [@value 22] + | Event [@value 23] + | Operator [@value 24] + | TypeParameter [@value 25] + [@@deriving enum] + + (* These numbers should match + * https://microsoft.github.io/language-server-protocol/specification#textDocument_completion + *) + type insertTextFormat = + | PlainText [@value 1] (* the insertText/textEdits are just plain strings *) + | SnippetFormat [@value 2] (* wire: just "Snippet" *) + [@@deriving enum] + + type completionTriggerKind = + | Invoked [@value 1] + | TriggerCharacter [@value 2] + | TriggerForIncompleteCompletions [@value 3] + [@@deriving enum] + + type params = completionParams + + and completionParams = { + loc: TextDocumentPositionParams.t; + context: completionContext option; + } + + and completionContext = { + triggerKind: completionTriggerKind; + triggerCharacter: string option; + } + + and result = completionList + + (* wire: can also be 'completionItem list' *) + and completionList = { + isIncomplete: bool; + (* further typing should result in recomputing *) + items: completionItem list; + } + + and completionItem = { + label: string; + (* the label in the UI *) + kind: completionItemKind option; + (* tells editor which icon to use *) + detail: string option; + (* human-readable string like type/symbol info *) + inlineDetail: string option; + (* nuclide-specific, right column *) + itemType: string option; + (* nuclide-specific, left column *) + documentation: markedString list option; + (* human-readable doc-comment *) + sortText: string option; + (* used for sorting; if absent, uses label *) + filterText: string option; + (* used for filtering; if absent, uses label *) + insertText: string option; + (* used for inserting; if absent, uses label *) + insertTextFormat: insertTextFormat option; + textEdits: TextEdit.t list; + (* wire: split into hd and tl *) + command: Command.t option; + (* if present, is executed after completion *) + data: 
Hh_json.json option; + } +end + +(* Completion Item Resolve request, method="completionItem/resolve" *) +module CompletionItemResolve = struct + type params = Completion.completionItem + + and result = Completion.completionItem +end + +(* Workspace Symbols request, method="workspace/symbol" *) +module WorkspaceSymbol = struct + type params = workspaceSymbolParams + + and result = SymbolInformation.t list + + and workspaceSymbolParams = { query: string (* a non-empty query string *) } +end + +(* Document Symbols request, method="textDocument/documentSymbol" *) +module DocumentSymbol = struct + type params = documentSymbolParams + + and result = SymbolInformation.t list + + and documentSymbolParams = { textDocument: TextDocumentIdentifier.t } +end + +(* Find References request, method="textDocument/references" *) +module FindReferences = struct + type params = referenceParams + + and result = Location.t list + + and referenceParams = { + loc: TextDocumentPositionParams.t; + (* wire: loc's members are part of referenceParams *) + context: referenceContext; + } + + and referenceContext = { + includeDeclaration: bool; + (* include declaration of current symbol *) + includeIndirectReferences: bool; + } +end + +(* Document Highlights request, method="textDocument/documentHighlight" *) +module DocumentHighlight = struct + type params = TextDocumentPositionParams.t + + type documentHighlightKind = + (* a textual occurrence *) + | Text [@value 1] + (* read-access of a symbol, like reading a variable *) + | Read [@value 2] + (* write-access of a symbol, like writing a variable *) + | Write [@value 3] + [@@deriving enum] + + type result = documentHighlight list + + and documentHighlight = { + range: range; + (* the range this highlight applies to *) + kind: documentHighlightKind option; + } +end + +(* Type Coverage request, method="textDocument/typeCoverage" *) +(* THIS IS A NUCLIDE-SPECIFIC EXTENSION TO LSP. 
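The sortText/filterText/insertText comments in Completion.completionItem above all fall back to the label; a minimal item that relies on that fallback might look like this (an illustrative sketch, not part of this patch):

let minimal_item (label : string) : Completion.completionItem =
  {
    Completion.label;
    kind = Some Completion.Function;
    detail = None;
    inlineDetail = None;
    itemType = None;
    documentation = None;
    sortText = None;   (* falls back to label *)
    filterText = None; (* falls back to label *)
    insertText = None; (* falls back to label *)
    insertTextFormat = Some Completion.PlainText;
    textEdits = [];
    command = None;
    data = None;
  }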
*) +module TypeCoverage = struct + type params = typeCoverageParams + + and result = { + coveredPercent: int; + uncoveredRanges: uncoveredRange list; + defaultMessage: string; + } + + and typeCoverageParams = { textDocument: TextDocumentIdentifier.t } + + and uncoveredRange = { + range: range; + message: string option; + } +end + +(* Document Formatting request, method="textDocument/formatting" *) +module DocumentFormatting = struct + type params = documentFormattingParams + + and result = TextEdit.t list + + and documentFormattingParams = { + textDocument: TextDocumentIdentifier.t; + options: formattingOptions; + } + + and formattingOptions = { + tabSize: int; + (* size of a tab in spaces *) + insertSpaces: bool; + (* prefer spaces over tabs *) + (* omitted: signature for further properties *) + } +end + +(* Document Range Formatting request, method="textDocument/rangeFormatting" *) +module DocumentRangeFormatting = struct + type params = documentRangeFormattingParams + + and result = TextEdit.t list + + and documentRangeFormattingParams = { + textDocument: TextDocumentIdentifier.t; + range: range; + options: DocumentFormatting.formattingOptions; + } +end + +(* Document On Type Formatting req., method="textDocument/onTypeFormatting" *) +module DocumentOnTypeFormatting = struct + type params = documentOnTypeFormattingParams + + and result = TextEdit.t list + + and documentOnTypeFormattingParams = { + textDocument: TextDocumentIdentifier.t; + position: position; + (* the position at which this request was sent *) + ch: string; + (* the character that has been typed *) + options: DocumentFormatting.formattingOptions; + } +end + +(* Document Signature Help request, method="textDocument/signatureHelp" *) +module SignatureHelp = struct + type params = TextDocumentPositionParams.t + + and result = t option + + and t = { + signatures: signature_information list; + activeSignature: int; + activeParameter: int; + } + + and signature_information = { + siginfo_label: string; + siginfo_documentation: string option; + parameters: parameter_information list; + } + + and parameter_information = { + parinfo_label: string; + parinfo_documentation: string option; + } +end + +(* Workspace Rename request, method="textDocument/rename" *) +module Rename = struct + type params = renameParams + + and result = WorkspaceEdit.t + + and renameParams = { + textDocument: TextDocumentIdentifier.t; + position: position; + newName: string; + } +end + +(* Code Lens request, method="textDocument/codeLens" *) +module DocumentCodeLens = struct + type params = codelensParams + + and result = CodeLens.t list + + and codelensParams = { textDocument: TextDocumentIdentifier.t } +end + +(* LogMessage notification, method="window/logMessage" *) +module LogMessage = struct + type params = logMessageParams + + and logMessageParams = { + type_: MessageType.t; + message: string; + } +end + +(* ShowMessage notification, method="window/showMessage" *) +module ShowMessage = struct + type params = showMessageParams + + and showMessageParams = { + type_: MessageType.t; + message: string; + } +end + +(* ShowMessage request, method="window/showMessageRequest" *) +module ShowMessageRequest = struct + type t = + | Present of { id: lsp_id } + | Absent + + and params = showMessageRequestParams + + and result = messageActionItem option + + and showMessageRequestParams = { + type_: MessageType.t; + message: string; + actions: messageActionItem list; + } + + and messageActionItem = { title: string } +end + +(* ShowStatus request, 
method="window/showStatus" *) +module ShowStatus = struct + type params = showStatusParams + + and result = ShowMessageRequest.messageActionItem option + + and showStatusParams = { + request: ShowMessageRequest.showMessageRequestParams; + progress: int option; + total: int option; + shortMessage: string option; + } +end + +(* Progress notification, method="window/progress" *) +module Progress = struct + type t = + | Present of { + id: int; + label: string; + } + | Absent + + and params = progressParams + + and progressParams = { + (* LSP progress notifications have a lifetime that starts with their 1st *) + (* window/progress update message and ends with an update message with *) + (* label = None. They use an ID number (not JsonRPC id) to associate *) + (* multiple messages to a single lifetime stream. *) + id: int; + label: string option; + } +end + +(* ActionRequired notification, method="window/actionRequired" *) +module ActionRequired = struct + type t = + | Present of { + id: int; + label: string; + } + | Absent + + and params = actionRequiredParams + + and actionRequiredParams = { + (* See progressParams.id for an explanation of this field. *) + id: int; + label: string option; + } +end + +(* ConnectionStatus notification, method="telemetry/connectionStatus" *) +module ConnectionStatus = struct + type params = connectionStatusParams + + and connectionStatusParams = { isConnected: bool } +end + +(* Module for dynamic view, method="workspace/toggleTypeCoverage" *) +module ToggleTypeCoverage = struct + type params = toggleTypeCoverageParams + + and toggleTypeCoverageParams = { toggle: bool } +end + +(* ErrorResponse *) +module Error = struct + type t = { + code: int; + message: string; + data: Hh_json.json option; + } + + (* Legacy: some code uses exceptions instead of Error.t. *) + (* Be careful with that since if you unmarshal one then you can't pattern-match it. *) + + (* Defined by JSON-RPC. *) + exception Parse of string (* -32700 *) + + exception InvalidRequest of string (* -32600 *) + + exception MethodNotFound of string (* -32601 *) + + exception InvalidParams of string (* -32602 *) + + exception InternalError of string (* -32603 *) + + exception ServerErrorStart of string * Initialize.errorData (* -32099 *) + + exception ServerErrorEnd of string (* -32000 *) + + exception ServerNotInitialized of string (* -32002 *) + + exception Unknown of string (* -32001 *) + + (* Defined by the protocol. *) + exception RequestCancelled of string (* -32800 *) + + module Code = struct + (* Defined by JSON RPC *) + let parseError = -32700 + + let invalidRequest = -32600 + + let methodNotFound = -32601 + + let invalidParams = -32602 + + let internalError = -32603 + + let serverErrorStart = -32099 + + let serverErrorEnd = -32000 + + let serverNotInitialized = -32002 + + let unknownErrorCode = -32001 + + (* Defined by the protocol. *) + let requestCancelled = -32800 + + let contentModified = -32801 + end +end + +type lsp_registration_options = + | DidChangeWatchedFilesRegistrationOptions of + DidChangeWatchedFiles.registerOptions + +(* Register capability request, method="client/registerCapability" *) +module RegisterCapability = struct + type params = { registrations: registration list } + + and registration = { + id: string; + method_: string; + registerOptions: lsp_registration_options; + } + + let make_registration (registerOptions : lsp_registration_options) : + registration = + (* The ID field is arbitrary but unique per type of capability (for future + deregistering, which we don't do). 
*) + let (id, method_) = + match registerOptions with + | DidChangeWatchedFilesRegistrationOptions _ -> + ("did-change-watched-files", "workspace/didChangeWatchedFiles") + in + { id; method_; registerOptions } +end + +(** + * Here are gathered-up ADTs for all the messages we handle +*) + +type lsp_request = + | InitializeRequest of Initialize.params + | RegisterCapabilityRequest of RegisterCapability.params + | ShutdownRequest + | CodeLensResolveRequest of CodeLensResolve.params + | HoverRequest of Hover.params + | DefinitionRequest of Definition.params + | TypeDefinitionRequest of TypeDefinition.params + | CodeActionRequest of CodeActionRequest.params + | CompletionRequest of Completion.params + | CompletionItemResolveRequest of CompletionItemResolve.params + | WorkspaceSymbolRequest of WorkspaceSymbol.params + | DocumentSymbolRequest of DocumentSymbol.params + | FindReferencesRequest of FindReferences.params + | DocumentHighlightRequest of DocumentHighlight.params + | TypeCoverageRequest of TypeCoverage.params + | DocumentFormattingRequest of DocumentFormatting.params + | DocumentRangeFormattingRequest of DocumentRangeFormatting.params + | DocumentOnTypeFormattingRequest of DocumentOnTypeFormatting.params + | ShowMessageRequestRequest of ShowMessageRequest.params + | ShowStatusRequest of ShowStatus.params + | RageRequest + | RenameRequest of Rename.params + | DocumentCodeLensRequest of DocumentCodeLens.params + | UnknownRequest of string * Hh_json.json option + +type lsp_result = + | InitializeResult of Initialize.result + | ShutdownResult + | CodeLensResolveResult of CodeLensResolve.result + | HoverResult of Hover.result + | DefinitionResult of Definition.result + | TypeDefinitionResult of TypeDefinition.result + | CodeActionResult of CodeAction.result + | CompletionResult of Completion.result + | CompletionItemResolveResult of CompletionItemResolve.result + | WorkspaceSymbolResult of WorkspaceSymbol.result + | DocumentSymbolResult of DocumentSymbol.result + | FindReferencesResult of FindReferences.result + | DocumentHighlightResult of DocumentHighlight.result + | TypeCoverageResult of TypeCoverage.result + | DocumentFormattingResult of DocumentFormatting.result + | DocumentRangeFormattingResult of DocumentRangeFormatting.result + | DocumentOnTypeFormattingResult of DocumentOnTypeFormatting.result + | ShowMessageRequestResult of ShowMessageRequest.result + | ShowStatusResult of ShowStatus.result + | RageResult of Rage.result + | RenameResult of Rename.result + | DocumentCodeLensResult of DocumentCodeLens.result + (* the string is a stacktrace *) + | ErrorResult of Error.t * string + +type lsp_notification = + | ExitNotification + | CancelRequestNotification of CancelRequest.params + | PublishDiagnosticsNotification of PublishDiagnostics.params + | DidOpenNotification of DidOpen.params + | DidCloseNotification of DidClose.params + | DidSaveNotification of DidSave.params + | DidChangeNotification of DidChange.params + | DidChangeWatchedFilesNotification of DidChangeWatchedFiles.params + | LogMessageNotification of LogMessage.params + | TelemetryNotification of LogMessage.params (* LSP allows 'any' but we only send these *) + | ShowMessageNotification of ShowMessage.params + | ProgressNotification of Progress.params + | ActionRequiredNotification of ActionRequired.params + | ConnectionStatusNotification of ConnectionStatus.params + | InitializedNotification + | SetTraceNotification (* $/setTraceNotification *) + | LogTraceNotification (* $/logTraceNotification *) + | UnknownNotification of 
string * Hh_json.json option + +type lsp_message = + | RequestMessage of lsp_id * lsp_request + | ResponseMessage of lsp_id * lsp_result + | NotificationMessage of lsp_notification + +type 'a lsp_handler = 'a lsp_result_handler * 'a lsp_error_handler + +and 'a lsp_error_handler = Error.t * string -> 'a -> 'a + +and 'a lsp_result_handler = + | ShowMessageHandler of (ShowMessageRequest.result -> 'a -> 'a) + | ShowStatusHandler of (ShowStatus.result -> 'a -> 'a) + +module IdKey = struct + type t = lsp_id + + let compare (x : t) (y : t) = + match (x, y) with + | (NumberId x, NumberId y) -> x - y + | (NumberId _, StringId _) -> -1 + | (StringId x, StringId y) -> String.compare x y + | (StringId _, NumberId _) -> 1 +end + +module IdSet = Set.Make (IdKey) +module IdMap = MyMap.Make (IdKey) diff --git a/hack/utils/lsp/lsp.mli b/hack/utils/lsp/lsp.mli new file mode 100644 index 00000000000..315492b89ca --- /dev/null +++ b/hack/utils/lsp/lsp.mli @@ -0,0 +1,1004 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type lsp_id = + | NumberId of int + | StringId of string + +type documentUri = string + +type position = { + line: int; + character: int; +} + +type range = { + start: position; + end_: position; +} + +module Location : sig + type t = { + uri: documentUri; + range: range; + } +end + +module DefinitionLocation : sig + type t = { + location: Location.t; + title: string option; + } +end + +type markedString = + | MarkedString of string + | MarkedCode of string * string + +module Command : sig + type t = { + (* title of the command, like `save` *) + title: string; + (* the identifier of the actual command handler *) + command: string; + (* wire: it can be omitted *) + arguments: Hh_json.json list; + } +end + +module TextEdit : sig + type t = { + range: range; + newText: string; + } +end + +module TextDocumentIdentifier : sig + type t = { uri: documentUri } +end + +module VersionedTextDocumentIdentifier : sig + type t = { + uri: documentUri; + version: int; + } +end + +module TextDocumentEdit : sig + type t = { + textDocument: VersionedTextDocumentIdentifier.t; + edits: TextEdit.t list; + } +end + +module WorkspaceEdit : sig + type t = { + changes: TextEdit.t list SMap.t; (* holds changes to existing docs *) + } +end + +module TextDocumentItem : sig + type t = { + uri: documentUri; + languageId: string; + version: int; + text: string; + } +end + +module CodeLens : sig + type t = { + range: range; + command: Command.t; + data: Hh_json.json option; + } +end + +module TextDocumentPositionParams : sig + type t = { + textDocument: TextDocumentIdentifier.t; + position: position; + } +end + +module DocumentFilter : sig + type t = { + language: string option; + scheme: string option; + pattern: string option; + } +end + +module DocumentSelector : sig + type t = DocumentFilter.t list +end + +module SymbolInformation : sig + type symbolKind = + | File [@value 1] + | Module [@value 2] + | Namespace [@value 3] + | Package [@value 4] + | Class [@value 5] + | Method [@value 6] + | Property [@value 7] + | Field [@value 8] + | Constructor [@value 9] + | Enum [@value 10] + | Interface [@value 11] + | Function [@value 12] + | Variable [@value 13] + | Constant [@value 14] + | String [@value 15] + | Number [@value 16] + | Boolean [@value 17] + | Array [@value 18] + | Object [@value 19] + | Key [@value 20] + | Null [@value 21] + | EnumMember [@value 22] + 
| Struct [@value 23] + [@@deriving enum] + + type t = { + name: string; + kind: symbolKind; + location: Location.t; + containerName: string option; + } +end + +module MessageType : sig + type t = + | ErrorMessage [@value 1] + | WarningMessage [@value 2] + | InfoMessage [@value 3] + | LogMessage [@value 4] + [@@deriving enum] +end + +module CancelRequest : sig + type params = cancelParams + + and cancelParams = { id: lsp_id } +end + +module CodeActionKind : sig + type t = string * string list + + val is_kind : t -> t -> bool + + val contains_kind : t -> t list -> bool + + val contains_kind_opt : default:bool -> t -> t list option -> bool + + val kind_of_string : string -> t + + val string_of_kind : t -> string + + val sub_kind : t -> string -> t + + val quickfix : t + + val source : t +end + +module Initialize : sig + type textDocumentSyncKind = + | NoSync [@value 0] + | FullSync [@value 1] + | IncrementalSync [@value 2] + [@@deriving enum] + + type params = { + processId: int option; + rootPath: string option; + rootUri: documentUri option; + initializationOptions: initializationOptions; + client_capabilities: client_capabilities; + trace: trace; + } + + and result = { server_capabilities: server_capabilities } + + and errorData = { retry: bool } + + and trace = + | Off + | Messages + | Verbose + + and initializationOptions = { + useTextEditAutocomplete: bool; + liveSyntaxErrors: bool; + namingTableSavedStatePath: string option; + sendServerStatusEvents: bool; + } + + and client_capabilities = { + workspace: workspaceClientCapabilities; + textDocument: textDocumentClientCapabilities; + window: windowClientCapabilities; + telemetry: telemetryClientCapabilities; + } + + and workspaceClientCapabilities = { + applyEdit: bool; + workspaceEdit: workspaceEdit; + didChangeWatchedFiles: dynamicRegistration; + } + + and dynamicRegistration = { dynamicRegistration: bool } + + and workspaceEdit = { documentChanges: bool } + + and textDocumentClientCapabilities = { + synchronization: synchronization; + completion: completion; + codeAction: codeAction; + } + + and synchronization = { + can_willSave: bool; + can_willSaveWaitUntil: bool; + can_didSave: bool; + } + + and completion = { completionItem: completionItem } + + and completionItem = { snippetSupport: bool } + + and codeAction = { + codeAction_dynamicRegistration: bool; + codeActionLiteralSupport: codeActionliteralSupport option; + } + + and codeActionliteralSupport = { codeAction_valueSet: CodeActionKind.t list } + + and windowClientCapabilities = { + status: bool; + progress: bool; + actionRequired: bool; + } + + and telemetryClientCapabilities = { connectionStatus: bool } + + and server_capabilities = { + textDocumentSync: textDocumentSyncOptions; + hoverProvider: bool; + completionProvider: completionOptions option; + signatureHelpProvider: signatureHelpOptions option; + definitionProvider: bool; + typeDefinitionProvider: bool; + referencesProvider: bool; + documentHighlightProvider: bool; + documentSymbolProvider: bool; + workspaceSymbolProvider: bool; + codeActionProvider: bool; + codeLensProvider: codeLensOptions option; + documentFormattingProvider: bool; + documentRangeFormattingProvider: bool; + documentOnTypeFormattingProvider: documentOnTypeFormattingOptions option; + renameProvider: bool; + documentLinkProvider: documentLinkOptions option; + executeCommandProvider: executeCommandOptions option; + typeCoverageProvider: bool; + rageProvider: bool; + } + + and completionOptions = { + resolveProvider: bool; + completion_triggerCharacters: 
string list; + } + + and signatureHelpOptions = { sighelp_triggerCharacters: string list } + + and codeLensOptions = { codelens_resolveProvider: bool } + + and documentOnTypeFormattingOptions = { + firstTriggerCharacter: string; + moreTriggerCharacter: string list; + } + + and documentLinkOptions = { doclink_resolveProvider: bool } + + and executeCommandOptions = { commands: string list } + + and textDocumentSyncOptions = { + want_openClose: bool; + want_change: textDocumentSyncKind; + want_willSave: bool; + want_willSaveWaitUntil: bool; + want_didSave: saveOptions option; + } + + and saveOptions = { includeText: bool } +end + +module Shutdown : sig end + +module Exit : sig end + +module Rage : sig + type result = rageItem list + + and rageItem = { + title: string option; + data: string; + } +end + +module CodeLensResolve : sig + type params = CodeLens.t + + and result = CodeLens.t +end + +module Hover : sig + type params = TextDocumentPositionParams.t + + and result = hoverResult option + + and hoverResult = { + contents: markedString list; + range: range option; + } +end + +module PublishDiagnostics : sig + type diagnosticCode = + | IntCode of int + | StringCode of string + | NoCode + + type diagnosticSeverity = + | Error + | Warning + | Information + | Hint + + val min_diagnosticSeverity : int + + val max_diagnosticSeverity : int + + val diagnosticSeverity_to_enum : diagnosticSeverity -> int + + val diagnosticSeverity_of_enum : int -> diagnosticSeverity option + + type params = publishDiagnosticsParams + + and publishDiagnosticsParams = { + uri: documentUri; + diagnostics: diagnostic list; + } + + and diagnostic = { + range: range; + severity: diagnosticSeverity option; + code: diagnosticCode; + source: string option; + message: string; + relatedInformation: diagnosticRelatedInformation list; + relatedLocations: relatedLocation list; + } + + and diagnosticRelatedInformation = { + relatedLocation: Location.t; + relatedMessage: string; + } + + and relatedLocation = diagnosticRelatedInformation +end + +module DidOpen : sig + type params = didOpenTextDocumentParams + + and didOpenTextDocumentParams = { textDocument: TextDocumentItem.t } +end + +module DidClose : sig + type params = didCloseTextDocumentParams + + and didCloseTextDocumentParams = { textDocument: TextDocumentIdentifier.t } +end + +module DidSave : sig + type params = didSaveTextDocumentParams + + and didSaveTextDocumentParams = { + textDocument: TextDocumentIdentifier.t; + text: string option; + } +end + +module DidChange : sig + type params = didChangeTextDocumentParams + + and didChangeTextDocumentParams = { + textDocument: VersionedTextDocumentIdentifier.t; + contentChanges: textDocumentContentChangeEvent list; + } + + and textDocumentContentChangeEvent = { + range: range option; + rangeLength: int option; + text: string; + } +end + +module DidChangeWatchedFiles : sig + type registerOptions = { watchers: fileSystemWatcher list } + + and fileSystemWatcher = { globPattern: string } + + type fileChangeType = + | Created + | Updated + | Deleted + [@@deriving enum] + + type params = { changes: fileEvent list } + + and fileEvent = { + uri: documentUri; + type_: fileChangeType; + } +end + +module Definition : sig + type params = TextDocumentPositionParams.t + + and result = DefinitionLocation.t list +end + +module TypeDefinition : sig + type params = TextDocumentPositionParams.t + + and result = DefinitionLocation.t list +end + +module CodeAction : sig + type t = { + title: string; + kind: CodeActionKind.t; + diagnostics: 
PublishDiagnostics.diagnostic list; + action: edit_and_or_command; + } + + and edit_and_or_command = + | EditOnly of WorkspaceEdit.t + | CommandOnly of Command.t + | BothEditThenCommand of (WorkspaceEdit.t * Command.t) + + type result = command_or_action list + + and command_or_action = + | Command of Command.t + | Action of t +end + +module CodeActionRequest : sig + type params = { + textDocument: TextDocumentIdentifier.t; + range: range; + context: codeActionContext; + } + + and codeActionContext = { + diagnostics: PublishDiagnostics.diagnostic list; + only: CodeActionKind.t list option; + } +end + +module Completion : sig + type completionItemKind = + | Text (* 1 *) + | Method (* 2 *) + | Function (* 3 *) + | Constructor (* 4 *) + | Field (* 5 *) + | Variable (* 6 *) + | Class (* 7 *) + | Interface (* 8 *) + | Module (* 9 *) + | Property (* 10 *) + | Unit (* 11 *) + | Value (* 12 *) + | Enum (* 13 *) + | Keyword (* 14 *) + | Snippet (* 15 *) + | Color (* 16 *) + | File (* 17 *) + | Reference (* 18 *) + | Folder (* 19 *) + | EnumMember (* 20 *) + | Constant (* 21 *) + | Struct (* 22 *) + | Event (* 23 *) + | Operator (* 24 *) + | TypeParameter (* 25 *) + [@@deriving enum] + + type insertTextFormat = + | PlainText (* 1 *) + (* the insertText/textEdits are just plain strings *) + | SnippetFormat (* 2 *) + (* wire: just "Snippet" *) + [@@deriving enum] + + type completionTriggerKind = + | Invoked [@value 1] + | TriggerCharacter [@value 2] + | TriggerForIncompleteCompletions [@value 3] + [@@deriving enum] + + type params = completionParams + + and completionParams = { + loc: TextDocumentPositionParams.t; + context: completionContext option; + } + + and completionContext = { + triggerKind: completionTriggerKind; + triggerCharacter: string option; + } + + and result = completionList + + (* wire: can also be 'completionItem list' *) + and completionList = { + isIncomplete: bool; + (* further typing should result in recomputing *) + items: completionItem list; + } + + and completionItem = { + label: string; + (* the label in the UI *) + kind: completionItemKind option; + (* tells editor which icon to use *) + detail: string option; + (* human-readable string like type/symbol info *) + inlineDetail: string option; + (* nuclide-specific, right column *) + itemType: string option; + (* nuclide-specific, left column *) + documentation: markedString list option; + (* human-readable doc-comment *) + sortText: string option; + (* used for sorting; if absent, uses label *) + filterText: string option; + (* used for filtering; if absent, uses label *) + insertText: string option; + (* used for inserting; if absent, uses label *) + insertTextFormat: insertTextFormat option; + textEdits: TextEdit.t list; + (* wire: split into hd and tl *) + command: Command.t option; + (* if present, is executed after completion *) + data: Hh_json.json option; + } +end + +module CompletionItemResolve : sig + type params = Completion.completionItem + + and result = Completion.completionItem +end + +module WorkspaceSymbol : sig + type params = workspaceSymbolParams + + and result = SymbolInformation.t list + + and workspaceSymbolParams = { query: string } +end + +module DocumentSymbol : sig + type params = documentSymbolParams + + and result = SymbolInformation.t list + + and documentSymbolParams = { textDocument: TextDocumentIdentifier.t } +end + +module FindReferences : sig + type params = referenceParams + + and result = Location.t list + + and referenceParams = { + loc: TextDocumentPositionParams.t; + context: 
referenceContext; + } + + and referenceContext = { + includeDeclaration: bool; + includeIndirectReferences: bool; + } +end + +module DocumentHighlight : sig + type params = TextDocumentPositionParams.t + + type documentHighlightKind = + | Text [@value 1] + | Read [@value 2] + | Write [@value 3] + [@@deriving enum] + + type result = documentHighlight list + + and documentHighlight = { + range: range; + kind: documentHighlightKind option; + } +end + +module TypeCoverage : sig + type params = typeCoverageParams + + and result = { + coveredPercent: int; + uncoveredRanges: uncoveredRange list; + defaultMessage: string; + } + + and typeCoverageParams = { textDocument: TextDocumentIdentifier.t } + + and uncoveredRange = { + range: range; + message: string option; + } +end + +module DocumentFormatting : sig + type params = documentFormattingParams + + and result = TextEdit.t list + + and documentFormattingParams = { + textDocument: TextDocumentIdentifier.t; + options: formattingOptions; + } + + and formattingOptions = { + tabSize: int; + insertSpaces: bool; + } +end + +module DocumentRangeFormatting : sig + type params = documentRangeFormattingParams + + and result = TextEdit.t list + + and documentRangeFormattingParams = { + textDocument: TextDocumentIdentifier.t; + range: range; + options: DocumentFormatting.formattingOptions; + } +end + +module DocumentOnTypeFormatting : sig + type params = documentOnTypeFormattingParams + + and result = TextEdit.t list + + and documentOnTypeFormattingParams = { + textDocument: TextDocumentIdentifier.t; + position: position; + ch: string; + options: DocumentFormatting.formattingOptions; + } +end + +module SignatureHelp : sig + type params = TextDocumentPositionParams.t + + and result = t option + + and t = { + signatures: signature_information list; + activeSignature: int; + activeParameter: int; + } + + and signature_information = { + siginfo_label: string; + siginfo_documentation: string option; + parameters: parameter_information list; + } + + and parameter_information = { + parinfo_label: string; + parinfo_documentation: string option; + } +end + +module Rename : sig + type params = renameParams + + and result = WorkspaceEdit.t + + and renameParams = { + textDocument: TextDocumentIdentifier.t; + position: position; + newName: string; + } +end + +module DocumentCodeLens : sig + type params = codelensParams + + and result = CodeLens.t list + + and codelensParams = { textDocument: TextDocumentIdentifier.t } +end + +module LogMessage : sig + type params = logMessageParams + + and logMessageParams = { + type_: MessageType.t; + message: string; + } +end + +module ShowMessage : sig + type params = showMessageParams + + and showMessageParams = { + type_: MessageType.t; + message: string; + } +end + +module ShowMessageRequest : sig + type t = + | Present of { id: lsp_id } + | Absent + + and params = showMessageRequestParams + + and result = messageActionItem option + + and showMessageRequestParams = { + type_: MessageType.t; + message: string; + actions: messageActionItem list; + } + + and messageActionItem = { title: string } +end + +module ShowStatus : sig + type params = showStatusParams + + and result = ShowMessageRequest.messageActionItem option + + and showStatusParams = { + request: ShowMessageRequest.showMessageRequestParams; + progress: int option; + total: int option; + shortMessage: string option; + } +end + +module Progress : sig + type t = + | Present of { + id: int; + label: string; + } + | Absent + + and params = progressParams + + and progressParams 
= { + id: int; + label: string option; + } +end + +module ActionRequired : sig + type t = + | Present of { + id: int; + label: string; + } + | Absent + + and params = actionRequiredParams + + and actionRequiredParams = { + id: int; + label: string option; + } +end + +module ConnectionStatus : sig + type params = connectionStatusParams + + and connectionStatusParams = { isConnected: bool } +end + +module ToggleTypeCoverage : sig + type params = toggleTypeCoverageParams + + and toggleTypeCoverageParams = { toggle: bool } +end + +module Error : sig + type t = { + code: int; + message: string; + data: Hh_json.json option; + } + + exception Parse of string + + exception InvalidRequest of string + + exception MethodNotFound of string + + exception InvalidParams of string + + exception InternalError of string + + exception ServerErrorStart of string * Initialize.errorData + + exception ServerErrorEnd of string + + exception ServerNotInitialized of string + + exception Unknown of string + + exception RequestCancelled of string + + module Code : sig + val parseError : int + + val invalidRequest : int + + val methodNotFound : int + + val invalidParams : int + + val internalError : int + + val serverErrorStart : int + + val serverErrorEnd : int + + val serverNotInitialized : int + + val unknownErrorCode : int + + val requestCancelled : int + + val contentModified : int + end +end + +type lsp_registration_options = + | DidChangeWatchedFilesRegistrationOptions of + DidChangeWatchedFiles.registerOptions + +module RegisterCapability : sig + type params = { registrations: registration list } + + and registration = { + id: string; + method_: string; + registerOptions: lsp_registration_options; + } + + val make_registration : lsp_registration_options -> registration +end + +type lsp_request = + | InitializeRequest of Initialize.params + | RegisterCapabilityRequest of RegisterCapability.params + | ShutdownRequest + | CodeLensResolveRequest of CodeLensResolve.params + | HoverRequest of Hover.params + | DefinitionRequest of Definition.params + | TypeDefinitionRequest of TypeDefinition.params + | CodeActionRequest of CodeActionRequest.params + | CompletionRequest of Completion.params + | CompletionItemResolveRequest of CompletionItemResolve.params + | WorkspaceSymbolRequest of WorkspaceSymbol.params + | DocumentSymbolRequest of DocumentSymbol.params + | FindReferencesRequest of FindReferences.params + | DocumentHighlightRequest of DocumentHighlight.params + | TypeCoverageRequest of TypeCoverage.params + | DocumentFormattingRequest of DocumentFormatting.params + | DocumentRangeFormattingRequest of DocumentRangeFormatting.params + | DocumentOnTypeFormattingRequest of DocumentOnTypeFormatting.params + | ShowMessageRequestRequest of ShowMessageRequest.params + | ShowStatusRequest of ShowStatus.params + | RageRequest + | RenameRequest of Rename.params + | DocumentCodeLensRequest of DocumentCodeLens.params + | UnknownRequest of string * Hh_json.json option + +type lsp_result = + | InitializeResult of Initialize.result + | ShutdownResult + | CodeLensResolveResult of CodeLensResolve.result + | HoverResult of Hover.result + | DefinitionResult of Definition.result + | TypeDefinitionResult of TypeDefinition.result + | CodeActionResult of CodeAction.result + | CompletionResult of Completion.result + | CompletionItemResolveResult of CompletionItemResolve.result + | WorkspaceSymbolResult of WorkspaceSymbol.result + | DocumentSymbolResult of DocumentSymbol.result + | FindReferencesResult of FindReferences.result + | 
DocumentHighlightResult of DocumentHighlight.result + | TypeCoverageResult of TypeCoverage.result + | DocumentFormattingResult of DocumentFormatting.result + | DocumentRangeFormattingResult of DocumentRangeFormatting.result + | DocumentOnTypeFormattingResult of DocumentOnTypeFormatting.result + | ShowMessageRequestResult of ShowMessageRequest.result + | ShowStatusResult of ShowStatus.result + | RageResult of Rage.result + | RenameResult of Rename.result + | DocumentCodeLensResult of DocumentCodeLens.result + (* the string is a stacktrace *) + | ErrorResult of Error.t * string + +type lsp_notification = + | ExitNotification + | CancelRequestNotification of CancelRequest.params + | PublishDiagnosticsNotification of PublishDiagnostics.params + | DidOpenNotification of DidOpen.params + | DidCloseNotification of DidClose.params + | DidSaveNotification of DidSave.params + | DidChangeNotification of DidChange.params + | DidChangeWatchedFilesNotification of DidChangeWatchedFiles.params + | LogMessageNotification of LogMessage.params + | TelemetryNotification of LogMessage.params (* LSP allows 'any' but we only send these *) + | ShowMessageNotification of ShowMessage.params + | ProgressNotification of Progress.params + | ActionRequiredNotification of ActionRequired.params + | ConnectionStatusNotification of ConnectionStatus.params + | InitializedNotification + | SetTraceNotification (* $/setTraceNotification *) + | LogTraceNotification (* $/logTraceNotification *) + | UnknownNotification of string * Hh_json.json option + +type lsp_message = + | RequestMessage of lsp_id * lsp_request + | ResponseMessage of lsp_id * lsp_result + | NotificationMessage of lsp_notification + +type 'a lsp_handler = 'a lsp_result_handler * 'a lsp_error_handler + +and 'a lsp_error_handler = Error.t * string -> 'a -> 'a + +and 'a lsp_result_handler = + | ShowMessageHandler of (ShowMessageRequest.result -> 'a -> 'a) + | ShowStatusHandler of (ShowStatus.result -> 'a -> 'a) + +module IdKey : sig + type t = lsp_id + + val compare : t -> t -> int +end + +module IdSet : sig + include module type of Set.Make (IdKey) +end + +module IdMap : sig + include module type of MyMap.Make (IdKey) +end diff --git a/hack/utils/lsp/lsp_fmt.ml b/hack/utils/lsp/lsp_fmt.ml new file mode 100644 index 00000000000..c3b91e5d75a --- /dev/null +++ b/hack/utils/lsp/lsp_fmt.ml @@ -0,0 +1,1607 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
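To show how lsp_id, lsp_handler and IdMap declared above are meant to fit together, here is a sketch of tracking outstanding requests (not part of this patch; it assumes IdMap exposes the usual Map operations add, find_opt and remove):

type 'state outstanding = 'state lsp_handler IdMap.t

(* Remember the handler pair for a request we just sent. *)
let track (id : lsp_id) (handler : 'state lsp_handler) (pending : 'state outstanding) :
    'state outstanding =
  IdMap.add id handler pending

(* Dispatch a response to the handler registered under its id. *)
let handle_response (id : lsp_id) (result : lsp_result) (state : 'state)
    (pending : 'state outstanding) : 'state * 'state outstanding =
  match IdMap.find_opt id pending with
  | None -> (state, pending) (* a response to a request we never sent *)
  | Some (on_result, _on_error) ->
    let state =
      match (on_result, result) with
      | (ShowMessageHandler f, ShowMessageRequestResult r) -> f r state
      | (ShowStatusHandler f, ShowStatusResult r) -> f r state
      | _ -> state
    in
    (state, IdMap.remove id pending)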
+ * + *) + +open Hh_core +open Lsp +open Hh_json +open Hh_json_helpers + +(************************************************************************) +(* Miscellaneous LSP structures *) +(************************************************************************) + +let parse_id (json : json) : lsp_id = + match json with + | JSON_Number s -> + begin + try NumberId (int_of_string s) + with Failure _ -> raise (Error.Parse ("float ids not allowed: " ^ s)) + end + | JSON_String s -> StringId s + | _ -> raise (Error.Parse ("not an id: " ^ Hh_json.json_to_string json)) + +let parse_id_opt (json : json option) : lsp_id option = + Option.map json ~f:parse_id + +let print_id (id : lsp_id) : json = + match id with + | NumberId n -> JSON_Number (string_of_int n) + | StringId s -> JSON_String s + +let id_to_string (id : lsp_id) : string = + match id with + | NumberId n -> string_of_int n + | StringId s -> Printf.sprintf "\"%s\"" s + +let parse_position (json : json option) : position = + { + line = Jget.int_exn json "line"; + character = Jget.int_exn json "character"; + } + +let print_position (position : position) : json = + JSON_Object + [ + ("line", position.line |> int_); + ("character", position.character |> int_); + ] + +let print_range (range : range) : json = + JSON_Object + [("start", print_position range.start); ("end", print_position range.end_)] + +let print_location (location : Location.t) : json = + Location.( + JSON_Object + [ + ("uri", JSON_String location.uri); + ("range", print_range location.range); + ]) + +let print_definition_location (definition_location : DefinitionLocation.t) : + json = + DefinitionLocation.( + let location = definition_location.location in + Jprint.object_opt + [ + ("uri", Some (JSON_String location.Location.uri)); + ("range", Some (print_range location.Location.range)); + ("title", Option.map definition_location.title ~f:string_); + ]) + +let parse_range_exn (json : json option) : range = + { + start = Jget.obj_exn json "start" |> parse_position; + end_ = Jget.obj_exn json "end" |> parse_position; + } + +let parse_location (j : json option) : Location.t = + Location. + { + uri = Jget.string_exn j "uri"; + range = Jget.obj_exn j "range" |> parse_range_exn; + } + +let parse_range_opt (json : json option) : range option = + if json = None then + None + else + Some (parse_range_exn json) + +let parse_textDocumentIdentifier (json : json option) : + TextDocumentIdentifier.t = + TextDocumentIdentifier.{ uri = Jget.string_exn json "uri" } + +let parse_versionedTextDocumentIdentifier (json : json option) : + VersionedTextDocumentIdentifier.t = + VersionedTextDocumentIdentifier. + { uri = Jget.string_exn json "uri"; version = Jget.int_d json "version" 0 } + +let parse_textDocumentItem (json : json option) : TextDocumentItem.t = + TextDocumentItem. 
+ { + uri = Jget.string_exn json "uri"; + languageId = Jget.string_d json "languageId" ""; + version = Jget.int_d json "version" 0; + text = Jget.string_exn json "text"; + } + +let print_textDocumentItem (item : TextDocumentItem.t) : json = + TextDocumentItem.( + JSON_Object + [ + ("uri", JSON_String item.uri); + ("languageId", JSON_String item.languageId); + ("version", JSON_Number (string_of_int item.version)); + ("text", JSON_String item.text); + ]) + +let print_markedItem (item : markedString) : json = + match item with + | MarkedString s -> JSON_String s + | MarkedCode (language, value) -> + JSON_Object + [("language", JSON_String language); ("value", JSON_String value)] + +let parse_textDocumentPositionParams (params : json option) : + TextDocumentPositionParams.t = + TextDocumentPositionParams. + { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + position = Jget.obj_exn params "position" |> parse_position; + } + +let parse_textEdit (params : json option) : TextEdit.t option = + match params with + | None -> None + | _ -> + TextEdit.( + Some + { + range = Jget.obj_exn params "range" |> parse_range_exn; + newText = Jget.string_exn params "newText"; + }) + +let print_textEdit (edit : TextEdit.t) : json = + TextEdit.( + JSON_Object + [ + ("range", print_range edit.range); + ("newText", JSON_String edit.newText); + ]) + +let print_workspaceEdit (r : WorkspaceEdit.t) : json = + WorkspaceEdit.( + let print_workspace_edit_changes (uri, text_edits) = + (uri, JSON_Array (List.map ~f:print_textEdit text_edits)) + in + JSON_Object + [ + ( "changes", + JSON_Object + (List.map (SMap.elements r.changes) ~f:print_workspace_edit_changes) + ); + ]) + +let print_command (command : Command.t) : json = + Command.( + JSON_Object + [ + ("title", JSON_String command.title); + ("command", JSON_String command.command); + ("arguments", JSON_Array command.arguments); + ]) + +let parse_command (json : json option) : Command.t = + Command. + { + title = Jget.string_d json "title" ""; + command = Jget.string_d json "command" ""; + arguments = Jget.array_d json "arguments" ~default:[] |> List.filter_opt; + } + +let parse_formattingOptions (json : json option) : + DocumentFormatting.formattingOptions = + { + DocumentFormatting.tabSize = Jget.int_d json "tabSize" 2; + insertSpaces = Jget.bool_d json "insertSpaces" true; + } + +let print_symbolInformation (info : SymbolInformation.t) : json = + SymbolInformation.( + let print_symbol_kind k = int_ (SymbolInformation.symbolKind_to_enum k) in + Jprint.object_opt + [ + ("name", Some (JSON_String info.name)); + ("kind", Some (print_symbol_kind info.kind)); + ("location", Some (print_location info.location)); + ("containerName", Option.map info.containerName string_); + ]) + +let parse_codeLens (json : json option) : CodeLens.t = + CodeLens. 
+ { + range = Jget.obj_exn json "range" |> parse_range_exn; + command = Jget.obj_exn json "command" |> parse_command; + data = Jget.obj_exn json "data"; + } + +let print_codeLens (codeLens : CodeLens.t) : json = + CodeLens.( + JSON_Object + [ + ("range", print_range codeLens.range); + ("command", print_command codeLens.command); + ( "data", + match codeLens.data with + | None -> JSON_Null + | Some json -> json ); + ]) + +(************************************************************************) +(* shutdown request *) +(************************************************************************) + +let print_shutdown () : json = JSON_Null + +(************************************************************************) +(* $/cancelRequest notification *) +(************************************************************************) + +let parse_cancelRequest (params : json option) : CancelRequest.params = + CancelRequest.{ id = Jget.val_exn params "id" |> parse_id } + +let print_cancelRequest (p : CancelRequest.params) : json = + CancelRequest.(JSON_Object [("id", print_id p.id)]) + +(************************************************************************) +(* rage request *) +(************************************************************************) + +let print_rage (r : Rage.result) : json = + Rage.( + let print_item (item : rageItem) : json = + JSON_Object + [ + ("data", JSON_String item.data); + ( "title", + match item.title with + | None -> JSON_Null + | Some s -> JSON_String s ); + ] + in + JSON_Array (List.map r ~f:print_item)) + +(************************************************************************) +(* textDocument/didOpen notification *) +(************************************************************************) + +let parse_didOpen (params : json option) : DidOpen.params = + DidOpen. + { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentItem; + } + +let print_didOpen (params : DidOpen.params) : json = + DidOpen.( + JSON_Object + [("textDocument", params.textDocument |> print_textDocumentItem)]) + +(************************************************************************) +(* textDocument/didClose notification *) +(************************************************************************) + +let parse_didClose (params : json option) : DidClose.params = + DidClose. + { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + } + +(************************************************************************) +(* textDocument/didSave notification *) +(************************************************************************) + +let parse_didSave (params : json option) : DidSave.params = + DidSave. 
+ { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + text = Jget.string_opt params "text"; + } + +(************************************************************************) +(* textDocument/didChange notification *) +(************************************************************************) + +let parse_didChange (params : json option) : DidChange.params = + DidChange.( + let parse_textDocumentContentChangeEvent json = + { + range = Jget.obj_opt json "range" |> parse_range_opt; + rangeLength = Jget.int_opt json "rangeLength"; + text = Jget.string_exn json "text"; + } + in + { + textDocument = + Jget.obj_exn params "textDocument" + |> parse_versionedTextDocumentIdentifier; + contentChanges = + Jget.array_d params "contentChanges" ~default:[] + |> List.map ~f:parse_textDocumentContentChangeEvent; + }) + +(************************************************************************) +(* textDocument/signatureHelp notification *) +(************************************************************************) + +let parse_signatureHelp (params : json option) : SignatureHelp.params = + parse_textDocumentPositionParams params + +let print_signatureHelp (r : SignatureHelp.result) : json = + SignatureHelp.( + let print_parInfo parInfo = + Jprint.object_opt + [ + ("label", Some (Hh_json.JSON_String parInfo.parinfo_label)); + ( "documentation", + Option.map ~f:Hh_json.string_ parInfo.parinfo_documentation ); + ] + in + let print_sigInfo sigInfo = + Jprint.object_opt + [ + ("label", Some (Hh_json.JSON_String sigInfo.siginfo_label)); + ( "documentation", + Option.map ~f:Hh_json.string_ sigInfo.siginfo_documentation ); + ( "parameters", + Some + (Hh_json.JSON_Array + (List.map ~f:print_parInfo sigInfo.parameters)) ); + ] + in + match r with + | None -> Hh_json.JSON_Null + | Some r -> + Hh_json.JSON_Object + [ + ( "signatures", + Hh_json.JSON_Array (List.map ~f:print_sigInfo r.signatures) ); + ("activeSignature", Hh_json.int_ r.activeSignature); + ("activeParameter", Hh_json.int_ r.activeParameter); + ]) + +(************************************************************************) +(* codeLens/resolve Request *) +(************************************************************************) + +let parse_codeLensResolve (params : json option) : CodeLensResolve.params = + parse_codeLens params + +let print_codeLensResolve (r : CodeLensResolve.result) : json = + print_codeLens r + +(************************************************************************) +(* textDocument/rename Request *) +(************************************************************************) + +let parse_documentRename (params : json option) : Rename.params = + Rename. + { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + position = Jget.obj_exn params "position" |> parse_position; + newName = Jget.string_exn params "newName"; + } + +let print_documentRename : Rename.result -> json = print_workspaceEdit + +(************************************************************************) +(* textDocument/codeLens Request *) +(************************************************************************) + +let parse_documentCodeLens (params : json option) : DocumentCodeLens.params = + DocumentCodeLens. 
+ { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + } + +let print_documentCodeLens (r : DocumentCodeLens.result) : json = + JSON_Array (List.map r ~f:print_codeLens) + +(************************************************************************) +(* textDocument/publishDiagnostics notification *) +(************************************************************************) + +let print_diagnostic (diagnostic : PublishDiagnostics.diagnostic) : json = + PublishDiagnostics.( + let print_diagnosticSeverity = + Fn.compose int_ diagnosticSeverity_to_enum + in + let print_diagnosticCode = function + | IntCode i -> Some (int_ i) + | StringCode s -> Some (string_ s) + | NoCode -> None + in + let print_related (related : relatedLocation) : json = + Hh_json.JSON_Object + [ + ("location", print_location related.relatedLocation); + ("message", string_ related.relatedMessage); + ] + in + Jprint.object_opt + [ + ("range", Some (print_range diagnostic.range)); + ("severity", Option.map diagnostic.severity print_diagnosticSeverity); + ("code", print_diagnosticCode diagnostic.code); + ("source", Option.map diagnostic.source string_); + ("message", Some (JSON_String diagnostic.message)); + ( "relatedInformation", + Some + (JSON_Array + (List.map diagnostic.relatedInformation ~f:print_related)) ); + ( "relatedLocations", + Some + (JSON_Array (List.map diagnostic.relatedLocations ~f:print_related)) + ); + ]) + +let print_diagnostic_list (ds : PublishDiagnostics.diagnostic list) : json = + JSON_Array (List.map ds ~f:print_diagnostic) + +let print_diagnostics (r : PublishDiagnostics.params) : json = + PublishDiagnostics.( + JSON_Object + [ + ("uri", JSON_String r.uri); + ("diagnostics", print_diagnostic_list r.diagnostics); + ]) + +let parse_diagnostic (j : json option) : PublishDiagnostics.diagnostic = + PublishDiagnostics.( + let parse_code = function + | None -> NoCode + | Some (JSON_String s) -> StringCode s + | Some (JSON_Number s) -> + begin + try IntCode (int_of_string s) + with Failure _ -> + let msg = "Diagnostic code expected to be an int: " ^ s in + raise (Error.Parse msg) + end + | _ -> + raise (Error.Parse "Diagnostic code expected to be an int or string") + in + let parse_info j = + { + relatedLocation = Jget.obj_exn j "location" |> parse_location; + relatedMessage = Jget.string_exn j "message"; + } + in + { + range = Jget.obj_exn j "range" |> parse_range_exn; + severity = + Jget.int_opt j "severity" + |> Option.map ~f:diagnosticSeverity_of_enum + |> Option.join; + code = Jget.val_opt j "code" |> parse_code; + source = Jget.string_opt j "source"; + message = Jget.string_exn j "message"; + relatedInformation = + Jget.array_d j "relatedInformation" ~default:[] + |> List.map ~f:parse_info; + relatedLocations = + Jget.array_d j "relatedLocations" ~default:[] |> List.map ~f:parse_info; + }) + +let parse_kind json : CodeActionKind.t option = + CodeActionKind.( + match json with + | Some (JSON_String s) -> Some (kind_of_string s) + | _ -> None) + +let parse_kinds jsons : CodeActionKind.t list = + List.map ~f:parse_kind jsons |> List.filter_opt + +let parse_codeActionRequest (j : json option) : CodeActionRequest.params = + CodeActionRequest.( + let parse_context c : CodeActionRequest.codeActionContext = + { + diagnostics = + Jget.array_exn c "diagnostics" |> List.map ~f:parse_diagnostic; + only = Jget.array_opt c "only" |> Option.map ~f:parse_kinds; + } + in + { + textDocument = + Jget.obj_exn j "textDocument" |> parse_textDocumentIdentifier; + range = Jget.obj_exn j 
"range" |> parse_range_exn; + context = Jget.obj_exn j "context" |> parse_context; + }) + +(************************************************************************) +(* textDocument/CodeAction result *) +(************************************************************************) + +let print_codeAction (c : CodeAction.t) : json = + CodeAction.( + let (edit, command) = + match c.action with + | EditOnly e -> (Some e, None) + | CommandOnly c -> (None, Some c) + | BothEditThenCommand (e, c) -> (Some e, Some c) + in + Jprint.object_opt + [ + ("title", Some (JSON_String c.title)); + ("kind", Some (JSON_String (CodeActionKind.string_of_kind c.kind))); + ("diagnostics", Some (print_diagnostic_list c.diagnostics)); + ("edit", Option.map edit ~f:print_documentRename); + ("command", Option.map command ~f:print_command); + ]) + +let print_codeActionResult (c : CodeAction.result) : json = + CodeAction.( + let print_command_or_action = function + | Command c -> print_command c + | Action c -> print_codeAction c + in + JSON_Array (List.map c ~f:print_command_or_action)) + +(* print_codeAction *) + +(************************************************************************) +(* window/logMessage notification *) +(************************************************************************) + +let print_logMessage (type_ : MessageType.t) (message : string) : json = + LogMessage.( + let r = { type_; message } in + JSON_Object + [ + ("type", int_ (MessageType.to_enum r.type_)); + ("message", JSON_String r.message); + ]) + +(************************************************************************) +(* window/showMessage notification *) +(************************************************************************) + +let print_showMessage (type_ : MessageType.t) (message : string) : json = + ShowMessage.( + let r = { type_; message } in + JSON_Object + [ + ("type", int_ (MessageType.to_enum r.type_)); + ("message", JSON_String r.message); + ]) + +(************************************************************************) +(* window/showMessage request *) +(************************************************************************) + +let print_showMessageRequest (r : ShowMessageRequest.showMessageRequestParams) + : json = + let print_action (action : ShowMessageRequest.messageActionItem) : json = + JSON_Object [("title", JSON_String action.ShowMessageRequest.title)] + in + Jprint.object_opt + [ + ("type", Some (int_ (MessageType.to_enum r.ShowMessageRequest.type_))); + ("message", Some (JSON_String r.ShowMessageRequest.message)); + ( "actions", + Some + (JSON_Array (List.map r.ShowMessageRequest.actions ~f:print_action)) + ); + ] + +let parse_result_showMessageRequest (result : json option) : + ShowMessageRequest.result = + ShowMessageRequest.( + let title = Jget.string_opt result "title" in + Option.map title ~f:(fun title -> { title })) + +(************************************************************************) +(* window/showStatus request *) +(************************************************************************) + +let print_showStatus (r : ShowStatus.showStatusParams) : json = + let print_action (action : ShowMessageRequest.messageActionItem) : json = + JSON_Object [("title", JSON_String action.ShowMessageRequest.title)] + in + let rr = r.ShowStatus.request in + Jprint.object_opt + [ + ("type", Some (int_ (MessageType.to_enum rr.ShowMessageRequest.type_))); + ( "actions", + Some + (JSON_Array (List.map rr.ShowMessageRequest.actions ~f:print_action)) + ); + ("message", Some (JSON_String 
rr.ShowMessageRequest.message)); + ("shortMessage", Option.map r.ShowStatus.shortMessage ~f:string_); + ( "progress", + Option.map r.ShowStatus.progress ~f:(fun progress -> + Jprint.object_opt + [ + ("numerator", Some (int_ progress)); + ("denominator", Option.map r.ShowStatus.total ~f:int_); + ]) ); + ] + +(************************************************************************) +(* window/progress notification *) +(************************************************************************) + +let print_progress (id : int) (label : string option) : json = + let r = { Progress.id; label } in + JSON_Object + [ + ("id", r.Progress.id |> int_); + ( "label", + match r.Progress.label with + | None -> JSON_Null + | Some s -> JSON_String s ); + ] + +(************************************************************************) +(* window/actionRequired notification *) +(************************************************************************) + +let print_actionRequired (id : int) (label : string option) : json = + let r = { ActionRequired.id; label } in + JSON_Object + [ + ("id", r.ActionRequired.id |> int_); + ( "label", + match r.ActionRequired.label with + | None -> JSON_Null + | Some s -> JSON_String s ); + ] + +(************************************************************************) +(* telemetry/connectionStatus notification *) +(************************************************************************) + +let print_connectionStatus (p : ConnectionStatus.params) : json = + ConnectionStatus.(JSON_Object [("isConnected", JSON_Bool p.isConnected)]) + +(************************************************************************) +(* textDocument/hover request *) +(************************************************************************) + +let parse_hover (params : json option) : Hover.params = + parse_textDocumentPositionParams params + +let print_hover (r : Hover.result) : json = + Hover.( + match r with + | None -> JSON_Null + | Some r -> + Jprint.object_opt + [ + ( "contents", + Some (JSON_Array (List.map r.Hover.contents ~f:print_markedItem)) + ); + ("range", Option.map r.range ~f:print_range); + ]) + +(************************************************************************) +(* textDocument/definition request *) +(************************************************************************) + +let parse_definition (params : json option) : Definition.params = + parse_textDocumentPositionParams params + +let print_definition (r : Definition.result) : json = + JSON_Array (List.map r ~f:print_definition_location) + +(************************************************************************) +(* completionItem/resolve request *) +(************************************************************************) + +let parse_completionItem (params : json option) : CompletionItemResolve.params + = + Completion.( + let textEdits = + Jget.obj_opt params "textEdit" + :: Jget.array_d params "additionalTextEdits" ~default:[] + |> List.filter_map ~f:parse_textEdit + in + let command = + match Jget.obj_opt params "command" with + | None -> None + | c -> Some (parse_command c) + in + { + label = Jget.string_exn params "label"; + kind = + Option.bind (Jget.int_opt params "kind") completionItemKind_of_enum; + detail = Jget.string_opt params "detail"; + inlineDetail = Jget.string_opt params "inlineDetail"; + itemType = Jget.string_opt params "itemType"; + documentation = None; + sortText = Jget.string_opt params "sortText"; + filterText = Jget.string_opt params "filterText"; + insertText = Jget.string_opt params 
"insertText"; + insertTextFormat = + Option.bind + (Jget.int_opt params "insertTextFormat") + insertTextFormat_of_enum; + textEdits; + command; + data = Jget.obj_opt params "data"; + }) + +let string_of_markedString (acc : string) (marked : markedString) : string = + match marked with + | MarkedCode (lang, code) -> + acc ^ "```" ^ lang ^ "\n" ^ code ^ "\n" ^ "```\n" + | MarkedString str -> acc ^ str ^ "\n" + +let print_completionItem (item : Completion.completionItem) : json = + Completion.( + Jprint.object_opt + [ + ("label", Some (JSON_String item.label)); + ( "kind", + Option.map item.kind (fun x -> int_ @@ completionItemKind_to_enum x) + ); + ("detail", Option.map item.detail string_); + ("inlineDetail", Option.map item.inlineDetail string_); + ("itemType", Option.map item.itemType string_); + ( "documentation", + Option.map item.documentation ~f:(fun doc -> + JSON_Object + [ + ("kind", JSON_String "markdown"); + ( "value", + JSON_String + (String.trim + (List.fold doc ~init:"" ~f:string_of_markedString)) ); + ]) ); + ("sortText", Option.map item.sortText string_); + ("filterText", Option.map item.filterText string_); + ("insertText", Option.map item.insertText string_); + ( "insertTextFormat", + Option.map item.insertTextFormat (fun x -> + int_ @@ insertTextFormat_to_enum x) ); + ("textEdit", Option.map (List.hd item.textEdits) print_textEdit); + ( "additionalTextEdits", + match List.tl item.textEdits with + | None + | Some [] -> + None + | Some l -> Some (JSON_Array (List.map l ~f:print_textEdit)) ); + ("command", Option.map item.command print_command); + ("data", item.data); + ]) + +(************************************************************************) +(* textDocument/completion request *) +(************************************************************************) + +let parse_completion (params : json option) : Completion.params = + Lsp.Completion.( + let context = Jget.obj_opt params "context" in + { + loc = parse_textDocumentPositionParams params; + context = + (match context with + | Some _ -> + let tk = Jget.int_exn context "triggerKind" in + Some + { + triggerKind = + Option.value_exn + ~message:(Printf.sprintf "Unsupported trigger kind: %d" tk) + (Lsp.Completion.completionTriggerKind_of_enum tk); + triggerCharacter = Jget.string_opt context "triggerCharacter"; + } + | None -> None); + }) + +let print_completion (r : Completion.result) : json = + Completion.( + JSON_Object + [ + ("isIncomplete", JSON_Bool r.isIncomplete); + ("items", JSON_Array (List.map r.items ~f:print_completionItem)); + ]) + +(************************************************************************) +(* workspace/symbol request *) +(************************************************************************) + +let parse_workspaceSymbol (params : json option) : WorkspaceSymbol.params = + WorkspaceSymbol.{ query = Jget.string_exn params "query" } + +let print_workspaceSymbol (r : WorkspaceSymbol.result) : json = + JSON_Array (List.map r ~f:print_symbolInformation) + +(************************************************************************) +(* textDocument/documentSymbol request *) +(************************************************************************) + +let parse_documentSymbol (params : json option) : DocumentSymbol.params = + DocumentSymbol. 
+ { + textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + } + +let print_documentSymbol (r : DocumentSymbol.result) : json = + JSON_Array (List.map r ~f:print_symbolInformation) + +(************************************************************************) +(* textDocument/references request *) +(************************************************************************) + +let parse_findReferences (params : json option) : FindReferences.params = + let context = Jget.obj_opt params "context" in + { + FindReferences.loc = parse_textDocumentPositionParams params; + context = + { + FindReferences.includeDeclaration = + Jget.bool_d context "includeDeclaration" true; + includeIndirectReferences = + Jget.bool_d context "includeIndirectReferences" false; + }; + } + +let print_findReferences (r : Location.t list) : json = + JSON_Array (List.map r ~f:print_location) + +(************************************************************************) +(* textDocument/documentHighlight request *) +(************************************************************************) + +let parse_documentHighlight (params : json option) : DocumentHighlight.params = + parse_textDocumentPositionParams params + +let print_documentHighlight (r : DocumentHighlight.result) : json = + DocumentHighlight.( + let print_highlightKind kind = int_ (documentHighlightKind_to_enum kind) in + let print_highlight highlight = + Jprint.object_opt + [ + ("range", Some (print_range highlight.range)); + ("kind", Option.map highlight.kind ~f:print_highlightKind); + ] + in + JSON_Array (List.map r ~f:print_highlight)) + +(************************************************************************) +(* textDocument/typeCoverage request *) +(************************************************************************) + +let parse_typeCoverage (params : json option) : TypeCoverage.params = + { + TypeCoverage.textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + } + +let print_typeCoverage (r : TypeCoverage.result) : json = + TypeCoverage.( + let print_uncov (uncov : uncoveredRange) : json = + Jprint.object_opt + [ + ("range", Some (print_range uncov.range)); + ("message", Option.map uncov.message ~f:string_); + ] + in + JSON_Object + [ + ("coveredPercent", int_ r.coveredPercent); + ( "uncoveredRanges", + JSON_Array (List.map r.uncoveredRanges ~f:print_uncov) ); + ("defaultMessage", JSON_String r.defaultMessage); + ]) + +(************************************************************************) +(* workspace/toggleTypeCoverage request *) +(************************************************************************) +let parse_toggleTypeCoverage (params : json option) : ToggleTypeCoverage.params + = + { ToggleTypeCoverage.toggle = Jget.bool_d params "toggle" ~default:false } + +(************************************************************************) +(* textDocument/formatting request *) +(************************************************************************) + +let parse_documentFormatting (params : json option) : DocumentFormatting.params + = + { + DocumentFormatting.textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + options = Jget.obj_opt params "options" |> parse_formattingOptions; + } + +let print_documentFormatting (r : DocumentFormatting.result) : json = + JSON_Array (List.map r ~f:print_textEdit) + +(************************************************************************) +(* textDocument/rangeFormatting request *) 
+(************************************************************************) + +let parse_documentRangeFormatting (params : json option) : + DocumentRangeFormatting.params = + { + DocumentRangeFormatting.textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + range = Jget.obj_exn params "range" |> parse_range_exn; + options = Jget.obj_opt params "options" |> parse_formattingOptions; + } + +let print_documentRangeFormatting (r : DocumentRangeFormatting.result) : json = + JSON_Array (List.map r ~f:print_textEdit) + +(************************************************************************) +(* textDocument/onTypeFormatting request *) +(************************************************************************) + +let parse_documentOnTypeFormatting (params : json option) : + DocumentOnTypeFormatting.params = + { + DocumentOnTypeFormatting.textDocument = + Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; + position = Jget.obj_exn params "position" |> parse_position; + ch = Jget.string_exn params "ch"; + options = Jget.obj_opt params "options" |> parse_formattingOptions; + } + +let print_documentOnTypeFormatting (r : DocumentOnTypeFormatting.result) : json + = + JSON_Array (List.map r ~f:print_textEdit) + +(************************************************************************) +(* initialize request *) +(************************************************************************) + +let parse_initialize (params : json option) : Initialize.params = + Initialize.( + let rec parse_initialize json = + { + processId = Jget.int_opt json "processId"; + rootPath = Jget.string_opt json "rootPath"; + rootUri = Jget.string_opt json "rootUri"; + initializationOptions = + Jget.obj_opt json "initializationOptions" + |> parse_initializationOptions; + client_capabilities = + Jget.obj_opt json "capabilities" |> parse_capabilities; + trace = Jget.string_opt json "trace" |> parse_trace; + } + and parse_trace (s : string option) : trace = + match s with + | Some "messages" -> Messages + | Some "verbose" -> Verbose + | _ -> Off + and parse_initializationOptions json = + { + useTextEditAutocomplete = + Jget.bool_d json "useTextEditAutocomplete" ~default:false; + liveSyntaxErrors = Jget.bool_d json "liveSyntaxErrors" ~default:true; + namingTableSavedStatePath = + Jget.string_opt json "namingTableSavedStatePath"; + sendServerStatusEvents = + Jget.bool_d json "sendServerStatusEvents" ~default:false; + } + and parse_capabilities json = + { + workspace = Jget.obj_opt json "workspace" |> parse_workspace; + textDocument = Jget.obj_opt json "textDocument" |> parse_textDocument; + window = Jget.obj_opt json "window" |> parse_window; + telemetry = Jget.obj_opt json "telemetry" |> parse_telemetry; + } + and parse_workspace json = + { + applyEdit = Jget.bool_d json "applyEdit" ~default:false; + workspaceEdit = + Jget.obj_opt json "workspaceEdit" |> parse_workspaceEdit; + didChangeWatchedFiles = + Jget.obj_opt json "didChangeWatchedFiles" + |> parse_dynamicRegistration; + } + and parse_dynamicRegistration json = + { + dynamicRegistration = + Jget.bool_d json "dynamicRegistration" ~default:false; + } + and parse_workspaceEdit json = + { documentChanges = Jget.bool_d json "documentChanges" ~default:false } + and parse_textDocument json = + { + synchronization = + Jget.obj_opt json "synchronization" |> parse_synchronization; + completion = Jget.obj_opt json "completion" |> parse_completion; + codeAction = Jget.obj_opt json "codeAction" |> parse_codeAction; + } + and 
parse_synchronization json = + { + can_willSave = Jget.bool_d json "willSave" ~default:false; + can_willSaveWaitUntil = + Jget.bool_d json "willSaveWaitUntil" ~default:false; + can_didSave = Jget.bool_d json "didSave" ~default:false; + } + and parse_completion json = + { + completionItem = + Jget.obj_opt json "completionItem" |> parse_completionItem; + } + and parse_completionItem json = + { snippetSupport = Jget.bool_d json "snippetSupport" ~default:false } + and parse_codeAction json = + { + codeAction_dynamicRegistration = + Jget.bool_d json "dynamicRegistration" ~default:false; + codeActionLiteralSupport = + Jget.obj_opt json "codeActionLiteralSupport" + |> parse_codeActionLiteralSupport; + } + and parse_codeActionLiteralSupport json = + Option.( + Jget.array_opt json "valueSet" + >>= (fun ls -> Some { codeAction_valueSet = parse_kinds ls })) + and parse_window json = + { + status = Jget.obj_opt json "status" |> Option.is_some; + progress = Jget.obj_opt json "progress" |> Option.is_some; + actionRequired = Jget.obj_opt json "actionRequired" |> Option.is_some; + } + and parse_telemetry json = + { + connectionStatus = + Jget.obj_opt json "connectionStatus" |> Option.is_some; + } + in + parse_initialize params) + +let print_initializeError (r : Initialize.errorData) : json = + Initialize.(JSON_Object [("retry", JSON_Bool r.retry)]) + +let print_initialize (r : Initialize.result) : json = + Initialize.( + let print_textDocumentSyncKind kind = + int_ (textDocumentSyncKind_to_enum kind) + in + let cap = r.server_capabilities in + let sync = cap.textDocumentSync in + JSON_Object + [ + ( "capabilities", + Jprint.object_opt + [ + ( "textDocumentSync", + Some + (Jprint.object_opt + [ + ("openClose", Some (JSON_Bool sync.want_openClose)); + ( "change", + Some (print_textDocumentSyncKind sync.want_change) ); + ("willSave", Some (JSON_Bool sync.want_willSave)); + ( "willSaveWaitUntil", + Some (JSON_Bool sync.want_willSaveWaitUntil) ); + ( "save", + Option.map sync.want_didSave ~f:(fun save -> + JSON_Object + [("includeText", JSON_Bool save.includeText)]) + ); + ]) ); + ("hoverProvider", Some (JSON_Bool cap.hoverProvider)); + ( "completionProvider", + Option.map cap.completionProvider ~f:(fun comp -> + JSON_Object + [ + ("resolveProvider", JSON_Bool comp.resolveProvider); + ( "triggerCharacters", + Jprint.string_array comp.completion_triggerCharacters + ); + ]) ); + ( "signatureHelpProvider", + Option.map cap.signatureHelpProvider ~f:(fun shp -> + JSON_Object + [ + ( "triggerCharacters", + Jprint.string_array shp.sighelp_triggerCharacters ); + ]) ); + ("definitionProvider", Some (JSON_Bool cap.definitionProvider)); + ( "typeDefinitionProvider", + Some (JSON_Bool cap.typeDefinitionProvider) ); + ("referencesProvider", Some (JSON_Bool cap.referencesProvider)); + ( "documentHighlightProvider", + Some (JSON_Bool cap.documentHighlightProvider) ); + ( "documentSymbolProvider", + Some (JSON_Bool cap.documentSymbolProvider) ); + ( "workspaceSymbolProvider", + Some (JSON_Bool cap.workspaceSymbolProvider) ); + ("codeActionProvider", Some (JSON_Bool cap.codeActionProvider)); + ( "codeLensProvider", + Option.map cap.codeLensProvider ~f:(fun codelens -> + JSON_Object + [ + ( "resolveProvider", + JSON_Bool codelens.codelens_resolveProvider ); + ]) ); + ( "documentFormattingProvider", + Some (JSON_Bool cap.documentFormattingProvider) ); + ( "documentRangeFormattingProvider", + Some (JSON_Bool cap.documentRangeFormattingProvider) ); + ( "documentOnTypeFormattingProvider", + Option.map 
cap.documentOnTypeFormattingProvider ~f:(fun o -> + JSON_Object + [ + ( "firstTriggerCharacter", + JSON_String o.firstTriggerCharacter ); + ( "moreTriggerCharacter", + Jprint.string_array o.moreTriggerCharacter ); + ]) ); + ("renameProvider", Some (JSON_Bool cap.renameProvider)); + ( "documentLinkProvider", + Option.map cap.documentLinkProvider ~f:(fun dlp -> + JSON_Object + [ + ( "resolveProvider", + JSON_Bool dlp.doclink_resolveProvider ); + ]) ); + ( "executeCommandProvider", + Option.map cap.executeCommandProvider ~f:(fun p -> + JSON_Object [("commands", Jprint.string_array p.commands)]) + ); + ( "typeCoverageProvider", + Some (JSON_Bool cap.typeCoverageProvider) ); + ("rageProvider", Some (JSON_Bool cap.rageProvider)); + ] ); + ]) + +(************************************************************************) +(* capabilities *) +(************************************************************************) + +let print_registrationOptions (registerOptions : Lsp.lsp_registration_options) + : Hh_json.json = + match registerOptions with + | Lsp.DidChangeWatchedFilesRegistrationOptions registerOptions -> + Lsp.DidChangeWatchedFiles.( + JSON_Object + [ + ( "watchers", + JSON_Array + (List.map registerOptions.watchers ~f:(fun watcher -> + JSON_Object + [ + ("globPattern", JSON_String watcher.globPattern); + ("kind", int_ 7); + (* all events: create, change, and delete *) + + ])) ); + ]) + +let print_registerCapability (params : Lsp.RegisterCapability.params) : + Hh_json.json = + Lsp.RegisterCapability.( + JSON_Object + [ + ( "registrations", + JSON_Array + (List.map params.registrations ~f:(fun registration -> + JSON_Object + [ + ("id", string_ registration.id); + ("method", string_ registration.method_); + ( "registerOptions", + print_registrationOptions registration.registerOptions + ); + ])) ); + ]) + +let parse_didChangeWatchedFiles (json : Hh_json.json option) : + DidChangeWatchedFiles.params = + let changes = + Jget.array_exn json "changes" + |> List.map ~f:(fun change -> + let uri = Jget.string_exn change "uri" in + let type_ = Jget.int_exn change "type" in + let type_ = + match DidChangeWatchedFiles.fileChangeType_of_enum type_ with + | Some type_ -> type_ + | None -> + failwith (Printf.sprintf "Invalid file change type %d" type_) + in + { DidChangeWatchedFiles.uri; type_ }) + in + { DidChangeWatchedFiles.changes } + +(************************************************************************) +(* error response *) +(************************************************************************) + +let error_of_exn (e : exn) : Lsp.Error.t = + Lsp.Error.( + match e with + | Error.Parse message -> { code = Code.parseError; message; data = None } + | Error.InvalidRequest message -> + { code = Code.invalidRequest; message; data = None } + | Error.MethodNotFound message -> + { code = Code.methodNotFound; message; data = None } + | Error.InvalidParams message -> + { code = Code.invalidParams; message; data = None } + | Error.InternalError message -> + { code = Code.internalError; message; data = None } + | Error.ServerErrorStart (message, data) -> + { + code = Code.serverErrorStart; + message; + data = Some (print_initializeError data); + } + | Error.ServerErrorEnd message -> + { code = Code.serverErrorEnd; message; data = None } + | Error.ServerNotInitialized message -> + { code = Code.serverNotInitialized; message; data = None } + | Error.Unknown message -> + { code = Code.unknownErrorCode; message; data = None } + | Error.RequestCancelled message -> + { code = Code.requestCancelled; message; data = 
None } + | Exit_status.Exit_with code -> + { + code = Code.unknownErrorCode; + message = Exit_status.to_string code; + data = None; + } + | _ -> + { + code = Code.unknownErrorCode; + message = Printexc.to_string e; + data = None; + }) + +let print_error + ?(include_error_stack_trace = true) (e : Error.t) (stack : string) : json = + Hh_json.( + Error.( + let entries = + if include_error_stack_trace then + let stack_json_property = ("stack", string_ stack) in + (* We'd like to add a stack-trace. The only place we can fit it, that will *) + (* be respected by vscode-jsonrpc, is inside the 'data' field. And we can *) + (* do that only if data is an object. We can synthesize one if needed. *) + let data = + match e.data with + | None -> JSON_Object [stack_json_property] + | Some (JSON_Object o) -> JSON_Object (stack_json_property :: o) + | Some primitive -> primitive + in + [("data", data)] + else + [] + in + let entries = + ("code", int_ e.code) :: ("message", string_ e.message) :: entries + in + JSON_Object entries)) + +let parse_error (error : json) : Error.t = + let json = Some error in + let code = Jget.int_exn json "code" in + let message = Jget.string_exn json "message" in + let data = Jget.val_opt json "data" in + { Error.code; message; data } + +(************************************************************************) +(* universal parser+printer *) +(************************************************************************) + +let request_name_to_string (request : lsp_request) : string = + match request with + | ShowMessageRequestRequest _ -> "window/showMessageRequest" + | ShowStatusRequest _ -> "window/showStatus" + | InitializeRequest _ -> "initialize" + | RegisterCapabilityRequest _ -> "client/registerCapability" + | ShutdownRequest -> "shutdown" + | CodeLensResolveRequest _ -> "codeLens/resolve" + | HoverRequest _ -> "textDocument/hover" + | CodeActionRequest _ -> "textDocument/codeAction" + | CompletionRequest _ -> "textDocument/completion" + | CompletionItemResolveRequest _ -> "completionItem/resolve" + | DefinitionRequest _ -> "textDocument/definition" + | TypeDefinitionRequest _ -> "textDocument/typeDefinition" + | WorkspaceSymbolRequest _ -> "workspace/symbol" + | DocumentSymbolRequest _ -> "textDocument/documentSymbol" + | FindReferencesRequest _ -> "textDocument/references" + | DocumentHighlightRequest _ -> "textDocument/documentHighlight" + | TypeCoverageRequest _ -> "textDocument/typeCoverage" + | DocumentFormattingRequest _ -> "textDocument/formatting" + | DocumentRangeFormattingRequest _ -> "textDocument/rangeFormatting" + | DocumentOnTypeFormattingRequest _ -> "textDocument/onTypeFormatting" + | RageRequest -> "telemetry/rage" + | RenameRequest _ -> "textDocument/rename" + | DocumentCodeLensRequest _ -> "textDocument/codeLens" + | UnknownRequest (method_, _params) -> method_ + +let result_name_to_string (result : lsp_result) : string = + match result with + | ShowMessageRequestResult _ -> "window/showMessageRequest" + | ShowStatusResult _ -> "window/showStatus" + | InitializeResult _ -> "initialize" + | ShutdownResult -> "shutdown" + | CodeLensResolveResult _ -> "codeLens/resolve" + | HoverResult _ -> "textDocument/hover" + | CodeActionResult _ -> "textDocument/codeAction" + | CompletionResult _ -> "textDocument/completion" + | CompletionItemResolveResult _ -> "completionItem/resolve" + | DefinitionResult _ -> "textDocument/definition" + | TypeDefinitionResult _ -> "textDocument/typeDefinition" + | WorkspaceSymbolResult _ -> "workspace/symbol" + | DocumentSymbolResult 
_ -> "textDocument/documentSymbol" + | FindReferencesResult _ -> "textDocument/references" + | DocumentHighlightResult _ -> "textDocument/documentHighlight" + | TypeCoverageResult _ -> "textDocument/typeCoverage" + | DocumentFormattingResult _ -> "textDocument/formatting" + | DocumentRangeFormattingResult _ -> "textDocument/rangeFormatting" + | DocumentOnTypeFormattingResult _ -> "textDocument/onTypeFormatting" + | RageResult _ -> "telemetry/rage" + | RenameResult _ -> "textDocument/rename" + | DocumentCodeLensResult _ -> "textDocument/codeLens" + | ErrorResult (e, _stack) -> "ERROR/" ^ e.Error.message + +let notification_name_to_string (notification : lsp_notification) : string = + match notification with + | ExitNotification -> "exit" + | CancelRequestNotification _ -> "$/cancelRequest" + | PublishDiagnosticsNotification _ -> "textDocument/publishDiagnostics" + | DidOpenNotification _ -> "textDocument/didOpen" + | DidCloseNotification _ -> "textDocument/didClose" + | DidSaveNotification _ -> "textDocument/didSave" + | DidChangeNotification _ -> "textDocument/didChange" + | DidChangeWatchedFilesNotification _ -> "workspace/didChangeWatchedFiles" + | TelemetryNotification _ -> "telemetry/event" + | LogMessageNotification _ -> "window/logMessage" + | ShowMessageNotification _ -> "window/showMessage" + | ProgressNotification _ -> "window/progress" + | ActionRequiredNotification _ -> "window/actionRequired" + | ConnectionStatusNotification _ -> "telemetry/connectionStatus" + | InitializedNotification -> "initialized" + | SetTraceNotification -> "$/setTraceNotification" + | LogTraceNotification -> "$/logTraceNotification" + | UnknownNotification (method_, _params) -> method_ + +let message_name_to_string (message : lsp_message) : string = + match message with + | RequestMessage (_, r) -> request_name_to_string r + | NotificationMessage n -> notification_name_to_string n + | ResponseMessage (_, r) -> result_name_to_string r + +let denorm_message_to_string (message : lsp_message) : string = + match message with + | RequestMessage (id, r) -> + Printf.sprintf "request %s %s" (id_to_string id) (request_name_to_string r) + | NotificationMessage n -> + Printf.sprintf "notification %s" (notification_name_to_string n) + | ResponseMessage (id, ErrorResult (e, _stack)) -> + Printf.sprintf "error %s %s" (id_to_string id) e.Error.message + | ResponseMessage (id, r) -> + Printf.sprintf "result %s %s" (id_to_string id) (result_name_to_string r) + +let parse_lsp_request (method_ : string) (params : json option) : lsp_request = + match method_ with + | "initialize" -> InitializeRequest (parse_initialize params) + | "shutdown" -> ShutdownRequest + | "codeLens/resolve" -> CodeLensResolveRequest (parse_codeLensResolve params) + | "textDocument/hover" -> HoverRequest (parse_hover params) + | "textDocument/codeAction" -> + CodeActionRequest (parse_codeActionRequest params) + | "textDocument/completion" -> CompletionRequest (parse_completion params) + | "textDocument/definition" -> DefinitionRequest (parse_definition params) + | "workspace/symbol" -> WorkspaceSymbolRequest (parse_workspaceSymbol params) + | "textDocument/documentSymbol" -> + DocumentSymbolRequest (parse_documentSymbol params) + | "textDocument/references" -> + FindReferencesRequest (parse_findReferences params) + | "textDocument/rename" -> RenameRequest (parse_documentRename params) + | "textDocument/documentHighlight" -> + DocumentHighlightRequest (parse_documentHighlight params) + | "textDocument/typeCoverage" -> + TypeCoverageRequest 
(parse_typeCoverage params) + | "textDocument/formatting" -> + DocumentFormattingRequest (parse_documentFormatting params) + | "textDocument/rangeFormatting" -> + DocumentRangeFormattingRequest (parse_documentRangeFormatting params) + | "textDocument/onTypeFormatting" -> + DocumentOnTypeFormattingRequest (parse_documentOnTypeFormatting params) + | "textDocument/codeLens" -> + DocumentCodeLensRequest (parse_documentCodeLens params) + | "telemetry/rage" -> RageRequest + | "completionItem/resolve" + | "window/showMessageRequest" + | "window/showStatus" + | _ -> + UnknownRequest (method_, params) + +let parse_lsp_notification (method_ : string) (params : json option) : + lsp_notification = + match method_ with + | "$/cancelRequest" -> CancelRequestNotification (parse_cancelRequest params) + | "$/setTraceNotification" -> SetTraceNotification + | "$/logTraceNotification" -> LogTraceNotification + | "initialized" -> InitializedNotification + | "exit" -> ExitNotification + | "textDocument/didOpen" -> DidOpenNotification (parse_didOpen params) + | "textDocument/didClose" -> DidCloseNotification (parse_didClose params) + | "textDocument/didSave" -> DidSaveNotification (parse_didSave params) + | "textDocument/didChange" -> DidChangeNotification (parse_didChange params) + | "workspace/didChangeWatchedFiles" -> + DidChangeWatchedFilesNotification (parse_didChangeWatchedFiles params) + | "textDocument/publishDiagnostics" + | "window/logMessage" + | "window/showMessage" + | "window/progress" + | "window/actionRequired" + | "telemetry/connectionStatus" + | _ -> + UnknownNotification (method_, params) + +let parse_lsp_result (request : lsp_request) (result : json) : lsp_result = + let method_ = request_name_to_string request in + match request with + | ShowMessageRequestRequest _ -> + ShowMessageRequestResult (parse_result_showMessageRequest (Some result)) + | ShowStatusRequest _ -> + ShowStatusResult (parse_result_showMessageRequest (Some result)) + (* shares result type *) + | InitializeRequest _ + | RegisterCapabilityRequest _ + | ShutdownRequest + | CodeLensResolveRequest _ + | HoverRequest _ + | CodeActionRequest _ + | CompletionRequest _ + | CompletionItemResolveRequest _ + | DefinitionRequest _ + | TypeDefinitionRequest _ + | WorkspaceSymbolRequest _ + | DocumentSymbolRequest _ + | FindReferencesRequest _ + | DocumentHighlightRequest _ + | TypeCoverageRequest _ + | DocumentFormattingRequest _ + | DocumentRangeFormattingRequest _ + | DocumentOnTypeFormattingRequest _ + | RageRequest + | RenameRequest _ + | DocumentCodeLensRequest _ + | UnknownRequest _ -> + raise (Error.Parse ("Don't know how to parse LSP response " ^ method_)) + +(* parse_lsp: non-jsonrpc inputs - will raise an exception *) +(* requests and notifications - will raise an exception if they're malformed, *) +(* otherwise return Some *) +(* responses - will raise an exception if they're malformed, will return None *) +(* if they're absent from the "outstanding" map, otherwise return Some. 
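+   An illustrative sketch, not part of the original comment:
+     {"id":1,"method":"shutdown"}   is parsed as a RequestMessage,
+     {"method":"initialized"}       as a NotificationMessage, and
+     {"id":1,"result":null}         as a ResponseMessage, with the request type
+     recovered through the "outstanding" callback.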
*) +let parse_lsp (json : json) (outstanding : lsp_id -> lsp_request) : lsp_message + = + let json = Some json in + let id = Jget.val_opt json "id" |> parse_id_opt in + let method_opt = Jget.string_opt json "method" in + let params = Jget.val_opt json "params" in + let result = Jget.val_opt json "result" in + let error = Jget.val_opt json "error" in + match (id, method_opt, result, error) with + | (None, Some method_, _, _) -> + NotificationMessage (parse_lsp_notification method_ params) + | (Some id, Some method_, _, _) -> + RequestMessage (id, parse_lsp_request method_ params) + | (Some id, _, Some result, _) -> + let request = outstanding id in + ResponseMessage (id, parse_lsp_result request result) + | (Some id, _, _, Some error) -> + ResponseMessage (id, ErrorResult (parse_error error, "")) + | (_, _, _, _) -> raise (Error.Parse "Not JsonRPC") + +let print_lsp_request (id : lsp_id) (request : lsp_request) : json = + let method_ = request_name_to_string request in + let params = + match request with + | ShowMessageRequestRequest r -> print_showMessageRequest r + | ShowStatusRequest r -> print_showStatus r + | RegisterCapabilityRequest r -> print_registerCapability r + | InitializeRequest _ + | ShutdownRequest + | HoverRequest _ + | CodeActionRequest _ + | CodeLensResolveRequest _ + | CompletionRequest _ + | CompletionItemResolveRequest _ + | DefinitionRequest _ + | TypeDefinitionRequest _ + | WorkspaceSymbolRequest _ + | DocumentSymbolRequest _ + | FindReferencesRequest _ + | DocumentHighlightRequest _ + | TypeCoverageRequest _ + | DocumentFormattingRequest _ + | DocumentRangeFormattingRequest _ + | DocumentOnTypeFormattingRequest _ + | RageRequest + | RenameRequest _ + | DocumentCodeLensRequest _ + | UnknownRequest _ -> + failwith ("Don't know how to print request " ^ method_) + in + JSON_Object + [ + ("jsonrpc", JSON_String "2.0"); + ("id", print_id id); + ("method", JSON_String method_); + ("params", params); + ] + +let print_lsp_response + ?include_error_stack_trace (id : lsp_id) (result : lsp_result) : json = + let method_ = result_name_to_string result in + let json = + match result with + | InitializeResult r -> print_initialize r + | ShutdownResult -> print_shutdown () + | CodeLensResolveResult r -> print_codeLensResolve r + | HoverResult r -> print_hover r + | CodeActionResult r -> print_codeActionResult r + | CompletionResult r -> print_completion r + | DefinitionResult r -> print_definition r + | TypeDefinitionResult r -> print_definition r + | WorkspaceSymbolResult r -> print_workspaceSymbol r + | DocumentSymbolResult r -> print_documentSymbol r + | FindReferencesResult r -> print_findReferences r + | DocumentHighlightResult r -> print_documentHighlight r + | TypeCoverageResult r -> print_typeCoverage r + | DocumentFormattingResult r -> print_documentFormatting r + | DocumentRangeFormattingResult r -> print_documentRangeFormatting r + | DocumentOnTypeFormattingResult r -> print_documentOnTypeFormatting r + | RageResult r -> print_rage r + | RenameResult r -> print_documentRename r + | DocumentCodeLensResult r -> print_documentCodeLens r + | ShowMessageRequestResult _ + | ShowStatusResult _ + | CompletionItemResolveResult _ -> + failwith ("Don't know how to print result " ^ method_) + | ErrorResult (e, stack) -> print_error ?include_error_stack_trace e stack + in + match result with + | ErrorResult _ -> + JSON_Object + [("jsonrpc", JSON_String "2.0"); ("id", print_id id); ("error", json)] + | _ -> + JSON_Object + [("jsonrpc", JSON_String "2.0"); ("id", print_id id); 
("result", json)] + +let print_lsp_notification (notification : lsp_notification) : json = + let method_ = notification_name_to_string notification in + let params = + match notification with + | CancelRequestNotification r -> print_cancelRequest r + | PublishDiagnosticsNotification r -> print_diagnostics r + | TelemetryNotification r -> + print_logMessage r.LogMessage.type_ r.LogMessage.message + | LogMessageNotification r -> + print_logMessage r.LogMessage.type_ r.LogMessage.message + | ShowMessageNotification r -> + print_showMessage r.ShowMessage.type_ r.ShowMessage.message + | ProgressNotification r -> print_progress r.Progress.id r.Progress.label + | ActionRequiredNotification r -> + print_actionRequired r.ActionRequired.id r.ActionRequired.label + | ConnectionStatusNotification r -> print_connectionStatus r + | ExitNotification + | InitializedNotification + | SetTraceNotification + | LogTraceNotification + | DidOpenNotification _ + | DidCloseNotification _ + | DidSaveNotification _ + | DidChangeNotification _ + | DidChangeWatchedFilesNotification _ + | UnknownNotification _ -> + failwith ("Don't know how to print notification " ^ method_) + in + JSON_Object + [ + ("jsonrpc", JSON_String "2.0"); + ("method", JSON_String method_); + ("params", params); + ] + +let print_lsp ?include_error_stack_trace (message : lsp_message) : json = + match message with + | RequestMessage (id, request) -> print_lsp_request id request + | ResponseMessage (id, result) -> + print_lsp_response ?include_error_stack_trace id result + | NotificationMessage notification -> print_lsp_notification notification diff --git a/hack/utils/lsp/lsp_fmt.mli b/hack/utils/lsp/lsp_fmt.mli new file mode 100644 index 00000000000..f5e1454aa43 --- /dev/null +++ b/hack/utils/lsp/lsp_fmt.mli @@ -0,0 +1,213 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
+ * + *) + +val parse_id : Hh_json.json -> Lsp.lsp_id + +val parse_id_opt : Hh_json.json option -> Lsp.lsp_id option + +val print_id : Lsp.lsp_id -> Hh_json.json + +val id_to_string : Lsp.lsp_id -> string + +val parse_position : Hh_json.json option -> Lsp.position + +val print_position : Lsp.position -> Hh_json.json + +val print_range : Lsp.range -> Hh_json.json + +val print_location : Lsp.Location.t -> Hh_json.json + +val print_definition_location : Lsp.DefinitionLocation.t -> Hh_json.json + +val parse_range_exn : Hh_json.json option -> Lsp.range + +val parse_range_opt : Hh_json.json option -> Lsp.range option + +val parse_textDocumentIdentifier : + Hh_json.json option -> Lsp.TextDocumentIdentifier.t + +val parse_versionedTextDocumentIdentifier : + Hh_json.json option -> Lsp.VersionedTextDocumentIdentifier.t + +val parse_textDocumentItem : Hh_json.json option -> Lsp.TextDocumentItem.t + +val print_textDocumentItem : Lsp.TextDocumentItem.t -> Hh_json.json + +val print_markedItem : Lsp.markedString -> Hh_json.json + +val parse_textDocumentPositionParams : + Hh_json.json option -> Lsp.TextDocumentPositionParams.t + +val parse_textEdit : Hh_json.json option -> Lsp.TextEdit.t option + +val print_textEdit : Lsp.TextEdit.t -> Hh_json.json + +val print_command : Lsp.Command.t -> Hh_json.json + +val parse_command : Hh_json.json option -> Lsp.Command.t + +val parse_formattingOptions : + Hh_json.json option -> Lsp.DocumentFormatting.formattingOptions + +val print_symbolInformation : Lsp.SymbolInformation.t -> Hh_json.json + +val print_shutdown : unit -> Hh_json.json + +val parse_cancelRequest : Hh_json.json option -> Lsp.CancelRequest.params + +val print_cancelRequest : Lsp.CancelRequest.params -> Hh_json.json + +val print_rage : Lsp.Rage.result -> Hh_json.json + +val parse_didOpen : Hh_json.json option -> Lsp.DidOpen.params + +val print_didOpen : Lsp.DidOpen.params -> Hh_json.json + +val parse_didClose : Hh_json.json option -> Lsp.DidClose.params + +val parse_didSave : Hh_json.json option -> Lsp.DidSave.params + +val parse_didChange : Hh_json.json option -> Lsp.DidChange.params + +val parse_signatureHelp : Hh_json.json option -> Lsp.SignatureHelp.params + +val print_signatureHelp : Lsp.SignatureHelp.result -> Hh_json.json + +val parse_documentRename : Hh_json.json option -> Lsp.Rename.params + +val print_documentRename : Lsp.Rename.result -> Hh_json.json + +val print_diagnostics : Lsp.PublishDiagnostics.params -> Hh_json.json + +val print_logMessage : Lsp.MessageType.t -> string -> Hh_json.json + +val print_showMessage : Lsp.MessageType.t -> string -> Hh_json.json + +val print_showMessageRequest : + Lsp.ShowMessageRequest.showMessageRequestParams -> Hh_json.json + +val parse_result_showMessageRequest : + Hh_json.json option -> Lsp.ShowMessageRequest.result + +val print_showStatus : Lsp.ShowStatus.showStatusParams -> Hh_json.json + +val print_progress : int -> string option -> Hh_json.json + +val print_actionRequired : int -> string option -> Hh_json.json + +val print_connectionStatus : Lsp.ConnectionStatus.params -> Hh_json.json + +val parse_hover : Hh_json.json option -> Lsp.Hover.params + +val print_hover : Lsp.Hover.result -> Hh_json.json + +val parse_definition : Hh_json.json option -> Lsp.Definition.params + +val print_definition : Lsp.Definition.result -> Hh_json.json + +val parse_completionItem : + Hh_json.json option -> Lsp.CompletionItemResolve.params + +val print_completionItem : Lsp.Completion.completionItem -> Hh_json.json + +val parse_completion : Hh_json.json option -> 
Lsp.Completion.params + +val print_completion : Lsp.Completion.result -> Hh_json.json + +val parse_workspaceSymbol : Hh_json.json option -> Lsp.WorkspaceSymbol.params + +val print_workspaceSymbol : Lsp.WorkspaceSymbol.result -> Hh_json.json + +val parse_documentSymbol : Hh_json.json option -> Lsp.DocumentSymbol.params + +val print_documentSymbol : Lsp.DocumentSymbol.result -> Hh_json.json + +val parse_findReferences : Hh_json.json option -> Lsp.FindReferences.params + +val print_findReferences : Lsp.Location.t list -> Hh_json.json + +val parse_documentHighlight : + Hh_json.json option -> Lsp.DocumentHighlight.params + +val print_documentHighlight : Lsp.DocumentHighlight.result -> Hh_json.json + +val parse_typeCoverage : Hh_json.json option -> Lsp.TypeCoverage.params + +val print_typeCoverage : Lsp.TypeCoverage.result -> Hh_json.json + +val parse_toggleTypeCoverage : + Hh_json.json option -> Lsp.ToggleTypeCoverage.params + +val parse_documentFormatting : + Hh_json.json option -> Lsp.DocumentFormatting.params + +val print_documentFormatting : Lsp.DocumentFormatting.result -> Hh_json.json + +val parse_documentRangeFormatting : + Hh_json.json option -> Lsp.DocumentRangeFormatting.params + +val print_documentRangeFormatting : + Lsp.DocumentRangeFormatting.result -> Hh_json.json + +val parse_documentOnTypeFormatting : + Hh_json.json option -> Lsp.DocumentOnTypeFormatting.params + +val print_documentOnTypeFormatting : + Lsp.DocumentOnTypeFormatting.result -> Hh_json.json + +val parse_initialize : Hh_json.json option -> Lsp.Initialize.params + +val print_initializeError : Lsp.Initialize.errorData -> Hh_json.json + +val print_initialize : Lsp.Initialize.result -> Hh_json.json + +val print_registerCapability : Lsp.RegisterCapability.params -> Hh_json.json + +val parse_didChangeWatchedFiles : + Hh_json.json option -> Lsp.DidChangeWatchedFiles.params + +val error_of_exn : exn -> Lsp.Error.t + +val print_error : + ?include_error_stack_trace:bool -> Lsp.Error.t -> string -> Hh_json.json + +val parse_error : Hh_json.json -> Lsp.Error.t + +val request_name_to_string : Lsp.lsp_request -> string + +val result_name_to_string : Lsp.lsp_result -> string + +val notification_name_to_string : Lsp.lsp_notification -> string + +val message_name_to_string : Lsp.lsp_message -> string + +val denorm_message_to_string : Lsp.lsp_message -> string + +val parse_lsp_request : string -> Hh_json.json option -> Lsp.lsp_request + +val parse_lsp_notification : + string -> Hh_json.json option -> Lsp.lsp_notification + +val parse_lsp_result : Lsp.lsp_request -> Hh_json.json -> Lsp.lsp_result + +val parse_lsp : + Hh_json.json -> (Lsp.lsp_id -> Lsp.lsp_request) -> Lsp.lsp_message + +val print_lsp_request : Lsp.lsp_id -> Lsp.lsp_request -> Hh_json.json + +val print_lsp_response : + ?include_error_stack_trace:bool -> + Lsp.lsp_id -> + Lsp.lsp_result -> + Hh_json.json + +val print_lsp_notification : Lsp.lsp_notification -> Hh_json.json + +val print_lsp : + ?include_error_stack_trace:bool -> Lsp.lsp_message -> Hh_json.json diff --git a/hack/utils/lsp/lsp_helpers.ml b/hack/utils/lsp/lsp_helpers.ml new file mode 100644 index 00000000000..7b933406abe --- /dev/null +++ b/hack/utils/lsp/lsp_helpers.ml @@ -0,0 +1,418 @@ +(* A few helpful wrappers around LSP *) + +open Lsp +open Lsp_fmt + +let progress_and_actionRequired_counter = ref 0 + +(************************************************************************) +(* Conversions *) +(************************************************************************) + +let url_scheme_regex = Str.regexp 
"^\\([a-zA-Z][a-zA-Z0-9+.-]+\\):" + +(* this requires schemes with 2+ characters, so "c:\path" isn't considered a scheme *) + +let lsp_uri_to_path (uri : string) : string = + if Str.string_match url_scheme_regex uri 0 then + let scheme = Str.matched_group 1 uri in + if scheme = "file" then + File_url.parse uri + else + raise + (Error.InvalidParams (Printf.sprintf "Not a valid file url '%s'" uri)) + else + uri + +let path_to_lsp_uri (path : string) ~(default_path : string) : string = + if path = "" then + File_url.create default_path + else + File_url.create path + +let lsp_textDocumentIdentifier_to_filename + (identifier : Lsp.TextDocumentIdentifier.t) : string = + Lsp.TextDocumentIdentifier.(lsp_uri_to_path identifier.uri) + +let lsp_position_to_fc (pos : Lsp.position) : File_content.position = + { + File_content.line = pos.Lsp.line + 1; + (* LSP is 0-based; File_content is 1-based. *) + column = pos.Lsp.character + 1; + } + +let lsp_range_to_fc (range : Lsp.range) : File_content.range = + { + File_content.st = lsp_position_to_fc range.Lsp.start; + ed = lsp_position_to_fc range.Lsp.end_; + } + +let lsp_edit_to_fc (edit : Lsp.DidChange.textDocumentContentChangeEvent) : + File_content.text_edit = + { + File_content.range = Option.map edit.DidChange.range ~f:lsp_range_to_fc; + text = edit.DidChange.text; + } + +let apply_changes + (text : string) + (contentChanges : DidChange.textDocumentContentChangeEvent list) : + (string, string * Utils.callstack) result = + let edits = List.map lsp_edit_to_fc contentChanges in + File_content.edit_file text edits + +let get_char_from_lsp_position (content : string) (position : Lsp.position) : + char = + let fc_position = lsp_position_to_fc position in + File_content.(get_char content (get_offset content fc_position)) + +let apply_changes_unsafe + text (contentChanges : DidChange.textDocumentContentChangeEvent list) : + string = + match apply_changes text contentChanges with + | Ok r -> r + | Error (e, _stack) -> failwith e + +(************************************************************************) +(* Range calculations *) +(************************************************************************) + +(* We need to do intersection and other calculations on ranges. + * The functions in the following module all assume LSP 0-based ranges, + * and assume without testing that a range's start is equal to or before + * its end. *) +let pos_compare (p1 : position) (p2 : position) : int = + if p1.line < p2.line then + -1 + else if p1.line > p2.line then + 1 + else + p1.character - p2.character + +(* Given a "selection" range A..B and a "squiggle" range a..b, how do they overlap? + * There are 12 ways to order the four letters ABab, of which six + * satisfy both A<=B and a<=b. Here they are. 
*) +type range_overlap = + | Selection_before_start_of_squiggle (* ABab *) + | Selection_overlaps_start_of_squiggle (* AaBb *) + | Selection_covers_whole_squiggle (* AabB *) + | Selection_in_middle_of_squiggle (* aABb *) + | Selection_overlaps_end_of_squiggle (* aAbB *) + (* abAB *) + | Selection_after_end_of_squiggle + +(* Computes how two ranges "selection" and "squiggle" overlap *) +let get_range_overlap (selection : range) (squiggle : range) : range_overlap = + let selStart_leq_squiggleStart = + pos_compare selection.start squiggle.start <= 0 + in + let selStart_leq_squiggleEnd = + pos_compare selection.start squiggle.end_ <= 0 + in + let selEnd_lt_squiggleStart = + pos_compare selection.end_ squiggle.start < 0 + in + let selEnd_lt_squiggleEnd = pos_compare selection.end_ squiggle.end_ < 0 in + (* Q. Why does it test "<=" for the first two and "<" for the last two? *) + (* Intuitively you can trust that it has something to do with how ranges are *) + (* inclusive at their start and exclusive at their end. But the real reason *) + (* is just that I did an exhaustive case analysis to look at all cases where *) + (* A,B,a,b might be equal, and decided which outcome I wanted for each of them *) + (* because of how I'm going to treat them in other functions, and retrofitted *) + (* those answers into this function. For instance, if squiggleStart==selEnd, *) + (* I'll want to handle it in the same way as squiggleStart<selEnd. *) + match + ( selStart_leq_squiggleStart, + selStart_leq_squiggleEnd, + selEnd_lt_squiggleStart, + selEnd_lt_squiggleEnd ) + with + | (true, true, true, true) -> Selection_before_start_of_squiggle + | (true, true, false, true) -> Selection_overlaps_start_of_squiggle + | (true, true, false, false) -> Selection_covers_whole_squiggle + | (false, true, false, true) -> Selection_in_middle_of_squiggle + | (false, true, false, false) -> Selection_overlaps_end_of_squiggle + | (false, false, false, false) -> Selection_after_end_of_squiggle + | (true, false, _, _) -> + failwith "sel.start proves squiggle.start > squiggle.end_" + | (_, _, true, false) -> + failwith "sel.end proves squiggle.start > squiggle.end_" + | (false, _, true, _) -> + failwith "squiggle.start proves sel.start > sel.end_" + | (_, false, _, true) -> failwith "squiggle.end_ proves sel.start > sel.end_" + +(* this structure models a change where a certain range is replaced with + * a block of text. For instance, if you merely insert a single character, + * then remove_range.start==remove_range.end_ and insert_lines=0 + * and insert_chars_on_final_line=1. *) +type range_replace = { + remove_range: range; + insert_lines: int; + insert_chars_on_final_line: int; +} + +(* If you have a position "p", and some range before this point is replaced with + * text of a certain number of lines, the last line having a certain number of characters, + * then how will the position be shifted? + * Note: this function assumes but doesn't verify that the range ends on or before + * the position.
*) +let update_pos_due_to_prior_replace (p : position) (replace : range_replace) : + position = + if replace.remove_range.end_.line < p.line then + (* The replaced range doesn't touch the position, so position merely gets shifted up/down *) + let line = + p.line + - (replace.remove_range.end_.line - replace.remove_range.start.line) + + replace.insert_lines + in + { p with line } + else if replace.insert_lines > 0 then + (* The position is on the final line and multiple lines were inserted *) + let line = + p.line + - (replace.remove_range.end_.line - replace.remove_range.start.line) + + replace.insert_lines + in + let character = + replace.insert_chars_on_final_line + + (p.character - replace.remove_range.end_.character) + in + { line; character } + else + (* The position is on the line where a few characters were inserted *) + let line = + p.line + - (replace.remove_range.end_.line - replace.remove_range.start.line) + in + let character = + replace.remove_range.start.character + + replace.insert_chars_on_final_line + + (p.character - replace.remove_range.end_.character) + in + { line; character } + +(* If you have a squiggle, and some range in the document is replaced with a block + * some lines long and with insert_chars on the final line, then what's the new + * range of the squiggle? *) +let update_range_due_to_replace (squiggle : range) (replace : range_replace) : + range option = + match get_range_overlap replace.remove_range squiggle with + | Selection_before_start_of_squiggle -> + let start = update_pos_due_to_prior_replace squiggle.start replace in + let end_ = update_pos_due_to_prior_replace squiggle.end_ replace in + Some { start; end_ } + | Selection_overlaps_start_of_squiggle -> + let line = replace.remove_range.start.line + replace.insert_lines in + let character = + if replace.insert_lines = 0 then + replace.remove_range.start.character + + replace.insert_chars_on_final_line + else + replace.insert_chars_on_final_line + in + let start = { line; character } in + let end_ = update_pos_due_to_prior_replace squiggle.end_ replace in + Some { start; end_ } + | Selection_covers_whole_squiggle -> None + | Selection_in_middle_of_squiggle -> + let start = squiggle.start in + let end_ = update_pos_due_to_prior_replace squiggle.end_ replace in + Some { start; end_ } + | Selection_overlaps_end_of_squiggle -> + let start = squiggle.start in + let end_ = replace.remove_range.start in + Some { start; end_ } + | Selection_after_end_of_squiggle -> Some squiggle + +(* Moves all diagnostics in response to an LSP change. + * The change might insert text before a diagnostic squiggle (so the squiggle + * has to be moved down or to the right); it might delete text before the squiggle; + * it might modify text inside the squiggle; it might replace text that overlaps + * the squiggle in which case the squiggle gets truncated/moved; it might replace + * the squiggle in its entirety in which case the squiggle gets removed. + * Note that an LSP change is actually a set of changes, applied in order. 
*) +let update_diagnostics_due_to_change + (diagnostics : PublishDiagnostics.diagnostic list) + (change : Lsp.DidChange.params) : PublishDiagnostics.diagnostic list = + PublishDiagnostics.( + let replace_of_change change = + match change.DidChange.range with + | None -> None + | Some remove_range -> + let offset = String.length change.DidChange.text in + let pos = + File_content.offset_to_position change.DidChange.text offset + in + (* 1-based *) + let insert_lines = pos.File_content.line - 1 in + let insert_chars_on_final_line = pos.File_content.column - 1 in + Some { remove_range; insert_lines; insert_chars_on_final_line } + in + let apply_replace diagnostic_opt replace_opt = + match (diagnostic_opt, replace_opt) with + | (Some diagnostic, Some replace) -> + let range = update_range_due_to_replace diagnostic.range replace in + Option.map range ~f:(fun range -> { diagnostic with range }) + | _ -> None + in + let replaces = + Core_list.map change.DidChange.contentChanges ~f:replace_of_change + in + let apply_all_replaces diagnostic = + Core_list.fold replaces ~init:(Some diagnostic) ~f:apply_replace + in + Core_list.filter_map diagnostics ~f:apply_all_replaces) + +(************************************************************************) +(* Accessors *) +(************************************************************************) + +let get_root (p : Lsp.Initialize.params) : string = + Lsp.Initialize.( + match (p.rootUri, p.rootPath) with + | (Some uri, _) -> lsp_uri_to_path uri + | (None, Some path) -> path + | (None, None) -> failwith "Initialize params missing root") + +let supports_progress (p : Lsp.Initialize.params) : bool = + Lsp.Initialize.(p.client_capabilities.window.progress) + +let supports_actionRequired (p : Lsp.Initialize.params) : bool = + Lsp.Initialize.(p.client_capabilities.window.actionRequired) + +let supports_status (p : Lsp.Initialize.params) : bool = + Lsp.Initialize.(p.client_capabilities.window.status) + +let supports_snippets (p : Lsp.Initialize.params) : bool = + Lsp.Initialize.( + p.client_capabilities.textDocument.completion.completionItem.snippetSupport) + +let supports_connectionStatus (p : Lsp.Initialize.params) : bool = + Lsp.Initialize.(p.client_capabilities.telemetry.connectionStatus) + +(************************************************************************) +(* Wrappers for some LSP methods *) +(************************************************************************) + +let telemetry + (writer : Jsonrpc.writer) (level : MessageType.t) (message : string) : unit + = + print_logMessage level message |> Jsonrpc.notify writer "telemetry/event" + +let telemetry_error (writer : Jsonrpc.writer) = + telemetry writer MessageType.ErrorMessage + +let telemetry_log (writer : Jsonrpc.writer) = + telemetry writer MessageType.LogMessage + +let log (writer : Jsonrpc.writer) (level : MessageType.t) (message : string) : + unit = + print_logMessage level message |> Jsonrpc.notify writer "window/logMessage" + +let log_error (writer : Jsonrpc.writer) = log writer MessageType.ErrorMessage + +let log_warning (writer : Jsonrpc.writer) = + log writer MessageType.WarningMessage + +let log_info (writer : Jsonrpc.writer) = log writer MessageType.InfoMessage + +let dismiss_diagnostics (writer : Jsonrpc.writer) (diagnostic_uris : SSet.t) : + SSet.t = + let dismiss_one (uri : string) : unit = + let message = { Lsp.PublishDiagnostics.uri; diagnostics = [] } in + message + |> print_diagnostics + |> Jsonrpc.notify writer "textDocument/publishDiagnostics" + in + SSet.iter dismiss_one 
diagnostic_uris; + SSet.empty + +let notify_connectionStatus + (p : Lsp.Initialize.params) + (writer : Jsonrpc.writer) + (wasConnected : bool) + (isConnected : bool) : bool = + ( if supports_connectionStatus p && wasConnected <> isConnected then + let message = { Lsp.ConnectionStatus.isConnected } in + message + |> print_connectionStatus + |> Jsonrpc.notify writer "telemetry/connectionStatus" ); + isConnected + +(* notify_progress: for sending/updating/closing progress messages. *) +(* To start a new indicator: id=None, message=Some, and get back the new id *) +(* To update an existing one: id=Some, message=Some, and get back same id *) +(* To close an existing one: id=Some, message=None, and get back None *) +(* No-op, for convenience: id=None, message=None, and you get back None *) +(* messages. To start a new progress notifier, put id=None and message=Some *) + +let notify_progress_raw + (state : 'a) + (p : Lsp.Initialize.params) + (writer : 'a -> Progress.params -> 'a) + (id : Progress.t) + (label : string option) : 'a * Progress.t = + match (id, label) with + | (Progress.Absent, Some label) -> + if supports_progress p then + let () = incr progress_and_actionRequired_counter in + let id = !progress_and_actionRequired_counter in + let msg = { Progress.id; label = Some label } in + let state = writer state msg in + (state, Progress.Present { id; label }) + else + (state, Progress.Absent) + | (Progress.Present { id; label }, Some new_label) when label = new_label -> + (state, Progress.Present { id; label }) + | (Progress.Present { id; _ }, Some label) -> + let msg = { Progress.id; label = Some label } in + let state = writer state msg in + (state, Progress.Present { id; label }) + | (Progress.Present { id; _ }, None) -> + let msg = { Progress.id; label = None } in + let state = writer state msg in + (state, Progress.Absent) + | (Progress.Absent, None) -> (state, Progress.Absent) + +let notify_progress + (p : Lsp.Initialize.params) + (writer : Jsonrpc.writer) + (id : Progress.t) + (label : string option) : Progress.t = + let writer_wrapper () params = + let json = print_progress params.Progress.id params.Progress.label in + Jsonrpc.notify writer "window/progress" json + in + let ((), id) = notify_progress_raw () p writer_wrapper id label in + id + +let notify_actionRequired + (p : Lsp.Initialize.params) + (writer : Jsonrpc.writer) + (id : ActionRequired.t) + (label : string option) : ActionRequired.t = + match (id, label) with + | (ActionRequired.Absent, Some label) -> + if supports_actionRequired p then + let () = incr progress_and_actionRequired_counter in + let id = !progress_and_actionRequired_counter in + let () = + print_actionRequired id (Some label) + |> Jsonrpc.notify writer "window/actionRequired" + in + ActionRequired.Present { id; label } + else + ActionRequired.Absent + | (ActionRequired.Present { id; label }, Some new_label) + when label = new_label -> + ActionRequired.Present { id; label } + | (ActionRequired.Present { id; _ }, Some label) -> + print_actionRequired id (Some label) + |> Jsonrpc.notify writer "window/actionRequired"; + ActionRequired.Present { id; label } + | (ActionRequired.Present { id; _ }, None) -> + print_actionRequired id None + |> Jsonrpc.notify writer "window/actionRequired"; + ActionRequired.Absent + | (ActionRequired.Absent, None) -> ActionRequired.Absent diff --git a/hack/utils/lsp/lsp_helpers.mli b/hack/utils/lsp/lsp_helpers.mli new file mode 100644 index 00000000000..5137cf93249 --- /dev/null +++ b/hack/utils/lsp/lsp_helpers.mli @@ -0,0 +1,124 @@ 
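The lsp_helpers.ml hunk above shifts diagnostic "squiggles" whenever text earlier in the buffer is edited. As a minimal, self-contained sketch of the simplest case handled by update_pos_due_to_prior_replace (the pos type and function name below are invented for illustration and are not part of the patch): when the replaced range lies entirely above a position, that position moves only by the net line delta.

type pos = { line : int; character : int }

(* removed_lines were deleted above [p] and inserted_lines were inserted in their place;
   the column is untouched because the replacement never reaches p's line. *)
let shift_pos_below_replace (p : pos) ~removed_lines ~inserted_lines : pos =
  { p with line = p.line - removed_lines + inserted_lines }

let () =
  let p =
    shift_pos_below_replace { line = 10; character = 4 } ~removed_lines:2 ~inserted_lines:5
  in
  assert (p = { line = 13; character = 4 })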
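Similarly, replace_of_change (inside update_diagnostics_due_to_change above) measures the inserted text as a number of whole lines plus the character count on its final line, by way of File_content.offset_to_position. A rough, hypothetical equivalent of that measurement:

(* measure_insert is an invented helper, not the patch's API. *)
let measure_insert (text : string) : int * int =
  let lines = String.split_on_char '\n' text in
  let insert_lines = List.length lines - 1 in
  let insert_chars_on_final_line = String.length (List.nth lines insert_lines) in
  (insert_lines, insert_chars_on_final_line)

let () =
  assert (measure_insert "x" = (0, 1));
  assert (measure_insert "let x = 1\nin x" = (1, 4))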
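The comment block above notify_progress_raw describes a small protocol: Absent plus Some label opens an indicator and allocates a fresh id, Present plus Some label updates it under the same id, and Present plus None closes it. A simplified, self-contained mirror of that state machine (the progress type and step function below are invented; they are not the patch's Lsp.Progress.t):

type progress = Absent | Present of { id : int; label : string }

let counter = ref 0

let step (state : progress) (label : string option) : progress =
  match (state, label) with
  | (Absent, Some label) ->
    incr counter;
    Present { id = !counter; label } (* open a new indicator under a fresh id *)
  | (Present { id; _ }, Some label) -> Present { id; label } (* update in place, id is kept *)
  | (Present _, None) | (Absent, None) -> Absent (* close it, or no-op when already absent *)

let () =
  let s = step Absent (Some "typechecking") in
  let s = step s (Some "typechecking 50%") in
  assert (step s None = Absent)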
+(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +type range_replace = { + remove_range: Lsp.range; + insert_lines: int; + insert_chars_on_final_line: int; +} + +val progress_and_actionRequired_counter : int ref + +val url_scheme_regex : Str.regexp + +val lsp_uri_to_path : string -> string + +val path_to_lsp_uri : string -> default_path:string -> string + +val lsp_textDocumentIdentifier_to_filename : + Lsp.TextDocumentIdentifier.t -> string + +val lsp_position_to_fc : Lsp.position -> File_content.position + +val lsp_range_to_fc : Lsp.range -> File_content.range + +val lsp_edit_to_fc : + Lsp.DidChange.textDocumentContentChangeEvent -> File_content.text_edit + +val apply_changes : + string -> + Lsp.DidChange.textDocumentContentChangeEvent list -> + (string, string * Utils.callstack) result + +val get_char_from_lsp_position : string -> Lsp.position -> char + +val apply_changes_unsafe : + string -> Lsp.DidChange.textDocumentContentChangeEvent list -> string + +val pos_compare : Lsp.position -> Lsp.position -> int + +type range_overlap = + | Selection_before_start_of_squiggle + | Selection_overlaps_start_of_squiggle + | Selection_covers_whole_squiggle + | Selection_in_middle_of_squiggle + | Selection_overlaps_end_of_squiggle + | Selection_after_end_of_squiggle + +val get_range_overlap : Lsp.range -> Lsp.range -> range_overlap + +val update_pos_due_to_prior_replace : + Lsp.position -> range_replace -> Lsp.position + +val update_range_due_to_replace : + Lsp.range -> range_replace -> Lsp.range option + +val update_diagnostics_due_to_change : + Lsp.PublishDiagnostics.diagnostic list -> + Lsp.DidChange.params -> + Lsp.PublishDiagnostics.diagnostic list + +val get_root : Lsp.Initialize.params -> string + +val supports_progress : Lsp.Initialize.params -> bool + +val supports_actionRequired : Lsp.Initialize.params -> bool + +val supports_status : Lsp.Initialize.params -> bool + +val supports_snippets : Lsp.Initialize.params -> bool + +val supports_connectionStatus : Lsp.Initialize.params -> bool + +val telemetry : Jsonrpc.writer -> Lsp.MessageType.t -> string -> unit + +val telemetry_error : Jsonrpc.writer -> string -> unit + +val telemetry_log : Jsonrpc.writer -> string -> unit + +val log : Jsonrpc.writer -> Lsp.MessageType.t -> string -> unit + +val log_error : Jsonrpc.writer -> string -> unit + +val log_warning : Jsonrpc.writer -> string -> unit + +val log_info : Jsonrpc.writer -> string -> unit + +val dismiss_diagnostics : Jsonrpc.writer -> SSet.t -> SSet.t + +val notify_connectionStatus : + Lsp.Initialize.params -> Jsonrpc.writer -> bool -> bool -> bool + +val notify_progress_raw : + 'a -> + Lsp.Initialize.params -> + ('a -> Lsp.Progress.params -> 'a) -> + Lsp.Progress.t -> + string option -> + 'a * Lsp.Progress.t + +val notify_progress : + Lsp.Initialize.params -> + Jsonrpc.writer -> + Lsp.Progress.t -> + string option -> + Lsp.Progress.t + +val notify_actionRequired : + Lsp.Initialize.params -> + Jsonrpc.writer -> + Lsp.ActionRequired.t -> + string option -> + Lsp.ActionRequired.t diff --git a/hack/utils/lsp_fmt.ml 
b/hack/utils/lsp_fmt.ml deleted file mode 100644 index 0915b90fade..00000000000 --- a/hack/utils/lsp_fmt.ml +++ /dev/null @@ -1,1250 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * -*) - -open Hh_core -open Lsp -open Hh_json -open Hh_json_helpers - - -(************************************************************************) -(** Miscellaneous LSP structures **) -(************************************************************************) - -let parse_id (json: json) : lsp_id = - match json with - | JSON_Number s -> - begin try NumberId (int_of_string s) - with Failure _ -> raise (Error.Parse ("float ids not allowed: " ^ s)) end - | JSON_String s -> - StringId s - | _ -> - raise (Error.Parse ("not an id: " ^ (Hh_json.json_to_string json))) - -let parse_id_opt (json: json option) : lsp_id option = - Option.map json ~f:parse_id - -let print_id (id: lsp_id) : json = - match id with - | NumberId n -> JSON_Number (string_of_int n) - | StringId s -> JSON_String s - -let id_to_string (id: lsp_id) : string = - match id with - | NumberId n -> string_of_int n - | StringId s -> Printf.sprintf "\"%s\"" s - -let parse_position (json: json option) : position = - { - line = Jget.int_exn json "line"; - character = Jget.int_exn json "character"; - } - -let print_position (position: position) : json = - JSON_Object [ - "line", position.line |> int_; - "character", position.character |> int_; - ] - -let print_range (range: range) : json = - JSON_Object [ - "start", print_position range.start; - "end", print_position range.end_; - ] - -let print_location (location: Location.t) : json = - let open Location in - JSON_Object [ - "uri", JSON_String location.uri; - "range", print_range location.range; - ] - -let print_definition_location (definition_location: DefinitionLocation.t) : json = - let open DefinitionLocation in - let location = definition_location.location in - Jprint.object_opt [ - "uri", Some (JSON_String location.Location.uri); - "range", Some (print_range location.Location.range); - "title", Option.map definition_location.title ~f:string_; - ] - -let parse_range_exn (json: json option) : range = - { - start = Jget.obj_exn json "start" |> parse_position; - end_ = Jget.obj_exn json "end" |> parse_position; - } - -let parse_range_opt (json: json option) : range option = - if json = None then None - else Some (parse_range_exn json) - -let parse_textDocumentIdentifier (json: json option) - : TextDocumentIdentifier.t = - let open TextDocumentIdentifier in - { - uri = Jget.string_exn json "uri"; - } - -let parse_versionedTextDocumentIdentifier (json: json option) - : VersionedTextDocumentIdentifier.t = - let open VersionedTextDocumentIdentifier in - { - uri = Jget.string_exn json "uri"; - version = Jget.int_d json "version" 0; - } - -let parse_textDocumentItem (json: json option) : TextDocumentItem.t = - let open TextDocumentItem in - { - uri = Jget.string_exn json "uri"; - languageId = Jget.string_d json "languageId" ""; - version = Jget.int_d json "version" 0; - text = Jget.string_exn json "text"; - } - -let print_textDocumentItem (item: TextDocumentItem.t) : json = - let open TextDocumentItem in - JSON_Object [ - "uri", JSON_String item.uri; - "languageId", JSON_String item.languageId; - "version", JSON_Number (string_of_int item.version); - "text", JSON_String item.text; - ] - -let print_markedItem (item: markedString) : json = - match item 
with - | MarkedString s -> JSON_String s - | MarkedCode (language, value) -> JSON_Object - [ - "language", JSON_String language; - "value", JSON_String value; - ] - -let parse_textDocumentPositionParams (params: json option) - : TextDocumentPositionParams.t = - let open TextDocumentPositionParams in - { - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - position = Jget.obj_exn params "position" |> parse_position; - } - -let parse_textEdit (params: json option) : TextEdit.t option = - match params with - | None -> None - | _ -> - let open TextEdit in - Some { - range = Jget.obj_exn params "range" |> parse_range_exn; - newText = Jget.string_exn params "newText"; - } - -let print_textEdit (edit: TextEdit.t) : json = - let open TextEdit in - JSON_Object [ - "range", print_range edit.range; - "newText", JSON_String edit.newText; - ] - -let print_command (command: Command.t) : json = - let open Command in - JSON_Object [ - "title", JSON_String command.title; - "command", JSON_String command.command; - "arguments", JSON_Array command.arguments; - ] - -let parse_command (json: json option) : Command.t = - let open Command in - { - title = Jget.string_d json "title" ""; - command = Jget.string_d json "command" ""; - arguments = Jget.array_d json "arguments" ~default:[] |> List.filter_opt; - } - -let parse_formattingOptions (json: json option) - : DocumentFormatting.formattingOptions = - { DocumentFormatting. - tabSize = Jget.int_d json "tabSize" 2; - insertSpaces = Jget.bool_d json "insertSpaces" true; - } - -let print_symbolInformation (info: SymbolInformation.t) : json = - let open SymbolInformation in - let print_symbol_kind = function - | File -> int_ 1 - | Module -> int_ 2 - | Namespace -> int_ 3 - | Package -> int_ 4 - | Class -> int_ 5 - | Method -> int_ 6 - | Property -> int_ 7 - | Field -> int_ 8 - | Constructor -> int_ 9 - | Enum -> int_ 10 - | Interface -> int_ 11 - | Function -> int_ 12 - | Variable -> int_ 13 - | Constant -> int_ 14 - | String -> int_ 15 - | Number -> int_ 16 - | Boolean -> int_ 17 - | Array -> int_ 18 - in - Jprint.object_opt [ - "name", Some (JSON_String info.name); - "kind", Some (print_symbol_kind info.kind); - "location", Some (print_location info.location); - "containerName", Option.map info.containerName string_; - ] - -let print_messageType (type_: MessageType.t) : json = - let open MessageType in - match type_ with - | ErrorMessage -> int_ 1 - | WarningMessage -> int_ 2 - | InfoMessage -> int_ 3 - | LogMessage -> int_ 4 - - -(************************************************************************) -(** shutdown request **) -(************************************************************************) - -let print_shutdown () : json = - JSON_Null - - -(************************************************************************) -(** $/cancelRequest notification **) -(************************************************************************) - -let parse_cancelRequest (params: json option) : CancelRequest.params = - let open CancelRequest in - { - id = Jget.val_exn params "id" |> parse_id - } - -let print_cancelRequest (p: CancelRequest.params) : json = - let open CancelRequest in - JSON_Object [ - "id", print_id p.id - ] - -(************************************************************************) -(** rage request **) -(************************************************************************) - -let print_rage (r: Rage.result) : json = - let open Rage in - let print_item (item: rageItem) : json = - JSON_Object [ - "data", 
JSON_String item.data; - "title", match item.title with None -> JSON_Null | Some s -> JSON_String s; - ] in - JSON_Array (List.map r ~f:print_item) - - -(************************************************************************) -(** textDocument/didOpen notification **) -(************************************************************************) - -let parse_didOpen (params: json option) : DidOpen.params = - let open DidOpen in - { - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentItem; - } - -let print_didOpen (params: DidOpen.params) : json = - let open DidOpen in - JSON_Object [ - "textDocument", params.textDocument |> print_textDocumentItem; - ] - -(************************************************************************) -(** textDocument/didClose notification **) -(************************************************************************) - -let parse_didClose (params: json option) : DidClose.params = - let open DidClose in - { - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - } - - -(************************************************************************) -(** textDocument/didSave notification **) -(************************************************************************) - -let parse_didSave (params: json option) : DidSave.params = - let open DidSave in - { - textDocument = Jget.obj_exn params "textDocument" |> parse_textDocumentIdentifier; - text = Jget.string_opt params "text"; - } - - - -(************************************************************************) -(** textDocument/didChange notification **) -(************************************************************************) - -let parse_didChange (params: json option) : DidChange.params = - let open DidChange in - let parse_textDocumentContentChangeEvent json = - { - range = Jget.obj_opt json "range" |> parse_range_opt; - rangeLength = Jget.int_opt json "rangeLength"; - text = Jget.string_exn json "text"; - } - in - { - textDocument = Jget.obj_exn params "textDocument" - |> parse_versionedTextDocumentIdentifier; - contentChanges = Jget.array_d params "contentChanges" ~default:[] - |> List.map ~f:parse_textDocumentContentChangeEvent; - } - - - -(************************************************************************) -(** textDocument/signatureHelp notification **) -(************************************************************************) - -let parse_signatureHelp (params: json option) : SignatureHelp.params = - parse_textDocumentPositionParams params - -let print_signatureHelp (r: SignatureHelp.result) : json = - let open SignatureHelp in - let print_parInfo parInfo = - Jprint.object_opt [ - "label", Some (Hh_json.JSON_String parInfo.parinfo_label); - "documentation", Option.map ~f:Hh_json.string_ parInfo.parinfo_documentation; - ] - in - let print_sigInfo sigInfo = - Jprint.object_opt [ - "label", Some (Hh_json.JSON_String sigInfo.siginfo_label); - "documentation", Option.map ~f:Hh_json.string_ sigInfo.siginfo_documentation; - "parameters", Some (Hh_json.JSON_Array (List.map ~f:print_parInfo sigInfo.parameters)) - ] - in - match r with - | None -> Hh_json.JSON_Null - | Some r -> - Hh_json.JSON_Object [ - "signatures", Hh_json.JSON_Array (List.map ~f:print_sigInfo r.signatures); - "activeSignature", Hh_json.int_ r.activeSignature; - "activeParameter", Hh_json.int_ r.activeParameter; - ] - - - -(************************************************************************) -(** textDocument/rename Request **) 
-(************************************************************************) - -let parse_documentRename (params: json option) : Rename.params = - let open Rename in - { - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - position = Jget.obj_exn params "position" |> parse_position; - newName = Jget.string_exn params "newName"; - } - -let print_documentRename (r: Rename.result) : json = - let open WorkspaceEdit in - let print_workspace_edit_changes (uri, text_edits) = - uri, JSON_Array (List.map ~f:print_textEdit text_edits) - in - JSON_Object [ - "changes", JSON_Object (List.map (SMap.elements r.changes) ~f:print_workspace_edit_changes); - ] - - - -(************************************************************************) -(** textDocument/publishDiagnostics notification **) -(************************************************************************) - -let print_diagnostics (r: PublishDiagnostics.params) : json = - let open PublishDiagnostics in - let print_diagnosticSeverity = function - | PublishDiagnostics.Error -> int_ 1 - | PublishDiagnostics.Warning -> int_ 2 - | PublishDiagnostics.Information -> int_ 3 - | PublishDiagnostics.Hint -> int_ 4 in - let print_diagnosticCode = function - | IntCode i -> Some (int_ i) - | StringCode s -> Some (string_ s) - | NoCode -> None - in - let print_related (related: relatedLocation) : json = - Hh_json.JSON_Object [ - "location", print_location related.relatedLocation; - "message", string_ related.relatedMessage; - ] - in - let print_diagnostic (diagnostic: diagnostic) : json = - Jprint.object_opt [ - "range", Some (print_range diagnostic.range); - "severity", Option.map diagnostic.severity print_diagnosticSeverity; - "code", print_diagnosticCode diagnostic.code; - "source", Option.map diagnostic.source string_; - "message", Some (JSON_String diagnostic.message); - "relatedInformation", - Some (JSON_Array (List.map diagnostic.relatedInformation ~f:print_related)); - "relatedLocations", Some (JSON_Array (List.map diagnostic.relatedLocations ~f:print_related)); - ] - in - JSON_Object [ - "uri", JSON_String r.uri; - "diagnostics", JSON_Array (List.map r.diagnostics ~f:print_diagnostic) - ] - - -(************************************************************************) -(** window/logMessage notification **) -(************************************************************************) - -let print_logMessage (type_: MessageType.t) (message: string) : json = - let open LogMessage in - let r = { type_; message; } in - JSON_Object [ - "type", print_messageType r.type_; - "message", JSON_String r.message; - ] - - -(************************************************************************) -(** window/showMessage notification **) -(************************************************************************) - -let print_showMessage (type_: MessageType.t) (message: string) : json = - let open ShowMessage in - let r = { type_; message; } in - JSON_Object [ - "type", print_messageType r.type_; - "message", JSON_String r.message; - ] - -(************************************************************************) -(** window/showMessage request **) -(************************************************************************) - -let print_showMessageRequest (r: ShowMessageRequest.showMessageRequestParams) : json = - let print_action (action: ShowMessageRequest.messageActionItem) : json = - JSON_Object [ - "title", JSON_String action.ShowMessageRequest.title; - ] - in - Jprint.object_opt [ - "type", Some (print_messageType 
r.ShowMessageRequest.type_); - "message", Some (JSON_String r.ShowMessageRequest.message); - "actions", Some (JSON_Array (List.map r.ShowMessageRequest.actions ~f:print_action)); - ] - -let parse_result_showMessageRequest (result: json option) : ShowMessageRequest.result = - let open ShowMessageRequest in - let title = Jget.string_opt result "title" in - Option.map title ~f:(fun title -> { title; }) - - -(************************************************************************) -(** window/showStatus request **) -(************************************************************************) - -let print_showStatus (r: ShowStatus.showStatusParams) : json = - let print_action (action: ShowMessageRequest.messageActionItem) : json = - JSON_Object [ - "title", JSON_String action.ShowMessageRequest.title; - ] - in - let rr = r.ShowStatus.request in - Jprint.object_opt [ - "type", Some (print_messageType rr.ShowMessageRequest.type_); - "actions", Some (JSON_Array (List.map rr.ShowMessageRequest.actions ~f:print_action)); - "message", Some (JSON_String rr.ShowMessageRequest.message); - "shortMessage", Option.map r.ShowStatus.shortMessage ~f:string_; - "progress", Option.map r.ShowStatus.progress ~f:(fun progress -> Jprint.object_opt [ - "numerator", Some (int_ progress); - "denominator", Option.map r.ShowStatus.total ~f:int_; - ]); - ] - - -(************************************************************************) -(** window/progress notification **) -(************************************************************************) - -let print_progress (id: int) (label: string option) : json = - let r = { Progress.id; label; } in - JSON_Object [ - "id", r.Progress.id |> int_; - "label", match r.Progress.label with None -> JSON_Null | Some s -> JSON_String s; - ] - - -(************************************************************************) -(** window/actionRequired notification **) -(************************************************************************) - -let print_actionRequired (id: int) (label: string option) : json = - let r = { ActionRequired.id; label; } in - JSON_Object [ - "id", r.ActionRequired.id |> int_; - "label", match r.ActionRequired.label with None -> JSON_Null | Some s -> JSON_String s; - ] - - -(************************************************************************) -(** telemetry/connectionStatus notification **) -(************************************************************************) - -let print_connectionStatus (p: ConnectionStatus.params) : json = - let open ConnectionStatus in - JSON_Object [ - "isConnected", JSON_Bool p.isConnected; - ] - - -(************************************************************************) -(** textDocument/hover request **) -(************************************************************************) - -let parse_hover (params: json option) : Hover.params = - parse_textDocumentPositionParams params - -let print_hover (r: Hover.result) : json = - let open Hover in - match r with - | None -> - JSON_Null - | Some r -> - Jprint.object_opt [ - "contents", Some (JSON_Array - (List.map r.Hover.contents ~f:print_markedItem)); - "range", Option.map r.range ~f:print_range; - ] - - -(************************************************************************) -(** textDocument/definition request **) -(************************************************************************) - -let parse_definition (params: json option) : Definition.params = - parse_textDocumentPositionParams params - -let print_definition (r: Definition.result) : json = - JSON_Array (List.map 
r ~f:print_definition_location) - - -(************************************************************************) -(** completionItem/resolve request **) -(************************************************************************) - -let parse_completionItem (params: json option) : CompletionItemResolve.params = - let open Completion in - let textEdits = - (Jget.obj_opt params "textEdit") :: (Jget.array_d params "additionalTextEdits" ~default:[]) - |> List.filter_map ~f:parse_textEdit - in - let command = match Jget.obj_opt params "command" with - | None -> None - | c -> Some (parse_command c) - in - { - label = Jget.string_exn params "label"; - kind = Option.bind (Jget.int_opt params "kind") completionItemKind_of_int_opt; - detail = Jget.string_opt params "detail"; - inlineDetail = Jget.string_opt params "inlineDetail"; - itemType = Jget.string_opt params "itemType"; - documentation = Jget.string_opt params "documentation"; - sortText = Jget.string_opt params "sortText"; - filterText = Jget.string_opt params "filterText"; - insertText = Jget.string_opt params "insertText"; - insertTextFormat = Option.bind (Jget.int_opt params "insertTextFormat") insertFormat_of_int_opt; - textEdits; - command; - data = Jget.obj_opt params "data" - } - - -let print_completionItem (item: Completion.completionItem) : json = - let open Completion in - Jprint.object_opt [ - "label", Some (JSON_String item.label); - "kind", Option.map item.kind (fun x -> int_ @@ int_of_completionItemKind x); - "detail", Option.map item.detail string_; - "inlineDetail", Option.map item.inlineDetail string_; - "itemType", Option.map item.itemType string_; - "documentation", Option.map item.documentation string_; - "sortText", Option.map item.sortText string_; - "filterText", Option.map item.filterText string_; - "insertText", Option.map item.insertText string_; - "insertTextFormat", Option.map item.insertTextFormat (fun x -> int_ @@ int_of_insertFormat x); - "textEdit", Option.map (List.hd item.textEdits) print_textEdit; - "additionalTextEdit", (match (List.tl item.textEdits) with - | None | Some [] -> None - | Some l -> Some (JSON_Array (List.map l ~f:print_textEdit))); - "command", Option.map item.command print_command; - "data", item.data; - ] - - -(************************************************************************) -(** textDocument/completion request **) -(************************************************************************) - -let parse_completion (params: json option) : Completion.params = - let open Lsp.Completion in - let context = Jget.obj_opt params "context" in - { - loc = parse_textDocumentPositionParams params; - context = match context with - | Some _ -> - Some { - triggerKind = (match Jget.int_exn context "triggerKind" with - | 1 -> Invoked - | 2 -> TriggerCharacter - | 3 -> TriggerForIncompleteCompletions - | x -> failwith ("Unsupported trigger kind: "^(string_of_int x)) - ); - } - | None -> None - } - -let print_completion (r: Completion.result) : json = - let open Completion in - JSON_Object [ - "isIncomplete", JSON_Bool r.isIncomplete; - "items", JSON_Array (List.map r.items ~f:print_completionItem); - ] - - -(************************************************************************) -(** workspace/symbol request **) -(************************************************************************) - - -let parse_workspaceSymbol (params: json option) : WorkspaceSymbol.params = - let open WorkspaceSymbol in - { - query = Jget.string_exn params "query"; - } - -let print_workspaceSymbol (r: WorkspaceSymbol.result) 
: json = - JSON_Array (List.map r ~f:print_symbolInformation) - - -(************************************************************************) -(** textDocument/documentSymbol request **) -(************************************************************************) - -let parse_documentSymbol (params: json option) : DocumentSymbol.params = - let open DocumentSymbol in - { - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - } - -let print_documentSymbol (r: DocumentSymbol.result) : json = - JSON_Array (List.map r ~f:print_symbolInformation) - - -(************************************************************************) -(** textDocument/references request **) -(************************************************************************) - -let parse_findReferences (params: json option) : FindReferences.params = - let context = Jget.obj_opt params "context" in - { FindReferences. - loc = parse_textDocumentPositionParams params; - context = - { FindReferences. - includeDeclaration = Jget.bool_d context "includeDeclaration" true; - includeIndirectReferences = Jget.bool_d context "includeIndirectReferences" false; - } - } - -let print_findReferences (r: Location.t list) : json = - JSON_Array (List.map r ~f:print_location) - - -(************************************************************************) -(** textDocument/documentHighlight request **) -(************************************************************************) - -let parse_documentHighlight (params: json option) - : DocumentHighlight.params = - parse_textDocumentPositionParams params - -let print_documentHighlight (r: DocumentHighlight.result) : json = - let open DocumentHighlight in - let print_highlightKind kind = match kind with - | Text -> int_ 1 - | Read -> int_ 2 - | Write -> int_ 3 - in - let print_highlight highlight = - Jprint.object_opt [ - "range", Some (print_range highlight.range); - "kind", Option.map highlight.kind ~f:print_highlightKind - ] - in - JSON_Array (List.map r ~f:print_highlight) - - -(************************************************************************) -(** textDocument/typeCoverage request **) -(************************************************************************) - -let parse_typeCoverage (params: json option) - : TypeCoverage.params = - { TypeCoverage. - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - } - -let print_typeCoverage (r: TypeCoverage.result) : json = - let open TypeCoverage in - let print_uncov (uncov: uncoveredRange) : json = - Jprint.object_opt [ - "range", Some (print_range uncov.range); - "message", Option.map uncov.message ~f:string_; - ] - in - JSON_Object [ - "coveredPercent", int_ r.coveredPercent; - "uncoveredRanges", JSON_Array (List.map r.uncoveredRanges ~f:print_uncov); - "defaultMessage", JSON_String r.defaultMessage; - ] - -(************************************************************************) -(** workspace/toggleTypeCoverage request **) -(************************************************************************) -let parse_toggleTypeCoverage (params: json option) - : ToggleTypeCoverage.params = - { ToggleTypeCoverage. - toggle = Jget.bool_d params "toggle" ~default:false - } - -(************************************************************************) -(** textDocument/formatting request **) -(************************************************************************) - -let parse_documentFormatting (params: json option) - : DocumentFormatting.params = - { DocumentFormatting. 
- textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - options = Jget.obj_opt params "options" |> parse_formattingOptions; - } - -let print_documentFormatting (r: DocumentFormatting.result) - : json = - JSON_Array (List.map r ~f:print_textEdit) - - -(************************************************************************) -(** textDocument/rangeFormatting request **) -(************************************************************************) - -let parse_documentRangeFormatting (params: json option) - : DocumentRangeFormatting.params = - { DocumentRangeFormatting. - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - range = Jget.obj_exn params "range" |> parse_range_exn; - options = Jget.obj_opt params "options" |> parse_formattingOptions; - } - -let print_documentRangeFormatting (r: DocumentRangeFormatting.result) - : json = - JSON_Array (List.map r ~f:print_textEdit) - - -(************************************************************************) -(** textDocument/onTypeFormatting request **) -(************************************************************************) - -let parse_documentOnTypeFormatting (params: json option) - : DocumentOnTypeFormatting.params = - { DocumentOnTypeFormatting. - textDocument = Jget.obj_exn params "textDocument" - |> parse_textDocumentIdentifier; - position = Jget.obj_exn params "position" |> parse_position; - ch = Jget.string_exn params "ch"; - options = Jget.obj_opt params "options" |> parse_formattingOptions; - } - -let print_documentOnTypeFormatting (r: DocumentOnTypeFormatting.result) - : json = - JSON_Array (List.map r ~f:print_textEdit) - - -(************************************************************************) -(** initialize request **) -(************************************************************************) - -let parse_initialize (params: json option) : Initialize.params = - let open Initialize in - let rec parse_initialize json = - { - processId = Jget.int_opt json "processId"; - rootPath = Jget.string_opt json "rootPath"; - rootUri = Jget.string_opt json "rootUri"; - initializationOptions = Jget.obj_opt json "initializationOptions" - |> parse_initializationOptions; - client_capabilities = Jget.obj_opt json "capabilities" - |> parse_capabilities; - trace = Jget.string_opt json "trace" |> parse_trace; - } - and parse_trace (s : string option) : trace = match s with - | Some "messages" -> Messages - | Some "verbose" -> Verbose - | _ -> Off - and parse_initializationOptions json = - { - useTextEditAutocomplete = Jget.bool_d json "useTextEditAutocomplete" ~default:false; - liveSyntaxErrors = Jget.bool_d json "liveSyntaxErrors" ~default:true; - } - and parse_capabilities json = - { - workspace = Jget.obj_opt json "workspace" |> parse_workspace; - textDocument = Jget.obj_opt json "textDocument" |> parse_textDocument; - window = Jget.obj_opt json "window" |> parse_window; - telemetry = Jget.obj_opt json "telemetry" |> parse_telemetry; - } - and parse_workspace json = - { - applyEdit = Jget.bool_d json "applyEdit" ~default:false; - workspaceEdit = Jget.obj_opt json "workspaceEdit" - |> parse_workspaceEdit; - } - and parse_workspaceEdit json = - { - documentChanges = Jget.bool_d json "documentChanges" ~default:false; - } - and parse_textDocument json = - { - synchronization = - Jget.obj_opt json "synchronization" |> parse_synchronization; - completion = Jget.obj_opt json "completion" |> parse_completion; - } - and parse_synchronization json = - { - can_willSave = Jget.bool_d 
json "willSave" ~default:false; - can_willSaveWaitUntil = - Jget.bool_d json "willSaveWaitUntil" ~default:false; - can_didSave = Jget.bool_d json "didSave" ~default:false; - } - and parse_completion json = - { completionItem = - Jget.obj_opt json "completionItem" |> parse_completionItem; - } - and parse_completionItem json = - { snippetSupport = Jget.bool_d json "snippetSupport" ~default:false; - } - and parse_window json = - { - status = Jget.obj_opt json "status" |> Option.is_some; - progress = Jget.obj_opt json "progress" |> Option.is_some; - actionRequired = Jget.obj_opt json "actionRequired" |> Option.is_some; - } - and parse_telemetry json = - { - connectionStatus = Jget.obj_opt json "connectionStatus" |> Option.is_some; - } - in - parse_initialize params - -let print_initializeError (r: Initialize.errorData) : json = - let open Initialize in - JSON_Object [ - "retry", JSON_Bool r.retry; - ] - -let print_initialize (r: Initialize.result) : json = - let open Initialize in - let print_textDocumentSyncKind = function - | NoSync -> int_ 0 - | FullSync -> int_ 1 - | IncrementalSync -> int_ 2 in - let cap = r.server_capabilities in - let sync = cap.textDocumentSync - in - JSON_Object [ - "capabilities", Jprint.object_opt [ - "textDocumentSync", Some (Jprint.object_opt [ - "openClose", Some (JSON_Bool sync.want_openClose); - "change", Some (print_textDocumentSyncKind sync.want_change); - "willSave", Some (JSON_Bool sync.want_willSave); - "willSaveWaitUntil", Some (JSON_Bool sync.want_willSaveWaitUntil); - "save", Option.map sync.want_didSave ~f:(fun save -> JSON_Object [ - "includeText", JSON_Bool save.includeText; - ]); - ]); - "hoverProvider", Some (JSON_Bool cap.hoverProvider); - "completionProvider", Option.map cap.completionProvider ~f:(fun comp -> JSON_Object [ - "resolveProvider", JSON_Bool comp.resolveProvider; - "triggerCharacters", Jprint.string_array comp.completion_triggerCharacters; - ]); - "signatureHelpProvider", Option.map cap.signatureHelpProvider ~f:(fun shp -> JSON_Object [ - "triggerCharacters", Jprint.string_array shp.sighelp_triggerCharacters; - ]); - "definitionProvider", Some (JSON_Bool cap.definitionProvider); - "referencesProvider", Some (JSON_Bool cap.referencesProvider); - "documentHighlightProvider", Some (JSON_Bool cap.documentHighlightProvider); - "documentSymbolProvider", Some (JSON_Bool cap.documentSymbolProvider); - "workspaceSymbolProvider", Some (JSON_Bool cap.workspaceSymbolProvider); - "codeActionProvider", Some (JSON_Bool cap.codeActionProvider); - "codeLensProvider", Option.map cap.codeLensProvider ~f:(fun codelens -> JSON_Object [ - "resolveProvider", JSON_Bool codelens.codelens_resolveProvider; - ]); - "documentFormattingProvider", Some (JSON_Bool cap.documentFormattingProvider); - "documentRangeFormattingProvider", Some (JSON_Bool cap.documentRangeFormattingProvider); - "documentOnTypeFormattingProvider", Option.map - cap.documentOnTypeFormattingProvider ~f:(fun o -> JSON_Object [ - "firstTriggerCharacter", JSON_String o.firstTriggerCharacter; - "moreTriggerCharacter", Jprint.string_array o.moreTriggerCharacter; - ]); - "renameProvider", Some (JSON_Bool cap.renameProvider); - "documentLinkProvider", Option.map cap.documentLinkProvider ~f:(fun dlp -> JSON_Object [ - "resolveProvider", JSON_Bool dlp.doclink_resolveProvider; - ]); - "executeCommandProvider", Option.map cap.executeCommandProvider ~f:(fun p -> JSON_Object [ - "commands", Jprint.string_array p.commands; - ]); - "typeCoverageProvider", Some (JSON_Bool cap.typeCoverageProvider); - 
"rageProvider", Some (JSON_Bool cap.rageProvider); - ]; - ] - - -(************************************************************************) -(** error response **) -(************************************************************************) - -let error_of_exn (e: exn) : Lsp.Error.t = - let open Lsp.Error in - match e with - | Error.Parse message -> {code= -32700; message; data=None;} - | Error.InvalidRequest message -> {code= -32600; message; data=None;} - | Error.MethodNotFound message -> {code= -32601; message; data=None;} - | Error.InvalidParams message -> {code= -32602; message; data=None;} - | Error.InternalError message -> {code= -32603; message; data=None;} - | Error.ServerErrorStart (message, data) -> - {code= -32099; message; data=Some (print_initializeError data);} - | Error.ServerErrorEnd message -> {code= -32000; message; data=None;} - | Error.ServerNotInitialized message -> {code= -32002; message; data=None;} - | Error.Unknown message -> {code= -32001; message; data=None;} - | Error.RequestCancelled message -> {code= -32800; message; data=None;} - | Exit_status.Exit_with code -> {code= -32001; message=Exit_status.to_string code; data=None;} - | _ -> {code= -32001; message=Printexc.to_string e; data=None;} - -let print_error (e: Error.t) (stack: string) : json = - let open Hh_json in - let open Error in - let stack_json_property = ("stack", string_ stack) in - (* We'd like to add a stack-trace. The only place we can fit it, that will *) - (* be respected by vscode-jsonrpc, is inside the 'data' field. And we can *) - (* do that only if data is an object. We can synthesize one if needed. *) - let data = match e.data with - | None -> JSON_Object [stack_json_property] - | Some (JSON_Object o) -> JSON_Object (stack_json_property :: o) - | Some primitive -> primitive - in - JSON_Object [ - "code", int_ e.code; - "message", string_ e.message; - "data", data; - ] - -let parse_error (error: json) : Error.t = - let json = Some error in - let code = Jget.int_exn json "code" in - let message = Jget.string_exn json "message" in - let data = Jget.val_opt json "data" - in - {Error.code; message; data} - - -(************************************************************************) -(** universal parser+printer **) -(************************************************************************) - -let request_name_to_string (request: lsp_request) : string = - match request with - | ShowMessageRequestRequest _ -> "window/showMessageRequest" - | ShowStatusRequest _ -> "window/showStatus" - | InitializeRequest _ -> "initialize" - | ShutdownRequest -> "shutdown" - | HoverRequest _ -> "textDocument/hover" - | CompletionRequest _ -> "textDocument/completion" - | CompletionItemResolveRequest _ -> "completionItem/resolve" - | DefinitionRequest _ -> "textDocument/definition" - | WorkspaceSymbolRequest _ -> "workspace/symbol" - | DocumentSymbolRequest _ -> "textDocument/documentSymbol" - | FindReferencesRequest _ -> "textDocument/references" - | DocumentHighlightRequest _ -> "textDocument/documentHighlight" - | TypeCoverageRequest _ -> "textDocument/typeCoverage" - | DocumentFormattingRequest _ -> "textDocument/formatting" - | DocumentRangeFormattingRequest _ -> "textDocument/rangeFormatting" - | DocumentOnTypeFormattingRequest _ -> "textDocument/onTypeFormatting" - | RageRequest -> "telemetry/rage" - | RenameRequest _ -> "textDocument/rename" - | UnknownRequest (method_, _params) -> method_ - -let result_name_to_string (result: lsp_result) : string = - match result with - | ShowMessageRequestResult _ -> 
"window/showMessageRequest" - | ShowStatusResult _ -> "window/showStatus" - | InitializeResult _ -> "initialize" - | ShutdownResult -> "shutdown" - | HoverResult _ -> "textDocument/hover" - | CompletionResult _ -> "textDocument/completion" - | CompletionItemResolveResult _ -> "completionItem/resolve" - | DefinitionResult _ -> "textDocument/definition" - | WorkspaceSymbolResult _ -> "workspace/symbol" - | DocumentSymbolResult _ -> "textDocument/documentSymbol" - | FindReferencesResult _ -> "textDocument/references" - | DocumentHighlightResult _ -> "textDocument/documentHighlight" - | TypeCoverageResult _ -> "textDocument/typeCoverage" - | DocumentFormattingResult _ -> "textDocument/formatting" - | DocumentRangeFormattingResult _ -> "textDocument/rangeFormatting" - | DocumentOnTypeFormattingResult _ -> "textDocument/onTypeFormatting" - | RageResult _ -> "telemetry/rage" - | RenameResult _ -> "textDocument/rename" - | ErrorResult (e, _stack) -> "ERROR/" ^ (e.Error.message) - -let notification_name_to_string (notification: lsp_notification) : string = - match notification with - | ExitNotification -> "exit" - | CancelRequestNotification _ -> "$/cancelRequest" - | PublishDiagnosticsNotification _ -> "textDocument/publishDiagnostics" - | DidOpenNotification _ -> "textDocument/didOpen" - | DidCloseNotification _ -> "textDocument/didClose" - | DidSaveNotification _ -> "textDocument/didSave" - | DidChangeNotification _ -> "textDocument/didChange" - | TelemetryNotification _ -> "telemetry/event" - | LogMessageNotification _ -> "window/logMessage" - | ShowMessageNotification _ -> "window/showMessage" - | ProgressNotification _ -> "window/progress" - | ActionRequiredNotification _ -> "window/actionRequired" - | ConnectionStatusNotification _ -> "telemetry/connectionStatus" - | UnknownNotification (method_, _params) -> method_ - -let message_name_to_string (message: lsp_message) : string = - match message with - | RequestMessage (_, r) -> request_name_to_string r - | NotificationMessage n -> notification_name_to_string n - | ResponseMessage (_, r) -> result_name_to_string r - -let denorm_message_to_string (message: lsp_message) : string = - match message with - | RequestMessage (id, r) -> - Printf.sprintf "request %s %s" (id_to_string id) (request_name_to_string r) - | NotificationMessage n -> - Printf.sprintf "notification %s" (notification_name_to_string n) - | ResponseMessage (id, ErrorResult (e, _stack)) -> - Printf.sprintf "error %s %s" (id_to_string id) (e.Error.message) - | ResponseMessage (id, r) -> - Printf.sprintf "result %s %s" (id_to_string id) (result_name_to_string r) - -let parse_lsp_request (method_: string) (params: json option) : lsp_request = - match method_ with - | "initialize" -> InitializeRequest (parse_initialize params) - | "shutdown" -> ShutdownRequest - | "textDocument/hover" -> HoverRequest (parse_hover params) - | "textDocument/completion" -> CompletionRequest (parse_completion params) - | "textDocument/definition" -> DefinitionRequest (parse_definition params) - | "workspace/symbol" -> WorkspaceSymbolRequest (parse_workspaceSymbol params) - | "textDocument/documentSymbol" -> DocumentSymbolRequest (parse_documentSymbol params) - | "textDocument/references" -> FindReferencesRequest (parse_findReferences params) - | "textDocument/rename" -> RenameRequest (parse_documentRename params) - | "textDocument/documentHighlight" -> DocumentHighlightRequest (parse_documentHighlight params) - | "textDocument/typeCoverage" -> TypeCoverageRequest (parse_typeCoverage params) - | 
"textDocument/formatting" -> DocumentFormattingRequest (parse_documentFormatting params) - | "textDocument/rangeFormatting" -> - DocumentRangeFormattingRequest (parse_documentRangeFormatting params) - | "textDocument/onTypeFormatting" -> - DocumentOnTypeFormattingRequest (parse_documentOnTypeFormatting params) - | "telemetry/rage" -> RageRequest - | "completionItem/resolve" - | "window/showMessageRequest" - | "window/showStatus" - | _ -> UnknownRequest (method_, params) - -let parse_lsp_notification (method_: string) (params: json option) : lsp_notification = - match method_ with - | "$/cancelRequest" -> CancelRequestNotification (parse_cancelRequest params) - | "exit" -> ExitNotification - | "textDocument/didOpen" -> DidOpenNotification (parse_didOpen params) - | "textDocument/didClose" -> DidCloseNotification (parse_didClose params) - | "textDocument/didSave" -> DidSaveNotification (parse_didSave params) - | "textDocument/didChange" -> DidChangeNotification (parse_didChange params) - | "textDocument/publishDiagnostics" - | "window/logMessage" - | "window/showMessage" - | "window/progress" - | "window/actionRequired" - | "telemetry/connectionStatus" - | _ -> UnknownNotification (method_, params) - -let parse_lsp_result (request: lsp_request) (result: json) : lsp_result = - let method_ = request_name_to_string request in - match request with - | ShowMessageRequestRequest _ -> - ShowMessageRequestResult (parse_result_showMessageRequest (Some result)) - | ShowStatusRequest _ -> - ShowStatusResult (parse_result_showMessageRequest (Some result)) (* shares result type *) - | InitializeRequest _ - | ShutdownRequest - | HoverRequest _ - | CompletionRequest _ - | CompletionItemResolveRequest _ - | DefinitionRequest _ - | WorkspaceSymbolRequest _ - | DocumentSymbolRequest _ - | FindReferencesRequest _ - | DocumentHighlightRequest _ - | TypeCoverageRequest _ - | DocumentFormattingRequest _ - | DocumentRangeFormattingRequest _ - | DocumentOnTypeFormattingRequest _ - | RageRequest - | RenameRequest _ - | UnknownRequest _ -> - raise (Error.Parse ("Don't know how to parse LSP response " ^ method_)) - -(* parse_lsp: non-jsonrpc inputs - will raise an exception *) -(* requests and notifications - will raise an exception if they're malformed, *) -(* otherwise return Some *) -(* responses - will raise an exception if they're malformed, will return None *) -(* if they're absent from the "outstanding" map, otherwise return Some. 
*) -let parse_lsp (json: json) (outstanding: lsp_id -> lsp_request) : lsp_message = - let json = Some json in - let id = Jget.val_opt json "id" |> parse_id_opt in - let method_opt = Jget.string_opt json "method" in - let params = Jget.val_opt json "params" in - let result = Jget.val_opt json "result" in - let error = Jget.val_opt json "error" in - match id, method_opt, result, error with - | None, Some method_, _, _ -> NotificationMessage (parse_lsp_notification method_ params) - | Some id, Some method_, _, _ -> RequestMessage (id, parse_lsp_request method_ params) - | Some id, _, Some result, _ -> - let request = outstanding id in ResponseMessage (id, parse_lsp_result request result) - | Some id, _, _, Some error -> ResponseMessage (id, ErrorResult (parse_error error, "")) - | _, _, _, _ -> raise (Error.Parse "Not JsonRPC") - - -let print_lsp_request (id: lsp_id) (request: lsp_request) : json = - let method_ = request_name_to_string request in - let params = match request with - | ShowMessageRequestRequest r -> print_showMessageRequest r - | ShowStatusRequest r -> print_showStatus r - | InitializeRequest _ - | ShutdownRequest - | HoverRequest _ - | CompletionRequest _ - | CompletionItemResolveRequest _ - | DefinitionRequest _ - | WorkspaceSymbolRequest _ - | DocumentSymbolRequest _ - | FindReferencesRequest _ - | DocumentHighlightRequest _ - | TypeCoverageRequest _ - | DocumentFormattingRequest _ - | DocumentRangeFormattingRequest _ - | DocumentOnTypeFormattingRequest _ - | RageRequest - | RenameRequest _ - | UnknownRequest _ -> - failwith ("Don't know how to print request " ^ method_) - in - JSON_Object [ - "jsonrpc", JSON_String "2.0"; - "id", print_id id; - "method", JSON_String method_; - "params", params; - ] - -let print_lsp_response (id: lsp_id) (result: lsp_result) : json = - let method_ = result_name_to_string result in - let json = match result with - | InitializeResult r -> print_initialize r - | ShutdownResult -> print_shutdown () - | HoverResult r -> print_hover r - | CompletionResult r -> print_completion r - | DefinitionResult r -> print_definition r - | WorkspaceSymbolResult r -> print_workspaceSymbol r - | DocumentSymbolResult r -> print_documentSymbol r - | FindReferencesResult r -> print_findReferences r - | DocumentHighlightResult r -> print_documentHighlight r - | TypeCoverageResult r -> print_typeCoverage r - | DocumentFormattingResult r -> print_documentFormatting r - | DocumentRangeFormattingResult r -> print_documentRangeFormatting r - | DocumentOnTypeFormattingResult r -> print_documentOnTypeFormatting r - | RageResult r -> print_rage r - | RenameResult r -> print_documentRename r - | ShowMessageRequestResult _ - | ShowStatusResult _ - | CompletionItemResolveResult _ -> - failwith ("Don't know how to print result " ^ method_) - | ErrorResult (e, stack) -> print_error e stack - in - match result with - | ErrorResult _ -> - JSON_Object [ - "jsonrpc", JSON_String "2.0"; - "id", print_id id; - "error", json; - ] - | _ -> - JSON_Object [ - "jsonrpc", JSON_String "2.0"; - "id", print_id id; - "result", json; - ] - -let print_lsp_notification (notification: lsp_notification) : json = - let method_ = notification_name_to_string notification in - let params = match notification with - | CancelRequestNotification r -> print_cancelRequest r - | PublishDiagnosticsNotification r -> print_diagnostics r - | TelemetryNotification r -> print_logMessage r.LogMessage.type_ r.LogMessage.message - | LogMessageNotification r -> print_logMessage r.LogMessage.type_ r.LogMessage.message 
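(* Illustrative sketch, not part of this patch: the routing rule that parse_lsp above
   implements, reduced to a self-contained form. A JSON-RPC packet with a "method" but no
   "id" is a notification; "id" plus "method" is a request; "id" plus "result" or "error"
   is the response to an earlier request; anything else is rejected. The [packet] record
   and [classify_message] below are hypothetical stand-ins for the Hh_json/Jget plumbing
   the real code uses. *)
type packet = {
  id : int option;
  method_ : string option;
  result : string option;   (* payload bodies elided; strings stand in for json *)
  error : string option;
}

type classified =
  | Notification of string                       (* method *)
  | Request of int * string                      (* id, method *)
  | Response of int * (string, string) result    (* id, Ok result | Error error *)

let classify_message (p : packet) : classified =
  match (p.id, p.method_, p.result, p.error) with
  | (None, Some m, _, _) -> Notification m
  | (Some id, Some m, _, _) -> Request (id, m)
  | (Some id, _, Some r, _) -> Response (id, Ok r)
  | (Some id, _, _, Some e) -> Response (id, Error e)
  | _ -> failwith "Not JsonRPC"

let () =
  assert (classify_message { id = None; method_ = Some "exit"; result = None; error = None }
          = Notification "exit")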
- | ShowMessageNotification r -> print_showMessage r.ShowMessage.type_ r.ShowMessage.message - | ProgressNotification r -> print_progress r.Progress.id r.Progress.label - | ActionRequiredNotification r -> - print_actionRequired r.ActionRequired.id r.ActionRequired.label - | ConnectionStatusNotification r -> print_connectionStatus r - | ExitNotification - | DidOpenNotification _ - | DidCloseNotification _ - | DidSaveNotification _ - | DidChangeNotification _ - | UnknownNotification _ -> - failwith ("Don't know how to print notification " ^ method_) - in - JSON_Object [ - "jsonrpc", JSON_String "2.0"; - "method", JSON_String method_; - "params", params; - ] - -let print_lsp (message: lsp_message) : json = - match message with - | RequestMessage (id, request) -> print_lsp_request id request - | ResponseMessage (id, result) -> print_lsp_response id result - | NotificationMessage notification -> print_lsp_notification notification diff --git a/hack/utils/lsp_helpers.ml b/hack/utils/lsp_helpers.ml deleted file mode 100644 index f3cb9a1ec63..00000000000 --- a/hack/utils/lsp_helpers.ml +++ /dev/null @@ -1,367 +0,0 @@ -(* A few helpful wrappers around LSP *) - -open Lsp -open Lsp_fmt - -let progress_and_actionRequired_counter = ref 0 - -(************************************************************************) -(** Conversions **) -(************************************************************************) - -let url_scheme_regex = Str.regexp "^\\([a-zA-Z][a-zA-Z0-9+.-]+\\):" -(* this requires schemes with 2+ characters, so "c:\path" isn't considered a scheme *) - -let lsp_uri_to_path (uri: string) : string = - if Str.string_match url_scheme_regex uri 0 then - let scheme = Str.matched_group 1 uri in - if scheme = "file" then - File_url.parse uri - else - raise (Error.InvalidParams (Printf.sprintf "Not a valid file url '%s'" uri)) - else - uri - -let path_to_lsp_uri (path: string) ~(default_path: string): string = - if path = "" then File_url.create default_path - else File_url.create path - -let lsp_textDocumentIdentifier_to_filename - (identifier: Lsp.TextDocumentIdentifier.t) - : string = - let open Lsp.TextDocumentIdentifier in - lsp_uri_to_path identifier.uri - -let lsp_position_to_fc (pos: Lsp.position) : File_content.position = - { File_content. - line = pos.Lsp.line + 1; (* LSP is 0-based; File_content is 1-based. *) - column = pos.Lsp.character + 1; - } - -let lsp_range_to_fc (range: Lsp.range) : File_content.range = - { File_content. - st = lsp_position_to_fc range.Lsp.start; - ed = lsp_position_to_fc range.Lsp.end_; - } - -let lsp_edit_to_fc (edit: Lsp.DidChange.textDocumentContentChangeEvent) : File_content.text_edit = - { File_content. 
- range = Option.map edit.DidChange.range ~f:lsp_range_to_fc; - text = edit.DidChange.text; - } - -let apply_changes (text: string) (contentChanges: DidChange.textDocumentContentChangeEvent list) - : (string, string * Utils.callstack) result = - let edits = List.map lsp_edit_to_fc contentChanges - in - File_content.edit_file text edits - -let get_char_from_lsp_position (content: string) (position: Lsp.position) - : char = - let fc_position = lsp_position_to_fc position in - let open File_content in - get_char content (get_offset content fc_position) - -let apply_changes_unsafe text (contentChanges: DidChange.textDocumentContentChangeEvent list) - : string = - match apply_changes text contentChanges with - | Ok r -> r - | Error (e, _stack) -> failwith e - - -(************************************************************************) -(** Range calculations **) -(************************************************************************) - -(* We need to do intersection and other calculations on ranges. - * The functions in the following module all assume LSP 0-based ranges, - * and assume without testing that a range's start is equal to or before - * its end. *) -let pos_compare (p1: position) (p2: position) : int = - if p1.line < p2.line then -1 - else if p1.line > p2.line then 1 - else p1.character - p2.character - -(* Given a "selection" range A..B and a "squiggle" range a..b, how do they overlap? - * There are 12 ways to order the four letters ABab, of which six - * satisfy both A<=B and a<=b. Here they are. *) -type range_overlap = - | Selection_before_start_of_squiggle (* ABab *) - | Selection_overlaps_start_of_squiggle (* AaBb *) - | Selection_covers_whole_squiggle (* AabB *) - | Selection_in_middle_of_squiggle (* aABb *) - | Selection_overlaps_end_of_squiggle (* aAbB *) - | Selection_after_end_of_squiggle (* abAB *) - -(* Computes how two ranges "selection" and "squiggle" overlap *) -let get_range_overlap (selection: range) (squiggle: range) : range_overlap = - let selStart_leq_squiggleStart = pos_compare selection.start squiggle.start <= 0 in - let selStart_leq_squiggleEnd = pos_compare selection.start squiggle.end_ <= 0 in - let selEnd_lt_squiggleStart = pos_compare selection.end_ squiggle.start < 0 in - let selEnd_lt_squiggleEnd = pos_compare selection.end_ squiggle.end_ < 0 in - (* Q. Why does it test "<=" for the first two and "<" for the last two? *) - (* Intuitively you can trust that it has something to do with how ranges are *) - (* inclusive at their start and exclusive at their end. But the real reason *) - (* is just that I did an exhaustive case analysis to look at all cases where *) - (* A,B,a,b might be equal, and decided which outcome I wanted for each of them *) - (* because of how I'm going to treat them in other functions, and retrofitted *) - (* those answers into this function. 
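(* Illustrative sketch, not part of this patch: the same six-way case analysis on plain
   integer intervals, which makes the truth table used by get_range_overlap easy to check
   by hand. Positions are collapsed to single integers; [selS, selE) is the selection and
   [sqS, sqE) is the squiggle. The names and cases mirror the comment above. *)
type overlap =
  | Before_squiggle    (* A B a b *)
  | Overlaps_start     (* A a B b *)
  | Covers_whole       (* A a b B *)
  | In_middle          (* a A B b *)
  | Overlaps_end       (* a A b B *)
  | After_squiggle     (* a b A B *)

let classify_overlap ~sel:(selS, selE) ~squiggle:(sqS, sqE) : overlap =
  let s_le_ss = selS <= sqS in   (* selection start <= squiggle start *)
  let s_le_se = selS <= sqE in   (* selection start <= squiggle end   *)
  let e_lt_ss = selE < sqS in    (* selection end   <  squiggle start *)
  let e_lt_se = selE < sqE in    (* selection end   <  squiggle end   *)
  match (s_le_ss, s_le_se, e_lt_ss, e_lt_se) with
  | (true, true, true, true) -> Before_squiggle
  | (true, true, false, true) -> Overlaps_start
  | (true, true, false, false) -> Covers_whole
  | (false, true, false, true) -> In_middle
  | (false, true, false, false) -> Overlaps_end
  | (false, false, false, false) -> After_squiggle
  | _ -> invalid_arg "classify_overlap: expects selS <= selE and sqS <= sqE"

let () =
  assert (classify_overlap ~sel:(0, 2) ~squiggle:(5, 9) = Before_squiggle);
  assert (classify_overlap ~sel:(6, 7) ~squiggle:(5, 9) = In_middle)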
For instance, if squiggleStart==selEnd, *) - (* I'll want to handle it in the same way as squiggleStart Selection_before_start_of_squiggle - | true, true, false, true -> Selection_overlaps_start_of_squiggle - | true, true, false, false -> Selection_covers_whole_squiggle - | false, true, false, true -> Selection_in_middle_of_squiggle - | false, true, false, false -> Selection_overlaps_end_of_squiggle - | false, false, false, false -> Selection_after_end_of_squiggle - | true, false, _, _ -> failwith "sel.start proves squiggle.start > squiggle.end_" - | _, _, true, false -> failwith "sel.end proves squiggle.start > squiggle.end_" - | false, _, true, _ -> failwith "squiggle.start proves sel.start > sel.end_" - | _, false, _, true -> failwith "squiggle.end_ proves sel.start > sel.end_" - -(* this structure models a change where a certain range is replaced with - * a block of text. For instance, if you merely insert a single character, - * then remove_range.start==remove_range.end_ and insert_lines=0 - * and insert_chars_on_final_line=1. *) -type range_replace = { - remove_range: range; - insert_lines: int; - insert_chars_on_final_line: int; -} - -(* If you have a position "p", and some range before this point is replaced with - * text of a certain number of lines, the last line having a certain number of characters, - * then how will the position be shifted? - * Note: this function assumes but doesn't verify that the range ends on or before - * the position. *) -let update_pos_due_to_prior_replace (p: position) (replace: range_replace) : position = - if replace.remove_range.end_.line < p.line then - (* The replaced range doesn't touch the position, so position merely gets shifted up/down *) - let line = p.line - (replace.remove_range.end_.line - replace.remove_range.start.line) - + replace.insert_lines in - { p with line; } - else if replace.insert_lines > 0 then - (* The position is on the final line and multiple lines were inserted *) - let line = p.line - (replace.remove_range.end_.line - replace.remove_range.start.line) - + replace.insert_lines in - let character = replace.insert_chars_on_final_line + - (p.character - replace.remove_range.end_.character) in - { line; character; } - else - (* The position is on the line where a few characters were inserted *) - let line = p.line - (replace.remove_range.end_.line - replace.remove_range.start.line) in - let character = replace.remove_range.start.character + replace.insert_chars_on_final_line + - (p.character - replace.remove_range.end_.character) in - { line; character; } - -(* If you have a squiggle, and some range in the document is replaced with a block - * some lines long and with insert_chars on the final line, then what's the new - * range of the squiggle? 
*) -let update_range_due_to_replace (squiggle: range) (replace: range_replace) : range option = - match get_range_overlap replace.remove_range squiggle with - | Selection_before_start_of_squiggle -> - let start = update_pos_due_to_prior_replace squiggle.start replace in - let end_ = update_pos_due_to_prior_replace squiggle.end_ replace in - Some { start; end_; } - | Selection_overlaps_start_of_squiggle -> - let line = replace.remove_range.start.line + replace.insert_lines in - let character = if replace.insert_lines = 0 then - replace.remove_range.start.character + replace.insert_chars_on_final_line - else - replace.insert_chars_on_final_line in - let start = { line; character; } in - let end_ = update_pos_due_to_prior_replace squiggle.end_ replace in - Some { start; end_; } - | Selection_covers_whole_squiggle -> - None - | Selection_in_middle_of_squiggle -> - let start = squiggle.start in - let end_ = update_pos_due_to_prior_replace squiggle.end_ replace in - Some { start; end_; } - | Selection_overlaps_end_of_squiggle -> - let start = squiggle.start in - let end_ = replace.remove_range.start in - Some { start; end_; } - | Selection_after_end_of_squiggle -> - Some squiggle - -(* Moves all diagnostics in response to an LSP change. - * The change might insert text before a diagnostic squiggle (so the squiggle - * has to be moved down or to the right); it might delete text before the squiggle; - * it might modify text inside the squiggle; it might replace text that overlaps - * the squiggle in which case the squiggle gets truncated/moved; it might replace - * the squiggle in its entirety in which case the squiggle gets removed. - * Note that an LSP change is actually a set of changes, applied in order. *) -let update_diagnostics_due_to_change - (diagnostics: PublishDiagnostics.diagnostic list) - (change: Lsp.DidChange.params) - : PublishDiagnostics.diagnostic list = - let open PublishDiagnostics in - let replace_of_change change = - match change.DidChange.range with - | None -> None - | Some remove_range -> - let offset = String.length change.DidChange.text in - let pos = File_content.offset_to_position change.DidChange.text offset in (* 1-based *) - let insert_lines = pos.File_content.line - 1 in - let insert_chars_on_final_line = pos.File_content.column - 1 in - Some {remove_range; insert_lines; insert_chars_on_final_line; } in - let apply_replace diagnostic_opt replace_opt = - match diagnostic_opt, replace_opt with - | Some diagnostic, Some replace -> - let range = update_range_due_to_replace diagnostic.range replace in - Option.map range ~f:(fun range -> { diagnostic with range; }) - | _ -> None in - let replaces = Core_list.map change.DidChange.contentChanges ~f:replace_of_change in - let apply_all_replaces diagnostic = - Core_list.fold replaces ~init:(Some diagnostic) ~f:apply_replace - in - Core_list.filter_map diagnostics ~f:apply_all_replaces - - -(************************************************************************) -(** Accessors **) -(************************************************************************) - -let get_root (p: Lsp.Initialize.params) : string = - let open Lsp.Initialize in - match p.rootUri, p.rootPath with - | Some uri, _ -> lsp_uri_to_path uri - | None, Some path -> path - | None, None -> failwith "Initialize params missing root" - -let supports_progress (p: Lsp.Initialize.params) : bool = - let open Lsp.Initialize in - p.client_capabilities.window.progress - -let supports_actionRequired (p: Lsp.Initialize.params) : bool = - let open Lsp.Initialize in - 
p.client_capabilities.window.actionRequired - -let supports_status (p: Lsp.Initialize.params) : bool = - let open Lsp.Initialize in - p.client_capabilities.window.status - -let supports_snippets (p: Lsp.Initialize.params) : bool = - let open Lsp.Initialize in - p.client_capabilities.textDocument.completion.completionItem.snippetSupport - -let supports_connectionStatus (p: Lsp.Initialize.params) : bool = - let open Lsp.Initialize in - p.client_capabilities.telemetry.connectionStatus - -(************************************************************************) -(** Wrappers for some LSP methods **) -(************************************************************************) - -let telemetry (writer: Jsonrpc.writer) (level: MessageType.t) (message: string) : unit = - print_logMessage level message |> Jsonrpc.notify writer "telemetry/event" - -let telemetry_error (writer: Jsonrpc.writer) = telemetry writer MessageType.ErrorMessage -let telemetry_log (writer: Jsonrpc.writer) = telemetry writer MessageType.LogMessage - -let log (writer: Jsonrpc.writer) (level: MessageType.t) (message: string) : unit = - print_logMessage level message |> Jsonrpc.notify writer "window/logMessage" - -let log_error (writer: Jsonrpc.writer) = log writer MessageType.ErrorMessage -let log_warning (writer: Jsonrpc.writer) = log writer MessageType.WarningMessage -let log_info (writer: Jsonrpc.writer) = log writer MessageType.InfoMessage - -let dismiss_diagnostics (writer: Jsonrpc.writer) (diagnostic_uris: SSet.t) : SSet.t = - let dismiss_one (uri: string) : unit = - let message = { Lsp.PublishDiagnostics.uri; diagnostics = []; } in - message |> print_diagnostics |> Jsonrpc.notify writer "textDocument/publishDiagnostics" - in - SSet.iter dismiss_one diagnostic_uris; - SSet.empty - -let notify_connectionStatus - (p: Lsp.Initialize.params) - (writer: Jsonrpc.writer) - (wasConnected: bool) - (isConnected: bool) - : bool = - if supports_connectionStatus p && wasConnected <> isConnected then begin - let message = { Lsp.ConnectionStatus.isConnected; } in - message |> print_connectionStatus |> Jsonrpc.notify writer "telemetry/connectionStatus" - end; - isConnected - -(* notify_progress: for sending/updating/closing progress messages. *) -(* To start a new indicator: id=None, message=Some, and get back the new id *) -(* To update an existing one: id=Some, message=Some, and get back same id *) -(* To close an existing one: id=Some, message=None, and get back None *) -(* No-op, for convenience: id=None, message=None, and you get back None *) -(* messages. 
To start a new progress notifier, put id=None and message=Some *) - -let notify_progress_raw - (state: 'a) - (p: Lsp.Initialize.params) - (writer: 'a -> Progress.params -> 'a) - (id: Progress.t) - (label: string option) - : 'a * Progress.t = - match id, label with - | Progress.Absent, Some label -> - if supports_progress p then - let () = incr progress_and_actionRequired_counter in - let id = !progress_and_actionRequired_counter in - let msg = { Progress.id; label = Some label; } in - let state = writer state msg in - (state, Progress.Present { id; label; }) - else - (state, Progress.Absent) - | Progress.Present { id; label; }, Some new_label when label = new_label -> - (state, Progress.Present { id; label; }) - | Progress.Present { id; _ }, Some label -> - let msg = { Progress.id; label = Some label; } in - let state = writer state msg in - (state, Progress.Present { id; label; }) - | Progress.Present { id; _ }, None -> - let msg = { Progress.id; label = None; } in - let state = writer state msg in - (state, Progress.Absent) - | Progress.Absent, None -> - (state, Progress.Absent) - -let notify_progress - (p: Lsp.Initialize.params) - (writer: Jsonrpc.writer) - (id: Progress.t) - (label: string option) - : Progress.t = - let writer_wrapper () params = - let json = print_progress params.Progress.id params.Progress.label in - Jsonrpc.notify writer "window/progress" json in - let ((), id) = notify_progress_raw () p writer_wrapper id label - in - id - -let notify_actionRequired - (p: Lsp.Initialize.params) - (writer: Jsonrpc.writer) - (id: ActionRequired.t) - (label: string option) - : ActionRequired.t = - match id, label with - | ActionRequired.Absent, Some label -> - if supports_actionRequired p then - let () = incr progress_and_actionRequired_counter in - let id = !progress_and_actionRequired_counter in - let () = print_actionRequired id (Some label) - |> Jsonrpc.notify writer "window/actionRequired" in - ActionRequired.Present { id; label; } - else - ActionRequired.Absent - | ActionRequired.Present { id; label; }, Some new_label when label = new_label -> - ActionRequired.Present { id; label; } - | ActionRequired.Present { id; _ }, Some label -> - print_actionRequired id (Some label) |> Jsonrpc.notify writer "window/actionRequired"; - ActionRequired.Present { id; label; } - | ActionRequired.Present { id; _ }, None -> - print_actionRequired id None |> Jsonrpc.notify writer "window/actionRequired"; - ActionRequired.Absent - | ActionRequired.Absent, None -> - ActionRequired.Absent diff --git a/hack/utils/lwt_message_queue.ml b/hack/utils/lwt_message_queue.ml new file mode 100644 index 00000000000..77cc80ec7ae --- /dev/null +++ b/hack/utils/lwt_message_queue.ml @@ -0,0 +1,54 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type 'a t = { + mutable messages: 'a ImmQueue.t; + mutable is_open: bool; + cv: unit Lwt_condition.t; + (** Broadcasted whenever any of the above state changes. 
*) +} + +let create () : 'a t = + { messages = ImmQueue.empty; is_open = true; cv = Lwt_condition.create () } + +let set_messages (queue : 'a t) (messages : 'a ImmQueue.t) : unit = + queue.messages <- messages; + Lwt_condition.broadcast queue.cv () + +let set_is_open (queue : 'a t) (is_open : bool) : unit = + queue.is_open <- is_open; + Lwt_condition.broadcast queue.cv () + +let push (queue : 'a t) (message : 'a) : bool = + if queue.is_open then ( + set_messages queue (ImmQueue.push queue.messages message); + true + ) else + false + +let rec pop (queue : 'a t) : 'a option Lwt.t = + match (queue.is_open, ImmQueue.pop queue.messages) with + | (false, _) -> Lwt.return None + | (true, (None, _)) -> + let%lwt () = Lwt_condition.wait queue.cv in + pop queue + | (true, (Some hd, tl)) -> + set_messages queue tl; + Lwt.return (Some hd) + +let close (queue : 'a t) : unit = + set_messages queue ImmQueue.empty; + set_is_open queue false + +let is_empty (queue : 'a t) : bool = ImmQueue.is_empty queue.messages + +let length (queue : 'a t) : int = ImmQueue.length queue.messages + +let exists (queue : 'a t) ~(f : 'a -> bool) : bool = + ImmQueue.exists ~f queue.messages diff --git a/hack/utils/lwt_message_queue.mli b/hack/utils/lwt_message_queue.mli new file mode 100644 index 00000000000..39691bf00d1 --- /dev/null +++ b/hack/utils/lwt_message_queue.mli @@ -0,0 +1,48 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +type 'a t +(** A mutable queue containing messages of type ['a]. *) + +val create : unit -> 'a t +(** Create a new [Lwt_message_queue.t]. *) + +val push : 'a t -> 'a -> bool +(** Push a message into the queue. Wakes up the task waiting to [pop] from it, +if any. *) + +val pop : 'a t -> 'a option Lwt.t +(** Get and remove the next message in the queue. If there are currently no +messages in the queue, wait until one becomes available. If the queue is or +becomes closed, returns [None]; otherwise returns [Some message]. + +The behavior of multiple tasks waiting to [pop] the queue simultaneously is +unspecified (similar to issues raised in +https://github.com/ocsigen/lwt/issues/250). Only one task should [pop] at a +time. The message queue is therefore mostly useful for code organization +purposes, making it possible to split the code for the producer and consumer of +the message queue in a principled way. *) + +val close : 'a t -> unit +(** Close the message queue for further reads and writes. All messages currently +in the queue will be dropped. Future calls to [push] will return [false], and +future calls to [pop] will return [None]. + +Either the producer or consumer end of the queue may close it. *) + +val is_empty : 'a t -> bool +(** Whether or not the queue has any pending messages at this moment. *) + +val length : 'a t -> int +(** Returns the number of messages currently in the queue. If the queue is +closed, returns [0]. *) + +val exists : 'a t -> f:('a -> bool) -> bool +(** Returns whether or not a message satisfying predicate [f] exists in the +current queue. 
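(* Illustrative sketch, not part of this patch: one way this queue could be used, assuming
   the lwt_ppx syntax (let%lwt) already used elsewhere in this diff. A producer pushes two
   items and later closes the queue; a single consumer drains it until [pop] returns
   [None]. Only functions declared in this interface are used; the string messages and the
   sleep are made up for illustration. *)
let consume (queue : string Lwt_message_queue.t) : unit Lwt.t =
  let rec loop () =
    let%lwt msg = Lwt_message_queue.pop queue in
    match msg with
    | None -> Lwt.return_unit              (* queue was closed *)
    | Some s ->
      print_endline ("handled " ^ s);
      loop ()
  in
  loop ()

let () =
  Lwt_main.run
    (let queue = Lwt_message_queue.create () in
     let producer =
       let (_ : bool) = Lwt_message_queue.push queue "first" in
       let (_ : bool) = Lwt_message_queue.push queue "second" in
       (* give the consumer a chance to drain; [close] drops anything still unread *)
       let%lwt () = Lwt_unix.sleep 0.1 in
       Lwt_message_queue.close queue;
       Lwt.return_unit
     in
     Lwt.join [producer; consume queue])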
*) diff --git a/hack/utils/lwt_utils.ml b/hack/utils/lwt_utils.ml new file mode 100644 index 00000000000..adc42c8235d --- /dev/null +++ b/hack/utils/lwt_utils.ml @@ -0,0 +1,205 @@ +open Core_kernel + +let select + (read_fds : Unix.file_descr list) + (write_fds : Unix.file_descr list) + (exn_fds : Unix.file_descr list) + (timeout : float) : + (Unix.file_descr list * Unix.file_descr list * Unix.file_descr list) Lwt.t + = + let make_task + ~(fds : Unix.file_descr list) + ~(condition : Lwt_unix.file_descr -> bool) + ~(wait_f : Lwt_unix.file_descr -> unit Lwt.t) : + (Unix.file_descr list, Unix.file_descr list) result Lwt.t = + try%lwt + let fds = List.map fds ~f:Lwt_unix.of_unix_file_descr in + let%lwt () = Lwt.pick (List.map fds ~f:wait_f) in + let actionable_fds = + fds |> List.filter ~f:condition |> List.map ~f:Lwt_unix.unix_file_descr + in + Lwt.return (Ok actionable_fds) + with _ -> + (* Although we gather a list of exceptional file descriptors here, it + happens that no call site of `Unix.select` in the codebase has checked + this list, so we could in theory just return any list (or not return any + exceptional file descriptors at all). *) + let exceptional_fds = + List.filter exn_fds ~f:(fun fd -> List.mem ~equal:( = ) fds fd) + in + Lwt.return (Error exceptional_fds) + in + let read_task = + let%lwt readable_fds = + make_task + ~fds:read_fds + ~condition:Lwt_unix.readable + ~wait_f:Lwt_unix.wait_read + in + match readable_fds with + | Ok fds -> Lwt.return (fds, [], []) + | Error fds -> Lwt.return ([], [], fds) + in + let write_task = + let%lwt writeable_fds = + make_task + ~fds:write_fds + ~condition:Lwt_unix.writable + ~wait_f:Lwt_unix.wait_write + in + match writeable_fds with + | Ok fds -> Lwt.return ([], fds, []) + | Error fds -> Lwt.return ([], [], fds) + in + let tasks = [read_task; write_task] in + let tasks = + if timeout > 0.0 then + let timeout_task = + let%lwt () = Lwt_unix.sleep timeout in + Lwt.return ([], [], []) + in + timeout_task :: tasks + else + failwith "Timeout <= 0 not implemented" + in + Lwt.pick tasks + +module Process_success = struct + type t = { + command_line: string; + stdout: string; + stderr: string; + } +end + +module Process_failure = struct + type t = { + command_line: string; + process_status: Unix.process_status; + stdout: string; + stderr: string; + exn: exn option; + } + + let to_string (process_failure : t) : string = + let exn_message = + match process_failure.exn with + | Some exn -> Exn.to_string exn + | None -> "" + in + let exit_code = + Unix.( + match process_failure.process_status with + | WEXITED exit_code -> "WEXITED " ^ string_of_int exit_code + | WSIGNALED exit_code -> "WSIGNALED " ^ string_of_int exit_code + | WSTOPPED exit_code -> "WSTOPPED " ^ string_of_int exit_code) + in + let stderr = + match process_failure.stderr with + | "" -> "" + | stderr -> stderr + in + Printf.sprintf + ( "Process '%s' failed with\n" + ^^ "Exception: %s\n" + ^^ "Exit code: %s\n" + ^^ "Stderr: %s" ) + process_failure.command_line + exn_message + exit_code + stderr +end + +let exec_checked + ?(input : string option) + ?(env : string array option) + (program : string) + (args : string array) : (Process_success.t, Process_failure.t) Lwt_result.t + = + let command_line = + let args = + args |> Array.map ~f:(fun x -> " " ^ x) |> String.concat_array ~sep:"" + in + program ^ args + in + let process = + let command = (program, Array.append [|program|] args) in + Lwt_process.open_process_full command ?env + in + (let%lwt (exn, stdout, stderr) = + let exn = ref None in 
+ let stdout = ref "" in + let stderr = ref "" in + let%lwt () = + try%lwt + let%lwt () = + match input with + | Some input -> + let%lwt () = Lwt_io.write process#stdin input in + let%lwt () = Lwt_io.close process#stdin in + Lwt.return_unit + | None -> Lwt.return_unit + and () = + let%lwt result = Lwt_io.read process#stdout in + stdout := result; + Lwt.return_unit + and () = + let%lwt result = Lwt_io.read process#stderr in + stderr := result; + Lwt.return_unit + in + Lwt.return_unit + with e -> + exn := Some e; + Lwt.return_unit + in + Lwt.return (!exn, !stdout, !stderr) + in + let%lwt state = process#close in + match state with + | Unix.WEXITED 0 -> + Lwt.return_ok { Process_success.command_line; stdout; stderr } + | process_status -> + Lwt.return_error + { Process_failure.command_line; process_status; stdout; stderr; exn }) + [%finally + let%lwt (_ : Unix.process_status) = process#close in + Lwt.return_unit] + +let try_finally ~(f : unit -> 'a Lwt.t) ~(finally : unit -> unit Lwt.t) : + 'a Lwt.t = + let%lwt res = + try%lwt + let%lwt result = f () in + Lwt.return result + with e -> + let%lwt () = finally () in + raise e + in + let%lwt () = finally () in + Lwt.return res + +let read_all (path : string) : (string, string) Lwt_result.t = + try%lwt + let%lwt contents = + Lwt_io.with_file ~mode:Lwt_io.Input path (fun ic -> + let%lwt contents = Lwt_io.read ic in + Lwt.return contents) + in + Lwt.return (Ok contents) + with _ -> + Lwt.return + (Error + (Printf.sprintf + "Could not read the contents of the file at path %s" + path)) + +module Promise = struct + type 'a t = 'a Lwt.t + + let return = Lwt.return + + let map e f = Lwt.map f e + + let bind = Lwt.bind +end diff --git a/hack/utils/lwt_utils.mli b/hack/utils/lwt_utils.mli new file mode 100644 index 00000000000..e7f7a4b4696 --- /dev/null +++ b/hack/utils/lwt_utils.mli @@ -0,0 +1,62 @@ +val select : + Unix.file_descr list -> + Unix.file_descr list -> + Unix.file_descr list -> + float -> + (Unix.file_descr list * Unix.file_descr list * Unix.file_descr list) Lwt.t +(** Drop-in replacement for [Unix.select] that works even when the Lwt main loop +is running (i.e. your function has [Lwt_main.run] somewhere higher up in the +call stack). + +The Lwt main loop is an event loop pumped by [Unix.select], and so regular +[Unix.select] calls are prone to raising `EINTR`. The implementation of this +function does not use [Unix.select] at all, but Lwt primitives that accomplish +the same thing. +*) + +module Process_success : sig + type t = { + command_line: string; + stdout: string; + stderr: string; + } +end + +module Process_failure : sig + type t = { + command_line: string; + process_status: Unix.process_status; + stdout: string; + stderr: string; + exn: exn option; + } + + val to_string : t -> string +end + +val exec_checked : + ?input:string -> + ?env:string array -> + string -> + string array -> + (Process_success.t, Process_failure.t) Lwt_result.t +(** Run a command with a given input and return the output. If the command exits +with an exit status other than zero, raises [Process_failure] instead. + +NOTE: on cancellation, this function will not kill the underlying process. (I +tried to implement it, but after killing the process, both [Lwt_io.close] and +[Lwt_io.abort] would hang when trying to close the process's +stdin/stdout/stderr.) +*) + +val try_finally : + f:(unit -> 'a Lwt.t) -> finally:(unit -> unit Lwt.t) -> 'a Lwt.t +(** Asynchronous version of [Utils.try_finally]. 
Run and wait for [f] to +complete, and be sure to invoke [finally] asynchronously afterward, even if [f] +raises an exception. *) + +val read_all : string -> (string, string) Lwt_result.t +(** Reads all the contents from the given file on disk, or returns an error +message if unable to do so. *) + +module Promise : Promise.S with type 'a t = 'a Lwt.t diff --git a/hack/utils/marshal_tools.ml b/hack/utils/marshal_tools.ml deleted file mode 100644 index 2a244a250f5..00000000000 --- a/hack/utils/marshal_tools.ml +++ /dev/null @@ -1,207 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(** - * This tool allows for marshaling directly over file descriptors (instead of - * ocaml "channels") to avoid buffering so that we can safely use marshaling - * and libancillary together. - * - * The problem: - * Ocaml's marshaling is done over channels, which have their own internal - * buffer. This means after reading a marshaled object from a channel, the - * FD's position is not guaranteed to be pointing to the beginning of the - * next marshaled object (but instead points to the position after the - * buffered read). So another process cannot receive this FD (over - * libancillary) to start reading the next object. - * - * The solution: - * Start each message with a fixed-size preamble that describes the - * size of the payload to read. Read precisely that many bytes directly - * from the FD avoiding Ocaml channels entirely. - *) - -exception Invalid_Int_Size_Exception -exception Payload_Size_Too_Large_Exception -exception Malformed_Preamble_Exception -exception Writing_Preamble_Exception -exception Writing_Payload_Exception -exception Reading_Preamble_Exception -exception Reading_Payload_Exception - -(* We want to marshal exceptions (or at least their message+stacktrace) over *) -(* the wire. This type ensures that no one will attempt to pattern-match on *) -(* the thing we marshal: 'Values of extensible variant types, for example *) -(* exceptions (of extensible type exn), returned by the unmarhsaller should *) -(* not be pattern-matched over, because unmarshalling does not preserve the *) -(* information required for matching their constructors.' *) -(* https://caml.inria.fr/pub/docs/manual-ocaml/libref/Marshal.html *) -type remote_exception_data = { - message : string; - stack : string; -} - -module type WRITER_READER = sig - type 'a result - type fd - - val return: 'a -> 'a result - val fail: exn -> 'a result - val (>>=): 'a result -> ('a -> 'b result) -> 'b result - - val write: ?timeout:Timeout.t -> fd -> buffer:bytes -> offset:int -> size:int -> int result - val read: ?timeout:Timeout.t -> fd -> buffer:bytes -> offset:int -> size:int -> int result - - val log: string -> unit -end - -module type REGULAR_WRITER_READER = WRITER_READER - with type 'a result = 'a and type fd = Unix.file_descr - -module RegularWriterReader : REGULAR_WRITER_READER = struct - type 'a result = 'a - type fd = Unix.file_descr - - let return x = x - let fail exn = raise exn - let (>>=) a f = f a - - let write ?timeout fd ~buffer ~offset ~size = - match Timeout.select ?timeout [] [fd] [] ~-.1.0 with - | _, [], _ -> 0 - | _ -> Unix.write fd buffer offset size - - - (* Marshal_tools reads from file descriptors. These file descriptors might be for some - * non-blocking socket. Normally if you try to read from an fd, it will block until some data is - * ready. 
But if you try to read from a non-blocking socket and it's not ready, you get an - * EWOULDBLOCK error. - * - * People using Marshal_tools probably are calling Unix.select first. However that only guarantees - * that the first read won't block. Marshal_tools will always do at least 2 reads (one for the - * preamble and one or more for the data). Any read after the first might block. - *) - let read ?timeout fd ~buffer ~offset ~size = - match Timeout.select ?timeout [fd] [] [] ~-.1.0 with - | [], _, _ -> 0 - | _ -> Unix.read fd buffer offset size - - let log str = Printf.eprintf "%s\n%!" str -end - -module MarshalToolsFunctor (WriterReader: WRITER_READER): sig - val expected_preamble_size: int - val to_fd_with_preamble: - ?timeout:Timeout.t -> - ?flags:Marshal.extern_flags list -> - WriterReader.fd -> - 'a -> - int WriterReader.result - val from_fd_with_preamble: ?timeout:Timeout.t -> WriterReader.fd -> 'a WriterReader.result -end = struct - - let (>>=) = WriterReader.(>>=) - - let preamble_start_sentinel = '\142' - (** Size in bytes. *) - let preamble_core_size = 4 - let expected_preamble_size = preamble_core_size + 1 - (** Payload size in bytes = 2^31 - 1. *) - let maximum_payload_size = (1 lsl (preamble_core_size * 8)) - 1 - - let get_preamble_core (size : int) = - (** We limit payload size to 2^31 - 1 bytes. *) - if size >= maximum_payload_size then - raise Payload_Size_Too_Large_Exception; - let rec loop i (remainder: int) acc = - if i < 0 then acc - else loop (i - 1) (remainder / 256) - (Bytes.set acc i (Char.chr (remainder mod 256)); acc) in - loop (preamble_core_size - 1) size (Bytes.create preamble_core_size) - - let make_preamble (size : int) = - let preamble_core = get_preamble_core size in - let preamble = Bytes.create (preamble_core_size + 1) in - Bytes.set preamble 0 preamble_start_sentinel; - Bytes.blit preamble_core 0 preamble 1 4; - preamble - - let parse_preamble preamble = - if (Bytes.length preamble) <> expected_preamble_size - || (Bytes.get preamble 0) <> preamble_start_sentinel then - raise Malformed_Preamble_Exception; - let rec loop i acc = - if i >= 5 then acc - else loop (i + 1) ((acc * 256) + (int_of_char (Bytes.get preamble i))) in - loop 1 0 - - let rec write_payload ?timeout fd buffer offset to_write = - if to_write = 0 then WriterReader.return offset else begin - WriterReader.write ?timeout fd ~buffer ~offset ~size:to_write - >>= (fun bytes_written -> - if bytes_written = 0 then WriterReader.return offset else begin - write_payload ?timeout fd buffer (offset+bytes_written) (to_write-bytes_written) - end - ) - end - - (* Returns the size of the marshaled payload *) - let to_fd_with_preamble ?timeout ?(flags=[]) fd obj = - let payload = Marshal.to_bytes obj flags in - let size = Bytes.length payload in - let preamble = make_preamble size in - write_payload ?timeout fd preamble 0 expected_preamble_size - >>= (fun preamble_bytes_written -> - if preamble_bytes_written <> expected_preamble_size - then WriterReader.fail Writing_Preamble_Exception - else WriterReader.return ()) - >>= (fun () -> write_payload ?timeout fd payload 0 size) - >>= (fun bytes_written -> - if bytes_written <> size - then WriterReader.fail Writing_Payload_Exception - else WriterReader.return size) - - let rec read_payload ?timeout fd buffer offset to_read = - if to_read = 0 then WriterReader.return offset else begin - WriterReader.read ?timeout fd ~buffer ~offset ~size:to_read - >>= (fun bytes_read -> - if bytes_read = 0 then WriterReader.return offset else begin - read_payload ?timeout fd 
buffer (offset+bytes_read) (to_read-bytes_read) - end - ) - end - - let from_fd_with_preamble ?timeout fd = - let preamble = Bytes.create expected_preamble_size in - WriterReader.read ?timeout fd ~buffer:preamble ~offset:0 ~size:expected_preamble_size - >>= (fun bytes_read -> - if bytes_read = 0 - (** Unix manpage for read says 0 bytes read indicates end of file. *) - then WriterReader.fail End_of_file - else - if (bytes_read <> expected_preamble_size) - then begin - WriterReader.log (Printf.sprintf "Error, only read %d bytes for preamble." bytes_read); - WriterReader.fail Reading_Preamble_Exception - end - else WriterReader.return () - ) - >>= (fun () -> - let payload_size = parse_preamble preamble in - let payload = Bytes.create payload_size in - read_payload ?timeout fd payload 0 payload_size - >>= (fun payload_size_read -> - if (payload_size_read <> payload_size) - then WriterReader.fail Reading_Payload_Exception - else WriterReader.return (Marshal.from_bytes payload 0) - ) - ) -end - -module RegularMarshalTools = MarshalToolsFunctor (RegularWriterReader) -include RegularMarshalTools diff --git a/hack/utils/marshal_tools.mli b/hack/utils/marshal_tools.mli deleted file mode 100644 index a3a90c2c0ab..00000000000 --- a/hack/utils/marshal_tools.mli +++ /dev/null @@ -1,61 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -exception Invalid_Int_Size_Exception -exception Payload_Size_Too_Large_Exception -exception Malformed_Preamble_Exception -exception Writing_Preamble_Exception -exception Writing_Payload_Exception -exception Reading_Preamble_Exception -exception Reading_Payload_Exception - -type remote_exception_data = { - message : string; - stack : string; -} - -val to_fd_with_preamble: - ?timeout:Timeout.t -> - ?flags:Marshal.extern_flags list -> - Unix.file_descr -> - 'a -> - int -val from_fd_with_preamble: - ?timeout:Timeout.t -> - Unix.file_descr -> - 'a - -module type WRITER_READER = sig - type 'a result - type fd - - val return: 'a -> 'a result - val fail: exn -> 'a result - val (>>=): 'a result -> ('a -> 'b result) -> 'b result - - val write: ?timeout:Timeout.t -> fd -> buffer:bytes -> offset:int -> size:int -> int result - val read: ?timeout:Timeout.t -> fd -> buffer:bytes -> offset:int -> size:int -> int result - - val log: string -> unit -end - -module MarshalToolsFunctor : functor (WriterReader: WRITER_READER) -> sig - val expected_preamble_size: int - - val to_fd_with_preamble: - ?timeout:Timeout.t -> - ?flags:Marshal.extern_flags list -> - WriterReader.fd -> - 'a -> - int WriterReader.result - val from_fd_with_preamble: - ?timeout:Timeout.t -> - WriterReader.fd -> - 'a WriterReader.result -end diff --git a/hack/utils/marshal_tools/dune b/hack/utils/marshal_tools/dune new file mode 100644 index 00000000000..2b186d7061c --- /dev/null +++ b/hack/utils/marshal_tools/dune @@ -0,0 +1,18 @@ +(library + (name marshal_tools) + (wrapped false) + (modules + marshal_tools) + (libraries + sys_utils)) + +(library + (name marshal_tools_lwt) + (wrapped false) + (modules + marshal_tools_lwt) + (libraries + marshal_tools + lwt + lwt.unix + lwt_log)) diff --git a/hack/utils/marshal_tools/marshal_tools.ml b/hack/utils/marshal_tools/marshal_tools.ml new file mode 100644 index 00000000000..07f36333aaa --- /dev/null +++ b/hack/utils/marshal_tools/marshal_tools.ml @@ -0,0 +1,280 @@ +(* + * Copyright (c) 2015, Facebook, Inc. 
+ * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(** + * This tool allows for marshaling directly over file descriptors (instead of + * ocaml "channels") to avoid buffering so that we can safely use marshaling + * and libancillary together. + * + * The problem: + * Ocaml's marshaling is done over channels, which have their own internal + * buffer. This means after reading a marshaled object from a channel, the + * FD's position is not guaranteed to be pointing to the beginning of the + * next marshaled object (but instead points to the position after the + * buffered read). So another process cannot receive this FD (over + * libancillary) to start reading the next object. + * + * The solution: + * Start each message with a fixed-size preamble that describes the + * size of the payload to read. Read precisely that many bytes directly + * from the FD avoiding Ocaml channels entirely. + *) + +exception Invalid_Int_Size_Exception + +exception Payload_Size_Too_Large_Exception + +exception Malformed_Preamble_Exception + +exception Writing_Preamble_Exception + +exception Writing_Payload_Exception + +exception Reading_Preamble_Exception + +exception Reading_Payload_Exception + +(* We want to marshal exceptions (or at least their message+stacktrace) over *) +(* the wire. This type ensures that no one will attempt to pattern-match on *) +(* the thing we marshal: 'Values of extensible variant types, for example *) +(* exceptions (of extensible type exn), returned by the unmarhsaller should *) +(* not be pattern-matched over, because unmarshalling does not preserve the *) +(* information required for matching their constructors.' *) +(* https://caml.inria.fr/pub/docs/manual-ocaml/libref/Marshal.html *) +type remote_exception_data = { + message: string; + stack: string; +} + +module type WRITER_READER = sig + type 'a result + + type fd + + val return : 'a -> 'a result + + val fail : exn -> 'a result + + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val write : + ?timeout:Timeout.t -> + fd -> + buffer:bytes -> + offset:int -> + size:int -> + int result + + val read : + ?timeout:Timeout.t -> + fd -> + buffer:bytes -> + offset:int -> + size:int -> + int result + + val log : string -> unit +end + +module type REGULAR_WRITER_READER = + WRITER_READER with type 'a result = 'a and type fd = Unix.file_descr + +module RegularWriterReader : REGULAR_WRITER_READER = struct + type 'a result = 'a + + type fd = Unix.file_descr + + let return x = x + + let fail exn = raise exn + + let ( >>= ) a f = f a + + let rec write ?timeout fd ~buffer ~offset ~size = + match Timeout.select ?timeout [] [fd] [] ~-.1.0 with + | (_, [], _) -> 0 + | _ -> + (* Timeout.select handles EINTR, but the Unix.write call can also be interrupted. If the write + * is interrupted before any bytes are written, the call fails with EINTR. Otherwise, the call + * succeeds and returns the number of bytes written. + *) + (try Unix.write fd buffer offset size + with Unix.Unix_error (Unix.EINTR, _, _) -> + write ?timeout fd ~buffer ~offset ~size) + + (* Marshal_tools reads from file descriptors. These file descriptors might be for some + * non-blocking socket. Normally if you try to read from an fd, it will block until some data is + * ready. But if you try to read from a non-blocking socket and it's not ready, you get an + * EWOULDBLOCK error. + * + * People using Marshal_tools probably are calling Unix.select first. 
However that only guarantees + * that the first read won't block. Marshal_tools will always do at least 2 reads (one for the + * preamble and one or more for the data). Any read after the first might block. + *) + let rec read ?timeout fd ~buffer ~offset ~size = + match Timeout.select ?timeout [fd] [] [] ~-.1.0 with + | ([], _, _) -> 0 + | _ -> + (* Timeout.select handles EINTR, but the Unix.read call can also be interrupted. If the read + * is interrupted before any bytes are read, the call fails with EINTR. Otherwise, the call + * succeeds and returns the number of bytes read. + *) + (try Unix.read fd buffer offset size + with Unix.Unix_error (Unix.EINTR, _, _) -> + read ?timeout fd ~buffer ~offset ~size) + + let log str = Printf.eprintf "%s\n%!" str +end + +module MarshalToolsFunctor (WriterReader : WRITER_READER) : sig + val expected_preamble_size : int + + val to_fd_with_preamble : + ?timeout:Timeout.t -> + ?flags:Marshal.extern_flags list -> + WriterReader.fd -> + 'a -> + int WriterReader.result + + val from_fd_with_preamble : + ?timeout:Timeout.t -> WriterReader.fd -> 'a WriterReader.result +end = struct + let ( >>= ) = WriterReader.( >>= ) + + let preamble_start_sentinel = '\142' + + (* Size in bytes. *) + let preamble_core_size = 4 + + let expected_preamble_size = preamble_core_size + 1 + + (* Payload size in bytes = 2^31 - 1. *) + let maximum_payload_size = (1 lsl (preamble_core_size * 8)) - 1 + + let get_preamble_core (size : int) = + (* We limit payload size to 2^31 - 1 bytes. *) + if size >= maximum_payload_size then raise Payload_Size_Too_Large_Exception; + let rec loop i (remainder : int) acc = + if i < 0 then + acc + else + loop + (i - 1) + (remainder / 256) + ( Bytes.set acc i (Char.chr (remainder mod 256)); + acc ) + in + loop (preamble_core_size - 1) size (Bytes.create preamble_core_size) + + let make_preamble (size : int) = + let preamble_core = get_preamble_core size in + let preamble = Bytes.create (preamble_core_size + 1) in + Bytes.set preamble 0 preamble_start_sentinel; + Bytes.blit preamble_core 0 preamble 1 4; + preamble + + let parse_preamble preamble = + if + Bytes.length preamble <> expected_preamble_size + || Bytes.get preamble 0 <> preamble_start_sentinel + then + raise Malformed_Preamble_Exception; + let rec loop i acc = + if i >= 5 then + acc + else + loop (i + 1) ((acc * 256) + int_of_char (Bytes.get preamble i)) + in + loop 1 0 + + let rec write_payload ?timeout fd buffer offset to_write = + if to_write = 0 then + WriterReader.return offset + else + WriterReader.write ?timeout fd ~buffer ~offset ~size:to_write + >>= fun bytes_written -> + if bytes_written = 0 then + WriterReader.return offset + else + write_payload + ?timeout + fd + buffer + (offset + bytes_written) + (to_write - bytes_written) + + (* Returns the size of the marshaled payload *) + let to_fd_with_preamble ?timeout ?(flags = []) fd obj = + let payload = Marshal.to_bytes obj flags in + let size = Bytes.length payload in + let preamble = make_preamble size in + write_payload ?timeout fd preamble 0 expected_preamble_size + >>= (fun preamble_bytes_written -> + if preamble_bytes_written <> expected_preamble_size then + WriterReader.fail Writing_Preamble_Exception + else + WriterReader.return ()) + >>= (fun () -> write_payload ?timeout fd payload 0 size) + >>= fun bytes_written -> + if bytes_written <> size then + WriterReader.fail Writing_Payload_Exception + else + WriterReader.return size + + let rec read_payload ?timeout fd buffer offset to_read = + if to_read = 0 then + WriterReader.return 
offset + else + WriterReader.read ?timeout fd ~buffer ~offset ~size:to_read + >>= fun bytes_read -> + if bytes_read = 0 then + WriterReader.return offset + else + read_payload + ?timeout + fd + buffer + (offset + bytes_read) + (to_read - bytes_read) + + let from_fd_with_preamble ?timeout fd = + let preamble = Bytes.create expected_preamble_size in + WriterReader.read + ?timeout + fd + ~buffer:preamble + ~offset:0 + ~size:expected_preamble_size + >>= (fun bytes_read -> + if + bytes_read = 0 + (* Unix manpage for read says 0 bytes read indicates end of file. *) + then + WriterReader.fail End_of_file + else if bytes_read <> expected_preamble_size then ( + WriterReader.log + (Printf.sprintf + "Error, only read %d bytes for preamble." + bytes_read); + WriterReader.fail Reading_Preamble_Exception + ) else + WriterReader.return ()) + >>= fun () -> + let payload_size = parse_preamble preamble in + let payload = Bytes.create payload_size in + read_payload ?timeout fd payload 0 payload_size + >>= fun payload_size_read -> + if payload_size_read <> payload_size then + WriterReader.fail Reading_Payload_Exception + else + WriterReader.return (Marshal.from_bytes payload 0) +end + +module RegularMarshalTools = MarshalToolsFunctor (RegularWriterReader) +include RegularMarshalTools diff --git a/hack/utils/marshal_tools/marshal_tools.mli b/hack/utils/marshal_tools/marshal_tools.mli new file mode 100644 index 00000000000..cb663860187 --- /dev/null +++ b/hack/utils/marshal_tools/marshal_tools.mli @@ -0,0 +1,80 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +exception Invalid_Int_Size_Exception + +exception Payload_Size_Too_Large_Exception + +exception Malformed_Preamble_Exception + +exception Writing_Preamble_Exception + +exception Writing_Payload_Exception + +exception Reading_Preamble_Exception + +exception Reading_Payload_Exception + +type remote_exception_data = { + message: string; + stack: string; +} + +val to_fd_with_preamble : + ?timeout:Timeout.t -> + ?flags:Marshal.extern_flags list -> + Unix.file_descr -> + 'a -> + int + +val from_fd_with_preamble : ?timeout:Timeout.t -> Unix.file_descr -> 'a + +module type WRITER_READER = sig + type 'a result + + type fd + + val return : 'a -> 'a result + + val fail : exn -> 'a result + + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val write : + ?timeout:Timeout.t -> + fd -> + buffer:bytes -> + offset:int -> + size:int -> + int result + + val read : + ?timeout:Timeout.t -> + fd -> + buffer:bytes -> + offset:int -> + size:int -> + int result + + val log : string -> unit +end + +module MarshalToolsFunctor (WriterReader : WRITER_READER) : sig + val expected_preamble_size : int + + val to_fd_with_preamble : + ?timeout:Timeout.t -> + ?flags:Marshal.extern_flags list -> + WriterReader.fd -> + 'a -> + int WriterReader.result + + val from_fd_with_preamble : + ?timeout:Timeout.t -> WriterReader.fd -> 'a WriterReader.result +end diff --git a/hack/utils/marshal_tools/marshal_tools_lwt.ml b/hack/utils/marshal_tools/marshal_tools_lwt.ml new file mode 100644 index 00000000000..a0b349933a6 --- /dev/null +++ b/hack/utils/marshal_tools/marshal_tools_lwt.ml @@ -0,0 +1,40 @@ +(* + * Copyright (c) 2017, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
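(* Illustrative sketch, not part of this patch: the 5-byte preamble format described above,
   reconstructed as a standalone round trip so the wire layout is explicit: one sentinel
   byte (decimal 142) followed by the payload size as four big-endian bytes. It mirrors
   make_preamble/parse_preamble but is a simplified re-derivation, not the shipped code. *)
let sentinel = '\142'

let encode_preamble (size : int) : bytes =
  let b = Bytes.create 5 in
  Bytes.set b 0 sentinel;
  for i = 0 to 3 do
    (* most significant byte first *)
    Bytes.set b (1 + i) (Char.chr ((size lsr (8 * (3 - i))) land 0xff))
  done;
  b

let decode_preamble (b : bytes) : int =
  if Bytes.length b <> 5 || Bytes.get b 0 <> sentinel then
    failwith "malformed preamble";
  let acc = ref 0 in
  for i = 1 to 4 do
    acc := (!acc * 256) + Char.code (Bytes.get b i)
  done;
  !acc

let () =
  assert (decode_preamble (encode_preamble 0) = 0);
  assert (decode_preamble (encode_preamble 1_000_000) = 1_000_000)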
+ * + *) + +module Marshal_tools_lwt = Marshal_tools.MarshalToolsFunctor (struct + type 'a result = 'a Lwt.t + + type fd = Lwt_unix.file_descr + + let return = Lwt.return + + let fail = Lwt.fail + + let ( >>= ) = Lwt.( >>= ) + + let write ?timeout fd ~buffer ~offset ~size = + if timeout <> None then + raise (Invalid_argument "Use Lwt timeouts directly"); + Lwt_unix.wait_write fd >>= (fun () -> Lwt_unix.write fd buffer offset size) + + let read ?timeout fd ~buffer ~offset ~size = + if timeout <> None then + raise (Invalid_argument "Use lwt timeouts directly"); + Lwt_unix.wait_read fd >>= (fun () -> Lwt_unix.read fd buffer offset size) + + let log str = Lwt_log_core.ign_error str +end) + +include Marshal_tools_lwt + +(* The Timeout module probably doesn't work terribly well with Lwt. Luckily, timeouts are super easy + * to write in Lwt, so we don't **really** need them *) +let to_fd_with_preamble ?flags fd obj = to_fd_with_preamble ?flags fd obj + +let from_fd_with_preamble fd = from_fd_with_preamble fd diff --git a/hack/utils/marshal_tools/marshal_tools_lwt.mli b/hack/utils/marshal_tools/marshal_tools_lwt.mli new file mode 100644 index 00000000000..6479d54807d --- /dev/null +++ b/hack/utils/marshal_tools/marshal_tools_lwt.mli @@ -0,0 +1,15 @@ +(* + * Copyright (c) 2017, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +val expected_preamble_size : int + +val to_fd_with_preamble : + ?flags:Marshal.extern_flags list -> Lwt_unix.file_descr -> 'a -> int Lwt.t + +val from_fd_with_preamble : Lwt_unix.file_descr -> 'a Lwt.t diff --git a/hack/utils/marshal_tools_lwt.ml b/hack/utils/marshal_tools_lwt.ml deleted file mode 100644 index 0f660cdefdd..00000000000 --- a/hack/utils/marshal_tools_lwt.ml +++ /dev/null @@ -1,38 +0,0 @@ -(** - * Copyright (c) 2017, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -module Marshal_tools_lwt = Marshal_tools.MarshalToolsFunctor (struct - type 'a result = 'a Lwt.t - type fd = Lwt_unix.file_descr - - let return = Lwt.return - let fail = Lwt.fail - let (>>=) = Lwt.(>>=) - - let write ?timeout fd ~buffer ~offset ~size = - if timeout <> None - then raise (Invalid_argument "Use Lwt timeouts directly"); - Lwt_unix.wait_write fd - >>= (fun () -> Lwt_unix.write fd buffer offset size) - - let read ?timeout fd ~buffer ~offset ~size = - if timeout <> None - then raise (Invalid_argument "Use lwt timeouts directly"); - Lwt_unix.wait_read fd - >>= (fun () -> Lwt_unix.read fd buffer offset size) - - let log str = Lwt_log_core.ign_error str -end) - -include Marshal_tools_lwt - -(* The Timeout module probably doesn't work terribly well with Lwt. Luckily, timeouts are super easy - * to write in Lwt, so we don't **really** need them *) -let to_fd_with_preamble ?flags fd obj = to_fd_with_preamble ?flags fd obj -let from_fd_with_preamble fd = from_fd_with_preamble fd diff --git a/hack/utils/marshal_tools_lwt.mli b/hack/utils/marshal_tools_lwt.mli deleted file mode 100644 index 074fec27a98..00000000000 --- a/hack/utils/marshal_tools_lwt.mli +++ /dev/null @@ -1,12 +0,0 @@ -(** - * Copyright (c) 2017, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * - *) - -val expected_preamble_size: int -val to_fd_with_preamble: ?flags:Marshal.extern_flags list -> Lwt_unix.file_descr -> 'a -> int Lwt.t -val from_fd_with_preamble: Lwt_unix.file_descr -> 'a Lwt.t diff --git a/hack/utils/measure.ml b/hack/utils/measure.ml deleted file mode 100644 index 6f27555444a..00000000000 --- a/hack/utils/measure.ml +++ /dev/null @@ -1,321 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(** - * The Measure module is primarily useful for debugging. It's particularly - * useful for gathering stats about something that happens a lot. Let's say you - * have some code like this - * - * let number_bunnies = count_bunnies () in - * - * If you want to debug how many bunnies are being counted, you could do - * something like - * - * let number_bunnies = count_bunnies () in - * Utils.prerr_endlinef "Num bunnies: %d" number_bunnies; - * - * but what if this code is called 1000 times? Then you end up with log spew. - * Using the Measure module helps with this. You can now do - * - * let number_bunnies = count_bunnies () in - * Measure.sample "num_bunnies" number_bunnies; - * - * and then later you do - * - * Measure.print_stats (); - * - * which will print the number of samples, the total, the average, the - * variance, the max and the min. - * - * Measure can keep track of the distribution of measurements if you give it a - * bucket size. Before we collect our measurements, call - * - * Measure.track_distribution "num_bunnies" ~bucket_size:10 = - * ...do logging - * Measure.print_distribution (); - * - * And this will print how many samples fall in the 0-9 bucket, how many fall - * into the 10-19 bucket, etc - * - * A common use case is timing, and there's an easy helper method. Let's say we - * wanted to see how long our code takes - * - * let number_bunnies = Measure.time "count_bunnies_time" (fun () -> - * count_bunnies () - * ) in - * - * now when we call print_stats we'll see how fast count_bunnies is and how - * much total time we spend counting bunnies. - * - * Measurements are stored in a stateful way in a record. You can either use a - * global record or a local record. - * - * Using a global record: - * Measure.sample "num_bunnies" number_bunnies; - * Measure.print_stats (); - * - * You can push and pop the global record. This is useful if you want to reset - * some counters without throwing away that data - * - * Measure.push_global (); - * ...measure stuff - * let record = Measure.pop_global () in - * Measure.print_stats ~record (); - * - * Using a local record: - * let record = Measure.create () in - * Measure.sample ~record "num_bunnies" number_bunnies; - * Measure.print_stats ~record (); - * - * A record does not store the individual measurements, just the aggregate - * stats, which are updated online. Records can be serialized in order to be - * sent across pipes. 
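(* Illustrative sketch, not part of this patch: the usage pattern the comment above
   describes, gathered into one snippet against the documented Measure API
   (track_distribution, sample, time, print_stats, print_distribution). [count_bunnies]
   exists only for illustration, and bucket_size is written as a float to match the
   implementation that follows. *)
let count_bunnies () = 42

let () =
  Measure.track_distribution "num_bunnies" ~bucket_size:10.0;
  for _ = 1 to 1000 do
    (* only aggregate stats are kept per name, not the raw samples *)
    let n = Measure.time "count_bunnies_time" (fun () -> count_bunnies ()) in
    Measure.sample "num_bunnies" (float_of_int n)
  done;
  Measure.print_stats ();
  Measure.print_distribution ()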
- *) - -module List = Hh_core.List -module FloatMap = MyMap.Make(struct type t = float let compare = compare end) - -type distribution = { - bucket_size: float; - buckets: float FloatMap.t; -} - -type record_entry = { - count: float; - mean: float; - variance_sum: float; - max: float; - min: float; - distribution: distribution option; -} -type record_data = record_entry SMap.t -type record = record_data ref - -(* Creates a new empty record *) -let create () = ref SMap.empty - -let global: (record list) ref = ref [create ()] -let push_global () = - global := (create ()) :: (!global) -let pop_global () = - match !global with - | ret::globals -> - global := globals; - ret - | _ -> failwith "Measure.pop_global called with empty stack" - - -let serialize record = !record -let deserialize data = ref data - -let new_entry = { - count = 0.0; - mean = 0.0; - variance_sum = 0.0; - max = min_float; - min = max_float; - distribution = None; -} - -let new_distribution ~bucket_size = Some { - bucket_size; - buckets = FloatMap.empty; -} - -let get_record = function - | Some record -> record - | None -> (match List.hd (!global) with - | Some record -> record - | None -> - failwith ("No global record available! " ^ - "Did you forget to call Measure.push_global?")) - - -(* Measure can track how the values are distributed by creating buckets and - * keeping track of how many samples fall into each buckets. It will not track - * distribution by default, so call this function to turn it on *) -let track_distribution ?record name ~bucket_size = - let record = get_record record in - let entry = match SMap.get name (!record) with - | None -> new_entry - | Some entry -> entry in - let entry = { entry with distribution = new_distribution ~bucket_size; } in - record := SMap.add name entry (!record) - -let round_down ~bucket_size value = - bucket_size *. (floor (value /. bucket_size)) - -let update_distribution ~weight value = function - | None -> None - | Some { bucket_size; buckets } -> - let bucket = round_down ~bucket_size value in - let bucket_count = match FloatMap.get bucket buckets with - | None -> weight - | Some count -> count +. weight in - let buckets = FloatMap.add bucket bucket_count buckets in - Some { bucket_size; buckets; } - -let sample ?record ?(weight=1.0) name value = - let record = get_record record in - let { - count = old_count; - mean = old_mean; - variance_sum; - max; - min; - distribution; - } = match SMap.get name (!record) with - | None -> new_entry - | Some entry -> entry in - - (* Add 1 * weight to the count *) - let count = old_count +. weight in - let mean = old_mean +. weight *. (value -. old_mean) /. count in - (* Knuth's online variance approximation algorithm, updated for weights. - * Weighted version from http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf *) - let variance_sum = variance_sum +. weight *. (value -. old_mean) *. (value -. mean) in - - let max = Pervasives.max max value in - let min = Pervasives.min min value in - - let distribution = update_distribution ~weight value distribution in - - let entry = { count; mean; variance_sum; max; min; distribution; } in - record := SMap.add name entry (!record) - -let merge_entries name from into = match (from, into) with - | None, into -> into - | from, None -> from - | Some from, Some into -> - let count = from.count +. 
into.count in - - (* Using this algorithm to combine the variance sums - * https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm - *) - (* d = meanB - meanA *) - let delta = from.mean -. into.mean in - (* mean = meanA + delta * (countB/count) *) - let mean = into.mean +. (delta *. from.count /. count) in - (* VarSum = VarSumA + VarSumB + delta * delta * countA * countB / count *) - let variance_sum = from.variance_sum +. into.variance_sum +. - delta *. delta *. into.count *. from.count /. count in - - let max = Pervasives.max from.max into.max in - let min = Pervasives.min from.min into.min in - - let distribution = match (from.distribution, into.distribution) with - | None, into -> into - | from, None -> from - | Some { bucket_size = from; _; }, Some { bucket_size = into; _; } - when from <> into -> Printf.kprintf failwith - "Merging buckets for %s failed: bucket sizes %f, %f" - name from into - | Some { bucket_size; buckets = from; }, Some { buckets = into; _; } -> - let buckets = FloatMap.merge (fun _bucket from_count into_count -> - match (from_count, into_count) with - | None, into -> into - | from, None -> from - | Some from_count, Some into_count -> Some (from_count +. into_count)) - from into in - Some { bucket_size; buckets; } in - Some { count; mean; variance_sum; max; min; distribution; } - -(* Merges all the samples from "from" into "record". If "record" is omitted - * then it uses the global record *) -let merge ?record ~from = - let into = get_record record in - into := SMap.merge merge_entries (!from) (!into) - -let time (type a) ?record name (f: unit -> a) : a = - let record = get_record record in - let start_time = Unix.gettimeofday () in - let ret = f () in - let end_time = Unix.gettimeofday () in - sample ~record name (end_time -. start_time); - ret - -let get_sum ?record name = - let record = get_record record in - match SMap.get name !record with - | None -> None - | Some { count; mean; _; } -> Some (count *. mean) - -let get_mean ?record name = - let record = get_record record in - match SMap.get name !record with - | None -> None - | Some { mean; _; } -> Some mean - -let get_count ?record name = - let record = get_record record in - match SMap.get name !record with - | None -> None - | Some { count; _; } -> Some count - -let pretty_num f = - if f > 1000000000.0 - then Printf.sprintf "%.3fG" (f /. 1000000000.0) - else if f > 1000000.0 - then Printf.sprintf "%.3fM" (f /. 1000000.0) - else if f > 1000.0 - then Printf.sprintf "%.3fK" (f /. 1000.0) - else if f = (floor f) - then Printf.sprintf "%d" (int_of_float f) - else Printf.sprintf "%f" f - -let print_entry_stats ?record ?print_raw name = - let print_raw = Option.value print_raw ~default:prerr_endline in - let record = get_record record in - let prefix = Printf.sprintf "%s stats --" name in - match SMap.get name (!record) with - | None - | Some { count = 0.0; _; } -> Printf.ksprintf print_raw "%s NO DATA" prefix - | Some { count; mean; variance_sum; max; min; distribution = _; } -> - let total = count *. mean in - let std_dev = sqrt (variance_sum /. 
count) in - Printf.ksprintf print_raw - "%s samples: %s, total: %s, avg: %s, stddev: %s, max: %s, min: %s)" - prefix - (pretty_num count) (pretty_num total) (pretty_num mean) - (pretty_num std_dev) (pretty_num max) (pretty_num min) - -let print_stats ?record ?print_raw () = - let record = get_record record in - SMap.iter (fun name _ -> print_entry_stats ~record ?print_raw name) (!record) - -let rec print_buckets ~low ~high ~bucket_size buckets = - if low <= high - then begin - let count = match FloatMap.get low buckets with - | None -> 0.0 - | Some count -> count in - Printf.eprintf "[%s: %s] " (pretty_num low) (pretty_num count); - let low = low +. bucket_size in - print_buckets ~low ~high ~bucket_size buckets - end - -let print_entry_distribution ?record name = - let record = get_record record in - Printf.eprintf "%s distribution -- " name; - match SMap.get name (!record) with - | None - | Some { count = 0.0; _; } -> prerr_endline "NO DATA" - | Some { distribution = None; _; } -> - prerr_endline "NO DATA (did you forget to call track_distribution?)" - | Some { max; min; distribution = Some { bucket_size; buckets; }; _; } -> - let low = round_down ~bucket_size min in - let high = round_down ~bucket_size max in - print_buckets ~low ~high ~bucket_size buckets; - prerr_newline () - -let print_distributions ?record () = - let record = get_record record in - SMap.iter (fun name { distribution; _; } -> match distribution with - | None -> () - | Some _ -> print_entry_distribution ~record name) (!record) diff --git a/hack/utils/measure.mli b/hack/utils/measure.mli deleted file mode 100644 index c83b85b9707..00000000000 --- a/hack/utils/measure.mli +++ /dev/null @@ -1,35 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type record -type record_data - -val create: unit -> record - -val push_global: unit -> unit -val pop_global: unit -> record - -val serialize: record -> record_data -val deserialize: record_data -> record - -val track_distribution: ?record:record -> string -> bucket_size:float -> unit - -val sample: ?record:record -> ?weight:float -> string -> float -> unit -val time: ?record:record -> string -> (unit -> 'a) -> 'a - -val merge: ?record:record -> from:record -> unit - -val get_sum: ?record:record -> string -> float option -val get_mean: ?record:record -> string -> float option -val get_count: ?record:record -> string -> float option - -val print_entry_stats: ?record:record -> ?print_raw:(string -> unit) -> string -> unit -val print_stats: ?record:record -> ?print_raw:(string -> unit) -> unit -> unit -val print_entry_distribution: ?record:record -> string -> unit -val print_distributions: ?record:record -> unit -> unit diff --git a/hack/utils/multifile.ml b/hack/utils/multifile.ml new file mode 100644 index 00000000000..bcf7566e970 --- /dev/null +++ b/hack/utils/multifile.ml @@ -0,0 +1,68 @@ +(* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +open Core_kernel + +(* This allows one to fake having multiple files in one file. This + * is used only in unit test files. + * Indeed, there are some features that require mutliple files to be tested. + * For example, newtype has a different meaning depending on the file. 
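A sketch of the "////" convention that file_to_files (below) splits apart; the file names and contents are hypothetical, and the newtype example echoes the comment above. Each delimited section becomes its own map entry keyed off the original path plus the section name ("<path>--a.php", "<path>--b.php", ...).

(* A single test fixture such as:
 *
 *   //// a.php
 *   <?hh
 *   newtype Meters = int;
 *   //// b.php
 *   <?hh
 *   function f(Meters $m): void {}
 *
 * is split into one entry per "////" section. *)
let split_fixture_sketch (path : Relative_path.t) : string Relative_path.Map.t =
  Multifile.file_to_files path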
+ *) +let rec make_files = function + | [] -> [] + | Str.Delim header :: Str.Text content :: rl -> + let pattern = Str.regexp "////" in + let header = Str.global_replace pattern "" header in + let pattern = Str.regexp "[ ]*" in + let filename = Str.global_replace pattern "" header in + (filename, content) :: make_files rl + | _ -> assert false + +(* We have some hacky "syntax extensions" to have one file contain multiple + * files, which can be located at arbitrary paths. This is useful e.g. for + * testing lint rules, some of which activate only on certain paths. It's also + * useful for testing abstract types, since the abstraction is enforced at the + * file boundary. + * Takes the path to a single file, returns a map of filenames to file contents. + *) +let file_to_files file = + let abs_fn = Relative_path.to_absolute file in + let content = Sys_utils.cat abs_fn in + let delim = Str.regexp "////.*\n" in + if Str.string_match delim content 0 then + let contentl = Str.full_split delim content in + let files = make_files contentl in + List.fold_left + ~f: + begin + fun acc (sub_fn, content) -> + let file = + Relative_path.create Relative_path.Dummy (abs_fn ^ "--" ^ sub_fn) + in + Relative_path.Map.add acc ~key:file ~data:content + end + ~init:Relative_path.Map.empty + files + else if String.is_prefix content ~prefix:"// @directory " then ( + let contentl = Str.split (Str.regexp "\n") content in + let first_line = List.hd_exn contentl in + let regexp = + Str.regexp "^// @directory *\\([^ ]*\\) *\\(@file *\\([^ ]*\\)*\\)?" + in + let has_match = Str.string_match regexp first_line 0 in + assert has_match; + let dir = Str.matched_group 1 first_line in + let file_name = + (try Str.matched_group 3 first_line with Caml.Not_found -> abs_fn) + in + let file = Relative_path.create Relative_path.Dummy (dir ^ file_name) in + let content = String.concat ~sep:"\n" (List.tl_exn contentl) in + Relative_path.Map.singleton file content + ) else + Relative_path.Map.singleton file content diff --git a/hack/utils/multifile.mli b/hack/utils/multifile.mli new file mode 100644 index 00000000000..f9d6d378aea --- /dev/null +++ b/hack/utils/multifile.mli @@ -0,0 +1,10 @@ +(* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +val file_to_files : Relative_path.t -> string Relative_path.Map.t diff --git a/hack/utils/mutable_accumulator.ml b/hack/utils/mutable_accumulator.ml index 79ebf2c60b2..ff2ad8c6008 100644 --- a/hack/utils/mutable_accumulator.ml +++ b/hack/utils/mutable_accumulator.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * @@ -9,11 +9,8 @@ type 'a t = 'a list ref -let create () = - ref [] +let create () = ref [] -let add t s = - t := s :: !t +let add t s = t := s :: !t -let segments t = - List.rev !t +let segments t = List.rev !t diff --git a/hack/utils/mutable_accumulator.mli b/hack/utils/mutable_accumulator.mli index 4341e6b8397..ef96027a21d 100644 --- a/hack/utils/mutable_accumulator.mli +++ b/hack/utils/mutable_accumulator.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * diff --git a/hack/utils/ocaml_overrides.ml b/hack/utils/ocaml_overrides.ml index 091a2949150..8bbb38c2ffa 100644 --- a/hack/utils/ocaml_overrides.ml +++ b/hack/utils/ocaml_overrides.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -20,18 +20,28 @@ module Ocaml_Sys = Sys module Unix = struct include Ocaml_unix + let getcwd () = Disk.getcwd () + let chdir = Disk.chdir + let mkdir = Disk.mkdir + let rename = Disk.rename end module Sys = struct include Ocaml_Sys + let getcwd () = Disk.getcwd () + let chdir = Disk.chdir + let is_directory = Disk.is_directory + let rename = Disk.rename + let file_exists = Disk.file_exists + let readdir = Disk.readdir end diff --git a/hack/utils/ocaml_overrides.mli b/hack/utils/ocaml_overrides.mli new file mode 100644 index 00000000000..969e1cc99d3 --- /dev/null +++ b/hack/utils/ocaml_overrides.mli @@ -0,0 +1,863 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +module Ocaml_unix = Unix +module Ocaml_Sys = Sys + +module Unix : sig + type error = Unix.error = + | E2BIG + | EACCES + | EAGAIN + | EBADF + | EBUSY + | ECHILD + | EDEADLK + | EDOM + | EEXIST + | EFAULT + | EFBIG + | EINTR + | EINVAL + | EIO + | EISDIR + | EMFILE + | EMLINK + | ENAMETOOLONG + | ENFILE + | ENODEV + | ENOENT + | ENOEXEC + | ENOLCK + | ENOMEM + | ENOSPC + | ENOSYS + | ENOTDIR + | ENOTEMPTY + | ENOTTY + | ENXIO + | EPERM + | EPIPE + | ERANGE + | EROFS + | ESPIPE + | ESRCH + | EXDEV + | EWOULDBLOCK + | EINPROGRESS + | EALREADY + | ENOTSOCK + | EDESTADDRREQ + | EMSGSIZE + | EPROTOTYPE + | ENOPROTOOPT + | EPROTONOSUPPORT + | ESOCKTNOSUPPORT + | EOPNOTSUPP + | EPFNOSUPPORT + | EAFNOSUPPORT + | EADDRINUSE + | EADDRNOTAVAIL + | ENETDOWN + | ENETUNREACH + | ENETRESET + | ECONNABORTED + | ECONNRESET + | ENOBUFS + | EISCONN + | ENOTCONN + | ESHUTDOWN + | ETOOMANYREFS + | ETIMEDOUT + | ECONNREFUSED + | EHOSTDOWN + | EHOSTUNREACH + | ELOOP + | EOVERFLOW + | EUNKNOWNERR of int + + exception Unix_error of error * string * string + + val error_message : error -> string + + val handle_unix_error : ('a -> 'b) -> 'a -> 'b + + val environment : unit -> string array + + val getenv : string -> string + + val putenv : string -> string -> unit + + type process_status = Unix.process_status = + | WEXITED of int + | WSIGNALED of int + | WSTOPPED of int + + type wait_flag = Unix.wait_flag = + | WNOHANG + | WUNTRACED + + val execv : string -> string array -> 'a + + val execve : string -> string array -> string array -> 'a + + val execvp : string -> string array -> 'a + + val execvpe : string -> string array -> string array -> 'a + + val fork : unit -> int + + val wait : unit -> int * process_status + + val waitpid : wait_flag list -> int -> int * process_status + + val system : string -> process_status + + val getpid : unit -> int + + val getppid : unit -> int + + val nice : int -> int + + type file_descr = Unix.file_descr + + val stdin : file_descr + + val stdout : file_descr + + val stderr : file_descr + + type open_flag = Unix.open_flag = + | O_RDONLY + | O_WRONLY + | O_RDWR + | O_NONBLOCK + | O_APPEND + | O_CREAT + | O_TRUNC + | O_EXCL + | O_NOCTTY + | O_DSYNC + | O_SYNC + | O_RSYNC + | O_SHARE_DELETE + | O_CLOEXEC + | O_KEEPEXEC + + type file_perm = int + + val openfile : string -> open_flag list -> file_perm -> file_descr + + val close : file_descr -> unit + + 
val read : file_descr -> bytes -> int -> int -> int + + val write : file_descr -> bytes -> int -> int -> int + + val single_write : file_descr -> bytes -> int -> int -> int + + val write_substring : file_descr -> string -> int -> int -> int + + val single_write_substring : file_descr -> string -> int -> int -> int + + val in_channel_of_descr : file_descr -> in_channel + + val out_channel_of_descr : file_descr -> out_channel + + val descr_of_in_channel : in_channel -> file_descr + + val descr_of_out_channel : out_channel -> file_descr + + type seek_command = Unix.seek_command = + | SEEK_SET + | SEEK_CUR + | SEEK_END + + val lseek : file_descr -> int -> seek_command -> int + + val truncate : string -> int -> unit + + val ftruncate : file_descr -> int -> unit + + type file_kind = Unix.file_kind = + | S_REG + | S_DIR + | S_CHR + | S_BLK + | S_LNK + | S_FIFO + | S_SOCK + + type stats = Unix.stats = { + st_dev: int; + st_ino: int; + st_kind: file_kind; + st_perm: file_perm; + st_nlink: int; + st_uid: int; + st_gid: int; + st_rdev: int; + st_size: int; + st_atime: float; + st_mtime: float; + st_ctime: float; + } + + val stat : string -> stats + + val lstat : string -> stats + + val fstat : file_descr -> stats + + val isatty : file_descr -> bool + + module LargeFile = Unix.LargeFile + + val unlink : string -> unit + + val link : string -> string -> unit + + type access_permission = Unix.access_permission = + | R_OK + | W_OK + | X_OK + | F_OK + + val chmod : string -> file_perm -> unit + + val fchmod : file_descr -> file_perm -> unit + + val chown : string -> int -> int -> unit + + val fchown : file_descr -> int -> int -> unit + + val umask : int -> int + + val access : string -> access_permission list -> unit + + val dup : ?cloexec:bool -> file_descr -> file_descr + + val dup2 : ?cloexec:bool -> file_descr -> file_descr -> unit + + val set_nonblock : file_descr -> unit + + val clear_nonblock : file_descr -> unit + + val set_close_on_exec : file_descr -> unit + + val clear_close_on_exec : file_descr -> unit + + val rmdir : string -> unit + + val chroot : string -> unit + + type dir_handle = Unix.dir_handle + + val opendir : string -> dir_handle + + val readdir : dir_handle -> string + + val rewinddir : dir_handle -> unit + + val closedir : dir_handle -> unit + + val pipe : ?cloexec:bool -> unit -> file_descr * file_descr + + val mkfifo : string -> file_perm -> unit + + val create_process : + string -> string array -> file_descr -> file_descr -> file_descr -> int + + val create_process_env : + string -> + string array -> + string array -> + file_descr -> + file_descr -> + file_descr -> + int + + val open_process_in : string -> in_channel + + val open_process_out : string -> out_channel + + val open_process : string -> in_channel * out_channel + + val open_process_full : + string -> string array -> in_channel * out_channel * in_channel + + val close_process_in : in_channel -> process_status + + val close_process_out : out_channel -> process_status + + val close_process : in_channel * out_channel -> process_status + + val close_process_full : + in_channel * out_channel * in_channel -> process_status + + val symlink : ?to_dir:bool -> string -> string -> unit + + val has_symlink : unit -> bool + + val readlink : string -> string + + val select : + file_descr list -> + file_descr list -> + file_descr list -> + float -> + file_descr list * file_descr list * file_descr list + + type lock_command = Unix.lock_command = + | F_ULOCK + | F_LOCK + | F_TLOCK + | F_TEST + | F_RLOCK + | F_TRLOCK + + val lockf : 
file_descr -> lock_command -> int -> unit + + val kill : int -> int -> unit + + type sigprocmask_command = Unix.sigprocmask_command = + | SIG_SETMASK + | SIG_BLOCK + | SIG_UNBLOCK + + val sigprocmask : sigprocmask_command -> int list -> int list + + val sigpending : unit -> int list + + val sigsuspend : int list -> unit + + val pause : unit -> unit + + type process_times = Unix.process_times = { + tms_utime: float; + tms_stime: float; + tms_cutime: float; + tms_cstime: float; + } + + type tm = Unix.tm = { + tm_sec: int; + tm_min: int; + tm_hour: int; + tm_mday: int; + tm_mon: int; + tm_year: int; + tm_wday: int; + tm_yday: int; + tm_isdst: bool; + } + + val time : unit -> float + + val gettimeofday : unit -> float + + val gmtime : float -> tm + + val localtime : float -> tm + + val mktime : tm -> float * tm + + val alarm : int -> int + + val sleep : int -> unit + + val sleepf : float -> unit + + val times : unit -> process_times + + val utimes : string -> float -> float -> unit + + type interval_timer = Unix.interval_timer = + | ITIMER_REAL + | ITIMER_VIRTUAL + | ITIMER_PROF + + type interval_timer_status = Unix.interval_timer_status = { + it_interval: float; + it_value: float; + } + + val getitimer : interval_timer -> interval_timer_status + + val setitimer : + interval_timer -> interval_timer_status -> interval_timer_status + + val getuid : unit -> int + + val geteuid : unit -> int + + val setuid : int -> unit + + val getgid : unit -> int + + val getegid : unit -> int + + val setgid : int -> unit + + val getgroups : unit -> int array + + val setgroups : int array -> unit + + val initgroups : string -> int -> unit + + type passwd_entry = Unix.passwd_entry = { + pw_name: string; + pw_passwd: string; + pw_uid: int; + pw_gid: int; + pw_gecos: string; + pw_dir: string; + pw_shell: string; + } + + type group_entry = Unix.group_entry = { + gr_name: string; + gr_passwd: string; + gr_gid: int; + gr_mem: string array; + } + + val getlogin : unit -> string + + val getpwnam : string -> passwd_entry + + val getgrnam : string -> group_entry + + val getpwuid : int -> passwd_entry + + val getgrgid : int -> group_entry + + type inet_addr = Unix.inet_addr + + val inet_addr_of_string : string -> inet_addr + + val string_of_inet_addr : inet_addr -> string + + val inet_addr_any : inet_addr + + val inet_addr_loopback : inet_addr + + val inet6_addr_any : inet_addr + + val inet6_addr_loopback : inet_addr + + type socket_domain = Unix.socket_domain = + | PF_UNIX + | PF_INET + | PF_INET6 + + type socket_type = Unix.socket_type = + | SOCK_STREAM + | SOCK_DGRAM + | SOCK_RAW + | SOCK_SEQPACKET + + type sockaddr = Unix.sockaddr = + | ADDR_UNIX of string + | ADDR_INET of inet_addr * int + + val socket : + ?cloexec:bool -> socket_domain -> socket_type -> int -> file_descr + + val domain_of_sockaddr : sockaddr -> socket_domain + + val socketpair : + ?cloexec:bool -> + socket_domain -> + socket_type -> + int -> + file_descr * file_descr + + val accept : ?cloexec:bool -> file_descr -> file_descr * sockaddr + + val bind : file_descr -> sockaddr -> unit + + val connect : file_descr -> sockaddr -> unit + + val listen : file_descr -> int -> unit + + type shutdown_command = Unix.shutdown_command = + | SHUTDOWN_RECEIVE + | SHUTDOWN_SEND + | SHUTDOWN_ALL + + val shutdown : file_descr -> shutdown_command -> unit + + val getsockname : file_descr -> sockaddr + + val getpeername : file_descr -> sockaddr + + type msg_flag = Unix.msg_flag = + | MSG_OOB + | MSG_DONTROUTE + | MSG_PEEK + + val recv : file_descr -> bytes -> int -> int -> 
msg_flag list -> int + + val recvfrom : + file_descr -> bytes -> int -> int -> msg_flag list -> int * sockaddr + + val send : file_descr -> bytes -> int -> int -> msg_flag list -> int + + val send_substring : + file_descr -> string -> int -> int -> msg_flag list -> int + + val sendto : + file_descr -> bytes -> int -> int -> msg_flag list -> sockaddr -> int + + val sendto_substring : + file_descr -> string -> int -> int -> msg_flag list -> sockaddr -> int + + type socket_bool_option = Unix.socket_bool_option = + | SO_DEBUG + | SO_BROADCAST + | SO_REUSEADDR + | SO_KEEPALIVE + | SO_DONTROUTE + | SO_OOBINLINE + | SO_ACCEPTCONN + | TCP_NODELAY + | IPV6_ONLY + + type socket_int_option = Unix.socket_int_option = + | SO_SNDBUF + | SO_RCVBUF + | SO_ERROR + | SO_TYPE + | SO_RCVLOWAT + | SO_SNDLOWAT + + type socket_optint_option = Unix.socket_optint_option = SO_LINGER + + type socket_float_option = Unix.socket_float_option = + | SO_RCVTIMEO + | SO_SNDTIMEO + + val getsockopt : file_descr -> socket_bool_option -> bool + + val setsockopt : file_descr -> socket_bool_option -> bool -> unit + + val getsockopt_int : file_descr -> socket_int_option -> int + + val setsockopt_int : file_descr -> socket_int_option -> int -> unit + + val getsockopt_optint : file_descr -> socket_optint_option -> int option + + val setsockopt_optint : + file_descr -> socket_optint_option -> int option -> unit + + val getsockopt_float : file_descr -> socket_float_option -> float + + val setsockopt_float : file_descr -> socket_float_option -> float -> unit + + val getsockopt_error : file_descr -> error option + + val open_connection : sockaddr -> in_channel * out_channel + + val shutdown_connection : in_channel -> unit + + val establish_server : + (in_channel -> out_channel -> unit) -> sockaddr -> unit + + type host_entry = Unix.host_entry = { + h_name: string; + h_aliases: string array; + h_addrtype: socket_domain; + h_addr_list: inet_addr array; + } + + type protocol_entry = Unix.protocol_entry = { + p_name: string; + p_aliases: string array; + p_proto: int; + } + + type service_entry = Unix.service_entry = { + s_name: string; + s_aliases: string array; + s_port: int; + s_proto: string; + } + + val gethostname : unit -> string + + val gethostbyname : string -> host_entry + + val gethostbyaddr : inet_addr -> host_entry + + val getprotobyname : string -> protocol_entry + + val getprotobynumber : int -> protocol_entry + + val getservbyname : string -> string -> service_entry + + val getservbyport : int -> string -> service_entry + + type addr_info = Unix.addr_info = { + ai_family: socket_domain; + ai_socktype: socket_type; + ai_protocol: int; + ai_addr: sockaddr; + ai_canonname: string; + } + + type getaddrinfo_option = Unix.getaddrinfo_option = + | AI_FAMILY of socket_domain + | AI_SOCKTYPE of socket_type + | AI_PROTOCOL of int + | AI_NUMERICHOST + | AI_CANONNAME + | AI_PASSIVE + + val getaddrinfo : + string -> string -> getaddrinfo_option list -> addr_info list + + type name_info = Unix.name_info = { + ni_hostname: string; + ni_service: string; + } + + type getnameinfo_option = Unix.getnameinfo_option = + | NI_NOFQDN + | NI_NUMERICHOST + | NI_NAMEREQD + | NI_NUMERICSERV + | NI_DGRAM + + val getnameinfo : sockaddr -> getnameinfo_option list -> name_info + + type terminal_io = Unix.terminal_io = { + mutable c_ignbrk: bool; + mutable c_brkint: bool; + mutable c_ignpar: bool; + mutable c_parmrk: bool; + mutable c_inpck: bool; + mutable c_istrip: bool; + mutable c_inlcr: bool; + mutable c_igncr: bool; + mutable c_icrnl: bool; + mutable 
c_ixon: bool; + mutable c_ixoff: bool; + mutable c_opost: bool; + mutable c_obaud: int; + mutable c_ibaud: int; + mutable c_csize: int; + mutable c_cstopb: int; + mutable c_cread: bool; + mutable c_parenb: bool; + mutable c_parodd: bool; + mutable c_hupcl: bool; + mutable c_clocal: bool; + mutable c_isig: bool; + mutable c_icanon: bool; + mutable c_noflsh: bool; + mutable c_echo: bool; + mutable c_echoe: bool; + mutable c_echok: bool; + mutable c_echonl: bool; + mutable c_vintr: char; + mutable c_vquit: char; + mutable c_verase: char; + mutable c_vkill: char; + mutable c_veof: char; + mutable c_veol: char; + mutable c_vmin: int; + mutable c_vtime: int; + mutable c_vstart: char; + mutable c_vstop: char; + } + + val tcgetattr : file_descr -> terminal_io + + type setattr_when = Unix.setattr_when = + | TCSANOW + | TCSADRAIN + | TCSAFLUSH + + val tcsetattr : file_descr -> setattr_when -> terminal_io -> unit + + val tcsendbreak : file_descr -> int -> unit + + val tcdrain : file_descr -> unit + + type flush_queue = Unix.flush_queue = + | TCIFLUSH + | TCOFLUSH + | TCIOFLUSH + + val tcflush : file_descr -> flush_queue -> unit + + type flow_action = Unix.flow_action = + | TCOOFF + | TCOON + | TCIOFF + | TCION + + val tcflow : file_descr -> flow_action -> unit + + val setsid : unit -> int + + val getcwd : unit -> string + + val chdir : string -> unit + + val mkdir : string -> int -> unit + + val rename : string -> string -> unit +end + +module Sys : sig + val argv : string array + + val executable_name : string + + external remove : string -> unit = "caml_sys_remove" + + external getenv : string -> string = "caml_sys_getenv" + + val getenv_opt : string -> string option + + external command : string -> int = "caml_sys_system_command" + + external time : unit -> (float[@unboxed]) + = "caml_sys_time" "caml_sys_time_unboxed" + [@@noalloc] + + val interactive : bool ref + + val os_type : string + + type backend_type = Sys.backend_type = + | Native + | Bytecode + | Other of string + + val backend_type : backend_type + + val unix : bool + + val win32 : bool + + val cygwin : bool + + val word_size : int + + val int_size : int + + val big_endian : bool + + val max_string_length : int + + val max_array_length : int + + external runtime_variant : unit -> string = "caml_runtime_variant" + + external runtime_parameters : unit -> string = "caml_runtime_parameters" + + type signal_behavior = Sys.signal_behavior = + | Signal_default + | Signal_ignore + | Signal_handle of (int -> unit) + + external signal : int -> signal_behavior -> signal_behavior + = "caml_install_signal_handler" + + val set_signal : int -> signal_behavior -> unit + + val sigabrt : int + + val sigalrm : int + + val sigfpe : int + + val sighup : int + + val sigill : int + + val sigint : int + + val sigkill : int + + val sigpipe : int + + val sigquit : int + + val sigsegv : int + + val sigterm : int + + val sigusr1 : int + + val sigusr2 : int + + val sigchld : int + + val sigcont : int + + val sigstop : int + + val sigtstp : int + + val sigttin : int + + val sigttou : int + + val sigvtalrm : int + + val sigprof : int + + val sigbus : int + + val sigpoll : int + + val sigsys : int + + val sigtrap : int + + val sigurg : int + + val sigxcpu : int + + val sigxfsz : int + + exception Break + + val catch_break : bool -> unit + + val ocaml_version : string + + val enable_runtime_warnings : bool -> unit + + val runtime_warnings_enabled : unit -> bool + + external opaque_identity : 'a -> 'a = "%opaque" + + val getcwd : unit -> string + + val chdir : string -> 
unit + + val is_directory : string -> bool + + val rename : string -> string -> unit + + val file_exists : string -> bool + + val readdir : string -> string array +end diff --git a/hack/utils/opaque_digest/dune b/hack/utils/opaque_digest/dune new file mode 100644 index 00000000000..9bec7089c61 --- /dev/null +++ b/hack/utils/opaque_digest/dune @@ -0,0 +1,3 @@ +(library + (name opaque_digest) + (wrapped false)) diff --git a/hack/utils/opaqueDigest.ml b/hack/utils/opaque_digest/opaqueDigest.ml similarity index 83% rename from hack/utils/opaqueDigest.ml rename to hack/utils/opaque_digest/opaqueDigest.ml index d6351808cdc..34e9e4a376d 100644 --- a/hack/utils/opaqueDigest.ml +++ b/hack/utils/opaque_digest/opaqueDigest.ml @@ -7,5 +7,6 @@ let to_raw_contents x = x to_hex'd *) let from_raw_contents x = try - let _ : string = to_hex x in Some x + let (_ : string) = to_hex x in + Some x with Invalid_argument _ -> None diff --git a/hack/utils/opaqueDigest.mli b/hack/utils/opaque_digest/opaqueDigest.mli similarity index 97% rename from hack/utils/opaqueDigest.mli rename to hack/utils/opaque_digest/opaqueDigest.mli index f4ebd414101..40b76b7d176 100644 --- a/hack/utils/opaqueDigest.mli +++ b/hack/utils/opaque_digest/opaqueDigest.mli @@ -1,15 +1,29 @@ type t + val compare : t -> t -> int + val equal : t -> t -> bool + val string : string -> t + val bytes : bytes -> t + val substring : string -> int -> int -> t + val subbytes : bytes -> int -> int -> t + val channel : Pervasives.in_channel -> int -> t + val file : string -> t + val output : Pervasives.out_channel -> t -> unit + val input : Pervasives.in_channel -> t + val to_hex : t -> string + val from_hex : string -> t + val to_raw_contents : t -> string + val from_raw_contents : string -> t option diff --git a/hack/utils/php_escaping.ml b/hack/utils/php_escaping.ml index 48e0de1b84f..d8a132d6c18 100644 --- a/hack/utils/php_escaping.ml +++ b/hack/utils/php_escaping.ml @@ -10,14 +10,17 @@ (* Implementation of string escaping stuff. Ugggggggh. * See http://php.net/manual/en/language.types.string.php *) -exception Invalid_string of string;; +open Core_kernel + +exception Invalid_string of string let is_printable c = c >= ' ' && c <= '~' + let is_lit_printable c = is_printable c && c <> '\\' && c <> '\"' -let is_hex c = (c >= '0' && c <= '9') || - (c >= 'a' && c <= 'f') || - (c >= 'A' && c <= 'F') +let is_hex c = + (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F') + let is_oct c = c >= '0' && c <= '7' (* This escapes a string using the format understood by the assembler @@ -35,42 +38,47 @@ let escape_char = function | '$' -> "$" | '?' -> "\\?" 
| c when is_lit_printable c -> String.make 1 c - | c -> Printf.sprintf "\\%03o" (Char.code c) + | c -> Printf.sprintf "\\%03o" (Char.to_int c) let escape ?(f = escape_char) s = let buf = Buffer.create (String.length s) in - String.iter (fun c -> Buffer.add_string buf @@ f c) s; + Caml.String.iter (fun c -> Buffer.add_string buf @@ f c) s; Buffer.contents buf (* Convert a codepoint to utf-8, appending the the bytes to a buffer *) let codepoint_to_utf8 n buf = - let add i = Buffer.add_char buf (Char.chr i) in - if n <= 0x7F then begin + let add i = Buffer.add_char buf (Char.of_int_exn i) in + if n <= 0x7F then add n - end else if n <= 0x7FF then begin - add (0xC0 lor ((n lsr 6) )); - add (0x80 lor ((n ) land 0x3F)); - end else if n <= 0x00FFFF then begin - add (0xE0 lor ((n lsr 12) )); - add (0x80 lor ((n lsr 6) land 0x3F)); - add (0x80 lor ((n ) land 0x3F)); - end else if n <= 0x10FFFF then begin - add (0xF0 lor ((n lsr 18) )); + else if n <= 0x7FF then ( + add (0xC0 lor (n lsr 6)); + add (0x80 lor (n land 0x3F)) + ) else if n <= 0x00FFFF then ( + add (0xE0 lor (n lsr 12)); + add (0x80 lor ((n lsr 6) land 0x3F)); + add (0x80 lor (n land 0x3F)) + ) else if n <= 0x10FFFF then ( + add (0xF0 lor (n lsr 18)); add (0x80 lor ((n lsr 12) land 0x3F)); - add (0x80 lor ((n lsr 6) land 0x3F)); - add (0x80 lor ((n ) land 0x3F)); - end else + add (0x80 lor ((n lsr 6) land 0x3F)); + add (0x80 lor (n land 0x3F)) + ) else raise (Invalid_string "UTF-8 codepoint too large") let parse_int s = - try - int_of_string s + try int_of_string s with _ -> raise (Invalid_string "invalid numeric escape") + let parse_numeric_escape ?(trim_to_byte = false) s = try let v = parse_int s in - let v = if trim_to_byte then v land 0xFF else v in - Char.chr v + let v = + if trim_to_byte then + v land 0xFF + else + v + in + Char.of_int_exn v with _ -> raise (Invalid_string "escaped character too large") type literal_kind = @@ -85,21 +93,27 @@ let unescape_literal literal_kind s = let buf = Buffer.create len in let idx = ref 0 in let next () = - if !idx >= len then raise (Invalid_string "string ended early") else - let c = s.[!idx] in (incr idx; c) + if !idx >= len then + raise (Invalid_string "string ended early") + else + let c = s.[!idx] in + incr idx; + c in - (* Count how many characters, starting at the current string index. * Will always stop at i=max. 
*) let rec count_f f ~max i = - if i >= max || !idx + i >= len || not (f s.[!idx+i]) then i - else count_f f max (i+1) + if i >= max || !idx + i >= len || not (f s.[!idx + i]) then + i + else + count_f f max (i + 1) in - while !idx < len do let c = next () in (* If it's the last character we're done *) - if c <> '\\' || !idx = len then Buffer.add_char buf c else begin + if c <> '\\' || !idx = len then + Buffer.add_char buf c + else let c = next () in match c with | 'a' when literal_kind = Literal_long_string -> @@ -107,81 +121,94 @@ let unescape_literal literal_kind s = | 'b' when literal_kind = Literal_long_string -> Buffer.add_char buf '\x08' | '\'' -> Buffer.add_string buf "\\\'" - | 'n' -> - if literal_kind <> Literal_long_string then - Buffer.add_char buf '\n' - | 'r' -> - if literal_kind <> Literal_long_string then - Buffer.add_char buf '\r' - | 't' -> Buffer.add_char buf '\t' - | 'v' -> Buffer.add_char buf '\x0b' - | 'e' -> Buffer.add_char buf '\x1b' - | 'f' -> Buffer.add_char buf '\x0c' + | 'n' -> + if literal_kind <> Literal_long_string then Buffer.add_char buf '\n' + | 'r' -> + if literal_kind <> Literal_long_string then Buffer.add_char buf '\r' + | 't' -> Buffer.add_char buf '\t' + | 'v' -> Buffer.add_char buf '\x0b' + | 'e' -> Buffer.add_char buf '\x1b' + | 'f' -> Buffer.add_char buf '\x0c' | '\\' -> Buffer.add_char buf '\\' - | '?' when literal_kind = Literal_long_string -> Buffer.add_char buf '\x3f' + | '?' when literal_kind = Literal_long_string -> + Buffer.add_char buf '\x3f' | '$' when literal_kind <> Literal_long_string -> Buffer.add_char buf '$' | '`' when literal_kind <> Literal_long_string -> - if literal_kind = Literal_backtick - then Buffer.add_char buf '`' - else Buffer.add_string buf "\\`" + if literal_kind = Literal_backtick then + Buffer.add_char buf '`' + else + Buffer.add_string buf "\\`" | '\"' -> - if literal_kind = Literal_double_quote || literal_kind = Literal_long_string - then Buffer.add_char buf '\"' - else Buffer.add_string buf "\\\"" - | 'u' when literal_kind <> Literal_long_string && !idx < len && s.[!idx] = '{' -> + if + literal_kind = Literal_double_quote + || literal_kind = Literal_long_string + then + Buffer.add_char buf '\"' + else + Buffer.add_string buf "\\\"" + | 'u' + when literal_kind <> Literal_long_string + && !idx < len + && s.[!idx] = '{' -> let _ = next () in let unicode_count = count_f (fun c -> c <> '}') ~max:6 0 in - let n = parse_int ("0x" ^ String.sub s (!idx) unicode_count) in + let n = parse_int ("0x" ^ Caml.String.sub s !idx unicode_count) in codepoint_to_utf8 n buf; idx := !idx + unicode_count; if next () <> '}' then raise (Invalid_string "Invalid UTF-8 escape sequence") - | 'x' | 'X' -> + | ('x' | 'X') as c -> let hex_count = count_f is_hex ~max:2 0 in - if hex_count = 0 then - Buffer.add_string buf "\\x" - else - let c = parse_numeric_escape ("0x" ^ String.sub s (!idx) hex_count) in + if hex_count = 0 then ( + Buffer.add_char buf '\\'; + Buffer.add_char buf c + ) else + let c = + parse_numeric_escape ("0x" ^ Caml.String.sub s !idx hex_count) + in Buffer.add_char buf c; idx := !idx + hex_count | c when is_oct c -> idx := !idx - 1; let oct_count = count_f is_oct ~max:3 0 in - let c = parse_numeric_escape - ~trim_to_byte:true ("0o" ^ String.sub s (!idx) oct_count) in + let c = + parse_numeric_escape + ~trim_to_byte:true + ("0o" ^ Caml.String.sub s !idx oct_count) + in Buffer.add_char buf c; idx := !idx + oct_count (* unrecognized escapes are just copied over *) | c -> Buffer.add_char buf '\\'; Buffer.add_char buf c - end; - 
done; Buffer.contents buf -let unescape_double s = - unescape_literal Literal_double_quote s +let unescape_double s = unescape_literal Literal_double_quote s -let unescape_backtick s = - unescape_literal Literal_backtick s +let unescape_backtick s = unescape_literal Literal_backtick s -let unescape_heredoc s = - unescape_literal Literal_heredoc s +let unescape_heredoc s = unescape_literal Literal_heredoc s let unescape_single_or_nowdoc ~is_nowdoc s = let len = String.length s in let buf = Buffer.create len in let idx = ref 0 in let next () = - if !idx >= len then raise (Invalid_string "string ended early") else - let c = s.[!idx] in (incr idx; c) + if !idx >= len then + raise (Invalid_string "string ended early") + else + let c = s.[!idx] in + incr idx; + c in - while !idx < len do let c = next () in - if is_nowdoc || c <> '\\' then Buffer.add_char buf c else begin + if is_nowdoc || c <> '\\' then + Buffer.add_char buf c + else let c = next () in match c with | '\'' -> Buffer.add_char buf '\'' @@ -190,17 +217,51 @@ let unescape_single_or_nowdoc ~is_nowdoc s = | c -> Buffer.add_char buf '\\'; Buffer.add_char buf c - end; - done; Buffer.contents buf -let unescape_single s = - unescape_single_or_nowdoc ~is_nowdoc:false s +let unescape_single s = unescape_single_or_nowdoc ~is_nowdoc:false s + +let unescape_nowdoc s = unescape_single_or_nowdoc ~is_nowdoc:true s -let unescape_nowdoc s = - unescape_single_or_nowdoc ~is_nowdoc:true s +let unescape_long_string s = unescape_literal Literal_long_string s -let unescape_long_string s = - unescape_literal Literal_long_string s +let extract_unquoted_string ~start ~len content = + (* Using String.sub; Invalid_argument when str too short *) + try + if + len >= 3 && Caml.String.sub content start 3 = "<<<" + (* The heredoc case *) + then + (* These types of strings begin with an opening line containing <<< + * followed by a string to use as a terminator (which is optionally + * quoted) and end with a line containing only the terminator and a + * semicolon followed by a blank line. We need to drop the opening line + * as well as the blank line and preceding terminator line. + *) + let start_ = Core_kernel.String.index_from_exn content start '\n' + 1 in + let end_ = + Core_kernel.String.rindex_from_exn content (start + len - 2) '\n' + in + (* An empty heredoc, this way, will have start >= end *) + if start_ >= end_ then + "" + else + Caml.String.sub content start_ (end_ - start_) + else + match (content.[start], content.[start + len - 1]) with + | ('"', '"') + | ('\'', '\'') + | ('`', '`') -> + Caml.String.sub content (start + 1) (len - 2) + | _ -> + if start = 0 && len = String.length content then + content + else + Caml.String.sub content start len + with + | Invalid_argument _ + | Core_kernel.Not_found_s _ + | Caml.Not_found -> + content diff --git a/hack/utils/php_escaping.mli b/hack/utils/php_escaping.mli new file mode 100644 index 00000000000..7e1c862e31b --- /dev/null +++ b/hack/utils/php_escaping.mli @@ -0,0 +1,58 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
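A brief sketch of the unescaping entry points reformatted above, assuming the behaviour shown in these hunks (double-quoted strings honour \n and \xNN escapes, and extract_unquoted_string strips a matching pair of surrounding quotes); the function name and the literals are illustrative only.

let unescape_sketch () =
  (* "\x41\n" in PHP double-quote syntax decodes to "A" followed by a newline *)
  let decoded = Php_escaping.unescape_double "\\x41\\n" in
  (* strip the surrounding quotes from a quoted lexeme *)
  let body = Php_escaping.extract_unquoted_string ~start:0 ~len:5 "\"abc\"" in
  (decoded, body)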
*) + +exception Invalid_string of string + +val is_printable : char -> bool + +val is_lit_printable : char -> bool + +val is_hex : char -> bool + +val is_oct : char -> bool + +val escape_char : char -> string + +val escape : ?f:(char -> string) -> string -> string + +val codepoint_to_utf8 : int -> Buffer.t -> unit + +val parse_int : string -> int + +val parse_numeric_escape : ?trim_to_byte:bool -> string -> char + +type literal_kind = + | Literal_heredoc + | Literal_double_quote + | Literal_backtick + | Literal_long_string + +val unescape_literal : literal_kind -> string -> string + +val unescape_double : string -> string + +val unescape_backtick : string -> string + +val unescape_heredoc : string -> string + +val unescape_single_or_nowdoc : is_nowdoc:bool -> string -> string + +val unescape_single : string -> string + +val unescape_nowdoc : string -> string + +val unescape_long_string : string -> string + +val extract_unquoted_string : start:int -> len:int -> string -> string diff --git a/hack/utils/pos.ml b/hack/utils/pos.ml index be711bd8385..224ad77f972 100644 --- a/hack/utils/pos.ml +++ b/hack/utils/pos.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * diff --git a/hack/utils/pos.mli b/hack/utils/pos.mli index 2814914a1f4..d287eb9c9b6 100644 --- a/hack/utils/pos.mli +++ b/hack/utils/pos.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,118 +7,4 @@ * *) -(* Note: While Pos.string prints out positions as closed intervals, pos_start - * and pos_end actually form a half-open interval (i.e. pos_end points to the - * character *after* the last character of the relevant lexeme.) *) -type 'a pos - -(** The underlying type used to construct Pos instances. - * - * See "val make: 'a -> b -> 'a pos" *) -type b = Pos_source.t - -type t = Relative_path.t pos - -val pp : Format.formatter -> t -> unit - -type absolute = string pos - -val none : t - -val filename : 'a pos -> 'a - -val start_cnum : 'a pos -> int - -val end_cnum : 'a pos -> int - -val line : 'a pos -> int - -val end_line : 'a pos -> int - -(* This returns a closed interval that's incorrect for multi-line spans. *) -val info_pos : 'a pos -> int * int * int - -(* This returns a closed interval. *) -val info_pos_extended : 'a pos -> int * int * int * int - -val info_raw : 'a pos -> int * int - -val length : 'a pos -> int - -(* This returns a closed interval. *) -val string : absolute -> string - -(* This returns a half-open interval. *) -val multiline_string : absolute -> string - -(* This returns a closed interval. *) -val string_no_file : 'a pos -> string - -(* This returns a half-open interval. *) -val multiline_string_no_file : 'a pos -> string - -(* This returns a closed interval. *) -val json : absolute -> Hh_json.json - -(* This returns a half-open interval. *) -val multiline_json : absolute -> Hh_json.json - -val line_beg_offset : t -> int * int * int - -val inside : 'a pos -> int -> int -> bool - -val exactly_matches_range : - 'a pos -> start_line:int -> start_col:int -> end_line:int -> end_col:int -> bool - -val contains : 'a pos -> 'a pos -> bool - -(* Does first position strictly overlap the second position? *) -val overlaps : 'a pos -> 'a pos -> bool - -val make : 'a -> b -> 'a pos - -val make_from_lexing_pos : 'a -> Lexing.position -> Lexing.position -> 'a pos - -val make_from : 'a -> 'a pos - -val btw_nocheck : 'a pos -> 'a pos -> 'a pos - -(* Fill in the gap "between" first position and second position. 
- * Not valid if from different files or second position precedes first *) -val btw : t -> t -> t - -(* Symmetric version of above: order doesn't matter *) -val merge : t -> t -> t - -val last_char : t -> t - -val first_char_of_line : t -> t - -val to_absolute : t -> absolute - -val to_relative: absolute -> t - -val to_relative_string : t -> string pos - -val get_text_from_pos: content:string -> 'a pos -> string - -(* This returns a half-open interval. *) -val destruct_range : 'a pos -> (int * int * int * int) - -(* Compare by filename, then tie-break by start position, and finally by the - * end position *) -val compare : 'a pos -> 'a pos -> int - -val set_file : 'a -> 'a pos -> 'a pos - -val make_from_lnum_bol_cnum : - pos_file:Relative_path.t -> - pos_start:int*int*int -> - pos_end:int*int*int -> - t -module Map : MyMap.S with type key = t -module AbsolutePosMap : MyMap.S with type key = absolute - - -val print_verbose_absolute : absolute -> string -val print_verbose_relative : t -> string +include module type of Pos_embedded diff --git a/hack/utils/pos_embedded.ml b/hack/utils/pos_embedded.ml index 92752a97f0d..ad131a65b68 100644 --- a/hack/utils/pos_embedded.ml +++ b/hack/utils/pos_embedded.ml @@ -6,6 +6,7 @@ * when choosing a different Pos module, you must also choose its * compatible Pos_source module. *) +open Core_kernel open Lexing type b = Pos_source.t @@ -13,49 +14,59 @@ type b = Pos_source.t (* Note: While Pos.string and Pos.info_pos return positions as closed intervals, * pos_start and pos_end actually form a half-open interval (i.e. pos_end points * to the character *after* the last character of the relevant lexeme.) *) +[@@@warning "-32"] + type 'a pos = -| Pos_small of { - pos_file: 'a ; - pos_start: File_pos_small.t ; - pos_end: File_pos_small.t ; -} -| Pos_large of { - pos_file: 'a ; - pos_start : File_pos_large.t ; - pos_end: File_pos_large.t ; -} [@@deriving show] + | Pos_small of { + pos_file: 'a; + pos_start: File_pos_small.t; + pos_end: File_pos_small.t; + } + | Pos_large of { + pos_file: 'a; + pos_start: File_pos_large.t; + pos_end: File_pos_large.t; + } +[@@deriving show] type t = Relative_path.t pos [@@deriving show] type absolute = string pos [@@deriving show] -let none = Pos_small { - pos_file = Relative_path.default ; - pos_start = File_pos_small.dummy ; - pos_end = File_pos_small.dummy ; -} +[@@@warning "+32"] + +let none = + Pos_small + { + pos_file = Relative_path.default; + pos_start = File_pos_small.dummy; + pos_end = File_pos_small.dummy; + } let pp fmt pos = if pos = none then Format.pp_print_string fmt "[Pos.none]" - else begin + else ( Format.pp_print_string fmt "["; - begin match pos with - | Pos_small { pos_start; pos_end; _ } -> - File_pos_small.pp fmt pos_start; - Format.pp_print_string fmt "-"; - if File_pos_small.line pos_start = File_pos_small.line pos_end - then Format.pp_print_int fmt @@ File_pos_small.column pos_end + 1 - else File_pos_small.pp fmt pos_end - | Pos_large { pos_start; pos_end; _ } -> - File_pos_large.pp fmt pos_start; - Format.pp_print_string fmt "-"; - if File_pos_large.line pos_start = File_pos_large.line pos_end - then Format.pp_print_int fmt @@ File_pos_large.column pos_end + 1 - else File_pos_large.pp fmt pos_end + begin + match pos with + | Pos_small { pos_start; pos_end; _ } -> + File_pos_small.pp fmt pos_start; + Format.pp_print_string fmt "-"; + if File_pos_small.line pos_start = File_pos_small.line pos_end then + Format.pp_print_int fmt @@ (File_pos_small.column pos_end + 1) + else + File_pos_small.pp fmt pos_end + | 
Pos_large { pos_start; pos_end; _ } -> + File_pos_large.pp fmt pos_start; + Format.pp_print_string fmt "-"; + if File_pos_large.line pos_start = File_pos_large.line pos_end then + Format.pp_print_int fmt @@ (File_pos_large.column pos_end + 1) + else + File_pos_large.pp fmt pos_end end; - Format.pp_print_string fmt "]"; - end + Format.pp_print_string fmt "]" + ) let filename p = match p with @@ -66,7 +77,7 @@ let filename p = let info_pos p = match p with | Pos_small { pos_start; pos_end; _ } -> - let line, start_minus1, bol = File_pos_small.line_column_beg pos_start in + let (line, start_minus1, bol) = File_pos_small.line_column_beg pos_start in let start = start_minus1 + 1 in let end_offset = File_pos_small.offset pos_end in let end_ = end_offset - bol in @@ -75,10 +86,15 @@ let info_pos p = we print characters N to N (highlighting a single character) rather than characters N to (N-1), which is very unintuitive. *) - let end_ = if start = end_ + 1 then start else end_ in - line, start, end_ + let end_ = + if start = end_ + 1 then + start + else + end_ + in + (line, start, end_) | Pos_large { pos_start; pos_end; _ } -> - let line, start_minus1, bol = File_pos_large.line_column_beg pos_start in + let (line, start_minus1, bol) = File_pos_large.line_column_beg pos_start in let start = start_minus1 + 1 in let end_offset = File_pos_large.offset pos_end in let end_ = end_offset - bol in @@ -87,26 +103,31 @@ let info_pos p = we print characters N to N (highlighting a single character) rather than characters N to (N-1), which is very unintuitive. *) - let end_ = if start = end_ + 1 then start else end_ in - line, start, end_ + let end_ = + if start = end_ + 1 then + start + else + end_ + in + (line, start, end_) (* This returns a closed interval. *) let info_pos_extended p = - let line_begin, start, end_ = info_pos p in + let (line_begin, start, end_) = info_pos p in match p with | Pos_small { pos_end; _ } -> - let line_end, _, _ = File_pos_small.line_column_beg pos_end in - line_begin, line_end, start, end_ + let (line_end, _, _) = File_pos_small.line_column_beg pos_end in + (line_begin, line_end, start, end_) | Pos_large { pos_end; _ } -> - let line_end, _, _ = File_pos_large.line_column_beg pos_end in - line_begin, line_end, start, end_ + let (line_end, _, _) = File_pos_large.line_column_beg pos_end in + (line_begin, line_end, start, end_) let info_raw p = match p with | Pos_small { pos_start; pos_end; _ } -> - File_pos_small.offset pos_start, File_pos_small.offset pos_end + (File_pos_small.offset pos_start, File_pos_small.offset pos_end) | Pos_large { pos_start; pos_end; _ } -> - File_pos_large.offset pos_start, File_pos_large.offset pos_end + (File_pos_large.offset pos_start, File_pos_large.offset pos_end) let length p = match p with @@ -137,28 +158,38 @@ let end_line p = (* This returns a closed interval. *) let string t = - let line, start, end_ = info_pos t in - Printf.sprintf "File %S, line %d, characters %d-%d:" - (String.trim (filename t)) line start end_ + let (line, start, end_) = info_pos t in + Printf.sprintf + "File %S, line %d, characters %d-%d:" + (String.strip (filename t)) + line + start + end_ (* Some positions, like those in buffers sent by IDE/created by unit tests might * not have a file specified. * This returns a closed interval. *) let string_no_file t = - let line, start, end_ = info_pos t in + let (line, start, end_) = info_pos t in Printf.sprintf "line %d, characters %d-%d" line start end_ (* This returns a closed interval. 
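As a small consumer-side sketch (not part of the diff): the closed-interval accessors used by string and json can be combined to render a one-line range; Pos.info_pos and Pos.filename come from the interface earlier in this diff, while the helper name is hypothetical.

let one_line_range_sketch (p : Pos.absolute) : string =
  (* info_pos is a closed interval: line, first character, last character *)
  let (line, char_start, char_end) = Pos.info_pos p in
  Printf.sprintf "%s:%d:%d-%d" (Pos.filename p) line char_start char_end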
*) let json pos = - let line, start, end_ = info_pos pos in + let (line, start, end_) = info_pos pos in let fn = filename pos in - Hh_json.JSON_Object [ - "filename", Hh_json.JSON_String fn; - "line", Hh_json.int_ line; - "char_start", Hh_json.int_ start; - "char_end", Hh_json.int_ end_; - ] - + Hh_json.JSON_Object + [ + ("filename", Hh_json.JSON_String fn); + ("line", Hh_json.int_ line); + ("char_start", Hh_json.int_ start); + ("char_end", Hh_json.int_ end_); + ] + +(* + * !!! Be careful !!! + * This method returns zero-based column numbers, but one-based line numbers. + * Consider using info_pos instead. + *) let line_column p = match p with | Pos_small { pos_start; _ } -> File_pos_small.line_column pos_start @@ -170,82 +201,86 @@ let end_line_column p = | Pos_large { pos_end; _ } -> File_pos_large.line_column pos_end let inside p line char_pos = - let first_line, first_col = line_column p in - let last_line, last_col = end_line_column p in + let (first_line, first_col) = line_column p in + let (last_line, last_col) = end_line_column p in if first_line = last_line then first_line = line && first_col + 1 <= char_pos && char_pos <= last_col + else if line = first_line then + char_pos > first_col + else if line = last_line then + char_pos <= last_col else - if line = first_line then char_pos > first_col - else if line = last_line then char_pos <= last_col - else line > first_line && line < last_line + line > first_line && line < last_line let exactly_matches_range p ~start_line ~start_col ~end_line ~end_col = - let p_start_line, p_start_col = line_column p in - let p_end_line, p_end_col = end_line_column p in - p_start_line = start_line && - p_start_col = start_col - 1 && - p_end_line = end_line && - p_end_col = end_col - 1 + let (p_start_line, p_start_col) = line_column p in + let (p_end_line, p_end_col) = end_line_column p in + p_start_line = start_line + && p_start_col = start_col - 1 + && p_end_line = end_line + && p_end_col = end_col - 1 let contains pos_container pos = - let cstart, cend = info_raw pos_container in - let pstart, pend = info_raw pos in - filename pos_container = filename pos && - pstart >= cstart && - pend <= cend + let (cstart, cend) = info_raw pos_container in + let (pstart, pend) = info_raw pos in + filename pos_container = filename pos && pstart >= cstart && pend <= cend let overlaps pos1 pos2 = - let _start1, end1 = info_raw pos1 in - let start2, _end2 = info_raw pos2 in - filename pos1 = filename pos2 && - end1 > start2 + let (start1, end1) = info_raw pos1 in + let (start2, end2) = info_raw pos2 in + filename pos1 = filename pos2 && end1 > start2 && start1 < end2 let make_from_lexing_pos pos_file pos_start pos_end = - match File_pos_small.of_lexing_pos pos_start, File_pos_small.of_lexing_pos pos_end with - | Some pos_start, Some pos_end -> - Pos_small - { pos_file; pos_start; pos_end } - | _, _ -> + match + ( File_pos_small.of_lexing_pos pos_start, + File_pos_small.of_lexing_pos pos_end ) + with + | (Some pos_start, Some pos_end) -> + Pos_small { pos_file; pos_start; pos_end } + | (_, _) -> Pos_large - { pos_file; - pos_start = File_pos_large.of_lexing_pos pos_start; - pos_end = File_pos_large.of_lexing_pos pos_end; - } + { + pos_file; + pos_start = File_pos_large.of_lexing_pos pos_start; + pos_end = File_pos_large.of_lexing_pos pos_end; + } -let make file (lb: b) = +let make file (lb : b) = let pos_start = lexeme_start_p lb in let pos_end = lexeme_end_p lb in make_from_lexing_pos file pos_start pos_end let make_from file = let pos = File_pos_small.dummy in - 
Pos_small - { pos_file = file; - pos_start = pos; - pos_end = pos; - } + Pos_small { pos_file = file; pos_start = pos; pos_end = pos } let small_to_large_file_pos p = - let lnum, col, bol = File_pos_small.line_column_beg p in + let (lnum, col, bol) = File_pos_small.line_column_beg p in File_pos_large.of_lnum_bol_cnum lnum bol (bol + col) let as_large_pos p = match p with | Pos_small { pos_file; pos_start; pos_end } -> - Pos_large { pos_file; pos_start = small_to_large_file_pos pos_start; - pos_end = small_to_large_file_pos pos_end } + Pos_large + { + pos_file; + pos_start = small_to_large_file_pos pos_start; + pos_end = small_to_large_file_pos pos_end; + } | _ -> p let btw_nocheck x1 x2 = - match x1, x2 with - | Pos_small { pos_file; pos_start; _ }, Pos_small { pos_end; _ } -> - Pos_small { pos_file; pos_start; pos_end; } - | Pos_large { pos_file; pos_start; _ }, Pos_large { pos_end; _ } -> - Pos_large { pos_file; pos_start; pos_end; } - | Pos_small { pos_file; pos_start; _ }, Pos_large { pos_end; _ } -> - Pos_large { pos_file; pos_start = small_to_large_file_pos pos_start; pos_end } - | Pos_large { pos_file; pos_start; _ }, Pos_small { pos_end; _ } -> - Pos_large { pos_file; pos_start; pos_end = small_to_large_file_pos pos_end } + match (x1, x2) with + | (Pos_small { pos_file; pos_start; _ }, Pos_small { pos_end; _ }) -> + Pos_small { pos_file; pos_start; pos_end } + | (Pos_large { pos_file; pos_start; _ }, Pos_large { pos_end; _ }) -> + Pos_large { pos_file; pos_start; pos_end } + | (Pos_small { pos_file; pos_start; _ }, Pos_large { pos_end; _ }) -> + Pos_large + { pos_file; pos_start = small_to_large_file_pos pos_start; pos_end } + | (Pos_large { pos_file; pos_start; _ }, Pos_small { pos_end; _ }) -> + Pos_large + { pos_file; pos_start; pos_end = small_to_large_file_pos pos_end } let set_file pos_file pos = match pos with @@ -254,83 +289,94 @@ let set_file pos_file pos = | Pos_large { pos_start; pos_end; _ } -> Pos_large { pos_file; pos_start; pos_end } -let to_absolute p = - set_file (Relative_path.to_absolute (filename p)) p +let to_absolute p = set_file (Relative_path.to_absolute (filename p)) p let to_relative p = set_file (Relative_path.create_detect_prefix (filename p)) p let btw x1 x2 = - if filename x1 <> filename x2 - then failwith "Position in separate files"; - if end_cnum x1 > end_cnum x2 - then failwith (Printf.sprintf "btw: invalid positions %s and %s" - (string (to_absolute x1)) (string (to_absolute x2))); + if filename x1 <> filename x2 then failwith "Position in separate files"; + if end_cnum x1 > end_cnum x2 then + failwith + (Printf.sprintf + "btw: invalid positions %s and %s" + (string (to_absolute x1)) + (string (to_absolute x2))); btw_nocheck x1 x2 let rec merge x1 x2 = - match x1, x2 with - | Pos_small { pos_file = file1; pos_start = start1; pos_end = end1; }, - Pos_small { pos_file = _; pos_start = start2; pos_end = end2; } -> + match (x1, x2) with + | ( Pos_small { pos_file = file1; pos_start = start1; pos_end = end1 }, + Pos_small { pos_file = _; pos_start = start2; pos_end = end2 } ) -> let pos_start = - if File_pos_small.is_dummy start1 - then start2 + if File_pos_small.is_dummy start1 then + start2 + else if File_pos_small.is_dummy start2 then + start1 + else if start_cnum x1 < start_cnum x2 then + start1 else - if File_pos_small.is_dummy start2 - then start1 - else if start_cnum x1 < start_cnum x2 then start1 else start2 in + start2 + in let pos_end = - if File_pos_small.is_dummy end1 - then end2 + if File_pos_small.is_dummy end1 then + end2 + else if 
File_pos_small.is_dummy end2 then + end1 + else if end_cnum x1 < end_cnum x2 then + end2 else - if File_pos_small.is_dummy end2 - then end1 - else if end_cnum x1 < end_cnum x2 then end2 else end1 in - Pos_small { pos_file = file1; pos_start = pos_start; pos_end = pos_end } - | Pos_large { pos_file = file1; pos_start = start1; pos_end = end1; }, - Pos_large { pos_file = _; pos_start = start2; pos_end = end2; } -> + end1 + in + Pos_small { pos_file = file1; pos_start; pos_end } + | ( Pos_large { pos_file = file1; pos_start = start1; pos_end = end1 }, + Pos_large { pos_file = _; pos_start = start2; pos_end = end2 } ) -> let pos_start = - if File_pos_large.is_dummy start1 - then start2 + if File_pos_large.is_dummy start1 then + start2 + else if File_pos_large.is_dummy start2 then + start1 + else if start_cnum x1 < start_cnum x2 then + start1 else - if File_pos_large.is_dummy start2 - then start1 - else if start_cnum x1 < start_cnum x2 then start1 else start2 in + start2 + in let pos_end = - if File_pos_large.is_dummy end1 - then end2 + if File_pos_large.is_dummy end1 then + end2 + else if File_pos_large.is_dummy end2 then + end1 + else if end_cnum x1 < end_cnum x2 then + end2 else - if File_pos_large.is_dummy end2 - then end1 - else if end_cnum x1 < end_cnum x2 then end2 else end1 in - Pos_large { pos_file = file1; pos_start = pos_start; pos_end = pos_end } - | _, _ -> - merge (as_large_pos x1) (as_large_pos x2) + end1 + in + Pos_large { pos_file = file1; pos_start; pos_end } + | (_, _) -> merge (as_large_pos x1) (as_large_pos x2) let last_char p = - if p = none - then none + if p = none then + none else - match p with - | Pos_small { pos_start = _ ; pos_end; pos_file } -> - Pos_small { pos_start = pos_end; pos_end; pos_file } - | Pos_large { pos_start = _; pos_end; pos_file } -> - Pos_large { pos_start = pos_end; pos_end; pos_file } + match p with + | Pos_small { pos_start = _; pos_end; pos_file } -> + Pos_small { pos_start = pos_end; pos_end; pos_file } + | Pos_large { pos_start = _; pos_end; pos_file } -> + Pos_large { pos_start = pos_end; pos_end; pos_file } let first_char_of_line p = - if p = none - then none + if p = none then + none else - match p with - | Pos_small { pos_start; pos_end = _; pos_file } -> - let start = File_pos_small.set_column 0 pos_start in - Pos_small { pos_start = start; pos_end = start; pos_file } - | Pos_large { pos_start; pos_end = _; pos_file } -> - let start = File_pos_large.set_column 0 pos_start in - Pos_large { pos_start = start; pos_end = start; pos_file } + match p with + | Pos_small { pos_start; pos_end = _; pos_file } -> + let start = File_pos_small.set_column 0 pos_start in + Pos_small { pos_start = start; pos_end = start; pos_file } + | Pos_large { pos_start; pos_end = _; pos_file } -> + let start = File_pos_large.set_column 0 pos_start in + Pos_large { pos_start = start; pos_end = start; pos_file } -let to_relative_string p = - set_file (Relative_path.suffix (filename p)) p +let to_relative_string p = set_file (Relative_path.suffix (filename p)) p let get_text_from_pos ~content pos = let pos_length = length pos in @@ -342,93 +388,177 @@ let get_text_from_pos ~content pos = *) let compare x y = let r = Pervasives.compare (filename x) (filename y) in - if r <> 0 then r + if r <> 0 then + r else - let xstart, xend = info_raw x in - let ystart, yend = info_raw y in + let (xstart, xend) = info_raw x in + let (ystart, yend) = info_raw y in let r = xstart - ystart in - if r <> 0 then r - else xend - yend + if r <> 0 then + r + else + xend - yend (* This 
returns a half-open interval. *) -let destruct_range (p : 'a pos) : (int * int * int * int) = - let line_start, col_start_minus1 = line_column p in - let line_end, col_end_minus1 = end_line_column p in - line_start, col_start_minus1 + 1, - line_end, col_end_minus1 + 1 +let destruct_range (p : 'a pos) : int * int * int * int = + let (line_start, col_start_minus1) = line_column p in + let (line_end, col_end_minus1) = end_line_column p in + (line_start, col_start_minus1 + 1, line_end, col_end_minus1 + 1) + +let advance_one (p : 'a pos) : 'a pos = + match p with + | Pos_small { pos_file; pos_start; pos_end } -> + Pos_small + { + pos_file; + pos_start; + pos_end = + (let column = File_pos_small.column pos_end in + File_pos_small.set_column (column + 1) pos_end); + } + | Pos_large { pos_file; pos_start; pos_end } -> + Pos_large + { + pos_file; + pos_start; + pos_end = + (let column = File_pos_large.column pos_end in + File_pos_large.set_column (column + 1) pos_end); + } + +(* This function is used when we have captured a position that includes + * outside boundary characters like apostrophes. If we need to remove these + * apostrophes, this function shrinks by one character in each direction. *) +let shrink_by_one_char_both_sides (p : 'a pos) : 'a pos = + match p with + | Pos_small { pos_file; pos_start; pos_end } -> + let new_pos_start = + let column = File_pos_small.column pos_start in + File_pos_small.set_column (column + 1) pos_start + in + let new_pos_end = + let column = File_pos_small.column pos_end in + File_pos_small.set_column (column - 1) pos_end + in + Pos_small { pos_file; pos_start = new_pos_start; pos_end = new_pos_end } + | Pos_large { pos_file; pos_start; pos_end } -> + let new_pos_start = + let column = File_pos_large.column pos_start in + File_pos_large.set_column (column + 1) pos_start + in + let new_pos_end = + let column = File_pos_large.column pos_end in + File_pos_large.set_column (column - 1) pos_end + in + Pos_large { pos_file; pos_start = new_pos_start; pos_end = new_pos_end } (* This returns a half-open interval. *) let multiline_string t = - let line_start, char_start, line_end, char_end = destruct_range t in - Printf.sprintf "File %S, line %d, character %d - line %d, character %d:" - (String.trim (filename t)) line_start char_start line_end (char_end - 1) + let (line_start, char_start, line_end, char_end) = destruct_range t in + Printf.sprintf + "File %S, line %d, character %d - line %d, character %d:" + (String.strip (filename t)) + line_start + char_start + line_end + (char_end - 1) (* This returns a half-open interval. *) let multiline_string_no_file t = - let line_start, char_start, line_end, char_end = destruct_range t in - Printf.sprintf "line %d, character %d - line %d, character %d" - line_start char_start line_end (char_end - 1) + let (line_start, char_start, line_end, char_end) = destruct_range t in + Printf.sprintf + "line %d, character %d - line %d, character %d" + line_start + char_start + line_end + (char_end - 1) (* This returns a half-open interval. 
*) let multiline_json t = - let line_start, char_start, line_end, char_end = destruct_range t in + let (line_start, char_start, line_end, char_end) = destruct_range t in let fn = filename t in - Hh_json.JSON_Object [ - "filename", Hh_json.JSON_String fn; - "line_start", Hh_json.int_ line_start; - "char_start", Hh_json.int_ char_start; - "line_end", Hh_json.int_ line_end; - "char_end", Hh_json.int_ (char_end - 1); - ] + Hh_json.JSON_Object + [ + ("filename", Hh_json.JSON_String fn); + ("line_start", Hh_json.int_ line_start); + ("char_start", Hh_json.int_ char_start); + ("line_end", Hh_json.int_ line_end); + ("char_end", Hh_json.int_ (char_end - 1)); + ] let line_beg_offset p = match p with - | Pos_small { pos_start; _ } -> - File_pos_small.line_beg_offset pos_start - | Pos_large { pos_start; _ } -> - File_pos_large.line_beg_offset pos_start + | Pos_small { pos_start; _ } -> File_pos_small.line_beg_offset pos_start + | Pos_large { pos_start; _ } -> File_pos_large.line_beg_offset pos_start let end_line_beg_offset p = match p with - | Pos_small { pos_end; _ } -> - File_pos_small.line_beg_offset pos_end - | Pos_large { pos_end; _ } -> - File_pos_large.line_beg_offset pos_end + | Pos_small { pos_end; _ } -> File_pos_small.line_beg_offset pos_end + | Pos_large { pos_end; _ } -> File_pos_large.line_beg_offset pos_end let make_from_lnum_bol_cnum ~pos_file ~pos_start ~pos_end = - let lnum_start, bol_start, cnum_start = pos_start in - let lnum_end, bol_end, cnum_end = pos_end in -match File_pos_small.of_lnum_bol_cnum ~pos_lnum:lnum_start ~pos_bol:bol_start ~pos_cnum:cnum_start, - File_pos_small.of_lnum_bol_cnum ~pos_lnum:lnum_end ~pos_bol:bol_end ~pos_cnum:cnum_end with -| Some pos_start, Some pos_end -> - Pos_small - { pos_file; pos_start; pos_end } -| _, _ -> - Pos_large - { pos_file; - pos_start = File_pos_large.of_lnum_bol_cnum - ~pos_lnum:lnum_start ~pos_bol:bol_start ~pos_cnum:cnum_start; - pos_end = File_pos_large.of_lnum_bol_cnum - ~pos_lnum:lnum_end ~pos_bol:bol_end ~pos_cnum:cnum_end; - } - + let (lnum_start, bol_start, cnum_start) = pos_start in + let (lnum_end, bol_end, cnum_end) = pos_end in + match + ( File_pos_small.of_lnum_bol_cnum + ~pos_lnum:lnum_start + ~pos_bol:bol_start + ~pos_cnum:cnum_start, + File_pos_small.of_lnum_bol_cnum + ~pos_lnum:lnum_end + ~pos_bol:bol_end + ~pos_cnum:cnum_end ) + with + | (Some pos_start, Some pos_end) -> + Pos_small { pos_file; pos_start; pos_end } + | (_, _) -> + Pos_large + { + pos_file; + pos_start = + File_pos_large.of_lnum_bol_cnum + ~pos_lnum:lnum_start + ~pos_bol:bol_start + ~pos_cnum:cnum_start; + pos_end = + File_pos_large.of_lnum_bol_cnum + ~pos_lnum:lnum_end + ~pos_bol:bol_end + ~pos_cnum:cnum_end; + } + +let pessimize_enabled pos pessimize_coefficient = + let path = filename pos in + match Relative_path.prefix path with + | Relative_path.Root when pessimize_coefficient > 0.0 -> + let range = 2000000 in + let filename = Relative_path.suffix path in + let hash = Hashtbl.hash filename in + let r = hash % range in + Float.of_int r /. 
Float.of_int range <= pessimize_coefficient + | _ -> pessimize_coefficient = 1.0 + +(* hack for test cases *) let print_verbose_absolute p = - let a, b, c = line_beg_offset p in - let d, e, f = end_line_beg_offset p in + let (a, b, c) = line_beg_offset p in + let (d, e, f) = end_line_beg_offset p in Printf.sprintf "Pos('%s', <%d,%d,%d>, <%d,%d,%d>)" (filename p) a b c d e f let print_verbose_relative p = print_verbose_absolute (to_absolute p) module Map = MyMap.Make (struct type path = t + (* The definition below needs to refer to the t in the outer scope, but MyMap * expects a module with a type of name t, so we define t in a second step *) type t = path + let compare = compare end) module AbsolutePosMap = MyMap.Make (struct type t = absolute + let compare = compare end) diff --git a/hack/utils/pos_embedded.mli b/hack/utils/pos_embedded.mli new file mode 100644 index 00000000000..4a2ff806dd8 --- /dev/null +++ b/hack/utils/pos_embedded.mli @@ -0,0 +1,146 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* Note: While Pos.string prints out positions as closed intervals, pos_start + * and pos_end actually form a half-open interval (i.e. pos_end points to the + * character *after* the last character of the relevant lexeme.) *) +type 'a pos + +type b = Pos_source.t +(** The underlying type used to construct Pos instances. + * + * See "val make: 'a -> b -> 'a pos" *) + +type t = Relative_path.t pos + +val pp : Format.formatter -> t -> unit + +type absolute = string pos + +val none : t + +val filename : 'a pos -> 'a + +val start_cnum : 'a pos -> int + +val end_cnum : 'a pos -> int + +val line : 'a pos -> int + +val line_column : 'a pos -> int * int + +val end_line : 'a pos -> int + +val end_line_column : 'a pos -> int * int + +(* This returns a closed interval that's incorrect for multi-line spans. *) +val info_pos : 'a pos -> int * int * int + +(* This returns a closed interval. *) +val info_pos_extended : 'a pos -> int * int * int * int + +val info_raw : 'a pos -> int * int + +val length : 'a pos -> int + +(* This returns a closed interval. *) +val string : absolute -> string + +(* This returns a half-open interval. *) +val multiline_string : absolute -> string + +(* This returns a closed interval. *) +val string_no_file : 'a pos -> string + +(* This returns a half-open interval. *) +val multiline_string_no_file : 'a pos -> string + +(* This returns a closed interval. *) +val json : absolute -> Hh_json.json + +(* This returns a half-open interval. *) +val multiline_json : absolute -> Hh_json.json + +val line_beg_offset : t -> int * int * int + +val inside : 'a pos -> int -> int -> bool + +val exactly_matches_range : + 'a pos -> + start_line:int -> + start_col:int -> + end_line:int -> + end_col:int -> + bool + +val contains : 'a pos -> 'a pos -> bool + +(* Does first position strictly overlap the second position? *) +val overlaps : 'a pos -> 'a pos -> bool + +val make : 'a -> b -> 'a pos + +val make_from_lexing_pos : 'a -> Lexing.position -> Lexing.position -> 'a pos + +val make_from : 'a -> 'a pos + +val btw_nocheck : 'a pos -> 'a pos -> 'a pos + +(* Fill in the gap "between" first position and second position. 
+ * Not valid if from different files or second position precedes first *) +val btw : t -> t -> t + +(* Symmetric version of above: order doesn't matter *) +val merge : t -> t -> t + +val last_char : t -> t + +val first_char_of_line : t -> t + +val to_absolute : t -> absolute + +val to_relative : absolute -> t + +val to_relative_string : t -> string pos + +val get_text_from_pos : content:string -> 'a pos -> string + +(* This returns a half-open interval. *) +val destruct_range : 'a pos -> int * int * int * int + +(* Advance the ending position by one character *) +val advance_one : 'a pos -> 'a pos + +(* Reduce the size of this position element by one character on the left and + * one character on the right. For example, if you've captured a position + * that includes outside apostrophes, this will shrink it to only the contents + * within the apostrophes. *) +val shrink_by_one_char_both_sides : 'a pos -> 'a pos + +(* Compare by filename, then tie-break by start position, and finally by the + * end position *) +val compare : 'a pos -> 'a pos -> int + +val set_file : 'a -> 'a pos -> 'a pos + +val make_from_lnum_bol_cnum : + pos_file:Relative_path.t -> + pos_start:int * int * int -> + pos_end:int * int * int -> + t + +module Map : MyMap.S with type key = t + +module AbsolutePosMap : MyMap.S with type key = absolute + +val print_verbose_absolute : absolute -> string + +val print_verbose_relative : t -> string + +val pessimize_enabled : t -> float -> bool diff --git a/hack/utils/pos_source.mli b/hack/utils/pos_source.mli new file mode 100644 index 00000000000..d7506e16e8f --- /dev/null +++ b/hack/utils/pos_source.mli @@ -0,0 +1,20 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +module Lexbuf_based_pos_source : sig + type t = Lexing.lexbuf +end + +type t = Lexing.lexbuf diff --git a/hack/utils/prim_defs.ml b/hack/utils/prim_defs.ml deleted file mode 100644 index 1deb3be1085..00000000000 --- a/hack/utils/prim_defs.ml +++ /dev/null @@ -1,23 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - ** - * - * "Primitive" definitions; fighting the dependency web, this module is a leaf - * on the dependency tree. It may only depend on external libraries and not on - * a single module inside the repository. 
- * - *) - -type comment = - | CmtLine of string - | CmtBlock of string - | CmtMarkup of string -[@@deriving show] - -let is_line_comment = function | CmtLine _ -> true | _ -> false -let string_of_comment = function | CmtLine s | CmtBlock s | CmtMarkup s -> s diff --git a/hack/utils/procfs/dune b/hack/utils/procfs/dune new file mode 100644 index 00000000000..d84babf075e --- /dev/null +++ b/hack/utils/procfs/dune @@ -0,0 +1,7 @@ +(library + (name procfs) + (wrapped false) + (libraries + collections + sys_utils + utils_core)) diff --git a/hack/utils/procfs/procFS.ml b/hack/utils/procfs/procFS.ml new file mode 100644 index 00000000000..917e92be8c7 --- /dev/null +++ b/hack/utils/procfs/procFS.ml @@ -0,0 +1,103 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Core_kernel +open Result.Monad_infix + +let spf = Printf.sprintf + +let read_proc_file filename pid = + let file = spf "/proc/%d/%s" pid filename in + try Ok (Sys_utils.cat file) + with exn -> + let exn = Exception.wrap exn in + Error (Exception.to_string exn) + +type status = { + (* The total number of bytes currently in memory used for anonymous memory *) + rss_anon: int; + (* The total number of bytes currently in memory used for file mappings *) + rss_file: int; + (* The total number of bytes currently in memory used for shared memory *) + rss_shmem: int; + (* The total number of bytes currently in memory. It should be the sum of + * rss_anon + rss_file + rss_shmem *) + rss_total: int; + (* The high water mark for the number of bytes in memory at one time *) + rss_hwm: int; +} + +(* The stats we're reading always end in kB. If we start reading more stats then we'll need to beef + * up this logic *) +let humanReadableToBytes str = + (try Scanf.sscanf str "%d kB" (fun kb -> 1000 * kb) with _ -> 0) + +let parse_status raw_status_contents = + let stats = + String.split raw_status_contents ~on:'\n' + |> List.fold_left ~init:SMap.empty ~f:(fun stats line -> + match String.split line ~on:':' with + | [raw_key; raw_stat] -> + let key = String.strip raw_key in + let stat = String.strip raw_stat in + SMap.add key stat stats + | _ -> stats) + in + { + rss_anon = + SMap.get "RssAnon" stats + |> Option.value_map ~default:0 ~f:humanReadableToBytes; + rss_file = + SMap.get "RssFile" stats + |> Option.value_map ~default:0 ~f:humanReadableToBytes; + rss_shmem = + SMap.get "RssShmem" stats + |> Option.value_map ~default:0 ~f:humanReadableToBytes; + rss_total = + SMap.get "VmRSS" stats + |> Option.value_map ~default:0 ~f:humanReadableToBytes; + rss_hwm = + SMap.get "VmHWM" stats + |> Option.value_map ~default:0 ~f:humanReadableToBytes; + } + +let parse_cgroup raw_cgroup_contents = + match String.split raw_cgroup_contents ~on:'\n' with + | [] -> Error "Expected at least one cgroup in /proc//cgroup file" + | first_line :: _ -> + begin + match String.split first_line ~on:':' with + | [_id; _controllers; cgroup] -> Ok cgroup + | _ -> + Error + "First line of /proc//cgroup file was not correctly formatted" + end + +let asset_procfs_supported = + let memoized_result = ref None in + fun () -> + match !memoized_result with + | Some supported -> supported + | None -> + let supported = + if Sys.unix && Sys.file_exists "/proc" then + Ok () + else + Error "Proc filesystem not supported" + in + memoized_result := Some supported; + supported + +let status_for_pid pid = + asset_procfs_supported () + >>= (fun () -> 
read_proc_file "status" pid >>| parse_status) + +(* In cgroup v1 a pid can be in multiple cgroups. In cgroup v2 it will only be in a single cgroup. + *) +let first_cgroup_for_pid pid = + asset_procfs_supported () + >>= (fun () -> read_proc_file "cgroup" pid >>= parse_cgroup) diff --git a/hack/utils/procfs/procFS.mli b/hack/utils/procfs/procFS.mli new file mode 100644 index 00000000000..ed5e8a24ceb --- /dev/null +++ b/hack/utils/procfs/procFS.mli @@ -0,0 +1,18 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type status = { + rss_anon: int; + rss_file: int; + rss_shmem: int; + rss_total: int; + rss_hwm: int; +} + +val status_for_pid : int -> (status, string) result + +val first_cgroup_for_pid : int -> (string, string) result diff --git a/hack/utils/promise.ml b/hack/utils/promise.ml new file mode 100644 index 00000000000..f4b75384f22 --- /dev/null +++ b/hack/utils/promise.ml @@ -0,0 +1,20 @@ +(* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +module type S = sig + type 'a t + + val return : 'a -> 'a t + (** Creates a promise that returns the given value immediately. *) + + val map : 'a t -> ('a -> 'b) -> 'b t + (** Returns a new promise that will map the result of the given one. *) + + val bind : 'a t -> ('a -> 'b t) -> 'b t + (** Returns a new promise generated from the results of the given one. *) +end diff --git a/hack/utils/random_id.ml b/hack/utils/random_id.ml deleted file mode 100644 index 2289861622b..00000000000 --- a/hack/utils/random_id.ml +++ /dev/null @@ -1,37 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -let initialized = ref false - -let base64_alphabet = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" - -let alphanumeric_alphabet = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - -let () = - assert (String.length base64_alphabet = 64) - -let short_string_with_alphabet alphabet = - (* If we haven't seeded random then do it now *) - if not !initialized then begin - initialized := true; - Random.self_init () - end; - let r = ref ((Random.bits () lsl 30) lor Random.bits ()) in - let cs = ref [] in - while !r > 0 do - let c = alphabet.[!r mod (String.length alphabet)] in - cs := String.make 1 c :: !cs; - r := !r lsr 6 - done; - String.concat "" !cs - -let short_string () = - short_string_with_alphabet base64_alphabet diff --git a/hack/utils/regexp_utils.ml b/hack/utils/regexp_utils.ml index 147092edc45..b727866d69c 100644 --- a/hack/utils/regexp_utils.ml +++ b/hack/utils/regexp_utils.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -12,6 +12,7 @@ * JS regexp library differs. *) let nonempty_ws_regexp = Str.regexp "[ \n\t\r\012]+" + (* Squash the whitespace in a string down the way that xhp expects it. * In particular, replace all whitespace with spaces and replace all * strings of multiple spaces with a single space. 
*) diff --git a/hack/utils/regexp_utils.mli b/hack/utils/regexp_utils.mli new file mode 100644 index 00000000000..d7da89c9db2 --- /dev/null +++ b/hack/utils/regexp_utils.mli @@ -0,0 +1,18 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +val nonempty_ws_regexp : Str.regexp + +val squash_whitespace : string -> string diff --git a/hack/utils/relative_path.ml b/hack/utils/relative_path.ml index 22878beef97..03a6cd11dc1 100644 --- a/hack/utils/relative_path.ml +++ b/hack/utils/relative_path.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -17,10 +17,12 @@ type prefix = | Hhi | Dummy | Tmp - +[@@deriving show, enum] let root = ref None + let hhi = ref None + let tmp = ref None let path_ref_of_prefix = function @@ -29,29 +31,39 @@ let path_ref_of_prefix = function | Tmp -> tmp | Dummy -> ref (Some "") -let path_of_prefix x = - unsafe_opt_note "Prefix has not been set!" !(path_ref_of_prefix x) - let string_of_prefix = function | Root -> "root" | Hhi -> "hhi" | Tmp -> "tmp" | Dummy -> "" +let path_of_prefix prefix = + match !(path_ref_of_prefix prefix) with + | Some path -> path + | None -> + let message = + Printf.sprintf "Prefix '%s' has not been set!" (string_of_prefix prefix) + in + raise (Invalid_argument message) + let set_path_prefix prefix v = let v = Path.to_string v in assert (String.length v > 0); + (* Ensure that there is a trailing slash *) let v = - if string_ends_with v Filename.dir_sep then v - else v ^ Filename.dir_sep + if string_ends_with v Filename.dir_sep then + v + else + v ^ Filename.dir_sep in match prefix with | Dummy -> raise (Failure "Dummy is always represented by an empty string") | _ -> path_ref_of_prefix prefix := Some v -type relative_path = prefix * string -type t = relative_path +type t = prefix * string [@@deriving show] + +type relative_path = t let prefix (p : t) = fst p @@ -63,26 +75,25 @@ let default = (Dummy, "") * better on space usage. 
*) let storage_to_string (p, rest) = string_of_prefix p ^ "|" ^ rest -let index_opt str ch = - try Some (String.index str ch) - with Not_found -> None +let index_opt str ch = (try Some (String.index str ch) with Not_found -> None) let storage_of_string str = match index_opt str '|' with | Some idx -> - let (a, a') = (0, idx) in - let b = idx + 1 in - let b' = String.length str - b in - let prefix = String.sub str a a' in - let content = String.sub str b b' in - let prefix = begin match prefix with + let (a, a') = (0, idx) in + let b = idx + 1 in + let b' = String.length str - b in + let prefix = String.sub str a a' in + let content = String.sub str b b' in + let prefix = + match prefix with | "root" -> Root | "hhi" -> Hhi | "tmp" -> Tmp | "" -> Dummy | _ -> failwith "invalid prefix" - end in - (prefix, content) + in + (prefix, content) | None -> failwith "not a Relative_path.t" module S = struct @@ -99,30 +110,47 @@ let to_tmp (_, rest) = (Tmp, rest) let to_root (_, rest) = (Root, rest) -let pp fmt rp = Format.pp_print_string fmt (S.to_string rp) +module Set = struct + include Reordered_argument_set (Set.Make (S)) -module Set = Reordered_argument_set(Set.Make(S)) -module Map = Reordered_argument_map(MyMap.Make(S)) + let pp fmt x = + Format.fprintf fmt "@[<2>{"; + ignore + @@ List.fold_left (elements x) ~init:false ~f:(fun sep s -> + if sep then Format.fprintf fmt ";@ "; + pp fmt s; + true); + Format.fprintf fmt "@,}@]" + + let show x = Format.asprintf "%a" pp x +end + +module Map = struct + include Reordered_argument_map (MyMap.Make (S)) + + let pp pp_data = make_pp pp pp_data + + let show pp_data x = Format.asprintf "%a" (pp pp_data) x +end let create prefix s = let prefix_s = path_of_prefix prefix in let prefix_len = String.length prefix_s in - if not (string_starts_with s prefix_s) - then begin + if not (string_starts_with s prefix_s) then ( Printf.eprintf "%s is not a prefix of %s" prefix_s s; - assert_false_log_backtrace None; - end; - prefix, String.sub s prefix_len (String.length s - prefix_len) + assert_false_log_backtrace None + ); + (prefix, String.sub s prefix_len (String.length s - prefix_len)) let create_detect_prefix s = let file_prefix = [Root; Hhi; Tmp] - |> List.find ~f:(fun prefix -> String_utils.string_starts_with s (path_of_prefix prefix)) - |> begin fun x -> - match x with - | Some prefix -> prefix - | None -> Dummy - end + |> List.find ~f:(fun prefix -> + String_utils.string_starts_with s (path_of_prefix prefix)) + |> fun x -> + match x with + | Some prefix -> prefix + | None -> Dummy in create file_prefix s @@ -131,15 +159,14 @@ let create_detect_prefix s = let strip_root_if_possible s = let prefix_s = path_of_prefix Root in let prefix_len = String.length prefix_s in - if not (string_starts_with s prefix_s) - then s else - String.sub s prefix_len (String.length s - prefix_len) - + if not (string_starts_with s prefix_s) then + None + else + Some (String.sub s prefix_len (String.length s - prefix_len)) -let from_root (s : string) : t = Root, s +let from_root (s : string) : t = (Root, s) let relativize_set prefix m = SSet.fold m ~init:Set.empty ~f:(fun k a -> Set.add a (create prefix k)) -let set_of_list xs = - List.fold_left xs ~f:Set.add ~init:Set.empty +let set_of_list xs = List.fold_left xs ~f:Set.add ~init:Set.empty diff --git a/hack/utils/relative_path.mli b/hack/utils/relative_path.mli index 9563d86d7db..49b39de298f 100644 --- a/hack/utils/relative_path.mli +++ b/hack/utils/relative_path.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. 
* All rights reserved. * @@ -14,39 +14,65 @@ type prefix = | Hhi | Dummy | Tmp +[@@deriving show, enum] val set_path_prefix : prefix -> Path.t -> unit + val path_of_prefix : prefix -> string module S : sig type t + val compare : t -> t -> int + val to_string : t -> string end -type t = S.t - -val pp : Format.formatter -> t -> unit +type t = S.t [@@deriving show] val default : t + (* Checks that string indeed has the given prefix before constructing path *) val create : prefix -> string -> t -(** Creates a new path, inferring the prefix. Will default to Dummy. *) + +(* Creates a new path, inferring the prefix. Will default to Dummy. *) val create_detect_prefix : string -> t + (* Creates a Relative_path.t relative to the root *) val from_root : string -> t + val prefix : t -> prefix + val suffix : t -> string + val to_absolute : t -> string + val to_tmp : t -> t + val to_root : t -> t -val strip_root_if_possible : string -> string -module Set : module type of Reordered_argument_set(Set.Make(S)) -module Map : module type of Reordered_argument_map(MyMap.Make(S)) +val strip_root_if_possible : string -> string option + +module Set : sig + include module type of Reordered_argument_set (Set.Make (S)) + + val pp : Format.formatter -> t -> unit + + val show : t -> string +end + +module Map : sig + include module type of Reordered_argument_map (MyMap.Make (S)) + + val pp : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit + + val show : (Format.formatter -> 'a -> unit) -> 'a t -> string +end val relativize_set : prefix -> SSet.t -> Set.t + val set_of_list : t list -> Set.t val storage_to_string : t -> string + val storage_of_string : string -> t diff --git a/hack/utils/scheduler.ml b/hack/utils/scheduler.ml index 741baa7662e..fc7e4aa529b 100644 --- a/hack/utils/scheduler.ml +++ b/hack/utils/scheduler.ml @@ -1,19 +1,22 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. 
* -*) + *) open Hh_core -module Make = functor (EnvType: sig type t end) -> struct +module Make (EnvType : sig + type t +end) = +struct type t = EnvType.t type job = { - priority : int; - run : t -> t; + priority: int; + run: t -> t; } type wait_handle = @@ -21,83 +24,89 @@ module Make = functor (EnvType: sig type t end) -> struct * something to do *) | Fun of (t -> bool) * job (* Job that should be run when file descriptor is ready *) - | Channel of Unix.file_descr * job + | Channel of Unix.file_descr * job type env = { - waiting_jobs : wait_handle list; - ready_jobs : job list; + waiting_jobs: wait_handle list; + ready_jobs: job list; } - let empty () = { - waiting_jobs = []; - ready_jobs = []; - } + let empty () = { waiting_jobs = []; ready_jobs = [] } let env = ref (empty ()) - let reset () = - env := empty () + let reset () = env := empty () - let rec wait_for_fun ?(once=false) ~priority is_ready f = - let f' = if once then f else begin fun job_env -> + let rec wait_for_fun ?(once = false) ~priority is_ready f = + let f' = + if once then + f + else + fun job_env -> wait_for_fun ~priority is_ready f; let job_env = f job_env in job_env - end in - let wait_handle = Fun (is_ready, { priority; run = f'}) in + in + let wait_handle = Fun (is_ready, { priority; run = f' }) in env := { !env with waiting_jobs = wait_handle :: !env.waiting_jobs } let rec wait_for_channel ~priority fd f = - let f' = begin fun env -> + let f' env = wait_for_channel ~priority fd f; f env - end in - let wait_handle = Channel (fd, { priority ; run = f'}) in + in + let wait_handle = Channel (fd, { priority; run = f' }) in env := { !env with waiting_jobs = wait_handle :: !env.waiting_jobs } let stop_waiting_for_channel fd = - let waiting_jobs = List.filter !env.waiting_jobs begin function - | Channel (x, _) -> x <> fd - | _ -> true - end in + let waiting_jobs = + List.filter !env.waiting_jobs (function + | Channel (x, _) -> x <> fd + | _ -> true) + in env := { !env with waiting_jobs } let wait_for_ready_jobs job_env = - let funs, channels = List.partition_map !env.waiting_jobs ~f:begin function - | Fun (x, y) -> `Fst (x, y) - | Channel (fd, f) -> `Snd (fd, f) - end in - - let ready_funs, waiting_funs = List.partition_map funs - ~f:begin fun (is_ready, job) -> - if is_ready job_env then `Fst job - else `Snd (Fun (is_ready, job)) end in - + let (funs, channels) = + List.partition_map !env.waiting_jobs ~f:(function + | Fun (x, y) -> `Fst (x, y) + | Channel (fd, f) -> `Snd (fd, f)) + in + let (ready_funs, waiting_funs) = + List.partition_map funs ~f:(fun (is_ready, job) -> + if is_ready job_env then + `Fst job + else + `Snd (Fun (is_ready, job))) + in let wait_time = - if ready_funs = [] && !env.ready_jobs = [] then 1.0 else 0.0 in - + if ready_funs = [] && !env.ready_jobs = [] then + 1.0 + else + 0.0 + in let fds = List.map channels ~f:fst in - let readable, _, _ = Unix.select fds [] [] wait_time in - - let ready_channels, waiting_channels = List.partition_map channels - ~f:begin fun (fd, job) -> - if List.exists readable ~f:(fun x -> x = fd) - then `Fst job else `Snd (Channel (fd, job)) end in - + let (readable, _, _) = Unix.select fds [] [] wait_time in + let (ready_channels, waiting_channels) = + List.partition_map channels ~f:(fun (fd, job) -> + if List.exists readable ~f:(fun x -> x = fd) then + `Fst job + else + `Snd (Channel (fd, job))) + in let ready_jobs = ready_funs @ ready_channels @ !env.ready_jobs in - let ready_jobs = List.sort - ready_jobs - ~cmp:(fun x y -> x.priority - y.priority) in - - env := { - 
ready_jobs; - waiting_jobs = waiting_funs @ waiting_channels; - } + let ready_jobs = + List.sort ready_jobs ~cmp:(fun x y -> x.priority - y.priority) + in + env := { ready_jobs; waiting_jobs = waiting_funs @ waiting_channels } let rec wait_and_run_ready job_env = wait_for_ready_jobs job_env; - let job = match !env.ready_jobs with - | h :: t -> env := { !env with ready_jobs = t }; Some h + let job = + match !env.ready_jobs with + | h :: t -> + env := { !env with ready_jobs = t }; + Some h | [] -> None in match job with @@ -105,5 +114,4 @@ module Make = functor (EnvType: sig type t end) -> struct let job_env = job.run job_env in wait_and_run_ready job_env | None -> job_env - end diff --git a/hack/utils/scheduler.mli b/hack/utils/scheduler.mli index 9cc3a37c4b8..b68a1da2b37 100644 --- a/hack/utils/scheduler.mli +++ b/hack/utils/scheduler.mli @@ -1,34 +1,41 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * -*) + *) -module Make : functor (EnvType: sig type t end) -> sig +module Make (EnvType : sig + type t +end) : sig type t = EnvType.t (* Remove all scheduled jobs *) - val reset: unit -> unit + val reset : unit -> unit - val wait_for_fun: - ?once:bool -> (* Should the job be removed after it's executed *) - priority:int -> - (t -> bool) -> (* The job can run when this function return true *) - (t -> t) -> (* The job to run *) + val wait_for_fun : + ?once:bool -> + priority:(* Should the job be removed after it's executed *) + int -> + (t -> bool) -> + ((* The job can run when this function return true *) + t -> t) -> + (* The job to run *) unit - val wait_for_channel: + val wait_for_channel : priority:int -> - Unix.file_descr -> (* The job can run when this fd is readable *) - (t -> t) -> (* The job to run *) + Unix.file_descr -> + ((* The job can run when this fd is readable *) + t -> t) -> + (* The job to run *) unit - val stop_waiting_for_channel: + val stop_waiting_for_channel : Unix.file_descr -> (* fd that was passed to wait_for_channel before *) - unit + unit - val wait_and_run_ready: t -> t + val wait_and_run_ready : t -> t end diff --git a/hack/utils/stack_utils.ml b/hack/utils/stack_utils.ml index 2c376c32649..0ae228ed3f2 100644 --- a/hack/utils/stack_utils.ml +++ b/hack/utils/stack_utils.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -13,7 +13,9 @@ module Ocaml_stack = Stack module Stack = struct include Ocaml_stack - let merge_bytes : string Stack.t -> string = function stack -> - let strs = Stack.fold (fun acc x -> x :: acc) [] stack in - String.concat "" strs + + let merge_bytes : string Stack.t -> string = function + | stack -> + let strs = Stack.fold (fun acc x -> x :: acc) [] stack in + String.concat "" strs end diff --git a/hack/utils/stack_utils.mli b/hack/utils/stack_utils.mli new file mode 100644 index 00000000000..6dec2378381 --- /dev/null +++ b/hack/utils/stack_utils.mli @@ -0,0 +1,44 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. 
If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +module Ocaml_stack = Stack + +module Stack : sig + type 'a t = 'a Stack.t + + exception Empty + + val create : unit -> 'a t + + val push : 'a -> 'a t -> unit + + val pop : 'a t -> 'a + + val top : 'a t -> 'a + + val clear : 'a t -> unit + + val copy : 'a t -> 'a t + + val is_empty : 'a t -> bool + + val length : 'a t -> int + + val iter : ('a -> unit) -> 'a t -> unit + + val fold : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b + + val merge_bytes : string Stack.t -> string +end diff --git a/hack/utils/stats.ml b/hack/utils/stats.ml deleted file mode 100644 index 0b5e8e5267e..00000000000 --- a/hack/utils/stats.ml +++ /dev/null @@ -1,43 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -(* Not all stats are worth logging for every user. Things like the initial heap - * size are pretty deterministic if you know the input (i.e. the files being - * checked). In fact, it's *only* useful information if you know the input. - * This file is for storing these types of stats: Things that would be useful - * for a benchmark script to know, so it can say "for these inputs, under these - * conditions, here's how hh_server behaves". - *) -type t = { - mutable init_parsing_heap_size : int; - mutable init_heap_size : int; - mutable max_heap_size : int; - gc_stat : Gc.stat; -} - -let stats : t = { - init_parsing_heap_size = 0; - init_heap_size = 0; - max_heap_size = 0; - gc_stat = Gc.quick_stat (); -} - -let get_stats () = {stats with gc_stat = Gc.quick_stat ()} - -let update_max_heap_size x = - stats.max_heap_size <- max stats.max_heap_size x - -let to_json stats = - Hh_json.JSON_Object [ - ("init_parsing_heap_size", Hh_json.int_ stats.init_parsing_heap_size); - ("init_shared_heap_size", Hh_json.int_ stats.init_heap_size); - ("max_shared_heap_size", Hh_json.int_ stats.max_heap_size); - ("master_heap_words", Hh_json.int_ stats.gc_stat.Gc.heap_words); - ("master_top_heap_words", Hh_json.int_ stats.gc_stat.Gc.top_heap_words); - ] diff --git a/hack/utils/string/dune b/hack/utils/string/dune new file mode 100644 index 00000000000..9b6688f6206 --- /dev/null +++ b/hack/utils/string/dune @@ -0,0 +1,4 @@ +(library + (name string) + (wrapped false) + (libraries str)) diff --git a/hack/utils/string/string_utils.ml b/hack/utils/string/string_utils.ml new file mode 100644 index 00000000000..6872f6f3d4d --- /dev/null +++ b/hack/utils/string/string_utils.ml @@ -0,0 +1,312 @@ +(* + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. 
+ * + *) + +exception Incorrect_format + +let soi = string_of_int + +let string_of_char = String.make 1 + +let string_before s n = String.sub s 0 n + +let string_after s n = String.sub s n (String.length s - n) + +let string_starts_with long short = + try + let long = String.sub long 0 (String.length short) in + long = short + with Invalid_argument _ -> false + +let string_ends_with long short = + try + let len = String.length short in + let long = String.sub long (String.length long - len) len in + long = short + with Invalid_argument _ -> false + +(* Returns the index of the first occurrence of string `needle` in string + `haystack`. If not found, returns -1. + + An implementation of the Knuth-Morris-Pratt (KMP) algorithm. *) +let substring_index needle = + (* see Wikipedia pseudocode *) + let needle_len = String.length needle in + if needle_len = 0 then raise (Invalid_argument needle); + let table = Array.make needle_len 0 in + table.(0) <- -1; + let pos = ref 2 and cnd = ref 0 in + while !pos < needle_len do + if needle.[!pos - 1] = needle.[!cnd] then ( + table.(!pos) <- !cnd + 1; + incr pos; + incr cnd + ) else if !cnd > 0 then + cnd := table.(!cnd) + else ( + table.(!pos) <- 0; + incr pos + ) + done; + fun haystack -> + let len = String.length haystack in + let p = ref 0 in + let q = ref 0 in + while !p < len && !q < needle_len do + if haystack.[!p] = needle.[!q] then ( + incr p; + incr q + ) else if !q = 0 then + incr p + else + q := table.(!q) + done; + if !q >= needle_len then + !p - needle_len + else + -1 + +let is_substring needle = + let substring_index_memo = substring_index needle in + (fun haystack -> substring_index_memo haystack >= 0) + +(* Return a copy of the string with prefixing string removed. + * The function is a no-op if it s does not start with prefix. + * Modeled after Python's string.lstrip. + *) +let lstrip s prefix = + let prefix_length = String.length prefix in + if string_starts_with s prefix then + String.sub s prefix_length (String.length s - prefix_length) + else + s + +let rstrip s suffix = + let result_length = String.length s - String.length suffix in + if string_ends_with s suffix then + String.sub s 0 result_length + else + s + +let rpartition s c = + let sep_idx = String.rindex s c in + let first = String.sub s 0 sep_idx in + let second = String.sub s (sep_idx + 1) (String.length s - sep_idx - 1) in + (first, second) + +(** If s is longer than length len, return a copy of s truncated to length len. *) +let truncate len s = + if String.length s <= len then + s + else + String.sub s 0 len + +(** [index_not_from_opt str i chars] is like [index_from_opt], but returns the index of the first + char in [str] after position [i] that is not in [chars] if it exists, or [None] otherwise. *) +let index_not_from_opt = + let rec helper i len str chars = + if i = len then + None + else if not (String.contains chars str.[i]) then + Some i + else + helper (i + 1) len str chars + in + (fun str i chars -> helper i (String.length str) str chars) + +(** [index_not_opt str chars] is like [index_opt], but returns the index of the first char in + [str] that is not in [chars] if it exists, or [None] otherwise. *) +let index_not_opt str chars = index_not_from_opt str 0 chars + +(** [rindex_not_from_opt str i chars] is like [rindex_from_opt], but returns the index of the last + char in [str] before position [i+1] that is not in [chars] if it exists, or [None] otherwise. 
*) +let rec rindex_not_from_opt str i chars = + if i < 0 then + None + else if not (String.contains chars str.[i]) then + Some i + else + rindex_not_from_opt str (i - 1) chars + +(** [rindex_not_opt str chars] is like [rindex_opt], but returns the index of the last char in + [str] that is not in [chars] if it exists, or [None] otherwise. *) +let rindex_not_opt str chars = + rindex_not_from_opt str (String.length str - 1) chars + +let (zero_code, nine_code) = (Char.code '0', Char.code '9') + +let is_decimal_digit chr = + let code = Char.code chr in + zero_code <= code && code <= nine_code + +let is_lowercase_char = + let (a_code, z_code) = (Char.code 'a', Char.code 'z') in + fun chr -> + let code = Char.code chr in + a_code <= code && code <= z_code + +let rec is_not_lowercase str i j = + if is_lowercase_char str.[i] then + false + else if i = j then + true + else + is_not_lowercase str (i + 1) j + +(* String provides map and iter but not fold. It also is missing a char_list_of + * function. Oh well. You can use fold to simulate anything you need, I suppose + *) +let fold_left ~f ~acc str = + let acc = ref acc in + String.iter (fun c -> acc := f !acc c) str; + !acc + +let split c = Str.split (Str.regexp @@ Char.escaped c) + +let split2 c s = + let parts = split c s in + match parts with + | [first; second] -> Some (first, second) + | _ -> None + +let split2_exn c s = + match split2 c s with + | Some s -> s + | None -> raise Incorrect_format + +(* Replaces all instances of the needle character with the replacement character + *) +let replace_char needle replacement = + String.map (fun c -> + if c = needle then + replacement + else + c) + +(* Splits a string into a list of strings using "\n", "\r" or "\r\n" as + * delimiters. If the string starts or ends with a delimiter, there WILL be an + * empty string at the beginning or end of the list, like Str.split_delim does + *) +let split_into_lines str = + (* To avoid unnecessary string allocations, we're going to keep a list of + * the start index of each line and how long it is. Then, at the end, we can + * use String.sub to create the actual strings. *) + let (_, (last_start, lines)) = + fold_left + ~f:(fun (idx, (start, lines)) c -> + (* For \r\n, we've already processed the newline *) + if c = '\n' && idx > 0 && str.[idx - 1] = '\r' then + (idx + 1, (idx + 1, lines)) + else if c = '\n' || c = '\r' then + (idx + 1, (idx + 1, (start, idx - start) :: lines)) + else + (idx + 1, (start, lines))) + ~acc:(0, (0, [])) + str + in + (* Reverses the list of start,len and turns them into strings *) + List.fold_left + (fun lines (start, len) -> String.sub str start len :: lines) + [] + ((last_start, String.length str - last_start) :: lines) + +(* Splits a string into lines, indents each non-empty line, and concats with newlines *) +let indent indent_size str = + let padding = String.make indent_size ' ' in + str + |> split_into_lines + |> List.map (fun str -> + if str = "" then + "" + else + padding ^ str) + |> String.concat "\n" + +(* Splits a string into a list of strings using only "\n" as a delimiter. + * If the string ends with a delimiter, an empty string representing the + * contents after the final delimiter is NOT included (unlike Str.split_delim). 
+ *) +let split_on_newlines content = + let re = Str.regexp "[\n]" in + let lines = Str.split_delim re content in + (* don't create a list entry for the line after a trailing newline *) + match List.rev lines with + | "" :: rest -> List.rev rest + | _ -> lines + +module Internal = struct + let to_list s = + let rec loop acc i = + if i < 0 then + acc + else + (loop [@tailcall]) (s.[i] :: acc) (i - 1) + in + loop [] (String.length s - 1) + + let of_list l = + let s = Bytes.create (List.length l) in + List.iteri (Bytes.set s) l; + Bytes.unsafe_to_string s +end + +let to_list = Internal.to_list + +let of_list = Internal.of_list + +module CharSet = struct + include Set.Make (Char) + + let of_string str = of_list (Internal.to_list str) + + let to_string set = Internal.of_list (elements set) +end + +(* Levenshtein distance algorithm. + +Based on the public domain implementation at +https://bitbucket.org/camlspotter/ocaml_levenshtein/src/default/ + + *) + +let levenshtein_distance (xs : string) (ys : string) = + let min3 (x : int) y z = + let m' (a : int) b = + if a < b then + a + else + b + in + m' (m' x y) z + in + let cache = + Array.init + (String.length xs + 1) + (fun _ -> Array.make (String.length ys + 1) (-1)) + in + let rec d i j = + match (i, j) with + | (0, _) -> j + | (_, 0) -> i + | _ -> + let cache_i = Array.unsafe_get cache i in + (match Array.unsafe_get cache_i j with + | -1 -> + let res = + let i' = i - 1 in + let j' = j - 1 in + min3 + (d i' j + 1) + (d i j' + 1) + (d i' j' + abs (compare xs.[i'] ys.[j'])) + in + Array.unsafe_set cache_i j res; + res + | res -> res) + in + d (String.length xs) (String.length ys) diff --git a/hack/utils/string/string_utils.mli b/hack/utils/string/string_utils.mli new file mode 100644 index 00000000000..768a7170172 --- /dev/null +++ b/hack/utils/string/string_utils.mli @@ -0,0 +1,166 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
*) + +exception Incorrect_format + +val soi : int -> string + +val string_of_char : char -> string + +val string_before : string -> int -> string + +val string_after : string -> int -> string + +val string_starts_with : string -> string -> bool + +val string_ends_with : string -> string -> bool + +val substring_index : string -> string -> int + +val is_substring : string -> string -> bool + +val lstrip : string -> string -> string + +val rstrip : string -> string -> string + +val rpartition : string -> char -> string * string + +val truncate : int -> string -> string + +val index_not_from_opt : string -> int -> string -> int option + +val index_not_opt : string -> string -> int option + +val rindex_not_from_opt : string -> int -> string -> int option + +val rindex_not_opt : string -> string -> int option + +val zero_code : int + +val nine_code : int + +val is_decimal_digit : char -> bool + +val is_lowercase_char : char -> bool + +val is_not_lowercase : string -> int -> int -> bool + +val fold_left : f:('a -> char -> 'a) -> acc:'a -> string -> 'a + +val split : char -> string -> string list + +val split2 : char -> string -> (string * string) option + +val split2_exn : char -> string -> string * string + +val replace_char : char -> char -> string -> string + +val split_into_lines : string -> string list + +val indent : int -> string -> string + +val split_on_newlines : string -> string list + +module Internal : sig + val to_list : string -> char list + + val of_list : char list -> string +end + +val to_list : string -> char list + +val of_list : char list -> string + +module CharSet : sig + type elt = Char.t + + type t = Set.Make(Char).t + + val empty : t + + val is_empty : t -> bool + + val mem : elt -> t -> bool + + val add : elt -> t -> t + + val singleton : elt -> t + + val remove : elt -> t -> t + + val union : t -> t -> t + + val inter : t -> t -> t + + val diff : t -> t -> t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val subset : t -> t -> bool + + val iter : (elt -> unit) -> t -> unit + + val map : (elt -> elt) -> t -> t + + val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a + + val for_all : (elt -> bool) -> t -> bool + + val exists : (elt -> bool) -> t -> bool + + val filter : (elt -> bool) -> t -> t + + val partition : (elt -> bool) -> t -> t * t + + val cardinal : t -> int + + val elements : t -> elt list + + val min_elt : t -> elt + + val min_elt_opt : t -> elt option + + val max_elt : t -> elt + + val max_elt_opt : t -> elt option + + val choose : t -> elt + + val choose_opt : t -> elt option + + val split : elt -> t -> t * bool * t + + val find : elt -> t -> elt + + val find_opt : elt -> t -> elt option + + val find_first : (elt -> bool) -> t -> elt + + val find_first_opt : (elt -> bool) -> t -> elt option + + val find_last : (elt -> bool) -> t -> elt + + val find_last_opt : (elt -> bool) -> t -> elt option + + val of_list : elt list -> t + + val of_string : string -> t + + val to_string : t -> string +end + +val levenshtein_distance : string -> string -> int diff --git a/hack/utils/string_utils.ml b/hack/utils/string_utils.ml deleted file mode 100644 index 83a469824a3..00000000000 --- a/hack/utils/string_utils.ml +++ /dev/null @@ -1,256 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * - *) - -exception Incorrect_format - -let soi = string_of_int -let string_of_char = String.make 1 - -let string_before s n = String.sub s 0 n -let string_after s n = String.sub s n (String.length s - n) - -let string_starts_with long short = - try - let long = String.sub long 0 (String.length short) in - long = short - with Invalid_argument _ -> - false - -let string_ends_with long short = - try - let len = String.length short in - let long = String.sub long (String.length long - len) len in - long = short - with Invalid_argument _ -> - false - -(* Returns the index of the first occurrence of string `needle` in string - `haystack`. If not found, returns -1. - - An implementation of the Knuth-Morris-Pratt (KMP) algorithm. *) -let substring_index needle = - (* see Wikipedia pseudocode *) - let needle_len = String.length needle in - if needle_len = 0 then raise (Invalid_argument needle); - let table = Array.make needle_len 0 in - table.(0) <- (-1); - let pos = ref 2 and cnd = ref 0 in - while !pos < needle_len do - if needle.[!pos - 1] = needle.[!cnd] then - (table.(!pos) <- !cnd + 1; incr pos; incr cnd) - else if !cnd > 0 then - cnd := table.(!cnd) - else - (table.(!pos) <- 0; incr pos) - done; - fun haystack -> - let len = String.length haystack in - let p = ref 0 in - let q = ref 0 in - while !p < len && !q < needle_len do - if haystack.[!p] = needle.[!q] then (incr p; incr q) - else if !q = 0 then incr p - else q := table.(!q) - done; - if !q >= needle_len then !p - needle_len - else -1 - -let is_substring needle = - let substring_index_memo = substring_index needle in - fun haystack -> (substring_index_memo haystack) >= 0 - -(* Return a copy of the string with prefixing string removed. - * The function is a no-op if it s does not start with prefix. - * Modeled after Python's string.lstrip. - *) -let lstrip s prefix = - let prefix_length = String.length prefix in - if string_starts_with s prefix - then String.sub s prefix_length (String.length s - prefix_length) - else s - -let rstrip s suffix = - let result_length = String.length s - String.length suffix in - if string_ends_with s suffix - then String.sub s 0 result_length - else s - -let rpartition s c = - let sep_idx = String.rindex s c in - let first = String.sub s 0 sep_idx in - let second = - String.sub s (sep_idx + 1) (String.length s - sep_idx - 1) in - first, second - -(** If s is longer than length len, return a copy of s truncated to length len. *) -let truncate len s = - if String.length s <= len then - s - else - String.sub s 0 len - -(** [index_not_from_opt str i chars] is like [index_from_opt], but returns the index of the first - char in [str] after position [i] that is not in [chars] if it exists, or [None] otherwise. *) -let index_not_from_opt = - let rec helper i len str chars = - if i = len then None - else if not (String.contains chars str.[i]) then Some i - else helper (i + 1) len str chars - in - fun str i chars -> - helper i (String.length str) str chars - - -(** [index_not_opt str chars] is like [index_opt], but returns the index of the first char in - [str] that is not in [chars] if it exists, or [None] otherwise. *) -let index_not_opt str chars = index_not_from_opt str 0 chars - - -(** [rindex_not_from_opt str i chars] is like [rindex_from_opt], but returns the index of the last - char in [str] before position [i+1] that is not in [chars] if it exists, or [None] otherwise. 
*) -let rec rindex_not_from_opt str i chars = - if i < 0 then None - else if not (String.contains chars str.[i]) then Some i - else rindex_not_from_opt str (i - 1) chars - - -(** [rindex_not_opt str chars] is like [rindex_opt], but returns the index of the last char in - [str] that is not in [chars] if it exists, or [None] otherwise. *) -let rindex_not_opt str chars = rindex_not_from_opt str (String.length str - 1) chars - - -let zero_code, nine_code = Char.code '0', Char.code '9' - -let is_decimal_digit = - fun chr -> - let code = Char.code chr in - zero_code <= code && code <= nine_code - -let is_lowercase_char = - let a_code, z_code = Char.code 'a', Char.code 'z' in - fun chr -> - let code = Char.code chr in - a_code <= code && code <= z_code - -let rec is_not_lowercase str i j = - if is_lowercase_char str.[i] then false - else if i = j then true - else is_not_lowercase str (i + 1) j - -(* String provides map and iter but not fold. It also is missing a char_list_of - * function. Oh well. You can use fold to simulate anything you need, I suppose - *) -let fold_left ~f ~acc str = - let acc = ref acc in - String.iter (fun c -> acc := f (!acc) c) str; - !acc - -let split c = Str.split (Str.regexp @@ Char.escaped c) - -let split2 c s = - let parts = split c s in - match parts with - | [first; second] -> Some (first, second) - | _ -> None -let split2_exn c s = - match split2 c s with - | Some s -> s - | None -> raise Incorrect_format - -(* Replaces all instances of the needle character with the replacement character - *) -let replace_char needle replacement = - String.map (fun c -> if c = needle then replacement else c) - -(* Splits a string into a list of strings using "\n", "\r" or "\r\n" as - * delimiters. If the string starts or ends with a delimiter, there WILL be an - * empty string at the beginning or end of the list, like Str.split_delim does - *) -let split_into_lines str = - (* To avoid unnecessary string allocations, we're going to keep a list of - * the start index of each line and how long it is. Then, at the end, we can - * use String.sub to create the actual strings. *) - let _, (last_start, lines) = fold_left - ~f: (fun (idx, (start, lines)) c -> - (* For \r\n, we've already processed the newline *) - if c = '\n' && idx > 0 && String.get str (idx-1) = '\r' - then idx+1, (idx+1, lines) - else - if c = '\n' || c = '\r' - then idx+1, (idx+1, (start, idx-start)::lines) - else idx+1, (start, lines) - ) - ~acc:(0, (0, [])) - str - in - - (* Reverses the list of start,len and turns them into strings *) - List.fold_left - (fun lines (start, len) -> (String.sub str start len)::lines) - [] - ((last_start, String.length str - last_start)::lines) - -(* Splits a string into lines, indents each non-empty line, and concats with newlines *) -let indent indent_size str = - let padding = String.make indent_size ' ' in - str - |> split_into_lines - |> List.map (fun str -> if str = "" then "" else (padding ^ str)) - |> String.concat "\n" - - -(* Splits a string into a list of strings using only "\n" as a delimiter. - * If the string ends with a delimiter, an empty string representing the - * contents after the final delimiter is NOT included (unlike Str.split_delim). 
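A short sketch of the line helpers defined above: split_into_lines treats "\r\n" as a single delimiter and, as documented, a trailing delimiter produces a trailing empty string, while indent leaves empty lines unpadded (example strings are illustrative):

let () =
  let lines = String_utils.split_into_lines "first\r\nsecond\nthird" in
  assert (lines = ["first"; "second"; "third"]);
  assert (String_utils.split_into_lines "a\n" = ["a"; ""]);
  print_endline (String_utils.indent 2 "line one\n\nline two")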
- *) -let split_on_newlines content = - let re = Str.regexp "[\n]" in - let lines = Str.split_delim re content in - (* don't create a list entry for the line after a trailing newline *) - match List.rev lines with - | "" :: rest -> List.rev rest - | _ -> lines - - -(* TODO: remove after upgrading to ocaml 4.05 *) -let split_on_char sep s = - let open String in - let r = ref [] in - let j = ref (length s) in - for i = length s - 1 downto 0 do - if unsafe_get s i = sep then begin - r := sub s (i + 1) (!j - i - 1) :: !r; - j := i - end - done; - sub s 0 !j :: !r - - -module Internal = struct - let to_list s = - let rec loop acc i = - if i < 0 then acc - else (loop [@tailcall]) (s.[i] :: acc) (i - 1) - in - loop [] (String.length s - 1) - - let of_list l = - let s = Bytes.create (List.length l) in - List.iteri (Bytes.set s) l; - Bytes.unsafe_to_string s -end - -let to_list = Internal.to_list -let of_list = Internal.of_list - -module CharSet = struct - include Set.Make(Char) - let of_string str = of_list (Internal.to_list str) - let to_string set = Internal.of_list (elements set) -end diff --git a/hack/utils/symbolDefinition.ml b/hack/utils/symbolDefinition.ml index 03b093222b1..dc64ff02b74 100644 --- a/hack/utils/symbolDefinition.ml +++ b/hack/utils/symbolDefinition.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -22,6 +22,7 @@ type kind = | Typeconst | Param | Typedef + | RecordDef and modifier = | Final @@ -33,35 +34,48 @@ and modifier = | Async | Inout +and reactivity_attributes = + | Rx + | Shallow + | Local + | Nonreactive + | OnlyRxIfImpl + | AtMostRxAsArgs + and 'a t = { - kind : kind; - name : string; - full_name : string; - id : string option; - pos : 'a Pos.pos; (* covers the span of just the identifier *) - span : 'a Pos.pos; (* covers the span of the entire construct, including children *) - modifiers : modifier list; - children : 'a t list option; - params : 'a t list option; - docblock : string option; + kind: kind; + name: string; + full_name: string; + id: string option; + pos: 'a Pos.pos; + (* covers the span of just the identifier *) + span: 'a Pos.pos; + (* covers the span of the entire construct, including children *) + modifiers: modifier list; + children: 'a t list option; + params: 'a t list option; + docblock: string option; + reactivity_attributes: reactivity_attributes list; } -let rec to_absolute x = { - x with - pos = Pos.to_absolute x.pos; - span = Pos.to_absolute x.span; - children = Option.map x.children (fun x -> List.map x to_absolute); - params = Option.map x.params (fun x -> List.map x to_absolute); - docblock = x.docblock; -} +let rec to_absolute x = + { + x with + pos = Pos.to_absolute x.pos; + span = Pos.to_absolute x.span; + children = Option.map x.children (fun x -> List.map x to_absolute); + params = Option.map x.params (fun x -> List.map x to_absolute); + docblock = x.docblock; + } -let rec to_relative x = { - x with - pos = Pos.to_relative x.pos; - span = Pos.to_relative x.span; - children = Option.map x.children (fun x -> List.map x to_relative); - params = Option.map x.params (fun x -> List.map x to_relative); -} +let rec to_relative x = + { + x with + pos = Pos.to_relative x.pos; + span = Pos.to_relative x.span; + children = Option.map x.children (fun x -> List.map x to_relative); + params = Option.map x.params (fun x -> List.map x to_relative); + } let string_of_kind = function | Function -> "function" @@ -76,6 +90,7 @@ let string_of_kind = function | LocalVar -> "local" | Param -> "param" | Typedef -> 
"typedef" + | RecordDef -> "record" let string_of_modifier = function | Final -> "final" @@ -87,24 +102,46 @@ let string_of_modifier = function | Async -> "async" | Inout -> "inout" +let string_of_reactivity_attribute = function + | Rx -> "reactive" + | Shallow -> "shallow" + | Local -> "local" + | Nonreactive -> "non_reactive" + | OnlyRxIfImpl -> "only_rx_if_impl" + | AtMostRxAsArgs -> "at_most_rx_as_args" + let function_kind_name = "function" + let type_id_kind_name = "type_id" + let method_kind_name = "method" + let property_kind_name = "property" + let class_const_kind_name = "class_const" let get_symbol_id kind parent_class name = - let prefix = match kind with + let prefix = + match kind with | Function -> Some function_kind_name - | Class | Typedef | Enum | Interface | Trait -> Some type_id_kind_name + | Class + | Typedef + | Enum + | Interface + | Trait + | RecordDef -> + Some type_id_kind_name | Method -> Some method_kind_name | Property -> Some property_kind_name - | Typeconst | Const -> Some class_const_kind_name - | LocalVar | Param -> None + | Typeconst + | Const -> + Some class_const_kind_name + | LocalVar + | Param -> + None in - match prefix, parent_class with - | Some prefix, Some parent_class -> - Some (Printf.sprintf "%s::%s::%s" prefix parent_class name) - | Some prefix, None -> - Some (Printf.sprintf "%s::%s" prefix name) - | None, _ -> None + match (prefix, parent_class) with + | (Some prefix, Some parent_class) -> + Some (Printf.sprintf "%s::%s::%s" prefix parent_class name) + | (Some prefix, None) -> Some (Printf.sprintf "%s::%s" prefix name) + | (None, _) -> None diff --git a/hack/utils/symbolDefinition.mli b/hack/utils/symbolDefinition.mli new file mode 100644 index 00000000000..d044b0d4ef2 --- /dev/null +++ b/hack/utils/symbolDefinition.mli @@ -0,0 +1,85 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
*) + +type kind = + | Function + | Class + | Method + | Property + | Const + | Enum + | Interface + | Trait + | LocalVar + | Typeconst + | Param + | Typedef + | RecordDef + +and modifier = + | Final + | Static + | Abstract + | Private + | Public + | Protected + | Async + | Inout + +and reactivity_attributes = + | Rx + | Shallow + | Local + | Nonreactive + | OnlyRxIfImpl + | AtMostRxAsArgs + +and 'a t = { + kind: kind; + name: string; + full_name: string; + id: string option; + pos: 'a Pos.pos; + (* covers the span of just the identifier *) + span: 'a Pos.pos; + (* covers the span of the entire construct, including children *) + modifiers: modifier list; + children: 'a t list option; + params: 'a t list option; + docblock: string option; + reactivity_attributes: reactivity_attributes list; +} + +val to_absolute : Relative_path.t t -> string t + +val to_relative : string t -> Relative_path.t t + +val string_of_kind : kind -> string + +val string_of_modifier : modifier -> string + +val string_of_reactivity_attribute : reactivity_attributes -> string + +val function_kind_name : string + +val type_id_kind_name : string + +val method_kind_name : string + +val property_kind_name : string + +val class_const_kind_name : string + +val get_symbol_id : kind -> string option -> string -> string option diff --git a/hack/utils/symbolOccurrence.ml b/hack/utils/symbolOccurrence.ml index f2eb3d62e19..abfcb80e2ae 100644 --- a/hack/utils/symbolOccurrence.ml +++ b/hack/utils/symbolOccurrence.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -9,6 +9,7 @@ type kind = | Class + | Record | Function | Method of string * string | LocalVar @@ -18,19 +19,18 @@ type kind = | GConst type 'a t = { - name: string; + name: string; type_: kind; is_declaration: bool; (* Span of the symbol itself *) pos: 'a Pos.pos; } -let to_absolute x = { x with - pos = Pos.to_absolute x.pos; -} +let to_absolute x = { x with pos = Pos.to_absolute x.pos } let kind_to_string = function | Class -> "type_id" + | Record -> "record" | Method _ -> "method" | Function -> "function" | LocalVar -> "local" @@ -44,18 +44,23 @@ let enclosing_class occurrence = | Method (c, _) | Property (c, _) | ClassConst (c, _) - | Typeconst (c, _) -> Some c + | Typeconst (c, _) -> + Some c | _ -> None let get_class_name occurrence = match enclosing_class occurrence with | Some _ as res -> res | None -> - if occurrence.type_ = Class then Some occurrence.name else None + if occurrence.type_ = Class then + Some occurrence.name + else + None let is_constructor occurrence = match occurrence.type_ with - | Method (_, name) when name = Naming_special_names.Members.__construct -> true + | Method (_, name) when name = Naming_special_names.Members.__construct -> + true | _ -> false let is_class occurrence = diff --git a/hack/utils/symbolOccurrence.mli b/hack/utils/symbolOccurrence.mli new file mode 100644 index 00000000000..be8d22bdf06 --- /dev/null +++ b/hack/utils/symbolOccurrence.mli @@ -0,0 +1,45 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. 
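A sketch of the occurrence helpers above; Pos.none is assumed here purely to build a throwaway value, and the class name is illustrative:

let occurrence =
  { SymbolOccurrence.name = "\\Foo"; type_ = SymbolOccurrence.Class;
    is_declaration = false; pos = Pos.none }

let () =
  assert (SymbolOccurrence.kind_to_string SymbolOccurrence.Record = "record");
  (* A bare class occurrence reports itself as the class name. *)
  assert (SymbolOccurrence.get_class_name occurrence = Some "\\Foo");
  assert (not (SymbolOccurrence.is_constructor occurrence))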
*) + +type kind = + | Class + | Record + | Function + | Method of string * string + | LocalVar + | Property of string * string + | ClassConst of string * string + | Typeconst of string * string + | GConst + +type 'a t = { + name: string; + type_: kind; + is_declaration: bool; + (* Span of the symbol itself *) + pos: 'a Pos.pos; +} + +val to_absolute : Relative_path.t t -> string t + +val kind_to_string : kind -> string + +val enclosing_class : 'a t -> string option + +val get_class_name : 'a t -> string option + +val is_constructor : 'a t -> bool + +val is_class : 'a t -> bool diff --git a/hack/utils/sys/daemon.ml b/hack/utils/sys/daemon.ml index 26ed363ff79..56820792df5 100644 --- a/hack/utils/sys/daemon.ml +++ b/hack/utils/sys/daemon.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -8,13 +8,14 @@ *) type 'a in_channel = Timeout.in_channel + type 'a out_channel = Pervasives.out_channel type ('in_, 'out) channel_pair = 'in_ in_channel * 'out out_channel type ('in_, 'out) handle = { - channels : ('in_, 'out) channel_pair; - pid : int; + channels: ('in_, 'out) channel_pair; + pid: int; } (* Windows: ensure that the serialize/deserialize functions @@ -22,14 +23,17 @@ type ('in_, 'out) handle = { let () = Lazy.force Handle.init let to_channel : - 'a out_channel -> ?flags:Marshal.extern_flags list -> ?flush:bool -> - 'a -> unit = - fun oc ?(flags = []) ?flush:(should_flush=true) v -> - Marshal.to_channel oc v flags; - if should_flush then flush oc - -let from_channel : ?timeout:Timeout.t -> 'a in_channel -> 'a = fun ?timeout ic -> - Timeout.input_value ?timeout ic + 'a out_channel -> + ?flags:Marshal.extern_flags list -> + ?flush:bool -> + 'a -> + unit = + fun oc ?(flags = []) ?flush:(should_flush = true) v -> + Marshal.to_channel oc v flags; + if should_flush then flush oc + +let from_channel : ?timeout:Timeout.t -> 'a in_channel -> 'a = + (fun ?timeout ic -> Timeout.input_value ?timeout ic) let flush : 'a out_channel -> unit = Pervasives.flush @@ -40,6 +44,7 @@ let descr_of_out_channel : 'a out_channel -> Unix.file_descr = Unix.descr_of_out_channel let cast_in ic = ic + let cast_out oc = oc (* We cannot fork() on Windows, so in order to emulate this in a @@ -54,42 +59,49 @@ let cast_out oc = oc * altogether and dispense with this emulation. *) module Entry : sig - (* All the 'untyped' operations---that are required for the entry-points hashtable and the parameters stored in env variable---are hidden in this sub-module, behind a 'type-safe' interface. 
*) type ('param, 'input, 'output) t - val name_of_entry: ('param, 'input, 'output) t -> string - val register: - string -> ('param -> ('input, 'output) channel_pair -> unit) -> + + val name_of_entry : ('param, 'input, 'output) t -> string + + val register : + string -> + ('param -> ('input, 'output) channel_pair -> unit) -> ('param, 'input, 'output) t - val find: + + val find : + ('param, 'input, 'output) t -> + 'param -> + ('input, 'output) channel_pair -> + unit + + val set_context : ('param, 'input, 'output) t -> 'param -> - ('input, 'output) channel_pair -> unit - val set_context: - ('param, 'input, 'output) t -> 'param -> Unix.file_descr * Unix.file_descr -> unit - val get_context: + + val get_context : unit -> - (('param, 'input, 'output) t * 'param * ('input, 'output) channel_pair) - val clear_context: - unit -> unit + ('param, 'input, 'output) t * 'param * ('input, 'output) channel_pair + val clear_context : unit -> unit end = struct - type ('param, 'input, 'output) t = string let name_of_entry name = name (* Store functions as 'Obj.t' *) let entry_points : (string, Obj.t) Hashtbl.t = Hashtbl.create 23 + let register name f = if Hashtbl.mem entry_points name then - Printf.ksprintf failwith + Printf.ksprintf + failwith "Daemon.register_entry_point: duplicate entry point %S." name; Hashtbl.add entry_points name (Obj.repr f); @@ -97,18 +109,18 @@ end = struct let find name = try Obj.obj (Hashtbl.find entry_points name) - with Not_found -> - Printf.ksprintf failwith - "Unknown entry point %S" name + with Not_found -> Printf.ksprintf failwith "Unknown entry point %S" name let set_context entry param (ic, oc) = let data = (ic, oc, param) in Unix.putenv "HH_SERVER_DAEMON" entry; - let file, oc = + let (file, oc) = Filename.open_temp_file ~mode:[Open_binary] ~temp_dir:Sys_utils.temp_dir_name - "daemon_param" ".bin" in + "daemon_param" + ".bin" + in Marshal.to_channel oc data [Marshal.Closures]; close_out oc; Unix.putenv "HH_SERVER_DAEMON_PARAM" file @@ -134,21 +146,22 @@ end = struct Sys.remove file; res with exn -> - failwith ("Can't find daemon parameters: " ^ (Printexc.to_string exn)) in - (entry, param, - (Timeout.in_channel_of_descr in_handle, - Unix.out_channel_of_descr out_handle)) + failwith ("Can't find daemon parameters: " ^ Printexc.to_string exn) + in + ( entry, + param, + ( Timeout.in_channel_of_descr in_handle, + Unix.out_channel_of_descr out_handle ) ) let clear_context () = Unix.putenv "HH_SERVER_DAEMON" ""; - Unix.putenv "HH_SERVER_DAEMON_PARAM" ""; - + Unix.putenv "HH_SERVER_DAEMON_PARAM" "" end type ('param, 'input, 'output) entry = ('param, 'input, 'output) Entry.t let exec entry param ic oc = - (** + (* * The name "exec" is a bit of a misnomer. By the time we * get here, the "exec" syscall has already finished and the * process image has been replaced. 
We're using "exec" here to mean @@ -160,7 +173,9 @@ let exec entry param ic oc = let () = Unix.set_close_on_exec (descr_of_in_channel ic) in let () = Unix.set_close_on_exec (descr_of_out_channel oc) in let f = Entry.find entry in - try f param (ic, oc); exit 0 + try + f param (ic, oc); + exit 0 with e -> prerr_endline (Printexc.to_string e); Printexc.print_backtrace stderr; @@ -171,80 +186,84 @@ let register_entry_point = Entry.register let name_of_entry = Entry.name_of_entry let fd_of_path path = - Sys_utils.with_umask 0o111 begin fun () -> - Sys_utils.mkdir_no_fail (Filename.dirname path); - Unix.openfile path [Unix.O_RDWR; Unix.O_CREAT; Unix.O_TRUNC] 0o666 - end + Sys_utils.with_umask 0o111 (fun () -> + Sys_utils.mkdir_no_fail (Filename.dirname path); + Unix.openfile path [Unix.O_RDWR; Unix.O_CREAT; Unix.O_TRUNC] 0o666) let null_fd () = fd_of_path Sys_utils.null_path let setup_channels channel_mode = - let mk = match channel_mode with - | `pipe -> fun () -> Unix.pipe () - | `socket -> fun () -> Unix.socketpair Unix.PF_UNIX Unix.SOCK_STREAM 0 + let mk = + match channel_mode with + | `pipe -> (fun () -> Unix.pipe ()) + | `socket -> (fun () -> Unix.socketpair Unix.PF_UNIX Unix.SOCK_STREAM 0) in - let parent_in, child_out = mk () in - let child_in, parent_out = mk () in + let (parent_in, child_out) = mk () in + let (child_in, parent_out) = mk () in Unix.set_close_on_exec parent_in; Unix.set_close_on_exec parent_out; - (parent_in, child_out), (child_in, parent_out) + ((parent_in, child_out), (child_in, parent_out)) -let descr_as_channels (descr_in, descr_out) = +let descr_as_channels (descr_in, descr_out) = let ic = Timeout.in_channel_of_descr descr_in in let oc = Unix.out_channel_of_descr descr_out in - ic, oc + (ic, oc) (* This only works on Unix, and should be avoided as far as possible. Use * Daemon.spawn instead. *) let fork ?(channel_mode = `pipe) (type param) - (log_stdout, log_stderr) (f : param -> ('a, 'b) channel_pair -> unit) + (log_stdout, log_stderr) + (f : param -> ('a, 'b) channel_pair -> unit) (param : param) : ('b, 'a) handle = - let (parent_in, child_out), (child_in, parent_out) - = setup_channels channel_mode in - (** Since don't use exec, we can actually set CLOEXEC before the fork. *) + let ((parent_in, child_out), (child_in, parent_out)) = + setup_channels channel_mode + in + (* Since don't use exec, we can actually set CLOEXEC before the fork. 
*) Unix.set_close_on_exec child_in; Unix.set_close_on_exec child_out; let (parent_in, child_out) = descr_as_channels (parent_in, child_out) in let (child_in, parent_out) = descr_as_channels (child_in, parent_out) in match Fork.fork () with | -1 -> failwith "Go get yourself a real computer" - | 0 -> (* child *) + | 0 -> + (* child *) (try - ignore(Unix.setsid()); - Timeout.close_in parent_in; - close_out parent_out; - Sys_utils.with_umask 0o111 begin fun () -> - let fd = null_fd () in - Unix.dup2 fd Unix.stdin; - Unix.close fd; - end; - Unix.dup2 log_stdout Unix.stdout; - Unix.dup2 log_stderr Unix.stderr; - if log_stdout <> Unix.stdout then Unix.close log_stdout; - if log_stderr <> Unix.stderr && log_stderr <> log_stdout then - Unix.close log_stderr; - f param (child_in, child_out); - exit 0 - with e -> - prerr_endline (Printexc.to_string e); - Printexc.print_backtrace stderr; - exit 1) - | pid -> (* parent *) + ignore (Unix.setsid ()); + Timeout.close_in parent_in; + close_out parent_out; + Sys_utils.with_umask 0o111 (fun () -> + let fd = null_fd () in + Unix.dup2 fd Unix.stdin; + Unix.close fd); + Unix.dup2 log_stdout Unix.stdout; + Unix.dup2 log_stderr Unix.stderr; + if log_stdout <> Unix.stdout then Unix.close log_stdout; + if log_stderr <> Unix.stderr && log_stderr <> log_stdout then + Unix.close log_stderr; + f param (child_in, child_out); + exit 0 + with e -> + prerr_endline (Printexc.to_string e); + Printexc.print_backtrace stderr; + exit 1) + | pid -> + (* parent *) Timeout.close_in child_in; close_out child_out; - { channels = parent_in, parent_out; pid } + { channels = (parent_in, parent_out); pid } let spawn - (type param) (type input) (type output) + (type param input output) ?(channel_mode = `pipe) ?name (stdin, stdout, stderr) - (entry: (param, input, output) entry) - (param: param) : (output, input) handle = - let (parent_in, child_out), (child_in, parent_out) = - setup_channels channel_mode in + (entry : (param, input, output) entry) + (param : param) : (output, input) handle = + let ((parent_in, child_out), (child_in, parent_out)) = + setup_channels channel_mode + in Entry.set_context entry param (child_in, child_out); let exe = Sys_utils.executable_path () in let name = Option.value ~default:(Entry.name_of_entry entry) name in @@ -253,26 +272,25 @@ let spawn Unix.close child_in; Unix.close child_out; let close_if_open fd = - try Unix.close fd - with Unix.Unix_error (Unix.EBADF, _, _) -> () + (try Unix.close fd with Unix.Unix_error (Unix.EBADF, _, _) -> ()) in if stdin <> Unix.stdin then close_if_open stdin; if stdout <> Unix.stdout then close_if_open stdout; if stderr <> Unix.stderr && stderr <> stdout then close_if_open stderr; - PidLog.log - ~reason:(Entry.name_of_entry entry) - ~no_fail:true + PidLog.log ~reason:(Entry.name_of_entry entry) ~no_fail:true pid; + { + channels = + ( Timeout.in_channel_of_descr parent_in, + Unix.out_channel_of_descr parent_out ); pid; - { channels = Timeout.in_channel_of_descr parent_in, - Unix.out_channel_of_descr parent_out; - pid } + } (* for testing code *) let devnull () = let ic = Timeout.open_in "/dev/null" in let oc = open_out "/dev/null" in - {channels = ic, oc; pid = 0} + { channels = (ic, oc); pid = 0 } (** * In order for the Daemon infrastructure to work, the beginning of your @@ -288,7 +306,7 @@ let devnull () = *) let check_entry_point () = try - let entry, param, (ic, oc) = Entry.get_context () in + let (entry, param, (ic, oc)) = Entry.get_context () in Entry.clear_context (); exec entry param ic oc with Not_found -> () @@ 
-302,9 +320,13 @@ let kill h = Sys_utils.terminate_process h.pid let close_out = close_out + let output_string = output_string + let flush = flush let close_in = Timeout.close_in + let input_char ic = Timeout.input_char ic + let input_value ic = Timeout.input_value ic diff --git a/hack/utils/sys/daemon.mli b/hack/utils/sys/daemon.mli index 01401d81482..359fae70c56 100644 --- a/hack/utils/sys/daemon.mli +++ b/hack/utils/sys/daemon.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -10,26 +10,39 @@ (** Type-safe versions of the channels in Pervasives. *) type 'a in_channel + type 'a out_channel + type ('in_, 'out) channel_pair = 'in_ in_channel * 'out out_channel val to_channel : - 'a out_channel -> ?flags:Marshal.extern_flags list -> ?flush:bool -> - 'a -> unit + 'a out_channel -> + ?flags:Marshal.extern_flags list -> + ?flush:bool -> + 'a -> + unit + val from_channel : ?timeout:Timeout.t -> 'a in_channel -> 'a + val flush : 'a out_channel -> unit (* This breaks the type safety, but is necessary in order to allow select() *) val descr_of_in_channel : 'a in_channel -> Unix.file_descr + val descr_of_out_channel : 'a out_channel -> Unix.file_descr + val cast_in : 'a in_channel -> Timeout.in_channel + val cast_out : 'a out_channel -> Pervasives.out_channel val close_out : 'a out_channel -> unit + val output_string : 'a out_channel -> string -> unit val close_in : 'a in_channel -> unit + val input_char : 'a in_channel -> char + val input_value : 'a in_channel -> 'b (** Spawning new process *) @@ -56,39 +69,44 @@ type ('param, 'input, 'output) entry evaluated when `Daemon.check_entry_point` is called at the beginning of `ServerMain.start`. *) val register_entry_point : - string -> ('param -> ('input, 'output) channel_pair -> unit) -> + string -> + ('param -> ('input, 'output) channel_pair -> unit) -> ('param, 'input, 'output) entry val name_of_entry : ('param, 'input, 'output) entry -> string (* Handler upon spawn and forked process. *) type ('in_, 'out) handle = { - channels : ('in_, 'out) channel_pair; - pid : int; + channels: ('in_, 'out) channel_pair; + pid: int; } (* for unit tests *) val devnull : unit -> ('a, 'b) handle val fd_of_path : string -> Unix.file_descr + val null_fd : unit -> Unix.file_descr (* Fork and run a function that communicates via the typed channels *) val fork : ?channel_mode:[ `pipe | `socket ] -> (* Where the daemon's output should go *) - (Unix.file_descr * Unix.file_descr) -> - ('param -> ('input, 'output) channel_pair -> unit) -> 'param -> + Unix.file_descr * Unix.file_descr -> + ('param -> ('input, 'output) channel_pair -> unit) -> + 'param -> ('output, 'input) handle (* Spawn a new instance of the current process, and execute the alternate entry point. *) val spawn : ?channel_mode:[ `pipe | `socket ] -> - ?name: string -> + ?name:string -> (* Where the daemon's input and output should go *) - (Unix.file_descr * Unix.file_descr * Unix.file_descr) -> - ('param, 'input, 'output) entry -> 'param -> ('output, 'input) handle + Unix.file_descr * Unix.file_descr * Unix.file_descr -> + ('param, 'input, 'output) entry -> + 'param -> + ('output, 'input) handle (* Close the typed channels associated to a 'spawned' child. 
*) val close : ('a, 'b) handle -> unit diff --git a/hack/utils/sys/dune b/hack/utils/sys/dune new file mode 100644 index 00000000000..8b2c4a4c8ff --- /dev/null +++ b/hack/utils/sys/dune @@ -0,0 +1,19 @@ +(library + (name sys_utils) + (wrapped false) + (libraries + collections + disk + str + unix + utils_core) + (c_names + files + gc_profiling + getrusage + handle_stubs + nproc + priorities + processor_info + realpath + sysinfo)) diff --git a/hack/utils/sys/fork.ml b/hack/utils/sys/fork.ml index 535ac066da9..ea4b93ee05b 100644 --- a/hack/utils/sys/fork.ml +++ b/hack/utils/sys/fork.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -31,21 +31,20 @@ let post_fork_child f = (* You should always use this instead of Unix.fork, so that the callbacks get * invoked *) let fork () = - List.iter !pre_fork_callbacks (fun f -> f()); + List.iter !pre_fork_callbacks (fun f -> f ()); match Unix.fork () with | 0 -> - List.iter !post_fork_child_callbacks (fun f -> f()); + List.iter !post_fork_child_callbacks (fun f -> f ()); 0 - | i -> - i + | i -> i (* should only be called from hh_server, which initializes the PidLog *) let fork_and_log ?reason () = - let result = fork() in + let result = fork () in (match result with - | -1 -> () - | 0 -> PidLog.close (); - | pid -> PidLog.log ?reason pid); + | -1 -> () + | 0 -> PidLog.close () + | pid -> PidLog.log ?reason pid); result let fork_and_may_log ?reason () = diff --git a/hack/utils/sys/gc_profiling.c b/hack/utils/sys/gc_profiling.c index 1dab5c3da62..afbb4ad4ec8 100644 --- a/hack/utils/sys/gc_profiling.c +++ b/hack/utils/sys/gc_profiling.c @@ -63,6 +63,8 @@ void major_end() { } void hh_start_gc_profiling() { + major_time = 0.0; + minor_time = 0.0; caml_minor_gc_begin_hook = minor_begin; caml_minor_gc_end_hook = minor_end; caml_major_slice_begin_hook = major_begin; @@ -76,6 +78,10 @@ void hh_start_gc_profiling() { * definitely don't allocate anything during GC */ value hh_get_gc_time() { + caml_minor_gc_begin_hook = NULL; + caml_minor_gc_end_hook = NULL; + caml_major_slice_begin_hook = NULL; + caml_major_slice_end_hook = NULL; CAMLparam0(); CAMLlocal1(ret); diff --git a/hack/utils/sys/getrusage.c b/hack/utils/sys/getrusage.c index 7ed4fe125f1..e6b4a027f80 100644 --- a/hack/utils/sys/getrusage.c +++ b/hack/utils/sys/getrusage.c @@ -10,6 +10,7 @@ #include #include #ifdef _WIN32 +#include #include value hh_getrusage(void) { diff --git a/hack/utils/sys/handle.ml b/hack/utils/sys/handle.ml index e558481e2aa..26fa0226c57 100644 --- a/hack/utils/sys/handle.ml +++ b/hack/utils/sys/handle.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -12,27 +12,38 @@ an integer command-line argument. *) type handle = int -external raw_get_handle : - Unix.file_descr -> handle = "caml_hh_worker_get_handle" [@@noalloc] -external raw_wrap_handle : - handle -> Unix.file_descr = "caml_hh_worker_create_handle" -external win_setup_handle_serialization : - unit -> unit = "win_setup_handle_serialization" + +external raw_get_handle : Unix.file_descr -> handle + = "caml_hh_worker_get_handle" + [@@noalloc] + +external raw_wrap_handle : handle -> Unix.file_descr + = "caml_hh_worker_create_handle" + +external win_setup_handle_serialization : unit -> unit + = "win_setup_handle_serialization" let init = (* Windows: register the serialize/desarialize functions for the custom block of "Unix.file_descr". 
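Pulling the Daemon interface above together, a minimal sketch of the intended entry-point/spawn flow; the "echo" entry, its greeting, and the payload strings are illustrative, not part of the diff:

let echo_entry : (string, string, string) Daemon.entry =
  Daemon.register_entry_point "echo" (fun greeting (ic, oc) ->
      (* Runs in the child: read one request, reply, and return. *)
      let msg : string = Daemon.from_channel ic in
      Daemon.to_channel oc (greeting ^ " " ^ msg))

let () =
  (* Must run first: a re-exec'd child jumps straight into its entry point. *)
  Daemon.check_entry_point ();
  let handle =
    Daemon.spawn (Unix.stdin, Unix.stdout, Unix.stderr) echo_entry "hello,"
  in
  let (ic, oc) = handle.Daemon.channels in
  Daemon.to_channel oc "world";
  print_endline (Daemon.from_channel ic);
  Daemon.close handle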
*) - lazy begin - win_setup_handle_serialization () - end + lazy (win_setup_handle_serialization ()) let () = Lazy.force init let () = assert (Sys.win32 || Obj.is_int (Obj.repr Unix.stdin)) + let get_handle = - if Sys.win32 then raw_get_handle else Obj.magic + if Sys.win32 then + raw_get_handle + else + Obj.magic + let wrap_handle = - if Sys.win32 then raw_wrap_handle else Obj.magic + if Sys.win32 then + raw_wrap_handle + else + Obj.magic let to_in_channel h = wrap_handle h |> Unix.in_channel_of_descr + let to_out_channel h = wrap_handle h |> Unix.out_channel_of_descr diff --git a/hack/utils/sys/lock.ml b/hack/utils/sys/lock.ml index f851b7d77bf..6842b1fe6c8 100644 --- a/hack/utils/sys/lock.ml +++ b/hack/utils/sys/lock.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -19,12 +19,11 @@ let lock_fds = ref SMap.empty let register_lock lock_file = let _ = Sys_utils.mkdir_no_fail (Filename.dirname lock_file) in - Sys_utils.with_umask 0o111 begin fun () -> - let fd = Unix.descr_of_out_channel (open_out lock_file) in - let st = Unix.fstat fd in - lock_fds := SMap.add lock_file (fd, st) !lock_fds; - fd - end + Sys_utils.with_umask 0o111 (fun () -> + let fd = Unix.descr_of_out_channel (open_out lock_file) in + let st = Unix.fstat fd in + lock_fds := SMap.add lock_file (fd, st) !lock_fds; + fd) (** * Grab or check if a file lock is available. @@ -33,47 +32,48 @@ let register_lock lock_file = *) let _operations lock_file op : bool = try - let fd = match SMap.get lock_file !lock_fds with + let fd = + match SMap.get lock_file !lock_fds with | None -> register_lock lock_file | Some (fd, st) -> - let identical_file = - try - (* Note: I'm carefully avoiding opening another fd to the - * lock_file when doing this check, because closing any file - * descriptor to a given file will release the locks on *all* - * file descriptors that point to that file. Fortunately, stat() - * gets us our information without opening a fd *) - let current_st = Unix.stat lock_file in - Unix.(st.st_dev = current_st.st_dev && - st.st_ino = current_st.st_ino) - with _ -> - false - in - if not (Sys.win32 || identical_file) then - (* Looks like someone (tmpwatch?) deleted the lock file; don't - * create another one, because our socket is probably gone too. - * We are dead in the water. *) - raise Exit - else - fd + let identical_file = + try + (* Note: I'm carefully avoiding opening another fd to the + * lock_file when doing this check, because closing any file + * descriptor to a given file will release the locks on *all* + * file descriptors that point to that file. Fortunately, stat() + * gets us our information without opening a fd *) + let current_st = Unix.stat lock_file in + Unix.( + st.st_dev = current_st.st_dev && st.st_ino = current_st.st_ino) + with _ -> false + in + if not (Sys.win32 || identical_file) then + (* Looks like someone (tmpwatch?) deleted the lock file; don't + * create another one, because our socket is probably gone too. + * We are dead in the water. *) + raise Exit + else + fd in let _ = try Unix.lockf fd op 1 with _ when Sys.win32 && (op = Unix.F_TLOCK || op = Unix.F_TEST) -> - (* On Windows, F_TLOCK and F_TEST fail if we have the lock ourself *) - (* However, we then are the only one to be able to write there. *) - ignore (Unix.lseek fd 0 Unix.SEEK_SET : int); - (* If we don't have the lock, the following 'write' will + (* On Windows, F_TLOCK and F_TEST fail if we have the lock ourself *) + (* However, we then are the only one to be able to write there. 
*) + ignore (Unix.lseek fd 0 Unix.SEEK_SET : int); + + (* If we don't have the lock, the following 'write' will throw an exception. *) - let wb = Unix.write fd (Bytes.make 1 ' ') 0 1 in - (* When not throwing an exception, the current + let wb = Unix.write fd (Bytes.make 1 ' ') 0 1 in + (* When not throwing an exception, the current implementation of `Unix.write` always return `1`. But let's be protective against semantic changes, and better fails than wrongly assume that we own a lock. *) - assert (wb = 1) in + assert (wb = 1) + in true - with _ -> - false + with _ -> false (** * Grabs the file lock and returns true if it the lock was grabbed @@ -89,14 +89,13 @@ let blocking_grab_then_release lock_file = ignore (_operations lock_file Unix.F_LOCK); ignore (release lock_file) - (** * Gets the server instance-unique integral fd for a given lock file. *) let fd_of lock_file : int = match SMap.get lock_file !lock_fds with - | None -> -1 - | Some fd -> Obj.magic fd + | None -> -1 + | Some fd -> Obj.magic fd (** * Check if the file lock is available without grabbing it. diff --git a/hack/utils/sys/lock.mli b/hack/utils/sys/lock.mli index 219e749a630..e2e9ada80c0 100644 --- a/hack/utils/sys/lock.mli +++ b/hack/utils/sys/lock.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * @@ -8,7 +8,11 @@ *) val grab : string -> bool + val release : string -> bool + val blocking_grab_then_release : string -> unit + val fd_of : string -> int + val check : string -> bool diff --git a/hack/utils/sys/path.ml b/hack/utils/sys/path.ml index 9e2ba438d1f..2ed78ce554c 100644 --- a/hack/utils/sys/path.ml +++ b/hack/utils/sys/path.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,16 +7,29 @@ * *) +open Reordered_argument_collections include Sys -type t = string +module S = struct + type t = string + + let compare = Pervasives.compare + + let to_string x = x +end + +type t = S.t let dummy_path : t = "" let cat = Sys_utils.cat + let compare = Pervasives.compare + let dirname = Filename.dirname +let basename = Filename.basename + (** * Resolves a path (using realpath) * @@ -31,7 +44,7 @@ let dirname = Filename.dirname let make path = match Sys_utils.realpath path with | Some path -> path - | None -> path (* assert false? *) + | None -> (* assert false? *) path (** * Creates a Path without running it through `realpath`. 
This is unsafe because @@ -42,49 +55,58 @@ let make_unsafe path = path let to_string path = path -let concat path more = - make (Filename.concat path more) +let concat path more = make (Filename.concat path more) let parent path = - if is_directory path - then make (concat path Filename.parent_dir_name) - else make (Filename.dirname path) + if is_directory path then + make (concat path Filename.parent_dir_name) + else + make (Filename.dirname path) let output = output_string let slash_escaped_string_of_path path = let buf = Buffer.create (String.length path) in - String.iter (fun ch -> - match ch with - | '\\' -> Buffer.add_string buf "zB" - | ':' -> Buffer.add_string buf "zC" - | '/' -> Buffer.add_string buf "zS" - | '\x00' -> Buffer.add_string buf "z0" - | 'z' -> Buffer.add_string buf "zZ" - | _ -> Buffer.add_char buf ch - ) path; + String.iter + (fun ch -> + match ch with + | '\\' -> Buffer.add_string buf "zB" + | ':' -> Buffer.add_string buf "zC" + | '/' -> Buffer.add_string buf "zS" + | '\x00' -> Buffer.add_string buf "z0" + | 'z' -> Buffer.add_string buf "zZ" + | _ -> Buffer.add_char buf ch) + path; Buffer.contents buf let path_of_slash_escaped_string str = let length = String.length str in let buf = Buffer.create length in let rec consume i = - if i >= length then () + if i >= length then + () else let replacement = - if i < length - 1 && str.[i] = 'z' - then match str.[i+1] with + if i < length - 1 && str.[i] = 'z' then + match str.[i + 1] with | 'B' -> Some '\\' | 'C' -> Some ':' | 'S' -> Some '/' | '0' -> Some '\x00' | 'Z' -> Some 'z' | _ -> None - else None in - let c, next_i = match replacement with - | Some r -> r, i+2 - | None -> str.[i], i+1 in + else + None + in + let (c, next_i) = + match replacement with + | Some r -> (r, i + 2) + | None -> (str.[i], i + 1) + in Buffer.add_char buf c; consume next_i - in consume 0; + in + consume 0; make (Buffer.contents buf) + +module Set = Reordered_argument_set (Set.Make (S)) diff --git a/hack/utils/sys/path.mli b/hack/utils/sys/path.mli index a2b4a9d54c6..9e47ab74915 100644 --- a/hack/utils/sys/path.mli +++ b/hack/utils/sys/path.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
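A sketch of the slash-escaped encoding implemented above ('/' becomes "zS", ':' becomes "zC", and 'z' itself becomes "zZ"); the socket path is illustrative:

let () =
  let p = Path.make_unsafe "/tmp/flow:server.sock" in
  let escaped = Path.slash_escaped_string_of_path p in
  (* Prints "zStmpzSflowzCserver.sock". *)
  print_endline escaped;
  (* Decoding goes back through Path.make, so an existing path would also be
     canonicalized by realpath. *)
  print_endline (Path.to_string (Path.path_of_slash_escaped_string escaped))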
* @@ -7,25 +7,54 @@ * *) +open Reordered_argument_collections -type t = private string - -val dummy_path: t -val make: string -> t -val make_unsafe: string -> t -val to_string: t -> string -val file_exists: t -> bool -val is_directory: t -> bool -val compare: t -> t -> int -val concat: t -> string -> t -val chdir: t -> unit -val dirname: t -> t -val getcwd: unit -> t -val output: out_channel -> t -> unit -val remove: t -> unit -val parent: t -> t -val executable_name: t -val cat: t -> string - -val slash_escaped_string_of_path: t -> string -val path_of_slash_escaped_string: string -> t +module S : sig + type t = private string + + val compare : t -> t -> int + + val to_string : t -> string +end + +type t = S.t + +val dummy_path : t + +val make : string -> t + +val make_unsafe : string -> t + +val to_string : t -> string + +val file_exists : t -> bool + +val is_directory : t -> bool + +val compare : t -> t -> int + +val concat : t -> string -> t + +val chdir : t -> unit + +val dirname : t -> t + +val basename : t -> string + +val getcwd : unit -> t + +val output : out_channel -> t -> unit + +val remove : t -> unit + +val parent : t -> t + +val executable_name : t + +val cat : t -> string + +val slash_escaped_string_of_path : t -> string + +val path_of_slash_escaped_string : string -> t + +module Set : module type of Reordered_argument_set (Set.Make (S)) diff --git a/hack/utils/sys/pidLog.ml b/hack/utils/sys/pidLog.ml index 8612467b0c0..1cafc5497c7 100644 --- a/hack/utils/sys/pidLog.ml +++ b/hack/utils/sys/pidLog.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -17,24 +17,24 @@ let disable () = enabled := false let init pids_file = assert (!log_oc = None); - Sys_utils.with_umask 0o111 begin fun () -> - Sys_utils.mkdir_no_fail (Filename.dirname pids_file); - let oc = open_out pids_file in - log_oc := Some oc; - Unix.(set_close_on_exec (descr_of_out_channel oc)) - end - -let log ?reason ?(no_fail=false) pid = - if !enabled - then + Sys_utils.with_umask 0o111 (fun () -> + Sys_utils.mkdir_no_fail (Filename.dirname pids_file); + let oc = open_out pids_file in + log_oc := Some oc; + Unix.(set_close_on_exec (descr_of_out_channel oc))) + +let log ?reason ?(no_fail = false) pid = + if !enabled then let pid = Sys_utils.pid_of_handle pid in - let reason = match reason with + let reason = + match reason with | None -> "unknown" - | Some s -> s in + | Some s -> s + in match !log_oc with - | None when no_fail -> () - | None -> failwith "Can't write pid to uninitialized pids log" - | Some oc -> Printf.fprintf oc "%d\t%s\n%!" pid reason + | None when no_fail -> () + | None -> failwith "Can't write pid to uninitialized pids log" + | Some oc -> Printf.fprintf oc "%d\t%s\n%!" 
pid reason exception FailedToGetPids @@ -42,20 +42,20 @@ let get_pids pids_file = try let ic = open_in pids_file in let results = ref [] in - begin try - while true do - let row = input_line ic in - if Str.string_match (Str.regexp "^\\([0-9]+\\)\t\\(.+\\)") row 0 - then - let pid = int_of_string (Str.matched_group 1 row) in - let reason = Str.matched_group 2 row in - results := (pid, reason)::!results; - done; - with End_of_file -> () end; + begin + try + while true do + let row = input_line ic in + if Str.string_match (Str.regexp "^\\([0-9]+\\)\t\\(.+\\)") row 0 then + let pid = int_of_string (Str.matched_group 1 row) in + let reason = Str.matched_group 2 row in + results := (pid, reason) :: !results + done + with End_of_file -> () + end; close_in ic; List.rev !results - with Sys_error _ -> - raise FailedToGetPids + with Sys_error _ -> raise FailedToGetPids let close () = Option.iter !log_oc ~f:close_out; diff --git a/hack/utils/sys/printSignal.ml b/hack/utils/sys/printSignal.ml index 26d50942815..8e24e009169 100644 --- a/hack/utils/sys/printSignal.ml +++ b/hack/utils/sys/printSignal.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * diff --git a/hack/utils/sys/proc.ml b/hack/utils/sys/proc.ml index 928589f585b..80bd1fefaec 100644 --- a/hack/utils/sys/proc.ml +++ b/hack/utils/sys/proc.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * @@ -7,7 +7,10 @@ * *) -type proc_stat = { cmdline: string; ppid: int; } +type proc_stat = { + cmdline: string; + ppid: int; +} let cmdline_delimiter_re = Str.regexp "\x00" @@ -15,21 +18,22 @@ let cmdline_delimiter_re = Str.regexp "\x00" let get_cmdline (pid : int) : (string, string) result = (* NOTE: Linux's OS type is Unix *) if Sys.os_type <> "Unix" then - Error "Getting cmdline is not implemented for non-Unix OS types" else - - let cmdline_path = Printf.sprintf "/proc/%d/cmdline" pid in - try - let line = Str.global_replace - cmdline_delimiter_re - " " - (Disk.cat cmdline_path) in - Ok line - with e -> - let error = Printf.sprintf - "No 'cmdline' file found for PID %d: '%s'" - pid - (Printexc.to_string e) in - Error error + Error "Getting cmdline is not implemented for non-Unix OS types" + else + let cmdline_path = Printf.sprintf "/proc/%d/cmdline" pid in + try + let line = + Str.global_replace cmdline_delimiter_re " " (Disk.cat cmdline_path) + in + Ok line + with e -> + let error = + Printf.sprintf + "No 'cmdline' file found for PID %d: '%s'" + pid + (Printexc.to_string e) + in + Error error (* Takes a PID and returns the information about the process, including the name and the PID of the parent process (PPID) *) @@ -42,51 +46,57 @@ let get_proc_stat (pid : int) : (proc_stat, string) result = try let stat = Scanf.Scanning.from_string (Disk.cat stat_path) in try - let record = Scanf.bscanf - stat - "%d (%s@) %c %d" - (fun _my_pid _comm _state ppid : (proc_stat, string) result -> - match (get_cmdline pid) with - | Ok cmdline -> Ok { cmdline = cmdline; ppid = ppid; } - | Error err -> Error err) in + let record = + Scanf.bscanf stat "%d (%s@) %c %d" (fun _my_pid _comm _state ppid -> + ( match get_cmdline pid with + | Ok cmdline -> Ok { cmdline; ppid } + | Error err -> Error err + : (proc_stat, string) result )) + in record with e -> - let error = Printf.sprintf - "Error reading 'stat' for PID %d: %s" - pid - (Printexc.to_string e) in + let error = + Printf.sprintf + "Error reading 'stat' for PID %d: %s" + pid + (Printexc.to_string e) + in Error error with e -> - let error = 
Printf.sprintf - "No 'stat' file found for PID %d: '%s'" - pid - (Printexc.to_string e) in + let error = + Printf.sprintf + "No 'stat' file found for PID %d: '%s'" + pid + (Printexc.to_string e) + in Error error let get_proc_stack - ?(max_depth : int = -1) - ?(max_length : int = max_int) - (pid : int) : (string list, string) result = + ?(max_depth : int = -1) ?(max_length : int = max_int) (pid : int) : + (string list, string) result = let prepare_cmdline (cmdline : string) : string = let cmdline = String.trim cmdline in - if (max_length >= String.length cmdline) then + if max_length >= String.length cmdline then cmdline else - (String.trim (String.sub cmdline 0 max_length)) ^ "..." in - + String.trim (String.sub cmdline 0 max_length) ^ "..." + in (* We could have max_depth as optional, but then everybody would have to pass in None *) (* let max_depth = match max_depth with | None -> -1 | Some max_depth -> max_depth in *) let rec build_proc_stack - (curr_pid : int) - (proc_stack : string list) - (counter : int) : (string list, string) result = - if curr_pid = 0 then Ok proc_stack else - if counter = max_depth then - Ok proc_stack - else begin - match (get_proc_stat curr_pid) with - | Ok stat -> - build_proc_stack stat.ppid ((prepare_cmdline stat.cmdline) :: proc_stack) (counter + 1) - | Error e -> Error e - end in - build_proc_stack pid [] 0 + (curr_pid : int) (proc_stack : string list) (counter : int) : + (string list, string) result = + if curr_pid = 0 then + Ok proc_stack + else if counter = max_depth then + Ok proc_stack + else + match get_proc_stat curr_pid with + | Ok stat -> + build_proc_stack + stat.ppid + (prepare_cmdline stat.cmdline :: proc_stack) + (counter + 1) + | Error e -> Error e + in + build_proc_stack pid [] 0 diff --git a/hack/utils/sys/sys_utils.ml b/hack/utils/sys/sys_utils.ml index 0f6e9779e35..c9b3d31e0f6 100644 --- a/hack/utils/sys/sys_utils.ml +++ b/hack/utils/sys/sys_utils.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -9,37 +9,62 @@ open Hh_core -external realpath: string -> string option = "hh_realpath" -external is_nfs: string -> bool = "hh_is_nfs" +external realpath : string -> string option = "hh_realpath" + +external is_nfs : string -> bool = "hh_is_nfs" + external is_apple_os : unit -> bool = "hh_sysinfo_is_apple_os" (** Option type intead of exception throwing. *) -let get_env name = - try Some (Sys.getenv name) with - | Not_found -> None +let get_env name = (try Some (Sys.getenv name) with Not_found -> None) let getenv_user () = - let user_var = if Sys.win32 then "USERNAME" else "USER" in + let user_var = + if Sys.win32 then + "USERNAME" + else + "USER" + in let logname_var = "LOGNAME" in let user = get_env user_var in let logname = get_env logname_var in Option.first_some user logname let getenv_home () = - let home_var = if Sys.win32 then "APPDATA" else "HOME" in + let home_var = + if Sys.win32 then + "APPDATA" + else + "HOME" + in get_env home_var let getenv_term () = - let term_var = "TERM" in (* This variable does not exist on windows. *) + let term_var = "TERM" in + (* This variable does not exist on windows. 
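A sketch of walking the ancestor chain with the reworked get_proc_stack above (only meaningful on Linux, since it reads /proc):

let () =
  match Proc.get_proc_stack ~max_length:80 (Unix.getpid ()) with
  | Ok cmdlines ->
    (* Oldest ancestor first, each command line capped at 80 characters. *)
    List.iter print_endline cmdlines
  | Error msg -> prerr_endline msg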
*) get_env term_var -let path_sep = if Sys.win32 then ";" else ":" -let null_path = if Sys.win32 then "nul" else "/dev/null" +let path_sep = + if Sys.win32 then + ";" + else + ":" + +let null_path = + if Sys.win32 then + "nul" + else + "/dev/null" + let temp_dir_name = - if Sys.win32 then Filename.get_temp_dir_name () else "/tmp" + if Sys.win32 then + Filename.get_temp_dir_name () + else + "/tmp" let getenv_path () = - let path_var = "PATH" in (* Same variable on windows *) + let path_var = "PATH" in + (* Same variable on windows *) get_env path_var let open_in_no_fail fn = @@ -57,7 +82,8 @@ let open_in_bin_no_fail fn = exit 3 let close_in_no_fail fn ic = - try close_in ic with e -> + try close_in ic + with e -> let e = Printexc.to_string e in Printf.fprintf stderr "Could not close: '%s' (%s)\n" fn e; exit 3 @@ -77,7 +103,8 @@ let open_out_bin_no_fail fn = exit 3 let close_out_no_fail fn oc = - try close_out oc with e -> + try close_out oc + with e -> let e = Printexc.to_string e in Printf.fprintf stderr "Could not close: '%s' (%s)\n" fn e; exit 3 @@ -100,13 +127,14 @@ let cat_no_fail filename = content let nl_regexp = Str.regexp "[\r\n]" + let split_lines = Str.split nl_regexp (** Returns true if substring occurs somewhere inside str. *) let string_contains str substring = - (** regexp_string matches only this string and nothing else. *) + (* regexp_string matches only this string and nothing else. *) let re = Str.regexp_string substring in - try (Str.search_forward re str 0) >= 0 with Not_found -> false + (try Str.search_forward re str 0 >= 0 with Not_found -> false) let exec_read cmd = let ic = Unix.open_process_in cmd in @@ -114,16 +142,19 @@ let exec_read cmd = assert (Unix.close_process_in ic = Unix.WEXITED 0); result -let exec_read_lines ?(reverse=false) cmd = +let exec_read_lines ?(reverse = false) cmd = let ic = Unix.open_process_in cmd in let result = ref [] in (try - while true do - result := input_line ic :: !result - done; - with End_of_file -> ()); + while true do + result := input_line ic :: !result + done + with End_of_file -> ()); assert (Unix.close_process_in ic = Unix.WEXITED 0); - if not reverse then List.rev !result else !result + if not reverse then + List.rev !result + else + !result (** * Collects paths that satisfy a predicate, recursively traversing directories. @@ -131,10 +162,10 @@ let exec_read_lines ?(reverse=false) cmd = let rec collect_paths path_predicate path = if Sys.is_directory path then path - |> Sys.readdir - |> Array.to_list - |> List.map ~f:(Filename.concat path) - |> List.concat_map ~f:(collect_paths path_predicate) + |> Sys.readdir + |> Array.to_list + |> List.map ~f:(Filename.concat path) + |> List.concat_map ~f:(collect_paths path_predicate) else Utils.singleton_if (path_predicate path) path @@ -147,23 +178,23 @@ let rec collect_paths path_predicate path = * to get the necessary information (in this case, containing a list of files, * one per line). 
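A sketch of the "@"-file convention described above; the temp file path is illustrative, and every resulting path must resolve through realpath, so missing paths raise:

let () =
  Sys_utils.write_file ~file:"/tmp/extra_paths.txt" "/etc\n/usr\n";
  let paths = Sys_utils.parse_path_list ["/tmp"; "@/tmp/extra_paths.txt"] in
  (* Expands to the realpath-resolved forms of /tmp, /etc and /usr. *)
  List.iter print_endline paths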
*) -let parse_path_list (paths: string list): string list = +let parse_path_list (paths : string list) : string list = List.concat_map paths ~f:(fun path -> - if String_utils.string_starts_with path "@" - then - let path = String_utils.lstrip path "@" in - cat path |> split_lines - else - [path] -) |> List.map ~f:(fun path -> - match realpath path with - | Some path -> path - | None -> failwith (Printf.sprintf "Invalid path: %s" path) -) - -let rm_dir_tree ?(skip_mocking=false) = - if skip_mocking then RealDisk.rm_dir_tree - else Disk.rm_dir_tree + if String_utils.string_starts_with path "@" then + let path = String_utils.lstrip path "@" in + cat path |> split_lines + else + [path]) + |> List.map ~f:(fun path -> + match realpath path with + | Some path -> path + | None -> failwith (Printf.sprintf "Invalid path: %s" path)) + +let rm_dir_tree ?(skip_mocking = false) = + if skip_mocking then + RealDisk.rm_dir_tree + else + Disk.rm_dir_tree let restart () = let cmd = Sys.argv.(0) in @@ -172,22 +203,25 @@ let restart () = let logname_impl () = match getenv_user () with - | Some user -> user - | None -> - (* If this function is generally useful, it can be lifted to toplevel + | Some user -> user + | None -> + (* If this function is generally useful, it can be lifted to toplevel in this file, but this is the only place we need it for now. *) - let exec_try_read cmd = - let ic = Unix.open_process_in cmd in - let out = try Some (input_line ic) with End_of_file -> None in - let status = Unix.close_process_in ic in - match out, status with - | Some _, Unix.WEXITED 0 -> out - | _ -> None in - try Utils.unsafe_opt (exec_try_read "logname") with Invalid_argument _ -> - try Utils.unsafe_opt (exec_try_read "id -un") with Invalid_argument _ -> - "[unknown]" + let exec_try_read cmd = + let ic = Unix.open_process_in cmd in + let out = (try Some (input_line ic) with End_of_file -> None) in + let status = Unix.close_process_in ic in + match (out, status) with + | (Some _, Unix.WEXITED 0) -> out + | _ -> None + in + (try Utils.unsafe_opt (exec_try_read "logname") + with Invalid_argument _ -> + (try Utils.unsafe_opt (exec_try_read "id -un") + with Invalid_argument _ -> "[unknown]")) let logname_ref = ref None + let logname () = if !logname_ref = None then logname_ref := Some (logname_impl ()); Utils.unsafe_opt !logname_ref @@ -196,10 +230,16 @@ let with_umask umask f = let old_umask = ref 0 in Utils.with_context ~enter:(fun () -> old_umask := Unix.umask umask) - ~exit:(fun () -> Unix.umask !old_umask) + ~exit:(fun () -> + let _ = Unix.umask !old_umask in + ()) ~do_:f + let with_umask umask f = - if Sys.win32 then f () else with_umask umask f + if Sys.win32 then + f () + else + with_umask umask f let read_stdin_to_string () = let buf = Buffer.create 4096 in @@ -209,19 +249,18 @@ let read_stdin_to_string () = Buffer.add_char buf '\n' done; assert false - with End_of_file -> - Buffer.contents buf + with End_of_file -> Buffer.contents buf -let read_all ?(buf_size=4096) ic = +let read_all ?(buf_size = 4096) ic = let buf = Buffer.create buf_size in (try - while true do - let data = Bytes.create buf_size in - let bytes_read = input ic data 0 buf_size in - if bytes_read = 0 then raise Exit; - Buffer.add_subbytes buf data 0 bytes_read; - done - with Exit -> ()); + while true do + let data = Bytes.create buf_size in + let bytes_read = input ic data 0 buf_size in + if bytes_read = 0 then raise Exit; + Buffer.add_subbytes buf data 0 bytes_read + done + with Exit -> ()); Buffer.contents buf (** @@ -235,17 +274,19 @@ let 
read_all ?(buf_size=4096) ic = let expanduser path = Str.substitute_first (Str.regexp "^~\\([^/]*\\)") - begin fun s -> + begin + fun s -> match Str.matched_group 1 s with - | "" -> - begin - match getenv_home () with - | None -> (Unix.getpwuid (Unix.getuid())).Unix.pw_dir - | Some home -> home - end - | unixname -> - try (Unix.getpwnam unixname).Unix.pw_dir - with Not_found -> Str.matched_string s end + | "" -> + begin + match getenv_home () with + | None -> (Unix.getpwuid (Unix.getuid ())).Unix.pw_dir + | Some home -> home + end + | unixname -> + (try (Unix.getpwnam unixname).Unix.pw_dir + with Not_found -> Str.matched_string s) + end path (* Turns out it's surprisingly complex to figure out the path to the current @@ -263,38 +304,46 @@ let executable_path : unit -> string = let search_path path = let paths = match getenv_path () with - | None -> failwith "Unable to determine executable path" - | Some paths -> - Str.split (Str.regexp_string path_sep) paths in - let path = List.fold_left paths ~f:begin fun acc p -> - match acc with - | Some _ -> acc - | None -> realpath (expanduser (Filename.concat p path)) - end ~init:None + | None -> failwith "Unable to determine executable path" + | Some paths -> Str.split (Str.regexp_string path_sep) paths + in + let path = + List.fold_left + paths + ~f: + begin + fun acc p -> + match acc with + | Some _ -> acc + | None -> realpath (expanduser (Filename.concat p path)) + end + ~init:None in match path with | Some path -> path | None -> failwith "Unable to determine executable path" in - fun () -> match !executable_path_ with - | Some path -> path - | None -> + fun () -> + match !executable_path_ with + | Some path -> path + | None -> let path = Sys.executable_name in let path = if String.contains path dir_sep then match realpath path with | Some path -> path | None -> failwith "Unable to determine executable path" - else search_path path + else + search_path path in executable_path_ := Some path; path let lines_of_in_channel ic = let rec loop accum = - match try Some(input_line ic) with _ -> None with + match (try Some (input_line ic) with _ -> None) with | None -> List.rev accum - | Some(line) -> loop (line::accum) + | Some line -> loop (line :: accum) in loop [] @@ -308,21 +357,20 @@ let lines_of_file filename = close_in ic; [] - let read_file file = - let ic = open_in_bin file in + let ic = open_in_bin file in let size = in_channel_length ic in let buf = Bytes.create size in really_input ic buf 0 size; close_in ic; buf -let write_file ~file s = - Disk.write_file ~file ~contents:s +let write_file ~file s = Disk.write_file ~file ~contents:s let append_file ~file s = let chan = open_out_gen [Open_wronly; Open_append; Open_creat] 0o666 file in - (output_string chan s; close_out chan) + output_string chan s; + close_out chan let write_strings_to_file ~file ss = let chan = open_out_gen [Open_wronly; Open_creat] 0o666 file in @@ -331,32 +379,33 @@ let write_strings_to_file ~file ss = (* could be in control section too *) -let filemtime file = - (Unix.stat file).Unix.st_mtime +let filemtime file = (Unix.stat file).Unix.st_mtime external lutimes : string -> unit = "hh_lutimes" let try_touch ~follow_symlinks file = try - if follow_symlinks then Unix.utimes file 0.0 0.0 - else lutimes file - with _ -> - () + if follow_symlinks then + Unix.utimes file 0.0 0.0 + else + lutimes file + with _ -> () -let mkdir_p ?(skip_mocking=false) = - if skip_mocking then RealDisk.mkdir_p - else Disk.mkdir_p +let mkdir_p ?(skip_mocking = false) = + if skip_mocking then + 
RealDisk.mkdir_p + else + Disk.mkdir_p (* Emulate "mkdir -p", i.e., no error if already exists. *) let mkdir_no_fail dir = - with_umask 0 begin fun () -> - (* Don't set sticky bit since the socket opening code wants to remove any - * old sockets it finds, which may be owned by a different user. *) - try Unix.mkdir dir 0o777 with Unix.Unix_error (Unix.EEXIST, _, _) -> () - end + with_umask 0 (fun () -> + (* Don't set sticky bit since the socket opening code wants to remove any + * old sockets it finds, which may be owned by a different user. *) + try Unix.mkdir dir 0o777 with Unix.Unix_error (Unix.EEXIST, _, _) -> ()) let unlink_no_fail fn = - try Unix.unlink fn with Unix.Unix_error (Unix.ENOENT, _, _) -> () + (try Unix.unlink fn with Unix.Unix_error (Unix.ENOENT, _, _) -> ()) let readlink_no_fail fn = if Sys.win32 && Sys.file_exists fn then @@ -370,7 +419,7 @@ let splitext filename = (* -1 because the extension includes the period, e.g. ".foo" *) let ext_length = String.length filename - root_length - 1 in let ext = String.sub filename (root_length + 1) ext_length in - root, ext + (root, ext) let is_test_mode () = try @@ -378,8 +427,7 @@ let is_test_mode () = true with _ -> false -let sleep ~seconds = - ignore @@ Unix.select [] [] [] seconds +let sleep ~seconds = ignore @@ Unix.select [] [] [] seconds let symlink = (* Dummy implementation of `symlink` on Windows: we create a text @@ -387,72 +435,102 @@ let symlink = on Windows since Vista, but until Seven (included), one should have administratrive rights in order to create symlink. *) let win32_symlink source dest = write_file ~file:dest source in - if Sys.win32 - then win32_symlink + if Sys.win32 then + win32_symlink else (* 4.03 adds an optional argument to Unix.symlink that we want to ignore *) - fun source dest -> Unix.symlink source dest + fun source dest -> + Unix.symlink source dest (* Creates a symlink at / to * //-. *) let make_link_of_timestamped linkname = - let open Unix in - let dir = Filename.dirname linkname in - mkdir_no_fail dir; - let base = Filename.basename linkname in - let base, ext = splitext base in - let dir = Filename.concat dir (Printf.sprintf "%ss" ext) in - mkdir_no_fail dir; - let tm = localtime (time ()) in - let year = tm.tm_year + 1900 in - let time_str = Printf.sprintf "%d-%02d-%02d-%02d-%02d-%02d" - year (tm.tm_mon + 1) tm.tm_mday tm.tm_hour tm.tm_min tm.tm_sec in - let filename = Filename.concat dir - (Printf.sprintf "%s-%s.%s" base time_str ext) in - unlink_no_fail linkname; - symlink filename linkname; - filename + Unix.( + let dir = Filename.dirname linkname in + mkdir_no_fail dir; + let base = Filename.basename linkname in + let (base, ext) = splitext base in + let dir = Filename.concat dir (Printf.sprintf "%ss" ext) in + mkdir_no_fail dir; + let tm = localtime (time ()) in + let year = tm.tm_year + 1900 in + let time_str = + Printf.sprintf + "%d-%02d-%02d-%02d-%02d-%02d" + year + (tm.tm_mon + 1) + tm.tm_mday + tm.tm_hour + tm.tm_min + tm.tm_sec + in + let filename = + Filename.concat dir (Printf.sprintf "%s-%s.%s" base time_str ext) + in + unlink_no_fail linkname; + symlink filename linkname; + filename) let setsid = (* Not implemented on Windows. 
Let's just return the pid *) - if Sys.win32 then Unix.getpid else Unix.setsid + if Sys.win32 then + Unix.getpid + else + Unix.setsid + +let set_signal = + if not Sys.win32 then + Sys.set_signal + else + fun _ _ -> + () -let set_signal = if not Sys.win32 then Sys.set_signal else (fun _ _ -> ()) let signal = - if not Sys.win32 - then (fun a b -> ignore (Sys.signal a b)) - else (fun _ _ -> ()) + if not Sys.win32 then + fun a b -> + ignore (Sys.signal a b) + else + fun _ _ -> + () external get_total_ram : unit -> int = "hh_sysinfo_totalram" + external uptime : unit -> int = "hh_sysinfo_uptime" -external nproc: unit -> int = "nproc" + +external nproc : unit -> int = "nproc" let total_ram = get_total_ram () + let nbr_procs = nproc () -external set_priorities : cpu_priority:int -> io_priority:int -> unit = - "hh_set_priorities" +external set_priorities : cpu_priority:int -> io_priority:int -> unit + = "hh_set_priorities" -external pid_of_handle: int -> int = "pid_of_handle" -external handle_of_pid_for_termination: int -> int = - "handle_of_pid_for_termination" +external pid_of_handle : int -> int = "pid_of_handle" + +external handle_of_pid_for_termination : int -> int + = "handle_of_pid_for_termination" let terminate_process pid = Unix.kill pid Sys.sigkill let lstat path = (* WTF, on Windows `lstat` fails if a directory path ends with an '/' (or a '\', whatever) *) - Unix.lstat @@ + Unix.lstat + @@ if Sys.win32 && String_utils.string_ends_with path Filename.dir_sep then String.sub path 0 (String.length path - 1) else path let normalize_filename_dir_sep = - let dir_sep_char = String.get Filename.dir_sep 0 in - String.map (fun c -> if c = dir_sep_char then '/' else c) - + let dir_sep_char = Filename.dir_sep.[0] in + String.map (fun c -> + if c = dir_sep_char then + '/' + else + c) let name_of_signal = function | s when s = Sys.sigabrt -> "SIGABRT (Abnormal termination)" @@ -473,7 +551,8 @@ let name_of_signal = function | s when s = Sys.sigstop -> "SIGSTOP (Stop)" | s when s = Sys.sigtstp -> "SIGTSTP (Interactive stop)" | s when s = Sys.sigttin -> "SIGTTIN (Terminal read from background process)" - | s when s = Sys.sigttou -> "SIGTTOU (Terminal write from background process)" + | s when s = Sys.sigttou -> + "SIGTTOU (Terminal write from background process)" | s when s = Sys.sigvtalrm -> "SIGVTALRM (Timeout in virtual time)" | s when s = Sys.sigprof -> "SIGPROF (Profiling interrupt)" | s when s = Sys.sigbus -> "SIGBUS (Bus error)" @@ -494,12 +573,13 @@ type cpu_info = { cpu_system: float; cpu_idle: float; } + type processor_info = { proc_totals: cpu_info; proc_per_cpu: cpu_info array; } -external processor_info: unit -> processor_info = "hh_processor_info" +external processor_info : unit -> processor_info = "hh_processor_info" (* We implement timers using sigalarm which means selects can be interrupted. This is a wrapper * around EINTR which continues the select if it gets interrupted by a signal *) @@ -509,9 +589,11 @@ let rec select_non_intr read write exn timeout = with Unix.Unix_error (Unix.EINTR, _, _) -> (* Negative timeouts mean no timeout *) let timeout = - if timeout < 0.0 - then timeout - else max 0.0 (timeout -. (Unix.gettimeofday () -. start_time)) in + if timeout < 0.0 then + timeout + else + max 0.0 (timeout -. (Unix.gettimeofday () -. start_time)) + in select_non_intr read write exn timeout (* Flow uses lwt, which installs a sigchld handler. 
So the old pattern of fork & waitpid will hit @@ -523,39 +605,56 @@ let rec waitpid_non_intr flags pid = (* Exposing this for a unit test *) let find_oom_in_dmesg_output pid name lines = - let re = Str.regexp (Printf.sprintf - "Out of memory: Kill process \\([0-9]+\\) (%s)" name) in - List.exists lines begin fun line -> - try - ignore @@ Str.search_forward re line 0; - let pid_s = Str.matched_group 1 line in - int_of_string pid_s = pid - with Not_found -> false - end + let re = + Str.regexp + (Printf.sprintf "Out of memory: Kill process \\([0-9]+\\) (%s)" name) + in + List.exists lines (fun line -> + try + ignore @@ Str.search_forward re line 0; + let pid_s = Str.matched_group 1 line in + int_of_string pid_s = pid + with Not_found -> false) let check_dmesg_for_oom pid name = - let dmesg = exec_read_lines ~reverse:true "dmesg" in - find_oom_in_dmesg_output pid name dmesg + let dmesg = exec_read_lines ~reverse:true "dmesg" in + find_oom_in_dmesg_output pid name dmesg (* Be careful modifying the rusage type! Like other types that interact with C, the order matters! * If you change things here you must update hh_getrusage too! *) type rusage = { - ru_maxrss: int; (* maximum resident set size *) - ru_ixrss: int; (* integral shared memory size *) - ru_idrss: int; (* integral unshared data size *) - ru_isrss: int; (* integral unshared stack size *) - ru_minflt: int; (* page reclaims (soft page faults) *) - ru_majflt: int; (* page faults (hard page faults) *) - ru_nswap: int; (* swaps *) - ru_inblock: int; (* block input operations *) - ru_oublock: int; (* block output operations *) - ru_msgsnd: int; (* IPC messages sent *) - ru_msgrcv: int; (* IPC messages received *) - ru_nsignals: int; (* signals received *) - ru_nvcsw: int; (* voluntary context switches *) - ru_nivcsw: int; (* involuntary context switches *) + ru_maxrss: int; + (* maximum resident set size *) + ru_ixrss: int; + (* integral shared memory size *) + ru_idrss: int; + (* integral unshared data size *) + ru_isrss: int; + (* integral unshared stack size *) + ru_minflt: int; + (* page reclaims (soft page faults) *) + ru_majflt: int; + (* page faults (hard page faults) *) + ru_nswap: int; + (* swaps *) + ru_inblock: int; + (* block input operations *) + ru_oublock: int; + (* block output operations *) + ru_msgsnd: int; + (* IPC messages sent *) + ru_msgrcv: int; + (* IPC messages received *) + ru_nsignals: int; + (* signals received *) + ru_nvcsw: int; + (* voluntary context switches *) + ru_nivcsw: int; (* involuntary context switches *) } -external getrusage: unit -> rusage = "hh_getrusage" -external start_gc_profiling: unit -> unit = "hh_start_gc_profiling" [@@noalloc] -external get_gc_time: unit -> float * float = "hh_get_gc_time" +external getrusage : unit -> rusage = "hh_getrusage" + +external start_gc_profiling : unit -> unit = "hh_start_gc_profiling" + [@@noalloc] + +external get_gc_time : unit -> float * float = "hh_get_gc_time" diff --git a/hack/utils/sys/tail.ml b/hack/utils/sys/tail.ml deleted file mode 100644 index 51600efcd7a..00000000000 --- a/hack/utils/sys/tail.ml +++ /dev/null @@ -1,79 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. 
- * - *) - -type env = - { filename : string; - mutable ic_opt : in_channel option; - mutable lines : string list; - mutable last_line : string option; - } - -let open_in_opt filename = - let open Unix in - try - Sys_utils.mkdir_no_fail (Filename.dirname filename); - let fd = openfile filename [O_RDONLY;O_NONBLOCK;O_CREAT] 0o777 in - let ic = in_channel_of_descr fd in - begin set_binary_mode_in ic false; Some(ic) end - with _ -> - (Printf.eprintf "Tail.open_in_opt: Couldn't open file %s\n%!" filename; - None) - -let create_env filename = - { filename; ic_opt=None; lines=[]; last_line=None } - -let open_env env = - match env.ic_opt with - | None -> - begin - let target_file = Sys_utils.readlink_no_fail env.filename in - env.lines <- []; - env.last_line <- None; - env.ic_opt <- open_in_opt target_file; - end - | Some _ -> () - -let close_env env = - env.lines <- []; - match env.ic_opt with - | None -> () - | Some ic -> (close_in ic; env.ic_opt <- None) - -let update_env filter_fn env = - match env.ic_opt with - | None -> () - | Some ic -> - let continue = ref true in - let line = ref "" in - while !continue do - try - line := input_line ic; - if filter_fn !line then - begin - env.lines <- !line :: env.lines; - env.last_line <- Some(!line) - end - with End_of_file -> - continue := false; - done - -let is_open_env env = - match env.ic_opt with - | None -> false - | Some _ -> true - -let last_line env = - match env.ic_opt, env.last_line with - | None, _ -> "" - | Some _, None -> "" - | Some _, Some l -> l - -let get_lines env = env.lines - -let set_lines env l = env.lines <- l diff --git a/hack/utils/sys/tail.mli b/hack/utils/sys/tail.mli deleted file mode 100644 index 8738b672206..00000000000 --- a/hack/utils/sys/tail.mli +++ /dev/null @@ -1,26 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -type env - -val create_env: string -> env - -val open_env: env -> unit - -val close_env: env -> unit - -val update_env: (string -> bool) -> env -> unit - -val is_open_env: env -> bool - -val last_line: env -> string - -val get_lines: env -> string list - -val set_lines: env -> string list -> unit diff --git a/hack/utils/sys/timeout.ml b/hack/utils/sys/timeout.ml index 60cd7da892c..02cd895a346 100644 --- a/hack/utils/sys/timeout.ml +++ b/hack/utils/sys/timeout.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -12,13 +12,16 @@ exception Timeout of int (* The IDs are used to tell the difference between timeout A timing out and timeout B timing out. * So they only really need to be unique between any two active timeouts in the same process. 
*) let id_counter = ref 0 -let mk_id () = incr id_counter; !id_counter -module Alarm_timeout = struct +let mk_id () = + incr id_counter; + !id_counter +module Alarm_timeout = struct (** Timeout *) type t = int + let with_timeout ~timeout ~on_timeout ~do_ = let id = mk_id () in let callback () = raise (Timeout id) in @@ -27,14 +30,14 @@ module Alarm_timeout = struct let ret = try do_ id with exn -> + let stack = Printexc.get_raw_backtrace () in (* Any uncaught exception will cancel the timeout *) Timer.cancel_timer timer; - raise exn + Printexc.raise_with_backtrace exn stack in Timer.cancel_timer timer; ret - with Timeout exn_id when exn_id = id -> - on_timeout () + with Timeout exn_id when exn_id = id -> on_timeout () let check_timeout _ = () @@ -43,10 +46,15 @@ module Alarm_timeout = struct (** Channel *) type in_channel = Pervasives.in_channel * int option + let ignore_timeout f ?timeout:_ (ic, _pid) = f ic + let input = ignore_timeout Pervasives.input + let really_input = ignore_timeout Pervasives.really_input + let input_char = ignore_timeout Pervasives.input_char + let input_line = ignore_timeout Pervasives.input_line let input_value_with_workaround ic = @@ -57,23 +65,28 @@ module Alarm_timeout = struct * http://caml.inria.fr/mantis/view.php?id=7142 *) try Pervasives.input_value ic with Failure msg as e -> - if msg = "input_value: truncated object" - then Pervasives.input_char ic |> ignore; + if msg = "input_value: truncated object" then + Pervasives.input_char ic |> ignore; raise e let input_value = ignore_timeout input_value_with_workaround - let open_in name = Pervasives.open_in name, None + + let open_in name = (Pervasives.open_in name, None) + let close_in (ic, _) = Pervasives.close_in ic + let close_in_noerr (ic, _) = Pervasives.close_in_noerr ic - let in_channel_of_descr fd = Unix.in_channel_of_descr fd, None + + let in_channel_of_descr fd = (Unix.in_channel_of_descr fd, None) + let descr_of_in_channel (ic, _) = Unix.descr_of_in_channel ic + let open_process cmd args = - let child_in_fd, out_fd = Unix.pipe () in - let in_fd, child_out_fd = Unix.pipe () in + let (child_in_fd, out_fd) = Unix.pipe () in + let (in_fd, child_out_fd) = Unix.pipe () in Unix.set_close_on_exec in_fd; Unix.set_close_on_exec out_fd; - let pid = - Unix.(create_process cmd args child_in_fd child_out_fd stderr) in + let pid = Unix.(create_process cmd args child_in_fd child_out_fd stderr) in Unix.close child_out_fd; Unix.close child_in_fd; let ic = (Unix.in_channel_of_descr in_fd, Some pid) in @@ -81,13 +94,12 @@ module Alarm_timeout = struct (ic, oc) let open_process_in cmd args = - let child_in_fd, out_fd = Unix.pipe () in - let in_fd, child_out_fd = Unix.pipe () in + let (child_in_fd, out_fd) = Unix.pipe () in + let (in_fd, child_out_fd) = Unix.pipe () in Unix.set_close_on_exec in_fd; Unix.set_close_on_exec out_fd; Unix.close out_fd; - let pid = - Unix.(create_process cmd args child_in_fd child_out_fd stderr) in + let pid = Unix.(create_process cmd args child_in_fd child_out_fd stderr) in Unix.close child_out_fd; Unix.close child_in_fd; let ic = (Unix.in_channel_of_descr in_fd, Some pid) in @@ -97,44 +109,44 @@ module Alarm_timeout = struct match pid with | None -> invalid_arg "Timeout.close_process_in" | Some pid -> - Pervasives.close_in ic; - snd(Sys_utils.waitpid_non_intr [] pid) + Pervasives.close_in ic; + snd (Sys_utils.waitpid_non_intr [] pid) let read_process ~timeout ~on_timeout ~reader cmd args = let (ic, oc) = open_process cmd args in - with_timeout ~timeout ~on_timeout - ~do_:(fun timeout -> - 
try reader timeout ic oc - with exn -> close_in ic; close_out oc; raise exn) + with_timeout ~timeout ~on_timeout ~do_:(fun timeout -> + try reader timeout ic oc + with exn -> + close_in ic; + close_out oc; + raise exn) let open_connection ?timeout:_ sockaddr = (* timeout isn't used in this Alarm_timeout implementation, but is used in Select_timeout *) let (ic, oc) = Unix.open_connection sockaddr in ((ic, None), oc) - let shutdown_connection (ic, _) = - Unix.shutdown_connection ic + let shutdown_connection (ic, _) = Unix.shutdown_connection ic let is_timeout_exn id = function - | Timeout exn_id -> exn_id = id - | _ -> false - + | Timeout exn_id -> exn_id = id + | _ -> false end module Select_timeout = struct - (** Timeout *) type t = { timeout: float; id: int; } + let create timeout = { timeout = Unix.gettimeofday () +. timeout; id = mk_id () } + let with_timeout ~timeout ~on_timeout ~do_ = let t = create (float timeout) in - try do_ t - with Timeout exn_id when exn_id = t.id -> on_timeout () + (try do_ t with Timeout exn_id when exn_id = t.id -> on_timeout ()) let check_timeout t = if Unix.gettimeofday () > t.timeout then raise (Timeout t.id) @@ -151,7 +163,7 @@ module Select_timeout = struct type in_channel = channel - let buffer_size = 65536-9 (* From ocaml/byterun/io.h *) + let buffer_size = 65536 - 9 (* From ocaml/byterun/io.h *) let in_channel_of_descr fd = let buf = Bytes.create buffer_size in @@ -165,16 +177,14 @@ module Select_timeout = struct let close_in tic = Unix.close tic.fd - let close_in_noerr tic = - try Unix.close tic.fd - with _ -> () + let close_in_noerr tic = (try Unix.close tic.fd with _ -> ()) let close_process_in tic = match tic.pid with | None -> invalid_arg "Timeout.close_process_in" | Some pid -> - close_in tic; - snd (Sys_utils.waitpid_non_intr [] pid) + close_in tic; + snd (Sys_utils.waitpid_non_intr [] pid) (* A negative timeout for select means block until a fd is ready *) let no_select_timeout = ~-.1.0 @@ -185,35 +195,41 @@ module Select_timeout = struct match timeout with (* No timeout set, fallback to Sys_utils.select_non_intr *) | None -> Sys_utils.select_non_intr rfds wfds xfds select_timeout - | Some { timeout; id } -> + | Some { timeout; id } -> let timeout = timeout -. Unix.gettimeofday () in (* Whoops, timeout already fired, throw right away! *) if timeout < 0. then raise (Timeout id); + (* A negative select_timeout would mean wait forever *) - if select_timeout >= 0.0 && select_timeout < timeout - (* The select's timeout is smaller than our timeout, so leave it alone *) - then Sys_utils.select_non_intr rfds wfds xfds select_timeout - else + if + select_timeout >= 0.0 && select_timeout < timeout + (* The select's timeout is smaller than our timeout, so leave it alone *) + then + Sys_utils.select_non_intr rfds wfds xfds select_timeout + else ( (* Our timeout is smaller, so use that *) match Sys_utils.select_non_intr rfds wfds xfds timeout with (* Timeout hit! Throw an exception! *) - | [], [], [] -> raise (Timeout id) + | ([], [], []) -> raise (Timeout id) (* Got a result before the timeout fired, so just return that *) | ret -> ret + ) let do_read ?timeout tic = - match select ?timeout [ tic.fd ] [] [] no_select_timeout with - | [], _, _ -> + match select ?timeout [tic.fd] [] [] no_select_timeout with + | ([], _, _) -> failwith "This should be unreachable. How did select return with no fd when there is no timeout?" 
- | [_], _, _ -> - let read = try - Unix.read tic.fd tic.buf tic.max (buffer_size - tic.max) - with Unix.Unix_error (Unix.EPIPE, _, _) -> - raise End_of_file in - tic.max <- tic.max + read; - read - | _ :: _, _, _-> assert false (* Should never happen *) + | ([_], _, _) -> + let read = + try Unix.read tic.fd tic.buf tic.max (buffer_size - tic.max) + with Unix.Unix_error (Unix.EPIPE, _, _) -> raise End_of_file + in + tic.max <- tic.max + read; + read + | (_ :: _, _, _) -> assert false + + (* Should never happen *) let refill ?timeout tic = tic.curr <- 0; @@ -223,23 +239,30 @@ module Select_timeout = struct nread let unsafe_input ?timeout tic s ofs len = - let n = if len > max_int then max_int else len in + let n = + if len > max_int then + max_int + else + len + in let avail = tic.max - tic.curr in - if n <= avail then begin (* There is enough to read in the buffer. *) + if n <= avail then ( + (* There is enough to read in the buffer. *) Bytes.blit tic.buf tic.curr s ofs n; tic.curr <- tic.curr + n; n - end else if avail > 0 then begin (* Read the rest of the buffer. *) + ) else if avail > 0 then ( + (* Read the rest of the buffer. *) Bytes.blit tic.buf tic.curr s ofs avail; tic.curr <- tic.curr + avail; avail - end else begin (* No input to read, refill buffer. *) + ) else + (* No input to read, refill buffer. *) let nread = refill ?timeout tic in let n = min nread n in Bytes.blit tic.buf tic.curr s ofs n; tic.curr <- tic.curr + n; n - end let input ?timeout tic s ofs len = if ofs < 0 || len < 0 || ofs > Bytes.length s - len then @@ -259,66 +282,62 @@ module Select_timeout = struct if Bytes.get tic.buf pos = '\n' then pos - tic.curr + 1 else - scan_line tic (pos+1) - else begin + scan_line tic (pos + 1) + else let pos = - if tic.curr <> 0 then begin + if tic.curr <> 0 then ( tic.max <- tic.max - tic.curr; Bytes.blit tic.buf tic.curr tic.buf 0 tic.max; tic.curr <- 0; tic.max - end else + ) else pos in if tic.max = buffer_size then - - (tic.max - tic.curr) + -(tic.max - tic.curr) else let nread = do_read ?timeout tic in if nread = 0 then - - (tic.max - tic.curr) - else begin + -(tic.max - tic.curr) + else scan_line tic pos - end - end in + in scan_line tic tic.curr - let input_line ?timeout tic = - + let input_line ?timeout tic = let rec build_result buf pos = function | [] -> buf | hd :: tl -> - let len = Bytes.length hd in - Bytes.blit hd 0 buf (pos - len) len; - build_result buf (pos - len) tl in - + let len = Bytes.length hd in + Bytes.blit hd 0 buf (pos - len) len; + build_result buf (pos - len) tl + in let rec scan accu len = - let n = input_scan_line ?timeout tic in - (* End of file, if accu is not empty, return the last line. *) - if n = 0 then begin + if n = 0 then match accu with | [] -> raise End_of_file | _ -> build_result (Bytes.create len) len accu - - (* New line found in the buffer. *) - end else if n > 0 then begin - let result = Bytes.create (n - 1) in (* No need to keep '\n' *) + (* New line found in the buffer. 
*) + else if n > 0 then ( + let result = Bytes.create (n - 1) in + (* No need to keep '\n' *) ignore (unsafe_input tic result 0 (n - 1)); - ignore (input_char tic); (* Skip newline *) + ignore (input_char tic); + + (* Skip newline *) match accu with | [] -> result | _ -> - let len = len + n - 1 in - build_result (Bytes.create len) len (result :: accu) - + let len = len + n - 1 in + build_result (Bytes.create len) len (result :: accu) (* New line not found in the buffer *) - end else begin + ) else let ofs = Bytes.create (-n) in ignore (unsafe_input tic ofs 0 (-n)); scan (ofs :: accu) (len - n) - end in - + in Bytes.unsafe_to_string (scan [] 0) let rec unsafe_really_input ?timeout tic buf ofs len = @@ -326,9 +345,10 @@ module Select_timeout = struct () else let r = unsafe_input ?timeout tic buf ofs len in - if r = 0 - then raise End_of_file - else unsafe_really_input ?timeout tic buf (ofs + r) (len - r) + if r = 0 then + raise End_of_file + else + unsafe_really_input ?timeout tic buf (ofs + r) (len - r) let really_input ?timeout tic buf ofs len = if ofs < 0 || len < 0 || ofs > Bytes.length buf - len then @@ -339,14 +359,14 @@ module Select_timeout = struct (** Marshal *) let marshal_magic = Bytes.of_string "\x84\x95\xA6\xBE" + let input_value ?timeout tic = let magic = Bytes.create 4 in Bytes.set magic 0 (input_char ?timeout tic); Bytes.set magic 1 (input_char ?timeout tic); Bytes.set magic 2 (input_char ?timeout tic); Bytes.set magic 3 (input_char ?timeout tic); - if magic <> marshal_magic then - failwith "Select.input_value: bad object."; + if magic <> marshal_magic then failwith "Select.input_value: bad object."; let b1 = int_of_char (input_char ?timeout tic) in let b2 = int_of_char (input_char ?timeout tic) in let b3 = int_of_char (input_char ?timeout tic) in @@ -360,20 +380,18 @@ module Select_timeout = struct Bytes.set data 7 (char_of_int b4); begin try unsafe_really_input ?timeout tic data 8 len - with End_of_file -> - failwith "Select.input_value: truncated object." + with End_of_file -> failwith "Select.input_value: truncated object." 
end; Marshal.from_bytes data 0 (** Process *) let open_process cmd args = - let child_in_fd, out_fd = Unix.pipe () in - let in_fd, child_out_fd = Unix.pipe () in + let (child_in_fd, out_fd) = Unix.pipe () in + let (in_fd, child_out_fd) = Unix.pipe () in Unix.set_close_on_exec in_fd; Unix.set_close_on_exec out_fd; - let pid = - Unix.(create_process cmd args child_in_fd child_out_fd stderr) in + let pid = Unix.(create_process cmd args child_in_fd child_out_fd stderr) in Unix.close child_out_fd; Unix.close child_in_fd; let tic = in_channel_of_descr in_fd in @@ -382,13 +400,12 @@ module Select_timeout = struct (tic, oc) let open_process_in cmd args = - let child_in_fd, out_fd = Unix.pipe () in - let in_fd, child_out_fd = Unix.pipe () in + let (child_in_fd, out_fd) = Unix.pipe () in + let (in_fd, child_out_fd) = Unix.pipe () in Unix.set_close_on_exec in_fd; Unix.set_close_on_exec out_fd; Unix.close out_fd; - let pid = - Unix.(create_process cmd args child_in_fd child_out_fd stderr) in + let pid = Unix.(create_process cmd args child_in_fd child_out_fd stderr) in Unix.close child_out_fd; Unix.close child_in_fd; let tic = in_channel_of_descr in_fd in @@ -402,47 +419,49 @@ module Select_timeout = struct tic.pid <- None; on_timeout () in - with_timeout ~timeout ~on_timeout - ~do_:(fun timeout -> - try reader timeout tic oc - with exn -> - Option.iter ~f:Sys_utils.terminate_process tic.pid; - tic.pid <- None; - close_in tic; - close_out oc; - raise exn) + with_timeout ~timeout ~on_timeout ~do_:(fun timeout -> + try reader timeout tic oc + with exn -> + Option.iter ~f:Sys_utils.terminate_process tic.pid; + tic.pid <- None; + close_in tic; + close_out oc; + raise exn) (** Socket *) let open_connection ?timeout sockaddr = let connect sock sockaddr = - try - (* connect binds the fd sock to the socket at sockaddr. If sock is nonblocking, and the - * connect call would block, it errors. You can then use select to wait for the connect - * to finish. - * - * On Windows, if the connect succeeds, sock will be returned in the writable fd set. - * If the connect fails, the sock will be returned in the exception fd set. - * https://msdn.microsoft.com/en-us/library/windows/desktop/ms737625(v=vs.85).aspx - * - * On Linux, the sock will always be returned in the writable fd set, and you're supposed - * to use getsockopt to read the SO_ERROR option at level SOL_SOCKET to figure out if the - * connect worked. However, this code is only used on Windows, so that's fine *) - Unix.connect sock sockaddr; - with - | Unix.Unix_error ((Unix.EINPROGRESS | Unix.EWOULDBLOCK), _, _) -> begin + (* connect binds the fd sock to the socket at sockaddr. If sock is nonblocking, and the + * connect call would block, it errors. You can then use select to wait for the connect + * to finish. + * + * On Windows, if the connect succeeds, sock will be returned in the writable fd set. + * If the connect fails, the sock will be returned in the exception fd set. + * https://msdn.microsoft.com/en-us/library/windows/desktop/ms737625(v=vs.85).aspx + * + * On Linux, the sock will always be returned in the writable fd set, and you're supposed + * to use getsockopt to read the SO_ERROR option at level SOL_SOCKET to figure out if the + * connect worked. 
However, this code is only used on Windows, so that's fine *) + try Unix.connect sock sockaddr with + | Unix.Unix_error ((Unix.EINPROGRESS | Unix.EWOULDBLOCK), _, _) -> + begin match select ?timeout [] [sock] [] no_select_timeout with - | _, [], [exn_sock] when exn_sock = sock -> + | (_, [], [exn_sock]) when exn_sock = sock -> failwith "Failed to connect to socket" - | _, [], _ -> + | (_, [], _) -> failwith - "This should be unreachable. How did select return with no fd when there is no timeout?" - | _, [_sock], _ -> () - | _, _, _ -> assert false + "This should be unreachable. How did select return with no fd when there is no timeout?" + | (_, [_sock], _) -> () + | (_, _, _) -> assert false end - | exn -> Unix.close sock; raise exn in + | exn -> + Unix.close sock; + raise exn + in let sock = - Unix.socket (Unix.domain_of_sockaddr sockaddr) Unix.SOCK_STREAM 0 in + Unix.socket (Unix.domain_of_sockaddr sockaddr) Unix.SOCK_STREAM 0 + in Unix.set_nonblock sock; connect sock sockaddr; Unix.clear_nonblock sock; @@ -451,67 +470,85 @@ module Select_timeout = struct let oc = Unix.out_channel_of_descr sock in (tic, oc) - let shutdown_connection { fd; _ } = - Unix.(shutdown fd SHUTDOWN_SEND) - - let is_timeout_exn {id; timeout = _;} = function - | Timeout exn_id -> exn_id = id - | _ -> false + let shutdown_connection { fd; _ } = Unix.(shutdown fd SHUTDOWN_SEND) + let is_timeout_exn { id; timeout = _ } = function + | Timeout exn_id -> exn_id = id + | _ -> false end module type S = sig - type t - val with_timeout: - timeout:int -> - on_timeout:(unit -> 'a) -> - do_:(t -> 'a) -> 'a - val check_timeout: t -> unit + + val with_timeout : + timeout:int -> on_timeout:(unit -> 'a) -> do_:(t -> 'a) -> 'a + + val check_timeout : t -> unit type in_channel - val in_channel_of_descr: Unix.file_descr -> in_channel - val descr_of_in_channel: in_channel -> Unix.file_descr - val open_in: string -> in_channel - val close_in: in_channel -> unit - val close_in_noerr: in_channel -> unit - val select: + + val in_channel_of_descr : Unix.file_descr -> in_channel + + val descr_of_in_channel : in_channel -> Unix.file_descr + + val open_in : string -> in_channel + + val close_in : in_channel -> unit + + val close_in_noerr : in_channel -> unit + + val select : ?timeout:t -> Unix.file_descr list -> Unix.file_descr list -> Unix.file_descr list -> float -> Unix.file_descr list * Unix.file_descr list * Unix.file_descr list - val input: ?timeout:t -> in_channel -> bytes -> int -> int -> int - val really_input: ?timeout:t -> in_channel -> bytes -> int -> int -> unit - val input_char: ?timeout:t -> in_channel -> char - val input_line: ?timeout:t -> in_channel -> string - val input_value: ?timeout:t -> in_channel -> 'a - val open_process: string -> string array -> in_channel * out_channel - val open_process_in: string -> string array -> in_channel - val close_process_in: in_channel -> Unix.process_status - val read_process: + + val input : ?timeout:t -> in_channel -> bytes -> int -> int -> int + + val really_input : ?timeout:t -> in_channel -> bytes -> int -> int -> unit + + val input_char : ?timeout:t -> in_channel -> char + + val input_line : ?timeout:t -> in_channel -> string + + val input_value : ?timeout:t -> in_channel -> 'a + + val open_process : string -> string array -> in_channel * out_channel + + val open_process_in : string -> string array -> in_channel + + val close_process_in : in_channel -> Unix.process_status + + val read_process : timeout:int -> on_timeout:(unit -> 'a) -> reader:(t -> in_channel -> out_channel -> 'a) -> 
- string -> string array -> 'a - val open_connection: - ?timeout:t -> Unix.sockaddr -> in_channel * out_channel - val shutdown_connection: in_channel -> unit + string -> + string array -> + 'a + + val open_connection : ?timeout:t -> Unix.sockaddr -> in_channel * out_channel - val is_timeout_exn: t -> exn -> bool + val shutdown_connection : in_channel -> unit + + val is_timeout_exn : t -> exn -> bool end let select = (module Select_timeout : S) + let alarm = (module Alarm_timeout : S) -include (val (if Sys.win32 then select else alarm)) +include ( val if Sys.win32 then + select + else + alarm ) let read_connection ~timeout ~on_timeout ~reader sockaddr = - with_timeout ~timeout ~on_timeout - ~do_:(fun timeout -> - let (tic, oc) = open_connection ~timeout sockaddr in - try reader timeout tic oc - with exn -> - close_out oc; - raise exn) + with_timeout ~timeout ~on_timeout ~do_:(fun timeout -> + let (tic, oc) = open_connection ~timeout sockaddr in + try reader timeout tic oc + with exn -> + close_out oc; + raise exn) diff --git a/hack/utils/sys/timeout.mli b/hack/utils/sys/timeout.mli index b7da42c3010..ec76449fe6e 100644 --- a/hack/utils/sys/timeout.mli +++ b/hack/utils/sys/timeout.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -30,23 +30,24 @@ type t `Pervasives.in_channel`. *) -val with_timeout: - timeout:int -> - on_timeout:(unit -> 'a) -> - do_:(t -> 'a) -> 'a +val with_timeout : + timeout:int -> on_timeout:(unit -> 'a) -> do_:(t -> 'a) -> 'a -val check_timeout: t -> unit +val check_timeout : t -> unit type in_channel -val open_in: string -> in_channel -val close_in: in_channel -> unit -val close_in_noerr: in_channel -> unit +val open_in : string -> in_channel + +val close_in : in_channel -> unit + +val close_in_noerr : in_channel -> unit -val in_channel_of_descr: Unix.file_descr -> in_channel -val descr_of_in_channel: in_channel -> Unix.file_descr +val in_channel_of_descr : Unix.file_descr -> in_channel -val select: +val descr_of_in_channel : in_channel -> Unix.file_descr + +val select : ?timeout:t -> Unix.file_descr list -> Unix.file_descr list -> @@ -54,30 +55,41 @@ val select: float -> Unix.file_descr list * Unix.file_descr list * Unix.file_descr list -val input: ?timeout:t -> in_channel -> bytes -> int -> int -> int -val really_input: ?timeout:t -> in_channel -> bytes -> int -> int -> unit -val input_char: ?timeout:t -> in_channel -> char -val input_line: ?timeout:t -> in_channel -> string -val input_value: ?timeout:t -> in_channel -> 'a +val input : ?timeout:t -> in_channel -> bytes -> int -> int -> int + +val really_input : ?timeout:t -> in_channel -> bytes -> int -> int -> unit + +val input_char : ?timeout:t -> in_channel -> char -val open_process: string -> string array -> in_channel * out_channel -val open_process_in: string -> string array -> in_channel -val close_process_in: in_channel -> Unix.process_status -val read_process: +val input_line : ?timeout:t -> in_channel -> string + +val input_value : ?timeout:t -> in_channel -> 'a + +val open_process : string -> string array -> in_channel * out_channel + +val open_process_in : string -> string array -> in_channel + +val close_process_in : in_channel -> Unix.process_status + +val read_process : timeout:int -> on_timeout:(unit -> 'a) -> reader:(t -> in_channel -> out_channel -> 'a) -> - string -> string array -> 'a + string -> + string array -> + 'a + +val open_connection : ?timeout:t -> Unix.sockaddr -> in_channel * out_channel -val open_connection: - ?timeout:t -> Unix.sockaddr -> 
in_channel * out_channel -val read_connection: +val read_connection : timeout:int -> on_timeout:(unit -> 'a) -> reader:(t -> in_channel -> out_channel -> 'a) -> - Unix.sockaddr -> 'a -val shutdown_connection: in_channel -> unit + Unix.sockaddr -> + 'a + +val shutdown_connection : in_channel -> unit (* Some silly people like to catch all exceptions. This means they need to explicitly detect and * reraise the timeout exn. *) -val is_timeout_exn: t -> exn -> bool +val is_timeout_exn : t -> exn -> bool diff --git a/hack/utils/sys/timer.ml b/hack/utils/sys/timer.ml index 7a979058f37..83f88da4e7c 100644 --- a/hack/utils/sys/timer.ml +++ b/hack/utils/sys/timer.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -37,86 +37,95 @@ type timer = { module TimerKey = struct type t = timer + let compare a b = compare a.target_time b.target_time end (* Mutable priority queue with O(log(n)) pushes and pops *) -module TimerQueue = PriorityQueue.Make(TimerKey) +module TimerQueue = PriorityQueue.Make (TimerKey) let next_id = ref 1 + let queue = TimerQueue.make_empty 8 + let current_timer = ref None let cancelled = ref ISet.empty (* Get's the next timer. Any expired timers have their callbacks invoked *) let rec get_next_timer ~exns = - if TimerQueue.is_empty queue - then None, List.rev exns + if TimerQueue.is_empty queue then + (None, List.rev exns) else let timer = TimerQueue.pop queue in - (* Skip cancelled timers *) - if ISet.mem timer.id !cancelled - then get_next_timer ~exns + if ISet.mem timer.id !cancelled then + get_next_timer ~exns else let interval = timer.target_time -. Unix.gettimeofday () in - if interval <= 0.0 - then begin - let exns = try - timer.callback (); - exns - with exn -> exn::exns in + if interval <= 0.0 then + let exns = + try + timer.callback (); + exns + with exn -> exn :: exns + in get_next_timer ~exns - end else - (Some timer), List.rev exns + else + (Some timer, List.rev exns) (* Schedules an alarm for interval seconds *) let schedule_non_recurring interval = - let open Unix in - let interval_timer = { - it_interval = 0.0; (* Don't restart timer when it finishes *) - it_value = interval; (* How long to wait *) - } in - ignore (setitimer ITIMER_REAL interval_timer) + Unix.( + let interval_timer = + { + it_interval = 0.0; + (* Don't restart timer when it finishes *) + it_value = interval (* How long to wait *); + } + in + ignore (setitimer ITIMER_REAL interval_timer)) external reraise : exn -> 'a = "%reraise" let rec ding_fries_are_done _ = - let exns = try - Option.iter !current_timer ~f:(fun timer -> timer.callback ()); - [] - with exn -> [exn] in + let exns = + try + Option.iter !current_timer ~f:(fun timer -> timer.callback ()); + [] + with exn -> [exn] + in current_timer := None; schedule ~exns () -and schedule ?(exns=[]) () = +and schedule ?(exns = []) () = (* Stop the current timer, if there is one, to avoid races *) schedule_non_recurring 0.0; (* If there's a current timer, requeue it *) Option.iter !current_timer ~f:(TimerQueue.push queue); - let timer, exns = get_next_timer ~exns in + let (timer, exns) = get_next_timer ~exns in current_timer := timer; ignore (Sys.signal Sys.sigalrm (Sys.Signal_handle ding_fries_are_done)); (* Start the timer back up *) - Option.iter timer ~f:(fun t -> schedule_non_recurring (t.target_time -. (Unix.gettimeofday ()))); + Option.iter timer ~f:(fun t -> + schedule_non_recurring (t.target_time -. 
Unix.gettimeofday ())); (* If we executed more than one callback this time and more than one callback threw an * exception, then we just arbitrarily choose one to throw. Oh well :/ *) - (match exns with - | exn::_ -> reraise exn - | _ -> ()) + match exns with + | exn :: _ -> reraise exn + | _ -> () (* Will invoke callback () after interval seconds *) let set_timer ~interval ~callback = let target_time = Unix.gettimeofday () +. interval in let id = !next_id in incr next_id; - TimerQueue.push queue { target_time; callback; id; }; + TimerQueue.push queue { target_time; callback; id }; (match !current_timer with | Some current_timer when target_time >= current_timer.target_time -> (* There's currently a timer and the new timer will fire after it. As an optimization we can diff --git a/hack/utils/sys/timer.mli b/hack/utils/sys/timer.mli index a6d556a0eb4..5d716ee1bab 100644 --- a/hack/utils/sys/timer.mli +++ b/hack/utils/sys/timer.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -23,7 +23,7 @@ type t (* Will invoke callback () after interval seconds *) -val set_timer: interval:float -> callback:(unit -> unit) -> t +val set_timer : interval:float -> callback:(unit -> unit) -> t (* Will prevent a future timer from firing *) -val cancel_timer: t -> unit +val cancel_timer : t -> unit diff --git a/hack/utils/sys/tmp.ml b/hack/utils/sys/tmp.ml index 1ed963b6d3e..1ece1554882 100644 --- a/hack/utils/sys/tmp.ml +++ b/hack/utils/sys/tmp.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,18 +7,18 @@ * *) - (*****************************************************************************) (* Handling where our temporary files go *) (*****************************************************************************) (* The missing counterpart to Filename.temp_file. Put in a random location * under get_dir() above. *) -let temp_dir parent_dir prefix = +let temp_dir parent_dir prefix = Sys_utils.mkdir_no_fail parent_dir; let tmpdir = Filename.concat parent_dir - (Printf.sprintf "%s_%06x" prefix (Random.bits ())) in + (Printf.sprintf "%s_%06x" prefix (Random.bits ())) + in Sys_utils.mkdir_no_fail tmpdir; tmpdir diff --git a/hack/utils/sys/tty.ml b/hack/utils/sys/tty.ml index 73aeb8a4ac7..d92389e3c27 100644 --- a/hack/utils/sys/tty.ml +++ b/hack/utils/sys/tty.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. 
* @@ -37,25 +37,25 @@ type color_mode = let text_num = function | Default -> "39" - | Black -> "30" - | Red -> "31" - | Green -> "32" - | Yellow -> "33" - | Blue -> "34" + | Black -> "30" + | Red -> "31" + | Green -> "32" + | Yellow -> "33" + | Blue -> "34" | Magenta -> "35" - | Cyan -> "36" - | White -> "37" + | Cyan -> "36" + | White -> "37" let background_num = function | Default -> "49" - | Black -> "40" - | Red -> "41" - | Green -> "42" - | Yellow -> "43" - | Blue -> "44" + | Black -> "40" + | Red -> "41" + | Green -> "42" + | Yellow -> "43" + | Blue -> "44" | Magenta -> "45" - | Cyan -> "46" - | White -> "47" + | Cyan -> "46" + | White -> "47" let color_num = function | Default -> "0" @@ -63,25 +63,27 @@ let color_num = function let style_num = function | Normal c -> color_num c - | Bold c -> color_num c ^ ";1" - | Dim c -> color_num c ^ ";2" + | Bold c -> color_num c ^ ";1" + | Dim c -> color_num c ^ ";2" | Underline c -> color_num c ^ ";4" | BoldUnderline c -> color_num c ^ ";1;4" | DimUnderline c -> color_num c ^ ";2;4" - | NormalWithBG (text, bg) -> (text_num text) ^ ";" ^ (background_num bg) - | BoldWithBG (text, bg) -> (text_num text) ^ ";" ^ (background_num bg) ^ ";1" + | NormalWithBG (text, bg) -> text_num text ^ ";" ^ background_num bg + | BoldWithBG (text, bg) -> text_num text ^ ";" ^ background_num bg ^ ";1" let supports_color = let memo = ref None in fun () -> - match !memo with Some x -> x | None -> begin - let value = match Sys_utils.getenv_term () with - | None -> false - | Some term -> Unix.isatty Unix.stdout && term <> "dumb" + match !memo with + | Some x -> x + | None -> + let value = + match Sys_utils.getenv_term () with + | None -> false + | Some term -> Unix.isatty Unix.stdout && term <> "dumb" in memo := Some value; value - end let should_color color_mode = match color_mode with @@ -91,61 +93,71 @@ let should_color color_mode = let emoji_spinner = List.map - (* Some terminals display the emoji using only one column, even though they + (* Some terminals display the emoji using only one column, even though they may take up two columns, and put the cursor immediately after it in an illegible manner. Add an extra space to separate the cursor from the emoji. *) ~f:(fun x -> x ^ " ") [ - "\xF0\x9F\x98\xA1"; (* Angry Face *) - "\xF0\x9F\x98\x82"; (* Face With Tears of Joy *) - "\xF0\x9F\xA4\x94"; (* Thinking Face *) - "\xF0\x9F\x92\xAF" (* Hundred Points *) + "\xF0\x9F\x98\xA1"; + (* Angry Face *) + "\xF0\x9F\x98\x82"; + (* Face With Tears of Joy *) + "\xF0\x9F\xA4\x94"; + (* Thinking Face *) + "\xF0\x9F\x92\xAF"; + (* Hundred Points *) + ] (* See https://github.com/yarnpkg/yarn/issues/405. 
*) let supports_emoji () = Sys.os_type <> "Win32" && supports_color () -let print_one ?(color_mode=Color_Auto) ?(out_channel=stdout) c s = - if should_color color_mode - then Printf.fprintf out_channel "\x1b[%sm%s\x1b[0m" (style_num c) (s) - else Printf.fprintf out_channel "%s" s +let apply_color ?(color_mode = Color_Auto) c s : string = + if should_color color_mode then + Printf.sprintf "\x1b[%sm%s\x1b[0m" (style_num c) s + else + Printf.sprintf "%s" s + +let print_one ?(color_mode = Color_Auto) ?(out_channel = stdout) c s = + Printf.fprintf out_channel "%s" (apply_color ~color_mode c s) -let cprint ?(color_mode=Color_Auto) ?(out_channel=stdout) strs = +let cprint ?(color_mode = Color_Auto) ?(out_channel = stdout) strs = List.iter strs (fun (c, s) -> print_one ~color_mode ~out_channel c s) -let cprintf ?(color_mode=Color_Auto) ?(out_channel=stdout) c = +let cprintf ?(color_mode = Color_Auto) ?(out_channel = stdout) c = Printf.ksprintf (print_one ~color_mode ~out_channel c) let (spinner, spinner_used) = let state = ref 0 in - (fun ?(angery_reaccs_only=false) () -> - begin + ( (fun ?(angery_reaccs_only = false) () -> let spinner = - if angery_reaccs_only then emoji_spinner else ["-"; "\\"; "|"; "/"] in + if angery_reaccs_only then + emoji_spinner + else + ["-"; "\\"; "|"; "/"] + in let str = List.nth_exn spinner (!state mod 4) in state := !state + 1; - str - end), - (fun () -> !state <> 0) + str), + (fun () -> !state <> 0) ) (* ANSI escape sequence to clear whole line *) let clear_line_seq = "\r\x1b[0K" let print_clear_line chan = - if Unix.isatty (Unix.descr_of_out_channel chan) - then Printf.fprintf chan "%s%!" clear_line_seq - else () + if Unix.isatty (Unix.descr_of_out_channel chan) then + Printf.fprintf chan "%s%!" clear_line_seq + else + () (* Read a single char and return immediately, without waiting for a newline. * `man termios` to see how termio works. *) let read_char () = let tty = Unix.(openfile "/dev/tty" [O_RDWR] 0o777) in let termio = Unix.tcgetattr tty in - let new_termio = {termio with Unix. - c_icanon = false; - c_vmin = 1; - c_vtime = 0; - } in + let new_termio = + { termio with Unix.c_icanon = false; c_vmin = 1; c_vtime = 0 } + in Unix.tcsetattr tty Unix.TCSANOW new_termio; let buf = Bytes.create 1 in let bytes_read = UnixLabels.read tty ~buf ~pos:0 ~len:1 in @@ -157,17 +169,24 @@ let read_char () = * characters are entered, the prompt repeats indefinitely. *) let read_choice message choices = let rec loop () = - Printf.printf "%s (%s)%!" message + Printf.printf + "%s (%s)%!" + message (String.concat "|" (List.map choices String_utils.string_of_char)); let choice = read_char () in print_newline (); - if List.mem choices choice then choice else loop () - in loop () + if List.mem choices choice then + choice + else + loop () + in + loop () let eprintf fmt = - if Unix.(isatty stderr) - then Printf.eprintf fmt - else Printf.ifprintf stderr fmt + if Unix.(isatty stderr) then + Printf.eprintf fmt + else + Printf.ifprintf stderr fmt (* Gets the number of columns in the current terminal window through * [`tput cols`][1]. 
If the command fails in any way then `None` will @@ -179,10 +198,7 @@ let eprintf fmt = * [1]: http://invisible-island.net/ncurses/man/tput.1.html *) let get_term_cols () = - if not Sys.unix || not (supports_color ()) then + if (not Sys.unix) || not (supports_color ()) then None else - try - Some (int_of_string (Sys_utils.exec_read "tput cols")) - with - _ -> None + try Some (int_of_string (Sys_utils.exec_read "tput cols")) with _ -> None diff --git a/hack/utils/sys/tty.mli b/hack/utils/sys/tty.mli index faa92bdf242..6ef9059ffe4 100644 --- a/hack/utils/sys/tty.mli +++ b/hack/utils/sys/tty.mli @@ -1,11 +1,11 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * - **) + * *) type raw_color = | Default @@ -33,18 +33,30 @@ type color_mode = | Color_Never | Color_Auto +val apply_color : ?color_mode:color_mode -> style -> string -> string + (* * Print a sequence of colorized strings to stdout/stderr, using ANSI color * escapes codes. *) -val cprint : ?color_mode:color_mode -> ?out_channel:out_channel -> (style * string) list -> unit -val cprintf : ?color_mode:color_mode -> ?out_channel:out_channel -> style -> - ('a, unit, string, unit) format4 -> 'a +val cprint : + ?color_mode:color_mode -> + ?out_channel:out_channel -> + (style * string) list -> + unit + +val cprintf : + ?color_mode:color_mode -> + ?out_channel:out_channel -> + style -> + ('a, unit, string, unit) format4 -> + 'a (* These two functions provide a four-state TTY-friendly spinner that * a client can output between sleeps if it happens to be waiting on * a busy server (e.g. one that's initializing) *) val spinner : ?angery_reaccs_only:bool -> unit -> string + val spinner_used : unit -> bool (* Output a "clear current line" escape sequence to out_channel if it's @@ -63,6 +75,7 @@ val eprintf : ('a, out_channel, unit) format -> 'a (* Whether the terminal supports color *) val supports_color : unit -> bool + val should_color : color_mode -> bool (* Whether the terminal supports emoji *) diff --git a/hack/utils/tempfile.ml b/hack/utils/tempfile.ml index 7c42b716f75..893dedaf81c 100644 --- a/hack/utils/tempfile.ml +++ b/hack/utils/tempfile.ml @@ -11,18 +11,15 @@ let rec mkdtemp ~skip_mocking ~retries = try let () = Sys_utils.mkdir_p (Path.to_string tmp_dir) ~skip_mocking in tmp_dir - with - | Unix.Unix_error _ -> - mkdtemp ~skip_mocking ~retries:(retries - 1) + with Unix.Unix_error _ -> mkdtemp ~skip_mocking ~retries:(retries - 1) -let mkdtemp ~skip_mocking = - mkdtemp ~skip_mocking ~retries:30 +let mkdtemp ~skip_mocking = mkdtemp ~skip_mocking ~retries:30 let with_tempdir ~skip_mocking g = let dir = mkdtemp skip_mocking in - let f = (fun () -> g dir) in + let f () = g dir in Utils.try_finally ~f ~finally:(fun () -> - Sys_utils.rm_dir_tree (Path.to_string dir) ~skip_mocking) + Sys_utils.rm_dir_tree (Path.to_string dir) ~skip_mocking) let with_real_tempdir g = Random.self_init (); diff --git a/hack/utils/tempfile.mli b/hack/utils/tempfile.mli new file mode 100644 index 00000000000..f7b148270e0 --- /dev/null +++ b/hack/utils/tempfile.mli @@ -0,0 +1,22 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. 
It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +exception Out_of_retries + +val mkdtemp : skip_mocking:bool -> Path.t + +val with_real_tempdir : (Path.t -> 'a) -> 'a + +val with_tempdir : (Path.t -> 'a) -> 'a diff --git a/hack/utils/tempfile_lwt.ml b/hack/utils/tempfile_lwt.ml new file mode 100644 index 00000000000..b127d03bd98 --- /dev/null +++ b/hack/utils/tempfile_lwt.ml @@ -0,0 +1,32 @@ +exception Out_of_retries + +let rec mkdtemp ~skip_mocking ~retries = + if retries < 0 then + raise Out_of_retries + else + let tmp_dir = Sys_utils.temp_dir_name in + let tmp_dir = Path.make tmp_dir in + let name = Random_id.(short_string_with_alphabet alphanumeric_alphabet) in + let tmp_dir = Path.concat tmp_dir name in + try + let () = Sys_utils.mkdir_p (Path.to_string tmp_dir) ~skip_mocking in + tmp_dir + with Unix.Unix_error _ -> mkdtemp ~skip_mocking ~retries:(retries - 1) + +let mkdtemp ~skip_mocking = mkdtemp ~skip_mocking ~retries:30 + +let with_tempdir ~skip_mocking g = + let dir = mkdtemp ~skip_mocking in + let f () = g dir in + let%lwt result = + Lwt_utils.try_finally ~f ~finally:(fun () -> + Sys_utils.rm_dir_tree (Path.to_string dir) ~skip_mocking; + Lwt.return_unit) + in + Lwt.return result + +let with_real_tempdir g = + Random.self_init (); + with_tempdir ~skip_mocking:true g + +let with_tempdir g = with_tempdir ~skip_mocking:false g diff --git a/hack/utils/tempfile_lwt.mli b/hack/utils/tempfile_lwt.mli new file mode 100644 index 00000000000..e6ac8e38f26 --- /dev/null +++ b/hack/utils/tempfile_lwt.mli @@ -0,0 +1,22 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +exception Out_of_retries + +val mkdtemp : skip_mocking:bool -> Path.t + +val with_real_tempdir : (Path.t -> 'a Lwt.t) -> 'a Lwt.t + +val with_tempdir : (Path.t -> 'a Lwt.t) -> 'a Lwt.t diff --git a/hack/utils/trie.ml b/hack/utils/trie.ml index 2b4f53c3a12..688178c320d 100644 --- a/hack/utils/trie.ml +++ b/hack/utils/trie.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -11,83 +11,85 @@ open Hh_core (* Utility functions *) -let make_pair (a: 'a) (b: 'b) : ('a*'b) = - (a,b) +let make_pair (a : 'a) (b : 'b) : 'a * 'b = (a, b) -let common_prefix (s1: string) (s2: string) : int = - let i=ref 0 in +let common_prefix (s1 : string) (s2 : string) : int = + let i = ref 0 in let l1 = String.length s1 in let l2 = String.length s2 in - while (!i < l1) && (!i < l2) && (s1.[!i] = s2.[!i]) do - i:=!i+1 + while !i < l1 && !i < l2 && s1.[!i] = s2.[!i] do + i := !i + 1 done; !i let drop s c = let l = String.length s in - String.sub s c (l-c) + String.sub s c (l - c) -let take s c = - String.sub s 0 c +let take s c = String.sub s 0 c -let (|>) (o : 'a) (f : 'a -> 'b) : 'b = f o +let ( |> ) (o : 'a) (f : 'a -> 'b) : 'b = f o let id (x : 'a) : 'a = x -type 'a return = { return : 'b . 
'a -> 'b } +type 'a return = { return: 'b. 'a -> 'b } let with_return (type t) (f : _ -> t) = - let module Capture = - struct exception Return of t end - in - let return = { return = (fun x -> raise (Capture.Return x)); } in - try f return with Capture.Return x -> x + let module Capture = struct + exception Return of t + end in + let return = { return = (fun x -> raise (Capture.Return x)) } in + (try f return with Capture.Return x -> x) (* Trie implementation *) -type 'a t = Leaf of 'a | Node of ('a t) SMap.t ref -let create (): 'a t = Node (ref (SMap.empty)) +type 'a t = + | Leaf of 'a + | Node of 'a t SMap.t ref + +let create () : 'a t = Node (ref SMap.empty) exception Inconsistent_trie of string -let get_node (trie : 'a t) : ('a t) SMap.t ref = match trie with +let get_node (trie : 'a t) : 'a t SMap.t ref = + match trie with | Node n -> n - | _ -> raise (Inconsistent_trie "Cannot match to leaf") + | _ -> raise (Inconsistent_trie "Cannot match to leaf") -let get_leaf (trie : 'a t) : 'a = match trie with +let get_leaf (trie : 'a t) : 'a = + match trie with | Leaf v -> v - | _ -> raise (Inconsistent_trie "Cannot match to node") + | _ -> raise (Inconsistent_trie "Cannot match to node") (* Match a string s with a key; return a tuple: i : int -- position where the match ends k : string -- the full key matched n : 'a t -- the node associated with key k *) -let trie_assoc_partial (trie : 'a t) (w : string) : (int * string * 'a t) option = +let trie_assoc_partial (trie : 'a t) (w : string) : + (int * string * 'a t) option = with_return (fun e -> - !(get_node trie) - |> SMap.iter (fun key elt -> - let c = common_prefix key w in - if (not (c = 0)) || ((key = "") && w = "") then - e.return (Some (c, key, elt))); - None) - + !(get_node trie) + |> SMap.iter (fun key elt -> + let c = common_prefix key w in + if (not (c = 0)) || (key = "" && w = "") then + e.return (Some (c, key, elt))); + None) let rec mem (trie : 'a t) (w : string) : bool = with_return (fun e -> - let (i, key, child) = match trie_assoc_partial trie w with - | Some x -> x - | None -> e.return false in - - if key = "" then - e.return true; + let (i, key, child) = + match trie_assoc_partial trie w with + | Some x -> x + | None -> e.return false + in + if key = "" then e.return true; - if String.length key = i then - e.return (mem child (drop w i)); + if String.length key = i then e.return (mem child (drop w i)); - false) + false) -let add_one (node : 'a t) (c : string) (inner :'a t): unit = +let add_one (node : 'a t) (c : string) (inner : 'a t) : unit = let elts = get_node node in elts := SMap.add c inner !elts @@ -105,127 +107,125 @@ let split_key (parent : 'a t) (key : string) (child : 'a t) (c : int) : 'a t = n let add_leaf (node : 'a t) (key : string) (v : 'a) : unit = - let leaf = match key with "" -> Leaf v - | _ -> let res = create () in - add_one res "" (Leaf v); - res in + let leaf = + match key with + | "" -> Leaf v + | _ -> + let res = create () in + add_one res "" (Leaf v); + res + in add_one node key leaf - -let rec add ?(if_exist : 'b -> 'a -> unit = fun _ _ -> ()) - ~(transform : 'a -> 'b) - (trie : 'b t) - (w : string) - (v : 'a) : unit = - +let rec add + ?(if_exist : 'b -> 'a -> unit = (fun _ _ -> ())) + ~(transform : 'a -> 'b) + (trie : 'b t) + (w : string) + (v : 'a) : unit = with_return (fun e -> - let (c, key, child) = match trie_assoc_partial trie w with - | Some x -> x - | None -> e.return (add_leaf trie w (transform v)) in - - if (String.length key = c) && (w = "") then (* leaf exists; use if_exists callback *) - 
e.return (if_exist (get_leaf child) v); - - if c = String.length key then (* full key match; do final recursive call *) - e.return (add child (drop w c) v ~if_exist ~transform); - - (* Partial match: need to split key with common parts *) - let n = split_key trie key child c in - add_leaf n (drop w c) (transform v)) - - -let to_list (limit : int option) - (trie : 'b t) - (kmap : string -> 'a) - (vmap : 'a -> 'b -> 'c) : 'c list = - + let (c, key, child) = + match trie_assoc_partial trie w with + | Some x -> x + | None -> e.return (add_leaf trie w (transform v)) + in + if String.length key = c && w = "" then + (* leaf exists; use if_exists callback *) + e.return (if_exist (get_leaf child) v); + + if c = String.length key then + (* full key match; do final recursive call *) + e.return (add child (drop w c) v ~if_exist ~transform); + + (* Partial match: need to split key with common parts *) + let n = split_key trie key child c in + add_leaf n (drop w c) (transform v)) + +let to_list + (limit : int option) + (trie : 'b t) + (kmap : string -> 'a) + (vmap : 'a -> 'b -> 'c) : 'c list = with_return (fun e -> - let reslist = ref [] in - let rescount = ref 0 in - - let more () = match limit with Some i -> i > !rescount | None -> true in - - let rec to_list_aux t s = match t with - | Leaf v -> if more () then - (reslist := (vmap (kmap s) v)::!reslist; - incr rescount) - else - e.return (List.rev !reslist) - | Node cs -> - SMap.fold (fun tail rhs _acc -> - to_list_aux rhs (s ^ tail)) - !cs () - - in - to_list_aux trie ""; - List.rev !reslist) - - -let find_impl ?(limit : int option = None) - (exact : bool) - (trie : 'a t) - (pre : string) - (vmap : string -> 'a -> 'c) : 'c list = - + let reslist = ref [] in + let rescount = ref 0 in + let more () = + match limit with + | Some i -> i > !rescount + | None -> true + in + let rec to_list_aux t s = + match t with + | Leaf v -> + if more () then ( + reslist := vmap (kmap s) v :: !reslist; + incr rescount + ) else + e.return (List.rev !reslist) + | Node cs -> + SMap.fold (fun tail rhs _acc -> to_list_aux rhs (s ^ tail)) !cs () + in + to_list_aux trie ""; + List.rev !reslist) + +let find_impl + ?(limit : int option = None) + (exact : bool) + (trie : 'a t) + (pre : string) + (vmap : string -> 'a -> 'c) : 'c list = with_return (fun e -> - let append = (^) pre in - let rec find_impl_aux trie p = - let (c, key, child) = match trie_assoc_partial trie p with - | Some x -> x - | None -> e.return [] in - - match (String.length key = c), (not exact) && (String.length p = c) with - | true, _ when String.length p = 0 -> to_list limit child append vmap - | true, true -> to_list limit child append vmap - | true, _ -> find_impl_aux child (drop p c) - - | false, true -> to_list limit child (fun k -> pre ^ (drop key c) ^ k) vmap - - | _ -> [] - in - find_impl_aux trie pre) - + let append = ( ^ ) pre in + let rec find_impl_aux trie p = + let (c, key, child) = + match trie_assoc_partial trie p with + | Some x -> x + | None -> e.return [] + in + match (String.length key = c, (not exact) && String.length p = c) with + | (true, _) when String.length p = 0 -> to_list limit child append vmap + | (true, true) -> to_list limit child append vmap + | (true, _) -> find_impl_aux child (drop p c) + | (false, true) -> + to_list limit child (fun k -> pre ^ drop key c ^ k) vmap + | _ -> [] + in + find_impl_aux trie pre) let find (trie : 'a t) (s : string) : 'a = match find_impl true trie s make_pair with - | (_s, v)::_tl -> v + | (_s, v) :: _tl -> v | _ -> raise Not_found - -let find_prefix 
(trie : 'a t) - (s : string) - (vmap : string -> 'a -> 'b): 'b list = +let find_prefix (trie : 'a t) (s : string) (vmap : string -> 'a -> 'b) : + 'b list = find_impl false trie s vmap -let find_prefix_limit (i : int) - (trie : 'a t) - (s : string) - (vmap : string -> 'a -> 'b) : 'b list = +let find_prefix_limit + (i : int) (trie : 'a t) (s : string) (vmap : string -> 'a -> 'b) : 'b list + = find_impl false trie s vmap ~limit:(Some i) let remove_one (trie : 'a t) (key : string) : unit = let elts = get_node trie in elts := SMap.remove key !elts -let rec remove_impl (exact: bool) (trie : 'a t) (s : string): unit = +let rec remove_impl (exact : bool) (trie : 'a t) (s : string) : unit = with_return (fun e -> - let (c, key, child) = match trie_assoc_partial trie s with - | Some x -> x - | None -> e.return () in - - match (String.length key = c), exact, (String.length s = c) with - | true, true, true when c = 0 -> remove_one trie (take s c) - | true, false, true -> remove_one trie (take s c) + let (c, key, child) = + match trie_assoc_partial trie s with + | Some x -> x + | None -> e.return () + in + match (String.length key = c, exact, String.length s = c) with + | (true, true, true) when c = 0 -> remove_one trie (take s c) + | (true, false, true) -> remove_one trie (take s c) + | (true, _, _) -> remove_impl exact child (drop s c) + | _ -> ()) - | true, _, _ -> remove_impl exact child (drop s c) - | _ -> ()) +let remove (trie : 'a t) (s : string) : unit = remove_impl true trie s - -let remove (trie : 'a t) (s : string) : unit = - remove_impl true trie s - -let remove_prefix (trie : 'a t) (s : string) : unit = - remove_impl false trie s +let remove_prefix (trie : 'a t) (s : string) : unit = remove_impl false trie s (* let rec merge ?(if_exist : 'a -> 'a -> unit = fun _ _ -> ()) *) (* (trieDes : 'a t) *) @@ -272,14 +272,15 @@ let remove_prefix (trie : 'a t) (s : string) : unit = let rec to_string_impl (buf : Buffer.t) (trie : 'a t) : unit = match trie with - | Node elts -> SMap.fold - (fun k v _ -> - Printf.bprintf buf "%S:{" k; - to_string_impl buf v; - Printf.bprintf buf "}") - !elts () - | Leaf _v -> () - + | Node elts -> + SMap.fold + (fun k v _ -> + Printf.bprintf buf "%S:{" k; + to_string_impl buf v; + Printf.bprintf buf "}") + !elts + () + | Leaf _v -> () let to_string (trie : 'a t) : string = let buf = Buffer.create 250 in diff --git a/hack/utils/trie.mli b/hack/utils/trie.mli new file mode 100644 index 00000000000..0e646fc75ee --- /dev/null +++ b/hack/utils/trie.mli @@ -0,0 +1,88 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +type 'a t + +val make_pair : 'a -> 'b -> 'a * 'b + +val common_prefix : string -> string -> int + +val drop : string -> int -> string + +val take : string -> int -> string + +val ( |> ) : 'a -> ('a -> 'b) -> 'b + +val id : 'a -> 'a + +type 'a return = { return: 'b. 
'a -> 'b } + +val with_return : ('a return -> 'a) -> 'a + +val create : unit -> 'a t + +exception Inconsistent_trie of string + +val get_node : 'a t -> 'a t SMap.t ref + +val get_leaf : 'a t -> 'a + +val trie_assoc_partial : 'a t -> string -> (int * string * 'a t) option + +val mem : 'a t -> string -> bool + +val add_one : 'a t -> string -> 'a t -> unit + +val split_key : 'a t -> string -> 'a t -> int -> 'a t + +val add_leaf : 'a t -> string -> 'a -> unit + +val add : + ?if_exist:('b -> 'a -> unit) -> + transform:('a -> 'b) -> + 'b t -> + string -> + 'a -> + unit + +val to_list : + int option -> 'b t -> (string -> 'a) -> ('a -> 'b -> 'c) -> 'c list + +val find_impl : + ?limit:int option -> + bool -> + 'a t -> + string -> + (string -> 'a -> 'c) -> + 'c list + +val find : 'a t -> string -> 'a + +val find_prefix : 'a t -> string -> (string -> 'a -> 'b) -> 'b list + +val find_prefix_limit : + int -> 'a t -> string -> (string -> 'a -> 'b) -> 'b list + +val remove_one : 'a t -> string -> unit + +val remove_impl : bool -> 'a t -> string -> unit + +val remove : 'a t -> string -> unit + +val remove_prefix : 'a t -> string -> unit + +val to_string_impl : Buffer.t -> 'a t -> unit + +val to_string : 'a t -> string diff --git a/hack/utils/utils.ml b/hack/utils/utils.ml deleted file mode 100644 index 40e556e64d3..00000000000 --- a/hack/utils/utils.ml +++ /dev/null @@ -1,231 +0,0 @@ -(** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * - *) - -open Hh_core - -let () = Random.self_init () -let debug = ref false -let profile = ref false - -let log = ref (fun (_ : string) -> ()) - -let d s = - if !debug - then begin - print_string s; - flush stdout; - end - -let dn s = - if !debug - then begin - print_string s; - print_newline(); - flush stdout; - end - -module Map = struct end - -let spf = Printf.sprintf -let print_endlinef fmt = Printf.ksprintf print_endline fmt -let prerr_endlinef fmt = Printf.ksprintf prerr_endline fmt - -let opt f env = function - | None -> env, None - | Some x -> let env, x = f env x in env, Some x - -let opt_fold f env = function - | None -> env - | Some x -> f env x - -let singleton_if cond x = if cond then [x] else [] - -let smap_inter m1 m2 = - SMap.fold ( - fun x y acc -> - if SMap.mem x m2 - then SMap.add x y acc - else acc - ) m1 SMap.empty - -let imap_inter m1 m2 = - IMap.fold ( - fun x y acc -> - if IMap.mem x m2 - then IMap.add x y acc - else acc - ) m1 IMap.empty - -let smap_inter_list = function - | [] -> SMap.empty - | x :: rl -> - List.fold_left rl ~f:smap_inter ~init:x - -let imap_inter_list = function - | [] -> IMap.empty - | x :: rl -> - List.fold_left rl ~f:imap_inter ~init:x - -let rec wfold_left2 f env l1 l2 = - match l1, l2 with - | [], _ | _, [] -> env - | x1 :: rl1, x2 :: rl2 -> - let env = f env x1 x2 in - wfold_left2 f env rl1 rl2 - -let sl l = - List.fold_right l ~f:(^) ~init:"" - -let maybe f env = function - | None -> () - | Some x -> f env x - -(* Since OCaml usually runs w/o backtraces enabled, the note makes errors - * easier to debug. 
*) -let unsafe_opt_note note = function - | None -> raise (Invalid_argument note) - | Some x -> x - -let unsafe_opt x = unsafe_opt_note "unsafe_opt got None" x - -let inter_list = function - | [] -> SSet.empty - | x :: rl -> - List.fold_left rl ~f:SSet.inter ~init:x - -let rec list_last f1 f2 = - function - | [] -> () - | [x] -> f2 x - | x :: rl -> f1 x; list_last f1 f2 rl - -let is_prefix_dir dir fn = - let prefix = dir ^ Filename.dir_sep in - String.length fn > String.length prefix && - String.sub fn 0 (String.length prefix) = prefix - -let try_with_channel oc f1 f2 = - try - let result = f1 oc in - close_out oc; - result - with e -> - close_out oc; - f2 e - -let iter_n_acc n f acc = - let acc = ref acc in - for i = 1 to n-1 do - acc := fst (f !acc) - done; - f !acc - -let map_of_list list = - List.fold_left ~f:(fun m (k, v) -> SMap.add k v m) ~init:SMap.empty list - -let set_of_list l = - List.fold_right l ~f:SSet.add ~init:SSet.empty - -(* \A\B\C -> A\B\C *) -let strip_ns s = - if String.length s == 0 || s.[0] <> '\\' then s - else String.sub s 1 ((String.length s) - 1) - -(* \A\B\C -> C *) -let strip_all_ns s = - try - let base_name_start = String.rindex s '\\' + 1 in - String.sub s base_name_start ((String.length s) - base_name_start) - with Not_found -> s - -(*****************************************************************************) -(* Same as List.iter2, except that we only iterate as far as the shortest - * of both lists. - *) -(*****************************************************************************) - -let rec iter2_shortest f l1 l2 = - match l1, l2 with - | [], _ | _, [] -> () - | x1 :: rl1, x2 :: rl2 -> f x1 x2; iter2_shortest f rl1 rl2 - -let fold_fun_list acc fl = - List.fold_left fl ~f:(|>) ~init:acc - -let compose f g x = f (g x) - -module With_complete_flag = struct - type 'a t = { - is_complete : bool; - value : 'a; - } -end - -let try_finally ~f ~(finally: unit -> unit) = - let res = try f () with e -> finally (); raise e in - finally (); - res - -let with_context ~enter ~exit ~do_ = - enter (); - let result = try do_ () with e -> - exit (); - raise e in - exit (); - result - -(* We run with exception backtraces turned off for performance reasons. But for - * some kinds of catastrophic exceptions, which we never recover from (so the - * performance doesn't matter) we do want the backtrace. "assert false" is one - * of such conditions. 
- *) -let assert_false_log_backtrace msg = - Printf.eprintf "assert false with backtrace:\n"; - Option.iter msg ~f:(Printf.eprintf "%s\n"); - Printf.eprintf "%s" (Printexc.raw_backtrace_to_string - (Printexc.get_callstack 100)); - assert false - -(* Returns the largest element in arr strictly less than `bound` *) -let infimum (arr : 'a array) - (bound : 'b) - (compare : 'a -> 'b -> int) : int option = - let rec binary_search low high = begin - if low = high then - Some low - else if low > high then - None - else begin - let mid = (low + high + 1) / 2 in - let test = Array.get arr mid in - if compare test bound < 0 then - binary_search mid high - else - binary_search low (mid - 1) - end - end in - binary_search 0 ((Array.length arr) - 1) - -(** Callstack is simply a typed way to indicate that a string is a callstack *) -type callstack = Callstack of string - -let unwrap_snd (a, b_opt) = - match b_opt with - | None -> None - | Some b -> Some (a, b) - -let memoize_naive f = - let m = ref None in - fun () -> - match !m with - | None -> - let res = f () in - m := Some res ; - res - | Some s -> s diff --git a/hack/utils/wwwroot.ml b/hack/utils/wwwroot.ml index 98121d2b872..3bde79bc9e3 100644 --- a/hack/utils/wwwroot.ml +++ b/hack/utils/wwwroot.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,29 +7,51 @@ * *) - (** * Checks if x is a www directory by looking for ".hhconfig". *) -let is_www_directory ?(config=".hhconfig") (path : Path.t) : bool = - let arcconfig = Path.concat path config in - Path.file_exists arcconfig +let is_www_directory ?(config = ".hhconfig") (path : Path.t) : bool = + let arcconfig = Path.concat path config in + Path.file_exists arcconfig + +let assert_www_directory ?(config = ".hhconfig") (path : Path.t) : unit = + if not (Path.file_exists path && Path.is_directory path) then ( + Printf.eprintf "Error: %s is not a directory\n%!" (Path.to_string path); + exit 1 + ); + if not (is_www_directory ~config path) then ( + Printf.fprintf + stderr + "Error: could not find a %s file in %s or any of its parent directories. Do you have a %s in your code's root directory?\n" + config + (Path.to_string path) + config; + flush stderr; + exit 1 + ) + +let rec guess_root config start ~recursion_limit : Path.t option = + if start = Path.parent start then + None + (* Reached file system root *) + else if is_www_directory ~config start then + Some start + else if recursion_limit <= 0 then + None + else + guess_root config (Path.parent start) (recursion_limit - 1) -let assert_www_directory ?(config=".hhconfig") (path : Path.t) : unit = - if not (Path.file_exists path && Path.is_directory path) - then begin - Printf.eprintf "Error: %s is not a directory\n%!" (Path.to_string path); - exit 1 - end; - if not (is_www_directory ~config path) - then begin - Printf.fprintf stderr -"Error: could not find a %s file in %s \ - or any of its parent directories. \ - Do you have a %s in your code's root directory?\n" - config - (Path.to_string path) - config; - flush stderr; - exit 1 - end +let get ?(config = ".hhconfig") (path : string option) : Path.t = + let start_str = + match path with + | None -> "." 
+ | Some s -> s + in + let start_path = Path.make start_str in + let root = + match guess_root config start_path ~recursion_limit:50 with + | None -> start_path + | Some r -> r + in + assert_www_directory ~config root; + root diff --git a/hack/utils/wwwroot.mli b/hack/utils/wwwroot.mli new file mode 100644 index 00000000000..a0b0e4d85d7 --- /dev/null +++ b/hack/utils/wwwroot.mli @@ -0,0 +1,20 @@ +(* + * Copyright (c) 2019, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + *) + +(* This `.mli` file was generated automatically. It may include extra +definitions that should not actually be exposed to the caller. If you notice +that this interface file is a poor interface, please take a few minutes to +clean it up manually, and then delete this comment once the interface is in +shape. *) + +val is_www_directory : ?config:string -> Path.t -> bool + +val assert_www_directory : ?config:string -> Path.t -> unit + +val get : ?config:string -> string option -> Path.t diff --git a/hack/watchman/dune b/hack/watchman/dune new file mode 100644 index 00000000000..19dcee8867d --- /dev/null +++ b/hack/watchman/dune @@ -0,0 +1,40 @@ +(library + (name watchman) + (wrapped false) + (modules + watchman + watchman_sig) + (libraries + buffered_line_reader + core_kernel + hh_json + logging_common + sys_utils + utils_core) + (preprocess (pps lwt_ppx)) ; See T41851208 +) + +(library + (name watchman_lwt) + (wrapped false) + (modules + watchman_lwt + ) + (libraries + watchman + buffered_line_reader_lwt + ) + (preprocess (pps lwt_ppx)) +) + +(library + (name watchman_utils) + (wrapped false) + (modules + watchman_utils) + (libraries + hh_json + logging + utils_core) + (preprocess (pps lwt_ppx)) ; See T41851208 +) diff --git a/hack/watchman/watchman.ml b/hack/watchman/watchman.ml index d43b6162f0f..366ae9157fa 100644 --- a/hack/watchman/watchman.ml +++ b/hack/watchman/watchman.ml @@ -1,13 +1,13 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the "hack" directory of this source tree. * - **) + * *) -open Hh_core +open Core_kernel open Utils (* @@ -23,121 +23,154 @@ open Utils (** Stuff shared between Actual and Mocked implementations. *) module Testing_common = struct open Watchman_sig.Types - let test_settings = { - subscribe_mode = Some Defer_changes; - init_timeout = 0; - expression_terms = []; - debug_logging = false; - roots = [Path.dummy_path]; - subscription_prefix = "dummy_prefix" - } + + let test_settings = + { + subscribe_mode = Some Defer_changes; + init_timeout = Watchman_sig.Types.No_timeout; + expression_terms = []; + debug_logging = false; + roots = [Path.dummy_path]; + subscription_prefix = "dummy_prefix"; + } end module Watchman_process_helpers = struct include Watchman_sig.Types module J = Hh_json_helpers.AdhocJsonHelpers + let timeout_to_secs = function + | No_timeout -> None + | Default_timeout -> Some 120. + | Explicit_timeout timeout -> Some timeout + let debug = false - (** Throw this exception when we know there is something to read from + (* Throw this exception when we know there is something to read from * the watchman channel, but reading took too long. 
*) exception Read_payload_too_long (* Looks for common errors in watchman responses *) let assert_no_error obj = (try - let warning = J.get_string_val "warning" obj in - EventLogger.watchman_warning warning; - Hh_logger.log "Watchman warning: %s\n" warning - with Not_found -> ()); - (try - let error = J.get_string_val "error" obj in - EventLogger.watchman_error error; - raise @@ Watchman_error error - with Not_found -> ()); + let warning = J.get_string_val "warning" obj in + EventLogger.watchman_warning warning; + Hh_logger.log "Watchman warning: %s\n" warning + with Caml.Not_found -> ()); (try + let error = J.get_string_val "error" obj in + EventLogger.watchman_error error; + raise @@ Watchman_error error + with Caml.Not_found -> ()); + try let canceled = J.get_bool_val "canceled" obj in - if canceled then begin - EventLogger.watchman_error "Subscription canceled by watchman"; - raise @@ Subscription_canceled_by_watchman - end else () - with Not_found -> ()) + if canceled then ( + EventLogger.watchman_error "Subscription canceled by watchman"; + raise @@ Subscription_canceled_by_watchman + ) else + () + with Caml.Not_found -> () (* Verifies that a watchman response is valid JSON and free from common errors *) let sanitize_watchman_response ~debug_logging output = if debug_logging then Hh_logger.info "Watchman response: %s" output; let response = - try Hh_json.json_of_string output - with e -> - Hh_logger.error ~exn:e "Failed to parse string as JSON: %s" output; - raise e + try Hh_json.json_of_string output + with e -> + let raw_stack = Caml.Printexc.get_raw_backtrace () in + let stack = Caml.Printexc.raw_backtrace_to_string raw_stack in + Hh_logger.error + "Failed to parse string as JSON: %s\nEXCEPTION:%s\nSTACK:%s\n" + output + (Exn.to_string e) + stack; + Caml.Printexc.raise_with_backtrace e raw_stack in assert_no_error response; response end -module Regular_watchman_process: sig +module Regular_watchman_process : sig include Watchman_sig.WATCHMAN_PROCESS with type 'a result = 'a - val get_reader: conn -> Buffered_line_reader.t + val get_reader : conn -> Buffered_line_reader.t end = struct include Watchman_process_helpers type 'a result = 'a - type conn = Buffered_line_reader.t * out_channel - let (>>=) a f = f a - let (>|=) a f = f a + type conn = Buffered_line_reader.t * Out_channel.t + + let ( >>= ) a f = f a + + let ( >|= ) a f = f a + let return x = x - let catch ~f ~catch = try f () with exn -> catch exn + let catch ~f ~catch = + try f () + with exn -> + let stack = Printexc.get_backtrace () in + catch ~stack exn let list_fold_values = List.fold (* Send a request to the watchman process *) let send_request ~debug_logging oc json = let json_str = Hh_json.(json_to_string json) in - if debug_logging then Hh_logger.info "Watchman request: %s" json_str ; - output_string oc json_str; - output_string oc "\n"; - flush oc + if debug_logging then Hh_logger.info "Watchman request: %s" json_str; + Out_channel.output_string oc json_str; + Out_channel.output_string oc "\n"; + Out_channel.flush oc (***************************************************************************) (* Handling requests and responses. 
*) (***************************************************************************) let has_input timeout reader = - if Buffered_line_reader.has_buffered_content reader - then true + if Buffered_line_reader.has_buffered_content reader then + true else - match Sys_utils.select_non_intr [Buffered_line_reader.get_fd reader] [] [] timeout with - | [], _, _ -> false + (* Negative means "no timeout" to select *) + let timeout = Option.value (timeout_to_secs timeout) ~default:~-.1. in + match + Sys_utils.select_non_intr + [Buffered_line_reader.get_fd reader] + [] + [] + timeout + with + | ([], _, _) -> false | _ -> true let read_with_timeout timeout reader = let start_t = Unix.time () in - if not (has_input timeout reader) - then + if not (has_input timeout reader) then raise Timeout else - let remaining = start_t +. timeout -. Unix.time () in - let timeout = int_of_float remaining in - let timeout = max timeout 10 in - Timeout.with_timeout - ~do_: (fun _ -> Buffered_line_reader.get_next_line reader) - ~timeout - ~on_timeout:(fun () -> - let () = EventLogger.watchman_timeout () in - raise Read_payload_too_long - ) + match timeout_to_secs timeout with + | None -> Buffered_line_reader.get_next_line reader + | Some timeout -> + let remaining = start_t +. timeout -. Unix.time () in + let timeout = int_of_float remaining in + let timeout = max timeout 10 in + Timeout.with_timeout + ~do_:(fun _ -> Buffered_line_reader.get_next_line reader) + ~timeout + ~on_timeout:(fun () -> + let () = EventLogger.watchman_timeout () in + raise Read_payload_too_long) (* Asks watchman for the path to the socket file *) let get_sockname timeout = let ic = - Timeout.open_process_in "watchman" - [| "watchman"; "get-sockname"; "--no-pretty" |] in - let reader = Buffered_line_reader.create @@ - Timeout.descr_of_in_channel ic in + Timeout.open_process_in + "watchman" + [|"watchman"; "get-sockname"; "--no-pretty"|] + in + let reader = + Buffered_line_reader.create @@ Timeout.descr_of_in_channel ic + in let output = read_with_timeout timeout reader in assert (Timeout.close_process_in ic = Unix.WEXITED 0); let json = Hh_json.json_of_string output in @@ -147,75 +180,92 @@ end = struct let open_connection ~timeout = let sockname = get_sockname timeout in let (tic, oc) = Timeout.open_connection (Unix.ADDR_UNIX sockname) in - let reader = Buffered_line_reader.create - @@ Timeout.descr_of_in_channel @@ tic in - reader, oc + let reader = + Buffered_line_reader.create @@ Timeout.descr_of_in_channel @@ tic + in + (reader, oc) let close_connection conn = - let reader, _ = conn in + let (reader, _) = conn in Unix.close @@ Buffered_line_reader.get_fd reader (** Open a connection to the watchman socket, call the continuation, then * close. *) let with_watchman_conn ~timeout f = let conn = open_connection ~timeout in - let result = try f conn with - | e -> + let result = + try f conn + with e -> + let stack = Caml.Printexc.get_raw_backtrace () in Unix.close @@ Buffered_line_reader.get_fd @@ fst conn; - raise e + Caml.Printexc.raise_with_backtrace e stack in Unix.close @@ Buffered_line_reader.get_fd @@ fst conn; result (* Sends a request to watchman and returns the response. 
If we don't have a connection, * a new connection will be created before the request and destroyed after the response *) - let rec request ~debug_logging ?conn ?(timeout=120.0) json = + let rec request ~debug_logging ?conn ?(timeout = Default_timeout) json = match conn with | None -> - with_watchman_conn ~timeout (fun conn -> request ~debug_logging ~conn ~timeout json) - | Some (reader, oc) -> begin + with_watchman_conn ~timeout (fun conn -> + request ~debug_logging ~conn ~timeout json) + | Some (reader, oc) -> send_request ~debug_logging oc json; - sanitize_watchman_response ~debug_logging (read_with_timeout timeout reader) - end + sanitize_watchman_response + ~debug_logging + (read_with_timeout timeout reader) - let send_request_and_do_not_wait_for_response ~debug_logging ~conn:(_, oc) json = + let send_request_and_do_not_wait_for_response + ~debug_logging ~conn:(_, oc) json = send_request ~debug_logging oc json - let blocking_read ~debug_logging ?timeout ~conn = - let timeout = Option.value timeout ~default:0.0 in + let blocking_read ~debug_logging ?(timeout = Explicit_timeout 0.) ~conn = let ready = has_input timeout @@ fst conn in if not ready then - if timeout = 0.0 then None - else raise Timeout + match timeout with + | No_timeout -> None + | Explicit_timeout timeout when timeout = 0. -> None + | _ -> raise Timeout else (* Use the timeout mechanism to limit maximum time to read payload (cap * data size) so we don't freeze if watchman sends an inordinate amount of * data, or if it is malformed (i.e. doesn't end in a newline). *) let timeout = 40 in - let output = Timeout.with_timeout - ~do_: (fun _ -> Buffered_line_reader.get_next_line @@ fst conn) - ~timeout - ~on_timeout:begin fun () -> - let () = Hh_logger.log "Regular_watchman_process.blocking_read timed out" in - raise Read_payload_too_long - end + let output = + Timeout.with_timeout + ~do_:(fun _ -> Buffered_line_reader.get_next_line @@ fst conn) + ~timeout + ~on_timeout: + begin + fun () -> + let () = + Hh_logger.log + "Regular_watchman_process.blocking_read timed out" + in + raise Read_payload_too_long + end in Some (sanitize_watchman_response ~debug_logging output) let get_reader (reader, _) = reader module Testing = struct - let get_test_conn () = (Buffered_line_reader.get_null_reader (), open_out "/dev/null") + let get_test_conn () = + (Buffered_line_reader.get_null_reader (), Out_channel.create "/dev/null") end end -module Functor (Watchman_process: Watchman_sig.WATCHMAN_PROCESS): Watchman_sig.S - with type 'a result = 'a Watchman_process.result and type conn = Watchman_process.conn = -struct - let (>>=) = Watchman_process.(>>=) - let (>|=) = Watchman_process.(>|=) +module Functor (Watchman_process : Watchman_sig.WATCHMAN_PROCESS) : + Watchman_sig.S + with type 'a result = 'a Watchman_process.result + and type conn = Watchman_process.conn = struct + let ( >>= ) = Watchman_process.( >>= ) + + let ( >|= ) = Watchman_process.( >|= ) type 'a result = 'a Watchman_process.result + type conn = Watchman_process.conn (** @@ -229,12 +279,11 @@ struct module Mocking = struct exception Cannot_set_when_mocks_disabled - let print_env _ = - raise Cannot_set_when_mocks_disabled - let get_changes_returns _ = - raise Cannot_set_when_mocks_disabled - let init_returns _ = - raise Cannot_set_when_mocks_disabled + let print_env _ = raise Cannot_set_when_mocks_disabled + + let get_changes_returns _ = raise Cannot_set_when_mocks_disabled + + let init_returns _ = raise Cannot_set_when_mocks_disabled end (** This number is totally arbitrary. 
Just need some cap. *) @@ -249,11 +298,11 @@ struct include Watchman_sig.Types type dead_env = { - (** Will reuse original settings to reinitializing watchman subscription. *) - prior_settings : init_settings; + (* Will reuse original settings to reinitializing watchman subscription. *) + prior_settings: init_settings; reinit_attempts: int; dead_since: float; - prior_clockspec : string; + prior_clockspec: string; } type env = { @@ -276,7 +325,7 @@ struct prior_settings = env.settings; dead_since = Unix.time (); reinit_attempts = 0; - (** When we start a new watchman connection, we continue to use the prior + (* When we start a new watchman connection, we continue to use the prior * clockspec. If the same watchman server is still alive, then all is * good. If not, the clockspec allows us to detect whether a new watchman * server had to be started. See also "is_fresh_instance" on watchman's @@ -285,7 +334,7 @@ struct } type watchman_instance = - (** Indicates a dead watchman instance (most likely due to chef upgrading, + (* Indicates a dead watchman instance (most likely due to chef upgrading, * reconfiguration, or a user terminating watchman, or a timeout reading * from the connection) detected by, for example, a pipe error or a timeout. * @@ -304,268 +353,439 @@ struct let clock root = J.strlist ["clock"; root] - type watch_command = Subscribe | Query + type watch_command = + | Subscribe + | Query (** Conjunction of extra_expressions and expression_terms. *) let request_json - ?(extra_kv=[]) ?(extra_expressions=[]) watchman_command env = - let open Hh_json in - let command = begin match watchman_command with - | Subscribe -> "subscribe" - | Query -> "query" end in - let header = - [JSON_String command ; JSON_String env.watch_root] @ - begin - match watchman_command with - | Subscribe -> [JSON_String env.subscription] - | _ -> [] - end in - let expressions = extra_expressions @ (env.settings.expression_terms) in - let expressions = match env.watched_path_expression_terms with - | Some terms -> terms :: expressions - | None -> expressions - in - assert (expressions <> []); - let directives = [ - JSON_Object (extra_kv - @ [ - "fields", J.strlist ["name"]; - (** Watchman doesn't allow an empty allof expression. But expressions is never empty *) - "expression", J.pred "allof" expressions; - ]) - ] in - let request = JSON_Array (header @ directives) in - request + ?(extra_kv = []) ?(extra_expressions = []) watchman_command env = + Hh_json.( + let command = + match watchman_command with + | Subscribe -> "subscribe" + | Query -> "query" + in + let header = + [JSON_String command; JSON_String env.watch_root] + @ + match watchman_command with + | Subscribe -> [JSON_String env.subscription] + | _ -> [] + in + let expressions = extra_expressions @ env.settings.expression_terms in + let expressions = + match env.watched_path_expression_terms with + | Some terms -> terms :: expressions + | None -> expressions + in + assert (expressions <> []); + let directives = + [ + JSON_Object + ( extra_kv + @ [ + ("fields", J.strlist ["name"]); + (* Watchman doesn't allow an empty allof expression. 
But expressions is never empty *) + ("expression", J.pred "allof" expressions); + ] ); + ] + in + let request = JSON_Array (header @ directives) in + request) let all_query env = - request_json - ~extra_expressions:([Hh_json.JSON_String "exists"]) - Query env + request_json ~extra_expressions:[Hh_json.JSON_String "exists"] Query env + + let get_changes_since_mergebase_query env = + let extra_kv = + [ + ( "since", + Hh_json.JSON_Object + [ + ( "scm", + Hh_json.JSON_Object + [("mergebase-with", Hh_json.JSON_String "master")] ); + ] ); + ] + in + request_json ~extra_kv Query env let since_query env = request_json - ~extra_kv: ["since", Hh_json.JSON_String env.clockspec; - "empty_on_fresh_instance", Hh_json.JSON_Bool true] - Query env + ~extra_kv: + [ + ("since", Hh_json.JSON_String env.clockspec); + ("empty_on_fresh_instance", Hh_json.JSON_Bool true); + ] + Query + env let subscribe ~mode env = - let since, mode = match mode with - | All_changes -> Hh_json.JSON_String env.clockspec, [] - | Defer_changes -> - Hh_json.JSON_String env.clockspec, ["defer", J.strlist ["hg.update"]] - | Drop_changes -> Hh_json.JSON_String env.clockspec, ["drop", J.strlist ["hg.update"]] - | Scm_aware -> + let (since, mode) = + match mode with + | All_changes -> (Hh_json.JSON_String env.clockspec, []) + | Defer_changes -> + ( Hh_json.JSON_String env.clockspec, + [("defer", J.strlist ["hg.update"])] ) + | Drop_changes -> + (Hh_json.JSON_String env.clockspec, [("drop", J.strlist ["hg.update"])]) + | Scm_aware -> Hh_logger.log "Making Scm_aware subscription"; - let scm = Hh_json.JSON_Object - [("mergebase-with", Hh_json.JSON_String "master")] in - let since = Hh_json.JSON_Object - [("scm", scm); ("drop", J.strlist ["hg.update"]);] in - since, [] + let scm = + Hh_json.JSON_Object + [("mergebase-with", Hh_json.JSON_String "master")] + in + let since = + Hh_json.JSON_Object [("scm", scm); ("drop", J.strlist ["hg.update"])] + in + (since, []) in request_json - ~extra_kv:((["since", since] @ mode) @ - ["empty_on_fresh_instance", - Hh_json.JSON_Bool true]) - Subscribe env + ~extra_kv: + ( ([("since", since)] @ mode) + @ [("empty_on_fresh_instance", Hh_json.JSON_Bool true)] ) + Subscribe + env let watch_project root = J.strlist ["watch-project"; root] (* See https://facebook.github.io/watchman/docs/cmd/version.html *) - let capability_check ?(optional=[]) required = - let open Hh_json in - JSON_Array begin - [JSON_String "version"] @ [ - JSON_Object [ - "optional", J.strlist optional; - "required", J.strlist required; - ] - ] - end + let capability_check ?(optional = []) required = + Hh_json.( + JSON_Array + ( [JSON_String "version"] + @ [ + JSON_Object + [ + ("optional", J.strlist optional); + ("required", J.strlist required); + ]; + ] )) (** We filter all responses from get_changes through this. This is to detect * Watchman server crashes. * * See also Watchman docs on "since" query parameter. *) let assert_no_fresh_instance obj = - let open Hh_json.Access in - let _ = (return obj) - >>= get_bool "is_fresh_instance" - >>= (fun (is_fresh, trace) -> - if is_fresh then begin - Hh_logger.log "Watchman server is fresh instance. Exiting."; - raise Exit_status.(Exit_with Watchman_fresh_instance) - end - else - Ok ((), trace) - ) in - () + Hh_json.Access.( + let _ = + return obj + >>= get_bool "is_fresh_instance" + >>= fun (is_fresh, trace) -> + if is_fresh then ( + Hh_logger.log "Watchman server is fresh instance. 
Exiting."; + raise Exit_status.(Exit_with Watchman_fresh_instance) + ) else + Ok ((), trace) + in + ()) (****************************************************************************) (* Initialization, reinitialization, and crash-tracking. *) (****************************************************************************) let with_crash_record_exn source f = - Watchman_process.catch ~f ~catch:(fun e -> - Hh_logger.exc_with_dodgy_backtrace ~prefix:("Watchman " ^ source ^ ": ") e; - raise e - ) + Watchman_process.catch ~f ~catch:(fun ~stack e -> + Hh_logger.exc ~prefix:("Watchman " ^ source ^ ": ") ~stack e; + raise e) let with_crash_record_opt source f = Watchman_process.catch - ~f:(fun () -> with_crash_record_exn source f >|= fun v -> Some v) - ~catch:(fun _ -> Watchman_process.return None) + ~f:(fun () -> with_crash_record_exn source f >|= (fun v -> Some v)) + ~catch:(fun ~stack:_ e -> + let exn = Exception.wrap e in + match e with + (* Avoid swallowing these *) + | Exit_status.Exit_with _ + | Watchman_restarted -> + Exception.reraise exn + | _ -> Watchman_process.return None) let has_capability name capabilities = - (** Projects down from the boolean error monad into booleans. + (* Projects down from the boolean error monad into booleans. * Error states go to false, values are projected directly. *) - let project_bool m = match m with - | Ok (v, _) -> - v - | Error _ -> - false + let project_bool m = + match m with + | Ok (v, _) -> v + | Error _ -> false in - let open Hh_json.Access in - (return capabilities) + Hh_json.Access.( + return capabilities >>= get_obj "capabilities" >>= get_bool name - |> project_bool - - let re_init ?prior_clockspec - { init_timeout; subscribe_mode; expression_terms; debug_logging; roots; subscription_prefix } = + |> project_bool) - with_crash_record_opt "init" @@ fun () -> - Watchman_process.open_connection ~timeout:(float_of_int init_timeout) >>= fun conn -> - Watchman_process.request ~debug_logging ~conn - (capability_check ~optional:[ flush_subscriptions_cmd ] - ["relative_root"]) >>= fun capabilities -> + (* When we re-init our connection to Watchman, we use the old clockspec to get all the changes + * since our last response. However, if Watchman has restarted and the old clockspec pre-dates + * the new Watchman, then we may miss updates. It is important for Flow and Hack to restart + * in that case. + * + * Unfortunately, the response to "subscribe" doesn't have the "is_fresh_instance" field. So + * we'll instead send a small "query" request. It should always return 0 files, but it should + * tell us whether the Watchman service has restarted since clockspec. + *) + let assert_watchman_has_not_restarted_since + ~debug_logging ~conn ~watch_root ~clockspec = + let hard_to_match_name = "irrelevant.potato" in + let query = + Hh_json.( + JSON_Array + [ + JSON_String "query"; + JSON_String watch_root; + JSON_Object + [ + ("since", JSON_String clockspec); + ("empty_on_fresh_instance", JSON_Bool true); + ( "expression", + JSON_Array + [JSON_String "name"; JSON_String hard_to_match_name] ); + ]; + ]) + in + Watchman_process.request ~debug_logging ~conn query + >>= fun response -> + match + Hh_json_helpers.Jget.bool_opt (Some response) "is_fresh_instance" + with + | Some false -> Watchman_process.return () + | Some true -> + Hh_logger.error + "Watchman server restarted so we may have missed some updates"; + raise Watchman_restarted + | None -> + (* The response to this query **always** should include the `is_fresh_instance` boolean + * property. 
If it is missing then something has gone wrong with Watchman. Since we can't + * prove that Watchman has not restarted, we must treat this as an error. *) + Hh_logger.error + "Invalid Watchman response to `empty_on_fresh_instance` query:\n%s" + (Hh_json.json_to_string ~pretty:true response); + raise Exit_status.(Exit_with Watchman_failed) + + let prepend_relative_path_term ~relative_path ~terms = + match terms with + | None -> None + | Some _ when relative_path = "" -> + (* If we're watching the watch root directory, then there's no point in specifying a list of + * files and directories to watch. We're already subscribed to any change in this watch root + * anyway *) + None + | Some terms -> + (* So lets say we're being told to watch foo/bar. Is foo/bar a directory? Is it a file? If it + * is a file now, might it become a directory later? I'm not aware of aterm which will watch for either a file or a directory, so let's add two terms *) + Some + ( J.strlist ["dirname"; relative_path] + :: J.strlist ["name"; relative_path] + :: terms ) + + let re_init + ?prior_clockspec + { + init_timeout; + subscribe_mode; + expression_terms; + debug_logging; + roots; + subscription_prefix; + } = + with_crash_record_opt "init" + @@ fun () -> + Watchman_process.open_connection ~timeout:init_timeout + >>= fun conn -> + Watchman_process.request + ~debug_logging + ~conn + ~timeout:Default_timeout + (capability_check ~optional:[flush_subscriptions_cmd] ["relative_root"]) + >>= fun capabilities -> let supports_flush = has_capability flush_subscriptions_cmd capabilities in - (** Disable subscribe if Watchman flush feature isn't supported. *) - let subscribe_mode = if supports_flush then subscribe_mode else None in - - Watchman_process.list_fold_values roots - ~init:(Some [], SSet.empty) - ~f: (fun (terms, watch_roots) path -> - (* Watch this root *) - Watchman_process.request - ~debug_logging ~conn (watch_project (Path.to_string path)) >|= fun response -> - let watch_root = J.get_string_val "watch" response in - let relative_path = J.get_string_val "relative_path" ~default:"" response in - - let terms = match terms with - | None -> None - | Some _ when relative_path = "" -> - (* If we're watching the watch root directory, then there's no point in specifying a list - * of files and directories to watch. We're already subscribed to any change in this - * watch root anyway *) - None - | Some terms -> - (* So lets say we're being told to watch foo/bar. Is foo/bar a directory? Is it a file? - * If it is a file now, might it become a directory later? I'm not aware of a term which - * will watch for either a file or a directory, so let's add two terms *) - Some (J.strlist ["dirname"; relative_path] :: J.strlist ["name"; relative_path] :: terms) - in - - let watch_roots = SSet.add watch_root watch_roots in - terms, watch_roots - ) - >>= fun (watched_path_expression_terms, watch_roots) -> - + (* Disable subscribe if Watchman flush feature isn't supported. *) + let subscribe_mode = + if supports_flush then + subscribe_mode + else + None + in + Watchman_process.list_fold_values + roots + ~init:(Some [], SSet.empty, SSet.empty) + ~f:(fun (terms, watch_roots, failed_paths) path -> + (* Watch this root. If the path doesn't exist, watch_project will throw. In that case catch + * the error and continue for now. 
*) + Watchman_process.catch + ~f:(fun () -> + Watchman_process.request + ~debug_logging + ~conn + (watch_project (Path.to_string path)) + >|= (fun response -> Some response)) + ~catch:(fun ~stack:_ _ -> Watchman_process.return None) + >|= fun response -> + match response with + | None -> + (terms, watch_roots, SSet.add (Path.to_string path) failed_paths) + | Some response -> + let watch_root = J.get_string_val "watch" response in + let relative_path = + J.get_string_val "relative_path" ~default:"" response + in + let terms = prepend_relative_path_term ~relative_path ~terms in + let watch_roots = SSet.add watch_root watch_roots in + (terms, watch_roots, failed_paths)) + >>= fun (watched_path_expression_terms, watch_roots, failed_paths) -> + (* The failed_paths are likely includes which don't exist on the filesystem, so watch_project + * returned an error. Let's do a best effort attempt to infer the watch root and relative + * path for each bad include *) + let watched_path_expression_terms = + SSet.fold + (fun path terms -> + String_utils.( + match + SSet.find_first_opt + (fun root -> string_starts_with path root) + watch_roots + with + | None -> + failwith (spf "Cannot deduce watch root for path %s" path) + | Some root -> + let relative_path = lstrip (lstrip path root) Filename.dir_sep in + prepend_relative_path_term ~relative_path ~terms)) + failed_paths + watched_path_expression_terms + in (* All of our watched paths should have the same watch root. Let's assert that *) - let watch_root = match SSet.elements watch_roots with - | [] -> failwith "Cannot run watchman with fewer than 1 root"; - | [watch_root] -> watch_root - | _ -> - failwith ( - spf "Can't watch paths across multiple Watchman watch_roots. Found %d watch_roots" - (SSet.cardinal watch_roots) - ) + let watch_root = + match SSet.elements watch_roots with + | [] -> failwith "Cannot run watchman with fewer than 1 root" + | [watch_root] -> watch_root + | _ -> + failwith + (spf + "Can't watch paths across multiple Watchman watch_roots. 
Found %d watch_roots" + (SSet.cardinal watch_roots)) in - (* If we don't have a prior clockspec, grab the current clock *) (match prior_clockspec with - | Some s -> Watchman_process.return s - | None -> - Watchman_process.request ~debug_logging ~conn (clock watch_root) - >|= J.get_string_val "clock" - ) >>= fun clockspec -> - + | Some clockspec -> + assert_watchman_has_not_restarted_since + ~debug_logging + ~conn + ~watch_root + ~clockspec + >>= (fun () -> Watchman_process.return clockspec) + | None -> + Watchman_process.request ~debug_logging ~conn (clock watch_root) + >|= J.get_string_val "clock") + >>= fun clockspec -> let watched_path_expression_terms = Option.map watched_path_expression_terms ~f:(J.pred "anyof") in - - let env = { - settings = { - init_timeout; - debug_logging; - subscribe_mode; - expression_terms; - roots; - subscription_prefix; - }; - conn; - watch_root; - watched_path_expression_terms; - clockspec; - subscription = Printf.sprintf "%s.%d" subscription_prefix (Unix.getpid ()); - } in + let env = + { + settings = + { + init_timeout; + debug_logging; + subscribe_mode; + expression_terms; + roots; + subscription_prefix; + }; + conn; + watch_root; + watched_path_expression_terms; + clockspec; + subscription = + Printf.sprintf "%s.%d" subscription_prefix (Unix.getpid ()); + } + in (match subscribe_mode with | None -> Watchman_process.return () | Some mode -> - Watchman_process.request ~debug_logging ~conn (subscribe ~mode env) >|= ignore - ) >|= fun () -> - env + Watchman_process.request ~debug_logging ~conn (subscribe ~mode env) + >|= ignore) + >|= (fun () -> env) let init ?since_clockspec settings () = let prior_clockspec = since_clockspec in re_init ?prior_clockspec settings let extract_file_names env json = - let files = try J.get_array_val "files" json with - (** When an hg.update happens, it shows up in the watchman subscription + let files = + try J.get_array_val "files" json + with + (* When an hg.update happens, it shows up in the watchman subscription * as a notification with no files key present. *) - | Not_found -> [] + | Caml.Not_found -> + [] + in + let files = + List.map files (fun json -> + let s = Hh_json.get_string_exn json in + let abs = Filename.concat env.watch_root s in + abs) in - let files = List.map files begin fun json -> - let s = Hh_json.get_string_exn json in - let abs = - Filename.concat env.watch_root s in - abs - end in files let within_backoff_time attempts time = - let offset = 4.0 *. (2.0 ** float (if attempts > 3 then 3 else attempts)) in - (Unix.time ()) >= time +. offset + let offset = + 4.0 + *. 2.0 + ** float + ( if attempts > 3 then + 3 + else + attempts ) + in + Unix.time () >= time +. offset - let maybe_restart_instance instance = match instance with + let maybe_restart_instance instance = + match instance with | Watchman_alive _ -> Watchman_process.return instance | Watchman_dead dead_env -> if dead_env.reinit_attempts >= max_reinit_attempts then - let () = Hh_logger.log - "Ran out of watchman reinit attempts. Exiting." in + let () = + Hh_logger.log "Ran out of watchman reinit attempts. Exiting." 
+ in raise Exit_status.(Exit_with Watchman_failed) else if within_backoff_time dead_env.reinit_attempts dead_env.dead_since - then + then ( let () = - Hh_logger.log "Attemping to reestablish watchman subscription" in - re_init ~prior_clockspec:dead_env.prior_clockspec dead_env.prior_settings + Hh_logger.log "Attemping to reestablish watchman subscription" + in + re_init + ~prior_clockspec:dead_env.prior_clockspec + dead_env.prior_settings >|= function | None -> Hh_logger.log "Reestablishing watchman subscription failed."; EventLogger.watchman_connection_reestablishment_failed (); - Watchman_dead { dead_env with - reinit_attempts = dead_env.reinit_attempts + 1 } + Watchman_dead + { dead_env with reinit_attempts = dead_env.reinit_attempts + 1 } | Some env -> Hh_logger.log "Watchman connection reestablished."; EventLogger.watchman_connection_reestablished (); Watchman_alive env - else + ) else Watchman_process.return instance + let close env = Watchman_process.close_connection env.conn + let close_channel_on_instance env = - Watchman_process.close_connection env.conn >|= fun () -> + close env + >|= fun () -> EventLogger.watchman_died_caught (); - Watchman_dead (dead_env_from_alive env), Watchman_unavailable + (Watchman_dead (dead_env_from_alive env), Watchman_unavailable) + + let with_instance instance ~try_to_restart ~on_alive ~on_dead = + ( if try_to_restart then + maybe_restart_instance instance + else + Watchman_process.return instance ) + >>= function + | Watchman_dead dead_env -> on_dead dead_env + | Watchman_alive env -> on_alive env (** Calls f on the instance, maybe restarting it if its dead and maybe * reverting it to a dead state if things go south. For example, if watchman @@ -574,18 +794,17 @@ struct * Alternatively, we also proactively revert to a dead instance if it appears * to be unresponsive (Timeout), and if reading the payload from it is * taking too long. *) - let call_on_instance instance source f = - maybe_restart_instance instance >>= fun instance -> - match instance with - | Watchman_dead _ -> - Watchman_process.return (instance, Watchman_unavailable) - | Watchman_alive env -> begin + let call_on_instance = + let on_dead dead_env = + Watchman_process.return (Watchman_dead dead_env, Watchman_unavailable) + in + let on_alive source f env = Watchman_process.catch ~f:(fun () -> - with_crash_record_exn source (fun () -> f env) >|= fun (env, result) -> - Watchman_alive env, result - ) - ~catch:(function + with_crash_record_exn source (fun () -> f env) + >|= (fun (env, result) -> (Watchman_alive env, result))) + ~catch:(fun ~stack exn -> + match exn with | Sys_error msg when msg = "Broken pipe" -> Hh_logger.log "Watchman Pipe broken."; close_channel_on_instance env @@ -593,7 +812,7 @@ struct Hh_logger.log "Watchman connection reset by peer."; close_channel_on_instance env | Sys_error msg when msg = "Bad file descriptor" -> - (** This happens when watchman is tearing itself down after we + (* This happens when watchman is tearing itself down after we * retrieved a sock address and connected to the sock address. That's * because Unix.open_connection (via Timeout.open_connection) doesn't * error even when the sock adddress is no longer valid and actually - @@ -604,7 +823,8 @@ struct * to start with. 
*) Hh_logger.log "Watchman bad file descriptor."; EventLogger.watchman_died_caught (); - Watchman_process.return (Watchman_dead (dead_env_from_alive env), Watchman_unavailable) + Watchman_process.return + (Watchman_dead (dead_env_from_alive env), Watchman_unavailable) | End_of_file -> Hh_logger.log "Watchman connection End_of_file. Closing channel"; close_channel_on_instance env @@ -618,210 +838,286 @@ struct Hh_logger.log "Watchman error: %s. Closing channel" msg; close_channel_on_instance env | e -> - let msg = Printexc.to_string e in + let msg = Printf.sprintf "%s\n%s" (Exn.to_string e) stack in EventLogger.watchman_uncaught_failure msg; - raise Exit_status.(Exit_with Watchman_failed) - ) - end + raise Exit_status.(Exit_with Watchman_failed)) + in + fun instance source f -> + with_instance + instance + ~try_to_restart:true + ~on_dead + ~on_alive:(on_alive source f) (** This is a large >50MB payload, which could longer than 2 minutes for * Watchman to generate and push down the channel. *) let get_all_files env = Watchman_process.catch ~f:(fun () -> - with_crash_record_exn "get_all_files" @@ fun () -> - Watchman_process.request - ~debug_logging:env.settings.debug_logging - (all_query env) - >|= fun response -> - env.clockspec <- J.get_string_val "clock" response; - extract_file_names env response - ) - ~catch:(fun _ -> raise Exit_status.(Exit_with Watchman_failed)) + with_crash_record_exn "get_all_files" + @@ fun () -> + Watchman_process.request + ~debug_logging:env.settings.debug_logging + ~timeout:Default_timeout + (all_query env) + >|= fun response -> + env.clockspec <- J.get_string_val "clock" response; + extract_file_names env response) + ~catch:(fun ~stack:_ _ -> raise Exit_status.(Exit_with Watchman_failed)) let make_state_change_response state name data = let metadata = J.try_get_val "metadata" data in match state with - | `Enter -> - State_enter (name, metadata) - | `Leave -> - State_leave (name, metadata) + | `Enter -> State_enter (name, metadata) + | `Leave -> State_leave (name, metadata) + + let extract_mergebase data = + Hh_json.Access.( + let accessor = return data in + let ret = + accessor + >>= get_obj "clock" + >>= get_string "clock" + >>= fun (clock, _) -> + accessor + >>= get_obj "clock" + >>= get_obj "scm" + >>= get_string "mergebase" + >>= (fun (mergebase, _) -> return (clock, mergebase)) + in + to_option ret) let make_mergebase_changed_response env data = - let open Hh_json.Access in - let accessor = return data in - accessor >>= - get_obj "clock" >>= - get_string "clock" >>= fun (clock, _) -> - accessor >>= get_obj "clock" >>= - get_obj "scm" >>= - get_string "mergebase" >>= fun (mergebase, keytrace) -> - let files = set_of_list @@ extract_file_names env data in - env.clockspec <- clock; - let response = Changed_merge_base (mergebase, files, clock) in - Ok ((env, response), keytrace) - - let transform_asynchronous_get_changes_response env data = match data with - | None -> - env, Files_changed (SSet.empty) - | Some data -> begin - - match make_mergebase_changed_response env data with - | Ok ((env, response), _) -> env, response - | Error _ -> - env.clockspec <- J.get_string_val "clock" data; - assert_no_fresh_instance data; - try env, make_state_change_response `Enter - (J.get_string_val "state-enter" data) data with - | Not_found -> - try env, make_state_change_response `Leave - (J.get_string_val "state-leave" data) data with - | Not_found -> - env, Files_changed (set_of_list @@ extract_file_names env data) - end + match extract_mergebase data with + | None -> 
Error "Failed to extract mergebase" + | Some (clock, mergebase) -> + let files = set_of_list @@ extract_file_names env data in + env.clockspec <- clock; + let response = Changed_merge_base (mergebase, files, clock) in + Ok (env, response) + + let transform_asynchronous_get_changes_response env data = + match data with + | None -> (env, Files_changed SSet.empty) + | Some data -> + begin + match make_mergebase_changed_response env data with + | Ok (env, response) -> (env, response) + | Error _ -> + env.clockspec <- J.get_string_val "clock" data; + assert_no_fresh_instance data; + (try + ( env, + make_state_change_response + `Enter + (J.get_string_val "state-enter" data) + data ) + with Caml.Not_found -> + (try + ( env, + make_state_change_response + `Leave + (J.get_string_val "state-leave" data) + data ) + with Caml.Not_found -> + ( env, + Files_changed (set_of_list @@ extract_file_names env data) ))) + end let get_changes ?deadline instance = - let timeout = Option.map deadline ~f:(fun deadline -> - let timeout = deadline -. (Unix.time ()) in - max timeout 0.0 - ) in - call_on_instance instance "get_changes" @@ fun env -> - let debug_logging = env.settings.debug_logging in - if env.settings.subscribe_mode <> None - then - Watchman_process.blocking_read ~debug_logging ?timeout ~conn:env.conn >|= fun response -> - let env, result = transform_asynchronous_get_changes_response env response in - env, Watchman_pushed result - else - let query = since_query env in - Watchman_process.request ~debug_logging ~conn:env.conn ?timeout query - >|= fun response -> - let env, changes = transform_asynchronous_get_changes_response env (Some response) in - env, Watchman_synchronous [changes] - - let flush_request ~(timeout:int) watch_root = - let open Hh_json in - let directive = JSON_Object [ - (** Watchman expects timeout milliseconds. *) - ("sync_timeout", (JSON_Number (string_of_int @@ timeout * 1000))) ] in - JSON_Array [ - JSON_String "flush-subscriptions"; - JSON_String watch_root; - directive; - ] + call_on_instance instance "get_changes" + @@ fun env -> + let timeout = + Option.map deadline (fun deadline -> + let timeout = deadline -. Unix.time () in + Explicit_timeout (max timeout 0.0)) + in + let debug_logging = env.settings.debug_logging in + if env.settings.subscribe_mode <> None then + Watchman_process.blocking_read ~debug_logging ?timeout ~conn:env.conn + >|= fun response -> + let (env, result) = + transform_asynchronous_get_changes_response env response + in + (env, Watchman_pushed result) + else + let query = since_query env in + Watchman_process.request ~debug_logging ~conn:env.conn ?timeout query + >|= fun response -> + let (env, changes) = + transform_asynchronous_get_changes_response env (Some response) + in + (env, Watchman_synchronous [changes]) + + let get_changes_since_mergebase ?timeout env = + Watchman_process.request + ?timeout + ~debug_logging:env.settings.debug_logging + (get_changes_since_mergebase_query env) + >|= extract_file_names env + + let get_mergebase ?timeout env = + Watchman_process.request + ?timeout + ~debug_logging:env.settings.debug_logging + (get_changes_since_mergebase_query env) + >|= fun response -> + match extract_mergebase response with + | Some (_clock, mergebase) -> mergebase + | None -> + raise (Watchman_error "Failed to extract mergebase from response") + + let flush_request ~(timeout : int) watch_root = + Hh_json.( + let directive = + JSON_Object + [ + (* Watchman expects timeout milliseconds. 
*) + ("sync_timeout", JSON_Number (string_of_int @@ (timeout * 1000))); + ] + in + JSON_Array + [JSON_String "flush-subscriptions"; JSON_String watch_root; directive]) let rec poll_until_sync ~deadline env acc = - let is_finished_flush_response json = match json with + let is_finished_flush_response json = + match json with | None -> false - | Some json -> begin - let open Hh_json.Access in - let is_synced = lazy ((return json) >>= get_array "synced" |> function - | Error _ -> false - | Ok (vs, _) -> - List.exists vs ~f:(fun str -> Hh_json.get_string_exn str = env.subscription) - ) in - let is_not_needed = lazy ((return json) >>= get_array "no_sync_needed" |> function - | Error _ -> false - | Ok (vs, _) -> - List.exists vs ~f:(fun str -> Hh_json.get_string_exn str = env.subscription) - ) in - Lazy.force is_synced || Lazy.force is_not_needed - end + | Some json -> + Hh_json.Access.( + let is_synced = + lazy + ( return json + >>= get_array "synced" + |> function + | Error _ -> false + | Ok (vs, _) -> + List.exists vs ~f:(fun str -> + Hh_json.get_string_exn str = env.subscription) ) + in + let is_not_needed = + lazy + ( return json + >>= get_array "no_sync_needed" + |> function + | Error _ -> false + | Ok (vs, _) -> + List.exists vs ~f:(fun str -> + Hh_json.get_string_exn str = env.subscription) ) + in + Lazy.force is_synced || Lazy.force is_not_needed) + in + let timeout = + let timeout = deadline -. Unix.time () in + if timeout <= 0.0 then + raise Timeout + else + Explicit_timeout timeout in - let timeout = deadline -. Unix.time () in - if timeout < 0.0 then raise Timeout else (); - let debug_logging = env.settings.debug_logging in - Watchman_process.blocking_read ~debug_logging ~timeout ~conn:env.conn >>= fun json -> - - if is_finished_flush_response json - then Watchman_process.return (env, acc) + Watchman_process.blocking_read ~debug_logging ~timeout ~conn:env.conn + >>= fun json -> + if is_finished_flush_response json then + Watchman_process.return (env, acc) else - let env, acc = match json with - | None -> env, acc - | Some json -> - let env, result = transform_asynchronous_get_changes_response env (Some json) in - env, (result::acc) + let (env, acc) = + match json with + | None -> (env, acc) + | Some json -> + let (env, result) = + transform_asynchronous_get_changes_response env (Some json) + in + (env, result :: acc) in poll_until_sync ~deadline env acc - let poll_until_sync ~deadline env = - poll_until_sync ~deadline env [] - - let get_changes_synchronously ~(timeout:int) instance = - call_on_instance instance "get_changes_synchronously" - @@ (fun env -> - if env.settings.subscribe_mode = None - then - let timeout = float_of_int timeout in - let query = since_query env in - Watchman_process.request - ~debug_logging:env.settings.debug_logging - ~conn:env.conn ~timeout query - >|= fun response -> - let env, changes = transform_asynchronous_get_changes_response env (Some response) in - env, Watchman_synchronous [changes] - else - let request = flush_request ~timeout env.watch_root in - let conn = env.conn in - Watchman_process.send_request_and_do_not_wait_for_response - ~debug_logging:env.settings.debug_logging ~conn request >>= fun () -> - let deadline = Unix.time () +. 
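flush_request above builds the flush-subscriptions command, and poll_until_sync then waits until the current subscription shows up in the response's synced or no_sync_needed arrays. A sketch of that exchange, with a hypothetical watch root and the dummy subscription name from the test env:

// Request written to the socket (sync_timeout is in milliseconds):
const flushRequest = [
  'flush-subscriptions',
  '/path/to/root',
  { sync_timeout: 30 * 1000 },
];

// Response shape poll_until_sync is waiting for:
const flushResponse = {
  synced: ['dummy_prefix.123456789'],
  no_sync_needed: [],
};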
(float_of_int timeout) in - poll_until_sync ~deadline env >|= fun (env, changes) -> - env, Watchman_synchronous (List.rev changes) - ) + let poll_until_sync ~deadline env = poll_until_sync ~deadline env [] + + let get_changes_synchronously ~(timeout : int) instance = + ( call_on_instance instance "get_changes_synchronously" + @@ fun env -> + if env.settings.subscribe_mode = None then + let timeout = Explicit_timeout (float timeout) in + let query = since_query env in + Watchman_process.request + ~debug_logging:env.settings.debug_logging + ~conn:env.conn + ~timeout + query + >|= fun response -> + let (env, changes) = + transform_asynchronous_get_changes_response env (Some response) + in + (env, Watchman_synchronous [changes]) + else + let request = flush_request ~timeout env.watch_root in + let conn = env.conn in + Watchman_process.send_request_and_do_not_wait_for_response + ~debug_logging:env.settings.debug_logging + ~conn + request + >>= fun () -> + let deadline = Unix.time () +. float_of_int timeout in + poll_until_sync ~deadline env + >|= (fun (env, changes) -> (env, Watchman_synchronous (List.rev changes))) + ) >|= function - | _, Watchman_unavailable -> + | (_, Watchman_unavailable) -> raise (Watchman_error "Watchman unavailable for synchronous response") - | _, Watchman_pushed _ -> + | (_, Watchman_pushed _) -> raise (Watchman_error "Wtf? pushed response from synchronous request") - | instance, Watchman_synchronous files -> - instance, files + | (instance, Watchman_synchronous files) -> (instance, files) let conn_of_instance = function | Watchman_dead _ -> None - | Watchman_alive {conn; _} -> Some conn + | Watchman_alive { conn; _ } -> Some conn module Testing = struct include Testing_common let get_test_env () = - Watchman_process.Testing.get_test_conn () >|= fun conn -> + Watchman_process.Testing.get_test_conn () + >|= fun conn -> { settings = test_settings; conn; watch_root = "/path/to/root"; clockspec = ""; - watched_path_expression_terms = Some (J.pred "anyof" [ - J.strlist ["dirname"; "foo"]; - J.strlist ["name"; "foo"]; - ]); + watched_path_expression_terms = + Some + (J.pred + "anyof" + [J.strlist ["dirname"; "foo"]; J.strlist ["name"; "foo"]]); subscription = "dummy_prefix.123456789"; } let transform_asynchronous_get_changes_response env json = transform_asynchronous_get_changes_response env json end - -end;; +end module Watchman_actual = struct include Functor (Regular_watchman_process) let get_reader instance = - Option.map (conn_of_instance instance) ~f:Regular_watchman_process.get_reader + Option.map + (conn_of_instance instance) + ~f:Regular_watchman_process.get_reader end module Watchman_mock = struct - exception Not_available_in_mocking type 'a result = 'a + type conn include Watchman_sig.Types + type env = string + type dead_env = unit + type watchman_instance = | Watchman_dead of dead_env | Watchman_alive of env @@ -830,39 +1126,40 @@ module Watchman_mock = struct let print_env env = env let init = ref None - let init_returns v = - init := v + + let init_returns v = init := v let changes = ref Watchman_unavailable - let get_changes_returns v = - changes := v + + let get_changes_returns v = changes := v let changes_synchronously = ref [] let all_files = ref [] - end module Testing = struct include Testing_common + let get_test_env () = "test_env" + let transform_asynchronous_get_changes_response _ _ = raise Not_available_in_mocking - end let init ?since_clockspec:_ _ () = !Mocking.init + let get_changes ?deadline instance = let _ = deadline in let result = 
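The test environment above encodes watched_path_expression_terms with J.pred "anyof" [...]. Assuming those AdhocJsonHelpers emit standard Watchman expression terms, the resulting JSON would be roughly:

// Watchman "anyof" expression term (shape assumed from Watchman's
// documented query syntax, not from this diff):
const watchedPathExpression = [
  'anyof',
  ['dirname', 'foo'],
  ['name', 'foo'],
];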
!Mocking.changes in Mocking.changes := Watchman_unavailable; - instance, result + (instance, result) let get_changes_synchronously ~timeout instance = let _ = timeout in let result = !Mocking.changes_synchronously in Mocking.changes_synchronously := []; - instance, result + (instance, result) let get_reader _ = None @@ -873,15 +1170,25 @@ module Watchman_mock = struct Mocking.all_files := []; result + let get_changes_since_mergebase ?timeout:_ _ = [] + + let get_mergebase ?timeout:_ _ = "mergebase" + + let close _ = () + + let with_instance instance ~try_to_restart:_ ~on_alive ~on_dead = + match instance with + | Watchman_dead dead_env -> on_dead dead_env + | Watchman_alive env -> on_alive env end module type S = sig include Watchman_sig.S with type 'a result = 'a - val get_reader: watchman_instance -> Buffered_line_reader.t option + val get_reader : watchman_instance -> Buffered_line_reader.t option end -include (val (if Injector_config.use_test_stubbing - then (module Watchman_mock : S) - else (module Watchman_actual : S) -)) +include ( val if Injector_config.use_test_stubbing then + (module Watchman_mock : S) + else + (module Watchman_actual : S) ) diff --git a/hack/watchman/watchman.mli b/hack/watchman/watchman.mli index 89bdbc72ac3..cbdb8148a39 100644 --- a/hack/watchman/watchman.mli +++ b/hack/watchman/watchman.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * @@ -7,20 +7,23 @@ * *) -module Watchman_process_helpers: sig +module Watchman_process_helpers : sig module J = Hh_json_helpers.AdhocJsonHelpers - val debug: bool + val debug : bool + + val timeout_to_secs : Watchman_sig.Types.timeout -> float option exception Read_payload_too_long - val assert_no_error: Hh_json.json -> unit - val sanitize_watchman_response: debug_logging:bool -> string -> Hh_json.json + val assert_no_error : Hh_json.json -> unit + + val sanitize_watchman_response : debug_logging:bool -> string -> Hh_json.json end -module Functor : functor (Watchman_process: Watchman_sig.WATCHMAN_PROCESS) - -> Watchman_sig.S with type 'a result = 'a Watchman_process.result +module Functor (Watchman_process : Watchman_sig.WATCHMAN_PROCESS) : + Watchman_sig.S with type 'a result = 'a Watchman_process.result include Watchman_sig.S with type 'a result = 'a -val get_reader: watchman_instance -> Buffered_line_reader.t option +val get_reader : watchman_instance -> Buffered_line_reader.t option diff --git a/hack/watchman/watchman_lwt.ml b/hack/watchman/watchman_lwt.ml index ff586ba656d..2d05f13b8fd 100644 --- a/hack/watchman/watchman_lwt.ml +++ b/hack/watchman/watchman_lwt.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. 
* @@ -7,39 +7,53 @@ * *) -module Lwt_watchman_process: Watchman_sig.WATCHMAN_PROCESS - with type 'a result = 'a Lwt.t = -struct +module Lwt_watchman_process : + Watchman_sig.WATCHMAN_PROCESS with type 'a result = 'a Lwt.t = struct include Watchman_sig.Types include Watchman.Watchman_process_helpers type 'a result = 'a Lwt.t + type conn = Buffered_line_reader_lwt.t * Lwt_io.output_channel - let (>>=) = Lwt.(>>=) - let (>|=) = Lwt.(>|=) + let ( >>= ) = Lwt.( >>= ) + + let ( >|= ) = Lwt.( >|= ) + let return = Lwt.return - let catch ~f ~catch = Lwt.catch f catch + external reraise : exn -> 'a = "%reraise" - let list_fold_values l ~init ~f = - Lwt_list.fold_left_s f init l + let catch ~f ~catch = + Lwt.catch f (fun e -> + match e with + | Lwt.Canceled -> reraise e + | e -> catch ~stack:(Printexc.get_backtrace ()) e) + + let list_fold_values l ~init ~f = Lwt_list.fold_left_s f init l (* Send a request to the watchman process *) let send_request ~debug_logging oc json = let json_str = Hh_json.(json_to_string json) in - if debug_logging then Hh_logger.info "Watchman request: %s" json_str ; + if debug_logging then Hh_logger.info "Watchman request: %s" json_str; + (* Print the json with a newline and then flush *) let%lwt () = Lwt_io.fprintl oc json_str in Lwt_io.flush oc let get_sockname timeout = - let process = Lwt_process.open_process_in - ("", [| "watchman"; "--no-pretty"; "get-sockname"; |]) + let process = + Lwt_process.open_process_in + ("", [|"watchman"; "--no-pretty"; "get-sockname"|]) in let%lwt output = - try%lwt Lwt_unix.with_timeout timeout @@ fun () -> Lwt_io.read_line process#stdout - with Lwt_unix.Timeout -> raise Timeout + match timeout_to_secs timeout with + | None -> Lwt_io.read_line process#stdout + | Some timeout -> + (try%lwt + Lwt_unix.with_timeout timeout + @@ (fun () -> Lwt_io.read_line process#stdout) + with Lwt_unix.Timeout -> raise Timeout) in let%lwt status = process#close in assert (status = Unix.WEXITED 0); @@ -49,18 +63,20 @@ struct (* Opens a connection to the watchman process through the socket *) let open_connection ~timeout = let%lwt sockname = get_sockname timeout in - let (ic, oc) = - if Sys.os_type = "Unix" - (* Yes, I know that Unix.open_connection uses the same fd for input and output. But I don't - * want to hardcode that assumption here. So let's pretend like ic and oc might be back by - * different fds *) - then Unix.open_connection (Unix.ADDR_UNIX sockname) + if + Sys.os_type = "Unix" + (* Yes, I know that Unix.open_connection uses the same fd for input and output. But I don't + * want to hardcode that assumption here. So let's pretend like ic and oc might be back by + * different fds *) + then + Unix.open_connection (Unix.ADDR_UNIX sockname) (* On Windows, however, named pipes behave like regular files from the client's perspective. * We just open the file and create in/out channels for it. The file permissions attribute * is not needed because the file should exist already but we have to pass something. 
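get_sockname above shells out to `watchman --no-pretty get-sockname` and reads a single line of output. A rough Node sketch of the same lookup; the sockname field is the key that command normally returns, which is an assumption here rather than something shown in this hunk:

const { execFile } = require('child_process');

// Resolve the Watchman socket path the same way get_sockname does.
execFile('watchman', ['--no-pretty', 'get-sockname'], (err, stdout) => {
  if (err) throw err;
  const sockname = JSON.parse(stdout).sockname;
  console.log('watchman socket:', sockname);
});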
*) - else let fd = Unix.openfile sockname [Unix.O_RDWR] 0o640 in - (Unix.in_channel_of_descr fd, Unix.out_channel_of_descr fd) + else + let fd = Unix.openfile sockname [Unix.O_RDWR] 0o640 in + (Unix.in_channel_of_descr fd, Unix.out_channel_of_descr fd) in let reader = Unix.descr_of_in_channel ic @@ -72,71 +88,83 @@ struct |> Lwt_unix.of_unix_file_descr ~blocking:true |> Lwt_io.of_fd ~mode:Lwt_io.output in - Lwt.return (reader, oc) let close_connection (reader, oc) = let%lwt () = Lwt_unix.close @@ Buffered_line_reader_lwt.get_fd reader in - Lwt_io.close oc + (* As mention above, if we open the connection with Unix.open_connection, we use a single fd for + * both input and output. That means we might be trying to close it twice here. If so, this + * second close with throw. So let's catch that exception and ignore it. *) + try%lwt Lwt_io.close oc + with Unix.Unix_error (Unix.EBADF, _, _) -> Lwt.return_unit let with_watchman_conn ~timeout f = let%lwt conn = open_connection ~timeout in let%lwt result = - try%lwt - f conn + try%lwt f conn with e -> + let e = Exception.wrap e in let%lwt () = close_connection conn in - raise e + Exception.reraise e in let%lwt () = close_connection conn in Lwt.return result (* Sends a request to watchman and returns the response. If we don't have a connection, * a new connection will be created before the request and destroyed after the response *) - let rec request ~debug_logging ?conn ?(timeout=120.0) json = + let rec request ~debug_logging ?conn ?(timeout = Default_timeout) json = match conn with | None -> - with_watchman_conn ~timeout (fun conn -> request ~debug_logging ~conn ~timeout json) - | Some (reader, oc) -> begin + with_watchman_conn ~timeout (fun conn -> + request ~debug_logging ~conn ~timeout json) + | Some (reader, oc) -> let%lwt () = send_request ~debug_logging oc json in let%lwt line = - try%lwt - Lwt_unix.with_timeout timeout @@ fun () -> Buffered_line_reader_lwt.get_next_line reader - with Lwt_unix.Timeout -> raise Timeout + match timeout_to_secs timeout with + | None -> Buffered_line_reader_lwt.get_next_line reader + | Some timeout -> + (try%lwt + Lwt_unix.with_timeout timeout + @@ (fun () -> Buffered_line_reader_lwt.get_next_line reader) + with Lwt_unix.Timeout -> raise Timeout) in Lwt.return @@ sanitize_watchman_response ~debug_logging line - end - let send_request_and_do_not_wait_for_response ~debug_logging ~conn:(_, oc) json = + let send_request_and_do_not_wait_for_response + ~debug_logging ~conn:(_, oc) json = send_request ~debug_logging oc json let has_input ~timeout reader = let fd = Buffered_line_reader_lwt.get_fd reader in - match timeout with + match timeout_to_secs timeout with | None -> Lwt.return @@ Lwt_unix.readable fd | Some timeout -> - try%lwt Lwt_unix.with_timeout timeout @@ fun () -> - let%lwt () = Lwt_unix.wait_read fd in - Lwt.return true - with Lwt_unix.Timeout -> Lwt.return false - - let blocking_read ~debug_logging ?timeout ~conn:(reader, _) = + (try%lwt + Lwt_unix.with_timeout timeout + @@ fun () -> + let%lwt () = Lwt_unix.wait_read fd in + Lwt.return true + with Lwt_unix.Timeout -> Lwt.return false) + + let blocking_read ~debug_logging ?(timeout = No_timeout) ~conn:(reader, _) = let%lwt ready = has_input ~timeout reader in if not ready then - if timeout = None || timeout = (Some 0.0) - then Lwt.return None - else raise Timeout + match timeout with + | No_timeout -> Lwt.return None + | _ -> raise Timeout else let%lwt output = try%lwt - Lwt_unix.with_timeout 40.0 @@ fun () -> Buffered_line_reader_lwt.get_next_line reader 
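send_request above writes one JSON value followed by a newline, and responses are consumed line by line through the buffered reader. A minimal Node sketch of that newline-delimited framing over the Unix socket; the socket path is hypothetical and 'version' is just a trivial command used for illustration:

const net = require('net');
const readline = require('readline');

const socket = net.createConnection({ path: '/usr/local/var/run/watchman/me-state/sock' });
const lines = readline.createInterface({ input: socket });

// One JSON request per line out, one JSON response per line back.
lines.on('line', (line) => console.log(JSON.parse(line)));
socket.write(JSON.stringify(['version']) + '\n');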
+ Lwt_unix.with_timeout 40.0 + @@ (fun () -> Buffered_line_reader_lwt.get_next_line reader) with Lwt_unix.Timeout -> - let () = Hh_logger.log "Lwt_watchman_process.blocking_read timed out" in + let () = + Hh_logger.log "Lwt_watchman_process.blocking_read timed out" + in raise Read_payload_too_long in Lwt.return @@ Some (sanitize_watchman_response ~debug_logging output) - module Testing = struct let get_test_conn () = let%lwt reader = Buffered_line_reader_lwt.get_null_reader () diff --git a/hack/watchman/watchman_lwt.mli b/hack/watchman/watchman_lwt.mli index 902e96cf642..05dca8f6aea 100644 --- a/hack/watchman/watchman_lwt.mli +++ b/hack/watchman/watchman_lwt.mli @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2018, Facebook, Inc. * All rights reserved. * diff --git a/hack/watchman/watchman_sig.ml b/hack/watchman/watchman_sig.ml index 8b10eb6b34c..2f3da6877bb 100644 --- a/hack/watchman/watchman_sig.ml +++ b/hack/watchman/watchman_sig.ml @@ -1,4 +1,4 @@ -(** +(* * Copyright (c) 2016, Facebook, Inc. * All rights reserved. * @@ -8,16 +8,18 @@ *) module Types = struct - exception Timeout + exception Watchman_error of string + exception Subscription_canceled_by_watchman + exception Watchman_restarted type subscribe_mode = | All_changes | Defer_changes - (** See also Watchman docs on drop. This means the subscriber will not + (* See also Watchman docs on drop. This means the subscriber will not * get a list of files changed during a repo update. Practically, this * is not useful for the typechecker process which needs to actually * know which files were changed. This is useful for the monitor to @@ -25,23 +27,28 @@ module Types = struct | Drop_changes | Scm_aware + type timeout = + | No_timeout + | Default_timeout + | Explicit_timeout of float + type init_settings = { - (** None for query mode, otherwise specify subscriptions mode. *) + (* None for query mode, otherwise specify subscriptions mode. *) subscribe_mode: subscribe_mode option; - (** Seconds used for init timeout - will be reused for reinitialization. *) - init_timeout: int; - (** See watchman expression terms. *) + (* Seconds used for init timeout - will be reused for reinitialization. None -> no timeout *) + init_timeout: timeout; + (* See watchman expression terms. *) expression_terms: Hh_json.json list; debug_logging: bool; roots: Path.t list; subscription_prefix: string; } - (** The message's clock. *) type clock = string + (** The message's clock. *) type pushed_changes = - (** + (* * State name and metadata. * * For example: @@ -75,62 +82,105 @@ end * if you change this to a functor). *) module Abstract_types = struct type env + type dead_env - (** This has to be repeated because they depend on the abstract types. *) + + (* This has to be repeated because they depend on the abstract types. 
*) type watchman_instance = | Watchman_dead of dead_env | Watchman_alive of env end - module type WATCHMAN_PROCESS = sig type 'a result + type conn exception Read_payload_too_long - val (>>=): 'a result -> ('a -> 'b result) -> 'b result - val (>|=): 'a result -> ('a -> 'b) -> 'b result - val return: 'a -> 'a result - val catch: f:(unit -> 'b result) -> catch:(exn -> 'b result) -> 'b result + val ( >>= ) : 'a result -> ('a -> 'b result) -> 'b result + + val ( >|= ) : 'a result -> ('a -> 'b) -> 'b result + + val return : 'a -> 'a result - val list_fold_values: 'a list -> init:'b -> f:('b -> 'a -> 'b result) -> 'b result + val catch : + f:(unit -> 'b result) -> + catch:(stack:string -> exn -> 'b result) -> + 'b result - val open_connection: timeout:float -> conn result - val request: - debug_logging:bool -> ?conn:conn -> ?timeout:float -> Hh_json.json -> Hh_json.json result - val send_request_and_do_not_wait_for_response: + val list_fold_values : + 'a list -> init:'b -> f:('b -> 'a -> 'b result) -> 'b result + + val open_connection : timeout:Types.timeout -> conn result + + val request : + debug_logging:bool -> + ?conn:conn -> + ?timeout:Types.timeout -> + Hh_json.json -> + Hh_json.json result + + val send_request_and_do_not_wait_for_response : debug_logging:bool -> conn:conn -> Hh_json.json -> unit result - val blocking_read: debug_logging:bool -> ?timeout:float -> conn:conn -> Hh_json.json option result - val close_connection: conn -> unit result - module Testing: sig - val get_test_conn: unit -> conn result + val blocking_read : + debug_logging:bool -> + ?timeout:Types.timeout -> + conn:conn -> + Hh_json.json option result + + val close_connection : conn -> unit result + + module Testing : sig + val get_test_conn : unit -> conn result end end module type S = sig - include module type of Types + include module type of Abstract_types type 'a result + type conn - val init: ?since_clockspec:string -> init_settings -> unit -> env option result + val init : + ?since_clockspec:string -> init_settings -> unit -> env option result + + val get_all_files : env -> string list result + + val get_changes_since_mergebase : + ?timeout:timeout -> env -> string list result + + val get_mergebase : ?timeout:timeout -> env -> string result - val get_all_files: env -> string list result + val get_changes : + ?deadline:float -> + watchman_instance -> + (watchman_instance * changes) result - val get_changes: ?deadline:float -> - watchman_instance -> (watchman_instance * changes) result - val get_changes_synchronously: timeout:int -> - watchman_instance -> (watchman_instance * (pushed_changes list)) result + val get_changes_synchronously : + timeout:int -> + watchman_instance -> + (watchman_instance * pushed_changes list) result - val conn_of_instance: watchman_instance -> conn option + val conn_of_instance : watchman_instance -> conn option - (** Expose some things for testing. *) + val close : env -> unit result + + val with_instance : + watchman_instance -> + try_to_restart:bool -> + on_alive:(env -> 'a result) -> + on_dead:(dead_env -> 'a result) -> + 'a result + + (* Expose some things for testing. 
*) module Testing : sig val get_test_env : unit -> env result + val test_settings : init_settings val transform_asynchronous_get_changes_response : @@ -139,8 +189,9 @@ module type S = sig module Mocking : sig val print_env : env -> string + val init_returns : string option -> unit + val get_changes_returns : changes -> unit end - -end;; +end diff --git a/hack/watchman/watchman_utils.ml b/hack/watchman/watchman_utils.ml index 7430ec8dda6..34ad16bce7f 100644 --- a/hack/watchman/watchman_utils.ml +++ b/hack/watchman/watchman_utils.ml @@ -1,25 +1,29 @@ - - (** State_enter and State_leave events contains a JSON blob specifying * the revision we are moving to. This gets it. *) let rev_in_state_change json = - let open Hh_json.Access in - (return json) >>= - get_string "rev" |> function + Hh_json.Access.( + return json + >>= get_string "rev" + |> function | Error _ -> - let () = Hh_logger.log - "Watchman_utils failed to get rev in json: %s" - (Hh_json.json_to_string json) in + let () = + Hh_logger.log + "Watchman_utils failed to get rev in json: %s" + (Hh_json.json_to_string json) + in None - | Ok (v, _) -> Some v + | Ok (v, _) -> Some v) let merge_in_state_change json = - let open Hh_json.Access in - (return json) >>= - get_bool "merge" |> function + Hh_json.Access.( + return json + >>= get_bool "merge" + |> function | Error _ -> - let () = Hh_logger.log - "Watchman_utils failed to get merge in json: %s" - (Hh_json.json_to_string json) in + let () = + Hh_logger.log + "Watchman_utils failed to get merge in json: %s" + (Hh_json.json_to_string json) + in None - | Ok (v, _) -> Some v + | Ok (v, _) -> Some v) diff --git a/lib/bom.js b/lib/bom.js index 30633bfecc4..4030881dd09 100644 --- a/lib/bom.js +++ b/lib/bom.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,138 +7,307 @@ /* BOM */ declare class Screen { - availHeight: number; - availLeft: number; - availTop: number; - availWidth: number; - colorDepth: number; - height: number; - left: number; - mozOrientation?: string; - onmozorientationchange?: any; - orientation?: { + +availHeight: number; + +availWidth: number; + +availLeft: number; + +availTop: number; + +top: number; + +left: number; + +colorDepth: number; + +pixelDepth: number; + +width: number; + +height: number; + +orientation?: { lock(): Promise; unlock(): void; angle: number; onchange: () => mixed; type: 'portrait-primary' | 'portrait-secondary' | 'landscape-primary' | 'landscape-secondary'; + ... }; - pixelDepth: number; - top: number; - width: number; - mozLockOrientation?: Function; - mozUnlockOrientation?: Function; + // deprecated + mozLockOrientation?: (orientation: string | Array) => boolean; + mozUnlockOrientation?: () => void; mozOrientation?: string; - onmozorientationchange?: Function; + onmozorientationchange?: (...args: any[]) => mixed; } declare var screen: Screen; declare var window: any; type GamepadButton = { - pressed: bool; - value: number; + pressed: bool, + value: number, + ... } type GamepadHapticActuator = { - type: 'vibration'; - pulse(value: number, duration: number): Promise; + type: 'vibration', + pulse(value: number, duration: number): Promise, + ... 
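The Gamepad-related declarations in this file (GamepadButton above, Gamepad just below, and navigator.getGamepads further down) correspond to the standard Gamepad API; a brief usage sketch:

// Log connected pads, their pressed-button count, and axes.
function logGamepads() {
  const pads = navigator.getGamepads ? navigator.getGamepads() : [];
  for (const pad of pads) {
    if (pad) {
      console.log(pad.id, pad.buttons.filter((b) => b.pressed).length, pad.axes);
    }
  }
}
window.addEventListener('gamepadconnected', logGamepads);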
} type GamepadPose = { - angularAcceleration: null | Float32Array; - angularVelocity: null | Float32Array; - hasOrientation: boolean; - hasPosition: boolean; - linearAcceleration: null | Float32Array; - linearVelocity: null | Float32Array; - orientation: null | Float32Array; - position: null | Float32Array; + angularAcceleration: null | Float32Array, + angularVelocity: null | Float32Array, + hasOrientation: boolean, + hasPosition: boolean, + linearAcceleration: null | Float32Array, + linearVelocity: null | Float32Array, + orientation: null | Float32Array, + position: null | Float32Array, + ... } type Gamepad = { - axes: number[]; - buttons: GamepadButton[]; - connected: bool; - displayId?: number; - hapticActuators?: GamepadHapticActuator[]; - hand?: '' | 'left' | 'right'; - id: string; - index: number; - mapping: string; - pose?: null | GamepadPose; - timestamp: number; -} - + axes: number[], + buttons: GamepadButton[], + connected: bool, + displayId?: number, + hapticActuators?: GamepadHapticActuator[], + hand?: '' | 'left' | 'right', + id: string, + index: number, + mapping: string, + pose?: null | GamepadPose, + timestamp: number, + ... +} + +// deprecated type BatteryManager = { - charging: boolean; - chargingTime: number; - dischargingTime: number; - level: number; - onchargingchange: ?Function; - onchargingtimechange: ?Function; - ondischargingtimechange: ?Function; - onlevelchange: ?Function; + +charging: boolean, + +chargingTime: number, + +dischargingTime: number, + +level: number, + onchargingchange: ?((event: any) => mixed), + onchargingtimechange: ?((event: any) => mixed), + ondischargingtimechange: ?((event: any) => mixed), + onlevelchange: ?((event: any) => mixed), + ... } // https://wicg.github.io/web-share type ShareData = { - title?: string; - text?: string; - url?: string; + title?: string, + text?: string, + url?: string, + ... +} + +type PermissionName = + | "geolocation" + | "notifications" + | "push" + | "midi" + | "camera" + | "microphone" + | "speaker" + | "device-info" + | "background-sync" + | "bluetooth" + | "persistent-storage" + | "ambient-light-sensor" + | "accelerometer" + | "gyroscope" + | "magnetometer" + | "clipboard-read" + | "clipboard-write"; + +type PermissionState = + | "granted" + | "denied" + | "prompt"; + +type PermissionDescriptor = {| + name: PermissionName; +|} + +type DevicePermissionDescriptor = {| + deviceId?: string; + name: "camera" | "microphone" | "speaker"; +|} + +type MidiPermissionDescriptor = {| + name: "midi"; + sysex?: boolean; +|} + +type PushPermissionDescriptor = {| + name: "push"; + userVisibleOnly?: boolean; +|} + +type ClipboardPermissionDescriptor = {| + name: "clipboard-read" | "clipboard-write"; + allowWithoutGesture: boolean; +|} + +declare class PermissionStatus extends EventTarget { + onchange: ?((event: any) => mixed); + +state: PermissionState; +} + +declare class Permissions { + query( + permissionDesc: + | DevicePermissionDescriptor + | MidiPermissionDescriptor + | PushPermissionDescriptor + | ClipboardPermissionDescriptor + | PermissionDescriptor + ): Promise; } -declare class NavigatorCommon { +type MIDIPortType = 'input' | 'output'; +type MIDIPortDeviceState = 'connected' | 'disconnected'; +type MIDIPortConnectionState = 'open' | 'closed' | 'pending'; + +type MIDIOptions = {| + sysex: boolean; + software: boolean; +|} + +type MIDIMessageEvent$Init = Event$Init & { + data: Uint8Array; + ... 
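A short usage sketch for the Permissions and PermissionStatus declarations above; the permission name is just an example:

// Query a permission and react to later changes.
async function checkNotifications() {
  const status = await navigator.permissions.query({ name: 'notifications' });
  console.log('notifications permission:', status.state);
  status.onchange = () => console.log('changed to', status.state);
}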
+} + +declare class MIDIMessageEvent extends Event { + constructor(type: string, eventInitDict: MIDIMessageEvent$Init): void; + +data: Uint8Array; +} + +type MIDIConnectionEvent$Init = Event$Init & { + port: MIDIPort; + ... +} + +declare class MIDIConnectionEvent extends Event { + constructor(type: string, eventInitDict: MIDIConnectionEvent$Init): void; + +port: MIDIPort; +} + +declare class MIDIPort extends EventTarget { + +id: string; + +manufacturer?: string; + +name?: string; + +type: MIDIPortType; + +version?: string; + +state: MIDIPortDeviceState; + +connection: MIDIPortConnectionState; + onstatechange: ?((ev: MIDIConnectionEvent) => mixed); + open(): Promise; + close(): Promise; +} + +declare class MIDIInput extends MIDIPort { + onmidimessage: ?((ev: MIDIMessageEvent) => mixed); +} + +declare class MIDIOutput extends MIDIPort { + send(data: Iterable, timestamp?: number): void; + clear(): void; +} + +declare class MIDIInputMap extends $ReadOnlyMap {} + +declare class MIDIOutputMap extends $ReadOnlyMap {} + +declare class MIDIAccess extends EventTarget { + +inputs: MIDIInputMap; + +outputs: MIDIOutputMap; + +sysexEnabled: boolean; + onstatechange: ?((ev: MIDIConnectionEvent) => mixed); +} + +declare class NavigatorID { appName: 'Netscape'; + appCodeName: 'Mozilla'; + product: 'Gecko'; appVersion: string; platform: string; userAgent: string; - language: string; - languages: Array; - onLine: boolean; - hardwareConcurrency: number; } -declare class Navigator mixins NavigatorCommon { +declare class NavigatorLanguage { + +language: string; + +languages: $ReadOnlyArray; +} + +declare class NavigatorContentUtils { + registerContentHandler(mimeType: string, uri: string, title: string): void; + registerProtocolHandler(protocol: string, uri: string, title: string): void; +} + +declare class NavigatorCookies { + +cookieEnabled: boolean; +} + +declare class NavigatorPlugins { + +plugins: PluginArray; + +mimeTypes: MimeTypeArray; + javaEnabled(): boolean; +} + +declare class NavigatorOnLine { + +onLine: boolean; +} + +declare class NavigatorConcurrentHardware { + +hardwareConcurrency: number; +} + +declare class Navigator mixins + NavigatorID, + NavigatorLanguage, + NavigatorOnLine, + NavigatorContentUtils, + NavigatorCookies, + NavigatorPlugins, + NavigatorConcurrentHardware { + productSub: '20030107' | '20100101'; + vendor: '' | 'Google Inc.' | 'Apple Computer, Inc'; + vendorSub: ''; + activeVRDisplays?: VRDisplay[]; appCodeName: 'Mozilla'; buildID: string; - cookieEnabled: boolean; - doNotTrack?: any; + doNotTrack: string | null; geolocation: Geolocation; - mediaDevices?: Object; - javaEnabled: Function; + mediaDevices?: MediaDevices; maxTouchPoints: number; - mimeTypes: MimeTypeArray; - oscpu: string; - permissions: any; - plugins: PluginArray; - product: 'Gecko'; - productSub: '20030107' | '20100101'; - serviceWorker?: Object; - vendor: '' | 'Google Inc.' 
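The MIDI declarations above mirror the Web MIDI API; a minimal sketch, guarded because requestMIDIAccess is optional on Navigator:

if (navigator.requestMIDIAccess) {
  navigator.requestMIDIAccess().then((access) => {
    access.inputs.forEach((input) => {
      console.log('MIDI input:', input.name, input.manufacturer);
      input.onmidimessage = (ev) => console.log(Array.from(ev.data));
    });
    access.onstatechange = (ev) => console.log(ev.port.state, ev.port.connection);
  });
}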
| 'Apple Computer, Inc'; - vendorSub: ''; - getBattery?: () => Promise; - mozGetBattery?: () => Promise; - getGamepads?: () => Object[]; + permissions: Permissions; + serviceWorker?: ServiceWorkerContainer; + getGamepads?: () => Array; webkitGetGamepads?: Function; mozGetGamepads?: Function; mozGamepads?: any; gamepads?: any; webkitGamepads?: any; getVRDisplays?: () => Promise; - requestMIDIAccess?: Function; registerContentHandler(mimeType: string, uri: string, title: string): void; registerProtocolHandler(protocol: string, uri: string, title: string): void; + requestMIDIAccess?: (options?: MIDIOptions) => Promise; requestMediaKeySystemAccess?: (keySystem: string, supportedConfigurations: any[]) => Promise; - sendBeacon?: Function; + sendBeacon?: (url: string, data?: BodyInit) => boolean; + vibrate?: (pattern: number | number[]) => boolean; + mozVibrate?: (pattern: number | number[]) => boolean; + webkitVibrate?: (pattern: number | number[]) => boolean; + share?: (shareData: ShareData) => Promise; + clipboard: Clipboard; + credentials?: CredMgmtCredentialsContainer; + + // deprecated + getBattery?: () => Promise; + mozGetBattery?: () => Promise; + + // deprecated getUserMedia?: Function; webkitGetUserMedia?: Function; mozGetUserMedia?: Function; msGetUserMedia?: Function; - taintEnabled?: Function; - vibrate?: (pattern: number|number[]) => bool; - mozVibrate?: (pattern: number|number[]) => bool; - webkitVibrate?: (pattern: number|number[]) => bool; - share?: (shareData: ShareData) => Promise; - clipboard: Clipboard; + + // Gecko + taintEnabled?: () => false; + oscpu: string; } declare class Clipboard extends EventTarget { @@ -223,9 +392,10 @@ declare class PerformanceNavigation { } type PerformanceEntryFilterOptions = { - name: string; - entryType: string; - initiatorType: string; + name: string, + entryType: string, + initiatorType: string, + ... } // https://www.w3.org/TR/performance-timeline-2/ @@ -268,9 +438,11 @@ declare class PerformanceNavigationTiming extends PerformanceResourceTiming { } declare class Performance { + // deprecated navigation: PerformanceNavigation; - onresourcetimingbufferfull: (ev: Event) => any; timing: PerformanceTiming; + + onresourcetimingbufferfull: (ev: any) => mixed; clearMarks(name?: string): void; clearMeasures(name?: string): void; clearResourceTimings(): void; @@ -286,13 +458,38 @@ declare class Performance { declare var performance: Performance; +type PerformanceEntryList = PerformanceEntry[]; + +declare interface PerformanceObserverEntryList { + getEntries(): PerformanceEntryList; + getEntriesByType(type: string): PerformanceEntryList; + getEntriesByName(name: string, type: ?string): PerformanceEntryList; +} + +type PerformanceObserverInit = { + entryTypes?: string[]; + type?: string; + buffered?: boolean; + ... 
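A usage sketch for the tightened sendBeacon and vibrate signatures above; the endpoint and payload are hypothetical:

// Fire-and-forget logging on unload; sendBeacon returns a boolean.
window.addEventListener('unload', () => {
  if (navigator.sendBeacon) {
    navigator.sendBeacon('/analytics', JSON.stringify({ event: 'page_close' }));
  }
});

// Vibration pattern in milliseconds, where supported.
if (navigator.vibrate) navigator.vibrate([200, 100, 200]);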
+} + +declare class PerformanceObserver { + constructor(callback: (entries: PerformanceObserverEntryList, observer: PerformanceObserver) => mixed): void; + + observe(options: ?PerformanceObserverInit): void; + disconnect(): void; + takeRecords(): PerformanceEntryList; + + static supportedEntryTypes: string[]; +} + declare class History { length: number; scrollRestoration: 'auto' | 'manual'; state: any; back(): void; forward(): void; - go(delta?: any): void; + go(delta?: number): void; pushState(statedata: any, title: string, url?: string): void; replaceState(statedata: any, title: string, url?: string): void; } @@ -361,66 +558,96 @@ declare class MutationRecord { } type MutationObserverInitRequired = - | { childList: true } - | { attributes: true } - | { characterData: true } + | { childList: true, ... } + | { attributes: true, ... } + | { characterData: true, ... } declare type MutationObserverInit = MutationObserverInitRequired & { - subtree?: boolean; - attributeOldValue?: boolean; - characterDataOldValue?: boolean; - attributeFilter?: Array; + subtree?: boolean, + attributeOldValue?: boolean, + characterDataOldValue?: boolean, + attributeFilter?: Array, + ... } declare class MutationObserver { - constructor(callback: (arr: Array, observer: MutationObserver) => any): void; + constructor(callback: (arr: Array, observer: MutationObserver) => mixed): void; observe(target: Node, options: MutationObserverInit): void; takeRecords(): Array; disconnect(): void; } declare class DOMRectReadOnly { + static fromRect(rectangle?: { x: number, y: number, width: number, height: number, - top: number, - right: number, - bottom: number, - left: number, - constructor(x: number, y: number, width: number, height: number): void, - static fromRect(rectangle?: { x?: number, y?: number, width?: number, height?: number }): DOMRect, + ... + }): DOMRectReadOnly; + constructor(x: number, y: number, width: number, height: number): DOMRectReadOnly; + +bottom: number; + +height: number; + +left: number; + +right: number; + +top: number; + +width: number; + +x: number; + +y: number; } declare class DOMRect extends DOMRectReadOnly { - constructor(x: number, y: number, width: number, height: number): void, + static fromRect(rectangle?: { + x: number, + y: number, + width: number, + height: number, + ... + }): DOMRect; + bottom: number; + height: number; + left: number; + right: number; + top: number; + width: number; + x: number; + y: number; +} + +declare class DOMRectList { + @@iterator(): Iterator; + length: number; + item(index: number): DOMRect; + [index: number]: DOMRect; } declare type IntersectionObserverEntry = { - boundingClientRect: DOMRectReadOnly, - intersectionRatio: number, - intersectionRect: DOMRectReadOnly, - isIntersecting: boolean, - rootBounds: DOMRectReadOnly, - target: HTMLElement, - time: DOMHighResTimeStamp, + boundingClientRect: DOMRectReadOnly, + intersectionRatio: number, + intersectionRect: DOMRectReadOnly, + isIntersecting: boolean, + rootBounds: DOMRectReadOnly, + target: HTMLElement, + time: DOMHighResTimeStamp, + ... }; declare type IntersectionObserverCallback = ( entries: Array, observer: IntersectionObserver, -) => any; +) => mixed; declare type IntersectionObserverOptions = { - root?: Node | null, - rootMargin?: string, - threshold?: number | Array, + root?: Node | null, + rootMargin?: string, + threshold?: number | Array, + ... 
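A sketch of the newly declared PerformanceObserver in use, observing user-timing measures:

const observer = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(entry.name, entry.duration);
  }
});
observer.observe({ entryTypes: ['measure'] });

performance.mark('start');
performance.mark('end');
performance.measure('startup', 'start', 'end');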
}; declare class IntersectionObserver { constructor( callback: IntersectionObserverCallback, - options: IntersectionObserverOptions + options?: IntersectionObserverOptions ): void, observe(target: HTMLElement): void, unobserve(target: HTMLElement): void, @@ -441,23 +668,24 @@ declare class ResizeObserver { } declare var NodeFilter: { - acceptNode(n: Node): number; - SHOW_ENTITY_REFERENCE: number; - SHOW_NOTATION: number; - SHOW_ENTITY: number; - SHOW_DOCUMENT: number; - SHOW_PROCESSING_INSTRUCTION: number; - FILTER_REJECT: number; - SHOW_CDATA_SECTION: number; - FILTER_ACCEPT: number; - SHOW_ALL: number; - SHOW_DOCUMENT_TYPE: number; - SHOW_TEXT: number; - SHOW_ELEMENT: number; - SHOW_COMMENT: number; - FILTER_SKIP: number; - SHOW_ATTRIBUTE: number; - SHOW_DOCUMENT_FRAGMENT: number; + acceptNode(n: Node): number, + SHOW_ENTITY_REFERENCE: number, + SHOW_NOTATION: number, + SHOW_ENTITY: number, + SHOW_DOCUMENT: number, + SHOW_PROCESSING_INSTRUCTION: number, + FILTER_REJECT: number, + SHOW_CDATA_SECTION: number, + FILTER_ACCEPT: number, + SHOW_ALL: number, + SHOW_DOCUMENT_TYPE: number, + SHOW_TEXT: number, + SHOW_ELEMENT: number, + SHOW_COMMENT: number, + FILTER_SKIP: number, + SHOW_ATTRIBUTE: number, + SHOW_DOCUMENT_FRAGMENT: number, + ... }; declare class CloseEvent extends Event { @@ -475,60 +703,72 @@ declare class WebSocket extends EventTarget { protocol: string; readyState: number; bufferedAmount: number; - onopen: (ev: Event) => any; extensions: string; - onmessage: (ev: MessageEvent) => any; - onclose: (ev: CloseEvent) => any; - onerror: (ev: Event) => any; - binaryType: string; + onopen: (ev: any) => mixed; + onmessage: (ev: MessageEvent) => mixed; + onclose: (ev: CloseEvent) => mixed; + onerror: (ev: any) => mixed; + binaryType: 'blob' | 'arraybuffer'; url: string; close(code?: number, reason?: string): void; - send(data: any): void; + send(data: string): void; + send(data: Blob): void; + send(data: ArrayBuffer): void; + send(data: $ArrayBufferView): void; CONNECTING: 0; OPEN: 1; CLOSING: 2; CLOSED: 3; } +type WorkerOptions = { + type?: WorkerType, + credentials?: CredentialsType, + name?: string, + ... 
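A usage sketch for IntersectionObserver as declared above (note the options argument is now optional); the selector and thresholds are illustrative:

const io = new IntersectionObserver((entries) => {
  for (const entry of entries) {
    if (entry.isIntersecting) {
      console.log('visible:', entry.target, entry.intersectionRatio);
    }
  }
}, { threshold: [0, 0.5, 1] });

const el = document.querySelector('.lazy');
if (el instanceof HTMLElement) io.observe(el);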
+} + declare class Worker extends EventTarget { - constructor(stringUrl: string): void; - onerror: null | (ev: Event) => any; - onmessage: null | (ev: MessageEvent) => any; - onmessageerror: null | (ev: MessageEvent) => any; + constructor(stringUrl: string, workerOptions?: WorkerOptions): void; + onerror: null | (ev: any) => mixed; + onmessage: null | (ev: MessageEvent) => mixed; + onmessageerror: null | (ev: MessageEvent) => mixed; postMessage(message: any, ports?: any): void; terminate(): void; } declare class SharedWorker extends EventTarget { - constructor(stringUrl: string): void; + constructor(stringUrl: string, name?: string): void; + constructor(stringUrl: string, workerOptions?: WorkerOptions): void; port: MessagePort; - onerror: (ev: Event) => any; + onerror: (ev: any) => mixed; } declare function importScripts(...urls: Array): void; declare class WorkerGlobalScope extends EventTarget { - self: WorkerGlobalScope; + self: this; location: WorkerLocation; navigator: WorkerNavigator; close(): void; importScripts(...urls: Array): void; - onerror: (ev: Event) => any; - onlanguagechange: (ev: Event) => any; - onoffline: (ev: Event) => any; - ononline: (ev: Event) => any; - onrejectionhandled: (ev: PromiseRejectionEvent) => any; - onunhandledrejection: (ev: PromiseRejectionEvent) => any; + onerror: (ev: any) => mixed; + onlanguagechange: (ev: any) => mixed; + onoffline: (ev: any) => mixed; + ononline: (ev: any) => mixed; + onrejectionhandled: (ev: PromiseRejectionEvent) => mixed; + onunhandledrejection: (ev: PromiseRejectionEvent) => mixed; } declare class DedicatedWorkerGlobalScope extends WorkerGlobalScope { - onmessage(): (ev: MessageEvent) => any; - postMessage(message: any, transfer?: Iterable): void; + onmessage: (ev: MessageEvent) => mixed; + onmessageerror: (ev: MessageEvent) => mixed; + postMessage(message: any, transfer?: Iterable): void; } declare class SharedWorkerGlobalScope extends WorkerGlobalScope { name: string; - onconnect: (ev: MessageEvent) => any; + onconnect: (ev: MessageEvent) => mixed; } declare class WorkerLocation { @@ -542,28 +782,37 @@ declare class WorkerLocation { hash: string; } -declare class WorkerNavigator mixins NavigatorCommon {} +declare class WorkerNavigator mixins + NavigatorID, + NavigatorLanguage, + NavigatorOnLine, + NavigatorConcurrentHardware { + permissions: Permissions; + } + +// deprecated declare class XDomainRequest { timeout: number; - onerror: (ev: Event) => any; - onload: (ev: Event) => any; - onprogress: (ev: Event) => any; - ontimeout: (ev: Event) => any; - responseText: string; - contentType: string; - open(method: string, url: string): void; + onerror: () => mixed; + onload: () => mixed; + onprogress: () => mixed; + ontimeout: () => mixed; + +responseText: string; + +contentType: string; + open(method: "GET" | "POST", url: string): void; abort(): void; - send(data?: any): void; - addEventListener(type: string, listener: (evt: any) => void, useCapture?: boolean): void; + send(data?: string): void; - statics: { - create(): XDomainRequest; - } + statics: { create(): XDomainRequest, ... 
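A sketch of the second Worker constructor argument (WorkerOptions) in use; the script URL and message shape are hypothetical:

const worker = new Worker('compute.js', { type: 'module', name: 'compute' });
worker.onmessage = (ev) => console.log('result:', ev.data);
worker.onerror = (err) => console.error(err);
worker.postMessage({ job: 'sum', values: [1, 2, 3] });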
} } - declare class XMLHttpRequest extends EventTarget { + static LOADING: number; + static DONE: number; + static UNSENT: number; + static OPENED: number; + static HEADERS_RECEIVED: number; responseBody: any; status: number; readyState: number; @@ -572,7 +821,7 @@ declare class XMLHttpRequest extends EventTarget { responseURL: string; ontimeout: ProgressEventHandler; statusText: string; - onreadystatechange: (ev: Event) => any; + onreadystatechange: (ev: any) => mixed; timeout: number; onload: ProgressEventHandler; response: any; @@ -599,15 +848,7 @@ declare class XMLHttpRequest extends EventTarget { OPENED: number; HEADERS_RECEIVED: number; - statics: { - create(): XMLHttpRequest; - - LOADING: number; - DONE: number; - UNSENT: number; - OPENED: number; - HEADERS_RECEIVED: number; - } + statics: { create(): XMLHttpRequest, ... } } declare class XMLHttpRequestEventTarget extends EventTarget { @@ -625,17 +866,17 @@ declare class XMLSerializer { } declare class Geolocation { - getCurrentPosition: ( - success: (position: Position) => any, - error?: (error: PositionError) => any, + getCurrentPosition( + success: (position: Position) => mixed, + error?: (error: PositionError) => mixed, options?: PositionOptions - ) => void; - watchPosition: ( - success: (position: Position) => any, - error?: (error: PositionError) => any, + ): void; + watchPosition( + success: (position: Position) => mixed, + error?: (error: PositionError) => mixed, options?: PositionOptions - ) => number; - clearWatch: (number) => void; + ): number; + clearWatch(id: number): void; } declare class Position { @@ -656,59 +897,116 @@ declare class Coordinates { declare class PositionError { code: number; message: string; - PERMISSION_DENIED: number; - POSITION_UNAVAILABLE: number; - TIMEOUT: number; + PERMISSION_DENIED: 1; + POSITION_UNAVAILABLE: 2; + TIMEOUT: 3; } type PositionOptions = { - enableHighAccuracy: boolean; - timeout: number; - maximumAge: number; + enableHighAccuracy?: boolean, + timeout?: number, + maximumAge?: number, + ... +} + +type AudioContextState = 'suspended' | 'running' | 'closed'; + +// deprecated +type AudioProcessingEvent$Init = Event$Init & { + playbackTime: number; + inputBuffer: AudioBuffer; + outputBuffer: AudioBuffer; + ... +} + +// deprecated +declare class AudioProcessingEvent extends Event { + constructor(type: string, eventInitDict: AudioProcessingEvent$Init): void; + + +playbackTime: number; + +inputBuffer: AudioBuffer; + +outputBuffer: AudioBuffer; +} + +type OfflineAudioCompletionEvent$Init = Event$Init & { + renderedBuffer: AudioBuffer; + ... 
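A usage sketch for the Geolocation methods and PositionOptions as declared above; the option values are illustrative:

navigator.geolocation.getCurrentPosition(
  (position) => {
    console.log(position.coords.latitude, position.coords.longitude);
  },
  (error) => {
    if (error.code === error.PERMISSION_DENIED) console.warn('location blocked');
  },
  { enableHighAccuracy: true, timeout: 5000, maximumAge: 60000 }
);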
} -declare class AudioContext { +declare class OfflineAudioCompletionEvent extends Event { + constructor(type: string, eventInitDict: OfflineAudioCompletionEvent$Init): void; + + +renderedBuffer: AudioBuffer; +} + +declare class BaseAudioContext extends EventTarget { currentTime: number; destination: AudioDestinationNode; listener: AudioListener; sampleRate: number; - state: any; - onstatechange: (ev: any) => any; - close(): void; + state: AudioContextState; + onstatechange: (ev: any) => mixed; createBuffer(numOfChannels: number, length: number, sampleRate: number): AudioBuffer; createBufferSource(myMediaElement?: HTMLMediaElement): AudioBufferSourceNode; createMediaElementSource(myMediaElement: HTMLMediaElement): MediaElementAudioSourceNode; createMediaStreamSource(stream: MediaStream): MediaStreamAudioSourceNode; createMediaStreamDestination(): MediaStreamAudioDestinationNode; + + // deprecated createScriptProcessor(bufferSize: number, numberOfInputChannels: number, numberOfOutputChannels: number): ScriptProcessorNode; + createAnalyser(): AnalyserNode; createBiquadFilter(): BiquadFilterNode; createChannelMerger(numberOfInputs?: number): ChannelMergerNode; createChannelSplitter(numberOfInputs?: number): ChannelSplitterNode; + createConstantSource(): ConstantSourceNode; createConvolver(): ConvolverNode; createDelay(maxDelayTime?: number): DelayNode; createDynamicsCompressor(): DynamicsCompressorNode; createGain(): GainNode; + createIIRFilter (feedforward: Float32Array, feedback: Float32Array): IIRFilterNode; createOscillator(): OscillatorNode; createPanner(): PannerNode; - createPeriodicWave(real: Float32Array, img: Float32Array, options?: { - disableNormalization: bool, - }): PeriodicWave; + createStereoPanner(): StereoPannerNode; + createPeriodicWave(real: Float32Array, img: Float32Array, options?: { disableNormalization: bool, ... 
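A minimal sketch using the BaseAudioContext factory methods declared here to play a short tone; the frequency, gain, and duration are arbitrary:

const audioCtx = new AudioContext();
const osc = audioCtx.createOscillator();
const gain = audioCtx.createGain();
osc.type = 'sine';
osc.frequency.value = 440;
gain.gain.value = 0.1;
osc.connect(gain);
gain.connect(audioCtx.destination);
osc.start();
osc.stop(audioCtx.currentTime + 0.5);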
}): PeriodicWave; createStereoPanner(): StereoPannerNode; createWaveShaper(): WaveShaperNode; - decodeAudioData(arrayBuffer: ArrayBuffer, decodeSuccessCallback: Function, decodeErrorCallback: Function): void; + decodeAudioData(arrayBuffer: ArrayBuffer, decodeSuccessCallback: (decodedData: AudioBuffer) => mixed, decodeErrorCallback: (err: DOMError) => mixed): void; decodeAudioData(arrayBuffer: ArrayBuffer): Promise; +} + +declare class AudioTimestamp { + contextTime: number; + performanceTime: number; +} + +declare class AudioContext extends BaseAudioContext { + baseLatency: number; + outputLatency: number; + getOutputTimestamp(): AudioTimestamp; resume(): Promise; suspend(): Promise; + close(): Promise; + createMediaElementSource(myMediaElement: HTMLMediaElement): MediaElementAudioSourceNode; + createMediaStreamSource(myMediaStream: MediaStream): MediaStreamAudioSourceNode; + createMediaStreamTrackSource(myMediaStreamTrack: MediaStreamTrack): MediaStreamTrackAudioSourceNode; + createMediaStreamDestination(): MediaStreamAudioDestinationNode; } -declare class AudioNode { +declare class OfflineAudioContext extends BaseAudioContext { + startRendering(): Promise; + suspend(suspendTime: number): Promise; + length: number; + oncomplete: (ev: OfflineAudioCompletionEvent) => mixed; +} + +declare class AudioNode extends EventTarget { context: AudioContext; numberOfInputs: number; numberOfOutputs: number; channelCount: number; - channelCountMode: any; - channelInterpretation: 'speakers'|'discrete'; + channelCountMode: 'max' | 'clamped-max' | 'explicit'; + channelInterpretation: 'speakers' | 'discrete'; connect(audioNode: AudioNode, output?: number, input?: number): AudioNode; connect(destination: AudioParam, output?: number): void; disconnect(destination?: AudioNode, output?: number, input?: number): void; @@ -760,7 +1058,7 @@ declare class AudioBufferSourceNode extends AudioNode { loopStart: number; loopEnd: number; playbackRate: AudioParam; - onended: (ev: any) => any; + onended: (ev: any) => mixed; start(when?: number, offset?: number, duration?: number): void; stop(when?: number): void; } @@ -770,15 +1068,145 @@ declare class CanvasCaptureMediaStream extends MediaStream { requestFrame(): void; } +interface DoubleRange { + max?: number; + min?: number; +} + +interface LongRange { + max?: number; + min?: number; +} + +interface ConstrainBooleanParameters { + exact?: boolean; + ideal?: boolean; +} + +interface ConstrainDOMStringParameters { + exact?: string | string[]; + ideal?: string | string[]; +} + +interface ConstrainDoubleRange extends DoubleRange { + exact?: number; + ideal?: number; +} + +interface ConstrainLongRange extends LongRange { + exact?: number; + ideal?: number; +} + +type MediaTrackSupportedConstraints = {| + width: boolean; + height: boolean; + aspectRatio: boolean; + frameRate: boolean; + facingMode: boolean; + resizeMode: boolean; + volume: boolean; + sampleRate: boolean; + sampleSize: boolean; + echoCancellation: boolean; + autoGainControl: boolean; + noiseSuppression: boolean; + latency: boolean; + channelCount: boolean; + deviceId: boolean; + groupId: boolean; +|} + +interface MediaTrackConstraintSet { + width?: number | ConstrainLongRange; + height?: number | ConstrainLongRange; + aspectRatio?: number | ConstrainDoubleRange; + frameRate?: number | ConstrainDoubleRange; + facingMode?: string | string[] | ConstrainDOMStringParameters; + resizeMode?: string | string[] | ConstrainDOMStringParameters; + volume?: number | ConstrainDoubleRange; + sampleRate?: number | 
ConstrainLongRange; + sampleSize?: number | ConstrainLongRange; + echoCancellation?: boolean | ConstrainBooleanParameters; + autoGainControl?: boolean | ConstrainBooleanParameters; + noiseSuppression?: boolean | ConstrainBooleanParameters; + latency?: number | ConstrainDoubleRange; + channelCount?: number | ConstrainLongRange; + deviceId?: string | string[] | ConstrainDOMStringParameters; + groupId?: string | string[] | ConstrainDOMStringParameters; +} + +interface MediaTrackConstraints extends MediaTrackConstraintSet { + advanced?: Array; +} + +type DisplayMediaStreamConstraints = { + video?: boolean | MediaTrackConstraints; + audio?: boolean | MediaTrackConstraints; + ... +} + +type MediaStreamConstraints = { + audio?: boolean | MediaTrackConstraints; + video?: boolean | MediaTrackConstraints; + peerIdentity?: string; + ... +} + +type MediaTrackSettings = { + aspectRatio?: number; + deviceId?: string; + echoCancellation?: boolean; + facingMode?: string; + frameRate?: number; + groupId?: string; + height?: number; + sampleRate?: number; + sampleSize?: number; + volume?: number; + width?: number; + ... +} + +type MediaTrackCapabilities = { + aspectRatio?: number | DoubleRange; + deviceId?: string; + echoCancellation?: boolean[]; + facingMode?: string; + frameRate?: number | DoubleRange; + groupId?: string; + height?: number | LongRange; + sampleRate?: number | LongRange; + sampleSize?: number | LongRange; + volume?: number | DoubleRange; + width?: number | LongRange; + ... +} + +declare class MediaDevices extends EventTarget { + ondevicechange: (ev: any) => mixed; + enumerateDevices: () => Promise>; + getSupportedConstraints: () => MediaTrackSupportedConstraints; + getDisplayMedia: (constraints?: DisplayMediaStreamConstraints) => Promise; + getUserMedia: (constraints: MediaStreamConstraints) => Promise; +} + +declare class MediaDeviceInfo { + +deviceId: string; + +groupId: string; + +kind: 'videoinput' | 'audioinput' | 'audiooutput'; + +label: string; +} + declare class MediaStream extends EventTarget { active: bool; ended: bool; id: string; - onactive: (ev: any) => any; - onaddtrack: (ev: MediaStreamTrackEvent) => any; - onended: (ev: any) => any; - oninactive: (ev: any) => any; - onremovetrack: (ev: any) => any; + onactive: (ev: any) => mixed; + oninactive: (ev: any) => mixed; + onended: (ev: any) => mixed; + onaddtrack: (ev: MediaStreamTrackEvent) => mixed; + onremovetrack: (ev: MediaStreamTrackEvent) => mixed; addTrack(track: MediaStreamTrack): void; clone(): MediaStream; getAudioTracks(): MediaStreamTrack[]; @@ -795,17 +1223,17 @@ declare class MediaStreamTrack extends EventTarget { label: string; muted: bool; readonly: bool; - readyState: 'live'|'ended'; + readyState: 'live' | 'ended'; remote: bool; - onstarted: (ev: any) => any; - onmute: (ev: any) => any; - onunmute: (ev: any) => any; - onoverconstrained: (ev: any) => any; - onended: (ev: any) => any; - getConstraints(): any; - applyConstraints(): any; - getSettings(): any; - getCapabilities(): any; + onstarted: (ev: any) => mixed; + onmute: (ev: any) => mixed; + onunmute: (ev: any) => mixed; + onoverconstrained: (ev: any) => mixed; + onended: (ev: any) => mixed; + getConstraints(): MediaTrackConstraints; + applyConstraints(): Promise; + getSettings(): MediaTrackSettings; + getCapabilities(): MediaTrackCapabilities; clone(): MediaStreamTrack; stop(): void; } @@ -816,14 +1244,16 @@ declare class MediaStreamTrackEvent extends Event { declare class MediaElementAudioSourceNode extends AudioNode {} declare class 
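A sketch of MediaDevices.getUserMedia with constraint objects like those declared above; the ideal/exact values are illustrative:

async function startCapture() {
  if (!navigator.mediaDevices) throw new Error('mediaDevices not supported');
  const stream = await navigator.mediaDevices.getUserMedia({
    video: { width: { ideal: 1280 }, facingMode: 'user' },
    audio: { echoCancellation: true },
  });
  for (const track of stream.getTracks()) {
    console.log(track.kind, track.label, track.getSettings());
  }
  return stream;
}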
MediaStreamAudioSourceNode extends AudioNode {} +declare class MediaStreamTrackAudioSourceNode extends AudioNode {} declare class MediaStreamAudioDestinationNode extends AudioNode { stream: MediaStream; } +// deprecated declare class ScriptProcessorNode extends AudioNode { bufferSize: number; - onaudioprocess: (ev: any) => any; + onaudioprocess: (ev: AudioProcessingEvent) => mixed; } declare class AnalyserNode extends AudioNode { @@ -844,11 +1274,21 @@ declare class BiquadFilterNode extends AudioNode { Q: AudioParam; gain: AudioParam; type: 'lowpass'|'highpass'|'bandpass'|'lowshelf'|'highshelf'|'peaking'|'notch'|'allpass'; - getFrequencyResponse(frequencyHz: Float32Array, magResponse: Float32Array, phaseResponse: Float32Array): BiquadFilterNode; + getFrequencyResponse(frequencyHz: Float32Array, magResponse: Float32Array, phaseResponse: Float32Array): void; } declare class ChannelMergerNode extends AudioNode {} declare class ChannelSplitterNode extends AudioNode {} + +type ConstantSourceOptions = { offset?: number, ... } +declare class ConstantSourceNode extends AudioNode { + constructor(context: BaseAudioContext, options?: ConstantSourceOptions): ConstantSourceNode; + offset: AudioParam; + onended: (ev: any) => mixed; + start(when?: number): void; + stop(when?: number): void; +} + declare class ConvolverNode extends AudioNode { buffer: AudioBuffer; normalize: bool; @@ -871,6 +1311,10 @@ declare class GainNode extends AudioNode { gain: AudioParam; } +declare class IIRFilterNode extends AudioNode { + getFrequencyResponse(frequencyHz: Float32Array, magResponse: Float32Array, phaseResponse: Float32Array): void; +} + declare class OscillatorNode extends AudioNode { frequency: AudioParam; detune: AudioParam; @@ -906,7 +1350,7 @@ declare class WaveShaperNode extends AudioNode { // this part of spec is not finished yet, apparently // https://stackoverflow.com/questions/35296664/can-fetch-get-object-as-headers -type HeadersInit = Headers | {[key: string]: string}; +type HeadersInit = Headers | { [key: string]: string, ... }; // TODO Heades and URLSearchParams are almost the same thing. @@ -917,7 +1361,7 @@ declare class Headers { append(name: string, value: string): void; delete(name: string): void; entries(): Iterator<[string, string]>; - forEach((value: string, name: string, headers: Headers) => any, thisArg?: any): void; + forEach(callback: (value: string, name: string, headers: Headers) => mixed, thisArg?: any): void; get(name: string): null | string; has(name: string): boolean; keys(): Iterator; @@ -927,11 +1371,11 @@ declare class Headers { declare class URLSearchParams { @@iterator(): Iterator<[string, string]>; - constructor(query?: string | URLSearchParams | Array<[string, string]> | {[string]: string} ): void; + constructor(query?: string | URLSearchParams | Array<[string, string]> | { [string]: string, ... 
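A brief sketch of the Headers and URLSearchParams declarations above; the values are arbitrary:

const params = new URLSearchParams({ q: 'flow', page: '2' });
params.append('tag', 'types');
console.log(params.toString()); // "q=flow&page=2&tag=types"

const headers = new Headers({ 'Content-Type': 'application/json' });
headers.forEach((value, name) => console.log(name + ': ' + value));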
} ): void; append(name: string, value: string): void; delete(name: string): void; entries(): Iterator<[string, string]>; - forEach((value: string, name: string, params: URLSearchParams) => any, thisArg?: any): void; + forEach(callback: (value: string, name: string, params: URLSearchParams) => mixed, thisArg?: any): void; get(name: string): null | string; getAll(name: string): Array; has(name: string): boolean; @@ -956,25 +1400,27 @@ type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $Arra type RequestInfo = Request | URL | string; type RequestOptions = { - body?: ?BodyInit; - - cache?: CacheType; - credentials?: CredentialsType; - headers?: HeadersInit; - integrity?: string; - keepalive?: boolean; - method?: string; - mode?: ModeType; - redirect?: RedirectType; - referrer?: string; - referrerPolicy?: ReferrerPolicyType; - window?: any; + body?: ?BodyInit, + cache?: CacheType, + credentials?: CredentialsType, + headers?: HeadersInit, + integrity?: string, + keepalive?: boolean, + method?: string, + mode?: ModeType, + redirect?: RedirectType, + referrer?: string, + referrerPolicy?: ReferrerPolicyType, + signal?: ?AbortSignal, + window?: any, + ... } type ResponseOptions = { - status?: number; - statusText?: string; - headers?: HeadersInit + status?: number, + statusText?: string, + headers?: HeadersInit, + ... } declare class Response { @@ -1029,6 +1475,17 @@ declare class Request { text(): Promise; } +declare class AbortController { + constructor(): void; + +signal: AbortSignal; + abort(): void; +} + +declare class AbortSignal extends EventTarget { + +aborted: boolean; + onabort: (event: any) => mixed; +} + declare function fetch(input: RequestInfo, init?: RequestOptions): Promise; @@ -1036,9 +1493,7 @@ type TextEncoder$availableEncodings = 'utf-8' | 'utf8' | 'unicode-1-1-utf-8' | ' declare class TextEncoder { constructor(encoding?: TextEncoder$availableEncodings): TextEncoder; - encode(buffer: string, options?: { - stream: bool, - }): Uint8Array; + encode(buffer: string, options?: { stream: bool, ... }): Uint8Array; encoding: TextEncoder$availableEncodings; } @@ -1261,19 +1716,20 @@ type TextDecoder$availableEncodings = declare class TextDecoder { - constructor(encoding?: TextDecoder$availableEncodings, options?: { fatal: bool }): TextDecoder; + constructor(encoding?: TextDecoder$availableEncodings, options?: { fatal: bool, ... }): TextDecoder; encoding: TextDecoder$availableEncodings; fatal: bool; ignoreBOM: bool; - decode(buffer?: ArrayBuffer | $ArrayBufferView, options?: { stream: bool }): string; + decode(buffer?: ArrayBuffer | $ArrayBufferView, options?: { stream: bool, ... }): string; } declare class MessagePort extends EventTarget { - postMessage(message: any, transfer?: Iterable): void; + postMessage(message: any, transfer?: Iterable): void; start(): void; close(): void; - onmessage: (ev: MessageEvent) => any; + onmessage: null | (ev: MessageEvent) => mixed; + onmessageerror: null | (ev: MessageEvent) => mixed; } declare class MessageChannel { @@ -1303,27 +1759,30 @@ declare class VRDisplay extends EventTarget { type VRSource = HTMLCanvasElement; type VRLayerInit = { - leftBounds?: number[]; - rightBounds?: number[]; - source?: null | VRSource; + leftBounds?: number[], + rightBounds?: number[], + source?: null | VRSource, + ... 
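// A usage sketch (not from the patch) for the AbortController / fetch `signal`
// definitions added above; the endpoint path is a placeholder.
const controller = new AbortController();
fetch('/slow-endpoint', { signal: controller.signal })
  .then(res => res.text())
  .then(body => console.log(body.length))
  .catch(() => console.log('request failed or was aborted'));
// Aborting rejects the pending fetch promise.
setTimeout(() => controller.abort(), 1000);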
}; type VRDisplayCapabilities = { - canPresent: boolean; - hasExternalDisplay: boolean; - hasPosition: boolean; - maxLayers: number; + canPresent: boolean, + hasExternalDisplay: boolean, + hasPosition: boolean, + maxLayers: number, + ... }; type VREye = 'left' | 'right'; type VRPose = { - angularAcceleration?: Float32Array; - angularVelocity?: Float32Array; - linearAcceleration?: Float32Array; - linearVelocity?: Float32Array; - orientation?: Float32Array; - position?: Float32Array; + angularAcceleration?: Float32Array, + angularVelocity?: Float32Array, + linearAcceleration?: Float32Array, + linearVelocity?: Float32Array, + orientation?: Float32Array, + position?: Float32Array, + ... }; declare class VRFrameData { @@ -1336,22 +1795,25 @@ declare class VRFrameData { } type VREyeParameters = { - offset: Float32Array; - renderWidth: number; - renderHeight: number; + offset: Float32Array, + renderWidth: number, + renderHeight: number, + ... }; type VRStageParameters = { - sittingToStandingTransform: Float32Array; - sizeX: number; - sizeZ: number; + sittingToStandingTransform: Float32Array, + sizeX: number, + sizeZ: number, + ... }; type VRDisplayEventReason = 'mounted' | 'navigation' | 'requested' | 'unmounted'; type VRDisplayEventInit = { - display: VRDisplay; - reason: VRDisplayEventReason; + display: VRDisplay, + reason: VRDisplayEventReason, + ... }; declare class VRDisplayEvent extends Event { @@ -1377,3 +1839,115 @@ declare class MediaQueryList extends EventTarget { } declare var matchMedia: string => MediaQueryList; + +// https://w3c.github.io/webappsec-credential-management/#idl-index +declare type CredMgmtCredentialRequestOptions = { + mediation: 'silent' | 'optional' | 'required', + signal: AbortSignal, + ... +} + +declare type CredMgmtCredentialCreationOptions = { signal: AbortSignal, ... 
} + +declare interface CredMgmtCredential { + id: string; + type: string; +} + +declare interface CredMgmtPasswordCredential extends CredMgmtCredential { + password: string; +} + +declare interface CredMgmtCredentialsContainer { + get(option?: CredMgmtCredentialRequestOptions): Promise; + store(credential: CredMgmtCredential): Promise; + create( + creationOption?: CredMgmtCredentialCreationOptions, + ): Promise; + preventSilentAccess(): Promise; +} + +type SpeechSynthesisErrorCode = + | "canceled" + | "interrupted" + | "audio-busy" + | "audio-hardware" + | "network" + | "synthesis-unavailable" + | "synthesis-failed" + | "language-unavailable" + | "voice-unavailable" + | "text-too-long" + | "invalid-argument" + | "not-allowed"; + +declare class SpeechSynthesis extends EventTarget { + +pending: boolean; + +speaking: boolean; + +paused: boolean; + + onvoiceschanged: ?((ev: Event) => mixed); + + speak(utterance: SpeechSynthesisUtterance): void; + cancel(): void; + pause(): void; + resume(): void; + getVoices(): Array; +} + +declare var speechSynthesis: SpeechSynthesis; + +declare class SpeechSynthesisUtterance extends EventTarget { + constructor(text?: string): SpeechSynthesisUtterance; + + text: string; + lang: string; + voice: SpeechSynthesisVoice | null; + volume: number; + rate: number; + pitch: number; + + onstart: ?((ev: SpeechSynthesisEvent) => mixed); + onend: ?((ev: SpeechSynthesisEvent) => mixed); + onerror: ?((ev: SpeechSynthesisErrorEvent) => mixed); + onpause: ?((ev: SpeechSynthesisEvent) => mixed); + onresume: ?((ev: SpeechSynthesisEvent) => mixed); + onmark: ?((ev: SpeechSynthesisEvent) => mixed); + onboundary: ?((ev: SpeechSynthesisEvent) => mixed); +} + +type SpeechSynthesisEvent$Init = Event$Init & { + utterance: SpeechSynthesisUtterance; + charIndex?: number; + charLength?: number; + elapsedTime?: number; + name?: string; + ... +} + +declare class SpeechSynthesisEvent extends Event { + constructor(type: string, eventInitDict?: SpeechSynthesisEvent$Init): SpeechSynthesisEvent; + +utterance: SpeechSynthesisUtterance; + charIndex: number; + charLength: number; + elapsedTime: number; + name: string; +} + +type SpeechSynthesisErrorEvent$Init = SpeechSynthesisEvent$Init & { + error: SpeechSynthesisErrorCode; + ... +} + +declare class SpeechSynthesisErrorEvent extends SpeechSynthesisEvent { + constructor(type: string, eventInitDict?: SpeechSynthesisErrorEvent$Init): SpeechSynthesisErrorEvent; + +error: SpeechSynthesisErrorCode; +} + +declare class SpeechSynthesisVoice { + +voiceURI: string; + +name: string; + +lang: string; + +localService: boolean; + +default: boolean; +} diff --git a/lib/core.js b/lib/core.js index 714334283a5..03e5591d9cb 100644 --- a/lib/core.js +++ b/lib/core.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -22,47 +22,64 @@ declare function encodeURI(uri: string): string; declare function encodeURIComponent(uriComponent: string): string; type PropertyDescriptor = { - enumerable?: boolean; - configurable?: boolean; - writable?: boolean; - value?: T; - get?: () => T; - set?: (value: T) => void; + enumerable?: boolean, + configurable?: boolean, + writable?: boolean, + value?: T, + get?: () => T, + set?: (value: T) => void, + ... 
}; -type PropertyDescriptorMap = { - [s: string]: PropertyDescriptor; -} +type PropertyDescriptorMap = { [s: string]: PropertyDescriptor, ... } + +type $NotNullOrVoid = +| number +| string +| boolean +| {...} +| $ReadOnlyArray; // TODO: instance declare class Object { - static (o: ?void): {[key: any]: any}; + static (o: ?void): { [key: any]: any, ... }; static (o: boolean): Boolean; static (o: number): Number; static (o: string): String; - static (o: T): T; + static (o: T): T; static assign: Object$Assign; static create(o: any, properties?: PropertyDescriptorMap): any; // compiler magic static defineProperties(o: any, properties: PropertyDescriptorMap): any; static defineProperty(o: any, p: any, attributes: PropertyDescriptor): any; - static entries(object: any): Array<[string, mixed]>; + static entries(object: $NotNullOrVoid): Array<[string, mixed]>; static freeze(o: T): T; - static getOwnPropertyDescriptor(o: any, p: any): PropertyDescriptor | void; - static getOwnPropertyNames(o: any): Array; - static getOwnPropertySymbols(o: any): Symbol[]; + static fromEntries(entries: Iterable<[K, V] | { + '0': K, + '1': V, + ... + }>): { [K]: V, ... }; + + static getOwnPropertyDescriptor(o: $NotNullOrVoid, p: any): PropertyDescriptor | void; + static getOwnPropertyDescriptors(o: {...}): PropertyDescriptorMap; + // This is documentation only. Object.getOwnPropertyNames is implemented in OCaml code + // https://github.com/facebook/flow/blob/8ac01bc604a6827e6ee9a71b197bb974f8080049/src/typing/statement.ml#L6308 + static getOwnPropertyNames(o: $NotNullOrVoid): Array; + static getOwnPropertySymbols(o: $NotNullOrVoid): Symbol[]; static getPrototypeOf: Object$GetPrototypeOf; - static is(a: any, b: any): boolean; - static isExtensible(o: any): boolean; - static isFrozen(o: any): boolean; - static isSealed(o: any): boolean; - static keys(o: any): Array; + static is(a: T, b: T): boolean; + static isExtensible(o: $NotNullOrVoid): boolean; + static isFrozen(o: $NotNullOrVoid): boolean; + static isSealed(o: $NotNullOrVoid): boolean; + // This is documentation only. Object.keys is implemented in OCaml code. 
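// A usage sketch (not from the patch) of what the $NotNullOrVoid bound above
// changes relative to the previous `any`-typed signatures.
const keys: Array<string> = Object.keys({ small: 1, large: 2 }); // ok
// Object.keys(null);         // now a Flow error rather than silently `any`
// Object.entries(undefined); // likewise rejected at check time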
+ // https://github.com/facebook/flow/blob/8ac01bc604a6827e6ee9a71b197bb974f8080049/src/typing/statement.ml#L6308 + static keys(o: $NotNullOrVoid): Array; static preventExtensions(o: T): T; static seal(o: T): T; - static setPrototypeOf(o: any, proto: ?Object): any; - static values(object: any): Array; - hasOwnProperty(prop: any): boolean; - isPrototypeOf(o: any): boolean; - propertyIsEnumerable(prop: any): boolean; + static setPrototypeOf(o: T, proto: ?{...}): T; + static values(object: $NotNullOrVoid): Array; + hasOwnProperty(prop: mixed): boolean; + isPrototypeOf(o: mixed): boolean; + propertyIsEnumerable(prop: mixed): boolean; toLocaleString(): string; toString(): string; valueOf(): mixed; @@ -73,6 +90,7 @@ declare class $SymbolHasInstance mixins Symbol {} declare class $SymboIsConcatSpreadable mixins Symbol {} declare class $SymbolIterator mixins Symbol {} declare class $SymbolMatch mixins Symbol {} +declare class $SymbolMatchAll mixins Symbol {} declare class $SymbolReplace mixins Symbol {} declare class $SymbolSearch mixins Symbol {} declare class $SymbolSpecies mixins Symbol {} @@ -84,12 +102,14 @@ declare class $SymbolUnscopables mixins Symbol {} declare class Symbol { static (value?:any): Symbol; static for(key: string): Symbol; + +description: string | void; static hasInstance: $SymbolHasInstance; static isConcatSpreadable: $SymboIsConcatSpreadable; static iterator: string; // polyfill '@@iterator' static keyFor(sym: Symbol): ?string; static length: 0; static match: $SymbolMatch; + static matchAll: $SymbolMatchAll; static replace: $SymbolReplace; static search: $SymbolSearch; static species: $SymbolSpecies; @@ -108,14 +128,14 @@ declare class Function { proto call: Function$Prototype$Call; // (thisArg: any, ...argArray: Array) => any toString(): string; arguments: any; - caller: Function | null; + caller: any | null; length: number; name: string; } declare class Boolean { constructor(value?: mixed): void; - static (value:any):boolean; + static (value:mixed):boolean; valueOf(): boolean; toString(): string; } @@ -129,11 +149,11 @@ declare class Number { static NaN: number; static NEGATIVE_INFINITY: number; static POSITIVE_INFINITY: number; - static (value:any):number; - static isFinite(value: any): boolean; - static isInteger(value: any): boolean; - static isNaN(value: any): boolean; - static isSafeInteger(value: any): boolean; + static (value:mixed):number; + static isFinite(value: mixed): boolean; + static isInteger(value: mixed): boolean; + static isNaN(value: mixed): boolean; + static isSafeInteger(value: mixed): boolean; static parseFloat(value: string): number; static parseInt(value: string, radix?: number): number; constructor(value?: mixed): void; @@ -146,49 +166,50 @@ declare class Number { } declare var Math: { - E: number; - LN10: number; - LN2: number; - LOG10E: number; - LOG2E: number; - PI: number; - SQRT1_2: number; - SQRT2: number; - abs(x: number): number; - acos(x: number): number; - acosh(x: number): number; - asin(x: number): number; - asinh(x: number): number; - atan(x: number): number; - atan2(y: number, x: number): number; - atanh(x: number): number; - cbrt(x: number): number; - ceil(x: number): number; - clz32(x: number): number; - cos(x: number): number; - cosh(x: number): number; - exp(x: number): number; - expm1(x: number): number; - floor(x: number): number; - fround(x: number): number; - hypot(...values: Array): number; - imul(y: number, x: number): number; - log(x: number): number; - log10(x: number): number; - log1p(x: number): number; - 
log2(x: number): number; - max(...values: Array): number; - min(...values: Array): number; - pow(x: number, y: number): number; - random(): number; - round(x: number): number; - sign(x: number): number; - sin(x: number): number; - sinh(x: number): number; - sqrt(x: number): number; - tan(x: number): number; - tanh(x: number): number; - trunc(x: number): number; + E: number, + LN10: number, + LN2: number, + LOG10E: number, + LOG2E: number, + PI: number, + SQRT1_2: number, + SQRT2: number, + abs(x: number): number, + acos(x: number): number, + acosh(x: number): number, + asin(x: number): number, + asinh(x: number): number, + atan(x: number): number, + atan2(y: number, x: number): number, + atanh(x: number): number, + cbrt(x: number): number, + ceil(x: number): number, + clz32(x: number): number, + cos(x: number): number, + cosh(x: number): number, + exp(x: number): number, + expm1(x: number): number, + floor(x: number): number, + fround(x: number): number, + hypot(...values: Array): number, + imul(y: number, x: number): number, + log(x: number): number, + log10(x: number): number, + log1p(x: number): number, + log2(x: number): number, + max(...values: Array): number, + min(...values: Array): number, + pow(x: number, y: number): number, + random(): number, + round(x: number): number, + sign(x: number): number, + sin(x: number): number, + sinh(x: number): number, + sqrt(x: number): number, + tan(x: number): number, + tanh(x: number): number, + trunc(x: number): number, + ... }; /* All the Array.prototype methods and properties that don't mutate the array. @@ -199,22 +220,22 @@ declare class $ReadOnlyArray<+T> { // concat creates a new array concat | S>(...items: Array): Array; entries(): Iterator<[number, T]>; - every(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): boolean; + every(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): boolean; filter(callbackfn: typeof Boolean): Array<$NonMaybeType>; - filter(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): Array; - find(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): T | void; - findIndex(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): number; - forEach(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): void; + filter(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): Array; + find(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): T | void; + findIndex(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): number; + forEach(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): void; includes(searchElement: mixed, fromIndex?: number): boolean; indexOf(searchElement: mixed, fromIndex?: number): number; join(separator?: string): string; keys(): Iterator; lastIndexOf(searchElement: mixed, fromIndex?: number): number; map(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => U, thisArg?: any): Array; + flatMap(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => $ReadOnlyArray | U, thisArg?: any): Array; reduce( callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: $ReadOnlyArray) => T, - initialValue: void ): T; reduce( callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: $ReadOnlyArray) => U, @@ -222,14 +243,13 @@ declare class 
$ReadOnlyArray<+T> { ): U; reduceRight( callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: $ReadOnlyArray) => T, - initialValue: void ): T; reduceRight( callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: $ReadOnlyArray) => U, initialValue: U ): U; slice(start?: number, end?: number): Array; - some(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): boolean; + some(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): boolean; values(): Iterator; +[key: number]: T; +length: number; @@ -237,19 +257,19 @@ declare class $ReadOnlyArray<+T> { declare class Array extends $ReadOnlyArray { copyWithin(target: number, start: number, end?: number): T[]; - every(callbackfn: (value: T, index: number, array: Array) => any, thisArg?: any): boolean; + every(callbackfn: (value: T, index: number, array: Array) => mixed, thisArg?: any): boolean; fill(value: T, begin?: number, end?: number): Array; filter(callbackfn: typeof Boolean): Array<$NonMaybeType>; - filter(callbackfn: (value: T, index: number, array: Array) => any, thisArg?: any): Array; - find(callbackfn: (value: T, index: number, array: Array) => any, thisArg?: any): T | void; - findIndex(callbackfn: (value: T, index: number, array: Array) => any, thisArg?: any): number; - forEach(callbackfn: (value: T, index: number, array: Array) => any, thisArg?: any): void; + filter(callbackfn: (value: T, index: number, array: Array) => mixed, thisArg?: any): Array; + find(callbackfn: (value: T, index: number, array: Array) => mixed, thisArg?: any): T | void; + findIndex(callbackfn: (value: T, index: number, array: Array) => mixed, thisArg?: any): number; + forEach(callbackfn: (value: T, index: number, array: Array) => mixed, thisArg?: any): void; map(callbackfn: (value: T, index: number, array: Array) => U, thisArg?: any): Array; + flatMap(callbackfn: (value: T, index: number, array: Array) => $ReadOnlyArray | U, thisArg?: any): Array; pop(): T; push(...items: Array): number; reduce( callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: Array) => T, - initialValue: void ): T; reduce( callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: Array) => U, @@ -257,7 +277,6 @@ declare class Array extends $ReadOnlyArray { ): U; reduceRight( callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: Array) => T, - initialValue: void ): T; reduceRight( callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: Array) => U, @@ -265,7 +284,7 @@ declare class Array extends $ReadOnlyArray { ): U; reverse(): Array; shift(): T; - some(callbackfn: (value: T, index: number, array: Array) => any, thisArg?: any): boolean; + some(callbackfn: (value: T, index: number, array: Array) => mixed, thisArg?: any): boolean; sort(compareFn?: (a: T, b: T) => number): Array; splice(start: number, deleteCount?: number, ...items: Array): Array; unshift(...items: Array): number; @@ -274,18 +293,29 @@ declare class Array extends $ReadOnlyArray { [key: number]: T; length: number; static (...values:Array): Array; - static isArray(obj: any): bool; + static isArray(obj: mixed): bool; static from(iter: Iterable, mapFn: (elem: A, index: number) => B, thisArg?: any): Array; static from(iter: Iterable, mapFn: void): Array; static from(iter: Iterator, mapFn: (elem: A, index: number) => B, thisArg?: any): Array; static from(iter: Iterator, mapFn: void): Array; - static from(arrayLike: {length: number}, mapFn: 
(elem: void, index: number) => A, thisArg?: any): Array; - static from(arrayLike: {length: number}, mapFn: void): Array; + static from(arrayLike: { length: number, ... }, mapFn: (elem: void, index: number) => A, thisArg?: any): Array; + static from(arrayLike: { length: number, ... }, mapFn: void): Array; static of(...values: Array): Array; } +type $ArrayLike = { + [indexer: number]: T, + length: number, + ... +} + type RegExp$flags = $CharSet<"gimsuy">; -type RegExp$matchResult = Array & {index: number, input: string, groups: ?{[name: string]: string}}; +type RegExp$matchResult = Array & { + index: number, + input: string, + groups: ?{ [name: string]: string, ... }, + ... +}; declare class String { @@iterator(): Iterator; @@ -302,6 +332,7 @@ declare class String { link(href: string): string; localeCompare(that: string, locales?: string | Array, options?: Intl$CollatorOptions): number; match(regexp: string | RegExp): RegExp$matchResult | null; + matchAll(regexp: string | RegExp): Iterator; normalize(format?: string): string; padEnd(targetLength: number, padString?: string): string; padStart(targetLength: number, padString?: string): string; @@ -318,16 +349,19 @@ declare class String { toLowerCase(): string; toUpperCase(): string; trim(): string; + trimEnd(): string; trimLeft(): string; trimRight(): string; + trimStart(): string; valueOf(): string; toString(): string; length: number; - static (value:any):string; + [key: number]: string; + static (value:mixed):string; static fromCharCode(...codes: Array): string; static fromCodePoint(...codes: Array): string; static raw(templateString: string): string; - static raw(callSite: $Shape<{raw: string}>, ...substitutions: any[]): string; + static raw(callSite: $Shape<{ raw: string, ... }>, ...substitutions: any[]): string; } declare class RegExp { @@ -341,15 +375,18 @@ declare class RegExp { lastIndex: number; multiline: boolean; source: string; - sticky: bool; - unicode: bool; + sticky: boolean; + unicode: boolean; + dotAll: boolean; test(string: string): boolean; toString(): string; + +[key: $SymbolMatch | $SymbolMatchAll]: (str: string) => Iterator } declare class Date { constructor(): void; constructor(timestamp: number): void; + constructor(date: Date): void; constructor(dateString: string): void; constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; getDate(): number; @@ -387,7 +424,7 @@ declare class Date { setUTCSeconds(sec: number, ms?: number): number; toDateString(): string; toISOString(): string; - toJSON(key?: any): string; + toJSON(key?: mixed): string; toLocaleDateString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; toLocaleString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; toLocaleTimeString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; @@ -406,7 +443,7 @@ declare class Date { declare class CallSite { getThis(): any; getTypeName(): string; - getFunction(): ?Function; + getFunction(): ?((...any) => any); getFunctionName(): string; getMethodName(): string; getFileName(): ?string; @@ -439,7 +476,7 @@ declare class Error { columnNumber?: number; // note: v8 only (node/chrome) - static captureStackTrace(target: Object, constructor?: Function): void; + static captureStackTrace(target: {[any] : any, ...}, constructor?: any): void; static stackTraceLimit: number; static prepareStackTrace: (err: Error, stack: CallSite[]) => mixed; @@ -472,17 +509,30 @@ declare class URIError extends 
Error { declare class JSON { static parse(text: string, reviver?: (key: any, value: any) => any): any; static stringify( - value: any, + value: null | string | number | boolean | {...} | $ReadOnlyArray, replacer?: ?((key: string, value: any) => any) | Array, space?: string | number ): string; + static stringify( + value: mixed, + replacer?: ?((key: string, value: any) => any) | Array, + space?: string | number + ): string | void; } /* Iterable/Iterator/Generator */ type IteratorResult<+Yield,+Return> = - | { done: true, +value?: Return } - | { done: false, +value: Yield }; + | { + done: true, + +value?: Return, + ... +} + | { + done: false, + +value: Yield, + ... +}; interface $Iterator<+Yield,+Return,-Next> { @@iterator(): $Iterator; @@ -528,7 +578,18 @@ declare function $asyncIterator(p: AsyncIterable): T; /* Maps and Sets */ -declare class Map { +declare class $ReadOnlyMap { + @@iterator(): Iterator<[K, V]>; + entries(): Iterator<[K, V]>; + forEach(callbackfn: (value: V, index: K, map: $ReadOnlyMap) => mixed, thisArg?: any): void; + get(key: K): V | void; + has(key: K): boolean; + keys(): Iterator; + size: number; + values(): Iterator; +} + +declare class Map extends $ReadOnlyMap { @@iterator(): Iterator<[K, V]>; constructor(iterable: ?Iterable<[K, V]>): void; clear(): void; @@ -542,10 +603,16 @@ declare class Map { size: number; values(): Iterator; // Multiple Indexers not yet supported - [key: $SymbolToStringTag | $SymbolSpecies]: Function; + +[key: $SymbolToStringTag]: any; + static +[key: $SymbolSpecies]: any; +} + +declare class $ReadOnlyWeakMap, +V> { + get(key: K): V | void; + has(key: K): boolean; } -declare class WeakMap { +declare class WeakMap, V> extends $ReadOnlyWeakMap { constructor(iterable: ?Iterable<[K, V]>): void; delete(key: K): boolean; get(key: K): V | void; @@ -553,7 +620,17 @@ declare class WeakMap { set(key: K, value: V): WeakMap; } -declare class Set { +declare class $ReadOnlySet { + @@iterator(): Iterator; + entries(): Iterator<[T, T]>; + forEach(callbackfn: (value: T, index: T, set: $ReadOnlySet) => mixed, thisArg?: any): void; + has(value: T): boolean; + keys(): Iterator; + size: number; + values(): Iterator; +} + +declare class Set extends $ReadOnlySet { @@iterator(): Iterator; constructor(iterable: ?Iterable): void; add(value: T): Set; @@ -565,10 +642,15 @@ declare class Set { keys(): Iterator; size: number; values(): Iterator; - [key: $SymbolSpecies]: Function; // This would the Set constructor, can't think of a way to correctly type this + +[key: $SymbolToStringTag]: (...any) => any; + static +[key: $SymbolSpecies]: (...any) => any; // This would the Set constructor, can't think of a way to correctly type this } -declare class WeakSet { +declare class $ReadOnlyWeakSet> { + has(value: T): boolean; +} + +declare class WeakSet> extends $ReadOnlyWeakSet { constructor(iterable?: Iterable): void; add(value: T): WeakSet; delete(value: T): boolean; @@ -728,8 +810,8 @@ declare function unescape(str: string): string; declare opaque type TimeoutID; declare opaque type IntervalID; -declare function clearInterval(intervalId?: IntervalID): void; -declare function clearTimeout(timeoutId?: TimeoutID): void; +declare function clearInterval(intervalId: ?IntervalID): void; +declare function clearTimeout(timeoutId: ?TimeoutID): void; declare function setTimeout>( callback: (...args: TArguments) => mixed, ms?: number, @@ -744,42 +826,42 @@ declare function setInterval>( /* Reflect API */ declare var Reflect: { - apply(target: Function, thisArg?: any, argumentsList?: Array): 
any; - construct(target: Function, argumentsList?: Array, newTarget?: Function): any; - defineProperty(o: any, p: any, attributes: any): boolean; - deleteProperty(o: any, p: any): boolean; - get(o: any, p: any, receiver?: any): any; - getOwnPropertyDescriptor(o: any, p: any): any; - getPrototypeOf: Object$GetPrototypeOf; - setPrototypeOf: Object$SetPrototypeOf; - has(o: any, p: any): boolean; - isExtensible(o: any): boolean; - ownKeys(o: any): Array; - preventExtensions(o: any): boolean; - set(o: any, p: any, value: any, receiver?: any): boolean; + apply(target: (...any) => any, thisArg?: any, argumentsList?: Array): any, + construct(target: (...any) => any, argumentsList?: Array, newTarget?: any): any, + defineProperty(o: any, p: any, attributes: any): boolean, + deleteProperty(o: any, p: any): boolean, + get(o: any, p: any, receiver?: any): any, + getOwnPropertyDescriptor(o: any, p: any): any, + getPrototypeOf: Object$GetPrototypeOf, + setPrototypeOf: Object$SetPrototypeOf, + has(o: any, p: any): boolean, + isExtensible(o: any): boolean, + ownKeys(o: any): Array, + preventExtensions(o: any): boolean, + set(o: any, p: any, value: any, receiver?: any): boolean, + ... } /* Proxy */ type Proxy$traps = { - getPrototypeOf?: (target: T) => Object|null; - setPrototypeOf?: (target: T, prototype: Object|null) => boolean; - isExtensible?: (target: T) => boolean; - preventExtensions?: (target: T) => boolean; - getOwnPropertyDescriptor?: (target: T, property: string) => void | PropertyDescriptor; - defineProperty?: (target: T, property: string, descriptor: PropertyDescriptor) => boolean; - has?: (target: T, key: string) => boolean; - get?: (target: T, property: string, receiver: Proxy) => any; - set?: (target: T, property: string, value: any, receiver: Proxy) => boolean; - deleteProperty?: (target: T, property: string) => boolean; - ownKeys?: (target: T) => Array; - apply?: (target: T, context: any, args: Array) => any; - construct?: (target: T, args: Array, newTarget: Function) => Object; + getPrototypeOf?: (target: T) => {[any] : any, ...} | null, + setPrototypeOf?: (target: T, prototype: {[any] : any, ...} | null) => boolean, + isExtensible?: (target: T) => boolean, + preventExtensions?: (target: T) => boolean, + getOwnPropertyDescriptor?: (target: T, property: string) => void | PropertyDescriptor, + defineProperty?: (target: T, property: string, descriptor: PropertyDescriptor) => boolean, + has?: (target: T, key: string) => boolean, + get?: (target: T, property: string, receiver: Proxy) => any, + set?: (target: T, property: string, value: any, receiver: Proxy) => boolean, + deleteProperty?: (target: T, property: string) => boolean, + ownKeys?: (target: T) => Array, + apply?: (target: T, context: any, args: Array) => any, + construct?: (target: T, args: Array, newTarget: (...any) => any) => {[any] : any, ...}, + ... }; -type Proxy$revocable = T & { - revoke(): void; -}; +type Proxy$revocable = T & { revoke(): void, ... }; declare class Proxy { constructor(target: T, handler: Proxy$traps): T; @@ -792,43 +874,50 @@ declare class Proxy { declare var global: any; declare var module: { - exports: any; - require(id: string): any; - id: string; - filename: string; - loaded: boolean; - parent: any; - children: Array; + exports: any, + require(id: string): any, + id: string, + filename: string, + loaded: boolean, + parent: any, + children: Array, + builtinModules: Array, + ... 
}; declare var require: { - (id: string): any; - resolve: (id: string) => string; - cache: any; - main: typeof module; + (id: string): any, + resolve: (id: string) => string, + cache: any, + main: typeof module, + ... }; declare var exports: any; +/* Opaque type for module reference magic strings */ +declare opaque type $Flow$ModuleRef; + /* Commonly available, shared between node and dom */ declare var console: { - assert(condition: mixed, ...data: Array): void; - clear(): void; - count(label: string): void; - debug(...data: Array): void; - dir(...data: Array): void; - dirxml(...data: Array): void; - error(...data: Array): void; - _exception(...data: Array): void; - group(...data: Array): void; - groupCollapsed(...data: Array): void; - groupEnd(): void; - info(...data: Array): void; - log(...data: Array): void; - profile(name?: string): void; - profileEnd(name?: string): void; - table(tabularData: { [key: string]: any } | Array<{ [key: string]: any }> | Array>): void; - time(label: string): void; - timeEnd(label: string): void; - timeStamp(label?: string): void; - trace(...data: Array): void; - warn(...data: Array): void; + assert(condition: mixed, ...data: Array): void, + clear(): void, + count(label: string): void, + debug(...data: Array): void, + dir(...data: Array): void, + dirxml(...data: Array): void, + error(...data: Array): void, + _exception(...data: Array): void, + group(...data: Array): void, + groupCollapsed(...data: Array): void, + groupEnd(): void, + info(...data: Array): void, + log(...data: Array): void, + profile(name?: string): void, + profileEnd(name?: string): void, + table(tabularData: { [key: string]: any, ... } | Array<{ [key: string]: any, ... }> | Array>): void, + time(label: string): void, + timeEnd(label: string): void, + timeStamp(label?: string): void, + trace(...data: Array): void, + warn(...data: Array): void, + ... }; diff --git a/lib/cssom.js b/lib/cssom.js index ef161351ecb..a3183d825cf 100644 --- a/lib/cssom.js +++ b/lib/cssom.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
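// A sketch (not from the patch) of the trailing `...` these hunks add to
// object types: it marks the type as explicitly inexact. Type names here are
// made up for illustration.
type InexactOpts = { label: string, ... }; // extra properties permitted
type ExactOpts = {| label: string |};      // extra properties rejected
const a: InexactOpts = { label: 'ok', debug: true }; // ok: width subtyping
// const b: ExactOpts = { label: 'ok', debug: true }; // error: `debug` not allowed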
@@ -7,12 +7,12 @@ declare class StyleSheet { disabled: boolean; - href: string; - media: MediaList; - ownerNode: Node; - parentStyleSheet: ?StyleSheet; - title: string; - type: string; + +href: string; + +media: MediaList; + +ownerNode: Node; + +parentStyleSheet: ?StyleSheet; + +title: string; + +type: string; } declare class StyleSheetList { @@ -32,17 +32,36 @@ declare class MediaList { } declare class CSSStyleSheet extends StyleSheet { - cssRules: CSSRuleList; - ownerRule: ?CSSRule; + +cssRules: CSSRuleList; + +ownerRule: ?CSSRule; deleteRule(index: number): void; - insertRule(rule: string, index: number): void; + insertRule(rule: string, index: number): number; +} + +declare class CSSGroupingRule extends CSSRule { + +cssRules: CSSRuleList; + deleteRule(index: number): void; + insertRule(rule: string, index: number): number; +} + +declare class CSSConditionRule extends CSSGroupingRule { + conditionText: string; +} + +declare class CSSMediaRule extends CSSConditionRule { + +media: MediaList; +} + +declare class CSSStyleRule extends CSSRule { + selectorText: string; + +style: CSSStyleDeclaration; } declare class CSSRule { cssText: string; - parentRule: ?CSSRule; - parentStyleSheet: ?CSSStyleSheet; - type: number; + +parentRule: ?CSSRule; + +parentStyleSheet: ?CSSStyleSheet; + +type: number; static STYLE_RULE: number; static MEDIA_RULE: number; static FONT_FACE_RULE: number; @@ -61,6 +80,19 @@ declare class CSSRule { static REGION_STYLE_RULE: number; } +declare class CSSKeyframeRule extends CSSRule { + keyText: string; + +style: CSSStyleDeclaration; +} + +declare class CSSKeyframesRule extends CSSRule { + name: string; + +cssRules: CSSRuleList; + appendRule(rule: string): void; + deleteRule(select: string): void; + findRule(select: string): CSSKeyframeRule | null; +} + declare class CSSRuleList { @@iterator(): Iterator; length: number; @@ -348,6 +380,7 @@ declare class CSSStyleDeclaration { turn: string; unicodeBidi: string; unicodeRange: string; + userSelect: string; verticalAlign: string; visibility: string; webkitOverflowScrolling: string; @@ -385,3 +418,153 @@ declare class TransitionEvent extends Event { pseudoElement: string; // readonly propertyName: string; // readonly } + +type AnimationPlayState = 'idle' | 'running' | 'paused' | 'finished' +type AnimationReplaceState = 'active' | 'removed' | 'persisted' +type FillMode = 'none' | 'forwards' | 'backwards' | 'both' | 'auto' +type PlaybackDirection = 'normal' | 'reverse' | 'alternate' | 'alternate-reverse' +type IterationCompositeOperation = 'replace' | 'accumulate' +type CompositeOperation = 'replace' | 'add' | 'accumulate' +type CompositeOperationOrAuto = CompositeOperation | 'auto' + +declare class AnimationTimeline { + +currentTime: number | null; +} + +type DocumentTimelineOptions = { + originTime?: DOMHighResTimeStamp; + ... +} + +declare class DocumentTimeline extends AnimationTimeline { + constructor(options?: DocumentTimelineOptions): DocumentTimeline; +} + +type EffectTiming = { + delay: number; + endDelay: number; + fill: FillMode; + iterationStart: number; + iterations: number; + duration: number | string; + direction: PlaybackDirection; + easing: string; + ... +} + +type OptionalEffectTiming = $Rest + +type ComputedEffectTiming = EffectTiming & { + endTime: number; + activeDuration: number; + localTime: number | null; + progress: number | null; + currentIteration: number | null; + ... 
+} + +declare class AnimationEffect { + getTiming(): EffectTiming; + getComputedTiming(): ComputedEffectTiming; + updateTiming(timing?: OptionalEffectTiming): void; +} + +type Keyframe = { + composite?: CompositeOperationOrAuto; + easing?: string; + offset?: number | null; + [property: string]: string | number | null | void; + ... +} + +type ComputedKeyframe = { + composite: CompositeOperationOrAuto; + computedOffset: number; + easing: string; + offset: number | null; + [property: string]: string | number | null | void; + ... +} + +type PropertyIndexedKeyframes = { + composite?: CompositeOperationOrAuto | CompositeOperationOrAuto[]; + easing?: string | string[]; + offset?: number | (number | null)[]; + [property: string]: string | string[] | number | null | (number | null)[] | void; + ... +} + +type KeyframeEffectOptions = $Rest & { + iterationComposite?: IterationCompositeOperation; + composite?: CompositeOperation; + ... +} + +declare class KeyframeEffect extends AnimationEffect { + constructor( + target: Element | null, + keyframes: Keyframe[] | PropertyIndexedKeyframes | null, + options?: number | KeyframeEffectOptions, + ): KeyframeEffect; + constructor(source: KeyframeEffect): KeyframeEffect; + + target: Element | null; + iterationComposite: IterationCompositeOperation; + composite: CompositeOperation; + getKeyframes(): ComputedKeyframe[]; + setKeyframes(keyframes: Keyframe[] | PropertyIndexedKeyframes | null): void; +} + +declare class Animation extends EventTarget { + constructor(effect?: AnimationEffect | null, timeline?: AnimationTimeline | null): Animation; + + id: string; + effect: AnimationEffect | null; + timeline: AnimationTimeline | null; + startTime: number | null; + currentTime: number | null; + playbackRate: number; + +playState: AnimationPlayState; + +replaceState: AnimationReplaceState; + +pending: boolean; + +ready: Promise; + +finished: Promise; + onfinish: ?((ev: AnimationPlaybackEvent) => mixed); + oncancel: ?((ev: AnimationPlaybackEvent) => mixed); + onremove: ?((ev: AnimationPlaybackEvent) => mixed); + cancel(): void; + finish(): void; + play(): void; + pause(): void; + updatePlaybackRate(playbackRate: number): void; + reverse(): void; + persist(): void; + commitStyles(): void; +} + +type KeyframeAnimationOptions = KeyframeEffectOptions & { + id?: string; + ... +} + +type GetAnimationsOptions = { + subtree?: boolean; + ... +} + +interface Animatable { + animate(keyframes: Keyframe[] | PropertyIndexedKeyframes | null, options?: number | KeyframeAnimationOptions): Animation; + getAnimations(options?: GetAnimationsOptions): Animation[]; +} + +type AnimationPlaybackEvent$Init = Event$Init & { + currentTime?: number | null; + timelineTime?: number | null; + ... +} + +declare class AnimationPlaybackEvent extends Event { + constructor(type: string, animationEventInitDict?: AnimationPlaybackEvent$Init): AnimationPlaybackEvent; + +currentTime: number | null; + +timelineTime: number | null; +} diff --git a/lib/dom.js b/lib/dom.js index 4c5758a2b96..7eabb12cc67 100644 --- a/lib/dom.js +++ b/lib/dom.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,8 +8,9 @@ declare class Blob { constructor(blobParts?: Array, options?: { - type?: string; - endings?: string; + type?: string, + endings?: string, + ... 
}): void; isClosed: bool; size: number; @@ -19,28 +20,29 @@ declare class Blob { } declare class FileReader extends EventTarget { + +EMPTY: 0; + +LOADING: 1; + +DONE: 2; + +error: null | DOMError; + +readyState: 0 | 1 | 2; + +result: null | string | ArrayBuffer; abort(): void; - DONE: number; - EMPTY: number; - error: DOMError; - LOADING: number; - onabort: (ev: any) => any; - onerror: (ev: any) => any; - onload: (ev: any) => any; - onloadend: (ev: any) => any; - onloadstart: (ev: any) => any; - onprogress: (ev: any) => any; + onabort: null | (ev: ProgressEvent) => any; + onerror: null | (ev: ProgressEvent) => any; + onload: null | (ev: ProgressEvent) => any; + onloadend: null | (ev: ProgressEvent) => any; + onloadstart: null | (ev: ProgressEvent) => any; + onprogress: null | (ev: ProgressEvent) => any; readAsArrayBuffer(blob: Blob): void; readAsBinaryString(blob: Blob): void; readAsDataURL(blob: Blob): void; readAsText(blob: Blob, encoding?: string): void; - readyState: 0 | 1 | 2; - result: string | ArrayBuffer; } declare type FilePropertyBag = { - type?: string, - lastModified?: number, + type?: string, + lastModified?: number, + ... }; declare class File extends Blob { constructor( @@ -88,17 +90,23 @@ declare class DataTransferItem { type: string; // readonly getAsString(_callback: ?(data: string) => mixed): void; getAsFile(): ?File; + /* + * This is not supported by all browsers, please have a fallback plan for it. + * For more information, please checkout + * https://developer.mozilla.org/en-US/docs/Web/API/DataTransferItem/webkitGetAsEntry + */ + webkitGetAsEntry(): void | () => any; }; /* DOM */ +declare type DOMStringMap = { [key:string]: string, ... } + declare class DOMError { name: string; } -declare type ElementDefinitionOptions = { - extends?: string; -} +declare type ElementDefinitionOptions = { extends?: string, ... } declare interface CustomElementRegistry { define(name: string, ctor: Class, options?: ElementDefinitionOptions): void; @@ -114,61 +122,92 @@ declare interface ShadowRoot extends DocumentFragment { declare type ShadowRootMode = 'open'|'closed'; declare type ShadowRootInit = { - delegatesFocus?: boolean; - mode: ShadowRootMode; + delegatesFocus?: boolean, + mode: ShadowRootMode, + ... +} + +declare type ScrollToOptions = { + top?: number; + left?: number; + behavior?: 'auto' | 'smooth'; + ... } type EventHandler = (event: Event) => mixed -type EventListener = {handleEvent: EventHandler} | EventHandler +type EventListener = { handleEvent: EventHandler, ... } | EventHandler type MouseEventHandler = (event: MouseEvent) => mixed -type MouseEventListener = {handleEvent: MouseEventHandler} | MouseEventHandler +type MouseEventListener = { handleEvent: MouseEventHandler, ... } | MouseEventHandler type FocusEventHandler = (event: FocusEvent) => mixed -type FocusEventListener = {handleEvent: FocusEventHandler} | FocusEventHandler +type FocusEventListener = { handleEvent: FocusEventHandler, ... } | FocusEventHandler type KeyboardEventHandler = (event: KeyboardEvent) => mixed -type KeyboardEventListener = {handleEvent: KeyboardEventHandler} | KeyboardEventHandler +type KeyboardEventListener = { handleEvent: KeyboardEventHandler, ... } | KeyboardEventHandler +type InputEventHandler = (event: InputEvent) => mixed +type InputEventListener = { handleEvent: InputEventHandler, ... 
} | InputEventHandler type TouchEventHandler = (event: TouchEvent) => mixed -type TouchEventListener = {handleEvent: TouchEventHandler} | TouchEventHandler +type TouchEventListener = { handleEvent: TouchEventHandler, ... } | TouchEventHandler type WheelEventHandler = (event: WheelEvent) => mixed -type WheelEventListener = {handleEvent: WheelEventHandler} | WheelEventHandler +type WheelEventListener = { handleEvent: WheelEventHandler, ... } | WheelEventHandler +type AbortProgressEventHandler = (event: ProgressEvent) => mixed +type AbortProgressEventListener = { handleEvent: AbortProgressEventHandler, ... } | AbortProgressEventHandler type ProgressEventHandler = (event: ProgressEvent) => mixed -type ProgressEventListener = {handleEvent: ProgressEventHandler} | ProgressEventHandler +type ProgressEventListener = { handleEvent: ProgressEventHandler, ... } | ProgressEventHandler type DragEventHandler = (event: DragEvent) => mixed -type DragEventListener = {handleEvent: DragEventHandler} | DragEventHandler +type DragEventListener = { handleEvent: DragEventHandler, ... } | DragEventHandler type PointerEventHandler = (event: PointerEvent) => mixed -type PointerEventListener = {handleEvent: PointerEventHandler} | PointerEventHandler +type PointerEventListener = { handleEvent: PointerEventHandler, ... } | PointerEventHandler type AnimationEventHandler = (event: AnimationEvent) => mixed -type AnimationEventListener = {handleEvent: AnimationEventHandler} | AnimationEventHandler +type AnimationEventListener = { handleEvent: AnimationEventHandler, ... } | AnimationEventHandler type ClipboardEventHandler = (event: ClipboardEvent) => mixed -type ClipboardEventListener = {handleEvent: ClipboardEventHandler} | ClipboardEventHandler - +type ClipboardEventListener = { handleEvent: ClipboardEventHandler, ... } | ClipboardEventHandler +type TransitionEventHandler = (event: TransitionEvent) => mixed +type TransitionEventListener = { handleEvent: TransitionEventHandler, ... } | TransitionEventHandler +type MessageEventHandler = (event: MessageEvent) => mixed +type MessageEventListener = { handleEvent: MessageEventHandler, ... } | MessageEventHandler +type BeforeUnloadEventHandler = (event: BeforeUnloadEvent) => mixed +type BeforeUnloadEventListener = { handleEvent: BeforeUnloadEventHandler, ... 
} | BeforeUnloadEventHandler + +type MediaKeySessionType = 'temporary' | 'persistent-license'; +type MediaKeyStatus = 'usable' | 'expired' | 'released' | 'output-restricted' | 'output-downscaled' | 'status-pending' | 'internal-error'; type MouseEventTypes = 'contextmenu' | 'mousedown' | 'mouseenter' | 'mouseleave' | 'mousemove' | 'mouseout' | 'mouseover' | 'mouseup' | 'click' | 'dblclick'; type FocusEventTypes = 'blur' | 'focus' | 'focusin' | 'focusout'; type KeyboardEventTypes = 'keydown' | 'keyup' | 'keypress'; +type InputEventTypes = 'input' | 'beforeinput' type TouchEventTypes = 'touchstart' | 'touchmove' | 'touchend' | 'touchcancel'; type WheelEventTypes = 'wheel'; +type AbortProgressEventTypes = 'abort'; type ProgressEventTypes = 'abort' | 'error' | 'load' | 'loadend' | 'loadstart' | 'progress' | 'timeout'; type DragEventTypes = 'drag' | 'dragend' | 'dragenter' | 'dragexit' | 'dragleave' | 'dragover' | 'dragstart' | 'drop'; type PointerEventTypes = 'pointerover' | 'pointerenter' | 'pointerdown' | 'pointermove' | 'pointerup' | 'pointercancel' | 'pointerout' | 'pointerleave' | 'gotpointercapture' | 'lostpointercapture'; type AnimationEventTypes = 'animationstart' | 'animationend' | 'animationiteration'; type ClipboardEventTypes = 'clipboardchange' | 'cut' | 'copy' | 'paste'; +type TransitionEventTypes = 'transitionrun' | 'transitionstart' | 'transitionend' | 'transitioncancel'; +type MessageEventTypes = string; +type BeforeUnloadEventTypes = 'beforeunload'; type EventListenerOptionsOrUseCapture = boolean | { - capture?: boolean, - once?: boolean, - passive?: boolean + capture?: boolean, + once?: boolean, + passive?: boolean, + ... }; declare class EventTarget { addEventListener(type: MouseEventTypes, listener: MouseEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: FocusEventTypes, listener: FocusEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: KeyboardEventTypes, listener: KeyboardEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + addEventListener(type: InputEventTypes, listener: InputEventHandler, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: TouchEventTypes, listener: TouchEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: WheelEventTypes, listener: WheelEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + addEventListener(type: AbortProgressEventTypes, listener: AbortProgressEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: ProgressEventTypes, listener: ProgressEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: DragEventTypes, listener: DragEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: PointerEventTypes, listener: PointerEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: AnimationEventTypes, listener: AnimationEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: ClipboardEventTypes, listener: ClipboardEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + addEventListener(type: TransitionEventTypes, listener: TransitionEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + addEventListener(type: MessageEventTypes, listener: 
MessageEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + addEventListener(type: BeforeUnloadEventTypes, listener: BeforeUnloadEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; addEventListener(type: string, listener: EventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: MouseEventTypes, listener: MouseEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; @@ -176,35 +215,49 @@ declare class EventTarget { removeEventListener(type: KeyboardEventTypes, listener: KeyboardEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: TouchEventTypes, listener: TouchEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: WheelEventTypes, listener: WheelEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + removeEventListener(type: AbortProgressEventTypes, listener: AbortProgressEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: ProgressEventTypes, listener: ProgressEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: DragEventTypes, listener: DragEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: PointerEventTypes, listener: PointerEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: AnimationEventTypes, listener: AnimationEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: ClipboardEventTypes, listener: ClipboardEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + removeEventListener(type: TransitionEventTypes, listener: TransitionEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + removeEventListener(type: MessageEventTypes, listener: MessageEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; + removeEventListener(type: BeforeUnloadEventTypes, listener: BeforeUnloadEventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; removeEventListener(type: string, listener: EventListener, optionsOrUseCapture?: EventListenerOptionsOrUseCapture): void; attachEvent?: (type: MouseEventTypes, listener: MouseEventListener) => void; attachEvent?: (type: FocusEventTypes, listener: FocusEventListener) => void; attachEvent?: (type: KeyboardEventTypes, listener: KeyboardEventListener) => void; + attachEvent?: (type: InputEventTypes, listener: InputEventHandler) => void; attachEvent?: (type: TouchEventTypes, listener: TouchEventListener) => void; attachEvent?: (type: WheelEventTypes, listener: WheelEventListener) => void; + attachEvent?: (type: AbortProgressEventTypes, listener: AbortProgressEventListener) => void; attachEvent?: (type: ProgressEventTypes, listener: ProgressEventListener) => void; attachEvent?: (type: DragEventTypes, listener: DragEventListener) => void; attachEvent?: (type: PointerEventTypes, listener: PointerEventListener) => void; attachEvent?: (type: AnimationEventTypes, listener: AnimationEventListener) => void; attachEvent?: (type: ClipboardEventTypes, listener: ClipboardEventListener) => void; + attachEvent?: (type: TransitionEventTypes, listener: TransitionEventListener) => void; + attachEvent?: (type: MessageEventTypes, listener: MessageEventListener) => void; + attachEvent?: (type: 
BeforeUnloadEventTypes, listener: BeforeUnloadEventListener) => void; attachEvent?: (type: string, listener: EventListener) => void; detachEvent?: (type: MouseEventTypes, listener: MouseEventListener) => void; detachEvent?: (type: FocusEventTypes, listener: FocusEventListener) => void; detachEvent?: (type: KeyboardEventTypes, listener: KeyboardEventListener) => void; + detachEvent?: (type: InputEventTypes, listener: InputEventListener) => void; detachEvent?: (type: TouchEventTypes, listener: TouchEventListener) => void; detachEvent?: (type: WheelEventTypes, listener: WheelEventListener) => void; + detachEvent?: (type: AbortProgressEventTypes, listener: AbortProgressEventListener) => void; detachEvent?: (type: ProgressEventTypes, listener: ProgressEventListener) => void; detachEvent?: (type: DragEventTypes, listener: DragEventListener) => void; detachEvent?: (type: PointerEventTypes, listener: PointerEventListener) => void; detachEvent?: (type: AnimationEventTypes, listener: AnimationEventListener) => void; detachEvent?: (type: ClipboardEventTypes, listener: ClipboardEventListener) => void; + detachEvent?: (type: TransitionEventTypes, listener: TransitionEventListener) => void; + detachEvent?: (type: MessageEventTypes, listener: MessageEventListener) => void; + detachEvent?: (type: BeforeUnloadEventTypes, listener: BeforeUnloadEventListener) => void; detachEvent?: (type: string, listener: EventListener) => void; dispatchEvent(evt: Event): boolean; @@ -216,10 +269,11 @@ declare class EventTarget { } type Event$Init = { - bubbles?: boolean, - cancelable?: boolean, - composed?: boolean, - scoped?: boolean + bubbles?: boolean, + cancelable?: boolean, + composed?: boolean, + scoped?: boolean, + ... } declare class Event { @@ -251,9 +305,7 @@ declare class Event { ): void; } -type CustomEvent$Init = Event$Init & { - detail?: any; -} +type CustomEvent$Init = Event$Init & { detail?: any, ... } declare class CustomEvent extends Event { constructor(type: string, eventInitDict?: CustomEvent$Init): void; @@ -274,18 +326,19 @@ declare class UIEvent extends Event { } type MouseEvent$MouseEventInit = { - screenX?: number, - screenY?: number, - clientX?: number, - clientY?: number, - ctrlKey?: boolean, - shiftKey?: boolean, - altKey?: boolean, - metaKey?: boolean, - button?: number, - buttons?: number, - region?: string | null, - relatedTarget?: string | null, + screenX?: number, + screenY?: number, + clientX?: number, + clientY?: number, + ctrlKey?: boolean, + shiftKey?: boolean, + altKey?: boolean, + metaKey?: boolean, + button?: number, + buttons?: number, + region?: string | null, + relatedTarget?: EventTarget | null, + ... 
}; declare class MouseEvent extends UIEvent { @@ -306,11 +359,13 @@ declare class MouseEvent extends UIEvent { offsetY: number; pageX: number; pageY: number; - region: ?string; + region: string | null; + relatedTarget: EventTarget | null; screenX: number; screenY: number; shiftKey: boolean; - relatedTarget: ?EventTarget; + x: number; + y: number; getModifierState(keyArg: string): boolean; } @@ -330,16 +385,17 @@ declare class DragEvent extends MouseEvent { } type PointerEvent$PointerEventInit = MouseEvent$MouseEventInit & { - pointerId?: number; - width?: number; - height?: number; - pressure?: number; - tangentialPressure?: number; - tiltX?: number; - tiltY?: number; - twist?: number; - pointerType?: string; - isPrimary?: boolean; + pointerId?: number, + width?: number, + height?: number, + pressure?: number, + tangentialPressure?: number, + tiltX?: number, + tiltY?: number, + twist?: number, + pointerType?: string, + isPrimary?: boolean, + ... }; declare class PointerEvent extends MouseEvent { @@ -381,9 +437,9 @@ declare class PromiseRejectionEvent extends Event { } // used for websockets and postMessage, for example. See: -// http://www.w3.org/TR/2011/WD-websockets-20110419/ +// https://www.w3.org/TR/2011/WD-websockets-20110419/ // and -// http://www.w3.org/TR/2008/WD-html5-20080610/comms.html +// https://www.w3.org/TR/2008/WD-html5-20080610/comms.html // and // https://html.spec.whatwg.org/multipage/comms.html#the-messageevent-interfaces declare class MessageEvent extends Event { @@ -393,6 +449,21 @@ declare class MessageEvent extends Event { source: WindowProxy; } +// https://www.w3.org/TR/eventsource/ +declare class EventSource extends EventTarget { + constructor(url: string, configuration?: { withCredentials: boolean, ... }): void; + +CLOSED: 2; + +CONNECTING: 0; + +OPEN: 1; + +readyState: 0 | 1 | 2; + +url: string; + +withCredentials: boolean; + onerror: () => void; + onmessage: MessageEventListener; + onopen: () => void; + close: () => void; +} + declare class KeyboardEvent extends UIEvent { altKey: boolean; code: string; @@ -411,7 +482,12 @@ declare class KeyboardEvent extends UIEvent { which: number; } -declare class AnimationEvent extends UIEvent { +declare class InputEvent extends UIEvent { + data: string | null; + isComposing: boolean; +} + +declare class AnimationEvent extends Event { animationName: string; elapsedTime: number; pseudoElement: string; @@ -427,6 +503,17 @@ declare class AnimationEvent extends UIEvent { ) => void; } +// https://html.spec.whatwg.org/multipage/web-messaging.html#broadcasting-to-other-browsing-contexts +declare class BroadcastChannel extends EventTarget { + name: string; + onmessage: ?(event: MessageEvent) => void; + onmessageerror: ?(event: MessageEvent) => void; + + constructor(name: string): void; + postMessage(msg: mixed): void; + close(): void; +} + // https://www.w3.org/TR/touch-events/#idl-def-Touch declare class Touch { clientX: number, @@ -470,21 +557,41 @@ declare class StorageEvent extends Event { } // https://w3c.github.io/clipboard-apis/ as of 15 May 2018 -type ClipboardEvent$Init = Event$Init & { - clipboardData: DataTransfer | null; -}; +type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... 
}; declare class ClipboardEvent extends Event { constructor(type: ClipboardEventTypes, eventInit?: ClipboardEvent$Init): void; +clipboardData: ?DataTransfer; // readonly } +// https://www.w3.org/TR/2017/WD-css-transitions-1-20171130/#interface-transitionevent +type TransitionEvent$Init = Event$Init & { + propertyName: string, + elapsedTime: number, + pseudoElement: string, + ... +}; + +declare class TransitionEvent extends Event { + constructor(type: TransitionEventTypes, eventInit?: TransitionEvent$Init): void; + + +propertyName: string; // readonly + +elapsedTime: number; // readonly + +pseudoElement: string; // readonly +} + +// https://www.w3.org/TR/html50/browsers.html#beforeunloadevent +declare class BeforeUnloadEvent extends Event { + returnValue: string, +} + // TODO: *Event declare class Node extends EventTarget { baseURI: ?string; childNodes: NodeList; firstChild: ?Node; + +isConnected: boolean; lastChild: ?Node; nextSibling: ?Node; nodeName: string; @@ -500,6 +607,7 @@ declare class Node extends EventTarget { cloneNode(deep?: boolean): this; compareDocumentPosition(other: Node): number; contains(other: ?Node): boolean; + getRootNode(options?: { composed: boolean, ... }): Node; hasChildNodes(): boolean; insertBefore(newChild: T, refChild?: ?Node): T; isDefaultNamespace(namespaceURI: string): boolean; @@ -570,7 +678,7 @@ declare class Attr extends Node { localName: string; } -declare class HTMLCollection { +declare class HTMLCollection<+Elem: HTMLElement> { @@iterator(): Iterator; length: number; item(nameOrIndex?: any, optionalIndex?: any): Elem | null; @@ -579,39 +687,45 @@ declare class HTMLCollection { } // from https://www.w3.org/TR/custom-elements/#extensions-to-document-interface-to-register +// See also https://github.com/w3c/webcomponents/ type ElementRegistrationOptions = { - +prototype?: { - // from https://www.w3.org/TR/custom-elements/#types-of-callbacks - +createdCallback?: () => mixed; - +attachedCallback?: () => mixed; - +detachedCallback?: () => mixed; - +attributeChangedCallback?: - // attribute is set - (( - attributeLocalName: string, - oldAttributeValue: null, - newAttributeValue: string, - attributeNamespace: string - ) => mixed) & - // attribute is changed - (( - attributeLocalName: string, - oldAttributeValue: string, - newAttributeValue: string, - attributeNamespace: string - ) => mixed) & - // attribute is removed - (( - attributeLocalName: string, - oldAttributeValue: string, - newAttributeValue: null, - attributeNamespace: string - ) => mixed); - }; - +extends?: string; + +prototype?: { + // from https://www.w3.org/TR/custom-elements/#types-of-callbacks + // See also https://github.com/w3c/webcomponents/ + +createdCallback?: () => mixed, + +attachedCallback?: () => mixed, + +detachedCallback?: () => mixed, + +attributeChangedCallback?: + // attribute is set + (( + attributeLocalName: string, + oldAttributeValue: null, + newAttributeValue: string, + attributeNamespace: string + ) => mixed) & + // attribute is changed + (( + attributeLocalName: string, + oldAttributeValue: string, + newAttributeValue: string, + attributeNamespace: string + ) => mixed) & + // attribute is removed + (( + attributeLocalName: string, + oldAttributeValue: string, + newAttributeValue: null, + attributeNamespace: string + ) => mixed), + ... + }, + +extends?: string, + ... 
} declare class Document extends Node { + +timeline: DocumentTimeline; + getAnimations(): Array; URL: string; adoptNode(source: T): T; anchors: HTMLCollection; @@ -696,8 +810,12 @@ declare class Document extends Node { documentMode: number; domain: string | null; embeds: HTMLCollection; + exitFullscreen(): Promise, + queryCommandSupported(cmdID: string): boolean; execCommand(cmdID: string, showUI?: boolean, value?: any): boolean; forms: HTMLCollection; + fullscreenElement: Element | null; + fullscreenEnabled: boolean; getElementById(elementId: string): HTMLElement | null; getElementsByClassName(classNames: string): HTMLCollection; getElementsByName(elementName: string): HTMLCollection; @@ -827,7 +945,7 @@ declare class Document extends Node { getElementsByTagNameNS(namespaceURI: string | null, localName: 'template'): HTMLCollection; getElementsByTagNameNS(namespaceURI: string | null, localName: 'ul'): HTMLCollection; getElementsByTagNameNS(namespaceURI: string | null, localName: string): HTMLCollection; - head: HTMLElement | null; + head: HTMLHeadElement | null; images: HTMLCollection; implementation: DOMImplementation; importNode(importedNode: T, deep: boolean): T; @@ -861,11 +979,16 @@ declare class Document extends Node { createEvent(eventInterface: 'CustomEvent'): CustomEvent; createEvent(eventInterface: string): Event; createRange(): Range; - elementFromPoint(x: number, y: number): HTMLElement; + elementFromPoint(x: number, y: number): HTMLElement | null; + elementsFromPoint(x: number, y: number): Array; defaultView: any; compatMode: 'BackCompat' | 'CSS1Compat'; hidden: boolean; + // Pointer Lock specification + exitPointerLock(): void; + pointerLockElement: Element | null; + // from ParentNode interface childElementCount: number; children: HTMLCollection; @@ -1182,7 +1305,7 @@ declare class Range { // extension setStartAfter(refNode: Node): void; extractContents(): DocumentFragment; setEndAfter(refNode: Node): void; - createContextualFragment(fragment: string): Node; + createContextualFragment(fragment: string): DocumentFragment; static END_TO_END: number; static START_TO_START: number; static START_TO_END: number; @@ -1192,6 +1315,7 @@ declare class Range { // extension declare var document: Document; // TODO: HTMLDocument +type FocusOptions = { preventScroll?: boolean, ... } declare class DOMTokenList { @@iterator(): Iterator; @@ -1209,7 +1333,9 @@ declare class DOMTokenList { } -declare class Element extends Node { +declare class Element extends Node implements Animatable { + animate(keyframes: Keyframe[] | PropertyIndexedKeyframes | null, options?: number | KeyframeAnimationOptions): Animation; + getAnimations(options?: GetAnimationsOptions): Animation[]; assignedSlot: ?HTMLSlotElement; attachShadow(shadowRootInitDict: ShadowRootInit): ShadowRoot; attributes: NamedNodeMap; @@ -1363,10 +1489,22 @@ declare class Element extends Node { removeAttribute(name?: string): void; removeAttributeNode(attributeNode: Attr): Attr; removeAttributeNS(namespaceURI: string | null, localName: string): void; - requestFullscreen(): void; + requestFullscreen(options?: { navigationUI: 'auto' | 'show' | 'hide', ... 
}): Promise; requestPointerLock(): void; - scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; + scrollIntoView(arg?: (boolean | { + behavior?: ('auto' | 'instant' | 'smooth'), + block?: ('start' | 'center' | 'end' | 'nearest'), + inline?: ('start' | 'center' | 'end' | 'nearest'), + ... + })): void; + scroll(x: number, y: number): void; + scroll(options: ScrollToOptions): void; + scrollTo(x: number, y: number): void; + scrollTo(options: ScrollToOptions): void; + scrollBy(x: number, y: number): void; + scrollBy(options: ScrollToOptions): void; setAttribute(name?: string, value?: string): void; + toggleAttribute(name?: string, force?: boolean): void; setAttributeNS(namespaceURI: string | null, qualifiedName: string, value: string): void; setAttributeNode(newAttr: Attr): Attr | null; setAttributeNodeNS(newAttr: Attr): Attr | null; @@ -1520,7 +1658,7 @@ declare class Element extends Node { declare class HTMLElement extends Element { blur(): void; click(): void; - focus(): void; + focus(options?: FocusOptions): void; getBoundingClientRect(): ClientRect; forceSpellcheck(): void; accessKey: string; @@ -1528,7 +1666,7 @@ declare class HTMLElement extends Element { className: string; contentEditable: string; contextMenu: ?HTMLMenuElement; - dataset: {[key:string]: string}; + dataset: DOMStringMap; dir: 'ltr' | 'rtl' | 'auto'; draggable: bool; dropzone: any; @@ -1553,6 +1691,7 @@ declare class HTMLElement extends Element { oncanplaythrough: ?Function; onchange: ?Function; onclick: ?Function; + oncontextmenu: ?Function; oncuechange: ?Function; ondblclick: ?Function; ondurationchange: ?Function; @@ -1560,6 +1699,8 @@ declare class HTMLElement extends Element { onended: ?Function; onerror: ?Function; onfocus: ?Function; + onfullscreenchange: ?Function; + onfullscreenerror: ?Function; ongotpointercapture: ?Function, oninput: ?Function; oninvalid: ?Function; @@ -1617,7 +1758,7 @@ declare class HTMLElement extends Element { declare class HTMLSlotElement extends HTMLElement { name: string; - assignedNodes(options?: {flatten: boolean}): Node[]; + assignedNodes(options?: { flatten: boolean, ... }): Node[]; } declare class HTMLTableElement extends HTMLElement { @@ -1747,6 +1888,8 @@ declare class TextMetrics { }; declare class Path2D { + constructor(path?: Path2D | string): void; + addPath(path: Path2D, transformation?: ?SVGMatrix): void; addPathByStrokingPath(path: Path2D, styles: CanvasDrawingStyles, transformation?: ?SVGMatrix): void; addText(text: string, styles: CanvasDrawingStyles, transformation: ?SVGMatrix, x: number, y: number, maxWidth?: number): void; @@ -1908,14 +2051,15 @@ declare class CanvasRenderingContext2D { // WebGL idl: https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl type WebGLContextAttributes = { - alpha: bool, - depth: bool, - stencil: bool, - antialias: bool, - premultipliedAlpha: bool, - preserveDrawingBuffer: bool, - preferLowPowerToHighPerformance: bool, - failIfMajorPerformanceCaveat: bool + alpha: bool, + depth: bool, + stencil: bool, + antialias: bool, + premultipliedAlpha: bool, + preserveDrawingBuffer: bool, + preferLowPowerToHighPerformance: bool, + failIfMajorPerformanceCaveat: bool, + ... 
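// Editor's note: illustrative sketch, not part of the patch, showing the
// Element/HTMLElement additions typed above (options-bag scrolling,
// toggleAttribute, focus({preventScroll}), promise-returning
// requestFullscreen). The '#player' selector is hypothetical.
const el = document.querySelector('#player');
if (el instanceof HTMLElement) {
  el.scrollIntoView({ behavior: 'smooth', block: 'center' });
  el.scrollBy({ top: 100, behavior: 'smooth' });
  el.toggleAttribute('hidden', false);   // force-remove the attribute
  el.focus({ preventScroll: true });     // focus without scrolling it into view
  el.requestFullscreen({ navigationUI: 'hide' })
    .catch((err) => console.error('fullscreen request rejected:', err));
}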
}; interface WebGLObject { @@ -2800,7 +2944,7 @@ declare class WebGLContextEvent extends Event { // http://www.w3.org/TR/html5/scripting-1.html#renderingcontext type RenderingContext = CanvasRenderingContext2D | WebGLRenderingContext; -// http://www.w3.org/TR/html5/scripting-1.html#htmlcanvaselement +// https://www.w3.org/TR/html5/scripting-1.html#htmlcanvaselement declare class HTMLCanvasElement extends HTMLElement { width: number; height: number; @@ -2866,7 +3010,7 @@ declare class HTMLIFrameElement extends HTMLElement { scrolling: string; sandbox: DOMTokenList; src: string; - srcDoc: string; + srcdoc: string; width: string; } @@ -2876,6 +3020,7 @@ declare class HTMLImageElement extends HTMLElement { crossOrigin: ?string; currentSrc: string; // readonly height: number; + decode(): Promise; isMap: boolean; naturalHeight: number; // readonly naturalWidth: number; // readonly @@ -2904,6 +3049,9 @@ declare class TimeRanges { start(index: number): number; end(index: number): number; } +declare class Audio extends HTMLAudioElement { + constructor(URLString?: string): void; +} declare class AudioTrack { id: string; @@ -2995,6 +3143,38 @@ declare class TextTrackList extends EventTarget { onremovetrack: (ev: any) => any; } +declare class MediaKeyStatusMap { + @@iterator(): Iterator<[BufferDataSource, MediaKeyStatus]>; + size: number; + entries(): Iterator<[BufferDataSource, MediaKeyStatus]>; + forEach(callbackfn: (value: MediaKeyStatus, key: BufferDataSource, map: MediaKeyStatusMap) => any, thisArg?: any): void; + get(key: BufferDataSource): MediaKeyStatus; + has(key: BufferDataSource): boolean; + keys(): Iterator; + values(): Iterator; +} + +declare class MediaKeySession extends EventTarget { + sessionId: string; + expiration: number; + closed: Promise; + keyStatuses: MediaKeyStatusMap; + + generateRequest(initDataType: string, initData: BufferDataSource): Promise; + load(sessionId: string): Promise; + update(response: BufferDataSource): Promise; + close(): Promise; + remove(): Promise; + + onkeystatuschange: (ev: any) => any; + onmessage: (ev: any) => any; +} + +declare class MediaKeys { + createSession(mediaKeySessionType: MediaKeySessionType): MediaKeySession; + setServerCertificate(serverCertificate: BufferDataSource): Promise; +} + declare class HTMLMediaElement extends HTMLElement { // error state error: ?MediaError; @@ -3049,12 +3229,17 @@ declare class HTMLMediaElement extends HTMLElement { volume: number; muted: boolean; defaultMuted: boolean; + controlsList?: DOMTokenList; // tracks audioTracks: AudioTrackList; videoTracks: VideoTrackList; textTracks: TextTrackList; addTextTrack(kind: string, label?: string, language?: string): TextTrack; + + // media keys + mediaKeys?: ?MediaKeys; + setMediakeys?: (mediakeys: ?MediaKeys) => Promise; } declare class HTMLAudioElement extends HTMLMediaElement { @@ -3092,7 +3277,7 @@ declare class ValidityState { valid: boolean; } -// http://www.w3.org/TR/html5/forms.html#dom-textarea/input-setselectionrange +// https://w3c.github.io/html/sec-forms.html#dom-selectionapielements-setselectionrange type SelectionDirection = 'backward' | 'forward' | 'none'; type SelectionMode = 'select' | 'start' | 'end' | 'preserve'; declare class HTMLInputElement extends HTMLElement { @@ -3183,7 +3368,7 @@ declare class HTMLButtonElement extends HTMLElement { setCustomValidity(error: string): void; } -// http://dev.w3.org/html5/spec-preview/the-textarea-element.html +// https://w3c.github.io/html/sec-forms.html#the-textarea-element declare class HTMLTextAreaElement extends 
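// Editor's note: illustrative sketch, not part of the patch, for the
// HTMLImageElement.decode() and Audio constructor declarations above.
// The asset URLs are hypothetical.
const img = new Image();
img.src = '/assets/hero.png';
img.decode()                       // resolves once the image can be painted
  .then(() => { document.body && document.body.appendChild(img); })
  .catch((err) => console.error('image failed to decode:', err));

const chime = new Audio('/assets/chime.mp3'); // HTMLAudioElement with src preset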
HTMLElement { autofocus: boolean; cols: number; @@ -3294,7 +3479,7 @@ declare class HTMLAnchorElement extends HTMLElement { username: string; } -// http://dev.w3.org/html5/spec-preview/the-label-element.html +// https://w3c.github.io/html/sec-forms.html#the-label-element declare class HTMLLabelElement extends HTMLElement { form: HTMLFormElement | null; htmlFor: string; @@ -3326,7 +3511,7 @@ declare class HTMLStyleElement extends HTMLElement { disabled: boolean; media: string; scoped: boolean; - sheet: ?StyleSheet; + sheet: ?CSSStyleSheet; type: string; } @@ -3529,7 +3714,7 @@ declare class TextRange { scrollIntoView(fStart?: boolean): void; findText(string: string, count?: number, flags?: number): boolean; execCommand(cmdID: string, showUI?: boolean, value?: any): boolean; - getBoundingClientRect(): ClientRect; + getBoundingClientRect(): ClientRect | DOMRect; moveToBookmark(bookmark: string): boolean; isEqual(range: TextRange): boolean; duplicate(): TextRange; @@ -3539,7 +3724,7 @@ declare class TextRange { pasteHTML(html: string): void; inRange(range: TextRange): boolean; moveEnd(unit: string, count?: number): number; - getClientRects(): ClientRectList; + getClientRects(): ClientRectList | DOMRectList; moveStart(unit: string, count?: number): number; parentElement(): Element; queryCommandState(cmdID: string): boolean; @@ -3708,7 +3893,7 @@ typeof NodeFilter.FILTER_ACCEPT | typeof NodeFilter.FILTER_REJECT | typeof NodeFilter.FILTER_SKIP; -type NodeFilterInterface = NodeFilterCallback | { acceptNode: NodeFilterCallback } +type NodeFilterInterface = NodeFilterCallback | { acceptNode: NodeFilterCallback, ... } // TODO: window.NodeFilter exists at runtime and behaves as a constructor // as far as `instanceof` is concerned, but it is not callable. @@ -3776,8 +3961,12 @@ declare function requestAnimationFrame(callback: (timestamp: number) => void): A declare function cancelAnimationFrame(requestId: AnimationFrameID): void; declare opaque type IdleCallbackID; declare function requestIdleCallback( - cb: (deadline: {didTimeout: boolean, timeRemaining: () => number}) => void, - opts?: {timeout: number}, + cb: (deadline: { + didTimeout: boolean, + timeRemaining: () => number, + ... + }) => void, + opts?: { timeout: number, ... }, ): IdleCallbackID; declare function cancelIdleCallback(id: IdleCallbackID): void; declare var localStorage: Storage; @@ -3793,28 +3982,40 @@ declare var status: string; declare var top: WindowProxy; declare function getSelection(): Selection | null; declare var customElements: CustomElementRegistry; +declare function scroll(x: number, y: number): void; +declare function scroll(options: ScrollToOptions): void; +declare function scrollTo(x: number, y: number): void; +declare function scrollTo(options: ScrollToOptions): void; +declare function scrollBy(x: number, y: number): void; +declare function scrollBy(options: ScrollToOptions): void; /* Notification */ type NotificationPermission = 'default' | 'denied' | 'granted'; type NotificationDirection = 'auto' | 'ltr' | 'rtl'; type VibratePattern = number | Array; -type NotificationAction = {action: string, title: string, icon?: string}; +type NotificationAction = { + action: string, + title: string, + icon?: string, + ... 
+}; type NotificationOptions = { - dir: NotificationDirection, - lang: string, - body: string, - tag: string, - image: string, - icon: string, - badge: string, - sound: string, - vibrate: VibratePattern, - timestamp: number, - renotify: boolean, - silent: boolean, - requireInteraction: boolean, - data: ?any, - actions: Array + dir?: NotificationDirection, + lang?: string, + body?: string, + tag?: string, + image?: string, + icon?: string, + badge?: string, + sound?: string, + vibrate?: VibratePattern, + timestamp?: number, + renotify?: boolean, + silent?: boolean, + requireInteraction?: boolean, + data?: ?any, + actions?: Array, + ... }; declare class Notification extends EventTarget { @@ -3824,18 +4025,19 @@ declare class Notification extends EventTarget { callback?: (perm: NotificationPermission) => mixed ): Promise; static maxActions: number; - onclick: (evt: Event) => any; - onerror: (evt: Event) => any; + onclick: ?(evt: Event) => mixed; + onclose: ?(evt: Event) => mixed; + onerror: ?(evt: Event) => mixed; + onshow: ?(evt: Event) => mixed; title: string; dir: NotificationDirection; lang: string; body: string; tag: string; - image: string; - icon: string; - badge: string; - sound: string; - vibrate: Array; + image?: string; + icon?: string; + badge?: string; + vibrate?: Array; timestamp: number; renotify: boolean; silent: boolean; diff --git a/lib/indexeddb.js b/lib/indexeddb.js index 98f098064be..6ea6ee152ba 100644 --- a/lib/indexeddb.js +++ b/lib/indexeddb.js @@ -31,7 +31,8 @@ declare interface IDBDatabase extends EventTarget { close(): void; createObjectStore(name: string, options?: { keyPath?: ?(string|string[]), - autoIncrement?: bool + autoIncrement?: bool, + ... }): IDBObjectStore; deleteObjectStore(name: string): void; transaction(storeNames: string|string[], mode?: 'readonly'|'readwrite'|'versionchange'): IDBTransaction; @@ -63,6 +64,7 @@ declare interface IDBObjectStore { createIndex(indexName: string, keyPath: string|string[], optionalParameter?: { unique?: bool, multiEntry?: bool, + ... }): IDBIndex; count(keyRange?: any|IDBKeyRange): IDBRequest; delete(key: any): IDBRequest; diff --git a/lib/intl.js b/lib/intl.js index 321930d75d3..275502858f1 100644 --- a/lib/intl.js +++ b/lib/intl.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,8 +10,8 @@ declare var Intl: { DateTimeFormat: Class, NumberFormat: Class, PluralRules: ?Class, - - getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[] + getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[], + ... } type Intl$Locale = string @@ -37,7 +37,8 @@ declare class Intl$Collator { ignorePunctuation: boolean, collation: string, numeric: boolean, - caseFirst?: 'upper' | 'lower' | 'false' + caseFirst?: 'upper' | 'lower' | 'false', + ... }; static supportedLocalesOf (locales?: Intl$Locales): Intl$Locale[]; @@ -49,7 +50,8 @@ declare type Intl$CollatorOptions = { sensitivity?: 'base' | 'accent' | 'case' | 'variant', ignorePunctuation?: boolean, numeric?: boolean, - caseFirst?: 'upper' | 'lower' | 'false' + caseFirst?: 'upper' | 'lower' | 'false', + ... } type FormatToPartsType = | 'day' | 'dayPeriod' | 'era' | 'hour' | 'literal' @@ -71,6 +73,7 @@ declare class Intl$DateTimeFormat { formatToParts (value?: Date | number): Array<{ type: FormatToPartsType, value: string, + ... 
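// Editor's note: illustrative sketch, not part of the patch, for the now
// optional NotificationOptions fields and nullable handlers typed above.
// The notification text is hypothetical.
Notification.requestPermission().then((permission) => {
  if (permission === 'granted') {
    const note = new Notification('Build finished', {
      body: 'flow check passed',
      silent: true,
    });
    note.onclick = () => note.close();
  }
});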
}>; resolvedOptions (): { @@ -87,7 +90,8 @@ declare class Intl$DateTimeFormat { hour?: 'numeric' | '2-digit', minute?: 'numeric' | '2-digit', second?: 'numeric' | '2-digit', - timeZoneName?: 'short' | 'long' + timeZoneName?: 'short' | 'long', + ... }; static supportedLocalesOf (locales?: Intl$Locales): Intl$Locale[]; @@ -106,7 +110,8 @@ declare type Intl$DateTimeFormatOptions = { hour?: 'numeric' | '2-digit', minute?: 'numeric' | '2-digit', second?: 'numeric' | '2-digit', - timeZoneName?: 'short' | 'long' + timeZoneName?: 'short' | 'long', + ... } declare class Intl$NumberFormat { @@ -133,7 +138,8 @@ declare class Intl$NumberFormat { minimumFractionDigits?: number, maximumFractionDigits?: number, minimumSignificantDigits?: number, - maximumSignificantDigits?: number + maximumSignificantDigits?: number, + ... }; static supportedLocalesOf (locales?: Intl$Locales): Intl$Locale[]; @@ -149,7 +155,8 @@ declare type Intl$NumberFormatOptions = { minimumFractionDigits?: number, maximumFractionDigits?: number, minimumSignificantDigits?: number, - maximumSignificantDigits?: number + maximumSignificantDigits?: number, + ... } declare class Intl$PluralRules { @@ -169,6 +176,7 @@ declare class Intl$PluralRules { minimumSignificantDigits?: number, maximumSignificantDigits?: number, pluralCategories: Intl$PluralRule[], + ... }; static supportedLocalesOf (locales?: Intl$Locales): Intl$Locale[]; @@ -183,5 +191,6 @@ declare type Intl$PluralRulesOptions = { minimumFractionDigits?: number, maximumFractionDigits?: number, minimumSignificantDigits?: number, - maximumSignificantDigits?: number + maximumSignificantDigits?: number, + ... } diff --git a/lib/node.js b/lib/node.js index 17083f3bbdc..03446b66e46 100644 --- a/lib/node.js +++ b/lib/node.js @@ -1,14 +1,18 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ interface ErrnoError extends Error { - errno?: number; + address?: string; code?: string; + dest?: string; + errno?: string | number; + info?: Object; path?: string; + port?: number; syscall?: string; } @@ -21,7 +25,11 @@ type buffer$NonBufferEncoding = 'ucs2' | 'UCS2' | 'ucs-2' | 'UCS-2' | 'utf16le' | 'UTF16LE' | 'utf-16le' | 'UTF-16LE' | 'latin1'; type buffer$Encoding = buffer$NonBufferEncoding | 'buffer' -type buffer$ToJSONRet = { type: string, data: Array } +type buffer$ToJSONRet = { + type: string, + data: Array, + ... +} declare class Buffer extends Uint8Array { constructor( @@ -122,121 +130,147 @@ declare module "buffer" { } type child_process$execOpts = { - cwd?: string; - env?: Object; - encoding?: string; - shell?: string; - timeout?: number; - maxBuffer?: number; - killSignal?: string; - uid?: number; - gid?: number; + cwd?: string, + env?: Object, + encoding?: string, + shell?: string, + timeout?: number, + maxBuffer?: number, + killSignal?: string | number, + uid?: number, + gid?: number, + windowsHide?: boolean, + ... 
}; declare class child_process$Error extends Error { - code: number, - signal: ?string, + code: number | string | null, + errno?: string, + syscall?: string, + path?: string, + spawnargs?: Array, + killed?: boolean, + signal?: string | null, + cmd: string, } type child_process$execCallback = (error: ?child_process$Error, stdout: string | Buffer, stderr: string | Buffer) => void; type child_process$execSyncOpts = { - cwd?: string; - input?: string | Buffer; - stdio?: string | Array; - env?: Object; + cwd?: string, + input?: string | Buffer | $TypedArray | DataView, + stdio?: string | Array, + env?: Object, shell?: string, - uid?: number; - gid?: number; - timeout?: number; - killSignal?: string; - maxBuffer?: number; - encoding?: string; + uid?: number, + gid?: number, + timeout?: number, + killSignal?: string | number, + maxBuffer?: number, + encoding?: string, + windowsHide?: boolean, + ... }; type child_process$execFileOpts = { - cwd?: string; - env?: Object; - encoding?: string; - timeout?: number; - maxBuffer?: number; - killSignal?: string; - uid?: number; - gid?: number; + cwd?: string, + env?: Object, + encoding?: string, + timeout?: number, + maxBuffer?: number, + killSignal?: string | number, + uid?: number, + gid?: number, + windowsHide?: boolean, + windowsVerbatimArguments?: boolean, + shell?: boolean | string, + ... }; -type child_process$execFileCallback = (error: ?child_process$Error, stdout: Buffer, stderr: Buffer) => void; +type child_process$execFileCallback = (error: ?child_process$Error, stdout: string | Buffer, stderr: string | Buffer) => void; type child_process$execFileSyncOpts = { - cwd?: string; - input?: string | Buffer; - stdio?: string | Array; - env?: Object; - uid?: number; - gid?: number; - timeout?: number; - killSignal?: string; - maxBuffer?: number; - encoding?: string; + cwd?: string, + input?: string | Buffer | $TypedArray | DataView, + stdio?: string | Array, + env?: Object, + uid?: number, + gid?: number, + timeout?: number, + killSignal?: string | number, + maxBuffer?: number, + encoding?: string, + windowsHide?: boolean, + shell?: boolean | string, + ... }; type child_process$forkOpts = { - cwd?: string; - env?: Object; - execPath?: string; - execArgv?: Array; - silent?: boolean; - stdio?: Array; - uid?: number; - gid?: number; + cwd?: string, + env?: Object, + execPath?: string, + execArgv?: Array, + silent?: boolean, + stdio?: Array | string, + windowsVerbatimArguments?: boolean, + uid?: number, + gid?: number, + ... }; type child_process$Handle = any; // TODO type child_process$spawnOpts = { - cwd?: string; - env?: Object; - argv0?: string; - stdio?: string | Array; - detached?: boolean; - uid?: number; - gid?: number; - shell?: boolean | string; + cwd?: string, + env?: Object, + argv0?: string, + stdio?: string | Array, + detached?: boolean, + uid?: number, + gid?: number, + shell?: boolean | string, + windowsVerbatimArguments?: boolean, + windowsHide?: boolean, + ... }; type child_process$spawnRet = { - pid: number; - output: Array; - stdout: Buffer | string; - stderr: Buffer | string; - status: number; - signal: string; - error: Error; + pid: number, + output: Array, + stdout: Buffer | string, + stderr: Buffer | string, + status: number, + signal: string, + error: Error, + ... 
}; type child_process$spawnSyncOpts = { - cwd?: string; - input?: string | Buffer; - stdio?: string | Array; - env?: Object; - uid?: number; - gid?: number; - timeout?: number; - killSignal?: string; - maxBuffer?: number; - encoding?: string; - shell?: boolean | string; + cwd?: string, + input?: string | Buffer, + stdio?: string | Array, + env?: Object, + uid?: number, + gid?: number, + timeout?: number, + killSignal?: string, + maxBuffer?: number, + encoding?: string, + shell?: boolean | string, + ... }; type child_process$spawnSyncRet = child_process$spawnRet; declare class child_process$ChildProcess extends events$EventEmitter { + channel: Object; connected: boolean; + killed: boolean; + pid: number; stderr: stream$Readable; stdin: stream$Writable; stdio: Array; stdout: stream$Readable; - pid: number; disconnect(): void; kill(signal?: string): void; @@ -261,7 +295,7 @@ declare module "child_process" { declare function execSync( command: string, - options: {encoding: buffer$NonBufferEncoding} & child_process$execSyncOpts + options: { encoding: buffer$NonBufferEncoding, ... } & child_process$execSyncOpts ): string; declare function execSync( @@ -319,10 +353,11 @@ declare class cluster$Worker extends events$EventEmitter { } type cluster$setupMasterOpts = { - exec?: string; - args?: Array; - silent?: boolean; - stdio?: Array; + exec?: string, + args?: Array, + silent?: boolean, + stdio?: Array, + ... } declare module "cluster" { @@ -330,13 +365,14 @@ declare module "cluster" { isMaster: boolean; isWorker: boolean; settings: { - execArgv: Array; - exec: string; - args: Array; - silent: boolean; - stdio: Array; - uid: number; - gid: number; + execArgv: Array, + exec: string, + args: Array, + silent: boolean, + stdio: Array, + uid: number, + gid: number, + ... }; worker: cluster$Worker; workers: Object; @@ -380,23 +416,22 @@ declare class crypto$Cipher extends stream$Duplex { ): Buffer; } -type crypto$Credentials = { - // TODO -} +type crypto$Credentials = {...} type crypto$DiffieHellman = { computeSecret( other_public_key: string, input_encoding?: string, output_encoding?: string - ): any; - generateKeys(encoding?: string): any; - getGenerator(encoding?: string): any; - getPrime(encoding?: string): any; - getPrivateKey(encoding?: string): any; - getPublicKey(encoding?: string): any; - setPrivateKey(private_key: any, encoding?: string): void; - setPublicKey(public_key: any, encoding?: string): void; + ): any, + generateKeys(encoding?: string): any, + getGenerator(encoding?: string): any, + getPrime(encoding?: string): any, + getPrivateKey(encoding?: string): any, + getPublicKey(encoding?: string): any, + setPrivateKey(private_key: any, encoding?: string): void, + setPublicKey(public_key: any, encoding?: string): void, + ... } type crypto$ECDH$Encoding = 'latin1' | 'hex' | 'base64'; @@ -477,6 +512,7 @@ declare class crypto$Hmac extends stream$Duplex { type crypto$Sign$private_key = string | { key: string, passphrase: string, + ... } declare class crypto$Sign extends stream$Writable { static(algorithm: string, options?: writableStreamOptions): crypto$Sign, @@ -510,7 +546,9 @@ declare class crypto$Verify extends stream$Writable { type crypto$key = string | { key: string, passphrase?: string, - padding?: string // TODO: enum type in crypto.constants + // TODO: enum type in crypto.constants + padding?: string, + ... 
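// Editor's note: illustrative sketch, not part of the patch, for the
// child_process option bags typed above (string encodings, windowsHide,
// string-or-number kill signals). The commands are hypothetical.
const { execSync, spawn } = require('child_process');

// With an explicit non-buffer encoding, execSync returns a string.
const head = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
console.log('HEAD is', head);

const child = spawn('node', ['--version'], { windowsHide: true });
child.stdout.on('data', (chunk) => process.stdout.write(chunk));
child.on('exit', (code, signal) => console.log('exited', code, signal));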
}; declare module "crypto" { @@ -572,7 +610,7 @@ declare module "crypto" { key: crypto$key, buffer: Buffer ): Buffer; - declare function publicDecrypt( + declare function publicEncrypt( key: crypto$key, buffer: Buffer ): Buffer; @@ -590,14 +628,47 @@ declare module "crypto" { size: number, callback: (err: ?Error, buffer: Buffer) => void ): void; + declare function randomFillSync(buffer: Buffer): void + declare function randomFillSync(buffer: Buffer, offset: number): void + declare function randomFillSync( + buffer: Buffer, + offset: number, + size: number + ): void + declare function randomFill( + buffer: Buffer, + callback: (err: ?Error, buffer: Buffer) => void + ): void + declare function randomFill( + buffer: Buffer, + offset: number, + callback: (err: ?Error, buffer: Buffer) => void + ): void + declare function randomFill( + buffer: Buffer, + offset: number, + size: number, + callback: (err: ?Error, buffer: Buffer) => void + ): void declare function timingSafeEqual( a: Buffer | $TypedArray | DataView, b: Buffer | $TypedArray | DataView ): boolean; } -type net$Socket$address = {address: string; family: string; port: number}; -type dgram$Socket$rinfo = {address: string; family: 'IPv4' | 'IPv6'; port: number, size: number}; +type net$Socket$address = { + address: string, + family: string, + port: number, + ... +}; +type dgram$Socket$rinfo = { + address: string, + family: 'IPv4' | 'IPv6', + port: number, + size: number, + ... +}; declare class dgram$Socket extends events$EventEmitter { addMembership(multicastAddress: string, multicastInterface?: string): void; @@ -629,7 +700,7 @@ declare class dgram$Socket extends events$EventEmitter { declare module "dgram" { declare function createSocket( - options: string | { type: string }, + options: string | { type: string, ... }, callback?: () => void ): dgram$Socket; } @@ -659,11 +730,25 @@ declare module "dns" { declare var REFUSED: string; declare var SERVFAIL: string; declare var TIMEOUT: string; + declare var ADDRCONFIG: number; + declare var V4MAPPED: number; + + declare type LookupOptions = { + family?: number, + hints?: number, + verbatim?: boolean, + all?: boolean, + ... 
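// Editor's note: illustrative sketch, not part of the patch, for the
// crypto.randomFill/randomFillSync and crypto.timingSafeEqual signatures
// added above.
const crypto = require('crypto');

const nonce = Buffer.alloc(16);
crypto.randomFillSync(nonce);                  // fills the buffer in place
crypto.randomFill(Buffer.alloc(32), (err, buf) => {
  if (err) throw err;
  console.log('async random bytes:', buf.toString('hex'));
});

// Constant-time comparison of two equal-length buffers.
const expected = Buffer.from('secret-token');
const provided = Buffer.from('secret-token');
console.log(crypto.timingSafeEqual(expected, provided)); // true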
+ }; declare function lookup( domain: string, - options?: ?number | ?Object, - callback?: (err: ?Error, address: string, family: number) => void + options: number | LookupOptions, + callback: (err: ?Error, address: string, family: number) => void + ): void; + declare function lookup( + domain: string, + callback: (err: ?Error, address: string, family: number) => void ): void; declare function resolve( @@ -711,11 +796,16 @@ declare module "dns" { ip: string, callback: (err: ?Error, domains: Array) => void ): void; + declare function timingSafeEqual( + a: Buffer | $TypedArray | DataView, + b: Buffer | $TypedArray | DataView + ): boolean; } declare class events$EventEmitter { // deprecated static listenerCount(emitter: events$EventEmitter, event: string): number; + static defaultMaxListeners: number; addListener(event: string, listener: Function): this; emit(event: string, ...args:Array): boolean; @@ -728,8 +818,10 @@ declare class events$EventEmitter { prependOnceListener(event: string, listener: Function): this; removeAllListeners(event?: string): this; removeListener(event: string, listener: Function): this; + off(event: string, listener: Function): this; setMaxListeners(n: number): this; getMaxListeners(): number; + rawListeners(event: string): Array; } @@ -837,13 +929,21 @@ declare module "fs" { declare function unlinkSync(path: string): void; declare function rmdir(path: string, callback?: (err: ?ErrnoError) => void): void; declare function rmdirSync(path: string): void; - declare function mkdir(path: string, mode?: number, callback?: (err: ?ErrnoError) => void): void; - declare function mkdirSync(path: string, mode?: number): void; + declare function mkdir(path: string, mode?: number | { + recursive?: boolean, + mode?: number, + ... + }, callback?: (err: ?ErrnoError) => void): void; + declare function mkdirSync(path: string, mode?: number | { + recursive?: boolean, + mode?: number, + ... + }): void; declare function mkdtemp(prefix: string, callback: (err: ?ErrnoError, folderPath: string) => void): void; declare function mkdtempSync(prefix: string): string; declare function readdir( path: string, - options: string | { encoding: string }, + options: string | { encoding: string, ... }, callback: (err: ?ErrnoError, files: Array) => void ): void; declare function readdir( @@ -852,7 +952,7 @@ declare module "fs" { ): void; declare function readdirSync( path: string, - options?: string | { encoding: string } + options?: string | { encoding: string, ... } ): Array; declare function close(fd: number, callback: (err: ?ErrnoError) => void): void; declare function closeSync(fd: number): void; @@ -973,12 +1073,16 @@ declare module "fs" { ): void; declare function readFile( path: string | Buffer | URL | number, - options: { encoding: string; flag?: string }, + options: { + encoding: string, + flag?: string, + ... + }, callback: (err: ?ErrnoError, data: string) => void ): void; declare function readFile( path: string | Buffer | URL | number, - options: { flag?: string }, + options: { flag?: string, ... 
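// Editor's note: illustrative sketch, not part of the patch, for the dns.lookup
// options object and the recursive fs.mkdir/mkdirSync overloads typed above.
// The hostname and paths are hypothetical.
const dns = require('dns');
const fs = require('fs');

dns.lookup('example.com', { family: 4, all: false }, (err, address, family) => {
  if (err) throw err;
  console.log('resolved to', address, 'IPv' + family);
});

// {recursive: true} creates intermediate directories instead of erroring.
fs.mkdirSync('/tmp/app/cache/images', { recursive: true });
fs.mkdir('/tmp/app/logs', { recursive: true, mode: 0o755 }, (err) => {
  if (err) throw err;
});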
}, callback: (err: ?ErrnoError, data: Buffer) => void ): void; declare function readFileSync( @@ -988,15 +1092,24 @@ declare module "fs" { path: string | Buffer | URL | number, encoding: string ): string; - declare function readFileSync(path: string | Buffer | URL | number, options: { encoding: string, flag?: string }): string; - declare function readFileSync(path: string | Buffer | URL | number, options: { encoding?: void, flag?: string }): Buffer; + declare function readFileSync(path: string | Buffer | URL | number, options: { + encoding: string, + flag?: string, + ... + }): string; + declare function readFileSync(path: string | Buffer | URL | number, options: { + encoding?: void, + flag?: string, + ... + }): Buffer; declare function writeFile( filename: string | Buffer | number, data: Buffer | string, options: string | { encoding?: ?string, mode?: number, - flag?: string + flag?: string, + ... }, callback: (err: ?ErrnoError) => void ): void; @@ -1011,16 +1124,18 @@ declare module "fs" { options?: string | { encoding?: ?string, mode?: number, - flag?: string + flag?: string, + ... } ): void; declare function appendFile( filename: string | Buffer | number, data: string | Buffer, - options: { + options: string | { encoding?: ?string, - mode?: number, - flag?: string + mode?: number, + flag?: string, + ... }, callback: (err: ?ErrnoError) => void ): void; @@ -1032,10 +1147,11 @@ declare module "fs" { declare function appendFileSync( filename: string | Buffer | number, data: string | Buffer, - options?: { + options?: string | { encoding?: ?string, - mode?: number, - flag?: string + mode?: number, + flag?: string, + ... } ): void; declare function watchFile(filename: string, options?: Object, listener?: (curr: Stats, prev: Stats) => void): void; @@ -1102,22 +1218,15 @@ declare module "fs" { S_IROTH: number, // 4 S_IWOTH: number, // 2 S_IXOTH: number, // 1 + ... }; declare type BufferEncoding = | 'buffer' - | { - encoding: 'buffer', - }; - declare type EncodingOptions = { - encoding?: string, - }; - declare type EncodingFlag = EncodingOptions & { - flag?: string, - }; - declare type WriteOptions = EncodingFlag & { - mode?: number, - }; + | { encoding: 'buffer', ... }; + declare type EncodingOptions = { encoding?: string, ... }; + declare type EncodingFlag = EncodingOptions & { flag?: string, ... }; + declare type WriteOptions = EncodingFlag & { mode?: number, ... }; declare class FileHandle { appendFile(data: string | Buffer, options: WriteOptions | string): Promise; chmod(mode: number): Promise; @@ -1130,7 +1239,11 @@ declare module "fs" { offset: number, length: number, position: number - ): Promise<{ bytesRead: number, buffer: T }>; + ): Promise<{ + bytesRead: number, + buffer: T, + ... + }>; readFile(options: EncodingFlag): Promise; readFile(options: string): Promise; stat(): Promise; @@ -1142,7 +1255,7 @@ declare module "fs" { } declare type FSPromisePath = string | Buffer | URL; - declare type FSPromise = { + declare class FSPromise { access(path: FSPromisePath, mode?: number): Promise, appendFile(path: FSPromisePath | FileHandle, data: string | Buffer, options: WriteOptions | string): Promise, chmod(path: FSPromisePath, mode: number): Promise, @@ -1167,7 +1280,11 @@ declare module "fs" { offset: number, length: number, position?: number - ): Promise<{ bytesRead: number, buffer: T }>, + ): Promise<{ + bytesRead: number, + buffer: T, + ... 
+ }>, readdir(path: FSPromisePath, options?: string | EncodingOptions): Promise, readFile(path: FSPromisePath | FileHandle, options: string): Promise, readFile(path: FSPromisePath | FileHandle, options?: EncodingFlag): Promise, @@ -1188,13 +1305,17 @@ declare module "fs" { offset: number, length: number, position?: number - ): Promise<{ bytesRead: number, buffer: T }>, + ): Promise<{ + bytesRead: number, + buffer: T, + ... + }>, writeFile( FSPromisePath | FileHandle, data: string | Buffer | Uint8Array, options?: string | WriteOptions ): Promise, - }; + } declare var promises: FSPromise; } @@ -1204,37 +1325,42 @@ type http$agentOptions = { keepAliveMsecs?: number, maxSockets?: number, maxFreeSockets?: number, + ... } -declare class http$Agent { +declare class http$Agent<+SocketT = net$Socket> { constructor(options: http$agentOptions): void; destroy(): void; - freeSockets: {[name: string]: Array}; - getName(options: {host: string, port: number, localAddress: string}): string; + freeSockets: { [name: string]: $ReadOnlyArray, ... }; + getName(options: { + host: string, + port: number, + localAddress: string, + ... + }): string; maxFreeSockets: number; maxSockets: number; - requests: {[name: string]: Array}; - sockets: {[name: string]: Array}; + requests: { [name: string]: $ReadOnlyArray>, ... }; + sockets: { [name: string]: $ReadOnlyArray, ... }; } -declare class http$IncomingMessage extends stream$Readable { - destroy(error?: Error): void; +declare class http$IncomingMessage extends stream$Readable { headers: Object; rawHeaders: Array; httpVersion: string; method: string; trailers: Object; setTimeout(msecs: number, callback: Function): void; - socket: net$Socket; + socket: SocketT; statusCode: number; statusMessage: string; url: string; } -declare class http$ClientRequest extends stream$Writable { +declare class http$ClientRequest<+SocketT = net$Socket> extends stream$Writable { abort(): void; aborted: boolean; - connection: net$Socket | null; + +connection: SocketT | null; flushHeaders(): void; getHeader(name: string): string; removeHeader(name: string): void; @@ -1242,89 +1368,158 @@ declare class http$ClientRequest extends stream$Writable { setNoDelay(noDelay?: boolean): void; setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; setTimeout(msecs: number, callback?: Function): void; - socket: net$Socket | null; + +socket: SocketT | null; } declare class http$ServerResponse extends stream$Writable { - addTrailers(headers: {[key: string] : string}): void; + addTrailers(headers: { [key: string] : string, ... }): void; + connection: net$Socket; finished: boolean; getHeader(name: string): string; + getHeaderNames(): Array; + getHeaders(): { [key: string] : string | Array, ...}; + hasHeader(name: string): boolean; headersSent: boolean; removeHeader(name: string): void; sendDate: boolean; setHeader(name: string, value: string | Array): void; setTimeout(msecs: number, callback?: Function): http$ServerResponse; + socket: net$Socket; statusCode: number; statusMessage: string; writeContinue(): void; - writeHead(status: number, statusMessage?: string, headers?: {[key: string] : string}): void; - writeHead(status: number, headers?: {[key: string] : string}): void; -} + writeHead(status: number, statusMessage?: string, headers?: { [key: string] : string, ... }): void; + writeHead(status: number, headers?: { [key: string] : string, ... 
}): void; + writeProcessing(): void; +} + +declare class http$Server extends net$Server { + listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + // The following signatures are added to allow omitting intermediate arguments + listen(port?: number, backlog?: number, callback?: Function): this; + listen(port?: number, hostname?: string, callback?: Function): this; + listen(port?: number, callback?: Function): this; + listen(path: string, callback?: Function): this; + listen(handle: { + port?: number, + host?: string, + path?: string, + backlog?: number, + exclusive?: boolean, + readableAll?: boolean, + writableAll?: boolean, + ipv6Only?: boolean, + ... + }, callback?: Function): this; + listening: boolean; + close(callback?: (error: ?Error) => mixed): this; + maxHeadersCount: number; + keepAliveTimeout: number; + setTimeout(msecs: number, callback: Function): this; + timeout: number; +} + +declare class https$Server extends tls$Server { + listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + // The following signatures are added to allow omitting intermediate arguments + listen(port?: number, backlog?: number, callback?: Function): this; + listen(port?: number, hostname?: string, callback?: Function): this; + listen(port?: number, callback?: Function): this; + listen(path: string, callback?: Function): this; + listen(handle: { + port?: number, + host?: string, + path?: string, + backlog?: number, + exclusive?: boolean, + readableAll?: boolean, + writableAll?: boolean, + ipv6Only?: boolean, + ... + }, callback?: Function): this; + close(callback?: (error: ?Error) => mixed): this; + keepAliveTimeout: number; + setTimeout(msecs: number, callback: Function): this; + timeout: number; +} + +type requestOptions = {| + auth?: string, + defaultPort?: number, + family?: number, + headers?: { [key: string] : mixed, ... }, + host?: string, + hostname?: string, + localAddress?: string, + method?: string, + path?: string, + port?: number, + protocol?: string, + setHost?: boolean, + socketPath?: string, + timeout?: number, +|} -declare module "http" { - declare class Server extends net$Server { - listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - // The following signatures are added to allow omitting intermediate arguments - listen(port?: number, backlog?: number, callback?: Function): Server; - listen(port?: number, hostname?: string, callback?: Function): Server; - listen(port?: number, callback?: Function): Server; - listen(path: string, callback?: Function): Server; - listen(handle: Object, callback?: Function): Server; - listening: boolean; - close(callback?: (error: ?Error) => mixed): Server; - maxHeadersCount: number; - keepAliveTimeout: number; - setTimeout(msecs: number, callback: Function): Server; - timeout: number; - } +type http$requestOptions = { + ...requestOptions, + agent?: boolean | http$Agent, + createConnection?: (options: net$connectOptions, callback?: Function) => net$Socket, + ... 
+}; - declare class Agent extends http$Agent { +declare module "http" { + declare class Server extends http$Server {} + declare class Agent extends http$Agent { createConnection(options: net$connectOptions, callback?: Function): net$Socket; } - declare class ClientRequest extends http$ClientRequest {} - declare class IncomingMessage extends http$IncomingMessage {} + declare class ClientRequest extends http$ClientRequest {} + declare class IncomingMessage extends http$IncomingMessage {} declare class ServerResponse extends http$ServerResponse {} declare function createServer( requestListener?: (request: IncomingMessage, response: ServerResponse) => void ): Server; declare function request( - options: Object | string, + options: http$requestOptions, + callback?: (response: IncomingMessage) => void + ): ClientRequest; + declare function request( + url: string, + options?: http$requestOptions, callback?: (response: IncomingMessage) => void ): ClientRequest; declare function get( - options: Object | string, + options: http$requestOptions, + callback?: (response: IncomingMessage) => void + ): ClientRequest; + declare function get( + url: string, + options?: http$requestOptions, callback?: (response: IncomingMessage) => void ): ClientRequest; declare var METHODS: Array; - declare var STATUS_CODES: {[key: number]: string}; + declare var STATUS_CODES: { [key: number]: string, ... }; declare var globalAgent: Agent; } -declare module "https" { - declare class Server extends tls$Server { - listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - // The following signatures are added to allow omitting intermediate arguments - listen(port?: number, backlog?: number, callback?: Function): Server; - listen(port?: number, hostname?: string, callback?: Function): Server; - listen(port?: number, callback?: Function): Server; - listen(path: string, callback?: Function): Server; - listen(handle: Object, callback?: Function): Server; - close(callback?: (error: ?Error) => mixed): Server; - keepAliveTimeout: number; - setTimeout(msecs: number, callback: Function): Server; - timeout: number; - } +type https$requestOptions = { + ...requestOptions, + agent?: boolean | http$Agent, + createConnection?: (options: tls$connectOptions, callback?: Function) => tls$TLSSocket, + ... 
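// Editor's note: illustrative sketch, not part of the patch, for the new
// http.request/http.get overloads typed above that take a URL string plus a
// separate options object. The endpoint is hypothetical.
const http = require('http');

const req = http.request(
  'http://example.com/api/status',
  { method: 'GET', headers: { accept: 'application/json' }, timeout: 5000 },
  (res) => {
    let body = '';
    res.setEncoding('utf8');
    res.on('data', (chunk) => { body += chunk; });
    res.on('end', () => console.log(res.statusCode, body));
  },
);
req.on('error', (err) => console.error('request failed:', err));
req.end();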
+}; - declare class Agent extends http$Agent { +declare module "https" { + declare class Server extends https$Server {} + declare class Agent extends http$Agent { createConnection(port: ?number, host: ?string, options: tls$connectOptions): tls$TLSSocket; createConnection(port: ?number, options: tls$connectOptions): tls$TLSSocket; createConnection(options: tls$connectOptions): tls$TLSSocket; } - declare class ClientRequest extends http$ClientRequest {} - declare class IncomingMessage extends http$IncomingMessage {} + declare class ClientRequest extends http$ClientRequest {} + declare class IncomingMessage extends http$IncomingMessage {} declare class ServerResponse extends http$ServerResponse {} declare function createServer( @@ -1332,11 +1527,21 @@ declare module "https" { requestListener?: (request: IncomingMessage, response: ServerResponse) => void ): Server; declare function request( - options: Object | string, + options: https$requestOptions, + callback?: (response: IncomingMessage) => void + ): ClientRequest; + declare function request( + url: string, + options?: https$requestOptions, callback?: (response: IncomingMessage) => void ): ClientRequest; declare function get( - options: Object | string, + options: https$requestOptions, + callback?: (response: IncomingMessage) => void + ): ClientRequest; + declare function get( + url: string, + options?: https$requestOptions, callback?: (response: IncomingMessage) => void ): ClientRequest; @@ -1349,29 +1554,31 @@ declare class net$Socket extends stream$Duplex { bufferSize: number; bytesRead: number; bytesWritten: number; - connect(options: Object, connectListener?: Function): void; - destroy(exception?: Error): void; + connect(path: string, connectListener?: () => mixed): net$Socket; + connect(port: number, host?: string, connectListener?: () => mixed): net$Socket; + connect(port: number, connectListener?: () => mixed): net$Socket; + connect(options: Object, connectListener?: () => mixed): net$Socket; destroyed: boolean; end( - chunk?: string | Buffer, + chunkOrEncodingOrCallback?: Buffer | Uint8Array | string | (data: any) => void, encodingOrCallback?: string | (data: any) => void, callback?: (data: any) => void - ): void; + ): this; localAddress: string; localPort: number; - pause(): stream$Readable; - ref(): net$Socket; + pause(): this; + ref(): this; remoteAddress: string | void; remoteFamily: string; remotePort: number; - resume(): stream$Readable; - setEncoding(encoding?: string): stream$Readable; - setKeepAlive(enable?: boolean, initialDelay?: number): net$Socket; - setNoDelay(noDelay?: boolean): net$Socket; - setTimeout(timeout: number, callback?: Function): net$Socket; - unref(): net$Socket; + resume(): this; + setEncoding(encoding?: string): this; + setKeepAlive(enable?: boolean, initialDelay?: number): this; + setNoDelay(noDelay?: boolean): this; + setTimeout(timeout: number, callback?: Function): this; + unref(): this; write( - chunk?: string | Buffer, + chunk: Buffer | Uint8Array | string, encodingOrCallback?: string | (data: any) => void, callback?: (data: any) => void ): boolean; @@ -1404,6 +1611,7 @@ type net$connectOptions = { callback?: (err: ?Error, address: string, family: number) => void ) => mixed, path?: string, + ... }; declare module "net" { @@ -1421,6 +1629,7 @@ declare module "net" { options?: { allowHalfOpen?: boolean, pauseOnConnect?: boolean, + ... } | connectionListener, connectionListener?: connectionListener, ): Server; @@ -1448,7 +1657,9 @@ type os$CPU = { nice: number, sys: number, user: number, - } + ... 
+ }, + ... }; type os$NetIFAddr = { @@ -1456,7 +1667,8 @@ type os$NetIFAddr = { family: string, internal: boolean, mac: string, - netmask: string + netmask: string, + ... }; type os$UserInfo$buffer = { @@ -1465,6 +1677,7 @@ type os$UserInfo$buffer = { username: Buffer, homedir: Buffer, shell: ?Buffer, + ... }; type os$UserInfo$string = { @@ -1473,6 +1686,7 @@ type os$UserInfo$string = { username: string, homedir: string, shell: ?string, + ... }; declare module "os" { @@ -1483,15 +1697,15 @@ declare module "os" { declare function homedir(): string; declare function hostname(): string; declare function loadavg(): [number, number, number]; - declare function networkInterfaces(): {[ifName: string]: Array}; + declare function networkInterfaces(): { [ifName: string]: Array, ... }; declare function platform(): string; declare function release(): string; declare function tmpdir(): string; declare function totalmem(): number; declare function type(): string; declare function uptime(): number; - declare function userInfo(options: {encoding: 'buffer'}): os$UserInfo$buffer; - declare function userInfo(options?: {encoding: 'utf8'}): os$UserInfo$string; + declare function userInfo(options: { encoding: 'buffer', ... }): os$UserInfo$buffer; + declare function userInfo(options?: { encoding: 'utf8', ... }): os$UserInfo$string; declare var EOL: string; } @@ -1507,18 +1721,20 @@ declare module "path" { declare var sep: string; declare var delimiter: string; declare function parse(pathString: string): { - root: string; - dir: string; - base: string; - ext: string; - name: string; + root: string, + dir: string, + base: string, + ext: string, + name: string, + ... }; declare function format(pathObject: { - root?: string; - dir?: string; - base?: string; - ext?: string; - name?: string; + root?: string, + dir?: string, + base?: string, + ext?: string, + name?: string, + ... }): string; declare var posix: any; declare var win32: any; @@ -1531,7 +1747,8 @@ declare module "punycode" { declare function toUnicode(domain: string): string; declare var ucs2: { decode: (str: string) => Array, - encode: (codePoints: Array) => string + encode: (codePoints: Array) => string, + ... }; declare var version : string; } @@ -1541,17 +1758,16 @@ declare module "querystring" { obj: Object, separator?: string, equal?: string, - options?: { - encodeURIComponent?: (str: string) => string; - } + options?: { encodeURIComponent?: (str: string) => string, ... } ): string; declare function parse( str: string, separator: ?string, equal: ?string, options?: { - decodeURIComponent?: (str: string) => string; - maxKeys?: number; + decodeURIComponent?: (str: string) => string, + maxKeys?: number, + ... } ): any; declare function escape(str: string): string; @@ -1573,8 +1789,10 @@ declare class readline$Interface extends events$EventEmitter { name: string, ctrl?: boolean, shift?: boolean, - meta?: boolean + meta?: boolean, + ... }): void; + @@asyncIterator(): AsyncIterator; } declare module "readline" { @@ -1583,10 +1801,15 @@ declare module "readline" { declare function clearScreenDown(stream: stream$Stream): void; declare function createInterface(opts: { input: stream$Readable, - output?: stream$Stream, + output?: ?stream$Stream, completer?: readline$InterfaceCompleter, terminal?: boolean, - historySize?: number + historySize?: number, + prompt?: string, + crlfDelay?: number, + removeHistoryDuplicates?: boolean, + escapeCodeTimeout?: number, + ... 
}): readline$Interface; declare function cursorTo(stream: stream$Stream, x?: number, y?: number): void; declare function moveCursor(stream: stream$Stream, dx: number, dy: number): void; @@ -1595,43 +1818,75 @@ declare module "readline" { declare class stream$Stream extends events$EventEmitter {} -type readableStreamOptions = { highWaterMark? : number, encoding? : ?string, objectMode? : boolean }; +type readableStreamOptions = { + highWaterMark?: number, + encoding?: string, + objectMode?: boolean, + read?: (size?: number) => void, + destroy?: (error: ?Error, callback: (error?: Error) => void) => void, + autoDestroy?: boolean, + ... +}; declare class stream$Readable extends stream$Stream { constructor(options?: readableStreamOptions): void; - setEncoding(encoding : string): stream$Readable; + destroy(error?: Error): this; isPaused(): boolean; - pause(): stream$Readable; - pipe(dest: stream$Duplex, options?: { end? : boolean }): stream$Duplex; - pipe(dest: stream$Writable, options?: { end? : boolean }): stream$Writable; + pause(): this; + pipe(dest: T, options?: { end? : boolean, ... }): T; read(size?: number): ?(string | Buffer); - resume(): stream$Readable; - unpipe(dest?: (stream$Writable | stream$Duplex)): void; - unshift(chunk: Buffer | string): void; - push(chunk: ?(Buffer | string), encoding? : string): boolean; - wrap(oldReadable: any): stream$Readable; -} - -type writableStreamOptions = { highWaterMark? : number, decodeString? : boolean, objectMode? : boolean }; + readable: boolean; + readableHighWaterMark: number; + readableLength: number; + resume(): this; + setEncoding(encoding: string): this; + unpipe(dest?: stream$Writable): this; + unshift(chunk: Buffer | Uint8Array | string): void; + wrap(oldReadable: stream$Stream): this; + _read(size?: number): void; + _destroy(error: ?Error, callback: (error?: Error) => void): void; + push(chunk: ?(Buffer | Uint8Array | string), encoding? : string): boolean; + @@asyncIterator(): AsyncIterator; +} + +type writableStreamOptions = { + highWaterMark?: number, + decodeStrings?: boolean, + defaultEncoding?: string, + objectMode?: boolean, + emitClose?: boolean, + write?: (chunk: Buffer | string, encoding: string, callback: (error?: Error) => void) => void, + writev?: (chunks: Array<{ + chunk: Buffer | string, + encoding: string, + ... + }>, callback: (error?: Error) => void) => void, + destroy?: (error: ?Error, callback: (error?: Error) => void) => void, + final?: (callback: (error?: Error) => void) => void, + autoDestroy?: boolean, + ... 
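// Editor's note: illustrative sketch, not part of the patch, for the
// @@asyncIterator support and the extra createInterface options (crlfDelay,
// etc.) typed above for readline. The file path is hypothetical.
const fs = require('fs');
const readline = require('readline');

async function countLines(path) {
  const rl = readline.createInterface({
    input: fs.createReadStream(path),
    crlfDelay: Infinity,            // treat \r\n as a single line break
  });
  let count = 0;
  for await (const line of rl) {    // relies on the async iterator declared above
    count += 1;
  }
  return count;
}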
+}; declare class stream$Writable extends stream$Stream { constructor(options?: writableStreamOptions): void; cork(): void; - end( - chunkOrEncodingOrCallback?: Buffer | string | Function, - encodingOrCallback?: string | Function, - callback?: Function - ): void; - setDefaultEncoding(encoding: string): boolean; - uncork() : void; - write( - chunk: Buffer | string, - encodingOrCallback?: string | Function, - callback?: Function - ): boolean; - _write( + destroy(error?: Error): this; + end(callback?: () => void): this; + end(chunk?: string | Buffer | Uint8Array, callback?: () => void): this; + end(chunk?: string | Buffer | Uint8Array, encoding?: string, callback?: () => void): this; + setDefaultEncoding(encoding: string): this; + uncork(): void; + writable: boolean; + writableHighWaterMark: number; + writableLength: number; + write(chunk: string | Buffer | Uint8Array, callback?: (error?: Error) => void): boolean; + write(chunk: string | Buffer | Uint8Array, encoding?: string, callback?: (error?: Error) => void): boolean; + _write(chunk: Buffer | string, encoding: string, callback: (error?: Error) => void): void; + _writev(chunks: Array<{ chunk: Buffer | string, encoding: string, - callback: (error: ?Error, data?: Buffer | string) => void - ): boolean; + ... + }>, callback: (error?: Error) => void): void; + _destroy(error: ?Error, callback: (error?: Error) => void): void; + _final(callback: (error?: Error) => void): void; } //According to the NodeJS docs: @@ -1639,49 +1894,106 @@ declare class stream$Writable extends stream$Stream { //prototypally inherits from Readable, and then parasitically from Writable." //Source: void) => void, + transform?: ( chunk: Buffer | string, encoding: string, - callback: (error: ?Error, data?: Buffer | string) => void - ): boolean; -} + callback: (error: ?Error, data: ?(Buffer | string)) => void, + ) => void, + ... +}; declare class stream$Transform extends stream$Duplex { + constructor(options?: transformStreamOptions): void; + _flush(callback: (error: ?Error, data: ?(Buffer | string)) => void): void; _transform( chunk: Buffer | string, encoding: string, - callback: (error: ?Error, data?: Buffer | string) => void - ): void; - _flush( - callback: (error: ?Error) => void + callback: (error: ?Error, data: ?(Buffer | string)) => void ): void; } declare class stream$PassThrough extends stream$Transform {} declare module "stream" { + declare var Stream : typeof stream$Stream declare var Readable : typeof stream$Readable declare var Writable : typeof stream$Writable declare var Duplex : typeof stream$Duplex declare var Transform : typeof stream$Transform declare var PassThrough : typeof stream$PassThrough + declare function finished( + stream: stream$Stream, + callback: (error?: Error) => void, + ): () => void; + declare function finished( + stream: stream$Stream, + options: ?{ + error?: boolean, + readable?: boolean, + writable?: boolean, + ... 
+ }, + callback: (error?: Error) => void, + ): () => void; + declare function pipeline( + s1: stream$Readable, + last: T, + cb: (error?: Error) => void, + ): T; + declare function pipeline( + s1: stream$Readable, + s2: stream$Duplex, + last: T, + cb: (error?: Error) => void, + ): T; + declare function pipeline( + s1: stream$Readable, + s2: stream$Duplex, + s3: stream$Duplex, + last: T, + cb: (error?: Error) => void, + ): T; + declare function pipeline( + s1: stream$Readable, + s2: stream$Duplex, + s3: stream$Duplex, + s4: stream$Duplex, + last: T, + cb: (error?: Error) => void, + ): T; + declare function pipeline( + s1: stream$Readable, + s2: stream$Duplex, + s3: stream$Duplex, + s4: stream$Duplex, + s5: stream$Duplex, + last: T, + cb: (error?: Error) => void, + ): T; + declare function pipeline( + s1: stream$Readable, + s2: stream$Duplex, + s3: stream$Duplex, + s4: stream$Duplex, + s5: stream$Duplex, + s6: stream$Duplex, + last: T, + cb: (error?: Error) => void, + ): T; + declare function pipeline( + streams: Array, + cb: (error?: Error) => void, + ): stream$Stream; } declare class tty$ReadStream extends net$Socket { @@ -1726,6 +2038,33 @@ type tls$connectOptions = { callback?: (err: ?Error, address: string, family: number) => void ) => mixed, requestOCSP?: boolean, + ... +}; + +type tls$Certificate$Subject = { + C?: string, + ST?: string, + L?: string, + O?: string, + OU?: string, + CN?: string, + ... +}; + +type tls$Certificate = { + raw: Buffer, + subject: tls$Certificate$Subject, + issuer: tls$Certificate$Subject, + valid_from: string, + valid_to: string, + serialNumber: string, + fingerprint: string, + fingerprint256: string, + ext_key_usage?: Array, + subjectaltname?: string, + infoAccess?: { [string]: Array, ... }, + issuerCertificate?: tls$Certificate, + ... }; declare class tls$TLSSocket extends net$Socket { @@ -1733,9 +2072,22 @@ declare class tls$TLSSocket extends net$Socket { authorized: boolean; authorizationError: string | null; encrypted: true; - getCipher(): { name: string, version: string } | null; - getEphemeralKeyInfo(): { type: 'DH', size: number } | { type: 'EDHC', name: string, size: number } | null; - getPeerCertificate(detailed?: boolean): Object | null; + getCipher(): { + name: string, + version: string, + ... + } | null; + getEphemeralKeyInfo(): { + type: 'DH', + size: number, + ... + } | { + type: 'EDHC', + name: string, + size: number, + ... + } | null; + getPeerCertificate(detailed?: boolean): tls$Certificate | null; getSession(): ?Buffer; getTLSTicket(): Buffer | void; renegotiate(options: Object, callback: Function): boolean | void; @@ -1773,17 +2125,18 @@ declare module "tls" { } type url$urlObject = { - +href?: string; - +protocol?: string; - +slashes?: boolean; - +auth?: string; - +hostname?: string; - +port?: string | number; - +host?: string; - +pathname?: string; - +search?: string; - +query?: Object; - +hash?: string; + +href?: string, + +protocol?: string, + +slashes?: boolean, + +auth?: string, + +hostname?: string, + +port?: string | number, + +host?: string, + +pathname?: string, + +search?: string, + +query?: Object, + +hash?: string, + ... 
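// Editor's note: the snippet below is a usage sketch, not part of the patch.
// It shows how the `pipeline` and `finished` overloads declared for the
// "stream" module above are meant to be called; the file names are made up.
// @flow
const fs = require('fs');
const zlib = require('zlib');
const {pipeline, finished} = require('stream');

const src = fs.createReadStream('input.txt');      // stream$Readable
const gzip = zlib.createGzip();                     // stream$Duplex
const dst = fs.createWriteStream('input.txt.gz');   // stream$Writable

// Readable -> Duplex -> Writable matches the three-stream overload; the last
// stream is returned, and the callback fires on completion or the first error.
pipeline(src, gzip, dst, (err?: Error) => {
  if (err) console.error('pipeline failed', err);
});

// `finished` invokes the callback once `dst` has been flushed or has errored.
finished(dst, (err?: Error) => {
  if (err) console.error('write stream failed', err);
});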
}; declare module "url" { @@ -1792,25 +2145,29 @@ declare module "url" { parseQueryString?: boolean, slashesDenoteHost?: boolean ): { - protocol?: string; - slashes?: boolean; - auth?: string; - host?: string; - port?: string; - hostname?: string; - hash?: string; - search?: string; - query?: any; // null | string | Object - pathname?: string; - path?: string; - href: string; + protocol?: string, + slashes?: boolean, + auth?: string, + host?: string, + port?: string, + hostname?: string, + hash?: string, + search?: string, + // null | string | Object + query?: any, + pathname?: string, + path?: string, + href: string, + ... }; declare function format(urlObj: url$urlObject): string; declare function resolve(from: string, to: string): string; declare function domainToASCII(domain: string): string; declare function domainToUnicode(domain: string): string; + declare function pathToFileURL(path: string): url$urlObject; + declare function fileURLToPath(path: url$urlObject | string): url$urlObject; declare class URLSearchParams { - constructor(init?: string | Array<[string, string]> | {[string]: string} ): void; + constructor(init?: string | Array<[string, string]> | { [string]: string, ... } ): void; append(name: string, value: string): void; delete(name: string): void; entries(): Iterator<[string, string]>; @@ -1845,10 +2202,11 @@ declare module "url" { } type util$InspectOptions = { - showHidden?: boolean; - depth?: ?number; - colors?: boolean; - customInspect?: boolean; + showHidden?: boolean, + depth?: ?number, + colors?: boolean, + customInspect?: boolean, + ... }; declare module "util" { @@ -1863,6 +2221,25 @@ declare module "util" { declare function inherits(constructor: Function, superConstructor: Function): void; declare function deprecate(f: Function, string: string): Function; declare function promisify(f: Function): Function; + declare function callbackify(f: Function): Function; + + declare class TextDecoder { + constructor(encoding?: string, options: { + fatal?: boolean, + ignoreBOM?: boolean, + ... + }): void; + decode(input?: ArrayBuffer | DataView | $TypedArray, options?: { stream?: boolean, ... }): string; + encoding: string; + fatal: boolean; + ignoreBOM: boolean; + } + + declare class TextEncoder { + constructor(): void; + encode(input?: string): Uint8Array; + encoding: string; + } } type vm$ScriptOptions = { @@ -1873,38 +2250,64 @@ type vm$ScriptOptions = { lineOffset?: number, produceCachedData?: boolean, timeout?: number, + ... }; +type vm$CreateContextOptions = { + name?: string; + origin?: string; + codeGeneration?: { + strings?: boolean; + wasm?: boolean; + ... + }; + ... +} + +type vm$CompileFunctionOptions = { + filename?: string; + lineOffset?: number; + columnOffset?: number; + cachedData?: Buffer; + produceCachedData?: boolean; + parsingContext?: { [key: string]: any, ... }; + contextExtensions?: Array<{ [key: string]: any, ... }>; + ... +} + declare class vm$Script { - constructor(code: string, options: Object): void; + constructor(code: string, options?: vm$ScriptOptions | string): void; cachedData: ?Buffer; cachedDataRejected: ?boolean; cachedDataProduced: ?boolean; runInContext(contextifiedSandbox: vm$Context, options?: vm$ScriptOptions): any; - runInNewContext(sandbox?: Object, options?: vm$ScriptOptions): any; + runInNewContext(sandbox?: { [key: string]: any, ... 
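// Editor's note: usage sketch only, not part of the patch. It exercises the
// `promisify` and `callbackify` declarations added to the "util" module above;
// both are typed loosely as Function, so the results below are not refined.
// @flow
const fs = require('fs');
const util = require('util');

// Callback-style fs.readFile becomes promise-returning.
const readFileAsync = util.promisify(fs.readFile);
readFileAsync('package.json', 'utf8').then(data => console.log(data.length));

// An async function becomes callback-style again.
async function currentUser(): Promise<string> {
  return 'flow';
}
const currentUserCb = util.callbackify(currentUser);
currentUserCb((err, name) => {
  if (err) console.error(err);
  else console.log('hello,', name);
});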
}, options?: vm$ScriptOptions): any; runInThisContext(options?: vm$ScriptOptions): any; + createCachedData(): Buffer; } declare class vm$Context {} declare module "vm" { - declare var Script : typeof vm$Script - declare function createContext(sandbox?: Object): vm$Context; - declare function isContext(sandbox: any): boolean; - declare function runInContext(code: string, contextifiedSandbox: vm$Context, options?: vm$ScriptOptions): any; + declare var Script: typeof vm$Script; + declare function createContext(sandbox?: { [key: string]: any, ... }, options?: vm$CreateContextOptions): vm$Context; + declare function isContext(sandbox: { [key: string]: any, ... }): boolean; + declare function runInContext(code: string, contextifiedSandbox: vm$Context, options?: vm$ScriptOptions | string): any; declare function runInDebugContext(code: string): any; - declare function runInNewContext(code: string, sandbox?: Object, options?: vm$ScriptOptions): any; - declare function runInThisContext(code: string, options?: vm$ScriptOptions): any; + declare function runInNewContext(code: string, sandbox?: { [key: string]: any, ... }, options?: vm$ScriptOptions | string): any; + declare function runInThisContext(code: string, options?: vm$ScriptOptions | string): any; + declare function compileFunction(code: string, params: string[], options: vm$CompileFunctionOptions): Function; } type zlib$options = { - flush?: number; - chunkSize?: number; - windowBits?: number; - level?: number; - memLevel?: number; - strategy?: number; - dictionary?: Buffer; + flush?: number, + chunkSize?: number, + windowBits?: number, + level?: number, + memLevel?: number, + strategy?: number, + dictionary?: Buffer, + ... }; type zlib$syncFn = ( @@ -1966,49 +2369,50 @@ declare module "zlib" { declare var Z_MIN_MEMLEVEL: number; declare var Z_MIN_WINDOWBITS: number; declare var constants: { - Z_NO_FLUSH: number; - Z_PARTIAL_FLUSH: number; - Z_SYNC_FLUSH: number; - Z_FULL_FLUSH: number; - Z_FINISH: number; - Z_BLOCK: number; - Z_TREES: number; - Z_OK: number; - Z_STREAM_END: number; - Z_NEED_DICT: number; - Z_ERRNO: number; - Z_STREAM_ERROR: number; - Z_DATA_ERROR: number; - Z_MEM_ERROR: number; - Z_BUF_ERROR: number; - Z_VERSION_ERROR: number; - Z_NO_COMPRESSION: number; - Z_BEST_SPEED: number; - Z_BEST_COMPRESSION: number; - Z_DEFAULT_COMPRESSION: number; - Z_FILTERED: number; - Z_HUFFMAN_ONLY: number; - Z_RLE: number; - Z_FIXED: number; - Z_DEFAULT_STRATEGY: number; - Z_BINARY: number; - Z_TEXT: number; - Z_ASCII: number; - Z_UNKNOWN: number; - Z_DEFLATED: number; - Z_NULL: number; - Z_DEFAULT_CHUNK: number; - Z_DEFAULT_LEVEL: number; - Z_DEFAULT_MEMLEVEL: number; - Z_DEFAULT_WINDOWBITS: number; - Z_MAX_CHUNK: number; - Z_MAX_LEVEL: number; - Z_MAX_MEMLEVEL: number; - Z_MAX_WINDOWBITS: number; - Z_MIN_CHUNK: number; - Z_MIN_LEVEL: number; - Z_MIN_MEMLEVEL: number; - Z_MIN_WINDOWBITS: number; + Z_NO_FLUSH: number, + Z_PARTIAL_FLUSH: number, + Z_SYNC_FLUSH: number, + Z_FULL_FLUSH: number, + Z_FINISH: number, + Z_BLOCK: number, + Z_TREES: number, + Z_OK: number, + Z_STREAM_END: number, + Z_NEED_DICT: number, + Z_ERRNO: number, + Z_STREAM_ERROR: number, + Z_DATA_ERROR: number, + Z_MEM_ERROR: number, + Z_BUF_ERROR: number, + Z_VERSION_ERROR: number, + Z_NO_COMPRESSION: number, + Z_BEST_SPEED: number, + Z_BEST_COMPRESSION: number, + Z_DEFAULT_COMPRESSION: number, + Z_FILTERED: number, + Z_HUFFMAN_ONLY: number, + Z_RLE: number, + Z_FIXED: number, + Z_DEFAULT_STRATEGY: number, + Z_BINARY: number, + Z_TEXT: number, + Z_ASCII: number, + Z_UNKNOWN: 
number, + Z_DEFLATED: number, + Z_NULL: number, + Z_DEFAULT_CHUNK: number, + Z_DEFAULT_LEVEL: number, + Z_DEFAULT_MEMLEVEL: number, + Z_DEFAULT_WINDOWBITS: number, + Z_MAX_CHUNK: number, + Z_MAX_LEVEL: number, + Z_MAX_MEMLEVEL: number, + Z_MAX_WINDOWBITS: number, + Z_MIN_CHUNK: number, + Z_MIN_LEVEL: number, + Z_MIN_MEMLEVEL: number, + Z_MIN_WINDOWBITS: number, + ... }; declare var codes: { Z_OK: number, @@ -2019,7 +2423,8 @@ declare module "zlib" { Z_DATA_ERROR: number, Z_MEM_ERROR: number, Z_BUF_ERROR: number, - Z_VERSION_ERROR: number + Z_VERSION_ERROR: number, + ... }; declare class Zlib extends stream$Duplex { // TODO @@ -2057,25 +2462,28 @@ declare module "zlib" { declare module "assert" { declare class AssertionError extends Error {} declare module.exports: { - (value: any, message?: string): void; - ok(value: any, message?: string): void; - fail(actual: any, expected: any, message: string, operator: string): void; - equal(actual: any, expected: any, message?: string): void; - notEqual(actual: any, expected: any, message?: string): void; - deepEqual(actual: any, expected: any, message?: string): void; - notDeepEqual(actual: any, expected: any, message?: string): void; - strictEqual(actual: any, expected: any, message?: string): void; - notStrictEqual(actual: any, expected: any, message?: string): void; - deepStrictEqual(actual: any, expected: any, message?: string): void; - notDeepStrictEqual(actual: any, expected: any, message?: string): void; + (value: any, message?: string): void, + ok(value: any, message?: string): void, + fail(message?: string | Error): void, + // deprecated since v10.15 + fail(actual: any, expected: any, message: string, operator: string): void, + equal(actual: any, expected: any, message?: string): void, + notEqual(actual: any, expected: any, message?: string): void, + deepEqual(actual: any, expected: any, message?: string): void, + notDeepEqual(actual: any, expected: any, message?: string): void, + strictEqual(actual: any, expected: any, message?: string): void, + notStrictEqual(actual: any, expected: any, message?: string): void, + deepStrictEqual(actual: any, expected: any, message?: string): void, + notDeepStrictEqual(actual: any, expected: any, message?: string): void, throws( block: Function, error?: Function | RegExp | (err: any) => boolean, message?: string - ): void; - doesNotThrow(block: Function, message?: string): void; - ifError(value: any): void; - AssertionError: typeof AssertionError; + ): void, + doesNotThrow(block: Function, message?: string): void, + ifError(value: any): void, + AssertionError: typeof AssertionError, + ... } } @@ -2088,7 +2496,8 @@ type HeapStatistics = { heap_size_limit: number, malloced_memory: number, peak_malloced_memory: number, - does_zap_garbage: number + does_zap_garbage: number, + ... } type HeapSpaceStatistics = { @@ -2096,7 +2505,8 @@ type HeapSpaceStatistics = { space_size: number, space_used_size: number, space_available_size: number, - physical_space_size: number + physical_space_size: number, + ... } declare module "v8" { @@ -2107,7 +2517,11 @@ declare module "v8" { type repl$DefineCommandOptions = | (...args: Array) => void - | { action: (...args: Array) => void, help?: string }; + | { + action: (...args: Array) => void, + help?: string, + ... 
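// Editor's note: usage sketch only, not part of the patch. It runs through the
// call signatures listed for the "assert" module above (plain call, equality
// helpers, throws/doesNotThrow).
// @flow
const assert = require('assert');

assert(1 + 1 === 2, 'basic truthiness check');
assert.strictEqual('flow'.length, 4);
assert.deepStrictEqual({a: [1, 2]}, {a: [1, 2]});
assert.throws(() => { throw new TypeError('boom'); }, TypeError);
assert.doesNotThrow(() => JSON.parse('{}'));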
+}; declare class $SymbolReplModeMagic mixins Symbol {} declare class $SymbolReplModeSloppy mixins Symbol {} @@ -2126,18 +2540,19 @@ declare module 'repl' { declare function start(prompt: string): REPLServer; declare function start(options: { - prompt?: string; - input?: stream$Readable; - output?: stream$Writable; + prompt?: string, + input?: stream$Readable, + output?: stream$Writable, terminal?: boolean, - eval?: Function; - useColors?: boolean; - useGlobal?: boolean; - ignoreUndefined?: boolean; - writer?: (object: any, options?: util$InspectOptions) => string; - completer?: readline$InterfaceCompleter; - replMode?: $SymbolReplModeMagic | $SymbolReplModeSloppy | $SymbolReplModeStrict; - breakEvalOnSigint?: boolean; + eval?: Function, + useColors?: boolean, + useGlobal?: boolean, + ignoreUndefined?: boolean, + writer?: (object: any, options?: util$InspectOptions) => string, + completer?: readline$InterfaceCompleter, + replMode?: $SymbolReplModeMagic | $SymbolReplModeSloppy | $SymbolReplModeStrict, + breakEvalOnSigint?: boolean, + ... }): REPLServer; declare class Recoverable extends SyntaxError { @@ -2149,11 +2564,13 @@ declare module 'repl' { type process$CPUUsage = { user: number, - system: number + system: number, + ... } declare class Process extends events$EventEmitter { abort() : void; + allowedNodeEnvironmentFlags: Set; arch : string; argv : Array; chdir(directory : string) : void; @@ -2163,15 +2580,15 @@ declare class Process extends events$EventEmitter { cwd() : string; disconnect? : () => void; domain? : domain$Domain; - env : { [key: string] : ?string }; + env : { [key: string] : ?string, ... }; emitWarning(warning: string | Error): void; - emitWarning(warning: string, typeOrCtor: string | Function): void; - emitWarning(warning: string, type: string, codeOrCtor: string | Function): void; + emitWarning(warning: string, typeOrCtor: string | (...empty) => mixed): void; + emitWarning(warning: string, type: string, codeOrCtor: string | (...empty) => mixed): void; emitWarning( warning: string, type: string, code: string, - ctor?: Function + ctor?: (...empty) => mixed ): void; execArgv : Array; execPath : string; @@ -2187,20 +2604,22 @@ declare class Process extends events$EventEmitter { kill(pid : number, signal? : string | number) : void; mainModule : Object; memoryUsage() : { - rss : number; - heapTotal : number; - heapUsed : number; - external : number; + rss : number, + heapTotal : number, + heapUsed : number, + external : number, + ... }; nextTick: (cb: (...T) => mixed, ...T) => void; pid : number; platform : string; release : { - name : string; - lts? : string; - sourceUrl : string; - headersUrl : string; - libUrl : string; + name : string, + lts? : string, + sourceUrl : string, + headersUrl : string, + libUrl : string, + ... }; send? : (message : any, sendHandleOrCallback? : net$Socket | net$Server | Function, @@ -2218,9 +2637,10 @@ declare class Process extends events$EventEmitter { uptime() : number; version : string; versions : { - node : string; - v8 : string; - [key: string] : ?string; + [key: string] : ?string, + node : string, + v8 : string, + ... }; } declare var process: Process; diff --git a/lib/react-dom.js b/lib/react-dom.js index 8d76dc2b319..e7471326e20 100644 --- a/lib/react-dom.js +++ b/lib/react-dom.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
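// Editor's note: usage sketch only, not part of the patch. It touches the
// `process` members whose types were updated in the node.js section above:
// `env` values are nullable, `memoryUsage()` and `versions` are inexact object
// types, and `emitWarning` accepts a warning plus an optional type string.
// @flow
const nodeEnv: ?string = process.env.NODE_ENV; // may be undefined
if (nodeEnv != null) console.log('NODE_ENV =', nodeEnv);

const {heapUsed, heapTotal} = process.memoryUsage();
console.log(`heap: ${heapUsed} of ${heapTotal} bytes`);

process.emitWarning('illustrative warning', 'ExampleWarning');
console.log('node', process.versions.node, 'v8', process.versions.v8);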
@@ -55,14 +55,16 @@ declare module 'react-dom/server' { declare function renderToString(element: React$Node): string; declare function renderToStaticMarkup(element: React$Node): string; declare function renderToNodeStream(element: React$Node): stream$Readable; - declare function renderToStaticNodeStream(element: React$Node): stream$Readable; + declare function renderToStaticNodeStream( + element: React$Node, + ): stream$Readable; declare var version: string; } +type Thenable = { then(resolve: () => mixed, reject?: () => mixed): mixed, ... }; + declare module 'react-dom/test-utils' { - declare var Simulate: { - [eventName: string]: (element: Element, eventData?: Object) => void, - }; + declare var Simulate: { [eventName: string]: (element: Element, eventData?: Object) => void, ... }; declare function renderIntoDocument( instance: React$Element, ): React$Component; @@ -111,9 +113,10 @@ declare module 'react-dom/test-utils' { tree: React$Component, componentClass: React$ElementType, ): ?React$Component; + declare function act(callback: () => void | Thenable): Thenable; } -declare class SyntheticEvent<+T: EventTarget = EventTarget> { +declare class SyntheticEvent<+T: EventTarget = EventTarget, +E: Event = Event> { bubbles: boolean; cancelable: boolean; +currentTarget: T; @@ -122,7 +125,7 @@ declare class SyntheticEvent<+T: EventTarget = EventTarget> { isDefaultPrevented(): boolean; isPropagationStopped(): boolean; isTrusted: boolean; - nativeEvent: Event; + nativeEvent: E; preventDefault(): void; stopPropagation(): void; // This should not be `T`. Use `currentTarget` instead. See: @@ -162,7 +165,8 @@ declare class SyntheticInputEvent< declare class SyntheticUIEvent< +T: EventTarget = EventTarget, -> extends SyntheticEvent { + +E: Event = Event, +> extends SyntheticEvent { detail: number; view: any; } @@ -175,11 +179,11 @@ declare class SyntheticFocusEvent< declare class SyntheticKeyboardEvent< +T: EventTarget = EventTarget, -> extends SyntheticUIEvent { +> extends SyntheticUIEvent { altKey: boolean; charCode: number; ctrlKey: boolean; - getModifierState: any; + getModifierState(keyArg?: string): boolean; key: string; keyCode: number; locale: string; @@ -192,14 +196,15 @@ declare class SyntheticKeyboardEvent< declare class SyntheticMouseEvent< +T: EventTarget = EventTarget, -> extends SyntheticUIEvent { + +E: Event = MouseEvent, +> extends SyntheticUIEvent { altKey: boolean; button: number; buttons: number; clientX: number; clientY: number; ctrlKey: boolean; - getModifierState: any; + getModifierState(keyArg: string): boolean; metaKey: boolean; pageX: number; pageY: number; @@ -211,13 +216,13 @@ declare class SyntheticMouseEvent< declare class SyntheticDragEvent< +T: EventTarget = EventTarget, -> extends SyntheticMouseEvent { +> extends SyntheticMouseEvent { dataTransfer: any; } declare class SyntheticWheelEvent< +T: EventTarget = EventTarget, -> extends SyntheticMouseEvent { +> extends SyntheticMouseEvent { deltaMode: number; deltaX: number; deltaY: number; @@ -226,7 +231,7 @@ declare class SyntheticWheelEvent< declare class SyntheticPointerEvent< +T: EventTarget = EventTarget, -> extends SyntheticMouseEvent { +> extends SyntheticMouseEvent { pointerId: number; width: number; height: number; @@ -241,7 +246,7 @@ declare class SyntheticPointerEvent< declare class SyntheticTouchEvent< +T: EventTarget = EventTarget, -> extends SyntheticUIEvent { +> extends SyntheticUIEvent { altKey: boolean; changedTouches: any; ctrlKey: boolean; @@ -262,14 +267,32 @@ declare class SyntheticTransitionEvent< // 
prettier-ignore declare type $JSXIntrinsics = { + // Catch-all for custom elements. + [string]: ReactDOM$HTMLElementJSXIntrinsic, // HTML - a: {instance: HTMLAnchorElement, props: {children?: React$Node, [key: string]: any}}, + a: { + instance: HTMLAnchorElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, abbr: ReactDOM$HTMLElementJSXIntrinsic, address: ReactDOM$HTMLElementJSXIntrinsic, area: ReactDOM$HTMLElementJSXIntrinsic, article: ReactDOM$HTMLElementJSXIntrinsic, aside: ReactDOM$HTMLElementJSXIntrinsic, - audio: {instance: HTMLAudioElement, props: {children?: React$Node, [key: string]: any}}, + audio: { + instance: HTMLAudioElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, b: ReactDOM$HTMLElementJSXIntrinsic, base: ReactDOM$HTMLElementJSXIntrinsic, bdi: ReactDOM$HTMLElementJSXIntrinsic, @@ -277,10 +300,42 @@ declare type $JSXIntrinsics = { big: ReactDOM$HTMLElementJSXIntrinsic, blockquote: ReactDOM$HTMLElementJSXIntrinsic, body: ReactDOM$HTMLElementJSXIntrinsic, - br: {instance: HTMLBRElement, props: {children?: React$Node, [key: string]: any}}, - button: {instance: HTMLButtonElement, props: {children?: React$Node, [key: string]: any}}, - canvas: {instance: HTMLCanvasElement, props: {children?: React$Node, [key: string]: any}}, - caption: {instance: HTMLTableCaptionElement, props: {children?: React$Node, [key: string]: any}}, + br: { + instance: HTMLBRElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + button: { + instance: HTMLButtonElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + canvas: { + instance: HTMLCanvasElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + caption: { + instance: HTMLTableCaptionElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, cite: ReactDOM$HTMLElementJSXIntrinsic, code: ReactDOM$HTMLElementJSXIntrinsic, col: ReactDOM$HTMLElementJSXIntrinsic, @@ -289,58 +344,250 @@ declare type $JSXIntrinsics = { datalist: ReactDOM$HTMLElementJSXIntrinsic, dd: ReactDOM$HTMLElementJSXIntrinsic, del: ReactDOM$HTMLElementJSXIntrinsic, - details: {instance: HTMLDetailsElement, props: {children?: React$Node, [key: string]: any}}, + details: { + instance: HTMLDetailsElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, dfn: ReactDOM$HTMLElementJSXIntrinsic, dialog: ReactDOM$HTMLElementJSXIntrinsic, - div: {instance: HTMLDivElement, props: {children?: React$Node, [key: string]: any}}, - dl: {instance: HTMLDListElement, props: {children?: React$Node, [key: string]: any}}, + div: { + instance: HTMLDivElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + dl: { + instance: HTMLDListElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, dt: ReactDOM$HTMLElementJSXIntrinsic, em: ReactDOM$HTMLElementJSXIntrinsic, embed: ReactDOM$HTMLElementJSXIntrinsic, - fieldset: {instance: HTMLFieldSetElement, props: {children?: React$Node, [key: string]: any}}, + fieldset: { + instance: HTMLFieldSetElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... 
+ }, figcaption: ReactDOM$HTMLElementJSXIntrinsic, figure: ReactDOM$HTMLElementJSXIntrinsic, footer: ReactDOM$HTMLElementJSXIntrinsic, - form: {instance: HTMLFormElement, props: {children?: React$Node, [key: string]: any}}, - h1: {instance: HTMLHeadingElement, props: {children?: React$Node, [key: string]: any}}, - h2: {instance: HTMLHeadingElement, props: {children?: React$Node, [key: string]: any}}, - h3: {instance: HTMLHeadingElement, props: {children?: React$Node, [key: string]: any}}, - h4: {instance: HTMLHeadingElement, props: {children?: React$Node, [key: string]: any}}, - h5: {instance: HTMLHeadingElement, props: {children?: React$Node, [key: string]: any}}, - h6: {instance: HTMLHeadingElement, props: {children?: React$Node, [key: string]: any}}, + form: { + instance: HTMLFormElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + h1: { + instance: HTMLHeadingElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + h2: { + instance: HTMLHeadingElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + h3: { + instance: HTMLHeadingElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + h4: { + instance: HTMLHeadingElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + h5: { + instance: HTMLHeadingElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + h6: { + instance: HTMLHeadingElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, head: ReactDOM$HTMLElementJSXIntrinsic, header: ReactDOM$HTMLElementJSXIntrinsic, hgroup: ReactDOM$HTMLElementJSXIntrinsic, - hr: {instance: HTMLHRElement, props: {children?: React$Node, [key: string]: any}}, + hr: { + instance: HTMLHRElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, html: ReactDOM$HTMLElementJSXIntrinsic, i: ReactDOM$HTMLElementJSXIntrinsic, - iframe: {instance: HTMLIFrameElement, props: {children?: React$Node, [key: string]: any}}, - img: {instance: HTMLImageElement, props: {children?: React$Node, [key: string]: any}}, + iframe: { + instance: HTMLIFrameElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + img: { + instance: HTMLImageElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, ins: ReactDOM$HTMLElementJSXIntrinsic, kbd: ReactDOM$HTMLElementJSXIntrinsic, keygen: ReactDOM$HTMLElementJSXIntrinsic, - label: {instance: HTMLLabelElement, props: {children?: React$Node, [key: string]: any}}, - legend: {instance: HTMLLegendElement, props: {children?: React$Node, [key: string]: any}}, - li: {instance: HTMLLIElement, props: {children?: React$Node, [key: string]: any}}, - link: {instance: HTMLLinkElement, props: {children?: React$Node, [key: string]: any}}, + label: { + instance: HTMLLabelElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + legend: { + instance: HTMLLegendElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + li: { + instance: HTMLLIElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + link: { + instance: HTMLLinkElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... 
+ }, main: ReactDOM$HTMLElementJSXIntrinsic, map: ReactDOM$HTMLElementJSXIntrinsic, mark: ReactDOM$HTMLElementJSXIntrinsic, menu: ReactDOM$HTMLElementJSXIntrinsic, menuitem: ReactDOM$HTMLElementJSXIntrinsic, - meta: {instance: HTMLMetaElement, props: {children?: React$Node, [key: string]: any}}, + meta: { + instance: HTMLMetaElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, meter: ReactDOM$HTMLElementJSXIntrinsic, nav: ReactDOM$HTMLElementJSXIntrinsic, noscript: ReactDOM$HTMLElementJSXIntrinsic, object: ReactDOM$HTMLElementJSXIntrinsic, - ol: {instance: HTMLOListElement, props: {children?: React$Node, [key: string]: any}}, - optgroup: {instance: HTMLOptGroupElement, props: {children?: React$Node, [key: string]: any}}, - option: {instance: HTMLOptionElement, props: {children?: React$Node, [key: string]: any}}, + ol: { + instance: HTMLOListElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + optgroup: { + instance: HTMLOptGroupElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + option: { + instance: HTMLOptionElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, output: ReactDOM$HTMLElementJSXIntrinsic, - p: {instance: HTMLParagraphElement, props: {children?: React$Node, [key: string]: any}}, + p: { + instance: HTMLParagraphElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, param: ReactDOM$HTMLElementJSXIntrinsic, picture: ReactDOM$HTMLElementJSXIntrinsic, - pre: {instance: HTMLPreElement, props: {children?: React$Node, [key: string]: any}}, + pre: { + instance: HTMLPreElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, progress: ReactDOM$HTMLElementJSXIntrinsic, q: ReactDOM$HTMLElementJSXIntrinsic, rp: ReactDOM$HTMLElementJSXIntrinsic, @@ -348,30 +595,134 @@ declare type $JSXIntrinsics = { ruby: ReactDOM$HTMLElementJSXIntrinsic, s: ReactDOM$HTMLElementJSXIntrinsic, samp: ReactDOM$HTMLElementJSXIntrinsic, - script: {instance: HTMLScriptElement, props: {children?: React$Node, [key: string]: any}}, + script: { + instance: HTMLScriptElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, section: ReactDOM$HTMLElementJSXIntrinsic, small: ReactDOM$HTMLElementJSXIntrinsic, - source: {instance: HTMLSourceElement, props: {children?: React$Node, [key: string]: any}}, - span: {instance: HTMLSpanElement, props: {children?: React$Node, [key: string]: any}}, + source: { + instance: HTMLSourceElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + span: { + instance: HTMLSpanElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, strong: ReactDOM$HTMLElementJSXIntrinsic, - style: {instance: HTMLStyleElement, props: {children?: React$Node, [key: string]: any}}, + style: { + instance: HTMLStyleElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... 
+ }, sub: ReactDOM$HTMLElementJSXIntrinsic, summary: ReactDOM$HTMLElementJSXIntrinsic, sup: ReactDOM$HTMLElementJSXIntrinsic, - table: {instance: HTMLTableElement, props: {children?: React$Node, [key: string]: any}}, - tbody: {instance: HTMLTableSectionElement, props: {children?: React$Node, [key: string]: any}}, - td: {instance: HTMLTableCellElement, props: {children?: React$Node, [key: string]: any}}, - tfoot: {instance: HTMLTableSectionElement, props: {children?: React$Node, [key: string]: any}}, - th: {instance: HTMLTableCellElement, props: {children?: React$Node, [key: string]: any}}, - thead: {instance: HTMLTableSectionElement, props: {children?: React$Node, [key: string]: any}}, + table: { + instance: HTMLTableElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + tbody: { + instance: HTMLTableSectionElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + td: { + instance: HTMLTableCellElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + tfoot: { + instance: HTMLTableSectionElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + th: { + instance: HTMLTableCellElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + thead: { + instance: HTMLTableSectionElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, time: ReactDOM$HTMLElementJSXIntrinsic, title: ReactDOM$HTMLElementJSXIntrinsic, - tr: {instance: HTMLTableRowElement, props: {children?: React$Node, [key: string]: any}}, + tr: { + instance: HTMLTableRowElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, track: ReactDOM$HTMLElementJSXIntrinsic, u: ReactDOM$HTMLElementJSXIntrinsic, - ul: {instance: HTMLUListElement, props: {children?: React$Node, [key: string]: any}}, + ul: { + instance: HTMLUListElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, 'var': ReactDOM$HTMLElementJSXIntrinsic, - video: {instance: HTMLVideoElement, props: {children?: React$Node, [key: string]: any}}, + video: { + instance: HTMLVideoElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, wbr: ReactDOM$HTMLElementJSXIntrinsic, // SVG svg: ReactDOM$SVGElementJSXIntrinsic, @@ -396,19 +747,52 @@ declare type $JSXIntrinsics = { tspan: ReactDOM$SVGElementJSXIntrinsic, use: ReactDOM$SVGElementJSXIntrinsic, // Elements React adds extra props for. - input: {instance: HTMLInputElement, props: {children?: React$Node, [key: string]: any}}, - textarea: {instance: HTMLTextAreaElement, props: {children?: React$Node, [key: string]: any}}, - select: {instance: HTMLSelectElement, props: {children?: React$Node, [key: string]: any}}, - // Catch-all for custom elements. - [string]: ReactDOM$HTMLElementJSXIntrinsic, + input: { + instance: HTMLInputElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + textarea: { + instance: HTMLTextAreaElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + select: { + instance: HTMLSelectElement, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... + }, + ... }; type ReactDOM$HTMLElementJSXIntrinsic = { instance: HTMLElement, - props: {children?: React$Node, [key: string]: any}, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... 
}; type ReactDOM$SVGElementJSXIntrinsic = { instance: Element, - props: {children?: React$Node, [key: string]: any}, + props: { + [key: string]: any, + children?: React$Node, + ... + }, + ... }; diff --git a/lib/react.js b/lib/react.js index c59257db22d..103a1f08a46 100644 --- a/lib/react.js +++ b/lib/react.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -32,7 +32,7 @@ declare class React$Component { // action methods setState( - partialState: $Shape | ((State, Props) => $Shape | void), + partialState: ?$Shape | ((State, Props) => ?$Shape), callback?: () => mixed, ): void; @@ -76,9 +76,7 @@ declare class React$Component { componentWillUnmount(): mixed; componentDidCatch( error: Error, - info: { - componentStack: string, - } + info: { componentStack: string, ... } ): mixed; // long tail of other stuff not modeled very well @@ -89,7 +87,7 @@ declare class React$Component { static displayName?: ?string; static childContextTypes: any; static contextTypes: any; - static propTypes: $Subtype<{[_: $Keys]: any}>; + static propTypes: any; // We don't add a type for `defaultProps` so that its type may be entirely // inferred when we diff the type for `defaultProps` with `Props`. Otherwise @@ -129,6 +127,14 @@ declare class LegacyReactComponent state: State; } +declare type React$AbstractComponentStatics = { + displayName?: ?string, + // This is only on function components, but trying to access name when + // displayName is undefined is a common pattern. + name?: ?string, + ... +}; + /** * The type of a stateless functional component. In most cases these components * are a single function. However, they may have some static properties that we @@ -137,8 +143,9 @@ declare class LegacyReactComponent declare type React$StatelessFunctionalComponent = { (props: Props, context: any): React$Node, displayName?: ?string, - propTypes?: $Subtype<{[_: $Keys]: any}>, - contextTypes?: any + propTypes?: any, + contextTypes?: any, + ... }; /** @@ -149,9 +156,7 @@ declare type React$StatelessFunctionalComponent = { * - ES6 class component. Components with state defined either using the ES6 * class syntax, or with the legacy `React.createClass()` helper. */ -declare type React$ComponentType = - | React$StatelessFunctionalComponent - | Class>; +declare type React$ComponentType<-Config> = React$AbstractComponent; /** * The type of an element in React. A React element may be a: @@ -163,8 +168,7 @@ declare type React$ComponentType = */ declare type React$ElementType = | string - | React$StatelessFunctionalComponent - | Class>; + | React$AbstractComponent; /** * Type of a React element. React elements are commonly created using JSX @@ -177,6 +181,8 @@ declare type React$Element<+ElementType: React$ElementType> = {| +ref: any, |}; +declare type React$MixedElement = React$Element; + /** * The type of the key that React uses to determine where items in a new list * have moved. @@ -187,17 +193,24 @@ declare type React$Key = string | number; * The type of the ref prop available on all React components. */ declare type React$Ref = - | {current: React$ElementRef | null} + | { -current: React$ElementRef | null, ... } | ((React$ElementRef | null) => mixed) - | string; + | number | string; /** * The type of a React Context. React Contexts are created by calling * createContext() with a default value. 
*/ declare type React$Context = { - Provider: React$ComponentType<{ value: T, children?: ?React$Node }>, - Consumer: React$ComponentType<{ children: (value: T) => ?React$Node }>, + Provider: React$ComponentType<{ + value: T, + children?: React$Node, + ... + }>, + Consumer: React$ComponentType<{ children: (value: T) => ?React$Node, ... }>, + // Optional, user-specified value for custom display label in React DevTools. + displayName?: string, + ... } /** @@ -212,7 +225,7 @@ declare module react { declare export var version: string; declare export function checkPropTypes( - propTypes: $Subtype<{[_: $Keys]: ReactPropsCheckType}>, + propTypes : any, values: V, location: string, componentName: string, @@ -222,14 +235,15 @@ declare module react { declare export var createClass: React$CreateClass; declare export function createContext( defaultValue: T, + calculateChangedBits: ?(a: T, b: T) => number, ): React$Context; declare export var createElement: React$CreateElement; declare export var cloneElement: React$CloneElement; declare export function createFactory( type: ElementType, ): React$ElementFactory; - declare export function createRef( - ): {current: null | React$ElementRef}; + declare export function createRef( + ): {|current: null | T|}; declare export function isValidElement(element: any): boolean; @@ -237,19 +251,33 @@ declare module react { declare export var PureComponent: typeof React$PureComponent; declare export type StatelessFunctionalComponent

<P> = React$StatelessFunctionalComponent<P>;
-  declare export type ComponentType<P> = React$ComponentType<P>;
+  declare export type ComponentType<-P> = React$ComponentType<P>

; + declare export type AbstractComponent< + -Config, + +Instance = mixed, + > = React$AbstractComponent; + declare export type MixedElement = React$MixedElement; declare export type ElementType = React$ElementType; declare export type Element<+C> = React$Element; - declare export var Fragment: ({children: ?React$Node}) => React$Node; + declare export var Fragment: ({ children?: React$Node, ... }) => React$Node; declare export type Key = React$Key; declare export type Ref = React$Ref; declare export type Node = React$Node; declare export type Context = React$Context; declare export type Portal = React$Portal; + declare export var ConcurrentMode: ({ children?: React$Node, ... }) => React$Node; // 16.7+ + declare export var StrictMode: ({ children?: React$Node, ... }) => React$Node; + + declare export var Suspense: React$ComponentType<{ + children?: React$Node, + fallback?: React$Node, + ... + }>; // 16.6+ declare export type ElementProps = React$ElementProps; declare export type ElementConfig = React$ElementConfig; declare export type ElementRef = React$ElementRef; + declare export type Config = React$Config; declare export type ChildrenArray<+T> = $ReadOnlyArray> | T; declare export var Children: { @@ -257,33 +285,148 @@ declare module react { children: ChildrenArray, fn: (child: $NonMaybeType, index: number) => U, thisArg?: mixed, - ): Array<$NonMaybeType>; + ): Array<$NonMaybeType>, forEach( children: ChildrenArray, fn: (child: T, index: number) => mixed, thisArg?: mixed, - ): void; - count(children: ChildrenArray): number; - only(children: ChildrenArray): $NonMaybeType; - toArray(children: ChildrenArray): Array<$NonMaybeType>; + ): void, + count(children: ChildrenArray): number, + only(children: ChildrenArray): $NonMaybeType, + toArray(children: ChildrenArray): Array<$NonMaybeType>, + ... + }; + + declare export function forwardRef( + render: ( + props: Config, + ref: { current: null | Instance, ... } | ((null | Instance) => mixed), + ) => React$Node, + ): React$AbstractComponent; + + declare export function memo( + component: React$AbstractComponent, + equal?: (Config, Config) => boolean, + ): React$AbstractComponent; + + declare export function lazy( + component: () => Promise<{ default: React$AbstractComponent, ... 
}>, + ): React$AbstractComponent; + + declare type MaybeCleanUpFn = void | (() => void); + + declare export function useContext( + context: React$Context, + observedBits: void | number | boolean, + ): T; + + declare export function useState( + initialState: (() => S) | S, + ): [S, ((S => S) | S) => void]; + + declare type Dispatch = (A) => void; + + declare export function useReducer( + reducer: (S, A) => S, + initialState: S, + ): [S, Dispatch]; + + declare export function useReducer( + reducer: (S, A) => S, + initialState: S, + init: void, + ): [S, Dispatch]; + + declare export function useReducer( + reducer: (S, A) => S, + initialArg: I, + init: (I) => S, + ): [S, Dispatch]; + + declare export function useRef(initialValue: T): {|current: T|}; + + declare export function useDebugValue(value: any): void; + + declare export function useEffect( + create: () => MaybeCleanUpFn, + inputs: ?$ReadOnlyArray, + ): void; + + declare export function useLayoutEffect( + create: () => MaybeCleanUpFn, + inputs: ?$ReadOnlyArray, + ): void; + + declare export function useCallback) => mixed>( + callback: T, + inputs: ?$ReadOnlyArray, + ): T; + + declare export function useMemo( + create: () => T, + inputs: ?$ReadOnlyArray, + ): T; + + declare export function useImperativeHandle( + ref: { current: T | null, ... } | ((inst: T | null) => mixed) | null | void, + create: () => T, + inputs: ?$ReadOnlyArray, + ): void; + + declare export type Interaction = { + name: string, + timestamp: number, + ... }; + declare type ProfilerOnRenderFnType = ( + id: string, + phase: "mount" | "update", + actualDuration: number, + baseDuration: number, + startTime: number, + commitTime: number, + interactions: Set, + ) => void; + + declare export var Profiler: React$AbstractComponent<{| + children?: React$Node, + id: string, + onRender: ProfilerOnRenderFnType, + |}, void>; + declare export default {| +DOM: typeof DOM, +PropTypes: typeof PropTypes, +version: typeof version, +checkPropTypes: typeof checkPropTypes, + +memo: typeof memo, + +lazy: typeof lazy, +createClass: typeof createClass, +createContext: typeof createContext, +createElement: typeof createElement, +cloneElement: typeof cloneElement, +createFactory: typeof createFactory, +createRef: typeof createRef, + +forwardRef: typeof forwardRef, +isValidElement: typeof isValidElement, +Component: typeof Component, +PureComponent: typeof PureComponent, +Fragment: typeof Fragment, +Children: typeof Children, + +ConcurrentMode: typeof ConcurrentMode, + +StrictMode: typeof StrictMode, + +Profiler: typeof Profiler, + +Suspense: typeof Suspense, + +useContext: typeof useContext, + +useState: typeof useState, + +useReducer: typeof useReducer, + +useRef: typeof useRef, + +useEffect: typeof useEffect, + +useLayoutEffect: typeof useLayoutEffect, + +useCallback: typeof useCallback, + +useMemo: typeof useMemo, + +useImperativeHandle: typeof useImperativeHandle, |}; } @@ -300,8 +443,9 @@ type ReactPropsCheckType = ( href?: string) => ?Error; type ReactPropsChainableTypeChecker = { - isRequired: ReactPropsCheckType; - (props: any, propName: string, componentName: string, href?: string): ?Error; + (props: any, propName: string, componentName: string, href?: string): ?Error, + isRequired: ReactPropsCheckType, + ... 
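// Editor's note: usage sketch only, not part of the patch. It shows the hook
// signatures declared in the "react" module above (useState, useRef,
// useEffect) from a Flow-typed function component; `Counter` is hypothetical
// and createElement is used instead of JSX to stay self-contained.
// @flow
import * as React from 'react';

function Counter(props: {| label: string |}): React.Node {
  const [count, setCount] = React.useState<number>(0);
  const effectRuns = React.useRef<number>(0);

  // Re-runs only when `count` or `label` change, per the deps array.
  React.useEffect(() => {
    effectRuns.current += 1;
    document.title = `${props.label}: ${count}`;
  }, [count, props.label]);

  return React.createElement(
    'button',
    {onClick: () => setCount(c => c + 1)},
    `${props.label}: ${count}`,
  );
}

export default Counter;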
}; type React$PropTypes$arrayOf = @@ -316,23 +460,26 @@ type React$PropTypes$oneOfType = (arrayOfTypeCheckers: Array) => ReactPropsChainableTypeChecker; type React$PropTypes$shape = - (shapeTypes: { [key: string]: ReactPropsCheckType }) => + (shapeTypes: { [key: string]: ReactPropsCheckType, ... }) => ReactPropsChainableTypeChecker; type ReactPropTypes = { - array: React$PropType$Primitive>; - bool: React$PropType$Primitive; - func: React$PropType$Primitive; - number: React$PropType$Primitive; - object: React$PropType$Primitive; - string: React$PropType$Primitive; - any: React$PropType$Primitive; - arrayOf: React$PropType$ArrayOf; - element: React$PropType$Primitive; /* TODO */ - instanceOf: React$PropType$InstanceOf; - node: React$PropType$Primitive; /* TODO */ - objectOf: React$PropType$ObjectOf; - oneOf: React$PropType$OneOf; - oneOfType: React$PropType$OneOfType; - shape: React$PropType$Shape; + array: React$PropType$Primitive>, + bool: React$PropType$Primitive, + func: React$PropType$Primitive, + number: React$PropType$Primitive, + object: React$PropType$Primitive, + string: React$PropType$Primitive, + any: React$PropType$Primitive, + arrayOf: React$PropType$ArrayOf, + element: React$PropType$Primitive, + /* TODO */ + instanceOf: React$PropType$InstanceOf, + node: React$PropType$Primitive, + /* TODO */ + objectOf: React$PropType$ObjectOf, + oneOf: React$PropType$OneOf, + oneOfType: React$PropType$OneOfType, + shape: React$PropType$Shape, + ... } diff --git a/lib/serviceworkers.js b/lib/serviceworkers.js index 5a41b61f9a9..bfb0a950aff 100644 --- a/lib/serviceworkers.js +++ b/lib/serviceworkers.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -30,6 +30,7 @@ declare class ExtendableEvent extends Event { type ForeignFetchOptions = { scopes: Iterator, origins: Iterator, + ... }; declare class InstallEvent extends ExtendableEvent { @@ -49,6 +50,7 @@ type ClientQueryOptions = { includeUncontrolled?: boolean, includeReserved?: boolean, type?: ClientType, + ... }; declare class Clients { @@ -85,13 +87,44 @@ declare class NavigationPreloadManager { getState: Promise, } -declare class ServiceWorkerRegistration extends EventTarget { - installing: ?ServiceWorker, - waiting: ?ServiceWorker, - active: ?ServiceWorker, - navigationPreload: NavigationPreloadManager, +type PushSubscriptionOptions = { + userVisibleOnly?: boolean, + applicationServerKey?: string | ArrayBuffer | $ArrayBufferView, + ... +} + +declare class PushSubscriptionJSON { + endpoint: string, + expirationTime: number | null, + keys: { [string]: string, ... 
}; +} + +declare class PushSubscription { + +endpoint: string, + +expirationTime: number | null, + +options: PushSubscriptionOptions, + getKey(name: string): ArrayBuffer | null, + toJSON(): PushSubscriptionJSON, + unsubscribe(): Promise, +} + +declare class PushManager { + +supportedContentEncodings: Array, + subscribe(options?: PushSubscriptionOptions): Promise, + getSubscription(): Promise, + permissionState(options?: PushSubscriptionOptions): Promise<'granted' | 'denied' | 'prompt'>, +} + +type ServiceWorkerUpdateViaCache = 'imports' | 'all' | 'none'; - scope: string, +declare class ServiceWorkerRegistration extends EventTarget { + +installing: ?ServiceWorker, + +waiting: ?ServiceWorker, + +active: ?ServiceWorker, + +navigationPreload: NavigationPreloadManager, + +scope: string, + +updateViaCache: ServiceWorkerUpdateViaCache, + +pushManager: PushManager, update(): Promise, unregister(): Promise, @@ -102,29 +135,43 @@ declare class ServiceWorkerRegistration extends EventTarget { type WorkerType = 'classic' | 'module'; type RegistrationOptions = { - scope: string, - type: WorkerType, + scope?: string, + type?: WorkerType, + updateViaCache?: ServiceWorkerUpdateViaCache, + ... }; declare class ServiceWorkerContainer extends EventTarget { - controller: ?ServiceWorker, - ready: Promise, + +controller: ?ServiceWorker, + +ready: Promise, getRegistration(clientURL: string): Promise, getRegistrations(): Promise>, register( scriptURL: string, - options: ?RegistrationOptions + options?: RegistrationOptions ): Promise, startMessages(): void, oncontrollerchange?: EventHandler, onmessage?: EventHandler, + onmessageerror?: EventHandler, +} + +/** + * This feature has been removed from the Web standards. + */ +declare class ServiceWorkerMessageEvent extends Event { + data: any, + lastEventId: string, + origin: string, + ports: Array, + source: ?(ServiceWorker | MessagePort), } -declare class ServiceWorkerMessageEvent { +declare class ExtendableMessageEvent extends ExtendableEvent { data: any, - lastEventId :string, + lastEventId: string, origin: string, ports: Array, source: ?(ServiceWorker | MessagePort), @@ -135,6 +182,7 @@ type CacheQueryOptions = { ignoreMethod?: boolean, ignoreVary?: boolean, cacheName?: string, + ... } declare class Cache { diff --git a/lib/streams.js b/lib/streams.js index c8f9c85464f..290f99ee674 100644 --- a/lib/streams.js +++ b/lib/streams.js @@ -1,11 +1,11 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ -type TextEncodeOptions = {options?: boolean}; +type TextEncodeOptions = { options?: boolean, ... }; declare class TextEncoder { encode(buffer: string, options?: TextEncodeOptions): Uint8Array, @@ -51,7 +51,11 @@ declare class ReadableStreamReader { closed: boolean, cancel(reason: string): void, - read(): Promise<{value: ?any, done: boolean}>, + read(): Promise<{ + value: ?any, + done: boolean, + ... + }>, releaseLock(): void, } @@ -73,12 +77,13 @@ type PipeToOptions = { preventClose?: boolean, preventAbort?: boolean, preventCancel?: boolean, + ... }; type QueuingStrategy = { highWaterMark: number, - size(chunk: ?any): number, + ... 
}; declare class ReadableStream { @@ -92,7 +97,7 @@ declare class ReadableStream { cancel(reason: string): void, getReader(): ReadableStreamReader, pipeThrough(transform: TransformStream, options: ?any): void, - pipeTo(dest: WritableStream, options: ?PipeToOptions): void, + pipeTo(dest: WritableStream, options: ?PipeToOptions): Promise, tee(): [ReadableStream, ReadableStream], }; diff --git a/lib/webassembly.js b/lib/webassembly.js index 3b205473660..72e7d7a1973 100644 --- a/lib/webassembly.js +++ b/lib/webassembly.js @@ -5,7 +5,11 @@ type BufferSource = $TypedArray | ArrayBuffer; type ImportExportKind = 'function' | 'table' | 'memory' | 'global'; type ImportObject = Object; -type ResultObject = { module: WebAssembly$Module, instance: WebAssembly$Instance }; +type ResultObject = { + module: WebAssembly$Module, + instance: WebAssembly$Instance, + ... +}; // https://github.com/WebAssembly/design/blob/master/JS.md#exported-function-exotic-objects declare class ExportedFunctionExoticObject extends Function { @@ -15,18 +19,31 @@ declare class ExportedFunctionExoticObject extends Function { declare class WebAssembly$Module { constructor(bufferSource: BufferSource): void; - static exports(moduleObject: WebAssembly$Module): Array<{ name: string, kind: ImportExportKind }>; - static imports(moduleObject: WebAssembly$Module): Array<{ name: string, name: string, kind: ImportExportKind }>; + static exports(moduleObject: WebAssembly$Module): Array<{ + name: string, + kind: ImportExportKind, + ... + }>; + static imports(moduleObject: WebAssembly$Module): Array<{ + name: string, + name: string, + kind: ImportExportKind, + ... + }>; static customSections(moduleObject: WebAssembly$Module, sectionName: string): Array; } declare class WebAssembly$Instance { constructor(moduleObject: WebAssembly$Module, importObject?: ImportObject): void; - +exports: { [exportedFunction: string]: ExportedFunctionExoticObject }; + +exports: { [exportedFunction: string]: ExportedFunctionExoticObject, ... }; } -type MemoryDescriptor = { initial: number, maximum?: number }; +type MemoryDescriptor = { + initial: number, + maximum?: number, + ... +}; declare class WebAssembly$Memory { constructor(memoryDescriptor: MemoryDescriptor): void; @@ -36,7 +53,12 @@ declare class WebAssembly$Memory { grow(delta: number): number; } -type TableDescriptor = { element: 'anyfunc', initial: number, maximum?: number }; +type TableDescriptor = { + element: 'anyfunc', + initial: number, + maximum?: number, + ... 
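// Editor's note: usage sketch only, not part of the patch. It constructs the
// standard WebAssembly Memory and Table globals whose descriptor types appear
// above; the sizes are arbitrary.
// @flow
const memory = new WebAssembly.Memory({initial: 1, maximum: 10});
console.log('memory bytes:', memory.buffer.byteLength); // one 64 KiB page

const table = new WebAssembly.Table({element: 'anyfunc', initial: 2});
console.log('table slots:', table.length);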
+}; declare class WebAssembly$Table { constructor(tableDescriptor: TableDescriptor): void; @@ -56,19 +78,18 @@ declare function WebAssembly$instantiate(bufferSource: BufferSource, importObjec declare function WebAssembly$instantiate(moduleObject: WebAssembly$Module, importObject?: ImportObject): Promise; declare var WebAssembly: { - Module: typeof WebAssembly$Module; - Instance: typeof WebAssembly$Instance; - Memory: typeof WebAssembly$Memory; - Table: typeof WebAssembly$Table; - CompileError: typeof WebAssembly$CompileError; - LinkError: typeof WebAssembly$LinkError; - RuntimeError: typeof WebAssembly$RuntimeError; - - validate(bufferSource: BufferSource): boolean; - compile(bufferSource: BufferSource): Promise; - instantiate: typeof WebAssembly$instantiate; - + Module: typeof WebAssembly$Module, + Instance: typeof WebAssembly$Instance, + Memory: typeof WebAssembly$Memory, + Table: typeof WebAssembly$Table, + CompileError: typeof WebAssembly$CompileError, + LinkError: typeof WebAssembly$LinkError, + RuntimeError: typeof WebAssembly$RuntimeError, + validate(bufferSource: BufferSource): boolean, + compile(bufferSource: BufferSource): Promise, + instantiate: typeof WebAssembly$instantiate, // web embedding API - compileStreaming(source: Response | Promise): Promise; - instantiateStreaming(source: Response | Promise, importObject?: ImportObject): Promise; + compileStreaming(source: Response | Promise): Promise, + instantiateStreaming(source: Response | Promise, importObject?: ImportObject): Promise, + ... } diff --git a/newtests/.flowconfig b/newtests/.flowconfig index 514831bb0f8..4c6c94b30dc 100644 --- a/newtests/.flowconfig +++ b/newtests/.flowconfig @@ -5,6 +5,10 @@ .*/json5/test/parse-cases/.* .*/newtests/gen_flow_files_command/type_error.js .*/newtests/ide/lazy/errorsWithFlowPragma.js +.*/newtests/types-first-ide/fileWithNonLocalError.js +.*/newtests/types-first-lazy-ide1/errorsWithFlowPragma.js +.*/newtests/types-first-lazy-ide2/errorsWithFlowPragma.js +.*/newtests/lsp/completion/jsx.js [options] suppress_comment=.*\\$FlowFixMe diff --git a/newtests/FacebookismIdx/test.js b/newtests/FacebookismIdx/test.js index 0dcede0d36a..741d8244aac 100644 --- a/newtests/FacebookismIdx/test.js +++ b/newtests/FacebookismIdx/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; @@ -291,13 +290,7 @@ export default suite(({addFile, addFiles, addCode}) => [ // Using an annotation obscures the type wrapper mechanism that idx() uses // around the parameter it passes to the callback addCode('(idx({}, (obj: Object) => obj.a.b.c): ?number);\n') - .newErrors( - ` - test.js:6 - 6: (idx({}, (obj: Object) => obj.a.b.c): ?number); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Cannot call \`idx(...)\` because the callback argument must not be annotated. - `, - ), + .noNewErrors(), // Can't do anything with the callback parameter other than get elements and // properties off of it diff --git a/newtests/any_named_import/test.js b/newtests/any_named_import/test.js index bc2684d9746..01935e08445 100644 --- a/newtests/any_named_import/test.js +++ b/newtests/any_named_import/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -61,36 +60,14 @@ export default suite(({addFile, addFiles, addCode}) => [ addFile('flow-typed/lib.js') .addCode('import obj from "object";') .addCode('(obj: string);') - .newErrors( - ` - test.js:5 - 5: (obj: string); - ^^^ Cannot cast \`obj\` to string because object type [1] is incompatible with string [2]. 
- References: - 6: declare module.exports: Object; - ^^^^^^ [1]. See lib: [LIB] flow-typed/lib.js:6 - 5: (obj: string); - ^^^^^^ [2] - `, - ) + .noNewErrors() .because('obj should have the type Object'), ]), test('The cjs require for the object module should be Object', [ addFile('flow-typed/lib.js') .addCode('const obj = require("object");') .addCode('(obj: string);') - .newErrors( - ` - test.js:5 - 5: (obj: string); - ^^^ Cannot cast \`obj\` to string because object type [1] is incompatible with string [2]. - References: - 6: declare module.exports: Object; - ^^^^^^ [1]. See lib: [LIB] flow-typed/lib.js:6 - 5: (obj: string); - ^^^^^^ [2] - `, - ) + .noNewErrors() .because('obj should have the type Object'), ]), test('The namespace import for the object module should be object', [ diff --git a/newtests/array_literal_tuple_spread/test.js b/newtests/array_literal_tuple_spread/test.js index dd13ac18dd8..f4cb0b1d192 100644 --- a/newtests/array_literal_tuple_spread/test.js +++ b/newtests/array_literal_tuple_spread/test.js @@ -325,8 +325,8 @@ export default suite(({addFile, addFiles, addCode}) => [ 3: const arr: Array = [..."hello"]; ^^^^^^^^^^^^ Cannot assign array literal to \`arr\` because string [1] is incompatible with number [2] in array element. References: - 291: @@iterator(): Iterator; - ^^^^^^ [1]. See lib: [LIB] core.js:291 + 321: @@iterator(): Iterator; + ^^^^^^ [1]. See lib: [LIB] core.js:321 3: const arr: Array = [..."hello"]; ^^^^^^ [2] `, diff --git a/newtests/autocomplete/test.js b/newtests/autocomplete/test.js index c9e179e4772..26f7d978467 100644 --- a/newtests/autocomplete/test.js +++ b/newtests/autocomplete/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -14,10 +13,10 @@ export default suite(({addFile, flowCmd}) => [ 'foo_parse_fail.js', ).stdout( ` - hasOwnProperty (prop: any) => boolean - isPrototypeOf (o: any) => boolean + hasOwnProperty (prop: mixed) => boolean + isPrototypeOf (o: mixed) => boolean num number - propertyIsEnumerable (prop: any) => boolean + propertyIsEnumerable (prop: mixed) => boolean str string toLocaleString () => string toString () => string @@ -38,39 +37,39 @@ export default suite(({addFile, flowCmd}) => [ "result": [ { "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 63, - "endline": 63, + "line": 80, + "endline": 80, "start": 5, - "end": 38 + "end": 40 }, { "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "type": "(o: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "o", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 81, + "endline": 81, "start": 5, - "end": 34 + "end": 36 }, { "name": "num", @@ -84,21 +83,21 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 82, + "endline": 82, "start": 5, - "end": 44 + "end": 46 }, { "name": "str", @@ -118,8 +117,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, + "line": 83, + "endline": 83, "start": 5, "end": 28 }, @@ 
-131,8 +130,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 67, - "endline": 67, + "line": 84, + "endline": 84, "start": 5, "end": 22 }, @@ -144,8 +143,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 68, - "endline": 68, + "line": 85, + "endline": 85, "start": 5, "end": 20 } @@ -205,8 +204,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 291, - "endline": 291, + "line": 321, + "endline": 321, "start": 5, "end": 34 }, @@ -223,8 +222,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 292, - "endline": 292, + "line": 322, + "endline": 322, "start": 5, "end": 32 }, @@ -241,8 +240,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 293, - "endline": 293, + "line": 323, + "endline": 323, "start": 5, "end": 31 }, @@ -259,8 +258,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 294, - "endline": 294, + "line": 324, + "endline": 324, "start": 5, "end": 37 }, @@ -277,8 +276,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 295, - "endline": 295, + "line": 325, + "endline": 325, "start": 5, "end": 38 }, @@ -295,8 +294,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 296, - "endline": 296, + "line": 326, + "endline": 326, "start": 5, "end": 45 }, @@ -317,8 +316,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 298, - "endline": 298, + "line": 328, + "endline": 328, "start": 5, "end": 62 }, @@ -339,8 +338,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 299, - "endline": 299, + "line": 329, + "endline": 329, "start": 5, "end": 62 }, @@ -361,8 +360,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 300, - "endline": 300, + "line": 330, + "endline": 330, "start": 5, "end": 60 }, @@ -383,8 +382,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 301, - "endline": 301, + "line": 331, + "endline": 331, "start": 5, "end": 64 }, @@ -393,8 +392,8 @@ export default suite(({addFile, flowCmd}) => [ "type": "number", "func_details": null, "path": "[LIB] core.js", - "line": 325, - "endline": 325, + "line": 358, + "endline": 358, "start": 13, "end": 18 }, @@ -411,8 +410,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 302, - "endline": 302, + "line": 332, + "endline": 332, "start": 5, "end": 30 }, @@ -437,8 +436,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 303, - "endline": 303, + "line": 333, + "endline": 333, "start": 5, "end": 105 }, @@ -455,11 +454,29 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 304, - "endline": 304, + "line": 334, + "endline": 334, "start": 5, "end": 61 }, + { + "name": "matchAll", + "type": "(regexp: (string | RegExp)) => Iterator", + "func_details": { + "return_type": "Iterator", + "params": [ + { + "name": "regexp", + "type": "string | RegExp" + } + ] + }, + "path": "[LIB] core.js", + "line": 335, + "endline": 335, + "start": 5, + "end": 67 + }, { "name": "normalize", "type": "(format?: string) => string", @@ -473,8 +490,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 305, - "endline": 
305, + "line": 336, + "endline": 336, "start": 5, "end": 38 }, @@ -495,8 +512,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 306, - "endline": 306, + "line": 337, + "endline": 337, "start": 5, "end": 60 }, @@ -517,8 +534,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 307, - "endline": 307, + "line": 338, + "endline": 338, "start": 5, "end": 62 }, @@ -535,8 +552,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 308, - "endline": 308, + "line": 339, + "endline": 339, "start": 5, "end": 33 }, @@ -557,8 +574,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 309, - "endline": 309, + "line": 340, + "endline": 340, "start": 5, "end": 124 }, @@ -575,8 +592,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 310, - "endline": 310, + "line": 341, + "endline": 341, "start": 5, "end": 43 }, @@ -597,8 +614,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 311, - "endline": 311, + "line": 342, + "endline": 342, "start": 5, "end": 47 }, @@ -619,8 +636,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 312, - "endline": 312, + "line": 343, + "endline": 343, "start": 5, "end": 69 }, @@ -641,8 +658,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 313, - "endline": 313, + "line": 344, + "endline": 344, "start": 5, "end": 64 }, @@ -663,8 +680,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 314, - "endline": 314, + "line": 345, + "endline": 345, "start": 5, "end": 49 }, @@ -685,8 +702,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 315, - "endline": 315, + "line": 346, + "endline": 346, "start": 5, "end": 50 }, @@ -703,8 +720,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 316, - "endline": 316, + "line": 347, + "endline": 347, "start": 5, "end": 62 }, @@ -721,8 +738,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 317, - "endline": 317, + "line": 348, + "endline": 348, "start": 5, "end": 62 }, @@ -734,8 +751,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 318, - "endline": 318, + "line": 349, + "endline": 349, "start": 5, "end": 25 }, @@ -747,8 +764,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 324, - "endline": 324, + "line": 357, + "endline": 357, "start": 5, "end": 22 }, @@ -760,8 +777,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 319, - "endline": 319, + "line": 350, + "endline": 350, "start": 5, "end": 25 }, @@ -773,11 +790,24 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 320, - "endline": 320, + "line": 351, + "endline": 351, "start": 5, "end": 18 }, + { + "name": "trimEnd", + "type": "() => string", + "func_details": { + "return_type": "string", + "params": [] + }, + "path": "[LIB] core.js", + "line": 352, + "endline": 352, + "start": 5, + "end": 21 + }, { "name": "trimLeft", "type": "() => string", @@ -786,8 +816,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 321, - "endline": 321, + "line": 353, + "endline": 353, 
"start": 5, "end": 22 }, @@ -799,8 +829,21 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 322, - "endline": 322, + "line": 354, + "endline": 354, + "start": 5, + "end": 23 + }, + { + "name": "trimStart", + "type": "() => string", + "func_details": { + "return_type": "string", + "params": [] + }, + "path": "[LIB] core.js", + "line": 355, + "endline": 355, "start": 5, "end": 23 }, @@ -812,8 +855,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 323, - "endline": 323, + "line": 356, + "endline": 356, "start": 5, "end": 21 } @@ -844,8 +887,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 140, - "endline": 140, + "line": 160, + "endline": 160, "start": 5, "end": 50 }, @@ -862,8 +905,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 141, - "endline": 141, + "line": 161, + "endline": 161, "start": 5, "end": 44 }, @@ -884,8 +927,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 142, - "endline": 142, + "line": 162, + "endline": 162, "start": 5, "end": 96 }, @@ -902,8 +945,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 143, - "endline": 143, + "line": 163, + "endline": 163, "start": 5, "end": 43 }, @@ -920,8 +963,8 @@ export default suite(({addFile, flowCmd}) => [ ] }, "path": "[LIB] core.js", - "line": 144, - "endline": 144, + "line": 164, + "endline": 164, "start": 5, "end": 36 }, @@ -933,8 +976,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 145, - "endline": 145, + "line": 165, + "endline": 165, "start": 5, "end": 21 } @@ -960,8 +1003,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 120, - "endline": 120, + "line": 140, + "endline": 140, "start": 5, "end": 22 }, @@ -973,8 +1016,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 119, - "endline": 119, + "line": 139, + "endline": 139, "start": 5, "end": 22 } @@ -1004,57 +1047,57 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 63, - "endline": 63, + "line": 80, + "endline": 80, "start": 5, - "end": 38 + "end": 40 }, { "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "type": "(o: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "o", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 81, + "endline": 81, "start": 5, - "end": 34 + "end": 36 }, { "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 82, + "endline": 82, "start": 5, - "end": 44 + "end": 46 }, { "name": "toLocaleString", @@ -1064,8 +1107,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, + "line": 83, + "endline": 83, "start": 5, "end": 28 }, @@ -1077,8 +1120,8 @@ export default suite(({addFile, 
flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 67, - "endline": 67, + "line": 84, + "endline": 84, "start": 5, "end": 22 }, @@ -1090,8 +1133,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 68, - "endline": 68, + "line": 85, + "endline": 85, "start": 5, "end": 20 } @@ -1105,63 +1148,91 @@ export default suite(({addFile, flowCmd}) => [ flowCmd( ['autocomplete', '--strip-root', '--json', 'object_builtins.js', '4', '5'], 'object_builtins.js', + ).stdout( + ` + { + "error": "not enough type information to autocomplete", + "result": [] + } + `, + ).exitCodes([0]), + + + addFile("function_builtins.js"), + flowCmd( + ['autocomplete', '--strip-root', '--json', 'function_builtins.js', '4', '5'], + 'function_builtins.js', + ).stdout( + ` + { + "error": "not enough type information to autocomplete", + "result": [] + } + `, + ).exitCodes([0]), + + + addFile("fun.js"), + flowCmd( + ['autocomplete', '--strip-root', '--json', 'fun.js', '4', '5'], + 'fun.js', ).stdout( ` { "result": [ { "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 63, - "endline": 63, + "line": 80, + "endline": 80, "start": 5, - "end": 38 + "end": 40 }, { "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "type": "(o: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "o", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 81, + "endline": 81, "start": 5, - "end": 34 + "end": 36 }, { "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 82, + "endline": 82, "start": 5, - "end": 44 + "end": 46 }, { "name": "toLocaleString", @@ -1171,8 +1242,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, + "line": 83, + "endline": 83, "start": 5, "end": 28 }, @@ -1184,8 +1255,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 67, - "endline": 67, + "line": 84, + "endline": 84, "start": 5, "end": 22 }, @@ -1197,8 +1268,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 68, - "endline": 68, + "line": 85, + "endline": 85, "start": 5, "end": 20 } @@ -1208,594 +1279,168 @@ export default suite(({addFile, flowCmd}) => [ ).exitCodes([0]), - addFile("function_builtins.js"), + addFile("this.js"), flowCmd( - ['autocomplete', '--strip-root', '--json', 'function_builtins.js', '4', '5'], - 'function_builtins.js', + ['autocomplete', '--strip-root', '--json', 'this.js', '8', '10'], + 'this.js', ).stdout( ` { "result": [ { - "name": "apply", - "type": "(thisArg: any, argArray?: any) => any", + "name": "bar", + "type": "() => void", "func_details": { - "return_type": "any", - "params": [ - { - "name": "thisArg", - "type": "any" - }, - { - "name": "argArray?", - "type": "any" - } - ] + "return_type": "void", + "params": [] }, - "path": "[LIB] core.js", - "line": 106, - "endline": 106, - "start": 18, - "end": 41 + "path": "this.js", + "line": 6, + "endline": 6, + "start": 3, + "end": 16 }, { - 
"name": "arguments", - "type": "any", + "name": "baz", + "type": "string", "func_details": null, - "path": "[LIB] core.js", - "line": 110, - "endline": 110, - "start": 16, - "end": 18 + "path": "this.js", + "line": 5, + "endline": 5, + "start": 8, + "end": 13 }, { - "name": "bind", - "type": "(thisArg: any, ...argArray: Array) => any", + "name": "hello", + "type": "() => void", "func_details": { - "return_type": "any", + "return_type": "void", + "params": [] + }, + "path": "this.js", + "line": 7, + "endline": 9, + "start": 3, + "end": 3 + } + ] + } + `, + ).exitCodes([0]), + + + addFile("typeparams.js"), + flowCmd( + ['autocomplete', '--strip-root', '--json', 'typeparams.js', '6', '16'], + 'typeparams.js', + ).stdout( + ` + { + "result": [ + { + "name": "toExponential", + "type": "(fractionDigits?: number) => string", + "func_details": { + "return_type": "string", "params": [ { - "name": "thisArg", - "type": "any" - }, - { - "name": "...argArray", - "type": "Array" + "name": "fractionDigits?", + "type": "number" } ] }, "path": "[LIB] core.js", - "line": 107, - "endline": 107, - "start": 17, - "end": 39 + "line": 160, + "endline": 160, + "start": 5, + "end": 50 }, { - "name": "call", - "type": "(thisArg: any, ...argArray: Array) => any", + "name": "toFixed", + "type": "(fractionDigits?: number) => string", "func_details": { - "return_type": "any", + "return_type": "string", "params": [ { - "name": "thisArg", - "type": "any" - }, - { - "name": "...argArray", - "type": "Array" + "name": "fractionDigits?", + "type": "number" } ] }, "path": "[LIB] core.js", - "line": 108, - "endline": 108, - "start": 17, - "end": 39 - }, - { - "name": "caller", - "type": "Function | null", - "func_details": null, - "path": "[LIB] core.js", - "line": 111, - "endline": 111, - "start": 13, - "end": 27 + "line": 161, + "endline": 161, + "start": 5, + "end": 44 }, { - "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "name": "toLocaleString", + "type": "(locales?: (string | Array), options?: Intl$NumberFormatOptions) => string", "func_details": { - "return_type": "boolean", + "return_type": "string", "params": [ { - "name": "prop", - "type": "any" + "name": "locales?", + "type": "string | Array" + }, + { + "name": "options?", + "type": "Intl$NumberFormatOptions" } ] }, "path": "[LIB] core.js", - "line": 63, - "endline": 63, + "line": 162, + "endline": 162, "start": 5, - "end": 38 + "end": 96 }, { - "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "name": "toPrecision", + "type": "(precision?: number) => string", "func_details": { - "return_type": "boolean", + "return_type": "string", "params": [ { - "name": "o", - "type": "any" + "name": "precision?", + "type": "number" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 163, + "endline": 163, "start": 5, - "end": 34 - }, - { - "name": "length", - "type": "number", - "func_details": null, - "path": "[LIB] core.js", - "line": 112, - "endline": 112, - "start": 13, - "end": 18 - }, - { - "name": "name", - "type": "string", - "func_details": null, - "path": "[LIB] core.js", - "line": 113, - "endline": 113, - "start": 11, - "end": 16 + "end": 43 }, { - "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "name": "toString", + "type": "(radix?: number) => string", "func_details": { - "return_type": "boolean", + "return_type": "string", "params": [ { - "name": "prop", - "type": "any" + "name": "radix?", + "type": "number" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 164, + "endline": 
164, "start": 5, - "end": 44 + "end": 36 }, { - "name": "toLocaleString", - "type": "() => string", + "name": "valueOf", + "type": "() => number", "func_details": { - "return_type": "string", + "return_type": "number", "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, - "start": 5, - "end": 28 - }, - { - "name": "toString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 67, - "endline": 67, - "start": 5, - "end": 22 - }, - { - "name": "valueOf", - "type": "() => mixed", - "func_details": { - "return_type": "mixed", - "params": [] - }, - "path": "[LIB] core.js", - "line": 68, - "endline": 68, - "start": 5, - "end": 20 - } - ] - } - `, - ).exitCodes([0]), - - - addFile("fun.js"), - flowCmd( - ['autocomplete', '--strip-root', '--json', 'fun.js', '4', '5'], - 'fun.js', - ).stdout( - ` - { - "result": [ - { - "name": "apply", - "type": "(thisArg: any, argArray?: any) => any", - "func_details": { - "return_type": "any", - "params": [ - { - "name": "thisArg", - "type": "any" - }, - { - "name": "argArray?", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 106, - "endline": 106, - "start": 18, - "end": 41 - }, - { - "name": "arguments", - "type": "any", - "func_details": null, - "path": "[LIB] core.js", - "line": 110, - "endline": 110, - "start": 16, - "end": 18 - }, - { - "name": "bind", - "type": "(thisArg: any, ...argArray: Array) => any", - "func_details": { - "return_type": "any", - "params": [ - { - "name": "thisArg", - "type": "any" - }, - { - "name": "...argArray", - "type": "Array" - } - ] - }, - "path": "[LIB] core.js", - "line": 107, - "endline": 107, - "start": 17, - "end": 39 - }, - { - "name": "call", - "type": "(thisArg: any, ...argArray: Array) => any", - "func_details": { - "return_type": "any", - "params": [ - { - "name": "thisArg", - "type": "any" - }, - { - "name": "...argArray", - "type": "Array" - } - ] - }, - "path": "[LIB] core.js", - "line": 108, - "endline": 108, - "start": 17, - "end": 39 - }, - { - "name": "caller", - "type": "Function | null", - "func_details": null, - "path": "[LIB] core.js", - "line": 111, - "endline": 111, - "start": 13, - "end": 27 - }, - { - "name": "hasOwnProperty", - "type": "(prop: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "prop", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 63, - "endline": 63, - "start": 5, - "end": 38 - }, - { - "name": "isPrototypeOf", - "type": "(o: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "o", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 64, - "endline": 64, - "start": 5, - "end": 34 - }, - { - "name": "length", - "type": "number", - "func_details": null, - "path": "[LIB] core.js", - "line": 112, - "endline": 112, - "start": 13, - "end": 18 - }, - { - "name": "name", - "type": "string", - "func_details": null, - "path": "[LIB] core.js", - "line": 113, - "endline": 113, - "start": 11, - "end": 16 - }, - { - "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "prop", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 65, - "endline": 65, - "start": 5, - "end": 44 - }, - { - "name": "toLocaleString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 66, - "endline": 66, - 
"start": 5, - "end": 28 - }, - { - "name": "toString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 67, - "endline": 67, - "start": 5, - "end": 22 - }, - { - "name": "valueOf", - "type": "() => mixed", - "func_details": { - "return_type": "mixed", - "params": [] - }, - "path": "[LIB] core.js", - "line": 68, - "endline": 68, - "start": 5, - "end": 20 - } - ] - } - `, - ).exitCodes([0]), - - - addFile("this.js"), - flowCmd( - ['autocomplete', '--strip-root', '--json', 'this.js', '8', '10'], - 'this.js', - ).stdout( - ` - { - "result": [ - { - "name": "bar", - "type": "() => void", - "func_details": { - "return_type": "void", - "params": [] - }, - "path": "this.js", - "line": 6, - "endline": 6, - "start": 3, - "end": 16 - }, - { - "name": "baz", - "type": "string", - "func_details": null, - "path": "this.js", - "line": 5, - "endline": 5, - "start": 8, - "end": 13 - }, - { - "name": "hello", - "type": "() => void", - "func_details": { - "return_type": "void", - "params": [] - }, - "path": "this.js", - "line": 7, - "endline": 9, - "start": 3, - "end": 3 - } - ] - } - `, - ).exitCodes([0]), - - - addFile("typeparams.js"), - flowCmd( - ['autocomplete', '--strip-root', '--json', 'typeparams.js', '6', '16'], - 'typeparams.js', - ).stdout( - ` - { - "result": [ - { - "name": "toExponential", - "type": "(fractionDigits?: number) => string", - "func_details": { - "return_type": "string", - "params": [ - { - "name": "fractionDigits?", - "type": "number" - } - ] - }, - "path": "[LIB] core.js", - "line": 140, - "endline": 140, - "start": 5, - "end": 50 - }, - { - "name": "toFixed", - "type": "(fractionDigits?: number) => string", - "func_details": { - "return_type": "string", - "params": [ - { - "name": "fractionDigits?", - "type": "number" - } - ] - }, - "path": "[LIB] core.js", - "line": 141, - "endline": 141, - "start": 5, - "end": 44 - }, - { - "name": "toLocaleString", - "type": "(locales?: (string | Array), options?: Intl$NumberFormatOptions) => string", - "func_details": { - "return_type": "string", - "params": [ - { - "name": "locales?", - "type": "string | Array" - }, - { - "name": "options?", - "type": "Intl$NumberFormatOptions" - } - ] - }, - "path": "[LIB] core.js", - "line": 142, - "endline": 142, - "start": 5, - "end": 96 - }, - { - "name": "toPrecision", - "type": "(precision?: number) => string", - "func_details": { - "return_type": "string", - "params": [ - { - "name": "precision?", - "type": "number" - } - ] - }, - "path": "[LIB] core.js", - "line": 143, - "endline": 143, - "start": 5, - "end": 43 - }, - { - "name": "toString", - "type": "(radix?: number) => string", - "func_details": { - "return_type": "string", - "params": [ - { - "name": "radix?", - "type": "number" - } - ] - }, - "path": "[LIB] core.js", - "line": 144, - "endline": 144, - "start": 5, - "end": 36 - }, - { - "name": "valueOf", - "type": "() => number", - "func_details": { - "return_type": "number", - "params": [] - }, - "path": "[LIB] core.js", - "line": 145, - "endline": 145, + "line": 165, + "endline": 165, "start": 5, "end": 21 } @@ -1825,57 +1470,57 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 63, - "endline": 63, + "line": 80, + "endline": 80, "start": 5, - "end": 38 + 
"end": 40 }, { "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "type": "(o: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "o", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 81, + "endline": 81, "start": 5, - "end": 34 + "end": 36 }, { "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 82, + "endline": 82, "start": 5, - "end": 44 + "end": 46 }, { "name": "toLocaleString", @@ -1885,8 +1530,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, + "line": 83, + "endline": 83, "start": 5, "end": 28 }, @@ -1898,8 +1543,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 67, - "endline": 67, + "line": 84, + "endline": 84, "start": 5, "end": 22 }, @@ -1911,8 +1556,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 68, - "endline": 68, + "line": 85, + "endline": 85, "start": 5, "end": 20 } @@ -1950,39 +1595,39 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 63, - "endline": 63, + "line": 80, + "endline": 80, "start": 5, - "end": 38 + "end": 40 }, { "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "type": "(o: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "o", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 81, + "endline": 81, "start": 5, - "end": 34 + "end": 36 }, { "name": "o", @@ -1996,21 +1641,21 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 82, + "endline": 82, "start": 5, - "end": 44 + "end": 46 }, { "name": "toLocaleString", @@ -2020,8 +1665,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, + "line": 83, + "endline": 83, "start": 5, "end": 28 }, @@ -2033,8 +1678,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 67, - "endline": 67, + "line": 84, + "endline": 84, "start": 5, "end": 22 }, @@ -2046,8 +1691,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 68, - "endline": 68, + "line": 85, + "endline": 85, "start": 5, "end": 20 }, @@ -2075,99 +1720,6 @@ export default suite(({addFile, flowCmd}) => [ ` { "result": [ - { - "name": "hasOwnProperty", - "type": "(prop: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "prop", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 63, - "endline": 63, - "start": 5, - "end": 38 - }, - { - "name": "isPrototypeOf", - "type": "(o: any) => boolean", - 
"func_details": { - "return_type": "boolean", - "params": [ - { - "name": "o", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 64, - "endline": 64, - "start": 5, - "end": 34 - }, - { - "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "prop", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 65, - "endline": 65, - "start": 5, - "end": 44 - }, - { - "name": "toLocaleString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 66, - "endline": 66, - "start": 5, - "end": 28 - }, - { - "name": "toString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 67, - "endline": 67, - "start": 5, - "end": 22 - }, - { - "name": "valueOf", - "type": "() => mixed", - "func_details": { - "return_type": "mixed", - "params": [] - }, - "path": "[LIB] core.js", - "line": 68, - "endline": 68, - "start": 5, - "end": 20 - }, { "name": "x", "type": "number", @@ -2192,99 +1744,6 @@ export default suite(({addFile, flowCmd}) => [ ` { "result": [ - { - "name": "hasOwnProperty", - "type": "(prop: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "prop", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 63, - "endline": 63, - "start": 5, - "end": 38 - }, - { - "name": "isPrototypeOf", - "type": "(o: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "o", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 64, - "endline": 64, - "start": 5, - "end": 34 - }, - { - "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", - "func_details": { - "return_type": "boolean", - "params": [ - { - "name": "prop", - "type": "any" - } - ] - }, - "path": "[LIB] core.js", - "line": 65, - "endline": 65, - "start": 5, - "end": 44 - }, - { - "name": "toLocaleString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 66, - "endline": 66, - "start": 5, - "end": 28 - }, - { - "name": "toString", - "type": "() => string", - "func_details": { - "return_type": "string", - "params": [] - }, - "path": "[LIB] core.js", - "line": 67, - "endline": 67, - "start": 5, - "end": 22 - }, - { - "name": "valueOf", - "type": "() => mixed", - "func_details": { - "return_type": "mixed", - "params": [] - }, - "path": "[LIB] core.js", - "line": 68, - "endline": 68, - "start": 5, - "end": 20 - }, { "name": "x", "type": "number", @@ -2361,7 +1820,7 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "idx", - "type": "(obj: IdxObject, pathCallback: (demaybefiedObj: IdxObject) => IdxResult) => ?IdxResult", + "type": "(obj: IdxObject, pathCallback: (demaybefiedObj: IdxObject) => IdxResult) => ?IdxResult", "func_details": { "return_type": "?IdxResult", "params": [ @@ -2385,7 +1844,7 @@ export default suite(({addFile, flowCmd}) => [ "name": "exports", "type": "{||}", "func_details": null, - "path": "", + "path": "customfun.js", "line": 0, "endline": 0, "start": 1, @@ -2451,39 +1910,39 @@ export default suite(({addFile, flowCmd}) => [ "result": [ { "name": "hasOwnProperty", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] 
core.js", - "line": 63, - "endline": 63, + "line": 80, + "endline": 80, "start": 5, - "end": 38 + "end": 40 }, { "name": "isPrototypeOf", - "type": "(o: any) => boolean", + "type": "(o: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "o", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 64, - "endline": 64, + "line": 81, + "endline": 81, "start": 5, - "end": 34 + "end": 36 }, { "name": "num", @@ -2497,21 +1956,21 @@ export default suite(({addFile, flowCmd}) => [ }, { "name": "propertyIsEnumerable", - "type": "(prop: any) => boolean", + "type": "(prop: mixed) => boolean", "func_details": { "return_type": "boolean", "params": [ { "name": "prop", - "type": "any" + "type": "mixed" } ] }, "path": "[LIB] core.js", - "line": 65, - "endline": 65, + "line": 82, + "endline": 82, "start": 5, - "end": 44 + "end": 46 }, { "name": "str", @@ -2531,8 +1990,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 66, - "endline": 66, + "line": 83, + "endline": 83, "start": 5, "end": 28 }, @@ -2544,8 +2003,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 67, - "endline": 67, + "line": 84, + "endline": 84, "start": 5, "end": 22 }, @@ -2557,8 +2016,8 @@ export default suite(({addFile, flowCmd}) => [ "params": [] }, "path": "[LIB] core.js", - "line": 68, - "endline": 68, + "line": 85, + "endline": 85, "start": 5, "end": 20 } @@ -2589,7 +2048,7 @@ export default suite(({addFile, flowCmd}) => [ "name": "exports", "type": "{||}", "func_details": null, - "path": "", + "path": "exact.js", "line": 0, "endline": 0, "start": 1, diff --git a/newtests/bigint/_flowconfig b/newtests/bigint/_flowconfig new file mode 100644 index 00000000000..1562e53144b --- /dev/null +++ b/newtests/bigint/_flowconfig @@ -0,0 +1,9 @@ +[ignore] + +[include] + +[libs] + +[options] +all=true + diff --git a/newtests/bigint/test.js b/newtests/bigint/test.js new file mode 100644 index 00000000000..cc79e884855 --- /dev/null +++ b/newtests/bigint/test.js @@ -0,0 +1,374 @@ +/* + * @flow + */ + +import { suite, test } from "flow-dev-tools/src/test/Tester"; + +export default suite(({ addFile, addFiles, addCode }) => [ + test("BigInt invalid decimal type literal", [ + addCode(` + type InvalidDecimal = 1.0n; + `).newErrors( + ` + test.js:4 + 4: type InvalidDecimal = 1.0n; + ^^^^ A bigint literal must be an integer + `, + ) + ]), + + test("BigInt invalid negative decimal type literal", [ + addCode(` + type InvalidNegDecimal = -1.0n; + `).newErrors( + ` + test.js:4 + 4: type InvalidNegDecimal = -1.0n; + ^^^^^ A bigint literal must be an integer + `, + ) + ]), + + test("BigInt invalid decimal literal", [ + addCode(` + const invalid_decimal = 1.0n; + `).newErrors( + ` + test.js:4 + 4: const invalid_decimal = 1.0n; + ^^^^ A bigint literal must be an integer + `, + ) + ]), + + test("BigInt invalid negative decimal literal", [ + addCode(` + const invalid_neg_decimal = -1.0n; + `).newErrors( + ` + test.js:4 + 4: const invalid_neg_decimal = -1.0n; + ^^^^ A bigint literal must be an integer + `, + ) + ]), + + test("BigInt invalid scientific type literal", [ + addCode(` + type InvalidE = 2e9n; + `).newErrors( + ` + test.js:4 + 4: type InvalidE = 2e9n; + ^^^^ A bigint literal cannot use exponential notation + `, + ) + ]), + + test("BigInt invalid negative scientific type literal", [ + addCode(` + type InvalidNegE = -2e9n; + `).newErrors( + ` + test.js:4 + 4: type InvalidNegE = -2e9n; + ^^^^^ A bigint 
literal cannot use exponential notation + `, + ) + ]), + + test("BigInt invalid scientific decimal type literal", [ + addCode(` + type InvalidNegDecimalE = 2.0e9n; + `).newErrors( + ` + test.js:4 + 4: type InvalidNegDecimalE = 2.0e9n; + ^^^^^^ A bigint literal cannot use exponential notation + `, + ) + ]), + + test("BigInt invalid negative scientific decimal type literal", [ + addCode(` + type InvalidNegDecimalE = -2.0e9n; + `).newErrors( + ` + test.js:4 + 4: type InvalidNegDecimalE = -2.0e9n; + ^^^^^^^ A bigint literal cannot use exponential notation + `, + ) + ]), + + test("BigInt invalid scientific literal", [ + addCode(` + const invalid_e = 2e9n; + `).newErrors( + ` + test.js:4 + 4: const invalid_e = 2e9n; + ^^^^ A bigint literal cannot use exponential notation + `, + ) + ]), + + test("BigInt invalid negative scientific literal", [ + addCode(` + const invalid_neg_e = -2e9n; + `).newErrors( + ` + test.js:4 + 4: const invalid_neg_e = -2e9n; + ^^^^ A bigint literal cannot use exponential notation + `, + ) + ]), + + test("BigInt invalid octal legacy type literal", [ + addCode(` + type InvalidOctalLegacy = 016432n; + `).newErrors( + ` + test.js:4 + 4: type InvalidOctalLegacy = 016432n; + ^^^^^^^ Unexpected token ILLEGAL + `, + ) + ]), + + test("BigInt invalid negative octal legacy type literal", [ + addCode(` + type InvalidNegOctalLegacy = -016432n; + `).newErrors( + ` + test.js:4 + 4: type InvalidNegOctalLegacy = -016432n; + ^^^^^^^^ Unexpected token ILLEGAL + `, + ) + ]), + + test("BigInt invalid octal legacy literal", [ + addCode(` + const invalid_octal_legacy = 016432n; + `).newErrors( + ` + test.js:4 + 4: const invalid_octal_legacy = 016432n; + ^^^^^^^ Unexpected token ILLEGAL + `, + ) + ]), + + test("BigInt invalid negative octal legacy literal", [ + addCode(` + const invalid_neg_octal_legacy = -016432n; + `).newErrors( + ` + test.js:4 + 4: const invalid_neg_octal_legacy = -016432n; + ^^^^^^^ Unexpected token ILLEGAL + `, + ) + ]), + + test("BigInt is not supported yet", [ + addCode(` + type S = bigint; + + const valid_binary = 0b101011101n; + const valid_neg_binary = -0b101011101n; + type ValidBinary = 0b101011101n; + type ValidNegBinary = -0b101011101n; + + const valid_hex = 0xfff123n; + const valid_neg_hex = -0xfff123n; + type ValidHex = 0xfff123n; + type ValidNegHex = -0xfff123n; + + const valid_large = 9223372036854775807n; + const valid_neg_large = -9223372036854775807n; + type ValidLarge = 9223372036854775807n; + type ValidNegLarge = -9223372036854775807n; + + const valid_octal_new = 0o16432n; + const valid_neg_octal_new = -0o16432n; + type ValidOctalNew = 0o16432n; + type ValidNegOctalNew = -0o16432n; + + const valid_small = 100n; + const valid_neg_small = -100n; + type ValidSmall = 100n; + type ValidNegSmall = -1n; + `).newErrors( + ` + test.js:4 + 4: type S = bigint; + ^^^^^^ BigInt bigint [1] is not yet supported. + References: + 4: type S = bigint; + ^^^^^^ [1] + + test.js:6 + 6: const valid_binary = 0b101011101n; + ^^^^^^^^^^^^ BigInt bigint literal \`0b101011101n\` [1] is not yet supported. + References: + 6: const valid_binary = 0b101011101n; + ^^^^^^^^^^^^ [1] + + test.js:7 + 7: const valid_neg_binary = -0b101011101n; + ^^^^^^^^^^^^ BigInt bigint literal \`0b101011101n\` [1] is not yet supported. + References: + 7: const valid_neg_binary = -0b101011101n; + ^^^^^^^^^^^^ [1] + + test.js:8 + 8: type ValidBinary = 0b101011101n; + ^^^^^^^^^^^^ BigInt bigint literal \`0b101011101n\` [1] is not yet supported. 
+ References: + 8: type ValidBinary = 0b101011101n; + ^^^^^^^^^^^^ [1] + + test.js:9 + 9: type ValidNegBinary = -0b101011101n; + ^^^^^^^^^^^^^ BigInt bigint literal \`-0b101011101n\` [1] is not yet supported. + References: + 9: type ValidNegBinary = -0b101011101n; + ^^^^^^^^^^^^^ [1] + + test.js:11 + 11: const valid_hex = 0xfff123n; + ^^^^^^^^^ BigInt bigint literal \`0xfff123n\` [1] is not yet supported. + References: + 11: const valid_hex = 0xfff123n; + ^^^^^^^^^ [1] + + test.js:12 + 12: const valid_neg_hex = -0xfff123n; + ^^^^^^^^^ BigInt bigint literal \`0xfff123n\` [1] is not yet supported. + References: + 12: const valid_neg_hex = -0xfff123n; + ^^^^^^^^^ [1] + + test.js:13 + 13: type ValidHex = 0xfff123n; + ^^^^^^^^^ BigInt bigint literal \`0xfff123n\` [1] is not yet supported. + References: + 13: type ValidHex = 0xfff123n; + ^^^^^^^^^ [1] + + test.js:14 + 14: type ValidNegHex = -0xfff123n; + ^^^^^^^^^^ BigInt bigint literal \`-0xfff123n\` [1] is not yet supported. + References: + 14: type ValidNegHex = -0xfff123n; + ^^^^^^^^^^ [1] + + test.js:16 + 16: const valid_large = 9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^ BigInt bigint literal \`9223372036854775807n\` [1] is not yet supported. + References: + 16: const valid_large = 9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^ [1] + + test.js:17 + 17: const valid_neg_large = -9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^ BigInt bigint literal \`9223372036854775807n\` [1] is not yet supported. + References: + 17: const valid_neg_large = -9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^ [1] + + test.js:18 + 18: type ValidLarge = 9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^ BigInt bigint literal \`9223372036854775807n\` [1] is not yet supported. + References: + 18: type ValidLarge = 9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^ [1] + + test.js:19 + 19: type ValidNegLarge = -9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^^ BigInt bigint literal \`-9223372036854775807n\` [1] is not yet supported. + References: + 19: type ValidNegLarge = -9223372036854775807n; + ^^^^^^^^^^^^^^^^^^^^^ [1] + + test.js:21 + 21: const valid_octal_new = 0o16432n; + ^^^^^^^^ BigInt bigint literal \`0o16432n\` [1] is not yet supported. + References: + 21: const valid_octal_new = 0o16432n; + ^^^^^^^^ [1] + + test.js:22 + 22: const valid_neg_octal_new = -0o16432n; + ^^^^^^^^ BigInt bigint literal \`0o16432n\` [1] is not yet supported. + References: + 22: const valid_neg_octal_new = -0o16432n; + ^^^^^^^^ [1] + + test.js:23 + 23: type ValidOctalNew = 0o16432n; + ^^^^^^^^ BigInt bigint literal \`0o16432n\` [1] is not yet supported. + References: + 23: type ValidOctalNew = 0o16432n; + ^^^^^^^^ [1] + + test.js:24 + 24: type ValidNegOctalNew = -0o16432n; + ^^^^^^^^^ BigInt bigint literal \`-0o16432n\` [1] is not yet supported. + References: + 24: type ValidNegOctalNew = -0o16432n; + ^^^^^^^^^ [1] + + test.js:26 + 26: const valid_small = 100n; + ^^^^ BigInt bigint literal \`100n\` [1] is not yet supported. + References: + 26: const valid_small = 100n; + ^^^^ [1] + + test.js:27 + 27: const valid_neg_small = -100n; + ^^^^ BigInt bigint literal \`100n\` [1] is not yet supported. + References: + 27: const valid_neg_small = -100n; + ^^^^ [1] + + test.js:28 + 28: type ValidSmall = 100n; + ^^^^ BigInt bigint literal \`100n\` [1] is not yet supported. + References: + 28: type ValidSmall = 100n; + ^^^^ [1] + + test.js:29 + 29: type ValidNegSmall = -1n; + ^^^ BigInt bigint literal \`-1n\` [1] is not yet supported. 
+ References: + 29: type ValidNegSmall = -1n; + ^^^ [1] + `, + ) + ]), + + test("BigInt can be suppressed", [ + addCode(` + //$FlowFixMe + type S = bigint; + //$FlowFixMe + type A = 1n; + //$FlowFixMe + const valid_binary = 0b101011101n; + //$FlowFixMe + const valid_hex = 0xfff123n; + //$FlowFixMe + const valid_large = 9223372036854775807n; + //$FlowFixMe + const valid_octal_new = 0o16432n; + //$FlowFixMe + const valid_small = 100n; + `).noNewErrors() + ]) +]); diff --git a/newtests/class_field_initializer_inference/test.js b/newtests/class_field_initializer_inference/test.js index 41f21caf40c..d62d8f6ee74 100644 --- a/newtests/class_field_initializer_inference/test.js +++ b/newtests/class_field_initializer_inference/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/config_all/test.js b/newtests/config_all/test.js index 597b98e1767..87a079703d6 100644 --- a/newtests/config_all/test.js +++ b/newtests/config_all/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; diff --git a/newtests/const_imports/test.js b/newtests/const_imports/test.js index 22c53a7181e..b97c4718e3b 100644 --- a/newtests/const_imports/test.js +++ b/newtests/const_imports/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/contents/test.js b/newtests/contents/test.js index 40c806d6f97..2c54c588889 100644 --- a/newtests/contents/test.js +++ b/newtests/contents/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/csx/test.js b/newtests/csx/test.js index ae21184327b..7f8e7f362d7 100644 --- a/newtests/csx/test.js +++ b/newtests/csx/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -117,8 +116,8 @@ export default suite(({addFile, addFiles, addCode}) => [ References: 7: const x = 42; ^^ [1] - 31: interface $Iterable<+Yield,+Return,-Next> { - ^^^^^^^^^ [2]. See lib: [LIB] prelude.js:31 + 41: interface $Iterable<+Yield,+Return,-Next> { + ^^^^^^^^^ [2]. See lib: [LIB] prelude.js:41 `, ) ]), diff --git a/newtests/declare_module_imports/test.js b/newtests/declare_module_imports/test.js index 409d342f9ab..662ec57b4cf 100644 --- a/newtests/declare_module_imports/test.js +++ b/newtests/declare_module_imports/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -56,57 +55,13 @@ export default suite(({addFile, addFiles, addCode}) => [ addCode('(cVal: CT);').noNewErrors(), addCode('(cVal.D: DT);').noNewErrors(), - addCode('(cVal: DT);').newErrors( - ` - test.js:18 - 18: (cVal: DT); - ^^^^ Cannot cast \`cVal\` to \`DT\` because property \`C\` is missing in object literal [1] but exists in \`DT\` [2]. - References: - 8: const cVal = {}; - ^^ [1] - 18: (cVal: DT); - ^^ [2] - `, - ), - addCode('(cVal.D: CT);').newErrors( - ` - test.js:20 - 20: (cVal.D: CT); - ^^^^^^ Cannot cast \`cVal.D\` to \`CT\` because property \`D\` is missing in object literal [1] but exists in \`CT\` [2]. - References: - 9: const dVal = {}; - ^^ [1] - 20: (cVal.D: CT); - ^^ [2] - `, - ), + addCode('(cVal: DT);').noNewErrors(), + addCode('(cVal.D: CT);').noNewErrors(), addCode('(dVal: DT);').noNewErrors(), addCode('(dVal.C: CT);').noNewErrors(), - addCode('(dVal: CT);').newErrors( - ` - test.js:26 - 26: (dVal: CT); - ^^^^ Cannot cast \`dVal\` to \`CT\` because property \`D\` is missing in object literal [1] but exists in \`CT\` [2]. 
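The new `newtests/bigint` suite added above exercises two separate behaviours: malformed bigint literals are parse errors, while well-formed ones parse but are flagged as not yet supported by this version of Flow, and that error is suppressible. A rough sketch of the cases it covers, mirroring the literals used in the tests (the checker behaviour here is as asserted by the suite, not independently verified):

const plain: number = 100;    // ordinary number literals are unaffected

// parse errors, independent of bigint support:
// const bad1 = 1.0n;         // "A bigint literal must be an integer"
// const bad2 = 2e9n;         // "A bigint literal cannot use exponential notation"
// const bad3 = 016432n;      // "Unexpected token ILLEGAL" (legacy octal form)

// well-formed literals parse but are reported as not yet supported:
// const ok1 = 100n;
// const ok2 = 0xfff123n;
// $FlowFixMe -- suppressible like any other error
// const ok3 = 0b101011101n;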
- References: - 9: const dVal = {}; - ^^ [1] - 26: (dVal: CT); - ^^ [2] - `, - ), - addCode('(dVal.C: DT);').newErrors( - ` - test.js:28 - 28: (dVal.C: DT); - ^^^^^^ Cannot cast \`dVal.C\` to \`DT\` because property \`C\` is missing in object literal [1] but exists in \`DT\` [2]. - References: - 8: const cVal = {}; - ^^ [1] - 28: (dVal.C: DT); - ^^ [2] - `, - ), + addCode('(dVal: CT);').noNewErrors(), + addCode('(dVal.C: DT);').noNewErrors(), ]), /** diff --git a/newtests/dynamic_import/test.js b/newtests/dynamic_import/test.js index c508ae1dc19..4c8b25ee888 100644 --- a/newtests/dynamic_import/test.js +++ b/newtests/dynamic_import/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/export_star_from/test.js b/newtests/export_star_from/test.js index 84cf6c601fb..c441c86191f 100644 --- a/newtests/export_star_from/test.js +++ b/newtests/export_star_from/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/export_type_star_from/test.js b/newtests/export_type_star_from/test.js index fbbf1e1c30f..d1b4423a54b 100644 --- a/newtests/export_type_star_from/test.js +++ b/newtests/export_type_star_from/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/gen_flow_files_command/_flowconfig b/newtests/gen_flow_files_command/_flowconfig deleted file mode 100644 index 244d0f16ccd..00000000000 --- a/newtests/gen_flow_files_command/_flowconfig +++ /dev/null @@ -1,10 +0,0 @@ -[ignore] - -[include] - -[libs] - -[options] -suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe -no_flowlib=true - diff --git a/newtests/gen_flow_files_command/default_class_export b/newtests/gen_flow_files_command/default_class_export deleted file mode 100644 index 59c905c4135..00000000000 --- a/newtests/gen_flow_files_command/default_class_export +++ /dev/null @@ -1,11 +0,0 @@ -// @flow - -export class Base { -}; - -interface IFoo {} -interface IBar {} - -export default class Child extends Base implements IFoo, IBar { - p: number -} diff --git a/newtests/gen_flow_files_command/default_function_exports.js b/newtests/gen_flow_files_command/default_function_exports.js deleted file mode 100644 index e379157a3d9..00000000000 --- a/newtests/gen_flow_files_command/default_function_exports.js +++ /dev/null @@ -1,4 +0,0 @@ -// @flow - -export default function (a: V) { return a; } -export function mono(a: number, b: {c: number}) { return a + b.c; }; diff --git a/newtests/gen_flow_files_command/default_variable_exports.js b/newtests/gen_flow_files_command/default_variable_exports.js deleted file mode 100644 index e0b36c3c7b0..00000000000 --- a/newtests/gen_flow_files_command/default_variable_exports.js +++ /dev/null @@ -1,4 +0,0 @@ -// @flow - -export default 42; -export const str = "asdf"; diff --git a/newtests/gen_flow_files_command/export_imported_type b/newtests/gen_flow_files_command/export_imported_type deleted file mode 100644 index 9c05503b4e4..00000000000 --- a/newtests/gen_flow_files_command/export_imported_type +++ /dev/null @@ -1,5 +0,0 @@ -// @flow - -import {Child} from "./named_class_exports"; - -export const a: Child = new Child(); diff --git a/newtests/gen_flow_files_command/exports_builtins.js b/newtests/gen_flow_files_command/exports_builtins.js deleted file mode 100644 index 592cfc4e0b5..00000000000 --- a/newtests/gen_flow_files_command/exports_builtins.js +++ /dev/null @@ -1,3 +0,0 @@ -// @flow - -export function fn(a: Array) {}; diff --git a/newtests/gen_flow_files_command/literal_types.js 
b/newtests/gen_flow_files_command/literal_types.js deleted file mode 100644 index 92a49f4b751..00000000000 --- a/newtests/gen_flow_files_command/literal_types.js +++ /dev/null @@ -1,16 +0,0 @@ -// @flow - -export var varBool = true; -export var varBoolLiteral: true = true; -export var varNum = 42; -export var varNumLiteral: 42 = 42; -export var varStr = "asdf"; -export var varStrLiteral: "asdf" = "asdf"; - -export function f1(p: number) { - return "asdf"; -}; - -export function f2(p: 42): "asdf" { - return "asdf"; -}; diff --git a/newtests/gen_flow_files_command/named_class_exports b/newtests/gen_flow_files_command/named_class_exports deleted file mode 100644 index a9b9b607b04..00000000000 --- a/newtests/gen_flow_files_command/named_class_exports +++ /dev/null @@ -1,33 +0,0 @@ -// @flow - -export class Base { - static baseStaticMethod(a: number, b: string) { return a; } - static overriddenStaticMethod(a: {b: number, c: number}) { return a.b + a.c; } - - // Testing infinite type recursion - baseInst: Base; - - // Testing forward references - childInst: Child; - - baseMethod(a: number, b: string) { return a; } - overriddenMethod(a: {b: number, c: number}) { return a.b + a.c; } -}; - -export class Child extends Base { - static overriddenStaticMethod(a: {b: number}) { return a.b; } - - notExported: NotExportedUsed; - overriddenMethod(a: {b: number}) { return a.b; } -} - -class NotExportedUsed { - map(f: (x:T) => U): NotExportedUsed { - return new NotExportedUsed(); - }; -} -class NotExportedNotUsed {} - -interface IFoo { foo: string } - -export class Foo implements IFoo { foo: string } diff --git a/newtests/gen_flow_files_command/named_function_exports.js b/newtests/gen_flow_files_command/named_function_exports.js deleted file mode 100644 index ea4626de330..00000000000 --- a/newtests/gen_flow_files_command/named_function_exports.js +++ /dev/null @@ -1,4 +0,0 @@ -// @flow - -export function mono(a: number, b: {c: number}) { return a + b.c; }; -export function poly (a: V) { return a; } diff --git a/newtests/gen_flow_files_command/named_type_exports.js b/newtests/gen_flow_files_command/named_type_exports.js deleted file mode 100644 index fcf1e58bc65..00000000000 --- a/newtests/gen_flow_files_command/named_type_exports.js +++ /dev/null @@ -1,4 +0,0 @@ -// @flow - -export type T1 = number; -export type T2 = Array; diff --git a/newtests/gen_flow_files_command/named_variable_exports.js b/newtests/gen_flow_files_command/named_variable_exports.js deleted file mode 100644 index 5d5f1c333ae..00000000000 --- a/newtests/gen_flow_files_command/named_variable_exports.js +++ /dev/null @@ -1,13 +0,0 @@ -// @flow - -export const constExport = 42; -export let letExport = 43; -export var varExport = 44; - -export type typeExport = number; - -type UnexportedT = string; -export const unexportedAlias = ((0: any): UnexportedT); - -class C {} -export const unexportedNominal = ((0: any): C); diff --git a/newtests/gen_flow_files_command/non_flow_file.js b/newtests/gen_flow_files_command/non_flow_file.js deleted file mode 100644 index 6c293163448..00000000000 --- a/newtests/gen_flow_files_command/non_flow_file.js +++ /dev/null @@ -1 +0,0 @@ -export function addNum(a: number, b: number) { return a + b; } diff --git a/newtests/gen_flow_files_command/object_literal_method.js b/newtests/gen_flow_files_command/object_literal_method.js deleted file mode 100644 index cc55469afb5..00000000000 --- a/newtests/gen_flow_files_command/object_literal_method.js +++ /dev/null @@ -1,17 +0,0 @@ -// @flow - -export var a = { - bar(): void 
{} -}; - -export var b = { - bar: function (): void {} -}; - -export var c = { - m(x: T): T { return x; } -}; - -export var d = { - m: function(x: T): T { return x; } -}; diff --git a/newtests/gen_flow_files_command/object_types.js b/newtests/gen_flow_files_command/object_types.js deleted file mode 100644 index ed77b6e1153..00000000000 --- a/newtests/gen_flow_files_command/object_types.js +++ /dev/null @@ -1,13 +0,0 @@ -// @flow - -export var emptyObj = {}; - -export var singleProp = {p1: 42}; -export var multiProp = {p1: 42, p2: 42}; -export var nestedObject = {p1: {p2: 42}}; - -export var dict: {[key: string]: string} = {}; -export var dictWithProps: { - p1: string, - [key: string]: number, -} = {p1: "asdf"}; diff --git a/newtests/gen_flow_files_command/optional_types.js b/newtests/gen_flow_files_command/optional_types.js deleted file mode 100644 index a39df6860cb..00000000000 --- a/newtests/gen_flow_files_command/optional_types.js +++ /dev/null @@ -1,7 +0,0 @@ -// @flow - -var obj: {b?: number} = {b: 42}; - -export {obj}; -export var optNum = obj.b; -export var optFunc = (p?: number) => p; diff --git a/newtests/gen_flow_files_command/suppressions.js b/newtests/gen_flow_files_command/suppressions.js deleted file mode 100644 index d004dac32c3..00000000000 --- a/newtests/gen_flow_files_command/suppressions.js +++ /dev/null @@ -1,4 +0,0 @@ -// @flow - -// $FlowFixMe -export function fn() { return ('asdf': number); }; diff --git a/newtests/gen_flow_files_command/test.js b/newtests/gen_flow_files_command/test.js deleted file mode 100644 index da5be613093..00000000000 --- a/newtests/gen_flow_files_command/test.js +++ /dev/null @@ -1,429 +0,0 @@ -/* - * @flow - * @lint-ignore-every LINEWRAP1 - */ - - -import {suite, test} from 'flow-dev-tools/src/test/Tester'; - -export default suite(({addFile, addFiles, flowCmd}) => [ - test('named class exports', [ - addFile('named_class_exports', 'named_class_exports.js'), - flowCmd(['gen-flow-files', '--quiet', 'named_class_exports.js']) - .stdout( - ` - // @flow - - declare class Class0 { - - map(f: (x: T) => U): Class0; - } - declare interface Class1 { - - foo: string; - } - declare export class Base { - static baseStaticMethod(a: number, b: string): number; - static overriddenStaticMethod(a: {b: number, c: number}): number; - - baseInst: Base; - childInst: Child; - baseMethod(a: number, b: string): number; - overriddenMethod(a: {b: number, c: number}): number; - } - - declare export class Child extends Base { - static overriddenStaticMethod(a: {b: number}): number; - - notExported: Class0; - overriddenMethod(a: {b: number}): number; - } - - declare export class Foo implements Class1 { - - foo: string; - } - - - `, - ) - .stderr('') - ]), - - test('named variable exports', [ - addFile('named_variable_exports.js'), - flowCmd(['gen-flow-files', '--quiet', 'named_variable_exports.js']) - .stderr('') - .stdout( - ` - // @flow - - declare class Class0 { - - } - declare export var constExport: number; - declare export var letExport: number; - export type typeExport = number; - declare export var unexportedAlias: string; - declare export var unexportedNominal: Class0; - declare export var varExport: number; - - - `, - ) - ]), - - test('named function exports', [ - addFile('named_function_exports.js'), - flowCmd(['gen-flow-files', '--quiet', 'named_function_exports.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export function mono(a: number, b: {c: number}): number; - declare export function poly(a: V): number; - - - - `, - ) - ]), - - test('named 
type exports', [ - addFile('named_type_exports.js'), - flowCmd(['gen-flow-files', '--quiet', 'named_type_exports.js']) - .stderr('') - .stdout( - ` - // @flow - - export type T1 = number; - export type T2 = Array; - - - declare module.exports: {}; - - `, - ), - ]), - - test('default class exports', [ - addFile('default_class_export', 'default_class_export.js'), - flowCmd(['gen-flow-files', '--quiet', 'default_class_export.js']) - .stderr('') - .stdout( - ` - // @flow - - declare interface Class0 { - - } - declare interface Class1 { - - } - declare export class Base { - - } - - declare export default class extends Base implements Class0, Class1 { - - p: number; - } - - - - `, - ), - ]), - - test('default variable exports', [ - addFile('default_variable_exports.js'), - flowCmd(['gen-flow-files', '--quiet', 'default_variable_exports.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export default number; - declare export var str: string; - - - `, - ), - ]), - - test('default function exports', [ - addFile('default_function_exports.js'), - flowCmd(['gen-flow-files', '--quiet', 'default_function_exports.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export default function(a: V): number; - - declare export function mono(a: number, b: {c: number}): number; - - - `, - ), - ]), - - test('non-@flow files', [ - addFile('non_flow_file.js'), - flowCmd([ - 'gen-flow-files', - '--quiet', - '--strip-root', - 'non_flow_file.js', - ]) - .stderr('') - .stdout( - ` - // This file does not have an @flow at the top! - - `, - ), - ]), - - test('type errors halt and print to stderr', [ - addFile('type_error.js'), - flowCmd(['gen-flow-files', '--quiet', 'type_error.js']).stdout('').stderr( - ` - Error ----------------------------------------------------------------------------------------------- type_error.js:3:24 - - Cannot assign \`42\` to \`a\` because number [1] is incompatible with string [2]. - - type_error.js:3:24 - 3| export var a: string = 42; - ^^ [1] - - References: - type_error.js:3:15 - 3| export var a: string = 42; - ^^^^^^ [2] - - - Found 1 error - - In order to generate a shadow file there must be no type errors! 
- - `, - ) - ]), - - test('imported class types arent redefined', [ - addFile('named_class_exports', 'named_class_exports.js'), - addFile('export_imported_type', 'export_imported_type.js'), - flowCmd(['gen-flow-files', '--quiet', 'export_imported_type.js']) - .stderr('') - .stdout( - ` - // @flow - - import {Child} from "./named_class_exports"; - declare export var a: Child; - - - `, - ) - ]), - - test('builtin class types arent redefined', [ - addFile('exports_builtins.js'), - flowCmd(['gen-flow-files', '--quiet', 'exports_builtins.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export function fn(a: Array): void; - - - `, - ) - ]), - - test('suppressed type errors get normalized', [ - addFile('suppressions.js'), - flowCmd(['gen-flow-files', '--quiet', 'suppressions.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export function fn(): number; - - - `, - ) - ]), - - test('literal types respect polarity', [ - addFile('literal_types.js'), - flowCmd(['gen-flow-files', '--quiet', 'literal_types.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export function f1(p: number): string; - declare export function f2(p: 42): "asdf"; - declare export var varBool: boolean; - declare export var varBoolLiteral: true; - declare export var varNum: number; - declare export var varNumLiteral: 42; - declare export var varStr: string; - declare export var varStrLiteral: "asdf"; - - - `, - ) - ]), - - test('optional types', [ - addFile('optional_types.js'), - flowCmd(['gen-flow-files', '--quiet', 'optional_types.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export var obj: {b?: number}; - declare export function optFunc(p?: number): void | number; - declare export var optNum: void | number; - - - `, - ) - ]), - - test('object types', [ - addFile('object_types.js'), - flowCmd(['gen-flow-files', '--quiet', 'object_types.js']) - .stderr('') - .stdout( - ` - // @flow - - declare export var dict: {[key: string]: string}; - declare export var dictWithProps: {p1: string, [key: string]: number}; - declare export var emptyObj: {}; - declare export var multiProp: {p1: number, p2: number}; - declare export var nestedObject: {p1: {p2: number}}; - declare export var singleProp: {p1: number}; - - - `, - ) - ]), - - /** - * TODO: Add checks that assert that the following files actually get written - * to disk. - */ - - test('single file with --out-dir', [ - addFiles( - 'test_project/dist/main.js', - 'test_project/src/main.js', - ), - flowCmd([ - 'gen-flow-files', - '--quiet', - '--strip-root', - './test_project/src/main.js', - '--out-dir=./test_project/dist' - ]).stderr('').stdout(` - test_project/src/main.js -> test_project/dist/main.js.flow - `), - ]), - - test('single file in nested directory with --out-dir', [ - addFiles( - 'test_project/dist/main.js', - 'test_project/src/main.js', - 'test_project/src/lib/utils.js', - ), - flowCmd([ - 'gen-flow-files', - '--quiet', - '--strip-root', - './test_project/src/lib/utils.js', - '--out-dir=./test_project/dist' - ]).stderr('').stdout(` - test_project/src/lib/utils.js -> test_project/dist/utils.js.flow - `), - ]), - - test('directory without --out-dir (error)', [ - addFiles( - 'test_project/dist/main.js', - 'test_project/src/main.js', - ), - flowCmd([ - 'gen-flow-files', - '--quiet', - '--strip-root', - './test_project/src', - ]).stdout('').stderr(` - When the ${'`'}src${'`'} arg is a directory, the ${'`'}--out-dir${'`'} flag is required. 
- `) - ]), - - test('directory with --out-dir', [ - addFiles( - 'test_project/dist/main.js', - 'test_project/src/main.js', - 'test_project/src/lib/utils.js', - ), - flowCmd([ - 'gen-flow-files', - '--quiet', - '--strip-root', - './test_project/src', - '--out-dir=./test_project/dist', - ]).stderr('').stdout(` - Found 2 files, generating libdefs... - test_project/src/lib/utils.js -> test_project/dist/lib/utils.js.flow - test_project/src/main.js -> test_project/dist/main.js.flow - `), - ]), - - test('directory with non-@flow files', [ - addFiles( - 'test_project/dist/main.js', - 'test_project/src/main.js', - 'test_project/src/noflow.js', - 'test_project/src/lib/utils.js', - 'test_project/src/lib/noflow.js', - ), - flowCmd([ - 'gen-flow-files', - '--quiet', - '--strip-root', - './test_project/src', - '--out-dir=./test_project/dist', - ]).stderr('').stdout(` - Found 4 files, generating libdefs... - test_project/src/lib/utils.js -> test_project/dist/lib/utils.js.flow - test_project/src/main.js -> test_project/dist/main.js.flow - `), - ]), - - test('object literals with method declarations', [ - addFile('object_literal_method.js'), - flowCmd(['gen-flow-files', '--quiet', 'object_literal_method.js']).stdout(` - // @flow - - declare export var a: {bar: () => void}; - declare export var b: {bar: () => void}; - declare export var c: {m: (x: T) => T}; - declare export var d: {m: (x: T) => T}; - - - `) - .stderr('') - ]), -]); diff --git a/newtests/gen_flow_files_command/test_project/dist/main.js b/newtests/gen_flow_files_command/test_project/dist/main.js deleted file mode 100644 index 85e65e12132..00000000000 --- a/newtests/gen_flow_files_command/test_project/dist/main.js +++ /dev/null @@ -1,3 +0,0 @@ -// @flow - -export const name = "main.js"; diff --git a/newtests/gen_flow_files_command/test_project/src/lib/noflow.js b/newtests/gen_flow_files_command/test_project/src/lib/noflow.js deleted file mode 100644 index 1a8a8c95813..00000000000 --- a/newtests/gen_flow_files_command/test_project/src/lib/noflow.js +++ /dev/null @@ -1 +0,0 @@ -export const name = "noflow.js"; diff --git a/newtests/gen_flow_files_command/test_project/src/lib/utils.js b/newtests/gen_flow_files_command/test_project/src/lib/utils.js deleted file mode 100644 index f5f050cd40c..00000000000 --- a/newtests/gen_flow_files_command/test_project/src/lib/utils.js +++ /dev/null @@ -1,3 +0,0 @@ -// @flow - -export const name: "utils.js" = "utils.js"; diff --git a/newtests/gen_flow_files_command/test_project/src/main.js b/newtests/gen_flow_files_command/test_project/src/main.js deleted file mode 100644 index 7b3654a8c5c..00000000000 --- a/newtests/gen_flow_files_command/test_project/src/main.js +++ /dev/null @@ -1,3 +0,0 @@ -// @flow - -export const name: "main.js" = "main.js"; diff --git a/newtests/gen_flow_files_command/test_project/src/noflow.js b/newtests/gen_flow_files_command/test_project/src/noflow.js deleted file mode 100644 index 1a8a8c95813..00000000000 --- a/newtests/gen_flow_files_command/test_project/src/noflow.js +++ /dev/null @@ -1 +0,0 @@ -export const name = "noflow.js"; diff --git a/newtests/gen_flow_files_command/type_error.js b/newtests/gen_flow_files_command/type_error.js deleted file mode 100644 index 0437eaf8ff1..00000000000 --- a/newtests/gen_flow_files_command/type_error.js +++ /dev/null @@ -1,3 +0,0 @@ -// @flow - -export var a: string = 42; diff --git a/newtests/ide/_flowconfig b/newtests/ide/_flowconfig deleted file mode 100644 index 682838601b4..00000000000 --- a/newtests/ide/_flowconfig +++ /dev/null @@ -1,11 +0,0 
@@ -[ignore] - -[include] - -[libs] - -[lints] - -[options] -all=true -no_flowlib=true diff --git a/newtests/ide/existingError.js b/newtests/ide/existingError.js deleted file mode 100644 index fea8ad40045..00000000000 --- a/newtests/ide/existingError.js +++ /dev/null @@ -1 +0,0 @@ -var existingError: number = true; diff --git a/newtests/ide/fileWithWarning.js b/newtests/ide/fileWithWarning.js deleted file mode 100644 index 24236612053..00000000000 --- a/newtests/ide/fileWithWarning.js +++ /dev/null @@ -1,4 +0,0 @@ -// flowlint sketchy-null:warn -var x: ?boolean = true; -if (x) { -} diff --git a/newtests/ide/lazy/_flowconfig b/newtests/ide/lazy/_flowconfig deleted file mode 100644 index 77abf9d584d..00000000000 --- a/newtests/ide/lazy/_flowconfig +++ /dev/null @@ -1,12 +0,0 @@ -[ignore] -/ignored\.js - -[include] - -[libs] - -[lints] - -[options] -all=true -no_flowlib=true diff --git a/newtests/ide/lazy/_flowconfig_all_false b/newtests/ide/lazy/_flowconfig_all_false deleted file mode 100644 index 948729c02ee..00000000000 --- a/newtests/ide/lazy/_flowconfig_all_false +++ /dev/null @@ -1,11 +0,0 @@ -[ignore] - -[include] - -[libs] - -[lints] - -[options] -all=false -no_flowlib=true diff --git a/newtests/ide/lazy/cycleA.js b/newtests/ide/lazy/cycleA.js deleted file mode 100644 index 78e989c103c..00000000000 --- a/newtests/ide/lazy/cycleA.js +++ /dev/null @@ -1 +0,0 @@ -require('./cycleA'); diff --git a/newtests/ide/lazy/cycleB.js b/newtests/ide/lazy/cycleB.js deleted file mode 100644 index 78e989c103c..00000000000 --- a/newtests/ide/lazy/cycleB.js +++ /dev/null @@ -1 +0,0 @@ -require('./cycleA'); diff --git a/newtests/ide/lazy/dependency.js b/newtests/ide/lazy/dependency.js deleted file mode 100644 index 2d28723c560..00000000000 --- a/newtests/ide/lazy/dependency.js +++ /dev/null @@ -1,3 +0,0 @@ -export default 123; - -var dependencyError: string = 123; diff --git a/newtests/ide/lazy/dependent.js b/newtests/ide/lazy/dependent.js deleted file mode 100644 index e4930d08938..00000000000 --- a/newtests/ide/lazy/dependent.js +++ /dev/null @@ -1,3 +0,0 @@ -import focused from './focused'; - -var dependentError: string = 123; diff --git a/newtests/ide/lazy/errorsWithFlowPragma.js b/newtests/ide/lazy/errorsWithFlowPragma.js deleted file mode 100644 index 09b6e819739..00000000000 --- a/newtests/ide/lazy/errorsWithFlowPragma.js +++ /dev/null @@ -1,2 +0,0 @@ -// @flow -var x: string = 123; diff --git a/newtests/ide/lazy/errorsWithNoFlowPragma.js b/newtests/ide/lazy/errorsWithNoFlowPragma.js deleted file mode 100644 index 59856c35cf9..00000000000 --- a/newtests/ide/lazy/errorsWithNoFlowPragma.js +++ /dev/null @@ -1,2 +0,0 @@ -// @noflow -var x: string = 123; diff --git a/newtests/ide/lazy/focused.js b/newtests/ide/lazy/focused.js deleted file mode 100644 index 7563827fa49..00000000000 --- a/newtests/ide/lazy/focused.js +++ /dev/null @@ -1,5 +0,0 @@ -import dependency from './dependency'; - -var focusedError: string = 123; - -export default 'hello'; diff --git a/newtests/ide/lazy/focusedWithCyclicDependency.js b/newtests/ide/lazy/focusedWithCyclicDependency.js deleted file mode 100644 index a04ee077850..00000000000 --- a/newtests/ide/lazy/focusedWithCyclicDependency.js +++ /dev/null @@ -1,3 +0,0 @@ -require('./cycleA'); - -var x: string = 123; diff --git a/newtests/ide/lazy/ignored.js b/newtests/ide/lazy/ignored.js deleted file mode 100644 index 8ab1313d769..00000000000 --- a/newtests/ide/lazy/ignored.js +++ /dev/null @@ -1 +0,0 @@ -var x: string = 123; diff --git a/newtests/ide/lazy/otherDependent.js 
b/newtests/ide/lazy/otherDependent.js deleted file mode 100644 index c0d636dc302..00000000000 --- a/newtests/ide/lazy/otherDependent.js +++ /dev/null @@ -1,3 +0,0 @@ -import dependency from './dependency'; - -var otherDependentError: string = 123; diff --git a/newtests/ide/lazy/test.js b/newtests/ide/lazy/test.js deleted file mode 100644 index a223be34b8c..00000000000 --- a/newtests/ide/lazy/test.js +++ /dev/null @@ -1,2407 +0,0 @@ -/* - * @flow - * @lint-ignore-every LINEWRAP1 - */ - - -import {suite, test} from 'flow-dev-tools/src/test/Tester'; - -export default suite(({ - addCode, addFile, addFiles, removeFile, ideStart, ideNotification, flowCmd -}) => [ - test('Opening and closing ignored file', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - - addFile('ignored.js') - .waitAndVerifyNoIDEMessagesSinceStartOfStep(500) - .noNewErrors() - .because('The IDE has not opened ignored.js yet'), - - ideNotification('didOpen', 'ignored.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 50000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('The file is ignored'), - - ideNotification('didClose', 'ignored.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 50000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('Closing the file does not trigger recheck, just sends errors'), - - flowCmd(['status', '--strip-root']) - .stdout( - ` - No errors! - - The Flow server is currently in IDE lazy mode and is only checking 0/1 files. 
- To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Still no errors'), - ]).lazy('ide'), - - test('Opening and closing single file with no dependents or dependencies', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - - addCode('var x: string = 123') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .noNewErrors() - .because('The IDE has not opened test.js yet'), - - ideNotification('didOpen', 'test.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 8, - "offset": 20 - }, - "end": { - "line": 3, - "column": 13, - "offset": 26 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123", - "descr": "[2]", - "type": "Blame", - 
"loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 8, - "offset": 20 - }, - "end": { - "line": 3, - "column": 13, - "offset": 26 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .because('Opening the file triggers a recheck which sees the error'), - - ideNotification('didClose', 'test.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 8, - "offset": 20 - }, - "end": { - "line": 3, - "column": 13, - "offset": 26 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .because('Closing the file does not trigger recheck, just send errors'), - - flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------------- test.js:3:17 - - Cannot assign \`123\` to \`x\` because number [1] is incompatible with string [2]. - - test.js:3:17 - 3| var x: string = 123 - ^^^ [1] - - References: - test.js:3:8 - 3| var x: string = 123 - ^^^^^^ [2] - - - - Found 1 error - - The Flow server is currently in IDE lazy mode and is only checking 1/1 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Even though test.js is closed, it is still focused'), - - addCode('var anotherError: bool = 123') - .newErrors( - ` - test.js:5 - 5: var anotherError: bool = 123 - ^^^ Cannot assign \`123\` to \`anotherError\` because number [1] is incompatible with boolean [2]. 
- References: - 5: var anotherError: bool = 123 - ^^^ [1] - 5: var anotherError: bool = 123 - ^^^^ [2] - `, - ) - .because('Changes are still noticed'), - ]).lazy('ide'), - - // Somewhat similar to tests/quick-start-add-dependency - test('New dependent', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('focused.js', 'dependency.js', 'otherDependent.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('Nothing is open, so we just get the recheck start and end'), - ideNotification('didOpen', 'focused.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var dependencyError: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "dependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 31, - "offset": 51 - }, - "end": { - "line": 3, - "column": 33, - "offset": 54 - } - }, - "path": "dependency.js", - "line": 3, - "endline": 3, - "start": 31, - "end": 33 - } - ] - }, - { - "message": [ - { - "context": "var dependencyError: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "dependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 22, - "offset": 42 - }, - "end": { - "line": 3, - "column": 27, - "offset": 48 - } - }, - "path": "dependency.js", - "line": 3, - "endline": 3, - "start": 22, - "end": 27 - } - ] - } - ], - "message": [ - { - "context": "var dependencyError: string = 123;", - "descr": "Cannot assign `123` to `dependencyError` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "dependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 31, - "offset": 51 - }, - "end": { - "line": 3, - "column": 33, - "offset": 54 - } - }, - "path": "dependency.js", - "line": 3, - "endline": 3, - "start": 31, - "end": 33 - } - ] - } - ], - "passed": false - } - ] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var focusedError: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "focused.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 28, - "offset": 67 - }, - "end": { - "line": 3, - "column": 30, - "offset": 70 - } - }, - "path": "focused.js", - 
"line": 3, - "endline": 3, - "start": 28, - "end": 30 - } - ] - }, - { - "message": [ - { - "context": "var focusedError: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "focused.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 19, - "offset": 58 - }, - "end": { - "line": 3, - "column": 24, - "offset": 64 - } - }, - "path": "focused.js", - "line": 3, - "endline": 3, - "start": 19, - "end": 24 - } - ] - } - ], - "message": [ - { - "context": "var focusedError: string = 123;", - "descr": "Cannot assign `123` to `focusedError` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "focused.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 28, - "offset": 67 - }, - "end": { - "line": 3, - "column": 30, - "offset": 70 - } - }, - "path": "focused.js", - "line": 3, - "endline": 3, - "start": 28, - "end": 30 - } - ] - } - ], - "passed": false - } - ] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var dependencyError: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "dependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 31, - "offset": 51 - }, - "end": { - "line": 3, - "column": 33, - "offset": 54 - } - }, - "path": "dependency.js", - "line": 3, - "endline": 3, - "start": 31, - "end": 33 - } - ] - }, - { - "message": [ - { - "context": "var dependencyError: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "dependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 22, - "offset": 42 - }, - "end": { - "line": 3, - "column": 27, - "offset": 48 - } - }, - "path": "dependency.js", - "line": 3, - "endline": 3, - "start": 22, - "end": 27 - } - ] - } - ], - "message": [ - { - "context": "var dependencyError: string = 123;", - "descr": "Cannot assign `123` to `dependencyError` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "dependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 31, - "offset": 51 - }, - "end": { - "line": 3, - "column": 33, - "offset": 54 - } - }, - "path": "dependency.js", - "line": 3, - "endline": 3, - "start": 31, - "end": 33 - } - ] - }, - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var focusedError: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "focused.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 28, - "offset": 67 - }, - "end": { - "line": 3, - "column": 30, - "offset": 70 - } - }, - "path": "focused.js", - "line": 3, - "endline": 3, - "start": 28, - "end": 30 - } - ] - }, - { - "message": [ - { - "context": "var focusedError: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "focused.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 19, - "offset": 58 - }, - "end": { - "line": 3, - 
"column": 24, - "offset": 64 - } - }, - "path": "focused.js", - "line": 3, - "endline": 3, - "start": 19, - "end": 24 - } - ] - } - ], - "message": [ - { - "context": "var focusedError: string = 123;", - "descr": "Cannot assign `123` to `focusedError` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "focused.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 28, - "offset": 67 - }, - "end": { - "line": 3, - "column": 30, - "offset": 70 - } - }, - "path": "focused.js", - "line": 3, - "endline": 3, - "start": 28, - "end": 30 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .because( - 'Opening focused.js will cause a recheck and show the errors in focused.js and dependency.js' - ), - flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ).because('There are two errors now'), - - addFile('dependent.js') - .newErrors( - ` - dependent.js:3 - 3: var dependentError: string = 123; - ^^^ Cannot assign \`123\` to \`dependentError\` because number [1] is incompatible with string [2]. 
- References: - 3: var dependentError: string = 123; - ^^^ [1] - 3: var dependentError: string = 123; - ^^^^^^ [2] - `, - ) - .because('Adding a new dependency will correctly recheck that dependency'), - - ]).lazy('ide'), - - // Based on tests/quick-start - test('@flow and @noflow pragmas', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .ideNotification('didOpen', 'errors.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - }, - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFile('errorsWithFlowPragma.js', 'errors.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "errors.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 17, - "offset": 25 - }, - "end": { - "line": 2, - "column": 19, - "offset": 28 - } - }, - "path": "errors.js", - "line": 2, - "endline": 2, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "errors.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 8, - "offset": 16 - }, - "end": { - "line": 2, - "column": 13, - "offset": 22 - } - }, - "path": "errors.js", - "line": 2, - "endline": 2, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123;", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "errors.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 17, - "offset": 25 - }, - "end": { - "line": 2, - "column": 19, - "offset": 28 - } - }, - "path": "errors.js", - "line": 2, - "endline": 2, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "errors.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 17, - "offset": 25 - }, - "end": { - "line": 2, - "column": 19, - "offset": 28 - } - }, - "path": "errors.js", - "line": 2, - "endline": 2, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - 
"source": "errors.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 8, - "offset": 16 - }, - "end": { - "line": 2, - "column": 13, - "offset": 22 - } - }, - "path": "errors.js", - "line": 2, - "endline": 2, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123;", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "errors.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 17, - "offset": 25 - }, - "end": { - "line": 2, - "column": 19, - "offset": 28 - } - }, - "path": "errors.js", - "line": 2, - "endline": 2, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .newErrors( - ` - errors.js:2 - 2: var x: string = 123; - ^^^ Cannot assign \`123\` to \`x\` because number [1] is incompatible with string [2]. - References: - 2: var x: string = 123; - ^^^ [1] - 2: var x: string = 123; - ^^^^^^ [2] - `, - ) - .because('File is open and has @flow so we should get the error'), - addFile('errorsWithNoFlowPragma.js', 'errors.js') - .flowCmd(['status', '--strip-root']) - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .stdout( - ` - No errors! - - The Flow server is currently in IDE lazy mode and is only checking 0/1 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('File is open but has @noflow so we should no errors'), - addFile('errorsWithFlowPragma.js', 'errors.js') - .newErrors( - ` - errors.js:2 - 2: var x: string = 123; - ^^^ Cannot assign \`123\` to \`x\` because number [1] is incompatible with string [2]. 
- References: - 2: var x: string = 123; - ^^^ [1] - 2: var x: string = 123; - ^^^^^^ [2] - `, - ) - .because('Reverting back to @flow should show the error again'), - ]).lazy('ide').flowConfig('_flowconfig_all_false'), - - // Based on tests/quick-start-add-dependency-on-cycle - test('Open file cyclic dependency', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('cycleA.js', 'cycleB.js', 'focusedWithCyclicDependency.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - ideNotification('didOpen', 'focusedWithCyclicDependency.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "focusedWithCyclicDependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 38 - }, - "end": { - "line": 3, - "column": 19, - "offset": 41 - } - }, - "path": "focusedWithCyclicDependency.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "focusedWithCyclicDependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 8, - "offset": 29 - }, - "end": { - "line": 3, - "column": 13, - "offset": 35 - } - }, - "path": "focusedWithCyclicDependency.js", - "line": 3, - "endline": 3, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123;", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "focusedWithCyclicDependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 38 - }, - "end": { - "line": 3, - "column": 19, - "offset": 41 - } - }, - "path": "focusedWithCyclicDependency.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "focusedWithCyclicDependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 
17, - "offset": 38 - }, - "end": { - "line": 3, - "column": 19, - "offset": 41 - } - }, - "path": "focusedWithCyclicDependency.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "focusedWithCyclicDependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 8, - "offset": 29 - }, - "end": { - "line": 3, - "column": 13, - "offset": 35 - } - }, - "path": "focusedWithCyclicDependency.js", - "line": 3, - "endline": 3, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123;", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "focusedWithCyclicDependency.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 38 - }, - "end": { - "line": 3, - "column": 19, - "offset": 41 - } - }, - "path": "focusedWithCyclicDependency.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - } - ], - ), - flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ------------------------------------------------------------------------------ focusedWithCyclicDependency.js:3:17 - - Cannot assign \`123\` to \`x\` because number [1] is incompatible with string [2]. - - focusedWithCyclicDependency.js:3:17 - 3| var x: string = 123; - ^^^ [1] - - References: - focusedWithCyclicDependency.js:3:8 - 3| var x: string = 123; - ^^^^^^ [2] - - - - Found 1 error - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Nothing should blow up and we should see the single error'), - ]).lazy('ide'), - - // Based on tests/quick-start-check-contents - test('check-contents should pull in dependency', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('focused.js', 'dependency.js', 'otherDependent.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - No errors! - - The Flow server is currently in IDE lazy mode and is only checking 0/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('No files are open yet'), - flowCmd(['check-contents', 'focused.js'], 'focused.js') - .stdout( - ` - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with - string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 1 error - - `, - ) - .because('check-contents will report the error in the file it checks'), - flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. 
- - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - - Found 1 error - - The Flow server is currently in IDE lazy mode and is only checking 1/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because( - 'As a side effect of check-contents, dependency.js is added to the checkset. At the moment we do not prune the checked set.' - ), - ]).lazy('ide'), - - // Based on tests/quick-start-delete-dependency - test('Delete dependency', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .ideNotification('didOpen', 'focused.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - }, - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('focused.js', 'dependency.js', 'otherDependent.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Two errors: one in each file'), - removeFile('dependency.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - Error -------------------------------------------------------------------------------------------------- focused.js:1:24 - - Cannot resolve module \`./dependency\`. - - 1| import dependency from './dependency'; - ^^^^^^^^^^^^^^ - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 1/3 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Error in dependency.js disappears and we get missing module error'), - addFile('dependency.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. 
- - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Restoring the file takes us back to the original two errors'), - ]).lazy('ide'), - - test('Open a dependency', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .ideNotification('didOpen', 'focused.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - }, - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('focused.js', 'dependency.js', 'otherDependent.js') - .newErrors( - ` - dependency.js:3 - 3: var dependencyError: string = 123; - ^^^ Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - References: - 3: var dependencyError: string = 123; - ^^^ [1] - 3: var dependencyError: string = 123; - ^^^^^^ [2] - - focused.js:3 - 3: var focusedError: string = 123; - ^^^ Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - References: - 3: var focusedError: string = 123; - ^^^ [1] - 3: var focusedError: string = 123; - ^^^^^^ [2] - `, - ) - .because('Other dependent is a dependent of a dependency, so is not checked'), - ideNotification('didOpen', 'dependency.js') - // Unfortunately ideMessagesSinceStartOfStep doesn't work here since the - // order of the streamed errors isn't fixed :( - .sleep(500), - flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - Error ------------------------------------------------------------------------------------------- otherDependent.js:3:35 - - Cannot assign \`123\` to \`otherDependentError\` because number [1] is incompatible with string [2]. 
- - otherDependent.js:3:35 - 3| var otherDependentError: string = 123; - ^^^ [1] - - References: - otherDependent.js:3:26 - 3| var otherDependentError: string = 123; - ^^^^^^ [2] - - - - Found 3 errors - - The Flow server is currently in IDE lazy mode and is only checking 3/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('Focusing on a dependency now checks its dependents'), - ]).lazy('ide'), - - test('Remove and restore an open file', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .ideNotification('didOpen', 'focused.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - }, - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('focused.js', 'dependency.js', 'otherDependent.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ), - removeFile('focused.js') - .addFile('focused.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. 
- To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ) - .because('We should be back at our starting state'), - ]).lazy('ide'), - - test('flow force-recheck --focus', [ - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .ideNotification('didOpen', 'focused.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - }, - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ), - addFiles('focused.js', 'dependency.js', 'otherDependent.js') - .flowCmd(['status', '--strip-root']) - .stdout( - ` - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - - Found 2 errors - - The Flow server is currently in IDE lazy mode and is only checking 2/4 files. - To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ).because("otherDependent's errors are ignored due to lazy mode"), - flowCmd(['force-recheck', '--focus', 'dependency.js']) - .flowCmd(['status', '--strip-root']) - .stdout( - ` - - Error ----------------------------------------------------------------------------------------------- dependency.js:3:31 - - Cannot assign \`123\` to \`dependencyError\` because number [1] is incompatible with string [2]. - - dependency.js:3:31 - 3| var dependencyError: string = 123; - ^^^ [1] - - References: - dependency.js:3:22 - 3| var dependencyError: string = 123; - ^^^^^^ [2] - - - Error -------------------------------------------------------------------------------------------------- focused.js:3:28 - - Cannot assign \`123\` to \`focusedError\` because number [1] is incompatible with string [2]. - - focused.js:3:28 - 3| var focusedError: string = 123; - ^^^ [1] - - References: - focused.js:3:19 - 3| var focusedError: string = 123; - ^^^^^^ [2] - - - Error ------------------------------------------------------------------------------------------- otherDependent.js:3:35 - - Cannot assign \`123\` to \`otherDependentError\` because number [1] is incompatible with string [2]. - - otherDependent.js:3:35 - 3| var otherDependentError: string = 123; - ^^^ [1] - - References: - otherDependent.js:3:26 - 3| var otherDependentError: string = 123; - ^^^^^^ [2] - - - - Found 3 errors - - The Flow server is currently in IDE lazy mode and is only checking 3/4 files. 
- To learn more, visit flow.org/en/docs/lang/lazy-modes - - `, - ).because('force-recheck --focus promotes dependency to focused, so we see the error in otherDependent'), - ]).lazy('ide'), -]); diff --git a/newtests/ide/test.js b/newtests/ide/test.js deleted file mode 100644 index 9beaabac862..00000000000 --- a/newtests/ide/test.js +++ /dev/null @@ -1,1331 +0,0 @@ -/* - * @flow - * @lint-ignore-every LINEWRAP1 - */ - - -import {suite, test} from 'flow-dev-tools/src/test/Tester'; - -export default suite(({ideStart, ideNotification, ideRequest, addCode, addFile}) => [ - test('The initial subscribe does not send existing errors', [ - addCode('var x: string = 123;') - .newErrors( - ` - test.js:3 - 3: var x: string = 123; - ^^^ Cannot assign \`123\` to \`x\` because number [1] is incompatible with string [2]. - References: - 3: var x: string = 123; - ^^^ [1] - 3: var x: string = 123; - ^^^^^^ [2] - `, - ), - ideStart({mode: 'legacy'}) - .waitAndVerifyNoIDEMessagesSinceStartOfStep(500) - .because('We are connected, but not subscribed'), - ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - }, - { - "message": [ - { - "context": "var x: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 8, - "offset": 20 - }, - "end": { - "line": 3, - "column": 13, - "offset": 26 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 8, - "end": 13 - } - ] - } - ], - "message": [ - { - "context": "var x: string = 123;", - "descr": "Cannot assign `123` to `x` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 17, - "offset": 29 - }, - "end": { - "line": 3, - "column": 19, - "offset": 32 - } - }, - "path": "test.js", - "line": 3, - "endline": 3, - "start": 17, - "end": 19 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .because('Subscribing sends all the existing errors'), - ]), - - test('Recheck behavior', [ - addFile('existingError.js') - .ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var existingError: number = true;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": 
"SourceFile", - "start": { - "line": 1, - "column": 29, - "offset": 28 - }, - "end": { - "line": 1, - "column": 32, - "offset": 32 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 29, - "end": 32 - } - ] - }, - { - "message": [ - { - "context": "var existingError: number = true;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 20, - "offset": 19 - }, - "end": { - "line": 1, - "column": 25, - "offset": 25 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 20, - "end": 25 - } - ] - } - ], - "message": [ - { - "context": "var existingError: number = true;", - "descr": "Cannot assign `true` to `existingError` because boolean [1] is incompatible with number [2].", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 29, - "offset": 28 - }, - "end": { - "line": 1, - "column": 32, - "offset": 32 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 29, - "end": 32 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .newErrors( - ` - existingError.js:1 - 1: var existingError: number = true; - ^^^^ Cannot assign \`true\` to \`existingError\` because boolean [1] is incompatible with number [2]. - References: - 1: var existingError: number = true; - ^^^^ [1] - 1: var existingError: number = true; - ^^^^^^ [2] - `, - ), - addCode('var notAnError: number = 123;') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var existingError: number = true;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 29, - "offset": 28 - }, - "end": { - "line": 1, - "column": 32, - "offset": 32 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 29, - "end": 32 - } - ] - }, - { - "message": [ - { - "context": "var existingError: number = true;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 20, - "offset": 19 - }, - "end": { - "line": 1, - "column": 25, - "offset": 25 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 20, - "end": 25 - } - ] - } - ], - "message": [ - { - "context": "var existingError: number = true;", - "descr": "Cannot assign `true` to `existingError` because boolean [1] is incompatible with number [2].", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 29, - "offset": 28 - }, - "end": { - "line": 1, - "column": 32, - "offset": 32 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 29, - "end": 32 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .because('No errors should be streamed during the recheck'), - addCode('var newError: string = 123;') - 
.waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "startRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var newError: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 5, - "column": 24, - "offset": 67 - }, - "end": { - "line": 5, - "column": 26, - "offset": 70 - } - }, - "path": "test.js", - "line": 5, - "endline": 5, - "start": 24, - "end": 26 - } - ] - }, - { - "message": [ - { - "context": "var newError: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 5, - "column": 15, - "offset": 58 - }, - "end": { - "line": 5, - "column": 20, - "offset": 64 - } - }, - "path": "test.js", - "line": 5, - "endline": 5, - "start": 15, - "end": 20 - } - ] - } - ], - "message": [ - { - "context": "var newError: string = 123;", - "descr": "Cannot assign `123` to `newError` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 5, - "column": 24, - "offset": 67 - }, - "end": { - "line": 5, - "column": 26, - "offset": 70 - } - }, - "path": "test.js", - "line": 5, - "endline": 5, - "start": 24, - "end": 26 - } - ] - } - ], - "passed": false - } - ] - }, - { - "method": "endRecheck", - "params": [] - }, - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var existingError: number = true;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 29, - "offset": 28 - }, - "end": { - "line": 1, - "column": 32, - "offset": 32 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 29, - "end": 32 - } - ] - }, - { - "message": [ - { - "context": "var existingError: number = true;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 20, - "offset": 19 - }, - "end": { - "line": 1, - "column": 25, - "offset": 25 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 20, - "end": 25 - } - ] - } - ], - "message": [ - { - "context": "var existingError: number = true;", - "descr": "Cannot assign `true` to `existingError` because boolean [1] is incompatible with number [2].", - "type": "Blame", - "loc": { - "source": "existingError.js", - "type": "SourceFile", - "start": { - "line": 1, - "column": 29, - "offset": 28 - }, - "end": { - "line": 1, - "column": 32, - "offset": 32 - } - }, - "path": "existingError.js", - "line": 1, - "endline": 1, - "start": 29, - "end": 32 - } - ] - }, - { - "kind": "infer", - "level": "error", - "suppressions": [], - "extra": [ - { - "message": [ - { - 
"context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var newError: string = 123;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 5, - "column": 24, - "offset": 67 - }, - "end": { - "line": 5, - "column": 26, - "offset": 70 - } - }, - "path": "test.js", - "line": 5, - "endline": 5, - "start": 24, - "end": 26 - } - ] - }, - { - "message": [ - { - "context": "var newError: string = 123;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 5, - "column": 15, - "offset": 58 - }, - "end": { - "line": 5, - "column": 20, - "offset": 64 - } - }, - "path": "test.js", - "line": 5, - "endline": 5, - "start": 15, - "end": 20 - } - ] - } - ], - "message": [ - { - "context": "var newError: string = 123;", - "descr": "Cannot assign `123` to `newError` because number [1] is incompatible with string [2].", - "type": "Blame", - "loc": { - "source": "test.js", - "type": "SourceFile", - "start": { - "line": 5, - "column": 24, - "offset": 67 - }, - "end": { - "line": 5, - "column": 26, - "offset": 70 - } - }, - "path": "test.js", - "line": 5, - "endline": 5, - "start": 24, - "end": 26 - } - ] - } - ], - "passed": false - } - ] - } - ], - ) - .because( - 'We get startRecheck when recheck starts and endRecheck when it ends.' - + ' During recheck, we get incremental results.' - + ' After recheck, we get total results.' - ), - ]), - - test('autocomplete', [ - ideStart({mode:'legacy'}) - .ideRequestAndWaitUntilResponse('autocomplete', 'test.js', 1, 12, "({x: 123}).;") - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 0, // no need for timeout here since we already waited for the response - [ - { - "method": "autocomplete", - "result": { - "result": [ - { - "name": "x", - "type": "number", - "func_details": null, - "path": "test.js", - "line": 1, - "endline": 1, - "start": 6, - "end": 8 - } - ] - } - } - ], - ), - ]), - - /* This is a regression test for a bug where we would drop new connections - * that appeared during a recheck */ - test('connect during recheck', [ - // For some reason this order of actions triggered the bug - ideStart({mode:'legacy'}) - .addCode('var x = 123') - .waitAndVerifyNoIDEMessagesSinceStartOfStep(100) - .because('Starting the IDE does not fire any messages'), - ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('There are no errors'), - ]), - - - test('didOpen before subscribe', [ - addFile('fileWithWarning.js'), - ideStart({mode:'legacy'}) - .ideNotification('didOpen', 'fileWithWarning.js') - .waitAndVerifyNoIDEMessagesSinceStartOfStep(500) - .because('We have not subscribed yet, so there is no response on open'), - - ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "lint", - "level": "warning", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - 
"message": [ - { - "context": "var x: ?boolean = true;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 9, - "offset": 38 - }, - "end": { - "line": 2, - "column": 15, - "offset": 45 - } - }, - "path": "fileWithWarning.js", - "line": 2, - "endline": 2, - "start": 9, - "end": 15 - } - ] - }, - { - "message": [ - { - "context": "var x: ?boolean = true;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 8, - "offset": 37 - }, - "end": { - "line": 2, - "column": 15, - "offset": 45 - } - }, - "path": "fileWithWarning.js", - "line": 2, - "endline": 2, - "start": 8, - "end": 15 - } - ] - } - ], - "message": [ - { - "context": "if (x) {", - "descr": "Sketchy null check on boolean [1] which is potentially false. Perhaps you meant to check for null or undefined [2]? (`sketchy-null-bool`)", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 5, - "offset": 58 - }, - "end": { - "line": 3, - "column": 5, - "offset": 59 - } - }, - "path": "fileWithWarning.js", - "line": 3, - "endline": 3, - "start": 5, - "end": 5 - } - ] - } - ], - "passed": true - } - ] - } - ], - ) - .because('We report warnings on subscribe for the open file'), - ]), - - test('didOpen after subscribe', [ - addFile('fileWithWarning.js'), - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('We do not report warnings in files that are not open'), - - ideNotification('didOpen', 'fileWithWarning.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "lint", - "level": "warning", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: ?boolean = true;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 9, - "offset": 38 - }, - "end": { - "line": 2, - "column": 15, - "offset": 45 - } - }, - "path": "fileWithWarning.js", - "line": 2, - "endline": 2, - "start": 9, - "end": 15 - } - ] - }, - { - "message": [ - { - "context": "var x: ?boolean = true;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 8, - "offset": 37 - }, - "end": { - "line": 2, - "column": 15, - "offset": 45 - } - }, - "path": "fileWithWarning.js", - "line": 2, - "endline": 2, - "start": 8, - "end": 15 - } - ] - } - ], - "message": [ - { - "context": "if (x) {", - "descr": "Sketchy null check on boolean [1] which is potentially false. Perhaps you meant to check for null or undefined [2]? 
(`sketchy-null-bool`)", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 5, - "offset": 58 - }, - "end": { - "line": 3, - "column": 5, - "offset": 59 - } - }, - "path": "fileWithWarning.js", - "line": 3, - "endline": 3, - "start": 5, - "end": 5 - } - ] - } - ], - "passed": true - } - ] - } - ], - ) - .because('We should receive the warning when we open the file'), - - ideNotification('didOpen', 'fileWithWarning.js') - .waitAndVerifyNoIDEMessagesSinceStartOfStep(500) - .because( - 'When we open an already open file, we dont get the current errors', - ), - ]), - - test('didClose before subscribe', [ - addFile('fileWithWarning.js'), - ideStart({mode:'legacy'}) - .ideNotification('didOpen', 'fileWithWarning.js') - .ideNotification('didClose', 'fileWithWarning.js') - .waitAndVerifyNoIDEMessagesSinceStartOfStep(500) - .because( - 'We have not subscribed yet, so there is no response on open or close', - ), - - ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('We closed the file, so we dont report the warning'), - ]), - - test('didClose after subscribe', [ - addFile('fileWithWarning.js'), - ideStart({mode:'legacy'}) - .ideNotification('subscribeToDiagnostics') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('Subscribing gives us the current errors'), - - ideNotification('didOpen', 'fileWithWarning.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [ - { - "kind": "lint", - "level": "warning", - "suppressions": [], - "extra": [ - { - "message": [ - { - "context": null, - "descr": "References:", - "type": "Blame", - "path": "", - "line": 0, - "endline": 0, - "start": 1, - "end": 0 - } - ] - }, - { - "message": [ - { - "context": "var x: ?boolean = true;", - "descr": "[1]", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 9, - "offset": 38 - }, - "end": { - "line": 2, - "column": 15, - "offset": 45 - } - }, - "path": "fileWithWarning.js", - "line": 2, - "endline": 2, - "start": 9, - "end": 15 - } - ] - }, - { - "message": [ - { - "context": "var x: ?boolean = true;", - "descr": "[2]", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 2, - "column": 8, - "offset": 37 - }, - "end": { - "line": 2, - "column": 15, - "offset": 45 - } - }, - "path": "fileWithWarning.js", - "line": 2, - "endline": 2, - "start": 8, - "end": 15 - } - ] - } - ], - "message": [ - { - "context": "if (x) {", - "descr": "Sketchy null check on boolean [1] which is potentially false. Perhaps you meant to check for null or undefined [2]? 
(`sketchy-null-bool`)", - "type": "Blame", - "loc": { - "source": "fileWithWarning.js", - "type": "SourceFile", - "start": { - "line": 3, - "column": 5, - "offset": 58 - }, - "end": { - "line": 3, - "column": 5, - "offset": 59 - } - }, - "path": "fileWithWarning.js", - "line": 3, - "endline": 3, - "start": 5, - "end": 5 - } - ] - } - ], - "passed": true - } - ] - } - ], - ) - .because('When we open a new file we get the current errors'), - - ideNotification('didClose', 'fileWithWarning.js') - .waitAndVerifyAllIDEMessagesContentSinceStartOfStep( - 10000, - [ - { - "method": "diagnosticsNotification", - "params": [ - { - "flowVersion": "", - "jsonVersion": "1", - "errors": [], - "passed": true - } - ] - } - ], - ) - .because('When we close a new file we get the current errors'), - - ideNotification('didClose', 'fileWithWarning.js') - .waitAndVerifyNoIDEMessagesSinceStartOfStep(500) - .because( - 'When we close an already closed file, we dont get the current errors', - ), - ]), - - test('Stop the flow ide command without killing the server', [ - ideStart({mode: 'legacy'}), - addCode('var x = 123') - .ideStop() - .sleep(500), - addCode('var y: string = 123') - .newErrors( - ` - test.js:5 - 5: var y: string = 123 - ^^^ Cannot assign \`123\` to \`y\` because number [1] is incompatible with string [2]. - References: - 5: var y: string = 123 - ^^^ [1] - 5: var y: string = 123 - ^^^^^^ [2] - `, - ) - .because('Stopping the flow ide command used to kill the server accidentally'), - ]), -]); diff --git a/newtests/import_errors/test.js b/newtests/import_errors/test.js index 4ee3299c635..515c235ca2c 100644 --- a/newtests/import_errors/test.js +++ b/newtests/import_errors/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/import_type_shorthand/test.js b/newtests/import_type_shorthand/test.js index ba47226141d..cf0b1c22fca 100644 --- a/newtests/import_type_shorthand/test.js +++ b/newtests/import_type_shorthand/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/jsx_pragma/test.js b/newtests/jsx_pragma/test.js index 0a66d2bb5d4..f9ba45b001b 100644 --- a/newtests/jsx_pragma/test.js +++ b/newtests/jsx_pragma/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -23,7 +22,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:3 3: // @jsx (x)=>x - ^^^^^^ Invalid \`@jsx\` declaration. Should have the form \`@jsx LeftHandSideExpression\` with no spaces. Parse error: Unexpected token =>. + ^^^^^^ Invalid \`@jsx\` declaration. Should have the form \`@jsx LeftHandSideExpression\` with no spaces. Parse error: Unexpected token \`=>\`, expected the end of input. `, ), ]), @@ -37,7 +36,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:5 5: (x)=>x - ^^^^^^ Invalid \`@jsx\` declaration. Should have the form \`@jsx LeftHandSideExpression\` with no spaces. Parse error: Unexpected token =>. + ^^^^^^ Invalid \`@jsx\` declaration. Should have the form \`@jsx LeftHandSideExpression\` with no spaces. Parse error: Unexpected token \`=>\`, expected the end of input. 
`, ), ]), diff --git a/newtests/ls_command/test.js b/newtests/ls_command/test.js index ef616a3c3d4..a5d29b2e39d 100644 --- a/newtests/ls_command/test.js +++ b/newtests/ls_command/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/lsp/code-action-disabled/_flowconfig b/newtests/lsp/code-action-disabled/_flowconfig new file mode 100644 index 00000000000..d402cfe696c --- /dev/null +++ b/newtests/lsp/code-action-disabled/_flowconfig @@ -0,0 +1,4 @@ +[options] +experimental.types_first=true +experimental.well_formed_exports=true +all=true diff --git a/newtests/lsp/code-action-disabled/error1.js.ignored b/newtests/lsp/code-action-disabled/error1.js.ignored new file mode 100644 index 00000000000..a852799e636 --- /dev/null +++ b/newtests/lsp/code-action-disabled/error1.js.ignored @@ -0,0 +1,7 @@ +// @flow +export function fred(a, b: string) { + const x = 15; + return x; +} + +export var barney = {a: 1, b: fred} diff --git a/newtests/lsp/code-action-disabled/test.js b/newtests/lsp/code-action-disabled/test.js new file mode 100644 index 00000000000..25ba978ea9e --- /dev/null +++ b/newtests/lsp/code-action-disabled/test.js @@ -0,0 +1,111 @@ +/* + * @flow + * @format + */ + +import {suite, test} from 'flow-dev-tools/src/test/Tester'; + +export default suite( + ({ + lspStartAndConnect, + lspStart, + lspRequest, + lspInitializeParams, + lspRequestAndWaitUntilResponse, + addFile, + lspIgnoreStatusAndCancellation, + }) => [ + test('textDocument/codeAction #0', [ + addFile('error1.js.ignored', 'error1.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/codeAction', { + textDocument: { + uri: 'error1.js', + }, + range: { + start: { + line: 0, + character: 1, + }, + end: { + line: 0, + character: 2, + }, + }, + context: { + diagnostics: [], + }, + }).verifyAllLSPMessagesInStep( + [`textDocument/codeAction{[]}`], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/codeAction #1', [ + addFile('error1.js.ignored', 'error1.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/codeAction', { + textDocument: { + uri: 'error1.js', + }, + range: { + start: { + line: 1, + character: 21, + }, + end: { + line: 1, + character: 22, + }, + }, + context: { + diagnostics: [ + { + range: { + start: { + line: 1, + character: 21, + }, + end: { + line: 1, + character: 22, + }, + }, + severity: 1, + code: 'InferError', + source: 'Flow', + message: 'Missing type annotation for `a`.', + }, + ], + }, + }).verifyAllLSPMessagesInStep( + [`textDocument/codeAction{[]}`], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/codeAction #2', [ + addFile('error1.js.ignored', 'error1.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/codeAction', { + textDocument: { + uri: 'error1.js', + }, + range: { + start: { + line: 6, + character: 11, + }, + end: { + line: 6, + character: 17, + }, + }, + context: { + diagnostics: [], + }, + }).verifyAllLSPMessagesInStep( + [`textDocument/codeAction{[]}`], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + ], +); diff --git a/newtests/lsp/code-action/_flowconfig b/newtests/lsp/code-action/_flowconfig new file mode 100644 index 00000000000..1da75b27cb8 --- /dev/null +++ b/newtests/lsp/code-action/_flowconfig @@ -0,0 +1,5 @@ +[options] +experimental.types_first=true +experimental.well_formed_exports=true +experimental.lsp.code_actions=true +all=true 
diff --git a/newtests/lsp/code-action/error1.js.ignored b/newtests/lsp/code-action/error1.js.ignored new file mode 100644 index 00000000000..a852799e636 --- /dev/null +++ b/newtests/lsp/code-action/error1.js.ignored @@ -0,0 +1,7 @@ +// @flow +export function fred(a, b: string) { + const x = 15; + return x; +} + +export var barney = {a: 1, b: fred} diff --git a/newtests/lsp/code-action/test.js b/newtests/lsp/code-action/test.js new file mode 100644 index 00000000000..9e315c2176b --- /dev/null +++ b/newtests/lsp/code-action/test.js @@ -0,0 +1,173 @@ +/* + * @flow + * @format + */ + +import {suite, test} from 'flow-dev-tools/src/test/Tester'; + +export default suite( + ({ + lspStartAndConnect, + lspStart, + lspRequest, + lspInitializeParams, + lspRequestAndWaitUntilResponse, + addFile, + lspIgnoreStatusAndCancellation, + }) => [ + test('textDocument/codeAction #0', [ + addFile('error1.js.ignored', 'error1.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/codeAction', { + textDocument: { + uri: 'error1.js', + }, + range: { + start: { + line: 0, + character: 1, + }, + end: { + line: 0, + character: 2, + }, + }, + context: { + diagnostics: [], + }, + }).verifyAllLSPMessagesInStep( + [`textDocument/codeAction{[]}`], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/codeAction #1', [ + addFile('error1.js.ignored', 'error1.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/codeAction', { + textDocument: { + uri: 'error1.js', + }, + range: { + start: { + line: 1, + character: 21, + }, + end: { + line: 1, + character: 22, + }, + }, + context: { + diagnostics: [ + { + range: { + start: { + line: 1, + character: 21, + }, + end: { + line: 1, + character: 22, + }, + }, + severity: 1, + code: 'InferError', + source: 'Flow', + message: 'Missing type annotation for `a`.', + }, + ], + }, + }).verifyAllLSPMessagesInStep( + [ + `textDocument/codeAction{${JSON.stringify([ + { + title: 'insert type annotation', + kind: 'quickfix', + diagnostics: [ + { + range: { + start: {line: 1, character: 21}, + end: {line: 1, character: 22}, + }, + severity: 1, + code: 'InferError', + source: 'Flow', + message: 'Missing type annotation for `a`.', + relatedInformation: [], + relatedLocations: [], + }, + ], + edit: { + changes: { + 'error1.js': [ + { + range: { + start: {line: 1, character: 22}, + end: {line: 1, character: 22}, + }, + newText: ': any', + }, + ], + }, + }, + }, + ])}}`, + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/codeAction #2', [ + addFile('error1.js.ignored', 'error1.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/codeAction', { + textDocument: { + uri: 'error1.js', + }, + range: { + start: { + line: 6, + character: 11, + }, + end: { + line: 6, + character: 17, + }, + }, + context: { + diagnostics: [], + }, + }).verifyAllLSPMessagesInStep( + [ + `textDocument/codeAction{${JSON.stringify([ + { + title: 'insert type annotation', + kind: 'quickfix', + diagnostics: [], + edit: { + changes: { + 'error1.js': [ + { + range: { + start: { + line: 6, + character: 17, + }, + end: { + line: 6, + character: 17, + }, + }, + newText: + ': {a: number, b: (a: any, b: string) => number}', + }, + ], + }, + }, + }, + ])}}`, + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + ], +); diff --git a/newtests/lsp/queries/completion.js b/newtests/lsp/completion/completion.js similarity index 
100% rename from newtests/lsp/queries/completion.js rename to newtests/lsp/completion/completion.js diff --git a/newtests/lsp/completion/jsx.js b/newtests/lsp/completion/jsx.js new file mode 100644 index 00000000000..b3a252a165c --- /dev/null +++ b/newtests/lsp/completion/jsx.js @@ -0,0 +1,15 @@ +// @flow + +declare var React: { + createElement: any +}; + +type Props = {a :number} + +class C { + props: Props +} + // <- space +// <- space + +C. diff --git a/newtests/lsp/completion/kind.js b/newtests/lsp/completion/kind.js new file mode 100644 index 00000000000..895979879a4 --- /dev/null +++ b/newtests/lsp/completion/kind.js @@ -0,0 +1,15 @@ +// @flow + +// Tests different auto complete items use the correct `kind`. +// In VSCode, for example, this includes the autocomplete +// icons used. + +let aNumber: number = 10; +class aClass { }; +interface anInterface {}; +type aUnion = 'a' | 'b'; +let aFunction = () => null; + +function foo() { + const x = 15; +} diff --git a/newtests/lsp/completion/params.js b/newtests/lsp/completion/params.js new file mode 100644 index 00000000000..778ebc00dc3 --- /dev/null +++ b/newtests/lsp/completion/params.js @@ -0,0 +1,11 @@ +// @flow + +// Tests different auto complete items use the correct `kind`. +// In VSCode, for example, this includes the autocomplete +// icons used. + +let aFunction = (arg1: number, arg2: string) => null; + +function foo() { + const x = 15; +} diff --git a/newtests/lsp/completion/test.js b/newtests/lsp/completion/test.js new file mode 100644 index 00000000000..beab9313633 --- /dev/null +++ b/newtests/lsp/completion/test.js @@ -0,0 +1,552 @@ +/* + * @flow + * @format + */ + +import {suite, test} from 'flow-dev-tools/src/test/Tester'; + +export default suite( + ({ + lspStartAndConnect, + lspStart, + lspRequest, + lspInitializeParams, + lspRequestAndWaitUntilResponse, + addFile, + lspIgnoreStatusAndCancellation, + }) => [ + test('textDocument/completion', [ + addFile('completion.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'completion.js'}, + position: {line: 10, character: 15}, // statement position + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'x', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + { + label: 'fred', + kind: 3, + detail: '(a: number, b: string) => number', + inlineDetail: '(a: number, b: string)', + itemType: 'number', + insertTextFormat: 1, + }, + { + label: 'exports', + kind: 6, + detail: '{||}', + inlineDetail: '{||}', + insertTextFormat: 1, + }, + { + label: 'b', + kind: 6, + detail: 'string', + inlineDetail: 'string', + insertTextFormat: 1, + }, + { + label: 'a', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + { + label: 'this', + kind: 6, + detail: 'empty', + inlineDetail: 'empty', + insertTextFormat: 1, + }, + { + label: 'super', + kind: 6, + detail: 'typeof Object.prototype', + inlineDetail: 'typeof Object.prototype', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion', [ + addFile('kind.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'kind.js'}, + position: {line: 13, character: 15}, + context: {triggerKind: 1}, + }).verifyAllLSPMessagesInStep( 
+ [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'x', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + { + label: 'foo', + kind: 3, + detail: '() => void', + inlineDetail: '()', + itemType: 'void', + insertTextFormat: 1, + }, + { + label: 'exports', + kind: 6, + detail: '{||}', + inlineDetail: '{||}', + insertTextFormat: 1, + }, + { + label: 'anInterface', + kind: 8, + detail: 'interface anInterface', + inlineDetail: 'interface anInterface', + insertTextFormat: 1, + }, + { + label: 'aUnion', + kind: 13, + detail: 'type aUnion = "a" | "b"', + inlineDetail: 'type aUnion = "a" | "b"', + insertTextFormat: 1, + }, + { + label: 'aNumber', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + { + label: 'aFunction', + kind: 3, + detail: '() => null', + inlineDetail: '()', + itemType: 'null', + insertTextFormat: 1, + }, + { + label: 'aClass', + kind: 7, + detail: 'class aClass', + inlineDetail: 'class aClass', + insertTextFormat: 1, + }, + { + label: 'this', + kind: 6, + detail: 'empty', + inlineDetail: 'empty', + insertTextFormat: 1, + }, + { + label: 'super', + kind: 6, + detail: 'typeof Object.prototype', + inlineDetail: 'typeof Object.prototype', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion', [ + addFile('params.js'), + lspStartAndConnect(6000, { + ...lspInitializeParams, + capabilities: { + ...lspInitializeParams.capabilities, + textDocument: { + ...lspInitializeParams.capabilities.textDocument, + completion: { + completionItem: { + // snippet support needs to be enabled. 
+ snippetSupport: true, + }, + }, + }, + }, + }), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'params.js'}, + position: {line: 9, character: 15}, + context: {triggerKind: 1}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'x', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + { + label: 'foo', + kind: 3, + detail: '() => void', + inlineDetail: '()', + itemType: 'void', + insertTextFormat: 2, + textEdit: { + range: { + start: {line: 9, character: 15}, + end: {line: 9, character: 15}, + }, + newText: 'foo()', + }, + }, + { + label: 'exports', + kind: 6, + detail: '{||}', + inlineDetail: '{||}', + insertTextFormat: 1, + }, + { + label: 'aFunction', + kind: 3, + detail: '(arg1: number, arg2: string) => null', + inlineDetail: '(arg1: number, arg2: string)', + itemType: 'null', + insertTextFormat: 2, + textEdit: { + range: { + start: {line: 9, character: 15}, + end: {line: 9, character: 15}, + }, + newText: 'aFunction(${1:arg1}, ${2:arg2})', + }, + }, + { + label: 'this', + kind: 6, + detail: 'empty', + inlineDetail: 'empty', + insertTextFormat: 1, + }, + { + label: 'super', + kind: 6, + detail: 'typeof Object.prototype', + inlineDetail: 'typeof Object.prototype', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion triggered by space in jsx', [ + addFile('jsx.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'jsx.js'}, + position: {line: 12, character: 4}, + context: {triggerKind: 2, triggerCharacter: ' '}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'a', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion triggered by space outside of jsx', [ + addFile('jsx.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'jsx.js'}, + position: {line: 11, character: 1}, + context: {triggerKind: 2, triggerCharacter: ' '}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion invoked outside of jsx', [ + addFile('jsx.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'jsx.js'}, + position: {line: 11, character: 1}, + context: {triggerKind: 1}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'exports', + kind: 6, + detail: '{||}', + inlineDetail: '{||}', + insertTextFormat: 1, + }, + { + label: 'React', + kind: 6, + detail: '{createElement: any}', + inlineDetail: '{createElement: any}', + insertTextFormat: 1, + }, + { + label: 'Props', + kind: 13, + detail: 'type Props = {a: number}', + inlineDetail: 'type Props = {a: number}', + 
insertTextFormat: 1, + }, + { + label: 'C', + kind: 7, + detail: 'class C', + inlineDetail: 'class C', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion invoked in jsx', [ + addFile('jsx.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'jsx.js'}, + position: {line: 12, character: 4}, + context: {triggerKind: 1}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'a', + kind: 6, + detail: 'number', + inlineDetail: 'number', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion triggered by dot in jsx', [ + addFile('jsx.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'jsx.js'}, + position: {line: 13, character: 3}, + context: {triggerKind: 2, triggerCharacter: '.'}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'hasOwnProperty', + kind: 3, + detail: '(prop: mixed) => boolean', + inlineDetail: '(prop: mixed)', + itemType: 'boolean', + insertTextFormat: 1, + }, + { + label: 'isPrototypeOf', + kind: 3, + detail: '(o: mixed) => boolean', + inlineDetail: '(o: mixed)', + itemType: 'boolean', + insertTextFormat: 1, + }, + { + label: 'name', + kind: 6, + detail: 'string', + inlineDetail: 'string', + insertTextFormat: 1, + }, + { + label: 'propertyIsEnumerable', + kind: 3, + detail: '(prop: mixed) => boolean', + inlineDetail: '(prop: mixed)', + itemType: 'boolean', + insertTextFormat: 1, + }, + { + label: 'toLocaleString', + kind: 3, + detail: '() => string', + inlineDetail: '()', + itemType: 'string', + insertTextFormat: 1, + }, + { + label: 'toString', + kind: 3, + detail: '() => string', + inlineDetail: '()', + itemType: 'string', + insertTextFormat: 1, + }, + { + label: 'valueOf', + kind: 3, + detail: '() => mixed', + inlineDetail: '()', + itemType: 'mixed', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + test('textDocument/completion triggered by dot outside jsx', [ + addFile('jsx.js'), + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'jsx.js'}, + position: {line: 14, character: 2}, + context: {triggerKind: 2, triggerCharacter: '.'}, + }).verifyAllLSPMessagesInStep( + [ + (() => { + const expectedResponse = { + isIncomplete: false, + items: [ + { + label: 'hasOwnProperty', + kind: 3, + detail: '(prop: mixed) => boolean', + inlineDetail: '(prop: mixed)', + itemType: 'boolean', + insertTextFormat: 1, + }, + { + label: 'isPrototypeOf', + kind: 3, + detail: '(o: mixed) => boolean', + inlineDetail: '(o: mixed)', + itemType: 'boolean', + insertTextFormat: 1, + }, + { + label: 'name', + kind: 6, + detail: 'string', + inlineDetail: 'string', + insertTextFormat: 1, + }, + { + label: 'propertyIsEnumerable', + kind: 3, + detail: '(prop: mixed) => boolean', + inlineDetail: '(prop: mixed)', + itemType: 'boolean', + insertTextFormat: 1, + }, + { + 
label: 'toLocaleString', + kind: 3, + detail: '() => string', + inlineDetail: '()', + itemType: 'string', + insertTextFormat: 1, + }, + { + label: 'toString', + kind: 3, + detail: '() => string', + inlineDetail: '()', + itemType: 'string', + insertTextFormat: 1, + }, + { + label: 'valueOf', + kind: 3, + detail: '() => mixed', + inlineDetail: '()', + itemType: 'mixed', + insertTextFormat: 1, + }, + ], + }; + return `textDocument/completion${JSON.stringify(expectedResponse)}`; + })(), + ], + ['textDocument/publishDiagnostics', ...lspIgnoreStatusAndCancellation], + ), + ]), + ], +); diff --git a/newtests/lsp/connection/test.js b/newtests/lsp/connection/test.js index e0f4afbb2af..6615ea0b0c5 100644 --- a/newtests/lsp/connection/test.js +++ b/newtests/lsp/connection/test.js @@ -1,7 +1,6 @@ /* * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; @@ -9,13 +8,13 @@ import {suite, test} from 'flow-dev-tools/src/test/Tester'; export default suite( ({ startFlowServer, - ideStart, - ideStartAndConnect, - ideRequest, - ideNotification, - ideResponse, - ideRequestAndWaitUntilResponse, - waitUntilIDEStatus, + lspStart, + lspStartAndConnect, + lspRequest, + lspNotification, + lspResponse, + lspRequestAndWaitUntilResponse, + waitUntilLSPStatus, waitUntilServerStatus, flowCmd, modifyFile, @@ -23,30 +22,30 @@ export default suite( lspIgnoreStatusAndCancellation, }) => [ test('Warm flow starts up, and server remains running after shutdown', [ - ideStart({mode: 'lsp', needsFlowServer: true}), - ideRequest('initialize', lspInitializeParams) - .waitUntilIDEMessage(60000, 'telemetry/connectionStatus') - .verifyAllIDEMessagesInStep( + lspStart({needsFlowServer: true}), + lspRequest('initialize', lspInitializeParams) + .waitUntilLSPMessage(60000, 'telemetry/connectionStatus') + .verifyAllLSPMessagesInStep( ['initialize', 'telemetry/connectionStatus{true}'], [...lspIgnoreStatusAndCancellation], ), - ideRequest('shutdown') - .waitUntilIDEMessage(20000, 'shutdown') - .verifyAllIDEMessagesInStep( + lspRequest('shutdown') + .waitUntilLSPMessage(20000, 'shutdown') + .verifyAllLSPMessagesInStep( ['shutdown'], ['telemetry/connectionStatus', ...lspIgnoreStatusAndCancellation], ), - ideNotification('exit') - .waitUntilIDEStatus(20000, 'stopped') - .verifyIDEStatus('stopped') + lspNotification('exit') + .waitUntilLSPStatus(20000, 'stopped') + .verifyLSPStatus('stopped') .verifyServerStatus('running'), ]), test('Cold flow starts up with progress, and shuts down', [ - ideStart({mode: 'lsp', needsFlowServer: false}), - ideRequest('initialize', lspInitializeParams) - .waitUntilIDEMessage(60000, 'telemetry/connectionStatus') - .verifyAllIDEMessagesInStep( + lspStart({needsFlowServer: false}), + lspRequest('initialize', lspInitializeParams) + .waitUntilLSPMessage(60000, 'telemetry/connectionStatus') + .verifyAllLSPMessagesInStep( [ 'initialize', 'window/logMessage{Starting Flow server}', @@ -54,25 +53,25 @@ export default suite( ], [...lspIgnoreStatusAndCancellation], ), - ideRequest('shutdown') - .waitUntilIDEMessage(20000, 'shutdown') - .verifyAllIDEMessagesInStep( + lspRequest('shutdown') + .waitUntilLSPMessage(20000, 'shutdown') + .verifyAllLSPMessagesInStep( ['shutdown'], ['telemetry/connectionStatus', ...lspIgnoreStatusAndCancellation], ), - ideNotification('exit') - .waitUntilIDEStatus(20000, 'stopped') + lspNotification('exit') + .waitUntilLSPStatus(20000, 'stopped') .waitUntilServerStatus(20000, 'stopped') - .verifyIDEStatus('stopped') + 
.verifyLSPStatus('stopped') .verifyServerStatus('stopped'), ]), test('Termination in-flight, and external restart', [ - ideStartAndConnect(), + lspStartAndConnect(), flowCmd(['stop']) .waitUntilServerStatus(20000, 'stopped') - .waitUntilIDEMessage(20000, 'window/showStatus{stopped}') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(20000, 'window/showStatus{stopped}') + .verifyAllLSPMessagesInStep( [ 'telemetry/connectionStatus{false}', 'telemetry/event{End_of_file}', @@ -86,20 +85,20 @@ export default suite( ], ), startFlowServer() - .waitUntilIDEMessage(60000, 'telemetry/connectionStatus') + .waitUntilLSPMessage(60000, 'telemetry/connectionStatus') // it really can take a while for flow to be ready to connect - .verifyAllIDEMessagesInStep( + .verifyAllLSPMessagesInStep( ['telemetry/connectionStatus{true}'], [...lspIgnoreStatusAndCancellation], ), ]), test('Termination in-flight, and internal restart', [ - ideStartAndConnect(), + lspStartAndConnect(), flowCmd(['stop']) .waitUntilServerStatus(20000, 'stopped') - .waitUntilIDEMessage(20000, 'window/showStatus{stopped}') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(20000, 'window/showStatus{stopped}') + .verifyAllLSPMessagesInStep( [ 'telemetry/connectionStatus{false}', 'telemetry/event{End_of_file}', @@ -107,18 +106,18 @@ export default suite( ], [...lspIgnoreStatusAndCancellation], ), - ideResponse('mostRecent', {title: 'Restart'}) + lspResponse('mostRecent', {title: 'Restart'}) // .waitUntilServerStatus(60000, 'running') -- commented out because // the method currently only waits for servers that the test infrastructure // launched; not ones that lspCommand launched. - .waitUntilIDEMessage(60000, 'telemetry/connectionStatus') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(60000, 'telemetry/connectionStatus') + .verifyAllLSPMessagesInStep( ['window/logMessage{Starting}', 'telemetry/connectionStatus{true}'], [...lspIgnoreStatusAndCancellation], ), ]), -/* + /* TODO(ljw): fix race. The following test is fine in theory... But on AppVeyor, what happens 1 in 50 runs is that 'flow force-recheck --no-auto-start .flowconfig' sends a message to the monitor and thence the server telling it to force-recheck, @@ -132,16 +131,16 @@ export default suite( In any case, it won't have a bad user experience - the retry behavior of forceRecheckCommand doesn't correspond to any real watchman behavior; and if the user does happen to do forceRecheckCommand in a way that stops flow, - then the worst that will happen is that Nuclide pops up a box saying + then the worst that will happen is that Nucllsp pops up a box saying "flow is stopped [restart]". 
test('Restarts a lost server in response to flowconfig benign change', [ - ideStartAndConnect(), + lspStartAndConnect(), modifyFile('.flowconfig', '#placeholder', '#replaced') - .waitUntilIDEMessage(20000, 'telemetry/connectionStatus{false}') + .waitUntilLSPMessage(20000, 'telemetry/connectionStatus{false}') .dontMindServerDeath() - .waitUntilIDEMessage(60000, 'telemetry/connectionStatus{true}') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(60000, 'telemetry/connectionStatus{true}') + .verifyAllLSPMessagesInStep( [ 'telemetry/connectionStatus{false}', 'telemetry/event{Server fatal exception}', @@ -154,11 +153,11 @@ export default suite( */ test('Terminates in response to flowconfig version change', [ - ideStartAndConnect(), + lspStartAndConnect(), modifyFile('.flowconfig', '>0.60.0', '>0.61.0') .waitUntilServerStatus(20000, 'stopped') - .waitUntilIDEStatus(20000, 'stopped') - .verifyAllIDEMessagesInStep( + .waitUntilLSPStatus(20000, 'stopped') + .verifyAllLSPMessagesInStep( [ 'telemetry/connectionStatus{false}', 'telemetry/event{Server fatal exception}', @@ -171,8 +170,8 @@ export default suite( ]), test('Editor open files outlive server', [ - ideStartAndConnect(), - ideNotification('textDocument/didOpen', { + lspStartAndConnect(), + lspNotification('textDocument/didOpen', { textDocument: { uri: 'open.js', languageId: 'javascript', @@ -183,45 +182,45 @@ jones(); `, }, }) - .ideRequestAndWaitUntilResponse('textDocument/definition', { + .lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 2, character: 1}, }) - .verifyAllIDEMessagesInStep( + .verifyAllLSPMessagesInStep( ['textDocument/definition{open.js,"line":1}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/definition', { + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 2, character: 1}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{open.js,"line":1}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/definition', { + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 2, character: 1}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{open.js,"line":1}'], [...lspIgnoreStatusAndCancellation], ), flowCmd(['stop']) .waitUntilServerStatus(20000, 'stopped') - .waitUntilIDEMessage(20000, 'telemetry/connectionStatus{false}') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(20000, 'telemetry/connectionStatus{false}') + .verifyAllLSPMessagesInStep( ['telemetry/connectionStatus{false}'], ['telemetry/event{End_of_file}', ...lspIgnoreStatusAndCancellation], ), startFlowServer() - .waitUntilIDEMessage(60000, 'telemetry/connectionStatus') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(60000, 'telemetry/connectionStatus') + .verifyAllLSPMessagesInStep( ['telemetry/connectionStatus{true}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/definition', { + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 2, character: 1}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{open.js,line":1}'], [...lspIgnoreStatusAndCancellation], ), diff --git a/newtests/lsp/diagnostics/_flowconfig b/newtests/lsp/diagnostics/_flowconfig new file mode 100644 index 
00000000000..add1e2aecff --- /dev/null +++ b/newtests/lsp/diagnostics/_flowconfig @@ -0,0 +1,2 @@ +[options] +esproposal.optional_chaining=enable diff --git a/newtests/lsp/diagnostics/empty.js b/newtests/lsp/diagnostics/empty.js new file mode 100644 index 00000000000..06e5345f818 --- /dev/null +++ b/newtests/lsp/diagnostics/empty.js @@ -0,0 +1 @@ +// @thisWillBeFlowInTest diff --git a/newtests/lsp/diagnostics/importsFakeSymbol.js b/newtests/lsp/diagnostics/importsFakeSymbol.js new file mode 100644 index 00000000000..6436e1bf492 --- /dev/null +++ b/newtests/lsp/diagnostics/importsFakeSymbol.js @@ -0,0 +1,3 @@ +// @thisWillBeFlowInTest + +const {foo} = require('./empty'); diff --git a/newtests/lsp/diagnostics/pseudo_parse_error.js b/newtests/lsp/diagnostics/pseudo_parse_error.js new file mode 100644 index 00000000000..ee81e763a79 --- /dev/null +++ b/newtests/lsp/diagnostics/pseudo_parse_error.js @@ -0,0 +1,6 @@ +// @thisWillBeFlowInTest + +const obj = {}; +// Flow does not yet support method or property calls in optional chains, so +// this will produce a pseudo parse error +obj?.foo(); // Error diff --git a/newtests/lsp/diagnostics/test.js b/newtests/lsp/diagnostics/test.js index b4f0fb77dc6..166aeb172df 100644 --- a/newtests/lsp/diagnostics/test.js +++ b/newtests/lsp/diagnostics/test.js @@ -1,22 +1,22 @@ /* * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; export default suite( ({ - ideStartAndConnect, - ideRequestAndWaitUntilResponse, - ideNotification, + lspStartAndConnect, + lspRequestAndWaitUntilResponse, + lspNotification, addFile, + addFiles, modifyFile, lspIgnoreStatusAndCancellation, }) => [ test('textDocument/publishDiagnostics #1', [ - ideStartAndConnect(), + lspStartAndConnect(), addFile('witherrors1.js') // Flow may send multiple publishDiagnostics when reporting partial // progress and then complete results, e.g. an empty publishDiagnostics @@ -28,11 +28,11 @@ export default suite( // To be robust against races: we'll wait up to 9s to get the // expected publishDiagnostic, then verify that this expected publishDiagnostic // was sent at least once, and ignore any additional publishDiagnostics. 
- .waitUntilIDEMessage( + .waitUntilLSPMessage( 9000, 'textDocument/publishDiagnostics{Cannot return}', ) - .verifyAllIDEMessagesInStep( + .verifyAllLSPMessagesInStep( [ 'textDocument/publishDiagnostics{"Cannot return `23` because number [1] is incompatible with string [2].","message":"[1] number","message":"[2] string"}', ], @@ -44,13 +44,13 @@ export default suite( ]), test('textDocument/publishDiagnostics #2', [ - ideStartAndConnect(), + lspStartAndConnect(), addFile('witherrors2.js') - .waitUntilIDEMessage( + .waitUntilLSPMessage( 9000, 'textDocument/publishDiagnostics{Cannot extend}', ) - .verifyAllIDEMessagesInStep( + .verifyAllLSPMessagesInStep( [ 'textDocument/publishDiagnostics{"Cannot extend `H` [1] with `I` because `H` [1] is not inheritable.","message":"[1] `H`"}', ], @@ -62,13 +62,13 @@ export default suite( ]), test('textDocument/publishDiagnostics clears errors', [ - ideStartAndConnect(), + lspStartAndConnect(), addFile('witherrors1.js') - .waitUntilIDEMessage( + .waitUntilLSPMessage( 9000, 'textDocument/publishDiagnostics{Cannot return}', ) - .verifyAllIDEMessagesInStep( + .verifyAllLSPMessagesInStep( [ 'textDocument/publishDiagnostics{"Cannot return `23` because number [1] is incompatible with string [2].","message":"[1] number","message":"[2] string"}', ], @@ -78,11 +78,11 @@ export default suite( ], ), modifyFile('witherrors1.js', 'return 23;', 'return "";') - .waitUntilIDEMessage( + .waitUntilLSPMessage( 9000, 'textDocument/publishDiagnostics{"diagnostics":[]}', ) - .verifyAllIDEMessagesInStep( + .verifyAllLSPMessagesInStep( ['textDocument/publishDiagnostics{"diagnostics":[]}'], [ 'textDocument/publishDiagnostics', @@ -92,9 +92,9 @@ export default suite( ]), test('live diagnostics', [ - ideStartAndConnect(), + lspStartAndConnect(), // Open a document with errors. We should get a live syntax error immediately. - ideNotification('textDocument/didOpen', { + lspNotification('textDocument/didOpen', { textDocument: { uri: 'syntaxError1.js', languageId: 'javascript', @@ -104,13 +104,13 @@ function fred(): number {return 1+;} `, }, }) - .waitUntilIDEMessage(9000, 'textDocument/publishDiagnostics') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(9000, 'textDocument/publishDiagnostics') + .verifyAllLSPMessagesInStep( ['textDocument/publishDiagnostics{Unexpected token}'], ['window/showStatus'], ), // Edit it fix the problem. The live syntax error should be dismissed immediately. - ideNotification('textDocument/didChange', { + lspNotification('textDocument/didChange', { textDocument: { uri: 'syntaxError1.js', version: 2, @@ -123,13 +123,13 @@ function fred(): number {return 1+2;} }, ], }) - .waitUntilIDEMessage(9000, 'textDocument/publishDiagnostics') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(9000, 'textDocument/publishDiagnostics') + .verifyAllLSPMessagesInStep( ['textDocument/publishDiagnostics{"diagnostics":[]}'], [], ), // Make another change that doesn't introduce errors. We should get no reports. - ideNotification('textDocument/didChange', { + lspNotification('textDocument/didChange', { textDocument: { uri: 'syntaxError1.js', version: 2, @@ -143,9 +143,9 @@ function fred(): number {return 1+2;} ], }) .sleep(1000) - .verifyAllIDEMessagesInStep([], []), + .verifyAllLSPMessagesInStep([], []), // Make a change that introduces the error. We should get a report immediately. 
- ideNotification('textDocument/didChange', { + lspNotification('textDocument/didChange', { textDocument: { uri: 'syntaxError1.js', version: 3, @@ -158,23 +158,136 @@ function fred(): number {return 1+2;} }, ], }) - .waitUntilIDEMessage(9000, 'textDocument/publishDiagnostics') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(9000, 'textDocument/publishDiagnostics') + .verifyAllLSPMessagesInStep( ['textDocument/publishDiagnostics{Unexpected token}'], [], ), // Close the file. The live error should go away. - ideNotification('textDocument/didClose', { + lspNotification('textDocument/didClose', { textDocument: { uri: 'syntaxError1.js', version: 3, }, }) - .waitUntilIDEMessage(9000, 'textDocument/publishDiagnostics') - .verifyAllIDEMessagesInStep( + .waitUntilLSPMessage(9000, 'textDocument/publishDiagnostics') + .verifyAllLSPMessagesInStep( ['textDocument/publishDiagnostics{"diagnostics":[]}'], [], ), ]), + test('pseudo parse errors', [ + lspStartAndConnect(), + addFile('pseudo_parse_error.js') + .waitUntilLSPMessage( + 9000, + 'textDocument/publishDiagnostics{Cannot return}', + ) + .verifyAllLSPMessagesInStep( + [ + 'textDocument/publishDiagnostics{"Flow does not yet support method or property calls in optional chains."}', + ], + [ + 'textDocument/publishDiagnostics', + ...lspIgnoreStatusAndCancellation, + ], + ) + .newErrors( + ` + pseudo_parse_error.js:6 + 6: obj?.foo(); // Error + ^^^^^^^^^^ Flow does not yet support method or property calls in optional chains. + `, + ), + lspNotification('textDocument/didOpen', { + textDocument: { + uri: 'pseudo_parse_error.js', + languageId: 'javascript', + version: 1, + text: `// @flow + +const obj = {}; +// Flow does not yet support method or property calls in optional chains, so +// this will produce a pseudo parse error +obj?.foo(); // Error +`, + }, + }) + .waitUntilLSPMessage( + 9000, + 'textDocument/publishDiagnostics{Cannot return}', + ) + .verifyAllLSPMessagesInStep( + [ + 'textDocument/publishDiagnostics{"Flow does not yet support method or property calls in optional chains."}', + ], + [ + 'textDocument/publishDiagnostics', + ...lspIgnoreStatusAndCancellation, + ], + ), + ]), + test('Errors with Loc.none', [ + lspStartAndConnect(), + addFiles('empty.js', 'importsFakeSymbol.js').waitUntilLSPMessage( + 9000, + (() => { + const expectedMessage = { + uri: 'importsFakeSymbol.js', + diagnostics: [ + { + range: { + start: { + line: 2, + character: 7, + }, + end: { + line: 2, + character: 10, + }, + }, + severity: 1, + code: 'InferError', + source: 'Flow', + message: 'property `foo` is missing in exports [1].', + relatedInformation: [ + { + location: { + uri: 'empty.js', + range: { + start: { + line: 0, + character: 0, + }, + end: { + line: 0, + character: 0, + }, + }, + }, + message: '[1] exports', + }, + ], + relatedLocations: [ + { + location: { + uri: 'empty.js', + range: { + start: {line: 0, character: 0}, + end: {line: 0, character: 0}, + }, + }, + message: '[1] exports', + }, + ], + }, + ], + }; + return `textDocument/publishDiagnostics${JSON.stringify( + expectedMessage, + )}`; + })(), + ), + ]), ], ); diff --git a/newtests/lsp/edits/test.js b/newtests/lsp/edits/test.js index 1fa5a94f492..a7867652718 100644 --- a/newtests/lsp/edits/test.js +++ b/newtests/lsp/edits/test.js @@ -1,22 +1,21 @@ /* * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; export default suite( ({ - ideStartAndConnect, - ideRequestAndWaitUntilResponse, - ideNotification, + lspStartAndConnect, + 
lspRequestAndWaitUntilResponse, + lspNotification, addFile, lspIgnoreStatusAndCancellation, }) => [ test('didOpen+didChange+didClose', [ - ideStartAndConnect(), - ideNotification('textDocument/didOpen', { + lspStartAndConnect(), + lspNotification('textDocument/didOpen', { textDocument: { uri: 'open.js', languageId: 'javascript', @@ -27,15 +26,15 @@ function jones(): number { return 15; } jones(); `, }, - }).verifyAllIDEMessagesInStep([''], [...lspIgnoreStatusAndCancellation]), - ideRequestAndWaitUntilResponse('textDocument/definition', { + }).verifyAllLSPMessagesInStep([''], [...lspIgnoreStatusAndCancellation]), + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 3, character: 1}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{open.js,line":2}'], [...lspIgnoreStatusAndCancellation], ), - ideNotification('textDocument/didChange', { + lspNotification('textDocument/didChange', { textDocument: { uri: 'open.js', version: 2, @@ -49,22 +48,22 @@ wilbur(); `, }, ], - }).verifyAllIDEMessagesInStep([''], [...lspIgnoreStatusAndCancellation]), - ideRequestAndWaitUntilResponse('textDocument/definition', { + }).verifyAllLSPMessagesInStep([''], [...lspIgnoreStatusAndCancellation]), + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 3, character: 1}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{open.js,"line":1}'], [...lspIgnoreStatusAndCancellation], ), - ideNotification('textDocument/didClose', { + lspNotification('textDocument/didClose', { textDocument: {uri: 'open.js'}, - }).verifyAllIDEMessagesInStep([''], [...lspIgnoreStatusAndCancellation]), - ideRequestAndWaitUntilResponse('textDocument/definition', { + }).verifyAllLSPMessagesInStep([''], [...lspIgnoreStatusAndCancellation]), + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'open.js'}, position: {line: 3, character: 1}, - }).verifyAllIDEMessagesInStep( - ['textDocument/definition{No such file or directory}'], + }).verifyAllLSPMessagesInStep( + ['textDocument/definition{unexpected error}'], [...lspIgnoreStatusAndCancellation], ), ]), diff --git a/newtests/lsp/queries/test.js b/newtests/lsp/queries/test.js index 27d50c21bce..0d61d91f6d7 100644 --- a/newtests/lsp/queries/test.js +++ b/newtests/lsp/queries/test.js @@ -1,29 +1,28 @@ /* * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; export default suite( ({ - ideStartAndConnect, - ideNotification, - ideRequestAndWaitUntilResponse, + lspStartAndConnect, + lspNotification, + lspRequestAndWaitUntilResponse, addFile, addFiles, lspIgnoreStatusAndCancellation, }) => [ test('invalid_method', [ - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('foobar', {}).verifyAllIDEMessagesInStep( - ['foobar{not implemented}'], + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('foobar', {}).verifyAllLSPMessagesInStep( + ['foobar{unexpected error}'], [...lspIgnoreStatusAndCancellation], ), - ideNotification('barfoo', {}) - .waitUntilIDEMessage(2000, 'barfoo') - .verifyAllIDEMessagesInStep( + lspNotification('barfoo', {}) + .waitUntilLSPMessage(2000, 'barfoo') + .verifyAllLSPMessagesInStep( ['telemetry/event{not implemented}'], [...lspIgnoreStatusAndCancellation], ), @@ -31,11 +30,11 @@ export default suite( test('textDocument/definition', [ addFile('definition.js'), - ideStartAndConnect(), - 
ideRequestAndWaitUntilResponse('textDocument/definition', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'definition.js'}, position: {line: 6, character: 1}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( [ 'textDocument/definition{definition.js,"start":{"line":2,"character":0}}', ], @@ -45,11 +44,11 @@ export default suite( test('textDocument/definition', [ addFile('definition.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/definition', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'definition.js'}, position: {line: 7, character: 11}, // over a comment - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{[]}'], [...lspIgnoreStatusAndCancellation], ), @@ -57,11 +56,11 @@ export default suite( test('textDocument/definition', [ addFile('definition.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/definition', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/definition', { textDocument: {uri: 'definition.js'}, position: {line: 7, character: 1}, // over whitespace - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/definition{[]}'], [...lspIgnoreStatusAndCancellation], ), @@ -69,93 +68,79 @@ export default suite( test('textDocument/hover', [ addFile('hover.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/hover', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/hover', { textDocument: {uri: 'hover.js'}, position: {line: 6, character: 1}, // over a function use - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/hover{() => number}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/hover', { + lspRequestAndWaitUntilResponse('textDocument/hover', { textDocument: {uri: 'hover.js'}, position: {line: 3, character: 1}, // over whitespace - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/hover{null}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/hover', { + lspRequestAndWaitUntilResponse('textDocument/hover', { textDocument: {uri: 'hover.js'}, position: {line: 2, character: 1}, // over a keyword - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/hover{null}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/hover', { + lspRequestAndWaitUntilResponse('textDocument/hover', { textDocument: {uri: 'hover.js'}, position: {line: 0, character: 1}, // over a comment - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/hover{null}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/hover', { + lspRequestAndWaitUntilResponse('textDocument/hover', { textDocument: {uri: 'hover.js'}, position: {line: 6, character: 100}, // past the end of a line - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/hover{null}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/hover', { + lspRequestAndWaitUntilResponse('textDocument/hover', { textDocument: {uri: 'hover.js'}, position: {line: 100, character: 0}, // past the end of the file - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/hover{null}'], 
[...lspIgnoreStatusAndCancellation], ), ]), - test('textDocument/completion', [ - addFile('completion.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/completion', { - textDocument: {uri: 'completion.js'}, - position: {line: 10, character: 15}, // statement position - }).verifyAllIDEMessagesInStep( - [ - 'textDocument/completion{"label":"x","label":"fred","detail":"(a: number, b: string) => number","inlineDetail":"(a: number, b: string)"}', - ], - [...lspIgnoreStatusAndCancellation], - ), - ]), - test('textDocument/documentHighlight', [ addFiles('references.js', 'references2.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/documentHighlight', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/documentHighlight', { textDocument: {uri: 'references.js'}, position: {line: 9, character: 17}, // on an identifier - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/documentHighlight{"line":3,"line":9}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/documentHighlight', { + lspRequestAndWaitUntilResponse('textDocument/documentHighlight', { textDocument: {uri: 'references.js'}, position: {line: 9, character: 1}, // on a keyword - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/documentHighlight{[]}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/documentHighlight', { + lspRequestAndWaitUntilResponse('textDocument/documentHighlight', { textDocument: {uri: 'references.js'}, position: {line: 6, character: 0}, // on whitespace - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/documentHighlight{[]}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/documentHighlight', { + lspRequestAndWaitUntilResponse('textDocument/documentHighlight', { textDocument: {uri: 'references.js'}, position: {line: 6, character: 100}, // off the right edge of the text - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/documentHighlight{[]}'], [...lspIgnoreStatusAndCancellation], ), @@ -163,22 +148,22 @@ export default suite( test('textDocument/references', [ addFiles('references.js', 'references2.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/references', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/references', { textDocument: {uri: 'references.js'}, position: {line: 9, character: 17}, // on an identifier - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/references{line":3,"line":5,"line":9}'], [...lspIgnoreStatusAndCancellation], ), - ideRequestAndWaitUntilResponse('textDocument/references', { + lspRequestAndWaitUntilResponse('textDocument/references', { textDocument: {uri: 'references.js'}, position: { line: 9, character: 17, }, // on an identifier context: {includeIndirectReferences: true}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/references{line":3,"line":5,"line":6,"line":9}'], [...lspIgnoreStatusAndCancellation], ), @@ -186,12 +171,12 @@ export default suite( test('textDocument/rename', [ addFiles('references.js', 'references2.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/rename', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/rename', { textDocument: {uri: 'references.js'}, position: {line: 9, character: 17}, // on an 
identifier newName: 'foobar', - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/rename{"line":3,"line":5,"line":9}'], [...lspIgnoreStatusAndCancellation], ), @@ -199,10 +184,10 @@ export default suite( test('textDocument/documentSymbol', [ addFiles('outline.js', 'references.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/documentSymbol', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/documentSymbol', { textDocument: {uri: 'outline.js'}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( [ 'textDocument/documentSymbol{WORD_REGEX,State,Preferences,pref1,EPrefs,pref2,MyClass1,_projectRoot,command,constructor,dispose,MyInterface2,getFoo,myFunction3}', ], @@ -212,32 +197,32 @@ export default suite( test('textDocument/typeCoverage', [ addFiles('coverage.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/typeCoverage', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/typeCoverage', { textDocument: {uri: 'coverage.js'}, - }).verifyAllIDEMessagesInStep( - ['textDocument/typeCoverage{"line":12,"line":8,"line":6}'], - [...lspIgnoreStatusAndCancellation], + }).verifyAllLSPMessagesInStep( + ['textDocument/typeCoverage'], + ['window/showStatus', '$/cancelRequest'], ), ]), test('textDocument/typeCoverage 2', [ addFiles('coverage2.js'), - ideStartAndConnect(), - ideRequestAndWaitUntilResponse('textDocument/typeCoverage', { + lspStartAndConnect(), + lspRequestAndWaitUntilResponse('textDocument/typeCoverage', { textDocument: {uri: '/coverage2.js'}, - }).verifyAllIDEMessagesInStep( + }).verifyAllLSPMessagesInStep( ['textDocument/typeCoverage{Use @flow}'], [...lspIgnoreStatusAndCancellation], ), ]), test('telemetry/rage', [ - ideStartAndConnect(), - ideRequestAndWaitUntilResponse( + lspStartAndConnect(), + lspRequestAndWaitUntilResponse( 'telemetry/rage', {}, - ).verifyAllIDEMessagesInStep( + ).verifyAllLSPMessagesInStep( [ 'telemetry/rage{Focused: 1,LSP adapter state: Connected,.monitor_log,.log}', ], diff --git a/newtests/lsp/version/test.js b/newtests/lsp/version/test.js index 787173d5e63..9d2064384d8 100644 --- a/newtests/lsp/version/test.js +++ b/newtests/lsp/version/test.js @@ -1,37 +1,36 @@ /* * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; export default suite( ({ - ideStart, - ideRequest, - ideNotification, + lspStart, + lspRequest, + lspNotification, lspInitializeParams, lspIgnoreStatusAndCancellation, }) => [ test('initialize error for wrong version', [ - ideStart({mode: 'lsp', needsFlowServer: false}), - ideRequest('initialize', lspInitializeParams) - .waitUntilIDEMessage(10000, 'initialize') - .verifyAllIDEMessagesInStep( + lspStart({needsFlowServer: false}), + lspRequest('initialize', lspInitializeParams) + .waitUntilLSPMessage(30000, 'initialize') + .verifyAllLSPMessagesInStep( ['initialize{Wrong version of Flow. 
The config specifies}'], [...lspIgnoreStatusAndCancellation], ), - ideRequest('shutdown') - .waitUntilIDEMessage(3000, 'shutdown') - .verifyAllIDEMessagesInStep( + lspRequest('shutdown') + .waitUntilLSPMessage(10000, 'shutdown') + .verifyAllLSPMessagesInStep( ['shutdown'], [...lspIgnoreStatusAndCancellation], ), - ideNotification('exit') - .waitUntilIDEStatus(3000, 'stopped') - .waitUntilServerStatus(3000, 'stopped') - .verifyIDEStatus('stopped') + lspNotification('exit') + .waitUntilLSPStatus(10000, 'stopped') + .waitUntilServerStatus(10000, 'stopped') + .verifyLSPStatus('stopped') .verifyServerStatus('stopped'), ]), ], diff --git a/newtests/lsp/wait_for_recheck/_flowconfig b/newtests/lsp/wait_for_recheck/_flowconfig new file mode 100644 index 00000000000..08bb313bf3e --- /dev/null +++ b/newtests/lsp/wait_for_recheck/_flowconfig @@ -0,0 +1,12 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +all=true + +[strict] diff --git a/newtests/lsp/wait_for_recheck/completion.js b/newtests/lsp/wait_for_recheck/completion.js new file mode 100644 index 00000000000..2f599809c3f --- /dev/null +++ b/newtests/lsp/wait_for_recheck/completion.js @@ -0,0 +1,13 @@ +// @flow + +/** + * Docblock for 'fred' + * + * @param {number} a Docblock for param a + * @param {string} b Docblock for param b + * @return {number} Docblock for return + */ +function fred(a: number, b: string): number { + const x = 15; + return 5; +} diff --git a/newtests/lsp/wait_for_recheck/coverage.js b/newtests/lsp/wait_for_recheck/coverage.js new file mode 100644 index 00000000000..4fa2e86918f --- /dev/null +++ b/newtests/lsp/wait_for_recheck/coverage.js @@ -0,0 +1,15 @@ +// @flow + +function f(x: number): void {} + +const a: number = 1; +const b: number = 2; +const c: any = 3; +f(a+b); +f(b+c); + +try { + f(1); +} catch (e) { + f(1); +} diff --git a/newtests/lsp/wait_for_recheck/definition.js b/newtests/lsp/wait_for_recheck/definition.js new file mode 100644 index 00000000000..a53ddfe4a76 --- /dev/null +++ b/newtests/lsp/wait_for_recheck/definition.js @@ -0,0 +1,8 @@ +// @flow + +function fred(): number { + return 15; +} + +fred(); + // command has no definition diff --git a/newtests/lsp/wait_for_recheck/hover.js b/newtests/lsp/wait_for_recheck/hover.js new file mode 100644 index 00000000000..e13ce27ca4d --- /dev/null +++ b/newtests/lsp/wait_for_recheck/hover.js @@ -0,0 +1,7 @@ +// @flow + +function fred(): number { + return 15; +} + +fred(); diff --git a/newtests/lsp/wait_for_recheck/outline.js b/newtests/lsp/wait_for_recheck/outline.js new file mode 100644 index 00000000000..a947a86b6b7 --- /dev/null +++ b/newtests/lsp/wait_for_recheck/outline.js @@ -0,0 +1,36 @@ +// @flow + +import type {Bar} from './references'; + +const WORD_REGEX = /\w+/gi; + +type State = + | 'Initial' + | 'Starting'; + +type Preferences = { + pref1?: Map, +}; + +export type EPrefs = { + pref2: number; +} + +class MyClass1 { + _projectRoot: string; + command: string; + + constructor( + param1: string) { + this.command = param1; + } + + dispose(): void { + } +} + +interface MyInterface2 { + getFoo(): string; +} + +function myFunction3() {} diff --git a/newtests/lsp/wait_for_recheck/references.js b/newtests/lsp/wait_for_recheck/references.js new file mode 100644 index 00000000000..b292456b66b --- /dev/null +++ b/newtests/lsp/wait_for_recheck/references.js @@ -0,0 +1,11 @@ +// @flow + +export type Foo = {baz: string}; +export type Bar = {baz: string}; + +function takesFoo(x: Foo): void { + x.baz; +} + +const y: Bar = {baz: ''}; +takesFoo(y); diff --git 
a/newtests/lsp/wait_for_recheck/references2.js b/newtests/lsp/wait_for_recheck/references2.js new file mode 100644 index 00000000000..6f8f2c9e088 --- /dev/null +++ b/newtests/lsp/wait_for_recheck/references2.js @@ -0,0 +1,7 @@ +// @flow + +import type {Bar} from './references'; + +function takesBar(y: Bar): void { + y.baz; +} diff --git a/newtests/lsp/wait_for_recheck/sleep.js.ignored b/newtests/lsp/wait_for_recheck/sleep.js.ignored new file mode 100644 index 00000000000..9a21d26abea --- /dev/null +++ b/newtests/lsp/wait_for_recheck/sleep.js.ignored @@ -0,0 +1,3 @@ +declare var sleep: $Flow$DebugSleep; + +sleep(20); // sleep for 20 seconds diff --git a/newtests/lsp/wait_for_recheck/test.js b/newtests/lsp/wait_for_recheck/test.js new file mode 100644 index 00000000000..4fdfee3bb05 --- /dev/null +++ b/newtests/lsp/wait_for_recheck/test.js @@ -0,0 +1,365 @@ +/* + * @flow + * @format + */ + +import {suite, test} from '../../../packages/flow-dev-tools/src/test/Tester'; + +/** + * This test suite takes each query tested by lsp/queries and runs it once with + * wait_for_recheck=true and once with wait_for_recheck=false. In each case, + * we've triggered a 20s recheck and have a 2s timeout. Queries that run in + * parallel with a recheck will return a response. Those that don't will + * timeout + */ +export default suite( + ({ + lspStartAndConnect, + lspNotification, + lspRequestAndWaitUntilResponse, + addFile, + addFiles, + lspIgnoreStatusAndCancellation, + }) => [ + test('invalid_method handled immediately when wait_for_recheck=true', [ + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('foobar', {}).verifyAllLSPMessagesInStep( + ['foobar{unexpected error}'], + [...lspIgnoreStatusAndCancellation], + ), + lspNotification('barfoo', {}) + .waitUntilLSPMessage(2000, 'barfoo') + .verifyAllLSPMessagesInStep( + ['telemetry/event{not implemented}'], + [...lspIgnoreStatusAndCancellation], + ), + ]).waitForRecheck(true), + + test('invalid_method handled immediately when wait_for_recheck=false', [ + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('foobar', {}).verifyAllLSPMessagesInStep( + ['foobar{unexpected error}'], + [...lspIgnoreStatusAndCancellation], + ), + lspNotification('barfoo', {}) + .waitUntilLSPMessage(2000, 'barfoo') + .verifyAllLSPMessagesInStep( + ['telemetry/event{not implemented}'], + [...lspIgnoreStatusAndCancellation], + ), + ]).waitForRecheck(false), + + test('textDocument/definition will time out with wait_for_recheck=true', [ + addFile('definition.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/definition', { + textDocument: {uri: 'definition.js'}, + position: {line: 6, character: 1}, + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('textDocument/definition will return with wait_for_recheck=false', [ + addFile('definition.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/definition', { + textDocument: {uri: 'definition.js'}, + position: {line: 6, character: 
1}, + }) + .verifyAllLSPMessagesInStep( + [ + 'textDocument/definition{definition.js,"start":{"line":2,"character":0}}', + ], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ]).waitForRecheck(false), + + test('textDocument/hover will time out with wait_for_recheck=true', [ + addFile('hover.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/hover', { + textDocument: {uri: 'hover.js'}, + position: {line: 6, character: 1}, // over a function use + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('textDocument/hover will return with wait_for_recheck=false', [ + addFile('hover.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/hover', { + textDocument: {uri: 'hover.js'}, + position: {line: 6, character: 1}, // over a function use + }) + .verifyAllLSPMessagesInStep( + ['textDocument/hover{() => number}'], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ]).waitForRecheck(false), + + test('textDocument/completion will time out with wait_for_recheck=true', [ + addFile('completion.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'completion.js'}, + position: {line: 10, character: 15}, // statement position + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('textDocument/completion will return with wait_for_recheck=false', [ + addFile('completion.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/completion', { + textDocument: {uri: 'completion.js'}, + position: {line: 10, character: 15}, // statement position + }) + .verifyAllLSPMessagesInStep( + [ + 'textDocument/completion{"label":"x","label":"fred","detail":"(a: number, b: string) => number","inlineDetail":"(a: number, b: string)"}', + ], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ]).waitForRecheck(false), + + test( + 'textDocument/documentHighlight will return with wait_for_recheck=false', + [ + addFiles('references.js', 'references2.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/documentHighlight', { + textDocument: {uri: 'references.js'}, + position: {line: 9, character: 17}, // on an identifier + }) + .verifyAllLSPMessagesInStep( + ['textDocument/documentHighlight'], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ], + ).waitForRecheck(false), + + test( + 'textDocument/documentHighlight will time out with wait_for_recheck=true', + [ + addFiles('references.js', 'references2.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/documentHighlight', { + textDocument: {uri: 'references.js'}, + position: {line: 9, 
character: 17}, // on an identifier + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ], + ).waitForRecheck(true), + + test('textDocument/references will time out with wait_for_recheck=true', [ + addFiles('references.js', 'references2.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/references', { + textDocument: {uri: 'references.js'}, + position: {line: 9, character: 17}, // on an identifier + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('textDocument/references will time out with wait_for_recheck=false', [ + addFiles('references.js', 'references2.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/references', { + textDocument: {uri: 'references.js'}, + position: {line: 9, character: 17}, // on an identifier + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(false), + + test('textDocument/rename will time out with wait_for_recheck=true', [ + addFiles('references.js', 'references2.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/rename', { + textDocument: {uri: 'references.js'}, + position: {line: 9, character: 17}, // on an identifier + newName: 'foobar', + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('textDocument/rename will time out with wait_for_recheck=false', [ + addFiles('references.js', 'references2.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/rename', { + textDocument: {uri: 'references.js'}, + position: {line: 9, character: 17}, // on an identifier + newName: 'foobar', + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(false), + + test('textDocument/documentSymbol will return with wait_for_recheck=true', [ + addFiles('outline.js', 'references.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/documentSymbol', { + textDocument: {uri: 'outline.js'}, + }) + .verifyAllLSPMessagesInStep( + [ + 'textDocument/documentSymbol{WORD_REGEX,State,Preferences,pref1,EPrefs,pref2,MyClass1,_projectRoot,command,constructor,dispose,MyInterface2,getFoo,myFunction3}', + ], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ]).waitForRecheck(true), + + test( + 'textDocument/documentSymbol will return with wait_for_recheck=false', + [ + addFiles('outline.js', 'references.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/documentSymbol', { + textDocument: {uri: 'outline.js'}, + }) + .verifyAllLSPMessagesInStep( + [ + 
'textDocument/documentSymbol{WORD_REGEX,State,Preferences,pref1,EPrefs,pref2,MyClass1,_projectRoot,command,constructor,dispose,MyInterface2,getFoo,myFunction3}', + ], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ], + ).waitForRecheck(false), + + test('textDocument/typeCoverage will time out with wait_for_recheck=true', [ + addFiles('coverage.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/typeCoverage', { + textDocument: {uri: 'coverage.js'}, + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('textDocument/typeCoverage will return with wait_for_recheck=false', [ + addFiles('coverage.js'), + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/typeCoverage', { + textDocument: {uri: 'coverage.js'}, + }) + .verifyAllLSPMessagesInStep( + ['textDocument/typeCoverage{"line":12,"line":8,"line":6}'], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ]).waitForRecheck(false), + + test('telemetry/rage will time out with wait_for_recheck=true', [ + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/typeCoverage', { + textDocument: {uri: '/coverage2.js'}, + }) + .verifyAllLSPMessagesInStep([], [...lspIgnoreStatusAndCancellation]) + .timeout(2000), + ]).waitForRecheck(true), + + test('telemetry/rage will return with wait_for_recheck=false', [ + lspStartAndConnect(), + addFile('sleep.js.ignored', 'sleep.js').verifyAllLSPMessagesInStep( + [''], + [...lspIgnoreStatusAndCancellation], + ), + lspRequestAndWaitUntilResponse('textDocument/typeCoverage', { + textDocument: {uri: '/coverage2.js'}, + }) + .verifyAllLSPMessagesInStep( + ['textDocument/typeCoverage{Use @flow}'], + [...lspIgnoreStatusAndCancellation], + ) + .timeout(2000), + ]).waitForRecheck(false), + ], +); diff --git a/newtests/munged_class_member_inference/test.js b/newtests/munged_class_member_inference/test.js index 797ba482bb8..dd4a8c43afe 100644 --- a/newtests/munged_class_member_inference/test.js +++ b/newtests/munged_class_member_inference/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/new_target/test.js b/newtests/new_target/test.js index 873451e0c18..c5ccacead84 100644 --- a/newtests/new_target/test.js +++ b/newtests/new_target/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/package_json_changes/invalidPackage.json b/newtests/package_json_changes/invalidPackage.json new file mode 100644 index 00000000000..86628eb71c6 --- /dev/null +++ b/newtests/package_json_changes/invalidPackage.json @@ -0,0 +1 @@ +["valid json but invalid package"] diff --git a/newtests/package_json_changes/invalidPackage2.json b/newtests/package_json_changes/invalidPackage2.json new file mode 100644 index 00000000000..044f9680c29 --- /dev/null +++ b/newtests/package_json_changes/invalidPackage2.json @@ -0,0 +1 @@ +["another invalid package"] diff --git a/newtests/package_json_changes/test.js b/newtests/package_json_changes/test.js index a7d81963f02..b1610bcf2c7 100644 --- a/newtests/package_json_changes/test.js +++ 
b/newtests/package_json_changes/test.js @@ -1,11 +1,10 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; -export default suite(({addFile, removeFile, exitCode, flowCmd}) => [ +export default suite(({addFile, removeFile, flowCmd}) => [ test('node - Adding a package.json should kill the server', [ addFile('start.json', 'package.json') .startFlowServer() @@ -78,4 +77,36 @@ export default suite(({addFile, removeFile, exitCode, flowCmd}) => [ .waitUntilServerStatus(2000, 'stopped') // only 2s not 10s so as not to waste time .verifyServerStatus('running'), ]).flowConfig('haste_flowconfig'), + + test('node - Making package invalid should kill the server', [ + addFile('start.json', 'package.json'), + addFile('invalidPackage.json', 'package.json') + .startFlowServer() + .waitUntilServerStatus(10000, 'stopped') + .verifyServerStatus('stopped'), + ]).flowConfig('node_flowconfig'), + test('haste - Making package invalid should kill the server', [ + addFile('start.json', 'package.json'), + addFile('invalidPackage.json', 'package.json') + .startFlowServer() + .waitUntilServerStatus(10000, 'stopped') + .verifyServerStatus('stopped'), + ]).flowConfig('haste_flowconfig'), + + test('node - Changes to an invalid package should NOT kill the server', [ + addFile('invalidPackage.json', 'package.json'), + addFile('invalidPackage2.json', 'package.json') + .startFlowServer() // makes this step start Flow before invalidPackage2 is added + .waitUntilServerStatus(2000, 'stopped') // only 2s not 10s so as not to waste time + .verifyServerStatus('running'), + ]).flowConfig('node_flowconfig'), + test('haste - Changes to an invalid package should NOT kill the server', [ + addFile('invalidPackage.json', 'package.json') + .startFlowServer() // makes this step start Flow before invalidPackage2 is added + .waitUntilServerStatus(2000, 'stopped') // only 2s not 10s so as not to waste time + .verifyServerStatus('running'), + addFile('invalidPackage2.json', 'package.json') + .verifyServerStatus('running'), + ]).flowConfig('haste_flowconfig'), + ]); diff --git a/newtests/recheck_parse_error/test.js b/newtests/recheck_parse_error/test.js index c0fe1b66485..ec6f6c0672a 100644 --- a/newtests/recheck_parse_error/test.js +++ b/newtests/recheck_parse_error/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -15,7 +14,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:5 5: I am a parse error - ^^ Unexpected identifier + ^^ Unexpected identifier, expected the end of an expression statement (\`;\`) `, ), ]), diff --git a/newtests/repos-union/test.js b/newtests/repos-union/test.js index cc39c455e7a..c61760f774f 100644 --- a/newtests/repos-union/test.js +++ b/newtests/repos-union/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/resource_files/test.js b/newtests/resource_files/test.js index d2d45abec4f..9a187f94363 100644 --- a/newtests/resource_files/test.js +++ b/newtests/resource_files/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -29,18 +28,7 @@ export default suite(({addFile, addFiles, addCode}) => [ addFile('foo.css') .addCode("const css = require('./foo.css');") .addCode("(css: string)") - .newErrors( - ` - test.js:5 - 5: (css: string) - ^^^ Cannot cast \`css\` to string because object type [1] is incompatible with string [2]. 
- References: - 3: const css = require('./foo.css'); - ^^^^^^^^^^^ [1] - 5: (css: string) - ^^^^^^ [2] - `, - ), + .noNewErrors(), ]), test('Typical use of a .css file', [ diff --git a/newtests/shadow/test.js b/newtests/shadow/test.js index 795bea46309..10b9a2166e7 100644 --- a/newtests/shadow/test.js +++ b/newtests/shadow/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; @@ -153,18 +152,7 @@ export default suite(({addFile, addFiles, addCode}) => [ `).noNewErrors(), addCode(`var o: {p: string} = Object.create(proto);`) - .newErrors( - ` - test.js:8 - 8: var o: {p: string} = Object.create(proto); - ^^^^^^^^^^^^^^^^^^^^ Cannot assign \`Object.create(...)\` to \`o\` because number [1] is incompatible with string [2] in property \`p\`. - References: - 5: proto.p = 0; - ^ [1] - 8: var o: {p: string} = Object.create(proto); - ^^^^^^ [2] - `, - ), + .noNewErrors(), ]), test('derived object subtyping -- read before write', [ @@ -176,32 +164,10 @@ export default suite(({addFile, addFiles, addCode}) => [ inconsistent with GetPropT/MethodT. It would be confusing if this didn't error, though: var o: { p: string } = {} */ addCode(`var o: {p: string} = Object.create(proto);`) - .newErrors( - ` - test.js:7 - 7: var o: {p: string} = Object.create(proto); - ^^^^^^^^^^^^^^^^^^^^ Cannot assign \`Object.create(...)\` to \`o\` because property \`p\` is missing in \`Object.create\` [1] but exists in object type [2]. - References: - 7: var o: {p: string} = Object.create(proto); - ^^^^^^^^^^^^^^^^^^^^ [1] - 7: var o: {p: string} = Object.create(proto); - ^^^^^^^^^^^ [2] - `, - ), + .noNewErrors(), addCode(`proto.p = 0;`) - .newErrors( - ` - test.js:9 - 9: proto.p = 0; - ^ Cannot assign \`0\` to \`proto.p\` because number [1] is incompatible with string [2]. - References: - 9: proto.p = 0; - ^ [1] - 7: var o: {p: string} = Object.create(proto); - ^^^^^^ [2] - `, - ), + .noNewErrors(), ]), /* Because shadow operations execute on failure, a builtin or import can cause diff --git a/newtests/suppression_default/test.js b/newtests/suppression_default/test.js index 210a991462f..79845bb3eb0 100644 --- a/newtests/suppression_default/test.js +++ b/newtests/suppression_default/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/tool_test_example/test.js b/newtests/tool_test_example/test.js index dac634c521e..834201b9b63 100644 --- a/newtests/tool_test_example/test.js +++ b/newtests/tool_test_example/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/tuples/test.js b/newtests/tuples/test.js index 2eeeeae7a0e..6edc98d3269 100644 --- a/newtests/tuples/test.js +++ b/newtests/tuples/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -78,8 +77,8 @@ export default suite(({addFile, addFiles, addCode}) => [ 3: function foo(x: [1,2]): string { return x.length; } ^^^^^^^^ Cannot return \`x.length\` because number [1] is incompatible with string [2]. References: - 235: +length: number; - ^^^^^^ [1]. See lib: [LIB] core.js:235 + 255: +length: number; + ^^^^^^ [1]. See lib: [LIB] core.js:255 3: function foo(x: [1,2]): string { return x.length; } ^^^^^^ [2] `, @@ -103,8 +102,8 @@ export default suite(({addFile, addFiles, addCode}) => [ 6: readOnlyRef.push(123); ^^^^ Cannot call \`readOnlyRef.push\` because property \`push\` is missing in \`$ReadOnlyArray\` [1]. 
References: - 207: forEach(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => any, thisArg?: any): void; - ^^^^^^^^^^^^^^^^^ [1]. See lib: [LIB] core.js:207 + 228: forEach(callbackfn: (value: T, index: number, array: $ReadOnlyArray) => mixed, thisArg?: any): void; + ^^^^^^^^^^^^^^^^^ [1]. See lib: [LIB] core.js:228 test.js:7 7: (readOnlyRef[0]: 1); @@ -184,18 +183,9 @@ export default suite(({addFile, addFiles, addCode}) => [ References: 3: function foo(x: [1,2]): number { return x[2]; } ^^^^^ [1] - - test.js:3 - 3: function foo(x: [1,2]): number { return x[2]; } - ^^^^ Cannot return \`x[2]\` because undefined (out of bounds tuple access) [1] is incompatible with number [2]. - References: - 3: function foo(x: [1,2]): number { return x[2]; } - ^^^^ [1] - 3: function foo(x: [1,2]): number { return x[2]; } - ^^^^^^ [2] `, ) - .because('Out of bounds access causes an error and results in void'), + .because('Out of bounds access causes an error'), addCode('function foo(x: [1,2]): number { return x[-1]; }') .newErrors( ` @@ -205,37 +195,6 @@ export default suite(({addFile, addFiles, addCode}) => [ References: 5: function foo(x: [1,2]): number { return x[-1]; } ^^^^^ [1] - - test.js:5 - 5: function foo(x: [1,2]): number { return x[-1]; } - ^^^^^ Cannot return \`x[-1]\` because undefined (out of bounds tuple access) [1] is incompatible with number [2]. - References: - 5: function foo(x: [1,2]): number { return x[-1]; } - ^^^^^ [1] - 5: function foo(x: [1,2]): number { return x[-1]; } - ^^^^^^ [2] - `, - ), - ]), - test('Out of bounds access returns void', [ - addCode('function foo(x: [1]): string { return x[2]; }') - .newErrors( - ` - test.js:3 - 3: function foo(x: [1]): string { return x[2]; } - ^^^^ Cannot get \`x[2]\` because tuple type [1] only has 1 element, so index 2 is out of bounds. - References: - 3: function foo(x: [1]): string { return x[2]; } - ^^^ [1] - - test.js:3 - 3: function foo(x: [1]): string { return x[2]; } - ^^^^ Cannot return \`x[2]\` because undefined (out of bounds tuple access) [1] is incompatible with string [2]. - References: - 3: function foo(x: [1]): string { return x[2]; } - ^^^^ [1] - 3: function foo(x: [1]): string { return x[2]; } - ^^^^^^ [2] `, ), ]), @@ -458,15 +417,6 @@ export default suite(({addFile, addFiles, addCode}) => [ References: 4: function foo(tup: ?[number, number]): number { ^^^^^^^^^^^^^^^^ [1] - - test.js:6 - 6: return tup[3]; - ^^^^^^ Cannot return \`tup[3]\` because undefined (out of bounds tuple access) [1] is incompatible with number [2]. - References: - 6: return tup[3]; - ^^^^^^ [1] - 4: function foo(tup: ?[number, number]): number { - ^^^^^^ [2] `, ), ]), diff --git a/newtests/union_new/test.js b/newtests/union_new/test.js index 060fb237c68..b6666bdf8ac 100644 --- a/newtests/union_new/test.js +++ b/newtests/union_new/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/newtests/unsupported_spread_args/test.js b/newtests/unsupported_spread_args/test.js index ecfa2786305..5bff3c684d2 100644 --- a/newtests/unsupported_spread_args/test.js +++ b/newtests/unsupported_spread_args/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ @@ -167,7 +166,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:17 17: Object.assign({}, ...[1]) - ^^^^^^^^^^^^^^^^^^^^^^^^^ number [1] is not an object. + ^^^^^^^^^^^^^^^^^^^^^^^^^ Incorrect arguments passed to call of method \`assign\` because number [1] is not an object. 
References: 17: Object.assign({}, ...[1]) ^ [1] diff --git a/newtests/variance/test.js b/newtests/variance/test.js index fb3ae2ac641..c0a4876aa32 100644 --- a/newtests/variance/test.js +++ b/newtests/variance/test.js @@ -1,6 +1,5 @@ /* * @flow - * @lint-ignore-every LINEWRAP1 */ import {suite, test} from 'flow-dev-tools/src/test/Tester'; @@ -34,7 +33,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:52 52: (droA: dRWA); - ^^^^ Cannot cast \`droA\` to \`dRWA\` because an indexer property is read-only in \`dROA\` [1] but writable in \`dRWA\` [2]. + ^^^^ Cannot cast \`droA\` to \`dRWA\` because an index signature declaring the expected key / value type is read-only in \`dROA\` [1] but writable in \`dRWA\` [2]. References: 34: declare var droA: dROA; ^^^^ [1] @@ -62,7 +61,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:56 56: (dwoA: dRWA); - ^^^^ Cannot cast \`dwoA\` to \`dRWA\` because an indexer property is write-only in \`dWOA\` [1] but readable in \`dRWA\` [2]. + ^^^^ Cannot cast \`dwoA\` to \`dRWA\` because an index signature declaring the expected key / value type is write-only in \`dWOA\` [1] but readable in \`dRWA\` [2]. References: 35: declare var dwoA: dWOA; ^^^^ [1] @@ -123,7 +122,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:68 68: (droB: dRWA); - ^^^^ Cannot cast \`droB\` to \`dRWA\` because an indexer property is read-only in \`dROB\` [1] but writable in \`dRWA\` [2]. + ^^^^ Cannot cast \`droB\` to \`dRWA\` because an index signature declaring the expected key / value type is read-only in \`dROB\` [1] but writable in \`dRWA\` [2]. References: 38: declare var droB: dROB; ^^^^ [1] @@ -160,7 +159,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:72 72: (dwoB: dRWA); - ^^^^ Cannot cast \`dwoB\` to \`dRWA\` because an indexer property is write-only in \`dWOB\` [1] but readable in \`dRWA\` [2]. + ^^^^ Cannot cast \`dwoB\` to \`dRWA\` because an index signature declaring the expected key / value type is write-only in \`dWOB\` [1] but readable in \`dRWA\` [2]. References: 39: declare var dwoB: dWOB; ^^^^ [1] @@ -211,7 +210,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:56 56: (dwoA: dROA); - ^^^^ Cannot cast \`dwoA\` to \`dROA\` because an indexer property is write-only in \`dWOA\` [1] but read-only in \`dROA\` [2]. + ^^^^ Cannot cast \`dwoA\` to \`dROA\` because an index signature declaring the expected key / value type is write-only in \`dWOA\` [1] but read-only in \`dROA\` [2]. References: 35: declare var dwoA: dWOA; ^^^^ [1] @@ -251,7 +250,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:72 72: (dwoB: dROA); - ^^^^ Cannot cast \`dwoB\` to \`dROA\` because an indexer property is write-only in \`dWOB\` [1] but read-only in \`dROA\` [2]. + ^^^^ Cannot cast \`dwoB\` to \`dROA\` because an index signature declaring the expected key / value type is write-only in \`dWOB\` [1] but read-only in \`dROA\` [2]. References: 39: declare var dwoB: dWOB; ^^^^ [1] @@ -289,7 +288,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:52 52: (droA: dWOA); - ^^^^ Cannot cast \`droA\` to \`dWOA\` because an indexer property is read-only in \`dROA\` [1] but write-only in \`dWOA\` [2]. + ^^^^ Cannot cast \`droA\` to \`dWOA\` because an index signature declaring the expected key / value type is read-only in \`dROA\` [1] but write-only in \`dWOA\` [2]. 
References: 34: declare var droA: dROA; ^^^^ [1] @@ -353,7 +352,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:68 68: (droB: dWOA); - ^^^^ Cannot cast \`droB\` to \`dWOA\` because an indexer property is read-only in \`dROB\` [1] but write-only in \`dWOA\` [2]. + ^^^^ Cannot cast \`droB\` to \`dWOA\` because an index signature declaring the expected key / value type is read-only in \`dROB\` [1] but write-only in \`dWOA\` [2]. References: 38: declare var droB: dROB; ^^^^ [1] @@ -476,7 +475,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:52 52: (droA: dRWB); - ^^^^ Cannot cast \`droA\` to \`dRWB\` because an indexer property is read-only in \`dROA\` [1] but writable in \`dRWB\` [2]. + ^^^^ Cannot cast \`droA\` to \`dRWB\` because an index signature declaring the expected key / value type is read-only in \`dROA\` [1] but writable in \`dRWB\` [2]. References: 34: declare var droA: dROA; ^^^^ [1] @@ -513,7 +512,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:56 56: (dwoA: dRWB); - ^^^^ Cannot cast \`dwoA\` to \`dRWB\` because an indexer property is write-only in \`dWOA\` [1] but readable in \`dRWB\` [2]. + ^^^^ Cannot cast \`dwoA\` to \`dRWB\` because an index signature declaring the expected key / value type is write-only in \`dWOA\` [1] but readable in \`dRWB\` [2]. References: 35: declare var dwoA: dWOA; ^^^^ [1] @@ -627,7 +626,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:56 56: (dwoA: dROB); - ^^^^ Cannot cast \`dwoA\` to \`dROB\` because an indexer property is write-only in \`dWOA\` [1] but read-only in \`dROB\` [2]. + ^^^^ Cannot cast \`dwoA\` to \`dROB\` because an index signature declaring the expected key / value type is write-only in \`dWOA\` [1] but read-only in \`dROB\` [2]. References: 35: declare var dwoA: dWOA; ^^^^ [1] @@ -665,7 +664,7 @@ export default suite(({addFile, addFiles, addCode}) => [ ` test.js:52 52: (droA: dWOB); - ^^^^ Cannot cast \`droA\` to \`dWOB\` because an indexer property is read-only in \`dROA\` [1] but write-only in \`dWOB\` [2]. + ^^^^ Cannot cast \`droA\` to \`dWOB\` because an index signature declaring the expected key / value type is read-only in \`dROA\` [1] but write-only in \`dWOB\` [2]. 
References: 34: declare var droA: dROA; ^^^^ [1] diff --git a/opam b/opam index 05976712538..61eea0ebc31 100644 --- a/opam +++ b/opam @@ -1,32 +1,48 @@ -opam-version: "1.2" +opam-version: "2.0" name: "flowtype" -version: "0.82.0" +version: "0.108.0" maintainer: "flow@fb.com" +authors: "Flow Team " +license: "MIT" homepage: "https://flow.org" -dev-repo: "https://github.com/facebook/flow.git" -bug-reports: "https://github.com/facebook/flow/issues" -authors: ["Flow Team "] doc: "https://flow.org/en/docs/getting-started/" -license: "MIT" +bug-reports: "https://github.com/facebook/flow/issues" depends: [ + "ocaml" {>= "4.07.1"} "base-unix" "base-bytes" - "dtoa" { >= "0.3.1" } + "dtoa" {>= "0.3.1"} "ocamlbuild" {build} "ocamlfind" {build} - "core_kernel" { = "v0.11.1" } - "sedlex" { >= "1.99.4" } - "lwt" { >= "3.3.0" } - "lwt_log" { = "1.0.0" } - "lwt_ppx" { >= "1.1.0" } + "ocaml-migrate-parsetree" {build} + "ounit" {with-test} + "core_kernel" {= "v0.11.1"} + "sedlex" {= "1.99.4"} + "lwt" {>= "4.2.1"} + "lwt_log" {= "1.1.0"} + "lwt_ppx" {>= "1.2.0"} + "ppx_let" {>= "0.11.0"} "ppx_deriving" {build} "ppx_gen_rec" {build} - "ppx_tools_versioned" { = "5.2" } + "ppx_tools_versioned" {= "5.2"} "visitors" {build} "wtf8" ] -available: [ocaml-version >= "4.05.0"] -build: [ [ "env" "FLOW_RELEASE=1" make ] ] -depexts: [ - [["alpine"] ["linux-headers"]] -] +build: ["env" "FLOW_RELEASE=1" make] +depexts: ["linux-headers"] {os-distribution = "alpine"} +dev-repo: "git+https://github.com/facebook/flow.git" +synopsis: "Flow is a static typechecker for JavaScript" +description: """ +To find out more about Flow, check out . + +Flow adds static typing to JavaScript to improve developer productivity and +code quality. In particular, static typing offers benefits like early error +checking, which helps you avoid certain kinds of runtime failures, and code +intelligence, which aids code maintenance, navigation, transformation, and +optimization. + +We have designed Flow so developers can reap its benefits without losing the +"feel" of coding in JavaScript. Flow adds minimal compile-time overhead, as it +does all its work proactively in the background. And Flow does not force you to +change how you code -- it performs sophisticated program analysis to work with +the idioms you already know and love.""" diff --git a/package.json b/package.json index c4c240bf0f7..18b8eaf306d 100644 --- a/package.json +++ b/package.json @@ -3,8 +3,14 @@ "workspaces": [ "newtests", "packages/flow-dev-tools", - "packages/flow-parser" + "packages/flow-parser", + "packages/flow-remove-types" ], + "resolutions": { + "handlebars": ">=4.0.14", + "lodash": ">=4.17.14", + "sshpk": ">=1.13.2" + }, "scripts": { "prepublish": "yarn workspace flow-parser run prepublish" } diff --git a/packages/flow-dev-tools/.flowconfig b/packages/flow-dev-tools/.flowconfig index 41d4230bd61..7c61528f243 100644 --- a/packages/flow-dev-tools/.flowconfig +++ b/packages/flow-dev-tools/.flowconfig @@ -4,9 +4,9 @@ [ignore] .*/json5/test/parse-cases/.* - +.*/newtests/lsp/completion/jsx.js .*/newtests/gen_flow_files_command/type_error.js -.*/newtests/ide/lazy/errorsWithFlowPragma.js +.*/packages/flow-remove-types/.* [options] suppress_comment=.*\\$FlowFixMe diff --git a/packages/flow-dev-tools/Makefile b/packages/flow-dev-tools/Makefile index f82c399306e..e3459dfb98f 100644 --- a/packages/flow-dev-tools/Makefile +++ b/packages/flow-dev-tools/Makefile @@ -1,4 +1,4 @@ -# Copyright (c) 2013-present, Facebook, Inc. +# Copyright (c) Facebook, Inc. 
and its affiliates. # All rights reserved. test: diff --git a/packages/flow-dev-tools/bin/tool b/packages/flow-dev-tools/bin/tool index dc5a5c233c3..2da1e31e829 100755 --- a/packages/flow-dev-tools/bin/tool +++ b/packages/flow-dev-tools/bin/tool @@ -12,8 +12,12 @@ babelConfig.plugins = babelConfig.plugins.map(function(p) { return resolvePlugin(p, __dirname); }); babelConfig.presets = babelConfig.presets.map(function(p) { + if (Array.isArray(p)) { + return [resolvePreset(p[0], __dirname), p[1]]; + } return resolvePreset(p, __dirname); }); require("babel-register")(babelConfig); +require("babel-polyfill"); require("../src/main.js").run(); diff --git a/packages/flow-dev-tools/package.json b/packages/flow-dev-tools/package.json index ceba8432045..26a2f49da5b 100644 --- a/packages/flow-dev-tools/package.json +++ b/packages/flow-dev-tools/package.json @@ -4,20 +4,18 @@ "license": "MIT", "version": "0.1.0", "dependencies": { + "babel-core": "^6.8.0", "babel-jest": "^21.0.2", - "babel-plugin-syntax-async-functions": "^6.8.0", "babel-plugin-syntax-flow": "^6.8.0", - "babel-plugin-syntax-trailing-function-commas": "^6.8.0", "babel-plugin-transform-class-properties": "^6.8.0", "babel-plugin-transform-flow-strip-types": "^6.8.0", "babel-plugin-transform-object-rest-spread": "^6.20.2", - "babel-plugin-transform-regenerator": "^6.8.0", "babel-polyfill": "^6.8.0", - "babel-preset-es2015": "^6.8.0", + "babel-preset-env": "^1.7.0", "babel-register": "^6.8.0", "blessed": "^0.1.81", - "colors": "~1.1.2", - "diff": "~3.2.0", + "colors": "^1.1.2", + "diff": "^3.5.0", "flow-parser": "~0.21.0", "glob": "^7.0.3", "jest": "^21.1.0", @@ -27,26 +25,34 @@ "newtests": "*", "resolve": "^1.1.7", "rimraf": "^2.5.2", - "sane": "^1.4.0", + "sane": "^1.4.0 || ^2.0.0 || ^3.0.0", + "semver": "^5.6.0", "source-map-support": "~0.4.0", "twit": "^2.1.5", - "vscode-jsonrpc": "^3.3.0" + "vscode-jsonrpc": "^4.0.0" }, "babel": { "presets": [ - "es2015" + [ + "env", + { + "targets": { + "node": "6" + } + } + ] ], "plugins": [ - "syntax-async-functions", "syntax-flow", - "syntax-trailing-function-commas", "transform-class-properties", "transform-flow-strip-types", - "transform-object-rest-spread", - "transform-regenerator" + "transform-object-rest-spread" ] }, "scripts": { "test": "make -C ../.. 
test" + }, + "engines": { + "node": ">=6" } } diff --git a/packages/flow-dev-tools/src/comment/add-commentsRunner.js b/packages/flow-dev-tools/src/comment/add-commentsRunner.js index cc4ebb501f8..5e1e734295f 100644 --- a/packages/flow-dev-tools/src/comment/add-commentsRunner.js +++ b/packages/flow-dev-tools/src/comment/add-commentsRunner.js @@ -14,7 +14,7 @@ import { } from '../flowResult'; import getPathToLoc from './getPathToLoc'; import getFlowErrors from './getFlowErrors'; -import getContext, {NORMAL, JSX, TEMPLATE} from './getContext'; +import getContext, {NORMAL, JSX, JSX_FRAGMENT, TEMPLATE} from './getContext'; import getAst from './getAst'; import type {PathNode} from './getPathToLoc'; @@ -61,7 +61,7 @@ class BlessedError { if (this.selectedMessage != null) { return this.messages[this.selectedMessage].loc; } else { - return mainLocOfError(this.error); + return mainSourceLocOfError(this.error); } } @@ -122,7 +122,50 @@ class BlessedError { } } -export default async function(args: Args): Promise { +function mainSourceLocOfError(error: FlowError): ?FlowLoc { + const { operation, message } = error; + for (const msg of [operation, ...message]) { + if (msg && msg.loc && msg.loc.type === 'SourceFile') { + return msg.loc; + } + } + return null; +} + +/** + * Filter out errors without a main location or a source file + */ +function filterErrors(errors: Array): Array { + return errors.filter(e => mainSourceLocOfError(e) != null); +} + +/** + * Wrap errors with some extra functionality + */ +function blessErrors(errors: Array): Array { + return errors.map(e => new BlessedError(e)); +} + +async function nonInteractive(args: Args): Promise { + let flowResult = await getFlowErrors( + args.bin, + args.errorCheckCommand, + args.root, + args.flowconfigName, + ); + + if (flowResult.passed) { + console.log("No errors found. Nothing to do. Exiting"); + return; + } + + const errors = blessErrors(filterErrors(flowResult.errors)); + + await addComments(args, errors); + process.exit(0); +} + +async function interactive(args: Args): Promise { // Use blessed to select which comments to remove // Create a screen object. 
@@ -217,9 +260,7 @@ export default async function(args: Args): Promise { }); // Filter out errors without a main location - const errors = flowResult.errors - .filter(e => mainLocOfError(e) != null) - .map(e => new BlessedError(e)); + const errors = blessErrors(filterErrors(flowResult.errors)); let locationToErrorsMap: Map>; let scrollToLocationMap; @@ -537,13 +578,13 @@ export default async function(args: Args): Promise { if (err == null && value != null) { args.comment = value; screen.destroy(); - await addComments(args, selectedErrors) + await addComments(args, selectedErrors); process.exit(0); } }); } else { screen.destroy(); - await addComments(args, selectedErrors) + await addComments(args, selectedErrors); process.exit(0); } }; @@ -553,10 +594,13 @@ export default async function(args: Args): Promise { showDetails(1); locations.focus(); screen.render(); +} +export default async function(args: Args): Promise { if (args.all) { - errors.map(e => e.select()); - await doAddComments(); + await nonInteractive(args); + } else { + await interactive(args); } } @@ -699,7 +743,7 @@ function addCommentToCode(comment: string, code: string, loc: FlowLoc, path: Arr formatComment(comment, lines[loc.start.line-1]), lines.slice(loc.start.line-1), ).join("\n"); - } else if (inside === JSX && ast.type === 'JSXElement') { + } else if ((inside === JSX_FRAGMENT || inside === JSX) && ast.type === 'JSXElement') { /* Ok, so we have something like * * diff --git a/packages/flow-dev-tools/src/comment/add-commentsRunner.test.js b/packages/flow-dev-tools/src/comment/add-commentsRunner.test.js index 93207ee527f..0a3ce3b396f 100644 --- a/packages/flow-dev-tools/src/comment/add-commentsRunner.test.js +++ b/packages/flow-dev-tools/src/comment/add-commentsRunner.test.js @@ -12,12 +12,12 @@ test('addCommentsToCode', async () => { longComment, testInput, /* Intentionally made these out of order to test that they are still inserted properly */ - [1, 5, 3].map(makeLoc), + [1, 6, 5, 3].map(makeLoc), flowBinPath, ) ).toEqual([ testOutput, - 3 + 4 ]); }); @@ -26,11 +26,13 @@ const testInput = `const bar = 4; const foo = 4; const baz = 3; -
-  <span>
-    foo
-  </span>
-</div>
+<>
+<div>
+  <span>
+    foo
+  </span>
+</div>
+</>
+
 `;
 
 const testOutput =
@@ -41,13 +43,17 @@ const foo = 4;
 
 /* this is a really long comment that definitely goes over the line length
  * limit so the tool has to wrap it */
 const baz = 3;
-<div>
+<>
 {/* this is a really long comment that definitely goes over the line length
   * limit so the tool has to wrap it */}
-  <span>
-    foo
-  </span>
-</div>
+<div>
+  {/* this is a really long comment that definitely goes over the line length
+   * limit so the tool has to wrap it */}
+  <span>
+    foo
+  </span>
+</div>
+ `; // This simulates an error location spanning the entire line. The code looks for AST nodes that are diff --git a/packages/flow-dev-tools/src/comment/getContext.js b/packages/flow-dev-tools/src/comment/getContext.js index 1a68a39af55..2d311cc1653 100644 --- a/packages/flow-dev-tools/src/comment/getContext.js +++ b/packages/flow-dev-tools/src/comment/getContext.js @@ -3,9 +3,10 @@ import type {PathNode} from './getPathToLoc'; import type {FlowLoc} from '../flowResult'; -export opaque type Context = 'normal' | 'jsx' | 'template'; +export opaque type Context = 'normal' | 'jsx' | 'jsx_fragment' | 'template'; export const NORMAL: Context = 'normal'; export const JSX: Context = 'jsx'; +export const JSX_FRAGMENT: Context = 'jsx_fragment'; export const TEMPLATE: Context = 'template'; export default function(loc: FlowLoc, path: Array): [Context, Object /* ast */] { @@ -24,6 +25,12 @@ export default function(loc: FlowLoc, path: Array): [Context, Object / // We've entered a JSX children block inside = JSX; i++; + } else if (i < path.length - 1 && + ast.type === 'JSXFragment' && + path[i+1].key === 'children') { + // We've entered a JSX fragment block + inside = JSX_FRAGMENT; + i++; } else if (i < path.length - 1 && ast.type === 'TemplateLiteral' && path[i+1].key === 'expressions') { diff --git a/packages/flow-dev-tools/src/comment/remove-commentsRunner.js b/packages/flow-dev-tools/src/comment/remove-commentsRunner.js index ed4eef75ddb..f07b924b4ca 100644 --- a/packages/flow-dev-tools/src/comment/remove-commentsRunner.js +++ b/packages/flow-dev-tools/src/comment/remove-commentsRunner.js @@ -56,6 +56,77 @@ async function removeUnusedErrorSuppressions( await writeFile(filename, contents); } +const edible = /[\t ]/; +/* This is the most confusing part of this command. A simple version of this + * code would just remove exact characters of a comment. This might leave + * extra whitespace and blank lines. So this code tries to expand the range + * we remove to cover the following cases + * + * /* Comment with nothing before or after it * / + * var foo; /* Comment with something before it * / + * /* Comment with something after it * / var foo; + * var foo; /* Comment with something before and after it * / var bar; + * + * The TL;DR is that we only want to expand the range and remove the newline + * in the case where there is nothing before or after it + */ +function expandComment( + contents: string, + startOffset: number, + endOffset: number, + context: Context, +) { + const length = contents.length; + + // ranges are [start, end). we're interested in the values of the characters + // within the range, so for origEnd, we subtract 1 from the end offset to + // make it inclusive. + let origStart = startOffset; + let origEnd = endOffset - 1; + + if (context === JSX && contents[origStart-1] === '{' && contents[origEnd+1] === '}') { + origStart--; + origEnd++; + } + + let start = origStart; + let end = origEnd; + + while (start > 0) { + // Eat whitespace towards the start of the line + if (contents[start-1].match(edible)) { + start--; + } else if (contents[start-1] === "\n") { + // If we make it to the beginning of the line, awesome! Let's try and + // expand the end too! + while (end < length - 1) { + // Eat whitespace towards the end of the line + if (contents[end+1].match(edible)) { + end++; + } else if (contents[end+1] === "\n") { + // If we make it to both the beginning and the end of the line, + // then we can totally remove a newline! 
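(Editor's worked example of what expandComment as a whole produces; illustrative only, not part of the patch. It assumes expandComment and the NORMAL context constant are in scope, as they are in this module.)

// A suppression comment alone on its line: the range grows left to the start
// of the line and right across the trailing newline, so the whole line goes.
const contents = 'var a;\n  /* $FlowFixMe */\nvar b;\n';
const commentStart = contents.indexOf('/*');      // 9
const commentEnd = contents.indexOf('*/') + 2;    // 25, exclusive
const [start, end] = expandComment(contents, commentStart, commentEnd, NORMAL);
// start === 7, end === 26, so contents.slice(0, start) + contents.slice(end)
// is 'var a;\nvar b;\n'. If the comment shared its line with code
// ('var a; /* $FlowFixMe */'), the expansion is undone and only the comment
// characters themselves are removed, leaving the rest of the line intact.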
+ end++; + break; + } else { + // Otherwise we can't, undo the expansion + start = origStart; + end = origEnd; + break; + } + } + break; + } else { + // If we hit something else, then undo the start expansion + start = origStart; + break; + } + } + + // as above, ranges are [X, Y) but in this function, `end` is inclusive. so + // add 1 to make it exclusive again. + return [start, end + 1]; +} // Exported for testing export async function removeUnusedErrorSuppressionsFromText( contents: string, @@ -69,20 +140,6 @@ export async function removeUnusedErrorSuppressionsFromText( const ast = await getAst(contents, flowBinPath) - /* This is the most confusing part of this command. A simple version of this - * code would just remove exact characters of a comment. This might leave - * extra whitespace and blank lines. So this code tries to expand the range - * we remove to cover the following cases - * - * /* Comment with nothing before or after it * / - * var foo; /* Comment with something before it * / - * /* Comment with something after it * / var foo; - * var foo; /* Comment with something before and after it * / var bar; - * - * The TL;DR is that we only want to expand the range and remove the newline - * in the case where there is nothing before or after it - */ - const edible = /[\t ]/; for (const error of errors) { const path = getPathToLoc(error, ast); let context: Context; @@ -92,49 +149,12 @@ export async function removeUnusedErrorSuppressionsFromText( [context] = getContext(error, path); } - const length = contents.length; - let origStart = error.start.offset; - let origEnd = error.end.offset - 1; - - if (context === JSX && contents[origStart-1] === '{' && contents[origEnd+1] === '}') { - origStart--; - origEnd++; - } + const origStart = error.start.offset; + const origEnd = error.end.offset; - let start = origStart; - let end = origEnd; - - while (start > 0) { - // Eat whitespace towards the start of the line - if (contents[start-1].match(edible)) { - start--; - } else if (contents[start-1] === "\n") { - // If we make it to the beginning of the line, awesome! Let's try and - // expand the end too! - while (end < length - 1) { - // Eat whitespace towards the end of the line - if (contents[end+1].match(edible)) { - end++; - } else if (contents[end+1] === "\n") { - // If we make it to both the beginning and the end of the line, - // then we can totally remove a newline! 
- end++; - break; - } else { - // Otherwise we can't, undo the expansion - start = origStart; - end = origEnd; - break; - } - } - break; - } else { - // If we hit something else, then undo the start expansion - start = origStart; - break; - } - } - contents = contents.slice(0, start) + contents.slice(end+1); + // remove the comment and surrounding whitespace + let [start, end] = expandComment(contents, origStart, origEnd, context); + contents = contents.slice(0, start) + contents.slice(end); } return contents; } diff --git a/packages/flow-dev-tools/src/new-test/new-testRunner.js b/packages/flow-dev-tools/src/new-test/new-testRunner.js index d8c2be89ef2..aff5189a1c5 100644 --- a/packages/flow-dev-tools/src/new-test/new-testRunner.js +++ b/packages/flow-dev-tools/src/new-test/new-testRunner.js @@ -31,7 +31,6 @@ async function newTest(bin: string, suiteName: string): Promise { join(dest, 'test.js'), `/* * @flow - * @lint-ignore-every LINEWRAP1 */ diff --git a/packages/flow-dev-tools/src/test/RunQueue.js b/packages/flow-dev-tools/src/test/RunQueue.js index 20ba8662955..86e6a1f5410 100644 --- a/packages/flow-dev-tools/src/test/RunQueue.js +++ b/packages/flow-dev-tools/src/test/RunQueue.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import colors from 'colors/safe'; diff --git a/packages/flow-dev-tools/src/test/Suite.js b/packages/flow-dev-tools/src/test/Suite.js index 58d552a0991..05cd7ff3131 100644 --- a/packages/flow-dev-tools/src/test/Suite.js +++ b/packages/flow-dev-tools/src/test/Suite.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import type {Tests, Steps} from './Tester'; diff --git a/packages/flow-dev-tools/src/test/Test.js b/packages/flow-dev-tools/src/test/Test.js index 7ca5fe3f8da..5e1049904b1 100644 --- a/packages/flow-dev-tools/src/test/Test.js +++ b/packages/flow-dev-tools/src/test/Test.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {defaultFlowConfigName} from '../constants'; @@ -14,6 +13,7 @@ export default class Test { steps: StepList; tags: Array; lazyMode: 'ide' | 'fs' | null = null; + shouldWaitForRecheck: boolean = true; constructor(name: string, steps: StepList) { this.name = name; @@ -35,4 +35,9 @@ export default class Test { this.lazyMode = mode; return this; } + + waitForRecheck(wait_for_recheck: boolean): this { + this.shouldWaitForRecheck = wait_for_recheck; + return this; + } } diff --git a/packages/flow-dev-tools/src/test/TestStep.js b/packages/flow-dev-tools/src/test/TestStep.js index 9870d694d5a..74a6cc0972f 100644 --- a/packages/flow-dev-tools/src/test/TestStep.js +++ b/packages/flow-dev-tools/src/test/TestStep.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import Builder from './builder'; @@ -14,9 +13,9 @@ import sortedStdout from './assertions/sortedStdout'; import exitCodes from './assertions/exitCodes'; import serverRunning from './assertions/serverRunning'; import noop from './assertions/noop'; -import ideNoNewMessagesAfterSleep from './assertions/ideNoNewMessagesAfterSleep'; -import ideNewMessagesWithTimeout from './assertions/ideNewMessagesWithTimeout'; -import ideStderr from './assertions/ideStderr'; +import lspNoNewMessagesAfterSleep from './assertions/lspNoNewMessagesAfterSleep'; +import lspNewMessagesWithTimeout from './assertions/lspNewMessagesWithTimeout'; +import lspStderr from './assertions/lspStderr'; import simpleDiffAssertion from './assertions/simpleDiffAssertion'; import {sleep} from '../utils/async'; @@ -30,7 +29,7 
@@ import type { import type {TestBuilder} from './builder'; import type {FlowResult} from '../flowResult'; import type {StepEnvReadable, StepEnvWriteable} from './stepEnv'; -import type {IDEMessage} from './ide'; +import type {LSPMessage} from './lsp'; type Action = ( builder: TestBuilder, @@ -76,6 +75,7 @@ export class TestStep { _startsIde: boolean; _readsIdeMessages: boolean; _allowServerToDie: boolean; + _timeout: ?number; constructor(step?: TestStep) { this._actions = step == null ? [] : step._actions.slice(); @@ -86,6 +86,7 @@ export class TestStep { this._startsIde = step == null ? false : step._startsIde; this._readsIdeMessages = step == null ? false : step._readsIdeMessages; this._allowServerToDie = step == null ? false : step._allowServerToDie; + this._timeout = step == null ? null : step._timeout; } async performActions( @@ -126,6 +127,10 @@ export class TestStep { allowFlowServerToDie(): boolean { return this._allowServerToDie; } + + getTimeout(): ?number { + return this._timeout; + } } class TestStepFirstOrSecondStage extends TestStep { @@ -169,20 +174,26 @@ class TestStepFirstOrSecondStage extends TestStep { } /* This is mainly useful for debugging. Actual tests probably shouldn't - * test the stderr output. But when you're working on `flow ide`, you can + * test the stderr output. But when you're working on `flow lsp`, you can * log things to stderr and use this assertion to see what's being logged * * addCode('foo') - * .ideNoNewMessagesAfterSleep(500) - * .ideStderr('Foo') + * .lspNoNewMessagesAfterSleep(500) + * .lspStderr('Foo') */ - ideStderr(expected: string): TestStepSecondStage { + lspStderr(expected: string): TestStepSecondStage { const assertLoc = searchStackForTestAssertion(); - const ret = this._cloneWithAssertion(ideStderr(expected, assertLoc)); + const ret = this._cloneWithAssertion(lspStderr(expected, assertLoc)); ret._needsFlowCheck = true; return ret; } + timeout(seconds: number): TestStepSecondStage { + const ret = new TestStepSecondStage(this); + ret._timeout = seconds; + return ret; + } + _cloneWithAssertion(assertion: ErrorAssertion) { const ret = new TestStepSecondStage(this); ret._assertions.push(assertion); @@ -253,29 +264,29 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { return ret; }; - waitUntilIDEStatus: (number, 'stopped' | 'running') => TestStepFirstStage = ( + waitUntilLSPStatus: (number, 'stopped' | 'running') => TestStepFirstStage = ( timeoutMs, expected, ) => { const ret = this._cloneWithAction(async (builder, env) => { - await builder.waitUntilIDEStatus(timeoutMs, expected); + await builder.waitUntilLSPStatus(timeoutMs, expected); }); return ret; }; - verifyIDEStatus: ( + verifyLSPStatus: ( 'stopped' | 'running', ) => TestStepSecondStage = expected => { const assertLoc = searchStackForTestAssertion(); const ret = this._cloneWithAssertion((reason, env) => { - const actual = env.getIDERunning(); - const suggestion = {method: 'verifyIDEStatus', args: [actual]}; + const actual = env.getLSPRunning(); + const suggestion = {method: 'verifyLSPStatus', args: [actual]}; return simpleDiffAssertion( expected, actual, assertLoc, reason, - "'is IDE running?'", + "'is LSP running?'", suggestion, ); }); @@ -320,41 +331,38 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { return ret; }; - ideStart: ( - {|mode: 'legacy'|} | {|mode: 'lsp', needsFlowServer: boolean|}, - ) => TestStepFirstStage = arg => { - const mode = arg.mode; - const needsFlowServer = arg.mode === 'legacy' ? 
true : arg.needsFlowServer; - const doFlowCheck = arg.mode === 'legacy' ? true : false; + lspStart: ({|needsFlowServer: boolean|}) => TestStepFirstStage = arg => { + const needsFlowServer = arg.needsFlowServer; const ret = this._cloneWithAction(async (builder, env) => { - await builder.createIDEConnection(mode); - if (doFlowCheck) { - env.triggerFlowCheck(); - } + await builder.createLSPConnection(); }); ret._startsIde = true; ret._needsFlowServer = needsFlowServer; // to start flow server before action is executed return ret; }; - ideStartAndConnect: (?number) => TestStepSecondStage = timeoutMsOpt => { + lspStartAndConnect: (?number, ?{}) => TestStepSecondStage = ( + timeoutMsOpt, + initParamsOpt, + ) => { const assertLoc = searchStackForTestAssertion(); const timeoutMs = timeoutMsOpt || 60000; + const initParams = initParamsOpt || this.lspInitializeParams; const expected = 'telemetry/connectionStatus{true}'; const ret = this._cloneWithAction(async (builder, env) => { - await builder.createIDEConnection('lsp'); - const promise = builder.sendIDERequestAndWaitForResponse('initialize', [ - this.lspInitializeParams, + await builder.createLSPConnection(); + const promise = builder.sendLSPRequestAndWaitForResponse('initialize', [ + initParams, ]); // discarding the promise; instead we wait in the next statement... - await builder.waitUntilIDEMessage(timeoutMs, expected); + await builder.waitUntilLSPMessage(timeoutMs, expected); })._cloneWithAssertion((reason, env) => { const isConnected = env - .getIDEMessagesSinceStartOfStep() + .getLSPMessagesSinceStartOfStep() .some(msg => Builder.doesMessageMatch(msg, expected)); const suggestion = { - method: 'ideStartAndConnect', + method: 'lspStartAndConnect', args: [timeoutMs * 2], }; return simpleDiffAssertion( @@ -362,7 +370,7 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { isConnected ? 'connected' : 'disconnected' + - JSON.stringify(env.getIDEMessagesSinceStartOfStep()), + JSON.stringify(env.getLSPMessagesSinceStartOfStep()), assertLoc, reason, "'is connected to flow server?'", @@ -374,70 +382,70 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { return ret; }; - ideStop: () => TestStepFirstStage = () => { + lspStop: () => TestStepFirstStage = () => { const ret = this._cloneWithAction(async (builder, env) => { - await builder.cleanupIDEConnection(); + await builder.cleanupLSPConnection(); }); ret._needsFlowServer = true; return ret; }; - ideNotification: (string, ...params: Array) => TestStepFirstStage = ( + lspNotification: (string, ...params: Array) => TestStepFirstStage = ( method, ...params ) => { const ret = this._cloneWithAction(async (builder, env) => - builder.sendIDENotification(method, params), + builder.sendLSPNotification(method, params), ); return ret; }; - ideResponse: ( + lspResponse: ( number | 'mostRecent', ...params: Array ) => TestStepFirstStage = (id, ...params) => { const ret = this._cloneWithAction(async (builder, env) => { - await builder.sendIDEResponse(id, params); + await builder.sendLSPResponse(id, params); }); return ret; }; - ideRequest: (string, ...params: Array) => TestStepFirstStage = ( + lspRequest: (string, ...params: Array) => TestStepFirstStage = ( method, ...params ) => { const ret = this._cloneWithAction(async (builder, env) => { - const promise = builder.sendIDERequestAndWaitForResponse(method, params); + const promise = builder.sendLSPRequestAndWaitForResponse(method, params); // We don't do anything with that promise; user will wait for messages later. 
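(Editor's aside: a hedged sketch of how a fire-and-forget lspRequest is paired with a later wait in a test suite. The suite()/test() helpers come from Tester.js; the import path, file URI, positions, and ignore list here are invented for illustration and are not part of this patch.)

import {suite, test} from '../Tester'; // editor's guess at the relative path

export default suite(({lspStartAndConnect, lspRequest, waitUntilLSPMessage}) => [
  test('definition request is answered eventually', [
    lspStartAndConnect(),
    lspRequest('textDocument/definition', {
      textDocument: {uri: 'file:///project/foo.js'}, // invented URI
      position: {line: 0, character: 5},
    })
      // lspRequest itself does not wait, as the comment above says; this is
      // where the step actually blocks until a matching message shows up
      .waitUntilLSPMessage(10000, 'textDocument/definition')
      .verifyAllLSPMessagesInStep(
        ['textDocument/definition'],
        ['telemetry/connectionStatus'],
      ),
  ]),
]);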
// TODO(ljw): at end of step, verify that no promises are left outstanding }); return ret; }; - ideRequestAndWaitUntilResponse: ( + lspRequestAndWaitUntilResponse: ( string, ...params: Array ) => TestStepFirstStage = (method, ...params) => { const ret = this._cloneWithAction(async (builder, env) => { - await builder.sendIDERequestAndWaitForResponse(method, params); + await builder.sendLSPRequestAndWaitForResponse(method, params); }); ret._readsIdeMessages = true; return ret; }; - waitUntilIDEMessage: (number, string) => TestStepFirstStage = ( + waitUntilLSPMessage: (number, string) => TestStepFirstStage = ( timeoutMs, method, ) => { const ret = this._cloneWithAction( async (builder, env) => - await builder.waitUntilIDEMessage(timeoutMs, method), + await builder.waitUntilLSPMessage(timeoutMs, method), ); ret._readsIdeMessages = true; return ret; }; - // verifyAllIDEMessagesInStep(expects=['A','B{C,D}'], ignores=['B','E']) + // verifyAllLSPMessagesInStep(expects=['A','B{C,D}'], ignores=['B','E']) // will look at all the actual messages that arrived in this step. // In this case there must be an "A", followed by a "B" which has // the strings C and D in its JSON representation (up to whitespace). @@ -445,13 +453,13 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { // ignores list - in this case we'd ignore any "B" (either because it came // in the wrong order or because it didn't have C and D), and ignore any "E". // But if there are unexpected messages not in the ignore list, then we fail. - verifyAllIDEMessagesInStep: ( + verifyAllLSPMessagesInStep: ( Array, Array, ) => TestStepSecondStage = (expects, ignores) => { const assertLoc = searchStackForTestAssertion(); const ret = this._cloneWithAssertion((reason, env) => { - const actualMessages = env.getIDEMessagesSinceStartOfStep(); + const actualMessages = env.getLSPMessagesSinceStartOfStep(); let actuals: Array = []; let iExpect = 0; for (let iActual = 0; iActual < actualMessages.length; iActual++) { @@ -473,7 +481,7 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { } const suggestion = { - method: 'verifyAllIDEMessagesInStep', + method: 'verifyAllLSPMessagesInStep', args: [actuals, ignores], }; return simpleDiffAssertion( @@ -489,14 +497,14 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { return ret; }; - // waitAndVerifyNoIDEMessagesSinceStartOfStep: if any messages arrive since the start + // waitAndVerifyNoLSPMessagesSinceStartOfStep: if any messages arrive since the start // of this step until the timeout then it fails; otherwise it succeeds - waitAndVerifyNoIDEMessagesSinceStartOfStep: number => TestStepSecondStage = timeoutMs => { + waitAndVerifyNoLSPMessagesSinceStartOfStep: number => TestStepSecondStage = timeoutMs => { const assertLoc = searchStackForTestAssertion(); const ret = this._cloneWithAction(async (builder, env) => { await sleep(timeoutMs); - })._cloneWithAssertion(ideNoNewMessagesAfterSleep(timeoutMs, assertLoc)); + })._cloneWithAssertion(lspNoNewMessagesAfterSleep(timeoutMs, assertLoc)); ret._readsIdeMessages = true; return ret; }; @@ -506,22 +514,22 @@ export class TestStepFirstStage extends TestStepFirstOrSecondStage { await sleep(timeoutMs); }); - // waitAndVerifyAllIDEMessagesContentSinceStartOfStep: will consider all messages that + // waitAndVerifyAllLSPMessagesContentSinceStartOfStep: will consider all messages that // have arrived since the start of the step, and will consider further // messages that arrive up to the expected message count until 
the timeout. // (This set of messages to consider may therefore be larger than, same // size, or smaller than the expected count). If the messages to consider // are identical to the expected messages, then it succeeds. - waitAndVerifyAllIDEMessagesContentSinceStartOfStep: ( + waitAndVerifyAllLSPMessagesContentSinceStartOfStep: ( number, - $ReadOnlyArray, + $ReadOnlyArray, ) => TestStepSecondStage = (timeoutMs, expected) => { const assertLoc = searchStackForTestAssertion(); const ret = this._cloneWithAction(async (builder, env) => { - await builder.waitUntilIDEMessageCount(timeoutMs, expected.length); + await builder.waitUntilLSPMessageCount(timeoutMs, expected.length); })._cloneWithAssertion( - ideNewMessagesWithTimeout(timeoutMs, expected, assertLoc), + lspNewMessagesWithTimeout(timeoutMs, expected, assertLoc), ); ret._readsIdeMessages = true; return ret; diff --git a/packages/flow-dev-tools/src/test/Tester.js b/packages/flow-dev-tools/src/test/Tester.js index a4c7858c156..7bb625a72b7 100644 --- a/packages/flow-dev-tools/src/test/Tester.js +++ b/packages/flow-dev-tools/src/test/Tester.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {format} from 'util'; diff --git a/packages/flow-dev-tools/src/test/assertions/assertionTypes.js b/packages/flow-dev-tools/src/test/assertions/assertionTypes.js index cd2e84a7b0a..316d2e5ba90 100644 --- a/packages/flow-dev-tools/src/test/assertions/assertionTypes.js +++ b/packages/flow-dev-tools/src/test/assertions/assertionTypes.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import type {StepEnvReadable} from '../stepEnv'; @@ -19,13 +18,13 @@ type AssertionMethod = | 'stderr' | 'sortedStdout' | 'exitCodes' - | 'ideStartAndConnect' + | 'lspStartAndConnect' | 'verifyServerStatus' - | 'verifyIDEStatus' - | 'verifyAllIDEMessagesInStep' - | 'waitAndVerifyAllIDEMessagesContentSinceStartOfStep' - | 'waitAndVerifyNoIDEMessagesSinceStartOfStep' - | 'ideStderr'; + | 'verifyLSPStatus' + | 'verifyAllLSPMessagesInStep' + | 'waitAndVerifyAllLSPMessagesContentSinceStartOfStep' + | 'waitAndVerifyNoLSPMessagesSinceStartOfStep' + | 'lspStderr'; export type Suggestion = { method: AssertionMethod, diff --git a/packages/flow-dev-tools/src/test/assertions/exitCodes.js b/packages/flow-dev-tools/src/test/assertions/exitCodes.js index c4bbaa2e28b..b9b405302a8 100644 --- a/packages/flow-dev-tools/src/test/assertions/exitCodes.js +++ b/packages/flow-dev-tools/src/test/assertions/exitCodes.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import simpleDiffAssertion from './simpleDiffAssertion'; diff --git a/packages/flow-dev-tools/src/test/assertions/ideNewMessagesWithTimeout.js b/packages/flow-dev-tools/src/test/assertions/ideNewMessagesWithTimeout.js deleted file mode 100644 index 88fb073e269..00000000000 --- a/packages/flow-dev-tools/src/test/assertions/ideNewMessagesWithTimeout.js +++ /dev/null @@ -1,39 +0,0 @@ -/** - * @flow - * @format - * @lint-ignore-every LINEWRAP1 - */ - -import simpleDiffAssertion from './simpleDiffAssertion'; - -import type {AssertionLocation, ErrorAssertion} from './assertionTypes'; -import type {IDEMessage} from '../ide'; - -export default function( - timeoutMs: number, - expected: $ReadOnlyArray, - assertLoc: ?AssertionLocation, -): ErrorAssertion { - return (reason: ?string, env) => { - const actual = env.getIDEMessagesSinceStartOfStep(); - - let suggestion = { - method: 'waitAndVerifyNoIDEMessagesSinceStartOfStep', - args: [Math.round(timeoutMs / 10)], - }; - if 
(actual.length > 0) { - suggestion = { - method: 'waitAndVerifyAllIDEMessagesContentSinceStartOfStep', - args: [timeoutMs, actual], - }; - } - return simpleDiffAssertion( - JSON.stringify(expected, null, 2), - JSON.stringify(actual, null, 2), - assertLoc, - reason, - 'new ide messages', - suggestion, - ); - }; -} diff --git a/packages/flow-dev-tools/src/test/assertions/ideNoNewMessagesAfterSleep.js b/packages/flow-dev-tools/src/test/assertions/ideNoNewMessagesAfterSleep.js deleted file mode 100644 index 1cba060ec26..00000000000 --- a/packages/flow-dev-tools/src/test/assertions/ideNoNewMessagesAfterSleep.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * @flow - * @format - * @lint-ignore-every LINEWRAP1 - */ - -import {format} from 'util'; - -import colors from 'colors/safe'; - -import type { - AssertionLocation, - ErrorAssertion, - ErrorAssertionResult, -} from './assertionTypes'; - -export default function( - timeoutMs: number, - assertLoc: ?AssertionLocation, -): ErrorAssertion { - return (reason: ?string, env): ErrorAssertionResult => { - const actual = env.getIDEMessagesSinceStartOfStep(); - if (actual.length > 0) { - const locMessage = - assertLoc == null - ? [] - : [ - format( - colors.white('%s line %d col %d'), - assertLoc.filename, - assertLoc.line, - assertLoc.column, - ), - ]; - const keyMessage = [ - colors.green('Actual IDE messages (+)') + - colors.grey(" didn't match expected no new IDE messages"), - ]; - const errorMessages = JSON.stringify(actual, null, 2) - .split('\n') - .map(line => colors.green('+ ' + line)); - const reasonMessage = - reason == null - ? [] - : [format(colors.grey('Reason: ') + colors.red('%s'), reason)]; - const messages = [].concat( - locMessage, - reasonMessage, - keyMessage, - errorMessages, - ); - const suggestion = { - method: 'waitAndVerifyAllIDEMessagesContentSinceStartOfStep', - args: [timeoutMs * 10, actual], - }; - - return {type: 'fail', messages, assertLoc, suggestion}; - } - return {type: 'pass'}; - }; -} diff --git a/packages/flow-dev-tools/src/test/assertions/ideStderr.js b/packages/flow-dev-tools/src/test/assertions/ideStderr.js deleted file mode 100644 index 56eba322f82..00000000000 --- a/packages/flow-dev-tools/src/test/assertions/ideStderr.js +++ /dev/null @@ -1,36 +0,0 @@ -/** - * @flow - * @format - * @lint-ignore-every LINEWRAP1 - */ - -import simpleDiffAssertion from './simpleDiffAssertion'; - -import type {AssertionLocation, ErrorAssertion} from './assertionTypes'; - -function formatIfJSON(actual: string) { - try { - return JSON.stringify(JSON.parse(actual), null, 2); - } catch (e) { - return actual; - } -} - -export default function( - expected: string, - assertLoc: ?AssertionLocation, -): ErrorAssertion { - return (reason: ?string, env) => { - const actual = formatIfJSON(env.getIDEStderrSinceStartOfStep()); - expected = formatIfJSON(expected); - const suggestion = {method: 'ideStderr', args: [formatIfJSON(actual)]}; - return simpleDiffAssertion( - expected, - actual, - assertLoc, - reason, - 'ideStderr', - suggestion, - ); - }; -} diff --git a/packages/flow-dev-tools/src/test/assertions/lspNewMessagesWithTimeout.js b/packages/flow-dev-tools/src/test/assertions/lspNewMessagesWithTimeout.js new file mode 100644 index 00000000000..6736a11c95e --- /dev/null +++ b/packages/flow-dev-tools/src/test/assertions/lspNewMessagesWithTimeout.js @@ -0,0 +1,38 @@ +/** + * @flow + * @format + */ + +import simpleDiffAssertion from './simpleDiffAssertion'; + +import type {AssertionLocation, ErrorAssertion} from './assertionTypes'; +import type 
{LSPMessage} from '../lsp'; + +export default function( + timeoutMs: number, + expected: $ReadOnlyArray, + assertLoc: ?AssertionLocation, +): ErrorAssertion { + return (reason: ?string, env) => { + const actual = env.getLSPMessagesSinceStartOfStep(); + + let suggestion = { + method: 'waitAndVerifyNoLSPMessagesSinceStartOfStep', + args: [Math.round(timeoutMs / 10)], + }; + if (actual.length > 0) { + suggestion = { + method: 'waitAndVerifyAllLSPMessagesContentSinceStartOfStep', + args: [timeoutMs, actual], + }; + } + return simpleDiffAssertion( + JSON.stringify(expected, null, 2), + JSON.stringify(actual, null, 2), + assertLoc, + reason, + 'new lsp messages', + suggestion, + ); + }; +} diff --git a/packages/flow-dev-tools/src/test/assertions/lspNoNewMessagesAfterSleep.js b/packages/flow-dev-tools/src/test/assertions/lspNoNewMessagesAfterSleep.js new file mode 100644 index 00000000000..d35705fbadc --- /dev/null +++ b/packages/flow-dev-tools/src/test/assertions/lspNoNewMessagesAfterSleep.js @@ -0,0 +1,60 @@ +/** + * @flow + * @format + */ + +import {format} from 'util'; + +import colors from 'colors/safe'; + +import type { + AssertionLocation, + ErrorAssertion, + ErrorAssertionResult, +} from './assertionTypes'; + +export default function( + timeoutMs: number, + assertLoc: ?AssertionLocation, +): ErrorAssertion { + return (reason: ?string, env): ErrorAssertionResult => { + const actual = env.getLSPMessagesSinceStartOfStep(); + if (actual.length > 0) { + const locMessage = + assertLoc == null + ? [] + : [ + format( + colors.white('%s line %d col %d'), + assertLoc.filename, + assertLoc.line, + assertLoc.column, + ), + ]; + const keyMessage = [ + colors.green('Actual LSP messages (+)') + + colors.grey(" didn't match expected no new LSP messages"), + ]; + const errorMessages = JSON.stringify(actual, null, 2) + .split('\n') + .map(line => colors.green('+ ' + line)); + const reasonMessage = + reason == null + ? 
[] + : [format(colors.grey('Reason: ') + colors.red('%s'), reason)]; + const messages = [].concat( + locMessage, + reasonMessage, + keyMessage, + errorMessages, + ); + const suggestion = { + method: 'waitAndVerifyAllLSPMessagesContentSinceStartOfStep', + args: [timeoutMs * 10, actual], + }; + + return {type: 'fail', messages, assertLoc, suggestion}; + } + return {type: 'pass'}; + }; +} diff --git a/packages/flow-dev-tools/src/test/assertions/lspStderr.js b/packages/flow-dev-tools/src/test/assertions/lspStderr.js new file mode 100644 index 00000000000..4c89dab7298 --- /dev/null +++ b/packages/flow-dev-tools/src/test/assertions/lspStderr.js @@ -0,0 +1,35 @@ +/** + * @flow + * @format + */ + +import simpleDiffAssertion from './simpleDiffAssertion'; + +import type {AssertionLocation, ErrorAssertion} from './assertionTypes'; + +function formatIfJSON(actual: string) { + try { + return JSON.stringify(JSON.parse(actual), null, 2); + } catch (e) { + return actual; + } +} + +export default function( + expected: string, + assertLoc: ?AssertionLocation, +): ErrorAssertion { + return (reason: ?string, env) => { + const actual = formatIfJSON(env.getLSPStderrSinceStartOfStep()); + expected = formatIfJSON(expected); + const suggestion = {method: 'lspStderr', args: [formatIfJSON(actual)]}; + return simpleDiffAssertion( + expected, + actual, + assertLoc, + reason, + 'lspStderr', + suggestion, + ); + }; +} diff --git a/packages/flow-dev-tools/src/test/assertions/newErrors.js b/packages/flow-dev-tools/src/test/assertions/newErrors.js index 3cb0b315802..b362b4100a2 100644 --- a/packages/flow-dev-tools/src/test/assertions/newErrors.js +++ b/packages/flow-dev-tools/src/test/assertions/newErrors.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {difference, prettyPrint} from '../../flowResult'; diff --git a/packages/flow-dev-tools/src/test/assertions/noNewErrors.js b/packages/flow-dev-tools/src/test/assertions/noNewErrors.js index 66b255921f4..ae054801070 100644 --- a/packages/flow-dev-tools/src/test/assertions/noNewErrors.js +++ b/packages/flow-dev-tools/src/test/assertions/noNewErrors.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {format} from 'util'; diff --git a/packages/flow-dev-tools/src/test/assertions/noop.js b/packages/flow-dev-tools/src/test/assertions/noop.js index 7e68dea1694..3f51744d93c 100644 --- a/packages/flow-dev-tools/src/test/assertions/noop.js +++ b/packages/flow-dev-tools/src/test/assertions/noop.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import type {ErrorAssertionResult} from './assertionTypes'; diff --git a/packages/flow-dev-tools/src/test/assertions/serverRunning.js b/packages/flow-dev-tools/src/test/assertions/serverRunning.js index 65f04290668..3c6c0d36768 100644 --- a/packages/flow-dev-tools/src/test/assertions/serverRunning.js +++ b/packages/flow-dev-tools/src/test/assertions/serverRunning.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import simpleDiffAssertion from './simpleDiffAssertion'; diff --git a/packages/flow-dev-tools/src/test/assertions/simpleDiffAssertion.js b/packages/flow-dev-tools/src/test/assertions/simpleDiffAssertion.js index d594e25c9f4..d1659a7ddff 100644 --- a/packages/flow-dev-tools/src/test/assertions/simpleDiffAssertion.js +++ b/packages/flow-dev-tools/src/test/assertions/simpleDiffAssertion.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {format} from 'util'; diff --git 
a/packages/flow-dev-tools/src/test/assertions/sortedStdout.js b/packages/flow-dev-tools/src/test/assertions/sortedStdout.js index e0a0f3cf16e..dc2555fc044 100644 --- a/packages/flow-dev-tools/src/test/assertions/sortedStdout.js +++ b/packages/flow-dev-tools/src/test/assertions/sortedStdout.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import simpleDiffAssertion from './simpleDiffAssertion'; diff --git a/packages/flow-dev-tools/src/test/assertions/stderr.js b/packages/flow-dev-tools/src/test/assertions/stderr.js index 490da67e75c..9bf333799d1 100644 --- a/packages/flow-dev-tools/src/test/assertions/stderr.js +++ b/packages/flow-dev-tools/src/test/assertions/stderr.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import simpleDiffAssertion from './simpleDiffAssertion'; diff --git a/packages/flow-dev-tools/src/test/assertions/stdout.js b/packages/flow-dev-tools/src/test/assertions/stdout.js index 12c1585595b..508952d85b1 100644 --- a/packages/flow-dev-tools/src/test/assertions/stdout.js +++ b/packages/flow-dev-tools/src/test/assertions/stdout.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import simpleDiffAssertion from './simpleDiffAssertion'; diff --git a/packages/flow-dev-tools/src/test/builder.js b/packages/flow-dev-tools/src/test/builder.js index 63778905e2e..b2942570b9a 100644 --- a/packages/flow-dev-tools/src/test/builder.js +++ b/packages/flow-dev-tools/src/test/builder.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {execSync, spawn} from 'child_process'; @@ -14,7 +13,7 @@ import EventEmitter from 'events'; import * as rpc from 'vscode-jsonrpc'; -import type {IDEMessage, RpcConnection} from './ide'; +import type {LSPMessage, RpcConnection} from './lsp'; import { appendFile, @@ -46,8 +45,7 @@ export class TestBuilder { flowConfigFilename: string; lazyMode: 'ide' | 'fs' | null; server: null | child_process$ChildProcess = null; - ide: null | { - mode: 'legacy' | 'lsp', + lsp: null | { connection: RpcConnection, process: child_process$ChildProcess, outstandingRequestsFromServer: Map< @@ -58,8 +56,8 @@ export class TestBuilder { stderr: Array, messageEmitter: EventEmitter, } = null; - ideMessages: Array; // this should outlive the death of the ide+server in a step - ideEmitter: EventEmitter; + lspMessages: Array; // this should outlive the death of the lsp+server in a step + lspEmitter: EventEmitter; serverEmitter: EventEmitter; sourceDir: string; suiteName: string; @@ -67,6 +65,7 @@ export class TestBuilder { testErrors = []; allowFlowServerToDie = false; logStream: stream$Writable | null; + waitForRecheck: boolean; constructor( bin: string, @@ -76,20 +75,22 @@ export class TestBuilder { testNum: number, flowConfigFilename: string, lazyMode: 'ide' | 'fs' | null, + wait_for_recheck: boolean, ) { this.bin = bin; // If we're testing lazy mode, then we must use status this.errorCheckCommand = lazyMode == null ? 
errorCheckCommand : 'status'; this.suiteName = suiteName; - this.ideEmitter = new EventEmitter(); + this.lspEmitter = new EventEmitter(); this.serverEmitter = new EventEmitter(); this.dir = join(baseDir, String(testNum)); this.sourceDir = join(getTestsDir(), suiteName); this.tmpDir = join(baseDir, 'tmp', String(testNum)); this.flowConfigFilename = flowConfigFilename; this.lazyMode = lazyMode; - this.ide = null; - this.ideMessages = []; + this.lsp = null; + this.lspMessages = []; + this.waitForRecheck = wait_for_recheck; } getFileName(): string { @@ -118,7 +119,7 @@ export class TestBuilder { format(fmt, ...args), ); return new Promise((resolve, reject) => { - logStream.write(msg, 'utf8', resolve); + logStream.write(msg, 'utf8', err => (err ? reject(err) : resolve())); }); } } @@ -337,6 +338,8 @@ export class TestBuilder { '--no-auto-restart', '--file-watcher', 'none', + '--wait-for-recheck', + String(this.waitForRecheck), ] .concat(lazyMode) .concat([this.dir]); @@ -399,40 +402,21 @@ export class TestBuilder { } } - async createIDEConnection(mode: 'legacy' | 'lsp'): Promise { - if (this.ide != null) { - // No-op if the server is already running - if (this.ide.mode == mode) { - return; - } else { - throw new Error(`IDE process already running as ${this.ide.mode}.`); - } + async createLSPConnection(): Promise { + if (this.lsp != null) { + // No-op if the lsp server is already running } - const primaryArg = mode === 'lsp' ? 'lsp' : 'ide'; - const args = - mode === 'lsp' - ? ['lsp', '--autostop', '--lazy-mode', 'ide'] - : [ - 'ide', - '--protocol', - 'very-unstable', - '--no-auto-start', - '--strip-root', - '--temp-dir', - this.tmpDir, - '--root', - this.dir, - ]; - const ideProcess = spawn(this.bin, args, { - // Useful for debugging flow ide + const args = ['lsp', '--autostop', '--lazy-mode', 'ide']; + const lspProcess = spawn(this.bin, args, { + // Useful for debugging flow lsp // stdio: ["pipe", "pipe", process.stderr], cwd: this.dir, env: {...process.env, OCAMLRUNPARAM: 'b'}, }); const connection = rpc.createMessageConnection( - new rpc.StreamMessageReader(ideProcess.stdout), - new rpc.StreamMessageWriter(ideProcess.stdin), + new rpc.StreamMessageReader(lspProcess.stdout), + new rpc.StreamMessageWriter(lspProcess.stdin), ); connection.listen(); @@ -440,25 +424,14 @@ export class TestBuilder { // the 'close' event is fired when its stdio streams have been closed. // The streams might be closed before exit if the process manually // closes them, or after if it leaves that to the system. - ideProcess.on('exit', (code, signal) => { - if (this.ide != null && this.ide.mode == 'legacy') { - this.testErrors.push( - format( - 'flow %s mysteriously died. Code: %d, Signal: %s, stderr:\n%s', - primaryArg, - code, - signal, - this.getIDEStderrSinceStartOfStep(), - ), - ); - } - this.cleanupIDEConnection(); + lspProcess.on('exit', (code, signal) => { + this.cleanupLSPConnection(); }); - ideProcess.on('close', () => this.cleanupIDEConnection()); + lspProcess.on('close', () => this.cleanupLSPConnection()); const messageEmitter = new EventEmitter(); - const messages: Array = []; + const messages: Array = []; const outstandingRequestsInfo = { nextId: 1, mostRecent: (null: ?number), @@ -477,9 +450,9 @@ export class TestBuilder { const cancellationToken = ((rawParams.pop(): any): CancellationToken); // We'll add our own {id: ...} to the array of params, so it's present // in our messages[] array, so that people can match on it. 
- const params = [{id}, ...this.sanitizeIncomingIDEMessage(rawParams)]; + const params = [{id}, ...this.sanitizeIncomingLSPMessage(rawParams)]; messages.push({method, params}); - this.log('IDE < { @@ -489,7 +462,7 @@ export class TestBuilder { const synthesizedParams = [{id}]; messages.push({method: '$/cancelRequest', params: synthesizedParams}); this.log( - 'IDE <) => { - const params = this.sanitizeIncomingIDEMessage(rawParams); + const params = this.sanitizeIncomingLSPMessage(rawParams); messages.push({method, params}); - this.log('IDE < { + lspProcess.stderr.on('data', data => { stderr.push(data.toString()); }); - await this.log('Created IDE process with pid %d', ideProcess.pid); + await this.log('Created LSP process with pid %d', lspProcess.pid); - // Execing a process can take some time. Let's wait for the ide process - // to be up and connected to the server. - const log = this.log.bind(this); - await new Promise((resolve, reject) => { - if (mode === 'lsp') { - resolve(); - return; - } - const timeout = setTimeout(onTimeout, 20000); // Max 20 seconds - function cleanup(then) { - ideProcess.stderr.removeListener('data', onData); - ideProcess.removeListener('exit', onExit); - clearTimeout(timeout); - then(); - } - function onData(data) { - stderr.join('').match(/Connected to server/) && cleanup(resolve); - } - function onExit() { - cleanup(resolve); - } - function onTimeout() { - log('flow ide start up timed out. stderr:\n%s', stderr.join('')).then( - () => { - cleanup(() => { - reject(new Error('Timed out waiting for flow ide to start up')); - }); - }, - ); - } - ideProcess.stderr.on('data', onData); - ideProcess.on('exit', onExit); - }); - - this.ideMessages = messages; - this.ide = { - mode, - process: ideProcess, + this.lspMessages = messages; + this.lsp = { + process: lspProcess, connection, outstandingRequestsFromServer, outstandingRequestsInfo, stderr, messageEmitter, }; - this.ideEmitter.emit('ide'); + this.lspEmitter.emit('lsp'); } - cleanupIDEConnection(): void { - const ide = this.ide; - if (ide != null) { - this.ide = null; - ide.process.stdin.end(); - ide.process.kill(); - ide.connection.dispose(); - this.ideEmitter.emit('ide'); - // but leave ideMessages so it can be examined even after IDE has gone + cleanupLSPConnection(): void { + const lsp = this.lsp; + if (lsp != null) { + this.lsp = null; + lsp.process.stdin.end(); + lsp.process.kill(); + lsp.connection.dispose(); + this.lspEmitter.emit('lsp'); + // but leave lspMessages so it can be examined even after LSP has gone } } @@ -584,10 +522,10 @@ export class TestBuilder { } } - // sanitizeIncomingIDEMessage: removes a few known fields from server output + // sanitizeIncomingLSPMessage: removes a few known fields from server output // that are known to be specific to an instance of a test, and replaces // them with something fixed. 
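(Editor's aside, not part of the patch: what the sanitizer is for, shown on a simplified message. The message shape and the exact placeholder spellings are assumptions.)

// Before sanitizing, a server message may embed the per-test temp directory
// and the Flow version, both of which would make recorded expectations flaky:
const incoming = {
  edit: {changes: {'file:///tmp/flow/test-42/foo.js': [{newText: 'x'}]}},
  flowVersion: '0.98.0',
};
// After sanitizeIncomingLSPMessage([incoming]), the URI (including when it
// appears as an object *key*, which is what the new do_url_replace-over-keys
// loop handles) and flowVersion are rewritten to fixed placeholder strings,
// so the same expected output works on any machine and Flow version.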
- sanitizeIncomingIDEMessage(params: any): any { + sanitizeIncomingLSPMessage(params: any): any { const params2 = JSON.parse(JSON.stringify(params)); // Legacy IDE sends back an array of objects where those objects have @@ -596,7 +534,29 @@ export class TestBuilder { const url = this.getDirUrl(); const urlslash = url + dir_sep; function replace(obj: Object) { - for (const k in obj) { + function do_url_replace(str: string): string { + if (str.startsWith(urlslash)) { + return ( + '' + str.substr(urlslash.length) + ); + } else if (str.startsWith(url)) { + return '' + str.substr(url.length); + } else { + return str; + } + } + for (var k in obj) { + // workspace edits contain urls in the keys of a dictionary + if (typeof k == 'string') { + let new_k = do_url_replace(k); + + if (k != new_k) { + obj[new_k] = obj[k]; + delete obj[k]; + k = new_k; + } + } + if (!obj.hasOwnProperty(k)) { continue; } @@ -610,12 +570,8 @@ export class TestBuilder { case 'string': if (k == 'flowVersion') { obj[k] = ''; - } else if (obj[k].startsWith(urlslash)) { - obj[k] = - '' + - obj[k].substr(urlslash.length); - } else if (obj[k].startsWith(url)) { - obj[k] = '' + obj[k].substr(url.length); + } else { + obj[k] = do_url_replace(obj[k]); } break; } @@ -626,9 +582,9 @@ export class TestBuilder { return params2; } - // sanitizeOutoingIDEMessage: replaces some placeholders with values + // sanitizeOutgoingLSPMessage: replaces some placeholders with values // that can only be computed by the builder instance - sanitizeOutgoingIDEMessage(params: Array): Array { + sanitizeOutgoingLSPMessage(params: Array): Array { const params2: any = JSON.parse(JSON.stringify(params)); const dir = this.dir; @@ -659,83 +615,83 @@ export class TestBuilder { return params2; } - async sendIDENotification( + async sendLSPNotification( method: string, argsRaw: Array, ): Promise { - const ide = this.ide; - if (ide == null) { - throw new Error('No IDE process running! Cannot sendIDENotification'); + const lsp = this.lsp; + if (lsp == null) { + throw new Error('No LSP process running! Cannot sendLSPNotification'); } - const args = this.sanitizeOutgoingIDEMessage(argsRaw); - await this.log('IDE >>notification %s\n%s', method, JSON.stringify(args)); - ide.connection.sendNotification(method, ...args); + const args = this.sanitizeOutgoingLSPMessage(argsRaw); + await this.log('LSP >>notification %s\n%s', method, JSON.stringify(args)); + lsp.connection.sendNotification(method, ...args); } - async sendIDEResponse( + async sendLSPResponse( id: number | 'mostRecent', argsRaw: Array, ): Promise { - const ide = this.ide; - if (ide == null) { - throw new Error('No IDE process running! Cannot sendIDEResponse'); + const lsp = this.lsp; + if (lsp == null) { + throw new Error('No LSP process running! 
Cannot sendLSPResponse'); } if (id === 'mostRecent') { - id = ide.outstandingRequestsInfo.mostRecent || 0; + id = lsp.outstandingRequestsInfo.mostRecent || 0; } - const callbacks = ide.outstandingRequestsFromServer.get(id); + const callbacks = lsp.outstandingRequestsFromServer.get(id); if (callbacks == null) { throw new Error(`No request id ${id} has arrived`); } - ide.outstandingRequestsFromServer.delete(id); + lsp.outstandingRequestsFromServer.delete(id); if (argsRaw.length == 1 && argsRaw[0] instanceof Error) { const e = (argsRaw[0]: Error); - await this.log('IDE >>response "id":%d\n%s', id, JSON.stringify(e)); + await this.log('LSP >>response "id":%d\n%s', id, JSON.stringify(e)); callbacks.reject(e); } else { - const args = this.sanitizeOutgoingIDEMessage(argsRaw); - await this.log('IDE >>response "id":%d\n%s', id, JSON.stringify(args)); + const args = this.sanitizeOutgoingLSPMessage(argsRaw); + await this.log('LSP >>response "id":%d\n%s', id, JSON.stringify(args)); callbacks.resolve(...args); } } - // This sends an IDE request and, when the response comes back, adds + // This sends an LSP request and, when the response comes back, adds // the response to the message queue. It doesn't fulfil its returned promise // until that time. - async sendIDERequestAndWaitForResponse( + async sendLSPRequestAndWaitForResponse( method: string, argsRaw: Array, ): Promise { - const ide = this.ide; - const ideMessages = this.ideMessages; - if (ide == null) { + const lsp = this.lsp; + const lspMessages = this.lspMessages; + if (lsp == null) { throw new Error( - 'No ide process running! Cannot sendIDERequestAndWaitForResponse', + 'No lsp process running! Cannot sendLSPRequestAndWaitForResponse', ); } - const args = this.sanitizeOutgoingIDEMessage(argsRaw); - await this.log('IDE >>request %s\n%s', method, JSON.stringify(args)); + const args = this.sanitizeOutgoingLSPMessage(argsRaw); + await this.log('LSP >>request %s\n%s', method, JSON.stringify(args)); let resultRaw; try { - resultRaw = await ide.connection.sendRequest(method, ...args); + resultRaw = await lsp.connection.sendRequest(method, ...args); } catch (error) { const message = error.message; error = {message, ...error}; // otherwise it doesn't show up in JSON.stringify - ideMessages.push({method, error}); - await this.log('IDE < { - const ide = this.ide; - const ideMessages = this.ideMessages; + waitUntilLSPMessage(timeoutMs: number, expected: string): Promise { + const lsp = this.lsp; + const lspMessages = this.lspMessages; return new Promise(resolve => { var timeout = null; @@ -743,8 +699,8 @@ export class TestBuilder { var alreadyDone = false; const startTime = new Date().getTime(); - if (ide == null) { - this.log('No IDE process running! Cannot waitUntilIDEMessage'); + if (lsp == null) { + this.log('No LSP process running! 
Cannot waitUntilLSPMessage'); resolve(); return; } @@ -755,20 +711,21 @@ export class TestBuilder { } alreadyDone = true; const duration = new Date().getTime() - startTime; - emitter && emitter.removeListener('message', checkLastMessage); + emitter && emitter.removeListener('message', checkMessages); timeout && clearTimeout(timeout); await this.log('%s message %s in %dms', verb, expected, duration); resolve(); }; - const checkMessage = message => { - if (Builder.doesMessageMatch(message, expected)) { - doneWithVerb('Got'); + let nextMessageIndex = 0; + const checkMessages = () => { + for (; nextMessageIndex < lspMessages.length; nextMessageIndex++) { + const message = lspMessages[nextMessageIndex]; + if (Builder.doesMessageMatch(message, expected)) { + doneWithVerb('Got'); + } } }; - const checkLastMessage = () => { - checkMessage(ideMessages.slice(-1)[0]); - }; // It's unavoidably racey whether the log message gets printed // before or after we get the right message @@ -777,31 +734,31 @@ export class TestBuilder { // Our backlog of messages gets cleared out at the start of each step. // If we've already received some messages since the start of the step, // let's account for them - ideMessages.forEach(checkMessage); + checkMessages(); // And account for all further messages that arrive - emitter = ide.messageEmitter.on('message', checkLastMessage); + emitter = lsp.messageEmitter.on('message', checkMessages); // ... until our stopping condition timeout = setTimeout(() => doneWithVerb('Failed to get'), timeoutMs); }); } - // waitUntilIDEMessageCount is a confusing method. It looks at all messages + // waitUntilLSPMessageCount is a confusing method. It looks at all messages // that have arrived in this step so far, plus all messages that arrive // until the timeout, including both notifications and responses. It stops // as soon as either the specified number of messages or until the timeout // happens, whichever comes first. - waitUntilIDEMessageCount( + waitUntilLSPMessageCount( timeoutMs: number, expectedCount: number, ): Promise { - const ide = this.ide; - const ideMessages = this.ideMessages; + const lsp = this.lsp; + const lspMessages = this.lspMessages; return new Promise(resolve => { - if (ide == null) { - this.log('No IDE process running! Cannot waitUntilIDEMessageCount'); + if (lsp == null) { + this.log('No LSP process running! Cannot waitUntilLSPMessageCount'); resolve(); return; } @@ -818,8 +775,8 @@ export class TestBuilder { }; var timeout = null; const done = ok => { - this.ide && - this.ide.messageEmitter.removeListener('message', onMessage); + this.lsp && + this.lsp.messageEmitter.removeListener('message', onMessage); timeout && clearTimeout(timeout); this.log( '%s all %d messages in under %dms', @@ -832,7 +789,7 @@ export class TestBuilder { // Our backlog of messages gets cleared out at the start of each step. // If we've already received some messages since the start of the step, // let's account for them: - ideMessages.forEach(onMessage); + lspMessages.forEach(onMessage); // And account for all further messages that arrive, until our stopping // condition: @@ -845,27 +802,27 @@ export class TestBuilder { timeout = setTimeout(onTimeout, timeoutMs); }); - ide.messageEmitter.on('message', onMessage); + lsp.messageEmitter.on('message', onMessage); }); } - getIDEMessagesSinceStartOfStep(): Array { - return this.ideMessages; + getLSPMessagesSinceStartOfStep(): Array { + return this.lspMessages; } - getIDEStderrSinceStartOfStep(): string { - return this.ide ? 
this.ide.stderr.join('') : ''; + getLSPStderrSinceStartOfStep(): string { + return this.lsp ? this.lsp.stderr.join('') : ''; } - clearIDEMessages(): void { - this.ideMessages.splice(0, this.ideMessages.length); + clearLSPMessages(): void { + this.lspMessages.splice(0, this.lspMessages.length); } - clearIDEStderr(): void { - this.ide && this.ide.stderr.splice(0, this.ide.stderr.length); + clearLSPStderr(): void { + this.lsp && this.lsp.stderr.splice(0, this.lsp.stderr.length); } - waitUntilIDEStatus( + waitUntilLSPStatus( timeoutMs: number, expected: 'stopped' | 'running', ): Promise { @@ -882,15 +839,15 @@ export class TestBuilder { alreadyDone = true; const duration = new Date().getTime() - startTime; timeout && clearTimeout(timeout); - emitter && emitter.removeListener('ide', checkStatus); - await this.log('%s IDE %s status in %dms', verb, expected, duration); + emitter && emitter.removeListener('lsp', checkStatus); + await this.log('%s LSP %s status in %dms', verb, expected, duration); resolve(); }; const checkStatus = () => { if ( - (expected === 'running' && this.ide != null) || - (expected === 'stopped' && this.ide == null) + (expected === 'running' && this.lsp != null) || + (expected === 'stopped' && this.lsp == null) ) { doneWithVerb('Got'); } @@ -898,13 +855,13 @@ export class TestBuilder { // It's unavoidably racey whether the async logger does its work before // or after we get the first successfull checkStatus - this.log('Waiting up to %dms for %s IDE status', timeoutMs, expected); + this.log('Waiting up to %dms for %s LSP status', timeoutMs, expected); // Test whether we're okay already? checkStatus(); // And look for further changes - emitter = this.ideEmitter.on('ide', checkStatus); + emitter = this.lspEmitter.on('lsp', checkStatus); // ... until our stopping condition timeout = setTimeout(() => doneWithVerb('Failed to get'), timeoutMs); @@ -959,7 +916,7 @@ export class TestBuilder { } async cleanup(): Promise { - this.cleanupIDEConnection(); + this.cleanupLSPConnection(); this.stopFlowServer(); this.closeLog(); // We'll also do a belt-and-braces "flow stop" in case @@ -1034,7 +991,7 @@ export default class Builder { // message was M. And doesMethodMatch(actual, 'M{C1,C2,...}') judges also // whether the strings C1, C2, ... were all found in the JSON representation // of the actual message. 
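(Editor's illustration of the matching syntax described above. The params shape is simplified, but 'telemetry/connectionStatus{true}' is the exact pattern the lspStartAndConnect step waits for.)

const msg = {method: 'telemetry/connectionStatus', params: [{isConnected: true}]};
Builder.doesMessageMatch(msg, 'telemetry/connectionStatus');        // true: method matches
Builder.doesMessageMatch(msg, 'telemetry/connectionStatus{true}');  // true: 'true' occurs in the message's JSON
Builder.doesMessageMatch(msg, 'telemetry/connectionStatus{false}'); // false: 'false' does not occur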
- static doesMessageMatch(actual: IDEMessage, expected: string): boolean { + static doesMessageMatch(actual: LSPMessage, expected: string): boolean { const iOpenBrace = expected.indexOf('{'); const iCloseBrace = expected.lastIndexOf('}'); if (iOpenBrace == -1 || iCloseBrace == -1) { @@ -1083,6 +1040,7 @@ export default class Builder { testNum: number, flowConfigFilename: string, lazyMode: 'fs' | 'ide' | null, + wait_for_recheck: boolean, ): Promise { const testBuilder = new TestBuilder( bin, @@ -1092,6 +1050,7 @@ export default class Builder { testNum, flowConfigFilename, lazyMode, + wait_for_recheck, ); Builder.builders.push(testBuilder); await testBuilder.createFreshDir(); diff --git a/packages/flow-dev-tools/src/test/findTests.js b/packages/flow-dev-tools/src/test/findTests.js index f46fe37f039..57c14a927fd 100644 --- a/packages/flow-dev-tools/src/test/findTests.js +++ b/packages/flow-dev-tools/src/test/findTests.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {dirname, normalize, relative, resolve} from 'path'; diff --git a/packages/flow-dev-tools/src/test/ide.js b/packages/flow-dev-tools/src/test/ide.js deleted file mode 100644 index 9b79fcd837f..00000000000 --- a/packages/flow-dev-tools/src/test/ide.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * @flow - * @format - * @lint-ignore-every LINEWRAP1 - */ - -export type MessageHandler = (...args: any) => mixed; -export type RpcConnection = { - onNotification(methodName: string, handler: MessageHandler): void, - sendNotification(methodName: string, ...args: any): void, - sendRequest(methodName: string, ...args: any): Promise, - // TODO requests - listen(): void, - dispose(): void, -}; - -export type IDEMessage = - | {method: string, params: $ReadOnlyArray} // Notification/request - | {method: string, result: mixed} // Response - | {method: string, error: mixed}; // Response-error diff --git a/packages/flow-dev-tools/src/test/lsp.js b/packages/flow-dev-tools/src/test/lsp.js new file mode 100644 index 00000000000..5dcdb750562 --- /dev/null +++ b/packages/flow-dev-tools/src/test/lsp.js @@ -0,0 +1,19 @@ +/** + * @flow + * @format + */ + +export type MessageHandler = (...args: any) => mixed; +export type RpcConnection = { + onNotification(methodName: string, handler: MessageHandler): void, + sendNotification(methodName: string, ...args: any): void, + sendRequest(methodName: string, ...args: any): Promise, + // TODO requests + listen(): void, + dispose(): void, +}; + +export type LSPMessage = + | {method: string, params: $ReadOnlyArray} // Notification/request + | {method: string, result: mixed} // Response + | {method: string, error: mixed}; // Response-error diff --git a/packages/flow-dev-tools/src/test/runTestSuite.js b/packages/flow-dev-tools/src/test/runTestSuite.js index ae0ea4a095d..89ff0f16972 100644 --- a/packages/flow-dev-tools/src/test/runTestSuite.js +++ b/packages/flow-dev-tools/src/test/runTestSuite.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import colors from 'colors/safe'; @@ -10,7 +9,7 @@ import {format} from 'util'; import {noErrors} from '../flowResult'; import {TestStep, TestStepFirstStage} from './TestStep'; import {newEnv} from './stepEnv'; -import {writeFile} from '../utils/async'; +import {withTimeout, writeFile} from '../utils/async'; import type Builder, {TestBuilder} from './builder'; import type Suite from './Suite'; @@ -103,6 +102,7 @@ export default (async function( testNum, test.flowConfigFilename, test.lazyMode, + test.shouldWaitForRecheck, ); let 
diff --git a/packages/flow-dev-tools/src/test/runTestSuite.js b/packages/flow-dev-tools/src/test/runTestSuite.js
index ae0ea4a095d..89ff0f16972 100644
--- a/packages/flow-dev-tools/src/test/runTestSuite.js
+++ b/packages/flow-dev-tools/src/test/runTestSuite.js
@@ -1,7 +1,6 @@
 /**
  * @flow
  * @format
- * @lint-ignore-every LINEWRAP1
  */
 
 import colors from 'colors/safe';
@@ -10,7 +9,7 @@ import {format} from 'util';
 import {noErrors} from '../flowResult';
 import {TestStep, TestStepFirstStage} from './TestStep';
 import {newEnv} from './stepEnv';
-import {writeFile} from '../utils/async';
+import {withTimeout, writeFile} from '../utils/async';
 
 import type Builder, {TestBuilder} from './builder';
 import type Suite from './Suite';
@@ -103,6 +102,7 @@ export default (async function(
       testNum,
       test.flowConfigFilename,
       test.lazyMode,
+      test.shouldWaitForRecheck,
     );
 
     let firstIdeStartStep = null;
@@ -124,10 +124,10 @@ export default (async function(
           throw new Error(
             format(
               "Test '%s' step %d/%d must call either " +
-                'waitAndVerifyAllIDEMessagesContentSinceStartOfStep or waitAndVerifyNoIDEMessagesSinceStartOfStep. ' +
-                'We enforce this as a sanity-check, because testing flow IDE is tricky... ' +
-                'Every step after the first ideStart step until the last ideExpect step ' +
-                'must read IDE messages.\n\n',
+                'waitAndVerifyAllLSPMessagesContentSinceStartOfStep or waitAndVerifyNoLSPMessagesSinceStartOfStep. ' +
+                'We enforce this as a sanity-check, because testing flow lsp is tricky... ' +
+                'Every step after the first lspStart step until the last lspExpect step ' +
+                'must read LSP messages.\n\n',
               test.name,
               i + 1,
               steps.length,
@@ -155,13 +155,22 @@
     if (flowErrors == null && step.needsFlowCheck()) {
       flowErrors = await testBuilder.getFlowErrors();
     }
-    testBuilder.clearIDEMessages();
-    testBuilder.clearIDEStderr();
+    testBuilder.clearLSPMessages();
+    testBuilder.clearLSPStderr();
 
     let {envRead, envWrite} = newEnv(flowErrors || noErrors);
     testBuilder.setAllowFlowServerToDie(step.allowFlowServerToDie());
 
-    await step.performActions(testBuilder, envWrite);
+    const timeout = step.getTimeout();
+
+    let actions_promise = step.performActions(testBuilder, envWrite);
+    if (timeout == null) {
+      await actions_promise;
+    } else {
+      await withTimeout(timeout, actions_promise, () => {
+        testBuilder.log('Hit timeout of %dms.', timeout);
+      });
+    }
 
     let oldErrors = flowErrors;
 
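
The per-step timeout above leans on the withTimeout helper that this diff adds to packages/flow-dev-tools/src/utils/async.js further down: it races the step's actions against a timer and invokes the callback if the timer wins. A sketch of the call shape (the 5000ms figure is invented):

// If performActions settles first its result wins the race; if the timer fires
// first, onTimeout runs and its return value is what the race resolves to.
// Note the losing promise is not cancelled, merely abandoned.
await withTimeout(
  5000,                                                // timeout_ms
  step.performActions(testBuilder, envWrite),          // the step's work
  () => testBuilder.log('Hit timeout of %dms.', 5000), // onTimeout
);
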
@@ -174,16 +183,16 @@
       flowErrors = null;
     }
 
-    envWrite.setIDEMessagesSinceStartOfStep(
-      testBuilder.getIDEMessagesSinceStartOfStep(),
+    envWrite.setLSPMessagesSinceStartOfStep(
+      testBuilder.getLSPMessagesSinceStartOfStep(),
     );
-    envWrite.setIDEStderrSinceStartOfStep(
-      testBuilder.getIDEStderrSinceStartOfStep(),
+    envWrite.setLSPStderrSinceStartOfStep(
+      testBuilder.getLSPStderrSinceStartOfStep(),
     );
     envWrite.setServerRunning(
       testBuilder.server == null ? 'stopped' : 'running',
     );
-    envWrite.setIDERunning(testBuilder.ide == null ? 'stopped' : 'running');
+    envWrite.setLSPRunning(testBuilder.lsp == null ? 'stopped' : 'running');
 
     let result = step.checkAssertions(envRead);
     testBuilder.assertNoErrors();
diff --git a/packages/flow-dev-tools/src/test/searchStackForTestAssertion.js b/packages/flow-dev-tools/src/test/searchStackForTestAssertion.js
index b3f783e0958..61a3133c1d1 100644
--- a/packages/flow-dev-tools/src/test/searchStackForTestAssertion.js
+++ b/packages/flow-dev-tools/src/test/searchStackForTestAssertion.js
@@ -1,7 +1,6 @@
 /**
  * @flow
  * @format
- * @lint-ignore-every LINEWRAP1
  */
 
 import {sync as resolve} from 'resolve';
diff --git a/packages/flow-dev-tools/src/test/stepEnv.js b/packages/flow-dev-tools/src/test/stepEnv.js
index 4cb33a59e1e..aa3d77f0a45 100644
--- a/packages/flow-dev-tools/src/test/stepEnv.js
+++ b/packages/flow-dev-tools/src/test/stepEnv.js
@@ -1,21 +1,20 @@
 /**
  * @flow
  * @format
- * @lint-ignore-every LINEWRAP1
  */
 
 import type {FlowResult} from '../flowResult';
-import type {IDEMessage} from './ide';
+import type {LSPMessage} from './lsp';
 
 export interface StepEnvWriteable {
   reportStdout(output: string): void;
   reportStderr(output: string): void;
   reportExitCode(code: number): void;
-  setIDEMessagesSinceStartOfStep(messages: Array<IDEMessage>): void;
-  setIDEStderrSinceStartOfStep(stderr: string): void;
+  setLSPMessagesSinceStartOfStep(messages: Array<LSPMessage>): void;
+  setLSPStderrSinceStartOfStep(stderr: string): void;
   setNewErrors(errors: FlowResult): void;
   setServerRunning(running: 'stopped' | 'running'): void;
-  setIDERunning(running: 'stopped' | 'running'): void;
+  setLSPRunning(running: 'stopped' | 'running'): void;
   triggerFlowCheck(): void;
 }
 
@@ -23,12 +22,12 @@ export interface StepEnvReadable {
   getStdout(): string;
   getStderr(): string;
   getExitCodes(): Array<number>;
-  getIDEMessagesSinceStartOfStep(): Array<IDEMessage>;
-  getIDEStderrSinceStartOfStep(): string;
+  getLSPMessagesSinceStartOfStep(): Array<LSPMessage>;
+  getLSPStderrSinceStartOfStep(): string;
   getOldErrors(): FlowResult;
   getNewErrors(): FlowResult;
   getServerRunning(): 'stopped' | 'running';
-  getIDERunning(): 'stopped' | 'running';
+  getLSPRunning(): 'stopped' | 'running';
   shouldRunFlow(): boolean;
 }
 
@@ -40,10 +39,10 @@ export function newEnv(
   let exitCodes = [];
   let newErrors = oldErrors;
   let serverRunning = 'stopped';
-  let ideRunning = 'stopped';
+  let lspRunning = 'stopped';
   let shouldRunFlow = false;
-  let ideMessagesSinceStartOfStep = [];
-  let ideStderrSinceStartOfStep = '';
+  let lspMessagesSinceStartOfStep = [];
+  let lspStderrSinceStartOfStep = '';
 
   const envWrite = {
     reportStdout(output) {
@@ -58,12 +57,12 @@
       exitCodes.push(code);
     },
 
-    setIDEMessagesSinceStartOfStep(messages) {
-      ideMessagesSinceStartOfStep = messages;
+    setLSPMessagesSinceStartOfStep(messages) {
+      lspMessagesSinceStartOfStep = messages;
     },
 
-    setIDEStderrSinceStartOfStep(stderr) {
-      ideStderrSinceStartOfStep = stderr;
+    setLSPStderrSinceStartOfStep(stderr) {
+      lspStderrSinceStartOfStep = stderr;
     },
 
     setNewErrors(errors) {
@@ -74,8 +73,8 @@
       serverRunning = running;
     },
 
-    setIDERunning(running) {
-      ideRunning = running;
+    setLSPRunning(running) {
+      lspRunning = running;
     },
 
     triggerFlowCheck() {
@@ -96,12 +95,12 @@
       return exitCodes.slice();
     },
 
-    getIDEMessagesSinceStartOfStep() {
-      return ideMessagesSinceStartOfStep;
+    getLSPMessagesSinceStartOfStep() {
+      return lspMessagesSinceStartOfStep;
     },
 
-    getIDEStderrSinceStartOfStep() {
-      return ideStderrSinceStartOfStep;
+    getLSPStderrSinceStartOfStep() {
+      return lspStderrSinceStartOfStep;
     },
 
     getOldErrors() {
@@ -116,8 +115,8 @@
       return
serverRunning; }, - getIDERunning() { - return ideRunning; + getLSPRunning() { + return lspRunning; }, shouldRunFlow() { diff --git a/packages/flow-dev-tools/src/test/testCommand.js b/packages/flow-dev-tools/src/test/testCommand.js index 5ac7e2c7237..ee6c1112b31 100644 --- a/packages/flow-dev-tools/src/test/testCommand.js +++ b/packages/flow-dev-tools/src/test/testCommand.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import {resolve} from 'path'; diff --git a/packages/flow-dev-tools/src/test/testRunner.js b/packages/flow-dev-tools/src/test/testRunner.js index 256dac4ba40..eb3babbe163 100644 --- a/packages/flow-dev-tools/src/test/testRunner.js +++ b/packages/flow-dev-tools/src/test/testRunner.js @@ -1,7 +1,6 @@ /** * @flow * @format - * @lint-ignore-every LINEWRAP1 */ import colors from 'colors/safe'; @@ -69,7 +68,7 @@ function startWatchAndRun(suites, args) { const startListeningForShortcuts = () => { if (typeof process.stdin.setRawMode === 'function') { - process.stdin.setRawMode(true); + (process.stdin.setRawMode: any).call(process.stdin, true); process.stdin.resume(); process.stdin.setEncoding('utf8'); process.stdin.on('data', keydown); @@ -80,7 +79,7 @@ function startWatchAndRun(suites, args) { const stopListeningForShortcuts = () => { if (typeof process.stdin.setRawMode === 'function') { - process.stdin.setRawMode(false); + (process.stdin.setRawMode: any).call(process.stdin, false); process.stdin.resume(); process.stdin.setEncoding('utf8'); process.stdin.removeListener('data', keydown); diff --git a/packages/flow-dev-tools/src/utils/async.js b/packages/flow-dev-tools/src/utils/async.js index 53b13ca422f..cac39d4787c 100644 --- a/packages/flow-dev-tools/src/utils/async.js +++ b/packages/flow-dev-tools/src/utils/async.js @@ -1,7 +1,10 @@ -/* @flow */ +/* + * @flow + * @format + */ -import { exec as cp_exec } from 'child_process'; -import { createInterface as rl_createInterface} from 'readline'; +import {exec as cp_exec} from 'child_process'; +import {createInterface as rl_createInterface} from 'readline'; import { appendFile as fs_appendFile, exists as fs_exists, @@ -24,9 +27,10 @@ import type {ReadStream, WriteStream} from 'fs'; export type ExecOpts = child_process$execOpts & { stdin?: string, -} +}; -// Based on nothing but a few experiments on my laptop, this seems like a pretty safe size. +// Based on nothing but a few experiments on my laptop, +// this seems like a pretty safe size. 
const STDIN_WRITE_CHUNK_SIZE = 10000; export function exec(cmd: string, options?: ExecOpts): Promise { @@ -58,13 +62,17 @@ export function exec(cmd: string, options?: ExecOpts): Promise { }); } -export function execManual(cmd: string, options?: Object): Promise<[?Object, string | Buffer, string | Buffer]> { +export function execManual( + cmd: string, + options?: Object, +): Promise<[?Object, string | Buffer, string | Buffer]> { return new Promise((resolve, reject) => - cp_exec(cmd, options, (err, stdout, stderr) => resolve([err, stdout, stderr])) - ) + cp_exec(cmd, options, (err, stdout, stderr) => + resolve([err, stdout, stderr]), + ), + ); } - type WriteFileOptions = { encoding?: string | null, mode?: number, @@ -76,25 +84,25 @@ export function writeFile( options?: WriteFileOptions = {}, ): Promise { return new Promise((resolve, reject) => { - fs_writeFile(filename, data, options, (err) => { + fs_writeFile(filename, data, options, err => { if (err == null) { resolve(); } else { reject(err); } - }) + }); }); } export function appendFile(filename: string, data: string): Promise { return new Promise((resolve, reject) => { - fs_appendFile(filename, data, (err) => { + fs_appendFile(filename, data, err => { if (err == null) { resolve(); } else { reject(err); } - }) + }); }); } @@ -104,11 +112,11 @@ export function readFile(filename: string): Promise { if (err == null) { // Even if we check out the files without CRLF, reading seems to add it // in. - resolve(data.replace(/\r\n/g, "\n")); + resolve(data.replace(/\r\n/g, '\n')); } else { reject(err); } - }) + }); }); } @@ -120,13 +128,13 @@ export function readdir(dir: string): Promise> { } else { reject(err); } - }) + }); }); } export function rename(old_path: string, new_path: string): Promise { return new Promise((resolve, reject) => { - fs_rename(old_path, new_path, (err) => { + fs_rename(old_path, new_path, err => { if (err == null) { resolve(); } else { @@ -138,7 +146,7 @@ export function rename(old_path: string, new_path: string): Promise { export function rimraf(path: string): Promise { return new Promise((resolve, reject) => { - rimraf_rimraf(path, (err) => { + rimraf_rimraf(path, err => { if (err == null) { resolve(); } else { @@ -150,7 +158,7 @@ export function rimraf(path: string): Promise { export function unlink(file: string): Promise { return new Promise((resolve, reject) => { - fs_unlink(file, (err) => { + fs_unlink(file, err => { if (err == null) { resolve(); } else { @@ -162,7 +170,7 @@ export function unlink(file: string): Promise { export function mkdirp(dir: string): Promise { return new Promise((resolve, reject) => { - mkdirp_mkdirp(dir, (err) => { + mkdirp_mkdirp(dir, err => { if (err) { reject(err); } else { @@ -180,14 +188,18 @@ export type NCPFile = { }; type NCPOptions = { - filter?: RegExp | (filename: string) => boolean, + filter?: RegExp | ((filename: string) => boolean), transform?: (read: ReadStream, write: WriteStream, file: NCPFile) => mixed, clobber?: boolean, dereference?: boolean, stopOnErr?: boolean, errs?: any, }; -export function ncp(source: string, dest: string, options?: NCPOptions): Promise { +export function ncp( + source: string, + dest: string, + options?: NCPOptions, +): Promise { return new Promise((resolve, reject) => { ncp_ncp(source, dest, options || {}, err => { if (err) { @@ -199,9 +211,11 @@ export function ncp(source: string, dest: string, options?: NCPOptions): Promise }); } -export function drain(writer: stream$Writable | tty$WriteStream): Promise { +export function drain( + writer: 
stream$Writable | tty$WriteStream, +): Promise { return new Promise((resolve, reject) => { - writer.once('drain', resolve) + writer.once('drain', resolve); }); } @@ -218,7 +232,7 @@ export function symlink( return new Promise((resolve, reject) => { // $FlowIssue - symlink can omit the type fs_symlink(target.toString(), path.toString(), resolve); - }) + }); } type GlobOptions = { @@ -242,7 +256,7 @@ export function glob( } export function isRunning(pid: number): Promise { - return new Promise((resolve) => { + return new Promise(resolve => { try { process.kill(pid, 0); resolve(true); @@ -253,7 +267,7 @@ export function isRunning(pid: number): Promise { } export function sleep(timeoutMs: number): Promise { - return new Promise((resolve) => { + return new Promise(resolve => { setTimeout(resolve, timeoutMs); }); } @@ -261,12 +275,26 @@ export function sleep(timeoutMs: number): Promise { export function prompt(message: string): Promise { const rl = rl_createInterface({ input: process.stdin, - output: process.stdout + output: process.stdout, }); - return new Promise((resolve) => { - rl.question(message, (result) => { + return new Promise(resolve => { + rl.question(message, result => { rl.close(); resolve(result); }); }); } + +export function withTimeout( + timeout_ms: number, + promise: Promise, + onTimeout: () => B, +): Promise { + let timer; + const timeout = new Promise(resolve => { + timer = setTimeout(() => resolve(onTimeout()), timeout_ms); + }); + return Promise.race([timeout, promise]).finally( + () => timer && clearTimeout(timer), + ); +} diff --git a/packages/flow-dev-tools/yarn.lock b/packages/flow-dev-tools/yarn.lock deleted file mode 100644 index 99e1e3e1054..00000000000 --- a/packages/flow-dev-tools/yarn.lock +++ /dev/null @@ -1,3137 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -abab@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.4.tgz#5faad9c2c07f60dd76770f71cf025b62a63cfd4e" - -abbrev@1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" - -acorn-globals@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" - dependencies: - acorn "^4.0.4" - -acorn@^4.0.4: - version "4.0.13" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" - -ajv@^4.9.1: - version "4.11.7" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.7.tgz#8655a5d86d0824985cc471a1d913fb6729a0ec48" - dependencies: - co "^4.6.0" - json-stable-stringify "^1.0.1" - -ajv@^5.1.0: - version "5.2.2" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.2.2.tgz#47c68d69e86f5d953103b0074a9430dc63da5e39" - dependencies: - co "^4.6.0" - fast-deep-equal "^1.0.0" - json-schema-traverse "^0.3.0" - json-stable-stringify "^1.0.1" - -align-text@^0.1.1, align-text@^0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" - dependencies: - kind-of "^3.0.2" - longest "^1.0.1" - repeat-string "^1.5.2" - -amdefine@>=0.0.4: - version "1.0.1" - resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" - -ansi-escapes@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.0.0.tgz#ec3e8b4e9f8064fc02c3ac9b65f1c275bda8ef92" - -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - -ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - -ansi-styles@^3.1.0, ansi-styles@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.0.tgz#c159b8d5be0f9e5a6f346dab94f16ce022161b88" - dependencies: - color-convert "^1.9.0" - -anymatch@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.0.tgz#a3e52fa39168c825ff57b0248126ce5a8ff95507" - dependencies: - arrify "^1.0.0" - micromatch "^2.1.5" - -append-transform@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" - dependencies: - default-require-extensions "^1.0.0" - -aproba@^1.0.3: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - -are-we-there-yet@~1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d" - dependencies: - delegates "^1.0.0" - readable-stream "^2.0.6" - -argparse@^1.0.7: - version "1.0.9" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" - dependencies: - sprintf-js "~1.0.2" - -arr-diff@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" - dependencies: - 
arr-flatten "^1.0.1" - -arr-flatten@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.0.3.tgz#a274ed85ac08849b6bd7847c4580745dc51adfb1" - -array-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" - -array-unique@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" - -arrify@^1.0.0, arrify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" - -asn1@~0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - -assert-plus@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" - -ast-types@0.8.14: - version "0.8.14" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.8.14.tgz#65bf58559a23488e076981627b5eb987aba67cda" - -astral-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" - -async@^1.4.0: - version "1.5.2" - resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" - -async@^2.1.4: - version "2.5.0" - resolved "https://registry.yarnpkg.com/async/-/async-2.5.0.tgz#843190fd6b7357a0b9e1c956edddd5ec8462b54d" - dependencies: - lodash "^4.14.0" - -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - -aws-sign2@~0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" - -aws-sign2@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" - -aws4@^1.2.1, aws4@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" - -babel-code-frame@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" - dependencies: - chalk "^1.1.3" - esutils "^2.0.2" - js-tokens "^3.0.2" - -babel-core@^6.0.0, babel-core@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.0.tgz#af32f78b31a6fcef119c87b0fd8d9753f03a0bb8" - dependencies: - babel-code-frame "^6.26.0" - babel-generator "^6.26.0" - babel-helpers "^6.24.1" - babel-messages "^6.23.0" - babel-register "^6.26.0" - babel-runtime "^6.26.0" - babel-template "^6.26.0" - babel-traverse "^6.26.0" - babel-types "^6.26.0" - babylon "^6.18.0" - convert-source-map "^1.5.0" - debug "^2.6.8" - json5 "^0.5.1" - lodash "^4.17.4" - minimatch "^3.0.4" - path-is-absolute "^1.0.1" - private "^0.1.7" - slash "^1.0.0" - source-map "^0.5.6" - -babel-generator@^6.18.0, babel-generator@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.0.tgz#ac1ae20070b79f6e3ca1d3269613053774f20dc5" - dependencies: - babel-messages "^6.23.0" - babel-runtime 
"^6.26.0" - babel-types "^6.26.0" - detect-indent "^4.0.0" - jsesc "^1.3.0" - lodash "^4.17.4" - source-map "^0.5.6" - trim-right "^1.0.1" - -babel-helper-call-delegate@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz#ece6aacddc76e41c3461f88bfc575bd0daa2df8d" - dependencies: - babel-helper-hoist-variables "^6.24.1" - babel-runtime "^6.22.0" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - -babel-helper-define-map@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-define-map/-/babel-helper-define-map-6.24.1.tgz#7a9747f258d8947d32d515f6aa1c7bd02204a080" - dependencies: - babel-helper-function-name "^6.24.1" - babel-runtime "^6.22.0" - babel-types "^6.24.1" - lodash "^4.2.0" - -babel-helper-function-name@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz#d3475b8c03ed98242a25b48351ab18399d3580a9" - dependencies: - babel-helper-get-function-arity "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - -babel-helper-get-function-arity@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz#8f7782aa93407c41d3aa50908f89b031b1b6853d" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-helper-hoist-variables@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz#1ecb27689c9d25513eadbc9914a73f5408be7a76" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-helper-optimise-call-expression@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz#f7a13427ba9f73f8f4fa993c54a97882d1244257" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-helper-regex@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-regex/-/babel-helper-regex-6.24.1.tgz#d36e22fab1008d79d88648e32116868128456ce8" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - lodash "^4.2.0" - -babel-helper-replace-supers@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz#bf6dbfe43938d17369a213ca8a8bf74b6a90ab1a" - dependencies: - babel-helper-optimise-call-expression "^6.24.1" - babel-messages "^6.23.0" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - -babel-helpers@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2" - dependencies: - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-jest@^21.0.2: - version "21.0.2" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-21.0.2.tgz#817ea52c23f1c6c4b684d6960968416b6a9e9c6c" - dependencies: - babel-plugin-istanbul "^4.0.0" - babel-preset-jest "^21.0.2" - -babel-messages@^6.23.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-check-es2015-constants@^6.22.0: - version "6.22.0" - resolved 
"https://registry.yarnpkg.com/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz#35157b101426fd2ffd3da3f75c7d1e91835bbf8a" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-istanbul@^4.0.0: - version "4.1.5" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.5.tgz#6760cdd977f411d3e175bb064f2bc327d99b2b6e" - dependencies: - find-up "^2.1.0" - istanbul-lib-instrument "^1.7.5" - test-exclude "^4.1.1" - -babel-plugin-jest-hoist@^21.0.2: - version "21.0.2" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-21.0.2.tgz#cfdce5bca40d772a056cb8528ad159c7bb4bb03d" - -babel-plugin-syntax-async-functions@^6.8.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95" - -babel-plugin-syntax-class-properties@^6.8.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz#d7eb23b79a317f8543962c505b827c7d6cac27de" - -babel-plugin-syntax-flow@^6.18.0, babel-plugin-syntax-flow@^6.8.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-flow/-/babel-plugin-syntax-flow-6.18.0.tgz#4c3ab20a2af26aa20cd25995c398c4eb70310c8d" - -babel-plugin-syntax-object-rest-spread@^6.8.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5" - -babel-plugin-syntax-trailing-function-commas@^6.8.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz#ba0360937f8d06e40180a43fe0d5616fff532cf3" - -babel-plugin-transform-class-properties@^6.8.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz#6a79763ea61d33d36f37b611aa9def81a81b46ac" - dependencies: - babel-helper-function-name "^6.24.1" - babel-plugin-syntax-class-properties "^6.8.0" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-plugin-transform-es2015-arrow-functions@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz#452692cb711d5f79dc7f85e440ce41b9f244d221" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz#bbc51b49f964d70cb8d8e0b94e820246ce3a6141" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-block-scoping@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576" - dependencies: - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - lodash "^4.2.0" - -babel-plugin-transform-es2015-classes@^6.24.1: - version "6.24.1" - resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz#5a4c58a50c9c9461e564b4b2a3bfabc97a2584db" - dependencies: - babel-helper-define-map "^6.24.1" - babel-helper-function-name "^6.24.1" - babel-helper-optimise-call-expression "^6.24.1" - babel-helper-replace-supers "^6.24.1" - babel-messages "^6.23.0" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-computed-properties@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz#6fe2a8d16895d5634f4cd999b6d3480a308159b3" - dependencies: - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-plugin-transform-es2015-destructuring@^6.22.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz#997bb1f1ab967f682d2b0876fe358d60e765c56d" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-duplicate-keys@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz#73eb3d310ca969e3ef9ec91c53741a6f1576423e" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-for-of@^6.22.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz#f47c95b2b613df1d3ecc2fdb7573623c75248691" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-function-name@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz#834c89853bc36b1af0f3a4c5dbaa94fd8eacaa8b" - dependencies: - babel-helper-function-name "^6.24.1" - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-literals@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz#4f54a02d6cd66cf915280019a31d31925377ca2e" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-modules-amd@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz#3b3e54017239842d6d19c3011c4bd2f00a00d154" - dependencies: - babel-plugin-transform-es2015-modules-commonjs "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-plugin-transform-es2015-modules-commonjs@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.24.1.tgz#d3e310b40ef664a36622200097c6d440298f2bfe" - dependencies: - babel-plugin-transform-strict-mode "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-modules-systemjs@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz#ff89a142b9119a906195f5f106ecf305d9407d23" - dependencies: - babel-helper-hoist-variables "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - 
-babel-plugin-transform-es2015-modules-umd@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz#ac997e6285cd18ed6176adb607d602344ad38468" - dependencies: - babel-plugin-transform-es2015-modules-amd "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-plugin-transform-es2015-object-super@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz#24cef69ae21cb83a7f8603dad021f572eb278f8d" - dependencies: - babel-helper-replace-supers "^6.24.1" - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-parameters@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz#57ac351ab49caf14a97cd13b09f66fdf0a625f2b" - dependencies: - babel-helper-call-delegate "^6.24.1" - babel-helper-get-function-arity "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-shorthand-properties@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz#24f875d6721c87661bbd99a4622e51f14de38aa0" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-spread@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz#d6d68a99f89aedc4536c81a542e8dd9f1746f8d1" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-sticky-regex@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz#00c1cdb1aca71112cdf0cf6126c2ed6b457ccdbc" - dependencies: - babel-helper-regex "^6.24.1" - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-plugin-transform-es2015-template-literals@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz#a84b3450f7e9f8f1f6839d6d687da84bb1236d8d" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-typeof-symbol@^6.22.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz#dec09f1cddff94b52ac73d505c84df59dcceb372" - dependencies: - babel-runtime "^6.22.0" - -babel-plugin-transform-es2015-unicode-regex@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz#d38b12f42ea7323f729387f18a7c5ae1faeb35e9" - dependencies: - babel-helper-regex "^6.24.1" - babel-runtime "^6.22.0" - regexpu-core "^2.0.0" - -babel-plugin-transform-flow-strip-types@^6.8.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-flow-strip-types/-/babel-plugin-transform-flow-strip-types-6.22.0.tgz#84cb672935d43714fdc32bce84568d87441cf7cf" - dependencies: - babel-plugin-syntax-flow "^6.18.0" - babel-runtime "^6.22.0" - -babel-plugin-transform-object-rest-spread@^6.20.2: - version "6.23.0" - resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.23.0.tgz#875d6bc9be761c58a2ae3feee5dc4895d8c7f921" - dependencies: - babel-plugin-syntax-object-rest-spread "^6.8.0" - babel-runtime "^6.22.0" - -babel-plugin-transform-regenerator@^6.24.1, babel-plugin-transform-regenerator@^6.8.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.24.1.tgz#b8da305ad43c3c99b4848e4fe4037b770d23c418" - dependencies: - regenerator-transform "0.9.11" - -babel-plugin-transform-strict-mode@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758" - dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - -babel-polyfill@^6.8.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.23.0.tgz#8364ca62df8eafb830499f699177466c3b03499d" - dependencies: - babel-runtime "^6.22.0" - core-js "^2.4.0" - regenerator-runtime "^0.10.0" - -babel-preset-es2015@^6.8.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-preset-es2015/-/babel-preset-es2015-6.24.1.tgz#d44050d6bc2c9feea702aaf38d727a0210538939" - dependencies: - babel-plugin-check-es2015-constants "^6.22.0" - babel-plugin-transform-es2015-arrow-functions "^6.22.0" - babel-plugin-transform-es2015-block-scoped-functions "^6.22.0" - babel-plugin-transform-es2015-block-scoping "^6.24.1" - babel-plugin-transform-es2015-classes "^6.24.1" - babel-plugin-transform-es2015-computed-properties "^6.24.1" - babel-plugin-transform-es2015-destructuring "^6.22.0" - babel-plugin-transform-es2015-duplicate-keys "^6.24.1" - babel-plugin-transform-es2015-for-of "^6.22.0" - babel-plugin-transform-es2015-function-name "^6.24.1" - babel-plugin-transform-es2015-literals "^6.22.0" - babel-plugin-transform-es2015-modules-amd "^6.24.1" - babel-plugin-transform-es2015-modules-commonjs "^6.24.1" - babel-plugin-transform-es2015-modules-systemjs "^6.24.1" - babel-plugin-transform-es2015-modules-umd "^6.24.1" - babel-plugin-transform-es2015-object-super "^6.24.1" - babel-plugin-transform-es2015-parameters "^6.24.1" - babel-plugin-transform-es2015-shorthand-properties "^6.24.1" - babel-plugin-transform-es2015-spread "^6.22.0" - babel-plugin-transform-es2015-sticky-regex "^6.24.1" - babel-plugin-transform-es2015-template-literals "^6.22.0" - babel-plugin-transform-es2015-typeof-symbol "^6.22.0" - babel-plugin-transform-es2015-unicode-regex "^6.24.1" - babel-plugin-transform-regenerator "^6.24.1" - -babel-preset-jest@^21.0.2: - version "21.0.2" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-21.0.2.tgz#9db25def2329f49eace3f5ea0de42a0b898d12cc" - dependencies: - babel-plugin-jest-hoist "^21.0.2" - -babel-register@^6.26.0, babel-register@^6.8.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071" - dependencies: - babel-core "^6.26.0" - babel-runtime "^6.26.0" - core-js "^2.5.0" - home-or-tmp "^2.0.0" - lodash "^4.17.4" - mkdirp "^0.5.1" - source-map-support "^0.4.15" - -babel-runtime@^6.18.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" - dependencies: - core-js "^2.4.0" - regenerator-runtime 
"^0.11.0" - -babel-template@^6.16.0, babel-template@^6.24.1, babel-template@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" - dependencies: - babel-runtime "^6.26.0" - babel-traverse "^6.26.0" - babel-types "^6.26.0" - babylon "^6.18.0" - lodash "^4.17.4" - -babel-traverse@^6.18.0, babel-traverse@^6.24.1, babel-traverse@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" - dependencies: - babel-code-frame "^6.26.0" - babel-messages "^6.23.0" - babel-runtime "^6.26.0" - babel-types "^6.26.0" - babylon "^6.18.0" - debug "^2.6.8" - globals "^9.18.0" - invariant "^2.2.2" - lodash "^4.17.4" - -babel-types@^6.18.0, babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.26.0: - version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" - dependencies: - babel-runtime "^6.26.0" - esutils "^2.0.2" - lodash "^4.17.4" - to-fast-properties "^1.0.3" - -babylon@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" - -balanced-match@^0.4.1: - version "0.4.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838" - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - -bcrypt-pbkdf@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" - dependencies: - tweetnacl "^0.14.3" - -blessed@^0.1.81: - version "0.1.81" - resolved "https://registry.yarnpkg.com/blessed/-/blessed-0.1.81.tgz#f962d687ec2c369570ae71af843256e6d0ca1129" - -block-stream@*: - version "0.0.9" - resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" - dependencies: - inherits "~2.0.0" - -bluebird@^3.1.5: - version "3.5.0" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.0.tgz#791420d7f551eea2897453a8a77653f96606d67c" - -boom@2.x.x: - version "2.10.1" - resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f" - dependencies: - hoek "2.x.x" - -boom@4.x.x: - version "4.3.1" - resolved "https://registry.yarnpkg.com/boom/-/boom-4.3.1.tgz#4f8a3005cb4a7e3889f749030fd25b96e01d2e31" - dependencies: - hoek "4.x.x" - -boom@5.x.x: - version "5.2.0" - resolved "https://registry.yarnpkg.com/boom/-/boom-5.2.0.tgz#5dd9da6ee3a5f302077436290cb717d3f4a54e02" - dependencies: - hoek "4.x.x" - -brace-expansion@^1.0.0: - version "1.1.7" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.7.tgz#3effc3c50e000531fb720eaff80f0ae8ef23cf59" - dependencies: - balanced-match "^0.4.1" - concat-map "0.0.1" - -brace-expansion@^1.1.7: - version "1.1.8" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^1.8.2: - version "1.8.5" - resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" - dependencies: - expand-range "^1.8.1" - preserve "^0.2.0" - repeat-element "^1.1.2" - 
-browser-resolve@^1.11.2: - version "1.11.2" - resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.2.tgz#8ff09b0a2c421718a1051c260b32e48f442938ce" - dependencies: - resolve "1.1.7" - -bser@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bser/-/bser-1.0.2.tgz#381116970b2a6deea5646dd15dd7278444b56169" - dependencies: - node-int64 "^0.4.0" - -bser@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.0.0.tgz#9ac78d3ed5d915804fd87acb158bc797147a1719" - dependencies: - node-int64 "^0.4.0" - -builtin-modules@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" - -callsites@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" - -camelcase@^1.0.2: - version "1.2.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" - -camelcase@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" - -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - -center-align@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" - dependencies: - align-text "^0.1.3" - lazy-cache "^1.0.3" - -chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - -chalk@^2.0.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.1.0.tgz#ac5becf14fa21b99c6c92ca7a7d7cfd5b17e743e" - dependencies: - ansi-styles "^3.1.0" - escape-string-regexp "^1.0.5" - supports-color "^4.0.0" - -ci-info@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.1.1.tgz#47b44df118c48d2597b56d342e7e25791060171a" - -cliui@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" - dependencies: - center-align "^0.1.1" - right-align "^0.1.1" - wordwrap "0.0.2" - -cliui@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - wrap-ansi "^2.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - -color-convert@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a" - dependencies: - color-name "^1.1.1" - -color-name@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - -colors@>=0.6.2, colors@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" - -combined-stream@^1.0.5, 
combined-stream@~1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" - dependencies: - delayed-stream "~1.0.0" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - -console-control-strings@^1.0.0, console-control-strings@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" - -content-type-parser@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/content-type-parser/-/content-type-parser-1.0.1.tgz#c3e56988c53c65127fb46d4032a3a900246fdc94" - -convert-source-map@^1.4.0, convert-source-map@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.0.tgz#9acd70851c6d5dfdd93d9282e5edf94a03ff46b5" - -core-js@^2.4.0, core-js@^2.5.0: - version "2.5.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.1.tgz#ae6874dc66937789b80754ff5428df66819ca50b" - -core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - -cross-spawn@^5.0.1: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - -cryptiles@2.x.x: - version "2.0.5" - resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" - dependencies: - boom "2.x.x" - -cryptiles@3.x.x: - version "3.1.2" - resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-3.1.2.tgz#a89fbb220f5ce25ec56e8c4aa8a4fd7b5b0d29fe" - dependencies: - boom "5.x.x" - -cssom@0.3.x, "cssom@>= 0.3.2 < 0.4.0": - version "0.3.2" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.2.tgz#b8036170c79f07a90ff2f16e22284027a243848b" - -"cssstyle@>= 0.2.37 < 0.3.0": - version "0.2.37" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-0.2.37.tgz#541097234cb2513c83ceed3acddc27ff27987d54" - dependencies: - cssom "0.3.x" - -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - dependencies: - assert-plus "^1.0.0" - -debug@^2.2.0, debug@^2.6.3, debug@^2.6.8: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - dependencies: - ms "2.0.0" - -decamelize@^1.0.0, decamelize@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - -deep-equal@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.0.1.tgz#f5d260292b660e084eff4cdbc9f08ad3247448b5" - -deep-extend@~0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" - -deep-is@~0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" - -default-require-extensions@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-1.0.0.tgz#f37ea15d3e13ffd9b437d33e1a75b5fb97874cb8" - dependencies: - strip-bom 
"^2.0.0" - -define-properties@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94" - dependencies: - foreach "^2.0.5" - object-keys "^1.0.8" - -defined@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - -delegates@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" - -detect-indent@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" - dependencies: - repeating "^2.0.0" - -diff@^3.2.0: - version "3.3.1" - resolved "https://registry.yarnpkg.com/diff/-/diff-3.3.1.tgz#aa8567a6eed03c531fc89d3f711cd0e5259dec75" - -diff@~3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9" - -ecc-jsbn@~0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" - dependencies: - jsbn "~0.1.0" - -errno@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.4.tgz#b896e23a9e5e8ba33871fc996abd3635fc9a1c7d" - dependencies: - prr "~0.0.0" - -error-ex@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" - dependencies: - is-arrayish "^0.2.1" - -es-abstract@^1.5.0: - version "1.8.2" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.8.2.tgz#25103263dc4decbda60e0c737ca32313518027ee" - dependencies: - es-to-primitive "^1.1.1" - function-bind "^1.1.1" - has "^1.0.1" - is-callable "^1.1.3" - is-regex "^1.0.4" - -es-to-primitive@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d" - dependencies: - is-callable "^1.1.1" - is-date-object "^1.0.1" - is-symbol "^1.0.1" - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - -escodegen@^1.6.1: - version "1.9.0" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.9.0.tgz#9811a2f265dc1cd3894420ee3717064b632b8852" - dependencies: - esprima "^3.1.3" - estraverse "^4.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.5.6" - -esprima@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" - -esprima@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" - -estraverse@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" - -esutils@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" - -exec-sh@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.0.tgz#14f75de3f20d286ef933099b2ce50a90359cef10" - 
dependencies: - merge "^1.1.3" - -execa@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -expand-brackets@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" - dependencies: - is-posix-bracket "^0.1.0" - -expand-range@^1.8.1: - version "1.8.2" - resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" - dependencies: - fill-range "^2.1.0" - -expect@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-21.1.0.tgz#1c138ec803c72d28cbd10dfe97104966d967c24a" - dependencies: - ansi-styles "^3.2.0" - jest-diff "^21.1.0" - jest-get-type "^21.0.2" - jest-matcher-utils "^21.1.0" - jest-message-util "^21.1.0" - jest-regex-util "^21.1.0" - -extend@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.0.tgz#5a474353b9f3353ddd8176dfd37b91c83a46f1d4" - -extend@~3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" - -extglob@^0.3.1: - version "0.3.2" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" - dependencies: - is-extglob "^1.0.0" - -extsprintf@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.0.2.tgz#e1080e0658e300b06294990cc70e1502235fd550" - -fast-deep-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff" - -fast-levenshtein@~2.0.4: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - -fb-watchman@^1.8.0: - version "1.9.2" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-1.9.2.tgz#a24cf47827f82d38fb59a69ad70b76e3b6ae7383" - dependencies: - bser "1.0.2" - -fb-watchman@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.0.tgz#54e9abf7dfa2f26cd9b1636c588c1afc05de5d58" - dependencies: - bser "^2.0.0" - -filename-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.0.tgz#996e3e80479b98b9897f15a8a58b3d084e926775" - -fileset@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/fileset/-/fileset-2.0.3.tgz#8e7548a96d3cc2327ee5e674168723a333bba2a0" - dependencies: - glob "^7.0.3" - minimatch "^3.0.3" - -fill-range@^2.1.0: - version "2.2.3" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" - dependencies: - is-number "^2.1.0" - isobject "^2.0.0" - randomatic "^1.1.3" - repeat-element "^1.1.2" - repeat-string "^1.5.2" - -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - -find-up@^2.0.0, find-up@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - dependencies: - locate-path "^2.0.0" - -flow-parser@~0.21.0: - version 
"0.21.0" - resolved "https://registry.yarnpkg.com/flow-parser/-/flow-parser-0.21.0.tgz#80b575c7e67c2c6e483ed3aade98a190e2ef7ae9" - dependencies: - ast-types "0.8.14" - colors ">=0.6.2" - minimist ">=0.2.0" - -for-each@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.2.tgz#2c40450b9348e97f281322593ba96704b9abd4d4" - dependencies: - is-function "~1.0.0" - -for-in@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - -for-own@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" - dependencies: - for-in "^1.0.1" - -foreach@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" - -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - -form-data@~2.1.1: - version "2.1.4" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1" - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.5" - mime-types "^2.1.12" - -form-data@~2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.1.tgz#6fb94fbd71885306d73d15cc497fe4cc4ecd44bf" - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.5" - mime-types "^2.1.12" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - -fsevents@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.2.tgz#3282b713fb3ad80ede0e9fcf4611b5aa6fc033f4" - dependencies: - nan "^2.3.0" - node-pre-gyp "^0.6.36" - -fstream-ignore@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" - dependencies: - fstream "^1.0.0" - inherits "2" - minimatch "^3.0.0" - -fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: - version "1.0.11" - resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171" - dependencies: - graceful-fs "^4.1.2" - inherits "~2.0.0" - mkdirp ">=0.5 0" - rimraf "2" - -function-bind@^1.0.2, function-bind@^1.1.1, function-bind@~1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - -gauge@~2.7.3: - version "2.7.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" - dependencies: - aproba "^1.0.3" - console-control-strings "^1.0.0" - has-unicode "^2.0.0" - object-assign "^4.1.0" - signal-exit "^3.0.0" - string-width "^1.0.1" - strip-ansi "^3.0.1" - wide-align "^1.1.0" - -get-caller-file@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" - -get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - -getpass@^0.1.1: - version "0.1.6" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.6.tgz#283ffd9fc1256840875311c1b60e8c40187110e6" - dependencies: - assert-plus "^1.0.0" - -glob-base@^0.3.0: - version "0.3.0" - resolved 
"https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" - dependencies: - glob-parent "^2.0.0" - is-glob "^2.0.0" - -glob-parent@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" - dependencies: - is-glob "^2.0.0" - -glob@^7.0.3, glob@^7.0.5: - version "7.1.1" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.2" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^7.1.1, glob@^7.1.2, glob@~7.1.2: - version "7.1.2" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^9.18.0: - version "9.18.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" - -graceful-fs@^4.1.11, graceful-fs@^4.1.2: - version "4.1.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" - -growly@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" - -handlebars@^4.0.3: - version "4.0.10" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.10.tgz#3d30c718b09a3d96f23ea4cc1f403c4d3ba9ff4f" - dependencies: - async "^1.4.0" - optimist "^0.6.1" - source-map "^0.4.4" - optionalDependencies: - uglify-js "^2.6" - -har-schema@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" - -har-schema@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" - -har-validator@~4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" - dependencies: - ajv "^4.9.1" - har-schema "^1.0.5" - -har-validator@~5.0.3: - version "5.0.3" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd" - dependencies: - ajv "^5.1.0" - har-schema "^2.0.0" - -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - dependencies: - ansi-regex "^2.0.0" - -has-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" - -has-flag@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" - -has-unicode@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" - -has@^1.0.1, has@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" - dependencies: - function-bind "^1.0.2" - -hawk@~3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" - dependencies: - boom "2.x.x" - cryptiles "2.x.x" - hoek "2.x.x" - sntp "1.x.x" - -hawk@~6.0.2: - version "6.0.2" 
- resolved "https://registry.yarnpkg.com/hawk/-/hawk-6.0.2.tgz#af4d914eb065f9b5ce4d9d11c1cb2126eecc3038" - dependencies: - boom "4.x.x" - cryptiles "3.x.x" - hoek "4.x.x" - sntp "2.x.x" - -hoek@2.x.x: - version "2.16.3" - resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" - -hoek@4.x.x: - version "4.2.0" - resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.0.tgz#72d9d0754f7fe25ca2d01ad8f8f9a9449a89526d" - -home-or-tmp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.1" - -hosted-git-info@^2.1.4: - version "2.5.0" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c" - -html-encoding-sniffer@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.1.tgz#79bf7a785ea495fe66165e734153f363ff5437da" - dependencies: - whatwg-encoding "^1.0.1" - -http-signature@~1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" - dependencies: - assert-plus "^0.2.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -iconv-lite@0.4.13: - version "0.4.13" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.13.tgz#1f88aba4ab0b1508e8312acc39345f36e992e2f2" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@~2.0.0, inherits@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - -ini@~1.3.0: - version "1.3.4" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" - -invariant@^2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" - dependencies: - loose-envify "^1.0.0" - -invert-kv@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - -is-buffer@^1.0.2: - version "1.1.5" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc" - -is-builtin-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" - dependencies: - builtin-modules "^1.0.0" - -is-callable@^1.1.1, is-callable@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2" - -is-ci@^1.0.10: - version "1.0.10" - resolved 
"https://registry.yarnpkg.com/is-ci/-/is-ci-1.0.10.tgz#f739336b2632365061a9d48270cd56ae3369318e" - dependencies: - ci-info "^1.0.0" - -is-date-object@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" - -is-dotfile@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.2.tgz#2c132383f39199f8edc268ca01b9b007d205cc4d" - -is-equal-shallow@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" - dependencies: - is-primitive "^2.0.0" - -is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - -is-extglob@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" - -is-finite@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - -is-function@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-function/-/is-function-1.0.1.tgz#12cfb98b65b57dd3d193a3121f5f6e2f437602b5" - -is-glob@^2.0.0, is-glob@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" - dependencies: - is-extglob "^1.0.0" - -is-number@^2.0.2, is-number@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" - dependencies: - kind-of "^3.0.2" - -is-posix-bracket@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" - -is-primitive@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" - -is-regex@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" - dependencies: - has "^1.0.1" - -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - -is-symbol@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572" - -is-typedarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - -is-utf8@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" - -isarray@1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - -isexe@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - dependencies: - isarray "1.0.0" - -isstream@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - -istanbul-api@^1.1.1: - version "1.1.14" - resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.1.14.tgz#25bc5701f7c680c0ffff913de46e3619a3a6e680" - dependencies: - async "^2.1.4" - fileset "^2.0.2" - istanbul-lib-coverage "^1.1.1" - istanbul-lib-hook "^1.0.7" - istanbul-lib-instrument "^1.8.0" - istanbul-lib-report "^1.1.1" - istanbul-lib-source-maps "^1.2.1" - istanbul-reports "^1.1.2" - js-yaml "^3.7.0" - mkdirp "^0.5.1" - once "^1.4.0" - -istanbul-lib-coverage@^1.0.1, istanbul-lib-coverage@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.1.tgz#73bfb998885299415c93d38a3e9adf784a77a9da" - -istanbul-lib-hook@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.0.7.tgz#dd6607f03076578fe7d6f2a630cf143b49bacddc" - dependencies: - append-transform "^0.4.0" - -istanbul-lib-instrument@^1.4.2, istanbul-lib-instrument@^1.7.5, istanbul-lib-instrument@^1.8.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.8.0.tgz#66f6c9421cc9ec4704f76f2db084ba9078a2b532" - dependencies: - babel-generator "^6.18.0" - babel-template "^6.16.0" - babel-traverse "^6.18.0" - babel-types "^6.18.0" - babylon "^6.18.0" - istanbul-lib-coverage "^1.1.1" - semver "^5.3.0" - -istanbul-lib-report@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.1.tgz#f0e55f56655ffa34222080b7a0cd4760e1405fc9" - dependencies: - istanbul-lib-coverage "^1.1.1" - mkdirp "^0.5.1" - path-parse "^1.0.5" - supports-color "^3.1.2" - -istanbul-lib-source-maps@^1.1.0, istanbul-lib-source-maps@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.1.tgz#a6fe1acba8ce08eebc638e572e294d267008aa0c" - dependencies: - debug "^2.6.3" - istanbul-lib-coverage "^1.1.1" - mkdirp "^0.5.1" - rimraf "^2.6.1" - source-map "^0.5.3" - -istanbul-reports@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.2.tgz#0fb2e3f6aa9922bd3ce45d05d8ab4d5e8e07bd4f" - dependencies: - handlebars "^4.0.3" - -jest-changed-files@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-21.1.0.tgz#e70f6b33b75d5987f4eae07e35bea5525635f92a" - dependencies: - throat "^4.0.0" - -jest-cli@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-21.1.0.tgz#4f671885ea3521803c96a1fd95baaa6a1ba8d70f" - dependencies: - ansi-escapes "^3.0.0" - chalk "^2.0.1" - glob "^7.1.2" - graceful-fs "^4.1.11" - is-ci "^1.0.10" - istanbul-api "^1.1.1" - istanbul-lib-coverage "^1.0.1" - istanbul-lib-instrument "^1.4.2" - istanbul-lib-source-maps "^1.1.0" - jest-changed-files "^21.1.0" - jest-config "^21.1.0" - jest-environment-jsdom "^21.1.0" - jest-haste-map "^21.1.0" - jest-message-util "^21.1.0" - jest-regex-util "^21.1.0" - jest-resolve-dependencies "^21.1.0" - jest-runner "^21.1.0" - jest-runtime "^21.1.0" - jest-snapshot 
"^21.1.0" - jest-util "^21.1.0" - micromatch "^2.3.11" - node-notifier "^5.0.2" - pify "^3.0.0" - slash "^1.0.0" - string-length "^2.0.0" - strip-ansi "^4.0.0" - which "^1.2.12" - worker-farm "^1.3.1" - yargs "^9.0.0" - -jest-config@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-21.1.0.tgz#7ef8778af679de30dad75e355a0dfbb0330b8d2f" - dependencies: - chalk "^2.0.1" - glob "^7.1.1" - jest-environment-jsdom "^21.1.0" - jest-environment-node "^21.1.0" - jest-get-type "^21.0.2" - jest-jasmine2 "^21.1.0" - jest-regex-util "^21.1.0" - jest-resolve "^21.1.0" - jest-util "^21.1.0" - jest-validate "^21.1.0" - pretty-format "^21.1.0" - -jest-diff@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-21.1.0.tgz#ca4c9d40272a6901dcde6c4c0bb2f568c363cc42" - dependencies: - chalk "^2.0.1" - diff "^3.2.0" - jest-get-type "^21.0.2" - pretty-format "^21.1.0" - -jest-docblock@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-21.1.0.tgz#43154be2441fb91403e36bb35cb791a5017cea81" - -jest-environment-jsdom@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-21.1.0.tgz#40729a60cd4544625f7d3a33c32bdaad63e57db7" - dependencies: - jest-mock "^21.1.0" - jest-util "^21.1.0" - jsdom "^9.12.0" - -jest-environment-node@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-21.1.0.tgz#a11fd611e8ae6c3e02b785aa1b12a3009f4fd0f1" - dependencies: - jest-mock "^21.1.0" - jest-util "^21.1.0" - -jest-get-type@^21.0.2: - version "21.0.2" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-21.0.2.tgz#304e6b816dd33cd1f47aba0597bcad258a509fc6" - -jest-haste-map@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-21.1.0.tgz#08e7a8c584008d4b790b8dddf7dd3e3db03b75d3" - dependencies: - fb-watchman "^2.0.0" - graceful-fs "^4.1.11" - jest-docblock "^21.1.0" - micromatch "^2.3.11" - sane "^2.0.0" - worker-farm "^1.3.1" - -jest-jasmine2@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-21.1.0.tgz#975c3cd3ecd9d50d385bfe3c680dd61979f50c9c" - dependencies: - chalk "^2.0.1" - expect "^21.1.0" - graceful-fs "^4.1.11" - jest-diff "^21.1.0" - jest-matcher-utils "^21.1.0" - jest-message-util "^21.1.0" - jest-snapshot "^21.1.0" - p-cancelable "^0.3.0" - -jest-matcher-utils@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-21.1.0.tgz#b02e237b287c58915ce9a5bf3c7138dba95125a7" - dependencies: - chalk "^2.0.1" - jest-get-type "^21.0.2" - pretty-format "^21.1.0" - -jest-message-util@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-21.1.0.tgz#7f9a52535d1a640af0d4c800edde737e14ea0526" - dependencies: - chalk "^2.0.1" - micromatch "^2.3.11" - slash "^1.0.0" - -jest-mock@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-21.1.0.tgz#c4dddfa893a0b120b72b5ae87c7506745213a790" - -jest-regex-util@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-21.1.0.tgz#59e4bad74f5ffd62a3835225f9bc1ee3796b5adb" - -jest-resolve-dependencies@^21.1.0: - version "21.1.0" - resolved 
"https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-21.1.0.tgz#9f78852e65d864d04ad0919ac8226b3f1434e7b0" - dependencies: - jest-regex-util "^21.1.0" - -jest-resolve@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-21.1.0.tgz#6bb806ca5ad876c250044fe62f298321d2da5c06" - dependencies: - browser-resolve "^1.11.2" - chalk "^2.0.1" - is-builtin-module "^1.0.0" - -jest-runner@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-21.1.0.tgz#d7ea7e2fa10ed673d4dd25ba2f3faae2efb89a07" - dependencies: - jest-config "^21.1.0" - jest-docblock "^21.1.0" - jest-haste-map "^21.1.0" - jest-jasmine2 "^21.1.0" - jest-message-util "^21.1.0" - jest-runtime "^21.1.0" - jest-util "^21.1.0" - pify "^3.0.0" - throat "^4.0.0" - worker-farm "^1.3.1" - -jest-runtime@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-21.1.0.tgz#c9a180a9e06ef046d0ad157dea52355abb7cbad4" - dependencies: - babel-core "^6.0.0" - babel-jest "^21.0.2" - babel-plugin-istanbul "^4.0.0" - chalk "^2.0.1" - convert-source-map "^1.4.0" - graceful-fs "^4.1.11" - jest-config "^21.1.0" - jest-haste-map "^21.1.0" - jest-regex-util "^21.1.0" - jest-resolve "^21.1.0" - jest-util "^21.1.0" - json-stable-stringify "^1.0.1" - micromatch "^2.3.11" - slash "^1.0.0" - strip-bom "3.0.0" - write-file-atomic "^2.1.0" - yargs "^9.0.0" - -jest-snapshot@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-21.1.0.tgz#a5fa9d52847d8f52e19a1df6ccae9de699193ccc" - dependencies: - chalk "^2.0.1" - jest-diff "^21.1.0" - jest-matcher-utils "^21.1.0" - mkdirp "^0.5.1" - natural-compare "^1.4.0" - pretty-format "^21.1.0" - -jest-util@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-21.1.0.tgz#f92ff756422cc0609ddf5a9bfa4d34b2835d8c30" - dependencies: - callsites "^2.0.0" - chalk "^2.0.1" - graceful-fs "^4.1.11" - jest-message-util "^21.1.0" - jest-mock "^21.1.0" - jest-validate "^21.1.0" - mkdirp "^0.5.1" - -jest-validate@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-21.1.0.tgz#39d01115544a758bce49f221a5fcbb24ebdecc65" - dependencies: - chalk "^2.0.1" - jest-get-type "^21.0.2" - leven "^2.1.0" - pretty-format "^21.1.0" - -jest@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/jest/-/jest-21.1.0.tgz#77c7baa8aa9e8bace7fe41a30d748ab56e89476a" - dependencies: - jest-cli "^21.1.0" - -jodid25519@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/jodid25519/-/jodid25519-1.0.2.tgz#06d4912255093419477d425633606e0e90782967" - dependencies: - jsbn "~0.1.0" - -js-tokens@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.1.tgz#08e9f132484a2c45a30907e9dc4d5567b7f114d7" - -js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - -js-yaml@^3.7.0: - version "3.10.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.10.0.tgz#2e78441646bd4682e963f22b6e92823c309c62dc" - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -jsbn@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - -jsdom@^9.12.0: - version "9.12.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-9.12.0.tgz#e8c546fffcb06c00d4833ca84410fed7f8a097d4" - 
dependencies: - abab "^1.0.3" - acorn "^4.0.4" - acorn-globals "^3.1.0" - array-equal "^1.0.0" - content-type-parser "^1.0.1" - cssom ">= 0.3.2 < 0.4.0" - cssstyle ">= 0.2.37 < 0.3.0" - escodegen "^1.6.1" - html-encoding-sniffer "^1.0.1" - nwmatcher ">= 1.3.9 < 2.0.0" - parse5 "^1.5.1" - request "^2.79.0" - sax "^1.2.1" - symbol-tree "^3.2.1" - tough-cookie "^2.3.2" - webidl-conversions "^4.0.0" - whatwg-encoding "^1.0.1" - whatwg-url "^4.3.0" - xml-name-validator "^2.0.1" - -jsesc@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" - -jsesc@~0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - -json-schema-traverse@^0.3.0: - version "0.3.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" - -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" - -json-stable-stringify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" - dependencies: - jsonify "~0.0.0" - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - -json5@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" - -jsonify@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" - -jsprim@^1.2.2: - version "1.4.0" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918" - dependencies: - assert-plus "1.0.0" - extsprintf "1.0.2" - json-schema "0.2.3" - verror "1.3.6" - -kind-of@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.1.0.tgz#475d698a5e49ff5e53d14e3e732429dc8bf4cf47" - dependencies: - is-buffer "^1.0.2" - -lazy-cache@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" - -lcid@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" - dependencies: - invert-kv "^1.0.0" - -leven@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580" - -levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -load-json-file@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - pinkie-promise "^2.0.0" - strip-bom "^2.0.0" - -load-json-file@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - strip-bom "^3.0.0" - -locate-path@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" - dependencies: - p-locate "^2.0.0" - path-exists "^3.0.0" - -lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0: - version "4.17.4" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" - -longest@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" - -loose-envify@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" - dependencies: - js-tokens "^3.0.0" - -lru-cache@^4.0.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -makeerror@1.0.x: - version "1.0.11" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" - dependencies: - tmpl "1.0.x" - -mem@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" - dependencies: - mimic-fn "^1.0.0" - -merge@^1.1.3: - version "1.2.0" - resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.0.tgz#7531e39d4949c281a66b8c5a6e0265e8b05894da" - -micromatch@^2.1.5, micromatch@^2.3.11: - version "2.3.11" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" - dependencies: - arr-diff "^2.0.0" - array-unique "^0.2.1" - braces "^1.8.2" - expand-brackets "^0.1.4" - extglob "^0.3.1" - filename-regex "^2.0.0" - is-extglob "^1.0.0" - is-glob "^2.0.1" - kind-of "^3.0.2" - normalize-path "^2.0.1" - object.omit "^2.0.0" - parse-glob "^3.0.4" - regex-cache "^0.4.2" - -mime-db@~1.27.0: - version "1.27.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.27.0.tgz#820f572296bbd20ec25ed55e5b5de869e5436eb1" - -mime-db@~1.30.0: - version "1.30.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.30.0.tgz#74c643da2dd9d6a45399963465b26d5ca7d71f01" - -mime-types@^2.1.12, mime-types@~2.1.7: - version "2.1.15" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.15.tgz#a4ebf5064094569237b8cf70046776d09fc92aed" - dependencies: - mime-db "~1.27.0" - -mime-types@~2.1.17: - version "2.1.17" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.17.tgz#09d7a393f03e995a79f8af857b70a9e0ab16557a" - dependencies: - mime-db "~1.30.0" - -mime@^1.3.4: - version "1.3.4" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53" - -mimic-fn@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18" - -minimatch@^3.0.0, minimatch@^3.0.3, minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - dependencies: - brace-expansion "^1.1.7" - -minimatch@^3.0.2: - version "3.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.3.tgz#2a4e4090b96b2db06a9d7df01055a62a77c9b774" - dependencies: - brace-expansion "^1.0.0" - -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - -minimist@>=0.2.0, minimist@^1.1.1, minimist@^1.2.0, minimist@~1.2.0: - version 
"1.2.0" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" - -minimist@~0.0.1: - version "0.0.10" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" - -"mkdirp@>=0.5 0", mkdirp@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - dependencies: - minimist "0.0.8" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - -nan@^2.3.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.7.0.tgz#d95bf721ec877e08db276ed3fc6eb78f9083ad46" - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - -ncp@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" - -node-int64@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - -node-notifier@^5.0.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.1.2.tgz#2fa9e12605fa10009d44549d6fcd8a63dde0e4ff" - dependencies: - growly "^1.3.0" - semver "^5.3.0" - shellwords "^0.1.0" - which "^1.2.12" - -node-pre-gyp@^0.6.36: - version "0.6.37" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.37.tgz#3c872b236b2e266e4140578fe1ee88f693323a05" - dependencies: - mkdirp "^0.5.1" - nopt "^4.0.1" - npmlog "^4.0.2" - rc "^1.1.7" - request "^2.81.0" - rimraf "^2.6.1" - semver "^5.3.0" - tape "^4.6.3" - tar "^2.2.1" - tar-pack "^3.4.0" - -nopt@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" - dependencies: - abbrev "1" - osenv "^0.1.4" - -normalize-package-data@^2.3.2: - version "2.4.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" - dependencies: - hosted-git-info "^2.1.4" - is-builtin-module "^1.0.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - -normalize-path@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - dependencies: - remove-trailing-separator "^1.0.1" - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - dependencies: - path-key "^2.0.0" - -npmlog@^4.0.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" - dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" - -number-is-nan@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" - -"nwmatcher@>= 1.3.9 < 2.0.0": - version "1.4.1" - resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.1.tgz#7ae9b07b0ea804db7e25f05cb5fe4097d4e4949f" - -oauth-sign@~0.8.1, oauth-sign@~0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" - -object-assign@^4.1.0: 
- version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - -object-inspect@~1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.3.0.tgz#5b1eb8e6742e2ee83342a637034d844928ba2f6d" - -object-keys@^1.0.8: - version "1.0.11" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d" - -object.omit@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" - dependencies: - for-own "^0.1.4" - is-extendable "^0.1.1" - -once@^1.3.0, once@^1.3.3, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - dependencies: - wrappy "1" - -optimist@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" - dependencies: - minimist "~0.0.1" - wordwrap "~0.0.2" - -optionator@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.4" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - wordwrap "~1.0.0" - -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - -os-locale@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" - dependencies: - execa "^0.7.0" - lcid "^1.0.0" - mem "^1.1.0" - -os-tmpdir@^1.0.0, os-tmpdir@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - -osenv@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644" - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - -p-cancelable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - -p-limit@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc" - -p-locate@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" - dependencies: - p-limit "^1.1.0" - -parse-glob@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" - dependencies: - glob-base "^0.3.0" - is-dotfile "^1.0.0" - is-extglob "^1.0.0" - is-glob "^2.0.0" - -parse-json@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" - dependencies: - error-ex "^1.2.0" - -parse5@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-1.5.1.tgz#9b7f3b0de32be78dc2401b17573ccaf0f6f59d94" - -path-exists@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" - 
dependencies: - pinkie-promise "^2.0.0" - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - -path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - -path-key@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - -path-parse@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" - -path-type@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" - dependencies: - graceful-fs "^4.1.2" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -path-type@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" - dependencies: - pify "^2.0.0" - -performance-now@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" - -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - -pify@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - -preserve@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" - -pretty-format@^21.1.0: - version "21.1.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-21.1.0.tgz#557428254323832ee8b7c971cb613442bea67f61" - dependencies: - ansi-regex "^3.0.0" - ansi-styles "^3.2.0" - -private@^0.1.6, private@^0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/private/-/private-0.1.7.tgz#68ce5e8a1ef0a23bb570cc28537b5332aba63ef1" - -process-nextick-args@~1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" - -prr@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" - -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - -punycode@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - -qs@~6.4.0: - version "6.4.0" - resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" - -qs@~6.5.1: - version "6.5.1" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8" - -randomatic@^1.1.3: - version "1.1.6" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.6.tgz#110dcabff397e9dcff7c0789ccc0a49adf1ec5bb" - dependencies: - is-number "^2.0.2" - kind-of "^3.0.2" - -rc@^1.1.7: - version "1.2.1" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.1.tgz#2e03e8e42ee450b8cb3dce65be1bf8974e1dfd95" - dependencies: - deep-extend "~0.4.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - -read-pkg-up@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" - dependencies: - find-up "^1.0.0" - read-pkg "^1.0.0" - -read-pkg-up@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" - dependencies: - find-up "^2.0.0" - read-pkg "^2.0.0" - -read-pkg@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" - dependencies: - load-json-file "^1.0.0" - normalize-package-data "^2.3.2" - path-type "^1.0.0" - -read-pkg@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" - dependencies: - load-json-file "^2.0.0" - normalize-package-data "^2.3.2" - path-type "^2.0.0" - -readable-stream@^2.0.6, readable-stream@^2.1.4: - version "2.3.3" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~1.0.6" - safe-buffer "~5.1.1" - string_decoder "~1.0.3" - util-deprecate "~1.0.1" - -regenerate@^1.2.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.2.tgz#d1941c67bad437e1be76433add5b385f95b19260" - -regenerator-runtime@^0.10.0: - version "0.10.3" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.3.tgz#8c4367a904b51ea62a908ac310bf99ff90a82a3e" - -regenerator-runtime@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.0.tgz#7e54fe5b5ccd5d6624ea6255c3473be090b802e1" - -regenerator-transform@0.9.11: - version "0.9.11" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.9.11.tgz#3a7d067520cb7b7176769eb5ff868691befe1283" - dependencies: - babel-runtime "^6.18.0" - babel-types "^6.19.0" - private "^0.1.6" - -regex-cache@^0.4.2: - version "0.4.3" - resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.3.tgz#9b1a6c35d4d0dfcef5711ae651e8e9d3d7114145" - dependencies: - is-equal-shallow "^0.1.3" - is-primitive "^2.0.0" - -regexpu-core@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240" - dependencies: - regenerate "^1.2.1" - regjsgen "^0.2.0" - regjsparser "^0.1.4" - -regjsgen@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" - -regjsparser@^0.1.4: - version "0.1.5" - resolved 
"https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c" - dependencies: - jsesc "~0.5.0" - -remove-trailing-separator@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.1.tgz#615ebb96af559552d4bf4057c8436d486ab63cc4" - -repeat-element@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" - -repeat-string@^1.5.2: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - -repeating@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" - dependencies: - is-finite "^1.0.0" - -request@^2.68.0: - version "2.81.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" - dependencies: - aws-sign2 "~0.6.0" - aws4 "^1.2.1" - caseless "~0.12.0" - combined-stream "~1.0.5" - extend "~3.0.0" - forever-agent "~0.6.1" - form-data "~2.1.1" - har-validator "~4.2.1" - hawk "~3.1.3" - http-signature "~1.1.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.7" - oauth-sign "~0.8.1" - performance-now "^0.2.0" - qs "~6.4.0" - safe-buffer "^5.0.1" - stringstream "~0.0.4" - tough-cookie "~2.3.0" - tunnel-agent "^0.6.0" - uuid "^3.0.0" - -request@^2.79.0, request@^2.81.0: - version "2.82.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.82.0.tgz#2ba8a92cd7ac45660ea2b10a53ae67cd247516ea" - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.6.0" - caseless "~0.12.0" - combined-stream "~1.0.5" - extend "~3.0.1" - forever-agent "~0.6.1" - form-data "~2.3.1" - har-validator "~5.0.3" - hawk "~6.0.2" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.17" - oauth-sign "~0.8.2" - performance-now "^2.1.0" - qs "~6.5.1" - safe-buffer "^5.1.1" - stringstream "~0.0.5" - tough-cookie "~2.3.2" - tunnel-agent "^0.6.0" - uuid "^3.1.0" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - -require-main-filename@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" - -resolve@1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" - -resolve@^1.1.7: - version "1.3.2" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.2.tgz#1f0442c9e0cbb8136e87b9305f932f46c7f28235" - dependencies: - path-parse "^1.0.5" - -resolve@~1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.4.0.tgz#a75be01c53da25d934a98ebd0e4c4a7312f92a86" - dependencies: - path-parse "^1.0.5" - -resumer@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/resumer/-/resumer-0.0.0.tgz#f1e8f461e4064ba39e82af3cdc2a8c893d076759" - dependencies: - through "~2.3.4" - -right-align@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" - dependencies: - align-text "^0.1.1" - -rimraf@2, rimraf@^2.5.1, rimraf@^2.6.1: - version "2.6.2" - resolved 
"https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" - dependencies: - glob "^7.0.5" - -rimraf@^2.5.2: - version "2.6.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" - dependencies: - glob "^7.0.5" - -safe-buffer@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" - -safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" - -sane@^1.4.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/sane/-/sane-1.6.0.tgz#9610c452307a135d29c1fdfe2547034180c46775" - dependencies: - anymatch "^1.3.0" - exec-sh "^0.2.0" - fb-watchman "^1.8.0" - minimatch "^3.0.2" - minimist "^1.1.1" - walker "~1.0.5" - watch "~0.10.0" - -sane@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/sane/-/sane-2.0.0.tgz#99cb79f21f4a53a69d4d0cd957c2db04024b8eb2" - dependencies: - anymatch "^1.3.0" - exec-sh "^0.2.0" - fb-watchman "^2.0.0" - minimatch "^3.0.2" - minimist "^1.1.1" - walker "~1.0.5" - watch "~0.10.0" - optionalDependencies: - fsevents "^1.1.1" - -sax@^1.2.1: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - -"semver@2 || 3 || 4 || 5", semver@^5.3.0: - version "5.4.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" - -set-blocking@^2.0.0, set-blocking@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - dependencies: - shebang-regex "^1.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - -shellwords@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" - -signal-exit@^3.0.0, signal-exit@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" - -slash@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" - -sntp@1.x.x: - version "1.0.9" - resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" - dependencies: - hoek "2.x.x" - -sntp@2.x.x: - version "2.0.2" - resolved "https://registry.yarnpkg.com/sntp/-/sntp-2.0.2.tgz#5064110f0af85f7cfdb7d6b67a40028ce52b4b2b" - dependencies: - hoek "4.x.x" - -source-map-support@^0.4.15: - version "0.4.18" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" - dependencies: - source-map "^0.5.6" - -source-map-support@~0.4.0: - version "0.4.14" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.14.tgz#9d4463772598b86271b4f523f6c1f4e02a7d6aef" - dependencies: - source-map "^0.5.6" - -source-map@^0.4.4: - version "0.4.4" - resolved 
"https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" - dependencies: - amdefine ">=0.0.4" - -source-map@^0.5.3, source-map@~0.5.1, source-map@~0.5.6: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - -source-map@^0.5.6: - version "0.5.6" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" - -spdx-correct@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" - dependencies: - spdx-license-ids "^1.0.2" - -spdx-expression-parse@~1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" - -spdx-license-ids@^1.0.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - -sshpk@^1.7.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.0.tgz#ff2a3e4fd04497555fed97b39a0fd82fafb3a33c" - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - dashdash "^1.12.0" - getpass "^0.1.1" - optionalDependencies: - bcrypt-pbkdf "^1.0.0" - ecc-jsbn "~0.1.1" - jodid25519 "^1.0.0" - jsbn "~0.1.0" - tweetnacl "~0.14.0" - -string-length@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-2.0.0.tgz#d40dbb686a3ace960c1cffca562bf2c45f8363ed" - dependencies: - astral-regex "^1.0.0" - strip-ansi "^4.0.0" - -string-width@^1.0.1, string-width@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -string-width@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - -string.prototype.trim@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.1.2.tgz#d04de2c89e137f4d7d206f086b5ed2fae6be8cea" - dependencies: - define-properties "^1.1.2" - es-abstract "^1.5.0" - function-bind "^1.0.2" - -string_decoder@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" - dependencies: - safe-buffer "~5.1.0" - -stringstream@~0.0.4, stringstream@~0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" - -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - dependencies: - ansi-regex "^3.0.0" - -strip-bom@3.0.0, strip-bom@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - -strip-bom@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" - dependencies: - is-utf8 "^0.2.0" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - -supports-color@^3.1.2: - version "3.2.3" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" - dependencies: - has-flag "^1.0.0" - -supports-color@^4.0.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.4.0.tgz#883f7ddabc165142b2a61427f3352ded195d1a3e" - dependencies: - has-flag "^2.0.0" - -symbol-tree@^3.2.1: - version "3.2.2" - resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6" - -tape@^4.6.3: - version "4.8.0" - resolved "https://registry.yarnpkg.com/tape/-/tape-4.8.0.tgz#f6a9fec41cc50a1de50fa33603ab580991f6068e" - dependencies: - deep-equal "~1.0.1" - defined "~1.0.0" - for-each "~0.3.2" - function-bind "~1.1.0" - glob "~7.1.2" - has "~1.0.1" - inherits "~2.0.3" - minimist "~1.2.0" - object-inspect "~1.3.0" - resolve "~1.4.0" - resumer "~0.0.0" - string.prototype.trim "~1.1.2" - through "~2.3.8" - -tar-pack@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.0.tgz#23be2d7f671a8339376cbdb0b8fe3fdebf317984" - dependencies: - debug "^2.2.0" - fstream "^1.0.10" - fstream-ignore "^1.0.5" - once "^1.3.3" - readable-stream "^2.1.4" - rimraf "^2.5.1" - tar "^2.2.1" - uid-number "^0.0.6" - -tar@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" - dependencies: - block-stream "*" - fstream "^1.0.2" - inherits "2" - -test-exclude@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.1.1.tgz#4d84964b0966b0087ecc334a2ce002d3d9341e26" - dependencies: - arrify "^1.0.1" - micromatch "^2.3.11" - object-assign "^4.1.0" - read-pkg-up "^1.0.1" - require-main-filename "^1.0.1" - -throat@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/throat/-/throat-4.1.0.tgz#89037cbc92c56ab18926e6ba4cbb200e15672a6a" - -through@~2.3.4, through@~2.3.8: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - -tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - -to-fast-properties@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" - -tough-cookie@^2.3.2, tough-cookie@~2.3.0, tough-cookie@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" - dependencies: - punycode "^1.4.1" - -tr46@~0.0.3: - version "0.0.3" - resolved 
"https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - -trim-right@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - -twit@^2.1.5: - version "2.2.5" - resolved "https://registry.yarnpkg.com/twit/-/twit-2.2.5.tgz#241480bab71731162d2a87b27450e4aa3bb5be5f" - dependencies: - bluebird "^3.1.5" - mime "^1.3.4" - request "^2.68.0" - -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - dependencies: - prelude-ls "~1.1.2" - -uglify-js@^2.6: - version "2.8.29" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" - dependencies: - source-map "~0.5.1" - yargs "~3.10.0" - optionalDependencies: - uglify-to-browserify "~1.0.0" - -uglify-to-browserify@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" - -uid-number@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" - -util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - -uuid@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" - -uuid@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04" - -validate-npm-package-license@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" - dependencies: - spdx-correct "~1.0.0" - spdx-expression-parse "~1.0.0" - -verror@1.3.6: - version "1.3.6" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.3.6.tgz#cff5df12946d297d2baaefaa2689e25be01c005c" - dependencies: - extsprintf "1.0.2" - -vscode-jsonrpc@^3.3.0: - version "3.3.1" - resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-3.3.1.tgz#b7857be58b97af664a8cdd071c91891d6c7d6a67" - -walker@~1.0.5: - version "1.0.7" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" - dependencies: - makeerror "1.0.x" - -watch@~0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/watch/-/watch-0.10.0.tgz#77798b2da0f9910d595f1ace5b0c2258521f21dc" - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - -webidl-conversions@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" - -whatwg-encoding@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.1.tgz#3c6c451a198ee7aec55b1ec61d0920c67801a5f4" - dependencies: - iconv-lite "0.4.13" - -whatwg-url@^4.3.0: - version "4.8.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-4.8.0.tgz#d2981aa9148c1e00a41c5a6131166ab4683bbcc0" - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - -which@^1.2.12, which@^1.2.9: - version "1.3.0" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" - dependencies: - isexe "^2.0.0" - -wide-align@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710" - dependencies: - string-width "^1.0.2" - -window-size@0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" - -wordwrap@0.0.2: - version "0.0.2" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" - -wordwrap@~0.0.2: - version "0.0.3" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" - -wordwrap@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - -worker-farm@^1.3.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.5.0.tgz#adfdf0cd40581465ed0a1f648f9735722afd5c8d" - dependencies: - errno "^0.1.4" - xtend "^4.0.1" - -wrap-ansi@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - -write-file-atomic@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.3.0.tgz#1ff61575c2e2a4e8e510d6fa4e243cce183999ab" - dependencies: - graceful-fs "^4.1.11" - imurmurhash "^0.1.4" - signal-exit "^3.0.2" - -xml-name-validator@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-2.0.1.tgz#4d8b8f1eccd3419aa362061becef515e1e559635" - -xtend@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" - -y18n@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" - -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - -yargs-parser@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9" - dependencies: - camelcase "^4.1.0" - -yargs@^9.0.0: - version "9.0.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-9.0.1.tgz#52acc23feecac34042078ee78c0c007f5085db4c" - dependencies: - camelcase "^4.1.0" - cliui "^3.2.0" - decamelize "^1.1.1" - get-caller-file "^1.0.1" - os-locale "^2.0.0" - read-pkg-up "^2.0.0" - require-directory "^2.1.1" - require-main-filename "^1.0.1" - set-blocking "^2.0.0" - 
string-width "^2.0.0" - which-module "^2.0.0" - y18n "^3.2.1" - yargs-parser "^7.0.0" - -yargs@~3.10.0: - version "3.10.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" - dependencies: - camelcase "^1.0.2" - cliui "^2.1.0" - decamelize "^1.0.0" - window-size "0.1.0" diff --git a/packages/flow-parser-bin/Makefile b/packages/flow-parser-bin/Makefile index da4921bfda4..47127873142 100644 --- a/packages/flow-parser-bin/Makefile +++ b/packages/flow-parser-bin/Makefile @@ -1,4 +1,4 @@ -# Copyright (c) 2017-present, Facebook, Inc. +# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/packages/flow-parser-bin/index.js b/packages/flow-parser-bin/index.js index 6afca2fa446..b658ef64616 100644 --- a/packages/flow-parser-bin/index.js +++ b/packages/flow-parser-bin/index.js @@ -1,5 +1,5 @@ /** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/packages/flow-parser-bin/package.json b/packages/flow-parser-bin/package.json index 8e74bc8f015..fbe6b618cc5 100644 --- a/packages/flow-parser-bin/package.json +++ b/packages/flow-parser-bin/package.json @@ -1,6 +1,6 @@ { "name": "flow-parser-bin", - "version": "0.82.0", + "version": "0.108.0", "description": "The Flow JavaScript parser, via bindings to the native OCaml implementation", "main": "index.js", "repository": "https://github.com/facebook/flow.git", diff --git a/packages/flow-parser-bin/src/flow_parser_node.cc b/packages/flow-parser-bin/src/flow_parser_node.cc index facf7a453c8..b794c4bb77c 100644 --- a/packages/flow-parser-bin/src/flow_parser_node.cc +++ b/packages/flow-parser-bin/src/flow_parser_node.cc @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/packages/flow-parser/Makefile b/packages/flow-parser/Makefile index 560a382417a..3b0f8bbb0b9 100644 --- a/packages/flow-parser/Makefile +++ b/packages/flow-parser/Makefile @@ -1,4 +1,4 @@ -# Copyright (c) 2013-present, Facebook, Inc. +# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/packages/flow-parser/package.json b/packages/flow-parser/package.json index 999bce77f83..43526f75165 100644 --- a/packages/flow-parser/package.json +++ b/packages/flow-parser/package.json @@ -1,6 +1,6 @@ { "name": "flow-parser", - "version": "0.82.0", + "version": "0.108.0", "description": "JavaScript parser written in OCaml. 
Produces ESTree AST", "homepage": "https://flow.org", "license": "MIT", diff --git a/packages/flow-parser/test/custom_ast_types.js b/packages/flow-parser/test/custom_ast_types.js index e04aa894a0d..7a007197ba8 100644 --- a/packages/flow-parser/test/custom_ast_types.js +++ b/packages/flow-parser/test/custom_ast_types.js @@ -64,7 +64,7 @@ def("DeclareOpaqueType") def("PrivateName") .bases("Expression") - .field("name", def("Identifier")) + .field("id", def("Identifier")) def("ClassPrivateProperty") .bases("ClassProperty") @@ -96,6 +96,7 @@ def("ExportDefaultDeclaration") def("FunctionDeclaration"), // TODO: should be NullableFunctionDeclaration def("VariableDeclaration"), def("InterfaceDeclaration"), + def("EnumDeclaration"), def("TypeAlias"), def("OpaqueType"), def("Expression"))) @@ -178,6 +179,7 @@ def("Function") def("ObjectTypeAnnotation") .field("exact", Boolean) + .field("inexact", or(Boolean, void 0), defaults["undefined"]) .field("properties", [or( def("ObjectTypeProperty"), def("ObjectTypeSpreadProperty"))]) @@ -267,3 +269,64 @@ def("CatchClause") .field("param", or(def("Pattern"), null), defaults["null"]) .field("guard", or(def("Expression"), null), defaults["null"]) .field("body", def("BlockStatement")); + +def("BigIntLiteral") + .bases("Literal") + .build("value", "bigint") + .field("value", or(def("BigInt"), null)) + .field("bigint", String); + +def("BigIntLiteralTypeAnnotation") + .bases("Type") + .build("value", "raw") + .field("value", or(def("BigInt"), null)) + .field("raw", String); + +// Enums +def("EnumDeclaration") + .bases("Declaration") + .build("id", "body") + .field("id", def("Identifier")) + .field("body", or( + def("EnumBooleanBody"), + def("EnumNumberBody"), + def("EnumStringBody"), + def("EnumSymbolBody"))) + +def("EnumBooleanBody") + .build("members", "explicitType") + .field("members", [def("EnumBooleanMember")]) + .field("explicitType", Boolean) + +def("EnumNumberBody") + .build("members", "explicitType") + .field("members", [def("EnumNumberMember")]) + .field("explicitType", Boolean) + +def("EnumStringBody") + .build("members", "explicitType") + .field("members", [or(def("EnumStringMember"), def("EnumDefaultedMember"))]) + .field("explicitType", Boolean) + +def("EnumSymbolBody") + .build("members") + .field("members", [def("EnumDefaultedMember")]) + +def("EnumBooleanMember") + .build("id", "init") + .field("id", def("Identifier")) + .field("init", Boolean) + +def("EnumNumberMember") + .build("id", "init") + .field("id", def("Identifier")) + .field("init", def("Literal")) + +def("EnumStringMember") + .build("id", "init") + .field("id", def("Identifier")) + .field("init", def("Literal")) + +def("EnumDefaultedMember") + .build("id") + .field("id", def("Identifier")) diff --git a/packages/flow-parser/test/esprima_test_runner.js b/packages/flow-parser/test/esprima_test_runner.js index 5d78404c71c..670dd9624b9 100644 --- a/packages/flow-parser/test/esprima_test_runner.js +++ b/packages/flow-parser/test/esprima_test_runner.js @@ -199,6 +199,7 @@ function handleSpecialObjectCompare(esprima, flow, env) { break; case 'ObjectTypeAnnotation': esprima.exact = esprima.exact || false; + delete flow.inexact; esprima.internalSlots = esprima.internalSlots || []; break; case 'ObjectTypeProperty': diff --git a/packages/flow-parser/test/hardcoded_test_runner.js b/packages/flow-parser/test/hardcoded_test_runner.js index de20f09b047..23a5ef5b3e6 100644 --- a/packages/flow-parser/test/hardcoded_test_runner.js +++ b/packages/flow-parser/test/hardcoded_test_runner.js @@ -98,7 +98,7 
@@ function check_ast(env, ast) { } } -function compare(env, ast, spec) { +function compare(env, ast, spec, skip_comments) { if (Array.isArray(spec)) { if (Array.isArray(ast)) { if (spec.length != ast.length) { @@ -108,7 +108,7 @@ function compare(env, ast, spec) { if (spec.hasOwnProperty(i)) { if (ast.hasOwnProperty(i)) { env.push_path(i); - compare(env, ast[i], spec[i]); + compare(env, ast[i], spec[i], skip_comments); env.pop_path(); } } @@ -118,23 +118,29 @@ function compare(env, ast, spec) { } } else if (spec != null && typeof spec == "object") { for (var prop in spec) { + var is_comments_prop = + prop === "trailingComments" || prop === "leadingComments"; + if (skip_comments && is_comments_prop) { + continue; + } if (spec.hasOwnProperty(prop)) { var path = prop.split("."); var sub_ast = ast; var found = true; var i; for (i = 0; i < path.length; i++) { - if (sub_ast && sub_ast.hasOwnProperty(path[i])) { - sub_ast = sub_ast[path[i]]; + var pathProp = path[i]; + if (sub_ast && sub_ast.hasOwnProperty(pathProp)) { + sub_ast = sub_ast[pathProp]; } else { - env.diff('Missing property "'+path[i]+'"'); + env.diff('Missing property "'+pathProp+'"'); found = false; break; } - env.push_path(path[i]); + env.push_path(pathProp); } if (found) { - compare(env, sub_ast, spec[prop]); + compare(env, sub_ast, spec[prop], skip_comments); } for (; i > 0; i--) { env.pop_path(); @@ -177,7 +183,7 @@ function runTest(test, parse_options, test_options) { // there are no parse errors. check_ast(env, flow_ast); } - compare(env, flow_ast, test.expected_ast); + compare(env, flow_ast, test.expected_ast, true); var diffs = env.get_diffs(); if (test_options.jsonErrors) { diff --git a/packages/flow-remove-types/.gitignore b/packages/flow-remove-types/.gitignore new file mode 100644 index 00000000000..6ace32bff66 --- /dev/null +++ b/packages/flow-remove-types/.gitignore @@ -0,0 +1,4 @@ +.DS_Store +npm-debug.log +node_modules +/npm-alias diff --git a/packages/flow-remove-types/LICENSE b/packages/flow-remove-types/LICENSE new file mode 100644 index 00000000000..b96dcb0480a --- /dev/null +++ b/packages/flow-remove-types/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Facebook, Inc. and its affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/packages/flow-remove-types/README.md b/packages/flow-remove-types/README.md new file mode 100644 index 00000000000..190e1cf4291 --- /dev/null +++ b/packages/flow-remove-types/README.md @@ -0,0 +1,247 @@ +flow-remove-types +================= + +[![npm](https://img.shields.io/npm/v/flow-remove-types.svg?maxAge=86400)](https://www.npmjs.com/package/flow-remove-types) +[![Build Status](https://img.shields.io/travis/flowtype/flow-remove-types.svg?style=flat&label=travis&branch=master)](https://travis-ci.org/flowtype/flow-remove-types) + +Turn your JavaScript with [Flow](https://flowtype.org/) type annotations into +standard JavaScript in an instant with no configuration and minimal setup. + +[Flow](https://flowtype.org/) provides static type checking to JavaScript which +can both help find and detect bugs long before code is deployed and can make +code easier to read and more self-documenting. The Flow tool itself only reads +and analyzes code. Running code with Flow type annotations requires first +removing the annotations which are non-standard JavaScript. Typically this is +done via adding a plugin to your [Babel](https://babeljs.io/) configuration, +however Babel may be overkill if you're only targeting modern versions of +Node.js or just not using the modern ES2015 features that may not be in +every browser. + +`flow-remove-types` is a faster, simpler, zero-configuration alternative with +minimal dependencies for super-fast `npm install` time. + + +## Get Started! + +Use the command line: + +``` +npm install --global flow-remove-types +``` + +``` +flow-remove-types --help +flow-remove-types input.js > output.js +``` + +Or the JavaScript API: + +``` +npm install flow-remove-types +``` + +```js +var flowRemoveTypes = require('flow-remove-types'); +var fs = require('fs'); + +var input = fs.readFileSync('input.js', 'utf8'); +var output = flowRemoveTypes(input); +fs.writeFileSync('output.js', output.toString()); +``` + +When using the `flow-remove-types` script, be sure [not to direct the output to itself](https://superuser.com/questions/597244/why-does-redirecting-the-output-of-a-file-to-itself-produce-a-blank-file)! + +## Use in Build Systems: + +**Rollup:** [`rollup-plugin-flow`](https://github.com/leebyron/rollup-plugin-flow) + +**Browserify:** [`unflowify`](https://github.com/leebyron/unflowify) + +**Webpack:** [`remove-flow-types-loader`](https://github.com/conorhastings/remove-flow-types-loader) + +**Gulp:** [`gulp-flow-remove-types`](https://github.com/wain-pc/gulp-flow-remove-types) + + +## Use with existing development tools + + * ESLint: [eslint-plugin-flowtype](https://github.com/gajus/eslint-plugin-flowtype) + * Mocha: `mocha -r flow-remove-types/register` + + +## Use `flow-node` + +Wherever you use `node` you can substitute `flow-node` and have a super fast +flow-types aware evaluator or REPL. + +``` +$ flow-node +> var x: number = 42 +undefined +> x +42 +``` + +> Note: This package is also available under the *alias* `flow-node` since it's +> often looked for at that location due to the popularity of this script. Both +> scripts are available no matter which package you install. 
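+
+For a combined example, the JavaScript API shown in "Get Started" accepts an
+options object; here is a minimal sketch using the `all` and `pretty` options
+that the rest of this README describes (file names are placeholders):
+
+```js
+var flowRemoveTypes = require('flow-remove-types');
+var fs = require('fs');
+
+// Strip annotations even in files without an @flow comment, and remove them
+// outright instead of padding with spaces.
+var input = fs.readFileSync('input.js', 'utf8');
+var output = flowRemoveTypes(input, { all: true, pretty: true });
+fs.writeFileSync('output.js', output.toString());
+```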
+ +## Use the require hook + +Using the require hook allows you to automatically compile files on the fly when +requiring in node, useful during development: + +```js +require('flow-remove-types/register') +require('./some-module-with-flow-type-syntax') +``` + +You can also provide options to the require hook: + +```js +// Transforms all files, not just those with a "@flow" comment. +require('flow-remove-types/register')({ all: true }) +``` + +Use options to define exactly which files to `includes` or `excludes` with regular +expressions. All files are included by default except those found in the +`node_modules` folder, which is excluded by default. + +```js +require('flow-remove-types/register')({ includes: /\/custom_path\// }) +``` + +> #### Don't use the require hook in packages distributed on NPM +> As always, don't forget to use `flow-remove-types` to compile files before distributing +> your code on npm, as using the require hook affects the whole runtime and not +> just your module and may hurt the runtime performance of code that includes it. + + +## Dead-Simple Transforms + +When `flow-remove-types` removes Flow types, it replaces them with whitespace. +This ensures that the transformed output has exactly the same number of lines +and characters and that all character offsets remain the same. This removes the +need for sourcemaps, maintains legible output, and ensures that it is super easy +to include `flow-remove-types` at any point in your existing build tools. + +Built atop the official Flow [parser](https://github.com/facebook/flow/tree/master/packages/flow-parser), +`flow-remove-types` is designed to operate on the same syntax Flow itself understands. +It also passes through other common non-standard syntax such as [JSX](https://facebook.github.io/jsx/) +and experimental ECMAScript proposals that Flow supports. + +**Before:** + +```js +import SomeClass from 'some-module' +import type { SomeInterface } from 'some-module' + +export class MyClass extends SomeClass implements SomeInterface { + + value: T + + constructor(value: T) { + this.value = value + } + + get(): T { + return this.value + } + +} + +``` + +**After:** + +```js +import SomeClass from 'some-module' + + +export class MyClass extends SomeClass { + + + + constructor(value ) { + this.value = value + } + + get() { + return this.value + } + +} +``` + +### Pretty Transform + +Rather not have the whitespace? Pass the `--pretty` flag to remove the whitespace. + +``` +flow-remove-types --pretty --sourcemaps source.js +``` + +Or using the JS API: + +```js +var flowRemoveTypes = require('flow-remove-types'); +var fs = require('fs'); + +var input = fs.readFileSync('input.js', 'utf8'); +var output = flowRemoveTypes(input, { pretty: true }); +fs.writeFileSync('output.js', output.toString()); +var sourceMap = output.generateMap(); +fs.writeFileSync('output.js.map', JSON.stringify(sourceMap)); +``` + + +## Performance + +> *NOTE*: These timings are for `flow-remove-types` v1. 
+ +### Install: + +Installing via `npm` from an empty project: + +**flow-remove-types:** + +``` +time npm install flow-remove-types + +real 0m3.193s +user 0m1.643s +sys 0m0.775s +``` + +**Babel:** + +``` +time npm install babel-cli babel-plugin-transform-flow-strip-types + +real 0m23.200s +user 0m10.395s +sys 0m4.238s +``` + +### Transform: + +Transforming a directory of 20 files of 100 lines each: + +**flow-remove-types:** + +``` +time flow-remove-types src/ --out-dir dest/ + +real 0m0.431s +user 0m0.436s +sys 0m0.068s +``` + +**Babel:** + +``` +time babel src/ --out-dir dest/ + +real 0m1.074s +user 0m1.092s +sys 0m0.149s +``` diff --git a/packages/flow-remove-types/flow-node b/packages/flow-remove-types/flow-node new file mode 100755 index 00000000000..05f978f7964 --- /dev/null +++ b/packages/flow-remove-types/flow-node @@ -0,0 +1,159 @@ +#!/usr/bin/env node +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +var fs = require('fs'); +var Module = require('module'); +var path = require('path'); +var repl = require('repl'); +var util = require('util'); +var vm = require('vm'); + +var flowRemoveTypes = require('./index'); + +var usage = 'Usage: flow-node [options] [ script.js ] [arguments] \n' + + +'\nOptions:\n' + +' -h, --help Show this message\n' + +' -v, --version Prints the current version of flow-node\n' + +' -e, --eval script Evaluate script\n' + +' -p, --print script Evaluate script and print result\n' + +' -c, --check Syntax check script without executing\n' + +' -a, --all Interpret all files as flow-typed, not just those with a @flow comment\n'; + +// Collect arguments +var evalScript; +var printScript; +var checkSource; +var all; +var source; +var options = []; +var i = 2; +while (i < process.argv.length) { + var arg = process.argv[i++]; + if (arg === '-h' || arg === '--help') { + process.stdout.write(usage); + process.exit(0); + } else if (arg === '-v' || arg === '--version') { + process.stdout.write('v' + require('./package').version + '\n'); + process.exit(0); + } else if (arg === '-e' || arg === '--eval') { + evalScript = process.argv[i++]; + if (!evalScript) { + process.stderr.write('flow-node: ' + arg + ' requires an argument'); + return process.exit(1); + } + } else if (arg === '-p' || arg === '--print') { + printScript = process.argv[i++]; + if (!printScript) { + process.stderr.write('flow-node: ' + arg + ' requires an argument'); + return process.exit(1); + } + } else if (arg === '-c' || arg === '--check') { + checkSource = true; + } else if (arg === '-a' || arg === '--all') { + all = true; + } else if (arg[0] === '-' || arg === 'debug') { + options.push(arg); + } else { + source = arg; + break; + } +} + +// If node options were provided, forward to another process with the options +// applied before other arguments. +if (options.length > 0) { + var nodePath = process.argv.shift(); + var nodeArgs = options.concat(process.argv.filter(function (arg) { + return options.indexOf(arg) === -1; + })); + + const child_process = require('child_process'); + const proc = child_process.spawn(nodePath, nodeArgs, { stdio: 'inherit' }); + proc.on('exit', function (code, signal) { + process.on('exit', function () { + if (signal) { + process.kill(process.pid, signal); + } else { + process.exit(code); + } + }); + }); + + return; +} + +require('./register')({ all: all }); + +// Evaluate and possibly also print a script. 
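+// e.g. `flow-node -p '(42: number)'` strips the annotation and prints 42; the
+// inline script is always transformed with { all: true }, so it does not need
+// an @flow comment.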
+if (evalScript || printScript) { + global.__filename = '[eval]'; + global.__dirname = process.cwd(); + var evalModule = new Module(global.__filename); + evalModule.filename = global.__filename; + evalModule.paths = Module._nodeModulePaths(global.__dirname); + global.exports = evalModule.exports; + global.module = evalModule; + global.require = evalModule.require.bind(evalModule); + var result = vm.runInThisContext( + flowRemoveTypes(evalScript || printScript, { all: true }).toString(), + { filename: global.__filename } + ); + + if (printScript) { + process.stdout.write((typeof result === 'string' ? result : util.inspect(result)) + '\n'); + } + +// Or check the source for syntax errors but do not run it. +} else if (source && checkSource) { + var code = fs.readFileSync(source, 'utf8'); + try { + flowRemoveTypes(code, { all: all }); + } catch (error) { + var lines = code.split(/\r\n?|\n|\u2028|\u2029/); + process.stdout.write(source + ':' + error.loc.line + '\n'); + process.stdout.write(lines[error.loc.line - 1] + '\n'); + process.stdout.write(Array(error.loc.column + 1).join(' ') + '^\n'); + process.stdout.write(error.stack + '\n'); + return process.exit(1); + } + +// Or run the script. +} else if (source) { + var absoluteSource = path.resolve(process.cwd(), source); + process.argv = [ 'node' ].concat( + absoluteSource, + process.argv.slice(i) + ); + process.execArgv.unshift(__filename); + Module.runMain(); + +// Or begin a REPL. +} else { + repl.start({ + prompt: '> ', + input: process.stdin, + output: process.stdout, + useGlobal: true, + eval: function (code, context, filename, callback) { + var error; + var result; + try { + var runCode = flowRemoveTypes(code, { all: true }).toString(); + try { + result = vm.runInThisContext(runCode, { filename: filename }); + } catch (runError) { + error = runError; + } + } catch (transformError) { + error = repl.Recoverable ? new repl.Recoverable(transformError) : transformError; + } + callback(error, result); + } + }); +} diff --git a/packages/flow-remove-types/flow-remove-types b/packages/flow-remove-types/flow-remove-types new file mode 100755 index 00000000000..cee1862d9f0 --- /dev/null +++ b/packages/flow-remove-types/flow-remove-types @@ -0,0 +1,261 @@ +#!/usr/bin/env node +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +var fs = require('fs'); +var path = require('path'); + +var flowRemoveTypes = require('./index'); + +var usage = 'Usage: flow-remove-types [options] [sources] \n' + + +'\nOptions:\n' + +' -h, --help Show this message\n' + +' -v, --version Prints the current version of flow-remove-types\n' + +' -i, --ignore Paths to ignore, Regular Expression\n' + +' -x, --extensions File extensions to transform\n' + +' -o, --out-file The file path to write transformed file to\n' + +' -d, --out-dir The directory path to write transformed files within\n' + +' -a, --all Transform all files, not just those with a @flow comment\n' + +' -p, --pretty Remove flow types without replacing with spaces, \n' + +' producing prettier output but may require using source maps\n' + +' -m, --sourcemaps Also output source map files. 
Optionally pass "inline"\n' + +' -q, --quiet Does not produce any output concerning successful progress.\n' + + +'\nExamples:\n' + + +'\nTransform one file:\n' + +' flow-remove-types --out-file output.js input.js\n' + + +'\nTransform many files:\n' + +' flow-remove-types --out-dir out/ input1.js input2.js\n' + + +'\nTransform files in directory:\n' + +' flow-remove-types --out-dir out/ indir/\n' + + +'\nTransform files with source maps:\n' + +' flow-remove-types --out-dir out/ indir/ --sourcemaps\n' + + +'\nTransform files with inline source maps:\n' + +' flow-remove-types --out-dir out/ indir/ --sourcemaps inline\n' + + +'\nTransform stdin:\n' + +' cat input.js | flow-remove-types > output.js\n'; + +var _memo = {}; + +function mkdirp(dirpath) { + if (_memo[dirpath]) { + return; + } + _memo[dirpath] = true; + try { + fs.mkdirSync(dirpath); + } catch (err) { + if (err.code === 'ENOENT') { + mkdirp(path.dirname(dirpath)); + fs.mkdirSync(dirpath); + } else { + try { + stat = fs.statSync(dirpath); + } catch (ignored) { + throw err; + } + if (!stat.isDirectory()) { + throw err; + } + } + } +} + +// Collect arguments +var ignore = /node_modules/; +var extensions = [ '.js', '.mjs', '.jsx', '.flow', '.es6' ]; +var outDir; +var outFile; +var all; +var pretty; +var sourceMaps; +var inlineSourceMaps; +var quiet; +var sources = []; +var i = 2; +while (i < process.argv.length) { + var arg = process.argv[i++]; + if (arg === '-h' || arg === '--help') { + process.stdout.write(usage); + process.exit(0); + } else if (arg === '-v' || arg === '--version') { + process.stdout.write('v' + require('./package').version); + process.exit(0); + } else if (arg === '-i' || arg === '--ignore') { + ignore = new RegExp(process.argv[i++]); + } else if (arg === '-x' || arg === '--extensions') { + extensions = process.argv[i++].split(','); + } else if (arg === '-o' || arg === '--out-file') { + outFile = process.argv[i++]; + } else if (arg === '-d' || arg === '--out-dir') { + outDir = process.argv[i++]; + } else if (arg === '-a' || arg === '--all') { + all = true; + } else if (arg === '-p' || arg === '--pretty') { + pretty = true; + } else if (arg === '-m' || arg === '--sourcemaps') { + sourceMaps = true; + if (process.argv[i] === 'inline') { + inlineSourceMaps = true; + i++; + } + } else if (arg === '-q' || arg === '--quiet') { + quiet = true; + } else { + sources.push(arg); + } +} + +function info(msg) { + if (!quiet) { + process.stderr.write(msg); + } +} + +function error(msg) { + process.stderr.write('\n\033[31m ' + msg + '\033[0m\n\n'); + process.exit(1); +} + +// Validate arguments +if (outDir && outFile) { + error('Only specify one of --out-dir or --out-file'); +} + +if (outDir && sources.length === 0) { + error('Must specify files when providing --out-dir'); +} + +if (!outDir && !outFile && sourceMaps && !inlineSourceMaps) { + error('Must specify either an output path or inline source maps'); +} + +// Ensure all sources exist +for (var i = 0; i < sources.length; i++) { + try { + var stat = fs.lstatSync(sources[i]); + if (sources.length > 1 && !stat.isFile()) { + error('Source "' + sources[i] + '" is not a file.'); + } + } catch (err) { + error('Source "' + sources[i] + '" does not exist.'); + } +} + +// Process stdin if no sources were provided +if (sources.length === 0) { + var content = ''; + process.stdin.setEncoding('utf-8'); + process.stdin.resume(); + process.stdin.on('data', function (str) { content += str; }); + process.stdin.on('end', function () { + transformAndOutput(content, outFile); + }); + return; +} + 
+var isDirSource = sources.length === 1 && fs.statSync(sources[0]).isDirectory(); + +if ((sources.length > 1 || isDirSource) && !outDir) { + error('Multiple files require providing --out-dir'); +} + +// Process multiple files +for (var i = 0; i < sources.length; i++) { + var source = sources[i]; + var stat = fs.lstatSync(source); + if (stat.isDirectory()) { + var files = fs.readdirSync(source); + for (var j = 0; j < files.length; j++) { + var subSource = path.join(source, files[j]); + if (!ignore || !ignore.test(subSource)) { + sources.push(subSource); + } + } + } else if (stat.isFile() && extensions.indexOf(path.extname(source)) !== -1) { + if (outDir) { + outFile = path.join(outDir, isDirSource ? path.relative(sources[0], source) : source); + mkdirp(path.dirname(outFile)); + } + var content = fs.readFileSync(source, 'utf8'); + transformAndOutput(content, outFile, source); + } +} + +function transformAndOutput(content, outFile, source) { + var fileName = source || ''; + var result = transformSource(content, fileName); + var code = result.toString(); + + if (sourceMaps) { + var map = result.generateMap(); + delete map.file; + map.sources[0] = fileName; + + if (source) { + delete map.sourcesContent; + if (outFile) { + map.sources[0] = path.join(path.relative(path.dirname(outFile), path.dirname(source)), path.basename(source)); + } + } else { + map.sourcesContent = [content]; + } + + code += '\n//# sourceMappingURL=' + (inlineSourceMaps ? + 'data:application/json;charset=utf-8;base64,' + btoa(JSON.stringify(map)) : + path.basename(outFile) + '.map' + ) + '\n'; + } + + if (outFile) { + fs.writeFileSync(outFile, code); + info(fileName + '\n \u21B3 \033[32m' + outFile + '\033[0m\n'); + if (sourceMaps && !inlineSourceMaps) { + var mapOutFile = outFile + '.map'; + fs.writeFileSync(mapOutFile, JSON.stringify(map) + '\n'); + info('\033[2m \u21B3 \033[32m' + mapOutFile + '\033[0m\n'); + } + } else { + process.stdout.write(code); + } +} + +function btoa(str) { + // There are 5.x versions of Node that have `Buffer.from` but don't have the + // `Buffer.from(string)` overload, so check for other new methods to be sure. + return (Buffer.from && Buffer.alloc && Buffer.allocUnsafe + ? Buffer.from(str) + : new Buffer(str) + ).toString('base64'); +} + +function transformSource(content, filepath) { + try { + return flowRemoveTypes(content, { all: all, pretty: pretty }); + } catch (error) { + if (error.loc) { + var line = error.loc.line - 1; + var col = error.loc.column; + var text = content.split(/\r\n?|\n|\u2028|\u2029/)[line]; + process.stderr.write( + filepath + '\n' + + ' \u21B3 \033[31mSyntax Error: ' + error.message + '\033[0m\n' + + ' \033[90m' + line + ': \033[0m' + + text.slice(0, col) + '\033[7;31m' + text[col] + '\033[0m' + text.slice(col + 1) + '\n' + ); + process.exit(1); + } + throw error; + } +} diff --git a/packages/flow-remove-types/index.js b/packages/flow-remove-types/index.js new file mode 100644 index 00000000000..46c8588d783 --- /dev/null +++ b/packages/flow-remove-types/index.js @@ -0,0 +1,619 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +var parse = require('flow-parser').parse; +var vlq = require('vlq'); + +/** + * Given a string JavaScript source which contains Flow types, return a string + * which has removed those types. 
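+ * For example, `var n: number = 1` comes back as `var n = 1` when the pretty
+ * option is set, or with `: number` replaced by an equal run of spaces otherwise.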
+ * + * Options: + * + * - all: (default: false) + * If true, bypasses looking for an @flow pragma comment before parsing. + * + * - pretty: (default: false) + * If true, removes types completely rather than replacing with spaces. + * This may require using source maps. + * + * Returns an object with two methods: + * + * - .toString() + * Returns the transformed source code. + * + * - .generateMap() + * Returns a v3 source map. + */ +module.exports = function flowRemoveTypes(source, options) { + // Options + var all = Boolean(options && options.all); + if (options && options.checkPragma) { + throw new Error( + 'flow-remove-types: the "checkPragma" option has been replaced by "all".' + ); + } + + // If there's no @flow or @noflow flag, then expect no annotation. + var pragmaStart = source.indexOf('@' + 'flow'); + var pragmaSize = 5; + if (pragmaStart === -1) { + pragmaStart = source.indexOf('@' + 'noflow'); + pragmaSize = 7; + if (pragmaStart === -1 && !all) { + return resultPrinter(options, source); + } + } + + // This parse configuration is intended to be as permissive as possible. + var ast = parse(source, { + esproposal_decorators: true, + esproposal_class_instance_fields: true, + esproposal_class_static_fields: true, + esproposal_export_star_as: true, + esproposal_optional_chaining: true, + esproposal_nullish_coalescing: true, + types: true, + tokens: true, + }); + + var removedNodes = []; + + var context = { + ast: ast, + source: source, + removedNodes: removedNodes, + pretty: Boolean(options && options.pretty), + }; + + // Remove the flow pragma. + if (pragmaStart !== -1) { + var comments = getComments(ast); + var pragmaIdx = findTokenIndex(comments, pragmaStart); + if (pragmaIdx >= 0 && pragmaIdx < comments.length) { + var pragmaType = comments[pragmaIdx].type; + if (pragmaType === 'Line' || pragmaType === 'Block') { + removedNodes.push(getPragmaNode(context, pragmaStart, pragmaSize)); + } + } + } + + // Remove all flow type definitions. + visit(ast, context, removeFlowVisitor); + + return resultPrinter(options, source, removedNodes); +}; + +function resultPrinter(options, source, removedNodes) { + // Options + var pretty = Boolean(options && options.pretty); + + return { + toString: function() { + if (!removedNodes || removedNodes.length === 0) { + return source; + } + + var result = ''; + var lastPos = 0; + + // Step through the removed nodes, building up the resulting string. + for (var i = 0; i < removedNodes.length; i++) { + var node = removedNodes[i]; + result += source.slice(lastPos, startOf(node)); + lastPos = endOf(node); + if (typeof node.__spliceValue === 'string') { + result += node.__spliceValue; + } + if (!pretty) { + var toReplace = source.slice(startOf(node), endOf(node)); + if (!node.loc || node.loc.start.line === node.loc.end.line) { + result += space(toReplace.length); + } else { + var toReplaceLines = toReplace.split(LINE_RX); + result += space(toReplaceLines[0].length); + for (var j = 1; j < toReplaceLines.length; j += 2) { + result += toReplaceLines[j] + space(toReplaceLines[j + 1].length); + } + } + } + } + + return (result += source.slice(lastPos)); + }, + generateMap: function() { + return { + version: 3, + sources: ['source.js'], + names: [], + mappings: pretty ? generateSourceMappings(removedNodes) : '', + }; + }, + }; +} + +var LINE_RX = /(\r\n?|\n|\u2028|\u2029)/; + +// A collection of methods for each AST type names which contain Flow types to +// be removed. 
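+// For example, the `: number` in `function f(x: number) {}` is a TypeAnnotation
+// node, and `import type {T} from 'mod'` is an ImportDeclaration with importKind
+// 'type'; the matching visitors below record those ranges for removal and return
+// false so the walk does not descend into nodes that are already being removed.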
+var removeFlowVisitor = { + DeclareClass: removeNode, + DeclareFunction: removeNode, + DeclareInterface: removeNode, + DeclareModule: removeNode, + DeclareTypeAlias: removeNode, + DeclareVariable: removeNode, + InterfaceDeclaration: removeNode, + TypeAlias: removeNode, + TypeAnnotation: removeNodeIfNotCommentType, + TypeParameterDeclaration: removeNode, + TypeParameterInstantiation: removeNode, + InferredPredicate: removeNode, + OpaqueType: removeNode, + DeclareOpaqueType: removeNode, + DeclareExportDeclaration: removeNode, + + ClassDeclaration: removeImplementedInterfaces, + ClassExpression: removeImplementedInterfaces, + + Identifier: function(context, node, ast) { + if (node.optional) { + // Find the optional token. + var idx = findTokenIndex(ast.tokens, startOf(node)); + do { + idx++; + } while (getLabel(ast.tokens[idx]) !== '?'); + removeNode(context, ast.tokens[idx]); + } + }, + + ClassProperty: function(context, node) { + if (!node.value) { + return removeNode(context, node); + } + }, + + ExportNamedDeclaration: function(context, node) { + if (node.exportKind === 'type' || node.exportKind === 'typeof') { + return removeNode(context, node); + } + }, + + ImportDeclaration: function(context, node) { + if (node.importKind === 'type' || node.importKind === 'typeof') { + return removeNode(context, node); + } + }, + + ImportSpecifier: function(context, node) { + if (node.importKind === 'type' || node.importKind === 'typeof') { + var ast = context.ast; + + // Flow quirk: Remove importKind which is outside the node + var idxStart = findTokenIndex(ast.tokens, startOf(node)); + var maybeImportKind = ast.tokens[idxStart - 1]; + var maybeImportKindLabel = getLabel(maybeImportKind); + if ( + maybeImportKindLabel === 'type' || + maybeImportKindLabel === 'typeof' + ) { + removeNode(context, maybeImportKind); + } + + // Remove the node itself + removeNode(context, node); + + // Remove trailing comma + var idx = findTokenIndex(ast.tokens, endOf(node)); + + while (isComment(ast.tokens[idx])) { + // NOTE: ast.tokens has no comments in Flow + idx++; + } + if (getLabel(ast.tokens[idx]) === ',') { + removeNode(context, ast.tokens[idx]); + } + return false; + } + }, + + ArrowFunctionExpression: function(context, node) { + // Naively erasing a multi-line return type from an arrow function will + // leave a newline between the parameter list and the arrow, which is not + // valid JS. Detect this here and move the arrow up to the correct line. + + if (context.pretty) { + // Pretty-printing solves this naturally. Good, because our arrow-fudging + // below doesn't play nice with source maps... Which are only created when + // using --pretty. + return; + } + var returnType = node.returnType; + if (returnType) { + var ast = context.ast; + var paramEndIdx = findTokenIndex(ast.tokens, startOf(returnType)); + do { + paramEndIdx--; + } while (isComment(ast.tokens[paramEndIdx])); + + var arrowIdx = findTokenIndex(ast.tokens, endOf(returnType)); + while (getLabel(ast.tokens[arrowIdx]) !== '=>') { + arrowIdx++; + } + + if ( + ast.tokens[paramEndIdx].loc.end.line < + ast.tokens[arrowIdx].loc.start.line + ) { + // Insert an arrow immediately after the parameter list. + removeNode( + context, + getSpliceNodeAtPos( + context, + endOf(ast.tokens[paramEndIdx]), + ast.tokens[paramEndIdx].loc.end, + ' =>' + ) + ); + + // Delete the original arrow token. + removeNode(context, ast.tokens[arrowIdx]); + } + } + }, +}; + +// If this class declaration or expression implements interfaces, remove +// the associated tokens. 
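+// e.g. `class Bar extends Other implements Foo, ISomething {}` keeps only
+// `class Bar extends Other {}`: every token from `implements` through the last
+// interface name is marked for removal.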
+function removeImplementedInterfaces(context, node, ast) { + if (node.implements && node.implements.length > 0) { + var first = node.implements[0]; + var last = node.implements[node.implements.length - 1]; + var idx = findTokenIndex(ast.tokens, startOf(first)); + do { + idx--; + } while (ast.tokens[idx].value !== 'implements'); + + var lastIdx = findTokenIndex(ast.tokens, startOf(last)); + do { + if (!isComment(ast.tokens[idx])) { + // NOTE: ast.tokens has no comments in Flow + removeNode(context, ast.tokens[idx]); + } + } while (idx++ !== lastIdx); + } +} + +// Append node to the list of removed nodes, ensuring the order of the nodes +// in the list. +function removeNode(context, node) { + var removedNodes = context.removedNodes; + var length = removedNodes.length; + var index = length; + + // Check for line's leading and trailing space to be removed. + var spaceNode = context.pretty ? getLeadingSpaceNode(context, node) : null; + var lineNode = context.pretty ? getTrailingLineNode(context, node) : null; + + while (index > 0 && endOf(removedNodes[index - 1]) > startOf(node)) { + index--; + } + + if (index === length) { + if (spaceNode) { + removedNodes.push(spaceNode); + } + removedNodes.push(node); + if (lineNode) { + removedNodes.push(lineNode); + } + } else { + if (lineNode) { + if (spaceNode) { + removedNodes.splice(index, 0, spaceNode, node, lineNode); + } else { + removedNodes.splice(index, 0, node, lineNode); + } + } else if (spaceNode) { + removedNodes.splice(index, 0, spaceNode, node); + } else { + removedNodes.splice(index, 0, node); + } + } + + return false; +} + +function removeNodeIfNotCommentType(context, node) { + var source = context.source; + var start = startOf(node); + if (source[start] === '/') { + return false; + } + return removeNode(context, node); +} + +function getPragmaNode(context, start, size) { + var source = context.source; + var line = 1; + var column = 0; + for (var position = 0; position < start; position++) { + var char = source[position]; + if (char === '\n') { + line++; + column = 0; + } else if (char === '\r') { + if (source[position + 1] === '\n') { + position++; + } + line++; + column = 0; + } else { + column++; + } + } + return createNode({ + start: start, + end: start + size, + loc: { + start: {line: line, column: column}, + end: {line: line, column: column + size}, + }, + }); +} + +function getLeadingSpaceNode(context, node) { + var source = context.source; + var end = startOf(node); + var start = end; + while (source[start - 1] === ' ' || source[start - 1] === '\t') { + start--; + } + if (start !== end) { + return createNode({ + start: start, + end: end, + loc: {start: node.loc.start, end: node.loc.start}, + }); + } +} + +function getTrailingLineNode(context, node) { + var source = context.source; + var start = endOf(node); + var end = start; + while (source[end] === ' ' || source[end] === '\t') { + end++; + } + + // Remove all space including the line break if this token was alone on the line. + if (source[end] === '\n' || source[end] === '\r') { + if (source[end] === '\r' && source[end + 1] === '\n') { + end++; + } + end++; + + if (isLastNodeRemovedFromLine(context, node)) { + return createNode({ + start: start, + end: end, + loc: {start: node.loc.end, end: node.loc.end}, + }); + } + } +} + +// Creates a zero-width "node" with a value to splice at that position. +// WARNING: This is only safe to use when source maps are off! 
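+// (Used by the ArrowFunctionExpression visitor above to splice ' =>' in right
+// after the parameter list when erasing a multi-line return type would otherwise
+// leave the original arrow stranded on a later line.)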
+function getSpliceNodeAtPos(context, pos, loc, value) { + return createNode({ + start: pos, + end: pos, + loc: {start: loc, end: loc}, + __spliceValue: value, + }); +} + +// Returns true if node is the last to be removed from a line. +function isLastNodeRemovedFromLine(context, node) { + var tokens = context.ast.tokens; + var priorTokenIdx = findTokenIndex(tokens, startOf(node)) - 1; + var token = tokens[priorTokenIdx]; + var line = node.loc.end.line; + + // Find previous token that was not removed on the same line. + while ( + priorTokenIdx >= 0 && + token.loc.end.line === line && + isRemovedToken(context, token) + ) { + token = tokens[--priorTokenIdx]; + } + + // If there's no prior token (start of file), or the prior token is on another + // line, this line must be fully removed. + return !token || token.loc.end.line !== line; +} + +// Returns true if the provided token was previously marked as removed. +function isRemovedToken(context, token) { + var removedNodes = context.removedNodes; + var nodeIdx = removedNodes.length - 1; + + // Find the last removed node which could possibly contain this token. + while (nodeIdx >= 0 && startOf(removedNodes[nodeIdx]) > startOf(token)) { + nodeIdx--; + } + + var node = removedNodes[nodeIdx]; + + // This token couldn't be removed if not contained within the removed node. + if (nodeIdx === -1 || endOf(node) < endOf(token)) { + return false; + } + + // Iterate through the tokens contained by the removed node to find a match. + var tokens = context.ast.tokens; + var tokenIdx = findTokenIndex(tokens, startOf(node)); + while (endOf(tokens[tokenIdx]) <= endOf(node)) { + if (token === tokens[tokenIdx]) { + return true; + } + tokenIdx++; + } + + return false; +} + +// Given the AST output from the parser, walk through in a depth-first order, +// calling methods on the given visitor, providing context as the first argument. +function visit(ast, context, visitor) { + var stack; + var parent; + var keys = []; + var index = -1; + + do { + index++; + if (stack && index === keys.length) { + parent = stack.parent; + keys = stack.keys; + index = stack.index; + stack = stack.prev; + } else { + var node = parent ? parent[keys[index]] : getProgram(ast); + if (node && typeof node === 'object' && (node.type || node.length)) { + if (node.type) { + var visitFn = visitor[node.type]; + if (visitFn && visitFn(context, node, ast) === false) { + continue; + } + } + stack = {parent: parent, keys: keys, index: index, prev: stack}; + parent = node; + keys = Object.keys(node); + index = -1; + } + } + } while (stack); +} + +// Given an array of sorted tokens, find the index of the token which contains +// the given offset. Uses binary search for O(log N) performance. +function findTokenIndex(tokens, offset) { + var min = 0; + var max = tokens.length - 1; + + while (min <= max) { + var ptr = ((min + max) / 2) | 0; + var token = tokens[ptr]; + if (endOf(token) <= offset) { + min = ptr + 1; + } else if (startOf(token) > offset) { + max = ptr - 1; + } else { + return ptr; + } + } + + return ptr; +} + +// True if the provided token is a comment. +function isComment(token) { + return token.type === 'Block' || token.type === 'Line'; +} + +// Produce a string full of space characters of a given size. 
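+// e.g. space(4) === '    '; the pad is doubled each round, so building the
+// result takes O(log size) string concatenations rather than one per character.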
+function space(size) { + var sp = ' '; + var result = ''; + + for (;;) { + if ((size & 1) === 1) { + result += sp; + } + size >>>= 1; + if (size === 0) { + break; + } + sp += sp; + } + return result; +} + +// Generate a source map when *removing* nodes rather than replacing them +// with spaces. +function generateSourceMappings(removedNodes) { + var mappings = ''; + if (!removedNodes || removedNodes.length === '') { + return mappings; + } + + var end = {line: 1, column: 0}; + + for (var i = 0; i < removedNodes.length; i++) { + var start = removedNodes[i].loc.start; + var lineDiff = start.line - end.line; + var columnDiff = start.column - end.column; + if (lineDiff) { + for (var l = 0; l !== lineDiff; l++) { + mappings += ';'; + } + mappings += vlq.encode([start.column, 0, lineDiff, columnDiff]); + } else if (columnDiff) { + if (i) { + mappings += ','; + } + mappings += vlq.encode([columnDiff, 0, lineDiff, columnDiff]); + } + + end = removedNodes[i].loc.end; + mappings += ','; + mappings += vlq.encode([ + 0, + 0, + end.line - start.line, + end.column - start.column, + ]); + } + + return mappings; +} + +/** + * A lightweight layer to abstract over the slightly different ASTs returned by + * Flow vs Babylon. + */ + +function startOf(token) { + return token.range[0]; +} + +function endOf(token) { + return token.range[1]; +} + +function getComments(ast) { + return ast.comments; +} + +function createNode(data) { + return { + range: [data.start, data.end], + loc: data.loc, + __spliceValue: data.__spliceValue, + }; +} + +function getLabel(token) { + return token.value; +} + +function getProgram(ast) { + return ast; +} diff --git a/packages/flow-remove-types/package.json b/packages/flow-remove-types/package.json new file mode 100644 index 00000000000..ee7c6401e42 --- /dev/null +++ b/packages/flow-remove-types/package.json @@ -0,0 +1,53 @@ +{ + "name": "flow-remove-types", + "version": "2.108.0", + "description": "Removes Flow type annotations from JavaScript files with speed and simplicity.", + "author": { + "name": "Flow Team", + "email": "flow@fb.com" + }, + "contributors": [ + "Lee Byron (http://leebyron.com/)" + ], + "license": "MIT", + "main": "index.js", + "bin": { + "flow-remove-types": "./flow-remove-types", + "flow-node": "./flow-node" + }, + "files": [ + "index.js", + "register.js", + "flow-remove-types", + "flow-node", + "LICENSE" + ], + "homepage": "https://flow.org", + "bugs": { + "url": "https://github.com/facebook/flow/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/facebook/flow.git" + }, + "scripts": { + "test": "./test.sh", + "test-update": "./test-update.sh" + }, + "keywords": [ + "flow", + "flowtype", + "compiler", + "transpiler", + "transform", + "es6" + ], + "dependencies": { + "flow-parser": "^0.108.0", + "pirates": "^3.0.2", + "vlq": "^0.2.1" + }, + "engines": { + "node": ">=4" + } +} diff --git a/packages/flow-remove-types/register.js b/packages/flow-remove-types/register.js new file mode 100644 index 00000000000..6841aee5d17 --- /dev/null +++ b/packages/flow-remove-types/register.js @@ -0,0 +1,76 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +var flowRemoveTypes = require('./index'); +var pirates = require('pirates'); + +// Supported options: +// +// - all: Transform all files, not just those with a @flow comment. 
+// - includes: A Regexp/String to determine which files should be transformed. +// (alias: include) +// - excludes: A Regexp/String to determine which files should not be +// transformed, defaults to ignoring /node_modules/, provide null +// to exclude nothing. (alias: exclude) +var options; +module.exports = function setOptions(newOptions) { + options = newOptions; +}; + +var jsLoader = require.extensions['.js']; +var exts = ['.js', '.mjs', '.jsx', '.flow', '.es6']; + +var revert = pirates.addHook( + function hook(code, filename) { + try { + return flowRemoveTypes(code, options).toString(); + } catch (e) { + e.message = filename + ': ' + e.message; + throw e; + } + }, + {exts: exts, matcher: shouldTransform} +); + +function shouldTransform(filename) { + var includes = options && regexpPattern(options.includes || options.include); + var excludes = + options && 'excludes' in options + ? regexpPattern(options.excludes) + : options && 'exclude' in options + ? regexpPattern(options.exclude) + : /\/node_modules\//; + return ( + (!includes || includes.test(filename)) && + !(excludes && excludes.test(filename)) + ); +} + +// Given a null | string | RegExp | any, returns null | Regexp or throws a +// more helpful error. +function regexpPattern(pattern) { + if (!pattern) { + return pattern; + } + // A very simplified glob transform which allows passing legible strings like + // "myPath/*.js" instead of a harder to read RegExp like /\/myPath\/.*\.js/. + if (typeof pattern === 'string') { + pattern = pattern.replace(/\./g, '\\.').replace(/\*/g, '.*'); + if (pattern[0] !== '/') { + pattern = '/' + pattern; + } + return new RegExp(pattern); + } + if (typeof pattern.test === 'function') { + return pattern; + } + throw new Error( + 'flow-remove-types: ' + + 'includes and excludes must be RegExp or path strings. Got: ' + + pattern + ); +} diff --git a/packages/flow-remove-types/test-update.sh b/packages/flow-remove-types/test-update.sh new file mode 100755 index 00000000000..04da9df3cc2 --- /dev/null +++ b/packages/flow-remove-types/test-update.sh @@ -0,0 +1,20 @@ +#!/bin/sh +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +# Generate expected output +./flow-remove-types test/source.js > test/expected.js; + +# Generate expected output with --pretty flag +./flow-remove-types --pretty test/source.js > test/expected-pretty.js; + +# Test expected source maps with --pretty --sourcemaps +./flow-remove-types --pretty --sourcemaps test/source.js -d test/expected-with-maps; + +# Test expected source maps with --pretty --sourcemaps inline +./flow-remove-types --pretty --sourcemaps inline test/source.js > test/expected-pretty-inlinemap.js; + +# Test expected source maps with --pretty --sourcemaps inline, from stdin +./flow-remove-types --pretty --sourcemaps inline < test/source.js > test/expected-pretty-inlinemap-stdin.js; diff --git a/packages/flow-remove-types/test.sh b/packages/flow-remove-types/test.sh new file mode 100755 index 00000000000..344f9b9fb8d --- /dev/null +++ b/packages/flow-remove-types/test.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
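+
+# The checks below compare flow-remove-types output against the checked-in
+# files under test/ (or a known value) and exit 1 on the first mismatch.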
+ +# Test expected output +echo "Test: flow-remove-types test/source.js" +DIFF=$(./flow-remove-types test/source.js | diff test/expected.js -); +if [ -n "$DIFF" ]; then echo "$DIFF"; exit 1; fi; + +# Test expected output with --pretty flag +echo "Test: flow-remove-types --pretty test/source.js" +DIFF=$(./flow-remove-types --pretty test/source.js | diff test/expected-pretty.js -); +if [ -n "$DIFF" ]; then echo "$DIFF"; exit 1; fi; + +# Test expected source maps with --pretty --sourcemaps +echo "Test: flow-remove-types --pretty --sourcemaps test/source.js -d test/expected-with-maps" +TEST_DIR=$( + DIR=$(dirname "${BASH_SOURCE[0]}"); + cd "$DIR" && pwd +) +DIR=$(mktemp -d) +cp -r test "$DIR" +pushd "$DIR" || exit 1 > /dev/null +"$TEST_DIR/flow-remove-types" --pretty --sourcemaps test/source.js -d test/expected-with-maps; +popd || exit 1 > /dev/null +DIFF_SOURCE=$(diff test/expected-with-maps/test/source.js "$DIR/test/expected-with-maps/test/source.js"); +DIFF_MAP=$(diff test/expected-with-maps/test/source.js.map "$DIR/test/expected-with-maps/test/source.js.map"); +rm -rf "$DIR" +if [ -n "$DIFF_SOURCE" ]; then echo "$DIFF_SOURCE"; exit 1; fi; +if [ -n "$DIFF_MAP" ]; then echo "$DIFF_MAP"; exit 1; fi; + +# Test expected source maps with --pretty --sourcemaps inline +echo "Test: flow-remove-types --pretty --sourcemaps inline test/source.js" +DIFF=$(./flow-remove-types --pretty --sourcemaps inline test/source.js | diff test/expected-pretty-inlinemap.js -); +if [ -n "$DIFF" ]; then echo "$DIFF"; exit 1; fi; + +# Test expected source maps with --pretty --sourcemaps inline, from stdin +echo "Test: flow-remove-types --pretty --sourcemaps inline < test/source.js" +DIFF=$(./flow-remove-types --pretty --sourcemaps inline < test/source.js | diff test/expected-pretty-inlinemap-stdin.js -); +if [ -n "$DIFF" ]; then echo "$DIFF"; exit 1; fi; + +# Test expected output with @flow outside of comments +echo "Test: flow-remove-types test/without-flow.js" +DIFF=$(./flow-remove-types test/without-flow.js | diff test/without-flow.js -); +if [ -n "$DIFF" ]; then echo "$DIFF"; exit 1; fi; + +# Test node require hook +echo "Test: node require hook" +RES=$(node -e 'require("./register");require("./test/test-node-module.js")'); +if [ "$RES" != 42 ]; then echo 'Node require hook failed'; exit 1; fi; + +# Test flow-node +echo "Test: flow-node" +FLOW_NODE=$(./flow-node ./test/test-node-module.js); +if [ "$FLOW_NODE" != 42 ]; then echo 'flow-node failed'; exit 1; fi; + +# Test flow-node with options +echo "Test: flow-node with options" +FLOW_NODE_OPTS=$(./flow-node --code-comments -p 'process.argv.length'); +if [ "$FLOW_NODE_OPTS" != 4 ]; then echo 'flow-node with options failed'; exit 1; fi; diff --git a/packages/flow-remove-types/test/expected-pretty-inlinemap-stdin.js b/packages/flow-remove-types/test/expected-pretty-inlinemap-stdin.js new file mode 100644 index 00000000000..0acc5bf22ae --- /dev/null +++ b/packages/flow-remove-types/test/expected-pretty-inlinemap-stdin.js @@ -0,0 +1,160 @@ +/* */ +// @nolint + +// Regular import +import { + Something, +} from 'some-module'; + +// Import types + +// Typed function +async function test(x, y /*.*/ /*.*/ , z /*.*/ /*.*/ = 123) { + // Typed expression + return await (x); +} + +// Interface + +// Exported interface + +// Interface extends + +// Implements interface +class Bar extends Other /*.*/ { + // Class Property with default value + answer = 42; + + // Class Property + + method() { + return; + } +} + +// Class expression implements interface +var SomeClass = class Baz { + 
+ method() { + return; + } +}; + +// Parametric class +class Wrapper { + get() { + return this.value; + } + + map() { + // do something + } +} + +// Extends Parametric class +class StringWrapper extends Wrapper { + // ... +} + +// Declare class + +// Declare funtion + +// Declare interface + +// Declare module + +// Declare type alias + +// Declare variable + +// Type alias + +// Export type + +// Regular export +export { Wrapper }; + +// Exported type alias + +// Object with types within +var someObj = { + objMethod() { + // do nothing. + } +} + +// Example from README +import SomeClass from 'some-module' + +export class MyClass extends SomeClass { + + + constructor(value) { + this.value = value + } + + get() { + return this.value + } + +} + +// Test async/await functions +async function asyncFunction(input) { + return await t; +} + +// Test read-only data + +// Test covariant type variant class with constaint and default. +export class TestClassWithDefault { + + constructor() {} +} + +var newline_arrow = () => 42; + +var newline_arrow_2 = ()=>42; + +// Test calling a function with explicit type arguments +doSomething(3); +doSomething(3); + +// Test invoking a constructor with explicit type arguments +new Event(); + +// Test type union and intersection syntax with leading "operator" +var union; +var intersection; + +// Test generic async arrow funcion +const f = async() => {}; + +// Comment type annotations are preserved +var X /*: { + version: string, +} */ = { version: '42'}; + +function method(param /*: string */) /*: number */ { + // ... +} + +// Comment type includes are emptied out +class MyClass { + /*:: */ +} + +// Inferred predicate +function testit(arg) { + return !!arg; +} + +// Test function with default type parameter +function f() {} + +// Opaque types + +// Declare export + +//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIjxzdGRpbj4iXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkdBQUcsQUFBSzs7Ozs7O0VBTU4sQUFBQSxBQUFJLENBQUMsQUFBQSxBQUFRLEFBQUMsQUFBQTtFQUNkLEFBQUEsQUFBTSxDQUFDLEFBQUEsQUFBYyxBQUFBOzs7O0FBSXZCLEFBQTRDLEFBQUE7OztxQkFHdkIsQUFBTSxVQUFVLEFBQUEsQUFBQyxpQkFBaUIsQUFBQSxBQUFDLE9BQU8sQUFBQSxBQUFjLE9BQU8sQUFBUTs7aUJBRTNFLEFBQUs7Ozs7QUFJdEIsQUFJQyxBQUFBOzs7QUFHRCxBQUVDLEFBQUE7OztBQUdELEFBRUMsQUFBQTs7O3dCQUd1QixBQUFBLEFBQVUsT0FBTyxBQUFBLEFBQUcsQUFBQyxDQUFDLEFBQUEsQUFBVTs7UUFFaEQsQUFBUTs7O0VBR2QsQUFBQSxBQUFVLEFBQUE7O1VBRUYsQUFBTzs7Ozs7OzBCQU1TLEFBQUEsQUFBVSxDQUFDLEFBQUEsQUFBRztFQUN0QyxBQUFBLEFBQVUsQUFBQTs7VUFFRixBQUFPOzs7Ozs7YUFNSixBQUFHO09BQ1QsQUFBRzs7OztLQUlMLEFBQUcsRUFBRSxBQUFZOzs7Ozs7bUNBTWEsQUFBUTs7Ozs7QUFLM0MsQUFFQyxBQUFBOzs7QUFHRCxBQUFrQyxBQUFBOzs7QUFHbEMsQUFFQyxBQUFBOzs7QUFHRCxBQUVDLEFBQUE7OztBQUdELEFBR0UsQUFBQTs7O0FBR0YsQUFBK0IsQUFBQTs7O0FBRy9CLEFBQWdCLEFBQUE7OztBQUdoQixBQUFrQixBQUFBOzs7Ozs7QUFNbEIsQUFBa0MsQUFBQTs7OzthQUlyQixBQUFNOzs7Ozs7O0FBT25CLEFBQWdELEFBQUE7O29CQUU1QixBQUFHLG1CQUFtQixBQUFBLEFBQVUsQ0FBQyxBQUFBLEFBQWE7O0VBRWhFLEFBQUEsQUFBUSxBQUFBOzttQkFFUyxBQUFHOzs7O09BSWYsQUFBRzs7Ozs7Ozs0QkFPa0IsQUFBRyxNQUFNLEFBQUcsQ0FBQyxBQUFZOzs7OztBQUtyRCxBQUVHLEFBQUE7OztpQ0FHOEIsQUFBaUM7Ozs7O3NCQUs1QyxBQUNoQjs7eUJBRW1CLEFBQUEsQUFDbkI7OztXQUdLLEFBQVE7WUFDUCxBQUFBLEFBQU07OztTQUdULEFBQVE7OztTQUdSLEFBQVM7Z0JBQ0YsQUFBUzs7O2dCQUdULEFBQUEsQUFBRyxFQUFFLEFBQUc7Ozs7Ozs7Ozs7Ozs7T0FhakIsQUFBQSxBQUFhOzs7O21CQUlELEFBQU8sQ0FBQyxBQUFTLENBQUMsQUFBQSxBQUFPOzs7OztVQUtsQyxBQUFVOzs7QUFHcEIsQUFBdUIsQUFBQTtBQUN2QixBQUErQixBQUFBO0FBQy9CLEFBQXNCLEFBQUE7QUFDdEIsQUFBOEIsQUFBQTtBQUM5QixBQUE4QixBQUFBOzs7QUFHOUIsQUFBNkIsQUFBQTtBQUM3QixBQUFxQixBQUFBO0FBQ3JCLEFBQWtDLEFBQUE7QUFDbEMsQUFBeUIsQUFBQSIsInNvdXJjZXNDb250ZW50IjpbIi8qIEBmbG93ICovXG4vLyBAbm9saW50XG5cbi8vIFJlZ3VsYXIgaW1wb3J0XG5pbXBvcnQge1xuICBTb21ldGhpbmcsXG4gIHR5cGUgU29tZVR5cGUsXG4gIHR5cGVvZiBTb21lT3RoZXJUaGluZ1xufSBmcm9tICdzb21lLW1vZHVsZSc7XG5cbi8vIEltcG9ydCB0eXBlc1xuaW1wb3J0IHR5cGUgeyBTb21lVHlwZSB9IGZyb20gJ3NvbWUtbW9kdWxlJztcblxuLy8gVHlwZWQgZnVuY3Rpb25cbmFzeW5jIGZ1bmN0aW9uIHRlc3QoeDogVHlwZSwgeSAvKi4qLyA/IC8qLiovICwgeiAvKi4qLyA/IC8qLiovIDogLyouKi8gbnVtYmVyID0gMTIzKTogc3RyaW5nIHtcbiAgLy8gVHlwZWQgZXhwcmVzc2lvblxuICByZXR1cm4gYXdhaXQgKHg6IGFueSk7XG59XG5cbi8vIEludGVyZmFjZVxuaW50ZXJmYWNlIEZvbyB7XG4gIHByb3A6IGFueTtcblxuICBtZXRob2QoKTogbWl4ZWQ7XG59XG5cbi8vIEV4cG9ydGVkIGludGVyZmFjZVxuZXhwb3J0IGludGVyZmFjZSBJVGhpbmcge1xuICBleHBvcnRlZDogdHJ1ZTtcbn1cblxuLy8gSW50ZXJmYWNlIGV4dGVuZHNcbmludGVyZmFjZSBTaWxseUZvbyBleHRlbmRzIEZvbyB7XG4gIHNpbGx5OiBzdHJpbmc7XG59XG5cbi8vIEltcGxlbWVudHMgaW50ZXJmYWNlXG5jbGFzcyBCYXIgZXh0ZW5kcyBPdGhlciBpbXBsZW1lbnRzIC8qLiovIEZvbywgSVNvbWV0aGluZyB7XG4gIC8vIENsYXNzIFByb3BlcnR5IHdpdGggZGVmYXVsdCB2YWx1ZVxuICBhbnN3ZXI6IG51bWJlciA9IDQyO1xuXG4gIC8vIENsYXNzIFByb3BlcnR5XG4gIHByb3A6IGFueTtcblxuICBtZXRob2QoKTogbWl4ZWQge1xuICAgIHJldHVybjtcbiAgfVxufVxuXG4vLyBDbGFzcyBleHByZXNzaW9uIGltcGxlbWVudHMgaW50ZXJmYWNlXG52YXIgU29tZUNsYXNzID0gY2xhc3MgQmF6IGltcGxlbWVudHMgRm9vIHtcbiAgcHJvcDogYW55O1xuXG4gIG1ldGhvZCgpOiBtaXhlZCB7XG4gICAgcmV0dXJuO1xuICB9XG59O1xuXG4vLyBQYXJhbWV0cmljIGNsYXNzXG5jbGFzcyBXcmFwcGVyPFQ+IHtcbiAgZ2V0KCk6IFQge1xuICAgIHJldHVybiB0aGlzLnZhbHVlO1xuICB9XG5cbiAgbWFwPE0+KCk6IFdyYXBwZXI8TT4ge1xuICAgIC8vIGRvIHNvbWV0aGluZ1xuICB9XG59XG5cbi8vIEV4dGVuZHMgUGFyYW1ldHJpYyBjbGFzc1xuY2xhc3MgU3RyaW5nV3JhcHBlciBleHRlbmRzIFdyYXBwZXI8c3RyaW5nPiB7XG4gIC8vIC4uLlxufVxuXG4vLyBEZWNsYXJlIGNsYXNzXG5kZWNsYXJlIGNsYXNzIEJheiB7XG4gIG1ldGhvZCgpOiBtaXhlZDtcbn1cblxuLy8gRGVjbGFyZSBmdW50aW9uXG5kZWNsYXJlIGZ1bmN0aW9uIHNvbWVGdW5jKCk6IHZvaWQ7XG5
cbi8vIERlY2xhcmUgaW50ZXJmYWNlXG5kZWNsYXJlIGludGVyZmFjZSBJU29tZXRoaW5nIHtcbiAgYW5zd2VyOiBudW1iZXI7XG59XG5cbi8vIERlY2xhcmUgbW9kdWxlXG5kZWNsYXJlIG1vZHVsZSAnZnMnIHtcbiAgZGVjbGFyZSBmdW5jdGlvbiByZWFkVGhpbmcocGF0aDogc3RyaW5nKTogc3RyaW5nO1xufVxuXG4vLyBEZWNsYXJlIHR5cGUgYWxpYXNcbmRlY2xhcmUgdHlwZSBMb2NhdGlvbiA9IHtcbiAgbGF0OiBudW1iZXIsXG4gIGxvbjogbnVtYmVyXG59O1xuXG4vLyBEZWNsYXJlIHZhcmlhYmxlXG5kZWNsYXJlIHZhciBTT01FX0NPTlNUOiBzdHJpbmc7XG5cbi8vIFR5cGUgYWxpYXNcbnR5cGUgVCA9IHN0cmluZztcblxuLy8gRXhwb3J0IHR5cGVcbmV4cG9ydCB0eXBlIHsgVCB9O1xuXG4vLyBSZWd1bGFyIGV4cG9ydFxuZXhwb3J0IHsgV3JhcHBlciB9O1xuXG4vLyBFeHBvcnRlZCB0eXBlIGFsaWFzXG5leHBvcnQgdHlwZSBPTkUgPSB7IG9uZTogbnVtYmVyIH07XG5cbi8vIE9iamVjdCB3aXRoIHR5cGVzIHdpdGhpblxudmFyIHNvbWVPYmogPSB7XG4gIG9iak1ldGhvZCgpOiB2b2lkIHtcbiAgICAvLyBkbyBub3RoaW5nLlxuICB9XG59XG5cbi8vIEV4YW1wbGUgZnJvbSBSRUFETUVcbmltcG9ydCBTb21lQ2xhc3MgZnJvbSAnc29tZS1tb2R1bGUnXG5pbXBvcnQgdHlwZSB7IFNvbWVJbnRlcmZhY2UgfSBmcm9tICdzb21lLW1vZHVsZSdcblxuZXhwb3J0IGNsYXNzIE15Q2xhc3M8VD4gZXh0ZW5kcyBTb21lQ2xhc3MgaW1wbGVtZW50cyBTb21lSW50ZXJmYWNlIHtcblxuICB2YWx1ZTogVFxuXG4gIGNvbnN0cnVjdG9yKHZhbHVlOiBUKSB7XG4gICAgdGhpcy52YWx1ZSA9IHZhbHVlXG4gIH1cblxuICBnZXQoKTogVCB7XG4gICAgcmV0dXJuIHRoaXMudmFsdWVcbiAgfVxuXG59XG5cbi8vIFRlc3QgYXN5bmMvYXdhaXQgZnVuY3Rpb25zXG5hc3luYyBmdW5jdGlvbiBhc3luY0Z1bmN0aW9uPFQ+KGlucHV0OiBUKTogUHJvbWlzZTxUPiB7XG4gIHJldHVybiBhd2FpdCB0O1xufVxuXG4vLyBUZXN0IHJlYWQtb25seSBkYXRhXG5leHBvcnQgdHlwZSBUZXN0UmVhZE9ubHkgPSB7fFxuICArcmVhZE9ubHk6ICRSZWFkT25seUFycmF5PD5cbnx9O1xuXG4vLyBUZXN0IGNvdmFyaWFudCB0eXBlIHZhcmlhbnQgY2xhc3Mgd2l0aCBjb25zdGFpbnQgYW5kIGRlZmF1bHQuXG5leHBvcnQgY2xhc3MgVGVzdENsYXNzV2l0aERlZmF1bHQ8K1Q6IFRlc3RSZWFkT25seSA9IFRlc3RSZWFkT25seT4ge1xuXG4gIGNvbnN0cnVjdG9yKCkge31cbn1cblxudmFyIG5ld2xpbmVfYXJyb3cgPSAoKTpcbm51bWJlciA9PiA0MjtcblxudmFyIG5ld2xpbmVfYXJyb3dfMiA9ICgpIDpcbm51bWJlcj0+NDI7XG5cbi8vIFRlc3QgY2FsbGluZyBhIGZ1bmN0aW9uIHdpdGggZXhwbGljaXQgdHlwZSBhcmd1bWVudHNcbmRvU29tZXRoaW5nPG51bWJlcj4oMyk7XG5kb1NvbWV0aGluZyA8VCwgVT4oMyk7XG5cbi8vIFRlc3QgaW52b2tpbmcgYSBjb25zdHJ1Y3RvciB3aXRoIGV4cGxpY2l0IHR5cGUgYXJndW1lbnRzXG5uZXcgRXZlbnQ8bnVtYmVyPigpO1xuXG4vLyBUZXN0IHR5cGUgdW5pb24gYW5kIGludGVyc2VjdGlvbiBzeW50YXggd2l0aCBsZWFkaW5nIFwib3BlcmF0b3JcIlxudmFyIHVuaW9uOiB8IFQgfCBVO1xudmFyIGludGVyc2VjdGlvbjogJiBUICYgVTtcblxuLy8gVGVzdCBnZW5lcmljIGFzeW5jIGFycm93IGZ1bmNpb25cbmNvbnN0IGYgPSBhc3luYyA8VD4oKTogVCA9PiB7fTtcblxuLy8gQ29tbWVudCB0eXBlIGFubm90YXRpb25zIGFyZSBwcmVzZXJ2ZWRcbnZhciBYIC8qOiB7XG4gIHZlcnNpb246IHN0cmluZyxcbn0gKi8gPSB7IHZlcnNpb246ICc0Mid9O1xuXG5mdW5jdGlvbiBtZXRob2QocGFyYW0gLyo6IHN0cmluZyAqLykgLyo6IG51bWJlciAqLyB7XG4gIC8vIC4uLlxufVxuXG4vLyBDb21tZW50IHR5cGUgaW5jbHVkZXMgYXJlIGVtcHRpZWQgb3V0XG5jbGFzcyBNeUNsYXNzIHtcbiAgLyo6OiBwcm9wOiBzdHJpbmc7ICovXG59XG5cbi8vIEluZmVycmVkIHByZWRpY2F0ZVxuZnVuY3Rpb24gdGVzdGl0KGFyZzogbWl4ZWQpOiBib29sZWFuICVjaGVja3Mge1xuICByZXR1cm4gISFhcmc7XG59XG5cbi8vIFRlc3QgZnVuY3Rpb24gd2l0aCBkZWZhdWx0IHR5cGUgcGFyYW1ldGVyXG5mdW5jdGlvbiBmPFQsIFMgPSBUPigpIHt9XG5cbi8vIE9wYXF1ZSB0eXBlc1xub3BhcXVlIHR5cGUgQSA9IG51bWJlcjtcbm9wYXF1ZSB0eXBlIEI6IHN0cmluZyA9IHN0cmluZztcbmRlY2xhcmUgb3BhcXVlIHR5cGUgQTtcbmRlY2xhcmUgb3BhcXVlIHR5cGUgQjogc3RyaW5nO1xuZXhwb3J0IG9wYXF1ZSB0eXBlIEEgPSBudW1iZXI7XG5cbi8vIERlY2xhcmUgZXhwb3J0XG5kZWNsYXJlIGV4cG9ydCBvcGFxdWUgdHlwZSBCO1xuZGVjbGFyZSBleHBvcnQgdmFyIHg7XG5kZWNsYXJlIGV4cG9ydCBmdW5jdGlvbiB4KCk6IHZvaWQ7XG5kZWNsYXJlIGV4cG9ydCBkZWZhdWx0IFQ7XG4iXX0= diff --git a/packages/flow-remove-types/test/expected-pretty-inlinemap.js b/packages/flow-remove-types/test/expected-pretty-inlinemap.js new file mode 100644 index 00000000000..d46fd43c8e0 --- /dev/null +++ 
b/packages/flow-remove-types/test/expected-pretty-inlinemap.js @@ -0,0 +1,160 @@ +/* */ +// @nolint + +// Regular import +import { + Something, +} from 'some-module'; + +// Import types + +// Typed function +async function test(x, y /*.*/ /*.*/ , z /*.*/ /*.*/ = 123) { + // Typed expression + return await (x); +} + +// Interface + +// Exported interface + +// Interface extends + +// Implements interface +class Bar extends Other /*.*/ { + // Class Property with default value + answer = 42; + + // Class Property + + method() { + return; + } +} + +// Class expression implements interface +var SomeClass = class Baz { + + method() { + return; + } +}; + +// Parametric class +class Wrapper { + get() { + return this.value; + } + + map() { + // do something + } +} + +// Extends Parametric class +class StringWrapper extends Wrapper { + // ... +} + +// Declare class + +// Declare funtion + +// Declare interface + +// Declare module + +// Declare type alias + +// Declare variable + +// Type alias + +// Export type + +// Regular export +export { Wrapper }; + +// Exported type alias + +// Object with types within +var someObj = { + objMethod() { + // do nothing. + } +} + +// Example from README +import SomeClass from 'some-module' + +export class MyClass extends SomeClass { + + + constructor(value) { + this.value = value + } + + get() { + return this.value + } + +} + +// Test async/await functions +async function asyncFunction(input) { + return await t; +} + +// Test read-only data + +// Test covariant type variant class with constaint and default. +export class TestClassWithDefault { + + constructor() {} +} + +var newline_arrow = () => 42; + +var newline_arrow_2 = ()=>42; + +// Test calling a function with explicit type arguments +doSomething(3); +doSomething(3); + +// Test invoking a constructor with explicit type arguments +new Event(); + +// Test type union and intersection syntax with leading "operator" +var union; +var intersection; + +// Test generic async arrow funcion +const f = async() => {}; + +// Comment type annotations are preserved +var X /*: { + version: string, +} */ = { version: '42'}; + +function method(param /*: string */) /*: number */ { + // ... 
+} + +// Comment type includes are emptied out +class MyClass { + /*:: */ +} + +// Inferred predicate +function testit(arg) { + return !!arg; +} + +// Test function with default type parameter +function f() {} + +// Opaque types + +// Declare export + +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbInRlc3Qvc291cmNlLmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJHQUFHLEFBQUs7Ozs7OztFQU1OLEFBQUEsQUFBSSxDQUFDLEFBQUEsQUFBUSxBQUFDLEFBQUE7RUFDZCxBQUFBLEFBQU0sQ0FBQyxBQUFBLEFBQWMsQUFBQTs7OztBQUl2QixBQUE0QyxBQUFBOzs7cUJBR3ZCLEFBQU0sVUFBVSxBQUFBLEFBQUMsaUJBQWlCLEFBQUEsQUFBQyxPQUFPLEFBQUEsQUFBYyxPQUFPLEFBQVE7O2lCQUUzRSxBQUFLOzs7O0FBSXRCLEFBSUMsQUFBQTs7O0FBR0QsQUFFQyxBQUFBOzs7QUFHRCxBQUVDLEFBQUE7Ozt3QkFHdUIsQUFBQSxBQUFVLE9BQU8sQUFBQSxBQUFHLEFBQUMsQ0FBQyxBQUFBLEFBQVU7O1FBRWhELEFBQVE7OztFQUdkLEFBQUEsQUFBVSxBQUFBOztVQUVGLEFBQU87Ozs7OzswQkFNUyxBQUFBLEFBQVUsQ0FBQyxBQUFBLEFBQUc7RUFDdEMsQUFBQSxBQUFVLEFBQUE7O1VBRUYsQUFBTzs7Ozs7O2FBTUosQUFBRztPQUNULEFBQUc7Ozs7S0FJTCxBQUFHLEVBQUUsQUFBWTs7Ozs7O21DQU1hLEFBQVE7Ozs7O0FBSzNDLEFBRUMsQUFBQTs7O0FBR0QsQUFBa0MsQUFBQTs7O0FBR2xDLEFBRUMsQUFBQTs7O0FBR0QsQUFFQyxBQUFBOzs7QUFHRCxBQUdFLEFBQUE7OztBQUdGLEFBQStCLEFBQUE7OztBQUcvQixBQUFnQixBQUFBOzs7QUFHaEIsQUFBa0IsQUFBQTs7Ozs7O0FBTWxCLEFBQWtDLEFBQUE7Ozs7YUFJckIsQUFBTTs7Ozs7OztBQU9uQixBQUFnRCxBQUFBOztvQkFFNUIsQUFBRyxtQkFBbUIsQUFBQSxBQUFVLENBQUMsQUFBQSxBQUFhOztFQUVoRSxBQUFBLEFBQVEsQUFBQTs7bUJBRVMsQUFBRzs7OztPQUlmLEFBQUc7Ozs7Ozs7NEJBT2tCLEFBQUcsTUFBTSxBQUFHLENBQUMsQUFBWTs7Ozs7QUFLckQsQUFFRyxBQUFBOzs7aUNBRzhCLEFBQWlDOzs7OztzQkFLNUMsQUFDaEI7O3lCQUVtQixBQUFBLEFBQ25COzs7V0FHSyxBQUFRO1lBQ1AsQUFBQSxBQUFNOzs7U0FHVCxBQUFROzs7U0FHUixBQUFTO2dCQUNGLEFBQVM7OztnQkFHVCxBQUFBLEFBQUcsRUFBRSxBQUFHOzs7Ozs7Ozs7Ozs7O09BYWpCLEFBQUEsQUFBYTs7OzttQkFJRCxBQUFPLENBQUMsQUFBUyxDQUFDLEFBQUEsQUFBTzs7Ozs7VUFLbEMsQUFBVTs7O0FBR3BCLEFBQXVCLEFBQUE7QUFDdkIsQUFBK0IsQUFBQTtBQUMvQixBQUFzQixBQUFBO0FBQ3RCLEFBQThCLEFBQUE7QUFDOUIsQUFBOEIsQUFBQTs7O0FBRzlCLEFBQTZCLEFBQUE7QUFDN0IsQUFBcUIsQUFBQTtBQUNyQixBQUFrQyxBQUFBO0FBQ2xDLEFBQXlCLEFBQUEifQ== diff --git a/packages/flow-remove-types/test/expected-pretty.js b/packages/flow-remove-types/test/expected-pretty.js new file mode 100644 index 00000000000..094f1a773fa --- /dev/null +++ b/packages/flow-remove-types/test/expected-pretty.js @@ -0,0 +1,158 @@ +/* */ +// @nolint + +// Regular import +import { + Something, +} from 'some-module'; + +// Import types + +// Typed function +async function test(x, y /*.*/ /*.*/ , z /*.*/ /*.*/ = 123) { + // Typed expression + return await (x); +} + +// Interface + +// Exported interface + +// Interface extends + +// Implements interface +class Bar extends Other /*.*/ { + // Class Property with default value + answer = 42; + + // Class Property + + method() { + return; + } +} + +// Class expression implements interface +var SomeClass = class Baz { + + method() { + return; + } +}; + +// Parametric class +class Wrapper { + get() { + return this.value; + } + + map() { + // do something + } +} + +// Extends Parametric class +class StringWrapper extends Wrapper { + // ... +} + +// Declare class + +// Declare funtion + +// Declare interface + +// Declare module + +// Declare type alias + +// Declare variable + +// Type alias + +// Export type + +// Regular export +export { Wrapper }; + +// Exported type alias + +// Object with types within +var someObj = { + objMethod() { + // do nothing. 
+ } +} + +// Example from README +import SomeClass from 'some-module' + +export class MyClass extends SomeClass { + + + constructor(value) { + this.value = value + } + + get() { + return this.value + } + +} + +// Test async/await functions +async function asyncFunction(input) { + return await t; +} + +// Test read-only data + +// Test covariant type variant class with constaint and default. +export class TestClassWithDefault { + + constructor() {} +} + +var newline_arrow = () => 42; + +var newline_arrow_2 = ()=>42; + +// Test calling a function with explicit type arguments +doSomething(3); +doSomething(3); + +// Test invoking a constructor with explicit type arguments +new Event(); + +// Test type union and intersection syntax with leading "operator" +var union; +var intersection; + +// Test generic async arrow funcion +const f = async() => {}; + +// Comment type annotations are preserved +var X /*: { + version: string, +} */ = { version: '42'}; + +function method(param /*: string */) /*: number */ { + // ... +} + +// Comment type includes are emptied out +class MyClass { + /*:: */ +} + +// Inferred predicate +function testit(arg) { + return !!arg; +} + +// Test function with default type parameter +function f() {} + +// Opaque types + +// Declare export diff --git a/packages/flow-remove-types/test/expected-with-maps/test/source.js b/packages/flow-remove-types/test/expected-with-maps/test/source.js new file mode 100644 index 00000000000..7dae0aa69df --- /dev/null +++ b/packages/flow-remove-types/test/expected-with-maps/test/source.js @@ -0,0 +1,160 @@ +/* */ +// @nolint + +// Regular import +import { + Something, +} from 'some-module'; + +// Import types + +// Typed function +async function test(x, y /*.*/ /*.*/ , z /*.*/ /*.*/ = 123) { + // Typed expression + return await (x); +} + +// Interface + +// Exported interface + +// Interface extends + +// Implements interface +class Bar extends Other /*.*/ { + // Class Property with default value + answer = 42; + + // Class Property + + method() { + return; + } +} + +// Class expression implements interface +var SomeClass = class Baz { + + method() { + return; + } +}; + +// Parametric class +class Wrapper { + get() { + return this.value; + } + + map() { + // do something + } +} + +// Extends Parametric class +class StringWrapper extends Wrapper { + // ... +} + +// Declare class + +// Declare funtion + +// Declare interface + +// Declare module + +// Declare type alias + +// Declare variable + +// Type alias + +// Export type + +// Regular export +export { Wrapper }; + +// Exported type alias + +// Object with types within +var someObj = { + objMethod() { + // do nothing. + } +} + +// Example from README +import SomeClass from 'some-module' + +export class MyClass extends SomeClass { + + + constructor(value) { + this.value = value + } + + get() { + return this.value + } + +} + +// Test async/await functions +async function asyncFunction(input) { + return await t; +} + +// Test read-only data + +// Test covariant type variant class with constaint and default. 
+export class TestClassWithDefault { + + constructor() {} +} + +var newline_arrow = () => 42; + +var newline_arrow_2 = ()=>42; + +// Test calling a function with explicit type arguments +doSomething(3); +doSomething(3); + +// Test invoking a constructor with explicit type arguments +new Event(); + +// Test type union and intersection syntax with leading "operator" +var union; +var intersection; + +// Test generic async arrow funcion +const f = async() => {}; + +// Comment type annotations are preserved +var X /*: { + version: string, +} */ = { version: '42'}; + +function method(param /*: string */) /*: number */ { + // ... +} + +// Comment type includes are emptied out +class MyClass { + /*:: */ +} + +// Inferred predicate +function testit(arg) { + return !!arg; +} + +// Test function with default type parameter +function f() {} + +// Opaque types + +// Declare export + +//# sourceMappingURL=source.js.map diff --git a/packages/flow-remove-types/test/expected-with-maps/test/source.js.map b/packages/flow-remove-types/test/expected-with-maps/test/source.js.map new file mode 100644 index 00000000000..d7dfcad6b5c --- /dev/null +++ b/packages/flow-remove-types/test/expected-with-maps/test/source.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../source.js"],"names":[],"mappings":"GAAG,AAAK;;;;;;EAMN,AAAA,AAAI,CAAC,AAAA,AAAQ,AAAC,AAAA;EACd,AAAA,AAAM,CAAC,AAAA,AAAc,AAAA;;;;AAIvB,AAA4C,AAAA;;;qBAGvB,AAAM,UAAU,AAAA,AAAC,iBAAiB,AAAA,AAAC,OAAO,AAAA,AAAc,OAAO,AAAQ;;iBAE3E,AAAK;;;;AAItB,AAIC,AAAA;;;AAGD,AAEC,AAAA;;;AAGD,AAEC,AAAA;;;wBAGuB,AAAA,AAAU,OAAO,AAAA,AAAG,AAAC,CAAC,AAAA,AAAU;;QAEhD,AAAQ;;;EAGd,AAAA,AAAU,AAAA;;UAEF,AAAO;;;;;;0BAMS,AAAA,AAAU,CAAC,AAAA,AAAG;EACtC,AAAA,AAAU,AAAA;;UAEF,AAAO;;;;;;aAMJ,AAAG;OACT,AAAG;;;;KAIL,AAAG,EAAE,AAAY;;;;;;mCAMa,AAAQ;;;;;AAK3C,AAEC,AAAA;;;AAGD,AAAkC,AAAA;;;AAGlC,AAEC,AAAA;;;AAGD,AAEC,AAAA;;;AAGD,AAGE,AAAA;;;AAGF,AAA+B,AAAA;;;AAG/B,AAAgB,AAAA;;;AAGhB,AAAkB,AAAA;;;;;;AAMlB,AAAkC,AAAA;;;;aAIrB,AAAM;;;;;;;AAOnB,AAAgD,AAAA;;oBAE5B,AAAG,mBAAmB,AAAA,AAAU,CAAC,AAAA,AAAa;;EAEhE,AAAA,AAAQ,AAAA;;mBAES,AAAG;;;;OAIf,AAAG;;;;;;;4BAOkB,AAAG,MAAM,AAAG,CAAC,AAAY;;;;;AAKrD,AAEG,AAAA;;;iCAG8B,AAAiC;;;;;sBAK5C,AAChB;;yBAEmB,AAAA,AACnB;;;WAGK,AAAQ;YACP,AAAA,AAAM;;;SAGT,AAAQ;;;SAGR,AAAS;gBACF,AAAS;;;gBAGT,AAAA,AAAG,EAAE,AAAG;;;;;;;;;;;;;OAajB,AAAA,AAAa;;;;mBAID,AAAO,CAAC,AAAS,CAAC,AAAA,AAAO;;;;;UAKlC,AAAU;;;AAGpB,AAAuB,AAAA;AACvB,AAA+B,AAAA;AAC/B,AAAsB,AAAA;AACtB,AAA8B,AAAA;AAC9B,AAA8B,AAAA;;;AAG9B,AAA6B,AAAA;AAC7B,AAAqB,AAAA;AACrB,AAAkC,AAAA;AAClC,AAAyB,AAAA"} diff --git a/packages/flow-remove-types/test/expected.js b/packages/flow-remove-types/test/expected.js new file mode 100644 index 00000000000..868c5054380 --- /dev/null +++ b/packages/flow-remove-types/test/expected.js @@ -0,0 +1,208 @@ +/* */ +// @nolint + +// Regular import +import { + Something, + + +} from 'some-module'; + +// Import types + + +// Typed function +async function test(x , y /*.*/ /*.*/ , z /*.*/ /*.*/ = 123) { + // Typed expression + return await (x ); +} + +// Interface + + + + + + +// Exported interface + + + + +// Interface extends + + + + +// Implements interface +class Bar extends Other /*.*/ { + // Class Property with default value + answer = 42; + + // Class Property + + + method() { + return; + } +} + +// Class expression implements interface +var SomeClass = class Baz { + + + method() { + return; + } +}; + +// Parametric class +class Wrapper { + get() { + return this.value; + } + + map () { + // do something + } +} + +// Extends Parametric class +class StringWrapper extends Wrapper { + // ... 
+} + +// Declare class + + + + +// Declare funtion + + +// Declare interface + + + + +// Declare module + + + + +// Declare type alias + + + + + +// Declare variable + + +// Type alias + + +// Export type + + +// Regular export +export { Wrapper }; + +// Exported type alias + + +// Object with types within +var someObj = { + objMethod() { + // do nothing. + } +} + +// Example from README +import SomeClass from 'some-module' + + +export class MyClass extends SomeClass { + + + + constructor(value ) { + this.value = value + } + + get() { + return this.value + } + +} + +// Test async/await functions +async function asyncFunction (input ) { + return await t; +} + +// Test read-only data + + + + +// Test covariant type variant class with constaint and default. +export class TestClassWithDefault { + + constructor() {} +} + +var newline_arrow = () => + 42; + +var newline_arrow_2 = () => + 42; + +// Test calling a function with explicit type arguments +doSomething (3); +doSomething (3); + +// Test invoking a constructor with explicit type arguments +new Event (); + +// Test type union and intersection syntax with leading "operator" +var union ; +var intersection ; + +// Test generic async arrow funcion +const f = async () => {}; + +// Comment type annotations are preserved +var X /*: { + version: string, +} */ = { version: '42'}; + +function method(param /*: string */) /*: number */ { + // ... +} + +// Comment type includes are emptied out +class MyClass { + /*:: */ +} + +// Inferred predicate +function testit(arg ) { + return !!arg; +} + +// Test function with default type parameter +function f () {} + +// Opaque types + + + + + + +// Declare export + + + + diff --git a/packages/flow-remove-types/test/source.js b/packages/flow-remove-types/test/source.js new file mode 100644 index 00000000000..070cca7e062 --- /dev/null +++ b/packages/flow-remove-types/test/source.js @@ -0,0 +1,208 @@ +/* @flow */ +// @nolint + +// Regular import +import { + Something, + type SomeType, + typeof SomeOtherThing +} from 'some-module'; + +// Import types +import type { SomeType } from 'some-module'; + +// Typed function +async function test(x: Type, y /*.*/ ? /*.*/ , z /*.*/ ? /*.*/ : /*.*/ number = 123): string { + // Typed expression + return await (x: any); +} + +// Interface +interface Foo { + prop: any; + + method(): mixed; +} + +// Exported interface +export interface IThing { + exported: true; +} + +// Interface extends +interface SillyFoo extends Foo { + silly: string; +} + +// Implements interface +class Bar extends Other implements /*.*/ Foo, ISomething { + // Class Property with default value + answer: number = 42; + + // Class Property + prop: any; + + method(): mixed { + return; + } +} + +// Class expression implements interface +var SomeClass = class Baz implements Foo { + prop: any; + + method(): mixed { + return; + } +}; + +// Parametric class +class Wrapper { + get(): T { + return this.value; + } + + map(): Wrapper { + // do something + } +} + +// Extends Parametric class +class StringWrapper extends Wrapper { + // ... 
+} + +// Declare class +declare class Baz { + method(): mixed; +} + +// Declare funtion +declare function someFunc(): void; + +// Declare interface +declare interface ISomething { + answer: number; +} + +// Declare module +declare module 'fs' { + declare function readThing(path: string): string; +} + +// Declare type alias +declare type Location = { + lat: number, + lon: number +}; + +// Declare variable +declare var SOME_CONST: string; + +// Type alias +type T = string; + +// Export type +export type { T }; + +// Regular export +export { Wrapper }; + +// Exported type alias +export type ONE = { one: number }; + +// Object with types within +var someObj = { + objMethod(): void { + // do nothing. + } +} + +// Example from README +import SomeClass from 'some-module' +import type { SomeInterface } from 'some-module' + +export class MyClass extends SomeClass implements SomeInterface { + + value: T + + constructor(value: T) { + this.value = value + } + + get(): T { + return this.value + } + +} + +// Test async/await functions +async function asyncFunction(input: T): Promise { + return await t; +} + +// Test read-only data +export type TestReadOnly = {| + +readOnly: $ReadOnlyArray<> +|}; + +// Test covariant type variant class with constaint and default. +export class TestClassWithDefault<+T: TestReadOnly = TestReadOnly> { + + constructor() {} +} + +var newline_arrow = (): +number => 42; + +var newline_arrow_2 = () : +number=>42; + +// Test calling a function with explicit type arguments +doSomething(3); +doSomething (3); + +// Test invoking a constructor with explicit type arguments +new Event(); + +// Test type union and intersection syntax with leading "operator" +var union: | T | U; +var intersection: & T & U; + +// Test generic async arrow funcion +const f = async (): T => {}; + +// Comment type annotations are preserved +var X /*: { + version: string, +} */ = { version: '42'}; + +function method(param /*: string */) /*: number */ { + // ... 
+} + +// Comment type includes are emptied out +class MyClass { + /*:: prop: string; */ +} + +// Inferred predicate +function testit(arg: mixed): boolean %checks { + return !!arg; +} + +// Test function with default type parameter +function f() {} + +// Opaque types +opaque type A = number; +opaque type B: string = string; +declare opaque type A; +declare opaque type B: string; +export opaque type A = number; + +// Declare export +declare export opaque type B; +declare export var x; +declare export function x(): void; +declare export default T; diff --git a/packages/flow-remove-types/test/test-node-module.js b/packages/flow-remove-types/test/test-node-module.js new file mode 100644 index 00000000000..7fa6126d19e --- /dev/null +++ b/packages/flow-remove-types/test/test-node-module.js @@ -0,0 +1,5 @@ +// @flow +// @nolint + +var n: number = 42; +console.log(n); diff --git a/packages/flow-remove-types/test/without-flow.js b/packages/flow-remove-types/test/without-flow.js new file mode 100644 index 00000000000..3bc86a9b74c --- /dev/null +++ b/packages/flow-remove-types/test/without-flow.js @@ -0,0 +1,3 @@ +function hasNoFlow(flow) { + return '@flow'.test(/@flow/); +} diff --git a/packages/flow-upgrade/.flowconfig b/packages/flow-upgrade/.flowconfig index fa7b42f0062..d6263e6da70 100644 --- a/packages/flow-upgrade/.flowconfig +++ b/packages/flow-upgrade/.flowconfig @@ -1,5 +1,5 @@ [version] -^0.74.0 +^0.84.0 [ignore] .*/node_modules/.* diff --git a/packages/flow-upgrade/package.json b/packages/flow-upgrade/package.json index 327fbce974d..561333886f5 100644 --- a/packages/flow-upgrade/package.json +++ b/packages/flow-upgrade/package.json @@ -1,6 +1,6 @@ { "name": "flow-upgrade", - "version": "1.0.5", + "version": "1.1.1", "description": "A utility for upgrading your codebase to the latest version of Flow.", "engines": { "node": ">=6" @@ -32,7 +32,7 @@ "dependencies": { "chalk": "^2.0.1", "graceful-fs": "^4.1.11", - "jscodeshift": "^0.4.0", + "jscodeshift": "https://github.com/jbrown215/jscodeshift.git", "ora": "^1.3.0", "prompt-confirm": "^1.2.0", "semver": "^5.3.0", @@ -44,5 +44,8 @@ "babel-preset-flow": "^6.23.0", "flow-bin": "0.74.0", "jest": "^20.0.4" + }, + "resolutions": { + "lodash": ">=4.17.14" } } diff --git a/packages/flow-upgrade/src/codemods/runCodemods.js b/packages/flow-upgrade/src/codemods/runCodemods.js index 46a0b0e63ed..d27f6a5cf3a 100644 --- a/packages/flow-upgrade/src/codemods/runCodemods.js +++ b/packages/flow-upgrade/src/codemods/runCodemods.js @@ -36,8 +36,8 @@ module.exports = async function runCodemods( */ module.exports = require(${JSON.stringify(AGGREGATE_CODEMOD_UTIL)})([ ${transformPaths - .map(transformPath => ` ${JSON.stringify(transformPath)},`) - .join('\n')} + .map(transformPath => ` ${JSON.stringify(transformPath)},`) + .join('\n')} ]); `.slice(1); // Write the codemod to the folder we created for it. diff --git a/packages/flow-upgrade/src/index.js b/packages/flow-upgrade/src/index.js index d48f37a39a7..1432e43545a 100644 --- a/packages/flow-upgrade/src/index.js +++ b/packages/flow-upgrade/src/index.js @@ -13,12 +13,24 @@ const options = { all: !!yargs.all, }; -// For now we are hardcoding the version numbers out of convenience. When we add +function printUsage(exitCode: number) { + console.log('Usage: flow-upgrade '); + process.exit(exitCode); +} + +if (!!yargs.help) { + printUsage(0); +} + +// For now we are asking for the version numbers out of convenience. 
When we add // upgrades for future versions we will need to check `.flowconfig` or // `flow-bin` for the current version and allow the new version to be // configurable. (But still default to the latest version.) -const fromVersion = '0.52.0'; -const toVersion = '0.53.0'; +if (yargs._.length != 2) { + printUsage(1); +} +const fromVersion = yargs._[0]; +const toVersion = yargs._[1]; upgrade(process.cwd(), fromVersion, toVersion, options).catch(error => { console.error(chalk.red(error ? error.stack || error : error)); diff --git a/packages/flow-upgrade/src/upgrade.js b/packages/flow-upgrade/src/upgrade.js index d5a37830a1f..cf393cdd1bc 100644 --- a/packages/flow-upgrade/src/upgrade.js +++ b/packages/flow-upgrade/src/upgrade.js @@ -32,6 +32,10 @@ const VERSION_UPGRADES: Array<{| require('./upgrades/0.53.0/ReactUtilityTypes'), ], }, + { + version: '0.84.0', + upgrades: [require('./upgrades/0.84.0/ExplicitInexactObjectSyntax')], + }, ]; /** diff --git a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/__snapshots__/codemod-test.js.snap b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/__snapshots__/codemod-test.js.snap index 32931f17248..e03678904a8 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/__snapshots__/codemod-test.js.snap +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/__snapshots__/codemod-test.js.snap @@ -19,7 +19,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -30,7 +30,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -46,11 +46,11 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: BadDefaultProps = {}; props: BadProps; state: BadState = {}; - } + }) " `; @@ -73,7 +73,7 @@ class MyComponent extends Component { } const expression = () => - class extends Component { + (class extends Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -84,7 +84,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -106,7 +106,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; defaultProps: T; @@ -116,7 +116,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -138,7 +138,7 @@ class MyComponent extends React.Component<$FlowFixMeDefaultProps, Props> { } const expression = () => - class extends React.Component<$FlowFixMeDefaultProps, Props> { + (class extends React.Component<$FlowFixMeDefaultProps, Props> { static defaultProps = {}; defaultProps: T; @@ -148,7 +148,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -170,7 +170,7 @@ class MyComponent extends React.Component<{a: number, b: number, c: number}, Pro } const expression = () => - class extends React.Component<{a: number, b: number, c: number}, Props> { + (class extends React.Component<{a: number, b: number, c: number}, Props> { static defaultProps: {a: number, b: number, c: number} = {a: 1, b: 2, c: 3}; defaultProps: T; @@ -180,7 +180,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -200,7 +200,7 @@ class MyComponent extends React.Component { } const 
expression = () => - class extends React.Component { + (class extends React.Component { defaultProps: T; static props: T; static state: T; @@ -208,7 +208,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -229,7 +229,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -240,7 +240,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -266,24 +266,24 @@ class MyComponent4 extends React.Component - class extends React.Component { + (class extends React.Component { componentWillReceiveProps(nextProps) {} - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { shouldComponentUpdate(prevProps, prevState) {} - } + }) const expression3 = () => - class extends React.Component { + (class extends React.Component { componentWillUpdate(prevProps, prevState) {} - } + }) const expression4 = () => - class extends React.Component { + (class extends React.Component { componentDidUpdate(prevProps, prevState) {} - } + }) " `; @@ -309,24 +309,24 @@ class MyComponent4 extends React.Component { } const expression1 = () => - class extends React.Component { + (class extends React.Component { componentWillReceiveProps() {} - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { shouldComponentUpdate() {} - } + }) const expression3 = () => - class extends React.Component { + (class extends React.Component { componentWillUpdate() {} - } + }) const expression4 = () => - class extends React.Component { + (class extends React.Component { componentDidUpdate() {} - } + }) " `; @@ -352,24 +352,24 @@ class MyComponent4 extends React.Component { } const expression1 = () => - class extends React.Component { + (class extends React.Component { componentWillReceiveProps(nextProps: Props) {} - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { shouldComponentUpdate(prevProps: Props) {} - } + }) const expression3 = () => - class extends React.Component { + (class extends React.Component { componentWillUpdate(prevProps: Props) {} - } + }) const expression4 = () => - class extends React.Component { + (class extends React.Component { componentDidUpdate(prevProps: Props) {} - } + }) " `; @@ -395,24 +395,24 @@ class MyComponent4 extends React.Component { } const expression1 = () => - class extends React.Component { + (class extends React.Component { componentWillReceiveProps(nextProps: Props) {} - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { shouldComponentUpdate(prevProps: Props, prevState: State) {} - } + }) const expression3 = () => - class extends React.Component { + (class extends React.Component { componentWillUpdate(prevProps: Props, prevState: State) {} - } + }) const expression4 = () => - class extends React.Component { + (class extends React.Component { componentDidUpdate(prevProps: Props, prevState: State) {} - } + }) " `; @@ -432,7 +432,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { defaultProps: T; static props: T; static state: T; @@ -440,7 +440,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -474,7 +474,7 @@ class MyComponent2 extends React.Component { } const expression1 = () => - class extends 
React.Component { + (class extends React.Component { constructor(props: Props) {} defaultProps: T; @@ -484,10 +484,10 @@ const expression1 = () => b = 5; c: T = 5; method() {} - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { constructor(props: Props) {} defaultProps: T; @@ -497,7 +497,7 @@ const expression2 = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -519,7 +519,7 @@ class MyComponent extends React.Component - class extends React.Component { + (class extends React.Component { constructor(props: {a: number, b: number, c: number}) {} defaultProps: T; @@ -529,7 +529,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -563,7 +563,7 @@ class MyComponent2 extends React.Component { } const expression1 = () => - class extends React.Component { + (class extends React.Component { constructor(props) {} defaultProps: T; @@ -573,10 +573,10 @@ const expression1 = () => b = 5; c: T = 5; method() {} - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { constructor(props) {} defaultProps: T; @@ -586,7 +586,7 @@ const expression2 = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -607,7 +607,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { constructor() {} defaultProps: T; @@ -616,7 +616,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -636,7 +636,7 @@ class MyComponent extends React.Component - class extends React.Component { + (class extends React.Component { defaultProps: T; static props: T; static state: T; @@ -644,7 +644,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -663,14 +663,14 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { defaultProps: T; static state: T; a: T; b = 5; c: T = 5; method() {} - } + }) " `; @@ -699,26 +699,26 @@ class MyComponent3 extends React.Component { } const expression1 = () => - class extends React.Component { + (class extends React.Component { state: State = { initialValue: this.props.value, }; - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { render() { return this.props.children; } - } + }) const expression3 = () => - class extends React.Component { + (class extends React.Component { render() { const props = {}; return props; } - } + }) " `; @@ -741,7 +741,7 @@ class MyComponent extends React.PureComponent { } const expression = () => - class extends React.PureComponent { + (class extends React.PureComponent { static defaultProps: DefaultProps = {}; state: State = {}; @@ -752,7 +752,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -775,7 +775,7 @@ class MyComponent extends PureComponent { } const expression = () => - class extends PureComponent { + (class extends PureComponent { static defaultProps: DefaultProps = {}; state: State = {}; @@ -786,7 +786,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -843,7 +843,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { state: State = {}; defaultProps: T; @@ -853,7 +853,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -875,7 +875,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends 
React.Component { + (class extends React.Component { state = {}; defaultProps: T; @@ -885,7 +885,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -907,7 +907,7 @@ class MyComponent extends React.Component - class extends React.Component { + (class extends React.Component { state: {a: number, b: number, c: number} = {a: 1, b: 2, c: 3}; defaultProps: T; @@ -917,7 +917,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -940,19 +940,19 @@ class MyComponent2 extends React.Component { } const expression1 = () => - class extends React.Component { + (class extends React.Component { render() { return this.state.children; } - } + }) const expression2 = () => - class extends React.Component { + (class extends React.Component { render() { const state = {}; return state; } - } + }) " `; @@ -972,7 +972,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { defaultProps: T; static props: T; static state: T; @@ -980,6 +980,6 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; diff --git a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/codemod-test.js b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/codemod-test.js index 974e0ce3d1a..da6ebe5621a 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/codemod-test.js +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentExplicitTypeArgs/__tests__/codemod-test.js @@ -8,7 +8,7 @@ const path = require('path'); const fs = require('fs'); const cp = require('child_process'); const jscodeshiftFlowParser = require('jscodeshift/parser/flow'); -const jscodeshift = require('jscodeshift').withParser(jscodeshiftFlowParser); +const jscodeshift = require('jscodeshift').withParser('flow'); const transform = require('../codemod'); const FIXTURES_DIR = path.join(__dirname, './fixtures'); diff --git a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/__snapshots__/codemod-test.js.snap b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/__snapshots__/codemod-test.js.snap index 21d8086085a..ed1a8547f32 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/__snapshots__/codemod-test.js.snap +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/__snapshots__/codemod-test.js.snap @@ -19,7 +19,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -30,7 +30,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -53,7 +53,7 @@ class MyComponent extends Component { } const expression = () => - class extends Component { + (class extends Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -64,7 +64,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -86,7 +86,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; defaultProps: T; @@ -96,7 +96,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -118,7 +118,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends 
React.Component { + (class extends React.Component { static defaultProps = {}; defaultProps: T; @@ -128,7 +128,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -150,7 +150,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: {a: number, b: number, c: number} = {a: 1, b: 2, c: 3}; defaultProps: T; @@ -160,7 +160,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -180,7 +180,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { defaultProps: T; static props: T; static state: T; @@ -188,7 +188,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -208,7 +208,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { defaultProps: T; static props: T; static state: T; @@ -216,7 +216,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -237,7 +237,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: any; defaultProps: T; static props: T; @@ -246,7 +246,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -268,7 +268,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps; defaultProps: T; @@ -278,7 +278,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -299,7 +299,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -310,7 +310,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -333,7 +333,7 @@ class MyComponent extends React.PureComponent { } const expression = () => - class extends React.PureComponent { + (class extends React.PureComponent { static defaultProps: DefaultProps = {}; state: State = {}; @@ -344,7 +344,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -367,7 +367,7 @@ class MyComponent extends PureComponent { } const expression = () => - class extends PureComponent { + (class extends PureComponent { static defaultProps: DefaultProps = {}; state: State = {}; @@ -378,7 +378,7 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; @@ -403,7 +403,7 @@ class MyComponent extends React.Component { } const expression = () => - class extends React.Component { + (class extends React.Component { static defaultProps: DefaultProps = {}; state: State = {}; @@ -414,6 +414,6 @@ const expression = () => b = 5; c: T = 5; method() {} - } + }) " `; diff --git a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/codemod-test.js b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/codemod-test.js index 974e0ce3d1a..68f5d3a650d 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/codemod-test.js +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactComponentSimplifyTypeArgs/__tests__/codemod-test.js @@ -7,8 +7,7 @@ const path = require('path'); const fs = require('fs'); const cp = require('child_process'); -const 
jscodeshiftFlowParser = require('jscodeshift/parser/flow'); -const jscodeshift = require('jscodeshift').withParser(jscodeshiftFlowParser); +const jscodeshift = require('jscodeshift').withParser('flow'); const transform = require('../codemod'); const FIXTURES_DIR = path.join(__dirname, './fixtures'); diff --git a/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/__snapshots__/codemod-test.js.snap b/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/__snapshots__/codemod-test.js.snap index 5abb2786f02..bb8df7e4ad1 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/__snapshots__/codemod-test.js.snap +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/__snapshots__/codemod-test.js.snap @@ -7,10 +7,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -21,10 +21,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -35,10 +35,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -49,10 +49,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -63,10 +63,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -77,10 +77,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -91,10 +91,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -105,10 +105,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -119,10 +119,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -133,10 +133,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -148,10 +148,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -163,10 +163,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -178,10 +178,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; @@ -193,10 +193,10 @@ class Buz {} class Foo extends Bar> {} const expression1 = () => - class {} + (class {}) const expression2 = () => - class extends Bar> {} + (class extends Bar> {}) " `; diff --git 
a/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/codemod-test.js b/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/codemod-test.js index c1c28ec2ae1..571e3ce58bc 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/codemod-test.js +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/__tests__/codemod-test.js @@ -7,8 +7,7 @@ const path = require('path'); const fs = require('fs'); const cp = require('child_process'); -const jscodeshiftFlowParser = require('jscodeshift/parser/flow'); -const jscodeshift = require('jscodeshift').withParser(jscodeshiftFlowParser); +const jscodeshift = require('jscodeshift').withParser('flow'); const transform = require('../codemod'); const FIXTURES_DIR = path.join(__dirname, './fixtures'); diff --git a/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/codemod.js b/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/codemod.js index 4f3076d2ec5..7497a4f0a78 100644 --- a/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/codemod.js +++ b/packages/flow-upgrade/src/upgrades/0.53.0/ReactUtilityTypes/codemod.js @@ -33,37 +33,13 @@ const SYNTHETIC_EVENT_NAMES = new Set([ * - The return type of React component render. */ module.exports = (j, root) => { - const recast = require('recast'); const ReactUtils = require('../../../codemods/ReactUtils')(j); const reactName = ReactUtils.getImportedReactName(root); const componentPattern = ReactUtils.getImportedComponentClassPattern(root); const hasElement = ReactUtils.hasDestructuredElement(root, reactName); - // There is a bug in recast so we can't use `root.find()`. Instead we need to - // visit with recast manually so we can visit the nodes it misses. - root.paths().forEach(path => { - recast.visit(path, { - // Here is where we do all of our transformations. - visitGenericTypeAnnotation: visitGenericTypeAnnotation, - - // We want to replace default React imports with a namespace import. - visitImportDeclaration: visitImportDeclaration, - - // recast has a bug where it won't visit `superTypeParameters` on class - // declarations and expressions. Fix that here. - visitClassDeclaration: visitClass, - visitClassExpression: visitClass, - }); - }); - - function visitClass(path) { - // Continue traversing the path. This is the default behavior. - this.traverse(path); - // There is a bug in recast which means we end up not traversing - // `superTypeParameters`! So handle that that bug by visiting - // `superTypeParameters` here. - this.visitWithoutReset(path.get('superTypeParameters')); - } + root.find(j.GenericTypeAnnotation).forEach(visitGenericTypeAnnotation); + root.find(j.ImportDeclaration).forEach(visitImportDeclaration); function visitImportDeclaration(path) { const node = path.node; @@ -79,10 +55,6 @@ module.exports = (j, root) => { ) { node.specifiers[0] = j.importNamespaceSpecifier(node.specifiers[0].local); } - - // Continue traversal if we do not want to apply a transformation to - // this node. - this.traverse(path); } function visitGenericTypeAnnotation(path) { @@ -103,12 +75,10 @@ module.exports = (j, root) => { j.identifier('Node'), ) : j.identifier('React$Node'); - // We have handled this node. No need to continue traversing. 
- return false; } // React$Element ==> React.Element> - if ( + else if ( // React$Element or ReactElement (node.id.type === 'Identifier' && (node.id.name === 'React$Element' || @@ -177,34 +147,28 @@ module.exports = (j, root) => { j.identifier('Element'), ) : j.identifier('React$Element'); - // We have handled this node. No need to continue traversing. - return false; } // ReactClass ==> React.ComponentType - if ( + else if ( node.id.type === 'Identifier' && node.id.name === 'ReactClass' && node.typeParameters && node.typeParameters.type === 'TypeParameterInstantiation' ) { // Replace ReactClass with the new type. - return j.genericTypeAnnotation( - // Get the component type name. Either as a qualified name from our - // React import or using the global. - reactName - ? j.qualifiedTypeIdentifier( - j.identifier(reactName), - j.identifier('ComponentType'), - ) - : j.identifier('React$ComponentType'), - // Keep the type parameters we had previously. - node.typeParameters, - ); + // Get the component type name. Either as a qualified name from our + // React import or using the global. + node.id = reactName + ? j.qualifiedTypeIdentifier( + j.identifier(reactName), + j.identifier('ComponentType'), + ) + : j.identifier('React$ComponentType'); } // React$Component ==> React.Component - if ( + else if ( ((node.id.type === 'Identifier' && (node.id.name === 'ReactComponent' || node.id.name === 'React$Component')) || @@ -219,34 +183,27 @@ module.exports = (j, root) => { // If the user has destructured React and used Component or PureComponent // directly then we should not update the identifier of this node. if ( - node.id.type === 'Identifier' && - (node.id.name === 'Component' || node.id.name === 'PureComponent') + !( + node.id.type === 'Identifier' && + (node.id.name === 'Component' || node.id.name === 'PureComponent') + ) ) { - return false; + node.id = reactName + ? j.qualifiedTypeIdentifier( + j.identifier(reactName), + j.identifier('Component'), + ) + : j.identifier('React$Component'); } - // Replace the identifier with React.Component - node.id = reactName - ? j.qualifiedTypeIdentifier( - j.identifier(reactName), - j.identifier('Component'), - ) - : j.identifier('React$Component'); - // We have handled this node. No need to continue traversing. - return false; } // SyntheticEvent ==> SyntheticEvent<> - if ( + else if ( node.id.type === 'Identifier' && SYNTHETIC_EVENT_NAMES.has(node.id.name) ) { node.typeParameters = j.typeParameterInstantiation([]); - return false; } - - // Continue traversal if we do not want to apply a transformation to - // this node. - this.traverse(path); } // render(): React.Element ==> render(): React.Node diff --git a/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/__snapshots__/codemod-test.js.snap b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/__snapshots__/codemod-test.js.snap new file mode 100644 index 00000000000..e923f23fc55 --- /dev/null +++ b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/__snapshots__/codemod-test.js.snap @@ -0,0 +1,74 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`test.js 1`] = ` +"//@flow +type T = {...}; +opaque type T: {...} = {...}; + +function test(x: {...}): {...} {} +class A {} +interface B {} +declare class C {} +declare interface D {} + +type Nested = { x: {...}, ... 
}; +type Exact = {| |}; + +type Generic = T; +opaque type Generic = T; + +class A { + static x: {...} = {} +} + +type T = { + a: number, + b: string, + c: {...}, + d: string, + ... +}; +type U = { + a: number, + b: {...}, + c: { x : {...}, ... }, + d: { x: { y :{...}, ... }, ... }, + ... +}; + +// We should also make sure objects already using this syntax aren't broken by the codemod +type T = {...}; +opaque type T: {...} = {...}; + +function test(x: {...}): {...} {} +class A {} +interface B {} +declare class C {} +declare interface D {} + +type Nested = { x: {...}, ... }; +type Exact = {| |}; + +type Generic = T; +opaque type Generic = T; + +class A { + static x: {...} = {} +} + +type T = { + a: number, + b: string, + c: {...}, + d: string, + ... +}; +type U = { + a: number, + b: {...}, + c: { x : {...}, ... }, + d: { x: { y :{...}, ... }, ... }, + ... +}; +" +`; diff --git a/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/codemod-test.js b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/codemod-test.js new file mode 100644 index 00000000000..5c11254f657 --- /dev/null +++ b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/codemod-test.js @@ -0,0 +1,29 @@ +/** + * Copyright (c) 2013-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @flow + */ + +'use strict'; + +const path = require('path'); +const fs = require('fs'); +const cp = require('child_process'); +const jscodeshift = require('jscodeshift').withParser('flow'); +const transform = require('../codemod'); + +const FIXTURES_DIR = path.join(__dirname, './fixtures'); + +fs.readdirSync(FIXTURES_DIR).forEach(fixture => { + test(fixture, () => { + const file = path.join(FIXTURES_DIR, fixture); + const source = fs.readFileSync(file, 'utf8'); + const root = jscodeshift(source); + const skipped = !transform(jscodeshift, root); + expect(skipped ? null : root.toSource()).toMatchSnapshot(); + }); +}); diff --git a/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/fixtures/test.js b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/fixtures/test.js new file mode 100644 index 00000000000..338d6da7514 --- /dev/null +++ b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/__tests__/fixtures/test.js @@ -0,0 +1,67 @@ +//@flow +type T = {}; +opaque type T: {} = {}; + +function test(x: {}): {} {} +class A {} +interface B {} +declare class C {} +declare interface D {} + +type Nested = { x: {}, }; +type Exact = {| |}; + +type Generic = T; +opaque type Generic = T; + +class A { + static x: {} = {} +} + +type T = { + a: number, + b: string, + c: {}, + d: string, +}; +type U = { + a: number, + b: {}, + c: { x : {}, }, + d: { x: { y :{}, }, }, +}; + +// We should also make sure objects already using this syntax aren't broken by the codemod +type T = {...}; +opaque type T: {...} = {...}; + +function test(x: {...}): {...} {} +class A {} +interface B {} +declare class C {} +declare interface D {} + +type Nested = { x: {...}, ... }; +type Exact = {| |}; + +type Generic = T; +opaque type Generic = T; + +class A { + static x: {...} = {} +} + +type T = { + a: number, + b: string, + c: {...}, + d: string, + ... +}; +type U = { + a: number, + b: {...}, + c: { x : {...}, ... }, + d: { x: { y :{...}, ... }, ... }, + ... 
+}; diff --git a/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/codemod.js b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/codemod.js new file mode 100644 index 00000000000..75f926f2a61 --- /dev/null +++ b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/codemod.js @@ -0,0 +1,20 @@ +/** + * Copyright (c) 2013-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @flow + */ + +'use strict'; + +module.exports = (j: any, root: any) => { + root + .find(j.ObjectTypeAnnotation, {inexact: false, exact: false}) + .forEach(path => { + path.node.inexact = true; + }); + return root.toSource({tabWidth: 2}); +}; diff --git a/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/index.js b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/index.js new file mode 100644 index 00000000000..d5b9779ad4a --- /dev/null +++ b/packages/flow-upgrade/src/upgrades/0.84.0/ExplicitInexactObjectSyntax/index.js @@ -0,0 +1,23 @@ +/** + * Copyright (c) 2013-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @flow + */ + +const path = require('path'); +const Styled = require('../../../Styled'); + +exports.kind = 'codemod'; + +exports.title = 'Adds `...` to the end of all inexact object types.'; + +exports.description = ` +Flow is changing its object type syntax to be exact by default. See the blog +post at https://medium.com/flow-type/on-the-roadmap-exact-objects-by-default-16b72933c5cf +for details. This codemod will add '...' to the end of all inexact object types. 
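Concretely, a minimal sketch of exercising the new transform on a single inexact object type, using the same jscodeshift Flow parser that the test harness above uses. This is illustrative only and not part of the diff: the input string and the `./codemod` require path are assumptions, and the exact printed formatting depends on recast.

// Sketch (assumed usage, not part of this change): drive the
// ExplicitInexactObjectSyntax transform on a source string, the same way
// codemod-test.js does for its fixtures.
const jscodeshift = require('jscodeshift').withParser('flow');
const transform = require('./codemod'); // illustrative path to the file above

const source = 'type Props = { name: string, age: number };';
const root = jscodeshift(source);

// codemod.js mutates the matched ObjectTypeAnnotation nodes in place and
// returns the re-printed source, so `output` should read roughly:
//   type Props = { name: string, age: number, ... };
const output = transform(jscodeshift, root);
console.log(output);

Because the transform filters on `{inexact: false, exact: false}`, exact object types (`{| |}`) and types already written with an explicit `...` are skipped, which is what the "aren't broken by the codemod" half of the fixture above verifies.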
+`; +exports.transformPath = path.join(__dirname, './codemod.js'); diff --git a/packages/flow-upgrade/yarn.lock b/packages/flow-upgrade/yarn.lock index 7e76f727ef8..ba2d1fff109 100644 --- a/packages/flow-upgrade/yarn.lock +++ b/packages/flow-upgrade/yarn.lock @@ -2,112 +2,832 @@ # yarn lockfile v1 +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.5.5.tgz#bc0782f6d69f7b7d49531219699b988f669a8f9d" + integrity sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw== + dependencies: + "@babel/highlight" "^7.0.0" + +"@babel/core@^7.1.6": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.2.tgz#069a776e8d5e9eefff76236bc8845566bd31dd91" + integrity sha512-l8zto/fuoZIbncm+01p8zPSDZu/VuuJhAfA7d/AbzM09WR7iVhavvfNDYCNpo1VvLk6E6xgAoP9P+/EMJHuRkQ== + dependencies: + "@babel/code-frame" "^7.5.5" + "@babel/generator" "^7.6.2" + "@babel/helpers" "^7.6.2" + "@babel/parser" "^7.6.2" + "@babel/template" "^7.6.0" + "@babel/traverse" "^7.6.2" + "@babel/types" "^7.6.0" + convert-source-map "^1.1.0" + debug "^4.1.0" + json5 "^2.1.0" + lodash "^4.17.13" + resolve "^1.3.2" + semver "^5.4.1" + source-map "^0.5.0" + +"@babel/generator@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.6.2.tgz#dac8a3c2df118334c2a29ff3446da1636a8f8c03" + integrity sha512-j8iHaIW4gGPnViaIHI7e9t/Hl8qLjERI6DcV9kEpAIDJsAOrcnXqRS7t+QbhL76pwbtqP+QCQLL0z1CyVmtjjQ== + dependencies: + "@babel/types" "^7.6.0" + jsesc "^2.5.1" + lodash "^4.17.13" + source-map "^0.5.0" + +"@babel/helper-annotate-as-pure@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.0.0.tgz#323d39dd0b50e10c7c06ca7d7638e6864d8c5c32" + integrity sha512-3UYcJUj9kvSLbLbUIfQTqzcy5VX7GRZ/CCDrnOaZorFFM01aXp1+GJwuFGV4NDDoAS+mOUyHcO6UD/RfqOks3Q== + dependencies: + "@babel/types" "^7.0.0" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.1.0.tgz#6b69628dfe4087798e0c4ed98e3d4a6b2fbd2f5f" + integrity sha512-qNSR4jrmJ8M1VMM9tibvyRAHXQs2PmaksQF7c1CGJNipfe3D8p+wgNwgso/P2A2r2mdgBWAXljNWR0QRZAMW8w== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.1.0" + "@babel/types" "^7.0.0" + +"@babel/helper-call-delegate@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/helper-call-delegate/-/helper-call-delegate-7.4.4.tgz#87c1f8ca19ad552a736a7a27b1c1fcf8b1ff1f43" + integrity sha512-l79boDFJ8S1c5hvQvG+rc+wHw6IuH7YldmRKsYtpbawsxURu/paVy57FZMomGK22/JckepaikOkY0MoAmdyOlQ== + dependencies: + "@babel/helper-hoist-variables" "^7.4.4" + "@babel/traverse" "^7.4.4" + "@babel/types" "^7.4.4" + +"@babel/helper-create-class-features-plugin@^7.5.5", "@babel/helper-create-class-features-plugin@^7.6.0": + version "7.6.0" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.6.0.tgz#769711acca889be371e9bc2eb68641d55218021f" + integrity sha512-O1QWBko4fzGju6VoVvrZg0RROCVifcLxiApnGP3OWfWzvxRZFCoBD81K5ur5e3bVY2Vf/5rIJm8cqPKn8HUJng== + dependencies: + "@babel/helper-function-name" "^7.1.0" + "@babel/helper-member-expression-to-functions" "^7.5.5" + "@babel/helper-optimise-call-expression" "^7.0.0" + "@babel/helper-plugin-utils" "^7.0.0" + 
"@babel/helper-replace-supers" "^7.5.5" + "@babel/helper-split-export-declaration" "^7.4.4" + +"@babel/helper-define-map@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.5.5.tgz#3dec32c2046f37e09b28c93eb0b103fd2a25d369" + integrity sha512-fTfxx7i0B5NJqvUOBBGREnrqbTxRh7zinBANpZXAVDlsZxYdclDp467G1sQ8VZYMnAURY3RpBUAgOYT9GfzHBg== + dependencies: + "@babel/helper-function-name" "^7.1.0" + "@babel/types" "^7.5.5" + lodash "^4.17.13" + +"@babel/helper-explode-assignable-expression@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.1.0.tgz#537fa13f6f1674df745b0c00ec8fe4e99681c8f6" + integrity sha512-NRQpfHrJ1msCHtKjbzs9YcMmJZOg6mQMmGRB+hbamEdG5PNpaSm95275VD92DvJKuyl0s2sFiDmMZ+EnnvufqA== + dependencies: + "@babel/traverse" "^7.1.0" + "@babel/types" "^7.0.0" + +"@babel/helper-function-name@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz#a0ceb01685f73355d4360c1247f582bfafc8ff53" + integrity sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw== + dependencies: + "@babel/helper-get-function-arity" "^7.0.0" + "@babel/template" "^7.1.0" + "@babel/types" "^7.0.0" + +"@babel/helper-get-function-arity@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz#83572d4320e2a4657263734113c42868b64e49c3" + integrity sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ== + dependencies: + "@babel/types" "^7.0.0" + +"@babel/helper-hoist-variables@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.4.4.tgz#0298b5f25c8c09c53102d52ac4a98f773eb2850a" + integrity sha512-VYk2/H/BnYbZDDg39hr3t2kKyifAm1W6zHRfhx8jGjIHpQEBv9dry7oQ2f3+J703TLu69nYdxsovl0XYfcnK4w== + dependencies: + "@babel/types" "^7.4.4" + +"@babel/helper-member-expression-to-functions@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.5.5.tgz#1fb5b8ec4453a93c439ee9fe3aeea4a84b76b590" + integrity sha512-5qZ3D1uMclSNqYcXqiHoA0meVdv+xUEex9em2fqMnrk/scphGlGgg66zjMrPJESPwrFJ6sbfFQYUSa0Mz7FabA== + dependencies: + "@babel/types" "^7.5.5" + +"@babel/helper-module-imports@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.0.0.tgz#96081b7111e486da4d2cd971ad1a4fe216cc2e3d" + integrity sha512-aP/hlLq01DWNEiDg4Jn23i+CXxW/owM4WpDLFUbpjxe4NS3BhLVZQ5i7E0ZrxuQ/vwekIeciyamgB1UIYxxM6A== + dependencies: + "@babel/types" "^7.0.0" + +"@babel/helper-module-transforms@^7.1.0", "@babel/helper-module-transforms@^7.4.4": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.5.5.tgz#f84ff8a09038dcbca1fd4355661a500937165b4a" + integrity sha512-jBeCvETKuJqeiaCdyaheF40aXnnU1+wkSiUs/IQg3tB85up1LyL8x77ClY8qJpuRJUcXQo+ZtdNESmZl4j56Pw== + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/helper-simple-access" "^7.1.0" + "@babel/helper-split-export-declaration" "^7.4.4" + "@babel/template" "^7.4.4" + "@babel/types" "^7.5.5" + lodash "^4.17.13" + +"@babel/helper-optimise-call-expression@^7.0.0": + version "7.0.0" + resolved 
"https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.0.0.tgz#a2920c5702b073c15de51106200aa8cad20497d5" + integrity sha512-u8nd9NQePYNQV8iPWu/pLLYBqZBa4ZaY1YWRFMuxrid94wKI1QNt67NEZ7GAe5Kc/0LLScbim05xZFWkAdrj9g== + dependencies: + "@babel/types" "^7.0.0" + +"@babel/helper-plugin-utils@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz#bbb3fbee98661c569034237cc03967ba99b4f250" + integrity sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA== + +"@babel/helper-regex@^7.0.0", "@babel/helper-regex@^7.4.4": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.5.5.tgz#0aa6824f7100a2e0e89c1527c23936c152cab351" + integrity sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw== + dependencies: + lodash "^4.17.13" + +"@babel/helper-remap-async-to-generator@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.1.0.tgz#361d80821b6f38da75bd3f0785ece20a88c5fe7f" + integrity sha512-3fOK0L+Fdlg8S5al8u/hWE6vhufGSn0bN09xm2LXMy//REAF8kDCrYoOBKYmA8m5Nom+sV9LyLCwrFynA8/slg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.0.0" + "@babel/helper-wrap-function" "^7.1.0" + "@babel/template" "^7.1.0" + "@babel/traverse" "^7.1.0" + "@babel/types" "^7.0.0" + +"@babel/helper-replace-supers@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.5.5.tgz#f84ce43df031222d2bad068d2626cb5799c34bc2" + integrity sha512-XvRFWrNnlsow2u7jXDuH4jDDctkxbS7gXssrP4q2nUD606ukXHRvydj346wmNg+zAgpFx4MWf4+usfC93bElJg== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.5.5" + "@babel/helper-optimise-call-expression" "^7.0.0" + "@babel/traverse" "^7.5.5" + "@babel/types" "^7.5.5" + +"@babel/helper-simple-access@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.1.0.tgz#65eeb954c8c245beaa4e859da6188f39d71e585c" + integrity sha512-Vk+78hNjRbsiu49zAPALxTb+JUQCz1aolpd8osOF16BGnLtseD21nbHgLPGUwrXEurZgiCOUmvs3ExTu4F5x6w== + dependencies: + "@babel/template" "^7.1.0" + "@babel/types" "^7.0.0" + +"@babel/helper-split-export-declaration@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz#ff94894a340be78f53f06af038b205c49d993677" + integrity sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q== + dependencies: + "@babel/types" "^7.4.4" + +"@babel/helper-wrap-function@^7.1.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz#c4e0012445769e2815b55296ead43a958549f6fa" + integrity sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ== + dependencies: + "@babel/helper-function-name" "^7.1.0" + "@babel/template" "^7.1.0" + "@babel/traverse" "^7.1.0" + "@babel/types" "^7.2.0" + +"@babel/helpers@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.6.2.tgz#681ffe489ea4dcc55f23ce469e58e59c1c045153" + integrity sha512-3/bAUL8zZxYs1cdX2ilEE0WobqbCmKWr/889lf2SS0PpDcpEIY8pb1CCyz0pEcX3pEb+MCbks1jIokz2xLtGTA== + dependencies: + "@babel/template" "^7.6.0" + "@babel/traverse" 
"^7.6.2" + "@babel/types" "^7.6.0" + +"@babel/highlight@^7.0.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.5.0.tgz#56d11312bd9248fa619591d02472be6e8cb32540" + integrity sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ== + dependencies: + chalk "^2.0.0" + esutils "^2.0.2" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.6", "@babel/parser@^7.6.0", "@babel/parser@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.6.2.tgz#205e9c95e16ba3b8b96090677a67c9d6075b70a1" + integrity sha512-mdFqWrSPCmikBoaBYMuBulzTIKuXVPtEISFbRRVNwMWpCms/hmE2kRq0bblUHaNRKrjRlmVbx1sDHmjmRgD2Xg== + +"@babel/plugin-proposal-async-generator-functions@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz#b289b306669dce4ad20b0252889a15768c9d417e" + integrity sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-remap-async-to-generator" "^7.1.0" + "@babel/plugin-syntax-async-generators" "^7.2.0" + +"@babel/plugin-proposal-class-properties@^7.1.0": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.5.5.tgz#a974cfae1e37c3110e71f3c6a2e48b8e71958cd4" + integrity sha512-AF79FsnWFxjlaosgdi421vmYG6/jg79bVD0dpD44QdgobzHKuLZ6S3vl8la9qIeSwGi8i1fS0O1mfuDAAdo1/A== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.5.5" + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-proposal-dynamic-import@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.5.0.tgz#e532202db4838723691b10a67b8ce509e397c506" + integrity sha512-x/iMjggsKTFHYC6g11PL7Qy58IK8H5zqfm9e6hu4z1iH2IRyAp9u9dL80zA6R76yFovETFLKz2VJIC2iIPBuFw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-syntax-dynamic-import" "^7.2.0" + +"@babel/plugin-proposal-json-strings@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz#568ecc446c6148ae6b267f02551130891e29f317" + integrity sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-syntax-json-strings" "^7.2.0" + +"@babel/plugin-proposal-object-rest-spread@^7.0.0", "@babel/plugin-proposal-object-rest-spread@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.6.2.tgz#8ffccc8f3a6545e9f78988b6bf4fe881b88e8096" + integrity sha512-LDBXlmADCsMZV1Y9OQwMc0MyGZ8Ta/zlD9N67BfQT8uYwkRswiu2hU6nJKrjrt/58aH/vqfQlR/9yId/7A2gWw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-syntax-object-rest-spread" "^7.2.0" + +"@babel/plugin-proposal-optional-catch-binding@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz#135d81edb68a081e55e56ec48541ece8065c38f5" + integrity sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + 
"@babel/plugin-syntax-optional-catch-binding" "^7.2.0" + +"@babel/plugin-proposal-unicode-property-regex@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.6.2.tgz#05413762894f41bfe42b9a5e80919bd575dcc802" + integrity sha512-NxHETdmpeSCtiatMRYWVJo7266rrvAC3DTeG5exQBIH/fMIUK7ejDNznBbn3HQl/o9peymRRg7Yqkx6PdUXmMw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-regex" "^7.4.4" + regexpu-core "^4.6.0" + +"@babel/plugin-syntax-async-generators@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz#69e1f0db34c6f5a0cf7e2b3323bf159a76c8cb7f" + integrity sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-syntax-dynamic-import@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz#69c159ffaf4998122161ad8ebc5e6d1f55df8612" + integrity sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-syntax-flow@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.2.0.tgz#a765f061f803bc48f240c26f8747faf97c26bf7c" + integrity sha512-r6YMuZDWLtLlu0kqIim5o/3TNRAlWb073HwT3e2nKf9I8IIvOggPrnILYPsrrKilmn/mYEMCf/Z07w3yQJF6dg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-syntax-json-strings@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz#72bd13f6ffe1d25938129d2a186b11fd62951470" + integrity sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-syntax-object-rest-spread@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz#3b7a3e733510c57e820b9142a6579ac8b0dfad2e" + integrity sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz#a94013d6eda8908dfe6a477e7f9eda85656ecf5c" + integrity sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-syntax-typescript@^7.2.0": + version "7.3.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.3.3.tgz#a7cc3f66119a9f7ebe2de5383cce193473d65991" + integrity sha512-dGwbSMA1YhVS8+31CnPR7LB4pcbrzcV99wQzby4uAfrkZPYZlQ7ImwdpzLqi6Z6IL02b8IAL379CaMwo0x5Lag== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-arrow-functions@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz#9aeafbe4d6ffc6563bf8f8372091628f00779550" + integrity 
sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-async-to-generator@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.5.0.tgz#89a3848a0166623b5bc481164b5936ab947e887e" + integrity sha512-mqvkzwIGkq0bEF1zLRRiTdjfomZJDV33AH3oQzHVGkI2VzEmXLpKKOBvEVaFZBJdN0XTyH38s9j/Kiqr68dggg== + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-remap-async-to-generator" "^7.1.0" + +"@babel/plugin-transform-block-scoped-functions@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz#5d3cc11e8d5ddd752aa64c9148d0db6cb79fd190" + integrity sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-block-scoping@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.2.tgz#96c33ab97a9ae500cc6f5b19e04a7e6553360a79" + integrity sha512-zZT8ivau9LOQQaOGC7bQLQOT4XPkPXgN2ERfUgk1X8ql+mVkLc4E8eKk+FO3o0154kxzqenWCorfmEXpEZcrSQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + lodash "^4.17.13" + +"@babel/plugin-transform-classes@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.5.5.tgz#d094299d9bd680a14a2a0edae38305ad60fb4de9" + integrity sha512-U2htCNK/6e9K7jGyJ++1p5XRU+LJjrwtoiVn9SzRlDT2KubcZ11OOwy3s24TjHxPgxNwonCYP7U2K51uVYCMDg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.0.0" + "@babel/helper-define-map" "^7.5.5" + "@babel/helper-function-name" "^7.1.0" + "@babel/helper-optimise-call-expression" "^7.0.0" + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-replace-supers" "^7.5.5" + "@babel/helper-split-export-declaration" "^7.4.4" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz#83a7df6a658865b1c8f641d510c6f3af220216da" + integrity sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-destructuring@^7.6.0": + version "7.6.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.6.0.tgz#44bbe08b57f4480094d57d9ffbcd96d309075ba6" + integrity sha512-2bGIS5P1v4+sWTCnKNDZDxbGvEqi0ijeqM/YqHtVGrvG2y0ySgnEEhXErvE9dA0bnIzY9bIzdFK0jFA46ASIIQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-dotall-regex@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.6.2.tgz#44abb948b88f0199a627024e1508acaf8dc9b2f9" + integrity sha512-KGKT9aqKV+9YMZSkowzYoYEiHqgaDhGmPNZlZxX6UeHC4z30nC1J9IrZuGqbYFB1jaIGdv91ujpze0exiVK8bA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-regex" "^7.4.4" + regexpu-core "^4.6.0" + +"@babel/plugin-transform-duplicate-keys@^7.5.0": + version "7.5.0" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.5.0.tgz#c5dbf5106bf84cdf691222c0974c12b1df931853" + integrity sha512-igcziksHizyQPlX9gfSjHkE2wmoCH3evvD2qR5w29/Dk0SMKE/eOI7f1HhBdNhR/zxJDqrgpoDTq5YSLH/XMsQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-exponentiation-operator@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz#a63868289e5b4007f7054d46491af51435766008" + integrity sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.1.0" + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-flow-strip-types@^7.0.0": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.4.4.tgz#d267a081f49a8705fc9146de0768c6b58dccd8f7" + integrity sha512-WyVedfeEIILYEaWGAUWzVNyqG4sfsNooMhXWsu/YzOvVGcsnPb5PguysjJqI3t3qiaYj0BR8T2f5njdjTGe44Q== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-syntax-flow" "^7.2.0" + +"@babel/plugin-transform-for-of@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz#0267fc735e24c808ba173866c6c4d1440fc3c556" + integrity sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-function-name@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.4.4.tgz#e1436116abb0610c2259094848754ac5230922ad" + integrity sha512-iU9pv7U+2jC9ANQkKeNF6DrPy4GBa4NWQtl6dHB4Pb3izX2JOEvDTFarlNsBj/63ZEzNNIAMs3Qw4fNCcSOXJA== + dependencies: + "@babel/helper-function-name" "^7.1.0" + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-literals@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz#690353e81f9267dad4fd8cfd77eafa86aba53ea1" + integrity sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-member-expression-literals@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.2.0.tgz#fa10aa5c58a2cb6afcf2c9ffa8cb4d8b3d489a2d" + integrity sha512-HiU3zKkSU6scTidmnFJ0bMX8hz5ixC93b4MHMiYebmk2lUVNGOboPsqQvx5LzooihijUoLR/v7Nc1rbBtnc7FA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-modules-amd@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.5.0.tgz#ef00435d46da0a5961aa728a1d2ecff063e4fb91" + integrity sha512-n20UsQMKnWrltocZZm24cRURxQnWIvsABPJlw/fvoy9c6AgHZzoelAIzajDHAQrDpuKFFPPcFGd7ChsYuIUMpg== + dependencies: + "@babel/helper-module-transforms" "^7.1.0" + "@babel/helper-plugin-utils" "^7.0.0" + babel-plugin-dynamic-import-node "^2.3.0" + +"@babel/plugin-transform-modules-commonjs@^7.6.0": + version "7.6.0" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.6.0.tgz#39dfe957de4420445f1fcf88b68a2e4aa4515486" + integrity sha512-Ma93Ix95PNSEngqomy5LSBMAQvYKVe3dy+JlVJSHEXZR5ASL9lQBedMiCyVtmTLraIDVRE3ZjTZvmXXD2Ozw3g== + dependencies: + "@babel/helper-module-transforms" "^7.4.4" + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-simple-access" "^7.1.0" + babel-plugin-dynamic-import-node "^2.3.0" + +"@babel/plugin-transform-modules-systemjs@^7.5.0": + version "7.5.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.5.0.tgz#e75266a13ef94202db2a0620977756f51d52d249" + integrity sha512-Q2m56tyoQWmuNGxEtUyeEkm6qJYFqs4c+XyXH5RAuYxObRNz9Zgj/1g2GMnjYp2EUyEy7YTrxliGCXzecl/vJg== + dependencies: + "@babel/helper-hoist-variables" "^7.4.4" + "@babel/helper-plugin-utils" "^7.0.0" + babel-plugin-dynamic-import-node "^2.3.0" + +"@babel/plugin-transform-modules-umd@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.2.0.tgz#7678ce75169f0877b8eb2235538c074268dd01ae" + integrity sha512-BV3bw6MyUH1iIsGhXlOK6sXhmSarZjtJ/vMiD9dNmpY8QXFFQTj+6v92pcfy1iqa8DeAfJFwoxcrS/TUZda6sw== + dependencies: + "@babel/helper-module-transforms" "^7.1.0" + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.6.2.tgz#c1ca0bb84b94f385ca302c3932e870b0fb0e522b" + integrity sha512-xBdB+XOs+lgbZc2/4F5BVDVcDNS4tcSKQc96KmlqLEAwz6tpYPEvPdmDfvVG0Ssn8lAhronaRs6Z6KSexIpK5g== + dependencies: + regexpu-core "^4.6.0" + +"@babel/plugin-transform-new-target@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz#18d120438b0cc9ee95a47f2c72bc9768fbed60a5" + integrity sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-object-super@^7.5.5": + version "7.5.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.5.5.tgz#c70021df834073c65eb613b8679cc4a381d1a9f9" + integrity sha512-un1zJQAhSosGFBduPgN/YFNvWVpRuHKU7IHBglLoLZsGmruJPOo6pbInneflUdmq7YvSVqhpPs5zdBvLnteltQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-replace-supers" "^7.5.5" + +"@babel/plugin-transform-parameters@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz#7556cf03f318bd2719fe4c922d2d808be5571e16" + integrity sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw== + dependencies: + "@babel/helper-call-delegate" "^7.4.4" + "@babel/helper-get-function-arity" "^7.0.0" + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-property-literals@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.2.0.tgz#03e33f653f5b25c4eb572c98b9485055b389e905" + integrity sha512-9q7Dbk4RhgcLp8ebduOpCbtjh7C0itoLYHXd9ueASKAG/is5PQtMR5VJGka9NKqGhYEGn5ITahd4h9QeBMylWQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + 
+"@babel/plugin-transform-regenerator@^7.4.5": + version "7.4.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.4.5.tgz#629dc82512c55cee01341fb27bdfcb210354680f" + integrity sha512-gBKRh5qAaCWntnd09S8QC7r3auLCqq5DI6O0DlfoyDjslSBVqBibrMdsqO+Uhmx3+BlOmE/Kw1HFxmGbv0N9dA== + dependencies: + regenerator-transform "^0.14.0" + +"@babel/plugin-transform-reserved-words@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.2.0.tgz#4792af87c998a49367597d07fedf02636d2e1634" + integrity sha512-fz43fqW8E1tAB3DKF19/vxbpib1fuyCwSPE418ge5ZxILnBhWyhtPgz8eh1RCGGJlwvksHkyxMxh0eenFi+kFw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-shorthand-properties@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz#6333aee2f8d6ee7e28615457298934a3b46198f0" + integrity sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-spread@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.6.2.tgz#fc77cf798b24b10c46e1b51b1b88c2bf661bb8dd" + integrity sha512-DpSvPFryKdK1x+EDJYCy28nmAaIMdxmhot62jAXF/o99iA33Zj2Lmcp3vDmz+MUh0LNYVPvfj5iC3feb3/+PFg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-sticky-regex@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz#a1e454b5995560a9c1e0d537dfc15061fd2687e1" + integrity sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-regex" "^7.0.0" + +"@babel/plugin-transform-template-literals@^7.4.4": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz#9d28fea7bbce637fb7612a0750989d8321d4bcb0" + integrity sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g== + dependencies: + "@babel/helper-annotate-as-pure" "^7.0.0" + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-typeof-symbol@^7.2.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz#117d2bcec2fbf64b4b59d1f9819894682d29f2b2" + integrity sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + +"@babel/plugin-transform-typescript@^7.6.0": + version "7.6.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.6.0.tgz#48d78405f1aa856ebeea7288a48a19ed8da377a6" + integrity sha512-yzw7EopOOr6saONZ3KA3lpizKnWRTe+rfBqg4AmQbSow7ik7fqmzrfIqt053osLwLE2AaTqGinLM2tl6+M/uog== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.6.0" + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-syntax-typescript" "^7.2.0" + +"@babel/plugin-transform-unicode-regex@^7.6.2": + version "7.6.2" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.6.2.tgz#b692aad888a7e8d8b1b214be6b9dc03d5031f698" + integrity sha512-orZI6cWlR3nk2YmYdb0gImrgCUwb5cBUwjf6Ks6dvNVvXERkwtJWOQaEOjPiu0Gu1Tq6Yq/hruCZZOOi9F34Dw== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/helper-regex" "^7.4.4" + regexpu-core "^4.6.0" + +"@babel/preset-env@^7.1.6": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.6.2.tgz#abbb3ed785c7fe4220d4c82a53621d71fc0c75d3" + integrity sha512-Ru7+mfzy9M1/YTEtlDS8CD45jd22ngb9tXnn64DvQK3ooyqSw9K4K9DUWmYknTTVk4TqygL9dqCrZgm1HMea/Q== + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-async-generator-functions" "^7.2.0" + "@babel/plugin-proposal-dynamic-import" "^7.5.0" + "@babel/plugin-proposal-json-strings" "^7.2.0" + "@babel/plugin-proposal-object-rest-spread" "^7.6.2" + "@babel/plugin-proposal-optional-catch-binding" "^7.2.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.6.2" + "@babel/plugin-syntax-async-generators" "^7.2.0" + "@babel/plugin-syntax-dynamic-import" "^7.2.0" + "@babel/plugin-syntax-json-strings" "^7.2.0" + "@babel/plugin-syntax-object-rest-spread" "^7.2.0" + "@babel/plugin-syntax-optional-catch-binding" "^7.2.0" + "@babel/plugin-transform-arrow-functions" "^7.2.0" + "@babel/plugin-transform-async-to-generator" "^7.5.0" + "@babel/plugin-transform-block-scoped-functions" "^7.2.0" + "@babel/plugin-transform-block-scoping" "^7.6.2" + "@babel/plugin-transform-classes" "^7.5.5" + "@babel/plugin-transform-computed-properties" "^7.2.0" + "@babel/plugin-transform-destructuring" "^7.6.0" + "@babel/plugin-transform-dotall-regex" "^7.6.2" + "@babel/plugin-transform-duplicate-keys" "^7.5.0" + "@babel/plugin-transform-exponentiation-operator" "^7.2.0" + "@babel/plugin-transform-for-of" "^7.4.4" + "@babel/plugin-transform-function-name" "^7.4.4" + "@babel/plugin-transform-literals" "^7.2.0" + "@babel/plugin-transform-member-expression-literals" "^7.2.0" + "@babel/plugin-transform-modules-amd" "^7.5.0" + "@babel/plugin-transform-modules-commonjs" "^7.6.0" + "@babel/plugin-transform-modules-systemjs" "^7.5.0" + "@babel/plugin-transform-modules-umd" "^7.2.0" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.6.2" + "@babel/plugin-transform-new-target" "^7.4.4" + "@babel/plugin-transform-object-super" "^7.5.5" + "@babel/plugin-transform-parameters" "^7.4.4" + "@babel/plugin-transform-property-literals" "^7.2.0" + "@babel/plugin-transform-regenerator" "^7.4.5" + "@babel/plugin-transform-reserved-words" "^7.2.0" + "@babel/plugin-transform-shorthand-properties" "^7.2.0" + "@babel/plugin-transform-spread" "^7.6.2" + "@babel/plugin-transform-sticky-regex" "^7.2.0" + "@babel/plugin-transform-template-literals" "^7.4.4" + "@babel/plugin-transform-typeof-symbol" "^7.2.0" + "@babel/plugin-transform-unicode-regex" "^7.6.2" + "@babel/types" "^7.6.0" + browserslist "^4.6.0" + core-js-compat "^3.1.1" + invariant "^2.2.2" + js-levenshtein "^1.1.3" + semver "^5.5.0" + +"@babel/preset-flow@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@babel/preset-flow/-/preset-flow-7.0.0.tgz#afd764835d9535ec63d8c7d4caf1c06457263da2" + integrity sha512-bJOHrYOPqJZCkPVbG1Lot2r5OSsB+iUOaxiHdlOeB1yPWS6evswVHwvkDLZ54WTaTRIk89ds0iHmGZSnxlPejQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-transform-flow-strip-types" "^7.0.0" + +"@babel/preset-typescript@^7.1.0": + version 
"7.6.0" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.6.0.tgz#25768cb8830280baf47c45ab1a519a9977498c98" + integrity sha512-4xKw3tTcCm0qApyT6PqM9qniseCE79xGHiUnNdKGdxNsGUc2X7WwZybqIpnTmoukg3nhPceI5KPNzNqLNeIJww== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-transform-typescript" "^7.6.0" + +"@babel/register@^7.0.0": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.6.2.tgz#25765a922202cb06f8bdac5a3b1e70cd6bf3dd45" + integrity sha512-xgZk2LRZvt6i2SAUWxc7ellk4+OYRgS3Zpsnr13nMS1Qo25w21Uu8o6vTOAqNaxiqrnv30KTYzh9YWY2k21CeQ== + dependencies: + find-cache-dir "^2.0.0" + lodash "^4.17.13" + mkdirp "^0.5.1" + pirates "^4.0.0" + source-map-support "^0.5.9" + +"@babel/template@^7.1.0", "@babel/template@^7.4.4", "@babel/template@^7.6.0": + version "7.6.0" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.6.0.tgz#7f0159c7f5012230dad64cca42ec9bdb5c9536e6" + integrity sha512-5AEH2EXD8euCk446b7edmgFdub/qfH1SN6Nii3+fyXP807QRx9Q73A2N5hNwRRslC2H9sNzaFhsPubkS4L8oNQ== + dependencies: + "@babel/code-frame" "^7.0.0" + "@babel/parser" "^7.6.0" + "@babel/types" "^7.6.0" + +"@babel/traverse@^7.1.0", "@babel/traverse@^7.4.4", "@babel/traverse@^7.5.5", "@babel/traverse@^7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.6.2.tgz#b0e2bfd401d339ce0e6c05690206d1e11502ce2c" + integrity sha512-8fRE76xNwNttVEF2TwxJDGBLWthUkHWSldmfuBzVRmEDWOtu4XdINTgN7TDWzuLg4bbeIMLvfMFD9we5YcWkRQ== + dependencies: + "@babel/code-frame" "^7.5.5" + "@babel/generator" "^7.6.2" + "@babel/helper-function-name" "^7.1.0" + "@babel/helper-split-export-declaration" "^7.4.4" + "@babel/parser" "^7.6.2" + "@babel/types" "^7.6.0" + debug "^4.1.0" + globals "^11.1.0" + lodash "^4.17.13" + +"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5", "@babel/types@^7.6.0": + version "7.6.1" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.6.1.tgz#53abf3308add3ac2a2884d539151c57c4b3ac648" + integrity sha512-X7gdiuaCmA0uRjCmRtYJNAVCc/q+5xSgsfKJHqMN4iNLILX39677fJE1O40arPMh0TTtS9ItH67yre6c7k6t0g== + dependencies: + esutils "^2.0.2" + lodash "^4.17.13" + to-fast-properties "^2.0.0" + abab@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.3.tgz#b81de5f7274ec4e756d797cd834f303642724e5d" + version "1.0.4" + resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.4.tgz#5faad9c2c07f60dd76770f71cf025b62a63cfd4e" + integrity sha1-X6rZwsB/YN12dw9xzwJbYqY8/U4= abbrev@1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== acorn-globals@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" + integrity sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8= dependencies: acorn "^4.0.4" acorn@^4.0.4: version "4.0.13" resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" + integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c= -ajv@^4.9.1: - version "4.11.8" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" - dependencies: - co "^4.6.0" - json-stable-stringify 
"^1.0.1" - -align-text@^0.1.1, align-text@^0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" +ajv@^6.5.5: + version "6.10.2" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.10.2.tgz#d3cea04d6b017b2894ad69040fec8b623eb4bd52" + integrity sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw== dependencies: - kind-of "^3.0.2" - longest "^1.0.1" - repeat-string "^1.5.2" - -amdefine@>=0.0.4: - version "1.0.1" - resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" + fast-deep-equal "^2.0.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" ansi-bgblack@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgblack/-/ansi-bgblack-0.1.1.tgz#a68ba5007887701b6aafbe3fa0dadfdfa8ee3ca2" + integrity sha1-poulAHiHcBtqr74/oNrf36juPKI= dependencies: ansi-wrap "0.1.0" ansi-bgblue@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgblue/-/ansi-bgblue-0.1.1.tgz#67bdc04edc9b9b5278969da196dea3d75c8c3613" + integrity sha1-Z73ATtybm1J4lp2hlt6j11yMNhM= dependencies: ansi-wrap "0.1.0" ansi-bgcyan@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgcyan/-/ansi-bgcyan-0.1.1.tgz#58489425600bde9f5507068dd969ebfdb50fe768" + integrity sha1-WEiUJWAL3p9VBwaN2Wnr/bUP52g= dependencies: ansi-wrap "0.1.0" ansi-bggreen@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bggreen/-/ansi-bggreen-0.1.1.tgz#4e3191248529943f4321e96bf131d1c13816af49" + integrity sha1-TjGRJIUplD9DIelr8THRwTgWr0k= dependencies: ansi-wrap "0.1.0" ansi-bgmagenta@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgmagenta/-/ansi-bgmagenta-0.1.1.tgz#9b28432c076eaa999418672a3efbe19391c2c7a1" + integrity sha1-myhDLAduqpmUGGcqPvvhk5HCx6E= dependencies: ansi-wrap "0.1.0" ansi-bgred@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgred/-/ansi-bgred-0.1.1.tgz#a76f92838382ba43290a6c1778424f984d6f1041" + integrity sha1-p2+Sg4OCukMpCmwXeEJPmE1vEEE= dependencies: ansi-wrap "0.1.0" ansi-bgwhite@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgwhite/-/ansi-bgwhite-0.1.1.tgz#6504651377a58a6ececd0331994e480258e11ba8" + integrity sha1-ZQRlE3elim7OzQMxmU5IAljhG6g= dependencies: ansi-wrap "0.1.0" ansi-bgyellow@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bgyellow/-/ansi-bgyellow-0.1.1.tgz#c3fe2eb08cd476648029e6874d15a0b38f61d44f" + integrity sha1-w/4usIzUdmSAKeaHTRWgs49h1E8= dependencies: ansi-wrap "0.1.0" ansi-black@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-black/-/ansi-black-0.1.1.tgz#f6185e889360b2545a1ec50c0bf063fc43032453" + integrity sha1-9hheiJNgslRaHsUMC/Bj/EMDJFM= dependencies: ansi-wrap "0.1.0" ansi-blue@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-blue/-/ansi-blue-0.1.1.tgz#15b804990e92fc9ca8c5476ce8f699777c21edbf" + integrity sha1-FbgEmQ6S/JyoxUds6PaZd3wh7b8= dependencies: ansi-wrap "0.1.0" ansi-bold@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-bold/-/ansi-bold-0.1.1.tgz#3e63950af5acc2ae2e670e6f67deb115d1a5f505" + integrity sha1-PmOVCvWswq4uZw5vZ96xFdGl9QU= dependencies: ansi-wrap "0.1.0" ansi-colors@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-0.2.0.tgz#72c31de2a0d9a2ccd0cac30cc9823eeb2f6434b5" + integrity sha1-csMd4qDZoszQysMMyYI+6y9kNLU= dependencies: ansi-bgblack "^0.1.1" 
ansi-bgblue "^0.1.1" @@ -140,300 +860,357 @@ ansi-colors@^0.2.0: ansi-cyan@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-cyan/-/ansi-cyan-0.1.1.tgz#538ae528af8982f28ae30d86f2f17456d2609873" + integrity sha1-U4rlKK+JgvKK4w2G8vF0VtJgmHM= dependencies: ansi-wrap "0.1.0" ansi-dim@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-dim/-/ansi-dim-0.1.1.tgz#40de4c603aa8086d8e7a86b8ff998d5c36eefd6c" + integrity sha1-QN5MYDqoCG2Oeoa4/5mNXDbu/Ww= dependencies: ansi-wrap "0.1.0" ansi-escapes@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" + integrity sha1-06ioOzGapneTZisT52HHkRQiMG4= ansi-gray@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-gray/-/ansi-gray-0.1.1.tgz#2962cf54ec9792c48510a3deb524436861ef7251" + integrity sha1-KWLPVOyXksSFEKPetSRDaGHvclE= dependencies: ansi-wrap "0.1.0" ansi-green@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-green/-/ansi-green-0.1.1.tgz#8a5d9a979e458d57c40e33580b37390b8e10d0f7" + integrity sha1-il2al55FjVfEDjNYCzc5C44Q0Pc= dependencies: ansi-wrap "0.1.0" ansi-grey@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-grey/-/ansi-grey-0.1.1.tgz#59d98b6ac2ba19f8a51798e9853fba78339a33c1" + integrity sha1-WdmLasK6GfilF5jphT+6eDOaM8E= dependencies: ansi-wrap "0.1.0" ansi-hidden@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-hidden/-/ansi-hidden-0.1.1.tgz#ed6a4c498d2bb7cbb289dbf2a8d1dcc8567fae0f" + integrity sha1-7WpMSY0rt8uyidvyqNHcyFZ/rg8= dependencies: ansi-wrap "0.1.0" ansi-inverse@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-inverse/-/ansi-inverse-0.1.1.tgz#b6af45826fe826bfb528a6c79885794355ccd269" + integrity sha1-tq9Fgm/oJr+1KKbHmIV5Q1XM0mk= dependencies: ansi-wrap "0.1.0" ansi-italic@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-italic/-/ansi-italic-0.1.1.tgz#104743463f625c142a036739cf85eda688986f23" + integrity sha1-EEdDRj9iXBQqA2c5z4XtpoiYbyM= dependencies: ansi-wrap "0.1.0" ansi-magenta@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-magenta/-/ansi-magenta-0.1.1.tgz#063b5ba16fb3f23e1cfda2b07c0a89de11e430ae" + integrity sha1-BjtboW+z8j4c/aKwfAqJ3hHkMK4= dependencies: ansi-wrap "0.1.0" ansi-red@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-red/-/ansi-red-0.1.1.tgz#8c638f9d1080800a353c9c28c8a81ca4705d946c" + integrity sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw= dependencies: ansi-wrap "0.1.0" ansi-regex@^2.0.0, ansi-regex@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= ansi-reset@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-reset/-/ansi-reset-0.1.1.tgz#e7e71292c3c7ddcd4d62ef4a6c7c05980911c3b7" + integrity sha1-5+cSksPH3c1NYu9KbHwFmAkRw7c= dependencies: ansi-wrap "0.1.0" ansi-strikethrough@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-strikethrough/-/ansi-strikethrough-0.1.1.tgz#d84877140b2cff07d1c93ebce69904f68885e568" + integrity sha1-2Eh3FAss/wfRyT685pkE9oiF5Wg= dependencies: ansi-wrap "0.1.0" ansi-styles@^2.2.1: version "2.2.1" resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= -ansi-styles@^3.0.0, ansi-styles@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.0.tgz#c159b8d5be0f9e5a6f346dab94f16ce022161b88" +ansi-styles@^3.0.0, ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" -ansi-styles@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.0.0.tgz#cb102df1c56f5123eab8b67cd7b98027a0279178" - ansi-underline@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-underline/-/ansi-underline-0.1.1.tgz#dfc920f4c97b5977ea162df8ffb988308aaa71a4" + integrity sha1-38kg9Ml7WXfqFi34/7mIMIqqcaQ= dependencies: ansi-wrap "0.1.0" ansi-white@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-white/-/ansi-white-0.1.1.tgz#9c77b7c193c5ee992e6011d36ec4c921b4578944" + integrity sha1-nHe3wZPF7pkuYBHTbsTJIbRXiUQ= dependencies: ansi-wrap "0.1.0" ansi-wrap@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf" + integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768= ansi-yellow@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ansi-yellow/-/ansi-yellow-0.1.1.tgz#cb9356f2f46c732f0e3199e6102955a77da83c1d" + integrity sha1-y5NW8vRscy8OMZnmEClVp32oPB0= dependencies: ansi-wrap "0.1.0" anymatch@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.0.tgz#a3e52fa39168c825ff57b0248126ce5a8ff95507" + version "1.3.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.2.tgz#553dcb8f91e3c889845dfdba34c77721b90b9d7a" + integrity sha512-0XNayC8lTHQ2OI8aljNCN3sSx6hsr/1+rlcDAotXJR7C1oZZHCNsfpbKwMjRA3Uqb5tF1Rae2oloTr4xpq+WjA== dependencies: - arrify "^1.0.0" micromatch "^2.1.5" + normalize-path "^2.0.0" append-transform@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" + integrity sha1-126/jKlNJ24keja61EpLdKthGZE= dependencies: default-require-extensions "^1.0.0" aproba@^1.0.3: - version "1.1.2" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.2.tgz#45c6629094de4e96f693ef7eab74ae079c240fc1" + version "1.2.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== are-we-there-yet@~1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d" + version "1.1.5" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" + integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== dependencies: delegates "^1.0.0" readable-stream "^2.0.6" argparse@^1.0.7: - version "1.0.9" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" + version "1.0.10" + resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" arr-diff@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" + integrity sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8= dependencies: arr-flatten "^1.0.1" -arr-flatten@^1.0.1, arr-flatten@^1.0.3, arr-flatten@^1.1.0: +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= + +arr-flatten@^1.0.1, arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-swap@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arr-swap/-/arr-swap-1.0.1.tgz#147590ed65fc815bc07fef0997c2e5823d643534" + integrity sha1-FHWQ7WX8gVvAf+8Jl8Llgj1kNTQ= dependencies: is-number "^3.0.0" +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= + array-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" + integrity sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM= array-unique@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" + integrity sha1-odl8yvy8JiXMcPrc6zalDFiwGlM= + +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= -arrify@^1.0.0, arrify@^1.0.1: +arrify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= asn1@~0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" + version "0.2.4" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" + integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== + dependencies: + safer-buffer "~2.1.0" assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= -assert-plus@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= -ast-types@0.10.1: - version "0.10.1" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.10.1.tgz#f52fca9715579a14f841d67d7f8d25432ab6a3dd" +ast-types@0.13.2: + version "0.13.2" + resolved 
"https://registry.yarnpkg.com/ast-types/-/ast-types-0.13.2.tgz#df39b677a911a83f3a049644fb74fdded23cea48" + integrity sha512-uWMHxJxtfj/1oZClOxDEV1sQ1HCDkA4MG8Gr69KKeBjEVH0R84WlejZ0y2DcwyBlpAEMltmVYkVgqfLFb2oyiA== async-each@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" - -async@^1.4.0, async@^1.5.0: - version "1.5.2" - resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" + version "1.0.3" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" + integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== async@^2.1.4: - version "2.5.0" - resolved "https://registry.yarnpkg.com/async/-/async-2.5.0.tgz#843190fd6b7357a0b9e1c956edddd5ec8462b54d" + version "2.6.3" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" + integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== dependencies: - lodash "^4.14.0" + lodash "^4.17.14" asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= -aws-sign2@~0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" +atob@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== -aws4@^1.2.1: - version "1.6.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= + +aws4@^1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" + integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== babel-cli@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-cli/-/babel-cli-6.24.1.tgz#207cd705bba61489b2ea41b5312341cf6aca2283" - dependencies: - babel-core "^6.24.1" - babel-polyfill "^6.23.0" - babel-register "^6.24.1" - babel-runtime "^6.22.0" - commander "^2.8.1" - convert-source-map "^1.1.0" + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-cli/-/babel-cli-6.26.0.tgz#502ab54874d7db88ad00b887a06383ce03d002f1" + integrity sha1-UCq1SHTX24itALiHoGODzgPQAvE= + dependencies: + babel-core "^6.26.0" + babel-polyfill "^6.26.0" + babel-register "^6.26.0" + babel-runtime "^6.26.0" + commander "^2.11.0" + convert-source-map "^1.5.0" fs-readdir-recursive "^1.0.0" - glob "^7.0.0" - lodash "^4.2.0" - output-file-sync "^1.1.0" - path-is-absolute "^1.0.0" + glob "^7.1.2" + lodash "^4.17.4" + output-file-sync "^1.1.2" + path-is-absolute "^1.0.1" slash "^1.0.0" - source-map "^0.5.0" - v8flags "^2.0.10" + source-map "^0.5.6" + v8flags "^2.1.1" optionalDependencies: chokidar "^1.6.1" -babel-code-frame@^6.22.0: - version "6.22.0" - resolved 
"https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4" +babel-code-frame@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + integrity sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= dependencies: - chalk "^1.1.0" + chalk "^1.1.3" esutils "^2.0.2" - js-tokens "^3.0.0" + js-tokens "^3.0.2" -babel-core@^6.0.0, babel-core@^6.24.1: - version "6.25.0" - resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.25.0.tgz#7dd42b0463c742e9d5296deb3ec67a9322dad729" +babel-core@^6.0.0, babel-core@^6.26.0: + version "6.26.3" + resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.3.tgz#b2e2f09e342d0f0c88e2f02e067794125e75c207" + integrity sha512-6jyFLuDmeidKmUEb3NM+/yawG0M2bDZ9Z1qbZP59cyHLz8kYGKYwpJP0UwUKKUiTRNvxfLesJnTedqczP7cTDA== dependencies: - babel-code-frame "^6.22.0" - babel-generator "^6.25.0" + babel-code-frame "^6.26.0" + babel-generator "^6.26.0" babel-helpers "^6.24.1" babel-messages "^6.23.0" - babel-register "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.25.0" - babel-traverse "^6.25.0" - babel-types "^6.25.0" - babylon "^6.17.2" - convert-source-map "^1.1.0" - debug "^2.1.1" - json5 "^0.5.0" - lodash "^4.2.0" - minimatch "^3.0.2" - path-is-absolute "^1.0.0" - private "^0.1.6" + babel-register "^6.26.0" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + convert-source-map "^1.5.1" + debug "^2.6.9" + json5 "^0.5.1" + lodash "^4.17.4" + minimatch "^3.0.4" + path-is-absolute "^1.0.1" + private "^0.1.8" slash "^1.0.0" - source-map "^0.5.0" + source-map "^0.5.7" + +babel-core@^7.0.0-bridge.0: + version "7.0.0-bridge.0" + resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-7.0.0-bridge.0.tgz#95a492ddd90f9b4e9a4a1da14eb335b87b634ece" + integrity sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg== -babel-generator@^6.18.0, babel-generator@^6.25.0: - version "6.25.0" - resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.25.0.tgz#33a1af70d5f2890aeb465a4a7793c1df6a9ea9fc" +babel-generator@^6.18.0, babel-generator@^6.26.0: + version "6.26.1" + resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" + integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== dependencies: babel-messages "^6.23.0" - babel-runtime "^6.22.0" - babel-types "^6.25.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" detect-indent "^4.0.0" jsesc "^1.3.0" - lodash "^4.2.0" - source-map "^0.5.0" + lodash "^4.17.4" + source-map "^0.5.7" trim-right "^1.0.1" -babel-helper-bindify-decorators@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-bindify-decorators/-/babel-helper-bindify-decorators-6.24.1.tgz#14c19e5f142d7b47f19a52431e52b1ccbc40a330" - dependencies: - babel-runtime "^6.22.0" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - babel-helper-builder-binary-assignment-operator-visitor@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz#cce4517ada356f4220bcae8a02c2b346f9a56664" + integrity sha1-zORReto1b0IgvK6KAsKzRvmlZmQ= dependencies: babel-helper-explode-assignable-expression "^6.24.1" babel-runtime 
"^6.22.0" @@ -442,6 +1219,7 @@ babel-helper-builder-binary-assignment-operator-visitor@^6.24.1: babel-helper-call-delegate@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz#ece6aacddc76e41c3461f88bfc575bd0daa2df8d" + integrity sha1-7Oaqzdx25Bw0YfiL/Fdb0Nqi340= dependencies: babel-helper-hoist-variables "^6.24.1" babel-runtime "^6.22.0" @@ -449,34 +1227,28 @@ babel-helper-call-delegate@^6.24.1: babel-types "^6.24.1" babel-helper-define-map@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-define-map/-/babel-helper-define-map-6.24.1.tgz#7a9747f258d8947d32d515f6aa1c7bd02204a080" + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz#a5f56dab41a25f97ecb498c7ebaca9819f95be5f" + integrity sha1-pfVtq0GiX5fstJjH66ypgZ+Vvl8= dependencies: babel-helper-function-name "^6.24.1" - babel-runtime "^6.22.0" - babel-types "^6.24.1" - lodash "^4.2.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" babel-helper-explode-assignable-expression@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz#f25b82cf7dc10433c55f70592d5746400ac22caa" + integrity sha1-8luCz33BBDPFX3BZLVdGQArCLKo= dependencies: babel-runtime "^6.22.0" babel-traverse "^6.24.1" babel-types "^6.24.1" -babel-helper-explode-class@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-explode-class/-/babel-helper-explode-class-6.24.1.tgz#7dc2a3910dee007056e1e31d640ced3d54eaa9eb" - dependencies: - babel-helper-bindify-decorators "^6.24.1" - babel-runtime "^6.22.0" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - babel-helper-function-name@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz#d3475b8c03ed98242a25b48351ab18399d3580a9" + integrity sha1-00dbjAPtmCQqJbSDUasYOZ01gKk= dependencies: babel-helper-get-function-arity "^6.24.1" babel-runtime "^6.22.0" @@ -487,6 +1259,7 @@ babel-helper-function-name@^6.24.1: babel-helper-get-function-arity@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz#8f7782aa93407c41d3aa50908f89b031b1b6853d" + integrity sha1-j3eCqpNAfEHTqlCQj4mwMbG2hT0= dependencies: babel-runtime "^6.22.0" babel-types "^6.24.1" @@ -494,6 +1267,7 @@ babel-helper-get-function-arity@^6.24.1: babel-helper-hoist-variables@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz#1ecb27689c9d25513eadbc9914a73f5408be7a76" + integrity sha1-HssnaJydJVE+rbyZFKc/VAi+enY= dependencies: babel-runtime "^6.22.0" babel-types "^6.24.1" @@ -501,21 +1275,24 @@ babel-helper-hoist-variables@^6.24.1: babel-helper-optimise-call-expression@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz#f7a13427ba9f73f8f4fa993c54a97882d1244257" + integrity sha1-96E0J7qfc/j0+pk8VKl4gtEkQlc= dependencies: babel-runtime "^6.22.0" babel-types "^6.24.1" babel-helper-regex@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-helper-regex/-/babel-helper-regex-6.24.1.tgz#d36e22fab1008d79d88648e32116868128456ce8" + version "6.26.0" + resolved 
"https://registry.yarnpkg.com/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz#325c59f902f82f24b74faceed0363954f6495e72" + integrity sha1-MlxZ+QL4LyS3T6zu0DY5VPZJXnI= dependencies: - babel-runtime "^6.22.0" - babel-types "^6.24.1" - lodash "^4.2.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" babel-helper-remap-async-to-generator@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz#5ec581827ad723fecdd381f1c928390676e4551b" + integrity sha1-XsWBgnrXI/7N04HxySg5BnbkVRs= dependencies: babel-helper-function-name "^6.24.1" babel-runtime "^6.22.0" @@ -526,6 +1303,7 @@ babel-helper-remap-async-to-generator@^6.24.1: babel-helper-replace-supers@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz#bf6dbfe43938d17369a213ca8a8bf74b6a90ab1a" + integrity sha1-v22/5Dk40XNpohPKiov3S2qQqxo= dependencies: babel-helper-optimise-call-expression "^6.24.1" babel-messages "^6.23.0" @@ -537,6 +1315,7 @@ babel-helper-replace-supers@^6.24.1: babel-helpers@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2" + integrity sha1-NHHenK7DiOXIUOWX5Yom3fN2ArI= dependencies: babel-runtime "^6.22.0" babel-template "^6.24.1" @@ -544,6 +1323,7 @@ babel-helpers@^6.24.1: babel-jest@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-20.0.3.tgz#e4a03b13dc10389e140fc645d09ffc4ced301671" + integrity sha1-5KA7E9wQOJ4UD8ZF0J/8TO0wFnE= dependencies: babel-core "^6.0.0" babel-plugin-istanbul "^4.0.0" @@ -552,139 +1332,102 @@ babel-jest@^20.0.3: babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" + integrity sha1-8830cDhYA1sqKVHG7F7fbGLyYw4= dependencies: babel-runtime "^6.22.0" babel-plugin-check-es2015-constants@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz#35157b101426fd2ffd3da3f75c7d1e91835bbf8a" + integrity sha1-NRV7EBQm/S/9PaP3XH0ekYNbv4o= dependencies: babel-runtime "^6.22.0" +babel-plugin-dynamic-import-node@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f" + integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ== + dependencies: + object.assign "^4.1.0" + babel-plugin-istanbul@^4.0.0: - version "4.1.4" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.4.tgz#18dde84bf3ce329fddf3f4103fae921456d8e587" + version "4.1.6" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.6.tgz#36c59b2192efce81c5b378321b74175add1c9a45" + integrity sha512-PWP9FQ1AhZhS01T/4qLSKoHGY/xvkZdVBGlKM/HuxxS3+sC66HhTNR7+MpbO/so/cz/wY94MeSWJuP1hXIPfwQ== dependencies: + babel-plugin-syntax-object-rest-spread "^6.13.0" find-up "^2.1.0" - istanbul-lib-instrument "^1.7.2" - test-exclude "^4.1.1" + istanbul-lib-instrument "^1.10.1" + test-exclude "^4.2.1" babel-plugin-jest-hoist@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-20.0.3.tgz#afedc853bd3f8dc3548ea671fbe69d03cc2c1767" + integrity 
sha1-r+3IU70/jcNUjqZx++adA8wsF2c= babel-plugin-syntax-async-functions@^6.8.0: version "6.13.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95" - -babel-plugin-syntax-async-generators@^6.5.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-generators/-/babel-plugin-syntax-async-generators-6.13.0.tgz#6bc963ebb16eccbae6b92b596eb7f35c342a8b9a" - -babel-plugin-syntax-class-constructor-call@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-constructor-call/-/babel-plugin-syntax-class-constructor-call-6.18.0.tgz#9cb9d39fe43c8600bec8146456ddcbd4e1a76416" - -babel-plugin-syntax-class-properties@^6.8.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz#d7eb23b79a317f8543962c505b827c7d6cac27de" - -babel-plugin-syntax-decorators@^6.13.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-decorators/-/babel-plugin-syntax-decorators-6.13.0.tgz#312563b4dbde3cc806cee3e416cceeaddd11ac0b" - -babel-plugin-syntax-dynamic-import@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz#8d6a26229c83745a9982a441051572caa179b1da" + integrity sha1-ytnK0RkbWtY0vzCuCHI5HgZHvpU= babel-plugin-syntax-exponentiation-operator@^6.8.0: version "6.13.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz#9ee7e8337290da95288201a6a57f4170317830de" - -babel-plugin-syntax-export-extensions@^6.8.0: - version "6.13.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-export-extensions/-/babel-plugin-syntax-export-extensions-6.13.0.tgz#70a1484f0f9089a4e84ad44bac353c95b9b12721" + integrity sha1-nufoM3KQ2pUoggGmpX9BcDF4MN4= babel-plugin-syntax-flow@^6.18.0: version "6.18.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-flow/-/babel-plugin-syntax-flow-6.18.0.tgz#4c3ab20a2af26aa20cd25995c398c4eb70310c8d" + integrity sha1-TDqyCiryaqIM0lmVw5jE63AxDI0= -babel-plugin-syntax-object-rest-spread@^6.8.0: +babel-plugin-syntax-object-rest-spread@^6.13.0: version "6.13.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5" + integrity sha1-/WU28rzhODb/o6VFjEkDpZe7O/U= babel-plugin-syntax-trailing-function-commas@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz#ba0360937f8d06e40180a43fe0d5616fff532cf3" + integrity sha1-ugNgk3+NBuQBgKQ/4NVhb/9TLPM= -babel-plugin-transform-async-generator-functions@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-async-generator-functions/-/babel-plugin-transform-async-generator-functions-6.24.1.tgz#f058900145fd3e9907a6ddf28da59f215258a5db" - dependencies: - babel-helper-remap-async-to-generator "^6.24.1" - babel-plugin-syntax-async-generators "^6.5.0" - babel-runtime "^6.22.0" - -babel-plugin-transform-async-to-generator@^6.22.0, babel-plugin-transform-async-to-generator@^6.24.1: +babel-plugin-transform-async-to-generator@^6.22.0: version "6.24.1" resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz#6536e378aff6cb1d5517ac0e40eb3e9fc8d08761" + integrity sha1-ZTbjeK/2yx1VF6wOQOs+n8jQh2E= dependencies: babel-helper-remap-async-to-generator "^6.24.1" babel-plugin-syntax-async-functions "^6.8.0" babel-runtime "^6.22.0" -babel-plugin-transform-class-constructor-call@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-constructor-call/-/babel-plugin-transform-class-constructor-call-6.24.1.tgz#80dc285505ac067dcb8d6c65e2f6f11ab7765ef9" - dependencies: - babel-plugin-syntax-class-constructor-call "^6.18.0" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-plugin-transform-class-properties@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz#6a79763ea61d33d36f37b611aa9def81a81b46ac" - dependencies: - babel-helper-function-name "^6.24.1" - babel-plugin-syntax-class-properties "^6.8.0" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - -babel-plugin-transform-decorators@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-decorators/-/babel-plugin-transform-decorators-6.24.1.tgz#788013d8f8c6b5222bdf7b344390dfd77569e24d" - dependencies: - babel-helper-explode-class "^6.24.1" - babel-plugin-syntax-decorators "^6.13.0" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-types "^6.24.1" - babel-plugin-transform-es2015-arrow-functions@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz#452692cb711d5f79dc7f85e440ce41b9f244d221" + integrity sha1-RSaSy3EdX3ncf4XkQM5BufJE0iE= dependencies: babel-runtime "^6.22.0" babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz#bbc51b49f964d70cb8d8e0b94e820246ce3a6141" + integrity sha1-u8UbSflk1wy42OC5ToICRs46YUE= dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576" +babel-plugin-transform-es2015-block-scoping@^6.23.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz#d70f5299c1308d05c12f463813b0a09e73b1895f" + integrity sha1-1w9SmcEwjQXBL0Y4E7CgnnOxiV8= dependencies: - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-traverse "^6.24.1" - babel-types "^6.24.1" - lodash "^4.2.0" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" -babel-plugin-transform-es2015-classes@^6.23.0, babel-plugin-transform-es2015-classes@^6.24.1: +babel-plugin-transform-es2015-classes@^6.23.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz#5a4c58a50c9c9461e564b4b2a3bfabc97a2584db" + integrity sha1-WkxYpQyclGHlZLSyo7+ryXolhNs= dependencies: babel-helper-define-map "^6.24.1" 
babel-helper-function-name "^6.24.1" @@ -696,35 +1439,40 @@ babel-plugin-transform-es2015-classes@^6.23.0, babel-plugin-transform-es2015-cla babel-traverse "^6.24.1" babel-types "^6.24.1" -babel-plugin-transform-es2015-computed-properties@^6.22.0, babel-plugin-transform-es2015-computed-properties@^6.24.1: +babel-plugin-transform-es2015-computed-properties@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz#6fe2a8d16895d5634f4cd999b6d3480a308159b3" + integrity sha1-b+Ko0WiV1WNPTNmZttNICjCBWbM= dependencies: babel-runtime "^6.22.0" babel-template "^6.24.1" -babel-plugin-transform-es2015-destructuring@^6.22.0, babel-plugin-transform-es2015-destructuring@^6.23.0: +babel-plugin-transform-es2015-destructuring@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz#997bb1f1ab967f682d2b0876fe358d60e765c56d" + integrity sha1-mXux8auWf2gtKwh2/jWNYOdlxW0= dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-duplicate-keys@^6.22.0, babel-plugin-transform-es2015-duplicate-keys@^6.24.1: +babel-plugin-transform-es2015-duplicate-keys@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz#73eb3d310ca969e3ef9ec91c53741a6f1576423e" + integrity sha1-c+s9MQypaePvnskcU3QabxV2Qj4= dependencies: babel-runtime "^6.22.0" babel-types "^6.24.1" -babel-plugin-transform-es2015-for-of@^6.22.0, babel-plugin-transform-es2015-for-of@^6.23.0: +babel-plugin-transform-es2015-for-of@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz#f47c95b2b613df1d3ecc2fdb7573623c75248691" + integrity sha1-9HyVsrYT3x0+zC/bdXNiPHUkhpE= dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-function-name@^6.22.0, babel-plugin-transform-es2015-function-name@^6.24.1: +babel-plugin-transform-es2015-function-name@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz#834c89853bc36b1af0f3a4c5dbaa94fd8eacaa8b" + integrity sha1-g0yJhTvDaxrw86TF26qU/Y6sqos= dependencies: babel-helper-function-name "^6.24.1" babel-runtime "^6.22.0" @@ -733,52 +1481,59 @@ babel-plugin-transform-es2015-function-name@^6.22.0, babel-plugin-transform-es20 babel-plugin-transform-es2015-literals@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz#4f54a02d6cd66cf915280019a31d31925377ca2e" + integrity sha1-T1SgLWzWbPkVKAAZox0xklN3yi4= dependencies: babel-runtime "^6.22.0" babel-plugin-transform-es2015-modules-amd@^6.22.0, babel-plugin-transform-es2015-modules-amd@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz#3b3e54017239842d6d19c3011c4bd2f00a00d154" + integrity sha1-Oz5UAXI5hC1tGcMBHEvS8AoA0VQ= dependencies: babel-plugin-transform-es2015-modules-commonjs "^6.24.1" babel-runtime "^6.22.0" babel-template "^6.24.1" babel-plugin-transform-es2015-modules-commonjs@^6.23.0, babel-plugin-transform-es2015-modules-commonjs@^6.24.1: - version "6.24.1" - resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.24.1.tgz#d3e310b40ef664a36622200097c6d440298f2bfe" + version "6.26.2" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.2.tgz#58a793863a9e7ca870bdc5a881117ffac27db6f3" + integrity sha512-CV9ROOHEdrjcwhIaJNBGMBCodN+1cfkwtM1SbUHmvyy35KGT7fohbpOxkE2uLz1o6odKK2Ck/tz47z+VqQfi9Q== dependencies: babel-plugin-transform-strict-mode "^6.24.1" - babel-runtime "^6.22.0" - babel-template "^6.24.1" - babel-types "^6.24.1" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-types "^6.26.0" -babel-plugin-transform-es2015-modules-systemjs@^6.23.0, babel-plugin-transform-es2015-modules-systemjs@^6.24.1: +babel-plugin-transform-es2015-modules-systemjs@^6.23.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz#ff89a142b9119a906195f5f106ecf305d9407d23" + integrity sha1-/4mhQrkRmpBhlfXxBuzzBdlAfSM= dependencies: babel-helper-hoist-variables "^6.24.1" babel-runtime "^6.22.0" babel-template "^6.24.1" -babel-plugin-transform-es2015-modules-umd@^6.23.0, babel-plugin-transform-es2015-modules-umd@^6.24.1: +babel-plugin-transform-es2015-modules-umd@^6.23.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz#ac997e6285cd18ed6176adb607d602344ad38468" + integrity sha1-rJl+YoXNGO1hdq22B9YCNErThGg= dependencies: babel-plugin-transform-es2015-modules-amd "^6.24.1" babel-runtime "^6.22.0" babel-template "^6.24.1" -babel-plugin-transform-es2015-object-super@^6.22.0, babel-plugin-transform-es2015-object-super@^6.24.1: +babel-plugin-transform-es2015-object-super@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz#24cef69ae21cb83a7f8603dad021f572eb278f8d" + integrity sha1-JM72muIcuDp/hgPa0CH1cusnj40= dependencies: babel-helper-replace-supers "^6.24.1" babel-runtime "^6.22.0" -babel-plugin-transform-es2015-parameters@^6.23.0, babel-plugin-transform-es2015-parameters@^6.24.1: +babel-plugin-transform-es2015-parameters@^6.23.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz#57ac351ab49caf14a97cd13b09f66fdf0a625f2b" + integrity sha1-V6w1GrScrxSpfNE7CfZv3wpiXys= dependencies: babel-helper-call-delegate "^6.24.1" babel-helper-get-function-arity "^6.24.1" @@ -787,9 +1542,10 @@ babel-plugin-transform-es2015-parameters@^6.23.0, babel-plugin-transform-es2015- babel-traverse "^6.24.1" babel-types "^6.24.1" -babel-plugin-transform-es2015-shorthand-properties@^6.22.0, babel-plugin-transform-es2015-shorthand-properties@^6.24.1: +babel-plugin-transform-es2015-shorthand-properties@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz#24f875d6721c87661bbd99a4622e51f14de38aa0" + integrity sha1-JPh11nIch2YbvZmkYi5R8U3jiqA= dependencies: babel-runtime "^6.22.0" babel-types "^6.24.1" @@ -797,12 +1553,14 @@ babel-plugin-transform-es2015-shorthand-properties@^6.22.0, babel-plugin-transfo babel-plugin-transform-es2015-spread@^6.22.0: version "6.22.0" resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz#d6d68a99f89aedc4536c81a542e8dd9f1746f8d1" + integrity sha1-1taKmfia7cRTbIGlQujdnxdG+NE= dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-sticky-regex@^6.22.0, babel-plugin-transform-es2015-sticky-regex@^6.24.1: +babel-plugin-transform-es2015-sticky-regex@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz#00c1cdb1aca71112cdf0cf6126c2ed6b457ccdbc" + integrity sha1-AMHNsaynERLN8M9hJsLta0V8zbw= dependencies: babel-helper-regex "^6.24.1" babel-runtime "^6.22.0" @@ -811,76 +1569,71 @@ babel-plugin-transform-es2015-sticky-regex@^6.22.0, babel-plugin-transform-es201 babel-plugin-transform-es2015-template-literals@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz#a84b3450f7e9f8f1f6839d6d687da84bb1236d8d" + integrity sha1-qEs0UPfp+PH2g51taH2oS7EjbY0= dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-typeof-symbol@^6.22.0, babel-plugin-transform-es2015-typeof-symbol@^6.23.0: +babel-plugin-transform-es2015-typeof-symbol@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz#dec09f1cddff94b52ac73d505c84df59dcceb372" + integrity sha1-3sCfHN3/lLUqxz1QXITfWdzOs3I= dependencies: babel-runtime "^6.22.0" -babel-plugin-transform-es2015-unicode-regex@^6.22.0, babel-plugin-transform-es2015-unicode-regex@^6.24.1: +babel-plugin-transform-es2015-unicode-regex@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz#d38b12f42ea7323f729387f18a7c5ae1faeb35e9" + integrity sha1-04sS9C6nMj9yk4fxinxa4frrNek= dependencies: babel-helper-regex "^6.24.1" babel-runtime "^6.22.0" regexpu-core "^2.0.0" -babel-plugin-transform-exponentiation-operator@^6.22.0, babel-plugin-transform-exponentiation-operator@^6.24.1: +babel-plugin-transform-exponentiation-operator@^6.22.0: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz#2ab0c9c7f3098fa48907772bb813fe41e8de3a0e" + integrity sha1-KrDJx/MJj6SJB3cruBP+QejeOg4= dependencies: babel-helper-builder-binary-assignment-operator-visitor "^6.24.1" babel-plugin-syntax-exponentiation-operator "^6.8.0" babel-runtime "^6.22.0" -babel-plugin-transform-export-extensions@^6.22.0: - version "6.22.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-export-extensions/-/babel-plugin-transform-export-extensions-6.22.0.tgz#53738b47e75e8218589eea946cbbd39109bbe653" - dependencies: - babel-plugin-syntax-export-extensions "^6.8.0" - babel-runtime "^6.22.0" - -babel-plugin-transform-flow-strip-types@^6.22.0, babel-plugin-transform-flow-strip-types@^6.8.0: +babel-plugin-transform-flow-strip-types@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-plugin-transform-flow-strip-types/-/babel-plugin-transform-flow-strip-types-6.22.0.tgz#84cb672935d43714fdc32bce84568d87441cf7cf" + integrity sha1-hMtnKTXUNxT9wyvOhFaNh0Qc988= dependencies: babel-plugin-syntax-flow "^6.18.0" babel-runtime "^6.22.0" -babel-plugin-transform-object-rest-spread@^6.22.0: - version "6.23.0" - 
resolved "https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.23.0.tgz#875d6bc9be761c58a2ae3feee5dc4895d8c7f921" +babel-plugin-transform-regenerator@^6.22.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz#e0703696fbde27f0a3efcacf8b4dca2f7b3a8f2f" + integrity sha1-4HA2lvveJ/Cj78rPi03KL3s6jy8= dependencies: - babel-plugin-syntax-object-rest-spread "^6.8.0" - babel-runtime "^6.22.0" - -babel-plugin-transform-regenerator@^6.22.0, babel-plugin-transform-regenerator@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.24.1.tgz#b8da305ad43c3c99b4848e4fe4037b770d23c418" - dependencies: - regenerator-transform "0.9.11" + regenerator-transform "^0.10.0" babel-plugin-transform-strict-mode@^6.24.1: version "6.24.1" resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758" + integrity sha1-1fr3qleKZbvlkc9e2uBKDGcCB1g= dependencies: babel-runtime "^6.22.0" babel-types "^6.24.1" -babel-polyfill@^6.23.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.23.0.tgz#8364ca62df8eafb830499f699177466c3b03499d" +babel-polyfill@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.26.0.tgz#379937abc67d7895970adc621f284cd966cf2153" + integrity sha1-N5k3q8Z9eJWXCtxiHyhM2WbPIVM= dependencies: - babel-runtime "^6.22.0" - core-js "^2.4.0" - regenerator-runtime "^0.10.0" + babel-runtime "^6.26.0" + core-js "^2.5.0" + regenerator-runtime "^0.10.5" babel-preset-env@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.6.0.tgz#2de1c782a780a0a5d605d199c957596da43c44e4" + version "1.7.0" + resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.7.0.tgz#dea79fa4ebeb883cd35dab07e260c1c9c04df77a" + integrity sha512-9OR2afuKDneX2/q2EurSftUYM0xGu4O2D9adAhVfADDhrYDaxXV0rBbevVYoY9n6nyX1PmQW/0jtpJvUNr9CHg== dependencies: babel-plugin-check-es2015-constants "^6.22.0" babel-plugin-syntax-trailing-function-commas "^6.22.0" @@ -909,163 +1662,120 @@ babel-preset-env@^1.6.0: babel-plugin-transform-es2015-unicode-regex "^6.22.0" babel-plugin-transform-exponentiation-operator "^6.22.0" babel-plugin-transform-regenerator "^6.22.0" - browserslist "^2.1.2" + browserslist "^3.2.6" invariant "^2.2.2" semver "^5.3.0" -babel-preset-es2015@^6.9.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-preset-es2015/-/babel-preset-es2015-6.24.1.tgz#d44050d6bc2c9feea702aaf38d727a0210538939" - dependencies: - babel-plugin-check-es2015-constants "^6.22.0" - babel-plugin-transform-es2015-arrow-functions "^6.22.0" - babel-plugin-transform-es2015-block-scoped-functions "^6.22.0" - babel-plugin-transform-es2015-block-scoping "^6.24.1" - babel-plugin-transform-es2015-classes "^6.24.1" - babel-plugin-transform-es2015-computed-properties "^6.24.1" - babel-plugin-transform-es2015-destructuring "^6.22.0" - babel-plugin-transform-es2015-duplicate-keys "^6.24.1" - babel-plugin-transform-es2015-for-of "^6.22.0" - babel-plugin-transform-es2015-function-name "^6.24.1" - babel-plugin-transform-es2015-literals "^6.22.0" - babel-plugin-transform-es2015-modules-amd "^6.24.1" - babel-plugin-transform-es2015-modules-commonjs "^6.24.1" - 
babel-plugin-transform-es2015-modules-systemjs "^6.24.1" - babel-plugin-transform-es2015-modules-umd "^6.24.1" - babel-plugin-transform-es2015-object-super "^6.24.1" - babel-plugin-transform-es2015-parameters "^6.24.1" - babel-plugin-transform-es2015-shorthand-properties "^6.24.1" - babel-plugin-transform-es2015-spread "^6.22.0" - babel-plugin-transform-es2015-sticky-regex "^6.24.1" - babel-plugin-transform-es2015-template-literals "^6.22.0" - babel-plugin-transform-es2015-typeof-symbol "^6.22.0" - babel-plugin-transform-es2015-unicode-regex "^6.24.1" - babel-plugin-transform-regenerator "^6.24.1" - babel-preset-flow@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-preset-flow/-/babel-preset-flow-6.23.0.tgz#e71218887085ae9a24b5be4169affb599816c49d" + integrity sha1-5xIYiHCFrpoktb5Baa/7WZgWxJ0= dependencies: babel-plugin-transform-flow-strip-types "^6.22.0" babel-preset-jest@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-20.0.3.tgz#cbacaadecb5d689ca1e1de1360ebfc66862c178a" + integrity sha1-y6yq3stdaJyh4d4TYOv8ZoYsF4o= dependencies: babel-plugin-jest-hoist "^20.0.3" -babel-preset-stage-1@^6.5.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-preset-stage-1/-/babel-preset-stage-1-6.24.1.tgz#7692cd7dcd6849907e6ae4a0a85589cfb9e2bfb0" - dependencies: - babel-plugin-transform-class-constructor-call "^6.24.1" - babel-plugin-transform-export-extensions "^6.22.0" - babel-preset-stage-2 "^6.24.1" - -babel-preset-stage-2@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-preset-stage-2/-/babel-preset-stage-2-6.24.1.tgz#d9e2960fb3d71187f0e64eec62bc07767219bdc1" +babel-register@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071" + integrity sha1-btAhFz4vy0htestFxgCahW9kcHE= dependencies: - babel-plugin-syntax-dynamic-import "^6.18.0" - babel-plugin-transform-class-properties "^6.24.1" - babel-plugin-transform-decorators "^6.24.1" - babel-preset-stage-3 "^6.24.1" - -babel-preset-stage-3@^6.24.1: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-preset-stage-3/-/babel-preset-stage-3-6.24.1.tgz#836ada0a9e7a7fa37cb138fb9326f87934a48395" - dependencies: - babel-plugin-syntax-trailing-function-commas "^6.22.0" - babel-plugin-transform-async-generator-functions "^6.24.1" - babel-plugin-transform-async-to-generator "^6.24.1" - babel-plugin-transform-exponentiation-operator "^6.24.1" - babel-plugin-transform-object-rest-spread "^6.22.0" - -babel-register@^6.24.1, babel-register@^6.9.0: - version "6.24.1" - resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.24.1.tgz#7e10e13a2f71065bdfad5a1787ba45bca6ded75f" - dependencies: - babel-core "^6.24.1" - babel-runtime "^6.22.0" - core-js "^2.4.0" + babel-core "^6.26.0" + babel-runtime "^6.26.0" + core-js "^2.5.0" home-or-tmp "^2.0.0" - lodash "^4.2.0" + lodash "^4.17.4" mkdirp "^0.5.1" - source-map-support "^0.4.2" + source-map-support "^0.4.15" -babel-runtime@^6.18.0, babel-runtime@^6.22.0: - version "6.23.0" - resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.23.0.tgz#0a9489f144de70efb3ce4300accdb329e2fc543b" +babel-runtime@^6.18.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" + integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= dependencies: core-js 
"^2.4.0" - regenerator-runtime "^0.10.0" + regenerator-runtime "^0.11.0" -babel-template@^6.16.0, babel-template@^6.24.1, babel-template@^6.25.0: - version "6.25.0" - resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.25.0.tgz#665241166b7c2aa4c619d71e192969552b10c071" +babel-template@^6.16.0, babel-template@^6.24.1, babel-template@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" + integrity sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI= dependencies: - babel-runtime "^6.22.0" - babel-traverse "^6.25.0" - babel-types "^6.25.0" - babylon "^6.17.2" - lodash "^4.2.0" + babel-runtime "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + lodash "^4.17.4" -babel-traverse@^6.18.0, babel-traverse@^6.24.1, babel-traverse@^6.25.0: - version "6.25.0" - resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.25.0.tgz#2257497e2fcd19b89edc13c4c91381f9512496f1" +babel-traverse@^6.18.0, babel-traverse@^6.24.1, babel-traverse@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" + integrity sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4= dependencies: - babel-code-frame "^6.22.0" + babel-code-frame "^6.26.0" babel-messages "^6.23.0" - babel-runtime "^6.22.0" - babel-types "^6.25.0" - babylon "^6.17.2" - debug "^2.2.0" - globals "^9.0.0" - invariant "^2.2.0" - lodash "^4.2.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + debug "^2.6.8" + globals "^9.18.0" + invariant "^2.2.2" + lodash "^4.17.4" -babel-types@^6.18.0, babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.25.0: - version "6.25.0" - resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.25.0.tgz#70afb248d5660e5d18f811d91c8303b54134a18e" +babel-types@^6.18.0, babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" + integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= dependencies: - babel-runtime "^6.22.0" + babel-runtime "^6.26.0" esutils "^2.0.2" - lodash "^4.2.0" - to-fast-properties "^1.0.1" + lodash "^4.17.4" + to-fast-properties "^1.0.3" -babylon@^6.17.2, babylon@^6.17.3, babylon@^6.17.4: - version "6.17.4" - resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.4.tgz#3e8b7402b88d22c3423e137a1577883b15ff869a" +babylon@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" + integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" bcrypt-pbkdf@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= dependencies: tweetnacl "^0.14.3" binary-extensions@^1.0.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.9.0.tgz#66506c16ce6f4d6928a5b3cd6a33ca41e941e37b" - -block-stream@*: - version "0.0.9" - resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" - dependencies: - inherits "~2.0.0" - -boom@2.x.x: - version "2.10.1" - resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f" - dependencies: - hoek "2.x.x" + version "1.13.1" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" + integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== brace-expansion@^1.1.7: - version "1.1.8" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" @@ -1073,74 +1783,120 @@ brace-expansion@^1.1.7: braces@^1.8.2: version "1.8.5" resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" + integrity sha1-uneWLhLf+WnWt2cR6RS3N4V79qc= dependencies: expand-range "^1.8.1" preserve "^0.2.0" repeat-element "^1.1.2" +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + browser-resolve@^1.11.2: - version "1.11.2" - resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.2.tgz#8ff09b0a2c421718a1051c260b32e48f442938ce" + version "1.11.3" + resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.3.tgz#9b7cbb3d0f510e4cb86bdbd796124d28b5890af6" + integrity sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ== dependencies: resolve "1.1.7" -browserslist@^2.1.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-2.2.2.tgz#e9b4618b8a01c193f9786beea09f6fd10dbe31c3" +browserslist@^3.2.6: + version "3.2.8" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-3.2.8.tgz#b0005361d6471f0f5952797a76fc985f1f978fc6" + integrity sha512-WHVocJYavUwVgVViC0ORikPHQquXwVh939TaelZ4WDqpWgTX/FsGhl/+P4qBUAGcRvtOgDgC+xftNWWp2RUTAQ== dependencies: - caniuse-lite "^1.0.30000704" - electron-to-chromium "^1.3.16" + caniuse-lite "^1.0.30000844" + electron-to-chromium "^1.3.47" + +browserslist@^4.6.0, browserslist@^4.6.6: + version "4.7.0" + resolved 
"https://registry.yarnpkg.com/browserslist/-/browserslist-4.7.0.tgz#9ee89225ffc07db03409f2fee524dc8227458a17" + integrity sha512-9rGNDtnj+HaahxiVV38Gn8n8Lr8REKsel68v1sPFfIGEK6uSXTY3h9acgiT1dZVtOOUtifo/Dn8daDQ5dUgVsA== + dependencies: + caniuse-lite "^1.0.30000989" + electron-to-chromium "^1.3.247" + node-releases "^1.1.29" bser@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/bser/-/bser-1.0.2.tgz#381116970b2a6deea5646dd15dd7278444b56169" + integrity sha1-OBEWlwsqbe6lZG3RXdcnhES1YWk= dependencies: node-int64 "^0.4.0" bser@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.0.0.tgz#9ac78d3ed5d915804fd87acb158bc797147a1719" + version "2.1.0" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.0.tgz#65fc784bf7f87c009b973c12db6546902fa9c7b5" + integrity sha512-8zsjWrQkkBoLK6uxASk1nJ2SKv97ltiGDo6A3wA0/yRPz+CwmEyDo0hUrhIuukG2JHpAl3bvFIixw2/3Hi0DOg== dependencies: node-int64 "^0.4.0" +buffer-from@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + builtin-modules@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" + integrity sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8= + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" - -camelcase@^1.0.2: - version "1.2.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" + integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= camelcase@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" + integrity sha1-MvxLn82vhF/N9+c7uXysImHwqwo= camelcase@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + integrity sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0= -caniuse-lite@^1.0.30000704: - version "1.0.30000704" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000704.tgz#adb6ea01134515663682db93abab291d4c02946b" +caniuse-lite@^1.0.30000844, caniuse-lite@^1.0.30000989: + version "1.0.30000997" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000997.tgz#ba44a606804f8680894b7042612c2c7f65685b7e" + integrity sha512-BQLFPIdj2ntgBNWp9Q64LGUIEmvhKkzzHhUHR3CD5A9Lb7ZKF20/+sgadhFap69lk5XmK1fTUleDclaRFvgVUA== caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= -center-align@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" - dependencies: - align-text "^0.1.3" - lazy-cache 
"^1.0.3" - -chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3: +chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= dependencies: ansi-styles "^2.2.1" escape-string-regexp "^1.0.2" @@ -1148,25 +1904,19 @@ chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.0.1.tgz#dbec49436d2ae15f536114e76d14656cdbc0f44d" +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: - ansi-styles "^3.1.0" + ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" - supports-color "^4.0.0" - -chalk@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.4.0.tgz#5199a3ddcd0c1efe23bc08c1b027b06176e0c64f" - dependencies: - ansi-styles "~1.0.0" - has-color "~0.1.0" - strip-ansi "~0.1.0" + supports-color "^5.3.0" choices-separator@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/choices-separator/-/choices-separator-2.0.0.tgz#92fd1763182d79033f5c5c51d0ba352e5567c696" + integrity sha1-kv0XYxgteQM/XFxR0Lo1LlVnxpY= dependencies: ansi-dim "^0.1.1" debug "^2.6.6" @@ -1175,6 +1925,7 @@ choices-separator@^2.0.0: chokidar@^1.6.1: version "1.7.0" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" + integrity sha1-eY5ol3gVHIB2tLNg5e3SjNortGg= dependencies: anymatch "^1.3.0" async-each "^1.0.0" @@ -1187,266 +1938,375 @@ chokidar@^1.6.1: optionalDependencies: fsevents "^1.0.0" -ci-info@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.0.0.tgz#dc5285f2b4e251821683681c381c3388f46ec534" +chownr@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6" + integrity sha512-GkfeAQh+QNy3wquu9oIZr6SS5x7wGdSgNQvD10X3r+AZr1Oys22HW8kAmDMvNg2+Dm0TeGaEuO8gFwdBXxwO8A== + +ci-info@^1.5.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.6.0.tgz#2ca20dbb9ceb32d4524a683303313f0304b1e497" + integrity sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A== + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" cli-cursor@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= dependencies: restore-cursor "^2.0.0" -cli-spinners@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.0.0.tgz#ef987ed3d48391ac3dab9180b406a742180d6e6a" - -cliui@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" - dependencies: - center-align "^0.1.1" - right-align "^0.1.1" - wordwrap "0.0.2" +cli-spinners@^1.0.1: + version "1.3.1" + 
resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.3.1.tgz#002c1990912d0d59580c93bd36c056de99e4259a" + integrity sha512-1QL4544moEsDVH9T/l6Cemov/37iv1RtoKf7NJ04A60+4MREXNfx/QvavbH6QoGdsD4N4Mwy49cmaINR/o2mdg== cliui@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" + integrity sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" wrap-ansi "^2.0.0" -clone-deep@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-0.3.0.tgz#348c61ae9cdbe0edfe053d91ff4cc521d790ede8" +clone-deep@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-1.0.0.tgz#b2f354444b5d4a0ce58faca337ef34da2b14a6c7" + integrity sha512-hmJRX8x1QOJVV+GUjOBzi6iauhPqc9hIF6xitWRBbiPZOBb6vGo/mDRIK9P74RTKSQK7AE8B0DDWY/vpRrPmQw== dependencies: for-own "^1.0.0" - is-plain-object "^2.0.1" - kind-of "^3.2.2" - shallow-clone "^0.1.2" + is-plain-object "^2.0.4" + kind-of "^5.0.0" + shallow-clone "^1.0.0" -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" +clone-deep@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= collection-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= dependencies: map-visit "^1.0.0" object-visit "^1.0.0" color-convert@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a" + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: - color-name "^1.1.1" + color-name "1.1.3" -color-name@^1.1.1: +color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= colors@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" + version "1.4.0" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" + integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== -combined-stream@^1.0.5, combined-stream@~1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" +combined-stream@^1.0.6, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" -commander@^2.8.1: - version "2.11.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563" +commander@^2.11.0, commander@~2.20.0: + version "2.20.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422" + integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= component-emitter@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= content-type-parser@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/content-type-parser/-/content-type-parser-1.0.1.tgz#c3e56988c53c65127fb46d4032a3a900246fdc94" + version "1.0.2" + resolved "https://registry.yarnpkg.com/content-type-parser/-/content-type-parser-1.0.2.tgz#caabe80623e63638b2502fd4c7f12ff4ce2352e7" + integrity sha512-lM4l4CnMEwOLHAHr/P6MEZwZFPJFtAAKgL6pogbXmVZggIqXhdB6RbBtPOTsw2FcXwYhehRGERJmRrjOiIB8pQ== -convert-source-map@^1.1.0, convert-source-map@^1.4.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.0.tgz#9acd70851c6d5dfdd93d9282e5edf94a03ff46b5" +convert-source-map@^1.1.0, convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.5.1: + version "1.6.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20" + integrity sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A== + dependencies: + safe-buffer "~5.1.1" copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= + +core-js-compat@^3.1.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.2.1.tgz#0cbdbc2e386e8e00d3b85dc81c848effec5b8150" + integrity sha512-MwPZle5CF9dEaMYdDeWm73ao/IflDH+FjeJCWEADcEgFSE9TLimFKwJsfmkwzI8eC0Aj0mgvMDjeQjrElkz4/A== + dependencies: + browserslist "^4.6.6" + semver "^6.3.0" -core-js@^2.4.0, core-js@^2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.4.1.tgz#4de911e667b0eae9124e34254b53aea6fc618d3e" +core-js@^2.4.0, core-js@^2.5.0: + version "2.6.9" + resolved 
"https://registry.yarnpkg.com/core-js/-/core-js-2.6.9.tgz#6b4b214620c834152e179323727fc19741b084f2" + integrity sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A== -core-util-is@~1.0.0: +core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= cross-spawn@^5.0.1: version "5.1.0" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= dependencies: lru-cache "^4.0.1" shebang-command "^1.2.0" which "^1.2.9" -cryptiles@2.x.x: - version "2.0.5" - resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" - dependencies: - boom "2.x.x" - cssom@0.3.x, "cssom@>= 0.3.2 < 0.4.0": - version "0.3.2" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.2.tgz#b8036170c79f07a90ff2f16e22284027a243848b" + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== "cssstyle@>= 0.2.37 < 0.3.0": version "0.2.37" resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-0.2.37.tgz#541097234cb2513c83ceed3acddc27ff27987d54" + integrity sha1-VBCXI0yyUTyDzu06zdwn/yeYfVQ= dependencies: cssom "0.3.x" dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= dependencies: assert-plus "^1.0.0" -debug@^2.1.1, debug@^2.2.0, debug@^2.6.3, debug@^2.6.6, debug@^2.6.8: - version "2.6.8" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc" +debug@^2.2.0, debug@^2.3.3, debug@^2.6.6, debug@^2.6.8, debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -decamelize@^1.0.0, decamelize@^1.1.1: +debug@^3.0.1, debug@^3.1.0, debug@^3.2.6: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + +debug@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" + integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== + dependencies: + ms "^2.1.1" + +decamelize@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= -deep-extend@~0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" +deep-extend@^0.6.0: + version "0.6.0" + 
resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= default-require-extensions@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-1.0.0.tgz#f37ea15d3e13ffd9b437d33e1a75b5fb97874cb8" + integrity sha1-836hXT4T/9m0N9M+GnW1+5eHTLg= dependencies: strip-bom "^2.0.0" +define-properties@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + define-property@^0.2.5: version "0.2.5" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= dependencies: is-descriptor "^1.0.0" +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= delegates@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= detect-indent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" + integrity sha1-920GQ1LN9Docts5hnE7jqUdd4gg= dependencies: repeating "^2.0.0" +detect-libc@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" + integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= + diff@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-3.3.0.tgz#056695150d7aa93237ca7e378ac3b1682b7963b9" + version "3.5.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" + integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== ecc-jsbn@~0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= dependencies: jsbn "~0.1.0" + safer-buffer "^2.1.0" -electron-to-chromium@^1.3.16: - version "1.3.16" - resolved 
"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.16.tgz#d0e026735754770901ae301a21664cba45d92f7d" +electron-to-chromium@^1.3.247, electron-to-chromium@^1.3.47: + version "1.3.265" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.265.tgz#d69afa05a33e551b913a9798f072a6442f46b348" + integrity sha512-ypHt5Nv1Abr27QvJqk3VC4YDNqsrrWYMCmpmR7BNfCpcgYEwmCDoi3uJpp6kvj/MIjpScQoZMCQzLqfMQGmOsg== -errno@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.4.tgz#b896e23a9e5e8ba33871fc996abd3635fc9a1c7d" +errno@~0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" + integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== dependencies: - prr "~0.0.0" + prr "~1.0.1" error-ex@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" error-symbol@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/error-symbol/-/error-symbol-0.1.0.tgz#0a4dae37d600d15a29ba453d8ef920f1844333f6" + integrity sha1-Ck2uN9YA0VopukU9jvkg8YRDM/Y= escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= escodegen@^1.6.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.8.1.tgz#5a5b53af4693110bebb0867aa3430dd3b70a1018" + version "1.12.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.12.0.tgz#f763daf840af172bb3a2b6dd7219c0e17f7ff541" + integrity sha512-TuA+EhsanGcme5T3R0L80u4t8CpbXQjegRmf7+FPTJrtCTErXFeelblRgHQa1FofEzqYYJmJ/OqjTwREp9qgmg== dependencies: - esprima "^2.7.1" - estraverse "^1.9.1" + esprima "^3.1.3" + estraverse "^4.2.0" esutils "^2.0.2" optionator "^0.8.1" optionalDependencies: - source-map "~0.2.0" + source-map "~0.6.1" -esprima@^2.7.1: - version "2.7.3" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.7.3.tgz#96e3b70d5779f6ad49cd032673d1c312767ba581" +esprima@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" + integrity sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM= esprima@^4.0.0, esprima@~4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -estraverse@^1.9.1: - version "1.9.3" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" +estraverse@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== esutils@^2.0.2: - version "2.0.2" - 
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== exec-sh@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.0.tgz#14f75de3f20d286ef933099b2ce50a90359cef10" + version "0.2.2" + resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.2.tgz#2a5e7ffcbd7d0ba2755bdecb16e5a427dfbdec36" + integrity sha512-FIUCJz1RbuS0FKTdaAafAByGS0CPvU3R0MeHxgtl+djzCc//F8HakL8GzmVNZanasTbTAY/3DRFA0KpVqj/eAw== dependencies: - merge "^1.1.3" + merge "^1.2.0" execa@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" + integrity sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c= dependencies: cross-spawn "^5.0.1" get-stream "^3.0.0" @@ -1459,75 +2319,157 @@ execa@^0.7.0: expand-brackets@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" + integrity sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s= dependencies: is-posix-bracket "^0.1.0" +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + expand-range@^1.8.1: version "1.8.2" resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" + integrity sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc= dependencies: fill-range "^2.1.0" extend-shallow@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= dependencies: is-extendable "^0.1.0" -extend@~3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^0.3.1: version "0.3.2" resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" + integrity sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE= dependencies: is-extglob "^1.0.0" -extsprintf@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.0.2.tgz#e1080e0658e300b06294990cc70e1502235fd550" +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + 
define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= + +fast-deep-equal@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" + integrity sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk= + +fast-json-stable-stringify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" + integrity sha1-1RQsDK7msRifh9OnYREGT4bIu/I= fast-levenshtein@~2.0.4: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= fb-watchman@^1.8.0: version "1.9.2" resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-1.9.2.tgz#a24cf47827f82d38fb59a69ad70b76e3b6ae7383" + integrity sha1-okz0eCf4LTj7Waaa1wt247auc4M= dependencies: bser "1.0.2" fb-watchman@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.0.tgz#54e9abf7dfa2f26cd9b1636c588c1afc05de5d58" + integrity sha1-VOmr99+i8mzZsWNsWIwa/AXeXVg= dependencies: bser "^2.0.0" filename-regex@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" + integrity sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY= fileset@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/fileset/-/fileset-2.0.3.tgz#8e7548a96d3cc2327ee5e674168723a333bba2a0" + integrity sha1-jnVIqW08wjJ+5eZ0FocjozO7oqA= dependencies: glob "^7.0.3" minimatch "^3.0.3" fill-range@^2.1.0: - version "2.2.3" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" + version "2.2.4" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.4.tgz#eb1e773abb056dcd8df2bfdf6af59b8b3a936565" + integrity sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q== dependencies: is-number "^2.1.0" isobject "^2.0.0" - randomatic "^1.1.3" + randomatic "^3.0.0" repeat-element "^1.1.2" repeat-string "^1.5.2" +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +find-cache-dir@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" + integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== + dependencies: + commondir "^1.0.1" + make-dir "^2.0.0" + pkg-dir "^3.0.0" + find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= 
dependencies: path-exists "^2.0.0" pinkie-promise "^2.0.0" @@ -1535,84 +2477,106 @@ find-up@^1.0.0: find-up@^2.0.0, find-up@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= dependencies: locate-path "^2.0.0" +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + flow-bin@0.74.0: version "0.74.0" resolved "https://registry.yarnpkg.com/flow-bin/-/flow-bin-0.74.0.tgz#8017bb00efb37cbe8d81fbb7f464038bde06adc9" + integrity sha512-tIN9J5qg71S4UbofCu80tve8a+p7Hj7ytwUtu79cLg9KJVVTNnVVJXKgCghVzaZT1Rvl9SMHVPlDs9uYhPHEGQ== -flow-parser@^0.*: - version "0.51.0" - resolved "https://registry.yarnpkg.com/flow-parser/-/flow-parser-0.51.0.tgz#e1c0ceb6f802ba21d16c2fda8e42c824f40f4684" +flow-parser@0.*: + version "0.108.0" + resolved "https://registry.yarnpkg.com/flow-parser/-/flow-parser-0.108.0.tgz#36a8d35e6346b5d18adbafbdc91ad0770d8917d7" + integrity sha512-Ug8VuwlyDIZq5Xgrf+T7XLpKydhqYyNd8lmFtf7PZbu90T5LL+FeHjWzxyrBn35RCCZMw7pXrjCrHOSs+2zXyg== for-in@^0.1.3: version "0.1.8" resolved "https://registry.yarnpkg.com/for-in/-/for-in-0.1.8.tgz#d8773908e31256109952b1fdb9b3fa867d2775e1" + integrity sha1-2Hc5COMSVhCZUrH9ubP6hn0ndeE= -for-in@^1.0.1: +for-in@^1.0.1, for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= for-own@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" + integrity sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4= dependencies: for-in "^1.0.1" for-own@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" + integrity sha1-xjMy9BXO3EsE2/5wz4NklMU8tEs= dependencies: for-in "^1.0.1" forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= -form-data@~2.1.1: - version "2.1.4" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1" +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== dependencies: asynckit "^0.4.0" - combined-stream "^1.0.5" + combined-stream "^1.0.6" mime-types "^2.1.12" +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= + dependencies: + map-cache "^0.2.2" + +fs-minipass@^1.2.5: + version "1.2.7" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" + integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== + dependencies: + minipass "^2.6.0" + fs-readdir-recursive@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-1.0.0.tgz#8cd1745c8b4f8a29c8caec392476921ba195f560" + version "1.1.0" + resolved "https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz#e32fc030a2ccee44a6b5371308da54be0b397d27" + integrity sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA== fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= fsevents@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.2.tgz#3282b713fb3ad80ede0e9fcf4611b5aa6fc033f4" + version "1.2.9" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.9.tgz#3f5ed66583ccd6f400b5a00db6f7e861363e388f" + integrity sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw== dependencies: - nan "^2.3.0" - node-pre-gyp "^0.6.36" - -fstream-ignore@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" - dependencies: - fstream "^1.0.0" - inherits "2" - minimatch "^3.0.0" + nan "^2.12.1" + node-pre-gyp "^0.12.0" -fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: - version "1.0.11" - resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171" - dependencies: - graceful-fs "^4.1.2" - inherits "~2.0.0" - mkdirp ">=0.5 0" - rimraf "2" +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" + integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= dependencies: aproba "^1.0.3" console-control-strings "^1.0.0" @@ -1624,22 +2588,31 @@ gauge@~2.7.3: wide-align "^1.1.0" get-caller-file@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" + version "1.0.3" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" + integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-stream@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= dependencies: assert-plus "^1.0.0" glob-base@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" + integrity sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q= dependencies: glob-parent "^2.0.0" is-glob "^2.0.0" @@ -1647,12 +2620,14 @@ glob-base@^0.3.0: glob-parent@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" + integrity sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg= dependencies: is-glob "^2.0.0" -glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: - version "7.1.2" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" +glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3: + version "7.1.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" + integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -1661,110 +2636,160 @@ glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: once "^1.3.0" path-is-absolute "^1.0.0" -globals@^9.0.0: +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" + integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.4: - version "4.1.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" + version "4.2.2" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.2.tgz#6f0952605d0140c1cfdb138ed005775b92d67b02" + integrity sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q== growly@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" + integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= handlebars@^4.0.3: - version "4.0.10" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.10.tgz#3d30c718b09a3d96f23ea4cc1f403c4d3ba9ff4f" + version "4.3.0" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.3.0.tgz#427391b584626c9c9c6ffb7d1fb90aa9789221cc" + integrity sha512-7XlnO8yBXOdi7AzowjZssQr47Ctidqm7GbgARapOaqSN9HQhlClnOkR9HieGauIT3A8MBC6u9wPCXs97PCYpWg== dependencies: - async "^1.4.0" + neo-async "^2.6.0" optimist "^0.6.1" - source-map "^0.4.4" + source-map "^0.6.1" optionalDependencies: - uglify-js "^2.6" + uglify-js "^3.1.4" -har-schema@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= -har-validator@~4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" +har-validator@~5.1.0: + version "5.1.3" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.3.tgz#1ef89ebd3e4996557675eed9893110dc350fa080" + integrity sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g== dependencies: - ajv "^4.9.1" - har-schema "^1.0.5" + ajv "^6.5.5" + har-schema "^2.0.0" has-ansi@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= dependencies: ansi-regex "^2.0.0" -has-color@~0.1.0: - version "0.1.7" - resolved "https://registry.yarnpkg.com/has-color/-/has-color-0.1.7.tgz#67144a5260c34fc3cca677d041daf52fe7b78b2f" - has-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= -has-flag@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.0.tgz#ba1a8f1af2a0fc39650f5c850367704122063b44" + integrity sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q= has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= -hawk@~3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= dependencies: - boom "2.x.x" - cryptiles "2.x.x" - hoek "2.x.x" - sntp "1.x.x" + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= -hoek@2.x.x: - version "2.16.3" - resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" home-or-tmp@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" + integrity sha1-42w/LSyufXRqhX440Y1fMqeILbg= dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.1" hosted-git-info@^2.1.4: - version "2.5.0" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c" + version "2.8.4" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.4.tgz#44119abaf4bc64692a16ace34700fed9c03e2546" + integrity sha512-pzXIvANXEFrc5oFFXRMkbLPQ2rXRoDERwDLyrcUxGhaZhgP54BBSl9Oheh7Vv0T090cszWBxPjkQQ5Sq1PbBRQ== html-encoding-sniffer@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.1.tgz#79bf7a785ea495fe66165e734153f363ff5437da" + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz#e70d84b94da53aa375e11fe3a351be6642ca46f8" + integrity sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw== dependencies: whatwg-encoding "^1.0.1" -http-signature@~1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= dependencies: - assert-plus "^0.2.0" + assert-plus "^1.0.0" jsprim "^1.2.2" sshpk "^1.7.0" -iconv-lite@0.4.13: - version "0.4.13" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.13.tgz#1f88aba4ab0b1508e8312acc39345f36e992e2f2" +iconv-lite@0.4.24, iconv-lite@^0.4.4: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +ignore-walk@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.2.tgz#99d83a246c196ea5c93ef9315ad7b0819c35069b" + integrity sha512-EXyErtpHbn75ZTsOADsfx6J/FPo6/5cjev46PXrcTpd8z3BoRkXgYu9/JVqrI7tusjmwCZutGeRJeU0Wo1e4Cw== + dependencies: + minimatch "^3.0.4" imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= dependencies: once "^1.3.0" wrappy "1" @@ -1772,253 +2797,334 @@ inflight@^1.0.4: info-symbol@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/info-symbol/-/info-symbol-0.1.0.tgz#27841d72867ddb4242cd612d79c10633881c6a78" + integrity sha1-J4QdcoZ920JCzWEtecEGM4gcang= -inherits@2, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" +inherits@2, inherits@^2.0.1, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== ini@~1.3.0: - version "1.3.4" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== -invariant@^2.2.0, invariant@^2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" +invariant@^2.2.2: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" invert-kv@^1.0.0: version "1.0.0" 
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + integrity sha1-EEqOSqym09jNFXqO+L+rLXo//bY= is-accessor-descriptor@^0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= dependencies: kind-of "^3.0.2" +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= dependencies: binary-extensions "^1.0.0" -is-buffer@^1.0.2, is-buffer@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc" +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-builtin-module@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" + integrity sha1-VAVy0096wxGfj3bDDLwbHgN6/74= dependencies: builtin-modules "^1.0.0" is-ci@^1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.0.10.tgz#f739336b2632365061a9d48270cd56ae3369318e" + version "1.2.1" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.2.1.tgz#e3779c8ee17fccf428488f6e281187f2e632841c" + integrity sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg== dependencies: - ci-info "^1.0.0" + ci-info "^1.5.0" is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= dependencies: kind-of "^3.0.2" +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + is-descriptor@^0.1.0: version "0.1.6" resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" is-data-descriptor "^0.1.4" kind-of "^5.0.0" -is-descriptor@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.1.tgz#2c6023599bde2de9d5d2c8b9a9d94082036b6ef2" +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" is-dotfile@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" + integrity sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE= is-equal-shallow@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" + integrity sha1-IjgJj8Ih3gvPpdnqxMRdY4qhxTQ= dependencies: is-primitive "^2.0.0" is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" is-extglob@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" + integrity sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA= is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" + integrity sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= is-glob@^2.0.0, is-glob@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" + integrity sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM= dependencies: is-extglob "^1.0.0" is-number@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" + integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= dependencies: kind-of "^3.0.2" is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= dependencies: kind-of "^3.0.2" -is-plain-object@^2.0.1: +is-number@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" + integrity sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ== + +is-number@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-6.0.0.tgz#e6d15ad31fc262887cccf217ae5f9316f81b1995" + integrity sha512-Wu1VHeILBK8KAWJUAiSZQX94GmOE45Rg6/538fKwiloUu21KncEkYGPqob2oSZ5mUT73vLGrHQjKw3KMPwfDzg== 
+ +is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-posix-bracket@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" + integrity sha1-MzTceXdDaOkvAW5vvAqI9c1ua8Q= is-primitive@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" + integrity sha1-IHurkWOEmcB7Kt8kCkGochADRXU= is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= -is-windows@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.1.tgz#310db70f742d259a16a369202b51af84233310d9" +is-windows@^1.0.1, is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-wsl@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" + integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= isobject@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= istanbul-api@^1.1.1: - version "1.1.11" - resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.1.11.tgz#fcc0b461e2b3bda71e305155138238768257d9de" + version "1.3.7" + resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.3.7.tgz#a86c770d2b03e11e3f778cd7aedd82d2722092aa" + integrity sha512-4/ApBnMVeEPG3EkSzcw25wDe4N66wxwn+KKn6b47vyek8Xb3NBAcg4xfuQbS7BqcZuTX4wxfD5lVagdggR3gyA== dependencies: async "^2.1.4" fileset "^2.0.2" - istanbul-lib-coverage "^1.1.1" - istanbul-lib-hook "^1.0.7" - istanbul-lib-instrument "^1.7.4" - istanbul-lib-report "^1.1.1" - 
istanbul-lib-source-maps "^1.2.1" - istanbul-reports "^1.1.1" + istanbul-lib-coverage "^1.2.1" + istanbul-lib-hook "^1.2.2" + istanbul-lib-instrument "^1.10.2" + istanbul-lib-report "^1.1.5" + istanbul-lib-source-maps "^1.2.6" + istanbul-reports "^1.5.1" js-yaml "^3.7.0" mkdirp "^0.5.1" once "^1.4.0" -istanbul-lib-coverage@^1.0.1, istanbul-lib-coverage@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.1.tgz#73bfb998885299415c93d38a3e9adf784a77a9da" +istanbul-lib-coverage@^1.0.1, istanbul-lib-coverage@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz#ccf7edcd0a0bb9b8f729feeb0930470f9af664f0" + integrity sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ== -istanbul-lib-hook@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.0.7.tgz#dd6607f03076578fe7d6f2a630cf143b49bacddc" +istanbul-lib-hook@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.2.2.tgz#bc6bf07f12a641fbf1c85391d0daa8f0aea6bf86" + integrity sha512-/Jmq7Y1VeHnZEQ3TL10VHyb564mn6VrQXHchON9Jf/AEcmQ3ZIiyD1BVzNOKTZf/G3gE+kiGK6SmpF9y3qGPLw== dependencies: append-transform "^0.4.0" -istanbul-lib-instrument@^1.4.2, istanbul-lib-instrument@^1.7.2, istanbul-lib-instrument@^1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.7.4.tgz#e9fd920e4767f3d19edc765e2d6b3f5ccbd0eea8" +istanbul-lib-instrument@^1.10.1, istanbul-lib-instrument@^1.10.2, istanbul-lib-instrument@^1.4.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz#1f55ed10ac3c47f2bdddd5307935126754d0a9ca" + integrity sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A== dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" - babylon "^6.17.4" - istanbul-lib-coverage "^1.1.1" + babylon "^6.18.0" + istanbul-lib-coverage "^1.2.1" semver "^5.3.0" -istanbul-lib-report@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.1.tgz#f0e55f56655ffa34222080b7a0cd4760e1405fc9" +istanbul-lib-report@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.5.tgz#f2a657fc6282f96170aaf281eb30a458f7f4170c" + integrity sha512-UsYfRMoi6QO/doUshYNqcKJqVmFe9w51GZz8BS3WB0lYxAllQYklka2wP9+dGZeHYaWIdcXUx8JGdbqaoXRXzw== dependencies: - istanbul-lib-coverage "^1.1.1" + istanbul-lib-coverage "^1.2.1" mkdirp "^0.5.1" path-parse "^1.0.5" supports-color "^3.1.2" -istanbul-lib-source-maps@^1.1.0, istanbul-lib-source-maps@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.1.tgz#a6fe1acba8ce08eebc638e572e294d267008aa0c" +istanbul-lib-source-maps@^1.1.0, istanbul-lib-source-maps@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.6.tgz#37b9ff661580f8fca11232752ee42e08c6675d8f" + integrity sha512-TtbsY5GIHgbMsMiRw35YBHGpZ1DVFEO19vxxeiDMYaeOFOCzfnYVxvl6pOUIZR4dtPhAGpSMup8OyF8ubsaqEg== dependencies: - debug "^2.6.3" - istanbul-lib-coverage "^1.1.1" + debug "^3.1.0" + istanbul-lib-coverage "^1.2.1" mkdirp "^0.5.1" rimraf "^2.6.1" source-map "^0.5.3" 
-istanbul-reports@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.1.tgz#042be5c89e175bc3f86523caab29c014e77fee4e" +istanbul-reports@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.5.1.tgz#97e4dbf3b515e8c484caea15d6524eebd3ff4e1a" + integrity sha512-+cfoZ0UXzWjhAdzosCPP3AN8vvef8XDkWtTfgaN+7L3YTpNYITnCaEkceo5SEYy644VkHka/P1FvkWvrG/rrJw== dependencies: handlebars "^4.0.3" jest-changed-files@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-20.0.3.tgz#9394d5cc65c438406149bef1bf4d52b68e03e3f8" + integrity sha1-k5TVzGXEOEBhSb7xv01Sto4D4/g= jest-cli@^20.0.4: version "20.0.4" resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-20.0.4.tgz#e532b19d88ae5bc6c417e8b0593a6fe954b1dc93" + integrity sha1-5TKxnYiuW8bEF+iwWTpv6VSx3JM= dependencies: ansi-escapes "^1.4.0" callsites "^2.0.0" @@ -2054,6 +3160,7 @@ jest-cli@^20.0.4: jest-config@^20.0.4: version "20.0.4" resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-20.0.4.tgz#e37930ab2217c913605eff13e7bd763ec48faeea" + integrity sha1-43kwqyIXyRNgXv8T5712PsSPruo= dependencies: chalk "^1.1.3" glob "^7.1.1" @@ -2069,6 +3176,7 @@ jest-config@^20.0.4: jest-diff@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-20.0.3.tgz#81f288fd9e675f0fb23c75f1c2b19445fe586617" + integrity sha1-gfKI/Z5nXw+yPHXxwrGURf5YZhc= dependencies: chalk "^1.1.3" diff "^3.2.0" @@ -2078,10 +3186,12 @@ jest-diff@^20.0.3: jest-docblock@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-20.0.3.tgz#17bea984342cc33d83c50fbe1545ea0efaa44712" + integrity sha1-F76phDQswz2DxQ++FUXqDvqkRxI= jest-environment-jsdom@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-20.0.3.tgz#048a8ac12ee225f7190417713834bb999787de99" + integrity sha1-BIqKwS7iJfcZBBdxODS7mZeH3pk= dependencies: jest-mock "^20.0.3" jest-util "^20.0.3" @@ -2090,6 +3200,7 @@ jest-environment-jsdom@^20.0.3: jest-environment-node@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-20.0.3.tgz#d488bc4612af2c246e986e8ae7671a099163d403" + integrity sha1-1Ii8RhKvLCRumG6K52caCZFj1AM= dependencies: jest-mock "^20.0.3" jest-util "^20.0.3" @@ -2097,6 +3208,7 @@ jest-environment-node@^20.0.3: jest-haste-map@^20.0.4: version "20.0.5" resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-20.0.5.tgz#abad74efb1a005974a7b6517e11010709cab9112" + integrity sha512-0IKAQjUvuZjMCNi/0VNQQF74/H9KB67hsHJqGiwTWQC6XO5Azs7kLWm+6Q/dwuhvDUvABDOBMFK2/FwZ3sZ07Q== dependencies: fb-watchman "^2.0.0" graceful-fs "^4.1.11" @@ -2108,6 +3220,7 @@ jest-haste-map@^20.0.4: jest-jasmine2@^20.0.4: version "20.0.4" resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-20.0.4.tgz#fcc5b1411780d911d042902ef1859e852e60d5e1" + integrity sha1-/MWxQReA2RHQQpAu8YWehS5g1eE= dependencies: chalk "^1.1.3" graceful-fs "^4.1.11" @@ -2122,6 +3235,7 @@ jest-jasmine2@^20.0.4: jest-matcher-utils@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-20.0.3.tgz#b3a6b8e37ca577803b0832a98b164f44b7815612" + integrity sha1-s6a443yld4A7CDKpixZPRLeBVhI= dependencies: chalk "^1.1.3" pretty-format "^20.0.3" @@ -2129,6 +3243,7 @@ jest-matcher-utils@^20.0.3: jest-matchers@^20.0.3: version "20.0.3" resolved 
"https://registry.yarnpkg.com/jest-matchers/-/jest-matchers-20.0.3.tgz#ca69db1c32db5a6f707fa5e0401abb55700dfd60" + integrity sha1-ymnbHDLbWm9wf6XgQBq7VXAN/WA= dependencies: jest-diff "^20.0.3" jest-matcher-utils "^20.0.3" @@ -2138,6 +3253,7 @@ jest-matchers@^20.0.3: jest-message-util@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-20.0.3.tgz#6aec2844306fcb0e6e74d5796c1006d96fdd831c" + integrity sha1-auwoRDBvyw5udNV5bBAG2W/dgxw= dependencies: chalk "^1.1.3" micromatch "^2.3.11" @@ -2146,20 +3262,24 @@ jest-message-util@^20.0.3: jest-mock@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-20.0.3.tgz#8bc070e90414aa155c11a8d64c869a0d5c71da59" + integrity sha1-i8Bw6QQUqhVcEajWTIaaDVxx2lk= jest-regex-util@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-20.0.3.tgz#85bbab5d133e44625b19faf8c6aa5122d085d762" + integrity sha1-hburXRM+RGJbGfr4xqpRItCF12I= jest-resolve-dependencies@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-20.0.3.tgz#6e14a7b717af0f2cb3667c549de40af017b1723a" + integrity sha1-bhSntxevDyyzZnxUneQK8Bexcjo= dependencies: jest-regex-util "^20.0.3" jest-resolve@^20.0.4: version "20.0.4" resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-20.0.4.tgz#9448b3e8b6bafc15479444c6499045b7ffe597a5" + integrity sha1-lEiz6La6/BVHlETGSZBFt//ll6U= dependencies: browser-resolve "^1.11.2" is-builtin-module "^1.0.0" @@ -2168,6 +3288,7 @@ jest-resolve@^20.0.4: jest-runtime@^20.0.4: version "20.0.4" resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-20.0.4.tgz#a2c802219c4203f754df1404e490186169d124d8" + integrity sha1-osgCIZxCA/dU3xQE5JAYYWnRJNg= dependencies: babel-core "^6.0.0" babel-jest "^20.0.3" @@ -2188,6 +3309,7 @@ jest-runtime@^20.0.4: jest-snapshot@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-20.0.3.tgz#5b847e1adb1a4d90852a7f9f125086e187c76566" + integrity sha1-W4R+GtsaTZCFKn+fElCG4YfHZWY= dependencies: chalk "^1.1.3" jest-diff "^20.0.3" @@ -2199,6 +3321,7 @@ jest-snapshot@^20.0.3: jest-util@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-20.0.3.tgz#0c07f7d80d82f4e5a67c6f8b9c3fe7f65cfd32ad" + integrity sha1-DAf32A2C9OWmfG+LnD/n9lz9Mq0= dependencies: chalk "^1.1.3" graceful-fs "^4.1.11" @@ -2211,6 +3334,7 @@ jest-util@^20.0.3: jest-validate@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-20.0.3.tgz#d0cfd1de4f579f298484925c280f8f1d94ec3cab" + integrity sha1-0M/R3k9XnymEhJJcKA+PHZTsPKs= dependencies: chalk "^1.1.3" jest-matcher-utils "^20.0.3" @@ -2220,16 +3344,29 @@ jest-validate@^20.0.3: jest@^20.0.4: version "20.0.4" resolved "https://registry.yarnpkg.com/jest/-/jest-20.0.4.tgz#3dd260c2989d6dad678b1e9cc4d91944f6d602ac" + integrity sha1-PdJgwpidba1nix6cxNkZRPbWAqw= dependencies: jest-cli "^20.0.4" -js-tokens@^3.0.0: +js-levenshtein@^1.1.3: + version "1.1.6" + resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" + integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity 
sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= js-yaml@^3.7.0: - version "3.9.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.9.0.tgz#4ffbbf25c2ac963b8299dc74da7e3740de1c18ce" + version "3.13.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" + integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== dependencies: argparse "^1.0.7" esprima "^4.0.0" @@ -2237,30 +3374,35 @@ js-yaml@^3.7.0: jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - -jscodeshift@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/jscodeshift/-/jscodeshift-0.4.0.tgz#a76afdbfc6f4e78c3fd0d1a60470dfa43c03190e" - dependencies: - async "^1.5.0" - babel-plugin-transform-flow-strip-types "^6.8.0" - babel-preset-es2015 "^6.9.0" - babel-preset-stage-1 "^6.5.0" - babel-register "^6.9.0" - babylon "^6.17.3" + integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + +"jscodeshift@https://github.com/jbrown215/jscodeshift.git": + version "0.6.4" + resolved "https://github.com/jbrown215/jscodeshift.git#3f5b13d61855b2b6a96fac50a60dceb4ce18f770" + dependencies: + "@babel/core" "^7.1.6" + "@babel/parser" "^7.1.6" + "@babel/plugin-proposal-class-properties" "^7.1.0" + "@babel/plugin-proposal-object-rest-spread" "^7.0.0" + "@babel/preset-env" "^7.1.6" + "@babel/preset-flow" "^7.0.0" + "@babel/preset-typescript" "^7.1.0" + "@babel/register" "^7.0.0" + babel-core "^7.0.0-bridge.0" colors "^1.1.2" - flow-parser "^0.*" - lodash "^4.13.1" - micromatch "^2.3.7" - node-dir "0.1.8" - nomnom "^1.8.1" - recast "^0.12.5" + flow-parser "0.*" + graceful-fs "^4.1.11" + micromatch "^3.1.10" + neo-async "^2.5.0" + node-dir "^0.1.17" + recast "^0.18.1" temp "^0.8.1" - write-file-atomic "^1.2.0" + write-file-atomic "^2.3.0" jsdom@^9.12.0: version "9.12.0" resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-9.12.0.tgz#e8c546fffcb06c00d4833ca84410fed7f8a097d4" + integrity sha1-6MVG//ywbADUgzyoRBD+1/igl9Q= dependencies: abab "^1.0.3" acorn "^4.0.4" @@ -2285,95 +3427,119 @@ jsdom@^9.12.0: jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" + integrity sha1-RsP+yMGJKxKwgz25vHYiF226s0s= + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= 
json-stable-stringify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" + integrity sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8= dependencies: jsonify "~0.0.0" json-stringify-safe@~5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= -json5@^0.5.0: +json5@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" + integrity sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE= + +json5@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.0.tgz#e7a0c62c48285c628d20a10b85c89bb807c32850" + integrity sha512-8Mh9h6xViijj36g7Dxi+Y4S6hNGV96vcJZr/SrlHh1LR/pEn/8j/+qIBbs44YKl69Lrfctp4QD+AdWLTMqEZAQ== + dependencies: + minimist "^1.2.0" jsonify@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" + integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM= jsprim@^1.2.2: - version "1.4.0" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.0.tgz#a3b87e40298d8c380552d8cc7628a0bb95a22918" + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= dependencies: assert-plus "1.0.0" - extsprintf "1.0.2" + extsprintf "1.3.0" json-schema "0.2.3" - verror "1.3.6" + verror "1.10.0" -kind-of@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-2.0.1.tgz#018ec7a4ce7e3a86cb9141be519d24c8faa981b5" - dependencies: - is-buffer "^1.0.2" - -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.2: +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= dependencies: is-buffer "^1.1.5" -kind-of@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.0.0.tgz#9038420f740b2e836ce48b34617bcb855947f2a9" +kind-of@^5.0.0, kind-of@^5.0.2: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" + integrity sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA== koalas@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/koalas/-/koalas-1.0.2.tgz#318433f074235db78fae5661a02a8ca53ee295cd" + integrity sha1-MYQz8HQjXbePrlZhoCqMpT7ilc0= -lazy-cache@^0.2.3: - version "0.2.7" - resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-0.2.7.tgz#7feddf2dcb6edb77d11ef1d117ab5ffdf0ab1b65" - -lazy-cache@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" - -lazy-cache@^2.0.1, lazy-cache@^2.0.2: +lazy-cache@^2.0.1: 
version "2.0.2" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-2.0.2.tgz#b9190a4f913354694840859f8a8f7084d8822264" + integrity sha1-uRkKT5EzVGlIQIWfio9whNiCImQ= dependencies: set-getter "^0.1.0" lcid@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + integrity sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU= dependencies: invert-kv "^1.0.0" leven@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580" + integrity sha1-wuep93IJTe6dNCAq6KzORoeHVYA= levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= dependencies: prelude-ls "~1.1.2" type-check "~0.3.2" @@ -2381,6 +3547,7 @@ levn@~0.3.0: load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" @@ -2391,6 +3558,7 @@ load-json-file@^1.0.0: load-json-file@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + integrity sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg= dependencies: graceful-fs "^4.1.2" parse-json "^2.2.0" @@ -2400,30 +3568,43 @@ load-json-file@^2.0.0: locate-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= dependencies: p-locate "^2.0.0" path-exists "^3.0.0" -lodash@^4.13.1, lodash@^4.14.0, lodash@^4.2.0: - version "4.17.4" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +lodash@>=4.17.14, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.4: + version "4.17.15" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" + integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== log-ok@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/log-ok/-/log-ok-0.1.1.tgz#bea3dd36acd0b8a7240d78736b5b97c65444a334" + integrity sha1-vqPdNqzQuKckDXhza1uXxlREozQ= dependencies: ansi-green "^0.1.1" success-symbol "^0.1.0" -log-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" +log-symbols@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" + integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== dependencies: - chalk "^1.0.0" + chalk "^2.0.1" log-utils@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/log-utils/-/log-utils-0.2.1.tgz#a4c217a0dd9a50515d9b920206091ab3d4e031cf" + integrity sha1-pMIXoN2aUFFdm5ICBgkas9TgMc8= dependencies: ansi-colors "^0.2.0" error-symbol "^0.1.0" @@ -2433,48 +3614,69 @@ 
log-utils@^0.2.1: time-stamp "^1.0.1" warning-symbol "^0.1.0" -longest@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" - loose-envify@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: - js-tokens "^3.0.0" + js-tokens "^3.0.0 || ^4.0.0" lru-cache@^4.0.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" + version "4.1.5" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" + integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== dependencies: pseudomap "^1.0.2" yallist "^2.1.2" +make-dir@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" + integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== + dependencies: + pify "^4.0.1" + semver "^5.6.0" + makeerror@1.0.x: version "1.0.11" resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" + integrity sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw= dependencies: tmpl "1.0.x" +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= + map-visit@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= dependencies: object-visit "^1.0.0" +math-random@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c" + integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A== + mem@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" + integrity sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y= dependencies: mimic-fn "^1.0.0" -merge@^1.1.3: - version "1.2.0" - resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.0.tgz#7531e39d4949c281a66b8c5a6e0265e8b05894da" +merge@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.1.tgz#38bebf80c3220a8a487b6fcfb3941bb11720c145" + integrity sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ== -micromatch@^2.1.5, micromatch@^2.3.11, micromatch@^2.3.7: +micromatch@^2.1.5, micromatch@^2.3.11: version "2.3.11" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" + integrity sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU= dependencies: arr-diff "^2.0.0" array-unique "^0.2.1" @@ -2490,136 +3692,258 @@ micromatch@^2.1.5, micromatch@^2.3.11, micromatch@^2.3.7: parse-glob "^3.0.4" regex-cache "^0.4.2" -mime-db@~1.27.0: - version "1.27.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.27.0.tgz#820f572296bbd20ec25ed55e5b5de869e5436eb1" - 
-mime-types@^2.1.12, mime-types@~2.1.7: - version "2.1.15" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.15.tgz#a4ebf5064094569237b8cf70046776d09fc92aed" - dependencies: - mime-db "~1.27.0" +micromatch@^3.1.10: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +mime-db@1.40.0: + version "1.40.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.40.0.tgz#a65057e998db090f732a68f6c276d387d4126c32" + integrity sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA== + +mime-types@^2.1.12, mime-types@~2.1.19: + version "2.1.24" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.24.tgz#b6f8d0b3e951efb77dedeca194cff6d16f676f81" + integrity sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ== + dependencies: + mime-db "1.40.0" mimic-fn@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18" + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== -minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: +minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimist@0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= minimist@^1.1.1, minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" + integrity sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8= + +minipass@^2.2.1, minipass@^2.6.0, minipass@^2.8.6: + version "2.8.6" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.8.6.tgz#620d889ace26356391d010ecb9458749df9b6db5" + integrity sha512-lFG7d6g3+/UaFDCOtqPiKAC9zngWWsQZl1g5q6gaONqrjq61SX2xFqXMleQiFVyDpYwa018E9hmlAFY22PCb+A== + dependencies: + safe-buffer "^5.1.2" + yallist "^3.0.0" + +minizlib@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.2.2.tgz#6f0ccc82fa53e1bf2ff145f220d2da9fa6e3a166" + integrity sha512-hR3At21uSrsjjDTWrbu0IMLTpnkpv8IIMFDFaoz43Tmu4LkmAXfH44vNNzpTnf+OAQQCHrb91y/wc2J4x5XgSQ== + dependencies: + minipass "^2.2.1" + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + 
integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" mixin-object@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/mixin-object/-/mixin-object-2.0.1.tgz#4fb949441dab182540f1fe035ba60e1947a5e57e" + integrity sha1-T7lJRB2rGCVA8f4DW6YOGUel5X4= dependencies: for-in "^0.1.3" is-extendable "^0.1.1" -"mkdirp@>=0.5 0", mkdirp@^0.5.1: +mkdirp@^0.5.0, mkdirp@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= dependencies: minimist "0.0.8" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== mute-stream@0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" - -nan@^2.3.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45" + integrity sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s= + +nan@^2.12.1: + version "2.14.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" + integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== + +nanomatch@^1.2.9: + version "1.2.13" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= -node-dir@0.1.8: - version "0.1.8" - resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.8.tgz#55fb8deb699070707fb67f91a460f0448294c77d" +needle@^2.2.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" + integrity sha512-4Hnwzr3mi5L97hMYeNl8wRW/Onhy4nUKR/lVemJ8gJedxxUyBLm9kkrDColJvoSfwi0jCNhD+xCdOtiGDQiRZg== + dependencies: + debug "^3.2.6" + iconv-lite "^0.4.4" + sax "^1.2.4" + +neo-async@^2.5.0, neo-async@^2.6.0: + version "2.6.1" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.1.tgz#ac27ada66167fa8849a6addd837f6b189ad2081c" + integrity sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw== + +node-dir@^0.1.17: + version "0.1.17" + resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5" + integrity sha1-X1Zl2TNRM1yqvvjxxVRRbPXx5OU= + dependencies: + minimatch "^3.0.2" node-int64@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" 
+ integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= + +node-modules-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" + integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= node-notifier@^5.0.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.1.2.tgz#2fa9e12605fa10009d44549d6fcd8a63dde0e4ff" + version "5.4.3" + resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.4.3.tgz#cb72daf94c93904098e28b9c590fd866e464bd50" + integrity sha512-M4UBGcs4jeOK9CjTsYwkvH6/MzuUmGCyTW+kCY7uO+1ZVr0+FHGdPdIf5CCLqAaxnRrWidyoQlNkMIIVwbKB8Q== dependencies: growly "^1.3.0" - semver "^5.3.0" - shellwords "^0.1.0" - which "^1.2.12" + is-wsl "^1.1.0" + semver "^5.5.0" + shellwords "^0.1.1" + which "^1.3.0" -node-pre-gyp@^0.6.36: - version "0.6.36" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.36.tgz#db604112cb74e0d477554e9b505b17abddfab786" +node-pre-gyp@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.12.0.tgz#39ba4bb1439da030295f899e3b520b7785766149" + integrity sha512-4KghwV8vH5k+g2ylT+sLTjy5wmUOb9vPhnM8NHvRf9dHmnW/CndrFXy2aRPaPST6dugXSdHXfeaHQm77PIz/1A== dependencies: + detect-libc "^1.0.2" mkdirp "^0.5.1" + needle "^2.2.1" nopt "^4.0.1" + npm-packlist "^1.1.6" npmlog "^4.0.2" - rc "^1.1.7" - request "^2.81.0" + rc "^1.2.7" rimraf "^2.6.1" semver "^5.3.0" - tar "^2.2.1" - tar-pack "^3.4.0" + tar "^4" -nomnom@^1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/nomnom/-/nomnom-1.8.1.tgz#2151f722472ba79e50a76fc125bb8c8f2e4dc2a7" +node-releases@^1.1.29: + version "1.1.32" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.32.tgz#485b35c1bf9b4d8baa105d782f8ca731e518276e" + integrity sha512-VhVknkitq8dqtWoluagsGPn3dxTvN9fwgR59fV3D7sLBHe0JfDramsMI8n8mY//ccq/Kkrf8ZRHRpsyVZ3qw1A== dependencies: - chalk "~0.4.0" - underscore "~1.6.0" + semver "^5.3.0" nopt@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" + integrity sha1-0NRoWv1UFRk8jHUFYC0NF81kR00= dependencies: abbrev "1" osenv "^0.1.4" normalize-package-data@^2.3.2: - version "2.4.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" + version "2.5.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" - is-builtin-module "^1.0.0" + resolve "^1.10.0" semver "2 || 3 || 4 || 5" validate-npm-package-license "^3.0.1" -normalize-path@^2.0.1: +normalize-path@^2.0.0, normalize-path@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= dependencies: remove-trailing-separator "^1.0.1" +npm-bundled@^1.0.1: + version "1.0.6" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.6.tgz#e7ba9aadcef962bb61248f91721cd932b3fe6bdd" + integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g== + +npm-packlist@^1.1.6: + version "1.4.4" + resolved 
"https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.4.tgz#866224233850ac534b63d1a6e76050092b5d2f44" + integrity sha512-zTLo8UcVYtDU3gdeaFu2Xu0n0EvelfHDGuqtNIn5RO7yQj4H1TqNdBc/yZjxnWA0PVB8D3Woyp0i5B43JwQ6Vw== + dependencies: + ignore-walk "^3.0.1" + npm-bundled "^1.0.1" + npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" npmlog@^4.0.2: version "4.1.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" + integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== dependencies: are-we-there-yet "~1.1.2" console-control-strings "~1.1.0" @@ -2629,55 +3953,87 @@ npmlog@^4.0.2: number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= "nwmatcher@>= 1.3.9 < 2.0.0": - version "1.4.1" - resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.1.tgz#7ae9b07b0ea804db7e25f05cb5fe4097d4e4949f" + version "1.4.4" + resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.4.tgz#2285631f34a95f0d0395cd900c96ed39b58f346e" + integrity sha512-3iuY4N5dhgMpCUrOVnuAdGrgxVqV2cJpM+XNccjR2DKOB1RUP0aA+wGXEiNziG/UKboFyGBIoKOaNlJxx8bciQ== -oauth-sign@~0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== object-assign@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-copy@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= dependencies: copy-descriptor "^0.1.0" define-property "^0.2.5" kind-of "^3.0.3" +object-keys@^1.0.11, object-keys@^1.0.12: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= dependencies: isobject "^3.0.0" +object.assign@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" + integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== + dependencies: + define-properties "^1.1.2" + function-bind "^1.1.1" + has-symbols "^1.0.0" + object-keys "^1.0.11" + object.omit@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" + integrity sha1-Gpx0SCnznbuFjHbKNXmuKlTr0fo= dependencies: for-own "^0.1.4" is-extendable "^0.1.1" 
-once@^1.3.0, once@^1.3.3, once@^1.4.0: +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= + dependencies: + isobject "^3.0.1" + +once@^1.3.0, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" onetime@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= dependencies: mimic-fn "^1.0.0" optimist@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" + integrity sha1-2j6nRob6IaGaERwybpDrFaAZZoY= dependencies: minimist "~0.0.1" wordwrap "~0.0.2" @@ -2685,6 +4041,7 @@ optimist@^0.6.1: optionator@^0.8.1: version "0.8.2" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + integrity sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q= dependencies: deep-is "~0.1.3" fast-levenshtein "~2.0.4" @@ -2694,27 +4051,31 @@ optionator@^0.8.1: wordwrap "~1.0.0" ora@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/ora/-/ora-1.3.0.tgz#80078dd2b92a934af66a3ad72a5b910694ede51a" + version "1.4.0" + resolved "https://registry.yarnpkg.com/ora/-/ora-1.4.0.tgz#884458215b3a5d4097592285f93321bb7a79e2e5" + integrity sha512-iMK1DOQxzzh2MBlVsU42G80mnrvUhqsMh74phHtDlrcTZPK0pH6o7l7DRshK+0YsxDyEuaOkziVdvM3T0QTzpw== dependencies: - chalk "^1.1.1" + chalk "^2.1.0" cli-cursor "^2.1.0" - cli-spinners "^1.0.0" - log-symbols "^1.0.2" + cli-spinners "^1.0.1" + log-symbols "^2.1.0" os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= os-locale@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" + integrity sha1-IPnxeuKe00XoveWDsT0gCYA8FNk= dependencies: lcid "^1.0.0" os-locale@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" + integrity sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA== dependencies: execa "^0.7.0" lcid "^1.0.0" @@ -2723,17 +4084,20 @@ os-locale@^2.0.0: os-tmpdir@^1.0.0, os-tmpdir@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= osenv@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644" + version "0.1.5" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" + integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== dependencies: os-homedir "^1.0.0" os-tmpdir "^1.0.0" -output-file-sync@^1.1.0: +output-file-sync@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/output-file-sync/-/output-file-sync-1.1.2.tgz#d0a33eefe61a205facb90092e826598d5245ce76" + integrity sha1-0KM+7+YaIF+suQCS6CZZjVJFznY= dependencies: graceful-fs "^4.1.4" mkdirp "^0.5.1" @@ -2742,24 +4106,55 @@ 
output-file-sync@^1.1.0: p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-limit@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc" + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.0.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.1.tgz#aa07a788cc3151c939b5131f63570f0dd2009537" + integrity sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg== + dependencies: + p-try "^2.0.0" p-locate@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= dependencies: p-limit "^1.1.0" +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + p-map@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.1.1.tgz#05f5e4ae97a068371bc2a5cc86bfbdbc19c4ae7a" + version "1.2.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-1.2.0.tgz#e4e94f311eabbc8633a1e79908165fca26241b6b" + integrity sha512-r6zKACMNhjPJMTl8KcFH4li//gkrXWfbD6feV8l6doRHlzljFWGJ2AP6iKaCJXyZmAUMOPtvbW7EXkbWO/pLEA== + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== parse-glob@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" + integrity sha1-ssN2z7EfNVE7rdFz7wu246OIORw= dependencies: glob-base "^0.3.0" is-dotfile "^1.0.0" @@ -2769,38 +4164,51 @@ parse-glob@^3.0.4: parse-json@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= dependencies: error-ex "^1.2.0" parse5@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/parse5/-/parse5-1.5.1.tgz#9b7f3b0de32be78dc2401b17573ccaf0f6f59d94" + integrity sha1-m387DeMr543CQBsXVzzK8Pb1nZQ= + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= dependencies: pinkie-promise "^2.0.0" path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + 
integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= -path-is-absolute@^1.0.0: +path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= path-key@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= -path-parse@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" +path-parse@^1.0.5, path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= dependencies: graceful-fs "^4.1.2" pify "^2.0.0" @@ -2809,94 +4217,129 @@ path-type@^1.0.0: path-type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + integrity sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM= dependencies: pify "^2.0.0" -performance-now@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= pify@^2.0.0, pify@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= + +pirates@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" + integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== + dependencies: + node-modules-regexp "^1.0.0" + +pkg-dir@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" + integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== + dependencies: + find-up "^3.0.0" pointer-symbol@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/pointer-symbol/-/pointer-symbol-1.0.0.tgz#60f9110204ea7a929b62644a21315543cbb3d447" + integrity sha1-YPkRAgTqepKbYmRKITFVQ8uz1Ec= + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved 
"https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= preserve@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" + integrity sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks= pretty-format@^20.0.3: version "20.0.3" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-20.0.3.tgz#020e350a560a1fe1a98dc3beb6ccffb386de8b14" + integrity sha1-Ag41ClYKH+GpjcO+tsz/s4beixQ= dependencies: ansi-regex "^2.1.1" ansi-styles "^3.0.0" -private@^0.1.6, private@~0.1.5: - version "0.1.7" - resolved "https://registry.yarnpkg.com/private/-/private-0.1.7.tgz#68ce5e8a1ef0a23bb570cc28537b5332aba63ef1" +private@^0.1.6, private@^0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" + integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== -process-nextick-args@~1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== prompt-actions@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/prompt-actions/-/prompt-actions-3.0.2.tgz#537eee52241c940379f354a06eae8528e44ceeba" + integrity sha512-dhz2Fl7vK+LPpmnQ/S/eSut4BnH4NZDLyddHKi5uTU/2PDn3grEMGkgsll16V5RpVUh/yxdiam0xsM0RD4xvtg== dependencies: debug "^2.6.8" prompt-base@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/prompt-base/-/prompt-base-4.0.2.tgz#295d68c50949ee93de211c618efe87850271637c" + version "4.1.0" + resolved "https://registry.yarnpkg.com/prompt-base/-/prompt-base-4.1.0.tgz#7b88e4c01b096c83d2f4e501a7e85f0d369ecd1f" + integrity sha512-svGzgLUKZoqomz9SGMkf1hBG8Wl3K7JGuRCXc/Pv7xw8239hhaTBXrmjt7EXA9P/QZzdyT8uNWt9F/iJTXq75g== dependencies: component-emitter "^1.2.1" - debug "^2.6.8" + debug "^3.0.1" koalas "^1.0.2" log-utils "^0.2.1" prompt-actions "^3.0.2" - prompt-question "^5.0.0" + prompt-question "^5.0.1" readline-ui "^2.2.3" - readline-utils "^2.2.1" + readline-utils "^2.2.3" static-extend "^0.1.2" -prompt-choices@^4.0.3: - version "4.0.4" - resolved "https://registry.yarnpkg.com/prompt-choices/-/prompt-choices-4.0.4.tgz#e0115eb42646f47d6ac39f68f5214ed439b98b62" +prompt-choices@^4.0.5: + version "4.1.0" + resolved "https://registry.yarnpkg.com/prompt-choices/-/prompt-choices-4.1.0.tgz#6094202c4e55d0762e49c1e53735727e53fd484f" + integrity sha512-ZNYLv6rW9z9n0WdwCkEuS+w5nUAGzRgtRt6GQ5aFNFz6MIcU7nHFlHOwZtzy7RQBk80KzUGPSRQphvMiQzB8pg== dependencies: - arr-flatten "^1.0.3" + arr-flatten "^1.1.0" arr-swap "^1.0.1" choices-separator "^2.0.0" - clone-deep "^0.3.0" + clone-deep "^4.0.0" collection-visit "^1.0.0" - debug "^2.6.8" - define-property "^1.0.0" - extend-shallow "^2.0.1" - is-number "^3.0.0" - kind-of "^4.0.0" + define-property "^2.0.2" + is-number "^6.0.0" + kind-of "^6.0.2" koalas "^1.0.2" - lazy-cache "^2.0.2" 
log-utils "^0.2.1" pointer-symbol "^1.0.0" radio-symbol "^2.0.0" - set-value "^1.0.0" + set-value "^3.0.0" strip-color "^0.1.0" terminal-paginator "^2.0.2" toggle-array "^1.0.1" @@ -2904,57 +4347,78 @@ prompt-choices@^4.0.3: prompt-confirm@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/prompt-confirm/-/prompt-confirm-1.2.0.tgz#ed96d0ecc3a3485c7c9d7103bf19444e7811631f" + integrity sha512-r7XZxI5J5/oPtUskN0ZYO+lkv/WJHMQgfd1GTKAuxnHuViQShiFHdUnj6DamL4gQExaKAX7rnIcTKoRSpVVquA== dependencies: debug "^2.6.8" prompt-base "^4.0.1" -prompt-question@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/prompt-question/-/prompt-question-5.0.1.tgz#a50ed63443b3c7ce107c14f7449d999f39506f89" +prompt-question@^5.0.1: + version "5.0.2" + resolved "https://registry.yarnpkg.com/prompt-question/-/prompt-question-5.0.2.tgz#81a479f38f0bafecc758e5d6f7bc586e599610b3" + integrity sha512-wreaLbbu8f5+7zXds199uiT11Ojp59Z4iBi6hONlSLtsKGTvL2UY8VglcxQ3t/X4qWIxsNCg6aT4O8keO65v6Q== dependencies: - clone-deep "^0.3.0" - debug "^2.6.8" + clone-deep "^1.0.0" + debug "^3.0.1" define-property "^1.0.0" - kind-of "^4.0.0" + isobject "^3.0.1" + kind-of "^5.0.2" koalas "^1.0.2" - prompt-choices "^4.0.3" + prompt-choices "^4.0.5" -prr@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" +prr@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= + +psl@^1.1.24, psl@^1.1.28: + version "1.4.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.4.0.tgz#5dd26156cdb69fa1fdb8ab1991667d3f80ced7c2" + integrity sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw== punycode@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== -qs@~6.4.0: - version "6.4.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" +qs@~6.5.2: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== radio-symbol@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/radio-symbol/-/radio-symbol-2.0.0.tgz#7aa9bfc50485636d52dd76d6a8e631b290799ae1" + integrity sha1-eqm/xQSFY21S3XbWqOYxspB5muE= dependencies: ansi-gray "^0.1.1" ansi-green "^0.1.1" is-windows "^1.0.1" -randomatic@^1.1.3: - version "1.1.7" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c" +randomatic@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" + integrity sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw== dependencies: - 
is-number "^3.0.0" - kind-of "^4.0.0" + is-number "^4.0.0" + kind-of "^6.0.0" + math-random "^1.0.1" -rc@^1.1.7: - version "1.2.1" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.1.tgz#2e03e8e42ee450b8cb3dce65be1bf8974e1dfd95" +rc@^1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== dependencies: - deep-extend "~0.4.0" + deep-extend "^0.6.0" ini "~1.3.0" minimist "^1.2.0" strip-json-comments "~2.0.1" @@ -2962,6 +4426,7 @@ rc@^1.1.7: read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= dependencies: find-up "^1.0.0" read-pkg "^1.0.0" @@ -2969,6 +4434,7 @@ read-pkg-up@^1.0.1: read-pkg-up@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + integrity sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4= dependencies: find-up "^2.0.0" read-pkg "^2.0.0" @@ -2976,6 +4442,7 @@ read-pkg-up@^2.0.0: read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= dependencies: load-json-file "^1.0.0" normalize-package-data "^2.3.2" @@ -2984,44 +4451,48 @@ read-pkg@^1.0.0: read-pkg@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + integrity sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg= dependencies: load-json-file "^2.0.0" normalize-package-data "^2.3.2" path-type "^2.0.0" -readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.4: - version "2.3.3" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" +readable-stream@^2.0.2, readable-stream@^2.0.6: + version "2.3.6" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" + integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" isarray "~1.0.0" - process-nextick-args "~1.0.6" + process-nextick-args "~2.0.0" safe-buffer "~5.1.1" - string_decoder "~1.0.3" + string_decoder "~1.1.1" util-deprecate "~1.0.1" readdirp@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78" + version "2.2.1" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" + integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: - graceful-fs "^4.1.2" - minimatch "^3.0.2" + graceful-fs "^4.1.11" + micromatch "^3.1.10" readable-stream "^2.0.2" - set-immediate-shim "^1.0.1" readline-ui@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/readline-ui/-/readline-ui-2.2.3.tgz#9e873a7668bbd8ca8a5573ce810a6bafb70a5089" + integrity sha512-ix7jz0PxqQqcIuq3yQTHv1TOhlD2IHO74aNO+lSuXsRYm1d+pdyup1yF3zKyLK1wWZrVNGjkzw5tUegO2IDy+A== dependencies: component-emitter "^1.2.1" debug "^2.6.8" readline-utils "^2.2.1" string-width "^2.0.0" -readline-utils@^2.2.1: +readline-utils@^2.2.1, readline-utils@^2.2.3: version "2.2.3" resolved 
"https://registry.yarnpkg.com/readline-utils/-/readline-utils-2.2.3.tgz#6f847d6b8f1915c391b581c367cd47873862351a" + integrity sha1-b4R9a48ZFcORtYHDZ81HhzhiNRo= dependencies: arr-flatten "^1.1.0" extend-shallow "^2.0.1" @@ -3033,150 +4504,240 @@ readline-utils@^2.2.1: strip-color "^0.1.0" window-size "^1.1.0" -recast@^0.12.5: - version "0.12.9" - resolved "https://registry.yarnpkg.com/recast/-/recast-0.12.9.tgz#e8e52bdb9691af462ccbd7c15d5a5113647a15f1" +recast@^0.18.1: + version "0.18.2" + resolved "https://registry.yarnpkg.com/recast/-/recast-0.18.2.tgz#ada263677edc70c45408caf20e6ae990958fdea8" + integrity sha512-MbuHc1lzIDIn7bpxaqIAGwwtyaokkzPqINf1Vm/LA0BSyVrTgXNVTTT7RzWC9kP+vqrUoYVpd6wHhI8x75ej8w== dependencies: - ast-types "0.10.1" - core-js "^2.4.1" + ast-types "0.13.2" esprima "~4.0.0" - private "~0.1.5" + private "^0.1.8" source-map "~0.6.1" -regenerate@^1.2.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.2.tgz#d1941c67bad437e1be76433add5b385f95b19260" +regenerate-unicode-properties@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz#ef51e0f0ea4ad424b77bf7cb41f3e015c70a3f0e" + integrity sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA== + dependencies: + regenerate "^1.4.0" -regenerator-runtime@^0.10.0: +regenerate@^1.2.1, regenerate@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" + integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== + +regenerator-runtime@^0.10.5: version "0.10.5" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658" + integrity sha1-M2w+/BIgrc7dosn6tntaeVWjNlg= -regenerator-transform@0.9.11: - version "0.9.11" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.9.11.tgz#3a7d067520cb7b7176769eb5ff868691befe1283" +regenerator-runtime@^0.11.0: + version "0.11.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" + integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== + +regenerator-transform@^0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.10.1.tgz#1e4996837231da8b7f3cf4114d71b5691a0680dd" + integrity sha512-PJepbvDbuK1xgIgnau7Y90cwaAmO/LCLMI2mPvaXq2heGMR3aWW5/BQvYrhJ8jgmQjXewXvBjzfqKcVOmhjZ6Q== dependencies: babel-runtime "^6.18.0" babel-types "^6.19.0" private "^0.1.6" +regenerator-transform@^0.14.0: + version "0.14.1" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.1.tgz#3b2fce4e1ab7732c08f665dfdb314749c7ddd2fb" + integrity sha512-flVuee02C3FKRISbxhXl9mGzdbWUVHubl1SMaknjxkFB1/iqpJhArQUvRxOOPEc/9tAiX0BaQ28FJH10E4isSQ== + dependencies: + private "^0.1.6" + regex-cache@^0.4.2: - version "0.4.3" - resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.3.tgz#9b1a6c35d4d0dfcef5711ae651e8e9d3d7114145" + version "0.4.4" + resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd" + integrity sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ== dependencies: is-equal-shallow "^0.1.3" - 
is-primitive "^2.0.0" + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" regexpu-core@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240" + integrity sha1-SdA4g3uNz4v6W5pCE5k45uoq4kA= dependencies: regenerate "^1.2.1" regjsgen "^0.2.0" regjsparser "^0.1.4" +regexpu-core@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.6.0.tgz#2037c18b327cfce8a6fea2a4ec441f2432afb8b6" + integrity sha512-YlVaefl8P5BnFYOITTNzDvan1ulLOiXJzCNZxduTIosN17b87h3bvG9yHMoHaRuo88H4mQ06Aodj5VtYGGGiTg== + dependencies: + regenerate "^1.4.0" + regenerate-unicode-properties "^8.1.0" + regjsgen "^0.5.0" + regjsparser "^0.6.0" + unicode-match-property-ecmascript "^1.0.4" + unicode-match-property-value-ecmascript "^1.1.0" + regjsgen@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" + integrity sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc= + +regjsgen@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.0.tgz#a7634dc08f89209c2049adda3525711fb97265dd" + integrity sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA== regjsparser@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c" + integrity sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw= + dependencies: + jsesc "~0.5.0" + +regjsparser@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.0.tgz#f1e6ae8b7da2bae96c99399b868cd6c933a2ba9c" + integrity sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ== dependencies: jsesc "~0.5.0" remove-trailing-separator@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.2.tgz#69b062d978727ad14dc6b56ba4ab772fd8d70511" + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= repeat-element@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" + integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== -repeat-string@^1.5.2: +repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= repeating@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" + integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= dependencies: is-finite "^1.0.0" -request@^2.79.0, request@^2.81.0: - version "2.81.0" - resolved 
"https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" +request@^2.79.0: + version "2.88.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" + integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== dependencies: - aws-sign2 "~0.6.0" - aws4 "^1.2.1" + aws-sign2 "~0.7.0" + aws4 "^1.8.0" caseless "~0.12.0" - combined-stream "~1.0.5" - extend "~3.0.0" + combined-stream "~1.0.6" + extend "~3.0.2" forever-agent "~0.6.1" - form-data "~2.1.1" - har-validator "~4.2.1" - hawk "~3.1.3" - http-signature "~1.1.0" + form-data "~2.3.2" + har-validator "~5.1.0" + http-signature "~1.2.0" is-typedarray "~1.0.0" isstream "~0.1.2" json-stringify-safe "~5.0.1" - mime-types "~2.1.7" - oauth-sign "~0.8.1" - performance-now "^0.2.0" - qs "~6.4.0" - safe-buffer "^5.0.1" - stringstream "~0.0.4" - tough-cookie "~2.3.0" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.4.3" tunnel-agent "^0.6.0" - uuid "^3.0.0" + uuid "^3.3.2" require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= resolve@1.1.7: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= -resolve@^1.3.2: - version "1.3.3" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.3.tgz#655907c3469a8680dc2de3a275a8fdd69691f0e5" +resolve@^1.10.0, resolve@^1.3.2: + version "1.12.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.0.tgz#3fc644a35c84a48554609ff26ec52b66fa577df6" + integrity sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w== dependencies: - path-parse "^1.0.5" + path-parse "^1.0.6" restore-cursor@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= dependencies: onetime "^2.0.0" signal-exit "^3.0.2" -right-align@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" - dependencies: - align-text "^0.1.1" +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== -rimraf@2, rimraf@^2.5.1, rimraf@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" +rimraf@^2.6.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity 
sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== dependencies: - glob "^7.0.5" + glob "^7.1.3" rimraf@~2.2.6: version "2.2.8" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" + integrity sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI= -safe-buffer@^5.0.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" +safe-buffer@^5.0.1, safe-buffer@^5.1.2: + version "5.2.0" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519" + integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + dependencies: + ret "~0.1.10" + +"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sane@~1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/sane/-/sane-1.6.0.tgz#9610c452307a135d29c1fdfe2547034180c46775" + integrity sha1-lhDEUjB6E10pwf3+JUcDQYDEZ3U= dependencies: anymatch "^1.3.0" exec-sh "^0.2.0" @@ -3186,139 +4747,221 @@ sane@~1.6.0: walker "~1.0.5" watch "~0.10.0" -sax@^1.2.1: +sax@^1.2.1, sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.6.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -"semver@2 || 3 || 4 || 5", semver@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" +semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= set-getter@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.0.tgz#d769c182c9d5a51f409145f2fba82e5e86e80376" + integrity sha1-12nBgsnVpR9AkUXy+6guXoboA3Y= dependencies: to-object-path "^0.3.0" -set-immediate-shim@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" - -set-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-1.0.0.tgz#bcc76f71a0f1e07a24b987d0a02afec9f665304f" +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" is-extendable "^0.1.1" - is-plain-object "^2.0.1" - to-object-path "^0.3.0" + is-plain-object "^2.0.3" + split-string "^3.0.1" -shallow-clone@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-0.1.2.tgz#5909e874ba77106d73ac414cfec1ffca87d97060" +set-value@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-3.0.1.tgz#52c82af7653ba69eb1db92e81f5cdb32739b9e95" + integrity sha512-w6n3GUPYAWQj4ZyHWzD7K2FnFXHx9OTwJYbWg+6nXjG8sCLfs9DGv+KlqglKIIJx+ks7MlFuwFW2RBPb+8V+xg== + dependencies: + is-plain-object "^2.0.4" + +shallow-clone@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-1.0.0.tgz#4480cd06e882ef68b2ad88a3ea54832e2c48b571" + integrity sha512-oeXreoKR/SyNJtRJMAKPDSvd28OqEwG4eR/xc856cRGBII7gX9lvAqDxusPm0846z/w/hWYjI1NpKwJ00NHzRA== dependencies: is-extendable "^0.1.1" - kind-of "^2.0.1" - lazy-cache "^0.2.3" + kind-of "^5.0.0" mixin-object "^2.0.1" +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= dependencies: shebang-regex "^1.0.0" shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= -shellwords@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.0.tgz#66afd47b6a12932d9071cbfd98a52e785cd0ba14" +shellwords@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" + integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== signal-exit@^3.0.0, signal-exit@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0= slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" + integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU= -slide@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity 
sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" -sntp@1.x.x: - version "1.0.9" - resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: - hoek "2.x.x" + kind-of "^3.2.0" -source-map-support@^0.4.2: - version "0.4.15" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.15.tgz#03202df65c06d2bd8c7ec2362a193056fef8d3b1" +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" -source-map@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" +source-map-resolve@^0.5.0: + version "0.5.2" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" + integrity sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA== dependencies: - amdefine ">=0.0.4" + atob "^2.1.1" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" -source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1: - version "0.5.6" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" +source-map-support@^0.4.15: + version "0.4.18" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" + integrity sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA== + dependencies: + source-map "^0.5.6" -source-map@~0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.2.0.tgz#dab73fbcfc2ba819b4de03bd6f6eaa48164b3f9d" +source-map-support@^0.5.9: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== dependencies: - amdefine ">=0.0.4" + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= -source-map@~0.6.1: +source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@^0.5.7: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: version "0.6.1" resolved 
"https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -spdx-correct@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" +spdx-correct@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" + integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== dependencies: - spdx-license-ids "^1.0.2" + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" -spdx-expression-parse@~1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" +spdx-exceptions@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" + integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== -spdx-license-ids@^1.0.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" +spdx-expression-parse@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" + integrity sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.5" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" + integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sshpk@^1.7.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.1.tgz#512df6da6287144316dc4c18fe1cf1d940739be3" + version "1.16.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" + integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" - dashdash "^1.12.0" - getpass "^0.1.1" - optionalDependencies: bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" ecc-jsbn "~0.1.1" + getpass "^0.1.1" jsbn "~0.1.0" + safer-buffer "^2.0.2" tweetnacl "~0.14.0" -static-extend@^0.1.2: +static-extend@^0.1.1, static-extend@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= dependencies: define-property "^0.2.5" object-copy "^0.1.0" @@ -3326,120 +4969,121 @@ static-extend@^0.1.2: 
string-length@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/string-length/-/string-length-1.0.1.tgz#56970fb1c38558e9e70b728bf3de269ac45adfac" + integrity sha1-VpcPscOFWOnnC3KL894mmsRa36w= dependencies: strip-ansi "^3.0.0" string-width@^1.0.1, string-width@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -string-width@^2.0.0: +"string-width@^1.0.2 || 2", string-width@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" -string_decoder@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" -stringstream@~0.0.4: - version "0.0.5" - resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" - strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" strip-ansi@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= dependencies: ansi-regex "^3.0.0" -strip-ansi@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.1.1.tgz#39e8a98d044d150660abe4a6808acf70bb7bc991" - strip-bom@3.0.0, strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= dependencies: is-utf8 "^0.2.0" strip-color@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/strip-color/-/strip-color-0.1.0.tgz#106f65d3d3e6a2d9401cac0eb0ce8b8a702b4f7b" + integrity sha1-EG9l09PmotlAHKwOsM6LinArT3s= strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= success-symbol@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/success-symbol/-/success-symbol-0.1.0.tgz#24022e486f3bf1cdca094283b769c472d3b72897" + integrity sha1-JAIuSG878c3KCUKDt2nEctO3KJc= supports-color@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= supports-color@^3.1.2: version "3.2.3" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= dependencies: has-flag "^1.0.0" -supports-color@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.2.1.tgz#65a4bb2631e90e02420dba5554c375a4754bb836" +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: - has-flag "^2.0.0" + has-flag "^3.0.0" symbol-tree@^3.2.1: - version "3.2.2" - resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6" - -tar-pack@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.0.tgz#23be2d7f671a8339376cbdb0b8fe3fdebf317984" - dependencies: - debug "^2.2.0" - fstream "^1.0.10" - fstream-ignore "^1.0.5" - once "^1.3.3" - readable-stream "^2.1.4" - rimraf "^2.5.1" - tar "^2.2.1" - uid-number "^0.0.6" - -tar@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" - dependencies: - block-stream "*" - fstream "^1.0.2" - inherits "2" + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +tar@^4: + version "4.4.12" + resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.12.tgz#6a1275a870a782f92828e24d28fa6aa253193af7" + integrity sha512-4GwpJwdSjIHlUrWd/1yJrl63UqcqjJyVglgIwn4gcG+Lrp9TXpZ1ZRrGLIRBNqLTUvz6yoPJrX4B/MISxY/Ukg== + dependencies: + chownr "^1.1.1" + fs-minipass "^1.2.5" + minipass "^2.8.6" + minizlib "^1.2.1" + mkdirp "^0.5.0" + safe-buffer "^5.1.2" + yallist "^3.0.3" temp@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/temp/-/temp-0.8.3.tgz#e0c6bc4d26b903124410e4fed81103014dfc1f59" + integrity sha1-4Ma8TSa5AxJEEOT+2BEDAU38H1k= dependencies: os-tmpdir "^1.0.0" rimraf "~2.2.6" @@ -3447,14 +5091,16 @@ temp@^0.8.1: terminal-paginator@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/terminal-paginator/-/terminal-paginator-2.0.2.tgz#967e66056f28fe8f55ba7c1eebfb7c3ef371c1d3" + integrity sha512-IZMT5ECF9p4s+sNCV8uvZSW9E1+9zy9Ji9xz2oee8Jfo7hUFpauyjxkhfRcIH6Lu3Wdepv5D1kVRc8Hx74/LfQ== dependencies: debug "^2.6.6" extend-shallow "^2.0.1" log-utils "^0.2.1" -test-exclude@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.1.1.tgz#4d84964b0966b0087ecc334a2ce002d3d9341e26" +test-exclude@^4.2.1: + version "4.2.3" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.2.3.tgz#a9a5e64474e4398339245a0a769ad7c2f4a97c20" + integrity sha512-SYbXgY64PT+4GAL2ocI3HwPa4Q4TBKm0cwAVeKOt/Aoc0gSpNRjJX8w0pA1LMKZ3LBmd8pYBqApFNQLII9kavA== dependencies: arrify "^1.0.1" micromatch "^2.3.11" @@ -3465,144 +5111,248 @@ test-exclude@^4.1.1: throat@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/throat/-/throat-3.2.0.tgz#50cb0670edbc40237b9e347d7e1f88e4620af836" + integrity 
sha512-/EY8VpvlqJ+sFtLPeOgc8Pl7kQVOWv0woD87KTXVHPIAE842FGT+rokxIhe8xIUP1cfgrkt0as0vDLjDiMtr8w== time-stamp@^1.0.1: version "1.1.0" resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3" + integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM= tmpl@1.0.x: version "1.0.4" resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" + integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= -to-fast-properties@^1.0.1: +to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" + integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= to-object-path@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= dependencies: kind-of "^3.0.2" +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + toggle-array@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/toggle-array/-/toggle-array-1.0.1.tgz#cbf5840792bd5097f33117ae824c932affe87d58" + integrity sha1-y/WEB5K9UJfzMReugkyTKv/ofVg= dependencies: isobject "^3.0.0" -tough-cookie@^2.3.2, tough-cookie@~2.3.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" +tough-cookie@^2.3.2: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + +tough-cookie@~2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" + integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== dependencies: + psl "^1.1.24" punycode "^1.4.1" tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" + integrity sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM= tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= dependencies: safe-buffer "^5.0.1" tweetnacl@^0.14.3, tweetnacl@~0.14.0: 
version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= dependencies: prelude-ls "~1.1.2" -uglify-js@^2.6: - version "2.8.29" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" +uglify-js@^3.1.4: + version "3.6.0" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.0.tgz#704681345c53a8b2079fb6cec294b05ead242ff5" + integrity sha512-W+jrUHJr3DXKhrsS7NUVxn3zqMOFn0hL/Ei6v0anCIMoKC93TjcflTagwIHLW7SfMFfiQuktQyFVCFHGUE0+yg== dependencies: - source-map "~0.5.1" - yargs "~3.10.0" - optionalDependencies: - uglify-to-browserify "~1.0.0" + commander "~2.20.0" + source-map "~0.6.1" -uglify-to-browserify@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" +unicode-canonical-property-names-ecmascript@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" + integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== -uid-number@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" +unicode-match-property-ecmascript@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" + integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== + dependencies: + unicode-canonical-property-names-ecmascript "^1.0.4" + unicode-property-aliases-ecmascript "^1.0.4" -underscore@~1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8" +unicode-match-property-value-ecmascript@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz#5b4b426e08d13a80365e0d657ac7a6c1ec46a277" + integrity sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g== + +unicode-property-aliases-ecmascript@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz#a9cc6cc7ce63a0a3023fc99e341b94431d405a57" + integrity sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw== + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= + 
dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +uri-js@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" + integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== + dependencies: + punycode "^2.1.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== user-home@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" + integrity sha1-K1viOjK2Onyd640PKNSFcko98ZA= util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= -uuid@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04" +uuid@^3.3.2: + version "3.3.3" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.3.tgz#4568f0216e78760ee1dbf3a4d2cf53e224112866" + integrity sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ== -v8flags@^2.0.10: +v8flags@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-2.1.1.tgz#aab1a1fa30d45f88dd321148875ac02c0b55e5b4" + integrity sha1-qrGh+jDUX4jdMhFIh1rALAtV5bQ= dependencies: user-home "^1.1.1" validate-npm-package-license@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: - spdx-correct "~1.0.0" - spdx-expression-parse "~1.0.0" + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" -verror@1.3.6: - version "1.3.6" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.3.6.tgz#cff5df12946d297d2baaefaa2689e25be01c005c" +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= dependencies: - extsprintf "1.0.2" + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" walker@~1.0.5: version "1.0.7" resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" + integrity sha1-L3+bj9ENZ3JisYqITijRlhjgKPs= dependencies: makeerror "1.0.x" warning-symbol@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/warning-symbol/-/warning-symbol-0.1.0.tgz#bb31dd11b7a0f9d67ab2ed95f457b65825bbad21" + integrity sha1-uzHdEbeg+dZ6su2V9Fe2WCW7rSE= watch@~0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/watch/-/watch-0.10.0.tgz#77798b2da0f9910d595f1ace5b0c2258521f21dc" + integrity sha1-d3mLLaD5kQ1ZXxrOWwwiWFIfIdw= webidl-conversions@^3.0.0: version "3.0.1" resolved 
"https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= webidl-conversions@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.1.tgz#8015a17ab83e7e1b311638486ace81da6ce206a0" + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== whatwg-encoding@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.1.tgz#3c6c451a198ee7aec55b1ec61d0920c67801a5f4" + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== dependencies: - iconv-lite "0.4.13" + iconv-lite "0.4.24" whatwg-url@^4.3.0: version "4.8.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-4.8.0.tgz#d2981aa9148c1e00a41c5a6131166ab4683bbcc0" + integrity sha1-0pgaqRSMHgCkHFphMRZqtGg7vMA= dependencies: tr46 "~0.0.3" webidl-conversions "^3.0.0" @@ -3610,62 +5360,56 @@ whatwg-url@^4.3.0: which-module@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" + integrity sha1-u6Y8qGGUiZT/MHc2CJ47lgJsKk8= which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= -which@^1.2.12: - version "1.2.14" - resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5" - dependencies: - isexe "^2.0.0" - -which@^1.2.9: - version "1.3.0" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" +which@^1.2.12, which@^1.2.9, which@^1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" wide-align@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710" + version "1.1.3" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" + integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: - string-width "^1.0.2" - -window-size@0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" + string-width "^1.0.2 || 2" window-size@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/window-size/-/window-size-1.1.0.tgz#3b402d3244f35561db2c9761ad9d1e5286b07a2d" + version "1.1.1" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-1.1.1.tgz#9858586580ada78ab26ecd6978a6e03115c1af20" + integrity sha512-5D/9vujkmVQ7pSmc0SCBmHXbkv6eaHwXEx65MywhmUMsI8sGqJ972APq1lotfcwMKPFLuCFfL8xGHLIp7jaBmA== dependencies: define-property "^1.0.0" is-number "^3.0.0" -wordwrap@0.0.2: - version "0.0.2" - resolved 
"https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" - wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + integrity sha1-o9XabNXAvAAI03I0u68b7WMFkQc= wordwrap@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= worker-farm@^1.3.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.4.1.tgz#a438bc993a7a7d133bcb6547c95eca7cff4897d8" + version "1.7.0" + resolved "https://registry.yarnpkg.com/worker-farm/-/worker-farm-1.7.0.tgz#26a94c5391bbca926152002f69b84a4bf772e5a8" + integrity sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw== dependencies: - errno "^0.1.4" - xtend "^4.0.1" + errno "~0.1.7" wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" @@ -3673,46 +5417,55 @@ wrap-ansi@^2.0.0: wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= -write-file-atomic@^1.2.0: - version "1.3.4" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-1.3.4.tgz#f807a4f0b1d9e913ae7a48112e6cc3af1991b45f" +write-file-atomic@^2.3.0: + version "2.4.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz#1fd2e9ae1df3e75b8d8c367443c692d4ca81f481" + integrity sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ== dependencies: graceful-fs "^4.1.11" imurmurhash "^0.1.4" - slide "^1.1.5" + signal-exit "^3.0.2" xml-name-validator@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-2.0.1.tgz#4d8b8f1eccd3419aa362061becef515e1e559635" - -xtend@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" + integrity sha1-TYuPHszTQZqjYgYb7O9RXh5VljU= y18n@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + integrity sha1-bRX7qITAhnnA136I53WegR4H+kE= yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= + +yallist@^3.0.0, yallist@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.3.tgz#b4b049e314be545e3ce802236d6cd22cd91c3de9" + integrity sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A== yargs-parser@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-5.0.0.tgz#275ecf0d7ffe05c77e64e7c86e4cd94bf0e1228a" + integrity sha1-J17PDX/+Bcd+ZOfIbkzZS/DhIoo= dependencies: camelcase "^3.0.0" yargs-parser@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9" + integrity sha1-jQrELxbqVd69MyyvTEA4s+P139k= dependencies: camelcase "^4.1.0" yargs@^7.0.2: version "7.1.0" resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-7.1.0.tgz#6ba318eb16961727f5d284f8ea003e8d6154d0c8" + integrity sha1-a6MY6xaWFyf10oT46gA+jWFU0Mg= dependencies: camelcase "^3.0.0" cliui "^3.2.0" @@ -3731,6 +5484,7 @@ yargs@^7.0.2: yargs@^8.0.2: version "8.0.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360" + integrity sha1-YpmpBVsc78lp/355wdkY3Osiw2A= dependencies: camelcase "^4.1.0" cliui "^3.2.0" @@ -3745,12 +5499,3 @@ yargs@^8.0.2: which-module "^2.0.0" y18n "^3.2.1" yargs-parser "^7.0.0" - -yargs@~3.10.0: - version "3.10.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" - dependencies: - camelcase "^1.0.2" - cliui "^2.1.0" - decamelize "^1.0.0" - window-size "0.1.0" diff --git a/prelude/prelude.js b/prelude/prelude.js index 3a6fad06850..6661cf0c54a 100644 --- a/prelude/prelude.js +++ b/prelude/prelude.js @@ -1,3 +1,7 @@ +declare var undefined: void; + +type PropertyDescriptor = any + declare class Object {} declare class Function {} @@ -18,6 +22,12 @@ declare class $ReadOnlyArray<+T> { declare class Array extends $ReadOnlyArray {} +type $ArrayLike = { + [indexer: number]: T, + length: number, + ... +} + // Promise declare class Promise<+R> {} diff --git a/resources/appveyor/build.sh b/resources/appveyor/build.sh index 48c077f5cef..f180f619c61 100644 --- a/resources/appveyor/build.sh +++ b/resources/appveyor/build.sh @@ -9,14 +9,11 @@ pwd # print opam config opam config list -eval $(opam config env) +eval $(opam env) # print ocaml config ocamlc -config cd "${APPVEYOR_BUILD_FOLDER}" -opam pin add flowtype-ci . -n -opam depext -u flowtype-ci -opam install flowtype-ci --deps-only make all make -C src/parser/ ../../_build/src/parser/test/run_tests.native diff --git a/runtests.sh b/runtests.sh index 2b321b97f6a..4c5f107dbae 100755 --- a/runtests.sh +++ b/runtests.sh @@ -1,5 +1,6 @@ #!/bin/bash +THIS_DIR=$(cd -P "$(dirname "$(readlink "${BASH_SOURCE[0]}" || echo "${BASH_SOURCE[0]}")")" && pwd) # Use the assert functions below to expect specific exit codes. @@ -39,6 +40,17 @@ assert_errors() { assert_exit_on_line "${BASH_LINENO[0]}" "$EXIT_ERRS" "$@" } +show_skipping_stats_classic() { + printf "\\n========Skipping stats========\\n" + grep -o "Merge skipped [0-9]\+ of [0-9]\+ modules" $1 | tail -n 1 +} + +show_skipping_stats_types_first() { + printf "\\n========Skipping stats========\\n" + grep -o "Merge skipped [0-9]\+ of [0-9]\+ modules" $1 | tail -n 1 + grep -o "Check will skip [0-9]\+ of [0-9]\+ files" $1 | tail -n 1 +} + show_help() { printf "Usage: runtests.sh [-hlqrv] [-d DIR] [-t TEST] [-b] FLOW_BINARY [[-f] TEST_FILTER]\n\n" printf "Runs Flow's tests.\n\n" @@ -66,13 +78,14 @@ show_help() { export IN_FLOW_TEST=1 export FLOW_LOG_LEVEL=debug +export FLOW_NODE_BINARY=${FLOW_NODE_BINARY:-${NODE_BINARY:-$(which node)}} OPTIND=1 record=0 saved_state=0 verbose=0 quiet=0 -relative="$(dirname "${BASH_SOURCE[0]}")" +relative="$THIS_DIR" list_tests=0 while getopts "b:d:f:lqrst:vh?" opt; do case "$opt" in @@ -137,6 +150,9 @@ if [[ "$OSTYPE" == "darwin"* ]]; then else FLOW=$(readlink -f "$FLOW") fi + +VERSION=$("$FLOW" version --semver) + if [ -t 1 ]; then COLOR_RESET="\x1b[0m" COLOR_DEFAULT="\x1b[39;49;0m" @@ -184,8 +200,11 @@ print_failure() { fi if [[ "$record" -eq 1 ]]; then - mv "${dir}${name}.out" "${dir}${name}.exp" - rm "$err_file" + # Copy .out to .exp, replacing the current version, if present, with + # , so that the .exp doesn't have to be updated on each release. 
+ sed 's/'"${VERSION//./\\.}"'//g' "${dir}${name}.out" > "${dir}${name}.exp" + rm "${dir}${name}.out" + rm -f "$err_file" rm "$diff_file" fi } @@ -346,6 +365,7 @@ runtest() { # stop the server after the script exits. # all=" --all" + auto_start=true flowlib=" --no-flowlib" shell="" cmd="check" @@ -355,6 +375,7 @@ runtest() { cwd="" start_args="" file_watcher="none" + wait_for_recheck="true" if [ -e ".testconfig" ] then # all @@ -362,6 +383,11 @@ runtest() { then all="" fi + # auto_start + if [ "$(awk '$1=="auto_start:"{print $2}' .testconfig)" == "false" ] + then + auto_start=false + fi # cwd (current directory) cwd="$(awk '$1=="cwd:"{print $2}' .testconfig)" # ignore_stderr @@ -393,6 +419,7 @@ runtest() { fi # cmd config_cmd="$(awk '$1=="cmd:"{$1="";print}' .testconfig)" + config_cmd="${config_cmd## }" # trim leading space if [ "$config_cmd" != "" ] then cmd="$config_cmd" @@ -409,6 +436,12 @@ runtest() { then return $RUNTEST_SKIP fi + + # wait_for_recheck + if [ "$(awk '$1=="wait_for_recheck:"{print $2}' .testconfig)" == "false" ] + then + wait_for_recheck="false" + fi fi if [ "$cwd" != "" ]; then @@ -416,7 +449,7 @@ runtest() { fi # if .flowconfig sets no_flowlib, don't pass the cli flag - if grep -q "no_flowlib" .flowconfig; then + if [ -f .flowconfig ] && grep -q "no_flowlib" .flowconfig; then flowlib="" fi @@ -429,7 +462,9 @@ runtest() { set -e # start lazy server and wait "$FLOW" start "$root" \ - $all $flowlib --wait --lazy \ + $all $flowlib --wait \ + --wait-for-recheck "$wait_for_recheck" \ + --lazy \ --file-watcher "$file_watcher" \ --flowconfig-name "$flowconfig_name" \ --log-file "$abs_log_file" \ @@ -500,8 +535,10 @@ runtest() { if create_saved_state "$root" "$flowconfig_name" then + PATH="$THIS_DIR/scripts/tests_bin:$PATH" \ "$FLOW" start "$root" \ $all $flowlib --wait \ + --wait-for-recheck "$wait_for_recheck" \ --saved-state-fetcher "local" \ --saved-state-no-fallback \ --file-watcher "$file_watcher" \ @@ -515,8 +552,9 @@ runtest() { fi else # start server and wait + PATH="$THIS_DIR/scripts/tests_bin:$PATH" \ "$FLOW" start "$root" \ - $all $flowlib --wait \ + $all $flowlib --wait --wait-for-recheck "$wait_for_recheck" \ --file-watcher "$file_watcher" \ --log-file "$abs_log_file" \ --monitor-log-file "$abs_monitor_log_file" \ @@ -529,37 +567,46 @@ runtest() { start_flow () { assert_ok start_flow_unsafe "$@" } - start_flow_unsafe . $start_args > /dev/null 2>> "$abs_err_file" - code=$? - if [ $code -ne 0 ]; then - # flow failed to start - printf "flow start exited code %s\\n" "$code" >> "$abs_out_file" - return_status=$RUNTEST_ERROR - elif [ "$shell" != "" ]; then - # run test script in subshell so it inherits functions - ( - set -e # The script should probably use this option - source "$shell" "$FLOW" - ) 1> "$abs_out_file" 2> "$stderr_dest" + + if [ $auto_start = true ]; then + start_flow_unsafe . $start_args > /dev/null 2>> "$abs_err_file" code=$? 
if [ $code -ne 0 ]; then - printf "%s exited code %s\\n" "$shell" "$code" >> "$abs_out_file" + # flow failed to start + printf "flow start exited code %s\\n" "$code" >> "$abs_out_file" return_status=$RUNTEST_ERROR fi - else - # If there's stdin, then direct that in - # cmd should NOT be double quoted...it may contain many commands - # and we do want word splitting - if [ "$stdin" != "" ] - then - cmd="$FLOW $cmd < $stdin 1> $abs_out_file 2> $stderr_dest" - else - cmd="$FLOW $cmd 1> $abs_out_file 2> $stderr_dest" + fi + + if [ $return_status -ne $RUNTEST_ERROR ]; then + if [ "$shell" != "" ]; then + # run test script in subshell so it inherits functions + ( + set -e # The script should probably use this option + export PATH="$THIS_DIR/scripts/tests_bin:$PATH" + source "$shell" "$FLOW" + ) 1> "$abs_out_file" 2> "$stderr_dest" + code=$? + if [ $code -ne 0 ]; then + printf "%s exited code %s\\n" "$shell" "$code" >> "$abs_out_file" + return_status=$RUNTEST_ERROR fi - eval "$cmd" + else + # If there's stdin, then direct that in + # cmd should NOT be double quoted...it may contain many commands + # and we do want word splitting + if [ "$stdin" != "" ] + then + cmd="$FLOW $cmd < $stdin 1> $abs_out_file 2> $stderr_dest" + else + cmd="$FLOW $cmd 1> $abs_out_file 2> $stderr_dest" + fi + eval "$cmd" + fi + + # stop server, even if we didn't start it + "$FLOW" stop . 1> /dev/null 2>&1 fi - # stop server - "$FLOW" stop . 1> /dev/null 2>&1 fi if [ "$cwd" != "" ]; then @@ -571,7 +618,15 @@ runtest() { if [ $return_status -eq $RUNTEST_SUCCESS ]; then pushd "$OUT_PARENT_DIR" >/dev/null - diff -u --strip-trailing-cr "$exp_file" "$out_file" > "$diff_file" 2>&1 + # When diffing the .exp against the .out, replace in the + # .exp with the actual version, so the diff shows which version we + # were expecting, but the .exp doesn't need to be updated for each + # release. + diff -u --strip-trailing-cr \ + --label "$exp_file" --label "$out_file" \ + <(awk '{gsub(//, "'"$VERSION"'"); print $0}' "$exp_file") \ + "$out_file" \ + > "$diff_file" 2>&1 popd >/dev/null fi diff --git a/scripts/.ocamlformat-ignore b/scripts/.ocamlformat-ignore new file mode 100644 index 00000000000..2e62e127874 --- /dev/null +++ b/scripts/.ocamlformat-ignore @@ -0,0 +1 @@ +gen_build_id.ml diff --git a/scripts/_tags b/scripts/_tags index 26eeab34c00..9fcf2c0cb65 100644 --- a/scripts/_tags +++ b/scripts/_tags @@ -1,3 +1,2 @@ true: safe_string -: package(compiler-libs.common), package(unix) : package(compiler-libs.common) diff --git a/scripts/dune b/scripts/dune new file mode 100644 index 00000000000..c585dda9f8b --- /dev/null +++ b/scripts/dune @@ -0,0 +1,12 @@ +(rule + (targets get_build_id_gen.c) + (deps gen_build_id.ml script_utils.ml) + (action + (run ocaml -I scripts -w -3 -unsafe-string unix.cma gen_build_id.ml get_build_id_gen.c))) + +(library + (name flow_script_utils) + (wrapped false) + (modules script_utils) + (libraries unix) +) diff --git a/scripts/gen_build_id.ml b/scripts/gen_build_id.ml index 170f700c2d0..95594000b3a 100644 --- a/scripts/gen_build_id.ml +++ b/scripts/gen_build_id.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -24,9 +24,9 @@ let () = try read_process_stdout "git" [|"git"; "rev-parse"; "HEAD"|] with Failure msg -> ( Printf.eprintf "Failed git rev-parse: %s\n%!" 
msg; - try read_process_stdout "hg" [|"hg"; "id"; "-i"|] + try read_process_stdout "hg" [|"hg"; "log"; "-r"; "."; "-T"; "{node}\\n"|] with Failure msg -> ( - Printf.eprintf "Failed hg id: %s\n%!" msg; + Printf.eprintf "Failed hg log: %s\n%!" msg; "" ) ) @@ -47,8 +47,8 @@ let () = ) in let content = Printf.sprintf - "const char* const BuildInfo_kRevision = %S;\nconst unsigned long BuildInfo_kRevisionCommitTimeUnix = %sul;\n" - rev time in + "const char* const BuildInfo_kRevision = %S;\nconst unsigned long BuildInfo_kRevisionCommitTimeUnix = %sul;\nconst char* const BuildInfo_kBuildMode = %S;\n" + rev time "" (* not implemented *) in let do_dump = not (Sys.file_exists out_file) || string_of_file out_file <> content in if do_dump then diff --git a/scripts/ppx_gen_flowlibs.ml b/scripts/ppx_gen_flowlibs.ml deleted file mode 100644 index 5589dd6eaa4..00000000000 --- a/scripts/ppx_gen_flowlibs.ml +++ /dev/null @@ -1,49 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Asttypes -open Parsetree -open Ast_mapper -open Ast_helper - -(* Read in all the flowlib files *) -let get_libs dir = - Sys.readdir dir - |> Array.fold_left (fun acc file -> - let contents = Script_utils.string_of_file (Filename.concat dir file) in - (file, contents)::acc - ) [] - -(* Turn the (name, contents) list into a PPX ast (string * string) array - * expression *) -let contents lib_dir = - get_libs lib_dir - |> List.map (fun (name, contents) -> Exp.tuple [ - Exp.constant (Const.string name); Exp.constant (Const.string contents); - ]) - |> Exp.array - -(* Whenever we see [%flowlib_contents], replace it wil the flowlib contents *) -let ppx_gen_flowlibs_mapper argv = - let flowlib_contents, prelude_contents = - match argv with - | [flowlib_dir; prelude_dir] -> contents flowlib_dir, contents prelude_dir - | _ -> - failwith - (Printf.sprintf "Expected two arguments, got %d." (List.length argv)) - in - { default_mapper with - expr = fun mapper expr -> - match expr with - | { pexp_desc = Pexp_extension ({ txt = "flowlib_contents"; _ }, PStr []); _} -> - flowlib_contents - | { pexp_desc = Pexp_extension ({ txt = "prelude_contents"; _ }, PStr []); _} -> - prelude_contents - | other -> default_mapper.expr mapper other; } - - let () = - register "ppx_gen_flowlibs" ppx_gen_flowlibs_mapper diff --git a/scripts/ppx_gen_flowlibs/dune b/scripts/ppx_gen_flowlibs/dune new file mode 100644 index 00000000000..5317fe5b4c3 --- /dev/null +++ b/scripts/ppx_gen_flowlibs/dune @@ -0,0 +1,6 @@ +(library + (name ppx_gen_flowlibs) + (modules ppx_gen_flowlibs) + (libraries flow_script_utils ocaml-migrate-parsetree) + (kind ppx_rewriter) +) diff --git a/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.ml b/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.ml new file mode 100644 index 00000000000..1e581b24c9c --- /dev/null +++ b/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs.ml @@ -0,0 +1,66 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open! 
Migrate_parsetree +open Ast_405 + +let ocaml_version = Versions.ocaml_405 + +open Asttypes +open Parsetree +open Ast_mapper +open Ast_helper + +(* Read in all the flowlib files *) +let get_libs dir = + Sys.readdir dir + |> Array.fold_left + (fun acc file -> + let contents = Script_utils.string_of_file (Filename.concat dir file) in + (file, contents) :: acc) + [] + +(* Turn the (name, contents) list into a PPX ast (string * string) array + * expression *) +let contents lib_dir = + get_libs lib_dir + |> List.map (fun (name, contents) -> + Exp.tuple [Exp.constant (Const.string name); Exp.constant (Const.string contents)]) + |> Exp.array + +(* Whenever we see [%flowlib_contents], replace it wil the flowlib contents *) +let ppx_gen_flowlibs_mapper ~flowlib_contents ~prelude_contents = + { + default_mapper with + expr = + (fun mapper expr -> + match expr with + | { pexp_desc = Pexp_extension ({ txt = "flowlib_contents"; _ }, PStr []); _ } -> + flowlib_contents + | { pexp_desc = Pexp_extension ({ txt = "prelude_contents"; _ }, PStr []); _ } -> + prelude_contents + | other -> default_mapper.expr mapper other); + } + +let () = + let flowlib_dir_ref = ref "" in + let prelude_dir_ref = ref "" in + let args = + [ + ("-flowlib", Arg.Set_string flowlib_dir_ref, "Path to flowlib directory"); + ("-prelude", Arg.Set_string prelude_dir_ref, "Path to prelude directory"); + ] + in + Driver.register ~name:"ppx_gen_flowlibs" ~args ocaml_version (fun _config _cookies -> + let (flowlib_contents, prelude_contents) = + match (!flowlib_dir_ref, !prelude_dir_ref) with + | ("", _) + | (_, "") -> + failwith "Expected two arguments." + | (flowlib_dir, prelude_dir) -> (contents flowlib_dir, contents prelude_dir) + in + ppx_gen_flowlibs_mapper ~flowlib_contents ~prelude_contents) diff --git a/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.ml b/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.ml new file mode 100644 index 00000000000..2c663110a3e --- /dev/null +++ b/scripts/ppx_gen_flowlibs/ppx_gen_flowlibs_standalone.ml @@ -0,0 +1,10 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Migrate_parsetree + +let () = Driver.run_main () diff --git a/scripts/script_utils.ml b/scripts/script_utils.ml index 0036267b9b7..8e506019424 100644 --- a/scripts/script_utils.ml +++ b/scripts/script_utils.ml @@ -1,12 +1,12 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) let with_pipe f = - let fd_r, fd_w = Unix.pipe () in + let (fd_r, fd_w) = Unix.pipe () in try let res = f (fd_r, fd_w) in Unix.close fd_r; @@ -19,13 +19,23 @@ let with_pipe f = let with_in_channel filename f = let ic = open_in_bin filename in - try let res = f ic in close_in ic; res - with exn -> close_in ic; raise exn + try + let res = f ic in + close_in ic; + res + with exn -> + close_in ic; + raise exn let with_out_channel filename f = let oc = open_out_bin filename in - try let res = f oc in close_out oc; res - with exn -> close_out oc; raise exn + try + let res = f oc in + close_out oc; + res + with exn -> + close_out oc; + raise exn let read_process name args (in_r, _in_w) (out_r, out_w) (err_r, err_w) = let pid = @@ -34,45 +44,45 @@ let read_process name args (in_r, _in_w) (out_r, out_w) (err_r, err_w) = (* On Windows, this is what happens if you call create_process * non_existent_thing *) raise (Failure (name ^ ": command not found")) - in + in match Unix.waitpid [] pid with - | _, Unix.WEXITED 0 -> - input_line (Unix.in_channel_of_descr out_r) - | _, Unix.WEXITED 127 -> - (* On Linux & OSX, this is what happens if you call create_process - * non_existent_thing *) - raise (Failure (name ^ ": command not found")) - | _, Unix.WEXITED 128 -> - raise (Failure (input_line (Unix.in_channel_of_descr err_r))) - | _, Unix.WEXITED code -> - raise (Failure (name ^ ": exited code "^(string_of_int code))) - | _, Unix.WSIGNALED signal -> - raise (Failure (name ^ ": killed by signal " ^ (string_of_int signal))) - | _, Unix.WSTOPPED signal -> - raise (Failure (name ^ ": stopped by signal " ^ (string_of_int signal))) + | (_, Unix.WEXITED 0) -> input_line (Unix.in_channel_of_descr out_r) + | (_, Unix.WEXITED 127) -> + (* On Linux & OSX, this is what happens if you call create_process + * non_existent_thing *) + raise (Failure (name ^ ": command not found")) + | (_, Unix.WEXITED 128) -> raise (Failure (input_line (Unix.in_channel_of_descr err_r))) + | (_, Unix.WEXITED code) -> raise (Failure (name ^ ": exited code " ^ string_of_int code)) + | (_, Unix.WSIGNALED signal) -> + raise (Failure (name ^ ": killed by signal " ^ string_of_int signal)) + | (_, Unix.WSTOPPED signal) -> + raise (Failure (name ^ ": stopped by signal " ^ string_of_int signal)) (* Read the first line in stdout or stderr of an external command. *) let read_process_output name args = - with_pipe @@ fun in_pipe -> - with_pipe @@ fun out_pipe -> - read_process name args in_pipe out_pipe out_pipe + with_pipe + @@ fun in_pipe -> + with_pipe @@ (fun out_pipe -> read_process name args in_pipe out_pipe out_pipe) (* Read the first line in stdout of an external command. 
*) let read_process_stdout name args = - with_pipe @@ fun in_pipe -> - with_pipe @@ fun out_pipe -> - with_pipe @@ fun err_pipe -> - read_process name args in_pipe out_pipe err_pipe + with_pipe + @@ fun in_pipe -> + with_pipe + @@ fun out_pipe -> + with_pipe @@ (fun err_pipe -> read_process name args in_pipe out_pipe err_pipe) let string_of_file filename = - with_in_channel filename @@ fun ic -> + with_in_channel filename + @@ fun ic -> let s = Bytes.create 32759 in let b = Buffer.create 1000 in let rec iter ic b s = let nread = input ic s 0 32759 in - if nread > 0 then begin + if nread > 0 then ( Buffer.add_subbytes b s 0 nread; iter ic b s - end in + ) + in iter ic b s; Buffer.contents b diff --git a/scripts/tests_bin/node b/scripts/tests_bin/node new file mode 100755 index 00000000000..6e4aecd43b0 --- /dev/null +++ b/scripts/tests_bin/node @@ -0,0 +1,4 @@ +#!/bin/bash + +# FLOW_NODE_BINARY is set by runtests.sh +exec "$FLOW_NODE_BINARY" "$@" diff --git a/src/commands/astCommand.ml b/src/commands/astCommand.ml index c8a00fbe00e..e7a021c0f6d 100644 --- a/src/commands/astCommand.ml +++ b/src/commands/astCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -15,36 +15,39 @@ type ast_file_type = | File_json | File_js -let spec = { - CommandSpec. - name = "ast"; - doc = "Print the AST"; - usage = Printf.sprintf - "Usage: %s ast [OPTION]... [FILE]\n\n\ - e.g. %s ast foo.js\n\ - or %s ast < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> flag "--tokens" no_arg - ~doc:"Include a list of syntax tokens in the output" - |> flag "--pretty" no_arg - ~doc:"Pretty-print JSON output" - |> flag "--check" no_arg - ~doc:"Checks whether the file parses, returning any errors but not the AST" - |> flag "--debug" no_arg - ~doc:"" (* undocumented *) - |> flag "--type" (enum ["js", File_js; "json", File_json]) - ~doc:"Type of input file (js or json)" - |> flag "--strict" no_arg - ~doc:"Parse in strict mode" - |> CommandUtils.from_flag - |> CommandUtils.path_flag - |> anon "file" (optional string) - ) -} +let spec = + { + CommandSpec.name = "ast"; + doc = "Print the AST"; + usage = + Printf.sprintf + "Usage: %s ast [OPTION]... [FILE]\n\ne.g. 
%s ast foo.js\nor %s ast < foo.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> flag "--tokens" no_arg ~doc:"Include a list of syntax tokens in the output" + |> flag "--pretty" no_arg ~doc:"Pretty-print JSON output" + |> flag + "--check" + no_arg + ~doc:"Checks whether the file parses, returning any errors but not the AST" + |> flag "--debug" no_arg ~doc:"" (* undocumented *) + |> flag + "--pattern" + no_arg + ~doc:"Prints the AST structurally without locations to be used in pattern matching" + |> flag + "--type" + (enum [("js", File_js); ("json", File_json)]) + ~doc:"Type of input file (js or json)" + |> flag "--strict" no_arg ~doc:"Parse in strict mode" + |> CommandUtils.from_flag + |> CommandUtils.path_flag + |> anon "file" (optional string)); + } type ast_result_type = | Ast_json of (Loc.t, Loc.t) Ast.Expression.t @@ -52,107 +55,133 @@ type ast_result_type = let get_file path = function | Some filename -> File_input.FileName (CommandUtils.expand_path filename) - | None -> - File_input.FileContent (path, Sys_utils.read_stdin_to_string ()) + | None -> File_input.FileContent (path, Sys_utils.read_stdin_to_string ()) -module Translate = Estree_translator.Translate (Json_of_estree) (struct - (* TODO: make these configurable via CLI flags *) - let include_comments = true - let include_locs = true -end) +module Translate = + Estree_translator.Translate + (Json_of_estree) + (struct + (* TODO: make these configurable via CLI flags *) + let include_interned_comments = false + + let include_comments = true + + let include_locs = true + end) module Token_translator = Token_translator.Translate (Json_of_estree) -let main include_tokens pretty check debug file_type_opt use_strict from path filename () = - FlowEventLogger.set_from from; +let pp_underscore_loc fmt _ = Format.pp_print_string fmt "_" + +let main include_tokens pretty check debug pattern file_type_opt use_strict path filename () = let use_relative_path = Option.value_map filename ~default:false ~f:Filename.is_relative in let file = get_file path filename in let content = File_input.content_of_file_input_unsafe file in - let file_type = match file_type_opt with | Some t -> t | None -> - begin match filename with - | Some fn -> if Files.is_json_file fn then File_json else File_js - | None -> File_js + begin + match filename with + | Some fn -> + if Files.is_json_file fn then + File_json + else + File_js + | None -> File_js end in - - (** + (* * Record token stream into a list when the --tokens flag is passed. * Note that tokens stream in in order, so the list is constructed in reverse * order. *) let tokens = ref [] in + let offset_table = lazy (Offset_utils.make content) in let token_sink = - if not include_tokens then None else (Some(fun token_data -> - tokens := (Token_translator.token token_data)::!tokens - )) in - - let open Hh_json in - let results = - try - (* Make the parser as permissive as possible. + if not include_tokens then + None + else + Some + (fun token_data -> + tokens := Token_translator.token (Lazy.force offset_table) token_data :: !tokens) + in + Hh_json.( + let results = + try + (* Make the parser as permissive as possible. 
TODO: make these CLI flags *) - let parse_options = Some Parser_env.({ - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = true; - use_strict; - }) in - - let filename = File_input.path_of_file_input file in - let filename = if use_relative_path - then Option.map filename ~f:(Files.relative_path (Sys.getcwd ())) - else filename - in - let (ast, errors) = - match file_type with - | File_js -> - let filekey = Option.map filename ~f:(fun s -> File_key.SourceFile s) in - let (ocaml_ast, errors) = - Parser_flow.program_file ~fail:false ~parse_options ~token_sink content filekey - in - if debug then begin - Ast.pp_program Loc.pp Loc.pp Format.err_formatter ocaml_ast; - Printf.eprintf "\n%!" - end; - Ast_js ocaml_ast, errors - | File_json -> - let filekey = Option.map filename ~f:(fun s -> File_key.JsonFile s) in - let (ocaml_ast, errors) = - Parser_flow.json_file ~fail:false ~parse_options ~token_sink content filekey - in - if debug then begin - Ast.Expression.pp Loc.pp Loc.pp Format.err_formatter ocaml_ast; - Printf.eprintf "\n%!" - end; - Ast_json ocaml_ast, errors - in - if check then - JSON_Object [ - "errors", Translate.errors errors; - "tokens", JSON_Array (List.rev !tokens); - ] - else - let translated_ast = match ast with - | Ast_js ast -> Translate.program ast - | Ast_json ast -> Translate.expression ast + let parse_options = + Some + Parser_env. + { + enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types = true; + use_strict; + } + in + let filename = File_input.path_of_file_input file in + let filename = + if use_relative_path then + Option.map filename ~f:(Files.relative_path (Sys.getcwd ())) + else + filename in - match translated_ast with - | JSON_Object params -> + let (ast, errors) = + match file_type with + | File_js -> + let filekey = Option.map filename ~f:(fun s -> File_key.SourceFile s) in + let (ocaml_ast, errors) = + Parser_flow.program_file ~fail:false ~parse_options ~token_sink content filekey + in + if debug then ( + Ast.pp_program Loc.pp Loc.pp Format.err_formatter ocaml_ast; + Printf.eprintf "\n%!" + ); + if pattern then ( + Ast.pp_program pp_underscore_loc pp_underscore_loc Format.err_formatter ocaml_ast; + Printf.eprintf "\n%!" + ); + (Ast_js ocaml_ast, errors) + | File_json -> + let filekey = Option.map filename ~f:(fun s -> File_key.JsonFile s) in + let (ocaml_ast, errors) = + Parser_flow.json_file ~fail:false ~parse_options ~token_sink content filekey + in + if debug then ( + Ast.Expression.pp Loc.pp Loc.pp Format.err_formatter ocaml_ast; + Printf.eprintf "\n%!" + ); + if pattern then ( + Ast.Expression.pp pp_underscore_loc pp_underscore_loc Format.err_formatter ocaml_ast; + Printf.eprintf "\n%!" 
+ ); + (Ast_json ocaml_ast, errors) + in + if check then + JSON_Object + [("errors", Translate.errors errors); ("tokens", JSON_Array (List.rev !tokens))] + else + let offset_table = Some (Offset_utils.make content) in + let translated_ast = + match ast with + | Ast_js ast -> Translate.program offset_table ast + | Ast_json ast -> Translate.expression offset_table ast + in + match translated_ast with + | JSON_Object params -> let errors_prop = ("errors", Translate.errors errors) in let tokens_prop = ("tokens", JSON_Array (List.rev !tokens)) in - JSON_Object (errors_prop::tokens_prop::params) - | _ -> assert false - with Parse_error.Error l -> - JSON_Object ["errors", Translate.errors l] - in - print_json_endline ~pretty results + JSON_Object (errors_prop :: tokens_prop :: params) + | _ -> assert false + with Parse_error.Error l -> JSON_Object [("errors", Translate.errors l)] + in + print_json_endline ~pretty results) let command = CommandSpec.command spec main diff --git a/src/commands/autocompleteCommand.ml b/src/commands/autocompleteCommand.ml index 06004de1aae..30a370e0b1f 100644 --- a/src/commands/autocompleteCommand.ml +++ b/src/commands/autocompleteCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,92 +12,93 @@ open CommandUtils open Utils_js -let spec = { - CommandSpec. - name = "autocomplete"; - doc = "Queries autocompletion information"; - usage = Printf.sprintf - "Usage: %s autocomplete [OPTION] [FILE] [LINE COLUMN]...\n\n\ - Queries autocompletion information.\n\n\ - If line and column is specified, then the magic autocomplete token is\n\ - automatically inserted at the specified position.\n\n\ - Example usage:\n\ - \t%s autocomplete < foo.js\n\ - \t%s autocomplete path/to/foo.js < foo.js +let spec = + { + CommandSpec.name = "autocomplete"; + doc = "Queries autocompletion information"; + usage = + Printf.sprintf + "Usage: %s autocomplete [OPTION] [FILE] [LINE COLUMN]...\n\nQueries autocompletion information.\n\nIf line and column is specified, then the magic autocomplete token is\nautomatically inserted at the specified position.\n\nExample usage:\n\t%s autocomplete < foo.js\n\t%s autocomplete path/to/foo.js < foo.js \t%s autocomplete 12 35 < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> anon "args" (optional (list_of string)) - ) -} + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> wait_for_recheck_flag + |> anon "args" (optional (list_of string))); + } let parse_args = function | None | Some [] -> - File_input.FileContent (None, - Sys_utils.read_stdin_to_string ()) + File_input.FileContent (None, Sys_utils.read_stdin_to_string ()) | Some [filename] -> - let filename = get_path_of_file filename in - File_input.FileContent (Some filename, - Sys_utils.read_stdin_to_string ()) + let filename = get_path_of_file filename in + File_input.FileContent (Some filename, Sys_utils.read_stdin_to_string ()) | Some [line; column] -> - let line = int_of_string line in - let column = int_of_string column in 
- let contents = Sys_utils.read_stdin_to_string () in - let (line, column) = convert_input_pos (line, column) in - File_input.FileContent (None, - AutocompleteService_js.add_autocomplete_token contents line column) + let line = int_of_string line in + let column = int_of_string column in + let contents = Sys_utils.read_stdin_to_string () in + let (line, column) = convert_input_pos (line, column) in + File_input.FileContent + (None, AutocompleteService_js.add_autocomplete_token contents line column) | Some [filename; line; column] -> - let line = int_of_string line in - let column = int_of_string column in - let contents = Sys_utils.read_stdin_to_string () in - let filename = get_path_of_file filename in - let (line, column) = convert_input_pos (line, column) in - File_input.FileContent (Some filename, - AutocompleteService_js.add_autocomplete_token contents line column) + let line = int_of_string line in + let column = int_of_string column in + let contents = Sys_utils.read_stdin_to_string () in + let filename = get_path_of_file filename in + let (line, column) = convert_input_pos (line, column) in + File_input.FileContent + (Some filename, AutocompleteService_js.add_autocomplete_token contents line column) | _ -> - CommandSpec.usage spec; - FlowExitStatus.(exit Commandline_usage_error) + CommandSpec.usage spec; + FlowExitStatus.(exit Commandline_usage_error) -let main base_flags option_values json pretty root strip_root from args () = - FlowEventLogger.set_from from; +let main base_flags option_values json pretty root strip_root wait_for_recheck args () = let file = parse_args args in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - let strip_root = if strip_root then Some root else None in - let request = ServerProt.Request.AUTOCOMPLETE file in - let results = match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.AUTOCOMPLETE response -> response - | response -> failwith_bad_response ~request ~response + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) in - if json || pretty - then ( + let strip_root = + if strip_root then + Some root + else + None + in + let request = + ServerProt.Request.AUTOCOMPLETE { input = file; wait_for_recheck; trigger_character = None } + in + let results = + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.AUTOCOMPLETE response -> response + | response -> failwith_bad_response ~request ~response + in + if json || pretty then results - |> AutocompleteService_js.autocomplete_response_to_json ~strip_root - |> Hh_json.print_json_endline ~pretty - ) else ( + |> AutocompleteService_js.autocomplete_response_to_json ~strip_root + |> Hh_json.print_json_endline ~pretty + else match results with - | Error error -> - prerr_endlinef "Error: %s" error + | Error error -> prerr_endlinef "Error: %s" error | Ok completions -> - List.iter (fun res -> - let name = res.ServerProt.Response.res_name in - let ty = res.ServerProt.Response.res_ty in - print_endline (Printf.sprintf "%s %s" name ty) - ) completions - ) + List.iter + (fun res -> + let name = res.ServerProt.Response.res_name in + let (_ty_loc, ty) = res.ServerProt.Response.res_ty in + print_endline (Printf.sprintf "%s %s" name ty)) + completions let command = CommandSpec.command spec 
main diff --git a/src/commands/autofixCommand.ml b/src/commands/autofixCommand.ml new file mode 100644 index 00000000000..5ddf5b7ef18 --- /dev/null +++ b/src/commands/autofixCommand.ml @@ -0,0 +1,274 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open CommandUtils +open CommandSpec + +(* This module implements the flow command `autofix insert-type` which inserts + a type annotation in a file at a position. *) +module InsertType = struct + let spec = + Autofix_options.( + let ambiguity_strategies_list = String.concat ", " @@ List.map fst ambiguity_strategies in + { + name = "insert type"; + doc = "[EXPERIMENTAL] Insert type information at file and position"; + usage = + Printf.sprintf + "Usage: %s autofix insert-type [OPTION]... [FILE] LINE COLUMN [END_LINE] [END_COLUMN]\n\ne.g. %s autofix insert-type foo.js 12 3\nor %s autofix insert-type 12 3 < foo.js\n" + exe_name + exe_name + exe_name; + args = + ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> verbose_flags + |> from_flag + |> path_flag + |> wait_for_recheck_flag + |> flag + "--strict-location" + no_arg + ~doc:"Restrict the number of valid positions for each annotation" + |> flag + "--strategy" + (required ~default:Generalize (enum ambiguity_strategies)) + ~doc: + ( "Set how to resolve ambiguity in possible types (" + ^ ambiguity_strategies_list + ^ ")" ) + |> flag + "--in-place" + no_arg + ~doc:"Overwrite the input file or file specified by the path flag" + |> flag "--expand-type-aliases" no_arg ~doc:"Replace type aliases with their bodies" + |> flag + "--omit-typearg-defaults" + no_arg + ~doc: + "Omit type arguments when defaults exist and match the provided type argument" + |> anon "args" (required (list_of string))); + }) + + let handle_error ?(code = FlowExitStatus.Unknown_error) msg = FlowExitStatus.(exit ~msg code) + + let rec parse_args args : Loc.t = + let parse_pos line col : Loc.position = + let (line, column) = + try convert_input_pos (int_of_string line, int_of_string col) + with Failure _ -> handle_error "flow autofix insert-type: failed to parse position" + in + Loc.{ line; column } + in + match args with + | [start_line; start_col; end_line; end_col] -> + let start = parse_pos start_line start_col in + let _end = parse_pos end_line end_col in + Loc.{ source = None; start; _end } + | [start_line; start_col] -> + let start = parse_pos start_line start_col in + Loc.{ source = None; start; _end = start } + | file :: (([_; _] | [_; _; _; _]) as loc) -> + let loc = parse_args loc in + Loc.{ loc with source = Some (File_key.SourceFile (expand_path file)) } + | [] -> handle_error "flow autofix insert-type: No position given" + | _ -> handle_error "flow autofix insert-type: Invalid position given" + + let select_output_channel in_place path source_path = + match (in_place, path, source_path) with + | (false, _, _) -> stdout + | (true, Some p, _) + | (true, None, Some p) -> + begin + try open_out p + with _ -> + handle_error ~code:FlowExitStatus.Path_is_not_a_file + @@ Printf.sprintf "failed to open output file: %s" p + end + | (true, None, None) -> + handle_error "Flow: --in-place flag used without input file or explicit path" + + let handle_ok patch input in_place path source_path = + match File_input.content_of_file_input input with + | Ok content -> + let out = select_output_channel in_place path source_path in + 
output_string out @@ Replacement_printer.print patch content; + close_out out + | Error msg -> handle_error msg + + let main + base_flags + option_values + json + _pretty + root_arg + strip_root_arg + verbose + path + wait_for_recheck + location_is_strict + ambiguity_strategy + in_place + expand_aliases + omit_targ_defaults + args + () = + let (Loc.{ source; _ } as target) = parse_args args in + let source_path = Option.map ~f:File_key.to_string source in + let input = get_file_from_filename_or_stdin ~cmd:spec.name path source_path in + let root = get_the_root ~base_flags ~input root_arg in + (* TODO Figure out how to implement root striping *) + let _strip_root = + if strip_root_arg then + Some root + else + None + in + let flowconfig_name = base_flags.Base_flags.flowconfig_name in + if (not json) && verbose <> None then + prerr_endline "NOTE: --verbose writes to the server log file"; + let request = + ServerProt.Request.INSERT_TYPE + { + input; + target; + verbose; + location_is_strict; + ambiguity_strategy; + wait_for_recheck; + expand_aliases; + omit_targ_defaults; + } + in + let result = connect_and_make_request flowconfig_name option_values root request in + match result with + | ServerProt.Response.INSERT_TYPE (Error err) -> handle_error err + (* TODO implement a more useful set of error conditions *) + | ServerProt.Response.INSERT_TYPE (Ok resp) -> handle_ok resp input in_place path source_path + | _ -> handle_error "Flow: invalid server response" + + let command = CommandSpec.command spec main +end + +module Exports = struct + let spec = + { + name = "exports"; + doc = "[EXPERIMENTAL] automatically fix signature verification errors"; + usage = Printf.sprintf "Usage: %s autofix exports [OPTION]... [FILE]\n" exe_name; + args = + ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> verbose_flags + |> from_flag + |> path_flag + |> wait_for_recheck_flag + |> flag + "--in-place" + no_arg + ~doc:"Overwrite the input file or file specified by the path flag" + |> flag "--force" no_arg ~doc:"Write the results even if errors are encountered" + |> anon "file" (required string)); + } + + let handle_error ?(code = FlowExitStatus.Unknown_error) msg = FlowExitStatus.(exit ~msg code) + + let select_output_channel in_place path source_path = + match (in_place, path, source_path) with + | (false, _, _) -> stdout + | (true, Some p, _) + | (true, None, p) -> + begin + try open_out p + with _ -> + handle_error ~code:FlowExitStatus.Path_is_not_a_file + @@ Printf.sprintf "failed to open output file: %s" p + end + + let avg_error_size = 100 + + let append_errors errors = + Buffer.( + let buff = create (avg_error_size * List.length errors) in + List.fold_left (fun () -> add_string buff) () errors; + contents buff) + + let handle_ok patch errors input in_place forced path source_path = + let write_patch content = + let out = select_output_channel in_place path source_path in + output_string out @@ Replacement_printer.print patch content; + close_out out + in + match (File_input.content_of_file_input input, errors, forced) with + | (Ok content, [], _) + | (Ok content, _, true) -> + output_string stderr (append_errors errors); + write_patch content + | (Ok _, errors, false) -> handle_error (append_errors errors) + | (Error msg, _, _) -> handle_error msg + + let main + base_flags + option_values + json + _pretty + root_arg + _strip_root_arg + verbose + path + wait_for_recheck + in_place + forced + source_path + () = + let source_path = expand_path source_path 
in + let input = get_file_from_filename_or_stdin ~cmd:spec.name path (Some source_path) in + let root = get_the_root ~base_flags ~input root_arg in + let flowconfig_name = base_flags.Base_flags.flowconfig_name in + if (not json) && verbose <> None then + prerr_endline "NOTE: --verbose writes to the server log file"; + let request = ServerProt.Request.AUTOFIX_EXPORTS { input; verbose; wait_for_recheck } in + let result = connect_and_make_request flowconfig_name option_values root request in + match result with + | ServerProt.Response.AUTOFIX_EXPORTS (Error err) -> handle_error err + | ServerProt.Response.AUTOFIX_EXPORTS (Ok (patch, errors)) -> + handle_ok patch errors input in_place forced path source_path + | _ -> handle_error "Flow: invalid server response" + + let command = CommandSpec.command spec main +end + +let command = + let main (cmd, argv) () = CommandUtils.run_command cmd argv in + let spec = + { + CommandSpec.name = "autofix"; + doc = ""; + usage = Printf.sprintf "Usage: %s autofix SUBCOMMAND [OPTIONS]...\n" CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> anon + "subcommand" + (required + (command + [ + ("suggest", SuggestCommand.command); + ("insert-type", InsertType.command); + ("exports", Exports.command); + ]))); + } + in + CommandSpec.command spec main diff --git a/src/commands/batchCoverageCommand.ml b/src/commands/batchCoverageCommand.ml new file mode 100644 index 00000000000..0d55f6bafc9 --- /dev/null +++ b/src/commands/batchCoverageCommand.ml @@ -0,0 +1,217 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open CommandUtils +open Utils_js + +let spec = + { + CommandSpec.name = "batch-coverage"; + doc = "Shows aggregate coverage information for a group of files or directories "; + usage = + Printf.sprintf + "Usage: %s batch-coverage [OPTION]... [FILE...] \n\ne.g. %s batch-coverage foo.js bar.js baz.js dirname1 dirname2 --show-all \nor %s batch-coverage --input-file filenames.txt\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_flags_no_lazy + |> json_flags + |> root_flag + |> strip_root_flag + |> wait_for_recheck_flag + |> flag + "--input-file" + string + ~doc: + "File containing list of files or directories to compute coverage for, one per line. If -, the list is read from standard input." + |> flag + "--show-all" + no_arg + ~doc: + "Whether to output the coverage for all files. If not specified, this command will only print coverage for 50 files. " + |> flag + "--show-trust" + no_arg + ~doc:"EXPERIMENTAL: Whether to include trust information in output" + |> anon "FILE..." (list_of string)); + } + +let output_results ~root ~strip_root ~json ~pretty ~show_all ~trust stats = + let strip_root = + if strip_root then + Some root + else + None + in + let percent top bottom = + if bottom = 0 then + 0. + else + float_of_int top /. float_of_int bottom *. 100. 
+ in + (* Compute aggregate stats *) + let (trusted, untrusted, any, empty) = + Core_list.fold_left + ~f: + (fun (acc_trust, acc_untrust, acc_any, acc_empty) + (_, { Coverage_response.untainted; tainted; empty; uncovered }) -> + (acc_trust + untainted, acc_untrust + tainted, acc_any + uncovered, acc_empty + empty)) + stats + ~init:(0, 0, 0, 0) + in + let num_files_in_dir = Core_list.length stats in + let covered = trusted + untrusted in + let total = covered + any + empty in + let trusted_percentage = percent trusted total in + let percentage = percent covered total in + let file_stats (file_key, { Coverage_response.untainted; tainted; empty; uncovered }) = + let covered = untainted + tainted in + let total = covered + uncovered + empty in + let percentage = percent covered total in + let trusted_percentage = + if trust then + Some (percent untainted total) + else + None + in + let file = Reason.string_of_source ~strip_root file_key in + let untainted = + if trust then + Some untainted + else + None + in + (file, untainted, covered, total, trusted_percentage, percentage) + in + if json then + Hh_json.( + let file_to_json stats = + let (file, trusted, covered, total, trusted_percentage, percentage) = file_stats stats in + let percentage = [("percentage", JSON_Number (spf "%0.2f" percentage))] in + let percentage = + match trusted_percentage with + | Some p -> ("trusted_percentage", JSON_Number (spf "%0.2f" p)) :: percentage + | None -> percentage + in + let covered = [("covered", int_ covered)] in + let covered = + match trusted with + | Some t -> ("trusted", int_ t) :: covered + | None -> covered + in + JSON_Object ([("file", string_ file)] @ percentage @ covered @ [("total", int_ total)]) + in + let array_ elts = JSON_Array elts in + let file_list = + Core_list.sort ~cmp:(fun (a, _) (b, _) -> Pervasives.compare a b) stats + |> Core_list.map ~f:file_to_json + in + let covered_expressions = [("covered_expressions", int_ covered)] in + let covered_expressions = + if trust then + ("trusted_expressions", int_ trusted) :: covered_expressions + else + covered_expressions + in + let percentage = [("percentage", JSON_Number (spf "%0.2f" percentage))] in + let percentage = + if trust then + ("trusted_percentage", JSON_Number (spf "%0.2f" trusted_percentage)) :: percentage + else + percentage + in + let json_output = + JSON_Object + [ + ("files", array_ file_list); + ( "statistics", + JSON_Object + ( [("files_in_directory", int_ num_files_in_dir)] + @ covered_expressions + @ [("total_expressions", int_ total)] + @ percentage ) ); + ] + in + print_json_endline ~pretty json_output) + else + let (truncation_text, truncated_stats) = + if num_files_in_dir > 50 && not show_all then + ( spf + "\nOnly showing coverage for 50 of %d files. 
To show more, rerun with --show-all.\n" + num_files_in_dir, + Core_list.take stats 50 ) + else + ("", stats) + in + if num_files_in_dir > 0 then ( + print_endlinef "\nCoverage results from %d file(s):\n" num_files_in_dir; + Core_list.iter + ~f:(fun fstats -> + let (file, trusted, covered, total, trusted_percentage, percentage) = + file_stats fstats + in + match (trusted_percentage, trusted) with + | (Some p, Some t) -> + print_endlinef + "%s: %0.2f%% trusted (%d of %d expressions), %0.2f%% covered (%d of %d expressions)" + file + p + t + total + percentage + covered + total + | _ -> print_endlinef "%s: %0.2f%% (%d of %d expressions)" file percentage covered total) + truncated_stats; + print_endline truncation_text + ); + + print_endlinef "-----------------------------------"; + print_endlinef "Aggregate coverage statistics"; + print_endlinef "-----------------------------------"; + print_endlinef "Files : %d" num_files_in_dir; + print_endlinef "Expressions :"; + if trust then print_endlinef " Trusted : %d" trusted; + print_endlinef " Covered : %d" covered; + print_endlinef " Total : %d" total; + if trust then print_endlinef " Trust Percentage : %0.2f%%" trusted_percentage; + print_endlinef " Covered Percentage : %0.2f%%" percentage; + print_endlinef "" + +let main + base_flags + option_values + json + pretty + root + strip_root + wait_for_recheck + input + show_all + trust + files + () = + let flowconfig_name = base_flags.Base_flags.flowconfig_name in + let batch = + get_filenames_from_input input files |> Core_list.map ~f:(Path.make %> Path.to_string) + in + let input = Option.map (Core_list.hd batch) (fun x -> File_input.FileName x) in + let root = get_the_root ~base_flags ?input root in + (* pretty implies json *) + let json = json || pretty in + let request = ServerProt.Request.BATCH_COVERAGE { batch; wait_for_recheck; trust } in + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.BATCH_COVERAGE (Error msg) -> FlowExitStatus.(exit ~msg Unknown_error) + | ServerProt.Response.BATCH_COVERAGE (Ok resp) -> + output_results ~root ~strip_root ~json ~pretty ~show_all ~trust resp + | response -> failwith_bad_response ~request ~response + +let command = CommandSpec.command spec main diff --git a/src/commands/checkCommands.ml b/src/commands/checkCommands.ml index 5b7cc798dc4..421e5f58f1e 100644 --- a/src/commands/checkCommands.ml +++ b/src/commands/checkCommands.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,169 +9,222 @@ open CommandUtils open Utils_js type printer = - | Json of { pretty: bool; version: Errors.Json_output.json_version option } + | Json of { + pretty: bool; + version: Errors.Json_output.json_version option; + } | Cli of Errors.Cli_output.error_flags (* helper - print errors. 
used in check-and-die runs *) -let print_errors ~printer ~profiling ~suppressed_errors options ~errors ~warnings = - let strip_root = - if Options.should_strip_root options - then Some (Options.root options) - else None +let format_errors ~printer ~client_include_warnings options (errors, warnings, suppressed_errors) = + let include_warnings = client_include_warnings || Options.should_include_warnings options in + let warnings = + if include_warnings then + warnings + else + Errors.ConcreteLocPrintableErrorSet.empty in - - match printer with - | Json { pretty; version } -> - let profiling = - if options.Options.opt_profile - then Some profiling - else None in - Errors.Json_output.print_errors - ~out_channel:stdout - ~strip_root - ~profiling - ~pretty - ?version - ~suppressed_errors - ~errors - ~warnings - () - | Cli flags -> - let errors = List.fold_left - (fun acc (error, _) -> Errors.ErrorSet.add error acc) - errors + let suppressed_errors = + if Options.include_suppressions options then suppressed_errors - in - Errors.Cli_output.print_errors - ~out_channel:stdout - ~flags - ~strip_root - ~errors - ~warnings - ~lazy_msg:None - () + else + [] + in + let strip_root = + if Options.should_strip_root options then + Some (Options.root options) + else + None + in + (* The print_errors functions in the Errors modules are carefully defined to + * perform any expensive, non-printing work when partially applied, before + * receiving the `profiling` argument. + * + * We use this trick in order to actually profile this work. So, the + * annotation on the `print_errors` binding below serves to ensure that the + * error functions are applied enough that this expensive work happens. *) + let print_errors : Profiling_js.finished option -> unit = + match printer with + | Json { pretty; version } -> + let finish_formatting = + Errors.Json_output.format_errors + ~out_channel:stdout + ~strip_root + ~pretty + ?version + ~suppressed_errors + ~errors + ~warnings + () + in + fun profiling -> + let profiling_props = + Option.value_map profiling ~default:[] ~f:Profiling_js.to_legacy_json_properties + in + finish_formatting ~profiling_props + | Cli flags -> + let errors = + List.fold_left + (fun acc (error, _) -> Errors.ConcreteLocPrintableErrorSet.add error acc) + errors + suppressed_errors + in + let () = + Errors.Cli_output.format_errors + ~out_channel:stdout + ~flags + ~strip_root + ~errors + ~warnings + ~lazy_msg:None + () + in + (fun _profiling -> ()) + in + fun profiling -> + if options.Options.opt_profile then + print_errors (Some profiling) + else + print_errors None module CheckCommand = struct - let spec = { CommandSpec. - name = "check"; - doc = "Does a full Flow check and prints the results"; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> error_flags - |> flag "--include-suppressed" no_arg - ~doc:"Ignore any `suppress_comment` lines in .flowconfig" - |> options_and_json_flags - |> json_version_flag - |> shm_flags - |> ignore_version_flag - |> from_flag - |> anon "root" (optional string) - ); - usage = Printf.sprintf - "Usage: %s check [OPTION]... 
[ROOT]\n\n\ - Does a full Flow check and prints the results.\n\n\ - Flow will search upward for a .flowconfig file, beginning at ROOT.\n\ - ROOT is assumed to be the current directory if unspecified.\n" - exe_name; - } + let spec = + { + CommandSpec.name = "check"; + doc = "Does a full Flow check and prints the results"; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> error_flags + |> options_and_json_flags + |> json_version_flag + |> shm_flags + |> ignore_version_flag + |> from_flag + |> no_cgroup_flag + |> anon "root" (optional string)); + usage = + Printf.sprintf + "Usage: %s check [OPTION]... [ROOT]\n\nDoes a full Flow check and prints the results.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\n" + exe_name; + } let main - base_flags error_flags include_suppressed options_flags json pretty json_version - shm_flags ignore_version from path_opt + base_flags + error_flags + options_flags + json + pretty + json_version + shm_flags + ignore_version + path_opt () = - let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = CommandUtils.guess_root flowconfig_name path_opt in - let flowconfig = FlowConfig.get (Server_files_js.config_file flowconfig_name root) in - let options = make_options ~flowconfig_name ~flowconfig ~lazy_mode:None ~root options_flags in - - if Options.should_profile options && not Sys.win32 - then begin + let flowconfig = + let flowconfig_path = Server_files_js.config_file flowconfig_name root in + read_config_or_exit ~enforce_warnings:(not ignore_version) flowconfig_path + in + let options = + let lazy_mode = Some Options.NON_LAZY_MODE in + make_options ~flowconfig_name ~flowconfig ~lazy_mode ~root options_flags + in + if Options.should_profile options && not Sys.win32 then ( Flow_server_profile.init (); let rec sample_processor_info () = Flow_server_profile.processor_sample (); Timer.set_timer ~interval:1.0 ~callback:sample_processor_info |> ignore in - sample_processor_info (); - end; + sample_processor_info () + ); (* initialize loggers before doing too much, especially anything that might exit *) - LoggingUtils.init_loggers ~from ~options ~min_level:Hh_logger.Level.Error (); + LoggingUtils.init_loggers ~options ~min_level:Hh_logger.Level.Error (); if not ignore_version then assert_version flowconfig; let shared_mem_config = shm_config shm_flags flowconfig in - - let client_include_warnings = error_flags.Errors.Cli_output.include_warnings in - - let profiling, errors, warnings, suppressed_errors = Server.check_once - ~shared_mem_config ~client_include_warnings options in - let suppressed_errors = - if include_suppressed then suppressed_errors else [] in - let printer = - if json || Option.is_some json_version || pretty then - Json { pretty; version = json_version } - else - Cli error_flags in - print_errors ~printer ~profiling ~suppressed_errors options ~errors ~warnings; + let format_errors = + let client_include_warnings = error_flags.Errors.Cli_output.include_warnings in + let printer = + if json || Option.is_some json_version || pretty then + Json { pretty; version = json_version } + else + Cli error_flags + in + format_errors ~printer ~client_include_warnings options + in + let (errors, warnings) = Server.check_once options ~shared_mem_config ~format_errors in Flow_server_profile.print_url (); - FlowExitStatus.exit (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) + FlowExitStatus.exit + 
(get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) let command = CommandSpec.command spec main end module FocusCheckCommand = struct - let spec = { CommandSpec. - name = "focus-check"; - doc = "EXPERIMENTAL: " ^ - "Does a focused Flow check on a file (and its dependents and their dependencies) " ^ - "and prints the results"; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> error_flags - |> flag "--include-suppressed" no_arg - ~doc:"Ignore any `suppress_comment` lines in .flowconfig" - |> options_and_json_flags - |> json_version_flag - |> shm_flags - |> ignore_version_flag - |> from_flag - |> root_flag - |> input_file_flag "check" - |> anon "root" (list_of string) - ); - usage = Printf.sprintf - "Usage: %s focus-check [OPTION]... [FILES/DIRS]\n\n\ - EXPERIMENTAL: Does a focused Flow check on the input files/directories (and each of their \ - dependents and dependencies) and prints the results.\n\n\ - If --root is not specified, Flow will search upward for a .flowconfig file from the first \ - file or dir in FILES/DIR.\n\ - If --root is not specified and FILES/DIR is omitted, a focus check is ran on the current \ - directory.\n" - exe_name; - } + let spec = + { + CommandSpec.name = "focus-check"; + doc = + "EXPERIMENTAL: " + ^ "Does a focused Flow check on a file (and its dependents and their dependencies) " + ^ "and prints the results"; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> error_flags + |> options_and_json_flags + |> json_version_flag + |> shm_flags + |> ignore_version_flag + |> from_flag + |> root_flag + |> input_file_flag "check" + |> no_cgroup_flag + |> anon "root" (list_of string)); + usage = + Printf.sprintf + "Usage: %s focus-check [OPTION]... [FILES/DIRS]\n\nEXPERIMENTAL: Does a focused Flow check on the input files/directories (and each of their dependents and dependencies) and prints the results.\n\nIf --root is not specified, Flow will search upward for a .flowconfig file from the first file or dir in FILES/DIR.\nIf --root is not specified and FILES/DIR is omitted, a focus check is ran on the current directory.\n" + exe_name; + } let main - base_flags error_flags include_suppressed options_flags json pretty json_version - shm_flags ignore_version from root input_file filenames + base_flags + error_flags + options_flags + json + pretty + json_version + shm_flags + ignore_version + root + input_file + filenames () = - let filenames = get_filenames_from_input input_file filenames in - let flowconfig_name = base_flags.Base_flags.flowconfig_name in (* If --root is explicitly set, then use that as the root. 
Otherwise, use the first file *) - let root = CommandUtils.guess_root flowconfig_name ( - if root <> None - then root - else match filenames with [] -> None | x::_ -> Some x - ) in - let flowconfig = FlowConfig.get (Server_files_js.config_file flowconfig_name root) in - let options = make_options ~flowconfig_name ~flowconfig ~lazy_mode:None ~root options_flags in - + let root = + CommandUtils.guess_root + flowconfig_name + ( if root <> None then + root + else + match filenames with + | [] -> None + | x :: _ -> Some x ) + in + let flowconfig = read_config_or_exit (Server_files_js.config_file flowconfig_name root) in + let options = + let lazy_mode = Some Options.NON_LAZY_MODE in + make_options ~flowconfig_name ~flowconfig ~lazy_mode ~root options_flags + in (* initialize loggers before doing too much, especially anything that might exit *) - LoggingUtils.init_loggers ~from ~options (); + LoggingUtils.init_loggers ~options (); (* do this after loggers are initialized, so we can complain properly *) let file_options = Options.file_options options in @@ -181,25 +234,27 @@ module FocusCheckCommand = struct if not ignore_version then assert_version flowconfig; let shared_mem_config = shm_config shm_flags flowconfig in - - let client_include_warnings = error_flags.Errors.Cli_output.include_warnings in - - let focus_targets = SSet.fold - (fun file acc -> FilenameSet.add (File_key.SourceFile Path.(to_string (make file))) acc) - filenames - FilenameSet.empty in - - let profiling, errors, warnings, suppressed_errors = Server.check_once - ~shared_mem_config ~client_include_warnings ~focus_targets options in - let suppressed_errors = - if include_suppressed then suppressed_errors else [] in - let printer = - if json || Option.is_some json_version || pretty then - Json { pretty; version = json_version } - else - Cli error_flags + let focus_targets = + SSet.fold + (fun file acc -> FilenameSet.add (File_key.SourceFile Path.(to_string (make file))) acc) + filenames + FilenameSet.empty in - print_errors ~printer ~profiling ~suppressed_errors options ~errors ~warnings; - FlowExitStatus.exit (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) + let format_errors = + let client_include_warnings = error_flags.Errors.Cli_output.include_warnings in + let printer = + if json || Option.is_some json_version || pretty then + Json { pretty; version = json_version } + else + Cli error_flags + in + format_errors ~printer ~client_include_warnings options + in + let (errors, warnings) = + Server.check_once options ~shared_mem_config ~focus_targets ~format_errors + in + FlowExitStatus.exit + (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) + let command = CommandSpec.command spec main end diff --git a/src/commands/checkContentsCommand.ml b/src/commands/checkContentsCommand.ml index 4850fa035cf..ec9fd37e1f3 100644 --- a/src/commands/checkContentsCommand.ml +++ b/src/commands/checkContentsCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,55 +11,64 @@ open CommandUtils -let spec = { - CommandSpec. - name = "check-contents"; - doc = "Run typechecker on contents from stdin"; - usage = Printf.sprintf - "Usage: %s check-contents [OPTION]... [FILE]\n\n\ - Runs a flow check on the contents of stdin. 
If FILE is provided, then\n\ - check-contents pretends that the contents of stdin come from FILE\n\n\ - e.g. %s check-contents < foo.js\n\ - or %s check-contents foo.js < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> json_version_flag - |> root_flag - |> error_flags - |> strip_root_flag - |> verbose_flags - |> from_flag - |> flag "--respect-pragma" no_arg ~doc:"" (* deprecated *) - |> flag "--all" no_arg ~doc:"Ignore absence of an @flow pragma" - |> anon "filename" (optional string) - ) -} +let spec = + { + CommandSpec.name = "check-contents"; + doc = "Run typechecker on contents from stdin"; + usage = + Printf.sprintf + "Usage: %s check-contents [OPTION]... [FILE]\n\nRuns a flow check on the contents of stdin. If FILE is provided, then\ncheck-contents pretends that the contents of stdin come from FILE\n\ne.g. %s check-contents < foo.js\nor %s check-contents foo.js < foo.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> json_version_flag + |> root_flag + |> error_flags + |> strip_root_flag + |> verbose_flags + |> from_flag + |> wait_for_recheck_flag + |> flag "--respect-pragma" no_arg ~doc:"" (* deprecated *) + |> flag "--all" no_arg ~doc:"Ignore absence of an @flow pragma" + |> anon "filename" (optional string)); + } -let main base_flags option_values json pretty json_version root error_flags strip_root verbose from - respect_pragma all file () = - FlowEventLogger.set_from from; - let file = get_file_from_filename_or_stdin file - ~cmd:CommandSpec.(spec.name) None in +let main + base_flags + option_values + json + pretty + json_version + root + error_flags + strip_root + verbose + wait_for_recheck + respect_pragma + all + file + () = + let file = get_file_from_filename_or_stdin file ~cmd:CommandSpec.(spec.name) None in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) + in (* pretty implies json *) let json = json || Option.is_some json_version || pretty in + if (not option_values.quiet) && verbose <> None then + prerr_endline "NOTE: --verbose writes to the server log file"; - if not option_values.quiet && (verbose <> None) - then prerr_endline "NOTE: --verbose writes to the server log file"; - - if not option_values.quiet && all && respect_pragma then prerr_endline - "Warning: --all and --respect-pragma cannot be used together. --all wins."; + if (not option_values.quiet) && all && respect_pragma then + prerr_endline "Warning: --all and --respect-pragma cannot be used together. --all wins."; (* TODO: --respect-pragma is deprecated. We will soon flip the default. As a transition, --all defaults to enabled. To maintain the current behavior @@ -67,58 +76,78 @@ let main base_flags option_values json pretty json_version root error_flags stri Once we flip the default, --respect-pragma will have no effect and will be removed. 
*) let all = all || not respect_pragma in - let include_warnings = error_flags.Errors.Cli_output.include_warnings in - - let request = ServerProt.Request.CHECK_FILE (file, verbose, all, include_warnings) in - let response = match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.CHECK_FILE response -> response - | response -> failwith_bad_response ~request ~response + let request = + ServerProt.Request.CHECK_FILE + { input = file; verbose; force = all; include_warnings; wait_for_recheck } in - - let stdin_file = match file with - | File_input.FileContent (None, contents) -> - Some (Path.make_unsafe "-", contents) - | File_input.FileContent (Some path, contents) -> - Some (Path.make path, contents) + let response = + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.CHECK_FILE response -> response + | response -> failwith_bad_response ~request ~response + in + let stdin_file = + match file with + | File_input.FileContent (None, contents) -> Some (Path.make_unsafe "-", contents) + | File_input.FileContent (Some path, contents) -> Some (Path.make path, contents) | _ -> None in - let strip_root = if strip_root then Some root else None in - let print_json = Errors.Json_output.print_errors - ~out_channel:stdout ~strip_root ~pretty - ?version:json_version - ~stdin_file ~suppressed_errors:([]) in + let strip_root = + if strip_root then + Some root + else + None + in + let print_json = + Errors.Json_output.print_errors + ~out_channel:stdout + ~strip_root + ~pretty + ?version:json_version + ~stdin_file + in match response with - | ServerProt.Response.ERRORS {errors; warnings} -> - if json - then - print_json ~errors ~warnings () - else ( - Errors.Cli_output.print_errors - ~out_channel:stdout - ~flags:error_flags - ~stdin_file - ~strip_root - ~errors - ~warnings - ~lazy_msg:None - (); - (* Return a successful exit code if there were only warnings. *) - let open FlowExitStatus in - exit (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) - ) + | ServerProt.Response.ERRORS { errors; warnings; suppressed_errors } -> + if json then + print_json ~errors ~warnings ~suppressed_errors () + else ( + Errors.Cli_output.print_errors + ~out_channel:stdout + ~flags:error_flags + ~stdin_file + ~strip_root + ~errors + ~warnings + ~lazy_msg:None + (); + + (* Return a successful exit code if there were only warnings. *) + FlowExitStatus.( + exit + (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings)) + ) | ServerProt.Response.NO_ERRORS -> - if json then - print_json ~errors:Errors.ErrorSet.empty ~warnings:Errors.ErrorSet.empty () - else Printf.printf "No errors!\n%!"; - FlowExitStatus.(exit No_error) + if json then + print_json + ~errors:Errors.ConcreteLocPrintableErrorSet.empty + ~warnings:Errors.ConcreteLocPrintableErrorSet.empty + ~suppressed_errors:[] + () + else + Printf.printf "No errors!\n%!"; + FlowExitStatus.(exit No_error) | ServerProt.Response.NOT_COVERED -> - if json then - print_json ~errors:Errors.ErrorSet.empty ~warnings:Errors.ErrorSet.empty () - else Printf.printf "File is not @flow!\n%!"; - FlowExitStatus.(exit No_error) + if json then + print_json + ~errors:Errors.ConcreteLocPrintableErrorSet.empty + ~warnings:Errors.ConcreteLocPrintableErrorSet.empty + ~suppressed_errors:[] + () + else + Printf.printf "File is not @flow!\n%!"; + FlowExitStatus.(exit No_error) | _ -> - let msg = "Unexpected server response!" 
in - FlowExitStatus.(exit ~msg Unknown_error) + let msg = "Unexpected server response!" in + FlowExitStatus.(exit ~msg Unknown_error) let command = CommandSpec.command spec main diff --git a/src/commands/commandConnect.ml b/src/commands/commandConnect.ml index edda3c103af..aa0c8eb7f36 100644 --- a/src/commands/commandConnect.ml +++ b/src/commands/commandConnect.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,35 +8,40 @@ module CCS = CommandConnectSimple type env = { - root : Path.t; - autostart : bool; - retries : int; - expiry : float option; - lazy_mode : Options.lazy_mode option; - autostop : bool; - tmp_dir : string; - shm_dirs : string list option; - shm_min_avail : int option; - shm_dep_table_pow : int option; - shm_hash_table_pow : int option; - shm_log_level : int option; - log_file : string; - ignore_version : bool; - emoji : bool; - quiet : bool; + root: Path.t; + autostart: bool; + retries: int; + expiry: float option; + lazy_mode: Options.lazy_mode option; + autostop: bool; + tmp_dir: string; + shm_dirs: string list option; + shm_min_avail: int option; + shm_dep_table_pow: int option; + shm_hash_table_pow: int option; + shm_log_level: int option; + log_file: string; + ignore_version: bool; + emoji: bool; + quiet: bool; flowconfig_name: string; + rerun_on_mismatch: bool; } -let arg name value arr = match value with -| None -> arr -| Some value -> name::value::arr +let arg name value arr = + match value with + | None -> arr + | Some value -> name :: value :: arr let arg_map name ~f value arr = let value = Option.map ~f value in arg name value arr let flag name value arr = - if value then name::arr else arr + if value then + name :: arr + else + arr (* Starts up a flow server by literally calling flow start *) let start_flow_server env = @@ -54,43 +59,44 @@ let start_flow_server env = quiet; flowconfig_name; _; - } = env in - if not quiet then Utils_js.prerr_endlinef - "Launching Flow server for %s" - (Path.to_string root); + } = + env + in + if not quiet then Utils_js.prerr_endlinef "Launching Flow server for %s" (Path.to_string root); let exe = Sys.executable_name in - let args = [ Path.to_string root ] - |> arg_map "--sharedmemory-hash-table-pow" ~f:string_of_int shm_hash_table_pow - |> arg_map "--sharedmemory-dep-table-pow" ~f:string_of_int shm_dep_table_pow - |> arg_map "--sharedmemory-minimum-available" ~f:string_of_int shm_min_avail - |> arg_map "--sharedmemory-log-level" ~f:string_of_int shm_log_level - |> arg_map "--sharedmemory-dirs" ~f:(String.concat ",") shm_dirs - |> arg_map "--lazy-mode" lazy_mode - ~f:(function Options.LAZY_MODE_FILESYSTEM -> "fs" | Options.LAZY_MODE_IDE -> "ide") - |> arg "--temp-dir" (Some tmp_dir) - |> arg "--from" FlowEventLogger.((get_context ()).from) - |> flag "--ignore-version" ignore_version - |> flag "--quiet" quiet - |> flag "--autostop" autostop - |> arg "--flowconfig-name" (Some flowconfig_name) + let args = + [Path.to_string root] + |> arg_map "--sharedmemory-hash-table-pow" ~f:string_of_int shm_hash_table_pow + |> arg_map "--sharedmemory-dep-table-pow" ~f:string_of_int shm_dep_table_pow + |> arg_map "--sharedmemory-minimum-available" ~f:string_of_int shm_min_avail + |> arg_map "--sharedmemory-log-level" ~f:string_of_int shm_log_level + |> arg_map "--sharedmemory-dirs" ~f:(String.concat ",") shm_dirs + |> arg_map "--lazy-mode" 
lazy_mode ~f:Options.lazy_mode_to_string + |> arg "--temp-dir" (Some tmp_dir) + |> arg "--from" (FlowEventLogger.get_from_I_AM_A_CLOWN ()) + |> flag "--ignore-version" ignore_version + |> flag "--quiet" quiet + |> flag "--autostop" autostop + |> arg "--flowconfig-name" (Some flowconfig_name) in try let server_pid = - Unix.(create_process exe - (Array.of_list (exe::"start"::args)) - stdin stdout stderr) in + Unix.(create_process exe (Array.of_list (exe :: "start" :: args)) stdin stdout stderr) + in match Sys_utils.waitpid_non_intr [] server_pid with - | _, Unix.WEXITED 0 -> - Ok () - | _, Unix.WEXITED code when code = FlowExitStatus.(error_code Lock_stolen) -> - Error ("Lock stolen", FlowExitStatus.Lock_stolen) - | _, status -> - let msg = "Could not start Flow server!" in - Error (msg, FlowExitStatus.Server_start_failed status) + | (_, Unix.WEXITED 0) -> Ok () + | (_, Unix.WEXITED code) when code = FlowExitStatus.(error_code Lock_stolen) -> + Error ("Lock stolen", FlowExitStatus.Lock_stolen) + | (_, status) -> + let msg = "Could not start Flow server!" in + Error (msg, FlowExitStatus.Server_start_failed status) with exn -> - let msg = Printf.sprintf - "Could not start Flow server! Unexpected exception: %s" - (Printexc.to_string exn) in + let exn = Exception.wrap exn in + let msg = + Printf.sprintf + "Could not start Flow server! Unexpected exception: %s" + (Exception.to_string exn) + in Error (msg, FlowExitStatus.Unknown_error) type retry_info = { @@ -100,136 +106,157 @@ type retry_info = { } let reset_retries_if_necessary retries = function - | Error (CCS.Server_busy (CCS.Fail_on_init _)) -> - { retries with - retries_remaining = 0; - } + | Error (CCS.Server_busy (CCS.Fail_on_init _)) -> { retries with retries_remaining = 0 } | Error CCS.Server_missing - | Error CCS.Server_busy _ -> retries + | Error (CCS.Server_busy _) -> + retries | Ok _ | Error CCS.Server_socket_missing - | Error CCS.Build_id_mismatch -> - { retries with - retries_remaining = retries.original_retries; - } + | Error (CCS.Build_id_mismatch _) -> + { retries with retries_remaining = retries.original_retries } let rate_limit retries = (* Make sure there is at least 1 second between retries *) - let sleep_time = int_of_float - (ceil (1.0 -. (Unix.gettimeofday() -. retries.last_connect_time))) in - if sleep_time > 0 - then Unix.sleep sleep_time + let sleep_time = + int_of_float (ceil (1.0 -. (Unix.gettimeofday () -. retries.last_connect_time))) + in + if sleep_time > 0 then Unix.sleep sleep_time let consume_retry retries = let retries_remaining = retries.retries_remaining - 1 in if retries_remaining >= 0 then rate_limit retries; - { retries with retries_remaining; } + { retries with retries_remaining } (* A featureful wrapper around CommandConnectSimple.connect_once. This * function handles retries, timeouts, displaying messages during * initialization, etc *) let rec connect ~flowconfig_name ~client_handshake env retries start_time = let connect = connect ~flowconfig_name in - if retries.retries_remaining < 0 - then - FlowExitStatus.(exit ~msg:"\nOut of retries, exiting!" Out_of_retries); - let has_timed_out = match env.expiry with + ( if retries.retries_remaining < 0 then + FlowExitStatus.(exit ~msg:"\nOut of retries, exiting!" 
Out_of_retries) ); + let has_timed_out = + match env.expiry with | None -> false - | Some t -> Unix.gettimeofday() > t + | Some t -> Unix.gettimeofday () > t in - if has_timed_out - then FlowExitStatus.(exit ~msg:"\nTimeout exceeded, exiting" Out_of_time); + (if has_timed_out then FlowExitStatus.(exit ~msg:"\nTimeout exceeded, exiting" Out_of_time)); let retries = { retries with last_connect_time = Unix.gettimeofday () } in let conn = CCS.connect_once ~flowconfig_name ~client_handshake ~tmp_dir:env.tmp_dir env.root in - if Tty.spinner_used () then Tty.print_clear_line stderr; let retries = reset_retries_if_necessary retries conn in match conn with | Ok (ic, oc) -> (ic, oc) | Error CCS.Server_missing -> - handle_missing_server ~flowconfig_name ~client_handshake env retries start_time - | Error CCS.Server_busy busy_reason -> - let busy_reason = match busy_reason with + handle_missing_server ~flowconfig_name ~client_handshake env retries start_time + | Error (CCS.Server_busy busy_reason) -> + let busy_reason = + match busy_reason with | CCS.Too_many_clients -> "has too many clients and rejected our connection" | CCS.Not_responding -> "is not responding" | CCS.Fail_on_init _ -> "is still initializing and the client used --retry-if-init false" - in - if not env.quiet then Printf.eprintf + in + if not env.quiet then + Printf.eprintf "The flow server %s (%d %s remaining): %s%!" busy_reason retries.retries_remaining - (if retries.retries_remaining = 1 then "retry" else "retries") - (Tty.spinner()); - connect ~client_handshake env (consume_retry retries) start_time - | Error CCS.Build_id_mismatch -> - let msg = "The flow server's version didn't match the client's, so it exited." in - if env.autostart - then - let start_time = Unix.gettimeofday () in - begin - if not env.quiet then - Utils_js.prerr_endlinef "%s\nGoing to launch a new one.\n%!" msg; - (* Don't decrement retries -- the server is definitely not running, - * so the next time round will hit Server_missing above, *but* - * before that will actually start the server -- we need to make - * sure that happens. - *) - connect ~client_handshake env retries start_time - end - else - let msg = "\n"^msg in - FlowExitStatus.(exit ~msg Build_id_mismatch) + ( if retries.retries_remaining = 1 then + "retry" + else + "retries" ) + (Tty.spinner ()); + connect ~client_handshake env (consume_retry retries) start_time + | Error CCS.(Build_id_mismatch Server_exited) -> + let msg = "The flow server's version didn't match the client's, so it exited." in + if env.autostart then ( + let start_time = Unix.gettimeofday () in + if not env.quiet then Utils_js.prerr_endlinef "%s\nGoing to launch a new one.\n%!" msg; + + (* Don't decrement retries -- the server is definitely not running, + * so the next time round will hit Server_missing above, *but* + * before that will actually start the server -- we need to make + * sure that happens. + *) + connect ~client_handshake env retries start_time + ) else + let msg = "\n" ^ msg in + FlowExitStatus.(exit ~msg Build_id_mismatch) + | Error CCS.(Build_id_mismatch (Client_should_error { server_bin; server_version })) -> + if env.rerun_on_mismatch then ( + if not env.quiet then ( + Printf.eprintf + "Version mismatch! Server binary is Flow v%s but we are using v%s\n%!" + server_version + Flow_version.version; + Printf.eprintf "Restarting command using the same binary as the server\n%!" 
+ ); + let argv = Array.copy Sys.argv in + argv.(0) <- server_bin; + Unix.execv server_bin argv + ) else + let msg = + Printf.sprintf + "\nThe Flow server's version (v%s) didn't match the client's (v%s). Exiting" + server_version + Flow_version.version + in + FlowExitStatus.(exit ~msg Build_id_mismatch) | Error CCS.Server_socket_missing -> - begin try - if not env.quiet then Utils_js.prerr_endlinef - "Attempting to kill server for `%s`" - (Path.to_string env.root); + begin + try + if not env.quiet then + Utils_js.prerr_endlinef "Attempting to kill server for `%s`" (Path.to_string env.root); CommandMeanKill.mean_kill ~flowconfig_name ~tmp_dir:env.tmp_dir env.root; - if not env.quiet then Utils_js.prerr_endlinef - "Successfully killed server for `%s`" - (Path.to_string env.root); + if not env.quiet then + Utils_js.prerr_endlinef "Successfully killed server for `%s`" (Path.to_string env.root); let start_time = Unix.gettimeofday () in handle_missing_server ~flowconfig_name ~client_handshake env retries start_time with CommandMeanKill.FailedToKill err -> - begin if not env.quiet then match err with - | Some err -> prerr_endline err - | None -> () + begin + if not env.quiet then + match err with + | Some err -> prerr_endline err + | None -> () end; let msg = Utils_js.spf "Failed to kill server for `%s`" (Path.to_string env.root) in FlowExitStatus.(exit ~msg Kill_error) - end + end and handle_missing_server ~flowconfig_name ~client_handshake env retries start_time = - if env.autostart then begin - let retries = match start_flow_server env with + if env.autostart then + let retries = + match start_flow_server env with | Ok () -> - if not env.quiet then Printf.eprintf "Started a new flow server: %s%!" (Tty.spinner()); + if not env.quiet then Printf.eprintf "Started a new flow server: %s%!" (Tty.spinner ()); retries | Error (_, FlowExitStatus.Lock_stolen) -> if not env.quiet then - Printf.eprintf "Failed to start a new flow server (%d %s remaining): %s%!" + Printf.eprintf + "Failed to start a new flow server (%d %s remaining): %s%!" retries.retries_remaining - (if retries.retries_remaining = 1 then "retry" else "retries") - (Tty.spinner()); + ( if retries.retries_remaining = 1 then + "retry" + else + "retries" ) + (Tty.spinner ()); consume_retry retries - | Error (msg, code) -> - FlowExitStatus.exit ~msg code + | Error (msg, code) -> FlowExitStatus.exit ~msg code in connect ~flowconfig_name ~client_handshake env retries start_time - end else begin - let msg = Utils_js.spf - "\nError: There is no Flow server running in '%s'." - (Path.to_string env.root) in + else + let msg = + Utils_js.spf "\nError: There is no Flow server running in '%s'." (Path.to_string env.root) + in FlowExitStatus.(exit ~msg No_server_running) - end let connect ~flowconfig_name ~client_handshake env = let start_time = Unix.gettimeofday () in - let retries = { - retries_remaining = env.retries; - original_retries = env.retries; - last_connect_time = Unix.gettimeofday (); - } in - + let retries = + { + retries_remaining = env.retries; + original_retries = env.retries; + last_connect_time = Unix.gettimeofday (); + } + in let res = connect ~flowconfig_name ~client_handshake env retries start_time in res diff --git a/src/commands/commandConnectSimple.ml b/src/commands/commandConnectSimple.ml index 0dc56c96e6e..500c458e962 100644 --- a/src/commands/commandConnectSimple.ml +++ b/src/commands/commandConnectSimple.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,19 +8,31 @@ module Server_files = Server_files_js type busy_reason = -| Too_many_clients -| Not_responding -| Fail_on_init of (ServerStatus.status * FileWatcherStatus.status) + | Too_many_clients + | Not_responding + | Fail_on_init of (ServerStatus.status * FileWatcherStatus.status) + +type mismatch_behavior = + (* The server exited due to the build id mismatch *) + | Server_exited + (* The server is still alive but the client should error *) + | Client_should_error of { + server_bin: string; + server_version: string; + } type error = - | Build_id_mismatch + | Build_id_mismatch of mismatch_behavior | Server_busy of busy_reason | Server_missing - | Server_socket_missing (* pre-server-monitor versions used a different socket *) + | Server_socket_missing + +(* pre-server-monitor versions used a different socket *) type connect_exn = | Timeout | Missing_socket + exception ConnectError of connect_exn let server_exists ~flowconfig_name ~tmp_dir root = @@ -35,11 +47,12 @@ let wait_on_server_restart ic = with | End_of_file | Sys_error _ -> - (* Server has exited and hung up on us *) - () + (* Server has exited and hung up on us *) + () module SockMap = MyMap.Make (struct type t = Unix.sockaddr + let compare = Pervasives.compare end) @@ -50,148 +63,154 @@ end) * connection, since there's nothing wrong with it. *) let connections = ref SockMap.empty + let open_connection ~timeout ~client_handshake sockaddr = match SockMap.get sockaddr !connections with | Some conn -> conn | None -> - let conn = - try Timeout.open_connection ~timeout sockaddr - with Unix.Unix_error (Unix.ENOENT, "connect", _) -> - raise (ConnectError Missing_socket) + let conn = + try Timeout.open_connection ~timeout sockaddr + with Unix.Unix_error (Unix.ENOENT, "connect", _) -> raise (ConnectError Missing_socket) + in + connections := SockMap.add sockaddr conn !connections; + + (* It's important that we only write this once per connection *) + let fd = Unix.descr_of_out_channel (snd conn) in + SocketHandshake.( + let wire : client_handshake_wire = + ( fst client_handshake |> client_to_monitor_1__to_json |> Hh_json.json_to_string, + Marshal.to_string (snd client_handshake) [] ) in - connections := SockMap.add sockaddr conn !connections; - (* It's important that we only write this once per connection *) - let fd = Unix.descr_of_out_channel (snd conn) in - let open SocketHandshake in - let wire : client_handshake_wire = ( - fst client_handshake |> client_to_monitor_1__to_json |> Hh_json.json_to_string, - Marshal.to_string (snd client_handshake) [] - ) in Marshal_tools.to_fd_with_preamble fd wire |> ignore; - conn + conn) let close_connection sockaddr = match SockMap.get sockaddr !connections with | None -> () | Some (ic, _) -> - connections := SockMap.remove sockaddr !connections; - Timeout.shutdown_connection ic; - Timeout.close_in_noerr ic + connections := SockMap.remove sockaddr !connections; + Timeout.shutdown_connection ic; + Timeout.close_in_noerr ic let establish_connection ~flowconfig_name ~timeout ~client_handshake ~tmp_dir root = let sock_name = Socket.get_path (Server_files.socket_file ~flowconfig_name ~tmp_dir root) in let sockaddr = - if Sys.win32 then + if Sys.win32 then ( let ic = open_in_bin sock_name in let port = input_binary_int ic in close_in ic; Unix.(ADDR_INET (inet_addr_loopback, port)) - else - Unix.ADDR_UNIX sock_name in + ) else + Unix.ADDR_UNIX sock_name + in Ok (sockaddr, 
open_connection ~timeout ~client_handshake sockaddr) let get_handshake ~timeout:_ sockaddr ic oc = - let open SocketHandshake in - try + SocketHandshake.( (* TODO (glevi) - If we want this read to timeout on Windows, we need to make Marshal_tools * respect Timeout. That said, this is a lower priority fix, since we rarely run into * trouble right here. *) - let fd = Timeout.descr_of_in_channel ic in - let wire = (Marshal_tools.from_fd_with_preamble fd : server_handshake_wire) in - let server_handshake = ( - fst wire |> Hh_json.json_of_string |> json_to__monitor_to_client_1, - snd wire |> Option.map ~f:(fun s -> (Marshal.from_string s 0 : monitor_to_client_2)) - (* Server invariant: it only sends us snd=Some if it knows client+server versions match *) - ) in - Ok (sockaddr, ic, oc, server_handshake) - with - | ConnectError Timeout as e -> + try + let fd = Timeout.descr_of_in_channel ic in + let wire = (Marshal_tools.from_fd_with_preamble fd : server_handshake_wire) in + let server_handshake = + ( fst wire |> Hh_json.json_of_string |> json_to__monitor_to_client_1, + snd wire |> Option.map ~f:(fun s -> (Marshal.from_string s 0 : monitor_to_client_2)) ) + (* Server invariant: it only sends us snd=Some if it knows client+server versions match *) + in + Ok (sockaddr, ic, oc, server_handshake) + with + | ConnectError Timeout as e -> (* Timeouts are expected *) raise e - | e -> + | e -> (* Other exceptions may indicate a bad connection, so let's close it *) close_connection sockaddr; - raise e + raise e) let verify_handshake ~client_handshake ~server_handshake sockaddr ic = - let open SocketHandshake in - let (client1, _client2) = client_handshake in - let (server1, server2) = server_handshake in - (* First, let's close the connection as needed *) - begin match server1 with - | {server_intent = Server_will_continue; _} -> - () - | {server_intent = Server_will_hangup; _} -> - close_connection sockaddr; - | {server_intent = Server_will_exit; _} -> - (* If the server will exit shortly, we wouldn't want subsequent connection - * attempts on the Unix Domain Socket to succeed (only to be doomed to failure). - * To avoid that fate, we'll wait for the connection to be closed. *) - wait_on_server_restart ic; - Timeout.close_in_noerr ic - end; - (* Next, let's interpret the server's response into our own response code *) - match (server1, server2) with - | ({server_intent = Server_will_continue; _}, Some Server_ready) -> - Ok () - | ({server_intent = Server_will_continue; _}, Some (Server_still_initializing _)) -> - Ok () - | ({server_intent = Server_will_hangup; _}, Some Server_has_too_many_clients) -> - Error (Server_busy Too_many_clients) - | ({server_intent = Server_will_hangup; _}, Some (Server_still_initializing status)) -> - Error (Server_busy (Fail_on_init status)) - | ({server_intent = Server_will_hangup; _}, None) -> - if client1.client_build_id <> server1.server_build_id then - Error Build_id_mismatch - (* TODO(glevi) - let server survive, and have client exec a matching client. 
*) - else - failwith "Don't know why server closed the connection" - | ({server_intent = Server_will_exit; _}, None) -> - if client1.is_stop_request then - Ok () - else - (* either the build ids were different, or client1 wasn't valid for server *) - Error Build_id_mismatch - | _ -> - failwith "Monitor sent incorrect handshake" + SocketHandshake.( + let (client1, _client2) = client_handshake in + let (server1, server2) = server_handshake in + (* First, let's close the connection as needed *) + begin + match server1 with + | { server_intent = Server_will_continue; _ } -> () + | { server_intent = Server_will_hangup; _ } -> close_connection sockaddr + | { server_intent = Server_will_exit; _ } -> + (* If the server will exit shortly, we wouldn't want subsequent connection + * attempts on the Unix Domain Socket to succeed (only to be doomed to failure). + * To avoid that fate, we'll wait for the connection to be closed. *) + wait_on_server_restart ic; + Timeout.close_in_noerr ic + end; + + (* Next, let's interpret the server's response into our own response code *) + match (server1, server2) with + | ({ server_intent = Server_will_continue; _ }, Some Server_ready) -> Ok () + | ({ server_intent = Server_will_continue; _ }, Some (Server_still_initializing _)) -> Ok () + | ({ server_intent = Server_will_hangup; _ }, Some Server_has_too_many_clients) -> + Error (Server_busy Too_many_clients) + | ({ server_intent = Server_will_hangup; _ }, Some (Server_still_initializing status)) -> + Error (Server_busy (Fail_on_init status)) + | ({ server_intent = Server_will_hangup; server_bin; server_version; _ }, None) -> + if client1.client_build_id <> server1.server_build_id then + Error (Build_id_mismatch (Client_should_error { server_bin; server_version })) + else + failwith "Don't know why server closed the connection" + | ({ server_intent = Server_will_exit; _ }, None) -> + if client1.is_stop_request then + Ok () + else + (* either the build ids were different, or client1 wasn't valid for server *) + Error (Build_id_mismatch Server_exited) + | _ -> failwith "Monitor sent incorrect handshake") (* Connects to the monitor via a socket. 
*) let connect_once ~flowconfig_name ~client_handshake ~tmp_dir root = - let (>>=) = Core_result.(>>=) in + let ( >>= ) = Core_result.( >>= ) in try Timeout.with_timeout ~timeout:1 ~on_timeout:(fun _ -> raise (ConnectError Timeout)) - ~do_:begin fun timeout -> - establish_connection ~flowconfig_name ~timeout ~client_handshake ~tmp_dir root - >>= fun (sockaddr, (ic, oc)) -> - get_handshake ~timeout sockaddr ic oc - end >>= fun (sockaddr, ic, oc, server_handshake) -> - verify_handshake ~client_handshake ~server_handshake sockaddr ic >>= fun () -> - Ok (ic, oc) + ~do_: + begin + fun timeout -> + establish_connection ~flowconfig_name ~timeout ~client_handshake ~tmp_dir root + >>= (fun (sockaddr, (ic, oc)) -> get_handshake ~timeout sockaddr ic oc) + end + >>= fun (sockaddr, ic, oc, server_handshake) -> + verify_handshake ~client_handshake ~server_handshake sockaddr ic >>= (fun () -> Ok (ic, oc)) with | ConnectError Missing_socket -> - if server_exists ~flowconfig_name ~tmp_dir root - then Error Server_socket_missing - else Error Server_missing + if server_exists ~flowconfig_name ~tmp_dir root then + Error Server_socket_missing + else + Error Server_missing | ConnectError Timeout | _ -> - if server_exists ~flowconfig_name ~tmp_dir root - then Error (Server_busy Not_responding) - else Error Server_missing + if server_exists ~flowconfig_name ~tmp_dir root then + Error (Server_busy Not_responding) + else + Error Server_missing -let busy_reason_to_string (busy_reason: busy_reason) : string = +let busy_reason_to_string (busy_reason : busy_reason) : string = match busy_reason with | Too_many_clients -> "Too_many_clients" | Not_responding -> "Not_responding" | Fail_on_init (server_status, watcher_status) -> "Fail_on_init(" - ^ "server_status=" ^ (ServerStatus.string_of_status server_status) ^ "," - ^ "watcher_status=" ^ (FileWatcherStatus.string_of_status watcher_status) ^ ")" + ^ "server_status=" + ^ ServerStatus.string_of_status server_status + ^ "," + ^ "watcher_status=" + ^ FileWatcherStatus.string_of_status watcher_status + ^ ")" -let error_to_string (error: error) : string = +let error_to_string (error : error) : string = match error with - | Build_id_mismatch -> "Build_id_mismatch" - | Server_busy busy_reason -> "Server_busy(" ^ (busy_reason_to_string busy_reason) ^ ")" + | Build_id_mismatch Server_exited -> "Build_id_mismatch(Server_exited)" + | Build_id_mismatch (Client_should_error _) -> "Build_id_mismatch(Client_should_error)" + | Server_busy busy_reason -> "Server_busy(" ^ busy_reason_to_string busy_reason ^ ")" | Server_missing -> "Server_missing" | Server_socket_missing -> "Server_socket_missing" diff --git a/src/commands/commandConnectSimple.mli b/src/commands/commandConnectSimple.mli index 390c96d1e74..117f546fb4f 100644 --- a/src/commands/commandConnectSimple.mli +++ b/src/commands/commandConnectSimple.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
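(* An illustrative sketch, not from this patch: one way a caller could consume the
 * richer CommandConnectSimple.error introduced above, where Build_id_mismatch now
 * carries a mismatch_behavior payload. The function name describe_connect_error is
 * hypothetical. *)
let describe_connect_error (err : CommandConnectSimple.error) : string =
  CommandConnectSimple.(
    match err with
    | Server_missing -> "no Flow server is running for this root"
    | Server_socket_missing -> "an older (pre-server-monitor) server owns this root"
    | Server_busy Too_many_clients -> "the server has too many clients"
    | Server_busy Not_responding -> "the server is not responding"
    | Server_busy (Fail_on_init _) -> "the server is still initializing"
    | Build_id_mismatch Server_exited -> "the server had a different build id and exited"
    | Build_id_mismatch (Client_should_error { server_bin; server_version }) ->
      Printf.sprintf
        "the server is running Flow v%s; rerun this command with %s"
        server_version
        server_bin)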
@@ -10,8 +10,15 @@ type busy_reason = | Not_responding | Fail_on_init of (ServerStatus.status * FileWatcherStatus.status) +type mismatch_behavior = + | Server_exited + | Client_should_error of { + server_bin: string; + server_version: string; + } + type error = - | Build_id_mismatch + | Build_id_mismatch of mismatch_behavior | Server_busy of busy_reason | Server_missing | Server_socket_missing @@ -20,7 +27,7 @@ val server_exists : flowconfig_name:string -> tmp_dir:string -> Path.t -> bool val connect_once : flowconfig_name:string -> - client_handshake: (SocketHandshake.client_handshake) -> + client_handshake:SocketHandshake.client_handshake -> tmp_dir:string -> Path.t -> (Timeout.in_channel * out_channel, error) result diff --git a/src/commands/commandInfo.ml b/src/commands/commandInfo.ml index e2e974b1ef7..606def38acd 100644 --- a/src/commands/commandInfo.ml +++ b/src/commands/commandInfo.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/commands/commandMeanKill.ml b/src/commands/commandMeanKill.ml index f8d88629aa0..343dbf029a5 100644 --- a/src/commands/commandMeanKill.ml +++ b/src/commands/commandMeanKill.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,24 +11,20 @@ let mean_kill ~flowconfig_name ~tmp_dir root = let pids = try PidLog.get_pids (Server_files_js.pids_file ~flowconfig_name ~tmp_dir root) with PidLog.FailedToGetPids -> - let msg = Printf.sprintf - "Unable to figure out pids of running Flow server. \ - Try manually killing it with 'pkill %s' (be careful on shared \ - devservers)" - Utils_js.exe_name + let msg = + Printf.sprintf + "Unable to figure out pids of running Flow server. Try manually killing it with 'pkill %s' (be careful on shared devservers)" + Utils_js.exe_name in raise (FailedToKill (Some msg)) in - List.iter (fun (pid, _) -> - try - pid - |> Sys_utils.handle_of_pid_for_termination - |> Sys_utils.terminate_process - with Unix.Unix_error (Unix.ESRCH, "kill", _) -> - (* no such process *) - () - ) pids; - ignore(Unix.sleep 1); - if CommandConnectSimple.server_exists ~flowconfig_name ~tmp_dir root - then raise (FailedToKill None); + List.iter + (fun (pid, _) -> + try pid |> Sys_utils.handle_of_pid_for_termination |> Sys_utils.terminate_process + with Unix.Unix_error (Unix.ESRCH, "kill", _) -> (* no such process *) + ()) + pids; + ignore (Unix.sleep 1); + if CommandConnectSimple.server_exists ~flowconfig_name ~tmp_dir root then + raise (FailedToKill None); () diff --git a/src/commands/commandSpec.ml b/src/commands/commandSpec.ml index ee798ebe76d..474329716da 100644 --- a/src/commands/commandSpec.ml +++ b/src/commands/commandSpec.ml @@ -1,37 +1,41 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) exception Show_help + exception Failed_to_parse of string * string module ArgSpec = struct type values_t = string list SMap.t type flag_arg_count = - | No_Arg - | Arg - | Arg_List - | Arg_Rest + | No_Arg + | Arg + | Arg_List + | Arg_Rest (* consumes a '--' and all remaining args *) + | Arg_Command + + (* consumes all the remaining args verbatim, to pass to a subcommand *) type 'a flag_t = { - parse : name:string -> string list option -> 'a; - arg : flag_arg_count; + parse: name:string -> string list option -> 'a; + arg: flag_arg_count; } type flag_metadata = { - doc : string; - env : string option; - arg_count : flag_arg_count; + doc: string; + env: string option; + arg_count: flag_arg_count; } type ('a, 'b) t = { - f : (values_t * 'a) -> (values_t * 'b); - flags : flag_metadata SMap.t; - anons : (string * flag_arg_count) list; + f: values_t * 'a -> values_t * 'b; + flags: flag_metadata SMap.t; + anons: (string * flag_arg_count) list; } (* Partially applies [fn] with the values from [values]. Uses [spec] to @@ -42,143 +46,189 @@ module ArgSpec = struct let apply_arg name arg_type f (values, main) = let (values, main) = f (values, main) in - let value = - try Some (SMap.find_unsafe name values : string list) - with Not_found -> None - in + let value = (try Some (SMap.find_unsafe name values : string list) with Not_found -> None) in (values, main (arg_type.parse ~name value)) let pop_anon spec = match spec.anons with | [] -> (None, spec) - | hd::tl -> (Some hd, {spec with anons = tl}) - - let string = { - parse = (fun ~name:_ -> function - | Some [x] -> Some x - | _ -> None - ); - arg = Arg; - } - let bool = { - parse = (fun ~name:_ -> function - | Some ["0"] - | Some ["false"] - | None -> Some false - | Some _ -> Some true - ); - arg = Arg; - } - let int = { - parse = (fun ~name:_ -> function - | Some [x] -> Some (int_of_string x) - | _ -> None - ); - arg = Arg; - } - let enum values = { - parse = (fun ~name -> function - | Some [x] -> - begin match List.find_opt (fun (s, _) -> s = x) values with - | Some (_, v) -> Some v - | None -> - raise (Failed_to_parse (name, Utils_js.spf - "expected one of: %s" - (String.concat ", " (List.map fst values)) - )) - end - | _ -> None - ); - arg = Arg; - } - - let no_arg = { - parse = (fun ~name:_ -> function - | Some _ -> true - | None -> false - ); - arg = No_Arg; - } - - let required ?default arg_type = { - parse = (fun ~name -> function - | None -> - begin match default with - | Some default -> default - | None -> raise (Failed_to_parse (name, "missing required arguments")) - end - | value -> match arg_type.parse ~name value with - | None -> raise (Failed_to_parse (name, Utils_js.spf - "wrong type for required argument%s" - (match value with Some [x] -> ": " ^ x | _ -> ""))) - | Some result -> result - ); - arg = arg_type.arg; - } - - let optional arg_type = { - parse = (fun ~name -> function - | None -> None - | value -> arg_type.parse ~name value); - arg = arg_type.arg; - } - - let list_of arg_type = { - parse = (fun ~name -> function - | None -> Some [] - | Some values -> - Some (List.map (fun x -> - match arg_type.parse ~name (Some [x]) with - | Some result -> result - | None -> raise (Failed_to_parse (name, Utils_js.spf - "wrong type for argument list item: %s" x)) - ) values) - ); - arg = Arg_List; - } - - let delimited delim arg_type = { - parse = (fun ~name -> function - | Some [x] -> - let args = Str.split (Str.regexp_string delim) x in - Some (List.map (fun arg -> - match arg_type.parse ~name (Some [arg]) with - | None -> - raise 
(Failed_to_parse (name, Utils_js.spf "wrong type for value: %s" arg)) - | Some result -> - result - ) args) - | _ -> None - ); - arg = Arg; - } - - let key_value delim (key_type, value_type) = { - parse = (fun ~name -> function - | Some [x] -> - let key, value = match Str.bounded_split (Str.regexp_string delim) x 2 with - | [key; value] -> key, Some [value] - | [key] -> key, None - | _ -> raise (Failed_to_parse (name, Utils_js.spf "unexpected value: %s" x)) - in - let key = match key_type.parse ~name (Some [key]) with - | None -> - raise (Failed_to_parse (name, Utils_js.spf "wrong type for key: %s" key)) - | Some result -> - result - in - let value = value_type.parse ~name value in - Some (key, value) - | _ -> None - ); - arg = Arg; - } - - let help_flag = SMap.empty |> SMap.add "--help" { - doc = "This list of options"; - env = None; - arg_count = No_Arg; - } + | hd :: tl -> (Some hd, { spec with anons = tl }) + + let string = + { + parse = + (fun ~name:_ -> function + | Some [x] -> Some x + | _ -> None); + arg = Arg; + } + + let bool = + { + parse = + (fun ~name:_ -> function + | Some ["0"] + | Some ["false"] + | None -> + Some false + | Some _ -> Some true); + arg = Arg; + } + + let int = + { + parse = + (fun ~name -> function + | Some [x] -> + Some + (try int_of_string x + with Failure _ -> + raise (Failed_to_parse (name, Utils_js.spf "expected an integer, got %S" x))) + | _ -> None); + arg = Arg; + } + + let enum values = + { + parse = + (fun ~name -> function + | Some [x] -> + begin + match Core_list.find ~f:(fun (s, _) -> s = x) values with + | Some (_, v) -> Some v + | None -> + raise + (Failed_to_parse + ( name, + Utils_js.spf + "expected one of: %s" + (String.concat ", " (Core_list.map ~f:fst values)) )) + end + | _ -> None); + arg = Arg; + } + + let command cmds = + { + parse = + (fun ~name -> function + | Some (cmd_name :: argv) -> + begin + match (enum cmds).parse ~name (Some [cmd_name]) with + | Some cmd -> Some (cmd, argv) + | None -> None + end + | Some [] + | None -> + None); + arg = Arg_Command; + } + + let no_arg = + { + parse = + (fun ~name:_ -> function + | Some _ -> true + | None -> false); + arg = No_Arg; + } + + let required ?default arg_type = + { + parse = + (fun ~name -> function + | None -> + begin + match default with + | Some default -> default + | None -> raise (Failed_to_parse (name, "missing required arguments")) + end + | value -> + (match arg_type.parse ~name value with + | None -> + raise + (Failed_to_parse + ( name, + Utils_js.spf + "wrong type for required argument%s" + (match value with + | Some [x] -> ": " ^ x + | _ -> "") )) + | Some result -> result)); + arg = arg_type.arg; + } + + let optional arg_type = + { + parse = + (fun ~name -> function + | None -> None + | value -> arg_type.parse ~name value); + arg = arg_type.arg; + } + + let list_of arg_type = + { + parse = + (fun ~name -> function + | None -> Some [] + | Some values -> + Some + (Core_list.map + ~f:(fun x -> + match arg_type.parse ~name (Some [x]) with + | Some result -> result + | None -> + raise + (Failed_to_parse + (name, Utils_js.spf "wrong type for argument list item: %s" x))) + values)); + arg = Arg_List; + } + + let delimited delim arg_type = + { + parse = + (fun ~name -> function + | Some [x] -> + let args = Str.split (Str.regexp_string delim) x in + Some + (Core_list.map + ~f:(fun arg -> + match arg_type.parse ~name (Some [arg]) with + | None -> + raise (Failed_to_parse (name, Utils_js.spf "wrong type for value: %s" arg)) + | Some result -> result) + args) + | _ -> None); + arg = 
Arg; + } + + let key_value delim (key_type, value_type) = + { + parse = + (fun ~name -> function + | Some [x] -> + let (key, value) = + match Str.bounded_split (Str.regexp_string delim) x 2 with + | [key; value] -> (key, Some [value]) + | [key] -> (key, None) + | _ -> raise (Failed_to_parse (name, Utils_js.spf "unexpected value: %s" x)) + in + let key = + match key_type.parse ~name (Some [key]) with + | None -> raise (Failed_to_parse (name, Utils_js.spf "wrong type for key: %s" key)) + | Some result -> result + in + let value = value_type.parse ~name value in + Some (key, value) + | _ -> None); + arg = Arg; + } + + let help_flag = + SMap.empty + |> SMap.add "--help" { doc = "This list of options"; env = None; arg_count = No_Arg } let apply_help (values, main) = let main help = @@ -188,92 +238,98 @@ module ArgSpec = struct apply_arg "--help" no_arg (fun x -> x) (values, main) (* Base spec, defines --help *) - let empty = { - f = apply_help; - flags = help_flag; - anons = []; - } - - let flag name arg_type ~doc ?env prev = { - f = apply_arg name arg_type prev.f; - flags = prev.flags |> SMap.add name { - doc; - env; - arg_count = arg_type.arg; - }; - anons = prev.anons; - } - - let anon name arg_type prev = { - f = apply_arg name arg_type prev.f; - flags = prev.flags; - anons = List.append prev.anons [(name, arg_type.arg)]; - } - - let rest prev = { - f = apply_arg "--" (optional (list_of string)) prev.f; - flags = prev.flags; - anons = List.append prev.anons [("--", Arg_Rest)]; - } - - let dummy value prev = { - f = (fun x -> let (values, main) = prev.f x in (values, main value)); - flags = prev.flags; - anons = prev.anons; - } - - let collect fn prev = { - f = (fun x -> let (values, main) = prev.f x in (values, fn main)); - flags = prev.flags; - anons = prev.anons; - } + let empty = { f = apply_help; flags = help_flag; anons = [] } + + let flag name arg_type ~doc ?env prev = + { + f = apply_arg name arg_type prev.f; + flags = prev.flags |> SMap.add name { doc; env; arg_count = arg_type.arg }; + anons = prev.anons; + } + + let anon name arg_type prev = + { + f = apply_arg name arg_type prev.f; + flags = prev.flags; + anons = Core_list.append prev.anons [(name, arg_type.arg)]; + } + + let rest prev = + { + f = apply_arg "--" (optional (list_of string)) prev.f; + flags = prev.flags; + anons = Core_list.append prev.anons [("--", Arg_Rest)]; + } + + let dummy value prev = + { + f = + (fun x -> + let (values, main) = prev.f x in + (values, main value)); + flags = prev.flags; + anons = prev.anons; + } + + let collect fn prev = + { + f = + (fun x -> + let (values, main) = prev.f x in + (values, fn main)); + flags = prev.flags; + anons = prev.anons; + } end type ('a, 'b) builder_t = { - name : string; - doc : string; - usage : string; - args : ('a, 'b) ArgSpec.t; + name: string; + doc: string; + usage: string; + args: ('a, 'b) ArgSpec.t; } type t = { - cmdname : string; - cmddoc : string; - flags : ArgSpec.flag_metadata SMap.t; - args_of_argv : string list -> string list SMap.t; - string_of_usage : unit -> string; - main : string list SMap.t -> unit; + cmdname: string; + cmddoc: string; + flags: ArgSpec.flag_metadata SMap.t; + args_of_argv: string list -> string list SMap.t; + string_of_usage: unit -> string; + main: string list SMap.t -> unit; } let no_dashes opt = - if opt.[0] != '-' then opt - else if opt.[1] != '-' then String.sub opt 1 ((String.length opt) - 1) - else String.sub opt 2 ((String.length opt) - 2) + if opt.[0] != '-' then + opt + else if opt.[1] != '-' then + String.sub opt 1 
(String.length opt - 1) + else + String.sub opt 2 (String.length opt - 2) let is_arg arg = String.length arg > 1 && arg <> "--" && arg.[0] = '-' let consume_args args = let is_done = ref false in - List.partition - (fun value -> - (if not !is_done && is_arg value then is_done := true); - not !is_done - ) + Core_list.partition_tf + ~f:(fun value -> + if (not !is_done) && is_arg value then is_done := true; + not !is_done) args let rec parse values spec = function | [] -> values - | arg::args -> - if is_arg arg - then - (* split "--foo=bar"::args into "--foo"::"bar"::args *) - let arg, args = match (Str.bounded_split (Str.regexp "=") arg 2) with - | arg::value::[] -> arg, value::args - | arg::[] -> arg, args + | arg :: args -> + if is_arg arg then + (* split "--foo=bar"::args into "--foo"::"bar"::args *) + let (arg, args) = + match Str.bounded_split (Str.regexp "=") arg 2 with + | [arg; value] -> (arg, value :: args) + | [arg] -> (arg, args) | _ -> assert false - in - parse_flag values spec arg args - else parse_anon values spec arg args + in + parse_flag values spec arg args + else + parse_anon values spec arg args and parse_flag values spec arg args = let flags = spec.ArgSpec.flags in @@ -283,27 +339,22 @@ and parse_flag values spec arg args = | ArgSpec.No_Arg -> let values = SMap.add arg ["true"] values in parse values spec args - | ArgSpec.Arg -> - begin match args with - | [] -> - raise (Failed_to_parse (arg, "option needs an argument.")) - | value::args -> - if is_arg value then - raise (Failed_to_parse (arg, "option needs an argument.")); - let values = SMap.add arg [value] values in - parse values spec args + begin + match args with + | [] -> raise (Failed_to_parse (arg, "option needs an argument.")) + | value :: args -> + if is_arg value then raise (Failed_to_parse (arg, "option needs an argument.")); + let values = SMap.add arg [value] values in + parse values spec args end - | ArgSpec.Arg_List -> let (value_list, args) = consume_args args in let values = SMap.add arg value_list values in parse values spec args - | ArgSpec.Arg_Rest -> failwith "Not supported" - with - | Not_found -> - raise (Failed_to_parse (arg, "unknown option")) + | ArgSpec.Arg_Command -> failwith "Not supported" + with Not_found -> raise (Failed_to_parse (arg, "unknown option")) and parse_anon values spec arg args = let (anon, spec) = ArgSpec.pop_anon spec in @@ -312,66 +363,80 @@ and parse_anon values spec arg args = let values = SMap.add name [arg] values in parse values spec args | Some (name, ArgSpec.Arg_List) -> - let (value_list, args) = consume_args (arg::args) in + let (value_list, args) = consume_args (arg :: args) in let values = SMap.add name value_list values in parse values spec args | Some (name, ArgSpec.Arg_Rest) -> + let args = + if arg = "--" then + args + else + arg :: args + in let values = SMap.add name args values in parse values spec [] - | Some (_, ArgSpec.No_Arg) -> - assert false - | None -> - raise (Failed_to_parse ("anon", Utils_js.spf - "unexpected argument '%s'." - arg - )) + | Some (name, ArgSpec.Arg_Command) -> + let values = SMap.add name (arg :: args) values in + parse values spec [] + | Some (_, ArgSpec.No_Arg) -> assert false + | None -> raise (Failed_to_parse ("anon", Utils_js.spf "unexpected argument '%s'." 
arg)) let init_from_env spec = let flags = spec.ArgSpec.flags in - SMap.fold (fun arg flag acc -> - match flag.ArgSpec.env with - | Some env -> - begin - try SMap.add arg [Sys.getenv env] acc - with Not_found -> acc - end - | None -> acc - ) flags SMap.empty + SMap.fold + (fun arg flag acc -> + match flag.ArgSpec.env with + | Some env -> + begin + match Sys.getenv env with + | "" -> acc + | env -> SMap.add arg [env] acc + | exception Not_found -> acc + end + | None -> acc) + flags + SMap.empty let usage_string spec = let usage = spec.usage in - let flags = SMap.fold (fun k v a -> (k, v)::a) spec.args.ArgSpec.flags [] in + let flags = SMap.fold (fun k v a -> (k, v) :: a) spec.args.ArgSpec.flags [] in let cmp (a, _) (b, _) = String.compare (no_dashes a) (no_dashes b) in - let flags = List.sort cmp flags in - let col_width = flags |> List.fold_left (fun acc (a, _) -> - max acc (String.length a) - ) 0 in - let flag_usage = flags - |> List.filter (fun (_, meta) -> meta.ArgSpec.doc <> "") - |> List.map (fun (name, meta) -> - Utils_js.spf " %-*s %s" col_width name meta.ArgSpec.doc - ) + let flags = Core_list.sort ~cmp flags in + let col_width = + flags |> Core_list.fold_left ~f:(fun acc (a, _) -> max acc (String.length a)) ~init:0 + in + let flag_usage = + flags + |> Core_list.filter ~f:(fun (_, meta) -> meta.ArgSpec.doc <> "") + |> Core_list.map ~f:(fun (name, meta) -> + Utils_js.spf " %-*s %s" col_width name meta.ArgSpec.doc) |> String.concat "\n" in - (usage ^ "\n" ^ flag_usage) - -let usage spec = - print_endline (usage_string spec) - -let command spec main = { - cmdname = spec.name; - cmddoc = spec.doc; - flags = spec.args.ArgSpec.flags; - string_of_usage = (fun () -> usage_string spec); - args_of_argv = parse (init_from_env spec.args) spec.args; - main = (fun args -> - let main = ArgSpec.apply spec.args args main in - main ()); -} + usage ^ "\n" ^ flag_usage + +let usage spec = print_endline (usage_string spec) + +let command spec main = + { + cmdname = spec.name; + cmddoc = spec.doc; + flags = spec.args.ArgSpec.flags; + string_of_usage = (fun () -> usage_string spec); + args_of_argv = parse (init_from_env spec.args) spec.args; + main = + (fun args -> + let main = ArgSpec.apply spec.args args main in + main ()); + } let run command = command.main + let name command = command.cmdname + let doc command = command.cmddoc + let flags command = command.flags + let args_of_argv command = command.args_of_argv + let string_of_usage command = command.string_of_usage () diff --git a/src/commands/commandSpec.mli b/src/commands/commandSpec.mli index 34d922bd106..9cff3172b24 100644 --- a/src/commands/commandSpec.mli +++ b/src/commands/commandSpec.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
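(* An illustrative sketch, not from this patch: composing a spec with the combinators
 * implemented in commandSpec.ml above and declared in the interface hunk below,
 * including the new `command` parser (Arg_Command), which captures a subcommand name
 * plus its remaining argv verbatim. The "wrapper" command and its subcommand list are
 * hypothetical. *)
let wrapper_spec =
  {
    CommandSpec.name = "wrapper";
    doc = "Dispatches to a nested subcommand";
    usage = "Usage: wrapper [--quiet] SUBCOMMAND [ARGS]";
    args =
      CommandSpec.ArgSpec.(
        empty
        |> flag "--quiet" no_arg ~doc:"Suppress output"
        |> anon "subcommand" (required (command [("check", `Check); ("stop", `Stop)])));
  }

let wrapper_main (quiet : bool) ((cmd, rest) : [ `Check | `Stop ] * string list) () =
  if not quiet then
    Printf.printf
      "running %s with %d passthrough args\n"
      (match cmd with
      | `Check -> "check"
      | `Stop -> "stop")
      (List.length rest)

let wrapper_command = CommandSpec.command wrapper_spec wrapper_main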
@@ -7,50 +7,68 @@ module ArgSpec : sig type 'a flag_t + type ('a, 'b) t type flag_arg_count = - | No_Arg - | Arg - | Arg_List - | Arg_Rest + | No_Arg + | Arg + | Arg_List + | Arg_Rest + | Arg_Command type flag_metadata = { - doc : string; - env : string option; - arg_count : flag_arg_count; + doc: string; + env: string option; + arg_count: flag_arg_count; } val empty : ('a, 'a) t + val flag : string -> 'a flag_t -> doc:string -> ?env:string -> ('b, 'a -> 'c) t -> ('b, 'c) t + val anon : string -> 'a flag_t -> ('b, 'a -> 'c) t -> ('b, 'c) t + val rest : ('a, string list option -> 'b) t -> ('a, 'b) t + val dummy : 'a -> ('b, 'a -> 'c) t -> ('b, 'c) t + val collect : ('main -> 'a -> 'new_main) -> ('b, 'main) t -> ('b, 'a -> 'new_main) t val no_arg : bool flag_t + val string : string option flag_t + val bool : bool option flag_t + val int : int option flag_t + val enum : (string * 't) list -> 't option flag_t + val command : (string * 'cmd) list -> ('cmd * string list) option flag_t + val required : ?default:'a -> 'a option flag_t -> 'a flag_t + val optional : 'a option flag_t -> 'a option flag_t + val list_of : 'a option flag_t -> 'a list option flag_t + val delimited : string -> 'a option flag_t -> 'a list option flag_t - val key_value : string -> ('a option flag_t * 'b flag_t) -> ('a * 'b) option flag_t + + val key_value : string -> 'a option flag_t * 'b flag_t -> ('a * 'b) option flag_t end type ('a, 'b) builder_t = { - name : string; - doc : string; - usage : string; - args : ('a, 'b) ArgSpec.t; + name: string; + doc: string; + usage: string; + args: ('a, 'b) ArgSpec.t; } type t exception Show_help + exception Failed_to_parse of string * string val usage : ('a, 'b) builder_t -> unit @@ -59,8 +77,13 @@ val command : ('main, unit -> unit) builder_t -> 'main -> t (* accessors *) val run : t -> string list SMap.t -> unit + val name : t -> string + val doc : t -> string + val flags : t -> ArgSpec.flag_metadata SMap.t + val args_of_argv : t -> string list -> string list SMap.t + val string_of_usage : t -> string diff --git a/src/commands/commandUtils.ml b/src/commands/commandUtils.ml index 6ee3c09970e..fdf7325e0ce 100644 --- a/src/commands/commandUtils.ml +++ b/src/commands/commandUtils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
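(* An illustrative sketch, not from this patch: the list_of / key_value / required value
 * parsers from the CommandSpec.ArgSpec interface above composed into a single flag.
 * Per consume_args in commandSpec.ml above, an Arg_List flag like this consumes the
 * non-flag arguments that follow it, e.g. `--define FOO=1 BAR=2`. The flag name
 * "--define" is hypothetical. *)
let define_flag prev =
  CommandSpec.ArgSpec.(
    prev
    |> flag
         "--define"
         (list_of (key_value "=" (string, required int)))
         ~doc:"KEY=INT pairs, e.g. --define FOO=1 BAR=2")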
@@ -7,182 +7,241 @@ open Utils_js +let run_command command argv = + try + let args = CommandSpec.args_of_argv command argv in + CommandSpec.run command args + with + | CommandSpec.Show_help -> + print_endline (CommandSpec.string_of_usage command); + FlowExitStatus.(exit No_error) + | CommandSpec.Failed_to_parse (arg_name, msg) -> + begin + try + let json_arg = + Core_list.find_exn + ~f:(fun s -> + String_utils.string_starts_with s "--pretty" + || String_utils.string_starts_with s "--json") + argv + in + let pretty = String_utils.string_starts_with json_arg "--pretty" in + FlowExitStatus.set_json_mode ~pretty + with Not_found -> () + end; + let msg = + Utils_js.spf + "%s: %s %s\n%s" + (Filename.basename Sys.executable_name) + arg_name + msg + (CommandSpec.string_of_usage command) + in + FlowExitStatus.(exit ~msg Commandline_usage_error) + let expand_file_list ?options filenames = - let paths = List.map Path.make filenames in - let next_files = match paths with - | [] -> fun () -> [] - | _ -> - let filter = - begin match options with - | Some options -> Files.is_valid_path ~options - | _ -> fun filename -> Filename.check_suffix filename ".js" - end in - Find.make_next_files - ~filter - ~others:(List.tl paths) - (List.hd paths) in - Files.get_all next_files - -let get_filenames_from_input ?(allow_imaginary=false) input_file filenames = + let paths = Core_list.map ~f:Path.make filenames in + let next_files = + match paths with + | [] -> (fun () -> []) + | _ -> + let filter = + match options with + | Some options -> Files.is_valid_path ~options + | _ -> (fun filename -> Filename.check_suffix filename ".js") + in + Find.make_next_files ~filter ~others:(Core_list.tl_exn paths) (Core_list.hd_exn paths) + in + Files.get_all next_files + +let get_filenames_from_input ?(allow_imaginary = false) input_file filenames = let cwd = Sys.getcwd () in let handle_imaginary = - if allow_imaginary - then Files.imaginary_realpath - else fun fn -> FlowExitStatus.(exit ~msg:(Printf.sprintf "File not found: %S" fn) No_input) + if allow_imaginary then + Files.imaginary_realpath + else + fun fn -> + FlowExitStatus.(exit ~msg:(Printf.sprintf "File not found: %S" fn) No_input) in - let input_file_filenames = match input_file with - | Some "-" -> - Sys_utils.lines_of_in_channel stdin - |> Files.canonicalize_filenames ~handle_imaginary ~cwd - | Some input_file -> - Sys_utils.lines_of_file input_file - |> Files.canonicalize_filenames ~handle_imaginary ~cwd:(Filename.dirname input_file) - | None -> [] + let input_file_filenames = + match input_file with + | Some "-" -> + Sys_utils.lines_of_in_channel stdin |> Files.canonicalize_filenames ~handle_imaginary ~cwd + | Some input_file -> + Sys_utils.lines_of_file input_file + |> Files.canonicalize_filenames ~handle_imaginary ~cwd:(Filename.dirname input_file) + | None -> [] in - let cli_filenames = match filenames with - | Some filenames -> Files.canonicalize_filenames ~handle_imaginary ~cwd filenames - | None -> [] + let cli_filenames = + match filenames with + | Some filenames -> Files.canonicalize_filenames ~handle_imaginary ~cwd filenames + | None -> [] in cli_filenames @ input_file_filenames let print_version () = - print_endlinef - "Flow, a static type checker for JavaScript, version %s" - Flow_version.version + print_endlinef "Flow, a static type checker for JavaScript, version %s" Flow_version.version let expand_path file = let path = Path.make file in - if Path.file_exists path - then Path.to_string path + if Path.file_exists path then + Path.to_string path else - let 
file = Filename.concat (Sys.getcwd()) file in + let file = Filename.concat (Sys.getcwd ()) file in let path = Path.make file in - if Path.file_exists path - then Path.to_string path - else begin + if Path.file_exists path then + Path.to_string path + else let msg = Printf.sprintf "File not found: %s" (Path.to_string path) in FlowExitStatus.(exit ~msg Input_error) - end - -let collect_error_flags main - color - include_warnings - max_warnings - one_line - show_all_errors - show_all_branches - unicode - message_width -= - let include_warnings = match max_warnings with - | Some _ -> true - | None -> include_warnings + +let collect_error_flags + main + color + include_warnings + max_warnings + one_line + show_all_errors + show_all_branches + unicode + message_width = + let include_warnings = + match max_warnings with + | Some _ -> true + | None -> include_warnings in - let unicode = match unicode with - | `Never -> false - | `Always -> true - | `Auto -> Tty.supports_emoji () + let unicode = + match unicode with + | `Never -> false + | `Always -> true + | `Auto -> Tty.supports_emoji () in - let message_width = match message_width with - | Some message_width -> message_width - | None -> Option.value_map (Tty.get_term_cols ()) ~default:120 ~f:(min 120) + let message_width = + match message_width with + | Some message_width -> message_width + | None -> Option.value_map (Tty.get_term_cols ()) ~default:120 ~f:(min 120) in - main { - Errors.Cli_output.color; - include_warnings; - max_warnings; - one_line; - show_all_errors; - show_all_branches; - unicode; - message_width; - } + main + { + Errors.Cli_output.color; + include_warnings; + max_warnings; + one_line; + show_all_errors; + show_all_branches; + unicode; + message_width; + } -let warning_flags prev = CommandSpec.ArgSpec.( - prev - |> flag "--include-warnings" no_arg - ~doc:"Include warnings in the error output (warnings are excluded by default)" - |> flag "--max-warnings" int - ~doc:"Warnings above this number will cause a nonzero exit code (implies --include-warnings)" -) - -let profile_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--profile" no_arg - ~doc:"Output profiling information" -) - -let error_flags prev = CommandSpec.ArgSpec.( - prev - |> collect collect_error_flags - |> flag "--color" - (required ~default:Tty.Color_Auto (enum [ - "never", Tty.Color_Never; - "always", Tty.Color_Always; - "auto", Tty.Color_Auto; - ])) - ~doc:"Display terminal output in color. never, always, auto (default: auto)" - |> warning_flags - |> flag "--one-line" no_arg - ~doc:"Escapes newlines so that each error prints on one line" - |> flag "--show-all-errors" no_arg - ~doc:"Print all errors (the default is to truncate after 50 errors)" - |> flag "--show-all-branches" no_arg - ~doc:"Print all branch errors (the default is to print the most relevant branches)" - |> flag "--unicode" - (required ~default:`Auto (enum [ - "never", `Never; - "always", `Always; - "auto", `Auto; - ])) - ~doc:"Display terminal output with unicode decoration. 
never, always, auto (default: auto)" - |> flag "--message-width" int - ~doc:( - "Sets the width of messages but not code snippets (defaults to the \ - smaller of 120 or the terminal width)" - ) -) +let warning_flags prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--include-warnings" + no_arg + ~doc:"Include warnings in the error output (warnings are excluded by default)" + |> flag + "--max-warnings" + int + ~doc: + "Warnings above this number will cause a nonzero exit code (implies --include-warnings)") + +let profile_flag prev = + CommandSpec.ArgSpec.(prev |> flag "--profile" no_arg ~doc:"Output profiling information") + +let error_flags prev = + CommandSpec.ArgSpec.( + prev + |> collect collect_error_flags + |> flag + "--color" + (required + ~default:Tty.Color_Auto + (enum + [("never", Tty.Color_Never); ("always", Tty.Color_Always); ("auto", Tty.Color_Auto)])) + ~doc:"Display terminal output in color. never, always, auto (default: auto)" + |> warning_flags + |> flag "--one-line" no_arg ~doc:"Escapes newlines so that each error prints on one line" + |> flag + "--show-all-errors" + no_arg + ~doc:"Print all errors (the default is to truncate after 50 errors)" + |> flag + "--show-all-branches" + no_arg + ~doc:"Print all branch errors (the default is to print the most relevant branches)" + |> flag + "--unicode" + (required ~default:`Auto (enum [("never", `Never); ("always", `Always); ("auto", `Auto)])) + ~doc: + "Display terminal output with unicode decoration. never, always, auto (default: auto)" + |> flag + "--message-width" + int + ~doc: + "Sets the width of messages but not code snippets (defaults to the smaller of 120 or the terminal width)") let collect_json_flags main json pretty = if json || pretty then FlowExitStatus.set_json_mode ~pretty; main json pretty -let json_flags prev = CommandSpec.ArgSpec.( - prev - |> collect collect_json_flags - |> flag "--json" no_arg ~doc:"Output results in JSON format" - |> flag "--pretty" no_arg ~doc:"Pretty-print JSON output (implies --json)" -) +let json_flags prev = + CommandSpec.ArgSpec.( + prev + |> collect collect_json_flags + |> flag "--json" no_arg ~doc:"Output results in JSON format" + |> flag "--pretty" no_arg ~doc:"Pretty-print JSON output (implies --json)") -let temp_dir_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--temp-dir" string - ~doc:"Directory in which to store temp files (default: FLOW_TEMP_DIR, or /tmp/flow/)" - ~env:"FLOW_TEMP_DIR" -) +let temp_dir_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--temp-dir" + string + ~doc:"Directory in which to store temp files (default: FLOW_TEMP_DIR, or /tmp/flow/)" + ~env:"FLOW_TEMP_DIR") let collect_lazy_flags main lazy_ lazy_mode = - main (match lazy_mode with - | None when lazy_ -> Some Options.LAZY_MODE_FILESYSTEM - | Some `Fs -> Some Options.LAZY_MODE_FILESYSTEM - | Some `Ide -> Some Options.LAZY_MODE_IDE - | _ -> None) - -let lazy_flags prev = CommandSpec.ArgSpec.( - prev - |> collect collect_lazy_flags - |> flag "--lazy" no_arg - ~doc:"EXPERIMENTAL: Don't run a full check" - |> flag "--lazy-mode" (enum ["fs", `Fs; "ide", `Ide]) - ~doc:"EXPERIMENTAL: Which type of lazy mode to use: fs or ide (default: fs, implies --lazy)" -) - -let input_file_flag verb prev = CommandSpec.ArgSpec.( - prev - |> flag "--input-file" string - ~doc:("File containing list of files to " ^ verb ^ ", one per line. 
If -, list of files is " ^ - "read from the standard input.") -) + let lazy_mode = + if lazy_ && lazy_mode = None then + Some Options.LAZY_MODE_FILESYSTEM + (* --lazy === --lazy-mode fs *) + else + lazy_mode + in + main lazy_mode + +let lazy_flags prev = + CommandSpec.ArgSpec.( + prev + |> collect collect_lazy_flags + |> flag "--lazy" no_arg ~doc:"Don't run a full check. Shorthand for `--lazy-mode fs`" + |> flag + "--lazy-mode" + (enum + [ + ("fs", Options.LAZY_MODE_FILESYSTEM); + ("ide", Options.LAZY_MODE_IDE); + ("watchman", Options.LAZY_MODE_WATCHMAN); + ("none", Options.NON_LAZY_MODE); + ]) + ~doc: + ( "Which lazy mode to use: 'fs', 'watchman', 'ide' or 'none'. Use this flag to " + ^ "override the lazy mode set in the .flowconfig (which defaults to 'none' if not set)" + ) + ~env:"FLOW_LAZY_MODE") + +let input_file_flag verb prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--input-file" + string + ~doc: + ( "File containing list of files to " + ^ verb + ^ ", one per line. If -, list of files is " + ^ "read from the standard input." )) type shared_mem_params = { shm_dirs: string option; @@ -192,162 +251,278 @@ type shared_mem_params = { shm_log_level: int option; } -let collect_shm_flags main - shm_dirs shm_min_avail shm_dep_table_pow shm_hash_table_pow shm_log_level = - main { shm_dirs; shm_min_avail; shm_dep_table_pow; shm_hash_table_pow; shm_log_level; } - -let shm_flags prev = CommandSpec.ArgSpec.( - prev - |> collect collect_shm_flags - |> flag "--sharedmemory-dirs" string - ~doc:"Directory in which to store shared memory heap (default: /dev/shm/)" - |> flag "--sharedmemory-minimum-available" int - ~doc:"Flow will only use a filesystem for shared memory if it has at \ - least these many bytes available (default: 536870912 - which is 512MB)" - |> flag "--sharedmemory-dep-table-pow" int - ~doc:"The exponent for the size of the shared memory dependency table. \ - The default is 17, implying a size of 2^17 bytes" - |> flag "--sharedmemory-hash-table-pow" int - ~doc:"The exponent for the size of the shared memory hash table. \ - The default is 19, implying a size of 2^19 bytes" - |> flag "--sharedmemory-log-level" int - ~doc:"The logging level for shared memory statistics. \ - 0=none, 1=some" -) +let collect_shm_flags + main shm_dirs shm_min_avail shm_dep_table_pow shm_hash_table_pow shm_log_level = + main { shm_dirs; shm_min_avail; shm_dep_table_pow; shm_hash_table_pow; shm_log_level } + +let shm_flags prev = + CommandSpec.ArgSpec.( + prev + |> collect collect_shm_flags + |> flag + "--sharedmemory-dirs" + string + ~doc:"Directory in which to store shared memory heap (default: /dev/shm/)" + |> flag + "--sharedmemory-minimum-available" + int + ~doc: + "Flow will only use a filesystem for shared memory if it has at least these many bytes available (default: 536870912 - which is 512MB)" + |> flag + "--sharedmemory-dep-table-pow" + int + ~doc: + "The exponent for the size of the shared memory dependency table. The default is 17, implying a size of 2^17 bytes" + |> flag + "--sharedmemory-hash-table-pow" + int + ~doc: + "The exponent for the size of the shared memory hash table. The default is 19, implying a size of 2^19 bytes" + |> flag + "--sharedmemory-log-level" + int + ~doc:"The logging level for shared memory statistics. 
0=none, 1=some") let shm_config shm_flags flowconfig = - let dep_table_pow = Option.value shm_flags.shm_dep_table_pow - ~default:(FlowConfig.shm_dep_table_pow flowconfig) in - let hash_table_pow = Option.value shm_flags.shm_hash_table_pow - ~default:(FlowConfig.shm_hash_table_pow flowconfig) in - let shm_dirs = Option.value_map shm_flags.shm_dirs - ~default:(FlowConfig.shm_dirs flowconfig) - ~f:(Str.split (Str.regexp ",")) - |> List.map Path.(fun dir -> dir |> make |> to_string) in - let shm_min_avail = Option.value shm_flags.shm_min_avail - ~default:(FlowConfig.shm_min_avail flowconfig) in - let log_level = Option.value shm_flags.shm_log_level - ~default:(FlowConfig.shm_log_level flowconfig) in - { SharedMem_js. - global_size = FlowConfig.shm_global_size flowconfig; + let dep_table_pow = + Option.value shm_flags.shm_dep_table_pow ~default:(FlowConfig.shm_dep_table_pow flowconfig) + in + let hash_table_pow = + Option.value shm_flags.shm_hash_table_pow ~default:(FlowConfig.shm_hash_table_pow flowconfig) + in + let shm_dirs = + Option.value_map + shm_flags.shm_dirs + ~default:(FlowConfig.shm_dirs flowconfig) + ~f:(Str.split (Str.regexp ",")) + |> Core_list.map ~f:Path.(make %> to_string) + in + let shm_min_avail = + Option.value shm_flags.shm_min_avail ~default:(FlowConfig.shm_min_avail flowconfig) + in + let log_level = + Option.value shm_flags.shm_log_level ~default:(FlowConfig.shm_log_level flowconfig) + in + let sample_rate = 0.0 in + { + SharedMem_js.global_size = 0; + (* we don't use GlobalStorage, don't waste space on it *) heap_size = FlowConfig.shm_heap_size flowconfig; dep_table_pow; hash_table_pow; shm_dirs; shm_min_avail; log_level; + sample_rate; } -let from_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--from" (optional string) - ~doc:"Specify client (for use by editor plugins)" -) - -let strip_root_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--strip-root" no_arg - ~doc:"Print paths without the root" -) - -let path_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--path" (optional string) - ~doc:"Specify (fake) path to file when reading data from stdin" -) - -let autostop_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--autostop" no_arg - ~doc:"" (* empty to omit it from --help *) -) +let from_flag = + let collector main from = + let from = + match from with + | Some from -> Some from + | None -> + Core_result.( + let parent_cmdline = + Proc.get_proc_stat (Unix.getpid ()) + >>= fun proc_stat -> + let ppid = proc_stat.Proc.ppid in + Proc.get_proc_stat ppid + >>| (fun parent_proc_stat -> String.trim parent_proc_stat.Proc.cmdline) + in + (match parent_cmdline with + | Ok cmdline -> Some ("parent cmdline: " ^ cmdline) + | Error _ -> None)) + in + FlowEventLogger.set_from from; + main + in + fun prev -> + CommandSpec.ArgSpec.( + prev + |> collect collector + |> flag + "--from" + (optional string) + ~doc:"Specify who is calling this CLI command (used by logging)") + +let strip_root_flag prev = + CommandSpec.ArgSpec.(prev |> flag "--strip-root" no_arg ~doc:"Print paths without the root") + +let wait_for_recheck_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--wait-for-recheck" + (optional bool) + ~doc: + ( "If the server is rechecking, wait for it to complete rather than run sooner using " + ^ "outdated data" )) + +let path_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--path" + (optional string) + ~doc:"Specify (fake) path to file when reading data from stdin") + +let autostop_flag prev = + CommandSpec.ArgSpec.(prev |> flag "--autostop" no_arg 
~doc:"" (* empty to omit it from --help *)) let verbose_flags = let collector main verbose indent depth enabled_during_flowlib = let opt_verbose = - if verbose || indent || depth != None - then Some { Verbose. - indent = if indent then 2 else 0; - depth = (match depth with - | Some n when n >= 0 -> n - | _ -> 1); - enabled_during_flowlib; - } - else None + if verbose || indent || depth != None then + Some + { + Verbose.indent = + ( if indent then + 2 + else + 0 ); + depth = + (match depth with + | Some n when n >= 0 -> n + | _ -> 1); + enabled_during_flowlib; + } + else + None in main opt_verbose in - fun prev -> CommandSpec.ArgSpec.( + fun prev -> + CommandSpec.ArgSpec.( + prev + |> collect collector + |> flag "--verbose" no_arg ~doc:"Print verbose info during typecheck" + |> flag + "--verbose-indent" + no_arg + ~doc:"Indent verbose info during typecheck (implies --verbose)" + |> flag + "--verbose-depth" + int + ~doc:"Recursively print types up to specified depth (default 1, implies --verbose)" + |> flag "--verbose-flowlib" no_arg ~doc:"Print verbose info while initializing the flowlib") + +let quiet_flag prev = + CommandSpec.ArgSpec.(prev |> flag "--quiet" no_arg ~doc:"Suppress output about server startup") + +type on_mismatch_behavior = + | Choose_newest + | Stop_server + | Restart_client + | Error_client + +let on_mismatch_flag prev = + CommandSpec.ArgSpec.( prev - |> collect collector - |> flag "--verbose" no_arg - ~doc:"Print verbose info during typecheck" - |> flag "--verbose-indent" no_arg - ~doc:"Indent verbose info during typecheck (implies --verbose)" - |> flag "--verbose-depth" int - ~doc:"Recursively print types up to specified depth (default 1, implies --verbose)" - |> flag "--verbose-flowlib" no_arg - ~doc:"Print verbose info while initializing the flowlib" - ) - -let quiet_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--quiet" no_arg - ~doc:"Suppress output about server startup" -) - -let root_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--root" string - ~doc:"Project root directory containing the .flowconfig" -) - -let ignore_version_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--ignore-version" no_arg - ~doc:"Ignore the version constraint in .flowconfig" -) + |> flag + "--on-mismatch" + (required + ~default:Choose_newest + (enum + [ + ("choose-newest", Choose_newest); + ("stop-server", Stop_server); + ("restart-client", Restart_client); + ("error-client", Error_client); + ])) + ~doc: + "What to do when the client and server are different versions (choose-newest, stop-server, restart-client, error-client) (default: choose-newest)") + +let root_flag prev = + CommandSpec.ArgSpec.( + prev |> flag "--root" string ~doc:"Project root directory containing the .flowconfig") + +let ignore_version_flag prev = + CommandSpec.ArgSpec.( + prev |> flag "--ignore-version" no_arg ~doc:"Ignore the version constraint in .flowconfig") let log_file_flags = let normalize log_file = let dirname = Path.make (Filename.dirname log_file) in let basename = Filename.basename log_file in - Path.concat dirname basename - |> Path.to_string + Path.concat dirname basename |> Path.to_string in - let collector main server_log_file monitor_log_file = main (Option.map ~f:normalize server_log_file) (Option.map ~f:normalize monitor_log_file) in + fun prev -> + CommandSpec.ArgSpec.( + prev + |> collect collector + |> flag + "--log-file" + string + ~doc:"Path to log file (default: /tmp/flow/.log)" + ~env:"FLOW_LOG_FILE" + |> flag + "--monitor-log-file" + string + ~doc:"Path to log file 
(default: /tmp/flow/.monitor_log)" + ~env:"FLOW_MONITOR_LOG_FILE") + +let flowconfig_multi_error rev_errs = + let msg = + rev_errs + |> Core_list.map ~f:(fun (ln, msg) -> spf ".flowconfig:%d %s" ln msg) + |> String.concat "\n" + in + FlowExitStatus.(exit ~msg Invalid_flowconfig) - fun prev -> CommandSpec.ArgSpec.( - prev - |> collect collector - |> flag "--log-file" string - ~doc:"Path to log file (default: /tmp/flow/.log)" - ~env:"FLOW_LOG_FILE" - |> flag "--monitor-log-file" string - ~doc:"Path to log file (default: /tmp/flow/.monitor_log)" - ~env:"FLOW_MONITOR_LOG_FILE" - ) +let flowconfig_multi_warn rev_errs = + let msg = + rev_errs + |> Core_list.map ~f:(fun (ln, msg) -> spf ".flowconfig:%d %s" ln msg) + |> String.concat "\n" + in + prerr_endline msg + +let read_config_or_exit ?(enforce_warnings = true) ?allow_cache flowconfig_path = + match FlowConfig.get ?allow_cache flowconfig_path with + | Ok (config, []) -> config + | Ok (config, warnings) -> + if enforce_warnings then + flowconfig_multi_error warnings + else + flowconfig_multi_warn warnings; + config + | Error err -> flowconfig_multi_error [err] let check_version required_version = match required_version with | None -> Ok () | Some version_constraint -> - if Semver.satisfies version_constraint Flow_version.version then + (* For the purposes of checking whether the *currently-running* version of Flow is compatible + with the given project, we'll include pre-releases. For example, this means that 0.61.0-beta + is compatible with >0.60.0, because it presumably implements the minimum necessary features + of 0.60.0. + + This is subtly different than determining which version of Flow to run in the first place, + like when npm/yarn is solving the `flow-bin` constraint. In that case, we do not want + >0.60.0 to opt into pre-releases automatically. Those sorts of checks should not pass + `~includes_prereleases`. + + So, if you've explicitly run v0.61.0-beta, and the flowconfig says `>0.60.0`, we'll allow it; + but if we were looking at the flowconfig to decide which version to run, you should not + choose the beta. *) + if Semver.satisfies ~include_prereleases:true version_constraint Flow_version.version then Ok () else - let msg = Utils_js.spf - "Wrong version of Flow. The config specifies version %s but this is version %s" - version_constraint - Flow_version.version + let msg = + Utils_js.spf + "Wrong version of Flow. 
The config specifies version %s but this is version %s" + version_constraint + Flow_version.version in Error msg let assert_version flowconfig = let required_version = FlowConfig.required_version flowconfig in - match (check_version required_version) with + match check_version required_version with | Ok () -> () | Error msg -> FlowExitStatus.(exit ~msg Invalid_flowconfig) @@ -364,18 +539,24 @@ type flowconfig_params = { } let list_of_string_arg = function -| None -> [] -| Some arg_str -> Str.split (Str.regexp ",") arg_str + | None -> [] + | Some arg_str -> Str.split (Str.regexp ",") arg_str -let collect_flowconfig_flags main ignores_str untyped_str declarations_str includes_str lib_str - lints_str = +let collect_flowconfig_flags + main ignores_str untyped_str declarations_str includes_str lib_str lints_str = let ignores = list_of_string_arg ignores_str in let untyped = list_of_string_arg untyped_str in let declarations = list_of_string_arg declarations_str in let includes = list_of_string_arg includes_str in let libs = list_of_string_arg lib_str in let raw_lint_severities = list_of_string_arg lints_str in - main { ignores; includes; libs; raw_lint_severities; untyped; declarations; } + main { ignores; includes; libs; raw_lint_severities; untyped; declarations } + +let remove_exclusion pattern = + if String_utils.string_starts_with pattern "!" then + String.sub pattern 1 (String.length pattern - 1) + else + pattern let file_options = let default_lib_dir ~no_flowlib tmp_dir = @@ -388,83 +569,88 @@ let file_options = FlowExitStatus.(exit ~msg Could_not_find_flowconfig) in let ignores_of_arg root patterns extras = - let patterns = List.rev_append extras patterns in - List.map (fun s -> - let root = Path.to_string root - |> Sys_utils.normalize_filename_dir_sep in - let reg = s - |> Str.split_delim Files.project_root_token - |> String.concat root - |> Str.regexp in - (s, reg) - ) patterns + let patterns = Core_list.rev_append extras patterns in + Core_list.map + ~f:(fun s -> + let root = Path.to_string root |> Sys_utils.normalize_filename_dir_sep in + let reg = + s + |> remove_exclusion + |> Str.split_delim Files.project_root_token + |> String.concat root + |> Str.regexp + in + (s, reg)) + patterns in let includes_of_arg ~root ~lib_paths paths = (* Explicitly included paths are always added to the path_matcher *) - let path_matcher = List.fold_left (fun acc path -> - Path_matcher.add acc (Files.make_path_absolute root path) - ) Path_matcher.empty paths in + let path_matcher = + Core_list.fold_left + ~f:(fun acc path -> Path_matcher.add acc (Files.make_path_absolute root path)) + ~init:Path_matcher.empty + paths + in (* Implicitly included paths are added only if they're not already being watched *) let path_len path = path |> Path.to_string |> String.length in let implicitly_included_paths_sorted = - List.sort (fun a b -> (path_len a) - (path_len b)) (root::lib_paths) (* Shortest path first *) + Core_list.sort ~cmp:(fun a b -> path_len a - path_len b) (root :: lib_paths) + (* Shortest path first *) in - List.fold_left (fun acc path -> - (* If this include is already covered by an explicit include or a shorter implicit include, - * then skip it *) - if Path_matcher.matches acc (Path.to_string path) - then acc - else Path_matcher.add acc path - ) path_matcher implicitly_included_paths_sorted + Core_list.fold_left + ~f:(fun acc path -> + (* If this include is already covered by an explicit include or a shorter implicit include, + * then skip it *) + if Path_matcher.matches acc (Path.to_string path) 
then + acc + else + Path_matcher.add acc path) + ~init:path_matcher + implicitly_included_paths_sorted in let lib_paths ~root flowconfig extras = let flowtyped_path = Files.get_flowtyped_path root in let has_explicit_flowtyped_lib = ref false in let config_libs = - List.fold_right (fun lib abs_libs -> - let abs_lib = Files.make_path_absolute root lib in - (** - * "flow-typed" is always included in the libs list for convenience, - * but there's no guarantee that it exists on the filesystem. - *) - if abs_lib = flowtyped_path then has_explicit_flowtyped_lib := true; - abs_lib::abs_libs - ) (FlowConfig.libs flowconfig) [] + Core_list.fold_right + ~f:(fun lib abs_libs -> + let abs_lib = Files.make_path_absolute root lib in + (* + * "flow-typed" is always included in the libs list for convenience, + * but there's no guarantee that it exists on the filesystem. + *) + if abs_lib = flowtyped_path then has_explicit_flowtyped_lib := true; + abs_lib :: abs_libs) + (FlowConfig.libs flowconfig) + ~init:[] in let config_libs = - if !has_explicit_flowtyped_lib = false - && (Sys.file_exists (Path.to_string flowtyped_path)) - then flowtyped_path::config_libs - else config_libs + if !has_explicit_flowtyped_lib = false && Sys.file_exists (Path.to_string flowtyped_path) + then + flowtyped_path :: config_libs + else + config_libs in match extras with | [] -> config_libs - | _ -> config_libs @ (List.map (Files.make_path_absolute root) extras) + | _ -> config_libs @ Core_list.map ~f:(Files.make_path_absolute root) extras in fun ~root ~no_flowlib ~temp_dir ~includes ~ignores ~libs ~untyped ~declarations flowconfig -> let default_lib_dir = let no_flowlib = no_flowlib || FlowConfig.no_flowlib flowconfig in Some (default_lib_dir ~no_flowlib temp_dir) in - let ignores = ignores_of_arg - root - (FlowConfig.ignores flowconfig) - ignores in - let untyped = ignores_of_arg - root - (FlowConfig.untyped flowconfig) - untyped in - let declarations = ignores_of_arg - root - (FlowConfig.declarations flowconfig) - declarations in + let ignores = ignores_of_arg root (FlowConfig.ignores flowconfig) ignores in + let untyped = ignores_of_arg root (FlowConfig.untyped flowconfig) untyped in + let declarations = ignores_of_arg root (FlowConfig.declarations flowconfig) declarations in let lib_paths = lib_paths ~root flowconfig libs in let includes = includes - |> List.rev_append (FlowConfig.includes flowconfig) - |> includes_of_arg ~root ~lib_paths in - { Files. 
- default_lib_dir; + |> Core_list.rev_append (FlowConfig.includes flowconfig) + |> includes_of_arg ~root ~lib_paths + in + { + Files.default_lib_dir; ignores; untyped; declarations; @@ -475,145 +661,164 @@ let file_options = node_resolver_dirnames = FlowConfig.node_resolver_dirnames flowconfig; } -let ignore_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--ignore" (optional string) - ~doc:"Specify one or more ignore patterns, comma separated" -) - -let untyped_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--untyped" (optional string) - ~doc:"Specify one or more patterns, comma separated, for files to treat as untyped" -) - -let declaration_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--declaration" (optional string) - ~doc:"Specify one or more patterns, comma separated, for files to treat as declarations" -) - -let include_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--include" (optional string) - ~doc:"Specify one or more include patterns, comma separated" -) - -let lib_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--lib" (optional string) - ~doc:"Specify one or more lib files/directories, comma separated" -) - -let lints_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--lints" (optional string) - ~doc:"Specify one or more lint rules, comma separated" -) - -let no_restart_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--no-auto-restart" no_arg - ~doc:"If the server dies, do not try and restart it; just exit" -) - -let flowconfig_flags prev = CommandSpec.ArgSpec.( - prev - |> collect collect_flowconfig_flags - |> ignore_flag - |> untyped_flag - |> declaration_flag - |> include_flag - |> lib_flag - |> lints_flag -) +let ignore_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--ignore" + (optional string) + ~doc:"Specify one or more ignore patterns, comma separated") + +let untyped_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--untyped" + (optional string) + ~doc:"Specify one or more patterns, comma separated, for files to treat as untyped") + +let declaration_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--declaration" + (optional string) + ~doc:"Specify one or more patterns, comma separated, for files to treat as declarations") + +let include_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--include" + (optional string) + ~doc:"Specify one or more include patterns, comma separated") + +let lib_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--lib" + (optional string) + ~doc:"Specify one or more lib files/directories, comma separated") + +let lints_flag prev = + CommandSpec.ArgSpec.( + prev |> flag "--lints" (optional string) ~doc:"Specify one or more lint rules, comma separated") + +let no_restart_flag prev = + CommandSpec.ArgSpec.( + prev + |> flag + "--no-auto-restart" + no_arg + ~doc:"If the server dies, do not try and restart it; just exit") + +let flowconfig_flags prev = + CommandSpec.ArgSpec.( + prev + |> collect collect_flowconfig_flags + |> ignore_flag + |> untyped_flag + |> declaration_flag + |> include_flag + |> lib_flag + |> lints_flag) type connect_params = { - from : string; - retries : int; - retry_if_init : bool; - timeout : int option; - no_auto_start : bool; - autostop : bool; - lazy_mode : Options.lazy_mode option; - temp_dir : string option; - shm_flags : shared_mem_params; - ignore_version : bool; - quiet : bool; + retries: int; + retry_if_init: bool; + timeout: int option; + no_auto_start: bool; + autostop: bool; + lazy_mode: Options.lazy_mode option; + temp_dir: string option; + 
shm_flags: shared_mem_params; + ignore_version: bool; + quiet: bool; + on_mismatch: on_mismatch_behavior; } let collect_connect_flags main + lazy_mode timeout retries retry_if_init no_auto_start temp_dir shm_flags - from ignore_version - quiet = + quiet + on_mismatch = let default def = function - | Some x -> x - | None -> def in - FlowEventLogger.set_from from; + | Some x -> x + | None -> def + in (match timeout with | Some n when n <= 0 -> let msg = spf "Timeout must be a positive integer. Got %d" n in FlowExitStatus.(exit ~msg Commandline_usage_error) | _ -> ()); - main { - from = (default "" from); - retries = (default 3 retries); - retry_if_init = (default true retry_if_init); - timeout = timeout; - no_auto_start = no_auto_start; - temp_dir; - autostop = false; - lazy_mode = None; - shm_flags; - ignore_version; - quiet; - } + main + { + retries = default 3 retries; + retry_if_init = default true retry_if_init; + timeout; + no_auto_start; + temp_dir; + autostop = false; + lazy_mode; + shm_flags; + ignore_version; + quiet; + on_mismatch; + } + +let collect_connect_flags_without_lazy main = collect_connect_flags main None + +let connect_flags_with_lazy_collector collector = + CommandSpec.ArgSpec.( + collector + |> flag "--timeout" (optional int) ~doc:"Maximum time to wait, in seconds" + |> flag "--retries" (optional int) ~doc:"Set the number of retries. (default: 3)" + |> flag + "--retry-if-init" + (optional bool) + ~doc:"retry if the server is initializing (default: true)" + |> flag + "--no-auto-start" + no_arg + ~doc:"If the server is not running, do not start it; just exit" + |> temp_dir_flag + |> shm_flags + |> from_flag + |> ignore_version_flag + |> quiet_flag + |> on_mismatch_flag) + +let connect_flags_no_lazy prev = + CommandSpec.ArgSpec.( + prev |> collect collect_connect_flags_without_lazy |> connect_flags_with_lazy_collector) -let connect_flags prev = CommandSpec.ArgSpec.( - prev - |> collect collect_connect_flags - |> flag "--timeout" (optional int) - ~doc:"Maximum time to wait, in seconds" - |> flag "--retries" (optional int) - ~doc:"Set the number of retries. 
(default: 3)" - |> flag "--retry-if-init" (optional bool) - ~doc:"retry if the server is initializing (default: true)" - |> flag "--no-auto-start" no_arg - ~doc:"If the server is not running, do not start it; just exit" - |> temp_dir_flag - |> shm_flags - |> from_flag - |> ignore_version_flag - |> quiet_flag -) +let connect_flags prev = + CommandSpec.ArgSpec.( + prev |> collect collect_connect_flags |> lazy_flags |> connect_flags_with_lazy_collector) (* For commands that take both --quiet and --json or --pretty, make the latter two imply --quiet *) let connect_and_json_flags = let collect_connect_and_json main connect_flags json pretty = - main { connect_flags with - quiet = connect_flags.quiet || json || pretty - } json pretty + main { connect_flags with quiet = connect_flags.quiet || json || pretty } json pretty in fun prev -> - prev - |> CommandSpec.ArgSpec.collect collect_connect_and_json - |> connect_flags - |> json_flags + prev |> CommandSpec.ArgSpec.collect collect_connect_and_json |> connect_flags |> json_flags let server_log_file ~flowconfig_name ~tmp_dir root flowconfig = match FlowConfig.log_file flowconfig with | Some x -> x - | None -> Path.make (Server_files_js.file_of_root ~flowconfig_name "log" ~tmp_dir root) + | None -> Path.make (Server_files_js.log_file ~flowconfig_name ~tmp_dir root) let monitor_log_file ~flowconfig_name ~tmp_dir root = - Path.make (Server_files_js.file_of_root ~flowconfig_name "monitor_log" ~tmp_dir root) + Path.make (Server_files_js.monitor_log_file ~flowconfig_name ~tmp_dir root) module Options_flags = struct type t = { @@ -630,29 +835,33 @@ module Options_flags = struct profile: bool; quiet: bool; saved_state_fetcher: Options.saved_state_fetcher option; + saved_state_force_recheck: bool; saved_state_no_fallback: bool; strip_root: bool; temp_dir: string option; traces: int option; + trust_mode: Options.trust_mode option; + types_first: bool; + abstract_locations: bool; verbose: Verbose.t option; + wait_for_recheck: bool option; weak: bool; + include_suppressions: bool; } end module Base_flags = struct - type t = { - flowconfig_name: string; - } + type t = { flowconfig_name: string } end let parse_lints_flag = let number = let rec number' index acc = function - | [] -> List.rev acc - | head::tail -> number' (index + 1) ((index, head)::acc) tail - in number' 1 [] + | [] -> Core_list.rev acc + | head :: tail -> number' (index + 1) ((index, head) :: acc) tail + in + number' 1 [] in - fun base_settings flag_settings -> let lines = number flag_settings in match LintSettings.of_lines base_settings lines with @@ -662,263 +871,450 @@ let parse_lints_flag = FlowExitStatus.(exit ~msg Commandline_usage_error) let options_flags = - let collect_options_flags main - debug profile all weak traces no_flowlib munge_underscore_members max_workers - include_warnings max_warnings flowconfig_flags verbose strip_root temp_dir quiet - merge_timeout saved_state_fetcher saved_state_no_fallback no_saved_state = + let collect_options_flags + main + debug + profile + all + wait_for_recheck + weak + traces + no_flowlib + munge_underscore_members + max_workers + include_warnings + max_warnings + flowconfig_flags + verbose + strip_root + temp_dir + quiet + merge_timeout + saved_state_fetcher + saved_state_force_recheck + saved_state_no_fallback + no_saved_state + types_first + abstract_locations + include_suppressions + trust_mode = (match merge_timeout with | Some timeout when timeout < 0 -> FlowExitStatus.(exit ~msg:"--merge-timeout must be non-negative" Commandline_usage_error) 
| _ -> ()); - main { Options_flags. - debug; - profile; - all; - weak; - traces; - no_flowlib; - munge_underscore_members; - max_workers; - include_warnings; - max_warnings; - flowconfig_flags; - verbose; - strip_root; - temp_dir; - quiet; - merge_timeout; - saved_state_fetcher; - saved_state_no_fallback; - no_saved_state; - } + main + { + Options_flags.debug; + profile; + all; + wait_for_recheck; + weak; + traces; + no_flowlib; + munge_underscore_members; + max_workers; + include_warnings; + max_warnings; + flowconfig_flags; + verbose; + strip_root; + temp_dir; + quiet; + merge_timeout; + saved_state_fetcher; + saved_state_force_recheck; + saved_state_no_fallback; + no_saved_state; + trust_mode; + types_first; + abstract_locations; + include_suppressions; + } in fun prev -> - let open CommandSpec.ArgSpec in + CommandSpec.ArgSpec.( + prev + |> collect collect_options_flags + |> flag "--debug" no_arg ~doc:"Print debug info during typecheck" + |> profile_flag + |> flag "--all" no_arg ~doc:"Typecheck all files, not just @flow" + |> flag + "--wait-for-recheck" + (optional bool) + ~doc: + "If true, always wait for rechecks to finish before serving commands (default: false)" + |> flag + "--weak" + no_arg + ~doc:"Typecheck with weak inference, assuming dynamic types by default" + |> flag "--traces" (optional int) ~doc:"Outline an error path up to a specified level" + |> flag "--no-flowlib" no_arg ~doc:"Do not include embedded declarations" + |> flag + "--munge-underscore-members" + no_arg + ~doc:"Treat any class member name with a leading underscore as private" + |> flag + "--max-workers" + (optional int) + ~doc:"Maximum number of workers to create (capped by number of cores)" + ~env:"FLOW_MAX_WORKERS" + |> warning_flags + |> flowconfig_flags + |> verbose_flags + |> strip_root_flag + |> temp_dir_flag + |> quiet_flag + |> flag + "--merge-timeout" + int + ~doc: + ( "The maximum time in seconds to attempt to typecheck a file or cycle of files. " + ^ "0 means no timeout (default: 100)" ) + ~env:"FLOW_MERGE_TIMEOUT" + |> flag + "--saved-state-fetcher" + (enum + [ + ("none", Options.Dummy_fetcher); + ("local", Options.Local_fetcher); + ("fb", Options.Fb_fetcher); + ]) + ~doc:"Which saved state fetcher Flow should use (none, local) (default: none)" + |> flag + "--saved-state-force-recheck" + no_arg + ~doc:"Force a lazy server to recheck the changes since the saved state was generated" + |> flag + "--saved-state-no-fallback" + no_arg + ~doc: + "If saved state fails to load, exit (normally fallback is to initialize from scratch)" + |> flag + "--no-saved-state" + no_arg + ~doc:"Do not load from a saved state even if one is available" + |> flag "--types-first" no_arg ~doc:"[EXPERIMENTAL] types-first architecture" + |> flag + "--abstract-locations" + no_arg + ~doc: + "[EXPERIMENTAL] Use abstract locations to improve recheck times. 
Has no effect unless types-first is also enabled" + |> flag + "--include-suppressed" + no_arg + ~doc:"Ignore any `suppress_comment` lines in .flowconfig" + |> flag + "--trust-mode" + (optional + (enum + [ + ("check", Options.CheckTrust); + ("silent", Options.SilentTrust); + ("none", Options.NoTrust); + ])) + ~doc:"") + +let flowconfig_name_flag prev = + CommandSpec.ArgSpec.( prev - |> collect collect_options_flags - |> flag "--debug" no_arg - ~doc:"Print debug info during typecheck" - |> profile_flag - |> flag "--all" no_arg - ~doc:"Typecheck all files, not just @flow" - |> flag "--weak" no_arg - ~doc:"Typecheck with weak inference, assuming dynamic types by default" - |> flag "--traces" (optional int) - ~doc:"Outline an error path up to a specified level" - |> flag "--no-flowlib" no_arg - ~doc:"Do not include embedded declarations" - |> flag "--munge-underscore-members" no_arg - ~doc:"Treat any class member name with a leading underscore as private" - |> flag "--max-workers" (optional int) - ~doc:"Maximum number of workers to create (capped by number of cores)" - ~env:"FLOW_MAX_WORKERS" - |> warning_flags - |> flowconfig_flags - |> verbose_flags - |> strip_root_flag - |> temp_dir_flag - |> quiet_flag - |> flag "--merge-timeout" int - ~doc:("The maximum time in seconds to attempt to typecheck a file or cycle of files. " ^ - "0 means no timeout (default: 100)") - ~env:"FLOW_MERGE_TIMEOUT" - |> flag "--saved-state-fetcher" - (enum [ - "none", Options.Dummy_fetcher; - "local", Options.Local_fetcher; - "fb", Options.Fb_fetcher; - ]) - ~doc:("Which saved state fetcher Flow should use (none, local) (default: none)") - |> flag "--saved-state-no-fallback" no_arg - ~doc:"If saved state fails to load, exit (normally fallback is to initialize from scratch)" - |> flag "--no-saved-state" no_arg - ~doc:"Do not load from a saved state even if one is available" + |> flag + "--flowconfig-name" + (required ~default:Server_files_js.default_flowconfig_name string) + ~doc: + (Printf.sprintf + "Set the name of the flow configuration file. (default: %s)" + Server_files_js.default_flowconfig_name)) let base_flags = - let collect_base_flags main flowconfig_name = - main { Base_flags. - flowconfig_name; - } - in - fun prev -> - let open CommandSpec.ArgSpec in - prev - |> collect collect_base_flags - |> flag "--flowconfig-name" (required ~default:Server_files_js.default_flowconfig_name string) - ~doc:(Printf.sprintf "Set the name of the flow configuration file. (default: %s)" - Server_files_js.default_flowconfig_name) + let collect_base_flags main flowconfig_name = main { Base_flags.flowconfig_name } in + (fun prev -> CommandSpec.ArgSpec.(prev |> collect collect_base_flags |> flowconfig_name_flag)) let file_watcher_flag prev = - let open CommandSpec.ArgSpec in - prev - |> flag "--file-watcher" - (enum [ - "none", Options.NoFileWatcher; - "dfind", Options.DFind; - "watchman", Options.Watchman; - ]) - ~doc:("Which file watcher Flow should use (none, dfind, watchman). " ^ - "Flow will ignore file system events if this is set to none. (default: dfind)") - |> flag "--file-watcher-debug" no_arg - ~doc:("Enable debug logging for the file watcher. This is very noisy") + CommandSpec.ArgSpec.( + prev + |> flag + "--file-watcher" + (enum + [ + ("none", Options.NoFileWatcher); + ("dfind", Options.DFind); + ("watchman", Options.Watchman); + ]) + ~doc: + ( "Which file watcher Flow should use (none, dfind, watchman). " + ^ "Flow will ignore file system events if this is set to none. 
(default: dfind)" ) + |> flag + "--file-watcher-debug" + no_arg + ~doc:"Enable debug logging for the file watcher. This is very noisy") (* For commands that take both --quiet and --json or --pretty, make the latter two imply --quiet *) let options_and_json_flags = let collect_options_and_json main options_flags json pretty = - main { options_flags with - Options_flags.quiet = options_flags.Options_flags.quiet || json || pretty - } json pretty + main + { + options_flags with + Options_flags.quiet = options_flags.Options_flags.quiet || json || pretty; + } + json + pretty in fun prev -> + prev |> CommandSpec.ArgSpec.collect collect_options_and_json |> options_flags |> json_flags + +let json_version_flag prev = + CommandSpec.ArgSpec.( prev - |> CommandSpec.ArgSpec.collect collect_options_and_json - |> options_flags - |> json_flags - -let json_version_flag prev = CommandSpec.ArgSpec.( - prev - |> flag "--json-version" - (enum [ - "1", Errors.Json_output.JsonV1; - "2", Errors.Json_output.JsonV2; - ]) - ~doc:"The version of the JSON format (defaults to 1)" -) - -let make_options ~flowconfig_name ~flowconfig ~lazy_mode ~root (options_flags: Options_flags.t) = + |> flag + "--json-version" + (enum [("1", Errors.Json_output.JsonV1); ("2", Errors.Json_output.JsonV2)]) + ~doc:"The version of the JSON format (defaults to 1)") + +(* If a command uses this flag, then it will automatically exec systemd-run (if systemd-run is + * available). This can add a couple hundred ms to the start up time of the command. Only commands + * that are resource-intensive (or spawn resource intensive processes) and probably should run in + * cgroups should use this flag. + *) +let no_cgroup_flag = + (* We only trigger this behavior if we're on Unix and systemd-run is in the path *) + let get_systemd_binary () = + if Sys.unix then + let ic = Unix.open_process_in "which systemd-run 2> /dev/null" in + let systemd_exe = (try Some (input_line ic) with _ -> None) in + if Unix.close_process_in ic = Unix.WEXITED 0 then + systemd_exe + else + None + else + None + in + (* Sometimes systemd-run is available but we can't use it. For example, the systemd might not have + a proper working user session, so we might not be able to run commands via systemd-run as a + user process. Notably, `--user --scope` is broken under cgroupv2 in systemd < 238, and exits + code 1 (https://github.com/facebook/flow/issues/8012). *) + let can_run_systemd () = + (* Use `timeout` in case it hangs mysteriously. `--quiet` only suppresses stdout. *) + let ic = + Unix.open_process_in "timeout 1 systemd-run --quiet --user --scope -- true 2> /dev/null" + in + (* If all goes right, `systemd-run` will return immediately with exit code 0 and run `true` + * asynchronously as a service. If it goes wrong, it will exit with a non-zero exit code *) + Unix.close_process_in ic = Unix.WEXITED 0 + in + (* Basically re-exec ourselves with the --no-cgroup flag using systemd-run *) + let exec_in_cgroup_if_systemd_available () = + match get_systemd_binary () with + | None -> () + | Some systemd_exe -> + if can_run_systemd () then + let flow_args = + match Array.to_list Sys.argv with + | [] -> failwith "The argv should never be empty. Element 0 should be the executable." 
+ | [_] -> failwith "`flow` doesn't use `--no-cgroup` so we shouldn't hit this" + | flow_exe :: command :: args -> flow_exe :: command :: "--no-cgroup" :: args + in + systemd_exe + :: "--quiet" + :: "--user" + :: "--scope" + :: "--slice" + :: "flow.slice" + :: "--" + :: flow_args + |> Array.of_list + |> Unix.execv systemd_exe + in + let collect_no_cgroup_flag main no_cgroup = + if not no_cgroup then exec_in_cgroup_if_systemd_available (); + main + in + fun prev -> + CommandSpec.ArgSpec.( + prev + |> CommandSpec.ArgSpec.collect collect_no_cgroup_flag + |> flag + "--no-cgroup" + no_arg + ~doc:"Don't automatically run this command in a cgroup (if cgroups are available)") + +let make_options ~flowconfig_name ~flowconfig ~lazy_mode ~root (options_flags : Options_flags.t) = let temp_dir = options_flags.Options_flags.temp_dir |> Option.value ~default:(FlowConfig.temp_dir flowconfig) |> Path.make |> Path.to_string in - let open Options_flags in - let file_options = - let no_flowlib = options_flags.no_flowlib in - let { - includes; - ignores; - libs; - raw_lint_severities=_; - untyped; - declarations; - } = options_flags.flowconfig_flags in - file_options ~root ~no_flowlib ~temp_dir ~includes ~ignores ~libs ~untyped ~declarations - flowconfig - in - let lint_severities = parse_lints_flag - (FlowConfig.lint_severities flowconfig) options_flags.flowconfig_flags.raw_lint_severities - in - let opt_merge_timeout = - (match options_flags.merge_timeout with - | None -> FlowConfig.merge_timeout flowconfig - | Some 0 -> None - | timeout -> timeout) |> Option.map ~f:float_of_int - in - - let expand_project_root_token path root = - let str_root = Path.to_string root - |> Sys_utils.normalize_filename_dir_sep in - Path.to_string path + Options_flags.( + let file_options = + let no_flowlib = options_flags.no_flowlib in + let { includes; ignores; libs; raw_lint_severities = _; untyped; declarations } = + options_flags.flowconfig_flags + in + file_options + ~root + ~no_flowlib + ~temp_dir + ~includes + ~ignores + ~libs + ~untyped + ~declarations + flowconfig + in + let lint_severities = + parse_lints_flag + (FlowConfig.lint_severities flowconfig) + options_flags.flowconfig_flags.raw_lint_severities + in + let opt_merge_timeout = + (match options_flags.merge_timeout with + | None -> FlowConfig.merge_timeout flowconfig + | Some 0 -> None + | timeout -> timeout) + |> Option.map ~f:float_of_int + in + let expand_project_root_token path root = + let str_root = Path.to_string root |> Sys_utils.normalize_filename_dir_sep in + Path.to_string path |> Str.split_delim Files.project_root_token |> String.concat str_root |> Path.make - in - - (* The CLI flag overrides the .flowconfig *) - let opt_saved_state_fetcher = Option.value - options_flags.saved_state_fetcher - ~default:(FlowConfig.saved_state_fetcher flowconfig) - in - - (* We need cancelable rechecks for saved state. This can be deleted when - * experimental.cancelable_rechecks is deleted *) - let opt_enable_cancelable_rechecks = - FlowConfig.enable_cancelable_rechecks flowconfig - || opt_saved_state_fetcher <> Options.Dummy_fetcher - in - - let strict_mode = FlowConfig.strict_mode flowconfig in - { Options. 
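(* Editor's note: the `no_cgroup_flag` added above probes for `systemd-run`, checks that a
   user-scoped transient unit can actually be started, and then re-executes the command inside
   a scope under "flow.slice". The standalone sketch below (not part of this patch) shows the
   same probe / re-exec pattern using only the Unix and Sys modules; the `--no-cgroup` marker
   argument and the "flow.slice" slice name are taken from the code above, everything else is
   illustrative. Link with the unix library. *)

(* Returns the path of systemd-run if it is on PATH, None otherwise. *)
let find_systemd_run () =
  if not Sys.unix then
    None
  else
    let ic = Unix.open_process_in "which systemd-run 2> /dev/null" in
    let exe = (try Some (input_line ic) with End_of_file -> None) in
    if Unix.close_process_in ic = Unix.WEXITED 0 then exe else None

(* Probes whether a user-scoped transient unit can actually be started. *)
let systemd_is_usable () =
  let ic =
    Unix.open_process_in "timeout 1 systemd-run --quiet --user --scope -- true 2> /dev/null"
  in
  Unix.close_process_in ic = Unix.WEXITED 0

(* Re-exec the current process inside a transient scope unless it already carries the
   `--no-cgroup` marker, which prevents an infinite re-exec loop. *)
let () =
  let argv = Array.to_list Sys.argv in
  if not (List.mem "--no-cgroup" argv) then
    match find_systemd_run () with
    | Some systemd_run when systemd_is_usable () ->
      let args =
        (systemd_run :: ["--quiet"; "--user"; "--scope"; "--slice"; "flow.slice"; "--"])
        @ argv
        @ ["--no-cgroup"]
      in
      Unix.execv systemd_run (Array.of_list args)
    | _ -> ()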
- opt_flowconfig_name = flowconfig_name; - opt_lazy_mode = lazy_mode; - opt_root = root; - opt_root_name = FlowConfig.root_name flowconfig; - opt_debug = options_flags.debug; - opt_verbose = options_flags.verbose; - opt_all = options_flags.all || FlowConfig.all flowconfig; - opt_weak = options_flags.weak || FlowConfig.weak flowconfig; - opt_traces = Option.value options_flags.traces ~default:(FlowConfig.traces flowconfig); - opt_quiet = options_flags.Options_flags.quiet; - opt_module_name_mappers = FlowConfig.module_name_mappers flowconfig; - opt_modules_are_use_strict = FlowConfig.modules_are_use_strict flowconfig; - opt_profile = options_flags.profile; - opt_strip_root = options_flags.strip_root; - opt_module = FlowConfig.module_system flowconfig; - opt_module_resolver = Option.value_map (FlowConfig.module_resolver flowconfig) ~default:None - ~f:(fun module_resolver -> - Some (expand_project_root_token module_resolver root)); - opt_munge_underscores = - options_flags.munge_underscore_members || FlowConfig.munge_underscores flowconfig; - opt_temp_dir = temp_dir; - opt_max_workers = - Option.value options_flags.max_workers ~default:(FlowConfig.max_workers flowconfig) - |> min Sys_utils.nbr_procs; - opt_suppress_comments = FlowConfig.suppress_comments flowconfig; - opt_suppress_types = FlowConfig.suppress_types flowconfig; - opt_max_literal_length = FlowConfig.max_literal_length flowconfig; - opt_enable_cancelable_rechecks; - opt_enable_const_params = FlowConfig.enable_const_params flowconfig; - opt_enforce_strict_call_arity = FlowConfig.enforce_strict_call_arity flowconfig; - opt_enforce_well_formed_exports = FlowConfig.enforce_well_formed_exports flowconfig; - opt_esproposal_decorators = FlowConfig.esproposal_decorators flowconfig; - opt_esproposal_export_star_as = FlowConfig.esproposal_export_star_as flowconfig; - opt_facebook_fbt = FlowConfig.facebook_fbt flowconfig; - opt_ignore_non_literal_requires = FlowConfig.ignore_non_literal_requires flowconfig; - opt_include_warnings = - options_flags.include_warnings - || options_flags.max_warnings <> None - || FlowConfig.include_warnings flowconfig; - opt_esproposal_class_static_fields = FlowConfig.esproposal_class_static_fields flowconfig; - opt_esproposal_class_instance_fields = - FlowConfig.esproposal_class_instance_fields flowconfig; - opt_esproposal_optional_chaining = FlowConfig.esproposal_optional_chaining flowconfig; - opt_esproposal_nullish_coalescing = FlowConfig.esproposal_nullish_coalescing flowconfig; - opt_max_header_tokens = FlowConfig.max_header_tokens flowconfig; - opt_haste_name_reducers = FlowConfig.haste_name_reducers flowconfig; - opt_haste_paths_blacklist = FlowConfig.haste_paths_blacklist flowconfig; - opt_haste_paths_whitelist = FlowConfig.haste_paths_whitelist flowconfig; - opt_haste_use_name_reducers = FlowConfig.haste_use_name_reducers flowconfig; - opt_file_options = file_options; - opt_lint_severities = lint_severities; - opt_strict_mode = strict_mode; - opt_merge_timeout; - opt_saved_state_fetcher; - opt_saved_state_no_fallback = options_flags.saved_state_no_fallback; - opt_no_saved_state = options_flags.no_saved_state; - } + in + (* The CLI flag overrides the .flowconfig *) + let opt_saved_state_fetcher = + Option.value + options_flags.saved_state_fetcher + ~default:(FlowConfig.saved_state_fetcher flowconfig) + in + let opt_lazy_mode = + let default = + Option.value (FlowConfig.lazy_mode flowconfig) ~default:Options.NON_LAZY_MODE + in + Option.value lazy_mode ~default + in + let opt_arch = + if 
options_flags.types_first || FlowConfig.types_first flowconfig then + Options.TypesFirst + else + Options.Classic + in + let opt_abstract_locations = + options_flags.abstract_locations || FlowConfig.abstract_locations flowconfig + in + let opt_wait_for_recheck = + Option.value options_flags.wait_for_recheck ~default:(FlowConfig.wait_for_recheck flowconfig) + in + let strict_mode = FlowConfig.strict_mode flowconfig in + { + Options.opt_flowconfig_name = flowconfig_name; + opt_lazy_mode; + opt_root = root; + opt_root_name = FlowConfig.root_name flowconfig; + opt_debug = options_flags.debug; + opt_verbose = options_flags.verbose; + opt_all = options_flags.all || FlowConfig.all flowconfig; + opt_wait_for_recheck; + opt_weak = options_flags.weak || FlowConfig.weak flowconfig; + opt_traces = Option.value options_flags.traces ~default:(FlowConfig.traces flowconfig); + opt_quiet = options_flags.Options_flags.quiet; + opt_module_name_mappers = FlowConfig.module_name_mappers flowconfig; + opt_modules_are_use_strict = FlowConfig.modules_are_use_strict flowconfig; + opt_profile = options_flags.profile; + opt_strip_root = options_flags.strip_root; + opt_module = FlowConfig.module_system flowconfig; + opt_module_resolver = + Option.value_map + (FlowConfig.module_resolver flowconfig) + ~default:None + ~f:(fun module_resolver -> Some (expand_project_root_token module_resolver root)); + opt_munge_underscores = + options_flags.munge_underscore_members || FlowConfig.munge_underscores flowconfig; + opt_temp_dir = temp_dir; + opt_max_workers = + Option.value options_flags.max_workers ~default:(FlowConfig.max_workers flowconfig) + |> min Sys_utils.nbr_procs; + opt_suppress_comments = FlowConfig.suppress_comments flowconfig; + opt_suppress_types = FlowConfig.suppress_types flowconfig; + opt_max_literal_length = FlowConfig.max_literal_length flowconfig; + opt_enable_const_params = FlowConfig.enable_const_params flowconfig; + opt_enabled_rollouts = FlowConfig.enabled_rollouts flowconfig; + opt_enforce_strict_call_arity = FlowConfig.enforce_strict_call_arity flowconfig; + opt_enforce_well_formed_exports = FlowConfig.enforce_well_formed_exports flowconfig; + opt_enforce_well_formed_exports_whitelist = + FlowConfig.enforce_well_formed_exports_whitelist flowconfig; + opt_enums = FlowConfig.enums flowconfig; + opt_esproposal_decorators = FlowConfig.esproposal_decorators flowconfig; + opt_esproposal_export_star_as = FlowConfig.esproposal_export_star_as flowconfig; + opt_exact_by_default = FlowConfig.exact_by_default flowconfig; + opt_facebook_fbs = FlowConfig.facebook_fbs flowconfig; + opt_facebook_fbt = FlowConfig.facebook_fbt flowconfig; + opt_ignore_non_literal_requires = FlowConfig.ignore_non_literal_requires flowconfig; + opt_include_warnings = + options_flags.include_warnings + || options_flags.max_warnings <> None + || FlowConfig.include_warnings flowconfig; + opt_esproposal_class_static_fields = FlowConfig.esproposal_class_static_fields flowconfig; + opt_esproposal_class_instance_fields = FlowConfig.esproposal_class_instance_fields flowconfig; + opt_esproposal_optional_chaining = FlowConfig.esproposal_optional_chaining flowconfig; + opt_esproposal_nullish_coalescing = FlowConfig.esproposal_nullish_coalescing flowconfig; + opt_max_header_tokens = FlowConfig.max_header_tokens flowconfig; + opt_haste_module_ref_prefix = FlowConfig.haste_module_ref_prefix flowconfig; + opt_haste_name_reducers = FlowConfig.haste_name_reducers flowconfig; + opt_haste_paths_blacklist = FlowConfig.haste_paths_blacklist flowconfig; + 
opt_haste_paths_whitelist = FlowConfig.haste_paths_whitelist flowconfig; + opt_haste_use_name_reducers = FlowConfig.haste_use_name_reducers flowconfig; + opt_file_options = file_options; + opt_lint_severities = lint_severities; + opt_lsp_code_actions = FlowConfig.lsp_code_actions flowconfig; + opt_strict_mode = strict_mode; + opt_merge_timeout; + opt_saved_state_fetcher; + opt_saved_state_force_recheck = options_flags.saved_state_force_recheck; + opt_saved_state_no_fallback = options_flags.saved_state_no_fallback; + opt_no_saved_state = options_flags.no_saved_state; + opt_arch; + opt_abstract_locations; + opt_cache_direct_dependents = FlowConfig.cache_direct_dependents flowconfig; + opt_allow_skip_direct_dependents = FlowConfig.allow_skip_direct_dependents flowconfig; + opt_include_suppressions = options_flags.include_suppressions; + opt_trust_mode = + Option.value options_flags.trust_mode ~default:(FlowConfig.trust_mode flowconfig); + opt_recursion_limit = FlowConfig.recursion_limit flowconfig; + opt_max_files_checked_per_worker = FlowConfig.max_files_checked_per_worker flowconfig; + opt_type_asserts = FlowConfig.type_asserts flowconfig; + }) let make_env flowconfig_name connect_flags root = let flowconfig_path = Server_files_js.config_file flowconfig_name root in - let flowconfig = FlowConfig.get flowconfig_path in + let flowconfig = read_config_or_exit flowconfig_path in let normalize dir = Path.(dir |> make |> to_string) in - let tmp_dir = Option.value_map - ~f:normalize - ~default:(FlowConfig.temp_dir flowconfig) - connect_flags.temp_dir in - let shm_dirs = Option.map - ~f:(fun dirs -> dirs |> Str.split (Str.regexp ",") |> List.map normalize) - connect_flags.shm_flags.shm_dirs in - let log_file = - Path.to_string (server_log_file ~flowconfig_name ~tmp_dir root flowconfig) in + let tmp_dir = + Option.value_map ~f:normalize ~default:(FlowConfig.temp_dir flowconfig) connect_flags.temp_dir + in + let shm_dirs = + Option.map + ~f:(Str.split (Str.regexp ",") %> Core_list.map ~f:normalize) + connect_flags.shm_flags.shm_dirs + in + let log_file = Path.to_string (server_log_file ~flowconfig_name ~tmp_dir root flowconfig) in let retries = connect_flags.retries in - let expiry = match connect_flags.timeout with - | None -> None - | Some n -> Some (Unix.gettimeofday () +. float n) + let expiry = + match connect_flags.timeout with + | None -> None + | Some n -> Some (Unix.gettimeofday () +. float n) + in + let rerun_on_mismatch = + match connect_flags.on_mismatch with + | Choose_newest + | Restart_client -> + true + | Stop_server + | Error_client -> + false in - { CommandConnect. - root; + { + CommandConnect.root; autostart = not connect_flags.no_auto_start; lazy_mode = connect_flags.lazy_mode; retries; @@ -935,95 +1331,173 @@ let make_env flowconfig_name connect_flags root = emoji = FlowConfig.emoji flowconfig; quiet = connect_flags.quiet; flowconfig_name; + rerun_on_mismatch; } let connect ~flowconfig_name ~client_handshake connect_flags root = - let env = make_env flowconfig_name connect_flags root - in + let env = make_env flowconfig_name connect_flags root in CommandConnect.connect ~flowconfig_name ~client_handshake env let rec search_for_root config start recursion_limit : Path.t option = - if start = Path.parent start then None (* Reach fs root, nothing to do. 
*) - else if Path.file_exists (Path.concat start config) then Some start - else if recursion_limit <= 0 then None - else search_for_root config (Path.parent start) (recursion_limit - 1) + if start = Path.parent start then + None + (* Reach fs root, nothing to do. *) + else if Path.file_exists (Path.concat start config) then + Some start + else if recursion_limit <= 0 then + None + else + search_for_root config (Path.parent start) (recursion_limit - 1) (* Given a valid file or directory, find a valid Flow root directory *) (* NOTE: exits on invalid file or .flowconfig not found! *) let guess_root flowconfig_name dir_or_file = - let dir_or_file = match dir_or_file with - | Some dir_or_file -> dir_or_file - | None -> "." in - if not (Sys.file_exists dir_or_file) then ( - let msg = spf - "Could not find file or directory %s; canceling \ - search for %s.\nSee \"flow init --help\" for more info" - dir_or_file flowconfig_name in + let dir_or_file = + match dir_or_file with + | Some dir_or_file -> dir_or_file + | None -> "." + in + if not (Sys.file_exists dir_or_file) then + let msg = + spf + "Could not find file or directory %s; canceling search for %s.\nSee \"flow init --help\" for more info" + dir_or_file + flowconfig_name + in FlowExitStatus.(exit ~msg Could_not_find_flowconfig) - ) else ( - let dir = if Sys.is_directory dir_or_file - then dir_or_file - else Filename.dirname dir_or_file in + else + let dir = + if Sys.is_directory dir_or_file then + dir_or_file + else + Filename.dirname dir_or_file + in match search_for_root flowconfig_name (Path.make dir) 50 with | Some root -> - FlowEventLogger.set_root (Some (Path.to_string root)); - root + FlowEventLogger.set_root (Some (Path.to_string root)); + root | None -> - let msg = spf - "Could not find a %s in %s or any \ - of its parent directories.\nSee \"flow init --help\" for more info\n%!" - flowconfig_name dir in + let msg = + spf + "Could not find a %s in %s or any of its parent directories.\nSee \"flow init --help\" for more info\n%!" + flowconfig_name + dir + in + FlowExitStatus.(exit ~msg Could_not_find_flowconfig) + +(* Favor the root argument, over the input file, over the current directory + as the place to begin searching for the root. *) +let find_a_root ?input ~base_flags root_arg = + let flowconfig_name = Base_flags.(base_flags.flowconfig_name) in + guess_root + flowconfig_name + (match (root_arg, input) with + | (Some provided_root, _) -> Some provided_root + | (None, Some provided_input) -> File_input.path_of_file_input provided_input + | (None, None) -> None) + +(* If a root is given then validate it and use it. Otherwise, favor the input file + over the current directory as the place to begin searching for the root. 
*) +let get_the_root ?input ~base_flags root_arg = + match root_arg with + | Some provided_root -> + let root_dir = Path.make provided_root in + if Path.file_exists root_dir && Path.is_directory root_dir then + let flowconfig_name = Base_flags.(base_flags.flowconfig_name) in + let root_config = Path.concat root_dir flowconfig_name in + if Path.file_exists root_config then + root_dir + else + let msg = spf "Failed to open %s" @@ Path.to_string root_config in FlowExitStatus.(exit ~msg Could_not_find_flowconfig) - ) + else + let msg = spf "Invalid root directory %s" provided_root in + FlowExitStatus.(exit ~msg Could_not_find_flowconfig) + | None -> find_a_root ?input ~base_flags None (* convert 1,1 based line/column to 1,0 for internal use *) let convert_input_pos (line, column) = let column = - if column > 1 - then column - 1 - else 0 in + if column > 1 then + column - 1 + else + 0 + in (line, column) (* copied (and adapted) from Hack's ClientCheck module *) let get_path_of_file file = let path = Path.make file in - if Path.file_exists path - then Path.to_string path + if Path.file_exists path then + Path.to_string path else (* Filename.concat does not return a normalized path when the file does not exist. Thus, we do it on our own... *) - let file = Files.normalize_path (Sys.getcwd()) file in + let file = Files.normalize_path (Sys.getcwd ()) file in let path = Path.make file in Path.to_string path let get_file_from_filename_or_stdin ~cmd path = function | Some filename -> - if Sys.is_directory filename then - let msg = spf - "Provided argument %s is not a file; canceling.\ - \nSee \"flow %s --help\" for more info" - filename cmd in - FlowExitStatus.(exit ~msg Path_is_not_a_file) - else - File_input.FileName (expand_path filename) + if not (Sys.file_exists filename) then + let msg = + spf "Could not find file %s; canceling.\nSee \"flow %s --help\" for more info" filename cmd + in + FlowExitStatus.(exit ~msg No_input) + else if Sys.is_directory filename then + let msg = + spf + "Provided argument %s is not a file; canceling.\nSee \"flow %s --help\" for more info" + filename + cmd + in + FlowExitStatus.(exit ~msg Path_is_not_a_file) + else + File_input.FileName (expand_path filename) | None -> - let contents = Sys_utils.read_stdin_to_string () in - let filename = (match path with - | Some "" - | None -> None - | Some str -> Some (get_path_of_file str) - ) in - File_input.FileContent (filename, contents) - -let range_string_of_loc ~strip_root loc = Loc.( - let file = match loc.source with - | Some file -> Reason.string_of_source ~strip_root file - | None -> "" + let contents = Sys_utils.read_stdin_to_string () in + let filename = + match path with + | Some "" + | None -> + None + | Some str -> Some (get_path_of_file str) + in + File_input.FileContent (filename, contents) + +(* Takes a list of strings. If there are 2 then they are both parsed as intengers + and stdin is read from. If there are 3 then the first is treated as a input file + and the following 2 are parsed as integers. 
*) +let parse_location_with_optional_filename spec path args = + let exit () = + CommandSpec.usage spec; + FlowExitStatus.(exit Commandline_usage_error) + in + let (file, line, column) = + match args with + | [file; line; column] -> + let file = expand_path file in + (File_input.FileName file, line, column) + | [line; column] -> + (get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) path None, line, column) + | _ -> exit () + in + let (line, column) = + (try (int_of_string line, int_of_string column) with Failure _ -> exit ()) in - let l0, c0 = loc.start.line, loc.start.column + 1 in - let l1, c1 = loc._end.line, loc._end.column in - spf "%s:%d:%d,%d:%d" file l0 c0 l1 c1 -) + let (line, column) = convert_input_pos (line, column) in + (file, line, column) + +let range_string_of_loc ~strip_root loc = + Loc.( + let file = + match loc.source with + | Some file -> Reason.string_of_source ~strip_root file + | None -> "" + in + let (l0, c0) = (loc.start.line, loc.start.column + 1) in + let (l1, c1) = (loc._end.line, loc._end.column) in + spf "%s:%d:%d,%d:%d" file l0 c0 l1 c1) let exe_name = Utils_js.exe_name @@ -1032,108 +1506,114 @@ let exe_name = Utils_js.exe_name * server *) let rec connect_and_make_request flowconfig_name = (* Sends the command over the socket *) - let send_command ?timeout (oc:out_channel) (cmd:ServerProt.Request.command): unit = - let command = { ServerProt.Request. - client_logging_context = FlowEventLogger.get_context (); - command = cmd; - } in + let send_command ?timeout (oc : out_channel) (cmd : ServerProt.Request.command) : unit = + let command = + { ServerProt.Request.client_logging_context = FlowEventLogger.get_context (); command = cmd } + in Marshal_tools.to_fd_with_preamble ?timeout (Unix.descr_of_out_channel oc) command |> ignore; flush oc in - let eprintf_with_spinner msg = - if Unix.isatty Unix.stderr - then begin + if Unix.isatty Unix.stderr then ( if Tty.spinner_used () then Tty.print_clear_line stderr; - Printf.eprintf "%s: %s%!" msg (Tty.spinner()) - end else + Printf.eprintf "%s: %s%!" msg (Tty.spinner ()) + ) else Printf.eprintf "%s\n%!" msg in - let eprintf_with_spinner fmt = Printf.ksprintf eprintf_with_spinner fmt in - (* Waits for a response over the socket. 
If the connection dies, this will throw an exception *) - let rec wait_for_response ?timeout ~quiet ~root (ic: Timeout.in_channel) = - let use_emoji = Tty.supports_emoji () && - Server_files_js.config_file flowconfig_name root - |> FlowConfig.get - |> FlowConfig.emoji in - - let response: MonitorProt.monitor_to_client_message = try - Marshal_tools.from_fd_with_preamble ?timeout (Timeout.descr_of_in_channel ic) - with - | Unix.Unix_error ((Unix.EPIPE | Unix.ECONNRESET), _, _) -> - if not quiet && Tty.spinner_used () then Tty.print_clear_line stderr; - raise End_of_file - | exn -> - if not quiet && Tty.spinner_used () then Tty.print_clear_line stderr; - raise exn + let rec wait_for_response ?timeout ~quiet ~root (ic : Timeout.in_channel) = + let use_emoji = + Tty.supports_emoji () + && Server_files_js.config_file flowconfig_name root + |> read_config_or_exit + |> FlowConfig.emoji + in + let response : MonitorProt.monitor_to_client_message = + try Marshal_tools.from_fd_with_preamble ?timeout (Timeout.descr_of_in_channel ic) with + | Unix.Unix_error ((Unix.EPIPE | Unix.ECONNRESET), _, _) -> + if (not quiet) && Tty.spinner_used () then Tty.print_clear_line stderr; + raise End_of_file + | exn -> + if (not quiet) && Tty.spinner_used () then Tty.print_clear_line stderr; + raise exn in - match response with | MonitorProt.Please_hold status -> - let status_string = match status with - | server_status, watcher_status when ServerStatus.is_free server_status -> - (* Let's ignore messages from the server that it is free. It's a confusing message for the - * user *) - if snd watcher_status = FileWatcherStatus.Ready - then None - else Some (FileWatcherStatus.string_of_status watcher_status) - | server_status, _ -> - Some (ServerStatus.string_of_status ~use_emoji server_status) + let status_string = + match status with + | (server_status, watcher_status) when ServerStatus.is_free server_status -> + (* Let's ignore messages from the server that it is free. It's a confusing message for the + * user *) + if snd watcher_status = FileWatcherStatus.Ready then + None + else + Some (FileWatcherStatus.string_of_status watcher_status) + | (server_status, _) -> Some (ServerStatus.string_of_status ~use_emoji server_status) in - Option.iter status_string (fun status_string -> - if not quiet then eprintf_with_spinner "Please wait. %s" status_string - ); + if not quiet then eprintf_with_spinner "Please wait. %s" status_string); wait_for_response ?timeout ~quiet ~root ic | MonitorProt.Data response -> - if not quiet && Tty.spinner_used () then Tty.print_clear_line stderr; + if (not quiet) && Tty.spinner_used () then Tty.print_clear_line stderr; response | MonitorProt.ServerException exn_str -> if Tty.spinner_used () then Tty.print_clear_line stderr; let msg = Utils_js.spf "Server threw an exception: %s" exn_str in FlowExitStatus.(exit ~msg Unknown_error) in - fun ?timeout ?retries connect_flags root request -> - let retries = match retries with - | None -> connect_flags.retries - | Some retries -> retries in - - if retries < 0 - then FlowExitStatus.(exit ~msg:"Out of retries, exiting!" Out_of_retries); - + let retries = + match retries with + | None -> connect_flags.retries + | Some retries -> retries + in + (if retries < 0 then FlowExitStatus.(exit ~msg:"Out of retries, exiting!" 
Out_of_retries)); + + let version_mismatch_strategy = + match connect_flags.on_mismatch with + | Choose_newest -> SocketHandshake.Stop_server_if_older + | Stop_server -> SocketHandshake.Always_stop_server + | Restart_client -> SocketHandshake.Error_client + | Error_client -> SocketHandshake.Error_client + in let quiet = connect_flags.quiet in - let client_handshake = ({ SocketHandshake. - client_build_id = SocketHandshake.build_revision; - is_stop_request = false; - server_should_hangup_if_still_initializing = not connect_flags.retry_if_init; - server_should_exit_if_version_mismatch = true; - }, { SocketHandshake. - client_type = SocketHandshake.Ephemeral; - }) in + let client_handshake = + ( { + SocketHandshake.client_build_id = SocketHandshake.build_revision; + client_version = Flow_version.version; + is_stop_request = false; + server_should_hangup_if_still_initializing = not connect_flags.retry_if_init; + version_mismatch_strategy; + }, + { SocketHandshake.client_type = SocketHandshake.Ephemeral } ) + in (* connect handles timeouts itself *) - let ic, oc = connect ~flowconfig_name ~client_handshake connect_flags root in + let (ic, oc) = connect ~flowconfig_name ~client_handshake connect_flags root in send_command ?timeout oc request; try wait_for_response ?timeout ~quiet ~root ic with End_of_file -> - if not quiet - then begin + if not quiet then eprintf_with_spinner "Lost connection to the flow server (%d %s remaining)%!" retries - (if retries = 1 then "retry" else "retries") - end; - connect_and_make_request flowconfig_name ?timeout ~retries:(retries - 1) connect_flags root + ( if retries = 1 then + "retry" + else + "retries" ); + connect_and_make_request + flowconfig_name + ?timeout + ~retries:(retries - 1) + connect_flags + root request (* If --timeout is set, wrap connect_and_make_request in a timeout *) let connect_and_make_request ?retries flowconfig_name connect_flags root request = match connect_flags.timeout with - | None -> - connect_and_make_request ?retries flowconfig_name connect_flags root request + | None -> connect_and_make_request ?retries flowconfig_name connect_flags root request | Some timeout -> Timeout.with_timeout ~timeout @@ -1142,18 +1622,53 @@ let connect_and_make_request ?retries flowconfig_name connect_flags root request connect_and_make_request ~timeout ?retries flowconfig_name connect_flags root request) let failwith_bad_response ~request ~response = - let msg = Printf.sprintf - "Bad response to %S: received %S" - (ServerProt.Request.to_string request) - (ServerProt.Response.to_string response) in + let msg = + Printf.sprintf + "Bad response to %S: received %S" + (ServerProt.Request.to_string request) + (ServerProt.Response.to_string response) + in failwith msg let get_check_or_status_exit_code errors warnings max_warnings = - let open FlowExitStatus in - let open Errors in - if ErrorSet.is_empty errors then begin - match max_warnings with - | Some x when ErrorSet.cardinal warnings > x -> Type_error - | None | Some _ -> No_error - end else - Type_error + FlowExitStatus.( + Errors.( + if ConcreteLocPrintableErrorSet.is_empty errors then + match max_warnings with + | Some x when ConcreteLocPrintableErrorSet.cardinal warnings > x -> Type_error + | None + | Some _ -> + No_error + else + Type_error)) + +let choose_file_watcher ~options ~file_watcher ~flowconfig = + match (Options.lazy_mode options, file_watcher) with + | (Options.LAZY_MODE_WATCHMAN, (None | Some Options.Watchman)) -> + (* --lazy-mode watchman implies --file-watcher watchman *) + 
Options.Watchman + | (Options.LAZY_MODE_WATCHMAN, Some _) -> + (* Error on something like --lazy-mode watchman --file-watcher dfind *) + let msg = + "Using Watchman lazy mode implicitly uses the Watchman file watcher, " + ^ "but you tried to use a different file watcher via the `--file-watcher` flag." + in + raise (CommandSpec.Failed_to_parse ("--file-watcher", msg)) + | (_, Some file_watcher) -> file_watcher + | (_, None) -> Option.value ~default:Options.DFind (FlowConfig.file_watcher flowconfig) + +(* Reads the file from disk to compute the offset. This can lead to strange results -- if the file + * has changed since the location was constructed, the offset could be incorrect. If the file has + * changed such that the contents no longer have text at the given line/column, the offset is not + * included in the JSON output. *) +let json_of_loc_with_offset ?stdin_file ~strip_root loc = + Option.( + let file_content = + let path = Loc.source loc >>= File_key.to_path %> Core_result.ok in + match stdin_file with + | Some fileinput when path = File_input.path_of_file_input fileinput -> + Some (File_input.content_of_file_input_unsafe fileinput) + | _ -> path >>= Sys_utils.cat_or_failed + in + let offset_table = Option.map file_content ~f:Offset_utils.make in + Reason.json_of_loc ~strip_root ~offset_table ~catch_offset_errors:true loc) diff --git a/src/commands/config/__tests__/command_config_tests.ml b/src/commands/config/__tests__/command_config_tests.ml new file mode 100644 index 00000000000..dc06ba2522f --- /dev/null +++ b/src/commands/config/__tests__/command_config_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "config" >::: [Version_regex_test.tests] + +let () = run_test_tt_main tests diff --git a/src/commands/config/__tests__/test.ml b/src/commands/config/__tests__/test.ml deleted file mode 100644 index 7a9688fc6ce..00000000000 --- a/src/commands/config/__tests__/test.ml +++ /dev/null @@ -1,14 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "config" >::: [ - Version_regex_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/commands/config/__tests__/version_regex_test.ml b/src/commands/config/__tests__/version_regex_test.ml index 7adc4f09bf6..225aca1c254 100644 --- a/src/commands/config/__tests__/version_regex_test.ml +++ b/src/commands/config/__tests__/version_regex_test.ml @@ -1,78 +1,68 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - open OUnit2 let assert_regex ~ctxt ~regex exp act = - assert_equal ~ctxt ~printer:(fun x -> x) ~msg:("regex for "^act^" didn't match") exp (regex act) - -let tests = "version_regex" >::: [ - "less_than_or_equal_to_zero" >:: begin fun ctxt -> - let regex = Version_regex.less_than_or_equal_to_version in - let assert_regex = assert_regex ~ctxt ~regex in - - assert_regex "0" "0"; - assert_regex "0\\(\\.0\\)?" "0.0"; - assert_regex "0\\(\\.0\\(\\.0\\)?\\)?" 
"0.0.0"; - end; - - "less_than_or_equal_to_pre_1_0" >:: begin fun ctxt -> - let regex = Version_regex.less_than_or_equal_to_version in - let assert_regex = assert_regex ~ctxt ~regex in - - assert_regex "0\\(\\.[0-1]\\)?" "0.1"; - assert_regex "0\\(\\.\\(1\\(\\.0\\)?\\|0\\(\\.[0-9]+\\)?\\)\\)?" "0.1.0"; - assert_regex "0\\(\\.\\(2\\(\\.[0-4]\\)?\\|[0-1]\\(\\.[0-9]+\\)?\\)\\)?" "0.2.4"; - end; - - "less_than_or_equal_to_1_X" >:: begin fun ctxt -> - let regex = Version_regex.less_than_or_equal_to_version in - let assert_regex = assert_regex ~ctxt ~regex in - - assert_regex "[0-1]" "1"; - assert_regex "\\(1\\(\\.0\\)?\\|0\\(\\.[0-9]+\\)?\\)" "1.0"; - assert_regex "\\(1\\(\\.0\\(\\.0\\)?\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" "1.0.0"; - assert_regex "\\(1\\(\\.[0-1]\\)?\\|0\\(\\.[0-9]+\\)?\\)" "1.1"; - assert_regex - "\\(1\\(\\.\\(1\\(\\.0\\)?\\|0\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "1.1.0"; - assert_regex - "\\(1\\(\\.\\(1\\(\\.[0-1]\\)?\\|0\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "1.1.1"; - assert_regex "\\(1\\(\\.[0-2]\\)?\\|0\\(\\.[0-9]+\\)?\\)" "1.2"; - assert_regex - "\\(1\\(\\.\\(2\\(\\.0\\)?\\|[0-1]\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "1.2.0"; - assert_regex - "\\(1\\(\\.\\(2\\(\\.[0-4]\\)?\\|[0-1]\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "1.2.4"; - end; - - "less_than_or_equal_to_10_X" >:: begin fun ctxt -> - let regex = Version_regex.less_than_or_equal_to_version in - let assert_regex = assert_regex ~ctxt ~regex in + assert_equal + ~ctxt + ~printer:(fun x -> x) + ~msg:("regex for " ^ act ^ " didn't match") + exp + (regex act) - assert_regex "\\(10\\|[0-9]\\)" "10"; - assert_regex "\\(10\\(\\.0\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)" "10.0"; - assert_regex "\\(10\\(\\.0\\(\\.0\\)?\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" "10.0.0"; - assert_regex "\\(10\\(\\.\\(10\\|[0-9]\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)" "10.10"; - assert_regex - "\\(10\\(\\.\\(10\\(\\.0\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "10.10.0"; - assert_regex - "\\(10\\(\\.\\(10\\(\\.\\(10\\|[0-9]\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "10.10.10"; - assert_regex "\\(10\\(\\.\\(20\\|[0-1][0-9]\\|[0-9]\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)" "10.20"; - assert_regex - "\\(10\\(\\.\\(20\\(\\.0\\)?\\|\\(1[0-9]\\|[0-9]\\)\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "10.20.0"; - assert_regex - "\\(10\\(\\.\\(20\\(\\.[0-4]\\)?\\|\\(1[0-9]\\|[0-9]\\)\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" - "10.20.4"; - end; -] +let tests = + "version_regex" + >::: [ + ( "less_than_or_equal_to_zero" + >:: fun ctxt -> + let regex = Version_regex.less_than_or_equal_to_version in + let assert_regex = assert_regex ~ctxt ~regex in + assert_regex "0\\(\\.0\\(\\.0\\)?\\)?" "0.0.0" ); + ( "less_than_or_equal_to_pre_1_0" + >:: fun ctxt -> + let regex = Version_regex.less_than_or_equal_to_version in + let assert_regex = assert_regex ~ctxt ~regex in + assert_regex "0\\(\\.\\(1\\(\\.0\\)?\\|0\\(\\.[0-9]+\\)?\\)\\)?" "0.1.0"; + assert_regex "0\\(\\.\\(2\\(\\.[0-4]\\)?\\|[0-1]\\(\\.[0-9]+\\)?\\)\\)?" 
"0.2.4" ); + ( "less_than_or_equal_to_1_X" + >:: fun ctxt -> + let regex = Version_regex.less_than_or_equal_to_version in + let assert_regex = assert_regex ~ctxt ~regex in + assert_regex "\\(1\\(\\.0\\(\\.0\\)?\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" "1.0.0"; + assert_regex + "\\(1\\(\\.\\(1\\(\\.0\\)?\\|0\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "1.1.0"; + assert_regex + "\\(1\\(\\.\\(1\\(\\.[0-1]\\)?\\|0\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "1.1.1"; + assert_regex + "\\(1\\(\\.\\(2\\(\\.0\\)?\\|[0-1]\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "1.2.0"; + assert_regex + "\\(1\\(\\.\\(2\\(\\.[0-4]\\)?\\|[0-1]\\(\\.[0-9]+\\)?\\)\\)?\\|0\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "1.2.4" ); + ( "less_than_or_equal_to_10_X" + >:: fun ctxt -> + let regex = Version_regex.less_than_or_equal_to_version in + let assert_regex = assert_regex ~ctxt ~regex in + assert_regex + "\\(10\\(\\.0\\(\\.0\\)?\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "10.0.0"; + assert_regex + "\\(10\\(\\.\\(10\\(\\.0\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "10.10.0"; + assert_regex + "\\(10\\(\\.\\(10\\(\\.\\(10\\|[0-9]\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "10.10.10"; + assert_regex + "\\(10\\(\\.\\(20\\(\\.0\\)?\\|\\(1[0-9]\\|[0-9]\\)\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "10.20.0"; + assert_regex + "\\(10\\(\\.\\(20\\(\\.[0-4]\\)?\\|\\(1[0-9]\\|[0-9]\\)\\(\\.[0-9]+\\)?\\)\\)?\\|[0-9]\\(\\.[0-9]+\\)?\\(\\.[0-9]+\\)?\\)" + "10.20.4" ); + ] diff --git a/src/commands/config/dune b/src/commands/config/dune new file mode 100644 index 00000000000..4c4582ff9e7 --- /dev/null +++ b/src/commands/config/dune @@ -0,0 +1,9 @@ +(library + (name flow_config) + (wrapped false) + (libraries + flow_common + semver + xx + ) +) diff --git a/src/commands/config/flowConfig.ml b/src/commands/config/flowConfig.ml index bdfdc018159..f693acf681c 100644 --- a/src/commands/config/flowConfig.ml +++ b/src/commands/config/flowConfig.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
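A note on the version_regex tests above: the expected strings are OCaml Str-syntax patterns (hence the escaped "\\(...\\)" groups), and the flowConfig.ml hunks below build one such pattern for the current Flow release (less_or_equal_curr_version) when expanding user-supplied suppress_comment values. The following is a minimal sketch of what one of these patterns accepts, assuming whole-string matching purely for illustration; matches_whole is a hypothetical helper written for this note, not something defined in this diff, and the real code splices the pattern into a larger suppress-comment regex instead.

let matches_whole pattern s =
  (* Str.string_match only anchors at the start of the string, so additionally
   * require the match to consume the entire string. *)
  Str.string_match (Str.regexp pattern) s 0 && Str.match_end () = String.length s

let () =
  (* The pattern asserted in the tests above for versions up to and including 0.2.4. *)
  let le_0_2_4 = Version_regex.less_than_or_equal_to_version "0.2.4" in
  assert (matches_whole le_0_2_4 "0.1.7");
  assert (matches_whole le_0_2_4 "0.2.4");
  assert (not (matches_whole le_0_2_4 "0.2.5"))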
@@ -7,53 +7,77 @@ open Utils_js +let ( >>= ) = Core_result.( >>= ) + +type line = int * string + +type section = line * line list + +type warning = int * string + +type error = int * string + let default_temp_dir = Filename.concat Sys_utils.temp_dir_name "flow" + let default_shm_dirs = - try - Sys.getenv "FLOW_SHMDIR" - |> Str.(split (regexp ",")) - with _ -> [ "/dev/shm"; default_temp_dir; ] + try Sys.getenv "FLOW_SHMDIR" |> Str.(split (regexp ",")) + with _ -> ["/dev/shm"; default_temp_dir] (* Half a gig *) let default_shm_min_avail = 1024 * 1024 * 512 let version_regex = Str.regexp_string "<VERSION>" +let less_or_equal_curr_version = Version_regex.less_than_or_equal_to_version Flow_version.version + let map_add map (key, value) = SMap.add key value map -let multi_error (errs:(int * string) list) = - let msg = - errs - |> List.map (fun (ln, msg) -> spf ".flowconfig:%d %s" ln msg) - |> String.concat "\n" - in - FlowExitStatus.(exit ~msg Invalid_flowconfig) +module Opts = struct + type raw_value = int * string -let error ln msg = multi_error [(ln, msg)] + + type raw_values = raw_value list + + type raw_options = raw_values SMap.t + + type error_kind = + | Failed_to_parse_value of string + | Failed_to_set of string + | Duplicate_option + + type opt_error = int * error_kind -module Opts = struct type t = { + abstract_locations: bool; all: bool; + allow_skip_direct_dependents: bool; + cache_direct_dependents: bool; emoji: bool; - enable_cancelable_rechecks: bool; enable_const_params: bool; enforce_strict_call_arity: bool; enforce_well_formed_exports: bool; + enforce_well_formed_exports_whitelist: string list; + enums: bool; esproposal_class_instance_fields: Options.esproposal_feature_mode; esproposal_class_static_fields: Options.esproposal_feature_mode; esproposal_decorators: Options.esproposal_feature_mode; esproposal_export_star_as: Options.esproposal_feature_mode; esproposal_nullish_coalescing: Options.esproposal_feature_mode; esproposal_optional_chaining: Options.esproposal_feature_mode; + exact_by_default: bool; + facebook_fbs: string option; facebook_fbt: string option; file_watcher: Options.file_watcher option; + haste_module_ref_prefix: string option; haste_name_reducers: (Str.regexp * string) list; haste_paths_blacklist: string list; haste_paths_whitelist: string list; haste_use_name_reducers: bool; ignore_non_literal_requires: bool; include_warnings: bool; + lazy_mode: Options.lazy_mode option; log_file: Path.t option; + lsp_code_actions: bool; + max_files_checked_per_worker: int; max_header_tokens: int; max_literal_length: int; max_workers: int; @@ -67,11 +91,11 @@ module Opts = struct munge_underscores: bool; no_flowlib: bool; node_resolver_dirnames: string list; + recursion_limit: int; root_name: string option; saved_state_fetcher: Options.saved_state_fetcher; shm_dep_table_pow: int; shm_dirs: string list; - shm_global_size: int; shm_hash_table_pow: int; shm_heap_size: int; shm_log_level: int; @@ -80,58 +104,30 @@ module Opts = struct suppress_types: SSet.t; temp_dir: string; traces: int; - version: string option; + trust_mode: Options.trust_mode; + type_asserts: bool; + types_first: bool; + wait_for_recheck: bool; weak: bool; } - type initializer_ = - | USE_DEFAULT - | INIT_FN of (t -> t) - - type option_flag = - | ALLOW_DUPLICATE - - type 'a option_definition = { - (** - * The initializer_ gets set on the options object immediately before - * parsing the *first* occurrence of the user-specified config option.
This - * is useful in cases where the user's value should blow away the default - * value (rather than being aggregated to it). - * - * For example: We want the default value of 'module.file_ext' to be - * ['.js'; '.jsx'], but if the user specifies any 'module.file_ext' - * settings, we want to start from a clean list. - *) - initializer_: initializer_; - flags: option_flag list; - setter: (t -> 'a -> (t, string) result); - optparser: (string -> ('a, string) result); - } - - let get_defined_opts (raw_opts, config) = - (* If the user specified any options that aren't defined, issue an error *) - if SMap.cardinal raw_opts > 0 then ( - let errors = - SMap.elements raw_opts - |> List.map (fun (k, v) -> - let msg = spf "Unsupported option specified! (%s)" k in - List.map (fun (line_num, _) -> (line_num, msg)) v - ) - |> List.flatten - |> List.rev - in - multi_error errors - ); - - config + let warn_on_unknown_opts (raw_opts, config) : (t * warning list, error) result = + (* If the user specified any options that aren't defined, issue a warning *) + let warnings = + SMap.elements raw_opts + |> Core_list.fold_left + ~f:(fun acc (k, v) -> + let msg = spf "Unsupported option specified! (%s)" k in + Core_list.fold_left ~f:(fun acc (line_num, _) -> (line_num, msg) :: acc) ~init:acc v) + ~init:[] + in + Ok (config, warnings) - let module_file_exts = SSet.empty - |> SSet.add ".js" - |> SSet.add ".jsx" - |> SSet.add ".json" - |> SSet.add ".mjs" + let module_file_exts = + SSet.empty |> SSet.add ".js" |> SSet.add ".jsx" |> SSet.add ".json" |> SSet.add ".mjs" - let module_resource_exts = SSet.empty + let module_resource_exts = + SSet.empty |> SSet.add ".css" |> SSet.add ".jpg" |> SSet.add ".png" @@ -144,164 +140,183 @@ module Opts = struct |> SSet.add ".mp4" |> SSet.add ".webm" - let default_options = { - all = false; - emoji = false; - enable_cancelable_rechecks = false; - enable_const_params = false; - enforce_strict_call_arity = true; - enforce_well_formed_exports = false; - esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; - esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; - esproposal_decorators = Options.ESPROPOSAL_WARN; - esproposal_export_star_as = Options.ESPROPOSAL_WARN; - esproposal_nullish_coalescing = Options.ESPROPOSAL_WARN; - esproposal_optional_chaining = Options.ESPROPOSAL_WARN; - facebook_fbt = None; - file_watcher = None; - haste_name_reducers = [(Str.regexp "^\\(.*/\\)?\\([a-zA-Z0-9$_.-]+\\)\\.js\\(\\.flow\\)?$", "\\2")]; - haste_paths_blacklist = ["\\(.*\\)?/node_modules/.*"]; - haste_paths_whitelist = ["/.*"]; - haste_use_name_reducers = false; - ignore_non_literal_requires = false; - include_warnings = false; - log_file = None; - max_header_tokens = 10; - max_literal_length = 100; - max_workers = Sys_utils.nbr_procs; - merge_timeout = Some 100; - module_file_exts; - module_name_mappers = []; - module_resolver = None; - module_resource_exts; - module_system = Options.Node; - modules_are_use_strict = false; - munge_underscores = false; - no_flowlib = false; - node_resolver_dirnames = ["node_modules"]; - root_name = None; - saved_state_fetcher = Options.Dummy_fetcher; - shm_dep_table_pow = 17; - shm_dirs = default_shm_dirs; - shm_global_size = 1024 * 1024 * 1024; (* 1 gig *) - shm_hash_table_pow = 19; - shm_heap_size = 1024 * 1024 * 1024 * 25; (* 25 gigs *) - shm_log_level = 0; - shm_min_avail = default_shm_min_avail; - suppress_comments = [Str.regexp "\\(.\\|\n\\)*\\$FlowFixMe"]; - suppress_types = SSet.empty |> SSet.add "$FlowFixMe"; - temp_dir = 
default_temp_dir; - traces = 0; - version = None; - weak = false; - } + let default_options = + { + abstract_locations = false; + all = false; + allow_skip_direct_dependents = false; + cache_direct_dependents = true; + emoji = false; + enable_const_params = false; + enforce_strict_call_arity = true; + enforce_well_formed_exports = false; + enforce_well_formed_exports_whitelist = []; + enums = false; + esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; + esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; + esproposal_decorators = Options.ESPROPOSAL_WARN; + esproposal_export_star_as = Options.ESPROPOSAL_WARN; + esproposal_nullish_coalescing = Options.ESPROPOSAL_WARN; + esproposal_optional_chaining = Options.ESPROPOSAL_WARN; + exact_by_default = false; + facebook_fbs = None; + facebook_fbt = None; + file_watcher = None; + haste_module_ref_prefix = None; + haste_name_reducers = + [(Str.regexp "^\\(.*/\\)?\\([a-zA-Z0-9$_.-]+\\)\\.js\\(\\.flow\\)?$", "\\2")]; + haste_paths_blacklist = ["\\(.*\\)?/node_modules/.*"]; + haste_paths_whitelist = ["/.*"]; + haste_use_name_reducers = false; + ignore_non_literal_requires = false; + include_warnings = false; + lazy_mode = None; + log_file = None; + lsp_code_actions = false; + max_header_tokens = 10; + max_files_checked_per_worker = 100; + max_literal_length = 100; + max_workers = Sys_utils.nbr_procs; + merge_timeout = Some 100; + module_file_exts; + module_name_mappers = []; + module_resolver = None; + module_resource_exts; + module_system = Options.Node; + modules_are_use_strict = false; + munge_underscores = false; + no_flowlib = false; + node_resolver_dirnames = ["node_modules"]; + recursion_limit = 10000; + root_name = None; + saved_state_fetcher = Options.Dummy_fetcher; + shm_dep_table_pow = 17; + shm_dirs = default_shm_dirs; + shm_hash_table_pow = 19; + shm_heap_size = 1024 * 1024 * 1024 * 25; + (* 25 gigs *) + shm_log_level = 0; + shm_min_avail = default_shm_min_avail; + suppress_comments = [Str.regexp "\\(.\\|\n\\)*\\$FlowFixMe"]; + suppress_types = SSet.empty |> SSet.add "$FlowFixMe"; + temp_dir = default_temp_dir; + traces = 0; + trust_mode = Options.NoTrust; + type_asserts = false; + types_first = false; + wait_for_recheck = false; + weak = false; + } - let parse = - let parse_line map (line_num, line) = - if Str.string_match (Str.regexp "^\\([a-zA-Z0-9._]+\\)=\\(.*\\)$") line 0 - then - let key = Str.matched_group 1 line in - let value = Str.matched_group 2 line in - SMap.add key ((line_num, value)::( - match SMap.get key map with - | Some values -> values - | None -> [] - )) map - else error line_num "Unable to parse line." 
+ let parse_lines : line list -> (raw_options, error) result = + let rec loop acc lines = + acc + >>= fun map -> + match lines with + | [] -> Ok map + | (line_num, line) :: rest -> + if Str.string_match (Str.regexp "^\\([a-zA-Z0-9._]+\\)=\\(.*\\)$") line 0 then + let key = Str.matched_group 1 line in + let value = Str.matched_group 2 line in + let map = + SMap.add + key + ( (line_num, value) + :: + (match SMap.get key map with + | Some values -> values + | None -> []) ) + map + in + loop (Ok map) rest + else + Error (line_num, "Unable to parse line.") in - - fun config lines -> - let lines = lines - |> List.map (fun (ln, line) -> ln, String.trim line) - |> List.filter (fun (_, s) -> s <> "") + fun lines -> + let lines = + lines + |> Core_list.map ~f:(fun (ln, line) -> (ln, String.trim line)) + |> Core_list.filter ~f:(fun (_, s) -> s <> "") in - let raw_options = List.fold_left parse_line SMap.empty lines in - (raw_options, config) - - let define_opt key definition (raw_opts, config) = - let new_raw_opts = SMap.remove key raw_opts in - - match SMap.get key raw_opts with - | None -> (new_raw_opts, config) - | Some values -> - let config = ( - match definition.initializer_ with - | USE_DEFAULT -> config - | INIT_FN f -> f config - ) in - + loop (Ok SMap.empty) lines + + (** + * `init` gets called on the options object immediately before + * parsing the *first* occurrence of the user-specified config option. This + * is useful in cases where the user's value should blow away the default + * value (rather than being aggregated to it). + * + * For example: We want the default value of 'module.file_ext' to be + * ['.js'; '.jsx'], but if the user specifies any 'module.file_ext' + * settings, we want to start from a clean list. + *) + let opt = + let rec loop optparser setter values config = + match values with + | [] -> Ok config + | (line_num, value_str) :: rest -> + let value = + optparser value_str + |> Core_result.map_error ~f:(fun msg -> (line_num, Failed_to_parse_value msg)) + in + let config = + value + >>= fun value -> + setter config value + |> Core_result.map_error ~f:(fun msg -> (line_num, Failed_to_set msg)) + in + config >>= loop optparser setter rest + in + fun (optparser : string -> ('a, string) result) + ?init + ?(multiple = false) + (setter : t -> 'a -> (t, string) result) + (values : raw_values) + config -> + ( let config = + match init with + | None -> config + | Some f -> f config + in (* Error when duplicate options were incorrectly given *) - let allow_dupes = List.mem ALLOW_DUPLICATE definition.flags in - if (not allow_dupes) && (List.length values) > 1 then ( - let line_num = fst (List.nth values 1) in - error line_num (spf "Duplicate option: \"%s\"" key) - ); - - let config = List.fold_left (fun config (line_num, value_str) -> - let value = - match definition.optparser value_str with - | Ok value -> value - | Error msg -> error line_num ( - spf "Error parsing value for \"%s\". %s" key msg - ) - in - match definition.setter config value with - | Ok config -> config - | Error msg -> error line_num ( - spf "Error setting value for \"%s\". %s" key msg - ) - ) config values in - - (new_raw_opts, config) - - let optparse_enum values str = - let values = List.fold_left map_add SMap.empty values in - match SMap.get str values with - | Some v -> Ok v - | None -> Error ( - spf "Unsupported value: \"%s\". 
Supported values are: %s" - str - (String.concat ", " (SMap.keys values)) - ) - - let optparse_boolean = optparse_enum [ - ("true", true); - ("false", false); - ] - - let optparse_uint str = - let v = int_of_string str in - if v < 0 then Error "Number cannot be negative!" else Ok v + match (multiple, values) with + | (false, _ :: (dupe_ln, _) :: _) -> Error (dupe_ln, Duplicate_option) + | _ -> loop optparser setter values config + : (t, opt_error) result ) let optparse_string str = try Ok (Scanf.unescaped str) - with Scanf.Scan_failure reason -> Error ( - spf "Invalid ocaml string: %s" reason - ) + with Scanf.Scan_failure reason -> Error (spf "Invalid ocaml string: %s" reason) let optparse_regexp str = - match optparse_string str with - | Ok unescaped -> - begin try Ok (Str.regexp unescaped) - with Failure reason -> Error ( - spf "Invalid regex \"%s\" (%s)" unescaped reason - ) - end - | Error _ as err -> err - - let optparse_esproposal_feature_flag ?(allow_enable=false) = - let values = [ - ("ignore", Options.ESPROPOSAL_IGNORE); - ("warn", Options.ESPROPOSAL_WARN); - ] in + optparse_string str + >>= fun unescaped -> + try Ok (Str.regexp unescaped) + with Failure reason -> Error (spf "Invalid regex \"%s\" (%s)" unescaped reason) + + let enum values = + opt (fun str -> + let values = Core_list.fold_left ~f:map_add ~init:SMap.empty values in + match SMap.get str values with + | Some v -> Ok v + | None -> + Error + (spf + "Unsupported value: \"%s\". Supported values are: %s" + str + (String.concat ", " (SMap.keys values)))) + + let esproposal_feature_flag ?(allow_enable = false) = + let values = [("ignore", Options.ESPROPOSAL_IGNORE); ("warn", Options.ESPROPOSAL_WARN)] in let values = - if allow_enable - then ("enable", Options.ESPROPOSAL_ENABLE)::values - else values + if allow_enable then + ("enable", Options.ESPROPOSAL_ENABLE) :: values + else + values in - optparse_enum values + enum values - let optparse_filepath str = Ok (Path.make str) + let filepath = opt (fun str -> Ok (Path.make str)) let optparse_mapping str = let regexp_str = "^'\\([^']*\\)'[ \t]*->[ \t]*'\\([^']*\\)'$" in @@ -309,14 +324,278 @@ module Opts = struct if Str.string_match regexp str 0 then Ok (Str.matched_group 1 str, Str.matched_group 2 str) else - Error ( - "Expected a mapping of form: " ^ - "'single-quoted-string' -> 'single-quoted-string'" - ) + Error ("Expected a mapping of form: " ^ "'single-quoted-string' -> 'single-quoted-string'") + + let boolean = enum [("true", true); ("false", false)] + + let string = opt optparse_string + + let uint = + opt (fun str -> + let v = int_of_string str in + if v < 0 then + Error "Number cannot be negative!" 
+ else + Ok v) + + let mapping fn = opt (fun str -> optparse_mapping str >>= fn) + + let parsers = + [ + ("emoji", boolean (fun opts v -> Ok { opts with emoji = v })); + ( "esproposal.class_instance_fields", + esproposal_feature_flag ~allow_enable:true (fun opts v -> + Ok { opts with esproposal_class_instance_fields = v }) ); + ( "esproposal.class_static_fields", + esproposal_feature_flag ~allow_enable:true (fun opts v -> + Ok { opts with esproposal_class_static_fields = v }) ); + ( "esproposal.decorators", + esproposal_feature_flag (fun opts v -> Ok { opts with esproposal_decorators = v }) ); + ( "esproposal.export_star_as", + esproposal_feature_flag ~allow_enable:true (fun opts v -> + Ok { opts with esproposal_export_star_as = v }) ); + ( "esproposal.optional_chaining", + esproposal_feature_flag ~allow_enable:true (fun opts v -> + Ok { opts with esproposal_optional_chaining = v }) ); + ( "esproposal.nullish_coalescing", + esproposal_feature_flag ~allow_enable:true (fun opts v -> + Ok { opts with esproposal_nullish_coalescing = v }) ); + ("exact_by_default", boolean (fun opts v -> Ok { opts with exact_by_default = v })); + ("facebook.fbs", string (fun opts v -> Ok { opts with facebook_fbs = Some v })); + ("facebook.fbt", string (fun opts v -> Ok { opts with facebook_fbt = Some v })); + ( "file_watcher", + enum + [ + ("none", Options.NoFileWatcher); + ("dfind", Options.DFind); + ("watchman", Options.Watchman); + ] + (fun opts v -> Ok { opts with file_watcher = Some v }) ); + ("include_warnings", boolean (fun opts v -> Ok { opts with include_warnings = v })); + ( "lazy_mode", + enum + [ + ("fs", Options.LAZY_MODE_FILESYSTEM); + ("ide", Options.LAZY_MODE_IDE); + ("watchman", Options.LAZY_MODE_WATCHMAN); + ("none", Options.NON_LAZY_MODE); + ] + (fun opts v -> Ok { opts with lazy_mode = Some v }) ); + ( "merge_timeout", + uint (fun opts v -> + let merge_timeout = + if v = 0 then + None + else + Some v + in + Ok { opts with merge_timeout }) ); + ( "module.system.haste.module_ref_prefix", + string (fun opts v -> Ok { opts with haste_module_ref_prefix = Some v }) ); + ( "module.system.haste.name_reducers", + mapping + ~init:(fun opts -> { opts with haste_name_reducers = [] }) + ~multiple:true + (fun (pattern, template) -> Ok (Str.regexp pattern, template)) + (fun opts v -> Ok { opts with haste_name_reducers = v :: opts.haste_name_reducers }) ); + ( "module.system.haste.paths.blacklist", + string + ~init:(fun opts -> { opts with haste_paths_blacklist = [] }) + ~multiple:true + (fun opts v -> Ok { opts with haste_paths_blacklist = v :: opts.haste_paths_blacklist }) + ); + ( "module.system.haste.paths.whitelist", + string + ~init:(fun opts -> { opts with haste_paths_whitelist = [] }) + ~multiple:true + (fun opts v -> Ok { opts with haste_paths_whitelist = v :: opts.haste_paths_whitelist }) + ); + ( "module.system.haste.use_name_reducers", + boolean + ~init:(fun opts -> { opts with haste_use_name_reducers = false }) + (fun opts v -> Ok { opts with haste_use_name_reducers = v }) ); + ("log.file", filepath (fun opts v -> Ok { opts with log_file = Some v })); + ("max_header_tokens", uint (fun opts v -> Ok { opts with max_header_tokens = v })); + ( "module.ignore_non_literal_requires", + boolean (fun opts v -> Ok { opts with ignore_non_literal_requires = v }) ); + ( "module.file_ext", + string + ~init:(fun opts -> { opts with module_file_exts = SSet.empty }) + ~multiple:true + (fun opts v -> + if String_utils.string_ends_with v Files.flow_ext then + Error + ( "Cannot use file extension '" + ^ v + ^ "' 
since it ends with the reserved extension '" + ^ Files.flow_ext + ^ "'" ) + else + let module_file_exts = SSet.add v opts.module_file_exts in + Ok { opts with module_file_exts }) ); + ( "module.name_mapper", + mapping + ~multiple:true + (fun (pattern, template) -> Ok (Str.regexp pattern, template)) + (fun opts v -> + let module_name_mappers = v :: opts.module_name_mappers in + Ok { opts with module_name_mappers }) ); + ( "module.name_mapper.extension", + mapping + ~multiple:true + (fun (file_ext, template) -> + Ok (Str.regexp ("^\\(.*\\)\\." ^ Str.quote file_ext ^ "$"), template)) + (fun opts v -> + let module_name_mappers = v :: opts.module_name_mappers in + Ok { opts with module_name_mappers }) ); + ("module.resolver", filepath (fun opts v -> Ok { opts with module_resolver = Some v })); + ( "module.system", + enum [("node", Options.Node); ("haste", Options.Haste)] (fun opts v -> + Ok { opts with module_system = v }) ); + ( "module.system.node.resolve_dirname", + string + ~init:(fun opts -> { opts with node_resolver_dirnames = [] }) + ~multiple:true + (fun opts v -> + let node_resolver_dirnames = v :: opts.node_resolver_dirnames in + Ok { opts with node_resolver_dirnames }) ); + ("module.use_strict", boolean (fun opts v -> Ok { opts with modules_are_use_strict = v })); + ("munge_underscores", boolean (fun opts v -> Ok { opts with munge_underscores = v })); + ( "name", + string (fun opts v -> + FlowEventLogger.set_root_name (Some v); + Ok { opts with root_name = Some v }) ); + ("server.max_workers", uint (fun opts v -> Ok { opts with max_workers = v })); + ("all", boolean (fun opts v -> Ok { opts with all = v })); + ("wait_for_recheck", boolean (fun opts v -> Ok { opts with wait_for_recheck = v })); + ("weak", boolean (fun opts v -> Ok { opts with weak = v })); + ( "suppress_comment", + string + ~init:(fun opts -> { opts with suppress_comments = [] }) + ~multiple:true + (fun opts v -> + Str.split_delim version_regex v + |> String.concat (">=" ^ less_or_equal_curr_version) + |> String.escaped + |> Core_result.return + >>= optparse_regexp + >>= (fun v -> Ok { opts with suppress_comments = v :: opts.suppress_comments })) ); + ( "suppress_type", + string + ~init:(fun opts -> { opts with suppress_types = SSet.empty }) + ~multiple:true + (fun opts v -> Ok { opts with suppress_types = SSet.add v opts.suppress_types }) ); + ("temp_dir", string (fun opts v -> Ok { opts with temp_dir = v })); + ( "saved_state.fetcher", + enum + [ + ("none", Options.Dummy_fetcher); + ("local", Options.Local_fetcher); + ("fb", Options.Fb_fetcher); + ] + (fun opts saved_state_fetcher -> Ok { opts with saved_state_fetcher }) ); + ( "sharedmemory.dirs", + string ~multiple:true (fun opts v -> Ok { opts with shm_dirs = opts.shm_dirs @ [v] }) ); + ( "sharedmemory.minimum_available", + uint (fun opts shm_min_avail -> Ok { opts with shm_min_avail }) ); + ( "sharedmemory.dep_table_pow", + uint (fun opts shm_dep_table_pow -> Ok { opts with shm_dep_table_pow }) ); + ( "sharedmemory.hash_table_pow", + uint (fun opts shm_hash_table_pow -> Ok { opts with shm_hash_table_pow }) ); + ("sharedmemory.heap_size", uint (fun opts shm_heap_size -> Ok { opts with shm_heap_size })); + ("sharedmemory.log_level", uint (fun opts shm_log_level -> Ok { opts with shm_log_level })); + ("traces", uint (fun opts v -> Ok { opts with traces = v })); + ("max_literal_length", uint (fun opts v -> Ok { opts with max_literal_length = v })); + ( "experimental.const_params", + boolean (fun opts v -> Ok { opts with enable_const_params = v }) ); + 
("experimental.enums", boolean (fun opts v -> Ok { opts with enums = v })); + ( "experimental.lsp.code_actions", + boolean (fun opts v -> Ok { opts with lsp_code_actions = v }) ); + ( "experimental.strict_call_arity", + boolean (fun opts v -> Ok { opts with enforce_strict_call_arity = v }) ); + ( "experimental.well_formed_exports", + boolean (fun opts v -> Ok { opts with enforce_well_formed_exports = v }) ); + ( "experimental.well_formed_exports.whitelist", + string + ~init:(fun opts -> { opts with enforce_well_formed_exports_whitelist = [] }) + ~multiple:true + (fun opts v -> + if opts.enforce_well_formed_exports then + Ok + { + opts with + enforce_well_formed_exports_whitelist = + v :: opts.enforce_well_formed_exports_whitelist; + } + else + Error + "This option requires \"experimental.enforce_well_formed_exports\" set to \"true\".") + ); + ("experimental.type_asserts", boolean (fun opts v -> Ok { opts with type_asserts = v })); + ("experimental.types_first", boolean (fun opts v -> Ok { opts with types_first = v })); + ( "experimental.abstract_locations", + boolean (fun opts v -> Ok { opts with abstract_locations = v }) ); + ( "experimental.cache_direct_dependents", + boolean (fun opts v -> Ok { opts with cache_direct_dependents = v }) ); + ( "experimental.allow_skip_direct_dependents", + boolean (fun opts v -> Ok { opts with allow_skip_direct_dependents = v }) ); + ("no_flowlib", boolean (fun opts v -> Ok { opts with no_flowlib = v })); + ( "trust_mode", + enum + [ + ("check", Options.CheckTrust); + ("silent", Options.SilentTrust); + ("none", Options.NoTrust); + ] + (fun opts trust_mode -> Ok { opts with trust_mode }) ); + ("recursion_limit", uint (fun opts v -> Ok { opts with recursion_limit = v })); + ( "experimental.types_first.max_files_checked_per_worker", + uint (fun opts v -> Ok { opts with max_files_checked_per_worker = v }) ); + ] + let parse = + let error_of_opt_error key (line_num, opt_error) = + let msg = + match opt_error with + | Failed_to_parse_value msg -> spf "Error parsing value for \"%s\". %s" key msg + | Failed_to_set msg -> spf "Error setting value for \"%s\". 
%s" key msg + | Duplicate_option -> spf "Duplicate option: \"%s\"" key + in + (line_num, msg) + in + let rec loop + (acc : (raw_options * t, error) result) + (parsers : (string * (raw_values -> t -> (t, opt_error) result)) list) = + acc + >>= fun (raw_opts, config) -> + match parsers with + | [] -> Ok (raw_opts, config) + | (key, f) :: rest -> + let acc = + match SMap.get key raw_opts with + | None -> Ok (raw_opts, config) + | Some values -> + f values config + |> Core_result.map_error ~f:(error_of_opt_error key) + >>= fun config -> + let new_raw_opts = SMap.remove key raw_opts in + Ok (new_raw_opts, config) + in + loop acc rest + in + fun (init : t) (lines : line list) -> + ( parse_lines lines + >>= (fun raw_options -> loop (Ok (raw_options, init)) parsers >>= warn_on_unknown_opts) + : (t * warning list, error) result ) end +type rollout = { + enabled_group: string; + disabled_groups: SSet.t; +} + type config = { + rollouts: rollout SMap.t; (* completely ignored files (both module resolving and typing) *) ignores: string list; (* files that should be treated as untyped *) @@ -333,6 +612,8 @@ type config = { strict_mode: StrictModeSettings.t; (* config options *) options: Opts.t; + (* version constraint *) + version: string option; } module Pp : sig @@ -340,66 +621,52 @@ module Pp : sig end = struct open Printf - let section_header o section = - fprintf o "[%s]\n" section + let section_header o section = fprintf o "[%s]\n" section - let ignores o ignores = - List.iter (fun ex -> (fprintf o "%s\n" ex)) ignores + let ignores o = Core_list.iter ~f:(fprintf o "%s\n") - let untyped o untyped = - List.iter (fun ex -> (fprintf o "%s\n" ex)) untyped + let untyped o = Core_list.iter ~f:(fprintf o "%s\n") - let declarations o declarations = - List.iter (fun ex -> (fprintf o "%s\n" ex)) declarations + let declarations o = Core_list.iter ~f:(fprintf o "%s\n") - let includes o includes = - List.iter (fun inc -> (fprintf o "%s\n" inc)) includes + let includes o = Core_list.iter ~f:(fprintf o "%s\n") - let libs o libs = - List.iter (fun lib -> (fprintf o "%s\n" lib)) libs + let libs o = Core_list.iter ~f:(fprintf o "%s\n") let options = - let opt o name value = fprintf o "%s=%s\n" name value - - in let module_system = function + let pp_opt o name value = fprintf o "%s=%s\n" name value in + let module_system = function | Options.Node -> "node" | Options.Haste -> "haste" - - in fun o config -> Opts.( - let options = config.options in - if options.module_system <> default_options.module_system - then opt o "module.system" (module_system options.module_system); - if options.all <> default_options.all - then opt o "all" (string_of_bool options.all); - if options.weak <> default_options.weak - then opt o "weak" (string_of_bool options.weak); - if options.temp_dir <> default_options.temp_dir - then opt o "temp_dir" options.temp_dir; - if options.include_warnings <> default_options.include_warnings - then opt o "include_warnings" (string_of_bool options.include_warnings) - ) + in + fun o config -> + Opts.( + let options = config.options in + if options.module_system <> default_options.module_system then + pp_opt o "module.system" (module_system options.module_system); + if options.all <> default_options.all then pp_opt o "all" (string_of_bool options.all); + if options.weak <> default_options.weak then pp_opt o "weak" (string_of_bool options.weak); + if options.temp_dir <> default_options.temp_dir then pp_opt o "temp_dir" options.temp_dir; + if options.include_warnings <> default_options.include_warnings then + 
pp_opt o "include_warnings" (string_of_bool options.include_warnings)) let lints o config = - let open Lints in - let open Severity in - let lint_severities = config.lint_severities in - let lint_default = LintSettings.get_default lint_severities in - (* Don't print an 'all' setting if it matches the default setting. *) - if (lint_default <> LintSettings.get_default LintSettings.empty_severities) then - fprintf o "all=%s\n" (string_of_severity lint_default); - LintSettings.iter (fun kind (state, _) -> - (fprintf o "%s=%s\n" - (string_of_kind kind) - (string_of_severity state))) - lint_severities + Lints.( + Severity.( + let lint_severities = config.lint_severities in + let lint_default = LintSettings.get_default lint_severities in + (* Don't print an 'all' setting if it matches the default setting. *) + if lint_default <> LintSettings.get_default LintSettings.empty_severities then + fprintf o "all=%s\n" (string_of_severity lint_default); + LintSettings.iter + (fun kind (state, _) -> + fprintf o "%s=%s\n" (string_of_kind kind) (string_of_severity state)) + lint_severities)) let strict o config = - let open Lints in - let strict_mode = config.strict_mode in - StrictModeSettings.iter (fun kind -> - (fprintf o "%s\n" - (string_of_kind kind))) - strict_mode + Lints.( + let strict_mode = config.strict_mode in + StrictModeSettings.iter (fun kind -> fprintf o "%s\n" (string_of_kind kind)) strict_mode) let section_if_nonempty o header f = function | [] -> () @@ -430,614 +697,297 @@ end = struct strict o config end -let empty_config = { - ignores = []; - untyped = []; - declarations = []; - includes = []; - libs = []; - lint_severities = LintSettings.empty_severities; - strict_mode = StrictModeSettings.empty; - options = Opts.default_options -} +let empty_config = + { + rollouts = SMap.empty; + ignores = []; + untyped = []; + declarations = []; + includes = []; + libs = []; + lint_severities = LintSettings.empty_severities; + strict_mode = StrictModeSettings.empty; + options = Opts.default_options; + version = None; + } -let group_into_sections lines = +let group_into_sections : line list -> (section list, error) result = let is_section_header = Str.regexp "^\\[\\(.*\\)\\]$" in - let _, sections, section = - List.fold_left (fun (seen, sections, (section, lines)) (ln, line) -> - if Str.string_match is_section_header line 0 - then begin - let sections = (section, List.rev lines)::sections in + let rec loop acc lines = + acc + >>= fun (seen, sections, (section_name, section_lines)) -> + match lines with + | [] -> + let section = (section_name, Core_list.rev section_lines) in + Ok (Core_list.rev (section :: sections)) + | (ln, line) :: rest -> + if Str.string_match is_section_header line 0 then + let sections = (section_name, Core_list.rev section_lines) :: sections in let section_name = Str.matched_group 1 line in - if SSet.mem section_name seen - then error ln (spf "contains duplicate section: \"%s\"" section_name); - SSet.add section_name seen, sections, ((ln, section_name), []) - end else - seen, sections, (section, (ln, line)::lines) - ) (SSet.empty, [], ((0, ""), [])) lines in - let (section, section_lines) = section in - List.rev ((section, List.rev section_lines)::sections) + if SSet.mem section_name seen then + Error (ln, spf "contains duplicate section: \"%s\"" section_name) + else + let seen = SSet.add section_name seen in + let section = ((ln, section_name), []) in + let acc = Ok (seen, sections, section) in + loop acc rest + else + let acc = Ok (seen, sections, (section_name, (ln, line) :: 
section_lines)) in + loop acc rest + in + (fun lines -> loop (Ok (SSet.empty, [], ((0, ""), []))) lines) let trim_lines lines = lines - |> List.map (fun (_, line) -> String.trim line) - |> List.filter (fun s -> s <> "") + |> Core_list.map ~f:(fun (_, line) -> String.trim line) + |> Core_list.filter ~f:(fun s -> s <> "") let trim_labeled_lines lines = lines - |> List.map (fun (label, line) -> (label, String.trim line)) - |> List.filter (fun (_, s) -> s <> "") - -let less_or_equal_curr_version = Version_regex.less_than_or_equal_to_version (Flow_version.version) + |> Core_list.map ~f:(fun (label, line) -> (label, String.trim line)) + |> Core_list.filter ~f:(fun (_, s) -> s <> "") (* parse [include] lines *) -let parse_includes config lines = +let parse_includes lines config = let includes = trim_lines lines in - { config with includes; } + Ok ({ config with includes }, []) -let parse_libs config lines = +let parse_libs lines config : (config * warning list, error) result = let libs = trim_lines lines in - { config with libs; } + Ok ({ config with libs }, []) -let parse_ignores config lines = +let parse_ignores lines config = let ignores = trim_lines lines in - { config with ignores; } + Ok ({ config with ignores }, []) -let parse_untyped config lines = +let parse_untyped lines config = let untyped = trim_lines lines in - { config with untyped; } + Ok ({ config with untyped }, []) -let parse_declarations config lines = +let parse_declarations lines config = let declarations = trim_lines lines in - { config with declarations; } - -let parse_options config lines = - let open Opts in - let (>>=) = Core_result.(>>=) in - let options = parse config.options lines - |> define_opt "emoji" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with emoji = v;} - ); - } - - |> define_opt "esproposal.class_instance_fields" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_esproposal_feature_flag ~allow_enable:true; - setter = (fun opts v -> Ok { - opts with esproposal_class_instance_fields = v; - }); - } - - |> define_opt "esproposal.class_static_fields" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_esproposal_feature_flag ~allow_enable:true; - setter = (fun opts v -> Ok { - opts with esproposal_class_static_fields = v; - }); - } - - |> define_opt "esproposal.decorators" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_esproposal_feature_flag; - setter = (fun opts v -> Ok { - opts with esproposal_decorators = v; - }); - } - - |> define_opt "esproposal.export_star_as" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_esproposal_feature_flag ~allow_enable:true; - setter = (fun opts v -> Ok { - opts with esproposal_export_star_as = v; - }); - } - - |> define_opt "esproposal.optional_chaining" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_esproposal_feature_flag ~allow_enable:true; - setter = (fun opts v -> Ok { - opts with esproposal_optional_chaining = v; - }); - } - - |> define_opt "esproposal.nullish_coalescing" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_esproposal_feature_flag ~allow_enable:true; - setter = (fun opts v -> Ok { - opts with esproposal_nullish_coalescing = v; - }); - } - - |> define_opt "facebook.fbt" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_string; - setter = (fun opts v -> Ok { - opts with facebook_fbt = Some v; - }); - } + Ok ({ config with declarations }, []) - |> 
define_opt "file_watcher" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_enum [ - "none", Options.NoFileWatcher; - "dfind", Options.DFind; - "watchman", Options.Watchman; - ]; - setter = (fun opts v -> Ok { - opts with file_watcher = Some v; - }); - } +let parse_options lines config : (config * warning list, error) result = + Opts.parse config.options lines + >>= (fun (options, warnings) -> Ok ({ config with options }, warnings)) - |> define_opt "include_warnings" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with include_warnings = v;} - ); - } - - |> define_opt "merge_timeout" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts v -> - let merge_timeout = if v = 0 then None else Some v in - Ok {opts with merge_timeout} - ); - } - - |> define_opt "module.system.haste.name_reducers" { - initializer_ = INIT_FN (fun opts -> { - opts with haste_name_reducers = []; - }); - flags = [ALLOW_DUPLICATE]; - optparser = (fun str -> - match optparse_mapping str with - | Ok (pattern, template) -> Ok (Str.regexp pattern, template) - | Error _ as err -> err - ); - setter = (fun opts v -> Ok { - opts with haste_name_reducers = v::(opts.haste_name_reducers); - }); - } - - |> define_opt "module.system.haste.paths.blacklist" { - initializer_ = INIT_FN (fun opts -> { - opts with haste_paths_blacklist = []; - }); - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> Ok { - opts with haste_paths_blacklist = v::(opts.haste_paths_blacklist); - }); - } - - |> define_opt "module.system.haste.paths.whitelist" { - initializer_ = INIT_FN (fun opts -> { - opts with haste_paths_whitelist = []; - }); - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> Ok { - opts with haste_paths_whitelist = v::(opts.haste_paths_whitelist); - }); - } - - |> define_opt "module.system.haste.use_name_reducers" { - initializer_ = INIT_FN (fun opts -> { - opts with haste_use_name_reducers = false; - }); - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with haste_use_name_reducers = v;} - ); - } - - |> define_opt "log.file" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_filepath; - setter = (fun opts v -> Ok { - opts with log_file = Some v; - }); - } - - |> define_opt "max_header_tokens" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts v -> - Ok {opts with max_header_tokens = v;} - ); - } - - |> define_opt "module.ignore_non_literal_requires" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with ignore_non_literal_requires = v;} - ); - } - - |> define_opt "module.file_ext" { - initializer_ = INIT_FN (fun opts -> { - opts with module_file_exts = SSet.empty; - }); - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> - if String_utils.string_ends_with v Files.flow_ext - then Error ( - "Cannot use file extension '" ^ - v ^ - "' since it ends with the reserved extension '"^ - Files.flow_ext^ - "'" - ) else - let module_file_exts = SSet.add v opts.module_file_exts in - Ok {opts with module_file_exts;} - ); - } - - |> define_opt "module.name_mapper" { - initializer_ = USE_DEFAULT; - flags = [ALLOW_DUPLICATE]; - optparser = (fun str -> - match optparse_mapping str with - | Ok (pattern, template) -> Ok (Str.regexp pattern, template) - | Error _ as err -> err 
- ); - setter = (fun opts v -> - let module_name_mappers = v :: opts.module_name_mappers in - Ok {opts with module_name_mappers;} - ); - } - - |> define_opt "module.name_mapper.extension" { - initializer_ = USE_DEFAULT; - flags = [ALLOW_DUPLICATE]; - optparser = (fun str -> - match optparse_mapping str with - | Ok (file_ext, template) -> Ok ( - Str.regexp ("^\\(.*\\)\\." ^ (Str.quote file_ext) ^ "$"), - template - ) - | Error _ as err -> err - ); - setter = (fun opts v -> - let module_name_mappers = v :: opts.module_name_mappers in - Ok {opts with module_name_mappers;} - ); - } - - |> define_opt "module.resolver" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_filepath; - setter = (fun opts v -> Ok { - opts with module_resolver = Some v; - }); - } - - |> define_opt "module.system" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_enum [ - ("node", Options.Node); - ("haste", Options.Haste); - ]; - setter = (fun opts v -> Ok { - opts with module_system = v; - }); - } - - |> define_opt "module.system.node.resolve_dirname" { - initializer_ = INIT_FN (fun opts -> { - opts with node_resolver_dirnames = []; - }); - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> - let node_resolver_dirnames = v :: opts.node_resolver_dirnames in - Ok {opts with node_resolver_dirnames;} - ); - } - - |> define_opt "module.use_strict" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with modules_are_use_strict = v;} - ); - } - - |> define_opt "munge_underscores" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with munge_underscores = v;} - ); - } - - |> define_opt "name" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_string; - setter = (fun opts v -> - FlowEventLogger.set_root_name (Some v); - Ok {opts with root_name = Some v;} - ); - } - - |> define_opt "server.max_workers" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts v -> - Ok {opts with max_workers = v;} - ); - } - - |> define_opt "all" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with all = v;} - ); - } - - |> define_opt "weak" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with weak = v;} - ); - } - - |> define_opt "suppress_comment" { - initializer_ = INIT_FN (fun opts -> - {opts with suppress_comments = [];} - ); - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> - Str.split_delim version_regex v - |> String.concat (">=" ^ less_or_equal_curr_version) - |> String.escaped - |> Core_result.return - >>= optparse_regexp - >>= fun v -> Ok { opts with suppress_comments = v::(opts.suppress_comments) } - ); - } - - |> define_opt "suppress_type" { - initializer_ = INIT_FN (fun opts -> - {opts with suppress_types = SSet.empty;} - ); - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> Ok { - opts with suppress_types = SSet.add v opts.suppress_types; - }); - } - - |> define_opt "temp_dir" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_string; - setter = (fun opts v -> Ok { - opts with temp_dir = v; - }); - } - - |> define_opt "saved_state.fetcher" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_enum [ - ("none", Options.Dummy_fetcher); - ("local", 
Options.Local_fetcher); - ("fb", Options.Fb_fetcher); - ]; - setter = (fun opts saved_state_fetcher -> Ok { - opts with saved_state_fetcher; - }); - } - - |> define_opt "sharedmemory.dirs" { - initializer_ = USE_DEFAULT; - flags = [ALLOW_DUPLICATE]; - optparser = optparse_string; - setter = (fun opts v -> Ok { - opts with shm_dirs = opts.shm_dirs @ [v]; - }); - } - - |> define_opt "sharedmemory.minimum_available" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts shm_min_avail -> Ok { - opts with shm_min_avail; - }); - } - - |> define_opt "sharedmemory.dep_table_pow" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts shm_dep_table_pow -> Ok { - opts with shm_dep_table_pow; - }); - } - - |> define_opt "sharedmemory.hash_table_pow" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts shm_hash_table_pow -> Ok { - opts with shm_hash_table_pow; - }); - } - - |> define_opt "sharedmemory.log_level" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts shm_log_level -> Ok { - opts with shm_log_level; - }); - } - - |> define_opt "traces" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts v -> - Ok {opts with traces = v;} - ); - } - - |> define_opt "max_literal_length" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_uint; - setter = (fun opts v -> - Ok {opts with max_literal_length = v;} - ); - } - - - |> define_opt "experimental.cancelable_rechecks" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with enable_cancelable_rechecks = v;} - ); - } - - |> define_opt "experimental.const_params" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with enable_const_params = v;} - ); - } - - |> define_opt "experimental.strict_call_arity" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with enforce_strict_call_arity = v;} - ); - } - - |> define_opt "experimental.well_formed_exports" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with enforce_well_formed_exports = v;} - ); - } - - |> define_opt "no_flowlib" { - initializer_ = USE_DEFAULT; - flags = []; - optparser = optparse_boolean; - setter = (fun opts v -> - Ok {opts with no_flowlib = v;} - ); - } - - |> get_defined_opts - in - {config with options} - -let parse_version config lines = - let potential_versions = lines - |> List.map (fun (ln, line) -> ln, String.trim line) - |> List.filter (fun (_, s) -> s <> "") +let parse_version lines config = + let potential_versions = + lines + |> Core_list.map ~f:(fun (ln, line) -> (ln, String.trim line)) + |> Core_list.filter ~f:(fun (_, s) -> s <> "") in - match potential_versions with | (ln, version_str) :: _ -> if not (Semver.is_valid_range version_str) then - error ln ( - spf - "Expected version to match %%d.%%d.%%d, with an optional leading ^, got %s" - version_str - ); - - let options = { config.options with Opts.version = Some version_str } in - { config with options } - | _ -> config - -let parse_lints config lines = - match lines |> trim_labeled_lines |> LintSettings.of_lines config.lint_severities with - | Ok lint_severities -> {config with lint_severities} - | Error (ln, msg) -> error ln msg - -let parse_strict 
config lines = - match lines |> trim_labeled_lines |> StrictModeSettings.of_lines with - | Ok strict_mode -> {config with strict_mode} - | Error (ln, msg) -> error ln msg - -let parse_section config ((section_ln, section), lines) = - match section, lines with - | "", [] when section_ln = 0 -> config - | "", (ln, _)::_ when section_ln = 0 -> - error ln "Unexpected config line not in any section" - | "include", _ -> parse_includes config lines - | "ignore", _ -> parse_ignores config lines - | "libs", _ -> parse_libs config lines - | "lints", _ -> parse_lints config lines - | "declarations", _ -> parse_declarations config lines - | "strict", _ -> parse_strict config lines - | "options", _ -> parse_options config lines - | "untyped", _ -> parse_untyped config lines - | "version", _ -> parse_version config lines - | _ -> error section_ln (spf "Unsupported config section: \"%s\"" section) - -let parse config lines = - let sections = group_into_sections lines in - List.fold_left parse_section config sections + Error + ( ln, + spf + "Expected version to match %%d.%%d.%%d, with an optional leading ^, got %s" + version_str ) + else + Ok ({ config with version = Some version_str }, []) + | _ -> Ok (config, []) + +let parse_lints lines config : (config * warning list, error) result = + let lines = trim_labeled_lines lines in + LintSettings.of_lines config.lint_severities lines + >>= (fun lint_severities -> Ok ({ config with lint_severities }, [])) + +let parse_strict lines config = + let lines = trim_labeled_lines lines in + StrictModeSettings.of_lines lines >>= (fun strict_mode -> Ok ({ config with strict_mode }, [])) + +(* Basically fold_left but with early exit when f returns an Error *) +let rec fold_left_stop_on_error + (l : 'elem list) ~(acc : 'acc) ~(f : 'acc -> 'elem -> ('acc, 'error) result) : + ('acc, 'error) result = + match l with + | [] -> Ok acc + | elem :: rest -> f acc elem >>= (fun acc -> fold_left_stop_on_error rest ~acc ~f) + +(* Rollouts are based on randomness, but we want it to be stable from run to run. So we seed our + * pseudo random number generator with + * + * 1. The hostname + * 2. The user + * 3. The name of the rollout + *) +let calculate_pct rollout_name = + let state = Xx.init () in + Xx.update state (Unix.gethostname ()); + Xx.update_int state (Unix.getuid ()); + Xx.update state rollout_name; + let hash = Xx.digest state in + Xx.modulo hash 100 + +(* The optional rollout section has 0 or more lines. Each line defines a single rollout. For example + * + * [rollouts] + * + * testA=40% on, 60% off + * testB=50% blue, 20% yellow, 30% pink + * + * The first line defines a rollout named "testA" with two groups. + * The second line defines a rollout named "testB" with three groups. + * + * Each rollout's groups must sum to 100. 
+ *) +let parse_rollouts config lines = + Option.value_map lines ~default:(Ok config) ~f:(fun lines -> + let lines = trim_labeled_lines lines in + fold_left_stop_on_error lines ~acc:SMap.empty ~f:(fun rollouts (line_num, line) -> + (* A rollout's name can only contain [a-zA-Z0-9._] *) + if Str.string_match (Str.regexp "^\\([a-zA-Z0-9._]+\\)=\\(.*\\)$") line 0 then + let rollout_name = Str.matched_group 1 line in + let rollout_values_raw = Str.matched_group 2 line in + let my_pct = calculate_pct rollout_name in + fold_left_stop_on_error + (* Groups are delimited with commas *) + Str.(split (regexp ",") rollout_values_raw) + ~acc:(None, SSet.empty, 0) + ~f:(fun (enabled_group, disabled_groups, pct_total) raw_group -> + let raw_group = String.trim raw_group in + (* A rollout group has the form "X% label", where label can only contain + * [a-zA-Z0-9._] *) + if Str.string_match (Str.regexp "^\\([0-9]+\\)% \\([a-zA-Z0-9._]+\\)$") raw_group 0 + then + let group_pct = Str.matched_group 1 raw_group |> int_of_string in + let group_name = Str.matched_group 2 raw_group in + if enabled_group = Some group_name || SSet.mem group_name disabled_groups then + Error + ( line_num, + spf + "Groups must have unique names. There is more than one %S group" + group_name ) + else + let (enabled_group, disabled_groups) = + match enabled_group with + | None when my_pct < group_pct + pct_total -> + (* This is the first group that passes my_pct, so we enable it *) + (Some group_name, disabled_groups) + | _ -> + (* Either we've already chosen the enabled group or we haven't passed my_pct *) + (enabled_group, SSet.add group_name disabled_groups) + in + Ok (enabled_group, disabled_groups, pct_total + group_pct) + else + Error + ( line_num, + "Malformed rollout group. A group should be a percentage and an identifier, " + ^ "like `50% on`" )) + >>= fun (enabled_group, disabled_groups, pct_total) -> + if pct_total = 100 then + if SMap.mem rollout_name rollouts then + Error + ( line_num, + spf + "Rollouts must have unique names. There already is a %S rollout" + rollout_name ) + else + match enabled_group with + | None -> Error (line_num, "Invariant violation: failed to choose a group") + | Some enabled_group -> + Ok (SMap.add rollout_name { enabled_group; disabled_groups } rollouts) + else + Error + ( line_num, + spf "Rollout groups must sum to 100%%. %S sums to %d%%" rollout_name pct_total ) + else + Error + ( line_num, + "Malformed rollout. A rollout should be an identifier followed by a list of groups, " + ^ "like `myRollout=10% on, 50% off`" )) + >>= (fun rollouts -> Ok { config with rollouts })) + +let parse_section config ((section_ln, section), lines) : (config * warning list, error) result = + match (section, lines) with + | ("", []) when section_ln = 0 -> Ok (config, []) + | ("", (ln, _) :: _) when section_ln = 0 -> + Error (ln, "Unexpected config line not in any section") + | ("include", _) -> parse_includes lines config + | ("ignore", _) -> parse_ignores lines config + | ("libs", _) -> parse_libs lines config + | ("lints", _) -> parse_lints lines config + | ("declarations", _) -> parse_declarations lines config + | ("strict", _) -> parse_strict lines config + | ("options", _) -> parse_options lines config + | ("untyped", _) -> parse_untyped lines config + | ("version", _) -> parse_version lines config + | _ -> Ok (config, [(section_ln, spf "Unsupported config section: \"%s\"" section)]) + +let parse = + (* Filter every section (except the rollouts section) for disabled rollouts.
For example, if a + * line starts with (my_rollout=on) and the "on" group is not enabled for the "my_rollout" + * rollout, then drop the line completely. + * + * Lines with enabled rollouts just have the prefix stripped + *) + let filter_sections_by_rollout sections config = + (* The rollout prefix looks like `(rollout_name=group_name)` *) + let rollout_regex = Str.regexp "^(\\([a-zA-Z0-9._]+\\)=\\([a-zA-Z0-9._]+\\))\\(.*\\)$" in + fold_left_stop_on_error sections ~acc:[] ~f:(fun acc (section_name, lines) -> + fold_left_stop_on_error lines ~acc:[] ~f:(fun acc (line_num, line) -> + if Str.string_match rollout_regex line 0 then + let rollout_name = Str.matched_group 1 line in + let group_name = Str.matched_group 2 line in + let line = Str.matched_group 3 line in + match SMap.get rollout_name config.rollouts with + | None -> Error (line_num, spf "Unknown rollout %S" rollout_name) + | Some { enabled_group; disabled_groups } -> + if enabled_group = group_name then + Ok ((line_num, line) :: acc) + else if SSet.mem group_name disabled_groups then + Ok acc + else + Error (line_num, spf "Unknown group %S in rollout %S" group_name rollout_name) + else + Ok ((line_num, line) :: acc)) + >>= (fun lines -> Ok ((section_name, Core_list.rev lines) :: acc))) + >>= (fun sections -> Ok (config, Core_list.rev sections)) + in + let process_rollouts config sections = + let rollout_section_lines = ref None in + let sections = + Core_list.filter sections ~f:(function + | ((_, "rollouts"), lines) -> + rollout_section_lines := Some lines; + false + | _ -> true) + in + parse_rollouts config !rollout_section_lines >>= filter_sections_by_rollout sections + in + let rec loop acc sections = + acc + >>= fun (config, warn_acc) -> + match sections with + | [] -> Ok (config, Core_list.rev warn_acc) + | section :: rest -> + parse_section config section + >>= fun (config, warnings) -> + let acc = Ok (config, Core_list.rev_append warnings warn_acc) in + loop acc rest + in + fun config lines -> + group_into_sections lines + >>= process_rollouts config + >>= (fun (config, sections) -> loop (Ok (config, [])) sections) let is_not_comment = - let comment_regexps = [ - Str.regexp_string "#"; (* Line starts with # *) - Str.regexp_string ";"; (* Line starts with ; *) - Str.regexp_string "\240\159\146\169"; (* Line starts with poop emoji *) - ] in + let comment_regexps = + [ + Str.regexp_string "#"; + (* Line starts with # *) + Str.regexp_string ";"; + (* Line starts with ; *) + Str.regexp_string "\240\159\146\169"; + (* Line starts with poop emoji *) + + ] + in fun (_, line) -> - not (List.exists - (fun (regexp) -> Str.string_match regexp line 0) - comment_regexps) - -let default_lint_severities = [ - Lints.DeprecatedCallSyntax, (Severity.Err, None); -] + not (Core_list.exists ~f:(fun regexp -> Str.string_match regexp line 0) comment_regexps) let read filename = let contents = Sys_utils.cat_no_fail filename in @@ -1046,50 +996,64 @@ let read filename = Xx.update xx_state contents; Xx.digest xx_state in - let lines = contents + let lines = + contents |> Sys_utils.split_lines - |> List.mapi (fun i line -> (i+1, String.trim line)) - |> List.filter is_not_comment in - let config = { - empty_config with - lint_severities = List.fold_left (fun acc (lint, severity) -> - LintSettings.set_value lint severity acc - ) empty_config.lint_severities default_lint_severities - } in - parse config lines, hash + |> Core_list.mapi ~f:(fun i line -> (i + 1, String.trim line)) + |> Core_list.filter ~f:is_not_comment + in + (lines, hash) + +let 
get_empty_config () = + let lint_severities = + Core_list.fold_left + ~f:(fun acc (lint, severity) -> LintSettings.set_value lint severity acc) + ~init:empty_config.lint_severities + LintSettings.default_lint_severities + in + { empty_config with lint_severities } let init ~ignores ~untyped ~declarations ~includes ~libs ~options ~lints = - let ignores_lines = List.map (fun s -> (1, s)) ignores in - let untyped_lines = List.map (fun s -> (1, s)) untyped in - let declarations_lines = List.map (fun s -> (1, s)) declarations in - let includes_lines = List.map (fun s -> (1, s)) includes in - let options_lines = List.map (fun s -> (1, s)) options in - let lib_lines = List.map (fun s -> (1, s)) libs in - let lint_lines = List.map (fun s -> (1, s)) lints in - let config = parse_ignores empty_config ignores_lines in - let config = parse_untyped config untyped_lines in - let config = parse_declarations config declarations_lines in - let config = parse_includes config includes_lines in - let config = parse_options config options_lines in - let config = parse_libs config lib_lines in - let config = parse_lints config lint_lines in - config + let ( >>= ) + (acc : (config * warning list, error) result) + (fn : config -> (config * warning list, error) result) = + let ( >>= ) = Core_result.( >>= ) in + acc + >>= fun (config, warn_acc) -> + fn config >>= (fun (config, warnings) -> Ok (config, Core_list.rev_append warnings warn_acc)) + in + let ignores_lines = Core_list.map ~f:(fun s -> (1, s)) ignores in + let untyped_lines = Core_list.map ~f:(fun s -> (1, s)) untyped in + let declarations_lines = Core_list.map ~f:(fun s -> (1, s)) declarations in + let includes_lines = Core_list.map ~f:(fun s -> (1, s)) includes in + let options_lines = Core_list.map ~f:(fun s -> (1, s)) options in + let lib_lines = Core_list.map ~f:(fun s -> (1, s)) libs in + let lint_lines = Core_list.map ~f:(fun s -> (1, s)) lints in + Ok (empty_config, []) + >>= parse_ignores ignores_lines + >>= parse_untyped untyped_lines + >>= parse_declarations declarations_lines + >>= parse_includes includes_lines + >>= parse_options options_lines + >>= parse_libs lib_lines + >>= parse_lints lint_lines let write config oc = Pp.config oc config (* We should restart every time the config changes, so it's generally cool to cache it *) let cache = ref None -let get_from_cache ?(allow_cache=true) filename = +let get_from_cache ?(allow_cache = true) filename = match !cache with - | Some (cached_filename, _, _ as cached_data) when allow_cache -> - assert (filename = cached_filename); - cached_data + | Some ((cached_filename, _, _) as cached_data) when allow_cache -> + assert (filename = cached_filename); + cached_data | _ -> - let config, hash = read filename in - let cached_data = filename, config, hash in - cache := Some cached_data; - filename, config, hash + let (lines, hash) = read filename in + let config = parse (get_empty_config ()) lines in + let cached_data = (filename, config, hash) in + cache := Some cached_data; + cached_data let get ?allow_cache filename = let (_, config, _) = get_from_cache ?allow_cache filename in @@ -1101,68 +1065,151 @@ let get_hash ?allow_cache filename = (* Accessors *) +let enabled_rollouts config = SMap.map (fun { enabled_group; _ } -> enabled_group) config.rollouts + (* completely ignored files (both module resolving and typing) *) let ignores config = config.ignores + (* files that should be treated as untyped *) let untyped config = config.untyped + (* files that should be treated as declarations *) let 
declarations config = config.declarations + (* non-root include paths *) let includes config = config.includes + (* library paths. no wildcards *) let libs config = config.libs (* options *) +let abstract_locations c = c.options.Opts.abstract_locations + let all c = c.options.Opts.all + +let allow_skip_direct_dependents c = c.options.Opts.allow_skip_direct_dependents + +let cache_direct_dependents c = c.options.Opts.cache_direct_dependents + let emoji c = c.options.Opts.emoji + let max_literal_length c = c.options.Opts.max_literal_length -let enable_cancelable_rechecks c = c.options.Opts.enable_cancelable_rechecks + let enable_const_params c = c.options.Opts.enable_const_params + let enforce_strict_call_arity c = c.options.Opts.enforce_strict_call_arity + let enforce_well_formed_exports c = c.options.Opts.enforce_well_formed_exports + +let enforce_well_formed_exports_whitelist c = c.options.Opts.enforce_well_formed_exports_whitelist + +let enums c = c.options.Opts.enums + let esproposal_class_instance_fields c = c.options.Opts.esproposal_class_instance_fields + let esproposal_class_static_fields c = c.options.Opts.esproposal_class_static_fields + let esproposal_decorators c = c.options.Opts.esproposal_decorators + let esproposal_export_star_as c = c.options.Opts.esproposal_export_star_as + let esproposal_optional_chaining c = c.options.Opts.esproposal_optional_chaining + let esproposal_nullish_coalescing c = c.options.Opts.esproposal_nullish_coalescing + +let exact_by_default c = c.options.Opts.exact_by_default + let file_watcher c = c.options.Opts.file_watcher + +let facebook_fbs c = c.options.Opts.facebook_fbs + let facebook_fbt c = c.options.Opts.facebook_fbt + +let haste_module_ref_prefix c = c.options.Opts.haste_module_ref_prefix + let haste_name_reducers c = c.options.Opts.haste_name_reducers + let haste_paths_blacklist c = c.options.Opts.haste_paths_blacklist + let haste_paths_whitelist c = c.options.Opts.haste_paths_whitelist + let haste_use_name_reducers c = c.options.Opts.haste_use_name_reducers + let ignore_non_literal_requires c = c.options.Opts.ignore_non_literal_requires + let include_warnings c = c.options.Opts.include_warnings + +let lazy_mode c = c.options.Opts.lazy_mode + let log_file c = c.options.Opts.log_file + +let lsp_code_actions c = c.options.Opts.lsp_code_actions + +let max_files_checked_per_worker c = c.options.Opts.max_files_checked_per_worker + let max_header_tokens c = c.options.Opts.max_header_tokens + let max_workers c = c.options.Opts.max_workers + let merge_timeout c = c.options.Opts.merge_timeout + let module_file_exts c = c.options.Opts.module_file_exts + let module_name_mappers c = c.options.Opts.module_name_mappers + let module_resolver c = c.options.Opts.module_resolver + let module_resource_exts c = c.options.Opts.module_resource_exts + let module_system c = c.options.Opts.module_system + let modules_are_use_strict c = c.options.Opts.modules_are_use_strict + let munge_underscores c = c.options.Opts.munge_underscores + let no_flowlib c = c.options.Opts.no_flowlib + let node_resolver_dirnames c = c.options.Opts.node_resolver_dirnames + +let recursion_limit c = c.options.Opts.recursion_limit + let root_name c = c.options.Opts.root_name + let saved_state_fetcher c = c.options.Opts.saved_state_fetcher + let shm_dep_table_pow c = c.options.Opts.shm_dep_table_pow + let shm_dirs c = c.options.Opts.shm_dirs -let shm_global_size c = c.options.Opts.shm_global_size + let shm_hash_table_pow c = c.options.Opts.shm_hash_table_pow + let shm_heap_size c = 
c.options.Opts.shm_heap_size + let shm_log_level c = c.options.Opts.shm_log_level + let shm_min_avail c = c.options.Opts.shm_min_avail + let suppress_comments c = c.options.Opts.suppress_comments + let suppress_types c = c.options.Opts.suppress_types + let temp_dir c = c.options.Opts.temp_dir + let traces c = c.options.Opts.traces -let required_version c = c.options.Opts.version + +let trust_mode c = c.options.Opts.trust_mode + +let type_asserts c = c.options.Opts.type_asserts + +let types_first c = c.options.Opts.types_first + +let required_version c = c.version + +let wait_for_recheck c = c.options.Opts.wait_for_recheck + let weak c = c.options.Opts.weak (* global defaults for lint severities and strict mode *) let lint_severities c = c.lint_severities + let strict_mode c = c.strict_mode diff --git a/src/commands/config/flowConfig.mli b/src/commands/config/flowConfig.mli index 173055ceee5..547077d4096 100644 --- a/src/commands/config/flowConfig.mli +++ b/src/commands/config/flowConfig.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,85 +7,176 @@ type config -val get: ?allow_cache:bool -> string -> config -val get_hash: ?allow_cache:bool -> string -> Xx.hash -val empty_config: config - -val init: - ignores: string list -> - untyped: string list -> - declarations: string list -> - includes: string list -> - libs: string list -> - options: string list -> - lints: string list -> - config -val write: config -> out_channel -> unit +type warning = int * string + +type error = int * string + +val get : ?allow_cache:bool -> string -> (config * warning list, error) result + +val get_hash : ?allow_cache:bool -> string -> Xx.hash + +val empty_config : config + +val init : + ignores:string list -> + untyped:string list -> + declarations:string list -> + includes:string list -> + libs:string list -> + options:string list -> + lints:string list -> + (config * warning list, error) result + +val write : config -> out_channel -> unit (* Accessors *) (* completely ignored files (both module resolving and typing) *) -val ignores: config -> string list +val ignores : config -> string list + (* files that should be treated as untyped *) -val untyped: config -> string list +val untyped : config -> string list + (* files that should be treated as declarations *) -val declarations: config -> string list +val declarations : config -> string list + (* non-root include paths *) -val includes: config -> string list +val includes : config -> string list + (* library paths. 
no wildcards *) -val libs: config -> string list +val libs : config -> string list + +(* A map from the rollout's name to the enabled group's name *) +val enabled_rollouts : config -> string SMap.t (* options *) -val all: config -> bool -val emoji: config -> bool -val enable_cancelable_rechecks: config -> bool -val enable_const_params: config -> bool -val enforce_strict_call_arity: config -> bool -val enforce_well_formed_exports: config -> bool -val esproposal_class_instance_fields: config -> Options.esproposal_feature_mode -val esproposal_class_static_fields: config -> Options.esproposal_feature_mode -val esproposal_decorators: config -> Options.esproposal_feature_mode -val esproposal_export_star_as: config -> Options.esproposal_feature_mode -val esproposal_nullish_coalescing: config -> Options.esproposal_feature_mode -val esproposal_optional_chaining: config -> Options.esproposal_feature_mode -val facebook_fbt: config -> string option -val file_watcher: config -> Options.file_watcher option -val haste_name_reducers: config -> (Str.regexp * string) list -val haste_paths_blacklist: config -> string list -val haste_paths_whitelist: config -> string list -val haste_use_name_reducers: config -> bool -val ignore_non_literal_requires: config -> bool -val include_warnings: config -> bool -val log_file: config -> Path.t option -val max_header_tokens: config -> int -val max_literal_length: config -> int -val max_workers: config -> int -val merge_timeout: config -> int option -val module_file_exts: config -> SSet.t -val module_name_mappers: config -> (Str.regexp * string) list -val module_resolver: config -> Path.t option -val module_resource_exts: config -> SSet.t -val module_system: config -> Options.module_system -val modules_are_use_strict: config -> bool -val munge_underscores: config -> bool -val no_flowlib: config -> bool -val node_resolver_dirnames: config -> string list -val required_version: config -> string option -val root_name: config -> string option -val saved_state_fetcher: config -> Options.saved_state_fetcher -val shm_dep_table_pow: config -> int -val shm_dirs: config -> string list -val shm_global_size: config -> int -val shm_hash_table_pow: config -> int -val shm_heap_size: config -> int -val shm_log_level: config -> int -val shm_min_avail: config -> int -val suppress_comments: config -> Str.regexp list -val suppress_types: config -> SSet.t -val temp_dir: config -> string -val traces: config -> int -val weak: config -> bool +val abstract_locations : config -> bool + +val all : config -> bool + +val allow_skip_direct_dependents : config -> bool + +val cache_direct_dependents : config -> bool + +val emoji : config -> bool + +val enable_const_params : config -> bool + +val enforce_strict_call_arity : config -> bool + +val enforce_well_formed_exports : config -> bool + +val enforce_well_formed_exports_whitelist : config -> string list + +val enums : config -> bool + +val esproposal_class_instance_fields : config -> Options.esproposal_feature_mode + +val esproposal_class_static_fields : config -> Options.esproposal_feature_mode + +val esproposal_decorators : config -> Options.esproposal_feature_mode + +val esproposal_export_star_as : config -> Options.esproposal_feature_mode + +val esproposal_nullish_coalescing : config -> Options.esproposal_feature_mode + +val esproposal_optional_chaining : config -> Options.esproposal_feature_mode + +val exact_by_default : config -> bool + +val facebook_fbs : config -> string option + +val facebook_fbt : config -> string option + +val file_watcher 
: config -> Options.file_watcher option + +val haste_module_ref_prefix : config -> string option + +val haste_name_reducers : config -> (Str.regexp * string) list + +val haste_paths_blacklist : config -> string list + +val haste_paths_whitelist : config -> string list + +val haste_use_name_reducers : config -> bool + +val ignore_non_literal_requires : config -> bool + +val include_warnings : config -> bool + +val lazy_mode : config -> Options.lazy_mode option + +val log_file : config -> Path.t option + +val lsp_code_actions : config -> bool + +val max_files_checked_per_worker : config -> int + +val max_header_tokens : config -> int + +val max_literal_length : config -> int + +val max_workers : config -> int + +val merge_timeout : config -> int option + +val module_file_exts : config -> SSet.t + +val module_name_mappers : config -> (Str.regexp * string) list + +val module_resolver : config -> Path.t option + +val module_resource_exts : config -> SSet.t + +val module_system : config -> Options.module_system + +val modules_are_use_strict : config -> bool + +val munge_underscores : config -> bool + +val no_flowlib : config -> bool + +val node_resolver_dirnames : config -> string list + +val required_version : config -> string option + +val recursion_limit : config -> int + +val root_name : config -> string option + +val saved_state_fetcher : config -> Options.saved_state_fetcher + +val shm_dep_table_pow : config -> int + +val shm_dirs : config -> string list + +val shm_hash_table_pow : config -> int + +val shm_heap_size : config -> int + +val shm_log_level : config -> int + +val shm_min_avail : config -> int + +val suppress_comments : config -> Str.regexp list + +val suppress_types : config -> SSet.t + +val temp_dir : config -> string + +val traces : config -> int + +val trust_mode : config -> Options.trust_mode + +val type_asserts : config -> bool + +val types_first : config -> bool + +val wait_for_recheck : config -> bool + +val weak : config -> bool (* global defaults for lint suppressions and strict mode *) -val lint_severities: config -> Severity.severity LintSettings.t -val strict_mode: config -> StrictModeSettings.t +val lint_severities : config -> Severity.severity LintSettings.t + +val strict_mode : config -> StrictModeSettings.t diff --git a/src/commands/config/version_regex.ml b/src/commands/config/version_regex.ml index c78fd627797..9d0af8c3a8c 100644 --- a/src/commands/config/version_regex.ml +++ b/src/commands/config/version_regex.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
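(* A minimal sketch, not part of the patch: one way a caller might consume the
 * re-typed FlowConfig interface shown above. FlowConfig.get now returns
 * (config * warning list, error) result, where warning and error are both
 * (line number, message) pairs, and required_version exposes the raw
 * [version] constraint. The function name, the path argument, and the
 * printing below are illustrative assumptions only. *)
let report_config path =
  match FlowConfig.get ~allow_cache:false path with
  | Error (line, msg) ->
    (* A hard parse error: the .flowconfig could not be loaded at all. *)
    Printf.eprintf "%s:%d %s\n" path line msg
  | Ok (config, warnings) ->
    (* Warnings (e.g. an unsupported section name) are non-fatal; report and go on. *)
    List.iter (fun (line, msg) -> Printf.eprintf "%s:%d warning: %s\n" path line msg) warnings;
    (match FlowConfig.required_version config with
    | Some v -> Printf.printf "[version] constraint: %s\n" v
    | None -> ())

(* The `flow config check` subcommand introduced below follows the same shape:
 * the error and each warning are rendered as (line, message) issues. *)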
@@ -24,17 +24,27 @@ *) let less_than_or_equal_to_version = let int_of_string x = - try Pervasives.int_of_string x with Failure _ -> raise (Failure ("int_of_string: " ^ x)) + (try Pervasives.int_of_string x with Failure _ -> raise (Failure ("int_of_string: " ^ x))) in let rec all_nines str i len = - if i >= len then true - else if str.[i] = '9' then (all_nines [@tailcall]) str (succ i) len - else false + if i >= len then + true + else if str.[i] = '9' then + (all_nines [@tailcall]) str (succ i) len + else + false + in + let range_of_digit digit = + if digit = '0' then + "0" + else + "[0-" ^ String.make 1 digit ^ "]" in - let range_of_digit digit = if digit = '0' then "0" else "[0-"^(String.make 1 digit)^"]" in let union parts = - if List.length parts = 1 then List.hd parts - else "\\(" ^ (String.concat "\\|" parts) ^ "\\)" + if List.length parts = 1 then + List.hd parts + else + "\\(" ^ String.concat "\\|" parts ^ "\\)" in let rec range part = let len = String.length part in @@ -42,37 +52,44 @@ let less_than_or_equal_to_version = [range_of_digit part.[0]] else if all_nines part 0 len then let rest = range (String.sub part 1 (len - 1)) in - ("[0-9]" ^ (union rest))::rest - else begin + ("[0-9]" ^ union rest) :: rest + else let msd_str = String.sub part 0 1 in let rest = String.sub part 1 (len - 1) in let msd = int_of_string msd_str in - let x = msd_str ^ (union (range rest)) in + let x = msd_str ^ union (range rest) in if msd > 1 then - let prev_msd_range = range_of_digit (Char.chr (Char.code '0' + (pred msd))) in + let prev_msd_range = range_of_digit (Char.chr (Char.code '0' + pred msd)) in let rest = String.make (len - 1) '9' in let rest_range = range rest in - let prev_range = prev_msd_range ^ (union rest_range) in - x::prev_range::rest_range - else if msd = 1 then begin + let prev_range = prev_msd_range ^ union rest_range in + x :: prev_range :: rest_range + else if msd = 1 then let rest = String.make (len - 1) '9' in - x::(range rest) - end else [x] - end + x :: range rest + else + [x] in - let rec helper = function - | [] -> "" - | part::[] -> union (range part) - | part::rest -> - let x = part ^ "\\(\\." ^ (helper rest) ^ "\\)?" in - if part = "0" then + let rec helper : int list -> string = function + | [] -> "" + | [part] -> union (range (string_of_int part)) + | part :: rest -> + let str = string_of_int part in + let x = str ^ "\\(\\." ^ helper rest ^ "\\)?" in + if part = 0 then x else - let prev = part |> int_of_string |> pred |> string_of_int in - let rest = List.map (fun _ -> "\\(\\.[0-9]+\\)?") rest in - union [x; (union (range prev)) ^ (String.concat "" rest)] + let prev = pred part |> string_of_int in + let rest = Core_list.map ~f:(fun _ -> "\\(\\.[0-9]+\\)?") rest in + union [x; union (range prev) ^ String.concat "" rest] in fun version -> - version - |> Str.split (Str.regexp_string ".") - |> helper + let parts = + try Scanf.sscanf version "%u.%u.%u" (fun major minor patch -> [major; minor; patch]) with + | End_of_file -> + raise (Failure ("Unable to parse version " ^ version ^ ": does not match \"%u.%u.%u\"")) + | Scanf.Scan_failure err + | Failure err -> + raise (Failure ("Unable to parse version " ^ version ^ ": " ^ err)) + in + helper parts diff --git a/src/commands/configCommand.ml b/src/commands/configCommand.ml new file mode 100644 index 00000000000..0384a195e5e --- /dev/null +++ b/src/commands/configCommand.ml @@ -0,0 +1,150 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(***********************************************************************) +(* flow config command *) +(***********************************************************************) + +open CommandUtils + +let find_subcommand = + let spec = + { + CommandSpec.name = "config find"; + doc = "Return path to .flowconfig"; + usage = + Printf.sprintf + "Usage: %s config find [ROOT]\nReturn the path to the .flowconfig file\n\ne.g. %s config find /path/to/root" + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty |> flowconfig_name_flag |> json_flags |> anon "root" (optional string)); + } + in + let main flowconfig_name json pretty root () = + let root = guess_root flowconfig_name root |> Path.to_string in + FlowEventLogger.set_root (Some root); + if json || pretty then + Hh_json.( + let json = JSON_Object [("root", JSON_String root)] in + print_json_endline ~pretty json) + else + print_endline root + in + CommandSpec.command spec main + +let check_subcommand = + let spec = + { + CommandSpec.name = "config check"; + doc = "Validates the .flowconfig file"; + usage = + Printf.sprintf + "Usage: %s config check [FILE]\nValidates the .flowconfig file\n\ne.g. %s config check /path/to/.flowconfig" + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> flowconfig_name_flag + |> json_flags + |> root_flag + |> ignore_version_flag + |> anon "file" (optional string)); + } + in + (* If a flowconfig was passed in, confirm it exists; otherwise, search for it using the + --root and --flowconfig-name flags. *) + let find_flowconfig flowconfig_name root = function + | Some file -> + ( if not (Sys.file_exists file) then + let msg = Utils_js.spf "Could not find file %s" file in + FlowExitStatus.(exit ~msg Could_not_find_flowconfig) ); + let root = Path.make (Filename.dirname file) in + (file, root |> Path.to_string) + | None -> + let root = guess_root flowconfig_name root in + let file = Server_files_js.config_file flowconfig_name root in + (file, root |> Path.to_string) + in + let json_of_issue kind (line, msg) = + Hh_json.( + JSON_Object + [ + ("line", JSON_Number (string_of_int line)); + ("message", JSON_String msg); + ( "level", + JSON_String + (match kind with + | `Error -> "error" + | `Warning -> "warning") ); + ]) + in + let exit_with_json ~pretty json = + Hh_json.( + FlowExitStatus.( + let code = Invalid_flowconfig in + let json = JSON_Object (("errors", json) :: FlowExitStatus.json_props_of_t code) in + Hh_json.print_json_endline ~pretty json; + FlowExitStatus.unset_json_mode (); + FlowExitStatus.(exit code))) + in + let main flowconfig_name json pretty root ignore_version file () = + let (file, root) = find_flowconfig flowconfig_name root file in + FlowEventLogger.set_root (Some root); + match FlowConfig.get ~allow_cache:false file with + | Ok (config, []) -> + if not ignore_version then + assert_version config + else + () + | Ok (config, warnings) -> + if ignore_version then + if json || pretty then + Hh_json.JSON_Object [] |> Hh_json.json_to_string ~pretty |> print_endline + else + () + else ( + assert_version config; + if json || pretty then + Hh_json.( + let json = JSON_Array (List.map (json_of_issue `Warning) warnings) in + exit_with_json ~pretty json) + else + flowconfig_multi_error warnings + ) + | Error err -> + if json || pretty then + Hh_json.( + let json = JSON_Array [json_of_issue `Error 
err] in + exit_with_json ~pretty json) + else + flowconfig_multi_error [err] + in + CommandSpec.command spec main + +let command = + let spec = + { + CommandSpec.name = "config"; + doc = "Read or write the .flowconfig file"; + usage = + Printf.sprintf + "Usage: %s config SUBCOMMAND [ROOT]\nRead or write the .flowconfig file\n\nSUBCOMMANDS:\nfind: Return the path to the .flowconfig\n" + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> CommandUtils.from_flag + |> anon + "subcommand" + (required (command [("check", check_subcommand); ("find", find_subcommand)]))); + } + in + let main (cmd, argv) () = CommandUtils.run_command cmd argv in + CommandSpec.command spec main diff --git a/src/commands/coverageCommand.ml b/src/commands/coverageCommand.ml index 097785a0886..1a87fc31ed2 100644 --- a/src/commands/coverageCommand.ml +++ b/src/commands/coverageCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,122 +12,133 @@ open CommandUtils open Utils_js -let spec = { - CommandSpec. - name = "coverage"; - doc = "Shows coverage information for a given file"; - usage = Printf.sprintf - "Usage: %s coverage [OPTION]... [FILE]\n\n\ - e.g. %s coverage foo.js\n\ - or %s coverage < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> from_flag - |> flag "--color" no_arg - ~doc:("Print the file with colors showing which parts have unknown types. " ^ - "Cannot be used with --json or --pretty") - |> flag "--debug" no_arg - ~doc:("Print debugging info about each range in the file to stderr. " ^ - "Cannot be used with --json or --pretty") - |> path_flag - |> flag "--respect-pragma" no_arg ~doc:"" (* deprecated *) - |> flag "--all" no_arg - ~doc:"Ignore absence of @flow pragma" - |> anon "file" (optional string) - ) -} +let spec = + { + CommandSpec.name = "coverage"; + doc = "Shows coverage information for a given file"; + usage = + Printf.sprintf + "Usage: %s coverage [OPTION]... [FILE]\n\ne.g. %s coverage foo.js\nor %s coverage < foo.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> wait_for_recheck_flag + |> flag + "--color" + no_arg + ~doc: + ( "Print the file with colors showing which parts have unknown types " + ^ "(blue for 'empty' and red for 'any'). " + ^ "Cannot be used with --json or --pretty" ) + |> flag + "--debug" + no_arg + ~doc: + ( "Print debugging info about each range in the file to stderr. 
" + ^ "Cannot be used with --json or --pretty" ) + |> path_flag + |> flag "--respect-pragma" no_arg ~doc:"" (* deprecated *) + |> flag "--all" no_arg ~doc:"Ignore absence of @flow pragma" + |> flag "--show-trust" no_arg ~doc:"EXPERIMENTAL: Include trust information in output" + |> anon "file" (optional string)); + } let handle_error ~json ~pretty err = - if json - then ( - let open Hh_json in - let json = JSON_Object ["error", JSON_String err] in - prerr_json_endline ~pretty json; - ) else ( - prerr_endline err; - ) - -let accum_coverage (covered, total) (_loc, is_covered) = - if is_covered - then (covered + 1, total + 1) - else (covered, total + 1) + if json then + Hh_json.( + let json = JSON_Object [("error", JSON_String err)] in + prerr_json_endline ~pretty json) + else + prerr_endline err + +let accum_coverage (untainted, tainted, empty, total) (_loc, cov) = + match cov with + | Coverage_response.Uncovered -> (untainted, tainted, empty, total + 1) + | Coverage_response.Empty -> (untainted, tainted, empty + 1, total + 1) + | Coverage_response.Untainted -> (untainted + 1, tainted, empty, total + 1) + | Coverage_response.Tainted -> (untainted, tainted + 1, empty, total + 1) + +let accum_coverage_locs (untainted, tainted, empty, uncovered) (loc, cov) = + match cov with + | Coverage_response.Uncovered -> (untainted, tainted, empty, loc :: uncovered) + | Coverage_response.Empty -> (untainted, tainted, loc :: empty, loc :: uncovered) + | Coverage_response.Untainted -> (loc :: untainted, tainted, empty, uncovered) + | Coverage_response.Tainted -> (untainted, loc :: tainted, empty, uncovered) let colorize content from_offset to_offset color accum = if to_offset > from_offset then let substr = String.sub content from_offset (to_offset - from_offset) in - (Tty.Normal color, substr)::accum, to_offset - else accum, from_offset + ((Tty.Normal color, substr) :: accum, to_offset) + else + (accum, from_offset) -let debug_range (loc, is_covered) = Loc.( - prerr_endlinef "%d:%d,%d:%d: (%b)" - loc.start.line loc.start.column - loc._end.line loc._end.column - is_covered -) +let debug_range (loc, cov) = + Loc.( + prerr_endlinef + "%d:%d,%d:%d: %b" + loc.start.line + loc.start.column + loc._end.line + loc._end.column + (Coverage.to_bool cov)) -let rec colorize_file content last_offset accum = Loc.(function +let rec colorize_file content last_offset accum = function | [] -> colorize content last_offset (String.length content) Tty.Default accum - | (loc, is_covered)::rest -> - let offset, end_offset = loc.start.offset, loc._end.offset in - + | ((offset, end_offset), kind) :: rest -> (* catch up to the start of this range *) - let accum, offset = colorize content last_offset offset Tty.Default accum in - - let color = if not (is_covered) then Tty.Red else Tty.Default in - let accum, offset = colorize content offset end_offset color accum in + let (accum, offset) = colorize content last_offset offset Tty.Default accum in + let color = + match kind with + | Coverage_response.Uncovered -> Tty.Red + | Coverage_response.Empty -> Tty.Blue + | Coverage_response.Tainted -> Tty.Yellow + | Coverage_response.Untainted -> Tty.Default + in + let (accum, offset) = colorize content offset end_offset color accum in colorize_file content offset accum rest -) -let sort_ranges (a_loc, _) (b_loc, _) = Loc.(Pervasives.compare - (a_loc.start.offset, a_loc._end.offset) - (b_loc.start.offset, b_loc._end.offset) -) +let sort_ranges ((a_line, a_col), _) ((b_line, b_col), _) = + let line = a_line - b_line in + if line = 0 then + a_col - 
b_col + else + line -let rec split_overlapping_ranges accum = Loc.(function +let rec split_overlapping_ranges accum = function | [] -> accum - | range::[] -> range::accum - | (loc1, is_covered1)::(loc2, is_covered2)::rest -> - let accum, todo = - if loc1._end.offset < loc2.start.offset then - (* range 1 is completely before range 2, so consume range 1 *) - (loc1, is_covered1)::accum, - (loc2, is_covered2)::rest - - else if loc1.start.offset = loc2.start.offset then - (* range 1 and 2 start at the same place, so consume range 1 and + | [range] -> range :: accum + | (loc1, is_covered1) :: (loc2, is_covered2) :: rest -> + let ((loc1_start, loc1_end), (loc2_start, loc2_end)) = (loc1, loc2) in + let (accum, todo) = + if loc1_end < loc2_start then + (* range 1 is completely before range 2, so consume range 1 *) + ((loc1, is_covered1) :: accum, (loc2, is_covered2) :: rest) + else if loc1_start = loc2_start then + (* range 1 and 2 start at the same place, so consume range 1 and create a new range for the remainder of range 2, if any *) - let rest = - if loc1._end.offset <> loc2._end.offset then - let tail_loc = { loc2 with - start = { loc2.start with offset = loc1._end.offset + 1 } - } in - List.sort sort_ranges ( - (loc1, is_covered1):: - (tail_loc, is_covered2):: - rest - ) - else - (loc1, is_covered1)::rest - in - accum, rest - - else if loc1._end.offset = loc2._end.offset then - (* range 1 and 2 end at the same place, so split range 1 and consume + let rest = + if loc1_end <> loc2_end then + let tail_loc = (loc1_end + 1, loc2_end) in + List.sort sort_ranges ((loc1, is_covered1) :: (tail_loc, is_covered2) :: rest) + else + (loc1, is_covered1) :: rest + in + (accum, rest) + else if loc1_end = loc2_end then + (* range 1 and 2 end at the same place, so split range 1 and consume the first part, which doesn't overlap *) - let head_loc = { loc1 with - _end = { loc1._end with offset = loc2.start.offset - 1 } - } in - (head_loc, is_covered1)::accum, - (loc2, is_covered2)::rest - - else if loc1._end.offset < loc2._end.offset then - (* TODO: Given that at this point we also have loc1.start.offset < + let head_loc = (loc1_start, loc2_start - 1) in + ((head_loc, is_covered1) :: accum, (loc2, is_covered2) :: rest) + else if loc1_end < loc2_end then + (* TODO: Given that at this point we also have loc1.start.offset < loc2.start.offset, it means that range 1 and 2 overlap but don't nest. Ideally, this case should never arise: we should be able to guarantee the invariant that ranges (same as "spans" in @@ -143,87 +154,152 @@ let rec split_overlapping_ranges accum = Loc.(function range1 or range2 is covered (because the alternative is 1-token islands of uncovered stuff). 
*) - let head_loc = { loc1 with - _end = { loc1._end with offset = loc2.start.offset - 1 } - } in - let overlap_loc = { loc1 with - start = loc2.start - } in - let tail_loc = { loc2 with - start = { loc2.start with offset = loc1._end.offset + 1 } - } in - (head_loc, is_covered1)::(overlap_loc, is_covered1 || is_covered2)::accum, - (tail_loc, is_covered2)::rest - - else - (* range 2 is in the middle of range 1, so split range 1 and consume + let head_loc = (loc1_start, loc2_start - 1) in + let overlap_loc = (loc2_start, loc1_end) in + let tail_loc = (loc1_end + 1, loc2_end) in + ( (head_loc, is_covered1) + :: (overlap_loc, Coverage.m_or (is_covered1, is_covered2)) + :: accum, + (tail_loc, is_covered2) :: rest ) + else + (* range 2 is in the middle of range 1, so split range 1 and consume the first part, which doesn't overlap, and then recurse on range2::range1tail::rest *) - let head_loc = { loc1 with - _end = { loc1._end with offset = loc2.start.offset - 1 } - } in - let tail_loc = { loc1 with - start = { loc1.start with offset = loc2._end.offset + 1 } - } in - let todo = - (loc2, is_covered2):: - (tail_loc, is_covered1):: - rest - in - (head_loc, is_covered1)::accum, - List.sort sort_ranges todo - in - split_overlapping_ranges accum todo -) + let head_loc = (loc1_start, loc2_start - 1) in + let tail_loc = (loc2_end + 1, loc1_end) in + let todo = (loc2, is_covered2) :: (tail_loc, is_covered1) :: rest in + ((head_loc, is_covered1) :: accum, Core_list.sort ~cmp:sort_ranges todo) + in + split_overlapping_ranges accum todo -let handle_response ~json ~pretty ~color ~debug (types : (Loc.t * bool) list) content = - if debug then List.iter debug_range types; +let handle_response + ~json + ~pretty + ~strip_root + ~color + ~debug + ~trust + (types : (Loc.t * Coverage_response.expression_coverage) list) + content = + if debug then Core_list.iter ~f:debug_range types; - begin if color then - let types = split_overlapping_ranges [] types |> List.rev in - let colors, _ = colorize_file content 0 [] types in - Tty.cprint (List.rev colors); + let offset_table = lazy (Offset_utils.make content) in + if color then ( + let coverage_offsets = + let offset_table = Lazy.force offset_table in + let loc_to_offset_pair loc = + Loc.(Offset_utils.offset offset_table loc.start, Offset_utils.offset offset_table loc._end) + in + Core_list.map ~f:(fun (loc, covered) -> (loc_to_offset_pair loc, covered)) types + in + let coverage_offsets = Core_list.rev (split_overlapping_ranges [] coverage_offsets) in + let (colors, _) = colorize_file content 0 [] coverage_offsets in + Tty.cprint ~color_mode:Tty.Color_Always (Core_list.rev colors); print_endline "" - end; - - let covered, total = List.fold_left accum_coverage (0, 0) types in - let percent = if total = 0 then 100. else (float_of_int covered /. float_of_int total) *. 100. in + ); + let (untainted, tainted, empty, total) = + Core_list.fold_left ~f:accum_coverage ~init:(0, 0, 0, 0) types + in + (* In trust mode, we only consider untainted locations covered. In normal mode we consider both *) + let covered = + if trust then + untainted + else + untainted + tainted + in + let percent = + if total = 0 then + 100. + else + float_of_int covered /. float_of_int total *. 100. 
+ in if json then - let covered_locs, uncovered_locs = - let covered, uncovered = List.partition (fun (_, is_covered) -> is_covered) types in - let locs_of = List.map (fun (loc, _) -> loc) in - locs_of covered, locs_of uncovered + let offset_table = Some (Lazy.force offset_table) in + let (untainted_locs, tainted_locs, empty_locs, uncovered_locs) = + let (untainted, tainted, empty, uncovered) = + Core_list.fold_left ~f:accum_coverage_locs ~init:([], [], [], []) types + in + (Core_list.rev untainted, Core_list.rev tainted, Core_list.rev empty, Core_list.rev uncovered) in - let open Hh_json in - JSON_Object [ - "expressions", JSON_Object [ - "covered_count", int_ covered; - "covered_locs", JSON_Array (covered_locs |> List.map Reason.json_of_loc); - "uncovered_count", int_ (total - covered); - "uncovered_locs", JSON_Array (uncovered_locs |> List.map Reason.json_of_loc); - ]; - ] - |> print_json_endline ~pretty + Hh_json.( + let covered_data = + if trust then + [ + ("untainted_count", int_ untainted); + ( "untainted_locs", + JSON_Array + (Core_list.map ~f:(Reason.json_of_loc ~strip_root ~offset_table) untainted_locs) ); + ("tainted_count", int_ tainted); + ( "tainted_locs", + JSON_Array + (Core_list.map ~f:(Reason.json_of_loc ~strip_root ~offset_table) tainted_locs) ); + ] + else + let covered_locs = untainted_locs @ tainted_locs |> Core_list.sort ~cmp:compare in + [ + ("covered_count", int_ covered); + ( "covered_locs", + JSON_Array + (Core_list.map ~f:(Reason.json_of_loc ~strip_root ~offset_table) covered_locs) ); + ] + in + JSON_Object + [ + ( "expressions", + JSON_Object + ( covered_data + @ [ + ("uncovered_count", int_ (total - covered)); + ( "uncovered_locs", + JSON_Array + (Core_list.map + ~f:(Reason.json_of_loc ~strip_root ~offset_table) + uncovered_locs) ); + ("empty_count", int_ empty); + ( "empty_locs", + JSON_Array + (Core_list.map ~f:(Reason.json_of_loc ~strip_root ~offset_table) empty_locs) + ); + ] ) ); + ] + |> print_json_endline ~pretty) else - Utils_js.print_endlinef - "Covered: %0.2f%% (%d of %d expressions)\n" percent covered total + Utils_js.print_endlinef "Covered: %0.2f%% (%d of %d expressions)\n" percent covered total let main - base_flags option_values json pretty root from color debug path respect_pragma - all filename () = - FlowEventLogger.set_from from; - let file = get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) - path filename in + base_flags + option_values + json + pretty + root + strip_root + wait_for_recheck + color + debug + path + respect_pragma + all + trust + filename + () = + let file = get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) path filename in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - - if not option_values.quiet && all && respect_pragma then prerr_endline - "Warning: --all and --respect-pragma cannot be used together. --all wins."; + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) + in + let strip_root = + if strip_root then + Some root + else + None + in + if (not option_values.quiet) && all && respect_pragma then + prerr_endline "Warning: --all and --respect-pragma cannot be used together. --all wins."; (* TODO: --respect-pragma is deprecated. We will soon flip the default. As a transition, --all defaults to enabled. 
To maintain the current behavior @@ -231,23 +307,21 @@ let main Once we flip the default, --respect-pragma will have no effect and will be removed. *) let all = all || not respect_pragma in - (* pretty implies json *) let json = json || pretty in + if color && json then + raise (CommandSpec.Failed_to_parse ("--color", "Can't be used with json flags")); + if debug && json then + raise (CommandSpec.Failed_to_parse ("--debug", "Can't be used with json flags")); - if color && json - then raise (CommandSpec.Failed_to_parse ("--color", "Can't be used with json flags")); - if debug && json - then raise (CommandSpec.Failed_to_parse ("--debug", "Can't be used with json flags")); - - let request = ServerProt.Request.COVERAGE (file, all) in - + let request = + ServerProt.Request.COVERAGE { input = file; force = all; wait_for_recheck; trust } + in match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.COVERAGE (Error err) -> - handle_error ~json ~pretty err + | ServerProt.Response.COVERAGE (Error err) -> handle_error ~json ~pretty err | ServerProt.Response.COVERAGE (Ok resp) -> let content = File_input.content_of_file_input_unsafe file in - handle_response ~json ~pretty ~color ~debug resp content + handle_response ~json ~pretty ~strip_root ~color ~debug ~trust resp content | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/cycleCommand.ml b/src/commands/cycleCommand.ml index 4e15409aed3..622466a0196 100644 --- a/src/commands/cycleCommand.ml +++ b/src/commands/cycleCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,52 +8,46 @@ open CommandUtils let print_endlinef = Utils_js.print_endlinef + let prerr_endlinef = Utils_js.prerr_endlinef -let spec = { - CommandSpec. - name = "cycle"; - doc = "Output .dot file for cycle containing the given file"; - usage = Printf.sprintf - "Usage: %s cycle [OPTION]...\n\n\ - e.g. %s cycle path/to/file.js \n" - Utils_js.exe_name - Utils_js.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_flags - |> root_flag - |> strip_root_flag - |> anon "FILE..." (required string) - ) -} +let spec = + { + CommandSpec.name = "cycle"; + doc = "Output .dot file for cycle containing the given file"; + usage = + Printf.sprintf + "Usage: %s cycle [OPTION]...\n\ne.g. %s cycle path/to/file.js \n" + Utils_js.exe_name + Utils_js.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_flags + |> root_flag + |> strip_root_flag + |> flag "--types" no_arg ~doc:"Only consider type dependencies" + |> anon "FILE..." 
(required string)); + } -let main base_flags option_values root strip_root file () = +let main base_flags option_values root strip_root types_only file () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in let file = expand_path file in let root = guess_root flowconfig_name root in let strip_root f = - if strip_root - then Files.relative_path (Path.to_string root) f - else f + if strip_root then + Files.relative_path (Path.to_string root) f + else + f in (* connect to server *) - let request = ServerProt.Request.CYCLE file in + let request = ServerProt.Request.CYCLE { filename = file; types_only } in match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.CYCLE (Error msg) -> - prerr_endline msg + | ServerProt.Response.CYCLE (Error msg) -> FlowExitStatus.(exit ~msg Unknown_error) | ServerProt.Response.CYCLE (Ok dep_graph) -> (* print .dot file to stdout *) - print_endline "digraph {"; - List.iter (fun (f, dep_fs) -> - List.iter (fun dep_f -> - print_endlinef " \"%s\" -> \"%s\"" - (strip_root f) - (strip_root dep_f) - ) dep_fs - ) dep_graph; - print_endline "}" + LwtUtils.output_graph Lwt_io.stdout strip_root dep_graph |> Lwt_main.run | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/dumpTypesCommand.ml b/src/commands/dumpTypesCommand.ml index 4a469d6f86e..d969cba3cb8 100644 --- a/src/commands/dumpTypesCommand.ml +++ b/src/commands/dumpTypesCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,91 +12,95 @@ open CommandUtils open Utils_js -let spec = { - CommandSpec. - name = "dump-types"; - doc = ""; (* Outputs list of all types in the file *) - usage = Printf.sprintf - "Usage: %s dump-types [OPTION]... [FILE]\n\n\ - e.g. %s dump-types foo.js\n\ - or %s dump-types < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> path_flag - |> anon "file" (optional string) - ) -} +let spec = + { + CommandSpec.name = "dump-types"; + doc = ""; + (* Outputs list of all types in the file *) + usage = + Printf.sprintf + "Usage: %s dump-types [OPTION]... [FILE]\n\ne.g. 
%s dump-types foo.js\nor %s dump-types < foo.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> path_flag + |> wait_for_recheck_flag + |> anon "file" (optional string)); + } -let types_to_json types ~strip_root = - let open Hh_json in - let open Reason in - let types_json = types |> List.map (fun (loc, t) -> - let json_assoc = ( - ("type", JSON_String t) :: - ("reasons", JSON_Array []) :: - ("loc", json_of_loc ~strip_root loc) :: - (Errors.deprecated_json_props_of_loc ~strip_root loc) - ) in - JSON_Object json_assoc - ) in - JSON_Array types_json +let types_to_json ~file_content types ~strip_root = + Hh_json.( + Reason.( + let offset_table = Option.map file_content ~f:Offset_utils.make in + let types_json = + types + |> Core_list.map ~f:(fun (loc, t) -> + let json_assoc = + ("type", JSON_String t) + :: ("reasons", JSON_Array []) + :: ("loc", json_of_loc ~strip_root ~offset_table loc) + :: Errors.deprecated_json_props_of_loc ~strip_root loc + in + JSON_Object json_assoc) + in + JSON_Array types_json)) -let handle_response types ~json ~pretty ~strip_root = - if json - then ( - let types_json = types_to_json types ~strip_root in +let handle_response types ~json ~file_content ~pretty ~strip_root = + if json then + let types_json = types_to_json ~file_content types ~strip_root in Hh_json.print_json_endline ~pretty types_json - ) else ( - let out = types - |> List.map (fun (loc, str) -> - (spf "%s: %s" (Reason.string_of_loc ~strip_root loc) str) - ) + else + let out = + types + |> Core_list.map ~f:(fun (loc, str) -> + spf "%s: %s" (Reason.string_of_loc ~strip_root loc) str) |> String.concat "\n" in print_endline out - ) -let handle_error err ~json ~pretty ~strip_root = - if json - then ( - let open Hh_json in - let error_json = JSON_Object ["error", JSON_String err] in - prerr_json_endline ~pretty error_json; - (* also output an empty array on stdout, for JSON parsers *) - handle_response [] ~json ~pretty ~strip_root - ) else ( +let handle_error err ~file_content ~json ~pretty ~strip_root = + if json then ( + Hh_json.( + let error_json = JSON_Object [("error", JSON_String err)] in + prerr_json_endline ~pretty error_json; + + (* also output an empty array on stdout, for JSON parsers *) + handle_response [] ~file_content ~json ~pretty ~strip_root) + ) else prerr_endline err - ) -let main base_flags option_values json pretty root strip_root from path filename () = - FlowEventLogger.set_from from; +let main base_flags option_values json pretty root strip_root path wait_for_recheck filename () = let json = json || pretty in - let file = get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) - path filename in + let file = get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) path filename in + let file_content = File_input.content_of_file_input file |> Core_result.ok in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - - let strip_root = if strip_root then Some root else None in - - let request = ServerProt.Request.DUMP_TYPES file in - + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) + in + let strip_root = + if strip_root then + Some root + else + None + in + let request = 
ServerProt.Request.DUMP_TYPES { input = file; wait_for_recheck } in match connect_and_make_request flowconfig_name option_values root request with | ServerProt.Response.DUMP_TYPES (Error err) -> - handle_error err ~json ~pretty ~strip_root + handle_error err ~file_content ~json ~pretty ~strip_root | ServerProt.Response.DUMP_TYPES (Ok resp) -> - handle_response resp ~json ~pretty ~strip_root + handle_response resp ~file_content ~json ~pretty ~strip_root | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/dune b/src/commands/dune new file mode 100644 index 00000000000..d45661c95ed --- /dev/null +++ b/src/commands/dune @@ -0,0 +1,73 @@ +(library + (name flow_command_spec) + (wrapped false) + (modules + commandSpec) + (libraries + flow_common_utils + collections ; hack + ) +) + +(library + (name flow_commands_connect) + (wrapped false) + (modules + commandConnect + commandConnectSimple + commandMeanKill + ) + (libraries + flow_exit_status + flow_server_files + flow_server_protocol + flow_server_status + socket ; hack + ) +) + +(library + (name flow_commands_utils) + (wrapped false) + (modules + commandUtils + ) + (libraries + flow_command_spec + flow_commands_connect + flow_common + flow_common_errors + flow_common_lwt + flow_config + flow_exit_status + flow_flowlib + flow_server + flow_server_files + flow_shared_mem + find ; hack + ) +) + +(library + (name flow_commands) + (wrapped false) + (modules (:standard \ + commandConnect + commandConnectSimple + commandMeanKill + commandSpec + commandUtils + )) + (libraries + flow_command_spec + flow_commands_utils + flow_common_build_id + flow_exit_status + flow_logging_utils + flow_lsp + flow_monitor + flow_parser + flow_server_status + flow_service_autocomplete + ) +) diff --git a/src/commands/findModuleCommand.ml b/src/commands/findModuleCommand.ml index 731a92cd604..5376508d379 100644 --- a/src/commands/findModuleCommand.ml +++ b/src/commands/findModuleCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,57 +11,59 @@ open CommandUtils -let spec = { - CommandSpec. - name = "find-module"; - doc = "Resolves a module reference to a file"; - usage = Printf.sprintf - "Usage: %s find-module [OPTION]... [FILE]...\n\n\ - Resolves a module reference to a file\n\n\ - Example usage:\n\ - \t%s find-module moduleref filename\n" - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> anon "module" (required string) - |> anon "file" (required string) - ) -} +let spec = + { + CommandSpec.name = "find-module"; + doc = "Resolves a module reference to a file"; + usage = + Printf.sprintf + "Usage: %s find-module [OPTION]... 
[FILE]...\n\nResolves a module reference to a file\n\nExample usage:\n\t%s find-module moduleref filename\n" + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> wait_for_recheck_flag + |> anon "module" (required string) + |> anon "file" (required string)); + } -let main base_flags option_values json pretty root strip_root from moduleref filename () = - FlowEventLogger.set_from from; +let main + base_flags option_values json pretty root strip_root wait_for_recheck moduleref filename () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with Some root -> Some root | None -> Some filename - ) in - - let request = ServerProt.Request.FIND_MODULE (moduleref, filename) in - - let result = match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.FIND_MODULE ( - Some File_key.LibFile file - | Some File_key.SourceFile file - | Some File_key.JsonFile file - | Some File_key.ResourceFile file - ) -> - if strip_root then Files.relative_path (Path.to_string root) file - else file - | ServerProt.Response.FIND_MODULE (Some File_key.Builtins) -> "(global)" - | ServerProt.Response.FIND_MODULE None -> "(unknown)" - | response -> failwith_bad_response ~request ~response + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> Some filename) + in + let request = ServerProt.Request.FIND_MODULE { moduleref; filename; wait_for_recheck } in + let result = + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.FIND_MODULE + ( Some (File_key.LibFile file) + | Some (File_key.SourceFile file) + | Some (File_key.JsonFile file) + | Some (File_key.ResourceFile file) ) -> + if strip_root then + Files.relative_path (Path.to_string root) file + else + file + | ServerProt.Response.FIND_MODULE (Some File_key.Builtins) -> "(global)" + | ServerProt.Response.FIND_MODULE None -> "(unknown)" + | response -> failwith_bad_response ~request ~response in - if json || pretty - then ( - let open Hh_json in - let json = JSON_Object (["file", JSON_String result]) in - print_json_endline ~pretty json - ) else + if json || pretty then + Hh_json.( + let json = JSON_Object [("file", JSON_String result)] in + print_json_endline ~pretty json) + else Printf.printf "%s\n%!" result let command = CommandSpec.command spec main diff --git a/src/commands/findRefsCommand.ml b/src/commands/findRefsCommand.ml index 38e7fdd0496..c6e5a3c1547 100644 --- a/src/commands/findRefsCommand.ml +++ b/src/commands/findRefsCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2014, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,72 +11,82 @@ open CommandUtils -let spec = { - CommandSpec. - name = "find-refs"; - doc = "Gets the reference locations of a variable or property"; - usage = Printf.sprintf - "Usage: %s find-refs [OPTION]... [FILE] LINE COLUMN\n\n\ - e.g. %s find-refs foo.js 12 3\n\ - or %s find-refs 12 3 < foo.js\n" +let spec = + { + CommandSpec.name = "find-refs"; + doc = "Gets the reference locations of a variable or property"; + usage = + Printf.sprintf + "Usage: %s find-refs [OPTION]... [FILE] LINE COLUMN\n\ne.g. 
%s find-refs foo.js 12 3\nor %s find-refs 12 3 < foo.js\n" CommandUtils.exe_name CommandUtils.exe_name CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> path_flag - |> flag "--global" no_arg ~doc:"Search for references in other files (beta)" - |> flag "--multi-hop" no_arg ~doc:"Include references on related object types (implies `--global`; experimental)" - |> anon "args" (required (list_of string)) - ) -} + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> path_flag + |> flag "--global" no_arg ~doc:"Search for references in other files (beta)" + |> flag + "--multi-hop" + no_arg + ~doc:"Include references on related object types (implies `--global`; experimental)" + |> anon "args" (required (list_of string))); + } let parse_args path args = - let (file, line, column) = match args with + let (file, line, column) = + match args with | [file; line; column] -> let file = expand_path file in - File_input.FileName file, (int_of_string line), (int_of_string column) + (File_input.FileName file, int_of_string line, int_of_string column) | [line; column] -> - get_file_from_filename_or_stdin path ~cmd:CommandSpec.(spec.name) None, - (int_of_string line), - (int_of_string column) + ( get_file_from_filename_or_stdin path ~cmd:CommandSpec.(spec.name) None, + int_of_string line, + int_of_string column ) | _ -> CommandSpec.usage spec; FlowExitStatus.(exit Commandline_usage_error) in let (line, column) = convert_input_pos (line, column) in - file, line, column + (file, line, column) -let print_json result ~pretty ~strip_root = - let open Hh_json in - let json = match result with - | None -> JSON_Object ["kind", JSON_String "no-symbol-found"] - | Some (name, locs) -> - JSON_Object [ - "kind", JSON_String "symbol-found"; - "name", JSON_String name; - "locs", JSON_Array (List.map (Reason.json_of_loc ~strip_root) locs) - ] - in - print_json_endline ~pretty json +let print_json result ~stdin_file ~pretty ~strip_root = + Hh_json.( + let json = + match result with + | None -> JSON_Object [("kind", JSON_String "no-symbol-found")] + | Some (name, locs) -> + JSON_Object + [ + ("kind", JSON_String "symbol-found"); + ("name", JSON_String name); + ( "locs", + JSON_Array (Core_list.map ~f:(json_of_loc_with_offset ~stdin_file ~strip_root) locs) + ); + ] + in + print_json_endline ~pretty json) -let to_string result option_values ~strip_root = - let locs = match result with +let to_string result ~strip_root = + let locs = + match result with | None -> [] | Some (_, locs) -> locs in - String.concat "\n" @@ - if option_values.from = "vim" || option_values.from = "emacs" - then List.map (Errors.Vim_emacs_output.string_of_loc ~strip_root) locs - else List.map (range_string_of_loc ~strip_root) locs - + let from = FlowEventLogger.get_from_I_AM_A_CLOWN () in + String.concat "\n" + @@ + if from = Some "vim" || from = Some "emacs" then + Core_list.map ~f:(Errors.Vim_emacs_output.string_of_loc ~strip_root) locs + else + Core_list.map ~f:(range_string_of_loc ~strip_root) locs - (* find-refs command handler. +(* find-refs command handler. - json toggles JSON output - strip_root toggles whether output positions are relativized w.r.t. 
root - path is a user-specified path to use as incoming content source path @@ -84,29 +94,40 @@ let to_string result option_values ~strip_root = - multi_hop indicates whether to include properties on related objects (even slower) - args is mandatory command args; see parse_args above *) -let main base_flags option_values json pretty root strip_root from path global multi_hop args () = - FlowEventLogger.set_from from; +let main base_flags option_values json pretty root strip_root path global multi_hop args () = let (file, line, column) = parse_args path args in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - let strip_root = if strip_root then Some root else None in - - let request = ServerProt.Request.FIND_REFS (file, line, column, global, multi_hop) in + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) + in + let strip_root = + if strip_root then + Some root + else + None + in + let request = + ServerProt.Request.FIND_REFS { filename = file; line; char = column; global; multi_hop } + in (* command result will be a position structure with full file path *) match connect_and_make_request flowconfig_name option_values root request with | ServerProt.Response.FIND_REFS (Ok result) -> (* format output *) - if json || pretty - then print_json result ~pretty ~strip_root - else print_endline (to_string result option_values ~strip_root) + if json || pretty then + print_json result ~stdin_file:file ~pretty ~strip_root + else + print_endline (to_string result ~strip_root) | ServerProt.Response.FIND_REFS (Error exn_msg) -> Utils_js.prerr_endlinef "Could not find refs for %s:%d:%d\n%s" - (File_input.filename_of_file_input file) line column exn_msg + (File_input.filename_of_file_input file) + line + column + exn_msg | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/forceRecheckCommand.ml b/src/commands/forceRecheckCommand.ml index 3b8a650a68e..14f79133f90 100644 --- a/src/commands/forceRecheckCommand.ml +++ b/src/commands/forceRecheckCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,32 +11,39 @@ open CommandUtils -let spec = { - CommandSpec. - name = "force-recheck"; - doc = "Forces the server to recheck a given list of files"; - usage = Printf.sprintf - "Usage: %s force-recheck [OPTION]... [FILES]\n\ - Forces the Flow server to recheck a given list of files.\n\n\ - FILES may be omitted if and only if --input-file is used.\n" - exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> from_flag - |> profile_flag - |> flag "--focus" no_arg - ~doc:("If the server is running in lazy mode, force it to focus on these files") - |> flag "--input-file" string - ~doc:("File containing list of files to recheck, one per line. If -, list of files is "^ - "read from the standard input.") - |> anon "files" (list_of string) - ) -} +let spec = + { + CommandSpec.name = "force-recheck"; + doc = "Forces the server to recheck a given list of files"; + usage = + Printf.sprintf + "Usage: %s force-recheck [OPTION]... 
[FILES]\nForces the Flow server to recheck a given list of files.\n\nFILES may be omitted if and only if --input-file is used.\n" + exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> from_flag + |> profile_flag + |> flag + "--focus" + no_arg + ~doc:"If the server is running in lazy mode, force it to focus on these files" + |> flag + "--input-file" + string + ~doc: + ( "File containing list of files to recheck, one per line. If -, list of files is " + ^ "read from the standard input." ) + |> anon "files" (list_of string)); + } + +type json = + | JSON + | Pretty -type json = JSON | Pretty type args = { root: Path.t; files: string list; @@ -45,20 +52,20 @@ type args = { json: json option; } -let force_recheck flowconfig_name (args:args) connect_flags = - let files = List.map get_path_of_file args.files in - let request = ServerProt.Request.FORCE_RECHECK {files; focus=args.focus; profile=args.profile} in - - let profiling = begin match connect_and_make_request flowconfig_name connect_flags args.root - request with - | ServerProt.Response.FORCE_RECHECK profiling -> profiling - | response -> failwith_bad_response ~request ~response - end in - +let force_recheck flowconfig_name (args : args) connect_flags = + let files = Core_list.map ~f:get_path_of_file args.files in + let request = + ServerProt.Request.FORCE_RECHECK { files; focus = args.focus; profile = args.profile } + in + let profiling = + match connect_and_make_request flowconfig_name connect_flags args.root request with + | ServerProt.Response.FORCE_RECHECK profiling -> profiling + | response -> failwith_bad_response ~request ~response + in (* Print profiling info *) begin - if args.json = None - then Option.iter ~f:Profiling_js.print_summary profiling + if args.json = None then + Option.iter ~f:Profiling_js.print_summary profiling else let properties = Option.value_map ~default:[] ~f:Profiling_js.to_json_properties profiling in Hh_json.(print_json_endline ~pretty:(args.json = Some Pretty) (JSON_Object properties)) @@ -67,39 +74,46 @@ let force_recheck flowconfig_name (args:args) connect_flags = FlowExitStatus.(exit No_error) let rec find_parent_that_exists path = - if Sys.file_exists path - then path - else begin + if Sys.file_exists path then + path + else let newpath = Filename.dirname path in (* dirname called repeatedly should eventually return ".", which should * always exist. But no harm in being overly cautious. 
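(* A minimal standalone sketch of the same "walk upward until something
   exists" idiom used by find_parent_that_exists: Filename.dirname eventually
   reaches a fixed point, so comparing the new path with the old one is the
   recursion guard. The function name here is illustrative only. *)
let rec nearest_existing_ancestor path =
  if Sys.file_exists path then
    path
  else
    let parent = Filename.dirname path in
    (* Filename.dirname "." = "." and Filename.dirname "/" = "/", so equality
       with the input means we have hit the fixed point and must stop *)
    if parent = path then
      path
    else
      nearest_existing_ancestor parent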
Let's detect * infinite recursion *) - if newpath = path - then path - else find_parent_that_exists newpath - end - -let main base_flags connect_flags json pretty root from profile focus input_file files () = - FlowEventLogger.set_from from; + if newpath = path then + path + else + find_parent_that_exists newpath - begin match input_file, files with - | None, (None | Some []) -> - CommandSpec.usage spec; - let msg = "FILES may be omitted if and only if --input-file is used" in - FlowExitStatus.(exit ~msg Commandline_usage_error) - | _ -> () +let main base_flags connect_flags json pretty root profile focus input_file files () = + begin + match (input_file, files) with + | (None, (None | Some [])) -> + CommandSpec.usage spec; + let msg = "FILES may be omitted if and only if --input-file is used" in + FlowExitStatus.(exit ~msg Commandline_usage_error) + | _ -> () end; let files = get_filenames_from_input ~allow_imaginary:true input_file files in - let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root, files with - | Some root, _ -> Some root - | None, file::_ -> (Some (find_parent_that_exists file)) - | None, [] -> None - ) in - let json = if pretty then Some Pretty else (if json then Some JSON else None ) in + let root = + guess_root + flowconfig_name + (match (root, files) with + | (Some root, _) -> Some root + | (None, file :: _) -> Some (find_parent_that_exists file) + | (None, []) -> None) + in + let json = + if pretty then + Some Pretty + else if json then + Some JSON + else + None + in let args = { root; files; focus; profile; json } in force_recheck flowconfig_name args connect_flags diff --git a/src/commands/genFlowFilesCommand.ml b/src/commands/genFlowFilesCommand.ml deleted file mode 100644 index 4fbd4682f93..00000000000 --- a/src/commands/genFlowFilesCommand.ml +++ /dev/null @@ -1,204 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Utils_js -open CommandUtils - -let name = "gen-flow-files" -let spec = { - CommandSpec. - name; - - (** - * Still iterating on this command but wanted to leave the eventual - * docs/usage info that we're targeting here in comments here to - * foreshadow what's to come a bit. - *) - (* - doc = "Generate minimal .js.flow files for publishing to npm."; - usage = Printf.sprintf - ("Usage: %s %s [OPTIONS] SRC_DIR OUT_DIR\n" ^^ - " or\n" ^^ - " %s %s [OPTIONS] INPUT_FILE\n" ^^ - "\n" ^^ - "e.g. %s %s ./src ./dist\n" ^^ - "or %s %s ./src/lib/foo.js > ./dist/lib/foo.js.flow\n") - - CommandUtils.exe_name - name - CommandUtils.exe_name - name - CommandUtils.exe_name - name - CommandUtils.exe_name - name - ; - *) - doc = "EXPERIMENTAL: Generate minimal .js.flow files for publishing to npm."; - usage = Printf.sprintf - "Usage (EXPERIMENTAL): %s %s [OPTIONS] [FILE]\n\n\ - e.g. 
%s %s ./src/foo.js > ./dist/foo.js.flow\n" - CommandUtils.exe_name - name - CommandUtils.exe_name - name - ; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_flags - |> root_flag - |> error_flags - |> strip_root_flag - |> ignore_flag - |> include_flag - |> untyped_flag - |> declaration_flag - |> from_flag - |> anon "src" (required string) - |> flag "--out-dir" string - ~doc:"The path to write the generated .js.flow files into" - ) -} - -let write_file strip_root root content perm src_file_path dest_file_path = - let fd = Unix.(openfile dest_file_path [O_CREAT; O_TRUNC; O_WRONLY;] perm) in - let root_str = Path.to_string root in - let printed_src_file_path = - if strip_root - then Files.relative_path root_str src_file_path - else src_file_path - in - print_string (spf "%s -> " printed_src_file_path); - flush stdout; - (try ( - ignore (Unix.single_write_substring fd content 0 (String.length content)); - let printed_dest_file_path = - if strip_root - then Files.relative_path root_str dest_file_path - else dest_file_path - in - print_endline printed_dest_file_path - ) with exn -> print_endline "ERROR!"; Unix.close fd; raise exn); - Unix.close fd - -let main base_flags option_values root error_flags strip_root ignore_flag - include_flag untyped_flag declaration_flag from src out_dir () = ( - FlowEventLogger.set_from from; - let src = expand_path src in - let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> Some src - ) in - - (match out_dir with None -> () | Some out_dir -> - if not (Sys.is_directory out_dir) then - let msg = spf "%s: `--out-dir` must be a directory!" out_dir in - FlowExitStatus.exit ~msg FlowExitStatus.Commandline_usage_error - ); - - let src_is_dir = Sys.is_directory src in - let filenames = - if not src_is_dir then [File_input.FileName src] else ( - (* If `src` is a directory, we require that an out_dir was specified *) - (if out_dir = None then - let msg = - "When the `src` arg is a directory, the `--out-dir` flag is required." - in - FlowExitStatus.exit ~msg FlowExitStatus.Commandline_usage_error - ); - - let options = LsCommand.make_options ~flowconfig_name ~root ~ignore_flag ~include_flag - ~untyped_flag ~declaration_flag in - let _, libs = Files.init options in - let next_files = - LsCommand.get_ls_files ~root ~all:false ~options ~libs ~imaginary:false (Some src) - in - let files = Files.get_all next_files in - let num_files = SSet.cardinal files in - print_endlinef "Found %d files, generating libdefs..." num_files; - List.map (fun f -> File_input.FileName f) (SSet.elements files) - ) - in - - let include_warnings = error_flags.Errors.Cli_output.include_warnings in - let request = ServerProt.Request.GEN_FLOW_FILES (filenames, include_warnings) in - let open ServerProt.Response in - match connect_and_make_request flowconfig_name option_values root request, out_dir with - | GEN_FLOW_FILES (Error (GenFlowFiles_TypecheckError {errors; warnings})), _ -> - let strip_root = if strip_root then Some root else None in - Errors.Cli_output.print_errors - ~out_channel:stderr - ~flags:error_flags - ~strip_root - ~errors - ~warnings - ~lazy_msg:None - (); - let msg = - "\nIn order to generate a shadow file there must be no type errors!" 
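(* The command mains in this patch share one request/response shape: build a
   ServerProt.Request value, send it with connect_and_make_request, match the
   single expected ServerProt.Response constructor, and treat anything else as
   a protocol error. A schematic sketch of that shape; EXAMPLE_QUERY and its
   payload are placeholders, not real constructors in ServerProt. *)
let run_query flowconfig_name option_values root payload =
  let request = ServerProt.Request.EXAMPLE_QUERY payload in
  match connect_and_make_request flowconfig_name option_values root request with
  | ServerProt.Response.EXAMPLE_QUERY (Ok result) -> result
  | ServerProt.Response.EXAMPLE_QUERY (Error msg) ->
    FlowExitStatus.(exit ~msg Unknown_error)
  | response -> failwith_bad_response ~request ~response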
- in - FlowExitStatus.exit ~msg FlowExitStatus.Type_error; - | GEN_FLOW_FILES (Error (GenFlowFiles_UnexpectedError error_msg)), _ -> - let msg = spf "Error: %s" error_msg in - FlowExitStatus.exit ~msg FlowExitStatus.Unknown_error - | GEN_FLOW_FILES (Ok results), None -> - (if List.length results <> 1 then ( - let msg = - "Internal Error: Received multiple results for a single file!" - in - FlowExitStatus.exit ~msg FlowExitStatus.Unknown_error - )); - let (_filepath, result) = List.hd results in - (match result with - | GenFlowFiles_FlowFile content -> - print_endline content - | GenFlowFiles_NonFlowFile -> - print_endline "// This file does not have an @flow at the top!" - ) - | GEN_FLOW_FILES (Ok results), Some out_dir -> - let out_dir = expand_path out_dir in - let src_stat = Unix.stat src in - results |> List.iter (fun (file_path, result) -> - match result with - | GenFlowFiles_FlowFile content when src_is_dir -> - let dest_path = file_path - (* File path relative to the src dir *) - |> Files.relative_path src - (* Make the path OS specific *) - |> Str.split_delim (Str.regexp "/") - |> String.concat Filename.dir_sep - (* Concatenated with the output dir *) - |> Filename.concat out_dir in - - (* Replace file extension .js -> .js.flow *) - let dest_path = dest_path ^ ".flow" in - - let dest_dir = Filename.dirname dest_path in - Files.mkdirp dest_dir src_stat.Unix.st_perm; - - let file_path_stat = Unix.stat file_path in - (try write_file strip_root root content file_path_stat.Unix.st_perm file_path dest_path - with exn -> prerr_endlinef "Error writing %s:" dest_path; raise exn) - - | GenFlowFiles_FlowFile content -> - let file_name = Filename.basename file_path in - let dest_path = Filename.concat out_dir file_name in - let dest_path = dest_path ^ ".flow" in - - let file_path_stat = Unix.stat file_path in - (try write_file strip_root root content file_path_stat.Unix.st_perm file_path dest_path - with exn -> prerr_endlinef "Error writing %s:" dest_path; raise exn) - - | GenFlowFiles_NonFlowFile -> () - ) - | response, _ -> failwith_bad_response ~request ~response -) - -let command = CommandSpec.command spec main diff --git a/src/commands/getDefCommand.ml b/src/commands/getDefCommand.ml index 2464478e327..0a052e83828 100644 --- a/src/commands/getDefCommand.ml +++ b/src/commands/getDefCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,44 +11,45 @@ open CommandUtils -let spec = { - CommandSpec. - name = "get-def"; - doc = "Gets the definition location of a variable or property"; - usage = Printf.sprintf - "Usage: %s get-def [OPTION]... [FILE] LINE COLUMN\n\n\ - e.g. %s get-def foo.js 12 3\n\ - or %s get-def 12 3 < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> path_flag - |> anon "args" (required (list_of string)) - ) -} +let spec = + { + CommandSpec.name = "get-def"; + doc = "Gets the definition location of a variable or property"; + usage = + Printf.sprintf + "Usage: %s get-def [OPTION]... [FILE] LINE COLUMN\n\ne.g. 
%s get-def foo.js 12 3\nor %s get-def 12 3 < foo.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> path_flag + |> wait_for_recheck_flag + |> anon "args" (required (list_of string))); + } let parse_args path args = - let (file, line, column) = match args with - | [file; line; column] -> + let (file, line, column) = + match args with + | [file; line; column] -> let file = expand_path file in - File_input.FileName file, (int_of_string line), (int_of_string column) - | [line; column] -> - get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) path None, - (int_of_string line), - (int_of_string column) - | _ -> + (File_input.FileName file, int_of_string line, int_of_string column) + | [line; column] -> + ( get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) path None, + int_of_string line, + int_of_string column ) + | _ -> CommandSpec.usage spec; FlowExitStatus.(exit Commandline_usage_error) in let (line, column) = convert_input_pos (line, column) in - file, line, column + (file, line, column) (* get-def command handler. - json toggles JSON output @@ -56,39 +57,47 @@ let parse_args path args = - path is a user-specified path to use as incoming content source path - args is mandatory command args; see parse_args above *) -let main base_flags option_values json pretty root strip_root from path args () = - FlowEventLogger.set_from from; +let main base_flags option_values json pretty root strip_root path wait_for_recheck args () = let (file, line, column) = parse_args path args in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - let strip_root = if strip_root then Some root else None in - - let request = ServerProt.Request.GET_DEF (file, line, column) in - + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) + in + let strip_root = + if strip_root then + Some root + else + None + in + let request = + ServerProt.Request.GET_DEF { filename = file; line; char = column; wait_for_recheck } + in match connect_and_make_request flowconfig_name option_values root request with | ServerProt.Response.GET_DEF (Ok loc) -> (* format output *) - if json || pretty - then ( + if json || pretty then (* TODO: this format is deprecated but can't be backwards-compatible. should be replaced with just `Reason.json_of_loc loc`. 
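(* JSON output in these commands is assembled from plain Hh_json constructors
   and written with print_json_endline, as in the branch below. A tiny sketch
   of that pattern; the "file" and "loc" keys are illustrative only. *)
let print_result ~pretty file loc_string =
  Hh_json.(
    let json =
      JSON_Object [("file", JSON_String file); ("loc", JSON_String loc_string)]
    in
    print_json_endline ~pretty json)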
*) - let open Hh_json in - let json = - JSON_Object (Errors.deprecated_json_props_of_loc ~strip_root loc) in - print_json_endline ~pretty json - ) else - if option_values.from = "vim" || option_values.from = "emacs" - then print_endline (Errors.Vim_emacs_output.string_of_loc ~strip_root loc) - else print_endline (range_string_of_loc ~strip_root loc) + Hh_json.( + let json = JSON_Object (Errors.deprecated_json_props_of_loc ~strip_root loc) in + print_json_endline ~pretty json) + else + let from = FlowEventLogger.get_from_I_AM_A_CLOWN () in + if from = Some "vim" || from = Some "emacs" then + print_endline (Errors.Vim_emacs_output.string_of_loc ~strip_root loc) + else + print_endline (range_string_of_loc ~strip_root loc) | ServerProt.Response.GET_DEF (Error exn_msg) -> - Utils_js.prerr_endlinef - "Could not get definition for %s:%d:%d\n%s" - (File_input.filename_of_file_input file) line column - exn_msg + Utils_js.prerr_endlinef + "Could not get definition for %s:%d:%d\n%s" + (File_input.filename_of_file_input file) + line + column + exn_msg | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/getImportsCommand.ml b/src/commands/getImportsCommand.ml index 752b5f89571..d9bb25b1d65 100644 --- a/src/commands/getImportsCommand.ml +++ b/src/commands/getImportsCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,106 +11,129 @@ open CommandUtils -let spec = { - CommandSpec. - name = "get-imports"; - doc = "Get names of all modules imported by one or more given modules"; - usage = Printf.sprintf - "Usage: %s get-requirements [OPTION]... [FILE]...\n\n\ - Get names of all modules imported by one or more given modules\n\n\ - Example usage:\n\ - \t%s get-imports FirstModule SecondModule\n" - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> anon "modules" (required (list_of string)) - ) -} +let spec = + { + CommandSpec.name = "get-imports"; + doc = "Get names of all modules imported by one or more given modules"; + usage = + Printf.sprintf + "Usage: %s get-requirements [OPTION]... 
[FILE]...\n\nGet names of all modules imported by one or more given modules\n\nExample usage:\n\t%s get-imports FirstModule SecondModule\n" + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> wait_for_recheck_flag + |> anon "modules" (required (list_of string))); + } -let main base_flags option_values json pretty root strip_root from modules () = - FlowEventLogger.set_from from; +let main base_flags option_values json pretty root strip_root wait_for_recheck modules () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = guess_root flowconfig_name root in - - let request = ServerProt.Request.GET_IMPORTS modules in - let (requirements_map, non_flow) = match connect_and_make_request flowconfig_name option_values - root request with - | ServerProt.Response.GET_IMPORTS response -> response - | response -> failwith_bad_response ~request ~response + let request = ServerProt.Request.GET_IMPORTS { module_names = modules; wait_for_recheck } in + let (requirements_map, non_flow) = + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.GET_IMPORTS response -> response + | response -> failwith_bad_response ~request ~response in - - let requirements_map = SMap.fold - begin fun module_name reqlocs map -> - let requirements = Modulename.Map.fold (fun req loc assoc -> - let req = match req with - | Modulename.String s -> s - | Modulename.Filename f -> - let f = File_key.to_string f in - if strip_root then Files.relative_path (Path.to_string root) f - else f + let requirements_map = + SMap.fold + begin + fun module_name reqlocs map -> + let requirements = + Modulename.Map.fold + (fun req loc assoc -> + let req = + match req with + | Modulename.String s -> s + | Modulename.Filename f -> + let f = File_key.to_string f in + if strip_root then + Files.relative_path (Path.to_string root) f + else + f + in + (req, loc) :: assoc) + reqlocs + [] in - (req, loc)::assoc - ) reqlocs [] in - SMap.add module_name requirements map - end - requirements_map SMap.empty in - - let strip_root = if strip_root then Some root else None in - if json || pretty - then ( - let open Hh_json in - let json_non_flow = SSet.fold (fun module_name acc -> - let json = JSON_Object [ - "not_flow", JSON_Bool true; - "requirements", JSON_Array [] - ] in - (module_name, json) :: acc - ) non_flow [] in - let json_imports = SMap.fold (fun module_name assoc acc -> - let requirements = List.fold_left (fun acc (req, locs) -> - Nel.fold_left (fun acc loc -> - JSON_Object ( - ("import", JSON_String req) :: - ("loc", Reason.json_of_loc ~strip_root loc) :: - (Errors.deprecated_json_props_of_loc ~strip_root loc) - ) :: acc - ) acc locs - ) [] assoc in - let json = JSON_Object [ - "not_flow", JSON_Bool false; - "requirements", JSON_Array requirements - ] in - (module_name, json) :: acc - ) requirements_map [] in - let json = JSON_Object (List.append json_non_flow json_imports) in - print_json_endline ~pretty json - ) else ( + SMap.add module_name requirements map + end + requirements_map + SMap.empty + in + let strip_root = + if strip_root then + Some root + else + None + in + if json || pretty then + Hh_json.( + let json_non_flow = + SSet.fold + (fun module_name acc -> + let json = + JSON_Object [("not_flow", JSON_Bool true); ("requirements", JSON_Array [])] + in + (module_name, json) :: acc) + non_flow + [] + in + let json_imports = + SMap.fold + 
(fun module_name assoc acc -> + let requirements = + List.fold_left + (fun acc (req, locs) -> + Nel.fold_left + (fun acc loc -> + JSON_Object + ( ("import", JSON_String req) + :: ("loc", json_of_loc_with_offset ~strip_root loc) + :: Errors.deprecated_json_props_of_loc ~strip_root loc ) + :: acc) + acc + locs) + [] + assoc + in + let json = + JSON_Object + [("not_flow", JSON_Bool false); ("requirements", JSON_Array requirements)] + in + (module_name, json) :: acc) + requirements_map + [] + in + let json = JSON_Object (List.append json_non_flow json_imports) in + print_json_endline ~pretty json) + else let print_imports module_name = - if (SMap.mem module_name requirements_map) - then begin + if SMap.mem module_name requirements_map then ( let requirements = SMap.find_unsafe module_name requirements_map in Printf.printf "Imports for module '%s':\n" module_name; - List.iter (fun (req, locs) -> - Nel.iter (fun loc -> - let loc_str = range_string_of_loc ~strip_root loc in - Printf.printf "\t%s@%s\n" req loc_str - ) locs - ) requirements - end else if (SSet.mem module_name non_flow) - then - Printf.printf "Cannot obtain imports for module '%s' because is not\ - \ marked for processing by flow!\n" module_name + List.iter + (fun (req, locs) -> + Nel.iter + (fun loc -> + let loc_str = range_string_of_loc ~strip_root loc in + Printf.printf "\t%s@%s\n" req loc_str) + locs) + requirements + ) else if SSet.mem module_name non_flow then + Printf.printf + "Cannot obtain imports for module '%s' because is not\ marked for processing by flow!\n" + module_name else Printf.printf "Module '%s' could not be found!\n" module_name in List.iter print_imports modules; flush stdout - ) let command = CommandSpec.command spec main diff --git a/src/commands/graphCommand.ml b/src/commands/graphCommand.ml new file mode 100644 index 00000000000..c7fe12f6f13 --- /dev/null +++ b/src/commands/graphCommand.ml @@ -0,0 +1,87 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open CommandUtils + +let print_endlinef = Utils_js.print_endlinef + +let prerr_endlinef = Utils_js.prerr_endlinef + +let depgraph_subcommand = + let spec = + { + CommandSpec.name = "dep-graph"; + doc = "Output .dot file for the dependency graph of a repository"; + usage = + Printf.sprintf + "Usage: %s graph dep-graph [OPTION]...\n\ne.g. %s graph dep-graph --out path/to/output --root path/to/root\ne.g. 
%s graph dep-graph --out path/to/output \nor %s graph dep-graph --strip-root --out path/to/output --root path/to/root\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\nIf --strip-root is specified, the file paths in the output graph + will be relative to ROOT.\nThe graph will be output in FILE.\n\n" + Utils_js.exe_name + Utils_js.exe_name + Utils_js.exe_name + Utils_js.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_flags + |> strip_root_flag + |> flag "--out" (required string) ~doc:"Location to print the output file" + |> flag "--types" no_arg ~doc:"Only consider type dependencies" + |> root_flag); + } + in + let main base_flags option_values strip_root outfile types_only path_opt () = + let flowconfig_name = base_flags.Base_flags.flowconfig_name in + let root = CommandUtils.guess_root flowconfig_name path_opt in + (* Create the outfile if it doesn't already exist *) + let outpath = Files.imaginary_realpath outfile |> Path.make |> Path.to_string in + (* connect to server *) + let request = + ServerProt.Request.GRAPH_DEP_GRAPH + { root = Path.to_string root; strip_root; outfile = outpath; types_only } + in + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.GRAPH_DEP_GRAPH (Error msg) -> FlowExitStatus.(exit ~msg Unknown_error) + | ServerProt.Response.GRAPH_DEP_GRAPH (Ok _) -> () + | response -> failwith_bad_response ~request ~response + in + CommandSpec.command spec main + +let cycle_subcommand = + let spec = + { + CycleCommand.spec with + CommandSpec.usage = + Printf.sprintf + "Usage: %s graph cycle [OPTION]...\n\ne.g. %s graph cycle path/to/file.js \n" + Utils_js.exe_name + Utils_js.exe_name; + } + in + CommandSpec.command spec CycleCommand.main + +let command = + let spec = + { + CommandSpec.name = "graph"; + doc = "Outputs dependency graphs of flow repositories"; + usage = + Printf.sprintf + "Usage: %s graph SUBCOMMAND [OPTIONS]...\nOutputs dependency graphs of flow repositories\n\nSUBCOMMANDS:\ncycle: Produces a graph of the dependency cycle containing the input file\ndep-graph: Produces the dependency graph of a repository\n" + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> anon + "subcommand" + (required + (command [("cycle", cycle_subcommand); ("dep-graph", depgraph_subcommand)]))); + } + in + let main (cmd, argv) () = CommandUtils.run_command cmd argv in + CommandSpec.command spec main diff --git a/src/commands/ideCommand.ml b/src/commands/ideCommand.ml deleted file mode 100644 index 3e556393878..00000000000 --- a/src/commands/ideCommand.ml +++ /dev/null @@ -1,394 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -(***********************************************************************) -(* flow ide command *) -(***********************************************************************) - -open CommandUtils - -module Prot = Persistent_connection_prot - -let protocol_options = [ - "very-unstable", `Very_unstable; - "human-readable", `Human_readable; -] - -let protocol_options_string = String.concat ", " (List.map fst protocol_options) - -let spec = { - CommandSpec. - name = "ide"; - doc = - "Starts a persistent connection to the server. 
Currently in development and highly unstable"; - usage = Printf.sprintf - "Usage: %s ide\n\n\ - Starts a persistent connection to the server\n" - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_flags - |> root_flag - |> from_flag - |> flag "--protocol" (required (enum protocol_options)) - ~doc:("Indicates the protocol to be used. One of: " ^ protocol_options_string) - |> strip_root_flag - |> json_version_flag - (* TODO use this somehow? |> verbose_flags *) - ) -} - -module type ClientProtocol = sig - val server_request_of_stdin_message: Buffered_line_reader.t -> Prot.request option - val handle_server_response: - strip_root:Path.t option -> - json_version:Errors.Json_output.json_version option -> - Prot.response -> - unit -end - -module HumanReadable: ClientProtocol = struct - let server_request_of_stdin_message buffered_stdin = - let line = Buffered_line_reader.get_next_line buffered_stdin in - let tokens = Str.split (Str.regexp "[ \t\r\n]+") line in - match tokens with - | ["subscribe"] -> Some Prot.Subscribe - (* For human-readable mode (which is just for playing around, basically) - * you need to include the file contents on the same line, and it must - * also have the magic token. *) - | "autocomplete"::file::contents -> - let fileinput = File_input.FileContent (Some file, String.concat " " contents) in - Some (Prot.Autocomplete (fileinput, 0 (* use a dummy id *))) - (* Ensure files is not empty *) - | "open"::f::fs -> - Some (Prot.DidOpen (f, fs)) - | "close"::f::fs -> - Some (Prot.DidClose (f, fs)) - | _ -> - prerr_endline ("Command not recognized: " ^ line); None - - let handle_autocomplete = function - | Error _ -> print_endline "Autocomplete Error" - | Ok completions -> - print_endline "Autocomplete results:"; - completions |> - List.map (fun r -> r.ServerProt.Response.res_name) |> - List.iter (Printf.printf " %s\n"); - flush stdout - - - let handle_server_response ~strip_root:_ ~json_version:_ = function - | Prot.Errors {errors; warnings} -> - let err_count = Errors.ErrorSet.cardinal errors in - let warn_count = Errors.ErrorSet.cardinal warnings in - print_endline ("Received " ^ (string_of_int err_count) ^ " errors and " - ^ (string_of_int warn_count) ^ " warnings") - | Prot.ServerExit _code -> () (* ignored here; used in lspCommand *) - | Prot.Please_hold _status -> () (* ignored here; used in lspCommand *) - | Prot.LspFromServer _ -> failwith "no lspFromServer to ideCommand" - | Prot.StartRecheck -> print_endline "Start recheck" - | Prot.EndRecheck _ -> print_endline "End recheck" - | Prot.AutocompleteResult (result, _ (* ignore id *)) -> handle_autocomplete result - | Prot.DidOpenAck -> print_endline "Received file open ack" - | Prot.DidCloseAck -> print_endline "Received file close ack" - | Prot.EOF -> () (* ignored here; used in lspCommand *) - -end - -module VeryUnstable: ClientProtocol = struct - let print_errors ~strip_root ~json_version errors warnings = - (* Because the file-tracking portion of the protocol already handles which warnings - * we display, we don't want the printer removing them. 
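(* The "very unstable" protocol frames every outgoing message the same way:
   wrap a payload in a JSON-RPC notification, serialize it, and write it as an
   HTTP-lite message on stdout. A condensed sketch of that pipeline, reusing
   the same helpers that appear in the code below. *)
let send_notification method_name payload_json =
  Json_rpc.jsonrpcize_notification method_name [payload_json]
  |> Hh_json.json_to_string
  |> Http_lite.write_message stdout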
*) - let json_errors = Errors.Json_output.full_status_json_of_errors - ~strip_root ?version:json_version - ~suppressed_errors:([]) ~errors ~warnings () - in - let json_message = Json_rpc.jsonrpcize_notification "diagnosticsNotification" [json_errors] in - let json_string = Hh_json.json_to_string json_message in - Http_lite.write_message stdout json_string; - prerr_endline "sent diagnostics notification" - - let print_message message () = - [] - |> Json_rpc.jsonrpcize_notification message - |> Hh_json.json_to_string - |> Http_lite.write_message stdout - - let print_start_recheck = print_message "startRecheck" - - let print_end_recheck = print_message "endRecheck" - - let print_autocomplete ~strip_root response id = - AutocompleteService_js.autocomplete_response_to_json ~strip_root response - |> Json_rpc.jsonrpcize_response id - |> Hh_json.json_to_string - |> Http_lite.write_message stdout - - let handle_server_response ~strip_root ~json_version = function - | Prot.Errors {errors; warnings} -> - print_errors ~strip_root ~json_version errors warnings - | Prot.ServerExit _code -> () (* ignored here, but used in lspCommand *) - | Prot.Please_hold _status -> () (* ignored here, but used in lspCommand *) - | Prot.LspFromServer _ -> failwith "no lspFromServer to ideCommand" - | Prot.StartRecheck -> print_start_recheck () - | Prot.EndRecheck _ -> print_end_recheck () - | Prot.AutocompleteResult (result, id) -> print_autocomplete ~strip_root result id - (* No need to send the client anything; these acks are to prevent deadlocks - * involving the buffers between the ide command and the flow server *) - | Prot.DidOpenAck -> () - | Prot.DidCloseAck -> () - | Prot.EOF -> () - - let handle_autocomplete id = Hh_json.(function - | [JSON_String file; JSON_Number line_str; JSON_Number column_str; JSON_String contents] -> - let file = get_path_of_file file in - let line = int_of_string line_str in - let column = int_of_string column_str in - let (line, column) = convert_input_pos (line, column) in - let with_token = AutocompleteService_js.add_autocomplete_token contents line column in - Some (Prot.Autocomplete (File_input.FileContent (Some file, with_token), id)) - | _ -> - prerr_endline - "Incorrect arguments passed to autocomplete. Should be filepath, line, column, contents"; - None - ) - - (* Converts a list of json strings into a non-empty string list. - * Returns Some files on success; None otherwise. *) - let unjsonify_files = - let unjsonify_file files = Hh_json.(function - | JSON_String file -> - let file = get_path_of_file file in - Option.bind files (fun files -> Some (file::files)) - (* Fail on a non-string argument. *) - | _ -> None - ) in - fun files -> - match List.fold_left unjsonify_file (Some []) files with - | None -> None - (* Fail on an empty argument list. *) - | Some [] -> None - | Some (f::fs) -> Some (f, fs) - - let handle_did_open files = - match unjsonify_files files with - | Some processed_files -> Some (Prot.DidOpen processed_files) - | None -> - prerr_endline "Incorrect arguments passed to didOpen. Should be filepath, ...filepaths"; - None - - let handle_did_close files = - match unjsonify_files files with - | Some processed_files -> Some (Prot.DidClose processed_files) - | None -> - prerr_endline "Incorrect arguments passed to didClose. 
Should be filepath, ...filepaths"; - None - - let server_request_of_stdin_message buffered_stdin = - let message = try - Some (Http_lite.read_message_utf8 buffered_stdin) - with Http_lite.Malformed _ -> - prerr_endline "Received a malformed http message"; - None - in - match message with - | None -> None - | Some message -> - let obj = Json_rpc.parse_json_rpc_response message in - match obj with - | Json_rpc.Obj ("subscribeToDiagnostics", _, None) -> - prerr_endline "received subscribe request"; - Some Prot.Subscribe - | Json_rpc.Obj ("autocomplete", params, Some id) -> - handle_autocomplete id params - | Json_rpc.Obj ("didOpen", params, None) -> - handle_did_open params - | Json_rpc.Obj ("didClose", params, None) -> - handle_did_close params - | Json_rpc.Obj (method_name, _, id) -> - let id_str = match id with None -> "no id" | Some _ -> "an id" in - prerr_endline - ("unrecognized method: " ^ method_name ^ " with " ^ id_str ^ " provided"); - None - | Json_rpc.Malformed err -> - prerr_endline ("Received a malformed message: " ^ err); - None -end - -module PendingRequests : sig - type t - val empty: t - val add_request: t -> Prot.request -> t - val add_response: t -> Prot.response -> t - val ready_request: t -> (Prot.request option * t) -end = struct - type t = { - queue: Prot.request ImmQueue.t; - outstanding: Prot.request option; - } - - let empty = { - queue = ImmQueue.empty; - outstanding = None; - } - - let add_request t req = - { t with - queue = ImmQueue.push t.queue req; - } - - let add_response t response = - let open Prot in - match response, t.outstanding with - | Errors _, _ - | ServerExit _, _ - | LspFromServer _, _ - | Please_hold _, _ - | StartRecheck, _ - | EndRecheck _, _ - | EOF, _ -> - t - | AutocompleteResult (_, response_id), Some (Autocomplete (_, request_id)) -> - if response_id <> request_id then begin - failwith "Internal error: request and response id mismatch." - end; - { t with outstanding = None } - | DidOpenAck, Some (DidOpen _) -> - { t with outstanding = None } - | DidCloseAck, Some (DidClose _) -> - { t with outstanding = None } - (* Explicit matches on response instead of `_` to make adding to the protocol easier. *) - | (AutocompleteResult _ | DidOpenAck | DidCloseAck), Some _ -> - failwith "Internal error: received a mismatched response type" - | (AutocompleteResult _ | DidOpenAck | DidCloseAck), None -> - failwith "Internal error: received a response when there was no outstanding request." 
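(* PendingRequests enforces a single-outstanding-request invariant: requests
   queue up, and the next one is released only after the response to the
   previous one has arrived. A reduced sketch of the same bookkeeping over
   plain lists, with ordinary strings standing in for Prot.request. *)
module Pending = struct
  type t = {
    queue: string list;          (* oldest request first *)
    outstanding: string option;  (* request we are still waiting on *)
  }

  let empty = { queue = []; outstanding = None }

  let add_request t req = { t with queue = t.queue @ [req] }

  let add_response t = { t with outstanding = None }

  let ready_request t =
    match (t.outstanding, t.queue) with
    | (Some _, _)
    | (_, []) ->
      (None, t)
    | (None, req :: rest) -> (Some req, { queue = rest; outstanding = Some req })
end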
- - let ready_request t = - match t.outstanding with - | Some _ -> (None, t) - | None -> begin - match ImmQueue.pop t.queue with - | None, q -> (None, { t with queue = q }) - | Some req, q -> - let outstanding = match req with - (* We do not expect a response from `subscribe` *) - | Prot.Subscribe -> None - | Prot.Autocomplete _ | Prot.DidOpen _ | Prot.DidClose _ -> Some req - | Prot.LspToServer _ -> failwith "no lspToServer from ideCommand" - in - (Some req, { outstanding; queue = q }) - end -end - -module ProtocolFunctor (Protocol: ClientProtocol) = struct - (* not to be confused with genv or env -- this is state local to the IDE - * command process *) - type local_env = { - pending_requests: PendingRequests.t; - } - - let handle_server_response ~strip_root ~json_version fd local_env = - let (message : Prot.response) = - try - Marshal_tools.from_fd_with_preamble fd - with - | Unix.Unix_error (Unix.ECONNRESET, _, _) -> - (* Windows throws ECONNRESET when the connection dies *) - let msg = "Server closed the connection via an ECONNRESET" in - FlowExitStatus.(exit ~msg No_server_running) - | End_of_file -> - let msg = "Server closed the connection via an End_of_file" in - FlowExitStatus.(exit ~msg No_server_running) - in - let pending_requests = - PendingRequests.add_response local_env.pending_requests message - in - Protocol.handle_server_response ~strip_root ~json_version message; - { pending_requests } - - let send_server_request fd msg = - Marshal_tools.to_fd_with_preamble fd (msg: Prot.request) |> ignore - - let handle_stdin_message buffered_stdin local_env = - match Protocol.server_request_of_stdin_message buffered_stdin with - | None -> local_env - | Some req -> - let pending_requests = - PendingRequests.add_request local_env.pending_requests req - in - { pending_requests } - - let rec handle_all_stdin_messages buffered_stdin local_env = - let local_env = handle_stdin_message buffered_stdin local_env in - if Buffered_line_reader.has_buffered_content buffered_stdin then - handle_all_stdin_messages buffered_stdin local_env - else - local_env - - let rec send_pending_requests fd local_env = - let (req, pending_requests) = - PendingRequests.ready_request local_env.pending_requests - in - let local_env = { pending_requests } in - match req with - | None -> local_env - | Some req -> begin - send_server_request fd req; - send_pending_requests fd local_env - end - - let main_loop ~buffered_stdin ~ic_fd ~oc_fd ~strip_root ~json_version = - let stdin_fd = Buffered_line_reader.get_fd buffered_stdin in - let local_env = - ref { - pending_requests = PendingRequests.empty; - } - in - while true do - local_env := send_pending_requests oc_fd !local_env; - (* Negative timeout means this call will wait indefinitely *) - let readable_fds, _, _ = Unix.select [stdin_fd; ic_fd] [] [] ~-.1.0 in - List.iter (fun fd -> - if fd = ic_fd then begin - local_env := handle_server_response ~strip_root ~json_version ic_fd !local_env - end else if fd = stdin_fd then begin - local_env := handle_all_stdin_messages buffered_stdin !local_env - end else - failwith "Internal error: select returned an unknown fd" - ) readable_fds - done -end - -module VeryUnstableProtocol = ProtocolFunctor(VeryUnstable) -module HumanReadableProtocol = ProtocolFunctor(HumanReadable) - -let main base_flags option_values root from protocol strip_root json_version () = - FlowEventLogger.set_from from; - let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = CommandUtils.guess_root flowconfig_name root in - let strip_root = 
if strip_root then Some root else None in - let client_handshake = SocketHandshake.({ - client_build_id = build_revision; - is_stop_request = false; - server_should_hangup_if_still_initializing = false; - server_should_exit_if_version_mismatch = true; }, { - client_type = Persistent { logging_context = FlowEventLogger.get_context (); lsp = None; }; - }) in - Printf.eprintf "Connecting to server...\n%!"; - let ic, oc = connect ~flowconfig_name ~client_handshake option_values root in - Printf.eprintf "Connected to server\n%!"; - let buffered_stdin = stdin |> Unix.descr_of_in_channel |> Buffered_line_reader.create in - let ic_fd = Timeout.descr_of_in_channel ic in - let oc_fd = Unix.descr_of_out_channel oc in - let main_loop = match protocol with - | `Very_unstable -> VeryUnstableProtocol.main_loop - | `Human_readable -> HumanReadableProtocol.main_loop - in - main_loop ~buffered_stdin ~ic_fd ~oc_fd ~strip_root ~json_version - -let command = CommandSpec.command spec main diff --git a/src/commands/initCommand.ml b/src/commands/initCommand.ml index f372c9ed880..2fd9d0e8a3d 100644 --- a/src/commands/initCommand.ml +++ b/src/commands/initCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,45 +9,47 @@ (* flow init command *) (***********************************************************************) -let spec = { - CommandSpec. - name = "init"; - doc = "Initializes a directory to be used as a flow root directory"; - usage = Printf.sprintf - "Usage: %s init [ROOT]\n\ - Initializes a directory to be used as a flow root directory\n\n\ - e.g. %s init /path/to/root\n\ - or %s init\n\ - or %s init --options \"optionA=123;optionB=456\"\n\ - or %s init --lints \"lintA=on,lintB=off\"\n\n\ - If the root is not specified it is assumed to be the current working directory\n\n\ - This command will create and initialize /path/to/root/.flowconfig\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> CommandUtils.base_flags - |> CommandUtils.from_flag - |> CommandUtils.flowconfig_flags - |> flag "--options" (optional string) - ~doc:"Semicolon-delimited list of key=value pairs" - |> anon "root" (optional string) - ) -} +let spec = + { + CommandSpec.name = "init"; + doc = "Initializes a directory to be used as a flow root directory"; + usage = + Printf.sprintf + "Usage: %s init [ROOT]\nInitializes a directory to be used as a flow root directory\n\ne.g. 
%s init /path/to/root\nor %s init\nor %s init --options \"optionA=123;optionB=456\"\nor %s init --lints \"lintA=on,lintB=off\"\n\nIf the root is not specified it is assumed to be the current working directory\n\nThis command will create and initialize /path/to/root/.flowconfig\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> CommandUtils.base_flags + |> CommandUtils.from_flag + |> CommandUtils.flowconfig_flags + |> flag "--options" (optional string) ~doc:"Semicolon-delimited list of key=value pairs" + |> anon "root" (optional string)); + } -let main base_flags from flowconfig_flags options root () = - FlowEventLogger.set_from from; - let root = match root with - | None -> Sys.getcwd () |> Path.make - | Some root -> Path.make root +let error (errs : (int * string) list) = + let msg = + errs + |> Core_list.map ~f:(fun (ln, msg) -> Utils_js.spf ".flowconfig:%d %s" ln msg) + |> String.concat "\n" + in + FlowExitStatus.(exit ~msg Invalid_flowconfig) + +let main base_flags flowconfig_flags options root () = + let root = + match root with + | None -> Sys.getcwd () |> Path.make + | Some root -> Path.make root in FlowEventLogger.set_root (Some (Path.to_string root)); - let options = match options with - | None -> [] - | Some str -> Str.split (Str.regexp ";") str + let options = + match options with + | None -> [] + | Some str -> Str.split (Str.regexp ";") str in let ignores = flowconfig_flags.CommandUtils.ignores in let untyped = flowconfig_flags.CommandUtils.untyped in @@ -55,16 +57,18 @@ let main base_flags from flowconfig_flags options root () = let includes = flowconfig_flags.CommandUtils.includes in let libs = flowconfig_flags.CommandUtils.libs in let lints = flowconfig_flags.CommandUtils.raw_lint_severities in - let file = Server_files_js.config_file base_flags.CommandUtils.Base_flags.flowconfig_name root in - if Sys.file_exists file - then begin + ( if Sys.file_exists file then let msg = Utils_js.spf "Error: \"%s\" already exists!\n%!" file in - FlowExitStatus.(exit ~msg Invalid_flowconfig) - end; + FlowExitStatus.(exit ~msg Invalid_flowconfig) ); let config = FlowConfig.init ~ignores ~untyped ~declarations ~includes ~libs ~options ~lints in - + let config = + match config with + | Ok (config, []) -> config + | Ok (_, warnings) -> error warnings (* TODO: write warnings to stderr instead of exiting *) + | Error err -> error [err] + in let out = Sys_utils.open_out_no_fail file in FlowConfig.write config out; Sys_utils.close_out_no_fail file out diff --git a/src/commands/lsCommand.ml b/src/commands/lsCommand.ml index 179907c19c9..a2326adbdda 100644 --- a/src/commands/lsCommand.ml +++ b/src/commands/lsCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,36 +12,39 @@ open CommandUtils open Utils_js -let spec = { - CommandSpec. - name = "ls"; - doc = "Lists files visible to Flow"; - usage = Printf.sprintf - "Usage: %s ls [OPTION]... 
[FILE]...\n\n\ - Lists files visible to Flow\n" - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> strip_root_flag - |> ignore_flag - |> include_flag - |> untyped_flag - |> declaration_flag - |> root_flag - |> json_flags - |> from_flag - |> flag "--all" no_arg - ~doc:"Even list ignored files" - |> flag "--imaginary" no_arg - ~doc:"Even list non-existent specified files (normally they are silently dropped). \ - Non-existent files are never considered to be libs." - |> flag "--explain" no_arg - ~doc:"Output what kind of file each file is and why Flow cares about it" - |> input_file_flag "ls" - |> anon "files or dirs" (list_of string) - ) -} +let spec = + { + CommandSpec.name = "ls"; + doc = "Lists files visible to Flow"; + usage = + Printf.sprintf + "Usage: %s ls [OPTION]... [FILE]...\n\nLists files visible to Flow\n" + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> strip_root_flag + |> ignore_flag + |> include_flag + |> untyped_flag + |> declaration_flag + |> root_flag + |> json_flags + |> from_flag + |> flag "--all" no_arg ~doc:"Even list ignored files" + |> flag + "--imaginary" + no_arg + ~doc: + "Even list non-existent specified files (normally they are silently dropped). Non-existent files are never considered to be libs." + |> flag + "--explain" + no_arg + ~doc:"Output what kind of file each file is and why Flow cares about it" + |> input_file_flag "ls" + |> anon "files or dirs" (list_of string)); + } type file_result = | ImplicitlyIncluded @@ -53,116 +56,128 @@ type file_result = | ConfigFile let string_of_file_result = function -| ImplicitlyIncluded -> "ImplicitlyIncluded" -| ExplicitlyIncluded -> "ExplicitlyIncluded" -| ImplicitlyIgnored -> "ImplicitlyIgnored" -| ExplicitlyIgnored -> "ExplicitlyIgnored" -| ImplicitLib -> "ImplicitLib" -| ExplicitLib -> "ExplicitLib" -| ConfigFile -> "ConfigFile" + | ImplicitlyIncluded -> "ImplicitlyIncluded" + | ExplicitlyIncluded -> "ExplicitlyIncluded" + | ImplicitlyIgnored -> "ImplicitlyIgnored" + | ExplicitlyIgnored -> "ExplicitlyIgnored" + | ImplicitLib -> "ImplicitLib" + | ExplicitLib -> "ExplicitLib" + | ConfigFile -> "ConfigFile" let string_of_file_result_with_padding = function -| ImplicitlyIncluded -> "ImplicitlyIncluded" -| ExplicitlyIncluded -> "ExplicitlyIncluded" -| ImplicitlyIgnored -> "ImplicitlyIgnored " -| ExplicitlyIgnored -> "ExplicitlyIgnored " -| ImplicitLib -> "ImplicitLib " -| ExplicitLib -> "ExplicitLib " -| ConfigFile -> "ConfigFile " + | ImplicitlyIncluded -> "ImplicitlyIncluded" + | ExplicitlyIncluded -> "ExplicitlyIncluded" + | ImplicitlyIgnored -> "ImplicitlyIgnored " + | ExplicitlyIgnored -> "ExplicitlyIgnored " + | ImplicitLib -> "ImplicitLib " + | ExplicitLib -> "ExplicitLib " + | ConfigFile -> "ConfigFile " let explain ~flowconfig_name ~root ~options ~libs raw_file = let file = raw_file |> Path.make |> Path.to_string in let root_str = Path.to_string root in let result = - if SSet.mem file libs - then begin + if SSet.mem file libs then (* This is a lib file *) let flowtyped_path = Files.get_flowtyped_path root in - if String_utils.string_starts_with file (Path.to_string flowtyped_path) - then ImplicitLib - else ExplicitLib - end else if Server_files_js.config_file flowconfig_name root = file - then ConfigFile - else if Files.is_ignored options file - then ExplicitlyIgnored - else if String_utils.string_starts_with file root_str - then ImplicitlyIncluded - else if Files.is_included options file - then ExplicitlyIncluded - else ImplicitlyIgnored - 
in (raw_file, result) + if String_utils.string_starts_with file (Path.to_string flowtyped_path) then + ImplicitLib + else + ExplicitLib + else if Server_files_js.config_file flowconfig_name root = file then + ConfigFile + else if Files.is_ignored options file then + ExplicitlyIgnored + else if String_utils.string_starts_with file root_str then + ImplicitlyIncluded + else if Files.is_included options file then + ExplicitlyIncluded + else + ImplicitlyIgnored + in + (raw_file, result) let json_of_files_with_explanations files = - let open Hh_json in - let properties = List.map - (fun (file,res) -> file, JSON_Object [ - "explanation", JSON_String (string_of_file_result res); - ]) - files in - JSON_Object properties + Hh_json.( + let properties = + List.map + (fun (file, res) -> + (file, JSON_Object [("explanation", JSON_String (string_of_file_result res))])) + files + in + JSON_Object properties) let rec iter_get_next ~f get_next = match get_next () with | [] -> () | result -> - List.iter f result; - iter_get_next ~f get_next + List.iter f result; + iter_get_next ~f get_next -let make_options ~flowconfig_name ~root ~ignore_flag ~include_flag ~untyped_flag ~declaration_flag = - let flowconfig = FlowConfig.get (Server_files_js.config_file flowconfig_name root) in +let make_options ~flowconfig_name ~root ~ignore_flag ~include_flag ~untyped_flag ~declaration_flag + = + let flowconfig = read_config_or_exit (Server_files_js.config_file flowconfig_name root) in let temp_dir = FlowConfig.temp_dir flowconfig in let includes = CommandUtils.list_of_string_arg include_flag in let ignores = CommandUtils.list_of_string_arg ignore_flag in let untyped = CommandUtils.list_of_string_arg untyped_flag in let declarations = CommandUtils.list_of_string_arg declaration_flag in let libs = [] in - CommandUtils.file_options flowconfig - ~root ~no_flowlib:true ~temp_dir ~ignores ~includes ~libs ~untyped ~declarations + CommandUtils.file_options + flowconfig + ~root + ~no_flowlib:true + ~temp_dir + ~ignores + ~includes + ~libs + ~untyped + ~declarations (* The problem with Files.wanted is that it says yes to everything except ignored files and libs. * So implicitly ignored files (like files in another directory) pass the Files.wanted check *) let wanted ~root ~options libs file = - Files.wanted ~options libs file && ( - let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in - String_utils.string_starts_with file root_str || Files.is_included options file - ) + Files.wanted ~options libs file + && + let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in + String_utils.string_starts_with file root_str || Files.is_included options file (* Directories will return a closure that returns every file under that directory. 
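(* The listing code is organized around "get_next" generators: functions of
   type unit -> string list that yield a batch of files per call and [] once
   exhausted. A sketch of a one-shot generator over a fixed batch, in the same
   spirit as the single-file case handled here. *)
let make_one_shot batch =
  let remaining = ref batch in
  fun () ->
    let out = !remaining in
    remaining := [];
    out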
Individual files will return a closure that returns just that file *) let get_ls_files ~root ~all ~options ~libs ~imaginary = function -| None -> - Files.make_next_files ~root ~all ~subdir:None ~options ~libs -| Some dir when try Sys.is_directory dir with _ -> false -> + | None -> Files.make_next_files ~root ~all ~subdir:None ~options ~libs + | Some dir when (try Sys.is_directory dir with _ -> false) -> let subdir = Some (Path.make dir) in Files.make_next_files ~root ~all ~subdir ~options ~libs -| Some file -> - if (Sys.file_exists file || imaginary) && (all || wanted ~root ~options libs file) - then begin + | Some file -> + if (Sys.file_exists file || imaginary) && (all || wanted ~root ~options libs file) then let file = file |> Path.make |> Path.to_string in - let rec cb = ref begin fun () -> - cb := begin fun () -> [] end; - [file] - end in - fun () -> !cb () - end else fun () -> [] + let rec cb = + ref (fun () -> + (cb := (fun () -> [])); + [file]) + in + (fun () -> !cb ()) + else + fun () -> + [] (* We have a list of get_next() functions. This combines them into a single get_next function *) let concat_get_next get_nexts = let get_nexts = ref get_nexts in - let rec concat () = match !get_nexts with | [] -> [] - | get_next::rest -> - (match get_next () with - | [] -> - get_nexts := rest; - concat () - | ret -> ret) - - in concat + | get_next :: rest -> + (match get_next () with + | [] -> + get_nexts := rest; + concat () + | ret -> ret) + in + concat (* Append a constant list of files to the get_next function *) let get_next_append_const get_next const = @@ -173,89 +188,111 @@ let get_next_append_const get_next const = let ret = !const in const := []; ret - | ret -> - ret + | ret -> ret let main - base_flags strip_root ignore_flag include_flag untyped_flag declaration_flag root_flag json pretty - from all imaginary reason - input_file root_or_files () = - + base_flags + strip_root + ignore_flag + include_flag + untyped_flag + declaration_flag + root_flag + json + pretty + all + imaginary + reason + input_file + root_or_files + () = let files_or_dirs = get_filenames_from_input ~allow_imaginary:true input_file root_or_files in - let flowconfig_name = base_flags.Base_flags.flowconfig_name in - FlowEventLogger.set_from from; - let root = guess_root flowconfig_name ( - match root_flag with - | Some root -> Some root - | None -> (match files_or_dirs with - | first_file::_ -> - (* If the first_file doesn't exist or if we can't find a .flowconfig, we'll error. If - * --strip-root is passed, we want the error to contain a relative path. *) - let first_file = if strip_root - then Files.relative_path (Sys.getcwd ()) first_file - else first_file in - Some first_file - | _ -> None) - ) in - - let options = make_options ~flowconfig_name ~root ~ignore_flag ~include_flag ~untyped_flag - ~declaration_flag in + let root = + guess_root + flowconfig_name + (match root_flag with + | Some root -> Some root + | None -> + (match files_or_dirs with + | first_file :: _ -> + (* If the first_file doesn't exist or if we can't find a .flowconfig, we'll error. If + * --strip-root is passed, we want the error to contain a relative path. 
*) + let first_file = + if strip_root then + Files.relative_path (Sys.getcwd ()) first_file + else + first_file + in + Some first_file + | _ -> None)) + in + let options = + make_options ~flowconfig_name ~root ~ignore_flag ~include_flag ~untyped_flag ~declaration_flag + in (* Turn on --no-flowlib by default, so that flow ls never reports flowlib files *) - let options = { options with Files.default_lib_dir = None; } in - let _, libs = Files.init options in + let options = { options with Files.default_lib_dir = None } in + let (_, libs) = Files.init options in (* `flow ls` and `flow ls dir` will list out all the flow files. We want to include lib files, so * we pass in ~libs:SSet.empty, which means we won't filter out any lib files *) - let next_files = (match files_or_dirs with - | [] -> - get_ls_files ~root ~all ~options ~libs:SSet.empty ~imaginary None - | files_or_dirs -> + let next_files = + match files_or_dirs with + | [] -> get_ls_files ~root ~all ~options ~libs:SSet.empty ~imaginary None + | files_or_dirs -> files_or_dirs - |> List.map (fun f -> get_ls_files ~root ~all ~options ~libs:SSet.empty ~imaginary (Some f)) - |> concat_get_next) in - + |> Core_list.map ~f:(fun f -> + get_ls_files ~root ~all ~options ~libs:SSet.empty ~imaginary (Some f)) + |> concat_get_next + in let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in let config_file_absolute = Server_files_js.config_file flowconfig_name root in let config_file_relative = Files.relative_path root_str config_file_absolute in - let include_config_file = files_or_dirs = [] || List.exists (fun file_or_dir -> - file_or_dir = config_file_relative || String_utils.string_starts_with root_str file_or_dir - ) files_or_dirs in - let next_files = if include_config_file - then get_next_append_const next_files [ config_file_absolute ] - else next_files in - + let include_config_file = + files_or_dirs = [] + || List.exists + (fun file_or_dir -> + file_or_dir = config_file_relative + || String_utils.string_starts_with root_str file_or_dir) + files_or_dirs + in + let next_files = + if include_config_file then + get_next_append_const next_files [config_file_absolute] + else + next_files + in let normalize_filename filename = - if not strip_root then filename - else Files.relative_path root_str filename + if not strip_root then + filename + else + Files.relative_path root_str filename in - - if json || pretty - then Hh_json.(begin - let files = Files.get_all next_files |> SSet.elements in - let json = - if reason - then - files - |> List.map (explain ~flowconfig_name ~root ~options ~libs) - |> List.map (fun (f, r) -> normalize_filename f, r) - |> json_of_files_with_explanations - else JSON_Array ( - List.map (fun f -> JSON_String (normalize_filename f)) files - ) in - print_json_endline ~pretty json - end) else begin - let f = if reason - then begin fun filename -> - let f, r = explain ~flowconfig_name ~root ~options ~libs filename in - Printf.printf - "%s %s\n%!" - (string_of_file_result_with_padding r) - (normalize_filename f) - end else begin fun filename -> + if json || pretty then + Hh_json.( + let files = Files.get_all next_files |> SSet.elements in + let json = + if reason then + files + (* Mapping may cause a stack overflow. To avoid that, we always use rev_map. 
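
(* A standalone illustration of the rev_map trick referred to in this comment,
 * under the assumption of the plain stdlib: List.map is not tail-recursive, so
 * very long lists can overflow the stack, while List.rev_map and List.rev are
 * tail-recursive and two reversals cancel out. `map_tail_recursive` is an
 * illustrative name only. *)
let map_tail_recursive f l = List.rev (List.rev_map f l)

(* An even number of rev_maps needs no extra reversal:
 *   l |> List.rev_map f |> List.rev_map g
 * produces the same list as List.map g (List.map f l); an odd number needs one
 * final List.rev to restore the original order, which is what the code below
 * does for the non-`reason` branch. *)
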
+ * Since the amount of rev_maps we use is odd, we reverse the list once more + * at the end *) + |> List.rev_map (explain ~flowconfig_name ~root ~options ~libs) + |> List.rev_map (fun (f, r) -> (normalize_filename f, r)) + |> json_of_files_with_explanations + else + JSON_Array (List.rev (List.rev_map (fun f -> JSON_String (normalize_filename f)) files)) + in + print_json_endline ~pretty json) + else + let f = + if reason then + fun filename -> + let (f, r) = explain ~flowconfig_name ~root ~options ~libs filename in + Printf.printf "%s %s\n%!" (string_of_file_result_with_padding r) (normalize_filename f) + else + fun filename -> Printf.printf "%s\n%!" (normalize_filename filename) - end in - + in iter_get_next ~f next_files - end let command = CommandSpec.command spec main diff --git a/src/commands/lspCommand.ml b/src/commands/lspCommand.ml index 45f31899339..75a85ac4692 100644 --- a/src/commands/lspCommand.ml +++ b/src/commands/lspCommand.ml @@ -1,1790 +1,58 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the BSD-style license found in the - * LICENSE file in the "flow" directory of this source tree. An additional grant - * of patent rights can be found in the PATENTS file in the same directory. + * Copyright (c) Facebook, Inc. and its affiliates. * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. *) open CommandUtils -open Lsp -open Lsp_fmt -module List = Core_list (***********************************************************************) (* flow lsp command *) (***********************************************************************) -let spec = { - CommandSpec. - name = "lsp"; - doc = - "Acts as a server for the Language Server Protocol over stdin/stdout [experimental]"; - usage = Printf.sprintf - "Usage: %s lsp\n\n\ - Runs a server for the Language Server Protocol\n" - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> temp_dir_flag - |> shm_flags - |> lazy_flags - |> autostop_flag - |> from_flag - ) -} - - -(************************************************************************) -(** Protocol orchestration & helpers **) -(************************************************************************) - -(* LSP exit codes are specified at https://microsoft.github.io/language-server-protocol/specification#exit *) -let lsp_exit_ok () = exit 0 -let lsp_exit_bad () = exit 1 - - -(* Given an ID that came from the server, we have to wrap it when we pass it *) -(* on to the client, to encode which instance of the server it came from. *) -(* That way, if a response comes back later from the client after the server *) -(* has died, we'll know to discard it. We wrap it as "serverid:#id" for *) -(* numeric ids, and "serverid:'id" for strings. 
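
(* A self-contained sketch of the id-wrapping scheme described here, using a
 * simplified id type (`simple_id` stands in for the real Lsp.lsp_id). The
 * point is that the encoding is reversible and records which server instance
 * a request belonged to, so stale responses can be discarded. *)
type simple_id =
  | Num of int
  | Str of string

let encode server_id = function
  | Num i -> Printf.sprintf "%d:#%d" server_id i
  | Str s -> Printf.sprintf "%d:'%s" server_id s

let decode (s : string) : int * simple_id =
  let colon = String.index s ':' in
  let server_id = int_of_string (String.sub s 0 colon) in
  let rest = String.sub s (colon + 2) (String.length s - colon - 2) in
  let id = if s.[colon + 1] = '#' then Num (int_of_string rest) else Str rest in
  (server_id, id)

(* Round trip: decode (encode 3 (Num 42)) = (3, Num 42). *)
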
*) -type wrapped_id = { server_id: int; message_id: lsp_id; } - -let encode_wrapped (wrapped_id: wrapped_id) : lsp_id = - let {server_id; message_id;} = wrapped_id in - match message_id with - | NumberId id -> StringId (Printf.sprintf "%d:#%d" server_id id) - | StringId id -> StringId (Printf.sprintf "%d:'%s" server_id id) - -let decode_wrapped (lsp: lsp_id) : wrapped_id = - let s = match lsp with - | NumberId _ -> failwith "not a wrapped id" - | StringId s -> s in - let icolon = String.index s ':' in - let server_id = int_of_string (String.sub s 0 icolon) in - let id = String.sub s (icolon+1) ((String.length s) - icolon - 1) in - let message_id = if (String.get s (icolon+1)) = '#' - then NumberId (int_of_string id) - else StringId id in - { server_id; message_id; } - -module WrappedKey = struct - type t = wrapped_id - let compare (x: t) (y:t) = - if x.server_id <> y.server_id then IntKey.compare x.server_id y.server_id - else IdKey.compare x.message_id y.message_id -end -module WrappedSet = Set.Make (WrappedKey) -module WrappedMap = MyMap.Make (WrappedKey) - - -type server_conn = { - ic: Timeout.in_channel; - oc: out_channel; -} - -type show_status_t = - | Never_shown - | Shown of lsp_id option * ShowStatus.showStatusParams - (* Shown (Some id, params) -- means it is currently shown *) - (* Shown (None, params) - means it was shown but user dismissed it *) - -type open_file_info = { - (* o_open_doc is guaranteed to be up-to-date with respect to the editor *) - o_open_doc: Lsp.TextDocumentItem.t; - (* o_ast, if present, is guaranteed to be up-to-date. It gets computed lazily. *) - o_ast: (Loc.t, Loc.t) Flow_ast.program option; - (* o_live_diagnostics, if present, is guaranteed to be up-to-date, and to only contain - * parse errors, and to be a better source of truth about the parse errors - * in this file than what the flow server has told us. It also gets computed lazily. *) - o_live_diagnostics: PublishDiagnostics.diagnostic list option; -} - -type initialized_env = { - i_initialize_params: Lsp.Initialize.params; - i_connect_params: connect_params; - i_root: Path.t; - i_version: string option; - i_server_id: int; - i_can_autostart_after_version_mismatch: bool; - i_outstanding_local_handlers: state lsp_handler IdMap.t; - i_outstanding_local_requests: lsp_request IdMap.t; - i_outstanding_requests_from_server: Lsp.lsp_request WrappedMap.t; - i_isConnected: bool; (* what we've told the client about our connection status *) - i_status: show_status_t; - i_open_files: open_file_info SMap.t; - i_outstanding_diagnostics: SSet.t; -} - -and disconnected_env = { - d_ienv: initialized_env; - d_autostart: bool; - d_server_status: (ServerStatus.status * FileWatcherStatus.status) option; -} - -and connected_env = { - c_ienv: initialized_env; - c_conn: server_conn; - c_server_status: ServerStatus.status * (FileWatcherStatus.status option); - c_recent_summaries: (float * ServerStatus.summary) list; (* newest at head of list *) - c_about_to_exit_code: FlowExitStatus.t option; - (* stateful handling of Errors+status from server... *) - c_is_rechecking: bool; - c_lazy_stats: ServerProt.Response.lazy_stats option; - c_diagnostics: PublishDiagnostics.diagnostic list SMap.t; - (* if server gets disconnected, we will tidy up these things... *) - c_outstanding_requests_to_server: Lsp.IdSet.t; - c_outstanding_diagnostics: SSet.t; (* we'll send publishDiagnostics([]) *) -} - -and state = - (* Pre_init: we haven't yet received the initialize request. 
*) - | Pre_init of connect_params - (* Disconnected: we'll attempt to reconnect once a tick. *) - | Disconnected of disconnected_env - (* Main_loop: we have a working connection to both server and client. *) - | Connected of connected_env - (* Post_shutdown: we received the shutdown request. *) - | Post_shutdown - -exception Client_fatal_connection_exception of Marshal_tools.remote_exception_data -exception Client_recoverable_connection_exception of Marshal_tools.remote_exception_data -exception Server_fatal_connection_exception of Marshal_tools.remote_exception_data - -type event = - | Server_message of Persistent_connection_prot.response - | Client_message of Lsp.lsp_message * Persistent_connection_prot.metadata - | Tick (* once per second, on idle *) - - -let string_of_state (state: state) : string = - match state with - | Pre_init _ -> "Pre_init" - | Disconnected _ -> "Disconnected" - | Connected _ -> "Connected" - | Post_shutdown -> "Post_shutdown" - - -let denorm_string_of_event (event: event) : string = - match event with - | Server_message response -> - Printf.sprintf "Server_message(%s)" (Persistent_connection_prot.string_of_response response) - | Client_message (c, _) -> - Printf.sprintf "Client_message(%s)" (Lsp_fmt.denorm_message_to_string c) - | Tick -> - "Tick" - -let to_stdout (json: Hh_json.json) : unit = - (* Extra \r\n purely for easier logfile reading; not required by protocol. *) - let s = (Hh_json.json_to_string json) ^ "\r\n\r\n" in - Http_lite.write_message stdout s - -let get_current_version flowconfig_name (root: Path.t) : string option = - Server_files_js.config_file flowconfig_name root - |> FlowConfig.get ~allow_cache:false - |> FlowConfig.required_version - -let get_root (state: state) : Path.t option = - match state with - | Connected cenv -> Some cenv.c_ienv.i_root - | Disconnected denv -> Some denv.d_ienv.i_root - | _ -> None - -let get_open_files (state: state) : open_file_info SMap.t option = - match state with - | Connected cenv -> Some cenv.c_ienv.i_open_files - | Disconnected denv -> Some denv.d_ienv.i_open_files - | _ -> None - -let update_open_file (uri: string) (open_file_info: open_file_info option) (state: state) : state = - let update_ienv ienv = - match open_file_info with - | Some open_file_info -> {ienv with i_open_files=SMap.add uri open_file_info ienv.i_open_files} - | None -> { ienv with i_open_files = SMap.remove uri ienv.i_open_files} - in - match state with - | Connected cenv -> Connected { cenv with c_ienv=update_ienv cenv.c_ienv } - | Disconnected denv -> Disconnected { denv with d_ienv=update_ienv denv.d_ienv } - | _ -> failwith ("client shouldn't be updating files in state " ^ (string_of_state state)) - - -let new_metadata (state: state) (message: Jsonrpc.message) : Persistent_connection_prot.metadata = - let start_lsp_state, start_lsp_state_reason, start_server_status, start_watcher_status = - match state with - | Connected {c_server_status=(s,w); _} -> - None, None, Some s, w - | Disconnected {d_server_status=Some (s,w); _} -> - Some (string_of_state state), None, Some s, Some w - | Disconnected {d_server_status=None; d_ienv; _} -> - Some (string_of_state state), Some d_ienv.i_status, None, None - | _ -> - Some (string_of_state state), None, None, None in - let start_lsp_state_reason = match start_lsp_state_reason with - | None - | Some Never_shown -> None - | Some (Shown (_, params)) -> Some params.ShowStatus.request.ShowMessageRequest.message - in - { Persistent_connection_prot. 
- start_wall_time = message.Jsonrpc.timestamp; - start_server_status; - start_watcher_status; - start_json_truncated = Hh_json.json_truncate message.Jsonrpc.json - ~max_string_length:256 ~max_child_count:4; - start_lsp_state; - start_lsp_state_reason; - error_info = None; - server_profiling = None; - client_duration = None; - extra_data = []; - server_logging_context = None; - } - - -let get_next_event_from_server (fd: Unix.file_descr) : event = - let r = begin try - Server_message (Marshal_tools.from_fd_with_preamble fd) - with e -> - let message = Printexc.to_string e in - let stack = Printexc.get_backtrace () in - raise (Server_fatal_connection_exception { Marshal_tools.message; stack; }) - end in - (* The server sends an explicit 'EOF' message in case the underlying *) - (* transport protocol doesn't result in EOF normally. We'll respond *) - (* to it by synthesizing the EOF exception we'd otherwise get. *) - if r = Server_message Persistent_connection_prot.EOF then begin - let stack = Printexc.get_callstack 100 |> Printexc.raw_backtrace_to_string in - raise (Server_fatal_connection_exception { Marshal_tools.message="End_of_file"; stack; }); - end else - r - -let get_next_event_from_client - (state: state) - (client: Jsonrpc.queue) - (parser: Jsonrpc.message -> Lsp.lsp_message) - : event = - let message = Jsonrpc.get_message client in - match message with - | `Message message -> Client_message (parser message, new_metadata state message) - | `Fatal_exception edata -> raise (Client_fatal_connection_exception edata) - | `Recoverable_exception edata -> raise (Client_recoverable_connection_exception edata) - -let get_next_event - (state: state) - (client: Jsonrpc.queue) - (parser: Jsonrpc.message -> Lsp.lsp_message) - : event = - if Jsonrpc.has_message client then - get_next_event_from_client state client parser - else - let client_fd = Jsonrpc.get_read_fd client in - match state with - | Connected { c_conn; _ } -> - let server_fd = Timeout.descr_of_in_channel c_conn.ic in - let fds, _, _ = Unix.select [server_fd; client_fd] [] [] 1.0 in - if fds = [] then Tick - else if List.mem fds server_fd then get_next_event_from_server server_fd - else get_next_event_from_client state client parser - | _ -> - let fds, _, _ = Unix.select [client_fd] [] [] 1.0 in - if fds = [] then Tick - else get_next_event_from_client state client parser - -let show_status - ?(titles = []) - ?(handler = fun _title state -> state) - ~(type_: MessageType.t) - ~(message: string) - ~(shortMessage: string option) - ~(progress: int option) - ~(total: int option) - (ienv: initialized_env) - : initialized_env = - let open ShowStatus in - let open ShowMessageRequest in - let open MessageType in - let use_status = Lsp_helpers.supports_status ienv.i_initialize_params in - let actions = List.map titles ~f:(fun title -> { title; }) in - let params = {request={type_; message; actions;}; shortMessage; progress; total;} in - - (* What should we display/hide? It's a tricky question... *) - let will_dismiss_old, will_show_new = match use_status, ienv.i_status, params with - (* If the new status is identical to the old, then no-op *) - | _, Shown (_, existingParams), params when existingParams = params -> false, false - (* If the client supports status reporting, then we'll blindly send everything *) - | true, _, _ -> false, true - (* If the client only supports dialog boxes, then we'll be very limited: *) - (* only every display failures; and if there was already an error up even *) - (* a different one then leave it undisturbed. 
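
(* A simplified, standalone restatement of the dialog-only branch of the
 * decision being made in this match. The real code also compares the full
 * ShowStatus params and handles clients that support window/showStatus; this
 * sketch only looks at whether the old and new messages are errors. The type
 * and function names are illustrative. *)
type shown =
  | Not_shown
  | Dialog_up of bool (* true iff the dialog currently showing is an error *)

let dialog_decision (old_status : shown) ~(new_is_error : bool) : bool * bool =
  (* returns (dismiss_old, show_new) *)
  match (old_status, new_is_error) with
  | (Dialog_up true, true) -> (false, false) (* keep the existing error dialog *)
  | (Dialog_up _, true) -> (true, true) (* replace a non-error with the error *)
  | (Dialog_up _, false) -> (true, false) (* progress etc.: just dismiss *)
  | (Not_shown, true) -> (false, true) (* surface new errors *)
  | (Not_shown, false) -> (false, false) (* never show non-errors as dialogs *)
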
*) - | false, Shown (_, {request={type_=ErrorMessage; _};_}), - {request={type_=ErrorMessage;_};_} -> false, false - | false, Shown (id, _), {request={type_=ErrorMessage;_};_} -> Option.is_some id, true - | false, Shown (id, _), _ -> Option.is_some id, false - | false, Never_shown, {request={type_=ErrorMessage;_};_} -> false, true - | false, Never_shown, _ -> false, false in - - (* dismiss the old one *) - let ienv = match will_dismiss_old, ienv.i_status with - | true, Shown (id, existingParams) -> - let id = Option.value_exn id in - let notification = CancelRequestNotification { CancelRequest.id; } in - let json = Lsp_fmt.print_lsp (NotificationMessage notification) in - to_stdout json; - { ienv with i_status = Shown (None, existingParams); } - | _, _ -> ienv in - - (* show the new one *) - if not will_show_new then - ienv - else begin - let id = NumberId (Jsonrpc.get_next_request_id ()) in - let request = if use_status - then ShowStatusRequest params else ShowMessageRequestRequest params.request in - let json = Lsp_fmt.print_lsp (RequestMessage (id, request)) in - to_stdout json; - - let mark_ienv_shown future_ienv = - match future_ienv.i_status with - | Shown (Some future_id, future_params) when future_id = id -> - { future_ienv with i_status = Shown (None, future_params); } - | _ -> future_ienv in - let mark_state_shown state = - match state with - | Connected cenv -> Connected { cenv with c_ienv = mark_ienv_shown cenv.c_ienv; } - | Disconnected denv -> Disconnected { denv with d_ienv = mark_ienv_shown denv.d_ienv; } - | _ -> state in - let handle_error _e state = - mark_state_shown state in - let handle_result (r: ShowMessageRequest.result) state = - let state = mark_state_shown state in - match r with - | Some {ShowMessageRequest.title} -> handler title state - | None -> state in - let handle_result = if use_status - then (ShowStatusHandler handle_result) else (ShowMessageHandler handle_result) in - let handlers = (handle_result, handle_error) in - let i_outstanding_local_requests = IdMap.add id request ienv.i_outstanding_local_requests in - let i_outstanding_local_handlers = IdMap.add id handlers ienv.i_outstanding_local_handlers - in - { ienv with - i_status = Shown (Some id, params); - i_outstanding_local_requests; - i_outstanding_local_handlers; - } - end - - -let send_to_server (env: connected_env) (request: Persistent_connection_prot.request) : unit = - let _bytesWritten = - Marshal_tools.to_fd_with_preamble (Unix.descr_of_out_channel env.c_conn.oc) request in - () - - -let send_lsp_to_server - (cenv: connected_env) - (metadata: Persistent_connection_prot.metadata) - (message: lsp_message) - : unit = - send_to_server cenv (Persistent_connection_prot.LspToServer (message, metadata)) - - -(************************************************************************) -(** Protocol **) -(************************************************************************) - -let do_initialize () : Initialize.result = - let open Initialize in +let spec = { - server_capabilities = { - textDocumentSync = { - want_openClose = true; - want_change = IncrementalSync; - want_willSave = false; - want_willSaveWaitUntil = false; - want_didSave = None; - }; - hoverProvider = true; - completionProvider = Some { - resolveProvider = false; - completion_triggerCharacters = ["."]; - }; - signatureHelpProvider = None; - definitionProvider = true; - referencesProvider = true; - documentHighlightProvider = true; - documentSymbolProvider = true; - workspaceSymbolProvider = false; - codeActionProvider = false; - 
codeLensProvider = None; - documentFormattingProvider = false; - documentRangeFormattingProvider = false; - documentOnTypeFormattingProvider = None; - renameProvider = true; - documentLinkProvider = None; - executeCommandProvider = None; - typeCoverageProvider = true; - rageProvider = true; - } - } - - -let show_connected (env: connected_env) : state = - (* report that we're connected to telemetry/connectionStatus *) - let i_isConnected = Lsp_helpers.notify_connectionStatus env.c_ienv.i_initialize_params - to_stdout env.c_ienv.i_isConnected true in - let env = { env with c_ienv = { env.c_ienv with i_isConnected; }; } in - (* show green status *) - let message = "Flow server is now ready" in - let c_ienv = show_status - ~type_:MessageType.InfoMessage ~message ~shortMessage:None ~progress:None ~total:None env.c_ienv - in - Connected { env with c_ienv; } - - -let show_connecting (reason: CommandConnectSimple.error) (env: disconnected_env) : state = - if reason = CommandConnectSimple.Server_missing then - Lsp_helpers.log_info to_stdout "Starting Flow server"; - - let message, shortMessage, progress, total = match reason, env.d_server_status with - | CommandConnectSimple.Server_missing, _ -> "Flow: Server starting", None, None, None - | CommandConnectSimple.Server_socket_missing, _ -> "Flow: Server starting?", None, None, None - | CommandConnectSimple.Build_id_mismatch, _ -> "Flow: Server is wrong version", None, None, None - | CommandConnectSimple.Server_busy (CommandConnectSimple.Too_many_clients), _ -> - "Flow: Server busy", None, None, None - | CommandConnectSimple.Server_busy _, None -> "Flow: Server busy", None, None, None - | CommandConnectSimple.Server_busy _, Some (server_status, watcher_status) -> - if not (ServerStatus.is_free server_status) then - let shortMessage, progress, total = ServerStatus.get_progress server_status in - "Flow: " ^ (ServerStatus.string_of_status ~use_emoji:true server_status), - shortMessage, progress, total - else - "Flow: " ^ (FileWatcherStatus.string_of_status watcher_status), None, None, None - - in - Disconnected { env with d_ienv = show_status - ~type_:MessageType.WarningMessage ~message ~shortMessage ~progress ~total env.d_ienv; - } - - -let show_disconnected - (code: FlowExitStatus.t option) - (message: string option) - (env: disconnected_env) - : state = - (* report that we're disconnected to telemetry/connectionStatus *) - let i_isConnected = Lsp_helpers.notify_connectionStatus env.d_ienv.i_initialize_params - to_stdout env.d_ienv.i_isConnected false in - let env = { env with d_ienv = { env.d_ienv with i_isConnected; }; } in - - (* show red status *) - let message = Option.value message ~default:"Flow: server is stopped" in - let message = match code with - | Some code -> Printf.sprintf "%s [%s]" message (FlowExitStatus.to_string code) - | None -> message in - let handler r state = match state, r with - | Disconnected e, "Restart" -> Disconnected { e with d_autostart = true; } - | _ -> state - in - Disconnected { env with - d_ienv = show_status ~handler ~titles:["Restart"] ~type_:MessageType.ErrorMessage - ~message ~shortMessage:None ~progress:None ~total:None env.d_ienv; - } - - -let try_connect flowconfig_name (env: disconnected_env) : state = - (* If the version in .flowconfig has changed under our feet then we mustn't *) - (* connect. We'll terminate and trust the editor to relaunch an ok version. 
*) - let current_version = get_current_version flowconfig_name env.d_ienv.i_root in - if env.d_ienv.i_version <> current_version then begin - let prev_version_str = Option.value env.d_ienv.i_version ~default: "[None]" in - let current_version_str = Option.value current_version ~default: "[None]" in - let message = - "\nVersion in flowconfig that spawned the existing flow server: " ^ prev_version_str ^ - "\nVersion in flowconfig currently: " ^ current_version_str ^ - "\n" in - Lsp_helpers.telemetry_log to_stdout message; - lsp_exit_bad () - end; - let start_env = CommandUtils.make_env flowconfig_name env.d_ienv.i_connect_params - env.d_ienv.i_root in - - let client_handshake = SocketHandshake.({ - client_build_id = build_revision; - is_stop_request = false; - server_should_exit_if_version_mismatch = env.d_autostart; (* only exit if we'll restart it *) - server_should_hangup_if_still_initializing = true; }, { - client_type = Persistent { - logging_context = FlowEventLogger.get_context (); - lsp = Some env.d_ienv.i_initialize_params; - }; - }) in - let conn = CommandConnectSimple.connect_once - ~flowconfig_name ~client_handshake ~tmp_dir:start_env.CommandConnect.tmp_dir - start_env.CommandConnect.root in - - match conn with - | Ok (ic, oc) -> - let _bytesWritten = Marshal_tools.to_fd_with_preamble - (Unix.descr_of_out_channel oc) - Persistent_connection_prot.Subscribe in - let i_server_id = env.d_ienv.i_server_id + 1 in - let new_env = { - c_ienv = { env.d_ienv with i_server_id; }; - c_conn = { ic; oc; }; - c_server_status = (ServerStatus.initial_status, None); - c_about_to_exit_code = None; - c_is_rechecking = false; - c_lazy_stats = None; - c_diagnostics = SMap.empty; - c_outstanding_requests_to_server = Lsp.IdSet.empty; - c_outstanding_diagnostics = SSet.empty; - c_recent_summaries = []; - } in - (* send the initial messages to the server *) - send_to_server new_env Persistent_connection_prot.Subscribe; - let make_open_message (textDocument: TextDocumentItem.t) : lsp_message = - NotificationMessage (DidOpenNotification { DidOpen.textDocument; }) in - let open_messages = env.d_ienv.i_open_files |> SMap.bindings - |> List.map ~f:(fun (_, {o_open_doc; _}) -> make_open_message o_open_doc) in - let open Hh_json in - let metadata = { Persistent_connection_prot. - start_wall_time = Unix.gettimeofday (); - start_server_status = Some (fst new_env.c_server_status); - start_watcher_status = snd new_env.c_server_status; - start_json_truncated = JSON_Object ["method", JSON_String "synthetic/open"]; - start_lsp_state = None; - start_lsp_state_reason = None; - error_info = None; - server_profiling = None; - client_duration = None; - extra_data = []; - server_logging_context = None; - } in - List.iter open_messages ~f:(send_lsp_to_server new_env metadata); - (* close the old UI and bring up the new *) - let new_state = show_connected new_env in - new_state - - (* Server_missing means the lock file is absent, because the server isn't running *) - | Error (CommandConnectSimple.Server_missing as reason) -> - let new_env = { env with d_autostart = false; d_server_status = None; } in - if env.d_autostart then - let start_result = CommandConnect.start_flow_server start_env in - match start_result with - | Ok () -> show_connecting reason new_env - | Error (msg, code) -> show_disconnected (Some code) (Some msg) new_env - else - show_disconnected None None new_env - - (* Server_socket_missing means the server is present but lacks its sock *) - (* file. 
There's a tiny race possibility that the server has created a *) - (* lock but not yet created a sock file. More likely is that the server *) - (* is an old version of the server which doesn't even create the right *) - (* sock file. We'll kill the server now so we can start a new one next. *) - (* And if it was in that race? bad luck... *) - | Error (CommandConnectSimple.Server_socket_missing as reason) -> - begin try - let tmp_dir = start_env.CommandConnect.tmp_dir in - let root = start_env.CommandConnect.root in - CommandMeanKill.mean_kill ~flowconfig_name ~tmp_dir root; - show_connecting reason { env with d_server_status = None; } - with CommandMeanKill.FailedToKill _ -> - let msg = "An old version of the Flow server is running. Please stop it." in - show_disconnected None (Some msg) { env with d_server_status = None; } - end - - (* Build_id_mismatch is because the server version was different from client *) - (* If we didn't ask the server to exit on mismatch, then we're stuck. *) - | Error (CommandConnectSimple.Build_id_mismatch as _reason) when not env.d_autostart -> - let msg = "Flow: the running server is the wrong version" in - show_disconnected None (Some msg) { env with d_server_status = None; } - - (* If we did ask the server to terminate upon version mismatch, then we'll *) - (* just keep trying to connect, and next time we'll start a new server. *) - (* and the server terminates immediately after telling us this - so we'll *) - (* just keep trying to connect, and the next time we'll start a new server. *) - | Error (CommandConnectSimple.Build_id_mismatch as reason) -> - show_connecting reason { env with d_server_status = None; } - - (* While the server is busy initializing, sometimes we get Server_busy.Fail_on_init *) - (* with a server-status telling us how far it is through init. And sometimes we get *) - (* just ServerStatus.Not_responding if the server was just too busy to give us a *) - (* status update. These are cases where the right version of the server is running *) - (* but it's not speaking to us just now. So we'll keep trying until it's ready. *) - | Error ((CommandConnectSimple.Server_busy (CommandConnectSimple.Fail_on_init st)) as reason) -> - show_connecting reason { env with d_server_status = Some st; } - - (* The following codes mean the right version of the server is running so *) - (* we'll retry. They provide no information about the d_server_status of *) - (* the server, so we'll leave it as it was before. *) - | Error ((CommandConnectSimple.Server_busy CommandConnectSimple.Not_responding) as reason) - | Error ((CommandConnectSimple.Server_busy CommandConnectSimple.Too_many_clients) as reason) -> - show_connecting reason env - -let close_conn (env: connected_env) : unit = - try Timeout.shutdown_connection env.c_conn.ic with _ -> (); - try Timeout.close_in_noerr env.c_conn.ic with _ -> () - - -(************************************************************************) -(** Tracking **) -(************************************************************************) -(* The goal of tracking is that, if a server goes down, then all errors *) -(* and dialogs and things it created should be taken down with it. *) -(* *) -(* "track_to_server" is called for client->lsp messages when they get *) -(* sent to the current server. *) -(* "track_from_server" is called for server->lsp messages which *) -(* immediately get passed on to the client. *) -(* "dismiss_tracks" is called when a server gets disconnected. 
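
(* A standalone sketch of the dismissal bookkeeping described in this comment
 * block: track what the current server is responsible for, and when it dies,
 * synthesize the messages that take it all down. Ids and uris are plain
 * ints/strings here; the real code uses Lsp.IdSet, WrappedMap and SSet. *)
module IntSet = Set.Make (struct
  type t = int

  let compare = compare
end)

module StrSet = Set.Make (String)

type tracks = {
  requests_to_server : IntSet.t; (* must be answered with a "cancelled" error *)
  diagnostics_uris : StrSet.t; (* must be cleared with publishDiagnostics([]) *)
}

type synthetic =
  | Cancelled_response of int
  | Clear_diagnostics of string

let dismiss (t : tracks) : synthetic list * tracks =
  let cancels =
    IntSet.fold (fun id acc -> Cancelled_response id :: acc) t.requests_to_server []
  in
  let msgs =
    StrSet.fold (fun uri acc -> Clear_diagnostics uri :: acc) t.diagnostics_uris cancels
  in
  (msgs, { requests_to_server = IntSet.empty; diagnostics_uris = StrSet.empty })
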
*) -(* *) -(* EDITOR_OPEN_FILES - we keep the current contents of all editor open *) -(* files. Updated in response to client->lsp notifications *) -(* didOpen/Change/Save/Close. When a new server starts, we synthesize *) -(* didOpen messages to the new server. *) -(* OUTSTANDING_REQUESTS_TO_SERVER - for all client->lsp requests that *) -(* have been sent to the server. Added to this list when we *) -(* track_to_server(request); removed on track_from_server(response). *) -(* When a server dies, we synthesize RequestCancelled responses *) -(* ourselves since the server will no longer do that. *) -(* OUTSTANDING_REQUESTS_FROM_SERVER - for all server->lsp requests. We *) -(* generate a "wrapped-id" that encodes which server it came from, *) -(* and send immediately to the client. Added to this list when we *) -(* track_from_server(request), removed in track_to_server(response). *) -(* When a server dies, we emit CancelRequest notifications to the *) -(* client so it can dismiss dialogs or similar. When any response *) -(* comes back from the client, we ignore ones that are destined for *) -(* now-defunct servers, and only forward on the ones for the current *) -(* server. *) -(* OUTSTANDING_DIAGNOSTICS - for all server->lsp publishDiagnostics *) -(* notifications which are being displayed in the client. Added to *) -(* this list when we track_from_server(publishDiagnostics) a file *) -(* with non-empty error list; removed when we *) -(* track_from_server(publishDiagnostics) a file with empty error list.*) -(* When a server dies, we synthesize publishDiagnostics notifications *) -(* to the client so it can erase all diagnostics. *) -(* OUTSTANDING_PROGRESS - for all server->lsp progress notifications *) -(* which are being displayed in the client. Added to this list when *) -(* we track_from_server(progress) a non-empty progress; removed *) -(* when we track_from_server(progress) an empty progress. When a *) -(* server dies, we synthesize progress notifications to the client *) -(* so it can erase all outstanding progress messages. *) -(* OUTSTANDING_ACTION_REQUIRED - similar to outstanding_progress. *) - -type track_effect = { - changed_live_uri: string option; -} - -let track_to_server (state: state) (c: Lsp.lsp_message) : (state * track_effect) = - let changed_open_file = match (get_open_files state), c with - | _, NotificationMessage (DidOpenNotification params) -> - let o_open_doc = params.DidOpen.textDocument in - let uri = params.DidOpen.textDocument.TextDocumentItem.uri in - Some (uri, Some {o_open_doc; o_ast=None; o_live_diagnostics=None;}) - - | _, NotificationMessage (DidCloseNotification params) -> - let uri = params.DidClose.textDocument.TextDocumentIdentifier.uri in - Some (uri, None) - - | Some open_files, NotificationMessage (DidChangeNotification params) -> - let uri = params.DidChange.textDocument.VersionedTextDocumentIdentifier.uri in - let {o_open_doc; _} = SMap.find uri open_files in - let text = o_open_doc.TextDocumentItem.text in - let text = Lsp_helpers.apply_changes_unsafe text params.DidChange.contentChanges in - let o_open_doc = { Lsp.TextDocumentItem. - uri; - languageId = o_open_doc.TextDocumentItem.languageId; - version = params.DidChange.textDocument.VersionedTextDocumentIdentifier.version; - text; - } in - Some (uri, Some {o_open_doc; o_ast=None; o_live_diagnostics=None;}) - - | _, _ -> - None - in - (* update ienv.i_open_files... 
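
(* A simplified, self-contained model of the open-file bookkeeping performed
 * here. The real code stores a full TextDocumentItem plus lazily computed
 * AST and live diagnostics; this sketch keeps only the text per uri, and
 * takes the whole new text on change (real LSP didChange sends deltas). *)
module UriMap = Map.Make (String)

type notif =
  | Did_open of string * string (* uri, initial text *)
  | Did_change of string * string (* uri, full new text *)
  | Did_close of string

let update_open_files (files : string UriMap.t) (n : notif) : string UriMap.t =
  match n with
  | Did_open (uri, text) -> UriMap.add uri text files
  | Did_change (uri, text) ->
    (* only meaningful for files we believe are open *)
    if UriMap.mem uri files then UriMap.add uri text files else files
  | Did_close uri -> UriMap.remove uri files
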
*) - let state, changed_live_uri = match changed_open_file with - | Some (uri, open_file_info) -> update_open_file uri open_file_info state, Some uri - | None -> state, None - in - (* update cenv.c_diagnostics... we don't need to send updated squiggle locations *) - (* right now ourselves, since all editors take care of that; but if ever we *) - (* re-send the server's existing diagnostics for this file then that should take *) - (* into account any user edits since then. This isn't perfect - e.g. if the user *) - (* modifies a file we'll update squiggles, but if the user subsquently closes the *) - (* file unsaved and then re-opens it then we'll be left with wrong squiggles. *) - (* It also doesn't compensate if the flow server starts a typecheck, then receives *) - (* a DidChange, then sends error spans from as it was at the start of the typecheck. *) - (* Still, at least we're doing better on the common case -- where the server has sent *) - (* diagnostics, then the user types, then we re-send live syntax errors. *) - let state = match state, c with - | Connected cenv, NotificationMessage (DidChangeNotification params) -> begin - let uri = params.DidChange.textDocument.VersionedTextDocumentIdentifier.uri in - match SMap.find_opt uri cenv.c_diagnostics with - | Some diagnostics_for_uri -> - let diagnostics_for_uri = Lsp_helpers.update_diagnostics_due_to_change - diagnostics_for_uri params in - let c_diagnostics = SMap.add uri diagnostics_for_uri cenv.c_diagnostics in - Connected {cenv with c_diagnostics; } - | _ -> state - end - | _ -> state - in - (* update cenv.c_outstanding_requests*... *) - let state = match state, c with - (* client->server requests *) - | Connected env, RequestMessage (id, _) -> - Connected { env with c_outstanding_requests_to_server = - IdSet.add id env.c_outstanding_requests_to_server; - } - (* client->server responses *) - | Connected env, ResponseMessage (id, _) -> - let wrapped = decode_wrapped id in - let c_ienv = { env.c_ienv with i_outstanding_requests_from_server = - WrappedMap.remove wrapped env.c_ienv.i_outstanding_requests_from_server; - } in - Connected { env with c_ienv; } - | _ -> state - in - state, { changed_live_uri; } - - -let track_from_server (state: state) (c: Lsp.lsp_message) : state = - match state, c with - (* server->client response *) - | Connected env, ResponseMessage (id, _) -> - Connected { env with c_outstanding_requests_to_server = - IdSet.remove id env.c_outstanding_requests_to_server; - } - (* server->client request *) - | Connected env, RequestMessage (id, params) -> - let wrapped = { server_id = env.c_ienv.i_server_id; message_id = id; } in - let c_ienv = { env.c_ienv with i_outstanding_requests_from_server = - WrappedMap.add wrapped params env.c_ienv.i_outstanding_requests_from_server; - } in - Connected { env with c_ienv; } - (* server->client publishDiagnostics: save up all URIs with non-empty diagnostics *) - | Connected env, NotificationMessage (PublishDiagnosticsNotification params) -> - let uri = params.PublishDiagnostics.uri in - let published = params.PublishDiagnostics.diagnostics in - let c_outstanding_diagnostics = match published with - | [] -> SSet.remove uri env.c_outstanding_diagnostics - | _ -> SSet.add uri env.c_outstanding_diagnostics in - Connected { env with c_outstanding_diagnostics } - | _, _ -> state - -let dismiss_tracks (state: state) : state = - let decline_request_to_server (id: lsp_id) : unit = - let e = Lsp_fmt.error_of_exn (Error.RequestCancelled "Connection to server has been lost") in - let stack 
= Printexc.get_callstack 100 |> Printexc.raw_backtrace_to_string in - let json = Lsp_fmt.print_lsp_response id (ErrorResult (e, stack)) in - to_stdout json - in - let cancel_request_from_server - (server_id: int) - (wrapped: wrapped_id) - (_request: lsp_request): unit = - if server_id = wrapped.server_id then - let id = encode_wrapped wrapped in - let notification = CancelRequestNotification { CancelRequest.id; } in - let json = Lsp_fmt.print_lsp_notification notification in - to_stdout json - else - () - in - let clear_diagnostics (uri: string) : unit = - let notification = PublishDiagnosticsNotification { - PublishDiagnostics.uri; diagnostics = []; } in - let json = Lsp_fmt.print_lsp_notification notification in - to_stdout json - in - match state with - | Connected env -> - WrappedMap.iter (cancel_request_from_server env.c_ienv.i_server_id) - env.c_ienv.i_outstanding_requests_from_server; - IdSet.iter decline_request_to_server env.c_outstanding_requests_to_server; - SSet.iter clear_diagnostics env.c_outstanding_diagnostics; - Connected { env with - c_outstanding_requests_to_server = IdSet.empty; - c_outstanding_diagnostics = SSet.empty; - } - | _ -> state - - -let lsp_DocumentItem_to_flow (open_doc: Lsp.TextDocumentItem.t) : File_input.t = - let uri = open_doc.TextDocumentItem.uri in - let fn = Lsp_helpers.lsp_uri_to_path uri in - let fn = Option.value (Sys_utils.realpath fn) ~default:fn in - File_input.FileContent (Some fn, open_doc.TextDocumentItem.text) - - -let lsp_DocumentIdentifier_to_flow - (textDocument: Lsp.TextDocumentIdentifier.t) - ~(state: state) - : File_input.t = - let uri = textDocument.TextDocumentIdentifier.uri in - let editor_open_files = get_open_files state in - match Option.bind editor_open_files (SMap.get uri) with - | None -> - let fn = Lsp_helpers.lsp_uri_to_path uri in - let fn = Option.value (Sys_utils.realpath fn) ~default:fn in - File_input.FileName fn - | Some {o_open_doc; _} -> lsp_DocumentItem_to_flow o_open_doc - - -(******************************************************************************) -(* Diagnostics *) -(* These should really be handle inside the flow server so it sends out *) -(* LSP publishDiagnostics notifications and we track them in the normal way. *) -(* But while the flow server has to handle legacy clients as well as LSP *) -(* clients, we don't want to make the flow server code too complex, so we're *) -(* handling them here for now. *) -(******************************************************************************) - -let error_to_lsp - ~(severity: PublishDiagnostics.diagnosticSeverity option) - ~(default_uri: string) - (error: Errors.error) - : string * PublishDiagnostics.diagnostic = - let error = Errors.Lsp_output.lsp_of_error error in - let location = Flow_lsp_conversions.loc_to_lsp_with_default - error.Errors.Lsp_output.loc ~default_uri in - let uri = location.Lsp.Location.uri in - let related_to_lsp (loc, relatedMessage) = - let relatedLocation = Flow_lsp_conversions.loc_to_lsp_with_default loc ~default_uri in - { Lsp.PublishDiagnostics.relatedLocation; relatedMessage; } in - let relatedInformation = - List.map error.Errors.Lsp_output.relatedLocations ~f:related_to_lsp - in - uri, { Lsp.PublishDiagnostics. 
- range = location.Lsp.Location.range; - severity; - code = Lsp.PublishDiagnostics.StringCode error.Errors.Lsp_output.code; - source = Some "Flow"; - message = error.Errors.Lsp_output.message; - relatedInformation; - relatedLocations = relatedInformation; (* legacy fb extension *) + CommandSpec.name = "lsp"; + doc = "Acts as a server for the Language Server Protocol over stdin/stdout [experimental]"; + usage = + Printf.sprintf + "Usage: %s lsp\n\nRuns a server for the Language Server Protocol\n" + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> temp_dir_flag + |> shm_flags + |> lazy_flags + |> autostop_flag + |> from_flag); } - -(* parse_and_cache: either the uri is an open file for which we already - * have parse results (ast+diagnostics), so we can just return them; - * or it's an open file and we are expected to lazily compute the parse results - * and store them in the state; - * or it's an unopened file in which case we'll retrieve parse results but - * won't store them. *) -let parse_and_cache flowconfig_name (state: state) (uri: string) - : state * (Loc.t, Loc.t) Flow_ast.program * PublishDiagnostics.diagnostic list option = - (* part of parsing is producing parse errors, if so desired *) - let liveSyntaxErrors = let open Initialize in match state with - | Connected cenv -> cenv.c_ienv.i_initialize_params.initializationOptions.liveSyntaxErrors - | Disconnected denv -> denv.d_ienv.i_initialize_params.initializationOptions.liveSyntaxErrors - | _ -> false in - - let error_to_diagnostic (loc, parse_error) = - let message = Errors.Friendly.message_of_string (Parse_error.PP.error parse_error) in - let error = Errors.mk_error ~kind:Errors.ParseError (loc |> ALoc.of_loc) message in - let _, diagnostic = error_to_lsp - ~default_uri:uri ~severity:(Some PublishDiagnostics.Error) error in - diagnostic in - - (* The way flow compilation works in the flow server is that parser options *) - (* are permissive to allow all constructs, so that parsing works well; if *) - (* the user choses not to enable features through the user's .flowconfig *) - (* then use of impermissable constructs will be reported at typecheck time *) - (* (not as parse errors). We'll do the same here, with permissive parsing *) - (* and only reporting parse errors. 
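
(* A standalone sketch of the "compute lazily, cache only for open files"
 * pattern implemented by parse_and_cache here. The parser is abstracted to a
 * function argument, and the cache is a plain Hashtbl rather than part of the
 * LSP state record; all names are illustrative. *)
let make_cached_parser (parse : string -> 'result) =
  let cache = Hashtbl.create 16 in
  let get ~is_open uri =
    match Hashtbl.find_opt cache uri with
    | Some r -> r (* open file already parsed: reuse the stored result *)
    | None ->
      let r = parse uri in
      if is_open then Hashtbl.add cache uri r;
      (* unopened files are parsed on demand but never cached *)
      r
  in
  let invalidate uri = Hashtbl.remove cache uri in
  (* call `invalidate` on didChange/didClose so stale results are dropped *)
  (get, invalidate)
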
*) - let get_parse_options () = - let root = get_root state in - let use_strict = Option.value_map root ~default:false ~f:(fun root -> - Server_files_js.config_file flowconfig_name root - |> FlowConfig.get |> FlowConfig.modules_are_use_strict) in - Some Parser_env.({ - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = true; - use_strict; - }) in - - let parse file = - let (program, errors) = try - let content = File_input.content_of_file_input_unsafe file in - let filename_opt = File_input.path_of_file_input file in - let filekey = Option.map filename_opt ~f:(fun fn -> File_key.SourceFile fn) in - let parse_options = get_parse_options () in - Parser_flow.program_file ~fail:false ~parse_options ~token_sink:None content filekey - with _ -> - (Loc.none,[],[]), [] - in - program, if liveSyntaxErrors then Some (List.map errors ~f:error_to_diagnostic) else None in - - let open_files = get_open_files state in - let existing_open_file_info = Option.bind open_files (SMap.get uri) in - match existing_open_file_info with - | Some {o_ast=Some o_ast; o_live_diagnostics; _} -> - state, o_ast, o_live_diagnostics - | Some {o_open_doc; _} -> - let file = lsp_DocumentItem_to_flow o_open_doc in - let o_ast, o_live_diagnostics = parse file in - let open_file_info = Some {o_open_doc; o_ast=Some o_ast; o_live_diagnostics;} in - let state = update_open_file uri open_file_info state in - state, o_ast, o_live_diagnostics - | None -> - let fn = Lsp_helpers.lsp_uri_to_path uri in - let fn = Option.value (Sys_utils.realpath fn) ~default:fn in - let file = File_input.FileName fn in - let open_ast, open_diagnostics = parse file in - state, open_ast, open_diagnostics - - -(* print_diagnostics: just pushes the set of diagnostics for this uri to the client - * taking into account whether there are superceding local parse errors as well. - * We actually only send the first 200 errors per file to the client, since - * more than that wouldn't add value to the user, and makes clients sluggish. *) -let print_diagnostics - (uri: string) - (diagnostics: PublishDiagnostics.diagnostic list) - (state: state) - : state = - let open PublishDiagnostics in - - let prev_server_reported, prev_open_reported = match state with - | Connected cenv -> - SSet.mem uri cenv.c_outstanding_diagnostics, - SSet.mem uri cenv.c_ienv.i_outstanding_diagnostics - | Disconnected denv -> - false, SSet.mem uri denv.d_ienv.i_outstanding_diagnostics - | _ -> false, false in - - (* First we'll look at server tracks, update then appropriately. *) - (* This is to maintain the list of all URIs for which the server has sent *) - (* diagnostics, so all those URIs can be cleared should the server disconnect. *) - let msg = NotificationMessage - (PublishDiagnosticsNotification { PublishDiagnostics.uri; diagnostics; }) in - let state = track_from_server state msg in - - (* Next look at open-file tracks, update them appropriately. 
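
(* A standalone sketch of the capping logic applied a few lines below: the
 * client gets at most `cap` diagnostics per file, sorted by position, plus one
 * synthetic entry saying how many were dropped. Diagnostics are reduced to
 * (line, message) pairs here. *)
let cap_diagnostics ?(cap = 200) (diags : (int * string) list) : (int * string) list =
  if List.length diags <= cap then
    diags (* below the cap: skip the O(n log n) sort entirely *)
  else
    let sorted = List.sort (fun (l1, _) (l2, _) -> compare l1 l2) diags in
    let rec take n = function
      | [] -> []
      | x :: xs ->
        if n <= 0 then
          []
        else
          x :: take (n - 1) xs
    in
    let retained = take cap sorted in
    let note =
      (0, Printf.sprintf "[Only showing %d/%d diagnostics]" cap (List.length diags))
    in
    note :: retained
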
*) - let open_file_info = Option.bind (get_open_files state) (SMap.get uri) in - let state, use_live, o_live_diagnostics = match open_file_info with - | None - | Some {o_live_diagnostics=None; _} -> - state, false, [] - | Some {o_live_diagnostics=Some o_live_diagnostics; _} -> - let update_ienv ienv = - let i_outstanding_diagnostics = match o_live_diagnostics with - | [] -> SSet.remove uri ienv.i_outstanding_diagnostics - | _ -> SSet.add uri ienv.i_outstanding_diagnostics in - { ienv with i_outstanding_diagnostics; } in - let state = match state with - | Connected cenv -> Connected {cenv with c_ienv = update_ienv cenv.c_ienv} - | Disconnected denv -> Disconnected {denv with d_ienv=update_ienv denv.d_ienv} - | _ -> state in - state, true, o_live_diagnostics in - - (* If using live-diagnostics, then strip out parse errors from the server diagnostics *) - (* and instead include the local ones. *) - let diagnostics = if use_live then - let parse_code = Errors.string_of_kind Errors.ParseError in - let diagnostics = List.filter diagnostics ~f:(fun d -> d.code <> StringCode (parse_code)) in - o_live_diagnostics @ diagnostics - else - diagnostics in - - (* Send only the first 'cap' diagnostics per file to the client *) - let cap = 200 in - let is_below_cap = (List.nth diagnostics cap) = None in - let diagnostics = if is_below_cap then - (* avoid O(nlogn) sort in this case *) - diagnostics - else begin - let cmp d1 d2 = Lsp_helpers.pos_compare d1.range.start d2.range.start in - let diagnostics = List.sort cmp diagnostics in - let (retain, discard) = List.split_n diagnostics cap in - match discard with - | [] -> retain - | discard -> - let discard_count = List.length discard in - let message = Printf.sprintf "[Only showing %i/%i diagnostics]" cap (cap + discard_count) in - let diagnostic = { PublishDiagnostics. - (* the following range displays fine in all editors, regardless of contents *) - range = {start={line=0; character=0;}; end_={line=0; character=0;}}; - severity = Some PublishDiagnostics.Information; - code = NoCode; - source = Some "Flow"; - message; - relatedInformation = []; - relatedLocations = []; - } in - diagnostic :: retain - end in - - (* Avoid sending the message if it was empty before and is empty now. *) - (* This isn't needed for correct client behavior, but it makes the transcripts *) - (* easier to write unit-tests for! *) - let msg = NotificationMessage - (PublishDiagnosticsNotification { PublishDiagnostics.uri; diagnostics; }) in - let new_reported = match diagnostics with [] -> false | _ -> true in - if prev_open_reported || prev_server_reported || new_reported then - to_stdout (Lsp_fmt.print_lsp msg); - state - -let do_additional_diagnostics - (cenv: connected_env) - (diagnostics: PublishDiagnostics.diagnostic list SMap.t) - : state = - (* Merge the additional diagnostics into cenv *) - let uris = SMap.bindings diagnostics |> List.map ~f:fst |> SSet.of_list in - let combine _uri existing additions = Some (existing @ additions) in - let c_diagnostics = SMap.union ~combine cenv.c_diagnostics diagnostics in - let state = Connected { cenv with c_diagnostics; } in - - (* Send publishDiagnostics for all files touched by the additions. 
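
(* A self-contained sketch of the two map operations used by
 * do_additional_diagnostics here and do_replacement_diagnostics below:
 * appending new diagnostics per uri, and finding the uris whose diagnostics
 * must now be cleared on the client. *)
module DiagMap = Map.Make (String)

(* Additional: per-uri lists are appended, other uris are left untouched. *)
let add_diagnostics existing additions =
  DiagMap.union (fun _uri old extra -> Some (old @ extra)) existing additions

(* Replacement: any uri that had diagnostics before but none in the new map
 * needs an explicit publishDiagnostics([]) so the client erases squiggles. *)
let uris_to_clear ~before ~after =
  DiagMap.fold
    (fun uri _ acc -> if DiagMap.mem uri after then acc else uri :: acc)
    before
    []
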
*) - let to_send = SMap.filter (fun uri _ -> SSet.mem uri uris) c_diagnostics in - let state = SMap.fold print_diagnostics to_send state - in - state - -let do_replacement_diagnostics - (cenv: connected_env) - (c_diagnostics: PublishDiagnostics.diagnostic list SMap.t) - : state = - let state = Connected { cenv with c_diagnostics; } in - - (* Send publishDiagnostics for all files that no longer have diagnostics *) - let old_uris = SMap.bindings cenv.c_diagnostics |> List.map ~f:fst |> SSet.of_list in - let new_uris = SMap.bindings c_diagnostics |> List.map ~f:fst |> SSet.of_list in - let now_empty_uris = SSet.diff old_uris new_uris in - let print_empty uri state = print_diagnostics uri [] state in - let state = SSet.fold print_empty now_empty_uris state in - - (* Send publishDiagnostics for all files that have diagnostics *) - let state = SMap.fold print_diagnostics c_diagnostics state - in - state - -let do_live_diagnostics flowconfig_name (state: state) (uri: string): state = - (* reparse the file and write it into the state's editor_open_files as needed *) - let state, _, _ = parse_and_cache flowconfig_name state uri in - (* republish the diagnostics for this file based on a mix of server-generated ones *) - (* if present, and client-generated ones if the file is open *) - let server_diagnostics = match state with - | Connected cenv -> Option.value (SMap.get uri cenv.c_diagnostics) ~default:[] - | _ -> [] in - let state = print_diagnostics uri server_diagnostics state in - state - - -let show_recheck_progress (cenv: connected_env) : state = - let type_, message, shortMessage, progress, total = - match cenv.c_is_rechecking, cenv.c_server_status, cenv.c_lazy_stats with - | true, (server_status, _), _ when not (ServerStatus.is_free server_status) -> - let shortMessage, progress, total = ServerStatus.get_progress server_status in - let message = "Flow: " ^ (ServerStatus.string_of_status ~use_emoji:true server_status) in - MessageType.WarningMessage, message, shortMessage, progress, total - | true, _, _ -> - MessageType.WarningMessage, "Flow: Server is rechecking...", None, None, None - | false, _, Some {ServerProt.Response.lazy_mode=Some mode; checked_files; total_files} - when checked_files < total_files -> - let message = Printf.sprintf - "Flow: done recheck. 
(%s lazy mode let it check only %d/%d files [[more...](%s)])" - Options.(match mode with LAZY_MODE_FILESYSTEM -> "fs" | LAZY_MODE_IDE -> "ide") - checked_files total_files "https://flow.org/en/docs/lang/lazy-modes/" in - MessageType.InfoMessage, message, None, None, None - | false, _, _ -> - MessageType.InfoMessage, "Flow: done recheck", None, None, None - in - Connected { cenv with - c_ienv = show_status~type_ ~message ~shortMessage ~progress ~total cenv.c_ienv - } - -let do_documentSymbol flowconfig_name (state: state) (id: lsp_id) - (params: DocumentSymbol.params): state = - let uri = params.DocumentSymbol.textDocument.TextDocumentIdentifier.uri in - let state, ast, _ = parse_and_cache flowconfig_name state uri in - let result = Flow_lsp_conversions.flow_ast_to_lsp_symbols ~uri ast in - let json = Lsp_fmt.print_lsp (ResponseMessage (id, DocumentSymbolResult result)) in - to_stdout json; - state - - - - -module RagePrint = struct - let addline (b: Buffer.t) (prefix: string) (s: string) : unit = - Buffer.add_string b prefix; - Buffer.add_string b s; - Buffer.add_string b "\n"; - () - - let string_of_lazy_stats (lazy_stats: ServerProt.Response.lazy_stats) : string = - let open ServerProt in - Printf.sprintf "lazy_mode=%s, checked_files=%d, total_files=%d" - (Option.value_map lazy_stats.Response.lazy_mode ~default:"" ~f:Options.lazy_mode_to_string) - lazy_stats.Response.checked_files lazy_stats.Response.total_files - - let string_of_connect_params (p: connect_params) : string = - let open CommandUtils in - Printf.sprintf ( - "from=%s, retries=%d, retry_if_init=%B, no_auto_start=%B, autostop=%B, \ - ignore_version=%B quiet=%B, temp_dir=%s, \ - timeout=%s, lazy_mode=%s") - p.from p.retries p.retry_if_init p.no_auto_start p.autostop - p.ignore_version p.quiet (Option.value ~default:"None" p.temp_dir) - (Option.value_map p.timeout ~default:"None" ~f:string_of_int) - (Option.value_map p.lazy_mode ~default:"None" ~f:Options.lazy_mode_to_string) - - let string_of_open_file {o_open_doc; o_ast; o_live_diagnostics} : string = - Printf.sprintf "(uri=%s version=%d text=[%d bytes] ast=[%s] diagnostics=[%s])" - o_open_doc.TextDocumentItem.uri - o_open_doc.TextDocumentItem.version - (String.length o_open_doc.TextDocumentItem.text) - (Option.value_map o_ast ~default:"absent" ~f:(fun _ -> "present")) - (Option.value_map o_live_diagnostics ~default:"absent" - ~f:(fun d -> List.length d |> string_of_int)) - - let string_of_open_files (files: open_file_info SMap.t) : string = - SMap.bindings files - |> List.map ~f:(fun (_,ofi) -> string_of_open_file ofi) - |> String.concat "," - - let string_of_show_status (show_status: show_status_t) : string = - match show_status with - | Never_shown -> "Never_shown" - | Shown (id_opt, params) -> Printf.sprintf "Shown id=%s params=%s" - (Option.value_map id_opt ~default:"None" ~f:Lsp_fmt.id_to_string) - (print_showStatus params |> Hh_json.json_to_string) - - let add_ienv (b: Buffer.t) (ienv: initialized_env) : unit = - addline b "i_connect_params=" (ienv.i_connect_params |> string_of_connect_params); - addline b "i_root=" (ienv.i_root |> Path.to_string); - addline b "i_version=" (ienv.i_version |> Option.value ~default:"None"); - addline b "i_server_id=" (ienv.i_server_id |> string_of_int); - addline b "i_can_autostart_after_version_mismatch=" (ienv.i_can_autostart_after_version_mismatch - |> string_of_bool); - addline b "i_outstanding_local_handlers=" (ienv.i_outstanding_local_handlers - |> IdMap.bindings |> List.map ~f:(fun (id,_handler) -> Lsp_fmt.id_to_string id) - |> 
String.concat ","); - addline b "i_outstanding_local_requests=" (ienv.i_outstanding_local_requests - |> IdMap.bindings |> List.map ~f:(fun (id,req) -> Printf.sprintf "%s:%s" - (Lsp_fmt.id_to_string id) (Lsp_fmt.request_name_to_string req)) - |> String.concat ","); - addline b "i_outstanding_requests_from_server=" (ienv.i_outstanding_requests_from_server - |> WrappedMap.bindings - |> List.map ~f:(fun (id,req) -> Printf.sprintf "#%d:%s:%s" - id.server_id (Lsp_fmt.id_to_string id.message_id) (Lsp_fmt.request_name_to_string req)) - |> String.concat ","); - addline b "i_isConnected=" (ienv.i_isConnected |> string_of_bool); - addline b "i_status=" (ienv.i_status |> string_of_show_status); - addline b "i_open_files=" (ienv.i_open_files |> string_of_open_files); - addline b "i_outstanding_diagnostics=" (ienv.i_outstanding_diagnostics - |> SSet.elements |> String.concat ", "); - () - - let add_denv (b: Buffer.t) (denv: disconnected_env) : unit = - let server_status, watcher_status = match denv.d_server_status with - | None -> None, None - | Some (s, w) -> Some s, Some w in - add_ienv b denv.d_ienv; - addline b "d_autostart=" (denv.d_autostart |> string_of_bool); - addline b "d_server_status:server=" (server_status - |> Option.value_map ~default:"None" ~f:ServerStatus.string_of_status); - addline b "d_server_status:watcher=" (watcher_status - |> Option.value_map ~default:"None" ~f:FileWatcherStatus.string_of_status); - () - - let add_cenv (b: Buffer.t) (cenv: connected_env) : unit = - let server_status, watcher_status = cenv.c_server_status in - add_ienv b cenv.c_ienv; - addline b "c_server_status:server=" (server_status |> ServerStatus.string_of_status); - addline b "c_server_status:watcher=" (watcher_status - |> Option.value_map ~default:"None" ~f:FileWatcherStatus.string_of_status); - addline b "c_about_to_exit_code=" (cenv.c_about_to_exit_code - |> Option.value_map ~default:"None" ~f:FlowExitStatus.to_string); - addline b "c_is_rechecking=" (cenv.c_is_rechecking |> string_of_bool); - addline b "c_diagnostics=" (cenv.c_diagnostics - |> SMap.bindings |> List.map ~f:(fun (uri, d) -> Printf.sprintf "%s:%d" uri (List.length d)) - |> String.concat ", "); - addline b "c_lazy_stats=" (cenv.c_lazy_stats - |> Option.value_map ~default:"None" ~f:string_of_lazy_stats); - addline b "c_outstanding_requests_to_server=" (cenv.c_outstanding_requests_to_server - |> IdSet.elements |> List.map ~f:Lsp_fmt.id_to_string |> String.concat ","); - addline b "c_outstanding_diagnostics=" (cenv.c_outstanding_diagnostics - |> SSet.elements |> String.concat ", "); - () - - let string_of_state (state: state) : string = - let b = Buffer.create 10000 in - begin match state with - | Pre_init p -> Buffer.add_string b (Printf.sprintf "Pre_init:\n%s\n" - (string_of_connect_params p)) - | Post_shutdown -> Buffer.add_string b "Post_shutdown:\n[]\n" - | Disconnected denv -> Buffer.add_string b "Disconnected:\n"; add_denv b denv; - | Connected cenv -> Buffer.add_string b "Connected:\n"; add_cenv b cenv; - end; - Buffer.contents b -end - -let do_rage flowconfig_name (state: state) : Rage.result = - let open Rage in - - (* Some helpers to add various types of data to the rage output... 
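
(* A standalone version of the size guard used by add_file just below: rage
 * includes at most the last `max_len` bytes of each log file, so a
 * multi-gigabyte log cannot blow up the report. *)
let tail_of_string ?(max_len = 10 * 1024 * 1024) (data : string) : string =
  let len = String.length data in
  if len <= max_len then data else String.sub data (len - max_len) max_len

(* e.g. tail_of_string ~max_len:4 "abcdefgh" = "efgh" *)
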
*) - let add_file (items: rageItem list) (file: Path.t) : rageItem list = - if Path.file_exists file then - let data = Path.cat file in (* cat even up to 1gig is workable even if ugly *) - let len = String.length data in - let max_len = 10 * 1024 * 1024 in (* maximum 10mb *) - let data = if len <= max_len then data else String.sub data (len - max_len) max_len in - { title = Some (Path.to_string file); data; } :: items - else - items in - let add_string (items: rageItem list) (data: string) : rageItem list = - { title = None; data; } :: items in - let add_pid (items: rageItem list) ((pid, reason): (int * string)) : rageItem list = - if String_utils.string_starts_with reason "slave" then - items - else - let pid = string_of_int pid in - (* some systems have "pstack", some have "gstack", some have neither... *) - let stack = try Sys_utils.exec_read_lines ~reverse:true ("pstack " ^ pid) - with _ -> begin - try Sys_utils.exec_read_lines ~reverse:true ("gstack " ^ pid) - with e -> ["unable to pstack - " ^ (Printexc.to_string e)] - end in - let stack = String.concat "\n" stack in - add_string items (Printf.sprintf "PSTACK %s (%s) - %s\n\n" pid reason stack) - in - - let items: rageItem list = [] in - - (* LOGFILES. *) - (* Where are the logs? Specified explicitly by the user with --log-file and *) - (* --monitor-log-file when they launched the server. Failing that, the *) - (* values in environment variables FLOW_LOG_FILE and FLOW_MONITOR_LOG_FILE *) - (* upon launch. Failing that, CommandUtils.server_log_file will look in the *) - (* flowconfig for a "log.file" option. Failing that it will synthesize one *) - (* from `Server_files_js.file_of_root "log"` in the tmp-directory. And *) - (* CommandUtils.monitor_log_file is similar except it bypasses flowconfig. *) - (* As for tmp dir, that's --temp_dir, failing that FLOW_TEMP_DIR, failing *) - (* that temp_dir in flowconfig, failing that Sys_utils.temp_dir_name /flow. *) - (* WOW! *) - (* Notionally the only authoritative way to find logs is to connect to a *) - (* running monitor and ask it. But we're a 'rage' command whose whole point *) - (* is to give good answers even when things are not working, e.g. when the *) - (* monitor is down. And in any case, by design, a flow client can only ever *) - (* interact with a server if the client was launched with the same flags *) - (* (minimum tmp_dir and flowconfig) as the server was launched with. *) - (* Therefore there's no need to ask the monitor. We'll just work with what *) - (* log files we'd write to were we ourselves asked to start a server. *) - let ienv = match state with - | Pre_init _ -> None - | Disconnected denv -> Some denv.d_ienv - | Connected cenv -> Some cenv.c_ienv - | Post_shutdown -> None in - let items = match ienv with - | None -> items - | Some ienv -> - let start_env = CommandUtils.make_env flowconfig_name ienv.i_connect_params ienv.i_root in - let tmp_dir = start_env.CommandConnect.tmp_dir in - let server_log_file = Path.make start_env.CommandConnect.log_file in - (* monitor log file isn't retained anywhere. But since flow lsp doesn't *) - (* take a --monitor-log-file option, then we know where it must be. 
*) - let monitor_log_file = - CommandUtils.monitor_log_file flowconfig_name tmp_dir start_env.CommandConnect.root in - let items = add_file items server_log_file in - let items = add_file items monitor_log_file in - (* Let's pick up the old files in case user reported bug after a crash *) - let items = add_file items (Path.concat server_log_file ".old") in - let items = add_file items (Path.concat monitor_log_file ".old") in - (* And the pids file *) - let items = try - let pids = PidLog.get_pids (Server_files_js.pids_file ~flowconfig_name ~tmp_dir ienv.i_root) - in - Core_list.fold pids ~init:items ~f:add_pid - with e -> - let message = Printexc.to_string e in - let stack = Printexc.get_backtrace () in - add_string items (Printf.sprintf "Failed to get PIDs: %s - %s" message stack) - in - items - in - - (* CLIENT. This includes the client's perception of the server state. *) - let items = add_string items ("LSP adapter state: " ^ (RagePrint.string_of_state state) ^ "\n") in - - (* DONE! *) - items - - -let parse_json (state: state) (json: Jsonrpc.message) : lsp_message = - (* to know how to parse a response, we must provide the corresponding request *) - let outstanding (id: lsp_id) : lsp_request = - let ienv = match state with - | Connected env -> env.c_ienv - | Disconnected env -> env.d_ienv - | _ -> failwith "Didn't expect an LSP response yet" in - try - IdMap.find id ienv.i_outstanding_local_requests - with Not_found -> - WrappedMap.find (decode_wrapped id) ienv.i_outstanding_requests_from_server - in - Lsp_fmt.parse_lsp json.Jsonrpc.json outstanding - -let with_timer (f: unit -> 'a) : (float * 'a) = - let start = Unix.gettimeofday () in - let ret = f () in - let duration = Unix.gettimeofday () -. start in - duration, ret - - -(************************************************************************) -(** Main loop **) -(************************************************************************) - -type log_needed = LogNeeded of Persistent_connection_prot.metadata | LogDeferred | LogNotNeeded - -let rec main +let main base_flags - (temp_dir: string option) - (shm_flags: CommandUtils.shared_mem_params) - (lazy_mode: Options.lazy_mode option) - (autostop: bool) - (from: string option) - ((): unit) - : unit = - let connect_params = { - from = Option.value from ~default:""; - retries = 0; - retry_if_init = false; - timeout = None; - no_auto_start = false; - temp_dir; - autostop; - lazy_mode; - shm_flags; - ignore_version = false; - quiet = false; - } in - let client = Jsonrpc.make_queue () in - let state = (Pre_init connect_params) in - main_loop base_flags.Base_flags.flowconfig_name client state - -and main_loop flowconfig_name (client: Jsonrpc.queue) (state: state) : unit = - let event = try Ok (get_next_event state client (parse_json state)) - with e -> Error (state, e, Utils.Callstack (Printexc.get_backtrace ())) in - let result = match event with - | Error (state, e, stack) -> Error (state, e, stack, None) - | Ok event -> - let (client_duration, result) = with_timer (fun () -> - try main_handle_unsafe flowconfig_name state event - with e -> Error (state, e, Utils.Callstack (Printexc.get_backtrace ()))) in - match result with - | Ok (state, logneeded) -> Ok (state, logneeded, client_duration) - | Error (state, e, stack) -> Error (state, e, stack, Some event) - in - let state = match result with - | Ok (state, LogNeeded metadata, client_duration) -> - let open Persistent_connection_prot in - let client_duration = if metadata.client_duration = None then Some client_duration - else 
metadata.client_duration in - let metadata = {metadata with client_duration} in - main_log_command state metadata; - state - | Ok (state, _, _) -> - state - | Error (state, e, stack, event) -> main_handle_error e stack state event - in - main_loop flowconfig_name client state - - -and main_handle_unsafe flowconfig_name (state: state) (event: event) - : (state * log_needed, state * exn * Utils.callstack) result = -begin - match state, event with - | Pre_init i_connect_params, - Client_message (RequestMessage (id, InitializeRequest i_initialize_params), metadata) -> - let i_root = Lsp_helpers.get_root i_initialize_params |> Path.make in - let d_ienv = { - i_initialize_params; - i_connect_params; - i_root; - i_version = get_current_version flowconfig_name i_root; - i_can_autostart_after_version_mismatch = true; - i_server_id = 0; - i_outstanding_local_requests = IdMap.empty; - i_outstanding_local_handlers = IdMap.empty; - i_outstanding_requests_from_server = WrappedMap.empty; - i_isConnected = false; - i_status = Never_shown; - i_open_files = SMap.empty; - i_outstanding_diagnostics = SSet.empty; - } in - (* If the version in .flowconfig is simply incompatible with our current *) - (* binary then it doesn't even make sense for us to start up. And future *) - (* attempts by the client to launch us will fail as well. Clients which *) - (* receive the following response are expected to shut down their LSP. *) - let required_version = get_current_version flowconfig_name i_root in - begin match CommandUtils.check_version required_version with - | Ok () -> () - | Error msg -> raise (Error.ServerErrorStart (msg, {Initialize.retry=false;})) - end; - let response = ResponseMessage (id, InitializeResult (do_initialize ())) in - let json = Lsp_fmt.print_lsp response in - to_stdout json; - let env = { - d_ienv; - d_autostart = true; - d_server_status = None; - } in - Ok (try_connect flowconfig_name env, LogNeeded metadata) - - | _, Client_message (RequestMessage (id, ShutdownRequest), _metadata) -> - begin match state with Connected env -> close_conn env | _ -> () end; - let response = ResponseMessage (id, ShutdownResult) in - let json = Lsp_fmt.print_lsp response in - to_stdout json; - Ok (Post_shutdown, LogNotNeeded) - - | _, Client_message (NotificationMessage ExitNotification, _metadata) -> - if state = Post_shutdown then lsp_exit_ok () else lsp_exit_bad () - - | Pre_init _, Client_message _ -> - raise (Error.ServerNotInitialized "Server not initialized") - - | _, Client_message ((ResponseMessage (id, result)) as c, metadata) -> - let ienv = match state with - | Connected env -> env.c_ienv - | Disconnected env -> env.d_ienv - | _ -> failwith "Didn't expect an LSP response yet" in - begin try - (* was it a response to a request issued by lspCommand? 
*) - let (handle, handle_error) = IdMap.find id ienv.i_outstanding_local_handlers in - let i_outstanding_local_handlers = IdMap.remove id ienv.i_outstanding_local_handlers in - let i_outstanding_local_requests = IdMap.remove id ienv.i_outstanding_local_requests in - let ienv = { ienv with i_outstanding_local_handlers; i_outstanding_local_requests; } in - let state = match state with - | Connected env -> Connected { env with c_ienv = ienv; } - | Disconnected env -> Disconnected { env with d_ienv = ienv; } - | _ -> failwith "Didn't expect an LSP response to be found yet" - in - match result, handle with - | ShowMessageRequestResult result, ShowMessageHandler handle -> - Ok (handle result state, LogNotNeeded) - | ShowStatusResult result, ShowStatusHandler handle -> Ok (handle result state, LogNotNeeded) - | ErrorResult (e, msg), _ -> Ok (handle_error (e, msg) state, LogNotNeeded) - | _ -> failwith (Printf.sprintf "Response %s has mistyped handler" (message_name_to_string c)) - with Not_found -> - (* if not, it must be a response to a request issued by the server *) - match state with - | Connected cenv -> - let (state, _) = track_to_server state c in - let wrapped = decode_wrapped id in (* only forward responses if they're to current server *) - if wrapped.server_id = cenv.c_ienv.i_server_id then send_lsp_to_server cenv metadata c; - Ok (state, LogNotNeeded) - | _ -> - failwith (Printf.sprintf "Response %s has missing handler" (message_name_to_string c)) - end - - | _, Client_message (RequestMessage (id, DocumentSymbolRequest params), metadata) -> - (* documentSymbols is handled in the client, not the server, since it's *) - (* purely syntax-driven and we'd like it to work even if the server is *) - (* busy or disconnected *) - let state = do_documentSymbol flowconfig_name state id params in - Ok (state, LogNeeded metadata) - - | Connected cenv, Client_message (c, metadata) -> - (* We'll track what's being sent to the server. This might involve some client *) - (* computation work, which we'll profile, and send it over in metadata. *) - (* Note: in the case where c is a cancel-notification for a request that *) - (* was already handled in lspCommand like ShutdownRequest or DocSymbolsRequest *) - (* we'll still forward it; that's okay since server already has to be *) - (* hardened against unrecognized ids in cancel requests. *) - let client_duration, state = with_timer (fun () -> - let state, {changed_live_uri} = track_to_server state c in - let state = Option.value_map changed_live_uri ~default:state - ~f:(do_live_diagnostics flowconfig_name state) in - state) in - let metadata = { metadata with - Persistent_connection_prot.client_duration=Some client_duration } in - send_lsp_to_server cenv metadata c; - Ok (state, LogDeferred) - - | _, Client_message (RequestMessage (id, RageRequest), metadata) -> - (* How to handle a rage request? If we're connected to a server, then the *) - (* above case will just have forwarded the message on to the server (and *) - (* we'll patch in our own extra information when the server replies). But *) - (* if there's no server then we have to reply here and now. 
*) - let result = do_rage flowconfig_name state in - let response = ResponseMessage (id, RageResult result) in - let json = Lsp_fmt.print_lsp response in - to_stdout json; - Ok (state, LogNeeded metadata) - - | _, Client_message ((NotificationMessage (DidOpenNotification _)) as c, metadata) - | _, Client_message ((NotificationMessage (DidChangeNotification _)) as c, metadata) - | _, Client_message ((NotificationMessage (DidSaveNotification _)) as c, metadata) - | _, Client_message ((NotificationMessage (DidCloseNotification _)) as c, metadata) -> - (* these are editor events that happen while disconnected. *) - let client_duration, state = with_timer (fun () -> - let state, {changed_live_uri} = track_to_server state c in - let state = Option.value_map changed_live_uri ~default:state - ~f:(do_live_diagnostics flowconfig_name state) in - state) in - let metadata = { metadata with - Persistent_connection_prot.client_duration=Some client_duration } in - Ok (state, LogNeeded metadata) - - | _, Client_message (NotificationMessage (CancelRequestNotification _), _metadata) -> - (* let's just not bother reporting any error in this case *) - Ok (state, LogNotNeeded) - - | Disconnected _, Client_message (c, _metadata) -> - let (state, _) = track_to_server state c in - let method_ = Lsp_fmt.denorm_message_to_string c in - let e = Error.RequestCancelled ("Server not connected; can't handle " ^ method_) in - let stack = Printexc.get_callstack 100 |> Printexc.raw_backtrace_to_string in - Error (state, e, Utils.Callstack stack) - - | Post_shutdown, Client_message (_, _metadata) -> - raise (Error.RequestCancelled "Server shutting down") - - | Connected cenv, Server_message (Persistent_connection_prot.ServerExit exit_code) -> - let state = Connected { cenv with c_about_to_exit_code = Some exit_code; } in - Ok (state, LogNotNeeded) - - | Connected cenv, Server_message (Persistent_connection_prot.LspFromServer (msg, metadata)) -> - let state, metadata = match msg with - | None -> state, metadata - | Some outgoing -> - let state = track_from_server state outgoing in - let outgoing, metadata = match outgoing with - | RequestMessage (id, request) -> - let wrapped = { server_id = cenv.c_ienv.i_server_id; message_id = id; } in - RequestMessage (encode_wrapped wrapped, request), metadata - | ResponseMessage (id, RageResult items) -> - (* we'll zero out the "client_duration", which at the moment represents client-side *) - (* work we did before sending out the request. By zeroing it out now, it'll get *) - (* filled out with the client-side work that gets done right here and now. *) - let metadata = { metadata with Persistent_connection_prot.client_duration = None} in - ResponseMessage (id, RageResult (items @ (do_rage flowconfig_name state))), metadata - | _ -> outgoing, metadata - in - to_stdout (Lsp_fmt.print_lsp outgoing); - state, metadata - in - Ok (state, LogNeeded metadata) - - | Connected cenv, Server_message (Persistent_connection_prot.Errors {errors; warnings}) -> - (* A note about the errors reported by this server message: *) - (* While a recheck is in progress, between StartRecheck and EndRecheck, *) - (* the server will periodically send errors+warnings. These are additive *) - (* to the errors which have previously been reported. Once the recheck *) - (* has finished then the server will send a new exhaustive set of errors. *) - (* At this opportunity we should erase all errors not in this set. 
*) - (* This differs considerably from the semantics of LSP publishDiagnostics *) - (* which says "whenever you send publishDiagnostics for a file, that *) - (* now contains the complete truth for that file." *) - - (* I hope that flow won't produce errors with an empty path. But such errors are *) - (* fatal to Nuclide, so if it does, then we'll at least use a fall-back path. *) - let default_uri = cenv.c_ienv.i_root |> Path.to_string |> File_url.create in - (* 'all' is an SMap from uri to diagnostic list, and 'add' appends the error within the map *) - let add severity error all = - let uri, diagnostic = error_to_lsp ~severity ~default_uri error in - SMap.add ~combine:List.append uri [diagnostic] all in - (* First construct an SMap from uri to diagnostic list, which gathers together *) - (* all the errors and warnings per uri *) - let all = Errors.ErrorSet.fold (add (Some PublishDiagnostics.Error)) errors SMap.empty in - let all = Errors.ErrorSet.fold (add (Some PublishDiagnostics.Warning)) warnings all - in - if cenv.c_is_rechecking then - Ok (do_additional_diagnostics cenv all, LogNotNeeded) - else - Ok (do_replacement_diagnostics cenv all, LogNotNeeded) - - | Connected cenv, Server_message Persistent_connection_prot.StartRecheck -> - let state = show_recheck_progress { cenv with - c_is_rechecking = true; - c_lazy_stats = None; - } in - Ok (state, LogNotNeeded) - - | Connected cenv, Server_message Persistent_connection_prot.EndRecheck lazy_stats -> - let state = show_recheck_progress { cenv with - c_is_rechecking = false; - c_lazy_stats = Some lazy_stats; - } in - Ok (state, LogNotNeeded) - - | Connected cenv, Server_message (Persistent_connection_prot.Please_hold status) -> - let (server_status, watcher_status) = status in - let c_server_status = (server_status, Some watcher_status) in - (* We keep a log of typecheck summaries over the past 2mins. *) - let c_recent_summaries = cenv.c_recent_summaries in - let new_time = Unix.gettimeofday () in - let summary = ServerStatus.get_summary server_status in - let c_recent_summaries = Option.value_map summary ~default:c_recent_summaries ~f:(fun summary -> - (new_time, summary) :: cenv.c_recent_summaries - |> List.filter ~f:(fun (t,_) -> t >= new_time -. 120.0)) - in - let state = show_recheck_progress { cenv with c_server_status; c_recent_summaries; } in - Ok (state, LogNotNeeded) - - | _, Server_message _ -> - failwith (Printf.sprintf "In state %s, unexpected event %s" - (string_of_state state) (denorm_string_of_event event)) - - | Disconnected env, Tick -> - let state = try_connect flowconfig_name env in - Ok (state, LogNotNeeded) - - | _, Tick -> - Lwt.async EventLoggerLwt.flush; - Ok (state, LogNotNeeded) -end - - -and main_log_command - (state: state) - (metadata: Persistent_connection_prot.metadata) - : unit = - let open Persistent_connection_prot in - let client_context = FlowEventLogger.get_context () in - let request = metadata.start_json_truncated |> Hh_json.json_to_string in - let wall_start = metadata.start_wall_time in - let server_profiling = metadata.server_profiling in - let client_duration = metadata.client_duration in - let extra_data = metadata.extra_data in - let persistent_context = Some { FlowEventLogger. 
- start_lsp_state = metadata.start_lsp_state; - start_lsp_state_reason = metadata.start_lsp_state_reason; - start_server_status = Option.map metadata.start_server_status - ~f:(ServerStatus.string_of_status ~terse:true); - start_watcher_status = Option.map metadata.start_watcher_status - ~f:(FileWatcherStatus.string_of_status); - } in - let server_logging_context = metadata.server_logging_context in - (* gather any recent typechecks that finished after the request had arrived *) - let delays = match state with - | Connected cenv -> Core_list.filter_map cenv.c_recent_summaries ~f:(fun (t,s) -> - if t > wall_start then Some s else None) - | _ -> [] in - let root = Option.value ~default:Path.dummy_path (get_root state) in - let persistent_delay = if delays = [] then None - else Some (ServerStatus.log_of_summaries ~root delays) in - - match metadata.error_info with - | None -> FlowEventLogger.persistent_command_success - ~server_logging_context ~request ~extra_data - ~client_context ~persistent_context ~persistent_delay - ~server_profiling ~client_duration ~wall_start ~error:None - | Some (ExpectedError, msg, stack) -> FlowEventLogger.persistent_command_success - ~server_logging_context ~request ~extra_data - ~client_context ~persistent_context ~persistent_delay - ~server_profiling ~client_duration ~wall_start ~error:(Some (msg, stack)) - | Some (UnexpectedError, msg, stack) -> FlowEventLogger.persistent_command_failure - ~server_logging_context ~request ~extra_data - ~client_context ~persistent_context ~persistent_delay - ~server_profiling ~client_duration ~wall_start ~error:(msg, stack) - -and main_log_error ~(expected: bool) (msg: string) (stack: string) : unit = - let error = (msg, Utils.Callstack stack) in - let client_context = FlowEventLogger.get_context () in - match expected with - | true -> FlowEventLogger.persistent_expected_error ~client_context ~error - | false -> FlowEventLogger.persistent_unexpected_error ~client_context ~error - -and main_handle_error - (e: exn) - (Utils.Callstack stack) - (state: state) - (event: event option) - : state = - let open Marshal_tools in - match e with - | Server_fatal_connection_exception _edata when state = Post_shutdown -> - state - - | Server_fatal_connection_exception edata -> begin - (* log the error *) - let stack = edata.stack ^ "---\n" ^ stack in - main_log_error ~expected:true ("[Server fatal] " ^ edata.message) stack; - (* report that we're disconnected to telemetry/connectionStatus *) - let state = begin match state with - | Connected env -> - let i_isConnected = Lsp_helpers.notify_connectionStatus env.c_ienv.i_initialize_params - to_stdout env.c_ienv.i_isConnected false in - let env = { env with c_ienv = { env.c_ienv with i_isConnected; }; } in - Connected env - | _ -> state - end in - (* send the error report *) - let code = match state with - | Connected cenv -> cenv.c_about_to_exit_code - | _ -> None in - let code = Option.value_map code ~f:FlowExitStatus.to_string ~default:"" in - let report = Printf.sprintf "Server fatal exception: [%s] %s\n%s" code edata.message stack in - Lsp_helpers.telemetry_error to_stdout report; - let d_autostart, d_ienv = match state with - | Connected { c_ienv; c_about_to_exit_code; _ } - when c_about_to_exit_code = Some FlowExitStatus.Flowconfig_changed - || c_about_to_exit_code = Some FlowExitStatus.Server_out_of_date -> - (* we allow at most one autostart_after_version_mismatch per *) - (* instance so as to avoid getting into version battles. 
*) - let previous = c_ienv.i_can_autostart_after_version_mismatch in - let d_ienv = { c_ienv with i_can_autostart_after_version_mismatch = false; } in - previous, d_ienv - | Connected { c_ienv; _ } -> - false, c_ienv - | Disconnected { d_ienv; _ } -> - false, d_ienv - | Pre_init _ - | Post_shutdown -> - failwith "Unexpected server error in inapplicable state" (* crash *) - in - let env = { - d_ienv; - d_autostart; - d_server_status = None; + (temp_dir : string option) + (shm_flags : CommandUtils.shared_mem_params) + (lazy_mode : Options.lazy_mode option) + (autostop : bool) + (() : unit) : unit = + let connect_params = + { + retries = 0; + retry_if_init = false; + timeout = None; + no_auto_start = false; + temp_dir; + autostop; + lazy_mode; + shm_flags; + ignore_version = false; + quiet = false; + on_mismatch = Choose_newest; } - in - let _state = state |> dismiss_tracks in - let state = Disconnected env in - state - end - - | Client_recoverable_connection_exception edata -> - let stack = edata.stack ^ "---\n" ^ stack in - main_log_error ~expected:true ("[Client recoverable] " ^ edata.message) stack; - let report = Printf.sprintf "Client exception: %s\n%s" edata.message stack in - Lsp_helpers.telemetry_error to_stdout report; - state - - | Client_fatal_connection_exception edata -> - let stack = edata.stack ^ "---\n" ^ stack in - main_log_error ~expected:true ("[Client fatal] " ^ edata.message) stack; - let report = Printf.sprintf "Client fatal exception: %s\n%s" edata.message stack in - Printf.eprintf "%s" report; - lsp_exit_bad () - - | e -> - let e = Lsp_fmt.error_of_exn e in - main_log_error ~expected:true ("[FlowLSP] " ^ e.Error.message) stack; - let text = Printf.sprintf "FlowLSP exception %s [%i]\n%s" e.Error.message e.Error.code stack in - let () = match event with - | Some (Client_message (RequestMessage (id, _request), _metadata)) -> - let json = Lsp_fmt.print_lsp_response id (ErrorResult (e, stack)) in - to_stdout json; - | _ -> - Lsp_helpers.telemetry_error to_stdout text - in - state - + in + let flowconfig_name = base_flags.Base_flags.flowconfig_name in + FlowLsp.run ~flowconfig_name ~connect_params let command = CommandSpec.command spec main diff --git a/src/commands/options/autofix_options.ml b/src/commands/options/autofix_options.ml new file mode 100644 index 00000000000..32d0ec6e66d --- /dev/null +++ b/src/commands/options/autofix_options.ml @@ -0,0 +1,24 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type ambiguity_strategy = + | Fail + | Temporary + | Generalize + | Specialize + | Fixme + | Suppress + +let ambiguity_strategies = + [ + ("fail", Fail); + ("temporary", Temporary); + ("generalize", Generalize); + ("specialize", Specialize); + ("fixme", Fixme); + ("suppress", Suppress); + ] diff --git a/src/commands/options/dune b/src/commands/options/dune new file mode 100644 index 00000000000..a6e99e1725d --- /dev/null +++ b/src/commands/options/dune @@ -0,0 +1,4 @@ +(library + (name flow_autofix_options) + (wrapped false) +) diff --git a/src/commands/portCommand.ml b/src/commands/portCommand.ml deleted file mode 100644 index 2868f890596..00000000000 --- a/src/commands/portCommand.ml +++ /dev/null @@ -1,62 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -(***********************************************************************) -(* flow port (transform docblock-style annotations) command *) -(***********************************************************************) - -open CommandUtils - -let spec = { - CommandSpec. - name = "port"; - doc = "Shows ported type annotations for given files"; - usage = Printf.sprintf - "Usage: %s port [OPTION]... [FILE]...\n\n\ - Ports types in one or more files\n\n\ - Example usage:\n\ - \t%s port file1 file2\n" - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_flags - |> root_flag - |> from_flag - |> anon "files" (required (list_of string)) - ) -} - -let main base_flags option_values root from files () = - FlowEventLogger.set_from from; - let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> Some (List.hd files) - ) in - let files = List.map expand_path files in - let request = ServerProt.Request.PORT files in - let patch_map = match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.PORT patch_map -> patch_map - | response -> failwith_bad_response ~request ~response - in - SMap.iter (fun file patches_or_err -> - match patches_or_err with - | Ok patches -> - Printf.printf "%s\n%s" file patches - | Error exn -> - Printf.eprintf - "Could not port docblock-style annotations for %s\n%s" - file - ((Printexc.to_string exn) ^ "\n" ^ (Printexc.get_backtrace ())); - ) patch_map; - flush stderr; - flush stdout - -let command = CommandSpec.command spec main diff --git a/src/commands/refactorCommand.ml b/src/commands/refactorCommand.ml index 85902474171..5e0d8e73036 100644 --- a/src/commands/refactorCommand.ml +++ b/src/commands/refactorCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2014, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,115 +11,130 @@ open CommandUtils -let spec = { - CommandSpec. - name = "refactor"; - doc = "Provides refactoring capabilities (early alpha)"; - usage = Printf.sprintf - "Usage: %s refactor [OPTION]... [FILE] LINE COLUMN\n\n\ - e.g. %s refactor foo.js 12 3 --rename newName\n" +let spec = + { + CommandSpec.name = "refactor"; + doc = "Provides refactoring capabilities (early alpha)"; + usage = + Printf.sprintf + "Usage: %s refactor [OPTION]... [FILE] LINE COLUMN\n\ne.g. 
%s refactor foo.js 12 3 --rename newName\n" CommandUtils.exe_name CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> from_flag - |> path_flag - |> flag "--rename" (optional string) ~doc:"Renames the symbol to the name given" - |> anon "args" (required (list_of string)) - ) -} + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> from_flag + |> path_flag + |> flag "--rename" (optional string) ~doc:"Renames the symbol to the name given" + |> anon "args" (required (list_of string))); + } let usage error = print_endline error; print_newline (); CommandSpec.usage spec; - FlowExitStatus.(exit Commandline_usage_error) + FlowExitStatus.(exit Commandline_usage_error) let parse_args path args = - let (file, line, column) = match args with + let (file, line, column) = + match args with | [file; line; column] -> let file = expand_path file in - File_input.FileName file, (int_of_string line), (int_of_string column) + (File_input.FileName file, int_of_string line, int_of_string column) | [line; column] -> - get_file_from_filename_or_stdin path ~cmd:CommandSpec.(spec.name) None, - (int_of_string line), - (int_of_string column) + ( get_file_from_filename_or_stdin path ~cmd:CommandSpec.(spec.name) None, + int_of_string line, + int_of_string column ) | _ -> CommandSpec.usage spec; FlowExitStatus.(exit Commandline_usage_error) in let (line, column) = convert_input_pos (line, column) in - file, line, column + (file, line, column) -let print_json result ~pretty ~strip_root = - let open Hh_json in - let open ServerProt.Response in - let json_of_edit (loc, text) = - JSON_Object [ - "oldRange", (Reason.json_of_loc ~strip_root loc); - "newText", JSON_String text; - ] - in - let json = match result with - (* TODO might be nice to provide details when this happens *) - | None -> JSON_Object ["kind", JSON_String "no-refactor-performed"] - | Some {refactor_edits} -> - JSON_Object [ - "kind", JSON_String "refactor-performed"; - "edits", JSON_Array (List.map json_of_edit refactor_edits); - ] - in - print_json_endline ~pretty json +let print_json result ~stdin_file ~pretty ~strip_root = + Hh_json.( + ServerProt.Response.( + let json_of_edit (loc, text) = + JSON_Object + [ + ("oldRange", json_of_loc_with_offset ~stdin_file ~strip_root loc); + ("newText", JSON_String text); + ] + in + let json = + match result with + (* TODO might be nice to provide details when this happens *) + | None -> JSON_Object [("kind", JSON_String "no-refactor-performed")] + | Some { refactor_edits } -> + JSON_Object + [ + ("kind", JSON_String "refactor-performed"); + ("edits", JSON_Array (Core_list.map ~f:json_of_edit refactor_edits)); + ] + in + print_json_endline ~pretty json)) -let to_string result option_values ~strip_root = - let open ServerProt.Response in - let edits = match result with - | None -> [] - | Some {refactor_edits} -> refactor_edits - in - let string_of_loc = - if option_values.from = "vim" || option_values.from = "emacs" then - Errors.Vim_emacs_output.string_of_loc ~strip_root - else - range_string_of_loc ~strip_root - in - let string_of_edit (loc, new_text) = - Printf.sprintf "%s: %s" (string_of_loc loc) new_text - in - String.concat "\n" @@ List.map string_of_edit edits +let to_string result ~strip_root = + ServerProt.Response.( + let edits = + match result with + | None -> [] + | Some { refactor_edits } -> refactor_edits + in + let string_of_loc = + let from 
= FlowEventLogger.get_from_I_AM_A_CLOWN () in + if from = Some "vim" || from = Some "emacs" then + Errors.Vim_emacs_output.string_of_loc ~strip_root + else + range_string_of_loc ~strip_root + in + let string_of_edit (loc, new_text) = Printf.sprintf "%s: %s" (string_of_loc loc) new_text in + String.concat "\n" @@ Core_list.map ~f:string_of_edit edits) -let main base_flags option_values json pretty root strip_root from path rename args () = - FlowEventLogger.set_from from; +let main base_flags option_values json pretty root strip_root path rename args () = let (file, line, column) = parse_args path args in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - let strip_root = if strip_root then Some root else None in - - let refactor_variant = match rename with + let root = + guess_root + flowconfig_name + (match root with + | Some root -> Some root + | None -> File_input.path_of_file_input file) + in + let strip_root = + if strip_root then + Some root + else + None + in + let refactor_variant = + match rename with | Some new_name -> ServerProt.Request.RENAME new_name | None -> usage "The kind of refactor (e.g. rename) must be specified with a flag" in - - let request = ServerProt.Request.REFACTOR (file, line, column, refactor_variant) in + let request = + ServerProt.Request.REFACTOR { input = file; line; char = column; refactor_variant } + in (* command result will be a position structure with full file path *) match connect_and_make_request flowconfig_name option_values root request with | ServerProt.Response.REFACTOR (Ok result) -> (* format output *) - if json || pretty - then print_json result ~pretty ~strip_root - else print_endline (to_string result option_values ~strip_root) + if json || pretty then + print_json result ~stdin_file:file ~pretty ~strip_root + else + print_endline (to_string result ~strip_root) | ServerProt.Response.REFACTOR (Error exn_msg) -> Utils_js.prerr_endlinef "Could not refactor for %s:%d:%d\n%s" - (File_input.filename_of_file_input file) line column exn_msg + (File_input.filename_of_file_input file) + line + column + exn_msg | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/saveStateCommand.ml b/src/commands/saveStateCommand.ml index 0a0a2d96c37..0a4c7b42a28 100644 --- a/src/commands/saveStateCommand.ml +++ b/src/commands/saveStateCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,36 +11,33 @@ open CommandUtils -let spec = { - CommandSpec. - name = "save-state"; - doc = "Tell the server to create a saved-state file"; - usage = Printf.sprintf - "Usage: %s save-state [OPTION]...\n\n\ - e.g. %s save-state --root path/to/root --out path/to/my_saved_state\n" - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_flags - |> root_flag - |> from_flag - |> flag "--out" (required string) - ~doc:"The path to the new saved-state file" - ) -} - -let main base_flags option_values root _from out () = +let spec = + { + CommandSpec.name = "save-state"; + doc = "Tell the server to create a saved-state file"; + usage = + Printf.sprintf + "Usage: %s save-state [OPTION]...\n\ne.g. 
%s save-state --root path/to/root --out path/to/my_saved_state\n" + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_flags + |> root_flag + |> from_flag + |> flag "--out" (required string) ~doc:"The path to the new saved-state file"); + } + +let main base_flags option_values root out () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = guess_root flowconfig_name root in - let out = Path.make @@ Files.imaginary_realpath out in - let out_str = Path.to_string out in Printf.printf "Asking server to create a saved-state file at `%s`\n%!" out_str; - let request = ServerProt.Request.SAVE_STATE out in + let request = ServerProt.Request.SAVE_STATE { outfile = out } in match connect_and_make_request flowconfig_name option_values root request with | ServerProt.Response.SAVE_STATE (Error err) -> Printf.printf "Failed to create saved-state file `%s`:\n%s\n%!" out_str err diff --git a/src/commands/serverCommand.ml b/src/commands/serverCommand.ml index a573e1fbf2f..5fffda6426f 100644 --- a/src/commands/serverCommand.ml +++ b/src/commands/serverCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,75 +11,88 @@ open CommandUtils -let spec = { CommandSpec. - name = "server"; - doc = "Runs a Flow server in the foreground"; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> lazy_flags - |> options_flags - |> shm_flags - |> ignore_version_flag - |> from_flag - |> log_file_flags - |> no_restart_flag - |> file_watcher_flag - |> anon "root" (optional string) - ); - usage = Printf.sprintf - "Usage: %s server [OPTION]... [ROOT]\n\n\ - Runs a Flow server in the foreground.\n\n\ - Flow will search upward for a .flowconfig file, beginning at ROOT.\n\ - ROOT is assumed to be the current directory if unspecified.\n" - exe_name; -} - -let main base_flags lazy_mode options_flags shm_flags ignore_version from - server_log_file monitor_log_file no_restart file_watcher file_watcher_debug path_opt () = +let spec = + { + CommandSpec.name = "server"; + doc = "Runs a Flow server in the foreground"; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> lazy_flags + |> options_flags + |> shm_flags + |> ignore_version_flag + |> from_flag + |> log_file_flags + |> no_restart_flag + |> file_watcher_flag + |> no_cgroup_flag + |> anon "root" (optional string)); + usage = + Printf.sprintf + "Usage: %s server [OPTION]... 
[ROOT]\n\nRuns a Flow server in the foreground.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\n" + exe_name; + } +let main + base_flags + lazy_mode + options_flags + shm_flags + ignore_version + server_log_file + monitor_log_file + no_restart + file_watcher + file_watcher_debug + path_opt + () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = CommandUtils.guess_root flowconfig_name path_opt in - let flowconfig = FlowConfig.get (Server_files_js.config_file flowconfig_name root) in + let flowconfig = + let flowconfig_path = Server_files_js.config_file flowconfig_name root in + read_config_or_exit ~enforce_warnings:(not ignore_version) flowconfig_path + in let options = make_options ~flowconfig_name ~flowconfig ~lazy_mode ~root options_flags in - (* initialize loggers before doing too much, especially anything that might exit *) - LoggingUtils.init_loggers ~from ~options (); + LoggingUtils.init_loggers ~options (); if not ignore_version then assert_version flowconfig; let shared_mem_config = shm_config shm_flags flowconfig in - - let server_log_file = match server_log_file with - | Some s -> s - | None -> - CommandUtils.server_log_file ~flowconfig_name ~tmp_dir:(Options.temp_dir options) root - flowconfig - |> Path.to_string + let server_log_file = + match server_log_file with + | Some s -> s + | None -> + CommandUtils.server_log_file + ~flowconfig_name + ~tmp_dir:(Options.temp_dir options) + root + flowconfig + |> Path.to_string in - - let monitor_log_file = match monitor_log_file with - | Some s -> s - | None -> - CommandUtils.monitor_log_file ~flowconfig_name ~tmp_dir:(Options.temp_dir options) root - |> Path.to_string + let monitor_log_file = + match monitor_log_file with + | Some s -> s + | None -> + CommandUtils.monitor_log_file ~flowconfig_name ~tmp_dir:(Options.temp_dir options) root + |> Path.to_string + in + let file_watcher = choose_file_watcher ~options ~file_watcher ~flowconfig in + let monitor_options = + { + FlowServerMonitorOptions.log_file = monitor_log_file; + autostop = false; + no_restart; + server_log_file; + server_options = options; + shared_mem_config; + argv = Sys.argv; + file_watcher; + file_watcher_debug; + } in - - let file_watcher = Option.first_some file_watcher (FlowConfig.file_watcher flowconfig) - |> Option.value ~default:Options.DFind in - - let monitor_options = { FlowServerMonitorOptions. - log_file = monitor_log_file; - autostop = false; - no_restart; - server_log_file; - server_options = options; - shared_mem_config; - argv = Sys.argv; - file_watcher; - file_watcher_debug; - } in - FlowServerMonitor.start monitor_options let command = CommandSpec.command spec main diff --git a/src/commands/shellCompleteCommand.ml b/src/commands/shellCompleteCommand.ml index 1f519fc1f6a..40a8173d8d2 100644 --- a/src/commands/shellCompleteCommand.ml +++ b/src/commands/shellCompleteCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -11,71 +11,72 @@ open CommandInfo (* flow shell-complete --current num -- command *) (***********************************************************************) -module Command(CommandList : COMMAND_LIST) = struct +module Command (CommandList : COMMAND_LIST) = struct + let spec = + { + CommandSpec.name = "shell-complete"; + doc = ""; + usage = Printf.sprintf "Usage: %s shell-complete --current N -- ARGV\n" CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> CommandUtils.from_flag + |> flag + "--current" + (optional int) + ~doc:"Current term in the argument list being completed." + |> rest); + } - let spec = { - CommandSpec. - name = "shell-complete"; - doc = ""; - usage = Printf.sprintf - "Usage: %s shell-complete --current N -- ARGV\n" - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> CommandUtils.from_flag - |> flag "--current" (optional int) - ~doc:"Current term in the argument list being completed." - |> rest - ) - } - - let is_partial_flag substr = - Str.string_match (Str.regexp "^-") substr 0 + let is_partial_flag substr = Str.string_match (Str.regexp "^-") substr 0 let find_flag key flags = - if not (is_partial_flag key) - then None - else try - let metadata = SMap.find_unsafe key flags in - Some metadata.CommandSpec.ArgSpec.arg_count - with Not_found -> None + if not (is_partial_flag key) then + None + else + try + let metadata = SMap.find_unsafe key flags in + Some metadata.CommandSpec.ArgSpec.arg_count + with Not_found -> None let get_completion command current rest = let flags = CommandSpec.flags command in let prev = List.nth rest (current - 1) in match find_flag prev flags with - | Some CommandSpec.ArgSpec.No_Arg - | None -> - if (current < List.length rest && - is_partial_flag (List.nth rest current)) - then ( - let flags = SMap.keys flags in - String.concat " " flags - ) else ( - "FILE" - ) - | _ -> "ARGUMENT" + | Some CommandSpec.ArgSpec.No_Arg + | None -> + if current < List.length rest && is_partial_flag (List.nth rest current) then + let flags = SMap.keys flags in + String.concat " " flags + else + "FILE" + | _ -> "ARGUMENT" - let main from current rest () = - FlowEventLogger.set_from from; - let current = match current with Some x -> x | None -> 0 in - let rest = match rest with Some x -> x | None -> [] in - if current <= 1 then ( - let commands = CommandList.commands |> List.map (fun (command) -> - CommandSpec.name command - ) in + let main current rest () = + let current = + match current with + | Some x -> x + | None -> 0 + in + let rest = + match rest with + | Some x -> x + | None -> [] + in + if current <= 1 then + let commands = + CommandList.commands |> Core_list.map ~f:(fun command -> CommandSpec.name command) + in print_endline (String.concat " " commands) - ) else ( + else try let cmdstr = String.lowercase_ascii (List.nth rest 1) in - let command = CommandList.commands |> List.find (fun (command) -> - CommandSpec.name command = cmdstr - ) in + let command = + CommandList.commands |> List.find (fun command -> CommandSpec.name command = cmdstr) + in let completion = get_completion command current rest in print_endline completion with Not_found -> () - ) let command = CommandSpec.command spec main - end diff --git a/src/commands/startCommand.ml b/src/commands/startCommand.ml index 3549009ce66..bcb48f07bb4 100644 --- a/src/commands/startCommand.ml +++ b/src/commands/startCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,101 +11,113 @@ open CommandUtils -let spec = { CommandSpec. - name = "start"; - doc = "Starts a Flow server"; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> options_and_json_flags - |> log_file_flags - |> flag "--wait" no_arg - ~doc:"Wait for the server to finish initializing" - |> lazy_flags - |> autostop_flag - |> shm_flags - |> ignore_version_flag - |> from_flag - |> no_restart_flag - |> file_watcher_flag - |> anon "root" (optional string) - ); - usage = Printf.sprintf - "Usage: %s start [OPTION]... [ROOT]\n\n\ - Starts a Flow server.\n\n\ - Flow will search upward for a .flowconfig file, beginning at ROOT.\n\ - ROOT is assumed to be the current directory if unspecified.\n\ - A server will be started if none is running over ROOT.\n" - exe_name; -} +let spec = + { + CommandSpec.name = "start"; + doc = "Starts a Flow server"; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> options_and_json_flags + |> log_file_flags + |> flag "--wait" no_arg ~doc:"Wait for the server to finish initializing" + |> lazy_flags + |> autostop_flag + |> shm_flags + |> ignore_version_flag + |> from_flag + |> no_restart_flag + |> file_watcher_flag + |> no_cgroup_flag + |> anon "root" (optional string)); + usage = + Printf.sprintf + "Usage: %s start [OPTION]... [ROOT]\n\nStarts a Flow server.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\nA server will be started if none is running over ROOT.\n" + exe_name; + } let main - base_flags options_flags json pretty server_log_file monitor_log_file wait lazy_mode - autostop shm_flags ignore_version from no_restart file_watcher file_watcher_debug path_opt () = - + base_flags + options_flags + json + pretty + server_log_file + monitor_log_file + wait + lazy_mode + autostop + shm_flags + ignore_version + no_restart + file_watcher + file_watcher_debug + path_opt + () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = CommandUtils.guess_root flowconfig_name path_opt in - let flowconfig = FlowConfig.get (Server_files_js.config_file flowconfig_name root) in + let flowconfig = + let flowconfig_path = Server_files_js.config_file flowconfig_name root in + read_config_or_exit ~enforce_warnings:(not ignore_version) flowconfig_path + in let options = make_options ~flowconfig_name ~flowconfig ~lazy_mode ~root options_flags in - (* initialize loggers before doing too much, especially anything that might exit *) - LoggingUtils.init_loggers ~from ~options (); + LoggingUtils.init_loggers ~options (); if not ignore_version then assert_version flowconfig; let shared_mem_config = shm_config shm_flags flowconfig in - - let server_log_file = match server_log_file with - | Some s -> s - | None -> - CommandUtils.server_log_file ~flowconfig_name ~tmp_dir:(Options.temp_dir options) root - flowconfig - |> Path.to_string + let server_log_file = + match server_log_file with + | Some s -> s + | None -> + CommandUtils.server_log_file + ~flowconfig_name + ~tmp_dir:(Options.temp_dir options) + root + flowconfig + |> Path.to_string in - - let monitor_log_file = match monitor_log_file with - | Some s -> s - | None -> - CommandUtils.monitor_log_file ~flowconfig_name ~tmp_dir:(Options.temp_dir options) root - |> Path.to_string + let monitor_log_file = + match monitor_log_file with + | Some s -> s + | None -> + CommandUtils.monitor_log_file 
~flowconfig_name ~tmp_dir:(Options.temp_dir options) root + |> Path.to_string in - let on_spawn pid = - if pretty || json then begin - let open Hh_json in - print_json_endline ~pretty (JSON_Object [ - "pid", JSON_String (string_of_int pid); - "log_file", JSON_String server_log_file; - "monitor_log_file", JSON_String monitor_log_file; - ]) - end else if not (Options.is_quiet options) then begin - Printf.eprintf - "Spawned flow server (pid=%d)\n" pid; - Printf.eprintf - "Logs will go to %s\n%!" server_log_file; - Printf.eprintf - "Monitor logs will go to %s\n%!" monitor_log_file - end + if pretty || json then + Hh_json.( + print_json_endline + ~pretty + (JSON_Object + [ + ("pid", JSON_String (string_of_int pid)); + ("log_file", JSON_String server_log_file); + ("monitor_log_file", JSON_String monitor_log_file); + ])) + else if not (Options.is_quiet options) then ( + Printf.eprintf "Spawned flow server (pid=%d)\n" pid; + Printf.eprintf "Logs will go to %s\n%!" server_log_file; + Printf.eprintf "Monitor logs will go to %s\n%!" monitor_log_file + ) in - (* A quiet `flow start` doesn't imply a quiet `flow server` *) let server_options = { options with Options.opt_quiet = false } in - - let file_watcher = Option.first_some file_watcher (FlowConfig.file_watcher flowconfig) - |> Option.value ~default:Options.DFind in - - let monitor_options = { FlowServerMonitorOptions. - log_file = monitor_log_file; - autostop; - no_restart; - server_log_file; - server_options; - shared_mem_config; - argv = Sys.argv; - file_watcher; - file_watcher_debug; - } in - + let file_watcher = choose_file_watcher ~options ~file_watcher ~flowconfig in + let monitor_options = + { + FlowServerMonitorOptions.log_file = monitor_log_file; + autostop; + no_restart; + server_log_file; + server_options; + shared_mem_config; + argv = Sys.argv; + file_watcher; + file_watcher_debug; + } + in FlowServerMonitor.daemonize ~wait ~on_spawn monitor_options let command = CommandSpec.command spec main diff --git a/src/commands/statusCommands.ml b/src/commands/statusCommands.ml index 5b414e5a58b..7f0c9ca6795 100644 --- a/src/commands/statusCommands.ml +++ b/src/commands/statusCommands.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -19,81 +19,65 @@ module type CONFIG = sig end module Impl (CommandList : COMMAND_LIST) (Config : CONFIG) = struct - - let spec = if Config.explicit - then - { - CommandSpec. - name = "status"; - doc = "(default) Shows current Flow errors by asking the Flow server"; - usage = Printf.sprintf - "Usage: %s status [OPTION]... 
[ROOT]\n\ - Shows current Flow errors by asking the Flow server.\n\n\ - Flow will search upward for a .flowconfig file, beginning at ROOT.\n\ - ROOT is assumed to be the current directory if unspecified.\n\ - A server will be started if none is running over ROOT.\n\ - \n\ - Status command options:" - exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> json_version_flag - |> error_flags - |> strip_root_flag - |> from_flag - |> dummy false (* match --version below *) - |> anon "root" (optional string) - ) - } - else - let command_info = CommandList.commands - |> List.map (fun (command) -> - (CommandSpec.name command, CommandSpec.doc command) - ) - |> List.filter (fun (cmd, doc) -> cmd <> "" && doc <> "") - |> List.sort (fun (a, _) (b, _) -> String.compare a b) - in - let col_width = List.fold_left - (fun acc (cmd, _) -> max acc (String.length cmd)) 0 command_info in - let cmd_usage = command_info - |> List.map (fun (cmd, doc) -> - Utils_js.spf " %-*s %s" col_width cmd doc - ) - |> String.concat "\n" - in - { - CommandSpec. - name = "default"; - doc = ""; - usage = Printf.sprintf - "Usage: %s [COMMAND] \n\n\ - Valid values for COMMAND:\n%s\n\n\ - Default values if unspecified:\n\ - \ \ COMMAND\ - \tstatus\n\ - \n\ - Status command options:" - exe_name - cmd_usage; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> json_version_flag - |> error_flags - |> strip_root_flag - |> from_flag - |> flag "--version" no_arg - ~doc:"(Deprecated, use `flow version` instead) Print version number and exit" - |> anon "root" (optional string) - ) - } + let spec = + if Config.explicit then + { + CommandSpec.name = "status"; + doc = "(default) Shows current Flow errors by asking the Flow server"; + usage = + Printf.sprintf + "Usage: %s status [OPTION]... 
[ROOT]\nShows current Flow errors by asking the Flow server.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\nA server will be started if none is running over ROOT.\n\nStatus command options:" + exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> json_version_flag + |> error_flags + |> strip_root_flag + |> from_flag + |> dummy false (* match --version below *) + |> anon "root" (optional string)); + } + else + let command_info = + CommandList.commands + |> Core_list.map ~f:(fun command -> (CommandSpec.name command, CommandSpec.doc command)) + |> List.filter (fun (cmd, doc) -> cmd <> "" && doc <> "") + |> List.sort (fun (a, _) (b, _) -> String.compare a b) + in + let col_width = + List.fold_left (fun acc (cmd, _) -> max acc (String.length cmd)) 0 command_info + in + let cmd_usage = + command_info + |> Core_list.map ~f:(fun (cmd, doc) -> Utils_js.spf " %-*s %s" col_width cmd doc) + |> String.concat "\n" + in + { + CommandSpec.name = "default"; + doc = ""; + usage = + Printf.sprintf + "Usage: %s [COMMAND] \n\nValid values for COMMAND:\n%s\n\nDefault values if unspecified:\n\ \ COMMAND\tstatus\n\nStatus command options:" + exe_name + cmd_usage; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> json_version_flag + |> error_flags + |> strip_root_flag + |> from_flag + |> flag "--version" no_arg ~doc:"Print version number and exit" + |> anon "root" (optional string)); + } type args = { root: Path.t; - from: string; output_json: bool; output_json_version: Errors.Json_output.json_version option; pretty: bool; @@ -101,104 +85,138 @@ module Impl (CommandList : COMMAND_LIST) (Config : CONFIG) = struct strip_root: bool; } - let check_status flowconfig_name (args:args) connect_flags = + let check_status flowconfig_name (args : args) connect_flags = let name = "flow" in - let include_warnings = args.error_flags.Errors.Cli_output.include_warnings in - let request = ServerProt.Request.STATUS (args.root, include_warnings) in - let response, lazy_stats = match connect_and_make_request flowconfig_name connect_flags - args.root request with - | ServerProt.Response.STATUS {status_response; lazy_stats} -> status_response, lazy_stats - | response -> failwith_bad_response ~request ~response + let request = ServerProt.Request.STATUS { client_root = args.root; include_warnings } in + let (response, lazy_stats) = + match connect_and_make_request flowconfig_name connect_flags args.root request with + | ServerProt.Response.STATUS { status_response; lazy_stats } -> (status_response, lazy_stats) + | response -> failwith_bad_response ~request ~response in - let strip_root = if args.strip_root then Some args.root else None in - let print_json = Errors.Json_output.print_errors - ~out_channel:stdout ~strip_root ~pretty:args.pretty ?version:args.output_json_version - ~suppressed_errors:([]) + let strip_root = + if args.strip_root then + Some args.root + else + None + in + let print_json = + Errors.Json_output.print_errors + ~out_channel:stdout + ~strip_root + ~pretty:args.pretty + ?version:args.output_json_version + in + let lazy_msg = + match lazy_stats.ServerProt.Response.lazy_mode with + | Options.NON_LAZY_MODE -> None + | mode -> + Some + (Printf.sprintf + ( "The Flow server is currently in %s lazy mode and is only checking %d/%d files.\n" + ^^ "To learn more, visit flow.org/en/docs/lang/lazy-modes" ) + Options.( + match mode with + | LAZY_MODE_FILESYSTEM 
-> "filesystem" + | LAZY_MODE_IDE -> "IDE" + | LAZY_MODE_WATCHMAN -> "Watchman" + | NON_LAZY_MODE -> assert false) + lazy_stats.ServerProt.Response.checked_files + lazy_stats.ServerProt.Response.total_files) in - let lazy_msg = match lazy_stats.ServerProt.Response.lazy_mode with - | Some mode -> Some ( - Printf.sprintf - ("The Flow server is currently in %s lazy mode and is only checking %d/%d files.\n" ^^ - "To learn more, visit flow.org/en/docs/lang/lazy-modes") - Options.(match mode with | LAZY_MODE_FILESYSTEM -> "filesystem" | LAZY_MODE_IDE -> "IDE") - lazy_stats.ServerProt.Response.checked_files - lazy_stats.ServerProt.Response.total_files - ) - | None -> None in match response with | ServerProt.Response.DIRECTORY_MISMATCH d -> - let msg = Printf.sprintf - ("%s is running on a different directory.\n" ^^ - "server_root: %s, client_root: %s") - name - (Path.to_string d.ServerProt.Response.server) - (Path.to_string d.ServerProt.Response.client) + let msg = + Printf.sprintf + ("%s is running on a different directory.\n" ^^ "server_root: %s, client_root: %s") + name + (Path.to_string d.ServerProt.Response.server) + (Path.to_string d.ServerProt.Response.client) in FlowExitStatus.(exit ~msg Server_client_directory_mismatch) - | ServerProt.Response.ERRORS {errors; warnings} -> + | ServerProt.Response.ERRORS { errors; warnings; suppressed_errors } -> let error_flags = args.error_flags in - begin if args.output_json then - print_json ~errors ~warnings () - else if args.from = "vim" || args.from = "emacs" then - Errors.Vim_emacs_output.print_errors ~strip_root - stdout ~errors ~warnings () - else - Errors.Cli_output.print_errors - ~strip_root - ~flags:error_flags - ~out_channel:stdout - ~errors - ~warnings - ~lazy_msg - () + let from = FlowEventLogger.get_from_I_AM_A_CLOWN () in + begin + if args.output_json then + print_json ~errors ~warnings ~suppressed_errors () + else if from = Some "vim" || from = Some "emacs" then + Errors.Vim_emacs_output.print_errors ~strip_root stdout ~errors ~warnings () + else + let errors = + List.fold_left + (fun acc (error, _) -> Errors.ConcreteLocPrintableErrorSet.add error acc) + errors + suppressed_errors + in + Errors.Cli_output.print_errors + ~strip_root + ~flags:error_flags + ~out_channel:stdout + ~errors + ~warnings + ~lazy_msg + () end; - FlowExitStatus.exit (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) + FlowExitStatus.exit + (get_check_or_status_exit_code errors warnings error_flags.Errors.Cli_output.max_warnings) | ServerProt.Response.NO_ERRORS -> if args.output_json then - print_json ~errors:Errors.ErrorSet.empty ~warnings:Errors.ErrorSet.empty () - else begin + print_json + ~errors:Errors.ConcreteLocPrintableErrorSet.empty + ~warnings:Errors.ConcreteLocPrintableErrorSet.empty + ~suppressed_errors:[] + () + else ( Printf.printf "No errors!\n%!"; Option.iter lazy_msg ~f:(Printf.printf "\n%s\n%!") - end; + ); FlowExitStatus.(exit No_error) | ServerProt.Response.NOT_COVERED -> let msg = "Why on earth did the server respond with NOT_COVERED?" 
in FlowExitStatus.(exit ~msg Unknown_error) - let main base_flags connect_flags json pretty json_version error_flags strip_root from version - root () = - FlowEventLogger.set_from from; + let main base_flags connect_flags json pretty json_version error_flags strip_root version root () + = if version then ( - prerr_endline "Warning: \ - `flow --version` is deprecated in favor of `flow version`"; print_version (); FlowExitStatus.(exit No_error) ); let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = guess_root flowconfig_name root in - let json = json || Option.is_some json_version || pretty in - - let args = { - root; - from = connect_flags.CommandUtils.from; - output_json = json; - output_json_version = json_version; - pretty; - error_flags; - strip_root; - } in + let args = + { + root; + output_json = json; + output_json_version = json_version; + pretty; + error_flags; + strip_root; + } + in check_status flowconfig_name args connect_flags end -module Status(CommandList : COMMAND_LIST) = struct - module Main = Impl (CommandList) (struct let explicit = true end) +module Status (CommandList : COMMAND_LIST) = struct + module Main = + Impl + (CommandList) + (struct + let explicit = true + end) + let command = CommandSpec.command Main.spec Main.main end -module Default(CommandList : COMMAND_LIST) = struct - module Main = Impl (CommandList) (struct let explicit = false end) +module Default (CommandList : COMMAND_LIST) = struct + module Main = + Impl + (CommandList) + (struct + let explicit = false + end) + let command = CommandSpec.command Main.spec Main.main end diff --git a/src/commands/stopCommand.ml b/src/commands/stopCommand.ml index d7072278739..a10b9ddd5bd 100644 --- a/src/commands/stopCommand.ml +++ b/src/commands/stopCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,90 +12,95 @@ open CommandUtils open Utils_js -let spec = { - CommandSpec. - name = "stop"; - doc = "Stops a Flow server"; - usage = Printf.sprintf - "Usage: %s stop [OPTION]... [ROOT]\n\ - Stops a flow server\n\n\ - Flow will search upward for a .flowconfig file, beginning at ROOT.\n\ - ROOT is assumed to be current directory if unspecified\n" - exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> temp_dir_flag - |> from_flag - |> quiet_flag - |> anon "root" (optional string) - ) -} +let spec = + { + CommandSpec.name = "stop"; + doc = "Stops a Flow server"; + usage = + Printf.sprintf + "Usage: %s stop [OPTION]... 
[ROOT]\nStops a flow server\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be current directory if unspecified\n" + exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> temp_dir_flag + |> from_flag + |> quiet_flag + |> anon "root" (optional string)); + } exception FailedToKillNicely -let main base_flags temp_dir from quiet root () = +let main base_flags temp_dir quiet root () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in let root = guess_root flowconfig_name root in - let config = FlowConfig.get (Server_files_js.config_file flowconfig_name root) in + let config = read_config_or_exit (Server_files_js.config_file flowconfig_name root) in let root_s = Path.to_string root in - let tmp_dir = match temp_dir with - | Some x -> x - | None -> FlowConfig.temp_dir config + let tmp_dir = + match temp_dir with + | Some x -> x + | None -> FlowConfig.temp_dir config in let tmp_dir = Path.to_string (Path.make tmp_dir) in - FlowEventLogger.set_from from; if not quiet then prerr_endlinef "Trying to connect to server for `%s`" (Path.to_string root); - let client_handshake = SocketHandshake.({ - client_build_id = build_revision; - is_stop_request = true; - server_should_hangup_if_still_initializing = false; - server_should_exit_if_version_mismatch = true; }, { - client_type = Ephemeral; - }) in + let client_handshake = + SocketHandshake. + ( { + client_build_id = build_revision; + client_version = Flow_version.version; + is_stop_request = true; + server_should_hangup_if_still_initializing = false; + version_mismatch_strategy = Always_stop_server; + }, + { client_type = Ephemeral } ) + in CommandConnectSimple.( match connect_once ~flowconfig_name ~client_handshake ~tmp_dir root with | Ok _ -> - begin try - if not quiet then prerr_endlinef - "Told server for `%s` to die. Waiting for confirmation..." - (Path.to_string root); + begin + try + if not quiet then + prerr_endlinef + "Told server for `%s` to die. Waiting for confirmation..." 
+ (Path.to_string root); let i = ref 0 in while CommandConnectSimple.server_exists ~flowconfig_name ~tmp_dir root do incr i; - if !i < 5 then ignore @@ Unix.sleep 1 - else raise FailedToKillNicely + if !i < 5 then + ignore @@ Unix.sleep 1 + else + raise FailedToKillNicely done; - if not quiet then prerr_endlinef - "Successfully killed server for `%s`" - (Path.to_string root) + if not quiet then + prerr_endlinef "Successfully killed server for `%s`" (Path.to_string root) with FailedToKillNicely -> let msg = spf "Failed to kill server nicely for `%s`" root_s in FlowExitStatus.(exit ~msg Kill_error) - end + end | Error Server_missing -> - if not quiet then prerr_endlinef - "Warning: no server to kill for `%s`" root_s - | Error Build_id_mismatch -> - if not quiet then prerr_endlinef - "Successfully killed server for `%s`" root_s - | Error Server_busy _ + if not quiet then prerr_endlinef "Warning: no server to kill for `%s`" root_s + | Error (Build_id_mismatch Server_exited) -> + if not quiet then prerr_endlinef "Successfully killed server for `%s`" root_s + | Error (Build_id_mismatch (Client_should_error _)) + | Error (Server_busy _) | Error Server_socket_missing -> - begin try - if not quiet then prerr_endlinef - "Attempting to meanly kill server for `%s`" - (Path.to_string root); + begin + try + if not quiet then + prerr_endlinef "Attempting to meanly kill server for `%s`" (Path.to_string root); CommandMeanKill.mean_kill ~flowconfig_name ~tmp_dir root; - if not quiet then prerr_endlinef - "Successfully killed server for `%s`" - (Path.to_string root) + if not quiet then + prerr_endlinef "Successfully killed server for `%s`" (Path.to_string root) with CommandMeanKill.FailedToKill err -> - if not quiet then match err with - | Some err -> prerr_endline err - | None -> (); - let msg = spf "Failed to kill server meanly for `%s`" root_s in - FlowExitStatus.(exit ~msg Kill_error) - end - ) + if not quiet then ( + match err with + | Some err -> prerr_endline err + | None -> + (); + let msg = spf "Failed to kill server meanly for `%s`" root_s in + FlowExitStatus.(exit ~msg Kill_error) + ) + end) let command = CommandSpec.command spec main diff --git a/src/commands/suggestCommand.ml b/src/commands/suggestCommand.ml index d9cadf4672c..6a89042e879 100644 --- a/src/commands/suggestCommand.ml +++ b/src/commands/suggestCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,113 +11,144 @@ open CommandUtils -let spec = { - CommandSpec. - name = "suggest"; - doc = "Provides type annotation suggestions for a given program"; - usage = Printf.sprintf - "Usage: %s suggest [OPTION]... [FILE]\n\n\ - Prints a prettified version of the input program with suggested type\n\ - annotations filling in missing function parameters and return types.\n\n\ - e.g. 
%s suggest file.js\n\ - or %s suggest < file.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_flags - |> root_flag - |> error_flags - |> strip_root_flag - |> from_flag - |> path_flag - |> flag "--fail-on-tc-errors" no_arg - ~doc:"Fail on typechecking errors (similar behavior to \"check\")" - |> flag "--fail-on-suggest-warnings" no_arg - ~doc:"Fail on suggest warnings (inferred empty type or normalizer failures)" - |> anon "file" (optional string) - ) -} - +let spec = + { + CommandSpec.name = "suggest"; + doc = "Provides type annotation suggestions for a given program"; + usage = + Printf.sprintf + "Usage: %s suggest [OPTION]... [FILE]\n\nPrints a prettified version of the input program with suggested type\nannotations filling in missing function parameters and return types.\n\ne.g. %s suggest file.js\nor %s suggest < file.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_flags + |> root_flag + |> error_flags + |> strip_root_flag + |> from_flag + |> path_flag + |> wait_for_recheck_flag + |> flag + "--fail-on-tc-errors" + no_arg + ~doc:"Fail on typechecking errors (similar behavior to \"check\")" + |> flag + "--fail-on-suggest-warnings" + no_arg + ~doc:"Fail on suggest warnings (inferred empty type or normalizer failures)" + |> anon "file" (optional string)); + } let handle_error err = prerr_endline err; FlowExitStatus.(exit Unknown_error) -let layout_prettier ast = - let attached_comments = Flow_prettier_comments.attach_comments ast in - Js_layout_generator.with_attached_comments := Some attached_comments ; - let layout = Js_layout_generator.program_simple ast in - Js_layout_generator.with_attached_comments := None ; - layout - -let print_annotated_program annot_ast = - annot_ast - |> layout_prettier - |> Pretty_printer.print ~source_maps:None - |> Source.contents - |> print_endline - -let handle_response strip_root error_flags fail_on_tc_errors fail_on_suggest_warnings = +let handle_response strip_root error_flags fail_on_tc_errors fail_on_suggest_warnings content = let with_errors_and_warnings do_step errors warnings max_warnings next = let err () = - Errors.Cli_output.print_errors ~out_channel:stderr ~flags:error_flags - ~strip_root ~errors ~warnings ~lazy_msg:None (); - FlowExitStatus.exit - (get_check_or_status_exit_code errors warnings max_warnings) + Errors.Cli_output.print_errors + ~out_channel:stderr + ~flags:error_flags + ~strip_root + ~errors + ~warnings + ~lazy_msg:None + (); + FlowExitStatus.exit (get_check_or_status_exit_code errors warnings max_warnings) in - if not do_step then next () - else if Errors.ErrorSet.is_empty errors then begin + if not do_step then + next () + else if Errors.ConcreteLocPrintableErrorSet.is_empty errors then match max_warnings with - | Some x when Errors.ErrorSet.cardinal warnings > x -> err () - | None | Some _ -> next () - end - else err () + | Some x when Errors.ConcreteLocPrintableErrorSet.cardinal warnings > x -> err () + | None + | Some _ -> + next () + else + err () in function - | ServerProt.Response.Suggest_Ok { - tc_errors; tc_warnings; suggest_warnings; annotated_program - } -> + | ServerProt.Response.Suggest_Ok { tc_errors; tc_warnings; suggest_warnings; file_patch } -> (* First, see if the command should fail on a typechecking error. Use * tc_errors and tc_warnings with the `with_errors_and_warnings` defined * eralier. 
*) - with_errors_and_warnings fail_on_tc_errors tc_errors tc_warnings - error_flags.Errors.Cli_output.max_warnings @@ fun () -> + with_errors_and_warnings + fail_on_tc_errors + tc_errors + tc_warnings + error_flags.Errors.Cli_output.max_warnings + @@ fun () -> (* Then, check the case where we should fail on suggest-related warnings. * Use suggest_errors as warnings for `with_errors_and_warnings` and an * empty warning set. *) - with_errors_and_warnings fail_on_suggest_warnings Errors.ErrorSet.empty - suggest_warnings (Some 0) @@ fun () -> + with_errors_and_warnings + fail_on_suggest_warnings + Errors.ConcreteLocPrintableErrorSet.empty + suggest_warnings + (Some 0) + @@ fun () -> (* Finally, print the AST if no error has been flagged. *) - print_annotated_program annotated_program + print_string @@ Replacement_printer.print file_patch content | ServerProt.Response.Suggest_Error errors -> (* This is the case of a parse fail (no context is created). The `errors` * set ought to be non-empty. Otherwise, we throw an exception. If this * happens, see types_js.ml `typecheck_contents`. *) - with_errors_and_warnings true errors Errors.ErrorSet.empty None @@ fun () -> - failwith "SuggestCommand: Parsing failed with no errors" + with_errors_and_warnings true errors Errors.ConcreteLocPrintableErrorSet.empty None + @@ (fun () -> failwith "SuggestCommand: Parsing failed with no errors") -let main base_flags option_values root error_flags strip_root from path - fail_on_tc_errors fail_on_suggest_warnings filename () = - FlowEventLogger.set_from from; +let main + base_flags + option_values + root + error_flags + strip_root + path + wait_for_recheck + fail_on_tc_errors + fail_on_suggest_warnings + filename + () = let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let file = get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) - path (Option.map ~f:expand_path filename) in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - let strip_root = if strip_root then Some root else None in - let request = ServerProt.Request.SUGGEST file in - match connect_and_make_request flowconfig_name option_values root request with - | ServerProt.Response.SUGGEST (Ok result) -> - handle_response strip_root error_flags fail_on_tc_errors fail_on_suggest_warnings result; - flush stdout - | ServerProt.Response.SUGGEST (Error error) -> - handle_error error - | response -> failwith_bad_response ~request ~response + let file = + get_file_from_filename_or_stdin + ~cmd:CommandSpec.(spec.name) + path + (Option.map ~f:expand_path filename) + in + File_input.( + let content = + match content_of_file_input file with + | Ok content -> content + (* If the File_input is from stdin we would have been in the previous line. + If the File_input is from a file then expand_path verified the file exists. 
*) + | _ -> + let msg = Printf.sprintf "Failed to open file: %s" @@ filename_of_file_input file in + FlowExitStatus.(exit ~msg Input_error) + in + let file = FileContent (path_of_file_input file, content) in + let root = find_a_root ~base_flags ~input:file root in + let strip_root = + if strip_root then + Some root + else + None + in + let request = ServerProt.Request.SUGGEST { input = file; wait_for_recheck } in + match connect_and_make_request flowconfig_name option_values root request with + | ServerProt.Response.SUGGEST (Ok result) -> + handle_response + strip_root + error_flags + fail_on_tc_errors + fail_on_suggest_warnings + content + result; + flush stdout + | ServerProt.Response.SUGGEST (Error error) -> handle_error error + | response -> failwith_bad_response ~request ~response) let command = CommandSpec.command spec main diff --git a/src/commands/typeAtPosCommand.ml b/src/commands/typeAtPosCommand.ml index 8967694894a..d5b92059a05 100644 --- a/src/commands/typeAtPosCommand.ml +++ b/src/commands/typeAtPosCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,122 +12,129 @@ open CommandUtils open Utils_js -let spec = { - CommandSpec. - name = "type-at-pos"; - doc = "Shows the type at a given file and position"; - usage = Printf.sprintf - "Usage: %s type-at-pos [OPTION]... [FILE] LINE COLUMN\n\n\ - e.g. %s type-at-pos foo.js 12 3\n\ - or %s type-at-pos 12 3 < foo.js\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> base_flags - |> connect_and_json_flags - |> root_flag - |> strip_root_flag - |> verbose_flags - |> from_flag - |> path_flag - |> flag "--expand-json-output" no_arg - ~doc:"Includes an expanded version of the returned JSON type (implies --json)" - |> flag "--expand-type-aliases" no_arg - ~doc:"Replace type aliases with their bodies" - |> anon "args" (required (list_of string)) - ) -} +let spec = + { + CommandSpec.name = "type-at-pos"; + doc = "Shows the type at a given file and position"; + usage = + Printf.sprintf + "Usage: %s type-at-pos [OPTION]... [FILE] LINE COLUMN\n\ne.g. 
%s type-at-pos foo.js 12 3\nor %s type-at-pos 12 3 < foo.js\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> base_flags + |> connect_and_json_flags + |> root_flag + |> strip_root_flag + |> verbose_flags + |> from_flag + |> path_flag + |> wait_for_recheck_flag + |> flag + "--expand-json-output" + no_arg + ~doc:"Includes an expanded version of the returned JSON type (implies --json)" + |> flag "--expand-type-aliases" no_arg ~doc:"Replace type aliases with their bodies" + |> flag + "--omit-typearg-defaults" + no_arg + ~doc:"Omit type arguments when defaults exist and match the provided type argument" + |> anon "args" (required (list_of string))); + } -let exit () = - CommandSpec.usage spec; - FlowExitStatus.(exit Commandline_usage_error) - -let parse_line_and_column line column = - try (int_of_string line), (int_of_string column) - with Failure(_) -> exit () - -let parse_args path args = - let (file, line, column) = match args with - | [file; line; column] -> - let file = expand_path file in - let line, column = parse_line_and_column line column in - File_input.FileName file, line, column - | [line; column] -> - let line, column = parse_line_and_column line column in - get_file_from_filename_or_stdin ~cmd:CommandSpec.(spec.name) path None, - line, - column - | _ -> - exit () - in - let (line, column) = convert_input_pos (line, column) in - file, line, column - -let handle_response (loc, t) ~json ~pretty ~strip_root ~expanded = - let ty = match t with +let handle_response (loc, t) ~file_contents ~json ~pretty ~strip_root ~expanded = + let ty = + match t with | None -> "(unknown)" | Some ty -> Ty_printer.string_of_t ty in - if json - then ( - let open Hh_json in - let open Reason in - let json_assoc = ( - ("type", JSON_String ty) :: - ("reasons", JSON_Array []) :: - ("loc", json_of_loc ~strip_root loc) :: - (Errors.deprecated_json_props_of_loc ~strip_root loc) - ) in - let json_assoc = - if expanded then - ("expanded_type", match t with - | Some ty -> Ty_debug.json_of_t ~strip_root ty - | None -> JSON_Null) :: json_assoc - else json_assoc in - let json = JSON_Object json_assoc in - print_json_endline ~pretty json - ) else ( + if json then + Hh_json.( + Reason.( + let offset_table = Option.map file_contents ~f:Offset_utils.make in + let json_assoc = + ("type", JSON_String ty) + :: ("reasons", JSON_Array []) + :: ("loc", json_of_loc ~strip_root ~offset_table loc) + :: Errors.deprecated_json_props_of_loc ~strip_root loc + in + let json_assoc = + if expanded then + ( "expanded_type", + match t with + | Some ty -> Ty_debug.json_of_t ~strip_root ty + | None -> JSON_Null ) + :: json_assoc + else + json_assoc + in + let json = JSON_Object json_assoc in + print_json_endline ~pretty json)) + else let range = - if loc = Loc.none then "" - else spf "\n%s" (range_string_of_loc ~strip_root loc) + if loc = Loc.none then + "" + else + spf "\n%s" (range_string_of_loc ~strip_root loc) in - print_endline (ty^range) - ) + print_endline (ty ^ range) let handle_error err ~json ~pretty = - if json - then ( - let open Hh_json in - let json = JSON_Object ["error", JSON_String err] in - prerr_json_endline ~pretty json - ) else ( + if json then + Hh_json.( + let json = JSON_Object [("error", JSON_String err)] in + prerr_json_endline ~pretty json) + else prerr_endline err - ) -let main base_flags option_values json pretty root strip_root verbose from path expanded - expand_aliases args () = - FlowEventLogger.set_from from; +let main + base_flags + 
option_values + json + pretty + root + strip_root + verbose + path + wait_for_recheck + expanded + expand_aliases + omit_targ_defaults + args + () = let json = json || pretty || expanded in - let (file, line, column) = parse_args path args in + let (file, line, column) = parse_location_with_optional_filename spec path args in let flowconfig_name = base_flags.Base_flags.flowconfig_name in - let root = guess_root flowconfig_name ( - match root with - | Some root -> Some root - | None -> File_input.path_of_file_input file - ) in - let strip_root = if strip_root then Some root else None in - - if not json && (verbose <> None) - then prerr_endline "NOTE: --verbose writes to the server log file"; + let root = find_a_root ~base_flags ~input:file root in + let strip_root = + if strip_root then + Some root + else + None + in + if (not json) && verbose <> None then + prerr_endline "NOTE: --verbose writes to the server log file"; - let request = ServerProt.Request.INFER_TYPE - (file, line, column, verbose, expand_aliases) in + let request = + ServerProt.Request.INFER_TYPE + { + input = file; + line; + char = column; + verbose; + expand_aliases; + omit_targ_defaults; + wait_for_recheck; + } + in + let file_contents = File_input.content_of_file_input file |> Core_result.ok in match connect_and_make_request flowconfig_name option_values root request with | ServerProt.Response.INFER_TYPE (Error err) -> handle_error err ~json ~pretty | ServerProt.Response.INFER_TYPE (Ok resp) -> - handle_response resp ~json ~pretty ~strip_root ~expanded + handle_response resp ~file_contents ~json ~pretty ~strip_root ~expanded | response -> failwith_bad_response ~request ~response let command = CommandSpec.command spec main diff --git a/src/commands/versionCommand.ml b/src/commands/versionCommand.ml index a357eb2bbba..0342980a17e 100644 --- a/src/commands/versionCommand.ml +++ b/src/commands/versionCommand.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,42 +9,51 @@ (* flow ast command *) (***********************************************************************) -let spec = { - CommandSpec. - name = "version"; - doc = "Print version information"; - usage = Printf.sprintf - "Usage: %s version [OPTION]... [ROOT]\n\n\ - e.g. %s version\n\ - or %s version --json\n\ - or %s version /path/to/root\n" - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name - CommandUtils.exe_name; - args = CommandSpec.ArgSpec.( - empty - |> CommandUtils.json_flags - |> CommandUtils.from_flag - |> anon "root" (optional string) - ) -} +let spec = + { + CommandSpec.name = "version"; + doc = "Print version information"; + usage = + Printf.sprintf + "Usage: %s version [OPTION]... [ROOT]\n\ne.g. 
%s version\nor %s version --json\nor %s version /path/to/root\n" + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name + CommandUtils.exe_name; + args = + CommandSpec.ArgSpec.( + empty + |> CommandUtils.json_flags + |> CommandUtils.from_flag + |> flag "--semver" no_arg ~doc:"Return only the version number" + |> anon "root" (optional string)); + } -let main json pretty from _root () = - FlowEventLogger.set_from from; - if json || pretty - then begin - let open Hh_json in - let json = JSON_Object [ - "semver", JSON_String Flow_version.version; - "binary", JSON_String (Sys_utils.executable_path ()); - "build_id", JSON_String Build_id.build_id_ohai; - "flow_build_id", JSON_String (Flow_build_id.get_build_id ()); - ] in - print_json_endline ~pretty json - end else begin - CommandUtils.print_version () - end; +let print_semver json pretty = + if json || pretty then + Hh_json.( + let json = JSON_Object [("semver", JSON_String Flow_version.version)] in + print_json_endline ~pretty json) + else + print_endline Flow_version.version + +let main json pretty semver _root () = + if semver then + print_semver json pretty + else if json || pretty then + Hh_json.( + let json = + JSON_Object + [ + ("semver", JSON_String Flow_version.version); + ("binary", JSON_String (Sys_utils.executable_path ())); + ("build_id", JSON_String Build_id.build_revision); + ("flow_build_id", JSON_String (Flow_build_id.get_build_id ())); + ] + in + print_json_endline ~pretty json) + else + CommandUtils.print_version (); FlowExitStatus.(exit No_error) let command = CommandSpec.command spec main diff --git a/src/common/audit/dune b/src/common/audit/dune new file mode 100644 index 00000000000..7e7ce8a8235 --- /dev/null +++ b/src/common/audit/dune @@ -0,0 +1,4 @@ +(library + (name flow_common_audit) + (wrapped false) +) diff --git a/src/common/audit/expensive.ml b/src/common/audit/expensive.ml index 5fd601f88c0..e179f45e3e9 100644 --- a/src/common/audit/expensive.ml +++ b/src/common/audit/expensive.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -22,7 +22,6 @@ tokens describing the results of manual audits of calls to the API. *) - type audit = unit (* Use this token to describe expensive calls that are deemed OK. @@ -32,7 +31,7 @@ type audit = unit time cost is OK) and their memory is reclaimed after they die (so memory cost is OK). *) -let ok: audit = () +let ok : audit = () (* Use this token to describe expensive calls that either need immediate fixing or careful monitoring of costs. @@ -43,9 +42,10 @@ let ok: audit = () pressure that might eventually cultimate in memory leaks or long / frequent GC pauses. *) -let warn: audit = () +let warn : audit = () (* Given a function `f`, `wrap f` demands an extra argument, ~audit preceding other arguments, but otherwise behaves the same way. *) type 'a t = audit:audit -> 'a + let wrap f ~audit:_ = f diff --git a/src/common/audit/expensive.mli b/src/common/audit/expensive.mli index a0ce07524e8..92b9cbcee83 100644 --- a/src/common/audit/expensive.mli +++ b/src/common/audit/expensive.mli @@ -1,13 +1,16 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) type audit -val ok: audit -val warn: audit + +val ok : audit + +val warn : audit type 'a t = audit:audit -> 'a -val wrap: 'a -> 'a t + +val wrap : 'a -> 'a t diff --git a/src/common/build_id/dune b/src/common/build_id/dune new file mode 100644 index 00000000000..7f8342ad370 --- /dev/null +++ b/src/common/build_id/dune @@ -0,0 +1,8 @@ +(library + (name flow_common_build_id) + (wrapped false) + (libraries + sys_utils + xx + ) +) diff --git a/src/common/build_id/flow_build_id.ml b/src/common/build_id/flow_build_id.ml index acb67494857..5f0d62a3879 100644 --- a/src/common/build_id/flow_build_id.ml +++ b/src/common/build_id/flow_build_id.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/common/build_id/flow_build_id.mli b/src/common/build_id/flow_build_id.mli index d69ca41008f..36f8bc9d7d6 100644 --- a/src/common/build_id/flow_build_id.mli +++ b/src/common/build_id/flow_build_id.mli @@ -1,8 +1,8 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val get_build_id: unit -> string +val get_build_id : unit -> string diff --git a/src/common/diff.ml b/src/common/diff.ml deleted file mode 100644 index 20613b2892c..00000000000 --- a/src/common/diff.ml +++ /dev/null @@ -1,23 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Sys_utils - -let diff_of_file_and_string file new_content = - let new_file = Filename.temp_file "" "" in - write_file new_file new_content; - let patch_file = Filename.temp_file "" "" in - let diff_cmd = - if Sys.win32 then - Printf.sprintf "fc %s %s > %s" - file new_file patch_file - else - Printf.sprintf "diff -u --label old --label new %s %s > %s" - file new_file patch_file in - diff_cmd - |> Sys.command |> ignore; - cat patch_file diff --git a/src/common/docblock.ml b/src/common/docblock.ml index ab0063ace71..809006d9a8f 100644 --- a/src/common/docblock.ml +++ b/src/common/docblock.ml @@ -1,22 +1,25 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - -type flow_mode = OptIn | OptInStrict | OptInStrictLocal | OptInWeak | OptOut +type flow_mode = + | OptIn + | OptInStrict + | OptInStrictLocal + | OptInWeak + | OptOut type jsx_pragma = - (** + (* * Specifies a function that should be invoked instead of React.createElement * when interpreting JSX syntax. Otherwise, the usual rules of JSX are * followed: children are varargs after a props argument. *) | Jsx_pragma of (string * (Loc.t, Loc.t) Flow_ast.Expression.t) - - (** + (* * Alternate mode for interpreting JSX syntax. The element name is treated * as a function to be directly invoked, e.g. -> Foo({}). * Children are part of props instead of a separate argument. 
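The `Expensive.wrap` helper reformatted above in src/common/audit/expensive.ml only changes a function's calling convention: because `audit` is `unit` and `wrap f ~audit:_ = f`, the token is erased at runtime and exists purely so that expensive call sites are explicit and searchable. A minimal usage sketch under that reading; the `cache`, `find_exn`, and `lookup` names below are illustrative and not part of this patch:

(* Illustrative only, not code from this diff. *)
(* Force callers of an expensive lookup to record an audit decision. *)
let cache : (string, int) Hashtbl.t = Hashtbl.create 16

let find_exn = Expensive.wrap Hashtbl.find

(* Every call site must now state that its cost has been vetted. *)
let lookup key = find_exn ~audit:Expensive.ok cache key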
@@ -32,81 +35,93 @@ type t = { jsx: jsx_pragma option; } -let default_info = { - flow = None; - typeAssert = false; - preventMunge = None; - providesModule = None; - isDeclarationFile = false; - jsx = None; -} +let default_info = + { + flow = None; + typeAssert = false; + preventMunge = None; + providesModule = None; + isDeclarationFile = false; + jsx = None; + } (* accessors *) let flow info = info.flow + let typeAssert info = info.typeAssert + let preventMunge info = info.preventMunge + let providesModule info = info.providesModule + let isDeclarationFile info = info.isDeclarationFile + let jsx info = info.jsx -let is_flow info = match info.flow with +let is_flow info = + match info.flow with | Some OptIn | Some OptInStrict | Some OptInStrictLocal - | Some OptInWeak -> true + | Some OptInWeak -> + true | Some OptOut - | None -> false + | None -> + false let set_flow_mode_for_ide_command info = - let flow = match flow info with - (* If the file does not specify a @flow pragma, we still want to try + let flow = + match flow info with + (* If the file does not specify a @flow pragma, we still want to try to infer something, but the file might be huge and unannotated, which can cause performance issues (including non-termination). To avoid this case, we infer the file using "weak mode." *) - | None -> OptInWeak - (* Respect @flow pragma *) - | Some OptIn -> OptIn - (* Respect @flow strict pragma *) - | Some OptInStrict -> OptInStrict - (* Respect @flow strict-local pragma *) - | Some OptInStrictLocal -> OptInStrictLocal - (* Respect @flow weak pragma *) - | Some OptInWeak -> OptInWeak - (* Respect @noflow, which `apply_docblock_overrides` does not by + | None -> OptInWeak + (* Respect @flow pragma *) + | Some OptIn -> OptIn + (* Respect @flow strict pragma *) + | Some OptInStrict -> OptInStrict + (* Respect @flow strict-local pragma *) + | Some OptInStrictLocal -> OptInStrictLocal + (* Respect @flow weak pragma *) + | Some OptInWeak -> OptInWeak + (* Respect @noflow, which `apply_docblock_overrides` does not by default. Again, large files can cause non-termination, so respecting this pragma gives programmers a way to tell Flow to avoid inference on such files. 
*) - | Some OptOut -> OptInWeak + | Some OptOut -> OptInWeak in { info with flow = Some flow } (* debugging *) let json_of_docblock info = - let open Hh_json in - let flow = match flow info with - | Some OptIn -> JSON_String "OptIn" - | Some OptInStrict -> JSON_String "OptInStrict" - | Some OptInStrictLocal -> JSON_String "OptInStrictLocal" - | Some OptInWeak -> JSON_String "OptInWeak" - | Some OptOut -> JSON_String "OptOut" - | None -> JSON_Null in - - let preventsMunge = match preventMunge info with - | Some b -> JSON_Bool b - | None -> JSON_Null in - - let providesModule = match providesModule info with - | Some str -> JSON_String str - | None -> JSON_Null in - - let isDeclarationFile = JSON_Bool (isDeclarationFile info) in - - let typeAssert = JSON_Bool (typeAssert info) in - - JSON_Object [ - "flow", flow; - "typeAssert", typeAssert; - "preventMunge", preventsMunge; - "providesModule", providesModule; - "isDeclarationFile", isDeclarationFile; - ] + Hh_json.( + let flow = + match flow info with + | Some OptIn -> JSON_String "OptIn" + | Some OptInStrict -> JSON_String "OptInStrict" + | Some OptInStrictLocal -> JSON_String "OptInStrictLocal" + | Some OptInWeak -> JSON_String "OptInWeak" + | Some OptOut -> JSON_String "OptOut" + | None -> JSON_Null + in + let preventsMunge = + match preventMunge info with + | Some b -> JSON_Bool b + | None -> JSON_Null + in + let providesModule = + match providesModule info with + | Some str -> JSON_String str + | None -> JSON_Null + in + let isDeclarationFile = JSON_Bool (isDeclarationFile info) in + let typeAssert = JSON_Bool (typeAssert info) in + JSON_Object + [ + ("flow", flow); + ("typeAssert", typeAssert); + ("preventMunge", preventsMunge); + ("providesModule", providesModule); + ("isDeclarationFile", isDeclarationFile); + ]) diff --git a/src/common/dune b/src/common/dune new file mode 100644 index 00000000000..98dae6cf3f3 --- /dev/null +++ b/src/common/dune @@ -0,0 +1,38 @@ +(library + (name flow_version) + (wrapped false) + (preprocess (pps ppx_deriving.eq)) ; See T41851208 + (modules flow_version)) + +(library + (name flow_common) + (wrapped false) + (modules (:standard \ flow_version flow_lsp_conversions)) + (flags -w +a-4-6-29-35-44-48-50) + (libraries + flow_common_lints + flow_common_span + flow_common_utils + flow_logging + flow_parser_utils_aloc + flow_version + logging_common ; hack + heap_ident ; hack + hh_json ; hack + sys_utils ; hack + ) + (preprocess (pps ppx_deriving.eq)) +) + +(library + (name flow_common_lsp_conversions) + (wrapped false) + (modules flow_lsp_conversions) + (libraries + flow_parser + flow_server_persistent_connection + flow_server_protocol + lsp + ) + (preprocess (pps ppx_deriving.eq)) ; See T41851208 +) diff --git a/src/common/errors/dune b/src/common/errors/dune new file mode 100644 index 00000000000..d92618a3a14 --- /dev/null +++ b/src/common/errors/dune @@ -0,0 +1,8 @@ +(library + (name flow_common_errors) + (wrapped false) + (libraries + flow_common + flow_common_lints + ) +) diff --git a/src/common/errors/error_suppressions.ml b/src/common/errors/error_suppressions.ml deleted file mode 100644 index 300a46caaa6..00000000000 --- a/src/common/errors/error_suppressions.ml +++ /dev/null @@ -1,255 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -(* This is a data structure used to track what locations are being suppressed - * and which suppressions have yet to be used. - *) - -open Severity -open Utils_js - -exception No_source of string -exception Missing_lint_settings of string - -module FileSuppressions : sig - type t - - val empty: t - - val is_empty: t -> bool - - val add: Loc.t -> t -> t - val remove: Loc.t -> t -> t - val union: t -> t -> t - val add_lint_suppression: Loc.t -> t -> t - val remove_lint_suppression: Loc.t -> t -> t - - val suppression_at_loc: Loc.t -> t -> LocSet.t option - val all_locs: t -> LocSet.t -end = struct - type error_suppressions = LocSet.t SpanMap.t - type t = { - suppressions: error_suppressions; - lint_suppressions: LocSet.t - } - - let empty = { - suppressions = SpanMap.empty; - lint_suppressions = LocSet.empty; - } - - let is_empty { suppressions; lint_suppressions } = - SpanMap.is_empty suppressions && LocSet.is_empty lint_suppressions - - let add loc { suppressions; lint_suppressions } = - let suppression_loc = Loc.( - let start = { loc.start with line = loc._end.line + 1; column = 0 } in - let _end = { loc._end with line = loc._end.line + 2; column = 0 } in - { loc with start; _end } - ) in - let suppressions = - SpanMap.add suppression_loc (LocSet.singleton loc) suppressions - ~combine:LocSet.union - in - { suppressions; lint_suppressions } - - let remove loc ({ suppressions; _ } as orig) = - { orig with suppressions = SpanMap.remove loc suppressions } - - let union a b = { - suppressions = SpanMap.union a.suppressions b.suppressions; - lint_suppressions = LocSet.union a.lint_suppressions b.lint_suppressions; - } - - let add_lint_suppression lint_suppression t = { - t with - lint_suppressions = LocSet.add lint_suppression t.lint_suppressions; - } - - let remove_lint_suppression lint_suppression ({ lint_suppressions; _} as orig) = - { orig with lint_suppressions = LocSet.remove lint_suppression lint_suppressions } - - let suppression_at_loc loc {suppressions; _} = - SpanMap.get loc suppressions - - let all_locs { suppressions; lint_suppressions } = - suppressions - |> SpanMap.values - |> List.fold_left LocSet.union lint_suppressions -end - -type t = FileSuppressions.t FilenameMap.t - -let empty = FilenameMap.empty - -let file_of_loc_unsafe loc = - match loc.Loc.source with - | Some x -> x - | None -> raise (No_source (Loc.to_string loc)) - -let add loc map = - let file = file_of_loc_unsafe loc in - let suppressions = FileSuppressions.empty |> FileSuppressions.add loc in - FilenameMap.add ~combine:FileSuppressions.union file suppressions map - -let union = - let combine _key x y = Some (FileSuppressions.union x y) in - fun a b -> Utils_js.FilenameMap.union ~combine a b - -let add_lint_suppressions lint_suppressions map = - LocSet.fold begin fun loc acc -> - let file = file_of_loc_unsafe loc in - let file_suppressions = FilenameMap.get file acc |> Option.value ~default:FileSuppressions.empty in - let file_suppressions = FileSuppressions.add_lint_suppression loc file_suppressions in - FilenameMap.add file file_suppressions acc - end lint_suppressions map - -let remove = FilenameMap.remove - -(* raises if `loc` has no filename or `severity_cover` contains no entry for `loc`'s filename *) -let lint_settings_at_loc loc severity_cover = - let file = file_of_loc_unsafe loc in - let file_cover = match FilenameMap.get file severity_cover with - | Some x -> x - | None -> raise (Missing_lint_settings (Loc.to_string loc)) - in - ExactCover.find loc file_cover - -(* raises if `loc` has no 
filename *) -let file_suppressions_of_loc loc suppressions_map = - let file = file_of_loc_unsafe loc in - match FilenameMap.get file suppressions_map with - | Some x -> x - | None -> FileSuppressions.empty - -(* raises if `loc` has no filename *) -let suppression_at_loc loc suppressions_map = - let file_suppressions = file_suppressions_of_loc loc suppressions_map in - FileSuppressions.suppression_at_loc loc file_suppressions - -(* raises if `loc` has no filename. - * no-op if suppressions_map does not contain an entry for that file. *) -let update_file_suppressions f loc suppressions_map = - let file = file_of_loc_unsafe loc in - match FilenameMap.get file suppressions_map with - | None -> suppressions_map - | Some file_suppressions -> - let file_suppressions = f file_suppressions in - FilenameMap.add file file_suppressions suppressions_map - -let remove_suppression_from_map loc (suppressions_map: t) = - update_file_suppressions (FileSuppressions.remove loc) loc suppressions_map - -let remove_lint_suppression_from_map loc (suppressions_map: t) = - update_file_suppressions (FileSuppressions.remove_lint_suppression loc) loc suppressions_map - -let check_loc lint_kind suppressions severity_cover - ((result, used, (unused: t), is_primary_loc) as acc) loc = - (* We only want to check the starting position of the reason *) - let loc = Loc.first_char loc in - match suppression_at_loc loc suppressions with - | Some locs -> - let used = LocSet.union locs used in - let unused = remove_suppression_from_map loc unused in - Off, used, unused, false - | None -> - (* Only respect lint settings at the primary (first) location *) - if is_primary_loc - then Option.value_map lint_kind ~default:acc ~f:(fun some_lint_kind -> - let lint_settings = lint_settings_at_loc loc severity_cover in - let state = LintSettings.get_value some_lint_kind lint_settings in - let unused = - match LintSettings.get_loc some_lint_kind lint_settings with - | Some used_suppression when state = Off -> - (* TODO: consume this lint suppression by adding to used set *) - remove_lint_suppression_from_map used_suppression unused - | _ -> unused - in - state, used, unused, false - ) - else result, used, unused, false - -(* Checks if any of the given locations should be suppressed. *) -let check_locs locs lint_kind (suppressions: t) severity_cover (unused: t) = - (* We need to check every location in order to figure out which suppressions - are really unused...that's why we don't shortcircuit as soon as we find a - matching error suppression. - If the "primary" location has severity = Off, the error should be - suppressed even if it is not explicit. *) - List.fold_left - (check_loc lint_kind suppressions severity_cover) - (Err, LocSet.empty, unused, true) - locs - -let check err (suppressions: t) severity_cover (unused: t) = - let locs = - Errors.locs_of_error err - (* It is possible for errors to contain locations without a source, but suppressions always - * exist in an actual file so there is no point checking if suppressions exist at locations - * without a source. *) - |> List.filter (fun {Loc.source; _} -> Option.is_some source) - in - (* Ignore lint errors which were never enabled in the first place. 
*) - let lint_kind, ignore = - match Errors.kind_of_error err with - | Errors.LintError kind -> - let severity, is_explicit = List.fold_left (fun (s, e) loc -> - let lint_settings = lint_settings_at_loc loc severity_cover in - let s' = LintSettings.get_value kind lint_settings in - let e' = LintSettings.is_explicit kind lint_settings in - (severity_min s s', e || e') - ) (Err, false) locs in - let ignore = severity = Off && not is_explicit in - Some kind, ignore - | _ -> None, false - in - if ignore then None else - let result, used, unused, _ = - check_locs locs lint_kind suppressions severity_cover unused - in - (* Ignore lints in node_modules folders (which we assume to be dependencies). *) - let is_in_dependency = - let primary_loc = Errors.loc_of_error err in - Option.value_map (Loc.source primary_loc) ~default:false ~f:(fun filename -> - String_utils.is_substring "/node_modules/" (File_key.to_string filename)) - in - let result = match Errors.kind_of_error err with - | Errors.RecursionLimitError -> - (* TODO: any related suppressions should not be considered used *) - Err - | _ -> if (is_in_dependency && (Option.is_some lint_kind)) - then Off (* TODO: this should not show up with --include-suppressed *) - else result - in Some (result, used, unused) - -(* Gets the locations of the suppression comments that are yet unused *) - -let all_locs map = - map - |> FilenameMap.values - |> List.map FileSuppressions.all_locs - |> List.fold_left LocSet.union LocSet.empty - |> LocSet.elements - -let filter_suppressed_errors suppressions severity_cover errors ~unused = - (* Filter out suppressed errors. also track which suppressions are used. *) - Errors.ErrorSet.fold (fun error ((errors, warnings, suppressed, unused) as acc) -> - match check error suppressions severity_cover unused with - | None -> acc - | Some (severity, used, unused) -> - match severity with - | Off -> errors, warnings, (error, used)::suppressed, unused - | Warn -> errors, Errors.ErrorSet.add error warnings, suppressed, unused - | Err -> Errors.ErrorSet.add error errors, warnings, suppressed, unused - ) errors (Errors.ErrorSet.empty, Errors.ErrorSet.empty, [], unused) - -let update_suppressions current_suppressions new_suppressions = - FilenameMap.fold begin fun file file_suppressions acc -> - if FileSuppressions.is_empty file_suppressions - then FilenameMap.remove file acc - else FilenameMap.add file file_suppressions acc - end new_suppressions current_suppressions diff --git a/src/common/errors/error_suppressions.mli b/src/common/errors/error_suppressions.mli deleted file mode 100644 index 6474c24fc42..00000000000 --- a/src/common/errors/error_suppressions.mli +++ /dev/null @@ -1,29 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type t - -val empty: t - -(* Raises if the given loc has `source` set to `None` *) -val add: Loc.t -> t -> t -val add_lint_suppressions: Utils_js.LocSet.t -> t -> t - -val remove: File_key.t -> t -> t - -(* Union the two collections of suppressions. If they both contain suppressions for a given file, - * include both sets of suppressions. *) -val union: t -> t -> t -(* Union the two collections of suppressions. If they both contain suppressions for a given file, - * discard those included in the first argument. 
*) -val update_suppressions: t -> t -> t - -val all_locs: t -> Loc.t list - -val filter_suppressed_errors : - t -> ExactCover.lint_severity_cover Utils_js.FilenameMap.t -> Errors.ErrorSet.t -> unused:t -> - (Errors.ErrorSet.t * Errors.ErrorSet.t * (Errors.error * Utils_js.LocSet.t) list * t) diff --git a/src/common/errors/errors.ml b/src/common/errors/errors.ml index 3e67a1e782d..0c059d6f237 100644 --- a/src/common/errors/errors.ml +++ b/src/common/errors/errors.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,45 +7,51 @@ open Severity +type infer_warning_kind = + | ExportKind + | OtherKind + type error_kind = - | ParseError + | ParseError (* An error produced by the parser *) + | PseudoParseError (* An error produced elsewhere but reported as a parse error *) | InferError - | InferWarning + | InferWarning of infer_warning_kind | InternalError | DuplicateProviderError | RecursionLimitError | LintError of Lints.lint_kind let string_of_kind = function -| ParseError -> "ParseError" -| InferError -> "InferError" -| InferWarning -> "InferWarning" -| InternalError -> "InternalError" -| DuplicateProviderError -> "DuplicateProviderError" -| RecursionLimitError -> "RecursionLimitError" -| LintError lint_kind -> "LintError" ^ "-" ^ Lints.string_of_kind lint_kind + | ParseError -> "ParseError" + | PseudoParseError -> "PseudoParseError" + | InferError -> "InferError" + | InferWarning _ -> "InferWarning" + | InternalError -> "InternalError" + | DuplicateProviderError -> "DuplicateProviderError" + | RecursionLimitError -> "RecursionLimitError" + | LintError lint_kind -> "LintError" ^ "-" ^ Lints.string_of_kind lint_kind (* internal rep for core info *) -type message = - | BlameM of Loc.t * string +type 'a message = + | BlameM of 'a * string | CommentM of string (* simple structure for callers to specify error message content, converted to message internally. *) -type info = Loc.t * string list +type 'a info = 'a * string list (** for extra info, enough structure to do simple tree-shaped output *) -type info_tree = - | InfoLeaf of info list - | InfoNode of info list * info_tree list +type 'a info_tree = + | InfoLeaf of 'a info list + | InfoNode of 'a info list * 'a info_tree list -type classic_error = { - messages: message list; - extra: info_tree list +type 'a classic_error = { + messages: 'a message list; + extra: 'a info_tree list; } -module LocSet = Utils_js.LocSet -module LocMap = Utils_js.LocMap +module LocSet = Loc_collections.LocSet +module LocMap = Loc_collections.LocMap (* Types and utilities for friendly errors. *) module Friendly = struct @@ -74,19 +80,25 @@ module Friendly = struct * merge errors with the same root cause into a single block. 
*) type 'a t' = { - loc: Loc.t; + loc: 'a; root: 'a error_root option; message: 'a error_message; } and 'a error_root = { - root_loc: Loc.t; + root_loc: 'a; root_message: 'a message; } and 'a error_message = - | Normal of { message: 'a message; frames: 'a message list option } - | Speculation of { frames: 'a message list; branches: (int * 'a t') list } + | Normal of { + message: 'a message; + frames: 'a message list option; + } + | Speculation of { + frames: 'a message list; + branches: (int * 'a t') list; + } and 'a message = 'a message_feature list @@ -109,7 +121,22 @@ module Friendly = struct group_message_list: 'a message_group list; } - type t = Loc.t t' + type t = ALoc.t t' + + type docs = { + call: string; + tuplemap: string; + objmap: string; + objmapi: string; + } + + let docs = + { + call = "https://flow.org/en/docs/types/utilities/#toc-call"; + tuplemap = "https://flow.org/en/docs/types/utilities/#toc-tuplemap"; + objmap = "https://flow.org/en/docs/types/utilities/#toc-objmap"; + objmapi = "https://flow.org/en/docs/types/utilities/#toc-objmapi"; + } (* This function was introduced into the OCaml standard library in 4.04.0. Not * all of our tooling supports 4.04.0 yet, so we have a small @@ -118,9 +145,8 @@ module Friendly = struct let rec loop i c s = try let k = String.index_from s i c in - (String.sub s i (k - i))::(loop (k + 1) c s) - with - Not_found -> [String.sub s i (String.length s - i)] + String.sub s i (k - i) :: loop (k + 1) c s + with Not_found -> [String.sub s i (String.length s - i)] in loop 0 c s @@ -129,26 +155,28 @@ module Friendly = struct * * The inverse of string_of_message_inlines. *) let message_inlines_of_string s = - List.mapi (fun i s -> - if i mod 2 = 0 - then Text s - else Code s - ) (split_on_char '`' s) + Core_list.mapi + ~f:(fun i s -> + if i mod 2 = 0 then + Text s + else + Code s) + (split_on_char '`' s) (* Converts a string into a message. e.g.: * "hello `world`" becomes a message where "world" is styled as inline code. *) - let message_of_string s = - [Inline (message_inlines_of_string s)] + let message_of_string s = [Inline (message_inlines_of_string s)] (* Converts a message_inline list into a string. e.g.: * [Text "hello "; Code "world"] becomes: "hello `world`" * * The inverse of message_inlines_of_string. *) let string_of_message_inlines = - List.fold_left (fun message -> function - | Text text -> message ^ text - | Code text -> message ^ "`" ^ text ^ "`" - ) "" + List.fold_left + (fun message -> function + | Text text -> message ^ text + | Code text -> message ^ "`" ^ text ^ "`") + "" (* Convenience functions for constructing friendly error messages. e.g. * @@ -157,47 +185,44 @@ module Friendly = struct * Is an example of an incompatibility error message. 
*) let text s = Inline [Text s] + let code s = Inline [Code s] - let ref ?(loc=true) r = + let ref ?(loc = true) r = let desc = desc_of_reason ~unwrap:(is_scalar_reason r) r in - let desc = match desc with - | RCode code -> [Code code] - | _ -> message_inlines_of_string (string_of_desc desc) + let desc = + match desc with + | RCode code -> [Code code] + | _ -> message_inlines_of_string (string_of_desc desc) in - if loc then ( - let loc = match annot_loc_of_reason r with - | Some loc -> loc - | None -> def_loc_of_reason r + if loc then + let loc = + match annot_loc_of_reason r with + | Some loc -> loc + | None -> def_loc_of_reason r in if loc = Loc.none then Inline desc else Reference (desc, loc) - ) else ( + else Inline desc - ) - - (* Intersperses a given separator in between each element of a list. *) - let rec intersperse sep = function - | [] -> [] - | x :: [] -> x :: [] - | x1 :: x2 :: xs -> x1 :: sep :: intersperse sep (x2 :: xs) (* Concatenates a list of messages with a conjunction according to the "rules" * of the English language. *) - let conjunction_concat ?(conjunction="and") = function - | [] -> [] - | x::[] -> x - | x1::x2::[] -> x1 @ [Inline [Text (" " ^ conjunction ^ " ")]] @ x2 - | xs -> - let rec loop = function - | [] - | _::[] -> failwith "unreachable" - | x1::x2::[] -> x1 @ [Inline [Text (", " ^ conjunction ^ " ")]] @ x2 - | x::xs -> x @ [Inline [Text ", "]] @ loop xs - in - loop xs + let conjunction_concat ?(conjunction = "and") = function + | [] -> [] + | [x] -> x + | [x1; x2] -> x1 @ [Inline [Text (" " ^ conjunction ^ " ")]] @ x2 + | xs -> + let rec loop = function + | [] + | [_] -> + failwith "unreachable" + | [x1; x2] -> x1 @ [Inline [Text (", " ^ conjunction ^ " ")]] @ x2 + | x :: xs -> x @ [Inline [Text ", "]] @ loop xs + in + loop xs (* Flattens out the Inline and Text constructors in an error message. Helpful * for hiding implementation details in our JSON output. *) @@ -205,14 +230,13 @@ module Friendly = struct let rec loop_inlines inlines = match inlines with | [] -> [] - | ((Code _) as inline) :: inlines -> inline :: loop_inlines inlines - | ((Text text) as inline) :: inlines -> + | (Code _ as inline) :: inlines -> inline :: loop_inlines inlines + | (Text text as inline) :: inlines -> let inlines = loop_inlines inlines in (match inlines with | [] -> [inline] - | (Text text') :: inlines -> Text (text ^ text') :: inlines - | inlines -> inline :: inlines - ) + | Text text' :: inlines -> Text (text ^ text') :: inlines + | inlines -> inline :: inlines) in let rec loop features = match features with @@ -223,16 +247,14 @@ module Friendly = struct let feature = Reference (inlines, loc) in (match features with | [] -> [feature] - | Inline inlines' :: features -> feature :: (Inline (loop_inlines inlines')) :: features - | features -> feature :: features - ) - | ((Inline inlines) as feature) :: features -> + | Inline inlines' :: features -> feature :: Inline (loop_inlines inlines') :: features + | features -> feature :: features) + | (Inline inlines as feature) :: features -> let features = loop features in (match features with | [] -> [feature] - | (Inline inlines') :: features -> Inline (inlines @ inlines') :: features - | features -> feature :: features - ) + | Inline inlines' :: features -> Inline (inlines @ inlines') :: features + | features -> feature :: features) in fun features -> let features = loop features in @@ -244,82 +266,73 @@ module Friendly = struct (* Capitalizes the first letter in the message. Does not capitalize code or * text in references. 
*) let capitalize = function - | [] -> [] - | (Inline ((Text s)::xs))::message -> (Inline ((Text (String.capitalize_ascii s))::xs))::message - | message -> message + | [] -> [] + | Inline (Text s :: xs) :: message -> + Inline (Text (String.capitalize_ascii s) :: xs) :: message + | message -> message (* Uncapitalizes the first letter in the message. Does not uncapitalize code * or text in references. *) let uncapitalize = function - | [] -> [] - | (Inline ((Text s)::xs))::message -> (Inline ((Text (String.uncapitalize_ascii s))::xs))::message - | message -> message + | [] -> [] + | Inline (Text s :: xs) :: message -> + Inline (Text (String.uncapitalize_ascii s) :: xs) :: message + | message -> message (* Adds some message to the beginning of a group message. *) - let append_group_message message { - group_message; - group_message_list; - } = { - group_message = message @ (text " " :: uncapitalize group_message); - group_message_list; - } + let append_group_message message { group_message; group_message_list } = + { group_message = message @ (text " " :: uncapitalize group_message); group_message_list } (* Creates a message group from the error_message type. If show_all_branches * is false then we will hide speculation branches with a lower score. If any * speculation branches are hidden then the boolean we return will be true. *) let message_group_of_error = let message_of_frames frames acc_frames = - let frames = List.concat (List.rev (frames :: acc_frames)) in - List.concat (intersperse [text " of "] (List.rev frames)) + let frames = Core_list.concat (List.rev (frames :: acc_frames)) in + Core_list.concat (Core_list.intersperse (List.rev frames) [text " of "]) in let rec flatten_speculation_branches - ~show_all_branches - ~hidden_branches - ~high_score - acc_frames - acc - = - function - | [] -> (hidden_branches, high_score, acc) - | (score, error) :: branches -> ( - match error.message with - (* If we have a speculation error with no frames and no root then we want - * to flatten the branches of that error. - * - * We ignore the score for these errors. Instead propagating the - * high_score we already have. *) - | Speculation { branches = nested_branches; frames } when Option.is_none error.root -> - (* We don't perform tail-call recursion here, but it's unlikely that - * speculations will be so deeply nested that we blow the stack. *) - let (hidden_branches, high_score, acc) = + ~show_all_branches ~hidden_branches ~high_score acc_frames acc = function + | [] -> (hidden_branches, high_score, acc) + | (score, error) :: branches -> + (match error.message with + (* If we have a speculation error with no frames and no root then we want + * to flatten the branches of that error. + * + * We ignore the score for these errors. Instead propagating the + * high_score we already have. *) + | Speculation { branches = nested_branches; frames } when Option.is_none error.root -> + (* We don't perform tail-call recursion here, but it's unlikely that + * speculations will be so deeply nested that we blow the stack. *) + let (hidden_branches, high_score, acc) = + flatten_speculation_branches + ~show_all_branches + ~hidden_branches + ~high_score + (frames :: acc_frames) + acc + nested_branches + in + (* Resume recursion in our branches list. *) flatten_speculation_branches ~show_all_branches ~hidden_branches ~high_score - (frames :: acc_frames) + acc_frames acc - nested_branches - in - (* Resume recursion in our branches list. 
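A small sketch of how `message_of_frames` renders the accumulated frame stack: the frame lists are flattened, reversed, and interspersed with `" of "`. Plain strings stand in for message features, and the `intersperse` helper mirrors the one removed earlier in this hunk; the exact frame ordering in Flow depends on how `acc_frames` is built, so treat this as illustrative only.

```ocaml
let rec intersperse sep = function
  | [] -> []
  | [x] -> [x]
  | x1 :: x2 :: xs -> x1 :: sep :: intersperse sep (x2 :: xs)

let message_of_frames frames acc_frames =
  let frames = List.concat (List.rev (frames :: acc_frames)) in
  String.concat "" (intersperse " of " (List.rev frames))

let () =
  (* Prints: property `p` of array element *)
  print_endline (message_of_frames ["property `p`"] [["array element"]])
```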
*) - flatten_speculation_branches - ~show_all_branches - ~hidden_branches - ~high_score - acc_frames - acc - branches - - (* We add every other error if it has an appropriate score. *) - | _ -> - let (high_score, hidden_branches, acc) = ( - if show_all_branches then ( - (* If we are configured to show all branches then always add our - * error to acc. *) - (high_score, hidden_branches, (acc_frames, error) :: acc) - ) else ( - (* If this message has a better score then throw away all old - * messages. We are now hiding some messages. *) - if score > high_score then + branches + (* We add every other error if it has an appropriate score. *) + | _ -> + let (high_score, hidden_branches, acc) = + if show_all_branches then + (* If we are configured to show all branches then always add our + * error to acc. *) + (high_score, hidden_branches, (acc_frames, error) :: acc) + else if + (* If this message has a better score then throw away all old + * messages. We are now hiding some messages. *) + score > high_score + then (score, hidden_branches || acc <> [], [(acc_frames, error)]) (* If this message has the same score as our high score then add * it to acc and keep our high score. *) @@ -329,58 +342,50 @@ module Friendly = struct * the error. We are now hiding at least one message. *) else (high_score, true, acc) - ) - ) in - (* Recurse... *) - flatten_speculation_branches - ~show_all_branches - ~hidden_branches - ~high_score - acc_frames - acc - branches - ) in - let rec loop - ~show_root - ~show_all_branches - ~hidden_branches - acc_frames - error - = + in + (* Recurse... *) + flatten_speculation_branches + ~show_all_branches + ~hidden_branches + ~high_score + acc_frames + acc + branches) + in + let rec loop ~show_root ~show_all_branches ~hidden_branches acc_frames error = match error.message with (* Create normal error messages. *) | Normal { message; frames = Some frames } -> (* Add the frames to our error message. *) let frames = message_of_frames frames acc_frames in - let message = ( - if frames = [] then ( + let message = + if frames = [] then message - ) else ( + else message @ (text " in " :: frames) - ) - ) in + in (* Add the root to our error message when we are configured to show * the root. *) - let message = match error.root with - | Some { root_message; _ } when show_root -> - root_message @ (text " " :: message) - | _ -> - message + let message = + match error.root with + | Some { root_message; _ } when show_root -> root_message @ (text " " :: message) + | _ -> message in (* Finish our error message with a period. But only if frames * is empty! *) let message = message @ [text "."] in (* Get the primary location. It is the root location if error.loc is * outside of the root location. *) - let primary_loc = match error.root with - | None -> error.loc - | Some { root_loc; _ } -> - if Loc.contains root_loc error.loc then error.loc else root_loc + let primary_loc = + match error.root with + | None -> error.loc + | Some { root_loc; _ } -> + if Loc.contains root_loc error.loc then + error.loc + else + root_loc in - (hidden_branches, primary_loc, { - group_message = message; - group_message_list = []; - }) + (hidden_branches, primary_loc, { group_message = message; group_message_list = [] }) (* If there are no frames then we only want to add the root message (if we * have one) and return. We can safely ignore acc_frames. 
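The score bookkeeping above is easier to see in isolation. This is a simplified, standalone sketch of the rule `flatten_speculation_branches` applies: unless `show_all_branches` is set, only the branches with the best score are kept, and a flag records whether anything was hidden. Branches are reduced to `(score, message)` pairs and `filter_branches` is a hypothetical name, not Flow's API.

```ocaml
let filter_branches ~show_all_branches branches =
  let (_, hidden, kept_rev) =
    List.fold_left
      (fun (high_score, hidden, acc) (score, branch) ->
        if show_all_branches then
          (high_score, hidden, (score, branch) :: acc)
        else if score > high_score then
          (* A strictly better score discards everything kept so far. *)
          (score, hidden || acc <> [], [(score, branch)])
        else if score = high_score then
          (high_score, hidden, (score, branch) :: acc)
        else
          (* Lower score: hide this branch. *)
          (high_score, true, acc))
      (min_int, false, [])
      branches
  in
  (hidden, List.rev kept_rev)

let () =
  let (hidden, kept) =
    filter_branches
      ~show_all_branches:false
      [(3, "number"); (5, "string"); (5, "boolean")]
  in
  assert hidden;
  assert (List.map snd kept = ["string"; "boolean"])
```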
If a message has * frames set to None then the message is not equipped to handle @@ -388,21 +393,17 @@ module Friendly = struct | Normal { message; frames = None } -> (* Add the root to our error message when we are configured to show * the root. *) - let message = match error.root with - | Some { root_message; _ } when show_root -> - root_message @ (text " " :: message) - | _ -> - message + let message = + match error.root with + | Some { root_message; _ } when show_root -> root_message @ (text " " :: message) + | _ -> message in - (hidden_branches, error.loc, { - group_message = message; - group_message_list = []; - }) + (hidden_branches, error.loc, { group_message = message; group_message_list = [] }) (* When we have a speculation error, do some work to create a message * group. Flatten out nested speculation errors with no frames. Hide * frames with low scores. Use a single message_group if we hide all but * one branches. *) - | Speculation { frames; branches } -> ( + | Speculation { frames; branches } -> (* Loop through our speculation branches. We will flatten out relevant * union branches and hide branches with a low score in this loop. *) let (hidden_branches, _, speculation_errors_rev) = @@ -414,11 +415,11 @@ module Friendly = struct [] branches in - match speculation_errors_rev with + (match speculation_errors_rev with (* When there is only one branch in acc (we had one branch with a * "high score") and this error does not have a root then loop while * adding the frames from this speculation error message. *) - | (acc_frames', speculation_error) :: [] when Option.is_none speculation_error.root -> + | [(acc_frames', speculation_error)] when Option.is_none speculation_error.root -> loop ~show_root ~show_all_branches @@ -440,17 +441,22 @@ module Friendly = struct else (* Otherwise create a message with our frames and optionally the * error message root. *) - let message = if frames = [] then [] else text "in " :: frames in + let message = + if frames = [] then + [] + else + text "in " :: frames + in (* Add the root to our error message when we are configured to * show the root. *) - let message = match error.root with - | Some { root_message; _ } when show_root -> - if message = [] then - root_message - else - root_message @ (text " " :: message) - | _ -> - message + let message = + match error.root with + | Some { root_message; _ } when show_root -> + if message = [] then + root_message + else + root_message @ (text " " :: message) + | _ -> message in (* Finish our error message with a colon. *) let message = message @ [text ":"] in @@ -458,89 +464,92 @@ module Friendly = struct in (* Get the message group for all of our speculation errors. 
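A sketch of the primary-location rule used for `Normal` errors above: the error's own location wins unless there is a root and the error location falls outside it, in which case the root location is used. Locations are simplified to line ranges and `contains` stands in for `Loc.contains`; illustrative only.

```ocaml
type loc = { start_line : int; end_line : int }

let contains outer inner =
  outer.start_line <= inner.start_line && inner.end_line <= outer.end_line

let primary_loc ~error_loc ~root_loc =
  match root_loc with
  | None -> error_loc
  | Some root_loc ->
    if contains root_loc error_loc then error_loc else root_loc

let () =
  let root = { start_line = 10; end_line = 20 } in
  let inside = { start_line = 12; end_line = 12 } in
  let outside = { start_line = 42; end_line = 42 } in
  assert (primary_loc ~error_loc:inside ~root_loc:(Some root) = inside);
  assert (primary_loc ~error_loc:outside ~root_loc:(Some root) = root)
```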
*) let (hidden_branches, group_message_list) = - List.fold_left (fun (hidden_branches, group_message_list) (acc_frames', error) -> - let (hidden_branches, _, message_group) = - loop - ~show_root:true - ~show_all_branches - ~hidden_branches - acc_frames' - error - in - (hidden_branches, message_group :: group_message_list) - ) (hidden_branches, []) (List.rev speculation_errors_rev) + List.fold_left + (fun (hidden_branches, group_message_list) (acc_frames', error) -> + let (hidden_branches, _, message_group) = + loop ~show_root:true ~show_all_branches ~hidden_branches acc_frames' error + in + (hidden_branches, message_group :: group_message_list)) + (hidden_branches, []) + (List.rev speculation_errors_rev) in - (hidden_branches, error.loc, { - group_message = message; - group_message_list = List.mapi (fun i message_group -> - append_group_message - (if i = 0 then [text "Either"] else [text "Or"]) - message_group - ) group_message_list; - }) - ) + ( hidden_branches, + error.loc, + { + group_message = message; + group_message_list = + Core_list.mapi + ~f:(fun i message_group -> + append_group_message + ( if i = 0 then + [text "Either"] + else + [text "Or"] ) + message_group) + group_message_list; + } )) in (* Partially apply loop with the state it needs. Have fun! *) loop ~hidden_branches:false [] - let extract_references_message_intermediate - ~next_id ~loc_to_id ~id_to_loc ~message = + let extract_references_message_intermediate ~next_id ~loc_to_id ~id_to_loc ~message = let (next_id, loc_to_id, id_to_loc, message) = - List.fold_left (fun (next_id, loc_to_id, id_to_loc, message) message_feature -> - match message_feature with - | Inline inlines -> - (next_id, loc_to_id, id_to_loc, Inline inlines :: message) - | Reference (inlines, loc) -> - (match LocMap.get loc loc_to_id with - | Some id -> - (next_id, loc_to_id, id_to_loc, Reference (inlines, id) :: message) - | None -> - let id = next_id in - let loc_to_id = LocMap.add loc id loc_to_id in - let id_to_loc = IMap.add id loc id_to_loc in - (next_id + 1, loc_to_id, id_to_loc, Reference (inlines, id) :: message) - ) - ) (next_id, loc_to_id, id_to_loc, []) message + List.fold_left + (fun (next_id, loc_to_id, id_to_loc, message) message_feature -> + match message_feature with + | Inline inlines -> (next_id, loc_to_id, id_to_loc, Inline inlines :: message) + | Reference (inlines, loc) -> + (match LocMap.get loc loc_to_id with + | Some id -> (next_id, loc_to_id, id_to_loc, Reference (inlines, id) :: message) + | None -> + let id = next_id in + let loc_to_id = LocMap.add loc id loc_to_id in + let id_to_loc = IMap.add id loc id_to_loc in + (next_id + 1, loc_to_id, id_to_loc, Reference (inlines, id) :: message))) + (next_id, loc_to_id, id_to_loc, []) + message in (next_id, loc_to_id, id_to_loc, List.rev message) (* The intermediate fold extract_references uses. Returns both a loc_to_id map * and an id_to_loc map. These maps are the inverses of one another. Also * returns a transformed message. 
*) - let rec extract_references_intermediate - ~next_id ~loc_to_id ~id_to_loc ~message_group = + let rec extract_references_intermediate ~next_id ~loc_to_id ~id_to_loc ~message_group = let (next_id, loc_to_id, id_to_loc, group_message) = extract_references_message_intermediate - ~next_id ~loc_to_id ~id_to_loc - ~message:(message_group.group_message) + ~next_id + ~loc_to_id + ~id_to_loc + ~message:message_group.group_message in let (next_id, loc_to_id, id_to_loc, group_message_list_rev) = - List.fold_left (fun (next_id, loc_to_id, id_to_loc, group_message_list_rev) message_group -> - let (next_id, loc_to_id, id_to_loc, message_group) = - extract_references_intermediate - ~next_id ~loc_to_id ~id_to_loc ~message_group - in - (next_id, loc_to_id, id_to_loc, message_group :: group_message_list_rev) - ) (next_id, loc_to_id, id_to_loc, []) message_group.group_message_list + List.fold_left + (fun (next_id, loc_to_id, id_to_loc, group_message_list_rev) message_group -> + let (next_id, loc_to_id, id_to_loc, message_group) = + extract_references_intermediate ~next_id ~loc_to_id ~id_to_loc ~message_group + in + (next_id, loc_to_id, id_to_loc, message_group :: group_message_list_rev)) + (next_id, loc_to_id, id_to_loc, []) + message_group.group_message_list in - (next_id, loc_to_id, id_to_loc, { - group_message; - group_message_list = List.rev group_message_list_rev; - }) + ( next_id, + loc_to_id, + id_to_loc, + { group_message; group_message_list = List.rev group_message_list_rev } ) (* Extracts common location references from a message. In order, each location * will be replaced with an integer reference starting at 1. If some reference * has the same location as another then they will share an id. *) - let extract_references : Loc.t message_group -> (Loc.t IMap.t * int message_group) = - fun message_group -> - let (_, _, id_to_loc, message) = - extract_references_intermediate - ~next_id:1 - ~loc_to_id:LocMap.empty - ~id_to_loc:IMap.empty - ~message_group - in - (id_to_loc, message) + let extract_references : Loc.t message_group -> Loc.t IMap.t * int message_group = + fun message_group -> + let (_, _, id_to_loc, message) = + extract_references_intermediate + ~next_id:1 + ~loc_to_id:LocMap.empty + ~id_to_loc:IMap.empty + ~message_group + in + (id_to_loc, message) (* Turns a group_message back into a message. We do this by adding all the * messages together. We don't insert newlines. This is a suboptimal @@ -551,12 +560,13 @@ module Friendly = struct in fun message_group -> let acc = loop [] message_group in - List.concat (intersperse [text " "] (List.rev acc)) + Core_list.concat (Core_list.intersperse (List.rev acc) [text " "]) (* Converts our friendly error to a classic error message. *) let to_classic error = let (_, loc, message) = - message_group_of_error ~show_all_branches:false ~show_root:true error in + message_group_of_error ~show_all_branches:false ~show_root:true error + in (* Extract the references from the message. *) let (references, message) = extract_references message in (* We use a basic strategy that concatenates all group messages together. @@ -564,119 +574,121 @@ module Friendly = struct * the classic format. *) let message = message_of_group_message message in (* Turn the message into a string. 
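A standalone sketch of the reference-extraction idea behind `extract_references`: walk the message features, replace each location with a small integer id assigned in order of first appearance, and reuse the id when the same location shows up again, returning the inverse id-to-location map alongside the rewritten message. Locations are plain strings and the maps are local stand-ins for Flow's `LocMap`/`IMap`; illustrative only.

```ocaml
module StrMap = Map.Make (String)
module IntMap = Map.Make (struct type t = int let compare = compare end)

type 'loc feature =
  | Inline of string
  | Reference of string * 'loc

let extract_references features =
  let (_, _, id_to_loc, rev) =
    List.fold_left
      (fun (next_id, loc_to_id, id_to_loc, acc) feature ->
        match feature with
        | Inline s -> (next_id, loc_to_id, id_to_loc, Inline s :: acc)
        | Reference (text, loc) ->
          (match StrMap.find_opt loc loc_to_id with
          | Some id -> (next_id, loc_to_id, id_to_loc, Reference (text, id) :: acc)
          | None ->
            let id = next_id in
            ( next_id + 1,
              StrMap.add loc id loc_to_id,
              IntMap.add id loc id_to_loc,
              Reference (text, id) :: acc )))
      (1, StrMap.empty, IntMap.empty, [])
      features
  in
  (id_to_loc, List.rev rev)

let () =
  let (ids, msg) =
    extract_references
      [ Inline "Cannot call "; Reference ("f", "a.js:3:1");
        Inline " with "; Reference ("x", "a.js:5:9");
        Inline " bound to "; Reference ("f", "a.js:3:1") ]
  in
  (* Two distinct locations, and the repeated one reuses id 1. *)
  assert (IntMap.cardinal ids = 2);
  match msg with
  | [_; Reference (_, 1); _; Reference (_, 2); _; Reference (_, 1)] -> ()
  | _ -> assert false
```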
*) - let message = List.fold_left (fun message -> function - | Inline inlines -> - message ^ string_of_message_inlines inlines - | Reference (inlines, id) -> - message ^ string_of_message_inlines inlines ^ " [" ^ string_of_int id ^ "]" - ) "" message in + let message = + List.fold_left + (fun message -> function + | Inline inlines -> message ^ string_of_message_inlines inlines + | Reference (inlines, id) -> + message ^ string_of_message_inlines inlines ^ " [" ^ string_of_int id ^ "]") + "" + message + in { messages = [BlameM (loc, message)]; - extra = ( - if not (IMap.is_empty references) then - (InfoLeaf [(Loc.none, ["References:"])]):: - (references - |> IMap.bindings - |> List.map (fun (id, loc) -> - InfoLeaf [(loc, ["[" ^ string_of_int id ^ "]"])])) + extra = + ( if not (IMap.is_empty references) then + InfoLeaf [(Loc.none, ["References:"])] + :: ( references + |> IMap.bindings + |> Core_list.map ~f:(fun (id, loc) -> + InfoLeaf [(loc, ["[" ^ string_of_int id ^ "]"])]) ) else - [] - ); + [] ); } end -type error = error_kind * message list * Friendly.t - -let is_duplicate_provider_error (kind, _, _) = kind = DuplicateProviderError +type 'loc printable_error = error_kind * 'loc message list * 'loc Friendly.t' let info_to_messages = function -| loc, [] -> [BlameM (loc, "")] -| loc, msg :: msgs -> - BlameM (loc, msg) :: - (msgs |> List.map (fun msg -> CommentM msg)) + | (loc, []) -> [BlameM (loc, "")] + | (loc, msg :: msgs) -> BlameM (loc, msg) :: (msgs |> Core_list.map ~f:(fun msg -> CommentM msg)) -let infos_to_messages infos = - List.concat (List.map info_to_messages infos) +let infos_to_messages infos = Core_list.(infos >>= info_to_messages) let mk_error - ?(kind=InferError) - ?trace_infos - ?root - ?frames - loc - message -= - let open Friendly in - let trace = Option.value_map trace_infos ~default:[] ~f:infos_to_messages in - let message = match kind with - | LintError kind -> message @ [text " ("; code (Lints.string_of_kind kind); text ")"] - | _ -> message - in - (kind, trace, { - loc = ALoc.to_loc loc; - root = Option.map root (fun (root_loc, root_message) -> { root_loc; root_message }); - message = Normal { message; frames }; - }) - -let mk_speculation_error - ?(kind=InferError) - ?trace_infos - ~loc - ~root - ~frames - ~speculation_errors -= - let open Friendly in - let trace = Option.value_map trace_infos ~default:[] ~f:infos_to_messages in - let branches = List.map (fun (score, (_, _, error)) -> - (score, error) - ) speculation_errors in - (kind, trace, { - loc; - root = Option.map root (fun (root_loc, root_message) -> { root_loc; root_message }); - message = Speculation { frames; branches }; - }) + ?(kind = InferError) + ?(trace_infos : 'loc info list option) + ?(root : ('loc * 'loc Friendly.message) option) + ?(frames : 'loc Friendly.message list option) + (loc : 'loc) + (message : 'loc Friendly.message) : 'loc printable_error = + Friendly.( + let trace = Option.value_map trace_infos ~default:[] ~f:infos_to_messages in + let message = + match kind with + | LintError kind -> message @ [text " ("; code (Lints.string_of_kind kind); text ")"] + | _ -> message + in + ( kind, + trace, + { + loc; + root = Option.map root (fun (root_loc, root_message) -> { root_loc; root_message }); + message = Normal { message; frames }; + } )) + +let mk_speculation_error ?(kind = InferError) ?trace_infos ~loc ~root ~frames ~speculation_errors = + Friendly.( + let trace = Option.value_map trace_infos ~default:[] ~f:infos_to_messages in + let branches = + Core_list.map ~f:(fun (score, (_, _, error)) 
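A sketch of how `to_classic` flattens a friendly message into the classic single-string format: inline text is concatenated, each reference is followed by its id in square brackets, and a trailing "References:" section lists id-to-location bindings. Strings stand in for locations and for the `BlameM`/`InfoLeaf` structures; illustrative only.

```ocaml
type feature =
  | Inline of string
  | Reference of string * int

let to_classic_string features references =
  let body =
    List.fold_left
      (fun acc -> function
        | Inline s -> acc ^ s
        | Reference (s, id) -> acc ^ s ^ " [" ^ string_of_int id ^ "]")
      "" features
  in
  let refs =
    List.map (fun (id, loc) -> Printf.sprintf "[%d]: %s" id loc) references
  in
  String.concat "\n" (body :: "References:" :: refs)

let () =
  print_endline
    (to_classic_string
       [ Inline "Cannot call "; Reference ("`f`", 1); Inline " with ";
         Reference ("`x`", 2); Inline "." ]
       [(1, "a.js:3:1"); (2, "a.js:5:9")])
(* Cannot call `f` [1] with `x` [2].
   References:
   [1]: a.js:3:1
   [2]: a.js:5:9 *)
```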
-> (score, error)) speculation_errors + |> ListUtils.dedup + in + ( kind, + trace, + { + loc; + root = Option.map root (fun (root_loc, root_message) -> { root_loc; root_message }); + message = Speculation { frames; branches }; + } )) (*******************************) let to_pp = function - | BlameM (loc, s) -> loc, s - | CommentM s -> Loc.none, s + | BlameM (loc, s) -> (loc, s) + | CommentM s -> (Loc.none, s) type stdin_file = (Path.t * string) option let append_trace_reasons message_list trace_reasons = match trace_reasons with | [] -> message_list - | _ -> - message_list @ (BlameM (Loc.none, "Trace:") :: trace_reasons) - + | _ -> message_list @ (BlameM (Loc.none, "Trace:") :: trace_reasons) let default_style text = (Tty.Normal Tty.Default, text) + let source_fragment_style text = (Tty.Normal Tty.Default, text) + let error_fragment_style text = (Tty.Normal Tty.Red, text) + let warning_fragment_style text = (Tty.Normal Tty.Yellow, text) + let line_number_style text = (Tty.Bold Tty.Default, text) + let comment_style text = (Tty.Bold Tty.Default, text) + let comment_file_style text = (Tty.BoldUnderline Tty.Default, text) -let dim_style text = (Tty.Dim Tty.Default, text) +let dim_style text = (Tty.Dim Tty.Default, text) let lib_prefix = "[LIB] " + let is_short_lib filename = let len = String.length lib_prefix in String.length filename > len && String.sub filename 0 len = lib_prefix let relative_path ~strip_root filename = - if is_short_lib filename || Filename.is_relative filename - then filename - else match strip_root with - | Some root -> Files.relative_path (Path.to_string root) filename - | None -> - let relname = Files.relative_path (Sys.getcwd ()) filename in - if String.length relname < String.length filename - then relname - else filename + if is_short_lib filename || Filename.is_relative filename then + filename + else + match strip_root with + | Some root -> Files.relative_path (Path.to_string root) filename + | None -> + let relname = Files.relative_path (Sys.getcwd ()) filename in + if String.length relname < String.length filename then + relname + else + filename let relative_lib_path ~strip_root filename = let sep = Filename.dir_sep in @@ -692,12 +704,8 @@ let relative_lib_path ~strip_root filename = let highlight_error_in_line ~severity_style line c0 c1 = let prefix = String.sub line 0 c0 in let fragment = String.sub line c0 (c1 - c0) in - let suffix = String.sub line c1 ((String.length line) - c1) in - [ - source_fragment_style prefix; - severity_style fragment; - source_fragment_style suffix; - ] + let suffix = String.sub line c1 (String.length line - c1) in + [source_fragment_style prefix; severity_style fragment; source_fragment_style suffix] (* 0-indexed * @@ -705,304 +713,348 @@ let highlight_error_in_line ~severity_style line c0 c1 = * will not be returned. 
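A sketch of the line-highlighting split performed by `highlight_error_in_line`: a source line is cut into the text before the error, the offending fragment, and the text after it, so the middle piece can be printed in the severity color. Styles are reduced to string labels here; illustrative only.

```ocaml
let highlight_error_in_line line c0 c1 =
  let prefix = String.sub line 0 c0 in
  let fragment = String.sub line c0 (c1 - c0) in
  let suffix = String.sub line c1 (String.length line - c1) in
  [("default", prefix); ("error", fragment); ("default", suffix)]

let () =
  assert (
    highlight_error_in_line "let x: string = 42;" 16 18
    = [("default", "let x: string = "); ("error", "42"); ("default", ";")])
```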
*) let get_lines ~start ~len content = let rec loop ~start ~len ~acc ~pos content = - if len = 0 || pos > String.length content - then List.rev acc - else begin + if len = 0 || pos > String.length content then + List.rev acc + else let next_newline = - try String.index_from content pos '\n' - with Not_found -> String.length content + (try String.index_from content pos '\n' with Not_found -> String.length content) in let continue = if start < 0 then raise (Invalid_argument "can't choose negative line") else if start = 0 then - let acc = (String.sub content pos (next_newline - pos))::acc in + let acc = String.sub content pos (next_newline - pos) :: acc in loop ~start ~len:(len - 1) ~acc else loop ~start:(start - 1) ~len ~acc - in - - continue ~pos:(next_newline + 1) content - end + in + continue ~pos:(next_newline + 1) content in loop ~start ~len ~acc:[] ~pos:0 content +let read_file filename = + if Filename.is_relative filename then + failwith (Utils_js.spf "Expected absolute location, got %s" filename); + Sys_utils.cat_or_failed filename + +let get_offset_table_expensive ~stdin_file loc = + Option.( + Utils_js.( + let content = + let path = Loc.source loc >>= File_key.to_path %> Core_result.ok in + match stdin_file with + | Some (stdin_path, contents) when path = Some (Path.to_string stdin_path) -> Some contents + | _ -> path >>= read_file + in + content >>| Offset_utils.make)) + let read_lines_in_file loc filename stdin_file = match filename with - | None -> - None + | None -> None | Some filename -> - try begin - let content = match stdin_file with - | Some (stdin_filename, content) - when Path.to_string stdin_filename = filename -> - content - | _ -> - if Filename.is_relative filename then failwith - (Utils_js.spf "Expected absolute location, got %s" filename); - Sys_utils.cat filename - in - try - let open Loc in - let lines = get_lines - ~start:(loc.start.line - 1) - ~len:(loc._end.line - loc.start.line + 1) - content in - match lines with - | [] -> None - | first::rest -> Some (first, rest) - with Invalid_argument _ -> None - end with Sys_error _ -> None + let content_opt = + match stdin_file with + | Some (stdin_filename, content) when Path.to_string stdin_filename = filename -> + Some content + | _ -> read_file filename + in + (match content_opt with + | None -> None + | Some content -> + (try + Loc.( + let lines = + get_lines + ~start:(loc.start.line - 1) + ~len:(loc._end.line - loc.start.line + 1) + content + in + match lines with + | [] -> None + | first :: rest -> Some (first, rest)) + with Invalid_argument _ -> None)) let file_of_source source = match source with - | Some File_key.LibFile filename -> - let filename = - if is_short_lib filename - then begin - let prefix_len = String.length lib_prefix in - String.sub filename prefix_len (String.length filename - prefix_len) - end else filename in - Some filename - | Some File_key.SourceFile filename - | Some File_key.JsonFile filename - | Some File_key.ResourceFile filename -> - Some filename - | Some File_key.Builtins -> None - | None -> None - -let loc_of_error ((_, _, { Friendly.loc; _ }): error) = - loc - -let loc_of_error_for_compare ((_, _, err): error) = - let open Friendly in - match err with - | { root=Some {root_loc; _}; _ } -> root_loc - | { loc; _ } -> loc - -let locs_of_error = - let locs_of_message locs message = Friendly.( - List.fold_left (fun locs feature -> - match feature with - | Inline _ -> locs - | Reference (_, loc) -> loc :: locs - ) locs message - ) in - - let rec locs_of_friendly_error locs error = 
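A standalone sketch of `get_lines`: return up to `len` lines of `content` starting at the 0-indexed line `start`, with lines past the end of the string simply not returned. It mirrors the recursion in the diff but omits the negative-`start` check; illustrative only.

```ocaml
let get_lines ~start ~len content =
  let rec loop ~start ~len ~acc ~pos =
    if len = 0 || pos > String.length content then
      List.rev acc
    else
      let next_newline =
        try String.index_from content pos '\n'
        with Not_found -> String.length content
      in
      if start = 0 then
        (* This is one of the requested lines: keep it and move on. *)
        let acc = String.sub content pos (next_newline - pos) :: acc in
        loop ~start ~len:(len - 1) ~acc ~pos:(next_newline + 1)
      else
        (* Still skipping lines before the requested range. *)
        loop ~start:(start - 1) ~len ~acc ~pos:(next_newline + 1)
  in
  loop ~start ~len ~acc:[] ~pos:0

let () =
  let content = "line 1\nline 2\nline 3\n" in
  assert (get_lines ~start:1 ~len:2 content = ["line 2"; "line 3"]);
  assert (get_lines ~start:5 ~len:2 content = [])
```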
Friendly.( - let { loc; root; message } = error in - let locs = - Option.value_map root - ~default:locs - ~f:(fun { root_message; root_loc } -> - root_loc :: locs_of_message locs root_message) - in - let locs = match message with - | Normal { frames; message } -> - let locs = Option.value_map frames ~default:locs ~f:(List.fold_left locs_of_message locs) in - let locs = locs_of_message locs message in - locs - | Speculation { frames; branches } -> - let locs = List.fold_left locs_of_message locs frames in - let locs = List.fold_left (fun locs (_, error) -> - locs_of_friendly_error locs error) locs branches in - locs + | Some (File_key.LibFile filename) -> + let filename = + if is_short_lib filename then + let prefix_len = String.length lib_prefix in + String.sub filename prefix_len (String.length filename - prefix_len) + else + filename in - let locs = loc :: locs in - locs - ) in - - fun ((_, _, error): error) -> - locs_of_friendly_error [] error + Some filename + | Some (File_key.SourceFile filename) + | Some (File_key.JsonFile filename) + | Some (File_key.ResourceFile filename) -> + Some filename + | Some File_key.Builtins -> None + | None -> None + +let loc_of_printable_error ((_, _, { Friendly.loc; _ }) : 'loc printable_error) = loc + +let loc_of_printable_error_for_compare ((_, _, err) : 'a printable_error) = + Friendly.( + match err with + | { root = Some { root_loc; _ }; _ } -> root_loc + | { loc; _ } -> loc) + +let locs_of_printable_error = + let locs_of_message locs message = + Friendly.( + List.fold_left + (fun locs feature -> + match feature with + | Inline _ -> locs + | Reference (_, loc) -> loc :: locs) + locs + message) + in + let rec locs_of_friendly_error locs error = + Friendly.( + let { loc; root; message } = error in + let locs = + Option.value_map root ~default:locs ~f:(fun { root_message; root_loc } -> + root_loc :: locs_of_message locs root_message) + in + let locs = + match message with + | Normal { frames; message } -> + let locs = + Option.value_map frames ~default:locs ~f:(List.fold_left locs_of_message locs) + in + let locs = locs_of_message locs message in + locs + | Speculation { frames; branches } -> + let locs = List.fold_left locs_of_message locs frames in + let locs = + List.fold_left (fun locs (_, error) -> locs_of_friendly_error locs error) locs branches + in + locs + in + let locs = loc :: locs in + locs) + in + (fun ((_, _, error) : 'loc printable_error) -> locs_of_friendly_error [] error) -let kind_of_error (kind, _, _) = kind +let kind_of_printable_error (kind, _, _) = kind (* TODO: deprecate this in favor of Reason.json_of_loc *) -let deprecated_json_props_of_loc ~strip_root loc = Loc.( - let file = match loc.source with - | Some x -> Hh_json.JSON_String (Reason.string_of_source ~strip_root x) - | None -> Hh_json.JSON_String "" (* TODO: return Hh_json.JSON_Null *) - in - [ "path", file; - "line", Hh_json.int_ loc.start.line; - "endline", Hh_json.int_ loc._end.line; - "start", Hh_json.int_ (loc.start.column + 1); - "end", Hh_json.int_ loc._end.column ] -) +let deprecated_json_props_of_loc ~strip_root loc = + Loc.( + let file = + match loc.source with + | Some x -> Hh_json.JSON_String (Reason.string_of_source ~strip_root x) + | None -> Hh_json.JSON_String "" + (* TODO: return Hh_json.JSON_Null *) + in + [ + ("path", file); + ("line", Hh_json.int_ loc.start.line); + ("endline", Hh_json.int_ loc._end.line); + ("start", Hh_json.int_ (loc.start.column + 1)); + ("end", Hh_json.int_ loc._end.column); + ]) (* first reason's position, then second reason's 
position, etc.; if all positions match then first message, then second message, etc. for friendly errors check the location, docs slug, and then message. *) -let rec compare = +let rec compare compare_loc = let kind_cmp = (* show internal errors first, then duplicate provider errors, then parse errors, then recursion limit errors. then both infer warnings and errors at the same priority. then lint errors *) let order_of_kind = function - | InternalError -> 1 - | DuplicateProviderError -> 2 - | ParseError -> 3 - | RecursionLimitError -> 4 - | InferError -> 5 - | InferWarning -> 5 - | LintError _ -> 6 + | InternalError -> 1 + | DuplicateProviderError -> 2 + | ParseError -> 3 + | PseudoParseError -> 3 + | RecursionLimitError -> 4 + | InferError -> 5 + | InferWarning _ -> 5 + | LintError _ -> 6 in - fun k1 k2 -> (order_of_kind k1) - (order_of_kind k2) + (fun k1 k2 -> order_of_kind k1 - order_of_kind k2) in let rec compare_lists f list1 list2 = - match list1, list2 with - | [], [] -> 0 - | [], _ -> -1 - | _, [] -> 1 - | hd1::tl1, hd2::tl2 -> - let k = f hd1 hd2 in - if k = 0 then compare_lists f tl1 tl2 else k + match (list1, list2) with + | ([], []) -> 0 + | ([], _) -> -1 + | (_, []) -> 1 + | (hd1 :: tl1, hd2 :: tl2) -> + let k = f hd1 hd2 in + if k = 0 then + compare_lists f tl1 tl2 + else + k in let compare_option f o1 o2 = - match o1, o2 with - | Some x1, Some x2 -> f x1 x2 - | Some _, None -> 1 - | None, Some _ -> -1 - | None, None -> 0 + match (o1, o2) with + | (Some x1, Some x2) -> f x1 x2 + | (Some _, None) -> 1 + | (None, Some _) -> -1 + | (None, None) -> 0 in let compare_message_inline m1 m2 = - let open Friendly in - match m1, m2 with - | Text s1, Text s2 -> String.compare s1 s2 - | Text _, Code _ -> 1 - | Code _, Text _ -> -1 - | Code s1, Code s2 -> String.compare s1 s2 + Friendly.( + match (m1, m2) with + | (Text s1, Text s2) -> String.compare s1 s2 + | (Text _, Code _) -> 1 + | (Code _, Text _) -> -1 + | (Code s1, Code s2) -> String.compare s1 s2) in let compare_message_feature m1 m2 = - let open Friendly in - match m1, m2 with - | Inline m1, Inline m2 -> compare_lists compare_message_inline m1 m2 - | Inline _, Reference _ -> 1 - | Reference _, Inline _ -> -1 - | Reference (m1, loc1), Reference (m2, loc2) -> - let k = Loc.compare loc1 loc2 in - if k = 0 then compare_lists compare_message_inline m1 m2 else k + Friendly.( + match (m1, m2) with + | (Inline m1, Inline m2) -> compare_lists compare_message_inline m1 m2 + | (Inline _, Reference _) -> 1 + | (Reference _, Inline _) -> -1 + | (Reference (m1, loc1), Reference (m2, loc2)) -> + let k = compare_loc loc1 loc2 in + if k = 0 then + compare_lists compare_message_inline m1 m2 + else + k) in let compare_friendly_message m1 m2 = - let open Friendly in - match m1, m2 with - | Normal { frames=fs1; message=m1 }, Normal { frames=fs2; message=m2 } -> - let k = compare_option (compare_lists (compare_lists compare_message_feature)) fs1 fs2 in - if k = 0 then compare_lists compare_message_feature m1 m2 else k - | Normal _, Speculation _ -> -1 - | Speculation _, Normal _ -> 1 - | Speculation { frames=fs1; branches=b1 }, Speculation { frames=fs2; branches=b2 } -> - let k = compare_lists (compare_lists compare_message_feature) fs1 fs2 in - if k = 0 then - let k = List.length b1 - List.length b2 in + Friendly.( + match (m1, m2) with + | (Normal { frames = fs1; message = m1 }, Normal { frames = fs2; message = m2 }) -> + let k = compare_option (compare_lists (compare_lists compare_message_feature)) fs1 fs2 in + if k = 0 then + compare_lists 
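The ordering described here rests on two small pieces: a coarse rank per error kind and a lexicographic comparison of lists. The sketch below reproduces both in isolation; the `kind` variant is simplified (no `PseudoParseError` payload-carrying constructors) and the names are local stand-ins, not Flow's.

```ocaml
type kind =
  | InternalError
  | DuplicateProviderError
  | ParseError
  | RecursionLimitError
  | InferError
  | InferWarning
  | LintError

(* Internal errors sort first, lint errors last; warnings and errors share
 * a rank so they interleave by location. *)
let order_of_kind = function
  | InternalError -> 1
  | DuplicateProviderError -> 2
  | ParseError -> 3
  | RecursionLimitError -> 4
  | InferError | InferWarning -> 5
  | LintError -> 6

let kind_cmp k1 k2 = order_of_kind k1 - order_of_kind k2

(* Lexicographic list comparison, as in the compare_lists helper: a list
 * that is a strict prefix of another orders before it. *)
let rec compare_lists f list1 list2 =
  match (list1, list2) with
  | ([], []) -> 0
  | ([], _) -> -1
  | (_, []) -> 1
  | (hd1 :: tl1, hd2 :: tl2) ->
    let k = f hd1 hd2 in
    if k = 0 then compare_lists f tl1 tl2 else k

let () =
  assert (kind_cmp ParseError LintError < 0);
  assert (compare_lists compare [1; 2] [1; 2; 3] < 0);
  assert (compare_lists compare [2] [1; 9; 9] > 0)
```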
compare_message_feature m1 m2 + else + k + | (Normal _, Speculation _) -> -1 + | (Speculation _, Normal _) -> 1 + | (Speculation { frames = fs1; branches = b1 }, Speculation { frames = fs2; branches = b2 }) + -> + let k = compare_lists (compare_lists compare_message_feature) fs1 fs2 in if k = 0 then - compare_lists (fun (_, err1) (_, err2) -> - compare (InferError, [], err1) (InferError, [], err2) - ) b1 b2 - else k - else k + let k = List.length b1 - List.length b2 in + if k = 0 then + compare_lists + (fun (_, err1) (_, err2) -> + compare compare_loc (InferError, [], err1) (InferError, [], err2)) + b1 + b2 + else + k + else + k) in fun err1 err2 -> - let open Friendly in - let loc1, loc2 = loc_of_error_for_compare err1, loc_of_error_for_compare err2 in - let (k1, _, err1), (k2, _, err2) = err1, err2 in - let k = Loc.compare loc1 loc2 in - if k = 0 then - let k = kind_cmp k1 k2 in - if k = 0 then match err1, err2 with - | { root=Some _; _ }, { root=None; _ } -> -1 - | { root=None; _ }, { root=Some _; _ } -> 1 - | { root=Some { root_message=rm1; _ }; loc=loc1; message=m1 }, - { root=Some { root_message=rm2; _ }; loc=loc2; message=m2 } -> - let k = compare_lists compare_message_feature rm1 rm2 in + Friendly.( + let (loc1, loc2) = + (loc_of_printable_error_for_compare err1, loc_of_printable_error_for_compare err2) + in + let ((k1, _, err1), (k2, _, err2)) = (err1, err2) in + let k = compare_loc loc1 loc2 in + if k = 0 then + let k = kind_cmp k1 k2 in if k = 0 then - let k = Loc.compare loc1 loc2 in - if k = 0 then compare_friendly_message m1 m2 - else k - else k - | { root=None; message=m1; _ }, - { root=None; message=m2; _ } -> - compare_friendly_message m1 m2 - else k - else k - -module Error = struct - type t = error - let compare = compare -end + match (err1, err2) with + | ({ root = Some _; _ }, { root = None; _ }) -> -1 + | ({ root = None; _ }, { root = Some _; _ }) -> 1 + | ( { root = Some { root_message = rm1; _ }; loc = loc1; message = m1 }, + { root = Some { root_message = rm2; _ }; loc = loc2; message = m2 } ) -> + let k = compare_lists compare_message_feature rm1 rm2 in + if k = 0 then + let k = compare_loc loc1 loc2 in + if k = 0 then + compare_friendly_message m1 m2 + else + k + else + k + | ({ root = None; message = m1; _ }, { root = None; message = m2; _ }) -> + compare_friendly_message m1 m2 + else + k + else + k) -(* we store errors in sets, currently, because distinct - traces may share endpoints, and produce the same error *) -module ErrorSet = Set.Make(Error) +module ConcreteLocPrintableErrorSet = Set.Make (struct + type t = Loc.t printable_error -type error_group = + let compare = compare Loc.compare +end) + +type 'a error_group = (* Friendly errors without a root are never grouped. When traces are enabled * all friendly errors will never group. *) - | Singleton of error_kind * message list * Friendly.t + | Singleton of error_kind * 'a message list * 'a Friendly.t' (* Friendly errors that share a root are grouped together. The errors list * is reversed. *) | Group of { kind: error_kind; - root: Loc.t Friendly.error_root; - errors_rev: Friendly.t Nel.t; + root: 'a Friendly.error_root; + errors_rev: 'a Friendly.t' Nel.t; omitted: int; } -exception Interrupt_ErrorSet_fold of error_group list +exception Interrupt_PrintableErrorSet_fold of Loc.t error_group list -(* Folds an ErrorSet into a grouped list. However, the group and all sub-groups +(* Folds an PrintableErrorSet into a grouped list. However, the group and all sub-groups * are in reverse order. 
*) let collect_errors_into_groups max set = - let open Friendly in - try - let (_, acc) = ErrorSet.fold (fun (kind, trace, error) (n, acc) -> - let omit = Option.value_map max ~default:false ~f:(fun max -> max <= n) in - let acc = match error with - | error when trace <> [] -> - if omit then raise (Interrupt_ErrorSet_fold acc); - Singleton (kind, trace, error) :: acc - | ({ root = None; _ } as error) -> - if omit then raise (Interrupt_ErrorSet_fold acc); - Singleton (kind, trace, error) :: acc - (* Friendly errors with a root might need to be grouped. *) - | ({ root = Some root; _ } as error) -> - (match acc with - (* When the root location and message match the previous group, add our - * friendly error to the group. We can do this by only looking at the last - * group because ErrorSet is sorted so that friendly errors with the same - * root loc/message are stored next to each other. - * - * If we are now omitting errors then increment the omitted count - * instead of adding a message. *) - | Group {kind = kind'; root = root'; errors_rev; omitted} :: acc' when ( - kind = kind' && - Loc.compare root.root_loc root'.root_loc = 0 && - root.root_message = root'.root_message - ) -> - Group { - kind = kind'; - root = root'; - errors_rev = if omit then errors_rev else Nel.cons error errors_rev; - omitted = if omit then omitted + 1 else omitted; - } :: acc' - (* If the roots did not match then we have a friendly singleton. *) - | _ -> - if omit then raise (Interrupt_ErrorSet_fold acc); - Group { - kind = kind; - root = root; - errors_rev = Nel.one error; - omitted = 0; - } :: acc - ) + Friendly.( + try + let (_, acc) = + ConcreteLocPrintableErrorSet.fold + (fun (kind, trace, error) (n, acc) -> + let omit = Option.value_map max ~default:false ~f:(fun max -> max <= n) in + let acc = + match error with + | error when trace <> [] -> + if omit then raise (Interrupt_PrintableErrorSet_fold acc); + Singleton (kind, trace, error) :: acc + | { root = None; _ } as error -> + if omit then raise (Interrupt_PrintableErrorSet_fold acc); + Singleton (kind, trace, error) :: acc + (* Friendly errors with a root might need to be grouped. *) + | { root = Some root; _ } as error -> + (match acc with + (* When the root location and message match the previous group, add our + * friendly error to the group. We can do this by only looking at the last + * group because PrintableErrorSet is sorted so that friendly errors with the same + * root loc/message are stored next to each other. + * + * If we are now omitting errors then increment the omitted count + * instead of adding a message. *) + | Group { kind = kind'; root = root'; errors_rev; omitted } :: acc' + when kind = kind' + && Loc.compare root.root_loc root'.root_loc = 0 + && root.root_message = root'.root_message -> + Group + { + kind = kind'; + root = root'; + errors_rev = + ( if omit then + errors_rev + else + Nel.cons error errors_rev ); + omitted = + ( if omit then + omitted + 1 + else + omitted ); + } + :: acc' + (* If the roots did not match then we have a friendly singleton. 
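A much-simplified sketch of the grouping pass: errors arrive sorted, so an error whose root matches the root of the group most recently built is folded into that group instead of starting a new one. Roots and errors are plain strings, and the `max`/omission handling is left out; `collect_into_groups` is a hypothetical stand-in for the real function.

```ocaml
type group =
  | Singleton of string
  | Group of { root : string; errors_rev : string list }

let collect_into_groups errors =
  List.fold_left
    (fun acc (root, error) ->
      match (root, acc) with
      (* Errors with no root are never grouped. *)
      | (None, _) -> Singleton error :: acc
      (* Same root as the previous group: join it. *)
      | (Some root, Group { root = root'; errors_rev } :: acc') when root = root' ->
        Group { root; errors_rev = error :: errors_rev } :: acc'
      (* Otherwise start a fresh group. *)
      | (Some root, _) -> Group { root; errors_rev = [error] } :: acc)
    [] errors
  |> List.rev

let () =
  let groups =
    collect_into_groups
      [ (Some "Cannot call f", "number incompatible with string");
        (Some "Cannot call f", "number incompatible with boolean");
        (None, "Unexpected token") ]
  in
  match groups with
  | [Group { root = "Cannot call f"; errors_rev }; Singleton _] ->
    assert (List.length errors_rev = 2)
  | _ -> assert false
```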
*) + | _ -> + if omit then raise (Interrupt_PrintableErrorSet_fold acc); + Group { kind; root; errors_rev = Nel.one error; omitted = 0 } :: acc) + in + (n + 1, acc)) + set + (0, []) in - (n + 1, acc) - ) set (0, []) in - acc - with - Interrupt_ErrorSet_fold acc -> acc + with Interrupt_PrintableErrorSet_fold acc -> acc) (* Human readable output *) module Cli_output = struct @@ -1020,177 +1072,184 @@ module Cli_output = struct } let severity_fragment_style = function - | Err -> error_fragment_style - | Warn -> warning_fragment_style - | Off -> - Utils_js.assert_false "CLI output is only called with warnings and errors." - - let print_file_at_location ~strip_root ~severity stdin_file main_file loc s = Loc.( - let l0 = loc.start.line in - let l1 = loc._end.line in - let c0 = loc.start.column in - let c1 = loc._end.column in - let filename = file_of_source loc.source in - let severity_style = severity_fragment_style severity in - - let see_another_file ~is_lib filename = - if filename = main_file - then [(default_style "")] - else - let prefix = Printf.sprintf ". See%s: " - (if is_lib then " lib" else "") in - let filename = if is_lib - then relative_lib_path ~strip_root filename - else relative_path ~strip_root filename - in - [ - comment_style prefix; - comment_file_style (Printf.sprintf "%s:%d" filename l0) - ] - in - - let code_line = read_lines_in_file loc filename stdin_file in - - match code_line, filename with - | _, None -> - [ - comment_style s; - default_style "\n"; - ] - | None, _ -> - let original_filename, is_lib = match filename, loc.source with - | Some filename, Some File_key.LibFile _ - | None, Some File_key.LibFile filename -> filename,true - | Some filename, _ - | None, Some File_key.SourceFile filename - | None, Some File_key.JsonFile filename - | None, Some File_key.ResourceFile filename -> filename, false - | None, Some File_key.Builtins - | None, None -> - failwith "Should only have lib and source files at this point" in - [comment_style s] @ - (see_another_file ~is_lib original_filename) @ - [default_style "\n"]; - | Some code_lines, Some filename -> - let is_lib = match loc.source with - | Some File_key.LibFile _ -> true - | _ -> false in - begin match code_lines with - | code_line, [] -> - (* Here we have a single line of context *) - let line_number_text = Printf.sprintf "%3d: " l0 in - let highlighted_line = if (l1 == l0) && (String.length code_line) >= c1 - then highlight_error_in_line ~severity_style code_line c0 c1 - else [source_fragment_style code_line] - in - let padding = - let line_num = String.make (String.length line_number_text) ' ' in - let spaces = - let prefix = if String.length code_line <= c0 - then code_line - else String.sub code_line 0 c0 - in - Str.global_replace (Str.regexp "[^\t ]") " " prefix - in - line_num ^ spaces + | Err -> error_fragment_style + | Warn -> warning_fragment_style + | Off -> Utils_js.assert_false "CLI output is only called with warnings and errors." + + let print_file_at_location ~strip_root ~severity stdin_file main_file loc s = + Loc.( + let l0 = loc.start.line in + let l1 = loc._end.line in + let c0 = loc.start.column in + let c1 = loc._end.column in + let filename = file_of_source loc.source in + let severity_style = severity_fragment_style severity in + let see_another_file ~is_lib filename = + if filename = main_file then + [default_style ""] + else + let prefix = + Printf.sprintf + ". 
See%s: " + ( if is_lib then + " lib" + else + "" ) in - let underline_size = if l1 == l0 - then max 1 (c1 - c0) - else 1 + let filename = + if is_lib then + relative_lib_path ~strip_root filename + else + relative_path ~strip_root filename in - let underline = String.make underline_size '^' in - line_number_style line_number_text :: - highlighted_line @ - [comment_style (Printf.sprintf "\n%s%s %s" padding underline s)] @ - (see_another_file ~is_lib filename) @ - [default_style "\n"] - | code_lines -> - (* Here we have multiple lines of context *) - - (* The most lines of context that we'll show before abridging *) - let max_lines = 5 in - - (* Don't abridge if we could just show all the lines *) - let abridged = l1 - l0 + 1 > max_lines in - - (* Highlight the context *) - let highlighted_lines = code_lines - |> Nel.to_list - |> List.fold_left (fun (line_num, acc) line -> - if not abridged || line_num - l0 < max_lines - 2 || line_num = l1 - then - let line_number_text = - line_number_style (Utils_js.spf "\n%3d: " line_num) in - let highlighted_line = - (* First line *) - if line_num = l0 - then highlight_error_in_line ~severity_style line c0 (String.length line) - (* Last line *) - else if line_num = l1 - then highlight_error_in_line ~severity_style line 0 c1 - (* middle lines *) - else [error_fragment_style line] in - line_num + 1, (line_number_text :: highlighted_line)::acc - else if line_num - l0 = max_lines - 1 - then line_num + 1, [line_number_style "\n...:"]::acc - else line_num + 1, acc - ) (l0, []) - |> snd - |> List.rev - |> List.flatten in - - let first_line = code_lines |> Nel.hd in - let last_line = code_lines |> Nel.rev |> Nel.hd in - - (* Don't underline the whitespace at the beginning of the last line *) - let underline_prefix = - if Str.string_match (Str.regexp "^\\([\t ]*\\).*") last_line 0 - then Str.matched_group 1 last_line - else "" in - - let overline_size = max 1 (String.length first_line - c0) in - let underline_size = max 1 (c1 - String.length underline_prefix) in - - let line len = if len > 0 then String.make len '-' else "" in - let overline = "v" ^ (line (overline_size - 1)) in - let underline = (line (underline_size - 1)) ^ "^" in - - let overline_padding = - let line_num = - String.make (String.length (Printf.sprintf "%3d: " l0)) ' ' in - let spaces = - if String.length first_line <= c0 then "" + [comment_style prefix; comment_file_style (Printf.sprintf "%s:%d" filename l0)] + in + let code_line = read_lines_in_file loc filename stdin_file in + match (code_line, filename) with + | (_, None) -> [comment_style s; default_style "\n"] + | (None, _) -> + let (original_filename, is_lib) = + match (filename, loc.source) with + | (Some filename, Some (File_key.LibFile _)) + | (None, Some (File_key.LibFile filename)) -> + (filename, true) + | (Some filename, _) + | (None, Some (File_key.SourceFile filename)) + | (None, Some (File_key.JsonFile filename)) + | (None, Some (File_key.ResourceFile filename)) -> + (filename, false) + | (None, Some File_key.Builtins) + | (None, None) -> + failwith "Should only have lib and source files at this point" + in + [comment_style s] @ see_another_file ~is_lib original_filename @ [default_style "\n"] + | (Some code_lines, Some filename) -> + let is_lib = + match loc.source with + | Some (File_key.LibFile _) -> true + | _ -> false + in + begin + match code_lines with + | (code_line, []) -> + (* Here we have a single line of context *) + let line_number_text = Printf.sprintf "%3d: " l0 in + let highlighted_line = + if l1 == l0 && 
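A sketch of how the single-line underline is positioned: the line-number gutter plus the prefix of the source line is converted to whitespace (tabs are preserved so columns still line up), then `^` characters cover the error span. To stay self-contained this uses `String.map` in place of the `Str.global_replace` call in the diff, which behaves the same way for this purpose; illustrative only.

```ocaml
let underline_line ~line ~line_number ~c0 ~c1 =
  let gutter = Printf.sprintf "%3d: " line_number in
  let prefix =
    if String.length line <= c0 then line else String.sub line 0 c0
  in
  let padding =
    String.make (String.length gutter) ' '
    ^ String.map (fun c -> if c = '\t' then '\t' else ' ') prefix
  in
  let underline = String.make (max 1 (c1 - c0)) '^' in
  Printf.sprintf "%s%s\n%s%s" gutter line padding underline

let () =
  (* Prints the source line with "^^" aligned under "42". *)
  print_endline
    (underline_line ~line:"let x: string = 42;" ~line_number:3 ~c0:16 ~c1:18)
```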
String.length code_line >= c1 then + highlight_error_in_line ~severity_style code_line c0 c1 else - let prefix = String.sub first_line 0 c0 in + [source_fragment_style code_line] + in + let padding = + let line_num = String.make (String.length line_number_text) ' ' in + let spaces = + let prefix = + if String.length code_line <= c0 then + code_line + else + String.sub code_line 0 c0 + in Str.global_replace (Str.regexp "[^\t ]") " " prefix + in + line_num ^ spaces in - line_num ^ spaces - in - - let underlineline_padding = - String.make (String.length (Printf.sprintf "%3d: " l1)) ' ' in - - let comment = Printf.sprintf - "\n%s%s%s %s" - underlineline_padding - underline_prefix - underline - s in - - [comment_style (Printf.sprintf "%s%s" overline_padding overline)] @ - highlighted_lines @ - [comment_style comment] @ - (see_another_file ~is_lib filename) @ - [default_style "\n"] - end - ) + let underline_size = + if l1 == l0 then + max 1 (c1 - c0) + else + 1 + in + let underline = String.make underline_size '^' in + (line_number_style line_number_text :: highlighted_line) + @ [comment_style (Printf.sprintf "\n%s%s %s" padding underline s)] + @ see_another_file ~is_lib filename + @ [default_style "\n"] + | code_lines -> + (* Here we have multiple lines of context *) + + (* The most lines of context that we'll show before abridging *) + let max_lines = 5 in + (* Don't abridge if we could just show all the lines *) + let abridged = l1 - l0 + 1 > max_lines in + (* Highlight the context *) + let highlighted_lines = + code_lines + |> Nel.to_list + |> List.fold_left + (fun (line_num, acc) line -> + if (not abridged) || line_num - l0 < max_lines - 2 || line_num = l1 then + let line_number_text = + line_number_style (Utils_js.spf "\n%3d: " line_num) + in + let highlighted_line = + (* First line *) + if line_num = l0 then + highlight_error_in_line ~severity_style line c0 (String.length line) + (* Last line *) + else if line_num = l1 then + highlight_error_in_line ~severity_style line 0 c1 + (* middle lines *) + else + [error_fragment_style line] + in + (line_num + 1, (line_number_text :: highlighted_line) :: acc) + else if line_num - l0 = max_lines - 1 then + (line_num + 1, [line_number_style "\n...:"] :: acc) + else + (line_num + 1, acc)) + (l0, []) + |> snd + |> List.rev + |> List.flatten + in + let first_line = code_lines |> Nel.hd in + let last_line = code_lines |> Nel.rev |> Nel.hd in + (* Don't underline the whitespace at the beginning of the last line *) + let underline_prefix = + if Str.string_match (Str.regexp "^\\([\t ]*\\).*") last_line 0 then + Str.matched_group 1 last_line + else + "" + in + let overline_size = max 1 (String.length first_line - c0) in + let underline_size = max 1 (c1 - String.length underline_prefix) in + let line len = + if len > 0 then + String.make len '-' + else + "" + in + let overline = "v" ^ line (overline_size - 1) in + let underline = line (underline_size - 1) ^ "^" in + let overline_padding = + let line_num = String.make (String.length (Printf.sprintf "%3d: " l0)) ' ' in + let spaces = + if String.length first_line <= c0 then + "" + else + let prefix = String.sub first_line 0 c0 in + Str.global_replace (Str.regexp "[^\t ]") " " prefix + in + line_num ^ spaces + in + let underlineline_padding = + String.make (String.length (Printf.sprintf "%3d: " l1)) ' ' + in + let comment = + Printf.sprintf "\n%s%s%s %s" underlineline_padding underline_prefix underline s + in + [comment_style (Printf.sprintf "%s%s" overline_padding overline)] + @ highlighted_lines + @ 
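A simplified sketch of the abridging rule for multi-line context: at most five lines are shown, and when the span is longer the first three lines are kept, a `...:` marker is inserted, and the last line is kept. The styling and line-number gutters are dropped and `abridge_context` is a hypothetical name; the visible result matches the fold above, but the details are elided.

```ocaml
let abridge_context ~max_lines lines =
  let n = List.length lines in
  if n <= max_lines then
    lines
  else
    let rec take k = function
      | [] -> []
      | x :: xs -> if k = 0 then [] else x :: take (k - 1) xs
    in
    take (max_lines - 2) lines @ ["...:"] @ [List.nth lines (n - 1)]

let () =
  let shown =
    abridge_context ~max_lines:5
      ["line 1"; "line 2"; "line 3"; "line 4"; "line 5"; "line 6"; "line 7"]
  in
  assert (shown = ["line 1"; "line 2"; "line 3"; "...:"; "line 7"])
```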
[comment_style comment] + @ see_another_file ~is_lib filename + @ [default_style "\n"] + end) let print_message_nice ~strip_root ~severity stdin_file main_file message = - let loc, s = to_pp message in + let (loc, s) = to_pp message in print_file_at_location ~strip_root ~severity stdin_file main_file loc s - let remove_newlines (color, text) = - (color, Str.global_replace (Str.regexp "\n") "\\n" text) + let remove_newlines (color, text) = (color, Str.global_replace (Str.regexp "\n") "\\n" text) (* ========================== * Full Terminal Width Header @@ -1200,29 +1259,27 @@ module Cli_output = struct * is the lesser of the terminal length and 120 characters. *) let print_header_friendly ~strip_root ~flags ~severity loc = let severity_style = severity_fragment_style severity in - let severity_name = match severity with - | Err -> "Error" - | Warn -> "Warning" - | Off -> failwith "unreachable" - in - let horizontal_line_length = - flags.message_width - (String.length severity_name + 1) + let severity_name = + match severity with + | Err -> "Error" + | Warn -> "Warning" + | Off -> failwith "unreachable" in + let horizontal_line_length = flags.message_width - (String.length severity_name + 1) in let filename = - let open Loc in - let open File_key in - let { source; start = { line; column; _ }; _ } = loc in - let pos = ":" ^ string_of_int line ^ ":" ^ string_of_int (column + 1) in - match source with - | Some (LibFile filename) - -> relative_lib_path ~strip_root filename ^ pos - | Some (SourceFile filename) - | Some (JsonFile filename) - | Some (ResourceFile filename) - -> relative_path ~strip_root filename ^ pos - | Some Builtins - | None - -> "" + Loc.( + File_key.( + let { source; start = { line; column; _ }; _ } = loc in + let pos = ":" ^ string_of_int line ^ ":" ^ string_of_int (column + 1) in + match source with + | Some (LibFile filename) -> relative_lib_path ~strip_root filename ^ pos + | Some (SourceFile filename) + | Some (JsonFile filename) + | Some (ResourceFile filename) -> + relative_path ~strip_root filename ^ pos + | Some Builtins + | None -> + "")) in (* If the filename is longer then the remaining horizontal line length we * put the filename on a new line. Otherwise the filename eats some of the @@ -1247,28 +1304,29 @@ module Cli_output = struct let horizontal_line = if flags.unicode then String.init (horizontal_line_length * 3) (fun i -> - match i mod 3 with - | 0 -> '\xE2' - | 1 -> '\x94' - | 2 -> '\x88' - | _ -> failwith "unreachable" - ) + match i mod 3 with + | 0 -> '\xE2' + | 1 -> '\x94' + | 2 -> '\x88' + | _ -> failwith "unreachable") else String.make horizontal_line_length '-' in (* Construct the header by appending the constituent pieces. *) [ - severity_style ( - severity_name - ^ " " - ^ horizontal_line - ^ (if filename_on_newline then "\n" else " ") - ^ filename - ); + severity_style + ( severity_name + ^ " " + ^ horizontal_line + ^ ( if filename_on_newline then + "\n" + else + " " ) + ^ filename ); default_style "\n"; ] - module FileKeyMap = MyMap.Make(File_key) + module FileKeyMap = MyMap.Make (File_key) type tag_kind = | Open of Loc.position @@ -1338,152 +1396,159 @@ module Cli_output = struct * - A map of reference ids to reference colors. * - A map of filenames to lists of open/close tags to be used in rendering * source text. 
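A sketch of the header's horizontal rule construction shown above: in unicode mode the line is built byte-by-byte with `String.init`, repeating the three-byte UTF-8 encoding of the box-drawing dash used in the diff, and otherwise plain `-` characters are used. Illustrative only.

```ocaml
let horizontal_line ~unicode length =
  if unicode then
    String.init (length * 3) (fun i ->
        match i mod 3 with
        | 0 -> '\xE2'
        | 1 -> '\x94'
        | 2 -> '\x88'
        | _ -> assert false)
  else
    String.make length '-'

let () =
  (* Each unicode dash is 3 bytes but renders as one terminal column. *)
  assert (String.length (horizontal_line ~unicode:true 10) = 30);
  assert (horizontal_line ~unicode:false 10 = "----------")
```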
*) - let layout_references ~(custom_colors:'a IMap.t) : Loc.t IMap.t -> ('a color IMap.t * tags) = - let open Loc in - let rec add_tags colors opened id start end' tags tags_acc = - match tags with - (* For an empty array, add both the open and close tags. Also add a color - * of 0 for this id. *) - | [] -> - let color = Option.value_map (IMap.get id custom_colors) - ~f:(fun custom -> CustomColor custom) ~default:(Color 0) in - let colors = IMap.add id color colors in - (colors, List.rev ((end', id, Close) :: (start, id, Open end') :: tags_acc)) - (* Search for the correct place where start should appear in our list. - * - * If pos and start are equal and the current tag ends _after_ the end - * position we are inserting then we want to open inside the current tag. - * - * Note that we may introduce an Open tag _before_ a Close tag at the same - * position! This is intentional as it introduces overlapping. If we have - * two tags that open/close at the same position we want them to be - * different colors. Intersecting accomplishes this. *) - | ((pos, tag_id, tag_kind) as tag) :: tags when ( - let k = pos_cmp pos start in - k < 0 || (k = 0 && - match tag_kind with - | Close -> false - | Open tag_end -> pos_cmp end' tag_end < 0 - ) - ) -> - (* Keep track of the ids which are opening and closing so that we can - * increment their colors if start is inside them. + let layout_references ~(custom_colors : 'a IMap.t) : Loc.t IMap.t -> 'a color IMap.t * tags = + Loc.( + let rec add_tags colors opened id start end' tags tags_acc = + match tags with + (* For an empty array, add both the open and close tags. Also add a color + * of 0 for this id. *) + | [] -> + let color = + Option.value_map + (IMap.get id custom_colors) + ~f:(fun custom -> CustomColor custom) + ~default:(Color 0) + in + let colors = IMap.add id color colors in + (colors, List.rev ((end', id, Close) :: (start, id, Open end') :: tags_acc)) + (* Search for the correct place where start should appear in our list. * - * Also keep track of the ids which were opened at the time this tag was - * opened. This allows us to move backwards through the containment tree - * and update tag colors appropriately. + * If pos and start are equal and the current tag ends _after_ the end + * position we are inserting then we want to open inside the current tag. * - * See the definition of update_colors for more information. *) - let opened = Opened ( - let Opened opened' = opened in - match tag_kind with - | Open _ -> IMap.add tag_id opened opened' - | Close -> IMap.remove tag_id opened' - ) in - add_tags colors opened id start end' tags (tag :: tags_acc) - (* We've found the correct place for start! If there are any tags which - * are currently open then we need to increment their colors. *) - | tags -> - (* Add our closing tag. We will get from this operation the color which - * we should add for our current reference. *) - let (color, tags) = add_close_tag colors id end' tags 0 [] in - (* Add a color for this id to the colors map. If our id exists in - * custom_colors then we add a custom color. Otherwise we add a color - * based on the current rank and update similar rank-based colors. *) - let colors = match IMap.get id custom_colors with - | Some custom -> - IMap.add id (CustomColor custom) colors - | None -> - (* Increment the colors of all open references by the color of this tag. - * It is a logic error if some open_id does not exist in colors. *) - let colors = update_colors opened colors color in - (* Add the color for this reference. 
*) - IMap.add id (Color color) colors - in - (* Finish by adding an open tag. A corresponding closing tag will have - * been added by add_close_tag. *) - (colors, List.rev_append ((start, id, Open end') :: tags_acc) tags) - - and add_close_tag colors id end' tags color_acc tags_acc = - match tags with - | [] -> - (color_acc, List.rev ((end', id, Close) :: tags_acc)) - (* Search for the last place to add our end position. + * Note that we may introduce an Open tag _before_ a Close tag at the same + * position! This is intentional as it introduces overlapping. If we have + * two tags that open/close at the same position we want them to be + * different colors. Intersecting accomplishes this. *) + | ((pos, tag_id, tag_kind) as tag) :: tags + when let k = pos_cmp pos start in + k < 0 + || k = 0 + && + match tag_kind with + | Close -> false + | Open tag_end -> pos_cmp end' tag_end < 0 -> + (* Keep track of the ids which are opening and closing so that we can + * increment their colors if start is inside them. + * + * Also keep track of the ids which were opened at the time this tag was + * opened. This allows us to move backwards through the containment tree + * and update tag colors appropriately. + * + * See the definition of update_colors for more information. *) + let opened = + Opened + (let (Opened opened') = opened in + match tag_kind with + | Open _ -> IMap.add tag_id opened opened' + | Close -> IMap.remove tag_id opened') + in + add_tags colors opened id start end' tags (tag :: tags_acc) + (* We've found the correct place for start! If there are any tags which + * are currently open then we need to increment their colors. *) + | tags -> + (* Add our closing tag. We will get from this operation the color which + * we should add for our current reference. *) + let (color, tags) = add_close_tag colors id end' tags 0 [] in + (* Add a color for this id to the colors map. If our id exists in + * custom_colors then we add a custom color. Otherwise we add a color + * based on the current rank and update similar rank-based colors. *) + let colors = + match IMap.get id custom_colors with + | Some custom -> IMap.add id (CustomColor custom) colors + | None -> + (* Increment the colors of all open references by the color of this tag. + * It is a logic error if some open_id does not exist in colors. *) + let colors = update_colors opened colors color in + (* Add the color for this reference. *) + IMap.add id (Color color) colors + in + (* Finish by adding an open tag. A corresponding closing tag will have + * been added by add_close_tag. *) + (colors, List.rev_append ((start, id, Open end') :: tags_acc) tags) + and add_close_tag colors id end' tags color_acc tags_acc = + match tags with + | [] -> (color_acc, List.rev ((end', id, Close) :: tags_acc)) + (* Search for the last place to add our end position. + * + * Note that we may introduce a Close tag _after_ an Open tag at the same + * position! This is intentional as it introduces overlapping. If we have + * two tags that open/close at the same position we want them to be + * different colors. Intersecting accomplishes this. *) + | ((pos, tag_id, tag_kind) as tag) :: tags when pos_cmp pos end' <= 0 -> + (* If we run into an open tag then our color must be at least 1 greater + * then the color of the opened tag. 
*) + let color_acc = + match tag_kind with + | Close -> color_acc + | Open _ -> + (match IMap.get tag_id colors with + | None -> max color_acc (0 + 1) + | Some (Color tag_color) -> max color_acc (tag_color + 1) + | Some (CustomColor _) -> color_acc) + in + add_close_tag colors id end' tags color_acc (tag :: tags_acc) + (* When we find the location for our close tag, add it. *) + | tags -> (color_acc, List.rev_append ((end', id, Close) :: tags_acc) tags) + (* According to our design, the color of a location is one plus the largest + * color opened inside of the location. See add_close_tag for the + * implementation of this logic when we add a location that opens *before* + * a location that already exists in tags. * - * Note that we may introduce a Close tag _after_ an Open tag at the same - * position! This is intentional as it introduces overlapping. If we have - * two tags that open/close at the same position we want them to be - * different colors. Intersecting accomplishes this. *) - | ((pos, tag_id, tag_kind) as tag) :: tags when pos_cmp pos end' <= 0 -> - (* If we run into an open tag then our color must be at least 1 greater - * then the color of the opened tag. *) - let color_acc = match tag_kind with - | Close -> color_acc - | Open _ -> - (match IMap.get tag_id colors with - | None -> max color_acc (0 + 1) - | Some (Color tag_color) -> max color_acc (tag_color + 1) - | Some (CustomColor _) -> color_acc - ) - in - add_close_tag colors id end' tags color_acc (tag :: tags_acc) - (* When we find the location for our close tag, add it. *) - | tags -> - (color_acc, List.rev_append ((end', id, Close) :: tags_acc) tags) - - (* According to our design, the color of a location is one plus the largest - * color opened inside of the location. See add_close_tag for the - * implementation of this logic when we add a location that opens *before* - * a location that already exists in tags. - * - * The logic for updating colors when a location opens *after* a location - * that already exists in tags is more difficult. Not only must we try to - * update the locations which are currently opened, but we must also - * propagate updates to previously opened tags. Consider the uncommon case - * our logic takes pain to support: - * - * Reference 1 > |-------| - * Reference 2 > |--------| - * Reference 3 > |--------| - * - * Let's say references 1 and 2 already exist. We are adding reference 3. At - * this point, reference 1 will have a color of 1 and reference 2 will have - * a color of 0. - * - * We add reference 3 which has a color of 0. Reference 3 opens inside of - * reference 2 so we update reference 2's color to 0 + 1. Now reference 2 - * and reference 1 have a color of 1. This is incorrect. Reference 1 should - * have a color of 2 at this point. - * - * Our `opened` type keeps track of the open references when another - * reference opens. So we have access to reference 1 through reference 2. - * Update reference 1's color to 1 + 1. - * - * We can short-circuit the recursive traversal of reference trees when we - * can't update the color of most recently opened tags. 
*) - and update_colors (Opened opened) colors color = - IMap.fold (fun open_id opened colors -> - let open_color = Option.value (IMap.get open_id colors) ~default:(Color 0) in - match open_color with - | CustomColor _ -> colors - | Color open_color -> - if open_color >= color + 1 then - colors - else - let colors = IMap.add open_id (Color (color + 1)) colors in - update_colors opened colors (color + 1) - ) opened colors - in - fun references -> - List.fold_left (fun (colors, file_tags) (id, loc) -> - match loc.source with - | None -> (colors, file_tags) - | Some source -> - let tags = Option.value (FileKeyMap.get source file_tags) ~default:[] in - let (colors, tags) = add_tags colors (Opened IMap.empty) id loc.start loc._end tags [] in - let file_tags = FileKeyMap.add source tags file_tags in - (colors, file_tags) - ) (IMap.empty, FileKeyMap.empty) (IMap.bindings references) + * The logic for updating colors when a location opens *after* a location + * that already exists in tags is more difficult. Not only must we try to + * update the locations which are currently opened, but we must also + * propagate updates to previously opened tags. Consider the uncommon case + * our logic takes pain to support: + * + * Reference 1 > |-------| + * Reference 2 > |--------| + * Reference 3 > |--------| + * + * Let's say references 1 and 2 already exist. We are adding reference 3. At + * this point, reference 1 will have a color of 1 and reference 2 will have + * a color of 0. + * + * We add reference 3 which has a color of 0. Reference 3 opens inside of + * reference 2 so we update reference 2's color to 0 + 1. Now reference 2 + * and reference 1 have a color of 1. This is incorrect. Reference 1 should + * have a color of 2 at this point. + * + * Our `opened` type keeps track of the open references when another + * reference opens. So we have access to reference 1 through reference 2. + * Update reference 1's color to 1 + 1. + * + * We can short-circuit the recursive traversal of reference trees when we + * can't update the color of most recently opened tags. *) + and update_colors (Opened opened) colors color = + IMap.fold + (fun open_id opened colors -> + let open_color = Option.value (IMap.get open_id colors) ~default:(Color 0) in + match open_color with + | CustomColor _ -> colors + | Color open_color -> + if open_color >= color + 1 then + colors + else + let colors = IMap.add open_id (Color (color + 1)) colors in + update_colors opened colors (color + 1)) + opened + colors + in + fun references -> + List.fold_left + (fun (colors, file_tags) (id, loc) -> + match loc.source with + | None -> (colors, file_tags) + | Some source -> + let tags = Option.value (FileKeyMap.get source file_tags) ~default:[] in + let (colors, tags) = + add_tags colors (Opened IMap.empty) id loc.start loc._end tags [] + in + let file_tags = FileKeyMap.add source tags file_tags in + (colors, file_tags)) + (IMap.empty, FileKeyMap.empty) + (IMap.bindings references)) (* To be used with the result of layout_references. * @@ -1496,16 +1561,15 @@ module Cli_output = struct * should always be the default terminal color. 
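
(* The coloring rule above can be exercised on its own with a small sketch:
 * plain integer intervals stand in for Loc.t, an array stands in for IMap,
 * and `assign_colors` is an illustrative helper rather than a function from
 * this module. It implements only the "one plus the largest color opened
 * inside" rule, without custom colors or the tag list. *)
type ivl = { start : int; end_ : int }

let assign_colors (ivls : ivl list) : int array =
  let arr = Array.of_list (List.sort (fun a b -> compare a.start b.start) ivls) in
  let n = Array.length arr in
  let colors = Array.make n 0 in
  (* Later-starting intervals are the only ones that can open inside an
   * earlier one, so a single right-to-left pass settles every color. *)
  for i = n - 2 downto 0 do
    for j = i + 1 to n - 1 do
      if arr.(j).start > arr.(i).start && arr.(j).start < arr.(i).end_ then
        colors.(i) <- max colors.(i) (colors.(j) + 1)
    done
  done;
  colors

(* Mirroring the three overlapping references in the comment above:
 * reference 3 gets color 0, reference 2 gets 1, reference 1 gets 2. *)
let () =
  assign_colors [{ start = 0; end_ = 12 }; { start = 2; end_ = 16 }; { start = 6; end_ = 20 }]
  |> Array.iter (Printf.printf "%d ")
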
*) let get_tty_color_internal = function - | CustomColor `Primary -> Tty.Red - | CustomColor `Root -> Tty.Default - | Color rank -> - (match rank mod 4 with - | 0 -> Tty.Cyan - | 1 -> Tty.Yellow - | 2 -> Tty.Green - | 3 -> Tty.Magenta - | _ -> failwith "unreachable" - ) + | CustomColor `Primary -> Tty.Red + | CustomColor `Root -> Tty.Default + | Color rank -> + (match rank mod 4 with + | 0 -> Tty.Cyan + | 1 -> Tty.Yellow + | 2 -> Tty.Green + | 3 -> Tty.Magenta + | _ -> failwith "unreachable") let get_tty_color id colors = get_tty_color_internal (Option.value (IMap.get id colors) ~default:(Color 0)) @@ -1518,17 +1582,20 @@ module Cli_output = struct | id :: ids -> (match Option.value (IMap.get id colors) ~default:(Color 0) with | CustomColor `Root -> - Option.value_map (get_tty_color_from_stack ids colors) - ~f:(fun x -> Some x) ~default:(Some Tty.Default) - | color -> - Some (get_tty_color_internal color) - ) + Option.value_map + (get_tty_color_from_stack ids colors) + ~f:(fun x -> Some x) + ~default:(Some Tty.Default) + | color -> Some (get_tty_color_internal color)) let bullet_char ~flags = (* Use [U+2022][1] for the bullet character if unicode is enabled. * * [1]: http://graphemica.com/%E2%80%A2 *) - if flags.unicode then "\xE2\x80\xA2" else "-" + if flags.unicode then + "\xE2\x80\xA2" + else + "-" (* ================== * Error Message Text @@ -1540,162 +1607,144 @@ module Cli_output = struct * Most of the following code is responsible for splitting our message onto * multiple lines. This would make for a great interview question. *) let print_message_friendly = - let open Friendly in - (* Takes an input list of styled inline code parts of the following shape: - * - * (bool * Tty.style * string) list - * - * The first boolean tells us whether or not we are allowed to break the - * string inside into multiple lines. - * - * We return a data structure of: - * - * (int * (Tty.style * string) list) list - * - * Each item in the list is a "word". Each "word" is a tuple. The first - * element of the tuple is the length of the word. The second element of - * the tuple is a list of styled strings which make up the word. A word - * can have multiple different styles inside of it. - * - * We can put line breaks in between each "word". However, we cannot put - * a line break inside the strings after we have split them into words. - * - * Note that a word may have spaces in it. The input strs includes a - * boolean that determines whether or not a string is breakable even if it - * has spaces. Not breaking even when we have spaces is useful for - * rendering code. *) - let split_into_words = - let merge style str word_in_progress_acc = - match word_in_progress_acc with - (* If we do not have a word in progress then create a new word - * from str. *) - | None -> - (String.length str, [(style, str)]) - (* If there is another word then we want to merge str with - * that word. We can assume that there is no breakpoint between - * the end of str and the beginning of the next word. *) - | Some (len, word) -> - (String.length str + len, (style, str) :: word) + Friendly.( + (* Takes an input list of styled inline code parts of the following shape: + * + * (bool * Tty.style * string) list + * + * The first boolean tells us whether or not we are allowed to break the + * string inside into multiple lines. + * + * We return a data structure of: + * + * (int * (Tty.style * string) list) list + * + * Each item in the list is a "word". Each "word" is a tuple. 
The first + * element of the tuple is the length of the word. The second element of + * the tuple is a list of styled strings which make up the word. A word + * can have multiple different styles inside of it. + * + * We can put line breaks in between each "word". However, we cannot put + * a line break inside the strings after we have split them into words. + * + * Note that a word may have spaces in it. The input strs includes a + * boolean that determines whether or not a string is breakable even if it + * has spaces. Not breaking even when we have spaces is useful for + * rendering code. *) + let split_into_words = + let merge style str word_in_progress_acc = + match word_in_progress_acc with + (* If we do not have a word in progress then create a new word + * from str. *) + | None -> (String.length str, [(style, str)]) + (* If there is another word then we want to merge str with + * that word. We can assume that there is no breakpoint between + * the end of str and the beginning of the next word. *) + | Some (len, word) -> (String.length str + len, (style, str) :: word) + in + let rec loop finished_words_acc word_in_progress_acc strs = + match strs with + | [] -> + let finished_words_acc = + match word_in_progress_acc with + | None -> finished_words_acc + | Some word -> word :: finished_words_acc + in + List.rev_map (fun (n, words) -> (n, List.rev words)) finished_words_acc + | (breakable, style, str) :: strs -> + (* If our string is breakable then try to find the first space. Use + * that space as the breakpoint. *) + let bp = + if breakable then + try Some (String.index str ' ') with Not_found -> None + else + None + in + (match bp with + (* If we have no breakpoint then we want to either create a new word + * or combine our str with the first word in the result of + * recursively calling split_into_words. *) + | None -> loop finished_words_acc (Some (merge style str word_in_progress_acc)) strs + (* If we have a breakpoint then we need to split up our str and + * create a new word with the left half and recurse with the + * right half. *) + | Some bp -> + let left = String.sub str 0 bp in + let right = String.sub str (bp + 1) (String.length str - (bp + 1)) in + (* We need to recurse with the right half of our split str because + * there may be more words we need to split out in it. bp is only + * the first breakpoint in str. We can assume that our right half + * is breakable since we would have no breakpoint if str was + * not breakable. *) + loop + (merge style left word_in_progress_acc :: finished_words_acc) + None + ((true, style, right) :: strs)) + in + loop [] None in - let rec loop - finished_words_acc - word_in_progress_acc - strs - = - match strs with - | [] -> - let finished_words_acc = (match word_in_progress_acc with - | None -> finished_words_acc - | Some word -> word :: finished_words_acc - ) in - List.rev_map (fun (n, words) -> (n, List.rev words)) finished_words_acc - - | (breakable, style, str) :: strs -> - (* If our string is breakable then try to find the first space. Use - * that space as the breakpoint. *) - let bp = if breakable - then try Some (String.index str ' ') with Not_found -> None - else None - in - (match bp with - (* If we have no breakpoint then we want to either create a new word - * or combine our str with the first word in the result of - * recursively calling split_into_words. 
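
(* A standalone sketch of the word-splitting idea above, with styles dropped
 * to keep it short: breakable strings are cut at spaces, unbreakable ones
 * (inline code) stay whole, and adjacent pieces with no space between them
 * merge into one "word". `split_words` is a simplified stand-in, not the
 * real split_into_words. *)
let split_words (parts : (bool * string) list) : string list =
  let push acc = function
    | "" -> acc
    | word -> word :: acc
  in
  let rec go acc current = function
    | [] -> List.rev (push acc current)
    | (breakable, s) :: rest ->
      (match (if breakable then String.index_opt s ' ' else None) with
      | None -> go acc (current ^ s) rest
      | Some i ->
        let left = String.sub s 0 i in
        let right = String.sub s (i + 1) (String.length s - i - 1) in
        go (push acc (current ^ left)) "" ((true, right) :: rest))
  in
  go [] "" parts

(* For example, [(true, "cannot call "); (false, "foo.bar"); (true, " here")]
 * becomes ["cannot"; "call"; "foo.bar"; "here"]: the code segment survives
 * as a single unbreakable word. *)
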
*) - | None -> - loop - finished_words_acc - (Some (merge style str word_in_progress_acc)) - strs - - (* If we have a breakpoint then we need to split up our str and - * create a new word with the left half and recurse with the - * right half. *) - | Some bp -> - let left = String.sub str 0 bp in - let right = String.sub str (bp + 1) ((String.length str) - (bp + 1)) in - (* We need to recurse with the right half of our split str because - * there may be more words we need to split out in it. bp is only - * the first breakpoint in str. We can assume that our right half - * is breakable since we would have no breakpoint if str was - * not breakable. *) - loop - (merge style left word_in_progress_acc :: finished_words_acc) - None - ((true, style, right) :: strs) - ) + let is_style_underlined = function + | Tty.Normal _ -> false + | Tty.Bold _ -> false + | Tty.Dim _ -> false + | Tty.Underline _ -> true + | Tty.BoldUnderline _ -> true + | Tty.DimUnderline _ -> true + | Tty.NormalWithBG _ -> false + | Tty.BoldWithBG _ -> false in - loop [] None - in - let is_style_underlined = function - | Tty.Normal _ -> false - | Tty.Bold _ -> false - | Tty.Dim _ -> false - | Tty.Underline _ -> true - | Tty.BoldUnderline _ -> true - | Tty.DimUnderline _ -> true - | Tty.NormalWithBG _ -> false - | Tty.BoldWithBG _ -> false - in - let is_first_underlined = function - | [] -> false - | (style, _) :: _ -> is_style_underlined style - in - let is_last_underlined words = words |> List.rev |> is_first_underlined in - let with_len = function - | None -> None - | Some styles -> - Some (List.fold_left (fun acc (_, s) -> acc + String.length s) 0 styles, styles) - in - (* Hard breaks a single Tty style list returned by split_into_words. We use - * this when a word is, by itself, larger then our line length. Returns a: - * - * (int * (Tty.style * string) list) - * - * Where int represents the length of the last line. *) - let hard_break_styles = - let rec loop acc pos ~line_length = function - | [] -> (pos, List.rev acc) - | (style, str) :: styles -> - let bp = line_length - pos in - let len = String.length str in - if len > bp then - let left = String.sub str 0 bp in - let right = String.sub str bp (len - bp) in - loop - (default_style "\n" :: (style, left) :: acc) - 0 - ~line_length - ((style, right) :: styles) - else - loop - ((style, str) :: acc) - (pos + len) - ~line_length - styles + let is_first_underlined = function + | [] -> false + | (style, _) :: _ -> is_style_underlined style in - loop [] 0 - in - (* Concatenates a words data structure created by split_into_words into a: - * - * (Tty.style * string) list - * - * Which can be rendered by the Tty module. This is where we will line - * break depending on the length of our word and the position of the word - * on the current line. - * - * TODO: Handle orphans gracefully. *) - let concat_words_into_lines - ~line_length - ?indentation_first - ?indentation - words - = - let indentation = with_len indentation in - let indentation_first = match indentation_first with - | (Some _ as indentation_first) -> with_len indentation_first - | None -> indentation + let is_last_underlined words = words |> List.rev |> is_first_underlined in + let with_len = function + | None -> None + | Some styles -> + Some (List.fold_left (fun acc (_, s) -> acc + String.length s) 0 styles, styles) in - match words with + (* Hard breaks a single Tty style list returned by split_into_words. We use + * this when a word is, by itself, larger then our line length. 
Returns a: + * + * (int * (Tty.style * string) list) + * + * Where int represents the length of the last line. *) + let hard_break_styles = + let rec loop acc pos ~line_length = function + | [] -> (pos, List.rev acc) + | (style, str) :: styles -> + let bp = line_length - pos in + let len = String.length str in + if len > bp then + let left = String.sub str 0 bp in + let right = String.sub str bp (len - bp) in + loop + (default_style "\n" :: (style, left) :: acc) + 0 + ~line_length + ((style, right) :: styles) + else + loop ((style, str) :: acc) (pos + len) ~line_length styles + in + loop [] 0 + in + (* Concatenates a words data structure created by split_into_words into a: + * + * (Tty.style * string) list + * + * Which can be rendered by the Tty module. This is where we will line + * break depending on the length of our word and the position of the word + * on the current line. + * + * TODO: Handle orphans gracefully. *) + let concat_words_into_lines ~line_length ?indentation_first ?indentation words = + let indentation = with_len indentation in + let indentation_first = + match indentation_first with + | Some _ as indentation_first -> with_len indentation_first + | None -> indentation + in + match words with (* No words means no string. *) | [] -> Option.value_map indentation_first ~default:[] ~f:snd (* If we have a single word we will use that as our initializer for @@ -1703,104 +1752,113 @@ module Cli_output = struct | init :: words -> let init = let (init_len, init_word) = init in - let init = match indentation_first with - | None -> (init_len, [init_word]) - | Some (indentation_first_len, indentation_first) -> - (indentation_first_len + init_len, [init_word; indentation_first]) + let init = + match indentation_first with + | None -> (init_len, [init_word]) + | Some (indentation_first_len, indentation_first) -> + (indentation_first_len + init_len, [init_word; indentation_first]) in ((fun () -> is_last_underlined init_word), init) in - let (_, (_, acc)) = List.fold_left (fun (last_underlined, (pos, acc)) (len, word) -> - (* If our position on the line plus one (for the space we would - * insert) plus the length of our word will fit in our line length - * then add the word to the current line separated from acc with a - * space. Otherwise start a new line where word is the only text. *) - if pos + 1 + len > line_length then - let last_underlined () = is_last_underlined word in - let (newline_len, newline) = match indentation with - | None -> - (0, [default_style "\n"]) - | Some (indentation_len, indentation) -> - (indentation_len, default_style "\n" :: indentation) - in - if len <= line_length then - (last_underlined, (len + newline_len, word :: newline :: acc)) - else - let (len, word) = hard_break_styles ~line_length word in - (last_underlined, (len + newline_len, word :: newline :: acc)) - else - (* If both the end of the last word was underlined *and* the - * beginning of the next word is underlined then we also want to - * underline the space we insert. *) - let should_underline = is_first_underlined word && last_underlined () in - let space = - let style = - if should_underline then - (Tty.Underline Tty.Default) + let (_, (_, acc)) = + List.fold_left + (fun (last_underlined, (pos, acc)) (len, word) -> + (* If our position on the line plus one (for the space we would + * insert) plus the length of our word will fit in our line length + * then add the word to the current line separated from acc with a + * space. Otherwise start a new line where word is the only text. 
*) + if pos + 1 + len > line_length then + let last_underlined () = is_last_underlined word in + let (newline_len, newline) = + match indentation with + | None -> (0, [default_style "\n"]) + | Some (indentation_len, indentation) -> + (indentation_len, default_style "\n" :: indentation) + in + if len <= line_length then + (last_underlined, (len + newline_len, word :: newline :: acc)) else - (Tty.Normal Tty.Default) - in - (style, " ") - in - let last_underlined () = is_last_underlined word in - (last_underlined, (pos + 1 + len, (space :: word) :: acc)) - ) init words in - List.concat (List.rev acc) - in - (* Create the tuple structure we pass into split_into_words. Code is not - * breakable but Text is breakable. *) - let print_message_inline ~flags ~reference = function - | Code s when not (Tty.should_color flags.color) -> - (false, Tty.Normal Tty.Default, "`" ^ s ^ "`") - | Text s when reference -> (true, Tty.Underline Tty.Default, s) - | Code s when reference -> (false, Tty.BoldUnderline Tty.Default, s) - | Text s -> (true, Tty.Normal Tty.Default, s) - | Code s -> (false, Tty.Bold Tty.Default, s) - in - (* Put it all together! *) - fun ~flags ~colors ~indentation message -> - let message = List.rev (List.fold_left (fun acc feature -> - match feature with - | Inline inlines -> - List.rev_append - (List.map (print_message_inline ~flags ~reference:false) inlines) - acc - | Reference (inlines, id) -> - let message = - List.map (print_message_inline ~flags ~reference:true) inlines @ - [ - (false, Tty.Normal Tty.Default, " "); - (false, Tty.Dim Tty.Default, "["); - (false, Tty.Normal (get_tty_color id colors), string_of_int id); - (false, Tty.Dim Tty.Default, "]"); - ] + let (len, word) = hard_break_styles ~line_length word in + (last_underlined, (len + newline_len, word :: newline :: acc)) + else + (* If both the end of the last word was underlined *and* the + * beginning of the next word is underlined then we also want to + * underline the space we insert. *) + let should_underline = is_first_underlined word && last_underlined () in + let space = + let style = + if should_underline then + Tty.Underline Tty.Default + else + Tty.Normal Tty.Default + in + (style, " ") + in + let last_underlined () = is_last_underlined word in + (last_underlined, (pos + 1 + len, (space :: word) :: acc))) + init + words in - List.rev_append message acc - ) [] message) in - (* Create the indentation for our message. The first line of indentation - * will contain a bullet character. *) - let indentation_space = - if indentation <= 0 then - None - else - Some (String.make ((indentation - 1) * 3) ' ') - in - let indentation_first = - Option.map indentation_space ~f:(fun space -> - [default_style (space ^ " " ^ bullet_char ~flags ^ " ")]) + Core_list.concat (List.rev acc) in - let indentation = - Option.map indentation_space ~f:(fun space -> - [default_style (space ^ " ")]) - in - let message = - concat_words_into_lines - ~line_length:flags.message_width - ?indentation_first - ?indentation - (split_into_words message) + (* Create the tuple structure we pass into split_into_words. Code is not + * breakable but Text is breakable. 
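
(* The wrapping fold above, reduced to a sketch on plain strings: keep
 * appending " " ^ word while the line stays within line_length, otherwise
 * start a new line. Indentation, underlined spaces and the hard-break
 * fallback are left out; `wrap` is an illustrative stand-in. *)
let wrap ~line_length (words : string list) : string =
  match words with
  | [] -> ""
  | first :: rest ->
    snd
      (List.fold_left
         (fun (pos, acc) word ->
           let len = String.length word in
           if pos + 1 + len > line_length then
             (len, acc ^ "\n" ^ word)
           else
             (pos + 1 + len, acc ^ " " ^ word))
         (String.length first, first)
         rest)

(* wrap ~line_length:12 ["cannot"; "call"; "foo.bar"; "here"] gives
 * "cannot call\nfoo.bar here". *)
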
*) + let print_message_inline ~flags ~reference = function + | Code s when not (Tty.should_color flags.color) -> + (false, Tty.Normal Tty.Default, "`" ^ s ^ "`") + | Text s when reference -> (true, Tty.Underline Tty.Default, s) + | Code s when reference -> (false, Tty.BoldUnderline Tty.Default, s) + | Text s -> (true, Tty.Normal Tty.Default, s) + | Code s -> (false, Tty.Bold Tty.Default, s) in - message @ [default_style "\n"] + (* Put it all together! *) + fun ~flags ~colors ~indentation message -> + let message = + List.rev + (List.fold_left + (fun acc feature -> + match feature with + | Inline inlines -> + List.rev_append + (Core_list.map ~f:(print_message_inline ~flags ~reference:false) inlines) + acc + | Reference (inlines, id) -> + let message = + Core_list.map ~f:(print_message_inline ~flags ~reference:true) inlines + @ [ + (false, Tty.Normal Tty.Default, " "); + (false, Tty.Dim Tty.Default, "["); + (false, Tty.Normal (get_tty_color id colors), string_of_int id); + (false, Tty.Dim Tty.Default, "]"); + ] + in + List.rev_append message acc) + [] + message) + in + (* Create the indentation for our message. The first line of indentation + * will contain a bullet character. *) + let indentation_space = + if indentation <= 0 then + None + else + Some (String.make ((indentation - 1) * 3) ' ') + in + let indentation_first = + Option.map indentation_space ~f:(fun space -> + [default_style (space ^ " " ^ bullet_char ~flags ^ " ")]) + in + let indentation = + Option.map indentation_space ~f:(fun space -> [default_style (space ^ " ")]) + in + let message = + concat_words_into_lines + ~line_length:flags.message_width + ?indentation_first + ?indentation + (split_into_words message) + in + message @ [default_style "\n"]) (* Shows 3 lines of context in both directions from the root location. *) let root_context_lines = 3 @@ -1819,21 +1877,24 @@ module Cli_output = struct * drawing [U+2502][1] character. Otherwise we use an ascii pipe symbol. * * [1]: http://graphemica.com/%E2%94%82 *) - let vertical_line ~flags = if flags.unicode then "\xE2\x94\x82" else "|" + let vertical_line ~flags = + if flags.unicode then + "\xE2\x94\x82" + else + "|" (* Prints a File_key.t to a string. *) let print_file_key ~strip_root file_key = - let open File_key in - match file_key with - | Some (LibFile filename) - -> relative_lib_path ~strip_root filename - | Some (SourceFile filename) - | Some (JsonFile filename) - | Some (ResourceFile filename) - -> relative_path ~strip_root filename - | Some (Builtins) - | None - -> "(builtins)" + File_key.( + match file_key with + | Some (LibFile filename) -> relative_lib_path ~strip_root filename + | Some (SourceFile filename) + | Some (JsonFile filename) + | Some (ResourceFile filename) -> + relative_path ~strip_root filename + | Some Builtins + | None -> + "(builtins)") exception Oh_no_file_contents_have_changed @@ -1843,300 +1904,368 @@ module Cli_output = struct * * We render the root location for our friendly error message. Decorated with * the reference locations from the message. *) - let print_code_frames_friendly - ~stdin_file - ~strip_root - ~flags - ~references - ~colors - ~tags - root_loc - = - let open Loc in - (* Get a list of all the locations we will want to display. We want to - * display references and the root location with some extra lines - * for context. *) - let locs = - (* Expand the root location with 3 lines of context in either direction. - * However, don't expand before the first line or after the last line. 
If - * we expand past the last line then read_lines_in_file will skip - * those lines. *) - let expanded_root_loc = - let start_line = max 1 (root_loc.start.line - root_context_lines) in - let end_line = root_loc._end.line + root_context_lines in - { root_loc with - start = { root_loc.start with line = start_line }; - _end = { root_loc._end with line = end_line }; - } - in - expanded_root_loc :: List.map snd (IMap.bindings references) - in - (* Group our locs by their file key. - * - * Also split large locs into two smaller locs. - * - * Also compute the largest line number. We need this to compute the - * gutter width. *) - let (max_line, locs) = List.fold_left (fun (max_line, acc) loc -> - match loc.source with - | None -> failwith "expected loc to have a source" - | Some source -> - (* If our loc is larger then some threshold determined by - * omit_after_lines then split it into two locs. *) - let new_locs = - if ((loc._end.line - loc.start.line) + 1) <= (omit_after_lines * 2) then - [loc] - else - let loc1 = { loc with - _end = { loc.start with - line = loc.start.line + (omit_after_lines - 1); - }; - } in - let loc2 = { loc with - start = { loc._end with - line = loc._end.line - (omit_after_lines - 1); - }; - } in - [loc1; loc2] - in - (* Add the new locs to our FileKeyMap. *) - let locs = Option.value (FileKeyMap.get source acc) ~default:[] in - (max max_line loc._end.line, FileKeyMap.add source (new_locs @ locs) acc) - ) (0, FileKeyMap.empty) locs in - (* Perform some organization operations on our locs. *) - let locs = FileKeyMap.map (fun locs -> - (* Sort all of the locations we want to display. Locations in the root - * file should appear first. Sort in the reverse direction. Our next merge - * step will flip the list back around. *) - let locs = List.sort Loc.compare locs in - (* Merge the locations we want to display. We start with the location with - * the lowest line number. Our fold depends on this to merge correctly. *) - let locs = List.fold_left (fun acc loc -> - match acc with - (* Init. *) - | [] -> [loc] - (* If the previous loc + 3 lines below intersects with the next loc then - * we want to merge those locs into one code frame. *) - | last_loc :: acc when ( - Loc.lines_intersect loc - { last_loc with - _end = { last_loc._end with - line = last_loc._end.line + (merge_nearby_lines + 1); - }; - } - ) -> - let loc = { - source = last_loc.source; - start = if pos_cmp loc.start last_loc.start < 0 then loc.start else last_loc.start; - _end = if pos_cmp loc._end last_loc._end > 0 then loc._end else last_loc._end; - } in - loc :: acc - (* Otherwise, add the loc by itself. *) - | acc -> loc :: acc - ) [] locs in - (* Return the reversed locs. *) - List.rev locs - ) locs in - (* Organize all our references onto the line which we will find them on. We - * do a second pass to sort these references and determine the - * gutter width. 
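
(* The omit_after_lines rule above, as a sketch on plain (start, end) line
 * pairs instead of Loc.t: a span longer than 2 * omit_after_lines keeps only
 * its first and last omit_after_lines lines, leaving the middle to be
 * rendered as a ":" separator. `split_long_span` is illustrative only. *)
let split_long_span ~omit_after_lines (start_line, end_line) =
  if end_line - start_line + 1 <= omit_after_lines * 2 then
    [(start_line, end_line)]
  else
    [ (start_line, start_line + omit_after_lines - 1);
      (end_line - omit_after_lines + 1, end_line) ]

(* split_long_span ~omit_after_lines:2 (10, 20) = [(10, 11); (19, 20)],
 * while a short span such as (10, 13) is returned unchanged. *)
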
*) - let file_line_references = IMap.fold (fun id loc file_line_references -> - if id < 0 then - file_line_references - else - match loc.source with - | None -> failwith "expected loc to have a source" - | Some source -> - let line_references = - Option.value (FileKeyMap.get source file_line_references) ~default:IMap.empty in - let references = - Option.value (IMap.get loc.start.line line_references) ~default:[] in - let references = (id, loc.start) :: references in - let line_references = IMap.add loc.start.line references line_references in - FileKeyMap.add source line_references file_line_references - ) references FileKeyMap.empty in - (* Create the styled text which we will put in the gutter for all of our - * file line references. - * - * We use a ref for gutter_width since we want to map file_line_references - * in place instead of using a fold which would re-create the map. *) - let gutter_width = ref 5 in - let file_line_references = FileKeyMap.map (IMap.map (fun references -> - (* Reverse sort the references by their start position. We reverse sort - * since we fold_left next. *) - let references = List.sort (fun (_, a) (_, b) -> pos_cmp b a) references in - (* Fold the list. Creating the width and the string we will - * ultimately render. *) - let (width, references) = List.fold_left (fun (width, acc) (id, _) -> - let string_id = string_of_int id in - let width = width + 2 + String.length string_id in - let acc = - (dim_style "[") - :: (Tty.Normal (get_tty_color id colors), string_id) - :: (dim_style "]") - :: acc + let print_code_frames_friendly ~stdin_file ~strip_root ~flags ~references ~colors ~tags root_loc + = + Loc.( + (* Get a list of all the locations we will want to display. We want to + * display references and the root location with some extra lines + * for context. *) + let locs = + (* Expand the root location with 3 lines of context in either direction. + * However, don't expand before the first line or after the last line. If + * we expand past the last line then read_lines_in_file will skip + * those lines. *) + let expanded_root_loc = + let start_line = max 1 (root_loc.start.line - root_context_lines) in + let end_line = root_loc._end.line + root_context_lines in + { + root_loc with + start = { root_loc.start with line = start_line }; + _end = { root_loc._end with line = end_line }; + } in - (width, acc) - ) (1, [default_style " "]) references in - let (width, references) = (width + 1, default_style " " :: references) in - (* Set gutter_width to the larger of the current gutter_width or the width - * for this line. *) - gutter_width := max !gutter_width width; - (* Return the final list of references for the line along with the width. *) - (width, references) - )) file_line_references in - let gutter_width = !gutter_width in - (* Get the line number gutter length by looking at the string length for the - * maximum line number. - * - * Sometimes, the maximum line number will not be read. So this might not be - * the true maximum line number. However, for the purposes of - * max_line_number_length this imprecision is not important. - * - * The penalty for this imprecision is our code frame gutter might be a - * little wider then it needs to be in unlikely edge cases. *) - let max_line_number_length = String.length (string_of_int max_line) in - let vertical_line = vertical_line ~flags in - (* Print the code frame for each loc. Highlighting appropriate references. *) - let code_frames = FileKeyMap.mapi (fun file_key locs -> - (* Used by read_lines_in_file. 
*) - let filename = file_of_source (Some file_key) in - (* Get some data structures associated with this file. *) - let tags = Option.value (FileKeyMap.get file_key tags) ~default:[] in - let line_references = - Option.value (FileKeyMap.get file_key file_line_references) ~default:IMap.empty in - (* Fold all the locs for this file into code frames. *) - let (_, _, code_frames) = List.fold_left (fun (tags, opened, code_frames) loc -> - (* Read the lines from this location. *) - let lines = read_lines_in_file loc filename stdin_file in - match lines with - | None -> - (* Failed to read the file, so skip this code frame *) - (tags, opened, code_frames) - | Some lines -> - try - (* Create the code frame styles. *) - let (_, tags, opened, code_frame) = List.fold_left (fun (n, tags, opened, acc) line -> - (* Loop which will paint the different parts of a line of code in - * our code frame. Eats tags on the current line. *) - let rec loop acc col tags opened line = - (* Get the current style for the line. *) - let style = Option.value_map (get_tty_color_from_stack opened colors) - ~f:(fun color -> Tty.Normal color) ~default:(Tty.Dim Tty.Default) in - match tags with - (* If we have no more tags then use our current style with - * the line. *) - | [] -> - (tags, opened, (style, line) :: acc) - (* If we have a tag on this line then eat it and add the new - * opened tag to `opened`. Note that our condition depends on tag - * being well formed by layout_references! *) - | (pos, tag_id, tag_kind) :: tags when pos.line = n -> - let opened = match tag_kind with - | Open _ -> tag_id :: opened - | Close -> List.filter (fun id -> tag_id <> id) opened + expanded_root_loc :: Core_list.map ~f:snd (IMap.bindings references) + in + (* Group our locs by their file key. + * + * Also split large locs into two smaller locs. + * + * Also compute the largest line number. We need this to compute the + * gutter width. *) + let (max_line, locs) = + List.fold_left + (fun (max_line, acc) loc -> + match loc.source with + | None -> failwith "expected loc to have a source" + | Some source -> + (* If our loc is larger then some threshold determined by + * omit_after_lines then split it into two locs. *) + let new_locs = + if loc._end.line - loc.start.line + 1 <= omit_after_lines * 2 then + [loc] + else + let loc1 = + { + loc with + _end = { loc.start with line = loc.start.line + (omit_after_lines - 1) }; + } in - let split = pos.column - col in - let left, right = - (* TODO: Get a SHA for each file when we parse it, and include the SHA in the - * loc. Then, we can know for sure whether a file has changed or not when we - * go to pretty print an error. - * - * Here we only know for sure that a file has changed when a particular line - * is too short, which means we can sometimes print bad code frames. - *) - try - String.sub line 0 split, String.sub line split (String.length line - split) - with Invalid_argument _ -> - raise Oh_no_file_contents_have_changed + let loc2 = + { + loc with + start = { loc._end with line = loc._end.line - (omit_after_lines - 1) }; + } in - let acc = (style, left) :: acc in - loop acc pos.column tags opened right - (* If we do not have a tag on this line then use our current style - * with this line of code. *) - | tags -> - (tags, opened, (style, line) :: acc) + [loc1; loc2] in - (* Start that loop! *) - let (tags, opened, code_line) = loop [] 0 tags opened line in - let code_line = List.rev code_line in - (* Create the gutter text. 
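
(* A sketch of the tag-eating loop above: a line of source is cut at each tag
 * column so every piece can be styled by whichever references are open at
 * that point. Here tags are just ascending columns; the real loop also
 * tracks Open/Close kinds and terminal styles. `split_line_at_tags` is a
 * stand-in, not code from this module. *)
let split_line_at_tags line tag_columns =
  let rec loop acc col = function
    | [] -> List.rev (String.sub line col (String.length line - col) :: acc)
    | tag_col :: rest ->
      loop (String.sub line col (tag_col - col) :: acc) tag_col rest
  in
  loop [] 0 tag_columns

(* split_line_at_tags "let x = foo.bar in x" [8; 15]
 * = ["let x = "; "foo.bar"; " in x"] *)
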
*) - let gutter = - match IMap.get n line_references with - | None -> - [default_style (String.make gutter_width ' ')] - | Some (width, references) when width < gutter_width -> - default_style (String.make (gutter_width - width) ' ') :: references - | Some (_, references) -> - references - in - (* Create the next line. *) - let next_line = - (* Get the line number string with appropriate padding. *) - let line_number = - let n = string_of_int n in - let padding = String.make (max_line_number_length - (String.length n)) ' ' in - let n = (Tty.Dim Tty.Default, n) in - [default_style padding; n; dim_style vertical_line] + (* Add the new locs to our FileKeyMap. *) + let locs = Option.value (FileKeyMap.get source acc) ~default:[] in + (max max_line loc._end.line, FileKeyMap.add source (new_locs @ locs) acc)) + (0, FileKeyMap.empty) + locs + in + (* Perform some organization operations on our locs. *) + let locs = + FileKeyMap.map + (fun locs -> + (* Sort all of the locations we want to display. Locations in the root + * file should appear first. Sort in the reverse direction. Our next merge + * step will flip the list back around. *) + let locs = List.sort Loc.compare locs in + (* Merge the locations we want to display. We start with the location with + * the lowest line number. Our fold depends on this to merge correctly. *) + let locs = + List.fold_left + (fun acc loc -> + match acc with + (* Init. *) + | [] -> [loc] + (* If the previous loc + 3 lines below intersects with the next loc then + * we want to merge those locs into one code frame. *) + | last_loc :: acc + when Loc.lines_intersect + loc + { + last_loc with + _end = + { + last_loc._end with + line = last_loc._end.line + (merge_nearby_lines + 1); + }; + } -> + let loc = + { + source = last_loc.source; + start = + ( if pos_cmp loc.start last_loc.start < 0 then + loc.start + else + last_loc.start ); + _end = + ( if pos_cmp loc._end last_loc._end > 0 then + loc._end + else + last_loc._end ); + } + in + loc :: acc + (* Otherwise, add the loc by itself. *) + | acc -> loc :: acc) + [] + locs + in + (* Return the reversed locs. *) + List.rev locs) + locs + in + (* Organize all our references onto the line which we will find them on. We + * do a second pass to sort these references and determine the + * gutter width. *) + let file_line_references = + IMap.fold + (fun id loc file_line_references -> + if id < 0 then + file_line_references + else + match loc.source with + | None -> failwith "expected loc to have a source" + | Some source -> + let line_references = + Option.value (FileKeyMap.get source file_line_references) ~default:IMap.empty in - gutter @ - line_number @ - (* If the line is empty then strip the whitespace which would be - * trailing whitespace anyways. *) - (if line = "" then [] else [default_style " "]) @ - code_line @ - [default_style "\n"] - in - (* Increment our line count and add the next line to - * our accumulator. *) - (n + 1, tags, opened, next_line :: acc) - ) (loc.start.line, tags, opened, []) (Nel.to_list lines) in - (tags, opened, List.concat (List.rev code_frame) :: code_frames) - with - | Oh_no_file_contents_have_changed -> - (* Realized the file has changed, so skip this code frame *) - (tags, opened, code_frames) - ) (tags, [], []) locs in - match code_frames with - | [] -> [] - | code_frame :: code_frames -> - (* Add all of our code frames together with a colon for omitted chunks - * of code in the file. 
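
(* The line-number gutter above, reduced to a plain-string sketch: pad each
 * line number to the width of the largest one and follow it with the
 * vertical bar. `render_gutter` is illustrative; the real code builds a
 * styled (Tty.style * string) list and also leaves room for reference ids. *)
let render_gutter ~max_line n line =
  let width = String.length (string_of_int max_line) in
  let n_str = string_of_int n in
  (* Assumes n <= max_line so the padding length is never negative. *)
  let padding = String.make (width - String.length n_str) ' ' in
  padding ^ n_str ^ "|" ^ (if line = "" then "" else " " ^ line)

(* render_gutter ~max_line:120 9 "let x = 1" = "  9| let x = 1" *)
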
*) - List.concat (List.fold_left (fun acc code_frame -> - code_frame :: [ - default_style (String.make (gutter_width + max_line_number_length) ' '); - dim_style ":"; - default_style "\n"; - ] :: acc - ) [code_frame] code_frames) - ) locs in - (* Get the root code frame from our map of code frames. We will start with - * this code frame. *) - let root_file_key = match root_loc.source with - | None -> failwith "expected loc to have a source" - | Some file_key -> file_key - in - let root_code_frame = FileKeyMap.get root_file_key code_frames in - let code_frames = FileKeyMap.remove root_file_key code_frames in - (* If we only have a root code frame then only render that. *) - if FileKeyMap.is_empty code_frames then - Option.value root_code_frame ~default:[] - else - let code_frames = FileKeyMap.bindings code_frames in - let code_frames = match root_code_frame with - | None -> code_frames - | Some root_code_frame -> (root_file_key, root_code_frame) :: code_frames + let references = + Option.value (IMap.get loc.start.line line_references) ~default:[] + in + let references = (id, loc.start) :: references in + let line_references = IMap.add loc.start.line references line_references in + FileKeyMap.add source line_references file_line_references) + references + FileKeyMap.empty + in + (* Create the styled text which we will put in the gutter for all of our + * file line references. + * + * We use a ref for gutter_width since we want to map file_line_references + * in place instead of using a fold which would re-create the map. *) + let gutter_width = ref 5 in + let file_line_references = + FileKeyMap.map + (IMap.map (fun references -> + (* Reverse sort the references by their start position. We reverse sort + * since we fold_left next. *) + let references = List.sort (fun (_, a) (_, b) -> pos_cmp b a) references in + (* Fold the list. Creating the width and the string we will + * ultimately render. *) + let (width, references) = + List.fold_left + (fun (width, acc) (id, _) -> + let string_id = string_of_int id in + let width = width + 2 + String.length string_id in + let acc = + dim_style "[" + :: (Tty.Normal (get_tty_color id colors), string_id) + :: dim_style "]" + :: acc + in + (width, acc)) + (1, [default_style " "]) + references + in + let (width, references) = (width + 1, default_style " " :: references) in + (* Set gutter_width to the larger of the current gutter_width or the width + * for this line. *) + gutter_width := max !gutter_width width; + + (* Return the final list of references for the line along with the width. *) + (width, references))) + file_line_references + in + let gutter_width = !gutter_width in + (* Get the line number gutter length by looking at the string length for the + * maximum line number. + * + * Sometimes, the maximum line number will not be read. So this might not be + * the true maximum line number. However, for the purposes of + * max_line_number_length this imprecision is not important. + * + * The penalty for this imprecision is our code frame gutter might be a + * little wider then it needs to be in unlikely edge cases. *) + let max_line_number_length = String.length (string_of_int max_line) in + let vertical_line = vertical_line ~flags in + (* Print the code frame for each loc. Highlighting appropriate references. *) + let code_frames = + FileKeyMap.mapi + (fun file_key locs -> + (* Used by read_lines_in_file. *) + let filename = file_of_source (Some file_key) in + (* Get some data structures associated with this file. 
*) + let tags = Option.value (FileKeyMap.get file_key tags) ~default:[] in + let line_references = + Option.value (FileKeyMap.get file_key file_line_references) ~default:IMap.empty + in + (* Fold all the locs for this file into code frames. *) + let (_, _, code_frames) = + List.fold_left + (fun (tags, opened, code_frames) loc -> + (* Read the lines from this location. *) + let lines = read_lines_in_file loc filename stdin_file in + match lines with + | None -> + (* Failed to read the file, so skip this code frame *) + (tags, opened, code_frames) + | Some lines -> + (try + (* Create the code frame styles. *) + let (_, tags, opened, code_frame) = + List.fold_left + (fun (n, tags, opened, acc) line -> + (* Loop which will paint the different parts of a line of code in + * our code frame. Eats tags on the current line. *) + let rec loop acc col tags opened line = + (* Get the current style for the line. *) + let style = + Option.value_map + (get_tty_color_from_stack opened colors) + ~f:(fun color -> Tty.Normal color) + ~default:(Tty.Dim Tty.Default) + in + match tags with + (* If we have no more tags then use our current style with + * the line. *) + | [] -> (tags, opened, (style, line) :: acc) + (* If we have a tag on this line then eat it and add the new + * opened tag to `opened`. Note that our condition depends on tag + * being well formed by layout_references! *) + | (pos, tag_id, tag_kind) :: tags when pos.line = n -> + let opened = + match tag_kind with + | Open _ -> tag_id :: opened + | Close -> List.filter (fun id -> tag_id <> id) opened + in + let split = pos.column - col in + let (left, right) = + (* TODO: Get a SHA for each file when we parse it, and include the SHA in the + * loc. Then, we can know for sure whether a file has changed or not when we + * go to pretty print an error. + * + * Here we only know for sure that a file has changed when a particular line + * is too short, which means we can sometimes print bad code frames. + *) + try + ( String.sub line 0 split, + String.sub line split (String.length line - split) ) + with Invalid_argument _ -> + raise Oh_no_file_contents_have_changed + in + let acc = (style, left) :: acc in + loop acc pos.column tags opened right + (* If we do not have a tag on this line then use our current style + * with this line of code. *) + | tags -> (tags, opened, (style, line) :: acc) + in + (* Start that loop! *) + let (tags, opened, code_line) = loop [] 0 tags opened line in + let code_line = List.rev code_line in + (* Create the gutter text. *) + let gutter = + match IMap.get n line_references with + | None -> [default_style (String.make gutter_width ' ')] + | Some (width, references) when width < gutter_width -> + default_style (String.make (gutter_width - width) ' ') + :: references + | Some (_, references) -> references + in + (* Create the next line. *) + let next_line = + (* Get the line number string with appropriate padding. *) + let line_number = + let n = string_of_int n in + let padding = + String.make (max_line_number_length - String.length n) ' ' + in + let n = (Tty.Dim Tty.Default, n) in + [default_style padding; n; dim_style vertical_line] + in + gutter + @ line_number + (* If the line is empty then strip the whitespace which would be + * trailing whitespace anyways. *) + @ ( if line = "" then + [] + else + [default_style " "] ) + @ code_line + @ [default_style "\n"] + in + (* Increment our line count and add the next line to + * our accumulator. 
*) + (n + 1, tags, opened, next_line :: acc)) + (loc.start.line, tags, opened, []) + (Nel.to_list lines) + in + (tags, opened, Core_list.concat (List.rev code_frame) :: code_frames) + with Oh_no_file_contents_have_changed -> + (* Realized the file has changed, so skip this code frame *) + (tags, opened, code_frames))) + (tags, [], []) + locs + in + match code_frames with + | [] -> [] + | code_frame :: code_frames -> + (* Add all of our code frames together with a colon for omitted chunks + * of code in the file. *) + Core_list.concat + (List.fold_left + (fun acc code_frame -> + code_frame + :: [ + default_style (String.make (gutter_width + max_line_number_length) ' '); + dim_style ":"; + default_style "\n"; + ] + :: acc) + [code_frame] + code_frames)) + locs + in + (* Get the root code frame from our map of code frames. We will start with + * this code frame. *) + let root_file_key = + match root_loc.source with + | None -> failwith "expected loc to have a source" + | Some file_key -> file_key in - (* Add a title to non-root code frames and concatenate them all together! *) - List.concat (List.rev (List.fold_left (fun acc (file_key, code_frame) -> - let file_key = print_file_key ~strip_root (Some file_key) in - let header = [ - default_style (String.make gutter_width ' '); - default_style (file_key ^ "\n"); - ] in - let header = if acc = [] then header else default_style "\n" :: header in - (header @ code_frame) :: acc - ) [] code_frames)) + let root_code_frame = FileKeyMap.get root_file_key code_frames in + let code_frames = FileKeyMap.remove root_file_key code_frames in + (* If we only have a root code frame then only render that. *) + if FileKeyMap.is_empty code_frames then + Option.value root_code_frame ~default:[] + else + let code_frames = FileKeyMap.bindings code_frames in + let code_frames = + match root_code_frame with + | None -> code_frames + | Some root_code_frame -> (root_file_key, root_code_frame) :: code_frames + in + (* Add a title to non-root code frames and concatenate them all together! *) + Core_list.concat + (List.rev + (List.fold_left + (fun acc (file_key, code_frame) -> + let file_key = print_file_key ~strip_root (Some file_key) in + let header = + [default_style (String.make gutter_width ' '); default_style (file_key ^ "\n")] + in + let header = + if acc = [] then + header + else + default_style "\n" :: header + in + (header @ code_frame) :: acc) + [] + code_frames))) (* =================================== * Error Message Colorless Code Frames @@ -2145,150 +2274,159 @@ module Cli_output = struct * Renders the root location along with reference locations, but * without color! *) let print_colorless_code_frames_friendly - ~stdin_file - ~strip_root - ~flags - ~references - ~root_reference_id - root_loc - = - let open Loc in - let vertical_line = vertical_line ~flags in - (* Get the maximum end line number. We will use this for computing our - * gutter width. *) - let max_end_line = - IMap.fold (fun _ loc max_end_line -> - max max_end_line loc._end.line - ) references root_loc._end.line - in - (* Get the max gutter extension length which is the length of the longest - * line number plus 3. *) - let gutter_width = 3 + String.length (string_of_int max_end_line) in - (* Prints a single, colorless, location. *) - let print_loc ~with_filename id loc = - (* Get the lines for the location... 
*) - let filename = file_of_source loc.source in - let lines = read_lines_in_file loc filename stdin_file in - let lines = Option.map lines (fun line_list -> - (* Print every line by appending the line number and appropriate - * gutter width. *) - let (_, lines) = - Nel.fold_left (fun (n, lines) line -> - (* If we show more lines then some upper limit omit any extra code. *) - if n >= loc.start.line + omit_after_lines && n <= loc._end.line - omit_after_lines then - if n = loc.start.line + omit_after_lines then - let gutter = String.make gutter_width ' ' in - (n + 1, lines ^ gutter ^ ":\n") - else - (n + 1, lines) - (* Otherwise, render the line. *) - else - let n_string = string_of_int n in - let gutter_space = String.make (gutter_width - String.length n_string) ' ' in - let gutter = gutter_space ^ n_string ^ vertical_line in + ~stdin_file ~strip_root ~flags ~references ~root_reference_id root_loc = + Loc.( + let vertical_line = vertical_line ~flags in + (* Get the maximum end line number. We will use this for computing our + * gutter width. *) + let max_end_line = + IMap.fold + (fun _ loc max_end_line -> max max_end_line loc._end.line) + references + root_loc._end.line + in + (* Get the max gutter extension length which is the length of the longest + * line number plus 3. *) + let gutter_width = 3 + String.length (string_of_int max_end_line) in + (* Prints a single, colorless, location. *) + let print_loc ~with_filename id loc = + (* Get the lines for the location... *) + let filename = file_of_source loc.source in + let lines = read_lines_in_file loc filename stdin_file in + let lines = + Option.map lines (fun line_list -> + (* Print every line by appending the line number and appropriate + * gutter width. *) + let (_, lines) = + Nel.fold_left + (fun (n, lines) line -> + (* If we show more lines then some upper limit omit any extra code. *) + if + n >= loc.start.line + omit_after_lines + && n <= loc._end.line - omit_after_lines + then + if n = loc.start.line + omit_after_lines then + let gutter = String.make gutter_width ' ' in + (n + 1, lines ^ gutter ^ ":\n") + else + (n + 1, lines) + (* Otherwise, render the line. *) + else + let n_string = string_of_int n in + let gutter_space = String.make (gutter_width - String.length n_string) ' ' in + let gutter = gutter_space ^ n_string ^ vertical_line in + let lines = + if line = "" then + lines ^ gutter ^ "\n" + else + lines ^ gutter ^ " " ^ line ^ "\n" + in + (n + 1, lines)) + (loc.start.line, "") + line_list + in + (* Get our gutter space for the underline and overline. *) + let gutter_space = String.make (gutter_width + 2) ' ' in + (* Add the overline for our loc. *) let lines = - if line = "" then - lines ^ gutter ^ "\n" + if loc.start.line = loc._end.line then + lines else - lines ^ gutter ^ " " ^ line ^ "\n" - in - (n + 1, lines) - ) (loc.start.line, "") line_list - in - (* Get our gutter space for the underline and overline. *) - let gutter_space = String.make (gutter_width + 2) ' ' in - (* Add the overline for our loc. *) - let lines = - if loc.start.line = loc._end.line - then lines - else - let first_line_len = String.length (Nel.hd line_list) in - (* In some cases, we create a location that starts at or after the + let first_line_len = String.length (Nel.hd line_list) in + (* In some cases, we create a location that starts at or after the end of a line. This probably shouldn't happen, but if it does, we can still create an overline with a carat pointing to that column position. 
*) - let first_line_len = max first_line_len (loc.start.column + 1) in - gutter_space ^ - String.make loc.start.column ' ' ^ "v" ^ - String.make (first_line_len - loc.start.column - 1) '-' ^ "\n" ^ - lines + let first_line_len = max first_line_len (loc.start.column + 1) in + gutter_space + ^ String.make loc.start.column ' ' + ^ "v" + ^ String.make (first_line_len - loc.start.column - 1) '-' + ^ "\n" + ^ lines + in + (* Add the underline for our loc. *) + let lines = + lines + ^ gutter_space + ^ + if loc.start.line = loc._end.line then + String.make loc.start.column ' ' + ^ String.make (loc._end.column - loc.start.column) '^' + else + let last_line = Nel.hd (Nel.rev line_list) in + (* Don't underline the whitespace at the beginning of the last line *) + let underline_prefix = + if Str.string_match (Str.regexp "^\\([\t ]*\\).*") last_line 0 then + Str.matched_group 1 last_line + else + "" + in + (* TODO - if dash_length is less than 0 that means the line in question probably + * changed. As mentioned in another comment in this file, we should have better + * detection and behavior when we notice that the file we're reading for context has + * changed. But at the very least we shouldn't crash, which is what will happen if + * we call String.make with a negative length *) + let dash_length = loc._end.column - String.length underline_prefix - 1 in + underline_prefix ^ String.make (max dash_length 0) '-' ^ "^" + in + (* If we have a reference id then add it just after the underline. *) + let lines = + match id with + | Some id when id > 0 -> lines ^ " [" ^ string_of_int id ^ "]" + | _ -> lines + in + (* Add a final newline to lines. *) + let lines = lines ^ "\n" in + (* Return our final lines string *) + lines) in - (* Add the underline for our loc. *) - let lines = lines ^ gutter_space ^ ( - if loc.start.line = loc._end.line then - String.make loc.start.column ' ' ^ - String.make (loc._end.column - loc.start.column) '^' - else - let last_line = Nel.hd (Nel.rev line_list) in - (* Don't underline the whitespace at the beginning of the last line *) - let underline_prefix = - if Str.string_match (Str.regexp "^\\([\t ]*\\).*") last_line 0 then - Str.matched_group 1 last_line - else - "" + (* If we were configured to print the filename then add it to our lines + * before returning. *) + if not with_filename then + lines + else + Option.map lines (fun lines -> + let space = String.make 3 ' ' in + let filename = print_file_key ~strip_root loc.source in + let filename = + filename + ^ ":" + ^ string_of_int loc.start.line + ^ ":" + ^ string_of_int (loc.start.column + 1) + in + space ^ filename ^ "\n" ^ lines) + in + (* Print the locations for all of our references. *) + let references = + IMap.fold + (fun id loc acc -> + let is_root = + Option.value_map root_reference_id ~default:false ~f:(fun root_id -> root_id = id) in - (* TODO - if dash_length is less than 0 that means the line in question probably - * changed. As mentioned in another comment in this file, we should have better - * detection and behavior when we notice that the file we're reading for context has - * changed. But at the very least we shouldn't crash, which is what will happen if - * we call String.make with a negative length *) - let dash_length = loc._end.column - String.length underline_prefix - 1 in - underline_prefix ^ - String.make (max dash_length 0) '-' ^ - "^" - ) in - (* If we have a reference id then add it just after the underline. 
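
(* The single-line underline drawn above, as a sketch: spaces up to the start
 * column, then one '^' per column the location covers. `underline` is a
 * stand-in; the real code also draws a "v----" overline and a "----^"
 * underline for locations that span several lines. *)
let underline ~start_column ~end_column =
  String.make start_column ' ' ^ String.make (end_column - start_column) '^'

(* For a location covering columns 8-15 (the "foo.bar" in
 * "let x = foo.bar in x"), underline ~start_column:8 ~end_column:15
 * returns eight spaces followed by seven carets. *)
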
*) - let lines = match id with - | Some id when id > 0 -> lines ^ " [" ^ string_of_int id ^ "]" - | _ -> lines - in - (* Add a final newline to lines. *) - let lines = lines ^ "\n" in - (* Return our final lines string *) - lines - ) in - (* If we were configured to print the filename then add it to our lines - * before returning. *) - if not with_filename then - lines - else Option.map lines (fun lines -> - let space = String.make 3 ' ' in - let filename = print_file_key ~strip_root loc.source in - let filename = - filename - ^ ":" ^ string_of_int loc.start.line - ^ ":" ^ string_of_int (loc.start.column + 1) - in - space ^ filename ^ "\n" ^ lines - ) - in - (* Print the locations for all of our references. *) - let references = IMap.fold (fun id loc acc -> - let is_root = - Option.value_map root_reference_id - ~default:false - ~f:(fun root_id -> root_id = id) + (* Skip this reference if either it is a "shadow reference" or it is the + * reference for the root. *) + if id <= 0 || is_root then + acc + else + let code = print_loc ~with_filename:true (Some id) loc in + match code with + | None -> acc + | Some code -> acc ^ code) + references + "" in - (* Skip this reference if either it is a "shadow reference" or it is the - * reference for the root. *) - if id <= 0 || is_root then - acc - else - let code = print_loc ~with_filename:true (Some id) loc in - match code with - | None -> acc - | Some code -> acc ^ code - ) references "" in - (* Add the "References:" label if we have some references. *) - let references = - match references with - | "" -> "" - | _ -> "\nReferences:\n" ^ references - in - (* Print the root location. *) - match print_loc ~with_filename:(references <> "") root_reference_id root_loc with - | Some root_code -> [default_style (root_code ^ references)] - | None -> [default_style references] + (* Add the "References:" label if we have some references. *) + let references = + match references with + | "" -> "" + | _ -> "\nReferences:\n" ^ references + in + (* Print the root location. *) + match print_loc ~with_filename:(references <> "") root_reference_id root_loc with + | Some root_code -> [default_style (root_code ^ references)] + | None -> [default_style references]) (* Goes through the process of laying out a friendly error message group by * combining our lower level functions like extract_references_intermediate @@ -2309,177 +2447,170 @@ module Cli_output = struct * * We generally also provide a custom color to layout_references for these * "shadow references" so we don't use their default layout color. *) - let layout_friendly_error_group - ~root_loc - ~primary_locs - ~message_group - = - let open Friendly in - (* Setup our initial loc_to_id and id_to_loc maps. *) - let (next_id, loc_to_id, id_to_loc) = (1, LocMap.empty, IMap.empty) in - (* Extract all our references from the message group. *) - let (next_id, loc_to_id, id_to_loc, message_group) = - extract_references_intermediate - ~next_id ~loc_to_id ~id_to_loc ~message_group - in - (* Find all the references for primary locations. If there is not yet a - * reference for a primary location then we create one. *) - let (next_id, loc_to_id, id_to_loc, primary_loc_ids) = - LocSet.fold (fun loc (next_id, loc_to_id, id_to_loc, primary_loc_ids) -> - match LocMap.get loc loc_to_id with - (* If there is a reference for this primary location then don't alter - * our loc_to_id or id_to_loc maps. 
*) - | Some id -> (next_id, loc_to_id, id_to_loc, ISet.add id primary_loc_ids) - (* If there is no reference for this primary location then create a - * negative id. Negative ids will not be rendered in the code - * frame gutter. *) + let layout_friendly_error_group ~root_loc ~primary_locs ~message_group = + Friendly.( + (* Setup our initial loc_to_id and id_to_loc maps. *) + let (next_id, loc_to_id, id_to_loc) = (1, LocMap.empty, IMap.empty) in + (* Extract all our references from the message group. *) + let (next_id, loc_to_id, id_to_loc, message_group) = + extract_references_intermediate ~next_id ~loc_to_id ~id_to_loc ~message_group + in + (* Find all the references for primary locations. If there is not yet a + * reference for a primary location then we create one. *) + let (next_id, loc_to_id, id_to_loc, primary_loc_ids) = + LocSet.fold + (fun loc (next_id, loc_to_id, id_to_loc, primary_loc_ids) -> + match LocMap.get loc loc_to_id with + (* If there is a reference for this primary location then don't alter + * our loc_to_id or id_to_loc maps. *) + | Some id -> (next_id, loc_to_id, id_to_loc, ISet.add id primary_loc_ids) + (* If there is no reference for this primary location then create a + * negative id. Negative ids will not be rendered in the code + * frame gutter. *) + | None -> + let id = -1 * next_id in + let next_id = next_id + 1 in + let loc_to_id = LocMap.add loc id loc_to_id in + let id_to_loc = IMap.add id loc id_to_loc in + let primary_loc_ids = ISet.add id primary_loc_ids in + (next_id, loc_to_id, id_to_loc, primary_loc_ids)) + primary_locs + (next_id, loc_to_id, id_to_loc, ISet.empty) + in + (* Go through a very similar process as primary locations to add a reference + * for the root location and record its id. If a reference already exists + * then we will not higlight our root location any differently! *) + let (next_id, loc_to_id, id_to_loc, root_id, custom_root_color) = + match LocMap.get root_loc loc_to_id with + | Some id -> (next_id, loc_to_id, id_to_loc, Some id, false) | None -> let id = -1 * next_id in let next_id = next_id + 1 in - let loc_to_id = LocMap.add loc id loc_to_id in - let id_to_loc = IMap.add id loc id_to_loc in - let primary_loc_ids = ISet.add id primary_loc_ids in - (next_id, loc_to_id, id_to_loc, primary_loc_ids) - ) primary_locs (next_id, loc_to_id, id_to_loc, ISet.empty) - in - (* Go through a very similar process as primary locations to add a reference - * for the root location and record its id. If a reference already exists - * then we will not higlight our root location any differently! *) - let (next_id, loc_to_id, id_to_loc, root_id, custom_root_color) = - match LocMap.get root_loc loc_to_id with - | Some id -> (next_id, loc_to_id, id_to_loc, Some id, false) - | None -> - let id = -1 * next_id in - let next_id = next_id + 1 in - let loc_to_id = LocMap.add root_loc id loc_to_id in - let id_to_loc = IMap.add id root_loc id_to_loc in - (next_id, loc_to_id, id_to_loc, Some id, true) - in - (* Create a custom color map for our primary location and root locations. *) - let custom_colors = IMap.empty in - (* Set the custom color for all primary loc ids to `Primary. *) - let custom_colors = ISet.fold (fun id custom_colors -> - IMap.add id `Primary custom_colors - ) primary_loc_ids custom_colors in - (* Manually set the custom color for the root loc to `Root. *) - let custom_colors = match root_id with - | Some id when custom_root_color -> IMap.add id `Root custom_colors - | _ -> custom_colors - in - (* Layout all of our references. 
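A note on the id bookkeeping above: primary and root locations reuse an existing reference id when one exists, and otherwise get a fresh negative id so the code-frame gutter never labels them. A reduced sketch of that allocation with an association list standing in for the patch's LocMap (add_shadowable is a hypothetical name):

(* negative ids mark "shadow" references: they are highlighted in the code
   frame but never printed as [N] in the gutter *)
let add_shadowable loc (next_id, loc_to_id) =
  match List.assoc_opt loc loc_to_id with
  | Some id -> (id, (next_id, loc_to_id))
  | None ->
    let id = -next_id in
    (id, (next_id + 1, (loc, id) :: loc_to_id))

Reusing an existing id is also why the root only gets the `Root color when a fresh id was created: an already-referenced root keeps its ordinary reference color.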
Including the negative ones. *) - let (colors, tags) = layout_references ~custom_colors id_to_loc in - (* Return everything we need for printing error messages. *) - let _ = (next_id, loc_to_id) in - (id_to_loc, root_id, colors, tags, message_group) + let loc_to_id = LocMap.add root_loc id loc_to_id in + let id_to_loc = IMap.add id root_loc id_to_loc in + (next_id, loc_to_id, id_to_loc, Some id, true) + in + (* Create a custom color map for our primary location and root locations. *) + let custom_colors = IMap.empty in + (* Set the custom color for all primary loc ids to `Primary. *) + let custom_colors = + ISet.fold + (fun id custom_colors -> IMap.add id `Primary custom_colors) + primary_loc_ids + custom_colors + in + (* Manually set the custom color for the root loc to `Root. *) + let custom_colors = + match root_id with + | Some id when custom_root_color -> IMap.add id `Root custom_colors + | _ -> custom_colors + in + (* Layout all of our references. Including the negative ones. *) + let (colors, tags) = layout_references ~custom_colors id_to_loc in + (* Return everything we need for printing error messages. *) + let _ = (next_id, loc_to_id) in + (id_to_loc, root_id, colors, tags, message_group)) let get_pretty_printed_friendly_error_group - ~stdin_file - ~strip_root - ~flags - ~severity - ~trace - ~root_loc - ~primary_locs - ~message_group - = - let open Friendly in - (* Get the primary and root locations. *) - let primary_loc = - if LocSet.cardinal primary_locs = 1 then - LocSet.min_elt primary_locs - else - root_loc - in - (* The header location is the primary location when we have color and the - * root location when we don't have color. *) - let header = - print_header_friendly ~strip_root ~flags ~severity - (if Tty.should_color flags.color then primary_loc else root_loc) - in - (* Layout our entire friendly error group. This returns a bunch of data we - * will need to print our friendly error group. *) - let (references, root_reference_id, colors, tags, message_group) = - layout_friendly_error_group - ~root_loc - ~primary_locs - ~message_group - in - (* Print the text of our error message by traversing the message_group. We - * print group_message at the current indentation and group_message_list at - * the current indentation plus 1. *) - let message = - let rec loop ~indentation acc message_group = - let acc = - (print_message_friendly ~flags ~colors ~indentation message_group.group_message) :: acc in - loop_list - ~indentation:(indentation + 1) acc message_group.group_message_list - - and loop_list ~indentation acc message_group_list = - match message_group_list with - | [] -> acc - | message_group :: message_group_list -> - (* Not tail-recursive for message_group depth. Generally message_group - * should not be more then 5 or so deep. *) - let acc = loop ~indentation acc message_group in - loop_list ~indentation acc message_group_list - in - List.concat (List.rev (loop ~indentation:0 [] message_group)) - in - (* Print the code frame for our error message. *) - let code_frame = - if Tty.should_color flags.color then - print_code_frames_friendly - ~stdin_file - ~strip_root - ~flags - ~references - ~colors - ~tags + ~stdin_file ~strip_root ~flags ~severity ~trace ~root_loc ~primary_locs ~message_group = + Friendly.( + (* Get the primary and root locations. 
*) + let primary_loc = + if LocSet.cardinal primary_locs = 1 then + LocSet.min_elt primary_locs + else root_loc - else - print_colorless_code_frames_friendly - ~stdin_file + in + (* The header location is the primary location when we have color and the + * root location when we don't have color. *) + let header = + print_header_friendly ~strip_root ~flags - ~references - ~root_reference_id - root_loc - in - (* Put it all together! *) - List.concat [ - (* Header: *) - header; - [default_style "\n"]; - - (* Error Message: *) - message; - - (* Code frame: *) - (match code_frame with [] -> [] | code_frame -> default_style "\n" :: code_frame); - - (* Trace: *) - (match trace with [] -> [] | _ -> [default_style "\n"]); - List.concat (List.map ( - print_message_nice ~strip_root ~severity stdin_file ( - match file_of_source root_loc.Loc.source with - | Some filename -> filename - | None -> "[No file]" - ) - ) (append_trace_reasons [] trace)); - - (* Next error: *) - [default_style "\n"]; - ] + ~severity + ( if Tty.should_color flags.color then + primary_loc + else + root_loc ) + in + (* Layout our entire friendly error group. This returns a bunch of data we + * will need to print our friendly error group. *) + let (references, root_reference_id, colors, tags, message_group) = + layout_friendly_error_group ~root_loc ~primary_locs ~message_group + in + (* Print the text of our error message by traversing the message_group. We + * print group_message at the current indentation and group_message_list at + * the current indentation plus 1. *) + let message = + let rec loop ~indentation acc message_group = + let acc = + print_message_friendly ~flags ~colors ~indentation message_group.group_message :: acc + in + loop_list ~indentation:(indentation + 1) acc message_group.group_message_list + and loop_list ~indentation acc message_group_list = + match message_group_list with + | [] -> acc + | message_group :: message_group_list -> + (* Not tail-recursive for message_group depth. Generally message_group + * should not be more then 5 or so deep. *) + let acc = loop ~indentation acc message_group in + loop_list ~indentation acc message_group_list + in + Core_list.concat (List.rev (loop ~indentation:0 [] message_group)) + in + (* Print the code frame for our error message. *) + let code_frame = + if Tty.should_color flags.color then + print_code_frames_friendly + ~stdin_file + ~strip_root + ~flags + ~references + ~colors + ~tags + root_loc + else + print_colorless_code_frames_friendly + ~stdin_file + ~strip_root + ~flags + ~references + ~root_reference_id + root_loc + in + (* Put it all together! 
*) + Core_list.concat + [ + (* Header: *) + header; + [default_style "\n"]; + (* Error Message: *) + message; + (* Code frame: *) + (match code_frame with + | [] -> [] + | code_frame -> default_style "\n" :: code_frame); + (* Trace: *) + (match trace with + | [] -> [] + | _ -> [default_style "\n"]); + Core_list.concat + (Core_list.map + ~f: + (print_message_nice + ~strip_root + ~severity + stdin_file + (match file_of_source root_loc.Loc.source with + | Some filename -> filename + | None -> "[No file]")) + (append_trace_reasons [] trace)); + (* Next error: *) + [default_style "\n"]; + ]) let get_pretty_printed_error - ~flags - ~stdin_file - ~strip_root - ~severity - ~show_all_branches - ~on_hidden_branches - group - = + ~flags ~stdin_file ~strip_root ~severity ~show_all_branches ~on_hidden_branches group = let check (hidden_branches, a, b) = if hidden_branches then on_hidden_branches (); (a, b) @@ -2489,105 +2620,134 @@ module Cli_output = struct * message and render a single message. Sometimes singletons will also * render a trace. *) | Singleton (_, trace, error) -> - let open Friendly in - let (primary_loc, { group_message; group_message_list }) = - check (message_group_of_error ~show_all_branches ~show_root:true error) in - get_pretty_printed_friendly_error_group - ~stdin_file - ~strip_root - ~flags - ~severity - ~trace - ~root_loc:(match error.root with Some { root_loc; _ } -> root_loc | None -> error.loc) - ~primary_locs:(LocSet.singleton primary_loc) - ~message_group:{ - group_message = capitalize group_message; - group_message_list; - } + Friendly.( + let (primary_loc, { group_message; group_message_list }) = + check (message_group_of_error ~show_all_branches ~show_root:true error) + in + get_pretty_printed_friendly_error_group + ~stdin_file + ~strip_root + ~flags + ~severity + ~trace + ~root_loc: + (match error.root with + | Some { root_loc; _ } -> root_loc + | None -> error.loc) + ~primary_locs:(LocSet.singleton primary_loc) + ~message_group:{ group_message = capitalize group_message; group_message_list }) (* Groups either render a single error (if there is only a single group * member) or a group will render a list of errors where some are * optionally omitted. *) - | Group { kind=_; root; errors_rev; omitted } -> - let open Friendly in - (* Constructs the message group. *) - let (primary_locs, message_group) = match errors_rev with - (* When we only have a single message, append the root and move on. *) - | (error, []) when omitted = 0 -> - let (primary_loc, message_group) = - check (message_group_of_error ~show_all_branches ~show_root:true error) in - (LocSet.singleton primary_loc, message_group) - (* When we have multiple members we need to put them in a group with the - * root message as the group message. *) - | _ -> - let acc = ( - if omitted = 0 then - [] - else [{ - group_message_list = []; - group_message = [text ( - "... " ^ - string_of_int omitted ^ - " more error" ^ - (if omitted = 1 then "" else "s") ^ - "." 
- )]; - }] - ) in - let (primary_locs, group_message_list) = List.fold_left (fun (primary_locs, acc) error -> - let (primary_loc, message_group) = - check (message_group_of_error ~show_all_branches ~show_root:false error) in - (LocSet.add primary_loc primary_locs, message_group :: acc) - ) (LocSet.empty, acc) (Nel.to_list errors_rev) in - (primary_locs, { - group_message = root.root_message @ [text ":"]; - group_message_list; - }) - in - get_pretty_printed_friendly_error_group - ~stdin_file - ~strip_root - ~flags - ~severity - ~trace:[] - ~root_loc:root.root_loc - ~primary_locs - ~message_group + | Group { kind = _; root; errors_rev; omitted } -> + Friendly.( + (* Constructs the message group. *) + let (primary_locs, message_group) = + match errors_rev with + (* When we only have a single message, append the root and move on. *) + | (error, []) when omitted = 0 -> + let (primary_loc, message_group) = + check (message_group_of_error ~show_all_branches ~show_root:true error) + in + (LocSet.singleton primary_loc, message_group) + (* When we have multiple members we need to put them in a group with the + * root message as the group message. *) + | _ -> + let acc = + if omitted = 0 then + [] + else + [ + { + group_message_list = []; + group_message = + [ + text + ( "... " + ^ string_of_int omitted + ^ " more error" + ^ ( if omitted = 1 then + "" + else + "s" ) + ^ "." ); + ]; + }; + ] + in + let (primary_locs, group_message_list) = + List.fold_left + (fun (primary_locs, acc) error -> + let (primary_loc, message_group) = + check (message_group_of_error ~show_all_branches ~show_root:false error) + in + (LocSet.add primary_loc primary_locs, message_group :: acc)) + (LocSet.empty, acc) + (Nel.to_list errors_rev) + in + (primary_locs, { group_message = root.root_message @ [text ":"]; group_message_list }) + in + get_pretty_printed_friendly_error_group + ~stdin_file + ~strip_root + ~flags + ~severity + ~trace:[] + ~root_loc:root.root_loc + ~primary_locs + ~message_group) let print_styles ~out_channel ~flags styles = let styles = if flags.one_line then - List.map remove_newlines styles + Core_list.map ~f:remove_newlines styles else styles in Tty.cprint ~out_channel ~color_mode:flags.color styles; Tty.cprint ~out_channel ~color_mode:flags.color [default_style "\n"] - let print_errors = + let format_errors = let render_counts = - let error_or_errors n = if n != 1 then "errors" else "error" in - let warning_or_warnings n = if n != 1 then "warnings" else "warning" in - + let error_or_errors n = + if n != 1 then + "errors" + else + "error" + in + let warning_or_warnings n = + if n != 1 then + "warnings" + else + "warning" + in fun ~err_count ~warn_count sep -> (* If there are 0 errors and 0 warnings, just render "0 errors" *) if warn_count = 0 then - Printf.sprintf "%d%s%s" - err_count sep (error_or_errors err_count) + Printf.sprintf "%d%s%s" err_count sep (error_or_errors err_count) else if err_count = 0 then - Printf.sprintf "%d%s%s" - warn_count sep (warning_or_warnings warn_count) - else (* err_count > 0 and warn_count > 0 *) - Printf.sprintf "%d%s%s and %d%s%s" - err_count sep (error_or_errors err_count) - warn_count sep (warning_or_warnings warn_count) + Printf.sprintf "%d%s%s" warn_count sep (warning_or_warnings warn_count) + else + (* err_count > 0 and warn_count > 0 *) + Printf.sprintf + "%d%s%s and %d%s%s" + err_count + sep + (error_or_errors err_count) + warn_count + sep + (warning_or_warnings warn_count) in - - fun ~out_channel ~flags ?(stdin_file=None) - ~strip_root ~errors ~warnings ~lazy_msg 
() -> - let truncate = not (flags.show_all_errors) in - let max_count = if truncate then Some 50 else None in - let err_count = ErrorSet.cardinal errors in - let warn_count = ErrorSet.cardinal warnings in + fun ~out_channel ~flags ?(stdin_file = None) ~strip_root ~errors ~warnings ~lazy_msg () -> + let truncate = not flags.show_all_errors in + let max_count = + if truncate then + Some 50 + else + None + in + let err_count = ConcreteLocPrintableErrorSet.cardinal errors in + let warn_count = ConcreteLocPrintableErrorSet.cardinal warnings in let errors = collect_errors_into_groups max_count errors in let warnings = collect_errors_into_groups @@ -2604,319 +2764,414 @@ module Cli_output = struct ~stdin_file ~strip_root ~severity - ~show_all_branches:flags.show_all_branches - (* Feels like React... *) + ~show_all_branches:flags.show_all_branches (* Feels like React... *) ~on_hidden_branches:(fun () -> hidden_branches := true) group in print_styles ~out_channel ~flags styles in List.iter (iter_group ~severity:Err) (List.rev errors); - List.iter (iter_group ~severity:Warn) (List.rev warnings); + List.iter (iter_group ~severity:Warn) (List.rev warnings) in let hidden_branches = !hidden_branches in if total_count > 0 then print_newline (); if truncate && total_count > 50 then ( - let remaining_errs, remaining_warns = if err_count - 50 < 0 - then 0, warn_count - (50 - err_count) - else err_count - 50, warn_count + let (remaining_errs, remaining_warns) = + if err_count - 50 < 0 then + (0, warn_count - (50 - err_count)) + else + (err_count - 50, warn_count) in Printf.fprintf out_channel "... %s (only 50 out of %s displayed)\n" (render_counts ~err_count:remaining_errs ~warn_count:remaining_warns " more ") (render_counts ~err_count ~warn_count " "); + Printf.fprintf out_channel "To see all errors, re-run Flow with --show-all-errors\n"; + flush out_channel + ) else + Printf.fprintf out_channel "Found %s\n" (render_counts ~err_count ~warn_count " "); + if hidden_branches then Printf.fprintf out_channel - "To see all errors, re-run Flow with --show-all-errors\n"; - flush out_channel - ) else ( - Printf.fprintf out_channel "Found %s\n" - (render_counts ~err_count ~warn_count " ") - ); - if hidden_branches then ( - Printf.fprintf out_channel - "\nOnly showing the most relevant union/intersection branches.\n\ - To see all branches, re-run Flow with --show-all-branches\n" - ); + "\nOnly showing the most relevant union/intersection branches.\nTo see all branches, re-run Flow with --show-all-branches\n"; Option.iter lazy_msg ~f:(Printf.fprintf out_channel "\n%s\n"); () + + let print_errors + ~out_channel ~flags ?(stdin_file = None) ~strip_root ~errors ~warnings ~lazy_msg () = + format_errors ~out_channel ~flags ~stdin_file ~strip_root ~errors ~warnings ~lazy_msg () end (* JSON output *) module Json_output = struct type json_version = - | JsonV1 - | JsonV2 + | JsonV1 + | JsonV2 let unwrap_message = function - | BlameM (loc, str) when loc <> Loc.none -> str, Some loc - | BlameM (_, str) | CommentM str -> str, None - - let json_of_message_props ~strip_root message = - let open Hh_json in - let desc, loc = unwrap_message message in - let type_ = match message with - | BlameM _ -> "Blame" - | CommentM _ -> "Comment" in - ("descr", JSON_String desc) :: - ("type", JSON_String type_) :: - match loc with - | None -> deprecated_json_props_of_loc ~strip_root Loc.none - | Some loc -> - ("loc", Reason.json_of_loc ~strip_root loc) :: - deprecated_json_props_of_loc ~strip_root loc + | BlameM (loc, str) when loc <> Loc.none -> 
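Back on the CLI summary above: counts are pluralized and, when output is truncated to the first 50 problems, the remaining counts are split on the assumption that errors print before warnings. A standalone sketch of both pieces (the patch's render_counts also threads a separator string, which is dropped here; function names are illustrative):

let render_counts ~err_count ~warn_count =
  let plural n word = if n = 1 then word else word ^ "s" in
  if warn_count = 0 then
    Printf.sprintf "%d %s" err_count (plural err_count "error")
  else if err_count = 0 then
    Printf.sprintf "%d %s" warn_count (plural warn_count "warning")
  else
    Printf.sprintf "%d %s and %d %s"
      err_count (plural err_count "error")
      warn_count (plural warn_count "warning")

(* errors are printed first, so warnings only count as "remaining" once every
   error has been shown *)
let remaining_counts ~err_count ~warn_count ~shown =
  if err_count < shown then (0, warn_count - (shown - err_count))
  else (err_count - shown, warn_count)

So render_counts ~err_count:1 ~warn_count:3 gives "1 error and 3 warnings", and remaining_counts ~err_count:30 ~warn_count:40 ~shown:50 gives (0, 20).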
(str, Some loc) + | BlameM (_, str) + | CommentM str -> + (str, None) + + let json_of_message_props ~stdin_file ~strip_root message = + Hh_json.( + let (desc, loc) = unwrap_message message in + let type_ = + match message with + | BlameM _ -> "Blame" + | CommentM _ -> "Comment" + in + ("descr", JSON_String desc) + :: ("type", JSON_String type_) + :: + (match loc with + | None -> deprecated_json_props_of_loc ~strip_root Loc.none + | Some loc -> + let offset_table = get_offset_table_expensive ~stdin_file loc in + ("loc", Reason.json_of_loc ~strip_root ~offset_table ~catch_offset_errors:true loc) + :: deprecated_json_props_of_loc ~strip_root loc)) (* Returns the first line of the context *) let json_of_loc_context ~stdin_file loc = - let open Hh_json in - let code_line = match loc with - | None -> None - | Some loc -> - let open Loc in - let filename = file_of_source loc.source in - (match read_lines_in_file loc filename stdin_file with - | Some l -> Some (Nel.hd l) - | None -> None) - in - match code_line with - | None -> JSON_Null - | Some context -> JSON_String context + Hh_json.( + let code_line = + match loc with + | None -> None + | Some loc -> + Loc.( + let filename = file_of_source loc.source in + (match read_lines_in_file loc filename stdin_file with + | Some l -> Some (Nel.hd l) + | None -> None)) + in + match code_line with + | None -> JSON_Null + | Some context -> JSON_String context) let json_of_loc_context_abridged ~stdin_file ~max_len loc = - let open Hh_json in - let code_lines = match loc with - | None -> None - | Some loc -> - let open Loc in - let filename = file_of_source loc.source in - (* Read the lines referenced in the loc *) - (match read_lines_in_file loc filename stdin_file with - | Some l -> - let lines = Nel.to_list l in - let num_lines = List.length lines in - let numbered_lines = - List.mapi (fun i line -> (string_of_int (i + loc.start.line), JSON_String line)) lines in - if num_lines <= max_len - (* There are few enough lines that we can use them all for context *) - then Some numbered_lines - else - (* There are too many lines for context. Let's take some lines from the start of the loc - * and some from the end of the loc *) - let start_len = (max_len + 1) / 2 in (* ceil *) - let end_len = max_len / 2 in (* floor *) - Some ( - (Core_list.sub numbered_lines ~pos:0 ~len:start_len) - @ (Core_list.sub numbered_lines ~pos:(num_lines - end_len) ~len:end_len) - ) - | None -> None) - in - match code_lines with - | None -> JSON_Null - | Some code_lines -> JSON_Object code_lines + Hh_json.( + let code_lines = + match loc with + | None -> None + | Some loc -> + Loc.( + let filename = file_of_source loc.source in + (* Read the lines referenced in the loc *) + (match read_lines_in_file loc filename stdin_file with + | Some l -> + let lines = Nel.to_list l in + let num_lines = List.length lines in + let numbered_lines = + Core_list.mapi + ~f:(fun i line -> (string_of_int (i + loc.start.line), JSON_String line)) + lines + in + if + num_lines <= max_len + (* There are few enough lines that we can use them all for context *) + then + Some numbered_lines + else + (* There are too many lines for context. 
Let's take some lines from the start of the loc + * and some from the end of the loc *) + let start_len = (max_len + 1) / 2 in + (* ceil *) + let end_len = max_len / 2 in + (* floor *) + Some + ( Core_list.sub numbered_lines ~pos:0 ~len:start_len + @ Core_list.sub numbered_lines ~pos:(num_lines - end_len) ~len:end_len ) + | None -> None)) + in + match code_lines with + | None -> JSON_Null + | Some code_lines -> JSON_Object code_lines) let json_of_loc_with_context ~strip_root ~stdin_file loc = - let open Hh_json in - let props = - Reason.json_of_loc_props ~strip_root loc @ - [("context", json_of_loc_context_abridged ~stdin_file ~max_len:5 (Some loc))] - in - JSON_Object props + Hh_json.( + let props = + let offset_table = get_offset_table_expensive ~stdin_file loc in + Reason.json_of_loc_props ~strip_root ~offset_table ~catch_offset_errors:true loc + @ [("context", json_of_loc_context_abridged ~stdin_file ~max_len:5 (Some loc))] + in + JSON_Object props) let json_of_message_with_context ~strip_root ~stdin_file message = - let open Hh_json in - let _, loc = unwrap_message message in - let context = ("context", json_of_loc_context ~stdin_file loc) in - JSON_Object (context :: (json_of_message_props ~strip_root message)) + Hh_json.( + let (_, loc) = unwrap_message message in + let context = ("context", json_of_loc_context ~stdin_file loc) in + JSON_Object (context :: json_of_message_props ~stdin_file ~strip_root message)) let json_of_infos ~json_of_message infos = - let open Hh_json in - JSON_Array (List.map json_of_message (infos_to_messages infos)) + Hh_json.(JSON_Array (Core_list.map ~f:json_of_message (infos_to_messages infos))) let rec json_of_info_tree ~json_of_message tree = - let open Hh_json in - let infos, kids = match tree with - | InfoLeaf infos -> infos, None - | InfoNode (infos, kids) -> infos, Some kids - in - JSON_Object ( - ("message", json_of_infos ~json_of_message infos) :: - match kids with - | None -> [] - | Some kids -> - let kids = List.map (json_of_info_tree ~json_of_message) kids in - ["children", JSON_Array kids] - ) + Hh_json.( + let (infos, kids) = + match tree with + | InfoLeaf infos -> (infos, None) + | InfoNode (infos, kids) -> (infos, Some kids) + in + JSON_Object + ( ("message", json_of_infos ~json_of_message infos) + :: + (match kids with + | None -> [] + | Some kids -> + let kids = Core_list.map ~f:(json_of_info_tree ~json_of_message) kids in + [("children", JSON_Array kids)]) )) let json_of_classic_error_props ~json_of_message error = - let open Hh_json in - let { messages; extra } = error in - let props = [ - "message", JSON_Array (List.map json_of_message messages); - ] in - (* add extra if present *) - if extra = [] then props - else - let extra = List.map (json_of_info_tree ~json_of_message) extra in - ("extra", JSON_Array extra) :: props + Hh_json.( + let { messages; extra } = error in + let props = [("message", JSON_Array (Core_list.map ~f:json_of_message messages))] in + (* add extra if present *) + if extra = [] then + props + else + let extra = Core_list.map ~f:(json_of_info_tree ~json_of_message) extra in + ("extra", JSON_Array extra) :: props) let json_of_message_inline_friendly message_inline = - let open Hh_json in - let open Friendly in - match message_inline with - | Text text -> - JSON_Object [ - ("kind", JSON_String "Text"); - ("text", JSON_String text); - ] - | Code code -> - JSON_Object [ - ("kind", JSON_String "Code"); - ("text", JSON_String code); - ] + Hh_json.( + Friendly.( + match message_inline with + | Text text -> JSON_Object 
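On the abridged context above: at most max_len lines are kept, the ceiling half from the top of the location and the floor half from the bottom. The same slicing on a plain list, as a self-contained sketch (the patch operates on numbered (line, JSON_String _) pairs via Core_list.sub; abridge is an illustrative name):

let abridge ~max_len lines =
  let n = List.length lines in
  if n <= max_len then lines
  else
    let start_len = (max_len + 1) / 2 in (* ceil *)
    let end_len = max_len / 2 in (* floor *)
    let rec take k = function
      | x :: xs when k > 0 -> x :: take (k - 1) xs
      | _ -> []
    in
    let rec drop k = function
      | _ :: xs when k > 0 -> drop (k - 1) xs
      | xs -> xs
    in
    take start_len lines @ drop (n - end_len) lines

With max_len = 5, the value the patch passes, an eight-line location keeps its first three and last two lines.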
[("kind", JSON_String "Text"); ("text", JSON_String text)] + | Code code -> JSON_Object [("kind", JSON_String "Code"); ("text", JSON_String code)])) let json_of_message_friendly message = - let open Hh_json in - let open Friendly in - let message = flatten_message message in - JSON_Array (List.concat (List.map (function - | Inline inlines -> List.map json_of_message_inline_friendly inlines - | Reference (inlines, id) -> [ - JSON_Object [ - ("kind", JSON_String "Reference"); - ("referenceId", JSON_String (string_of_int id)); - ("message", JSON_Array (List.map json_of_message_inline_friendly inlines)); - ]; - ] - ) message)) + Hh_json.( + Friendly.( + let message = flatten_message message in + JSON_Array + (Core_list.concat + (Core_list.map + ~f:(function + | Inline inlines -> Core_list.map ~f:json_of_message_inline_friendly inlines + | Reference (inlines, id) -> + [ + JSON_Object + [ + ("kind", JSON_String "Reference"); + ("referenceId", JSON_String (string_of_int id)); + ( "message", + JSON_Array (Core_list.map ~f:json_of_message_inline_friendly inlines) + ); + ]; + ]) + message)))) let rec json_of_message_group_friendly message_group = - let open Hh_json in - let open Friendly in - let { group_message; group_message_list } = message_group in - let group_message = json_of_message_friendly group_message in - if group_message_list = [] then - group_message - else - JSON_Object [ - ("kind", JSON_String "UnorderedList"); - ("message", group_message); - ("items", JSON_Array (List.map json_of_message_group_friendly group_message_list)); - ] + Hh_json.( + Friendly.( + let { group_message; group_message_list } = message_group in + let group_message = json_of_message_friendly group_message in + if group_message_list = [] then + group_message + else + JSON_Object + [ + ("kind", JSON_String "UnorderedList"); + ("message", group_message); + ( "items", + JSON_Array (Core_list.map ~f:json_of_message_group_friendly group_message_list) ); + ])) let json_of_references ~strip_root ~stdin_file references = - let open Hh_json in - JSON_Object (List.rev (IMap.fold (fun id loc acc -> - (string_of_int id, json_of_loc_with_context ~strip_root ~stdin_file loc) :: acc - ) references [])) + Hh_json.( + JSON_Object + (List.rev + (IMap.fold + (fun id loc acc -> + (string_of_int id, json_of_loc_with_context ~strip_root ~stdin_file loc) :: acc) + references + []))) let json_of_friendly_error_props ~strip_root ~stdin_file error = - let open Hh_json in - let open Friendly in - let (_, primary_loc, message_group) = - message_group_of_error - ~show_all_branches:false ~show_root:true error in - let (references, message_group) = - extract_references message_group in - let root_loc = match error.root with - | None -> JSON_Null - | Some { root_loc; _ } -> json_of_loc_with_context ~strip_root ~stdin_file root_loc - in - [ - (* Unfortunately, Nuclide currently depends on this flag. Remove it in - * the future? *) - ("classic", JSON_Bool false); - (* NOTE: `primaryLoc` is the location we want to show in an IDE! `rootLoc` - * is another loc which Flow associates with some errors. We include it - * for tools which are interested in using the location to enhance - * their rendering. `primaryLoc` will always be inside `rootLoc`. *) - ("primaryLoc", json_of_loc_with_context ~strip_root ~stdin_file primary_loc); - ("rootLoc", root_loc); - (* NOTE: This `messageMarkup` can be concatenated into a string when - * implementing the LSP error output. 
*) - ("messageMarkup", json_of_message_group_friendly message_group); - (* NOTE: These `referenceLocs` can become `relatedLocations` when - * implementing the LSP error output. *) - ("referenceLocs", json_of_references ~strip_root ~stdin_file references); - ] + Hh_json.( + Friendly.( + let (_, primary_loc, message_group) = + message_group_of_error ~show_all_branches:false ~show_root:true error + in + let (references, message_group) = extract_references message_group in + let root_loc = + match error.root with + | None -> JSON_Null + | Some { root_loc; _ } -> json_of_loc_with_context ~strip_root ~stdin_file root_loc + in + [ + (* Unfortunately, Nuclide currently depends on this flag. Remove it in + * the future? *) + ("classic", JSON_Bool false); + (* NOTE: `primaryLoc` is the location we want to show in an IDE! `rootLoc` + * is another loc which Flow associates with some errors. We include it + * for tools which are interested in using the location to enhance + * their rendering. `primaryLoc` will always be inside `rootLoc`. *) + ("primaryLoc", json_of_loc_with_context ~strip_root ~stdin_file primary_loc); + ("rootLoc", root_loc); + (* NOTE: This `messageMarkup` can be concatenated into a string when + * implementing the LSP error output. *) + ("messageMarkup", json_of_message_group_friendly message_group); + (* NOTE: These `referenceLocs` can become `relatedLocations` when + * implementing the LSP error output. *) + ("referenceLocs", json_of_references ~strip_root ~stdin_file references); + ])) let json_of_error_props - ~strip_root ~stdin_file ~version ~json_of_message ~severity - ?(suppression_locs=Utils_js.LocSet.empty) (kind, trace, error) = - let open Hh_json in - let kind_str = match kind with - | ParseError -> "parse" - | InferError -> "infer" - (* "InferWarning"s should still really be treated as errors. (The name is outdated.) *) - | InferWarning -> "infer" - | InternalError -> "internal" - | DuplicateProviderError -> "duplicate provider" - | RecursionLimitError -> "recursion limit exceeded" - | LintError _ -> "lint" - in - let severity_str = output_string_of_severity severity in - let suppressions = suppression_locs - |> Utils_js.LocSet.elements - |> List.map (fun loc -> - JSON_Object [ "loc", Reason.json_of_loc ~strip_root loc] - ) in - let props = [ - "kind", JSON_String kind_str; - "level", JSON_String severity_str; - "suppressions", JSON_Array suppressions; - ] in - props @ - (* add the error type specific props *) - (match version with - | JsonV1 -> json_of_classic_error_props ~json_of_message (Friendly.to_classic error) - | JsonV2 -> json_of_friendly_error_props ~strip_root ~stdin_file error - ) @ - (* add trace if present *) - match trace with - | [] -> [] - | _ -> ["trace", JSON_Array (List.map json_of_message trace)] + ~strip_root + ~stdin_file + ~version + ~json_of_message + ~severity + ?(suppression_locs = Loc_collections.LocSet.empty) + (kind, trace, error) = + Hh_json.( + let kind_str = + match kind with + | ParseError -> "parse" + (* We report this as a parse error even though it wasn't produced by the parser *) + | PseudoParseError -> "parse" + | InferError -> "infer" + (* "InferWarning"s should still really be treated as errors. (The name is outdated.) 
*) + | InferWarning _ -> "infer" + | InternalError -> "internal" + | DuplicateProviderError -> "duplicate provider" + | RecursionLimitError -> "recursion limit exceeded" + | LintError _ -> "lint" + in + let severity_str = output_string_of_severity severity in + let suppressions = + suppression_locs + |> Loc_collections.LocSet.elements + |> Core_list.map ~f:(fun loc -> + let offset_table = get_offset_table_expensive ~stdin_file loc in + JSON_Object + [ + ( "loc", + Reason.json_of_loc ~strip_root ~offset_table ~catch_offset_errors:true loc ); + ]) + in + let props = + [ + ("kind", JSON_String kind_str); + ("level", JSON_String severity_str); + ("suppressions", JSON_Array suppressions); + ] + in + props + (* add the error type specific props *) + @ (match version with + | JsonV1 -> json_of_classic_error_props ~json_of_message (Friendly.to_classic error) + | JsonV2 -> json_of_friendly_error_props ~strip_root ~stdin_file error) + @ + (* add trace if present *) + match trace with + | [] -> [] + | _ -> [("trace", JSON_Array (Core_list.map ~f:json_of_message trace))]) let json_of_error_with_context - ~strip_root ~stdin_file ~version ~severity (error, suppression_locs) = - let json_of_message = - json_of_message_with_context ~strip_root ~stdin_file in - Hh_json.JSON_Object (json_of_error_props ~strip_root ~stdin_file ~version ~json_of_message - ~severity ~suppression_locs error) + ~strip_root ~stdin_file ~version ~severity (error, suppression_locs) = + let json_of_message = json_of_message_with_context ~strip_root ~stdin_file in + Hh_json.JSON_Object + (json_of_error_props + ~strip_root + ~stdin_file + ~version + ~json_of_message + ~severity + ~suppression_locs + error) let json_of_errors_with_context - ~strip_root ~stdin_file ~suppressed_errors ?(version=JsonV1) ~errors ~warnings () = + ~strip_root ~stdin_file ~suppressed_errors ?(version = JsonV1) ~errors ~warnings () = let f = json_of_error_with_context ~strip_root ~stdin_file ~version in let obj_props_rev = [] - |> ErrorSet.fold (fun error acc -> - f ~severity:Err (error, Utils_js.LocSet.empty) :: acc) errors - |> ErrorSet.fold (fun warn acc -> - f ~severity:Warn (warn, Utils_js.LocSet.empty) :: acc) warnings + |> ConcreteLocPrintableErrorSet.fold + (fun error acc -> f ~severity:Err (error, Loc_collections.LocSet.empty) :: acc) + errors + |> ConcreteLocPrintableErrorSet.fold + (fun warn acc -> f ~severity:Warn (warn, Loc_collections.LocSet.empty) :: acc) + warnings in (* We want these to show up as "suppressed error"s, not "suppressed off"s *) - let obj_props_rev = List.fold_left (fun acc suppressed_error -> - f ~severity:Err suppressed_error :: acc - ) obj_props_rev suppressed_errors + let obj_props_rev = + List.fold_left + (fun acc suppressed_error -> + let suppressed_error = + let (err, suppressions) = suppressed_error in + (err, suppressions) + in + f ~severity:Err suppressed_error :: acc) + obj_props_rev + suppressed_errors in Hh_json.JSON_Array (List.rev obj_props_rev) + (* This function has an unusual signature because the first part can be + expensive -- specifically `json_of_errors_with_context` can take a while, + and we would like to include the time spent in our profiling data. 
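The comment above is the reason full_status_json_of_errors now returns a closure: the expensive JSON construction runs while profiling is still being measured, and only the finished profiling properties are spliced in later. The shape, reduced to a sketch with stand-in data instead of Hh_json (make_status is a hypothetical name):

let make_status ~errors () =
  (* expensive part: runs now, inside the profiling segment *)
  let base = [ ("passed", string_of_bool (errors = [])) ] in
  (* cheap part: runs later, once the profiling numbers exist *)
  fun ~profiling_props -> base @ profiling_props

(* usage: build first, finish profiling, then render *)
let _status =
  let finish = make_status ~errors:[ "A"; "B" ] () in
  finish ~profiling_props:[ ("elapsed", "0.42") ]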
- let full_status_json_of_errors ~strip_root ~suppressed_errors - ?(version=JsonV1) ?(profiling=None) ?(stdin_file=None) ~errors ~warnings () = - let open Hh_json in - - let props = [ - "flowVersion", JSON_String Flow_version.version; - "jsonVersion", JSON_String (match version with JsonV1 -> "1" | JsonV2 -> "2"); - "errors", json_of_errors_with_context - ~strip_root ~stdin_file ~suppressed_errors ~version ~errors ~warnings (); - "passed", JSON_Bool (ErrorSet.is_empty errors); - ] in - let props = match profiling with - | None -> props - | Some profiling -> props @ Profiling_js.to_legacy_json_properties profiling - in - JSON_Object props + However, that profiling data is also included in the output. This function + is designed to be partially applied, with the partial application + performing the expensive work within a running profiling segment. The + returned closure can be passed the finished profiling data. *) + let full_status_json_of_errors + ~strip_root ~suppressed_errors ?(version = JsonV1) ?(stdin_file = None) ~errors ~warnings () + = + Hh_json.( + let props = + [ + ("flowVersion", JSON_String Flow_version.version); + ( "jsonVersion", + JSON_String + (match version with + | JsonV1 -> "1" + | JsonV2 -> "2") ); + ( "errors", + json_of_errors_with_context + ~strip_root + ~stdin_file + ~suppressed_errors + ~version + ~errors + ~warnings + () ); + ("passed", JSON_Bool (ConcreteLocPrintableErrorSet.is_empty errors)); + ] + in + (fun ~profiling_props -> JSON_Object (props @ profiling_props))) + + let format_errors + ~out_channel + ~strip_root + ~suppressed_errors + ~pretty + ?version + ?(stdin_file = None) + ~errors + ~warnings + () = + Hh_json.( + let get_json = + full_status_json_of_errors + ~strip_root + ?version + ~stdin_file + ~suppressed_errors + ~errors + ~warnings + () + in + fun ~profiling_props -> + let res = get_json ~profiling_props in + if pretty then + output_string out_channel (json_to_multiline res) + else + json_to_output out_channel res; + flush out_channel) let print_errors ~out_channel @@ -2924,103 +3179,122 @@ module Json_output = struct ~suppressed_errors ~pretty ?version - ?(profiling=None) - ?(stdin_file=None) + ?(stdin_file = None) ~errors ~warnings () = - let open Hh_json in - let res = full_status_json_of_errors ~strip_root ?version ~profiling ~stdin_file - ~suppressed_errors ~errors ~warnings () in - if pretty then output_string out_channel (json_to_multiline res) - else json_to_output out_channel res; - flush out_channel + format_errors + ~out_channel + ~strip_root + ~suppressed_errors + ~pretty + ?version + ~stdin_file + ~errors + ~warnings + () + ~profiling_props:[] end (* for vim and emacs plugins *) module Vim_emacs_output = struct - let string_of_loc ~strip_root loc = Loc.( - match loc.source with - | None - | Some File_key.Builtins -> "" - | Some file -> - let file = Reason.string_of_source ~strip_root file in - let line = loc.start.line in - let start = loc.start.column + 1 in - let end_ = loc._end.column in - let spf = Utils_js.spf in - if line <= 0 then - spf "File \"%s\", line 0" file - else if line = loc._end.line && start - end_ = 1 then - spf "File \"%s\", line %d, character %d" file line start - else - spf "File \"%s\", line %d, characters %d-%d" file line start end_ - ) + let string_of_loc ~strip_root loc = + Loc.( + match loc.source with + | None + | Some File_key.Builtins -> + "" + | Some file -> + let file = Reason.string_of_source ~strip_root file in + let line = loc.start.line in + let start = loc.start.column + 1 in + let end_ = 
loc._end.column in + let spf = Utils_js.spf in + if line <= 0 then + spf "File \"%s\", line 0" file + else if line = loc._end.line && start - end_ = 1 then + spf "File \"%s\", line %d, character %d" file line start + else + spf "File \"%s\", line %d, characters %d-%d" file line start end_) let print_errors = - let endline s = if s = "" then "" else s ^ "\n" in + let endline s = + if s = "" then + "" + else + s ^ "\n" + in let to_pp_string ~strip_root prefix message = - let loc, msg = to_pp message in + let (loc, msg) = to_pp message in let loc_str = string_of_loc ~strip_root loc in Printf.sprintf "%s%s%s" (endline loc_str) prefix (endline msg) in let classic_to_string ~strip_root prefix trace error = - let {messages; _} = error in + let { messages; _ } = error in let messages = append_trace_reasons messages trace in let buf = Buffer.create 50 in (match messages with | [] -> assert false | message1 :: rest_of_error -> - Buffer.add_string buf (to_pp_string ~strip_root prefix message1); - List.iter begin fun message -> - Buffer.add_string buf (to_pp_string ~strip_root "" message) - end rest_of_error - ); + Buffer.add_string buf (to_pp_string ~strip_root prefix message1); + List.iter + begin + fun message -> Buffer.add_string buf (to_pp_string ~strip_root "" message) + end + rest_of_error); Buffer.contents buf in - let to_string ~strip_root prefix ((_, trace, error) : error) : string = - classic_to_string ~strip_root prefix trace (Friendly.to_classic error) in + let to_string ~strip_root prefix ((_, trace, error) : Loc.t printable_error) : string = + classic_to_string ~strip_root prefix trace (Friendly.to_classic error) + in fun ~strip_root oc ~errors ~warnings () -> - let sl = [] - |> ErrorSet.fold (fun err acc -> - (to_string ~strip_root "Error: " err)::acc - ) (errors) - |> ErrorSet.fold (fun warn acc -> - (to_string ~strip_root "Warning: " warn)::acc - ) (warnings) + let sl = + [] + |> ConcreteLocPrintableErrorSet.fold + (fun err acc -> to_string ~strip_root "Error: " err :: acc) + errors + |> ConcreteLocPrintableErrorSet.fold + (fun warn acc -> to_string ~strip_root "Warning: " warn :: acc) + warnings |> List.sort String.compare |> ListUtils.uniq in - List.iter begin fun s -> - output_string oc s; - output_string oc "\n"; - end sl; + List.iter + begin + fun s -> + output_string oc s; + output_string oc "\n" + end + sl; flush oc end - module Lsp_output = struct type t = { - loc: Loc.t; (* the file+range at which the message applies *) - message: string; (* the diagnostic's message *) - code: string; (* an error code *) + loc: Loc.t; + (* the file+range at which the message applies *) + message: string; + (* the diagnostic's message *) + code: string; + (* an error code *) relatedLocations: (Loc.t * string) list; } - let lsp_of_error (error: error) : t = + let lsp_of_error (error : Loc.t printable_error) : t = (* e.g. "Error about `code` in type Ref(`foo`)" *) (* will produce LSP message "Error about `code` in type `foo` [1]" *) (* and the LSP related location will have message "[1]: `foo`" *) let (kind, _, friendly) = error in let (_, loc, group) = - Friendly.message_group_of_error ~show_all_branches:false ~show_root:true friendly in + Friendly.message_group_of_error ~show_all_branches:false ~show_root:true friendly + in let (references, group) = Friendly.extract_references group in let features = Friendly.message_of_group_message group in (* This is the accumulator function to build up the message and related locations... 
*) let f (message, relatedLocations) feature = match feature with | Friendly.Inline inlines -> - let message = (message ^ Friendly.string_of_message_inlines inlines) in + let message = message ^ Friendly.string_of_message_inlines inlines in (message, relatedLocations) | Friendly.Reference (inlines, id) -> let ref_id = string_of_int id in @@ -3028,7 +3302,7 @@ module Lsp_output = struct let ref_loc = IMap.find id references in let ref_message = Printf.sprintf "[%s] %s" ref_id ref_text in let message = Printf.sprintf "%s %s [%s]" message ref_text ref_id in - (message, (ref_loc, ref_message)::relatedLocations) + (message, (ref_loc, ref_message) :: relatedLocations) in let (message, relatedLocations) = List.fold_left f ("", []) features in { @@ -3038,84 +3312,3 @@ module Lsp_output = struct relatedLocations = List.rev relatedLocations; } end - -class mapper = object(this) - method error (error: error) = - let (error_kind, messages, friendly_error) = error in - let error_kind' = this#error_kind error_kind in - let messages' = ListUtils.ident_map this#message messages in - let friendly_error' = this#friendly_error friendly_error in - if error_kind == error_kind' && messages == messages' && friendly_error == friendly_error' - then error - else (error_kind', messages', friendly_error') - - method error_kind (error_kind: error_kind) = error_kind - - method private message (message: message) = - match message with - | BlameM (loc, str) -> - let loc' = this#loc loc in - if loc == loc' - then message - else BlameM (loc', str) - | CommentM _ -> message - - method private friendly_error (friendly_error: Friendly.t) = - let { Friendly.loc; root; message; } = friendly_error in - let loc' = this#loc loc in - let root' = OptionUtils.ident_map this#error_root root in - let message' = this#error_message message in - if loc == loc' && root == root' && message == message' - then friendly_error - else { Friendly.loc = loc'; root = root'; message = message'; } - - method private error_root (error_root: Loc.t Friendly.error_root) = - let { Friendly.root_loc; root_message; } = error_root in - let root_loc' = this#loc root_loc in - let root_message' = this#friendly_message root_message in - if root_loc == root_loc' && root_message == root_message' - then error_root - else { Friendly.root_loc = root_loc'; root_message = root_message'; } - - method private error_message (error_message: Loc.t Friendly.error_message) = - match error_message with - | Friendly.Normal { message; frames; } -> - let message' = this#friendly_message message in - let frames' = OptionUtils.ident_map (ListUtils.ident_map this#friendly_message) frames in - if message == message' && frames == frames' - then error_message - else Friendly.Normal { message = message'; frames = frames' } - | Friendly.Speculation { frames; branches; } -> - let frames' = ListUtils.ident_map this#friendly_message frames in - let branches' = ListUtils.ident_map (fun branch -> - let (score, friendly_error) = branch in - let friendly_error' = this#friendly_error friendly_error in - if friendly_error == friendly_error' - then branch - else (score, friendly_error') - ) branches in - if frames == frames' && branches == branches' - then error_message - else Friendly.Speculation { frames = frames'; branches = branches'; } - - method friendly_message (friendly_message: Loc.t Friendly.message) = - ListUtils.ident_map this#message_feature friendly_message - - method message_feature (message_feature: Loc.t Friendly.message_feature) = - match message_feature with - | Friendly.Inline 
inlines -> - let inlines' = ListUtils.ident_map this#message_inline inlines in - if inlines == inlines' - then message_feature - else Friendly.Inline inlines' - | Friendly.Reference (inlines, loc) -> - let inlines' = ListUtils.ident_map this#message_inline inlines in - let loc' = this#loc loc in - if inlines == inlines' && loc == loc' - then message_feature - else Friendly.Reference (inlines', loc') - - method message_inline (message_inline: Friendly.message_inline) = message_inline - - method loc (loc: Loc.t) = loc -end diff --git a/src/common/errors/errors.mli b/src/common/errors/errors.mli index f9434371876..0b7e18bb6c2 100644 --- a/src/common/errors/errors.mli +++ b/src/common/errors/errors.mli @@ -1,34 +1,41 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +type infer_warning_kind = + | ExportKind + | OtherKind + type error_kind = | ParseError + | PseudoParseError | InferError - | InferWarning + | InferWarning of infer_warning_kind | InternalError | DuplicateProviderError | RecursionLimitError | LintError of Lints.lint_kind -val string_of_kind: error_kind -> string +val string_of_kind : error_kind -> string +type 'a info = 'a * string list (** simple structure for callers to specify message content. an info list looks like e.g.: [ location1, ["number"; "Type is incompatible with"]; location2, ["string"] ] *) -type info = Loc.t * string list (** for extra info, enough structure to do simple tree-shaped output *) -type info_tree = - | InfoLeaf of info list - | InfoNode of info list * info_tree list +type 'a info_tree = + | InfoLeaf of 'a info list + | InfoNode of 'a info list * 'a info_tree list module Friendly : sig + type t + type 'a message = 'a message_feature list and 'a message_feature = @@ -39,55 +46,64 @@ module Friendly : sig | Text of string | Code of string - val message_of_string: string -> 'a message - val text: string -> 'a message_feature - val code: string -> 'a message_feature - val ref: ?loc:bool -> Reason.reason -> Loc.t message_feature - val intersperse: 'a -> 'a list -> 'a list - val conjunction_concat: ?conjunction:string -> 'a message list -> 'a message - val capitalize: 'a message -> 'a message + type docs = { + call: string; + tuplemap: string; + objmap: string; + objmapi: string; + } + + val docs : docs + + val message_of_string : string -> 'a message + + val text : string -> 'a message_feature + + val code : string -> 'a message_feature + + val ref : ?loc:bool -> Reason.concrete_reason -> Loc.t message_feature + + val conjunction_concat : ?conjunction:string -> 'a message list -> 'a message + + val capitalize : 'a message -> 'a message end (* error structure *) -type error +type 'loc printable_error -val mk_error: +val mk_error : ?kind:error_kind -> - ?trace_infos:info list -> - ?root:(Loc.t * Loc.t Friendly.message) -> - ?frames:(Loc.t Friendly.message list) -> - ALoc.t -> + ?trace_infos:Loc.t info list -> + ?root:Loc.t * Loc.t Friendly.message -> + ?frames:Loc.t Friendly.message list -> + Loc.t -> Loc.t Friendly.message -> - error + Loc.t printable_error -val mk_speculation_error: +val mk_speculation_error : ?kind:error_kind -> - ?trace_infos:info list -> + ?trace_infos:Loc.t info list -> loc:Loc.t -> root:(Loc.t * Loc.t Friendly.message) option -> - frames:(Loc.t Friendly.message list) -> - speculation_errors:((int * error) list) -> - error + frames:Loc.t Friendly.message 
list -> + speculation_errors:(int * Loc.t printable_error) list -> + Loc.t printable_error + +val loc_of_printable_error : 'loc printable_error -> 'loc -val is_duplicate_provider_error: error -> bool +val locs_of_printable_error : 'loc printable_error -> 'loc list -val loc_of_error: error -> Loc.t -val locs_of_error: error -> Loc.t list -val kind_of_error: error -> error_kind +val kind_of_printable_error : 'loc printable_error -> error_kind -(* we store errors in sets, currently, because distinct - traces may share endpoints, and produce the same error *) -module ErrorSet : Set.S with type elt = error +module ConcreteLocPrintableErrorSet : Set.S with type elt = Loc.t printable_error (* formatters/printers *) type stdin_file = (Path.t * string) option val deprecated_json_props_of_loc : - strip_root: Path.t option -> - Loc.t -> - (string * Hh_json.json) list + strip_root:Path.t option -> Loc.t -> (string * Hh_json.json) list (* Some of the error printing functions consist only of named and optional arguments, * requiring an extra unit argument for disambiguation on partial application. For @@ -106,86 +122,107 @@ module Cli_output : sig message_width: int; } - val print_errors: + val print_errors : out_channel:out_channel -> flags:error_flags -> ?stdin_file:stdin_file -> - strip_root: Path.t option -> - errors: ErrorSet.t -> - warnings: ErrorSet.t -> - lazy_msg: string option -> + strip_root:Path.t option -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> + lazy_msg:string option -> unit -> unit + + val format_errors : + out_channel:out_channel -> + flags:error_flags -> + ?stdin_file:stdin_file -> + strip_root:Path.t option -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> + lazy_msg:string option -> + unit -> + unit + + (* print errors *) end module Json_output : sig type json_version = - | JsonV1 - | JsonV2 + | JsonV1 + | JsonV2 val json_of_errors_with_context : - strip_root: Path.t option -> - stdin_file: stdin_file -> - suppressed_errors: (error * Utils_js.LocSet.t) list -> + strip_root:Path.t option -> + stdin_file:stdin_file -> + suppressed_errors:(Loc.t printable_error * Loc_collections.LocSet.t) list -> ?version:json_version -> - errors: ErrorSet.t -> - warnings: ErrorSet.t -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> unit -> Hh_json.json val full_status_json_of_errors : - strip_root: Path.t option -> - suppressed_errors: (error * Utils_js.LocSet.t) list -> + strip_root:Path.t option -> + suppressed_errors:(Loc.t printable_error * Loc_collections.LocSet.t) list -> ?version:json_version -> - ?profiling:Profiling_js.finished option -> ?stdin_file:stdin_file -> - errors: ErrorSet.t -> - warnings: ErrorSet.t -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> unit -> + profiling_props:(string * Hh_json.json) list -> Hh_json.json - val print_errors: + val print_errors : + out_channel:out_channel -> + strip_root:Path.t option -> + suppressed_errors:(Loc.t printable_error * Loc_collections.LocSet.t) list -> + pretty:bool -> + ?version:json_version -> + ?stdin_file:stdin_file -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> + unit -> + unit + + val format_errors : out_channel:out_channel -> - strip_root: Path.t option -> - suppressed_errors: (error * Utils_js.LocSet.t) list -> + strip_root:Path.t option -> + suppressed_errors:(Loc.t printable_error * Loc_collections.LocSet.t) list -> 
pretty:bool -> ?version:json_version -> - ?profiling:Profiling_js.finished option -> ?stdin_file:stdin_file -> - errors: ErrorSet.t -> - warnings: ErrorSet.t -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> unit -> + profiling_props:(string * Hh_json.json) list -> unit + + (* print errors *) end module Vim_emacs_output : sig - val string_of_loc: - strip_root:Path.t option -> - Loc.t -> string - val print_errors: + val string_of_loc : strip_root:Path.t option -> Loc.t -> string + + val print_errors : strip_root:Path.t option -> out_channel -> - errors:ErrorSet.t -> - warnings:ErrorSet.t -> + errors:ConcreteLocPrintableErrorSet.t -> + warnings:ConcreteLocPrintableErrorSet.t -> unit -> unit end module Lsp_output : sig type t = { - loc: Loc.t; (* the file+range at which the message applies *) - message: string; (* the diagnostic's message *) - code: string; (* an error code *) + loc: Loc.t; + (* the file+range at which the message applies *) + message: string; + (* the diagnostic's message *) + code: string; + (* an error code *) relatedLocations: (Loc.t * string) list; } - val lsp_of_error: error -> t -end -class mapper : object - method error: error -> error - method error_kind: error_kind -> error_kind - method friendly_message: Loc.t Friendly.message -> Loc.t Friendly.message - method loc: Loc.t -> Loc.t - method message_feature: Loc.t Friendly.message_feature -> Loc.t Friendly.message_feature - method message_inline: Friendly.message_inline -> Friendly.message_inline + val lsp_of_error : Loc.t printable_error -> t end diff --git a/src/common/exit_status/dune b/src/common/exit_status/dune new file mode 100644 index 00000000000..422f65bca7c --- /dev/null +++ b/src/common/exit_status/dune @@ -0,0 +1,11 @@ +(library + (name flow_exit_status) + (wrapped false) + (libraries + flow_common_utils + flow_logging_stubs + flow_version + hh_json ; hack + imported_core ; hack + ) +) diff --git a/src/common/flowExitStatus.ml b/src/common/exit_status/flowExitStatus.ml similarity index 75% rename from src/common/flowExitStatus.ml rename to src/common/exit_status/flowExitStatus.ml index 0632509e1e0..87d3d4a76fc 100644 --- a/src/common/flowExitStatus.ml +++ b/src/common/exit_status/flowExitStatus.ml @@ -1,3 +1,9 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) type t = (* Signaled *) | Interrupted @@ -53,28 +59,30 @@ type t = | Killed_by_monitor (* The saved state file is invalid and we're running with --saved-state-no-fallback *) | Invalid_saved_state - + (* The server would like to restart, likely since re-init'ing is faster than a recheck *) + | Restart (* The hack code might throw this *) | Socket_error (* The hack code might throw this *) | Dfind_died (* The hack code might throw this *) | Dfind_unresponsive - + (* A fatal error with Watchman *) + | Watchman_error (* A generic something-else-went-wrong *) | Unknown_error - (* Exit codes are part of Flow's API and thus changing exit codes is a - * breaking change to Flow's API. Tools that call Flow may be watching for - * certain exit codes. - * - * In reality, probably no one cares about many of these exit codes. The ones - * I know are definitely being watched for are: - * - * No_error - * Type_error - * Out_of_time - *) +(* Exit codes are part of Flow's API and thus changing exit codes is a + * breaking change to Flow's API. 
Tools that call Flow may be watching for + * certain exit codes. + * + * In reality, probably no one cares about many of these exit codes. The ones + * I know are definitely being watched for are: + * + * No_error + * Type_error + * Out_of_time + *) let error_code = function | Interrupted -> -6 | No_error -> 0 @@ -99,6 +107,7 @@ let error_code = function | Autostop -> 18 | Killed_by_monitor -> 19 | Invalid_saved_state -> 20 + | Restart -> 21 | Commandline_usage_error -> 64 | No_input -> 66 | Server_start_failed _ -> 78 @@ -106,9 +115,9 @@ let error_code = function | Socket_error -> 98 | Dfind_died -> 99 | Dfind_unresponsive -> 100 + | Watchman_error -> 101 | Unknown_error -> 110 - (* Return an error type given an error code *) let error_type = function | -6 -> Interrupted @@ -133,6 +142,7 @@ let error_type = function | 18 -> Autostop | 19 -> Killed_by_monitor | 20 -> Invalid_saved_state + | 21 -> Restart | 64 -> Commandline_usage_error | 66 -> No_input (* The process status is made up *) @@ -141,14 +151,14 @@ let error_type = function | 98 -> Socket_error | 99 -> Dfind_died | 100 -> Dfind_unresponsive + | 101 -> Watchman_error | 110 -> Unknown_error | _ -> raise Not_found - let unpack_process_status = function - | Unix.WEXITED n -> "exit", n - | Unix.WSIGNALED n -> "signaled", n - | Unix.WSTOPPED n -> "stopped", n + | Unix.WEXITED n -> ("exit", n) + | Unix.WSIGNALED n -> ("signaled", n) + | Unix.WSTOPPED n -> ("stopped", n) let to_string = function | Interrupted -> "Interrupted" @@ -167,8 +177,8 @@ let to_string = function | Path_is_not_a_file -> "Path_is_not_a_file" | Windows_killed_by_task_manager -> "Windows_killed_by_task_manager" | Server_start_failed status -> - let reason, code = unpack_process_status status in - Utils_js.spf "Server_start_failed (%s, %d)" reason code + let (reason, code) = unpack_process_status status in + Utils_js.spf "Server_start_failed (%s, %d)" reason code | Type_error -> "Type_error" | Build_id_mismatch -> "Build_id_mismatch" | Lock_stolen -> "Lock_stolen" @@ -176,6 +186,7 @@ let to_string = function | Missing_flowlib -> "Missing_flowlib" | Dfind_died -> "Dfind_died" | Dfind_unresponsive -> "Dfind_unresponsive" + | Watchman_error -> "Watchman_error" | Unknown_error -> "Unknown_error" | Commandline_usage_error -> "Commandline_usage_error" | No_input -> "No_input" @@ -183,45 +194,48 @@ let to_string = function | Autostop -> "Autostop" | Killed_by_monitor -> "Killed_by_monitor" | Invalid_saved_state -> "Invalid_saved_state" + | Restart -> "Restart" exception Exit_with of t type json_mode = { pretty: bool } -let json_mode = ref None - -let set_json_mode ~pretty = - json_mode := Some { pretty } -let format_json ~msg t = - let open Hh_json in +let json_mode = ref None - let exit_props = [ - "code", JSON_Number (error_code t |> string_of_int); - "reason", JSON_String (to_string t); - ] @ Option.value_map msg ~default:[] ~f:(fun msg -> [ "msg", JSON_String msg ]) in +let set_json_mode ~pretty = json_mode := Some { pretty } - let props = [ - "flowVersion", JSON_String Flow_version.version; - "exit", JSON_Object exit_props; - ] in +let unset_json_mode () = json_mode := None - JSON_Object props +let json_props_of_t ?msg t = + Hh_json.( + let exit_props = + [ + ("code", JSON_Number (error_code t |> string_of_int)); + ("reason", JSON_String (to_string t)); + ] + @ Option.value_map msg ~default:[] ~f:(fun msg -> [("msg", JSON_String msg)]) + in + [("flowVersion", JSON_String Flow_version.version); ("exit", JSON_Object exit_props)]) -let print_json ~msg t = +let print_json 
?msg t = match t with (* Commands that exit with these exit codes handle json output themselves *) - | No_error | Type_error -> () - | _ -> begin - match !json_mode with - | None -> () - | Some { pretty } -> - format_json ~msg t |> Hh_json.print_json_endline ~pretty - end + | No_error + | Type_error -> + () + | _ -> + begin + match !json_mode with + | None -> () + | Some { pretty } -> + let json = Hh_json.JSON_Object (json_props_of_t ?msg t) in + Hh_json.print_json_endline ~pretty json + end let exit ?msg t = (match msg with | Some msg -> prerr_endline msg | None -> ()); - print_json ~msg t; + print_json ?msg t; FlowEventLogger.exit msg (to_string t); Pervasives.exit (error_code t) diff --git a/src/common/exit_status/flowExitStatus.mli b/src/common/exit_status/flowExitStatus.mli new file mode 100644 index 00000000000..606db876268 --- /dev/null +++ b/src/common/exit_status/flowExitStatus.mli @@ -0,0 +1,55 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) +type t = + | Interrupted + | No_error + | Windows_killed_by_task_manager + | Type_error + | Out_of_time + | Kill_error + | Unused_server + | No_server_running + | Out_of_retries + | Invalid_flowconfig + | Path_is_not_a_file + | Build_id_mismatch + | Input_error + | Lock_stolen + | Could_not_find_flowconfig + | Server_out_of_date + | Out_of_shared_memory + | Flowconfig_changed + | Server_client_directory_mismatch + | Commandline_usage_error + | No_input + | Server_start_failed of Unix.process_status + | Missing_flowlib + | Autostop + | Killed_by_monitor + | Invalid_saved_state + | Restart + | Socket_error + | Dfind_died + | Dfind_unresponsive + | Watchman_error + | Unknown_error + +exception Exit_with of t + +val exit : ?msg:string -> t -> 'a + +val error_code : t -> int + +val error_type : int -> t + +val to_string : t -> string + +val set_json_mode : pretty:bool -> unit + +val unset_json_mode : unit -> unit + +val json_props_of_t : ?msg:string -> t -> (string * Hh_json.json) list diff --git a/src/common/files.ml b/src/common/files.ml index cb109dfe1d1..ef5816bd7f2 100644 --- a/src/common/files.ml +++ b/src/common/files.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
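A minimal usage sketch, not part of the patch, for the FlowExitStatus surface introduced above; it assumes the new flow_exit_status library is linked and only exercises values the diff itself defines (Restart = 21, Watchman_error = 101, json_props_of_t, and Hh_json.print_json_endline).

(* Sketch only: exit codes stay stable, and error_type inverts error_code. *)
let () =
  assert (FlowExitStatus.error_code FlowExitStatus.Restart = 21);
  assert (FlowExitStatus.error_type 101 = FlowExitStatus.Watchman_error);
  (* json_props_of_t exposes the same payload print_json emits, so a caller can
   * embed it in its own JSON output instead of printing it directly. *)
  let props = FlowExitStatus.json_props_of_t ~msg:"restarting" FlowExitStatus.Restart in
  Hh_json.print_json_endline ~pretty:true (Hh_json.JSON_Object props)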
@@ -20,39 +20,49 @@ type options = { } let default_lib_dir options = options.default_lib_dir + let ignores options = options.ignores + let untyped options = options.untyped + let declarations options = options.declarations + let includes options = options.includes + let lib_paths options = options.lib_paths + let module_file_exts options = options.module_file_exts + let module_resource_exts options = options.module_resource_exts + let node_resolver_dirnames options = options.node_resolver_dirnames let node_modules_containers = ref SSet.empty let global_file_name = "(global)" + let flow_ext = ".flow" -let has_flow_ext file = - File_key.check_suffix file flow_ext +let has_flow_ext file = File_key.check_suffix file flow_ext let chop_flow_ext file = - if has_flow_ext file - then Some (File_key.chop_suffix file flow_ext) - else None + if has_flow_ext file then + Some (File_key.chop_suffix file flow_ext) + else + None -let is_directory path = try Sys.is_directory path with Sys_error _ -> false +let is_directory path = (try Sys.is_directory path with Sys_error _ -> false) let is_prefix prefix = - let prefix_with_sep = if String_utils.string_ends_with prefix Filename.dir_sep - then prefix - else prefix ^ Filename.dir_sep - in fun path -> - path = prefix || String_utils.string_starts_with path prefix_with_sep + let prefix_with_sep = + if String_utils.string_ends_with prefix Filename.dir_sep then + prefix + else + prefix ^ Filename.dir_sep + in + (fun path -> path = prefix || String_utils.string_starts_with path prefix_with_sep) -let is_json_file filename = - Utils_js.extension_of_filename filename = Some ".json" +let is_json_file filename = Utils_js.extension_of_filename filename = Some ".json" (* This is the set of file extensions which we watch for changes *) let get_all_watched_extensions options = @@ -68,91 +78,89 @@ let is_valid_path = * -> false *) let rec helper file_exts basename acc ext = - ext <> "" && ( - let acc = ext ^ acc in - SSet.mem acc file_exts || ( - let basename = Filename.chop_suffix basename ext in - let ext = Filename.extension basename in - helper file_exts basename acc ext - ) - ) + ext <> "" + && + let acc = ext ^ acc in + SSet.mem acc file_exts + || + let basename = Filename.chop_suffix basename ext in + let ext = Filename.extension basename in + helper file_exts basename acc ext in - fun file_exts basename -> let extension = Filename.extension basename in let acc = "" in - if extension = flow_ext - then + if extension = flow_ext then (* We treat bar.foo.flow like bar.foo *) let basename = Filename.chop_suffix basename flow_ext in helper file_exts basename acc (Filename.extension basename) else helper file_exts basename acc extension in - - let is_dot_file basename = - basename <> "" && basename.[0] = '.' - in - + let is_dot_file basename = basename <> "" && basename.[0] = '.' 
in fun ~options -> let file_exts = get_all_watched_extensions options in - fun path -> let basename = Filename.basename path in - not (is_dot_file basename) && (check_ext file_exts basename || basename = "package.json") + (not (is_dot_file basename)) && (check_ext file_exts basename || basename = "package.json") -let is_node_module options path = - List.mem (Filename.basename path) options.node_resolver_dirnames +let is_node_module options path = List.mem (Filename.basename path) options.node_resolver_dirnames let is_flow_file ~options = let is_valid_path = is_valid_path ~options in - fun path -> is_valid_path path && not (is_directory path) + (fun path -> is_valid_path path && not (is_directory path)) -let realpath path = match Sys_utils.realpath path with -| Some path -> path -| None -> path (* perhaps this should error? *) +let realpath path = + match Sys_utils.realpath path with + | Some path -> path + | None -> path + +(* perhaps this should error? *) let make_path_absolute root path = - if Filename.is_relative path - then Path.concat root path - else Path.make path + if Filename.is_relative path then + Path.concat root path + else + Path.make path type file_kind = -| Reg of string -| Dir of string * bool -| StatError of string -| Other + | Reg of string + | Dir of string * bool + | StatError of string + | Other (* Determines whether a path is a regular file, a directory, or something else like a pipe, socket or device. If `path` is a symbolic link, then it returns the type of the target of the symlink, and the target's real path. *) -let kind_of_path path = Unix.( - try match (Sys_utils.lstat path).st_kind with - | S_REG -> Reg path - | S_LNK -> - (try begin match (stat path).st_kind with - | S_REG -> Reg (realpath path) - | S_DIR -> Dir (realpath path, true) - | _ -> Other - (* Don't spew errors on broken symlinks *) - end with Unix_error (ENOENT, _, _) -> Other) - | S_DIR -> Dir (path, false) - | _ -> Other - with - | Unix_error (ENOENT, _, _) when Sys.win32 && String.length path >= 248 -> - StatError ( - Utils_js.spf - "On Windows, paths must be less than 248 characters for directories \ - and 260 characters for files. This path has %d characters. Skipping %s" - (String.length path) - path - ) - | Unix_error (e, _, _) -> - StatError (Utils_js.spf "Skipping %s: %s\n%!" path (Unix.error_message e)) -) +let kind_of_path path = + Unix.( + try + match (Sys_utils.lstat path).st_kind with + | S_REG -> Reg path + | S_LNK -> + (try + match (stat path).st_kind with + | S_REG -> Reg (realpath path) + | S_DIR -> Dir (realpath path, true) + | _ -> Other + (* Don't spew errors on broken symlinks *) + with Unix_error (ENOENT, _, _) -> Other) + | S_DIR -> Dir (path, false) + | _ -> Other + with + | Unix_error (ENOENT, _, _) when Sys.win32 && String.length path >= 248 -> + StatError + (Utils_js.spf + "On Windows, paths must be less than 248 characters for directories and 260 characters for files. This path has %d characters. Skipping %s" + (String.length path) + path) + | Unix_error (e, _, _) -> + StatError (Utils_js.spf "Skipping %s: %s\n%!" path (Unix.error_message e))) let can_read path = - try let () = Unix.access path [Unix.R_OK] in true + try + let () = Unix.access path [Unix.R_OK] in + true with Unix.Unix_error (e, _, _) -> Printf.eprintf "Skipping %s: %s\n%!" 
path (Unix.error_message e); false @@ -178,7 +186,7 @@ let max_files = 1000 If kind_of_path fails, then we only emit a warning if error_filter passes *) let make_next_files_and_symlinks ~node_module_filter ~path_filter ~realpath_filter ~error_filter paths = - let prefix_checkers = List.map is_prefix paths in + let prefix_checkers = Core_list.map ~f:is_prefix paths in let rec process sz (acc, symlinks) files dir stack = if sz >= max_files then ((acc, symlinks), S_Dir (files, dir, stack)) @@ -186,15 +194,21 @@ let make_next_files_and_symlinks match files with | [] -> process_stack sz (acc, symlinks) stack | file :: files -> - let file = if dir = "" then file else Filename.concat dir file in - match kind_of_path file with + let file = + if dir = "" then + file + else + Filename.concat dir file + in + (match kind_of_path file with | Reg real -> - if path_filter file && (file = real || realpath_filter real) && can_read real - then process (sz+1) (real :: acc, symlinks) files dir stack - else process sz (acc, symlinks) files dir stack + if path_filter file && (file = real || realpath_filter real) && can_read real then + process (sz + 1) (real :: acc, symlinks) files dir stack + else + process sz (acc, symlinks) files dir stack | Dir (path, is_symlink) -> - if node_module_filter file - then node_modules_containers := SSet.add (Filename.dirname file) !node_modules_containers; + if node_module_filter file then + node_modules_containers := SSet.add (Filename.dirname file) !node_modules_containers; let dirfiles = Array.to_list @@ try_readdir path in let symlinks = (* accumulates all of the symlinks that point to @@ -204,7 +218,8 @@ let make_next_files_and_symlinks if not (List.exists (fun check -> check path) prefix_checkers) then SSet.add path symlinks else - symlinks in + symlinks + in if is_symlink then process sz (acc, symlinks) files dir stack else @@ -212,51 +227,65 @@ let make_next_files_and_symlinks | StatError msg -> if error_filter file then prerr_endline msg; process sz (acc, symlinks) files dir stack - | Other -> - process sz (acc, symlinks) files dir stack + | Other -> process sz (acc, symlinks) files dir stack) and process_stack sz accs = function | S_Nil -> (accs, S_Nil) - | S_Dir (files, dir, stack) -> process sz accs files dir stack in + | S_Dir (files, dir, stack) -> process sz accs files dir stack + in let state = ref (S_Dir (paths, "", S_Nil)) in fun () -> - let (res, symlinks), st = process_stack 0 ([], SSet.empty) !state in + let ((res, symlinks), st) = process_stack 0 ([], SSet.empty) !state in state := st; - res, symlinks + (res, symlinks) (* Returns a closure that returns batches of files matching `path_filter` and/or `realpath_filter` (see `make_next_files_and_symlinks`), starting from `paths` and including any directories that are symlinked to even if they are outside of `paths`. 
*) let make_next_files_following_symlinks - ~node_module_filter - ~path_filter - ~realpath_filter - ~error_filter - paths = - let paths = List.map Path.to_string paths in - let cb = ref (make_next_files_and_symlinks - ~node_module_filter ~path_filter ~realpath_filter ~error_filter paths - ) in + ~node_module_filter ~path_filter ~realpath_filter ~error_filter paths = + let paths = Core_list.map ~f:Path.to_string paths in + let cb = + ref + (make_next_files_and_symlinks + ~node_module_filter + ~path_filter + ~realpath_filter + ~error_filter + paths) + in let symlinks = ref SSet.empty in let seen_symlinks = ref SSet.empty in let rec rec_cb () = - let files, new_symlinks = !cb () in - symlinks := SSet.fold (fun symlink accum -> - if SSet.mem symlink !seen_symlinks then accum - else SSet.add symlink accum - ) new_symlinks !symlinks; + let (files, new_symlinks) = !cb () in + symlinks := + SSet.fold + (fun symlink accum -> + if SSet.mem symlink !seen_symlinks then + accum + else + SSet.add symlink accum) + new_symlinks + !symlinks; seen_symlinks := SSet.union new_symlinks !seen_symlinks; let num_files = List.length files in - if num_files > 0 then files - else if (SSet.is_empty !symlinks) then [] - else begin + if num_files > 0 then + files + else if SSet.is_empty !symlinks then + [] + else let paths = SSet.elements !symlinks in symlinks := SSet.empty; + (* since we're following a symlink, use realpath_filter for both *) - cb := make_next_files_and_symlinks - ~node_module_filter ~path_filter:realpath_filter ~realpath_filter ~error_filter paths; + cb := + make_next_files_and_symlinks + ~node_module_filter + ~path_filter:realpath_filter + ~realpath_filter + ~error_filter + paths; rec_cb () - end in rec_cb @@ -269,26 +298,34 @@ let get_all = let accum = List.fold_left (fun set x -> SSet.add x set) accum result in get_all_rec next accum in - fun next -> get_all_rec next SSet.empty + (fun next -> get_all_rec next SSet.empty) -let init ?(flowlibs_only=false) (options: options) = +let init ?(flowlibs_only = false) (options : options) = let node_module_filter = is_node_module options in - let libs = if flowlibs_only then [] else options.lib_paths in - let libs, filter = match options.default_lib_dir with - | None -> libs, is_valid_path ~options + let libs = + if flowlibs_only then + [] + else + options.lib_paths + in + let (libs, filter) = + match options.default_lib_dir with + | None -> (libs, is_valid_path ~options) | Some root -> let is_in_flowlib = is_prefix (Path.to_string root) in let is_valid_path = is_valid_path ~options in let filter path = is_in_flowlib path || is_valid_path path in - root::libs, filter + (root :: libs, filter) in (* preserve enumeration order *) - let libs = if libs = [] - then [] + let libs = + if libs = [] then + [] else let get_next lib = let lib_str = Path.to_string lib in - let filter' path = path = lib_str || filter path in + (* TODO: better to parse json files, not ignore them *) + let filter' path = (path = lib_str || filter path) && not (is_json_file path) in make_next_files_following_symlinks ~node_module_filter ~path_filter:filter' @@ -296,63 +333,65 @@ let init ?(flowlibs_only=false) (options: options) = ~error_filter:(fun _ -> true) [lib] in - libs - |> List.map (fun lib -> SSet.elements (get_all (get_next lib))) - |> List.flatten + libs |> Core_list.map ~f:(fun lib -> SSet.elements (get_all (get_next lib))) |> List.flatten in (libs, SSet.of_list libs) (* Local reference to the module exported by a file. 
Like other local references to modules imported by the file, it is a member of Context.module_map. *) -let module_ref file = - File_key.to_string file +let module_ref file = File_key.to_string file let lib_module_ref = "" let dir_sep = Str.regexp "[/\\\\]" + let current_dir_name = Str.regexp_string Filename.current_dir_name + let parent_dir_name = Str.regexp_string Filename.parent_dir_name + let absolute_path_regexp = Str.regexp "^\\(/\\|[A-Za-z]:[/\\\\]\\)" let project_root_token = Str.regexp_string "" +let is_matching path pattern_list = + List.fold_left + (fun current (pattern, rx) -> + if String_utils.string_starts_with pattern "!" then + current && not (Str.string_match rx path 0) + else + current || Str.string_match rx path 0) + false + pattern_list + (* true if a file path matches an [ignore] entry in config *) -let is_ignored (options: options) = - let list = List.map snd options.ignores in - fun path -> - (* On Windows, the path may use \ instead of /, but let's standardize the - * ignore regex to use / *) - let path = Sys_utils.normalize_filename_dir_sep path in - List.exists (fun rx -> Str.string_match rx path 0) list +let is_ignored (options : options) path = + (* On Windows, the path may use \ instead of /, but let's standardize the + * ignore regex to use / *) + let path = Sys_utils.normalize_filename_dir_sep path in + is_matching path options.ignores (* true if a file path matches an [untyped] entry in config *) -let is_untyped (options: options) = - let list = List.map snd options.untyped in - fun path -> - (* On Windows, the path may use \ instead of /, but let's standardize the - * ignore regex to use / *) - let path = Sys_utils.normalize_filename_dir_sep path in - List.exists (fun rx -> Str.string_match rx path 0) list +let is_untyped (options : options) path = + (* On Windows, the path may use \ instead of /, but let's standardize the + * ignore regex to use / *) + let path = Sys_utils.normalize_filename_dir_sep path in + is_matching path options.untyped (* true if a file path matches a [declarations] entry in config *) -let is_declaration (options: options) = - let list = List.map snd options.declarations in - fun path -> - (* On Windows, the path may use \ instead of /, but let's standardize the - * ignore regex to use / *) - let path = Sys_utils.normalize_filename_dir_sep path in - List.exists (fun rx -> Str.string_match rx path 0) list +let is_declaration (options : options) path = + (* On Windows, the path may use \ instead of /, but let's standardize the + * ignore regex to use / *) + let path = Sys_utils.normalize_filename_dir_sep path in + is_matching path options.declarations (* true if a file path matches an [include] path in config *) -let is_included options f = - Path_matcher.matches options.includes f +let is_included options f = Path_matcher.matches options.includes f let wanted ~options lib_fileset = let is_ignored_ = is_ignored options in - fun path -> not (is_ignored_ path) && not (SSet.mem path lib_fileset) + (fun path -> (not (is_ignored_ path)) && not (SSet.mem path lib_fileset)) -let watched_paths options = - Path_matcher.stems options.includes +let watched_paths options = Path_matcher.stems options.includes (** * Creates a "next" function (see also: `get_all`) for finding the files in a @@ -364,71 +403,79 @@ let watched_paths options = *) let make_next_files ~root ~all ~subdir ~options ~libs = let node_module_filter = is_node_module options in - let filter = if all then fun _ -> true else wanted ~options libs in - + let filter = + if all then + 
fun _ -> + true + else + wanted ~options libs + in (* The directories from which we start our search *) - let starting_points = match subdir with - | None -> watched_paths options - | Some subdir -> [subdir] in - - let root_str= Path.to_string root in + let starting_points = + match subdir with + | None -> watched_paths options + | Some subdir -> [subdir] + in + let root_str = Path.to_string root in let is_valid_path = is_valid_path ~options in let realpath_filter path = is_valid_path path && filter path in let path_filter = - (** + (* * This function is very hot on large codebases, so specialize it up front * to minimize work. *) match subdir with | None -> - (fun path -> - (String_utils.string_starts_with path root_str - || is_included options path) + fun path -> + (String_utils.string_starts_with path root_str || is_included options path) && realpath_filter path - ) | Some subdir -> (* The subdir might contain symlinks outside of the subdir. To prevent * these files from being returned, we modify the path filter to check * that the realpath starts with the subdir *) let subdir_str = Path.to_string subdir in - (fun path -> + fun path -> String_utils.string_starts_with path subdir_str - && (String_utils.string_starts_with path root_str - || is_included options path) + && (String_utils.string_starts_with path root_str || is_included options path) && realpath_filter path - ) in make_next_files_following_symlinks - ~node_module_filter ~path_filter ~realpath_filter ~error_filter:filter starting_points + ~node_module_filter + ~path_filter + ~realpath_filter + ~error_filter:filter + starting_points let is_windows_root root = - Sys.win32 && - String.length root = 2 && - root.[1] = ':' && + Sys.win32 + && String.length root = 2 + && root.[1] = ':' + && match root.[0] with - | 'a'..'z' | 'A'..'Z' -> true - | _ -> false + | 'a' .. 'z' + | 'A' .. 
'Z' -> + true + | _ -> false -let rec normalize_path dir file = - normalize_path_ dir (Str.split_delim dir_sep file) +let rec normalize_path dir file = normalize_path_ dir (Str.split_delim dir_sep file) and normalize_path_ dir names = match names with - | dot::names when dot = Filename.current_dir_name -> - (* ./ => dir/names *) - normalize_path_ dir names - | dots::names when dots = Filename.parent_dir_name -> - (* ../ => parent(dir)/ *) - normalize_path_ (Filename.dirname dir) names - | ""::names when names <> [] -> - (* / => / *) - construct_path Filename.dir_sep names - | root::names when is_windows_root root -> - (* C:\ => C:\ *) - construct_path (root ^ Filename.dir_sep) names + | dot :: names when dot = Filename.current_dir_name -> + (* ./ => dir/names *) + normalize_path_ dir names + | dots :: names when dots = Filename.parent_dir_name -> + (* ../ => parent(dir)/ *) + normalize_path_ (Filename.dirname dir) names + | "" :: names when names <> [] -> + (* / => / *) + construct_path Filename.dir_sep names + | root :: names when is_windows_root root -> + (* C:\ => C:\ *) + construct_path (root ^ Filename.dir_sep) names | _ -> - (* => dir/ *) - construct_path dir names + (* => dir/ *) + construct_path dir names and construct_path = List.fold_left Filename.concat @@ -437,55 +484,51 @@ and construct_path = List.fold_left Filename.concat * * Both of these are designed to avoid using Path and realpath so that we don't actually read the * file system *) -let relative_path, absolute_path = +let (relative_path, absolute_path) = let split_path = let rec f acc rest = let dir = Filename.dirname rest in - if rest = dir - then begin - if Filename.is_relative dir (* True for things like ".", false for "/", "C:/" *) - then acc (* "path/to/foo.js" becomes ["path"; "to"; "foo.js"] *) - else match acc with - | [] -> [dir] (* "/" becomes ["/"] *) - | last_dir::rest -> (* "/path/to/foo.js" becomes ["/path"; "to"; "foo.js"] *) - Filename.concat dir last_dir :: rest - end - else f ((Filename.basename rest)::acc) dir + if rest = dir then + if Filename.is_relative dir (* True for things like ".", false for "/", "C:/" *) then + acc + (* "path/to/foo.js" becomes ["path"; "to"; "foo.js"] *) + else + match acc with + | [] -> [dir] (* "/" becomes ["/"] *) + | last_dir :: rest -> + (* "/path/to/foo.js" becomes ["/path"; "to"; "foo.js"] *) + Filename.concat dir last_dir :: rest + else + f (Filename.basename rest :: acc) dir in - fun path -> f [] path + (fun path -> f [] path) in let rec make_relative = function - | (dir1::root, dir2::file) when dir1 = dir2 -> make_relative (root, file) - | (root, file) -> - List.fold_left (fun path _ -> Filename.parent_dir_name::path) file root + | (dir1 :: root, dir2 :: file) when dir1 = dir2 -> make_relative (root, file) + | (root, file) -> List.fold_left (fun path _ -> Filename.parent_dir_name :: path) file root in let make_relative root file = (* This functions is only used for displaying error location or creating saved state. We use '/' as file separator even on Windows. This simplify the test-suite script... 
*) - make_relative (split_path root, split_path file) - |> String.concat "/" + make_relative (split_path root, split_path file) |> String.concat "/" in let rec absolute_path = function - | (_::root, dir2::file) when dir2 = Filename.parent_dir_name -> - absolute_path (root, file) - | (root, file) -> - List.rev_append root file + | (_ :: root, dir2 :: file) when dir2 = Filename.parent_dir_name -> absolute_path (root, file) + | (root, file) -> List.rev_append root file in let absolute_path root file = (* Let's avoid creating paths like "/path/to/foo/." *) - if file = Filename.current_dir_name || file = "" - then root + if file = Filename.current_dir_name || file = "" then + root else - absolute_path ((List.rev @@ split_path root), split_path file) + absolute_path (List.rev @@ split_path root, split_path file) (* We may actually use these paths, so use the correct directory sep *) |> String.concat Filename.dir_sep in - - make_relative, absolute_path + (make_relative, absolute_path) (* helper to get the full path to the "flow-typed" library dir *) -let get_flowtyped_path root = - make_path_absolute root "flow-typed" +let get_flowtyped_path root = make_path_absolute root "flow-typed" (* helper: make different kinds of File_key.t from a path string *) let filename_from_string ~options p = @@ -500,24 +543,26 @@ let mkdirp path_str perm = (* If path_str is absolute, then path_prefix will be something like C:\ on * Windows and / on Linux *) let path_prefix = - if Str.string_match absolute_path_regexp path_str 0 - then Str.matched_string path_str - else "" in - + if Str.string_match absolute_path_regexp path_str 0 then + Str.matched_string path_str + else + "" + in (* On Windows, the Str.split above will mean the first part of an absolute * path will be something like C:, so let's remove that *) - let parts = match parts with - | first_part::rest when first_part ^ Filename.dir_sep = path_prefix -> rest - | parts -> parts in - - ignore (List.fold_left (fun path_str part -> - let new_path_str = Filename.concat path_str part in - Unix.( - try mkdir new_path_str perm - with Unix_error (EEXIST, "mkdir", _) -> () - ); - new_path_str - ) path_prefix parts) + let parts = + match parts with + | first_part :: rest when first_part ^ Filename.dir_sep = path_prefix -> rest + | parts -> parts + in + ignore + (List.fold_left + (fun path_str part -> + let new_path_str = Filename.concat path_str part in + Unix.((try mkdir new_path_str perm with Unix_error (EEXIST, "mkdir", _) -> ())); + new_path_str) + path_prefix + parts) (* Given a path, we want to know if it's in a node_modules/ directory or not. *) let is_within_node_modules ~root ~options path = @@ -540,22 +585,33 @@ let imaginary_realpath = (* Sys.file_exists should always return true for / and for . so we should never get into * infinite recursion. 
Let's assert that *) assert (prefix <> path); - if Sys.file_exists prefix - then prefix, rev_suffix - else find_real_prefix prefix rev_suffix + if Sys.file_exists prefix then + (prefix, rev_suffix) + else + find_real_prefix prefix rev_suffix in - fun path -> - let real_prefix, rev_suffix = find_real_prefix path [] in + let (real_prefix, rev_suffix) = find_real_prefix path [] in match Sys_utils.realpath real_prefix with | None -> failwith (Utils_js.spf "Realpath failed for existent path %s" real_prefix) | Some abs -> List.fold_left Filename.concat abs rev_suffix let canonicalize_filenames ~cwd ~handle_imaginary filenames = - List.map (fun filename -> - let filename = Sys_utils.expanduser filename in (* normalize ~ *) - let filename = normalize_path cwd filename in (* normalize ./ and ../ *) - match Sys_utils.realpath filename with (* normalize symlinks *) - | Some abs -> abs - | None -> handle_imaginary filename - ) filenames + Core_list.map + ~f:(fun filename -> + let filename = Sys_utils.expanduser filename in + (* normalize ~ *) + let filename = normalize_path cwd filename in + (* normalize ./ and ../ *) + match Sys_utils.realpath filename with + (* normalize symlinks *) + | Some abs -> abs + | None -> handle_imaginary filename) + filenames + +let expand_project_root_token_to_string ~root str = + let root = Path.to_string root |> Sys_utils.normalize_filename_dir_sep in + str |> Str.split_delim project_root_token |> String.concat root + +let expand_project_root_token_to_regexp ~root str = + expand_project_root_token_to_string ~root str |> Str.regexp diff --git a/src/common/files.mli b/src/common/files.mli index bfe7bddb5e4..b38ac8ee0fa 100644 --- a/src/common/files.mli +++ b/src/common/files.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
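A hedged sketch, not part of the patch, of the expand_project_root_token helpers defined just above. The "<PROJECT_ROOT>" literal is an assumption inferred from the token's name and Flow's flowconfig conventions; the helper splits the pattern on that token and joins the pieces with the normalized project root.

(* Sketch only: expand the project-root token before compiling the pattern. *)
let () =
  let root = Path.make "/home/me/www" in
  let pattern = "<PROJECT_ROOT>/__tests__/.*" in
  let expanded = Files.expand_project_root_token_to_string ~root pattern in
  (* expanded is "/home/me/www/__tests__/.*" on a Unix-style root *)
  print_endline expanded;
  (* the _to_regexp variant compiles the expanded string with Str.regexp *)
  ignore (Files.expand_project_root_token_to_regexp ~root pattern)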
@@ -19,97 +19,116 @@ type options = { node_resolver_dirnames: string list; } -val default_lib_dir: options -> Path.t option -val ignores: options -> (string * Str.regexp) list -val untyped: options -> (string * Str.regexp) list -val declarations: options -> (string * Str.regexp) list -val includes: options -> Path_matcher.t -val lib_paths: options -> Path.t list -val module_file_exts: options -> SSet.t -val module_resource_exts: options -> SSet.t -val node_resolver_dirnames: options -> string list +val default_lib_dir : options -> Path.t option -val node_modules_containers: SSet.t ref +val ignores : options -> (string * Str.regexp) list -val global_file_name: string -val flow_ext: string +val untyped : options -> (string * Str.regexp) list -val has_flow_ext: File_key.t -> bool -val chop_flow_ext: File_key.t -> File_key.t option +val declarations : options -> (string * Str.regexp) list -val is_json_file: string -> bool -val is_flow_file: options: options -> string -> bool +val includes : options -> Path_matcher.t + +val lib_paths : options -> Path.t list + +val module_file_exts : options -> SSet.t + +val module_resource_exts : options -> SSet.t + +val node_resolver_dirnames : options -> string list + +val node_modules_containers : SSet.t ref + +val global_file_name : string + +val flow_ext : string + +val has_flow_ext : File_key.t -> bool + +val chop_flow_ext : File_key.t -> File_key.t option + +val is_json_file : string -> bool + +val is_flow_file : options:options -> string -> bool (* true if a file path matches an [ignore] entry in config *) -val is_ignored: options -> string -> bool +val is_ignored : options -> string -> bool + (* true if a file path matches an [untyped] entry in config *) -val is_untyped: options -> string -> bool +val is_untyped : options -> string -> bool + (* true if a file path matches a [declarations] entry in config *) -val is_declaration: options -> string -> bool +val is_declaration : options -> string -> bool + (* true if a file path matches an [include] path in config *) -val is_included: options -> string -> bool +val is_included : options -> string -> bool + +val is_valid_path : options:options -> string -> bool -val is_valid_path: options: options -> string -> bool +val get_all_watched_extensions : options -> SSet.t -val get_all_watched_extensions: options -> SSet.t +val init : ?flowlibs_only:bool -> options -> string list * SSet.t -val init: ?flowlibs_only:bool -> options -> string list * SSet.t +val module_ref : File_key.t -> string -val module_ref: File_key.t -> string -val lib_module_ref: string +val lib_module_ref : string (* regexp for Filename constants *) -val dir_sep: Str.regexp -val current_dir_name: Str.regexp -val parent_dir_name: Str.regexp -val absolute_path_regexp: Str.regexp +val dir_sep : Str.regexp + +val current_dir_name : Str.regexp + +val parent_dir_name : Str.regexp + +val absolute_path_regexp : Str.regexp -val project_root_token: Str.regexp +val project_root_token : Str.regexp -val watched_paths: options -> Path.t list +val watched_paths : options -> Path.t list (* given a root, make a filter for file names *) -val wanted: - options: options -> - SSet.t -> - string -> bool +val wanted : options:options -> SSet.t -> string -> bool (* given a root, make a next_files function for MultiWorker *) -val make_next_files: - root: Path.t -> - all: bool -> - subdir: Path.t option -> - options: options -> - libs: SSet.t -> - unit -> string list +val make_next_files : + root:Path.t -> + all:bool -> + subdir:Path.t option -> + options:options -> + 
libs:SSet.t -> + unit -> + string list -val get_all: (unit -> string list) -> SSet.t +val get_all : (unit -> string list) -> SSet.t (* given a base directory and a relative path, return an absolute path *) -val normalize_path: string -> string -> string +val normalize_path : string -> string -> string (* given a base directory and a relative path, return an absolute path *) -val construct_path: string -> string list -> string +val construct_path : string -> string list -> string + +val relative_path : string -> string -> string -val relative_path: string -> string -> string -val absolute_path: string -> string -> string +val absolute_path : string -> string -> string (* TODO: this doesn't match the signatures of the related functions above *) -val make_path_absolute: Path.t -> string -> Path.t +val make_path_absolute : Path.t -> string -> Path.t -val is_prefix: string -> string -> bool +val is_prefix : string -> string -> bool -val get_flowtyped_path: Path.t -> Path.t +val get_flowtyped_path : Path.t -> Path.t -val filename_from_string: options: options -> string -> File_key.t +val filename_from_string : options:options -> string -> File_key.t -val mkdirp: string -> Unix.file_perm -> unit +val mkdirp : string -> Unix.file_perm -> unit -val is_within_node_modules: root:Path.t -> options: options -> string -> bool +val is_within_node_modules : root:Path.t -> options:options -> string -> bool -val imaginary_realpath: string -> string -val canonicalize_filenames: - cwd:string -> - handle_imaginary:(string -> string) -> - string list -> - string list +val imaginary_realpath : string -> string + +val canonicalize_filenames : + cwd:string -> handle_imaginary:(string -> string) -> string list -> string list + +val expand_project_root_token_to_string : root:Path.t -> string -> string + +val expand_project_root_token_to_regexp : root:Path.t -> string -> Str.regexp diff --git a/src/common/flowExitStatus.mli b/src/common/flowExitStatus.mli deleted file mode 100644 index 19ad25611db..00000000000 --- a/src/common/flowExitStatus.mli +++ /dev/null @@ -1,39 +0,0 @@ -type t = - | Interrupted - | No_error - | Windows_killed_by_task_manager - | Type_error - | Out_of_time - | Kill_error - | Unused_server - | No_server_running - | Out_of_retries - | Invalid_flowconfig - | Path_is_not_a_file - | Build_id_mismatch - | Input_error - | Lock_stolen - | Could_not_find_flowconfig - | Server_out_of_date - | Out_of_shared_memory - | Flowconfig_changed - | Server_client_directory_mismatch - | Commandline_usage_error - | No_input - | Server_start_failed of Unix.process_status - | Missing_flowlib - | Autostop - | Killed_by_monitor - | Invalid_saved_state - | Socket_error - | Dfind_died - | Dfind_unresponsive - | Unknown_error - -exception Exit_with of t - -val exit: ?msg:string -> t -> 'a -val error_code: t -> int -val error_type: int -> t -val to_string: t -> string -val set_json_mode: pretty:bool -> unit diff --git a/src/common/flow_lsp_conversions.ml b/src/common/flow_lsp_conversions.ml index 1cc5b9d055e..72d56c527ce 100644 --- a/src/common/flow_lsp_conversions.ml +++ b/src/common/flow_lsp_conversions.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
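A small sketch, not part of the patch, that ties together the make_next_files and get_all signatures from files.mli above: make_next_files builds a batching "next" function and get_all drains it into a set. The options and libs arguments are assumed to come from the caller's parsed flowconfig.

(* Sketch only: enumerate every file Flow would watch under the given root. *)
let all_flow_files ~(root : Path.t) ~(options : Files.options) ~(libs : SSet.t) : SSet.t =
  let next = Files.make_next_files ~root ~all:false ~subdir:None ~options ~libs in
  Files.get_all next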
@@ -7,323 +7,393 @@ module Ast = Flow_ast +let flow_position_to_lsp (line : int) (char : int) : Lsp.position = + Lsp.{ line = max 0 (line - 1); character = char } + +let lsp_position_to_flow (position : Lsp.position) : int * int = + Lsp.( + let line = position.line + 1 in + let char = position.character in + (line, char)) + +let lsp_position_to_flow_position p = + let (line, column) = lsp_position_to_flow p in + Loc.{ line; column } + +let lsp_range_to_flow_loc ?source (range : Lsp.range) = + Lsp. + { + Loc.source; + start = lsp_position_to_flow_position range.start; + _end = lsp_position_to_flow_position range.end_; + } + +let loc_to_lsp_range (loc : Loc.t) : Lsp.range = + Loc.( + let loc_start = loc.start in + let loc_end = loc._end in + let start = flow_position_to_lsp loc_start.line loc_start.column in + (* Flow's end range is inclusive, LSP's is exclusive. + * +1 for that, but -1 to make it 0-based *) + let end_ = flow_position_to_lsp loc_end.line loc_end.column in + { Lsp.start; end_ }) + let flow_completion_to_lsp - (item: ServerProt.Response.complete_autocomplete_result) - : Lsp.Completion.completionItem = - let open Lsp.Completion in - let open ServerProt.Response in - let trunc n s = if String.length s < n then s else (String.sub s 0 n) ^ "..." in - let trunc80 s = trunc 80 s in - let flow_params_to_string params = - let params = List.map (fun p -> p.param_name ^ ": " ^ p.param_ty) params in - "(" ^ (String.concat ", " params) ^ ")" - in - let kind, itemType, inlineDetail, detail = match item.func_details with - | Some func_details -> - let kind = Some Function in - let itemType = Some (trunc 30 func_details.return_ty) in - let inlineDetail = Some (trunc 40 (flow_params_to_string func_details.param_tys)) in - let detail = Some (trunc80 item.res_ty) in - kind, itemType, inlineDetail, detail - | None -> - let kind = None in - let itemType = None in - let inlineDetail = Some (trunc80 item.res_ty) in - let detail = Some (trunc80 item.res_ty) in - kind, itemType, inlineDetail, detail - in - { - label = item.res_name; - kind; - detail = detail; - inlineDetail; - itemType; - documentation = None; (* This will be filled in by completionItem/resolve. *) - sortText = None; - filterText = None; - insertText = None; - insertTextFormat = Some PlainText; - textEdits = []; - command = None; - data = None; - } - -let file_key_to_uri (file_key_opt: File_key.t option): (string, string) result = - let (>>|) = Core_result.(>>|) in - let (>>=) = Core_result.(>>=) in - Core_result.of_option file_key_opt ~error:"File_key is None" - >>= File_key.to_path - >>| File_url.create + (is_snippet_supported : bool) (item : ServerProt.Response.complete_autocomplete_result) : + Lsp.Completion.completionItem = + Lsp.Completion.( + ServerProt.Response.( + let trunc n s = + if String.length s < n then + s + else + String.sub s 0 n ^ "..." 
+ in + let trunc80 s = trunc 80 s in + let flow_params_to_string params = + let params = Core_list.map ~f:(fun p -> p.param_name ^ ": " ^ p.param_ty) params in + "(" ^ String.concat ", " params ^ ")" + in + let flow_params_to_lsp_snippet name params = + let params = + Core_list.mapi + ~f:(fun i p -> "${" ^ string_of_int (i + 1) ^ ":" ^ p.param_name ^ "}") + params + in + name ^ "(" ^ String.concat ", " params ^ ")" + in + let text_edit loc newText : Lsp.TextEdit.t = + { Lsp.TextEdit.range = loc_to_lsp_range loc; Lsp.TextEdit.newText } + in + let func_snippet item func_details = + let newText = flow_params_to_lsp_snippet item.res_name func_details.param_tys in + text_edit item.res_loc newText + in + let (itemType, inlineDetail, detail, insertTextFormat, textEdits) = + match item.func_details with + | Some func_details -> + let itemType = Some (trunc 30 func_details.return_ty) in + let inlineDetail = Some (trunc 40 (flow_params_to_string func_details.param_tys)) in + let (_ty_loc, ty) = item.res_ty in + let detail = Some (trunc80 ty) in + let (insertTextFormat, textEdits) = + match is_snippet_supported with + | true -> (Some SnippetFormat, [func_snippet item func_details]) + | false -> (Some PlainText, []) + in + (itemType, inlineDetail, detail, insertTextFormat, textEdits) + | None -> + let itemType = None in + let (_ty_loc, ty) = item.res_ty in + let inlineDetail = Some (trunc80 ty) in + let detail = inlineDetail in + let textEdits = [] in + (itemType, inlineDetail, detail, Some PlainText, textEdits) + in + { + label = item.res_name; + kind = item.res_kind; + detail; + inlineDetail; + itemType; + documentation = None; + (* This will be filled in by completionItem/resolve. *) + sortText = None; + filterText = None; + (* deprecated and should not be used *) + insertText = None; + insertTextFormat; + textEdits; + command = None; + data = None; + })) -let loc_to_lsp_range (loc: Loc.t): Lsp.range = - { Lsp. 
- start = { Lsp.line=loc.Loc.start.Loc.line-1; character=loc.Loc.start.Loc.column; }; - end_ = { Lsp.line=loc.Loc._end.Loc.line-1; character=loc.Loc._end.Loc.column; }; - } +let file_key_to_uri (file_key_opt : File_key.t option) : (string, string) result = + let ( >>| ) = Core_result.( >>| ) in + let ( >>= ) = Core_result.( >>= ) in + Core_result.of_option file_key_opt ~error:"File_key is None" + >>= File_key.to_path + >>| File_url.create -let loc_to_lsp (loc: Loc.t): (Lsp.Location.t, string) result = - let (>>|) = Core_result.(>>|) in - file_key_to_uri loc.Loc.source >>| fun uri -> { Lsp.Location.uri; range = loc_to_lsp_range loc; } +let loc_to_lsp (loc : Loc.t) : (Lsp.Location.t, string) result = + let ( >>| ) = Core_result.( >>| ) in + file_key_to_uri loc.Loc.source >>| (fun uri -> { Lsp.Location.uri; range = loc_to_lsp_range loc }) -let loc_to_lsp_with_default (loc: Loc.t) ~(default_uri: string): Lsp.Location.t = - let uri = match file_key_to_uri loc.Loc.source with +let loc_to_lsp_with_default (loc : Loc.t) ~(default_uri : string) : Lsp.Location.t = + let uri = + match file_key_to_uri loc.Loc.source with | Ok uri -> uri | Error _ -> default_uri in - { Lsp.Location.uri; range = loc_to_lsp_range loc; } - -let lsp_position_to_flow (position: Lsp.position): int * int = - let open Lsp in - let line = position.line + 1 in - let char = position.character - in - (line, char) + { Lsp.Location.uri; range = loc_to_lsp_range loc } -let flow_edit_to_textedit (edit: Loc.t * string): Lsp.TextEdit.t = - let loc, text = edit in +let flow_edit_to_textedit (edit : Loc.t * string) : Lsp.TextEdit.t = + let (loc, text) = edit in { Lsp.TextEdit.range = loc_to_lsp_range loc; newText = text } -let lsp_DocumentIdentifier_to_flow - (textDocument: Lsp.TextDocumentIdentifier.t) - ~(client: Persistent_connection.single_client) - : File_input.t = +let flow_loc_patch_to_lsp_edits (p : (Loc.t * string) list) : Lsp.TextEdit.t list = + let convert_edit (loc, text) = { Lsp.TextEdit.range = loc_to_lsp_range loc; newText = text } in + List.map convert_edit p + +(* ~, . and .. have no meaning in file urls so we don't canonicalize them *) +(* but symlinks must be canonicalized before being used in flow: *) +let lsp_DocumentIdentifier_to_flow_path textDocument = let fn = Lsp_helpers.lsp_textDocumentIdentifier_to_filename textDocument in - (* ~, . and .. 
have no meaning in file urls so we don't canonicalize them *) - (* but symlinks must be canonicalized before being used in flow: *) - let fn = Option.value (Sys_utils.realpath fn) ~default:fn in - let file = Persistent_connection.get_file client fn - in - file + Sys_utils.realpath fn |> Option.value ~default:fn -let lsp_DocumentPosition_to_flow - (params: Lsp.TextDocumentPositionParams.t) - ~(client: Persistent_connection.single_client) - : File_input.t * int * int = - let open Lsp.TextDocumentPositionParams in - let file = lsp_DocumentIdentifier_to_flow params.textDocument client in - let (line, char) = lsp_position_to_flow params.position - in - (file, line, char) +let lsp_DocumentIdentifier_to_flow + (textDocument : Lsp.TextDocumentIdentifier.t) ~(client : Persistent_connection.single_client) : + File_input.t = + lsp_DocumentIdentifier_to_flow_path textDocument |> Persistent_connection.get_file client +let lsp_DocumentPosition_to_flow + (params : Lsp.TextDocumentPositionParams.t) ~(client : Persistent_connection.single_client) : + File_input.t * int * int = + Lsp.TextDocumentPositionParams.( + let file = lsp_DocumentIdentifier_to_flow params.textDocument client in + let (line, char) = lsp_position_to_flow params.position in + (file, line, char)) +let lsp_textDocument_and_range_to_flow + ?(file_key_of_path = (fun p -> File_key.SourceFile p)) td range client = + let path = lsp_DocumentIdentifier_to_flow_path td in + let file_key = file_key_of_path path in + let file = Persistent_connection.get_file client path in + let loc = lsp_range_to_flow_loc ~source:file_key range in + (file_key, file, loc) module DocumentSymbols = struct - let name_of_key (key: (Loc.t, Loc.t) Ast.Expression.Object.Property.key) : string option = - let open Ast.Expression.Object.Property in - match key with - | Literal (_, { Ast.Literal.raw; _ }) -> Some raw - | Identifier (_, id) -> Some id - | PrivateName (_, (_, id)) -> Some id - | Computed (_, _) -> None - - let name_of_id ((_, id): Loc.t Ast.Identifier.t) : string = - id - - let name_of_id_opt (id_opt: Loc.t Ast.Identifier.t option) : string option = + let name_of_key (key : (Loc.t, Loc.t) Ast.Expression.Object.Property.key) : string option = + Ast.Expression.Object.Property.( + match key with + | Literal (_, { Ast.Literal.raw; _ }) -> Some raw + | Identifier (_, { Ast.Identifier.name = id; comments = _ }) -> Some id + | PrivateName (_, (_, { Ast.Identifier.name = id; comments = _ })) -> Some id + | Computed (_, _) -> None) + + let name_of_id ((_, { Ast.Identifier.name; comments = _ }) : (Loc.t, Loc.t) Ast.Identifier.t) : + string = + name + + let name_of_id_opt (id_opt : (Loc.t, Loc.t) Ast.Identifier.t option) : string option = Option.map id_opt ~f:name_of_id let ast_name - ~(uri: Lsp.documentUri) - ~(acc: Lsp.SymbolInformation.t list) - ~(loc: Loc.t) - ~(containerName: string option) - ~(name: string) - ~(kind: Lsp.SymbolInformation.symbolKind) - : Lsp.SymbolInformation.t list = - { Lsp.SymbolInformation. 
- name; + ~(uri : Lsp.documentUri) + ~(acc : Lsp.SymbolInformation.t list) + ~(loc : Loc.t) + ~(containerName : string option) + ~(name : string) + ~(kind : Lsp.SymbolInformation.symbolKind) : Lsp.SymbolInformation.t list = + { + Lsp.SymbolInformation.name; kind; - location = { Lsp.Location.uri; range = loc_to_lsp_range loc}; + location = { Lsp.Location.uri; range = loc_to_lsp_range loc }; containerName; - } :: acc + } + :: acc - let ast_name_opt ~uri ~containerName ~acc ~loc ~(name_opt: string option) ~kind = - Option.value_map name_opt ~default:acc - ~f:(fun name -> ast_name ~uri ~containerName ~acc ~loc ~name ~kind) + let ast_name_opt ~uri ~containerName ~acc ~loc ~(name_opt : string option) ~kind = + Option.value_map name_opt ~default:acc ~f:(fun name -> + ast_name ~uri ~containerName ~acc ~loc ~name ~kind) - let ast_key ~uri ~containerName ~acc ~loc ~(key:(Loc.t, Loc.t) Ast.Expression.Object.Property.key) ~kind = + let ast_key + ~uri + ~containerName + ~acc + ~loc + ~(key : (Loc.t, Loc.t) Ast.Expression.Object.Property.key) + ~kind = ast_name_opt ~uri ~containerName ~acc ~loc ~name_opt:(name_of_key key) ~kind - let ast_id ~uri ~containerName ~acc ~loc ~(id: Loc.t Ast.Identifier.t) ~kind = + let ast_id ~uri ~containerName ~acc ~loc ~(id : (Loc.t, Loc.t) Ast.Identifier.t) ~kind = ast_name ~uri ~containerName ~acc ~loc ~name:(name_of_id id) ~kind - let ast_id_opt ~uri ~containerName ~acc ~loc ~(id_opt: Loc.t Ast.Identifier.t option) ~kind = + let ast_id_opt + ~uri ~containerName ~acc ~loc ~(id_opt : (Loc.t, Loc.t) Ast.Identifier.t option) ~kind = ast_name_opt ~uri ~containerName ~acc ~loc ~name_opt:(name_of_id_opt id_opt) ~kind let ast_class_member - ~(uri: Lsp.documentUri) - ~(containerName: string option) - (acc: Lsp.SymbolInformation.t list) - (member: (Loc.t, Loc.t) Ast.Class.Body.element) - : Lsp.SymbolInformation.t list = - let open Ast.Class in - match member with - | Body.Method (loc, { Method.kind = Method.Constructor; key; _ }) -> - ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Constructor - | Body.Method (loc, { Method.kind = Method.Method; key; _ }) -> - ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Method - | Body.Method (loc, { Method.kind = Method.Get; key; _ }) -> - ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property - | Body.Method (loc, { Method.kind = Method.Set; key; _ }) -> - ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property - | Body.Property (loc, { Property.key; _ }) -> - ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property - | Body.PrivateField (loc, { PrivateField.key = (_, (_, name)); _ }) -> - ast_name ~uri ~containerName ~acc ~loc ~name ~kind:Lsp.SymbolInformation.Field + ~(uri : Lsp.documentUri) + ~(containerName : string option) + (acc : Lsp.SymbolInformation.t list) + (member : (Loc.t, Loc.t) Ast.Class.Body.element) : Lsp.SymbolInformation.t list = + Ast.Class.( + match member with + | Body.Method (loc, { Method.kind = Method.Constructor; key; _ }) -> + ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Constructor + | Body.Method (loc, { Method.kind = Method.Method; key; _ }) -> + ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Method + | Body.Method (loc, { Method.kind = Method.Get; key; _ }) -> + ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property + | Body.Method (loc, { Method.kind = Method.Set; key; _ }) -> + ast_key ~uri ~containerName ~acc ~loc 
~key ~kind:Lsp.SymbolInformation.Property + | Body.Property (loc, { Property.key; _ }) -> + ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property + | Body.PrivateField + (loc, { PrivateField.key = (_, (_, { Ast.Identifier.name; comments = _ })); _ }) -> + ast_name ~uri ~containerName ~acc ~loc ~name ~kind:Lsp.SymbolInformation.Field) let ast_class - ~(uri: Lsp.documentUri) - ~(containerName: string option) - ~(acc: Lsp.SymbolInformation.t list) - ~(loc: Loc.t) - ~(class_: (Loc.t, Loc.t) Ast.Class.t) - : Lsp.SymbolInformation.t list = - let open Ast.Class in - let acc = ast_id_opt ~uri ~containerName ~acc - ~loc ~id_opt:class_.id ~kind:Lsp.SymbolInformation.Class in - let containerName = name_of_id_opt class_.id in - let (_, body) = class_.body in - Core_list.fold body.Body.body ~init:acc ~f:(ast_class_member ~uri ~containerName) + ~(uri : Lsp.documentUri) + ~(containerName : string option) + ~(acc : Lsp.SymbolInformation.t list) + ~(loc : Loc.t) + ~(class_ : (Loc.t, Loc.t) Ast.Class.t) : Lsp.SymbolInformation.t list = + Ast.Class.( + let acc = + ast_id_opt + ~uri + ~containerName + ~acc + ~loc + ~id_opt:class_.id + ~kind:Lsp.SymbolInformation.Class + in + let containerName = name_of_id_opt class_.id in + let (_, body) = class_.body in + Core_list.fold body.Body.body ~init:acc ~f:(ast_class_member ~uri ~containerName)) let ast_type_object_property - ~(uri: Lsp.documentUri) - ~(containerName: string option) - (acc: Lsp.SymbolInformation.t list) - (property: (Loc.t, Loc.t) Ast.Type.Object.property) - : Lsp.SymbolInformation.t list = - let open Ast.Type.Object in - match property with - | Property (loc, { Property.key; _}) -> - ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property - | Indexer (loc, { Indexer.id; _}) -> - ast_id_opt ~uri ~containerName ~acc ~loc ~id_opt:id ~kind:Lsp.SymbolInformation.Property - | InternalSlot (loc, { InternalSlot.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Property - | _ -> acc + ~(uri : Lsp.documentUri) + ~(containerName : string option) + (acc : Lsp.SymbolInformation.t list) + (property : (Loc.t, Loc.t) Ast.Type.Object.property) : Lsp.SymbolInformation.t list = + Ast.Type.Object.( + match property with + | Property (loc, { Property.key; _ }) -> + ast_key ~uri ~containerName ~acc ~loc ~key ~kind:Lsp.SymbolInformation.Property + | Indexer (loc, { Indexer.id; _ }) -> + ast_id_opt ~uri ~containerName ~acc ~loc ~id_opt:id ~kind:Lsp.SymbolInformation.Property + | InternalSlot (loc, { InternalSlot.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Property + | _ -> acc) let ast_type_object - ~(uri: Lsp.documentUri) - ~(containerName: string option) - ~(acc: Lsp.SymbolInformation.t list) - ~(object_: (Loc.t, Loc.t) Ast.Type.Object.t) - : Lsp.SymbolInformation.t list = - let open Ast.Type.Object in - Core_list.fold object_.properties ~init:acc ~f:(ast_type_object_property ~uri ~containerName) + ~(uri : Lsp.documentUri) + ~(containerName : string option) + ~(acc : Lsp.SymbolInformation.t list) + ~(object_ : (Loc.t, Loc.t) Ast.Type.Object.t) : Lsp.SymbolInformation.t list = + Ast.Type.Object.( + Core_list.fold object_.properties ~init:acc ~f:(ast_type_object_property ~uri ~containerName)) let ast_type - ~(uri: Lsp.documentUri) - ~(containerName: string option) - ~(acc: Lsp.SymbolInformation.t list) - ~(type_: (Loc.t, Loc.t) Ast.Type.t') - : Lsp.SymbolInformation.t list = - let open Ast.Type in - match type_ with - | Object object_ -> - 
ast_type_object ~uri ~containerName ~acc ~object_ - | Interface {Interface.body=(_, object_); _} -> - ast_type_object ~uri ~containerName ~acc ~object_ - | _ -> acc + ~(uri : Lsp.documentUri) + ~(containerName : string option) + ~(acc : Lsp.SymbolInformation.t list) + ~(type_ : (Loc.t, Loc.t) Ast.Type.t') : Lsp.SymbolInformation.t list = + Ast.Type.( + match type_ with + | Object object_ -> ast_type_object ~uri ~containerName ~acc ~object_ + | Interface { Interface.body = (_, object_); _ } -> + ast_type_object ~uri ~containerName ~acc ~object_ + | _ -> acc) let ast_statement_declaration - ~(uri: Lsp.documentUri) - ~(containerName: string option) - ~(acc: Lsp.SymbolInformation.t list) - ~(declaration: (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.declaration) - : Lsp.SymbolInformation.t list = - let open Ast.Statement.DeclareExportDeclaration in - let open Ast.Statement in - match declaration with - | Variable (loc, {DeclareVariable.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Variable - | Function (loc, {DeclareFunction.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Function - | Class (loc, {DeclareClass.id; body=(_, object_); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in - ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ - | DefaultType (_, type_) -> - ast_type ~uri ~containerName ~acc ~type_ - | NamedType (loc, {TypeAlias.id; right=(_, type_); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in - ast_type ~uri ~containerName:(Some (name_of_id id)) ~acc ~type_ - | NamedOpaqueType (loc, {OpaqueType.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class - | Interface (loc, {Interface.id; body=(_, object_); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Interface in - ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ + ~(uri : Lsp.documentUri) + ~(containerName : string option) + ~(acc : Lsp.SymbolInformation.t list) + ~(declaration : (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.declaration) : + Lsp.SymbolInformation.t list = + Ast.Statement.DeclareExportDeclaration.( + Ast.Statement.( + match declaration with + | Variable (loc, { DeclareVariable.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Variable + | Function (loc, { DeclareFunction.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Function + | Class (loc, { DeclareClass.id; body = (_, object_); _ }) -> + let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in + ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ + | DefaultType (_, type_) -> ast_type ~uri ~containerName ~acc ~type_ + | NamedType (loc, { TypeAlias.id; right = (_, type_); _ }) -> + let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in + ast_type ~uri ~containerName:(Some (name_of_id id)) ~acc ~type_ + | NamedOpaqueType (loc, { OpaqueType.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class + | Interface (loc, { Interface.id; body = (_, object_); _ }) -> + let acc = + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Interface + in + ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_)) let ast_expression - ~(uri: Lsp.documentUri) - 
~(containerName: string option) - ~(acc: Lsp.SymbolInformation.t list) - ~(expression: (Loc.t, Loc.t) Ast.Expression.t) - : Lsp.SymbolInformation.t list = - let open Ast.Expression in - match expression with - | (loc, Class class_) -> ast_class ~uri ~containerName ~acc ~loc ~class_ - | (_, _) -> acc + ~(uri : Lsp.documentUri) + ~(containerName : string option) + ~(acc : Lsp.SymbolInformation.t list) + ~(expression : (Loc.t, Loc.t) Ast.Expression.t) : Lsp.SymbolInformation.t list = + Ast.Expression.( + match expression with + | (loc, Class class_) -> ast_class ~uri ~containerName ~acc ~loc ~class_ + | (_, _) -> acc) let rec ast_statement - ~(uri: Lsp.documentUri) - ~(containerName: string option) - (acc: Lsp.SymbolInformation.t list) - (statement: (Loc.t, Loc.t) Ast.Statement.t) - : Lsp.SymbolInformation.t list = - let open Ast.Statement in - match statement with - | (_, Expression {Expression.expression; _}) -> - ast_expression ~uri ~containerName ~acc ~expression - | (loc, FunctionDeclaration {Ast.Function.id; _}) -> - ast_id_opt ~uri ~containerName ~acc ~loc ~id_opt:id ~kind:Lsp.SymbolInformation.Function - | (loc, ClassDeclaration class_) -> - ast_class ~uri ~containerName ~acc ~loc ~class_ - | (loc, InterfaceDeclaration {Interface.id; body=(_, object_); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Interface in - ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ - | (_, ExportNamedDeclaration {ExportNamedDeclaration.declaration = Some stmt; _}) -> - ast_statement ~uri ~containerName acc stmt - | (_, ExportDefaultDeclaration { - ExportDefaultDeclaration.declaration = ExportDefaultDeclaration.Declaration stmt; _}) -> - ast_statement ~uri ~containerName acc stmt - | (loc, TypeAlias {TypeAlias.id; right=(_, type_); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in - ast_type ~uri ~containerName:(Some (name_of_id id)) ~acc ~type_ - | (loc, OpaqueType {OpaqueType.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class - | (_, VariableDeclaration {VariableDeclaration.declarations; kind}) -> - let kind = match kind with - | VariableDeclaration.Var -> Lsp.SymbolInformation.Variable - | VariableDeclaration.Let -> Lsp.SymbolInformation.Variable - | VariableDeclaration.Const -> Lsp.SymbolInformation.Constant in - let ast_pattern acc loc (_, pattern) = - let open Ast.Pattern in - match pattern with - | Identifier { Identifier.name; _ } -> ast_id ~uri ~containerName ~acc ~loc ~id:name ~kind - | _ -> acc in - let ast_declarator acc (loc, declarator) = - ast_pattern acc loc declarator.VariableDeclaration.Declarator.id in - Core_list.fold declarations ~init:acc ~f:ast_declarator - | (loc, DeclareClass {DeclareClass.id; body=(_, object_); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in - ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ - | (loc, DeclareFunction {DeclareFunction.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Function - | (loc, DeclareModule {DeclareModule.id=DeclareModule.Identifier id; - body=(_,{Block.body}); _}) -> - let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Module in - let containerName = Some (name_of_id id) in - Core_list.fold body ~init:acc ~f:(ast_statement ~uri ~containerName) - | (loc, DeclareVariable {DeclareVariable.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id 
~kind:Lsp.SymbolInformation.Variable - | (loc, DeclareOpaqueType {OpaqueType.id; _}) -> - ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class - | (_, DeclareExportDeclaration {DeclareExportDeclaration.declaration=Some declaration; _}) -> - ast_statement_declaration ~uri ~containerName ~acc ~declaration - | _ -> acc + ~(uri : Lsp.documentUri) + ~(containerName : string option) + (acc : Lsp.SymbolInformation.t list) + (statement : (Loc.t, Loc.t) Ast.Statement.t) : Lsp.SymbolInformation.t list = + Ast.Statement.( + match statement with + | (_, Expression { Expression.expression; _ }) -> + ast_expression ~uri ~containerName ~acc ~expression + | (loc, FunctionDeclaration { Ast.Function.id; _ }) -> + ast_id_opt ~uri ~containerName ~acc ~loc ~id_opt:id ~kind:Lsp.SymbolInformation.Function + | (loc, ClassDeclaration class_) -> ast_class ~uri ~containerName ~acc ~loc ~class_ + | (loc, InterfaceDeclaration { Interface.id; body = (_, object_); _ }) -> + let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Interface in + ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ + | (_, ExportNamedDeclaration { ExportNamedDeclaration.declaration = Some stmt; _ }) -> + ast_statement ~uri ~containerName acc stmt + | ( _, + ExportDefaultDeclaration + { ExportDefaultDeclaration.declaration = ExportDefaultDeclaration.Declaration stmt; _ } + ) -> + ast_statement ~uri ~containerName acc stmt + | (loc, TypeAlias { TypeAlias.id; right = (_, type_); _ }) -> + let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in + ast_type ~uri ~containerName:(Some (name_of_id id)) ~acc ~type_ + | (loc, OpaqueType { OpaqueType.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class + | (_, VariableDeclaration { VariableDeclaration.declarations; kind }) -> + let kind = + match kind with + | VariableDeclaration.Var -> Lsp.SymbolInformation.Variable + | VariableDeclaration.Let -> Lsp.SymbolInformation.Variable + | VariableDeclaration.Const -> Lsp.SymbolInformation.Constant + in + let ast_pattern acc loc (_, pattern) = + Ast.Pattern.( + match pattern with + | Identifier { Identifier.name; _ } -> + ast_id ~uri ~containerName ~acc ~loc ~id:name ~kind + | _ -> acc) + in + let ast_declarator acc (loc, declarator) = + ast_pattern acc loc declarator.VariableDeclaration.Declarator.id + in + Core_list.fold declarations ~init:acc ~f:ast_declarator + | (loc, DeclareClass { DeclareClass.id; body = (_, object_); _ }) -> + let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class in + ast_type_object ~uri ~containerName:(Some (name_of_id id)) ~acc ~object_ + | (loc, DeclareFunction { DeclareFunction.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Function + | ( loc, + DeclareModule + { DeclareModule.id = DeclareModule.Identifier id; body = (_, { Block.body }); _ } ) -> + let acc = ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Module in + let containerName = Some (name_of_id id) in + Core_list.fold body ~init:acc ~f:(ast_statement ~uri ~containerName) + | (loc, DeclareVariable { DeclareVariable.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Variable + | (loc, DeclareOpaqueType { OpaqueType.id; _ }) -> + ast_id ~uri ~containerName ~acc ~loc ~id ~kind:Lsp.SymbolInformation.Class + | (_, DeclareExportDeclaration { DeclareExportDeclaration.declaration = Some declaration; _ }) + -> + 
ast_statement_declaration ~uri ~containerName ~acc ~declaration + | _ -> acc) end -let flow_ast_to_lsp_symbols - ~(uri: Lsp.documentUri) - (program: (Loc.t, Loc.t) Ast.program) - : Lsp.SymbolInformation.t list = +let flow_ast_to_lsp_symbols ~(uri : Lsp.documentUri) (program : (Loc.t, Loc.t) Ast.program) : + Lsp.SymbolInformation.t list = let (_loc, statements, _comments) = program in Core_list.fold statements ~init:[] ~f:(DocumentSymbols.ast_statement ~uri ~containerName:None) diff --git a/src/common/flow_version.ml b/src/common/flow_version.ml index c487b5f7670..fa00f452b03 100644 --- a/src/common/flow_version.ml +++ b/src/common/flow_version.ml @@ -1,8 +1,8 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -let version = "0.82.0" +let version = "0.108.0" diff --git a/src/common/lints/dune b/src/common/lints/dune new file mode 100644 index 00000000000..d49ea19f66c --- /dev/null +++ b/src/common/lints/dune @@ -0,0 +1,10 @@ +(library + (name flow_common_lints) + (wrapped false) + (libraries + collections ; hack + flow_common_span + flow_common_utils + flow_common_utils_loc_utils + imported_core ; hack + )) diff --git a/src/common/lints/exactCover.ml b/src/common/lints/exactCover.ml index 3bdab97ebee..7f94097926b 100644 --- a/src/common/lints/exactCover.ml +++ b/src/common/lints/exactCover.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -15,87 +15,91 @@ exception Uncovered of string type 'a builder = (Loc.t * 'a) list let new_builder = - let open Loc in - let full_range source = - let start = {line = 0; column = 0; offset = 0} in - let _end = {line = max_int / 2; column = max_int / 2; offset = max_int / 2} in - {source; start; _end} - in fun source value -> [full_range (Some source), value] + Loc.( + let full_range source = + let start = { line = 0; column = 0 } in + let _end = { line = max_int / 2; column = max_int / 2 } in + { source; start; _end } + in + (fun source value -> [(full_range (Some source), value)])) (* Gets all ranges that intersect with the provided range. *) let get_intersecting = let rec get_intersecting' query acc = function | [] -> acc (* We've exhausted the list. *) - | candidate::tail -> + | candidate :: tail -> let cand_range = fst candidate in if Loc.(pos_cmp query._end cand_range.start) <= 0 then (* We still haven't reached the first intersecting range. *) get_intersecting' query acc tail else if Loc.(pos_cmp cand_range._end query.start) > 0 then (* The current range is intersecting. *) - get_intersecting' query (candidate::acc) tail + get_intersecting' query (candidate :: acc) tail else (* We've passed the last intersecting range. *) acc - in fun range builder -> - get_intersecting' range [] builder + in + (fun range builder -> get_intersecting' range [] builder) (* Adds the provided entry to the builder. (Assumes that there is no range in * the builder that overlaps with the provided entry.) 
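 * For intuition, writing a..b for a range spanning positions a..b of a single
 * file: adding an entry for 15..20 to a builder that already holds entries for
 * 30..40 and 1..10 yields the entries 30..40, 15..20, 1..10 in that order, so
 * the list stays pairwise disjoint and sorted by decreasing position. The
 * concrete positions here are made up purely for illustration.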
*) let add = let rec add' entry visited = function - | head::tail as builder -> + | head :: tail as builder -> if Loc.compare (fst entry) (fst head) > 0 then (* We found the place the entry is supposed to go *) - List.rev_append visited (entry::builder) + List.rev_append visited (entry :: builder) else (* We need to keep going *) - add' entry (head::visited) tail + add' entry (head :: visited) tail | [] -> List.rev_append visited [entry] in - fun entry builder -> - add' entry [] builder + (fun entry builder -> add' entry [] builder) (* Removes the provided entry from the builder. (Assumes that the provided entry * exists in the builder.) *) let remove = let rec remove' entry visited = function - | head::tail -> + | head :: tail -> (* '==' is acceptable because this is only called on ranges that have been * taken out of the builder itself. *) - if head == entry then List.rev_append visited tail - else remove' entry (head::visited) tail - | [] -> Utils_js.assert_false - "Remove is only called on entries that exist in the builder." - in fun entry builder -> remove' entry [] builder + if head == entry then + List.rev_append visited tail + else + remove' entry (head :: visited) tail + | [] -> Utils_js.assert_false "Remove is only called on entries that exist in the builder." + in + (fun entry builder -> remove' entry [] builder) (* Takes two overlapping ranges, old_range and new_range, and returns a tuple * (intersection, remaining), where intersection is the intersection of * old_range and new_range, and remaining is a list of 0-2 ranges that, when * unioned with intersection, form old_range. *) let get_overlap ~old_range ~new_range = - let open Loc in - let source = new_range.source in - let start, remaining = - if pos_cmp old_range.start new_range.start < 0 then - new_range.start, [{source; start = old_range.start; _end = new_range.start}] - else old_range.start, [] - in - let _end, remaining = - if pos_cmp old_range._end new_range._end > 0 then - new_range._end, {source; start = new_range._end; _end = old_range._end}::remaining - else old_range._end, remaining - in - {source; start; _end}, remaining + Loc.( + let source = new_range.source in + let (start, remaining) = + if pos_cmp old_range.start new_range.start < 0 then + (new_range.start, [{ source; start = old_range.start; _end = new_range.start }]) + else + (old_range.start, []) + in + let (_end, remaining) = + if pos_cmp old_range._end new_range._end > 0 then + (new_range._end, { source; start = new_range._end; _end = old_range._end } :: remaining) + else + (old_range._end, remaining) + in + ({ source; start; _end }, remaining)) (* Given an entry in a builder, a range to do a modification in the builder, a * modification function, and the builder, returns a builder with the range * covered by the entry updated according to the modification range and * modification function. 
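 * A rough illustration, writing a..b for a range from position a to b in one
 * file: if the builder holds a single entry e covering 0..100 with value v,
 * then update_entry e 40..60 f builder leaves three disjoint entries behind,
 * namely 60..100 with v, 40..60 with f v, and 0..40 with v, in the descending
 * position order the builder maintains. The numbers are hypothetical.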
*) let update_entry ((old_range, old_value) as original) new_range map_fun builder = - let overlap, remaining_ranges = get_overlap ~old_range ~new_range in + let (overlap, remaining_ranges) = get_overlap ~old_range ~new_range in let new_overlap = (overlap, map_fun old_value) in - let new_remaining = List.map (fun loc -> (loc, old_value)) remaining_ranges in + let new_remaining = Core_list.map ~f:(fun loc -> (loc, old_value)) remaining_ranges in let builder = builder |> remove original |> add new_overlap in List.fold_left (Fn.flip add) builder new_remaining @@ -106,53 +110,51 @@ let update_range range map_fun builder = let original_intersecting = get_intersecting range builder in List.fold_left (fun builder original -> update_entry original range map_fun builder) - builder original_intersecting + builder + original_intersecting let update_settings = let map_fun setting_list old_settings = let setting_list = - List.map (fun (kind, (state, loc)) -> (kind, (state, Some loc))) setting_list + Core_list.map ~f:(fun (kind, (state, loc)) -> (kind, (state, Some loc))) setting_list in LintSettings.set_all setting_list old_settings in - fun range setting_list builder -> - update_range range (map_fun setting_list) builder + (fun range setting_list builder -> update_range range (map_fun setting_list) builder) let update_settings_and_running = let update_settings_and_error err_fun settings settings_list = - match settings_list with - | (_, (_, loc))::_ -> - let (new_settings, all_redundant) = List.fold_left - (fun (settings, all_redundant) (kind, (state, loc)) -> - let this_redundant = LintSettings.get_value kind settings = state in - (* Still do set_state to update the location, otherwise it's - * reported that the results of the argument get overwritten. *) - let new_settings = LintSettings.set_value kind (state, Some loc) settings in - (new_settings, all_redundant && this_redundant)) - (settings, true) settings_list - in - if all_redundant then - err_fun (loc, LintSettings.Redundant_argument); - new_settings - | [] -> settings + match settings_list with + | (_, (_, loc)) :: _ -> + let (new_settings, all_redundant) = + List.fold_left + (fun (settings, all_redundant) (kind, (state, loc)) -> + let this_redundant = LintSettings.get_value kind settings = state in + (* Still do set_state to update the location, otherwise it's + * reported that the results of the argument get overwritten. 
*) + let new_settings = LintSettings.set_value kind (state, Some loc) settings in + (new_settings, all_redundant && this_redundant)) + (settings, true) + settings_list + in + if all_redundant then err_fun (loc, LintSettings.Redundant_argument); + new_settings + | [] -> settings in - let update_settings_and_error_from_list err_fun settings_list_list settings = - List.fold_left (update_settings_and_error err_fun) - settings settings_list_list + List.fold_left (update_settings_and_error err_fun) settings settings_list_list in - fun running_settings err_fun range settings_list_list builder -> let flat_settings_list = List.flatten settings_list_list in let updated_builder = update_settings range flat_settings_list builder in let updated_running_settings = - update_settings_and_error_from_list err_fun settings_list_list running_settings in + update_settings_and_error_from_list err_fun settings_list_list running_settings + in (updated_builder, updated_running_settings) let bake builder = List.fold_left (fun map (loc, value) -> SpanMap.add loc value map) SpanMap.empty builder - (* Supports O(log(n)) queries to get the value associated with a loc. *) type 'a t = 'a SpanMap.t @@ -164,7 +166,11 @@ let file_cover source value = new_builder source value |> bake let find loc cover = let first_char = Loc.first_char loc in try SpanMap.find_unsafe first_char cover - with Not_found -> raise (Uncovered (Loc.to_string ~include_source:true loc)) + with Not_found -> raise (Uncovered (Loc.debug_to_string ~include_source:true loc)) + +let find_opt loc cover = + let first_char = Loc.first_char loc in + SpanMap.get first_char cover (* `severity LintSettings.t`-specific functions *) @@ -173,21 +179,24 @@ type lint_severity_cover = Severity.severity LintSettings.t t let default_file_cover source = file_cover source LintSettings.empty_severities let get_severity lint_kind loc severity_cover = -find loc severity_cover |> LintSettings.get_value lint_kind + find loc severity_cover |> LintSettings.get_value lint_kind let is_suppressed lint_kind loc severity_cover = -find loc severity_cover |> LintSettings.is_suppressed lint_kind + find loc severity_cover |> LintSettings.is_suppressed lint_kind let is_explicit lint_kind loc severity_cover = -find loc severity_cover |> LintSettings.is_explicit lint_kind + find loc severity_cover |> LintSettings.is_explicit lint_kind let to_string settings = - let loc_to_str = Loc.to_string ~include_source:true in + let loc_to_str = Loc.debug_to_string ~include_source:true in let acc = Buffer.create 100 in - let () = SpanMap.iter (fun loc settings -> - Buffer.add_string acc (Printf.sprintf "%s: %s\n" - (loc_to_str loc) (LintSettings.to_string settings))) - settings + let () = + SpanMap.iter + (fun loc settings -> + Buffer.add_string + acc + (Printf.sprintf "%s: %s\n" (loc_to_str loc) (LintSettings.to_string settings))) + settings in (* Strip the trailing newline. *) Buffer.sub acc 0 (Buffer.length acc - 1) diff --git a/src/common/lints/exactCover.mli b/src/common/lints/exactCover.mli index 46eff4ff743..f33e10b2a8e 100644 --- a/src/common/lints/exactCover.mli +++ b/src/common/lints/exactCover.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -21,17 +21,20 @@ open Lints open Severity +exception Uncovered of string + (* Supports O(log(n)) queries to get the value associated with a loc. 
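 * For orientation, a minimal sketch of the intended build-then-query flow,
 * where file stands for a File_key.t, loc for a Loc.t inside that file, and
 * the lint kind is arbitrary; these bindings are illustrative placeholders:
 *
 *   let cover =
 *     new_builder file LintSettings.empty_severities
 *     |> update_range loc (LintSettings.set_value Lints.UnclearType (Severity.Err, Some loc))
 *     |> bake
 *   in
 *   find_opt loc cover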
*) type 'a t (* Given a filename and a value, generate a cover associating that value with that entire file. *) -val file_cover: File_key.t -> 'a -> 'a t +val file_cover : File_key.t -> 'a -> 'a t + (* Gets the value associated with a certain location in the code. To resolve * ambiguity, this looks at the location of the first character in the provided * location. Errors if queried for a file not contained in this cover. *) -val find: Loc.t -> 'a t -> 'a -(* Combines two disjoint covers. Behavior is undefined if the provided covers - * aren't disjoint. *) +val find : Loc.t -> 'a t -> 'a + +val find_opt : Loc.t -> 'a t -> 'a option (* Supports O(j*(j+k)) operations to modify a range of a cover being constructed, * where j is the number of ranges in the builder intersecting the range being @@ -46,9 +49,11 @@ val find: Loc.t -> 'a t -> 'a * file. If a cover for multiple files is needed, construct and bake a cover for * each file and use the union functions to combine them. *) type 'a builder + (* Create a new builder for the provided file. The resultant builder is an exact * cover with a single range associating the whole file with the provided value. *) -val new_builder: File_key.t -> 'a -> 'a builder +val new_builder : File_key.t -> 'a -> 'a builder + (* Change the value in the provided range by applying the provided mapping * function. Ranges in the builder that are completely contained in the provided * range have their values replaced using the mapping function. Ranges in the @@ -59,17 +64,22 @@ val new_builder: File_key.t -> 'a -> 'a builder * completely contained within a range in the builder, the range in the builder * is split into three ranges: one that matches the provided range and two that * have no overlap wth the provided range. *) -val update_range: - Loc.t -> ('a -> 'a) -> 'a builder -> 'a builder +val update_range : Loc.t -> ('a -> 'a) -> 'a builder -> 'a builder (* Change the settings in the provided range by adding the provided settings list. * In the settings list, the kind is the type of lint, the value is the value to set to, * and the location is the position of the setting in the source code. *) -val update_settings: - Loc.t -> (* Range to operate on *) - (lint_kind * ('a * Loc.t)) list -> (* List of settings to add *) - 'a LintSettings.t builder -> (* Builder to work from *) - 'a LintSettings.t builder (* Resultant builder *) +val update_settings : + Loc.t -> + (* Range to operate on *) + (lint_kind * ('a * Loc.t)) list -> + (* List of settings to add *) + 'a LintSettings.t builder -> + (* Builder to work from *) + 'a LintSettings.t builder + +(* Resultant builder *) + (* Works similarly to update_settings, but takes two additional parameters: a running * LintSettings object and an error handling function. The LintSettings object is updated with * the new lint settings (in addition to the builder being updated), and if any redundant @@ -79,17 +89,22 @@ val update_settings: (* This function only checks for settings that are redundant because they don't change * anything. It doesn't check for settings that are redundant because they are * immediately overwritten. (That's done elsewhere.) 
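 * For example, if the running lint settings already map sketchy-null-bool to
 * Err, an argument that sets sketchy-null-bool to Err again does not change
 * anything, so the error handling function receives
 * LintSettings.Redundant_argument at that argument's location, while the
 * builder and the recorded locations are still updated as usual.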
*) -val update_settings_and_running: - 'a LintSettings.t -> (* Running lint settings *) - (LintSettings.lint_parse_error -> unit) -> (* Parse error handler *) - Loc.t -> (* Range to operate on *) - (lint_kind * ('a * Loc.t)) list list -> (* Unflattened list of settings to add *) - 'a LintSettings.t builder -> (* Builder to work from *) - 'a LintSettings.t builder (* Resultant builder *) - * 'a LintSettings.t (* Resultant running lint settings *) +val update_settings_and_running : + 'a LintSettings.t -> + ((* Running lint settings *) + LintSettings.lint_parse_error -> unit) -> + (* Parse error handler *) + Loc.t -> + (* Range to operate on *) + (lint_kind * ('a * Loc.t)) list list -> + (* Unflattened list of settings to add *) + 'a LintSettings.t builder -> + (* Builder to work from *) + 'a LintSettings.t builder (* Resultant builder *) * 'a LintSettings.t -val bake: 'a builder -> 'a t +(* Resultant running lint settings *) +val bake : 'a builder -> 'a t (* `severity LintSettings.t`-specific functions *) @@ -97,16 +112,20 @@ type lint_severity_cover = severity LintSettings.t t (* Given a filename, generate a cover that applies the default lint severities * across the entire file. *) -val default_file_cover: File_key.t -> lint_severity_cover +val default_file_cover : File_key.t -> lint_severity_cover + (* Gets the severity of the provided lint kind at the provided location. Errors * if queried for a file not contained in this cover. *) -val get_severity: lint_kind -> Loc.t -> lint_severity_cover -> severity +val get_severity : lint_kind -> Loc.t -> lint_severity_cover -> severity + (* True iff the provided lint kind has severity `Off` at the provided location. * Errors if queried for a file not contained in this cover. *) -val is_suppressed: lint_kind -> Loc.t -> lint_severity_cover -> bool +val is_suppressed : lint_kind -> Loc.t -> lint_severity_cover -> bool + (* True iff the severity for the provided lint kind has been explicitly set at * the provided location. Errors if queried for a file not contained in this * cover. *) -val is_explicit: lint_kind -> Loc.t -> lint_severity_cover -> bool +val is_explicit : lint_kind -> Loc.t -> lint_severity_cover -> bool + (* Intended for debugging purposes. *) val to_string : lint_severity_cover -> string diff --git a/src/common/lints/lintSettings.ml b/src/common/lints/lintSettings.ml index f0a8256b350..cdd5c195d4f 100644 --- a/src/common/lints/lintSettings.ml +++ b/src/common/lints/lintSettings.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
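(* A short usage sketch for the ExactCover severity helpers declared just above;
 * file and loc are placeholders for a File_key.t and a Loc.t inside that file,
 * not names taken from this patch. *)
let _example_query_cover file loc =
  let cover = ExactCover.default_file_cover file in
  ( ExactCover.get_severity Lints.UnclearType loc cover,
    ExactCover.is_suppressed Lints.UnclearType loc cover )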
@@ -7,8 +7,9 @@ open Lints open Severity +open Utils_js -let (>>=) = Core_result.bind +let ( >>= ) = Core_result.bind type 'a t = { (* The default value associated with a lint if the lint kind isn't found in the map *) @@ -19,14 +20,27 @@ type 'a t = { explicit_values: ('a * Loc.t option) LintMap.t; } -let of_default default_value = { - default_value; - explicit_values = LintMap.empty -} +let default_lint_severities = [(Lints.DeprecatedUtility, (Severity.Err, None))] + +let ignored_by_all = + [ + Lints.DynamicExport; + Lints.DeprecatedUtility; + Lints.ImplicitInexactObject; + Lints.UninitializedInstanceProperty; + ] + +let config_default = + Core_list.Assoc.find default_lint_severities ~equal:( = ) + %> Option.value ~default:(Severity.Off, None) + +let of_default default_value = + let explicit_values = LintMap.of_function ignored_by_all config_default in + { default_value; explicit_values } let set_value key value settings = - let new_map = LintMap.add key value settings.explicit_values - in {settings with explicit_values = new_map} + let new_map = LintMap.add key value settings.explicit_values in + { settings with explicit_values = new_map } let set_all entries settings = List.fold_left (fun settings (key, value) -> set_value key value settings) settings entries @@ -38,165 +52,176 @@ let get_value lint_kind settings = |> Option.value_map ~f:fst ~default:settings.default_value let get_loc lint_kind settings = - LintMap.get lint_kind settings.explicit_values - |> Option.value_map ~f:snd ~default:None + LintMap.get lint_kind settings.explicit_values |> Option.value_map ~f:snd ~default:None -let is_explicit lint_kind settings = - LintMap.mem lint_kind settings.explicit_values +let is_explicit lint_kind settings = LintMap.mem lint_kind settings.explicit_values (* Iterate over all lint kinds with an explicit value *) -let iter f settings = - LintMap.iter f settings.explicit_values +let iter f settings = LintMap.iter f settings.explicit_values (* Fold over all lint kinds with an explicit value *) -let fold f settings acc = - LintMap.fold f settings.explicit_values acc +let fold f settings acc = LintMap.fold f settings.explicit_values acc (* Map over all lint kinds with an explicit value *) let map f settings = let new_explicit = LintMap.map f settings.explicit_values in - {settings with explicit_values = new_explicit} + { settings with explicit_values = new_explicit } (* SEVERITY-SPECIFIC FUNCTIONS *) -let empty_severities = { - default_value = Off; - explicit_values = LintMap.empty; -} +let empty_severities = { default_value = Off; explicit_values = LintMap.empty } let is_enabled lint_kind settings = match get_value lint_kind settings with - | Err | Warn -> true + | Err + | Warn -> + true | Off -> false -let is_suppressed lint_kind settings = - is_enabled lint_kind settings |> not +let is_suppressed lint_kind settings = is_enabled lint_kind settings |> not type parse_result = -| AllSetting of severity t -| EntryList of lint_kind list * (severity * Loc.t option) + | AllSetting of severity t + | EntryList of lint_kind list * (severity * Loc.t option) (* Takes a base LintSettings and a list of labeled lines and returns the corresponding * severity LintSettings.t from applying the new lines on top of the base settings if * successful. Otherwise, returns an error message along with the label of the * line it failed on. 
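 * For example, a lints configuration block along the lines of
 *
 *   all=warn
 *   sketchy-null=error
 *   untyped-import=off
 *
 * reaches this function as [ (1, "all=warn"); (2, "sketchy-null=error");
 * (3, "untyped-import=off") ] and, applied on top of empty_severities, makes
 * Warn the default severity, the sketchy-null family Err, and untyped-import
 * Off, modulo the ignored_by_all kinds pinned above. The int labels only
 * identify the offending line if parsing fails.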
*) let of_lines base_settings = - let parse_value label value = match severity_of_string value with - | Some severity -> Ok severity - | None -> Error (label, - "Invalid setting encountered. Valid settings are error, warn, and off.") + | Some severity -> Ok severity + | None -> Error (label, "Invalid setting encountered. Valid settings are error, warn, and off.") in - let eq_regex = Str.regexp "=" in let all_regex = Str.regexp "all" in - let parse_line (loc, (label, line)) = match Str.split_delim eq_regex line with | [left; right] -> let left = left |> String.trim in let right = right |> String.trim in - parse_value label right >>= fun value -> - begin match left with + parse_value label right + >>= fun value -> + begin + match left with | "all" -> Ok (AllSetting (of_default value)) | _ -> - begin match kinds_of_string left with + begin + match kinds_of_string left with | Some kinds -> Ok (EntryList (kinds, (value, Some loc))) - | None -> Error (label, (Printf.sprintf "Invalid lint rule \"%s\" encountered." left)) + | None -> Error (label, Printf.sprintf "Invalid lint rule \"%s\" encountered." left) end - end - | _ -> Error (label, - "Malformed lint rule. Properly formed rules contain a single '=' character.") + end + | _ -> + Error (label, "Malformed lint rule. Properly formed rules contain a single '=' character.") in - let add_value keys value settings = - let (new_settings, all_redundant) = List.fold_left (fun (settings, all_redundant) key -> - let all_redundant = all_redundant && get_value key settings = fst value in - let settings = set_value key value settings in - (settings, all_redundant)) - (settings, true) keys + let (new_settings, all_redundant) = + List.fold_left + (fun (settings, all_redundant) key -> + let v = get_value key settings in + let all_redundant = all_redundant && v = fst value && v <> fst (config_default key) in + let settings = set_value key value settings in + (settings, all_redundant)) + (settings, true) + keys in - if all_redundant - then Error "Redundant argument. This argument doesn't change any lint settings." - else Ok new_settings + if all_redundant then + Error "Redundant argument. This argument doesn't change any lint settings." + else + Ok new_settings in - let rec loop acc = function | [] -> Ok acc - | line::lines -> - parse_line line >>= fun result -> - begin match result with + | line :: lines -> + parse_line line + >>= fun result -> + begin + match result with | EntryList (keys, value) -> - begin match add_value keys value acc with + begin + match add_value keys value acc with | Ok settings -> loop settings lines | Error msg -> Error (line |> snd |> fst, msg) end | AllSetting value -> - if acc == base_settings then loop value lines - else Error (line |> snd |> fst, - "\"all\" is only allowed as the first setting. Settings are order-sensitive.") - end + if acc == base_settings then + loop value lines + else + Error + ( line |> snd |> fst, + "\"all\" is only allowed as the first setting. Settings are order-sensitive." 
) + end in - let loc_of_line line = - let open Loc in - let start = {line; column = 0; offset = 0} in - let _end = {line = line + 1; column = 0; offset = 0} in - {source = None; start; _end} + Loc.( + let start = { line; column = 0 } in + let _end = { line = line + 1; column = 0 } in + { source = None; start; _end }) in - fun lint_lines -> let locate_fun = let index = ref 0 in fun item -> let res = (loc_of_line !index, item) in - index := !index + 1; res + index := !index + 1; + res in - (* Artificially locate the lines to detect unused lines *) - let located_lines = List.map locate_fun lint_lines in + let located_lines = Core_list.map ~f:locate_fun lint_lines in let settings = loop base_settings located_lines in - - settings >>= (fun settings -> - let used_locs = fold - (fun _kind (_enabled, loc) acc -> - Option.value_map loc ~f:(fun loc -> Utils_js.LocSet.add loc acc) ~default:acc) - settings Utils_js.LocSet.empty - in - let first_unused = List.fold_left - (fun acc (art_loc, (label, line)) -> - match acc with - | Some _ -> acc - | None -> - if Utils_js.LocSet.mem art_loc used_locs - || Str.string_match all_regex (String.trim line) 0 - then None else Some label - ) None located_lines - in - match first_unused with - | Some label -> Error (label, "Redundant argument. " - ^ "The values set by this argument are completely overwritten.") + settings + >>= fun settings -> + let used_locs = + fold + (fun _kind (_enabled, loc) acc -> + Option.value_map loc ~f:(fun loc -> Loc_collections.LocSet.add loc acc) ~default:acc) + settings + Loc_collections.LocSet.empty + in + let first_unused = + List.fold_left + (fun acc (art_loc, (label, line)) -> + match acc with + | Some _ -> acc | None -> - (* Remove the artificial locations before returning the result *) - Ok (map (fun (enabled, _loc) -> (enabled, None)) settings) - ) + if + Loc_collections.LocSet.mem art_loc used_locs + || Str.string_match all_regex (String.trim line) 0 + then + None + else + Some label) + None + located_lines + in + match first_unused with + | Some label -> + Error + ( label, + "Redundant argument. " ^ "The values set by this argument are completely overwritten." ) + | None -> + (* Remove the artificial locations before returning the result *) + Ok (map (fun (enabled, _loc) -> (enabled, None)) settings) let to_string settings = let acc = Buffer.create 20 in Buffer.add_string acc (Printf.sprintf "all=%s" (settings |> get_default |> string_of_severity)); - iter (fun kind (severity, _) -> - Buffer.add_string acc (Printf.sprintf ", %s=%s" - (string_of_kind kind) (string_of_severity severity))) + iter + (fun kind (severity, _) -> + Buffer.add_string + acc + (Printf.sprintf ", %s=%s" (string_of_kind kind) (string_of_severity severity))) settings; Buffer.contents acc type lint_parse_error_kind = -| Invalid_setting -| Malformed_argument -| Naked_comment -| Nonexistent_rule -| Overwritten_argument -| Redundant_argument + | Invalid_setting + | Malformed_argument + | Naked_comment + | Nonexistent_rule + | Overwritten_argument + | Redundant_argument type lint_parse_error = Loc.t * lint_parse_error_kind diff --git a/src/common/lints/lintSettings.mli b/src/common/lints/lintSettings.mli index 2259562ba2e..0258f94752c 100644 --- a/src/common/lints/lintSettings.mli +++ b/src/common/lints/lintSettings.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
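(* A tiny sketch of the LintSettings interface specified below; the bindings are
 * purely illustrative and not part of this patch. *)
let _example_settings =
  LintSettings.empty_severities
  |> LintSettings.set_value Lints.UnclearType (Severity.Err, None)
  |> LintSettings.set_value (Lints.SketchyNull Lints.SketchyNullBool) (Severity.Warn, None)

(* LintSettings.to_string then renders such a value as the default first and one
 * kind=severity pair per explicitly set lint, e.g. roughly
 * all=off, sketchy-null-bool=warn, unclear-type=error *)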
@@ -10,45 +10,56 @@ open Severity type 'a t -val of_default: 'a -> 'a t +val of_default : severity -> severity t -val set_value: lint_kind -> ('a * Loc.t option) -> 'a t -> 'a t +val set_value : lint_kind -> 'a * Loc.t option -> 'a t -> 'a t -val set_all: (lint_kind * ('a * Loc.t option)) list -> 'a t -> 'a t +val set_all : (lint_kind * ('a * Loc.t option)) list -> 'a t -> 'a t + +val get_default : 'a t -> 'a -val get_default: 'a t -> 'a (* Get the state of a lint kind in the provided settings *) -val get_value: lint_kind -> 'a t -> 'a +val get_value : lint_kind -> 'a t -> 'a + (* True iff the severity for the provided lint has been explicitly set *) -val is_explicit: lint_kind -> 'a t -> bool +val is_explicit : lint_kind -> 'a t -> bool + (* Get the location of the comment that set the value for a lint kind, or none if * the active value was not set by a comment *) -val get_loc: lint_kind -> 'a t -> Loc.t option +val get_loc : lint_kind -> 'a t -> Loc.t option + (* Iterate over all lint kinds with an explicit value *) -val iter: (lint_kind -> 'a * Loc.t option -> unit) -> 'a t -> unit +val iter : (lint_kind -> 'a * Loc.t option -> unit) -> 'a t -> unit + (* Fold over all lint kinds with an explicit value *) -val fold: (lint_kind -> 'a * Loc.t option -> 'b -> 'b) -> 'a t -> 'b -> 'b +val fold : (lint_kind -> 'a * Loc.t option -> 'b -> 'b) -> 'a t -> 'b -> 'b + (* Map over all lint kinds with an explicit value *) -val map: ('a * Loc.t option -> 'a * Loc.t option) -> 'a t -> 'a t +val map : ('a * Loc.t option -> 'a * Loc.t option) -> 'a t -> 'a t + +val default_lint_severities : (lint_kind * (severity * 'a option)) list (* SEVERITY-SPECIFIC FUNCTIONS *) -val empty_severities: severity t +val empty_severities : severity t + (* True iff get_state returns Warn or Err, false otherwise *) -val is_enabled: lint_kind -> severity t -> bool +val is_enabled : lint_kind -> severity t -> bool + (* Always the logical opposite of is_enabled *) -val is_suppressed: lint_kind -> severity t -> bool +val is_suppressed : lint_kind -> severity t -> bool + +val of_lines : severity t -> (int * string) list -> (severity t, int * string) result -val of_lines: severity t -> (int * string) list -> (severity t, int * string) result (* Intended for debugging purposes. *) -val to_string: severity t -> string +val to_string : severity t -> string type lint_parse_error_kind = -| Invalid_setting -| Malformed_argument -| Naked_comment -| Nonexistent_rule -| Overwritten_argument -| Redundant_argument + | Invalid_setting + | Malformed_argument + | Naked_comment + | Nonexistent_rule + | Overwritten_argument + | Redundant_argument type lint_parse_error = Loc.t * lint_parse_error_kind diff --git a/src/common/lints/lints.ml b/src/common/lints/lints.ml index f5ef8b1a575..1eee3017313 100644 --- a/src/common/lints/lints.ml +++ b/src/common/lints/lints.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -11,8 +11,14 @@ type sketchy_null_kind = | SketchyNullNumber | SketchyNullMixed -type sketchy_number_kind = - | SketchyNumberAnd +type sketchy_number_kind = SketchyNumberAnd + +type property_assignment_kind = + | PropertyNotDefinitelyInitialized + | ReadFromUninitializedProperty + | MethodCallBeforeEverythingInitialized + | ThisBeforeEverythingInitialized + | PropertyFunctionCallBeforeEverythingInitialized type lint_kind = | SketchyNull of sketchy_null_kind @@ -22,11 +28,16 @@ type lint_kind = | NonstrictImport | UnclearType | DeprecatedType + | DeprecatedUtility + | DynamicExport | UnsafeGettersSetters | InexactSpread | UnnecessaryOptionalChain | UnnecessaryInvariant - | DeprecatedCallSyntax + | SignatureVerificationFailure + | ImplicitInexactObject + | UninitializedInstanceProperty + | NonArraySpread let string_of_sketchy_null_kind = function | SketchyNullBool -> "sketchy-null-bool" @@ -45,43 +56,54 @@ let string_of_kind = function | NonstrictImport -> "nonstrict-import" | UnclearType -> "unclear-type" | DeprecatedType -> "deprecated-type" + | DeprecatedUtility -> "deprecated-utility" + | DynamicExport -> "dynamic-export" | UnsafeGettersSetters -> "unsafe-getters-setters" | InexactSpread -> "inexact-spread" | UnnecessaryOptionalChain -> "unnecessary-optional-chain" | UnnecessaryInvariant -> "unnecessary-invariant" - | DeprecatedCallSyntax -> "deprecated-call-syntax" + | SignatureVerificationFailure -> "signature-verification-failure" + | ImplicitInexactObject -> "implicit-inexact-object" + | UninitializedInstanceProperty -> "uninitialized-instance-property" + | NonArraySpread -> "non-array-spread" let kinds_of_string = function - | "sketchy-null" -> Some [ - SketchyNull SketchyNullBool; - SketchyNull SketchyNullString; - SketchyNull SketchyNullNumber; - SketchyNull SketchyNullMixed; - ] + | "sketchy-null" -> + Some + [ + SketchyNull SketchyNullBool; + SketchyNull SketchyNullString; + SketchyNull SketchyNullNumber; + SketchyNull SketchyNullMixed; + ] | "sketchy-null-bool" -> Some [SketchyNull SketchyNullBool] | "sketchy-null-string" -> Some [SketchyNull SketchyNullString] | "sketchy-null-number" -> Some [SketchyNull SketchyNullNumber] | "sketchy-null-mixed" -> Some [SketchyNull SketchyNullMixed] - | "sketchy-number" -> Some [ - SketchyNumber SketchyNumberAnd; - ] + | "sketchy-number" -> Some [SketchyNumber SketchyNumberAnd] | "sketchy-number-and" -> Some [SketchyNumber SketchyNumberAnd] | "untyped-type-import" -> Some [UntypedTypeImport] | "nonstrict-import" -> Some [NonstrictImport] | "untyped-import" -> Some [UntypedImport] | "unclear-type" -> Some [UnclearType] | "deprecated-type" -> Some [DeprecatedType] + | "deprecated-utility" -> Some [DeprecatedUtility] + | "dynamic-export" -> Some [DynamicExport] | "unsafe-getters-setters" -> Some [UnsafeGettersSetters] | "inexact-spread" -> Some [InexactSpread] | "unnecessary-optional-chain" -> Some [UnnecessaryOptionalChain] | "unnecessary-invariant" -> Some [UnnecessaryInvariant] - | "deprecated-call-syntax" -> Some [DeprecatedCallSyntax] + | "signature-verification-failure" -> Some [SignatureVerificationFailure] + | "implicit-inexact-object" -> Some [ImplicitInexactObject] + | "uninitialized-instance-property" -> Some [UninitializedInstanceProperty] + | "non-array-spread" -> Some [NonArraySpread] | _ -> None module LintKind = struct type t = lint_kind + let compare = compare end -module LintMap = MyMap.Make(LintKind) -module LintSet = Set.Make(LintKind) +module LintMap = MyMap.Make (LintKind) +module LintSet = Set.Make (LintKind) diff --git 
a/src/common/lints/lints.mli b/src/common/lints/lints.mli index 94681cc062c..3653cb40c6a 100644 --- a/src/common/lints/lints.mli +++ b/src/common/lints/lints.mli @@ -1,36 +1,48 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type sketchy_null_kind = - | SketchyNullBool - | SketchyNullString - | SketchyNullNumber - | SketchyNullMixed + | SketchyNullBool + | SketchyNullString + | SketchyNullNumber + | SketchyNullMixed -type sketchy_number_kind = - | SketchyNumberAnd +type sketchy_number_kind = SketchyNumberAnd + +type property_assignment_kind = + | PropertyNotDefinitelyInitialized + | ReadFromUninitializedProperty + | MethodCallBeforeEverythingInitialized + | ThisBeforeEverythingInitialized + | PropertyFunctionCallBeforeEverythingInitialized type lint_kind = - | SketchyNull of sketchy_null_kind - | SketchyNumber of sketchy_number_kind - | UntypedTypeImport - | UntypedImport - | NonstrictImport - | UnclearType - | DeprecatedType - | UnsafeGettersSetters - | InexactSpread - | UnnecessaryOptionalChain - | UnnecessaryInvariant - | DeprecatedCallSyntax - -val string_of_kind: lint_kind -> string - -val kinds_of_string: string -> lint_kind list option - -module LintMap: MyMap.S with type key = lint_kind -module LintSet: Set.S with type elt = lint_kind + | SketchyNull of sketchy_null_kind + | SketchyNumber of sketchy_number_kind + | UntypedTypeImport + | UntypedImport + | NonstrictImport + | UnclearType + | DeprecatedType + | DeprecatedUtility + | DynamicExport + | UnsafeGettersSetters + | InexactSpread + | UnnecessaryOptionalChain + | UnnecessaryInvariant + | SignatureVerificationFailure + | ImplicitInexactObject + | UninitializedInstanceProperty + | NonArraySpread + +val string_of_kind : lint_kind -> string + +val kinds_of_string : string -> lint_kind list option + +module LintMap : MyMap.S with type key = lint_kind + +module LintSet : Set.S with type elt = lint_kind diff --git a/src/common/lints/severity.ml b/src/common/lints/severity.ml index afdd4214e58..87765cab205 100644 --- a/src/common/lints/severity.ml +++ b/src/common/lints/severity.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -31,8 +31,17 @@ let severity_cmp = | Off -> 0 | Warn -> 1 | Err -> 2 - in fun a b -> compare (int_of_severity a) (int_of_severity b) + in + (fun a b -> compare (int_of_severity a) (int_of_severity b)) -let severity_min a b = if severity_cmp a b < 0 then a else b +let severity_min a b = + if severity_cmp a b < 0 then + a + else + b -let severity_max a b = if severity_cmp a b > 0 then a else b +let severity_max a b = + if severity_cmp a b > 0 then + a + else + b diff --git a/src/common/lints/severity.mli b/src/common/lints/severity.mli index 84805b8f40d..03e0c5f9c94 100644 --- a/src/common/lints/severity.mli +++ b/src/common/lints/severity.mli @@ -1,20 +1,23 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) type severity = - | Off - | Warn - | Err + | Off + | Warn + | Err -val string_of_severity: severity -> string -val output_string_of_severity: severity -> string +val string_of_severity : severity -> string -val severity_of_string: string -> severity option +val output_string_of_severity : severity -> string -val severity_cmp: severity -> severity -> int -val severity_min: severity -> severity -> severity -val severity_max: severity -> severity -> severity +val severity_of_string : string -> severity option + +val severity_cmp : severity -> severity -> int + +val severity_min : severity -> severity -> severity + +val severity_max : severity -> severity -> severity diff --git a/src/common/lints/strictModeSettings.ml b/src/common/lints/strictModeSettings.ml index 6219e70b793..3485a937cd1 100644 --- a/src/common/lints/strictModeSettings.ml +++ b/src/common/lints/strictModeSettings.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,21 +10,22 @@ open Lints type t = LintSet.t let empty = LintSet.empty + let fold = LintSet.fold + let iter = LintSet.iter let of_lines = let parse_line (label, line) = let line = line |> String.trim in match kinds_of_string line with - | Some kinds -> Ok kinds - | None -> Error (label, Printf.sprintf "Invalid strict mode lint \"%s\" encountered." line) + | Some kinds -> Ok kinds + | None -> Error (label, Printf.sprintf "Invalid strict mode lint \"%s\" encountered." line) in let rec loop acc = function | [] -> Ok acc - | labeled_line::labeled_lines -> - Core_result.bind (parse_line labeled_line) - (fun kinds -> + | labeled_line :: labeled_lines -> + Core_result.bind (parse_line labeled_line) (fun kinds -> let acc = List.fold_left (fun acc kind -> LintSet.add kind acc) acc kinds in loop acc labeled_lines) in diff --git a/src/common/lints/strictModeSettings.mli b/src/common/lints/strictModeSettings.mli index 9d1985a25f5..fd800e07294 100644 --- a/src/common/lints/strictModeSettings.mli +++ b/src/common/lints/strictModeSettings.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,7 +9,10 @@ open Lints type t -val empty: t -val fold: (lint_kind -> 'acc -> 'acc) -> t -> 'acc -> 'acc -val iter: (lint_kind -> unit) -> t -> unit -val of_lines: (int * string) list -> (t, int * string) result +val empty : t + +val fold : (lint_kind -> 'acc -> 'acc) -> t -> 'acc -> 'acc + +val iter : (lint_kind -> unit) -> t -> unit + +val of_lines : (int * string) list -> (t, int * string) result diff --git a/src/common/loggingUtils.ml b/src/common/loggingUtils.ml deleted file mode 100644 index 886beb551ff..00000000000 --- a/src/common/loggingUtils.ml +++ /dev/null @@ -1,43 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -let hh_logger_level_of_env env = - match Sys_utils.get_env env with - | Some "off" -> Some Hh_logger.Level.Off - | Some "fatal" -> Some Hh_logger.Level.Fatal - | Some "error" -> Some Hh_logger.Level.Error - | Some "warn" -> Some Hh_logger.Level.Warn - | Some "info" -> Some Hh_logger.Level.Info - | Some "debug" -> Some Hh_logger.Level.Debug - | Some _ (* ignore invalid values *) - | None -> None - -(* TODO: min_level should probably default to warn, but was historically info *) -let set_hh_logger_min_level ?(min_level=Hh_logger.Level.Info) options = - Hh_logger.Level.set_min_level ( - if Options.is_quiet options then - Hh_logger.Level.Off - else if Options.verbose options != None || Options.is_debug_mode options then - Hh_logger.Level.Debug - else match hh_logger_level_of_env "FLOW_LOG_LEVEL" with - | Some level -> level - | None -> min_level - ) - -let init_loggers ~from ~options ?min_level () = - FlowEventLogger.set_from from; - set_hh_logger_min_level ?min_level options - -let set_server_options ~server_options = - let lazy_mode = Option.value_map - (Options.lazy_mode server_options) ~default:"off" ~f:Options.lazy_mode_to_string - in - let cancelable_rechecks = - if Options.enable_cancelable_rechecks server_options then "on" else "off" - in - - FlowEventLogger.set_server_options ~lazy_mode ~cancelable_rechecks diff --git a/src/common/logging_utils/dune b/src/common/logging_utils/dune new file mode 100644 index 00000000000..1715f56b4be --- /dev/null +++ b/src/common/logging_utils/dune @@ -0,0 +1,13 @@ +(library + (name flow_logging_utils) + (wrapped false) + (libraries + flow_common + flow_logging_stubs + collections ; hack + imported_core ; hack + logging_common_stubs ; hack + sys_utils ; hack + utils_core ; hack + ) +) diff --git a/src/common/logging_utils/loggingUtils.ml b/src/common/logging_utils/loggingUtils.ml new file mode 100644 index 00000000000..4db55038eb8 --- /dev/null +++ b/src/common/logging_utils/loggingUtils.ml @@ -0,0 +1,86 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +let hh_logger_level_of_env env = + match Sys_utils.get_env env with + | Some "off" -> Some Hh_logger.Level.Off + | Some "fatal" -> Some Hh_logger.Level.Fatal + | Some "error" -> Some Hh_logger.Level.Error + | Some "warn" -> Some Hh_logger.Level.Warn + | Some "info" -> Some Hh_logger.Level.Info + | Some "debug" -> Some Hh_logger.Level.Debug + | Some _ + (* ignore invalid values *) + + | None -> + None + +(* TODO: min_level should probably default to warn, but was historically info *) +let set_hh_logger_min_level ?(min_level = Hh_logger.Level.Info) options = + Hh_logger.Level.set_min_level + ( if Options.is_quiet options then + Hh_logger.Level.Off + else if Options.verbose options != None || Options.is_debug_mode options then + Hh_logger.Level.Debug + else + match hh_logger_level_of_env "FLOW_LOG_LEVEL" with + | Some level -> level + | None -> min_level ) + +let init_loggers ~options ?min_level () = set_hh_logger_min_level ?min_level options + +let (set_server_options, dump_server_options) = + let format server_options = + let lazy_mode = + match Options.lazy_mode server_options with + | Options.LAZY_MODE_FILESYSTEM -> "fs" + | Options.LAZY_MODE_IDE -> "ide" + | Options.LAZY_MODE_WATCHMAN -> "watchman" + | Options.NON_LAZY_MODE -> "off" + in + let arch = + match Options.arch server_options with + | Options.Classic -> "classic" + | Options.TypesFirst -> "types_first" + in + let abstract_locations = + if Options.abstract_locations server_options then + "on" + else + "off" + in + let max_workers = Options.max_workers server_options in + let enabled_rollouts = Options.enabled_rollouts server_options in + (lazy_mode, arch, abstract_locations, max_workers, enabled_rollouts) + in + let set_server_options ~server_options = + let (lazy_mode, arch, abstract_locations, max_workers, enabled_rollouts) = + format server_options + in + FlowEventLogger.set_server_options + ~lazy_mode + ~arch + ~abstract_locations + ~max_workers + ~enabled_rollouts + in + let dump_server_options ~server_options ~log = + let (lazy_mode, arch, abstract_locations, max_workers, enabled_rollouts) = + format server_options + in + log (Printf.sprintf "lazy_mode=%s" lazy_mode); + log (Printf.sprintf "arch=%s" arch); + log (Printf.sprintf "abstract_locations=%s" abstract_locations); + log (Printf.sprintf "max_workers=%d" max_workers); + SMap.iter (fun r g -> log (Printf.sprintf "Rollout %S set to %S" r g)) enabled_rollouts + in + (set_server_options, dump_server_options) + +let disable_logging () = + EventLogger.disable_logging (); + FlowInteractionLogger.disable_logging (); + Flow_server_profile.disable_logging () diff --git a/src/common/lwt/__tests__/lwtUtils_test.ml b/src/common/lwt/__tests__/lwtUtils_test.ml index e557acc0202..6c3347f1f01 100644 --- a/src/common/lwt/__tests__/lwtUtils_test.ml +++ b/src/common/lwt/__tests__/lwtUtils_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
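(* A minimal sketch of how the LoggingUtils entry points introduced above are
 * meant to be called at startup; options is a placeholder for an existing
 * Options.t value, not a name from this patch. *)
let _example_init_logging ~options =
  (* Precedence in set_hh_logger_min_level: quiet wins, then verbose or debug
   * mode, then the FLOW_LOG_LEVEL environment variable, then the Info default. *)
  LoggingUtils.init_loggers ~options ();
  LoggingUtils.set_server_options ~server_options:options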
@@ -8,38 +8,35 @@ open OUnit2 (* Like `>::` except it expects the function to return `unit Lwt.t` rather than `unit` *) -let (%>::) name f = - name >:: begin fun ctxt -> - LwtInit.run_lwt (fun () -> f ctxt) - end +let ( %>:: ) name f = name >:: (fun ctxt -> LwtInit.run_lwt (fun () -> f ctxt)) -let tests = "LwtUtils.all" >::: [ - "includes_all_results" %>:: begin fun ctxt -> - let p1, r1 = Lwt.wait () in - let p2, r2 = Lwt.wait () in - Lwt.wakeup r1 1; - let p = LwtUtils.all [p1; p2] in - Lwt.wakeup r2 2; - let%lwt lst = p in - (* This illustrates the difference between LwtUtils.all and Lwt.nchoose, which would resolve to - * [1] *) - assert_equal ~ctxt lst [1; 2]; - Lwt.return_unit - end; - - "fails_early" %>:: begin fun ctxt -> - let p1, _ = Lwt.wait () in - let p2, r2 = Lwt.wait () in - Lwt.wakeup_exn r2 (Failure "did not work"); - let p = LwtUtils.all [p1; p2] in - let%lwt exn = - try%lwt - let%lwt _lst = p in - Lwt.return_none - with Failure s -> - Lwt.return_some s - in - assert_equal ~ctxt exn (Some "did not work"); - Lwt.return_unit - end; -] +let tests = + "LwtUtils.all" + >::: [ + ( "includes_all_results" + %>:: fun ctxt -> + let (p1, r1) = Lwt.wait () in + let (p2, r2) = Lwt.wait () in + Lwt.wakeup r1 1; + let p = LwtUtils.all [p1; p2] in + Lwt.wakeup r2 2; + let%lwt lst = p in + (* This illustrates the difference between LwtUtils.all and Lwt.nchoose, which would resolve to + * [1] *) + assert_equal ~ctxt lst [1; 2]; + Lwt.return_unit ); + ( "fails_early" + %>:: fun ctxt -> + let (p1, _) = Lwt.wait () in + let (p2, r2) = Lwt.wait () in + Lwt.wakeup_exn r2 (Failure "did not work"); + let p = LwtUtils.all [p1; p2] in + let%lwt exn = + try%lwt + let%lwt _lst = p in + Lwt.return_none + with Failure s -> Lwt.return_some s + in + assert_equal ~ctxt exn (Some "did not work"); + Lwt.return_unit ); + ] diff --git a/src/common/lwt/__tests__/lwt_tests.ml b/src/common/lwt/__tests__/lwt_tests.ml new file mode 100644 index 00000000000..5c016797728 --- /dev/null +++ b/src/common/lwt/__tests__/lwt_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "utils" >::: [LwtUtils_test.tests] + +let () = run_test_tt_main tests diff --git a/src/common/lwt/__tests__/test.ml b/src/common/lwt/__tests__/test.ml deleted file mode 100644 index 5a3e3627aa9..00000000000 --- a/src/common/lwt/__tests__/test.ml +++ /dev/null @@ -1,14 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "utils" >::: [ - LwtUtils_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/common/lwt/dune b/src/common/lwt/dune new file mode 100644 index 00000000000..d327be42276 --- /dev/null +++ b/src/common/lwt/dune @@ -0,0 +1,10 @@ +(library + (name flow_common_lwt) + (wrapped false) + (libraries + imported_core + lwt + lwt.unix + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/common/lwt/lwtInit.ml b/src/common/lwt/lwtInit.ml index bb597d27192..c03147e8b27 100644 --- a/src/common/lwt/lwtInit.ml +++ b/src/common/lwt/lwtInit.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -24,31 +24,38 @@ let spf = Printf.sprintf * OCaml bug report: https://caml.inria.fr/mantis/view.php?id=7665 * lwt issue: https://github.com/ocsigen/lwt/issues/496 *) -class windows_select = object - inherit Lwt_engine.select_based +class windows_select = + object + inherit Lwt_engine.select_based - method private select fds_r fds_w timeout = - (* Figure out which fds are already ready to be read *) - let ready_r = List.fold_left (fun ready_r fd_r -> - match Unix.select [fd_r] [] [] 0.0 with - | [], _, _ -> ready_r - | _ -> fd_r::ready_r - ) [] fds_r in - - (* Figure out which fds are already ready to be written *) - let ready_w = List.fold_left (fun ready_w fd_w -> - match Unix.select [] [fd_w] [] 0.0 with - | _, [], _ -> ready_w - | _ -> fd_w::ready_w - ) [] fds_w in - - (* If nothing is ready, then do a multi-fd select with the timeout *) - if ready_r = [] && ready_w = [] - then - let fds_r, fds_w, _ = Unix.select fds_r fds_w [] timeout in - (fds_r, fds_w) - else (ready_r, ready_w) -end + method private select fds_r fds_w timeout = + (* Figure out which fds are already ready to be read *) + let ready_r = + List.fold_left + (fun ready_r fd_r -> + match Unix.select [fd_r] [] [] 0.0 with + | ([], _, _) -> ready_r + | _ -> fd_r :: ready_r) + [] + fds_r + in + (* Figure out which fds are already ready to be written *) + let ready_w = + List.fold_left + (fun ready_w fd_w -> + match Unix.select [] [fd_w] [] 0.0 with + | (_, [], _) -> ready_w + | _ -> fd_w :: ready_w) + [] + fds_w + in + (* If nothing is ready, then do a multi-fd select with the timeout *) + if ready_r = [] && ready_w = [] then + let (fds_r, fds_w, _) = Unix.select fds_r fds_w [] timeout in + (fds_r, fds_w) + else + (ready_r, ready_w) + end (* * So there's a bug in Unix.select on unix (Linux and OSX). Basically, select is supposed to raise @@ -72,43 +79,47 @@ end * OCaml bug report: https://caml.inria.fr/mantis/view.php?id=7700 * lwt issue: https://github.com/ocsigen/lwt/issues/529 *) -class unix_select = object - inherit Lwt_engine.select_based +class unix_select = + object + inherit Lwt_engine.select_based - method private select fds_r fds_w timeout = - let fds_r, fds_w, _ = - try Unix.select fds_r fds_w [] timeout - with - | Unix.Unix_error (Unix.EINVAL, fn, params) -> begin - (* Ok, so either one of the fds is an invalid fd, or maybe it's a valid fd but too large + method private select fds_r fds_w timeout = + let (fds_r, fds_w, _) = + try Unix.select fds_r fds_w [] timeout + with Unix.Unix_error (Unix.EINVAL, fn, params) -> + (* Ok, so either one of the fds is an invalid fd, or maybe it's a valid fd but too large * for select *) - begin try - let explode_if_bad fd = Unix.fstat fd |> ignore in - List.iter explode_if_bad fds_r; - List.iter explode_if_bad fds_w - with Unix.Unix_error (_, _, _) -> - raise (Unix.Unix_error (Unix.EBADF, fn, params)) - end; - (* Oh boy. So it looks like all the fds are valid. This likely means that one fd is larger - * than FD_SETSIZE (which is probably 1024). 
select() stops working for large fds like this - *) - let string_of_fd fd = string_of_int ((Obj.magic fd): int) in - let string_of_fds fds = String.concat ";" (List.map string_of_fd fds) in - let params = spf "[%s] [%s] []" (string_of_fds fds_r) (string_of_fds fds_w) in - raise (Unix.Unix_error (Unix.EINVAL, "select", params)) - end - in - (fds_r, fds_w) -end + begin + try + let explode_if_bad fd = Unix.fstat fd |> ignore in + List.iter explode_if_bad fds_r; + List.iter explode_if_bad fds_w + with Unix.Unix_error (_, _, _) -> raise (Unix.Unix_error (Unix.EBADF, fn, params)) + end; + + (* Oh boy. So it looks like all the fds are valid. This likely means that one fd is larger + * than FD_SETSIZE (which is probably 1024). select() stops working for large fds like this + *) + let string_of_fd fd = string_of_int (Obj.magic fd : int) in + let string_of_fds fds = String.concat ";" (Core_list.map ~f:string_of_fd fds) in + let params = spf "[%s] [%s] []" (string_of_fds fds_r) (string_of_fds fds_w) in + raise (Unix.Unix_error (Unix.EINVAL, "select", params)) + in + (fds_r, fds_w) + end let set_engine () = (* In theory, we could allow Flow built on machines with libev to use libev instead of select. * However, it seems like lwt_config.h on my OSX opam and my CentOS opam both comment out * HAVE_LIBEV. And I suppose if we can't rely on libev everywhere then we should rely on it * nowhere *) - if Sys.win32 - then Lwt_engine.set (new windows_select) (* See comment on windows_select *) - else Lwt_engine.set (new unix_select) (* See comment on unix_select *) + if Sys.win32 then + Lwt_engine.set (new windows_select) + (* See comment on windows_select *) + else + Lwt_engine.set (new unix_select) + +(* See comment on unix_select *) let run_lwt f = set_engine (); diff --git a/src/common/lwt/lwtInit.mli b/src/common/lwt/lwtInit.mli index f186a5d3904..17b064d6f6b 100644 --- a/src/common/lwt/lwtInit.mli +++ b/src/common/lwt/lwtInit.mli @@ -1,8 +1,8 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val run_lwt: (unit -> 'a Lwt.t) -> 'a +val run_lwt : (unit -> 'a Lwt.t) -> 'a diff --git a/src/common/lwt/lwtLoop.ml b/src/common/lwt/lwtLoop.ml index 7835e0db254..2acb6951b8c 100644 --- a/src/common/lwt/lwtLoop.ml +++ b/src/common/lwt/lwtLoop.ml @@ -1,26 +1,28 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - (* It is useful to write infinite loops using lwt. They're useful for reading, for writing, for - * waiting for some broadcast message, for doing something every N seconds, etc. However, there's a - * bunch of boilerplate. LwtLoop.Make is a functor that tries to hide that boilerplate - *) +(* It is useful to write infinite loops using lwt. They're useful for reading, for writing, for + * waiting for some broadcast message, for doing something every N seconds, etc. However, there's a + * bunch of boilerplate. LwtLoop.Make is a functor that tries to hide that boilerplate + *) module type LOOP = sig type acc + (* A single iteration of the loop *) - val main: acc -> acc Lwt.t + val main : acc -> acc Lwt.t + (* Wraps each iteration of the loop. 
On an exception, the most recent acc is passed in and the * loop is canceled *) - val catch: acc -> exn -> unit Lwt.t + val catch : acc -> exn -> unit Lwt.t end -module Make (Loop: LOOP): sig - val run: ?cancel_condition:'a Lwt_condition.t -> Loop.acc -> unit Lwt.t +module Make (Loop : LOOP) : sig + val run : ?cancel_condition:'a Lwt_condition.t -> Loop.acc -> unit Lwt.t end = struct let catch acc exn = match exn with @@ -28,28 +30,30 @@ end = struct | Lwt.Canceled -> Lwt.return_unit | exn -> Loop.catch acc exn - let rec loop acc = - Lwt.try_bind - (fun () -> Loop.main acc) - loop - (catch acc) + let rec loop acc = Lwt.try_bind (fun () -> Loop.main acc) loop (catch acc) let run ?cancel_condition acc = (* Create a waiting thread *) - let waiter, wakener = Lwt.task () in + let (waiter, wakener) = Lwt.task () in (* When the waiting thread is woken, it will kick off the loop *) - let thread = (let%lwt ret = waiter in loop ret) in - + let thread = + let%lwt ret = waiter in + loop ret + in (* If there is a cancel condition variable, wait for it to fire and then cancel the loop *) - begin match cancel_condition with - | None -> () - | Some condition -> - (* If the condition is hit, cancel the loop thread. If the loop thread finishes, cancel the - * condition wait *) - Lwt.async (fun () -> Lwt.pick [ - (try%lwt thread with Lwt.Canceled -> Lwt.return_unit); - (let%lwt _ = Lwt_condition.wait condition in Lwt.return_unit); - ]) + begin + match cancel_condition with + | None -> () + | Some condition -> + (* If the condition is hit, cancel the loop thread. If the loop thread finishes, cancel the + * condition wait *) + Lwt.async (fun () -> + Lwt.pick + [ + (try%lwt thread with Lwt.Canceled -> Lwt.return_unit); + (let%lwt _ = Lwt_condition.wait condition in + Lwt.return_unit); + ]) end; (* Start things going *) diff --git a/src/common/lwt/lwtLoop.mli b/src/common/lwt/lwtLoop.mli index fe66c947eda..55dfe6eba6a 100644 --- a/src/common/lwt/lwtLoop.mli +++ b/src/common/lwt/lwtLoop.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,10 +7,12 @@ module type LOOP = sig type acc - val main: acc -> acc Lwt.t - val catch: acc -> exn -> unit Lwt.t + + val main : acc -> acc Lwt.t + + val catch : acc -> exn -> unit Lwt.t end -module Make: functor (Loop: LOOP) -> sig - val run: ?cancel_condition:'a Lwt_condition.t -> Loop.acc -> unit Lwt.t +module Make (Loop : LOOP) : sig + val run : ?cancel_condition:'a Lwt_condition.t -> Loop.acc -> unit Lwt.t end diff --git a/src/common/lwt/lwtSysUtils.ml b/src/common/lwt/lwtSysUtils.ml index d4fa68b57a9..0a1edfdc81b 100644 --- a/src/common/lwt/lwtSysUtils.ml +++ b/src/common/lwt/lwtSysUtils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,41 +7,45 @@ (* Basically a `waitpid [ WUNTRACED ] pid` (WUNTRACED means also return on stopped processes) *) let blocking_waitpid = - let reasonable_impl pid = Lwt_unix.waitpid [Unix.WUNTRACED] pid - in - + let reasonable_impl pid = Lwt_unix.waitpid [Unix.WUNTRACED] pid in (* Lwt_unix.waitpid without WNOHANG doesn't work on Windows. As a workaround, we can call the * WNOHANG version every .5 seconds. 
https://github.com/ocsigen/lwt/issues/494 *) let rec damn_it_windows_impl pid_to_wait_for = let%lwt (pid_ret, status) = Lwt_unix.waitpid [Unix.WNOHANG; Unix.WUNTRACED] pid_to_wait_for in - if pid_ret = 0 - then + if pid_ret = 0 then (* Still hasn't exited. Let's wait .5s and try again *) - let%lwt () = Lwt_unix.sleep 0.5 in - damn_it_windows_impl pid_to_wait_for + let%lwt () = Lwt_unix.sleep 0.5 in + damn_it_windows_impl pid_to_wait_for else (* Ok, process has exited or died or something. *) Lwt.return (pid_ret, status) in - - if Sys.win32 then damn_it_windows_impl else reasonable_impl + if Sys.win32 then + damn_it_windows_impl + else + reasonable_impl (* An lwt version of Sys_utils.exec_read. Basically just runs a command and returns the first line * of stdout *) let exec_read cmd args = - let process = Lwt_process.open_process_in (cmd, Array.of_list (cmd::args)) in + let process = Lwt_process.open_process_in (cmd, Array.of_list (cmd :: args)) in let%lwt result = Lwt_io.read_line process#stdout in let%lwt status = process#close in assert (status = Unix.WEXITED 0); Lwt.return result -type command_result = { stdout: string; stderr: string; status: Unix.process_status; } +type command_result = { + stdout: string; + stderr: string; + status: Unix.process_status; +} let exec cmd args = - Lwt_process.with_process_full (cmd, Array.of_list (cmd::args)) (fun process -> - (* Wait for it to finish *) - let%lwt status = process#status in - let%lwt stdout = Lwt_io.read process#stdout in - let%lwt stderr = Lwt_io.read process#stderr in - Lwt.return {stdout; stderr; status} - ) + Lwt_process.with_process_full + (cmd, Array.of_list (cmd :: args)) + (fun process -> + (* Wait for it to finish *) + let%lwt status = process#status in + let%lwt stdout = Lwt_io.read process#stdout in + let%lwt stderr = Lwt_io.read process#stderr in + Lwt.return { stdout; stderr; status }) diff --git a/src/common/lwt/lwtSysUtils.mli b/src/common/lwt/lwtSysUtils.mli index 6747af3ca9b..1a8dbe10a9a 100644 --- a/src/common/lwt/lwtSysUtils.mli +++ b/src/common/lwt/lwtSysUtils.mli @@ -1,14 +1,18 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val blocking_waitpid: int -> (int * Unix.process_status) Lwt.t +val blocking_waitpid : int -> (int * Unix.process_status) Lwt.t -val exec_read: string -> string list -> string Lwt.t +val exec_read : string -> string list -> string Lwt.t -type command_result = { stdout: string; stderr: string; status: Unix.process_status; } +type command_result = { + stdout: string; + stderr: string; + status: Unix.process_status; +} -val exec: string -> string list -> command_result Lwt.t +val exec : string -> string list -> command_result Lwt.t diff --git a/src/common/lwt/lwtUtils.ml b/src/common/lwt/lwtUtils.ml index 4f6ee76de41..4a318942bf9 100644 --- a/src/common/lwt/lwtUtils.ml +++ b/src/common/lwt/lwtUtils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
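Editor's note: LwtLoop.Make above exists to hide the recursion-plus-exception-handling boilerplate of an infinite (or bounded) Lwt loop behind a `main`/`catch` pair. Here is a rough, self-contained sketch of the kind of hand-written loop it replaces, using only the lwt and lwt.unix libraries; the `tick` function is illustrative and not part of Flow.

(* Hand-rolled Lwt loop: bind one iteration to a recursive call, stop on a
 * condition. LwtLoop.Make packages the same shape: Loop.main is the body that
 * returns the next acc, Loop.catch handles exceptions, and run
 * ?cancel_condition wires up cancellation. *)
let rec tick n =
  if n = 0 then
    Lwt.return_unit
  else
    Lwt.bind (Lwt_io.printlf "tick %d" n) (fun () ->
        Lwt.bind (Lwt_unix.sleep 0.1) (fun () -> tick (n - 1)))

let () = Lwt_main.run (tick 3)

With the functor, the recursion and the try/catch disappear from call sites; only the single-iteration body and the error handler remain.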
@@ -18,19 +18,31 @@ * promises either :P *) let rec iter_all threads = - if threads = [] - then Lwt.return_unit + if threads = [] then + Lwt.return_unit else (* If any thread in threads fails during this nchoose, the whole all function will fail *) - let%lwt _, sleeping_threads = Lwt.nchoose_split threads in - iter_all sleeping_threads + let%lwt (_, sleeping_threads) = Lwt.nchoose_split threads in + iter_all sleeping_threads -let get_value_unsafe thread = match Lwt.state thread with +let get_value_unsafe thread = + match Lwt.state thread with | Lwt.Return x -> x | _ -> failwith "Not yet completed" let all threads = let%lwt () = iter_all threads in - threads - |> List.map get_value_unsafe - |> Lwt.return + threads |> Core_list.map ~f:get_value_unsafe |> Lwt.return + +let output_graph out strip_root graph = + let%lwt () = Lwt_io.fprint out "digraph {\n" in + let%lwt () = + Lwt_list.iter_s + (fun (f, dep_fs) -> + Lwt_list.iter_s + (fun dep_f -> + Lwt_io.fprintf out " \"%s\" -> \"%s\"\n" (strip_root f) (strip_root dep_f)) + dep_fs) + graph + in + Lwt_io.fprint out "}" diff --git a/src/common/lwt/lwtUtils.mli b/src/common/lwt/lwtUtils.mli index ca6c0a4faa9..3abcd9668c3 100644 --- a/src/common/lwt/lwtUtils.mli +++ b/src/common/lwt/lwtUtils.mli @@ -1,10 +1,12 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val iter_all: unit Lwt.t list -> unit Lwt.t +val iter_all : unit Lwt.t list -> unit Lwt.t -val all: 'a Lwt.t list -> 'a list Lwt.t +val all : 'a Lwt.t list -> 'a list Lwt.t + +val output_graph : Lwt_io.output_channel -> ('a -> string) -> ('a * 'a list) list -> unit Lwt.t diff --git a/src/common/modulename/dune b/src/common/modulename/dune new file mode 100644 index 00000000000..e556fb34c00 --- /dev/null +++ b/src/common/modulename/dune @@ -0,0 +1,8 @@ +(library + (name flow_common_modulename) + (wrapped false) + (libraries + flow_parser + collections ; hack + ) +) diff --git a/src/common/modulename/modulename.ml b/src/common/modulename/modulename.ml index aa291c1c028..6e210fe5bfe 100644 --- a/src/common/modulename/modulename.ml +++ b/src/common/modulename/modulename.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -20,8 +20,8 @@ is simply known by its path in the file system. *) type t = -| String of string -| Filename of File_key.t + | String of string + | Filename of File_key.t let to_string = function | String m -> m @@ -31,8 +31,11 @@ let compare = Pervasives.compare module Key = struct type nonrec t = t + let to_string = to_string + let compare = compare end -module Set = Set.Make(Key) -module Map = MyMap.Make(Key) + +module Set = Set.Make (Key) +module Map = MyMap.Make (Key) diff --git a/src/common/monad/dune b/src/common/monad/dune new file mode 100644 index 00000000000..e866679fecd --- /dev/null +++ b/src/common/monad/dune @@ -0,0 +1,8 @@ +(library + (name flow_common_monad) + (wrapped false) + (libraries + flow_common + imported_core + ) +) diff --git a/src/common/monad/stateResult.ml b/src/common/monad/stateResult.ml index 48c53926079..1d284815f6e 100644 --- a/src/common/monad/stateResult.ml +++ b/src/common/monad/stateResult.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. 
+ * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,63 +11,80 @@ *) module type State = sig - type t + type t end module type S = sig type s + type ('a, 'b) r = ('a, 'b) Pervasives.result + type ('a, 'b) t = s -> ('a, 'b) r * s include Monad.S2 with type ('a, 'b) t := ('a, 'b) t val put : s -> (unit, 'b) t + val get : (s, 'b) t + val option : ('a -> ('c, 'b) t) -> 'a option -> ('c option, 'b) t + val modify : (s -> s) -> (unit, 'b) t + val error : 'b -> ('a, 'b) t + val run : s -> ('a, 'b) t -> ('a, 'b) r * s -end + (* Used by let%bind syntax *) + module Let_syntax : sig + val return : 'a -> ('a, 'b) t -module Make(S: State) : S with type s = S.t = struct + val bind : ('a, 'b) t -> f:('a -> ('c, 'b) t) -> ('c, 'b) t + val map : ('a, 'b) t -> f:('a -> 'c) -> ('c, 'b) t + end +end + +module Make (S : State) : S with type s = S.t = struct type s = S.t + type ('a, 'b) r = ('a, 'b) Pervasives.result type ('a, 'b) t = S.t -> ('a, 'b) r * S.t + include Monad.Make2 (struct type nonrec ('a, 'b) t = ('a, 'b) t - let bind m f = - fun s -> - let (x, s') = m s in - match x with - | Error _ as x -> (x, s') - | Ok x -> f x s' + let bind m f s = + let (x, s') = m s in + match x with + | Error _ as x -> (x, s') + | Ok x -> f x s' let map = `Define_using_bind - let return x = fun s -> (Ok x, s) - + let return x s = (Ok x, s) end) - let put s = - fun _ -> (Ok (), s) + let put s _ = (Ok (), s) - let get = - fun s -> (Ok s, s) + let get s = (Ok s, s) - let modify f = - get >>= fun st -> - put (f st) + let modify f = get >>= (fun st -> put (f st)) let option f = function | Some x -> f x >>| Option.some | None -> return None - let error x = fun s -> (Error x, s) + let error x s = (Error x, s) let run x m = m x + module Let_syntax = struct + let return = return + + let bind x ~f = bind x f + + let map x ~f = x >>| f + end end diff --git a/src/common/monad/writer.ml b/src/common/monad/writer.ml index eb97ebed2fc..02f46b54f79 100644 --- a/src/common/monad/writer.ml +++ b/src/common/monad/writer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,38 +7,41 @@ module type S = sig type m + type 'a t = 'a * m include Monad.S with type 'a t := 'a t - val tell: m -> unit t - val listen: 'a t -> ('a * m) t + val tell : m -> unit t + + val listen : 'a t -> ('a * m) t end -module Make (M: Monoid.S): S with type m := M.t = struct +module Make (M : Monoid.S) : S with type m := M.t = struct type m = M.t + type 'a t = 'a * m let map w ~f = - let x, m = w in - f x, m + let (x, m) = w in + (f x, m) - include Monad.Make(struct + include Monad.Make (struct type 'a z = 'a t (* 4.02.1 doesn't have nonrec *) + type 'a t = 'a z - let return x = - x, M.empty + let return x = (x, M.empty) let bind w f = - let x, m = w in - let x, m' = f x in - x, M.append m m' + let (x, m) = w in + let (x, m') = f x in + (x, M.append m m') let map = `Custom map end) - let tell m = (), m + let tell m = ((), m) - let listen (x, s) = (x, s), s + let listen (x, s) = ((x, s), s) end diff --git a/src/common/monoid.ml b/src/common/monoid.ml index 5ec9146b57e..837b6e28787 100644 --- a/src/common/monoid.ml +++ b/src/common/monoid.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. 
and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,24 +7,32 @@ module type S = sig type t - val empty: t - val append: t -> t -> t + + val empty : t + + val append : t -> t -> t end -module Unit: S with type t = unit = struct +module Unit : S with type t = unit = struct type t = unit + let empty = () + let append _ _ = () end -module Any: S with type t = bool = struct +module Any : S with type t = bool = struct type t = bool + let empty = false - let append = (||) + + let append = ( || ) end -module Counter: S with type t = int = struct +module Counter : S with type t = int = struct type t = int + let empty = 0 - let append = (+) + + let append = ( + ) end diff --git a/src/common/options.ml b/src/common/options.ml index 1987ba6f877..2c2c9ec7f5b 100644 --- a/src/common/options.ml +++ b/src/common/options.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,30 +11,30 @@ type esproposal_feature_mode = | ESPROPOSAL_WARN type file_watcher = -| NoFileWatcher -| DFind -| Watchman + | NoFileWatcher + | DFind + | Watchman type module_system = | Node | Haste type lazy_mode = -| LAZY_MODE_FILESYSTEM -| LAZY_MODE_IDE + | LAZY_MODE_FILESYSTEM + | LAZY_MODE_IDE + | LAZY_MODE_WATCHMAN + | NON_LAZY_MODE type jsx_mode = (* JSX desugars into a `React.createElement(name, props, ...children)` call *) | Jsx_react - - (** + (* * Specifies a function that should be invoked instead of React.createElement * when interpreting JSX syntax. Otherwise, the usual rules of JSX are * followed: children are varargs after a props argument. *) - | Jsx_pragma of (string * (Loc.t, Loc.t) Flow_ast.Expression.t) - - (** + | Jsx_pragma of (string * (ALoc.t, ALoc.t) Flow_ast.Expression.t) + (* * Alternate mode for interpreting JSX syntax. The element name is treated * as a function to be directly invoked, e.g. -> Foo({}). * Children are part of props instead of a separate argument. 
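Editor's note: the Monoid and Writer modules reformatted a few hunks above combine into a small logging-style effect: Writer.Make (Monoid.Counter) threads an integer tally alongside a computation via `tell` and `bind`. Below is a rough, dependency-free sketch of that idea; the `CounterWriter` module and `sum_and_count_negatives` are illustrative names, not Flow code, and the real Writer is built on the `Monad.Make` functor as shown in the diff.

(* A stand-in for Writer.Make (Monoid.Counter): every step may "tell" an
 * increment, and bind appends counts with (+), whose identity is 0. *)
module CounterWriter = struct
  type 'a t = 'a * int (* value paired with the accumulated count *)

  let return x = (x, 0) (* Counter.empty *)

  let bind (x, m) f =
    let (y, m') = f x in
    (y, m + m') (* Counter.append *)

  let tell n = ((), n)
end

let ( >>= ) = CounterWriter.bind

(* Sum a list while counting how many elements were negative. *)
let sum_and_count_negatives xs =
  List.fold_left
    (fun acc x ->
      acc >>= fun total ->
      CounterWriter.tell (if x < 0 then 1 else 0) >>= fun () ->
      CounterWriter.return (total + x))
    (CounterWriter.return 0)
    xs

let () =
  let (total, negatives) = sum_and_count_negatives [3; -1; 4; -1; 5] in
  Printf.printf "total=%d negatives=%d\n" total negatives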
@@ -42,33 +42,57 @@ type jsx_mode = | Jsx_csx type saved_state_fetcher = -| Dummy_fetcher -| Local_fetcher -| Fb_fetcher + | Dummy_fetcher + | Local_fetcher + | Fb_fetcher + +type arch = + | Classic + | TypesFirst + +type trust_mode = + | NoTrust + | CheckTrust + | SilentTrust type t = { - opt_all : bool; - opt_debug : bool; - opt_max_literal_length: int; - opt_enable_cancelable_rechecks: bool; + opt_abstract_locations: bool; + opt_all: bool; + opt_allow_skip_direct_dependents: bool; + opt_arch: arch; + opt_cache_direct_dependents: bool; + opt_debug: bool; opt_enable_const_params: bool; + opt_enabled_rollouts: string SMap.t; opt_enforce_strict_call_arity: bool; opt_enforce_well_formed_exports: bool; - opt_esproposal_class_static_fields: esproposal_feature_mode; + opt_enforce_well_formed_exports_whitelist: string list; + opt_enums: bool; opt_esproposal_class_instance_fields: esproposal_feature_mode; + opt_esproposal_class_static_fields: esproposal_feature_mode; opt_esproposal_decorators: esproposal_feature_mode; opt_esproposal_export_star_as: esproposal_feature_mode; - opt_esproposal_optional_chaining: esproposal_feature_mode; opt_esproposal_nullish_coalescing: esproposal_feature_mode; + opt_esproposal_optional_chaining: esproposal_feature_mode; + opt_exact_by_default: bool; + opt_facebook_fbs: string option; opt_facebook_fbt: string option; - opt_flowconfig_name: string; opt_file_options: Files.options; + opt_flowconfig_name: string; + opt_haste_module_ref_prefix: string option; opt_haste_name_reducers: (Str.regexp * string) list; opt_haste_paths_blacklist: string list; opt_haste_paths_whitelist: string list; opt_haste_use_name_reducers: bool; opt_ignore_non_literal_requires: bool; + opt_include_suppressions: bool; opt_include_warnings: bool; + opt_lazy_mode: lazy_mode; + opt_lint_severities: Severity.severity LintSettings.t; + opt_lsp_code_actions: bool; + opt_max_files_checked_per_worker: int; + opt_max_header_tokens: int; + opt_max_literal_length: int; opt_max_workers: int; opt_merge_timeout: float option; opt_module: module_system; @@ -77,81 +101,156 @@ type t = { opt_modules_are_use_strict: bool; opt_munge_underscores: bool; opt_no_saved_state: bool; - opt_profile : bool; - opt_lazy_mode: lazy_mode option; - opt_quiet : bool; - opt_root : Path.t; - opt_root_name : string option; + opt_profile: bool; + opt_quiet: bool; + opt_recursion_limit: int; + opt_root: Path.t; + opt_root_name: string option; opt_saved_state_fetcher: saved_state_fetcher; + opt_saved_state_force_recheck: bool; opt_saved_state_no_fallback: bool; - opt_strip_root : bool; - opt_suppress_comments : Str.regexp list; - opt_suppress_types : SSet.t; - opt_temp_dir: string; - opt_traces : int; - opt_verbose : Verbose.t option; - opt_weak : bool; - opt_max_header_tokens: int; - opt_lint_severities: Severity.severity LintSettings.t; opt_strict_mode: StrictModeSettings.t; + opt_strip_root: bool; + opt_suppress_comments: Str.regexp list; + opt_suppress_types: SSet.t; + opt_temp_dir: string; + opt_traces: int; + opt_trust_mode: trust_mode; + opt_type_asserts: bool; + opt_verbose: Verbose.t option; + opt_wait_for_recheck: bool; + opt_weak: bool; } +let abstract_locations opts = opts.opt_abstract_locations + let all opts = opts.opt_all + +let allow_skip_direct_dependents opts = opts.opt_allow_skip_direct_dependents + +let arch opts = opts.opt_arch + +let cache_direct_dependents opts = opts.opt_cache_direct_dependents + let max_literal_length opts = opts.opt_max_literal_length -let enable_cancelable_rechecks opts = 
opts.opt_enable_cancelable_rechecks + let enable_const_params opts = opts.opt_enable_const_params + +let enabled_rollouts opts = opts.opt_enabled_rollouts + let enforce_strict_call_arity opts = opts.opt_enforce_strict_call_arity + let enforce_well_formed_exports opts = opts.opt_enforce_well_formed_exports -let esproposal_class_static_fields opts = - opts.opt_esproposal_class_static_fields -let esproposal_class_instance_fields opts = - opts.opt_esproposal_class_instance_fields + +let enums opts = opts.opt_enums + +let esproposal_class_static_fields opts = opts.opt_esproposal_class_static_fields + +let esproposal_class_instance_fields opts = opts.opt_esproposal_class_instance_fields + let esproposal_decorators opts = opts.opt_esproposal_decorators + let esproposal_export_star_as opts = opts.opt_esproposal_export_star_as + let esproposal_optional_chaining opts = opts.opt_esproposal_optional_chaining + let esproposal_nullish_coalescing opts = opts.opt_esproposal_nullish_coalescing + +let exact_by_default opts = opts.opt_exact_by_default + +let haste_module_ref_prefix opts = opts.opt_haste_module_ref_prefix + let haste_name_reducers opts = opts.opt_haste_name_reducers + let haste_paths_blacklist opts = opts.opt_haste_paths_blacklist + let haste_paths_whitelist opts = opts.opt_haste_paths_whitelist + let haste_use_name_reducers opts = opts.opt_haste_use_name_reducers + let flowconfig_name opts = opts.opt_flowconfig_name + let file_options opts = opts.opt_file_options + let is_debug_mode opts = opts.opt_debug -let is_lazy_mode opts = opts.opt_lazy_mode <> None + +let is_lazy_mode opts = opts.opt_lazy_mode <> NON_LAZY_MODE + let lazy_mode opts = opts.opt_lazy_mode + let is_quiet opts = opts.opt_quiet + +let max_files_checked_per_worker opts = opts.opt_max_files_checked_per_worker + let max_header_tokens opts = opts.opt_max_header_tokens + let max_trace_depth opts = opts.opt_traces + let max_workers opts = opts.opt_max_workers + let merge_timeout opts = opts.opt_merge_timeout + let module_name_mappers opts = opts.opt_module_name_mappers + let module_resolver opts = opts.opt_module_resolver + let module_system opts = opts.opt_module + let modules_are_use_strict opts = opts.opt_modules_are_use_strict + let no_saved_state opts = opts.opt_no_saved_state + +let recursion_limit opts = opts.opt_recursion_limit + let root opts = opts.opt_root + let root_name opts = opts.opt_root_name + +let facebook_fbs opts = opts.opt_facebook_fbs + let facebook_fbt opts = opts.opt_facebook_fbt + let saved_state_fetcher opts = opts.opt_saved_state_fetcher +let saved_state_force_recheck opts = opts.opt_saved_state_force_recheck + let saved_state_no_fallback opts = opts.opt_saved_state_no_fallback -let should_ignore_non_literal_requires opts = - opts.opt_ignore_non_literal_requires + +let should_ignore_non_literal_requires opts = opts.opt_ignore_non_literal_requires + let should_include_warnings opts = opts.opt_include_warnings + let should_munge_underscores opts = opts.opt_munge_underscores + let should_profile opts = opts.opt_profile && not opts.opt_quiet + let should_strip_root opts = opts.opt_strip_root + let suppress_comments opts = opts.opt_suppress_comments + let suppress_types opts = opts.opt_suppress_types + let temp_dir opts = opts.opt_temp_dir + let verbose opts = opts.opt_verbose + +let wait_for_recheck opts = opts.opt_wait_for_recheck + let weak_by_default opts = opts.opt_weak +let include_suppressions opts = opts.opt_include_suppressions + let lint_severities opts = opts.opt_lint_severities + let strict_mode 
opts = opts.opt_strict_mode +let trust_mode opts = opts.opt_trust_mode + +let type_asserts opts = opts.opt_type_asserts let lazy_mode_to_string lazy_mode = match lazy_mode with | LAZY_MODE_FILESYSTEM -> "fs" | LAZY_MODE_IDE -> "ide" + | LAZY_MODE_WATCHMAN -> "watchman" + | NON_LAZY_MODE -> "none" diff --git a/src/common/path_matcher.ml b/src/common/path_matcher.ml index 848ee357b2e..291c959b13b 100644 --- a/src/common/path_matcher.ml +++ b/src/common/path_matcher.ml @@ -1,43 +1,50 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -open Utils_js +module PathMap : Map.S with type key = Path.t = Map.Make (struct + type t = Path.t + + let compare p1 p2 = String.compare (Path.to_string p1) (Path.to_string p2) +end) type t = { - (* list of paths to match against. may contain wildcards. - NOTE: stored in reverse-insertion order! *) - paths: Path.t list; (* stems extracted from paths. NOTE: stored in reverse-insertion order! *) stems: Path.t list; (* map from stems to list of (original path, regexified path) *) - stem_map: ((string * Str.regexp) list) PathMap.t; + stem_map: (string * Str.regexp) list PathMap.t; } -let empty = { paths = []; stems = []; stem_map = PathMap.empty; } -let paths matcher = matcher.paths +let empty = { stems = []; stem_map = PathMap.empty } + let stems matcher = matcher.stems -let stem_map matcher = matcher.stem_map (* given a path, return the max prefix not containing a wildcard or a terminal filename. *) let path_stem = let wc = Str.regexp "^[^*?]*[*?]" in - (fun path -> + fun path -> (* strip filename *) - let path = if Path.file_exists path && not (Path.is_directory path) - then Path.parent path else path in + let path = + if Path.file_exists path && not (Path.is_directory path) then + Path.parent path + else + path + in let path_str = Path.to_string path in (* strip back to non-wc prefix *) - let stem = if Str.string_match wc path_str 0 - then Filename.dirname (Str.matched_string path_str) - else path_str in - Path.make stem) + let stem = + if Str.string_match wc path_str 0 then + Filename.dirname (Str.matched_string path_str) + else + path_str + in + Path.make stem (* translate a path with wildcards into a regex *) let path_patt = @@ -45,70 +52,81 @@ let path_patt = let star2 = Str.regexp_string "**" in let qmark = Str.regexp_string "?" in fun path -> - let str = Path.to_string path - |> Sys_utils.normalize_filename_dir_sep in + let str = Path.to_string path |> Sys_utils.normalize_filename_dir_sep in (* because we accept both * and **, convert in 2 steps *) let results = Str.full_split star2 str in - let results = List.map (fun r -> match r with - | Str.Text s -> - (* note: unix directory seperators specifiers only. Windows directory - * seperators will already have been normalized to unix directory - * seperators *) - let s = Str.global_replace star "[^/]*" s in - Str.global_replace qmark "." s - | Str.Delim _ -> ".*") results in + let results = + Core_list.map + ~f:(fun r -> + match r with + | Str.Text s -> + (* note: unix directory seperators specifiers only. Windows directory + * seperators will already have been normalized to unix directory + * seperators *) + let s = Str.global_replace star "[^/]*" s in + Str.global_replace qmark "." 
s + | Str.Delim _ -> ".*") + results + in let str = String.concat "" results in Str.regexp str (* helper - eliminate noncanonical entries where possible. no other normalization is done *) let dir_sep = Str.regexp_string Filename.dir_sep + let fixup_path p = let s = Path.to_string p in - let is_normalized = match Sys_utils.realpath s with - | Some s' -> s' = s - | None -> false in - if is_normalized then p else - let abs = not (Filename.is_relative s) in - let entries = Str.split_delim dir_sep s in - let rec loop revbase entries = - match entries with - | h :: t when h = Filename.current_dir_name -> - loop revbase t - | h :: t when h = Filename.parent_dir_name -> ( - match revbase with - | _ :: rt -> loop rt t - | _ -> loop (h :: revbase) t - ) - | h :: t -> loop (h :: revbase) t - | [] -> List.rev revbase + let is_normalized = + match Sys_utils.realpath s with + | Some s' -> s' = s + | None -> false in - let entries = loop [] entries in - let s = List.fold_left Filename.concat "" entries in - let s = if abs then Filename.dir_sep ^ s else s in - Path.make s + if is_normalized then + p + else + let abs = not (Filename.is_relative s) in + let entries = Str.split_delim dir_sep s in + let rec loop revbase entries = + match entries with + | h :: t when h = Filename.current_dir_name -> loop revbase t + | h :: t when h = Filename.parent_dir_name -> + (match revbase with + | _ :: rt -> loop rt t + | _ -> loop (h :: revbase) t) + | h :: t -> loop (h :: revbase) t + | [] -> List.rev revbase + in + let entries = loop [] entries in + let s = List.fold_left Filename.concat "" entries in + let s = + if abs then + Filename.dir_sep ^ s + else + s + in + Path.make s (* adds `path` to the matcher, calculating the appropriate stem and pattern *) -let add { paths; stems; stem_map; } path = +let add { stems; stem_map } path = let path = fixup_path path in let stem = path_stem path in let patt = path_patt path in let pstr = Path.to_string path in - let stems, stem_map = - match PathMap.get stem stem_map with + let (stems, stem_map) = + match PathMap.find_opt stem stem_map with | None -> - let stem_map = PathMap.add stem [pstr, patt] stem_map in - (stem :: stems), stem_map + let stem_map = PathMap.add stem [(pstr, patt)] stem_map in + (stem :: stems, stem_map) | Some entries -> - let stem_map = PathMap.add stem ((pstr, patt) :: entries) stem_map in - stems, stem_map + let stem_map = PathMap.add stem ((pstr, patt) :: entries) stem_map in + (stems, stem_map) in - { paths = path::paths; stems; stem_map; } + { stems; stem_map } (* filters a list of prefixes into only the prefixes with which f starts *) -let find_prefixes f = List.filter (fun prefix -> - String_utils.string_starts_with f (Path.to_string prefix) -) +let find_prefixes f = + List.filter (fun prefix -> String_utils.string_starts_with f (Path.to_string prefix)) (* find a match for f in a list of patterns, or none *) let rec match_patt f = function @@ -119,7 +137,8 @@ let rec match_patt f = function let matches path_matcher f = let matching_stems = find_prefixes f path_matcher.stems in let normalized_f = Sys_utils.normalize_filename_dir_sep f in - List.exists (fun stem -> - let patts = PathMap.find_unsafe stem path_matcher.stem_map in - match_patt normalized_f patts != None - ) matching_stems + List.exists + (fun stem -> + let patts = PathMap.find stem path_matcher.stem_map in + match_patt normalized_f patts != None) + matching_stems diff --git a/src/common/path_matcher.mli b/src/common/path_matcher.mli index 0f2d6a2492f..5a2ca91c017 100644 --- 
a/src/common/path_matcher.mli +++ b/src/common/path_matcher.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,9 +8,9 @@ type t val empty : t -val paths : t -> Path.t list + val stems : t -> Path.t list -val stem_map : t -> ((string * Str.regexp) list) Utils_js.PathMap.t val add : t -> Path.t -> t + val matches : t -> string -> bool diff --git a/src/common/profiling/dune b/src/common/profiling/dune new file mode 100644 index 00000000000..69afa8f35de --- /dev/null +++ b/src/common/profiling/dune @@ -0,0 +1,10 @@ +(library + (name flow_common_profiling) + (wrapped false) + (libraries + dtoa + hh_json ; hack + lwt + sys_utils ; hack + ) + (preprocess (pps lwt_ppx))) diff --git a/src/common/profiling/profiling_js.ml b/src/common/profiling/profiling_js.ml index 784fdc04824..9d2f3e0f42a 100644 --- a/src/common/profiling/profiling_js.ml +++ b/src/common/profiling/profiling_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -23,19 +23,23 @@ *) module Timing : sig type running + type finished - val with_timing_lwt: label:string -> f:(running -> 'a Lwt.t) -> (finished * 'a) Lwt.t - val with_timer_lwt: - ?should_print:bool -> - timer:string -> - f:(unit -> 'a Lwt.t) -> - running -> - 'a Lwt.t - val get_total_wall_duration: finished -> float - val to_json: abridged:bool -> finished -> Hh_json.json - val to_json_legacy: abridged:bool -> finished -> Hh_json.json - val print_summary_timing_table: finished -> unit - val merge: from:finished -> into:running -> unit + + val with_timing_lwt : label:string -> f:(running -> 'a Lwt.t) -> (finished * 'a) Lwt.t + + val with_timer_lwt : + ?should_print:bool -> timer:string -> f:(unit -> 'a Lwt.t) -> running -> 'a Lwt.t + + val get_total_wall_duration : finished -> float + + val to_json : abridged:bool -> finished -> Hh_json.json + + val to_json_legacy : abridged:bool -> finished -> Hh_json.json + + val print_summary_timing_table : finished -> unit + + val merge : from:finished -> into:running -> unit end = struct type time_measurement = { start_age: float; @@ -55,6 +59,7 @@ end = struct worker_read_request: time_measurement; worker_run: time_measurement; worker_send_response: time_measurement; + worker_done: time_measurement; worker_gc_minor: time_measurement; worker_gc_major: time_measurement; } @@ -78,6 +83,7 @@ end = struct worker_read_request_start: float; worker_run_start: float; worker_send_response_start: float; + worker_done_start: float; worker_gc_minor_start: float; worker_gc_major_start: float; } @@ -95,61 +101,66 @@ end = struct } type running = running_timer ref (* The current running parent *) + type finished = result (* Returns the user cpu and system cpu times *) - let times () = Unix.( - let tm = times () in - (* Warning - cutime and cstime (children times) don't work on Windows *) - (tm.tms_utime +. tm.tms_cutime, tm.tms_stime +. tm.tms_cstime) - ) + let times () = + Unix.( + let tm = times () in + (* Warning - cutime and cstime (children times) don't work on Windows *) + (tm.tms_utime +. tm.tms_cutime, tm.tms_stime +. 
tm.tms_cstime)) let worker_times () = - let worker_user_time = match Measure.get_sum "worker_user_time" with - | None -> 0.0 - | Some time -> time in - let worker_system_time = match Measure.get_sum "worker_system_time" with - | None -> 0.0 - | Some time -> time in + let worker_user_time = + match Measure.get_sum "worker_user_time" with + | None -> 0.0 + | Some time -> time + in + let worker_system_time = + match Measure.get_sum "worker_system_time" with + | None -> 0.0 + | Some time -> time + in (worker_user_time, worker_system_time) - let time_measurement start end_ = - { - start_age = start; - duration = end_ -. start; - } + let time_measurement start end_ = { start_age = start; duration = end_ -. start } - let worker_wall_start_times, worker_wall_times = + let (worker_wall_start_times, worker_wall_times) = let get_run () = Option.value ~default:0.0 (Measure.get_sum "worker_wall_time") in let get_read () = Option.value ~default:0.0 (Measure.get_sum "worker_read_request") in let get_send () = Option.value ~default:0.0 (Measure.get_sum "worker_send_response") in let get_idle () = Option.value ~default:0.0 (Measure.get_sum "worker_idle") in - let get_gc_minor () = Option.value ~default:0.0 (Measure.get_sum "worker_gc_minor_wall_time") in - let get_gc_major () = Option.value ~default:0.0 (Measure.get_sum "worker_gc_major_wall_time") in - + let get_done () = Option.value ~default:0.0 (Measure.get_sum "worker_done") in + let get_gc_minor () = + Option.value ~default:0.0 (Measure.get_sum "worker_gc_minor_wall_time") + in + let get_gc_major () = + Option.value ~default:0.0 (Measure.get_sum "worker_gc_major_wall_time") + in let worker_wall_start_times () = { worker_idle_start = get_idle (); worker_read_request_start = get_read (); worker_run_start = get_run (); worker_send_response_start = get_send (); + worker_done_start = get_done (); worker_gc_minor_start = get_gc_minor (); worker_gc_major_start = get_gc_major (); } in - let worker_wall_times start = { worker_idle = time_measurement start.worker_idle_start (get_idle ()); worker_read_request = time_measurement start.worker_read_request_start (get_read ()); worker_run = time_measurement start.worker_run_start (get_run ()); worker_send_response = time_measurement start.worker_send_response_start (get_send ()); + worker_done = time_measurement start.worker_done_start (get_done ()); worker_gc_minor = time_measurement start.worker_gc_minor_start (get_gc_minor ()); worker_gc_major = time_measurement start.worker_gc_major_start (get_gc_major ()); } in - - worker_wall_start_times, worker_wall_times + (worker_wall_start_times, worker_wall_times) let legacy_top_timer_name = "Profiling" @@ -179,9 +190,12 @@ end = struct let cpu_system = end_.Sys_utils.cpu_system -. start.Sys_utils.cpu_system in let cpu_idle = end_.Sys_utils.cpu_idle -. start.Sys_utils.cpu_idle in let cpu_busy = cpu_user +. cpu_nice_user +. cpu_system in - let cpu_usage = if cpu_busy = 0. - then 0. - else cpu_busy /. (cpu_busy +. cpu_idle) in + let cpu_usage = + if cpu_busy = 0. then + 0. + else + cpu_busy /. (cpu_busy +. cpu_idle) + in { cpu_user; cpu_nice_user; cpu_system; cpu_idle; cpu_usage } let stop_timer running_timer = @@ -189,44 +203,58 @@ end = struct let (user_end, system_end) = times () in let (worker_user_end, worker_system_end) = worker_times () in let processor_info_end = Sys_utils.processor_info () in - - let user = { - start_age = running_timer.user_start; - duration = user_end -. 
running_timer.user_start; - } in - let system = { - start_age = running_timer.system_start; - duration = system_end -. running_timer.system_start; - } in - let worker_user = { - start_age = running_timer.worker_user_start; - duration = worker_user_end -. running_timer.worker_user_start; - } in - let worker_system = { - start_age = running_timer.worker_system_start; - duration = worker_system_end -. running_timer.worker_system_start; - } in + let user = + { start_age = running_timer.user_start; duration = user_end -. running_timer.user_start } + in + let system = + { + start_age = running_timer.system_start; + duration = system_end -. running_timer.system_start; + } + in + let worker_user = + { + start_age = running_timer.worker_user_start; + duration = worker_user_end -. running_timer.worker_user_start; + } + in + let worker_system = + { + start_age = running_timer.worker_system_start; + duration = worker_system_end -. running_timer.worker_system_start; + } + in let worker_wall_times = worker_wall_times running_timer.worker_wall_start_times in - let wall = { - start_age = running_timer.wall_start -. flow_start_time; - duration = wall_end -. running_timer.wall_start; - } in - let processor_totals = make_processor_info - running_timer.processor_info_start.Sys_utils.proc_totals - processor_info_end.Sys_utils.proc_totals in - + let wall = + { + start_age = running_timer.wall_start -. flow_start_time; + duration = wall_end -. running_timer.wall_start; + } + in + let processor_totals = + make_processor_info + running_timer.processor_info_start.Sys_utils.proc_totals + processor_info_end.Sys_utils.proc_totals + in let flow_cpu_time = - user.duration +. system.duration +. worker_user.duration +. worker_system.duration in + user.duration +. system.duration +. worker_user.duration +. worker_system.duration + in let total_cpu_time = - processor_totals.cpu_user +. processor_totals.cpu_nice_user +. - processor_totals.cpu_system +. processor_totals.cpu_idle in - + processor_totals.cpu_user + +. processor_totals.cpu_nice_user + +. processor_totals.cpu_system + +. processor_totals.cpu_idle + in (* flow_cpu_time and total_cpu_time are calculated using slightly different systems. * flow_cpu_time should always be less than total_cpu_time, so we could in theory just * check the numerator. However, checking the denominator is a slightly safer way to avoid * a division by zero *) - let flow_cpu_usage = if total_cpu_time = 0. then 0. else flow_cpu_time /. total_cpu_time in - + let flow_cpu_usage = + if total_cpu_time = 0. then + 0. + else + flow_cpu_time /. total_cpu_time + in { timer_name = running_timer.timer; user; @@ -244,141 +272,141 @@ end = struct let with_timing_lwt ~label ~f = let total_timer = start_timer ~timer:label in let running = ref total_timer in - (* Why don't we wrap this in a finalize? 
Well, if f throws, then no one will ever read our * finished timer, so we don't really need to stop it *) let%lwt ret = f running in let finished_timer = stop_timer total_timer in Lwt.return (finished_timer, ret) - let with_timer_lwt ?(should_print=false) ~timer ~f running = + let with_timer_lwt ?(should_print = false) ~timer ~f running = let parent_timer = !running in let running_timer = start_timer ~timer in running := running_timer; Lwt.finalize f (fun () -> - let finished_timer = stop_timer running_timer in + let finished_timer = stop_timer running_timer in + parent_timer.sub_results_rev <- finished_timer :: parent_timer.sub_results_rev; - parent_timer.sub_results_rev <- finished_timer :: parent_timer.sub_results_rev; + running := parent_timer; - running := parent_timer; + ( if should_print then + let stats = + Printf.sprintf + "start_wall_age: %f; wall_duration: %f; cpu_usage: %f; flow_cpu_usage: %f" + finished_timer.wall.start_age + finished_timer.wall.duration + finished_timer.processor_totals.cpu_usage + finished_timer.flow_cpu_usage + in + Hh_logger.info "TimingEvent `%s`: %s" timer stats ); - if should_print then begin - let stats = Printf.sprintf - "start_wall_age: %f; wall_duration: %f; cpu_usage: %f; flow_cpu_usage: %f" - finished_timer.wall.start_age - finished_timer.wall.duration - finished_timer.processor_totals.cpu_usage - finished_timer.flow_cpu_usage in - Hh_logger.info - "TimingEvent `%s`: %s" - timer - stats - end; - - Lwt.return_unit - ) + Lwt.return_unit) let get_total_wall_duration finished_timer = finished_timer.wall.duration - let combine_time_measurements = List.fold_left - (fun acc t -> - { start_age = acc.start_age +. t.start_age; duration = acc.duration +. t.duration } - ) - { start_age = 0.0; duration = 0.0 } + let combine_time_measurements = + List.fold_left + (fun acc t -> + { start_age = acc.start_age +. t.start_age; duration = acc.duration +. t.duration }) + { start_age = 0.0; duration = 0.0 } - let json_of_time_measurement { start_age; duration; } = - let open Hh_json in - JSON_Object [ - "start_age", JSON_Number (Dtoa.ecma_string_of_float start_age); - "duration", JSON_Number (Dtoa.ecma_string_of_float duration); - ] + let json_of_time_measurement { start_age; duration } = + Hh_json.( + JSON_Object + [ + ("start_age", JSON_Number (Dtoa.ecma_string_of_float start_age)); + ("duration", JSON_Number (Dtoa.ecma_string_of_float duration)); + ]) - let total_cpu_time info = - info.cpu_user +. info.cpu_nice_user +. info.cpu_system +. info.cpu_idle + let total_cpu_time info = info.cpu_user +. info.cpu_nice_user +. info.cpu_system +. 
info.cpu_idle let json_of_processor_info ~abridged info = - let open Hh_json in - if abridged - then - let total = total_cpu_time info in - (* We can infer enough from these two numbers - * busy = total * usage - * idle = total - busy *) - JSON_Object [ - "total", JSON_Number (Dtoa.ecma_string_of_float total); - "usage", JSON_Number (Dtoa.ecma_string_of_float info.cpu_usage); - ] - else - JSON_Object [ - "user", JSON_Number (Dtoa.ecma_string_of_float info.cpu_user); - "nice", JSON_Number (Dtoa.ecma_string_of_float info.cpu_nice_user); - "system", JSON_Number (Dtoa.ecma_string_of_float info.cpu_system); - "idle", JSON_Number (Dtoa.ecma_string_of_float info.cpu_idle); - "usage", JSON_Number (Dtoa.ecma_string_of_float info.cpu_usage); - ] + Hh_json.( + if abridged then + let total = total_cpu_time info in + (* We can infer enough from these two numbers + * busy = total * usage + * idle = total - busy *) + JSON_Object + [ + ("total", JSON_Number (Dtoa.ecma_string_of_float total)); + ("usage", JSON_Number (Dtoa.ecma_string_of_float info.cpu_usage)); + ] + else + JSON_Object + [ + ("user", JSON_Number (Dtoa.ecma_string_of_float info.cpu_user)); + ("nice", JSON_Number (Dtoa.ecma_string_of_float info.cpu_nice_user)); + ("system", JSON_Number (Dtoa.ecma_string_of_float info.cpu_system)); + ("idle", JSON_Number (Dtoa.ecma_string_of_float info.cpu_idle)); + ("usage", JSON_Number (Dtoa.ecma_string_of_float info.cpu_usage)); + ]) (* This function solves the problem of having multiple sibling timers (timers with the same * parent) with the same name. Our JSON representation is an object keyed by the name of the * timer, so we need to merge any two timers with the same name *) let merge_dupes = let merge_time_measurement a b = - { start_age = a.start_age; duration = a.duration +. b.duration; } + { start_age = a.start_age; duration = a.duration +. b.duration } in - let merge_worker_wall_times a b = { worker_idle = merge_time_measurement a.worker_idle b.worker_idle; worker_read_request = merge_time_measurement a.worker_read_request b.worker_read_request; worker_run = merge_time_measurement a.worker_run b.worker_run; worker_send_response = merge_time_measurement a.worker_send_response b.worker_send_response; + worker_done = merge_time_measurement a.worker_done b.worker_done; worker_gc_minor = merge_time_measurement a.worker_gc_minor b.worker_gc_minor; worker_gc_major = merge_time_measurement a.worker_gc_major b.worker_gc_major; } in - let weighted_average values = - let weight_sum, acc = List.fold_left (fun (weight_sum, acc) (weight, value) -> - assert (weight >= 0.); - weight_sum +. weight, acc +. weight *. value - ) (0., 0.) values in - if weight_sum > 0. - then acc /. weight_sum - else 0. + let (weight_sum, acc) = + List.fold_left + (fun (weight_sum, acc) (weight, value) -> + assert (weight >= 0.); + (weight_sum +. weight, acc +. (weight *. value))) + (0., 0.) + values + in + if weight_sum > 0. then + acc /. weight_sum + else + 0. in - let merge_processor_totals a b = { cpu_user = a.cpu_user +. b.cpu_user; cpu_nice_user = a.cpu_nice_user +. b.cpu_nice_user; cpu_system = a.cpu_system +. b.cpu_system; cpu_idle = a.cpu_idle +. 
b.cpu_idle; - cpu_usage = weighted_average [ - total_cpu_time a, a.cpu_usage; - total_cpu_time b, b.cpu_usage; - ]; + cpu_usage = + weighted_average [(total_cpu_time a, a.cpu_usage); (total_cpu_time b, b.cpu_usage)]; } in - fun ~dupes result -> - List.fold_left (fun result dupe -> - { - timer_name = result.timer_name; - wall = merge_time_measurement result.wall dupe.wall; - user = merge_time_measurement result.user dupe.user; - system = merge_time_measurement result.system dupe.system; - worker_user = merge_time_measurement result.worker_user dupe.worker_user; - worker_system = merge_time_measurement result.worker_system dupe.worker_system; - worker_wall_times = - merge_worker_wall_times result.worker_wall_times dupe.worker_wall_times; - processor_totals = merge_processor_totals result.processor_totals dupe.processor_totals; - flow_cpu_usage = weighted_average [ - total_cpu_time result.processor_totals, result.flow_cpu_usage; - total_cpu_time dupe.processor_totals, dupe.flow_cpu_usage; - ]; - sub_results = result.sub_results @ dupe.sub_results; - sample_count = result.sample_count + 1; - } - ) result dupes + List.fold_left + (fun result dupe -> + { + timer_name = result.timer_name; + wall = merge_time_measurement result.wall dupe.wall; + user = merge_time_measurement result.user dupe.user; + system = merge_time_measurement result.system dupe.system; + worker_user = merge_time_measurement result.worker_user dupe.worker_user; + worker_system = merge_time_measurement result.worker_system dupe.worker_system; + worker_wall_times = + merge_worker_wall_times result.worker_wall_times dupe.worker_wall_times; + processor_totals = merge_processor_totals result.processor_totals dupe.processor_totals; + flow_cpu_usage = + weighted_average + [ + (total_cpu_time result.processor_totals, result.flow_cpu_usage); + (total_cpu_time dupe.processor_totals, dupe.flow_cpu_usage); + ]; + sub_results = result.sub_results @ dupe.sub_results; + sample_count = result.sample_count + 1; + }) + result + dupes let rec json_of_result ~abridged ~max_depth ~dupes result = let { @@ -393,45 +421,55 @@ end = struct flow_cpu_usage; sub_results; sample_count; - } = merge_dupes ~dupes result in - let open Hh_json in - let cpu = [user; system; worker_user; worker_system] in - let common_fields = [ - "wall", json_of_time_measurement wall; - "cpu", json_of_time_measurement (combine_time_measurements cpu); - "flow_cpu_usage", JSON_Number (Dtoa.ecma_string_of_float flow_cpu_usage); - "processor_totals", json_of_processor_info ~abridged processor_totals; - ] in - let fields = - if abridged - then - if sample_count > 1 - then ("samples", JSON_Number (string_of_int sample_count)) :: common_fields - else common_fields - else - let sub_results = - if max_depth > 0 - then json_of_results ~abridged ~max_depth:(max_depth - 1) sub_results - else JSON_Object [] - in - common_fields @ [ - "wall", json_of_time_measurement wall; - "user", json_of_time_measurement user; - "system", json_of_time_measurement system; - "worker_user", json_of_time_measurement worker_user; - "worker_system", json_of_time_measurement worker_system; - "worker_wall_times", JSON_Object [ - "run", json_of_time_measurement worker_wall_times.worker_run; - "read", json_of_time_measurement worker_wall_times.worker_read_request; - "send", json_of_time_measurement worker_wall_times.worker_send_response; - "idle", json_of_time_measurement worker_wall_times.worker_idle; - "gc_minor", json_of_time_measurement worker_wall_times.worker_gc_minor; - "gc_major", json_of_time_measurement 
worker_wall_times.worker_gc_major; - ]; - "sub_results", sub_results; - "samples", JSON_Number (string_of_int sample_count); + } = + merge_dupes ~dupes result + in + Hh_json.( + let cpu = [user; system; worker_user; worker_system] in + let common_fields = + [ + ("wall", json_of_time_measurement wall); + ("cpu", json_of_time_measurement (combine_time_measurements cpu)); + ("flow_cpu_usage", JSON_Number (Dtoa.ecma_string_of_float flow_cpu_usage)); + ("processor_totals", json_of_processor_info ~abridged processor_totals); ] - in timer_name, JSON_Object fields + in + let fields = + if abridged then + if sample_count > 1 then + ("samples", JSON_Number (string_of_int sample_count)) :: common_fields + else + common_fields + else + let sub_results = + if max_depth > 0 then + json_of_results ~abridged ~max_depth:(max_depth - 1) sub_results + else + JSON_Object [] + in + common_fields + @ [ + ("wall", json_of_time_measurement wall); + ("user", json_of_time_measurement user); + ("system", json_of_time_measurement system); + ("worker_user", json_of_time_measurement worker_user); + ("worker_system", json_of_time_measurement worker_system); + ( "worker_wall_times", + JSON_Object + [ + ("run", json_of_time_measurement worker_wall_times.worker_run); + ("read", json_of_time_measurement worker_wall_times.worker_read_request); + ("send", json_of_time_measurement worker_wall_times.worker_send_response); + ("idle", json_of_time_measurement worker_wall_times.worker_idle); + ("done", json_of_time_measurement worker_wall_times.worker_done); + ("gc_minor", json_of_time_measurement worker_wall_times.worker_gc_minor); + ("gc_major", json_of_time_measurement worker_wall_times.worker_gc_major); + ] ); + ("sub_results", sub_results); + ("samples", JSON_Number (string_of_int sample_count)); + ] + in + (timer_name, JSON_Object fields)) (* This will return a JSON object which is a map from the timer name to the timer's results. This * makes it easy for tools like Scuba to query for timing.results.Parsing.wall.duration or @@ -450,19 +488,29 @@ end = struct * start time, and report how many timers there were. *) and json_of_results ~abridged ~max_depth results = - let results_rev, dupes = List.fold_left (fun (results, dupes) result -> - match SMap.get result.timer_name dupes with - | None -> - result :: results, SMap.add result.timer_name [] dupes - | Some prev_dupes -> - results, SMap.add result.timer_name (result::prev_dupes) dupes - ) ([], SMap.empty) results in - let json_results = List.fold_left (fun acc result -> - let json_result = - json_of_result ~abridged ~max_depth ~dupes:(SMap.find_unsafe result.timer_name dupes) result - in - json_result :: acc - ) [] results_rev in + let (results_rev, dupes) = + List.fold_left + (fun (results, dupes) result -> + match SMap.get result.timer_name dupes with + | None -> (result :: results, SMap.add result.timer_name [] dupes) + | Some prev_dupes -> (results, SMap.add result.timer_name (result :: prev_dupes) dupes)) + ([], SMap.empty) + results + in + let json_results = + List.fold_left + (fun acc result -> + let json_result = + json_of_result + ~abridged + ~max_depth + ~dupes:(SMap.find_unsafe result.timer_name dupes) + result + in + json_result :: acc) + [] + results_rev + in Hh_json.JSON_Object json_results (* There are two concerns here: @@ -473,25 +521,48 @@ end = struct * * So here's the plan: * - * A) When abridged is set, only output the first 2 levels of the hierarchy. That should give us - * totals and each sub timer. 
+ * A) When abridged is set, only output the first 3 levels of the hierarchy. That should give us + * totals, each timer, and each sub timer. * B) The legacy graphs and profiling assumes two main things: * 1) A flat object with all the timers. So we need to flatten out the results * 2) The "totals" to be in a timer named "Profiling". *) let to_json ~abridged result = - let max_depth = if abridged then 1 else 100 in + let max_depth = + if abridged then + 1 + else + 100 + in json_of_results ~abridged ~max_depth [result] let to_json_legacy ~abridged result = - let results = { result with - timer_name = legacy_top_timer_name; - } :: result.sub_results in - let results = json_of_results ~abridged ~max_depth:0 results in - Hh_json.JSON_Object [ - "results", results; - ] - + (* If we have the hierarchy + * + * Foo + * Bar + * BazOne + * BazTwo + * Qux + * + * We flatten it to + * + * Profiling, Foo, Bar, Bar:BazOne, Bar:BazTwo, Qux + *) + let results_rev = + List.fold_left + (fun acc sub_result -> + let prefix = sub_result.timer_name ^ ":" in + List.fold_left + (fun acc sub_sub_result -> + { sub_sub_result with timer_name = prefix ^ sub_sub_result.timer_name } :: acc) + (sub_result :: acc) + sub_result.sub_results) + [{ result with timer_name = legacy_top_timer_name }] + result.sub_results + in + let results = json_of_results ~abridged ~max_depth:0 (List.rev results_rev) in + Hh_json.JSON_Object [("results", results)] (* Prints out a nice table of all the timers for a profiling run. It might look like this: * @@ -523,193 +594,206 @@ end = struct let print_summary_timing_table = (* Total cpu duration *) let sum_cpu result = - result.user.duration +. - result.system.duration +. - result.worker_user.duration +. - result.worker_system.duration + result.user.duration + +. result.system.duration + +. result.worker_user.duration + +. result.worker_system.duration in - (* Total cpu start age *) let sum_cpu_start_age result = - result.user.start_age +. - result.system.start_age +. - result.worker_user.start_age +. - result.worker_system.start_age + result.user.start_age + +. result.system.start_age + +. result.worker_user.start_age + +. result.worker_system.start_age in - (* Prints a single row of the table. All but the last column have a fixed width. *) let print_summary_single_raw - key (result_wall, result_cpu, (run, read, send, idle, gc_minor, gc_major)) total = - let run = run -. gc_minor -. gc_major in (* run time includes gc time *) - let worker_total = idle +. read +. run +. send +. gc_minor +. gc_major in - let worker_total = if worker_total = 0.0 then 1.0 else worker_total in + key (result_wall, result_cpu, (run, read, send, idle, done_, gc_minor, gc_major)) total = + let run = run -. gc_minor -. gc_major in + (* run time includes gc time *) + let worker_total = idle +. done_ +. read +. run +. send +. gc_minor +. gc_major in + let worker_total = + if worker_total = 0.0 then + 1.0 + else + worker_total + in let worker_idle_pct = idle /. worker_total *. 100. in let worker_read_pct = read /. worker_total *. 100. in let worker_run_pct = run /. worker_total *. 100. in let worker_send_pct = send /. worker_total *. 100. in + let worker_done_pct = done_ /. worker_total *. 100. in let worker_gc_minor_pct = gc_minor /. worker_total *. 100. in let worker_gc_major_pct = gc_major /. worker_total *. 100. in - Printf.eprintf - "%7.3f (%5.1f%%) %9.3f (%5.1f%%) %3d%% %3d%% %3d%% %3d%% %3d%% %3d%% %s\n%!" + "%7.3f (%5.1f%%) %9.3f (%5.1f%%) %3d%% %3d%% %3d%% %3d%% %3d%% %3d%% %3d%% %s\n%!" result_wall (100.0 *. 
result_wall /. total.wall.duration) result_cpu - (100.0 *. result_cpu /. (sum_cpu total)) + (100.0 *. result_cpu /. sum_cpu total) (worker_run_pct |> int_of_float) (worker_read_pct |> int_of_float) (worker_send_pct |> int_of_float) (worker_idle_pct |> int_of_float) + (worker_done_pct |> int_of_float) (worker_gc_minor_pct |> int_of_float) (worker_gc_major_pct |> int_of_float) key in - let print_summary_single key result total = - let worker_wall_times = ( - result.worker_wall_times.worker_run.duration, - result.worker_wall_times.worker_read_request.duration, - result.worker_wall_times.worker_send_response.duration, - result.worker_wall_times.worker_idle.duration, - result.worker_wall_times.worker_gc_minor.duration, - result.worker_wall_times.worker_gc_major.duration - ) in + let worker_wall_times = + ( result.worker_wall_times.worker_run.duration, + result.worker_wall_times.worker_read_request.duration, + result.worker_wall_times.worker_send_response.duration, + result.worker_wall_times.worker_idle.duration, + result.worker_wall_times.worker_done.duration, + result.worker_wall_times.worker_gc_minor.duration, + result.worker_wall_times.worker_gc_major.duration ) + in print_summary_single_raw key (result.wall.duration, sum_cpu result, worker_wall_times) total in - (* If there's more than 1% of wall time since the last end and the next start_age, then print an * row *) let print_unknown ~indent last_end (wall_start_age, cpu_start_age, worker_wall_start) total = - let (run_start, read_start, send_start, idle_start, gc_minor_start, gc_major_start) = + let ( run_start, + read_start, + send_start, + idle_start, + done_start, + gc_minor_start, + gc_major_start ) = worker_wall_start in - let (wall_end, cpu_end, (run_end, read_end, send_end, idle_end, gc_minor_end, gc_major_end)) = + let ( wall_end, + cpu_end, + (run_end, read_end, send_end, idle_end, done_end, gc_minor_end, gc_major_end) ) = last_end in let unknown_wall = wall_start_age -. wall_end in - if unknown_wall /. total.wall.duration > 0.01 - then + if unknown_wall /. total.wall.duration > 0.01 then let unknown_cpu = cpu_start_age -. cpu_end in - let unknown_worker = ( - run_start -. run_end, - read_start -. read_end, - send_start -. send_end, - idle_start -. idle_end, - gc_minor_start -. gc_minor_end, - gc_major_start -. gc_major_end - ) in + let unknown_worker = + ( run_start -. run_end, + read_start -. read_end, + send_start -. send_end, + idle_start -. idle_end, + done_start -. done_end, + gc_minor_start -. gc_minor_end, + gc_major_start -. 
gc_major_end ) + in print_summary_single_raw - (indent ^ "") (unknown_wall, unknown_cpu, unknown_worker) total + (indent ^ "") + (unknown_wall, unknown_cpu, unknown_worker) + total in - let worker_wall_times_to_tuples worker_wall_times = let { - worker_run = { start_age = run_start; duration = run_duration; }; - worker_read_request = { start_age = read_start; duration = read_duration; }; - worker_send_response = { start_age = send_start; duration = send_duration; }; - worker_idle = { start_age = idle_start; duration = idle_duration; }; - worker_gc_minor = { start_age = gc_minor_start; duration = gc_minor_duration; }; - worker_gc_major = { start_age = gc_major_start; duration = gc_major_duration; }; - } = worker_wall_times in + worker_run = { start_age = run_start; duration = run_duration }; + worker_read_request = { start_age = read_start; duration = read_duration }; + worker_send_response = { start_age = send_start; duration = send_duration }; + worker_idle = { start_age = idle_start; duration = idle_duration }; + worker_done = { start_age = done_start; duration = done_duration }; + worker_gc_minor = { start_age = gc_minor_start; duration = gc_minor_duration }; + worker_gc_major = { start_age = gc_major_start; duration = gc_major_duration }; + } = + worker_wall_times + in let worker_last = - (run_start, read_start, send_start, idle_start, gc_minor_start, gc_major_start) + (run_start, read_start, send_start, idle_start, done_start, gc_minor_start, gc_major_start) in - let worker_remaining = ( - run_duration, + let worker_remaining = + ( run_duration, read_duration, send_duration, idle_duration, + done_duration, gc_minor_duration, - gc_major_duration - ) in - let worker_end = ( - run_start +. run_duration, - read_start +. read_duration, - send_start +. send_duration, - idle_start +. idle_duration, - gc_minor_start +. gc_minor_duration, - gc_major_start +. gc_major_duration - ) in - worker_last, worker_remaining, worker_end + gc_major_duration ) + in + let worker_end = + ( run_start +. run_duration, + read_start +. read_duration, + send_start +. send_duration, + idle_start +. idle_duration, + done_start +. done_duration, + gc_minor_start +. gc_minor_duration, + gc_major_start +. 
gc_major_duration ) + in + (worker_last, worker_remaining, worker_end) in - let rec print_result_rows - ~indent ~total (last_end, (wall_remaining, cpu_remaining, worker_remaining)) result = - - let result_worker_starts, result_worker_durations, result_worker_end = + ~indent ~total (last_end, (wall_remaining, cpu_remaining, worker_remaining)) result = + let (result_worker_starts, result_worker_durations, result_worker_end) = worker_wall_times_to_tuples result.worker_wall_times in (* Print an row if needed *) print_unknown - ~indent last_end - (result.wall.start_age, sum_cpu_start_age result, result_worker_starts) total; + ~indent + last_end + (result.wall.start_age, sum_cpu_start_age result, result_worker_starts) + total; (* Print this row *) print_summary_single (indent ^ result.timer_name) result total; - if result.sub_results <> [] - then begin + if result.sub_results <> [] then ( let new_indent = indent ^ " " in - - let last_end, remaining = List.fold_left - (print_result_rows ~indent:new_indent ~total) - ( - (result.wall.start_age, sum_cpu_start_age result, result_worker_starts), - (result.wall.duration, sum_cpu result, result_worker_durations) - ) - result.sub_results + let (last_end, remaining) = + List.fold_left + (print_result_rows ~indent:new_indent ~total) + ( (result.wall.start_age, sum_cpu_start_age result, result_worker_starts), + (result.wall.duration, sum_cpu result, result_worker_durations) ) + result.sub_results in - (* Print an row if there's too much time between the last section and the end of * the profiling *) print_unknown ~indent:new_indent last_end - ( - result.wall.start_age +. result.wall.duration, + ( result.wall.start_age +. result.wall.duration, sum_cpu_start_age result +. sum_cpu result, - result_worker_end - ) + result_worker_end ) total; (* Print the unknown totals *) - print_summary_single_raw (new_indent ^ "") remaining total; - end; + print_summary_single_raw (new_indent ^ "") remaining total + ); let last_end = - result.wall.start_age +. result.wall.duration, - (sum_cpu_start_age result) +. (sum_cpu result), - result_worker_end + ( result.wall.start_age +. result.wall.duration, + sum_cpu_start_age result +. sum_cpu result, + result_worker_end ) in let remaining = let wall_remaining = wall_remaining -. result.wall.duration in - let cpu_remaining = cpu_remaining -. (sum_cpu result) in + let cpu_remaining = cpu_remaining -. sum_cpu result in let worker_remaining = - let (run, read, send, idle, gc_minor, gc_major) = worker_remaining in - ( - run -. result.worker_wall_times.worker_run.duration, + let (run, read, send, idle, done_, gc_minor, gc_major) = worker_remaining in + ( run -. result.worker_wall_times.worker_run.duration, read -. result.worker_wall_times.worker_read_request.duration, send -. result.worker_wall_times.worker_send_response.duration, idle -. result.worker_wall_times.worker_idle.duration, + done_ -. result.worker_wall_times.worker_done.duration, gc_minor -. result.worker_wall_times.worker_gc_minor.duration, - gc_major -. result.worker_wall_times.worker_gc_major.duration - ) - in wall_remaining, cpu_remaining, worker_remaining + gc_major -. 
result.worker_wall_times.worker_gc_major.duration ) + in + (wall_remaining, cpu_remaining, worker_remaining) in - last_end, remaining + (last_end, remaining) in - - fun total -> (* Print the header *) let label = Printf.sprintf "%s Timings" total.timer_name in let header = - " WALL TIME CPU TIME RUN/READ/SEND/IDLE/GC m/GC M SECTION" + " WALL TIME CPU TIME RUN/READ/SEND/IDLE/DONE/GC m/GC M SECTION" in let header_len = String.length header + 8 in - let whitespace_len = header_len - (String.length label) in - Printf.eprintf "%s%s%s\n%!" - (String.make ((whitespace_len+1)/2) '=') label (String.make ((whitespace_len)/2) '='); + let whitespace_len = header_len - String.length label in + Printf.eprintf + "%s%s%s\n%!" + (String.make ((whitespace_len + 1) / 2) '=') + label + (String.make (whitespace_len / 2) '='); Printf.eprintf "%s\n%!" header; Printf.eprintf "%s\n%!" (String.make header_len '-'); @@ -717,53 +801,56 @@ end = struct print_summary_single "" total total; let indent = " " in - - let worker_last, worker_remaining, worker_end = + let (worker_last, worker_remaining, worker_end) = worker_wall_times_to_tuples total.worker_wall_times in - - let last_end = total.wall.start_age, sum_cpu_start_age total, worker_last in - let remaining = total.wall.duration, sum_cpu total, worker_remaining in - + let last_end = (total.wall.start_age, sum_cpu_start_age total, worker_last) in + let remaining = (total.wall.duration, sum_cpu total, worker_remaining) in (* Print the various sections and the unknown durations *) - let last_end, remaining = List.fold_left - (print_result_rows ~indent ~total) - (last_end, remaining) - total.sub_results + let (last_end, remaining) = + List.fold_left (print_result_rows ~indent ~total) (last_end, remaining) total.sub_results in - (* Print an row if there's too much time between the last section and the end of the * profiling *) let () = - let start = ( - total.wall.start_age +. total.wall.duration, - sum_cpu_start_age total +. sum_cpu total, - worker_end - ) in - print_unknown - ~indent - last_end - start - total + let start = + ( total.wall.start_age +. total.wall.duration, + sum_cpu_start_age total +. 
sum_cpu total, + worker_end ) + in + print_unknown ~indent last_end start total in - (* Print the unknown totals *) print_summary_single_raw "" remaining total - let merge ~from ~into = - !into.sub_results_rev <- from :: !into.sub_results_rev + let merge ~from ~into = !into.sub_results_rev <- from :: !into.sub_results_rev end - -module Memory: sig +module Memory : sig type running + type finished - val with_memory_lwt: label:string -> f:(running -> 'a Lwt.t) -> (finished * 'a) Lwt.t - val legacy_sample_memory: metric:string -> value:float -> running -> unit - val sample_memory: metric:string -> value:float -> running -> unit - val to_json: abridged:bool -> finished -> Hh_json.json - val print_summary_memory_table: finished -> unit - val merge: from:finished -> into:running -> unit + + val with_memory_lwt : label:string -> f:(running -> 'a Lwt.t) -> (finished * 'a) Lwt.t + + val legacy_sample_memory : metric:string -> value:float -> running -> unit + + val sample_memory : group:string -> metric:string -> value:float -> running -> unit + + val add_memory : + group:string -> + metric:string -> + start:float -> + delta:float -> + hwm_delta:float -> + running -> + unit + + val to_json : abridged:bool -> finished -> Hh_json.json + + val print_summary_memory_table : finished -> unit + + val merge : from:finished -> into:running -> unit end = struct type memory_result = { start: float; @@ -773,106 +860,160 @@ end = struct } and running' = { - running_results: memory_result SMap.t; + running_groups_rev: string list; + running_results: memory_result SMap.t SMap.t; running_sub_results_rev: finished list; } + and running = running' ref + and finished = { finished_label: string; - finished_results: memory_result SMap.t; + finished_groups: string list; + finished_results: memory_result SMap.t SMap.t; finished_sub_results: finished list; } + let legacy_group = "LEGACY" + let with_memory_lwt ~label ~f = - let running_memory = ref { - running_results = SMap.empty; - running_sub_results_rev = []; - } in + let running_memory = + ref { running_groups_rev = []; running_results = SMap.empty; running_sub_results_rev = [] } + in let%lwt ret = f running_memory in - let finished_memory = { - finished_label = label; - finished_results = !running_memory.running_results; - finished_sub_results = List.rev (!running_memory.running_sub_results_rev); - } in + let finished_memory = + { + finished_label = label; + finished_groups = List.rev !running_memory.running_groups_rev; + finished_results = !running_memory.running_results; + finished_sub_results = List.rev !running_memory.running_sub_results_rev; + } + in Lwt.return (finished_memory, ret) + let get_group_map ~group running_memory = + match SMap.get group !running_memory.running_results with + | None -> + running_memory := + { + !running_memory with + running_groups_rev = group :: !running_memory.running_groups_rev; + running_results = SMap.add group SMap.empty !running_memory.running_results; + }; + SMap.empty + | Some group -> group + + let get_metric ~group ~metric running_memory = + get_group_map ~group running_memory |> SMap.get metric + + let set_metric ~group ~metric entry running_memory = + let group_map = get_group_map ~group running_memory |> SMap.add metric entry in + running_memory := + { + !running_memory with + running_results = SMap.add group group_map !running_memory.running_results; + } + let legacy_sample_memory ~metric ~value running_memory = - let legacy_metric = { - start = 0.0; - delta = value; - high_water_mark_delta = value; - is_legacy = true; 
- } in - running_memory := { !running_memory with - running_results = SMap.add metric legacy_metric (!running_memory.running_results); - } + let legacy_metric = + { start = 0.0; delta = value; high_water_mark_delta = value; is_legacy = true } + in + set_metric ~group:legacy_group ~metric legacy_metric running_memory - let start_sampling ~metric ~value running_memory = - let new_metric = { - start = value; - delta = 0.0; - high_water_mark_delta = 0.0; - is_legacy = false; - } in - running_memory := { !running_memory with - running_results = SMap.add metric new_metric (!running_memory.running_results); - } - let sample_memory ~metric ~value running_memory = - match SMap.get metric (!running_memory.running_results) with - | None -> start_sampling ~metric ~value running_memory + let start_sampling ~group ~metric ~value running_memory = + let new_metric = + { start = value; delta = 0.0; high_water_mark_delta = 0.0; is_legacy = false } + in + set_metric ~group ~metric new_metric running_memory + + let sample_memory ~group ~metric ~value running_memory = + match get_metric ~group ~metric running_memory with + | None -> start_sampling ~group ~metric ~value running_memory | Some old_metric -> - let new_metric = { old_metric with - delta = value -. old_metric.start; - high_water_mark_delta = max (value -. old_metric.start) old_metric.high_water_mark_delta; - } in - running_memory := { !running_memory with - running_results = SMap.add metric new_metric (!running_memory.running_results) - } + let new_metric = + { + old_metric with + delta = value -. old_metric.start; + high_water_mark_delta = max (value -. old_metric.start) old_metric.high_water_mark_delta; + } + in + set_metric ~group ~metric new_metric running_memory + + let add_memory ~group ~metric ~start ~delta ~hwm_delta running_memory = + let new_metric = { start; delta; high_water_mark_delta = hwm_delta; is_legacy = false } in + set_metric ~group ~metric new_metric running_memory let rec to_json ~abridged finished_memory = - let open Hh_json in - let object_props = finished_memory.finished_results - |> SMap.map (fun v -> - if v.is_legacy - then JSON_Number (Dtoa.ecma_string_of_float v.delta) - else JSON_Object [ - ("start", JSON_Number (Dtoa.ecma_string_of_float v.start)); - ("delta", JSON_Number (Dtoa.ecma_string_of_float v.delta)); - ("hwm_delta", JSON_Number (Dtoa.ecma_string_of_float v.high_water_mark_delta)); - ] - ) - |> SMap.elements in - let object_props = - if abridged - then object_props - else - let sub_results = JSON_Object ( List.map - (fun result -> result.finished_label, to_json ~abridged:false result) - finished_memory.finished_sub_results - ) in - ("sub_results", sub_results) :: object_props - in - JSON_Object object_props + Hh_json.( + let object_props = + SMap.fold + (fun group_name group props -> + if group_name = legacy_group then + SMap.fold + (fun k v props -> (k, JSON_Number (Dtoa.ecma_string_of_float v.delta)) :: props) + group + props + else + let group_json = + SMap.fold + (fun k v props -> + ( k, + JSON_Object + [ + ("start", JSON_Number (Dtoa.ecma_string_of_float v.start)); + ("delta", JSON_Number (Dtoa.ecma_string_of_float v.delta)); + ( "hwm_delta", + JSON_Number (Dtoa.ecma_string_of_float v.high_water_mark_delta) ); + ] ) + :: props) + group + [] + in + (group_name, JSON_Object group_json) :: props) + finished_memory.finished_results + [] + in + let object_props = + if abridged then + object_props + else + let sub_results = + JSON_Object + (List.map + (fun result -> (result.finished_label, to_json 
~abridged:false result)) + finished_memory.finished_sub_results) + in + ("sub_results", sub_results) :: object_props + in + JSON_Object object_props) let print_summary_memory_table = let pretty_num f = let abs_f = abs_float f in - if abs_f > 1000000000.0 - then Printf.sprintf "%+7.2fG" (f /. 1000000000.0) - else if abs_f > 1000000.0 - then Printf.sprintf "%+7.2fM" (f /. 1000000.0) - else if abs_f > 1000.0 - then Printf.sprintf "%+7.2fK" (f /. 1000.0) - else Printf.sprintf "%+7.2f " f + if abs_f > 1000000000.0 then + Printf.sprintf "%+7.2fG" (f /. 1000000000.0) + else if abs_f > 1000000.0 then + Printf.sprintf "%+7.2fM" (f /. 1000000.0) + else if abs_f > 1000.0 then + Printf.sprintf "%+7.2fK" (f /. 1000.0) + else + Printf.sprintf "%+7.2f " f in - let pretty_pct num denom = - if denom = 0.0 then "(--N/A--)" else Printf.sprintf "(%+5.1f%%)" (100.0 *. num /. denom) + if denom = 0.0 then + "(--N/A--)" + else + let fraction = num /. denom in + if fraction >= 10.0 (* e.g "( +20.4x)" fits the space whereas (+2040.0%) doesn't *) then + Printf.sprintf "(%+6.1fx)" fraction + else + Printf.sprintf "(%+6.1f%%)" (fraction *. 100.0) in - (* Prints a single row of the table. All but the last column have a fixed width. *) let print_summary_single ~indent key result = - Printf.eprintf "%s %s %s %s %s %s%s\n%!" + let indent = String.make indent ' ' in + Printf.eprintf + "%s %s %s %s %s %s%s\n%!" (pretty_num result.start) (pretty_num result.delta) (pretty_pct result.delta result.start) @@ -881,41 +1022,53 @@ end = struct indent key in - - let header_without_section = " START DELTA HWM DELTA " in + let header_without_section = + " START DELTA HWM DELTA " + in let pre_section_whitespace = String.make (String.length header_without_section) ' ' in - + let print_group ~indent finished_results group_name = + Option.iter (SMap.get group_name finished_results) ~f:(fun group -> + let indent_str = String.make (String.length header_without_section + indent - 2) ' ' in + Printf.eprintf "%s== %s ==\n%!" indent_str group_name; + SMap.iter (print_summary_single ~indent:(indent + 2)) group) + in let print_header label = let label = Printf.sprintf "%s Memory Stats" label in - let header = header_without_section ^ " SECTION" in + let header = header_without_section ^ "SECTION" in let header_len = String.length header + 8 in - let whitespace_len = header_len - (String.length label) in - Printf.eprintf "%s%s%s\n%!" - (String.make ((whitespace_len+1)/2) '=') label (String.make ((whitespace_len)/2) '='); + let whitespace_len = header_len - String.length label in + Printf.eprintf + "%s%s%s\n%!" + (String.make ((whitespace_len + 1) / 2) '=') + label + (String.make (whitespace_len / 2) '='); Printf.eprintf "%s\n%!" header; Printf.eprintf "%s\n%!" (String.make header_len '-') in - let rec print_finished ~indent results = - SMap.iter (print_summary_single ~indent) results.finished_results; - let new_indent = indent ^ " " in - List.iter (fun sub_result -> - Printf.eprintf "%s%s%s\n%!" pre_section_whitespace indent sub_result.finished_label; - print_finished ~indent:new_indent sub_result - ) results.finished_sub_results + if (not (SMap.is_empty results.finished_results)) || results.finished_sub_results <> [] then ( + let header_indent = String.make indent '=' in + Printf.eprintf + "%s%s %s %s\n%!" 
+ pre_section_whitespace + header_indent + results.finished_label + header_indent; + let indent = indent + 2 in + List.iter (print_group ~indent results.finished_results) results.finished_groups; + List.iter + (fun sub_result -> print_finished ~indent sub_result) + results.finished_sub_results + ) in - fun memory -> - if SMap.cardinal memory.finished_results > 0 || memory.finished_sub_results <> [] - then begin + if SMap.cardinal memory.finished_results > 0 || memory.finished_sub_results <> [] then ( print_header memory.finished_label; - print_finished ~indent:"" memory - end + print_finished ~indent:2 memory + ) let merge ~from ~into = - into := { !into with - running_sub_results_rev = from :: !into.running_sub_results_rev; - } + into := { !into with running_sub_results_rev = from :: !into.running_sub_results_rev } end type running = { @@ -929,27 +1082,23 @@ type finished = { } let print_summary profile = - Printf.eprintf "\n%!"; - Timing.print_summary_timing_table profile.finished_timing; Printf.eprintf "\n%!"; Memory.print_summary_memory_table profile.finished_memory; + Printf.eprintf "\n%!"; + Timing.print_summary_timing_table profile.finished_timing; Printf.eprintf "\n%!" let with_profiling_lwt ~label ~should_print_summary f = - let%lwt finished_timing, (finished_memory, ret) = + let%lwt (finished_timing, (finished_memory, ret)) = Timing.with_timing_lwt ~label ~f:(fun running_timing -> - Memory.with_memory_lwt ~label ~f:(fun running_memory -> - let profile = { - running_timing; - running_memory; - } in - (* We don't really need to wrap this in a finalize, because if this throws no one will ever - * read the profiling info, so there's really nothing we need to do in the exceptional case - *) - f profile - ) - ) in - let finished_profile = { finished_timing; finished_memory; } in + Memory.with_memory_lwt ~label ~f:(fun running_memory -> + let profile = { running_timing; running_memory } in + (* We don't really need to wrap this in a finalize, because if this throws no one will ever + * read the profiling info, so there's really nothing we need to do in the exceptional case + *) + f profile)) + in + let finished_profile = { finished_timing; finished_memory } in if should_print_summary then print_summary finished_profile; Lwt.return (finished_profile, ret) @@ -961,19 +1110,34 @@ let with_timer_lwt ?should_print ~timer ~f profile = let legacy_sample_memory ~metric ~value profile = Memory.legacy_sample_memory ~metric ~value profile.running_memory -let sample_memory ~metric ~value profile = - Memory.sample_memory ~metric ~value profile.running_memory +let total_memory_group = "TOTAL" + +let sample_memory ?group ~metric ~value profile = + Memory.sample_memory ~group:total_memory_group ~metric ~value profile.running_memory; + Option.iter group ~f:(fun group -> + Memory.sample_memory ~group ~metric ~value profile.running_memory) + +let add_memory ?group ~metric ~start ~delta ~hwm_delta profile = + Memory.add_memory + ~group:total_memory_group + ~metric + ~start + ~delta + ~hwm_delta + profile.running_memory; + Option.iter group ~f:(fun group -> + Memory.add_memory ~group ~metric ~start ~delta ~hwm_delta profile.running_memory) let to_json_properties profile = [ - "timing", Timing.to_json ~abridged:false profile.finished_timing; - "memory", Memory.to_json ~abridged:false profile.finished_memory; + ("timing", Timing.to_json ~abridged:false profile.finished_timing); + ("memory", Memory.to_json ~abridged:false profile.finished_memory); ] let to_legacy_json_properties profile = [ - "timing", 
Timing.to_json_legacy ~abridged:false profile.finished_timing; - "memory", Memory.to_json ~abridged:false profile.finished_memory; + ("timing", Timing.to_json_legacy ~abridged:false profile.finished_timing); + ("memory", Memory.to_json ~abridged:false profile.finished_memory); ] let get_timing_json_string profile = diff --git a/src/common/profiling/profiling_js.mli b/src/common/profiling/profiling_js.mli index 8f16e134ae2..d8e9df622b3 100644 --- a/src/common/profiling/profiling_js.mli +++ b/src/common/profiling/profiling_js.mli @@ -1,39 +1,49 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type running + type finished -val with_profiling_lwt: - label:string -> - should_print_summary:bool -> - (running -> 'a Lwt.t) -> - (finished * 'a) Lwt.t -val get_profiling_duration: finished -> float +val with_profiling_lwt : + label:string -> should_print_summary:bool -> (running -> 'a Lwt.t) -> (finished * 'a) Lwt.t + +val get_profiling_duration : finished -> float + +val merge : from:finished -> into:running -> unit + +val with_timer_lwt : + ?should_print:bool -> timer:string -> f:(unit -> 'a Lwt.t) -> running -> 'a Lwt.t + +val legacy_sample_memory : metric:string -> value:float -> running -> unit -val merge: from:finished -> into:running -> unit +val sample_memory : ?group:string -> metric:string -> value:float -> running -> unit -val with_timer_lwt: - ?should_print:bool -> - timer:string -> - f:(unit -> 'a Lwt.t) -> +val add_memory : + ?group:string -> + metric:string -> + start:float -> + delta:float -> + hwm_delta:float -> running -> - 'a Lwt.t + unit + +val get_timing_json_string : finished -> string + +val get_abridged_timing_json_string : finished -> string + +val get_abridged_legacy_timing_json_string : finished -> string + +val get_memory_json_string : finished -> string -val legacy_sample_memory: metric:string -> value:float -> running -> unit -val sample_memory: metric:string -> value:float -> running -> unit +val get_abridged_memory_json_string : finished -> string -val get_timing_json_string: finished -> string -val get_abridged_timing_json_string: finished -> string -val get_abridged_legacy_timing_json_string: finished -> string -val get_memory_json_string: finished -> string -val get_abridged_memory_json_string: finished -> string +val to_json_properties : finished -> (string * Hh_json.json) list -val to_json_properties: finished -> (string * Hh_json.json) list -val to_legacy_json_properties: finished -> (string * Hh_json.json) list +val to_legacy_json_properties : finished -> (string * Hh_json.json) list -val print_summary: finished -> unit +val print_summary : finished -> unit diff --git a/src/common/reason.ml b/src/common/reason.ml index c92c16a42f4..51887731b17 100644 --- a/src/common/reason.ml +++ b/src/common/reason.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -45,7 +45,7 @@ module TestID = struct let _current = ref None (* Get current test id. *) - let current() = !_current + let current () = !_current (* Call f on a, installing new_test_id as the current test_id, and restoring the current test_id when done. (See also the function mk_reason below.) 
*) @@ -55,16 +55,30 @@ module TestID = struct let b = f a in _current := test_id; b - end -type reason_desc = - | RNumber | RString | RBoolean | RMixed | REmpty | RAny | RVoid | RNull +type 'loc virtual_reason_desc = + | RTrusted of 'loc virtual_reason_desc + | RPrivate of 'loc virtual_reason_desc + | RAnyExplicit + | RAnyImplicit + | RNumber + | RBigInt + | RString + | RBoolean + | RMixed + | REmpty + | RVoid + | RNull + | RSymbol + | RExports | RNullOrVoid + | RLongStringLit of int (* Max length *) | RStringLit of string | RNumberLit of string + | RBigIntLit of string | RBooleanLit of bool - | RMatchingProp of string * reason_desc + | RMatchingProp of string * 'loc virtual_reason_desc | RObject | RObjectLit | RObjectType @@ -81,7 +95,7 @@ type reason_desc = | RFunction of reason_desc_function | RFunctionType | RFunctionBody - | RFunctionCall of reason_desc + | RFunctionCall of 'loc virtual_reason_desc | RFunctionCallType | RFunctionUnusedArgument | RJSXFunctionCall of string @@ -90,18 +104,18 @@ type reason_desc = | RJSXElement of string option | RJSXText | RFbt - | RUnaryOperator of string * reason_desc - | RBinaryOperator of string * reason_desc * reason_desc - | RLogical of string * reason_desc * reason_desc - | RAnyObject - | RAnyFunction + | RUnaryOperator of string * 'loc virtual_reason_desc + | RBinaryOperator of string * 'loc virtual_reason_desc * 'loc virtual_reason_desc + | RLogical of string * 'loc virtual_reason_desc * 'loc virtual_reason_desc | RTemplateString | RUnknownString + | RUnionEnum | REnum | RGetterSetterProperty | RThis | RThisType | RExistential + | RImplicitInstantiation | RTooFewArgs | RTooFewArgsExpectedRest | RConstructorReturn @@ -120,9 +134,9 @@ type reason_desc = | RDefaultValue | RConstructor | RDefaultConstructor - | RConstructorCall of reason_desc + | RConstructorCall of 'loc virtual_reason_desc | RReturn - | RImplicitReturn of reason_desc + | RImplicitReturn of 'loc virtual_reason_desc | RRegExp | RSuper | RNoSuper @@ -132,10 +146,11 @@ type reason_desc = | RObjectMap | RObjectMapi | RType of string - | RTypeAlias of string * bool (* trust in normalization *) * reason_desc + | RTypeAlias of string * bool (* trust in normalization *) * 'loc virtual_reason_desc | ROpaqueType of string - | RTypeParam of string * (reason_desc * Loc.t) (*reason op *) - * (reason_desc * Loc.t) (* reason tapp *) + | RTypeParam of + string * ('loc virtual_reason_desc * 'loc) * (*reason op *) + ('loc virtual_reason_desc * 'loc) (* reason tapp *) | RTypeof of string | RMethod of string option | RMethodCall of string option @@ -147,14 +162,18 @@ type reason_desc = | RProperty of string option | RPrivateProperty of string | RShadowProperty of string - | RPropertyOf of string * reason_desc + | RMember of { + object_: string; + property: string; + } + | RPropertyOf of string * 'loc virtual_reason_desc | RPropertyIsAString of string | RMissingProperty of string option | RUnknownProperty of string option | RUndefinedProperty of string | RSomeProperty - | RNameProperty of reason_desc - | RMissingAbstract of reason_desc + | RNameProperty of 'loc virtual_reason_desc + | RMissingAbstract of 'loc virtual_reason_desc | RFieldInitializer of string | RUntypedModule of string | RNamedImportedType of string (* module *) * string (* local name *) @@ -164,34 +183,34 @@ type reason_desc = | RDefaultImportedType of string * string | RCode of string | RCustom of string - | RPolyType of reason_desc - | RPolyTest of string * reason_desc - | RExactType of reason_desc - | ROptional of reason_desc - | 
RMaybe of reason_desc - | RRestArray of reason_desc - | RAbstract of reason_desc - | RTypeApp of reason_desc - | RThisTypeApp of reason_desc - | RExtends of reason_desc - | RClass of reason_desc - | RStatics of reason_desc - | RSuperOf of reason_desc - | RFrozen of reason_desc - | RBound of reason_desc - | RVarianceCheck of reason_desc - | RPredicateOf of reason_desc - | RPredicateCall of reason_desc - | RPredicateCallNeg of reason_desc - | RRefined of reason_desc + | RPolyType of 'loc virtual_reason_desc + | RPolyTest of string * 'loc virtual_reason_desc + | RExactType of 'loc virtual_reason_desc + | ROptional of 'loc virtual_reason_desc + | RMaybe of 'loc virtual_reason_desc + | RRestArray of 'loc virtual_reason_desc + | RAbstract of 'loc virtual_reason_desc + | RTypeApp of 'loc virtual_reason_desc + | RTypeAppImplicit of 'loc virtual_reason_desc + | RThisTypeApp of 'loc virtual_reason_desc + | RExtends of 'loc virtual_reason_desc + | RClass of 'loc virtual_reason_desc + | RStatics of 'loc virtual_reason_desc + | RSuperOf of 'loc virtual_reason_desc + | RFrozen of 'loc virtual_reason_desc + | RBound of 'loc virtual_reason_desc + | RVarianceCheck of 'loc virtual_reason_desc + | RPredicateOf of 'loc virtual_reason_desc + | RPredicateCall of 'loc virtual_reason_desc + | RPredicateCallNeg of 'loc virtual_reason_desc + | RRefined of 'loc virtual_reason_desc | RIncompatibleInstantiation of string - | RSpreadOf of reason_desc + | RSpreadOf of 'loc virtual_reason_desc | RObjectPatternRestProp | RArrayPatternRestProp | RCommonJSExports of string | RModule of string | ROptionalChain - | RReactProps | RReactElement of string option | RReactClass @@ -201,218 +220,312 @@ type reason_desc = | RReactState | RReactPropTypes | RReactChildren - | RReactChildrenOrType of reason_desc - | RReactChildrenOrUndefinedOrType of reason_desc + | RReactChildrenOrType of 'loc virtual_reason_desc + | RReactChildrenOrUndefinedOrType of 'loc virtual_reason_desc | RReactSFC + | RReactConfig +[@@deriving eq] and reason_desc_function = | RAsync | RGenerator | RAsyncGenerator | RNormal - -type reason = { + | RUnknown + +type reason_desc = ALoc.t virtual_reason_desc + +let rec map_desc_locs f = function + | ( RAnyExplicit | RAnyImplicit | RNumber | RBigInt | RString | RBoolean | RMixed | REmpty + | RVoid | RNull | RSymbol | RExports | RNullOrVoid | RLongStringLit _ | RStringLit _ + | RNumberLit _ | RBigIntLit _ | RBooleanLit _ | RObject | RObjectLit | RObjectType + | RObjectClassName | RInterfaceType | RArray | RArrayLit | REmptyArrayLit | RArrayType + | RROArrayType | RTupleType | RTupleElement | RTupleOutOfBoundsAccess | RFunction _ + | RFunctionType | RFunctionBody | RFunctionCallType | RFunctionUnusedArgument + | RJSXFunctionCall _ | RJSXIdentifier _ | RJSXElementProps _ | RJSXElement _ | RJSXText | RFbt + ) as r -> + r + | RFunctionCall desc -> RFunctionCall (map_desc_locs f desc) + | RUnaryOperator (s, desc) -> RUnaryOperator (s, map_desc_locs f desc) + | RBinaryOperator (s, d1, d2) -> RBinaryOperator (s, map_desc_locs f d1, map_desc_locs f d2) + | RLogical (s, d1, d2) -> RLogical (s, map_desc_locs f d1, map_desc_locs f d2) + | ( RTemplateString | RUnknownString | RUnionEnum | REnum | RGetterSetterProperty | RThis + | RThisType | RExistential | RImplicitInstantiation | RTooFewArgs | RTooFewArgsExpectedRest + | RConstructorReturn | RNewObject | RUnion | RUnionType | RIntersection | RIntersectionType + | RKeySet | RAnd | RConditional | RPrototype | RObjectPrototype | RFunctionPrototype + | RDestructuring | RDefaultValue 
| RConstructor | RReturn | RDefaultConstructor | RRegExp + | RSuper | RNoSuper | RDummyPrototype | RDummyThis | RTupleMap | RObjectMap | RType _ + | RTypeof _ | RMethod _ | RMethodCall _ | RParameter _ | RRestParameter _ | RIdentifier _ + | RIdentifierAssignment _ | RPropertyAssignment _ | RProperty _ | RPrivateProperty _ + | RShadowProperty _ | RMember _ | RPropertyIsAString _ | RMissingProperty _ + | RUnknownProperty _ | RUndefinedProperty _ | RSomeProperty | RFieldInitializer _ + | RUntypedModule _ | RNamedImportedType _ | RImportStarType _ | RImportStarTypeOf _ + | RImportStar _ | RDefaultImportedType _ | RCode _ | RCustom _ | RIncompatibleInstantiation _ + | ROpaqueType _ | RObjectMapi ) as r -> + r + | RConstructorCall desc -> RConstructorCall (map_desc_locs f desc) + | RImplicitReturn desc -> RImplicitReturn (map_desc_locs f desc) + | RTypeAlias (s, b, d) -> RTypeAlias (s, b, map_desc_locs f d) + | RTypeParam (s, (d1, l1), (d2, l2)) -> + RTypeParam (s, (map_desc_locs f d1, f l1), (map_desc_locs f d2, f l2)) + | RPropertyOf (s, d) -> RPropertyOf (s, map_desc_locs f d) + | RNameProperty desc -> RNameProperty (map_desc_locs f desc) + | RMissingAbstract desc -> RMissingAbstract (map_desc_locs f desc) + | RPolyType desc -> RPolyType (map_desc_locs f desc) + | RPolyTest (s, desc) -> RPolyTest (s, map_desc_locs f desc) + | RExactType desc -> RExactType (map_desc_locs f desc) + | ROptional desc -> ROptional (map_desc_locs f desc) + | RMaybe desc -> RMaybe (map_desc_locs f desc) + | RRestArray desc -> RRestArray (map_desc_locs f desc) + | RAbstract desc -> RAbstract (map_desc_locs f desc) + | RTypeApp desc -> RTypeApp (map_desc_locs f desc) + | RTypeAppImplicit desc -> RTypeAppImplicit (map_desc_locs f desc) + | RThisTypeApp desc -> RThisTypeApp (map_desc_locs f desc) + | RExtends desc -> RExtends (map_desc_locs f desc) + | RClass desc -> RClass (map_desc_locs f desc) + | RStatics desc -> RStatics (map_desc_locs f desc) + | RSuperOf desc -> RSuperOf (map_desc_locs f desc) + | RFrozen desc -> RFrozen (map_desc_locs f desc) + | RBound desc -> RBound (map_desc_locs f desc) + | RVarianceCheck desc -> RVarianceCheck (map_desc_locs f desc) + | RPredicateOf desc -> RPredicateOf (map_desc_locs f desc) + | RPredicateCall desc -> RPredicateCall (map_desc_locs f desc) + | RPredicateCallNeg desc -> RPredicateCallNeg (map_desc_locs f desc) + | RRefined desc -> RRefined (map_desc_locs f desc) + | RSpreadOf desc -> RSpreadOf (map_desc_locs f desc) + | RMatchingProp (s, desc) -> RMatchingProp (s, map_desc_locs f desc) + | RTrusted desc -> RTrusted (map_desc_locs f desc) + | RPrivate desc -> RPrivate (map_desc_locs f desc) + | ( RObjectPatternRestProp | RArrayPatternRestProp | RCommonJSExports _ | RModule _ + | ROptionalChain | RReactProps | RReactElement _ | RReactClass | RReactComponent + | RReactStatics | RReactDefaultProps | RReactState | RReactPropTypes | RReactChildren ) as r -> + r + | RReactChildrenOrType desc -> RReactChildrenOrType (map_desc_locs f desc) + | RReactChildrenOrUndefinedOrType desc -> RReactChildrenOrUndefinedOrType (map_desc_locs f desc) + | (RReactSFC | RReactConfig) as r -> r + +type 'loc virtual_reason = { test_id: int option; derivable: bool; - desc: reason_desc; - loc: ALoc.t; - def_loc_opt: Loc.t option; - annot_loc_opt: Loc.t option; + desc: 'loc virtual_reason_desc; + loc: 'loc; + def_loc_opt: 'loc option; + annot_loc_opt: 'loc option; } +[@@deriving eq] -type t = reason +type reason = ALoc.t virtual_reason -let lexpos file line col = { - Lexing.pos_fname = file; - 
Lexing.pos_lnum = line; - Lexing.pos_bol = 0; - Lexing.pos_cnum = col; -} +type concrete_reason = Loc.t virtual_reason -let diff_range loc = Loc.( - let line1, line2 = loc.start.line, loc._end.line in - (* TODO: Get rid of +1 which is here to ensure same behavior as old code - using Pos.info_pos *) - let start, end_ = loc.start.column + 1, loc._end.column in - (line2 - line1, end_ - start) -) - -let in_range loc range = Loc.( - let line, line1, line2 = loc.start.line, range.start.line, range._end.line in - (line1 < line || (line = line1 && range.start.column <= loc.start.column)) && - (line < line2 || (line = line2 && loc._end.column <= range._end.column)) -) - -let rec patch ll offset lines = function - | [] -> () - | (l,c,str)::insertions -> - let c = if l = ll then c + offset else c in - let del = try Some (int_of_string str) with _ -> None in - let line = lines.(l - 1) in - let shift = match del with - | Some n -> (* delete n chars at l, c *) - lines.(l - 1) <- spf "%s%s" - (string_before line c) (string_after line (c + n)); - -n - | None -> (* insert str at l, c *) - lines.(l - 1) <- spf "%s%s%s" - (string_before line c) str (string_after line c); - String.length str - in - let offset = (if l = ll then offset else 0) + shift in - patch l offset lines insertions - -let do_patch lines insertions = - let lines = Array.of_list lines in - patch 1 0 lines insertions; - String.concat "\n" (Array.to_list lines) - -let string_of_source ?(strip_root=None) = File_key.(function - | Builtins -> "(builtins)" - | LibFile file -> - begin match strip_root with - | Some root -> - let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in - if string_starts_with file root_str - then spf "[LIB] %s" (Files.relative_path root_str file) - else spf "[LIB] %s" (Filename.basename file) - | None -> file - end - | SourceFile file - | JsonFile file - | ResourceFile file -> - begin match strip_root with - | Some root -> - let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in - Files.relative_path root_str file - | None -> - file - end -) - -let string_of_loc_pos loc = Loc.( - let line = loc.start.line in - let start = loc.start.column + 1 in - let end_ = loc._end.column in - if line <= 0 then - "0:0" - else if line = loc._end.line && start = end_ then - spf "%d:%d" line start - else if line != loc._end.line then - spf "%d:%d,%d:%d" line start loc._end.line end_ - else - spf "%d:%d-%d" line start end_ -) +type t = reason -let string_of_loc ?(strip_root=None) loc = Loc.( - match loc.source with +let concretize_equal aloc_tables = equal_virtual_reason (ALoc.concretize_equal aloc_tables) + +let in_range loc range = + Loc.( + let (line, line1, line2) = (loc.start.line, range.start.line, range._end.line) in + (line1 < line || (line = line1 && range.start.column <= loc.start.column)) + && (line < line2 || (line = line2 && loc._end.column <= range._end.column))) + +let string_of_source ?(strip_root = None) = + File_key.( + function + | Builtins -> "(builtins)" + | LibFile file -> + begin + match strip_root with + | Some root -> + let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in + if string_starts_with file root_str then + spf "[LIB] %s" (Files.relative_path root_str file) + else + spf "[LIB] %s" (Filename.basename file) + | None -> file + end + | SourceFile file + | JsonFile file + | ResourceFile file -> + begin + match strip_root with + | Some root -> + let root_str = spf "%s%s" (Path.to_string root) Filename.dir_sep in + Files.relative_path root_str file + | None -> file + end) 
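As a rough illustration of the `string_of_source` behaviour touched by this hunk (root stripping and the `[LIB]` prefix), here is a small sketch that is not part of the patch; the root and file paths are hypothetical, and it assumes the usual `Path.make` constructor from the shared Path module.

(* Editor's sketch, not part of the original diff: expected rendering of file
 * keys by string_of_source when a project root is supplied. Paths are made up. *)
let _example_string_of_source () =
  let root = Path.make "/repo" in
  (* Library files under the root are shown relative to it with a "[LIB] " prefix. *)
  let lib = string_of_source ~strip_root:(Some root) (File_key.LibFile "/repo/flowlib/core.js") in
  (* Ordinary source files are shown relative to the root with no prefix. *)
  let src = string_of_source ~strip_root:(Some root) (File_key.SourceFile "/repo/src/app.js") in
  (* With no root to strip, the raw path is returned unchanged. *)
  let raw = string_of_source (File_key.SourceFile "/repo/src/app.js") in
  (lib, src, raw)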
+ +let string_of_loc ?(strip_root = None) loc = + Loc.( + match loc.source with + | None + | Some File_key.Builtins -> + "" + | Some file -> spf "%s:%s" (string_of_source ~strip_root file) (Loc.to_string_no_source loc)) + +let string_of_aloc ?(strip_root = None) aloc = + match ALoc.source aloc with | None - | Some File_key.Builtins -> "" - | Some file -> - spf "%s:%s" (string_of_source ~strip_root file) (string_of_loc_pos loc) -) - -let json_of_loc_props ?(strip_root=None) loc = Hh_json.(Loc.( - [ - "source", ( - match loc.source with - | Some x -> JSON_String (string_of_source ~strip_root x) - | None -> JSON_Null - ); - "type", (match loc.source with - | Some File_key.LibFile _ -> JSON_String "LibFile" - | Some File_key.SourceFile _ -> JSON_String "SourceFile" - | Some File_key.JsonFile _ -> JSON_String "JsonFile" - | Some File_key.ResourceFile _ -> JSON_String "ResourceFile" + | Some File_key.Builtins -> + "" + | Some file -> spf "%s:%s" (string_of_source ~strip_root file) (ALoc.to_string_no_source aloc) + +let json_of_source ?(strip_root = None) = + Hh_json.( + function + | Some x -> JSON_String (string_of_source ~strip_root x) + | None -> JSON_Null) + +let json_source_type_of_source = + Hh_json.( + function + | Some (File_key.LibFile _) -> JSON_String "LibFile" + | Some (File_key.SourceFile _) -> JSON_String "SourceFile" + | Some (File_key.JsonFile _) -> JSON_String "JsonFile" + | Some (File_key.ResourceFile _) -> JSON_String "ResourceFile" | Some File_key.Builtins -> JSON_String "Builtins" - | None -> JSON_Null); - "start", JSON_Object [ - "line", int_ loc.start.line; - (* It's not ideal that we use a different column numbering system here - * versus other places (like the estree translator) *) - "column", int_ (loc.start.column + 1); - "offset", int_ loc.start.offset; - ]; - "end", JSON_Object [ - "line", int_ loc._end.line; - "column", int_ loc._end.column; - "offset", int_ loc._end.offset; - ]; - ] -)) - -let json_of_loc ?strip_root loc = Hh_json.( - JSON_Object (json_of_loc_props ?strip_root loc) -) + | None -> JSON_Null) + +let json_of_loc_props ?(strip_root = None) ?(catch_offset_errors = false) ~offset_table loc = + Hh_json.( + Loc.( + let offset_entry offset_table pos = + let offset = + try int_ (Offset_utils.offset offset_table pos) + with Offset_utils.Offset_lookup_failed _ as exn -> + if catch_offset_errors then + JSON_Null + else + raise exn + in + [("offset", offset)] + in + let start = + [ + ("line", int_ loc.start.line); + (* It's not ideal that we use a different column numbering system here + * versus other places (like the estree translator) *) + ("column", int_ (loc.start.column + 1)); + ] + @ + match offset_table with + | None -> [] + | Some table -> offset_entry table loc.start + in + let end_ = + [("line", int_ loc._end.line); ("column", int_ loc._end.column)] + @ + match offset_table with + | None -> [] + | Some table -> offset_entry table loc._end + in + [ + ("source", json_of_source ~strip_root loc.source); + ("type", json_source_type_of_source loc.source); + ("start", JSON_Object start); + ("end", JSON_Object end_); + ])) + +let json_of_loc ?strip_root ?catch_offset_errors ~offset_table loc = + Hh_json.(JSON_Object (json_of_loc_props ?strip_root ?catch_offset_errors ~offset_table loc)) (* reason constructors, accessors, etc. 
*) -let mk_reason_with_test_id test_id desc loc def_loc_opt annot_loc_opt = { - test_id; - derivable = false; - desc; - loc; - def_loc_opt; - annot_loc_opt; -} +let mk_reason_with_test_id test_id desc loc def_loc_opt annot_loc_opt = + { test_id; derivable = false; desc; loc; def_loc_opt; annot_loc_opt } + +let map_reason_locs f reason = + let { def_loc_opt; annot_loc_opt; loc; desc; test_id; derivable } = reason in + let loc' = f loc in + let def_loc_opt' = Option.map ~f def_loc_opt in + let annot_loc_opt' = Option.map ~f annot_loc_opt in + let desc' = map_desc_locs f desc in + { + def_loc_opt = def_loc_opt'; + annot_loc_opt = annot_loc_opt'; + loc = loc'; + desc = desc'; + test_id; + derivable; + } (* The current test_id is included in every new reason. *) -let mk_reason desc loc = - mk_reason_with_test_id (TestID.current ()) desc (ALoc.of_loc loc) None None +let mk_reason desc aloc = mk_reason_with_test_id (TestID.current ()) desc aloc None None (* Lift a string to a reason. Usually used as a dummy reason. *) -let locationless_reason desc = - mk_reason_with_test_id None desc (ALoc.of_loc Loc.none) None None - -let func_reason {Ast.Function.async; generator; _} = - let func_desc = match async, generator with - | true, true -> RAsyncGenerator - | true, false -> RAsync - | false, true -> RGenerator - | false, false -> RNormal +let locationless_reason desc = mk_reason_with_test_id None desc ALoc.none None None + +let func_reason ~async ~generator = + let func_desc = + match (async, generator) with + | (true, true) -> RAsyncGenerator + | (true, false) -> RAsync + | (false, true) -> RGenerator + | (false, false) -> RNormal in mk_reason (RFunction func_desc) +let poly_loc_of_reason r = r.loc + +let aloc_of_reason = poly_loc_of_reason -let aloc_of_reason r = r.loc +let loc_of_reason = poly_loc_of_reason (* TODO return ALoc *) -let def_loc_of_reason r = +let def_poly_loc_of_reason r = match r.def_loc_opt with | Some loc -> loc - | None -> ALoc.to_loc @@ aloc_of_reason r + | None -> aloc_of_reason r -let annot_loc_of_reason r = - r.annot_loc_opt +let def_aloc_of_reason = def_poly_loc_of_reason + +let def_loc_of_reason = def_poly_loc_of_reason + +let annot_poly_loc_of_reason r = r.annot_loc_opt + +let annot_aloc_of_reason = annot_poly_loc_of_reason + +let annot_loc_of_reason = annot_poly_loc_of_reason let function_desc_prefix = function | RAsync -> "async " | RGenerator -> "generator " | RAsyncGenerator -> "async generator " | RNormal -> "" + | RUnknown -> "unknown " let prettify_react_util s = let length = String.length s in - if length < 6 then s - else if ((String.sub s 0 6) = "React$") then ("React." ^ (String.sub s 6 (length - 6))) - else s + if length < 6 then + s + else if String.sub s 0 6 = "React$" then + "React." 
^ String.sub s 6 (length - 6) + else + s let rec string_of_desc = function + | RTrusted r -> spf "trusted %s" (string_of_desc r) + | RPrivate r -> spf "private %s" (string_of_desc r) | RNumber -> "number" - | RString -> "string" + | RBigInt -> "bigint" + | RString + | RLongStringLit _ -> + "string" | RBoolean -> "boolean" | RMixed -> "mixed" | REmpty -> "empty" - | RAny -> "any" + | RAnyImplicit -> "implicit 'any'" + | RAnyExplicit -> "explicit 'any'" | RVoid -> "undefined" | RNull -> "null" | RNullOrVoid -> "null or undefined" + | RSymbol -> "symbol" + | RExports -> "exports" | RStringLit "" -> "empty string" | RStringLit x -> spf "string literal `%s`" x | RNumberLit x -> spf "number literal `%s`" x + | RBigIntLit x -> spf "bigint literal `%s`" x | RBooleanLit b -> spf "boolean literal `%s`" (string_of_bool b) - | RMatchingProp (k, v) -> - spf "object with property `%s` that matches %s" k (string_of_desc v) + | RMatchingProp (k, v) -> spf "object with property `%s` that matches %s" k (string_of_desc v) | RObject -> "object" | RObjectLit -> "object literal" | RObjectType -> "object type" @@ -441,24 +554,22 @@ let rec string_of_desc = function | RJSXElementProps _ -> "props" | RJSXText -> spf "JSX text" | RFbt -> "``" - | RUnaryOperator (operator, value) -> - spf "%s %s" operator (string_of_desc value) + | RUnaryOperator (operator, value) -> spf "%s %s" operator (string_of_desc value) | RBinaryOperator (operator, left, right) -> spf "%s %s %s" (string_of_desc left) operator (string_of_desc right) | RLogical (operator, left, right) -> spf "%s %s %s" (string_of_desc left) operator (string_of_desc right) - | RAnyObject -> "any object" - | RAnyFunction -> "any function" | RTemplateString -> "template string" | RUnknownString -> "some string with unknown value" - | REnum -> "enum" + | RUnionEnum -> "enum" + | REnum -> "Enums are not yet implemented." | RGetterSetterProperty -> "getter/setter property" | RThis -> "this" | RThisType -> "`this` type" | RExistential -> "existential" + | RImplicitInstantiation -> "implicit instantiation" | RTooFewArgs -> "undefined (too few arguments)" - | RTooFewArgsExpectedRest -> - "undefined (too few arguments, expected default/rest parameters)" + | RTooFewArgsExpectedRest -> "undefined (too few arguments, expected default/rest parameters)" | RConstructorReturn -> "constructor return" | RNewObject -> "new object" | RUnion -> "union" @@ -506,6 +617,7 @@ let rec string_of_desc = function | RProperty (Some x) -> spf "property `%s`" x | RProperty None -> "computed property" | RPrivateProperty x -> spf "property `#%s`" x + | RMember { object_; property } -> spf "`%s%s`" object_ property | RPropertyAssignment (Some x) -> spf "assignment of property `%s`" x | RPropertyAssignment None -> "assignment of computed property/element" | RShadowProperty x -> spf ".%s" x @@ -519,12 +631,11 @@ let rec string_of_desc = function | RUndefinedProperty x -> spf "undefined property `%s`" x | RSomeProperty -> "some property" | RNameProperty d -> spf "property `name` of %s" (string_of_desc d) - | RMissingAbstract d -> - spf "undefined. Did you forget to declare %s?" (string_of_desc d) + | RMissingAbstract d -> spf "undefined. Did you forget to declare %s?" 
(string_of_desc d) | RFieldInitializer x -> spf "field initializer for `%s`" x | RUntypedModule m -> spf "import from untyped module `%s`" m | RNamedImportedType (m, _) -> spf "Named import from module `%s`" m - | RImportStarType n -> spf "import type * as %s" n + | RImportStarType n -> spf "import type * as %s" n | RImportStarTypeOf n -> spf "import typeof * as %s" n | RImportStar n -> spf "import * as %s" n | RCode x -> "`" ^ x ^ "`" @@ -537,13 +648,14 @@ let rec string_of_desc = function | ROptional d -> spf "optional %s" (string_of_desc d) | RMaybe d -> let rec loop = function - | RMaybe d -> loop d - | d -> d + | RMaybe d -> loop d + | d -> d in spf "nullable %s" (string_of_desc (loop d)) | RRestArray _ -> "rest array" | RAbstract d -> spf "abstract %s" (string_of_desc d) | RTypeApp d -> string_of_desc d + | RTypeAppImplicit d -> string_of_desc d | RThisTypeApp d -> spf "this instantiation of %s" (string_of_desc d) | RExtends d -> spf "extends %s" (string_of_desc d) | RClass d -> spf "class %s" (string_of_desc d) @@ -554,8 +666,7 @@ let rec string_of_desc = function | RVarianceCheck d -> spf "variance check: %s" (string_of_desc d) | RPredicateOf d -> spf "predicate of %s" (string_of_desc d) | RPredicateCall d -> spf "predicate call to %s" (string_of_desc d) - | RPredicateCallNeg d -> - spf "negation of predicate call to %s" (string_of_desc d) + | RPredicateCallNeg d -> spf "negation of predicate call to %s" (string_of_desc d) | RRefined d -> spf "refined %s" (string_of_desc d) | RIncompatibleInstantiation x -> spf "`%s`" x | RSpreadOf d -> spf "spread of %s" (string_of_desc d) @@ -564,7 +675,6 @@ let rec string_of_desc = function | RCommonJSExports x -> spf "module `%s`" x | RModule x -> spf "module `%s`" x | ROptionalChain -> "optional chain" - | RReactProps -> "props" | RReactElement x -> (match x with @@ -577,61 +687,52 @@ let rec string_of_desc = function | RReactState -> "state of React component" | RReactPropTypes -> "propTypes of React component" | RReactChildren -> "children array" - | RReactChildrenOrType desc -> - spf "children array or %s" (string_of_desc desc) - | RReactChildrenOrUndefinedOrType desc -> - spf "children array or %s" (string_of_desc desc) + | RReactChildrenOrType desc -> spf "children array or %s" (string_of_desc desc) + | RReactChildrenOrUndefinedOrType desc -> spf "children array or %s" (string_of_desc desc) | RReactSFC -> "React stateless functional component" + | RReactConfig -> "config of React component" -let string_of_reason ?(strip_root=None) r = - let spos = string_of_loc ~strip_root (aloc_of_reason r |> ALoc.to_loc) in +let string_of_reason ?(strip_root = None) r = + let spos = string_of_aloc ~strip_root (aloc_of_reason r) in let desc = string_of_desc r.desc in - if spos = "" - then desc - else ( - if desc = "" - then spos - else spf "%s:\n%s" spos desc - ) - -let json_of_reason ?(strip_root=None) r = Hh_json.( - JSON_Object ([ - "pos", json_of_loc ~strip_root (aloc_of_reason r |> ALoc.to_loc); - "desc", JSON_String (string_of_desc r.desc) - ]) -) - -let dump_reason ?(strip_root=None) r = - spf "%s: %S%s" - (string_of_loc ~strip_root (aloc_of_reason r |> ALoc.to_loc)) + if spos = "" then + desc + else if desc = "" then + spos + else + spf "%s:\n%s" spos desc + +let dump_reason ?(strip_root = None) r = + spf + "%s: %S%s" + (string_of_aloc ~strip_root (aloc_of_reason r)) (string_of_desc r.desc) - begin match r.test_id with - | Some n -> spf " (test %d)" n - | None -> "" + begin + match r.test_id with + | Some n -> spf " (test %d)" n + | None -> "" 
end let desc_of_reason = let rec loop = function - | RTypeAlias (_, _, desc) - | RPolyTest (_, desc) - -> loop desc - | desc - -> desc + | RTypeAlias (_, _, desc) + | RPolyTest (_, desc) -> + loop desc + | desc -> desc in - fun ?(unwrap=true) r -> - if not unwrap then r.desc else loop r.desc + fun ?(unwrap = true) r -> + if not unwrap then + r.desc + else + loop r.desc -let internal_name name = - spf ".%s" name +let internal_name name = spf ".%s" name -let is_internal_name name = - String.length name >= 1 && name.[0] = '.' +let is_internal_name name = String.length name >= 1 && name.[0] = '.' -let internal_module_name name = - spf ".$module__%s" name +let internal_module_name name = spf ".$module__%s" name -let is_internal_module_name name = - string_starts_with name ".$module__" +let is_internal_module_name name = string_starts_with name ".$module__" let uninternal_module_name name = if is_internal_module_name name then @@ -639,9 +740,6 @@ let uninternal_module_name name = else name -let internal_pattern_name loc = - spf ".$pattern__%s" (string_of_loc loc) - (* Instantiable reasons identify tvars that are created for the purpose of instantiation: they are fresh rather than shared, and should become types that flow to them. We assume these characteristics when performing @@ -650,7 +748,9 @@ let is_instantiable_reason r = match desc_of_reason r with | RTypeParam _ | RThisType - | RExistential -> true + | RExistential -> + true + | RImplicitInstantiation -> true | _ -> false (* TODO: Property accesses create unresolved tvars to hold results, even when @@ -670,22 +770,34 @@ let is_instantiable_reason r = *) let is_constant_reason r = match desc_of_reason r with - | RIdentifier x + | RIdentifier x -> + (* A single-letter variable name which happens to be upper-case should not + be confused with a constant reason. This should really be further + restricted to `const`-declared identifiers in scope. Or, better yet, + removing this heuristic entirely. 
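     As a rough illustration of this heuristic (an illustrative sketch only,
     using [ALoc.none] as a dummy location):
     {[
       let loc = ALoc.none in
       is_constant_reason (mk_reason (RIdentifier "FOO_BAR") loc)          (* true *)
       && not (is_constant_reason (mk_reason (RIdentifier "fooBar") loc))  (* false: lowercase chars *)
       && not (is_constant_reason (mk_reason (RIdentifier "X") loc))       (* false: single character *)
     ]}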
*) + let len = String.length x in + if len < 2 then + false + else + is_not_lowercase x 0 (len - 1) | RProperty (Some x) | RPrivateProperty x - | RPropertyOf (x,_) + | RMember { object_ = _; property = x } + | RPropertyOf (x, _) | RPropertyIsAString x -> let len = String.length x in - if len = 0 - then false - else is_not_lowercase x 0 (len - 1) + if len = 0 then + false + else + is_not_lowercase x 0 (len - 1) | _ -> false let is_typemap_reason r = match desc_of_reason r with | RTupleMap | RObjectMap - | RObjectMapi -> true + | RObjectMapi -> + true | _ -> false let is_calltype_reason r = @@ -693,7 +805,8 @@ let is_calltype_reason r = | RTupleMap | RObjectMap | RObjectMapi - | RFunctionCallType -> true + | RFunctionCallType -> + true | _ -> false let is_literal_object_reason r = @@ -706,81 +819,79 @@ let is_literal_object_reason r = | RStatics (RFunction _) | RReactProps | RReactElement _ - | RJSXElementProps _ -> true + | RJSXElementProps _ -> + true | _ -> false let is_literal_array_reason r = match desc_of_reason r with | RArrayLit - | REmptyArrayLit -> true + | REmptyArrayLit -> + true | _ -> false -let is_derivable_reason r = - r.derivable +let is_derivable_reason r = r.derivable -let derivable_reason r = - { r with derivable = true } +let derivable_reason r = { r with derivable = true } let builtin_reason desc = { Loc.none with Loc.source = Some File_key.Builtins } + |> ALoc.of_loc |> mk_reason desc |> derivable_reason -let is_builtin_reason r = - r.loc - |> ALoc.to_loc - |> Loc.source - |> (=) (Some File_key.Builtins) +let is_builtin_reason f r = r.loc |> f |> ( = ) (Some File_key.Builtins) let is_lib_reason r = - r.loc - |> ALoc.to_loc - |> Loc.source - |> Option.value_map ~default:false ~f:File_key.is_lib_file + r.loc |> ALoc.source |> Option.value_map ~default:false ~f:File_key.is_lib_file -let is_blamable_reason r = - not Loc.(ALoc.to_loc r.loc = none || is_lib_reason r) - -let reasons_overlap r1 r2 = - let r1_loc, r2_loc = ALoc.to_loc r1.loc, ALoc.to_loc r2.loc in - Loc.contains r1_loc r2_loc +let is_blamable_reason r = not (r.loc = ALoc.none || is_lib_reason r) (* reason transformers: *) (* returns reason with new description and position of original *) -let replace_reason ?(keep_def_loc=false) f r = - let def_loc_opt = if keep_def_loc then r.def_loc_opt else None in +let update_desc_reason f r = mk_reason_with_test_id - (TestID.current ()) + r.test_id (f (desc_of_reason ~unwrap:false r)) - (aloc_of_reason r) - def_loc_opt - (annot_loc_of_reason r) - -let replace_reason_const ?(keep_def_loc=false) desc r = - let (def_loc_opt, annot_loc_opt) = if keep_def_loc - then (r.def_loc_opt, r.annot_loc_opt) - else (None, None) - in - mk_reason_with_test_id r.test_id desc r.loc def_loc_opt annot_loc_opt + (poly_loc_of_reason r) + r.def_loc_opt + (annot_poly_loc_of_reason r) + +let update_desc_new_reason f r = + mk_reason_with_test_id + r.test_id + (f (desc_of_reason ~unwrap:false r)) + (poly_loc_of_reason r) + None + None + +let replace_desc_reason desc r = + mk_reason_with_test_id r.test_id desc r.loc r.def_loc_opt r.annot_loc_opt + +let replace_desc_new_reason desc r = mk_reason_with_test_id r.test_id desc r.loc None None (* returns reason with new location and description of original *) -let repos_reason loc ?annot_loc reason = - let def_loc_opt = - let def_loc = def_loc_of_reason reason in - if loc = def_loc then None else Some def_loc +let repos_reason loc ?(annot_loc : 'loc option) reason = + let def_aloc_opt = + let def_loc = def_poly_loc_of_reason reason in + if loc = def_loc 
then + None + else + Some def_loc in - let annot_loc_opt = match annot_loc with - | Some annot_loc -> Some annot_loc - | None -> reason.annot_loc_opt + let annot_aloc_opt = + match annot_loc with + | Some annot_loc -> Some annot_loc + | None -> reason.annot_loc_opt in - mk_reason_with_test_id reason.test_id reason.desc (ALoc.of_loc loc) def_loc_opt annot_loc_opt + mk_reason_with_test_id reason.test_id reason.desc loc def_aloc_opt annot_aloc_opt -let annot_reason reason = - {reason with annot_loc_opt = Some (ALoc.to_loc reason.loc)} +let annot_reason reason = { reason with annot_loc_opt = Some reason.loc } -module ReasonMap = MyMap.Make(struct +module ReasonMap = MyMap.Make (struct type t = reason + let compare = Pervasives.compare end) @@ -804,264 +915,326 @@ end) * access. However, we don't need to wrap o in o.p. In o[1 + 2] we don't need to * wrap 1 + 2 since it is already wrapped in a sense. *) let rec code_desc_of_expression ~wrap (_, x) = -let do_wrap = if wrap then (fun s -> "(" ^ s ^ ")") else (fun s -> s) in -Ast.Expression.(match x with -| Array { Array.elements = []; _ } -> "[]" -| Array _ -> "[...]" -| ArrowFunction { Ast.Function.body = Ast.Function.BodyExpression ((_, Object _) as e); _ } -> - do_wrap ("(...) => (" ^ code_desc_of_expression ~wrap:false e ^ ")") -| ArrowFunction { Ast.Function.body = Ast.Function.BodyExpression e; _ } -> - do_wrap ("(...) => " ^ code_desc_of_expression ~wrap:false e) -| ArrowFunction _ -> - do_wrap "(...) => { ... }" -| Assignment { Assignment.left; operator; right } -> - let left = code_desc_of_pattern left in - let right = code_desc_of_expression ~wrap:false right in - let operator = Assignment.(match operator with - | Assign -> "=" - | PlusAssign -> "+=" - | MinusAssign -> "-=" - | MultAssign -> "*=" - | ExpAssign -> "**=" - | DivAssign -> "/=" - | ModAssign -> "%=" - | LShiftAssign -> "<<=" - | RShiftAssign -> ">>=" - | RShift3Assign -> ">>>=" - | BitOrAssign -> "|=" - | BitXorAssign -> "^=" - | BitAndAssign -> "&=" - ) in - do_wrap (left ^ " " ^ operator ^ " " ^ right) -| Binary { Binary.operator; left; right } -> - do_wrap (code_desc_of_operation left (`Binary operator) right) -| Call { Call.callee; targs; arguments } -> - let targs = match targs with - | None -> "" - | Some (_, []) -> "<>" - | Some (_, _::_) -> "<...>" - in - let args = match arguments with - | [] -> "()" - | _::_ -> "(...)" - in - (code_desc_of_expression ~wrap:true callee) ^ targs ^ args -| Class _ -> "class { ... }" -| Conditional { Conditional.test; consequent; alternate } -> - let wrap_test = match test with _, Conditional _ -> true | _ -> false in - do_wrap ( - (code_desc_of_expression ~wrap:wrap_test test) ^ " ? " ^ - (code_desc_of_expression ~wrap:false consequent) ^ " : " ^ - (code_desc_of_expression ~wrap:false alternate) - ) -| Function _ -> "function () { ... }" -| Identifier (_, x) -> x -| Import x -> "import(" ^ code_desc_of_expression ~wrap:false x ^ ")" -| JSXElement x -> code_desc_of_jsx_element x -| JSXFragment _ -> "<>..." -| Ast.Expression.Literal x -> code_desc_of_literal x -| Logical { Logical.operator; left; right } -> - do_wrap (code_desc_of_operation left (`Logical operator) right) -| Member { Member._object; property; computed = _ } -> Member.( - let o = code_desc_of_expression ~wrap:true _object in - o ^ (match property with - | PropertyIdentifier (_, x) -> "." 
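(* A rough illustration of the ~wrap flag described in the comment above (an
   illustrative sketch only; [assign] is a hypothetical AST value for the
   expression `x = y`, not something defined in this file):
   {[
     code_desc_of_expression ~wrap:false assign = "x = y"
     && code_desc_of_expression ~wrap:true assign = "(x = y)"
   ]}
   Member objects are rendered with ~wrap:true, so the member expression
   `(x = y).p` is described as "(x = y).p", while identifiers and literals
   are never parenthesized. *)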
^ x - | PropertyPrivateName (_, (_, x)) -> ".#" ^ x - | PropertyExpression x -> "[" ^ code_desc_of_expression ~wrap:false x ^ "]" - )) -| MetaProperty { MetaProperty.meta = (_, o); property = (_, p) } -> o ^ "." ^ p -| New { New.callee; targs; arguments } -> - let targs = match targs with - | None -> "" - | Some (_, []) -> "<>" - | Some (_, _::_) -> "<...>" + let do_wrap = + if wrap then + fun s -> + "(" ^ s ^ ")" + else + fun s -> + s in - let args = match arguments with - | [] -> "()" - | _::_ -> "(...)" - in - "new " ^ (code_desc_of_expression ~wrap:true callee) ^ targs ^ args -| Object _ -> "{...}" -| OptionalCall { OptionalCall. - call = { Call.callee; targs; arguments }; - optional; - } -> - let targ_string = match targs with - | None -> "" - | Some (_, []) -> "<>" - | Some (_, _::_) -> "<...>" - in - let arg_string = begin match arguments with - | [] -> "()" - | _ -> "(...)" - end in - code_desc_of_expression ~wrap:true callee ^ - (if optional then "?." else "") ^ - targ_string ^ arg_string -| OptionalMember { OptionalMember. - member = { Member._object; property; computed = _ }; - optional; - } -> - let o = code_desc_of_expression ~wrap:true _object in - o ^ Member.(match property with - | PropertyIdentifier (_, x) -> (if optional then "?." else ".") ^ x - | PropertyPrivateName (_, (_, x)) -> (if optional then "?.#" else ".#") ^ x - | PropertyExpression x -> - (if optional then "?.[" else "[") ^ code_desc_of_expression ~wrap:false x ^ "]" - ) -| Sequence { Sequence.expressions } -> - code_desc_of_expression ~wrap (List.hd (List.rev expressions)) -| Super -> "super" -| TaggedTemplate { TaggedTemplate.tag; _ } -> code_desc_of_expression ~wrap:true tag ^ "`...`" -| TemplateLiteral _ -> "`...`" -| This -> "this" -| TypeCast { TypeCast.expression; _ } -> code_desc_of_expression ~wrap expression -| Unary { Unary.operator; prefix; argument } -> - let x = code_desc_of_expression ~wrap:true argument in - let op = Unary.(match operator with - | Minus -> "-" - | Plus -> "+" - | Not -> "!" - | BitNot -> "~" - | Typeof -> "typeof " - | Void -> "void " - | Delete -> "delete " - | Await -> "await " - ) in - do_wrap (if prefix then op ^ x else x ^ op) -| Update { Update.operator; prefix; argument } -> - let x = code_desc_of_expression ~wrap:true argument in - let op = Update.(match operator with - | Increment -> "++" - | Decrement -> "--" - ) in - do_wrap (if prefix then op ^ x else x ^ op) -| Yield { Yield.argument = Some x; delegate = false } -> - do_wrap ("yield " ^ code_desc_of_expression ~wrap:false x) -| Yield { Yield.argument = Some x; delegate = true } -> - do_wrap ("yield* " ^ code_desc_of_expression ~wrap:false x) -| Yield { Yield.argument = None; delegate = false } -> "yield" -| Yield { Yield.argument = None; delegate = true } -> "yield*" - -(* TODO *) -| Comprehension _ -| Generator _ - -> do_wrap "..." -) - -and code_desc_of_pattern (_, x) = Ast.Pattern.(match x with -| Object _ -> "{...}" -| Array _ -> "[...]" -| Assignment { Assignment.left; right } -> - code_desc_of_pattern left ^ " = " ^ code_desc_of_expression ~wrap:false right -| Identifier { Identifier.name = (_, name); _ } -> name -| Expression x -> code_desc_of_expression ~wrap:false x -) + Ast.Expression.( + match x with + | Array { Array.elements = []; _ } -> "[]" + | Array _ -> "[...]" + | ArrowFunction { Ast.Function.body = Ast.Function.BodyExpression ((_, Object _) as e); _ } -> + do_wrap ("(...) 
=> (" ^ code_desc_of_expression ~wrap:false e ^ ")") + | ArrowFunction { Ast.Function.body = Ast.Function.BodyExpression e; _ } -> + do_wrap ("(...) => " ^ code_desc_of_expression ~wrap:false e) + | ArrowFunction _ -> do_wrap "(...) => { ... }" + | Assignment { Assignment.left; operator; right } -> + let left = code_desc_of_pattern left in + let right = code_desc_of_expression ~wrap:false right in + let operator = + match operator with + | None -> "=" + | Some op -> Flow_ast_utils.string_of_assignment_operator op + in + do_wrap (left ^ " " ^ operator ^ " " ^ right) + | Binary { Binary.operator; left; right } -> + do_wrap (code_desc_of_operation left (`Binary operator) right) + | Call { Call.callee; targs; arguments } -> + let targs = + match targs with + | None -> "" + | Some (_, []) -> "<>" + | Some (_, _ :: _) -> "<...>" + in + let args = + match arguments with + | [] -> "()" + | _ :: _ -> "(...)" + in + code_desc_of_expression ~wrap:true callee ^ targs ^ args + | Class _ -> "class { ... }" + | Conditional { Conditional.test; consequent; alternate } -> + let wrap_test = + match test with + | (_, Conditional _) -> true + | _ -> false + in + do_wrap + ( code_desc_of_expression ~wrap:wrap_test test + ^ " ? " + ^ code_desc_of_expression ~wrap:false consequent + ^ " : " + ^ code_desc_of_expression ~wrap:false alternate ) + | Function _ -> "function () { ... }" + | Identifier (_, { Ast.Identifier.name = x; comments = _ }) -> x + | Import x -> "import(" ^ code_desc_of_expression ~wrap:false x ^ ")" + | JSXElement x -> code_desc_of_jsx_element x + | JSXFragment _ -> "<>..." + | Ast.Expression.Literal x -> code_desc_of_literal x + | Logical { Logical.operator; left; right } -> + do_wrap (code_desc_of_operation left (`Logical operator) right) + | Member { Member._object; property } -> + let o = code_desc_of_expression ~wrap:true _object in + let p = code_desc_of_property ~optional:false property in + o ^ p + | MetaProperty + { + MetaProperty.meta = (_, { Ast.Identifier.name = o; comments = _ }); + property = (_, { Ast.Identifier.name = p; comments = _ }); + } -> + o ^ "." ^ p + | New { New.callee; targs; arguments; comments = _ } -> + let targs = + match targs with + | None -> "" + | Some (_, []) -> "<>" + | Some (_, _ :: _) -> "<...>" + in + let args = + match arguments with + | [] -> "()" + | _ :: _ -> "(...)" + in + "new " ^ code_desc_of_expression ~wrap:true callee ^ targs ^ args + | Object _ -> "{...}" + | OptionalCall { OptionalCall.call = { Call.callee; targs; arguments }; optional } -> + let targ_string = + match targs with + | None -> "" + | Some (_, []) -> "<>" + | Some (_, _ :: _) -> "<...>" + in + let arg_string = + match arguments with + | [] -> "()" + | _ -> "(...)" + in + code_desc_of_expression ~wrap:true callee + ^ ( if optional then + "?." 
+ else + "" ) + ^ targ_string + ^ arg_string + | OptionalMember { OptionalMember.member = { Member._object; property }; optional } -> + let o = code_desc_of_expression ~wrap:true _object in + let p = code_desc_of_property ~optional property in + o ^ p + | Sequence { Sequence.expressions } -> + code_desc_of_expression ~wrap (List.hd (List.rev expressions)) + | Super -> "super" + | TaggedTemplate { TaggedTemplate.tag; _ } -> code_desc_of_expression ~wrap:true tag ^ "`...`" + | TemplateLiteral _ -> "`...`" + | This -> "this" + | TypeCast { TypeCast.expression; _ } -> code_desc_of_expression ~wrap expression + | Unary { Unary.operator; argument; comments = _ } -> + let x = code_desc_of_expression ~wrap:true argument in + let op = + Unary.( + match operator with + | Minus -> "-" + | Plus -> "+" + | Not -> "!" + | BitNot -> "~" + | Typeof -> "typeof " + | Void -> "void " + | Delete -> "delete " + | Await -> "await ") + in + do_wrap (op ^ x) + | Update { Update.operator; prefix; argument } -> + let x = code_desc_of_expression ~wrap:true argument in + let op = + Update.( + match operator with + | Increment -> "++" + | Decrement -> "--") + in + do_wrap + ( if prefix then + op ^ x + else + x ^ op ) + | Yield { Yield.argument = Some x; delegate = false; _ } -> + do_wrap ("yield " ^ code_desc_of_expression ~wrap:false x) + | Yield { Yield.argument = Some x; delegate = true; _ } -> + do_wrap ("yield* " ^ code_desc_of_expression ~wrap:false x) + | Yield { Yield.argument = None; delegate = false; _ } -> "yield" + | Yield { Yield.argument = None; delegate = true; _ } -> "yield*" + (* TODO *) + | Comprehension _ + | Generator _ -> + do_wrap "...") + +and code_desc_of_pattern (_, x) = + Ast.Pattern.( + match x with + | Object _ -> "{...}" + | Array _ -> "[...]" + | Identifier { Identifier.name = (_, { Ast.Identifier.name; comments = _ }); _ } -> name + | Expression x -> code_desc_of_expression ~wrap:false x) (* Implementation of operator flattening logic lifted from Prettier: * https://github.com/prettier/prettier/blob/dd78f31aaf5b4522b780f13194d57308e5fdf53b/src/common/util.js#L328-L399 *) -and code_desc_of_operation = Ast.Expression.( - let string_of_operator = function - | `Binary op -> Ast_utils.string_of_binary_operator op - | `Logical op -> (match op with - | Logical.Or -> "||" - | Logical.And -> "&&" - | Logical.NullishCoalesce -> "??") - in - let should_flatten = Binary.( - let precedence = function - | `Logical Logical.Or -> 0 - | `Logical Logical.NullishCoalesce -> 0 - | `Logical Logical.And -> 1 - | `Binary BitOr -> 2 - | `Binary Xor -> 3 - | `Binary BitAnd -> 4 - | `Binary (Equal | NotEqual | StrictEqual | StrictNotEqual) -> 5 - | `Binary (LessThan | LessThanEqual | GreaterThan | GreaterThanEqual | In | Instanceof) -> 6 - | `Binary (LShift | RShift | RShift3) -> 7 - | `Binary (Plus | Minus) -> 8 - | `Binary (Mult | Div | Mod) -> 9 - | `Binary Exp -> 10 - in - let equality = function - | `Binary (Equal | NotEqual | StrictEqual | StrictNotEqual) -> true - | _ -> false - in - let multiplicative = function - | `Binary (Mult | Div | Mod) -> true - | _ -> false - in - let bitshift = function - | `Binary (LShift | RShift | RShift3) -> true - | _ -> false +and code_desc_of_operation = + Ast.Expression.( + let string_of_operator = function + | `Binary op -> Flow_ast_utils.string_of_binary_operator op + | `Logical op -> + (match op with + | Logical.Or -> "||" + | Logical.And -> "&&" + | Logical.NullishCoalesce -> "??") in - fun a b -> - if precedence a <> precedence b then - false - else if a = 
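(* A few worked examples of the flattening rule above (an illustrative sketch
   derived from the precedence table, not output captured from running the
   code): a nested operand whose operator shares the parent's precedence is
   left unwrapped, so `a + b - c` is described as "a + b - c"; operands of
   different precedence are wrapped, so `a * b + c` becomes "(a * b) + c";
   and chained equality operators are always wrapped, so `a == b == c`
   becomes "(a == b) == c". *)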
`Binary Exp then - false - else if equality a && equality b then - false - else if (a = `Binary Mod && multiplicative b) || (b = `Binary Mod && multiplicative a) then - false - else if bitshift a && bitshift b then - false - else - true - ) in - fun left op right -> - let wrap_left = match left with - | _, Binary { Binary.operator; _ } -> not (should_flatten op (`Binary operator)) - | _, Logical { Logical.operator; _ } -> not (should_flatten op (`Logical operator)) - | _ -> true + let should_flatten = + Binary.( + let precedence = function + | `Logical Logical.Or -> 0 + | `Logical Logical.NullishCoalesce -> 0 + | `Logical Logical.And -> 1 + | `Binary BitOr -> 2 + | `Binary Xor -> 3 + | `Binary BitAnd -> 4 + | `Binary (Equal | NotEqual | StrictEqual | StrictNotEqual) -> 5 + | `Binary (LessThan | LessThanEqual | GreaterThan | GreaterThanEqual | In | Instanceof) + -> + 6 + | `Binary (LShift | RShift | RShift3) -> 7 + | `Binary (Plus | Minus) -> 8 + | `Binary (Mult | Div | Mod) -> 9 + | `Binary Exp -> 10 + in + let equality = function + | `Binary (Equal | NotEqual | StrictEqual | StrictNotEqual) -> true + | _ -> false + in + let multiplicative = function + | `Binary (Mult | Div | Mod) -> true + | _ -> false + in + let bitshift = function + | `Binary (LShift | RShift | RShift3) -> true + | _ -> false + in + fun a b -> + if precedence a <> precedence b then + false + else if a = `Binary Exp then + false + else if equality a && equality b then + false + else if (a = `Binary Mod && multiplicative b) || (b = `Binary Mod && multiplicative a) + then + false + else if bitshift a && bitshift b then + false + else + true) in - let left = code_desc_of_expression ~wrap:wrap_left left in - let right = code_desc_of_expression ~wrap:true right in - let op = string_of_operator op in - left ^ " " ^ op ^ " " ^ right -) - -and code_desc_of_jsx_element x = Ast.JSX.(match (snd x.openingElement).Opening.name with -| Identifier (_, { Identifier.name }) -> "<" ^ name ^ " />" -| NamespacedName (_, { NamespacedName.namespace = (_, { Identifier.name = a }); - name = (_, { Identifier.name = b }) }) -> - "<" ^ a ^ ":" ^ b ^ " />" -| MemberExpression x -> - let rec loop = function - | (_, { MemberExpression._object = MemberExpression.Identifier (_, { Identifier.name = a }); - property = (_, { Identifier.name = b }) }) -> - a ^ "." ^ b - | (_, { MemberExpression._object = MemberExpression.MemberExpression a; - property = (_, { Identifier.name = b }) }) -> - loop a ^ "." 
^ b - in - "<" ^ loop x ^ " />" -) - -and code_desc_of_literal x = Ast.(match x.Literal.value with -| Literal.String x when String.length x > 16 -> "'" ^ String.sub x 0 10 ^ "...'" -| _ -> x.Literal.raw -) - -let rec mk_expression_reason = Ast.Expression.(function -| (loc, TypeCast { TypeCast.expression; _ }) -> repos_reason loc (mk_expression_reason expression) -| (loc, Object _) -> mk_reason RObjectLit loc -| (loc, Array _) -> mk_reason RArrayLit loc -| (loc, ArrowFunction f) -> func_reason f loc -| (loc, Function f) -> func_reason f loc -| (loc, Ast.Expression.Literal {Ast.Literal.value = Ast.Literal.String ""; _}) -> - mk_reason (RStringLit "") loc -| (loc, TaggedTemplate _) -> mk_reason RTemplateString loc -| (loc, TemplateLiteral _) -> mk_reason RTemplateString loc -| (loc, _) as x -> mk_reason (RCode (code_desc_of_expression ~wrap:false x)) loc -) + fun left op right -> + let wrap_left = + match left with + | (_, Binary { Binary.operator; _ }) -> not (should_flatten op (`Binary operator)) + | (_, Logical { Logical.operator; _ }) -> not (should_flatten op (`Logical operator)) + | _ -> true + in + let left = code_desc_of_expression ~wrap:wrap_left left in + let right = code_desc_of_expression ~wrap:true right in + let op = string_of_operator op in + left ^ " " ^ op ^ " " ^ right) + +and code_desc_of_jsx_element x = + Ast.JSX.( + match (snd x.openingElement).Opening.name with + | Identifier (_, { Identifier.name }) -> "<" ^ name ^ " />" + | NamespacedName + ( _, + { + NamespacedName.namespace = (_, { Identifier.name = a }); + name = (_, { Identifier.name = b }); + } ) -> + "<" ^ a ^ ":" ^ b ^ " />" + | MemberExpression x -> + let rec loop = function + | ( _, + { + MemberExpression._object = MemberExpression.Identifier (_, { Identifier.name = a }); + property = (_, { Identifier.name = b }); + } ) -> + a ^ "." ^ b + | ( _, + { + MemberExpression._object = MemberExpression.MemberExpression a; + property = (_, { Identifier.name = b }); + } ) -> + loop a ^ "." ^ b + in + "<" ^ loop x ^ " />") + +and code_desc_of_literal x = + Ast.( + match x.Literal.value with + | Literal.String x when String.length x > 16 -> "'" ^ String.sub x 0 10 ^ "...'" + | _ -> x.Literal.raw) + +and code_desc_of_property ~optional property = + match property with + | Ast.Expression.Member.PropertyIdentifier (_, { Ast.Identifier.name = x; comments = _ }) -> + ( if optional then + "?." + else + "." 
) + ^ x + | Ast.Expression.Member.PropertyPrivateName (_, (_, { Ast.Identifier.name = x; comments = _ })) + -> + ( if optional then + "?.#" + else + ".#" ) + ^ x + | Ast.Expression.Member.PropertyExpression x -> + ( if optional then + "?.[" + else + "[" ) + ^ code_desc_of_expression ~wrap:false x + ^ "]" + +let rec mk_expression_reason = + Ast.Expression.( + function + | (loc, TypeCast { TypeCast.expression; _ }) -> + repos_reason loc (mk_expression_reason expression) + | (loc, Object _) -> mk_reason RObjectLit loc + | (loc, Array _) -> mk_reason RArrayLit loc + | (loc, ArrowFunction { Ast.Function.async; _ }) -> func_reason ~async ~generator:false loc + | (loc, Function { Ast.Function.async; generator; _ }) -> func_reason ~async ~generator loc + | (loc, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String ""; _ }) -> + mk_reason (RStringLit "") loc + | (loc, TaggedTemplate _) -> mk_reason RTemplateString loc + | (loc, TemplateLiteral _) -> mk_reason RTemplateString loc + | (loc, Member { Member._object; property }) -> + mk_reason + (RMember + { + object_ = code_desc_of_expression ~wrap:true _object; + property = code_desc_of_property ~optional:false property; + }) + loc + | (loc, _) as x -> mk_reason (RCode (code_desc_of_expression ~wrap:false x)) loc) + +let mk_pattern_reason ((loc, _) as patt) = mk_reason (RCode (code_desc_of_pattern patt)) loc (* TODO: replace RCustom descriptions with proper descriptions *) let unknown_elem_empty_array_desc = RCustom "unknown element type of empty array" -let inferred_union_elem_array_desc = RCustom - "inferred union of array element types \ - (alternatively, provide an annotation to summarize the array \ - element type)" + +let inferred_union_elem_array_desc = + RCustom + "inferred union of array element types (alternatively, provide an annotation to summarize the array element type)" (* Classifies a reason description. These classifications can be used to * implement various asthetic behaviors in error messages when we would like to @@ -1076,172 +1249,182 @@ let inferred_union_elem_array_desc = RCustom * of arrays and tuples. * - `Unclassified: Everything else which hasn't been classified yet. 
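 *
 * As a rough illustration (an illustrative sketch only, using the dummy
 * location [ALoc.none]):
 * {[
 *   classification_of_reason (mk_reason RNumber ALoc.none) = `Scalar
 *   && classification_of_reason (mk_reason RNull ALoc.none) = `Nullish
 *   && classification_of_reason (mk_reason RArrayLit ALoc.none) = `Array
 *   && classification_of_reason (mk_reason RObjectLit ALoc.none) = `Unclassified
 * ]}
 * Note that `is_scalar_reason` below returns true for `Nullish reasons as well.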
*) -let classification_of_reason r = match desc_of_reason ~unwrap:true r with -| RNumber -| RString -| RBoolean -| RStringLit _ -| RNumberLit _ -| RBooleanLit _ -| RJSXText -| RFbt -| RTemplateString -| RUnknownString -| REnum -| RKeySet -| RRegExp - -> `Scalar -| RVoid -| RNull -| RNullOrVoid - -> `Nullish -| RArray -| RArrayLit -| REmptyArrayLit -| RArrayType -| RROArrayType -| RTupleType -| RRestArray _ -| RArrayPatternRestProp - -> `Array -| RMixed -| REmpty -| RAny -| RMatchingProp _ -| RObject -| RObjectLit -| RObjectType -| RObjectClassName -| RInterfaceType -| RTupleElement -| RTupleOutOfBoundsAccess -| RFunction _ -| RFunctionType -| RFunctionBody -| RFunctionCall _ -| RFunctionCallType -| RFunctionUnusedArgument -| RJSXFunctionCall _ -| RJSXIdentifier _ -| RJSXElementProps _ -| RJSXElement _ -| RUnaryOperator _ -| RBinaryOperator _ -| RLogical _ -| RAnyObject -| RAnyFunction -| RGetterSetterProperty -| RThis -| RThisType -| RExistential -| RTooFewArgs -| RTooFewArgsExpectedRest -| RConstructorReturn -| RNewObject -| RUnion -| RUnionType -| RIntersection -| RIntersectionType -| RAnd -| RConditional -| RPrototype -| RObjectPrototype -| RFunctionPrototype -| RDestructuring -| RDefaultValue -| RConstructor -| RDefaultConstructor -| RConstructorCall _ -| RReturn -| RImplicitReturn _ -| RSuper -| RNoSuper -| RDummyPrototype -| RDummyThis -| RTupleMap -| RObjectMap -| RObjectMapi -| RType _ -| RTypeAlias _ -| ROpaqueType _ -| RTypeParam _ -| RTypeof _ -| RMethod _ -| RMethodCall _ -| RParameter _ -| RRestParameter _ -| RIdentifier _ -| RIdentifierAssignment _ -| RPropertyAssignment _ -| RProperty _ -| RPrivateProperty _ -| RShadowProperty _ -| RPropertyOf _ -| RPropertyIsAString _ -| RMissingProperty _ -| RUnknownProperty _ -| RUndefinedProperty _ -| RSomeProperty -| RNameProperty _ -| RMissingAbstract _ -| RFieldInitializer _ -| RUntypedModule _ -| RNamedImportedType _ -| RImportStarType _ -| RImportStarTypeOf _ -| RImportStar _ -| RDefaultImportedType _ -| RCode _ -| RCustom _ -| RPolyType _ -| RPolyTest _ -| RExactType _ -| ROptional _ -| RMaybe _ -| RAbstract _ -| RTypeApp _ -| RThisTypeApp _ -| RExtends _ -| RClass _ -| RStatics _ -| RSuperOf _ -| RFrozen _ -| RBound _ -| RVarianceCheck _ -| RPredicateOf _ -| RPredicateCall _ -| RPredicateCallNeg _ -| RRefined _ -| RIncompatibleInstantiation _ -| RSpreadOf _ -| RObjectPatternRestProp -| RCommonJSExports _ -| RModule _ -| ROptionalChain -| RReactProps -| RReactElement _ -| RReactClass -| RReactComponent -| RReactStatics -| RReactDefaultProps -| RReactState -| RReactPropTypes -| RReactChildren -| RReactChildrenOrType _ -| RReactChildrenOrUndefinedOrType _ -| RReactSFC - -> `Unclassified - -let is_nullish_reason r = - classification_of_reason r = `Nullish +let classification_of_reason r = + match desc_of_reason ~unwrap:true r with + | RNumber + | RBigInt + | RString + | RSymbol + | RBoolean + | RLongStringLit _ + | RStringLit _ + | RNumberLit _ + | RBigIntLit _ + | RBooleanLit _ + | RJSXText + | RFbt + | RTemplateString + | RUnknownString + | RUnionEnum + | RKeySet + | RRegExp -> + `Scalar + | RVoid + | RNull + | RNullOrVoid -> + `Nullish + | RArray + | RArrayLit + | REmptyArrayLit + | RArrayType + | RROArrayType + | RTupleType + | RRestArray _ + | RArrayPatternRestProp -> + `Array + | RMixed + | REmpty + | RAnyExplicit + | RAnyImplicit + | RMatchingProp _ + | RObject + | RObjectLit + | RObjectType + | RObjectClassName + | RInterfaceType + | RTupleElement + | RTupleOutOfBoundsAccess + | RFunction _ + | RFunctionType + | RFunctionBody 
+ | RFunctionCall _ + | RFunctionCallType + | RFunctionUnusedArgument + | RJSXFunctionCall _ + | RJSXIdentifier _ + | RJSXElementProps _ + | RJSXElement _ + | RUnaryOperator _ + | RBinaryOperator _ + | RLogical _ + | RGetterSetterProperty + | RThis + | RThisType + | RExistential + | RImplicitInstantiation + | RTooFewArgs + | RTooFewArgsExpectedRest + | RConstructorReturn + | RNewObject + | RUnion + | RUnionType + | RIntersection + | RIntersectionType + | RAnd + | RConditional + | RPrototype + | RObjectPrototype + | RFunctionPrototype + | RDestructuring + | RDefaultValue + | RConstructor + | RDefaultConstructor + | RConstructorCall _ + | RReturn + | RImplicitReturn _ + | RSuper + | RNoSuper + | RDummyPrototype + | RDummyThis + | RTupleMap + | RObjectMap + | RObjectMapi + | RType _ + | RTypeAlias _ + | ROpaqueType _ + | RTypeParam _ + | RTypeof _ + | RMethod _ + | RMethodCall _ + | RParameter _ + | RRestParameter _ + | RIdentifier _ + | RIdentifierAssignment _ + | RPropertyAssignment _ + | RProperty _ + | RPrivateProperty _ + | RShadowProperty _ + | RMember _ + | RPropertyOf _ + | RPropertyIsAString _ + | RMissingProperty _ + | RUnknownProperty _ + | RUndefinedProperty _ + | RSomeProperty + | RNameProperty _ + | RMissingAbstract _ + | RFieldInitializer _ + | RUntypedModule _ + | RNamedImportedType _ + | RImportStarType _ + | RImportStarTypeOf _ + | RImportStar _ + | RDefaultImportedType _ + | RCode _ + | RCustom _ + | RExports + | RPolyType _ + | RPolyTest _ + | RExactType _ + | ROptional _ + | RMaybe _ + | RAbstract _ + | RTypeApp _ + | RTypeAppImplicit _ + | RThisTypeApp _ + | RExtends _ + | RClass _ + | RStatics _ + | RSuperOf _ + | RFrozen _ + | RBound _ + | RVarianceCheck _ + | RPredicateOf _ + | RPredicateCall _ + | RPredicateCallNeg _ + | RRefined _ + | RIncompatibleInstantiation _ + | RSpreadOf _ + | RObjectPatternRestProp + | RCommonJSExports _ + | RModule _ + | ROptionalChain + | RReactProps + | RReactElement _ + | RReactClass + | RReactComponent + | RReactStatics + | RReactDefaultProps + | RReactState + | RReactPropTypes + | RReactChildren + | RReactChildrenOrType _ + | RReactChildrenOrUndefinedOrType _ + | RReactSFC + | RReactConfig + | RTrusted _ + | RPrivate _ + | REnum -> + `Unclassified + +let is_nullish_reason r = classification_of_reason r = `Nullish let is_scalar_reason r = let c = classification_of_reason r in c = `Scalar || c = `Nullish -let is_array_reason r = - classification_of_reason r = `Array +let is_array_reason r = classification_of_reason r = `Array let invalidate_rtype_alias = function - | RTypeAlias (name, _, desc) -> RTypeAlias (name, false, desc) + | RTypeAlias (name, true, desc) -> RTypeAlias (name, false, desc) | desc -> desc diff --git a/src/common/reason.mli b/src/common/reason.mli index 44270effb07..3e6dff61964 100644 --- a/src/common/reason.mli +++ b/src/common/reason.mli @@ -1,19 +1,34 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val mk_id: unit -> int - -type reason_desc = - | RNumber | RString | RBoolean | RMixed | REmpty | RAny | RVoid | RNull +val mk_id : unit -> int + +type 'loc virtual_reason_desc = + | RTrusted of 'loc virtual_reason_desc + | RPrivate of 'loc virtual_reason_desc + | RAnyExplicit + | RAnyImplicit + | RNumber + | RBigInt + | RString + | RBoolean + | RMixed + | REmpty + | RVoid + | RNull + | RSymbol + | RExports | RNullOrVoid + | RLongStringLit of int (* Max length *) | RStringLit of string | RNumberLit of string + | RBigIntLit of string | RBooleanLit of bool - | RMatchingProp of string * reason_desc + | RMatchingProp of string * 'loc virtual_reason_desc | RObject | RObjectLit | RObjectType @@ -30,7 +45,7 @@ type reason_desc = | RFunction of reason_desc_function | RFunctionType | RFunctionBody - | RFunctionCall of reason_desc + | RFunctionCall of 'loc virtual_reason_desc | RFunctionCallType | RFunctionUnusedArgument | RJSXFunctionCall of string @@ -39,18 +54,18 @@ type reason_desc = | RJSXElement of string option | RJSXText | RFbt - | RUnaryOperator of string * reason_desc - | RBinaryOperator of string * reason_desc * reason_desc - | RLogical of string * reason_desc * reason_desc - | RAnyObject - | RAnyFunction + | RUnaryOperator of string * 'loc virtual_reason_desc + | RBinaryOperator of string * 'loc virtual_reason_desc * 'loc virtual_reason_desc + | RLogical of string * 'loc virtual_reason_desc * 'loc virtual_reason_desc | RTemplateString | RUnknownString + | RUnionEnum | REnum | RGetterSetterProperty | RThis | RThisType | RExistential + | RImplicitInstantiation | RTooFewArgs | RTooFewArgsExpectedRest | RConstructorReturn @@ -69,9 +84,9 @@ type reason_desc = | RDefaultValue | RConstructor | RDefaultConstructor - | RConstructorCall of reason_desc + | RConstructorCall of 'loc virtual_reason_desc | RReturn - | RImplicitReturn of reason_desc + | RImplicitReturn of 'loc virtual_reason_desc | RRegExp | RSuper | RNoSuper @@ -81,9 +96,9 @@ type reason_desc = | RObjectMap | RObjectMapi | RType of string - | RTypeAlias of string * bool * reason_desc + | RTypeAlias of string * bool * 'loc virtual_reason_desc | ROpaqueType of string - | RTypeParam of string * (reason_desc * Loc.t) * (reason_desc * Loc.t) + | RTypeParam of string * ('loc virtual_reason_desc * 'loc) * ('loc virtual_reason_desc * 'loc) | RTypeof of string | RMethod of string option | RMethodCall of string option @@ -95,14 +110,18 @@ type reason_desc = | RProperty of string option | RPrivateProperty of string | RShadowProperty of string - | RPropertyOf of string * reason_desc + | RMember of { + object_: string; + property: string; + } + | RPropertyOf of string * 'loc virtual_reason_desc | RPropertyIsAString of string | RMissingProperty of string option | RUnknownProperty of string option | RUndefinedProperty of string | RSomeProperty - | RNameProperty of reason_desc - | RMissingAbstract of reason_desc + | RNameProperty of 'loc virtual_reason_desc + | RMissingAbstract of 'loc virtual_reason_desc | RFieldInitializer of string | RUntypedModule of string | RNamedImportedType of string * string @@ -112,34 +131,34 @@ type reason_desc = | RDefaultImportedType of string * string | RCode of string | RCustom of string - | RPolyType of reason_desc - | RPolyTest of string * reason_desc - | RExactType of reason_desc - | ROptional of reason_desc - | RMaybe of reason_desc - | RRestArray of reason_desc - | RAbstract of reason_desc - | RTypeApp of reason_desc - | RThisTypeApp of reason_desc - | RExtends of reason_desc - | RClass of reason_desc - | 
RStatics of reason_desc - | RSuperOf of reason_desc - | RFrozen of reason_desc - | RBound of reason_desc - | RVarianceCheck of reason_desc - | RPredicateOf of reason_desc - | RPredicateCall of reason_desc - | RPredicateCallNeg of reason_desc - | RRefined of reason_desc + | RPolyType of 'loc virtual_reason_desc + | RPolyTest of string * 'loc virtual_reason_desc + | RExactType of 'loc virtual_reason_desc + | ROptional of 'loc virtual_reason_desc + | RMaybe of 'loc virtual_reason_desc + | RRestArray of 'loc virtual_reason_desc + | RAbstract of 'loc virtual_reason_desc + | RTypeApp of 'loc virtual_reason_desc + | RTypeAppImplicit of 'loc virtual_reason_desc + | RThisTypeApp of 'loc virtual_reason_desc + | RExtends of 'loc virtual_reason_desc + | RClass of 'loc virtual_reason_desc + | RStatics of 'loc virtual_reason_desc + | RSuperOf of 'loc virtual_reason_desc + | RFrozen of 'loc virtual_reason_desc + | RBound of 'loc virtual_reason_desc + | RVarianceCheck of 'loc virtual_reason_desc + | RPredicateOf of 'loc virtual_reason_desc + | RPredicateCall of 'loc virtual_reason_desc + | RPredicateCallNeg of 'loc virtual_reason_desc + | RRefined of 'loc virtual_reason_desc | RIncompatibleInstantiation of string - | RSpreadOf of reason_desc + | RSpreadOf of 'loc virtual_reason_desc | RObjectPatternRestProp | RArrayPatternRestProp | RCommonJSExports of string | RModule of string | ROptionalChain - | RReactProps | RReactElement of string option | RReactClass @@ -149,103 +168,174 @@ type reason_desc = | RReactState | RReactPropTypes | RReactChildren - | RReactChildrenOrType of reason_desc - | RReactChildrenOrUndefinedOrType of reason_desc + | RReactChildrenOrType of 'loc virtual_reason_desc + | RReactChildrenOrUndefinedOrType of 'loc virtual_reason_desc | RReactSFC + | RReactConfig and reason_desc_function = | RAsync | RGenerator | RAsyncGenerator | RNormal + | RUnknown + +type reason_desc = ALoc.t virtual_reason_desc + +type 'loc virtual_reason + +type reason = ALoc.t virtual_reason + +type concrete_reason = Loc.t virtual_reason -type reason type t = reason (* convenience *) -module TestID: sig - val run: ('a -> 'b) -> 'a -> 'b +module TestID : sig + val run : ('a -> 'b) -> 'a -> 'b end -val lexpos: string -> int -> int -> Lexing.position - (* reason constructor *) -val mk_reason: reason_desc -> Loc.t -> reason +val mk_reason : 'loc virtual_reason_desc -> 'loc -> 'loc virtual_reason (* ranges *) -val diff_range: Loc.t -> int * int -val in_range: Loc.t -> Loc.t -> bool +val in_range : Loc.t -> Loc.t -> bool + +val string_of_desc : 'loc virtual_reason_desc -> string + +val map_reason_locs : ('a -> 'b) -> 'a virtual_reason -> 'b virtual_reason + +val map_desc_locs : ('a -> 'b) -> 'a virtual_reason_desc -> 'b virtual_reason_desc + +val string_of_loc : ?strip_root:Path.t option -> Loc.t -> string + +val string_of_aloc : ?strip_root:Path.t option -> ALoc.t -> string + +val json_of_loc : + ?strip_root:Path.t option -> + ?catch_offset_errors:bool -> + offset_table:Offset_utils.t option -> + Loc.t -> + Hh_json.json + +val json_of_loc_props : + ?strip_root:Path.t option -> + ?catch_offset_errors:bool -> + offset_table:Offset_utils.t option -> + Loc.t -> + (string * Hh_json.json) list + +val json_of_source : ?strip_root:Path.t option -> File_key.t option -> Hh_json.json + +val json_source_type_of_source : File_key.t option -> Hh_json.json + +val locationless_reason : reason_desc -> reason + +val func_reason : async:bool -> generator:bool -> ALoc.t -> reason + +val is_internal_name : string -> bool + +val 
internal_name : string -> string -val string_of_desc: reason_desc -> string +val is_internal_module_name : string -> bool -val string_of_loc_pos: Loc.t -> string -val string_of_loc: ?strip_root:Path.t option -> Loc.t -> string -val json_of_loc: ?strip_root:Path.t option -> Loc.t -> Hh_json.json -val json_of_loc_props: ?strip_root:Path.t option -> Loc.t -> (string * Hh_json.json) list +val internal_module_name : string -> string -val locationless_reason: reason_desc -> reason +val uninternal_module_name : string -> string -val func_reason: (Loc.t, Loc.t) Flow_ast.Function.t -> Loc.t -> reason +val is_instantiable_reason : 'loc virtual_reason -> bool -val is_internal_name: string -> bool -val internal_name: string -> string +val is_constant_reason : 'loc virtual_reason -> bool -val is_internal_module_name: string -> bool -val internal_module_name: string -> string -val uninternal_module_name: string -> string +val is_typemap_reason : 'loc virtual_reason -> bool -val internal_pattern_name: Loc.t -> string +val is_calltype_reason : 'loc virtual_reason -> bool -val is_instantiable_reason: reason -> bool +val is_nullish_reason : 'loc virtual_reason -> bool -val is_constant_reason: reason -> bool +val is_scalar_reason : 'loc virtual_reason -> bool -val is_typemap_reason: reason -> bool -val is_calltype_reason: reason -> bool +val is_array_reason : 'loc virtual_reason -> bool -val is_nullish_reason: reason -> bool -val is_scalar_reason: reason -> bool -val is_array_reason: reason -> bool +val is_literal_object_reason : 'loc virtual_reason -> bool -val is_literal_object_reason: reason -> bool -val is_literal_array_reason: reason -> bool +val is_literal_array_reason : 'loc virtual_reason -> bool -val derivable_reason: reason -> reason -val is_derivable_reason: reason -> bool +val derivable_reason : 'loc virtual_reason -> 'loc virtual_reason -val builtin_reason: reason_desc -> reason +val is_derivable_reason : 'loc virtual_reason -> bool + +val builtin_reason : reason_desc -> reason (* reason location preds *) -val is_builtin_reason: reason -> bool -val is_lib_reason: reason -> bool -val is_blamable_reason: reason -> bool -val reasons_overlap: reason -> reason -> bool +val is_builtin_reason : ('loc -> File_key.t option) -> 'loc virtual_reason -> bool + +val is_lib_reason : reason -> bool + +val is_blamable_reason : reason -> bool + +val string_of_source : ?strip_root:Path.t option -> File_key.t -> string + +val string_of_reason : ?strip_root:Path.t option -> reason -> string -val string_of_source: ?strip_root:Path.t option -> File_key.t -> string -val string_of_reason: ?strip_root:Path.t option -> reason -> string -val json_of_reason: ?strip_root:Path.t option -> reason -> Hh_json.json -val dump_reason: ?strip_root:Path.t option -> reason -> string +val dump_reason : ?strip_root:Path.t option -> reason -> string (* accessors *) -val aloc_of_reason: reason -> ALoc.t -val def_loc_of_reason: reason -> Loc.t -val annot_loc_of_reason: reason -> Loc.t option -val desc_of_reason: ?unwrap:bool -> reason -> reason_desc +val loc_of_reason : concrete_reason -> Loc.t + +val aloc_of_reason : reason -> ALoc.t + +val def_aloc_of_reason : reason -> ALoc.t + +val def_loc_of_reason : concrete_reason -> Loc.t + +val annot_aloc_of_reason : reason -> ALoc.t option + +val desc_of_reason : ?unwrap:bool -> 'loc virtual_reason -> 'loc virtual_reason_desc + +val annot_loc_of_reason : concrete_reason -> Loc.t option (* simple way to get derived reasons whose descriptions are simple replacements of the original *) -val 
replace_reason: ?keep_def_loc:bool -> (reason_desc -> reason_desc) -> reason -> reason -val replace_reason_const: ?keep_def_loc:bool -> reason_desc -> reason -> reason +(* replace desc, but keep loc, def_loc, annot_loc *) +val update_desc_reason : + ('loc virtual_reason_desc -> 'loc virtual_reason_desc) -> + 'loc virtual_reason -> + 'loc virtual_reason + +(* replace desc, keep loc, but clobber def_loc, annot_loc as in new reason *) +val update_desc_new_reason : + ('loc virtual_reason_desc -> 'loc virtual_reason_desc) -> + 'loc virtual_reason -> + 'loc virtual_reason + +(* replace desc, but keep loc, def_loc, annot_loc *) +val replace_desc_reason : 'loc virtual_reason_desc -> 'loc virtual_reason -> 'loc virtual_reason -val repos_reason: Loc.t -> ?annot_loc:Loc.t -> reason -> reason -val annot_reason: reason -> reason +(* replace desc, keep loc, but clobber def_loc, annot_loc as in new reason *) +val replace_desc_new_reason : + 'loc virtual_reason_desc -> 'loc virtual_reason -> 'loc virtual_reason -val do_patch: string list -> (int * int * string) list -> string +val repos_reason : 'loc -> ?annot_loc:'loc -> 'loc virtual_reason -> 'loc virtual_reason + +val annot_reason : 'loc virtual_reason -> 'loc virtual_reason module ReasonMap : MyMap.S with type key = reason -val mk_expression_reason: (Loc.t, Loc.t) Flow_ast.Expression.t -> reason +val mk_expression_reason : (ALoc.t, ALoc.t) Flow_ast.Expression.t -> reason + +val mk_pattern_reason : (ALoc.t, ALoc.t) Flow_ast.Pattern.t -> reason + +val unknown_elem_empty_array_desc : reason_desc + +val inferred_union_elem_array_desc : reason_desc + +val invalidate_rtype_alias : 'loc virtual_reason_desc -> 'loc virtual_reason_desc + +val code_desc_of_literal : 'loc Flow_ast.Literal.t -> string -val unknown_elem_empty_array_desc: reason_desc -val inferred_union_elem_array_desc: reason_desc +val code_desc_of_expression : wrap:bool -> ('a, 'b) Flow_ast.Expression.t -> string -val invalidate_rtype_alias: reason_desc -> reason_desc +(* Pass in any available aloc tables to be used when comparing abstract and concrete locations from + * the same file. Usually `Context.aloc_tables` is a good choice, but if the context is not + * available, the empty map may be appropriate. *) +val concretize_equal : ALoc.table Lazy.t Utils_js.FilenameMap.t -> t -> t -> bool diff --git a/src/common/semver.ml b/src/common/semver.ml deleted file mode 100644 index e53dc0f7c3e..00000000000 --- a/src/common/semver.ml +++ /dev/null @@ -1,202 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * This library contains portions based on third party software provided under - * this license: - * - * node-semver software - * - * Copyright (c) Isaac Z. Schlueter and Contributors - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR - * IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - *) - -(** - Basic semantic version parser, as defined by http://semver.org/ - - So far, this implementation only supports individual versions; intersection - ranges (e.g. ">=0.13.0 <0.14.0", which are ANDed together); and caret ranges, - which allow changes that do not modify the left-most non-zero digit (e.g. - "^0.13" expands into ">=0.13.0 <0.14.0", and "^0.13.1" expands into - ">=0.13.1 <0.14.0", whereas "^1.2.3" expands into ">=1.2.3 <2.0.0"). - - Further support for features like "||" ("1.2.3 || 1.2.5"), hyphen ranges - ("1.2 - 1.3"), X-ranges ("1.2.x" or "1.2.*"), tilde ranges ("~1.2"), and - pre-release/build identifiers ("1.2.3-beta.1"), will be added as necessary. - **) - -open Utils_js - -exception Parse_error of string - -let numeric_identifier_str = "0\\|[1-9][0-9]*" -let version_str = spf "\\(%s\\)\\(\\.\\(%s\\)\\(\\.\\(%s\\)\\)?\\)?" - numeric_identifier_str numeric_identifier_str numeric_identifier_str -let version_regexp = Str.regexp version_str - -let get_matched_version s offset = - let get_match index = - try Str.matched_group (offset + index) s with Not_found -> "" - in - get_match 1, get_match 3, get_match 5 - -let missing id = id = "" - -module Version : sig - type t - val create : int -> int -> int -> t - val parse : string -> t - val compare : t -> t -> int -end = struct - type t = { - major: int; - minor: int; - patch: int; - } - - let create major minor patch = { major; minor; patch; } - - let parse version = try - assert (Str.string_match version_regexp version 0); - let major, minor, patch = get_matched_version version 0 in - let major = if missing major then 0 else int_of_string major in - let minor = if missing minor then 0 else int_of_string minor in - let patch = if missing patch then 0 else int_of_string patch in - create major minor patch - with _ -> - raise (Parse_error ("Invalid version number: " ^ version)) - - let compare a b = - Pervasives.compare (a.major, a.minor, a.patch) (b.major, b.minor, b.patch) -end - -module Comparator : sig - type t - val parse : string -> t - val satisfies : Version.t -> t -> bool -end = struct - type op = - | Greater - | GreaterOrEqual - | Less - | LessOrEqual - | Equal - | NotEqual - - type t = { - op: op; - version: Version.t - } - - let comparator_regexp = Str.regexp ( - spf "^\\([<>=]?=?\\|!=\\)\\(%s\\)$" version_str - ) - - let parse comparator = - try - assert (Str.string_match comparator_regexp comparator 0); - let op = match Str.matched_group 1 comparator with - | ">" -> Greater - | ">=" -> GreaterOrEqual - | "<" -> Less - | "<=" -> LessOrEqual - | "!=" -> NotEqual - | "" | "=" | "==" -> Equal - | other -> raise (Parse_error ("Unknown operator: " ^ other)) - in - - let major, minor, patch = get_matched_version comparator 2 in - let major = if missing major then 0 else int_of_string major in - let minor = if missing minor then 0 else int_of_string minor in - let patch = if missing patch then 0 else int_of_string patch in - let version = Version.create major minor patch in - {op; version} - with _ -> - raise (Parse_error ("Invalid comparator: " ^ comparator)) - - let satisfies a {op; version = b} = - let result = Version.compare a b in - match op with - | Greater -> result > 0 - | 
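(* A usage sketch for the semver API documented in the header above (an
   illustrative sketch only; the expansion follows the caret-range examples
   given there):
   {[
     Semver.satisfies "^0.13.0" "0.13.5"          (* true: "^0.13.0" means ">=0.13.0 <0.14.0" *)
     && not (Semver.satisfies "^0.13.0" "0.14.0") (* the upper bound is exclusive *)
   ]} *)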
GreaterOrEqual -> result >= 0 - | Less -> result < 0 - | LessOrEqual -> result <= 0 - | NotEqual -> result <> 0 - | Equal -> result = 0 -end - -module Range : sig - type t - val parse : string -> t - val satisfies : t -> Version.t -> bool -end = struct - type t = Comparator.t list - - let op_trim = Str.regexp "\\([<>]=?\\|[!=]?=\\|\\^\\)[ \t]+" - let caret_regexp = Str.regexp (spf "^\\^\\(%s\\)$" version_str) - - let incr str = (int_of_string str) + 1 - - let expand_caret ver = - let major, minor, patch = ver in - if missing major then "" - else if missing minor then spf ">=%s.0.0 <%d.0.0" major (incr major) - else if missing patch then - if major = "0" then - spf ">=%s.%s.0 <%s.%d.0" major minor major (incr minor) - else - spf ">=%s.%s.0 <%d.0.0" major minor (incr major) - else - if major = "0" then - if minor = "0" then - spf ">=%s.%s.%s <%s.%s.%d" major minor patch major minor (incr patch) - else - spf ">=%s.%s.%s <%s.%d.0" major minor patch major (incr minor) - else - spf ">=%s.%s.%s <%d.0.0" major minor patch (incr major) - - let expand_comparator (comp:string) : string list = - let comp = - if Str.string_match caret_regexp comp 0 then - expand_caret (get_matched_version comp 1) - else comp - in - Str.split (Str.regexp_string " ") comp - - let parse (range:string) : t = - (* normalize whitespace *) - let range = String.trim range in - let range = Str.global_replace op_trim "\\1" range in - let range = Str.global_replace (Str.regexp "[ \t]+") " " range in - - range - |> Str.split (Str.regexp_string " ") - |> List.map expand_comparator - |> List.flatten - |> List.map Comparator.parse - - let satisfies range version = - List.for_all (Comparator.satisfies version) range -end - -let is_valid_range range = - try let _ = Range.parse range in true - with Parse_error _ -> false - -let satisfies (range:string) (version:string) = - let range = Range.parse range in - let version = Version.parse version in - Range.satisfies range version diff --git a/src/common/semver/__tests__/semver_parser_test.ml b/src/common/semver/__tests__/semver_parser_test.ml new file mode 100644 index 00000000000..19119565be6 --- /dev/null +++ b/src/common/semver/__tests__/semver_parser_test.ml @@ -0,0 +1,107 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 + +exception Semver_parse_error of string + +let parse_version str = + let lexbuf = Lexing.from_string str in + try Semver_parser.version Semver_lexer.token lexbuf + with Parsing.Parse_error -> raise (Semver_parse_error (Lexing.lexeme lexbuf)) + +let parse_comparator str = + let lexbuf = Lexing.from_string str in + try Semver_parser.comparator Semver_lexer.token lexbuf + with Parsing.Parse_error -> raise (Semver_parse_error (Lexing.lexeme lexbuf)) + +let parse_range str = + let lexbuf = Lexing.from_string str in + try Semver_parser.range Semver_lexer.token lexbuf + with Parsing.Parse_error -> raise (Semver_parse_error (Lexing.lexeme lexbuf)) + +let tests = + "parser" + >::: [ + ( "version_basics" + >:: fun ctxt -> + Semver_version.( + let cases = + [ + ("0", zero); + ("0.1", { zero with minor = 1 }); + ("1", { zero with major = 1 }); + ("1.2", { zero with major = 1; minor = 2 }); + ("1.2.3", { zero with major = 1; minor = 2; patch = 3 }); + ( "1.2.3-alpha", + { zero with major = 1; minor = 2; patch = 3; prerelease = [Str "alpha"] } ); + ( "1.2.3-alpha.2", + { zero with major = 1; minor = 2; patch = 3; prerelease = [Str "alpha"; Int 2] } + ); + ] + in + List.iter + (fun (str, version) -> + try assert_equal ~ctxt ~printer:to_string version (parse_version str) + with Semver_parse_error token -> + assert_failure ("Failed to parse " ^ str ^ ": unexpected token " ^ token)) + cases; + assert_bool "done" true) ) + (* fixes ounit error reporting *); + ( "comparator_basics" + >:: fun ctxt -> + Semver_comparator.( + let v1 = Semver_version.{ zero with major = 1 } in + let cases = + [ + (">1", { op = Some Greater; version = v1 }); + (">=1", { op = Some GreaterOrEqual; version = v1 }); + ("<1", { op = Some Less; version = v1 }); + ("<=1", { op = Some LessOrEqual; version = v1 }); + ("=1", { op = Some Equal; version = v1 }); + ("1", { op = None; version = v1 }); + ("= 1", { op = Some Equal; version = v1 }); + (" = 1", { op = Some Equal; version = v1 }); + (" = 1 ", { op = Some Equal; version = v1 }); + ] + in + List.iter + (fun (str, comparator) -> + try assert_equal ~ctxt ~printer:to_string comparator (parse_comparator str) + with Semver_parse_error token -> + assert_failure ("Failed to parse " ^ str ^ ": unexpected token " ^ token)) + cases; + assert_bool "done" true) ) + (* fixes ounit error reporting *); + ( "range_basics" + >:: fun ctxt -> + Semver_range.( + let v1 = Semver_version.{ zero with major = 1 } in + let v2 = Semver_version.{ zero with major = 2 } in + let ge1 = Comparator Semver_comparator.{ op = Some GreaterOrEqual; version = v1 } in + let lt2 = Comparator Semver_comparator.{ op = Some Less; version = v2 } in + let cases = + [ + (">=1", [ge1]); + (">=1 <2", [ge1; lt2]); + ("^1", [Caret v1]); + ("^1.0", [Caret v1]); + ("^1.0.0", [Caret v1]); + ("^1 ^2", [Caret v1; Caret v2]); + (">=1 ^2", [ge1; Caret v2]); + ] + in + List.iter + (fun (str, range) -> + try assert_equal ~ctxt ~printer:to_string range (parse_range str) + with Semver_parse_error token -> + assert_failure ("Failed to parse " ^ str ^ ": unexpected token " ^ token)) + cases; + assert_bool "done" true) ); + (* fixes ounit error reporting *) + + ] diff --git a/src/common/semver/__tests__/semver_range_test.ml b/src/common/semver/__tests__/semver_range_test.ml new file mode 100644 index 00000000000..8e32e632eab --- /dev/null +++ b/src/common/semver/__tests__/semver_range_test.ml @@ -0,0 +1,141 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let v0_0_1 = Semver_version.{ zero with major = 0; minor = 0; patch = 1 } + +let v0_0_2 = Semver_version.{ zero with major = 0; minor = 0; patch = 2 } + +let v0_1_0 = Semver_version.{ zero with major = 0; minor = 1; patch = 0 } + +let v0_1_0_alpha_2 = Semver_version.{ v0_1_0 with prerelease = [Str "alpha"; Int 2] } + +let v0_1_2 = Semver_version.{ zero with major = 0; minor = 1; patch = 2 } + +let v0_2_0 = Semver_version.{ zero with major = 0; minor = 2; patch = 0 } + +let v0_2_0_alpha_2 = Semver_version.{ v0_2_0 with prerelease = [Str "alpha"; Int 2] } + +let v1 = Semver_version.{ zero with major = 1 } + +let v1_2_0 = Semver_version.{ zero with major = 1; minor = 2; patch = 0 } + +let v1_2_3 = Semver_version.{ zero with major = 1; minor = 2; patch = 3 } + +let v1_2_3_alpha_3 = + Semver_version.{ zero with major = 1; minor = 2; patch = 3; prerelease = [Str "alpha"; Int 3] } + +let v1_2_3_alpha_7 = + Semver_version.{ zero with major = 1; minor = 2; patch = 3; prerelease = [Str "alpha"; Int 7] } + +let v1_2_4 = Semver_version.{ zero with major = 1; minor = 2; patch = 4 } + +let v2 = Semver_version.{ zero with major = 2 } + +let v3_4_5_alpha_9 = + Semver_version.{ zero with major = 3; minor = 4; patch = 5; prerelease = [Str "alpha"; Int 9] } + +let ge version = Semver_comparator.{ op = Some GreaterOrEqual; version } + +let lt version = Semver_comparator.{ op = Some Less; version } + +let string_of_comparators comparators = + comparators |> List.map Semver_comparator.to_string |> String.concat " " + +let assert_satisfies ~ctxt ?include_prereleases range version expected = + let msg = + Printf.sprintf + "Expected %s %sto satisfy %s" + (Semver_version.to_string version) + ( if expected then + "" + else + "NOT " ) + (Semver_range.to_string range) + in + assert_equal ~ctxt ~msg expected (Semver_range.satisfies ?include_prereleases range version) + +let tests = + "range" + >::: [ + ( "comparators_of_range" + >:: fun ctxt -> + Semver_range.( + let cases = + [ + ([Caret v1], [ge v1; lt v2]); + ([Caret v1_2_0], [ge v1_2_0; lt v2]); + ([Caret v1_2_3], [ge v1_2_3; lt v2]); + (* when major = 0, minor acts like the major version *) + ([Caret v0_1_0], [ge v0_1_0; lt v0_2_0]); + ([Caret v0_1_2], [ge v0_1_2; lt v0_2_0]); + (* when major = 0 and minor = 0, patch acts like the major version *) + ([Caret v0_0_1], [ge v0_0_1; lt v0_0_2]); + ] + in + List.iter + (fun (input, expected) -> + assert_equal + ~ctxt + ~printer:string_of_comparators + expected + (comparators_of_range input)) + cases; + assert_bool "done" true) ) + (* fixes ounit error reporting *); + ( "satisfies" + >:: fun ctxt -> + Semver_range.( + let cases = + [ + ([Caret v1], v1, true); + ([Caret v1], v2, false); + ([Comparator (ge v1_2_3_alpha_3)], v1_2_3_alpha_7, true); + ([Comparator (ge v1_2_3_alpha_3)], v1_2_4, true); + (* only range has prerelease *) + ([Comparator (ge v1_2_3_alpha_3)], v3_4_5_alpha_9, false); + (* prereleases from diff versions *) + ([Caret v1_2_3_alpha_7], v1_2_3_alpha_3, false); + ([Caret v1_2_3_alpha_7], v1_2_3_alpha_7, true); + ([Caret v1_2_3_alpha_7], v1_2_3, true); + ([Caret v1_2_3_alpha_7], v1_2_4, true); + ([Caret v0_1_0], v0_1_0_alpha_2, false); + ([Caret v0_1_0], v0_1_0, true); + ([Caret v0_1_0], v0_1_2, true); + ([Caret v0_1_0], v0_2_0_alpha_2, false); + ([Caret v0_1_0], v0_2_0, false); + ] + in + List.iter + (fun (range, version, expected) -> assert_satisfies 
~ctxt range version expected) + cases; + assert_bool "done" true) ) + (* fixes ounit error reporting *); + ( "satisfies_includes_prereleases" + >:: fun ctxt -> + Semver_range.( + let cases = + [ + ([Comparator (ge v1_2_3)], v3_4_5_alpha_9, true); + (* only version has prerelease *) + ([Comparator (ge v1_2_3_alpha_3)], v1_2_4, true); + (* only range has prerelease *) + ([Comparator (ge v1_2_3_alpha_3)], v3_4_5_alpha_9, true); + (* prereleases from diff versions *) + + ] + in + let include_prereleases = true in + List.iter + (fun (range, version, expected) -> + assert_satisfies ~ctxt ~include_prereleases range version expected) + cases; + assert_bool "done" true) ); + (* fixes ounit error reporting *) + + ] diff --git a/src/common/semver/__tests__/semver_tests.ml b/src/common/semver/__tests__/semver_tests.ml new file mode 100644 index 00000000000..7213f5adad3 --- /dev/null +++ b/src/common/semver/__tests__/semver_tests.ml @@ -0,0 +1,37 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = + "semver" + >::: [ + "satisfies" + >::: [ + ( "greater_than_major" + >:: fun ctxt -> + let cases = + [ + (">2", "2", false); + (">2", "2.0", false); + (">2", "2.1", true); + (">2", "3", true); + (">2", "3.0", true); + (">2", "11.0", true); + ] + in + List.iter + (fun (range, version, satisfies) -> + assert_equal ~ctxt satisfies (Semver.satisfies range version)) + cases ); + ]; + Semver_parser_test.tests; + Semver_range_test.tests; + Semver_version_test.tests; + ] + +let () = run_test_tt_main tests diff --git a/src/common/semver/__tests__/semver_version_test.ml b/src/common/semver/__tests__/semver_version_test.ml new file mode 100644 index 00000000000..e436ede0a6c --- /dev/null +++ b/src/common/semver/__tests__/semver_version_test.ml @@ -0,0 +1,81 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 +open Semver_version + +let v major minor patch prerelease build = { major; minor; patch; prerelease; build } + +let v1_0_0_alpha = v 1 0 0 [Str "alpha"] [] + +let v1_0_0_alpha_1 = v 1 0 0 [Str "alpha"; Int 1] [] + +let v1_0_0_alpha_beta = v 1 0 0 [Str "alpha"; Str "beta"] [] + +let v1_0_0_beta = v 1 0 0 [Str "beta"] [] + +let v1_0_0_beta_2 = v 1 0 0 [Str "beta"; Int 2] [] + +let v1_0_0_beta_11 = v 1 0 0 [Str "beta"; Int 11] [] + +let v1_0_0_rc_1 = v 1 0 0 [Str "rc"; Int 1] [] + +let v1_0_0 = v 1 0 0 [] [] + +let rec iter_pairs f = function + | [] -> () + | [_] -> () + | a :: b :: rest -> + f a b; + iter_pairs f (b :: rest) + +let tests = + "version" + >::: [ + "compare_precedence" + >::: [ + ( "prerelease" + >:: fun _ctxt -> + let ordered = + [ + v1_0_0_alpha; + v1_0_0_alpha_1; + v1_0_0_alpha_beta; + v1_0_0_beta; + v1_0_0_beta_2; + v1_0_0_beta_11; + v1_0_0_rc_1; + v1_0_0; + ] + in + iter_pairs + (fun a b -> + let a_str = to_string a in + let b_str = to_string b in + assert_bool (a_str ^ " < " ^ b_str ^ " failed") (compare_precedence a b < 0); + assert_bool (b_str ^ " > " ^ a_str ^ " failed") (compare_precedence b a > 0)) + ordered; + List.iter + (fun a -> + let a_str = to_string a in + assert_bool (a_str ^ " not equal to itself") (compare_precedence a a = 0)) + ordered ); + ( "build" + >:: fun _ctxt -> + let a = v 1 0 0 [] [Int 1] in + let b = v 1 0 0 [] [Int 2] in + assert_bool "1.0.0+1 should be = 1.0.0+2" (compare_precedence a b = 0) ); + ]; + "compare" + >::: [ + ( "build" + >:: fun _ctxt -> + let a = v 1 0 0 [] [Int 1] in + let b = v 1 0 0 [] [Int 2] in + assert_bool "1.0.0+1 should NOT be = 1.0.0+2" (compare a b < 0) ); + ]; + ] diff --git a/src/common/semver/cli/main.ml b/src/common/semver/cli/main.ml new file mode 100644 index 00000000000..f92fb6eee47 --- /dev/null +++ b/src/common/semver/cli/main.ml @@ -0,0 +1,70 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type args = { + loose: bool; + ranges: Semver_range.t list; + verbose: bool; + versions: string list; +} + +let parse_range_arg range = + (try Semver.range_of_string range with Semver.Parse_error msg -> raise (Arg.Bad msg)) + +let parse_args () = + let loose = ref false in + let ranges = ref [] in + let verbose = ref false in + let rev_versions = ref [] in + let speclist = + [ + ("-l", Arg.Set loose, "Use \"loose\" parsing, allowing versions to start with \"v\""); + ( "-r", + Arg.String (fun r -> ranges := parse_range_arg r :: !ranges), + "Print versions that match this range (if passed multiple times, must pass all ranges)." 
); + ("-v", Arg.Set verbose, "Enables verbose mode"); + ] + in + let usage_msg = "Usage: semver [options] [ [...]]\n\nOptions:" in + Arg.parse speclist (fun version -> rev_versions := version :: !rev_versions) usage_msg; + { + loose = !loose; + ranges = List.rev !ranges; + verbose = !verbose; + versions = List.rev !rev_versions; + } + +let main () = + let { loose; ranges; verbose; versions = version_strs } = parse_args () in + let rev_versions = + List.fold_left + (fun acc str -> + let str = + let len = String.length str in + if loose && len > 1 && str.[0] = 'v' then + String.sub str 1 (len - 1) + else + str + in + try Semver.version_of_string str :: acc + with Semver.Parse_error msg -> + if verbose then prerr_endline msg; + acc) + [] + version_strs + in + let rev_versions = + List.filter + (fun version -> List.for_all (fun range -> Semver_range.satisfies range version) ranges) + rev_versions + in + if rev_versions = [] then exit 1; + + let sorted = List.stable_sort Semver_version.compare_precedence rev_versions in + List.iter (fun ver -> print_endline (Semver_version.to_string ver)) sorted + +let () = main () diff --git a/src/common/semver/dune b/src/common/semver/dune new file mode 100644 index 00000000000..f8240d09b86 --- /dev/null +++ b/src/common/semver/dune @@ -0,0 +1,7 @@ +(library + (name semver) + (wrapped false) +) + +(ocamlyacc semver_parser) +(ocamllex semver_lexer) diff --git a/src/common/semver/semver.ml b/src/common/semver/semver.ml new file mode 100644 index 00000000000..e65402479d0 --- /dev/null +++ b/src/common/semver/semver.ml @@ -0,0 +1,45 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(** + Basic semantic version parser, as defined by http://semver.org/ + + So far, this implementation only supports individual versions; intersection + ranges (e.g. ">=0.13.0 <0.14.0", which are ANDed together); and caret ranges, + which allow changes that do not modify the left-most non-zero digit (e.g. + "^0.13" expands into ">=0.13.0 <0.14.0", and "^0.13.1" expands into + ">=0.13.1 <0.14.0", whereas "^1.2.3" expands into ">=1.2.3 <2.0.0"). + + Further support for features like "||" ("1.2.3 || 1.2.5"), hyphen ranges + ("1.2 - 1.3"), X-ranges ("1.2.x" or "1.2.*"), tilde ranges ("~1.2"), and + pre-release/build identifiers ("1.2.3-beta.1"), will be added as necessary. 
+ **) + +exception Parse_error of string + +let version_of_string str = + let lexbuf = Lexing.from_string str in + try Semver_parser.version Semver_lexer.token lexbuf + with Parsing.Parse_error -> raise (Parse_error ("Invalid version number: " ^ str)) + +let range_of_string str = + let lexbuf = Lexing.from_string str in + try Semver_parser.range Semver_lexer.token lexbuf + with Parsing.Parse_error -> raise (Parse_error ("Invalid range: " ^ str)) + +let is_valid_range range = + try + let _ = range_of_string range in + true + with Parse_error _ -> false + +let satisfies ?include_prereleases (range : string) (version : string) = + let range = range_of_string range in + let version = version_of_string version in + Semver_range.satisfies ?include_prereleases range version + +let compare a b = Semver_version.compare_precedence (version_of_string a) (version_of_string b) diff --git a/src/common/semver.mli b/src/common/semver/semver.mli similarity index 81% rename from src/common/semver.mli rename to src/common/semver/semver.mli index eddcb4129d7..f4aa174ad83 100644 --- a/src/common/semver.mli +++ b/src/common/semver/semver.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -26,5 +26,12 @@ exception Parse_error of string +val version_of_string : string -> Semver_version.t + +val range_of_string : string -> Semver_range.t + val is_valid_range : string -> bool -val satisfies : string -> string -> bool + +val satisfies : ?include_prereleases:bool -> string -> string -> bool + +val compare : string -> string -> int diff --git a/src/common/semver/semver_comparator.ml b/src/common/semver/semver_comparator.ml new file mode 100644 index 00000000000..a13d066b40d --- /dev/null +++ b/src/common/semver/semver_comparator.ml @@ -0,0 +1,44 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type op = + | Greater + | GreaterOrEqual + | Less + | LessOrEqual + | Equal + +type t = { + op: op option; + version: Semver_version.t; +} + +let string_of_op = function + | Greater -> ">" + | GreaterOrEqual -> ">=" + | Less -> "<" + | LessOrEqual -> "<=" + | Equal -> "=" + +let to_string { op; version } = + let op = + match op with + | Some op -> string_of_op op + | None -> "" + in + op ^ Semver_version.to_string version + +let satisfies version { op; version = range } = + let result = Semver_version.compare_precedence version range in + match op with + | Some Greater -> result > 0 + | Some GreaterOrEqual -> result >= 0 + | Some Less -> result < 0 + | Some LessOrEqual -> result <= 0 + | Some Equal + | None -> + result = 0 diff --git a/src/common/semver/semver_lexer.mll b/src/common/semver/semver_lexer.mll new file mode 100644 index 00000000000..6b6c6c27053 --- /dev/null +++ b/src/common/semver/semver_lexer.mll @@ -0,0 +1,36 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +{ +open Semver_parser +} + +let number = '0'|['1'-'9'](['0'-'9'])* +let identifier_start = ['0'-'9' 'A'-'Z' 'a'-'z'] +let identifier = identifier_start (identifier_start | '_')* + +rule token = parse + [' ' '\t'] { + (* skip blanks *) + token lexbuf + } +| number as nr { + NR nr + } +| identifier as id { + ID id + } +| '-' { HYPHEN } +| '+' { PLUS } +| '.' { DOT } +| '<' { LT } +| '<''=' { LTE } +| '>' { GT } +| '>''=' { GTE } +| '=' { EQ } +| '^' { CARET } +| eof { EOF } diff --git a/src/common/semver/semver_parser.mly b/src/common/semver/semver_parser.mly new file mode 100644 index 00000000000..5a5ac3b54e6 --- /dev/null +++ b/src/common/semver/semver_parser.mly @@ -0,0 +1,87 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +%token EOF +%token DOT HYPHEN PLUS +%token LT LTE GT GTE EQ +%token CARET +%token ID +%token NR + +%start version comparator range +%type version +%type comparator +%type range +%% +version: + number part part prerelease build { + Semver_version.({ + major = $1; + minor = $2; + patch = $3; + prerelease = $4; + build = $5; + }) + } +; + +comparator: + op version { + Semver_comparator.({ + op = $1; + version = $2; + }) + } +; + +number: + NR { int_of_string $1 } + +part: +/* empty */ { 0 } +| DOT number { $2 } +; + +prerelease: +/* empty */ { [] } +| HYPHEN identifier_list { $2 } +; + +build: +/* empty */ { [] } +| PLUS identifier_list { $2 } +; + +identifier_list: + identifier_part { [$1] } +| identifier_part DOT identifier_list { $1::$3 } +; + +identifier_part: + ID { Semver_version.Str $1 } +| NR { Semver_version.Int (int_of_string $1) } + +op: +/* empty */ { None } +| LT { Some Semver_comparator.Less } +| LTE { Some Semver_comparator.LessOrEqual } +| GT { Some Semver_comparator.Greater } +| GTE { Some Semver_comparator.GreaterOrEqual } +| EQ { Some Semver_comparator.Equal } +; + +/* RANGES */ + +range: + range_part { [$1] } +| range_part range { $1::$2 } +; + +range_part: + comparator { Semver_range.Comparator $1 } +| CARET version { Semver_range.Caret $2 } +; diff --git a/src/common/semver/semver_range.ml b/src/common/semver/semver_range.ml new file mode 100644 index 00000000000..f8e3552c54d --- /dev/null +++ b/src/common/semver/semver_range.ml @@ -0,0 +1,94 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type part = + | Comparator of Semver_comparator.t + | Caret of Semver_version.t + +(* TODO: support unions (`||`), like this: +type comparator_set = part list +type t = comparator_set list +*) +type t = part list + +let expand_caret version = + Semver_version.( + let upper = + match version with + | { major = 0; minor = 0; _ } -> incr_patch version + | { major = 0; _ } -> incr_minor version + | _ -> incr_major version + in + [ + Semver_comparator.{ op = Some GreaterOrEqual; version }; + Semver_comparator.{ op = Some Less; version = upper }; + ]) + +let fold_comparators_of_range f acc t = + List.fold_left + (fun acc part -> + match part with + | Comparator c -> f acc c + | Caret v -> List.fold_left f acc (expand_caret v)) + acc + t + +let comparators_of_range t : Semver_comparator.t list = + let comparators = fold_comparators_of_range (fun acc comp -> comp :: acc) [] t in + List.rev comparators + +(* Determines if the version is matched by the range. 
+ * + * If the range and the version both have a prerelease, then they must be for the same + * version (major, minor, patch). for example, `>1.2.3-alpha` matches `1.2.3-beta` and + * `1.2.4`, but not `1.2.4-alpha`. this is so that opting into one prerelease version + * does not also opt you into all future prereleases. this behavior can be overridden + * with `~include_prereleases:true`. + *) +let satisfies ?(include_prereleases = false) range version = + Semver_version.( + Semver_comparator.( + let satisfied = + fold_comparators_of_range + (fun acc comp -> + if not acc then + acc + else + Semver_comparator.satisfies version comp) + true + range + in + if not satisfied then + false + else + let { major; minor; patch; prerelease; build = _ } = version in + if prerelease = [] || include_prereleases then + true + else + fold_comparators_of_range + (fun acc { version = allowed; op = _ } -> + if acc then + acc + else + match allowed with + | { + major = major'; + minor = minor'; + patch = patch'; + prerelease = _ :: _; + build = _; + } -> + major = major' && minor = minor' && patch = patch' + | _ -> false) + false + range)) + +let string_of_part = function + | Comparator c -> Semver_comparator.to_string c + | Caret ver -> "^" ^ Semver_version.to_string ver + +let to_string t = t |> List.map string_of_part |> String.concat " " diff --git a/src/common/semver/semver_version.ml b/src/common/semver/semver_version.ml new file mode 100644 index 00000000000..d7ec84f4079 --- /dev/null +++ b/src/common/semver/semver_version.ml @@ -0,0 +1,120 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type identifier = + | Str of string + | Int of int + +type t = { + major: int; + minor: int; + patch: int; + prerelease: identifier list; + build: identifier list; +} + +let zero = { major = 0; minor = 0; patch = 0; prerelease = []; build = [] } + +let compare_identifiers a b = + match (a, b) with + | (Int _, Str _) -> -1 + | (Str _, Int _) -> 1 + | (Int a, Int b) -> a - b + | (Str a, Str b) -> String.compare a b + +let compare_identifier_lists = + let rec compare_sets a b = + match (a, b) with + | ([], []) -> 0 + | (_ :: _, []) -> 1 (* a more specific prerelease is greater than one with fewer parts *) + | ([], _ :: _) -> -1 + | (a_hd :: a_tl, b_hd :: b_tl) -> + let k = compare_identifiers a_hd b_hd in + if k <> 0 then + k + else + compare_sets a_tl b_tl + in + fun a b -> + match (a, b) with + | ([], []) -> 0 + | (_ :: _, []) -> -1 (* being a prerelease is less than not being a prerelease *) + | ([], _ :: _) -> 1 + | (_, _) -> compare_sets a b + +(* Compares the precedence of two versions + * + * NOTE: build identifiers are NOT included in precedence! this is the difference vs `compare` + * + * From the spec: + * Precedence refers to how versions are compared to each other when ordered. Precedence MUST + * be calculated by separating the version into major, minor, patch and pre-release identifiers + * in that order (Build metadata does not figure into precedence). Precedence is determined by + * the first difference when comparing each of these identifiers from left to right as follows: + * Major, minor, and patch versions are always compared numerically. + * Example: 1.0.0 < 2.0.0 < 2.1.0 < 2.1.1 + * + * When major, minor, and patch are equal, a pre-release version has lower precedence than a + * normal version. Example: 1.0.0-alpha < 1.0.0. 
Precedence for two pre-release versions with + * the same major, minor, and patch version MUST be determined by comparing each dot separated + * identifier from left to right until a difference is found as follows: identifiers consisting + * of only digits are compared numerically and identifiers with letters or hyphens are compared + * lexically in ASCII sort order. Numeric identifiers always have lower precedence than + * non-numeric identifiers. A larger set of pre-release fields has a higher precedence than a + * smaller set, if all of the preceding identifiers are equal. + * Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < 1.0.0-beta < 1.0.0-beta.2 < + * 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0 + *) +let compare_precedence = + let compare_ints a b () = a - b in + let compare_pre a b () = compare_identifier_lists a b in + let ( >>= ) k f = + if k <> 0 then + k + else + f () + in + fun { major = a_major; minor = a_minor; patch = a_patch; prerelease = a_pre; build = _ } + { major = b_major; minor = b_minor; patch = b_patch; prerelease = b_pre; build = _ } -> + ( 0 + >>= compare_ints a_major b_major + >>= compare_ints a_minor b_minor + >>= compare_ints a_patch b_patch + >>= compare_pre a_pre b_pre + : int ) + +let compare a b = + let k = compare_precedence a b in + if k <> 0 then + k + else + let { build = a_build; _ } = a in + let { build = b_build; _ } = b in + compare_identifier_lists a_build b_build + +let incr_major { major; _ } = { zero with major = succ major } + +let incr_minor { major; minor; _ } = { zero with major; minor = succ minor } + +let incr_patch { major; minor; patch; _ } = { zero with major; minor; patch = succ patch } + +let string_of_identifier = function + | Int x -> string_of_int x + | Str x -> x + +let to_string { major; minor; patch; prerelease; build } = + let prerelease = + match prerelease with + | [] -> "" + | parts -> "-" ^ (parts |> List.map string_of_identifier |> String.concat ".") + in + let build = + match build with + | [] -> "" + | parts -> "+" ^ (parts |> List.map string_of_identifier |> String.concat ".") + in + Printf.sprintf "%d.%d.%d%s%s" major minor patch prerelease build diff --git a/src/common/span/dune b/src/common/span/dune new file mode 100644 index 00000000000..8034f004e06 --- /dev/null +++ b/src/common/span/dune @@ -0,0 +1,6 @@ +(library + (name flow_common_span) + (wrapped false) + (libraries + collections ; hack + flow_parser)) diff --git a/src/common/span/spanMap.ml b/src/common/span/spanMap.ml index 957dc39dbf5..38ecb166cd0 100644 --- a/src/common/span/spanMap.ml +++ b/src/common/span/spanMap.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,6 +7,7 @@ module SpanMap = MyMap.Make (struct type t = Loc.t + let compare l0 l1 = Loc.span_compare l1 l0 end) diff --git a/src/common/span/spanMap.mli b/src/common/span/spanMap.mli index b4899db8ee8..9d2bb392360 100644 --- a/src/common/span/spanMap.mli +++ b/src/common/span/spanMap.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
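A minimal usage sketch for the semver API added above (illustrative only, not part of the patch; it assumes the string-level interface declared in semver.mli and the caret/prerelease behavior documented in semver.ml and semver_range.ml):

(* Not part of the diff: a sketch exercising Semver.satisfies and Semver.compare. *)
let () =
  (* "^0.13.1" expands to ">=0.13.1 <0.14.0" per the docstring in semver.ml. *)
  assert (Semver.satisfies "^0.13.1" "0.13.5");
  assert (not (Semver.satisfies "^0.13.1" "0.14.0"));
  (* A prerelease version only satisfies a range that itself names a prerelease
     of the same major.minor.patch, unless ~include_prereleases:true is passed. *)
  assert (not (Semver.satisfies ">=1.2.3" "3.4.5-alpha.9"));
  assert (Semver.satisfies ~include_prereleases:true ">=1.2.3" "3.4.5-alpha.9");
  (* Semver.compare uses precedence, which ignores build metadata. *)
  assert (Semver.compare "1.0.0+1" "1.0.0+2" = 0)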
diff --git a/src/common/tarjan/dune b/src/common/tarjan/dune new file mode 100644 index 00000000000..3b9cca1ab3e --- /dev/null +++ b/src/common/tarjan/dune @@ -0,0 +1,8 @@ +(library + (name flow_common_tarjan) + (wrapped false) + (libraries + flow_common + collections ; hack + ) +) diff --git a/src/common/tarjan/tarjan.ml b/src/common/tarjan/tarjan.ml index 27672825f8f..f9b91f7b325 100644 --- a/src/common/tarjan/tarjan.ml +++ b/src/common/tarjan/tarjan.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,135 +12,103 @@ module type NODE = sig type t - val compare: t -> t -> int - val to_string: t -> string + + val compare : t -> t -> int + + val to_string : t -> string end -module Make - (N: NODE) - (NMap: MyMap.S with type key = N.t) - (NSet: Set.S with type elt = N.t) = struct +module Make (N : NODE) (NMap : MyMap.S with type key = N.t) (NSet : Set.S with type elt = N.t) = +struct + type node = { + value: N.t; + edges: NSet.t; + (* visit order, -1 if unvisited *) + mutable index: int; + (* back edge to earliest visited node, -1 when unvisited *) + mutable lowlink: int; + mutable on_stack: bool; + } - (** Nodes are N.t. Edges are dependencies. **) type topsort_state = { - graph: NSet.t NMap.t; - (* nodes not yet visited *) - mutable not_yet_visited: NSet.t; + graph: node NMap.t; (* number of nodes visited *) mutable visit_count: int; - (* visit ordering *) - indices: (N.t, int) Hashtbl.t; (* nodes in a strongly connected component *) - mutable stack: N.t list; - mem_stack: (N.t, bool) Hashtbl.t; - (* back edges to earliest visited nodes *) - lowlinks: (N.t, int) Hashtbl.t; - (* components *) + mutable stack: node list; + (* accumulated components *) mutable components: N.t Nel.t list; } + (** Nodes are N.t. Edges are dependencies. **) - let initial_state ~roots graph = { - graph; - not_yet_visited = roots; - visit_count = 0; - indices = Hashtbl.create 0; - stack = []; - mem_stack = Hashtbl.create 0; - lowlinks = Hashtbl.create 0; - components = []; - } + let initial_state graph = + let graph = + NMap.mapi + (fun value edges -> { value; edges; index = -1; lowlink = -1; on_stack = false }) + graph + in + { graph; visit_count = 0; stack = []; components = [] } (* Compute strongly connected component for node m with requires rs. 
*) - let rec strongconnect state m rs = + let rec strongconnect state v = let i = state.visit_count in state.visit_count <- i + 1; - (* visit m *) - Hashtbl.replace state.indices m i; - state.not_yet_visited <- NSet.remove m state.not_yet_visited; + (* visit node *) + assert (v.index = -1); + v.index <- i; + v.lowlink <- i; (* push on stack *) - state.stack <- m :: state.stack; - Hashtbl.replace state.mem_stack m true; - - (* initialize lowlink *) - let lowlink = ref i in - - (* for each require r in rs: *) - rs |> NSet.iter (fun r -> - if Hashtbl.mem state.indices r - then begin - if (Hashtbl.find state.mem_stack r) then - (** either back edge, or cross edge where strongly connected component - is not yet complete **) - (* update lowlink with index of r *) - let index_r = Hashtbl.find state.indices r in - lowlink := min !lowlink index_r - end else match NMap.get r state.graph with - | Some rs_ -> - (* recursively compute strongly connected component of r *) - strongconnect state r rs_; - - (* update lowlink with that of r *) - let lowlink_r = Hashtbl.find state.lowlinks r in - lowlink := min !lowlink lowlink_r - - | None -> () - ); - - Hashtbl.replace state.lowlinks m !lowlink; - if (!lowlink = i) then + state.stack <- v :: state.stack; + v.on_stack <- true; + + (* for each edge e: + If the edge has not yet been visited, recurse in a depth-first manner. + If the edge has been visited, it is a back-edge iff it is on the stack, + otherwise it's a cross-edge and can be ignored. *) + v.edges + |> NSet.iter (fun e -> + let w = NMap.find_unsafe e state.graph in + if w.index = -1 then ( + strongconnect state w; + v.lowlink <- min v.lowlink w.lowlink + ) else if w.on_stack then + v.lowlink <- min v.lowlink w.index); + + if v.lowlink = v.index then (* strongly connected component *) - let c = component state m in - state.components <- (m, c) :: state.components + let c = component state v in + state.components <- (v.value, c) :: state.components - (* Return component strongly connected to m. *) - and component state m = + (* Return component strongly connected to v. *) + and component state v = (* pop stack until m is found *) - let m_ = List.hd state.stack in + let w = List.hd state.stack in state.stack <- List.tl state.stack; - Hashtbl.replace state.mem_stack m_ false; - if (m = m_) then [] - else m_ :: (component state m) + w.on_stack <- false; + if v.value = w.value then + [] + else + w.value :: component state v (** main loop **) - let tarjan state = - while not (NSet.is_empty state.not_yet_visited) do - (* choose a node, compute its strongly connected component *) - (** NOTE: this choice is non-deterministic, so any computations that depend - on the visit order, such as heights, are in general non-repeatable. **) - let m = NSet.choose state.not_yet_visited in - let rs = NMap.find_unsafe m state.graph in - strongconnect state m rs - done + let tarjan ~roots state = + NSet.iter + (fun x -> + let v = NMap.find_unsafe x state.graph in + if v.index = -1 then strongconnect state v) + roots let topsort ~roots graph = - let state = initial_state ~roots graph in - tarjan state; + let state = initial_state graph in + tarjan ~roots state; state.components let log = List.iter (fun mc -> - (* Show cycles, which are components with more than one node. 
*) - if Nel.length mc > 1 - then - let nodes = mc - |> Nel.to_list - |> List.map N.to_string - |> String.concat "\n\t" - in - Printf.ksprintf prerr_endline - "cycle detected among the following nodes:\n\t%s" nodes - ) - - let reverse nodes = - nodes - |> NMap.map (fun _ -> NSet.empty) - |> NMap.fold (fun from_f -> - NSet.fold (fun to_f rev_nodes -> - let from_fs = NMap.find_unsafe to_f rev_nodes in - NMap.add to_f (NSet.add from_f from_fs) rev_nodes - ) - ) nodes - + (* Show cycles, which are components with more than one node. *) + if Nel.length mc > 1 then + let nodes = mc |> Nel.to_list |> Core_list.map ~f:N.to_string |> String.concat "\n\t" in + Printf.ksprintf prerr_endline "cycle detected among the following nodes:\n\t%s" nodes) end diff --git a/src/common/tarjan/tarjan.mli b/src/common/tarjan/tarjan.mli index 90f2f1dc83f..dd4a3220b78 100644 --- a/src/common/tarjan/tarjan.mli +++ b/src/common/tarjan/tarjan.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,19 +7,18 @@ module type NODE = sig type t - val compare: t -> t -> int - val to_string: t -> string + + val compare : t -> t -> int + + val to_string : t -> string end -module Make - (N: NODE) - (NMap: MyMap.S with type key = N.t) - (NSet: Set.S with type elt = N.t) : sig +module Make (N : NODE) (NMap : MyMap.S with type key = N.t) (NSet : Set.S with type elt = N.t) : sig (* given a map from keys to dependencies, returns whether the dependencies are cyclic, as well as a topologically sorted list of key lists where any keys in a list only depend on keys in a subsequent list *) - val topsort: roots:NSet.t -> NSet.t NMap.t -> N.t Nel.t list - val log: N.t Nel.t list -> unit - val reverse: NSet.t NMap.t -> NSet.t NMap.t + val topsort : roots:NSet.t -> NSet.t NMap.t -> N.t Nel.t list + + val log : N.t Nel.t list -> unit end diff --git a/src/common/transaction/dune b/src/common/transaction/dune new file mode 100644 index 00000000000..df70c3ae111 --- /dev/null +++ b/src/common/transaction/dune @@ -0,0 +1,9 @@ +(library + (name flow_common_transaction) + (wrapped false) + (libraries + collections + lwt + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/common/transaction/transaction.ml b/src/common/transaction/transaction.ml index dba4296152e..7df1b59f573 100644 --- a/src/common/transaction/transaction.ml +++ b/src/common/transaction/transaction.ml @@ -1,25 +1,22 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - type mutator = { commit: unit -> unit Lwt.t; rollback: unit -> unit Lwt.t; } -type t = { - mutable mutators: mutator list -} +type t = { mutable mutators: mutator list } let singleton_mutators = ref SSet.empty let add_singleton name = - if SSet.mem name !singleton_mutators - then failwith (Printf.sprintf "Mutator %S is a singleton. Cannot create a second copy" name); + if SSet.mem name !singleton_mutators then + failwith (Printf.sprintf "Mutator %S is a singleton. 
Cannot create a second copy" name); singleton_mutators := SSet.add name !singleton_mutators let remove_singleton name = singleton_mutators := SSet.remove name !singleton_mutators @@ -32,20 +29,24 @@ let remove_singleton name = singleton_mutators := SSet.remove name !singleton_mu * keys, they might interfere with each other. So they can register themselves as singletons and * we'll enforce that no two singleton mutators are active at the same time *) let add ?singleton ~commit ~rollback transaction = - let commit, rollback = + let (commit, rollback) = match singleton with | Some singleton_name -> add_singleton singleton_name; - let commit () = remove_singleton singleton_name; commit () in - let rollback () = remove_singleton singleton_name; rollback () in - commit, rollback - | None -> - commit, rollback + let commit () = + remove_singleton singleton_name; + commit () + in + let rollback () = + remove_singleton singleton_name; + rollback () + in + (commit, rollback) + | None -> (commit, rollback) in - transaction.mutators <- { commit; rollback; } :: transaction.mutators + transaction.mutators <- { commit; rollback } :: transaction.mutators -let commit transaction = - Lwt_list.iter_s (fun mutator -> mutator.commit ()) transaction.mutators +let commit transaction = Lwt_list.iter_s (fun mutator -> mutator.commit ()) transaction.mutators let rollback transaction = Lwt_list.iter_s (fun mutator -> mutator.rollback ()) transaction.mutators @@ -53,7 +54,7 @@ let rollback transaction = external reraise : exn -> 'a = "%reraise" let with_transaction f = - let transaction = { mutators = []; } in + let transaction = { mutators = [] } in let%lwt result = try%lwt f transaction with exn -> diff --git a/src/common/transaction/transaction.mli b/src/common/transaction/transaction.mli index bd7dc6719a4..bc92b5b3f7a 100644 --- a/src/common/transaction/transaction.mli +++ b/src/common/transaction/transaction.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,10 +7,7 @@ type t -val add: - ?singleton:string -> - commit:(unit -> unit Lwt.t) -> - rollback:(unit -> unit Lwt.t) -> - t -> - unit -val with_transaction: (t -> 'a Lwt.t) -> 'a Lwt.t +val add : + ?singleton:string -> commit:(unit -> unit Lwt.t) -> rollback:(unit -> unit Lwt.t) -> t -> unit + +val with_transaction : (t -> 'a Lwt.t) -> 'a Lwt.t diff --git a/src/common/ty/__tests__/ty_printer_test.ml b/src/common/ty/__tests__/ty_printer_test.ml new file mode 100644 index 00000000000..d65057ddc32 --- /dev/null +++ b/src/common/ty/__tests__/ty_printer_test.ml @@ -0,0 +1,31 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 + +let tests = + "ty_printer" + >::: [ + ( "type_object_property_get" + >:: fun ctxt -> + let getter = Ty.NamedProp ("foo", Ty.Get (Ty.Str None)) in + let obj = + Ty.Obj + { Ty.obj_exact = true; obj_frozen = false; obj_literal = false; obj_props = [getter] } + in + let str = Ty_printer.string_of_t obj in + assert_equal ~ctxt ~printer:(fun x -> x) "{|get foo(): string|}" str ); + ( "type_object_property_set" + >:: fun ctxt -> + let setter = Ty.NamedProp ("foo", Ty.Set (Ty.Str None)) in + let obj = + Ty.Obj + { Ty.obj_exact = true; obj_frozen = false; obj_literal = false; obj_props = [setter] } + in + let str = Ty_printer.string_of_t obj in + assert_equal ~ctxt ~printer:(fun x -> x) "{|set foo(string): void|}" str ); + ] diff --git a/src/common/ty/__tests__/ty_simplifier_test.ml b/src/common/ty/__tests__/ty_simplifier_test.ml new file mode 100644 index 00000000000..0510a7a932d --- /dev/null +++ b/src/common/ty/__tests__/ty_simplifier_test.ml @@ -0,0 +1,357 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 +open Ty +open Ty_utils + +module UnionSimplification = struct + let tests = + [ + (* + * {f: number} | {f: number} + * ~> + * {f: number} + *) + ( "simplify_union_obj" + >:: fun ctxt -> + let t_in = + Ty.Union + ( Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + (Ty.Num None, { Ty.fld_polarity = Ty.Neutral; fld_optional = false }) + ); + ]; + }, + Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + (Ty.Num None, { Ty.fld_polarity = Ty.Neutral; fld_optional = false }) + ); + ]; + }, + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:true ~sort:false t_in in + let t_exp = + Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field (Ty.Num None, { Ty.fld_polarity = Ty.Neutral; fld_optional = false }) + ); + ]; + } + in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + (* + * {+f: number} | {-f: number} + * ~> + * {+f: number} | {-f: number} + *) + ( "simplify_union_obj" + >:: fun ctxt -> + let t_in = + Ty.Union + ( Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + (Ty.Num None, { Ty.fld_polarity = Ty.Positive; fld_optional = false }) + ); + ]; + }, + Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + (Ty.Num None, { Ty.fld_polarity = Ty.Negative; fld_optional = false }) + ); + ]; + }, + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:true ~sort:false t_in in + let t_exp = t_in in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + ] +end + +module BotAndTopSimplification = struct + let tests = + [ + (* When merge_kinds is true, all kinds of `empty` are equivalent, even when + * nested under a type constructor. 
+ * + * {f: empty} | {f: empty'} + * ~> (merge_kinds:true) + * {f: empty'} + *) + ( "simplify_union_obj_empty_insensitive" + >:: fun ctxt -> + let t_in = + Ty.Union + ( Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + ( Ty.Bot Ty.EmptyType, + { Ty.fld_polarity = Ty.Neutral; fld_optional = false } ) ); + ]; + }, + Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + ( Ty.Bot Ty.EmptyMatchingPropT, + { Ty.fld_polarity = Ty.Neutral; fld_optional = false } ) ); + ]; + }, + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:true ~sort:false t_in in + let t_exp = + Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = + [ + Ty.NamedProp + ( "f", + Ty.Field + ( Ty.Bot Ty.EmptyType, + { Ty.fld_polarity = Ty.Neutral; fld_optional = false } ) ); + ]; + } + in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + (* This tests the conversion `mixed & T -> T` and that `empty' | T` remains + * as is when: + * - `empty'` is not the empty type due to + * + an annotation, or + * + a tvar with no lower and no upper bounds + * - merge_kinds is false + * + * mixed & (empty' | (mixed & (empty'' | number))) + * ~> (merge_kinds:false) + * empty' | empty'' | number + *) + ( "merge_bot_and_any_kinds_sensitive" + >:: fun ctxt -> + let t_in = + Ty.Inter + ( Ty.Top, + Ty.Union + ( Ty.Bot (Ty.EmptyTypeDestructorTriggerT ALoc.none), + Ty.Inter + ( Ty.Top, + Ty.Union + (Ty.Bot (Ty.NoLowerWithUpper (Ty.SomeUnknownUpper "blah")), Ty.Num None, []), + [] ), + [] ), + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:false ~sort:false t_in in + let t_exp = + Ty.Union + ( Ty.Bot (Ty.EmptyTypeDestructorTriggerT ALoc.none), + Ty.Bot (Ty.NoLowerWithUpper (Ty.SomeUnknownUpper "blah")), + [Ty.Num None] ) + in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + (* This tests the conversion `mixed & T -> T` and `empty' | T -> T` when + * merge_kinds is true. + * + * mixed & (empty' | (mixed & (empty'' | number))) + * ~> + * number + *) + ( "merge_bot_and_any_kinds_insensitive" + >:: fun ctxt -> + let t_in = + Ty.Inter + ( Ty.Top, + Ty.Union + ( Ty.Bot (Ty.EmptyTypeDestructorTriggerT ALoc.none), + Ty.Inter + ( Ty.Top, + Ty.Union + (Ty.Bot (Ty.NoLowerWithUpper (Ty.SomeUnknownUpper "blah")), Ty.Num None, []), + [] ), + [] ), + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:true ~sort:false t_in in + let t_exp = Ty.Num None in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + ] +end + +module AnySimplification = struct + open Ty + + (* When merge_kinds is false, we preserve the different kinds of any. + * + * any | (any' & (any & any')) + * ~> + * any | (any' & (any & any')) + *) + let tests = + [ + ( "merge_any_kinds_sensitive" + >:: fun ctxt -> + let t_in = + Union + ( Any (Unsound BoundFunctionThis), + Inter (Any Annotated, Union (Any (Unsound BoundFunctionThis), Ty.Any Annotated, []), []), + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:false t_in in + let t_exp = t_in in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + (* When merge_kinds is true, all kinds of any are considered equal and so + * are merged when appearing in unions or intersections. + * + * any | (any' & (any & any')) + * ~> + * any + * + * The output could also be any'. The kind of the resulting any type when + * merge_kinds is true, is not specified. 
+ *) + ( "merge_any_kinds_insensitive" + >:: fun ctxt -> + let t_in = + Union + ( Any (Unsound BoundFunctionThis), + Inter + (Any Annotated, Union (Any (Unsound BoundFunctionThis), Ty.Any Annotated, []), []), + [] ) + in + let t_out = Ty_utils.simplify_type ~merge_kinds:true t_in in + let t_exp = Any (Unsound BoundFunctionThis) in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + ] +end + +module Sorting = struct + let simplify_base = simplify_type ~merge_kinds:false ~sort:false + + let simplify_sort = simplify_type ~merge_kinds:false ~sort:true + + let t0 = Union (Any Annotated, Num None, [NumLit "42"]) + + let t1 = Union (NumLit "1", NumLit "2", [NumLit "42"]) + + let t2 = Union (NumLit "2", t0, [t1]) + + let t3 = Union (t0, t1, [t2]) + + let t4 = Union (t3, t2, [t1; t0]) + + let t5 = Union (t0, t1, [t2; t3; t4]) + + let t6 = Union (t3, t2, [t4; t0; t1; t5]) + + let t6_sorted = Union (Any Annotated, NumLit "1", [NumLit "2"; NumLit "42"; Num None]) + + let tests = + [ + ( "idempotence" + >:: fun ctxt -> + assert_equal + ~ctxt + ~printer:Ty_printer.string_of_t + (simplify_base t0) + (simplify_base (simplify_base t0)); + assert_equal + ~ctxt + ~printer:Ty_printer.string_of_t + (simplify_base t6) + (simplify_base (simplify_base (simplify_base t6))); + assert_equal + ~ctxt + ~printer:Ty_printer.string_of_t + (simplify_sort t4) + (simplify_sort (simplify_sort t4)); + assert_equal + ~ctxt + ~printer:Ty_printer.string_of_t + (simplify_sort t6) + (simplify_sort (simplify_sort (simplify_sort t6))) ); + ( "sorting" + >:: fun ctxt -> + assert_equal ~ctxt ~printer:Ty_printer.string_of_t t6_sorted (simplify_sort t6) ); + ( "union/intersection" + >:: fun ctxt -> + let t_in = + Inter + ( Union + ( Void, + Inter (Void, Any Annotated, [NumLit "1"]), + [Inter (NumLit "1", Any Annotated, [Void])] ), + Union (Inter (Any Annotated, Void, [NumLit "1"]), Void, []), + [] ) + in + let t_out = simplify_sort t_in in + let t_exp = Union (Void, Inter (Any Annotated, Void, [NumLit "1"]), []) in + assert_equal ~ctxt ~printer:Ty.show t_exp t_out ); + ] +end + +let tests = + "ty_simplifier" + >::: UnionSimplification.tests + @ BotAndTopSimplification.tests + @ AnySimplification.tests + @ Sorting.tests diff --git a/src/common/ty/__tests__/ty_tests.ml b/src/common/ty/__tests__/ty_tests.ml new file mode 100644 index 00000000000..c48d0ac534c --- /dev/null +++ b/src/common/ty/__tests__/ty_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "ty" >::: [Ty_printer_test.tests; Ty_simplifier_test.tests] + +let () = run_test_tt_main tests diff --git a/src/common/ty/dune b/src/common/ty/dune new file mode 100644 index 00000000000..d5b212a8493 --- /dev/null +++ b/src/common/ty/dune @@ -0,0 +1,10 @@ +(library + (name flow_typing_ty) + (wrapped false) + (libraries + flow_common + flow_parser_utils_aloc + flow_parser_utils_output + ) + (preprocess (pps visitors.ppx ppx_deriving.show)) +) diff --git a/src/common/ty/ty.ml b/src/common/ty/ty.ml index c0f87bf30e9..5bdf6d4d982 100644 --- a/src/common/ty/ty.ml +++ b/src/common/ty/ty.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -8,14 +8,21 @@ include Ty_symbol include Ty_ancestors +type aloc = (ALoc.t[@printer (fun fmt loc -> fprintf fmt "%s" (ALoc.to_string_no_source loc))]) +[@@deriving show] + type t = | TVar of tvar * t list option - | Bound of symbol - | Generic of symbol * bool (* structural *) * t list option - | Any | AnyObj | AnyFun - | Top | Bot - | Void | Null - | Num | Str | Bool + | Bound of aloc * string + | Generic of generic_t + | Any of any_kind + | Top + | Bot of bot_kind + | Void + | Null + | Num of string option + | Str of string option + | Bool of bool option | NumLit of string | StrLit of string | BoolLit of bool @@ -26,17 +33,80 @@ type t = | Union of t * t * t list | Inter of t * t * t list | TypeAlias of type_alias - | TypeOf of symbol - | Class of symbol * bool (* structural *) * type_param list option - | Exists - | Module of symbol + | InlineInterface of interface_t + | TypeOf of builtin_value + | ClassDecl of symbol * type_param list option + | InterfaceDecl of symbol * type_param list option + | Utility of utility + | Module of symbol option * export_t | Mu of int * t -and tvar = RVar of int [@@unboxed] (* Recursive variable *) +and tvar = RVar of int [@@unboxed] + +(* Recursive variable *) +and generic_t = symbol * gen_kind * t list option + +and any_kind = + | Annotated + | AnyError + | Unsound of unsoundness_kind + | Untyped + +and unsoundness_kind = + | BoundFunctionThis + | ComputedNonLiteralKey + | Constructor + | DummyStatic + | Existential + | Exports + | FunctionPrototype + | InferenceHooks + | InstanceOfRefinement + | Merged + | ResolveSpread + | Unchecked + | Unimplemented + | UnresolvedType + | WeakContext + +(* The purpose of adding this distinction is to enable normalized types to mimic + * the behavior of the signature optimizer when exporting types that contain + * tvars with no lower bounds. + *) +and upper_bound_kind = + (* No upper bounds are exported as `any` *) + | NoUpper + (* If there is some upper bound (use), this is exported as `MergedT use`. This + * type is not helpful in a normalized form. So instead we attempt to normalize + * the use to a type `t`. If this succeeds then we create `SomeKnownUpper t`. + *) + | SomeKnownUpper of t + (* If the above case fails we resort to this last case. 
*) + | SomeUnknownUpper of string + +and bot_kind = + (* Type.Empty *) + | EmptyType + (* Type.MatchingPropT *) + | EmptyMatchingPropT + (* Type.TypeDestructorTriggerT *) + | EmptyTypeDestructorTriggerT of aloc + (* A tvar with no lower bounds *) + | NoLowerWithUpper of upper_bound_kind + +and gen_kind = + | ClassKind + | InterfaceKind + | TypeAliasKind + +and export_t = { + exports: (string * t) list; + cjs_export: t option; +} and fun_t = { - fun_params: (identifier option * t * fun_param) list; - fun_rest_param: (identifier option * t) option; + fun_params: (string option * t * fun_param) list; + fun_rest_param: (string option * t) option; fun_return: t; fun_type_params: type_param list option; } @@ -44,28 +114,34 @@ and fun_t = { and obj_t = { obj_exact: bool; obj_frozen: bool; + obj_literal: bool; obj_props: prop list; } and arr_t = { arr_readonly: bool; + arr_literal: bool; arr_elt_t: t; } and type_alias = { ta_name: symbol; ta_tparams: type_param list option; - ta_type: t option + ta_type: t option; } -and fun_param = { - prm_optional: bool; +and interface_t = { + if_extends: generic_t list; + if_body: obj_t; } +and fun_param = { prm_optional: bool } + and prop = - | NamedProp of identifier * named_prop + | NamedProp of string * named_prop | IndexProp of dict | CallProp of fun_t + | SpreadProp of t and named_prop = | Field of t * field @@ -80,13 +156,13 @@ and field = { and dict = { dict_polarity: polarity; - dict_name: identifier option; + dict_name: string option; dict_key: t; dict_value: t; } and type_param = { - tp_name: identifier; + tp_name: string; tp_bound: t option; tp_polarity: polarity; tp_default: t option; @@ -94,125 +170,460 @@ and type_param = { and opt = bool -and polarity = Positive | Negative | Neutral -[@@deriving visitors { - name="iter_ty"; - nude=true; - variety = "iter"; - visit_prefix="on_"; - ancestors=["iter_ty_base"]; -}, visitors { - name="reduce_ty"; - variety = "reduce"; - nude = true; - visit_prefix = "on_"; - ancestors = ["reduce_ty_base"]; -}, visitors { - name="map_ty"; - variety = "map"; - nude = true; - visit_prefix = "on_"; - ancestors = ["map_ty_base"]; -}, visitors { - name="endo_ty"; - variety = "endo"; - nude = true; - visit_prefix = "on_"; - ancestors = ["endo_ty_base"]; -}] - - -(* Type descructors *) - -let rec bk_union = function - | Union (t1,t2,ts) -> Core_list.concat_map ~f:bk_union (t1::t2::ts) - | t -> [t] - -let rec bk_inter = function - | Inter (t1,t2,ts) -> Core_list.concat_map ~f:bk_inter (t1::t2::ts) - | t -> [t] - +and utility = + (* https://flow.org/en/docs/types/utilities/ *) + | Keys of t + | Values of t + | ReadOnly of t + | Exact of t + | Diff of t * t + | Rest of t * t + | PropertyType of t * t + | ElementType of t * t + | NonMaybeType of t + | ObjMap of t * t + | ObjMapi of t * t + | TupleMap of t * t + | Call of t * t list + | Class of t + | Shape of t + | Exists + (* React utils *) + | ReactElementPropsType of t + | ReactElementConfigType of t + | ReactElementRefType of t + | ReactConfigType of t * t + +and polarity = + | Positive + | Negative + | Neutral + +and builtin_value = + | FunProto + | ObjProto + | FunProtoApply + | FunProtoBind + | FunProtoCall +[@@deriving + visitors + { + name = "iter_ty"; + nude = true; + variety = "iter"; + visit_prefix = "on_"; + ancestors = ["iter_ty_base"]; + }, + visitors + { + name = "iter2_ty"; + nude = true; + variety = "iter2"; + visit_prefix = "on_"; + ancestors = ["iter2_ty_base"]; + }, + visitors + { + name = "reduce_ty"; + variety = "reduce"; + nude = true; + visit_prefix = 
"on_"; + ancestors = ["reduce_ty_base"]; + }, + visitors + { + name = "map_ty"; + variety = "map"; + nude = true; + visit_prefix = "on_"; + ancestors = ["map_ty_base"]; + }, + visitors + { + name = "endo_ty"; + variety = "endo"; + nude = true; + visit_prefix = "on_"; + ancestors = ["endo_ty_base"]; + }, + visitors + { + name = "mapreduce_ty"; + variety = "mapreduce"; + nude = true; + visit_prefix = "on_"; + ancestors = ["mapreduce_ty_base"]; + }, + show] + +exception Difference of int + +let assert0 i = + if i == 0 then + () + else + raise (Difference i) + +(* The prototype of what should happen when overriding fail_* methods *) +let fail_gen : 'env 'x. ('env -> 'x -> int) -> 'env -> 'x -> 'x -> unit = + (fun tag_of env t1 t2 -> assert0 (tag_of env t1 - tag_of env t2)) + +(* Compare Ty.t for structural equality + This class can be overridden to define new forms of equality on types *) +class ['A] comparator_ty = + object (this) + inherit [_] iter2_ty as super + + method compare (env : 'A) (t1 : t) (t2 : t) = + try + this#on_t env t1 t2; + 0 + with Difference n -> n + + (* Take advantage of pointer equality at type nodes to short circut *) + method! private on_t env x y = + if x == y then + () + else + super#on_t env x y + + (* Base fields originally handled in the ancestor *) + method! private on_int _env x y = assert0 (x - y) + + method! private on_string env x y = + (* In order to sort integer literals we try to parse all strings as integers *) + match int_of_string x with + | x -> + begin + match int_of_string y with + (* If both parse as integers then we compare them as integers *) + | y -> this#on_int env x y + (* If xor parses as an integer then that one is "less than" the other *) + | exception Failure _ -> raise (Difference (-1)) + end + | exception Failure _ -> + begin + match int_of_string y with + | _ -> raise (Difference 1) + (* If neither parse as integers then we compare them as strings *) + | exception Failure _ -> assert0 (String.compare x y) + end + + method! private on_bool _env x y = assert0 (Pervasives.compare x y) + + method! private on_symbol _env x y = assert0 (Pervasives.compare x y) + + method! private on_aloc _env x y = assert0 (ALoc.compare x y) + + method! private fail_option _env x _y = + match x with + | None -> raise (Difference (-1)) + | _ -> raise (Difference 1) + + method! private fail_list _env x _y = + match x with + | [] -> raise (Difference (-1)) + | _ -> raise (Difference 1) + + (* This class must override all fail_* methods on variant types to be correct. *) + (* The following methods are ordered respectively with the + definitions in this file to make it easier to check *) + method! private fail_t env x y = fail_gen this#tag_of_t env x y + + method! private fail_any_kind env x y = fail_gen this#tag_of_any_kind env x y + + method! private fail_upper_bound_kind env x y = fail_gen this#tag_of_upper_bound_kind env x y + + method! private fail_bot_kind env x y = fail_gen this#tag_of_bot_kind env x y + + method! private fail_gen_kind env x y = fail_gen this#tag_of_gen_kind env x y + + method! private fail_prop env x y = fail_gen this#tag_of_prop env x y + + method! private fail_named_prop env x y = fail_gen this#tag_of_named_prop env x y + + method! private fail_utility env x y = fail_gen this#tag_of_utility env x y + + method! private fail_polarity env x y = fail_gen this#tag_of_polarity env x y + + method! 
private fail_unsoundness_kind env x y = fail_gen this#tag_of_unsoundness_kind env x y + + (* types will show up in unions and intersections in ascending order *) + (* No two elements of each variant can be assigned the same tag *) + method tag_of_t _ = + function + (* Roughly in order of increasing complexity *) + (* Favor litererals over base types *) + (* Favor user defined types over structural types *) + | Bot _ -> 0 + | Top -> 1 + | Any _ -> 2 + | Void -> 3 + | Null -> 4 + | BoolLit _ -> 5 + | Bool _ -> 6 + | NumLit _ -> 7 + | Num _ -> 8 + | StrLit _ -> 9 + | Str _ -> 10 + | TVar _ -> 11 + | Bound _ -> 12 + | Generic _ -> 13 + | TypeAlias _ -> 14 + | TypeOf _ -> 15 + | ClassDecl _ -> 16 + | Utility _ -> 17 + | Tup _ -> 18 + | Arr _ -> 19 + | Fun _ -> 20 + | Obj _ -> 21 + | Inter _ -> 22 + | Union _ -> 23 + | InterfaceDecl _ -> 24 + | Module _ -> 25 + | Mu _ -> 26 + | InlineInterface _ -> 27 + + method tag_of_gen_kind _ = + function + | ClassKind -> 0 + | InterfaceKind -> 1 + | TypeAliasKind -> 2 + + method tag_of_any_kind _ = + function + | Annotated -> 0 + | AnyError -> 1 + | Unsound _ -> 2 + | Untyped -> 3 + + method tag_of_unsoundness_kind _ = + function + | BoundFunctionThis -> 0 + | ComputedNonLiteralKey -> 1 + | Constructor -> 2 + | DummyStatic -> 3 + | Existential -> 4 + | Exports -> 5 + | FunctionPrototype -> 6 + | InferenceHooks -> 7 + | InstanceOfRefinement -> 8 + | Merged -> 9 + | ResolveSpread -> 10 + | Unchecked -> 11 + | Unimplemented -> 12 + | UnresolvedType -> 13 + | WeakContext -> 14 + + method tag_of_prop _env = + function + | NamedProp _ -> 0 + | IndexProp _ -> 1 + | CallProp _ -> 2 + | SpreadProp _ -> 3 + + method tag_of_named_prop _env = + function + | Field _ -> 0 + | Method _ -> 1 + | Get _ -> 2 + | Set _ -> 3 + + method tag_of_utility _ = + function + | Keys _ -> 0 + | Values _ -> 1 + | ReadOnly _ -> 2 + | Exact _ -> 3 + | Diff _ -> 4 + | Rest _ -> 5 + | PropertyType _ -> 6 + | ElementType _ -> 7 + | NonMaybeType _ -> 8 + | ObjMap _ -> 9 + | ObjMapi _ -> 10 + | TupleMap _ -> 11 + | Call _ -> 12 + | Class _ -> 13 + | Shape _ -> 14 + | Exists -> 17 + | ReactElementPropsType _ -> 18 + | ReactElementConfigType _ -> 19 + | ReactElementRefType _ -> 20 + | ReactConfigType _ -> 21 + + method tag_of_polarity _ = + function + | Positive -> 0 + | Negative -> 1 + | Neutral -> 2 + + method tag_of_bot_kind _env = + function + | EmptyType -> 0 + | EmptyMatchingPropT -> 1 + | EmptyTypeDestructorTriggerT _ -> 2 + | NoLowerWithUpper _ -> 3 + + method tag_of_upper_bound_kind _env = + function + | NoUpper -> 0 + | SomeKnownUpper _ -> 1 + | SomeUnknownUpper _ -> 2 + end + +(* Type destructors *) + +let rec bk_union ?(flattened = false) = function + | Union (t1, t2, ts) when flattened -> (t1, t2 :: ts) + | Union (t1, t2, ts) -> Nel.map_concat bk_union (t1, t2 :: ts) + | t -> (t, []) + +let rec bk_inter ?(flattened = false) = function + | Inter (t1, t2, ts) when flattened -> (t1, t2 :: ts) + | Inter (t1, t2, ts) -> Nel.map_concat bk_inter (t1, t2 :: ts) + | t -> (t, []) (* Type constructors *) -let mk_union ts = - let ts = List.concat (List.map bk_union ts) in +let mk_union ?(flattened = false) nel_ts = + let (t, ts) = Nel.map_concat (bk_union ~flattened) nel_ts in match ts with - | [] -> Bot - | [t] -> t - | t1::t2::ts -> Union (t1, t2, ts) + | [] -> t + | hd :: tl -> Union (t, hd, tl) -let mk_inter ts = - let ts = List.concat (List.map bk_inter ts) in +let mk_inter ?(flattened = false) nel_ts = + let (t, ts) = Nel.map_concat (bk_inter ~flattened) nel_ts in match ts with - | [] -> 
Top - | [t] -> t - | t1::t2::ts -> Inter (t1, t2, ts) + | [] -> t + | hd :: tl -> Inter (t, hd, tl) -let mk_maybe t = - mk_union [Null; Void; t] +let explicit_any = Any Annotated -let mk_field_props prop_list = - List.map (fun (id, t, opt) -> NamedProp (id, - Field (t, { fld_polarity = Neutral; fld_optional = opt }) - )) prop_list +let is_dynamic = function + | Any _ -> true + | _ -> false -let mk_object ?(obj_exact=false) ?(obj_frozen=false) obj_props = - Obj { obj_exact; obj_frozen; obj_props } +let mk_maybe t = mk_union (Null, [Void; t]) -let named_t symbol = - Generic (symbol, false, None) +let mk_field_props prop_list = + Core_list.map + ~f:(fun (id, t, opt) -> + NamedProp (id, Field (t, { fld_polarity = Neutral; fld_optional = opt }))) + prop_list + +let mk_object ?(obj_exact = false) ?(obj_frozen = false) ?(obj_literal = false) obj_props = + Obj { obj_exact; obj_frozen; obj_literal; obj_props } -let builtin_t name = - named_t (builtin_symbol name) +let mk_generic_class symbol targs = Generic (symbol, ClassKind, targs) -let generic_t symbol targs = - Generic (symbol, false, Some targs) +let mk_generic_interface symbol targs = Generic (symbol, InterfaceKind, targs) -let generic_builtin_t name targs = - generic_t (builtin_symbol name) targs +let mk_generic_talias symbol targs = Generic (symbol, TypeAliasKind, targs) let rec mk_exact ty = match ty with - | Obj o -> Obj { o with obj_exact=true } + | Obj o -> Obj { o with obj_exact = true } | TypeAlias a -> let ta_type = Option.map ~f:mk_exact a.ta_type in TypeAlias { a with ta_type } | Mu (i, t) -> Mu (i, mk_exact t) - (* Do not nest $Exact *) - | Generic (Symbol (Builtin, "$Exact"), _, Some [_]) -> ty (* Not applicable *) - | Any | AnyObj | AnyFun | Top | Bot | Void | Null - | Num | Str | Bool | NumLit _ | StrLit _ | BoolLit _ - | Fun _ | Arr _ | Tup _ -> ty + | Any _ + | Top + | Bot _ + | Void + | Null + | Num _ + | Str _ + | Bool _ + | NumLit _ + | StrLit _ + | BoolLit _ + | Fun _ + | Arr _ + | Tup _ + | InlineInterface _ -> + ty + (* Do not nest $Exact *) + | Utility (Exact _) -> ty (* Wrap in $Exact<...> *) - | Generic _ | TVar _ | Bound _ | Union _ | Inter _ - | TypeOf _ | Class _ | Exists | Module _ -> - generic_builtin_t "$Exact" [ty] - -let named_alias ?ta_tparams ?ta_type name = - TypeAlias { ta_name=name; ta_tparams; ta_type } - -let string_of_provenance_ctor = function - | Local _ -> "Local" - | Imported _ -> "Imported" - | Remote _ -> "Remote" - | Library _ -> "Library" + | Generic _ + | TVar _ + | Bound _ + | Union _ + | Inter _ + | TypeOf _ + | ClassDecl _ + | InterfaceDecl _ + | Utility _ + | Module _ -> + Utility (Exact ty) + +let mk_array ~readonly ~literal t = + Arr { arr_readonly = readonly; arr_literal = literal; arr_elt_t = t } + +let named_alias ?ta_tparams ?ta_type name = TypeAlias { ta_name = name; ta_tparams; ta_type } + +let debug_string_of_provenance_ctor = function + | Local -> "Local" + | Remote { imported_as = Some _ } -> "Imported" + | Remote { imported_as = None } -> "Remote" + | Library -> "Library" | Builtin -> "Builtin" -let string_of_provenance prov = - match prov with - | Local loc | Imported loc | Remote loc | Library loc -> - Utils_js.spf "%s %s" (string_of_provenance_ctor prov) - (Reason.string_of_loc loc) - | Builtin -> "Builtin" - -let string_of_symbol (Symbol (prov, name)) = - Utils_js.spf "%s (%s)" name (string_of_provenance prov) - -let loc_of_provenance = function - | Local loc -> loc - | Imported loc -> loc - | Remote loc -> loc - | Library loc -> loc - | Builtin -> Loc.none +let 
debug_string_of_symbol { provenance; def_loc; name; _ } = + Utils_js.spf + "%s (%s:%s)" + name + (debug_string_of_provenance_ctor provenance) + (Reason.string_of_aloc def_loc) + +let debug_string_of_generic_kind = function + | ClassKind -> "class" + | InterfaceKind -> "interface" + | TypeAliasKind -> "type alias" + +let string_of_utility_ctor = function + | Keys _ -> "$Keys" + | Values _ -> "$Values" + | ReadOnly _ -> "$ReadOnly" + | Exact _ -> "$Exact" + | Diff _ -> "$Diff" + | Rest _ -> "$Rest" + | PropertyType _ -> "$PropertyType" + | ElementType _ -> "$ElementType" + | NonMaybeType _ -> "$NonMaybeType" + | ObjMap _ -> "$ObjMap" + | ObjMapi _ -> "$ObjMapi" + | TupleMap _ -> "$TupleMap" + | Call _ -> "$Call" + | Class _ -> "Class" + | Shape _ -> "$Shape" + | Exists -> "*" + | ReactElementPropsType _ -> "React$ElementProps" + | ReactElementConfigType _ -> "React$ElementConfig" + | ReactElementRefType _ -> "React$ElementRef" + | ReactConfigType _ -> "React$Config" + +let types_of_utility = function + | Keys t -> Some [t] + | Values t -> Some [t] + | ReadOnly t -> Some [t] + | Exact t -> Some [t] + | Diff (t1, t2) -> Some [t1; t2] + | Rest (t1, t2) -> Some [t1; t2] + | PropertyType (t1, t2) -> Some [t1; t2] + | ElementType (t1, t2) -> Some [t1; t2] + | NonMaybeType t -> Some [t] + | ObjMap (t1, t2) -> Some [t1; t2] + | ObjMapi (t1, t2) -> Some [t1; t2] + | TupleMap (t1, t2) -> Some [t1; t2] + | Call (t, ts) -> Some (t :: ts) + | Class t -> Some [t] + | Shape t -> Some [t] + | Exists -> None + | ReactElementPropsType t -> Some [t] + | ReactElementConfigType t -> Some [t] + | ReactElementRefType t -> Some [t] + | ReactConfigType (t1, t2) -> Some [t1; t2] diff --git a/src/common/ty/ty_ancestors.ml b/src/common/ty/ty_ancestors.ml index 13aacb92d68..829f746d81d 100644 --- a/src/common/ty/ty_ancestors.ml +++ b/src/common/ty/ty_ancestors.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,139 +7,258 @@ open Ty_symbol -class ['self] iter_ty_base = object (_: 'self) - method private on_string : 'env . 'env -> string -> unit = fun _env _x -> () - method private on_bool : 'env . 'env -> bool -> unit = fun _env _x -> () - method private on_int : 'env . 'env -> int -> unit = fun _env _x -> () - method private on_symbol : 'env . 'env -> symbol -> unit = fun _env _x -> () - method private on_identifier: 'env . 'env -> identifier -> unit = fun _env _x -> () - - method private on_option: 'env 'a 'b . ('env -> 'a -> 'b) -> 'env -> 'a option -> 'b option - = fun f env -> Option.map ~f:(f env) - method private on_list: 'env 'a . ('env -> 'a -> unit) -> 'env -> 'a list -> unit - = fun f env -> List.iter (f env) -end - -class ['self] map_ty_base = object (_: 'self) - method private on_string : 'env -> string -> string = fun _ x -> x - method private on_bool : 'env -> bool -> bool = fun _ x -> x - method private on_int : 'env -> int -> int = fun _ x -> x - method private on_symbol : 'env -> symbol -> symbol = fun _ x -> x - method private on_identifier: 'env -> identifier -> identifier = fun _ x -> x - - method private on_list - : 'env 'a 'b . ('env -> 'a -> 'b) -> 'env -> 'a list -> 'b list - = fun f env -> List.map (f env) - method private on_option - : 'env 'a 'b . 
('env -> 'a -> 'b) -> 'env -> 'a option -> 'b option - = fun f env -> Option.map ~f:(f env) -end - -class ['self] endo_ty_base = object (_self : 'self) - method private on_string : 'env -> string -> string = fun _ x -> x - method private on_bool : 'env -> bool -> bool = fun _ x -> x - method private on_int : 'env -> int -> int = fun _ x -> x - method private on_symbol : 'env -> symbol -> symbol = fun _ x -> x - method private on_identifier: 'env -> identifier -> identifier = fun _ x -> x - - (* Copied from - * https://github.com/facebook/hhvm/blob/master/hphp/hack/src/ast/ast_defs_visitors_ancestors.ml - *) - method private on_list - : 'env 'a . ('env -> 'a -> 'a) -> 'env -> 'a list -> 'a list - = fun f env xs -> - let rec aux env xs counter = - match xs with - | [] -> xs - | [y1] -> - let z1 = f env y1 in - if y1 == z1 then xs - else [z1] - | [y1; y2] -> - let z1 = f env y1 in - let z2 = f env y2 in - if y1 == z1 && y2 == z2 then xs - else [z1; z2] - | [y1; y2; y3] -> - let z1 = f env y1 in - let z2 = f env y2 in - let z3 = f env y3 in - if y1 == z1 && y2 == z2 && y3 == z3 then xs - else [z1; z2; z3] - | [y1; y2; y3; y4] -> - let z1 = f env y1 in - let z2 = f env y2 in - let z3 = f env y3 in - let z4 = f env y4 in - if y1 == z1 && y2 == z2 && y3 == z3 && y4 == z4 then xs - else [z1; z2; z3; z4] - | [y1; y2; y3; y4; y5] -> - let z1 = f env y1 in - let z2 = f env y2 in - let z3 = f env y3 in - let z4 = f env y4 in - let z5 = f env y5 in - if y1 == z1 && y2 == z2 && y3 == z3 && y4 == z4 && y5 == z5 then xs - else [z1; z2; z3; z4; z5] - | y1::y2::y3::y4::y5::ys -> - let z1 = f env y1 in - let z2 = f env y2 in - let z3 = f env y3 in - let z4 = f env y4 in - let z5 = f env y5 in - let zs = - if counter > 1000 then - aux_slow env ys [] ys false - else - aux env ys (counter + 1) +class ['self] iter_ty_base = + object (_ : 'self) + method private on_string : 'env. 'env -> string -> unit = (fun _env _x -> ()) + + method private on_bool : 'env. 'env -> bool -> unit = (fun _env _x -> ()) + + method private on_int : 'env. 'env -> int -> unit = (fun _env _x -> ()) + + method private on_symbol : 'env. 'env -> symbol -> unit = (fun _env _x -> ()) + + method private on_aloc : 'env. 'env -> ALoc.t -> unit = (fun _env _x -> ()) + + method private on_option : 'env 'a 'b. ('env -> 'a -> 'b) -> 'env -> 'a option -> 'b option = + (fun f env -> Option.map ~f:(f env)) + + method private on_list : 'env 'a. ('env -> 'a -> unit) -> 'env -> 'a list -> unit = + (fun f env -> List.iter (f env)) + end + +class ['self] iter2_ty_base = + object (self : 'self) + method private on_string : 'env. 'env -> string -> string -> unit = (fun _env _x _y -> ()) + + method private on_bool : 'env. 'env -> bool -> bool -> unit = (fun _env _x _y -> ()) + + method private on_int : 'env. 'env -> int -> int -> unit = (fun _env _x _y -> ()) + + method private on_symbol : 'env. 'env -> symbol -> symbol -> unit = (fun _env _x _y -> ()) + + method private on_aloc : 'env. 'env -> ALoc.t -> ALoc.t -> unit = (fun _env _x _y -> ()) + + method private fail_option : 'env 'a. 'env -> 'a option -> 'a option -> unit = + (fun _ _ _ -> raise VisitorsRuntime.StructuralMismatch) + + method private on_option + : 'env 'a. ('env -> 'a -> 'a -> unit) -> 'env -> 'a option -> 'a option -> unit = + fun f env x y -> + match (x, y) with + | (Some x, Some y) -> f env x y + | (Some _, None) + | (None, Some _) -> + self#fail_option env x y + | (None, None) -> () + + method private fail_list : 'env 'a. 
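(* fail_option and fail_list, together with the fail_* methods that the
   visitors ppx generates for each variant type, are the extension points of
   this pairwise visitor: by default they raise
   VisitorsRuntime.StructuralMismatch, and subclasses such as the
   comparator_ty class defined above override them to raise Difference and
   report an ordering instead. *)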
'env -> 'a list -> 'a list -> unit = + (fun _ _ _ -> raise VisitorsRuntime.StructuralMismatch) + + method private on_list + : 'env 'a. ('env -> 'a -> 'a -> unit) -> 'env -> 'a list -> 'a list -> unit = + fun f env l1 l2 -> + match (l1, l2) with + | ([], []) -> () + | (a1 :: l1, a2 :: l2) -> + f env a1 a2; + self#on_list f env l1 l2 + | (l1, l2) -> self#fail_list env l1 l2 + end + +class ['self] map_ty_base = + object (_ : 'self) + method private on_string : 'env -> string -> string = (fun _ x -> x) + + method private on_bool : 'env -> bool -> bool = (fun _ x -> x) + + method private on_int : 'env -> int -> int = (fun _ x -> x) + + method private on_symbol : 'env -> symbol -> symbol = (fun _ x -> x) + + method private on_aloc : 'env -> ALoc.t -> ALoc.t = (fun _ x -> x) + + method private on_list : 'env 'a 'b. ('env -> 'a -> 'b) -> 'env -> 'a list -> 'b list = + (fun f env -> Core_list.map ~f:(f env)) + + method private on_option : 'env 'a 'b. ('env -> 'a -> 'b) -> 'env -> 'a option -> 'b option = + (fun f env -> Option.map ~f:(f env)) + end + +class ['self] endo_ty_base = + object (_self : 'self) + method private on_string : 'env -> string -> string = (fun _ x -> x) + + method private on_bool : 'env -> bool -> bool = (fun _ x -> x) + + method private on_int : 'env -> int -> int = (fun _ x -> x) + + method private on_symbol : 'env -> symbol -> symbol = (fun _ x -> x) + + method private on_aloc : 'env -> ALoc.t -> ALoc.t = (fun _ x -> x) + + (* Copied from + * https://github.com/facebook/hhvm/blob/master/hphp/hack/src/ast/ast_defs_visitors_ancestors.ml + *) + method private on_list : 'env 'a. ('env -> 'a -> 'a) -> 'env -> 'a list -> 'a list = + fun f env xs -> + let rec aux env xs counter = + match xs with + | [] -> xs + | [y1] -> + let z1 = f env y1 in + if y1 == z1 then + xs + else + [z1] + | [y1; y2] -> + let z1 = f env y1 in + let z2 = f env y2 in + if y1 == z1 && y2 == z2 then + xs + else + [z1; z2] + | [y1; y2; y3] -> + let z1 = f env y1 in + let z2 = f env y2 in + let z3 = f env y3 in + if y1 == z1 && y2 == z2 && y3 == z3 then + xs + else + [z1; z2; z3] + | [y1; y2; y3; y4] -> + let z1 = f env y1 in + let z2 = f env y2 in + let z3 = f env y3 in + let z4 = f env y4 in + if y1 == z1 && y2 == z2 && y3 == z3 && y4 == z4 then + xs + else + [z1; z2; z3; z4] + | [y1; y2; y3; y4; y5] -> + let z1 = f env y1 in + let z2 = f env y2 in + let z3 = f env y3 in + let z4 = f env y4 in + let z5 = f env y5 in + if y1 == z1 && y2 == z2 && y3 == z3 && y4 == z4 && y5 == z5 then + xs + else + [z1; z2; z3; z4; z5] + | y1 :: y2 :: y3 :: y4 :: y5 :: ys -> + let z1 = f env y1 in + let z2 = f env y2 in + let z3 = f env y3 in + let z4 = f env y4 in + let z5 = f env y5 in + let zs = + if counter > 1000 then + aux_slow env ys [] ys false + else + aux env ys (counter + 1) + in + if y1 == z1 && y2 == z2 && y3 == z3 && y4 == z4 && y5 == z5 && ys == zs then + xs + else + z1 :: z2 :: z3 :: z4 :: z5 :: zs + and aux_slow env xs acc original_list has_new_elements = + match xs with + | [] -> + if has_new_elements then + List.rev acc + else + original_list + | y1 :: ys -> + let z1 = f env y1 in + aux_slow env ys (z1 :: acc) original_list (has_new_elements || y1 != z1) in - if y1 == z1 && y2 == z2 && y3 == z3 && y4 == z4 && y5 == z5 && ys == zs - then xs - else z1::z2::z3::z4::z5::zs - and aux_slow env xs acc original_list has_new_elements = - match xs with - | [] -> if has_new_elements then List.rev acc else original_list - | y1::ys -> - let z1 = f env y1 in - aux_slow env ys (z1::acc) original_list (has_new_elements 
|| y1 != z1) - in - aux env xs 0 - - method private on_option - : 'env 'a . ('env -> 'a -> 'a) -> 'env -> 'a option -> 'a option - = fun f env x -> - match x with - | None -> x - | Some y -> - let z = f env y in - if y == z then x else Some z -end - - -class virtual ['e] monoid = object - method private virtual zero: 'e - method private virtual plus: 'e -> 'e -> 'e -end - -class virtual ['self] reduce_ty_base = object (self : 'self) - inherit ['acc] monoid - method private on_string : 'env . 'env -> string -> 'acc = fun _ _ -> self#zero - method private on_int : 'env . 'env -> int -> 'acc = fun _ _ -> self#zero - method private on_bool : 'env . 'env -> bool -> 'acc = fun _ _ -> self#zero - method private on_symbol : 'env . 'env -> symbol -> 'acc = fun _ _ -> self#zero - method private on_identifier: 'env . 'env -> identifier -> 'acc = fun _ _ -> self#zero - - method private on_list: 'env 'a . ('env -> 'a -> 'acc) -> 'env -> 'a list -> 'acc - = fun f env xs -> self#list_fold_left f env self#zero xs - method private on_option: 'env 'a . ('env -> 'a -> 'acc) -> 'env -> 'a option -> 'acc - = fun f env -> Option.value_map ~default:self#zero ~f:(f env) - - method private list_fold_left - : 'env 'a . ('env -> 'a -> 'acc) -> 'env -> 'acc -> 'a list -> 'acc - = fun f env acc xs -> - match xs with - | [] -> acc - | y::ys -> - let acc = self#plus acc (f env y) in - self#list_fold_left f env acc ys -end + aux env xs 0 + + method private on_option : 'env 'a. ('env -> 'a -> 'a) -> 'env -> 'a option -> 'a option = + fun f env x -> + match x with + | None -> x + | Some y -> + let z = f env y in + if y == z then + x + else + Some z + end + +class virtual ['e] monoid = + object + method virtual private zero : 'e + + method virtual private plus : 'e -> 'e -> 'e + end + +class virtual ['self] reduce_ty_base = + object (self : 'self) + inherit ['acc] monoid + + method private on_string : 'env. 'env -> string -> 'acc = (fun _ _ -> self#zero) + + method private on_int : 'env. 'env -> int -> 'acc = (fun _ _ -> self#zero) + + method private on_bool : 'env. 'env -> bool -> 'acc = (fun _ _ -> self#zero) + + method private on_symbol : 'env. 'env -> symbol -> 'acc = (fun _ _ -> self#zero) + + method private on_aloc : 'env. 'env -> ALoc.t -> 'acc = (fun _ _ -> self#zero) + + method private on_list : 'env 'a. ('env -> 'a -> 'acc) -> 'env -> 'a list -> 'acc = + (fun f env xs -> self#list_fold_left f env self#zero xs) + + method private on_option : 'env 'a. ('env -> 'a -> 'acc) -> 'env -> 'a option -> 'acc = + (fun f env -> Option.value_map ~default:self#zero ~f:(f env)) + + method private list_fold_left + : 'env 'a. ('env -> 'a -> 'acc) -> 'env -> 'acc -> 'a list -> 'acc = + fun f env acc xs -> + match xs with + | [] -> acc + | y :: ys -> + let acc = self#plus acc (f env y) in + self#list_fold_left f env acc ys + end + +class virtual ['self] mapreduce_ty_base = + object (self : 'self) + inherit ['acc] monoid + + method private on_string : 'env -> string -> string * 'acc = (fun _ x -> (x, self#zero)) + + method private on_bool : 'env -> bool -> bool * 'acc = (fun _ x -> (x, self#zero)) + + method private on_int : 'env -> int -> int * 'acc = (fun _ x -> (x, self#zero)) + + method private on_symbol : 'env -> symbol -> symbol * 'acc = (fun _ x -> (x, self#zero)) + + method private on_aloc : 'env -> ALoc.t -> ALoc.t * 'acc = (fun _ x -> (x, self#zero)) + + method private on_list : 'a 'b. 
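(* mapreduce_ty_base combines the map and reduce bases: every on_* method
   returns the (possibly rebuilt) node paired with an accumulator, and
   accumulators are combined through the inherited monoid's zero/plus. The
   on_list method below threads the accumulator through list_fold_left while
   rebuilding the list in its original order. *)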
('env -> 'a -> 'b * 'acc) -> 'env -> 'a list -> 'b list * 'acc + = + (fun f env -> self#list_fold_left f env ([], self#zero)) + + method private on_option + : 'a 'b. ('env -> 'a -> 'b * 'acc) -> 'env -> 'a option -> 'b option * 'acc = + fun f env x -> + match x with + | None -> (None, self#zero) + | Some x -> + let (x', acc) = f env x in + (Some x', acc) + + method private list_fold_left + : 'a 'b. ('env -> 'a -> 'b * 'acc) -> 'env -> 'b list * 'acc -> 'a list -> 'b list * 'acc = + fun f env acc xs -> + match xs with + | [] -> + let (ys_rev, acc) = acc in + (List.rev ys_rev, acc) + | y :: ys -> + let (acc_ys, acc) = acc in + let (y', acc') = f env y in + let acc'' = self#plus acc acc' in + self#list_fold_left f env (y' :: acc_ys, acc'') ys + end diff --git a/src/common/ty/ty_debug.ml b/src/common/ty/ty_debug.ml index 6b3f6f5c13e..6d14122061e 100644 --- a/src/common/ty/ty_debug.ml +++ b/src/common/ty/ty_debug.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,32 +8,73 @@ open Ty open Utils_js -let cut_off ?(limit=1000) str = +let cut_off ?(limit = 1000) str = let len = String.length str in - if len > limit - then String.sub str 0 (len - 1) ^ " ..." - else str - -let rec dump_opt (f: 'a -> string) (o: 'a option) = match o with + if len > limit then + String.sub str 0 (len - 1) ^ " ..." + else + str + +let dump_bot_upper_bound_kind = function + | NoUpper -> "NoUpper" + | SomeKnownUpper _ -> "SomeKnownUpper" + | SomeUnknownUpper u -> spf "SomeUnknownUpper (%s)" u + +let dump_bot_kind = function + | EmptyType -> "EmptyType" + | EmptyMatchingPropT -> "EmptyMatchingPropT" + | EmptyTypeDestructorTriggerT _ -> "EmptyTypeDestructorTriggerT" + | NoLowerWithUpper u -> spf "NoLowerWithUpper (%s)" (dump_bot_upper_bound_kind u) + +let builtin_value = function + | FunProto -> "Function.prototype" + | ObjProto -> "Object.prototype" + | FunProtoApply -> "Function.prototype.apply" + | FunProtoBind -> "Function.prototype.bind" + | FunProtoCall -> "Function.prototype.call" + +let rec dump_opt (f : 'a -> string) (o : 'a option) = + match o with | Some t -> f t | None -> "" -and dump_list : 'a . ('a -> string) -> ?sep:string -> 'a list -> string = - fun f ?(sep=", ") ls -> - List.map f ls |> String.concat sep +and dump_any_kind = function + | Annotated -> "Annotated" + | AnyError -> "AnyError" + | Unsound kind -> spf "Unsound (%s)" (dump_any_unsoundness_kind kind) + | Untyped -> "Untyped" + +and dump_any_unsoundness_kind = function + | BoundFunctionThis -> "BoundFunctionThis" + | ComputedNonLiteralKey -> "ComputedNonLiteralKey" + | Constructor -> "Constructor" + | DummyStatic -> "DummyStatic" + | Existential -> "Existential" + | Exports -> "Exports" + | FunctionPrototype -> "FunctionPrototype" + | InferenceHooks -> "InferenceHooks" + | InstanceOfRefinement -> "InstanceOfRefinement" + | Merged -> "Merged" + | ResolveSpread -> "ResolveSpread" + | Unchecked -> "Unchecked" + | Unimplemented -> "Unimplemented" + | UnresolvedType -> "UnresolvedType" + | WeakContext -> "WeakContext" + +and dump_list : 'a. ('a -> string) -> ?sep:string -> 'a list -> string = + (fun f ?(sep = ", ") ls -> Core_list.map ~f ls |> String.concat sep) and dump_param_opt = function | { prm_optional = true } -> "?" 
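(* The dump_* functions in this file are verbose, debug-only renderings of
   Ty.t (e.g. Num None dumps as "Num" and Num (Some "1") as "Num (1)"), as
   opposed to the user-facing printer in ty_printer.ml. dump_t cuts recursion
   off at a default depth of 10 and prints "..." beyond that. *)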
| _ -> "" -and dump_param ~depth (p: string option * t * fun_param) = match p with - | Some s, t, o -> - spf "%s%s: %s" s (dump_param_opt o) (dump_t ~depth t) - | _, t, o -> spf "%s%s" (dump_param_opt o) (dump_t ~depth t) +and dump_param ~depth (p : string option * t * fun_param) = + match p with + | (Some s, t, o) -> spf "%s%s: %s" s (dump_param_opt o) (dump_t ~depth t) + | (_, t, o) -> spf "%s%s" (dump_param_opt o) (dump_t ~depth t) and dump_rest_params ~depth = function - | Some (o, t) -> - spf "...%s" (dump_param ~depth (o, t, { prm_optional = false })) + | Some (o, t) -> spf "...%s" (dump_param ~depth (o, t, { prm_optional = false })) | _ -> "" and dump_bound ~depth = function @@ -46,8 +87,7 @@ and dump_polarity = function | Neutral -> "" and dump_type_param ~depth { tp_name; tp_bound; tp_polarity; _ } = - spf "TParam(%s, %s, %s)" (dump_polarity tp_polarity) tp_name - (dump_bound ~depth tp_bound) + spf "TParam(%s, %s, %s)" (dump_polarity tp_polarity) tp_name (dump_bound ~depth tp_bound) and dump_type_params ~depth = function | Some [] -> "" @@ -55,23 +95,29 @@ and dump_type_params ~depth = function | _ -> "" and dump_fun_t ~depth { fun_params; fun_rest_param; fun_return; fun_type_params } = - spf "Fun(%s, %s, %s, out: %s)" + spf + "Fun(%s, %s, %s, out: %s)" (dump_type_params ~depth fun_type_params) (dump_list (dump_param ~depth) fun_params) - (dump_rest_params ~depth fun_rest_param) + (dump_rest_params ~depth fun_rest_param) (dump_t ~depth fun_return) and dump_field ~depth name { fld_polarity; fld_optional } t = - spf "%s%s%s: %s" + spf + "%s%s%s: %s" (dump_polarity fld_polarity) name - (if fld_optional then "?" else "") + ( if fld_optional then + "?" + else + "" ) (dump_t ~depth t) and dump_prop ~depth = function | NamedProp (s, p) -> dump_named_prop ~depth s p | IndexProp d -> dump_dict ~depth d | CallProp f -> dump_fun_t ~depth f + | SpreadProp t -> dump_spread ~depth t and dump_named_prop ~depth x = function | Field (t, field) -> dump_field ~depth x field t @@ -80,90 +126,122 @@ and dump_named_prop ~depth x = function | Set t -> spf "get %s" (dump_t ~depth t) and dump_dict ~depth { dict_polarity; dict_name; dict_key; dict_value } = - spf "%s[%s%s]: %s" + spf + "%s[%s%s]: %s" (dump_polarity dict_polarity) (match dict_name with - | Some n -> spf "%s: " n - | _ -> "") + | Some n -> spf "%s: " n + | _ -> "") (dump_t ~depth dict_key) (dump_t ~depth dict_value) -and dump_obj ~depth { obj_exact; obj_props; obj_frozen = _ } = - if obj_exact - then spf "{|%s|}" (dump_list (dump_prop ~depth) obj_props) - else spf "{%s}" (dump_list (dump_prop ~depth) obj_props) +and dump_spread ~depth t = spf "...%s" (dump_t ~depth t) + +and dump_obj ~depth { obj_exact; obj_props; _ } = + if obj_exact then + spf "{|%s|}" (dump_list (dump_prop ~depth) obj_props) + else + spf "{%s}" (dump_list (dump_prop ~depth) obj_props) -and dump_arr ~depth { arr_readonly; arr_elt_t } = - let ctor = if arr_readonly then "$ReadOnlyArray" else "Array" in +and dump_arr ~depth { arr_readonly; arr_elt_t; _ } = + let ctor = + if arr_readonly then + "$ReadOnlyArray" + else + "Array" + in spf "%s<%s>" ctor (dump_t ~depth arr_elt_t) +and dump_generic ~depth (s, kind, ts) = + spf + "Generic (%s, kind= %s, params=%s)" + (dump_symbol s) + (Ty.debug_string_of_generic_kind kind) + (dump_generics ~depth ts) + and dump_generics ~depth = function | Some ts -> "<" ^ dump_list (dump_t ~depth) ts ^ ">" | _ -> "" and dump_tvar (RVar i) = spf "T_%d" i -and dump_symbol (Symbol (provenance, name)) = - match provenance with - | Local _ -> name - | 
Imported loc -> spf "%s /* imported from file %s */" name (Reason.string_of_loc loc) - | Remote loc -> spf "%s /* defined in file %s */" name (Reason.string_of_loc loc) - | Library loc -> spf "%s /* defined in library %s */" name (Reason.string_of_loc loc) - | Builtin -> spf "%s /* builtin */" name +and dump_symbol { provenance; def_loc; name; _ } = + spf + "(%s, %s) %s" + (Ty.debug_string_of_provenance_ctor provenance) + (Reason.string_of_aloc def_loc) + name + +and dump_utility ~depth u = + let ctor = Ty.string_of_utility_ctor u in + match Ty.types_of_utility u with + | Some ts -> Core_list.map ~f:(dump_t ~depth) ts |> String.concat ", " |> spf "%s (%s)" ctor + | None -> ctor and dump_t ?(depth = 10) t = - if depth < 0 then "..." else - let depth = depth - 1 in - match t with - | TVar (v, ts) -> - spf "TVAR(%s, params=%s)" (dump_tvar v) - (dump_generics ~depth ts) - | Bound s -> spf "Bound(%s)" (dump_symbol s) - | Generic (s, st, ts) -> - spf "Generic(%s, struct= %b, params=%s)" - (dump_symbol s) st (dump_generics ~depth ts) - | Any -> "Any" - | AnyObj -> "AnyObj" - | AnyFun -> "AnyFun" - | Top -> "Top" - | Bot -> "Bot" - | Void -> "Void" - | Null -> "Null" - | Num -> "Num" - | NumLit s -> spf "\"%s\"" s - | Str -> "Str" - | StrLit s -> spf "\"%s\"" s - | Bool -> "Bool" - | BoolLit b -> spf "\"%b\"" b - | Fun f -> dump_fun_t ~depth f - | Obj o -> dump_obj ~depth o - | Arr a -> dump_arr ~depth a - | Tup ts -> - spf "Tup (%s)" (dump_list (dump_t ~depth) ~sep:"," ts) - | Union (t1,t2,ts) -> - spf "Union (%s)" (dump_list (dump_t ~depth) ~sep:", " (ListUtils.first_n 10 (t1::t2::ts))) - | Inter (t1,t2,ts) -> - spf "Inter (%s)" (dump_list (dump_t ~depth) ~sep:", " (t1::t2::ts)) - | TypeAlias { ta_name; ta_tparams; ta_type } -> - spf "TypeAlias (%s, %s, %s)" - (dump_symbol ta_name) - (dump_type_params ~depth ta_tparams) - (Option.value_map ta_type ~default:"" ~f:(fun t -> cut_off (dump_t ~depth t))) - | TypeOf n -> - spf "Typeof(%s)" (dump_symbol n) - | Module n -> - spf "Module(%s)" (dump_symbol n) - | Exists -> "*" - | Class (name, true, ps) -> - spf "Interface (name=%s, params= %s)" (dump_symbol name) (dump_type_params ~depth ps) - | Class (name, false, ps) -> - spf "Class (name=%s, params= %s)" (dump_symbol name) (dump_type_params ~depth ps) - | Mu (i, t) -> spf "Mu (%d, %s)" i (dump_t ~depth t) - -let dump_binding (v, ty) = - Utils_js.spf "type %s = %s" (dump_tvar v) (dump_t ty) - -let dump_env_t s = List.map dump_binding s |> String.concat "\n" + if depth < 0 then + "..." 
+ else + let depth = depth - 1 in + match t with + | TVar (v, ts) -> spf "TVAR(%s, params=%s)" (dump_tvar v) (dump_generics ~depth ts) + | Bound (_, s) -> spf "Bound(%s)" s + | Generic g -> dump_generic ~depth g + | Any kind -> spf "Any (%s)" (dump_any_kind kind) + | Top -> "Top" + | Bot k -> spf "Bot (%s)" (dump_bot_kind k) + | Void -> "Void" + | Null -> "Null" + | Num (Some x) -> spf "Num (%s)" x + | Num None -> "Num" + | NumLit s -> spf "\"%s\"" s + | Str (Some x) -> spf "Str (%s)" x + | Str None -> "Str" + | StrLit s -> spf "\"%s\"" s + | Bool (Some x) -> spf "Bool (%b)" x + | Bool None -> "Bool" + | BoolLit b -> spf "\"%b\"" b + | Fun f -> dump_fun_t ~depth f + | Obj o -> dump_obj ~depth o + | Arr a -> dump_arr ~depth a + | Tup ts -> spf "Tup (%s)" (dump_list (dump_t ~depth) ~sep:"," ts) + | Union (t1, t2, ts) -> + spf + "Union (%s)" + (dump_list (dump_t ~depth) ~sep:", " (ListUtils.first_n 10 (t1 :: t2 :: ts))) + | Inter (t1, t2, ts) -> spf "Inter (%s)" (dump_list (dump_t ~depth) ~sep:", " (t1 :: t2 :: ts)) + | TypeAlias { ta_name; ta_tparams; ta_type } -> + spf + "TypeAlias (%s, %s, %s)" + (dump_symbol ta_name) + (dump_type_params ~depth ta_tparams) + (Option.value_map ta_type ~default:"" ~f:(fun t -> cut_off (dump_t ~depth t))) + | InlineInterface { if_extends; if_body } -> + spf + "InlineInterface (%s, %s)" + (dump_list (dump_generic ~depth) if_extends) + (dump_obj ~depth if_body) + | TypeOf v -> spf "Typeof (%s)" (builtin_value v) + | Module (n, { exports; _ }) -> + let name = + match n with + | Some n -> dump_symbol n + | None -> "" + in + let exports = + dump_list (fun (name, t) -> dump_t ~depth t |> spf "%s : %s" name) ~sep:"," exports + in + spf "Module(%s, %s)" name exports + | ClassDecl (name, ps) -> + spf "Class (name=%s, params= %s)" (dump_symbol name) (dump_type_params ~depth ps) + | InterfaceDecl (name, ps) -> + spf "Interface (name=%s, params= %s)" (dump_symbol name) (dump_type_params ~depth ps) + | Utility u -> dump_utility ~depth u + | Mu (i, t) -> spf "Mu (%d, %s)" i (dump_t ~depth t) + +let dump_binding (v, ty) = Utils_js.spf "type %s = %s" (dump_tvar v) (dump_t ty) + +let dump_env_t s = Core_list.map ~f:dump_binding s |> String.concat "\n" let string_of_polarity = function | Negative -> "Negative" @@ -174,16 +252,15 @@ let string_of_ctor = function | TVar (RVar _, _) -> "RecVar" | Bound _ -> "Bound" | Generic _ -> "Generic" - | Any -> "Any" - | AnyObj -> "AnyObj" - | AnyFun -> "AnyFun" + | Any Annotated -> "Explicit Any" + | Any _ -> "Implicit Any" | Top -> "Top" - | Bot -> "Bot" + | Bot _ -> "Bot" | Void -> "Void" | Null -> "Null" - | Num -> "Num" - | Str -> "Str" - | Bool -> "Bool" + | Num _ -> "Num" + | Str _ -> "Str" + | Bool _ -> "Bool" | NumLit _ -> "NumLit" | StrLit _ -> "StrLit" | BoolLit _ -> "BoolLit" @@ -194,202 +271,186 @@ let string_of_ctor = function | Union _ -> "Union" | Inter _ -> "Inter" | TypeAlias _ -> "TypeAlias" + | InlineInterface _ -> "InlineInterface" | TypeOf _ -> "Typeof" - | Class _ -> "Class" - | Exists -> "Exists" + | ClassDecl _ -> "ClassDecl" + | InterfaceDecl _ -> "InterfaceDecl" + | Utility _ -> "Utility" | Module _ -> "Module" | Mu _ -> "Mu" - let json_of_t ~strip_root = - - let json_of_provenance p = Hh_json.(JSON_Object [ - "kind", JSON_String (Ty.string_of_provenance_ctor p); - "loc", JSON_String (Reason.string_of_loc ~strip_root (Ty.loc_of_provenance p)); - ]) + let json_of_provenance loc p = + Hh_json.( + JSON_Object + [ + ("kind", JSON_String (Ty.debug_string_of_provenance_ctor p)); + ("loc", JSON_String 
(Reason.string_of_aloc ~strip_root loc)); + ]) in - - let json_of_symbol (Symbol (prov, name)) = Hh_json.( - JSON_Object [ - "provenance", json_of_provenance prov; - "name", JSON_String name; - ]) + let json_of_symbol { provenance; def_loc; name; _ } = + Hh_json.( + JSON_Object + [("provenance", json_of_provenance def_loc provenance); ("name", JSON_String name)]) in - - let rec json_of_t t = Hh_json.( - JSON_Object ([ - "kind", JSON_String (string_of_ctor t) - ] @ - match t with - | TVar (v, ts) -> json_of_tvar v @ json_of_targs ts - | Bound (Ty.Symbol (_, s)) -> [ - "bound", JSON_String s - ] - | Generic (s, str, targs_opt) -> - json_of_targs targs_opt @ [ - "type", json_of_symbol s; - "structural", JSON_Bool str; - ] - | Any | AnyObj | AnyFun - | Top | Bot - | Void | Null - | Num | Str | Bool -> [] - | NumLit s - | StrLit s -> [ - "literal", JSON_String s - ] - | BoolLit b -> [ - "literal", JSON_Bool b - ] - | Fun f -> json_of_fun_t f - | Obj { obj_exact; obj_props; obj_frozen } -> [ - "exact", JSON_Bool obj_exact; - "frozen", JSON_Bool obj_frozen; - "props", JSON_Array (List.map json_of_prop obj_props); - ] - | Arr { arr_readonly; arr_elt_t } -> [ - "readonly", JSON_Bool arr_readonly; - "type", json_of_t arr_elt_t; - ] - | Tup ts -> [ - "types", JSON_Array (List.map json_of_t ts); - ] - | Union (t0,t1,ts) -> [ - "types", JSON_Array (List.map json_of_t (t0::t1::ts)); - ] - | Inter (t0,t1,ts) -> [ - "types", JSON_Array (List.map json_of_t (t0::t1::ts)); - ] - | TypeAlias { ta_name; ta_tparams; ta_type } -> [ - "name", json_of_symbol ta_name; - "typeParams", json_of_type_params ta_tparams; - "body", Option.value_map ~f:json_of_t ~default:JSON_Null ta_type - ] - | TypeOf name -> [ - "name", json_of_symbol name; - ] - | Module name -> [ - "name", json_of_symbol name; - ] - | Class (name, structural, tparams) -> [ - "name", json_of_symbol name; - "structural", JSON_Bool structural; - "typeParams", json_of_type_params tparams; - ] - | Exists -> [] - | Mu (i, t) -> [ - "mu_var", int_ i; - "type", json_of_t t; + let rec json_of_t t = + Hh_json.( + JSON_Object + ( [("kind", JSON_String (string_of_ctor t))] + @ + match t with + | TVar (v, ts) -> json_of_tvar v @ json_of_targs ts + | Bound (_, name) -> [("bound", JSON_String name)] + | Generic g -> json_of_generic g + | Any Annotated -> [("any", JSON_String "explicit")] + | Any _ -> [("any", JSON_String "implicit")] + | Top + | Bot _ + | Void + | Null + | Num _ + | Str _ + | Bool _ -> + [] + | NumLit s + | StrLit s -> + [("literal", JSON_String s)] + | BoolLit b -> [("literal", JSON_Bool b)] + | Fun f -> json_of_fun_t f + | Obj o -> json_of_obj_t o + | Arr { arr_readonly; arr_literal; arr_elt_t } -> + [ + ("readonly", JSON_Bool arr_readonly); + ("literal", JSON_Bool arr_literal); + ("type", json_of_t arr_elt_t); + ] + | Tup ts -> [("types", JSON_Array (Core_list.map ~f:json_of_t ts))] + | Union (t0, t1, ts) -> + [("types", JSON_Array (Core_list.map ~f:json_of_t (t0 :: t1 :: ts)))] + | Inter (t0, t1, ts) -> + [("types", JSON_Array (Core_list.map ~f:json_of_t (t0 :: t1 :: ts)))] + | TypeAlias { ta_name; ta_tparams; ta_type } -> + [ + ("name", json_of_symbol ta_name); + ("typeParams", json_of_type_params ta_tparams); + ("body", Option.value_map ~f:json_of_t ~default:JSON_Null ta_type); + ] + | InlineInterface { if_extends; if_body } -> + Hh_json.( + let extends = Core_list.map ~f:(fun g -> JSON_Object (json_of_generic g)) if_extends in + [("extends", JSON_Array extends); ("body", JSON_Object (json_of_obj_t if_body))]) + | TypeOf b -> [("name", 
JSON_String (builtin_value b))] + | Module (name, _) -> + [("name", Option.value_map ~f:json_of_symbol ~default:JSON_Null name)] + | ClassDecl (name, tparams) -> + [("name", json_of_symbol name); ("typeParams", json_of_type_params tparams)] + | InterfaceDecl (name, tparams) -> + [("name", json_of_symbol name); ("typeParams", json_of_type_params tparams)] + | Utility u -> json_of_utility u + | Mu (i, t) -> [("mu_var", int_ i); ("type", json_of_t t)] )) + and json_of_tvar (RVar i) = Hh_json.[("id", int_ i)] + and json_of_generic (s, k, targs_opt) = + json_of_targs targs_opt + @ [ + ("type", json_of_symbol s); + ("kind", Hh_json.JSON_String (Ty.debug_string_of_generic_kind k)); ] - ) - ) - - and json_of_tvar (RVar i) = Hh_json.(["id", int_ i]) - and json_of_fun_t { fun_params; fun_rest_param; fun_return; fun_type_params } = Hh_json.( + [("typeParams", json_of_type_params fun_type_params)] + @ [("paramTypes", JSON_Array (Core_list.map ~f:(fun (_, t, _) -> json_of_t t) fun_params))] + @ [ + ( "paramNames", + JSON_Array + (List.rev_map + (function + | (Some n, _, _) -> JSON_String n + | (None, _, _) -> JSON_String "_") + fun_params) ); + ] + @ [ + ( "restParam", + match fun_rest_param with + | None -> JSON_Null + | Some (name, t) -> + JSON_Object + ( [("restParamType", json_of_t t)] + @ + match name with + | None -> [] + | Some name -> [("restParamName", JSON_String name)] ) ); + ("returnType", json_of_t fun_return); + ]) + and json_of_obj_t o = + Hh_json.( + let { obj_exact; obj_props; obj_literal; obj_frozen } = o in [ - "typeParams", json_of_type_params fun_type_params; - ] @ [ - "paramTypes", - JSON_Array (List.map (fun (_, t, _) -> json_of_t t) fun_params) - ] @ [ - "paramNames", JSON_Array (List.rev_map (function - | (Some n, _, _) -> JSON_String n - | (None, _, _) -> JSON_String "_" - ) fun_params); - ] @ [ - "restParam", (match fun_rest_param with - | None -> JSON_Null - | Some (name, t) -> JSON_Object ( + ("exact", JSON_Bool obj_exact); + ("frozen", JSON_Bool obj_frozen); + ("literal", JSON_Bool obj_literal); + ("props", JSON_Array (Core_list.map ~f:json_of_prop obj_props)); + ]) + and json_of_type_params ps = + Hh_json.( + match ps with + | None -> JSON_Null + | Some tparams -> JSON_Array (Core_list.map ~f:json_of_typeparam tparams)) + and json_of_targs targs_opt = + Hh_json.( + match targs_opt with + | Some targs -> [("typeArgs", JSON_Array (Core_list.map ~f:json_of_t targs))] + | None -> []) + and json_of_typeparam + { tp_name : string; tp_bound : t option; tp_polarity : polarity; tp_default : t option } = + Hh_json.( + JSON_Object + ( [ + ("name", JSON_String tp_name); + ("bound", Option.value_map tp_bound ~f:json_of_t ~default:JSON_Null); + ("polarity", json_of_polarity tp_polarity); + ] + @ Option.value_map tp_default ~default:[] ~f:(fun t -> [("default", json_of_t t)]) )) + and json_of_polarity polarity = Hh_json.JSON_String (string_of_polarity polarity) + and json_of_prop prop = + Hh_json.( + JSON_Object + (match prop with + | NamedProp (name, p) -> [ - "restParamType", json_of_t t; - ] @ (match name with - | None -> [] - | Some name -> ["restParamName", JSON_String name]))); - "returnType", json_of_t fun_return; - ] - ) - - and json_of_type_params ps = Hh_json.( - match ps with - | None -> JSON_Null - | Some tparams -> JSON_Array (List.map json_of_typeparam tparams) - ) - - and json_of_targs targs_opt = Hh_json.( - match targs_opt with - | Some targs -> [ "typeArgs", JSON_Array (List.map json_of_t targs) ] - | None -> [] - ) - - and json_of_typeparam { - tp_name: string; - 
tp_bound: t option; - tp_polarity: polarity; - tp_default: t option; - } = Hh_json.( - JSON_Object ([ - "name", JSON_String tp_name; - "bound", Option.value_map tp_bound ~f:json_of_t ~default:JSON_Null; - "polarity", json_of_polarity tp_polarity; - ] @ - Option.value_map tp_default ~default:[] ~f:(fun t -> ["default", json_of_t t]) - ) - ) - - and json_of_polarity polarity = - Hh_json.JSON_String (string_of_polarity polarity) - - and json_of_prop prop = Hh_json.( - JSON_Object (match prop with - | NamedProp (name, p) -> [ - "kind", JSON_String "NamedProp"; - "prop", JSON_Object [ - "name", JSON_String name; - "prop", json_of_named_prop p; - ]; - ] - | IndexProp d -> [ - "kind", JSON_String "IndexProp"; - "prop", json_of_dict d; - ] - | CallProp ft -> [ - "kind", JSON_String "NamedProp"; - "prop", JSON_Object (json_of_fun_t ft); - ] - ) - ) - - and json_of_dict { dict_polarity; dict_name; dict_key; dict_value } = Hh_json.( - JSON_Object [ - "polarity", json_of_polarity dict_polarity; - "name", JSON_String (Option.value dict_name ~default:"_"); - "key", json_of_t dict_key; - "value", json_of_t dict_value; - ] - ) - - and json_of_named_prop p = Hh_json.(JSON_Object ( - match p with - | Field (t, { fld_polarity; fld_optional }) -> [ - "kind", JSON_String "field"; - "type", json_of_t t; - "polarity", json_of_polarity fld_polarity; - "optional", JSON_Bool fld_optional; - ] - | Method t -> [ - "kind", JSON_String "Method"; - "funtype", JSON_Object (json_of_fun_t t); - ] - | Get t -> [ - "kind", JSON_String "Get"; - "type", json_of_t t; - ] - | Set t -> [ - "kind", JSON_String "Set"; - "type", json_of_t t; - ] - )) - - in fun t -> json_of_t t + ("kind", JSON_String "NamedProp"); + ("prop", JSON_Object [("name", JSON_String name); ("prop", json_of_named_prop p)]); + ] + | IndexProp d -> [("kind", JSON_String "IndexProp"); ("prop", json_of_dict d)] + | CallProp ft -> + [("kind", JSON_String "NamedProp"); ("prop", JSON_Object (json_of_fun_t ft))] + | SpreadProp t -> [("kind", JSON_String "SpreadProp"); ("prop", json_of_t t)])) + and json_of_dict { dict_polarity; dict_name; dict_key; dict_value } = + Hh_json.( + JSON_Object + [ + ("polarity", json_of_polarity dict_polarity); + ("name", JSON_String (Option.value dict_name ~default:"_")); + ("key", json_of_t dict_key); + ("value", json_of_t dict_value); + ]) + and json_of_named_prop p = + Hh_json.( + JSON_Object + (match p with + | Field (t, { fld_polarity; fld_optional }) -> + [ + ("kind", JSON_String "field"); + ("type", json_of_t t); + ("polarity", json_of_polarity fld_polarity); + ("optional", JSON_Bool fld_optional); + ] + | Method t -> [("kind", JSON_String "Method"); ("funtype", JSON_Object (json_of_fun_t t))] + | Get t -> [("kind", JSON_String "Get"); ("type", json_of_t t)] + | Set t -> [("kind", JSON_String "Set"); ("type", json_of_t t)])) + and json_of_utility u = + Hh_json.( + let ctor = Ty.string_of_utility_ctor u in + let ts = json_of_targs (Ty.types_of_utility u) in + ("kind", JSON_String ctor) :: ts) + in + (fun t -> json_of_t t) diff --git a/src/common/ty/ty_printer.ml b/src/common/ty/ty_printer.ml index bbbb42dd2f7..dab9674d1a8 100644 --- a/src/common/ty/ty_printer.ml +++ b/src/common/ty/ty_printer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -16,12 +16,16 @@ open Ty let varname n = spf "V$%d" n let crop_symbol = "..." 
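(* The printer below works on a node budget: the `size` counter bounds how
   many Ty.t nodes get rendered, each recursive call goes through
   count_calls ~counter:size, and once the budget is exhausted crop_symbol
   ("...") is emitted in place of the remaining elements. A minimal usage
   sketch, assuming a value ty : Ty.t is in scope:
     let s = Ty_printer.string_of_t ~force_single_line:true ~with_comments:false ty
   renders the type on a single line and suppresses the "(explicit)" /
   "(implicit)" annotation that a top-level `any` otherwise carries. *)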
+ let crop_atom = Atom crop_symbol (* from Js_layout_generator *) let utf8_escape = Js_layout_generator.utf8_escape + let better_quote = Js_layout_generator.better_quote + let wrap_in_parens = Js_layout_generator.wrap_in_parens + let with_semicolon = Js_layout_generator.with_semicolon let in_quotes s = @@ -31,65 +35,63 @@ let in_quotes s = let option ~f = Option.value_map ~default:Empty ~f +let property_key_quotes_needed x = + let regexp = Str.regexp "^[a-zA-Z\\$_][a-zA-Z0-9\\$_]*$" in + not (Str.string_match regexp x 0) + (*************************) (* Main Transformation *) (*************************) -let type_ ?(size=5000) t = - - let env_map: (Layout.layout_node IMap.t) ref = ref IMap.empty in +let type_ ?(size = 5000) ?(with_comments = true) t = + let env_map : Layout.layout_node IMap.t ref = ref IMap.empty in let size = ref size in - - (* util to limit the number of calls to a (usually recursive) function *) let counted_map f xs = let rec type_list_aux acc xs_ = if !size = 0 then - crop_atom::acc - else begin + crop_atom :: acc + else match xs_ with | [] -> acc - | y::ys -> type_list_aux (f y :: acc) ys - end + | y :: ys -> type_list_aux (f y :: acc) ys in type_list_aux [] xs |> List.rev in - + let builtin_value = function + | FunProto -> Atom "Function.prototype" + | ObjProto -> Atom "Object.prototype" + | FunProtoApply -> Atom "Function.prototype.apply" + | FunProtoBind -> Atom "Function.prototype.bind" + | FunProtoCall -> Atom "Function.prototype.call" + in (* The depth parameter is useful for formatting unions: Top-level does not get parentheses. *) let rec type_ ~depth t = let depth = depth + 1 in - count_calls ~counter:size ~default:crop_atom (fun () -> - type_impl ~depth t - ) - - and type_impl ~depth (t: Ty.t) = + count_calls ~counter:size ~default:crop_atom (fun () -> type_impl ~depth t) + and type_impl ~depth (t : Ty.t) = match t with - | TVar (v, ts) -> type_generic ~depth (type_var v) ts - | Bound (Symbol (_, s)) -> Atom s - | Any -> Atom "any" - | AnyObj -> Atom "Object" - | AnyFun -> Atom "Function" + | TVar (v, ts) -> type_reference ~depth (type_var v) ts + | Bound (_, name) -> Atom name + | Any k -> any ~depth k | Top -> Atom "mixed" - | Bot -> Atom "empty" + | Bot _ -> Atom "empty" | Void -> Atom "void" | Null -> Atom "null" - | Num -> Atom "number" - | Str -> Atom "string" - | Bool -> Atom "boolean" - | Fun func -> - type_function ~depth - ~sep:(fuse [pretty_space; Atom "=>"]) - func + | Num _ -> Atom "number" + | Str _ -> Atom "string" + | Bool _ -> Atom "boolean" + | Fun func -> type_function ~depth ~sep:(fuse [pretty_space; Atom "=>"]) func | Obj obj -> type_object ~depth obj | Arr arr -> type_array ~depth arr - | Generic (Symbol (_, id), _, ts) -> type_generic ~depth (identifier id) ts - | Union (t1, t2, ts) -> - type_union ~depth (t1::t2::ts) - | Inter (t1, t2, ts) -> - type_intersection ~depth (t1::t2::ts) - | Class (n, s, ps) -> type_class ~depth n s ps + | Generic g -> type_generic ~depth g + | Union (t1, t2, ts) -> type_union ~depth (t1 :: t2 :: ts) + | Inter (t1, t2, ts) -> type_intersection ~depth (t1 :: t2 :: ts) + | ClassDecl (n, ps) -> class_decl ~depth n ps + | InterfaceDecl (n, ps) -> interface_decl ~depth n ps + | Utility s -> utility ~depth s | Tup ts -> list ~wrap:(Atom "[", Atom "]") @@ -98,246 +100,288 @@ let type_ ?(size=5000) t = (counted_map (type_ ~depth) ts) | StrLit raw -> fuse (in_quotes raw) | NumLit raw -> Atom raw - | BoolLit value -> Atom (if value then "true" else "false") - | Exists -> Atom "*" + | BoolLit value -> + Atom + ( if 
value then + "true" + else + "false" ) | TypeAlias ta -> type_alias ta - | TypeOf (Symbol (_, n)) -> fuse [Atom "typeof"; space; identifier n] - | Module (Symbol (_, n)) -> fuse [Atom "module"; space; identifier n] + | InlineInterface { if_extends; if_body } -> type_interface ~depth if_extends if_body + | TypeOf pv -> fuse [Atom "typeof"; space; builtin_value pv] + | Module (sym, { cjs_export; exports }) -> module_t ~depth sym exports cjs_export | Mu (i, t) -> let t = type_ ~depth:0 t in env_map := IMap.add i t !env_map; Atom (varname i) - + and export ~depth (name, t) = fuse [identifier name; Atom ":"; space; type_ ~depth t] + and module_t ~depth sym exports cjs_export = + let name = + match sym with + | Some { name; _ } -> fuse [space; identifier name] + | None -> Empty + in + let cjs_name = "exports" in + let exports = + Option.value_map ~f:(fun cjs -> (cjs_name, cjs) :: exports) ~default:exports cjs_export + in + fuse + [ + Atom "module"; + name; + Atom ":"; + space; + list ~wrap:(Atom "{", Atom "}") ~sep:(Atom ",") (counted_map (export ~depth) exports); + ] and type_var (RVar i) = Atom (varname i) - - and type_generic ~depth base typeParameters = - fuse [ - base; - option (type_parameter_instantiation ~depth) typeParameters; - ] - + and type_generic ~depth g = + let ({ name; _ }, _, params) = g in + let name = identifier name in + type_reference ~depth name params + and type_reference ~depth name params = + let params = option (type_parameter_instantiation ~depth) params in + fuse [name; params] and type_parameter_instantiation ~depth params = - list - ~wrap:(Atom "<", Atom ">") - ~sep:(Atom ",") - (counted_map (type_ ~depth) params) - + list ~wrap:(Atom "<", Atom ">") ~sep:(Atom ",") (counted_map (type_ ~depth) params) and identifier name = Atom name - - and type_alias { ta_name = Symbol (provenance, id); ta_tparams; ta_type } = - match provenance with - | Imported _ | Remote _ -> fuse [ - Atom "imported"; space; - identifier id; - option (type_parameter ~depth:0) ta_tparams; - ] - - | _ -> fuse ([ - Atom "type"; space; - identifier id; - option (type_parameter ~depth:0) ta_tparams; + and any ~depth kind = + let kind = + match kind with + | Annotated -> "explicit" + | _ -> "implicit" + in + fuse + [ + Atom "any"; + ( if depth = 1 && with_comments then + fuse [pretty_space; Atom kind |> wrap_in_parens] + else + Empty ); ] - @ Option.value_map ta_type ~default:[] ~f:(fun t -> [ - pretty_space; Atom "="; pretty_space; type_ ~depth:0 t - ])) - - and type_function ~depth ~sep - { fun_params; fun_rest_param; fun_return; fun_type_params } = + and type_alias { ta_name = { name; _ }; ta_tparams; ta_type } = + fuse + ( [Atom "type"; space; identifier name; option (type_parameter ~depth:0) ta_tparams] + @ Option.value_map ta_type ~default:[] ~f:(fun t -> + [pretty_space; Atom "="; pretty_space; type_ ~depth:0 t]) ) + and type_interface ~depth extends body = + let extends = + match extends with + | [] -> Empty + | _ -> + fuse_with_space + [Atom "extends"; list ~sep:(Atom ",") (Core_list.map ~f:(type_generic ~depth) extends)] + in + let body = type_object ~depth body in + fuse_with_space [Atom "interface"; extends; body] + and type_function ~depth ~sep { fun_params; fun_rest_param; fun_return; fun_type_params } = let params = counted_map (type_function_param ~depth) fun_params in - let params = match fun_rest_param with - | Some (name, t) -> params @ [ fuse [ - Atom "..."; - type_function_param ~depth (name, t, { prm_optional = false }) - ] - ] - | None -> params + let params = + match 
fun_rest_param with + | Some (name, t) -> + params + @ [fuse [Atom "..."; type_function_param ~depth (name, t, { prm_optional = false })]] + | None -> params in - fuse [ - option (type_parameter ~depth) fun_type_params; - list - ~wrap:(Atom "(", Atom ")") - ~sep:(Atom ",") - ~trailing:false - params; - sep; - pretty_space; - type_ ~depth fun_return; - ] - + fuse + [ + option (type_parameter ~depth) fun_type_params; + list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") ~trailing:false params; + sep; + pretty_space; + type_ ~depth fun_return; + ] and type_function_param ~depth (name, annot, { prm_optional }) = - fuse [ - begin match name with - | Some id -> fuse [ - identifier id; - if prm_optional then Atom "?" else Empty; - Atom ":"; - pretty_space; - ] - | None -> Empty - end; - type_ ~depth annot; - ] - - and type_object_property ~depth = - let open Ty in - function - | NamedProp (key, named_prop) -> begin - match named_prop with - | Field (t, { fld_polarity; fld_optional }) -> - fuse [ - variance_ fld_polarity; - identifier key; - if fld_optional then Atom "?" else Empty; - Atom ":"; - pretty_space; - type_ ~depth t - ] - - | Method func -> fuse [ - identifier key; - type_function ~depth ~sep:(Atom ":") func; - ] - - | Get t -> fuse [ - Atom "get"; space; - identifier key; - type_ ~depth t; - ] - - | Set t -> fuse [ - Atom "set"; space; - identifier key; - type_ ~depth t; - ] - end - - | IndexProp { dict_polarity; dict_name; dict_key; dict_value } -> - fuse [ - variance_ dict_polarity; - Atom "["; - begin match dict_name with - | Some id -> fuse [ - identifier id; Atom ":"; pretty_space; - ] - | None -> Empty + fuse + [ + begin + match name with + | Some id -> + fuse + [ + identifier id; + ( if prm_optional then + Atom "?" + else + Empty ); + Atom ":"; + pretty_space; + ] + | None -> Empty end; - type_ ~depth dict_key; - Atom "]"; Atom ":"; pretty_space; - type_ ~depth dict_value; + type_ ~depth annot; ] - | CallProp func -> fuse [ - type_function ~depth ~sep:(Atom ":") func + and type_object_property = + let to_key x = + if property_key_quotes_needed x then + let quote = better_quote x in + fuse [Atom quote; Atom (utf8_escape ~quote x); Atom quote] + else + identifier x + in + Ty.( + fun ~depth prop -> + match prop with + | NamedProp (key, named_prop) -> + begin + match named_prop with + | Field (t, { fld_polarity; fld_optional }) -> + fuse + [ + variance_ fld_polarity; + to_key key; + ( if fld_optional then + Atom "?" 
+ else + Empty ); + Atom ":"; + pretty_space; + type_ ~depth t; + ] + | Method func -> fuse [to_key key; type_function ~depth ~sep:(Atom ":") func] + | Get t -> + group + [ + Atom "get"; + space; + to_key key; + Atom "("; + softline; + Atom ")"; + Atom ":"; + pretty_space; + type_ ~depth t; + ] + | Set t -> + group + [ + Atom "set"; + space; + to_key key; + wrap_and_indent (Atom "(", Atom ")") [type_ ~depth t]; + Atom ":"; + pretty_space; + type_ ~depth Void; + ] + end + | IndexProp { dict_polarity; dict_name; dict_key; dict_value } -> + fuse + [ + variance_ dict_polarity; + Atom "["; + begin + match dict_name with + | Some id -> fuse [identifier id; Atom ":"; pretty_space] + | None -> Empty + end; + type_ ~depth dict_key; + Atom "]"; + Atom ":"; + pretty_space; + type_ ~depth dict_value; + ] + | CallProp func -> fuse [type_function ~depth ~sep:(Atom ":") func] + | SpreadProp t -> fuse [Atom "..."; type_ ~depth t]) + and type_array ~depth { arr_readonly; arr_literal = _; arr_elt_t } = + fuse + [ + Atom + ( if arr_readonly then + "$ReadOnlyArray" + else + "Array" ); + Atom "<"; + type_ ~depth arr_elt_t; + Atom ">"; ] - - and type_array ~depth { arr_readonly; arr_elt_t } = - fuse [ - Atom (if arr_readonly then "$ReadOnlyArray" else "Array"); - Atom "<"; - type_ ~depth arr_elt_t; - Atom ">"; - ] - - and type_object ~depth ?(sep=(Atom ",")) { obj_exact; obj_props; obj_frozen = _ } = - let s_exact = if obj_exact then Atom "|" else Empty in + and type_object ~depth ?(sep = Atom ",") { obj_exact; obj_props; _ } = + let s_exact = + if obj_exact then + Atom "|" + else + Empty + in list ~wrap:(fuse [Atom "{"; s_exact], fuse [s_exact; Atom "}"]) ~sep ~trailing:false (counted_map (type_object_property ~depth) obj_props) - and type_union ~depth ts = - let prefix, ts = + let (prefix, ts) = if List.mem Null ts && List.mem Void ts then let ts = List.filter (fun t -> t <> Null && t <> Void) ts in - let ts = match ts with - | [] -> [Bot] - | _ -> ts in + let ts = + match ts with + | [] -> [Bot EmptyType] + | _ -> ts + in (Atom "?", ts) else (Empty, ts) in let wrap = - if depth > 1 && List.length ts > 1 - then Some (Atom "(", Atom ")") - else None - in - let elts = Core_list.intersperse ( - counted_map (type_with_parens ~depth) ts - ) ~sep:(Atom "|") + if depth > 1 && List.length ts > 1 then + Some (Atom "(", Atom ")") + else + None in + let elts = Core_list.intersperse (counted_map (type_with_parens ~depth) ts) ~sep:(Atom "|") in fuse [prefix; list ?wrap ~inline:(false, true) elts] - and type_intersection ~depth ts = - let wrap = if depth > 1 then Some (Atom "(", Atom ")") else None in - let elts = Core_list.intersperse ( - counted_map (type_with_parens ~depth) ts - ) ~sep:(Atom "&") + let wrap = + if depth > 1 then + Some (Atom "(", Atom ")") + else + None in + let elts = Core_list.intersperse (counted_map (type_with_parens ~depth) ts) ~sep:(Atom "&") in list ?wrap ~inline:(false, true) elts - and type_with_parens ~depth t = match t with | Fun _ | Union _ - | Inter _ -> wrap_in_parens (type_ ~depth t) + | Inter _ -> + wrap_in_parens (type_ ~depth t) | _ -> type_ ~depth t - - and type_class ~depth (Symbol (_, id)) structural typeParameters = fuse [ - Atom (if structural then "interface" else "class"); - space; - identifier id; - option (type_parameter ~depth) typeParameters; - ] - + and class_decl ~depth { name; _ } typeParameters = + fuse [Atom "class"; space; identifier name; option (type_parameter ~depth) typeParameters] + and interface_decl ~depth { name; _ } typeParameters = + fuse [Atom "interface"; 
space; identifier name; option (type_parameter ~depth) typeParameters] + and utility ~depth u = + let ctor = Ty.string_of_utility_ctor u in + let ts = Ty.types_of_utility u in + type_reference ~depth (identifier ctor) ts and type_parameter ~depth params = list ~wrap:(Atom "<", Atom ">") ~sep:(Atom ",") ~trailing:false (counted_map (type_param ~depth) params) - and type_param ~depth { tp_name; tp_bound; tp_polarity; tp_default } = - fuse [ - variance_ tp_polarity; - Atom tp_name; - option (type_annotation ~depth) tp_bound; - begin match tp_default with - | Some t -> fuse [ - pretty_space; - Atom "="; - pretty_space; - type_ ~depth t; - ] - | None -> Empty - end; - ] - - and type_annotation ~depth t = - fuse [ - Atom ":"; - pretty_space; - type_ ~depth t; - ] - + fuse + [ + variance_ tp_polarity; + Atom tp_name; + option (type_annotation ~depth) tp_bound; + begin + match tp_default with + | Some t -> fuse [pretty_space; Atom "="; pretty_space; type_ ~depth t] + | None -> Empty + end; + ] + and type_annotation ~depth t = fuse [Atom ":"; pretty_space; type_ ~depth t] and variance_ = function | Positive -> Atom "+" | Negative -> Atom "-" | Neutral -> Empty - in let env_ (i, layout) = - with_semicolon (fuse [ - Atom "type"; space; - Atom (varname i); - pretty_space; Atom "="; pretty_space; - layout - ]) + with_semicolon + (fuse [Atom "type"; space; Atom (varname i); pretty_space; Atom "="; pretty_space; layout]) in - (* Main call *) let type_layout = type_ ~depth:0 t in (* Run type_ first so that env_map has been populated *) - let env_layout = List.map env_ (IMap.bindings !env_map) in + let env_layout = Core_list.map ~f:env_ (IMap.bindings !env_map) in Layout.(join Newline (env_layout @ [type_layout])) (* Same as Compact_printer with the exception of: @@ -349,19 +393,22 @@ let print ~force_single_line ~source_maps node = (* this printer does not output locations *) | SourceLocation _ -> src | Newline -> - if force_single_line then Source.add_space 1 src - else Source.add_newline src + if force_single_line then + Source.add_space 1 src + else + Source.add_newline src | Indent node -> print_node src node | IfPretty (node, _) -> print_node src node | Concat nodes | Group nodes - | Sequence (_, nodes) -> List.fold_left print_node src nodes + | Sequence (_, nodes) -> + List.fold_left print_node src nodes | Atom s -> Source.add_string s src | Identifier (loc, s) -> Source.add_identifier loc s src | IfBreak (_, no_break) -> print_node src no_break | Empty -> src - in + in print_node (Source.create ~source_maps ()) node -let string_of_t ?(force_single_line=false) (ty: Ty.t) : string = - print ~force_single_line ~source_maps:None (type_ ty) |> Source.contents +let string_of_t ?(force_single_line = false) ?(with_comments = true) (ty : Ty.t) : string = + print ~force_single_line ~source_maps:None (type_ ~with_comments ty) |> Source.contents diff --git a/src/common/ty/ty_serializer.ml b/src/common/ty/ty_serializer.ml index 56bd80bae70..80e04753561 100644 --- a/src/common/ty/ty_serializer.ml +++ b/src/common/ty/ty_serializer.ml @@ -1,38 +1,38 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) module Ast = Flow_ast - open Core_result open Ty module T = Flow_ast.Type -let mapM f ts = all (List.map f ts) +let mapM f ts = all (Core_list.map ~f ts) let opt f t = match t with - | Some t -> f t >>| fun t -> Some t + | Some t -> f t >>| (fun t -> Some t) | _ -> return None -let identifier x = (Loc.none, x) +let id_from_string x = Flow_ast_utils.ident_of_source (Loc.none, x) + +let id_from_symbol x = + let { name; anonymous; _ } = x in + if anonymous then + Error (Utils_js.spf "Cannot output anonymous elements.") + else + Ok (id_from_string name) + +let mk_generic x targs = { T.Generic.id = T.Generic.Identifier.Unqualified x; targs } -let builtin x = - return (Loc.none, T.Generic { - T.Generic. - id = T.Generic.Identifier.Unqualified (identifier x); - targs = None; - }) +let mk_generic_type x targs = (Loc.none, T.Generic (mk_generic x targs)) -let generic_builtin x targs = - return (Loc.none, T.Generic { - T.Generic. - id = T.Generic.Identifier.Unqualified (identifier x); - targs = Some targs; - }) +let builtin_from_string ?targs x = + let x = id_from_string x in + mk_generic_type x targs let tvar (RVar _) = Error "Unsupported recursive variables." @@ -45,195 +45,206 @@ let rec type_ t = let just t = return (Loc.none, t) in match t with | TVar (v, _) -> tvar v - | Bound (Symbol (_, s)) -> builtin s - | Generic (x, _, ts) -> generic x ts - | Any -> just T.Any - | AnyObj -> builtin "Object" - | AnyFun -> builtin "Function" + | Bound (_, name) -> Ok (builtin_from_string name) + | Generic (x, _, ts) -> generic_type x ts + | Any _ -> just T.Any | Top -> just T.Mixed - | Bot -> just T.Empty + | Bot _ -> just T.Empty | Void -> just T.Void | Null -> just T.Null - | Num -> just T.Number - | Str -> just T.String - | Bool -> just T.Boolean + | Num (Some lit) -> + return + (builtin_from_string + "$TEMPORARY$number" + ~targs:(Loc.none, [(Loc.none, T.NumberLiteral (num_lit lit))])) + | Num None -> just T.Number + | Str (Some lit) -> + return + (builtin_from_string + "$TEMPORARY$string" + ~targs:(Loc.none, [(Loc.none, T.StringLiteral (str_lit lit))])) + | Str None -> just T.String + | Bool (Some lit) -> + return + (builtin_from_string + "$TEMPORARY$boolean" + ~targs:(Loc.none, [(Loc.none, T.BooleanLiteral lit)])) + | Bool None -> just T.Boolean | NumLit lit -> just (T.NumberLiteral (num_lit lit)) | StrLit lit -> just (T.StringLiteral (str_lit lit)) | BoolLit lit -> just (T.BooleanLiteral lit) - | Fun f -> function_ f >>| fun f -> (Loc.none, T.Function f) + | Fun f -> function_ f >>| (fun f -> (Loc.none, T.Function f)) | Obj o -> obj_ o | Arr a -> arr a - | Tup ts -> mapM type_ ts >>| fun ts -> (Loc.none, T.Tuple ts) - | Union (t0, t1, ts) as t -> union t (t0,t1,ts) - | Inter (t0, t1, ts) -> intersection (t0,t1,ts) - | Class (s, _, _) -> class_ s - + | Tup ts -> mapM type_ ts >>| (fun ts -> (Loc.none, T.Tuple ts)) + | Union (t0, t1, ts) as t -> union t (t0, t1, ts) + | Inter (t0, t1, ts) -> intersection (t0, t1, ts) + | ClassDecl (s, _) -> class_decl s + | Utility s -> utility s + | InlineInterface i -> inline_interface i + | InterfaceDecl _ | TypeOf _ | TypeAlias _ - | Exists | Mu _ - | Module _ - -> - Error (Utils_js.spf "Unsupported type constructor `%s`." - (Ty_debug.string_of_ctor t)) - -and generic (Symbol (_, id)) targs = - opt type_arguments targs >>| fun targs -> - (Loc.none, T.Generic { - T.Generic. - id = T.Generic.Identifier.Unqualified (identifier id); - targs - }) + | Module _ -> + Error (Utils_js.spf "Unsupported type constructor `%s`." 
(Ty_debug.string_of_ctor t)) + +and generic x targs = + id_from_symbol x >>= (fun id -> opt type_arguments targs >>| (fun targs -> mk_generic id targs)) + +and generic_type x targs = + id_from_symbol x + >>= (fun id -> opt type_arguments targs >>| (fun targs -> mk_generic_type id targs)) and union t (t0, t1, rest) = - let ts = bk_union t in + let ts = bk_union t |> Nel.to_list in if List.mem Null ts && List.mem Void ts then - let ts = List.filter (fun t -> not (t = Null || t = Void)) ts in - type_ (mk_union ts) >>| fun ts -> - (Loc.none, T.Nullable ts) + match List.filter (fun t -> not (t = Null || t = Void)) ts with + | [] -> return (Loc.none, T.Union ((Loc.none, T.Null), (Loc.none, T.Void), [])) + | hd :: tl -> type_ (mk_union (hd, tl)) >>| (fun ts -> (Loc.none, T.Nullable ts)) else - type_ t0 >>= fun t0 -> - type_ t1 >>= fun t1 -> - mapM type_ rest >>| fun rest -> - (Loc.none, T.Union (t0, t1, rest)) + type_ t0 + >>= fun t0 -> + type_ t1 >>= (fun t1 -> mapM type_ rest >>| (fun rest -> (Loc.none, T.Union (t0, t1, rest)))) and intersection (t0, t1, rest) = - type_ t0 >>= fun t0 -> - type_ t1 >>= fun t1 -> - mapM type_ rest >>| fun rest -> - (Loc.none, T.Intersection (t0, t1, rest)) + type_ t0 + >>= fun t0 -> + type_ t1 + >>= (fun t1 -> mapM type_ rest >>| (fun rest -> (Loc.none, T.Intersection (t0, t1, rest)))) and function_ f = - type_ f.fun_return >>= fun return -> - fun_params f.fun_params f.fun_rest_param >>= fun params -> - opt type_params f.fun_type_params >>| fun tparams -> { - T.Function. - params; - return; - tparams; - } + type_ f.fun_return + >>= fun return -> + fun_params f.fun_params f.fun_rest_param + >>= fun params -> + opt type_params f.fun_type_params >>| (fun tparams -> { T.Function.params; return; tparams }) and fun_params params rest_param = - mapM fun_param params >>= fun params -> - opt fun_rest_param rest_param >>| fun rest -> - (Loc.none, { - T.Function.Params. - params; - rest; - }) - -and fun_param (name, t, {prm_optional}) = - type_ t >>| fun annot -> - (Loc.none, { - T.Function.Param. - name = Option.map ~f:identifier name; - annot; - optional = prm_optional; - }) + mapM fun_param params + >>= fun params -> + opt fun_rest_param rest_param >>| (fun rest -> (Loc.none, { T.Function.Params.params; rest })) + +and fun_param (name, t, { prm_optional }) = + let name = Option.map ~f:id_from_string name in + type_ t >>| (fun annot -> (Loc.none, { T.Function.Param.name; annot; optional = prm_optional })) and fun_rest_param (name, t) = - fun_param (name, t, {prm_optional = false}) >>| fun argument -> - Loc.none, { - T.Function.RestParam. - argument; - } + fun_param (name, t, { prm_optional = false }) + >>| (fun argument -> (Loc.none, { T.Function.RestParam.argument })) and obj_ o = - mapM obj_prop o.obj_props >>| fun properties -> - (Loc.none, T.Object { - T.Object. 
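
The serializer above threads result values through `mapM`, `opt` and the `>>=`/`>>|` operators from Core_result, so the first unsupported construct aborts the whole translation. For orientation, a self-contained equivalent of the two combinators using Stdlib's result type (this is a sketch, not the Core_result API):

(* Result-threading combinators in the spirit of mapM/opt above. *)
let mapM f ts =
  List.fold_right
    (fun t acc ->
      match (f t, acc) with
      | (Ok x, Ok xs) -> Ok (x :: xs)
      | (Error e, _) | (_, Error e) -> Error e)
    ts
    (Ok [])

let opt f = function
  | Some t ->
    (match f t with
    | Ok x -> Ok (Some x)
    | Error e -> Error e)
  | None -> Ok None

let () =
  assert (mapM (fun x -> if x >= 0 then Ok x else Error "negative") [1; 2] = Ok [1; 2]);
  assert (mapM (fun x -> if x >= 0 then Ok x else Error "negative") [1; -2] = Error "negative");
  assert (opt (fun x -> Ok (x + 1)) (Some 1) = Ok (Some 2))
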
- exact = o.obj_exact; - properties; - }) + mapM obj_prop o.obj_props + >>| fun properties -> + (Loc.none, T.Object { T.Object.exact = o.obj_exact; inexact = false; properties }) and obj_prop = function - | NamedProp (x, p) -> - obj_named_prop x p >>| fun p -> T.Object.Property (Loc.none, p) - | IndexProp d -> - obj_index_prop d >>| fun p -> T.Object.Indexer (Loc.none, p) - | CallProp f -> - obj_call_prop f >>| fun p -> T.Object.CallProperty (Loc.none, p) - -and obj_named_prop x = - let key = Ast.Expression.Object.Property.Identifier (Loc.none, x) in - function - | Field (t, fld) -> type_ t >>| fun t -> { - T.Object.Property.key; - value = T.Object.Property.Init t; - optional = fld.fld_optional; - static = false; - proto = false; - _method = false; - variance = variance_ fld.fld_polarity; - } - | Method f -> function_ f >>| fun fun_t -> { - T.Object.Property.key; - value = T.Object.Property.Init (Loc.none, T.Function fun_t); - optional = false; - static = false; - proto = false; - _method = true; - variance = None; - } - | Get t -> getter t >>| fun t -> { - T.Object.Property.key; - value = T.Object.Property.Get (Loc.none, t); - optional = false; - static = false; - proto = false; - _method = false; - variance = None; - } - | Set t -> setter t >>| fun t -> { - T.Object.Property.key; - value = T.Object.Property.Set (Loc.none, t); - optional = false; - static = false; - proto = false; - _method = false; - variance = None; - } + | NamedProp (x, p) -> obj_named_prop x p >>| (fun p -> T.Object.Property (Loc.none, p)) + | IndexProp d -> obj_index_prop d >>| (fun p -> T.Object.Indexer (Loc.none, p)) + | CallProp f -> obj_call_prop f >>| (fun p -> T.Object.CallProperty (Loc.none, p)) + | SpreadProp t -> obj_spread_prop t >>| (fun p -> T.Object.SpreadProperty p) + +and obj_named_prop = + let to_key x = + if Ty_printer.property_key_quotes_needed x then + let raw = x in + let value = Ast.Literal.String raw in + Ast.Expression.Object.Property.Literal + (Loc.none, { Ast.Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt () }) + else + Ast.Expression.Object.Property.Identifier (id_from_string x) + in + fun x prop -> + match prop with + | Field (t, fld) -> + type_ t + >>| fun t -> + { + T.Object.Property.key = to_key x; + value = T.Object.Property.Init t; + optional = fld.fld_optional; + static = false; + proto = false; + _method = false; + variance = variance_ fld.fld_polarity; + } + | Method f -> + function_ f + >>| fun fun_t -> + { + T.Object.Property.key = to_key x; + value = T.Object.Property.Init (Loc.none, T.Function fun_t); + optional = false; + static = false; + proto = false; + _method = true; + variance = None; + } + | Get t -> + getter t + >>| fun t -> + { + T.Object.Property.key = to_key x; + value = T.Object.Property.Get (Loc.none, t); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } + | Set t -> + setter t + >>| fun t -> + { + T.Object.Property.key = to_key x; + value = T.Object.Property.Set (Loc.none, t); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } and obj_index_prop d = - type_ d.dict_key >>= fun key -> - type_ d.dict_value >>| fun value -> { - T.Object.Indexer. 
- id = Option.map ~f:identifier d.dict_name; - key; - value; - static = false; - variance = variance_ d.dict_polarity; - } + let id = Option.map ~f:id_from_string d.dict_name in + type_ d.dict_key + >>= fun key -> + type_ d.dict_value + >>| fun value -> + { T.Object.Indexer.id; key; value; static = false; variance = variance_ d.dict_polarity } and obj_call_prop f = - function_ f >>| fun value -> { - T.Object.CallProperty. - value = (Loc.none, value); - static = false; -} + function_ f >>| (fun value -> { T.Object.CallProperty.value = (Loc.none, value); static = false }) + +and obj_spread_prop t = type_ t >>| (fun t -> (Loc.none, { T.Object.SpreadProperty.argument = t })) -and arr { arr_readonly; arr_elt_t } = - type_ arr_elt_t >>= fun t -> - if arr_readonly - then generic_builtin "$ReadOnlyArray" (Loc.none, [t]) - else return (Loc.none, T.Array t) +and arr { arr_readonly; arr_elt_t; _ } = + type_ arr_elt_t + >>| fun t -> + if arr_readonly then + builtin_from_string "$ReadOnlyArray" ~targs:(Loc.none, [t]) + else + (Loc.none, T.Array t) -and type_params ts = - mapM type_param ts >>| fun ts -> (Loc.none, ts) +and type_params ts = mapM type_param ts >>| (fun ts -> (Loc.none, ts)) and type_param tp = - opt annotation tp.tp_bound >>= fun bound -> - opt type_ tp.tp_default >>| fun default -> - (Loc.none, { - T.ParameterDeclaration.TypeParam. - name = Loc.none, tp.tp_name; - bound; - variance = variance_ tp.tp_polarity; - default; - }) - -and type_arguments ts = - mapM type_ ts >>| fun ts -> (Loc.none, ts) + opt annotation tp.tp_bound + >>= fun bound -> + opt type_ tp.tp_default + >>| fun default -> + ( Loc.none, + { + T.ParameterDeclaration.TypeParam.name = id_from_string tp.tp_name; + bound = + (match bound with + | Some t -> T.Available t + | None -> T.Missing Loc.none); + variance = variance_ tp.tp_polarity; + default; + } ) + +and type_arguments ts = mapM type_ ts >>| (fun ts -> (Loc.none, ts)) and str_lit lit = let quote = Js_layout_generator.better_quote lit in @@ -241,32 +252,44 @@ and str_lit lit = let raw = quote ^ raw_lit ^ quote in { Ast.StringLiteral.value = lit; raw } -and num_lit lit = { - Ast.NumberLiteral. - value = (try Pervasives.float_of_string lit with Failure _ -> 0.); - raw = lit; -} - -and getter t = function_ { - fun_params = []; - fun_rest_param = None; - fun_return = t; - fun_type_params = None; -} - -and setter t = function_ { - fun_params = [(None, t, {prm_optional = false})]; - fun_rest_param = None; - fun_return = Void; - fun_type_params = None; -} - -and class_ t = - generic t None >>| fun t -> - (Loc.none, T.Generic { - T.Generic. 
- id = T.Generic.Identifier.Unqualified (identifier "Class"); - targs = Some (Loc.none, [t]) - }) - -and annotation t = type_ t >>| fun t -> (Loc.none, t) +and num_lit lit = + { + Ast.NumberLiteral.value = (try Pervasives.float_of_string lit with Failure _ -> 0.); + raw = lit; + } + +and getter t = + function_ { fun_params = []; fun_rest_param = None; fun_return = t; fun_type_params = None } + +and setter t = + function_ + { + fun_params = [(None, t, { prm_optional = false })]; + fun_rest_param = None; + fun_return = Void; + fun_type_params = None; + } + +and class_decl name = generic_type name None >>| (fun name -> (Loc.none, T.Typeof name)) + +and interface_extends e = + let (x, _, ts) = e in + generic x ts >>| (fun gen -> (Loc.none, gen)) + +and inline_interface i = + let { if_extends; if_body } = i in + let { obj_props; _ } = if_body in + mapM interface_extends if_extends + >>= fun extends -> + mapM obj_prop obj_props + >>| fun properties -> + let body = (Loc.none, { T.Object.exact = false; inexact = false; properties }) in + (Loc.none, T.Interface { T.Interface.body; extends }) + +and utility u = + let ctor = Ty.string_of_utility_ctor u in + let ts = Ty.types_of_utility u in + let id = id_from_string ctor in + opt type_arguments ts >>| (fun ts -> mk_generic_type id ts) + +and annotation t = type_ t >>| (fun t -> (Loc.none, t)) diff --git a/src/common/ty/ty_serializer.mli b/src/common/ty/ty_serializer.mli index d6d255237ae..e7ba452ef97 100644 --- a/src/common/ty/ty_serializer.mli +++ b/src/common/ty/ty_serializer.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/common/ty/ty_symbol.ml b/src/common/ty/ty_symbol.ml index a0377b30717..c9524d8c024 100644 --- a/src/common/ty/ty_symbol.ml +++ b/src/common/ty/ty_symbol.ml @@ -1,20 +1,32 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -type provenance = - | Local of Loc.t (* Defined locally *) - | Imported of Loc.t (* Defined remotely, imported to file *) - | Remote of Loc.t (* Defined remotely, NOT imported to file *) - | Library of Loc.t (* Defined in library *) - | Builtin +type import_mode = + | ValueMode + | TypeMode + | TypeofMode + +and imported_ident = + (ALoc.t * string * import_mode[@printer (fun fmt (_, id, _) -> fprintf fmt "%s" id)]) -type identifier = string +and remote_info = { imported_as: imported_ident option } + +and provenance = + | Local + | Remote of remote_info + | Library + | Builtin -type symbol = Symbol of (provenance * identifier) [@@unboxed] +and symbol = { + provenance: provenance; + def_loc: ALoc.t; [@printer (fun fmt loc -> fprintf fmt "%s" (ALoc.to_string_no_source loc))] + name: string; + anonymous: bool; +} +[@@deriving show] -let builtin_symbol name = - Symbol (Builtin, name) +let builtin_symbol name = { provenance = Builtin; def_loc = ALoc.none; name; anonymous = false } diff --git a/src/common/ty/ty_utils.ml b/src/common/ty/ty_utils.ml index c0a0d76c146..66a410419cd 100644 --- a/src/common/ty/ty_utils.ml +++ b/src/common/ty/ty_utils.ml @@ -1,88 +1,250 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module FreeVars = struct + type env = { + is_toplevel: bool; + skip: ISet.t; + } -(* Set and map based on type variables *) + let searcher = + object (self) + inherit [_] Ty.reduce_ty as super -module TVarSet = struct - include ISet - let append = union + method zero = ISet.empty + + method plus = ISet.union + + method! on_t env t = + Ty.( + match t with + | TVar (RVar i, _) when not (ISet.mem i env.skip) -> ISet.singleton i + | TypeAlias { ta_tparams; ta_type = Some t_body; _ } -> + let env' = { env with is_toplevel = false } in + let acc = self#on_option (self#on_list self#on_type_param) env' ta_tparams in + (* If the type alias is the top-level constructor, then the body of the alias + * will be useful and so we descend into that type expression and collect + * variables. Otherwise we avoid collecting variables from the body as it is + * typically not be exposed through a type query. *) + if env.is_toplevel then + self#plus acc (self#on_t env' t_body) + else + acc + | Mu (v, t) -> + let env = { env with skip = ISet.add v env.skip } in + super#on_t env t + | t -> super#on_t env t) + end + + (* Computes the set of variables appearing free in the input. *) + let of_type ~is_toplevel t = + let env = { is_toplevel; skip = ISet.empty } in + searcher#on_t env t end -(* Free variables - * - * Decide if a type variable appears free inside a type. This is useful for: - * - * - Deciding well-formedness: a type variable should not appear free in a - * top-level type. - * - * - Computing recursive types: we decide if a type is recursive, we will need - * to know if it appears free in its expansion. (More can be found in the type - * normalizer module.) - *) +let tvar_appears_in_type ~is_toplevel v t = + let (Ty.RVar v) = v in + ISet.mem v (FreeVars.of_type ~is_toplevel t) + +module Size = struct + exception SizeCutOff + + type bounded_int = + | Exactly of int + | GreaterThan of int + + let _size_to_string = function + | Exactly x -> Utils_js.spf "%d" x + | GreaterThan x -> Utils_js.spf "(Greater than %d)" x + + let of_type = + Ty.( + let size = ref 0 in + let o = + object + inherit [_] iter_ty as super + + method! on_t max (t : Ty.t) = + size := !size + 1; + if !size > max then raise SizeCutOff; + super#on_t max t + end + in + fun ~max t -> + size := 0; + match o#on_t max t with + | exception SizeCutOff -> GreaterThan max + | () -> Exactly !size) +end + +let size_of_type ?(max = 10000) t = + match Size.of_type ~max t with + | Size.GreaterThan _ -> None + | Size.Exactly s -> Some s + +let symbols_of_type = + Ty.( + let o = + object (_self) + inherit [_] reduce_ty as _super + + method zero = [] + + method plus = List.rev_append + + method! on_symbol _env s = [s] + end + in + (fun t -> o#on_t () t)) + +module Simplify = struct + type config = { + is_bot: Ty.t -> bool; + is_top: Ty.t -> bool; + compare: Ty.t -> Ty.t -> int; + sort: bool; + } + + (* When merge_kinds is set to true then all kinds of Any (resp. Bot) types + * are considered equivalent when comparing types. Specifically for the Bot type + * we implement a predicate 'is_bot' that determines when the type should be + * considered the empty element. 
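
Note on Size.of_type above: it counts nodes with an iterator that raises SizeCutOff as soon as the running count exceeds `max`, so size_of_type costs O(max) even on enormous types. A self-contained sketch of the same bail-out pattern over a toy tree (the names below are illustrative, not Flow's):

exception Cut_off

type toy_tree =
  | Leaf
  | Node of toy_tree * toy_tree

(* Count nodes, but stop as soon as the count exceeds [max]; None means "too big". *)
let size_upto ~max t =
  let count = ref 0 in
  let rec go t =
    incr count;
    if !count > max then raise Cut_off;
    match t with
    | Leaf -> ()
    | Node (l, r) ->
      go l;
      go r
  in
  match go t with
  | () -> Some !count
  | exception Cut_off -> None

let () =
  assert (size_upto ~max:10 (Node (Leaf, Leaf)) = Some 3);
  assert (size_upto ~max:2 (Node (Leaf, Node (Leaf, Leaf))) = None)
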
*) + let mk_config ~merge_kinds ~sort = + let is_top = function + | Ty.Top -> true + | _ -> false + in + let is_bot = + if merge_kinds then + function + | Ty.Bot _ -> true + | _ -> false + else + let is_bot_upper_kind = function + | Ty.NoUpper -> true + | Ty.SomeKnownUpper _ + | Ty.SomeUnknownUpper _ -> + false + in + let is_bot_kind = function + | Ty.EmptyType -> true + | Ty.EmptyTypeDestructorTriggerT _ -> false + | Ty.EmptyMatchingPropT -> false + | Ty.NoLowerWithUpper kind -> is_bot_upper_kind kind + in + function + | Ty.Bot kind -> is_bot_kind kind + | _ -> false + in + let compare = + let comparator = + if merge_kinds then + object + inherit [unit] Ty.comparator_ty + + (* All Bot kinds are equivalent *) + method! private on_bot_kind () _ _ = () + + method! private on_any_kind () _ _ = () + end + else + new Ty.comparator_ty + in + comparator#compare () + in + { is_top; is_bot; compare; sort } + + (* Simplify union/intersection types, by + * A. removing equal nodes from union and intersection types, and + * B. removing the neutral element for union (resp. intersection) types, + * which is the bottom (resp. top) type. + * + * WARNING: This visitor will do a deep type traversal. + *) + let run = + let rec simplify_list ~is_zero ~is_one acc = function + | [] -> acc + | t :: ts -> + if is_zero t then + [t] + else if is_one t then + simplify_list ~is_zero ~is_one acc ts + else + simplify_list ~is_zero ~is_one (t :: acc) ts + in + let simplify_nel ~is_zero ~is_one (t, ts) = + match simplify_list [] ~is_zero ~is_one (t :: ts) with + | [] -> (t, []) + | t :: ts -> (t, ts) + in + let mapper = + object (self) + inherit [_] Ty.endo_ty + + method private on_nel f env nel = + let (hd, tl) = nel in + let hd' = f env hd in + let tl' = self#on_list f env tl in + if hd == hd' && tl == tl' then + nel + else + (hd', tl') + + method private simplify config ~break ~is_zero ~is_one ~make ~default ts0 = + let { compare; sort; _ } = config in + let ts1 = self#on_nel self#on_t config ts0 in + let len1 = Nel.length ts1 in + let ts2 = Nel.map_concat break ts1 in + let len2 = Nel.length ts2 in + let (ts2, len2) = + if len1 <> len2 then + (ts2, len2) + else + (ts1, len1) + in + let ts3 = ts2 |> simplify_nel ~is_zero ~is_one |> Nel.dedup ~compare in + (* Note we are currently giving up on pointer equality when we are sorting types *) + let ts3 = + if sort || len2 <> Nel.length ts3 then + ts3 + else + ts2 + in + if ts0 == ts3 then + default + else + make ts3 + + method! on_Union config u t0 t1 ts = + let { is_top; is_bot; _ } = config in + self#simplify + ~break:Ty.bk_union + ~make:Ty.mk_union + ~is_zero:is_top + ~is_one:is_bot + ~default:u + config + (t0, t1 :: ts) + + method! on_Inter config i t0 t1 ts = + let { is_top; is_bot; _ } = config in + self#simplify + ~break:Ty.bk_inter + ~make:Ty.mk_inter + ~is_zero:is_bot + ~is_one:is_top + ~default:i + config + (t0, t1 :: ts) + end + in + fun ~merge_kinds ~sort -> + let config = mk_config ~merge_kinds ~sort in + mapper#on_t config +end -type t = { - is_top: bool; - skip: TVarSet.t; -} - -(* Computes the set of variables appearing free in the input. *) -let free_vars_of_t : is_top:bool -> Ty.t -> TVarSet.t = - let open Ty in - let o = object (self) - inherit [_] reduce_ty as super - method zero = TVarSet.empty - method plus = TVarSet.union - - method! 
on_t env t = - match t with - | TVar (RVar i, _) when not (TVarSet.mem i env.skip) -> - TVarSet.singleton i - | TypeAlias { ta_tparams; ta_type = Some t_body; _ } -> - let env' = { env with is_top = false } in - let acc = self#on_option (self#on_list self#on_type_param) env' ta_tparams in - (* If the type alias is the top-level constructor, then the body of the alias - * will be useful and so we descend into that type expression and collect - * variables. Otherwise we avoid collecting variables from the body as it is - * typically not be exposed through a type query. *) - if env.is_top - then self#plus acc (self#on_t env' t_body) - else acc - | Mu (v, t) -> - let env = { env with skip = TVarSet.add v env.skip } in - super#on_t env t - | t -> - super#on_t env t - end in - fun ~is_top t -> - o#on_t { is_top; skip = TVarSet.empty } t - -(* The reason we require the is_top parameter is to determine if the TypeAlias - * body will be walked over. Typically the body is only useful when TypeAlias - * appears as the top-level constructor, and is ignored otherwise. *) -let appears_in_t ~is_top v t = - TVarSet.mem v (free_vars_of_t ~is_top t) - -let size_of_t : Ty.t -> int = - let open Ty in - let o = object (_self) - inherit [_] reduce_ty as super - method zero = 0 - method plus a b = a + b - method! on_t env (t: Ty.t) = 1 + super#on_t env t - end in - fun t -> o#on_t () t - -let symbols_of_t : Ty.t -> Ty_symbol.symbol list = - let open Ty in - let o = object (_self) - inherit [_] reduce_ty as _super - method zero = [] - method plus = List.rev_append - method! on_symbol _env s = [s] - end in - fun t -> o#on_t () t +let simplify_type ~merge_kinds ?(sort = false) = Simplify.run ~merge_kinds ~sort diff --git a/src/common/ty/ty_utils.mli b/src/common/ty/ty_utils.mli new file mode 100644 index 00000000000..0343a00e9c5 --- /dev/null +++ b/src/common/ty/ty_utils.mli @@ -0,0 +1,30 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Decide if a type variable appears free inside a type. This is useful for: + * + * - Deciding well-formedness: a type variable should not appear free in a + * top-level type. + * + * - Computing recursive types: we decide if a type is recursive, we will need + * to know if it appears free in its expansion. (More can be found in the type + * normalizer module.) + * + * The reason we require the is_toplevel parameter is to determine if the TypeAlias + * body will be walked over. Typically the body is only useful when TypeAlias + * appears as the top-level constructor, and is ignored otherwise. + *) +val tvar_appears_in_type : is_toplevel:bool -> Ty.tvar -> Ty.t -> bool + +(* Returns the number of nodes in a type. Will return None if the number of nodes + * exceeds the max parameter. + *) +val size_of_type : ?max:int -> Ty.t -> int option + +val simplify_type : merge_kinds:bool -> ?sort:bool -> Ty.t -> Ty.t + +val symbols_of_type : Ty.t -> Ty_symbol.symbol list diff --git a/src/common/utils/__tests__/common_utils_tests.ml b/src/common/utils/__tests__/common_utils_tests.ml new file mode 100644 index 00000000000..4df72508d6f --- /dev/null +++ b/src/common/utils/__tests__/common_utils_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
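
On simplify_type above: for a union it (A) drops duplicate members, (B) drops the neutral element (`empty` for unions, `mixed` for intersections), and collapses everything if the absorbing element is present. The real visitor works over nonempty lists and preserves physical equality; the list-level idea can be sketched on its own with toy constructors (not Ty.t):

type toy_ty =
  | Top (* `mixed`: absorbs a union *)
  | Bot (* `empty`: neutral for a union *)
  | Num
  | Str

let simplify_union ts =
  if List.mem Top ts then
    [Top]
  else
    (* drop the neutral element, then dedup while keeping first-appearance order *)
    let ts = List.filter (fun t -> t <> Bot) ts in
    let ts = List.fold_left (fun acc t -> if List.mem t acc then acc else t :: acc) [] ts in
    (match List.rev ts with
    | [] -> [Bot] (* every member was empty: the union itself is empty *)
    | ts -> ts)

let () =
  assert (simplify_union [Num; Bot; Num; Str] = [Num; Str]);
  assert (simplify_union [Num; Top] = [Top]);
  assert (simplify_union [Bot; Bot] = [Bot])
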
+ *) + +open OUnit2 + +let tests = "utils" >::: [Nel_test.tests; ResizableArray_test.tests; UnionFind_test.tests] + +let () = run_test_tt_main tests diff --git a/src/common/utils/__tests__/nel_test.ml b/src/common/utils/__tests__/nel_test.ml index 43daa2d1284..b75333a8dc6 100644 --- a/src/common/utils/__tests__/nel_test.ml +++ b/src/common/utils/__tests__/nel_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,93 +8,76 @@ open OUnit2 (* Unsafe *) -let of_list lst = match Nel.of_list lst with +let of_list lst = + match Nel.of_list lst with | Some nel -> nel | None -> raise Not_found -let assert_identical ~ctxt x y = - assert_equal ~ctxt (x == y) true +let assert_identical ~ctxt x y = assert_equal ~ctxt (x == y) true let id x = x (* [6; 4; 2] *) let lst = Nel.one 2 |> Nel.cons 4 |> Nel.cons 6 + (* [3; 5] *) let lst2 = Nel.one 5 |> Nel.cons 3 -let tests = "nel" >::: [ - "to_list" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.to_list lst) [6; 4; 2] - end; - - "mem" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.mem 4 lst) true; - assert_equal ~ctxt (Nel.mem 5 lst) false - end; - - "exists" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.exists (( = ) 4) lst) true; - assert_equal ~ctxt (Nel.exists (( = ) 5) lst) false - end; - - "iter" >:: begin fun ctxt -> - let x = ref 0 in - Nel.iter (fun y -> x := !x + y) lst; - assert_equal ~ctxt !x 12 - end; - - "map" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.map (( * ) 2) lst |> Nel.to_list) [12; 8; 4] - end; - - "ident_map" >:: begin fun ctxt -> - assert_identical ~ctxt (Nel.ident_map id lst) lst; - assert_equal ~ctxt (Nel.ident_map (( * ) 2) lst |> Nel.to_list) [12; 8; 4] - end; - - "concat" >:: begin fun ctxt -> - let x = Nel.one lst2 |> Nel.cons lst in - assert_equal ~ctxt (Nel.concat x |> Nel.to_list) [6; 4; 2; 3; 5] - end; - - "map_concat" >:: begin fun ctxt -> - let f x = Nel.one (x - 1) |> Nel.cons x in - assert_equal ~ctxt (Nel.map_concat f lst |> Nel.to_list) [6; 5; 4; 3; 2; 1] - end; - - "rev" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.rev lst |> Nel.to_list) [2; 4; 6] - end; - - "rev_map" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.rev_map (( + ) 1) lst |> Nel.to_list) [3; 5; 7] - end; - - "rev_append" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.rev_append lst lst2 |> Nel.to_list) [2; 4; 6; 3; 5] - end; - - "length" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.length lst) 3 - end; - - "fold_left" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.fold_left ( * ) 1 lst) 48 - end; - - "hd" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.hd lst) 6 - end; - - "nth" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.nth lst 0) 6; - assert_equal ~ctxt (Nel.nth lst 1) 4; - assert_equal ~ctxt (Nel.nth lst 2) 2 - end; - - "cat_maybes" >:: begin fun ctxt -> - assert_equal ~ctxt (Nel.cat_maybes (of_list [None])) None; - assert_equal ~ctxt (Nel.cat_maybes (of_list [Some 1; None])) (Some (of_list [1])); - assert_equal ~ctxt (Nel.cat_maybes (of_list [Some 0; None; Some 1])) (Some (of_list [0; 1])) - end; -] +let tests = + "nel" + >::: [ + ("to_list" >:: (fun ctxt -> assert_equal ~ctxt (Nel.to_list lst) [6; 4; 2])); + ( "mem" + >:: fun ctxt -> + assert_equal ~ctxt (Nel.mem 4 lst) true; + assert_equal ~ctxt (Nel.mem 5 lst) false ); + ( "exists" + >:: fun ctxt -> + assert_equal ~ctxt (Nel.exists (( = ) 4) lst) true; + assert_equal ~ctxt 
(Nel.exists (( = ) 5) lst) false ); + ( "iter" + >:: fun ctxt -> + let x = ref 0 in + Nel.iter (fun y -> x := !x + y) lst; + assert_equal ~ctxt !x 12 ); + ( "map" + >:: (fun ctxt -> assert_equal ~ctxt (Nel.map (( * ) 2) lst |> Nel.to_list) [12; 8; 4]) ); + ( "ident_map" + >:: fun ctxt -> + assert_identical ~ctxt (Nel.ident_map id lst) lst; + assert_equal ~ctxt (Nel.ident_map (( * ) 2) lst |> Nel.to_list) [12; 8; 4] ); + ( "concat" + >:: fun ctxt -> + let x = Nel.one lst2 |> Nel.cons lst in + assert_equal ~ctxt (Nel.concat x |> Nel.to_list) [6; 4; 2; 3; 5] ); + ( "map_concat" + >:: fun ctxt -> + let f x = Nel.one (x - 1) |> Nel.cons x in + assert_equal ~ctxt (Nel.map_concat f lst |> Nel.to_list) [6; 5; 4; 3; 2; 1] ); + ("rev" >:: (fun ctxt -> assert_equal ~ctxt (Nel.rev lst |> Nel.to_list) [2; 4; 6])); + ( "rev_map" + >:: (fun ctxt -> assert_equal ~ctxt (Nel.rev_map (( + ) 1) lst |> Nel.to_list) [3; 5; 7]) + ); + ( "rev_append" + >:: fun ctxt -> + assert_equal ~ctxt (Nel.rev_append lst lst2 |> Nel.to_list) [2; 4; 6; 3; 5] ); + ( "append" + >:: (fun ctxt -> assert_equal ~ctxt (Nel.append lst lst2 |> Nel.to_list) [6; 4; 2; 3; 5]) + ); + ("length" >:: (fun ctxt -> assert_equal ~ctxt (Nel.length lst) 3)); + ("fold_left" >:: (fun ctxt -> assert_equal ~ctxt (Nel.fold_left ( * ) 1 lst) 48)); + ("hd" >:: (fun ctxt -> assert_equal ~ctxt (Nel.hd lst) 6)); + ( "nth" + >:: fun ctxt -> + assert_equal ~ctxt (Nel.nth lst 0) 6; + assert_equal ~ctxt (Nel.nth lst 1) 4; + assert_equal ~ctxt (Nel.nth lst 2) 2 ); + ( "cat_maybes" + >:: fun ctxt -> + assert_equal ~ctxt (Nel.cat_maybes (of_list [None])) None; + assert_equal ~ctxt (Nel.cat_maybes (of_list [Some 1; None])) (Some (of_list [1])); + assert_equal + ~ctxt + (Nel.cat_maybes (of_list [Some 0; None; Some 1])) + (Some (of_list [0; 1])) ); + ] diff --git a/src/common/utils/__tests__/resizableArray_test.ml b/src/common/utils/__tests__/resizableArray_test.ml new file mode 100644 index 00000000000..ccd2b54b3ac --- /dev/null +++ b/src/common/utils/__tests__/resizableArray_test.ml @@ -0,0 +1,129 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 + +let tests = + "resizable_array" + >::: [ + ( "basic" + >:: fun ctxt -> + let arr = ResizableArray.make 1 in + ResizableArray.push arr "foo"; + ResizableArray.push arr "bar"; + assert_equal ~ctxt (ResizableArray.get arr 0) (Some "foo"); + assert_equal ~ctxt (ResizableArray.get arr 1) (Some "bar") ); + ( "zero_initial_size" + >:: fun ctxt -> + let arr = ResizableArray.make 0 in + ResizableArray.push arr "foo"; + assert_equal ~ctxt (ResizableArray.get arr 0) (Some "foo"); + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 1 ); + ( "size" + >:: fun ctxt -> + let arr = ResizableArray.make 4 in + assert_equal ~ctxt (ResizableArray.size arr) 0; + ResizableArray.push arr "foo"; + assert_equal ~ctxt (ResizableArray.size arr) 1 ); + ( "set" + >:: fun ctxt -> + let arr = ResizableArray.make 4 in + ResizableArray.push arr "foo0"; + ResizableArray.push arr "foo1"; + ResizableArray.set arr 0 "bar0"; + ResizableArray.set arr 1 "bar1"; + assert_equal ~ctxt (ResizableArray.get arr 0) (Some "bar0"); + assert_equal ~ctxt (ResizableArray.get arr 1) (Some "bar1") ); + ( "out_of_bounds_get" + >:: fun ctxt -> + let arr = ResizableArray.make 4 in + assert_equal ~ctxt (ResizableArray.get arr 0) None; + assert_equal ~ctxt (ResizableArray.get arr (-1)) None; + assert_equal ~ctxt (ResizableArray.get arr (-10)) None; + assert_equal ~ctxt (ResizableArray.get arr 3) None; + assert_equal ~ctxt (ResizableArray.get arr 4) None; + assert_equal ~ctxt (ResizableArray.get arr 8) None ); + ( "out_of_bounds_set" + >:: fun _ctxt -> + let arr = ResizableArray.make 4 in + assert_raises (ResizableArray.Out_of_bounds_set "Index: 0, size: 0") (fun () -> + ResizableArray.set arr 0 "foo") ); + ( "expand" + >:: fun ctxt -> + let arr = ResizableArray.make 1 in + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 1; + ResizableArray.push arr "foo0"; + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 1; + ResizableArray.push arr "foo1"; + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 2; + ResizableArray.push arr "foo2"; + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 4; + ResizableArray.push arr "foo3"; + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 4; + assert_equal ~ctxt (ResizableArray.get arr 0) (Some "foo0"); + assert_equal ~ctxt (ResizableArray.get arr 1) (Some "foo1"); + assert_equal ~ctxt (ResizableArray.get arr 2) (Some "foo2"); + assert_equal ~ctxt (ResizableArray.get arr 3) (Some "foo3") ); + ( "shrink" + >:: fun ctxt -> + let arr = ResizableArray.make 8 in + ResizableArray.push arr "foo"; + ResizableArray.push arr "bar"; + + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 8; + assert_equal ~ctxt (ResizableArray.size arr) 2; + + ResizableArray.shrink arr; + + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 2; + assert_equal ~ctxt (ResizableArray.size arr) 2; + + assert_equal ~ctxt (ResizableArray.get arr 0) (Some "foo"); + assert_equal ~ctxt (ResizableArray.get arr 1) (Some "bar") ); + ( "shrink_noop" + >:: fun ctxt -> + let arr = ResizableArray.make 2 in + ResizableArray.push arr "foo"; + ResizableArray.push arr "bar"; + + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 2; + assert_equal ~ctxt (ResizableArray.size arr) 2; + + ResizableArray.shrink arr; + + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 2; + assert_equal ~ctxt (ResizableArray.size arr) 2; + + 
assert_equal ~ctxt (ResizableArray.get arr 0) (Some "foo"); + assert_equal ~ctxt (ResizableArray.get arr 1) (Some "bar") ); + ( "to_hashtbl" + >:: fun ctxt -> + let arr = ResizableArray.make 2 in + ResizableArray.push arr "foo"; + ResizableArray.push arr "bar"; + + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 2; + assert_equal ~ctxt (ResizableArray.size arr) 2; + + let tbl = ResizableArray.to_hashtbl arr in + assert_equal ~ctxt (Hashtbl.find tbl "foo") 0; + assert_equal ~ctxt (Hashtbl.find tbl "bar") 1; + assert_equal ~ctxt (Hashtbl.length tbl) 2 ); + ( "to_hashtbl_bigger_array" + >:: fun ctxt -> + let arr = ResizableArray.make 8 in + ResizableArray.push arr "foo"; + ResizableArray.push arr "bar"; + + assert_equal ~ctxt (ResizableArray.underlying_array_size_do_not_use arr) 8; + assert_equal ~ctxt (ResizableArray.size arr) 2; + + let tbl = ResizableArray.to_hashtbl arr in + assert_equal ~ctxt (Hashtbl.find tbl "foo") 0; + assert_equal ~ctxt (Hashtbl.find tbl "bar") 1; + assert_equal ~ctxt (Hashtbl.length tbl) 2 ); + ] diff --git a/src/common/utils/__tests__/test.ml b/src/common/utils/__tests__/test.ml deleted file mode 100644 index 2fa59bc50f0..00000000000 --- a/src/common/utils/__tests__/test.ml +++ /dev/null @@ -1,15 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "utils" >::: [ - Nel_test.tests; - UnionFind_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/common/utils/__tests__/unionFind_test.ml b/src/common/utils/__tests__/unionFind_test.ml index 61208cf939f..9841d0ea498 100644 --- a/src/common/utils/__tests__/unionFind_test.ml +++ b/src/common/utils/__tests__/unionFind_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -11,8 +11,7 @@ let assert_same_elements ~ctxt lst1 lst2 = let sort = List.fast_sort (fun x y -> x - y) in assert_equal ~ctxt (sort lst1) (sort lst2) -let assert_one_of ~ctxt item lst = - assert_equal ~ctxt (List.mem item lst) true +let assert_one_of ~ctxt item lst = assert_equal ~ctxt (List.mem item lst) true let assert_raises ~ctxt ~f expected_exception = let exn = ref None in @@ -29,98 +28,90 @@ let makeUnionFind () = UnionFind.union x 1 10; x -let tests = "union_find" >::: [ - "add" >:: begin fun ctxt -> - let x = UnionFind.make () in - UnionFind.add x 3; - UnionFind.add x 5; - assert_equal ~ctxt (UnionFind.members x 3) [3]; - assert_equal ~ctxt (UnionFind.members x 5) [5] - end; - - "union_new_values" >:: begin fun ctxt -> - let x = UnionFind.make () in - UnionFind.union x 1 3; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 3] - end; - - "union_duplicate" >:: begin fun ctxt -> - let x = UnionFind.make () in - UnionFind.add x 1; - UnionFind.add x 2; - UnionFind.union x 1 2; - UnionFind.union x 1 2; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2] - end; - - "find" >:: begin fun ctxt -> - let x = makeUnionFind () in - assert_one_of ~ctxt (UnionFind.find x 1) [1; 3; 4; 7; 9; 10]; - assert_one_of ~ctxt (UnionFind.find x 6) [2; 6]; - let y = UnionFind.make () in - UnionFind.add y 1; - assert_equal ~ctxt (UnionFind.find y 1) 1 - end; - - "multiple_groups" >:: begin fun ctxt -> - let x = makeUnionFind () in - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 3; 4; 7; 9; 10]; - assert_same_elements ~ctxt (UnionFind.members x 6) [2; 6] - end; - - "grow" >:: begin fun ctxt -> - let x = UnionFind.of_list [1; 2] in - (* Should grow here *) - UnionFind.add x 3; - UnionFind.add x 4; - (* Should grow here too *) - UnionFind.add x 5; - UnionFind.union x 1 5; - UnionFind.union x 3 4; - UnionFind.union x 4 2; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 5]; - assert_same_elements ~ctxt (UnionFind.members x 4) [3; 4; 2] - end; - - "find_not_found" >:: begin fun ctxt -> - let x = UnionFind.make () in - assert_raises ~ctxt Not_found ~f:(fun () -> - ignore (UnionFind.find x 1) - ) - end; - - "members_not_found" >:: begin fun ctxt -> - let x = UnionFind.make () in - assert_raises ~ctxt Not_found ~f:(fun () -> - ignore (UnionFind.members x 1) - ) - end; - - "exercise_union_by_rank" >:: begin fun ctxt -> - let x = UnionFind.of_list [1; 2; 3; 4; 5; 6; 7; 8; 9] in - (* Two sets have the same rank (0), now [1; 2] has rank 1. *) - UnionFind.union x 1 2; - (* Put the set with rank 1 first. Resulting set should still have rank 1. *) - UnionFind.union x 2 3; - (* Put the set with rank 1 second. Resulting set should still have rank 1. 
*) - UnionFind.union x 4 3; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2; 3; 4] - end; - - "of_list" >:: begin fun ctxt -> - (* Exercise power-of-two edge cases looking for off-by-ones *) - let x = UnionFind.of_list [] in - UnionFind.union x 1 2; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2]; - - let x = UnionFind.of_list [1] in - UnionFind.union x 1 2; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2]; - - let x = UnionFind.of_list [1; 2] in - UnionFind.union x 1 2; - UnionFind.union x 1 3; - UnionFind.union x 4 5; - assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2; 3]; - end; -]; +let tests = + "union_find" + >::: [ + ( "add" + >:: fun ctxt -> + let x = UnionFind.make () in + UnionFind.add x 3; + UnionFind.add x 5; + assert_equal ~ctxt (UnionFind.members x 3) [3]; + assert_equal ~ctxt (UnionFind.members x 5) [5] ); + ( "union_new_values" + >:: fun ctxt -> + let x = UnionFind.make () in + UnionFind.union x 1 3; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 3] ); + ( "union_duplicate" + >:: fun ctxt -> + let x = UnionFind.make () in + UnionFind.add x 1; + UnionFind.add x 2; + UnionFind.union x 1 2; + UnionFind.union x 1 2; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2] ); + ( "find" + >:: fun ctxt -> + let x = makeUnionFind () in + assert_one_of ~ctxt (UnionFind.find x 1) [1; 3; 4; 7; 9; 10]; + assert_one_of ~ctxt (UnionFind.find x 6) [2; 6]; + let y = UnionFind.make () in + UnionFind.add y 1; + assert_equal ~ctxt (UnionFind.find y 1) 1 ); + ( "multiple_groups" + >:: fun ctxt -> + let x = makeUnionFind () in + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 3; 4; 7; 9; 10]; + assert_same_elements ~ctxt (UnionFind.members x 6) [2; 6] ); + ( "grow" + >:: fun ctxt -> + let x = UnionFind.of_list [1; 2] in + (* Should grow here *) + UnionFind.add x 3; + UnionFind.add x 4; + + (* Should grow here too *) + UnionFind.add x 5; + UnionFind.union x 1 5; + UnionFind.union x 3 4; + UnionFind.union x 4 2; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 5]; + assert_same_elements ~ctxt (UnionFind.members x 4) [3; 4; 2] ); + ( "find_not_found" + >:: fun ctxt -> + let x = UnionFind.make () in + assert_raises ~ctxt Not_found ~f:(fun () -> ignore (UnionFind.find x 1)) ); + ( "members_not_found" + >:: fun ctxt -> + let x = UnionFind.make () in + assert_raises ~ctxt Not_found ~f:(fun () -> ignore (UnionFind.members x 1)) ); + ( "exercise_union_by_rank" + >:: fun ctxt -> + let x = UnionFind.of_list [1; 2; 3; 4; 5; 6; 7; 8; 9] in + (* Two sets have the same rank (0), now [1; 2] has rank 1. *) + UnionFind.union x 1 2; + + (* Put the set with rank 1 first. Resulting set should still have rank 1. *) + UnionFind.union x 2 3; + + (* Put the set with rank 1 second. Resulting set should still have rank 1. 
*) + UnionFind.union x 4 3; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2; 3; 4] ); + ( "of_list" + >:: fun ctxt -> + (* Exercise power-of-two edge cases looking for off-by-ones *) + let x = UnionFind.of_list [] in + UnionFind.union x 1 2; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2]; + + let x = UnionFind.of_list [1] in + UnionFind.union x 1 2; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2]; + + let x = UnionFind.of_list [1; 2] in + UnionFind.union x 1 2; + UnionFind.union x 1 3; + UnionFind.union x 4 5; + assert_same_elements ~ctxt (UnionFind.members x 1) [1; 2; 3] ); + ] diff --git a/src/common/utils/dune b/src/common/utils/dune new file mode 100644 index 00000000000..4950a7389c7 --- /dev/null +++ b/src/common/utils/dune @@ -0,0 +1,12 @@ +(library + (name flow_common_utils) + (wrapped false) + (flags -w +a-4-6-29-35-44-48-50) + (libraries + hh_json ; hack + imported_core ; hack + flow_parser + string ; hack + sys_utils ; hack + lwt) + (preprocess (pps lwt_ppx))) diff --git a/src/common/utils/json_rpc.ml b/src/common/utils/json_rpc.ml index efce399c6f3..058009ef711 100644 --- a/src/common/utils/json_rpc.ml +++ b/src/common/utils/json_rpc.ml @@ -1,11 +1,12 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open Hh_json + type t = (* method name, params, id (only for requests) *) | Obj of (string * json list * int option) @@ -14,59 +15,51 @@ type t = exception Malformed_exn of string let get_prop propname props = - try - List.assoc propname props + try List.assoc propname props with Not_found -> raise (Malformed_exn (propname ^ " property not found")) let parse_unsafe str = - let parsed = - try - json_of_string str - with Syntax_error msg -> raise (Malformed_exn msg) - in - let props = match parsed with + let parsed = (try json_of_string str with Syntax_error msg -> raise (Malformed_exn msg)) in + let props = + match parsed with | JSON_Object props -> props | _ -> raise (Malformed_exn "Message is not a JSON Object") in let method_json = get_prop "method" props in let params_json = get_prop "params" props in - let id_json = try Some (List.assoc "id" props) with Not_found -> None in - let method_name = match method_json with + let id_json = (try Some (List.assoc "id" props) with Not_found -> None) in + let method_name = + match method_json with | JSON_String str -> str | _ -> raise (Malformed_exn "Method name is not a string") in - let params = match params_json with + let params = + match params_json with (* If you don't pass any props you just get a null here *) | JSON_Null -> [] | JSON_Array lst -> lst | other -> [other] in - let id = match id_json with + let id = + match id_json with | None -> None | Some (JSON_Number x) -> Some (int_of_string x) | Some _ -> raise (Malformed_exn "Unexpected id value") in Obj (method_name, params, id) -let parse_json_rpc_response str = - try - parse_unsafe str - with Malformed_exn msg -> Malformed msg +let parse_json_rpc_response str = (try parse_unsafe str with Malformed_exn msg -> Malformed msg) let jsonrpcize_notification method_ params = Hh_json.( - JSON_Object [ - ("jsonrpc", JSON_String "2.0"); - ("method", JSON_String method_); - ("params", JSON_Array params); - ] - ) + JSON_Object + [ + ("jsonrpc", JSON_String "2.0"); + ("method", JSON_String method_); + ("params", JSON_Array params); + ]) let jsonrpcize_response id json = 
Hh_json.( - JSON_Object [ - ("jsonrpc", JSON_String "2.0"); - ("id", JSON_Number (string_of_int id)); - ("result", json); - ] - ) + JSON_Object + [("jsonrpc", JSON_String "2.0"); ("id", JSON_Number (string_of_int id)); ("result", json)]) diff --git a/src/common/utils/json_rpc.mli b/src/common/utils/json_rpc.mli index 618eec74bf4..afa867c59e9 100644 --- a/src/common/utils/json_rpc.mli +++ b/src/common/utils/json_rpc.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,18 +10,13 @@ type t = | Obj of (string * Hh_json.json list * int option) | Malformed of string -val parse_json_rpc_response: string -> t +val parse_json_rpc_response : string -> t -val jsonrpcize_notification: +val jsonrpcize_notification : (* method name *) - string -> - (* value to send *) - Hh_json.json list -> - Hh_json.json + string -> (* value to send *) + Hh_json.json list -> Hh_json.json -val jsonrpcize_response: - (* request id *) - int -> - (* return value *) - Hh_json.json -> - Hh_json.json +val jsonrpcize_response : (* request id *) + int -> (* return value *) + Hh_json.json -> Hh_json.json diff --git a/src/common/utils/line.ml b/src/common/utils/line.ml index ac909ab6c2c..9383cc40908 100644 --- a/src/common/utils/line.ml +++ b/src/common/utils/line.ml @@ -1,30 +1,66 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -let breaks = "\r\n" +(* Line Separator (0xE2 0x80 0xA8) or Paragraph Separator (0xE2 0x80 0xA9) *) +let is_ls_or_ps = + let c1 = Char.chr 0xE2 in + let c2 = Char.chr 0x80 in + let c3ls = Char.chr 0xA8 in + let c3ps = Char.chr 0xA9 in + fun str len i -> + str.[i] = c1 && i + 2 < len && str.[i + 1] = c2 && (str.[i + 2] = c3ls || str.[i + 2] = c3ps) -let rec eol s x i = - if i >= x then x else - if String.contains breaks s.[i] then i else - eol s x (i + 1) +let length_of_line_terminator str len i = + if str.[i] = '\n' then + 1 + else if str.[i] = '\r' then + if i + 1 < len && str.[i + 1] = '\n' then + 2 + else + 1 + else if is_ls_or_ps str len i then + 3 + else + 0 -let rec line s x n i = - if n <= 0 then i, eol s x (i + 1) else - let i = eol s x i in - if i >= x then x, x else - line s x (n - 1) (i + 1) +(* Finds the index of the first character of the nth line (0-based). + + Assumes a UTF-8 encoding, and treats \n, \r, U+2028 (line separator) and + U+2029 (paragraph separator) as line terminators, per the ECMAscript spec: + https://tc39.es/ecma262/#sec-line-terminators + + If the line doesn't exist, including if the string ends with a line terminator + for the (n-1)th line, then returns [None] (e.g. "foo\n" for n=1, i=0 returns `None` + because the index is the end of the string. 
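
A usage sketch for the json_rpc.ml helpers above (assuming the Json_rpc module is in scope): a well-formed request yields `Obj`, JSON null params come back as an empty list, and the id is optional.

let () =
  match Json_rpc.parse_json_rpc_response {|{"method":"ping","params":null,"id":7}|} with
  | Json_rpc.Obj (method_name, params, id) ->
    assert (method_name = "ping");
    assert (params = []);
    assert (id = Some 7)
  | Json_rpc.Malformed msg -> failwith ("unexpected parse failure: " ^ msg)
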
*) +let rec nth_line_opt n str len i = + if i >= len then + None + else if n = 0 then + Some i + else + let x = length_of_line_terminator str len i in + if x > 0 then + nth_line_opt (n - 1) str len (i + x) + else + nth_line_opt n str len (i + 1) let split_nth s n = - let x = String.length s in - let i, j = line s x n 0 in - if i = x then None else - Some String.(sub s 0 i, sub s i (j - i), sub s j (x - j)) + let len = String.length s in + match nth_line_opt n s len 0 with + | Some i -> + let j = + match nth_line_opt 1 s len i with + | Some j -> j + | None -> len - 1 + in + Some String.(sub s 0 i, sub s i (j - i), sub s j (len - j)) + | None -> None let transform_nth s n f = match split_nth s n with - | Some (pre, s, post) -> pre ^ (f s) ^ post + | Some (pre, s, post) -> pre ^ f s ^ post | None -> s diff --git a/src/common/utils/line.mli b/src/common/utils/line.mli index ea88cf16a42..f3c02f596c3 100644 --- a/src/common/utils/line.mli +++ b/src/common/utils/line.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/common/utils/listUtils.ml b/src/common/utils/listUtils.ml index 5df0d23f978..19cdb5920d4 100644 --- a/src/common/utils/listUtils.ml +++ b/src/common/utils/listUtils.ml @@ -1,11 +1,22 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module M_ = Monad +let rev_filter_map f lst = + let rec loop lst acc = + match lst with + | [] -> acc + | hd :: tl -> + (match f hd with + | Some x -> loop tl (x :: acc) + | None -> loop tl acc) + in + loop lst [] + +let filter_map f lst = rev_filter_map f lst |> Core_list.rev (** like List.fold_left, but f returns an option and so do we. f acc v = Some acc proceeds as usual; None stops the fold. @@ -17,11 +28,11 @@ module M_ = Monad Useful in situations where failure rules out List.fold_left. *) let rec fold_left_opt f acc = function -| [] -> Some acc -| v :: vs -> - match f acc v with - | None -> None - | Some acc -> fold_left_opt f acc vs + | [] -> Some acc + | v :: vs -> + (match f acc v with + | None -> None + | Some acc -> fold_left_opt f acc vs) (** like List.fold_left, but f returns a stop flag as well as a result. f acc v = true, acc proceeds as usual; false, acc stops the fold. @@ -34,11 +45,11 @@ let rec fold_left_opt f acc = function Useful in situations where shortcutting makes List.fold_left a bad fit. *) let rec fold_left_until f acc = function -| [] -> acc -| v :: vs -> - match f acc v with - | false, acc -> acc - | true, acc -> fold_left_until f acc vs + | [] -> acc + | v :: vs -> + (match f acc v with + | (false, acc) -> acc + | (true, acc) -> fold_left_until f acc vs) (** like List.fold_left, but adds a guard function p. p acc v is called before f acc v at each step. false stops the fold. @@ -53,11 +64,11 @@ let rec fold_left_until f acc = function and test should precede action. *) let rec fold_left_while p f acc = function -| [] -> acc -| v :: vs -> - match p acc v with - | false -> acc - | true -> fold_left_while p f (f acc v) vs + | [] -> acc + | v :: vs -> + (match p acc v with + | false -> acc + | true -> fold_left_while p f (f acc v) vs) (** Folds f over lst the given number of times, or for every element of lst, whichever is less. 
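
A usage sketch for the rewritten Line module above (assuming `Line` is in scope): line numbers are 0-based, the chunk handed to the callback keeps its terminator, and an out-of-range index leaves the input untouched.

let () =
  (* upper-case the second line (index 1); its trailing "\n" survives unchanged *)
  assert (Line.transform_nth "foo\nbar\nbaz\n" 1 String.uppercase_ascii = "foo\nBAR\nbaz\n");
  (* asking for a line past the end is a no-op *)
  assert (Line.transform_nth "foo\n" 5 String.uppercase_ascii = "foo\n")
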
@@ -66,89 +77,169 @@ let rec fold_left_while p f acc = function fold_left_for 3 f 0 [1; 2; 3; 4; 5] => [1; 2; 3] *) let fold_left_for n f acc lst = - snd (fold_left_while - (fun (i, _) _ -> i < n) - (fun (i, acc) x -> i + 1, f acc x) - (0, acc) - lst) + snd (fold_left_while (fun (i, _) _ -> i < n) (fun (i, acc) x -> (i + 1, f acc x)) (0, acc) lst) let rec first_some_map f = function | [] -> None - | hd::tl -> begin match f hd with - | Some _ as x -> x - | None -> first_some_map f tl - end + | hd :: tl -> + begin + match f hd with + | Some _ as x -> x + | None -> first_some_map f tl + end (** this function takes a list and truncates it if needed to no more than the first n elements. If truncation happened, then the callback 'f' is used to generated a final element e.g. "shown 5/200" *) let first_upto_n n f lst = - let (first, total) = Core_list.fold lst ~init:([],0) ~f:(fun (first, total) s -> - let first = if total < n then (s :: first) else first in - (first, total + 1)) in - let r = if total <= n then first else match f total with - | None -> first - | Some e -> e :: first in + let (first, total) = + Core_list.fold lst ~init:([], 0) ~f:(fun (first, total) s -> + let first = + if total < n then + s :: first + else + first + in + (first, total + 1)) + in + let r = + if total <= n then + first + else + match f total with + | None -> first + | Some e -> e :: first + in Core_list.rev r (* truncate a list to first 0 < n <= len items *) -let first_n n lst = - List.rev (fold_left_for n (fun rl x -> x :: rl) [] lst) +let first_n n lst = fold_left_for n (Fn.flip Core_list.cons) [] lst |> Core_list.rev (* truncate a list to last 0 < n <= len items *) -let last_n n lst = - fold_left_for n (fun rl x -> x :: rl) [] (List.rev lst) +let last_n n lst = Core_list.rev lst |> fold_left_for n (Fn.flip Core_list.cons) [] + +(* split a list into a list of lists, each of length n except the last whose length is in [0, n) *) +let bucket_n n lst = + let (_, curr, all) = + Core_list.fold_left + ~f:(fun (i, curr, all) result -> + if i = n then + (1, [result], Core_list.rev curr :: all) + else + (i + 1, result :: curr, all)) + ~init:(0, [], []) + lst + in + Core_list.rev curr :: all |> Core_list.rev (* make a list of n copies of a given value *) let copy_n n v = let rec loop acc = function | 0 -> acc | i -> loop (v :: acc) (i - 1) - in loop [] n + in + loop [] n (** unique list items, in order of first appearance (requires sorted list) *) let rec uniq = function -| [] -> [] -| [x] -> [x] -| x :: (y :: _ as l) when x = y -> uniq l -| x :: rl -> x :: uniq rl + | [] -> [] + | [x] -> [x] + | x :: (y :: _ as l) when x = y -> uniq l + | x :: rl -> x :: uniq rl (** physically unique list items, in order of first appearance *) let rec phys_uniq = function -| [] -> [] -| [x] -> [x] -| x :: (y :: _ as l) when x == y -> phys_uniq l -| x :: rl -> x :: phys_uniq rl + | [] -> [] + | [x] -> [x] + | x :: (y :: _ as l) when x == y -> phys_uniq l + | x :: rl -> x :: phys_uniq rl (** performs a map, but returns the original list if there is no change **) let ident_map f lst = - let rev_lst, changed = List.fold_left (fun (lst_, changed) item -> - let item_ = f item in - item_::lst_, changed || item_ != item - ) ([], false) lst in - if changed then List.rev rev_lst else lst + let (rev_lst, changed) = + Core_list.fold_left + ~f:(fun (lst_, changed) item -> + let item_ = f item in + (item_ :: lst_, changed || item_ != item)) + ~init:([], false) + lst + in + if changed then + Core_list.rev rev_lst + else + lst + +let ident_mapi f lst = + 
let (_, rev_lst, changed) = + Core_list.fold_left + ~f:(fun (index, lst_, changed) item -> + let item_ = f index item in + (index + 1, item_ :: lst_, changed || item_ != item)) + ~init:(0, [], false) + lst + in + if changed then + Core_list.rev rev_lst + else + lst + +let ident_map_multiple f lst = + let (rev_lst, changed) = + Core_list.fold_left + ~f:(fun (lst_, changed) item -> + match f item with + | [] -> (lst_, true) + | [item_] -> (item_ :: lst_, changed || item != item_) + | items_ -> (Core_list.rev_append items_ lst_, true)) + ~init:([], false) + lst + in + if changed then + Core_list.rev rev_lst + else + lst + +(** performs a filter, but returns the original list if there is no change **) +let ident_filter f lst = + let (rev_lst, changed) = + Core_list.fold_left + ~f:(fun (lst', changed) item -> + if f item then + (item :: lst', changed) + else + (lst', true)) + ~init:([], false) + lst + in + if changed then + Core_list.rev rev_lst + else + lst let rec combine3 = function | ([], [], []) -> [] - | (a1::l1, a2::l2, a3::l3) -> (a1, a2, a3) :: combine3 (l1, l2, l3) + | (a1 :: l1, a2 :: l2, a3 :: l3) -> (a1, a2, a3) :: combine3 (l1, l2, l3) | (_, _, _) -> invalid_arg "List.combine3" let rec split3 = function | [] -> ([], [], []) - | (x,y,z)::l -> - let (rx, ry,rz) = split3 l in (x::rx, y::ry, z::rz) + | (x, y, z) :: l -> + let (rx, ry, rz) = split3 l in + (x :: rx, y :: ry, z :: rz) -let zipi xs ys = - List.combine xs ys |> List.mapi (fun i (x, y) -> (i,x,y)) +let zipi xs ys = Core_list.zip_exn xs ys |> Core_list.mapi ~f:(fun i (x, y) -> (i, x, y)) let range_with f a b = - if a > b then [] + if a > b then + [] else let rec loop j acc = - if a <= j then loop (j-1) (f j :: acc) - else acc + if a <= j then + loop (j - 1) (f j :: acc) + else + acc in - loop (b-1) [] + loop (b - 1) [] let range = range_with (fun x -> x) @@ -156,7 +247,7 @@ let repeat n a = range_with (fun _ -> a) 0 n let rec cat_maybes = function | [] -> [] - | (Some y) :: ys -> y :: cat_maybes ys + | Some y :: ys -> y :: cat_maybes ys | None :: ys -> cat_maybes ys (** fold over the elements of a list while keeping the results of @@ -164,80 +255,60 @@ let rec cat_maybes = function accumulator *) let fold_map f acc xs = - let acc', ys = List.fold_left (fun (a, ys) x -> - let (a', y) = f a x in - (a', y :: ys) - ) (acc, []) xs in - (acc', List.rev ys) + let (acc', ys) = + Core_list.fold_left + ~f:(fun (a, ys) x -> + let (a', y) = f a x in + (a', y :: ys)) + ~init:(acc, []) + xs + in + (acc', Core_list.rev ys) let concat_fold f acc items = - let acc, lists = List.fold_left (fun (acc, lists) item -> - let acc, list = f acc item in - acc, list :: lists - ) (acc, []) items in - acc, List.concat lists - -(** Monadic versions of previous folding operations - - It's unfortunate that we have to replicate the definitions for - the cases where the monadic module accepts 1 or 2 type paramenters. 
- *) - -module Monad (M : M_.S) = struct - - include M_.Make(struct - type 'a t = 'a M.t - let bind = M.bind - let return = M.return - let map = `Custom M.map - end) - - let fold_map_m f init xs = - List.fold_left (fun acc x -> acc >>= (fun (s, ys) -> - f s x >>| fun (s', y) -> - (s', y :: ys) - )) (return (init, [])) xs >>| fun (acc', ys) -> - (acc', List.rev ys) - - let concat_fold_m f init items = - List.fold_left (fun a item -> a >>= fun (acc, lists) -> - f acc item >>| fun (acc, list) -> - (acc, list :: lists) - ) (return (init, [])) items >>| fun (acc, lists) -> - (acc, List.concat lists) -end - -module Monad2 (M : M_.S2) = struct - - include M_.Make2(struct - type ('a, 'b) t = ('a,'b) M.t - let bind = M.bind - let return = M.return - let map = `Custom M.map - end) - - let fold_map_m f init xs = - List.fold_left (fun acc x -> acc >>= (fun (s, ys) -> - f s x >>| fun (s', y) -> - (s', y :: ys) - )) (return (init, [])) xs >>| fun (acc', ys) -> - (acc', List.rev ys) - - let concat_fold_m f init items = - List.fold_left (fun a item -> a >>= fun (acc, lists) -> - f acc item >>| fun (acc, list) -> - (acc, list :: lists) - ) (return (init, [])) items >>| fun (acc, lists) -> - (acc, List.concat lists) -end + let (acc, lists) = + Core_list.fold_left + ~f:(fun (acc, lists) item -> + let (acc, list) = f acc item in + (acc, list :: lists)) + ~init:(acc, []) + items + in + (acc, Core_list.concat lists) + +let last_opt l = + let rec last l v = + match l with + | [] -> v + | x :: xs -> last xs x + in + Core_list.nth l 0 |> Option.map ~f:(last l) + +let is_empty = function + | [] -> true + | _ -> false (* Stringify a list given a separator and a printer for the element type *) -let to_string separator printer list = - String.concat separator @@ List.map printer list +let to_string separator printer list = String.concat separator @@ Core_list.map ~f:printer list (* Stringify an association list given a separator, a printer for the key type, a key/value separator, and a printer for the value type *) let assoc_to_string separator key_printer key_value_separator value_printer list = - to_string separator (fun (k, v) -> - Printf.sprintf "%s%s%s" (key_printer k) key_value_separator (value_printer v) - ) list + to_string + separator + (fun (k, v) -> Printf.sprintf "%s%s%s" (key_printer k) key_value_separator (value_printer v)) + list + +(* Dedups a list in O(n) time and space. Unlike Core_list.dedup, this + preserves order. Core's implementation is also O(n log n) *) +let dedup l = + let tbl = Core_list.length l |> Hashtbl.create in + let f l e = + if Hashtbl.mem tbl e then + l + else ( + Hashtbl.add tbl e (); + e :: l + ) + in + Core_list.fold_left ~f ~init:[] l |> Core_list.rev diff --git a/src/common/utils/listUtils.mli b/src/common/utils/listUtils.mli index a5afb4a231b..1f6ebb95140 100644 --- a/src/common/utils/listUtils.mli +++ b/src/common/utils/listUtils.mli @@ -1,51 +1,71 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module M_ = Monad +val rev_filter_map : ('a -> 'b option) -> 'a list -> 'b list + +val filter_map : ('a -> 'b option) -> 'a list -> 'b list val fold_left_opt : ('a -> 'b -> 'a option) -> 'a -> 'b list -> 'a option + val fold_left_until : ('a -> 'b -> bool * 'a) -> 'a -> 'b list -> 'a -val fold_left_while : ('a -> 'b -> bool) -> - ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a + +val fold_left_while : ('a -> 'b -> bool) -> ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a + val fold_left_for : int -> ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a + val first_some_map : ('a -> 'b option) -> 'a list -> 'b option + val first_upto_n : int -> (int -> 'a option) -> 'a list -> 'a list + val first_n : int -> 'a list -> 'a list + val last_n : int -> 'a list -> 'a list + +val bucket_n : int -> 'a list -> 'a list list + val copy_n : int -> 'a -> 'a list + val uniq : 'a list -> 'a list + val phys_uniq : 'a list -> 'a list + val ident_map : ('a -> 'a) -> 'a list -> 'a list + +val ident_mapi : (int -> 'a -> 'a) -> 'a list -> 'a list + +val ident_map_multiple : ('a -> 'a list) -> 'a list -> 'a list + +val ident_filter : ('a -> bool) -> 'a list -> 'a list + val combine3 : 'a list * 'b list * 'c list -> ('a * 'b * 'c) list + val split3 : ('a * 'b * 'c) list -> 'a list * 'b list * 'c list + val zipi : 'a list -> 'b list -> (int * 'a * 'b) list + val range_with : (int -> 'a) -> int -> int -> 'a list + val range : int -> int -> int list + val repeat : int -> 'a -> 'a list + val cat_maybes : 'a option list -> 'a list + val fold_map : ('a -> 'b -> 'a * 'c) -> 'a -> 'b list -> 'a * 'c list + val concat_fold : ('a -> 'b -> 'a * 'c list) -> 'a -> 'b list -> 'a * 'c list -module Monad (M : M_.S) : sig - include M_.S with type 'a t := 'a M.t - val fold_map_m : ('a -> 'b -> ('a * 'c) M.t) -> 'a -> 'b list -> ('a * 'c list) M.t - val concat_fold_m : ('a -> 'b -> ('a * 'c list) M.t) - -> 'a -> 'b list - -> ('a * 'c list) M.t -end - -module Monad2 (M : M_.S2) : sig - include M_.S2 with type ('a,'b) t := ('a,'b) M.t - val fold_map_m : ('a -> 'b -> ('a * 'c, 'd) M.t) -> 'a -> 'b list -> ('a * 'c list, 'd) M.t - val concat_fold_m : ('a -> 'b -> ('a * 'c list, 'd) M.t) - -> 'a -> 'b list - -> ('a * 'c list, 'd) M.t -end - -val to_string: string -> ('a -> string) -> ('a list -> string) -val assoc_to_string: string -> ('a -> string) -> string -> ('b -> string) -> - (('a * 'b) list -> string) +val last_opt : 'a list -> 'a option + +val is_empty : 'a list -> bool + +val dedup : 'a list -> 'a list + +val to_string : string -> ('a -> string) -> 'a list -> string + +val assoc_to_string : + string -> ('a -> string) -> string -> ('b -> string) -> ('a * 'b) list -> string diff --git a/src/common/utils/loc_utils/dune b/src/common/utils/loc_utils/dune new file mode 100644 index 00000000000..7bdbfdc3332 --- /dev/null +++ b/src/common/utils/loc_utils/dune @@ -0,0 +1,7 @@ +(library + (name flow_common_utils_loc_utils) + (wrapped false) + (libraries + collections ; hack + flow_parser + flow_parser_utils_aloc)) diff --git a/src/common/utils/loc_utils/loc_collections.ml b/src/common/utils/loc_utils/loc_collections.ml new file mode 100644 index 00000000000..a5d532e6dad --- /dev/null +++ b/src/common/utils/loc_utils/loc_collections.ml @@ -0,0 +1,11 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +module LocSet = Loc_sig.LocS.LSet +module LocMap = Loc_sig.LocS.LMap +module ALocSet = Loc_sig.ALocS.LSet +module ALocMap = Loc_sig.ALocS.LMap diff --git a/src/common/utils/loc_utils/loc_sig.ml b/src/common/utils/loc_utils/loc_sig.ml new file mode 100644 index 00000000000..c2a3b37d50c --- /dev/null +++ b/src/common/utils/loc_utils/loc_sig.ml @@ -0,0 +1,48 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module type S = sig + type t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + (* Exposes the underlying representation of the location. Use for debugging purposes only. Do not + * expose these results in user output or make typecheker behavior depend on it. *) + val debug_to_string : ?include_source:bool -> t -> string + + module LMap : MyMap.S with type key = t + + module LSet : Set.S with type elt = t +end + +module LocS : S with type t = Loc.t = struct + type t = Loc.t + + let compare = Loc.compare + + let equal = Loc.equal + + let debug_to_string = Loc.debug_to_string + + module LMap = MyMap.Make (Loc) + module LSet = Set.Make (Loc) +end + +module ALocS : S with type t = ALoc.t = struct + type t = ALoc.t + + let compare = ALoc.compare + + let equal = ALoc.equal + + let debug_to_string = ALoc.debug_to_string + + module LMap = MyMap.Make (ALoc) + module LSet = Set.Make (ALoc) +end diff --git a/src/common/utils/nel.ml b/src/common/utils/nel.ml index 160efe43d30..dccc03e192c 100644 --- a/src/common/utils/nel.ml +++ b/src/common/utils/nel.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
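The new Loc_sig.S signature above is what lets location-generic code be written once and instantiated for both concrete (Loc.t) and abstract (ALoc.t) locations. A minimal sketch of that pattern, using only what the signature exposes; the DupLocs functor and its duplicates function are illustrative names, not part of this patch:

(* Collect locations that occur more than once, generically over the location type. *)
module DupLocs (L : Loc_sig.S) = struct
  let duplicates locs =
    let (_, dups) =
      List.fold_left
        (fun (seen, dups) loc ->
          if L.LSet.mem loc seen then
            (seen, L.LSet.add loc dups)
          else
            (L.LSet.add loc seen, dups))
        (L.LSet.empty, L.LSet.empty)
        locs
    in
    L.LSet.elements dups
end

(* The same functor body serves both instantiations. *)
module ConcreteDupLocs = DupLocs (Loc_sig.LocS)
module AbstractDupLocs = DupLocs (Loc_sig.ALocS)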
@@ -9,89 +9,112 @@ type 'a t = 'a * 'a list -let to_list (x, xs) = x::xs +let to_list (x, xs) = x :: xs let of_list = function - | x::xs -> Some (x, xs) + | x :: xs -> Some (x, xs) | [] -> None +let of_list_exn = function + | x :: xs -> (x, xs) + | [] -> failwith "of_list_exn given empty list" + let one x = (x, []) -let cons x0 (x1, xs) = (x0, x1::xs) +let cons x0 (x1, xs) = (x0, x1 :: xs) -let mem y (x1, xs) = - x1 = y || List.mem y xs +let mem y (x1, xs) = x1 = y || Core_list.mem xs y -let exists f (x1, xs) = - f x1 || List.exists f xs +let exists f (x1, xs) = f x1 || Core_list.exists ~f xs let iter f (x, xs) = f x; - List.iter f xs + Core_list.iter ~f xs -let map f (x, xs) = (f x, List.map f xs) +let map f (x, xs) = (f x, Core_list.map ~f xs) -let (>>|) l f = map f l +let ( >>| ) l f = map f l let ident_map f ((x, xs) as original) = let x' = f x in let xs' = ListUtils.ident_map f xs in - if x' == x && xs' == xs then original - else (x', xs') + if x' == x && xs' == xs then + original + else + (x', xs') let concat (xs, xss) = let xs = to_list xs in - let xss = List.map to_list xss in - match List.concat (xs::xss) with + let xss = Core_list.map ~f:to_list xss in + match Core_list.join (xs :: xss) with | [] -> failwith "impossible" - | x::xs -> (x, xs) + | x :: xs -> (x, xs) let map_concat f (x, xs) = - let xss = List.map (fun x -> to_list (f x)) (x::xs) in - match List.concat xss with + let xss = Core_list.map ~f:(fun x -> to_list (f x)) (x :: xs) in + match Core_list.join xss with | [] -> failwith "impossible" - | x::xs -> (x, xs) + | x :: xs -> (x, xs) -let (>>=) l f = map_concat f l +let ( >>= ) l f = map_concat f l let rev (x, xs) = - match List.rev (x::xs) with + match Core_list.rev (x :: xs) with | [] -> failwith "impossible" - | x::xs -> (x, xs) + | x :: xs -> (x, xs) let rev_map f (x, xs) = - match List.rev_map f (x::xs) with + match Core_list.rev_map ~f (x :: xs) with | [] -> failwith "impossible" - | x::xs -> (x, xs) + | x :: xs -> (x, xs) let rev_append xs ys = - match List.rev_append (to_list xs) (to_list ys) with + match Core_list.rev_append (to_list xs) (to_list ys) with + | [] -> failwith "impossible" + | z :: zs -> (z, zs) + +let append xs ys = + match Core_list.append (to_list xs) (to_list ys) with | [] -> failwith "impossible" - | z::zs -> (z, zs) + | z :: zs -> (z, zs) -let length (_, xs) = 1 + List.length xs +let length (_, xs) = 1 + Core_list.length xs -let fold_left f acc (x, xs) = List.fold_left f acc (x::xs) +let fold_left f acc (x, xs) = Core_list.fold_left ~f ~init:acc (x :: xs) let hd (x, _) = x -let nth nel n = List.nth (to_list nel) n +let tl (_, xs) = xs + +let nth nel n = Core_list.nth_exn (to_list nel) n + +let dedup ?(compare = Pervasives.compare) (x, xs) = + let xs = Core_list.dedup ~compare (x :: xs) in + match xs with + | x :: xs -> (x, xs) + | _ -> failwith "impossible: dedup must have removed a nonduplicate" let result_all = function - | Ok x, rest -> - begin match Core_result.all rest with - | Ok rest -> Ok (x, rest) - | Error _ as err -> err + | (Ok x, rest) -> + begin + match Core_result.all rest with + | Ok rest -> Ok (x, rest) + | Error _ as err -> err end - | Error _ as err, _ -> err + | ((Error _ as err), _) -> err let cat_maybes nel = let rev_result = - fold_left begin fun acc elt -> match acc, elt with - | _, None -> acc - | None, Some x -> Some (one x) - | Some lst, Some x -> Some (cons x lst) - end None nel + fold_left + begin + fun acc elt -> + match (acc, elt) with + | (_, None) -> acc + | (None, Some x) -> Some (one x) + | (Some lst, Some 
x) -> Some (cons x lst) + end + None + nel in match rev_result with | None -> None diff --git a/src/common/utils/nel.mli b/src/common/utils/nel.mli index eaa595ce7cf..66e090b2788 100644 --- a/src/common/utils/nel.mli +++ b/src/common/utils/nel.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,26 +7,54 @@ type 'a t = 'a * 'a list -val to_list: 'a t -> 'a list -val of_list: 'a list -> 'a t option -val one: 'a -> 'a t -val cons: 'a -> 'a t -> 'a t -val mem: 'a -> 'a t -> bool -val exists: ('a -> bool) -> 'a t -> bool -val iter: ('a -> unit) -> 'a t -> unit -val map: ('a -> 'b) -> 'a t -> 'b t -val (>>|): 'a t -> ('a -> 'b) -> 'b t -val ident_map: ('a -> 'a) -> 'a t -> 'a t -val concat: 'a t t -> 'a t -val map_concat: ('a -> 'b t) -> 'a t -> 'b t -val (>>=): 'a t -> ('a -> 'b t) -> 'b t -val rev: 'a t -> 'a t -val rev_map: ('a -> 'b) -> 'a t -> 'b t -val rev_append: 'a t -> 'a t -> 'a t -val length: 'a t -> int -val fold_left: ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a -val hd: 'a t -> 'a -val nth: 'a t -> int -> 'a - -val result_all: ('a, 'b) result t -> ('a t, 'b) result -val cat_maybes: 'a option t -> 'a t option +val to_list : 'a t -> 'a list + +val of_list : 'a list -> 'a t option + +val of_list_exn : 'a list -> 'a t + +val one : 'a -> 'a t + +val cons : 'a -> 'a t -> 'a t + +val mem : 'a -> 'a t -> bool + +val exists : ('a -> bool) -> 'a t -> bool + +val iter : ('a -> unit) -> 'a t -> unit + +val map : ('a -> 'b) -> 'a t -> 'b t + +val ( >>| ) : 'a t -> ('a -> 'b) -> 'b t + +val ident_map : ('a -> 'a) -> 'a t -> 'a t + +val concat : 'a t t -> 'a t + +val map_concat : ('a -> 'b t) -> 'a t -> 'b t + +val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t + +val rev : 'a t -> 'a t + +val rev_map : ('a -> 'b) -> 'a t -> 'b t + +val rev_append : 'a t -> 'a t -> 'a t + +val append : 'a t -> 'a t -> 'a t + +val length : 'a t -> int + +val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a + +val hd : 'a t -> 'a + +val tl : 'a t -> 'a list + +val nth : 'a t -> int -> 'a + +val dedup : ?compare:('a -> 'a -> int) -> 'a t -> 'a t + +val result_all : ('a, 'b) result t -> ('a t, 'b) result + +val cat_maybes : 'a option t -> 'a t option diff --git a/src/common/utils/optionUtils.ml b/src/common/utils/optionUtils.ml index a7b5f68e349..dcce2574d12 100644 --- a/src/common/utils/optionUtils.ml +++ b/src/common/utils/optionUtils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,6 +10,8 @@ let ident_map f x = match x with | None -> x | Some x' -> - let x'' = f x' in - if x'' == x' then x - else Some x'' + let x'' = f x' in + if x'' == x' then + x + else + Some x'' diff --git a/src/common/utils/resizableArray.ml b/src/common/utils/resizableArray.ml new file mode 100644 index 00000000000..e0d96eed8a2 --- /dev/null +++ b/src/common/utils/resizableArray.ml @@ -0,0 +1,62 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type 'a t = { + mutable arr: 'a option array; + mutable size: int; +} + +exception Out_of_bounds_set of string + +let make size = + { + arr = Array.make size None; + (* 0, not the given `size`. See the comment for this function in the `.mli` file. *) + size = 0; + } + +let get arr i = + if i < 0 || i >= arr.size then + None + else + arr.arr.(i) + +let change_capacity arr new_capacity = + let new_array = Array.make new_capacity None in + Array.blit arr.arr 0 new_array 0 arr.size; + arr.arr <- new_array + +let expand_if_needed arr = + let old_capacity = Array.length arr.arr in + if arr.size = old_capacity then + let new_capacity = max (old_capacity * 2) 1 in + change_capacity arr new_capacity + +let set arr i x = + if i >= arr.size || i < 0 then + raise (Out_of_bounds_set (Printf.sprintf "Index: %d, size: %d" i arr.size)); + arr.arr.(i) <- Some x + +let push arr elt = + expand_if_needed arr; + arr.arr.(arr.size) <- Some elt; + arr.size <- arr.size + 1 + +let shrink arr = if arr.size <> Array.length arr.arr then change_capacity arr arr.size + +let size arr = arr.size + +let underlying_array_size_do_not_use arr = Array.length arr.arr + +let to_hashtbl arr = + let tbl = Hashtbl.create arr.size in + for i = 0 to arr.size - 1 do + match arr.arr.(i) with + | Some v -> Hashtbl.add tbl v i + | None -> () + done; + tbl diff --git a/src/common/utils/resizableArray.mli b/src/common/utils/resizableArray.mli new file mode 100644 index 00000000000..0952761fb05 --- /dev/null +++ b/src/common/utils/resizableArray.mli @@ -0,0 +1,36 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* An array that automatically expands when needed *) + +type 'a t + +exception Out_of_bounds_set of string + +(* `make x` creates a ResizableArray.t where the underlying array has the initial size of `x`. + * However, this is purely for the purposes of optimization: + * `ResizableArray.size (ResizableArray.make 5)` still * evaluates to `0`. *) +val make : int -> 'a t + +(* `set arr i x` raises `Out_of_bounds_set` if `i >= ResizableArray.size arr`, or if `i < 0` *) +val set : 'a t -> int -> 'a -> unit + +(* Expands the underlying array if necessary *) +val push : 'a t -> 'a -> unit + +(* Shrinks the representation to match the number of elements stored *) +val shrink : 'a t -> unit + +(* Returns None if the index is out of bounds. *) +val get : 'a t -> int -> 'a option + +val size : 'a t -> int + +(* Exposed only for white box testing. Do not use this. Really. *) +val underlying_array_size_do_not_use : 'a t -> int + +val to_hashtbl : 'a t -> ('a, int) Hashtbl.t diff --git a/src/common/utils/sMapUtils.ml b/src/common/utils/sMapUtils.ml index 10d1f7937f4..6f77e209a97 100644 --- a/src/common/utils/sMapUtils.ml +++ b/src/common/utils/sMapUtils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
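For reference, a small usage sketch of the ResizableArray module introduced above, using only the operations declared in resizableArray.mli; the string values are arbitrary:

let () =
  let arr = ResizableArray.make 4 in
  (* the argument to make is only a capacity hint; the logical size starts at 0 *)
  assert (ResizableArray.size arr = 0);
  ResizableArray.push arr "a";
  ResizableArray.push arr "b";
  assert (ResizableArray.get arr 1 = Some "b");
  (* out-of-bounds get returns None rather than raising *)
  assert (ResizableArray.get arr 5 = None);
  (* set is allowed only for indices below the current size *)
  ResizableArray.set arr 0 "z";
  (try ResizableArray.set arr 2 "c" with ResizableArray.Out_of_bounds_set _ -> ());
  (* shrink trims capacity back down to the number of stored elements *)
  ResizableArray.shrink arr;
  assert (ResizableArray.size arr = 2)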
@@ -8,13 +8,19 @@ (** Performs a map, but returns the original SMap.t if there is no change **) let ident_map f map = let changed = ref false in - let map' = SMap.map (fun elem -> - let elem' = f elem in - if elem == elem' - then elem - else begin - changed := true; - elem' - end - ) map in - if !changed then map' else map + let map' = + SMap.map + (fun elem -> + let elem' = f elem in + if elem == elem' then + elem + else ( + changed := true; + elem' + )) + map + in + if !changed then + map' + else + map diff --git a/src/common/utils/tree.ml b/src/common/utils/tree.ml index b27bc871432..f46a189b126 100644 --- a/src/common/utils/tree.ml +++ b/src/common/utils/tree.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/common/utils/unionFind.ml b/src/common/utils/unionFind.ml index 8d1e70638fc..fead4d14b9e 100644 --- a/src/common/utils/unionFind.ml +++ b/src/common/utils/unionFind.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,74 +11,52 @@ type 'a entry = { mutable next: int; mutable rank: int; } + type 'a t = { indices: ('a, int) Hashtbl.t; - mutable entries: 'a entry option array; + entries: 'a entry ResizableArray.t; } let default_size = 32 -let make_with_size n = { - indices = Hashtbl.create n; - entries = Array.make n None; -} +let make_with_size n = { indices = Hashtbl.create n; entries = ResizableArray.make n } let make () = make_with_size default_size -(* Number of entries *) -let entry_count t = Hashtbl.length t.indices - -let add_unsafe t x i = - Hashtbl.add t.indices x i; - t.entries.(i) <- Some { - value = x; - parent = i; - next = i; - rank = 0; - } +let push t x = + let next_index = ResizableArray.size t.entries in + Hashtbl.add t.indices x next_index; + ResizableArray.push t.entries { value = x; parent = next_index; next = next_index; rank = 0 } -let get_entry t i = match t.entries.(i) with +let get_entry t i = + match ResizableArray.get t.entries i with | Some x -> x | None -> raise Not_found let get_value t i = (get_entry t i).value + let get_parent t i = (get_entry t i).parent + let get_next t i = (get_entry t i).next + let get_rank t i = (get_entry t i).rank let set_parent t i parent = (get_entry t i).parent <- parent + let set_next t i next = (get_entry t i).next <- next -let set_rank t i rank = (get_entry t i).rank <- rank -let get_next_power_of_two x = - let rec f y = - if y >= x then - y - else - f (y * 2) - in - f 1 +let set_rank t i rank = (get_entry t i).rank <- rank let of_list lst = let len = List.length lst in - let t = make_with_size (get_next_power_of_two len) in - List.iteri (fun i x -> add_unsafe t x i) lst; + let t = make_with_size (Utils_js.get_next_power_of_two len) in + List.iter (fun x -> push t x) lst; t -let grow t = - let old_arr_size = Array.length t.entries in - let new_arr_size = old_arr_size * 2 in - let new_entries = Array.make new_arr_size None in - Array.blit t.entries 0 new_entries 0 old_arr_size; - t.entries <- new_entries - (* Add the given value, and return its index *) let add_ t x = - let next_index = entry_count t in - if Array.length t.entries = next_index then - grow t; - add_unsafe t x next_index; - next_index + push t x; 
+ ResizableArray.size t.entries - 1 let add t x = ignore (add_ t x) @@ -103,11 +81,12 @@ let union t x1 x2 = let i2 = lookup_or_add t x2 in let i1_root = find_root_index t i1 in let i2_root = find_root_index t i2 in - if i1_root <> i2_root then begin + if i1_root <> i2_root then ( (* merge the circular linked lists *) let tmp = get_next t i1 in set_next t i1 (get_next t i2); set_next t i2 tmp; + (* set the parent pointer according to rank *) let rank1 = get_rank t i1_root in let rank2 = get_rank t i2_root in @@ -117,8 +96,8 @@ let union t x1 x2 = set_parent t i2_root i1_root else set_parent t i1_root i2_root; - set_rank t i2_root (rank2 + 1) - end + set_rank t i2_root (rank2 + 1) + ) let find t x = (* Raises Not_found if x is not present in the Hashtbl *) @@ -127,7 +106,7 @@ let find t x = get_value t root_index let rec members_of_index t initial_index i acc = - let acc = (get_value t i) :: acc in + let acc = get_value t i :: acc in let next = get_next t i in if next = initial_index then acc diff --git a/src/common/utils/unionFind.mli b/src/common/utils/unionFind.mli index 4ff214027e1..e9bd057d45f 100644 --- a/src/common/utils/unionFind.mli +++ b/src/common/utils/unionFind.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,24 +11,24 @@ type 'a t (* CONSTRUCTORS *) (* Create an empty forest *) -val make: unit -> 'a t +val make : unit -> 'a t (* Create a forest initialized with the given items. More efficient than repeatedly calling `add` *) -val of_list: 'a list -> 'a t +val of_list : 'a list -> 'a t (* MUTATORS *) -val add: 'a t -> 'a -> unit +val add : 'a t -> 'a -> unit (* Unions the two elements. If either (or both) element does not exist, add it. *) -val union: 'a t -> 'a -> 'a -> unit +val union : 'a t -> 'a -> 'a -> unit (* ACCESSORS *) (* Finds the root element of the given element. Raises `Not_found` if the given element is not * already present. *) -val find: 'a t -> 'a -> 'a +val find : 'a t -> 'a -> 'a (* Returns all members in the same set as the given element. Raises `Not_found` if the given element * is not already present. *) -val members: 'a t -> 'a -> 'a list +val members : 'a t -> 'a -> 'a list diff --git a/src/common/utils/utils_js.ml b/src/common/utils/utils_js.ml index a33e0c2f22a..758560659a4 100644 --- a/src/common/utils/utils_js.ml +++ b/src/common/utils/utils_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
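A usage sketch for the UnionFind API above, now backed by ResizableArray; it uses only the functions declared in unionFind.mli, and the element values are arbitrary:

let () =
  let uf = UnionFind.of_list ["a"; "b"; "c"] in
  UnionFind.union uf "a" "b";
  (* union adds missing elements on demand, so "d" need not be added first *)
  UnionFind.union uf "b" "d";
  (* elements in the same set share a root *)
  assert (UnionFind.find uf "a" = UnionFind.find uf "d");
  assert (List.sort compare (UnionFind.members uf "b") = ["a"; "b"; "d"]);
  (* an element never unioned stays in its own singleton set *)
  assert (UnionFind.members uf "c" = ["c"])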
@@ -8,35 +8,31 @@ exception Key_not_found of (* message *) string * (* key *) string let spf = Printf.sprintf + let print_endlinef fmt = Printf.ksprintf print_endline fmt + let prerr_endlinef fmt = Printf.ksprintf prerr_endline fmt let exe_name = Filename.basename Sys.executable_name -module LocSet = Set.Make(Loc) - -module LocMap = MyMap.Make(Loc) - -module FilenameSet = Set.Make(File_key) - +module FilenameSet = Set.Make (File_key) module FilenameMap = MyMap.Make (File_key) -module PathMap : MyMap.S with type key = Path.t = MyMap.Make (struct - type t = Path.t - let compare p1 p2 = - String.compare (Path.to_string p1) (Path.to_string p2) -end) - let assert_false s = - let callstack = Printexc.(get_callstack 10 |> raw_backtrace_to_string) in - prerr_endline (spf "%s%s\n%s:\n%s%s%s" - (* this clowny shit is to evade hg's conflict marker detection *) - "<<<<" "<<<<" s callstack ">>>>" ">>>>" - ); + let callstack = Exception.get_current_callstack_string 10 in + prerr_endline + (spf + "%s%s\n%s:\n%s%s%s" + (* this clowny shit is to evade hg's conflict marker detection *) + "<<<<" + "<<<<" + s + callstack + ">>>>" + ">>>>"); failwith s -let __DEBUG__ ?(s="") f = - try f () with _ -> assert_false s +let __DEBUG__ ?(s = "") f = (try f () with _ -> assert_false s) let call_succeeds try_function function_input = try @@ -44,47 +40,67 @@ let call_succeeds try_function function_input = true with (* print failwith command's exception message *) - | Failure msg -> prerr_endline msg; - false + | Failure msg -> + prerr_endline msg; + false | _ -> false -(* quick exception format *) +let map_pair f g (a, b) = (f a, g b) + +let map_fst f (a, b) = (f a, b) + +let map_snd g (a, b) = (a, g b) -let fmt_exc exc = Printexc.((to_string exc) ^ "\n" ^ (get_backtrace ())) +let swap (a, b) = (b, a) -let fmt_file_exc file exc = file ^ ": " ^ (fmt_exc exc) +let mk_tuple x y = (x, y) -let map_pair f g (a,b) = (f a, g b) -let map_fst f (a,b) = (f a, b) -let map_snd g (a,b) = (a, g b) +let mk_tuple_swapped x y = (y, x) let rec iter2opt f = function - | x::xs, y::ys -> + | (x :: xs, y :: ys) -> f (Some x) (Some y); iter2opt f (xs, ys) - | x::xs, [] -> + | (x :: xs, []) -> f (Some x) None; iter2opt f (xs, []) - | [], y::ys -> + | ([], y :: ys) -> f None (Some y); iter2opt f ([], ys) - | [], [] -> () + | ([], []) -> () let rec toFixpoint f x = let x' = f x in - if x = x' then x else toFixpoint f x' + if x = x' then + x + else + toFixpoint f x' + +let uncurry f (x, y) = f x y -let uncurry f (x,y) = f x y let curry f x y = f (x, y) let ( %> ) f g x = g (f x) +(** + * Given a list of lazy "option" expressions, evaluate each in the list + * sequentially until one produces a `Some` (and do not evaluate any remaining). + *) +let lazy_seq (lst : 'a option Lazy.t list) : 'a option = + List.fold_left + (fun acc lazy_expr -> + match acc with + | None -> Lazy.force lazy_expr + | Some _ -> acc) + None + lst + (** * Useful for various places where a user might have typoed a string and the * set of possible intended strings is known (i.e. variable names). *) let typo_suggestions = - (** + (* * Calculates the Levenshtein distance between the two strings, but with a * limit. 
See here for documentation on this algorithm: * @@ -94,77 +110,90 @@ let typo_suggestions = let alen = String.length a in let blen = String.length b in let limit = min (max alen blen) limit in - if abs (alen - blen) > limit then None else ( + if abs (alen - blen) > limit then + None + else let matrix = Array.make_matrix (alen + 1) (blen + 1) (limit + 1) in matrix.(0).(0) <- 0; - for i = 1 to (max alen blen) do + for i = 1 to max alen blen do if i <= alen then matrix.(i).(0) <- i; - if i <= blen then matrix.(0).(i) <- i; + if i <= blen then matrix.(0).(i) <- i done; for ai = 1 to alen do for bi = max 1 (ai - limit - 1) to min blen (ai + limit + 1) do let prev_ai = a.[ai - 1] in let prev_bi = b.[bi - 1] in - let cost = if prev_ai = prev_bi then 0 else 1 in + let cost = + if prev_ai = prev_bi then + 0 + else + 1 + in let closest = min - (min - (matrix.(ai - 1).(bi) + 1) (* deletion *) - (matrix.(ai).(bi - 1) + 1)) (* insertion *) - (matrix.(ai - 1).(bi - 1) + cost) (* substitution *) + (min (matrix.(ai - 1).(bi) + 1) (* deletion *) (matrix.(ai).(bi - 1) + 1)) + (* insertion *) + (matrix.(ai - 1).(bi - 1) + cost) + (* substitution *) in let closest = - if ai > 1 && bi > 1 && prev_ai = b.[bi-2] && a.[ai-2] = prev_bi - then + if ai > 1 && bi > 1 && prev_ai = b.[bi - 2] && a.[ai - 2] = prev_bi then (* transposition *) - min (matrix.(ai).(bi)) (matrix.(ai - 2).(bi - 2) + cost) - else closest + min matrix.(ai).(bi) (matrix.(ai - 2).(bi - 2) + cost) + else + closest in matrix.(ai).(bi) <- closest - done; + done done; let result = matrix.(alen).(blen) in - if result > limit then None else Some result - ) + if result > limit then + None + else + Some result in - let fold_results limit name results poss_name = match distance name poss_name limit with | None -> results | Some distance -> - let (curr_choice, curr_dist) = results in - if distance < curr_dist - then ([poss_name], curr_dist) - else - if distance = curr_dist - then (poss_name::curr_choice, curr_dist) - else results + let (curr_choice, curr_dist) = results in + if distance < curr_dist then + ([poss_name], curr_dist) + else if distance = curr_dist then + (poss_name :: curr_choice, curr_dist) + else + results in - fun possible_names name -> let limit = match String.length name with - | 1 | 2 -> 0 - | 3 | 4 -> 1 - | 5 | 6 -> 2 + | 1 + | 2 -> + 0 + | 3 + | 4 -> + 1 + | 5 + | 6 -> + 2 | _ -> 3 in fst (List.fold_left (fold_results limit name) ([], max_int) possible_names) let typo_suggestion possible_names name = let suggestions = typo_suggestions possible_names name in - try Some (List.hd suggestions) - with _ -> None + (try Some (List.hd suggestions) with _ -> None) (* util to limit the number of calls to a (usually recursive) function *) let count_calls ~counter ~default f = - (** Count number of calls to a function f, decrementing at each call and + (* Count number of calls to a function f, decrementing at each call and returning default when count reaches 0. **) - if !counter = 0 then default - else begin + if !counter = 0 then + default + else ( decr counter; f () - end + ) let extension_of_filename filename = try @@ -185,16 +214,16 @@ let ordinal = function | 9 -> "ninth" | n -> let n = string_of_int n in - let th = String.get n ((String.length n) - 1) in - let th = match th with - | '1' -> "st" - | '2' -> "nd" - | '3' -> "rd" - | _ -> "th" + let th = n.[String.length n - 1] in + let th = + match th with + | '1' -> "st" + | '2' -> "nd" + | '3' -> "rd" + | _ -> "th" in n ^ th - (* Module implementing the recommended way to augment a map. 
Without this API, we end up using the lower-level Map.union API. But @@ -207,26 +236,22 @@ let ordinal = function explicit, and is implemented by simply passing the arguments in the correct order to Map.union. *) -module Augmentable(M: MyMap.S) = struct +module Augmentable (M : MyMap.S) = struct let augment map ~with_bindings = M.union with_bindings map end -module AugmentableSMap = Augmentable(SMap) +module AugmentableSMap = Augmentable (SMap) (* The problem with Core_result's >>= is that the function second argument cannot return * an Lwt.t. This helper infix operator handles that case *) -let (%>>=) - (result: ('ok, 'err) Core_result.t) - (f: 'ok -> ('a, 'err) Core_result.t Lwt.t) - : ('a, 'err) Core_result.t Lwt.t = +let ( %>>= ) (result : ('ok, 'err) Core_result.t) (f : 'ok -> ('a, 'err) Core_result.t Lwt.t) : + ('a, 'err) Core_result.t Lwt.t = match result with | Error e -> Lwt.return (Error e) | Ok x -> f x -let (%>>|) - (result: ('ok, 'err) Core_result.t) - (f: 'ok -> 'a Lwt.t) - : ('a, 'err) Core_result.t Lwt.t = +let ( %>>| ) (result : ('ok, 'err) Core_result.t) (f : 'ok -> 'a Lwt.t) : + ('a, 'err) Core_result.t Lwt.t = match result with | Error e -> Lwt.return (Error e) | Ok x -> @@ -234,41 +259,43 @@ let (%>>|) Lwt.return (Ok new_x) let bind2 ~f x y = Core_result.bind x (fun x -> Core_result.bind y (f x)) -let map2 ~f x y = Core_result.bind x (fun x -> Core_result.map y ~f:(f x)) -let to_exn_string backtrace exn = - let backtrace = String.trim backtrace in - Printf.sprintf "%s%s%s" - (Printexc.to_string exn) - (if backtrace = "" then "" else "\n") - backtrace +let map2 ~f x y = Core_result.bind x (fun x -> Core_result.map y ~f:(f x)) let try_with_json f = - try%lwt f () with exn -> - let backtrace = Printexc.get_backtrace () in - Lwt.return (Error (to_exn_string backtrace exn, None)) + try%lwt f () with + | Lwt.Canceled as exn -> + let exn = Exception.wrap exn in + Exception.reraise exn + | exn -> + let exn = Exception.wrap exn in + Lwt.return (Error (Exception.to_string exn, None)) let try_with f = - try%lwt f () with exn -> - let backtrace = Printexc.get_backtrace () in - Lwt.return (Error (to_exn_string backtrace exn)) + try%lwt f () with + | Lwt.Canceled as exn -> + let exn = Exception.wrap exn in + Exception.reraise exn + | exn -> + let exn = Exception.wrap exn in + Lwt.return (Error (Exception.to_string exn)) let split_result = function -| Ok (success, extra) -> Ok success, extra -| Error (error, extra) -> Error error, extra + | Ok (success, extra) -> (Ok success, extra) + | Error (error, extra) -> (Error error, extra) let debug_print_current_stack_trace () = - let open Printexc in - get_callstack 200 |> raw_backtrace_to_string |> Hh_logger.info "Current backtrace:\n%s" + Hh_logger.info "Current backtrace:\n%s" (Exception.get_current_callstack_string 200) (* Pass through a result; logging if it is an Error. 
Includes the provided string context, which is * computed lazily under the assumption that the error case is the uncommon case *) -let log_when_error (context: string Lazy.t) (result: ('a, string) result) : ('a, string) result = - begin match result with +let log_when_error (context : string Lazy.t) (result : ('a, string) result) : ('a, string) result = + begin + match result with | Ok _ -> () | Error msg -> - let lazy context = context in - Hh_logger.error "Error (%s): %s" context msg + let (lazy context) = context in + Hh_logger.error "Error (%s): %s" context msg end; result @@ -286,3 +313,12 @@ let id_print context f x = let debug_string_of_result string_of_val = function | Ok x -> Printf.sprintf "Ok (%s)" (string_of_val x) | Error err -> Printf.sprintf "Error (%s)" err + +let get_next_power_of_two x = + let rec f y = + if y >= x then + y + else + f (y * 2) + in + f 1 diff --git a/src/common/utils/utils_jsx.ml b/src/common/utils/utils_jsx.ml index 9b759ca3797..e0c05e906b1 100644 --- a/src/common/utils/utils_jsx.ml +++ b/src/common/utils/utils_jsx.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -16,113 +16,109 @@ * 1. We can't use Str * 2. It's not enough to trim the text, we also need to figure out the line and * column for the start and end of the text -*) + *) let trim_jsx_text = (* Removes all the spaces from the beginning of the string *) let prefix_trim = let rec trimmer str len idx = - if idx >= len - then "" - else if String.get str idx = ' ' - then trimmer str len (idx+1) - else String.sub str idx (len - idx) - - in fun str -> trimmer str (String.length str) 0 + if idx >= len then + "" + else if str.[idx] = ' ' then + trimmer str len (idx + 1) + else + String.sub str idx (len - idx) + in + (fun str -> trimmer str (String.length str) 0) in - (* Removes all the spaces from the end of the string *) let suffix_trim = let rec trimmer str idx = - if idx < 0 - then "" - else if String.get str idx = ' ' - then trimmer str (idx-1) - else String.sub str 0 (idx+1) - - in fun str -> trimmer str (String.length str - 1) + if idx < 0 then + "" + else if str.[idx] = ' ' then + trimmer str (idx - 1) + else + String.sub str 0 (idx + 1) + in + (fun str -> trimmer str (String.length str - 1)) in - fun loc value -> (* Tabs get turned into spaces *) let value = String_utils.replace_char '\t' ' ' value in - (* The algorithm is line based, so split the string into lines *) let lines = String_utils.split_into_lines value in let last_line = List.length lines - 1 in - - let trimmed_lines = List.mapi (fun idx line -> - (* Remove the leading whitespace from every line but the first *) - let line = if idx <> 0 then prefix_trim line else line in - (* Remove the trailing whitespace from every line but the last *) - if idx <> last_line then suffix_trim line else line - ) lines in - + let trimmed_lines = + List.mapi + (fun idx line -> + (* Remove the leading whitespace from every line but the first *) + let line = + if idx <> 0 then + prefix_trim line + else + line + in + (* Remove the trailing whitespace from every line but the last *) + if idx <> last_line then + suffix_trim line + else + line) + lines + in (* Figure out the first and last non-empty line, if there are any *) - let _, first_and_last_non_empty = + let (_, first_and_last_non_empty) = List.fold_left (fun (idx, first_and_last) line -> let 
first_and_last = - if line <> "" - then match first_and_last with - | None -> Some (idx, idx) - | Some (first, _) -> Some (first, idx) - else first_and_last in - idx+1, first_and_last) + if line <> "" then + match first_and_last with + | None -> Some (idx, idx) + | Some (first, _) -> Some (first, idx) + else + first_and_last + in + (idx + 1, first_and_last)) (0, None) - trimmed_lines in - + trimmed_lines + in match first_and_last_non_empty with | None -> None | Some (first_line, last_line) -> (* Filter out empty lines and turn newlines into spaces *) - let trimmed = - trimmed_lines - |> List.filter (fun line -> line <> "") - |> String.concat " " in - - let open Loc in - let start_line = loc.start.line + first_line in - let end_line = loc.start.line + last_line in - - (* We want to know the column and offset for the first and last - * non-whitespace characters. We can do that by figuring out what those - * characters are and using String.index and String.rindex to search for - * them *) - let first_trimmed_line = List.nth trimmed_lines first_line in - let last_trimmed_line = List.nth trimmed_lines last_line in - let first_char = String.get first_trimmed_line 0 in - let last_char = - String.get last_trimmed_line (String.length last_trimmed_line - 1) in - - (* For column we just do a search within the line *) - let start_column = String.index (List.nth lines first_line) first_char in - let end_column = String.rindex (List.nth lines last_line) last_char + 1 in - - (* If we're on the first line, then we need to see on which column the line + let trimmed = trimmed_lines |> List.filter (fun line -> line <> "") |> String.concat " " in + Loc.( + let start_line = loc.start.line + first_line in + let end_line = loc.start.line + last_line in + (* We want to know the column and offset for the first and last + * non-whitespace characters. 
We can do that by figuring out what those + * characters are and using String.index and String.rindex to search for + * them *) + let first_trimmed_line = List.nth trimmed_lines first_line in + let last_trimmed_line = List.nth trimmed_lines last_line in + let first_char = first_trimmed_line.[0] in + let last_char = last_trimmed_line.[String.length last_trimmed_line - 1] in + (* For column we just do a search within the line *) + let start_column = String.index (List.nth lines first_line) first_char in + let end_column = String.rindex (List.nth lines last_line) last_char + 1 in + (* If we're on the first line, then we need to see on which column the line starts *) - let start_column = - if first_line = 0 - then start_column + loc.start.column - else start_column in - let end_column = - if last_line = 0 - then end_column + loc.start.column - else end_column in - - (* For offset, we do a search in the whole JSXText string *) - let start_offset = loc.start.offset + (String.index value first_char) in - let end_offset = loc.start.offset + (String.rindex value last_char) + 1 in - - let loc = { loc with - start = { - line = start_line; - column = start_column; - offset = start_offset; - }; - _end = { - line = end_line; - column = end_column; - offset = end_offset; - }; - } in - Some (loc, trimmed) + let start_column = + if first_line = 0 then + start_column + loc.start.column + else + start_column + in + let end_column = + if last_line = 0 then + end_column + loc.start.column + else + end_column + in + let loc = + { + loc with + start = { line = start_line; column = start_column }; + _end = { line = end_line; column = end_column }; + } + in + Some (loc, trimmed)) diff --git a/src/common/verbose.ml b/src/common/verbose.ml index b5939ba2e45..3e54969b2a7 100644 --- a/src/common/verbose.ml +++ b/src/common/verbose.ml @@ -1,3 +1,10 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + type t = { indent: int; depth: int; diff --git a/src/common/xx/dune b/src/common/xx/dune new file mode 100644 index 00000000000..61f0a489b02 --- /dev/null +++ b/src/common/xx/dune @@ -0,0 +1,6 @@ +(library + (name xx) + (wrapped false) + (c_names xx_stubs) + (libraries lz4) +) diff --git a/src/common/xx/xx.ml b/src/common/xx/xx.ml index d1896313744..200f2d74d9b 100644 --- a/src/common/xx/xx.ml +++ b/src/common/xx/xx.ml @@ -1,12 +1,31 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + type state -type hash -external init: unit -> state = "caml_xx_init" -external update: state -> string -> unit = "caml_xx_update" [@@noalloc] -external update_int: state -> 'a (* int *) -> unit = "caml_xx_update_int" [@@noalloc] -external digest: state -> hash = "caml_xx_digest" +type hash = int64 + +external init : unit -> state = "caml_xx_init" + +external update : state -> string -> unit = "caml_xx_update" [@@noalloc] + +external update_int : state -> 'a (* int *) -> unit = "caml_xx_update_int" [@@noalloc] + +external update_int64 : state -> (int64[@unboxed]) -> unit + = "caml_xx_update_int64" "caml_xx_update_int64_unboxed" + [@@noalloc] + +external digest : state -> (hash[@unboxed]) = "caml_xx_digest" "caml_xx_digest_unboxed" [@@noalloc] -(* Returns a 16 character hex string representation of the hash *) -external to_string: hash -> string = "caml_xx_to_string" +(* Unlike Int64.to_string, which returns a decimal string, this returns a hex + * string which is padded out to the full 16 bytes. *) +external to_string : (hash[@unboxed]) -> string = "caml_xx_to_string" "caml_xx_to_string_unboxed" -let foo x = x +(* 0 <= result < modulus *) +let modulo hash modulus = + assert (modulus > 0); + (Int64.to_int hash |> abs) mod modulus diff --git a/src/common/xx/xx_stubs.c b/src/common/xx/xx_stubs.c index bb22123c59e..fad71c97c4a 100644 --- a/src/common/xx/xx_stubs.c +++ b/src/common/xx/xx_stubs.c @@ -45,30 +45,50 @@ CAMLexport value caml_xx_update_int(value state, value v) { return Val_unit; } +CAMLexport value caml_xx_update_int64_unboxed(value state, int64_t v) { + XXH64_update(&State_val(state), &v, sizeof(int64_t)); + return Val_unit; +} + +CAMLexport value caml_xx_update_int64(value state, value v) { + return caml_xx_update_int64_unboxed(state, Int64_val(v)); +} + +CAMLexport XXH64_hash_t caml_xx_digest_unboxed(value state) { + return XXH64_digest(&State_val(state)); +} + CAMLexport value caml_xx_digest(value state) { - CAMLparam1(state); - CAMLlocal1(v); - XXH64_hash_t hash = XXH64_digest(&State_val(state)); - v = caml_alloc_string(sizeof(XXH64_hash_t)); - memcpy(String_val(v), &hash, sizeof(XXH64_hash_t)); - CAMLreturn(v); + return caml_copy_int64(caml_xx_digest_unboxed(state)); +} + +CAMLexport value caml_xx_to_string_unboxed(XXH64_hash_t hash) { + CAMLparam0(); + CAMLlocal1(str); + /* Max unsigned long long is 7FFFFFFFFFFFFFFF which is 16 bytes. + * + * Note that OCaml strings are not null-terminated, but rather use a somewhat + * clever encoding that combines the wosize from the header and the last byte + * of the data segment. When we allocate a string to hold 16 bytes (2 words), + * OCaml will actually allocate 24 bytes (3 words), where the final byte + * stores an offset which is 7 in our case. + * + * When calculating the length of a string, OCaml combines the byte size of + * the value (23) and the offset (7) to arrive at the actual size 16 = 23 - 7. + * + * The caml_alloc_string API hides this detail for us, so we don't need to + * worry about it at all, except to know that it's safe to write into the + * first 16 bytes. We could even write a null character into those bytes if we + * wanted. + */ + str = caml_alloc_string(16); + snprintf(String_val(str), 16, "%016llx", hash); + CAMLreturn(str); } -/* - * XXH64_hash_t is an unsigned 64 bit integer. This is too big for an OCaml - * int, so we just copy it into a string and pass that around abstractly. 
But to - * actually print as a readable string, we need to convert it back to an int - * and sprintf it into a new string - */ CAMLexport value caml_xx_to_string(value hash) { CAMLparam1(hash); CAMLlocal1(str); - // Max unsigned long long is 7FFFFFFFFFFFFFFF which is 16 characters. It - // doesn't seem like you need to ask for extra space for the null terminator - str = caml_alloc_string(16); - XXH64_hash_t hash_as_int; - memcpy(&hash_as_int, String_val(hash), sizeof(XXH64_hash_t)); - // 17 is 16 hex characters plus a null terminator - snprintf(String_val(str), 17, "%016llx", hash_as_int); + str = caml_xx_to_string_unboxed(Int64_val(hash)); CAMLreturn(str); } diff --git a/src/dune b/src/dune new file mode 100644 index 00000000000..f0046ccd104 --- /dev/null +++ b/src/dune @@ -0,0 +1,14 @@ +(executable + (name flow) + (modules + flow) + (libraries + flow_commands + flow_exit_status + flow_extra_commands + flow_logging_utils + flow_parser + default_injector_config + lz4 ; see dune-workspace + ) +) diff --git a/src/flow.ml b/src/flow.ml index 7c57bf1fbdb..af4cfb99d90 100644 --- a/src/flow.ml +++ b/src/flow.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,98 +12,78 @@ module FlowShell : sig val main : unit -> unit end = struct - (* normal commands *) - let commands = [ - AstCommand.command; - AutocompleteCommand.command; - CheckCommands.CheckCommand.command; - CheckCommands.FocusCheckCommand.command; - CheckContentsCommand.command; - CoverageCommand.command; - CycleCommand.command; - DumpTypesCommand.command; - FindModuleCommand.command; - FindRefsCommand.command; - ForceRecheckCommand.command; - GenFlowFilesCommand.command; - GetDefCommand.command; - GetImportsCommand.command; - IdeCommand.command; - InitCommand.command; - LspCommand.command; - LsCommand.command; - PortCommand.command; - RefactorCommand.command; - SaveStateCommand.command; - ServerCommand.command; - StartCommand.command; - StopCommand.command; - SuggestCommand.command; - TypeAtPosCommand.command; - VersionCommand.command; - ] @ (Extra_commands.extra_commands ()) + let commands = + [ + AstCommand.command; + AutocompleteCommand.command; + AutofixCommand.command; + CheckCommands.CheckCommand.command; + CheckCommands.FocusCheckCommand.command; + CheckContentsCommand.command; + ConfigCommand.command; + CoverageCommand.command; + BatchCoverageCommand.command; + CycleCommand.command; + GraphCommand.command; + DumpTypesCommand.command; + FindModuleCommand.command; + FindRefsCommand.command; + ForceRecheckCommand.command; + GetDefCommand.command; + GetImportsCommand.command; + InitCommand.command; + LspCommand.command; + LsCommand.command; + RefactorCommand.command; + SaveStateCommand.command; + ServerCommand.command; + StartCommand.command; + StopCommand.command; + SuggestCommand.command; + TypeAtPosCommand.command; + VersionCommand.command; + ] + @ Extra_commands.extra_commands () (* status commands, which need a list of other commands *) - module StatusCommand = StatusCommands.Status(struct + module StatusCommand = StatusCommands.Status (struct let commands = commands end) + let commands = StatusCommand.command :: commands - module DefaultCommand = StatusCommands.Default(struct + module DefaultCommand = StatusCommands.Default (struct let commands = commands end) + let commands = DefaultCommand.command :: commands - module 
ShellCommand = ShellCompleteCommand.Command(struct + module ShellCommand = ShellCompleteCommand.Command (struct let commands = commands end) + let commands = ShellCommand.command :: commands let main () = - let default_command = DefaultCommand.command in let argv = Array.to_list Sys.argv in - let (command, argv) = match argv with - | [] -> failwith "Expected command" - | _cmd::[] -> (default_command, []) - | _cmd::next::rest -> + let (command, argv) = + match argv with + | [] -> failwith "Expected command" + | [_cmd] -> (default_command, []) + | _cmd :: next :: rest -> let subcmd = String.lowercase_ascii next in - try - let command = List.find (fun command -> - (CommandSpec.name command) = subcmd - ) commands in - (command, rest) - with Not_found -> - (default_command, next::rest) + (try + let command = List.find (fun command -> CommandSpec.name command = subcmd) commands in + (command, rest) + with Not_found -> (default_command, next :: rest)) in let command_string = CommandSpec.name command in FlowEventLogger.set_command (Some command_string); FlowEventLogger.init_flow_command ~version:Flow_version.version; - try - let args = CommandSpec.args_of_argv command argv in - CommandSpec.run command args - with - | CommandSpec.Show_help -> - print_endline (CommandSpec.string_of_usage command); - FlowExitStatus.(exit No_error) - | CommandSpec.Failed_to_parse (arg_name, msg) -> - begin try - let json_arg = List.find (fun s -> - String_utils.string_starts_with s "--pretty" || String_utils.string_starts_with s "--json") - argv in - let pretty = String_utils.string_starts_with json_arg "--pretty" in - FlowExitStatus.set_json_mode ~pretty - with Not_found -> () end; - let msg = Utils_js.spf - "%s: %s %s\n%s" - (Filename.basename Sys.executable_name) - arg_name - msg - (CommandSpec.string_of_usage command) - in - FlowExitStatus.(exit ~msg Commandline_usage_error) + CommandUtils.run_command command argv end let _ = @@ -116,26 +96,30 @@ let _ = normally would cause a SIGPIPE instead throws an EPIPE exception. We handle exceptions and exit via FlowExitStatus.exit instead. 
*) let () = Sys_utils.set_signal Sys.sigpipe Sys.Signal_ignore in - - let () = Printexc.record_backtrace true in - - let () = if Sys_utils.get_env "IN_FLOW_TEST" <> None then EventLogger.disable_logging () in - + let () = Exception.record_backtrace true in + let () = if Sys_utils.get_env "IN_FLOW_TEST" <> None then LoggingUtils.disable_logging () in try - Daemon.check_entry_point (); (* this call might not return *) + Daemon.check_entry_point (); + + (* this call might not return *) FlowShell.main () with - | SharedMem_js.Out_of_shared_memory -> - let bt = Printexc.get_backtrace () in - let msg = Utils.spf "Out of shared memory%s" (if bt = "" then bt else ":\n"^bt) in - FlowExitStatus.(exit ~msg Out_of_shared_memory) + | SharedMem_js.Out_of_shared_memory as e -> + let e = Exception.wrap e in + let bt = Exception.get_backtrace_string e in + let msg = + Utils.spf + "Out of shared memory%s" + ( if bt = "" then + bt + else + ":\n" ^ bt ) + in + FlowExitStatus.(exit ~msg Out_of_shared_memory) | e -> - let bt = Printexc.get_backtrace () in - let msg = Utils.spf "Unhandled exception: %s%s" - (Printexc.to_string e) - (if bt = "" then bt else "\n"^bt) - in - FlowExitStatus.(exit ~msg Unknown_error) + let e = Exception.wrap e in + let msg = Utils.spf "Unhandled exception: %s" (Exception.to_string e) in + FlowExitStatus.(exit ~msg Unknown_error) (* If we haven't exited yet, let's exit now for logging's sake *) let _ = FlowExitStatus.(exit No_error) diff --git a/src/flow_dot_js.ml b/src/flow_dot_js.ml index 8cc2e7ac937..c49ec79b890 100644 --- a/src/flow_dot_js.ml +++ b/src/flow_dot_js.ml @@ -1,169 +1,222 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module Js = Js_of_ocaml.Js +module Sys_js = Js_of_ocaml.Sys_js + +let lazy_table_of_aloc _ = + lazy (failwith "Did not expect to encounter an abstract location in flow_dot_js") + let error_of_parse_error source_file (loc, err) = - let flow_err = Flow_error.EParseError (loc, err) in - Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err + Error_message.EParseError (ALoc.of_loc loc, err) + |> Flow_error.error_of_msg ~trace_reasons:[] ~source_file + |> Flow_error.concretize_error lazy_table_of_aloc + |> Flow_error.make_error_printable lazy_table_of_aloc let error_of_file_sig_error source_file e = - let flow_err = File_sig.(match e with - | IndeterminateModuleType loc -> Flow_error.EIndeterminateModuleType loc - ) in - Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err + File_sig.With_Loc.( + match e with + | IndeterminateModuleType loc -> Error_message.EIndeterminateModuleType (ALoc.of_loc loc)) + |> Flow_error.error_of_msg ~trace_reasons:[] ~source_file + |> Flow_error.concretize_error lazy_table_of_aloc + |> Flow_error.make_error_printable lazy_table_of_aloc let parse_content file content = - let parse_options = Some Parser_env.({ - (** - * Always parse ES proposal syntax. The user-facing config option to - * ignore/warn/enable them is handled during inference so that a clean error - * can be surfaced (rather than a more cryptic parse error). 
- *) - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = true; - use_strict = false; - }) in - let ast, parse_errors = + let parse_options = + Some + Parser_env. + { + enums = true; + (* + * Always parse ES proposal syntax. The user-facing config option to + * ignore/warn/enable them is handled during inference so that a clean error + * can be surfaced (rather than a more cryptic parse error). + *) + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types = true; + use_strict = false; + } + in + let (ast, parse_errors) = Parser_flow.program_file ~fail:false ~parse_options content (Some file) in if parse_errors <> [] then - let converted = List.fold_left (fun acc parse_error -> - Errors.ErrorSet.add (error_of_parse_error file parse_error) acc - ) Errors.ErrorSet.empty parse_errors in + let converted = + List.fold_left + (fun acc parse_error -> + Errors.ConcreteLocPrintableErrorSet.add (error_of_parse_error file parse_error) acc) + Errors.ConcreteLocPrintableErrorSet.empty + parse_errors + in Error converted else - match File_sig.program ~ast with - | Error e -> Error (Errors.ErrorSet.singleton (error_of_file_sig_error file e)) + match File_sig.With_Loc.program ~ast ~module_ref_prefix:None with + | Error e -> + Error (Errors.ConcreteLocPrintableErrorSet.singleton (error_of_file_sig_error file e)) | Ok fsig -> Ok (ast, fsig) -let array_of_list f lst = - Array.of_list (List.map f lst) +let array_of_list f lst = Array.of_list (List.map f lst) let rec js_of_json = function | Hh_json.JSON_Object props -> - let props = array_of_list (fun (k, v) -> k, js_of_json v) props in - Js.Unsafe.inject (Js.Unsafe.obj props) + let props = array_of_list (fun (k, v) -> (k, js_of_json v)) props in + Js.Unsafe.inject (Js.Unsafe.obj props) | Hh_json.JSON_Array items -> - let items = array_of_list js_of_json items in - Js.Unsafe.inject (Js.array items) - | Hh_json.JSON_String str -> - Js.Unsafe.inject (Js.string str) - | Hh_json.JSON_Number num -> - Js.Unsafe.inject (Js.number_of_float (float_of_string num)) - | Hh_json.JSON_Bool value -> - Js.Unsafe.inject (Js.bool value) - | Hh_json.JSON_Null -> - Js.Unsafe.inject Js.null - -let load_lib_files ~master_cx ~metadata files - save_parse_errors save_infer_errors save_suppressions save_lint_suppressions = + let items = array_of_list js_of_json items in + Js.Unsafe.inject (Js.array items) + | Hh_json.JSON_String str -> Js.Unsafe.inject (Js.string str) + | Hh_json.JSON_Number num -> Js.Unsafe.inject (Js.number_of_float (float_of_string num)) + | Hh_json.JSON_Bool value -> Js.Unsafe.inject (Js.bool value) + | Hh_json.JSON_Null -> Js.Unsafe.inject Js.null + +let load_lib_files + ~master_cx + ~metadata + files + save_parse_errors + save_infer_errors + save_suppressions + save_lint_suppressions = (* iterate in reverse override order *) - let _, result = List.rev files |> List.fold_left ( - - fun (exclude_syms, result) file -> - let lib_content = Sys_utils.cat file in - let lib_file = File_key.LibFile file in - match parse_content lib_file lib_content with - | Ok (ast, file_sig) -> - let sig_cx = Context.make_sig () in - let cx = Context.make sig_cx metadata lib_file Files.lib_module_ref in - Flow_js.mk_builtins 
cx; - let syms = Type_inference_js.infer_lib_file cx ast - ~exclude_syms ~file_sig ~lint_severities:LintSettings.empty_severities ~file_options:None - in - - Context.merge_into (Context.sig_cx master_cx) sig_cx; - - let () = - let from_t = Context.find_module master_cx Files.lib_module_ref in - let to_t = Context.find_module cx Files.lib_module_ref in - Flow_js.flow_t master_cx (from_t, to_t) - in - - let errors = Context.errors cx in - let suppressions = Context.error_suppressions cx in - let severity_cover = Context.severity_cover cx in - - Context.remove_all_errors cx; - Context.remove_all_error_suppressions cx; - Context.remove_all_lint_severities cx; - - save_infer_errors lib_file errors; - save_suppressions lib_file suppressions; - save_lint_suppressions lib_file severity_cover; - - (* symbols loaded from this file are suppressed + let (_, result) = + List.rev files + |> List.fold_left + (fun (exclude_syms, result) file -> + let lib_content = Sys_utils.cat file in + let lib_file = File_key.LibFile file in + match parse_content lib_file lib_content with + | Ok (ast, file_sig) -> + let sig_cx = Context.make_sig () in + let aloc_table = Utils_js.FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in + let cx = + Context.make + sig_cx + metadata + lib_file + aloc_table + rev_table + Files.lib_module_ref + Context.Checking + in + Flow_js.mk_builtins cx; + let syms = + Type_inference_js.infer_lib_file + cx + ast + ~exclude_syms + ~file_sig:(File_sig.abstractify_locs file_sig) + ~lint_severities:LintSettings.empty_severities + ~file_options:None + in + Context.merge_into (Context.sig_cx master_cx) sig_cx; + + let () = + let from_t = Context.find_module master_cx Files.lib_module_ref in + let to_t = Context.find_module cx Files.lib_module_ref in + Flow_js.flow_t master_cx (from_t, to_t) + in + let errors = Context.errors cx in + let suppressions = Context.error_suppressions cx in + let severity_cover = Context.severity_cover cx in + Context.remove_all_errors cx; + Context.remove_all_error_suppressions cx; + Context.remove_all_lint_severities cx; + + save_infer_errors lib_file errors; + save_suppressions lib_file suppressions; + save_lint_suppressions lib_file severity_cover; + + (* symbols loaded from this file are suppressed if found in later ones *) - let exclude_syms = SSet.union exclude_syms (SSet.of_list syms) in - let result = (lib_file, true) :: result in - exclude_syms, result - - | Error parse_errors -> - save_parse_errors lib_file parse_errors; - exclude_syms, ((lib_file, false) :: result) - - ) (SSet.empty, []) - - in result - -let stub_docblock = { Docblock. - flow = None; - typeAssert = false; - preventMunge = None; - providesModule = None; - isDeclarationFile = false; - jsx = None; -} - -let stub_metadata ~root ~checked = { Context. 
- (* local *) - checked; - munge_underscores = false; - verbose = None; - weak = false; - jsx = Options.Jsx_react; - strict = false; - strict_local = false; - - (* global *) - max_literal_length = 100; - enable_const_params = false; - enforce_strict_call_arity = true; - esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; - esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; - esproposal_decorators = Options.ESPROPOSAL_ENABLE; - esproposal_export_star_as = Options.ESPROPOSAL_ENABLE; - esproposal_optional_chaining = Options.ESPROPOSAL_ENABLE; - esproposal_nullish_coalescing = Options.ESPROPOSAL_ENABLE; - facebook_fbt = None; - ignore_non_literal_requires = false; - max_trace_depth = 0; - max_workers = 0; - root; - strip_root = true; - suppress_comments = []; - suppress_types = SSet.empty; -} + let exclude_syms = SSet.union exclude_syms (SSet.of_list syms) in + let result = (lib_file, true) :: result in + (exclude_syms, result) + | Error parse_errors -> + save_parse_errors lib_file parse_errors; + (exclude_syms, (lib_file, false) :: result)) + (SSet.empty, []) + in + result + +let stub_docblock = + { + Docblock.flow = None; + typeAssert = false; + preventMunge = None; + providesModule = None; + isDeclarationFile = false; + jsx = None; + } + +let stub_metadata ~root ~checked = + { + Context.checked (* local *); + munge_underscores = false; + verbose = None; + weak = false; + jsx = Options.Jsx_react; + strict = false; + strict_local = false; + include_suppressions = false; + (* global *) + max_literal_length = 100; + enable_const_params = false; + enable_enums = true; + enforce_strict_call_arity = true; + esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; + esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; + esproposal_decorators = Options.ESPROPOSAL_ENABLE; + esproposal_export_star_as = Options.ESPROPOSAL_ENABLE; + esproposal_optional_chaining = Options.ESPROPOSAL_ENABLE; + esproposal_nullish_coalescing = Options.ESPROPOSAL_ENABLE; + exact_by_default = false; + facebook_fbs = None; + facebook_fbt = None; + haste_module_ref_prefix = None; + ignore_non_literal_requires = false; + max_trace_depth = 0; + max_workers = 0; + recursion_limit = 10000; + root; + strip_root = true; + suppress_comments = []; + suppress_types = SSet.empty; + default_lib_dir = None; + trust_mode = Options.NoTrust; + type_asserts = false; + } let get_master_cx = let master_cx = ref None in fun root -> match !master_cx with - | Some (prev_root, cx) -> assert (prev_root = root); cx + | Some (prev_root, cx) -> + assert (prev_root = root); + cx | None -> let sig_cx = Context.make_sig () in - let cx = Context.make sig_cx - (stub_metadata ~root ~checked:false) - File_key.Builtins - Files.lib_module_ref in + let aloc_table = Utils_js.FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in + let cx = + Context.make + sig_cx + (stub_metadata ~root ~checked:false) + File_key.Builtins + aloc_table + rev_table + Files.lib_module_ref + Context.Checking + in Flow_js.mk_builtins cx; master_cx := Some (root, cx); cx @@ -172,16 +225,17 @@ let set_libs filenames = let root = Path.dummy_path in let master_cx = get_master_cx root in let metadata = stub_metadata ~root ~checked:true in - let _: (File_key.t * bool) list = load_lib_files - ~master_cx - ~metadata - filenames - (fun _file _errs -> ()) - (fun _file _errs -> ()) - (fun _file _sups -> ()) - (fun _file _lint -> ()) in - - Flow_js.Cache.clear(); + let (_ : (File_key.t * bool) list) = + load_lib_files + ~master_cx + 
~metadata + filenames + (fun _file _errs -> ()) + (fun _file _errs -> ()) + (fun _file _sups -> ()) + (fun _file _lint -> ()) + in + Flow_js.Cache.clear (); let reason = Reason.builtin_reason (Reason.RCustom "module") in let builtin_module = Obj_type.mk master_cx reason in Flow_js.flow_t master_cx (builtin_module, Flow_js.builtins master_cx); @@ -192,57 +246,107 @@ let infer_and_merge ~root filename ast file_sig = it relies on the JS version only supporting libs + 1 file, so every module you can require() must come from a lib; this skips resolving module names and just adds them all to the `decls` list. *) - Flow_js.Cache.clear(); + Flow_js.Cache.clear (); let metadata = stub_metadata ~root ~checked:true in let master_cx = get_master_cx root in - let require_loc_map = File_sig.(require_loc_map file_sig.module_sig) in - let reqs = SMap.fold (fun module_name locs reqs -> - let m = Modulename.String module_name in - let locs = locs |> Nel.to_list |> Utils_js.LocSet.of_list in - Merge_js.Reqs.add_decl module_name filename (locs, m) reqs - ) require_loc_map Merge_js.Reqs.empty in + let require_loc_map = File_sig.With_ALoc.(require_loc_map file_sig.module_sig) in + let reqs = + SMap.fold + (fun module_name locs reqs -> + let m = Modulename.String module_name in + let locs = locs |> Nel.to_list |> Loc_collections.ALocSet.of_list in + Merge_js.Reqs.add_decl module_name filename (locs, m) reqs) + require_loc_map + Merge_js.Reqs.empty + in let lint_severities = LintSettings.empty_severities in let strict_mode = StrictModeSettings.empty in let file_sigs = Utils_js.FilenameMap.singleton filename file_sig in - let cx, _other_cxs = Merge_js.merge_component_strict - ~metadata ~lint_severities ~file_options:None ~strict_mode ~file_sigs - ~get_ast_unsafe:(fun _ -> ast) - ~get_docblock_unsafe:(fun _ -> stub_docblock) - (Nel.one filename) reqs [] (Context.sig_cx master_cx) + let (_, _, comments) = ast in + let aloc_ast = Ast_loc_utils.loc_to_aloc_mapper#program ast in + let ((cx, tast, _), _other_cxs) = + Merge_js.merge_component + ~metadata + ~lint_severities + ~file_options:None + ~strict_mode + ~file_sigs + ~get_ast_unsafe:(fun _ -> (comments, aloc_ast)) + (* TODO (nmote, sainati) - Exceptions should mainly be used for exceptional code flows. We + * shouldn't use them to decide whether or not to use abstract locations. 
We should pass through + * whatever options we need instead *) + ~get_aloc_table_unsafe:(fun _ -> + raise (Parsing_heaps_exceptions.Sig_ast_ALoc_table_not_found "")) + ~get_docblock_unsafe:(fun _ -> stub_docblock) + ~phase:Context.Checking + (Nel.one filename) + reqs + [] + (Context.sig_cx master_cx) in - cx + (cx, tast) let check_content ~filename ~content = let stdin_file = Some (Path.make_unsafe filename, content) in let root = Path.dummy_path in let filename = File_key.SourceFile filename in - let errors, warnings = match parse_content filename content with - | Ok (ast, file_sig) -> - let cx, _ = infer_and_merge ~root filename ast file_sig in - let suppressions = Error_suppressions.empty in (* TODO: support suppressions *) - let errors, warnings, _, _ = Error_suppressions.filter_suppressed_errors - suppressions (Context.severity_cover cx) (Context.errors cx) ~unused:suppressions - in errors, warnings - | Error parse_errors -> - parse_errors, Errors.ErrorSet.empty + let (errors, warnings) = + match parse_content filename content with + | Ok (ast, file_sig) -> + let file_sig = File_sig.abstractify_locs file_sig in + let (cx, _) = infer_and_merge ~root filename ast file_sig in + let suppressions = Error_suppressions.empty in + (* TODO: support suppressions *) + let errors = Context.errors cx in + let severity_cover = Context.severity_cover cx in + let include_suppressions = Context.include_suppressions cx in + let aloc_tables = Utils_js.FilenameMap.empty in + let (errors, warnings, suppressions) = + Error_suppressions.filter_lints + ~include_suppressions + suppressions + errors + aloc_tables + severity_cover + in + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + let warnings = Flow_error.make_errors_printable lazy_table_of_aloc warnings in + let (errors, _, suppressions) = + Error_suppressions.filter_suppressed_errors + ~root + ~file_options:None + suppressions + errors + ~unused:suppressions + in + let (warnings, _, _) = + Error_suppressions.filter_suppressed_errors + ~root + ~file_options:None + suppressions + warnings + ~unused:suppressions + in + (errors, warnings) + | Error parse_errors -> (parse_errors, Errors.ConcreteLocPrintableErrorSet.empty) in let strip_root = Some root in Errors.Json_output.json_of_errors_with_context - ~strip_root ~stdin_file ~suppressed_errors:[] ~errors ~warnings () + ~strip_root + ~stdin_file + ~suppressed_errors:[] + ~errors + ~warnings + () |> js_of_json let check filename = let content = Sys_utils.cat filename in check_content ~filename ~content -let set_libs_js js_libs = - Js.to_array js_libs - |> Array.to_list - |> List.map (fun x -> Js.to_string x) - |> set_libs +let set_libs_js js_libs = Js.to_array js_libs |> Array.to_list |> List.map Js.to_string |> set_libs -let check_js js_file = - check (Js.to_string js_file) +let check_js js_file = check (Js.to_string js_file) let check_content_js js_file js_content = let filename = Js.to_string js_file in @@ -251,59 +355,67 @@ let check_content_js js_file js_content = let mk_loc file line col = { - Loc. 
- source = Some file; - start = { Loc.line; column = col; offset = 0; }; - _end = { Loc.line; column = col + 1; offset = 0; }; + Loc.source = Some file; + start = { Loc.line; column = col }; + _end = { Loc.line; column = col + 1 }; } let infer_type filename content line col = - let filename = File_key.SourceFile filename in - let root = Path.dummy_path in - match parse_content filename content with - | Error _ -> failwith "parse error" - | Ok (ast, file_sig) -> - let cx, typed_ast = infer_and_merge ~root filename ast file_sig in - let type_table = Context.type_table cx in - let file = Context.file cx in - let loc = mk_loc filename line col in Query_types.( - let result = type_at_pos_type ~full_cx:cx ~file ~file_sig ~expand_aliases:false - ~type_table ~typed_ast loc in - match result with - | FailureNoMatch -> Loc.none, Error "No match" - | FailureUnparseable (loc, _, _) -> loc, Error "Unparseable" - | Success (loc, t) -> - loc, Ok (Ty_printer.string_of_t ~force_single_line:true t) - ) + let filename = File_key.SourceFile filename in + let root = Path.dummy_path in + match parse_content filename content with + | Error _ -> failwith "parse error" + | Ok (ast, file_sig) -> + let file_sig = File_sig.abstractify_locs file_sig in + let (cx, typed_ast) = infer_and_merge ~root filename ast file_sig in + let file = Context.file cx in + let loc = mk_loc filename line col in + Query_types.( + let result = + type_at_pos_type + ~full_cx:cx + ~file + ~file_sig + ~expand_aliases:false + ~omit_targ_defaults:false + ~typed_ast + loc + in + (match result with + | FailureNoMatch -> (Loc.none, Error "No match") + | FailureUnparseable (loc, _, _) -> (loc, Error "Unparseable") + | Success (loc, t) -> (loc, Ok (Ty_printer.string_of_t ~force_single_line:true t)))) let types_to_json types ~strip_root = - let open Hh_json in - let open Reason in - let types_json = types |> List.map (fun (loc, str) -> - let json_assoc = ( - ("type", JSON_String str) :: - ("reasons", JSON_Array []) :: - ("loc", json_of_loc ~strip_root loc) :: - (Errors.deprecated_json_props_of_loc ~strip_root loc) - ) in - JSON_Object json_assoc - ) in - JSON_Array types_json + Hh_json.( + Reason.( + let types_json = + types + |> List.map (fun (loc, str) -> + let json_assoc = + ("type", JSON_String str) + :: ("reasons", JSON_Array []) + :: ("loc", json_of_loc ~strip_root ~offset_table:None loc) + :: Errors.deprecated_json_props_of_loc ~strip_root loc + in + JSON_Object json_assoc) + in + JSON_Array types_json)) let dump_types js_file js_content = - let filename = File_key.SourceFile (Js.to_string js_file) in - let root = Path.dummy_path in - let content = Js.to_string js_content in - match parse_content filename content with - | Error _ -> failwith "parse error" - | Ok (ast, file_sig) -> - let cx, _ = infer_and_merge ~root filename ast file_sig in - let printer = Ty_printer.string_of_t in - let types = Query_types.dump_types cx file_sig ~printer in - let strip_root = None in - let types_json = types_to_json types ~strip_root in - - js_of_json types_json + let filename = File_key.SourceFile (Js.to_string js_file) in + let root = Path.dummy_path in + let content = Js.to_string js_content in + match parse_content filename content with + | Error _ -> failwith "parse error" + | Ok (ast, file_sig) -> + let file_sig = File_sig.abstractify_locs file_sig in + let (cx, typed_ast) = infer_and_merge ~root filename ast file_sig in + let printer = Ty_printer.string_of_t in + let types = Query_types.dump_types ~printer cx file_sig typed_ast in + let strip_root = 
None in + let types_json = types_to_json types ~strip_root in + js_of_json types_json let type_at_pos js_file js_content js_line js_col = let filename = Js.to_string js_file in @@ -312,33 +424,34 @@ let type_at_pos js_file js_content js_line js_col = let col = Js.parseInt js_col in match infer_type filename content line col with | (_, Ok resp) -> resp - | (_, _) -> failwith "Error" + | (_, _) -> failwith "Error" let exports = - if (Js.Unsafe.js_expr "typeof exports !== 'undefined'") - then Js.Unsafe.js_expr "exports" - else begin + if Js.Unsafe.js_expr "typeof exports !== 'undefined'" then + Js.Unsafe.js_expr "exports" + else let exports = Js.Unsafe.obj [||] in Js.Unsafe.set Js.Unsafe.global "flow" exports; exports - end - -let () = Js.Unsafe.set exports "registerFile" ( - Js.wrap_callback (fun name content -> Sys_js.create_file ~name ~content) -) -let () = Js.Unsafe.set exports - "setLibs" (Js.wrap_callback set_libs_js) -let () = Js.Unsafe.set exports - "check" (Js.wrap_callback check_js) -let () = Js.Unsafe.set exports - "checkContent" (Js.wrap_callback check_content_js) -let () = Js.Unsafe.set exports - "dumpTypes" (Js.wrap_callback dump_types) -let () = Js.Unsafe.set exports - "jsOfOcamlVersion" (Js.string Sys_js.js_of_ocaml_version) -let () = Js.Unsafe.set exports - "flowVersion" (Js.string Flow_version.version) -let () = Js.Unsafe.set exports - "parse" (Js.wrap_callback Flow_parser_js.parse) -let () = Js.Unsafe.set exports - "typeAtPos" (Js.wrap_callback type_at_pos) + +let () = + Js.Unsafe.set + exports + "registerFile" + (Js.wrap_callback (fun name content -> Sys_js.create_file ~name ~content)) + +let () = Js.Unsafe.set exports "setLibs" (Js.wrap_callback set_libs_js) + +let () = Js.Unsafe.set exports "check" (Js.wrap_callback check_js) + +let () = Js.Unsafe.set exports "checkContent" (Js.wrap_callback check_content_js) + +let () = Js.Unsafe.set exports "dumpTypes" (Js.wrap_callback dump_types) + +let () = Js.Unsafe.set exports "jsOfOcamlVersion" (Js.string Sys_js.js_of_ocaml_version) + +let () = Js.Unsafe.set exports "flowVersion" (Js.string Flow_version.version) + +let () = Js.Unsafe.set exports "parse" (Js.wrap_callback Flow_parser_js.parse) + +let () = Js.Unsafe.set exports "typeAtPos" (Js.wrap_callback type_at_pos) diff --git a/src/flowlib/dune b/src/flowlib/dune new file mode 100644 index 00000000000..53cb10bd7e6 --- /dev/null +++ b/src/flowlib/dune @@ -0,0 +1,9 @@ +(library + (name flow_flowlib) + (wrapped false) + (libraries + sys_utils + ) + (preprocess (pps ppx_gen_flowlibs -- -flowlib ../../lib/ -prelude ../../prelude/)) + (preprocessor_deps (source_tree lib/) (source_tree prelude/)) +) diff --git a/src/flowlib/flowlib.ml b/src/flowlib/flowlib.ml index 6da069341b0..fb317fb34c3 100644 --- a/src/flowlib/flowlib.ml +++ b/src/flowlib/flowlib.ml @@ -1,18 +1,18 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) let contents no_flowlib = - if no_flowlib - then [%prelude_contents] - else [%flowlib_contents] + if no_flowlib then + [%prelude_contents] + else + [%flowlib_contents] let write_flowlib dir (filename, contents) = let file = Path.(concat dir filename |> to_string) in Sys_utils.write_file ~file contents -let extract_flowlib ~no_flowlib dir = - Array.iter (write_flowlib dir) (contents no_flowlib) +let extract_flowlib ~no_flowlib dir = Array.iter (write_flowlib dir) (contents no_flowlib) diff --git a/src/flowlib/flowlib.mli b/src/flowlib/flowlib.mli index e9cf99a5e7b..6976a0cf3d5 100644 --- a/src/flowlib/flowlib.mli +++ b/src/flowlib/flowlib.mli @@ -1,9 +1,10 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) val extract_flowlib : no_flowlib:bool -> Path.t -> unit + val contents : bool -> (string * string) array diff --git a/src/logging/dune b/src/logging/dune new file mode 100644 index 00000000000..a46a0b2629c --- /dev/null +++ b/src/logging/dune @@ -0,0 +1,34 @@ +(* -*- tuareg -*- *) + +let library_entry name suffix = + Printf.sprintf +"(library + (name flow_%s) + (wrapped false) + (modules) + (libraries flow_%s_%s))" name name suffix + +let fb_entry name = + library_entry name "fb" + +let stubs_entry name = + library_entry name "stubs" + +let entry is_fb name = + if is_fb then + fb_entry name + else + stubs_entry name + +let () = + (* test presence of fb subfolder *) + let current_dir = Sys.getcwd () in + (* we are in src/logging/, locate src/facebook *) + let src_dir = Filename.dirname current_dir in + let fb_dir = Filename.concat src_dir "facebook" in + (* locate src/facebook/dune *) + let fb_dune = Filename.concat fb_dir "dune" in + let is_fb = Sys.file_exists fb_dune in + let logging = entry is_fb "logging" in + let logging_lwt = entry is_fb "logging_lwt" in + Jbuild_plugin.V1.send (String.concat "\n\n" [logging; logging_lwt]) diff --git a/src/lsp/__tests__/lspErrors_test.ml b/src/lsp/__tests__/lspErrors_test.ml new file mode 100644 index 00000000000..0ce37b1e025 --- /dev/null +++ b/src/lsp/__tests__/lspErrors_test.ml @@ -0,0 +1,652 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +type error = { + uri: string; + kind: string; + msg: string; +} + +let mk_clear_error uri = { uri; kind = ""; msg = "Errors cleared for file" } + +(* Build a mock lsp diagnostic *) +let mk_diagnostic { uri = _; kind; msg } = + Lsp.( + PublishDiagnostics. 
+ { + range = { start = { line = 10; character = 20 }; end_ = { line = 10; character = 30 } }; + severity = Some Lsp.PublishDiagnostics.Error; + code = StringCode kind; + source = Some "Flow"; + message = msg; + relatedInformation = []; + relatedLocations = []; + }) + +(* Take the json output and convert it back into a list of errors *) +let error_list_of_json_response json = + Hh_json.( + match json with + | JSON_Object + [ + ("jsonrpc", JSON_String "2.0"); + ("method", JSON_String "textDocument/publishDiagnostics"); + ( "params", + JSON_Object [("uri", JSON_String uri); ("diagnostics", JSON_Array diagnostics)] ); + ] -> + begin + match diagnostics with + | [] -> [mk_clear_error uri] + | _ -> + List.map + (function + | JSON_Object + [ + ("range", _); + ("severity", _); + ("code", JSON_String kind); + ("source", JSON_String "Flow"); + ("message", JSON_String msg); + ("relatedInformation", _); + ("relatedLocations", _); + ] -> + { uri; kind; msg } + | _ -> assert_failure "Diagnostic JSON doesn't match expected format") + diagnostics + end + | _ -> assert_failure "JSON output doesn't match expected format") + +(* Pretty print a list of errors *) +let printer errors = + errors + |> List.map (fun { uri; kind; msg } -> Printf.sprintf " %s: %s: %s," uri kind msg) + |> String.concat "\n" + |> Printf.sprintf "\n[\n%s\n]" + +(* Wraps some lspErrors calls and records what json is sent. Then asserts that all the expected + * errors were sent *) +let with_assert_errors_match ~ctxt ~reason ~expected f = + let actual = ref [] in + let ret = f (fun json -> actual := List.rev_append (error_list_of_json_response json) !actual) in + let actual = List.rev !actual in + let sort = List.sort Pervasives.compare in + assert_equal ~ctxt ~printer ~msg:reason (sort expected) (sort actual); + ret + +(* Assert that NO json is sent. 
This is different than asserting that we sent a list of 0 errors *) +let assert_no_send ~reason _ = + assert_failure (Printf.sprintf "Expected no send, but got a send for %S" reason) + +(* Given an error list, group it by uri and convert to diagnostics *) +let smap_of_error_list error_list = + List.fold_right + (fun error map -> + let existing = + match SMap.get error.uri map with + | None -> [] + | Some existing -> existing + in + SMap.add error.uri (mk_diagnostic error :: existing) map) + error_list + SMap.empty + +let path_to_foo = "/path/to/foo.js" + +let path_to_bar = "/path/to/bar.js" + +let foo_infer_error_1 = { uri = path_to_foo; kind = "InferError"; msg = "Your code is broken 1" } + +let foo_infer_error_2 = { uri = path_to_foo; kind = "InferError"; msg = "Your code is broken 2" } + +let foo_parse_error_1 = { uri = path_to_foo; kind = "ParseError"; msg = "Your code no parse 1" } + +let foo_parse_error_2 = { uri = path_to_foo; kind = "ParseError"; msg = "Your code no parse 2" } + +let bar_infer_error_1 = { uri = path_to_bar; kind = "InferError"; msg = "Your code is broken 1" } + +let bar_infer_error_2 = { uri = path_to_bar; kind = "InferError"; msg = "Your code is broken 2" } + +let bar_parse_error_1 = { uri = path_to_bar; kind = "ParseError"; msg = "Your code no parse 1" } + +let bar_parse_error_2 = { uri = path_to_bar; kind = "ParseError"; msg = "Your code no parse 2" } + +let clearing_errors_from_empty_is_a_no_op _ctxt = + let reason = "Clearing all the errors when there are no errors should not send anything" in + LspErrors.empty |> LspErrors.clear_all_errors_and_send (assert_no_send ~reason) |> ignore + +let clear_all_live_errors_and_send ctxt = + let reason = + "Clearing the live errors from a file with no live errors should not send anything" + in + let errors = + LspErrors.empty + |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + let reason = "Setting the live parse errors for foo.js to 0 errors won't trigger a send" in + let errors = + errors |> LspErrors.set_live_parse_errors_and_send (assert_no_send ~reason) path_to_foo [] + in + let reason = "Setting server errors to 0 streamed errors should not send anything" in + let errors = + errors + |> LspErrors.add_streamed_server_errors_and_send + (assert_no_send ~reason) + (SMap.add path_to_foo [] SMap.empty) + in + let reason = "Clearing the live errors from a file with 0 streamed errors should not send" in + let errors = + errors |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + let reason = + "Setting the live parse errors for foo.js to 0 errors again should trigger a send" + in + let errors = + errors |> LspErrors.set_live_parse_errors_and_send (assert_no_send ~reason) path_to_foo [] + in + let reason = "Setting server errors to be 1 streamed non-parse error should send that error" in + let expected = [foo_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Clearing the live errors from a file with 0 streamed parse errors should not send" + in + let errors = + errors |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + let reason = + "Setting the live parse errors for foo.js to 0 errors one more time should trigger a send" + in + let errors = + errors |> LspErrors.set_live_parse_errors_and_send (assert_no_send ~reason) path_to_foo [] + in + let reason = + 
"Adding a parse error to server errors should not trigger a send since we have live parse " + ^ "errors" + in + let to_send = [foo_parse_error_1] in + let errors = + errors + |> LspErrors.add_streamed_server_errors_and_send + (assert_no_send ~reason) + (smap_of_error_list to_send) + in + let reason = + "Clearing the live errors from a file with 1 streamed parse errors will trigger a send" + in + let expected = [foo_parse_error_1; foo_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_live_errors_and_send send path_to_foo) + in + ignore errors + +let finalized_errors_in_isolation ctxt = + let reason = + "Setting the finalized server errors to empty when there already were no errors should " + ^ "not send anything" + in + let errors = + LspErrors.empty + |> LspErrors.set_finalized_server_errors_and_send + (assert_no_send ~reason) + (smap_of_error_list []) + in + let reason = + "Setting finalized server errors when there were no errors before should send all the " + ^ "finalized errors" + in + let expected = [foo_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = "Setting the exact same finalized server errors again will resend all errors" in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Setting the finalized server errors to be 0 errors will clear errors for all files" + in + let expected = [mk_clear_error path_to_foo; mk_clear_error path_to_bar] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list [])) + in + let reason = "Putting an error in each file again" in + let expected = [foo_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Setting the finalized server errors to only include foo.js will clear the errors for bar.js" + in + let expected = [foo_infer_error_1; mk_clear_error path_to_bar] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.set_finalized_server_errors_and_send + send + (smap_of_error_list [foo_infer_error_1])) + in + let reason = "Putting an error in each file again" in + let expected = [foo_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = "Clearing all errors will clear errors for all files" in + let expected = [mk_clear_error path_to_foo; mk_clear_error path_to_bar] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_errors_and_send send) + in + ignore errors + +let streamed_errors_in_isolation ctxt = + let reason = + "Streaming in 0 server errors when there were no errors before will not do anything" + in + let errors = + LspErrors.empty + |> LspErrors.add_streamed_server_errors_and_send + (assert_no_send ~reason) + (smap_of_error_list []) + in + let reason = + "Streaming in server 
errors when there were no errors before should send all the " + ^ "newly streamed errors" + in + let expected = [foo_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Streaming in the same server errors again will again resend the errors for those files, " + ^ "but now each error will appear twice" + in + let expected = [foo_infer_error_1; foo_infer_error_1; bar_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.add_streamed_server_errors_and_send + send + (smap_of_error_list [foo_infer_error_1; bar_infer_error_1])) + in + let reason = + "Streaming in 1 more error for foo.js will cause all of foo's errors to be resent" + in + let expected = [foo_infer_error_1; foo_infer_error_1; foo_infer_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.add_streamed_server_errors_and_send + send + (smap_of_error_list [foo_infer_error_2])) + in + let reason = "Streaming in 0 errors will not trigger a send" in + let errors = + errors + |> LspErrors.add_streamed_server_errors_and_send + (assert_no_send ~reason) + (SMap.add path_to_foo [] SMap.empty) + in + let reason = "Clearing all errors will clear errors for all files" in + let expected = [mk_clear_error path_to_foo; mk_clear_error path_to_bar] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_errors_and_send send) + in + ignore errors + +let streamed_and_finalized_server_errors ctxt = + let reason = + "Setting finalized server errors when there were no errors before should send all the " + ^ "finalized errors" + in + let expected = [foo_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + LspErrors.empty + |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Sending streamed errors for foo.js should replace the finalized errors for that file" + in + let expected = [foo_infer_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Sending finalized errors for foo.js should replace the streamed errors and the old " + ^ "finalized errors" + in + let expected = [bar_infer_error_2; mk_clear_error path_to_foo] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.set_finalized_server_errors_and_send + send + (smap_of_error_list [bar_infer_error_2])) + in + ignore errors + +let live_parse_errors_override_finalized_errors ctxt = + let reason = + "Setting parse errors to 0 parse errors when there are no server errors doesn't trigger a " + ^ "send" + in + let errors = + LspErrors.empty + |> LspErrors.set_live_parse_errors_and_send (assert_no_send ~reason) path_to_foo [] + in + let reason = + "Setting finalized server errors when there were no errors before should send all the " + ^ "finalized errors" + in + let expected = [foo_infer_error_1; foo_parse_error_1; bar_infer_error_1; bar_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send 
(smap_of_error_list expected)) + in + let reason = + "Setting live parse errors for foo.js will replace the known parse errors for that file" + in + let expected = [foo_infer_error_1; foo_parse_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.set_live_parse_errors_and_send + send + path_to_foo + [mk_diagnostic foo_parse_error_2]) + in + let reason = "Setting finalized server errors will still use the live parse errors" in + let to_set = [foo_infer_error_2; foo_parse_error_1; bar_infer_error_1; bar_parse_error_1] in + let expected = [foo_infer_error_2; foo_parse_error_2; bar_infer_error_1; bar_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list to_set)) + in + let reason = + "Clearing the live parse errors for foo.js will resend the server errors for that file" + in + let expected = [foo_infer_error_2; foo_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_live_errors_and_send send path_to_foo) + in + let reason = "Clearing live errors again won't do anything" in + let errors = + errors |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + let reason = + "Setting finalized server errors to only non-parse errors for foo.js and some parse errors " + ^ "for bar.js" + in + let expected = [foo_infer_error_1; bar_infer_error_1; bar_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Setting live parse errors for foo.js to [] won't trigger a send since there are no server " + ^ "parse errors for foo.js" + in + let errors = + errors |> LspErrors.set_live_parse_errors_and_send (assert_no_send ~reason) path_to_foo [] + in + let reason = + "Setting live parse errors for bar.js to [] will trigger a send since there are server " + ^ "parse errors for bar.js" + in + let expected = [bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_live_parse_errors_and_send send path_to_bar []) + in + ignore errors + +let live_parse_errors_override_streamed_errors ctxt = + let reason = + "Setting streamed server errors when there were no errors before should send all the " + ^ "streamed errors" + in + let expected = [foo_infer_error_1; foo_parse_error_1; bar_infer_error_1; bar_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + LspErrors.empty + |> LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Setting live parse errors for foo.js will replace the known parse errors for that file" + in + let expected = [foo_infer_error_1; foo_parse_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.set_live_parse_errors_and_send + send + path_to_foo + [mk_diagnostic foo_parse_error_2]) + in + let reason = "Streaming in parse and type errors will ignore the parse errors for now" in + let to_send = [foo_infer_error_2; foo_parse_error_1] in + let expected = [foo_infer_error_1; foo_infer_error_2; foo_parse_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> 
LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list to_send)) + in + let reason = + "Clearing the live parse errors for foo.js will resend the server errors for that file" + in + let expected = [foo_infer_error_1; foo_infer_error_2; foo_parse_error_1; foo_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_live_errors_and_send send path_to_foo) + in + let reason = "Clearing live errors again won't do anything" in + let errors = + errors |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + ignore errors + +let live_non_parse_errors_override_finalized_errors ctxt = + let reason = + "Setting live errors to 0 live errors when there are no server errors doesn't trigger a " + ^ "send" + in + let errors = + LspErrors.empty + |> LspErrors.set_live_non_parse_errors_and_send + (assert_no_send ~reason) + (SMap.add path_to_foo [] SMap.empty) + in + let reason = + "Setting finalized server errors when there were no errors before should send all the " + ^ "finalized errors" + in + let expected = [foo_infer_error_1; bar_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Setting live non_parse errors for foo.js will replace the server non_parse errors for " + ^ "that file" + in + let to_send = [foo_parse_error_1; foo_infer_error_2] in + let expected = [foo_infer_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_live_non_parse_errors_and_send send (smap_of_error_list to_send)) + in + let reason = + "Setting finalized server errors will still use the live errors for foo.js and the server " + ^ "errors for bar.js. 
But since the live errors aren't changing, we won't resend errors for " + ^ "foo.js" + in + let to_set = [foo_infer_error_1; bar_infer_error_2] in + let expected = [bar_infer_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_finalized_server_errors_and_send send (smap_of_error_list to_set)) + in + let reason = "Clearing the live errors for foo.js will resend the server errors for that file" in + let expected = [foo_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_live_errors_and_send send path_to_foo) + in + let reason = "Clearing live errors again won't do anything" in + let errors = + errors |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + ignore errors + +let live_non_parse_errors_override_streamed_errors ctxt = + let reason = + "Setting streamed server errors when there were no errors before should send all the " + ^ "streamed errors" + in + let expected = [foo_infer_error_1; foo_parse_error_1; bar_infer_error_1; bar_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + LspErrors.empty + |> LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list expected)) + in + let reason = + "Setting live non_parse errors for foo.js will replace the server non_parse errors for " + ^ "that file" + in + let to_send = [foo_parse_error_2; foo_infer_error_2] in + let expected = [foo_parse_error_1; foo_infer_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_live_non_parse_errors_and_send send (smap_of_error_list to_send)) + in + let reason = "Streaming in more non_parse errors for foo.js will not trigger a send" in + let to_send = [foo_infer_error_1] in + let errors = + errors + |> LspErrors.add_streamed_server_errors_and_send + (assert_no_send ~reason) + (smap_of_error_list to_send) + in + let reason = "Streaming in a parse error for foo.js will trigger a send" in + let to_send = [foo_parse_error_1] in + let expected = [foo_infer_error_2; foo_parse_error_1; foo_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.add_streamed_server_errors_and_send send (smap_of_error_list to_send)) + in + let reason = + "Clearing the live parse errors for foo.js will resend the server errors for that file" + in + let expected = [foo_infer_error_1; foo_infer_error_1; foo_parse_error_1; foo_parse_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.clear_all_live_errors_and_send send path_to_foo) + in + let reason = "Clearing live errors again won't do anything" in + let errors = + errors |> LspErrors.clear_all_live_errors_and_send (assert_no_send ~reason) path_to_foo + in + ignore errors + +let live_parse_errors_and_live_non_parse_errors ctxt = + let reason = + "Setting live non-parse errors for foo.js will replace all the server errors for that file " + ^ "and filter out parse errors" + in + let to_send = [foo_infer_error_1; foo_parse_error_1] in + let expected = [foo_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + LspErrors.empty + |> LspErrors.set_live_non_parse_errors_and_send send (smap_of_error_list to_send)) + in + let reason = + "Setting live parse errors for foo.js will add to the live non-parse errors and filter out " + ^ 
"non-parse errors" + in + let to_send = [foo_infer_error_2; foo_parse_error_2] in + let expected = [foo_infer_error_1; foo_parse_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors + |> LspErrors.set_live_parse_errors_and_send + send + path_to_foo + (List.map mk_diagnostic to_send)) + in + let reason = + "Setting 0 live parse errors for foo.js will strip out the parse error that live errors shows" + in + let expected = [foo_infer_error_1] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_live_parse_errors_and_send send path_to_foo []) + in + let reason = "Setting live errors for foo.js will not affect the live parse errors shown" in + let to_send = [foo_infer_error_2; foo_parse_error_2] in + let expected = [foo_infer_error_2] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_live_non_parse_errors_and_send send (smap_of_error_list to_send)) + in + let reason = + "Setting live errors for foo.js with ONLY parse errors when there are 0 live parse errors " + ^ "will clear the file" + in + let to_send = [foo_parse_error_2] in + let expected = [mk_clear_error path_to_foo] in + let errors = + with_assert_errors_match ~ctxt ~reason ~expected (fun send -> + errors |> LspErrors.set_live_non_parse_errors_and_send send (smap_of_error_list to_send)) + in + let reason = + "Setting live errors for foo.js with ONLY parse errors should NOT trigger a send if modulo " + ^ "parse errors there are 0 live errors before and 0 live errors after" + in + let to_send = [foo_parse_error_2] in + let errors = + errors + |> LspErrors.set_live_non_parse_errors_and_send + (assert_no_send ~reason) + (smap_of_error_list to_send) + in + ignore errors + +let tests = + "LwtErrors" + >::: [ + "clearing_errors_from_empty_is_a_no_op" >:: clearing_errors_from_empty_is_a_no_op; + "clear_all_live_errors_and_send" >:: clear_all_live_errors_and_send; + "finalized_errors_in_isolation" >:: finalized_errors_in_isolation; + "streamed_errors_in_isolation" >:: streamed_errors_in_isolation; + "streamed_and_finalized_server_errors" >:: streamed_and_finalized_server_errors; + "live_parse_errors_override_finalized_errors" + >:: live_parse_errors_override_finalized_errors; + "live_parse_errors_override_streamed_errors" + >:: live_parse_errors_override_streamed_errors; + "live_non_parse_errors_override_finalized_errors" + >:: live_non_parse_errors_override_finalized_errors; + "live_non_parse_errors_override_streamed_errors" + >:: live_non_parse_errors_override_streamed_errors; + "live_parse_errors_and_live_non_parse_errors" + >:: live_parse_errors_and_live_non_parse_errors; + ] + +let () = run_test_tt_main tests diff --git a/src/lsp/dune b/src/lsp/dune new file mode 100644 index 00000000000..d8f94eabb27 --- /dev/null +++ b/src/lsp/dune @@ -0,0 +1,11 @@ +(library + (name flow_lsp) + (wrapped false) + (libraries + flow_commands_utils + flow_common_lsp_conversions + flow_exit_status + flow_logging_lwt + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/lsp/flowLsp.ml b/src/lsp/flowLsp.ml new file mode 100644 index 00000000000..d6f718c29b7 --- /dev/null +++ b/src/lsp/flowLsp.ml @@ -0,0 +1,2067 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open CommandUtils +open Lsp +open Lsp_fmt +module List = Core_list + +(************************************************************************) +(** Protocol orchestration & helpers **) + +(************************************************************************) + +(* LSP exit codes are specified at https://microsoft.github.io/language-server-protocol/specification#exit *) +let lsp_exit_ok () = exit 0 + +let lsp_exit_bad () = exit 1 + +(* Given an ID that came from the server, we have to wrap it when we pass it *) +(* on to the client, to encode which instance of the server it came from. *) +(* That way, if a response comes back later from the client after the server *) +(* has died, we'll know to discard it. We wrap it as "serverid:#id" for *) +(* numeric ids, and "serverid:'id" for strings. *) +type wrapped_id = { + server_id: int; + message_id: lsp_id; +} + +let encode_wrapped (wrapped_id : wrapped_id) : lsp_id = + let { server_id; message_id } = wrapped_id in + match message_id with + | NumberId id -> StringId (Printf.sprintf "%d:#%d" server_id id) + | StringId id -> StringId (Printf.sprintf "%d:'%s" server_id id) + +let decode_wrapped (lsp : lsp_id) : wrapped_id = + let s = + match lsp with + | NumberId _ -> failwith "not a wrapped id" + | StringId s -> s + in + let icolon = String.index s ':' in + let server_id = int_of_string (String.sub s 0 icolon) in + let id = String.sub s (icolon + 1) (String.length s - icolon - 1) in + let message_id = + if s.[icolon + 1] = '#' then + NumberId (int_of_string id) + else + StringId id + in + { server_id; message_id } + +module WrappedKey = struct + type t = wrapped_id + + let compare (x : t) (y : t) = + if x.server_id <> y.server_id then + IntKey.compare x.server_id y.server_id + else + IdKey.compare x.message_id y.message_id +end + +module WrappedMap = MyMap.Make (WrappedKey) + +type server_conn = { + ic: Timeout.in_channel; + oc: out_channel; +} + +type show_status_t = + | Never_shown + | Shown of lsp_id option * ShowStatus.showStatusParams + +(* Shown (Some id, params) -- means it is currently shown *) +(* Shown (None, params) - means it was shown but user dismissed it *) + +type open_file_info = { + (* o_open_doc is guaranteed to be up-to-date with respect to the editor *) + o_open_doc: Lsp.TextDocumentItem.t; + (* o_ast, if present, is guaranteed to be up-to-date. It gets computed lazily. *) + o_ast: ((Loc.t, Loc.t) Flow_ast.program * Lsp.PublishDiagnostics.diagnostic list option) option; + (* o_unsaved if true means that this open file has unsaved changes to the buffer. 
*) + o_unsaved: bool; +} + +type initialized_env = { + i_initialize_params: Lsp.Initialize.params; + i_connect_params: connect_params; + i_root: Path.t; + i_version: string option; + i_server_id: int; + i_can_autostart_after_version_mismatch: bool; + i_outstanding_local_handlers: state lsp_handler IdMap.t; + i_outstanding_local_requests: lsp_request IdMap.t; + i_outstanding_requests_from_server: Lsp.lsp_request WrappedMap.t; + i_isConnected: bool; + (* what we've told the client about our connection status *) + i_status: show_status_t; + i_open_files: open_file_info SMap.t; + i_errors: LspErrors.t; +} + +and disconnected_env = { + d_ienv: initialized_env; + d_autostart: bool; + d_server_status: (ServerStatus.status * FileWatcherStatus.status) option; +} + +and connected_env = { + c_ienv: initialized_env; + c_conn: server_conn; + c_server_status: ServerStatus.status * FileWatcherStatus.status option; + c_recent_summaries: (float * ServerStatus.summary) list; + (* newest at head of list *) + c_about_to_exit_code: FlowExitStatus.t option; + (* stateful handling of Errors+status from server... *) + c_is_rechecking: bool; + c_lazy_stats: ServerProt.Response.lazy_stats option; + (* if server gets disconnected, we will tidy up these things... *) + c_outstanding_requests_to_server: Lsp.IdSet.t; +} + +and state = + (* Pre_init: we haven't yet received the initialize request. *) + | Pre_init of connect_params + (* Disconnected: we'll attempt to reconnect once a tick. *) + | Disconnected of disconnected_env + (* Main_loop: we have a working connection to both server and client. *) + | Connected of connected_env + (* Post_shutdown: we received the shutdown request. *) + | Post_shutdown + +exception Client_fatal_connection_exception of Marshal_tools.remote_exception_data + +exception Client_recoverable_connection_exception of Marshal_tools.remote_exception_data + +exception Server_fatal_connection_exception of Marshal_tools.remote_exception_data + +type event = + | Server_message of LspProt.message_from_server + | Client_message of Lsp.lsp_message * LspProt.metadata + | Tick + +(* once per second, on idle *) + +let string_of_state (state : state) : string = + match state with + | Pre_init _ -> "Pre_init" + | Disconnected _ -> "Disconnected" + | Connected _ -> "Connected" + | Post_shutdown -> "Post_shutdown" + +let denorm_string_of_event (event : event) : string = + match event with + | Server_message response -> + Printf.sprintf "Server_message(%s)" (LspProt.string_of_message_from_server response) + | Client_message (c, _) -> + Printf.sprintf "Client_message(%s)" (Lsp_fmt.denorm_message_to_string c) + | Tick -> "Tick" + +let to_stdout (json : Hh_json.json) : unit = + (* Extra \r\n purely for easier logfile reading; not required by protocol. 
*) + let s = Hh_json.json_to_string json ^ "\r\n\r\n" in + Http_lite.write_message stdout s + +let get_current_version flowconfig_name (root : Path.t) : string option = + Server_files_js.config_file flowconfig_name root + |> read_config_or_exit ~allow_cache:false + |> FlowConfig.required_version + +let is_lazy_mode_set_in_flowconfig flowconfig_name (root : Path.t) : bool = + let lazy_mode = + Server_files_js.config_file flowconfig_name root + |> read_config_or_exit ~allow_cache:false + |> FlowConfig.lazy_mode + in + lazy_mode <> None + +let get_root (state : state) : Path.t option = + match state with + | Connected cenv -> Some cenv.c_ienv.i_root + | Disconnected denv -> Some denv.d_ienv.i_root + | _ -> None + +let get_open_files (state : state) : open_file_info SMap.t option = + match state with + | Connected cenv -> Some cenv.c_ienv.i_open_files + | Disconnected denv -> Some denv.d_ienv.i_open_files + | _ -> None + +let update_open_file (uri : string) (open_file_info : open_file_info option) (state : state) : + state = + let update_ienv ienv = + match open_file_info with + | Some open_file_info -> + { ienv with i_open_files = SMap.add uri open_file_info ienv.i_open_files } + | None -> { ienv with i_open_files = SMap.remove uri ienv.i_open_files } + in + match state with + | Connected cenv -> Connected { cenv with c_ienv = update_ienv cenv.c_ienv } + | Disconnected denv -> Disconnected { denv with d_ienv = update_ienv denv.d_ienv } + | _ -> failwith ("client shouldn't be updating files in state " ^ string_of_state state) + +let update_errors f state = + match state with + | Connected cenv -> + Connected { cenv with c_ienv = { cenv.c_ienv with i_errors = f cenv.c_ienv.i_errors } } + | Disconnected denv -> + Disconnected { denv with d_ienv = { denv.d_ienv with i_errors = f denv.d_ienv.i_errors } } + | Pre_init _ + | Post_shutdown -> + state + +let new_metadata (state : state) (message : Jsonrpc.message) : LspProt.metadata = + let (start_lsp_state, start_lsp_state_reason, start_server_status, start_watcher_status) = + match state with + | Connected { c_server_status = (s, w); _ } -> (None, None, Some s, w) + | Disconnected { d_server_status = Some (s, w); _ } -> + (Some (string_of_state state), None, Some s, Some w) + | Disconnected { d_server_status = None; d_ienv; _ } -> + (Some (string_of_state state), Some d_ienv.i_status, None, None) + | _ -> (Some (string_of_state state), None, None, None) + in + let start_lsp_state_reason = + match start_lsp_state_reason with + | None + | Some Never_shown -> + None + | Some (Shown (_, params)) -> Some params.ShowStatus.request.ShowMessageRequest.message + in + { + LspProt.empty_metadata with + LspProt.start_wall_time = message.Jsonrpc.timestamp; + start_server_status; + start_watcher_status; + start_json_truncated = + Hh_json.json_truncate message.Jsonrpc.json ~max_string_length:256 ~max_child_count:4; + start_lsp_state; + start_lsp_state_reason; + lsp_method_name = Jsonrpc.(message.method_); + } + +let edata_of_exception exn = + let message = Exception.get_ctor_string exn in + let stack = Exception.get_backtrace_string exn in + { Marshal_tools.message; stack } + +let selectively_omit_errors (request_name : string) (response : lsp_message) = + match response with + | ResponseMessage (id, ErrorResult _) -> + let new_response = + match request_name with + (* Autocomplete requests are rarely manually-requested, so let's suppress errors from them to + * avoid spamming users if something isn't working. 
Once we reduce the error rate, we can undo + * this, but right now there are some known problems with the code in `members.ml` that often + * lead to errors. Once we migrate off of that, we will likely be able to display errors to + * users without degrading the experience. + * + * Another option would be to inspect the `completionTriggerKind` field, but `Invoked` doesn't + * actually mean that it was manually invoked. Typing an identifier char also results in an + * `Invoked` trigger. + * + * See https://microsoft.github.io/language-server-protocol/specification#textDocument_completion + *) + | "textDocument/completion" -> + Some Completion.(CompletionResult { isIncomplete = false; items = [] }) + (* Like autocomplete requests, users rarely request these explicitly. The IDE sends them when + * people are simply moving the cursor around. For the same reasons, let's suppress errors here + * for now. *) + | "textDocument/documentHighlight" -> Some (DocumentHighlightResult []) + | _ -> None + in + Option.map ~f:(fun response -> ResponseMessage (id, response)) new_response + |> Option.value ~default:response + | _ -> response + +let get_next_event_from_server (fd : Unix.file_descr) : event = + let r = + try Server_message (Marshal_tools.from_fd_with_preamble fd) + with e -> + let e = Exception.wrap e in + let edata = edata_of_exception e in + raise (Server_fatal_connection_exception edata) + in + (* The server sends an explicit 'EOF' message in case the underlying *) + (* transport protocol doesn't result in EOF normally. We'll respond *) + (* to it by synthesizing the EOF exception we'd otherwise get. *) + if r = Server_message LspProt.(NotificationFromServer EOF) then + let stack = Exception.get_current_callstack_string 100 in + raise (Server_fatal_connection_exception { Marshal_tools.message = "End_of_file"; stack }) + else + r + +let get_next_event_from_client + (state : state) (client : Jsonrpc.queue) (parser : Jsonrpc.message -> Lsp.lsp_message) : + event Lwt.t = + let%lwt message = Jsonrpc.get_message client in + match message with + | `Message message -> Lwt.return (Client_message (parser message, new_metadata state message)) + | `Fatal_exception edata -> raise (Client_fatal_connection_exception edata) + | `Recoverable_exception edata -> raise (Client_recoverable_connection_exception edata) + +let get_next_event + (state : state) (client : Jsonrpc.queue) (parser : Jsonrpc.message -> Lsp.lsp_message) : + event Lwt.t = + if Jsonrpc.has_message client then + get_next_event_from_client state client parser + else + let client_fd = Jsonrpc.get_read_fd client in + match state with + | Connected { c_conn; _ } -> + let server_fd = Timeout.descr_of_in_channel c_conn.ic in + let (fds, _, _) = + try Unix.select [server_fd; client_fd] [] [] 1.0 + with Unix.Unix_error (Unix.EBADF, _, _) as e -> + (* Either the server died or the Jsonrpc died. 
Figure out which one *) + let exn = Exception.wrap e in + let edata = edata_of_exception exn in + let server_died = + try + let _ = Unix.select [client_fd] [] [] 0.0 in + false + with Unix.Unix_error (Unix.EBADF, _, _) -> true + in + if server_died then + raise (Server_fatal_connection_exception edata) + else + raise (Client_fatal_connection_exception edata) + in + if fds = [] then + Lwt.return Tick + else if List.mem fds server_fd then + Lwt.return (get_next_event_from_server server_fd) + else + let%lwt event = get_next_event_from_client state client parser in + Lwt.return event + | _ -> + let (fds, _, _) = + try Unix.select [client_fd] [] [] 1.0 + with Unix.Unix_error (Unix.EBADF, _, _) as e -> + (* Jsonrpc process died. This is unrecoverable *) + let exn = Exception.wrap e in + let edata = edata_of_exception exn in + raise (Client_fatal_connection_exception edata) + in + if fds = [] then + Lwt.return Tick + else + let%lwt event = get_next_event_from_client state client parser in + Lwt.return event + +let show_status + ?(titles = []) + ?(handler = (fun _title state -> state)) + ~(type_ : MessageType.t) + ~(message : string) + ~(shortMessage : string option) + ~(progress : int option) + ~(total : int option) + (ienv : initialized_env) : initialized_env = + ShowStatus.( + ShowMessageRequest.( + MessageType.( + let use_status = Lsp_helpers.supports_status ienv.i_initialize_params in + let actions = List.map titles ~f:(fun title -> { title }) in + let params = { request = { type_; message; actions }; shortMessage; progress; total } in + (* What should we display/hide? It's a tricky question... *) + let (will_dismiss_old, will_show_new) = + match (use_status, ienv.i_status, params) with + (* If the new status is identical to the old, then no-op *) + | (_, Shown (_, existingParams), params) when existingParams = params -> (false, false) + (* If the client supports status reporting, then we'll blindly send everything *) + | (true, _, _) -> (false, true) + (* If the client only supports dialog boxes, then we'll be very limited: *) + (* only every display failures; and if there was already an error up even *) + (* a different one then leave it undisturbed. 
*) + | ( false, + Shown (_, { request = { type_ = ErrorMessage; _ }; _ }), + { request = { type_ = ErrorMessage; _ }; _ } ) -> + (false, false) + | (false, Shown (id, _), { request = { type_ = ErrorMessage; _ }; _ }) -> + (Option.is_some id, true) + | (false, Shown (id, _), _) -> (Option.is_some id, false) + | (false, Never_shown, { request = { type_ = ErrorMessage; _ }; _ }) -> (false, true) + | (false, Never_shown, _) -> (false, false) + in + (* dismiss the old one *) + let ienv = + match (will_dismiss_old, ienv.i_status) with + | (true, Shown (id, existingParams)) -> + let id = Option.value_exn id in + let notification = CancelRequestNotification { CancelRequest.id } in + let json = Lsp_fmt.print_lsp (NotificationMessage notification) in + to_stdout json; + { ienv with i_status = Shown (None, existingParams) } + | (_, _) -> ienv + in + (* show the new one *) + if not will_show_new then + ienv + else + let id = NumberId (Jsonrpc.get_next_request_id ()) in + let request = + if use_status then + ShowStatusRequest params + else + ShowMessageRequestRequest params.request + in + let json = Lsp_fmt.print_lsp (RequestMessage (id, request)) in + to_stdout json; + + let mark_ienv_shown future_ienv = + match future_ienv.i_status with + | Shown (Some future_id, future_params) when future_id = id -> + { future_ienv with i_status = Shown (None, future_params) } + | _ -> future_ienv + in + let mark_state_shown state = + match state with + | Connected cenv -> Connected { cenv with c_ienv = mark_ienv_shown cenv.c_ienv } + | Disconnected denv -> Disconnected { denv with d_ienv = mark_ienv_shown denv.d_ienv } + | _ -> state + in + let handle_error _e state = mark_state_shown state in + let handle_result (r : ShowMessageRequest.result) state = + let state = mark_state_shown state in + match r with + | Some { ShowMessageRequest.title } -> handler title state + | None -> state + in + let handle_result = + if use_status then + ShowStatusHandler handle_result + else + ShowMessageHandler handle_result + in + let handlers = (handle_result, handle_error) in + let i_outstanding_local_requests = + IdMap.add id request ienv.i_outstanding_local_requests + in + let i_outstanding_local_handlers = + IdMap.add id handlers ienv.i_outstanding_local_handlers + in + { + ienv with + i_status = Shown (Some id, params); + i_outstanding_local_requests; + i_outstanding_local_handlers; + }))) + +let send_to_server (env : connected_env) (request : LspProt.request) (metadata : LspProt.metadata) + : unit = + let _bytesWritten = + Marshal_tools.to_fd_with_preamble (Unix.descr_of_out_channel env.c_conn.oc) (request, metadata) + in + () + +let send_lsp_to_server (cenv : connected_env) (metadata : LspProt.metadata) (message : lsp_message) + : unit = + send_to_server cenv (LspProt.LspToServer message) metadata + +(************************************************************************) +(** Protocol **) + +(************************************************************************) + +let do_initialize () : Initialize.result = + Initialize. 
+ { + server_capabilities = + { + textDocumentSync = + { + want_openClose = true; + want_change = IncrementalSync; + want_willSave = false; + want_willSaveWaitUntil = false; + want_didSave = Some { includeText = false }; + }; + hoverProvider = true; + completionProvider = + Some { resolveProvider = false; completion_triggerCharacters = ["."; " "] }; + signatureHelpProvider = None; + definitionProvider = true; + typeDefinitionProvider = false; + referencesProvider = true; + documentHighlightProvider = true; + documentSymbolProvider = true; + workspaceSymbolProvider = false; + codeActionProvider = true; + codeLensProvider = None; + documentFormattingProvider = false; + documentRangeFormattingProvider = false; + documentOnTypeFormattingProvider = None; + renameProvider = true; + documentLinkProvider = None; + executeCommandProvider = None; + typeCoverageProvider = true; + rageProvider = true; + }; + } + +let show_connected (env : connected_env) : state = + (* report that we're connected to telemetry/connectionStatus *) + let i_isConnected = + Lsp_helpers.notify_connectionStatus + env.c_ienv.i_initialize_params + to_stdout + env.c_ienv.i_isConnected + true + in + let env = { env with c_ienv = { env.c_ienv with i_isConnected } } in + (* show green status *) + let message = "Flow server is now ready" in + let c_ienv = + show_status + ~type_:MessageType.InfoMessage + ~message + ~shortMessage:None + ~progress:None + ~total:None + env.c_ienv + in + Connected { env with c_ienv } + +let show_connecting (reason : CommandConnectSimple.error) (env : disconnected_env) : state = + if reason = CommandConnectSimple.Server_missing then + Lsp_helpers.log_info to_stdout "Starting Flow server"; + + let (message, shortMessage, progress, total) = + match (reason, env.d_server_status) with + | (CommandConnectSimple.Server_missing, _) -> ("Flow: Server starting", None, None, None) + | (CommandConnectSimple.Server_socket_missing, _) -> + ("Flow: Server starting?", None, None, None) + | (CommandConnectSimple.(Build_id_mismatch Server_exited), _) -> + ("Flow: Server was wrong version and exited", None, None, None) + | (CommandConnectSimple.(Build_id_mismatch (Client_should_error _)), _) -> + ("Flow: Server is wrong version", None, None, None) + | (CommandConnectSimple.Server_busy CommandConnectSimple.Too_many_clients, _) -> + ("Flow: Server busy", None, None, None) + | (CommandConnectSimple.Server_busy _, None) -> ("Flow: Server busy", None, None, None) + | (CommandConnectSimple.Server_busy _, Some (server_status, watcher_status)) -> + if not (ServerStatus.is_free server_status) then + let (shortMessage, progress, total) = ServerStatus.get_progress server_status in + ( "Flow: " ^ ServerStatus.string_of_status ~use_emoji:true server_status, + shortMessage, + progress, + total ) + else + ("Flow: " ^ FileWatcherStatus.string_of_status watcher_status, None, None, None) + in + Disconnected + { + env with + d_ienv = + show_status + ~type_:MessageType.WarningMessage + ~message + ~shortMessage + ~progress + ~total + env.d_ienv; + } + +let show_disconnected + (code : FlowExitStatus.t option) (message : string option) (env : disconnected_env) : state = + (* report that we're disconnected to telemetry/connectionStatus *) + let i_isConnected = + Lsp_helpers.notify_connectionStatus + env.d_ienv.i_initialize_params + to_stdout + env.d_ienv.i_isConnected + false + in + let env = { env with d_ienv = { env.d_ienv with i_isConnected } } in + (* show red status *) + let message = Option.value message ~default:"Flow: server is stopped" 
in + let message = + match code with + | Some code -> Printf.sprintf "%s [%s]" message (FlowExitStatus.to_string code) + | None -> message + in + let handler r state = + match (state, r) with + | (Disconnected e, "Restart") -> Disconnected { e with d_autostart = true } + | _ -> state + in + Disconnected + { + env with + d_ienv = + show_status + ~handler + ~titles:["Restart"] + ~type_:MessageType.ErrorMessage + ~message + ~shortMessage:None + ~progress:None + ~total:None + env.d_ienv; + } + +let try_connect flowconfig_name (env : disconnected_env) : state = + (* If the version in .flowconfig has changed under our feet then we mustn't *) + (* connect. We'll terminate and trust the editor to relaunch an ok version. *) + let current_version = get_current_version flowconfig_name env.d_ienv.i_root in + if env.d_ienv.i_version <> current_version then ( + let prev_version_str = Option.value env.d_ienv.i_version ~default:"[None]" in + let current_version_str = Option.value current_version ~default:"[None]" in + let message = + "\nVersion in flowconfig that spawned the existing flow server: " + ^ prev_version_str + ^ "\nVersion in flowconfig currently: " + ^ current_version_str + ^ "\n" + in + Lsp_helpers.telemetry_log to_stdout message; + lsp_exit_bad () + ); + let start_env = + let connect_params = + (* If the .flowconfig has explicitly set lazy_mode, then we don't want to override that if we + * start a new server *) + if is_lazy_mode_set_in_flowconfig flowconfig_name env.d_ienv.i_root then + { env.d_ienv.i_connect_params with lazy_mode = None } + else + env.d_ienv.i_connect_params + in + CommandUtils.make_env flowconfig_name connect_params env.d_ienv.i_root + in + let client_handshake = + SocketHandshake. + ( { + client_build_id = build_revision; + client_version = Flow_version.version; + is_stop_request = false; + server_should_hangup_if_still_initializing = true; + (* only exit if we'll restart it *) + version_mismatch_strategy = + ( if env.d_autostart then + Stop_server_if_older + else + SocketHandshake.Error_client ); + }, + { client_type = Persistent { lsp_init_params = env.d_ienv.i_initialize_params } } ) + in + let conn = + CommandConnectSimple.connect_once + ~flowconfig_name + ~client_handshake + ~tmp_dir:start_env.CommandConnect.tmp_dir + start_env.CommandConnect.root + in + match conn with + | Ok (ic, oc) -> + let i_server_id = env.d_ienv.i_server_id + 1 in + let new_env = + { + c_ienv = { env.d_ienv with i_server_id }; + c_conn = { ic; oc }; + c_server_status = (ServerStatus.initial_status, None); + c_about_to_exit_code = None; + c_is_rechecking = false; + c_lazy_stats = None; + c_outstanding_requests_to_server = Lsp.IdSet.empty; + c_recent_summaries = []; + } + in + (* send the initial messages to the server *) + let () = + let metadata = + let method_name = "synthetic/subscribe" in + Hh_json. 
+ { + LspProt.empty_metadata with + LspProt.start_wall_time = Unix.gettimeofday (); + start_server_status = Some (fst new_env.c_server_status); + start_watcher_status = snd new_env.c_server_status; + start_json_truncated = JSON_Object [("method", JSON_String method_name)]; + lsp_method_name = method_name; + } + in + send_to_server new_env LspProt.Subscribe metadata + in + let make_open_message (textDocument : TextDocumentItem.t) : lsp_message = + NotificationMessage (DidOpenNotification { DidOpen.textDocument }) + in + let open_messages = + env.d_ienv.i_open_files + |> SMap.bindings + |> List.map ~f:(fun (_, { o_open_doc; _ }) -> make_open_message o_open_doc) + in + Hh_json.( + let method_name = "synthetic/open" in + let metadata = + { + LspProt.empty_metadata with + LspProt.start_wall_time = Unix.gettimeofday (); + start_server_status = Some (fst new_env.c_server_status); + start_watcher_status = snd new_env.c_server_status; + start_json_truncated = JSON_Object [("method", JSON_String method_name)]; + lsp_method_name = method_name; + } + in + List.iter open_messages ~f:(send_lsp_to_server new_env metadata); + + (* close the old UI and bring up the new *) + let new_state = show_connected new_env in + new_state) + (* Server_missing means the lock file is absent, because the server isn't running *) + | Error (CommandConnectSimple.Server_missing as reason) -> + let new_env = { env with d_autostart = false; d_server_status = None } in + if env.d_autostart then + let start_result = CommandConnect.start_flow_server start_env in + match start_result with + | Ok () -> show_connecting reason new_env + | Error (msg, code) -> show_disconnected (Some code) (Some msg) new_env + else + show_disconnected None None new_env + (* Server_socket_missing means the server is present but lacks its sock *) + (* file. There's a tiny race possibility that the server has created a *) + (* lock but not yet created a sock file. More likely is that the server *) + (* is an old version of the server which doesn't even create the right *) + (* sock file. We'll kill the server now so we can start a new one next. *) + (* And if it was in that race? bad luck... *) + | Error (CommandConnectSimple.Server_socket_missing as reason) -> + begin + try + let tmp_dir = start_env.CommandConnect.tmp_dir in + let root = start_env.CommandConnect.root in + CommandMeanKill.mean_kill ~flowconfig_name ~tmp_dir root; + show_connecting reason { env with d_server_status = None } + with CommandMeanKill.FailedToKill _ -> + let msg = "An old version of the Flow server is running. Please stop it." in + show_disconnected None (Some msg) { env with d_server_status = None } + end + (* The server exited due to a version mismatch between the lsp and the server. *) + | Error (CommandConnectSimple.(Build_id_mismatch Server_exited) as reason) -> + if env.d_autostart then + show_connecting reason { env with d_server_status = None } + else + (* We shouldn't hit this case. When `env.d_autostart` is `false`, we ask the server NOT to + * die on a version mismatch. *) + let msg = "Flow: the server was the wrong version" in + show_disconnected None (Some msg) { env with d_server_status = None } + (* The server and the lsp are different binaries and can't talk to each other. The server is not + * stopping (either because we asked it not to stop or because it is newer than this client). 
In + * this case, our best option is to stop the lsp and let the IDE start a new lsp with a newer + * binary *) + | Error CommandConnectSimple.(Build_id_mismatch (Client_should_error { server_version; _ })) -> + (match Semver.compare server_version Flow_version.version with + | n when n < 0 -> + Printf.eprintf + "Flow: the running server is an older version of Flow (%s) than the LSP (%s), but we're not allowed to stop it" + server_version + Flow_version.version + | 0 -> + Printf.eprintf + "Flow: the running server is a different binary with the same version (%s)" + Flow_version.version + | _ -> + Printf.eprintf + "Flow: the running server is a newer version of Flow (%s) than the LSP (%s)" + server_version + Flow_version.version); + Printf.eprintf + "LSP is exiting. Hopefully the IDE will start an LSP with the same binary as the server"; + lsp_exit_bad () + (* While the server is busy initializing, sometimes we get Server_busy.Fail_on_init *) + (* with a server-status telling us how far it is through init. And sometimes we get *) + (* just ServerStatus.Not_responding if the server was just too busy to give us a *) + (* status update. These are cases where the right version of the server is running *) + (* but it's not speaking to us just now. So we'll keep trying until it's ready. *) + | Error (CommandConnectSimple.Server_busy (CommandConnectSimple.Fail_on_init st) as reason) -> + show_connecting reason { env with d_server_status = Some st } + (* The following codes mean the right version of the server is running so *) + (* we'll retry. They provide no information about the d_server_status of *) + (* the server, so we'll leave it as it was before. *) + | Error (CommandConnectSimple.Server_busy CommandConnectSimple.Not_responding as reason) + | Error (CommandConnectSimple.Server_busy CommandConnectSimple.Too_many_clients as reason) -> + show_connecting reason env + +let close_conn (env : connected_env) : unit = + try Timeout.shutdown_connection env.c_conn.ic + with _ -> + (); + (try Timeout.close_in_noerr env.c_conn.ic with _ -> ()) + +(************************************************************************) +(** Tracking **) + +(************************************************************************) +(* The goal of tracking is that, if a server goes down, then all errors *) +(* and dialogs and things it created should be taken down with it. *) +(* *) +(* "track_to_server" is called for client->lsp messages when they get *) +(* sent to the current server. *) +(* "track_from_server" is called for server->lsp messages which *) +(* immediately get passed on to the client. *) +(* "dismiss_tracks" is called when a server gets disconnected. *) +(* *) +(* EDITOR_OPEN_FILES - we keep the current contents of all editor open *) +(* files. Updated in response to client->lsp notifications *) +(* didOpen/Change/Save/Close. When a new server starts, we synthesize *) +(* didOpen messages to the new server. *) +(* OUTSTANDING_REQUESTS_TO_SERVER - for all client->lsp requests that *) +(* have been sent to the server. Added to this list when we *) +(* track_to_server(request); removed on track_from_server(response). *) +(* When a server dies, we synthesize RequestCancelled responses *) +(* ourselves since the server will no longer do that. *) +(* OUTSTANDING_REQUESTS_FROM_SERVER - for all server->lsp requests. We *) +(* generate a "wrapped-id" that encodes which server it came from, *) +(* and send immediately to the client. 
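(* [illustrative sketch -- not part of this change] A minimal sketch of the "wrapped-id"
   idea described in this comment block, assuming ids are plain ints and the encoding is a
   "<server_id>:<message_id>" string; the real encode_wrapped/decode_wrapped and lsp_id in
   this file are richer. *)
type wrapped_id_sketch = { server_id : int; message_id : int }

(* pack both components into the id the editor sees *)
let encode_wrapped_sketch { server_id; message_id } : string =
  Printf.sprintf "%d:%d" server_id message_id

let decode_wrapped_sketch (s : string) : wrapped_id_sketch =
  match String.split_on_char ':' s with
  | [sid; mid] -> { server_id = int_of_string sid; message_id = int_of_string mid }
  | _ -> invalid_arg "decode_wrapped_sketch"

(* a response coming back from the editor is only forwarded if it targets the server
   generation we are currently connected to; responses for defunct servers are dropped *)
let should_forward ~current_server_id (response_id : string) : bool =
  (decode_wrapped_sketch response_id).server_id = current_server_id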
Added to this list when we *) +(* track_from_server(request), removed in track_to_server(response). *) +(* When a server dies, we emit CancelRequest notifications to the *) +(* client so it can dismiss dialogs or similar. When any response *) +(* comes back from the client, we ignore ones that are destined for *) +(* now-defunct servers, and only forward on the ones for the current *) +(* server. *) +(* OUTSTANDING_PROGRESS - for all server->lsp progress notifications *) +(* which are being displayed in the client. Added to this list when *) +(* we track_from_server(progress) a non-empty progress; removed *) +(* when we track_from_server(progress) an empty progress. When a *) +(* server dies, we synthesize progress notifications to the client *) +(* so it can erase all outstanding progress messages. *) +(* OUTSTANDING_ACTION_REQUIRED - similar to outstanding_progress. *) + +type track_effect = { changed_live_uri: string option } + +let track_to_server (state : state) (c : Lsp.lsp_message) : state * track_effect = + let (state, changed_live_uri) = + match (get_open_files state, c) with + | (_, NotificationMessage (DidOpenNotification params)) -> + let o_open_doc = params.DidOpen.textDocument in + let uri = params.DidOpen.textDocument.TextDocumentItem.uri in + let state = + update_open_file uri (Some { o_open_doc; o_ast = None; o_unsaved = false }) state + in + (state, Some uri) + | (_, NotificationMessage (DidCloseNotification params)) -> + let uri = params.DidClose.textDocument.TextDocumentIdentifier.uri in + let state = + state |> update_errors (LspErrors.clear_all_live_errors_and_send to_stdout uri) + in + (state, None) + | (Some open_files, NotificationMessage (DidChangeNotification params)) -> + let uri = params.DidChange.textDocument.VersionedTextDocumentIdentifier.uri in + let { o_open_doc; _ } = SMap.find uri open_files in + let text = o_open_doc.TextDocumentItem.text in + let text = Lsp_helpers.apply_changes_unsafe text params.DidChange.contentChanges in + let o_open_doc = + { + Lsp.TextDocumentItem.uri; + languageId = o_open_doc.TextDocumentItem.languageId; + version = params.DidChange.textDocument.VersionedTextDocumentIdentifier.version; + text; + } + in + let state = + update_open_file uri (Some { o_open_doc; o_ast = None; o_unsaved = true }) state + in + (* update errors... we don't need to send updated squiggle locations *) + (* right now ourselves, since all editors take care of that; but if ever we *) + (* re-send the server's existing diagnostics for this file then that should take *) + (* into account any user edits since then. This isn't perfect - e.g. if the user *) + (* modifies a file we'll update squiggles, but if the user subsquently closes the *) + (* file unsaved and then re-opens it then we'll be left with wrong squiggles. *) + (* It also doesn't compensate if the flow server starts a typecheck, then receives *) + (* a DidChange, then sends error spans from as it was at the start of the typecheck. *) + (* Still, at least we're doing better on the common case -- where the server has sent *) + (* diagnostics, then the user types, then we re-send live syntax errors. 
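(* [illustrative sketch -- not part of this change] The open-file bookkeeping that the
   didOpen/didChange/didSave cases above maintain, reduced to the stdlib: a change replaces
   the cached text and invalidates the cached AST, while a save only clears the unsaved
   flag. The AST is abstracted to an opaque placeholder here. *)
module UriMapSketch = Map.Make (String)

type cached_doc = {
  text : string;      (* mirrors o_open_doc's text *)
  ast : unit option;  (* mirrors o_ast: None means "needs reparsing" *)
  unsaved : bool;     (* mirrors o_unsaved *)
}

let on_open uri text docs =
  UriMapSketch.add uri { text; ast = None; unsaved = false } docs

let on_change uri new_text docs =
  (* any AST computed for the old text is no longer valid *)
  UriMapSketch.add uri { text = new_text; ast = None; unsaved = true } docs

let on_save uri docs =
  UriMapSketch.update uri (Option.map (fun d -> { d with unsaved = false })) docs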
*) + let state = + match state with + | Connected _ -> + state |> update_errors (LspErrors.update_errors_due_to_change_and_send to_stdout params) + | _ -> state + in + (state, Some uri) + | (Some open_files, NotificationMessage (DidSaveNotification params)) -> + let uri = params.DidSave.textDocument.TextDocumentIdentifier.uri in + let open_file = SMap.find uri open_files in + let state = update_open_file uri (Some { open_file with o_unsaved = false }) state in + (state, Some uri) + | (_, _) -> (state, None) + in + (* update cenv.c_outstanding_requests*... *) + let state = + match (state, c) with + (* client->server requests *) + | (Connected env, RequestMessage (id, _)) -> + Connected + { + env with + c_outstanding_requests_to_server = IdSet.add id env.c_outstanding_requests_to_server; + } + (* client->server responses *) + | (Connected env, ResponseMessage (id, _)) -> + let wrapped = decode_wrapped id in + let c_ienv = + { + env.c_ienv with + i_outstanding_requests_from_server = + WrappedMap.remove wrapped env.c_ienv.i_outstanding_requests_from_server; + } + in + Connected { env with c_ienv } + | _ -> state + in + (state, { changed_live_uri }) + +let track_from_server (state : state) (c : Lsp.lsp_message) : state = + match (state, c) with + (* server->client response *) + | (Connected env, ResponseMessage (id, _)) -> + Connected + { + env with + c_outstanding_requests_to_server = IdSet.remove id env.c_outstanding_requests_to_server; + } + (* server->client request *) + | (Connected env, RequestMessage (id, params)) -> + let wrapped = { server_id = env.c_ienv.i_server_id; message_id = id } in + let c_ienv = + { + env.c_ienv with + i_outstanding_requests_from_server = + WrappedMap.add wrapped params env.c_ienv.i_outstanding_requests_from_server; + } + in + Connected { env with c_ienv } + | (_, _) -> state + +let dismiss_tracks (state : state) : state = + let decline_request_to_server (id : lsp_id) : unit = + let e = Lsp_fmt.error_of_exn (Error.RequestCancelled "Connection to server has been lost") in + let stack = Exception.get_current_callstack_string 100 in + let json = Lsp_fmt.print_lsp_response id (ErrorResult (e, stack)) in + to_stdout json + in + let cancel_request_from_server (server_id : int) (wrapped : wrapped_id) (_request : lsp_request) + : unit = + if server_id = wrapped.server_id then + let id = encode_wrapped wrapped in + let notification = CancelRequestNotification { CancelRequest.id } in + let json = Lsp_fmt.print_lsp_notification notification in + to_stdout json + else + () + in + match state with + | Connected env -> + WrappedMap.iter + (cancel_request_from_server env.c_ienv.i_server_id) + env.c_ienv.i_outstanding_requests_from_server; + IdSet.iter decline_request_to_server env.c_outstanding_requests_to_server; + Connected { env with c_outstanding_requests_to_server = IdSet.empty } + |> update_errors (LspErrors.clear_all_errors_and_send to_stdout) + | _ -> state + +let lsp_DocumentItem_to_flow (open_doc : Lsp.TextDocumentItem.t) : File_input.t = + let uri = open_doc.TextDocumentItem.uri in + let fn = Lsp_helpers.lsp_uri_to_path uri in + let fn = Option.value (Sys_utils.realpath fn) ~default:fn in + File_input.FileContent (Some fn, open_doc.TextDocumentItem.text) + +(******************************************************************************) +(* Diagnostics *) +(* These should really be handle inside the flow server so it sends out *) +(* LSP publishDiagnostics notifications and we track them in the normal way. 
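(* [illustrative sketch -- not part of this change] publishDiagnostics is per-file, so
   errors must be grouped by URI before being sent; this mirrors the
   "SMap.add ~combine:List.append" pattern used further down, with the stdlib Map and
   diagnostics abstracted to strings. *)
module FileMapSketch = Map.Make (String)

let group_by_uri (errors : (string * string) list) : string list FileMapSketch.t =
  List.fold_left
    (fun acc (uri, diagnostic) ->
      let existing = Option.value (FileMapSketch.find_opt uri acc) ~default:[] in
      FileMapSketch.add uri (diagnostic :: existing) acc)
    FileMapSketch.empty
    errors

(* one notification per file, carrying that file's complete set of diagnostics *)
let publish_all ~(send : uri:string -> diagnostics:string list -> unit) buckets : unit =
  FileMapSketch.iter (fun uri diagnostics -> send ~uri ~diagnostics) buckets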
*) +(* But while the flow server has to handle legacy clients as well as LSP *) +(* clients, we don't want to make the flow server code too complex, so we're *) +(* handling them here for now. *) +(******************************************************************************) + +let error_to_lsp + ~(severity : PublishDiagnostics.diagnosticSeverity option) + ~(default_uri : string) + (error : Loc.t Errors.printable_error) : string * PublishDiagnostics.diagnostic = + let error = Errors.Lsp_output.lsp_of_error error in + let location = + Flow_lsp_conversions.loc_to_lsp_with_default error.Errors.Lsp_output.loc ~default_uri + in + let uri = location.Lsp.Location.uri in + let related_to_lsp (loc, relatedMessage) = + let relatedLocation = Flow_lsp_conversions.loc_to_lsp_with_default loc ~default_uri in + { Lsp.PublishDiagnostics.relatedLocation; relatedMessage } + in + let relatedInformation = List.map error.Errors.Lsp_output.relatedLocations ~f:related_to_lsp in + ( uri, + { + Lsp.PublishDiagnostics.range = location.Lsp.Location.range; + severity; + code = Lsp.PublishDiagnostics.StringCode error.Errors.Lsp_output.code; + source = Some "Flow"; + message = error.Errors.Lsp_output.message; + relatedInformation; + relatedLocations = relatedInformation (* legacy fb extension *); + } ) + +(* parse_and_cache: either the uri is an open file for which we already + * have parse results (ast+diagnostics), so we can just return them; + * or it's an open file and we are expected to lazily compute the parse results + * and store them in the state; + * or it's an unopened file in which case we'll retrieve parse results but + * won't store them. *) +let parse_and_cache flowconfig_name (state : state) (uri : string) : + state * ((Loc.t, Loc.t) Flow_ast.program * Lsp.PublishDiagnostics.diagnostic list option) = + (* part of parsing is producing parse errors, if so desired *) + let liveSyntaxErrors = + Initialize.( + match state with + | Connected cenv -> cenv.c_ienv.i_initialize_params.initializationOptions.liveSyntaxErrors + | Disconnected denv -> denv.d_ienv.i_initialize_params.initializationOptions.liveSyntaxErrors + | _ -> false) + in + let error_to_diagnostic (loc, parse_error) = + let message = Errors.Friendly.message_of_string (Parse_error.PP.error parse_error) in + let error = Errors.mk_error ~kind:Errors.ParseError loc message in + let (_, diagnostic) = + error_to_lsp ~default_uri:uri ~severity:(Some PublishDiagnostics.Error) error + in + diagnostic + in + (* The way flow compilation works in the flow server is that parser options *) + (* are permissive to allow all constructs, so that parsing works well; if *) + (* the user choses not to enable features through the user's .flowconfig *) + (* then use of impermissable constructs will be reported at typecheck time *) + (* (not as parse errors). We'll do the same here, with permissive parsing *) + (* and only reporting parse errors. *) + let get_parse_options () = + let root = get_root state in + let use_strict = + Option.value_map root ~default:false ~f:(fun root -> + Server_files_js.config_file flowconfig_name root + |> read_config_or_exit + |> FlowConfig.modules_are_use_strict) + in + Some + Parser_env. 
+ { + enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types = true; + use_strict; + } + in + let parse file = + let (program, errors) = + try + let content = File_input.content_of_file_input_unsafe file in + let filename_opt = File_input.path_of_file_input file in + let filekey = Option.map filename_opt ~f:(fun fn -> File_key.SourceFile fn) in + let parse_options = get_parse_options () in + Parser_flow.program_file ~fail:false ~parse_options ~token_sink:None content filekey + with _ -> ((Loc.none, [], []), []) + in + ( program, + if liveSyntaxErrors then + Some (List.map errors ~f:error_to_diagnostic) + else + None ) + in + let open_files = get_open_files state in + let existing_open_file_info = Option.bind open_files (SMap.get uri) in + match existing_open_file_info with + | Some { o_ast = Some o_ast; _ } -> + (* We've already parsed this file since it last changed. No need to parse again *) + (state, o_ast) + | Some { o_open_doc; o_unsaved; _ } -> + (* We have not parsed this file yet. We need to parse it now and save the updated ast *) + let file = lsp_DocumentItem_to_flow o_open_doc in + let o_ast = parse file in + let open_file_info = Some { o_open_doc; o_ast = Some o_ast; o_unsaved } in + let state = state |> update_open_file uri open_file_info in + (state, o_ast) + | None -> + (* This is an unopened file, so we won't cache the results and won't return the errors *) + let fn = Lsp_helpers.lsp_uri_to_path uri in + let fn = Option.value (Sys_utils.realpath fn) ~default:fn in + let file = File_input.FileName fn in + let (open_ast, _) = parse file in + (state, (open_ast, None)) + +let show_recheck_progress (cenv : connected_env) : state = + let (type_, message, shortMessage, progress, total) = + match (cenv.c_is_rechecking, cenv.c_server_status, cenv.c_lazy_stats) with + | (true, (server_status, _), _) when not (ServerStatus.is_free server_status) -> + let (shortMessage, progress, total) = ServerStatus.get_progress server_status in + let message = "Flow: " ^ ServerStatus.string_of_status ~use_emoji:true server_status in + (MessageType.WarningMessage, message, shortMessage, progress, total) + | (true, _, _) -> + (MessageType.WarningMessage, "Flow: Server is rechecking...", None, None, None) + | (false, _, Some { ServerProt.Response.lazy_mode = mode; checked_files; total_files }) + when checked_files < total_files && mode <> Options.NON_LAZY_MODE -> + let message = + Printf.sprintf + "Flow: done recheck. 
(%s lazy mode let it check only %d/%d files [[more...](%s)])" + (Options.lazy_mode_to_string mode) + checked_files + total_files + "https://flow.org/en/docs/lang/lazy-modes/" + in + (MessageType.InfoMessage, message, None, None, None) + | (false, _, _) -> (MessageType.InfoMessage, "Flow: done recheck", None, None, None) + in + Connected + { cenv with c_ienv = show_status ~type_ ~message ~shortMessage ~progress ~total cenv.c_ienv } + +let do_documentSymbol + flowconfig_name (state : state) (id : lsp_id) (params : DocumentSymbol.params) : state = + let uri = params.DocumentSymbol.textDocument.TextDocumentIdentifier.uri in + (* It's not do_documentSymbol's job to set live parse errors, so we ignore them *) + let (state, (ast, _live_parse_errors)) = parse_and_cache flowconfig_name state uri in + let result = Flow_lsp_conversions.flow_ast_to_lsp_symbols ~uri ast in + let json = Lsp_fmt.print_lsp (ResponseMessage (id, DocumentSymbolResult result)) in + to_stdout json; + state + +module RagePrint = struct + let addline (b : Buffer.t) (prefix : string) (s : string) : unit = + Buffer.add_string b prefix; + Buffer.add_string b s; + Buffer.add_string b "\n"; + () + + let string_of_lazy_stats (lazy_stats : ServerProt.Response.lazy_stats) : string = + ServerProt.( + Printf.sprintf + "lazy_mode=%s, checked_files=%d, total_files=%d" + (Options.lazy_mode_to_string lazy_stats.Response.lazy_mode) + lazy_stats.Response.checked_files + lazy_stats.Response.total_files) + + let string_of_connect_params (p : connect_params) : string = + CommandUtils.( + Printf.sprintf + "retries=%d, retry_if_init=%B, no_auto_start=%B, autostop=%B, ignore_version=%B quiet=%B, temp_dir=%s, timeout=%s, lazy_mode=%s" + p.retries + p.retry_if_init + p.no_auto_start + p.autostop + p.ignore_version + p.quiet + (Option.value ~default:"None" p.temp_dir) + (Option.value_map p.timeout ~default:"None" ~f:string_of_int) + (Option.value_map p.lazy_mode ~default:"None" ~f:Options.lazy_mode_to_string)) + + let string_of_open_file { o_open_doc; o_ast; o_unsaved } : string = + Printf.sprintf + "(uri=%s version=%d text=[%d bytes] ast=[%s] unsaved=%b)" + o_open_doc.TextDocumentItem.uri + o_open_doc.TextDocumentItem.version + (String.length o_open_doc.TextDocumentItem.text) + (Option.value_map o_ast ~default:"absent" ~f:(fun _ -> "present")) + o_unsaved + + let string_of_open_files (files : open_file_info SMap.t) : string = + SMap.bindings files + |> List.map ~f:(fun (_, ofi) -> string_of_open_file ofi) + |> String.concat "," + + let string_of_show_status (show_status : show_status_t) : string = + match show_status with + | Never_shown -> "Never_shown" + | Shown (id_opt, params) -> + Printf.sprintf + "Shown id=%s params=%s" + (Option.value_map id_opt ~default:"None" ~f:Lsp_fmt.id_to_string) + (print_showStatus params |> Hh_json.json_to_string) + + let add_ienv (b : Buffer.t) (ienv : initialized_env) : unit = + addline b "i_connect_params=" (ienv.i_connect_params |> string_of_connect_params); + addline b "i_root=" (ienv.i_root |> Path.to_string); + addline b "i_version=" (ienv.i_version |> Option.value ~default:"None"); + addline b "i_server_id=" (ienv.i_server_id |> string_of_int); + addline + b + "i_can_autostart_after_version_mismatch=" + (ienv.i_can_autostart_after_version_mismatch |> string_of_bool); + addline + b + "i_outstanding_local_handlers=" + ( ienv.i_outstanding_local_handlers + |> IdMap.bindings + |> List.map ~f:(fun (id, _handler) -> Lsp_fmt.id_to_string id) + |> String.concat "," ); + addline + b + "i_outstanding_local_requests=" + ( 
ienv.i_outstanding_local_requests + |> IdMap.bindings + |> List.map ~f:(fun (id, req) -> + Printf.sprintf "%s:%s" (Lsp_fmt.id_to_string id) (Lsp_fmt.request_name_to_string req)) + |> String.concat "," ); + addline + b + "i_outstanding_requests_from_server=" + ( ienv.i_outstanding_requests_from_server + |> WrappedMap.bindings + |> List.map ~f:(fun (id, req) -> + Printf.sprintf + "#%d:%s:%s" + id.server_id + (Lsp_fmt.id_to_string id.message_id) + (Lsp_fmt.request_name_to_string req)) + |> String.concat "," ); + addline b "i_isConnected=" (ienv.i_isConnected |> string_of_bool); + addline b "i_status=" (ienv.i_status |> string_of_show_status); + addline b "i_open_files=" (ienv.i_open_files |> string_of_open_files); + () + + let add_denv (b : Buffer.t) (denv : disconnected_env) : unit = + let (server_status, watcher_status) = + match denv.d_server_status with + | None -> (None, None) + | Some (s, w) -> (Some s, Some w) + in + add_ienv b denv.d_ienv; + addline b "d_autostart=" (denv.d_autostart |> string_of_bool); + addline + b + "d_server_status:server=" + (server_status |> Option.value_map ~default:"None" ~f:ServerStatus.string_of_status); + addline + b + "d_server_status:watcher=" + (watcher_status |> Option.value_map ~default:"None" ~f:FileWatcherStatus.string_of_status); + () + + let add_cenv (b : Buffer.t) (cenv : connected_env) : unit = + let (server_status, watcher_status) = cenv.c_server_status in + add_ienv b cenv.c_ienv; + addline b "c_server_status:server=" (server_status |> ServerStatus.string_of_status); + addline + b + "c_server_status:watcher=" + (watcher_status |> Option.value_map ~default:"None" ~f:FileWatcherStatus.string_of_status); + addline + b + "c_about_to_exit_code=" + (cenv.c_about_to_exit_code |> Option.value_map ~default:"None" ~f:FlowExitStatus.to_string); + addline b "c_is_rechecking=" (cenv.c_is_rechecking |> string_of_bool); + addline + b + "c_lazy_stats=" + (cenv.c_lazy_stats |> Option.value_map ~default:"None" ~f:string_of_lazy_stats); + addline + b + "c_outstanding_requests_to_server=" + ( cenv.c_outstanding_requests_to_server + |> IdSet.elements + |> List.map ~f:Lsp_fmt.id_to_string + |> String.concat "," ); + () + + let string_of_state (state : state) : string = + let b = Buffer.create 10000 in + begin + match state with + | Pre_init p -> + Buffer.add_string b (Printf.sprintf "Pre_init:\n%s\n" (string_of_connect_params p)) + | Post_shutdown -> Buffer.add_string b "Post_shutdown:\n[]\n" + | Disconnected denv -> + Buffer.add_string b "Disconnected:\n"; + add_denv b denv + | Connected cenv -> + Buffer.add_string b "Connected:\n"; + add_cenv b cenv + end; + Buffer.contents b +end + +let do_rage flowconfig_name (state : state) : Rage.result = + Rage.( + (* Some helpers to add various types of data to the rage output... 
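(* [illustrative sketch -- not part of this change] add_file below reads the whole log with
   Path.cat and then keeps only the last 10mb via String.sub; an equivalent lower-memory
   variant can seek straight to the tail instead of materialising the whole file first. *)
let read_tail ~(max_len : int) (path : string) : string =
  let ic = open_in_bin path in
  Fun.protect
    ~finally:(fun () -> close_in_noerr ic)
    (fun () ->
      let len = in_channel_length ic in
      let start = max 0 (len - max_len) in
      seek_in ic start;
      really_input_string ic (len - start))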
*) + let add_file (items : rageItem list) (file : Path.t) : rageItem list = + let data = + if Path.file_exists file then + let data = Path.cat file in + (* cat even up to 1gig is workable even if ugly *) + let len = String.length data in + let max_len = 10 * 1024 * 1024 in + (* maximum 10mb *) + if len <= max_len then + data + else + String.sub data (len - max_len) max_len + else + Printf.sprintf "File not found: %s" (Path.to_string file) + in + { title = Some (Path.to_string file); data } :: items + in + let add_string (items : rageItem list) (data : string) : rageItem list = + { title = None; data } :: items + in + let add_pid (items : rageItem list) ((pid, reason) : int * string) : rageItem list = + if String_utils.string_starts_with reason "slave" then + items + else + let pid = string_of_int pid in + (* some systems have "pstack", some have "gstack", some have neither... *) + let stack = + try Sys_utils.exec_read_lines ~reverse:true ("pstack " ^ pid) + with _ -> + begin + try Sys_utils.exec_read_lines ~reverse:true ("gstack " ^ pid) + with e -> + let e = Exception.wrap e in + ["unable to pstack - " ^ Exception.get_ctor_string e] + end + in + let stack = String.concat "\n" stack in + add_string items (Printf.sprintf "PSTACK %s (%s) - %s\n\n" pid reason stack) + in + let items : rageItem list = [] in + (* LOGFILES. *) + (* Where are the logs? Specified explicitly by the user with --log-file and *) + (* --monitor-log-file when they launched the server. Failing that, the *) + (* values in environment variables FLOW_LOG_FILE and FLOW_MONITOR_LOG_FILE *) + (* upon launch. Failing that, CommandUtils.server_log_file will look in the *) + (* flowconfig for a "log.file" option. Failing that it will synthesize one *) + (* from `Server_files_js.log_file` in the tmp-directory. And *) + (* CommandUtils.monitor_log_file is similar except it bypasses flowconfig. *) + (* As for tmp dir, that's --temp_dir, failing that FLOW_TEMP_DIR, failing *) + (* that temp_dir in flowconfig, failing that Sys_utils.temp_dir_name /flow. *) + (* WOW! *) + (* Notionally the only authoritative way to find logs is to connect to a *) + (* running monitor and ask it. But we're a 'rage' command whose whole point *) + (* is to give good answers even when things are not working, e.g. when the *) + (* monitor is down. And in any case, by design, a flow client can only ever *) + (* interact with a server if the client was launched with the same flags *) + (* (minimum tmp_dir and flowconfig) as the server was launched with. *) + (* Therefore there's no need to ask the monitor. We'll just work with what *) + (* log files we'd write to were we ourselves asked to start a server. *) + let ienv = + match state with + | Pre_init _ -> None + | Disconnected denv -> Some denv.d_ienv + | Connected cenv -> Some cenv.c_ienv + | Post_shutdown -> None + in + let items = + match ienv with + | None -> items + | Some ienv -> + let start_env = CommandUtils.make_env flowconfig_name ienv.i_connect_params ienv.i_root in + let tmp_dir = start_env.CommandConnect.tmp_dir in + let server_log_file = Path.make start_env.CommandConnect.log_file in + (* monitor log file isn't retained anywhere. But since flow lsp doesn't *) + (* take a --monitor-log-file option, then we know where it must be. 
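(* [illustrative sketch -- not part of this change] The log-file precedence described in the
   comment above, written out as a plain fallback chain; the flag/config lookups are
   stand-in closures and the synthesized default name is invented (the real one comes from
   Server_files_js). *)
let first_some (candidates : (unit -> string option) list) ~(default : unit -> string) : string =
  match List.find_map (fun f -> f ()) candidates with
  | Some v -> v
  | None -> default ()

let server_log_file_sketch ~cli_flag ~flowconfig_log_file ~tmp_dir : string =
  first_some
    [
      (fun () -> cli_flag);                        (* --log-file *)
      (fun () -> Sys.getenv_opt "FLOW_LOG_FILE");  (* environment variable *)
      (fun () -> flowconfig_log_file);             (* log.file in .flowconfig *)
    ]
    ~default:(fun () -> Filename.concat tmp_dir "flow.log")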
*) + let monitor_log_file = + CommandUtils.monitor_log_file flowconfig_name tmp_dir start_env.CommandConnect.root + in + let items = add_file items server_log_file in + let items = add_file items monitor_log_file in + (* Let's pick up the old files in case user reported bug after a crash *) + let items = add_file items (Path.make (Path.to_string server_log_file ^ ".old")) in + let items = add_file items (Path.make (Path.to_string monitor_log_file ^ ".old")) in + (* And the pids file *) + let items = + try + let pids = + PidLog.get_pids (Server_files_js.pids_file ~flowconfig_name ~tmp_dir ienv.i_root) + in + Core_list.fold pids ~init:items ~f:add_pid + with e -> + let e = Exception.wrap e in + add_string items (Printf.sprintf "Failed to get PIDs: %s" (Exception.to_string e)) + in + items + in + (* CLIENT. This includes the client's perception of the server state. *) + let items = + add_string items ("LSP adapter state: " ^ RagePrint.string_of_state state ^ "\n") + in + (* DONE! *) + items) + +let parse_json (state : state) (json : Jsonrpc.message) : lsp_message = + (* to know how to parse a response, we must provide the corresponding request *) + let outstanding (id : lsp_id) : lsp_request = + let ienv = + match state with + | Connected env -> env.c_ienv + | Disconnected env -> env.d_ienv + | _ -> failwith "Didn't expect an LSP response yet" + in + try IdMap.find id ienv.i_outstanding_local_requests + with Not_found -> WrappedMap.find (decode_wrapped id) ienv.i_outstanding_requests_from_server + in + Lsp_fmt.parse_lsp json.Jsonrpc.json outstanding + +let with_timer (f : unit -> 'a) : float * 'a = + let start = Unix.gettimeofday () in + let ret = f () in + let duration = Unix.gettimeofday () -. start in + (duration, ret) + +(* The EventLogger needs to be periodically flushed. LspCommand was originally written to flush + * when idle, but the idle detection didn't quite work so we never really flushed until exiting. So + * instead lets periodically flush. Flushing should be fast and this is basically what the monitor + * does too. *) +module LogFlusher = LwtLoop.Make (struct + type acc = unit + + let main () = + let%lwt () = Lwt_unix.sleep 5.0 in + Lwt.join [EventLoggerLwt.flush (); FlowInteractionLogger.flush ()] + + let catch () exn = Exception.(reraise (wrap exn)) +end) + +(* Our interaction logging logs a snapshot of the state of the world at the start of an + * interaction (when the interaction is triggered) and at the end of an interaction (when the ux + * occurs). This function collects that state. This is called relatively often, so it should be + * pretty cheap *) +let collect_interaction_state state = + LspInteraction.( + let time = Unix.gettimeofday () in + let buffer_status = + match get_open_files state with + | None -> NoOpenBuffers + | Some files when files = SMap.empty -> NoOpenBuffers + | Some files -> + if SMap.exists (fun _ file -> file.o_unsaved) files then + UnsavedBuffers + else + NoUnsavedBuffers + in + let server_status = + match state with + | Pre_init _ + | Post_shutdown -> + Stopped + | Disconnected disconnected_env -> + if disconnected_env.d_server_status = None then + Stopped + else + Initializing + | Connected connected_env -> + if connected_env.c_is_rechecking then + Rechecking + else + Ready + in + { time; server_status; buffer_status }) + +(* Completed interactions clean themselves up, but we need to clean up pending interactions which + * have never been completed. 
*) +let gc_pending_interactions = + let next_gc = ref (Unix.gettimeofday ()) in + fun state -> + if Unix.gettimeofday () >= !next_gc then + next_gc := LspInteraction.gc ~get_state:(fun () -> collect_interaction_state state) + +(* Kicks off the interaction tracking *) +let start_interaction ~trigger state = + let start_state = collect_interaction_state state in + LspInteraction.start ~start_state ~trigger + +let log_interaction ~ux state id = + let end_state = collect_interaction_state state in + LspInteraction.log ~end_state ~ux ~id + +let do_live_diagnostics + flowconfig_name + (state : state) + (trigger : LspInteraction.trigger option) + (metadata : LspProt.metadata) + (uri : string) : state = + (* Normally we don't log interactions for unknown triggers. But in this case we're providing live + * diagnostics and want to log what triggered it regardless of whether it's known or not *) + let trigger = Option.value trigger ~default:LspInteraction.UnknownTrigger in + let interaction_id = start_interaction ~trigger state in + (* reparse the file and write it into the state's editor_open_files as needed *) + let (state, (_, live_parse_errors)) = parse_and_cache flowconfig_name state uri in + (* Set the live parse errors *) + let (state, ux) = + match live_parse_errors with + | None -> (state, LspInteraction.ErroredPushingLiveParseErrors) + | Some live_parse_errors -> + let state = + state + |> update_errors (LspErrors.set_live_parse_errors_and_send to_stdout uri live_parse_errors) + in + (state, LspInteraction.PushedLiveParseErrors) + in + log_interaction ~ux state interaction_id; + let error_count = + Option.value_map live_parse_errors ~default:Hh_json.JSON_Null ~f:(fun errors -> + Hh_json.JSON_Number (errors |> List.length |> string_of_int)) + in + FlowEventLogger.live_parse_errors + ~request:(metadata.LspProt.start_json_truncated |> Hh_json.json_to_string) + ~data: + Hh_json.( + JSON_Object [("uri", JSON_String uri); ("error_count", error_count)] |> json_to_string) + ~wall_start:metadata.LspProt.start_wall_time; + + state + +(************************************************************************) +(** Main loop **) + +(************************************************************************) + +type log_needed = + | LogNeeded of LspProt.metadata + | LogDeferred + | LogNotNeeded + +let rec run ~flowconfig_name ~connect_params = + let client = Jsonrpc.make_queue () in + let state = Pre_init connect_params in + LwtInit.run_lwt (initial_lwt_thread flowconfig_name client state) + +and initial_lwt_thread flowconfig_name client state () = + (* If `prom` in `Lwt.async (fun () -> prom)` resolves to an exception, this function will be + * called *) + (Lwt.async_exception_hook := + fun exn -> + let exn = Exception.wrap exn in + let msg = Utils.spf "Uncaught async exception: %s" (Exception.to_string exn) in + FlowExitStatus.(exit ~msg Unknown_error)); + + LspInteraction.init (); + Lwt.async LogFlusher.run; + + main_loop flowconfig_name client state + +and main_loop flowconfig_name (client : Jsonrpc.queue) (state : state) : unit Lwt.t = + (* TODO - delete this line once this loop is fully lwt. At the moment, the idle loop never + * actually does any lwt io so never yields. This starves any asynchronous lwt. 
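(* [illustrative sketch -- not part of this change] Why the Lwt.pause below matters: a
   recursive loop whose body never binds on a real Lwt promise never reaches the scheduler,
   so background promises (such as the LogFlusher above) are starved. An explicit pause
   yields once per iteration; "do_one_iteration" is a stand-in for the synchronous body of
   the loop. *)
let rec cooperative_loop (do_one_iteration : unit -> unit) : unit Lwt.t =
  Lwt.bind (Lwt.pause ()) (fun () ->
      do_one_iteration ();
      cooperative_loop do_one_iteration)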
This pause call + * just yields *) + let%lwt () = Lwt.pause () in + gc_pending_interactions state; + let%lwt event = + try%lwt + let%lwt event = get_next_event state client (parse_json state) in + Lwt.return_ok event + with e -> + let exn = Exception.wrap e in + let stack = Exception.get_backtrace_string exn in + Lwt.return_error (state, e, Utils.Callstack stack) + in + let result = + match event with + | Error (state, e, stack) -> Error (state, e, stack, None) + | Ok event -> + let (client_duration, result) = + with_timer (fun () -> + try main_handle_unsafe flowconfig_name state event + with e -> + let exn = Exception.wrap e in + let stack = Exception.get_backtrace_string exn in + Error (state, e, Utils.Callstack stack)) + in + (match result with + | Ok (state, logneeded) -> Ok (state, logneeded, client_duration) + | Error (state, e, stack) -> Error (state, e, stack, Some event)) + in + let state = + match result with + | Ok (state, LogNeeded metadata, client_duration) -> + LspProt.( + let client_duration = + if metadata.client_duration = None then + Some client_duration + else + metadata.client_duration + in + let metadata = { metadata with client_duration } in + main_log_command state metadata; + state) + | Ok (state, _, _) -> state + | Error (state, e, stack, event) -> main_handle_error e stack state event + in + main_loop flowconfig_name client state + +and main_handle_unsafe flowconfig_name (state : state) (event : event) : + (state * log_needed, state * exn * Utils.callstack) result = + match (state, event) with + | ( Pre_init i_connect_params, + Client_message (RequestMessage (id, InitializeRequest i_initialize_params), metadata) ) -> + let i_root = Lsp_helpers.get_root i_initialize_params |> Path.make in + let flowconfig = + Server_files_js.config_file flowconfig_name i_root |> read_config_or_exit ~allow_cache:false + in + let d_ienv = + { + i_initialize_params; + i_connect_params; + i_root; + i_version = FlowConfig.required_version flowconfig; + i_can_autostart_after_version_mismatch = true; + i_server_id = 0; + i_outstanding_local_requests = IdMap.empty; + i_outstanding_local_handlers = IdMap.empty; + i_outstanding_requests_from_server = WrappedMap.empty; + i_isConnected = false; + i_status = Never_shown; + i_open_files = SMap.empty; + i_errors = LspErrors.empty; + } + in + FlowInteractionLogger.set_server_config + ~flowconfig_name + ~root:(Path.to_string i_root) + ~root_name:(FlowConfig.root_name flowconfig); + + (* If the version in .flowconfig is simply incompatible with our current *) + (* binary then it doesn't even make sense for us to start up. And future *) + (* attempts by the client to launch us will fail as well. Clients which *) + (* receive the following response are expected to shut down their LSP. 
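(* [illustrative sketch -- not part of this change] A toy stand-in for the check performed
   just below: it only understands a bare "major.minor.patch" lower bound, whereas the real
   CommandUtils.check_version accepts the richer semver-style ranges permitted by
   .flowconfig's [version] section. *)
let parse_version (s : string) : int list =
  String.split_on_char '.' s |> List.map int_of_string

let check_minimum_version ~(running : string) ~(required : string) : (unit, string) result =
  if compare (parse_version running) (parse_version required) >= 0 then
    Ok ()
  else
    Error
      (Printf.sprintf
         "Wrong version of Flow. The config requires at least %s but this is %s"
         required
         running)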
*) + let required_version = FlowConfig.required_version flowconfig in + begin + match CommandUtils.check_version required_version with + | Ok () -> () + | Error msg -> raise (Error.ServerErrorStart (msg, { Initialize.retry = false })) + end; + let response = ResponseMessage (id, InitializeResult (do_initialize ())) in + let json = Lsp_fmt.print_lsp response in + to_stdout json; + let env = { d_ienv; d_autostart = true; d_server_status = None } in + Ok (try_connect flowconfig_name env, LogNeeded metadata) + | (_, Client_message (NotificationMessage InitializedNotification, _metadata)) -> + Ok (state, LogNotNeeded) + | (_, Client_message (NotificationMessage SetTraceNotification, _metadata)) + | (_, Client_message (NotificationMessage LogTraceNotification, _metadata)) -> + (* specific to VSCode logging *) + Ok (state, LogNotNeeded) + | (_, Client_message (RequestMessage (id, ShutdownRequest), _metadata)) -> + begin + match state with + | Connected env -> close_conn env + | _ -> () + end; + let response = ResponseMessage (id, ShutdownResult) in + let json = Lsp_fmt.print_lsp response in + to_stdout json; + Ok (Post_shutdown, LogNotNeeded) + | (_, Client_message (NotificationMessage ExitNotification, _metadata)) -> + if state = Post_shutdown then + lsp_exit_ok () + else + lsp_exit_bad () + | (Pre_init _, Client_message _) -> raise (Error.ServerNotInitialized "Server not initialized") + | (_, Client_message ((ResponseMessage (id, result) as c), metadata)) -> + let ienv = + match state with + | Connected env -> env.c_ienv + | Disconnected env -> env.d_ienv + | _ -> failwith "Didn't expect an LSP response yet" + in + begin + try + (* was it a response to a request issued by lspCommand? *) + let (handle, handle_error) = IdMap.find id ienv.i_outstanding_local_handlers in + let i_outstanding_local_handlers = IdMap.remove id ienv.i_outstanding_local_handlers in + let i_outstanding_local_requests = IdMap.remove id ienv.i_outstanding_local_requests in + let ienv = { ienv with i_outstanding_local_handlers; i_outstanding_local_requests } in + let state = + match state with + | Connected env -> Connected { env with c_ienv = ienv } + | Disconnected env -> Disconnected { env with d_ienv = ienv } + | _ -> failwith "Didn't expect an LSP response to be found yet" + in + match (result, handle) with + | (ShowMessageRequestResult result, ShowMessageHandler handle) -> + Ok (handle result state, LogNotNeeded) + | (ShowStatusResult result, ShowStatusHandler handle) -> + Ok (handle result state, LogNotNeeded) + | (ErrorResult (e, msg), _) -> Ok (handle_error (e, msg) state, LogNotNeeded) + | _ -> + failwith (Printf.sprintf "Response %s has mistyped handler" (message_name_to_string c)) + with Not_found -> + (* if not, it must be a response to a request issued by the server *) + (match state with + | Connected cenv -> + let (state, _) = track_to_server state c in + let wrapped = decode_wrapped id in + (* only forward responses if they're to current server *) + if wrapped.server_id = cenv.c_ienv.i_server_id then send_lsp_to_server cenv metadata c; + Ok (state, LogNotNeeded) + | _ -> + failwith (Printf.sprintf "Response %s has missing handler" (message_name_to_string c))) + end + | (_, Client_message (RequestMessage (id, DocumentSymbolRequest params), metadata)) -> + (* documentSymbols is handled in the client, not the server, since it's *) + (* purely syntax-driven and we'd like it to work even if the server is *) + (* busy or disconnected *) + let interaction_id = start_interaction 
~trigger:LspInteraction.DocumentSymbol state in + let state = do_documentSymbol flowconfig_name state id params in + log_interaction ~ux:LspInteraction.Responded state interaction_id; + Ok (state, LogNeeded metadata) + | (Connected cenv, Client_message (c, metadata)) -> + let trigger = LspInteraction.trigger_of_lsp_msg c in + let interaction_tracking_id = + Option.map trigger ~f:(fun trigger -> start_interaction ~trigger state) + in + (* We'll track what's being sent to the server. This might involve some client *) + (* computation work, which we'll profile, and send it over in metadata. *) + (* Note: in the case where c is a cancel-notification for a request that *) + (* was already handled in lspCommand like ShutdownRequest or DocSymbolsRequest *) + (* we'll still forward it; that's okay since server already has to be *) + (* hardened against unrecognized ids in cancel requests. *) + let (client_duration, state) = + with_timer (fun () -> + let (state, { changed_live_uri }) = track_to_server state c in + let state = + Option.value_map + changed_live_uri + ~default:state + ~f:(do_live_diagnostics flowconfig_name state trigger metadata) + in + state) + in + let metadata = + { metadata with LspProt.client_duration = Some client_duration; interaction_tracking_id } + in + send_lsp_to_server cenv metadata c; + Ok (state, LogDeferred) + | (_, Client_message (RequestMessage (id, RageRequest), metadata)) -> + (* How to handle a rage request? If we're connected to a server, then the *) + (* above case will just have forwarded the message on to the server (and *) + (* we'll patch in our own extra information when the server replies). But *) + (* if there's no server then we have to reply here and now. *) + let result = do_rage flowconfig_name state in + let response = ResponseMessage (id, RageResult result) in + let json = Lsp_fmt.print_lsp response in + to_stdout json; + Ok (state, LogNeeded metadata) + | (_, Client_message ((NotificationMessage (DidOpenNotification _) as c), metadata)) + | (_, Client_message ((NotificationMessage (DidChangeNotification _) as c), metadata)) + | (_, Client_message ((NotificationMessage (DidSaveNotification _) as c), metadata)) + | (_, Client_message ((NotificationMessage (DidCloseNotification _) as c), metadata)) -> + let trigger = LspInteraction.trigger_of_lsp_msg c in + let interaction_id = Option.map trigger ~f:(fun trigger -> start_interaction ~trigger state) in + (* these are editor events that happen while disconnected. 
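(* [illustrative sketch -- not part of this change] The reason these notifications are still
   tracked with no server attached: when a server does appear, try_connect above replays one
   synthetic didOpen per tracked file (logged under "synthetic/open"). Reduced to the
   stdlib, with documents as plain text. *)
module OpenFilesSketch = Map.Make (String)

type synthetic_did_open = { uri : string; text : string }

let replay_open_files ~(send : synthetic_did_open -> unit) (open_files : string OpenFilesSketch.t)
    : unit =
  OpenFilesSketch.iter (fun uri text -> send { uri; text }) open_files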
*) + let (client_duration, state) = + with_timer (fun () -> + let (state, { changed_live_uri }) = track_to_server state c in + let state = + Option.value_map + changed_live_uri + ~default:state + ~f:(do_live_diagnostics flowconfig_name state trigger metadata) + in + state) + in + (* TODO - In the future if we start running check-contents on DidChange, we should probably + * log Errored instead of Responded for that one *) + Option.iter interaction_id ~f:(log_interaction ~ux:LspInteraction.Responded state); + let metadata = { metadata with LspProt.client_duration = Some client_duration } in + Ok (state, LogNeeded metadata) + | (_, Client_message (NotificationMessage (CancelRequestNotification _), _metadata)) -> + (* let's just not bother reporting any error in this case *) + Ok (state, LogNotNeeded) + | (Disconnected _, Client_message (c, _metadata)) -> + let interaction_id = + LspInteraction.trigger_of_lsp_msg c + |> Option.map ~f:(fun trigger -> start_interaction ~trigger state) + in + let (state, _) = track_to_server state c in + let method_ = Lsp_fmt.denorm_message_to_string c in + let e = Error.RequestCancelled ("Server not connected; can't handle " ^ method_) in + Option.iter interaction_id ~f:(log_interaction ~ux:LspInteraction.Errored state); + let stack = Exception.get_current_callstack_string 100 in + Error (state, e, Utils.Callstack stack) + | (Post_shutdown, Client_message (_, _metadata)) -> + raise (Error.RequestCancelled "Server shutting down") + | (Connected cenv, Server_message LspProt.(NotificationFromServer (ServerExit exit_code))) -> + let state = Connected { cenv with c_about_to_exit_code = Some exit_code } in + Ok (state, LogNotNeeded) + | (Connected cenv, Server_message LspProt.(RequestResponse (LspFromServer msg, metadata))) -> + let (state, metadata, ux) = + match msg with + | None -> (state, metadata, LspInteraction.Responded) + | Some outgoing -> + let state = track_from_server state outgoing in + let (outgoing, metadata, ux) = + match outgoing with + | RequestMessage (id, request) -> + let wrapped = { server_id = cenv.c_ienv.i_server_id; message_id = id } in + (RequestMessage (encode_wrapped wrapped, request), metadata, LspInteraction.Responded) + | ResponseMessage (id, RageResult items) -> + (* we'll zero out the "client_duration", which at the moment represents client-side *) + (* work we did before sending out the request. By zeroing it out now, it'll get *) + (* filled out with the client-side work that gets done right here and now. 
*) + let metadata = { metadata with LspProt.client_duration = None } in + let ux = LspInteraction.Responded in + (ResponseMessage (id, RageResult (items @ do_rage flowconfig_name state)), metadata, ux) + | ResponseMessage (_, ErrorResult (e, _)) -> + let ux = + LspInteraction.( + if e.Error.code = Error.Code.requestCancelled then + Canceled + else + Errored) + in + (outgoing, metadata, ux) + | _ -> (outgoing, metadata, LspInteraction.Responded) + in + let outgoing = selectively_omit_errors LspProt.(metadata.lsp_method_name) outgoing in + to_stdout (Lsp_fmt.print_lsp ~include_error_stack_trace:false outgoing); + (state, metadata, ux) + in + Option.iter metadata.LspProt.interaction_tracking_id ~f:(log_interaction ~ux state); + Ok (state, LogNeeded metadata) + | ( Connected cenv, + Server_message LspProt.(NotificationFromServer (Errors { errors; warnings; errors_reason })) + ) -> + (* A note about the errors reported by this server message: *) + (* While a recheck is in progress, between StartRecheck and EndRecheck, *) + (* the server will periodically send errors+warnings. These are additive *) + (* to the errors which have previously been reported. Once the recheck *) + (* has finished then the server will send a new exhaustive set of errors. *) + (* At this opportunity we should erase all errors not in this set. *) + (* This differs considerably from the semantics of LSP publishDiagnostics *) + (* which says "whenever you send publishDiagnostics for a file, that *) + (* now contains the complete truth for that file." *) + + (* I hope that flow won't produce errors with an empty path. But such errors are *) + (* fatal to Nuclide, so if it does, then we'll at least use a fall-back path. *) + let default_uri = cenv.c_ienv.i_root |> Path.to_string |> File_url.create in + (* 'all' is an SMap from uri to diagnostic list, and 'add' appends the error within the map *) + let add severity error all = + let (uri, diagnostic) = error_to_lsp ~severity ~default_uri error in + SMap.add ~combine:List.append uri [diagnostic] all + in + (* First construct an SMap from uri to diagnostic list, which gathers together *) + (* all the errors and warnings per uri *) + let all = + SMap.empty + |> Errors.ConcreteLocPrintableErrorSet.fold (add (Some PublishDiagnostics.Error)) errors + |> Errors.ConcreteLocPrintableErrorSet.fold (add (Some PublishDiagnostics.Warning)) warnings + in + let () = + let end_state = collect_interaction_state state in + LspInteraction.log_pushed_errors ~end_state ~errors_reason + in + let state = + Connected cenv + |> update_errors + ( if cenv.c_is_rechecking then + LspErrors.add_streamed_server_errors_and_send to_stdout all + else + LspErrors.set_finalized_server_errors_and_send to_stdout all ) + in + Ok (state, LogNotNeeded) + | (Connected cenv, Server_message LspProt.(NotificationFromServer StartRecheck)) -> + let start_state = collect_interaction_state state in + LspInteraction.recheck_start ~start_state; + let state = show_recheck_progress { cenv with c_is_rechecking = true; c_lazy_stats = None } in + Ok (state, LogNotNeeded) + | (Connected cenv, Server_message LspProt.(NotificationFromServer (EndRecheck lazy_stats))) -> + let state = + show_recheck_progress { cenv with c_is_rechecking = false; c_lazy_stats = Some lazy_stats } + in + Ok (state, LogNotNeeded) + | (Connected cenv, Server_message LspProt.(NotificationFromServer (Please_hold status))) -> + let (server_status, watcher_status) = status in + let c_server_status = (server_status, Some watcher_status) in + (* We keep a log of 
typecheck summaries over the past 2mins. *) + let c_recent_summaries = cenv.c_recent_summaries in + let new_time = Unix.gettimeofday () in + let summary = ServerStatus.get_summary server_status in + let c_recent_summaries = + Option.value_map summary ~default:c_recent_summaries ~f:(fun summary -> + (new_time, summary) :: cenv.c_recent_summaries + |> List.filter ~f:(fun (t, _) -> t >= new_time -. 120.0)) + in + let state = show_recheck_progress { cenv with c_server_status; c_recent_summaries } in + Ok (state, LogNotNeeded) + | (_, Server_message _) -> + failwith + (Printf.sprintf + "In state %s, unexpected event %s" + (string_of_state state) + (denorm_string_of_event event)) + | (Disconnected env, Tick) -> + let state = try_connect flowconfig_name env in + Ok (state, LogNotNeeded) + | (_, Tick) -> Ok (state, LogNotNeeded) + +and main_log_command (state : state) (metadata : LspProt.metadata) : unit = + LspProt.( + let client_context = FlowEventLogger.get_context () in + let request = metadata.start_json_truncated |> Hh_json.json_to_string in + let wall_start = metadata.start_wall_time in + let server_profiling = metadata.server_profiling in + let client_duration = metadata.client_duration in + let extra_data = metadata.extra_data in + let persistent_context = + Some + { + FlowEventLogger.start_lsp_state = metadata.start_lsp_state; + start_lsp_state_reason = metadata.start_lsp_state_reason; + start_server_status = + Option.map metadata.start_server_status ~f:(ServerStatus.string_of_status ~terse:true); + start_watcher_status = + Option.map metadata.start_watcher_status ~f:FileWatcherStatus.string_of_status; + } + in + let server_logging_context = metadata.server_logging_context in + (* gather any recent typechecks that finished after the request had arrived *) + let delays = + match state with + | Connected cenv -> + Core_list.filter_map cenv.c_recent_summaries ~f:(fun (t, s) -> + if t > wall_start then + Some s + else + None) + | _ -> [] + in + let root = Option.value ~default:Path.dummy_path (get_root state) in + let persistent_delay = + if delays = [] then + None + else + Some (ServerStatus.log_of_summaries ~root delays) + in + match metadata.error_info with + | None -> + FlowEventLogger.persistent_command_success + ~server_logging_context + ~request + ~extra_data + ~client_context + ~persistent_context + ~persistent_delay + ~server_profiling + ~client_duration + ~wall_start + ~error:None + | Some (ExpectedError, msg, stack) -> + FlowEventLogger.persistent_command_success + ~server_logging_context + ~request + ~extra_data + ~client_context + ~persistent_context + ~persistent_delay + ~server_profiling + ~client_duration + ~wall_start + ~error:(Some (msg, stack)) + | Some (UnexpectedError, msg, stack) -> + FlowEventLogger.persistent_command_failure + ~server_logging_context + ~request + ~extra_data + ~client_context + ~persistent_context + ~persistent_delay + ~server_profiling + ~client_duration + ~wall_start + ~error:(msg, stack)) + +and main_log_error ~(expected : bool) (msg : string) (stack : string) (event : event option) : unit + = + let error = (msg, Utils.Callstack stack) in + let client_context = FlowEventLogger.get_context () in + let request = + match event with + | Some (Client_message (_, metadata)) -> + Some (metadata.LspProt.start_json_truncated |> Hh_json.json_to_string) + | Some (Server_message _) + | Some Tick + | None -> + None + in + match expected with + | true -> FlowEventLogger.persistent_expected_error ~request ~client_context ~error + | false -> 
FlowEventLogger.persistent_unexpected_error ~request ~client_context ~error + +and main_handle_error (e : exn) (Utils.Callstack stack) (state : state) (event : event option) : + state = + Marshal_tools.( + match e with + | Server_fatal_connection_exception _edata when state = Post_shutdown -> state + | Server_fatal_connection_exception edata -> + (* log the error *) + let stack = edata.stack ^ "---\n" ^ stack in + main_log_error ~expected:true ("[Server fatal] " ^ edata.message) stack event; + + (* report that we're disconnected to telemetry/connectionStatus *) + let state = + match state with + | Connected env -> + let i_isConnected = + Lsp_helpers.notify_connectionStatus + env.c_ienv.i_initialize_params + to_stdout + env.c_ienv.i_isConnected + false + in + let env = { env with c_ienv = { env.c_ienv with i_isConnected } } in + Connected env + | _ -> state + in + (* send the error report *) + let code = + match state with + | Connected cenv -> cenv.c_about_to_exit_code + | _ -> None + in + let code = Option.value_map code ~f:FlowExitStatus.to_string ~default:"" in + let report = Printf.sprintf "Server fatal exception: [%s] %s\n%s" code edata.message stack in + Lsp_helpers.telemetry_error to_stdout report; + let (d_autostart, d_ienv) = + match state with + | Connected { c_ienv; c_about_to_exit_code; _ } + when c_about_to_exit_code = Some FlowExitStatus.Flowconfig_changed + || c_about_to_exit_code = Some FlowExitStatus.Server_out_of_date -> + (* we allow at most one autostart_after_version_mismatch per *) + (* instance so as to avoid getting into version battles. *) + let previous = c_ienv.i_can_autostart_after_version_mismatch in + let d_ienv = { c_ienv with i_can_autostart_after_version_mismatch = false } in + (previous, d_ienv) + | Connected { c_ienv; _ } -> (false, c_ienv) + | Disconnected { d_ienv; _ } -> (false, d_ienv) + | Pre_init _ + | Post_shutdown -> + failwith "Unexpected server error in inapplicable state" + (* crash *) + in + let env = { d_ienv; d_autostart; d_server_status = None } in + let _state = state |> dismiss_tracks in + let state = Disconnected env in + state + | Client_recoverable_connection_exception edata -> + let stack = edata.stack ^ "---\n" ^ stack in + main_log_error ~expected:true ("[Client recoverable] " ^ edata.message) stack event; + let report = Printf.sprintf "Client exception: %s\n%s" edata.message stack in + Lsp_helpers.telemetry_error to_stdout report; + state + | Client_fatal_connection_exception edata -> + let stack = edata.stack ^ "---\n" ^ stack in + main_log_error ~expected:true ("[Client fatal] " ^ edata.message) stack event; + let report = Printf.sprintf "Client fatal exception: %s\n%s" edata.message stack in + Printf.eprintf "%s" report; + lsp_exit_bad () + | e -> + let e = Lsp_fmt.error_of_exn e in + main_log_error ~expected:true ("[FlowLSP] " ^ e.Error.message) stack event; + let text = + Printf.sprintf "FlowLSP exception %s [%i]\n%s" e.Error.message e.Error.code stack + in + let () = + match event with + | Some (Client_message (RequestMessage (id, _request), _metadata)) -> + let json = Lsp_fmt.print_lsp_response id (ErrorResult (e, stack)) in + to_stdout json + | _ -> Lsp_helpers.telemetry_error to_stdout text + in + state) diff --git a/src/lsp/flowLsp.mli b/src/lsp/flowLsp.mli new file mode 100644 index 00000000000..c94a7a916d1 --- /dev/null +++ b/src/lsp/flowLsp.mli @@ -0,0 +1,8 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +val run : flowconfig_name:string -> connect_params:CommandUtils.connect_params -> unit diff --git a/src/lsp/lspErrors.ml b/src/lsp/lspErrors.ml new file mode 100644 index 00000000000..24fa1ba79a0 --- /dev/null +++ b/src/lsp/lspErrors.ml @@ -0,0 +1,357 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Lsp +module List = Core_list + +(* This module is how the Flow lsp stores and reasons about Flow errors. This is tricky because + * Flow errors might come from a few different places. + * + * Here's our general strategy: + * + * 1. If we get streamed server errors for a file, then we either + * 1a. Add them to the known server errors for the file, if we previously had streamed errors for + * that file + * 1b. Replace the known server errors for the file, if we previously had finalized server errors + * (e.g. from after the last recheck) + * 2. If we have live parse errors for a file (e.g. from running the parser locally on an open file) + * then we replace the server's parse errors for that file with the live parse errors + * 3. If we have live non-parse errors for a file (e.g. from check-contents for an open file) + * then we replace the server's non-parse errors for that file with the live non-parse errors. + *) +type errors = PublishDiagnostics.diagnostic list + +type parse_errors = ParseErrors of errors + +type non_parse_errors = NonParseErrors of errors + +type split_errors = parse_errors * non_parse_errors + +(* Server errors are the main errors that the Flow server reports to us. They come in two flavors *) +type server_errors = + (* Streamed errors are sent down during rechecking. They are not necessarily complete yet. + * TODO: Guarantee that the streamed errors are complete. That once we stream down errors for + * foo.js, that we'll never be streamed any more errors for foo.js during this recheck *) + | Streamed of split_errors + (* Finalized errors are sent after rechecking. This is a complete list of errors for this file. *) + | Finalized of split_errors + +type per_file_errors = { + (* Live parse errors come from the lsp process running the parser on an open file. `None` means + * we haven't run the parser on this file. `Some []` means we have run the parser but it said + * there were 0 errors *) + live_parse_errors: parse_errors option; + (* Live non-parse errors come from us running check-contents on open files. `None` means we + * haven't run check-contents on this file. `Some []` means we have run check-contents and it + * found that there were 0 errors *) + live_non_parse_errors: non_parse_errors option; + (* Server errors come from the server. Duh. 
*) + server_errors: server_errors; +} + +type t = { + dirty_files: SSet.t; + (* The set of files for which we must update the IDE *) + file_to_errors_map: per_file_errors SMap.t; +} + +let empty_per_file_errors = + { + live_parse_errors = None; + live_non_parse_errors = None; + server_errors = Finalized (ParseErrors [], NonParseErrors []); + } + +(* Returns true if we don't know about any errors for this file *) +let file_has_no_errors = function + | { + live_parse_errors = None | Some (ParseErrors []); + live_non_parse_errors = None | Some (NonParseErrors []); + server_errors = + Streamed (ParseErrors [], NonParseErrors []) | Finalized (ParseErrors [], NonParseErrors []); + } -> + true + | _ -> false + +let empty = { dirty_files = SSet.empty; file_to_errors_map = SMap.empty } + +(* For the most part we don't sort errors, and leave it to the server and the IDE to figure that + * out. The one exception is in limit_errors, to ensure consistent results *) +let sort_errors = + PublishDiagnostics.( + List.sort ~cmp:(fun d1 d2 -> Lsp_helpers.pos_compare d1.range.start d2.range.start)) + +(* If we have too many errors then limit them to the first N errors *) +let limit_errors errors = + let cap = 200 in + (* List.nth is O(cap) instead of O(n) *) + let is_below_cap = List.nth errors cap = None in + if is_below_cap then + (* avoid O(nlogn) sort in this case *) + errors + else + (* Sort to make sure we're always sending the same errors *) + let errors = sort_errors errors in + let (retain, discard) = List.split_n errors cap in + match discard with + | [] -> retain + | discard -> + let discard_count = List.length discard in + let message = Printf.sprintf "[Only showing %i/%i diagnostics]" cap (cap + discard_count) in + let diagnostic = + PublishDiagnostics. + { + (* the following range displays fine in all editors, regardless of contents *) + range = { start = { line = 0; character = 0 }; end_ = { line = 0; character = 0 } }; + severity = Some PublishDiagnostics.Information; + code = NoCode; + source = Some "Flow"; + message; + relatedInformation = []; + relatedLocations = []; + } + in + diagnostic :: retain + +let is_parse_error = + let parse_code = Errors.string_of_kind Errors.ParseError in + (fun d -> d.PublishDiagnostics.code = PublishDiagnostics.StringCode parse_code) + +let is_not_parse_error d = not (is_parse_error d) + +let split errors = + let (parse_errors, non_parse_errors) = List.partition_tf errors is_parse_error in + (ParseErrors parse_errors, NonParseErrors non_parse_errors) + +let choose_errors + { + live_parse_errors; + live_non_parse_errors; + server_errors = Streamed server_errors | Finalized server_errors; + } = + let (ParseErrors server_parse_errors, NonParseErrors server_non_parse_errors) = server_errors in + (* Prefer live parse errors over server parse errors *) + let parse_errors = + match live_parse_errors with + | None -> server_parse_errors + | Some (ParseErrors live_parse_errors) -> live_parse_errors + in + let non_parse_errors = + match live_non_parse_errors with + | None -> server_non_parse_errors + | Some (NonParseErrors live_non_parse_errors) -> live_non_parse_errors + in + (parse_errors, non_parse_errors) + +let have_errors_changed before after = + let (before_parse_errors, before_non_parse_errors) = choose_errors before in + let (after_parse_errors, after_non_parse_errors) = choose_errors after in + (* Structural equality for fast comparison. 
Will never get false negative *) + before_parse_errors != after_parse_errors || before_non_parse_errors != after_non_parse_errors + +(* We need to send the errors for this file. This is when we need to decide exactly which errors to + * send. *) +let send_errors_for_file state (send_json : Hh_json.json -> unit) uri = + let (parse_errors, non_parse_errors) = + SMap.get uri state.file_to_errors_map + |> Option.value ~default:empty_per_file_errors + |> choose_errors + in + let errors = parse_errors @ non_parse_errors in + let diagnostics = limit_errors errors in + PublishDiagnosticsNotification { PublishDiagnostics.uri; diagnostics } + |> Lsp_fmt.print_lsp_notification + |> send_json + +(* For every dirty file (files for which the client likely has out-of-date errors), send the errors + * to the client *) +let send_all_errors send_json state = + let dirty_files = state.dirty_files in + let state = { state with dirty_files = SSet.empty } in + SSet.iter (send_errors_for_file state send_json) dirty_files; + state + +(* Helper function to modify the data for a specific file *) +let modify_per_file_errors uri state f = + let old_per_file_errors = + SMap.get uri state.file_to_errors_map |> Option.value ~default:empty_per_file_errors + in + let new_per_file_errors = f old_per_file_errors in + let dirty = have_errors_changed old_per_file_errors new_per_file_errors in + (* To keep this data structure small, let's filter out files with no live or server errors *) + let file_to_errors_map = + if file_has_no_errors new_per_file_errors then + SMap.remove uri state.file_to_errors_map + else + SMap.add uri new_per_file_errors state.file_to_errors_map + in + { + dirty_files = + ( if dirty then + SSet.add uri state.dirty_files + else + state.dirty_files ); + file_to_errors_map; + } + +(* Helper function to modify the server errors for a specific file *) +let modify_server_errors uri new_errors state f = + modify_per_file_errors uri state (fun per_file_errors -> + let (new_parse_errors, new_non_parse_errors) = split new_errors in + let new_server_errors = + f per_file_errors.server_errors (new_parse_errors, new_non_parse_errors) + in + { per_file_errors with server_errors = new_server_errors }) + +(* We've parsed a file locally and now want to record the number of parse errors for this file *) +let set_live_parse_errors_and_send send_json uri live_parse_errors state = + (* If the caller passes in some non-parse errors then we'll just ignore them *) + let live_parse_errors = List.filter live_parse_errors ~f:is_parse_error in + modify_per_file_errors uri state (fun per_file_errors -> + { per_file_errors with live_parse_errors = Some (ParseErrors live_parse_errors) }) + |> send_all_errors send_json + +(* We've run check-contents on a modified open file and now want to record the errors reported by + * check-contents *) +let set_live_non_parse_errors_and_send send_json uri_to_live_error_map state = + SMap.fold + (fun uri live_non_parse_errors state -> + (* If the caller passes in some parse errors then we'll just ignore them *) + let live_non_parse_errors = List.filter live_non_parse_errors ~f:is_not_parse_error in + modify_per_file_errors uri state (fun per_file_errors -> + { + per_file_errors with + live_non_parse_errors = Some (NonParseErrors live_non_parse_errors); + })) + uri_to_live_error_map + state + |> send_all_errors send_json + +(* When we close a file we clear all the live parse errors or non-parse errors for that file, but we + * keep around the server errors *) +let clear_all_live_errors_and_send 
send_json uri state =
+  modify_per_file_errors uri state (fun per_file_errors ->
+      { per_file_errors with live_parse_errors = None; live_non_parse_errors = None })
+  |> send_all_errors send_json
+
+(* my_list @ [] returns a list which is no longer physically identical to my_list. This is a
+ * workaround *)
+let append list_a list_b =
+  match (list_a, list_b) with
+  | ([], []) ->
+    [] (* [] is always physically equal to other []. Technically this rule isn't needed *)
+  | ([], _) -> list_b
+  | (_, []) -> list_a
+  | _ -> list_a @ list_b
+
+(* During recheck we stream in errors from the server. These will replace finalized server errors
+ * from a previous recheck or add to streamed server errors from this recheck *)
+let add_streamed_server_errors_and_send send_json uri_to_error_map state =
+  SMap.fold
+    (fun uri new_errors_unsplit state ->
+      modify_server_errors uri new_errors_unsplit state (fun server_errors new_errors ->
+          match server_errors with
+          | Finalized _ ->
+            (* When a recheck streams in new errors, we stop showing the old finalized errors *)
+            Streamed new_errors
+          | Streamed existing_errors ->
+            (* Streamed errors are additive *)
+            let (ParseErrors existing_parse_errors, NonParseErrors existing_non_parse_errors) =
+              existing_errors
+            in
+            let (ParseErrors new_parse_errors, NonParseErrors new_non_parse_errors) = new_errors in
+            Streamed
+              ( ParseErrors (append existing_parse_errors new_parse_errors),
+                NonParseErrors (append existing_non_parse_errors new_non_parse_errors) )))
+    uri_to_error_map
+    state
+  |> send_all_errors send_json
+
+(* After recheck we get all the errors from the server. This replaces whatever server errors we
+ * already had. *)
+let set_finalized_server_errors_and_send send_json uri_to_error_map state =
+  let (state, files_with_new_errors) =
+    SMap.fold
+      (fun uri new_errors_unsplit (state, files_with_new_errors) ->
+        let state =
+          modify_server_errors uri new_errors_unsplit state (fun _ new_errors ->
+              (* At the end of the recheck, the finalized errors will replace either the errors from
+               * the previous recheck or the streamed errors *)
+              Finalized new_errors)
+        in
+        let files_with_new_errors = SSet.add uri files_with_new_errors in
+        (state, files_with_new_errors))
+      uri_to_error_map
+      (state, SSet.empty)
+  in
+  (* All the errors in uri_to_error_map have been added to state. But uri_to_error_map doesn't
+   * include files which used to have >0 errors but now have 0 errors. So we need to go through
+   * every file that used to have errors and clear them out *)
+  SMap.fold
+    (fun uri _ state ->
+      if SSet.mem uri files_with_new_errors then
+        state
+      else
+        modify_server_errors uri [] state (fun _ cleared_errors -> Finalized cleared_errors))
+    state.file_to_errors_map
+    state
+  |> send_all_errors send_json
+
+(* When the Flow server dies, LSP must clear all the errors.
+ * TODO: Don't clear live parse errors.
Those don't require the server, so we can still keep + * providing them *) +let clear_all_errors_and_send send_json state = + SMap.fold + (fun uri _ state -> modify_per_file_errors uri state (fun _ -> empty_per_file_errors)) + state.file_to_errors_map + state + |> send_all_errors send_json + +(* Basically a best-effort attempt to update the locations of errors after a didChange *) +let update_errors_due_to_change_and_send send_json params state = + let uri = params.DidChange.textDocument.VersionedTextDocumentIdentifier.uri in + modify_per_file_errors uri state (fun per_file_errors -> + let { live_parse_errors; live_non_parse_errors; server_errors } = per_file_errors in + let live_parse_errors = + match live_parse_errors with + | None + | Some (ParseErrors []) -> + live_parse_errors + | Some (ParseErrors live_parse_errors) -> + Some + (ParseErrors (Lsp_helpers.update_diagnostics_due_to_change live_parse_errors params)) + in + let live_non_parse_errors = + match live_non_parse_errors with + | None + | Some (NonParseErrors []) -> + live_non_parse_errors + | Some (NonParseErrors live_non_parse_errors) -> + Some + (NonParseErrors + (Lsp_helpers.update_diagnostics_due_to_change live_non_parse_errors params)) + in + let server_errors = + match server_errors with + | Streamed (ParseErrors [], NonParseErrors []) + | Finalized (ParseErrors [], NonParseErrors []) -> + server_errors + | Streamed (ParseErrors parse_errors, NonParseErrors non_parse_errors) -> + let parse_errors = Lsp_helpers.update_diagnostics_due_to_change parse_errors params in + let non_parse_errors = + Lsp_helpers.update_diagnostics_due_to_change non_parse_errors params + in + Streamed (ParseErrors parse_errors, NonParseErrors non_parse_errors) + | Finalized (ParseErrors parse_errors, NonParseErrors non_parse_errors) -> + let parse_errors = Lsp_helpers.update_diagnostics_due_to_change parse_errors params in + let non_parse_errors = + Lsp_helpers.update_diagnostics_due_to_change non_parse_errors params + in + Finalized (ParseErrors parse_errors, NonParseErrors non_parse_errors) + in + { live_parse_errors; live_non_parse_errors; server_errors }) + |> send_all_errors send_json diff --git a/src/lsp/lspErrors.mli b/src/lsp/lspErrors.mli new file mode 100644 index 00000000000..6260e2b5d11 --- /dev/null +++ b/src/lsp/lspErrors.mli @@ -0,0 +1,29 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type t + +val empty : t + +val set_live_parse_errors_and_send : + (Hh_json.json -> unit) -> string -> Lsp.PublishDiagnostics.diagnostic list -> t -> t + +val set_live_non_parse_errors_and_send : + (Hh_json.json -> unit) -> Lsp.PublishDiagnostics.diagnostic list SMap.t -> t -> t + +val add_streamed_server_errors_and_send : + (Hh_json.json -> unit) -> Lsp.PublishDiagnostics.diagnostic list SMap.t -> t -> t + +val set_finalized_server_errors_and_send : + (Hh_json.json -> unit) -> Lsp.PublishDiagnostics.diagnostic list SMap.t -> t -> t + +val update_errors_due_to_change_and_send : + (Hh_json.json -> unit) -> Lsp.DidChange.didChangeTextDocumentParams -> t -> t + +val clear_all_live_errors_and_send : (Hh_json.json -> unit) -> string -> t -> t + +val clear_all_errors_and_send : (Hh_json.json -> unit) -> t -> t diff --git a/src/lsp/lspInteraction.ml b/src/lsp/lspInteraction.ml new file mode 100644 index 00000000000..9fc51818e3c --- /dev/null +++ b/src/lsp/lspInteraction.ml @@ -0,0 +1,334 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module List = Core_list +open LspProt + +(* Each interaction gets a unique id. *) +type id = int + +(* What initiated this interaction *) +type trigger = + | CodeAction + | Completion + | Definition + | DidChange + | DidClose + | DidOpen + | DidSave + | DocumentHighlight + | DocumentSymbol + | FindReferences + | Hover + | PushedErrorsEndOfRecheck of recheck_reason + | PushedErrorsEnvChange + | PushedErrorsNewSubscription + | PushedErrorsRecheckStreaming of recheck_reason + | Rage + | Rename + | TypeCoverage + | UnknownTrigger + +(* Source of the trigger *) +type source = + | Server + | Client + | UnknownSource + +(* What was the result of this interaction. *) +type ux = + | Canceled + | Errored + | ErroredPushingLiveParseErrors + | PushedErrors + | PushedLiveParseErrors + | Responded + | Timeout + +(* What was the Flow server doing at a specific point in time *) +type server_status = + | Stopped + | Initializing + | Rechecking + | Ready + +(* What were the IDE's buffers like at a specific point in time *) +type buffer_status = + | NoOpenBuffers + | NoUnsavedBuffers + | UnsavedBuffers + +(* A snapshot of the state of the world at a specific point in time. 
We record this at the start + * and end of an interaction *) +type state = { + time: float; + server_status: server_status; + buffer_status: buffer_status; +} + +let string_of_trigger = function + | CodeAction -> "codeAction" + | Completion -> "completion" + | Definition -> "definition" + | DidChange -> "didChange" + | DidClose -> "didClose" + | DidOpen -> "didOpen" + | DidSave -> "didSave" + | DocumentHighlight -> "documentHighlight" + | DocumentSymbol -> "documentSymbol" + | FindReferences -> "findReferences" + | Hover -> "hover" + | PushedErrorsEndOfRecheck recheck_reason -> + Printf.sprintf "endOfRecheck/%s" (normalized_string_of_recheck_reason recheck_reason) + | PushedErrorsRecheckStreaming recheck_reason -> + Printf.sprintf "recheckStreaming/%s" (normalized_string_of_recheck_reason recheck_reason) + | PushedErrorsEnvChange -> "envChange" + | PushedErrorsNewSubscription -> "newSubscription" + | Rage -> "Rage" + | Rename -> "Rename" + | TypeCoverage -> "TypeCoverage" + | UnknownTrigger -> "UnknownTrigger" + +let string_of_ux = function + | Canceled -> "Canceled" + | Errored -> "Errored" + | ErroredPushingLiveParseErrors -> "ErroredPushingLiveParseErrors" + | PushedErrors -> "PushedErrors" + | PushedLiveParseErrors -> "PushedLiveParseErrors" + | Responded -> "Responded" + | Timeout -> "Timeout" + +let string_of_server_status = function + | Stopped -> "Stopped" + | Initializing -> "Initializing" + | Rechecking -> "Rechecking" + | Ready -> "Ready" + +let string_of_buffer_status = function + | NoOpenBuffers -> "NoOpenBuffers" + | NoUnsavedBuffers -> "NoUnsavedBuffers" + | UnsavedBuffers -> "UnsavedBuffers" + +let source_of_trigger = function + | CodeAction + | Completion + | Definition + | DidChange + | DidClose + | DidOpen + | DidSave + | DocumentHighlight + | DocumentSymbol + | FindReferences + | Hover + | Rage + | Rename + | TypeCoverage -> + Client + | PushedErrorsEndOfRecheck _ + | PushedErrorsEnvChange + | PushedErrorsNewSubscription + | PushedErrorsRecheckStreaming _ -> + Server + | UnknownTrigger -> UnknownSource + +let string_of_source = function + | Client -> "Client" + | Server -> "Server" + | UnknownSource -> "UnknownSource" + +(* An interaction which has been triggered but which hasn't yet produced the UX for this trigger *) +type pending_interaction = { + start_state: state; + trigger: trigger; +} + +(* The internal state for LspInteraction *) +type internal_state = { + mutable next_id: int; + mutable pending_interactions: pending_interaction IMap.t; + mutable lowest_pending_id: int; + mutable last_recheck_start_state: state option; +} + +let internal_state = + { + next_id = 0; + pending_interactions = IMap.empty; + lowest_pending_id = 0; + last_recheck_start_state = None; + } + +(* Call this to start tracking an interaction *) +let start ~start_state ~trigger = + let id = internal_state.next_id in + internal_state.next_id <- internal_state.next_id + 1; + let interaction = { start_state; trigger } in + internal_state.pending_interactions <- + IMap.add id interaction internal_state.pending_interactions; + id + +(* Call this to note that a recheck has started *) +let recheck_start ~start_state = internal_state.last_recheck_start_state <- Some start_state + +let log ~ux ~trigger ~start_state ~end_state = + FlowInteractionLogger.interaction + ~source:(trigger |> source_of_trigger |> string_of_source) + ~trigger:(trigger |> string_of_trigger) + ~ux:(ux |> string_of_ux) + ~start_time_ms:(start_state.time *. 1000. |> int_of_float) + ~end_time_ms:(end_state.time *. 1000. 
|> int_of_float)
+    ~start_server_status:(start_state.server_status |> string_of_server_status)
+    ~end_server_status:(end_state.server_status |> string_of_server_status)
+    ~start_buffer_status:(start_state.buffer_status |> string_of_buffer_status)
+    ~end_buffer_status:(end_state.buffer_status |> string_of_buffer_status)
+
+(* Most interactions are triggered by the IDE sending a request and the server sending a response.
+ * Those are logged via start & log. However, when we push errors to the client, we log those using
+ * this method. *)
+let log_pushed_errors ~end_state ~errors_reason =
+  let (triggers, start_state) =
+    match errors_reason with
+    | End_of_recheck { recheck_reasons } ->
+      ( List.map recheck_reasons ~f:(fun reason -> PushedErrorsEndOfRecheck reason),
+        Option.value ~default:end_state internal_state.last_recheck_start_state )
+    | Recheck_streaming { recheck_reasons } ->
+      ( List.map recheck_reasons ~f:(fun reason -> PushedErrorsRecheckStreaming reason),
+        Option.value ~default:end_state internal_state.last_recheck_start_state )
+    | Env_change -> ([PushedErrorsEnvChange], end_state)
+    | New_subscription -> ([PushedErrorsNewSubscription], end_state)
+  in
+  List.iter triggers ~f:(fun trigger -> log ~ux:PushedErrors ~trigger ~start_state ~end_state)
+
+let log ~end_state ~ux ~id =
+  Option.iter (IMap.get id internal_state.pending_interactions) ~f:(fun interaction ->
+      internal_state.pending_interactions <- IMap.remove id internal_state.pending_interactions;
+      let { start_state; trigger } = interaction in
+      log ~ux ~trigger ~start_state ~end_state)
+
+let rec gc ~get_state oldest_allowed =
+  let s = internal_state in
+  if s.lowest_pending_id < s.next_id then
+    match IMap.get s.lowest_pending_id s.pending_interactions with
+    | None ->
+      s.lowest_pending_id <- s.lowest_pending_id + 1;
+      gc ~get_state oldest_allowed
+    | Some interaction ->
+      if interaction.start_state.time < oldest_allowed then (
+        log ~end_state:(get_state ()) ~ux:Timeout ~id:s.lowest_pending_id;
+        s.pending_interactions <- IMap.remove s.lowest_pending_id s.pending_interactions;
+        s.lowest_pending_id <- s.lowest_pending_id + 1;
+        gc ~get_state oldest_allowed
+      ) else
+        Some interaction.start_state.time
+  else
+    None
+
+(* If an interaction is over 10 minutes old we'll stop tracking it. *)
+let max_age = 600.0
+
+(* Garbage collect every pending interaction that started more than `max_age` seconds ago.
+ * Return when we should call gc again *)
+let gc ~get_state =
+  let now = Unix.gettimeofday () in
+  (* gc any interaction that started more than max_age seconds ago *)
+  let oldest_remaining_interaction = gc ~get_state (now -. max_age) in
+  match oldest_remaining_interaction with
+  | None ->
+    (* If there are no pending interactions, then nothing will expire for at least max_age secs *)
+    now +. max_age
+  | Some start_time ->
+    (* Otherwise let's check back in when the oldest pending interaction is set to expire *)
+    start_time +. max_age
+
+let init () = FlowInteractionLogger.init ()
+
+let flush () = FlowInteractionLogger.flush ()
+
+(* Not every message that the lsp process receives triggers an interaction.
This function + * enumerates which methods we care about and what trigger they correspond to *) +let trigger_of_lsp_msg = + Lsp.( + function + (* Requests from the client which we care about *) + | RequestMessage (_, CodeActionRequest _) -> Some CodeAction + | RequestMessage (_, CompletionRequest _) -> Some Completion + | RequestMessage (_, DefinitionRequest _) -> Some Definition + | RequestMessage (_, DocumentHighlightRequest _) -> Some DocumentHighlight + | RequestMessage (_, DocumentSymbolRequest _) -> Some DocumentSymbol + | RequestMessage (_, FindReferencesRequest _) -> Some FindReferences + | RequestMessage (_, HoverRequest _) -> Some Hover + | RequestMessage (_, RageRequest) -> Some Rage + | RequestMessage (_, RenameRequest _) -> Some Rename + | RequestMessage (_, TypeCoverageRequest _) -> Some TypeCoverage + (* Requests which we don't care about. Some are unsupported and some are sent from the lsp to + * the client *) + | RequestMessage (_, CompletionItemResolveRequest _) + | RequestMessage (_, DocumentFormattingRequest _) + | RequestMessage (_, DocumentOnTypeFormattingRequest _) + | RequestMessage (_, DocumentRangeFormattingRequest _) + | RequestMessage (_, InitializeRequest _) + | RequestMessage (_, ShowMessageRequestRequest _) + | RequestMessage (_, ShowStatusRequest _) + | RequestMessage (_, ShutdownRequest) + | RequestMessage (_, CodeLensResolveRequest _) + | RequestMessage (_, DocumentCodeLensRequest _) + (* TODO not sure if this is right, just need to unbreak the build. *) + + | RequestMessage (_, TypeDefinitionRequest _) + | RequestMessage (_, UnknownRequest _) + | RequestMessage (_, WorkspaceSymbolRequest _) + | RequestMessage (_, RegisterCapabilityRequest _) -> + None + (* No responses trigger interactions *) + | ResponseMessage (_, InitializeResult _) + | ResponseMessage (_, ShutdownResult) + | ResponseMessage (_, CodeLensResolveResult _) + | ResponseMessage (_, HoverResult _) + | ResponseMessage (_, DefinitionResult _) + | ResponseMessage (_, CompletionResult _) + | ResponseMessage (_, CompletionItemResolveResult _) + | ResponseMessage (_, WorkspaceSymbolResult _) + | ResponseMessage (_, DocumentSymbolResult _) + | ResponseMessage (_, FindReferencesResult _) + | ResponseMessage (_, DocumentHighlightResult _) + | ResponseMessage (_, DocumentCodeLensResult _) + | ResponseMessage (_, TypeCoverageResult _) + (* TODO not sure if this is right, just need to unbreak the build. 
*) + + | ResponseMessage (_, TypeDefinitionResult _) + | ResponseMessage (_, DocumentFormattingResult _) + | ResponseMessage (_, DocumentRangeFormattingResult _) + | ResponseMessage (_, DocumentOnTypeFormattingResult _) + | ResponseMessage (_, ShowMessageRequestResult _) + | ResponseMessage (_, ShowStatusResult _) + | ResponseMessage (_, RageResult _) + | ResponseMessage (_, RenameResult _) + | ResponseMessage (_, ErrorResult _) + | ResponseMessage (_, CodeActionResult _) -> + None + (* Only a few notifications can trigger an interaction *) + | NotificationMessage (DidOpenNotification _) -> Some DidOpen + | NotificationMessage (DidCloseNotification _) -> Some DidClose + | NotificationMessage (DidSaveNotification _) -> Some DidSave + | NotificationMessage (DidChangeNotification _) -> Some DidChange + (* Most notifications we ignore *) + | NotificationMessage ExitNotification + | NotificationMessage (CancelRequestNotification _) + | NotificationMessage (PublishDiagnosticsNotification _) + | NotificationMessage (LogMessageNotification _) + | NotificationMessage (TelemetryNotification _) + | NotificationMessage (ShowMessageNotification _) + | NotificationMessage (ProgressNotification _) + | NotificationMessage (ActionRequiredNotification _) + | NotificationMessage (ConnectionStatusNotification _) + | NotificationMessage InitializedNotification + | NotificationMessage SetTraceNotification + | NotificationMessage LogTraceNotification + | NotificationMessage (UnknownNotification _) + | NotificationMessage (DidChangeWatchedFilesNotification _) -> + None) diff --git a/src/lsp/lspInteraction.mli b/src/lsp/lspInteraction.mli new file mode 100644 index 00000000000..56cd198af4b --- /dev/null +++ b/src/lsp/lspInteraction.mli @@ -0,0 +1,71 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type id = int + +type trigger = + | CodeAction + | Completion + | Definition + | DidChange + | DidClose + | DidOpen + | DidSave + | DocumentHighlight + | DocumentSymbol + | FindReferences + | Hover + | PushedErrorsEndOfRecheck of LspProt.recheck_reason + | PushedErrorsEnvChange + | PushedErrorsNewSubscription + | PushedErrorsRecheckStreaming of LspProt.recheck_reason + | Rage + | Rename + | TypeCoverage + | UnknownTrigger + +type ux = + | Canceled + | Errored + | ErroredPushingLiveParseErrors + | PushedErrors + | PushedLiveParseErrors + | Responded + | Timeout + +type server_status = + | Stopped + | Initializing + | Rechecking + | Ready + +type buffer_status = + | NoOpenBuffers + | NoUnsavedBuffers + | UnsavedBuffers + +type state = { + time: float; + server_status: server_status; + buffer_status: buffer_status; +} + +val init : unit -> unit + +val start : start_state:state -> trigger:trigger -> id + +val recheck_start : start_state:state -> unit + +val log : end_state:state -> ux:ux -> id:id -> unit + +val log_pushed_errors : end_state:state -> errors_reason:LspProt.errors_reason -> unit + +val trigger_of_lsp_msg : Lsp.lsp_message -> trigger option + +val gc : get_state:(unit -> state) -> float + +val flush : unit -> unit Lwt.t diff --git a/src/monitor/connections/dune b/src/monitor/connections/dune new file mode 100644 index 00000000000..b133742c992 --- /dev/null +++ b/src/monitor/connections/dune @@ -0,0 +1,11 @@ +(library + (name flow_monitor_connections) + (wrapped false) + (libraries + flow_common_lwt + flow_monitor_logger + flow_server_protocol + lwt.unix + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/monitor/connections/ephemeralConnection.ml b/src/monitor/connections/ephemeralConnection.ml index 06519a92472..cf05937c4ce 100644 --- a/src/monitor/connections/ephemeralConnection.ml +++ b/src/monitor/connections/ephemeralConnection.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,5 +10,6 @@ include FlowServerMonitorConnection.Make (struct type in_message = ServerProt.Request.command_with_context + type out_message = MonitorProt.monitor_to_client_message end) diff --git a/src/monitor/connections/ephemeralConnection.mli b/src/monitor/connections/ephemeralConnection.mli index c3e4ff02284..f9e6eff65cb 100644 --- a/src/monitor/connections/ephemeralConnection.mli +++ b/src/monitor/connections/ephemeralConnection.mli @@ -1,10 +1,11 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -include FlowServerMonitorConnection.CONNECTION - with type in_message := ServerProt.Request.command_with_context - and type out_message := MonitorProt.monitor_to_client_message +include + FlowServerMonitorConnection.CONNECTION + with type in_message := ServerProt.Request.command_with_context + and type out_message := MonitorProt.monitor_to_client_message diff --git a/src/monitor/connections/flowServerMonitorConnection.ml b/src/monitor/connections/flowServerMonitorConnection.ml index 811c9c4afbc..9a4b0081827 100644 --- a/src/monitor/connections/flowServerMonitorConnection.ml +++ b/src/monitor/connections/flowServerMonitorConnection.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. 
+ * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -27,44 +27,56 @@ module Logger = FlowServerMonitorLogger module type CONNECTION_PROCESSOR = sig type in_message + type out_message end type 'out_message command = -| Write of 'out_message -| WriteAndClose of 'out_message + | Write of 'out_message + | WriteAndClose of 'out_message module type CONNECTION = sig type t + type in_message + type out_message - val create: - (* A name for this connection for debugging messages *) - name: string -> - (* The fd from which we should read *) - in_fd:Lwt_unix.file_descr -> - (* The fd to which we should write *) - out_fd:Lwt_unix.file_descr -> - (* A function that closes the in and out fds *) - close:(unit -> unit Lwt.t) -> - (* A callback for when we read a message from the in_fd *) - on_read:(msg:in_message -> connection:t -> unit Lwt.t) -> + val create : + name:(* A name for this connection for debugging messages *) + string -> + in_fd:(* The fd from which we should read *) + Lwt_unix.file_descr -> + out_fd:(* The fd to which we should write *) + Lwt_unix.file_descr -> + close:((* A function that closes the in and out fds *) + unit -> unit Lwt.t) -> + on_read: + (msg:(* A callback for when we read a message from the in_fd *) + in_message -> + connection:t -> + unit Lwt.t) -> (* Returns the tuple (start, conn), where conn is the connection and `start ()` tells the * connection to reading from and writing to the fds *) ((unit -> unit) * t) Lwt.t - val write: msg:out_message -> t -> unit - val write_and_close: msg:out_message -> t -> unit - val close_immediately: t -> unit Lwt.t - val flush_and_close: t -> unit Lwt.t - val is_closed: t -> bool - val wait_for_closed: t -> unit Lwt.t -end -module Make (ConnectionProcessor: CONNECTION_PROCESSOR) : CONNECTION - with type in_message := ConnectionProcessor.in_message - and type out_message := ConnectionProcessor.out_message = struct + val write : msg:out_message -> t -> unit + + val write_and_close : msg:out_message -> t -> unit + + val close_immediately : t -> unit Lwt.t + + val flush_and_close : t -> unit Lwt.t + val is_closed : t -> bool + + val wait_for_closed : t -> unit Lwt.t +end + +module Make (ConnectionProcessor : CONNECTION_PROCESSOR) : + CONNECTION + with type in_message := ConnectionProcessor.in_message + and type out_message := ConnectionProcessor.out_message = struct type t = { name: string; in_fd: Lwt_unix.file_descr; @@ -78,15 +90,11 @@ module Make (ConnectionProcessor: CONNECTION_PROCESSOR) : CONNECTION wait_for_closed_thread: unit Lwt.t; } - let send_command conn command = - conn.push_to_stream (Some command) + let send_command conn command = conn.push_to_stream (Some command) - let close_stream conn = - try conn.push_to_stream None - with Lwt_stream.Closed -> () + let close_stream conn = (try conn.push_to_stream None with Lwt_stream.Closed -> ()) - let write ~msg conn = - send_command conn (Write msg) + let write ~msg conn = send_command conn (Write msg) let write_and_close ~msg conn = send_command conn (WriteAndClose msg); @@ -103,12 +111,13 @@ module Make (ConnectionProcessor: CONNECTION_PROCESSOR) : CONNECTION conn.close () let handle_command conn = function - | Write msg -> - let%lwt _size = Marshal_tools_lwt.to_fd_with_preamble conn.out_fd msg in Lwt.return_unit - | WriteAndClose msg -> - Lwt.cancel conn.command_thread; - let%lwt _size = Marshal_tools_lwt.to_fd_with_preamble conn.out_fd msg in - 
close_immediately conn + | Write msg -> + let%lwt _size = Marshal_tools_lwt.to_fd_with_preamble conn.out_fd msg in + Lwt.return_unit + | WriteAndClose msg -> + Lwt.cancel conn.command_thread; + let%lwt _size = Marshal_tools_lwt.to_fd_with_preamble conn.out_fd msg in + close_immediately conn (* Write everything available in the stream and then close the connection *) let flush_and_close conn = @@ -134,15 +143,13 @@ module Make (ConnectionProcessor: CONNECTION_PROCESSOR) : CONNECTION let catch conn exn = match exn with (* The command stream has been closed. This means the command loop should gracefully exit *) - | Lwt_stream.Empty -> - Lwt.return_unit - | exn -> begin + | Lwt_stream.Empty -> Lwt.return_unit + | exn -> Logger.error ~exn "Closing connection '%s' due to uncaught exception in command loop" conn.name; close_immediately conn - end end) module ReadLoop = LwtLoop.Make (struct @@ -150,49 +157,59 @@ module Make (ConnectionProcessor: CONNECTION_PROCESSOR) : CONNECTION let main connection = let%lwt msg = - (Marshal_tools_lwt.from_fd_with_preamble connection.in_fd - : ConnectionProcessor.in_message Lwt.t) + ( Marshal_tools_lwt.from_fd_with_preamble connection.in_fd + : ConnectionProcessor.in_message Lwt.t ) in let%lwt () = connection.on_read ~msg ~connection in Lwt.return connection let catch connection exn = - Logger.error - ~exn - "Closing connection '%s' due to uncaught exception in read loop" - connection.name; + (match exn with + | End_of_file -> + Logger.error "Connection '%s' was closed from the other side" connection.name + | _ -> + Logger.error + ~exn + "Closing connection '%s' due to uncaught exception in read loop" + connection.name); close_immediately connection end) let create ~name ~in_fd ~out_fd ~close ~on_read = - let wait_for_closed_thread, close = + let (wait_for_closed_thread, close) = (* Lwt.wait creates a thread that can't be canceled *) let (wait_for_closed_thread, wakener) = Lwt.wait () in (* If we've already woken the thread, then do nothing *) - let wakeup () = try Lwt.wakeup wakener () with Invalid_argument _ -> () in + let wakeup () = (try Lwt.wakeup wakener () with Invalid_argument _ -> ()) in (* On close, wake wait_for_closed_thread *) let close () = let%lwt () = close () in wakeup (); Lwt.return_unit in - wait_for_closed_thread, close + (wait_for_closed_thread, close) in - let command_stream, push_to_stream = Lwt_stream.create () in + let (command_stream, push_to_stream) = Lwt_stream.create () in (* Lwt.task creates a thread that can be canceled *) let (paused_thread, wakener) = Lwt.task () in - let conn = { - name; - in_fd; - out_fd; - command_stream; - push_to_stream; - close; - on_read; - command_thread = (let%lwt conn = paused_thread in CommandLoop.run conn); - read_thread = (let%lwt conn = paused_thread in ReadLoop.run conn); - wait_for_closed_thread; - } in + let conn = + { + name; + in_fd; + out_fd; + command_stream; + push_to_stream; + close; + on_read; + command_thread = + (let%lwt conn = paused_thread in + CommandLoop.run conn); + read_thread = + (let%lwt conn = paused_thread in + ReadLoop.run conn); + wait_for_closed_thread; + } + in let start () = Lwt.wakeup wakener conn in Lwt.return (start, conn) end diff --git a/src/monitor/connections/flowServerMonitorConnection.mli b/src/monitor/connections/flowServerMonitorConnection.mli index a65ed764536..38b88b7806c 100644 --- a/src/monitor/connections/flowServerMonitorConnection.mli +++ b/src/monitor/connections/flowServerMonitorConnection.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, 
Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,29 +7,39 @@ module type CONNECTION_PROCESSOR = sig type in_message + type out_message end module type CONNECTION = sig type t + type in_message + type out_message - val create: + val create : name:string -> in_fd:Lwt_unix.file_descr -> out_fd:Lwt_unix.file_descr -> close:(unit -> unit Lwt.t) -> on_read:(msg:in_message -> connection:t -> unit Lwt.t) -> ((unit -> unit) * t) Lwt.t - val write: msg:out_message -> t -> unit - val write_and_close: msg:out_message -> t -> unit - val close_immediately: t -> unit Lwt.t - val flush_and_close: t -> unit Lwt.t - val is_closed: t -> bool - val wait_for_closed: t -> unit Lwt.t + + val write : msg:out_message -> t -> unit + + val write_and_close : msg:out_message -> t -> unit + + val close_immediately : t -> unit Lwt.t + + val flush_and_close : t -> unit Lwt.t + + val is_closed : t -> bool + + val wait_for_closed : t -> unit Lwt.t end -module Make : functor (ConnectionProcessor: CONNECTION_PROCESSOR) -> CONNECTION - with type in_message := ConnectionProcessor.in_message - and type out_message := ConnectionProcessor.out_message +module Make (ConnectionProcessor : CONNECTION_PROCESSOR) : + CONNECTION + with type in_message := ConnectionProcessor.in_message + and type out_message := ConnectionProcessor.out_message diff --git a/src/monitor/connections/persistentConnection.ml b/src/monitor/connections/persistentConnection.ml index 66b9149c3c2..91eeb0dae79 100644 --- a/src/monitor/connections/persistentConnection.ml +++ b/src/monitor/connections/persistentConnection.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,6 +9,7 @@ * clients. *) include FlowServerMonitorConnection.Make (struct - type in_message = Persistent_connection_prot.request - type out_message = Persistent_connection_prot.response + type in_message = LspProt.request_with_metadata + + type out_message = LspProt.message_from_server end) diff --git a/src/monitor/connections/persistentConnection.mli b/src/monitor/connections/persistentConnection.mli index 2b5aa2e6320..6340197042d 100644 --- a/src/monitor/connections/persistentConnection.mli +++ b/src/monitor/connections/persistentConnection.mli @@ -1,10 +1,11 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -include FlowServerMonitorConnection.CONNECTION - with type in_message := Persistent_connection_prot.request - and type out_message := Persistent_connection_prot.response +include + FlowServerMonitorConnection.CONNECTION + with type in_message := LspProt.request_with_metadata + and type out_message := LspProt.message_from_server diff --git a/src/monitor/connections/serverConnection.ml b/src/monitor/connections/serverConnection.ml index 34e8e4b1f8c..ec6e5873120 100644 --- a/src/monitor/connections/serverConnection.ml +++ b/src/monitor/connections/serverConnection.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,5 +9,6 @@ include FlowServerMonitorConnection.Make (struct type in_message = MonitorProt.server_to_monitor_message + type out_message = MonitorProt.monitor_to_server_message end) diff --git a/src/monitor/connections/serverConnection.mli b/src/monitor/connections/serverConnection.mli index 44559a2a982..a6de4025231 100644 --- a/src/monitor/connections/serverConnection.mli +++ b/src/monitor/connections/serverConnection.mli @@ -1,10 +1,11 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -include FlowServerMonitorConnection.CONNECTION - with type in_message := MonitorProt.server_to_monitor_message - and type out_message := MonitorProt.monitor_to_server_message +include + FlowServerMonitorConnection.CONNECTION + with type in_message := MonitorProt.server_to_monitor_message + and type out_message := MonitorProt.monitor_to_server_message diff --git a/src/monitor/dune b/src/monitor/dune new file mode 100644 index 00000000000..1e5a6ec9b5c --- /dev/null +++ b/src/monitor/dune @@ -0,0 +1,24 @@ +(library + (name flow_monitor) + (wrapped false) + (libraries + dfind_lwt + flow_common + flow_common_lwt + flow_exit_status + flow_logging_lwt + flow_logging_utils + flow_monitor_connections + flow_monitor_logger + flow_monitor_utils + flow_server + flow_server_files + flow_server_watchman_expression_terms + flow_shared_mem + lwt.unix + semver + socket + watchman_lwt + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/monitor/fileWatcher.ml b/src/monitor/fileWatcher.ml index a080aed7664..dfd10f105c7 100644 --- a/src/monitor/fileWatcher.ml +++ b/src/monitor/fileWatcher.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -17,30 +17,48 @@ exception FileWatcherDied of exn *) class type watcher = object - method name: string - method start_init: unit - method wait_for_init: unit Lwt.t - method get_and_clear_changed_files: SSet.t Lwt.t - method wait_for_changed_files: unit Lwt.t - method stop: unit Lwt.t - method waitpid: Unix.process_status Lwt.t - method getpid: int option + method name : string + + method start_init : unit + + method wait_for_init : unit Lwt.t + + method get_and_clear_changed_files : (SSet.t * MonitorProt.file_watcher_metadata option) Lwt.t + + method wait_for_changed_files : unit Lwt.t + + method stop : unit Lwt.t + + method waitpid : unit Lwt.t + + method getpid : int option end -class dummy : watcher = object - method name = "dummy" - method start_init = () - method wait_for_init = Lwt.return_unit - method get_and_clear_changed_files = Lwt.return SSet.empty - method wait_for_changed_files = Lwt.return_unit - method stop = Lwt.return_unit - method waitpid = let wait_forever_thread, _ = Lwt.task () in wait_forever_thread - method getpid = None -end +class dummy : watcher = + object + method name = "dummy" + + method start_init = () + + method wait_for_init = Lwt.return_unit + + method get_and_clear_changed_files = Lwt.return (SSet.empty, None) -class dfind (monitor_options: FlowServerMonitorOptions.t) : watcher = + method wait_for_changed_files = Lwt.return_unit + + method stop = Lwt.return_unit + + method waitpid = + let (wait_forever_thread, _) = Lwt.task () in + wait_forever_thread + + method getpid = None + end + +class dfind (monitor_options : FlowServerMonitorOptions.t) : watcher = object (self) val mutable dfind_instance = None + val mutable files = SSet.empty method name = "dfind" @@ -60,35 +78,34 @@ class dfind (monitor_options: FlowServerMonitorOptions.t) : watcher = let dfind = DfindLibLwt.init fds ("flow_server_events", watch_paths) in dfind_instance <- Some dfind - method wait_for_init = - DfindLibLwt.wait_until_ready (self#get_dfind) + method wait_for_init = DfindLibLwt.wait_until_ready self#get_dfind (* We don't want two threads to talk to dfind at the same time. 
And we don't want those two * threads to get the same file change events *) val dfind_mutex = Lwt_mutex.create () + method private fetch = Lwt_mutex.with_lock dfind_mutex (fun () -> - let dfind = self#get_dfind in - try%lwt - let%lwt new_files = DfindLibLwt.get_changes dfind in - files <- SSet.union files new_files; - Lwt.return_unit - with - | Sys_error msg as exn when msg = "Broken pipe" -> raise (FileWatcherDied exn) - | End_of_file - | Unix.Unix_error (Unix.EPIPE, _, _) as exn -> raise (FileWatcherDied exn) - ) + let dfind = self#get_dfind in + try%lwt + let%lwt new_files = DfindLibLwt.get_changes dfind in + files <- SSet.union files new_files; + Lwt.return_unit + with + | Sys_error msg as exn when msg = "Broken pipe" -> raise (FileWatcherDied exn) + | (End_of_file | Unix.Unix_error (Unix.EPIPE, _, _)) as exn -> + raise (FileWatcherDied exn)) method get_and_clear_changed_files = let%lwt () = self#fetch in - let ret = files in + let ret = (files, None) in files <- SSet.empty; Lwt.return ret method wait_for_changed_files = let%lwt () = self#fetch in - if not (SSet.is_empty files) - then Lwt.return_unit + if not (SSet.is_empty files) then + Lwt.return_unit else let%lwt () = Lwt_unix.sleep 1.0 in self#wait_for_changed_files @@ -98,6 +115,7 @@ class dfind (monitor_options: FlowServerMonitorOptions.t) : watcher = let pid = DfindLibLwt.pid dfind in DfindLibLwt.stop dfind; dfind_instance <- None; + (* Reap the killed process *) let%lwt _ = LwtSysUtils.blocking_waitpid pid in Lwt.return_unit @@ -106,7 +124,32 @@ class dfind (monitor_options: FlowServerMonitorOptions.t) : watcher = let dfind = self#get_dfind in let pid = DfindLibLwt.pid dfind in let%lwt (_, status) = LwtSysUtils.blocking_waitpid pid in - Lwt.return status + begin + match status with + | Unix.WEXITED exit_status -> + let exit_type = + (try Some (FlowExitStatus.error_type exit_status) with Not_found -> None) + in + let exit_status_string = + Option.value_map ~default:"Invalid_exit_code" ~f:FlowExitStatus.to_string exit_type + in + Logger.error + "File watcher (%s) exited with code %s (%d)" + self#name + exit_status_string + exit_status + | Unix.WSIGNALED signal -> + Logger.error + "File watcher (%s) was killed with %s signal" + self#name + (PrintSignal.string_of_signal signal) + | Unix.WSTOPPED signal -> + Logger.error + "File watcher (%s) was stopped with %s signal" + self#name + (PrintSignal.string_of_signal signal) + end; + Lwt.return_unit method getpid = let dfind = self#get_dfind in @@ -116,77 +159,190 @@ class dfind (monitor_options: FlowServerMonitorOptions.t) : watcher = module WatchmanFileWatcher : sig class watchman : FlowServerMonitorOptions.t -> watcher end = struct + (* We need to keep track of when hg transactions start and end. 
It's generally unsafe to read the + * state of the repo in the middle of a transaction, so we often need to avoid reading scm info + * until all transactions are done *) + module HgTransaction : sig + type t + + val empty : t + + val enter : t -> t + + val leave : t -> t Lwt.t + + val register_callback : t -> (unit -> unit Lwt.t) -> t Lwt.t + end = struct + type t = { + count: int; + callbacks: (unit -> unit Lwt.t) list; + } + + let empty = { count = 0; callbacks = [] } + + let enter t = { t with count = t.count + 1 } + + let leave t = + let t = { t with count = t.count - 1 } in + if t.count = 0 then + let%lwt () = Lwt_list.iter_s (fun f -> f ()) (List.rev t.callbacks) in + Lwt.return { t with callbacks = [] } + else + Lwt.return t + + let register_callback t f = + if t.count = 0 then + let%lwt () = f () in + Lwt.return t + else + Lwt.return { t with callbacks = f :: t.callbacks } + end + type env = { mutable instance: Watchman_lwt.watchman_instance; mutable files: SSet.t; + mutable metadata: MonitorProt.file_watcher_metadata; + mutable mergebase: string option; + mutable hg_transactions: HgTransaction.t; listening_thread: unit Lwt.t; changes_condition: unit Lwt_condition.t; + init_settings: Watchman_lwt.init_settings; + should_track_mergebase: bool; } - module WatchmanListenLoop = LwtLoop.Make(struct + let get_mergebase env = + if env.should_track_mergebase then + Watchman_lwt.with_instance + env.instance + ~try_to_restart:true + ~on_alive:(fun watchman_env -> + env.instance <- Watchman_lwt.Watchman_alive watchman_env; + + (* scm queries can be a little slow, but they should usually be only a few seconds. + * Lets set our worst case to 30s before we exit *) + let%lwt mergebase = + Watchman_lwt.(get_mergebase ~timeout:(Explicit_timeout 30.) watchman_env) + in + Lwt.return (Some mergebase)) + ~on_dead:(fun dead_env -> + env.instance <- Watchman_lwt.Watchman_dead dead_env; + failwith "Failed to connect to Watchman to get mergebase") + else + Lwt.return_none + + module WatchmanListenLoop = LwtLoop.Make (struct module J = Hh_json_helpers.AdhocJsonHelpers type acc = env let extract_hg_update_metadata = function - | None -> "", "" - | Some metadata -> - let distance = J.get_number_val "distance" ~default:"" metadata in - let rev = J.get_string_val "rev" ~default:"" metadata in - distance, rev + | None -> ("", "") + | Some metadata -> + let distance = J.get_number_val "distance" ~default:"" metadata in + let rev = J.get_string_val "rev" ~default:"" metadata in + (distance, rev) let broadcast env = - if not (SSet.is_empty env.files) - then Lwt_condition.broadcast env.changes_condition () + if not (SSet.is_empty env.files) then Lwt_condition.broadcast env.changes_condition () let main env = let deadline = Unix.time () +. 604800. in - let%lwt instance, result = Watchman_lwt.get_changes ~deadline env.instance in + let%lwt (instance, result) = Watchman_lwt.get_changes ~deadline env.instance in env.instance <- instance; - begin match result with + match result with | Watchman_lwt.Watchman_pushed pushed_changes -> - begin match pushed_changes with - | Watchman_lwt.Files_changed new_files -> - env.files <- SSet.union env.files new_files; - broadcast env - | Watchman_lwt.State_enter (name, metadata) -> - if name = "hg.update" - then - let distance, rev = extract_hg_update_metadata metadata in - Logger.info - "Watchman reports an hg.update just started. 
Moving %s revs from %s" distance rev - | Watchman_lwt.State_leave (name, metadata) -> - if name = "hg.update" - then - let distance, rev = extract_hg_update_metadata metadata in - Logger.info - "Watchman reports an hg.update just finished. Moved %s revs to %s" distance rev - | Watchman_lwt.Changed_merge_base _ -> - failwith "We're not using an scm aware subscription, so we should never get these" + begin + match pushed_changes with + | Watchman_lwt.Files_changed new_files -> + env.files <- SSet.union env.files new_files; + broadcast env; + Lwt.return env + | Watchman_lwt.State_enter (name, metadata) -> + (match name with + | "hg.update" -> + let (distance, rev) = extract_hg_update_metadata metadata in + Logger.info + "Watchman reports an hg.update just started. Moving %s revs from %s" + distance + rev + | "hg.transaction" -> env.hg_transactions <- HgTransaction.enter env.hg_transactions + | _ -> ()); + Lwt.return env + | Watchman_lwt.State_leave (name, metadata) -> + (match name with + | "hg.update" -> + let (distance, rev) = extract_hg_update_metadata metadata in + env.metadata <- + MonitorProt. + { + env.metadata with + total_update_distance = + env.metadata.total_update_distance + int_of_string distance; + }; + Logger.info + "Watchman reports an hg.update just finished. Moved %s revs to %s" + distance + rev; + + let old_mergebase = env.mergebase in + let%lwt hg_trans = + HgTransaction.register_callback env.hg_transactions + @@ fun () -> + (* If the mergebase has changed for some reason before this callback runs, then + * don't run this callback. For example, I could imagine multiple hg.update's inside a + * single transaction queueing up multiple callbacks. *) + if env.mergebase <> old_mergebase then + Lwt.return_unit + else + let%lwt new_mergebase = get_mergebase env in + match (new_mergebase, old_mergebase) with + | (Some new_mergebase, Some old_mergebase) when new_mergebase <> old_mergebase -> + Logger.info + "Watchman reports mergebase changed from %S to %S" + old_mergebase + new_mergebase; + env.mergebase <- Some new_mergebase; + env.metadata <- { env.metadata with MonitorProt.changed_mergebase = true }; + Lwt.return_unit + | _ -> Lwt.return_unit + in + env.hg_transactions <- hg_trans; + Lwt.return env + | "hg.transaction" -> + let%lwt hg_trans = HgTransaction.leave env.hg_transactions in + env.hg_transactions <- hg_trans; + Lwt.return env + | _ -> Lwt.return env) + | Watchman_lwt.Changed_merge_base _ -> + failwith "We're not using an scm aware subscription, so we should never get these" end | Watchman_lwt.Watchman_synchronous _ -> failwith "Flow should never use the synchronous watchman API" | Watchman_lwt.Watchman_unavailable -> (* TODO (glevi) - Should we die if we get this for too long? *) - Logger.error "Watchman unavailable. Retrying..." - end; - Lwt.return env + Logger.error "Watchman unavailable. Retrying..."; - external reraise : exn -> 'a = "%reraise" + (* Watchman_lwt.get_changes will restart the connection. However it has some backoff + * built in and will do nothing if called too early. That turns this LwtLoop module into a + * busy wait. So let's add a sleep here to yield and prevent spamming the logs too much. 
*) + let%lwt () = Lwt_unix.sleep 1.0 in + Lwt.return env let catch _ exn = - match exn with - | Lwt.Canceled -> Lwt.return_unit - | _ -> - Logger.error ~exn "Uncaught exception in Watchman listening loop"; - reraise exn + Logger.error ~exn "Uncaught exception in Watchman listening loop"; + + (* By exiting this loop we'll let the server know that something went wrong with Watchman *) + Lwt.return_unit end) - class watchman (monitor_options: FlowServerMonitorOptions.t) : watcher = + class watchman (monitor_options : FlowServerMonitorOptions.t) : watcher = object (self) val mutable env = None + val mutable init_thread = None + val mutable init_settings = None + method name = "watchman" method private get_env = @@ -195,99 +351,67 @@ end = struct | Some env -> env method start_init = - let { FlowServerMonitorOptions.server_options; file_watcher_debug; _} = - monitor_options in + let { FlowServerMonitorOptions.server_options; file_watcher_debug; _ } = monitor_options in let file_options = Options.file_options server_options in - - let watchman_expression_terms = - let module J = Hh_json_helpers.AdhocJsonHelpers in - - let suffixes = - let exts = SSet.elements @@ Files.get_all_watched_extensions file_options in - let exts = Files.flow_ext :: exts in - - exts - (* Turn .foo.bar into .bar, since suffix can't deal with multi-part extensions *) - |> List.map (fun ext -> Filename.extension ("foo" ^ ext)) - (* Strip off the leading '.' *) - |> List.map (fun ext -> - if ext <> "" && ext.[0] = '.' - then String.sub ext 1 (String.length ext - 1) - else ext - ) - in - - (* Unfortunately watchman can't deal with absolute paths. Its "wholename" feature only - * works for relative paths to the watch root, and we don't know the watch root until we - * init. - * - * Luckily, all we really need is to specify a superset of the files we care about. So - * watching all .flowconfigs instead of just our .flowconfig is fine *) - let absolute_paths = - (* Config file *) - let flowconfig_name = Options.flowconfig_name server_options in - let paths = [Server_files_js.config_file flowconfig_name @@ Options.root server_options] - in - (* Module resolver *) - Option.value_map (Options.module_resolver server_options) - ~default:paths ~f:(fun module_resolver -> Path.to_string module_resolver :: paths) - in - - (* Include any file with this basename *) - let basenames = "package.json" :: (List.map Filename.basename absolute_paths) in - - [ - J.strlist ["type"; "f"]; (* Watch for files *) - J.pred "anyof" @@ [ - J.assoc_strlist "suffix" suffixes; - J.assoc_strlist "name" basenames; - ]; - J.pred "not" @@ [ (* Ignore changes in source control dirs *) - J.pred "anyof" @@ [ - J.strlist ["dirname"; ".hg"]; - J.strlist ["dirname"; ".git"]; - J.strlist ["dirname"; ".svn"]; - ] - ] - ] + let watchman_expression_terms = Watchman_expression_terms.make ~options:server_options in + let settings = + { + (* Defer updates during `hg.update` *) + Watchman_lwt.subscribe_mode = Some Watchman_lwt.Defer_changes; + (* Hack makes this configurable in their local config. Apparently buck & hgwatchman also + * use 10 seconds. 
*) + init_timeout = Watchman_lwt.Explicit_timeout 10.; + expression_terms = watchman_expression_terms; + subscription_prefix = "flow_watcher"; + roots = Files.watched_paths file_options; + debug_logging = file_watcher_debug; + } in + init_settings <- Some settings; - init_thread <- Some (Watchman_lwt.init { - (* Defer updates during `hg.update` *) - Watchman_lwt.subscribe_mode = Some Watchman_lwt.Defer_changes; - (* Hack makes this configurable in their local config. Apparently buck & hgwatchman also - * use 10 seconds *) - init_timeout = 10; - expression_terms = watchman_expression_terms; - subscription_prefix = "flow_watcher"; - roots = Files.watched_paths file_options; - debug_logging = file_watcher_debug; - } ()) + init_thread <- Some (Watchman_lwt.init settings ()) method wait_for_init = let%lwt watchman = Option.value_exn init_thread in init_thread <- None; - begin match watchman with + let should_track_mergebase = + let server_options = monitor_options.FlowServerMonitorOptions.server_options in + Options.lazy_mode server_options = Options.LAZY_MODE_WATCHMAN + in + match watchman with | Some watchman -> - let waiter, wakener = Lwt.task () in - let new_env = { - instance = Watchman_lwt.Watchman_alive watchman; - files = SSet.empty; - listening_thread = (let%lwt env = waiter in WatchmanListenLoop.run env); - changes_condition = Lwt_condition.create (); - } in + let (waiter, wakener) = Lwt.task () in + let new_env = + { + instance = Watchman_lwt.Watchman_alive watchman; + files = SSet.empty; + listening_thread = + (let%lwt env = waiter in + WatchmanListenLoop.run env); + mergebase = None; + hg_transactions = HgTransaction.empty; + changes_condition = Lwt_condition.create (); + metadata = MonitorProt.empty_file_watcher_metadata; + init_settings = Option.value_exn init_settings; + should_track_mergebase; + } + in + let%lwt mergebase = get_mergebase new_env in + Option.iter mergebase ~f:(Logger.info "Watchman reports the initial mergebase as %S"); + let new_env = { new_env with mergebase } in env <- Some new_env; - Lwt.wakeup wakener new_env - | None -> - failwith "Failed to initialize watchman" - end; - Lwt.return_unit + Lwt.wakeup wakener new_env; + Lwt.return_unit + | None -> failwith "Failed to initialize watchman" + (* Should we throw away metadata even if files is empty? glevi thinks that's fine, since we + * probably don't care about hg updates or mergebase changing if no files were affected *) method get_and_clear_changed_files = let env = self#get_env in - let ret = env.files in + let ret = (env.files, Some env.metadata) in env.files <- SSet.empty; + env.metadata <- MonitorProt.empty_file_watcher_metadata; Lwt.return ret method wait_for_changed_files = @@ -298,19 +422,41 @@ end = struct (* Flow doesn't own the watchman process, so it's not Flow's job to stop the watchman * process. What we can do, though, is stop listening to the messages *) let env = self#get_env in + Logger.info "Canceling Watchman listening thread & closing connection"; Lwt.cancel env.listening_thread; - Lwt.return_unit + Watchman_lwt.with_instance + env.instance + ~try_to_restart:false + ~on_alive:Watchman_lwt.close + ~on_dead:(fun _ -> Lwt.return_unit) method waitpid = (* If watchman dies, we can start it back up again and use clockspec to make sure we didn't * miss anything. So from the point of view of the FileWatcher abstraction, watchman never - * dies and this method can just wait forever *) - let waiter, _ = Lwt.task () in - waiter + * dies and this method can just wait forever. 
+ * + * However it's possible that something Really Really Bad might happen to watchman. If + * the watchman listening thread itself dies, then we need to tell the monitor that this + * file watcher is dead. *) + let env = self#get_env in + (* waitpid should return a thread that resolves when the listening_thread resolves. So why + * don't we just return the listening_thread? + * + * It's because we need to return a cancelable thread. The listening_thread will resolve to + * unit when it is canceled. That is the wrong behavior. + * + * So how do we wrap the listening_thread in a cancelable thread? By running it + * asynchronously, having it signal when it resolves, and waiting for the signal *) + let signal = Lwt_condition.create () in + Lwt.async (fun () -> + let%lwt () = env.listening_thread in + Lwt_condition.signal signal (); + Lwt.return_unit); + + Lwt_condition.wait signal method getpid = None end - end class watchman = WatchmanFileWatcher.watchman diff --git a/src/monitor/fileWatcherStatus.ml b/src/monitor/fileWatcherStatus.ml deleted file mode 100644 index 8d59b4b28c3..00000000000 --- a/src/monitor/fileWatcherStatus.ml +++ /dev/null @@ -1,26 +0,0 @@ -(** - * Copyright (c) 2018-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type status' = -| Initializing -| Ready -type status = Options.file_watcher * status' - -let string_of_file_watcher = function - | Options.NoFileWatcher -> "Dummy" - | Options.DFind -> "Dfind" - | Options.Watchman -> "Watchman" - -let string_of_status = - let string_of_status = function - | Initializing -> "still initializing" - | Ready -> "ready" - in - - fun (watcher, status) -> - Printf.sprintf - "%s file watcher is %s" (string_of_file_watcher watcher) (string_of_status status) diff --git a/src/monitor/flowServerMonitor.ml b/src/monitor/flowServerMonitor.ml index f47f7709fd2..8883e71a1ff 100644 --- a/src/monitor/flowServerMonitor.ml +++ b/src/monitor/flowServerMonitor.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
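(Editorial sketch, not part of the patch: the `waitpid` method above wraps the non-cancelable `listening_thread` in a cancelable waiter via `Lwt_condition`. A minimal standalone version of that pattern, assuming `lwt` and `lwt_ppx` and using a hypothetical `cancelable_wait` name, might look like this.)

let cancelable_wait (thread : unit Lwt.t) : unit Lwt.t =
  let signal = Lwt_condition.create () in
  (* Run the underlying thread to completion off to the side, then wake the waiter. Canceling
   * the promise returned below only cancels the wait on the condition, not [thread] itself. *)
  Lwt.async (fun () ->
      let%lwt () = thread in
      Lwt_condition.signal signal ();
      Lwt.return_unit);
  Lwt_condition.wait signal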
@@ -15,16 +15,13 @@ module Logger = FlowServerMonitorLogger let handle_waiting_start_command waiting_fd = (* Close the fd, but don't worry if it's already closed *) let close () = - try%lwt - Lwt_unix.close waiting_fd - with + try%lwt Lwt_unix.close waiting_fd with (* If the waiting process decided not to wait and the fd is closed, then that's fine *) - | Unix.Unix_error(Unix.EBADF, _, _) -> Lwt.return_unit + | Unix.Unix_error (Unix.EBADF, _, _) -> Lwt.return_unit | exn -> Logger.error ~exn "Unexpected exception when closing connection to waiting start command"; Lwt.return_unit in - (* Send a message to the fd, but don't worry if it's already closed *) let send_message msg = try%lwt @@ -32,19 +29,16 @@ let handle_waiting_start_command waiting_fd = Lwt.return_unit with (* If the waiting process decided not to wait and closed the pipe, then that's fine *) - | Unix.Unix_error(Unix.EPIPE, _, _) -> close () - | Sys_error msg when msg = "Broken pipe" || msg = "Invalid argument" -> close () + | Unix.Unix_error (Unix.EPIPE, _, _) -> close () + | Sys_error msg when msg = "Broken pipe" || msg = "Invalid argument" -> close () | exn -> Logger.error ~exn "Unexpected exception when talking to waiting start command"; close () - - in - - let%lwt () = send_message FlowServerMonitorDaemon.Starting in - StatusStream.call_on_free ~f:(fun () -> + in + let%lwt () = send_message FlowServerMonitorDaemon.Starting in + StatusStream.call_on_free ~f:(fun () -> let%lwt () = send_message FlowServerMonitorDaemon.Ready in - close () - ) + close ()) (* The EventLogger needs to be periodically flushed. The server flushes it during its main serve * loop, but the monitor has no main loop. So instead we flush every 5 seconds. That should be @@ -63,27 +57,23 @@ end) (* This is the common entry point for both daemonize and start. *) let internal_start ~is_daemon ?waiting_fd monitor_options = - let { FlowServerMonitorOptions.server_options; argv; _; } = monitor_options in - + let { FlowServerMonitorOptions.server_options; argv; _ } = monitor_options in let () = let file_watcher = - FileWatcherStatus.string_of_file_watcher monitor_options.FlowServerMonitorOptions.file_watcher + FileWatcherStatus.string_of_file_watcher + monitor_options.FlowServerMonitorOptions.file_watcher in FlowEventLogger.set_monitor_options ~file_watcher; LoggingUtils.set_server_options ~server_options in - let root = Options.root server_options in let tmp_dir = Options.temp_dir server_options in - (* We need to grab the lock before initializing the pid files and before allocating the shared * heap. Luckily for us, the server will do both of these later *) let flowconfig_name = Options.flowconfig_name server_options in - if not (Lock.grab (Server_files_js.lock_file ~flowconfig_name ~tmp_dir root)) - then begin - let msg = "Error: another server is already running?\n" in - FlowExitStatus.(exit ~msg Lock_stolen) - end; + ( if not (Lock.grab (Server_files_js.lock_file ~flowconfig_name ~tmp_dir root)) then + let msg = "Error: another server is already running?\n" in + FlowExitStatus.(exit ~msg Lock_stolen) ); (* We can't open the log until we have the lock. 
* @@ -93,77 +83,70 @@ let internal_start ~is_daemon ?waiting_fd monitor_options = let log_fd = let log_file = monitor_options.FlowServerMonitorOptions.log_file in let fd = Server_daemon.open_log_file log_file in - if is_daemon - then begin + if is_daemon then ( Unix.dup2 fd Unix.stderr; None - end else begin + ) else ( Hh_logger.set_log log_file (Unix.out_channel_of_descr fd); Some fd - end + ) in - (* Open up the socket immediately. When a client tries to connect to an * open socket, it will block. When a client tries to connect to a not-yet-open * socket, it will fail immediately. The blocking behavior is a little nicer *) let monitor_socket_fd = - Socket.init_unix_socket (Server_files_js.socket_file ~flowconfig_name ~tmp_dir root) in + Socket.init_unix_socket (Server_files_js.socket_file ~flowconfig_name ~tmp_dir root) + in let legacy2_socket_fd = - Socket.init_unix_socket (Server_files_js.legacy2_socket_file ~flowconfig_name ~tmp_dir root) in + Socket.init_unix_socket (Server_files_js.legacy2_socket_file ~flowconfig_name ~tmp_dir root) + in let legacy1_socket_fd = - Socket.init_unix_socket (Server_files_js.legacy1_socket_file ~flowconfig_name ~tmp_dir root) in - + Socket.init_unix_socket (Server_files_js.legacy1_socket_file ~flowconfig_name ~tmp_dir root) + in (************************* HERE BEGINS THE MAGICAL WORLD OF LWT *********************************) - let initial_lwt_thread () = Lwt.async (LogFlusher.run ~cancel_condition:ExitSignal.signal); (* If `prom` in `Lwt.async (fun () -> prom)` resolves to an exception, this function will be * called *) - Lwt.async_exception_hook := (fun exn -> - let bt = Printexc.get_backtrace () in - let msg = Utils.spf "Uncaught async exception: %s%s" - (Printexc.to_string exn) - (if bt = "" then bt else "\n"^bt) - in - Logger.fatal ~exn "Uncaught async exception. Exiting"; - FlowExitStatus.(exit ~msg Unknown_error) - ); + (Lwt.async_exception_hook := + fun exn -> + let exn = Exception.wrap exn in + let msg = Utils.spf "Uncaught async exception: %s" (Exception.to_string exn) in + Logger.fatal_s ~exn "Uncaught async exception. 
Exiting"; + FlowExitStatus.(exit ~msg Unknown_error)); Logger.init_logger log_fd; Logger.info "argv=%s" (argv |> Array.to_list |> String.concat " "); + LoggingUtils.dump_server_options + ~server_options:monitor_options.FlowServerMonitorOptions.server_options + ~log:(Logger.info "%s"); (* If there is a waiting fd, start up a thread that will message it *) - let handle_waiting_start_command = match waiting_fd with - | None -> Lwt.return_unit - | Some fd -> - let fd = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true fd in - handle_waiting_start_command fd + let handle_waiting_start_command = + match waiting_fd with + | None -> Lwt.return_unit + | Some fd -> + let fd = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true fd in + handle_waiting_start_command fd in - (* Don't start the server until we've set up the threads to handle the waiting channel *) Lwt.async (fun () -> - let%lwt () = handle_waiting_start_command in - FlowServerMonitorServer.start monitor_options - ); + let%lwt () = handle_waiting_start_command in + FlowServerMonitorServer.start monitor_options); (* We can start up the socket acceptor even before the server starts *) Lwt.async (fun () -> - SocketAcceptor.run - (Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true monitor_socket_fd) - monitor_options.FlowServerMonitorOptions.autostop - ); + SocketAcceptor.run + (Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true monitor_socket_fd) + monitor_options.FlowServerMonitorOptions.autostop); Lwt.async (fun () -> - SocketAcceptor.run_legacy ( - Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true legacy2_socket_fd - ) - ); + SocketAcceptor.run_legacy + (Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true legacy2_socket_fd)); Lwt.async (fun () -> - SocketAcceptor.run_legacy ( - Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true legacy1_socket_fd - ) - ); + SocketAcceptor.run_legacy + (Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true legacy1_socket_fd)); (* Wait forever! Mwhahahahahaha *) Lwt.wait () |> fst @@ -176,19 +159,14 @@ let daemon_entry_point = (* The entry point for creating a daemonized flow server monitor (like from `flow start`) *) let daemonize ~wait ~on_spawn monitor_options = let server_options = monitor_options.FlowServerMonitorOptions.server_options in - (* Let's make sure this isn't all for naught before we fork *) let root = Options.root server_options in let tmp_dir = Options.temp_dir server_options in let flowconfig_name = Options.flowconfig_name server_options in let lock = Server_files_js.lock_file ~flowconfig_name ~tmp_dir root in - if not (Lock.check lock) - then begin - let msg = spf - "Error: There is already a server running for %s" - (Path.to_string root) in - FlowExitStatus.(exit ~msg Lock_stolen) - end; + ( if not (Lock.check lock) then + let msg = spf "Error: There is already a server running for %s" (Path.to_string root) in + FlowExitStatus.(exit ~msg Lock_stolen) ); FlowServerMonitorDaemon.daemonize ~wait ~on_spawn ~monitor_options daemon_entry_point diff --git a/src/monitor/flowServerMonitor.mli b/src/monitor/flowServerMonitor.mli index ef7958f3d8d..01a9089b0cc 100644 --- a/src/monitor/flowServerMonitor.mli +++ b/src/monitor/flowServerMonitor.mli @@ -1,16 +1,10 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val start : - FlowServerMonitorOptions.t -> - unit +val start : FlowServerMonitorOptions.t -> unit -val daemonize: - wait:bool -> - on_spawn:(int -> unit) -> - FlowServerMonitorOptions.t -> - unit +val daemonize : wait:bool -> on_spawn:(int -> unit) -> FlowServerMonitorOptions.t -> unit diff --git a/src/monitor/flowServerMonitorDaemon.ml b/src/monitor/flowServerMonitorDaemon.ml index 4aa210bfdea..e38a8a5e84f 100644 --- a/src/monitor/flowServerMonitorDaemon.ml +++ b/src/monitor/flowServerMonitorDaemon.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,43 +7,36 @@ let spf = Printf.sprintf -type start_function = - ?waiting_fd:Unix.file_descr -> - FlowServerMonitorOptions.t -> - unit +type start_function = ?waiting_fd:Unix.file_descr -> FlowServerMonitorOptions.t -> unit (* When `flow start --wait` daemonizes the Flow server monitor, it listens over a pipe and waits * for the Flow server to finish initializing. These are the messages we send over the pipe *) type wait_msg = | Starting (* Monitor is up. All `flow start` commands wait for this *) - | Ready (* Server is done initializing. `flow start --wait` commands wait for this *) + | Ready -type entry_point = ( - FlowServerMonitorOptions.t * FlowEventLogger.logging_context, - unit, - wait_msg -) Daemon.entry +(* Server is done initializing. `flow start --wait` commands wait for this *) -(* When the daemonized monitor process starts up, this is the first code it runs *) -let register_entry_point (start: start_function) : entry_point = Daemon.register_entry_point - "monitor" - begin fun (monitor_options, logging_context) (ic, oc) -> - (* Disassociate this process with the process that spawned it *) - ignore(Sys_utils.setsid()); - (* We never read from this channel, so close it *) - Daemon.close_in ic; +type entry_point = + (FlowServerMonitorOptions.t * FlowEventLogger.logging_context, unit, wait_msg) Daemon.entry - (* Set up various logging related things *) - LoggingUtils.set_hh_logger_min_level monitor_options.FlowServerMonitorOptions.server_options; - FlowEventLogger.restore_context logging_context; - FlowEventLogger.set_command (Some "monitor"); - FlowEventLogger.init_flow_command ~version:Flow_version.version; +(* When the daemonized monitor process starts up, this is the first code it runs *) +let register_entry_point (start : start_function) : entry_point = + Daemon.register_entry_point "monitor" (fun (monitor_options, logging_context) (ic, oc) -> + (* Disassociate this process with the process that spawned it *) + ignore (Sys_utils.setsid ()); - let out_fd = Daemon.descr_of_out_channel oc in + (* We never read from this channel, so close it *) + Daemon.close_in ic; - start ~waiting_fd:out_fd monitor_options - end + (* Set up various logging related things *) + LoggingUtils.set_hh_logger_min_level monitor_options.FlowServerMonitorOptions.server_options; + FlowEventLogger.restore_context logging_context; + FlowEventLogger.set_command (Some "monitor"); + FlowEventLogger.init_flow_command ~version:Flow_version.version; + let out_fd = Daemon.descr_of_out_channel oc in + start ~waiting_fd:out_fd monitor_options) (* The monitor can communicate with the process that spawned it over a pipe. 
* The current scheme has it write a message when it starts up and has the @@ -51,60 +44,59 @@ let register_entry_point (start: start_function) : entry_point = Daemon.register * It's up to the forking process whether it cares to wait for the * initialization to complete *) let rec wait_loop ~should_wait child_pid ic = - let msg: wait_msg = try - Marshal_tools.from_fd_with_preamble (Daemon.descr_of_in_channel ic) - with End_of_file -> - (* The pipe broke before we got the all-clear from the monitor. What kind - * of things could go wrong? Well we check the lock before forking the - * monitor, but maybe by the time the monitor started someone else had - * grabbed the lock, so it exited. I'm sure there's a million other - * things that could have gone wrong *) - let pid, status = - match Unix.(waitpid [ WNOHANG; WUNTRACED; ] child_pid) with - | 0, _ -> + let msg : wait_msg = + try Marshal_tools.from_fd_with_preamble (Daemon.descr_of_in_channel ic) + with End_of_file -> + (* The pipe broke before we got the all-clear from the monitor. What kind + * of things could go wrong? Well we check the lock before forking the + * monitor, but maybe by the time the monitor started someone else had + * grabbed the lock, so it exited. I'm sure there's a million other + * things that could have gone wrong *) + let (pid, status) = + match Unix.(waitpid [WNOHANG; WUNTRACED] child_pid) with + | (0, _) -> (* Sometimes the End_of_file races the child process actually * exiting. In case that's happening here, let's give the child 1 * second more to die *) Unix.sleep 1; - Unix.(waitpid [ WNOHANG; WUNTRACED; ] child_pid) - | pid, status -> pid, status in - let exit_code = FlowExitStatus.Server_start_failed status in - let msg, exit_code = if pid = 0 - (* The monitor is still alive...not sure what happened *) - then - "Error: Failed to start server for some unknown reason.", exit_code - (* The monitor is dead. Shucks. *) - else - let reason, exit_code = match status with - | Unix.WEXITED code -> - if code = FlowExitStatus.(error_code Lock_stolen) - then - (* Sometimes when we actually go to start the monitor we find a - * monitor already running (race condition). If so, we can just - * forward that error code *) - "There is already a server running.", - FlowExitStatus.Lock_stolen - else if code = FlowExitStatus.(error_code Out_of_shared_memory) - then - "The server is failed to allocate shared memory.", - FlowExitStatus.Out_of_shared_memory - else - spf "exited prematurely with code %d." code, exit_code - | Unix.WSIGNALED signal -> - let signal_name = Sys_utils.name_of_signal signal in - spf "The server was killed prematurely with signal %s." signal_name, - exit_code - | Unix.WSTOPPED signal -> - spf "The server was stopped prematurely with signal %d." signal, - exit_code - in spf "Error: Failed to start server. %s" reason, exit_code - in FlowExitStatus.(exit ~msg exit_code) + Unix.(waitpid [WNOHANG; WUNTRACED] child_pid) + | (pid, status) -> (pid, status) + in + let exit_code = FlowExitStatus.Server_start_failed status in + let (msg, exit_code) = + if pid = 0 (* The monitor is still alive...not sure what happened *) then + ("Error: Failed to start server for some unknown reason.", exit_code) + (* The monitor is dead. Shucks. *) + else + let (reason, exit_code) = + match status with + | Unix.WEXITED code -> + if code = FlowExitStatus.(error_code Lock_stolen) then + (* Sometimes when we actually go to start the monitor we find a + * monitor already running (race condition). 
If so, we can just
+                 * forward that error code *)
+                ("There is already a server running.", FlowExitStatus.Lock_stolen)
+              else if code = FlowExitStatus.(error_code Out_of_shared_memory) then
+                ( "The server failed to allocate shared memory.",
+                  FlowExitStatus.Out_of_shared_memory )
+              else
+                (spf "exited prematurely with code %d." code, exit_code)
+            | Unix.WSIGNALED signal ->
+              let signal_name = Sys_utils.name_of_signal signal in
+              (spf "The server was killed prematurely with signal %s." signal_name, exit_code)
+            | Unix.WSTOPPED signal ->
+              (spf "The server was stopped prematurely with signal %d." signal, exit_code)
+          in
+          (spf "Error: Failed to start server. %s" reason, exit_code)
+      in
+      FlowExitStatus.(exit ~msg exit_code)
   in
-  if should_wait && msg <> Ready
-  then wait_loop ~should_wait child_pid ic
-  else Daemon.close_in ic
+  if should_wait && msg <> Ready then
+    wait_loop ~should_wait child_pid ic
+  else
+    Daemon.close_in ic
 
-let daemonize ~wait ~on_spawn ~monitor_options (entry_point: entry_point) =
+let daemonize ~wait ~on_spawn ~monitor_options (entry_point : entry_point) =
   let null_fd = Daemon.null_fd () in
   (* Daemon.spawn is creating a new process with /dev/null as both the stdout
    * and stderr. We are NOT leaking stdout and stderr. But the Windows
@@ -121,25 +113,23 @@ let daemonize ~wait ~on_spawn ~monitor_options (entry_point: entry_point) =
    * So for now let's make Windows 7 not crash. It seems like `flow start` on
    * Windows 7 doesn't actually leak stdio, so a no op is acceptable *)
-  if Sys.win32
-  then Unix.(try
-    set_close_on_exec stdout;
-    set_close_on_exec stderr
-  with Unix_error (EINVAL, _, _) -> ());
+  ( if Sys.win32 then
+    Unix.(
+      try
+        set_close_on_exec stdout;
+        set_close_on_exec stderr
+      with Unix_error (EINVAL, _, _) -> ()) );
   let root_str =
-    monitor_options.FlowServerMonitorOptions.server_options
-    |> Options.root
-    |> Path.to_string
+    monitor_options.FlowServerMonitorOptions.server_options |> Options.root |> Path.to_string
   in
-
-  let {Daemon.pid; channels = (ic, oc)} =
-    Daemon.spawn ~name:(spf "monitor for %s" root_str) (null_fd, null_fd, null_fd) entry_point (
-      monitor_options,
-      FlowEventLogger.get_context ()
-    )
+  let { Daemon.pid; channels = (ic, oc) } =
+    Daemon.spawn
+      ~name:(spf "monitor for %s" root_str)
+      (null_fd, null_fd, null_fd)
+      entry_point
+      (monitor_options, FlowEventLogger.get_context ())
   in
-
   (* We never write to the child process so we can close this channel *)
   Daemon.close_out oc;
 
diff --git a/src/monitor/flowServerMonitorDaemon.mli b/src/monitor/flowServerMonitorDaemon.mli
index b2b50bce568..61fbecacdf3 100644
--- a/src/monitor/flowServerMonitorDaemon.mli
+++ b/src/monitor/flowServerMonitorDaemon.mli
@@ -1,14 +1,11 @@
 (**
- * Copyright (c) 2017-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
  *
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree. 
*) -type start_function = - ?waiting_fd:Unix.file_descr -> - FlowServerMonitorOptions.t -> - unit +type start_function = ?waiting_fd:Unix.file_descr -> FlowServerMonitorOptions.t -> unit type wait_msg = | Starting @@ -16,9 +13,9 @@ type wait_msg = type entry_point -val register_entry_point: start_function -> entry_point +val register_entry_point : start_function -> entry_point -val daemonize: +val daemonize : wait:bool -> on_spawn:(int -> 'a) -> monitor_options:FlowServerMonitorOptions.t -> diff --git a/src/monitor/flowServerMonitorOptions.ml b/src/monitor/flowServerMonitorOptions.ml index f10d3b247dd..cc3e79dc88d 100644 --- a/src/monitor/flowServerMonitorOptions.ml +++ b/src/monitor/flowServerMonitorOptions.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/monitor/flowServerMonitorServer.ml b/src/monitor/flowServerMonitorServer.ml index f808d84098e..e8dbc7564a5 100644 --- a/src/monitor/flowServerMonitorServer.ml +++ b/src/monitor/flowServerMonitorServer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -21,84 +21,83 @@ let spf = Printf.sprintf module Logger = FlowServerMonitorLogger -module PersistentProt = Persistent_connection_prot type command = -| Write_ephemeral_request of { - request: ServerProt.Request.command_with_context; - client: EphemeralConnection.t; - } -| Write_persistent_request of { - client_id: PersistentProt.client_id; - request: PersistentProt.request; - } -| Notify_new_persistent_connection of { - client_id: PersistentProt.client_id; - logging_context: FlowEventLogger.logging_context; - lsp: Lsp.Initialize.params option; - } -| Notify_dead_persistent_connection of { - client_id: PersistentProt.client_id; - } -| Notify_file_changes + | Write_ephemeral_request of { + request: ServerProt.Request.command_with_context; + client: EphemeralConnection.t; + } + | Write_persistent_request of { + client_id: LspProt.client_id; + request: LspProt.request_with_metadata; + } + | Notify_new_persistent_connection of { + client_id: LspProt.client_id; + lsp_init_params: Lsp.Initialize.params; + } + | Notify_dead_persistent_connection of { client_id: LspProt.client_id } + | Notify_file_changes (* A wrapper for Pervasives.exit which gives other threads a second to handle their business * before the monitor exits *) let exiting = ref false + let exit ~msg exit_status = - if !exiting - then + if !exiting then (* We're already exiting, so there's nothing to do. 
But no one expects `exit` to return, so * let's just wait forever *) - let waiter, _ = Lwt.wait () in + let (waiter, _) = Lwt.wait () in waiter - else begin + else ( exiting := true; Logger.info "Monitor is exiting (%s)" msg; Logger.info "Broadcasting to threads and waiting 1 second for them to exit"; Lwt_condition.broadcast ExitSignal.signal (exit_status, msg); (* Protect this thread from getting canceled *) - Lwt.protected ( - let%lwt () = Lwt_unix.sleep 1.0 in - FlowEventLogger.exit (Some msg) (FlowExitStatus.to_string exit_status); - Pervasives.exit (FlowExitStatus.error_code exit_status) - ) - end + Lwt.protected + (let%lwt () = Lwt_unix.sleep 1.0 in + FlowEventLogger.exit (Some msg) (FlowExitStatus.to_string exit_status); + Pervasives.exit (FlowExitStatus.error_code exit_status)) + ) (* Exit after 7 days of no requests *) -module Doomsday: sig - val start_clock: unit -> unit Lwt.t - val postpone: unit -> unit +module Doomsday : sig + val start_clock : unit -> unit Lwt.t + + val postpone : unit -> unit end = struct let seven_days_in_secs = 3600. *. 24. *. 7. - let time_in_seven_days () = - Unix.time () +. seven_days_in_secs + let time_in_seven_days () = Unix.time () +. seven_days_in_secs let doomsday_time = ref (time_in_seven_days ()) - let postpone () = - doomsday_time := time_in_seven_days () + let postpone () = doomsday_time := time_in_seven_days () let rec start_clock () = - let time_til_doomsday = !doomsday_time -. (Unix.time ()) in - if time_til_doomsday <= 0. - then exit ~msg:"Exiting server. Last used >7 days ago" FlowExitStatus.Unused_server - else let%lwt () = Lwt_unix.sleep time_til_doomsday in start_clock () + let time_til_doomsday = !doomsday_time -. Unix.time () in + if time_til_doomsday <= 0. then + exit ~msg:"Exiting server. Last used >7 days ago" FlowExitStatus.Unused_server + else + let%lwt () = Lwt_unix.sleep time_til_doomsday in + start_clock () end (* The long-lived stream of requests in the monitor that have arrived from client *) (* This is unbounded, because otherwise lspCommand might deadlock. *) -let command_stream, push_to_command_stream = Lwt_stream.create () +let (command_stream, push_to_command_stream) = Lwt_stream.create () (* ServerInstance.t is an individual Flow server instance. The code inside this module handles * interacting with a Flow server instance *) module ServerInstance : sig type t - val start: FlowServerMonitorOptions.t -> t Lwt.t - val cleanup: t -> unit Lwt.t - val pid_of: t -> int + + val start : FlowServerMonitorOptions.t -> ServerStatus.restart_reason option -> t Lwt.t + + val cleanup : t -> unit Lwt.t + + val pid_of : t -> int end = struct type t = { pid: int; @@ -126,8 +125,7 @@ end = struct with Lwt_stream.Closed -> Logger.debug "Client for request '%s' is dead. Throwing away response" request_id end; - Lwt.return_unit - ) + Lwt.return_unit) | MonitorProt.RequestFailed (request_id, exn_str) -> Logger.error "Server threw exception when processing '%s': %s" request_id exn_str; let%lwt request = RequestMap.remove ~request_id in @@ -142,8 +140,7 @@ end = struct with Lwt_stream.Closed -> Logger.debug "Client for request '%s' is dead. 
Throwing away response" request_id end; - Lwt.return_unit - ) + Lwt.return_unit) | MonitorProt.StatusUpdate status -> StatusStream.update ~status; Lwt.return_unit @@ -154,7 +151,7 @@ end = struct Lwt.return_unit module CommandLoop = LwtLoop.Make (struct - type acc = (FileWatcher.watcher * ServerConnection.t) + type acc = FileWatcher.watcher * ServerConnection.t (* Writes a message to the out-stream of the monitor, to be eventually *) (* picked up by the server. *) @@ -163,49 +160,53 @@ end = struct (* In order to try and avoid races between the file system and a command (like `flow status`), * we check for file system notification before sending a request to the server *) let send_file_watcher_notification watcher conn = - let%lwt files = watcher#get_and_clear_changed_files in - if not (SSet.is_empty files) - then begin + let%lwt (files, metadata) = watcher#get_and_clear_changed_files in + if not (SSet.is_empty files) then ( let count = SSet.cardinal files in - Logger.info "File watcher reported %d file%s changed" count (if count = 1 then "" else "s"); - send_request ~msg:(MonitorProt.FileWatcherNotification files) conn - end; + Logger.info + "File watcher reported %d file%s changed" + count + ( if count = 1 then + "" + else + "s" ); + send_request ~msg:(MonitorProt.FileWatcherNotification (files, metadata)) conn + ); Lwt.return_unit let main (watcher, conn) = let%lwt command = Lwt_stream.next command_stream in - let%lwt () = begin match command with - | Write_ephemeral_request { request; client; } -> - Doomsday.postpone (); - if not (EphemeralConnection.is_closed client) - then begin + let%lwt () = + match command with + | Write_ephemeral_request { request; client } -> + Doomsday.postpone (); + if not (EphemeralConnection.is_closed client) then ( + let%lwt () = send_file_watcher_notification watcher conn in + let%lwt request_id = RequestMap.add ~request ~client in + Logger.debug "Writing '%s' to the server connection" request_id; + send_request ~msg:(MonitorProt.Request (request_id, request)) conn; + Lwt.return_unit + ) else ( + Logger.debug "Skipping request from a dead ephemeral connection"; + Lwt.return_unit + ) + | Write_persistent_request { client_id; request } -> + Doomsday.postpone (); let%lwt () = send_file_watcher_notification watcher conn in - let%lwt request_id = RequestMap.add ~request ~client in - Logger.debug "Writing '%s' to the server connection" request_id; - send_request ~msg:(MonitorProt.Request (request_id, request)) conn; + let msg = MonitorProt.PersistentConnectionRequest (client_id, request) in + send_request ~msg conn; Lwt.return_unit - end else begin - Logger.debug "Skipping request from a dead ephemeral connection"; + | Notify_new_persistent_connection { client_id; lsp_init_params } -> + let msg = MonitorProt.NewPersistentConnection (client_id, lsp_init_params) in + send_request ~msg conn; Lwt.return_unit - end - | Write_persistent_request { client_id; request; } -> - Doomsday.postpone (); - let%lwt () = send_file_watcher_notification watcher conn in - let msg = MonitorProt.PersistentConnectionRequest (client_id, request) in - send_request ~msg conn; - Lwt.return_unit - | Notify_new_persistent_connection { client_id; logging_context; lsp; } -> - let msg = MonitorProt.NewPersistentConnection (client_id, logging_context, lsp) in - send_request ~msg conn; - Lwt.return_unit - | Notify_dead_persistent_connection { client_id; } -> - let () = PersistentConnectionMap.remove ~client_id in - let msg = MonitorProt.DeadPersistentConnection client_id in - send_request ~msg 
conn; - Lwt.return_unit - | Notify_file_changes -> - send_file_watcher_notification watcher conn - end in + | Notify_dead_persistent_connection { client_id } -> + let () = PersistentConnectionMap.remove ~client_id in + let msg = MonitorProt.DeadPersistentConnection client_id in + send_request ~msg conn; + Lwt.return_unit + | Notify_file_changes -> send_file_watcher_notification watcher conn + in Lwt.return (watcher, conn) let catch _ exn = @@ -235,138 +236,128 @@ end = struct (* The monitor is exiting. Let's try and shut down the server gracefully *) let cleanup_on_exit ~exit_status ~exit_msg ~connection ~pid = - let msg = MonitorProt.(PleaseDie (MonitorExiting (exit_status, exit_msg))) in - ServerConnection.write ~msg connection; - + let () = + try + let msg = MonitorProt.(PleaseDie (MonitorExiting (exit_status, exit_msg))) in + ServerConnection.write ~msg connection + with Lwt_stream.Closed -> + (* Connection to the server has already closed. The server is likely already dead *) + () + in (* The monitor waits 1 second before exiting. So let's give the server .75 seconds to shutdown * gracefully. *) - let%lwt server_status = Lwt.pick [ - (let%lwt (_, status) = LwtSysUtils.blocking_waitpid pid in Lwt.return (Some status)); - (let%lwt () = Lwt_unix.sleep 0.75 in Lwt.return None) - ] in - - let%lwt () = ServerConnection.close_immediately connection in - let still_alive = begin match server_status with - | Some (Unix.WEXITED exit_status) -> - let exit_type = - try Some (FlowExitStatus.error_type exit_status) - with Not_found -> None + try%lwt + let%lwt server_status = + Lwt.pick + [ + (let%lwt (_, status) = LwtSysUtils.blocking_waitpid pid in + Lwt.return (Some status)); + (let%lwt () = Lwt_unix.sleep 0.75 in + Lwt.return None); + ] in - begin if exit_type = Some FlowExitStatus.Killed_by_monitor - then Logger.info "Successfully killed the server process" - else - let exit_status_string = - Option.value_map ~default:"Invalid_exit_code" ~f:FlowExitStatus.to_string exit_type - in - Logger.error - "Tried to kill the server process (%d), which exited with the wrong exit code: %s" - pid - exit_status_string - end; - false - | Some (Unix.WSIGNALED signal) -> - Logger.error - "Tried to kill the server process (%d), but for some reason it was killed with %s signal" - pid - (PrintSignal.string_of_signal signal); - false - | Some (Unix.WSTOPPED signal) -> - Logger.error - "Tried to kill the server process (%d), but for some reason it was stopped with %s signal" - pid - (PrintSignal.string_of_signal signal); - true - | None -> - Logger.error "Tried to kill the server process (%d), but it didn't die" pid; - true - end in - - if still_alive then Unix.kill pid Sys.sigkill; + let%lwt () = ServerConnection.close_immediately connection in + let still_alive = + match server_status with + | Some (Unix.WEXITED exit_status) -> + let exit_type = + (try Some (FlowExitStatus.error_type exit_status) with Not_found -> None) + in + begin + if exit_type = Some FlowExitStatus.Killed_by_monitor then + Logger.info "Successfully killed the server process" + else + let exit_status_string = + Option.value_map ~default:"Invalid_exit_code" ~f:FlowExitStatus.to_string exit_type + in + Logger.error + "Tried to kill the server process (%d), which exited with the wrong exit code: %s" + pid + exit_status_string + end; + false + | Some (Unix.WSIGNALED signal) -> + Logger.error + "Tried to kill the server process (%d), but for some reason it was killed with %s signal" + pid + (PrintSignal.string_of_signal signal); + false + | Some 
(Unix.WSTOPPED signal) -> + Logger.error + "Tried to kill the server process (%d), but for some reason it was stopped with %s signal" + pid + (PrintSignal.string_of_signal signal); + true + | None -> + Logger.error "Tried to kill the server process (%d), but it didn't die" pid; + true + in + if still_alive then Unix.kill pid Sys.sigkill; - Lwt.return_unit + Lwt.return_unit + with Unix.Unix_error (Unix.ECHILD, _, _) -> + Logger.info "Server process has already exited. No need to kill it"; + Lwt.return_unit let cleanup t = Lwt.cancel t.command_loop; Lwt.cancel t.file_watcher_loop; Lwt.cancel t.file_watcher_exit_thread; Lwt.cancel t.on_exit_thread; + (* Lwt.join will run these threads in parallel and only return when EVERY thread has returned * or failed *) - Lwt.join [ - t.watcher#stop; - ServerConnection.close_immediately t.connection; - ] + Lwt.join [(t.watcher)#stop; ServerConnection.close_immediately t.connection] - let handle_file_watcher_exit watcher status = + let handle_file_watcher_exit watcher = (* TODO (glevi) - We probably don't need to make the monitor exit when the file watcher dies. * We could probably just restart it. For dfind, we'd also need to start a new server, but for * watchman we probably could just start a new watchman daemon and use the clockspec *) - begin match status with - | Unix.WEXITED exit_status -> - let exit_type = - try Some (FlowExitStatus.error_type exit_status) - with Not_found -> None in - let exit_status_string = - Option.value_map ~default:"Invalid_exit_code" ~f:FlowExitStatus.to_string exit_type in - Logger.error "File watcher (%s) exited with code %s (%d)" - watcher#name - exit_status_string - exit_status - | Unix.WSIGNALED signal -> - Logger.error "File watcher (%s) was killed with %s signal" - watcher#name - (PrintSignal.string_of_signal signal) - | Unix.WSTOPPED signal -> - Logger.error "File watcher (%s) was stopped with %s signal" - watcher#name - (PrintSignal.string_of_signal signal) - end; exit ~msg:(spf "File watcher (%s) died" watcher#name) FlowExitStatus.Dfind_died + let server_num = ref 0 (* Spawn a brand new Flow server *) - let start monitor_options = + let start monitor_options restart_reason = Logger.info "Creating a new Flow server"; - let { FlowServerMonitorOptions. 
- shared_mem_config; + let { + FlowServerMonitorOptions.shared_mem_config; server_options; - server_log_file=log_file; + server_log_file = log_file; argv; file_watcher; _; - } = monitor_options in - - let%lwt () = StatusStream.reset file_watcher in - - let watcher = match file_watcher with - | Options.NoFileWatcher -> - new FileWatcher.dummy - | Options.DFind -> - new FileWatcher.dfind monitor_options - | Options.Watchman -> - new FileWatcher.watchman monitor_options + } = + monitor_options + in + let%lwt () = StatusStream.reset file_watcher restart_reason in + let watcher = + match file_watcher with + | Options.NoFileWatcher -> new FileWatcher.dummy + | Options.DFind -> new FileWatcher.dfind monitor_options + | Options.Watchman -> new FileWatcher.watchman monitor_options in Logger.debug "Initializing file watcher (%s)" watcher#name; watcher#start_init; let file_watcher_pid = watcher#getpid in - let handle = Server.daemonize ~log_file ~shared_mem_config ~argv ~file_watcher_pid - server_options in + let handle = + Server.daemonize ~log_file ~shared_mem_config ~argv ~file_watcher_pid server_options + in let (ic, oc) = handle.Daemon.channels in let in_fd = ic |> Daemon.descr_of_in_channel - |> Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true in + |> Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true + in let out_fd = oc |> Daemon.descr_of_out_channel - |> Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true in - + |> Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true + in let close_if_open fd = - try Lwt_unix.close fd - (* If it's already closed, we'll get EBADF *) - with Unix.Unix_error(Unix.EBADF, _, _) -> Lwt.return_unit + try Lwt_unix.close fd (* If it's already closed, we'll get EBADF *) + with Unix.Unix_error (Unix.EBADF, _, _) -> Lwt.return_unit in - (* So it's actually important that we close the Lwt_unix.file_descr and not just the * underlying Unix.file_descr. Why? * @@ -382,12 +373,8 @@ end = struct let close () = (* Lwt.join will run these threads in parallel and only finish when EVERY thread has finished * or failed *) - Lwt.join [ - close_if_open in_fd; - close_if_open out_fd; - ] + Lwt.join [close_if_open in_fd; close_if_open out_fd] in - incr server_num; let name = spf "server #%d" !server_num in let%lwt (start, connection) = @@ -396,7 +383,6 @@ end = struct start (); let pid = handle.Daemon.pid in - (* Close the connection to the server when we're about to exit *) let on_exit_thread = try%lwt @@ -408,96 +394,132 @@ end = struct Logger.fatal ~exn "Uncaught exception in on_exit_thread"; raise exn in - (* This may block for quite awhile. No messages will be sent to the server process until the * file watcher is up and running *) let%lwt () = watcher#wait_for_init in Logger.debug "File watcher (%s) ready!" 
watcher#name; let file_watcher_exit_thread = - let%lwt status = watcher#waitpid in handle_file_watcher_exit watcher status + let%lwt () = + try%lwt watcher#waitpid with + | Lwt.Canceled as exn -> + let exn = Exception.wrap exn in + Exception.reraise exn + | exn -> + Logger.error ~exn "Uncaught exception in watcher#waitpid"; + Lwt.return_unit + in + handle_file_watcher_exit watcher in StatusStream.file_watcher_ready (); let command_loop = CommandLoop.run ~cancel_condition:ExitSignal.signal (watcher, connection) in let file_watcher_loop = - if file_watcher = Options.NoFileWatcher - then Lwt.return_unit (* Don't even bother *) - else FileWatcherLoop.run ~cancel_condition:ExitSignal.signal watcher + if file_watcher = Options.NoFileWatcher then + Lwt.return_unit + (* Don't even bother *) + else + FileWatcherLoop.run ~cancel_condition:ExitSignal.signal watcher in - - Lwt.return { - pid; - watcher; - connection; - command_loop; - file_watcher_loop; - on_exit_thread; - file_watcher_exit_thread; - } + Lwt.return + { + pid; + watcher; + connection; + command_loop; + file_watcher_loop; + on_exit_thread; + file_watcher_exit_thread; + } let pid_of t = t.pid end (* A loop who's job is to start a server and then wait for it to die *) module KeepAliveLoop = LwtLoop.Make (struct - type acc = FlowServerMonitorOptions.t - - (* Given that a Flow server has just exited with this exit status, should the monitor exit too? *) - let should_monitor_exit_with_server monitor_options exit_status = - if monitor_options.FlowServerMonitorOptions.no_restart - then true - else begin - let open FlowExitStatus in - match exit_status with - (**** Things the server might exit with that implies that the monitor should exit too ****) - - | No_error (* Server exited cleanly *) - | Windows_killed_by_task_manager (* Windows task manager killed the server *) - | Invalid_flowconfig (* Parse/version/etc error. Server will never start correctly. *) - | Path_is_not_a_file (* Required a file but privided path was not a file *) - | Server_client_directory_mismatch (* This is a weird one *) - | Flowconfig_changed (* We could survive some config changes, but it's too hard to tell *) - | Invalid_saved_state (* The saved state file won't automatically recover by restarting *) - | Unused_server (* The server appears unused for long enough that it decided to just die *) - | Unknown_error (* Uncaught exn. We probably could survive this, but it's a little risky *) - - (**** Things that the server shouldn't use, but would imply that the monitor should exit ****) - - | Interrupted - | Build_id_mismatch (* Client build differs from server build - only monitor uses this *) - | Lock_stolen (* Lock lost - only monitor should use this *) - | Socket_error (* Failed to set up socket - only monitor should use this *) - -> true - - (**** Things the server might exit with which the monitor can survive ****) - - | Server_out_of_date (* Server needs to restart, but monitor can survive *) - | Out_of_shared_memory (* The monitor doesn't used sharedmem so we can survive *) - | Dfind_died - | Dfind_unresponsive - | Killed_by_monitor (* The server died because we asked it to die *) - -> false - - (**** Unrelated exit codes. 
If we see them then something is wrong ****) - - | Type_error - | Out_of_time - | Kill_error - | No_server_running - | Out_of_retries - | Input_error - | Could_not_find_flowconfig - | Commandline_usage_error - | No_input - | Missing_flowlib - | Server_start_failed _ - | Autostop (* is used by monitor to exit, not server *) - -> true - end + type acc = FlowServerMonitorOptions.t * ServerStatus.restart_reason option + + (* Given that a Flow server has just exited with this exit status, should the monitor exit too? + * + * Returns the tuple (should_monitor_exit_with_server, restart_reason) + *) + let process_server_exit monitor_options exit_status = + if monitor_options.FlowServerMonitorOptions.no_restart then + (true, None) + else + FlowExitStatus.( + match exit_status with + (**** Things the server might exit with that implies that the monitor should exit too ****) + | No_error + (* Server exited cleanly *) + + | Windows_killed_by_task_manager + (* Windows task manager killed the server *) + + | Invalid_flowconfig + (* Parse/version/etc error. Server will never start correctly. *) + + | Path_is_not_a_file + (* Required a file but privided path was not a file *) + + | Server_client_directory_mismatch + (* This is a weird one *) + + | Flowconfig_changed + (* We could survive some config changes, but it's too hard to tell *) + + | Invalid_saved_state + (* The saved state file won't automatically recover by restarting *) + + | Unused_server + (* The server appears unused for long enough that it decided to just die *) + + | Unknown_error + (* Uncaught exn. We probably could survive this, but it's a little risky *) + + | Watchman_error + (* We ran into an issue with Watchman *) + (**** Things that the server shouldn't use, but would imply that the monitor should exit ****) + + | Interrupted + | Build_id_mismatch + (* Client build differs from server build - only monitor uses this *) + + | Lock_stolen + (* Lock lost - only monitor should use this *) + + | Socket_error + (* Failed to set up socket - only monitor should use this *) + + | Dfind_died + (* Any file watcher died (it's misnamed) - only monitor should use this *) + + | Dfind_unresponsive (* Not used anymore *) -> + (true, None) + (**** Things the server might exit with which the monitor can survive ****) + | Server_out_of_date (* Server needs to restart, but monitor can survive *) -> + (false, Some ServerStatus.Server_out_of_date) + | Out_of_shared_memory (* The monitor doesn't used sharedmem so we can survive *) -> + (false, Some ServerStatus.Out_of_shared_memory) + | Killed_by_monitor (* The server died because we asked it to die *) -> (false, None) + | Restart (* The server asked to be restarted *) -> (false, Some ServerStatus.Restart) + (**** Unrelated exit codes. If we see them then something is wrong ****) + | Type_error + | Out_of_time + | Kill_error + | No_server_running + | Out_of_retries + | Input_error + | Could_not_find_flowconfig + | Commandline_usage_error + | No_input + | Missing_flowlib + | Server_start_failed _ + | Autostop (* is used by monitor to exit, not server *) -> + (true, None)) let should_monitor_exit_with_signaled_server signal = (* While there are many scary things which can cause segfaults, in practice we've mostly seen - * them when the Flow server hits some infinite or very deep recursion (like List.map on a + * them when the Flow server hits some infinite or very deep recursion (like Core_list.map ~f:on a * very large list). 
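(* A minimal standalone sketch of the decision-table pattern used by [process_server_exit]
 * above: each server exit code maps to a pair of "should the monitor exit too?" and an
 * optional restart reason. The [exit_code] and [restart_reason] variants below are simplified
 * stand-ins for illustration, not Flow's real FlowExitStatus / ServerStatus types. *)
type exit_code =
  | Clean_exit            (* server finished normally *)
  | Config_error          (* restarting would just hit the same error *)
  | Out_of_shared_memory  (* transient; a fresh server may be fine *)
  | Asked_to_restart      (* server explicitly requested a restart *)

type restart_reason =
  | Restarting_out_of_shared_memory
  | Restarting_on_request

(* Returns (should_monitor_exit, why_we_are_restarting) *)
let process_exit = function
  | Clean_exit
  | Config_error -> (true, None)
  | Out_of_shared_memory -> (false, Some Restarting_out_of_shared_memory)
  | Asked_to_restart -> (false, Some Restarting_on_request)

let () =
  match process_exit Out_of_shared_memory with
  | (true, _) -> print_endline "monitor exits along with the server"
  | (false, Some _) -> print_endline "restart the server and remember why"
  | (false, None) -> print_endline "restart the server quietly"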
Often, this is triggered by some ephemeral command, which is rerun when * the server starts back up, leading to a cycle of segfaulting servers. * @@ -511,66 +533,82 @@ module KeepAliveLoop = LwtLoop.Make (struct let pid = ServerInstance.pid_of server in let%lwt (_, status) = LwtSysUtils.blocking_waitpid pid in let%lwt () = ServerInstance.cleanup server in - if Sys.unix && try Sys_utils.check_dmesg_for_oom pid "flow" with _ -> false - then FlowEventLogger.murdered_by_oom_killer (); + if Sys.unix && (try Sys_utils.check_dmesg_for_oom pid "flow" with _ -> false) then + FlowEventLogger.murdered_by_oom_killer (); match status with | Unix.WEXITED exit_status -> let exit_type = - try Some (FlowExitStatus.error_type exit_status) - with Not_found -> None in + (try Some (FlowExitStatus.error_type exit_status) with Not_found -> None) + in let exit_status_string = - Option.value_map ~default:"Invalid_exit_code" ~f:FlowExitStatus.to_string exit_type in - Logger.error "Flow server (pid %d) exited with code %s (%d)" + Option.value_map ~default:"Invalid_exit_code" ~f:FlowExitStatus.to_string exit_type + in + Logger.error + "Flow server (pid %d) exited with code %s (%d)" pid exit_status_string exit_status; - begin match exit_type with - | None -> + begin + match exit_type with + | None -> exit - ~msg:(spf "Flow server exited with invalid exit code (%d)" exit_status) - FlowExitStatus.Unknown_error - | Some exit_type -> - (* There are a few whitelisted reasons where the persistent client wants *) - (* to know why the flow server is about to fatally close the persistent *) - (* connection. This WEXITED case covers them. (It doesn't matter that *) - (* it also sends the reason in a few additional cases as well.) *) - let send_close conn = - try PersistentConnection.write ~msg:(PersistentProt.ServerExit exit_type) conn - with _ -> () - in - PersistentConnectionMap.get_all_clients () |> List.iter send_close; - if should_monitor_exit_with_server monitor_options exit_type - then exit ~msg:"Dying along with server" exit_type - else Lwt.return_unit + ~msg:(spf "Flow server exited with invalid exit code (%d)" exit_status) + FlowExitStatus.Unknown_error + | Some exit_type -> + (* There are a few whitelisted reasons where the persistent client wants *) + (* to know why the flow server is about to fatally close the persistent *) + (* connection. This WEXITED case covers them. (It doesn't matter that *) + (* it also sends the reason in a few additional cases as well.) 
*) + let send_close conn = + try + PersistentConnection.write + ~msg:LspProt.(NotificationFromServer (ServerExit exit_type)) + conn + with _ -> () + in + PersistentConnectionMap.get_all_clients () |> List.iter send_close; + + let (should_monitor_exit_with_server, restart_reason) = + process_server_exit monitor_options exit_type + in + if should_monitor_exit_with_server then + exit ~msg:"Dying along with server" exit_type + else + Lwt.return restart_reason end | Unix.WSIGNALED signal -> - Logger.error "Flow server (pid %d) was killed with %s signal" + Logger.error + "Flow server (pid %d) was killed with %s signal" pid (PrintSignal.string_of_signal signal); FlowEventLogger.report_from_monitor_server_exit_due_to_signal signal; - if should_monitor_exit_with_signaled_server signal - then exit ~msg:"Dying along with signaled server" FlowExitStatus.Interrupted - else Lwt.return_unit + if should_monitor_exit_with_signaled_server signal then + exit ~msg:"Dying along with signaled server" FlowExitStatus.Interrupted + else + Lwt.return_none | Unix.WSTOPPED signal -> (* If a Flow server has been stopped but hasn't exited then what should we do? I suppose we * could try to signal it to resume. Or we could wait for it to start up again. But killing * it and starting a new server seems easier *) - Logger.error "Flow server (pid %d) was stopped with %s signal. Sending sigkill" + Logger.error + "Flow server (pid %d) was stopped with %s signal. Sending sigkill" pid (PrintSignal.string_of_signal signal); + (* kill is not a blocking system call, which is likely why it is missing from Lwt_unix *) Unix.kill pid Sys.sigkill; - Lwt.return_unit + Lwt.return_none (* The RequestMap will contain all the requests which have been sent to the server but never * received a response. If we're starting up a new server, we can resend all these requests to * the new server *) let requeue_stalled_requests () = let%lwt requests = RequestMap.remove_all () in - Lwt_list.iter_p (fun (request, client) -> - Lwt.return (push_to_command_stream (Some (Write_ephemeral_request {request; client;}))) - ) requests + Lwt_list.iter_p + (fun (request, client) -> + Lwt.return (push_to_command_stream (Some (Write_ephemeral_request { request; client })))) + requests (* Ephemeral commands are stateless, so they can survive a server restart. However a persistent * connection might have state, so it's wrong to allow it to survive. 
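(* A small standalone sketch of the requeue step above: requests that were in flight when the
 * server died are drained from a mutex-protected pending map (in the spirit of requestMap.ml
 * further down in this patch) and pushed back onto a command stream for the next server. The
 * map, stream and string payloads here are simplified stand-ins, and let%lwt assumes lwt_ppx,
 * as in the monitor code itself. *)
let pending : (string, string) Hashtbl.t = Hashtbl.create 16

let pending_mutex = Lwt_mutex.create ()

let last_id = ref 0

let (_command_stream, push_to_command_stream) = Lwt_stream.create ()

(* Register a request under a fresh "Request N" id *)
let add_pending request =
  Lwt_mutex.with_lock pending_mutex (fun () ->
      incr last_id;
      let request_id = Printf.sprintf "Request %d" !last_id in
      Hashtbl.replace pending request_id request;
      Lwt.return request_id)

(* Atomically remove and return everything still awaiting a reply *)
let remove_all_pending () =
  Lwt_mutex.with_lock pending_mutex (fun () ->
      let stalled = Hashtbl.fold (fun _id req acc -> req :: acc) pending [] in
      Hashtbl.reset pending;
      Lwt.return stalled)

let requeue_stalled_requests () =
  let%lwt stalled = remove_all_pending () in
  Lwt_list.iter_p (fun request -> Lwt.return (push_to_command_stream (Some request))) stalled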
Maybe in the future we can @@ -580,68 +618,70 @@ module KeepAliveLoop = LwtLoop.Make (struct PersistentConnectionMap.get_all_clients () |> Lwt_list.iter_p PersistentConnection.close_immediately - let main monitor_options = + let main (monitor_options, restart_reason) = let%lwt () = requeue_stalled_requests () in - let%lwt server = ServerInstance.start monitor_options in - let%lwt () = wait_for_server_to_die monitor_options server in + let%lwt server = ServerInstance.start monitor_options restart_reason in + let%lwt restart_reason = wait_for_server_to_die monitor_options server in let%lwt () = killall_persistent_connections () in - Lwt.return monitor_options + Lwt.return (monitor_options, restart_reason) let catch _ exn = - Logger.error ~exn "Exception in KeepAliveLoop"; - raise exn + let e = Exception.wrap exn in + match exn with + | Watchman_lwt.Timeout -> + let msg = Printf.sprintf "Watchman timed out.\n%s" (Exception.to_string e) in + FlowExitStatus.(exit ~msg Watchman_error) + | _ -> + Logger.error ~exn "Exception in KeepAliveLoop"; + Exception.reraise e end) let setup_signal_handlers = - let signals = [ - Sys.sigint; (* Interrupt - ctrl-c *) - Sys.sigterm; (* Termination - like a nicer sigkill giving you a chance to cleanup *) - Sys.sighup; (* Hang up - the terminal went away *) - Sys.sigquit; (* Dump core - Kind of a meaner sigterm *) - ] + let signals = + [ + Sys.sigint; + (* Interrupt - ctrl-c *) + Sys.sigterm; + (* Termination - like a nicer sigkill giving you a chance to cleanup *) + Sys.sighup; + (* Hang up - the terminal went away *) + Sys.sigquit; + (* Dump core - Kind of a meaner sigterm *) + + ] in - let handle_signal signal = - Lwt.async (fun () -> exit - ~msg:(spf "Received %s signal" (PrintSignal.string_of_signal signal)) - FlowExitStatus.Interrupted - ) + Lwt.async (fun () -> + exit + ~msg:(spf "Received %s signal" (PrintSignal.string_of_signal signal)) + FlowExitStatus.Interrupted) in - let set_signal s = - try - Sys_utils.set_signal s (Sys.Signal_handle handle_signal) + try Sys_utils.set_signal s (Sys.Signal_handle handle_signal) with exn -> Logger.error ~exn "Failed to install signal handler for %s" (PrintSignal.string_of_signal s) in - - fun () -> List.iter set_signal signals + (fun () -> List.iter set_signal signals) let start monitor_options = Lwt.async Doomsday.start_clock; setup_signal_handlers (); - KeepAliveLoop.run ~cancel_condition:ExitSignal.signal monitor_options + KeepAliveLoop.run ~cancel_condition:ExitSignal.signal (monitor_options, None) let send_request ~client ~request = Logger.debug "Adding request (%s) to the command stream" (ServerProt.Request.to_string request.ServerProt.Request.command); - push_to_command_stream - (Some (Write_ephemeral_request {request; client;})) + push_to_command_stream (Some (Write_ephemeral_request { request; client })) let send_persistent_request ~client_id ~request = - Logger.debug - "Adding request (%s) to the command stream" - (PersistentProt.string_of_request request); - push_to_command_stream - (Some (Write_persistent_request {client_id; request;})) + Logger.debug "Adding request (%s) to the command stream" (LspProt.string_of_request request); + push_to_command_stream (Some (Write_persistent_request { client_id; request })) -let notify_new_persistent_connection ~client_id ~logging_context ~lsp = +let notify_new_persistent_connection ~client_id ~lsp_init_params = Logger.debug "Adding notification that there's a new persistent client #%d" client_id; - push_to_command_stream - (Some (Notify_new_persistent_connection 
{client_id; logging_context; lsp;})) + push_to_command_stream (Some (Notify_new_persistent_connection { client_id; lsp_init_params })) let notify_dead_persistent_connection ~client_id = Logger.debug "Adding notification that persistent client #%d died" client_id; - push_to_command_stream - (Some (Notify_dead_persistent_connection {client_id;})) + push_to_command_stream (Some (Notify_dead_persistent_connection { client_id })) diff --git a/src/monitor/flowServerMonitorServer.mli b/src/monitor/flowServerMonitorServer.mli index c101cf1b6b7..202113c1b2f 100644 --- a/src/monitor/flowServerMonitorServer.mli +++ b/src/monitor/flowServerMonitorServer.mli @@ -1,31 +1,21 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) val send_request : - client:EphemeralConnection.t -> - request:ServerProt.Request.command_with_context -> - unit + client:EphemeralConnection.t -> request:ServerProt.Request.command_with_context -> unit val send_persistent_request : - client_id:Persistent_connection_prot.client_id -> - request:Persistent_connection_prot.request -> - unit - + client_id:LspProt.client_id -> request:LspProt.request_with_metadata -> unit val notify_new_persistent_connection : - client_id:Persistent_connection_prot.client_id -> - logging_context:FlowEventLogger.logging_context -> - lsp:Lsp.Initialize.params option -> - unit + client_id:LspProt.client_id -> lsp_init_params:Lsp.Initialize.params -> unit -val notify_dead_persistent_connection : - client_id:Persistent_connection_prot.client_id -> - unit +val notify_dead_persistent_connection : client_id:LspProt.client_id -> unit -val start: FlowServerMonitorOptions.t -> unit Lwt.t +val start : FlowServerMonitorOptions.t -> unit Lwt.t -val exit: msg:string -> FlowExitStatus.t -> 'a Lwt.t +val exit : msg:string -> FlowExitStatus.t -> 'a Lwt.t diff --git a/src/monitor/logger/dune b/src/monitor/logger/dune new file mode 100644 index 00000000000..f3be6c71b45 --- /dev/null +++ b/src/monitor/logger/dune @@ -0,0 +1,12 @@ +(library + (name flow_monitor_logger) + (wrapped false) + (libraries + flow_common_lwt + lwt_log + lwt_log.core + lwt.unix + utils_core + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/monitor/logger/flowServerMonitorLogger.ml b/src/monitor/logger/flowServerMonitorLogger.ml index f825faf1ddf..0fbf5cbe8fe 100644 --- a/src/monitor/logger/flowServerMonitorLogger.ml +++ b/src/monitor/logger/flowServerMonitorLogger.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -17,14 +17,13 @@ * * 1. Multiple threads write to the msg_stream with their logs * 2. 
A single thread (WriteLoop) reads the messages and writes them to the various fds directly - **) + * *) -type 'a logger_fn = - ?exn : exn -> - ('a, unit, string, unit) format4 -> - 'a +type 'a logger_fn = ?exn:exn -> ('a, unit, string, unit) format4 -> 'a -let msg_stream, push_to_msg_stream = Lwt_stream.create () +type 'a logger_fn_s = ?exn:Exception.t -> ('a, unit, string, unit) format4 -> 'a + +let (msg_stream, push_to_msg_stream) = Lwt_stream.create () module WriteLoop = LwtLoop.Make (struct type acc = Lwt_unix.file_descr list @@ -46,23 +45,21 @@ module WriteLoop = LwtLoop.Make (struct (* If we failed to write to an fd throw an exception and exit. I'm not 100% sure this is the * best behavior - should logging errors cause the monitor (and server) to crash? *) let catch _ exn = - Printf.eprintf - "Logger.WriteLoop exception:\n%s\n%s" - (Printexc.to_string exn) - (Printexc.get_backtrace ()); - raise exn + let exn = Exception.wrap exn in + Printf.eprintf "Logger.WriteLoop exception:\n%s" (Exception.to_string exn); + Exception.reraise exn end) let initialized = ref false (* We're using lwt's logger instead of Hh_logger, so let's map Hh_logger levels to lwt levels *) let lwt_level_of_hh_logger_level = function -| Hh_logger.Level.Off -> Lwt_log_core.Fatal -| Hh_logger.Level.Fatal -> Lwt_log_core.Fatal -| Hh_logger.Level.Error -> Lwt_log_core.Error -| Hh_logger.Level.Warn -> Lwt_log_core.Warning -| Hh_logger.Level.Info -> Lwt_log_core.Info -| Hh_logger.Level.Debug -> Lwt_log_core.Debug + | Hh_logger.Level.Off -> Lwt_log_core.Fatal + | Hh_logger.Level.Fatal -> Lwt_log_core.Fatal + | Hh_logger.Level.Error -> Lwt_log_core.Error + | Hh_logger.Level.Warn -> Lwt_log_core.Warning + | Hh_logger.Level.Info -> Lwt_log_core.Info + | Hh_logger.Level.Debug -> Lwt_log_core.Debug (* Creates a default logger and sets the minimum logger level. The logger will log every message * that passes the minimum level to stderr. 
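(* A self-contained sketch of the design described in the comment above: producers push
 * already-formatted lines onto an Lwt_stream, and a single writer thread drains the stream,
 * so concurrent loggers never interleave their output. Lwt_io stands in here for the raw
 * file descriptors the real WriteLoop writes to. *)
let (messages, push_message) = Lwt_stream.create ()

let log fmt = Printf.ksprintf (fun line -> push_message (Some line)) fmt

(* The single consumer: runs until the stream is closed with [push_message None] *)
let write_loop () = Lwt_stream.iter_s (fun line -> Lwt_io.printl line) messages

let () =
  Lwt_main.run
    (let writer = write_loop () in
     log "starting up, pid %d" (Unix.getpid ());
     log "ready";
     push_message None; (* close the stream so the writer terminates *)
     writer)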
If log_fd is provided, each message will be logged
@@ -71,31 +68,31 @@ let init_logger log_fd =
   if !initialized then failwith "Cannot initialize FlowServerMonitorLogger more than once";
   initialized := true;
 
-  let min_level = Hh_logger.Level.min_level () |> lwt_level_of_hh_logger_level in
-
+  let min_level = Hh_logger.Level.min_level () |> lwt_level_of_hh_logger_level in
   let template = "$(date).$(milliseconds) [$(level)] $(message)" in
-
-  let log_fd = Option.map log_fd ~f:(Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true) in
-
-  let fds = Lwt_unix.stderr :: (Option.value_map log_fd ~default:[] ~f:(fun fd -> [fd])) in
+  let log_fd =
+    Option.map log_fd ~f:(Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true)
+  in
+  let fds = Lwt_unix.stderr :: Option.value_map log_fd ~default:[] ~f:(fun fd -> [fd]) in
   Lwt.async (fun () -> WriteLoop.run fds);
 
   (* Format the messages and write them to the log and stderr *)
   let output section level messages =
     let buffer = Buffer.create 42 in
-    let formatted_messages = List.map (fun message ->
-      Buffer.clear buffer;
-      Lwt_log.render ~buffer ~template ~section ~level ~message;
-      Buffer.add_char buffer '\n';
-      Buffer.contents buffer
-    ) messages in
+    let formatted_messages =
+      Core_list.map
+        ~f:(fun message ->
+          Buffer.clear buffer;
+          Lwt_log.render ~buffer ~template ~section ~level ~message;
+          Buffer.add_char buffer '\n';
+          Buffer.contents buffer)
+        messages
+    in
     push_to_msg_stream (Some formatted_messages);
     Lwt.return_unit
   in
-
   (* Just close the log *)
   let close () = Option.value_map ~default:Lwt.return_unit ~f:Lwt_unix.close log_fd in
-
   (* Set the default logger *)
   Lwt_log.default := Lwt_log_core.make ~output ~close;
 
@@ -105,15 +102,23 @@ let init_logger log_fd =
 (* Async logging APIs. These are the APIs you should generally use. Since they're async, they
  * won't make the monitor unresponsive while they're logging *)
 let fatal ?exn fmt = Lwt_log_core.ign_fatal_f ?exn fmt
+
 let error ?exn fmt = Lwt_log_core.ign_error_f ?exn fmt
+
 let warn ?exn fmt = Lwt_log_core.ign_warning_f ?exn fmt
+
 let info ?exn fmt = Lwt_log_core.ign_info_f ?exn fmt
+
 let debug ?exn fmt = Lwt_log_core.ign_debug_f ?exn fmt
 
 (* Synchronous versions just delegate to Hh_logger. These are mainly used for debugging, when you
  * want a logging call to write to the log RIGHT NOW. *)
 let fatal_s = Hh_logger.fatal
+
 let error_s = Hh_logger.error
+
 let warn_s = Hh_logger.warn
+
 let info_s = Hh_logger.info
+
 let debug_s = Hh_logger.debug
diff --git a/src/monitor/logger/flowServerMonitorLogger.mli b/src/monitor/logger/flowServerMonitorLogger.mli
index 6f956324fe2..00c5b851e13 100644
--- a/src/monitor/logger/flowServerMonitorLogger.mli
+++ b/src/monitor/logger/flowServerMonitorLogger.mli
@@ -1,27 +1,34 @@
 (**
- * Copyright (c) 2017-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
  *
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree.
*) -val init_logger: Unix.file_descr option -> unit +val init_logger : Unix.file_descr option -> unit -type 'a logger_fn = - ?exn : exn -> - ('a, unit, string, unit) format4 -> - 'a +type 'a logger_fn = ?exn:exn -> ('a, unit, string, unit) format4 -> 'a + +type 'a logger_fn_s = ?exn:Exception.t -> ('a, unit, string, unit) format4 -> 'a (* Async APIs *) -val fatal: 'a logger_fn -val error: 'a logger_fn -val warn: 'a logger_fn -val info: 'a logger_fn -val debug: 'a logger_fn +val fatal : 'a logger_fn + +val error : 'a logger_fn + +val warn : 'a logger_fn + +val info : 'a logger_fn + +val debug : 'a logger_fn (* Sync APIs *) -val fatal_s: 'a logger_fn -val error_s: 'a logger_fn -val warn_s: 'a logger_fn -val info_s: 'a logger_fn -val debug_s: 'a logger_fn +val fatal_s : 'a logger_fn_s + +val error_s : 'a logger_fn_s + +val warn_s : 'a logger_fn_s + +val info_s : 'a logger_fn_s + +val debug_s : 'a logger_fn_s diff --git a/src/monitor/monitorRPC.ml b/src/monitor/monitorRPC.ml deleted file mode 100644 index adb6abab690..00000000000 --- a/src/monitor/monitorRPC.ml +++ /dev/null @@ -1,86 +0,0 @@ -(** - * Copyright (c) 2017-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open MonitorProt - -type channels = (monitor_to_server_message, server_to_monitor_message) Daemon.channel_pair - -type state = -| Uninitialized -| Initialized of {infd: Lwt_unix.file_descr; outfd: Unix.file_descr} -| Disabled - -let state = ref Uninitialized - -let with_channel select_channel ~on_disabled ~f = match !state with -| Uninitialized -> - (* Probably means someone is calling this module from a worker thread *) - failwith "MonitorRPC can only be used by the master thread" -| Disabled -> - (* Probably means that this is a `flow check` and there is no server monitor *) - on_disabled () -| Initialized {infd; outfd} -> - f (select_channel (infd, outfd)) - -let with_infd ~on_disabled ~f = with_channel fst ~on_disabled ~f -let with_outfd ~on_disabled ~f = with_channel snd ~on_disabled ~f - -(* The main server process will initialize this with the channels to the monitor process *) -let init ~channels:(ic,oc) = - let infd = - Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true (Daemon.descr_of_in_channel ic) in - let outfd = Daemon.descr_of_out_channel oc in - state := Initialized {infd; outfd} - -(* If there is no monitor process (like in `flow check`), we can disable MonitorRPC *) -let disable () = - state := Disabled - -(* Read a single message from the monitor. *) -let read () = - with_infd - ~on_disabled:(fun () -> failwith "MonitorRPC is disabled") - ~f:Marshal_tools_lwt.from_fd_with_preamble - -(* Sends a message to the monitor. - * - * This is a no-op if the MonitorRPC is disabled. This allows the server to stream things like - * status updates without worrying whether or not there is a monitor - * - * Unliked read, this is synchronous. 
We don't currently have a use case for async sends, and it's a - * little painful to thread lwt through to everywhere we send data - *) -let send ~msg = - with_outfd - ~on_disabled:(fun () -> ()) - ~f:(fun outfd -> Marshal_tools.to_fd_with_preamble outfd msg |> ignore) - -(* Respond to a request from an ephemeral client *) -let respond_to_request ~request_id ~response = - send ~msg:(Response (request_id, response)) - -(* Exception while handling the request *) -let request_failed ~request_id ~exn_str = - send ~msg:(RequestFailed (request_id, exn_str)) - -(* Send a message to a persistent client *) -let respond_to_persistent_connection ~client_id ~response = - send ~msg:(PersistentConnectionResponse (client_id, response)) - -(* Send a status update to the monitor *) -let status_update = - (* Remember the last status so that we only send updates when something changes *) - let last_status = ref ServerStatus.initial_status in - - fun ~event -> - if !state = Disabled then () else - let new_status = ServerStatus.update ~event ~status:!last_status in - if new_status <> !last_status - then begin - last_status := new_status; - send ~msg:(StatusUpdate new_status) - end diff --git a/src/monitor/monitorRPC.mli b/src/monitor/monitorRPC.mli deleted file mode 100644 index 614ac116744..00000000000 --- a/src/monitor/monitorRPC.mli +++ /dev/null @@ -1,33 +0,0 @@ -(** - * Copyright (c) 2017-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type channels = (MonitorProt.monitor_to_server_message, - MonitorProt.server_to_monitor_message) Daemon.channel_pair - -val init : channels:channels -> unit -val disable : unit -> unit - -val read : unit -> MonitorProt.monitor_to_server_message Lwt.t - -val respond_to_request : - request_id: MonitorProt.request_id -> - response: ServerProt.Response.response -> - unit - -val request_failed : - request_id: MonitorProt.request_id -> - exn_str: string -> - unit - -val respond_to_persistent_connection : - client_id: Persistent_connection_prot.client_id -> - response: Persistent_connection_prot.response -> - unit - -val status_update : - event: ServerStatus.event -> - unit diff --git a/src/monitor/persistentConnectionMap.ml b/src/monitor/persistentConnectionMap.ml index 09ee5e37cb4..05990d92ff1 100644 --- a/src/monitor/persistentConnectionMap.ml +++ b/src/monitor/persistentConnectionMap.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
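(* A tiny sketch of the deduplication pattern used by [status_update] in MonitorRPC above:
 * remember the last value sent inside the closure and only notify when the new value differs.
 * [notify] and the string statuses here are stand-ins for sending a real status to the
 * monitor. *)
let make_status_updater ~(notify : string -> unit) =
  let last_status = ref "" in
  fun new_status ->
    if new_status <> !last_status then (
      last_status := new_status;
      notify new_status
    )

let () =
  let update = make_status_updater ~notify:print_endline in
  update "parsing";  (* printed *)
  update "parsing";  (* suppressed: unchanged *)
  update "merging"   (* printed *)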
@@ -11,16 +11,12 @@ let map = ref IMap.empty -let add ~client_id ~client = - map := IMap.add client_id client !map +let add ~client_id ~client = map := IMap.add client_id client !map -let get ~client_id = - IMap.get client_id !map +let get ~client_id = IMap.get client_id !map -let remove ~client_id = - map := IMap.remove client_id !map +let remove ~client_id = map := IMap.remove client_id !map -let cardinal () = - IMap.cardinal !map +let cardinal () = IMap.cardinal !map -let get_all_clients () = IMap.bindings !map |> List.map snd +let get_all_clients () = IMap.bindings !map |> Core_list.map ~f:snd diff --git a/src/monitor/persistentConnectionMap.mli b/src/monitor/persistentConnectionMap.mli index 9bb60fe88df..73ef7bc0dd9 100644 --- a/src/monitor/persistentConnectionMap.mli +++ b/src/monitor/persistentConnectionMap.mli @@ -1,27 +1,16 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val add: - client_id:Persistent_connection_prot.client_id -> - client:PersistentConnection.t -> - unit +val add : client_id:LspProt.client_id -> client:PersistentConnection.t -> unit -val get: - client_id:Persistent_connection_prot.client_id -> - PersistentConnection.t option +val get : client_id:LspProt.client_id -> PersistentConnection.t option -val remove: - client_id:Persistent_connection_prot.client_id -> - unit +val remove : client_id:LspProt.client_id -> unit -val cardinal: - unit -> - int +val cardinal : unit -> int -val get_all_clients: - unit -> - PersistentConnection.t list +val get_all_clients : unit -> PersistentConnection.t list diff --git a/src/monitor/requestMap.ml b/src/monitor/requestMap.ml index e04d04db5bb..4a507c4d3ef 100644 --- a/src/monitor/requestMap.ml +++ b/src/monitor/requestMap.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
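(* A standalone sketch of the registry pattern in persistentConnectionMap above: an int-keyed
 * map held in a ref, with all clients listed via the map's bindings. IntMap plays the role of
 * Flow's IMap, and the client type is just a string for illustration. *)
module IntMap = Map.Make (struct
  type t = int

  let compare = compare
end)

let clients : string IntMap.t ref = ref IntMap.empty

let add ~client_id ~client = clients := IntMap.add client_id client !clients

let get ~client_id = IntMap.find_opt client_id !clients

let remove ~client_id = clients := IntMap.remove client_id !clients

let get_all_clients () = IntMap.bindings !clients |> List.map snd

let () =
  add ~client_id:1 ~client:"lsp client";
  add ~client_id:2 ~client:"ide client";
  remove ~client_id:1;
  assert (get ~client_id:1 = None);
  List.iter print_endline (get_all_clients ())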
@@ -10,35 +10,32 @@ * * Every request in this map has been sent to the server and no reply has been processed yet *) - let mutex = Lwt_mutex.create () + let map = ref SMap.empty + let last_id = ref 0 let add ~request ~client = (* TODO(ljw): doesn't really need mutexes since it doesn't yield *) Lwt_mutex.with_lock mutex (fun () -> - incr last_id; - let request_id = Printf.sprintf "Request %d" !last_id in - map := SMap.add request_id (request, client) !map; - Lwt.return request_id - ) + incr last_id; + let request_id = Printf.sprintf "Request %d" !last_id in + map := SMap.add request_id (request, client) !map; + Lwt.return request_id) let remove ~request_id = (* TODO(ljw): doesn't really need mutexes since it doesn't yield *) Lwt_mutex.with_lock mutex (fun () -> - let ret = SMap.get request_id !map in - map := SMap.remove request_id !map; - Lwt.return ret - ) + let ret = SMap.get request_id !map in + map := SMap.remove request_id !map; + Lwt.return ret) let remove_all () = (* TODO(ljw): doesn't really need mutexes since it doesn't yield *) Lwt_mutex.with_lock mutex (fun () -> - let ret = SMap.elements !map |> List.map snd in - map := SMap.empty; - Lwt.return ret - ) + let ret = SMap.elements !map |> Core_list.map ~f:snd in + map := SMap.empty; + Lwt.return ret) -let cardinal () = - SMap.cardinal !map +let cardinal () = SMap.cardinal !map diff --git a/src/monitor/requestMap.mli b/src/monitor/requestMap.mli index 40d32a98d71..fe6ddb826dc 100644 --- a/src/monitor/requestMap.mli +++ b/src/monitor/requestMap.mli @@ -1,23 +1,20 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val add: - request:ServerProt.Request.command_with_context-> +val add : + request:ServerProt.Request.command_with_context -> client:EphemeralConnection.t -> MonitorProt.request_id Lwt.t -val remove: +val remove : request_id:MonitorProt.request_id -> (ServerProt.Request.command_with_context * EphemeralConnection.t) option Lwt.t -val remove_all: - unit -> - (ServerProt.Request.command_with_context * EphemeralConnection.t) list Lwt.t +val remove_all : + unit -> (ServerProt.Request.command_with_context * EphemeralConnection.t) list Lwt.t -val cardinal: - unit -> - int +val cardinal : unit -> int diff --git a/src/monitor/rpc/dune b/src/monitor/rpc/dune new file mode 100644 index 00000000000..004751b26c8 --- /dev/null +++ b/src/monitor/rpc/dune @@ -0,0 +1,8 @@ +(library + (name flow_monitor_rpc) + (wrapped false) + (libraries + flow_server_protocol + sys_utils ; hack + ) +) diff --git a/src/monitor/rpc/monitorRPC.ml b/src/monitor/rpc/monitorRPC.ml new file mode 100644 index 00000000000..3c61398e1e1 --- /dev/null +++ b/src/monitor/rpc/monitorRPC.ml @@ -0,0 +1,88 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open MonitorProt + +type channels = (monitor_to_server_message, server_to_monitor_message) Daemon.channel_pair + +type state = + | Uninitialized + | Initialized of { + infd: Lwt_unix.file_descr; + outfd: Unix.file_descr; + } + | Disabled + +let state = ref Uninitialized + +let with_channel select_channel ~on_disabled ~f = + match !state with + | Uninitialized -> + (* Probably means someone is calling this module from a worker thread *) + failwith "MonitorRPC can only be used by the master thread" + | Disabled -> + (* Probably means that this is a `flow check` and there is no server monitor *) + on_disabled () + | Initialized { infd; outfd } -> f (select_channel (infd, outfd)) + +let with_infd ~on_disabled ~f = with_channel fst ~on_disabled ~f + +let with_outfd ~on_disabled ~f = with_channel snd ~on_disabled ~f + +(* The main server process will initialize this with the channels to the monitor process *) +let init ~channels:(ic, oc) = + let infd = + Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true (Daemon.descr_of_in_channel ic) + in + let outfd = Daemon.descr_of_out_channel oc in + state := Initialized { infd; outfd } + +(* If there is no monitor process (like in `flow check`), we can disable MonitorRPC *) +let disable () = state := Disabled + +(* Read a single message from the monitor. *) +let read () = + with_infd + ~on_disabled:(fun () -> failwith "MonitorRPC is disabled") + ~f:Marshal_tools_lwt.from_fd_with_preamble + +(* Sends a message to the monitor. + * + * This is a no-op if the MonitorRPC is disabled. This allows the server to stream things like + * status updates without worrying whether or not there is a monitor + * + * Unliked read, this is synchronous. We don't currently have a use case for async sends, and it's a + * little painful to thread lwt through to everywhere we send data + *) +let send ~msg = + with_outfd + ~on_disabled:(fun () -> ()) + ~f:(fun outfd -> Marshal_tools.to_fd_with_preamble outfd msg |> ignore) + +(* Respond to a request from an ephemeral client *) +let respond_to_request ~request_id ~response = send ~msg:(Response (request_id, response)) + +(* Exception while handling the request *) +let request_failed ~request_id ~exn_str = send ~msg:(RequestFailed (request_id, exn_str)) + +(* Send a message to a persistent client *) +let respond_to_persistent_connection ~client_id ~response = + send ~msg:(PersistentConnectionResponse (client_id, response)) + +(* Send a status update to the monitor *) +let status_update = + (* Remember the last status so that we only send updates when something changes *) + let last_status = ref ServerStatus.initial_status in + fun ~event -> + if !state = Disabled then + () + else + let new_status = ServerStatus.update ~event ~status:!last_status in + if new_status <> !last_status then ( + last_status := new_status; + send ~msg:(StatusUpdate new_status) + ) diff --git a/src/monitor/rpc/monitorRPC.mli b/src/monitor/rpc/monitorRPC.mli new file mode 100644 index 00000000000..0beda867f89 --- /dev/null +++ b/src/monitor/rpc/monitorRPC.mli @@ -0,0 +1,27 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type channels = + ( MonitorProt.monitor_to_server_message, + MonitorProt.server_to_monitor_message ) + Daemon.channel_pair + +val init : channels:channels -> unit + +val disable : unit -> unit + +val read : unit -> MonitorProt.monitor_to_server_message Lwt.t + +val respond_to_request : + request_id:MonitorProt.request_id -> response:ServerProt.Response.response -> unit + +val request_failed : request_id:MonitorProt.request_id -> exn_str:string -> unit + +val respond_to_persistent_connection : + client_id:LspProt.client_id -> response:LspProt.message_from_server -> unit + +val status_update : event:ServerStatus.event -> unit diff --git a/src/monitor/serverStatus.ml b/src/monitor/serverStatus.ml deleted file mode 100644 index ad2f0172f06..00000000000 --- a/src/monitor/serverStatus.ml +++ /dev/null @@ -1,333 +0,0 @@ -(** - * Copyright (c) 2017-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -(* This module tries to model the Flow server's status as a state machine. The current status is - * the state, and it gets updated by events. This status can then be streamed to Flow clients and - * rendered. - *) - -let spf = Utils_js.spf - -type progress = { - total: int option; - finished: int; -} - -type summary_info = - | RecheckSummary of { - dependent_file_count: int; - changed_file_count: int; - top_cycle: (File_key.t * int) option; (* name of cycle leader, and size of cycle *) - } - | CommandSummary of string - | InitSummary - -type summary = { duration: float; info: summary_info;} - -type event = -| Ready (* The server is free *) -| Init_start (* The server is starting to initialize *) -| Read_saved_state -| Load_saved_state_progress of progress -| Parsing_progress of progress -| Resolving_dependencies_progress -| Calculating_dependencies_progress -| Merging_progress of progress -| Canceling_progress of progress -| Finishing_up of summary (* Server's finishing up typechecking or other work *) -| Recheck_start (* The server is starting to recheck *) -| Handling_request_start (* The server is starting to handle an ephemeral/persistent request *) -| GC_start (* The server is starting to GC *) -| Collating_errors_start (* The server is collating the errors *) - -type typecheck_status = -| Starting_typecheck (* A typecheck's initial state *) -| Reading_saved_state -| Loading_saved_state of progress -| Parsing of progress -| Resolving_dependencies -| Calculating_dependencies -| Merging of progress -| Canceling of progress -| Garbage_collecting_typecheck (* We garbage collect during typechecks sometime *) -| Collating_errors (* We sometimes collate errors during typecheck *) -| Finishing_typecheck of summary (* haven't reached free state yet *) - -type typecheck_mode = -| Initializing (* Flow is busy starting up *) -| Rechecking (* Flow is busy rechecking *) -| Handling_request (* Flow is busy handling a request *) - -type status = -| Starting_up (* The server's initial state *) -| Free (* Not busy doing something else *) -| Typechecking of typecheck_mode * typecheck_status (* Busy doing Flow stuff *) -| Garbage_collecting (* This one is pretty obvious *) -| Unknown (* A bad state caused by transitioning from a good state due to an unexpected event *) - -let string_of_progress {finished; total} = - match total with - | None -> spf "%d" finished - | Some total -> - spf "%d/%d (%02.1f%%)" finished total (100.0 *. (float finished) /. 
(float (max 1 total))) - -type emoji = -| Bicyclist -| Closed_book -| Cookie -| File_cabinet -| Ghost -| Open_book -| Panda_face -| Recycling_symbol -| Sleeping_face -| Smiling_face_with_mouth_open -| Taco -| Wastebasket - -let string_of_emoji = function -| Bicyclist -> "\xF0\x9F\x9A\xB4" -| Closed_book -> "\xF0\x9F\x93\x95" -| Cookie -> "\xF0\x9F\x8D\xAA" -| File_cabinet -> "\xF0\x9F\x97\x84" -| Ghost -> "\xF0\x9F\x91\xBB" -| Open_book -> "\xF0\x9F\x93\x96" -| Panda_face -> "\xF0\x9F\x90\xBC" -| Recycling_symbol -> "\xE2\x99\xBB" -| Sleeping_face -> "\xF0\x9F\x98\xB4" -| Smiling_face_with_mouth_open -> "\xF0\x9F\x98\x83" -| Taco -> "\xF0\x9F\x8C\xAE" -| Wastebasket -> "\xF0\x9F\x97\x91" - -type pad_emoji = -| Before -| After - -let render_emoji ~use_emoji ?(pad=After) emoji = - if use_emoji - then spf - "%s%s %s" - (if pad = Before then " " else "") - (string_of_emoji emoji) - (if pad = After then " " else "") - else "" - -let string_of_event = function -| Ready -> "Ready" -| Init_start -> "Init_start" -| Read_saved_state -> "Read_saved_state" -| Load_saved_state_progress progress -> - spf "Load_saved_state_progress %s" (string_of_progress progress) -| Parsing_progress progress -> - spf "Parsing_progress files %s" (string_of_progress progress) -| Calculating_dependencies_progress -> "Calculating_dependencies_progress" -| Resolving_dependencies_progress -> "Resolving_dependencies_progress" -| Merging_progress progress -> - spf "Merging_progress %s" (string_of_progress progress) -| Canceling_progress progress -> - spf "Canceling_progress %s" (string_of_progress progress) -| Finishing_up _ -> "Finishing_up" -| Recheck_start -> "Recheck_start" -| Handling_request_start -> "Handling_request_start" -| GC_start -> "GC_start" -| Collating_errors_start -> "Collating_errors_start" - -let string_of_typecheck_status ~use_emoji = function -| Starting_typecheck -> - spf "%sstarting up" (render_emoji ~use_emoji Sleeping_face) -| Reading_saved_state -> - spf "%sreading saved state" (render_emoji ~use_emoji Closed_book) -| Loading_saved_state progress -> - spf "%sloading saved state %s" (render_emoji ~use_emoji Open_book) (string_of_progress progress) -| Parsing progress -> - spf "%sparsed files %s" (render_emoji ~use_emoji Ghost) (string_of_progress progress) -| Resolving_dependencies -> - spf "%sresolving dependencies" (render_emoji ~use_emoji Taco) -| Calculating_dependencies -> - spf "%scalculating dependencies" (render_emoji ~use_emoji Taco) -| Merging progress -> - spf "%smerged files %s" (render_emoji ~use_emoji Bicyclist) (string_of_progress progress) -| Canceling progress -> - spf "%scanceling workers %s" - (render_emoji ~use_emoji Recycling_symbol) (string_of_progress progress) -| Garbage_collecting_typecheck -> - spf "%sgarbage collecting shared memory" (render_emoji ~use_emoji Wastebasket) -| Collating_errors -> - spf "%scollating errors" (render_emoji ~use_emoji File_cabinet) -| Finishing_typecheck _ -> - spf "%sfinishing up" (render_emoji ~use_emoji Cookie) - -let string_of_status ?(use_emoji=false) ?(terse=false) status = - let status_string = match status with - | Starting_up -> - spf "starting up%s" (render_emoji ~use_emoji ~pad:Before Sleeping_face) - | Free -> - spf "free%s" (render_emoji ~use_emoji ~pad:Before Smiling_face_with_mouth_open) - | Typechecking (Initializing, tcs) -> - spf "initializing (%s)" (string_of_typecheck_status ~use_emoji tcs) - | Typechecking (Rechecking, tcs) -> - spf "rechecking (%s)" (string_of_typecheck_status ~use_emoji tcs) - | Typechecking 
(Handling_request, tcs) -> - spf "handling a request (%s)" (string_of_typecheck_status ~use_emoji tcs) - | Garbage_collecting -> - spf "garbage collecting shared memory%s" (render_emoji ~use_emoji ~pad:Before Wastebasket) - | Unknown -> - spf "doing something%s" (render_emoji ~use_emoji ~pad:Before Panda_face) - in - spf "%s%s" (if terse then "" else "Server is ") status_string - -(* Transition function for the status state machine. Given the current status and the event, - * pick a new status *) -let update ~event ~status = - match event, status with - | Ready, _ -> Free - - | Init_start, _ -> Typechecking (Initializing, Starting_typecheck) - | Recheck_start, _ -> Typechecking (Rechecking, Starting_typecheck) - | Handling_request_start, _ -> Typechecking (Handling_request, Starting_typecheck) - - | Read_saved_state, Typechecking (mode, _) -> Typechecking (mode, Reading_saved_state) - | Load_saved_state_progress progress, Typechecking (mode, _) -> - Typechecking (mode, Loading_saved_state progress) - | Parsing_progress progress, Typechecking (mode, _) -> Typechecking (mode, Parsing progress) - | Resolving_dependencies_progress, Typechecking (mode, _) -> - Typechecking (mode, Resolving_dependencies) - | Calculating_dependencies_progress, Typechecking (mode, _) -> - Typechecking (mode, Calculating_dependencies) - | Merging_progress progress, Typechecking (mode, _) -> Typechecking (mode, Merging progress) - | Canceling_progress progress, Typechecking (mode, _) -> Typechecking (mode, Canceling progress) - | GC_start, Typechecking (mode, _) -> Typechecking (mode, Garbage_collecting_typecheck) - | Collating_errors_start, Typechecking (mode, _) -> Typechecking (mode, Collating_errors) - | Finishing_up summary, Typechecking (mode, _) -> Typechecking (mode, Finishing_typecheck summary) - - | GC_start, _ -> Garbage_collecting - | _ -> - (* This is a bad transition. In dev mode, let's blow up since something is wrong. However in - * production let's soldier on. Usually this means that we forgot to send something like - * Handling_request_start before we sent a Merging_progress. 
*) - if Build_mode.dev - then failwith (spf - "Unexpected status transition from '%s' with event '%s'" - (string_of_status status) - (string_of_event event) - ) - else Unknown - -let initial_status = Starting_up - -let is_free = function -| Free -> true -| _ -> false - -(* Returns true iff the transition from old_status to new_status is "significant", which is a - * pretty arbitrary judgement of how interesting the new status is to a user, given that they - * already have seen the old status *) -let is_significant_transition old_status new_status = - (* If the statuses are literally the same, then the transition is not significant *) - old_status <> new_status && match old_status, new_status with - | Typechecking (old_mode, old_tc_status), Typechecking (new_mode, new_tc_status) -> - (* A change in mode is always signifcant *) - old_mode <> new_mode || begin match old_tc_status, new_tc_status with - (* Making progress within parsing, merging or canceling is not significant *) - | Parsing _, Parsing _ - | Merging _, Merging _ - | Canceling _, Canceling _ -> false - (* But changing typechecking status always is significant *) - | _, Starting_typecheck - | _, Reading_saved_state - | _, Loading_saved_state _ - | _, Parsing _ - | _, Resolving_dependencies - | _, Calculating_dependencies - | _, Merging _ - | _, Canceling _ - | _, Garbage_collecting_typecheck - | _, Collating_errors - | _, Finishing_typecheck _ -> true - end - (* Switching to a completely different status is always significant *) - | _, Starting_up - | _, Free - | _, Typechecking _ - | _, Garbage_collecting - | _, Unknown -> true - -let get_progress status = - let print progress = - match progress with - | {finished; total=None} -> - Some (Printf.sprintf "%d" finished), Some finished, None - | {finished; total=Some total} -> - Some (Printf.sprintf "%d/%d" finished total), Some finished, Some total in - match status with - | Typechecking (_, Parsing progress) - | Typechecking (_, Merging progress) - | Typechecking (_, Canceling progress) -> print progress - | _ -> None, None, None - - -let get_summary status = - match status with - | Typechecking (_mode, Finishing_typecheck summary) -> Some summary - | _ -> None - -let log_of_summaries ~(root: Path.t) (summaries: summary list) - : FlowEventLogger.persistent_delay = - let open FlowEventLogger in - let init = { - init_duration = 0.0; - command_count = 0; - command_duration = 0.0; - command_worst = None; - command_worst_duration = None; - recheck_count = 0; - recheck_dependent_files = 0; - recheck_changed_files = 0; - recheck_duration = 0.0; - recheck_worst_duration = None; - recheck_worst_dependent_file_count = None; - recheck_worst_changed_file_count = None; - recheck_worst_cycle_leader = None; - recheck_worst_cycle_size = None; - } in - let f acc {duration; info} = - match info with - | InitSummary -> - let acc = { acc with - init_duration = acc.init_duration +. duration; - } in - acc - | CommandSummary cmd -> - let is_worst = match acc.command_worst_duration with None -> true | Some d -> duration >= d in - let acc = if not is_worst then acc else { acc with - command_worst = Some cmd; - command_worst_duration = Some duration; - } in - let acc = { acc with - command_count = acc.command_count + 1; - command_duration = acc.command_duration +. 
duration; - } in - acc - | RecheckSummary {dependent_file_count; changed_file_count; top_cycle} -> - let is_worst = match acc.recheck_worst_duration with None -> true | Some d -> duration >= d in - let acc = if not is_worst then acc else { acc with - recheck_worst_duration = Some duration; - recheck_worst_dependent_file_count = Some dependent_file_count; - recheck_worst_changed_file_count = Some changed_file_count; - recheck_worst_cycle_size = Option.map top_cycle ~f:(fun (_,size) -> size); - recheck_worst_cycle_leader = Option.map top_cycle - ~f:(fun (f,_) -> f |> File_key.to_string |> Files.relative_path (Path.to_string root)); - } in - let acc = { acc with - recheck_count = acc.recheck_count + 1; - recheck_dependent_files = acc.recheck_dependent_files + dependent_file_count; - recheck_changed_files = acc.recheck_changed_files + changed_file_count; - recheck_duration = acc.recheck_duration +. duration; - } in - acc - in - Core_list.fold summaries ~init ~f diff --git a/src/monitor/serverStatus.mli b/src/monitor/serverStatus.mli deleted file mode 100644 index 8ec06084db7..00000000000 --- a/src/monitor/serverStatus.mli +++ /dev/null @@ -1,49 +0,0 @@ -(** - * Copyright (c) 2017-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type progress = { - total: int option; - finished: int; -} - -type summary_info = - | RecheckSummary of { - dependent_file_count: int; - changed_file_count: int; - top_cycle: (File_key.t * int) option; (* name of cycle leader, and size of cycle *) - } - | CommandSummary of string - | InitSummary - -type summary = { duration: float; info: summary_info;} - -type event = -| Ready -| Init_start -| Read_saved_state -| Load_saved_state_progress of progress -| Parsing_progress of progress -| Resolving_dependencies_progress -| Calculating_dependencies_progress -| Merging_progress of progress -| Canceling_progress of progress -| Finishing_up of summary -| Recheck_start -| Handling_request_start -| GC_start -| Collating_errors_start - -type status - -val initial_status: status -val update: event:event -> status:status -> status -val string_of_status: ?use_emoji:bool -> ?terse:bool ->status -> string -val is_free: status -> bool -val is_significant_transition: status -> status -> bool -val get_progress: status -> string option * int option * int option -val get_summary: status -> summary option -val log_of_summaries: root:Path.t -> summary list -> FlowEventLogger.persistent_delay diff --git a/src/monitor/socketAcceptor.ml b/src/monitor/socketAcceptor.ml index a566e17236f..e0cd169757d 100644 --- a/src/monitor/socketAcceptor.ml +++ b/src/monitor/socketAcceptor.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
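(* A condensed sketch of the aggregation done by [log_of_summaries] above: fold over a list of
 * summaries, accumulating counts and durations while remembering the single worst (slowest)
 * entry. The records below are simplified stand-ins for the real summary and
 * FlowEventLogger.persistent_delay types. *)
type summary = { duration : float; changed_files : int }

type stats = {
  recheck_count : int;
  recheck_duration : float;
  worst_duration : float option;
  worst_changed_files : int option;
}

let stats_of_summaries summaries =
  let init =
    { recheck_count = 0; recheck_duration = 0.; worst_duration = None; worst_changed_files = None }
  in
  List.fold_left
    (fun acc { duration; changed_files } ->
      let is_worst =
        match acc.worst_duration with
        | None -> true
        | Some d -> duration >= d
      in
      let acc =
        if is_worst then
          { acc with worst_duration = Some duration; worst_changed_files = Some changed_files }
        else
          acc
      in
      { acc with
        recheck_count = acc.recheck_count + 1;
        recheck_duration = acc.recheck_duration +. duration;
      })
    init
    summaries

let () =
  let { recheck_count; recheck_duration; _ } =
    stats_of_summaries [ { duration = 1.2; changed_files = 3 }; { duration = 0.4; changed_files = 1 } ]
  in
  Printf.printf "%d rechecks, %.1fs total\n" recheck_count recheck_duration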
@@ -24,12 +24,12 @@ let handle_persistent_message ~client_id ~msg ~connection:_ = module type STATUS_WRITER = sig type t + val write : ServerStatus.status * FileWatcherStatus.status -> t -> unit end - (* A loop that sends the Server's busy status to a waiting connection every 0.5 seconds *) -module StatusLoop (Writer: STATUS_WRITER) = LwtLoop.Make (struct +module StatusLoop (Writer : STATUS_WRITER) = LwtLoop.Make (struct type acc = Writer.t let main conn = @@ -38,44 +38,46 @@ module StatusLoop (Writer: STATUS_WRITER) = LwtLoop.Make (struct Lwt.return conn let catch _ exn = - begin match exn with - (* The connection closed its write stream, likely it is closed or closing *) - | Lwt_stream.Closed -> () - | exn -> Logger.error ~exn "StatusLoop threw an exception" + begin + match exn with + (* The connection closed its write stream, likely it is closed or closing *) + | Lwt_stream.Closed -> () + | exn -> Logger.error ~exn "StatusLoop threw an exception" end; Lwt.return_unit end) module EphemeralStatusLoop = StatusLoop (struct type t = EphemeralConnection.t - let write status conn = - EphemeralConnection.write ~msg:(MonitorProt.Please_hold status) conn + + let write status conn = EphemeralConnection.write ~msg:(MonitorProt.Please_hold status) conn end) module PersistentStatusLoop = StatusLoop (struct type t = PersistentConnection.t + let write status conn = - PersistentConnection.write ~msg:(Persistent_connection_prot.Please_hold status) conn + PersistentConnection.write ~msg:LspProt.(NotificationFromServer (Please_hold status)) conn end) let create_ephemeral_connection ~client_fd ~close = Logger.debug "Creating a new ephemeral connection"; - let%lwt (start, conn) = EphemeralConnection.create - ~name:"some ephemeral connection" - ~in_fd:client_fd - ~out_fd:client_fd - ~close - ~on_read:handle_ephemeral_request + let%lwt (start, conn) = + EphemeralConnection.create + ~name:"some ephemeral connection" + ~in_fd:client_fd + ~out_fd:client_fd + ~close + ~on_read:handle_ephemeral_request in (* On exit, do our best to send all pending messages to the waiting client *) let close_on_exit = let%lwt _ = Lwt_condition.wait ExitSignal.signal in EphemeralConnection.flush_and_close conn in - (* Lwt.pick returns the first thread to finish and cancels the rest. 
*) - Lwt.async (fun () -> Lwt.pick [ close_on_exit; EphemeralConnection.wait_for_closed conn; ]); + Lwt.async (fun () -> Lwt.pick [close_on_exit; EphemeralConnection.wait_for_closed conn]); (* Start the ephemeral connection *) start (); @@ -95,48 +97,45 @@ let create_persistent_id = incr last_persistent_id; !last_persistent_id -let create_persistent_connection ~client_fd ~close ~logging_context ~lsp = +let create_persistent_connection ~client_fd ~close ~lsp_init_params = let client_id = create_persistent_id () in - Logger.debug "Creating a persistent connection #%d" client_id; - Server.notify_new_persistent_connection ~client_id ~logging_context ~lsp; + Server.notify_new_persistent_connection ~client_id ~lsp_init_params; let close () = Server.notify_dead_persistent_connection ~client_id; close () in - - let%lwt (start, conn) = PersistentConnection.create - ~name:(spf "persistent connection #%d" client_id) - ~in_fd:client_fd - ~out_fd:client_fd - ~close - ~on_read:(handle_persistent_message ~client_id) + let%lwt (start, conn) = + PersistentConnection.create + ~name:(spf "persistent connection #%d" client_id) + ~in_fd:client_fd + ~out_fd:client_fd + ~close + ~on_read:(handle_persistent_message ~client_id) in (* On exit, do our best to send all pending messages to the waiting client *) let close_on_exit = let%lwt _ = Lwt_condition.wait ExitSignal.signal in - PersistentConnection.write Persistent_connection_prot.EOF conn; + PersistentConnection.write LspProt.(NotificationFromServer EOF) conn; PersistentConnection.flush_and_close conn in - (* Lwt.pick returns the first thread to finish and cancels the rest. *) - Lwt.async (fun () -> Lwt.pick [ close_on_exit; PersistentConnection.wait_for_closed conn; ]); + Lwt.async (fun () -> Lwt.pick [close_on_exit; PersistentConnection.wait_for_closed conn]); (* Don't start the connection until we add it to the persistent connection map *) Lwt.async (fun () -> - PersistentConnectionMap.add ~client_id ~client:conn; - start (); - PersistentConnection.write - ~msg:(Persistent_connection_prot.Please_hold (StatusStream.get_status ())) conn; - let%lwt () = PersistentStatusLoop.run ~cancel_condition:ExitSignal.signal conn in - Lwt.return_unit - ); + PersistentConnectionMap.add ~client_id ~client:conn; + start (); + PersistentConnection.write + ~msg:LspProt.(NotificationFromServer (Please_hold (StatusStream.get_status ()))) + conn; + let%lwt () = PersistentStatusLoop.run ~cancel_condition:ExitSignal.signal conn in + Lwt.return_unit); Lwt.return () - let close client_fd () = (* Close the client_fd, regardless of whether or not we were able to shutdown the connection. * This prevents fd leaks *) @@ -145,15 +144,12 @@ let close client_fd () = (* To be perfectly honest, it's not clear whether the SHUTDOWN_ALL is really needed. I mean, * shutdown is useful to shutdown one direction of the socket, but if you're about to close * it, does shutting down first actually make any difference? *) - try Lwt_unix.(shutdown client_fd SHUTDOWN_ALL) - with + try Lwt_unix.(shutdown client_fd SHUTDOWN_ALL) with (* Already closed *) | Unix.Unix_error (Unix.EBADF, _, _) -> () | exn -> Logger.error ~exn "Failed to shutdown socket client" end; - try%lwt - Lwt_unix.close client_fd - with + try%lwt Lwt_unix.close client_fd with (* Already closed *) | Unix.Unix_error (Unix.EBADF, _, _) -> Lwt.return_unit | exn -> Lwt.return (Logger.error ~exn "Failed to close socket client fd") @@ -161,112 +157,121 @@ let close client_fd () = (* Well...I mean this is a pretty descriptive function name. 
It performs the handshake and then * returns the client's side of the handshake *) let perform_handshake_and_get_client_handshake ~client_fd = - let open SocketHandshake in - let server_build_id = build_revision in - let server_bin = Sys.executable_name in - - (* handshake step 1: client sends handshake *) - let%lwt (wire: client_handshake_wire) = Marshal_tools_lwt.from_fd_with_preamble client_fd in - let client1 = try fst wire |> Hh_json.json_of_string |> json_to__client_to_monitor_1 - with exn -> - Logger.error ~exn "Failed to parse JSON section of handshake: %s" (fst wire); - default_client_to_monitor_1 in - let client2 = if client1.client_build_id <> server_build_id then None - else Some (Marshal.from_string (snd wire) 0 : client_to_monitor_2) in - - (* handshake step 2: server sends back handshake *) - let respond server_intent server2 = - assert (server2 = None || client1.client_build_id = server_build_id); - (* the client will trust our invariant that server2=Some means the client *) - (* can certainly deserialize server2. *) - let server1 = { server_build_id; server_bin; server_intent; } in - let wire : server_handshake_wire = ( - server1 |> monitor_to_client_1__to_json |> Hh_json.json_to_string, - Option.map server2 ~f:(fun server2 -> Marshal.to_string server2 []) - ) in - let%lwt _ = Marshal_tools_lwt.to_fd_with_preamble client_fd wire in - Lwt.return_unit - in - - let fd_as_int = client_fd |> Lwt_unix.unix_file_descr |> Obj.magic in - - (* Stop request *) - if client1.is_stop_request then begin - let%lwt () = respond Server_will_exit None in - let%lwt () = close client_fd () in - Server.exit ~msg:"Killed by `flow stop`. Exiting." FlowExitStatus.No_error; - - (* Binary version mismatch *) - end else if client1.client_build_id <> build_revision then begin - if client1.server_should_exit_if_version_mismatch then begin + SocketHandshake.( + let server_build_id = build_revision in + let server_bin = Sys.executable_name in + (* handshake step 1: client sends handshake *) + let%lwt (wire : client_handshake_wire) = Marshal_tools_lwt.from_fd_with_preamble client_fd in + let client_handshake = + try fst wire |> Hh_json.json_of_string |> json_to__client_to_monitor_1 + with exn -> + Logger.error ~exn "Failed to parse JSON section of handshake: %s" (fst wire); + default_client_to_monitor_1 + in + let client = + if client_handshake.client_build_id <> server_build_id then + None + else + Some (Marshal.from_string (snd wire) 0 : client_to_monitor_2) + in + (* handshake step 2: server sends back handshake *) + let respond server_intent server2 = + assert (server2 = None || client_handshake.client_build_id = server_build_id); + + (* the client will trust our invariant that server2=Some means the client *) + (* can certainly deserialize server2. *) + let server_version = Flow_version.version in + let server1 = { server_build_id; server_bin; server_intent; server_version } in + let wire : server_handshake_wire = + ( server1 |> monitor_to_client_1__to_json |> Hh_json.json_to_string, + Option.map server2 ~f:(fun server2 -> Marshal.to_string server2 []) ) + in + let%lwt _ = Marshal_tools_lwt.to_fd_with_preamble client_fd wire in + Lwt.return_unit + in + let error_client () = + let%lwt () = respond Server_will_hangup None in + failwith "Build mismatch, so rejecting attempted connection" + in + let stop_server () = let%lwt () = respond Server_will_exit None in let msg = "Client and server are different builds. Flow server is out of date. 
Exiting" in FlowEventLogger.out_of_date (); Logger.fatal "%s" msg; FlowExitStatus.exit ~msg FlowExitStatus.Build_id_mismatch - end else begin - let%lwt () = respond Server_will_hangup None in - failwith "Build mismatch, so rejecting attempted connection" - end - - (* Too many clients *) - end else if Sys.unix && fd_as_int > 500 then begin - (* We currently rely on using Unix.select, which doesn't work for fds >= FD_SETSIZE (1024). - * So we can't have an unlimited number of clients. So if the new fd is too large, let's - * reject it. - * TODO(glevi): Figure out whether this check is needed for Windows *) - let%lwt () = respond Server_will_hangup (Some Server_has_too_many_clients) in - failwith (spf "Too many clients, so rejecting new connection (%d)" fd_as_int) - - (* Server still initializing *) - end else if not (StatusStream.ever_been_free ()) then begin - let client2 = Option.value_exn client2 in - let status = StatusStream.get_status () in - if client1.server_should_hangup_if_still_initializing then begin - let%lwt () = respond Server_will_hangup (Some (Server_still_initializing status)) in - (* In the case of Ephemeral, CommandConnect will use that response to display *) - (* a message to the user about "--retry-if-init false and still initializing" *) - (* In the case of Persistent, lspCommand will retry a second later. *) - (* The message we failwith here solely goes to the logs, not the user. *) - let (server_status, watchman_status) = status in - failwith ("Server still initializing -> hangup." - ^ " server_status=" ^ (ServerStatus.string_of_status server_status) - ^ " watchman_status=" ^ (FileWatcherStatus.string_of_status watchman_status)) - end else begin - let%lwt () = respond Server_will_continue (Some (Server_still_initializing status)) in - Lwt.return (client1, client2) - end - - (* Success *) - end else begin - let client2 = Option.value_exn client2 in - let%lwt () = respond Server_will_continue (Some Server_ready) in - Lwt.return (client1, client2) - end + in + let fd_as_int = client_fd |> Lwt_unix.unix_file_descr |> Obj.magic in + (* Stop request *) + if client_handshake.is_stop_request then + let%lwt () = respond Server_will_exit None in + let%lwt () = close client_fd () in + Server.exit ~msg:"Killed by `flow stop`. Exiting." FlowExitStatus.No_error + (* Binary version mismatch *) + else if client_handshake.client_build_id <> build_revision then + match client_handshake.version_mismatch_strategy with + | Always_stop_server -> stop_server () + | Stop_server_if_older -> + if Semver.compare Flow_version.version client_handshake.client_version < 0 then + stop_server () + (* server < client *) + else + error_client () + | Error_client -> error_client () + (* Too many clients *) + else if Sys.unix && fd_as_int > 500 then + (* We currently rely on using Unix.select, which doesn't work for fds >= FD_SETSIZE (1024). + * So we can't have an unlimited number of clients. So if the new fd is too large, let's + * reject it. 
+ * TODO(glevi): Figure out whether this check is needed for Windows *) + let%lwt () = respond Server_will_hangup (Some Server_has_too_many_clients) in + failwith (spf "Too many clients, so rejecting new connection (%d)" fd_as_int) + (* Server still initializing *) + else if not (StatusStream.ever_been_free ()) then + let client = Option.value_exn client in + let status = StatusStream.get_status () in + if client_handshake.server_should_hangup_if_still_initializing then ( + let%lwt () = respond Server_will_hangup (Some (Server_still_initializing status)) in + (* In the case of Ephemeral, CommandConnect will use that response to display *) + (* a message to the user about "--retry-if-init false and still initializing" *) + (* In the case of Persistent, lspCommand will retry a second later. *) + (* The message we log here solely goes to the logs, not the user. *) + let (server_status, watchman_status) = status in + Logger.info + "Server still initializing -> hangup. server_status=%s watchman_status=%s" + (ServerStatus.string_of_status server_status) + (FileWatcherStatus.string_of_status watchman_status); + Lwt.return None + ) else + let%lwt () = respond Server_will_continue (Some (Server_still_initializing status)) in + Lwt.return (Some client) + (* Success *) + else + let client = Option.value_exn client in + let%lwt () = respond Server_will_continue (Some Server_ready) in + Lwt.return (Some client)) let catch close exn = (* We catch all exceptions, since one bad connection shouldn't kill the whole monitor *) - begin match exn with - (* Monitor is dying *) - | Lwt.Canceled -> () - | Marshal_tools.Malformed_Preamble_Exception -> - Logger.error - ~exn - "Someone tried to connect to the socket, but spoke a different protocol. Ignoring them" - | exn -> - Logger.error - ~exn - "Exception while trying to establish new connection over the socket. Closing connection" + begin + match exn with + (* Monitor is dying *) + | Lwt.Canceled -> () + | Marshal_tools.Malformed_Preamble_Exception -> + Logger.error + ~exn + "Someone tried to connect to the socket, but spoke a different protocol. Ignoring them" + | exn -> + Logger.error + ~exn + "Exception while trying to establish new connection over the socket. Closing connection" end; close () - module type Handler = sig - val create_socket_connection: - autostop:bool -> (Lwt_unix.file_descr * Lwt_unix.sockaddr) -> - unit Lwt.t + val create_socket_connection : + autostop:bool -> Lwt_unix.file_descr * Lwt_unix.sockaddr -> unit Lwt.t - val name: string + val name : string end module SocketAcceptorLoop (Handler : Handler) = LwtLoop.Make (struct @@ -298,25 +303,27 @@ module MonitorSocketAcceptorLoop = SocketAcceptorLoop (struct Lwt.return_unit in try%lwt - let%lwt (_client1, client2) = perform_handshake_and_get_client_handshake ~client_fd in - match client2.SocketHandshake.client_type with - | SocketHandshake.Ephemeral -> - create_ephemeral_connection ~client_fd ~close - | SocketHandshake.Persistent {logging_context; lsp} -> - create_persistent_connection ~client_fd ~close ~logging_context ~lsp - with exn -> catch close_without_autostop exn - (* Autostop is meant to be "edge-triggered", i.e. when we transition *) - (* from 1 connections to 0 connections then it might stop the server. *) - (* But this catch clause is fired when an attempt to connect has *) - (* failed, and that's why it never triggers an autostop. 
*) + let%lwt client = perform_handshake_and_get_client_handshake ~client_fd in + SocketHandshake.( + match client with + | Some { client_type = Ephemeral; _ } -> create_ephemeral_connection ~client_fd ~close + | Some { client_type = Persistent { lsp_init_params }; _ } -> + create_persistent_connection ~client_fd ~close ~lsp_init_params + | None -> Lwt.return_unit) + with exn -> catch close_without_autostop exn + + (* Autostop is meant to be "edge-triggered", i.e. when we transition *) + (* from 1 connections to 0 connections then it might stop the server. *) + (* But this catch clause is fired when an attempt to connect has *) + (* failed, and that's why it never triggers an autostop. *) end) let run monitor_socket_fd ~autostop = MonitorSocketAcceptorLoop.run ~cancel_condition:ExitSignal.signal (autostop, monitor_socket_fd) - module LegacySocketAcceptorLoop = SocketAcceptorLoop (struct let name = "legacy socket connection" + let create_socket_connection ~autostop:_ (client_fd, _) = let close = close client_fd in try%lwt @@ -324,8 +331,7 @@ module LegacySocketAcceptorLoop = SocketAcceptorLoop (struct FlowEventLogger.out_of_date (); let msg = "Client and server are different builds. Flow server is out of date. Exiting" in Logger.fatal "%s" msg; - Server.exit FlowExitStatus.Build_id_mismatch - ~msg:"Killed by legacy client. Exiting." + Server.exit FlowExitStatus.Build_id_mismatch ~msg:"Killed by legacy client. Exiting." with exn -> catch close exn end) diff --git a/src/monitor/socketAcceptor.mli b/src/monitor/socketAcceptor.mli index e51b1f88450..688e2bc9834 100644 --- a/src/monitor/socketAcceptor.mli +++ b/src/monitor/socketAcceptor.mli @@ -1,9 +1,10 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val run: Lwt_unix.file_descr -> autostop:bool -> unit Lwt.t -val run_legacy: Lwt_unix.file_descr -> unit Lwt.t +val run : Lwt_unix.file_descr -> autostop:bool -> unit Lwt.t + +val run_legacy : Lwt_unix.file_descr -> unit Lwt.t diff --git a/src/monitor/status/dune b/src/monitor/status/dune new file mode 100644 index 00000000000..e1c26c6e600 --- /dev/null +++ b/src/monitor/status/dune @@ -0,0 +1,11 @@ +(library + (name flow_server_status) + (wrapped false) + (libraries + flow_parser + flow_logging + flow_common + build_mode + sys_utils + ) +) diff --git a/src/monitor/status/fileWatcherStatus.ml b/src/monitor/status/fileWatcherStatus.ml new file mode 100644 index 00000000000..c150a6758d7 --- /dev/null +++ b/src/monitor/status/fileWatcherStatus.ml @@ -0,0 +1,28 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type status' = + | Initializing + | Ready + +type status = Options.file_watcher * status' + +let string_of_file_watcher = function + | Options.NoFileWatcher -> "Dummy" + | Options.DFind -> "Dfind" + | Options.Watchman -> "Watchman" + +let string_of_status = + let string_of_status = function + | Initializing -> "still initializing" + | Ready -> "ready" + in + fun (watcher, status) -> + Printf.sprintf + "%s file watcher is %s" + (string_of_file_watcher watcher) + (string_of_status status) diff --git a/src/monitor/status/serverStatus.ml b/src/monitor/status/serverStatus.ml new file mode 100644 index 00000000000..d891607e66b --- /dev/null +++ b/src/monitor/status/serverStatus.ml @@ -0,0 +1,443 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* This module tries to model the Flow server's status as a state machine. The current status is + * the state, and it gets updated by events. This status can then be streamed to Flow clients and + * rendered. + *) + +let spf = Utils_js.spf + +type progress = { + total: int option; + finished: int; +} + +type summary_info = + | RecheckSummary of { + dependent_file_count: int; + changed_file_count: int; + top_cycle: (File_key.t * int) option; (* name of cycle leader, and size of cycle *) + } + | CommandSummary of string + | InitSummary + +type summary = { + duration: float; + info: summary_info; +} + +type event = + | Ready (* The server is free *) + | Init_start (* The server is starting to initialize *) + | Read_saved_state + | Load_saved_state_progress of progress + | Parsing_progress of progress + | Resolving_dependencies_progress + | Calculating_dependencies_progress + | Merging_progress of progress + | Merging_types_progress of progress + | Checking_progress of progress + | Canceling_progress of progress + | Finishing_up of summary (* Server's finishing up typechecking or other work *) + | Recheck_start (* The server is starting to recheck *) + | Handling_request_start (* The server is starting to handle an ephemeral/persistent request *) + | GC_start (* The server is starting to GC *) + | Collating_errors_start (* The server is collating the errors *) + | Watchman_wait_start of (* deadline *) float + +(* The server is now blocked waiting for Watchman *) + +type typecheck_status = + | Starting_typecheck (* A typecheck's initial state *) + | Reading_saved_state + | Loading_saved_state of progress + | Parsing of progress + | Resolving_dependencies + | Calculating_dependencies + | Merging of progress + | Merging_types of progress + | Checking of progress + | Canceling of progress + | Garbage_collecting_typecheck (* We garbage collect during typechecks sometime *) + | Collating_errors (* We sometimes collate errors during typecheck *) + | Finishing_typecheck of summary (* haven't reached free state yet *) + | Waiting_for_watchman of (* deadline *) float + +type restart_reason = + | Server_out_of_date + | Out_of_shared_memory + | Restart + +type typecheck_mode = + | Initializing (* Flow is busy starting up *) + | Rechecking (* Flow is busy rechecking *) + | Handling_request (* Flow is busy handling a request *) + | Restarting of restart_reason + +(* Same as initializing but with a reason why we restarted *) + +type status = + | Starting_up (* The server's initial state *) + | Free (* Not busy doing something else *) + | Typechecking of typecheck_mode * typecheck_status (* Busy doing Flow stuff *) + 
| Garbage_collecting (* This one is pretty obvious *) + | Unknown + +(* A bad state caused by transitioning from a good state due to an unexpected event *) + +let string_of_progress { finished; total } = + match total with + | None -> spf "%d" finished + | Some total -> + spf "%d/%d (%02.1f%%)" finished total (100.0 *. float finished /. float (max 1 total)) + +type emoji = + | Bicyclist + | Closed_book + | Cookie + | Eyes + | File_cabinet + | Ghost + | Open_book + | Panda_face + | Recycling_symbol + | Sleeping_face + | Smiling_face_with_mouth_open + | Taco + | Wastebasket + | Motorcycle + | Skier + +let string_of_emoji = function + | Bicyclist -> "\xF0\x9F\x9A\xB4" + | Closed_book -> "\xF0\x9F\x93\x95" + | Cookie -> "\xF0\x9F\x8D\xAA" + | Eyes -> "\xF0\x9F\x91\x80" + | File_cabinet -> "\xF0\x9F\x97\x84" + | Ghost -> "\xF0\x9F\x91\xBB" + | Open_book -> "\xF0\x9F\x93\x96" + | Panda_face -> "\xF0\x9F\x90\xBC" + | Recycling_symbol -> "\xE2\x99\xBB" + | Sleeping_face -> "\xF0\x9F\x98\xB4" + | Smiling_face_with_mouth_open -> "\xF0\x9F\x98\x83" + | Taco -> "\xF0\x9F\x8C\xAE" + | Wastebasket -> "\xF0\x9F\x97\x91" + | Motorcycle -> "\xf0\x9f\x8f\x8d" + | Skier -> "\xE2\x9B\xB7" + +type pad_emoji = + | Before + | After + +let render_emoji ~use_emoji ?(pad = After) emoji = + if use_emoji then + spf + "%s%s %s" + ( if pad = Before then + " " + else + "" ) + (string_of_emoji emoji) + ( if pad = After then + " " + else + "" ) + else + "" + +let string_of_event = function + | Ready -> "Ready" + | Init_start -> "Init_start" + | Read_saved_state -> "Read_saved_state" + | Load_saved_state_progress progress -> + spf "Load_saved_state_progress %s" (string_of_progress progress) + | Parsing_progress progress -> spf "Parsing_progress files %s" (string_of_progress progress) + | Calculating_dependencies_progress -> "Calculating_dependencies_progress" + | Resolving_dependencies_progress -> "Resolving_dependencies_progress" + | Merging_progress progress -> spf "Merging_progress %s" (string_of_progress progress) + | Merging_types_progress progress -> + spf "Merging_types_progress %s" (string_of_progress progress) + | Checking_progress progress -> spf "Checking_progress files %s" (string_of_progress progress) + | Canceling_progress progress -> spf "Canceling_progress %s" (string_of_progress progress) + | Finishing_up _ -> "Finishing_up" + | Recheck_start -> "Recheck_start" + | Handling_request_start -> "Handling_request_start" + | GC_start -> "GC_start" + | Collating_errors_start -> "Collating_errors_start" + | Watchman_wait_start deadline -> spf "Watchman_wait_start %f" deadline + +(* As a general rule, use past tense for status updates that show progress and present perfect + progressive for those that don't. 
*) +let string_of_typecheck_status ~use_emoji = function + | Starting_typecheck -> spf "%sstarting up" (render_emoji ~use_emoji Sleeping_face) + | Reading_saved_state -> spf "%sreading saved state" (render_emoji ~use_emoji Closed_book) + | Loading_saved_state progress -> + spf + "%sloading saved state %s" + (render_emoji ~use_emoji Open_book) + (string_of_progress progress) + | Parsing progress -> + spf "%sparsed files %s" (render_emoji ~use_emoji Ghost) (string_of_progress progress) + | Resolving_dependencies -> spf "%sresolving dependencies" (render_emoji ~use_emoji Taco) + | Calculating_dependencies -> spf "%scalculating dependencies" (render_emoji ~use_emoji Taco) + | Merging progress -> + spf "%smerged files %s" (render_emoji ~use_emoji Bicyclist) (string_of_progress progress) + | Merging_types progress -> + spf + "%smerged module interfaces %s" + (render_emoji ~use_emoji Motorcycle) + (string_of_progress progress) + | Checking progress -> + spf "%schecked files %s" (render_emoji ~use_emoji Skier) (string_of_progress progress) + | Canceling progress -> + spf + "%scanceling workers %s" + (render_emoji ~use_emoji Recycling_symbol) + (string_of_progress progress) + | Garbage_collecting_typecheck -> + spf "%sgarbage collecting shared memory" (render_emoji ~use_emoji Wastebasket) + | Collating_errors -> spf "%scollating errors" (render_emoji ~use_emoji File_cabinet) + | Waiting_for_watchman deadline -> + spf + "%swaiting for Watchman - giving up in %d seconds" + (render_emoji ~use_emoji Eyes) + (max 0 (int_of_float @@ (deadline -. Unix.gettimeofday ()))) + | Finishing_typecheck _ -> spf "%sfinishing up" (render_emoji ~use_emoji Cookie) + +let string_of_restart_reason = function + | Server_out_of_date -> "restarting due to change which cannot be handled incrementally" + | Out_of_shared_memory -> "restarting due to running out of shared memory" + | Restart -> "restarting to save time" + +let string_of_status ?(use_emoji = false) ?(terse = false) status = + let status_string = + match status with + | Starting_up -> spf "starting up%s" (render_emoji ~use_emoji ~pad:Before Sleeping_face) + | Free -> spf "free%s" (render_emoji ~use_emoji ~pad:Before Smiling_face_with_mouth_open) + | Typechecking (Initializing, tcs) -> + spf "initializing (%s)" (string_of_typecheck_status ~use_emoji tcs) + | Typechecking (Rechecking, tcs) -> + spf "rechecking (%s)" (string_of_typecheck_status ~use_emoji tcs) + | Typechecking (Handling_request, tcs) -> + spf "handling a request (%s)" (string_of_typecheck_status ~use_emoji tcs) + | Typechecking (Restarting reason, tcs) -> + spf "%s (%s)" (string_of_restart_reason reason) (string_of_typecheck_status ~use_emoji tcs) + | Garbage_collecting -> + spf "garbage collecting shared memory%s" (render_emoji ~use_emoji ~pad:Before Wastebasket) + | Unknown -> spf "doing something%s" (render_emoji ~use_emoji ~pad:Before Panda_face) + in + spf + "%s%s" + ( if terse then + "" + else + "Server is " ) + status_string + +(* Transition function for the status state machine. 
Given the current status and the event, + * pick a new status *) +let update ~event ~status = + match (event, status) with + | (Ready, _) -> Free + | (Init_start, _) -> Typechecking (Initializing, Starting_typecheck) + | (Recheck_start, _) -> Typechecking (Rechecking, Starting_typecheck) + | (Handling_request_start, _) -> Typechecking (Handling_request, Starting_typecheck) + | (Read_saved_state, Typechecking (mode, _)) -> Typechecking (mode, Reading_saved_state) + | (Load_saved_state_progress progress, Typechecking (mode, _)) -> + Typechecking (mode, Loading_saved_state progress) + | (Parsing_progress progress, Typechecking (mode, _)) -> Typechecking (mode, Parsing progress) + | (Resolving_dependencies_progress, Typechecking (mode, _)) -> + Typechecking (mode, Resolving_dependencies) + | (Calculating_dependencies_progress, Typechecking (mode, _)) -> + Typechecking (mode, Calculating_dependencies) + | (Merging_progress progress, Typechecking (mode, _)) -> Typechecking (mode, Merging progress) + | (Merging_types_progress progress, Typechecking (mode, _)) -> + Typechecking (mode, Merging_types progress) + | (Checking_progress progress, Typechecking (mode, _)) -> Typechecking (mode, Checking progress) + | (Canceling_progress progress, Typechecking (mode, _)) -> Typechecking (mode, Canceling progress) + | (GC_start, Typechecking (mode, _)) -> Typechecking (mode, Garbage_collecting_typecheck) + | (Collating_errors_start, Typechecking (mode, _)) -> Typechecking (mode, Collating_errors) + | (Watchman_wait_start deadline, Typechecking (mode, _)) -> + Typechecking (mode, Waiting_for_watchman deadline) + | (Finishing_up summary, Typechecking (mode, _)) -> + Typechecking (mode, Finishing_typecheck summary) + | (GC_start, _) -> Garbage_collecting + | _ -> + (* This is a bad transition. In dev mode, let's blow up since something is wrong. However in + * production let's soldier on. Usually this means that we forgot to send something like + * Handling_request_start before we sent a Merging_progress. 
*) + if Build_mode.dev then + failwith + (spf + "Unexpected status transition from '%s' with event '%s'" + (string_of_status status) + (string_of_event event)) + else + Unknown + +let initial_status = Starting_up + +let is_free = function + | Free -> true + | _ -> false + +(* Returns true iff the transition from old_status to new_status is "significant", which is a + * pretty arbitrary judgement of how interesting the new status is to a user, given that they + * already have seen the old status *) +let is_significant_transition old_status new_status = + (* If the statuses are literally the same, then the transition is not significant *) + old_status <> new_status + && + match (old_status, new_status) with + | (Typechecking (old_mode, old_tc_status), Typechecking (new_mode, new_tc_status)) -> + (* A change in mode is always signifcant *) + old_mode <> new_mode + || + begin + match (old_tc_status, new_tc_status) with + (* Making progress within parsing, merging or canceling is not significant *) + | (Parsing _, Parsing _) + | (Merging _, Merging _) + | (Merging_types _, Merging_types _) + | (Checking _, Checking _) + | (Canceling _, Canceling _) -> + false + (* But changing typechecking status always is significant *) + | (_, Starting_typecheck) + | (_, Reading_saved_state) + | (_, Loading_saved_state _) + | (_, Parsing _) + | (_, Resolving_dependencies) + | (_, Calculating_dependencies) + | (_, Merging _) + | (_, Merging_types _) + | (_, Checking _) + | (_, Canceling _) + | (_, Garbage_collecting_typecheck) + | (_, Waiting_for_watchman _) + | (_, Collating_errors) + | (_, Finishing_typecheck _) -> + true + end + (* Switching to a completely different status is always significant *) + | (_, Starting_up) + | (_, Free) + | (_, Typechecking _) + | (_, Garbage_collecting) + | (_, Unknown) -> + true + +let get_progress status = + let print progress = + match progress with + | { finished; total = None } -> (Some (Printf.sprintf "%d" finished), Some finished, None) + | { finished; total = Some total } -> + (Some (Printf.sprintf "%d/%d" finished total), Some finished, Some total) + in + match status with + | Typechecking (_, Parsing progress) + | Typechecking (_, Merging progress) + | Typechecking (_, Merging_types progress) + | Typechecking (_, Checking progress) + | Typechecking (_, Canceling progress) -> + print progress + | _ -> (None, None, None) + +let get_summary status = + match status with + | Typechecking (_mode, Finishing_typecheck summary) -> Some summary + | _ -> None + +let log_of_summaries ~(root : Path.t) (summaries : summary list) : FlowEventLogger.persistent_delay + = + FlowEventLogger.( + let init = + { + init_duration = 0.0; + command_count = 0; + command_duration = 0.0; + command_worst = None; + command_worst_duration = None; + recheck_count = 0; + recheck_dependent_files = 0; + recheck_changed_files = 0; + recheck_duration = 0.0; + recheck_worst_duration = None; + recheck_worst_dependent_file_count = None; + recheck_worst_changed_file_count = None; + recheck_worst_cycle_leader = None; + recheck_worst_cycle_size = None; + } + in + let f acc { duration; info } = + match info with + | InitSummary -> + let acc = { acc with init_duration = acc.init_duration +. 
duration } in + acc + | CommandSummary cmd -> + let is_worst = + match acc.command_worst_duration with + | None -> true + | Some d -> duration >= d + in + let acc = + if not is_worst then + acc + else + { acc with command_worst = Some cmd; command_worst_duration = Some duration } + in + let acc = + { + acc with + command_count = acc.command_count + 1; + command_duration = acc.command_duration +. duration; + } + in + acc + | RecheckSummary { dependent_file_count; changed_file_count; top_cycle } -> + let is_worst = + match acc.recheck_worst_duration with + | None -> true + | Some d -> duration >= d + in + let acc = + if not is_worst then + acc + else + { + acc with + recheck_worst_duration = Some duration; + recheck_worst_dependent_file_count = Some dependent_file_count; + recheck_worst_changed_file_count = Some changed_file_count; + recheck_worst_cycle_size = Option.map top_cycle ~f:(fun (_, size) -> size); + recheck_worst_cycle_leader = + Option.map top_cycle ~f:(fun (f, _) -> + f |> File_key.to_string |> Files.relative_path (Path.to_string root)); + } + in + let acc = + { + acc with + recheck_count = acc.recheck_count + 1; + recheck_dependent_files = acc.recheck_dependent_files + dependent_file_count; + recheck_changed_files = acc.recheck_changed_files + changed_file_count; + recheck_duration = acc.recheck_duration +. duration; + } + in + acc + in + Core_list.fold summaries ~init ~f) + +(* When the server is initializing it will publish statuses that say it is initializing. The + * monitor might know that the server actually is restarting. This function turns a initializing + * status into a restarting status *) +let change_init_to_restart restart_reason status = + Option.value_map restart_reason ~default:status ~f:(fun restart_reason -> + match status with + | Typechecking (Initializing, tcs) -> Typechecking (Restarting restart_reason, tcs) + | _ -> status) diff --git a/src/monitor/status/serverStatus.mli b/src/monitor/status/serverStatus.mli new file mode 100644 index 00000000000..f1fe4ed0390 --- /dev/null +++ b/src/monitor/status/serverStatus.mli @@ -0,0 +1,69 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type progress = { + total: int option; + finished: int; +} + +type summary_info = + | RecheckSummary of { + dependent_file_count: int; + changed_file_count: int; + top_cycle: (File_key.t * int) option; (* name of cycle leader, and size of cycle *) + } + | CommandSummary of string + | InitSummary + +type summary = { + duration: float; + info: summary_info; +} + +type event = + | Ready + | Init_start + | Read_saved_state + | Load_saved_state_progress of progress + | Parsing_progress of progress + | Resolving_dependencies_progress + | Calculating_dependencies_progress + | Merging_progress of progress + | Merging_types_progress of progress + | Checking_progress of progress + | Canceling_progress of progress + | Finishing_up of summary + | Recheck_start + | Handling_request_start + | GC_start + | Collating_errors_start + | Watchman_wait_start of float + +type status + +type restart_reason = + | Server_out_of_date + | Out_of_shared_memory + | Restart + +val initial_status : status + +val update : event:event -> status:status -> status + +val string_of_status : ?use_emoji:bool -> ?terse:bool -> status -> string + +val is_free : status -> bool + +val is_significant_transition : status -> status -> bool + +val get_progress : status -> string option * int option * int option + +val get_summary : status -> summary option + +val log_of_summaries : root:Path.t -> summary list -> FlowEventLogger.persistent_delay + +val change_init_to_restart : restart_reason option -> status -> status diff --git a/src/monitor/statusStream.ml b/src/monitor/statusStream.ml index cd4ce0af8b6..8decc63e149 100644 --- a/src/monitor/statusStream.ml +++ b/src/monitor/statusStream.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -20,8 +20,9 @@ type t = { mutable status: ServerStatus.status; mutable watcher_status: FileWatcherStatus.status; mutable ever_been_free: bool; + restart_reason: ServerStatus.restart_reason option; stream: ServerStatus.status Lwt_stream.t; - push_to_stream: ServerStatus.status option -> unit + push_to_stream: ServerStatus.status option -> unit; } (* Multiple threads might call StreamStatus functions. 
*) @@ -34,18 +35,19 @@ let significant_transition = Lwt_condition.create () let check_if_free = let invoke_all_call_on_free () = - let%lwt to_call = Lwt_mutex.with_lock mutex (fun () -> - let to_call = !to_call_on_free in - to_call_on_free := []; - Lwt.return to_call - ) in + let%lwt to_call = + Lwt_mutex.with_lock mutex (fun () -> + let to_call = !to_call_on_free in + to_call_on_free := []; + Lwt.return to_call) + in Lwt_list.iter_p (fun f -> f ()) to_call - - in fun t -> - if ServerStatus.is_free t.status && (snd t.watcher_status) = FileWatcherStatus.Ready then begin + in + fun t -> + if ServerStatus.is_free t.status && snd t.watcher_status = FileWatcherStatus.Ready then ( t.ever_been_free <- true; Lwt.async invoke_all_call_on_free - end + ) let broadcast_significant_transition t = Lwt_condition.broadcast significant_transition (t.status, t.watcher_status) @@ -54,6 +56,7 @@ module UpdateLoop = LwtLoop.Make (struct type acc = t let process_update t new_status = + let new_status = ServerStatus.change_init_to_restart t.restart_reason new_status in Logger.debug "Server status: %s" (ServerStatus.string_of_status new_status); let old_status = t.status in @@ -62,48 +65,49 @@ module UpdateLoop = LwtLoop.Make (struct check_if_free t; - if ServerStatus.is_significant_transition old_status new_status - then broadcast_significant_transition t; + if ServerStatus.is_significant_transition old_status new_status then + broadcast_significant_transition t; Lwt.return t - let main t = let%lwt new_status = Lwt_stream.next t.stream in process_update t new_status let catch _ exn = match exn with - | Lwt_stream.Empty -> - Lwt.return_unit (* This is the signal to stop *) + | Lwt_stream.Empty -> Lwt.return_unit (* This is the signal to stop *) | exn -> Logger.error ~exn "ServerStatus update loop hit an unexpected exception"; Lwt.return_unit end) -let empty file_watcher = - let stream, push_to_stream = Lwt_stream.create () in - let ret = { - status = ServerStatus.initial_status; - watcher_status = (file_watcher, FileWatcherStatus.Initializing); - ever_been_free = false; - stream; - push_to_stream; - } in +let empty file_watcher restart_reason = + let (stream, push_to_stream) = Lwt_stream.create () in + let ret = + { + status = ServerStatus.initial_status; + watcher_status = (file_watcher, FileWatcherStatus.Initializing); + ever_been_free = false; + restart_reason; + stream; + push_to_stream; + } + in Lwt.async (fun () -> UpdateLoop.run ret); ret (* This is the status info for the current Flow server *) -let current_status = ref (empty Options.NoFileWatcher) +let current_status = ref (empty Options.NoFileWatcher None) (* Call f the next time the server is free. 
If the server is currently free, then call now *) let call_on_free ~f = - if ServerStatus.is_free !current_status.status - then f () - else Lwt_mutex.with_lock mutex (fun () -> - to_call_on_free := f::!to_call_on_free; - Lwt.return_unit - ) + if ServerStatus.is_free !current_status.status then + f () + else + Lwt_mutex.with_lock mutex (fun () -> + to_call_on_free := f :: !to_call_on_free; + Lwt.return_unit) let file_watcher_ready () = let t = !current_status in @@ -112,25 +116,27 @@ let file_watcher_ready () = broadcast_significant_transition t (* When a new server starts up, we close the old server's status stream and start over *) -let reset file_watcher = Lwt_mutex.with_lock mutex (fun () -> - !current_status.push_to_stream None; - current_status := empty file_watcher; - Lwt.return_unit -) +let reset file_watcher restart_reason = + Lwt_mutex.with_lock mutex (fun () -> + !current_status.push_to_stream None; + current_status := empty file_watcher restart_reason; + Lwt.return_unit) let get_status () = - let { status; watcher_status; _; } = !current_status in - status, watcher_status + let { status; watcher_status; _ } = !current_status in + (status, watcher_status) let ever_been_free () = !current_status.ever_been_free let wait_for_signficant_status ~timeout = (* If there is a significant transition before the timeout, the cancel the sleep and return the * new status. Otherwise, stop waiting on the condition variable and return the current status *) - Lwt.pick [ - (let%lwt () = Lwt_unix.sleep timeout in Lwt.return (get_status ())); - Lwt_condition.wait significant_transition; - ] + Lwt.pick + [ + (let%lwt () = Lwt_unix.sleep timeout in + Lwt.return (get_status ())); + Lwt_condition.wait significant_transition; + ] (* Updates will show up on the connection in order. Let's push them immediately to a stream to * preserve that order *) diff --git a/src/monitor/statusStream.mli b/src/monitor/statusStream.mli index 0c9a2a4c18a..c72a95e13c0 100644 --- a/src/monitor/statusStream.mli +++ b/src/monitor/statusStream.mli @@ -1,16 +1,21 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val reset: Options.file_watcher -> unit Lwt.t -val update: status:ServerStatus.status -> unit -val file_watcher_ready: unit -> unit -val call_on_free: f:(unit -> unit Lwt.t) -> unit Lwt.t -val get_status: unit -> ServerStatus.status * FileWatcherStatus.status -val wait_for_signficant_status: - timeout:float -> - (ServerStatus.status * FileWatcherStatus.status) Lwt.t -val ever_been_free: unit -> bool +val reset : Options.file_watcher -> ServerStatus.restart_reason option -> unit Lwt.t + +val update : status:ServerStatus.status -> unit + +val file_watcher_ready : unit -> unit + +val call_on_free : f:(unit -> unit Lwt.t) -> unit Lwt.t + +val get_status : unit -> ServerStatus.status * FileWatcherStatus.status + +val wait_for_signficant_status : + timeout:float -> (ServerStatus.status * FileWatcherStatus.status) Lwt.t + +val ever_been_free : unit -> bool diff --git a/src/monitor/utils/dune b/src/monitor/utils/dune new file mode 100644 index 00000000000..78fba8e857f --- /dev/null +++ b/src/monitor/utils/dune @@ -0,0 +1,9 @@ +(library + (name flow_monitor_utils) + (wrapped false) + (libraries + flow_common + flow_exit_status + lwt + ) +) diff --git a/src/monitor/utils/exitSignal.ml b/src/monitor/utils/exitSignal.ml index ff87e85ced3..851343733c2 100644 --- a/src/monitor/utils/exitSignal.ml +++ b/src/monitor/utils/exitSignal.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/monitor/utils/exitSignal.mli b/src/monitor/utils/exitSignal.mli index 37d92f0fedc..2677d78053c 100644 --- a/src/monitor/utils/exitSignal.mli +++ b/src/monitor/utils/exitSignal.mli @@ -1,8 +1,8 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val signal: (FlowExitStatus.t * string) Lwt_condition.t +val signal : (FlowExitStatus.t * string) Lwt_condition.t diff --git a/src/parser/.merlin b/src/parser/.merlin deleted file mode 100644 index 9a9a7f2612f..00000000000 --- a/src/parser/.merlin +++ /dev/null @@ -1,4 +0,0 @@ -PKG sedlex -PKG wtf8 - -REC diff --git a/src/parser/META b/src/parser/META index c9c58de698e..d49ba725b93 100644 --- a/src/parser/META +++ b/src/parser/META @@ -1,5 +1,5 @@ name="parser_flow" -version="0.82.0" +version="0.108.0" requires = "sedlex wtf8" description="flow parser ocamlfind package" archive(byte)="parser_flow.cma" diff --git a/src/parser/Makefile b/src/parser/Makefile index d86c1239b41..3fdee7b5dcc 100644 --- a/src/parser/Makefile +++ b/src/parser/Makefile @@ -1,4 +1,4 @@ -# Copyright (c) 2013-present, Facebook, Inc. +# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
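For orientation, here is a minimal, illustrative sketch (not part of this patch) of how the revised StatusStream interface shown above might be consumed by monitor-side code. It assumes lwt with lwt_ppx and the monitor libraries (StatusStream, ServerStatus, FileWatcherStatus, Options) are linked; the names poll_status_until_free and on_server_restart are hypothetical, and wait_for_signficant_status is spelled exactly as in the .mli above.

(* Illustrative sketch only -- not part of this diff. *)

(* Log every "significant" status transition until the server reports free. *)
let rec poll_status_until_free () =
  (* Wakes on the next significant transition, or after 1s, whichever comes first. *)
  let%lwt (server_status, watcher_status) =
    StatusStream.wait_for_signficant_status ~timeout:1.0
  in
  prerr_endline (ServerStatus.string_of_status ~terse:true server_status);
  prerr_endline (FileWatcherStatus.string_of_status watcher_status);
  if ServerStatus.is_free server_status then
    Lwt.return_unit
  else
    poll_status_until_free ()

(* With the new restart_reason argument to reset, a monitor that restarts the
 * server after running out of shared memory can record why, so that later
 * "initializing" statuses render as "restarting due to running out of shared
 * memory" rather than plain initialization. *)
let on_server_restart () =
  StatusStream.reset Options.DFind (Some ServerStatus.Out_of_shared_memory)

The recorded reason is what ServerStatus.change_init_to_restart (see serverStatus.ml above) uses to rewrite Typechecking (Initializing, _) into Typechecking (Restarting reason, _) while the replacement server starts up.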
@@ -22,12 +22,13 @@ NATIVE_OBJECT_FILES=\ RUNNER_DEPS=\ hack/heap\ hack/injection/default_injector\ - hack/utils\ + hack/third-party/core\ hack/utils/collections\ + hack/utils/core\ hack/utils/disk\ hack/utils/hh_json\ + hack/utils/string\ hack/utils/sys\ - hack/third-party/core\ src/common/utils\ src/parser\ src/parser_utils diff --git a/src/parser/__tests__/offset_utils_test.ml b/src/parser/__tests__/offset_utils_test.ml new file mode 100644 index 00000000000..5eda9029a1a --- /dev/null +++ b/src/parser/__tests__/offset_utils_test.ml @@ -0,0 +1,164 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let pos line column = Loc.{ line; column } + +(* UTF-8 encoding of code point 0x2028, line separator *) +let line_sep = "\xe2\x80\xa8" + +(* UTF-8 encoding of code point 0x2029, paragraph separator *) +let par_sep = "\xe2\x80\xa9" + +(* UTF-8 encoding of code point 0x1f603, some form of a smiley *) +let smiley = "\xf0\x9f\x98\x83" + +let str_with_smiley = Printf.sprintf "foo %s bar\nbaz\n" smiley + +let get_offset table pos = + try Offset_utils.offset table pos + with Offset_utils.Offset_lookup_failed (_pos, msg) -> + assert_failure + (Printf.sprintf "Lookup failed: %s\nTable:\n%s" msg (Offset_utils.debug_string table)) + +let run ctxt text (line, col) expected_offset = + let table = Offset_utils.make text in + let offset = get_offset table (pos line col) in + assert_equal ~ctxt ~printer:string_of_int expected_offset offset + +let run_expect_failure text (line, col) expected_msg = + let table = Offset_utils.make text in + let p = pos line col in + let f () = Offset_utils.offset table p in + let expected_exn = Offset_utils.Offset_lookup_failed (p, expected_msg) in + assert_raises expected_exn f + +class loc_extractor = + object (this) + inherit [Loc.t, Loc.t, unit, unit] Flow_polymorphic_ast_mapper.mapper + + (* Locations built up in reverse order *) + val mutable locs = [] + + method get_locs = locs + + method on_loc_annot loc = locs <- loc :: locs + + method on_type_annot = this#on_loc_annot + end + +let extract_locs ast = + let extractor = new loc_extractor in + let (_ : (unit, unit) Flow_ast.program) = extractor#program ast in + List.rev extractor#get_locs + +(* This tests to make sure that we can find an offset for all real-world locations that the parser + * can produce, and that I haven't made any incorrect assumptions about edge cases in the rest of + * the tests. *) +let run_full_test source = + let (ast, _) = Parser_flow.program ~fail:false source in + let all_locs = extract_locs ast in + let all_positions = + Loc.( + let all_starts = List.map (fun { start; _ } -> start) all_locs in + let all_ends = List.map (fun { _end; _ } -> _end) all_locs in + all_starts @ all_ends) + in + let offset_table = Offset_utils.make source in + (* Just make sure it doesn't crash *) + List.iter + begin + fun loc -> + let (_ : int) = get_offset offset_table loc in + () + end + all_positions + +let tests = + "offset_utils" + >::: [ + ("empty_line" >:: (fun ctxt -> run ctxt "foo\n\nbar" (3, 0) 5)); + ( "Loc.none" + >:: fun ctxt -> + (* This is a fake location but it's used often enough that we should at least not crash when + * encountering it. 
*) + run ctxt "" (0, 0) 0 ); + ("first_char" >:: (fun ctxt -> run ctxt "foo bar\n" (1, 0) 0)); + ("last_char" >:: (fun ctxt -> run ctxt "foo bar\n" (1, 6) 6)); + ( "column_after_last" + >:: fun ctxt -> + (* The parser gives us locations where the `end` position is exclusive. Even though the last + * character of the "foo" token is in column 2, the location of "foo" is given as + * ((1, 0), (1, 3)). Because of this, we need to make sure we can look up locations that are + * after the final column of a line, even though these locations don't correspond with an actual + * character. *) + run ctxt "foo\nbar\n" (1, 3) 3 ); + ( "char_after_last" + >:: fun ctxt -> + (* See the comment in the previous test *) + run ctxt "foo\nbar" (2, 3) 7 ); + ( "empty" + >:: fun ctxt -> + (* Similar to above, we should be able to get one offset in an empty string *) + run ctxt "" (1, 0) 0 ); + ("no_last_line_terminator" >:: (fun ctxt -> run ctxt "foo bar" (1, 6) 6)); + ("multi_line" >:: (fun ctxt -> run ctxt "foo\nbar\n" (2, 1) 5)); + ("carriage_return" >:: (fun ctxt -> run ctxt "foo\rbar\r" (2, 1) 5)); + ("windows_line_terminator" >:: (fun ctxt -> run ctxt "foo\r\nbar\r\n" (2, 1) 6)); + ( "unicode_line_separator" + >:: fun ctxt -> + (* Each line separator character is 3 bytes. The returned offset reflects that. *) + run ctxt (Printf.sprintf "foo%sbar%s" line_sep line_sep) (2, 1) 7 ); + ( "unicode_paragraph_separator" + >:: fun ctxt -> + (* Each line separator character is 3 bytes. The returned offset reflects that. *) + run ctxt (Printf.sprintf "foo%sbar%s" par_sep par_sep) (2, 1) 7 ); + ("offset_before_multibyte_char" >:: (fun ctxt -> run ctxt str_with_smiley (1, 3) 3)); + ( "offset_of_multibyte_char" + >:: fun ctxt -> + (* This is the position of the smiley. The offset should give us the first byte in the + * character. *) + run ctxt str_with_smiley (1, 4) 4 ); + ( "offset_after_multibyte_char" + >:: fun ctxt -> + (* This is the position after the smiley. The offset should reflect the width of the multibyte + * character (4 bytes in this case). *) + run ctxt str_with_smiley (1, 5) 8 ); + ("offset_line_after_multibyte_char" >:: (fun ctxt -> run ctxt str_with_smiley (2, 0) 13)); + ( "out_of_bounds_line" + >:: fun _ctxt -> + run_expect_failure "foo\n" (5, 0) "Failure while looking up line. Index: 4. Length: 2." ); + ( "out_of_bounds_column" + >:: fun _ctxt -> + run_expect_failure + "foo\n" + (1, 10) + "Failure while looking up column. Index: 10. Length: 4." ); + ( "full_test" + >:: fun _ctxt -> + (* Note that there is no newline at the end of the string -- I found a bug in an initial version + * which was exposed by not having a final newline character. *) + let source = "const foo = 4;\nconst bar = foo + 2;" in + run_full_test source ); + ( "lexing_error_newline_test" + >:: fun _ctxt -> + let source = "\"foo\nbar\"" in + run_full_test source ); + ( "lexing_error_throw" + >:: fun _ctxt -> + let source = "throw\n" in + run_full_test source ); + ( "lexing_error_regex_newline" + >:: fun _ctxt -> + let source = "/\n/" in + run_full_test source ); + ( "lexing_error_complex_regex_newline" + >:: fun _ctxt -> + let source = "/a\\\n/" in + run_full_test source ); + ] diff --git a/src/parser/__tests__/parser_tests.ml b/src/parser/__tests__/parser_tests.ml new file mode 100644 index 00000000000..0e4dc4207b4 --- /dev/null +++ b/src/parser/__tests__/parser_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "parser_utils" >::: [Offset_utils_test.tests] + +let () = run_test_tt_main tests diff --git a/src/parser/aLoc.ml b/src/parser/aLoc.ml deleted file mode 100644 index 91c72e4d767..00000000000 --- a/src/parser/aLoc.ml +++ /dev/null @@ -1,11 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type t = Loc.t - -let of_loc loc = loc -let to_loc loc = loc diff --git a/src/parser/aLoc.mli b/src/parser/aLoc.mli deleted file mode 100644 index bc3628235b0..00000000000 --- a/src/parser/aLoc.mli +++ /dev/null @@ -1,15 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type t - -(* TODO at some point we will need a state type for the lookup table. probably going to pass it - * around genv. *) - -(* TODO at some point we will also need to provide a key of some sort here *) -val of_loc: Loc.t -> t -val to_loc: t -> Loc.t diff --git a/src/parser/ast_utils.ml b/src/parser/ast_utils.ml deleted file mode 100644 index 82af30f4aaf..00000000000 --- a/src/parser/ast_utils.ml +++ /dev/null @@ -1,166 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Flow_ast - -type binding = Loc.t * string -type ident = Loc.t * string -type source = Loc.t * string - -let rec bindings_of_pattern = - let open Pattern in - let property acc = - let open Object in - function - | Property (_, { Property.pattern = (_, p); _ }) - | RestProperty (_, { RestProperty.argument = (_, p) }) -> - bindings_of_pattern acc p - in - let element acc = - let open Array in - function - | None -> acc - | Some (Element (_, p)) - | Some (RestElement (_, { RestElement.argument = (_, p) })) -> - bindings_of_pattern acc p - in - fun acc -> - function - | Identifier { Identifier.name; _ } -> - name::acc - | Object { Object.properties; _ } -> - List.fold_left property acc properties - | Array { Array.elements; _ } -> - List.fold_left element acc elements - | Assignment { Assignment.left = (_, p); _ } -> - bindings_of_pattern acc p - | Expression _ -> - failwith "expression pattern" - -let bindings_of_variable_declarations = - let open Flow_ast.Statement.VariableDeclaration in - List.fold_left (fun acc -> function - | _, { Declarator.id = (_, pattern); _ } -> - bindings_of_pattern acc pattern - ) [] - -let partition_directives statements = - let open Flow_ast.Statement in - let rec helper directives = function - | ((_, Expression { Expression.directive = Some _; _ }) as directive)::rest -> - helper (directive::directives) rest - | rest -> List.rev directives, rest - in - helper [] statements - -let negate_number_literal (value, raw) = - let raw_len = String.length raw in - let raw = if raw_len > 0 && raw.[0] = '-' - then String.sub raw 1 (raw_len - 1) - else "-" ^ raw - in - ~-. 
value, raw - -let loc_of_statement = fst - -let loc_of_expression = fst - -let loc_of_pattern = fst - -let string_of_binary_operator op = - let open Flow_ast.Expression.Binary in - match op with - | Equal -> "==" - | NotEqual -> "!=" - | StrictEqual -> "===" - | StrictNotEqual -> "!==" - | LessThan -> "<" - | LessThanEqual -> "<=" - | GreaterThan -> ">" - | GreaterThanEqual -> ">=" - | LShift -> "<<" - | RShift -> ">>" - | RShift3 -> ">>>" - | Plus -> "+" - | Minus -> "-" - | Mult -> "*" - | Exp -> "**" - | Div -> "/" - | Mod -> "%" - | BitOr -> "|" - | Xor -> "^" - | BitAnd -> "&" - | In -> "in" - | Instanceof -> "instanceof" - -module ExpressionSort = struct - type t = - | Array - | ArrowFunction - | Assignment - | Binary - | Call - | Class - | Comprehension - | Conditional - | Function - | Generator - | Identifier - | Import - | JSXElement - | JSXFragment - | Literal - | Logical - | Member - | MetaProperty - | New - | Object - | OptionalCall - | OptionalMember - | Sequence - | Super - | TaggedTemplate - | TemplateLiteral - | This - | TypeCast - | Unary - | Update - | Yield - - let to_string = function - | Array -> "Array" - | ArrowFunction -> "ArrowFunction" - | Assignment -> "Assignment" - | Binary -> "Binary" - | Call -> "Call" - | Class -> "Class" - | Comprehension -> "Comprehension" - | Conditional -> "Conditional" - | Function -> "Function" - | Generator -> "Generator" - | Identifier -> "Identifier" - | Import -> "Import" - | JSXElement -> "JSXElement" - | JSXFragment -> "JSXFragment" - | Literal -> "Literal" - | Logical -> "Logical" - | Member -> "Member" - | MetaProperty -> "Metaproperty" - | New -> "New" - | Object -> "Object" - | OptionalCall -> "OptionalCall" - | OptionalMember -> "OptionalMember" - | Sequence -> "Sequence" - | Super -> "Super" - | TaggedTemplate -> "TaggedTemplate" - | TemplateLiteral -> "TemplateLiteral" - | This -> "This" - | TypeCast -> "TypeCast" - | Unary -> "Unary" - | Update -> "Update" - | Yield -> "Yield" -end diff --git a/src/parser/ast_utils.mli b/src/parser/ast_utils.mli deleted file mode 100644 index 88ea921d67c..00000000000 --- a/src/parser/ast_utils.mli +++ /dev/null @@ -1,76 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -type binding = Loc.t * string -type ident = Loc.t * string -type source = Loc.t * string - -val bindings_of_pattern: - binding list -> - (Loc.t, Loc.t) Flow_ast.Pattern.t' -> - binding list - -val bindings_of_variable_declarations: - (Loc.t, Loc.t) Flow_ast.Statement.VariableDeclaration.Declarator.t list -> - binding list - -val partition_directives: - (Loc.t, Loc.t) Flow_ast.Statement.t list -> - (Loc.t, Loc.t) Flow_ast.Statement.t list * (Loc.t, Loc.t) Flow_ast.Statement.t list - -val negate_number_literal: - float * string -> - float * string - -val loc_of_expression: - ('a, 'a) Flow_ast.Expression.t -> 'a - -val loc_of_statement: - ('a, 'a) Flow_ast.Statement.t -> 'a - -val loc_of_pattern: - ('a, 'a) Flow_ast.Pattern.t -> 'a - -module ExpressionSort: sig - type t = - | Array - | ArrowFunction - | Assignment - | Binary - | Call - | Class - | Comprehension - | Conditional - | Function - | Generator - | Identifier - | Import - | JSXElement - | JSXFragment - | Literal - | Logical - | Member - | MetaProperty - | New - | Object - | OptionalCall - | OptionalMember - | Sequence - | Super - | TaggedTemplate - | TemplateLiteral - | This - | TypeCast - | Unary - | Update - | Yield - - val to_string: t -> string -end - -val string_of_binary_operator: - Flow_ast.Expression.Binary.operator -> string diff --git a/src/parser/declaration_parser.ml b/src/parser/declaration_parser.ml index 6ec3eee6514..449056007f7 100644 --- a/src/parser/declaration_parser.ml +++ b/src/parser/declaration_parser.ml @@ -1,105 +1,100 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Token open Parser_common open Parser_env open Flow_ast -module Error = Parse_error -module SSet = Set.Make(String) +module SSet = Set.Make (String) module type DECLARATION = sig - val async: env -> bool - val generator: env -> bool - val variance: env -> bool -> bool -> Loc.t Variance.t option - val function_params: await:bool -> yield:bool -> env -> (Loc.t, Loc.t) Ast.Function.Params.t - val function_body: env -> async:bool -> generator:bool -> Loc.t * (Loc.t, Loc.t) Function.body * bool - val is_simple_function_params: (Loc.t, Loc.t) Ast.Function.Params.t -> bool - val strict_post_check: env -> strict:bool -> simple:bool -> Loc.t Identifier.t option -> (Loc.t, Loc.t) Ast.Function.Params.t -> unit - val concise_function_body: env -> async:bool -> generator:bool -> (Loc.t, Loc.t) Function.body * bool - val variable: env -> (Loc.t, Loc.t) Statement.t * (Loc.t * Error.t) list - val variable_declaration_list: env -> (Loc.t, Loc.t) Statement.VariableDeclaration.Declarator.t list * (Loc.t * Error.t) list - val let_: env -> (Loc.t, Loc.t) Statement.VariableDeclaration.t * (Loc.t * Error.t) list - val const: env -> (Loc.t, Loc.t) Statement.VariableDeclaration.t * (Loc.t * Error.t) list - val var: env -> (Loc.t, Loc.t) Statement.VariableDeclaration.t * (Loc.t * Error.t) list - val _function: env -> (Loc.t, Loc.t) Statement.t -end + val async : env -> bool -module Declaration - (Parse: Parser_common.PARSER) - (Type: Type_parser.TYPE) -: DECLARATION -= struct - let check_param = - let rec pattern ((env, _) as check_env) (loc, p) = Pattern.(match p with - | Object o -> _object check_env o - | Array arr -> _array check_env arr - | Assignment { Assignment.left; _ } -> pattern check_env left - | Identifier id -> identifier_pattern check_env 
id - | Expression _ -> ( - error_at env (loc, Error.ExpectedPatternFoundExpression); - check_env - ) - ) + val generator : env -> bool + + val variance : env -> bool -> bool -> Loc.t Variance.t option - and _object check_env o = - List.fold_left - object_property - check_env - o.Pattern.Object.properties + val function_params : await:bool -> yield:bool -> env -> (Loc.t, Loc.t) Ast.Function.Params.t - and object_property check_env = Pattern.Object.(function - | Property (_, property) -> Property.( - let check_env = match property.key with - | Identifier id -> identifier_no_dupe_check check_env id - | _ -> check_env in - pattern check_env property.pattern) - | RestProperty (_, { RestProperty.argument; }) -> - pattern check_env argument) + val function_body : env -> async:bool -> generator:bool -> (Loc.t, Loc.t) Function.body * bool - and _array check_env arr = - List.fold_left - array_element - check_env - arr.Pattern.Array.elements + val is_simple_function_params : (Loc.t, Loc.t) Ast.Function.Params.t -> bool - and array_element check_env = Pattern.Array.(function - | None -> check_env - | Some (Element p) -> pattern check_env p - | Some (RestElement (_, { RestElement.argument; })) -> - pattern check_env argument) + val strict_post_check : + env -> + strict:bool -> + simple:bool -> + (Loc.t, Loc.t) Identifier.t option -> + (Loc.t, Loc.t) Ast.Function.Params.t -> + unit - and identifier_pattern check_env {Pattern.Identifier.name=id; _;} = - identifier check_env id + val let_ : env -> (Loc.t, Loc.t) Statement.VariableDeclaration.t * (Loc.t * Parse_error.t) list - and identifier (env, param_names) (loc, name as id) = - if SSet.mem name param_names - then error_at env (loc, Error.StrictParamDupe); - let env, param_names = - identifier_no_dupe_check (env, param_names) id in - env, SSet.add name param_names + val const : env -> (Loc.t, Loc.t) Statement.VariableDeclaration.t * (Loc.t * Parse_error.t) list - and identifier_no_dupe_check (env, param_names) (loc, name) = - if is_restricted name - then strict_error_at env (loc, Error.StrictParamName); - if is_future_reserved name || is_strict_reserved name - then strict_error_at env (loc, Error.StrictReservedWord); - env, param_names + val var : env -> (Loc.t, Loc.t) Statement.VariableDeclaration.t * (Loc.t * Parse_error.t) list - in pattern + val _function : env -> (Loc.t, Loc.t) Statement.t + + val enum_declaration : env -> (Loc.t, Loc.t) Statement.t +end + +module Declaration (Parse : Parser_common.PARSER) (Type : Type_parser.TYPE) : DECLARATION = struct + module Enum = Enum_parser.Enum (Parse) + + let check_param = + let rec pattern ((env, _) as check_env) (loc, p) = + Pattern.( + match p with + | Object o -> _object check_env o + | Array arr -> _array check_env arr + | Identifier id -> identifier_pattern check_env id + | Expression _ -> + error_at env (loc, Parse_error.ExpectedPatternFoundExpression); + check_env) + and _object check_env o = List.fold_left object_property check_env o.Pattern.Object.properties + and object_property check_env = + Pattern.Object.( + function + | Property (_, property) -> + Property.( + let check_env = + match property.key with + | Identifier id -> identifier_no_dupe_check check_env id + | _ -> check_env + in + pattern check_env property.pattern) + | RestProperty (_, { RestProperty.argument }) -> pattern check_env argument) + and _array check_env arr = List.fold_left array_element check_env arr.Pattern.Array.elements + and array_element check_env = + Pattern.Array.( + function + | None -> check_env + | Some (Element 
(_, { Element.argument; default = _ })) -> pattern check_env argument + | Some (RestElement (_, { RestElement.argument })) -> pattern check_env argument) + and identifier_pattern check_env { Pattern.Identifier.name = id; _ } = identifier check_env id + and identifier (env, param_names) ((loc, { Identifier.name; comments = _ }) as id) = + if SSet.mem name param_names then error_at env (loc, Parse_error.StrictParamDupe); + let (env, param_names) = identifier_no_dupe_check (env, param_names) id in + (env, SSet.add name param_names) + and identifier_no_dupe_check (env, param_names) (loc, { Identifier.name; comments = _ }) = + if is_restricted name then strict_error_at env (loc, Parse_error.StrictParamName); + if is_future_reserved name || is_strict_reserved name then + strict_error_at env (loc, Parse_error.StrictReservedWord); + (env, param_names) + in + pattern (* Strict is true if we were already in strict mode or if we are newly in * strict mode due to a directive in the function. * Simple is the IsSimpleParameterList thing from the ES6 spec *) let strict_post_check env ~strict ~simple id (_, { Ast.Function.Params.params; rest }) = - if strict || not simple - then + if strict || not simple then ( (* If we are doing this check due to strict mode than there are two * cases to consider. The first is when we were already in strict mode * and therefore already threw strict errors. In this case we want to @@ -107,240 +102,213 @@ module Declaration * originally parsed in non-strict mode but now are strict. Then we * want to do these checks in strict mode *) let env = - if strict - then env |> with_strict (not (Parser_env.in_strict_mode env)) - else env in + if strict then + env |> with_strict (not (Parser_env.in_strict_mode env)) + else + env + in (match id with - | Some (loc, name) -> - if is_restricted name - then strict_error_at env (loc, Error.StrictFunctionName); - if is_future_reserved name || is_strict_reserved name - then strict_error_at env (loc, Error.StrictReservedWord) + | Some (loc, { Identifier.name; comments = _ }) -> + if is_restricted name then strict_error_at env (loc, Parse_error.StrictFunctionName); + if is_future_reserved name || is_strict_reserved name then + strict_error_at env (loc, Parse_error.StrictReservedWord) | None -> ()); - let acc = List.fold_left check_param (env, SSet.empty) params in + let acc = + List.fold_left + (fun acc (_, { Function.Param.argument; default = _ }) -> check_param acc argument) + (env, SSet.empty) + params + in match rest with - | Some (_, { Function.RestElement.argument }) -> - ignore (check_param acc argument) - | None -> - () + | Some (_, { Function.RestParam.argument }) -> ignore (check_param acc argument) + | None -> () + ) let function_params = - let rec param env = - let left = Parse.pattern env Error.StrictParamName in - (* TODO: shouldn't Parse.pattern recognize Assignment patterns? 
*) - if Peek.token env = T_ASSIGN - then begin - Expect.token env T_ASSIGN; - let right = Parse.assignment env in - let loc = Loc.btwn (fst left) (fst right) in - (loc, Pattern.Assignment { Pattern.Assignment.left; right }) - end else - left - and param_list env acc = - match Peek.token env with - | T_EOF - | T_RPAREN - | T_ELLIPSIS as t -> - let rest = - if t = T_ELLIPSIS then begin - let start_loc = Peek.loc env in - Expect.token env T_ELLIPSIS; - let id = Parse.pattern env Error.StrictParamName in - let loc = Loc.btwn start_loc (fst id) in - Some (loc, { Function.RestElement.argument = id; }) - end else + let rec param = + with_loc (fun env -> + let argument = Parse.pattern env Parse_error.StrictParamName in + let default = + if Peek.token env = T_ASSIGN then ( + Expect.token env T_ASSIGN; + Some (Parse.assignment env) + ) else None in - if Peek.token env <> T_RPAREN - then error env Error.ParameterAfterRestParameter; - { Ast.Function.Params.params = List.rev acc; rest } + { Function.Param.argument; default }) + and param_list env acc = + match Peek.token env with + | (T_EOF | T_RPAREN | T_ELLIPSIS) as t -> + let rest = + if t = T_ELLIPSIS then + let (loc, id) = + with_loc + (fun env -> + Expect.token env T_ELLIPSIS; + Parse.pattern env Parse_error.StrictParamName) + env + in + Some (loc, { Function.RestParam.argument = id }) + else + None + in + if Peek.token env <> T_RPAREN then error env Parse_error.ParameterAfterRestParameter; + { Ast.Function.Params.params = List.rev acc; rest } | _ -> - let the_param = param env in - if Peek.token env <> T_RPAREN - then Expect.token env T_COMMA; - param_list env (the_param::acc) - - in fun ~await ~yield -> with_loc (fun env -> - let env = env - |> with_allow_await await - |> with_allow_yield yield - |> with_in_formal_parameters true - in - Expect.token env T_LPAREN; - let params = param_list env [] in - Expect.token env T_RPAREN; - params - ) + let the_param = param env in + if Peek.token env <> T_RPAREN then Expect.token env T_COMMA; + param_list env (the_param :: acc) + in + fun ~await ~yield -> + with_loc (fun env -> + let env = + env + |> with_allow_await await + |> with_allow_yield yield + |> with_in_formal_parameters true + in + Expect.token env T_LPAREN; + let params = param_list env [] in + Expect.token env T_RPAREN; + params) let function_body env ~async ~generator = let env = enter_function env ~async ~generator in - let loc, block, strict = Parse.function_block_body env in - loc, Function.BodyBlock (loc, block), strict - - let concise_function_body env ~async ~generator = - let env = env |> with_in_function true in - match Peek.token env with - | T_LCURLY -> - let _, body, strict = function_body env ~async ~generator in - body, strict - | _ -> - let env = enter_function env ~async ~generator in - let expr = Parse.assignment env in - Function.BodyExpression expr, in_strict_mode env + let (loc, block, strict) = Parse.function_block_body env in + (Function.BodyBlock (loc, block), strict) let variance env is_async is_generator = let loc = Peek.loc env in - let variance = match Peek.token env with - | T_PLUS -> + let variance = + match Peek.token env with + | T_PLUS -> Eat.token env; Some (loc, Variance.Plus) - | T_MINUS -> + | T_MINUS -> Eat.token env; Some (loc, Variance.Minus) - | _ -> - None + | _ -> None in match variance with | Some (loc, _) when is_async || is_generator -> - error_at env (loc, Error.UnexpectedVariance); - None - | _ -> - variance + error_at env (loc, Parse_error.UnexpectedVariance); + None + | _ -> variance let 
generator env = Expect.maybe env T_MULT - let async env = Expect.maybe env T_ASYNC + (* Returns true and consumes a token if the token is `async` and the token after it is on + the same line (see https://tc39.github.io/ecma262/#sec-async-function-definitions) *) + let async env = + if Peek.token env = T_ASYNC && not (Peek.ith_is_line_terminator ~i:1 env) then + let () = Eat.token env in + true + else + false let is_simple_function_params = let is_simple_param = function - | _, Pattern.Identifier _ -> true - | _ -> false - - in fun (_, { Ast.Function.Params.params; rest }) -> + | (_, { Ast.Function.Param.argument = (_, Pattern.Identifier _); default = None }) -> true + | _ -> false + in + fun (_, { Ast.Function.Params.params; rest }) -> rest = None && List.for_all is_simple_param params - let _function env = - let start_loc = Peek.loc env in - let async = async env in - Expect.token env T_FUNCTION; - let generator = generator env in - let (tparams, id) = ( - match in_export env, Peek.token env with - | true, T_LPAREN -> (None, None) - | true, T_LESS_THAN -> - let typeParams = Type.type_parameter_declaration env in - let id = if Peek.token env = T_LPAREN then None else Some ( - Parse.identifier ~restricted_error:Error.StrictFunctionName env - ) in - (typeParams, id) - | _ -> - let id = - Parse.identifier ~restricted_error:Error.StrictFunctionName env + let _function = + with_loc (fun env -> + let async = async env in + let (sig_loc, (generator, tparams, id, params, return, predicate)) = + with_loc + (fun env -> + Expect.token env T_FUNCTION; + let generator = generator env in + let (tparams, id) = + match (in_export env, Peek.token env) with + | (true, T_LPAREN) -> (None, None) + | (true, T_LESS_THAN) -> + let typeParams = Type.type_parameter_declaration env in + let id = + if Peek.token env = T_LPAREN then + None + else + Some (Parse.identifier ~restricted_error:Parse_error.StrictFunctionName env) + in + (typeParams, id) + | _ -> + let id = Parse.identifier ~restricted_error:Parse_error.StrictFunctionName env in + (Type.type_parameter_declaration env, Some id) + in + let params = function_params ~await:async ~yield:generator env in + let (return, predicate) = Type.annotation_and_predicate_opt env in + (generator, tparams, id, params, return, predicate)) + env in - (Type.type_parameter_declaration env, Some id) - ) in - let params = - let yield, await = match async, generator with - | true, true -> true, true (* proposal-async-iteration/#prod-AsyncGeneratorDeclaration *) - | true, false -> false, allow_await env (* #prod-AsyncFunctionDeclaration *) - | false, true -> true, false (* #prod-GeneratorDeclaration *) - | false, false -> false, false (* #prod-FunctionDeclaration *) - in - function_params ~await ~yield env - in - let (return, predicate) = Type.annotation_and_predicate_opt env in - let _, body, strict = function_body env ~async ~generator in - let simple = is_simple_function_params params in - strict_post_check env ~strict ~simple id params; - let end_loc, expression = Ast.Function.( - match body with - | BodyBlock (loc, _) -> loc, false - | BodyExpression (loc, _) -> loc, true) in - Loc.btwn start_loc end_loc, Statement.(FunctionDeclaration Function.({ - id; - params; - body; - generator; - async; - predicate; - expression; - return; - tparams; - })) + let (body, strict) = function_body env ~async ~generator in + let simple = is_simple_function_params params in + strict_post_check env ~strict ~simple id params; + Statement.FunctionDeclaration + { Function.id; params; body; 
generator; async; predicate; return; tparams; sig_loc }) let variable_declaration_list = let variable_declaration env = - let loc, (decl, errs) = with_loc (fun env -> - let id = Parse.pattern env Error.StrictVarName in - let init, errs = if Peek.token env = T_ASSIGN - then begin - Expect.token env T_ASSIGN; - Some (Parse.assignment env), [] - end else Ast.Pattern.( - match id with - | _, Identifier _ -> None, [] - | loc, _ -> None, [(loc, Error.NoUninitializedDestructuring)] - ) in - Ast.Statement.VariableDeclaration.Declarator.({ - id; - init; - }), errs - ) env in - (loc, decl), errs - - in let rec helper env decls errs = - let decl, errs_ = variable_declaration env in - let decls = decl::decls in + let (loc, (decl, errs)) = + with_loc + (fun env -> + let id = Parse.pattern env Parse_error.StrictVarName in + let (init, errs) = + if Peek.token env = T_ASSIGN then ( + Expect.token env T_ASSIGN; + (Some (Parse.assignment env), []) + ) else + Ast.Pattern.( + match id with + | (_, Identifier _) -> (None, []) + | (loc, _) -> (None, [(loc, Parse_error.NoUninitializedDestructuring)])) + in + (Ast.Statement.VariableDeclaration.Declarator.{ id; init }, errs)) + env + in + ((loc, decl), errs) + in + let rec helper env decls errs = + let (decl, errs_) = variable_declaration env in + let decls = decl :: decls in let errs = errs_ @ errs in - if Peek.token env = T_COMMA - then begin + if Peek.token env = T_COMMA then ( Expect.token env T_COMMA; helper env decls errs - end else - List.rev decls, List.rev errs - - in fun env -> helper env [] [] + ) else + (List.rev decls, List.rev errs) + in + (fun env -> helper env [] []) let declarations token kind env = Expect.token env token; - let declarations, errs = variable_declaration_list env in - Statement.VariableDeclaration.({ - kind; - declarations; - }), errs + let (declarations, errs) = variable_declaration_list env in + (Statement.VariableDeclaration.{ kind; declarations }, errs) let var = declarations T_VAR Statement.VariableDeclaration.Var let const env = let env = env |> with_no_let true in - let variable, errs = - declarations T_CONST Statement.VariableDeclaration.Const env in + let (variable, errs) = declarations T_CONST Statement.VariableDeclaration.Const env in (* Make sure all consts defined are initialized *) - let errs = Statement.VariableDeclaration.( - List.fold_left (fun errs decl -> - match decl with - | loc, { Declarator.init = None; _ } -> - (loc, Error.NoUninitializedConst)::errs - | _ -> errs - ) errs variable.declarations - ) in - variable, List.rev errs + let errs = + Statement.VariableDeclaration.( + List.fold_left + (fun errs decl -> + match decl with + | (loc, { Declarator.init = None; _ }) -> + (loc, Parse_error.NoUninitializedConst) :: errs + | _ -> errs) + errs + variable.declarations) + in + (variable, List.rev errs) let let_ env = let env = env |> with_no_let true in declarations T_LET Statement.VariableDeclaration.Let env - let variable env = - let loc, (decl, errs) = with_loc (fun env -> - let variable, errs = match Peek.token env with - | T_CONST -> const env - | T_LET -> let_ env - | T_VAR -> var env - | _ -> - error_unexpected env; - (* We need to return something. 
This is as good as anything else *) - var env in - Statement.VariableDeclaration variable, errs - ) env in - (loc, decl), errs + let enum_declaration = Enum.declaration end diff --git a/src/parser/dune b/src/parser/dune new file mode 100644 index 00000000000..add19f32075 --- /dev/null +++ b/src/parser/dune @@ -0,0 +1,8 @@ +(library + (name flow_parser) + (wrapped false) + (modules (:standard \ flow_parser_js flow_parser_dot_js)) + (libraries + sedlex + wtf8) + (preprocess (pps ppx_gen_rec ppx_deriving.std sedlex.ppx))) diff --git a/src/parser/enum_common.ml b/src/parser/enum_common.ml new file mode 100644 index 00000000000..7302aac6d67 --- /dev/null +++ b/src/parser/enum_common.ml @@ -0,0 +1,18 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type explicit_type = + | Boolean + | Number + | String + | Symbol + +let string_of_explicit_type = function + | Boolean -> "boolean" + | Number -> "number" + | String -> "string" + | Symbol -> "symbol" diff --git a/src/parser/enum_parser.ml b/src/parser/enum_parser.ml new file mode 100644 index 00000000000..910d46fddc6 --- /dev/null +++ b/src/parser/enum_parser.ml @@ -0,0 +1,286 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Flow_ast +open Parser_common +open Parser_env +open Token +module SSet = Set.Make (String) + +module Enum (Parse : Parser_common.PARSER) : sig + val declaration : env -> (Loc.t, Loc.t) Statement.t +end = struct + open Flow_ast.Statement.EnumDeclaration + + type members = { + boolean_members: (bool, Loc.t) InitializedMember.t list; + number_members: (NumberLiteral.t, Loc.t) InitializedMember.t list; + string_members: (StringLiteral.t, Loc.t) InitializedMember.t list; + defaulted_members: Loc.t DefaultedMember.t list; + } + + type acc = { + members: members; + seen_names: SSet.t; + } + + type init = + | NoInit + | InvalidInit of Loc.t + | BooleanInit of Loc.t * bool + | NumberInit of Loc.t * NumberLiteral.t + | StringInit of Loc.t * StringLiteral.t + + let empty_members = + { boolean_members = []; number_members = []; string_members = []; defaulted_members = [] } + + let empty_acc = { members = empty_members; seen_names = SSet.empty } + + let end_of_member_init env = + match Peek.token env with + | T_COMMA + | T_RCURLY -> + true + | _ -> false + + let member_init env = + let loc = Peek.loc env in + match Peek.token env with + | T_NUMBER { kind; raw } -> + let value = Parse.number env kind raw in + if end_of_member_init env then + NumberInit (loc, { NumberLiteral.value; raw }) + else + InvalidInit loc + | T_STRING (loc, value, raw, octal) -> + if octal then strict_error env Parse_error.StrictOctalLiteral; + Eat.token env; + if end_of_member_init env then + StringInit (loc, { StringLiteral.value; raw }) + else + InvalidInit loc + | (T_TRUE | T_FALSE) as token -> + Eat.token env; + if end_of_member_init env then + BooleanInit (loc, token = T_TRUE) + else + InvalidInit loc + | _ -> + Eat.token env; + InvalidInit loc + + let member_raw = + with_loc (fun env -> + let id = identifier_name env in + let init = + if Expect.maybe env T_ASSIGN then + member_init env + else + NoInit + in + (id, init)) + + let check_explicit_type_mismatch env ~enum_name ~explicit_type ~member_name literal_type loc = + match explicit_type with + | Some 
enum_type when enum_type <> literal_type -> + error_at + env + (loc, Parse_error.EnumInvalidMemberInitializer { enum_name; explicit_type; member_name }) + | _ -> () + + let is_a_to_z c = c >= 'a' && c <= 'z' + + let enum_member ~enum_name ~explicit_type acc env = + let { members; seen_names } = acc in + let (member_loc, (id, init)) = member_raw env in + let (id_loc, { Identifier.name = member_name; _ }) = id in + (* if we parsed an empty name, something has gone wrong and we should abort analysis *) + if member_name = "" then + acc + else ( + if is_a_to_z @@ member_name.[0] then + error_at env (id_loc, Parse_error.EnumInvalidMemberName { enum_name; member_name }); + if SSet.mem member_name seen_names then + error_at env (id_loc, Parse_error.EnumDuplicateMemberName { enum_name; member_name }); + let acc = { acc with seen_names = SSet.add member_name seen_names } in + let check_explicit_type_mismatch = + check_explicit_type_mismatch env ~enum_name ~explicit_type ~member_name + in + match init with + | BooleanInit (loc, value) -> + check_explicit_type_mismatch Enum_common.Boolean loc; + let member = (member_loc, { InitializedMember.id; init = (loc, value) }) in + { acc with members = { members with boolean_members = member :: members.boolean_members } } + | NumberInit (loc, value) -> + check_explicit_type_mismatch Enum_common.Number loc; + let member = (member_loc, { InitializedMember.id; init = (loc, value) }) in + { acc with members = { members with number_members = member :: members.number_members } } + | StringInit (loc, value) -> + check_explicit_type_mismatch Enum_common.String loc; + let member = (member_loc, { InitializedMember.id; init = (loc, value) }) in + { acc with members = { members with string_members = member :: members.string_members } } + | InvalidInit loc -> + error_at + env + (loc, Parse_error.EnumInvalidMemberInitializer { enum_name; explicit_type; member_name }); + acc + | NoInit -> + begin + match explicit_type with + | Some Enum_common.Boolean -> + error_at + env + (member_loc, Parse_error.EnumBooleanMemberNotInitialized { enum_name; member_name }); + acc + | Some Enum_common.Number -> + error_at + env + (member_loc, Parse_error.EnumNumberMemberNotInitialized { enum_name; member_name }); + acc + | Some Enum_common.String + | Some Enum_common.Symbol + | None -> + let member = (member_loc, { DefaultedMember.id }) in + { + acc with + members = { members with defaulted_members = member :: members.defaulted_members }; + } + end + ) + + let rec enum_members ~enum_name ~explicit_type acc env = + match Peek.token env with + | T_RCURLY + | T_EOF -> + { + boolean_members = List.rev acc.members.boolean_members; + number_members = List.rev acc.members.number_members; + string_members = List.rev acc.members.string_members; + defaulted_members = List.rev acc.members.defaulted_members; + } + | _ -> + let acc = enum_member ~enum_name ~explicit_type acc env in + if Peek.token env <> T_RCURLY then Expect.token env T_COMMA; + enum_members ~enum_name ~explicit_type acc env + + let string_body ~env ~enum_name ~is_explicit string_members defaulted_members = + let initialized_len = List.length string_members in + let defaulted_len = List.length defaulted_members in + let defaulted_body () = + StringBody + { StringBody.members = StringBody.Defaulted defaulted_members; explicitType = is_explicit } + in + let initialized_body () = + StringBody + { StringBody.members = StringBody.Initialized string_members; explicitType = is_explicit } + in + match (initialized_len, defaulted_len) with + | (0, 
0) + | (0, _) -> + defaulted_body () + | (_, 0) -> initialized_body () + | _ when defaulted_len > initialized_len -> + List.iter + (fun (loc, _) -> + error_at env (loc, Parse_error.EnumStringMemberInconsistentlyInitailized { enum_name })) + string_members; + defaulted_body () + | _ -> + List.iter + (fun (loc, _) -> + error_at env (loc, Parse_error.EnumStringMemberInconsistentlyInitailized { enum_name })) + defaulted_members; + initialized_body () + + let parse_explicit_type ~enum_name env = + if Expect.maybe env T_OF then ( + Eat.push_lex_mode env Lex_mode.TYPE; + let result = + match Peek.token env with + | T_BOOLEAN_TYPE BOOLEAN -> Some Enum_common.Boolean + | T_NUMBER_TYPE -> Some Enum_common.Number + | T_STRING_TYPE -> Some Enum_common.String + | T_IDENTIFIER { value = "symbol"; _ } -> Some Enum_common.Symbol + | T_IDENTIFIER { value; _ } -> + let supplied_type = Some value in + error env (Parse_error.EnumInvalidExplicitType { enum_name; supplied_type }); + None + | _ -> + error env (Parse_error.EnumInvalidExplicitType { enum_name; supplied_type = None }); + None + in + Eat.token env; + Eat.pop_lex_mode env; + result + ) else + None + + let declaration = + with_loc (fun env -> + Expect.token env T_ENUM; + let id = Parse.identifier env in + let (id_loc, { Identifier.name = enum_name; _ }) = id in + let explicit_type = parse_explicit_type ~enum_name env in + Expect.token env T_LCURLY; + let members = enum_members ~enum_name ~explicit_type empty_acc env in + let body = + match explicit_type with + | Some Enum_common.Boolean -> + BooleanBody { BooleanBody.members = members.boolean_members; explicitType = true } + | Some Enum_common.Number -> + NumberBody { NumberBody.members = members.number_members; explicitType = true } + | Some Enum_common.String -> + string_body + ~env + ~enum_name + ~is_explicit:true + members.string_members + members.defaulted_members + | Some Enum_common.Symbol -> + SymbolBody { SymbolBody.members = members.defaulted_members } + | None -> + let bools_len = List.length members.boolean_members in + let nums_len = List.length members.number_members in + let strs_len = List.length members.string_members in + let defaulted_len = List.length members.defaulted_members in + let empty () = + StringBody { StringBody.members = StringBody.Defaulted []; explicitType = false } + in + begin + match (bools_len, nums_len, strs_len, defaulted_len) with + | (0, 0, 0, 0) -> empty () + | (0, 0, _, _) -> + string_body + ~env + ~enum_name + ~is_explicit:false + members.string_members + members.defaulted_members + | (_, 0, 0, _) when bools_len >= defaulted_len -> + List.iter + (fun (loc, { DefaultedMember.id = (_, { Identifier.name = member_name; _ }) }) -> + error_at + env + (loc, Parse_error.EnumBooleanMemberNotInitialized { enum_name; member_name })) + members.defaulted_members; + BooleanBody { BooleanBody.members = members.boolean_members; explicitType = false } + | (0, _, 0, _) when nums_len >= defaulted_len -> + List.iter + (fun (loc, { DefaultedMember.id = (_, { Identifier.name = member_name; _ }) }) -> + error_at + env + (loc, Parse_error.EnumNumberMemberNotInitialized { enum_name; member_name })) + members.defaulted_members; + NumberBody { NumberBody.members = members.number_members; explicitType = false } + | _ -> + error_at env (id_loc, Parse_error.EnumInconsistentMemberValues { enum_name }); + empty () + end + in + Expect.token env T_RCURLY; + Statement.EnumDeclaration { id; body }) +end diff --git a/src/parser/estree_translator.ml b/src/parser/estree_translator.ml index 
0eacef5132f..024d3d60b02 100644 --- a/src/parser/estree_translator.ml +++ b/src/parser/estree_translator.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,1467 +8,1590 @@ module Ast = Flow_ast module type Config = sig - val include_locs: bool - val include_comments: bool + val include_locs : bool + + val include_comments : bool + + (* FIXME(festevezga, T39098154) Temporary flag while we're migrating from one approach to another *) + val include_interned_comments : bool end -module Translate (Impl : Translator_intf.S) (Config : Config) : (sig +module Translate (Impl : Translator_intf.S) (Config : Config) : sig type t - val program: + + val program : + Offset_utils.t option -> Loc.t * (Loc.t, Loc.t) Ast.Statement.t list * (Loc.t * Ast.Comment.t') list -> t - val expression: (Loc.t, Loc.t) Ast.Expression.t -> t - val errors: (Loc.t * Parse_error.t) list -> t -end with type t = Impl.t) = struct + + val expression : Offset_utils.t option -> (Loc.t, Loc.t) Ast.Expression.t -> t + + val errors : (Loc.t * Parse_error.t) list -> t +end +with type t = Impl.t = struct type t = Impl.t + type functions = { + program: Loc.t * (Loc.t, Loc.t) Ast.Statement.t list * (Loc.t * Ast.Comment.t') list -> t; + expression: (Loc.t, Loc.t) Ast.Expression.t -> t; + } + open Ast open Impl - let array_of_list fn list = array (List.map fn list) - let int x = number (float x) + let array_of_list fn list = array (List.rev_map fn list |> List.rev) + let option f = function | Some v -> f v | None -> null - let position p = - obj [ - "line", int p.Loc.line; - "column", int p.Loc.column; - ] + let hint f = function + | Ast.Type.Available v -> f v + | Ast.Type.Missing _ -> null - let loc location = - let source = match Loc.source location with - | Some File_key.LibFile src - | Some File_key.SourceFile src - | Some File_key.JsonFile src - | Some File_key.ResourceFile src -> string src - | Some File_key.Builtins -> string "(global)" - | None -> null - in - obj [ - "source", source; - "start", position location.Loc.start; - "end", position location.Loc._end; - ] - - let range location = Loc.( - array [ - int location.start.offset; - int location._end.offset; - ] - ) + let position p = obj [("line", int p.Loc.line); ("column", int p.Loc.column)] - let node _type location props = - let prefix = - if Config.include_locs then - (* sorted backwards due to the rev_append below *) - [ "range", range location; - "loc", loc location; - "type", string _type; ] - else - [ "type", string _type; ] + let loc location = + let source = + match Loc.source location with + | Some (File_key.LibFile src) + | Some (File_key.SourceFile src) + | Some (File_key.JsonFile src) + | Some (File_key.ResourceFile src) -> + string src + | Some File_key.Builtins -> string "(global)" + | None -> null in - obj (List.rev_append prefix props) + obj + [ + ("source", source); + ("start", position location.Loc.start); + ("end", position location.Loc._end); + ] let errors l = let error (location, e) = - obj [ - "loc", loc location; - "message", string (Parse_error.PP.error e); - ] - in array_of_list error l - - let rec program (loc, statements, comments) = - let body = statement_list statements in - let props = - if Config.include_comments then [ "body", body; "comments", comment_list comments; ] - else [ "body", body; ] + obj [("loc", loc location); ("message", string 
(Parse_error.PP.error e))] in - node "Program" loc props - - and statement_list statements = array_of_list statement statements - and statement = Statement.(function - | loc, Empty -> node "EmptyStatement" loc [] - | loc, Block b -> block (loc, b) - | loc, Expression expr -> - node "ExpressionStatement" loc [ - "expression", expression expr.Expression.expression; - "directive", option string expr.Expression.directive; - ] - | loc, If _if -> If.( - node "IfStatement" loc [ - "test", expression _if.test; - "consequent", statement _if.consequent; - "alternate", option statement _if.alternate; - ] - ) - | loc, Labeled labeled -> Labeled.( - node "LabeledStatement" loc [ - "label", identifier labeled.label; - "body", statement labeled.body; - ] - ) - | loc, Break break -> - node "BreakStatement" loc [ - "label", option identifier break.Break.label; - ] - | loc, Continue continue -> - node "ContinueStatement" loc [ - "label", option identifier continue.Continue.label; - ] - | loc, With _with -> With.( - node "WithStatement" loc [ - "object", expression _with._object; - "body", statement _with.body; - ] - ) - | loc, TypeAlias alias -> type_alias (loc, alias) - | loc, OpaqueType opaque_t -> opaque_type ~declare:false (loc, opaque_t) - | loc, Switch switch -> Switch.( - node "SwitchStatement" loc [ - "discriminant", expression switch.discriminant; - "cases", array_of_list case switch.cases; - ] - ) - | loc, Return return -> - node "ReturnStatement" loc [ - "argument", option expression return.Return.argument; - ] - | loc, Throw throw -> - node "ThrowStatement" loc [ - "argument", expression throw.Throw.argument; - ] - | loc, Try _try -> Try.( - node "TryStatement" loc [ - "block", block _try.block; - "handler", option catch _try.handler; - "finalizer", option block _try.finalizer; - ] - ) - | loc, While _while -> While.( - node "WhileStatement" loc [ - "test", expression _while.test; - "body", statement _while.body; - ] - ) - | loc, DoWhile dowhile -> DoWhile.( - node "DoWhileStatement" loc [ - "body", statement dowhile.body; - "test", expression dowhile.test; - ] - ) - | loc, For _for -> For.( - let init = function - | InitDeclaration init -> variable_declaration init - | InitExpression expr -> expression expr + array_of_list error l + + (* This is basically a lightweight class. We close over some state and then return more than one + * function that can access that state. We don't need most class features though, so let's avoid + * the dynamic dispatch and the disruptive change. 
*) + let make_functions offset_table = + let range offset_table location = + Loc.( + array + [ + int (Offset_utils.offset offset_table location.start); + int (Offset_utils.offset offset_table location._end); + ]) + in + let rec node _type location ?comments props = + let locs = + if Config.include_locs then + (* sorted backwards due to the rev_append below *) + let range = + match offset_table with + | Some table -> [("range", range table location)] + | None -> [] + in + range @ [("loc", loc location)] + else + [] in - node "ForStatement" loc [ - "init", option init _for.init; - "test", option expression _for.test; - "update", option expression _for.update; - "body", statement _for.body; - ] - ) - | loc, ForIn forin -> ForIn.( - let left = match forin.left with - | LeftDeclaration left -> variable_declaration left - | LeftPattern left -> pattern left + let comments = + Ast.Syntax.( + match (Config.include_interned_comments, comments) with + | (true, Some c) -> + (match c with + | { leading = _ :: _ as l; trailing = _ :: _ as t; _ } -> + [("leadingComments", comment_list l); ("trailingComments", comment_list t)] + | { leading = _ :: _ as l; trailing = []; _ } -> [("leadingComments", comment_list l)] + | { leading = []; trailing = _ :: _ as t; _ } -> [("trailingComments", comment_list t)] + | _ -> []) + | (_, _) -> []) in - node "ForInStatement" loc [ - "left", left; - "right", expression forin.right; - "body", statement forin.body; - "each", bool forin.each; - ] - ) - | loc, ForOf forof -> ForOf.( - let type_ = - if forof.async - then "ForAwaitStatement" - else "ForOfStatement" + let prefix = locs @ comments @ [("type", string _type)] in + obj (List.rev_append prefix props) + and program (loc, statements, comments) = + let body = statement_list statements in + let props = + if Config.include_comments then + [("body", body); ("comments", comment_list comments)] + else + [("body", body)] in - let left = match forof.left with - | LeftDeclaration left -> variable_declaration left - | LeftPattern left -> pattern left + node "Program" loc props + and statement_list statements = array_of_list statement statements + and statement = + Statement.( + function + | (loc, Empty) -> node "EmptyStatement" loc [] + | (loc, Block b) -> block (loc, b) + | (loc, Expression { Expression.expression = expr; directive }) -> + node + "ExpressionStatement" + loc + [("expression", expression expr); ("directive", option string directive)] + | (loc, If { If.test; consequent; alternate; comments }) -> + node + ?comments + "IfStatement" + loc + [ + ("test", expression test); + ("consequent", statement consequent); + ("alternate", option statement alternate); + ] + | (loc, Labeled { Labeled.label; body }) -> + node "LabeledStatement" loc [("label", identifier label); ("body", statement body)] + | (loc, Break { Break.label; comments }) -> + node ?comments "BreakStatement" loc [("label", option identifier label)] + | (loc, Continue { Continue.label; comments }) -> + node ?comments "ContinueStatement" loc [("label", option identifier label)] + | (loc, With { With._object; body }) -> + node "WithStatement" loc [("object", expression _object); ("body", statement body)] + | (loc, TypeAlias alias) -> type_alias (loc, alias) + | (loc, OpaqueType opaque_t) -> opaque_type ~declare:false (loc, opaque_t) + | (loc, Switch { Switch.discriminant; cases }) -> + node + "SwitchStatement" + loc + [("discriminant", expression discriminant); ("cases", array_of_list case cases)] + | (loc, Return { Return.argument; comments }) -> + node 
?comments "ReturnStatement" loc [("argument", option expression argument)] + | (loc, Throw { Throw.argument }) -> + node "ThrowStatement" loc [("argument", expression argument)] + | (loc, Try { Try.block = block_; handler; finalizer; comments }) -> + node + ?comments + "TryStatement" + loc + [ + ("block", block block_); + ("handler", option catch handler); + ("finalizer", option block finalizer); + ] + | (loc, While { While.test; body }) -> + node "WhileStatement" loc [("test", expression test); ("body", statement body)] + | (loc, DoWhile { DoWhile.body; test; comments }) -> + node + ?comments + "DoWhileStatement" + loc + [("body", statement body); ("test", expression test)] + | (loc, For { For.init = init_; test; update; body }) -> + let init = function + | For.InitDeclaration init -> variable_declaration init + | For.InitExpression expr -> expression expr + in + node + "ForStatement" + loc + [ + ("init", option init init_); + ("test", option expression test); + ("update", option expression update); + ("body", statement body); + ] + | (loc, ForIn { ForIn.left; right; body; each }) -> + let left = + match left with + | ForIn.LeftDeclaration left -> variable_declaration left + | ForIn.LeftPattern left -> pattern left + in + node + "ForInStatement" + loc + [ + ("left", left); + ("right", expression right); + ("body", statement body); + ("each", bool each); + ] + | (loc, ForOf { ForOf.async; left; right; body }) -> + let type_ = + if async then + "ForAwaitStatement" + else + "ForOfStatement" + in + let left = + match left with + | ForOf.LeftDeclaration left -> variable_declaration left + | ForOf.LeftPattern left -> pattern left + in + node type_ loc [("left", left); ("right", expression right); ("body", statement body)] + | (loc, EnumDeclaration enum) -> enum_declaration (loc, enum) + | (loc, Debugger) -> node "DebuggerStatement" loc [] + | (loc, ClassDeclaration c) -> class_declaration (loc, c) + | (loc, InterfaceDeclaration i) -> interface_declaration (loc, i) + | (loc, VariableDeclaration var) -> variable_declaration (loc, var) + | (loc, FunctionDeclaration fn) -> function_declaration (loc, fn) + | (loc, DeclareVariable d) -> declare_variable (loc, d) + | (loc, DeclareFunction d) -> declare_function (loc, d) + | (loc, DeclareClass d) -> declare_class (loc, d) + | (loc, DeclareInterface i) -> declare_interface (loc, i) + | (loc, DeclareTypeAlias a) -> declare_type_alias (loc, a) + | (loc, DeclareOpaqueType t) -> opaque_type ~declare:true (loc, t) + | (loc, DeclareModule { DeclareModule.id; body; kind }) -> + let id = + match id with + | DeclareModule.Literal lit -> string_literal lit + | DeclareModule.Identifier id -> identifier id + in + node + "DeclareModule" + loc + [ + ("id", id); + ("body", block body); + ( "kind", + match kind with + | DeclareModule.CommonJS _ -> string "CommonJS" + | DeclareModule.ES _ -> string "ES" ); + ] + | ( loc, + DeclareExportDeclaration + { DeclareExportDeclaration.specifiers; declaration; default; source } ) -> + begin + match specifiers with + | Some (ExportNamedDeclaration.ExportBatchSpecifier (_, None)) -> + node "DeclareExportAllDeclaration" loc [("source", option string_literal source)] + | _ -> + let declaration = + match declaration with + | Some (DeclareExportDeclaration.Variable v) -> declare_variable v + | Some (DeclareExportDeclaration.Function f) -> declare_function f + | Some (DeclareExportDeclaration.Class c) -> declare_class c + | Some (DeclareExportDeclaration.DefaultType t) -> _type t + | Some (DeclareExportDeclaration.NamedType t) -> 
type_alias t + | Some (DeclareExportDeclaration.NamedOpaqueType t) -> opaque_type ~declare:true t + | Some (DeclareExportDeclaration.Interface i) -> interface_declaration i + | None -> null + in + node + "DeclareExportDeclaration" + loc + [ + ( "default", + bool + (match default with + | Some _ -> true + | None -> false) ); + ("declaration", declaration); + ("specifiers", export_specifiers specifiers); + ("source", option string_literal source); + ] + end + | (loc, DeclareModuleExports annot) -> + node "DeclareModuleExports" loc [("typeAnnotation", type_annotation annot)] + | ( loc, + ExportNamedDeclaration + { ExportNamedDeclaration.specifiers; declaration; source; exportKind } ) -> + begin + match specifiers with + | Some (ExportNamedDeclaration.ExportBatchSpecifier (_, None)) -> + node + "ExportAllDeclaration" + loc + [ + ("source", option string_literal source); + ("exportKind", string (export_kind exportKind)); + ] + | _ -> + node + "ExportNamedDeclaration" + loc + [ + ("declaration", option statement declaration); + ("specifiers", export_specifiers specifiers); + ("source", option string_literal source); + ("exportKind", string (export_kind exportKind)); + ] + end + | ( loc, + ExportDefaultDeclaration + { + ExportDefaultDeclaration.declaration; + default = _ (* TODO: confirm we shouldn't use this *); + } ) -> + let declaration = + match declaration with + | ExportDefaultDeclaration.Declaration stmt -> statement stmt + | ExportDefaultDeclaration.Expression expr -> expression expr + in + node + "ExportDefaultDeclaration" + loc + [ + ("declaration", declaration); + ("exportKind", string (export_kind Statement.ExportValue)); + ] + | (loc, ImportDeclaration { ImportDeclaration.specifiers; default; importKind; source }) -> + let specifiers = + match specifiers with + | Some (ImportDeclaration.ImportNamedSpecifiers specifiers) -> + List.map + (fun { ImportDeclaration.local; remote; kind } -> + import_named_specifier local remote kind) + specifiers + | Some (ImportDeclaration.ImportNamespaceSpecifier id) -> + [import_namespace_specifier id] + | None -> [] + in + let specifiers = + match default with + | Some default -> import_default_specifier default :: specifiers + | None -> specifiers + in + let import_kind = + match importKind with + | ImportDeclaration.ImportType -> "type" + | ImportDeclaration.ImportTypeof -> "typeof" + | ImportDeclaration.ImportValue -> "value" + in + node + "ImportDeclaration" + loc + [ + ("specifiers", array specifiers); + ("source", string_literal source); + ("importKind", string import_kind); + ]) + and expression = + Expression.( + function + | (loc, This) -> node "ThisExpression" loc [] + | (loc, Super) -> node "Super" loc [] + | (loc, Array { Array.elements; comments }) -> + node + ?comments + "ArrayExpression" + loc + [("elements", array_of_list (option expression_or_spread) elements)] + | (loc, Object { Object.properties; comments }) -> + node + ?comments + "ObjectExpression" + loc + [("properties", array_of_list object_property properties)] + | (loc, Function _function) -> function_expression (loc, _function) + | ( loc, + ArrowFunction + { + Function.params; + async; + predicate = predicate_; + tparams; + return; + body; + sig_loc = _; + (* TODO: arrows shouldn't have these: *) + id = _; + generator = _; + } ) -> + let (body, expression) = + match body with + | Function.BodyBlock b -> (block b, false) + | Function.BodyExpression expr -> (expression expr, true) + in + let return = + match return with + | Ast.Type.Missing _ -> None + | Ast.Type.Available t -> 
Some t + in + node + "ArrowFunctionExpression" + loc + [ + ("id", null); + ("params", function_params params); + ("body", body); + ("async", bool async); + ("generator", bool false); + ("predicate", option predicate predicate_); + ("expression", bool expression); + ("returnType", option type_annotation return); + ("typeParameters", option type_parameter_declaration tparams); + ] + | (loc, Sequence { Sequence.expressions }) -> + node "SequenceExpression" loc [("expressions", array_of_list expression expressions)] + | (loc, Unary { Unary.operator; argument; comments }) -> + Unary.( + (match operator with + | Await -> + (* await is defined as a separate expression in ast-types + * + * TODO + * 1) Send a PR to ast-types + * (https://github.com/benjamn/ast-types/issues/113) + * 2) Output a UnaryExpression + * 3) Modify the esprima test runner to compare AwaitExpression and + * our UnaryExpression + * *) + node ?comments "AwaitExpression" loc [("argument", expression argument)] + | _ -> + let operator = + match operator with + | Minus -> "-" + | Plus -> "+" + | Not -> "!" + | BitNot -> "~" + | Typeof -> "typeof" + | Void -> "void" + | Delete -> "delete" + | Await -> failwith "matched above" + in + node + ?comments + "UnaryExpression" + loc + [ + ("operator", string operator); + ("prefix", bool true); + ("argument", expression argument); + ])) + | (loc, Binary { Binary.left; operator; right }) -> + node + "BinaryExpression" + loc + [ + ("operator", string (Flow_ast_utils.string_of_binary_operator operator)); + ("left", expression left); + ("right", expression right); + ] + | (loc, TypeCast { TypeCast.expression = expr; annot }) -> + node + "TypeCastExpression" + loc + [("expression", expression expr); ("typeAnnotation", type_annotation annot)] + | (loc, Assignment { Assignment.left; operator; right }) -> + let operator = + match operator with + | None -> "=" + | Some op -> Flow_ast_utils.string_of_assignment_operator op + in + node + "AssignmentExpression" + loc + [("operator", string operator); ("left", pattern left); ("right", expression right)] + | (loc, Update { Update.operator; argument; prefix }) -> + let operator = + match operator with + | Update.Increment -> "++" + | Update.Decrement -> "--" + in + node + "UpdateExpression" + loc + [ + ("operator", string operator); + ("argument", expression argument); + ("prefix", bool prefix); + ] + | (loc, Logical { Logical.left; operator; right }) -> + let operator = + match operator with + | Logical.Or -> "||" + | Logical.And -> "&&" + | Logical.NullishCoalesce -> "??" 
+ in + node + "LogicalExpression" + loc + [("operator", string operator); ("left", expression left); ("right", expression right)] + | (loc, Conditional { Conditional.test; consequent; alternate }) -> + node + "ConditionalExpression" + loc + [ + ("test", expression test); + ("consequent", expression consequent); + ("alternate", expression alternate); + ] + | (loc, New { New.callee; targs; arguments; comments }) -> + node + ?comments + "NewExpression" + loc + [ + ("callee", expression callee); + ("typeArguments", option type_parameter_instantiation_with_implicit targs); + ("arguments", array_of_list expression_or_spread arguments); + ] + | (loc, Call call) -> node "CallExpression" loc (call_node_properties call) + | (loc, OptionalCall { OptionalCall.call; optional }) -> + node + "OptionalCallExpression" + loc + (call_node_properties call @ [("optional", bool optional)]) + | (loc, Member member) -> node "MemberExpression" loc (member_node_properties member) + | (loc, OptionalMember { OptionalMember.member; optional }) -> + node + "OptionalMemberExpression" + loc + (member_node_properties member @ [("optional", bool optional)]) + | (loc, Yield { Yield.argument; delegate; comments }) -> + node + ?comments + "YieldExpression" + loc + [("argument", option expression argument); ("delegate", bool delegate)] + | (loc, Comprehension { Comprehension.blocks; filter }) -> + node + "ComprehensionExpression" + loc + [ + ("blocks", array_of_list comprehension_block blocks); + ("filter", option expression filter); + ] + | (loc, Generator { Generator.blocks; filter }) -> + node + "GeneratorExpression" + loc + [ + ("blocks", array_of_list comprehension_block blocks); + ("filter", option expression filter); + ] + | (_loc, Identifier id) -> identifier id + | (loc, Literal ({ Literal.value = Ast.Literal.BigInt _; _ } as lit)) -> + bigint_literal (loc, lit) + | (loc, Literal lit) -> literal (loc, lit) + | (loc, TemplateLiteral lit) -> template_literal (loc, lit) + | (loc, TaggedTemplate tagged) -> tagged_template (loc, tagged) + | (loc, Class c) -> class_expression (loc, c) + | (loc, JSXElement element) -> jsx_element (loc, element) + | (loc, JSXFragment fragment) -> jsx_fragment (loc, fragment) + | (loc, MetaProperty { MetaProperty.meta; property }) -> + node "MetaProperty" loc [("meta", identifier meta); ("property", identifier property)] + | (loc, Import arg) -> + node + "CallExpression" + loc + [ + ("callee", node "Import" (Loc.btwn loc (fst arg)) []); + ("arguments", array_of_list expression [arg]); + ]) + and function_declaration + ( loc, + { + Function.id; + params; + async; + generator; + predicate = predicate_; + tparams; + return; + body; + sig_loc = _; + } ) = + let body = + match body with + | Function.BodyBlock b -> b + | Function.BodyExpression _ -> + failwith "Unexpected FunctionDeclaration with BodyExpression" in - node type_ loc [ - "left", left; - "right", expression forof.right; - "body", statement forof.body; - ] - ) - | loc, Debugger -> node "DebuggerStatement" loc [] - | loc, ClassDeclaration c -> class_declaration (loc, c) - | loc, InterfaceDeclaration i -> interface_declaration (loc, i) - | loc, VariableDeclaration var -> variable_declaration (loc, var) - | loc, FunctionDeclaration fn -> function_declaration (loc, fn) - | loc, DeclareVariable d -> declare_variable (loc, d) - | loc, DeclareFunction d -> declare_function (loc, d) - | loc, DeclareClass d -> declare_class (loc, d) - | loc, DeclareInterface i -> declare_interface (loc, i) - | loc, DeclareTypeAlias a -> declare_type_alias (loc, 
a) - | loc, DeclareOpaqueType t -> opaque_type ~declare:true (loc, t) - | loc, DeclareModule m -> DeclareModule.( - let id = match m.id with - | Literal lit -> string_literal lit - | Identifier id -> identifier id + let return = + match return with + | Ast.Type.Missing _ -> None + | Ast.Type.Available t -> Some t in - node "DeclareModule" loc [ - "id", id; - "body", block m.body; - "kind", ( - match m.kind with - | DeclareModule.CommonJS _ -> string "CommonJS" - | DeclareModule.ES _ -> string "ES" - ) - ] - ) - | loc, DeclareExportDeclaration export -> DeclareExportDeclaration.( - match export.specifiers with - | Some (ExportNamedDeclaration.ExportBatchSpecifier (_, None)) -> - node "DeclareExportAllDeclaration" loc [ - "source", option string_literal export.source; - ] - | _ -> - let declaration = match export.declaration with - | Some (Variable v) -> declare_variable v - | Some (Function f) -> declare_function f - | Some (Class c) -> declare_class c - | Some (DefaultType t) -> _type t - | Some (NamedType t) -> type_alias t - | Some (NamedOpaqueType t) -> opaque_type ~declare:true t - | Some (Interface i) -> interface_declaration i - | None -> null - in - node "DeclareExportDeclaration" loc [ - "default", bool ( - match export.default with - | Some _ -> true - | None -> false); - "declaration", declaration; - "specifiers", export_specifiers export.specifiers; - "source", option string_literal export.source; - ] - ) - | loc, DeclareModuleExports annot -> - node "DeclareModuleExports" loc [ - "typeAnnotation", type_annotation annot + node + "FunctionDeclaration" + loc + [ + (* estree hasn't come around to the idea that function decls can have + optional ids, but acorn, babel, espree and esprima all have, so let's + do it too. see https://github.com/estree/estree/issues/98 *) + ("id", option identifier id); + ("params", function_params params); + ("body", block body); + ("async", bool async); + ("generator", bool generator); + ("predicate", option predicate predicate_); + ("expression", bool false); + ("returnType", option type_annotation return); + ("typeParameters", option type_parameter_declaration tparams); ] - | loc, ExportNamedDeclaration export -> ExportNamedDeclaration.( - match export.specifiers with - | Some (ExportBatchSpecifier (_, None)) -> - node "ExportAllDeclaration" loc [ - "source", option string_literal export.source; - "exportKind", string (export_kind export.exportKind); - ] - | _ -> - node "ExportNamedDeclaration" loc [ - "declaration", option statement export.declaration; - "specifiers", export_specifiers export.specifiers; - "source", option string_literal export.source; - "exportKind", string (export_kind export.exportKind); - ] - ) - | loc, ExportDefaultDeclaration export -> ExportDefaultDeclaration.( - let declaration = match export.declaration with - | Declaration stmt -> statement stmt - | ExportDefaultDeclaration.Expression expr -> expression expr - in - node "ExportDefaultDeclaration" loc [ - "declaration", declaration; - "exportKind", string (export_kind Statement.ExportValue); + and function_expression + ( loc, + { + Function.id; + params; + async; + generator; + predicate = predicate_; + tparams; + return; + body; + sig_loc = _; + } ) = + let body = + match body with + | Function.BodyBlock b -> b + | Function.BodyExpression _ -> failwith "Unexpected FunctionExpression with BodyExpression" + in + let return = + match return with + | Ast.Type.Missing _ -> None + | Ast.Type.Available t -> Some t + in + node + "FunctionExpression" + loc + [ + ("id", 
option identifier id); + ("params", function_params params); + ("body", block body); + ("async", bool async); + ("generator", bool generator); + ("predicate", option predicate predicate_); + ("expression", bool false); + ("returnType", option type_annotation return); + ("typeParameters", option type_parameter_declaration tparams); ] - ) - | loc, ImportDeclaration import -> ImportDeclaration.( - let specifiers = match import.specifiers with - | Some (ImportNamedSpecifiers specifiers) -> - List.map (fun {local; remote; kind;} -> - import_named_specifier local remote kind - ) specifiers - | Some (ImportNamespaceSpecifier id) -> - [import_namespace_specifier id] - | None -> - [] - in - - let specifiers = match import.default with - | Some default -> (import_default_specifier default)::specifiers - | None -> specifiers - in - - let import_kind = match import.importKind with - | ImportType -> "type" - | ImportTypeof -> "typeof" - | ImportValue -> "value" - in - - node "ImportDeclaration" loc [ - "specifiers", array specifiers; - "source", string_literal import.source; - "importKind", string (import_kind); + and identifier (loc, { Identifier.name; comments }) = + node + "Identifier" + ?comments + loc + [("name", string name); ("typeAnnotation", null); ("optional", bool false)] + and private_name (loc, name) = node "PrivateName" loc [("id", identifier name)] + and pattern_identifier + loc { Pattern.Identifier.name = (_, { Identifier.name; comments = _ }); annot; optional } = + node + "Identifier" + loc + [ + ("name", string name); + ("typeAnnotation", hint type_annotation annot); + ("optional", bool optional); ] - ) - ) - - and expression = Expression.(function - | loc, This -> node "ThisExpression" loc [] - | loc, Super -> node "Super" loc [] - | loc, Array arr -> - node "ArrayExpression" loc [ - "elements", array_of_list (option expression_or_spread) arr.Array.elements; + and case (loc, { Statement.Switch.Case.test; consequent }) = + node + "SwitchCase" + loc + [("test", option expression test); ("consequent", array_of_list statement consequent)] + and catch (loc, { Statement.Try.CatchClause.param; body }) = + node "CatchClause" loc [("param", option pattern param); ("body", block body)] + and block (loc, { Statement.Block.body }) = + node "BlockStatement" loc [("body", statement_list body)] + and declare_variable (loc, { Statement.DeclareVariable.id; annot }) = + let id_loc = + Loc.btwn + (fst id) + (match annot with + | Ast.Type.Available annot -> fst annot + | Ast.Type.Missing _ -> fst id) + in + node + "DeclareVariable" + loc + [ + ( "id", + pattern_identifier id_loc { Pattern.Identifier.name = id; annot; optional = false } ); ] - | loc, Object _object -> - node "ObjectExpression" loc [ - "properties", array_of_list object_property _object.Object.properties; + and declare_function (loc, { Statement.DeclareFunction.id; annot; predicate = predicate_ }) = + let id_loc = Loc.btwn (fst id) (fst annot) in + node + "DeclareFunction" + loc + [ + ( "id", + pattern_identifier + id_loc + { Pattern.Identifier.name = id; annot = Ast.Type.Available annot; optional = false } + ); + ("predicate", option predicate predicate_); ] - | loc, Function _function -> function_expression (loc, _function) - | loc, ArrowFunction arrow -> Function.( - let body = (match arrow.body with - | BodyBlock b -> block b - | BodyExpression expr -> expression expr) - in - let return = match arrow.return with - | Missing _ -> None - | Available t -> Some t in - node "ArrowFunctionExpression" loc [ - "id", option identifier 
arrow.id; - "params", function_params arrow.params; - "body", body; - "async", bool arrow.async; - "generator", bool arrow.generator; - "predicate", option predicate arrow.predicate; - "expression", bool arrow.expression; - "returnType", option type_annotation return; - "typeParameters", option type_parameter_declaration arrow.tparams; + and declare_class + (loc, { Statement.DeclareClass.id; tparams; body; extends; implements; mixins }) = + (* TODO: extends shouldn't return an array *) + let extends = + match extends with + | Some extends -> array [interface_extends extends] + | None -> array [] + in + node + "DeclareClass" + loc + [ + ("id", identifier id); + ("typeParameters", option type_parameter_declaration tparams); + ("body", object_type ~include_inexact:false body); + ("extends", extends); + ("implements", array_of_list class_implements implements); + ("mixins", array_of_list interface_extends mixins); ] - ) - | loc, Sequence sequence -> - node "SequenceExpression" loc [ - "expressions", array_of_list expression sequence.Sequence.expressions; + and declare_interface (loc, { Statement.Interface.id; tparams; body; extends }) = + node + "DeclareInterface" + loc + [ + ("id", identifier id); + ("typeParameters", option type_parameter_declaration tparams); + ("body", object_type ~include_inexact:false body); + ("extends", array_of_list interface_extends extends); ] - | loc, Unary unary -> Unary.( - match unary.operator with - | Await -> - (* await is defined as a separate expression in ast-types - * - * TODO - * 1) Send a PR to ast-types - * (https://github.com/benjamn/ast-types/issues/113) - * 2) Output a UnaryExpression - * 3) Modify the esprima test runner to compare AwaitExpression and - * our UnaryExpression - * *) - node "AwaitExpression" loc [ - "argument", expression unary.argument; - ] - | _ -> begin - let operator = match unary.operator with - | Minus -> "-" - | Plus -> "+" - | Not -> "!" - | BitNot -> "~" - | Typeof -> "typeof" - | Void -> "void" - | Delete -> "delete" - | Await -> failwith "matched above" - in - node "UnaryExpression" loc [ - "operator", string operator; - "prefix", bool unary.prefix; - "argument", expression unary.argument; - ] - end - ) - | loc, Binary { Binary.left; operator; right } -> - node "BinaryExpression" loc [ - "operator", string (Ast_utils.string_of_binary_operator operator); - "left", expression left; - "right", expression right; + and export_kind = function + | Statement.ExportType -> "type" + | Statement.ExportValue -> "value" + and export_specifiers = + Statement.ExportNamedDeclaration.( + function + | Some (ExportSpecifiers specifiers) -> array_of_list export_specifier specifiers + | Some (ExportBatchSpecifier (loc, Some name)) -> + array [node "ExportNamespaceSpecifier" loc [("exported", identifier name)]] + | Some (ExportBatchSpecifier (_, None)) -> + (* this should've been handled by callers, since this represents an + ExportAllDeclaration, not a specifier. 
*) + array [] + | None -> array []) + and declare_type_alias (loc, { Statement.TypeAlias.id; tparams; right }) = + node + "DeclareTypeAlias" + loc + [ + ("id", identifier id); + ("typeParameters", option type_parameter_declaration tparams); + ("right", _type right); ] - | loc, TypeCast typecast -> TypeCast.( - node "TypeCastExpression" loc [ - "expression", expression typecast.expression; - "typeAnnotation", type_annotation typecast.annot; + and type_alias (loc, { Statement.TypeAlias.id; tparams; right }) = + node + "TypeAlias" + loc + [ + ("id", identifier id); + ("typeParameters", option type_parameter_declaration tparams); + ("right", _type right); ] - ) - | loc, Assignment assignment -> Assignment.( - let operator = match assignment.operator with - | Assign -> "=" - | PlusAssign -> "+=" - | MinusAssign -> "-=" - | MultAssign -> "*=" - | ExpAssign -> "**=" - | DivAssign -> "/=" - | ModAssign -> "%=" - | LShiftAssign -> "<<=" - | RShiftAssign -> ">>=" - | RShift3Assign -> ">>>=" - | BitOrAssign -> "|=" - | BitXorAssign -> "^=" - | BitAndAssign -> "&=" - in - node "AssignmentExpression" loc [ - "operator", string operator; - "left", pattern assignment.left; - "right", expression assignment.right; + and opaque_type ~declare (loc, { Statement.OpaqueType.id; tparams; impltype; supertype }) = + let name = + if declare then + "DeclareOpaqueType" + else + "OpaqueType" + in + node + name + loc + [ + ("id", identifier id); + ("typeParameters", option type_parameter_declaration tparams); + ("impltype", option _type impltype); + ("supertype", option _type supertype); ] - ) - | loc, Update update -> Update.( - let operator = match update.operator with - | Increment -> "++" - | Decrement -> "--" - in - node "UpdateExpression" loc [ - "operator", string operator; - "argument", expression update.argument; - "prefix", bool update.prefix; + and class_declaration ast = class_helper "ClassDeclaration" ast + and class_expression ast = class_helper "ClassExpression" ast + and class_helper + node_type (loc, { Class.id; extends; body; tparams; implements; classDecorators }) = + let (super, super_targs) = + match extends with + | Some (_, { Class.Extends.expr; targs }) -> (Some expr, targs) + | None -> (None, None) + in + node + node_type + loc + [ + (* estree hasn't come around to the idea that class decls can have + optional ids, but acorn, babel, espree and esprima all have, so let's + do it too. see https://github.com/estree/estree/issues/98 *) + ("id", option identifier id); + ("body", class_body body); + ("typeParameters", option type_parameter_declaration tparams); + ("superClass", option expression super); + ("superTypeParameters", option type_parameter_instantiation super_targs); + ("implements", array_of_list class_implements implements); + ("decorators", array_of_list class_decorator classDecorators); ] - ) - | loc, Logical logical -> Logical.( - let operator = match logical.operator with - | Or -> "||" - | And -> "&&" - | NullishCoalesce -> "??" 
- in - node "LogicalExpression" loc [ - "operator", string operator; - "left", expression logical.left; - "right", expression logical.right; + and class_decorator (loc, { Class.Decorator.expression = expr }) = + node "Decorator" loc [("expression", expression expr)] + and class_implements (loc, { Class.Implements.id; targs }) = + node + "ClassImplements" + loc + [("id", identifier id); ("typeParameters", option type_parameter_instantiation targs)] + and class_body (loc, { Class.Body.body }) = + node "ClassBody" loc [("body", array_of_list class_element body)] + and class_element = + Class.Body.( + function + | Method m -> class_method m + | PrivateField p -> class_private_field p + | Property p -> class_property p) + and class_method (loc, { Class.Method.key; value; kind; static; decorators }) = + let (key, computed) = + Expression.Object.Property.( + match key with + | Literal lit -> (literal lit, false) + | Identifier id -> (identifier id, false) + | PrivateName name -> (private_name name, false) + | Computed expr -> (expression expr, true)) + in + let kind = + Class.Method.( + match kind with + | Constructor -> "constructor" + | Method -> "method" + | Get -> "get" + | Set -> "set") + in + node + "MethodDefinition" + loc + [ + ("key", key); + ("value", function_expression value); + ("kind", string kind); + ("static", bool static); + ("computed", bool computed); + ("decorators", array_of_list class_decorator decorators); ] - ) - | loc, Conditional conditional -> Conditional.( - node "ConditionalExpression" loc [ - "test", expression conditional.test; - "consequent", expression conditional.consequent; - "alternate", expression conditional.alternate; + and class_private_field + (loc, { Class.PrivateField.key = (_, key); value; annot; static; variance = variance_ }) = + node + "ClassPrivateProperty" + loc + [ + ("key", identifier key); + ("value", option expression value); + ("typeAnnotation", hint type_annotation annot); + ("static", bool static); + ("variance", option variance variance_); ] - ) - | loc, New _new -> New.( - node "NewExpression" loc [ - "callee", expression _new.callee; - "typeArguments", option type_parameter_instantiation _new.targs; - "arguments", array_of_list expression_or_spread _new.arguments; + and class_property (loc, { Class.Property.key; value; annot; static; variance = variance_ }) = + let (key, computed) = + match key with + | Expression.Object.Property.Literal lit -> (literal lit, false) + | Expression.Object.Property.Identifier id -> (identifier id, false) + | Expression.Object.Property.PrivateName _ -> + failwith "Internal Error: Private name found in class prop" + | Expression.Object.Property.Computed expr -> (expression expr, true) + in + node + "ClassProperty" + loc + [ + ("key", key); + ("value", option expression value); + ("typeAnnotation", hint type_annotation annot); + ("computed", bool computed); + ("static", bool static); + ("variance", option variance variance_); ] - ) - | loc, Call call -> - node "CallExpression" loc (call_node_properties call) - | loc, OptionalCall opt_call -> OptionalCall.( - node "OptionalCallExpression" loc (call_node_properties opt_call.call @ [ - "optional", bool opt_call.optional; - ]) - ) - | loc, Member member -> - node "MemberExpression" loc (member_node_properties member) - | loc, OptionalMember opt_member -> OptionalMember.( - node "OptionalMemberExpression" loc (member_node_properties opt_member.member @ [ - "optional", bool opt_member.optional; - ]) - ) - | loc, Yield yield -> Yield.( - node "YieldExpression" loc [ - 
"argument", option expression yield.argument; - "delegate", bool yield.delegate; + and enum_declaration (loc, { Statement.EnumDeclaration.id; body }) = + Statement.EnumDeclaration.( + let enum_body = + match body with + | BooleanBody { BooleanBody.members; explicitType } -> + node + "EnumBooleanBody" + loc + [ + ( "members", + array_of_list + (fun (loc, { InitializedMember.id; init = (_, bool_val) }) -> + node "EnumBooleanMember" loc [("id", identifier id); ("init", bool bool_val)]) + members ); + ("explicitType", bool explicitType); + ] + | NumberBody { NumberBody.members; explicitType } -> + node + "EnumNumberBody" + loc + [ + ( "members", + array_of_list + (fun (loc, { InitializedMember.id; init }) -> + node + "EnumNumberMember" + loc + [("id", identifier id); ("init", number_literal init)]) + members ); + ("explicitType", bool explicitType); + ] + | StringBody { StringBody.members; explicitType } -> + let members = + match members with + | StringBody.Defaulted defaulted_members -> + List.map + (fun (loc, { DefaultedMember.id }) -> + node "EnumDefaultedMember" loc [("id", identifier id)]) + defaulted_members + | StringBody.Initialized initialized_members -> + List.map + (fun (loc, { InitializedMember.id; init }) -> + node + "EnumStringMember" + loc + [("id", identifier id); ("init", string_literal init)]) + initialized_members + in + node + "EnumStringBody" + loc + [("members", array members); ("explicitType", bool explicitType)] + | SymbolBody { SymbolBody.members } -> + node + "EnumSymbolBody" + loc + [ + ( "members", + array_of_list + (fun (loc, { DefaultedMember.id }) -> + node "EnumDefaultedMember" loc [("id", identifier id)]) + members ); + ] + in + node "EnumDeclaration" loc [("id", identifier id); ("body", enum_body)]) + and interface_declaration (loc, { Statement.Interface.id; tparams; body; extends }) = + node + "InterfaceDeclaration" + loc + [ + ("id", identifier id); + ("typeParameters", option type_parameter_declaration tparams); + ("body", object_type ~include_inexact:false body); + ("extends", array_of_list interface_extends extends); ] - ) - | loc, Comprehension comp -> Comprehension.( - node "ComprehensionExpression" loc [ - "blocks", array_of_list comprehension_block comp.blocks; - "filter", option expression comp.filter; + and interface_extends (loc, { Type.Generic.id; targs }) = + let id = + match id with + | Type.Generic.Identifier.Unqualified id -> identifier id + | Type.Generic.Identifier.Qualified q -> generic_type_qualified_identifier q + in + node + "InterfaceExtends" + loc + [("id", id); ("typeParameters", option type_parameter_instantiation targs)] + and pattern = + Pattern.( + function + | (loc, Object { Object.properties; annot }) -> + node + "ObjectPattern" + loc + [ + ("properties", array_of_list object_pattern_property properties); + ("typeAnnotation", hint type_annotation annot); + ] + | (loc, Array { Array.elements; annot; comments }) -> + node + ?comments + "ArrayPattern" + loc + [ + ("elements", array_of_list (option array_pattern_element) elements); + ("typeAnnotation", hint type_annotation annot); + ] + | (loc, Identifier pattern_id) -> pattern_identifier loc pattern_id + | (_loc, Expression expr) -> expression expr) + and function_param (loc, { Ast.Function.Param.argument; default }) = + match default with + | Some default -> + node "AssignmentPattern" loc [("left", pattern argument); ("right", expression default)] + | None -> pattern argument + and function_params = + Ast.Function.Params.( + function + | (_, { params; rest = Some (rest_loc, { 
Function.RestParam.argument }) }) -> + let rest = node "RestElement" rest_loc [("argument", pattern argument)] in + let rev_params = List.rev_map function_param params in + let params = List.rev (rest :: rev_params) in + array params + | (_, { params; rest = None }) -> array_of_list function_param params) + and array_pattern_element = + Pattern.Array.( + function + | Element (loc, { Element.argument; default = Some default }) -> + node "AssignmentPattern" loc [("left", pattern argument); ("right", expression default)] + | Element (_loc, { Element.argument; default = None }) -> pattern argument + | RestElement (loc, { RestElement.argument }) -> + node "RestElement" loc [("argument", pattern argument)]) + and object_property = + Expression.Object.( + function + | Property (loc, prop) -> + Property.( + let (key, value, kind, method_, shorthand) = + match prop with + | Init { key; value; shorthand } -> (key, expression value, "init", false, shorthand) + | Method { key; value = (loc, func) } -> + (key, function_expression (loc, func), "init", true, false) + | Get { key; value = (loc, func) } -> + (key, function_expression (loc, func), "get", false, false) + | Set { key; value = (loc, func) } -> + (key, function_expression (loc, func), "set", false, false) + in + let (key, computed) = + match key with + | Literal lit -> (literal lit, false) + | Identifier id -> (identifier id, false) + | PrivateName _ -> failwith "Internal Error: Found private field in object props" + | Computed expr -> (expression expr, true) + in + node + "Property" + loc + [ + ("key", key); + ("value", value); + ("kind", string kind); + ("method", bool method_); + ("shorthand", bool shorthand); + ("computed", bool computed); + ]) + | SpreadProperty (loc, prop) -> + SpreadProperty.(node "SpreadProperty" loc [("argument", expression prop.argument)])) + and object_pattern_property = + Pattern.Object.( + function + | Property (loc, { Property.key; pattern = patt; default; shorthand }) -> + let (key, computed) = + match key with + | Property.Literal lit -> (literal lit, false) + | Property.Identifier id -> (identifier id, false) + | Property.Computed expr -> (expression expr, true) + in + let value = + match default with + | Some default -> + let loc = Loc.btwn (fst patt) (fst default) in + node "AssignmentPattern" loc [("left", pattern patt); ("right", expression default)] + | None -> pattern patt + in + node + "Property" + loc + [ + ("key", key); + ("value", value); + ("kind", string "init"); + ("method", bool false); + ("shorthand", bool shorthand); + ("computed", bool computed); + ] + | RestProperty (loc, { RestProperty.argument }) -> + node "RestProperty" loc [("argument", pattern argument)]) + and expression_or_spread = + Expression.( + function + | Expression expr -> expression expr + | Spread (loc, { SpreadElement.argument }) -> + node "SpreadElement" loc [("argument", expression argument)]) + and comprehension_block (loc, { Expression.Comprehension.Block.left; right; each }) = + node + "ComprehensionBlock" + loc + [("left", pattern left); ("right", expression right); ("each", bool each)] + and literal (loc, { Literal.value; raw; comments }) = + let value_ = + match value with + | Literal.String str -> string str + | Literal.Boolean b -> bool b + | Literal.Null -> null + | Literal.Number f -> number f + | Literal.BigInt _ -> failwith "We should not create Literal nodes for bigints" + | Literal.RegExp { Literal.RegExp.pattern; flags } -> regexp loc pattern flags + in + let props = + match value with + | Literal.RegExp { 
Literal.RegExp.pattern; flags } -> + let regex = obj [("pattern", string pattern); ("flags", string flags)] in + [("value", value_); ("raw", string raw); ("regex", regex)] + | _ -> [("value", value_); ("raw", string raw)] + in + node ?comments "Literal" loc props + and number_literal (loc, { NumberLiteral.value; raw }) = + node "Literal" loc [("value", number value); ("raw", string raw)] + and bigint_literal (loc, { Literal.raw; _ }) = + node "BigIntLiteral" loc [("value", null); ("bigint", string raw)] + and string_literal (loc, { StringLiteral.value; raw }) = + node "Literal" loc [("value", string value); ("raw", string raw)] + and template_literal (loc, { Expression.TemplateLiteral.quasis; expressions }) = + node + "TemplateLiteral" + loc + [ + ("quasis", array_of_list template_element quasis); + ("expressions", array_of_list expression expressions); ] - ) - | loc, Generator gen -> Generator.( - node "GeneratorExpression" loc [ - "blocks", array_of_list comprehension_block gen.blocks; - "filter", option expression gen.filter; + and template_element + ( loc, + { + Expression.TemplateLiteral.Element.value = + { Expression.TemplateLiteral.Element.raw; cooked }; + tail; + } ) = + let value = obj [("raw", string raw); ("cooked", string cooked)] in + node "TemplateElement" loc [("value", value); ("tail", bool tail)] + and tagged_template (loc, { Expression.TaggedTemplate.tag; quasi }) = + node + "TaggedTemplateExpression" + loc + [("tag", expression tag); ("quasi", template_literal quasi)] + and variable_declaration (loc, { Statement.VariableDeclaration.kind; declarations }) = + let kind = + match kind with + | Statement.VariableDeclaration.Var -> "var" + | Statement.VariableDeclaration.Let -> "let" + | Statement.VariableDeclaration.Const -> "const" + in + node + "VariableDeclaration" + loc + [("declarations", array_of_list variable_declarator declarations); ("kind", string kind)] + and variable_declarator (loc, { Statement.VariableDeclaration.Declarator.id; init }) = + node "VariableDeclarator" loc [("id", pattern id); ("init", option expression init)] + and variance (loc, sigil) = + let kind = + Variance.( + match sigil with + | Plus -> string "plus" + | Minus -> string "minus") + in + node "Variance" loc [("kind", kind)] + and _type (loc, t) = + Type.( + match t with + | Any -> any_type loc + | Mixed -> mixed_type loc + | Empty -> empty_type loc + | Void -> void_type loc + | Null -> null_type loc + | Number -> number_type loc + | BigInt -> bigint_type loc + | String -> string_type loc + | Boolean -> boolean_type loc + | Nullable t -> nullable_type loc t + | Function fn -> function_type (loc, fn) + | Object o -> object_type ~include_inexact:true (loc, o) + | Interface i -> interface_type (loc, i) + | Array t -> array_type loc t + | Generic g -> generic_type (loc, g) + | Union (t0, t1, ts) -> union_type (loc, t0 :: t1 :: ts) + | Intersection (t0, t1, ts) -> intersection_type (loc, t0 :: t1 :: ts) + | Typeof t -> typeof_type (loc, t) + | Tuple t -> tuple_type (loc, t) + | StringLiteral s -> string_literal_type (loc, s) + | NumberLiteral n -> number_literal_type (loc, n) + | BigIntLiteral n -> bigint_literal_type (loc, n) + | BooleanLiteral b -> boolean_literal_type (loc, b) + | Exists -> exists_type loc) + and implicit loc = + generic_type + ( loc, + { + Type.Generic.id = + Type.Generic.Identifier.Unqualified (Flow_ast_utils.ident_of_source (loc, "_")); + targs = None; + } ) + and explicit_or_implicit_targ x = + match x with + | Expression.TypeParameterInstantiation.Explicit t -> _type t + | 
Expression.TypeParameterInstantiation.Implicit loc -> implicit loc + and any_type loc = node "AnyTypeAnnotation" loc [] + and mixed_type loc = node "MixedTypeAnnotation" loc [] + and empty_type loc = node "EmptyTypeAnnotation" loc [] + and void_type loc = node "VoidTypeAnnotation" loc [] + and null_type loc = node "NullLiteralTypeAnnotation" loc [] + and number_type loc = node "NumberTypeAnnotation" loc [] + and bigint_type loc = node "BigIntTypeAnnotation" loc [] + and string_type loc = node "StringTypeAnnotation" loc [] + and boolean_type loc = node "BooleanTypeAnnotation" loc [] + and nullable_type loc t = node "NullableTypeAnnotation" loc [("typeAnnotation", _type t)] + and function_type + ( loc, + { Type.Function.params = (_, { Type.Function.Params.params; rest }); return; tparams } ) + = + node + "FunctionTypeAnnotation" + loc + [ + ("params", array_of_list function_type_param params); + ("returnType", _type return); + ("rest", option function_type_rest rest); + ("typeParameters", option type_parameter_declaration tparams); ] - ) - | _loc, Identifier id -> identifier id - | loc, Literal lit -> literal (loc, lit) - | loc, TemplateLiteral lit -> template_literal (loc, lit) - | loc, TaggedTemplate tagged -> tagged_template (loc, tagged) - | loc, Class c -> class_expression (loc, c) - | loc, JSXElement element -> jsx_element (loc, element) - | loc, JSXFragment fragment -> jsx_fragment (loc, fragment) - | loc, MetaProperty meta_prop -> MetaProperty.( - node "MetaProperty" loc [ - "meta", identifier meta_prop.meta; - "property", identifier meta_prop.property; + and function_type_param (loc, { Type.Function.Param.name; annot; optional }) = + node + "FunctionTypeParam" + loc + [ + ("name", option identifier name); + ("typeAnnotation", _type annot); + ("optional", bool optional); ] - ) - | loc, Import arg -> node "CallExpression" loc [ - "callee", node "Import" (Loc.btwn loc (fst arg)) []; - "arguments", array_of_list expression [arg]; - ] - ) - - and function_declaration (loc, fn) = Function.( - let body = match fn.body with - | BodyBlock b -> block b - | BodyExpression b -> expression b in - let return = match fn.return with - | Missing _ -> None - | Available t -> Some t in - node "FunctionDeclaration" loc [ - (* estree hasn't come around to the idea that function decls can have - optional ids, but acorn, babel, espree and esprima all have, so let's - do it too. 
see https://github.com/estree/estree/issues/98 *) - "id", option identifier fn.id; - "params", function_params fn.params; - "body", body; - "async", bool fn.async; - "generator", bool fn.generator; - "predicate", option predicate fn.predicate; - "expression", bool fn.expression; - "returnType", option type_annotation return; - "typeParameters", option type_parameter_declaration fn.tparams; - ] - ) - - and function_expression (loc, _function) = Function.( - let body = match _function.body with - | BodyBlock b -> block b - | BodyExpression expr -> expression expr - in - let return = match _function.return with - | Missing _ -> None - | Available t -> Some t in - node "FunctionExpression" loc [ - "id", option identifier _function.id; - "params", function_params _function.params; - "body", body; - "async", bool _function.async; - "generator", bool _function.generator; - "predicate", option predicate _function.predicate; - "expression", bool _function.expression; - "returnType", option type_annotation return; - "typeParameters", option type_parameter_declaration _function.tparams; - ] - ) - - and identifier (loc, name) = - node "Identifier" loc [ - "name", string name; - "typeAnnotation", null; - "optional", bool false; - ] - - and private_name (loc, name) = - node "PrivateName" loc [ - "id", identifier name; - ] - - and pattern_identifier loc { - Pattern.Identifier.name; annot; optional; - } = - node "Identifier" loc [ - "name", string (snd name); - "typeAnnotation", option type_annotation annot; - "optional", bool optional; - ] - - and case (loc, c) = Statement.Switch.Case.( - node "SwitchCase" loc [ - "test", option expression c.test; - "consequent", array_of_list statement c.consequent; - ] - ) - - and catch (loc, c) = Statement.Try.CatchClause.( - node "CatchClause" loc [ - "param", option pattern c.param; - "body", block c.body; - ] - ) - - and block (loc, b) = - node "BlockStatement" loc [ - "body", statement_list b.Statement.Block.body; - ] - - and declare_variable (loc, d) = Statement.DeclareVariable.( - let id_loc = Loc.btwn (fst d.id) (match d.annot with - | Some annot -> fst annot - | None -> fst d.id) in - node "DeclareVariable" loc [ - "id", pattern_identifier id_loc { - Pattern.Identifier.name = d.id; - annot = d.annot; - optional = false; - }; - ] - ) - - and declare_function (loc, d) = Statement.DeclareFunction.( - let id_loc = Loc.btwn (fst d.id) (fst d.annot) in - node "DeclareFunction" loc [ - "id", pattern_identifier id_loc { - Pattern.Identifier.name = d.id; - annot = Some d.annot; - optional = false; - }; - "predicate", option predicate d.predicate - ] - ) - - and declare_class (loc, { Statement.DeclareClass. - id; - tparams; - body; - extends; - implements; - mixins; - }) = - (* TODO: extends shouldn't return an array *) - let extends = match extends with - | Some extends -> array [interface_extends extends] - | None -> array [] - in - node "DeclareClass" loc [ - "id", identifier id; - "typeParameters", option type_parameter_declaration tparams; - "body", object_type body; - "extends", extends; - "implements", array_of_list class_implements implements; - "mixins", array_of_list interface_extends mixins; - ] - - and declare_interface (loc, { Statement.Interface. 
- id; - tparams; - body; - extends; - }) = - node "DeclareInterface" loc [ - "id", identifier id; - "typeParameters", option type_parameter_declaration tparams; - "body", object_type body; - "extends", array_of_list interface_extends extends; - ] - - and export_kind = function - | Statement.ExportType -> "type" - | Statement.ExportValue -> "value" - - and export_specifiers = Statement.ExportNamedDeclaration.(function - | Some (ExportSpecifiers specifiers) -> - array_of_list export_specifier specifiers - | Some (ExportBatchSpecifier (loc, Some name)) -> - array [ - node "ExportNamespaceSpecifier" loc [ - "exported", identifier name + and function_type_rest (_loc, { Type.Function.RestParam.argument }) = + (* TODO: add a node for the rest param itself, including the `...`, + like we do with RestElement on normal functions. This should be + coordinated with Babel, ast-types, etc. so keeping the status quo for + now. Here's an example: *) + (* node "FunctionTypeRestParam" loc [ + "argument", function_type_param argument; + ] *) + function_type_param argument + and object_type ~include_inexact (loc, { Type.Object.properties; exact; inexact }) = + Type.Object.( + let (props, ixs, calls, slots) = + List.fold_left + (fun (props, ixs, calls, slots) -> function + | Property p -> + let prop = object_type_property p in + (prop :: props, ixs, calls, slots) + | SpreadProperty p -> + let prop = object_type_spread_property p in + (prop :: props, ixs, calls, slots) + | Indexer i -> + let ix = object_type_indexer i in + (props, ix :: ixs, calls, slots) + | CallProperty c -> + let call = object_type_call_property c in + (props, ixs, call :: calls, slots) + | InternalSlot s -> + let slot = object_type_internal_slot s in + (props, ixs, calls, slot :: slots)) + ([], [], [], []) + properties + in + let fields = + [ + ("exact", bool exact); + ("properties", array (List.rev props)); + ("indexers", array (List.rev ixs)); + ("callProperties", array (List.rev calls)); + ("internalSlots", array (List.rev slots)); ] + in + let fields = + if include_inexact then + ("inexact", bool inexact) :: fields + else + fields + in + node "ObjectTypeAnnotation" loc fields) + and object_type_property + ( loc, + { + Type.Object.Property.key; + value; + optional; + static; + proto; + variance = variance_; + _method; + } ) = + let key = + match key with + | Expression.Object.Property.Literal lit -> literal lit + | Expression.Object.Property.Identifier id -> identifier id + | Expression.Object.Property.PrivateName _ -> + failwith "Internal Error: Found private field in object props" + | Expression.Object.Property.Computed _ -> + failwith "There should not be computed object type property keys" + in + let (value, kind) = + match value with + | Type.Object.Property.Init value -> (_type value, "init") + | Type.Object.Property.Get (loc, f) -> (function_type (loc, f), "get") + | Type.Object.Property.Set (loc, f) -> (function_type (loc, f), "set") + in + node + "ObjectTypeProperty" + loc + [ + ("key", key); + ("value", value); + ("method", bool _method); + ("optional", bool optional); + ("static", bool static); + ("proto", bool proto); + ("variance", option variance variance_); + ("kind", string kind); ] - | Some (ExportBatchSpecifier (_, None)) -> - (* this should've been handled by callers, since this represents an - ExportAllDeclaration, not a specifier. *) - array [] - | None -> - array [] - ) - - and declare_type_alias (loc, { Statement.TypeAlias. 
- id; - tparams; - right; - }) = - node "DeclareTypeAlias" loc [ - "id", identifier id; - "typeParameters", option type_parameter_declaration tparams; - "right", _type right; - ] - - and type_alias (loc, alias) = Statement.TypeAlias.( - node "TypeAlias" loc [ - "id", identifier alias.id; - "typeParameters", option type_parameter_declaration alias.tparams; - "right", _type alias.right; - ] - ) - and opaque_type ~declare (loc, opaque_t) = Statement.OpaqueType.( - let name = if declare then "DeclareOpaqueType" else "OpaqueType" in - node name loc [ - "id", identifier opaque_t.id; - "typeParameters", option type_parameter_declaration opaque_t.tparams; - "impltype", option _type opaque_t.impltype; - "supertype", option _type opaque_t.supertype; - ] - ) - - and class_declaration ast = class_helper "ClassDeclaration" ast - - and class_expression ast = class_helper "ClassExpression" ast - - and class_helper node_type (loc, c) = Class.( - let super, super_targs = match c.extends with - | Some (_, { Extends.expr; targs }) -> Some expr, targs - | None -> None, None - in - node node_type loc [ - (* estree hasn't come around to the idea that class decls can have - optional ids, but acorn, babel, espree and esprima all have, so let's - do it too. see https://github.com/estree/estree/issues/98 *) - "id", option identifier c.id; - "body", class_body c.body; - "typeParameters", option type_parameter_declaration c.tparams; - "superClass", option expression super; - "superTypeParameters", option type_parameter_instantiation super_targs; - "implements", array_of_list class_implements c.implements; - "decorators", array_of_list class_decorator c.classDecorators; - ] - ) - - and class_decorator (loc, { Ast.Class.Decorator.expression = expr }) = - node "Decorator" loc [ - "expression", expression expr; - ] - - and class_implements (loc, implements) = Class.Implements.( - node "ClassImplements" loc [ - "id", identifier implements.id; - "typeParameters", option type_parameter_instantiation implements.targs; - ] - ) - - and class_body (loc, body) = Class.Body.( - node "ClassBody" loc [ - "body", array_of_list class_element body.body; - ] - ) - - and class_element = Class.Body.(function - | Method m -> class_method m - | PrivateField p -> class_private_field p - | Property p -> class_property p) - - and class_method (loc, method_) = - let { Class.Method.key; value; kind; static; decorators; } = method_ in - let key, computed = Expression.Object.Property.(match key with - | Literal lit -> literal lit, false - | Identifier id -> identifier id, false - | PrivateName name -> private_name name, false - | Computed expr -> expression expr, true) in - let kind = Class.Method.(match kind with - | Constructor -> "constructor" - | Method -> "method" - | Get -> "get" - | Set -> "set") in - node "MethodDefinition" loc [ - "key", key; - "value", function_expression value; - "kind", string kind; - "static", bool static; - "computed", bool computed; - "decorators", array_of_list class_decorator decorators; - ] - - and class_private_field (loc, prop) = Class.PrivateField.( - let (_, key) = prop.key in - node "ClassPrivateProperty" loc [ - "key", identifier key; - "value", option expression prop.value; - "typeAnnotation", option type_annotation prop.annot; - "static", bool prop.static; - "variance", option variance prop.variance; - ] - ) - and class_property (loc, prop) = Class.Property.( - let key, computed = (match prop.key with - | Expression.Object.Property.Literal lit -> literal lit, false - | 
Expression.Object.Property.Identifier id -> identifier id, false - | Expression.Object.Property.PrivateName _ -> - failwith "Internal Error: Private name found in class prop" - | Expression.Object.Property.Computed expr -> expression expr, true) in - node "ClassProperty" loc [ - "key", key; - "value", option expression prop.value; - "typeAnnotation", option type_annotation prop.annot; - "computed", bool computed; - "static", bool prop.static; - "variance", option variance prop.variance; - ] - ) - - and interface_declaration (loc, i) = Statement.Interface.( - node "InterfaceDeclaration" loc [ - "id", identifier i.id; - "typeParameters", option type_parameter_declaration i.tparams; - "body", object_type i.body; - "extends", array_of_list interface_extends i.extends; - ] - ) - - and interface_extends (loc, g) = Type.Generic.( - let id = match g.id with - | Identifier.Unqualified id -> identifier id - | Identifier.Qualified q -> generic_type_qualified_identifier q - in - node "InterfaceExtends" loc [ - "id", id; - "typeParameters", option type_parameter_instantiation g.targs; - ] - ) - - and pattern = Pattern.(function - | loc, Object obj -> - node "ObjectPattern" loc [ - "properties", array_of_list object_pattern_property obj.Object.properties; - "typeAnnotation", option type_annotation obj.Object.annot; + and object_type_spread_property (loc, { Type.Object.SpreadProperty.argument }) = + node "ObjectTypeSpreadProperty" loc [("argument", _type argument)] + and object_type_indexer + (loc, { Type.Object.Indexer.id; key; value; static; variance = variance_ }) = + node + "ObjectTypeIndexer" + loc + [ + ("id", option identifier id); + ("key", _type key); + ("value", _type value); + ("static", bool static); + ("variance", option variance variance_); ] - | loc, Array arr -> - node "ArrayPattern" loc [ - "elements", array_of_list (option array_pattern_element) arr.Array.elements; - "typeAnnotation", option type_annotation arr.Array.annot; + and object_type_call_property (loc, { Type.Object.CallProperty.value; static }) = + node "ObjectTypeCallProperty" loc [("value", function_type value); ("static", bool static)] + and object_type_internal_slot + (loc, { Type.Object.InternalSlot.id; optional; static; _method; value }) = + node + "ObjectTypeInternalSlot" + loc + [ + ("id", identifier id); + ("optional", bool optional); + ("static", bool static); + ("method", bool _method); + ("value", _type value); ] - | loc, Assignment { Assignment.left; right } -> - node "AssignmentPattern" loc [ - "left", pattern left; - "right", expression right + and interface_type (loc, { Type.Interface.extends; body }) = + node + "InterfaceTypeAnnotation" + loc + [ + ("extends", array_of_list interface_extends extends); + ("body", object_type ~include_inexact:false body); ] - | loc, Identifier pattern_id -> - pattern_identifier loc pattern_id - | _loc, Expression expr -> expression expr) - - and function_params = Ast.Function.Params.(function - | _, { params; rest = Some (rest_loc, { Function.RestElement.argument }) } -> - let rest = node "RestElement" rest_loc [ - "argument", pattern argument; - ] in - let rev_params = params |> List.map pattern |> List.rev in - let params = List.rev (rest::rev_params) in - array params - | _, { params; rest = None } -> - array_of_list pattern params - ) - - and array_pattern_element = Pattern.Array.(function - | Element p -> pattern p - | RestElement (loc, { RestElement.argument; }) -> - node "RestElement" loc [ - "argument", pattern argument; + and array_type loc t = node 
"ArrayTypeAnnotation" loc [("elementType", _type t)] + and generic_type_qualified_identifier (loc, { Type.Generic.Identifier.id; qualification }) = + let qualification = + match qualification with + | Type.Generic.Identifier.Unqualified id -> identifier id + | Type.Generic.Identifier.Qualified q -> generic_type_qualified_identifier q + in + node "QualifiedTypeIdentifier" loc [("qualification", qualification); ("id", identifier id)] + and generic_type (loc, { Type.Generic.id; targs }) = + let id = + match id with + | Type.Generic.Identifier.Unqualified id -> identifier id + | Type.Generic.Identifier.Qualified q -> generic_type_qualified_identifier q + in + node + "GenericTypeAnnotation" + loc + [("id", id); ("typeParameters", option type_parameter_instantiation targs)] + and union_type (loc, ts) = node "UnionTypeAnnotation" loc [("types", array_of_list _type ts)] + and intersection_type (loc, ts) = + node "IntersectionTypeAnnotation" loc [("types", array_of_list _type ts)] + and typeof_type (loc, t) = node "TypeofTypeAnnotation" loc [("argument", _type t)] + and tuple_type (loc, tl) = node "TupleTypeAnnotation" loc [("types", array_of_list _type tl)] + and string_literal_type (loc, { Ast.StringLiteral.value; raw }) = + node "StringLiteralTypeAnnotation" loc [("value", string value); ("raw", string raw)] + and number_literal_type (loc, { Ast.NumberLiteral.value; raw }) = + node "NumberLiteralTypeAnnotation" loc [("value", number value); ("raw", string raw)] + and bigint_literal_type (loc, { Ast.BigIntLiteral.bigint; _ }) = + let raw = bigint in + node "BigIntLiteralTypeAnnotation" loc [("value", null); ("raw", string raw)] + and boolean_literal_type (loc, value) = + node + "BooleanLiteralTypeAnnotation" + loc + [ + ("value", bool value); + ( "raw", + string + ( if value then + "true" + else + "false" ) ); ] - ) - - and object_property = Expression.Object.(function - | Property (loc, prop) -> Property.( - let key, value, kind, method_, shorthand = match prop with - | Init { key; value; shorthand } -> - key, expression value, "init", false, shorthand - | Method { key; value = (loc, func) } -> - key, function_expression (loc, func), "init", true, false - | Get { key; value = (loc, func) } -> - key, function_expression (loc, func), "get", false, false - | Set { key; value = (loc, func) } -> - key, function_expression (loc, func), "set", false, false + and exists_type loc = node "ExistsTypeAnnotation" loc [] + and type_annotation (loc, ty) = node "TypeAnnotation" loc [("typeAnnotation", _type ty)] + and type_parameter_declaration (loc, params) = + node "TypeParameterDeclaration" loc [("params", array_of_list type_param params)] + and type_param + ( loc, + { + Type.ParameterDeclaration.TypeParam.name = (_, { Identifier.name; comments = _ }); + bound; + variance = tp_var; + default; + } ) = + node + "TypeParameter" + loc + [ + (* we track the location of the name, but don't expose it here for + backwards-compatibility. TODO: change this? 
*) + ("name", string name); + ("bound", hint type_annotation bound); + ("variance", option variance tp_var); + ("default", option _type default); + ] + and type_parameter_instantiation (loc, targs) = + node "TypeParameterInstantiation" loc [("params", array_of_list _type targs)] + and type_parameter_instantiation_with_implicit (loc, targs) = + node + "TypeParameterInstantiation" + loc + [("params", array_of_list explicit_or_implicit_targ targs)] + and jsx_element (loc, { JSX.openingElement; closingElement; children = (_loc, children) }) = + node + "JSXElement" + loc + [ + ("openingElement", jsx_opening openingElement); + ("closingElement", option jsx_closing closingElement); + ("children", array_of_list jsx_child children); + ] + and jsx_fragment + ( loc, + { JSX.frag_openingElement; frag_closingElement; frag_children = (_loc, frag_children) } + ) = + node + "JSXFragment" + loc + [ + ("openingFragment", jsx_opening_fragment frag_openingElement); + ("children", array_of_list jsx_child frag_children); + ("closingFragment", jsx_closing_fragment frag_closingElement); + ] + and jsx_opening (loc, { JSX.Opening.name; attributes; selfClosing }) = + node + "JSXOpeningElement" + loc + [ + ("name", jsx_name name); + ("attributes", array_of_list jsx_opening_attribute attributes); + ("selfClosing", bool selfClosing); + ] + and jsx_opening_fragment loc = node "JSXOpeningFragment" loc [] + and jsx_opening_attribute = + JSX.Opening.( + function + | Attribute attribute -> jsx_attribute attribute + | SpreadAttribute attribute -> jsx_spread_attribute attribute) + and jsx_closing (loc, { JSX.Closing.name }) = + node "JSXClosingElement" loc [("name", jsx_name name)] + and jsx_closing_fragment loc = node "JSXClosingFragment" loc [] + and jsx_child = + JSX.( + function + | (loc, Element element) -> jsx_element (loc, element) + | (loc, Fragment fragment) -> jsx_fragment (loc, fragment) + | (loc, ExpressionContainer expr) -> jsx_expression_container (loc, expr) + | (loc, SpreadChild expr) -> node "JSXSpreadChild" loc [("expression", expression expr)] + | (loc, Text str) -> jsx_text (loc, str)) + and jsx_name = + JSX.( + function + | Identifier id -> jsx_identifier id + | NamespacedName namespaced_name -> jsx_namespaced_name namespaced_name + | MemberExpression member -> jsx_member_expression member) + and jsx_attribute (loc, { JSX.Attribute.name; value }) = + let name = + match name with + | JSX.Attribute.Identifier id -> jsx_identifier id + | JSX.Attribute.NamespacedName namespaced_name -> jsx_namespaced_name namespaced_name in - let key, computed = match key with - | Literal lit -> literal lit, false - | Identifier id -> identifier id, false - | PrivateName _ -> failwith "Internal Error: Found private field in object props" - | Computed expr -> expression expr, true + node "JSXAttribute" loc [("name", name); ("value", option jsx_attribute_value value)] + and jsx_attribute_value = + JSX.Attribute.( + function + | Literal (loc, value) -> literal (loc, value) + | ExpressionContainer (loc, expr) -> jsx_expression_container (loc, expr)) + and jsx_spread_attribute (loc, { JSX.SpreadAttribute.argument }) = + node "JSXSpreadAttribute" loc [("argument", expression argument)] + and jsx_expression_container (loc, { JSX.ExpressionContainer.expression = expr }) = + let expression = + match expr with + | JSX.ExpressionContainer.Expression expr -> expression expr + | JSX.ExpressionContainer.EmptyExpression -> + let empty_loc = + Loc. 
+ { + loc with + start = { loc.start with column = loc.start.column + 1 }; + _end = { loc._end with column = loc._end.column - 1 }; + } + in + node "JSXEmptyExpression" empty_loc [] in - node "Property" loc [ - "key", key; - "value", value; - "kind", string kind; - "method", bool method_; - "shorthand", bool shorthand; - "computed", bool computed; - ] - ) - | SpreadProperty(loc, prop) -> SpreadProperty.( - node "SpreadProperty" loc [ - "argument", expression prop.argument; - ] - )) - - and object_pattern_property = Pattern.Object.(function - | Property (loc, prop) -> Property.( - let key, computed = (match prop.key with - | Literal lit -> literal lit, false - | Identifier id -> identifier id, false - | Computed expr -> expression expr, true) in - node "Property" loc [ - "key", key; - "value", pattern prop.pattern; - "kind", string "init"; - "method", bool false; - "shorthand", bool prop.shorthand; - "computed", bool computed; - ] - ) - | RestProperty (loc, prop) -> RestProperty.( - node "RestProperty" loc [ - "argument", pattern prop.argument; - ] - ) - ) - - and expression_or_spread = Expression.(function - | Expression expr -> expression expr - | Spread (loc, { SpreadElement.argument; }) -> - node "SpreadElement" loc [ - "argument", expression argument; + node "JSXExpressionContainer" loc [("expression", expression)] + and jsx_text (loc, { JSX.Text.value; raw }) = + node "JSXText" loc [("value", string value); ("raw", string raw)] + and jsx_member_expression (loc, { JSX.MemberExpression._object; property }) = + let _object = + match _object with + | JSX.MemberExpression.Identifier id -> jsx_identifier id + | JSX.MemberExpression.MemberExpression member -> jsx_member_expression member + in + node "JSXMemberExpression" loc [("object", _object); ("property", jsx_identifier property)] + and jsx_namespaced_name (loc, { JSX.NamespacedName.namespace; name }) = + node + "JSXNamespacedName" + loc + [("namespace", jsx_identifier namespace); ("name", jsx_identifier name)] + and jsx_identifier (loc, { JSX.Identifier.name }) = + node "JSXIdentifier" loc [("name", string name)] + and export_specifier (loc, { Statement.ExportNamedDeclaration.ExportSpecifier.exported; local }) + = + let exported = + match exported with + | Some exported -> identifier exported + | None -> identifier local + in + node "ExportSpecifier" loc [("local", identifier local); ("exported", exported)] + and import_default_specifier id = + node "ImportDefaultSpecifier" (fst id) [("local", identifier id)] + and import_namespace_specifier (loc, id) = + node "ImportNamespaceSpecifier" loc [("local", identifier id)] + and import_named_specifier local_id remote_id kind = + let span_loc = + match local_id with + | Some local_id -> Loc.btwn (fst remote_id) (fst local_id) + | None -> fst remote_id + in + let local_id = + match local_id with + | Some id -> id + | None -> remote_id + in + node + "ImportSpecifier" + span_loc + [ + ("imported", identifier remote_id); + ("local", identifier local_id); + ( "importKind", + match kind with + | Some Statement.ImportDeclaration.ImportType -> string "type" + | Some Statement.ImportDeclaration.ImportTypeof -> string "typeof" + | Some Statement.ImportDeclaration.ImportValue + | None -> + null ); ] - ) - - and comprehension_block (loc, b) = Expression.Comprehension.Block.( - node "ComprehensionBlock" loc [ - "left", pattern b.left; - "right", expression b.right; - "each", bool b.each; - ] - ) - - and literal (loc, lit) = Literal.( - let { value; raw; } = lit in - let value_ = match value with - | String 
str -> string str - | Boolean b -> bool b - | Null -> null - | Number f -> number f - | RegExp { RegExp.pattern; flags; } -> regexp loc pattern flags - in - let props = match value with - | RegExp { RegExp.pattern; flags; } -> - let regex = obj [ - "pattern", string pattern; - "flags", string flags; - ] in - [ "value", value_; "raw", string raw; "regex", regex ] - | _ -> - [ "value", value_; "raw", string raw; ] - in - node "Literal" loc props - ) - - and string_literal (loc, lit) = StringLiteral.( - node "Literal" loc [ - "value", string lit.value; - "raw", string lit.raw; - ] - ) - - and template_literal (loc, value) = Expression.TemplateLiteral.( - node "TemplateLiteral" loc [ - "quasis", array_of_list template_element value.quasis; - "expressions", array_of_list expression value.expressions; - ] - ) - - and template_element (loc, element) = Expression.TemplateLiteral.Element.( - let value = obj [ - "raw", string element.value.raw; - "cooked", string element.value.cooked; - ] in - node "TemplateElement" loc [ - "value", value; - "tail", bool element.tail; - ] - ) - - and tagged_template (loc, tagged) = Expression.TaggedTemplate.( - node "TaggedTemplateExpression" loc [ - "tag", expression tagged.tag; - "quasi", template_literal tagged.quasi; - ] - ) - - and variable_declaration (loc, var) = Statement.VariableDeclaration.( - let kind = match var.kind with - | Var -> "var" - | Let -> "let" - | Const -> "const" - in - node "VariableDeclaration" loc [ - "declarations", array_of_list variable_declarator var.declarations; - "kind", string kind; - ] - ) - - and variable_declarator (loc, declarator) = - Statement.VariableDeclaration.Declarator.( - node "VariableDeclarator" loc [ - "id", pattern declarator.id; - "init", option expression declarator.init; + and comment_list comments = array_of_list comment comments + and comment (loc, c) = + Comment.( + let (_type, value) = + match c with + | Line s -> ("Line", s) + | Block s -> ("Block", s) + in + node _type loc [("value", string value)]) + and predicate (loc, p) = + Ast.Type.Predicate.( + let (_type, value) = + match p with + | Declared e -> ("DeclaredPredicate", [("value", expression e)]) + | Inferred -> ("InferredPredicate", []) + in + node _type loc value) + and call_node_properties { Expression.Call.callee; targs; arguments } = + [ + ("callee", expression callee); + ("typeArguments", option type_parameter_instantiation_with_implicit targs); + ("arguments", array_of_list expression_or_spread arguments); ] - ) - - and variance (loc, sigil) = - let kind = Variance.(match sigil with - | Plus -> string "plus" - | Minus -> string "minus" - ) in - node "Variance" loc [ "kind", kind ] - - and _type (loc, t) = Type.( - match t with - | Any -> any_type loc - | Mixed -> mixed_type loc - | Empty -> empty_type loc - | Void -> void_type loc - | Null -> null_type loc - | Number -> number_type loc - | String -> string_type loc - | Boolean -> boolean_type loc - | Nullable t -> nullable_type loc t - | Function fn -> function_type (loc, fn) - | Object o -> object_type (loc, o) - | Interface i -> interface_type (loc, i) - | Array t -> array_type loc t - | Generic g -> generic_type (loc, g) - | Union (t0, t1, ts) -> union_type (loc, t0::t1::ts) - | Intersection (t0, t1, ts) -> intersection_type (loc, t0::t1::ts) - | Typeof t -> typeof_type (loc, t) - | Tuple t -> tuple_type (loc, t) - | StringLiteral s -> string_literal_type (loc, s) - | NumberLiteral n -> number_literal_type (loc, n) - | BooleanLiteral b -> boolean_literal_type (loc, b) - | Exists -> 
exists_type loc - ) - - and any_type loc = node "AnyTypeAnnotation" loc [] - - and mixed_type loc = node "MixedTypeAnnotation" loc [] - - and empty_type loc = node "EmptyTypeAnnotation" loc [] - - and void_type loc = node "VoidTypeAnnotation" loc [] - - and null_type loc = node "NullLiteralTypeAnnotation" loc [] - - and number_type loc = node "NumberTypeAnnotation" loc [] - - and string_type loc = node "StringTypeAnnotation" loc [] - - and boolean_type loc = node "BooleanTypeAnnotation" loc [] - - and nullable_type loc t = - node "NullableTypeAnnotation" loc [ - "typeAnnotation", _type t; - ] - - and function_type (loc, fn) = Type.Function.( - let (_, { Params.params; rest }) = fn.params in - node "FunctionTypeAnnotation" loc [ - "params", array_of_list function_type_param params; - "returnType", _type fn.return; - "rest", option function_type_rest rest; - "typeParameters", option type_parameter_declaration fn.tparams; - ] - ) - - and function_type_param (loc, param) = Type.Function.Param.( - node "FunctionTypeParam" loc [ - "name", option identifier param.name; - "typeAnnotation", _type param.annot; - "optional", bool param.optional; - ] - ) - - and function_type_rest (_loc, { Type.Function.RestParam.argument }) = - (* TODO: add a node for the rest param itself, including the `...`, - like we do with RestElement on normal functions. This should be - coordinated with Babel, ast-types, etc. so keeping the status quo for - now. Here's an example: *) - (* node "FunctionTypeRestParam" loc [ - "argument", function_type_param argument; - ] *) - function_type_param argument - - and object_type (loc, o) = Type.Object.( - let props, ixs, calls, slots = List.fold_left (fun (props, ixs, calls, slots) -> - function - | Property p -> - let prop = object_type_property p in - prop::props, ixs, calls, slots - | SpreadProperty p -> - let prop = object_type_spread_property p in - prop::props, ixs, calls, slots - | Indexer i -> - let ix = object_type_indexer i in - props, ix::ixs, calls, slots - | CallProperty c -> - let call = object_type_call_property c in - props, ixs, call::calls, slots - | InternalSlot s -> - let slot = object_type_internal_slot s in - props, ixs, calls, slot::slots - ) ([], [], [], []) o.properties in - node "ObjectTypeAnnotation" loc [ - "exact", bool o.exact; - "properties", array (List.rev props); - "indexers", array (List.rev ixs); - "callProperties", array (List.rev calls); - "internalSlots", array (List.rev slots); - ] - ) - - and object_type_property (loc, { Type.Object.Property. 
- key; value; optional; static; proto; variance = variance_; _method; - }) = - let key = match key with - | Expression.Object.Property.Literal lit -> literal lit - | Expression.Object.Property.Identifier id -> identifier id - | Expression.Object.Property.PrivateName _ -> - failwith "Internal Error: Found private field in object props" - | Expression.Object.Property.Computed _ -> - failwith "There should not be computed object type property keys" - in - let value, kind = match value with - | Type.Object.Property.Init value -> _type value, "init" - | Type.Object.Property.Get (loc, f) -> function_type (loc, f), "get" - | Type.Object.Property.Set (loc, f) -> function_type (loc, f), "set" - in - node "ObjectTypeProperty" loc [ - "key", key; - "value", value; - "method", bool _method; - "optional", bool optional; - "static", bool static; - "proto", bool proto; - "variance", option variance variance_; - "kind", string kind; - ] - - and object_type_spread_property (loc, prop) = Type.Object.SpreadProperty.( - node "ObjectTypeSpreadProperty" loc [ - "argument", _type prop.argument; - ] - ) - - and object_type_indexer (loc, indexer) = Type.Object.Indexer.( - node "ObjectTypeIndexer" loc [ - "id", option identifier indexer.id; - "key", _type indexer.key; - "value", _type indexer.value; - "static", bool indexer.static; - "variance", option variance indexer.variance; - ] - ) - - and object_type_call_property (loc, callProperty) = Type.Object.CallProperty.( - node "ObjectTypeCallProperty" loc [ - "value", function_type callProperty.value; - "static", bool callProperty.static; - ] - ) - - and object_type_internal_slot (loc, slot) = Type.Object.InternalSlot.( - node "ObjectTypeInternalSlot" loc [ - "id", identifier slot.id; - "optional", bool slot.optional; - "static", bool slot.static; - "method", bool slot._method; - "value", _type slot.value; - ] - ) - - and interface_type (loc, i) = Type.Interface.( - node "InterfaceTypeAnnotation" loc [ - "extends", array_of_list interface_extends i.extends; - "body", object_type i.body; - ] - ) - - and array_type loc t = - node "ArrayTypeAnnotation" loc [ - "elementType", (_type t); - ] - - and generic_type_qualified_identifier (loc, q) = Type.Generic.Identifier.( - let qualification = match q.qualification with - | Unqualified id -> identifier id - | Qualified q -> generic_type_qualified_identifier q - in - node "QualifiedTypeIdentifier" loc [ - "qualification", qualification; - "id", identifier q.id; - ] - ) - - and generic_type (loc, g) = Type.Generic.( - let id = match g.id with - | Identifier.Unqualified id -> identifier id - | Identifier.Qualified q -> generic_type_qualified_identifier q - in - node "GenericTypeAnnotation" loc [ - "id", id; - "typeParameters", option type_parameter_instantiation g.targs; - ] - ) - - and union_type (loc, ts) = - node "UnionTypeAnnotation" loc [ - "types", array_of_list _type ts; - ] - - and intersection_type (loc, ts) = - node "IntersectionTypeAnnotation" loc [ - "types", array_of_list _type ts; - ] - - and typeof_type (loc, t) = - node "TypeofTypeAnnotation" loc [ - "argument", _type t; - ] - - and tuple_type (loc, tl) = - node "TupleTypeAnnotation" loc [ - "types", array_of_list _type tl; - ] - - and string_literal_type (loc, s) = Ast.StringLiteral.( - node "StringLiteralTypeAnnotation" loc [ - "value", string s.value; - "raw", string s.raw; - ] - ) - - and number_literal_type (loc, s) = Ast.NumberLiteral.( - node "NumberLiteralTypeAnnotation" loc [ - "value", number s.value; - "raw", string s.raw; - ] - ) - - and 
boolean_literal_type (loc, value) = - node "BooleanLiteralTypeAnnotation" loc [ - "value", bool value; - "raw", string (if value then "true" else "false"); - ] - - and exists_type loc = node "ExistsTypeAnnotation" loc [] - - and type_annotation (loc, ty) = - node "TypeAnnotation" loc [ - "typeAnnotation", _type ty; - ] - - and type_parameter_declaration (loc, params) = - node "TypeParameterDeclaration" loc [ - "params", array_of_list type_param params; - ] - - and type_param (loc, { Type.ParameterDeclaration.TypeParam. - name = (_, name); - bound; - variance = tp_var; - default; - }) = - node "TypeParameter" loc [ - (* we track the location of the name, but don't expose it here for - backwards-compatibility. TODO: change this? *) - "name", string name; - "bound", option type_annotation bound; - "variance", option variance tp_var; - "default", option _type default; - ] - - and type_parameter_instantiation (loc, targs) = - node "TypeParameterInstantiation" loc [ - "params", array_of_list _type targs; - ] - - and jsx_element (loc, (element: (Loc.t, Loc.t) JSX.element)) = JSX.( - node "JSXElement" loc [ - "openingElement", jsx_opening element.openingElement; - "closingElement", option jsx_closing element.closingElement; - "children", array_of_list jsx_child element.children; - ] - ) - - and jsx_fragment (loc, (fragment: (Loc.t, Loc.t) JSX.fragment)) = JSX.( - node "JSXFragment" loc [ - "openingFragment", jsx_opening_fragment fragment.frag_openingElement; - "children", array_of_list jsx_child fragment.frag_children; - "closingFragment", option jsx_closing_fragment fragment.frag_closingElement - ] - ) - - and jsx_opening (loc, opening) = JSX.Opening.( - node "JSXOpeningElement" loc [ - "name", jsx_name opening.name; - "attributes", array_of_list jsx_opening_attribute opening.attributes; - "selfClosing", bool opening.selfClosing; - ] - ) - - and jsx_opening_fragment loc = - node "JSXOpeningFragment" loc [] - - and jsx_opening_attribute = JSX.Opening.(function - | Attribute attribute -> jsx_attribute attribute - | SpreadAttribute attribute -> jsx_spread_attribute attribute - ) - - and jsx_closing (loc, closing) = JSX.Closing.( - node "JSXClosingElement" loc [ - "name", jsx_name closing.name; - ] - ) - - and jsx_closing_fragment loc = - node "JSXClosingFragment" loc [] - - and jsx_child = JSX.(function - | loc, Element element -> jsx_element (loc, element) - | loc, Fragment fragment -> jsx_fragment (loc, fragment) - | loc, ExpressionContainer expr -> jsx_expression_container (loc, expr) - | loc, SpreadChild expr -> - node "JSXSpreadChild" loc [ - "expression", expression expr; - ] - | loc, Text str -> jsx_text (loc, str) - ) - - and jsx_name = JSX.(function - | Identifier id -> jsx_identifier id - | NamespacedName namespaced_name -> jsx_namespaced_name namespaced_name - | MemberExpression member -> jsx_member_expression member - ) - - and jsx_attribute (loc, attribute) = JSX.Attribute.( - let name = match attribute.name with - | Identifier id -> jsx_identifier id - | NamespacedName namespaced_name -> jsx_namespaced_name namespaced_name - in - node "JSXAttribute" loc [ - "name", name; - "value", option jsx_attribute_value attribute.value; - ] - ) - - and jsx_attribute_value = JSX.Attribute.(function - | Literal (loc, value) -> literal (loc, value) - | ExpressionContainer (loc, expr) -> jsx_expression_container (loc, expr) - ) - - and jsx_spread_attribute (loc, attribute) = JSX.SpreadAttribute.( - node "JSXSpreadAttribute" loc [ - "argument", expression attribute.argument; - ] - ) - - and 
jsx_expression_container (loc, expr) = JSX.ExpressionContainer.( - let expression = match expr.expression with - | Expression expr -> expression expr - | EmptyExpression empty_loc -> - node "JSXEmptyExpression" empty_loc [] - in - node "JSXExpressionContainer" loc [ - "expression", expression; - ] - ) - - and jsx_text (loc, text) = JSX.Text.( - let { value; raw; } = text in - node "JSXText" loc [ - "value", string value; - "raw", string raw; - ] - ) - - and jsx_member_expression (loc, member_expression) = JSX.MemberExpression.( - let _object = match member_expression._object with - | Identifier id -> jsx_identifier id - | MemberExpression member -> jsx_member_expression member in - node "JSXMemberExpression" loc [ - "object", _object; - "property", jsx_identifier member_expression.property; - ] - ) - - and jsx_namespaced_name (loc, namespaced_name) = JSX.NamespacedName.( - node "JSXNamespacedName" loc [ - "namespace", jsx_identifier namespaced_name.namespace; - "name", jsx_identifier namespaced_name.name; - ] - ) - - and jsx_identifier (loc, id) = JSX.Identifier.( - node "JSXIdentifier" loc [ - "name", string id.name; - ] - ) - - and export_specifier (loc, specifier) = - let open Statement.ExportNamedDeclaration.ExportSpecifier in - let exported = match specifier.exported with - | Some exported -> identifier exported - | None -> identifier specifier.local - in - node "ExportSpecifier" loc [ - "local", identifier specifier.local; - "exported", exported; - ] - - and import_default_specifier id = - node "ImportDefaultSpecifier" (fst id) [ - "local", identifier id; - ] - - and import_namespace_specifier (loc, id) = - node "ImportNamespaceSpecifier" loc [ - "local", identifier id; - ] - - and import_named_specifier local_id remote_id kind = - let span_loc = - match local_id with - | Some local_id -> Loc.btwn (fst remote_id) (fst local_id) - | None -> fst remote_id - in - let local_id = match local_id with - | Some id -> id - | None -> remote_id - in - node "ImportSpecifier" span_loc [ - "imported", identifier remote_id; - "local", identifier local_id; - "importKind", ( - match kind with - | Some Statement.ImportDeclaration.ImportType -> string "type" - | Some Statement.ImportDeclaration.ImportTypeof -> string "typeof" - | Some Statement.ImportDeclaration.ImportValue | None -> null - ); - ] - - and comment_list comments = array_of_list comment comments - - and comment (loc, c) = Comment.( - let _type, value = match c with - | Line s -> "Line", s - | Block s -> "Block", s in - node _type loc [ - "value", string value; - ] - ) - - and predicate (loc, p) = Ast.Type.Predicate.( - let _type, value = match p with - | Declared e -> "DeclaredPredicate", ["value", expression e] - | Inferred -> "InferredPredicate", [] + and member_node_properties { Expression.Member._object; property } = + let (property, computed) = + match property with + | Expression.Member.PropertyIdentifier id -> (identifier id, false) + | Expression.Member.PropertyPrivateName name -> (private_name name, false) + | Expression.Member.PropertyExpression expr -> (expression expr, true) + in + [("object", expression _object); ("property", property); ("computed", bool computed)] in - node _type loc value - ) + { program; expression } - and call_node_properties call = Expression.Call.([ - "callee", expression call.callee; - "typeArguments", option type_parameter_instantiation call.targs; - "arguments", array_of_list expression_or_spread call.arguments; - ]) + let program offset_table = (make_functions offset_table).program - and 
member_node_properties member = Expression.Member.( - let property = match member.property with - | PropertyIdentifier id -> identifier id - | PropertyPrivateName name -> private_name name - | PropertyExpression expr -> expression expr - in - [ - "object", expression member._object; - "property", property; - "computed", bool member.computed; - ] - ) + let expression offset_table = (make_functions offset_table).expression end diff --git a/src/parser/expression_parser.ml b/src/parser/expression_parser.ml index b4fc8f93174..48ca0e8a055 100644 --- a/src/parser/expression_parser.ml +++ b/src/parser/expression_parser.ml @@ -1,88 +1,116 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Token open Parser_env open Flow_ast -module Error = Parse_error open Parser_common module type EXPRESSION = sig - val assignment: env -> (Loc.t, Loc.t) Expression.t - val assignment_cover: env -> pattern_cover - val conditional: env -> (Loc.t, Loc.t) Expression.t - val property_name_include_private: env -> Loc.t * Loc.t Identifier.t * bool - val is_assignable_lhs: (Loc.t, Loc.t) Expression.t -> bool - val left_hand_side: env -> (Loc.t, Loc.t) Expression.t - val number: env -> number_type -> string -> float - val sequence: env -> (Loc.t, Loc.t) Expression.t list -> (Loc.t, Loc.t) Expression.t + val assignment : env -> (Loc.t, Loc.t) Expression.t + + val assignment_cover : env -> pattern_cover + + val conditional : env -> (Loc.t, Loc.t) Expression.t + + val property_name_include_private : env -> Loc.t * (Loc.t, Loc.t) Identifier.t * bool + + val is_assignable_lhs : (Loc.t, Loc.t) Expression.t -> bool + + val left_hand_side : env -> (Loc.t, Loc.t) Expression.t + + val number : env -> number_type -> string -> float + + val sequence : env -> (Loc.t, Loc.t) Expression.t list -> (Loc.t, Loc.t) Expression.t end module Expression - (Parse: PARSER) - (Type: Type_parser.TYPE) - (Declaration: Declaration_parser.DECLARATION) - (Pattern_cover: Pattern_cover.COVER) -: EXPRESSION = struct - type op_precedence = Left_assoc of int | Right_assoc of int + (Parse : PARSER) + (Type : Type_parser.TYPE) + (Declaration : Declaration_parser.DECLARATION) + (Pattern_cover : Pattern_cover.COVER) : EXPRESSION = struct + type op_precedence = + | Left_assoc of int + | Right_assoc of int + let is_tighter a b = - let a_prec = match a with Left_assoc x -> x | Right_assoc x -> x - 1 in - let b_prec = match b with Left_assoc x -> x | Right_assoc x -> x in + let a_prec = + match a with + | Left_assoc x -> x + | Right_assoc x -> x - 1 + in + let b_prec = + match b with + | Left_assoc x -> x + | Right_assoc x -> x + in a_prec >= b_prec - let is_assignable_lhs = Expression.(function - | _, MetaProperty { MetaProperty.meta = (_, "new"); property = (_, "target") } - -> false (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) - | _, MetaProperty { MetaProperty.meta = (_, "import"); property = (_, "meta") } - -> false (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) + let is_assignable_lhs = + Expression.( + function + | ( _, + MetaProperty + { + MetaProperty.meta = (_, { Identifier.name = "new"; comments = _ }); + property = (_, { Identifier.name = "target"; comments = _ }); + } ) -> + false + (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) + | ( _, + MetaProperty + { + 
MetaProperty.meta = (_, { Identifier.name = "import"; comments = _ }); + property = (_, { Identifier.name = "meta"; comments = _ }); + } ) -> + false + (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) (* draft spec: https://tc39.github.io/proposal-import-meta/ *) - - | _, Array _ - | _, Identifier _ - | _, Member _ - | _, MetaProperty _ - | _, Object _ - -> true - - | _, ArrowFunction _ - | _, Assignment _ - | _, Binary _ - | _, Call _ - | _, Class _ - | _, Comprehension _ - | _, Conditional _ - | _, Function _ - | _, Generator _ - | _, Import _ - | _, JSXElement _ - | _, JSXFragment _ - | _, Literal _ - | _, Logical _ - | _, New _ - | _, OptionalCall _ - | _, OptionalMember _ - | _, Sequence _ - | _, Super - | _, TaggedTemplate _ - | _, TemplateLiteral _ - | _, This - | _, TypeCast _ - | _, Unary _ - | _, Update _ - | _, Yield _ - -> false - ) + | (_, Array _) + | (_, Identifier _) + | (_, Member _) + | (_, MetaProperty _) + | (_, Object _) -> + true + | (_, ArrowFunction _) + | (_, Assignment _) + | (_, Binary _) + | (_, Call _) + | (_, Class _) + | (_, Comprehension _) + | (_, Conditional _) + | (_, Function _) + | (_, Generator _) + | (_, Import _) + | (_, JSXElement _) + | (_, JSXFragment _) + | (_, Literal _) + | (_, Logical _) + | (_, New _) + | (_, OptionalCall _) + | (_, OptionalMember _) + | (_, Sequence _) + | (_, Super) + | (_, TaggedTemplate _) + | (_, TemplateLiteral _) + | (_, This) + | (_, TypeCast _) + | (_, Unary _) + | (_, Update _) + | (_, Yield _) -> + false) let as_expression = Pattern_cover.as_expression + let as_pattern = Pattern_cover.as_pattern (* AssignmentExpression : + * [+Yield] YieldExpression * ConditionalExpression * LeftHandSideExpression = AssignmentExpression * LeftHandSideExpression AssignmentOperator AssignmentExpression @@ -99,50 +127,49 @@ module Expression let left = as_pattern env expr_or_pattern in let right = assignment env in let loc = Loc.btwn (fst left) (fst right) in - - Cover_expr (loc, Expression.(Assignment { Assignment. - operator; - left; - right; - })) + Cover_expr (loc, Expression.(Assignment { Assignment.operator; left; right })) | _ -> expr_or_pattern - - in let error_callback _ = function + in + let error_callback _ = function (* Don't rollback on these errors. *) - | Error.StrictReservedWord -> () + | Parse_error.StrictReservedWord -> () (* Everything else causes a rollback *) | _ -> raise Try.Rollback - - (* So we may or may not be parsing the first part of an arrow function - * (the part before the =>). We might end up parsing that whole thing or - * we might end up parsing only part of it and thinking we're done. We - * need to look at the next token to figure out if we really parsed an - * assignment expression or if this is just the beginning of an arrow - * function *) - in let try_assignment_but_not_arrow_function env = + (* So we may or may not be parsing the first part of an arrow function + * (the part before the =>). We might end up parsing that whole thing or + * we might end up parsing only part of it and thinking we're done. 
We + * need to look at the next token to figure out if we really parsed an + * assignment expression or if this is just the beginning of an arrow + * function *) + in + let try_assignment_but_not_arrow_function env = let env = env |> with_error_callback error_callback in let ret = assignment_but_not_arrow_function_cover env in match Peek.token env with - | T_ARROW -> (* x => 123 *) + | T_ARROW -> + (* x => 123 *) raise Try.Rollback - | T_COLON when last_token env = Some T_RPAREN-> (* (x): number => 123 *) + | T_COLON when last_token env = Some T_RPAREN -> + (* (x): number => 123 *) raise Try.Rollback (* async x => 123 -- and we've already parsed async as an identifier * expression *) - | _ when Peek.is_identifier env -> begin match ret with - | Cover_expr (_, Expression.Identifier (_, "async")) + | _ when Peek.is_identifier env -> + begin + match ret with + | Cover_expr (_, Expression.Identifier (_, { Identifier.name = "async"; comments = _ })) when not (Peek.is_line_terminator env) -> - raise Try.Rollback - | _ -> ret + raise Try.Rollback + | _ -> ret end | _ -> ret - in fun env -> - match Peek.token env, Peek.is_identifier env with - | T_YIELD, _ when (allow_yield env) -> Cover_expr (yield env) - | T_LPAREN, _ - | T_LESS_THAN, _ - | _, true -> - + in + fun env -> + match (Peek.token env, Peek.is_identifier env) with + | (T_YIELD, _) when allow_yield env -> Cover_expr (yield env) + | (T_LPAREN, _) + | (T_LESS_THAN, _) + | (_, true) -> (* Ok, we don't know if this is going to be an arrow function or a * regular assignment expression. Let's first try to parse it as an * assignment expression. If that fails we'll try an arrow function. @@ -151,452 +178,522 @@ module Expression | Try.ParsedSuccessfully expr -> expr | Try.FailedToParse -> (match Try.to_parse env try_arrow_function with - | Try.ParsedSuccessfully expr -> expr - | Try.FailedToParse -> - - (* Well shoot. It doesn't parse cleanly as a normal - * expression or as an arrow_function. Let's treat it as a - * normal assignment expression gone wrong *) - assignment_but_not_arrow_function_cover env - ) - ) + | Try.ParsedSuccessfully expr -> expr + | Try.FailedToParse -> + (* Well shoot. It doesn't parse cleanly as a normal + * expression or as an arrow_function. 
Let's treat it as a + * normal assignment expression gone wrong *) + assignment_but_not_arrow_function_cover env)) | _ -> assignment_but_not_arrow_function_cover env - and assignment env = - as_expression env (assignment_cover env) - - and yield env = with_loc (fun env -> - if in_formal_parameters env then error env Error.YieldInFormalParameters; - Expect.token env T_YIELD; - let argument, delegate = - if Peek.is_implicit_semicolon env then None, false - else - let delegate = Expect.maybe env T_MULT in - let has_argument = match Peek.token env with - | T_SEMICOLON - | T_RBRACKET - | T_RCURLY - | T_RPAREN - | T_COLON - | T_COMMA -> false - | _ -> true + and assignment env = as_expression env (assignment_cover env) + + and yield env = + with_loc + (fun env -> + if in_formal_parameters env then error env Parse_error.YieldInFormalParameters; + let leading = Peek.comments env in + Expect.token env T_YIELD; + let (argument, delegate) = + if Peek.is_implicit_semicolon env then + (None, false) + else + let delegate = Expect.maybe env T_MULT in + let has_argument = + match Peek.token env with + | T_SEMICOLON + | T_RBRACKET + | T_RCURLY + | T_RPAREN + | T_COLON + | T_COMMA -> + false + | _ -> true + in + let argument = + if delegate || has_argument then + Some (assignment env) + else + None + in + (argument, delegate) in - let argument = - if delegate || has_argument - then Some (assignment env) - else None in - argument, delegate - in - Expression.(Yield Yield.({ - argument; - delegate; - })) - ) env - - and is_lhs = Expression.(function - | _, MetaProperty { MetaProperty.meta = (_, "new"); property = (_, "target") } - -> false (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) - | _, MetaProperty { MetaProperty.meta = (_, "import"); property = (_, "meta") } - -> false (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) + let trailing = + match argument with + | None -> Peek.comments env + | Some _ -> [] + in + Expression.( + Yield + Yield. 
+ { + argument; + delegate; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + })) + env + + and is_lhs = + Expression.( + function + | ( _, + MetaProperty + { + MetaProperty.meta = (_, { Identifier.name = "new"; comments = _ }); + property = (_, { Identifier.name = "target"; comments = _ }); + } ) -> + false + (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) + | ( _, + MetaProperty + { + MetaProperty.meta = (_, { Identifier.name = "import"; comments = _ }); + property = (_, { Identifier.name = "meta"; comments = _ }); + } ) -> + false + (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *) (* draft spec: https://tc39.github.io/proposal-import-meta/ *) - - | _, Identifier _ - | _, Member _ - | _, MetaProperty _ - -> true - - | _, Array _ - | _, ArrowFunction _ - | _, Assignment _ - | _, Binary _ - | _, Call _ - | _, Class _ - | _, Comprehension _ - | _, Conditional _ - | _, Function _ - | _, Generator _ - | _, Import _ - | _, JSXElement _ - | _, JSXFragment _ - | _, Literal _ - | _, Logical _ - | _, New _ - | _, Object _ - | _, OptionalCall _ - | _, OptionalMember _ - | _, Sequence _ - | _, Super - | _, TaggedTemplate _ - | _, TemplateLiteral _ - | _, This - | _, TypeCast _ - | _, Unary _ - | _, Update _ - | _, Yield _ - -> false - ) + | (_, Identifier _) + | (_, Member _) + | (_, MetaProperty _) -> + true + | (_, Array _) + | (_, ArrowFunction _) + | (_, Assignment _) + | (_, Binary _) + | (_, Call _) + | (_, Class _) + | (_, Comprehension _) + | (_, Conditional _) + | (_, Function _) + | (_, Generator _) + | (_, Import _) + | (_, JSXElement _) + | (_, JSXFragment _) + | (_, Literal _) + | (_, Logical _) + | (_, New _) + | (_, Object _) + | (_, OptionalCall _) + | (_, OptionalMember _) + | (_, Sequence _) + | (_, Super) + | (_, TaggedTemplate _) + | (_, TemplateLiteral _) + | (_, This) + | (_, TypeCast _) + | (_, Unary _) + | (_, Update _) + | (_, Yield _) -> + false) and assignment_op env = - let op = Expression.Assignment.(match Peek.token env with - | T_RSHIFT3_ASSIGN -> Some RShift3Assign - | T_RSHIFT_ASSIGN -> Some RShiftAssign - | T_LSHIFT_ASSIGN -> Some LShiftAssign - | T_BIT_XOR_ASSIGN -> Some BitXorAssign - | T_BIT_OR_ASSIGN -> Some BitOrAssign - | T_BIT_AND_ASSIGN -> Some BitAndAssign - | T_MOD_ASSIGN -> Some ModAssign - | T_DIV_ASSIGN -> Some DivAssign - | T_MULT_ASSIGN -> Some MultAssign - | T_EXP_ASSIGN -> Some ExpAssign - | T_MINUS_ASSIGN -> Some MinusAssign - | T_PLUS_ASSIGN -> Some PlusAssign - | T_ASSIGN -> Some Assign - | _ -> None) in + let op = + Expression.Assignment.( + match Peek.token env with + | T_RSHIFT3_ASSIGN -> Some (Some RShift3Assign) + | T_RSHIFT_ASSIGN -> Some (Some RShiftAssign) + | T_LSHIFT_ASSIGN -> Some (Some LShiftAssign) + | T_BIT_XOR_ASSIGN -> Some (Some BitXorAssign) + | T_BIT_OR_ASSIGN -> Some (Some BitOrAssign) + | T_BIT_AND_ASSIGN -> Some (Some BitAndAssign) + | T_MOD_ASSIGN -> Some (Some ModAssign) + | T_DIV_ASSIGN -> Some (Some DivAssign) + | T_MULT_ASSIGN -> Some (Some MultAssign) + | T_EXP_ASSIGN -> Some (Some ExpAssign) + | T_MINUS_ASSIGN -> Some (Some MinusAssign) + | T_PLUS_ASSIGN -> Some (Some PlusAssign) + | T_ASSIGN -> Some None + | _ -> None) + in if op <> None then Eat.token env; op + (* ConditionalExpression : + * LogicalExpression + * LogicalExpression ? 
AssignmentExpression : AssignmentExpression + *) and conditional_cover env = let start_loc = Peek.loc env in let expr = logical_cover env in - if Peek.token env = T_PLING - then begin + if Peek.token env = T_PLING then ( Expect.token env T_PLING; + (* no_in is ignored for the consequent *) let env' = env |> with_no_in false in let consequent = assignment env' in Expect.token env T_COLON; - let end_loc, alternate = with_loc assignment env in + let (end_loc, alternate) = with_loc assignment env in let loc = Loc.btwn start_loc end_loc in - Cover_expr (loc, Expression.(Conditional { Conditional. - test = as_expression env expr; - consequent; - alternate; - })) - end else expr + Cover_expr + ( loc, + Expression.( + Conditional { Conditional.test = as_expression env expr; consequent; alternate }) ) + ) else + expr and conditional env = as_expression env (conditional_cover env) + (* + * LogicalANDExpression : + * BinaryExpression + * LogicalANDExpression && BitwiseORExpression + * + * LogicalORExpression : + * LogicalANDExpression + * LogicalORExpression || LogicalANDExpression + * LogicalORExpression ?? LogicalANDExpression + * + * LogicalExpression : + * LogicalORExpression + *) and logical_cover = - let open Expression in - let make_logical env left right operator loc = - let left = as_expression env left in - let right = as_expression env right in - Cover_expr (loc, Logical {Logical.operator; left; right;}) - in let rec logical_and env left lloc = - match Peek.token env with - | T_AND -> + Expression.( + let make_logical env left right operator loc = + let left = as_expression env left in + let right = as_expression env right in + Cover_expr (loc, Logical { Logical.operator; left; right }) + in + let rec logical_and env left lloc = + match Peek.token env with + | T_AND -> Expect.token env T_AND; - let rloc, right = with_loc binary_cover env in + let (rloc, right) = with_loc binary_cover env in let loc = Loc.btwn lloc rloc in - logical_and env (make_logical env left right Logical.And loc) loc - | _ -> lloc, left - and logical_or env left lloc = - let options = parse_options env in - match Peek.token env with - | T_OR -> + let left = make_logical env left right Logical.And loc in + (* `a && b ?? c` is an error, but to recover, try to parse it like `(a && b) ?? c`. *) + let (loc, left) = coalesce ~allowed:false env left loc in + logical_and env left loc + | _ -> (lloc, left) + and logical_or env left lloc = + match Peek.token env with + | T_OR -> Expect.token env T_OR; - let rloc, right = with_loc binary_cover env in - let rloc, right = logical_and env right rloc in + let (rloc, right) = with_loc binary_cover env in + let (rloc, right) = logical_and env right rloc in let loc = Loc.btwn lloc rloc in - logical_or env (make_logical env left right Logical.Or loc) loc - | T_PLING_PLING -> - if not options.esproposal_nullish_coalescing - then error env Parse_error.NullishCoalescingDisabled; + let left = make_logical env left right Logical.Or loc in + (* `a || b ?? c` is an error, but to recover, try to parse it like `(a || b) ?? c`. 
*) + let (loc, left) = coalesce ~allowed:false env left loc in + logical_or env left loc + | _ -> (lloc, left) + and coalesce ~allowed env left lloc = + match Peek.token env with + | T_PLING_PLING -> + let options = parse_options env in + if not options.esproposal_nullish_coalescing then + error env Parse_error.NullishCoalescingDisabled; + + if not allowed then error env (Parse_error.NullishCoalescingUnexpectedLogical "??"); Expect.token env T_PLING_PLING; - let rloc, right = with_loc binary_cover env in - let rloc, right = logical_and env right rloc in + let (rloc, right) = with_loc binary_cover env in + let (rloc, right) = + match Peek.token env with + | (T_AND | T_OR) as t -> + (* `a ?? b || c` is an error. To recover, treat it like `a ?? (b || c)`. *) + error env (Parse_error.NullishCoalescingUnexpectedLogical (Token.value_of_token t)); + let (rloc, right) = logical_and env right rloc in + logical_or env right rloc + | _ -> (rloc, right) + in let loc = Loc.btwn lloc rloc in - logical_or env (make_logical env left right Logical.NullishCoalesce loc) loc - | _ -> left - in fun env -> - let loc, left = with_loc binary_cover env in - let loc, left = logical_and env left loc in - logical_or env left loc + coalesce ~allowed:true env (make_logical env left right Logical.NullishCoalesce loc) loc + | _ -> (lloc, left) + in + fun env -> + let (loc, left) = with_loc binary_cover env in + let (_, left) = + match Peek.token env with + | T_PLING_PLING -> coalesce ~allowed:true env left loc + | _ -> + let (loc, left) = logical_and env left loc in + logical_or env left loc + in + left) and binary_cover = let binary_op env = - let ret = Expression.Binary.(match Peek.token env with - (* Most BinaryExpression operators are left associative *) - (* Lowest pri *) - | T_BIT_OR -> Some (BitOr, Left_assoc 2) - | T_BIT_XOR -> Some (Xor, Left_assoc 3) - | T_BIT_AND -> Some (BitAnd, Left_assoc 4) - | T_EQUAL -> Some (Equal, Left_assoc 5) - | T_STRICT_EQUAL -> Some (StrictEqual, Left_assoc 5) - | T_NOT_EQUAL -> Some (NotEqual, Left_assoc 5) - | T_STRICT_NOT_EQUAL -> Some (StrictNotEqual, Left_assoc 5) - | T_LESS_THAN -> Some (LessThan, Left_assoc 6) - | T_LESS_THAN_EQUAL -> Some (LessThanEqual, Left_assoc 6) - | T_GREATER_THAN -> Some (GreaterThan, Left_assoc 6) - | T_GREATER_THAN_EQUAL -> Some (GreaterThanEqual, Left_assoc 6) - | T_IN -> - if (no_in env) then None else Some (In, Left_assoc 6) - | T_INSTANCEOF -> Some (Instanceof, Left_assoc 6) - | T_LSHIFT -> Some (LShift, Left_assoc 7) - | T_RSHIFT -> Some (RShift, Left_assoc 7) - | T_RSHIFT3 -> Some (RShift3, Left_assoc 7) - | T_PLUS -> Some (Plus, Left_assoc 8) - | T_MINUS -> Some (Minus, Left_assoc 8) - | T_MULT -> Some (Mult, Left_assoc 9) - | T_DIV -> Some (Div, Left_assoc 9) - | T_MOD -> Some (Mod, Left_assoc 9) - | T_EXP -> Some (Exp, Right_assoc 10) - (* Highest priority *) - | _ -> None) - in if ret <> None then Eat.token env; + let ret = + Expression.Binary.( + match Peek.token env with + (* Most BinaryExpression operators are left associative *) + (* Lowest pri *) + | T_BIT_OR -> Some (BitOr, Left_assoc 2) + | T_BIT_XOR -> Some (Xor, Left_assoc 3) + | T_BIT_AND -> Some (BitAnd, Left_assoc 4) + | T_EQUAL -> Some (Equal, Left_assoc 5) + | T_STRICT_EQUAL -> Some (StrictEqual, Left_assoc 5) + | T_NOT_EQUAL -> Some (NotEqual, Left_assoc 5) + | T_STRICT_NOT_EQUAL -> Some (StrictNotEqual, Left_assoc 5) + | T_LESS_THAN -> Some (LessThan, Left_assoc 6) + | T_LESS_THAN_EQUAL -> Some (LessThanEqual, Left_assoc 6) + | T_GREATER_THAN -> Some (GreaterThan, 
Left_assoc 6) + | T_GREATER_THAN_EQUAL -> Some (GreaterThanEqual, Left_assoc 6) + | T_IN -> + if no_in env then + None + else + Some (In, Left_assoc 6) + | T_INSTANCEOF -> Some (Instanceof, Left_assoc 6) + | T_LSHIFT -> Some (LShift, Left_assoc 7) + | T_RSHIFT -> Some (RShift, Left_assoc 7) + | T_RSHIFT3 -> Some (RShift3, Left_assoc 7) + | T_PLUS -> Some (Plus, Left_assoc 8) + | T_MINUS -> Some (Minus, Left_assoc 8) + | T_MULT -> Some (Mult, Left_assoc 9) + | T_DIV -> Some (Div, Left_assoc 9) + | T_MOD -> Some (Mod, Left_assoc 9) + | T_EXP -> Some (Exp, Right_assoc 10) + (* Highest priority *) + | _ -> None) + in + if ret <> None then Eat.token env; ret - - in let make_binary left right operator loc = - loc, Expression.(Binary Binary.({ - operator; - left; - right; - })) - - in let rec add_to_stack right (rop, rpri) rloc = function - | (left, (lop, lpri), lloc)::rest when is_tighter lpri rpri -> - let loc = Loc.btwn lloc rloc in - let right = make_binary left right lop loc in - add_to_stack right (rop, rpri) loc rest - | stack -> (right, (rop, rpri), rloc)::stack - - in let rec collapse_stack right rloc = function + in + let make_binary left right operator loc = + (loc, Expression.(Binary Binary.{ operator; left; right })) + in + let rec add_to_stack right (rop, rpri) rloc = function + | (left, (lop, lpri), lloc) :: rest when is_tighter lpri rpri -> + let loc = Loc.btwn lloc rloc in + let right = make_binary left right lop loc in + add_to_stack right (rop, rpri) loc rest + | stack -> (right, (rop, rpri), rloc) :: stack + in + let rec collapse_stack right rloc = function | [] -> right - | (left, (lop, _), lloc)::rest -> - let loc = Loc.btwn lloc rloc in - collapse_stack (make_binary left right lop loc) loc rest - - in let rec helper env stack = - let right_loc, (is_unary, right) = with_loc (fun env -> - let is_unary = peek_unary_op env <> None in - let right = unary_cover (env |> with_no_in false) in - is_unary, right - ) env in - if Peek.token env = T_LESS_THAN - then begin + | (left, (lop, _), lloc) :: rest -> + let loc = Loc.btwn lloc rloc in + collapse_stack (make_binary left right lop loc) loc rest + in + let rec helper env stack = + let (right_loc, (is_unary, right)) = + with_loc + (fun env -> + let is_unary = peek_unary_op env <> None in + let right = unary_cover (env |> with_no_in false) in + (is_unary, right)) + env + in + ( if Peek.token env = T_LESS_THAN then match right with - | Cover_expr (_, Expression.JSXElement _) -> - error env Error.AdjacentJSXElements - | _ -> () - end; - match stack, binary_op env with - | [], None -> - right - | _, None -> + | Cover_expr (_, Expression.JSXElement _) -> error env Parse_error.AdjacentJSXElements + | _ -> () ); + match (stack, binary_op env) with + | ([], None) -> right + | (_, None) -> let right = as_expression env right in Cover_expr (collapse_stack right right_loc stack) - | _, Some (rop, rpri) -> + | (_, Some (rop, rpri)) -> if is_unary && rop = Expression.Binary.Exp then - error_at env (right_loc, Error.InvalidLHSInExponentiation); + error_at env (right_loc, Parse_error.InvalidLHSInExponentiation); let right = as_expression env right in helper env (add_to_stack right (rop, rpri) right_loc stack) - - in fun env -> helper env [] + in + (fun env -> helper env []) and peek_unary_op env = - let open Expression.Unary in - match Peek.token env with - | T_NOT -> Some Not - | T_BIT_NOT -> Some BitNot - | T_PLUS -> Some Plus - | T_MINUS -> Some Minus - | T_TYPEOF -> Some Typeof - | T_VOID -> Some Void - | T_DELETE -> Some Delete - (* If we are 
in a unary expression context, and within an async function, - * assume that a use of "await" is intended as a keyword, not an ordinary - * identifier. This is a little bit inconsistent, since it can be used as - * an identifier in other contexts (such as a variable name), but it's how - * Babel does it. *) - | T_AWAIT when allow_await env -> Some Await - | _ -> None + Expression.Unary.( + match Peek.token env with + | T_NOT -> Some Not + | T_BIT_NOT -> Some BitNot + | T_PLUS -> Some Plus + | T_MINUS -> Some Minus + | T_TYPEOF -> Some Typeof + | T_VOID -> Some Void + | T_DELETE -> Some Delete + (* If we are in a unary expression context, and within an async function, + * assume that a use of "await" is intended as a keyword, not an ordinary + * identifier. This is a little bit inconsistent, since it can be used as + * an identifier in other contexts (such as a variable name), but it's how + * Babel does it. *) + | T_AWAIT when allow_await env -> Some Await + | _ -> None) and unary_cover env = let begin_loc = Peek.loc env in + let leading = Peek.comments env in let op = peek_unary_op env in match op with - | None -> begin - let op = Expression.Update.(match Peek.token env with - | T_INCR -> Some Increment - | T_DECR -> Some Decrement - | _ -> None) in - match op with - | None -> postfix_cover env - | Some operator -> - Eat.token env; - let end_loc, argument = with_loc unary env in - if not (is_lhs argument) - then error_at env (fst argument, Error.InvalidLHSInAssignment); - (match argument with - | _, Expression.Identifier (_, name) - when is_restricted name -> - strict_error env Error.StrictLHSPrefix - | _ -> ()); - let loc = Loc.btwn begin_loc end_loc in - Cover_expr (loc, Expression.(Update { Update. - operator; - prefix = true; - argument; - })) - end + | None -> + let op = + Expression.Update.( + match Peek.token env with + | T_INCR -> Some Increment + | T_DECR -> Some Decrement + | _ -> None) + in + (match op with + | None -> postfix_cover env + | Some operator -> + Eat.token env; + let (end_loc, argument) = with_loc unary env in + if not (is_lhs argument) then + error_at env (fst argument, Parse_error.InvalidLHSInAssignment); + (match argument with + | (_, Expression.Identifier (_, { Identifier.name; comments = _ })) when is_restricted name + -> + strict_error env Parse_error.StrictLHSPrefix + | _ -> ()); + let loc = Loc.btwn begin_loc end_loc in + Cover_expr (loc, Expression.(Update { Update.operator; prefix = true; argument }))) | Some operator -> Eat.token env; - let end_loc, argument = with_loc unary env in + let (end_loc, argument) = with_loc unary env in let loc = Loc.btwn begin_loc end_loc in - Expression.(match operator, argument with - | Unary.Delete, (_, Identifier _) -> - strict_error_at env (loc, Error.StrictDelete) - | Unary.Delete, (_, Member member) -> - begin match member.Ast.Expression.Member.property with - | Ast.Expression.Member.PropertyPrivateName _ -> - error_at env (loc, Error.PrivateDelete) - | _ -> () end - | _ -> ()); - Cover_expr (loc, Expression.(Unary { Unary. 
- operator; - prefix = true; - argument; - })) + Expression.( + match (operator, argument) with + | (Unary.Delete, (_, Identifier _)) -> strict_error_at env (loc, Parse_error.StrictDelete) + | (Unary.Delete, (_, Member member)) -> + begin + match member.Ast.Expression.Member.property with + | Ast.Expression.Member.PropertyPrivateName _ -> + error_at env (loc, Parse_error.PrivateDelete) + | _ -> () + end + | _ -> ()); + Cover_expr + ( loc, + Expression.( + Unary + { Unary.operator; argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }) + ) and unary env = as_expression env (unary_cover env) and postfix_cover env = let argument = left_hand_side_cover env in (* No line terminator allowed before operator *) - if Peek.is_line_terminator env - then argument - else let op = Expression.Update.(match Peek.token env with - | T_INCR -> Some Increment - | T_DECR -> Some Decrement - | _ -> None) in - match op with - | None -> argument - | Some operator -> + if Peek.is_line_terminator env then + argument + else + let op = + Expression.Update.( + match Peek.token env with + | T_INCR -> Some Increment + | T_DECR -> Some Decrement + | _ -> None) + in + match op with + | None -> argument + | Some operator -> let argument = as_expression env argument in - if not (is_lhs argument) - then error_at env (fst argument, Error.InvalidLHSInAssignment); + if not (is_lhs argument) then + error_at env (fst argument, Parse_error.InvalidLHSInAssignment); (match argument with - | _, Expression.Identifier (_, name) - when is_restricted name -> - strict_error env Error.StrictLHSPostfix + | (_, Expression.Identifier (_, { Identifier.name; comments = _ })) when is_restricted name + -> + strict_error env Parse_error.StrictLHSPostfix | _ -> ()); let end_loc = Peek.loc env in Eat.token env; let loc = Loc.btwn (fst argument) end_loc in - Cover_expr (loc, Expression.(Update { Update. 
- operator; - prefix = false; - argument; - })) + Cover_expr (loc, Expression.(Update { Update.operator; prefix = false; argument })) and left_hand_side_cover env = let start_loc = Peek.loc env in let allow_new = not (no_new env) in let env = with_no_new false env in - let expr = match Peek.token env with - | T_NEW when allow_new -> Cover_expr (new_expression env) - | T_IMPORT -> Cover_expr (import env) - | T_SUPER -> Cover_expr (super env) - | _ when Peek.is_function env -> Cover_expr (_function env) - | _ -> primary_cover env in + let expr = + match Peek.token env with + | T_NEW when allow_new -> Cover_expr (new_expression env) + | T_IMPORT -> Cover_expr (import env) + | T_SUPER -> Cover_expr (super env) + | _ when Peek.is_function env -> Cover_expr (_function env) + | _ -> primary_cover env + in call_cover env start_loc expr and left_hand_side env = as_expression env (left_hand_side_cover env) and super env = - let allowed, call_allowed = match allow_super env with - | No_super -> false, false - | Super_prop -> true, false - | Super_prop_or_call -> true, true + let (allowed, call_allowed) = + match allow_super env with + | No_super -> (false, false) + | Super_prop -> (true, false) + | Super_prop_or_call -> (true, true) in let loc = Peek.loc env in Expect.token env T_SUPER; - let super = loc, Expression.Super in + let super = (loc, Expression.Super) in match Peek.token env with | T_PERIOD | T_LBRACKET -> let super = - if not allowed then begin + if not allowed then ( error_at env (loc, Parse_error.UnexpectedSuper); - loc, Expression.Identifier (loc, "super") - end else + (loc, Expression.Identifier (Flow_ast_utils.ident_of_source (loc, "super"))) + ) else super in call ~allow_optional_chain:false env loc super | T_LPAREN -> let super = - if not call_allowed then begin + if not call_allowed then ( error_at env (loc, Parse_error.UnexpectedSuperCall); - loc, Expression.Identifier (loc, "super") - end else + (loc, Expression.Identifier (Flow_ast_utils.ident_of_source (loc, "super"))) + ) else super in call ~allow_optional_chain:false env loc super | _ -> - if not allowed - then error_at env (loc, Parse_error.UnexpectedSuper) - else error_unexpected env; + if not allowed then + error_at env (loc, Parse_error.UnexpectedSuper) + else + error_unexpected ~expected:"either a call or access of `super`" env; super - and import env = with_loc (fun env -> + and import env = let start_loc = Peek.loc env in Expect.token env T_IMPORT; - match Peek.token env with - | T_LPAREN -> - (* "import(...)" syntax *) - Eat.token env; - let arg = assignment (with_no_in false env) in - Expect.token env T_RPAREN; - Expression.Import arg - | T_PERIOD -> begin + + if Peek.token env = T_PERIOD then ( (* "import.meta" syntax (no other metaproperties are permitted) *) - Eat.token env; + Expect.token env T_PERIOD; + let meta = Flow_ast_utils.ident_of_source (start_loc, "import") in match Peek.token env with | T_IDENTIFIER { raw = "meta"; _ } -> - Eat.token env; - let end_loc = Peek.loc env in - Expression.(MetaProperty MetaProperty.({ - meta = start_loc, "import"; - property = end_loc, "meta"; - })) + let property = Parse.identifier env in + let end_loc = fst property in + (Loc.btwn start_loc end_loc, Expression.(MetaProperty MetaProperty.{ meta; property })) | T_IDENTIFIER _ -> error_at env (start_loc, Parse_error.InvalidImportMetaProperty); - Expression.Identifier (start_loc, "import") + Eat.token env; + (start_loc, Expression.Identifier meta) | _ -> - error_unexpected env; - Expression.Identifier (start_loc, "import") - 
end - | _ -> - error_unexpected env; - Expression.Identifier (start_loc, "import") - ) env + error_unexpected ~expected:"identifier for `import` metaproperty" env; + Eat.token env; + (start_loc, Expression.Identifier meta) + ) else ( + (* "import(...)" syntax *) + Expect.token env T_LPAREN; + let arg = assignment (with_no_in false env) in + Expect.token env T_RPAREN; + (start_loc, Expression.Import arg) + ) - and call_cover ?(allow_optional_chain=true) ?(in_optional_chain=false) env start_loc left = + and call_cover ?(allow_optional_chain = true) ?(in_optional_chain = false) env start_loc left = let left = member_cover ~allow_optional_chain ~in_optional_chain env start_loc left in let optional = last_token env = Some T_PLING_PERIOD in let arguments ?targs env = - let args_loc, arguments = arguments env in + let (args_loc, arguments) = arguments env in let loc = Loc.btwn start_loc args_loc in - let call = { Expression.Call. - callee = as_expression env left; - targs; - arguments; - } in - let call = if optional || in_optional_chain - then Expression.(OptionalCall { OptionalCall. - call; - optional; - }) - else Expression.Call call + let call = { Expression.Call.callee = as_expression env left; targs; arguments } in + let call = + if optional || in_optional_chain then + Expression.(OptionalCall { OptionalCall.call; optional }) + else + Expression.Call call in - call_cover ~allow_optional_chain ~in_optional_chain env start_loc - (Cover_expr (loc, call)) + call_cover ~allow_optional_chain ~in_optional_chain env start_loc (Cover_expr (loc, call)) in - if no_call env then left - else match Peek.token env with - | T_LPAREN -> arguments env - | T_LESS_THAN when should_parse_types env -> + if no_call env then + left + else + match Peek.token env with + | T_LPAREN -> arguments env + | T_LESS_THAN when should_parse_types env -> (* If we are parsing types, then f(e) is a function call with a type application. If we aren't, it's a nested binary expression. *) let error_callback _ _ = raise Try.Rollback in @@ -604,166 +701,211 @@ module Expression (* Parameterized call syntax is ambiguous, so we fall back to standard parsing if it fails. 
*) Try.or_else env ~fallback:left (fun env -> - let targs = Type.type_parameter_instantiation env in - arguments ?targs env - ) - | _ -> left + let targs = type_parameter_instantiation env in + arguments ?targs env) + | _ -> left - and call ?(allow_optional_chain=true) env start_loc left = + and call ?(allow_optional_chain = true) env start_loc left = as_expression env (call_cover ~allow_optional_chain env start_loc (Cover_expr left)) and new_expression env = let start_loc = Peek.loc env in + let leading = Peek.comments env in Expect.token env T_NEW; - if in_function env && Peek.token env = T_PERIOD then begin + if in_function env && Peek.token env = T_PERIOD then ( Expect.token env T_PERIOD; - let meta = start_loc, "new" in + let meta = Flow_ast_utils.ident_of_source (start_loc, "new") in match Peek.token env with | T_IDENTIFIER { raw = "target"; _ } -> let property = Parse.identifier env in let end_loc = fst property in - Loc.btwn start_loc end_loc, Expression.(MetaProperty MetaProperty.({ - meta; - property; - })) + (Loc.btwn start_loc end_loc, Expression.(MetaProperty MetaProperty.{ meta; property })) | _ -> - error_unexpected env; - Eat.token env; (* skip unknown identifier *) - start_loc, Expression.Identifier meta (* return `new` identifier *) - end else + error_unexpected ~expected:"the identifier `target`" env; + Eat.token env; + + (* skip unknown identifier *) + (start_loc, Expression.Identifier meta) + (* return `new` identifier *) + ) else let callee_loc = Peek.loc env in - let expr = match Peek.token env with - | T_NEW -> new_expression env - | T_SUPER -> super (env |> with_no_call true) - | _ when Peek.is_function env -> _function env - | _ -> primary env in + let expr = + match Peek.token env with + | T_NEW -> new_expression env + | T_SUPER -> super (env |> with_no_call true) + | _ when Peek.is_function env -> _function env + | _ -> primary env + in let callee = member ~allow_optional_chain:false (env |> with_no_call true) callee_loc expr in (* You can do something like * new raw`42` *) - let callee = match Peek.token env with - | T_TEMPLATE_PART part -> tagged_template env callee_loc callee part - | _ -> callee in + let callee = + match Peek.token env with + | T_TEMPLATE_PART part -> tagged_template env callee_loc callee part + | _ -> callee + in let targs = (* If we are parsing types, then new C(e) is a constructor with a type application. If we aren't, it's a nested binary expression. *) - if should_parse_types env - then + if should_parse_types env then (* Parameterized call syntax is ambiguous, so we fall back to standard parsing if it fails. 
*) let error_callback _ _ = raise Try.Rollback in let env = env |> with_error_callback error_callback in - Try.or_else env ~fallback:None Type.type_parameter_instantiation + Try.or_else env ~fallback:None type_parameter_instantiation else None in - let end_loc, arguments = match Peek.token env, targs with - | T_LPAREN, _ -> arguments env - | _, Some (targs_loc, _) -> targs_loc, [] - | _ -> fst callee, [] in - - Loc.btwn start_loc end_loc, Expression.(New New.({ - callee; - targs; - arguments; - })) + let (end_loc, arguments) = + match (Peek.token env, targs) with + | (T_LPAREN, _) -> arguments env + | (_, Some (targs_loc, _)) -> (targs_loc, []) + | _ -> (fst callee, []) + in + let trailing = Peek.comments env in + let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in + (Loc.btwn start_loc end_loc, Expression.(New New.{ callee; targs; arguments; comments })) + + and type_parameter_instantiation = + let args env acc = + let rec args_helper env acc = + match Peek.token env with + | T_EOF + | T_GREATER_THAN -> + List.rev acc + | _ -> + let t = + match Peek.token env with + | T_IDENTIFIER { value = "_"; _ } -> + let loc = Peek.loc env in + Expect.identifier env "_"; + Expression.TypeParameterInstantiation.Implicit loc + | _ -> Expression.TypeParameterInstantiation.Explicit (Type._type env) + in + let acc = t :: acc in + if Peek.token env <> T_GREATER_THAN then Expect.token env T_COMMA; + args_helper env acc + in + args_helper env acc + in + fun env -> + if Peek.token env = T_LESS_THAN then + Some + (with_loc + (fun env -> + Expect.token env T_LESS_THAN; + let args = args env [] in + Expect.token env T_GREATER_THAN; + args) + env) + else + None and arguments = let argument env = match Peek.token env with | T_ELLIPSIS -> - let start_loc = Peek.loc env in - Expect.token env T_ELLIPSIS; - let argument = assignment env in - let loc = Loc.btwn start_loc (fst argument) in - Expression.(Spread (loc, SpreadElement.({ - argument; - }))) + let start_loc = Peek.loc env in + Expect.token env T_ELLIPSIS; + let argument = assignment env in + let loc = Loc.btwn start_loc (fst argument) in + Expression.(Spread (loc, SpreadElement.{ argument })) | _ -> Expression.Expression (assignment env) - - in let rec arguments' env acc = + in + let rec arguments' env acc = match Peek.token env with | T_EOF - | T_RPAREN -> List.rev acc + | T_RPAREN -> + List.rev acc | _ -> - let acc = (argument env)::acc in - if Peek.token env <> T_RPAREN - then Expect.token env T_COMMA; - arguments' env acc - - in fun env -> + let acc = argument env :: acc in + if Peek.token env <> T_RPAREN then Expect.token env T_COMMA; + arguments' env acc + in + fun env -> let start_loc = Peek.loc env in Expect.token env T_LPAREN; - let args = arguments' env [] - - in let end_loc = Peek.loc env in + let args = arguments' env [] in + let end_loc = Peek.loc env in Expect.token env T_RPAREN; - Loc.btwn start_loc end_loc, args + (Loc.btwn start_loc end_loc, args) and member_cover = - let dynamic ?(allow_optional_chain=true) ?(in_optional_chain=false) - ?(optional=false) env start_loc left = + let dynamic + ?(allow_optional_chain = true) + ?(in_optional_chain = false) + ?(optional = false) + env + start_loc + left = let expr = Parse.expression (env |> with_no_call false) in let last_loc = Peek.loc env in Expect.token env T_RBRACKET; let loc = Loc.btwn start_loc last_loc in - let member = Expression.Member.({ - _object = as_expression env left; - property = PropertyExpression expr; - computed = true; - }) in - let member = if in_optional_chain 
- then Expression.(OptionalMember { OptionalMember. - member; - optional; - }) - else Expression.Member member + let member = + Expression.Member.{ _object = as_expression env left; property = PropertyExpression expr } + in + let member = + if in_optional_chain then + Expression.(OptionalMember { OptionalMember.member; optional }) + else + Expression.Member member in - call_cover ~allow_optional_chain ~in_optional_chain env start_loc - (Cover_expr (loc, member)) + call_cover ~allow_optional_chain ~in_optional_chain env start_loc (Cover_expr (loc, member)) in - let static ?(allow_optional_chain=true) ?(in_optional_chain=false) - ?(optional=false) env start_loc left = - let id_loc, id, is_private = property_name_include_private env in - if is_private then add_used_private env (snd id) id_loc; + let static + ?(allow_optional_chain = true) + ?(in_optional_chain = false) + ?(optional = false) + env + start_loc + left = + let (id_loc, id, is_private) = property_name_include_private env in + if is_private then add_used_private env (Flow_ast_utils.name_of_ident id) id_loc; let loc = Loc.btwn start_loc id_loc in - let open Expression.Member in - let property = if is_private then PropertyPrivateName (id_loc, id) - else PropertyIdentifier id in - (* super.PrivateName is a syntax error *) - begin match left with - | Cover_expr (_, Ast.Expression.Super) when is_private -> - error_at env (loc, Error.SuperPrivate) - | _ -> () end; - let member = Expression.Member.({ - _object = as_expression env left; - property; - computed = false; - }) in - let member = if in_optional_chain - then Expression.(OptionalMember { OptionalMember. - member; - optional; - }) - else Expression.Member member - in - call_cover ~allow_optional_chain ~in_optional_chain env start_loc - (Cover_expr (loc, member)) + Expression.Member.( + let property = + if is_private then + PropertyPrivateName (id_loc, id) + else + PropertyIdentifier id + in + (* super.PrivateName is a syntax error *) + begin + match left with + | Cover_expr (_, Ast.Expression.Super) when is_private -> + error_at env (loc, Parse_error.SuperPrivate) + | _ -> () + end; + let member = Expression.Member.{ _object = as_expression env left; property } in + let member = + if in_optional_chain then + Expression.(OptionalMember { OptionalMember.member; optional }) + else + Expression.Member member + in + call_cover + ~allow_optional_chain + ~in_optional_chain + env + start_loc + (Cover_expr (loc, member))) in - fun ?(allow_optional_chain=true) ?(in_optional_chain=false) env start_loc left -> + fun ?(allow_optional_chain = true) ?(in_optional_chain = false) env start_loc left -> let options = parse_options env in match Peek.token env with | T_PLING_PERIOD -> - if not options.esproposal_optional_chaining - then error env Parse_error.OptionalChainingDisabled; + if not options.esproposal_optional_chaining then + error env Parse_error.OptionalChainingDisabled; - if not allow_optional_chain - then error env Parse_error.OptionalChainNew; + if not allow_optional_chain then error env Parse_error.OptionalChainNew; - Expect.token env T_PLING_PERIOD; - begin match Peek.token env with + Expect.token env T_PLING_PERIOD; + begin + match Peek.token env with | T_TEMPLATE_PART _ -> error env Parse_error.OptionalChainTemplate; left @@ -771,226 +913,322 @@ module Expression | T_LESS_THAN when should_parse_types env -> left | T_LBRACKET -> Expect.token env T_LBRACKET; - dynamic ~allow_optional_chain ~in_optional_chain:true - ~optional:true env start_loc left + dynamic ~allow_optional_chain 
~in_optional_chain:true ~optional:true env start_loc left | _ -> - static ~allow_optional_chain ~in_optional_chain:true - ~optional:true env start_loc left - end + static ~allow_optional_chain ~in_optional_chain:true ~optional:true env start_loc left + end | T_LBRACKET -> - Expect.token env T_LBRACKET; - dynamic ~allow_optional_chain ~in_optional_chain env start_loc left + Expect.token env T_LBRACKET; + dynamic ~allow_optional_chain ~in_optional_chain env start_loc left | T_PERIOD -> - Expect.token env T_PERIOD; - static ~allow_optional_chain ~in_optional_chain env start_loc left + Expect.token env T_PERIOD; + static ~allow_optional_chain ~in_optional_chain env start_loc left | T_TEMPLATE_PART part -> - if in_optional_chain - then error env Parse_error.OptionalChainTemplate; + if in_optional_chain then error env Parse_error.OptionalChainTemplate; - let expr = tagged_template env start_loc (as_expression env left) part in - call_cover ~allow_optional_chain:false env start_loc (Cover_expr expr) + let expr = tagged_template env start_loc (as_expression env left) part in + call_cover ~allow_optional_chain:false env start_loc (Cover_expr expr) | _ -> left - and member ?(allow_optional_chain=true) env start_loc left = + and member ?(allow_optional_chain = true) env start_loc left = as_expression env (member_cover ~allow_optional_chain env start_loc (Cover_expr left)) and _function env = - let start_loc = Peek.loc env in - let async = Declaration.async env in - Expect.token env T_FUNCTION; - let generator = Declaration.generator env in - let yield, await = match async, generator with - | true, true -> true, true (* proposal-async-iteration/#prod-AsyncGeneratorExpression *) - | true, false -> false, true (* #prod-AsyncFunctionExpression *) - | false, true -> true, false (* #prod-GeneratorExpression *) - | false, false -> false, false (* #prod-FunctionExpression *) - in - let id, tparams = - if Peek.token env = T_LPAREN - then None, None - else begin - let id = match Peek.token env with - | T_LESS_THAN -> None - | _ -> - let env = env |> with_allow_await await |> with_allow_yield yield in - Some (Parse.identifier ~restricted_error:Error.StrictFunctionName env) in - id, Type.type_parameter_declaration env - end in - - (* #sec-function-definitions-static-semantics-early-errors *) - let env = env |> with_allow_super No_super in - - let params = Declaration.function_params ~await ~yield env in - let return, predicate = Type.annotation_and_predicate_opt env in - let end_loc, body, strict = - Declaration.function_body env ~async ~generator in - let simple = Declaration.is_simple_function_params params in - Declaration.strict_post_check env ~strict ~simple id params; - let expression = Function.( - match body with - | BodyBlock _ -> false - | BodyExpression _ -> true) in - Loc.btwn start_loc end_loc, Expression.(Function Function.({ - id; - params; - body; - generator; - async; - predicate; - expression; - return; - tparams; - })) + with_loc + (fun env -> + let async = Declaration.async env in + let (sig_loc, (id, params, generator, predicate, return, tparams)) = + with_loc + (fun env -> + Expect.token env T_FUNCTION; + let generator = Declaration.generator env in + (* `await` is a keyword in async functions: + - proposal-async-iteration/#prod-AsyncGeneratorExpression + - #prod-AsyncFunctionExpression *) + let await = async in + (* `yield` is a keyword in generator functions: + - proposal-async-iteration/#prod-AsyncGeneratorExpression + - #prod-GeneratorExpression *) + let yield = generator in + let (id, 
tparams) = + if Peek.token env = T_LPAREN then + (None, None) + else + let id = + match Peek.token env with + | T_LESS_THAN -> None + | _ -> + let env = env |> with_allow_await await |> with_allow_yield yield in + Some (Parse.identifier ~restricted_error:Parse_error.StrictFunctionName env) + in + (id, Type.type_parameter_declaration env) + in + (* #sec-function-definitions-static-semantics-early-errors *) + let env = env |> with_allow_super No_super in + let params = Declaration.function_params ~await ~yield env in + let (return, predicate) = Type.annotation_and_predicate_opt env in + (id, params, generator, predicate, return, tparams)) + env + in + let (body, strict) = Declaration.function_body env ~async ~generator in + let simple = Declaration.is_simple_function_params params in + Declaration.strict_post_check env ~strict ~simple id params; + Expression.Function + { Function.id; params; body; generator; async; predicate; return; tparams; sig_loc }) + env and number env kind raw = - let value = match kind with - | LEGACY_OCTAL -> - strict_error env Error.StrictOctalLiteral; - begin try Int64.to_float (Int64.of_string ("0o"^raw)) - with Failure _ -> failwith ("Invalid legacy octal "^raw) - end - | BINARY - | OCTAL -> - begin try Int64.to_float (Int64.of_string raw) - with Failure _ -> failwith ("Invalid binary/octal "^raw) - end - | NORMAL -> - begin try Lexer.FloatOfString.float_of_string raw - with - | _ when Sys.win32 -> - error env Parse_error.WindowsFloatOfString; - 789.0 - | Failure _ -> - failwith ("Invalid number "^raw) - end + let value = + match kind with + | LEGACY_OCTAL -> + strict_error env Parse_error.StrictOctalLiteral; + begin + try Int64.to_float (Int64.of_string ("0o" ^ raw)) + with Failure _ -> failwith ("Invalid legacy octal " ^ raw) + end + | LEGACY_NON_OCTAL -> + strict_error env Parse_error.StrictNonOctalLiteral; + begin + try float_of_string raw with Failure _ -> failwith ("Invalid number " ^ raw) + end + | BINARY + | OCTAL -> + begin + try Int64.to_float (Int64.of_string raw) + with Failure _ -> failwith ("Invalid binary/octal " ^ raw) + end + | NORMAL -> + begin + try float_of_string raw with Failure _ -> failwith ("Invalid number " ^ raw) + end in Expect.token env (T_NUMBER { kind; raw }); value + and bigint_strip_n raw = + let size = String.length raw in + let str = + if size != 0 && raw.[size - 1] == 'n' then + String.sub raw 0 (size - 1) + else + raw + in + str + + and bigint env kind raw = + let value = + match kind with + | BIG_BINARY + | BIG_OCTAL -> + let postraw = bigint_strip_n raw in + begin + try Int64.to_float (Int64.of_string postraw) + with Failure _ -> failwith ("Invalid bigint binary/octal " ^ postraw) + end + | BIG_NORMAL -> + let postraw = bigint_strip_n raw in + begin + try float_of_string postraw with Failure _ -> failwith ("Invalid bigint " ^ postraw) + end + in + Expect.token env (T_BIGINT { kind; raw }); + value + and primary_cover env = let loc = Peek.loc env in - match Peek.token env with + let leading = Peek.comments env in + let tkn = Peek.token env in + match tkn with | T_THIS -> - Expect.token env T_THIS; - Cover_expr (loc, Expression.This) + Expect.token env T_THIS; + Cover_expr (loc, Expression.This) | T_NUMBER { kind; raw } -> - let value = Literal.Number (number env kind raw) in - Cover_expr (loc, Expression.(Literal { Literal.value; raw; })) + let value = Literal.Number (number env kind raw) in + let trailing = Peek.comments env in + Cover_expr + ( loc, + Expression.( + Literal + { + Literal.value; + raw; + comments = 
Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) ) + | T_BIGINT { kind; raw } -> + let value = Literal.BigInt (bigint env kind raw) in + let trailing = Peek.comments env in + Cover_expr + ( loc, + Expression.( + Literal + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) ) | T_STRING (loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; - Expect.token env (T_STRING (loc, value, raw, octal)); - let value = Literal.String value in - Cover_expr (loc, Expression.(Literal { Literal.value; raw; })) + if octal then strict_error env Parse_error.StrictOctalLiteral; + Expect.token env (T_STRING (loc, value, raw, octal)); + let value = Literal.String value in + let trailing = Peek.comments env in + Cover_expr + ( loc, + Expression.( + Literal + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) ) | (T_TRUE | T_FALSE) as token -> - Expect.token env token; - let truthy = token = T_TRUE in - let raw = if truthy then "true" else "false" in - let value = Literal.Boolean truthy in - Cover_expr (loc, Expression.(Literal { Literal.value; raw; })) + Expect.token env token; + let truthy = token = T_TRUE in + let raw = + if truthy then + "true" + else + "false" + in + let value = Literal.Boolean truthy in + let trailing = Peek.comments env in + Cover_expr + ( loc, + Expression.( + Literal + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) ) | T_NULL -> - Expect.token env T_NULL; - let raw = "null" in - let value = Literal.Null in - Cover_expr (loc, Expression.(Literal { Literal.value; raw; })) + Expect.token env T_NULL; + let raw = "null" in + let value = Literal.Null in + let trailing = Peek.comments env in + Cover_expr + ( loc, + Expression.( + Literal + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) ) | T_LPAREN -> Cover_expr (group env) | T_LCURLY -> - let loc, obj, errs = Parse.object_initializer env in - Cover_patt ((loc, Expression.Object obj), errs) + let (loc, obj, errs) = Parse.object_initializer env in + Cover_patt ((loc, Expression.Object obj), errs) | T_LBRACKET -> - let loc, arr, errs = array_initializer env in - Cover_patt ((loc, Expression.Array arr), errs) + let (loc, arr, errs) = array_initializer env in + Cover_patt ((loc, Expression.Array arr), errs) | T_DIV - | T_DIV_ASSIGN -> Cover_expr (regexp env) + | T_DIV_ASSIGN -> + Cover_expr (regexp env) | T_LESS_THAN -> - let loc, expression = match Parse.jsx_element_or_fragment env with - | (loc, `Element e) -> (loc, Expression.JSXElement e) - | (loc, `Fragment f) -> (loc, Expression.JSXFragment f) in - Cover_expr (loc, expression) + let (loc, expression) = + match Parse.jsx_element_or_fragment env with + | (loc, `Element e) -> (loc, Expression.JSXElement e) + | (loc, `Fragment f) -> (loc, Expression.JSXFragment f) + in + Cover_expr (loc, expression) | T_TEMPLATE_PART part -> - let loc, template = template_literal env part in - Cover_expr (loc, Expression.TemplateLiteral template) + let (loc, template) = template_literal env part in + Cover_expr (loc, Expression.TemplateLiteral template) | T_CLASS -> Cover_expr (Parse.class_expression env) | _ when Peek.is_identifier env -> - let id = Parse.identifier env in - Cover_expr (fst id, Expression.Identifier id) + let id = Parse.identifier env in + Cover_expr (fst id, Expression.Identifier id) | t -> - error_unexpected env; - (* Let's get rid of the bad token *) - begin 
match t with T_ERROR _ -> Eat.token env | _ -> () end; - (* Really no idea how to recover from this. I suppose a null - * expression is as good as anything *) - let value = Literal.Null in - let raw = "null" in - Cover_expr (loc, Expression.(Literal { Literal.value; raw; })) + error_unexpected env; + + (* Let's get rid of the bad token *) + begin + match t with + | T_ERROR _ -> Eat.token env + | _ -> () + end; + + (* Really no idea how to recover from this. I suppose a null + * expression is as good as anything *) + let value = Literal.Null in + let raw = "null" in + let trailing = [] in + Cover_expr + ( loc, + Expression.( + Literal + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) ) and primary env = as_expression env (primary_cover env) and template_literal = let rec template_parts env quasis expressions = let expr = Parse.expression env in - let expressions = expr::expressions in + let expressions = expr :: expressions in match Peek.token env with | T_RCURLY -> - Eat.push_lex_mode env Lex_mode.TEMPLATE; - let loc, part, is_tail = match Peek.token env with - | T_TEMPLATE_PART (loc, {cooked; raw; _}, tail) -> - let open Ast.Expression.TemplateLiteral in + Eat.push_lex_mode env Lex_mode.TEMPLATE; + let (loc, part, is_tail) = + match Peek.token env with + | T_TEMPLATE_PART (loc, { cooked; raw; _ }, tail) -> + Ast.Expression.TemplateLiteral.( Eat.token env; - loc, { Element.value = { Element.cooked; raw; }; tail; }, tail - | _ -> assert false in - Eat.pop_lex_mode env; - let quasis = (loc, part)::quasis in - if is_tail - then loc, List.rev quasis, List.rev expressions - else template_parts env quasis expressions + (loc, { Element.value = { Element.cooked; raw }; tail }, tail)) + | _ -> assert false + in + Eat.pop_lex_mode env; + let quasis = (loc, part) :: quasis in + if is_tail then + (loc, List.rev quasis, List.rev expressions) + else + template_parts env quasis expressions | _ -> - (* Malformed template *) - error_unexpected env; - let imaginary_quasi = fst expr, { Expression.TemplateLiteral.Element. - value = { Expression.TemplateLiteral.Element. - raw = ""; - cooked = ""; - }; - tail = true; - } in - fst expr, List.rev (imaginary_quasi::quasis), List.rev expressions - - in fun env ((start_loc, {cooked; raw; _}, is_tail) as part) -> + (* Malformed template *) + error_unexpected ~expected:"a template literal part" env; + let imaginary_quasi = + ( fst expr, + { + Expression.TemplateLiteral.Element.value = + { Expression.TemplateLiteral.Element.raw = ""; cooked = "" }; + tail = true; + } ) + in + (fst expr, List.rev (imaginary_quasi :: quasis), List.rev expressions) + in + fun env ((start_loc, { cooked; raw; _ }, is_tail) as part) -> Expect.token env (T_TEMPLATE_PART part); - let end_loc, quasis, expressions = - let head = Ast.Expression.TemplateLiteral.(start_loc, { - Element.value = { Element.cooked; raw; }; - tail = is_tail; - }) in - if is_tail - then start_loc, [head], [] - else template_parts env [head] [] in + let (end_loc, quasis, expressions) = + let head = + Ast.Expression.TemplateLiteral. 
+ (start_loc, { Element.value = { Element.cooked; raw }; tail = is_tail }) + in + if is_tail then + (start_loc, [head], []) + else + template_parts env [head] [] + in let loc = Loc.btwn start_loc end_loc in - loc, Expression.TemplateLiteral.({ - quasis; - expressions; - }) + (loc, Expression.TemplateLiteral.{ quasis; expressions }) and tagged_template env start_loc tag part = let quasi = template_literal env part in - Loc.btwn start_loc (fst quasi), Expression.(TaggedTemplate TaggedTemplate.({ - tag; - quasi; - })) + (Loc.btwn start_loc (fst quasi), Expression.(TaggedTemplate TaggedTemplate.{ tag; quasi })) and group env = Expect.token env T_LPAREN; let expression = assignment env in - let ret = (match Peek.token env with - | T_COMMA -> sequence env [expression] - | T_COLON -> + let ret = + match Peek.token env with + | T_COMMA -> sequence env [expression] + | T_COLON -> let annot = Type.annotation env in - Expression.(Loc.btwn (fst expression) (fst annot), - TypeCast TypeCast.({ - expression; - annot; - })) - | _ -> expression) in + Expression.(Loc.btwn (fst expression) (fst annot), TypeCast TypeCast.{ expression; annot }) + | _ -> expression + in Expect.token env T_RPAREN; ret @@ -998,185 +1236,235 @@ module Expression let rec elements env (acc, errs) = match Peek.token env with | T_EOF - | T_RBRACKET -> List.rev acc, Pattern_cover.rev_errors errs + | T_RBRACKET -> + (List.rev acc, Pattern_cover.rev_errors errs) | T_COMMA -> - Expect.token env T_COMMA; - elements env (None::acc, errs) + Expect.token env T_COMMA; + elements env (None :: acc, errs) | T_ELLIPSIS -> - let loc, (argument, new_errs) = with_loc (fun env -> - Expect.token env T_ELLIPSIS; - match assignment_cover env with - | Cover_expr argument -> argument, Pattern_cover.empty_errors - | Cover_patt (argument, new_errs) -> argument, new_errs - ) env in - let elem = Expression.(Spread (loc, SpreadElement.({ - argument; - }))) in - let is_last = Peek.token env = T_RBRACKET in - - (* if this array is interpreted as a pattern, the spread becomes an AssignmentRestElement + let (loc, (argument, new_errs)) = + with_loc + (fun env -> + Expect.token env T_ELLIPSIS; + match assignment_cover env with + | Cover_expr argument -> (argument, Pattern_cover.empty_errors) + | Cover_patt (argument, new_errs) -> (argument, new_errs)) + env + in + let elem = Expression.(Spread (loc, SpreadElement.{ argument })) in + let is_last = Peek.token env = T_RBRACKET in + (* if this array is interpreted as a pattern, the spread becomes an AssignmentRestElement which must be the last element. We can easily error about additional elements since they will be in the element list, but a trailing elision, like `[...x,]`, is not part of the AST. so, keep track of the error so we can raise it if this is a pattern. 
*) - let new_errs = - if not is_last && Peek.ith_token ~i:1 env = T_RBRACKET then - let if_patt = (loc, Parse_error.ElementAfterRestElement)::new_errs.if_patt in - { new_errs with if_patt } - else new_errs - in - - if not is_last then Expect.token env T_COMMA; - let acc = Some elem :: acc in - let errs = Pattern_cover.rev_append_errors new_errs errs in - elements env (acc, errs) + let new_errs = + if (not is_last) && Peek.ith_token ~i:1 env = T_RBRACKET then + let if_patt = (loc, Parse_error.ElementAfterRestElement) :: new_errs.if_patt in + { new_errs with if_patt } + else + new_errs + in + if not is_last then Expect.token env T_COMMA; + let acc = Some elem :: acc in + let errs = Pattern_cover.rev_append_errors new_errs errs in + elements env (acc, errs) | _ -> - let elem, new_errs = match assignment_cover env with - | Cover_expr elem -> elem, Pattern_cover.empty_errors - | Cover_patt (elem, new_errs) -> elem, new_errs - in - if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA; - let acc = Some (Expression.Expression elem) :: acc in - let errs = Pattern_cover.rev_append_errors new_errs errs in - elements env (acc, errs) - - in fun env -> - let loc, (elements, errs) = with_loc (fun env -> - Expect.token env T_LBRACKET; - let res = elements env ([], Pattern_cover.empty_errors) in - Expect.token env T_RBRACKET; - res - ) env in - loc, { Expression.Array.elements; }, errs + let (elem, new_errs) = + match assignment_cover env with + | Cover_expr elem -> (elem, Pattern_cover.empty_errors) + | Cover_patt (elem, new_errs) -> (elem, new_errs) + in + if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA; + let acc = Some (Expression.Expression elem) :: acc in + let errs = Pattern_cover.rev_append_errors new_errs errs in + elements env (acc, errs) + in + fun env -> + let (loc, (expr, errs)) = + with_loc + (fun env -> + let leading = Peek.comments env in + Expect.token env T_LBRACKET; + let (elems, errs) = elements env ([], Pattern_cover.empty_errors) in + Expect.token env T_RBRACKET; + let trailing = Peek.comments env in + ( { + Ast.Expression.Array.elements = elems; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }, + errs )) + env + in + (loc, expr, errs) and regexp env = Eat.push_lex_mode env Lex_mode.REGEXP; let loc = Peek.loc env in - let raw, pattern, raw_flags = match Peek.token env with + let leading = Peek.comments env in + let tkn = Peek.token env in + let trailing = Peek.comments env in + let (raw, pattern, raw_flags) = + match tkn with | T_REGEXP (_, pattern, flags) -> - Eat.token env; - let raw = "/" ^ pattern ^ "/" ^ flags in - raw, pattern, flags - | _ -> assert false in + Eat.token env; + let raw = "/" ^ pattern ^ "/" ^ flags in + (raw, pattern, flags) + | _ -> assert false + in Eat.pop_lex_mode env; let filtered_flags = Buffer.create (String.length raw_flags) in - String.iter (function - | 'g' | 'i' | 'm' | 's' | 'u' | 'y' as c -> Buffer.add_char filtered_flags c - | _ -> ()) raw_flags; + String.iter + (function + | ('g' | 'i' | 'm' | 's' | 'u' | 'y') as c -> Buffer.add_char filtered_flags c + | _ -> ()) + raw_flags; let flags = Buffer.contents filtered_flags in - if flags <> raw_flags - then error env (Error.InvalidRegExpFlags raw_flags); - let value = Literal.(RegExp { RegExp.pattern; flags; }) in - loc, Expression.(Literal { Literal.value; raw; }) + if flags <> raw_flags then error env (Parse_error.InvalidRegExpFlags raw_flags); + let value = Literal.(RegExp { RegExp.pattern; flags }) in + ( loc, + Expression.( + Literal + { Literal.value; 
raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }) + ) and try_arrow_function = (* Certain errors (almost all errors) cause a rollback *) - let error_callback _ = Error.(function - (* Don't rollback on these errors. *) - | StrictParamName - | StrictReservedWord - | ParameterAfterRestParameter - | NewlineBeforeArrow - | YieldInFormalParameters -> () - (* Everything else causes a rollback *) - | _ -> raise Try.Rollback) in - + let error_callback _ = + Parse_error.( + function + (* Don't rollback on these errors. *) + | StrictParamName + | StrictReservedWord + | ParameterAfterRestParameter + | NewlineBeforeArrow + | YieldInFormalParameters -> + () + (* Everything else causes a rollback *) + | _ -> raise Try.Rollback) + in + let concise_function_body env ~async = + (* arrow functions can't be generators *) + let env = enter_function env ~async ~generator:false in + match Peek.token env with + | T_LCURLY -> + let (loc, body, strict) = Parse.function_block_body env in + (Function.BodyBlock (loc, body), strict) + | _ -> + let expr = Parse.assignment env in + (Function.BodyExpression expr, in_strict_mode env) + in fun env -> let env = env |> with_error_callback error_callback in - let start_loc = Peek.loc env in (* a T_ASYNC could either be a parameter name or it could be indicating * that it's an async function *) let async = Peek.ith_token ~i:1 env <> T_ARROW && Declaration.async env in - let tparams = Type.type_parameter_declaration env in - let params, return, predicate = - (* Disallow all fancy features for identifier => body *) - if Peek.is_identifier env && tparams = None - then - let loc, name = - Parse.identifier ~restricted_error:Error.StrictParamName env in - let param = loc, Pattern.Identifier { - Pattern.Identifier.name = loc, name; - annot=None; - optional=false; - } in - (loc, { Ast.Function.Params.params = [param]; rest = None }), - (* FIXME(festevezga) D9545732 add proper location *) Ast.Function.Missing Loc.none, - None - else - let params = - let yield = allow_yield env in - let await = allow_await env in - Declaration.function_params ~await ~yield env - in - (* There's an ambiguity if you use a function type as the return - * type for an arrow function. So we disallow anonymous function - * types in arrow function return types unless the function type is - * enclosed in parens *) - let return, predicate = env - |> with_no_anon_function_type true - |> Type.annotation_and_predicate_opt in - params, return, predicate in - + let (sig_loc, (tparams, params, return, predicate)) = + with_loc + (fun env -> + let tparams = Type.type_parameter_declaration env in + (* Disallow all fancy features for identifier => body *) + if Peek.is_identifier env && tparams = None then + let ((loc, _) as name) = + Parse.identifier ~restricted_error:Parse_error.StrictParamName env + in + let param = + ( loc, + { + Ast.Function.Param.argument = + ( loc, + Pattern.Identifier + { + Pattern.Identifier.name; + annot = Ast.Type.Missing (Peek.loc_skip_lookahead env); + optional = false; + } ); + default = None; + } ) + in + ( tparams, + (loc, { Ast.Function.Params.params = [param]; rest = None }), + Ast.Type.Missing Loc.{ loc with start = loc._end }, + None ) + else + let params = + let yield = allow_yield env in + let await = allow_await env in + Declaration.function_params ~await ~yield env + in + (* There's an ambiguity if you use a function type as the return + * type for an arrow function. 
So we disallow anonymous function + * types in arrow function return types unless the function type is + * enclosed in parens *) + let (return, predicate) = + env |> with_no_anon_function_type true |> Type.annotation_and_predicate_opt + in + (tparams, params, return, predicate)) + env + in (* It's hard to tell if an invalid expression was intended to be an * arrow function before we see the =>. If there are no params, that * implies "()" which is only ever found in arrow params. Similarly, * rest params indicate arrow functions. Therefore, if we see a rest * param or an empty param list then we can disable the rollback and * instead generate errors as if we were parsing an arrow function *) - let env = match params with - | _, { Ast.Function.Params.rest = Some _; _ } - | _, { Ast.Function.Params.params = []; _ } -> without_error_callback env + let env = + match params with + | (_, { Ast.Function.Params.rest = Some _; _ }) + | (_, { Ast.Function.Params.params = []; _ }) -> + without_error_callback env | _ -> env in - - if Peek.is_line_terminator env && Peek.token env = T_ARROW - then error env Error.NewlineBeforeArrow; + if Peek.is_line_terminator env && Peek.token env = T_ARROW then + error env Parse_error.NewlineBeforeArrow; Expect.token env T_ARROW; (* Now we know for sure this is an arrow function *) let env = without_error_callback env in - - let end_loc, (body, strict) = with_loc - (Declaration.concise_function_body ~async ~generator:false) - env - in + let (end_loc, (body, strict)) = with_loc (concise_function_body ~async) env in let simple = Declaration.is_simple_function_params params in Declaration.strict_post_check env ~strict ~simple None params; - let expression = Function.( - match body with - | BodyBlock _ -> false - | BodyExpression _ -> true) in let loc = Loc.btwn start_loc end_loc in - Cover_expr (loc, Expression.(ArrowFunction { Function. 
- id = None; - params; - body; - async; - generator = false; (* arrow functions cannot be generators *) - predicate; - expression; - return; - tparams; - })) + Cover_expr + ( loc, + Expression.( + ArrowFunction + { + Function.id = None; + params; + body; + async; + generator = false; + (* arrow functions cannot be generators *) + predicate; + return; + tparams; + sig_loc; + }) ) and sequence env acc = match Peek.token env with | T_COMMA -> - Expect.token env T_COMMA; - let expr = assignment env in - sequence env (expr::acc) + Expect.token env T_COMMA; + let expr = assignment env in + sequence env (expr :: acc) | _ -> let (last_loc, _) = List.hd acc in let expressions = List.rev acc in let (first_loc, _) = List.hd expressions in - Loc.btwn first_loc last_loc, Expression.(Sequence Sequence.({ - expressions; - })) + (Loc.btwn first_loc last_loc, Expression.(Sequence Sequence.{ expressions })) and property_name_include_private env = let start_loc = Peek.loc env in - let is_private = Expect.maybe env T_POUND in - let id_loc, ident = identifier_name env in - let loc = Loc.btwn start_loc id_loc in - loc, (id_loc, ident), is_private + let (loc, (is_private, id)) = + with_loc + (fun env -> + let is_private = Expect.maybe env T_POUND in + let id = identifier_name env in + (is_private, id)) + env + in + if is_private && start_loc.Loc._end <> (fst id).Loc.start then + error_at env (loc, Parse_error.WhitespaceInPrivateName); + (loc, id, is_private) end diff --git a/src/parser/file_key.ml b/src/parser/file_key.ml index b664e622d69..93cbbf52006 100644 --- a/src/parser/file_key.ml +++ b/src/parser/file_key.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -13,14 +13,22 @@ type t = these, just check that they exist *) | ResourceFile of string | Builtins - [@@deriving show] +[@@deriving show] let to_string = function - | LibFile x | SourceFile x | JsonFile x | ResourceFile x -> x + | LibFile x + | SourceFile x + | JsonFile x + | ResourceFile x -> + x | Builtins -> "(global)" let to_path = function - | LibFile x | SourceFile x | JsonFile x | ResourceFile x -> Ok x + | LibFile x + | SourceFile x + | JsonFile x + | ResourceFile x -> + Ok x | Builtins -> Error "File key refers to a builtin" let compare = @@ -28,23 +36,25 @@ let compare = JSON files are basically source files. 
We don't actually read resource files so they come last *) let order_of_filename = function - | Builtins -> 1 - | LibFile _ -> 2 - | SourceFile _ -> 3 - | JsonFile _ -> 3 - | ResourceFile _ -> 4 + | Builtins -> 1 + | LibFile _ -> 2 + | SourceFile _ -> 3 + | JsonFile _ -> 3 + | ResourceFile _ -> 4 in fun a b -> - let k = (order_of_filename a) - (order_of_filename b) in - if k <> 0 then k - else String.compare (to_string a) (to_string b) + let k = order_of_filename a - order_of_filename b in + if k <> 0 then + k + else + String.compare (to_string a) (to_string b) let compare_opt a b = - match a, b with - | Some _, None -> -1 - | None, Some _ -> 1 - | None, None -> 0 - | Some a, Some b -> compare a b + match (a, b) with + | (Some _, None) -> -1 + | (None, Some _) -> 1 + | (None, None) -> 0 + | (Some a, Some b) -> compare a b let is_lib_file = function | LibFile _ -> true @@ -64,14 +74,12 @@ let exists f = function | LibFile filename | SourceFile filename | JsonFile filename - | ResourceFile filename -> f filename + | ResourceFile filename -> + f filename | Builtins -> false -let check_suffix filename suffix = - exists (fun fn -> Filename.check_suffix fn suffix) filename +let check_suffix filename suffix = exists (fun fn -> Filename.check_suffix fn suffix) filename -let chop_suffix filename suffix = - map (fun fn -> Filename.chop_suffix fn suffix) filename +let chop_suffix filename suffix = map (fun fn -> Filename.chop_suffix fn suffix) filename -let with_suffix filename suffix = - map (fun fn -> fn ^ suffix) filename +let with_suffix filename suffix = map (fun fn -> fn ^ suffix) filename diff --git a/src/parser/flow_ast.ml b/src/parser/flow_ast.ml index 0f74faf40a6..5a1d507f697 100644 --- a/src/parser/flow_ast.ml +++ b/src/parser/flow_ast.ml @@ -1,24 +1,41 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +[%%gen (* * An Ocaml implementation of the SpiderMonkey Parser API * https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API *) -module%gen rec Identifier : sig - type 'M t = 'M * string +module rec Syntax : sig + type ('M, 'internal) t = { + leading: 'M Comment.t list; + trailing: 'M Comment.t list; + internal: 'internal; + } [@@deriving show] -end = Identifier +end = + Syntax -and PrivateName : sig - type 'M t = 'M * 'M Identifier.t +and Identifier : sig + type ('M, 'T) t = 'T * 'M t' + + and 'M t' = { + name: string; + comments: ('M, unit) Syntax.t option; + } [@@deriving show] -end = PrivateName +end = + Identifier + +and PrivateName : sig + type 'M t = 'M * ('M, 'M) Identifier.t [@@deriving show] +end = + PrivateName and Literal : sig module RegExp : sig @@ -30,18 +47,22 @@ and Literal : sig end (* Literals also carry along their raw value *) - type t = { + type 'M t = { value: value; raw: string; + comments: ('M, unit) Syntax.t option; } + and value = | String of string | Boolean of bool | Null | Number of float + | BigInt of float | RegExp of RegExp.t [@@deriving show] -end = Literal +end = + Literal and StringLiteral : sig type t = { @@ -49,7 +70,8 @@ and StringLiteral : sig raw: string; } [@@deriving show] -end = StringLiteral +end = + StringLiteral and NumberLiteral : sig type t = { @@ -57,40 +79,58 @@ and NumberLiteral : sig raw: string; } [@@deriving show] -end = NumberLiteral +end = + NumberLiteral + +and BigIntLiteral : sig + type t = { + approx_value: float; + (* Warning! 
Might lose precision! *) + bigint: string; + } + [@@deriving show] +end = + BigIntLiteral and Variance : sig type 'M t = 'M * t' - and t' = Plus | Minus + + and t' = + | Plus + | Minus [@@deriving show] -end = Variance +end = + Variance and Type : sig module Function : sig module Param : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { - name: 'T Identifier.t option; + name: ('M, 'T) Identifier.t option; annot: ('M, 'T) Type.t; optional: bool; } [@@deriving show] end + module RestParam : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - argument: ('M, 'T) Param.t - } - [@@deriving show] + + and ('M, 'T) t' = { argument: ('M, 'T) Param.t } [@@deriving show] end + module Params : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { params: ('M, 'T) Param.t list; rest: ('M, 'T) RestParam.t option; } [@@deriving show] end + type ('M, 'T) t = { tparams: ('M, 'T) Type.ParameterDeclaration.t option; params: ('M, 'T) Params.t; @@ -102,15 +142,18 @@ and Type : sig module Generic : sig module Identifier : sig type ('M, 'T) t = - | Unqualified of 'T Identifier.t + | Unqualified of ('M, 'T) Identifier.t | Qualified of ('M, 'T) qualified + and ('M, 'T) qualified = 'M * ('M, 'T) qualified' + and ('M, 'T) qualified' = { qualification: ('M, 'T) t; - id: 'T Identifier.t + id: ('M, 'T) Identifier.t; } [@@deriving show] end + type ('M, 'T) t = { id: ('M, 'T) Identifier.t; targs: ('M, 'T) Type.ParameterInstantiation.t option; @@ -121,6 +164,7 @@ and Type : sig module Object : sig module Property : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { key: ('M, 'T) Expression.Object.Property.key; value: ('M, 'T) value; @@ -130,42 +174,47 @@ and Type : sig _method: bool; variance: 'M Variance.t option; } + and ('M, 'T) value = | Init of ('M, 'T) Type.t | Get of ('M * ('M, 'T) Function.t) | Set of ('M * ('M, 'T) Function.t) [@@deriving show] end + module SpreadProperty : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - argument: ('M, 'T) Type.t; - } - [@@deriving show] + + and ('M, 'T) t' = { argument: ('M, 'T) Type.t } [@@deriving show] end - module Indexer: sig + + module Indexer : sig type ('M, 'T) t' = { - id: 'M Identifier.t option; + id: ('M, 'M) Identifier.t option; key: ('M, 'T) Type.t; value: ('M, 'T) Type.t; static: bool; variance: 'M Variance.t option; } - and ('M, 'T) t = 'M * ('M, 'T) t' - [@@deriving show] + + and ('M, 'T) t = 'M * ('M, 'T) t' [@@deriving show] end - module CallProperty: sig + + module CallProperty : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { value: 'M * ('M, 'T) Function.t; static: bool; } [@@deriving show] end - module InternalSlot: sig + + module InternalSlot : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { - id: 'M Identifier.t; + id: ('M, 'M) Identifier.t; value: ('M, 'T) Type.t; optional: bool; static: bool; @@ -176,8 +225,17 @@ and Type : sig type ('M, 'T) t = { exact: bool; + (* Inexact indicates the presence of ... in the object. It is more + * easily understood if exact is read as "explicitly exact" and "inexact" + * is read as "explicitly inexact". + * + * This confusion will go away when we get rid of the exact flag in favor + * of inexact as part of the work to make object types exact by default. 
+ * *) + inexact: bool; properties: ('M, 'T) property list; } + and ('M, 'T) property = | Property of ('M, 'T) Property.t | SpreadProperty of ('M, 'T) SpreadProperty.t @@ -196,6 +254,7 @@ and Type : sig end type ('M, 'T) t = 'T * ('M, 'T) t' + (* Yes, we could add a little complexity here to show that Any and Void * should never be declared nullable, but that check can happen later *) and ('M, 'T) t' = @@ -205,6 +264,7 @@ and Type : sig | Void | Null | Number + | BigInt | String | Boolean | Nullable of ('M, 'T) t @@ -219,6 +279,7 @@ and Type : sig | Tuple of ('M, 'T) t list | StringLiteral of StringLiteral.t | NumberLiteral of NumberLiteral.t + | BigIntLiteral of BigIntLiteral.t | BooleanLiteral of bool | Exists @@ -228,73 +289,85 @@ and Type : sig * Type.annotation with a location from column 6-14 *) and ('M, 'T) annotation = 'M * ('M, 'T) t + and ('M, 'T) annotation_or_hint = + | Missing of 'T + | Available of ('M, 'T) Type.annotation [@@deriving show] module ParameterDeclaration : sig module TypeParam : sig type ('M, 'T) t = 'T * ('M, 'T) t' + and ('M, 'T) t' = { - name: 'T Identifier.t; - bound: ('M, 'T) Type.annotation option; + name: ('M, 'T) Identifier.t; + bound: ('M, 'T) Type.annotation_or_hint; variance: 'M Variance.t option; default: ('M, 'T) Type.t option; } [@@deriving show] end + type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = ('M, 'T) TypeParam.t list - [@@deriving show] + + and ('M, 'T) t' = ('M, 'T) TypeParam.t list [@@deriving show] end + module ParameterInstantiation : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = ('M, 'T) Type.t list - [@@deriving show] + + and ('M, 'T) t' = ('M, 'T) Type.t list [@@deriving show] end module Predicate : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = | Declared of ('M, 'T) Expression.t | Inferred [@@deriving show] end - -end = Type +end = + Type and Statement : sig module Block : sig - type ('M, 'T) t = { - body: ('M, 'T) Statement.t list - } - [@@deriving show] + type ('M, 'T) t = { body: ('M, 'T) Statement.t list } [@@deriving show] end + module If : sig type ('M, 'T) t = { test: ('M, 'T) Expression.t; consequent: ('M, 'T) Statement.t; alternate: ('M, 'T) Statement.t option; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module Labeled : sig type ('M, 'T) t = { - label: 'M Identifier.t; + label: ('M, 'M) Identifier.t; body: ('M, 'T) Statement.t; } [@@deriving show] end + module Break : sig type 'M t = { - label: 'M Identifier.t option; + label: ('M, 'M) Identifier.t option; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module Continue : sig type 'M t = { - label: 'M Identifier.t option; + label: ('M, 'M) Identifier.t option; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module With : sig type ('M, 'T) t = { _object: ('M, 'T) Expression.t; @@ -302,69 +375,80 @@ and Statement : sig } [@@deriving show] end + module TypeAlias : sig type ('M, 'T) t = { - id: 'T Identifier.t; + id: ('M, 'T) Identifier.t; tparams: ('M, 'T) Type.ParameterDeclaration.t option; right: ('M, 'T) Type.t; } [@@deriving show] end - module OpaqueType: sig + + module OpaqueType : sig type ('M, 'T) t = { - id: 'T Identifier.t; + id: ('M, 'T) Identifier.t; tparams: ('M, 'T) Type.ParameterDeclaration.t option; impltype: ('M, 'T) Type.t option; supertype: ('M, 'T) Type.t option; } [@@deriving show] end + module Switch : sig module Case : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { test: ('M, 'T) Expression.t option; consequent: ('M, 'T) Statement.t list; } [@@deriving 
show] end + type ('M, 'T) t = { discriminant: ('M, 'T) Expression.t; cases: ('M, 'T) Case.t list; } [@@deriving show] end + module Return : sig type ('M, 'T) t = { argument: ('M, 'T) Expression.t option; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module Throw : sig - type ('M, 'T) t = { - argument: ('M, 'T) Expression.t; - } - [@@deriving show] + type ('M, 'T) t = { argument: ('M, 'T) Expression.t } [@@deriving show] end + module Try : sig module CatchClause : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { param: ('M, 'T) Pattern.t option; body: 'M * ('M, 'T) Block.t; } [@@deriving show] end + type ('M, 'T) t = { block: 'M * ('M, 'T) Block.t; handler: ('M, 'T) CatchClause.t option; finalizer: ('M * ('M, 'T) Block.t) option; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module VariableDeclaration : sig module Declarator : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { id: ('M, 'T) Pattern.t; init: ('M, 'T) Expression.t option; @@ -376,12 +460,14 @@ and Statement : sig declarations: ('M, 'T) Declarator.t list; kind: kind; } + and kind = | Var | Let | Const [@@deriving show] end + module While : sig type ('M, 'T) t = { test: ('M, 'T) Expression.t; @@ -389,13 +475,16 @@ and Statement : sig } [@@deriving show] end + module DoWhile : sig type ('M, 'T) t = { body: ('M, 'T) Statement.t; test: ('M, 'T) Expression.t; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module For : sig type ('M, 'T) t = { init: ('M, 'T) init option; @@ -403,11 +492,13 @@ and Statement : sig update: ('M, 'T) Expression.t option; body: ('M, 'T) Statement.t; } + and ('M, 'T) init = | InitDeclaration of ('M * ('M, 'T) VariableDeclaration.t) | InitExpression of ('M, 'T) Expression.t [@@deriving show] end + module ForIn : sig type ('M, 'T) t = { left: ('M, 'T) left; @@ -415,11 +506,13 @@ and Statement : sig body: ('M, 'T) Statement.t; each: bool; } + and ('M, 'T) left = | LeftDeclaration of ('M * ('M, 'T) VariableDeclaration.t) | LeftPattern of ('M, 'T) Pattern.t [@@deriving show] end + module ForOf : sig type ('M, 'T) t = { left: ('M, 'T) left; @@ -427,23 +520,88 @@ and Statement : sig body: ('M, 'T) Statement.t; async: bool; } + and ('M, 'T) left = | LeftDeclaration of ('M * ('M, 'T) VariableDeclaration.t) | LeftPattern of ('M, 'T) Pattern.t [@@deriving show] end + + module EnumDeclaration : sig + module DefaultedMember : sig + type 'M t = 'M * 'M t' + + and 'M t' = { id: ('M, 'M) Identifier.t } [@@deriving show] + end + + module InitializedMember : sig + type ('I, 'M) t = 'M * ('I, 'M) t' + + and ('I, 'M) t' = { + id: ('M, 'M) Identifier.t; + init: 'M * 'I; + } + [@@deriving show] + end + + module BooleanBody : sig + type 'M t = { + members: (bool, 'M) InitializedMember.t list; + explicitType: bool; + } + [@@deriving show] + end + + module NumberBody : sig + type 'M t = { + members: (NumberLiteral.t, 'M) InitializedMember.t list; + explicitType: bool; + } + [@@deriving show] + end + + module StringBody : sig + type 'M t = { + members: (StringLiteral.t, 'M) members; + explicitType: bool; + } + + and ('I, 'M) members = + | Defaulted of 'M DefaultedMember.t list + | Initialized of ('I, 'M) InitializedMember.t list + [@@deriving show] + end + + module SymbolBody : sig + type 'M t = { members: 'M DefaultedMember.t list } [@@deriving show] + end + + type ('M, 'T) t = { + id: ('M, 'T) Identifier.t; + body: 'M body; + } + + and 'M body = + | BooleanBody of 'M BooleanBody.t + | NumberBody of 'M NumberBody.t + | StringBody of 'M StringBody.t + | 
SymbolBody of 'M SymbolBody.t + [@@deriving show] + end + module Interface : sig type ('M, 'T) t = { - id: 'T Identifier.t; + id: ('M, 'T) Identifier.t; tparams: ('M, 'T) Type.ParameterDeclaration.t option; extends: ('M * ('M, 'T) Type.Generic.t) list; body: 'M * ('M, 'T) Type.Object.t; } [@@deriving show] end + module DeclareClass : sig type ('M, 'T) t = { - id: 'T Identifier.t; + id: ('M, 'T) Identifier.t; tparams: ('M, 'T) Type.ParameterDeclaration.t option; body: 'M * ('M, 'T) Type.Object.t; extends: ('M * ('M, 'T) Type.Generic.t) option; @@ -452,24 +610,27 @@ and Statement : sig } [@@deriving show] end + module DeclareVariable : sig type ('M, 'T) t = { - id: 'T Identifier.t; - annot: ('M, 'T) Type.annotation option; + id: ('M, 'T) Identifier.t; + annot: ('M, 'T) Type.annotation_or_hint; } [@@deriving show] end + module DeclareFunction : sig type ('M, 'T) t = { - id: 'M Identifier.t; + id: ('M, 'T) Identifier.t; annot: ('M, 'T) Type.annotation; predicate: ('M, 'T) Type.Predicate.t option; } [@@deriving show] end + module DeclareModule : sig - type 'T id = - | Identifier of 'T Identifier.t + type ('M, 'T) id = + | Identifier of ('M, 'T) Identifier.t | Literal of ('T * StringLiteral.t) and 'M module_kind = @@ -477,19 +638,20 @@ and Statement : sig | ES of 'M and ('M, 'T) t = { - id: 'T id; + id: ('M, 'T) id; body: 'M * ('M, 'T) Block.t; kind: 'M module_kind; } - [@@deriving show] end + module ExportNamedDeclaration : sig module ExportSpecifier : sig type 'M t = 'M * 'M t' + and 'M t' = { - local: 'M Identifier.t; - exported: 'M Identifier.t option; + local: ('M, 'M) Identifier.t; + exported: ('M, 'M) Identifier.t option; } [@@deriving show] end @@ -500,21 +662,25 @@ and Statement : sig source: ('M * StringLiteral.t) option; exportKind: Statement.exportKind; } + and 'M specifier = | ExportSpecifiers of 'M ExportSpecifier.t list - | ExportBatchSpecifier of 'M * 'M Identifier.t option + | ExportBatchSpecifier of 'M * ('M, 'M) Identifier.t option [@@deriving show] end + module ExportDefaultDeclaration : sig type ('M, 'T) t = { default: 'M; declaration: ('M, 'T) declaration; } + and ('M, 'T) declaration = | Declaration of ('M, 'T) Statement.t | Expression of ('M, 'T) Expression.t [@@deriving show] end + module DeclareExportDeclaration : sig type ('M, 'T) declaration = (* declare export var *) @@ -540,9 +706,9 @@ and Statement : sig specifiers: 'M ExportNamedDeclaration.specifier option; source: ('M * StringLiteral.t) option; } - [@@deriving show] end + module ImportDeclaration : sig type importKind = | ImportType @@ -550,24 +716,24 @@ and Statement : sig | ImportValue and ('M, 'T) specifier = - | ImportNamedSpecifiers of 'T named_specifier list - | ImportNamespaceSpecifier of ('M * 'M Identifier.t) + | ImportNamedSpecifiers of ('M, 'T) named_specifier list + | ImportNamespaceSpecifier of ('M * ('M, 'M) Identifier.t) - and 'T named_specifier = { + and ('M, 'T) named_specifier = { kind: importKind option; - local: 'T Identifier.t option; - remote: 'T Identifier.t; + local: ('M, 'T) Identifier.t option; + remote: ('M, 'T) Identifier.t; } and ('M, 'T) t = { importKind: importKind; - source: ('M * StringLiteral.t); - default: 'T Identifier.t option; + source: 'M * StringLiteral.t; + default: ('M, 'T) Identifier.t option; specifiers: ('M, 'T) specifier option; } - [@@deriving show] end + module Expression : sig type ('M, 'T) t = { expression: ('M, 'T) Expression.t; @@ -581,6 +747,7 @@ and Statement : sig | ExportValue and ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = | Block of ('M, 'T) Block.t 
| Break of 'M Break.t @@ -598,6 +765,7 @@ and Statement : sig | DeclareVariable of ('M, 'T) DeclareVariable.t | DoWhile of ('M, 'T) DoWhile.t | Empty + | EnumDeclaration of ('M, 'T) EnumDeclaration.t | ExportDefaultDeclaration of ('M, 'T) ExportDefaultDeclaration.t | ExportNamedDeclaration of ('M, 'T) ExportNamedDeclaration.t | Expression of ('M, 'T) Expression.t @@ -618,49 +786,63 @@ and Statement : sig | VariableDeclaration of ('M, 'T) VariableDeclaration.t | While of ('M, 'T) While.t | With of ('M, 'T) With.t - [@@deriving show] -end = Statement +end = + Statement and Expression : sig + module TypeParameterInstantiation : sig + type ('M, 'T) t = 'M * ('M, 'T) t' + + and ('M, 'T) type_parameter_instantiation = + | Explicit of ('M, 'T) Type.t + | Implicit of 'T + + and ('M, 'T) t' = ('M, 'T) type_parameter_instantiation list [@@deriving show] + end + module SpreadElement : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - argument: ('M, 'T) Expression.t; - } - [@@deriving show] + + and ('M, 'T) t' = { argument: ('M, 'T) Expression.t } [@@deriving show] end type ('M, 'T) expression_or_spread = | Expression of ('M, 'T) Expression.t | Spread of ('M, 'T) SpreadElement.t - [@@deriving show] + [@@deriving show] module Array : sig type ('M, 'T) t = { elements: ('M, 'T) expression_or_spread option list; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module TemplateLiteral : sig module Element : sig type value = { raw: string; cooked: string; } + and 'M t = 'M * t' + and t' = { value: value; tail: bool; } [@@deriving show] end + type ('M, 'T) t = { quasis: 'M Element.t list; expressions: ('M, 'T) Expression.t list; } [@@deriving show] end + module TaggedTemplate : sig type ('M, 'T) t = { tag: ('M, 'T) Expression.t; @@ -668,14 +850,17 @@ and Expression : sig } [@@deriving show] end + module Object : sig module Property : sig type ('M, 'T) key = - | Literal of ('T * Literal.t) - | Identifier of 'T Identifier.t + | Literal of ('T * 'M Literal.t) + | Identifier of ('M, 'T) Identifier.t | PrivateName of 'M PrivateName.t | Computed of ('M, 'T) Expression.t + and ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = | Init of { key: ('M, 'T) key; @@ -694,15 +879,13 @@ and Expression : sig key: ('M, 'T) key; value: 'M * ('M, 'T) Function.t; } - [@@deriving show] end + module SpreadProperty : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - argument: ('M, 'T) Expression.t; - } - [@@deriving show] + + and ('M, 'T) t' = { argument: ('M, 'T) Expression.t } [@@deriving show] end type ('M, 'T) property = @@ -711,16 +894,15 @@ and Expression : sig and ('M, 'T) t = { properties: ('M, 'T) property list; + comments: ('M, unit) Syntax.t option; } - [@@deriving show] end + module Sequence : sig - type ('M, 'T) t = { - expressions: ('M, 'T) Expression.t list; - } - [@@deriving show] + type ('M, 'T) t = { expressions: ('M, 'T) Expression.t list } [@@deriving show] end + module Unary : sig type operator = | Minus @@ -734,12 +916,12 @@ and Expression : sig and ('M, 'T) t = { operator: operator; - prefix: bool; - argument: ('M, 'T) Expression.t + argument: ('M, 'T) Expression.t; + comments: ('M, unit) Syntax.t option; } - [@@deriving show] end + module Binary : sig type operator = | Equal @@ -770,12 +952,11 @@ and Expression : sig left: ('M, 'T) Expression.t; right: ('M, 'T) Expression.t; } - [@@deriving show] end + module Assignment : sig type operator = - | Assign | PlusAssign | MinusAssign | MultAssign @@ -790,13 +971,13 @@ and Expression : sig | BitAndAssign and ('M, 'T) t = { 
- operator: operator; + operator: operator option; left: ('M, 'T) Pattern.t; right: ('M, 'T) Expression.t; } - [@@deriving show] end + module Update : sig type operator = | Increment @@ -807,9 +988,9 @@ and Expression : sig argument: ('M, 'T) Expression.t; prefix: bool; } - [@@deriving show] end + module Logical : sig type operator = | Or @@ -821,9 +1002,9 @@ and Expression : sig left: ('M, 'T) Expression.t; right: ('M, 'T) Expression.t; } - [@@deriving show] end + module Conditional : sig type ('M, 'T) t = { test: ('M, 'T) Expression.t; @@ -832,22 +1013,26 @@ and Expression : sig } [@@deriving show] end + module New : sig type ('M, 'T) t = { callee: ('M, 'T) Expression.t; - targs: ('M, 'T) Type.ParameterInstantiation.t option; + targs: ('M, 'T) Expression.TypeParameterInstantiation.t option; arguments: ('M, 'T) expression_or_spread list; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module Call : sig type ('M, 'T) t = { callee: ('M, 'T) Expression.t; - targs: ('M, 'T) Type.ParameterInstantiation.t option; + targs: ('M, 'T) Expression.TypeParameterInstantiation.t option; arguments: ('M, 'T) expression_or_spread list; } [@@deriving show] end + module OptionalCall : sig type ('M, 'T) t = { call: ('M, 'T) Call.t; @@ -855,20 +1040,20 @@ and Expression : sig } [@@deriving show] end + module Member : sig type ('M, 'T) property = - | PropertyIdentifier of 'T Identifier.t + | PropertyIdentifier of ('M, 'T) Identifier.t | PropertyPrivateName of 'M PrivateName.t | PropertyExpression of ('M, 'T) Expression.t and ('M, 'T) t = { _object: ('M, 'T) Expression.t; property: ('M, 'T) property; - computed: bool; } - [@@deriving show] end + module OptionalMember : sig type ('M, 'T) t = { member: ('M, 'T) Member.t; @@ -876,16 +1061,20 @@ and Expression : sig } [@@deriving show] end + module Yield : sig type ('M, 'T) t = { argument: ('M, 'T) Expression.t option; + comments: ('M, unit) Syntax.t option; delegate: bool; } [@@deriving show] end + module Comprehension : sig module Block : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { left: ('M, 'T) Pattern.t; right: ('M, 'T) Expression.t; @@ -893,12 +1082,14 @@ and Expression : sig } [@@deriving show] end + type ('M, 'T) t = { blocks: ('M, 'T) Block.t list; filter: ('M, 'T) Expression.t option; } [@@deriving show] end + module Generator : sig type ('M, 'T) t = { blocks: ('M, 'T) Comprehension.Block.t list; @@ -906,6 +1097,7 @@ and Expression : sig } [@@deriving show] end + module TypeCast : sig type ('M, 'T) t = { expression: ('M, 'T) Expression.t; @@ -913,15 +1105,17 @@ and Expression : sig } [@@deriving show] end + module MetaProperty : sig type 'M t = { - meta: 'M Identifier.t; - property: 'M Identifier.t; + meta: ('M, 'M) Identifier.t; + property: ('M, 'M) Identifier.t; } [@@deriving show] end type ('M, 'T) t = 'T * ('M, 'T) t' + and ('M, 'T) t' = | Array of ('M, 'T) Array.t | ArrowFunction of ('M, 'T) Function.t @@ -933,11 +1127,11 @@ and Expression : sig | Conditional of ('M, 'T) Conditional.t | Function of ('M, 'T) Function.t | Generator of ('M, 'T) Generator.t - | Identifier of 'T Identifier.t + | Identifier of ('M, 'T) Identifier.t | Import of ('M, 'T) t | JSXElement of ('M, 'T) JSX.element | JSXFragment of ('M, 'T) JSX.fragment - | Literal of Literal.t + | Literal of 'M Literal.t | Logical of ('M, 'T) Logical.t | Member of ('M, 'T) Member.t | MetaProperty of 'M MetaProperty.t @@ -954,21 +1148,20 @@ and Expression : sig | Unary of ('M, 'T) Unary.t | Update of ('M, 'T) Update.t | Yield of ('M, 'T) Yield.t - [@@deriving 
show] -end = Expression +end = + Expression and JSX : sig module Identifier : sig type 'T t = 'T * t' - and t' = { - name: string; - } - [@@deriving show] + + and t' = { name: string } [@@deriving show] end module NamespacedName : sig type ('M, 'T) t = 'M * 'T t' + and 'T t' = { namespace: 'T Identifier.t; name: 'T Identifier.t; @@ -977,14 +1170,11 @@ and JSX : sig end module ExpressionContainer : sig - type ('M, 'T) t = { - expression: ('M, 'T) expression; - } + type ('M, 'T) t = { expression: ('M, 'T) expression } and ('M, 'T) expression = | Expression of ('M, 'T) Expression.t - | EmptyExpression of 'M - + | EmptyExpression [@@deriving show] end @@ -1004,23 +1194,20 @@ and JSX : sig | NamespacedName of ('M, 'T) NamespacedName.t and ('M, 'T) value = - | Literal of 'T * Literal.t + | Literal of 'T * 'M Literal.t | ExpressionContainer of 'T * ('M, 'T) ExpressionContainer.t and ('M, 'T) t' = { name: ('M, 'T) name; value: ('M, 'T) value option; } - [@@deriving show] end module SpreadAttribute : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - argument: ('M, 'T) Expression.t; - } - [@@deriving show] + + and ('M, 'T) t' = { argument: ('M, 'T) Expression.t } [@@deriving show] end module MemberExpression : sig @@ -1034,7 +1221,6 @@ and JSX : sig _object: ('M, 'T) _object; property: 'T Identifier.t; } - [@@deriving show] end @@ -1042,7 +1228,7 @@ and JSX : sig | Identifier of 'T Identifier.t | NamespacedName of ('M, 'T) NamespacedName.t | MemberExpression of ('M, 'T) MemberExpression.t - [@@deriving show] + [@@deriving show] module Opening : sig type ('M, 'T) t = 'M * ('M, 'T) t' @@ -1056,19 +1242,17 @@ and JSX : sig selfClosing: bool; attributes: ('M, 'T) attribute list; } - [@@deriving show] end module Closing : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - name: ('M, 'T) name; - } - [@@deriving show] + + and ('M, 'T) t' = { name: ('M, 'T) name } [@@deriving show] end type ('M, 'T) child = 'M * ('M, 'T) child' + and ('M, 'T) child' = | Element of ('M, 'T) element | Fragment of ('M, 'T) fragment @@ -1079,107 +1263,123 @@ and JSX : sig and ('M, 'T) element = { openingElement: ('M, 'T) Opening.t; closingElement: ('M, 'T) Closing.t option; - children: ('M, 'T) child list + children: 'M * ('M, 'T) child list; } and ('M, 'T) fragment = { frag_openingElement: 'M; - frag_closingElement: 'M option; - frag_children: ('M, 'T) child list; + frag_closingElement: 'M; + frag_children: 'M * ('M, 'T) child list; } - [@@deriving show] -end = JSX +end = + JSX and Pattern : sig module Object : sig module Property : sig type ('M, 'T) key = - | Literal of ('M * Literal.t) - | Identifier of 'M Identifier.t + | Literal of ('M * 'M Literal.t) + | Identifier of ('M, 'T) Identifier.t | Computed of ('M, 'T) Expression.t + and ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { key: ('M, 'T) key; pattern: ('M, 'T) Pattern.t; + default: ('M, 'T) Expression.t option; shorthand: bool; } [@@deriving show] end + module RestProperty : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - argument: ('M, 'T) Pattern.t; - } - [@@deriving show] + + and ('M, 'T) t' = { argument: ('M, 'T) Pattern.t } [@@deriving show] end + type ('M, 'T) property = | Property of ('M, 'T) Property.t | RestProperty of ('M, 'T) RestProperty.t + and ('M, 'T) t = { properties: ('M, 'T) property list; - annot: ('M, 'T) Type.annotation option; + annot: ('M, 'T) Type.annotation_or_hint; } [@@deriving show] end + module Array : sig - module RestElement : sig + module Element : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and 
('M, 'T) t' = { argument: ('M, 'T) Pattern.t; + default: ('M, 'T) Expression.t option; } [@@deriving show] end + + module RestElement : sig + type ('M, 'T) t = 'M * ('M, 'T) t' + + and ('M, 'T) t' = { argument: ('M, 'T) Pattern.t } [@@deriving show] + end + type ('M, 'T) element = - | Element of ('M, 'T) Pattern.t + | Element of ('M, 'T) Element.t | RestElement of ('M, 'T) RestElement.t + and ('M, 'T) t = { elements: ('M, 'T) element option list; - annot: ('M, 'T) Type.annotation option; - } - [@@deriving show] - end - module Assignment : sig - type ('M, 'T) t = { - left: ('M, 'T) Pattern.t; - right: ('M, 'T) Expression.t; + annot: ('M, 'T) Type.annotation_or_hint; + comments: ('M, unit) Syntax.t option; } [@@deriving show] end + module Identifier : sig type ('M, 'T) t = { - name: 'T Identifier.t; - annot: ('M, 'T) Type.annotation option; + name: ('M, 'T) Identifier.t; + annot: ('M, 'T) Type.annotation_or_hint; optional: bool; } [@@deriving show] end + type ('M, 'T) t = 'T * ('M, 'T) t' + and ('M, 'T) t' = | Object of ('M, 'T) Object.t | Array of ('M, 'T) Array.t - | Assignment of ('M, 'T) Assignment.t | Identifier of ('M, 'T) Identifier.t | Expression of ('M, 'T) Expression.t [@@deriving show] -end = Pattern +end = + Pattern and Comment : sig type 'M t = 'M * t' + and t' = | Block of string | Line of string [@@deriving show] -end = Comment +end = + Comment and Class : sig module Method : sig type ('M, 'T) t = 'T * ('M, 'T) t' + and kind = | Constructor | Method | Get | Set + and ('M, 'T) t' = { kind: kind; key: ('M, 'T) Expression.Object.Property.key; @@ -1189,65 +1389,74 @@ and Class : sig } [@@deriving show] end + module Property : sig type ('M, 'T) t = 'T * ('M, 'T) t' + and ('M, 'T) t' = { key: ('M, 'T) Expression.Object.Property.key; value: ('M, 'T) Expression.t option; - annot: ('M, 'T) Type.annotation option; + annot: ('M, 'T) Type.annotation_or_hint; static: bool; variance: 'M Variance.t option; } [@@deriving show] end - module PrivateField: sig + + module PrivateField : sig type ('M, 'T) t = 'T * ('M, 'T) t' + and ('M, 'T) t' = { key: 'M PrivateName.t; value: ('M, 'T) Expression.t option; - annot: ('M, 'T) Type.annotation option; + annot: ('M, 'T) Type.annotation_or_hint; static: bool; variance: 'M Variance.t option; } [@@deriving show] end + module Extends : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { expr: ('M, 'T) Expression.t; targs: ('M, 'T) Type.ParameterInstantiation.t option; } [@@deriving show] end + module Implements : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { - id: 'T Identifier.t; + id: ('M, 'T) Identifier.t; targs: ('M, 'T) Type.ParameterInstantiation.t option; } [@@deriving show] end + module Body : sig (* 'T annotation on class body is used to store class's "this" type *) type ('M, 'T) t = 'T * ('M, 'T) t' - and ('M, 'T) t' = { - body: ('M, 'T) element list; - } + + and ('M, 'T) t' = { body: ('M, 'T) element list } + and ('M, 'T) element = | Method of ('M, 'T) Method.t | Property of ('M, 'T) Property.t | PrivateField of ('M, 'T) PrivateField.t [@@deriving show] end + module Decorator : sig type ('M, 'T) t = 'M * ('M, 'T) t' - and ('M, 'T) t' = { - expression: ('M, 'T) Expression.t; - } - [@@deriving show] + + and ('M, 'T) t' = { expression: ('M, 'T) Expression.t } [@@deriving show] end + type ('M, 'T) t = { - id: 'T Identifier.t option; + id: ('M, 'T) Identifier.t option; body: ('M, 'T) Class.Body.t; tparams: ('M, 'T) Type.ParameterDeclaration.t option; extends: ('M, 'T) Extends.t option; @@ -1255,47 +1464,57 @@ and Class : sig 
classDecorators: ('M, 'T) Decorator.t list; } [@@deriving show] -end = Class +end = + Class and Function : sig - module RestElement : sig + module RestParam : sig + type ('M, 'T) t = 'M * ('M, 'T) t' + + and ('M, 'T) t' = { argument: ('M, 'T) Pattern.t } [@@deriving show] + end + + module Param : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { argument: ('M, 'T) Pattern.t; + default: ('M, 'T) Expression.t option; } [@@deriving show] end + module Params : sig type ('M, 'T) t = 'M * ('M, 'T) t' + and ('M, 'T) t' = { - params: ('M, 'T) Pattern.t list; - rest: ('M, 'T) RestElement.t option; + params: ('M, 'T) Param.t list; + rest: ('M, 'T) RestParam.t option; } [@@deriving show] end type ('M, 'T) t = { - id: 'T Identifier.t option; + id: ('M, 'T) Identifier.t option; params: ('M, 'T) Params.t; body: ('M, 'T) body; async: bool; generator: bool; predicate: ('M, 'T) Type.Predicate.t option; - expression: bool; - return: ('M, 'T) return; + return: ('M, 'T) Type.annotation_or_hint; tparams: ('M, 'T) Type.ParameterDeclaration.t option; + (* Location of the signature portion of a function, e.g. + * function foo(): void {} + * ^^^^^^^^^^^^^^^^^^^^ + *) + sig_loc: 'M; } and ('M, 'T) body = | BodyBlock of ('M * ('M, 'T) Statement.Block.t) | BodyExpression of ('M, 'T) Expression.t - - and ('M, 'T) return = - | Missing of 'T - | Available of ('M, 'T) Type.annotation - [@@deriving show] - -end = Function +end = + Function] type ('M, 'T) program = 'M * ('M, 'T) Statement.t list * 'M Comment.t list [@@deriving show] diff --git a/src/parser/flow_ast_utils.ml b/src/parser/flow_ast_utils.ml new file mode 100644 index 00000000000..aebf84e1c2d --- /dev/null +++ b/src/parser/flow_ast_utils.ml @@ -0,0 +1,195 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open Flow_ast + +type 'loc binding = 'loc * string + +type 'loc ident = 'loc * string + +type 'loc source = 'loc * string + +let rec fold_bindings_of_pattern = + Pattern.( + let property f acc = + Object.( + function + | Property (_, { Property.pattern = (_, p); _ }) + | RestProperty (_, { RestProperty.argument = (_, p) }) -> + fold_bindings_of_pattern f acc p) + in + let element f acc = + Array.( + function + | None -> acc + | Some (Element (_, { Element.argument = (_, p); default = _ })) + | Some (RestElement (_, { RestElement.argument = (_, p) })) -> + fold_bindings_of_pattern f acc p) + in + fun f acc -> function + | Identifier { Identifier.name; _ } -> f acc name + | Object { Object.properties; _ } -> List.fold_left (property f) acc properties + | Array { Array.elements; _ } -> List.fold_left (element f) acc elements + | Expression _ -> failwith "expression pattern") + +let fold_bindings_of_variable_declarations f acc declarations = + Flow_ast.Statement.VariableDeclaration.( + List.fold_left + (fun acc -> function + | (_, { Declarator.id = (_, pattern); _ }) -> fold_bindings_of_pattern f acc pattern) + acc + declarations) + +let partition_directives statements = + Flow_ast.Statement.( + let rec helper directives = function + | ((_, Expression { Expression.directive = Some _; _ }) as directive) :: rest -> + helper (directive :: directives) rest + | rest -> (List.rev directives, rest) + in + helper [] statements) + +let negate_number_literal (value, raw) = + let raw_len = String.length raw in + let raw = + if raw_len > 0 && raw.[0] = '-' then + String.sub raw 1 (raw_len - 1) + else + "-" ^ raw + in + (~-.value, raw) + +let loc_of_statement = fst + +let loc_of_expression = fst + +let loc_of_pattern = fst + +let loc_of_ident = fst + +let name_of_ident (_, { Identifier.name; comments = _ }) = name + +let source_of_ident (loc, { Identifier.name; comments = _ }) = (loc, name) + +let ident_of_source (loc, name) = (loc, { Identifier.name; comments = None }) + +let mk_comments ?(leading = []) ?(trailing = []) a = { Syntax.leading; trailing; internal = a } + +let mk_comments_opt ?(leading = []) ?(trailing = []) () = + match (leading, trailing) with + | ([], []) -> None + | (_, _) -> Some (mk_comments ~leading ~trailing ()) + +let string_of_assignment_operator op = + Flow_ast.Expression.Assignment.( + match op with + | PlusAssign -> "+=" + | MinusAssign -> "-=" + | MultAssign -> "*=" + | ExpAssign -> "**=" + | DivAssign -> "/=" + | ModAssign -> "%=" + | LShiftAssign -> "<<=" + | RShiftAssign -> ">>=" + | RShift3Assign -> ">>>=" + | BitOrAssign -> "|=" + | BitXorAssign -> "^=" + | BitAndAssign -> "&=") + +let string_of_binary_operator op = + Flow_ast.Expression.Binary.( + match op with + | Equal -> "==" + | NotEqual -> "!=" + | StrictEqual -> "===" + | StrictNotEqual -> "!==" + | LessThan -> "<" + | LessThanEqual -> "<=" + | GreaterThan -> ">" + | GreaterThanEqual -> ">=" + | LShift -> "<<" + | RShift -> ">>" + | RShift3 -> ">>>" + | Plus -> "+" + | Minus -> "-" + | Mult -> "*" + | Exp -> "**" + | Div -> "/" + | Mod -> "%" + | BitOr -> "|" + | Xor -> "^" + | BitAnd -> "&" + | In -> "in" + | Instanceof -> "instanceof") + +module ExpressionSort = struct + type t = + | Array + | ArrowFunction + | Assignment + | Binary + | Call + | Class + | Comprehension + | Conditional + | Function + | Generator + | Identifier + | Import + | JSXElement + | JSXFragment + | Literal + | Logical + | Member + | MetaProperty + | New + | Object + | OptionalCall + | OptionalMember + | Sequence + | Super + | 
TaggedTemplate + | TemplateLiteral + | This + | TypeCast + | Unary + | Update + | Yield + + let to_string = function + | Array -> "array" + | ArrowFunction -> "arrow function" + | Assignment -> "assignment expression" + | Binary -> "binary expression" + | Call -> "call expression" + | Class -> "class" + | Comprehension -> "comprehension expression" + | Conditional -> "conditional expression" + | Function -> "function" + | Generator -> "generator" + | Identifier -> "identifier" + | Import -> "import expression" + | JSXElement -> "JSX element" + | JSXFragment -> "JSX fragment" + | Literal -> "literal" + | Logical -> "logical expression" + | Member -> "member expression" + | MetaProperty -> "metaproperty expression" + | New -> "new expression" + | Object -> "object" + | OptionalCall -> "optional call expression" + | OptionalMember -> "optional member expression" + | Sequence -> "sequence" + | Super -> "`super` reference" + | TaggedTemplate -> "tagged template expression" + | TemplateLiteral -> "template literal" + | This -> "`this` reference" + | TypeCast -> "type cast" + | Unary -> "unary expression" + | Update -> "update expression" + | Yield -> "yield expression" +end diff --git a/src/parser/flow_ast_utils.mli b/src/parser/flow_ast_utils.mli new file mode 100644 index 00000000000..7d8c30db6cc --- /dev/null +++ b/src/parser/flow_ast_utils.mli @@ -0,0 +1,94 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type 'loc binding = 'loc * string + +type 'loc ident = 'loc * string + +type 'loc source = 'loc * string + +val fold_bindings_of_pattern : + ('a -> ('loc, 'loc) Flow_ast.Identifier.t -> 'a) -> 'a -> ('loc, 'loc) Flow_ast.Pattern.t' -> 'a + +val fold_bindings_of_variable_declarations : + ('a -> ('loc, 'loc) Flow_ast.Identifier.t -> 'a) -> + 'a -> + ('loc, 'loc) Flow_ast.Statement.VariableDeclaration.Declarator.t list -> + 'a + +val partition_directives : + (Loc.t, Loc.t) Flow_ast.Statement.t list -> + (Loc.t, Loc.t) Flow_ast.Statement.t list * (Loc.t, Loc.t) Flow_ast.Statement.t list + +val negate_number_literal : float * string -> float * string + +val loc_of_expression : ('a, 'a) Flow_ast.Expression.t -> 'a + +val loc_of_statement : ('a, 'a) Flow_ast.Statement.t -> 'a + +val loc_of_pattern : ('a, 'a) Flow_ast.Pattern.t -> 'a + +val loc_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a + +val name_of_ident : ('loc, 'a) Flow_ast.Identifier.t -> string + +val source_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a source + +val ident_of_source : 'a source -> ('a, 'a) Flow_ast.Identifier.t + +val mk_comments : + ?leading:'loc Flow_ast.Comment.t list -> + ?trailing:'loc Flow_ast.Comment.t list -> + 'a -> + ('loc, 'a) Flow_ast.Syntax.t + +val mk_comments_opt : + ?leading:'loc Flow_ast.Comment.t list -> + ?trailing:'loc Flow_ast.Comment.t list -> + unit -> + ('loc, unit) Flow_ast.Syntax.t option + +module ExpressionSort : sig + type t = + | Array + | ArrowFunction + | Assignment + | Binary + | Call + | Class + | Comprehension + | Conditional + | Function + | Generator + | Identifier + | Import + | JSXElement + | JSXFragment + | Literal + | Logical + | Member + | MetaProperty + | New + | Object + | OptionalCall + | OptionalMember + | Sequence + | Super + | TaggedTemplate + | TemplateLiteral + | This + | TypeCast + | Unary + | Update + | Yield + + val to_string : t -> string +end + +val string_of_assignment_operator : 
Flow_ast.Expression.Assignment.operator -> string + +val string_of_binary_operator : Flow_ast.Expression.Binary.operator -> string diff --git a/src/parser/flow_lexer.ml b/src/parser/flow_lexer.ml new file mode 100644 index 00000000000..a8c1126ce38 --- /dev/null +++ b/src/parser/flow_lexer.ml @@ -0,0 +1,1773 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +[@@@warning "-39"] (* sedlex inserts some unnecessary `rec`s *) + +open Token +open Lex_env + +let lexeme = Sedlexing.Utf8.lexeme + +let sub_lexeme = Sedlexing.Utf8.sub_lexeme + +let letter = [%sedlex.regexp? 'a' .. 'z' | 'A' .. 'Z' | '$'] + +let id_letter = [%sedlex.regexp? letter | '_'] + +let digit = [%sedlex.regexp? '0' .. '9'] + +let digit_non_zero = [%sedlex.regexp? '1' .. '9'] + +let decintlit = [%sedlex.regexp? '0' | ('1' .. '9', Star digit)] + +(* DecimalIntegerLiteral *) + +let alphanumeric = [%sedlex.regexp? digit | letter] + +let word = [%sedlex.regexp? (letter, Star alphanumeric)] + +let hex_digit = [%sedlex.regexp? digit | 'a' .. 'f' | 'A' .. 'F'] + +let non_hex_letter = [%sedlex.regexp? 'g' .. 'z' | 'G' .. 'Z' | '$'] + +let bin_digit = [%sedlex.regexp? '0' | '1'] + +let oct_digit = [%sedlex.regexp? '0' .. '7'] + +(* This regex could be simplified to (digit Star (digit OR '_' digit)) + * That makes the underscore and failure cases faster, and the base case take x2-3 the steps + * As the codebase contains more base cases than underscored or errors, prefer this version *) +let underscored_bin = + [%sedlex.regexp? Plus bin_digit | (bin_digit, Star (bin_digit | ('_', bin_digit)))] + +let underscored_oct = + [%sedlex.regexp? Plus oct_digit | (oct_digit, Star (oct_digit | ('_', oct_digit)))] + +let underscored_hex = + [%sedlex.regexp? Plus hex_digit | (hex_digit, Star (hex_digit | ('_', hex_digit)))] + +let underscored_digit = + [%sedlex.regexp? Plus digit | (digit_non_zero, Star (digit | ('_', digit)))] + +let underscored_decimal = [%sedlex.regexp? Plus digit | (digit, Star (digit | ('_', digit)))] + +(* Different ways you can write a number *) +let binnumber = [%sedlex.regexp? ('0', ('B' | 'b'), underscored_bin)] + +let octnumber = [%sedlex.regexp? ('0', ('O' | 'o'), underscored_oct)] + +let legacyoctnumber = [%sedlex.regexp? ('0', Plus oct_digit)] + +(* no underscores allowed *) + +let legacynonoctnumber = [%sedlex.regexp? ('0', Star oct_digit, '8' .. '9', Star digit)] + +let hexnumber = [%sedlex.regexp? ('0', ('X' | 'x'), underscored_hex)] + +let scinumber = + [%sedlex.regexp? + ( ((decintlit, Opt ('.', Opt underscored_decimal)) | ('.', underscored_decimal)), + ('e' | 'E'), + Opt ('-' | '+'), + underscored_digit )] + +let wholenumber = [%sedlex.regexp? (underscored_digit, Opt '.')] + +let floatnumber = [%sedlex.regexp? (Opt underscored_digit, '.', underscored_decimal)] + +let binbigint = [%sedlex.regexp? (binnumber, 'n')] + +let octbigint = [%sedlex.regexp? (octnumber, 'n')] + +let hexbigint = [%sedlex.regexp? (hexnumber, 'n')] + +let scibigint = [%sedlex.regexp? (scinumber, 'n')] + +let wholebigint = [%sedlex.regexp? (underscored_digit, 'n')] + +let floatbigint = [%sedlex.regexp? ((floatnumber | (underscored_digit, '.')), 'n')] + +(* 2-8 alphanumeric characters. 
I could match them directly, but this leads to + * ~5k more lines of generated lexer +let htmlentity = "quot" | "amp" | "apos" | "lt" | "gt" | "nbsp" | "iexcl" + | "cent" | "pound" | "curren" | "yen" | "brvbar" | "sect" | "uml" | "copy" + | "ordf" | "laquo" | "not" | "shy" | "reg" | "macr" | "deg" | "plusmn" + | "sup2" | "sup3" | "acute" | "micro" | "para" | "middot" | "cedil" | "sup1" + | "ordm" | "raquo" | "frac14" | "frac12" | "frac34" | "iquest" | "Agrave" + | "Aacute" | "Acirc" | "Atilde" | "Auml" | "Aring" | "AElig" | "Ccedil" + | "Egrave" | "Eacute" | "Ecirc" | "Euml" | "Igrave" | "Iacute" | "Icirc" + | "Iuml" | "ETH" | "Ntilde" | "Ograve" | "Oacute" | "Ocirc" | "Otilde" + | "Ouml" | "times" | "Oslash" | "Ugrave" | "Uacute" | "Ucirc" | "Uuml" + | "Yacute" | "THORN" | "szlig" | "agrave" | "aacute" | "acirc" | "atilde" + | "auml" | "aring" | "aelig" | "ccedil" | "egrave" | "eacute" | "ecirc" + | "euml" | "igrave" | "iacute" | "icirc" | "iuml" | "eth" | "ntilde" + | "ograve" | "oacute" | "ocirc" | "otilde" | "ouml" | "divide" | "oslash" + | "ugrave" | "uacute" | "ucirc" | "uuml" | "yacute" | "thorn" | "yuml" + | "OElig" | "oelig" | "Scaron" | "scaron" | "Yuml" | "fnof" | "circ" | "tilde" + | "Alpha" | "Beta" | "Gamma" | "Delta" | "Epsilon" | "Zeta" | "Eta" | "Theta" + | "Iota" | "Kappa" | "Lambda" | "Mu" | "Nu" | "Xi" | "Omicron" | "Pi" | "Rho" + | "Sigma" | "Tau" | "Upsilon" | "Phi" | "Chi" | "Psi" | "Omega" | "alpha" + | "beta" | "gamma" | "delta" | "epsilon" | "zeta" | "eta" | "theta" | "iota" + | "kappa" | "lambda" | "mu" | "nu" | "xi" | "omicron" | "pi" | "rho" + | "sigmaf" | "sigma" | "tau" | "upsilon" | "phi" | "chi" | "psi" | "omega" + | "thetasym" | "upsih" | "piv" | "ensp" | "emsp" | "thinsp" | "zwnj" | "zwj" + | "lrm" | "rlm" | "ndash" | "mdash" | "lsquo" | "rsquo" | "sbquo" | "ldquo" + | "rdquo" | "bdquo" | "dagger" | "Dagger" | "bull" | "hellip" | "permil" + | "prime" | "Prime" | "lsaquo" | "rsaquo" | "oline" | "frasl" | "euro" + | "image" | "weierp" | "real" | "trade" | "alefsym" | "larr" | "uarr" | "rarr" + | "darr" | "harr" | "crarr" | "lArr" | "uArr" | "rArr" | "dArr" | "hArr" + | "forall" | "part" | "exist" | "empty" | "nabla" | "isin" | "notin" | "ni" + | "prod" | "sum" | "minus" | "lowast" | "radic" | "prop" | "infin" | "ang" + | "and" | "or" | "cap" | "cup" | "'int'" | "there4" | "sim" | "cong" | "asymp" + | "ne" | "equiv" | "le" | "ge" | "sub" | "sup" | "nsub" | "sube" | "supe" + | "oplus" | "otimes" | "perp" | "sdot" | "lceil" | "rceil" | "lfloor" + | "rfloor" | "lang" | "rang" | "loz" | "spades" | "clubs" | "hearts" | "diams" + *) +let htmlentity = + [%sedlex.regexp? + ( alphanumeric, + alphanumeric, + Opt alphanumeric, + Opt alphanumeric, + Opt alphanumeric, + Opt alphanumeric, + Opt alphanumeric, + Opt alphanumeric )] + +(* https://tc39.github.io/ecma262/#sec-white-space *) +let whitespace = + [%sedlex.regexp? + ( 0x0009 | 0x000B | 0x000C | 0x0020 | 0x00A0 | 0xfeff | 0x1680 + | 0x2000 .. 0x200a + | 0x202f | 0x205f | 0x3000 )] + +(* minus sign in front of negative numbers + (only for types! regular numbers use T_MINUS!) *) +let neg = [%sedlex.regexp? ('-', Star whitespace)] + +let line_terminator_sequence = [%sedlex.regexp? '\n' | '\r' | "\r\n" | 0x2028 | 0x2029] + +let line_terminator_sequence_start = [%sedlex.regexp? '\n' | '\r' | 0x2028 | 0x2029] + +let hex_quad = [%sedlex.regexp? (hex_digit, hex_digit, hex_digit, hex_digit)] + +let unicode_escape = [%sedlex.regexp? ("\\u", hex_quad)] + +let codepoint_escape = [%sedlex.regexp? 
("\\u{", Plus hex_digit, '}')] + +let js_id_start = [%sedlex.regexp? '$' | '_' | id_start | unicode_escape | codepoint_escape] + +let js_id_continue = + [%sedlex.regexp? '$' | '_' | 0x200C | 0x200D | id_continue | unicode_escape | codepoint_escape] + +let pos_at_offset env offset = + { Loc.line = Lex_env.line env; column = offset - Lex_env.bol_offset env } + +let loc_of_offsets env start_offset end_offset = + { + Loc.source = Lex_env.source env; + start = pos_at_offset env start_offset; + _end = pos_at_offset env end_offset; + } + +let start_pos_of_lexbuf env (lexbuf : Sedlexing.lexbuf) = + let start_offset = Sedlexing.lexeme_start lexbuf in + pos_at_offset env start_offset + +let end_pos_of_lexbuf env (lexbuf : Sedlexing.lexbuf) = + let end_offset = Sedlexing.lexeme_end lexbuf in + pos_at_offset env end_offset + +let loc_of_lexbuf env (lexbuf : Sedlexing.lexbuf) = + let start_offset = Sedlexing.lexeme_start lexbuf in + let end_offset = Sedlexing.lexeme_end lexbuf in + loc_of_offsets env start_offset end_offset + +let get_result_and_clear_state (env, lex_token, lex_comments) = + let (env, { lex_errors_acc }) = get_and_clear_state env in + let lex_loc = + match lex_token with + | T_STRING (loc, _, _, _) -> loc + | T_JSX_TEXT (loc, _, _) -> loc + | T_TEMPLATE_PART (loc, _, _) -> loc + | T_REGEXP (loc, _, _) -> loc + | _ -> loc_of_lexbuf env env.lex_lb + in + (env, { Lex_result.lex_token; lex_loc; lex_errors = List.rev lex_errors_acc; lex_comments }) + +let lex_error (env : Lex_env.t) loc err : Lex_env.t = + let lex_errors_acc = (loc, err) :: env.lex_state.lex_errors_acc in + { env with lex_state = { lex_errors_acc } } + +let unexpected_error (env : Lex_env.t) (loc : Loc.t) value = + lex_error env loc (Parse_error.Unexpected (quote_token_value value)) + +let unexpected_error_w_suggest (env : Lex_env.t) (loc : Loc.t) value suggest = + lex_error env loc (Parse_error.UnexpectedTokenWithSuggestion (value, suggest)) + +let illegal (env : Lex_env.t) (loc : Loc.t) = + lex_error env loc (Parse_error.Unexpected "token ILLEGAL") + +let new_line env lexbuf = + let offset = Sedlexing.lexeme_end lexbuf in + let lex_bol = { line = Lex_env.line env + 1; offset } in + { env with Lex_env.lex_bol } + +let bigint_strip_n raw = + let size = String.length raw in + let str = + if size != 0 && raw.[size - 1] == 'n' then + String.sub raw 0 (size - 1) + else + raw + in + str + +let mk_comment + (env : Lex_env.t) + (start : Loc.position) + (_end : Loc.position) + (buf : Buffer.t) + (multiline : bool) : Loc.t Flow_ast.Comment.t = + Flow_ast.Comment.( + let loc = { Loc.source = Lex_env.source env; start; _end } in + let s = Buffer.contents buf in + let c = + if multiline then + Block s + else + Line s + in + (loc, c)) + +let mk_num_singleton number_type raw = + let (neg, num) = + if raw.[0] = '-' then + (true, String.sub raw 1 (String.length raw - 1)) + else + (false, raw) + in + (* convert singleton number type into a float *) + let value = + match number_type with + | LEGACY_OCTAL -> + begin + try Int64.to_float (Int64.of_string ("0o" ^ num)) + with Failure _ -> failwith ("Invalid legacy octal " ^ num) + end + | BINARY + | OCTAL -> + begin + try Int64.to_float (Int64.of_string num) + with Failure _ -> failwith ("Invalid binary/octal " ^ num) + end + | LEGACY_NON_OCTAL + | NORMAL -> + begin + try float_of_string num with Failure _ -> failwith ("Invalid number " ^ num) + end + in + let value = + if neg then + ~-.value + else + value + in + T_NUMBER_SINGLETON_TYPE { kind = number_type; value; raw } + +let 
mk_bignum_singleton kind raw = + let (neg, num) = + if raw.[0] = '-' then + (true, String.sub raw 1 (String.length raw - 1)) + else + (false, raw) + in + (* convert singleton number type into a float *) + let value = + match kind with + | BIG_BINARY + | BIG_OCTAL -> + let postraw = bigint_strip_n num in + begin + try Int64.to_float (Int64.of_string postraw) + with Failure _ -> failwith ("Invalid (lexer) bigint binary/octal " ^ postraw) + end + | BIG_NORMAL -> + let postraw = bigint_strip_n num in + begin + try float_of_string postraw + with Failure _ -> failwith ("Invalid (lexer) bigint " ^ postraw) + end + in + let approx_value = + if neg then + ~-.value + else + value + in + T_BIGINT_SINGLETON_TYPE { kind; approx_value; raw } + +let decode_identifier = + let assert_valid_unicode_in_identifier env loc code = + let lexbuf = Sedlexing.from_int_array [|code|] in + match%sedlex lexbuf with + | js_id_start -> env + | js_id_continue -> env + | any + | eof -> + lex_error env loc Parse_error.IllegalUnicodeEscape + | _ -> failwith "unreachable" + in + let loc_and_sub_lexeme env offset lexbuf trim_start trim_end = + let start_offset = offset + Sedlexing.lexeme_start lexbuf in + let end_offset = offset + Sedlexing.lexeme_end lexbuf in + let loc = loc_of_offsets env start_offset end_offset in + (loc, sub_lexeme lexbuf trim_start (Sedlexing.lexeme_length lexbuf - trim_start - trim_end)) + in + let rec id_char env offset buf lexbuf = + match%sedlex lexbuf with + | unicode_escape -> + let (loc, hex) = loc_and_sub_lexeme env offset lexbuf 2 0 in + let code = int_of_string ("0x" ^ hex) in + let env = assert_valid_unicode_in_identifier env loc code in + Wtf8.add_wtf_8 buf code; + id_char env offset buf lexbuf + | codepoint_escape -> + let (loc, hex) = loc_and_sub_lexeme env offset lexbuf 3 1 in + let code = int_of_string ("0x" ^ hex) in + let env = assert_valid_unicode_in_identifier env loc code in + Wtf8.add_wtf_8 buf code; + id_char env offset buf lexbuf + | eof -> (env, Buffer.contents buf) + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl (eof | "\\")) + | any -> + let x = lexeme lexbuf in + Buffer.add_string buf x; + id_char env offset buf lexbuf + | _ -> failwith "unreachable" + in + fun env raw -> + let offset = Sedlexing.lexeme_start env.lex_lb in + let lexbuf = Sedlexing.Utf8.from_string raw in + let buf = Buffer.create (String.length raw) in + id_char env offset buf lexbuf + +let recover env lexbuf ~f = + let env = illegal env (loc_of_lexbuf env lexbuf) in + Sedlexing.rollback lexbuf; + f env lexbuf + +type jsx_text_mode = + | JSX_SINGLE_QUOTED_TEXT + | JSX_DOUBLE_QUOTED_TEXT + | JSX_CHILD_TEXT + +type result = + | Token of Lex_env.t * Token.t + | Comment of Lex_env.t * Loc.t Flow_ast.Comment.t + | Continue of Lex_env.t + +let rec comment env buf lexbuf = + match%sedlex lexbuf with + | line_terminator_sequence -> + let env = new_line env lexbuf in + Buffer.add_string buf (lexeme lexbuf); + comment env buf lexbuf + | "*/" -> + let env = + if is_in_comment_syntax env then + let loc = loc_of_lexbuf env lexbuf in + unexpected_error_w_suggest env loc "*/" "*-/" + else + env + in + (env, end_pos_of_lexbuf env lexbuf) + | "*-/" -> + if is_in_comment_syntax env then + (env, end_pos_of_lexbuf env lexbuf) + else ( + Buffer.add_string buf "*-/"; + comment env buf lexbuf + ) + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl (line_terminator_sequence_start | '*')) + | any -> + 
Buffer.add_string buf (lexeme lexbuf); + comment env buf lexbuf + | _ -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + (env, end_pos_of_lexbuf env lexbuf) + +let rec line_comment env buf lexbuf = + match%sedlex lexbuf with + | eof -> (env, end_pos_of_lexbuf env lexbuf) + | line_terminator_sequence -> + let { Loc.line; column } = end_pos_of_lexbuf env lexbuf in + let env = new_line env lexbuf in + let len = Sedlexing.lexeme_length lexbuf in + let end_pos = { Loc.line; column = column - len } in + (env, end_pos) + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl (eof | line_terminator_sequence_start)) + | any -> + let str = lexeme lexbuf in + Buffer.add_string buf str; + line_comment env buf lexbuf + | _ -> failwith "unreachable" + +let string_escape env lexbuf = + match%sedlex lexbuf with + | eof + | '\\' -> + let str = lexeme lexbuf in + let codes = Sedlexing.lexeme lexbuf in + (env, str, codes, false) + | ('x', hex_digit, hex_digit) -> + let str = lexeme lexbuf in + let code = int_of_string ("0" ^ str) in + (* 0xAB *) + (env, str, [|code|], false) + | ('0' .. '7', '0' .. '7', '0' .. '7') -> + let str = lexeme lexbuf in + let code = int_of_string ("0o" ^ str) in + (* 0o012 *) + (* If the 3 character octal code is larger than 256 + * then it is parsed as a 2 character octal code *) + if code < 256 then + (env, str, [|code|], true) + else + let remainder = code land 7 in + let code = code lsr 3 in + (env, str, [|code; Char.code '0' + remainder|], true) + | ('0' .. '7', '0' .. '7') -> + let str = lexeme lexbuf in + let code = int_of_string ("0o" ^ str) in + (* 0o01 *) + (env, str, [|code|], true) + | '0' -> (env, "0", [|0x0|], false) + | 'b' -> (env, "b", [|0x8|], false) + | 'f' -> (env, "f", [|0xC|], false) + | 'n' -> (env, "n", [|0xA|], false) + | 'r' -> (env, "r", [|0xD|], false) + | 't' -> (env, "t", [|0x9|], false) + | 'v' -> (env, "v", [|0xB|], false) + | '0' .. '7' -> + let str = lexeme lexbuf in + let code = int_of_string ("0o" ^ str) in + (* 0o1 *) + (env, str, [|code|], true) + | ('u', hex_quad) -> + let str = lexeme lexbuf in + let hex = String.sub str 1 (String.length str - 1) in + let code = int_of_string ("0x" ^ hex) in + (env, str, [|code|], false) + | ("u{", Plus hex_digit, '}') -> + let str = lexeme lexbuf in + let hex = String.sub str 2 (String.length str - 3) in + let code = int_of_string ("0x" ^ hex) in + (* 11.8.4.1 *) + let env = + if code > 1114111 then + illegal env (loc_of_lexbuf env lexbuf) + else + env + in + (env, str, [|code|], false) + | 'u' + | 'x' + | '0' .. '7' -> + let str = lexeme lexbuf in + let codes = Sedlexing.lexeme lexbuf in + let env = illegal env (loc_of_lexbuf env lexbuf) in + (env, str, codes, false) + | line_terminator_sequence -> + let str = lexeme lexbuf in + let env = new_line env lexbuf in + (env, str, [||], false) + | any -> + let str = lexeme lexbuf in + let codes = Sedlexing.lexeme lexbuf in + (env, str, codes, false) + | _ -> failwith "unreachable" + +(* Really simple version of string lexing. Just try to find beginning and end of + * string. 
We can inspect the string later to find invalid escapes, etc *) +let rec string_quote env q buf raw octal lexbuf = + match%sedlex lexbuf with + | "'" + | '"' -> + let q' = lexeme lexbuf in + Buffer.add_string raw q'; + if q = q' then + (env, end_pos_of_lexbuf env lexbuf, octal) + else ( + Buffer.add_string buf q'; + string_quote env q buf raw octal lexbuf + ) + | '\\' -> + Buffer.add_string raw "\\"; + let (env, str, codes, octal') = string_escape env lexbuf in + let octal = octal' || octal in + Buffer.add_string raw str; + Array.iter (Wtf8.add_wtf_8 buf) codes; + string_quote env q buf raw octal lexbuf + | '\n' -> + let x = lexeme lexbuf in + Buffer.add_string raw x; + let env = illegal env (loc_of_lexbuf env lexbuf) in + let env = new_line env lexbuf in + Buffer.add_string buf x; + (env, end_pos_of_lexbuf env lexbuf, octal) + | eof -> + let x = lexeme lexbuf in + Buffer.add_string raw x; + let env = illegal env (loc_of_lexbuf env lexbuf) in + Buffer.add_string buf x; + (env, end_pos_of_lexbuf env lexbuf, octal) + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl ("'" | '"' | '\\' | '\n' | eof)) + | any -> + let x = lexeme lexbuf in + Buffer.add_string raw x; + Buffer.add_string buf x; + string_quote env q buf raw octal lexbuf + | _ -> failwith "unreachable" + +let rec template_part env cooked raw literal lexbuf = + match%sedlex lexbuf with + | eof -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + (env, true) + | '`' -> + Buffer.add_char literal '`'; + (env, true) + | "${" -> + Buffer.add_string literal "${"; + (env, false) + | '\\' -> + Buffer.add_char raw '\\'; + Buffer.add_char literal '\\'; + let (env, str, codes, _) = string_escape env lexbuf in + Buffer.add_string raw str; + Buffer.add_string literal str; + Array.iter (Wtf8.add_wtf_8 cooked) codes; + template_part env cooked raw literal lexbuf + (* ECMAScript 6th Syntax, 11.8.6.1 Static Semantics: TV's and TRV's + * Long story short, is 0xA, is 0xA, and is 0xA + * *) + | "\r\n" -> + Buffer.add_string raw "\r\n"; + Buffer.add_string literal "\r\n"; + Buffer.add_string cooked "\n"; + let env = new_line env lexbuf in + template_part env cooked raw literal lexbuf + | "\n" + | "\r" -> + let lf = lexeme lexbuf in + Buffer.add_string raw lf; + Buffer.add_string literal lf; + Buffer.add_char cooked '\n'; + let env = new_line env lexbuf in + template_part env cooked raw literal lexbuf + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl (eof | '`' | '$' | '\\' | '\r' | '\n')) + | any -> + let c = lexeme lexbuf in + Buffer.add_string raw c; + Buffer.add_string literal c; + Buffer.add_string cooked c; + template_part env cooked raw literal lexbuf + | _ -> failwith "unreachable" + +let token (env : Lex_env.t) lexbuf : result = + match%sedlex lexbuf with + | line_terminator_sequence -> + let env = new_line env lexbuf in + Continue env + | '\\' -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + Continue env + | Plus whitespace -> Continue env + | "/*" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + | ("/*", Star whitespace, (":" | "::" | "flow-include")) -> + let pattern = lexeme lexbuf in + if not (is_comment_syntax_enabled env) then ( + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + Buffer.add_string buf (String.sub pattern 2 
(String.length pattern - 2)); + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + ) else + let env = + if is_in_comment_syntax env then + let loc = loc_of_lexbuf env lexbuf in + unexpected_error env loc pattern + else + env + in + let env = in_comment_syntax true env in + let len = Sedlexing.lexeme_length lexbuf in + if + Sedlexing.Utf8.sub_lexeme lexbuf (len - 1) 1 = ":" + && Sedlexing.Utf8.sub_lexeme lexbuf (len - 2) 1 <> ":" + then + Token (env, T_COLON) + else + Continue env + | "*/" -> + if is_in_comment_syntax env then + let env = in_comment_syntax false env in + Continue env + else ( + Sedlexing.rollback lexbuf; + match%sedlex lexbuf with + | "*" -> Token (env, T_MULT) + | _ -> failwith "expected *" + ) + | "//" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = line_comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf false) + (* Support for the shebang at the beginning of a file. It is treated like a + * comment at the beginning or an error elsewhere *) + | "#!" -> + if Sedlexing.lexeme_start lexbuf = 0 then + let (env, _) = line_comment env (Buffer.create 127) lexbuf in + Continue env + else + Token (env, T_ERROR "#!") + (* Values *) + | "'" + | '"' -> + let quote = lexeme lexbuf in + let start = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let raw = Buffer.create 127 in + Buffer.add_string raw quote; + let octal = false in + let (env, _end, octal) = string_quote env quote buf raw octal lexbuf in + let loc = { Loc.source = Lex_env.source env; start; _end } in + Token (env, T_STRING (loc, Buffer.contents buf, Buffer.contents raw, octal)) + | '`' -> + let cooked = Buffer.create 127 in + let raw = Buffer.create 127 in + let literal = Buffer.create 127 in + Buffer.add_string literal (lexeme lexbuf); + + let start = start_pos_of_lexbuf env lexbuf in + let (env, is_tail) = template_part env cooked raw literal lexbuf in + let _end = end_pos_of_lexbuf env lexbuf in + let loc = { Loc.source = Lex_env.source env; start; _end } in + Token + ( env, + T_TEMPLATE_PART + ( loc, + { + cooked = Buffer.contents cooked; + raw = Buffer.contents raw; + literal = Buffer.contents literal; + }, + is_tail ) ) + | (binbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | binbigint -> Token (env, T_BIGINT { kind = BIG_BINARY; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | binbigint -> Token (env, T_BIGINT { kind = BIG_BINARY; raw = lexeme lexbuf }) + | (binnumber, (letter | '2' .. '9'), Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | binnumber -> Token (env, T_NUMBER { kind = BINARY; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | binnumber -> Token (env, T_NUMBER { kind = BINARY; raw = lexeme lexbuf }) + | (octbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | octbigint -> Token (env, T_BIGINT { kind = BIG_OCTAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | octbigint -> Token (env, T_BIGINT { kind = BIG_OCTAL; raw = lexeme lexbuf }) + | (octnumber, (letter | '8' .. 
'9'), Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | octnumber -> Token (env, T_NUMBER { kind = OCTAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | octnumber -> Token (env, T_NUMBER { kind = OCTAL; raw = lexeme lexbuf }) + | (legacynonoctnumber, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | legacynonoctnumber -> + Token (env, T_NUMBER { kind = LEGACY_NON_OCTAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | legacynonoctnumber -> Token (env, T_NUMBER { kind = LEGACY_NON_OCTAL; raw = lexeme lexbuf }) + | (legacyoctnumber, (letter | '8' .. '9'), Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | legacyoctnumber -> Token (env, T_NUMBER { kind = LEGACY_OCTAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | legacyoctnumber -> Token (env, T_NUMBER { kind = LEGACY_OCTAL; raw = lexeme lexbuf }) + | (hexbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | hexbigint -> Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | hexbigint -> Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | (hexnumber, non_hex_letter, Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | hexnumber -> Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | hexnumber -> Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) + | (scibigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | scibigint -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidSciBigInt in + Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | scibigint -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidSciBigInt in + Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | (scinumber, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | scinumber -> Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | scinumber -> Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) + | (floatbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | floatbigint -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidFloatBigInt in + Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | (wholebigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | wholebigint -> Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | floatbigint -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidFloatBigInt in + Token (env, T_BIGINT { kind = 
BIG_NORMAL; raw = lexeme lexbuf }) + | wholebigint -> Token (env, T_BIGINT { kind = BIG_NORMAL; raw = lexeme lexbuf }) + | ((wholenumber | floatnumber), word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | wholenumber + | floatnumber -> + Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) + | _ -> failwith "unreachable") + | wholenumber + | floatnumber -> + Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) + (* Keywords *) + | "async" -> Token (env, T_ASYNC) + | "await" -> Token (env, T_AWAIT) + | "break" -> Token (env, T_BREAK) + | "case" -> Token (env, T_CASE) + | "catch" -> Token (env, T_CATCH) + | "class" -> Token (env, T_CLASS) + | "const" -> Token (env, T_CONST) + | "continue" -> Token (env, T_CONTINUE) + | "debugger" -> Token (env, T_DEBUGGER) + | "declare" -> Token (env, T_DECLARE) + | "default" -> Token (env, T_DEFAULT) + | "delete" -> Token (env, T_DELETE) + | "do" -> Token (env, T_DO) + | "else" -> Token (env, T_ELSE) + | "enum" -> Token (env, T_ENUM) + | "export" -> Token (env, T_EXPORT) + | "extends" -> Token (env, T_EXTENDS) + | "false" -> Token (env, T_FALSE) + | "finally" -> Token (env, T_FINALLY) + | "for" -> Token (env, T_FOR) + | "function" -> Token (env, T_FUNCTION) + | "if" -> Token (env, T_IF) + | "implements" -> Token (env, T_IMPLEMENTS) + | "import" -> Token (env, T_IMPORT) + | "in" -> Token (env, T_IN) + | "instanceof" -> Token (env, T_INSTANCEOF) + | "interface" -> Token (env, T_INTERFACE) + | "let" -> Token (env, T_LET) + | "new" -> Token (env, T_NEW) + | "null" -> Token (env, T_NULL) + | "of" -> Token (env, T_OF) + | "opaque" -> Token (env, T_OPAQUE) + | "package" -> Token (env, T_PACKAGE) + | "private" -> Token (env, T_PRIVATE) + | "protected" -> Token (env, T_PROTECTED) + | "public" -> Token (env, T_PUBLIC) + | "return" -> Token (env, T_RETURN) + | "static" -> Token (env, T_STATIC) + | "super" -> Token (env, T_SUPER) + | "switch" -> Token (env, T_SWITCH) + | "this" -> Token (env, T_THIS) + | "throw" -> Token (env, T_THROW) + | "true" -> Token (env, T_TRUE) + | "try" -> Token (env, T_TRY) + | "type" -> Token (env, T_TYPE) + | "typeof" -> Token (env, T_TYPEOF) + | "var" -> Token (env, T_VAR) + | "void" -> Token (env, T_VOID) + | "while" -> Token (env, T_WHILE) + | "with" -> Token (env, T_WITH) + | "yield" -> Token (env, T_YIELD) + (* Identifiers *) + | (js_id_start, Star js_id_continue) -> + let loc = loc_of_lexbuf env lexbuf in + let raw = lexeme lexbuf in + let (env, value) = decode_identifier env raw in + Token (env, T_IDENTIFIER { loc; value; raw }) + (* TODO: Use [Symbol.iterator] instead of @@iterator. *) + | "@@iterator" + | "@@asyncIterator" -> + let loc = loc_of_lexbuf env lexbuf in + let raw = lexeme lexbuf in + Token (env, T_IDENTIFIER { loc; value = raw; raw }) + (* Syntax *) + | "{" -> Token (env, T_LCURLY) + | "}" -> Token (env, T_RCURLY) + | "(" -> Token (env, T_LPAREN) + | ")" -> Token (env, T_RPAREN) + | "[" -> Token (env, T_LBRACKET) + | "]" -> Token (env, T_RBRACKET) + | "..." -> Token (env, T_ELLIPSIS) + | "." -> Token (env, T_PERIOD) + | ";" -> Token (env, T_SEMICOLON) + | "," -> Token (env, T_COMMA) + | ":" -> Token (env, T_COLON) + | ("?.", digit) -> + Sedlexing.rollback lexbuf; + (match%sedlex lexbuf with + | "?" -> Token (env, T_PLING) + | _ -> failwith "expected ?") + | "?." -> Token (env, T_PLING_PERIOD) + | "??" -> Token (env, T_PLING_PLING) + | "?" 
-> Token (env, T_PLING) + | "&&" -> Token (env, T_AND) + | "||" -> Token (env, T_OR) + | "===" -> Token (env, T_STRICT_EQUAL) + | "!==" -> Token (env, T_STRICT_NOT_EQUAL) + | "<=" -> Token (env, T_LESS_THAN_EQUAL) + | ">=" -> Token (env, T_GREATER_THAN_EQUAL) + | "==" -> Token (env, T_EQUAL) + | "!=" -> Token (env, T_NOT_EQUAL) + | "++" -> Token (env, T_INCR) + | "--" -> Token (env, T_DECR) + | "<<=" -> Token (env, T_LSHIFT_ASSIGN) + | "<<" -> Token (env, T_LSHIFT) + | ">>=" -> Token (env, T_RSHIFT_ASSIGN) + | ">>>=" -> Token (env, T_RSHIFT3_ASSIGN) + | ">>>" -> Token (env, T_RSHIFT3) + | ">>" -> Token (env, T_RSHIFT) + | "+=" -> Token (env, T_PLUS_ASSIGN) + | "-=" -> Token (env, T_MINUS_ASSIGN) + | "*=" -> Token (env, T_MULT_ASSIGN) + | "**=" -> Token (env, T_EXP_ASSIGN) + | "%=" -> Token (env, T_MOD_ASSIGN) + | "&=" -> Token (env, T_BIT_AND_ASSIGN) + | "|=" -> Token (env, T_BIT_OR_ASSIGN) + | "^=" -> Token (env, T_BIT_XOR_ASSIGN) + | "<" -> Token (env, T_LESS_THAN) + | ">" -> Token (env, T_GREATER_THAN) + | "+" -> Token (env, T_PLUS) + | "-" -> Token (env, T_MINUS) + | "*" -> Token (env, T_MULT) + | "**" -> Token (env, T_EXP) + | "%" -> Token (env, T_MOD) + | "|" -> Token (env, T_BIT_OR) + | "&" -> Token (env, T_BIT_AND) + | "^" -> Token (env, T_BIT_XOR) + | "!" -> Token (env, T_NOT) + | "~" -> Token (env, T_BIT_NOT) + | "=" -> Token (env, T_ASSIGN) + | "=>" -> Token (env, T_ARROW) + | "/=" -> Token (env, T_DIV_ASSIGN) + | "/" -> Token (env, T_DIV) + | "@" -> Token (env, T_AT) + | "#" -> Token (env, T_POUND) + (* Others *) + | eof -> + let env = + if is_in_comment_syntax env then + let loc = loc_of_lexbuf env lexbuf in + lex_error env loc Parse_error.UnexpectedEOS + else + env + in + Token (env, T_EOF) + | any -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + Token (env, T_ERROR (lexeme lexbuf)) + | _ -> failwith "unreachable" + +let rec regexp_class env buf lexbuf = + match%sedlex lexbuf with + | eof -> env + | "\\\\" -> + Buffer.add_string buf "\\\\"; + regexp_class env buf lexbuf + | ('\\', ']') -> + Buffer.add_char buf '\\'; + Buffer.add_char buf ']'; + regexp_class env buf lexbuf + | ']' -> + Buffer.add_char buf ']'; + env + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl (eof | '\\' | ']')) + | any -> + let str = lexeme lexbuf in + Buffer.add_string buf str; + regexp_class env buf lexbuf + | _ -> failwith "unreachable" + +let rec regexp_body env buf lexbuf = + match%sedlex lexbuf with + | eof -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.UnterminatedRegExp in + (env, "") + | ('\\', line_terminator_sequence) -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.UnterminatedRegExp in + let env = new_line env lexbuf in + (env, "") + | ('\\', any) -> + let s = lexeme lexbuf in + Buffer.add_string buf s; + regexp_body env buf lexbuf + | ('/', Plus id_letter) -> + let flags = + let str = lexeme lexbuf in + String.sub str 1 (String.length str - 1) + in + (env, flags) + | '/' -> (env, "") + | '[' -> + Buffer.add_char buf '['; + let env = regexp_class env buf lexbuf in + regexp_body env buf lexbuf + | line_terminator_sequence -> + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.UnterminatedRegExp in + let env = new_line env lexbuf in + (env, "") + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl (eof | '\\' | '/' | '[' | line_terminator_sequence_start)) + | any -> + 
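+      (* fall-through: accumulate the matched text into the regexp body and keep
+       * scanning until the closing '/' (and optional flags) is reached *)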
let str = lexeme lexbuf in + Buffer.add_string buf str; + regexp_body env buf lexbuf + | _ -> failwith "unreachable" + +let regexp env lexbuf = + match%sedlex lexbuf with + | eof -> Token (env, T_EOF) + | line_terminator_sequence -> + let env = new_line env lexbuf in + Continue env + | Plus whitespace -> Continue env + | "//" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = line_comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf false) + | "/*" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + | '/' -> + let start = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, flags) = regexp_body env buf lexbuf in + let _end = end_pos_of_lexbuf env lexbuf in + let loc = { Loc.source = Lex_env.source env; start; _end } in + Token (env, T_REGEXP (loc, Buffer.contents buf, flags)) + | any -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + Token (env, T_ERROR (lexeme lexbuf)) + | _ -> failwith "unreachable" + +let rec jsx_text env mode buf raw lexbuf = + match%sedlex lexbuf with + | "'" + | '"' + | '<' + | '{' -> + let c = lexeme lexbuf in + begin + match (mode, c) with + | (JSX_SINGLE_QUOTED_TEXT, "'") + | (JSX_DOUBLE_QUOTED_TEXT, "\"") -> + env + | (JSX_CHILD_TEXT, ("<" | "{")) -> + (* Don't actually want to consume these guys + * yet...they're not part of the JSX text *) + Sedlexing.rollback lexbuf; + env + | _ -> + Buffer.add_string raw c; + Buffer.add_string buf c; + jsx_text env mode buf raw lexbuf + end + | eof -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + env + | line_terminator_sequence -> + let lt = lexeme lexbuf in + Buffer.add_string raw lt; + Buffer.add_string buf lt; + let env = new_line env lexbuf in + jsx_text env mode buf raw lexbuf + | ("&#x", Plus hex_digit, ';') -> + let s = lexeme lexbuf in + let n = String.sub s 3 (String.length s - 4) in + Buffer.add_string raw s; + let code = int_of_string ("0x" ^ n) in + Wtf8.add_wtf_8 buf code; + jsx_text env mode buf raw lexbuf + | ("&#", Plus digit, ';') -> + let s = lexeme lexbuf in + let n = String.sub s 2 (String.length s - 3) in + Buffer.add_string raw s; + let code = int_of_string n in + Wtf8.add_wtf_8 buf code; + jsx_text env mode buf raw lexbuf + | ("&", htmlentity, ';') -> + let s = lexeme lexbuf in + let entity = String.sub s 1 (String.length s - 2) in + Buffer.add_string raw s; + let code = + match entity with + | "quot" -> Some 0x0022 + | "amp" -> Some 0x0026 + | "apos" -> Some 0x0027 + | "lt" -> Some 0x003C + | "gt" -> Some 0x003E + | "nbsp" -> Some 0x00A0 + | "iexcl" -> Some 0x00A1 + | "cent" -> Some 0x00A2 + | "pound" -> Some 0x00A3 + | "curren" -> Some 0x00A4 + | "yen" -> Some 0x00A5 + | "brvbar" -> Some 0x00A6 + | "sect" -> Some 0x00A7 + | "uml" -> Some 0x00A8 + | "copy" -> Some 0x00A9 + | "ordf" -> Some 0x00AA + | "laquo" -> Some 0x00AB + | "not" -> Some 0x00AC + | "shy" -> Some 0x00AD + | "reg" -> Some 0x00AE + | "macr" -> Some 0x00AF + | "deg" -> Some 0x00B0 + | "plusmn" -> Some 0x00B1 + | "sup2" -> Some 0x00B2 + | "sup3" -> Some 0x00B3 + | "acute" -> Some 0x00B4 + | "micro" -> Some 0x00B5 + | "para" -> Some 0x00B6 + | "middot" -> Some 0x00B7 + | "cedil" -> Some 0x00B8 + | "sup1" -> Some 0x00B9 + | "ordm" -> Some 0x00BA + | "raquo" -> Some 0x00BB + | "frac14" -> Some 0x00BC + | "frac12" -> Some 0x00BD + | "frac34" -> Some 0x00BE + | "iquest" 
-> Some 0x00BF + | "Agrave" -> Some 0x00C0 + | "Aacute" -> Some 0x00C1 + | "Acirc" -> Some 0x00C2 + | "Atilde" -> Some 0x00C3 + | "Auml" -> Some 0x00C4 + | "Aring" -> Some 0x00C5 + | "AElig" -> Some 0x00C6 + | "Ccedil" -> Some 0x00C7 + | "Egrave" -> Some 0x00C8 + | "Eacute" -> Some 0x00C9 + | "Ecirc" -> Some 0x00CA + | "Euml" -> Some 0x00CB + | "Igrave" -> Some 0x00CC + | "Iacute" -> Some 0x00CD + | "Icirc" -> Some 0x00CE + | "Iuml" -> Some 0x00CF + | "ETH" -> Some 0x00D0 + | "Ntilde" -> Some 0x00D1 + | "Ograve" -> Some 0x00D2 + | "Oacute" -> Some 0x00D3 + | "Ocirc" -> Some 0x00D4 + | "Otilde" -> Some 0x00D5 + | "Ouml" -> Some 0x00D6 + | "times" -> Some 0x00D7 + | "Oslash" -> Some 0x00D8 + | "Ugrave" -> Some 0x00D9 + | "Uacute" -> Some 0x00DA + | "Ucirc" -> Some 0x00DB + | "Uuml" -> Some 0x00DC + | "Yacute" -> Some 0x00DD + | "THORN" -> Some 0x00DE + | "szlig" -> Some 0x00DF + | "agrave" -> Some 0x00E0 + | "aacute" -> Some 0x00E1 + | "acirc" -> Some 0x00E2 + | "atilde" -> Some 0x00E3 + | "auml" -> Some 0x00E4 + | "aring" -> Some 0x00E5 + | "aelig" -> Some 0x00E6 + | "ccedil" -> Some 0x00E7 + | "egrave" -> Some 0x00E8 + | "eacute" -> Some 0x00E9 + | "ecirc" -> Some 0x00EA + | "euml" -> Some 0x00EB + | "igrave" -> Some 0x00EC + | "iacute" -> Some 0x00ED + | "icirc" -> Some 0x00EE + | "iuml" -> Some 0x00EF + | "eth" -> Some 0x00F0 + | "ntilde" -> Some 0x00F1 + | "ograve" -> Some 0x00F2 + | "oacute" -> Some 0x00F3 + | "ocirc" -> Some 0x00F4 + | "otilde" -> Some 0x00F5 + | "ouml" -> Some 0x00F6 + | "divide" -> Some 0x00F7 + | "oslash" -> Some 0x00F8 + | "ugrave" -> Some 0x00F9 + | "uacute" -> Some 0x00FA + | "ucirc" -> Some 0x00FB + | "uuml" -> Some 0x00FC + | "yacute" -> Some 0x00FD + | "thorn" -> Some 0x00FE + | "yuml" -> Some 0x00FF + | "OElig" -> Some 0x0152 + | "oelig" -> Some 0x0153 + | "Scaron" -> Some 0x0160 + | "scaron" -> Some 0x0161 + | "Yuml" -> Some 0x0178 + | "fnof" -> Some 0x0192 + | "circ" -> Some 0x02C6 + | "tilde" -> Some 0x02DC + | "Alpha" -> Some 0x0391 + | "Beta" -> Some 0x0392 + | "Gamma" -> Some 0x0393 + | "Delta" -> Some 0x0394 + | "Epsilon" -> Some 0x0395 + | "Zeta" -> Some 0x0396 + | "Eta" -> Some 0x0397 + | "Theta" -> Some 0x0398 + | "Iota" -> Some 0x0399 + | "Kappa" -> Some 0x039A + | "Lambda" -> Some 0x039B + | "Mu" -> Some 0x039C + | "Nu" -> Some 0x039D + | "Xi" -> Some 0x039E + | "Omicron" -> Some 0x039F + | "Pi" -> Some 0x03A0 + | "Rho" -> Some 0x03A1 + | "Sigma" -> Some 0x03A3 + | "Tau" -> Some 0x03A4 + | "Upsilon" -> Some 0x03A5 + | "Phi" -> Some 0x03A6 + | "Chi" -> Some 0x03A7 + | "Psi" -> Some 0x03A8 + | "Omega" -> Some 0x03A9 + | "alpha" -> Some 0x03B1 + | "beta" -> Some 0x03B2 + | "gamma" -> Some 0x03B3 + | "delta" -> Some 0x03B4 + | "epsilon" -> Some 0x03B5 + | "zeta" -> Some 0x03B6 + | "eta" -> Some 0x03B7 + | "theta" -> Some 0x03B8 + | "iota" -> Some 0x03B9 + | "kappa" -> Some 0x03BA + | "lambda" -> Some 0x03BB + | "mu" -> Some 0x03BC + | "nu" -> Some 0x03BD + | "xi" -> Some 0x03BE + | "omicron" -> Some 0x03BF + | "pi" -> Some 0x03C0 + | "rho" -> Some 0x03C1 + | "sigmaf" -> Some 0x03C2 + | "sigma" -> Some 0x03C3 + | "tau" -> Some 0x03C4 + | "upsilon" -> Some 0x03C5 + | "phi" -> Some 0x03C6 + | "chi" -> Some 0x03C7 + | "psi" -> Some 0x03C8 + | "omega" -> Some 0x03C9 + | "thetasym" -> Some 0x03D1 + | "upsih" -> Some 0x03D2 + | "piv" -> Some 0x03D6 + | "ensp" -> Some 0x2002 + | "emsp" -> Some 0x2003 + | "thinsp" -> Some 0x2009 + | "zwnj" -> Some 0x200C + | "zwj" -> Some 0x200D + | "lrm" -> Some 0x200E + | "rlm" -> Some 0x200F + | "ndash" -> Some 0x2013 + | 
"mdash" -> Some 0x2014 + | "lsquo" -> Some 0x2018 + | "rsquo" -> Some 0x2019 + | "sbquo" -> Some 0x201A + | "ldquo" -> Some 0x201C + | "rdquo" -> Some 0x201D + | "bdquo" -> Some 0x201E + | "dagger" -> Some 0x2020 + | "Dagger" -> Some 0x2021 + | "bull" -> Some 0x2022 + | "hellip" -> Some 0x2026 + | "permil" -> Some 0x2030 + | "prime" -> Some 0x2032 + | "Prime" -> Some 0x2033 + | "lsaquo" -> Some 0x2039 + | "rsaquo" -> Some 0x203A + | "oline" -> Some 0x203E + | "frasl" -> Some 0x2044 + | "euro" -> Some 0x20AC + | "image" -> Some 0x2111 + | "weierp" -> Some 0x2118 + | "real" -> Some 0x211C + | "trade" -> Some 0x2122 + | "alefsym" -> Some 0x2135 + | "larr" -> Some 0x2190 + | "uarr" -> Some 0x2191 + | "rarr" -> Some 0x2192 + | "darr" -> Some 0x2193 + | "harr" -> Some 0x2194 + | "crarr" -> Some 0x21B5 + | "lArr" -> Some 0x21D0 + | "uArr" -> Some 0x21D1 + | "rArr" -> Some 0x21D2 + | "dArr" -> Some 0x21D3 + | "hArr" -> Some 0x21D4 + | "forall" -> Some 0x2200 + | "part" -> Some 0x2202 + | "exist" -> Some 0x2203 + | "empty" -> Some 0x2205 + | "nabla" -> Some 0x2207 + | "isin" -> Some 0x2208 + | "notin" -> Some 0x2209 + | "ni" -> Some 0x220B + | "prod" -> Some 0x220F + | "sum" -> Some 0x2211 + | "minus" -> Some 0x2212 + | "lowast" -> Some 0x2217 + | "radic" -> Some 0x221A + | "prop" -> Some 0x221D + | "infin" -> Some 0x221E + | "ang" -> Some 0x2220 + | "and" -> Some 0x2227 + | "or" -> Some 0x2228 + | "cap" -> Some 0x2229 + | "cup" -> Some 0x222A + | "'int'" -> Some 0x222B + | "there4" -> Some 0x2234 + | "sim" -> Some 0x223C + | "cong" -> Some 0x2245 + | "asymp" -> Some 0x2248 + | "ne" -> Some 0x2260 + | "equiv" -> Some 0x2261 + | "le" -> Some 0x2264 + | "ge" -> Some 0x2265 + | "sub" -> Some 0x2282 + | "sup" -> Some 0x2283 + | "nsub" -> Some 0x2284 + | "sube" -> Some 0x2286 + | "supe" -> Some 0x2287 + | "oplus" -> Some 0x2295 + | "otimes" -> Some 0x2297 + | "perp" -> Some 0x22A5 + | "sdot" -> Some 0x22C5 + | "lceil" -> Some 0x2308 + | "rceil" -> Some 0x2309 + | "lfloor" -> Some 0x230A + | "rfloor" -> Some 0x230B + | "lang" -> Some 0x27E8 (* 0x2329 in HTML4 *) + | "rang" -> Some 0x27E9 (* 0x232A in HTML4 *) + | "loz" -> Some 0x25CA + | "spades" -> Some 0x2660 + | "clubs" -> Some 0x2663 + | "hearts" -> Some 0x2665 + | "diams" -> Some 0x2666 + | _ -> None + in + (match code with + | Some code -> Wtf8.add_wtf_8 buf code + | None -> Buffer.add_string buf ("&" ^ entity ^ ";")); + jsx_text env mode buf raw lexbuf + (* match multi-char substrings that don't contain the start chars of the above patterns *) + | Plus (Compl ("'" | '"' | '<' | '{' | '&' | eof | line_terminator_sequence_start)) + | any -> + let c = lexeme lexbuf in + Buffer.add_string raw c; + Buffer.add_string buf c; + jsx_text env mode buf raw lexbuf + | _ -> failwith "unreachable" + +let jsx_tag env lexbuf = + match%sedlex lexbuf with + | eof -> Token (env, T_EOF) + | line_terminator_sequence -> + let env = new_line env lexbuf in + Continue env + | Plus whitespace -> Continue env + | "//" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = line_comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf false) + | "/*" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + | '<' -> Token (env, T_LESS_THAN) + | '/' -> Token (env, T_DIV) + | '>' -> Token (env, T_GREATER_THAN) + | '{' -> Token (env, T_LCURLY) + | ':' -> Token 
(env, T_COLON) + | '.' -> Token (env, T_PERIOD) + | '=' -> Token (env, T_ASSIGN) + | (js_id_start, Star ('-' | js_id_continue)) -> + Token (env, T_JSX_IDENTIFIER { raw = lexeme lexbuf }) + | "'" + | '"' -> + let quote = lexeme lexbuf in + let start = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let raw = Buffer.create 127 in + Buffer.add_string raw quote; + let mode = + if quote = "'" then + JSX_SINGLE_QUOTED_TEXT + else + JSX_DOUBLE_QUOTED_TEXT + in + let env = jsx_text env mode buf raw lexbuf in + let _end = end_pos_of_lexbuf env lexbuf in + Buffer.add_string raw quote; + let value = Buffer.contents buf in + let raw = Buffer.contents raw in + let loc = { Loc.source = Lex_env.source env; start; _end } in + Token (env, T_JSX_TEXT (loc, value, raw)) + | any -> Token (env, T_ERROR (lexeme lexbuf)) + | _ -> failwith "unreachable" + +let jsx_child env start buf raw lexbuf = + match%sedlex lexbuf with + | line_terminator_sequence -> + let lt = lexeme lexbuf in + Buffer.add_string raw lt; + Buffer.add_string buf lt; + let env = new_line env lexbuf in + let env = jsx_text env JSX_CHILD_TEXT buf raw lexbuf in + let _end = end_pos_of_lexbuf env lexbuf in + let value = Buffer.contents buf in + let raw = Buffer.contents raw in + let loc = { Loc.source = Lex_env.source env; start; _end } in + (env, T_JSX_TEXT (loc, value, raw)) + | eof -> (env, T_EOF) + | '<' -> (env, T_LESS_THAN) + | '{' -> (env, T_LCURLY) + | any -> + Sedlexing.rollback lexbuf; + + (* let jsx_text consume this char *) + let env = jsx_text env JSX_CHILD_TEXT buf raw lexbuf in + let _end = end_pos_of_lexbuf env lexbuf in + let value = Buffer.contents buf in + let raw = Buffer.contents raw in + let loc = { Loc.source = Lex_env.source env; start; _end } in + (env, T_JSX_TEXT (loc, value, raw)) + | _ -> failwith "unreachable" + +let template_tail env lexbuf = + match%sedlex lexbuf with + | line_terminator_sequence -> + let env = new_line env lexbuf in + Continue env + | Plus whitespace -> Continue env + | "//" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = line_comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf false) + | "/*" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + | '}' -> + let start = start_pos_of_lexbuf env lexbuf in + let cooked = Buffer.create 127 in + let raw = Buffer.create 127 in + let literal = Buffer.create 127 in + Buffer.add_string literal "}"; + let (env, is_tail) = template_part env cooked raw literal lexbuf in + let _end = end_pos_of_lexbuf env lexbuf in + let loc = { Loc.source = Lex_env.source env; start; _end } in + Token + ( env, + T_TEMPLATE_PART + ( loc, + { + cooked = Buffer.contents cooked; + raw = Buffer.contents raw; + literal = Buffer.contents literal; + }, + is_tail ) ) + | any -> + let env = illegal env (loc_of_lexbuf env lexbuf) in + Token + ( env, + T_TEMPLATE_PART (loc_of_lexbuf env lexbuf, { cooked = ""; raw = ""; literal = "" }, true) + ) + | _ -> failwith "unreachable" + +(* There are some tokens that never show up in a type and which can cause + * ambiguity. For example, Foo> ends with two angle brackets, not + * with a right shift. 
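+ * In a reference like Foo<Bar<number>>, the trailing >> must be lexed as two
+ * T_GREATER_THAN tokens rather than a single T_RSHIFT.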
+ *) +let type_token env lexbuf = + match%sedlex lexbuf with + | line_terminator_sequence -> + let env = new_line env lexbuf in + Continue env + | Plus whitespace -> Continue env + | "/*" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + | ("/*", Star whitespace, (":" | "::" | "flow-include")) -> + let pattern = lexeme lexbuf in + if not (is_comment_syntax_enabled env) then ( + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + Buffer.add_string buf pattern; + let (env, end_pos) = comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf true) + ) else + let env = + if is_in_comment_syntax env then + let loc = loc_of_lexbuf env lexbuf in + unexpected_error env loc pattern + else + env + in + let env = in_comment_syntax true env in + let len = Sedlexing.lexeme_length lexbuf in + if + Sedlexing.Utf8.sub_lexeme lexbuf (len - 1) 1 = ":" + && Sedlexing.Utf8.sub_lexeme lexbuf (len - 2) 1 <> ":" + then + Token (env, T_COLON) + else + Continue env + | "*/" -> + if is_in_comment_syntax env then + let env = in_comment_syntax false env in + Continue env + else ( + Sedlexing.rollback lexbuf; + match%sedlex lexbuf with + | "*" -> Token (env, T_MULT) + | _ -> failwith "expected *" + ) + | "//" -> + let start_pos = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let (env, end_pos) = line_comment env buf lexbuf in + Comment (env, mk_comment env start_pos end_pos buf false) + | "'" + | '"' -> + let quote = lexeme lexbuf in + let start = start_pos_of_lexbuf env lexbuf in + let buf = Buffer.create 127 in + let raw = Buffer.create 127 in + Buffer.add_string raw quote; + let octal = false in + let (env, _end, octal) = string_quote env quote buf raw octal lexbuf in + let loc = { Loc.source = Lex_env.source env; start; _end } in + Token (env, T_STRING (loc, Buffer.contents buf, Buffer.contents raw, octal)) + (* + * Number literals + *) + | (Opt neg, binbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, binbigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_BINARY num) + | _ -> failwith "unreachable") + | (Opt neg, binbigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_BINARY num) + | (Opt neg, binnumber, (letter | '2' .. '9'), Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, binnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton BINARY num) + | _ -> failwith "unreachable") + | (Opt neg, binnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton BINARY num) + | (Opt neg, octbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, octbigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_OCTAL num) + | _ -> failwith "unreachable") + | (Opt neg, octbigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_OCTAL num) + | (Opt neg, octnumber, (letter | '8' .. 
'9'), Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, octnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton OCTAL num) + | _ -> failwith "unreachable") + | (Opt neg, octnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton OCTAL num) + | (Opt neg, legacyoctnumber, (letter | '8' .. '9'), Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, legacyoctnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton LEGACY_OCTAL num) + | _ -> failwith "unreachable") + | (Opt neg, legacyoctnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton LEGACY_OCTAL num) + | (Opt neg, hexbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, hexbigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, hexbigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | (Opt neg, hexnumber, non_hex_letter, Star alphanumeric) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, hexnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, hexnumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton NORMAL num) + | (Opt neg, scibigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, scibigint) -> + let num = lexeme lexbuf in + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidSciBigInt in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, scibigint) -> + let num = lexeme lexbuf in + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidSciBigInt in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | (Opt neg, scinumber, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, scinumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, scinumber) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton NORMAL num) + | (Opt neg, floatbigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, floatbigint) -> + let num = lexeme lexbuf in + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidFloatBigInt in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, wholebigint, word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, wholebigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, floatbigint) -> + let num = lexeme lexbuf in + let loc = loc_of_lexbuf env lexbuf in + let env = lex_error env loc Parse_error.InvalidFloatBigInt in + 
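+    (* the error is recorded in the lex env, but a bigint singleton token is still
+     * produced so that parsing can continue *)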
Token (env, mk_bignum_singleton BIG_NORMAL num) + | (Opt neg, wholebigint) -> + let num = lexeme lexbuf in + Token (env, mk_bignum_singleton BIG_NORMAL num) + | (Opt neg, (wholenumber | floatnumber), word) -> + (* Numbers cannot be immediately followed by words *) + recover env lexbuf ~f:(fun env lexbuf -> + match%sedlex lexbuf with + | (Opt neg, wholenumber) + | floatnumber -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton NORMAL num) + | _ -> failwith "unreachable") + | (Opt neg, (wholenumber | floatnumber)) -> + let num = lexeme lexbuf in + Token (env, mk_num_singleton NORMAL num) + (* Keywords *) + | "any" -> Token (env, T_ANY_TYPE) + | "bool" -> Token (env, T_BOOLEAN_TYPE BOOL) + | "boolean" -> Token (env, T_BOOLEAN_TYPE BOOLEAN) + | "empty" -> Token (env, T_EMPTY_TYPE) + | "extends" -> Token (env, T_EXTENDS) + | "false" -> Token (env, T_FALSE) + | "interface" -> Token (env, T_INTERFACE) + | "mixed" -> Token (env, T_MIXED_TYPE) + | "null" -> Token (env, T_NULL) + | "number" -> Token (env, T_NUMBER_TYPE) + | "bigint" -> Token (env, T_BIGINT_TYPE) + | "static" -> Token (env, T_STATIC) + | "string" -> Token (env, T_STRING_TYPE) + | "true" -> Token (env, T_TRUE) + | "typeof" -> Token (env, T_TYPEOF) + | "void" -> Token (env, T_VOID_TYPE) + (* Identifiers *) + | (js_id_start, Star js_id_continue) -> + let loc = loc_of_lexbuf env lexbuf in + let raw = lexeme lexbuf in + let (env, value) = decode_identifier env raw in + Token (env, T_IDENTIFIER { loc; value; raw }) + | "%checks" -> Token (env, T_CHECKS) + (* Syntax *) + | "[" -> Token (env, T_LBRACKET) + | "]" -> Token (env, T_RBRACKET) + | "{" -> Token (env, T_LCURLY) + | "}" -> Token (env, T_RCURLY) + | "{|" -> Token (env, T_LCURLYBAR) + | "|}" -> Token (env, T_RCURLYBAR) + | "(" -> Token (env, T_LPAREN) + | ")" -> Token (env, T_RPAREN) + | "..." -> Token (env, T_ELLIPSIS) + | "." -> Token (env, T_PERIOD) + | ";" -> Token (env, T_SEMICOLON) + | "," -> Token (env, T_COMMA) + | ":" -> Token (env, T_COLON) + | "?" -> Token (env, T_PLING) + | "[" -> Token (env, T_LBRACKET) + | "]" -> Token (env, T_RBRACKET) + (* Generics *) + | "<" -> Token (env, T_LESS_THAN) + | ">" -> Token (env, T_GREATER_THAN) + (* Generic default *) + | "=" -> Token (env, T_ASSIGN) + (* Optional or nullable *) + | "?" -> Token (env, T_PLING) + (* Existential *) + | "*" -> Token (env, T_MULT) + (* Annotation or bound *) + | ":" -> Token (env, T_COLON) + (* Union *) + | '|' -> Token (env, T_BIT_OR) + (* Intersection *) + | '&' -> Token (env, T_BIT_AND) + (* typeof *) + | "typeof" -> Token (env, T_TYPEOF) + (* Function type *) + | "=>" -> Token (env, T_ARROW) + (* Type alias *) + | '=' -> Token (env, T_ASSIGN) + (* Variance annotations *) + | '+' -> Token (env, T_PLUS) + | '-' -> Token (env, T_MINUS) + (* Others *) + | eof -> + let env = + if is_in_comment_syntax env then + let loc = loc_of_lexbuf env lexbuf in + lex_error env loc Parse_error.UnexpectedEOS + else + env + in + Token (env, T_EOF) + | any -> Token (env, T_ERROR (lexeme lexbuf)) + | _ -> failwith "unreachable" + +(* Lexing JSX children requires a string buffer to keep track of whitespace + * *) +let jsx_child env = + (* yes, the _start_ of the child is the _end_pos_ of the lexbuf! 
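+   * The previous token (for example the '>' that closed the opening element) has
+   * already been consumed, so the child's text starts exactly where it ended.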
*) + let start = end_pos_of_lexbuf env env.lex_lb in + let buf = Buffer.create 127 in + let raw = Buffer.create 127 in + let (env, child) = jsx_child env start buf raw env.lex_lb in + get_result_and_clear_state (env, child, []) + +let wrap f = + let rec helper comments env = + match f env env.lex_lb with + | Token (env, t) -> (env, t, List.rev comments) + | Comment (env, comment) -> helper (comment :: comments) env + | Continue env -> helper comments env + in + (fun env -> get_result_and_clear_state (helper [] env)) + +let regexp = wrap regexp + +let jsx_tag = wrap jsx_tag + +let template_tail = wrap template_tail + +let type_token = wrap type_token + +let token = wrap token diff --git a/src/parser/flow_parser_dot_js.ml b/src/parser/flow_parser_dot_js.ml index 5426e1eb5c5..1403fe4bd49 100644 --- a/src/parser/flow_parser_dot_js.ml +++ b/src/parser/flow_parser_dot_js.ml @@ -1,31 +1,28 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module Js = Js_of_ocaml.Js + let () = let exports = - if (Js.typeof (Js.Unsafe.js_expr "exports") != Js.string "undefined") - then Js.Unsafe.js_expr "exports" - else begin + if Js.typeof (Js.Unsafe.js_expr "exports") != Js.string "undefined" then + Js.Unsafe.js_expr "exports" + else let exports = Js.Unsafe.obj [||] in Js.Unsafe.set Js.Unsafe.global "flow" exports; exports - end in let js_error_of_exn = function - | Js.Error e -> - Js.raise_js_error e + | Js.Error e -> Js.raise_js_error e | exn -> - let msg = "Internal error: "^(Printexc.to_string exn) in - Js.raise_js_error (Js.Unsafe.new_obj Js.error_constr [| - Js.Unsafe.inject (Js.string msg) - |]) + let msg = "Internal error: " ^ Printexc.to_string exn in + Js.raise_js_error (Js.Unsafe.new_obj Js.error_constr [|Js.Unsafe.inject (Js.string msg)|]) in let parse content options = - try Flow_parser_js.parse content options - with exn -> js_error_of_exn exn + (try Flow_parser_js.parse content options with exn -> js_error_of_exn exn) in Js.Unsafe.set exports "parse" (Js.Unsafe.callback parse) diff --git a/src/parser/flow_parser_js.ml b/src/parser/flow_parser_js.ml index 1eae675ca5b..8989708ab36 100644 --- a/src/parser/flow_parser_js.ml +++ b/src/parser/flow_parser_js.ml @@ -1,12 +1,15 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module Js = Js_of_ocaml.Js + module JsTranslator : sig - val translation_errors: (Loc.t * Parse_error.t) list ref + val translation_errors : (Loc.t * Parse_error.t) list ref + include Translator_intf.S end = struct type t = Js.Unsafe.any @@ -14,91 +17,120 @@ end = struct let translation_errors = ref [] let string x = Js.Unsafe.inject (Js.string x) + let bool x = Js.Unsafe.inject (Js.bool x) + let obj props = Js.Unsafe.inject (Js.Unsafe.obj (Array.of_list props)) + let array arr = Js.Unsafe.inject (Js.array (Array.of_list arr)) + let number x = Js.Unsafe.inject (Js.number_of_float x) + + let int x = number (float x) + let null = Js.Unsafe.inject Js.null + let regexp loc pattern flags = - let regexp = try - Js.Unsafe.new_obj (Js.Unsafe.variable "RegExp") [| - string pattern; - string flags; - |] - with _ -> - translation_errors := (loc, Parse_error.InvalidRegExp)::!translation_errors; - (* Invalid RegExp. 
We already validated the flags, but we've been
-       * too lazy to write a JS regexp parser in Ocaml, so we didn't know
-       * the pattern was invalid. We'll recover with an empty pattern.
-       *)
-      Js.Unsafe.new_obj (Js.Unsafe.variable "RegExp") [|
-        string "";
-        string flags;
-      |]
+      let regexp =
+        try Js.Unsafe.new_obj (Js.Unsafe.variable "RegExp") [|string pattern; string flags|]
+        with _ ->
+          translation_errors := (loc, Parse_error.InvalidRegExp) :: !translation_errors;
+
+          (* Invalid RegExp. We already validated the flags, but we've been
+           * too lazy to write a JS regexp parser in Ocaml, so we didn't know
+           * the pattern was invalid. We'll recover with an empty pattern.
+           *)
+          Js.Unsafe.new_obj (Js.Unsafe.variable "RegExp") [|string ""; string flags|]
       in
       Js.Unsafe.inject regexp
   end

-module Translate = Estree_translator.Translate (JsTranslator) (struct
-  let include_comments = true
-  let include_locs = true
-end)
-
-module Token_translator = Token_translator.Translate (JsTranslator)
-
-let parse_options jsopts = Parser_env.(
-  let opts = default_parse_options in
-
-  let decorators = Js.Unsafe.get jsopts "esproposal_decorators" in
-  let opts = if Js.Optdef.test decorators
-    then { opts with esproposal_decorators = Js.to_bool decorators; }
-    else opts in
-
-  let class_instance_fields = Js.Unsafe.get jsopts "esproposal_class_instance_fields" in
-  let opts = if Js.Optdef.test class_instance_fields
-    then { opts with esproposal_class_instance_fields = Js.to_bool class_instance_fields; }
-    else opts in
+module Translate =
+  Estree_translator.Translate
+    (JsTranslator)
+    (struct
+      let include_interned_comments = false

-  let class_static_fields = Js.Unsafe.get jsopts "esproposal_class_static_fields" in
-  let opts = if Js.Optdef.test class_static_fields
-    then { opts with esproposal_class_static_fields = Js.to_bool class_static_fields; }
-    else opts in
+      let include_comments = true

-  let export_star_as = Js.Unsafe.get jsopts "esproposal_export_star_as" in
-  let opts = if Js.Optdef.test export_star_as
-    then { opts with esproposal_export_star_as = Js.to_bool export_star_as; }
-    else opts in
+      let include_locs = true
+    end)

-  let optional_chaining = Js.Unsafe.get jsopts "esproposal_optional_chaining" in
-  let opts = if Js.Optdef.test optional_chaining
-    then { opts with esproposal_optional_chaining = Js.to_bool optional_chaining; }
-    else opts in
-
-  let nullish_coalescing = Js.Unsafe.get jsopts "esproposal_nullish_coalescing" in
-  let opts = if Js.Optdef.test nullish_coalescing
-    then { opts with esproposal_nullish_coalescing = Js.to_bool nullish_coalescing; }
-    else opts in
-
-  let types = Js.Unsafe.get jsopts "types" in
-  let opts = if Js.Optdef.test types
-    then { opts with types = Js.to_bool types; }
-    else opts in
+module Token_translator = Token_translator.Translate (JsTranslator)

-  opts
-)
+let parse_options jsopts =
+  Parser_env.(
+    let opts = default_parse_options in
+    let enums = Js.Unsafe.get jsopts "enums" in
+    let opts =
+      if Js.Optdef.test enums then
+        { opts with enums = Js.to_bool enums }
+      else
+        opts
+    in
+    let decorators = Js.Unsafe.get jsopts "esproposal_decorators" in
+    let opts =
+      if Js.Optdef.test decorators then
+        { opts with esproposal_decorators = Js.to_bool decorators }
+      else
+        opts
+    in
+    let class_instance_fields = Js.Unsafe.get jsopts "esproposal_class_instance_fields" in
+    let opts =
+      if Js.Optdef.test class_instance_fields then
+        { opts with esproposal_class_instance_fields = Js.to_bool class_instance_fields }
+      else
+        opts
+    in
+    let class_static_fields = Js.Unsafe.get jsopts "esproposal_class_static_fields" in
+    let opts =
+      if Js.Optdef.test class_static_fields then
+        { opts with esproposal_class_static_fields = Js.to_bool class_static_fields }
+      else
+        opts
+    in
+    let export_star_as = Js.Unsafe.get jsopts "esproposal_export_star_as" in
+    let opts =
+      if Js.Optdef.test export_star_as then
+        { opts with esproposal_export_star_as = Js.to_bool export_star_as }
+      else
+        opts
+    in
+    let optional_chaining = Js.Unsafe.get jsopts "esproposal_optional_chaining" in
+    let opts =
+      if Js.Optdef.test optional_chaining then
+        { opts with esproposal_optional_chaining = Js.to_bool optional_chaining }
+      else
+        opts
+    in
+    let nullish_coalescing = Js.Unsafe.get jsopts "esproposal_nullish_coalescing" in
+    let opts =
+      if Js.Optdef.test nullish_coalescing then
+        { opts with esproposal_nullish_coalescing = Js.to_bool nullish_coalescing }
+      else
+        opts
+    in
+    let types = Js.Unsafe.get jsopts "types" in
+    let opts =
+      if Js.Optdef.test types then
+        { opts with types = Js.to_bool types }
+      else
+        opts
+    in
+    opts)

-let translate_tokens tokens =
-  JsTranslator.array (List.rev_map Token_translator.token tokens)
+let translate_tokens offset_table tokens =
+  JsTranslator.array (List.rev_map (Token_translator.token offset_table) tokens)

 let parse content options =
   let options =
-    if options = Js.undefined
-    then Js.Unsafe.obj [||]
-    else options
+    if options = Js.undefined then
+      Js.Unsafe.obj [||]
+    else
+      options
   in
   let content = Js.to_string content in
   let parse_options = Some (parse_options options) in
-
   let include_tokens =
     let tokens = Js.Unsafe.get options "tokens" in
     Js.Optdef.test tokens && Js.to_bool tokens
@@ -106,16 +138,15 @@ let parse content options =
   let rev_tokens = ref [] in
   let token_sink =
     if include_tokens then
-      Some (fun token_data ->
-        rev_tokens := token_data::!rev_tokens
-      )
-    else None
+      Some (fun token_data -> rev_tokens := token_data :: !rev_tokens)
+    else
+      None
   in
-
   let (ocaml_ast, errors) = Parser_flow.program ~fail:false ~parse_options ~token_sink content in
   JsTranslator.translation_errors := [];
-  let ret = Translate.program ocaml_ast in
+  let offset_table = Offset_utils.make content in
+  let ret = Translate.program (Some offset_table) ocaml_ast in
   let translation_errors = !JsTranslator.translation_errors in
   Js.Unsafe.set ret "errors" (Translate.errors (errors @ translation_errors));
-  if include_tokens then Js.Unsafe.set ret "tokens" (translate_tokens !rev_tokens);
+  if include_tokens then Js.Unsafe.set ret "tokens" (translate_tokens offset_table !rev_tokens);
   ret
diff --git a/src/parser/jsx_parser.ml b/src/parser/jsx_parser.ml
index c2c9d5381ce..5eba887b805 100644
--- a/src/parser/jsx_parser.ml
+++ b/src/parser/jsx_parser.ml
@@ -1,327 +1,385 @@
 (**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
*) module Ast = Flow_ast - open Token +open Parser_common open Parser_env open Flow_ast -module Error = Parse_error -module JSX (Parse: Parser_common.PARSER) = struct +module JSX (Parse : Parser_common.PARSER) = struct let spread_attribute env = Eat.push_lex_mode env Lex_mode.NORMAL; - let start_loc = Peek.loc env in - Expect.token env T_LCURLY; - Expect.token env T_ELLIPSIS; - let argument = Parse.assignment env in - let end_loc = Peek.loc env in - Expect.token env T_RCURLY; + let attr = + with_loc + (fun env -> + Expect.token env T_LCURLY; + Expect.token env T_ELLIPSIS; + let argument = Parse.assignment env in + Expect.token env T_RCURLY; + { JSX.SpreadAttribute.argument }) + env + in Eat.pop_lex_mode env; - Loc.btwn start_loc end_loc, JSX.SpreadAttribute.({ - argument; - }) + attr - let expression_container' env start_loc = - let expression = if Peek.token env = T_RCURLY - then - let empty_loc = Loc.btwn_exclusive start_loc (Peek.loc env) in - JSX.ExpressionContainer.EmptyExpression empty_loc - else JSX.ExpressionContainer.Expression (Parse.expression env) in - let end_loc = Peek.loc env in - Expect.token env T_RCURLY; - Eat.pop_lex_mode env; - Loc.btwn start_loc end_loc, JSX.ExpressionContainer.({ - expression; - }) + let expression_container' env = + let expression = + if Peek.token env = T_RCURLY then + JSX.ExpressionContainer.EmptyExpression + else + JSX.ExpressionContainer.Expression (Parse.expression env) + in + { JSX.ExpressionContainer.expression } let expression_container env = Eat.push_lex_mode env Lex_mode.NORMAL; - let start_loc = Peek.loc env in - Expect.token env T_LCURLY; - expression_container' env start_loc + let container = + with_loc + (fun env -> + Expect.token env T_LCURLY; + let container = expression_container' env in + Expect.token env T_RCURLY; + container) + env + in + Eat.pop_lex_mode env; + container let expression_container_or_spread_child env = Eat.push_lex_mode env Lex_mode.NORMAL; - let start_loc = Peek.loc env in - Expect.token env T_LCURLY; - match Peek.token env with - | T_ELLIPSIS -> - Expect.token env T_ELLIPSIS; - let expr = Parse.assignment env in - let end_loc = Peek.loc env in - Expect.token env T_RCURLY; - Eat.pop_lex_mode env; - Loc.btwn start_loc end_loc, JSX.SpreadChild expr - | _ -> - let expression_container = expression_container' env start_loc in - fst expression_container, JSX.ExpressionContainer (snd expression_container) + let (loc, result) = + with_loc + (fun env -> + Expect.token env T_LCURLY; + let result = + match Peek.token env with + | T_ELLIPSIS -> + Expect.token env T_ELLIPSIS; + let expr = Parse.assignment env in + JSX.SpreadChild expr + | _ -> + let container = expression_container' env in + JSX.ExpressionContainer container + in + Expect.token env T_RCURLY; + result) + env + in + Eat.pop_lex_mode env; + (loc, result) let identifier env = let loc = Peek.loc env in - let name = match Peek.token env with - | T_JSX_IDENTIFIER { raw } -> raw - | _ -> error_unexpected env; "" + let name = + match Peek.token env with + | T_JSX_IDENTIFIER { raw } -> raw + | _ -> + error_unexpected ~expected:"an identifier" env; + "" in Eat.token env; - loc, JSX.Identifier.({ name; }) + (loc, JSX.Identifier.{ name }) let name = let rec member_expression env member = match Peek.token env with | T_PERIOD -> - let _object = JSX.MemberExpression.MemberExpression member in - Expect.token env T_PERIOD; - let property = identifier env in - let loc = Loc.btwn (fst member) (fst property) in - let member = loc, JSX.MemberExpression.({ - _object; - property; - 
}) in - member_expression env member + let (start_loc, _) = member in + let member = + with_loc + ~start_loc + (fun env -> + Expect.token env T_PERIOD; + let property = identifier env in + { + JSX.MemberExpression._object = JSX.MemberExpression.MemberExpression member; + property; + }) + env + in + member_expression env member | _ -> member - - in fun env -> - let name = identifier env in - match Peek.token env with + in + fun env -> + match Peek.ith_token ~i:1 env with | T_COLON -> - let namespace = name in - Expect.token env T_COLON; - let name = identifier env in - let loc = Loc.btwn (fst namespace) (fst name) in - JSX.NamespacedName (loc, JSX.NamespacedName.({ - namespace; - name; - })) + let namespaced_name = + with_loc + (fun env -> + let namespace = identifier env in + Expect.token env T_COLON; + let name = identifier env in + { JSX.NamespacedName.namespace; name }) + env + in + JSX.NamespacedName namespaced_name | T_PERIOD -> - let _object = JSX.MemberExpression.Identifier name in - Expect.token env T_PERIOD; - let property = identifier env in - let loc = Loc.btwn (fst name) (fst property) in - let member = loc, JSX.MemberExpression.({ - _object; - property; - }) in - JSX.MemberExpression (member_expression env member) - | _ -> JSX.Identifier name - - - let attribute env = - let start_loc = Peek.loc env in - let name = identifier env in - let end_loc, name = - if Peek.token env = T_COLON - then begin - Expect.token env T_COLON; - let namespace = name in + let member = + with_loc + (fun env -> + let _object = JSX.MemberExpression.Identifier (identifier env) in + Expect.token env T_PERIOD; + let property = identifier env in + { JSX.MemberExpression._object; property }) + env + in + JSX.MemberExpression (member_expression env member) + | _ -> let name = identifier env in - let loc = Loc.btwn (fst namespace) (fst name) in - loc, JSX.Attribute.NamespacedName (loc, JSX.NamespacedName.({ - namespace; - name; - })) - end else fst name, JSX.Attribute.Identifier name in - let end_loc, value = - if Peek.token env = T_ASSIGN - then begin - Expect.token env T_ASSIGN; - match Peek.token env with - | T_LCURLY -> - let loc, expression_container = expression_container env in - begin - let open JSX.ExpressionContainer in - match expression_container.expression with - | EmptyExpression _ -> - error_at env (loc, Error.JSXAttributeValueEmptyExpression); - | _ -> () - end; - loc, Some (JSX.Attribute.ExpressionContainer (loc, expression_container)) - | T_JSX_TEXT (loc, value, raw) as token -> - Expect.token env token; - let value = Ast.Literal.String value in - loc, Some (JSX.Attribute.Literal (loc, { Ast.Literal.value; raw;})) - | _ -> - error env Error.InvalidJSXAttributeValue; - let loc = Peek.loc env in - let raw = "" in - let value = Ast.Literal.String "" in - loc, Some (JSX.Attribute.Literal (loc, { Ast.Literal.value; raw;})) - end else end_loc, None in - Loc.btwn start_loc end_loc, JSX.Attribute.({ - name; - value; - }) + JSX.Identifier name - let opening_element_without_lt = - let rec attributes env acc = - match Peek.token env with - | T_EOF - | T_DIV - | T_GREATER_THAN -> List.rev acc - | T_LCURLY -> - let attribute = JSX.Opening.SpreadAttribute (spread_attribute env) in - attributes env (attribute::acc) - | _ -> - let attribute = JSX.Opening.Attribute (attribute env) in - attributes env (attribute::acc) - - in fun env start_loc -> - let (name, attributes, selfClosing) = match Peek.token env with - | T_GREATER_THAN -> - (None, [], false) + let attribute env = + with_loc + (fun env -> + let name 
= + match Peek.ith_token ~i:1 env with + | T_COLON -> + let namespaced_name = + with_loc + (fun env -> + let namespace = identifier env in + Expect.token env T_COLON; + let name = identifier env in + { JSX.NamespacedName.namespace; name }) + env + in + JSX.Attribute.NamespacedName namespaced_name | _ -> - let name = Some (name env) in - let attributes = attributes env [] in - let selfClosing = Peek.token env = T_DIV in - (name, attributes, selfClosing) in - if selfClosing then Expect.token env T_DIV; - let end_loc = Peek.loc env in - Expect.token env T_GREATER_THAN; - Eat.pop_lex_mode env; - match name with - | Some name -> - Loc.btwn start_loc end_loc, `Element JSX.Opening.({ - name; - selfClosing; - attributes; - }) - | None -> - Loc.btwn start_loc end_loc, `Fragment - - let closing_element_without_lt env start_loc = - Expect.token env T_DIV; - let name = match Peek.token env with - | T_GREATER_THAN -> None - | _ -> Some (name env) in - let end_loc = Peek.loc env in - Expect.token env T_GREATER_THAN; - (* We double pop to avoid going back to childmode and re-lexing the - * lookahead *) - Eat.double_pop_lex_mode env; - match name with - | Some name -> - Loc.btwn start_loc end_loc, `Element JSX.Closing.({ - name; - }) - | None -> - Loc.btwn start_loc end_loc, `Fragment - - type element_or_closing = - | Closing of (Loc.t, Loc.t) JSX.Closing.t - | ClosingFragment of Loc.t - | ChildElement of (Loc.t * (Loc.t, Loc.t) JSX.element) - | ChildFragment of (Loc.t * (Loc.t, Loc.t) JSX.fragment) + let name = identifier env in + JSX.Attribute.Identifier name + in + let value = + match Peek.token env with + | T_ASSIGN -> + Expect.token env T_ASSIGN; + let leading = Peek.comments env in + let tkn = Peek.token env in + let trailing = Peek.comments env in + begin + match tkn with + | T_LCURLY -> + let (loc, expression_container) = expression_container env in + JSX.ExpressionContainer.( + match expression_container.expression with + | EmptyExpression -> + error_at env (loc, Parse_error.JSXAttributeValueEmptyExpression) + | _ -> ()); + Some (JSX.Attribute.ExpressionContainer (loc, expression_container)) + | T_JSX_TEXT (loc, value, raw) as token -> + Expect.token env token; + let value = Ast.Literal.String value in + Some + (JSX.Attribute.Literal + ( loc, + { + Ast.Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + } )) + | _ -> + error env Parse_error.InvalidJSXAttributeValue; + let loc = Peek.loc env in + let raw = "" in + let value = Ast.Literal.String "" in + Some + (JSX.Attribute.Literal + ( loc, + { + Ast.Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + } )) + end + | _ -> None + in + { JSX.Attribute.name; value }) + env - let rec child env = + let opening_element = + let rec attributes env acc = match Peek.token env with - | T_LCURLY -> expression_container_or_spread_child env - | T_JSX_TEXT (loc, value, raw) as token -> - Expect.token env token; - loc, JSX.Text { JSX.Text.value; raw; } + | T_EOF + | T_DIV + | T_GREATER_THAN -> + List.rev acc + | T_LCURLY -> + let attribute = JSX.Opening.SpreadAttribute (spread_attribute env) in + attributes env (attribute :: acc) | _ -> - (match element_or_fragment env with - | (loc, `Element element) -> loc, JSX.Element element - | (loc, `Fragment fragment) -> loc, JSX.Fragment fragment) + let attribute = JSX.Opening.Attribute (attribute env) in + attributes env (attribute :: acc) + in + fun env -> + with_loc + (fun env -> + Expect.token env T_LESS_THAN; + let element = + match 
Peek.token env with + | T_GREATER_THAN -> `Fragment + | _ -> + let name = name env in + let attributes = attributes env [] in + let selfClosing = Expect.maybe env T_DIV in + `Element { JSX.Opening.name; selfClosing; attributes } + in + Expect.token env T_GREATER_THAN; + element) + env - and element_without_lt = - let element_or_closing env = - Eat.push_lex_mode env Lex_mode.JSX_TAG; - let start_loc = Peek.loc env in + let closing_element env = + with_loc + (fun env -> Expect.token env T_LESS_THAN; - match Peek.token env with - | T_EOF - | T_DIV -> (match closing_element_without_lt env start_loc with - | (loc, `Element ec) -> Closing (loc, ec) - | (loc, `Fragment) -> ClosingFragment loc) - | _ -> (match element_without_lt env start_loc with - | (loc, `Element e) -> ChildElement (loc, e) - | (loc, `Fragment f) -> ChildFragment (loc, f)) + Expect.token env T_DIV; + let element = + match Peek.token env with + | T_GREATER_THAN -> `Fragment + | _ -> `Element { JSX.Closing.name = name env } + in + Expect.token env T_GREATER_THAN; + element) + env + + let rec child env = + match Peek.token env with + | T_LCURLY -> expression_container_or_spread_child env + | T_JSX_TEXT (loc, value, raw) as token -> + Expect.token env token; + (loc, JSX.Text { JSX.Text.value; raw }) + | _ -> + (match element_or_fragment env with + | (loc, `Element element) -> (loc, JSX.Element element) + | (loc, `Fragment fragment) -> (loc, JSX.Fragment fragment)) - in let rec children_and_closing env acc = + and element = + let children_and_closing = + let rec children_and_closing env acc = + let previous_loc = last_loc env in match Peek.token env with - | T_LESS_THAN -> ( - match element_or_closing env with - | Closing closingElement -> - List.rev acc, `Element closingElement - | ClosingFragment closingFragment -> - List.rev acc, `Fragment closingFragment - | ChildElement element -> - let element = fst element, JSX.Element (snd element) in - children_and_closing env (element::acc) - | ChildFragment fragment -> - let fragment = fst fragment, JSX.Fragment (snd fragment) in - children_and_closing env (fragment::acc)) + | T_LESS_THAN -> + Eat.push_lex_mode env Lex_mode.JSX_TAG; + begin + match (Peek.token env, Peek.ith_token ~i:1 env) with + | (T_LESS_THAN, T_EOF) + | (T_LESS_THAN, T_DIV) -> + let closing = + match closing_element env with + | (loc, `Element ec) -> `Element (loc, ec) + | (loc, `Fragment) -> `Fragment loc + in + (* We double pop to avoid going back to childmode and re-lexing the + * lookahead *) + Eat.double_pop_lex_mode env; + (List.rev acc, previous_loc, closing) + | _ -> + let child = + match element env with + | (loc, `Element e) -> (loc, JSX.Element e) + | (loc, `Fragment f) -> (loc, JSX.Fragment f) + in + children_and_closing env (child :: acc) + end | T_EOF -> - error_unexpected env; - List.rev acc, `None - | _ -> - children_and_closing env ((child env)::acc) - - in let rec normalize name = JSX.(match name with + error_unexpected env; + (List.rev acc, previous_loc, `None) + | _ -> children_and_closing env (child env :: acc) + in + fun env -> + let start_loc = Peek.loc env in + let (children, last_child_loc, closing) = children_and_closing env [] in + let last_child_loc = + match last_child_loc with + | Some x -> x + | None -> start_loc + in + (* It's a little bit tricky to untangle the parsing of the child elements from the parsing + * of the closing element, so we can't easily use `with_loc` here. Instead, we'll use the + * same logic that `with_loc` uses, but manipulate the locations explicitly. 
*) + let children_loc = Loc.btwn start_loc last_child_loc in + ((children_loc, children), closing) + in + let rec normalize name = + JSX.( + match name with | Identifier (_, { Identifier.name }) -> name - | NamespacedName (_, { NamespacedName.namespace; name; }) -> - (snd namespace).Identifier.name ^ ":" ^ (snd name).Identifier.name - | MemberExpression (_, { MemberExpression._object; property; }) -> - let _object = match _object with - | MemberExpression.Identifier (_, {Identifier.name=id; _;}) -> id - | MemberExpression.MemberExpression e -> - normalize (JSX.MemberExpression e) in - _object ^ "." ^ (snd property).Identifier.name - ) - - in fun env start_loc -> - let openingElement = opening_element_without_lt env start_loc in - let children, closingElement = - let selfClosing = match snd openingElement with - | `Element e -> e.JSX.Opening.selfClosing - | `Fragment -> false in - if selfClosing - then [], `None - else begin - Eat.push_lex_mode env Lex_mode.JSX_CHILD; - let ret = children_and_closing env [] in - ret - end in - let end_loc = match closingElement with - | `Element (loc, { JSX.Closing.name }) -> - (match snd openingElement with - | `Element e -> - let opening_name = normalize e.JSX.Opening.name in - if normalize name <> opening_name - then error env (Error.ExpectedJSXClosingTag opening_name) - | `Fragment -> error env (Error.ExpectedJSXClosingTag "JSX fragment")); - loc - | `Fragment loc -> - (match snd openingElement with - | `Element e -> error env (Error.ExpectedJSXClosingTag (normalize e.JSX.Opening.name)) - | _ -> ()); - loc - | _ -> fst openingElement in - match snd openingElement with - | `Element e -> - Loc.btwn (fst openingElement) end_loc, `Element JSX.({ - openingElement = (fst openingElement, e); - closingElement = (match closingElement with - | `Element e -> Some e - | _ -> None); - children; - }) - | `Fragment -> - Loc.btwn (fst openingElement) end_loc, `Fragment JSX.({ - frag_openingElement = fst openingElement; - frag_closingElement = (match closingElement with - | `Fragment loc -> Some loc - | _ -> None); - frag_children = children; - }) + | NamespacedName (_, { NamespacedName.namespace; name }) -> + (snd namespace).Identifier.name ^ ":" ^ (snd name).Identifier.name + | MemberExpression (_, { MemberExpression._object; property }) -> + let _object = + match _object with + | MemberExpression.Identifier (_, { Identifier.name = id; _ }) -> id + | MemberExpression.MemberExpression e -> normalize (JSX.MemberExpression e) + in + _object ^ "." 
^ (snd property).Identifier.name)
+    in
+    let is_self_closing = function
+      | (_, `Element e) -> e.JSX.Opening.selfClosing
+      | (_, `Fragment) -> false
+    in
+    fun env ->
+      let openingElement = opening_element env in
+      Eat.pop_lex_mode env;
+      let (children, closingElement) =
+        if is_self_closing openingElement then
+          (with_loc (fun _ -> []) env, `None)
+        else (
+          Eat.push_lex_mode env Lex_mode.JSX_CHILD;
+          children_and_closing env
+        )
+      in
+      let end_loc =
+        match closingElement with
+        | `Element (loc, { JSX.Closing.name }) ->
+          (match snd openingElement with
+          | `Element e ->
+            let opening_name = normalize e.JSX.Opening.name in
+            if normalize name <> opening_name then
+              error env (Parse_error.ExpectedJSXClosingTag opening_name)
+          | `Fragment -> error env (Parse_error.ExpectedJSXClosingTag "JSX fragment"));
+          loc
+        | `Fragment loc ->
+          (match snd openingElement with
+          | `Element e ->
+            error env (Parse_error.ExpectedJSXClosingTag (normalize e.JSX.Opening.name))
+          | _ -> ());
+          loc
+        | _ -> fst openingElement
+      in
+      let result =
+        match openingElement with
+        | (start_loc, `Element e) ->
+          `Element
+            JSX.
+              {
+                openingElement = (start_loc, e);
+                closingElement =
+                  (match closingElement with
+                  | `Element e -> Some e
+                  | _ -> None);
+                children;
+              }
+        | (start_loc, `Fragment) ->
+          `Fragment
+            JSX.
+              {
+                frag_openingElement = start_loc;
+                frag_closingElement =
+                  (match closingElement with
+                  | `Fragment loc -> loc
+                  (* the following are parse errors *)
+                  | `Element (loc, _) -> loc
+                  | _ -> end_loc);
+                frag_children = children;
+              }
+      in
+      (Loc.btwn (fst openingElement) end_loc, result)

-  and element_or_fragment env =
-    let start_loc = Peek.loc env in
-    Eat.push_lex_mode env Lex_mode.JSX_TAG;
-    Expect.token env T_LESS_THAN;
-    element_without_lt env start_loc
+  and element_or_fragment env =
+    Eat.push_lex_mode env Lex_mode.JSX_TAG;
+    element env
 end
diff --git a/src/parser/lex_env.ml b/src/parser/lex_env.ml
index cd58612f660..5bb8ea4a407 100644
--- a/src/parser/lex_env.ml
+++ b/src/parser/lex_env.ml
@@ -1,18 +1,17 @@
 (**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
*) - type t = { - lex_source : File_key.t option; - lex_lb : Sedlexing.lexbuf; - lex_bol : bol; - lex_in_comment_syntax : bool; + lex_source: File_key.t option; + lex_lb: Sedlexing.lexbuf; + lex_bol: bol; + lex_in_comment_syntax: bool; lex_enable_comment_syntax: bool; - lex_state : lex_state; + lex_state: lex_state; } (* bol = Beginning Of Line *) @@ -21,22 +20,19 @@ and bol = { offset: int; } -and lex_state = { - lex_errors_acc: (Loc.t * Parse_error.t) list; -} +and lex_state = { lex_errors_acc: (Loc.t * Parse_error.t) list } -let empty_lex_state = { - lex_errors_acc = []; -} +let empty_lex_state = { lex_errors_acc = [] } -let new_lex_env lex_source lex_lb ~enable_types_in_comments = { - lex_source; - lex_lb; - lex_bol = { line = 1; offset = 0}; - lex_in_comment_syntax = false; - lex_enable_comment_syntax = enable_types_in_comments; - lex_state = empty_lex_state; -} +let new_lex_env lex_source lex_lb ~enable_types_in_comments = + { + lex_source; + lex_lb; + lex_bol = { line = 1; offset = 0 }; + lex_in_comment_syntax = false; + lex_enable_comment_syntax = enable_types_in_comments; + lex_state = empty_lex_state; + } (* copy all the mutable things so that we have a distinct lexing environment that does not interfere with ordinary lexer operations *) @@ -46,40 +42,45 @@ let clone env = let get_and_clear_state env = let state = env.lex_state in - let env = if state != empty_lex_state - then { env with lex_state = empty_lex_state } - else env + let env = + if state != empty_lex_state then + { env with lex_state = empty_lex_state } + else + env in - env, state + (env, state) let lexbuf env = env.lex_lb + let source env = env.lex_source + let state env = env.lex_state + let line env = env.lex_bol.line + let bol_offset env = env.lex_bol.offset + let is_in_comment_syntax env = env.lex_in_comment_syntax + let is_comment_syntax_enabled env = env.lex_enable_comment_syntax + let in_comment_syntax is_in env = - if is_in <> env.lex_in_comment_syntax - then { env with lex_in_comment_syntax = is_in } - else env + if is_in <> env.lex_in_comment_syntax then + { env with lex_in_comment_syntax = is_in } + else + env (* TODO *) let debug_string_of_lexbuf _lb = "" -let debug_string_of_lex_env (env: t) = - let source = match (source env) with +let debug_string_of_lex_env (env : t) = + let source = + match source env with | None -> "None" | Some x -> Printf.sprintf "Some %S" (File_key.to_string x) in Printf.sprintf - "{\n \ - lex_source = %s\n \ - lex_lb = %s\n \ - lex_in_comment_syntax = %b\n \ - lex_enable_comment_syntax = %b\n \ - lex_state = {errors = (count = %d)}\n\ - }" + "{\n lex_source = %s\n lex_lb = %s\n lex_in_comment_syntax = %b\n lex_enable_comment_syntax = %b\n lex_state = {errors = (count = %d)}\n}" source (debug_string_of_lexbuf env.lex_lb) (is_in_comment_syntax env) diff --git a/src/parser/lex_result.ml b/src/parser/lex_result.ml index fae2570e12b..f062b6da288 100644 --- a/src/parser/lex_result.ml +++ b/src/parser/lex_result.ml @@ -1,11 +1,10 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) - type t = { lex_token: Token.t; lex_loc: Loc.t; @@ -14,19 +13,17 @@ type t = { } let token result = result.lex_token + let loc result = result.lex_loc + let comments result = result.lex_comments + let errors result = result.lex_errors let debug_string_of_lex_result lex_result = Printf.sprintf - "{\n \ - lex_token = %s\n \ - lex_value = %S\n \ - lex_errors = (length = %d)\n \ - lex_comments = (length = %d)\n\ - }" - (Token.token_to_string lex_result.lex_token) - (Token.value_of_token lex_result.lex_token) - (List.length lex_result.lex_errors) - (List.length lex_result.lex_comments) + "{\n lex_token = %s\n lex_value = %S\n lex_errors = (length = %d)\n lex_comments = (length = %d)\n}" + (Token.token_to_string lex_result.lex_token) + (Token.value_of_token lex_result.lex_token) + (List.length lex_result.lex_errors) + (List.length lex_result.lex_comments) diff --git a/src/parser/lexer.ml b/src/parser/lexer.ml deleted file mode 100644 index d134b436f47..00000000000 --- a/src/parser/lexer.ml +++ /dev/null @@ -1,1770 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -[@@@warning "-39"] (* sedlex inserts some unnecessary `rec`s *) - -open Token -open Lex_env - -let lexeme = Sedlexing.Utf8.lexeme -let sub_lexeme = Sedlexing.Utf8.sub_lexeme - -let letter = [%sedlex.regexp? 'a'..'z' | 'A'..'Z' | '$'] -let id_letter = [%sedlex.regexp? letter | '_'] -let digit = [%sedlex.regexp? '0'..'9'] -let digit_non_zero = [%sedlex.regexp? '1'..'9'] -let decintlit = [%sedlex.regexp? '0' | ('1'..'9', Star digit)] (* DecimalIntegerLiteral *) -let alphanumeric = [%sedlex.regexp? digit | letter] -let word = [%sedlex.regexp? letter, Star alphanumeric] - -let hex_digit = [%sedlex.regexp? digit | 'a'..'f' | 'A'..'F'] -let non_hex_letter = [%sedlex.regexp? 'g'..'z' | 'G'..'Z' | '$'] - -let bin_digit = [%sedlex.regexp? '0' | '1'] -let oct_digit = [%sedlex.regexp? '0'..'7'] - -(* This regex could be simplified to (digit Star (digit OR '_' digit)) - * That makes the underscore and failure cases faster, and the base case take x2-3 the steps - * As the codebase contains more base cases than underscored or errors, prefer this version *) -let underscored_bin = [%sedlex.regexp? - Plus bin_digit | (bin_digit, Star (bin_digit | ('_', bin_digit))) -] -let underscored_oct = [%sedlex.regexp? - Plus oct_digit | (oct_digit, Star (oct_digit | ('_', oct_digit))) -] -let underscored_hex = [%sedlex.regexp? - Plus hex_digit | (hex_digit, Star (hex_digit | ('_', hex_digit))) -] -let underscored_digit = [%sedlex.regexp? - Plus digit | (digit_non_zero, Star (digit | ('_', digit))) -] -let underscored_decimal = [%sedlex.regexp? - Plus digit | (digit, Star (digit | ('_', digit))) -] - -(* Different ways you can write a number *) -let binnumber = [%sedlex.regexp? '0', ('B' | 'b'), underscored_bin] -let octnumber = [%sedlex.regexp? '0', ('O' | 'o'), underscored_oct] -let legacyoctnumber = [%sedlex.regexp? '0', underscored_oct] -let hexnumber = [%sedlex.regexp? '0', ('X' | 'x'), underscored_hex] -let scinumber = [%sedlex.regexp? - ((decintlit, Opt ('.', Opt underscored_decimal)) | ('.', underscored_decimal)), - ('e' | 'E'), Opt ('-' | '+'), underscored_digit -] -let wholenumber = [%sedlex.regexp? underscored_digit, Opt '.'] -let floatnumber = [%sedlex.regexp? Opt underscored_digit, '.', underscored_decimal] - -(* 2-8 alphanumeric characters. 
I could match them directly, but this leads to - * ~5k more lines of generated lexer -let htmlentity = "quot" | "amp" | "apos" | "lt" | "gt" | "nbsp" | "iexcl" - | "cent" | "pound" | "curren" | "yen" | "brvbar" | "sect" | "uml" | "copy" - | "ordf" | "laquo" | "not" | "shy" | "reg" | "macr" | "deg" | "plusmn" - | "sup2" | "sup3" | "acute" | "micro" | "para" | "middot" | "cedil" | "sup1" - | "ordm" | "raquo" | "frac14" | "frac12" | "frac34" | "iquest" | "Agrave" - | "Aacute" | "Acirc" | "Atilde" | "Auml" | "Aring" | "AElig" | "Ccedil" - | "Egrave" | "Eacute" | "Ecirc" | "Euml" | "Igrave" | "Iacute" | "Icirc" - | "Iuml" | "ETH" | "Ntilde" | "Ograve" | "Oacute" | "Ocirc" | "Otilde" - | "Ouml" | "times" | "Oslash" | "Ugrave" | "Uacute" | "Ucirc" | "Uuml" - | "Yacute" | "THORN" | "szlig" | "agrave" | "aacute" | "acirc" | "atilde" - | "auml" | "aring" | "aelig" | "ccedil" | "egrave" | "eacute" | "ecirc" - | "euml" | "igrave" | "iacute" | "icirc" | "iuml" | "eth" | "ntilde" - | "ograve" | "oacute" | "ocirc" | "otilde" | "ouml" | "divide" | "oslash" - | "ugrave" | "uacute" | "ucirc" | "uuml" | "yacute" | "thorn" | "yuml" - | "OElig" | "oelig" | "Scaron" | "scaron" | "Yuml" | "fnof" | "circ" | "tilde" - | "Alpha" | "Beta" | "Gamma" | "Delta" | "Epsilon" | "Zeta" | "Eta" | "Theta" - | "Iota" | "Kappa" | "Lambda" | "Mu" | "Nu" | "Xi" | "Omicron" | "Pi" | "Rho" - | "Sigma" | "Tau" | "Upsilon" | "Phi" | "Chi" | "Psi" | "Omega" | "alpha" - | "beta" | "gamma" | "delta" | "epsilon" | "zeta" | "eta" | "theta" | "iota" - | "kappa" | "lambda" | "mu" | "nu" | "xi" | "omicron" | "pi" | "rho" - | "sigmaf" | "sigma" | "tau" | "upsilon" | "phi" | "chi" | "psi" | "omega" - | "thetasym" | "upsih" | "piv" | "ensp" | "emsp" | "thinsp" | "zwnj" | "zwj" - | "lrm" | "rlm" | "ndash" | "mdash" | "lsquo" | "rsquo" | "sbquo" | "ldquo" - | "rdquo" | "bdquo" | "dagger" | "Dagger" | "bull" | "hellip" | "permil" - | "prime" | "Prime" | "lsaquo" | "rsaquo" | "oline" | "frasl" | "euro" - | "image" | "weierp" | "real" | "trade" | "alefsym" | "larr" | "uarr" | "rarr" - | "darr" | "harr" | "crarr" | "lArr" | "uArr" | "rArr" | "dArr" | "hArr" - | "forall" | "part" | "exist" | "empty" | "nabla" | "isin" | "notin" | "ni" - | "prod" | "sum" | "minus" | "lowast" | "radic" | "prop" | "infin" | "ang" - | "and" | "or" | "cap" | "cup" | "'int'" | "there4" | "sim" | "cong" | "asymp" - | "ne" | "equiv" | "le" | "ge" | "sub" | "sup" | "nsub" | "sube" | "supe" - | "oplus" | "otimes" | "perp" | "sdot" | "lceil" | "rceil" | "lfloor" - | "rfloor" | "lang" | "rang" | "loz" | "spades" | "clubs" | "hearts" | "diams" -*) -let htmlentity = [%sedlex.regexp? - alphanumeric, alphanumeric, Opt alphanumeric, Opt alphanumeric, - Opt alphanumeric, Opt alphanumeric, Opt alphanumeric, Opt alphanumeric -] - -(* http://www.ecma-international.org/ecma-262/6.0/#table-32 *) -let whitespace = [%sedlex.regexp? - 0x0009 | 0x000B | 0x000C | 0x0020 | 0x00A0 | 0xfeff | - 0x1680 | 0x180e | 0x2000 .. 0x200a | 0x202f | 0x205f | 0x3000 -] - -(* minus sign in front of negative numbers - (only for types! regular numbers use T_MINUS!) *) -let neg = [%sedlex.regexp? '-', Star whitespace] - -let line_terminator_sequence = [%sedlex.regexp? '\n' | '\r' | "\r\n" | 0x2028 | 0x2029] -let line_terminator_sequence_start = [%sedlex.regexp? '\n' | '\r' | 0x2028 | 0x2029] - -let hex_quad = [%sedlex.regexp? hex_digit, hex_digit, hex_digit, hex_digit] -let unicode_escape = [%sedlex.regexp? "\\u", hex_quad] -let codepoint_escape = [%sedlex.regexp? 
"\\u{", Plus hex_digit, '}'] -let js_id_start = [%sedlex.regexp? '$' | '_' | id_start | unicode_escape | codepoint_escape] -let js_id_continue = [%sedlex.regexp? - '$' | '_' | 0x200C | 0x200D | id_continue | unicode_escape | codepoint_escape -] - -let pos_at_offset env offset = - { Loc. - line = Lex_env.line env; - column = offset - Lex_env.bol_offset env; - offset = offset; - } - -let loc_of_offsets env start_offset end_offset = - { Loc. - source = Lex_env.source env; - start = pos_at_offset env start_offset; - _end = pos_at_offset env end_offset; - } - -let start_pos_of_lexbuf env (lexbuf: Sedlexing.lexbuf) = - let start_offset = Sedlexing.lexeme_start lexbuf in - pos_at_offset env start_offset - -let end_pos_of_lexbuf env (lexbuf: Sedlexing.lexbuf) = - let end_offset = Sedlexing.lexeme_end lexbuf in - pos_at_offset env end_offset - -let loc_of_lexbuf env (lexbuf: Sedlexing.lexbuf) = - let start_offset = Sedlexing.lexeme_start lexbuf in - let end_offset = Sedlexing.lexeme_end lexbuf in - loc_of_offsets env start_offset end_offset - -let get_result_and_clear_state (env, lex_token, lex_comments) = - let env, { - lex_errors_acc; - } = get_and_clear_state env in - let lex_loc = match lex_token with - | T_STRING (loc, _, _, _) -> loc - | T_JSX_TEXT (loc, _, _) -> loc - | T_TEMPLATE_PART (loc, _, _) -> loc - | T_REGEXP (loc, _, _) -> loc - | _ -> loc_of_lexbuf env env.lex_lb - in - env, { Lex_result. - lex_token; - lex_loc; - lex_errors = List.rev lex_errors_acc; - lex_comments; - } - -let lex_error (env: Lex_env.t) loc err: Lex_env.t = - let lex_errors_acc = (loc, err)::env.lex_state.lex_errors_acc in - { env with lex_state = { lex_errors_acc; } } - -let unexpected_error (env: Lex_env.t) (loc: Loc.t) value = - lex_error env loc (Parse_error.UnexpectedToken value) - -let unexpected_error_w_suggest (env: Lex_env.t) (loc: Loc.t) value suggest = - lex_error env loc (Parse_error.UnexpectedTokenWithSuggestion (value, suggest)) - -let illegal (env: Lex_env.t) (loc: Loc.t) = - lex_error env loc (Parse_error.UnexpectedToken "ILLEGAL") - -let new_line env lexbuf = - let offset = Sedlexing.lexeme_end lexbuf in - let lex_bol = { line = Lex_env.line env + 1; offset; } in - { env with Lex_env.lex_bol } - -module FloatOfString : sig - val float_of_string: string -> float -end = struct - type t = { - negative: bool; - mantissa: int; - exponent: int; - decimal_exponent: int option; - todo: char list; - } - - exception No_good - - let eat f = - match f.todo with - | _::todo -> { f with todo; } - | _ -> raise No_good - - let start str = - let todo = ref [] in - String.iter (fun c -> todo := c::(!todo)) str; - { - negative = false; - mantissa = 0; - exponent = 0; - decimal_exponent = None; - todo = List.rev (!todo); - } - - let parse_sign f = - match f.todo with - | '+'::_ -> eat f - | '-'::_ -> { (eat f) with negative = true; } - | _ -> f - - let parse_hex_symbol f = - match f.todo with - | '0'::('x' | 'X')::_ -> f |> eat |> eat - | _ -> raise No_good - - let parse_exponent f = - let todo_str = f.todo - |> List.map Char.escaped - |> String.concat "" in - let exponent = - try int_of_string todo_str - with Failure _ -> raise No_good in - { f with exponent; todo = [] } - - let rec parse_body f = - match f.todo with - | [] -> f - (* _ is just ignored *) - | '_'::_ -> parse_body (eat f) - | '.'::_ -> - if f.decimal_exponent = None - then parse_body { (eat f) with decimal_exponent = Some 0 } - else raise No_good - | ('p' | 'P')::_ -> - parse_exponent (eat f) - | c::_ -> - let ref_char_code = - if c >= '0' && c <= '9' 
- then Char.code '0' - else if c >= 'A' && c <= 'F' - then Char.code 'A' - 10 - else if c >= 'a' && c <= 'f' - then Char.code 'a' - 10 - else raise No_good in - let value = (Char.code c) - ref_char_code in - let decimal_exponent = match f.decimal_exponent with - | None -> None - | Some e -> Some (e - 4) in - let mantissa = (f.mantissa lsl 4) + value in - parse_body { (eat f) with decimal_exponent; mantissa; } - - let float_of_t f = - assert (f.todo = []); - let ret = float_of_int f.mantissa in - let exponent = match f.decimal_exponent with - | None -> f.exponent - | Some decimal_exponent -> f.exponent + decimal_exponent in - let ret = - if exponent = 0 - then ret - else ret ** (float_of_int exponent) in - if f.negative - then -.ret - else ret - - let float_of_string str = - try Pervasives.float_of_string str - with e when Sys.win32 -> - try - start str - |> parse_sign - |> parse_hex_symbol - |> parse_body - |> float_of_t - with No_good -> raise e -end - -let mk_comment - (env: Lex_env.t) - (start: Loc.position) (_end: Loc.position) - (buf: Buffer.t) - (multiline: bool) -: Loc.t Flow_ast.Comment.t = - let open Flow_ast.Comment in - let loc = { Loc.source = Lex_env.source env; start; _end } in - let s = Buffer.contents buf in - let c = if multiline then Block s else Line s in - (loc, c) - -let mk_num_singleton number_type raw = - let neg, num = if raw.[0] = '-' - then true, String.sub raw 1 (String.length raw - 1) - else false, raw - in - (* convert singleton number type into a float *) - let value = match number_type with - | LEGACY_OCTAL -> - begin try Int64.to_float (Int64.of_string ("0o"^num)) - with Failure _ -> failwith ("Invalid legacy octal "^num) - end - | BINARY - | OCTAL -> - begin try Int64.to_float (Int64.of_string num) - with Failure _ -> failwith ("Invalid binary/octal "^num) - end - | NORMAL -> - begin try FloatOfString.float_of_string num - with Failure _ -> failwith ("Invalid number "^num) - end - in - let value = if neg then ~-.value else value in - T_NUMBER_SINGLETON_TYPE { kind = number_type; value; raw } - -let decode_identifier = - let assert_valid_unicode_in_identifier env loc code = - let lexbuf = Sedlexing.from_int_array [|code|] in - match%sedlex lexbuf with - | js_id_start -> env - | js_id_continue -> env - | any - | eof -> lex_error env loc Parse_error.IllegalUnicodeEscape - | _ -> failwith "unreachable" - in - let loc_and_sub_lexeme env offset lexbuf trim_start trim_end = - let start_offset = offset + Sedlexing.lexeme_start lexbuf in - let end_offset = offset + Sedlexing.lexeme_end lexbuf in - let loc = loc_of_offsets env start_offset end_offset in - loc, sub_lexeme lexbuf trim_start (Sedlexing.lexeme_length lexbuf - trim_start - trim_end) - in - let rec id_char env offset buf lexbuf = - match%sedlex lexbuf with - | unicode_escape -> - let loc, hex = loc_and_sub_lexeme env offset lexbuf 2 0 in - let code = int_of_string ("0x"^hex) in - let env = assert_valid_unicode_in_identifier env loc code in - Wtf8.add_wtf_8 buf code; - id_char env offset buf lexbuf - - | codepoint_escape -> - let loc, hex = loc_and_sub_lexeme env offset lexbuf 3 1 in - let code = int_of_string ("0x"^hex) in - let env = assert_valid_unicode_in_identifier env loc code in - Wtf8.add_wtf_8 buf code; - id_char env offset buf lexbuf - - | eof -> - env, Buffer.contents buf - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl (eof | "\\")) - | any -> - let x = lexeme lexbuf in - Buffer.add_string buf x; - id_char env offset buf lexbuf - - | 
_ -> failwith "unreachable" - in - fun env raw -> - let offset = Sedlexing.lexeme_start env.lex_lb in - let lexbuf = Sedlexing.Utf8.from_string raw in - let buf = Buffer.create (String.length raw) in - id_char env offset buf lexbuf - -let recover env lexbuf ~f = - let env = illegal env (loc_of_lexbuf env lexbuf) in - Sedlexing.rollback lexbuf; - f env lexbuf - -type jsx_text_mode = - | JSX_SINGLE_QUOTED_TEXT - | JSX_DOUBLE_QUOTED_TEXT - | JSX_CHILD_TEXT - -type result = - | Token of Lex_env.t * Token.t - | Comment of Lex_env.t * Loc.t Flow_ast.Comment.t - | Continue of Lex_env.t - -let rec comment env buf lexbuf = - match%sedlex lexbuf with - | line_terminator_sequence -> - let env = new_line env lexbuf in - Buffer.add_string buf (lexeme lexbuf); - comment env buf lexbuf - - | "*/" -> - let env = - if is_in_comment_syntax env then - let loc = loc_of_lexbuf env lexbuf in - unexpected_error_w_suggest env loc "*/" "*-/" - else env - in - env, end_pos_of_lexbuf env lexbuf - - | "*-/" -> - if is_in_comment_syntax env - then env, end_pos_of_lexbuf env lexbuf - else ( - Buffer.add_string buf "*-/"; - comment env buf lexbuf - ) - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl (line_terminator_sequence_start | '*')) - | any -> - Buffer.add_string buf (lexeme lexbuf); - comment env buf lexbuf - - | _ -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - env, end_pos_of_lexbuf env lexbuf - - -let rec line_comment env buf lexbuf = - match%sedlex lexbuf with - | eof -> - env, end_pos_of_lexbuf env lexbuf - - | line_terminator_sequence -> - let { Loc.line; column; offset } = end_pos_of_lexbuf env lexbuf in - let env = new_line env lexbuf in - let len = Sedlexing.lexeme_length lexbuf in - let end_pos = { Loc. 
- line; - column = column - len; - offset = offset - len; - } in - env, end_pos - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl (eof | line_terminator_sequence_start)) - | any -> - let str = lexeme lexbuf in - Buffer.add_string buf str; - line_comment env buf lexbuf - - | _ -> failwith "unreachable" - - -let string_escape env lexbuf = - match%sedlex lexbuf with - | eof - | '\\' -> - let str = lexeme lexbuf in - let codes = Sedlexing.lexeme lexbuf in - env, str, codes, false - - | 'x', hex_digit, hex_digit -> - let str = lexeme lexbuf in - let code = int_of_string ("0"^str) in (* 0xAB *) - env, str, [|code|], false - - | '0'..'7', '0'..'7', '0'..'7' -> - let str = lexeme lexbuf in - let code = int_of_string ("0o"^str) in (* 0o012 *) - (* If the 3 character octal code is larger than 256 - * then it is parsed as a 2 character octal code *) - if code < 256 then - env, str, [|code|], true - else - let remainder = code land 7 in - let code = code lsr 3 in - env, str, [|code; Char.code '0' + remainder|], true - - | '0'..'7', '0'..'7' -> - let str = lexeme lexbuf in - let code = int_of_string ("0o"^str) in (* 0o01 *) - env, str, [|code|], true - - | '0' -> env, "0", [|0x0|], false - | 'b' -> env, "b", [|0x8|], false - | 'f' -> env, "f", [|0xC|], false - | 'n' -> env, "n", [|0xA|], false - | 'r' -> env, "r", [|0xD|], false - | 't' -> env, "t", [|0x9|], false - | 'v' -> env, "v", [|0xB|], false - | '0'..'7' -> - let str = lexeme lexbuf in - let code = int_of_string ("0o"^str) in (* 0o1 *) - env, str, [|code|], true - - | 'u', hex_quad -> - let str = lexeme lexbuf in - let hex = String.sub str 1 (String.length str - 1) in - let code = int_of_string ("0x"^hex) in - env, str, [|code|], false - - | "u{", Plus hex_digit, '}' -> - let str = lexeme lexbuf in - let hex = String.sub str 2 (String.length str - 3) in - let code = int_of_string ("0x"^hex) in - (* 11.8.4.1 *) - let env = if code > 1114111 - then illegal env (loc_of_lexbuf env lexbuf) - else env - in - env, str, [|code|], false - - | 'u' | 'x' | '0'..'7' -> - let str = lexeme lexbuf in - let codes = Sedlexing.lexeme lexbuf in - let env = illegal env (loc_of_lexbuf env lexbuf) in - env, str, codes, false - - | line_terminator_sequence -> - let str = lexeme lexbuf in - let env = new_line env lexbuf in - env, str, [||], false - - | any -> - let str = lexeme lexbuf in - let codes = Sedlexing.lexeme lexbuf in - env, str, codes, false - - | _ -> failwith "unreachable" - - -(* Really simple version of string lexing. Just try to find beginning and end of - * string. 
We can inspect the string later to find invalid escapes, etc *) -let rec string_quote env q buf raw octal lexbuf = - match%sedlex lexbuf with - | "'" | '"' -> - let q' = lexeme lexbuf in - Buffer.add_string raw q'; - if q = q' - then env, end_pos_of_lexbuf env lexbuf, octal - else begin - Buffer.add_string buf q'; - string_quote env q buf raw octal lexbuf - end - - | '\\' -> - Buffer.add_string raw "\\"; - let env, str, codes, octal' = string_escape env lexbuf in - let octal = octal' || octal in - Buffer.add_string raw str; - Array.iter (Wtf8.add_wtf_8 buf) codes; - string_quote env q buf raw octal lexbuf - - | '\n' | eof -> - let x = lexeme lexbuf in - Buffer.add_string raw x; - let env = illegal env (loc_of_lexbuf env lexbuf) in - Buffer.add_string buf x; - env, end_pos_of_lexbuf env lexbuf, octal - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl ("'" | '"' | '\\' | '\n' | eof)) - | any -> - let x = lexeme lexbuf in - Buffer.add_string raw x; - Buffer.add_string buf x; - string_quote env q buf raw octal lexbuf - - | _ -> failwith "unreachable" - - -let rec template_part env cooked raw literal lexbuf = - match%sedlex lexbuf with - | eof -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - env, true - - | '`' -> - Buffer.add_char literal '`'; - env, true - - | "${" -> - Buffer.add_string literal "${"; - env, false - - | '\\' -> - Buffer.add_char raw '\\'; - Buffer.add_char literal '\\'; - let env, str, codes, _ = string_escape env lexbuf in - Buffer.add_string raw str; - Buffer.add_string literal str; - Array.iter (Wtf8.add_wtf_8 cooked) codes; - template_part env cooked raw literal lexbuf - - (* ECMAScript 6th Syntax, 11.8.6.1 Static Semantics: TV's and TRV's - * Long story short, is 0xA, is 0xA, and is 0xA - * *) - | "\r\n" -> - Buffer.add_string raw "\r\n"; - Buffer.add_string literal "\r\n"; - Buffer.add_string cooked "\n"; - let env = new_line env lexbuf in - template_part env cooked raw literal lexbuf - - | "\n" | "\r" -> - let lf = lexeme lexbuf in - Buffer.add_string raw lf; - Buffer.add_string literal lf; - Buffer.add_char cooked '\n'; - let env = new_line env lexbuf in - template_part env cooked raw literal lexbuf - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl (eof | '`' | '$' | '\\' | '\r' | '\n')) - | any -> - let c = lexeme lexbuf in - Buffer.add_string raw c; - Buffer.add_string literal c; - Buffer.add_string cooked c; - template_part env cooked raw literal lexbuf - - | _ -> failwith "unreachable" - - -let token (env: Lex_env.t) lexbuf : result = - match%sedlex lexbuf with - | line_terminator_sequence -> - let env = new_line env lexbuf in - Continue env - - | '\\' -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - Continue env - - | Plus whitespace -> - Continue env - - | "/*" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - - | "/*", Star whitespace, (":" | "::" | "flow-include") -> - let pattern = lexeme lexbuf in - if not (is_comment_syntax_enabled env) then - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - Buffer.add_string buf (String.sub pattern 2 (String.length pattern - 2)); - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - else - let env = - if is_in_comment_syntax env then - let loc = 
loc_of_lexbuf env lexbuf in - unexpected_error env loc pattern - else env - in - let env = in_comment_syntax true env in - let len = Sedlexing.lexeme_length lexbuf in - if Sedlexing.Utf8.sub_lexeme lexbuf (len - 1) 1 = ":" && - Sedlexing.Utf8.sub_lexeme lexbuf (len - 2) 1 <> ":" then - Token (env, T_COLON) - else - Continue env - - | "*/" -> - if is_in_comment_syntax env then - let env = in_comment_syntax false env in - Continue env - else begin - Sedlexing.rollback lexbuf; - match%sedlex lexbuf with - | "*" -> Token (env, T_MULT) - | _ -> failwith "expected *" - end - - | "//" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = line_comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf false) - - (* Support for the shebang at the beginning of a file. It is treated like a - * comment at the beginning or an error elsewhere *) - | "#!" -> - if Sedlexing.lexeme_start lexbuf = 0 then - let env, _ = line_comment env (Buffer.create 127) lexbuf in - Continue env - else - Token (env, T_ERROR "#!") - - (* Values *) - | "'" | '"' -> - let quote = lexeme lexbuf in - let start = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let raw = Buffer.create 127 in - Buffer.add_string raw quote; - let octal = false in - let env, _end, octal = string_quote env quote buf raw octal lexbuf in - let loc = { Loc.source = Lex_env.source env; start; _end } in - Token (env, T_STRING (loc, Buffer.contents buf, Buffer.contents raw, octal)) - - | '`' -> - let cooked = Buffer.create 127 in - let raw = Buffer.create 127 in - let literal = Buffer.create 127 in - Buffer.add_string literal (lexeme lexbuf); - - let start = start_pos_of_lexbuf env lexbuf in - let env, is_tail = template_part env cooked raw literal lexbuf in - let _end = end_pos_of_lexbuf env lexbuf in - let loc = { Loc.source = Lex_env.source env; start; _end } in - Token (env, T_TEMPLATE_PART ( - loc, - { - cooked = Buffer.contents cooked; - raw = Buffer.contents raw; - literal = Buffer.contents literal; - }, - is_tail - )) - - | binnumber, (letter | '2'..'9'), Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | binnumber -> - Token (env, T_NUMBER { kind = BINARY; raw = lexeme lexbuf }) - | _ -> failwith "unreachable" - ) - - | binnumber -> - Token (env, T_NUMBER { kind = BINARY; raw = lexeme lexbuf }) - - | octnumber, (letter | '8'..'9'), Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | octnumber -> Token (env, T_NUMBER { kind = OCTAL; raw = lexeme lexbuf }) - | _ -> failwith "unreachable" - ) - - | octnumber -> - Token (env, T_NUMBER { kind = OCTAL; raw = lexeme lexbuf }) - - | legacyoctnumber, (letter | '8'..'9'), Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | legacyoctnumber -> Token (env, T_NUMBER { kind = LEGACY_OCTAL; raw = lexeme lexbuf }) - | _ -> failwith "unreachable" - ) - - | legacyoctnumber -> - Token (env, T_NUMBER { kind = LEGACY_OCTAL; raw = lexeme lexbuf }) - - | hexnumber, non_hex_letter, Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | hexnumber -> Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) - | _ -> failwith "unreachable" - ) - - | 
hexnumber -> - Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) - - | scinumber, word -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | scinumber -> Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) - | _ -> failwith "unreachable" - ) - - | scinumber -> - Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) - - | (wholenumber | floatnumber), word -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | wholenumber | floatnumber -> - Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) - | _ -> failwith "unreachable" - ) - - | wholenumber | floatnumber -> - Token (env, T_NUMBER { kind = NORMAL; raw = lexeme lexbuf }) - - (* Keywords *) - | "async" -> Token (env, T_ASYNC) - | "await" -> Token (env, T_AWAIT) - | "break" -> Token (env, T_BREAK) - | "case" -> Token (env, T_CASE) - | "catch" -> Token (env, T_CATCH) - | "class" -> Token (env, T_CLASS) - | "const" -> Token (env, T_CONST) - | "continue" -> Token (env, T_CONTINUE) - | "debugger" -> Token (env, T_DEBUGGER) - | "declare" -> Token (env, T_DECLARE) - | "default" -> Token (env, T_DEFAULT) - | "delete" -> Token (env, T_DELETE) - | "do" -> Token (env, T_DO) - | "else" -> Token (env, T_ELSE) - | "enum" -> Token (env, T_ENUM) - | "export" -> Token (env, T_EXPORT) - | "extends" -> Token (env, T_EXTENDS) - | "false" -> Token (env, T_FALSE) - | "finally" -> Token (env, T_FINALLY) - | "for" -> Token (env, T_FOR) - | "function" -> Token (env, T_FUNCTION) - | "if" -> Token (env, T_IF) - | "implements" -> Token (env, T_IMPLEMENTS) - | "import" -> Token (env, T_IMPORT) - | "in" -> Token (env, T_IN) - | "instanceof" -> Token (env, T_INSTANCEOF) - | "interface" -> Token (env, T_INTERFACE) - | "let" -> Token (env, T_LET) - | "new" -> Token (env, T_NEW) - | "null" -> Token (env, T_NULL) - | "of" -> Token (env, T_OF) - | "opaque" -> Token (env, T_OPAQUE) - | "package" -> Token (env, T_PACKAGE) - | "private" -> Token (env, T_PRIVATE) - | "protected" -> Token (env, T_PROTECTED) - | "public" -> Token (env, T_PUBLIC) - | "return" -> Token (env, T_RETURN) - | "static" -> Token (env, T_STATIC) - | "super" -> Token (env, T_SUPER) - | "switch" -> Token (env, T_SWITCH) - | "this" -> Token (env, T_THIS) - | "throw" -> Token (env, T_THROW) - | "true" -> Token (env, T_TRUE) - | "try" -> Token (env, T_TRY) - | "type" -> Token (env, T_TYPE) - | "typeof" -> Token (env, T_TYPEOF) - | "var" -> Token (env, T_VAR) - | "void" -> Token (env, T_VOID) - | "while" -> Token (env, T_WHILE) - | "with" -> Token (env, T_WITH) - | "yield" -> Token (env, T_YIELD) - - (* Identifiers *) - | js_id_start, Star js_id_continue -> - let loc = loc_of_lexbuf env lexbuf in - let raw = lexeme lexbuf in - let env, value = decode_identifier env raw in - Token (env, T_IDENTIFIER { loc; value; raw }) - - (* TODO: Use [Symbol.iterator] instead of @@iterator. *) - | "@@iterator" - | "@@asyncIterator" -> - let loc = loc_of_lexbuf env lexbuf in - let raw = lexeme lexbuf in - Token (env, T_IDENTIFIER { loc; value = raw; raw }) - - (* Syntax *) - | "{" -> Token (env, T_LCURLY) - | "}" -> Token (env, T_RCURLY) - | "(" -> Token (env, T_LPAREN) - | ")" -> Token (env, T_RPAREN) - | "[" -> Token (env, T_LBRACKET) - | "]" -> Token (env, T_RBRACKET) - | "..." -> Token (env, T_ELLIPSIS) - | "." 
-> Token (env, T_PERIOD) - | ";" -> Token (env, T_SEMICOLON) - | "," -> Token (env, T_COMMA) - | ":" -> Token (env, T_COLON) - - | "?.", digit -> - Sedlexing.rollback lexbuf; - begin match%sedlex lexbuf with - | "?" -> Token (env, T_PLING) - | _ -> failwith "expected ?" - end - - | "?." -> Token (env, T_PLING_PERIOD) - | "??" -> Token (env, T_PLING_PLING) - | "?" -> Token (env, T_PLING) - | "&&" -> Token (env, T_AND) - | "||" -> Token (env, T_OR) - | "===" -> Token (env, T_STRICT_EQUAL) - | "!==" -> Token (env, T_STRICT_NOT_EQUAL) - | "<=" -> Token (env, T_LESS_THAN_EQUAL) - | ">=" -> Token (env, T_GREATER_THAN_EQUAL) - | "==" -> Token (env, T_EQUAL) - | "!=" -> Token (env, T_NOT_EQUAL) - | "++" -> Token (env, T_INCR) - | "--" -> Token (env, T_DECR) - | "<<=" -> Token (env, T_LSHIFT_ASSIGN) - | "<<" -> Token (env, T_LSHIFT) - | ">>=" -> Token (env, T_RSHIFT_ASSIGN) - | ">>>=" -> Token (env, T_RSHIFT3_ASSIGN) - | ">>>" -> Token (env, T_RSHIFT3) - | ">>" -> Token (env, T_RSHIFT) - | "+=" -> Token (env, T_PLUS_ASSIGN) - | "-=" -> Token (env, T_MINUS_ASSIGN) - | "*=" -> Token (env, T_MULT_ASSIGN) - | "**=" -> Token (env, T_EXP_ASSIGN) - | "%=" -> Token (env, T_MOD_ASSIGN) - | "&=" -> Token (env, T_BIT_AND_ASSIGN) - | "|=" -> Token (env, T_BIT_OR_ASSIGN) - | "^=" -> Token (env, T_BIT_XOR_ASSIGN) - | "<" -> Token (env, T_LESS_THAN) - | ">" -> Token (env, T_GREATER_THAN) - | "+" -> Token (env, T_PLUS) - | "-" -> Token (env, T_MINUS) - | "*" -> Token (env, T_MULT) - | "**" -> Token (env, T_EXP) - | "%" -> Token (env, T_MOD) - | "|" -> Token (env, T_BIT_OR) - | "&" -> Token (env, T_BIT_AND) - | "^" -> Token (env, T_BIT_XOR) - | "!" -> Token (env, T_NOT) - | "~" -> Token (env, T_BIT_NOT) - | "=" -> Token (env, T_ASSIGN) - | "=>" -> Token (env, T_ARROW) - | "/=" -> Token (env, T_DIV_ASSIGN) - | "/" -> Token (env, T_DIV) - | "@" -> Token (env, T_AT) - | "#" -> Token (env, T_POUND) - - (* Others *) - | eof -> - let env = - if is_in_comment_syntax env then - let loc = loc_of_lexbuf env lexbuf in - lex_error env loc Parse_error.UnexpectedEOS - else env - in - Token (env, T_EOF) - - | any -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - Token (env, T_ERROR (lexeme lexbuf)) - - | _ -> failwith "unreachable" - - -let rec regexp_class env buf lexbuf = - match%sedlex lexbuf with - | eof -> env - - | "\\\\" -> - Buffer.add_string buf "\\\\"; - regexp_class env buf lexbuf - - | '\\', ']' -> - Buffer.add_char buf '\\'; - Buffer.add_char buf ']'; - regexp_class env buf lexbuf - - | ']' -> - Buffer.add_char buf ']'; - env - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl (eof | '\\' | ']')) - | any -> - let str = lexeme lexbuf in - Buffer.add_string buf str; - regexp_class env buf lexbuf - - | _ -> failwith "unreachable" - - -let rec regexp_body env buf lexbuf = - match%sedlex lexbuf with - | eof -> - let loc = loc_of_lexbuf env lexbuf in - let env = lex_error env loc Parse_error.UnterminatedRegExp in - env, "" - - | '\\', line_terminator_sequence -> - let loc = loc_of_lexbuf env lexbuf in - let env = lex_error env loc Parse_error.UnterminatedRegExp in - env, "" - - | '\\', any -> - let s = lexeme lexbuf in - Buffer.add_string buf s; - regexp_body env buf lexbuf - - | '/', Plus id_letter -> - let flags = - let str = lexeme lexbuf in - String.sub str 1 (String.length str - 1) - in - env, flags - - | '/' -> - env, "" - - | '[' -> - Buffer.add_char buf '['; - let env = regexp_class env buf lexbuf in - regexp_body env buf lexbuf - - | 
line_terminator_sequence -> - let loc = loc_of_lexbuf env lexbuf in - let env = lex_error env loc Parse_error.UnterminatedRegExp in - env, "" - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl (eof | '\\' | '/' | '[' | line_terminator_sequence_start)) - | any -> - let str = lexeme lexbuf in - Buffer.add_string buf str; - regexp_body env buf lexbuf - - | _ -> failwith "unreachable" - - -let regexp env lexbuf = - match%sedlex lexbuf with - | eof -> Token (env, T_EOF) - - | line_terminator_sequence -> - let env = new_line env lexbuf in - Continue env - - | Plus whitespace -> - Continue env - - | "//" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = line_comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf false) - - | "/*" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - - | '/' -> - let start = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, flags = regexp_body env buf lexbuf in - let _end = end_pos_of_lexbuf env lexbuf in - let loc = { Loc.source = Lex_env.source env; start; _end } in - Token (env, T_REGEXP (loc, Buffer.contents buf, flags)) - - | any -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - Token (env, T_ERROR (lexeme lexbuf)) - - | _ -> failwith "unreachable" - - -let rec jsx_text env mode buf raw lexbuf = - match%sedlex lexbuf with - | "'" | '"' | '<' | '{' -> - let c = lexeme lexbuf in - begin match mode, c with - | JSX_SINGLE_QUOTED_TEXT, "'" - | JSX_DOUBLE_QUOTED_TEXT, "\"" -> - env - | JSX_CHILD_TEXT, ("<" | "{") -> - (* Don't actually want to consume these guys - * yet...they're not part of the JSX text *) - Sedlexing.rollback lexbuf; - env - | _ -> - Buffer.add_string raw c; - Buffer.add_string buf c; - jsx_text env mode buf raw lexbuf - end - - | eof -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - env - - | line_terminator_sequence -> - let lt = lexeme lexbuf in - Buffer.add_string raw lt; - Buffer.add_string buf lt; - let env = new_line env lexbuf in - jsx_text env mode buf raw lexbuf - - | "&#x", Plus hex_digit, ';' -> - let s = lexeme lexbuf in - let n = String.sub s 3 (String.length s - 4) in - Buffer.add_string raw s; - let code = int_of_string ("0x" ^ n) in - Wtf8.add_wtf_8 buf code; - jsx_text env mode buf raw lexbuf - - | "&#", Plus digit, ';' -> - let s = lexeme lexbuf in - let n = String.sub s 2 (String.length s - 3) in - Buffer.add_string raw s; - let code = int_of_string n in - Wtf8.add_wtf_8 buf code; - jsx_text env mode buf raw lexbuf - - | "&", htmlentity, ';' -> - let s = lexeme lexbuf in - let entity = String.sub s 1 (String.length s - 2) in - Buffer.add_string raw s; - let code = match entity with - | "quot" -> Some 0x0022 - | "amp" -> Some 0x0026 - | "apos" -> Some 0x0027 - | "lt" -> Some 0x003C - | "gt" -> Some 0x003E - | "nbsp" -> Some 0x00A0 - | "iexcl" -> Some 0x00A1 - | "cent" -> Some 0x00A2 - | "pound" -> Some 0x00A3 - | "curren" -> Some 0x00A4 - | "yen" -> Some 0x00A5 - | "brvbar" -> Some 0x00A6 - | "sect" -> Some 0x00A7 - | "uml" -> Some 0x00A8 - | "copy" -> Some 0x00A9 - | "ordf" -> Some 0x00AA - | "laquo" -> Some 0x00AB - | "not" -> Some 0x00AC - | "shy" -> Some 0x00AD - | "reg" -> Some 0x00AE - | "macr" -> Some 0x00AF - | "deg" -> Some 0x00B0 - | "plusmn" -> Some 0x00B1 - | "sup2" -> Some 0x00B2 - | "sup3" 
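(* Annotation, not part of the patch: regexp_body and regexp_class together
   split a regexp literal into its pattern and flags, honoring backslash
   escapes and character classes (an unescaped '/' inside [...] does not end
   the literal). A standalone approximation over a plain string; the real
   lexer additionally rejects line terminators and reads flags as identifier
   letters. *)
let split_regexp_literal s =
  (* s is the text after the opening '/', e.g. "ab[/]c/gi" *)
  let buf = Buffer.create 16 in
  let n = String.length s in
  let rec body i in_class =
    if i >= n then
      None (* unterminated *)
    else
      match s.[i] with
      | '\\' when i + 1 < n ->
        Buffer.add_char buf '\\';
        Buffer.add_char buf s.[i + 1];
        body (i + 2) in_class
      | '[' -> Buffer.add_char buf '['; body (i + 1) true
      | ']' when in_class -> Buffer.add_char buf ']'; body (i + 1) false
      | '/' when not in_class -> Some (Buffer.contents buf, String.sub s (i + 1) (n - i - 1))
      | c -> Buffer.add_char buf c; body (i + 1) in_class
  in
  body 0 false

let () =
  assert (split_regexp_literal "ab[/]c/gi" = Some ("ab[/]c", "gi"));
  assert (split_regexp_literal "a\\/b/" = Some ("a\\/b", ""))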
-> Some 0x00B3 - | "acute" -> Some 0x00B4 - | "micro" -> Some 0x00B5 - | "para" -> Some 0x00B6 - | "middot" -> Some 0x00B7 - | "cedil" -> Some 0x00B8 - | "sup1" -> Some 0x00B9 - | "ordm" -> Some 0x00BA - | "raquo" -> Some 0x00BB - | "frac14" -> Some 0x00BC - | "frac12" -> Some 0x00BD - | "frac34" -> Some 0x00BE - | "iquest" -> Some 0x00BF - | "Agrave" -> Some 0x00C0 - | "Aacute" -> Some 0x00C1 - | "Acirc" -> Some 0x00C2 - | "Atilde" -> Some 0x00C3 - | "Auml" -> Some 0x00C4 - | "Aring" -> Some 0x00C5 - | "AElig" -> Some 0x00C6 - | "Ccedil" -> Some 0x00C7 - | "Egrave" -> Some 0x00C8 - | "Eacute" -> Some 0x00C9 - | "Ecirc" -> Some 0x00CA - | "Euml" -> Some 0x00CB - | "Igrave" -> Some 0x00CC - | "Iacute" -> Some 0x00CD - | "Icirc" -> Some 0x00CE - | "Iuml" -> Some 0x00CF - | "ETH" -> Some 0x00D0 - | "Ntilde" -> Some 0x00D1 - | "Ograve" -> Some 0x00D2 - | "Oacute" -> Some 0x00D3 - | "Ocirc" -> Some 0x00D4 - | "Otilde" -> Some 0x00D5 - | "Ouml" -> Some 0x00D6 - | "times" -> Some 0x00D7 - | "Oslash" -> Some 0x00D8 - | "Ugrave" -> Some 0x00D9 - | "Uacute" -> Some 0x00DA - | "Ucirc" -> Some 0x00DB - | "Uuml" -> Some 0x00DC - | "Yacute" -> Some 0x00DD - | "THORN" -> Some 0x00DE - | "szlig" -> Some 0x00DF - | "agrave" -> Some 0x00E0 - | "aacute" -> Some 0x00E1 - | "acirc" -> Some 0x00E2 - | "atilde" -> Some 0x00E3 - | "auml" -> Some 0x00E4 - | "aring" -> Some 0x00E5 - | "aelig" -> Some 0x00E6 - | "ccedil" -> Some 0x00E7 - | "egrave" -> Some 0x00E8 - | "eacute" -> Some 0x00E9 - | "ecirc" -> Some 0x00EA - | "euml" -> Some 0x00EB - | "igrave" -> Some 0x00EC - | "iacute" -> Some 0x00ED - | "icirc" -> Some 0x00EE - | "iuml" -> Some 0x00EF - | "eth" -> Some 0x00F0 - | "ntilde" -> Some 0x00F1 - | "ograve" -> Some 0x00F2 - | "oacute" -> Some 0x00F3 - | "ocirc" -> Some 0x00F4 - | "otilde" -> Some 0x00F5 - | "ouml" -> Some 0x00F6 - | "divide" -> Some 0x00F7 - | "oslash" -> Some 0x00F8 - | "ugrave" -> Some 0x00F9 - | "uacute" -> Some 0x00FA - | "ucirc" -> Some 0x00FB - | "uuml" -> Some 0x00FC - | "yacute" -> Some 0x00FD - | "thorn" -> Some 0x00FE - | "yuml" -> Some 0x00FF - | "OElig" -> Some 0x0152 - | "oelig" -> Some 0x0153 - | "Scaron" -> Some 0x0160 - | "scaron" -> Some 0x0161 - | "Yuml" -> Some 0x0178 - | "fnof" -> Some 0x0192 - | "circ" -> Some 0x02C6 - | "tilde" -> Some 0x02DC - | "Alpha" -> Some 0x0391 - | "Beta" -> Some 0x0392 - | "Gamma" -> Some 0x0393 - | "Delta" -> Some 0x0394 - | "Epsilon" -> Some 0x0395 - | "Zeta" -> Some 0x0396 - | "Eta" -> Some 0x0397 - | "Theta" -> Some 0x0398 - | "Iota" -> Some 0x0399 - | "Kappa" -> Some 0x039A - | "Lambda" -> Some 0x039B - | "Mu" -> Some 0x039C - | "Nu" -> Some 0x039D - | "Xi" -> Some 0x039E - | "Omicron" -> Some 0x039F - | "Pi" -> Some 0x03A0 - | "Rho" -> Some 0x03A1 - | "Sigma" -> Some 0x03A3 - | "Tau" -> Some 0x03A4 - | "Upsilon" -> Some 0x03A5 - | "Phi" -> Some 0x03A6 - | "Chi" -> Some 0x03A7 - | "Psi" -> Some 0x03A8 - | "Omega" -> Some 0x03A9 - | "alpha" -> Some 0x03B1 - | "beta" -> Some 0x03B2 - | "gamma" -> Some 0x03B3 - | "delta" -> Some 0x03B4 - | "epsilon" -> Some 0x03B5 - | "zeta" -> Some 0x03B6 - | "eta" -> Some 0x03B7 - | "theta" -> Some 0x03B8 - | "iota" -> Some 0x03B9 - | "kappa" -> Some 0x03BA - | "lambda" -> Some 0x03BB - | "mu" -> Some 0x03BC - | "nu" -> Some 0x03BD - | "xi" -> Some 0x03BE - | "omicron" -> Some 0x03BF - | "pi" -> Some 0x03C0 - | "rho" -> Some 0x03C1 - | "sigmaf" -> Some 0x03C2 - | "sigma" -> Some 0x03C3 - | "tau" -> Some 0x03C4 - | "upsilon" -> Some 0x03C5 - | "phi" -> Some 0x03C6 - | "chi" -> Some 0x03C7 - | "psi" -> Some 
0x03C8 - | "omega" -> Some 0x03C9 - | "thetasym" -> Some 0x03D1 - | "upsih" -> Some 0x03D2 - | "piv" -> Some 0x03D6 - | "ensp" -> Some 0x2002 - | "emsp" -> Some 0x2003 - | "thinsp" -> Some 0x2009 - | "zwnj" -> Some 0x200C - | "zwj" -> Some 0x200D - | "lrm" -> Some 0x200E - | "rlm" -> Some 0x200F - | "ndash" -> Some 0x2013 - | "mdash" -> Some 0x2014 - | "lsquo" -> Some 0x2018 - | "rsquo" -> Some 0x2019 - | "sbquo" -> Some 0x201A - | "ldquo" -> Some 0x201C - | "rdquo" -> Some 0x201D - | "bdquo" -> Some 0x201E - | "dagger" -> Some 0x2020 - | "Dagger" -> Some 0x2021 - | "bull" -> Some 0x2022 - | "hellip" -> Some 0x2026 - | "permil" -> Some 0x2030 - | "prime" -> Some 0x2032 - | "Prime" -> Some 0x2033 - | "lsaquo" -> Some 0x2039 - | "rsaquo" -> Some 0x203A - | "oline" -> Some 0x203E - | "frasl" -> Some 0x2044 - | "euro" -> Some 0x20AC - | "image" -> Some 0x2111 - | "weierp" -> Some 0x2118 - | "real" -> Some 0x211C - | "trade" -> Some 0x2122 - | "alefsym" -> Some 0x2135 - | "larr" -> Some 0x2190 - | "uarr" -> Some 0x2191 - | "rarr" -> Some 0x2192 - | "darr" -> Some 0x2193 - | "harr" -> Some 0x2194 - | "crarr" -> Some 0x21B5 - | "lArr" -> Some 0x21D0 - | "uArr" -> Some 0x21D1 - | "rArr" -> Some 0x21D2 - | "dArr" -> Some 0x21D3 - | "hArr" -> Some 0x21D4 - | "forall" -> Some 0x2200 - | "part" -> Some 0x2202 - | "exist" -> Some 0x2203 - | "empty" -> Some 0x2205 - | "nabla" -> Some 0x2207 - | "isin" -> Some 0x2208 - | "notin" -> Some 0x2209 - | "ni" -> Some 0x220B - | "prod" -> Some 0x220F - | "sum" -> Some 0x2211 - | "minus" -> Some 0x2212 - | "lowast" -> Some 0x2217 - | "radic" -> Some 0x221A - | "prop" -> Some 0x221D - | "infin" -> Some 0x221E - | "ang" -> Some 0x2220 - | "and" -> Some 0x2227 - | "or" -> Some 0x2228 - | "cap" -> Some 0x2229 - | "cup" -> Some 0x222A - | "'int'" -> Some 0x222B - | "there4" -> Some 0x2234 - | "sim" -> Some 0x223C - | "cong" -> Some 0x2245 - | "asymp" -> Some 0x2248 - | "ne" -> Some 0x2260 - | "equiv" -> Some 0x2261 - | "le" -> Some 0x2264 - | "ge" -> Some 0x2265 - | "sub" -> Some 0x2282 - | "sup" -> Some 0x2283 - | "nsub" -> Some 0x2284 - | "sube" -> Some 0x2286 - | "supe" -> Some 0x2287 - | "oplus" -> Some 0x2295 - | "otimes" -> Some 0x2297 - | "perp" -> Some 0x22A5 - | "sdot" -> Some 0x22C5 - | "lceil" -> Some 0x2308 - | "rceil" -> Some 0x2309 - | "lfloor" -> Some 0x230A - | "rfloor" -> Some 0x230B - | "lang" -> Some 0x27E8 (* 0x2329 in HTML4 *) - | "rang" -> Some 0x27E9 (* 0x232A in HTML4 *) - | "loz" -> Some 0x25CA - | "spades" -> Some 0x2660 - | "clubs" -> Some 0x2663 - | "hearts" -> Some 0x2665 - | "diams" -> Some 0x2666 - | _ -> None in - (match code with - | Some code -> Wtf8.add_wtf_8 buf code - | None -> Buffer.add_string buf ("&" ^ entity ^";")); - jsx_text env mode buf raw lexbuf - - (* match multi-char substrings that don't contain the start chars of the above patterns *) - | Plus (Compl ("'" | '"' | '<' | '{' | '&' | eof | line_terminator_sequence_start)) - | any -> - let c = lexeme lexbuf in - Buffer.add_string raw c; - Buffer.add_string buf c; - jsx_text env mode buf raw lexbuf - - | _ -> failwith "unreachable" - - -let jsx_tag env lexbuf = - match%sedlex lexbuf with - | eof -> - Token (env, T_EOF) - - | line_terminator_sequence -> - let env = new_line env lexbuf in - Continue env - - | Plus whitespace -> - Continue env - - | "//" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = line_comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf false) - - | "/*" -> - let 
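(* Annotation, not part of the patch: the numeric character references above
   ("&#...;" and "&#x...;") are decoded to a code point and appended as WTF-8.
   Standalone approximation using the stdlib UTF-8 encoder in place of Flow's
   Wtf8 module (an assumed difference: WTF-8 also tolerates lone surrogates,
   which Uchar rejects); `add_numeric_entity` is an illustrative name. *)
let add_numeric_entity buf s =
  (* s looks like "&#65;" or "&#x41;" *)
  let n = String.sub s 2 (String.length s - 3) in
  let code =
    if String.length n > 0 && (n.[0] = 'x' || n.[0] = 'X') then
      int_of_string ("0x" ^ String.sub n 1 (String.length n - 1))
    else
      int_of_string n
  in
  if Uchar.is_valid code then
    Buffer.add_utf_8_uchar buf (Uchar.of_int code)
  else
    Buffer.add_string buf s (* leave invalid references as written *)

let () =
  let buf = Buffer.create 8 in
  add_numeric_entity buf "&#x41;";
  add_numeric_entity buf "&#169;";
  assert (Buffer.contents buf = "A\xC2\xA9")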
start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - - | '<' -> Token (env, T_LESS_THAN) - | '/' -> Token (env, T_DIV) - | '>' -> Token (env, T_GREATER_THAN) - | '{' -> Token (env, T_LCURLY) - | ':' -> Token (env, T_COLON) - | '.' -> Token (env, T_PERIOD) - | '=' -> Token (env, T_ASSIGN) - | js_id_start, Star ('-' | js_id_continue) -> - Token (env, T_JSX_IDENTIFIER { raw = lexeme lexbuf }) - - | "'" | '"' -> - let quote = lexeme lexbuf in - let start = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let raw = Buffer.create 127 in - Buffer.add_string raw quote; - let mode = if quote = "'" - then JSX_SINGLE_QUOTED_TEXT - else JSX_DOUBLE_QUOTED_TEXT in - let env = jsx_text env mode buf raw lexbuf in - let _end = end_pos_of_lexbuf env lexbuf in - Buffer.add_string raw quote; - let value = Buffer.contents buf in - let raw = Buffer.contents raw in - let loc = { Loc.source = Lex_env.source env; start; _end } in - Token (env, T_JSX_TEXT (loc, value, raw)) - - | any -> - Token (env, T_ERROR (lexeme lexbuf)) - - | _ -> failwith "unreachable" - - -let jsx_child env start buf raw lexbuf = - match%sedlex lexbuf with - | line_terminator_sequence -> - let lt = lexeme lexbuf in - Buffer.add_string raw lt; - Buffer.add_string buf lt; - let env = new_line env lexbuf in - let env = jsx_text env JSX_CHILD_TEXT buf raw lexbuf in - let _end = end_pos_of_lexbuf env lexbuf in - let value = Buffer.contents buf in - let raw = Buffer.contents raw in - let loc = { Loc.source = Lex_env.source env; start; _end } in - env, T_JSX_TEXT (loc, value, raw) - - | eof -> env, T_EOF - | '<' -> env, T_LESS_THAN - | '{' -> env, T_LCURLY - - | any -> - let c = lexeme lexbuf in - Buffer.add_string raw c; - Buffer.add_string buf c; - let env = jsx_text env JSX_CHILD_TEXT buf raw lexbuf in - let _end = end_pos_of_lexbuf env lexbuf in - let value = Buffer.contents buf in - let raw = Buffer.contents raw in - let loc = { Loc.source = Lex_env.source env; start; _end } in - env, T_JSX_TEXT (loc, value, raw) - - | _ -> failwith "unreachable" - - -let template_tail env lexbuf = - match%sedlex lexbuf with - | line_terminator_sequence -> - let env = new_line env lexbuf in - Continue env - - | Plus whitespace -> - Continue env - - | "//" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = line_comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf false) - - | "/*" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - - | '}' -> - let start = start_pos_of_lexbuf env lexbuf in - let cooked = Buffer.create 127 in - let raw = Buffer.create 127 in - let literal = Buffer.create 127 in - Buffer.add_string literal "}"; - let env, is_tail = template_part env cooked raw literal lexbuf in - let _end = end_pos_of_lexbuf env lexbuf in - let loc = { Loc.source = Lex_env.source env; start; _end } in - Token (env, T_TEMPLATE_PART (loc, { - cooked = Buffer.contents cooked; - raw = Buffer.contents raw; - literal = Buffer.contents literal; - }, is_tail)) - - | any -> - let env = illegal env (loc_of_lexbuf env lexbuf) in - Token (env, T_TEMPLATE_PART ( - loc_of_lexbuf env lexbuf, - { cooked = ""; raw = ""; literal = ""; }, - true - )) - - | _ -> failwith "unreachable" - - -(* There are some 
tokens that never show up in a type and which can cause - * ambiguity. For example, Foo> ends with two angle brackets, not - * with a right shift. - *) -let type_token env lexbuf = - match%sedlex lexbuf with - | line_terminator_sequence -> - let env = new_line env lexbuf in - Continue env - - | Plus whitespace -> - Continue env - - | "/*" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - - | "/*", Star whitespace, (":" | "::" | "flow-include") -> - let pattern = lexeme lexbuf in - if not (is_comment_syntax_enabled env) then - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - Buffer.add_string buf pattern; - let env, end_pos = comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf true) - else - let env = - if is_in_comment_syntax env then - let loc = loc_of_lexbuf env lexbuf in - unexpected_error env loc pattern - else env - in - let env = in_comment_syntax true env in - let len = Sedlexing.lexeme_length lexbuf in - if Sedlexing.Utf8.sub_lexeme lexbuf (len - 1) 1 = ":" && - Sedlexing.Utf8.sub_lexeme lexbuf (len - 2) 1 <> ":" then - Token (env, T_COLON) - else - Continue env - - | "*/" -> - if is_in_comment_syntax env then - let env = in_comment_syntax false env in - Continue env - else begin - Sedlexing.rollback lexbuf; - match%sedlex lexbuf with - | "*" -> Token (env, T_MULT) - | _ -> failwith "expected *" - end - - | "//" -> - let start_pos = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let env, end_pos = line_comment env buf lexbuf in - Comment (env, mk_comment env start_pos end_pos buf false) - - | "'" | '"' -> - let quote = lexeme lexbuf in - let start = start_pos_of_lexbuf env lexbuf in - let buf = Buffer.create 127 in - let raw = Buffer.create 127 in - Buffer.add_string raw quote; - let octal = false in - let env, _end, octal = string_quote env quote buf raw octal lexbuf in - let loc = { Loc.source = Lex_env.source env; start; _end } in - Token (env, T_STRING (loc, Buffer.contents buf, Buffer.contents raw, octal)) - - (** - * Number literals - *) - - | Opt neg, binnumber, (letter | '2'..'9'), Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | Opt neg, binnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton BINARY num) - | _ -> failwith "unreachable" - ) - - | Opt neg, binnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton BINARY num) - - | Opt neg, octnumber, (letter | '8'..'9'), Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | Opt neg, octnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton OCTAL num) - | _ -> failwith "unreachable" - ) - - | Opt neg, octnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton OCTAL num) - - | Opt neg, legacyoctnumber, (letter | '8'..'9'), Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | Opt neg, legacyoctnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton LEGACY_OCTAL num) - | _ -> failwith "unreachable" - ) - - | Opt neg, legacyoctnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton LEGACY_OCTAL num) - - | Opt neg, hexnumber, 
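(* Annotation, not part of the patch: with Flow's comment-types syntax enabled,
   an opener ending in a single ':' ("/*:") is emitted as T_COLON, while
   "/*::" and "/* flow-include" merely switch the lexer into comment-syntax
   mode. The sub_lexeme test above reduces to this predicate (`emits_colon`
   is an illustrative name): *)
let emits_colon lexeme =
  let n = String.length lexeme in
  n >= 2 && lexeme.[n - 1] = ':' && lexeme.[n - 2] <> ':'

let () =
  assert (emits_colon "/*:");
  assert (not (emits_colon "/*::"));
  assert (not (emits_colon "/* flow-include"))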
non_hex_letter, Star alphanumeric -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | Opt neg, hexnumber -> - let num = lexeme lexbuf in - begin try Token (env, mk_num_singleton NORMAL num) - with _ when Sys.win32 -> - let loc = loc_of_lexbuf env lexbuf in - let env = lex_error env loc Parse_error.WindowsFloatOfString in - Token (env, T_NUMBER_SINGLETON_TYPE { kind = NORMAL; value = 789.0; raw = "789" }) - end - | _ -> failwith "unreachable" - ) - - | Opt neg, hexnumber -> - let num = lexeme lexbuf in - begin try Token (env, mk_num_singleton NORMAL num) - with _ when Sys.win32 -> - let loc = loc_of_lexbuf env lexbuf in - let env = lex_error env loc Parse_error.WindowsFloatOfString in - Token (env, T_NUMBER_SINGLETON_TYPE { kind = NORMAL; value = 789.0; raw = "789" }) - end - - | Opt neg, scinumber, word -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | Opt neg, scinumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton NORMAL num) - | _ -> failwith "unreachable" - ) - - | Opt neg, scinumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton NORMAL num) - - | Opt neg, (wholenumber | floatnumber), word -> - (* Numbers cannot be immediately followed by words *) - recover env lexbuf ~f:(fun env lexbuf -> match%sedlex lexbuf with - | Opt neg, wholenumber | floatnumber -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton NORMAL num) - | _ -> failwith "unreachable" - ) - - | Opt neg, (wholenumber | floatnumber) -> - let num = lexeme lexbuf in - Token (env, mk_num_singleton NORMAL num) - - (* Keywords *) - | "any" -> Token (env, T_ANY_TYPE) - | "bool" -> Token (env, (T_BOOLEAN_TYPE BOOL)) - | "boolean" -> Token (env, (T_BOOLEAN_TYPE BOOLEAN)) - | "empty" -> Token (env, T_EMPTY_TYPE) - | "extends" -> Token (env, T_EXTENDS) - | "false" -> Token (env, T_FALSE) - | "interface" -> Token (env, T_INTERFACE) - | "mixed" -> Token (env, T_MIXED_TYPE) - | "null" -> Token (env, T_NULL) - | "number" -> Token (env, T_NUMBER_TYPE) - | "static" -> Token (env, T_STATIC) - | "string" -> Token (env, T_STRING_TYPE) - | "true" -> Token (env, T_TRUE) - | "typeof" -> Token (env, T_TYPEOF) - | "void" -> Token (env, T_VOID_TYPE) - - (* Identifiers *) - | js_id_start, Star js_id_continue -> - let loc = loc_of_lexbuf env lexbuf in - let raw = lexeme lexbuf in - let env, value = decode_identifier env raw in - Token (env, T_IDENTIFIER { loc; value; raw }) - - | "%checks" -> Token (env, T_CHECKS) - (* Syntax *) - | "[" -> Token (env, T_LBRACKET) - | "]" -> Token (env, T_RBRACKET) - | "{" -> Token (env, T_LCURLY) - | "}" -> Token (env, T_RCURLY) - | "{|" -> Token (env, T_LCURLYBAR) - | "|}" -> Token (env, T_RCURLYBAR) - | "(" -> Token (env, T_LPAREN) - | ")" -> Token (env, T_RPAREN) - | "..." -> Token (env, T_ELLIPSIS) - | "." -> Token (env, T_PERIOD) - | ";" -> Token (env, T_SEMICOLON) - | "," -> Token (env, T_COMMA) - | ":" -> Token (env, T_COLON) - | "?" -> Token (env, T_PLING) - | "[" -> Token (env, T_LBRACKET) - | "]" -> Token (env, T_RBRACKET) - (* Generics *) - | "<" -> Token (env, T_LESS_THAN) - | ">" -> Token (env, T_GREATER_THAN) - (* Generic default *) - | "=" -> Token (env, T_ASSIGN) - (* Optional or nullable *) - | "?" 
-> Token (env, T_PLING) - (* Existential *) - | "*" -> Token (env, T_MULT) - (* Annotation or bound *) - | ":" -> Token (env, T_COLON) - (* Union *) - | '|' -> Token (env, T_BIT_OR) - (* Intersection *) - | '&' -> Token (env, T_BIT_AND) - (* typeof *) - | "typeof" -> Token (env, T_TYPEOF) - (* Function type *) - | "=>" -> Token (env, T_ARROW) - (* Type alias *) - | '=' -> Token (env, T_ASSIGN) - (* Variance annotations *) - | '+' -> Token (env, T_PLUS) - | '-' -> Token (env, T_MINUS) - - (* Others *) - | eof -> - let env = - if is_in_comment_syntax env then - let loc = loc_of_lexbuf env lexbuf in - lex_error env loc Parse_error.UnexpectedEOS - else env - in - Token (env, T_EOF) - - | any -> - Token (env, T_ERROR (lexeme lexbuf)) - - | _ -> failwith "unreachable" - - -(* Lexing JSX children requires a string buffer to keep track of whitespace - * *) -let jsx_child env = - (* yes, the _start_ of the child is the _end_pos_ of the lexbuf! *) - let start = end_pos_of_lexbuf env env.lex_lb in - let buf = Buffer.create 127 in - let raw = Buffer.create 127 in - let env, child = jsx_child env start buf raw env.lex_lb in - get_result_and_clear_state (env, child, []) - -let wrap f = - let rec helper comments env = - match f env env.lex_lb with - | Token (env, t) -> env, t, List.rev comments - | Comment (env, comment) -> helper (comment::comments) env - | Continue env -> helper comments env - in - fun env -> get_result_and_clear_state (helper [] env) - -let regexp = wrap regexp -let jsx_tag = wrap jsx_tag -let template_tail = wrap template_tail -let type_token = wrap type_token -let token = wrap token diff --git a/src/parser/libflowparser.h b/src/parser/libflowparser.h index 9e9a3c4d612..9fb34aaf1ed 100644 --- a/src/parser/libflowparser.h +++ b/src/parser/libflowparser.h @@ -1,5 +1,5 @@ /** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -50,6 +50,7 @@ value cons(value hd, value tl) { template class AbstractTranslator { public: + virtual ~AbstractTranslator() = default; virtual T convert_string(char *str) = 0; virtual T convert_number(double n) = 0; virtual T convert_bool(long b) = 0; diff --git a/src/parser/libflowparser.ml b/src/parser/libflowparser.ml index 0391f6b19c3..033fe5e312b 100644 --- a/src/parser/libflowparser.ml +++ b/src/parser/libflowparser.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
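(* Annotation, not part of the patch: each low-level lexer rule above yields
   Token, Comment, or Continue, and `wrap` loops until a real token appears,
   collecting the comments seen along the way. Standalone miniature of that
   driver shape; the `result` type, `drive`, and `next` are illustrative. *)
type result =
  | Token of string
  | Comment of string
  | Continue

let drive next =
  let rec helper comments =
    match next () with
    | Token t -> (t, List.rev comments)
    | Comment c -> helper (c :: comments)
    | Continue -> helper comments
  in
  helper []

let () =
  let script = ref [ Continue; Comment "license header"; Continue; Token "T_IDENTIFIER" ] in
  let next () =
    match !script with
    | [] -> Token "T_EOF"
    | r :: rest ->
      script := rest;
      r
  in
  assert (drive next = ("T_IDENTIFIER", [ "license header" ]))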
@@ -15,74 +15,88 @@ type json = | JBool of bool | JNull -module AbstractTranslator : ( - Translator_intf.S with type t = json -) = struct +module AbstractTranslator : Translator_intf.S with type t = json = struct type t = json + let string x = JString x + let bool x = JBool x + let obj props = JObject props + let array arr = JArray arr + let number x = JNumber x + + let int x = number (float x) (* TODO: this is inefficient, push ints to C *) + let null = JNull + let regexp _loc _pattern _flags = JNull end -module Translate = Estree_translator.Translate (AbstractTranslator) (struct - (* TODO: make these configurable via CLI flags *) - let include_comments = true - let include_locs = true -end) +module Translate = + Estree_translator.Translate + (AbstractTranslator) + (struct + (* TODO: make these configurable via CLI flags *) + + let include_interned_comments = false + + let include_comments = true + + let include_locs = true + end) module Token_translator = Token_translator.Translate (AbstractTranslator) -let translate_tokens tokens = - AbstractTranslator.array (List.rev_map Token_translator.token tokens) +let translate_tokens offset_table tokens = + AbstractTranslator.array (List.rev_map (Token_translator.token offset_table) tokens) let convert_options opts = - let open Parser_env in - List.fold_left (fun (opts, tokens) (k, v) -> - match k with - | "esproposal_class_instance_fields" -> - { opts with esproposal_class_instance_fields = v }, tokens - | "esproposal_class_static_fields" -> - { opts with esproposal_class_static_fields = v }, tokens - | "esproposal_decorators" -> - { opts with esproposal_decorators = v }, tokens - | "esproposal_export_star_as" -> - { opts with esproposal_export_star_as = v }, tokens - | "esproposal_optional_chaining" -> - { opts with esproposal_optional_chaining = v }, tokens - | "types" -> - { opts with types = v }, tokens - | "use_strict" -> - { opts with use_strict = v }, tokens - | "tokens" -> - opts, v - | _ -> - opts, tokens (* ignore unknown stuff for future-compatibility *) - ) (Parser_env.default_parse_options, false) opts + Parser_env.( + List.fold_left + (fun (opts, tokens) (k, v) -> + match k with + | "enums" -> ({ opts with enums = v }, tokens) + | "esproposal_class_instance_fields" -> + ({ opts with esproposal_class_instance_fields = v }, tokens) + | "esproposal_class_static_fields" -> + ({ opts with esproposal_class_static_fields = v }, tokens) + | "esproposal_decorators" -> ({ opts with esproposal_decorators = v }, tokens) + | "esproposal_export_star_as" -> ({ opts with esproposal_export_star_as = v }, tokens) + | "esproposal_optional_chaining" -> ({ opts with esproposal_optional_chaining = v }, tokens) + | "esproposal_nullish_coalescing" -> + ({ opts with esproposal_nullish_coalescing = v }, tokens) + | "types" -> ({ opts with types = v }, tokens) + | "use_strict" -> ({ opts with use_strict = v }, tokens) + | "tokens" -> (opts, v) + | _ -> (opts, tokens)) + (* ignore unknown stuff for future-compatibility *) + (Parser_env.default_parse_options, false) + opts) let parse content options = - let parse_options, include_tokens = convert_options options in - + let (parse_options, include_tokens) = convert_options options in let rev_tokens = ref [] in let token_sink = - if include_tokens then Some (fun token_data -> rev_tokens := token_data::!rev_tokens) - else None + if include_tokens then + Some (fun token_data -> rev_tokens := token_data :: !rev_tokens) + else + None in - - let (ast, errors) = Parser_flow.program - ~fail:false ~parse_options:(Some 
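(* Annotation, not part of the patch: convert_options above folds a
   (string * bool) assoc list into the parse options plus a separate
   "emit tokens?" flag, silently ignoring unknown keys for forward
   compatibility. Standalone miniature with a two-field stand-in record: *)
type opts = {
  types: bool;
  use_strict: bool;
}

let convert kvs =
  List.fold_left
    (fun (opts, tokens) (k, v) ->
      match k with
      | "types" -> ({ opts with types = v }, tokens)
      | "use_strict" -> ({ opts with use_strict = v }, tokens)
      | "tokens" -> (opts, v)
      | _ -> (opts, tokens) (* unknown key: ignore *))
    ({ types = true; use_strict = false }, false)
    kvs

let () =
  assert (convert [("tokens", true); ("some_future_flag", true)]
          = ({ types = true; use_strict = false }, true))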
parse_options) ~token_sink - content + let (ast, errors) = + Parser_flow.program ~fail:false ~parse_options:(Some parse_options) ~token_sink content in - - match Translate.program ast with + let offset_table = Offset_utils.make content in + match Translate.program (Some offset_table) ast with | JObject params -> - let params = ("errors", Translate.errors errors)::params in + let params = ("errors", Translate.errors errors) :: params in let params = - if include_tokens then ("tokens", translate_tokens !rev_tokens)::params - else params + if include_tokens then + ("tokens", translate_tokens offset_table !rev_tokens) :: params + else + params in JObject params | _ -> assert false diff --git a/src/parser/loc.ml b/src/parser/loc.ml index 568585d8ae4..e5cd865ee81 100644 --- a/src/parser/loc.ml +++ b/src/parser/loc.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,43 +8,35 @@ type position = { line: int; column: int; - offset: int; -} [@@deriving show] +} +[@@deriving show] +(* If you are modifying this record, go look at ALoc.ml and make sure you understand the + * representation there. *) type t = { source: File_key.t option; start: position; _end: position; -} [@@deriving show] - -let none = { - source = None; - start = { line = 0; column = 0; offset = 0; }; - _end = { line = 0; column = 0; offset = 0; }; } +[@@deriving show] -let btwn loc1 loc2 = { - source = loc1.source; - start = loc1.start; - _end = loc2._end; -} +let none = { source = None; start = { line = 0; column = 0 }; _end = { line = 0; column = 0 } } -let btwn_exclusive loc1 loc2 = { - source = loc1.source; - start = loc1._end; - _end = loc2.start; -} +let btwn loc1 loc2 = { source = loc1.source; start = loc1.start; _end = loc2._end } (* Returns the position immediately before the start of the given loc. If the given loc is at the beginning of a line, return the position of the first char on the same line. *) let char_before loc = let start = - let { line; column; offset } = loc.start in - let column, offset = if column > 0 - then column - 1, offset - 1 - else column, offset in - { line; column; offset } + let { line; column } = loc.start in + let column = + if column > 0 then + column - 1 + else + column + in + { line; column } in let _end = loc.start in { loc with start; _end } @@ -53,11 +45,15 @@ let char_before loc = * first line is a newline character, but is still consistent with loc orderings. *) let first_char loc = let start = loc.start in - let _end = {start with column = start.column + 1; offset = start.offset + 1} in - {loc with _end} + let _end = { start with column = start.column + 1 } in + { loc with _end } let pos_cmp a b = - let k = a.line - b.line in if k = 0 then a.column - b.column else k + let k = a.line - b.line in + if k = 0 then + a.column - b.column + else + k (** * If `a` spans (completely contains) `b`, then returns 0. 
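(* Annotation, not part of the patch: with `offset` dropped from Loc.position,
   byte offsets are recomputed on demand from the source text (the patch builds
   a table via Offset_utils.make and threads it through the translators).
   Minimal stand-in for such a table, assuming '\n'-only line breaks and
   byte-counted columns; the real module also handles multi-byte characters. *)
let make_offset_table src =
  (* line_starts.(i) holds the byte offset of the first character of line i+1 *)
  let starts = ref [ 0 ] in
  String.iteri (fun i c -> if c = '\n' then starts := (i + 1) :: !starts) src;
  let line_starts = Array.of_list (List.rev !starts) in
  fun ~line ~column -> line_starts.(line - 1) + column

let () =
  let offset = make_offset_table "let x = 1\nlet y = 2\n" in
  assert (offset ~line:1 ~column:4 = 4);  (* points at "x" *)
  assert (offset ~line:2 ~column:4 = 14)  (* points at "y" *)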
@@ -70,27 +66,33 @@ let span_compare a b = let k = pos_cmp a.start b.start in if k <= 0 then let k = pos_cmp a._end b._end in - if k >= 0 then 0 else -1 - else 1 - else k + if k >= 0 then + 0 + else + -1 + else + 1 + else + k (* Returns true if loc1 entirely overlaps loc2 *) let contains loc1 loc2 = span_compare loc1 loc2 = 0 (* Returns true if loc1 intersects loc2 at all *) let lines_intersect loc1 loc2 = - File_key.compare_opt loc1.source loc2.source = 0 && not ( - (loc1._end.line < loc2.start.line) || - (loc1.start.line > loc2._end.line) - ) + File_key.compare_opt loc1.source loc2.source = 0 + && not (loc1._end.line < loc2.start.line || loc1.start.line > loc2._end.line) let compare loc1 loc2 = let k = File_key.compare_opt loc1.source loc2.source in if k = 0 then let k = pos_cmp loc1.start loc2.start in - if k = 0 then pos_cmp loc1._end loc2._end - else k - else k + if k = 0 then + pos_cmp loc1._end loc2._end + else + k + else + k let equal loc1 loc2 = compare loc1 loc2 = 0 @@ -98,31 +100,45 @@ let equal loc1 loc2 = compare loc1 loc2 = 0 * This is mostly useful for debugging purposes. * Please don't dead-code delete this! *) -let to_string ?(include_source=false) loc = +let debug_to_string ?(include_source = false) loc = let source = - if include_source - then Printf.sprintf "%S: " ( - match loc.source with - | Some src -> File_key.to_string src - | None -> "" - ) else "" + if include_source then + Printf.sprintf + "%S: " + (match loc.source with + | Some src -> File_key.to_string src + | None -> "") + else + "" in - let pos = Printf.sprintf "(%d, %d) to (%d, %d)" - loc.start.line - loc.start.column - loc._end.line - loc._end.column + let pos = + Printf.sprintf + "(%d, %d) to (%d, %d)" + loc.start.line + loc.start.column + loc._end.line + loc._end.column in source ^ pos +let to_string_no_source loc = + let line = loc.start.line in + let start = loc.start.column + 1 in + let end_ = loc._end.column in + if line <= 0 then + "0:0" + else if line = loc._end.line && start = end_ then + Printf.sprintf "%d:%d" line start + else if line != loc._end.line then + Printf.sprintf "%d:%d,%d:%d" line start loc._end.line end_ + else + Printf.sprintf "%d:%d-%d" line start end_ + let source loc = loc.source let make file line col = - { - source = Some file; - start = { line; column = col; offset = 0; }; - _end = { line; column = col + 1; offset = 0; }; - } - -let start_loc loc = {loc with _end = loc.start} -let end_loc loc = {loc with start = loc._end} + { source = Some file; start = { line; column = col }; _end = { line; column = col + 1 } } + +let start_loc loc = { loc with _end = loc.start } + +let end_loc loc = { loc with start = loc._end } diff --git a/src/parser/loc.mli b/src/parser/loc.mli index 61aed454a44..220642a8429 100644 --- a/src/parser/loc.mli +++ b/src/parser/loc.mli @@ -1,28 +1,55 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
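(* Annotation, not part of the patch: to_string_no_source above prints a
   compact, 1-based representation. Restated over bare (line, column) tuples
   so the branches are easy to eyeball; `show` is an illustrative name. *)
let show ((l1, c1), (l2, c2)) =
  let start = c1 + 1 and end_ = c2 in
  if l1 <= 0 then
    "0:0"
  else if l1 = l2 && start = end_ then
    Printf.sprintf "%d:%d" l1 start
  else if l1 <> l2 then
    Printf.sprintf "%d:%d,%d:%d" l1 start l2 end_
  else
    Printf.sprintf "%d:%d-%d" l1 start end_

let () =
  assert (show ((3, 4), (3, 5)) = "3:5");    (* a single character *)
  assert (show ((3, 4), (3, 9)) = "3:5-9");  (* a same-line range *)
  assert (show ((3, 4), (5, 1)) = "3:5,5:1") (* a multi-line range *)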
*) -type position = { line : int; column : int; offset : int; } [@@deriving show] -type t = { source : File_key.t option; start : position; _end : position; } [@@deriving show] +type position = { + line: int; + column: int; +} +[@@deriving show] + +type t = { + source: File_key.t option; + start: position; + _end: position; +} +[@@deriving show] + val none : t + val btwn : t -> t -> t -val btwn_exclusive : t -> t -> t + val char_before : t -> t -val first_char: t -> t + +val first_char : t -> t + val contains : t -> t -> bool + val lines_intersect : t -> t -> bool + val pos_cmp : position -> position -> int + val span_compare : t -> t -> int + val compare : t -> t -> int + val equal : t -> t -> bool -val to_string : ?include_source:bool -> t -> string + +val debug_to_string : ?include_source:bool -> t -> string + +(* Relatively compact; suitable for use as a unique string identifier *) +val to_string_no_source : t -> string + val source : t -> File_key.t option -(* filename, line, column. produces a Loc.t at the given location, with stubbed out offsets *) -val make: File_key.t -> int -> int -> t + +(* filename, line, column. produces a Loc.t at the given location *) +val make : File_key.t -> int -> int -> t + (* Produces a location at the start of the input location *) val start_loc : t -> t + (* Produces a location at the end of the input location *) val end_loc : t -> t diff --git a/src/parser/object_parser.ml b/src/parser/object_parser.ml index 8db04a7aa3a..4522264b31a 100644 --- a/src/parser/object_parser.ml +++ b/src/parser/object_parser.ml @@ -1,39 +1,42 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) module Ast = Flow_ast - open Token open Parser_env open Flow_ast -module Error = Parse_error -module SSet = Set.Make(String) - +module SMap = Map.Make (String) open Parser_common (* A module for parsing various object related things, like object literals * and classes *) module type OBJECT = sig - val key : ?class_body: bool -> env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.Property.key + val key : ?class_body:bool -> env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.Property.key + val _initializer : env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.t * pattern_errors - val class_declaration : env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list -> (Loc.t, Loc.t) Ast.Statement.t + + val class_declaration : + env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list -> (Loc.t, Loc.t) Ast.Statement.t + val class_expression : env -> (Loc.t, Loc.t) Ast.Expression.t - val class_implements : env -> (Loc.t, Loc.t) Ast.Class.Implements.t list -> (Loc.t, Loc.t) Ast.Class.Implements.t list + + val class_implements : + env -> (Loc.t, Loc.t) Ast.Class.Implements.t list -> (Loc.t, Loc.t) Ast.Class.Implements.t list + val decorator_list : env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list end module Object - (Parse: Parser_common.PARSER) - (Type: Type_parser.TYPE) - (Declaration: Declaration_parser.DECLARATION) - (Expression: Expression_parser.EXPRESSION) - (Pattern_cover: Pattern_cover.COVER) -: OBJECT = struct + (Parse : Parser_common.PARSER) + (Type : Type_parser.TYPE) + (Declaration : Declaration_parser.DECLARATION) + (Expression : Expression_parser.EXPRESSION) + (Pattern_cover : Pattern_cover.COVER) : OBJECT = struct let decorator_list = let decorator env = Eat.token env; @@ -41,653 +44,744 @@ module Object in let rec decorator_list_helper env decorators = match Peek.token env with - | T_AT -> decorator_list_helper env ((with_loc decorator env)::decorators) + | T_AT -> decorator_list_helper env (with_loc decorator env :: decorators) | _ -> decorators in fun env -> - if (parse_options env).esproposal_decorators - then List.rev (decorator_list_helper env []) - else [] - - let key ?(class_body=false) env = - let open Ast.Expression.Object.Property in - match Peek.token env with - | T_STRING (loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; + if (parse_options env).esproposal_decorators then + List.rev (decorator_list_helper env []) + else + [] + + let key ?(class_body = false) env = + Ast.Expression.Object.Property.( + let leading = Peek.comments env in + let tkn = Peek.token env in + let trailing = Peek.comments env in + match tkn with + | T_STRING (loc, value, raw, octal) -> + if octal then strict_error env Parse_error.StrictOctalLiteral; Expect.token env (T_STRING (loc, value, raw, octal)); let value = Literal.String value in - loc, Literal (loc, { Literal.value; raw; }) - | T_NUMBER { kind; raw } -> + ( loc, + Literal + ( loc, + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + } ) ) + | T_NUMBER { kind; raw } -> let loc = Peek.loc env in let value = Expression.number env kind raw in let value = Literal.Number value in - loc, Literal (loc, { Literal.value; raw; }) - | T_LBRACKET -> - let start_loc = Peek.loc env in - Expect.token env T_LBRACKET; - let expr = Parse.assignment (env |> with_no_in false) in - let end_loc = Peek.loc env in - Expect.token env T_RBRACKET; - Loc.btwn start_loc end_loc, Ast.Expression.Object.Property.Computed expr - | T_POUND when class_body -> - let loc, id, _is_private = 
Expression.property_name_include_private env in - add_declared_private env (snd id); - loc, PrivateName (loc, id) - | _ -> - let loc, id, is_private = Expression.property_name_include_private env in + ( loc, + Literal + ( loc, + { + Literal.value; + raw; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + } ) ) + | T_LBRACKET -> + with_loc + (fun env -> + Expect.token env T_LBRACKET; + let expr = Parse.assignment (env |> with_no_in false) in + Expect.token env T_RBRACKET; + Ast.Expression.Object.Property.Computed expr) + env + | T_POUND when class_body -> + let (loc, id, _is_private) = Expression.property_name_include_private env in + add_declared_private env (Flow_ast_utils.name_of_ident id); + (loc, PrivateName (loc, id)) + | _ -> + let (loc, id, is_private) = Expression.property_name_include_private env in if is_private then error_at env (loc, Parse_error.PrivateNotInClass); - loc, Identifier id + (loc, Identifier id)) - let getter_or_setter env is_getter = + let getter_or_setter env ~in_class_body is_getter = (* this is a getter or setter, it cannot be async *) let async = false in let generator = Declaration.generator env in - let key_loc, key = key env in - let start_loc = Peek.loc env in - - (* #sec-function-definitions-static-semantics-early-errors *) - let env = env |> with_allow_super Super_prop in - - (* It's not clear how type params on getters & setters would make sense - * in Flow's type system. Since this is a Flow syntax extension, we might - * as well disallow it until we need it *) - let tparams = None in - let params = Declaration.function_params ~await:false ~yield:false env in - begin match is_getter, params with - | true, (_, { Ast.Function.Params.params = []; rest = None }) -> () - | false, (_, { Ast.Function.Params.rest = Some _; _ }) -> - (* rest params don't make sense on a setter *) - error_at env (key_loc, Error.SetterArity) - | false, (_, { Ast.Function.Params.params = [_]; _ }) -> () - | true, _ -> error_at env (key_loc, Error.GetterArity) - | false, _ -> error_at env (key_loc, Error.SetterArity) - end; - let return = match Type.annotation_opt env with - | Some annotation -> Ast.Function.Available annotation - | None -> Ast.Function.Missing (Peek.loc_skip_lookeahead env) + let (key_loc, key) = key ~class_body:in_class_body env in + let value = + with_loc + (fun env -> + (* #sec-function-definitions-static-semantics-early-errors *) + let env = env |> with_allow_super Super_prop in + let (sig_loc, (tparams, params, return)) = + with_loc + (fun env -> + (* It's not clear how type params on getters & setters would make sense + * in Flow's type system. 
Since this is a Flow syntax extension, we might + * as well disallow it until we need it *) + let tparams = None in + let params = Declaration.function_params ~await:false ~yield:false env in + begin + match (is_getter, params) with + | (true, (_, { Ast.Function.Params.params = []; rest = None })) -> () + | (false, (_, { Ast.Function.Params.rest = Some _; _ })) -> + (* rest params don't make sense on a setter *) + error_at env (key_loc, Parse_error.SetterArity) + | (false, (_, { Ast.Function.Params.params = [_]; _ })) -> () + | (true, _) -> error_at env (key_loc, Parse_error.GetterArity) + | (false, _) -> error_at env (key_loc, Parse_error.SetterArity) + end; + let return = Type.annotation_opt env in + (tparams, params, return)) + env + in + let (body, strict) = Declaration.function_body env ~async ~generator in + let simple = Declaration.is_simple_function_params params in + Declaration.strict_post_check env ~strict ~simple None params; + { + Function.id = None; + params; + body; + generator; + async; + predicate = None; + (* setters/getter are not predicates *) + return; + tparams; + sig_loc; + }) + env in - let _, body, strict = Declaration.function_body env ~async ~generator in - let simple = Declaration.is_simple_function_params params in - Declaration.strict_post_check env ~strict ~simple None params; - let end_loc, expression = Function.( - match body with - | BodyBlock (loc, _) -> loc, false - | BodyExpression (loc, _) -> loc, true) in - let loc = Loc.btwn start_loc end_loc in - let value = loc, Function.({ - id = None; - params; - body; - generator; - async; - predicate = None; (* setters/getter are not predicates *) - expression; - return; - tparams; - }) in - key, value + (key, value) let _initializer = let parse_assignment_cover env = match Expression.assignment_cover env with - | Cover_expr expr -> expr, Pattern_cover.empty_errors - | Cover_patt (expr, errs) -> expr, errs + | Cover_expr expr -> (expr, Pattern_cover.empty_errors) + | Cover_patt (expr, errs) -> (expr, errs) in - let rec property env = - let open Ast.Expression.Object in - let start_loc = Peek.loc env in - if Peek.token env = T_ELLIPSIS - then begin - (* Spread property *) - Expect.token env T_ELLIPSIS; - let argument, errs = parse_assignment_cover env in - SpreadProperty (Loc.btwn start_loc (fst argument), SpreadProperty.({ - argument; - })), errs - end else begin - let async = match Peek.ith_token ~i:1 env with - | T_ASSIGN (* { async = true } (destructuring) *) - | T_COLON (* { async: true } *) - | T_LESS_THAN (* { async() {} } *) - | T_LPAREN (* { async() {} } *) - | T_COMMA (* { async, other, shorthand } *) - | T_RCURLY (* { async } *) - -> false - | _ - -> Declaration.async env + let get env start_loc = + let (loc, (key, value)) = + with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false true) env + in + Ast.Expression.Object.(Property (loc, Property.Get { key; value })) + in + let set env start_loc = + let (loc, (key, value)) = + with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false false) env + in + Ast.Expression.Object.(Property (loc, Property.Set { key; value })) + in + (* #prod-PropertyDefinition *) + let init = + Ast.Expression.Object.Property.( + (* #prod-IdentifierReference *) + let parse_shorthand env key = + match key with + | Literal (loc, lit) -> + error_at env (loc, Parse_error.LiteralShorthandProperty); + (loc, Ast.Expression.Literal lit) + | Identifier ((loc, { Identifier.name; comments = _ }) as id) -> + (* #sec-identifiers-static-semantics-early-errors 
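(* Annotation, not part of the patch: much of this rewrite replaces manual
   `Loc.btwn start _end` bookkeeping with `with_loc`, which, judging from its
   uses here, runs a sub-parser and pairs its result with the span it consumed
   (optionally forcing the start position). Miniature with an integer cursor
   standing in for the parser env and for Loc.t: *)
type env = { mutable pos: int }

let with_loc ?start_loc f env =
  let start =
    match start_loc with
    | Some s -> s
    | None -> env.pos
  in
  let result = f env in
  ((start, env.pos), result)

let () =
  let env = { pos = 10 } in
  let consume n env =
    env.pos <- env.pos + n;
    "node"
  in
  assert (with_loc (consume 5) env = ((10, 15), "node"));
  assert (with_loc ~start_loc:0 (consume 1) env = ((0, 16), "node"))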
*) + if is_reserved name && name <> "yield" && name <> "await" then + (* it is a syntax error if `name` is a reserved word other than await or yield *) + error_at env (loc, Parse_error.UnexpectedReserved) + else if is_strict_reserved name then + (* it is a syntax error if `name` is a strict reserved word, in strict mode *) + strict_error_at env (loc, Parse_error.StrictReservedWord); + (loc, Ast.Expression.Identifier id) + | PrivateName _ -> failwith "Internal Error: private name found in object props" + | Computed expr -> + error_at env (fst expr, Parse_error.ComputedShorthandProperty); + expr in - let generator = Declaration.generator env in - match async, generator, Peek.token env with - | false, false, T_IDENTIFIER { raw = "get"; _ } -> - let _, key = key env in - begin match Peek.token env with - | T_ASSIGN - | T_COLON + (* #prod-MethodDefinition *) + let parse_method ~async ~generator = + with_loc (fun env -> + (* #sec-function-definitions-static-semantics-early-errors *) + let env = env |> with_allow_super Super_prop in + let (sig_loc, (tparams, params, return)) = + with_loc + (fun env -> + let tparams = Type.type_parameter_declaration env in + let params = + let (yield, await) = + match (async, generator) with + | (true, true) -> (true, true) + (* proposal-async-iteration/#prod-AsyncGeneratorMethod *) + | (true, false) -> (false, allow_await env) (* #prod-AsyncMethod *) + | (false, true) -> (true, false) (* #prod-GeneratorMethod *) + | (false, false) -> (false, false) + (* #prod-MethodDefinition *) + in + Declaration.function_params ~await ~yield env + in + let return = Type.annotation_opt env in + (tparams, params, return)) + env + in + let (body, strict) = Declaration.function_body env ~async ~generator in + let simple = Declaration.is_simple_function_params params in + Declaration.strict_post_check env ~strict ~simple None params; + { + Function.id = None; + params; + body; + generator; + async; + (* TODO: add support for object method predicates *) + predicate = None; + return; + tparams; + sig_loc; + }) + in + (* PropertyName `:` AssignmentExpression *) + let parse_value env = + Expect.token env T_COLON; + parse_assignment_cover env + in + (* #prod-CoverInitializedName *) + let parse_assignment_pattern ~key env = + Ast.Expression.Object.( + match key with + | Property.Identifier id -> + let assignment_loc = Peek.loc env in + let ast = + with_loc + ~start_loc:(fst id) + (fun env -> + Expect.token env T_ASSIGN; + let left = + Parse.pattern_from_expr env (fst id, Ast.Expression.Identifier id) + in + let right = Parse.assignment env in + Ast.Expression.Assignment + { Ast.Expression.Assignment.operator = None; left; right }) + env + in + let errs = + { + if_expr = + [(assignment_loc, Parse_error.Unexpected (Token.quote_token_value "="))]; + if_patt = []; + } + in + (ast, errs) + | Property.Literal _ + | Property.PrivateName _ + | Property.Computed _ -> + parse_value env) + in + let parse_init ~key ~async ~generator env = + if async || generator then + (* the `async` and `*` modifiers are only valid on methods *) + let value = parse_method env ~async ~generator in + let prop = Method { key; value } in + (prop, Pattern_cover.empty_errors) + else + match Peek.token env with + | T_RCURLY + | T_COMMA -> + let value = parse_shorthand env key in + let prop = Init { key; value; shorthand = true } in + (prop, Pattern_cover.empty_errors) | T_LESS_THAN - | T_LPAREN - | T_COMMA - | T_RCURLY -> init env start_loc key false false - | _ -> get env start_loc, Pattern_cover.empty_errors - end - | 
false, false, T_IDENTIFIER { raw = "set"; _ } -> - let _, key = key env in - begin match Peek.token env with + | T_LPAREN -> + let value = parse_method env ~async ~generator in + let prop = Method { key; value } in + (prop, Pattern_cover.empty_errors) + | T_ASSIGN -> + let (value, errs) = parse_assignment_pattern ~key env in + let prop = Init { key; value; shorthand = true } in + (prop, errs) + | _ -> + let (value, errs) = parse_value env in + let prop = Init { key; value; shorthand = false } in + (prop, errs) + in + fun env start_loc key async generator -> + let (loc, (prop, errs)) = with_loc ~start_loc (parse_init ~key ~async ~generator) env in + (Ast.Expression.Object.Property (loc, prop), errs)) + in + let property env = + Ast.Expression.Object.( + if Peek.token env = T_ELLIPSIS then + (* Spread property *) + let (loc, (argument, errs)) = + with_loc + (fun env -> + Expect.token env T_ELLIPSIS; + parse_assignment_cover env) + env + in + (SpreadProperty (loc, { SpreadProperty.argument }), errs) + else + let start_loc = Peek.loc env in + let async = + match Peek.ith_token ~i:1 env with | T_ASSIGN + (* { async = true } (destructuring) *) + | T_COLON + (* { async: true } *) + | T_LESS_THAN + (* { async() {} } *) + | T_LPAREN + (* { async() {} } *) + | T_COMMA - | T_RCURLY -> init env start_loc key false false - | _ -> set env start_loc, Pattern_cover.empty_errors + (* { async, other, shorthand } *) + + | T_RCURLY (* { async } *) -> + false + | _ -> Declaration.async env + in + let generator = Declaration.generator env in + match (async, generator, Peek.token env) with + | (false, false, T_IDENTIFIER { raw = "get"; _ }) -> + let (_, key) = key env in + begin + match Peek.token env with + | T_ASSIGN + | T_COLON + | T_LESS_THAN + | T_LPAREN + | T_COMMA + | T_RCURLY -> + init env start_loc key false false + | _ -> (get env start_loc, Pattern_cover.empty_errors) end - | async, generator, _ -> - let _, key = key env in - init env start_loc key async generator - end - - and get env start_loc = - let key, (end_loc, fn) = getter_or_setter env true in - let loc = Loc.btwn start_loc end_loc in - Ast.Expression.Object.(Property (loc, Property.Get { - key; - value = (end_loc, fn); - })) - - and set env start_loc = - let key, (end_loc, fn) = getter_or_setter env false in - let loc = Loc.btwn start_loc end_loc in - Ast.Expression.Object.(Property (loc, Property.Set { - key; - value = (end_loc, fn); - })) - - (* #prod-PropertyDefinition *) - and init = - let open Ast.Expression.Object.Property in - - (* #prod-IdentifierReference *) - let parse_shorthand env key = match key with - | Literal (loc, lit) -> - error_at env (loc, Parse_error.LiteralShorthandProperty); - loc, Ast.Expression.Literal lit - | Identifier ((loc, name) as id) -> - (* #sec-identifiers-static-semantics-early-errors *) + | (false, false, T_IDENTIFIER { raw = "set"; _ }) -> + let (_, key) = key env in begin - if is_reserved name && name <> "yield" && name <> "await" then - (* it is a syntax error if `name` is a reserved word other than await or yield *) - error_at env (loc, Parse_error.UnexpectedReserved) - else if is_strict_reserved name then - (* it is a syntax error if `name` is a strict reserved word, in strict mode *) - strict_error_at env (loc, Parse_error.StrictReservedWord) - end; - loc, Ast.Expression.Identifier id - | PrivateName _ -> failwith "Internal Error: private name found in object props" - | Computed expr -> - error_at env (fst expr, Parse_error.ComputedShorthandProperty); - expr - in - - (* #prod-MethodDefinition *) - 
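(* Annotation, not part of the patch: "get", "set", and "async" above are only
   treated as modifiers when the next token cannot continue an ordinary
   property. The one-token lookahead reduces to a predicate like this (token
   names spelled as strings to keep the sketch self-contained): *)
let is_ordinary_property_continuation = function
  | "T_ASSIGN"     (* { get = 1 }   destructuring default *)
  | "T_COLON"      (* { get: 1 } *)
  | "T_LESS_THAN"  (* { get<T>() {} } *)
  | "T_LPAREN"     (* { get() {} } *)
  | "T_COMMA"      (* { get, x } *)
  | "T_RCURLY"     (* { get } *) ->
    true
  | _ -> false

let () = assert (not (is_ordinary_property_continuation "T_IDENTIFIER"))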
let parse_method env ~async ~generator = - let start_loc = Peek.loc env in - - (* #sec-function-definitions-static-semantics-early-errors *) - let env = env |> with_allow_super Super_prop in - - let tparams = Type.type_parameter_declaration env in - let params = - let yield, await = match async, generator with - | true, true -> true, true (* proposal-async-iteration/#prod-AsyncGeneratorMethod *) - | true, false -> false, allow_await env (* #prod-AsyncMethod *) - | false, true -> true, false (* #prod-GeneratorMethod *) - | false, false -> false, false (* #prod-MethodDefinition *) - in - Declaration.function_params ~await ~yield env - in - let return = match Type.annotation_opt env with - | Some annotation -> Ast.Function.Available annotation - | None -> Ast.Function.Missing (Peek.loc_skip_lookeahead env) - in - let _, body, strict = - Declaration.function_body env ~async ~generator in - let simple = Declaration.is_simple_function_params params in - Declaration.strict_post_check env ~strict ~simple None params; - let end_loc, expression = match body with - | Function.BodyBlock (loc, _) -> loc, false - | Function.BodyExpression (loc, _) -> loc, true - in - let loc = Loc.btwn start_loc end_loc in - loc, Function.({ - id = None; - params; - body; - generator; - async; - (* TODO: add support for object method predicates *) - predicate = None; - expression; - return; - tparams; - }) - in - - (* PropertyName `:` AssignmentExpression *) - let parse_value env = - Expect.token env T_COLON; - parse_assignment_cover env - in - - (* #prod-CoverInitializedName *) - let parse_assignment_pattern ~key env = - let open Ast.Expression.Object in - match key with - | Property.Identifier id -> - let assignment_loc = Peek.loc env in - Expect.token env T_ASSIGN; - let left = Parse.pattern_from_expr env (fst id, Ast.Expression.Identifier id) in - let right = Parse.assignment env in - let loc = Loc.btwn (fst left) (fst right) in - (loc, Ast.Expression.(Assignment Assignment.({ - operator = Assign; - left; - right; - }))), { - if_expr = [assignment_loc, Parse_error.UnexpectedToken "="]; - if_patt = []; - } - - | Property.Literal _ - | Property.PrivateName _ - | Property.Computed _ -> - parse_value env - in - - let parse_init ~key ~async ~generator env = - if async || generator then - (* the `async` and `*` modifiers are only valid on methods *) - let value = parse_method env ~async ~generator in - let prop = Method { key; value } in - prop, Pattern_cover.empty_errors - else match Peek.token env with - | T_RCURLY - | T_COMMA -> - let value = parse_shorthand env key in - let prop = Init { key; value; shorthand = true } in - prop, Pattern_cover.empty_errors - | T_LESS_THAN - | T_LPAREN -> - let value = parse_method env ~async ~generator in - let prop = Method { key; value } in - prop, Pattern_cover.empty_errors - | T_ASSIGN -> - let value, errs = parse_assignment_pattern ~key env in - let prop = Init { key; value; shorthand = true } in - prop, errs - | _ -> - let value, errs = parse_value env in - let prop = Init { key; value; shorthand = false } in - prop, errs - in - fun env start_loc key async generator -> - let end_loc, (prop, errs) = with_loc ( - parse_init ~key ~async ~generator - ) env in - Ast.Expression.Object.Property (Loc.btwn start_loc end_loc, prop), errs - - and properties env ~rest_trailing_comma (props, errs) = + match Peek.token env with + | T_ASSIGN + | T_COLON + | T_LESS_THAN + | T_LPAREN + | T_COMMA + | T_RCURLY -> + init env start_loc key false false + | _ -> (set env start_loc, 
Pattern_cover.empty_errors) + end + | (async, generator, _) -> + let (_, key) = key env in + init env start_loc key async generator) + in + let rec properties env ~rest_trailing_comma (props, errs) = match Peek.token env with | T_EOF | T_RCURLY -> - let errs = match rest_trailing_comma with - | Some loc -> - { errs with if_patt = (loc, Parse_error.TrailingCommaAfterRestElement)::errs.if_patt } - | None -> errs in - List.rev props, Pattern_cover.rev_errors errs + let errs = + match rest_trailing_comma with + | Some loc -> + { + errs with + if_patt = (loc, Parse_error.TrailingCommaAfterRestElement) :: errs.if_patt; + } + | None -> errs + in + (List.rev props, Pattern_cover.rev_errors errs) | _ -> - let prop, new_errs = property env in - let rest_trailing_comma = match prop with + let (prop, new_errs) = property env in + let rest_trailing_comma = + match prop with | Ast.Expression.Object.SpreadProperty _ when Peek.token env = T_COMMA -> Some (Peek.loc env) - | _ -> None in - if Peek.token env <> T_RCURLY then Expect.token env T_COMMA; - let errs = Pattern_cover.rev_append_errors new_errs errs in - properties env ~rest_trailing_comma (prop::props, errs) - - in fun env -> - let loc, (expr, errs) = with_loc (fun env -> - Expect.token env T_LCURLY; - let props, errs = - properties env ~rest_trailing_comma:None ([], Pattern_cover.empty_errors) in - Expect.token env T_RCURLY; - { Ast.Expression.Object.properties = props; }, errs - ) env in - loc, expr, errs + | _ -> None + in + if Peek.token env <> T_RCURLY then Expect.token env T_COMMA; + let errs = Pattern_cover.rev_append_errors new_errs errs in + properties env ~rest_trailing_comma (prop :: props, errs) + in + fun env -> + let (loc, (expr, errs)) = + with_loc + (fun env -> + let leading = Peek.comments env in + Expect.token env T_LCURLY; + let (props, errs) = + properties env ~rest_trailing_comma:None ([], Pattern_cover.empty_errors) + in + Expect.token env T_RCURLY; + let trailing = Peek.comments env in + ( { + Ast.Expression.Object.properties = props; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }, + errs )) + env + in + (loc, expr, errs) let rec class_implements env acc = - let id = Type.type_identifier env in - let targs = Type.type_parameter_instantiation env in - let loc = match targs with - | None -> fst id - | Some (loc, _) -> Loc.btwn (fst id) loc in - let implement = loc, Ast.Class.Implements.({ - id; - targs; - }) in - let acc = implement::acc in + let implement = + with_loc + (fun env -> + let id = Type.type_identifier env in + let targs = Type.type_parameter_instantiation env in + { Ast.Class.Implements.id; targs }) + env + in + let acc = implement :: acc in match Peek.token env with | T_COMMA -> - Expect.token env T_COMMA; - class_implements env acc + Expect.token env T_COMMA; + class_implements env acc | _ -> List.rev acc - let class_extends = with_loc (fun env -> - let expr = Expression.left_hand_side (env |> with_allow_yield false) in - let targs = Type.type_parameter_instantiation env in - { Class.Extends.expr; targs } - ) + let class_extends = + with_loc (fun env -> + let expr = Expression.left_hand_side (env |> with_allow_yield false) in + let targs = Type.type_parameter_instantiation env in + { Class.Extends.expr; targs }) let rec _class env = let extends = - if Expect.maybe env T_EXTENDS - then Some (class_extends env) - else None in + if Expect.maybe env T_EXTENDS then + Some (class_extends env) + else + None + in let implements = - if Peek.token env = T_IMPLEMENTS - then begin - if not 
(should_parse_types env) - then error env Error.UnexpectedTypeInterface; + if Peek.token env = T_IMPLEMENTS then ( + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface; Expect.token env T_IMPLEMENTS; class_implements env [] - end else [] in + ) else + [] + in let body = class_body env in - body, extends, implements + (body, extends, implements) + + and check_property_name env loc name static = + if String.equal name "constructor" || (String.equal name "prototype" && static) then + error_at env (loc, Parse_error.InvalidFieldName { name; static; private_ = false }) + + and check_private_names env seen_names private_name (kind : [ `Field | `Getter | `Setter ]) = + let (loc, (_, { Identifier.name; comments = _ })) = private_name in + if String.equal name "constructor" then + let () = + error_at env (loc, Parse_error.InvalidFieldName { name; static = false; private_ = true }) + in + seen_names + else + match SMap.find_opt name seen_names with + | Some seen -> + begin + match (kind, seen) with + | (`Getter, `Setter) + | (`Setter, `Getter) -> + (* one getter and one setter are allowed as long as it's not used as a field *) + () + | _ -> error_at env (loc, Parse_error.DuplicatePrivateFields name) + end; + SMap.add name `Field seen_names + | None -> SMap.add name kind seen_names and class_body = let rec elements env seen_constructor private_names acc = match Peek.token env with | T_EOF - | T_RCURLY -> List.rev acc + | T_RCURLY -> + List.rev acc | T_SEMICOLON -> - (* Skip empty elements *) - Expect.token env T_SEMICOLON; - elements env seen_constructor private_names acc + (* Skip empty elements *) + Expect.token env T_SEMICOLON; + elements env seen_constructor private_names acc | _ -> - let element = class_element env in - let seen_constructor', private_names' = begin match element with + let element = class_element env in + let (seen_constructor', private_names') = + match element with | Ast.Class.Body.Method (loc, m) -> - let open Ast.Class.Method in - begin match m.kind with - | Constructor when not m.static -> - if seen_constructor then - error_at env (loc, Error.DuplicateConstructor); - (true, private_names) - | Method -> - (seen_constructor, begin match m.key with - | Ast.Expression.Object.Property.PrivateName _ -> - error_at env (loc, Error.PrivateMethod); - private_names - | _ -> private_names - end) - | _ -> (seen_constructor, private_names) - end - | Ast.Class.Body.Property (loc, p) -> - let open Ast.Expression.Object.Property in - (seen_constructor, begin match p.Ast.Class.Property.key with - | Identifier (_, x) when String.equal x "constructor" || - (String.equal x "prototype" && p.Ast.Class.Property.static) -> - error_at env (loc, Error.InvalidFieldName (x, String.equal x "prototype", false)); - private_names - | _ -> private_names + Ast.Class.Method.( + begin + match m.kind with + | Constructor -> + if m.static then + (seen_constructor, private_names) + else ( + if seen_constructor then error_at env (loc, Parse_error.DuplicateConstructor); + (true, private_names) + ) + | Method -> + ( seen_constructor, + begin + match m.key with + | Ast.Expression.Object.Property.PrivateName _ -> + error_at env (loc, Parse_error.PrivateMethod); + private_names + | _ -> private_names + end ) + | Get -> + let private_names = + match m.key with + | Ast.Expression.Object.Property.PrivateName name -> + check_private_names env private_names name `Getter + | _ -> private_names + in + (seen_constructor, private_names) + | Set -> + let private_names = + match m.key with + | 
Ast.Expression.Object.Property.PrivateName name -> + check_private_names env private_names name `Setter + | _ -> private_names + in + (seen_constructor, private_names) end) - | Ast.Class.Body.PrivateField (_, {Ast.Class.PrivateField.key = (loc, (_, name)); _}) - when String.equal name "#constructor" -> - error_at env (loc, Error.InvalidFieldName (name, false, true)); - (seen_constructor, private_names) - | Ast.Class.Body.PrivateField (_, {Ast.Class.PrivateField.key = (loc, (_, name)); _}) -> - if SSet.mem name private_names then - error_at env (loc, Error.DuplicatePrivateFields name); - (seen_constructor, SSet.add name private_names) - end in - elements env seen_constructor' private_names' (element::acc) - - in fun env -> - let start_loc = Peek.loc env in - Expect.token env T_LCURLY; - enter_class env; - let body = elements env false SSet.empty [] in - exit_class env; - let end_loc = Peek.loc env in - Expect.token env T_RCURLY; - Loc.btwn start_loc end_loc, Ast.Class.Body.({ - body; - }) + | Ast.Class.Body.Property (_, { Ast.Class.Property.key; static; _ }) -> + Ast.Expression.Object.Property.( + begin + match key with + | Identifier (loc, { Identifier.name; comments = _ }) + | Literal (loc, { Literal.value = Literal.String name; _ }) -> + check_property_name env loc name static + | Literal _ + | Computed _ -> + () + | PrivateName _ -> + failwith "unexpected PrivateName in Property, expected a PrivateField" + end; + (seen_constructor, private_names)) + | Ast.Class.Body.PrivateField (_, { Ast.Class.PrivateField.key; _ }) -> + let private_names = check_private_names env private_names key `Field in + (seen_constructor, private_names) + in + elements env seen_constructor' private_names' (element :: acc) + in + fun env -> + with_loc + (fun env -> + Expect.token env T_LCURLY; + enter_class env; + let body = elements env false SMap.empty [] in + exit_class env; + Expect.token env T_RCURLY; + { Ast.Class.Body.body }) + env (* In the ES6 draft, all elements are methods. 
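(* ---------------------------------------------------------------------- *)
(* Illustrative sketch (standalone, not part of the diff above): a minimal
   replay of the `check_private_names` bookkeeping, assuming only the
   standard library and ignoring the `#constructor` special case. One
   getter plus one setter may share a private name; any other repetition
   is a duplicate, and after a second sighting the name is recorded as a
   plain field, mirroring the hunk above. Error reporting is reduced to
   printing, and all names here are hypothetical. *)
module SMap = Map.Make (String)

type kind =
  [ `Field
  | `Getter
  | `Setter
  ]

let check seen (name : string) (kind : kind) =
  match SMap.find_opt name seen with
  | Some seen_kind ->
    (match (kind, seen_kind) with
    | (`Getter, `Setter)
    | (`Setter, `Getter) ->
      (* a getter/setter pair is allowed *)
      ()
    | _ -> Printf.printf "duplicate private name #%s\n" name);
    (* once a name has been seen twice it behaves like a plain field *)
    SMap.add name `Field seen
  | None -> SMap.add name kind seen

let () =
  let seen = SMap.empty in
  let seen = check seen "x" `Getter in
  (* ok: a setter may complete the pair *)
  let seen = check seen "x" `Setter in
  (* duplicate: the getter/setter pair is already used *)
  let seen = check seen "x" `Getter in
  ignore (check seen "y" `Field)
(* ---------------------------------------------------------------------- *)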
No properties (though there * are getter and setters allowed *) and class_element = let get env start_loc decorators static = - let key, (end_loc, _ as value) = - getter_or_setter env true in - Ast.Class.(Body.Method (Loc.btwn start_loc end_loc, Method.({ - key; - value; - kind = Get; - static; - decorators; - }))) - - in let set env start_loc decorators static = - let key, (end_loc, _ as value) = - getter_or_setter env false in - Ast.Class.(Body.Method (Loc.btwn start_loc end_loc, Method.({ - key; - value; - kind = Set; - static; - decorators; - }))) - - in let error_unsupported_variance env = function - | Some (loc, _) -> error_at env (loc, Error.UnexpectedVariance) - | None -> () - - in let rec init env start_loc decorators key async generator static variance = + let (loc, (key, value)) = + with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true true) env + in + Ast.Class.(Body.Method (loc, { Method.key; value; kind = Method.Get; static; decorators })) + in + let set env start_loc decorators static = + let (loc, (key, value)) = + with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true false) env + in + Ast.Class.(Body.Method (loc, { Method.key; value; kind = Method.Set; static; decorators })) + in + let error_unsupported_variance env = function + | Some (loc, _) -> error_at env (loc, Parse_error.UnexpectedVariance) + | None -> () + (* Class property with annotation *) + in + let property env start_loc key static variance = + let (loc, (annot, value)) = + with_loc + ~start_loc + (fun env -> + let annot = Type.annotation_opt env in + let options = parse_options env in + let value = + if Peek.token env = T_ASSIGN then + if + (static && options.esproposal_class_static_fields) + || ((not static) && options.esproposal_class_instance_fields) + then ( + Expect.token env T_ASSIGN; + Some (Parse.expression (env |> with_allow_super Super_prop)) + ) else + None + else + None + in + if Expect.maybe env T_SEMICOLON then + () + else if Peek.token env == T_LBRACKET || Peek.token env == T_LPAREN then + error_unexpected env; + (annot, value)) + env + in + match key with + | Ast.Expression.Object.Property.PrivateName private_name -> + Ast.Class.( + Body.PrivateField + (loc, { PrivateField.key = private_name; value; annot; static; variance })) + | _ -> Ast.Class.(Body.Property (loc, { Property.key; value; annot; static; variance })) + in + let rec init env start_loc decorators key async generator static variance = match Peek.token env with | T_COLON | T_ASSIGN - | T_SEMICOLON when not async && not generator -> - (* Class property with annotation *) - let end_loc, (annot, value) = with_loc (fun env -> - let annot = Type.annotation_opt env in - let options = parse_options env in - let value = - if Peek.token env = T_ASSIGN then ( - if static && options.esproposal_class_static_fields - || (not static) && options.esproposal_class_instance_fields - then begin - Expect.token env T_ASSIGN; - Some (Parse.expression (env |> with_allow_super Super_prop)) - end else None - ) else None - in - begin if Expect.maybe env T_SEMICOLON then - () - else if Peek.token env == T_LBRACKET || Peek.token env == T_LPAREN then - error_unexpected env - end; - annot, value - ) env in - let loc = Loc.btwn start_loc end_loc in - begin match key with - | Ast.Expression.Object.Property.PrivateName private_name -> - Ast.Class.(Body.PrivateField (loc, PrivateField.({ - key = private_name; - value; - annot; - static; - variance; - }))) - | _ -> Ast.Class.(Body.Property (loc, Property.({ - key; - value; - 
annot; - static; - variance; - }))) end + | T_SEMICOLON + | T_RCURLY + when (not async) && not generator -> + property env start_loc key static variance | T_PLING -> (* TODO: add support for optional class properties *) error_unexpected env; Eat.token env; init env start_loc decorators key async generator static variance + | _ when Peek.is_implicit_semicolon env -> + (* an uninitialized, unannotated property *) + property env start_loc key static variance | _ -> error_unsupported_variance env variance; - let kind, env = match static, key with - | false, Ast.Expression.Object.Property.Identifier (_, "constructor") - | false, Ast.Expression.Object.Property.Literal (_, { - Literal.value = Literal.String "constructor"; - _; - }) -> - Ast.Class.Method.Constructor, - env |> with_allow_super Super_prop_or_call - | _ -> - Ast.Class.Method.Method, - env |> with_allow_super Super_prop - in - let func_loc = Peek.loc env in - let tparams = Type.type_parameter_declaration env in - let params = - let yield, await = match async, generator with - | true, true -> true, true (* proposal-async-iteration/#prod-AsyncGeneratorMethod *) - | true, false -> false, allow_await env (* #prod-AsyncMethod *) - | false, true -> true, false (* #prod-GeneratorMethod *) - | false, false -> false, false (* #prod-MethodDefinition *) - in - Declaration.function_params ~await ~yield env + let (kind, env) = + match (static, key) with + | ( false, + Ast.Expression.Object.Property.Identifier + (_, { Identifier.name = "constructor"; comments = _ }) ) + | ( false, + Ast.Expression.Object.Property.Literal + (_, { Literal.value = Literal.String "constructor"; _ }) ) -> + (Ast.Class.Method.Constructor, env |> with_allow_super Super_prop_or_call) + | _ -> (Ast.Class.Method.Method, env |> with_allow_super Super_prop) in - let return = match Type.annotation_opt env with - | Some annotation -> Ast.Function.Available annotation - | None -> Ast.Function.Missing (Peek.loc_skip_lookeahead env) + let value = + with_loc + (fun env -> + let (sig_loc, (tparams, params, return)) = + with_loc + (fun env -> + let tparams = Type.type_parameter_declaration env in + let params = + let (yield, await) = + match (async, generator) with + | (true, true) -> (true, true) + (* proposal-async-iteration/#prod-AsyncGeneratorMethod *) + | (true, false) -> (false, allow_await env) (* #prod-AsyncMethod *) + | (false, true) -> (true, false) (* #prod-GeneratorMethod *) + | (false, false) -> (false, false) + (* #prod-MethodDefinition *) + in + Declaration.function_params ~await ~yield env + in + let return = Type.annotation_opt env in + (tparams, params, return)) + env + in + let (body, strict) = Declaration.function_body env ~async ~generator in + let simple = Declaration.is_simple_function_params params in + Declaration.strict_post_check env ~strict ~simple None params; + { + Function.id = None; + params; + body; + generator; + async; + (* TODO: add support for method predicates *) + predicate = None; + return; + tparams; + sig_loc; + }) + env in - let _, body, strict = - Declaration.function_body env ~async ~generator in - let simple = Declaration.is_simple_function_params params in - Declaration.strict_post_check env ~strict ~simple None params; - let end_loc, expression = Function.( - match body with - | BodyBlock (loc, _) -> loc, false - | BodyExpression (loc, _) -> loc, true) in - let loc = Loc.btwn func_loc end_loc in - let value = loc, Function.({ - id = None; - params; - body; - generator; - async; - (* TODO: add support for method predicates *) - predicate 
= None; - expression; - return; - tparams; - }) in - Ast.Class.(Body.Method (Loc.btwn start_loc end_loc, Method.({ - key; - value; - kind; - static; - decorators; - }))) - - in fun env -> + Ast.Class.( + Body.Method + (Loc.btwn start_loc (fst value), { Method.key; value; kind; static; decorators })) + in + let ith_implies_identifier ~i env = + match Peek.ith_token ~i env with + | T_LESS_THAN + | T_COLON + | T_ASSIGN + | T_SEMICOLON + | T_LPAREN + | T_RCURLY -> + true + | _ -> false + in + let implies_identifier = ith_implies_identifier ~i:0 in + fun env -> let start_loc = Peek.loc env in let decorators = decorator_list env in let static = - Peek.ith_token ~i:1 env <> T_LPAREN && - Peek.ith_token ~i:1 env <> T_LESS_THAN && - Expect.maybe env T_STATIC in + Peek.ith_token ~i:1 env <> T_LPAREN + && Peek.ith_token ~i:1 env <> T_LESS_THAN + && Expect.maybe env T_STATIC + in let async = - Peek.ith_token ~i:1 env <> T_LPAREN && - Peek.ith_token ~i:1 env <> T_COLON && - Declaration.async env in + Peek.token env = T_ASYNC + && (not (ith_implies_identifier ~i:1 env)) + && not (Peek.ith_is_line_terminator ~i:1 env) + in + if async then Eat.token env; + + (* consume `async` *) let generator = Declaration.generator env in let variance = Declaration.variance env async generator in - let generator = match generator, variance with - | false, Some _ -> Declaration.generator env - | _ -> generator + let generator = + match (generator, variance) with + | (false, Some _) -> Declaration.generator env + | _ -> generator in - match async, generator, Peek.token env with - | false, false, T_IDENTIFIER { raw = "get"; _ } -> - let _, key = key ~class_body:true env in - (match Peek.token env with - | T_LESS_THAN - | T_COLON - | T_ASSIGN - | T_SEMICOLON - | T_LPAREN -> - init env start_loc decorators key async generator static variance - | _ -> - error_unsupported_variance env variance; - get env start_loc decorators static) - | false, false, T_IDENTIFIER { raw = "set"; _ } -> - let _, key = key ~class_body:true env in - (match Peek.token env with - | T_LESS_THAN - | T_COLON - | T_ASSIGN - | T_SEMICOLON - | T_LPAREN -> - init env start_loc decorators key async generator static variance - | _ -> - error_unsupported_variance env variance; - set env start_loc decorators static) - | _, _, _ -> - let _, key = key ~class_body:true env in + match (async, generator, Peek.token env) with + | (false, false, T_IDENTIFIER { raw = "get"; _ }) -> + let (_, key) = key ~class_body:true env in + if implies_identifier env then + init env start_loc decorators key async generator static variance + else ( + error_unsupported_variance env variance; + get env start_loc decorators static + ) + | (false, false, T_IDENTIFIER { raw = "set"; _ }) -> + let (_, key) = key ~class_body:true env in + if implies_identifier env then init env start_loc decorators key async generator static variance + else ( + error_unsupported_variance env variance; + set env start_loc decorators static + ) + | (_, _, _) -> + let (_, key) = key ~class_body:true env in + init env start_loc decorators key async generator static variance let class_declaration env decorators = - (* 10.2.1 says all parts of a class definition are strict *) - let env = env |> with_strict true in - let start_loc = Peek.loc env in - let decorators = decorators @ (decorator_list env) in - Expect.token env T_CLASS; - let tmp_env = env |> with_no_let true in - let id = ( - match in_export env, Peek.is_identifier tmp_env with - | true, false -> None - | _ -> Some(Parse.identifier tmp_env) - ) in - 
let tparams = Type.type_parameter_declaration_with_defaults env in - let body, extends, implements = _class env in - let loc = Loc.btwn start_loc (fst body) in - loc, Ast.Statement.(ClassDeclaration Class.({ - id; - body; - tparams; - extends; - implements; - classDecorators=decorators; - })) - - let class_expression = with_loc (fun env -> - (* 10.2.1 says all parts of a class expression are strict *) - let env = env |> with_strict true in - let decorators = decorator_list env in - Expect.token env T_CLASS; - let id, tparams = match Peek.token env with - | T_EXTENDS - | T_LESS_THAN - | T_LCURLY -> None, None - | _ -> - let id = Some (Parse.identifier env) in - let tparams = Type.type_parameter_declaration_with_defaults env in - id, tparams in - let body, extends, implements = _class env in - Ast.Expression.Class { Class. - id; - body; - tparams; - extends; - implements; - classDecorators=decorators; - } - ) + with_loc + (fun env -> + (* 10.2.1 says all parts of a class definition are strict *) + let env = env |> with_strict true in + let decorators = decorators @ decorator_list env in + Expect.token env T_CLASS; + let tmp_env = env |> with_no_let true in + let id = + match (in_export env, Peek.is_identifier tmp_env) with + | (true, false) -> None + | _ -> Some (Parse.identifier tmp_env) + in + let tparams = Type.type_parameter_declaration env in + let (body, extends, implements) = _class env in + Ast.Statement.ClassDeclaration + { Class.id; body; tparams; extends; implements; classDecorators = decorators }) + env + + let class_expression = + with_loc (fun env -> + (* 10.2.1 says all parts of a class expression are strict *) + let env = env |> with_strict true in + let decorators = decorator_list env in + Expect.token env T_CLASS; + let (id, tparams) = + match Peek.token env with + | T_EXTENDS + | T_IMPLEMENTS + | T_LESS_THAN + | T_LCURLY -> + (None, None) + | _ -> + let id = Some (Parse.identifier env) in + let tparams = Type.type_parameter_declaration env in + (id, tparams) + in + let (body, extends, implements) = _class env in + Ast.Expression.Class + { Class.id; body; tparams; extends; implements; classDecorators = decorators }) end diff --git a/src/parser/offset_utils.ml b/src/parser/offset_utils.ml new file mode 100644 index 00000000000..6e6fbc42e8d --- /dev/null +++ b/src/parser/offset_utils.ml @@ -0,0 +1,119 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* table from 0-based line number and 0-based column number to the offset at that point *) +type t = int array array + +(* Classify each codepoint. We care about how many bytes each codepoint takes, in order to + compute offsets in terms of bytes instead of codepoints. We also care about various kinds of + newlines. To reduce memory, it is important that this is a basic variant with no parameters + (so, don't make it `Chars of int`). *) +type kind = + | Chars_1 + | Chars_2 + | Chars_3 + | Chars_4 + | Cr + | Nl + | Ls + +let size_of_kind = function + | Chars_1 -> 1 + | Chars_2 -> 2 + | Chars_3 -> 3 + | Chars_4 -> 4 + | Cr -> 1 + | Nl -> 1 + | Ls -> 3 + +let make = + (* Using Wtf8 allows us to properly track multi-byte characters, so that we increment the column + * by 1 for a multi-byte character, but increment the offset by the number of bytes in the + * character. It also keeps us from incrementing the line number if a multi-byte character happens + * to include e.g. 
the codepoint for '\n' as a second-fourth byte. *) + let fold_codepoints acc _offset chr = + let kind = + match chr with + | Wtf8.Point code -> + if code >= 0x10000 then + Chars_4 + else if code == 0x2028 || code == 0x2029 then + Ls + else if code >= 0x800 then + Chars_3 + else if code >= 0x80 then + Chars_2 + else if code == 0xA then + Nl + else if code == 0xD then + Cr + else + Chars_1 + | Wtf8.Malformed -> Chars_1 + in + kind :: acc + in + (* Traverses a `kind list`, breaking it up into an `int array array`, where each `int array` + contains the offsets at each character (aka codepoint) of a line. *) + let rec build_table (offset, rev_line, acc) = function + | [] -> Array.of_list (List.rev acc) + | Cr :: Nl :: rest -> + (* https://www.ecma-international.org/ecma-262/5.1/#sec-7.3 says that "\r\n" should be treated + like a single line terminator, even though both '\r' and '\n' are line terminators in their + own right. *) + let line = Array.of_list (List.rev (offset :: rev_line)) in + build_table (offset + 2, [], line :: acc) rest + | ((Cr | Nl | Ls) as kind) :: rest -> + let line = Array.of_list (List.rev (offset :: rev_line)) in + build_table (offset + size_of_kind kind, [], line :: acc) rest + | ((Chars_1 | Chars_2 | Chars_3 | Chars_4) as kind) :: rest -> + build_table (offset + size_of_kind kind, offset :: rev_line, acc) rest + in + fun text -> + let rev_kinds = Wtf8.fold_wtf_8 fold_codepoints [] text in + (* Add a phantom line at the end of the file. Since end positions are reported exclusively, it + * is possible for the lexer to output an end position with a line number one higher than the + * last line, to indicate something such as "the entire last line." For this purpose, we can + * return the offset that is one higher than the last legitimate offset, since it could only be + * correctly used as an exclusive index. *) + let rev_kinds = Nl :: rev_kinds in + build_table (0, [], []) (List.rev rev_kinds) + +exception Offset_lookup_failed of Loc.position * string + +let lookup arr i pos context_string = + try arr.(i) + with Invalid_argument _ -> + let msg = + Printf.sprintf + "Failure while looking up %s. Index: %d. Length: %d." + context_string + i + (Array.length arr) + in + raise (Offset_lookup_failed (pos, msg)) + +let offset table pos = + Loc.( + (* Special-case `Loc.none` so we don't try to look up line -1. *) + if pos.line = 0 && pos.column = 0 then + (* Loc.none sets the offset as 0, so that's what we'll return here. *) + 0 + else + (* lines are 1-indexed, columns are zero-indexed *) + let line_table = lookup table (pos.line - 1) pos "line" in + lookup line_table pos.column pos "column") + +let debug_string table = + let buf = Buffer.create 4096 in + Array.iteri + (fun line_num line -> + Printf.bprintf buf "%6d: " line_num; + Array.iter (fun offset -> Printf.bprintf buf "%8d " offset) line; + Buffer.add_char buf '\n') + table; + Buffer.contents buf diff --git a/src/parser/offset_utils.mli b/src/parser/offset_utils.mli new file mode 100644 index 00000000000..1d6e69e775a --- /dev/null +++ b/src/parser/offset_utils.mli @@ -0,0 +1,47 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Note on character encodings: + * + * Throughout Flow, we assume that program text uses a UTF-8 encoding. OCaml strings are just a + * sequence of bytes, so any handling of multi-byte characters needs to be done explicitly. 
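(* ---------------------------------------------------------------------- *)
(* Illustrative sketch (standalone, not part of the diff above): the core
   idea behind Offset_utils, assuming only the standard library. Positions
   count codepoints while offsets count bytes, so each codepoint's UTF-8
   byte width has to be known. Unlike the real module, this rescans the
   string on every lookup and ignores U+2028/U+2029; the names below are
   hypothetical. *)

(* Byte width of a UTF-8 codepoint, judged from its lead byte. *)
let utf8_width lead =
  let c = Char.code lead in
  if c < 0x80 then 1
  else if c land 0xE0 = 0xC0 then 2
  else if c land 0xF0 = 0xE0 then 3
  else 4

(* Byte offset of 1-based [line] and 0-based codepoint [column] in [text]. *)
let byte_offset text ~line ~column =
  let len = String.length text in
  let rec skip_lines i remaining =
    if remaining = 0 then i
    else if i >= len then invalid_arg "line out of range"
    else
      match text.[i] with
      | '\r' when i + 1 < len && text.[i + 1] = '\n' ->
        (* "\r\n" counts as a single line terminator, as in build_table *)
        skip_lines (i + 2) (remaining - 1)
      | '\r'
      | '\n' ->
        skip_lines (i + 1) (remaining - 1)
      | _ -> skip_lines (i + utf8_width text.[i]) remaining
  in
  let rec skip_columns i remaining =
    if remaining = 0 then i
    else if i >= len then invalid_arg "column out of range"
    else skip_columns (i + utf8_width text.[i]) (remaining - 1)
  in
  skip_columns (skip_lines 0 (line - 1)) column

let () =
  (* "a" U+1F603 "b": the emoji is one column but four bytes, so 'b' sits at byte 5 *)
  assert (byte_offset "a\xf0\x9f\x98\x83b" ~line:1 ~column:2 = 5);
  (* the byte after "x\r\n" starts line 2 at offset 3 *)
  assert (byte_offset "x\r\ny" ~line:2 ~column:0 = 3)
(* ---------------------------------------------------------------------- *)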
+ *
+ * Column numbers in `Loc.position`s are based on the number of characters into a line the position
+ * appears, not the number of bytes. Single-byte and multi-byte characters are treated the same for
+ * the purposes of counting columns.
+ *
+ * However, offsets are most useful (at least when working with OCaml's string representation) when
+ * they represent the number of bytes into the text a given position is. Therefore, this utility
+ * returns such offsets.
+ *
+ * In contrast, JavaScript strings must behave as if they have a UTF-16 encoding, and each element
+ * is a single 16-bit entry. So, each character occupies either one or two elements of a JavaScript
+ * string. Esprima, for example, returns ranges based on index into a JS string. For example, this
+ * utility would consider the smiley emoji (code point 0x1f603) to have width 4 (because its UTF-8
+ * encoding is 4 8-bit elements), but Esprima would consider it to have width 2 (because its UTF-16
+ * encoding is 2 16-bit elements).
+ *
+ * If necessary to improve compatibility with Esprima, this utility could be extended to
+ * additionally track what the offsets into JS-style strings would be.
+ *)
+
+(* A structure that allows for quick computation of offsets when given a Loc.position *)
+type t
+
+(* Create a table for offsets in the given file. Takes O(n) time and returns an object that takes
+ * O(n) space, where `n` is the size of the given program text. *)
+val make : string (* program text *) -> t
+
+exception Offset_lookup_failed of Loc.position * string
+
+(* Returns the offset for the given location. This is the offset in bytes (not characters!) into the
+ * file where the given position can be found. Constant time operation. Raises
+ * `Offset_lookup_failed` if the given position does not exist in the file contents which were used
+ * to construct the table. *)
+val offset : t -> Loc.position -> int
+
+val debug_string : t -> string
diff --git a/src/parser/opam b/src/parser/opam
index cc176bf3a6b..c4619b6d5aa 100644
--- a/src/parser/opam
+++ b/src/parser/opam
@@ -1,22 +1,26 @@
-opam-version: "1.2"
+opam-version: "2.0"
 name: "flow_parser"
-version: "0.82.0"
+version: "0.108.0"
 maintainer: "flow@fb.com"
 authors: ["Flow Team <flow@fb.com>"]
 homepage: "https://github.com/facebook/flow/tree/master/src/parser"
 bug-reports: "https://github.com/facebook/flow/issues"
 license: "MIT"
-build: [ "sh" "-c" "cd src/parser && make ocamlfind-install" ]
-
-remove: ["ocamlfind" "remove" "flow_parser"]
+build: [ make "-C" "src/parser" "build-parser" ]
+install: [ make "-C" "src/parser" "ocamlfind-install"]
 depends: [
+  "ocaml" {>= "4.07.1"}
  "ocamlfind" {build}
  "ocamlbuild" {build}
  "ppx_deriving" {build}
  "ppx_gen_rec" {build}
-  "sedlex"
+  "sedlex" {= "1.99.4"}
  "wtf8"
]
-available: [ocaml-version >= "4.03.0"]
-dev-repo: "https://github.com/facebook/flow.git"
+dev-repo: "git+https://github.com/facebook/flow.git"
+synopsis: "The Flow parser is a JavaScript parser written in OCaml"
+description: """
+It produces an AST that conforms to ESTree. The Flow Parser can be compiled to native code or can be compiled to JavaScript using js_of_ocaml.
+
+To find out more about Flow, check out <https://flow.org>."""
diff --git a/src/parser/parse_error.ml b/src/parser/parse_error.ml
index 822911061f9..ec0dd31538a 100644
--- a/src/parser/parse_error.ml
+++ b/src/parser/parse_error.ml
@@ -1,5 +1,5 @@
 (**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,11 +7,36 @@ type t = | Assertion of string - | UnexpectedToken of string + | EnumBooleanMemberNotInitialized of { + enum_name: string; + member_name: string; + } + | EnumDuplicateMemberName of { + enum_name: string; + member_name: string; + } + | EnumInconsistentMemberValues of { enum_name: string } + | EnumInvalidExplicitType of { + enum_name: string; + supplied_type: string option; + } + | EnumInvalidMemberInitializer of { + enum_name: string; + explicit_type: Enum_common.explicit_type option; + member_name: string; + } + | EnumInvalidMemberName of { + enum_name: string; + member_name: string; + } + | EnumNumberMemberNotInitialized of { + enum_name: string; + member_name: string; + } + | EnumStringMemberInconsistentlyInitailized of { enum_name: string } + | Unexpected of string + | UnexpectedWithExpected of string * string | UnexpectedTokenWithSuggestion of string * string - | UnexpectedNumber - | UnexpectedString - | UnexpectedIdentifier | UnexpectedReserved | UnexpectedReservedType | UnexpectedSuper @@ -27,7 +52,13 @@ type t = | UnexpectedTypeImport | UnexpectedTypeExport | UnexpectedTypeInterface + | UnexpectedSpreadType + | UnexpectedExplicitInexactInObject + | InexactInsideExact + | InexactInsideNonObject | NewlineAfterThrow + | InvalidFloatBigInt + | InvalidSciBigInt | InvalidRegExp | InvalidRegExpFlags of string | UnterminatedRegExp @@ -52,6 +83,7 @@ type t = | StrictParamDupe | StrictFunctionName | StrictOctalLiteral + | StrictNonOctalLiteral | StrictDelete | StrictDuplicateProperty | AccessorDataProperty @@ -84,7 +116,6 @@ type t = | ExportNamelessFunction | UnsupportedDecorator | MissingTypeParamDefault - | WindowsFloatOfString | DuplicateDeclareModuleExports | AmbiguousDeclareModuleKind | GetterArity @@ -96,7 +127,11 @@ type t = | MalformedUnicode | DuplicateConstructor | DuplicatePrivateFields of string - | InvalidFieldName of string * bool * bool + | InvalidFieldName of { + name: string; + static: bool; + private_: bool; + } | PrivateMethod | PrivateDelete | UnboundPrivate of string @@ -114,180 +149,253 @@ type t = | OptionalChainNew | OptionalChainTemplate | NullishCoalescingDisabled + | NullishCoalescingUnexpectedLogical of string + | WhitespaceInPrivateName exception Error of (Loc.t * t) list -let error loc e = - raise (Error [loc, e]) +let error loc e = raise (Error [(loc, e)]) -module PP = - struct - let error = function - | Assertion str -> "Unexpected parser state: "^str - | UnexpectedToken token-> "Unexpected token "^token - | UnexpectedTokenWithSuggestion (token, suggestion) -> - Printf.sprintf "Unexpected token `%s`. Did you mean `%s`?" 
- token - suggestion - | UnexpectedNumber -> "Unexpected number" - | UnexpectedString -> "Unexpected string" - | UnexpectedIdentifier -> "Unexpected identifier" - | UnexpectedReserved -> "Unexpected reserved word" - | UnexpectedReservedType -> "Unexpected reserved type" - | UnexpectedSuper -> "Unexpected `super` outside of a class method" - | UnexpectedSuperCall -> "`super()` is only valid in a class constructor" - | UnexpectedEOS -> "Unexpected end of input" - | UnexpectedVariance -> "Unexpected variance sigil" - | UnexpectedStatic -> "Unexpected static modifier" - | UnexpectedProto -> "Unexpected proto modifier" - | UnexpectedTypeAlias -> "Type aliases are not allowed in untyped mode" - | UnexpectedOpaqueTypeAlias -> "Opaque type aliases are not allowed in untyped mode" - | UnexpectedTypeAnnotation -> "Type annotations are not allowed in untyped mode" - | UnexpectedTypeDeclaration -> "Type declarations are not allowed in untyped mode" - | UnexpectedTypeImport -> "Type imports are not allowed in untyped mode" - | UnexpectedTypeExport -> "Type exports are not allowed in untyped mode" - | UnexpectedTypeInterface -> "Interfaces are not allowed in untyped mode" - | NewlineAfterThrow -> "Illegal newline after throw" - | InvalidRegExp -> "Invalid regular expression" - | InvalidRegExpFlags flags -> "Invalid flags supplied to RegExp constructor '"^flags^"'" - | UnterminatedRegExp -> "Invalid regular expression: missing /" - | InvalidLHSInAssignment -> "Invalid left-hand side in assignment" - | InvalidLHSInExponentiation -> "Invalid left-hand side in exponentiation expression" - | InvalidLHSInForIn -> "Invalid left-hand side in for-in" - | InvalidLHSInForOf -> "Invalid left-hand side in for-of" - | InvalidImportMetaProperty -> "The only valid meta property for import is import.meta" - | ExpectedPatternFoundExpression -> ( - "Expected an object pattern, array pattern, or an identifier but " ^ - "found an expression instead" - ) - | MultipleDefaultsInSwitch -> "More than one default clause in switch statement" - | NoCatchOrFinally -> "Missing catch or finally after try" - | UnknownLabel label -> "Undefined label '"^label^"'" - | Redeclaration (what, name)-> what^" '"^name^"' has already been declared" - | IllegalContinue -> "Illegal continue statement" - | IllegalBreak -> "Illegal break statement" - | IllegalReturn -> "Illegal return statement" - | IllegalUnicodeEscape -> "Illegal Unicode escape" - | StrictModeWith -> "Strict mode code may not include a with statement" - | StrictCatchVariable -> "Catch variable may not be eval or arguments in strict mode" - | StrictVarName -> "Variable name may not be eval or arguments in strict mode" - | StrictParamName -> "Parameter name eval or arguments is not allowed in strict mode" - | StrictParamDupe -> "Strict mode function may not have duplicate parameter names" - | StrictFunctionName -> "Function name may not be eval or arguments in strict mode" - | StrictOctalLiteral -> "Octal literals are not allowed in strict mode." - | StrictDelete -> "Delete of an unqualified identifier in strict mode." 
- | StrictDuplicateProperty -> "Duplicate data property in object literal not allowed in strict mode" - | AccessorDataProperty -> "Object literal may not have data and accessor property with the same name" - | AccessorGetSet -> "Object literal may not have multiple get/set accessors with the same name" - | StrictLHSAssignment -> "Assignment to eval or arguments is not allowed in strict mode" - | StrictLHSPostfix -> "Postfix increment/decrement may not have eval or arguments operand in strict mode" - | StrictLHSPrefix -> "Prefix increment/decrement may not have eval or arguments operand in strict mode" - | StrictReservedWord -> "Use of future reserved word in strict mode" - | JSXAttributeValueEmptyExpression -> "JSX attributes must only be assigned a non-empty expression" - | InvalidJSXAttributeValue -> "JSX value should be either an expression or a quoted JSX text" - | ExpectedJSXClosingTag name -> "Expected corresponding JSX closing tag for "^name - | NoUninitializedConst -> "Const must be initialized" - | NoUninitializedDestructuring -> "Destructuring assignment must be initialized" - | NewlineBeforeArrow -> "Illegal newline before arrow" - | FunctionAsStatement { in_strict_mode } -> - if in_strict_mode then - "In strict mode code, functions can only be declared at top level or "^ - "immediately within another function." - else - "In non-strict mode code, functions can only be declared at top level, "^ - "inside a block, or as the body of an if statement." - | AsyncFunctionAsStatement -> "Async functions can only be declared at top level or "^ - "immediately within another function." - | GeneratorFunctionAsStatement -> "Generators can only be declared at top level or "^ - "immediately within another function." - | AdjacentJSXElements -> "Unexpected token <. Remember, adjacent JSX "^ - "elements must be wrapped in an enclosing parent tag" - | ParameterAfterRestParameter -> - "Rest parameter must be final parameter of an argument list" - | ElementAfterRestElement -> - "Rest element must be final element of an array pattern" - | PropertyAfterRestProperty -> - "Rest property must be final property of an object pattern" - | DeclareAsync -> "async is an implementation detail and isn't necessary for your declare function statement. It is sufficient for your declare function to just have a Promise return type." - | DeclareExportLet -> "`declare export let` is not supported. Use \ - `declare export var` instead." - | DeclareExportConst -> "`declare export const` is not supported. Use \ - `declare export var` instead." - | DeclareExportType -> "`declare export type` is not supported. Use \ - `export type` instead." - | DeclareExportInterface -> "`declare export interface` is not supported. Use \ - `export interface` instead." - | UnexpectedExportStarAs -> "`export * as` is an early-stage proposal \ - and is not enabled by default. To enable support in the parser, use \ - the `esproposal_export_star_as` option" - | DuplicateExport export -> (Printf.sprintf "Duplicate export for `%s`" export) - | ExportNamelessClass -> "When exporting a class as a named export, \ - you must specify a class name. Did you mean \ - `export default class ...`?" - | ExportNamelessFunction -> "When exporting a function as a named export, \ - you must specify a function name. Did you mean \ - `export default function ...`?" - | UnsupportedDecorator -> "Found a decorator in an unsupported position." 
- | MissingTypeParamDefault -> "Type parameter declaration needs a default, \ - since a preceding type parameter declaration has a default." - | WindowsFloatOfString -> "The Windows version of OCaml has a bug in how \ - it parses hexadecimal numbers. It is fixed in OCaml 4.03.0. Until we \ - can switch to 4.03.0, please avoid either hexadecimal notation or \ - Windows." - | DuplicateDeclareModuleExports -> "Duplicate `declare module.exports` \ - statement!" - | AmbiguousDeclareModuleKind -> "Found both `declare module.exports` and \ - `declare export` in the same module. Modules can only have 1 since \ - they are either an ES module xor they are a CommonJS module." - | GetterArity -> "Getter should have zero parameters" - | SetterArity -> "Setter should have exactly one parameter" - | InvalidNonTypeImportInDeclareModule -> - "Imports within a `declare module` body must always be " ^ - "`import type` or `import typeof`!" - | ImportTypeShorthandOnlyInPureImport -> - "The `type` and `typeof` keywords on named imports can only be used on \ - regular `import` statements. It cannot be used with `import type` or \ - `import typeof` statements" - | ImportSpecifierMissingComma -> - "Missing comma between import specifiers" - | ExportSpecifierMissingComma -> - "Missing comma between export specifiers" - | MalformedUnicode -> - "Malformed unicode" - | DuplicateConstructor -> - "Classes may only have one constructor" - | DuplicatePrivateFields name -> - "Private fields may only be declared once. `#" ^ name ^ "` is declared more than once." - | InvalidFieldName (name, static, private_) -> - let static_modifier = if static then "static " else "" in - let name = if private_ then "#" ^ name else name in - "Classes may not have " ^ static_modifier ^ "fields named `" ^ name ^ "`." - | PrivateMethod -> - "Classes may not have private methods." - | PrivateDelete -> - "Private fields may not be deleted." - | UnboundPrivate name -> - "Private fields must be declared before they can be referenced. `#" ^ name - ^ "` has not been declared." - | PrivateNotInClass -> "Private fields can only be referenced from within a class." - | SuperPrivate -> "You may not access a private field through the `super` keyword." - | YieldInFormalParameters -> "Yield expression not allowed in formal parameter" - | AwaitAsIdentifierReference -> "`await` is an invalid identifier in async functions" - | YieldAsIdentifierReference -> "`yield` is an invalid identifier in generators" - | AmbiguousLetBracket -> "`let [` is ambiguous in this position because it is "^ - "either a `let` binding pattern, or a member expression." - | LiteralShorthandProperty -> "Literals cannot be used as shorthand properties." - | ComputedShorthandProperty -> "Computed properties must have a value." - | MethodInDestructuring -> "Object pattern can't contain methods" - | TrailingCommaAfterRestElement -> "A trailing comma is not permitted after the rest element" - | OptionalChainingDisabled -> "The optional chaining plugin must be enabled in order to \ - use the optional chaining operator (`?.`). Optional chaining is an active early-stage \ - feature proposal which may change and is not enabled by default. To enable support in \ - the parser, use the `esproposal_optional_chaining` option." - | OptionalChainNew -> "An optional chain may not be used in a `new` expression." - | OptionalChainTemplate -> "Template literals may not be used in an optional chain." 
- | NullishCoalescingDisabled -> "The nullish coalescing plugin must be enabled in order to \ - use the nullish coalescing operator (`??`). Nullish coalescing is an active early-stage \ - feature proposal which may change and is not enabled by default. To enable support in \ - the parser, use the `esproposal_nullish_coalescing` option." - end +module PP = struct + let error = function + | Assertion str -> "Unexpected parser state: " ^ str + | EnumBooleanMemberNotInitialized { enum_name; member_name } -> + Printf.sprintf + "Boolean enum members need to be initialized. Use either `%s = true,` or `%s = false,` in enum `%s`." + member_name + member_name + enum_name + | EnumDuplicateMemberName { enum_name; member_name } -> + Printf.sprintf + "Enum member names need to be unique, but the name `%s` has already been used before in enum `%s`." + member_name + enum_name + | EnumInconsistentMemberValues { enum_name } -> + Printf.sprintf + "Enum `%s` has inconsistent member initializers. Either use no initializers, or consistently use literals (either booleans, numbers, or strings) for all member initializers." + enum_name + | EnumInvalidExplicitType { enum_name; supplied_type } -> + let suggestion = + Printf.sprintf + "Use one of `boolean`, `number`, `string`, or `symbol` in enum `%s`." + enum_name + in + begin + match supplied_type with + | Some supplied_type -> + Printf.sprintf "Enum type `%s` is not valid. %s" supplied_type suggestion + | None -> Printf.sprintf "Supplied enum type is not valid. %s" suggestion + end + | EnumInvalidMemberInitializer { enum_name; explicit_type; member_name } -> + begin + match explicit_type with + | Some (Enum_common.Boolean as explicit_type) + | Some (Enum_common.Number as explicit_type) + | Some (Enum_common.String as explicit_type) -> + let explicit_type_str = Enum_common.string_of_explicit_type explicit_type in + Printf.sprintf + "Enum `%s` has type `%s`, so the initializer of `%s` needs to be a %s literal." + enum_name + explicit_type_str + member_name + explicit_type_str + | Some Enum_common.Symbol -> + Printf.sprintf + "Symbol enum members cannot be initialized. Use `%s,` in enum `%s`." + member_name + enum_name + | None -> + Printf.sprintf + "The enum member initializer for `%s` needs to be a literal (either a boolean, number, or string) in enum `%s`." + member_name + enum_name + end + | EnumInvalidMemberName { enum_name; member_name } -> + (* Based on the error condition, we will only receive member names starting with [a-z] *) + let suggestion = String.capitalize_ascii member_name in + Printf.sprintf + "Enum member names cannot start with lowercase 'a' through 'z'. Instead of using `%s`, consider using `%s`, in enum `%s`." + member_name + suggestion + enum_name + | EnumNumberMemberNotInitialized { enum_name; member_name } -> + Printf.sprintf + "Number enum members need to be initialized, e.g. `%s = 1,` in enum `%s`." + member_name + enum_name + | EnumStringMemberInconsistentlyInitailized { enum_name } -> + Printf.sprintf + "String enum members need to consistently either all use initializers, or use no initializers, in enum %s." + enum_name + | Unexpected unexpected -> Printf.sprintf "Unexpected %s" unexpected + | UnexpectedWithExpected (unexpected, expected) -> + Printf.sprintf "Unexpected %s, expected %s" unexpected expected + | UnexpectedTokenWithSuggestion (token, suggestion) -> + Printf.sprintf "Unexpected token `%s`. Did you mean `%s`?" 
token suggestion + | UnexpectedReserved -> "Unexpected reserved word" + | UnexpectedReservedType -> "Unexpected reserved type" + | UnexpectedSuper -> "Unexpected `super` outside of a class method" + | UnexpectedSuperCall -> "`super()` is only valid in a class constructor" + | UnexpectedEOS -> "Unexpected end of input" + | UnexpectedVariance -> "Unexpected variance sigil" + | UnexpectedStatic -> "Unexpected static modifier" + | UnexpectedProto -> "Unexpected proto modifier" + | UnexpectedTypeAlias -> "Type aliases are not allowed in untyped mode" + | UnexpectedOpaqueTypeAlias -> "Opaque type aliases are not allowed in untyped mode" + | UnexpectedTypeAnnotation -> "Type annotations are not allowed in untyped mode" + | UnexpectedTypeDeclaration -> "Type declarations are not allowed in untyped mode" + | UnexpectedTypeImport -> "Type imports are not allowed in untyped mode" + | UnexpectedTypeExport -> "Type exports are not allowed in untyped mode" + | UnexpectedTypeInterface -> "Interfaces are not allowed in untyped mode" + | UnexpectedSpreadType -> "Spreading a type is only allowed inside an object type" + | UnexpectedExplicitInexactInObject -> + "Explicit inexact syntax must come at the end of an object type" + | InexactInsideExact -> + "Explicit inexact syntax cannot appear inside an explicit exact object type" + | InexactInsideNonObject -> "Explicit inexact syntax can only appear inside an object type" + | NewlineAfterThrow -> "Illegal newline after throw" + | InvalidFloatBigInt -> "A bigint literal must be an integer" + | InvalidSciBigInt -> "A bigint literal cannot use exponential notation" + | InvalidRegExp -> "Invalid regular expression" + | InvalidRegExpFlags flags -> "Invalid flags supplied to RegExp constructor '" ^ flags ^ "'" + | UnterminatedRegExp -> "Invalid regular expression: missing /" + | InvalidLHSInAssignment -> "Invalid left-hand side in assignment" + | InvalidLHSInExponentiation -> "Invalid left-hand side in exponentiation expression" + | InvalidLHSInForIn -> "Invalid left-hand side in for-in" + | InvalidLHSInForOf -> "Invalid left-hand side in for-of" + | InvalidImportMetaProperty -> "The only valid meta property for import is import.meta" + | ExpectedPatternFoundExpression -> + "Expected an object pattern, array pattern, or an identifier but " + ^ "found an expression instead" + | MultipleDefaultsInSwitch -> "More than one default clause in switch statement" + | NoCatchOrFinally -> "Missing catch or finally after try" + | UnknownLabel label -> "Undefined label '" ^ label ^ "'" + | Redeclaration (what, name) -> what ^ " '" ^ name ^ "' has already been declared" + | IllegalContinue -> "Illegal continue statement" + | IllegalBreak -> "Illegal break statement" + | IllegalReturn -> "Illegal return statement" + | IllegalUnicodeEscape -> "Illegal Unicode escape" + | StrictModeWith -> "Strict mode code may not include a with statement" + | StrictCatchVariable -> "Catch variable may not be eval or arguments in strict mode" + | StrictVarName -> "Variable name may not be eval or arguments in strict mode" + | StrictParamName -> "Parameter name eval or arguments is not allowed in strict mode" + | StrictParamDupe -> "Strict mode function may not have duplicate parameter names" + | StrictFunctionName -> "Function name may not be eval or arguments in strict mode" + | StrictOctalLiteral -> "Octal literals are not allowed in strict mode." + | StrictNonOctalLiteral -> "Number literals with leading zeros are not allowed in strict mode." 
+ | StrictDelete -> "Delete of an unqualified identifier in strict mode." + | StrictDuplicateProperty -> + "Duplicate data property in object literal not allowed in strict mode" + | AccessorDataProperty -> + "Object literal may not have data and accessor property with the same name" + | AccessorGetSet -> "Object literal may not have multiple get/set accessors with the same name" + | StrictLHSAssignment -> "Assignment to eval or arguments is not allowed in strict mode" + | StrictLHSPostfix -> + "Postfix increment/decrement may not have eval or arguments operand in strict mode" + | StrictLHSPrefix -> + "Prefix increment/decrement may not have eval or arguments operand in strict mode" + | StrictReservedWord -> "Use of future reserved word in strict mode" + | JSXAttributeValueEmptyExpression -> + "JSX attributes must only be assigned a non-empty expression" + | InvalidJSXAttributeValue -> "JSX value should be either an expression or a quoted JSX text" + | ExpectedJSXClosingTag name -> "Expected corresponding JSX closing tag for " ^ name + | NoUninitializedConst -> "Const must be initialized" + | NoUninitializedDestructuring -> "Destructuring assignment must be initialized" + | NewlineBeforeArrow -> "Illegal newline before arrow" + | FunctionAsStatement { in_strict_mode } -> + if in_strict_mode then + "In strict mode code, functions can only be declared at top level or " + ^ "immediately within another function." + else + "In non-strict mode code, functions can only be declared at top level, " + ^ "inside a block, or as the body of an if statement." + | AsyncFunctionAsStatement -> + "Async functions can only be declared at top level or " + ^ "immediately within another function." + | GeneratorFunctionAsStatement -> + "Generators can only be declared at top level or " ^ "immediately within another function." + | AdjacentJSXElements -> + "Unexpected token <. Remember, adjacent JSX " + ^ "elements must be wrapped in an enclosing parent tag" + | ParameterAfterRestParameter -> "Rest parameter must be final parameter of an argument list" + | ElementAfterRestElement -> "Rest element must be final element of an array pattern" + | PropertyAfterRestProperty -> "Rest property must be final property of an object pattern" + | DeclareAsync -> + "async is an implementation detail and isn't necessary for your declare function statement. It is sufficient for your declare function to just have a Promise return type." + | DeclareExportLet -> + "`declare export let` is not supported. Use `declare export var` instead." + | DeclareExportConst -> + "`declare export const` is not supported. Use `declare export var` instead." + | DeclareExportType -> "`declare export type` is not supported. Use `export type` instead." + | DeclareExportInterface -> + "`declare export interface` is not supported. Use `export interface` instead." + | UnexpectedExportStarAs -> + "`export * as` is an early-stage proposal and is not enabled by default. To enable support in the parser, use the `esproposal_export_star_as` option" + | DuplicateExport export -> Printf.sprintf "Duplicate export for `%s`" export + | ExportNamelessClass -> + "When exporting a class as a named export, you must specify a class name. Did you mean `export default class ...`?" + | ExportNamelessFunction -> + "When exporting a function as a named export, you must specify a function name. Did you mean `export default function ...`?" + | UnsupportedDecorator -> "Found a decorator in an unsupported position." 
+ | MissingTypeParamDefault -> + "Type parameter declaration needs a default, since a preceding type parameter declaration has a default." + | DuplicateDeclareModuleExports -> "Duplicate `declare module.exports` statement!" + | AmbiguousDeclareModuleKind -> + "Found both `declare module.exports` and `declare export` in the same module. Modules can only have 1 since they are either an ES module xor they are a CommonJS module." + | GetterArity -> "Getter should have zero parameters" + | SetterArity -> "Setter should have exactly one parameter" + | InvalidNonTypeImportInDeclareModule -> + "Imports within a `declare module` body must always be " + ^ "`import type` or `import typeof`!" + | ImportTypeShorthandOnlyInPureImport -> + "The `type` and `typeof` keywords on named imports can only be used on regular `import` statements. It cannot be used with `import type` or `import typeof` statements" + | ImportSpecifierMissingComma -> "Missing comma between import specifiers" + | ExportSpecifierMissingComma -> "Missing comma between export specifiers" + | MalformedUnicode -> "Malformed unicode" + | DuplicateConstructor -> "Classes may only have one constructor" + | DuplicatePrivateFields name -> + "Private fields may only be declared once. `#" ^ name ^ "` is declared more than once." + | InvalidFieldName { name; static; private_ } -> + let static_modifier = + if static then + "static " + else + "" + in + let name = + if private_ then + "#" ^ name + else + name + in + "Classes may not have " ^ static_modifier ^ "fields named `" ^ name ^ "`." + | PrivateMethod -> "Classes may not have private methods." + | PrivateDelete -> "Private fields may not be deleted." + | UnboundPrivate name -> + "Private fields must be declared before they can be referenced. `#" + ^ name + ^ "` has not been declared." + | PrivateNotInClass -> "Private fields can only be referenced from within a class." + | SuperPrivate -> "You may not access a private field through the `super` keyword." + | YieldInFormalParameters -> "Yield expression not allowed in formal parameter" + | AwaitAsIdentifierReference -> "`await` is an invalid identifier in async functions" + | YieldAsIdentifierReference -> "`yield` is an invalid identifier in generators" + | AmbiguousLetBracket -> + "`let [` is ambiguous in this position because it is " + ^ "either a `let` binding pattern, or a member expression." + | LiteralShorthandProperty -> "Literals cannot be used as shorthand properties." + | ComputedShorthandProperty -> "Computed properties must have a value." + | MethodInDestructuring -> "Object pattern can't contain methods" + | TrailingCommaAfterRestElement -> "A trailing comma is not permitted after the rest element" + | OptionalChainingDisabled -> + "The optional chaining plugin must be enabled in order to use the optional chaining operator (`?.`). Optional chaining is an active early-stage feature proposal which may change and is not enabled by default. To enable support in the parser, use the `esproposal_optional_chaining` option." + | OptionalChainNew -> "An optional chain may not be used in a `new` expression." + | OptionalChainTemplate -> "Template literals may not be used in an optional chain." + | NullishCoalescingDisabled -> + "The nullish coalescing plugin must be enabled in order to use the nullish coalescing operator (`??`). Nullish coalescing is an active early-stage feature proposal which may change and is not enabled by default. To enable support in the parser, use the `esproposal_nullish_coalescing` option." 
+ | NullishCoalescingUnexpectedLogical operator -> + Printf.sprintf + "Unexpected token `%s`. Parentheses are required to combine `??` with `&&` or `||` expressions." + operator + | WhitespaceInPrivateName -> "Unexpected whitespace between `#` and identifier" +end diff --git a/src/parser/parser_common.ml b/src/parser/parser_common.ml index 915a4805373..f81fe893b88 100644 --- a/src/parser/parser_common.ml +++ b/src/parser/parser_common.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,7 +7,6 @@ open Parser_env open Flow_ast -module Error = Parse_error type pattern_errors = { if_expr: (Loc.t * Parse_error.t) list; @@ -20,110 +19,147 @@ type pattern_cover = module type PARSER = sig val program : env -> (Loc.t, Loc.t) program + val statement : env -> (Loc.t, Loc.t) Statement.t - val statement_list_item : ?decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t + + val statement_list_item : + ?decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t + val statement_list : term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list - val statement_list_with_directives : term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list * bool + + val statement_list_with_directives : + term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list * bool + val module_body : term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list + val expression : env -> (Loc.t, Loc.t) Expression.t + val expression_or_pattern : env -> pattern_cover + val conditional : env -> (Loc.t, Loc.t) Expression.t + val assignment : env -> (Loc.t, Loc.t) Expression.t + val left_hand_side : env -> (Loc.t, Loc.t) Expression.t + val object_initializer : env -> Loc.t * (Loc.t, Loc.t) Expression.Object.t * pattern_errors - val identifier : ?restricted_error:Error.t -> env -> Loc.t Identifier.t - val identifier_with_type : env -> ?no_optional:bool -> Error.t -> Loc.t * (Loc.t, Loc.t) Pattern.Identifier.t + + val identifier : ?restricted_error:Parse_error.t -> env -> (Loc.t, Loc.t) Identifier.t + + val identifier_with_type : + env -> ?no_optional:bool -> Parse_error.t -> Loc.t * (Loc.t, Loc.t) Pattern.Identifier.t + val assert_identifier_name_is_identifier : - ?restricted_error:Error.t -> env -> Loc.t * string -> unit + ?restricted_error:Parse_error.t -> env -> (Loc.t, Loc.t) Identifier.t -> unit + val block_body : env -> Loc.t * (Loc.t, Loc.t) Statement.Block.t + val function_block_body : env -> Loc.t * (Loc.t, Loc.t) Statement.Block.t * bool + val jsx_element_or_fragment : - env -> Loc.t * [`Element of (Loc.t, Loc.t) JSX.element | `Fragment of (Loc.t, Loc.t) JSX.fragment] - val pattern : env -> Error.t -> (Loc.t, Loc.t) Pattern.t + env -> + Loc.t * [ `Element of (Loc.t, Loc.t) JSX.element | `Fragment of (Loc.t, Loc.t) JSX.fragment ] + + val pattern : env -> Parse_error.t -> (Loc.t, Loc.t) Pattern.t + val pattern_from_expr : env -> (Loc.t, Loc.t) Expression.t -> (Loc.t, Loc.t) Pattern.t - val object_key : ?class_body: bool -> env -> Loc.t * (Loc.t, Loc.t) Expression.Object.Property.key - val class_declaration : env -> (Loc.t, Loc.t) Class.Decorator.t list -> (Loc.t, Loc.t) Statement.t + + val object_key : ?class_body:bool -> env -> Loc.t * (Loc.t, Loc.t) Expression.Object.Property.key + + val class_declaration : + env -> (Loc.t, Loc.t) 
Class.Decorator.t list -> (Loc.t, Loc.t) Statement.t + val class_expression : env -> (Loc.t, Loc.t) Expression.t + val is_assignable_lhs : (Loc.t, Loc.t) Expression.t -> bool + + val number : env -> Token.number_type -> string -> float end (* IdentifierName - https://tc39.github.io/ecma262/#prod-IdentifierName *) let identifier_name env = - let open Token in - let loc = Peek.loc env in - let name = match Peek.token env with - (* obviously, Identifier is a valid IdentifierName *) - | T_IDENTIFIER { value; _ } -> value - (* keywords are also IdentifierNames *) - | T_AWAIT -> "await" - | T_BREAK -> "break" - | T_CASE -> "case" - | T_CATCH -> "catch" - | T_CLASS -> "class" - | T_CONST -> "const" - | T_CONTINUE -> "continue" - | T_DEBUGGER -> "debugger" - | T_DEFAULT -> "default" - | T_DELETE -> "delete" - | T_DO -> "do" - | T_ELSE -> "else" - | T_EXPORT -> "export" - | T_EXTENDS -> "extends" - | T_FINALLY -> "finally" - | T_FOR -> "for" - | T_FUNCTION -> "function" - | T_IF -> "if" - | T_IMPORT -> "import" - | T_IN -> "in" - | T_INSTANCEOF -> "instanceof" - | T_NEW -> "new" - | T_RETURN -> "return" - | T_SUPER -> "super" - | T_SWITCH -> "switch" - | T_THIS -> "this" - | T_THROW -> "throw" - | T_TRY -> "try" - | T_TYPEOF -> "typeof" - | T_VAR -> "var" - | T_VOID -> "void" - | T_WHILE -> "while" - | T_WITH -> "with" - | T_YIELD -> "yield" - (* FutureReservedWord *) - | T_ENUM -> "enum" - | T_LET -> "let" - | T_STATIC -> "static" - | T_INTERFACE -> "interface" - | T_IMPLEMENTS -> "implements" - | T_PACKAGE -> "package" - | T_PRIVATE -> "private" - | T_PROTECTED -> "protected" - | T_PUBLIC -> "public" - (* NullLiteral *) - | T_NULL -> "null" - (* BooleanLiteral *) - | T_TRUE -> "true" - | T_FALSE -> "false" - (* Flow-specific stuff *) - | T_DECLARE -> "declare" - | T_TYPE -> "type" - | T_OPAQUE -> "opaque" - | T_ANY_TYPE -> "any" - | T_MIXED_TYPE -> "mixed" - | T_EMPTY_TYPE -> "empty" - | T_BOOLEAN_TYPE BOOL -> "bool" - | T_BOOLEAN_TYPE BOOLEAN -> "boolean" - | T_NUMBER_TYPE -> "number" - | T_STRING_TYPE -> "string" - | T_VOID_TYPE -> "void" - (* Contextual stuff *) - | T_OF -> "of" - | T_ASYNC -> "async" - (* punctuators, types, literals, etc are not identifiers *) - | _ -> error_unexpected env; "" - in - Eat.token env; - loc, name + Token.( + let loc = Peek.loc env in + let leading = Peek.comments env in + let name = + match Peek.token env with + (* obviously, Identifier is a valid IdentifierName *) + | T_IDENTIFIER { value; _ } -> value + (* keywords are also IdentifierNames *) + | T_AWAIT -> "await" + | T_BREAK -> "break" + | T_CASE -> "case" + | T_CATCH -> "catch" + | T_CLASS -> "class" + | T_CONST -> "const" + | T_CONTINUE -> "continue" + | T_DEBUGGER -> "debugger" + | T_DEFAULT -> "default" + | T_DELETE -> "delete" + | T_DO -> "do" + | T_ELSE -> "else" + | T_EXPORT -> "export" + | T_EXTENDS -> "extends" + | T_FINALLY -> "finally" + | T_FOR -> "for" + | T_FUNCTION -> "function" + | T_IF -> "if" + | T_IMPORT -> "import" + | T_IN -> "in" + | T_INSTANCEOF -> "instanceof" + | T_NEW -> "new" + | T_RETURN -> "return" + | T_SUPER -> "super" + | T_SWITCH -> "switch" + | T_THIS -> "this" + | T_THROW -> "throw" + | T_TRY -> "try" + | T_TYPEOF -> "typeof" + | T_VAR -> "var" + | T_VOID -> "void" + | T_WHILE -> "while" + | T_WITH -> "with" + | T_YIELD -> "yield" + (* FutureReservedWord *) + | T_ENUM -> "enum" + | T_LET -> "let" + | T_STATIC -> "static" + | T_INTERFACE -> "interface" + | T_IMPLEMENTS -> "implements" + | T_PACKAGE -> "package" + | T_PRIVATE -> "private" + | T_PROTECTED -> "protected" + | 
T_PUBLIC -> "public" + (* NullLiteral *) + | T_NULL -> "null" + (* BooleanLiteral *) + | T_TRUE -> "true" + | T_FALSE -> "false" + (* Flow-specific stuff *) + | T_DECLARE -> "declare" + | T_TYPE -> "type" + | T_OPAQUE -> "opaque" + | T_ANY_TYPE -> "any" + | T_MIXED_TYPE -> "mixed" + | T_EMPTY_TYPE -> "empty" + | T_BOOLEAN_TYPE BOOL -> "bool" + | T_BOOLEAN_TYPE BOOLEAN -> "boolean" + | T_NUMBER_TYPE -> "number" + | T_BIGINT_TYPE -> "bigint" + | T_STRING_TYPE -> "string" + | T_VOID_TYPE -> "void" + (* Contextual stuff *) + | T_OF -> "of" + | T_ASYNC -> "async" + (* punctuators, types, literals, etc are not identifiers *) + | _ -> + error_unexpected ~expected:"an identifier" env; + "" + in + Eat.token env; + let trailing = Peek.comments env in + let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in + (loc, { Identifier.name; comments })) (** * The abstract operation IsLabelledFunction @@ -131,22 +167,24 @@ let identifier_name env = * https://tc39.github.io/ecma262/#sec-islabelledfunction *) let rec is_labelled_function = function - | _, Flow_ast.Statement.Labeled { Flow_ast.Statement.Labeled.body; _ } -> - begin match body with - | _, Flow_ast.Statement.FunctionDeclaration _ -> true - | _ -> is_labelled_function body + | (_, Flow_ast.Statement.Labeled { Flow_ast.Statement.Labeled.body; _ }) -> + begin + match body with + | (_, Flow_ast.Statement.FunctionDeclaration _) -> true + | _ -> is_labelled_function body end - | _ -> - false + | _ -> false let with_loc ?start_loc fn env = - let start_loc = match start_loc with - | Some x -> x - | None -> Peek.loc env + let start_loc = + match start_loc with + | Some x -> x + | None -> Peek.loc env in let result = fn env in - let loc = match last_loc env with - | Some end_loc -> Loc.btwn start_loc end_loc - | None -> start_loc + let loc = + match last_loc env with + | Some end_loc -> Loc.btwn start_loc end_loc + | None -> start_loc in - loc, result + (loc, result) diff --git a/src/parser/parser_env.ml b/src/parser/parser_env.ml index 4f02f004b85..c95095cd7b9 100644 --- a/src/parser/parser_env.ml +++ b/src/parser/parser_env.ml @@ -1,13 +1,12 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) open Flow_ast -module Error = Parse_error -module SSet = Set.Make(String) +module SSet = Set.Make (String) module Lex_mode = struct type t = @@ -18,7 +17,7 @@ module Lex_mode = struct | TEMPLATE | REGEXP - let debug_string_of_lex_mode (mode: t) = + let debug_string_of_lex_mode (mode : t) = match mode with | NORMAL -> "NORMAL" | TYPE -> "TYPE" @@ -43,59 +42,59 @@ let maximum_lookahead = 2 module Lookahead : sig type t + val create : Lex_env.t -> Lex_mode.t -> t + val peek : t -> int -> Lex_result.t + val lex_env : t -> int -> Lex_env.t + val junk : t -> unit end = struct type t = { - mutable la_results : (Lex_env.t * Lex_result.t) option array; - mutable la_num_lexed : int; - la_lex_mode : Lex_mode.t; - mutable la_lex_env : Lex_env.t; + mutable la_results: (Lex_env.t * Lex_result.t) option array; + mutable la_num_lexed: int; + la_lex_mode: Lex_mode.t; + mutable la_lex_env: Lex_env.t; } let create lex_env mode = let lex_env = Lex_env.clone lex_env in - { - la_results = [||]; - la_num_lexed = 0; - la_lex_mode = mode; - la_lex_env = lex_env; - } + { la_results = [||]; la_num_lexed = 0; la_lex_mode = mode; la_lex_env = lex_env } let next_power_of_two n = let rec f i = if i >= n then i else - f (i * 2) in + f (i * 2) + in f 1 (* resize the tokens array to have at least n elements *) let grow t n = - if Array.length t.la_results < n then begin + if Array.length t.la_results < n then let new_size = next_power_of_two n in let filler i = if i < Array.length t.la_results then t.la_results.(i) else - None in + None + in let new_arr = Array.init new_size filler in t.la_results <- new_arr - end (* precondition: there is enough room in t.la_results for the result *) let lex t = let lex_env = t.la_lex_env in - let lex_env, lex_result = + let (lex_env, lex_result) = match t.la_lex_mode with - | Lex_mode.NORMAL -> Lexer.token lex_env - | Lex_mode.TYPE -> Lexer.type_token lex_env - | Lex_mode.JSX_TAG -> Lexer.jsx_tag lex_env - | Lex_mode.JSX_CHILD -> Lexer.jsx_child lex_env - | Lex_mode.TEMPLATE -> Lexer.template_tail lex_env - | Lex_mode.REGEXP -> Lexer.regexp lex_env + | Lex_mode.NORMAL -> Flow_lexer.token lex_env + | Lex_mode.TYPE -> Flow_lexer.type_token lex_env + | Lex_mode.JSX_TAG -> Flow_lexer.jsx_tag lex_env + | Lex_mode.JSX_CHILD -> Flow_lexer.jsx_child lex_env + | Lex_mode.TEMPLATE -> Flow_lexer.template_tail lex_env + | Lex_mode.REGEXP -> Flow_lexer.regexp lex_env in let cloned_env = Lex_env.clone lex_env in t.la_lex_env <- lex_env; @@ -111,24 +110,23 @@ end = struct let peek t i = lex_until t i; match t.la_results.(i) with - | Some (_, result) -> result - (* only happens if there is a defect in the lookahead module *) - | None -> failwith "Lookahead.peek failed" + | Some (_, result) -> result + (* only happens if there is a defect in the lookahead module *) + | None -> failwith "Lookahead.peek failed" let lex_env t i = lex_until t i; match t.la_results.(i) with - | Some (lex_env, _) -> lex_env - (* only happens if there is a defect in the lookahead module *) - | None -> failwith "Lookahead.peek failed" + | Some (lex_env, _) -> lex_env + (* only happens if there is a defect in the lookahead module *) + | None -> failwith "Lookahead.peek failed" (* Throws away the first peeked-at token, shifting any subsequent tokens up *) let junk t = lex_until t 0; - if t.la_num_lexed > 1 then - Array.blit t.la_results 1 t.la_results 0 (t.la_num_lexed - 1); + if t.la_num_lexed > 1 then Array.blit t.la_results 1 t.la_results 0 (t.la_num_lexed - 1); t.la_results.(t.la_num_lexed - 1) <- None; - t.la_num_lexed <- 
t.la_num_lexed - 1; + t.la_num_lexed <- t.la_num_lexed - 1 end type token_sink_result = { @@ -138,6 +136,7 @@ type token_sink_result = { } type parse_options = { + enums: bool; esproposal_class_instance_fields: bool; esproposal_class_static_fields: bool; esproposal_decorators: bool; @@ -147,16 +146,19 @@ type parse_options = { types: bool; use_strict: bool; } -let default_parse_options = { - esproposal_class_instance_fields = false; - esproposal_class_static_fields = false; - esproposal_decorators = false; - esproposal_export_star_as = false; - esproposal_optional_chaining = false; - esproposal_nullish_coalescing = false; - types = true; - use_strict = false; -} + +let default_parse_options = + { + enums = false; + esproposal_class_instance_fields = false; + esproposal_class_static_fields = false; + esproposal_decorators = false; + esproposal_export_star_as = false; + esproposal_optional_chaining = false; + esproposal_nullish_coalescing = false; + types = true; + use_strict = false; + } type allowed_super = | No_super @@ -164,50 +166,50 @@ type allowed_super = | Super_prop_or_call type env = { - errors : (Loc.t * Error.t) list ref; - comments : Loc.t Comment.t list ref; - labels : SSet.t; - exports : SSet.t ref; - last_lex_result : Lex_result.t option ref; - in_strict_mode : bool; - in_export : bool; - in_loop : bool; - in_switch : bool; - in_formal_parameters : bool; - in_function : bool; - no_in : bool; - no_call : bool; - no_let : bool; - no_anon_function_type : bool; - no_new : bool; - allow_yield : bool; - allow_await : bool; - allow_directive : bool; - allow_super : allowed_super; - error_callback : (env -> Error.t -> unit) option; - lex_mode_stack : Lex_mode.t list ref; + errors: (Loc.t * Parse_error.t) list ref; + comments: Loc.t Comment.t list ref; + labels: SSet.t; + exports: SSet.t ref; + last_lex_result: Lex_result.t option ref; + in_strict_mode: bool; + in_export: bool; + in_loop: bool; + in_switch: bool; + in_formal_parameters: bool; + in_function: bool; + no_in: bool; + no_call: bool; + no_let: bool; + no_anon_function_type: bool; + no_new: bool; + allow_yield: bool; + allow_await: bool; + allow_directive: bool; + allow_super: allowed_super; + error_callback: (env -> Parse_error.t -> unit) option; + lex_mode_stack: Lex_mode.t list ref; (* lex_env is the lex_env after the single lookahead has been lexed *) - lex_env : Lex_env.t ref; + lex_env: Lex_env.t ref; (* This needs to be cleared whenever we advance. *) - lookahead : Lookahead.t ref; - token_sink : (token_sink_result -> unit) option ref; - parse_options : parse_options; - source : File_key.t option; + lookahead: Lookahead.t ref; + token_sink: (token_sink_result -> unit) option ref; + parse_options: parse_options; + source: File_key.t option; (* It is a syntax error to reference private fields not in scope. In order to enforce this, * we keep track of the privates we've seen declared and used. 
*) - privates : (SSet.t * ((string * Loc.t) list)) list ref; + privates: (SSet.t * (string * Loc.t) list) list ref; } (* constructor *) -let init_env ?(token_sink=None) ?(parse_options=None) source content = +let init_env ?(token_sink = None) ?(parse_options = None) source content = (* let lb = Sedlexing.Utf16.from_string content (Some Sedlexing.Utf16.Little_endian) in *) - let lb, errors = try Sedlexing.Utf8.from_string content, [] - with Sedlexing.MalFormed -> - Sedlexing.Utf8.from_string "", - [ { Loc.none with Loc.source; }, Parse_error.MalformedUnicode ] + let (lb, errors) = + try (Sedlexing.Utf8.from_string content, []) + with Sedlexing.MalFormed -> + ( Sedlexing.Utf8.from_string "", + [({ Loc.none with Loc.source }, Parse_error.MalformedUnicode)] ) in - let parse_options = match parse_options with | Some opts -> opts @@ -248,26 +250,47 @@ let init_env ?(token_sink=None) ?(parse_options=None) source content = (* getters: *) let in_strict_mode env = env.in_strict_mode + let lex_mode env = List.hd !(env.lex_mode_stack) + let in_export env = env.in_export + let comments env = !(env.comments) + let labels env = env.labels + let in_loop env = env.in_loop + let in_switch env = env.in_switch + let in_formal_parameters env = env.in_formal_parameters + let in_function env = env.in_function + let allow_yield env = env.allow_yield + let allow_await env = env.allow_await + let allow_directive env = env.allow_directive + let allow_super env = env.allow_super + let no_in env = env.no_in + let no_call env = env.no_call + let no_let env = env.no_let + let no_anon_function_type env = env.no_anon_function_type + let no_new env = env.no_new + let errors env = !(env.errors) + let parse_options env = env.parse_options + let source env = env.source + let should_parse_types env = env.parse_options.types (* mutators: *) @@ -276,12 +299,17 @@ let error_at env (loc, e) = match env.error_callback with | None -> () | Some callback -> callback env e -let record_export env (loc, export_name) = - if export_name = "" then () else (* empty identifiers signify an error, don't export it *) - let exports = !(env.exports) in - if SSet.mem export_name exports - then error_at env (loc, Error.DuplicateExport export_name) - else env.exports := SSet.add export_name !(env.exports) + +let record_export env (loc, { Identifier.name = export_name; comments = _ }) = + if export_name = "" then + () + else + (* empty identifiers signify an error, don't export it *) + let exports = !(env.exports) in + if SSet.mem export_name exports then + error_at env (loc, Parse_error.DuplicateExport export_name) + else + env.exports := SSet.add export_name !(env.exports) (* Since private fields out of scope are a parse error, we keep track of the declared and used * private fields. 
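Reviewer note (illustrative, not part of the patch): the `enums` flag added to `parse_options` above is what gates the new `T_ENUM` branch in `statement_list_item` further down in this diff, and `default_parse_options` leaves it off. A minimal sketch of how a caller would opt in, assuming the public `Parser_flow.program` entry point that appears later in the patch; `parse_with_enums` is a made-up name used only for illustration:

let parse_with_enums content =
  (* enable `enum` declarations for this parse only; all other fields keep
     the values from Parser_env.default_parse_options *)
  let opts = { Parser_env.default_parse_options with Parser_env.enums = true } in
  (* fail:false returns the (Loc.t * Parse_error.t) list instead of raising *)
  Parser_flow.program ~fail:false ~parse_options:(Some opts) content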
@@ -298,30 +326,31 @@ let record_export env (loc, export_name) = let enter_class env = env.privates := (SSet.empty, []) :: !(env.privates) let exit_class env = - let get_unbound_privates declared_privates used_privates = - List.filter (fun x -> not (SSet.mem (fst x) declared_privates)) used_privates in - + List.filter (fun x -> not (SSet.mem (fst x) declared_privates)) used_privates + in match !(env.privates) with - | [declared_privates, used_privates] -> - let unbound_privates = get_unbound_privates declared_privates used_privates in - List.iter (fun (name, loc) -> error_at env (loc, Error.UnboundPrivate name)) unbound_privates; - env.privates := [] + | [(declared_privates, used_privates)] -> + let unbound_privates = get_unbound_privates declared_privates used_privates in + List.iter + (fun (name, loc) -> error_at env (loc, Parse_error.UnboundPrivate name)) + unbound_privates; + env.privates := [] | (loc_declared_privates, loc_used_privates) :: privates -> - let unbound_privates = get_unbound_privates loc_declared_privates loc_used_privates in - let decl_head, used_head = List.hd privates in - env.privates := (decl_head, used_head @ unbound_privates) :: (List.tl privates) + let unbound_privates = get_unbound_privates loc_declared_privates loc_used_privates in + let (decl_head, used_head) = List.hd privates in + env.privates := (decl_head, used_head @ unbound_privates) :: List.tl privates | _ -> failwith "Internal Error: `exit_class` called before a matching `enter_class`" let add_declared_private env name = match !(env.privates) with | [] -> failwith "Internal Error: Tried to add_declared_private with outside of class scope." - | (declared, used)::xs -> env.privates := ((SSet.add name declared, used) :: xs) + | (declared, used) :: xs -> env.privates := (SSet.add name declared, used) :: xs let add_used_private env name loc = match !(env.privates) with - | [] -> error_at env (loc, Error.PrivateNotInClass) - | (declared, used)::xs -> env.privates := ((declared, (name, loc) :: used) :: xs) + | [] -> error_at env (loc, Parse_error.PrivateNotInClass) + | (declared, used) :: xs -> env.privates := (declared, (name, loc) :: used) :: xs (* lookahead: *) let lookahead ~i env = @@ -330,38 +359,57 @@ let lookahead ~i env = (* functional operations: *) let with_strict in_strict_mode env = { env with in_strict_mode } + let with_in_formal_parameters in_formal_parameters env = { env with in_formal_parameters } + let with_in_function in_function env = { env with in_function } + let with_allow_yield allow_yield env = { env with allow_yield } + let with_allow_await allow_await env = { env with allow_await } + let with_allow_directive allow_directive env = { env with allow_directive } + let with_allow_super allow_super env = { env with allow_super } + let with_no_let no_let env = { env with no_let } + let with_in_loop in_loop env = { env with in_loop } + let with_no_in no_in env = { env with no_in } -let with_no_anon_function_type no_anon_function_type env = - { env with no_anon_function_type } + +let with_no_anon_function_type no_anon_function_type env = { env with no_anon_function_type } + let with_no_new no_new env = { env with no_new } + let with_in_switch in_switch env = { env with in_switch } + let with_in_export in_export env = { env with in_export } + let with_no_call no_call env = { env with no_call } -let with_error_callback error_callback env = - { env with error_callback = Some error_callback } + +let with_error_callback error_callback env = { env with error_callback = Some error_callback } (* 
other helper functions: *) let error_list env = List.iter (error_at env) -let last_loc env = match !(env.last_lex_result) with -| Some lex_result -> Some (Lex_result.loc lex_result) -| None -> None -let last_token env = match !(env.last_lex_result) with -| Some lex_result -> Some (Lex_result.token lex_result) -| None -> None +let last_loc env = + match !(env.last_lex_result) with + | Some lex_result -> Some (Lex_result.loc lex_result) + | None -> None + +let last_token env = + match !(env.last_lex_result) with + | Some lex_result -> Some (Lex_result.token lex_result) + | None -> None let without_error_callback env = { env with error_callback = None } let add_label env label = { env with labels = SSet.add label env.labels } -let enter_function env ~async ~generator = { env with + +let enter_function env ~async ~generator = + { + env with in_formal_parameters = false; in_function = true; in_loop = false; @@ -373,11 +421,41 @@ let enter_function env ~async ~generator = { env with (* #sec-keywords *) let is_keyword = function - | "await" | "break" | "case" | "catch" | "class" | "const" | "continue" - | "debugger" | "default" | "delete" | "do" | "else" | "export" | "extends" - | "finally" | "for" | "function" | "if" | "import" | "in" | "instanceof" - | "new" | "return" | "super" | "switch" | "this" | "throw" | "try" - | "typeof" | "var" | "void" | "while" | "with" | "yield" -> true + | "await" + | "break" + | "case" + | "catch" + | "class" + | "const" + | "continue" + | "debugger" + | "default" + | "delete" + | "do" + | "else" + | "export" + | "extends" + | "finally" + | "for" + | "function" + | "if" + | "import" + | "in" + | "instanceof" + | "new" + | "return" + | "super" + | "switch" + | "this" + | "throw" + | "try" + | "typeof" + | "var" + | "void" + | "while" + | "with" + | "yield" -> + true | _ -> false (* #sec-future-reserved-words *) @@ -385,11 +463,12 @@ let is_future_reserved = function | "enum" -> true | _ -> false -let token_is_future_reserved = Token.(function - | T_IDENTIFIER { raw; _ } when is_future_reserved raw -> true - | T_ENUM -> true - | _ -> false -) +let token_is_future_reserved = + Token.( + function + | T_IDENTIFIER { raw; _ } when is_future_reserved raw -> true + | T_ENUM -> true + | _ -> false) (* #sec-strict-mode-of-ecmascript *) let is_strict_reserved = function @@ -400,48 +479,70 @@ let is_strict_reserved = function | "protected" | "public" | "static" - | "yield" -> true + | "yield" -> + true | _ -> false -let token_is_strict_reserved = Token.(function - | T_IDENTIFIER { raw; _ } when is_strict_reserved raw -> true - | T_INTERFACE - | T_IMPLEMENTS - | T_PACKAGE - | T_PRIVATE - | T_PROTECTED - | T_PUBLIC - | T_STATIC - | T_YIELD - -> true - | _ - -> false -) +let token_is_strict_reserved = + Token.( + function + | T_IDENTIFIER { raw; _ } when is_strict_reserved raw -> true + | T_INTERFACE + | T_IMPLEMENTS + | T_PACKAGE + | T_PRIVATE + | T_PROTECTED + | T_PUBLIC + | T_STATIC + | T_YIELD -> + true + | _ -> false) (* #sec-strict-mode-of-ecmascript *) let is_restricted = function | "eval" - | "arguments" -> true + | "arguments" -> + true | _ -> false -let token_is_restricted = Token.(function - | T_IDENTIFIER { raw; _ } when is_restricted raw -> true - | _ -> false -) +let token_is_restricted = + Token.( + function + | T_IDENTIFIER { raw; _ } when is_restricted raw -> true + | _ -> false) (* #sec-reserved-words *) let is_reserved str_val = - is_keyword str_val || - is_future_reserved str_val || + is_keyword str_val + || is_future_reserved str_val + || match str_val with 
- | "null" | "true" | "false" -> true + | "null" + | "true" + | "false" -> + true | _ -> false let is_reserved_type str_val = match str_val with - | "any" | "bool" | "boolean" | "empty" | "false" | "mixed" | "null" | "number" - | "static" | "string" | "true" | "typeof" | "void" | "interface" | "extends" - -> true + | "any" + | "bool" + | "boolean" + | "empty" + | "false" + | "mixed" + | "null" + | "number" + | "bigint" + | "static" + | "string" + | "true" + | "typeof" + | "void" + | "interface" + | "extends" + | "_" -> + true | _ -> false (* Answer questions about what comes next *) @@ -450,31 +551,53 @@ module Peek = struct open Token let ith_token ~i env = Lex_result.token (lookahead ~i env) + let ith_loc ~i env = Lex_result.loc (lookahead ~i env) + let ith_errors ~i env = Lex_result.errors (lookahead ~i env) + let ith_comments ~i env = Lex_result.comments (lookahead ~i env) + let ith_lex_env ~i env = Lookahead.lex_env !(env.lookahead) i let token env = ith_token ~i:0 env + let loc env = ith_loc ~i:0 env - let loc_skip_lookeahead env = - let loc = loc env in - Loc.({ loc with _end = loc.start}) + + (* loc_skip_lookahead is used to give a loc hint to optional tokens such as type annotations *) + let loc_skip_lookahead env = + let loc = + match last_loc env with + | Some loc -> loc + | None -> failwith "Peeking current location when not available" + in + Loc.{ loc with start = loc._end } let errors env = ith_errors ~i:0 env + let comments env = ith_comments ~i:0 env + let lex_env env = ith_lex_env ~i:0 env (* True if there is a line terminator before the next token *) - let is_line_terminator env = - match last_loc env with - | None -> false - | Some loc' -> - (loc env).start.line > loc'.start.line + let ith_is_line_terminator ~i env = + let loc = + if i > 0 then + Some (ith_loc ~i:(i - 1) env) + else + last_loc env + in + match loc with + | None -> false + | Some loc' -> (ith_loc ~i env).start.line > loc'.start.line + + let is_line_terminator env = ith_is_line_terminator ~i:0 env let is_implicit_semicolon env = match token env with - | T_EOF | T_RCURLY -> true + | T_EOF + | T_RCURLY -> + true | T_SEMICOLON -> false | _ -> is_line_terminator env @@ -491,173 +614,176 @@ module Peek = struct | T_ASYNC | T_AWAIT | T_POUND - | T_IDENTIFIER _ -> true + | T_IDENTIFIER _ -> + true | _ -> false let ith_is_type_identifier ~i env = match lex_mode env with | Lex_mode.TYPE -> - begin match ith_token ~i env with - | T_IDENTIFIER _ -> true - | _ -> false + begin + match ith_token ~i env with + | T_IDENTIFIER _ -> true + | _ -> false end | Lex_mode.NORMAL -> (* Sometimes we peek at type identifiers while in normal lex mode. For example, when deciding whether a `type` token is an identifier or the start of a type declaration, based on whether the following token `is_type_identifier`. 
*) - begin match ith_token ~i env with - | T_IDENTIFIER { raw; _ } when is_reserved_type raw -> false - - (* reserved type identifiers, but these don't appear in NORMAL mode *) - | T_ANY_TYPE - | T_MIXED_TYPE - | T_EMPTY_TYPE - | T_NUMBER_TYPE - | T_STRING_TYPE - | T_VOID_TYPE - | T_BOOLEAN_TYPE _ - | T_NUMBER_SINGLETON_TYPE _ - - (* identifier-ish *) - | T_ASYNC - | T_AWAIT - | T_BREAK - | T_CASE - | T_CATCH - | T_CLASS - | T_CONST - | T_CONTINUE - | T_DEBUGGER - | T_DECLARE - | T_DEFAULT - | T_DELETE - | T_DO - | T_ELSE - | T_ENUM - | T_EXPORT - | T_EXTENDS - | T_FALSE - | T_FINALLY - | T_FOR - | T_FUNCTION - | T_IDENTIFIER _ - | T_IF - | T_IMPLEMENTS - | T_IMPORT - | T_IN - | T_INSTANCEOF - | T_INTERFACE - | T_LET - | T_NEW - | T_NULL - | T_OF - | T_OPAQUE - | T_PACKAGE - | T_PRIVATE - | T_PROTECTED - | T_PUBLIC - | T_RETURN - | T_SUPER - | T_SWITCH - | T_THIS - | T_THROW - | T_TRUE - | T_TRY - | T_TYPE - | T_VAR - | T_WHILE - | T_WITH - | T_YIELD -> true - - (* identifier-ish, but not valid types *) - | T_STATIC - | T_TYPEOF - | T_VOID - -> false - - (* syntax *) - | T_LCURLY - | T_RCURLY - | T_LCURLYBAR - | T_RCURLYBAR - | T_LPAREN - | T_RPAREN - | T_LBRACKET - | T_RBRACKET - | T_SEMICOLON - | T_COMMA - | T_PERIOD - | T_ARROW - | T_ELLIPSIS - | T_AT - | T_POUND - | T_CHECKS - | T_RSHIFT3_ASSIGN - | T_RSHIFT_ASSIGN - | T_LSHIFT_ASSIGN - | T_BIT_XOR_ASSIGN - | T_BIT_OR_ASSIGN - | T_BIT_AND_ASSIGN - | T_MOD_ASSIGN - | T_DIV_ASSIGN - | T_MULT_ASSIGN - | T_EXP_ASSIGN - | T_MINUS_ASSIGN - | T_PLUS_ASSIGN - | T_ASSIGN - | T_PLING_PERIOD - | T_PLING_PLING - | T_PLING - | T_COLON - | T_OR - | T_AND - | T_BIT_OR - | T_BIT_XOR - | T_BIT_AND - | T_EQUAL - | T_NOT_EQUAL - | T_STRICT_EQUAL - | T_STRICT_NOT_EQUAL - | T_LESS_THAN_EQUAL - | T_GREATER_THAN_EQUAL - | T_LESS_THAN - | T_GREATER_THAN - | T_LSHIFT - | T_RSHIFT - | T_RSHIFT3 - | T_PLUS - | T_MINUS - | T_DIV - | T_MULT - | T_EXP - | T_MOD - | T_NOT - | T_BIT_NOT - | T_INCR - | T_DECR - | T_EOF - -> false - - (* literals *) - | T_NUMBER _ - | T_STRING _ - | T_TEMPLATE_PART _ - | T_REGEXP _ - - (* misc that shouldn't appear in NORMAL mode *) - | T_JSX_IDENTIFIER _ - | T_JSX_TEXT _ - | T_ERROR _ - -> false + begin + match ith_token ~i env with + | T_IDENTIFIER { raw; _ } when is_reserved_type raw -> false + (* reserved type identifiers, but these don't appear in NORMAL mode *) + | T_ANY_TYPE + | T_MIXED_TYPE + | T_EMPTY_TYPE + | T_NUMBER_TYPE + | T_BIGINT_TYPE + | T_STRING_TYPE + | T_VOID_TYPE + | T_BOOLEAN_TYPE _ + | T_NUMBER_SINGLETON_TYPE _ + | T_BIGINT_SINGLETON_TYPE _ + (* identifier-ish *) + + | T_ASYNC + | T_AWAIT + | T_BREAK + | T_CASE + | T_CATCH + | T_CLASS + | T_CONST + | T_CONTINUE + | T_DEBUGGER + | T_DECLARE + | T_DEFAULT + | T_DELETE + | T_DO + | T_ELSE + | T_ENUM + | T_EXPORT + | T_EXTENDS + | T_FALSE + | T_FINALLY + | T_FOR + | T_FUNCTION + | T_IDENTIFIER _ + | T_IF + | T_IMPLEMENTS + | T_IMPORT + | T_IN + | T_INSTANCEOF + | T_INTERFACE + | T_LET + | T_NEW + | T_NULL + | T_OF + | T_OPAQUE + | T_PACKAGE + | T_PRIVATE + | T_PROTECTED + | T_PUBLIC + | T_RETURN + | T_SUPER + | T_SWITCH + | T_THIS + | T_THROW + | T_TRUE + | T_TRY + | T_TYPE + | T_VAR + | T_WHILE + | T_WITH + | T_YIELD -> + true + (* identifier-ish, but not valid types *) + | T_STATIC + | T_TYPEOF + | T_VOID -> + false + (* syntax *) + | T_LCURLY + | T_RCURLY + | T_LCURLYBAR + | T_RCURLYBAR + | T_LPAREN + | T_RPAREN + | T_LBRACKET + | T_RBRACKET + | T_SEMICOLON + | T_COMMA + | T_PERIOD + | T_ARROW + | T_ELLIPSIS + | T_AT + | T_POUND + | T_CHECKS + | T_RSHIFT3_ASSIGN + 
| T_RSHIFT_ASSIGN + | T_LSHIFT_ASSIGN + | T_BIT_XOR_ASSIGN + | T_BIT_OR_ASSIGN + | T_BIT_AND_ASSIGN + | T_MOD_ASSIGN + | T_DIV_ASSIGN + | T_MULT_ASSIGN + | T_EXP_ASSIGN + | T_MINUS_ASSIGN + | T_PLUS_ASSIGN + | T_ASSIGN + | T_PLING_PERIOD + | T_PLING_PLING + | T_PLING + | T_COLON + | T_OR + | T_AND + | T_BIT_OR + | T_BIT_XOR + | T_BIT_AND + | T_EQUAL + | T_NOT_EQUAL + | T_STRICT_EQUAL + | T_STRICT_NOT_EQUAL + | T_LESS_THAN_EQUAL + | T_GREATER_THAN_EQUAL + | T_LESS_THAN + | T_GREATER_THAN + | T_LSHIFT + | T_RSHIFT + | T_RSHIFT3 + | T_PLUS + | T_MINUS + | T_DIV + | T_MULT + | T_EXP + | T_MOD + | T_NOT + | T_BIT_NOT + | T_INCR + | T_DECR + | T_EOF -> + false + (* literals *) + | T_NUMBER _ + | T_BIGINT _ + | T_STRING _ + | T_TEMPLATE_PART _ + | T_REGEXP _ + (* misc that shouldn't appear in NORMAL mode *) + + | T_JSX_IDENTIFIER _ + | T_JSX_TEXT _ + | T_ERROR _ -> + false end | Lex_mode.JSX_TAG | Lex_mode.JSX_CHILD | Lex_mode.TEMPLATE - | Lex_mode.REGEXP -> false + | Lex_mode.REGEXP -> + false - let ith_is_identifier_name ~i env = - ith_is_identifier ~i env || ith_is_type_identifier ~i env + let ith_is_identifier_name ~i env = ith_is_identifier ~i env || ith_is_type_identifier ~i env (* This returns true if the next token is identifier-ish (even if it is an error) *) @@ -668,18 +794,19 @@ module Peek = struct let is_type_identifier env = ith_is_type_identifier ~i:0 env let is_function env = - token env = T_FUNCTION || - (token env = T_ASYNC && ith_token ~i:1 env = T_FUNCTION && - (loc env)._end.line = (ith_loc ~i:1 env).start.line) + token env = T_FUNCTION + || token env = T_ASYNC + && ith_token ~i:1 env = T_FUNCTION + && (loc env)._end.line = (ith_loc ~i:1 env).start.line let is_class env = match token env with | T_CLASS - | T_AT -> true + | T_AT -> + true | _ -> false end - (*****************************************************************************) (* Errors *) (*****************************************************************************) @@ -689,38 +816,35 @@ let error env e = let loc = Peek.loc env in error_at env (loc, e) -let get_unexpected_error token = - let open Token in - match token with - | T_EOF -> Error.UnexpectedEOS - | T_NUMBER _ -> Error.UnexpectedNumber - | T_JSX_TEXT _ - | T_STRING _ -> Error.UnexpectedString - | T_IDENTIFIER _ -> Error.UnexpectedIdentifier - | t when token_is_future_reserved t -> Error.UnexpectedReserved - | t when token_is_strict_reserved t -> Error.StrictReservedWord - | _ -> Error.UnexpectedToken (value_of_token token) - -let error_unexpected env = +let get_unexpected_error ?expected token = + if token_is_future_reserved token then + Parse_error.UnexpectedReserved + else if token_is_strict_reserved token then + Parse_error.StrictReservedWord + else + let unexpected = Token.explanation_of_token token in + match expected with + | Some expected_msg -> Parse_error.UnexpectedWithExpected (unexpected, expected_msg) + | None -> Parse_error.Unexpected unexpected + +let error_unexpected ?expected env = (* So normally we consume the lookahead lex result when Eat.token calls * Parser_env.advance, which will add any lexing errors to our list of errors. 
* However, raising an unexpected error for a lookahead is kind of like * consuming that token, so we should process any lexing errors before * complaining about the unexpected token *) error_list env (Peek.errors env); - error env (get_unexpected_error (Peek.token env)) + error env (get_unexpected_error ?expected (Peek.token env)) -let error_on_decorators env = List.iter - (fun decorator -> error_at env ((fst decorator), Error.UnsupportedDecorator)) +let error_on_decorators env = + List.iter (fun decorator -> error_at env (fst decorator, Parse_error.UnsupportedDecorator)) let strict_error env e = if in_strict_mode env then error env e -let strict_error_at env (loc, e) = - if in_strict_mode env then error_at env (loc, e) + +let strict_error_at env (loc, e) = if in_strict_mode env then error_at env (loc, e) let function_as_statement_error_at env loc = - error_at env (loc, Error.FunctionAsStatement { - in_strict_mode = in_strict_mode env - }) + error_at env (loc, Parse_error.FunctionAsStatement { in_strict_mode = in_strict_mode env }) (* Consume zero or more tokens *) module Eat = struct @@ -728,24 +852,24 @@ module Eat = struct let token env = (* If there's a token_sink, emit the lexed token before moving forward *) (match !(env.token_sink) with - | None -> () - | Some token_sink -> - token_sink { - token_loc = Peek.loc env; - token = Peek.token env; - (** - * The lex mode is useful because it gives context to some - * context-sensitive tokens. - * - * Some examples of such tokens include: - * - * `=>` - Part of an arrow function? or part of a type annotation? - * `<` - A less-than? Or an opening to a JSX element? - * ...etc... - *) - token_context=(lex_mode env); - } - ); + | None -> () + | Some token_sink -> + token_sink + { + token_loc = Peek.loc env; + token = Peek.token env; + (* + * The lex mode is useful because it gives context to some + * context-sensitive tokens. + * + * Some examples of such tokens include: + * + * `=>` - Part of an arrow function? or part of a type annotation? + * `<` - A less-than? Or an opening to a JSX element? + * ...etc... + *) + token_context = lex_mode env; + }); env.lex_env := Peek.lex_env env; @@ -760,16 +884,20 @@ module Eat = struct env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env) let pop_lex_mode env = - let new_stack = match !(env.lex_mode_stack) with - | _mode::stack -> stack - | _ -> failwith "Popping lex mode from empty stack" in + let new_stack = + match !(env.lex_mode_stack) with + | _mode :: stack -> stack + | _ -> failwith "Popping lex mode from empty stack" + in env.lex_mode_stack := new_stack; env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env) let double_pop_lex_mode env = - let new_stack = match !(env.lex_mode_stack) with - | _::_::stack -> stack - | _ -> failwith "Popping lex mode from empty stack" in + let new_stack = + match !(env.lex_mode_stack) with + | _ :: _ :: stack -> stack + | _ -> failwith "Popping lex mode from empty stack" + in env.lex_mode_stack := new_stack; env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env) @@ -777,34 +905,40 @@ module Eat = struct * semicolons are inserted. First, if we reach the EOF. Second, if the next * token is } or is separated by a LineTerminator. 
*) - let semicolon env = - if not (Peek.is_implicit_semicolon env) - then - if Peek.token env = Token.T_SEMICOLON - then token env - else error_unexpected env + let semicolon ?(expected = "the token `;`") env = + if not (Peek.is_implicit_semicolon env) then + if Peek.token env = Token.T_SEMICOLON then + token env + else + error_unexpected ~expected env end module Expect = struct let token env t = - if Peek.token env <> t then error_unexpected env; + ( if Peek.token env <> t then + let expected = Token.explanation_of_token ~use_article:true t in + error_unexpected ~expected env ); Eat.token env let identifier env name = - begin match Peek.token env with - | Token.T_IDENTIFIER { raw; _ } when raw = name -> () - | _ -> error_unexpected env + let t = Peek.token env in + begin + match t with + | Token.T_IDENTIFIER { raw; _ } when raw = name -> () + | _ -> + let expected = Printf.sprintf "the identifier `%s`" name in + error_unexpected ~expected env end; Eat.token env (* If the next token is t, then eat it and return true * else return false *) let maybe env t = - if Peek.token env = t - then begin + if Peek.token env = t then ( Eat.token env; true - end else false + ) else + false end (* This module allows you to try parsing and rollback if you need. This is not @@ -817,12 +951,12 @@ module Try = struct exception Rollback type saved_state = { - saved_errors : (Loc.t * Error.t) list; - saved_comments : Loc.t Flow_ast.Comment.t list; - saved_last_lex_result : Lex_result.t option; - saved_lex_mode_stack : Lex_mode.t list; - saved_lex_env : Lex_env.t; - token_buffer : ((token_sink_result -> unit) * token_sink_result Queue.t) option; + saved_errors: (Loc.t * Parse_error.t) list; + saved_comments: Loc.t Flow_ast.Comment.t list; + saved_last_lex_result: Lex_result.t option; + saved_lex_mode_stack: Lex_mode.t list; + saved_lex_env: Lex_env.t; + token_buffer: ((token_sink_result -> unit) * token_sink_result Queue.t) option; } let save_state env = @@ -830,27 +964,25 @@ module Try = struct match !(env.token_sink) with | None -> None | Some orig_token_sink -> - let buffer = Queue.create () in - env.token_sink := Some(fun token_data -> - Queue.add token_data buffer - ); - Some(orig_token_sink, buffer) + let buffer = Queue.create () in + env.token_sink := Some (fun token_data -> Queue.add token_data buffer); + Some (orig_token_sink, buffer) in { - saved_errors = !(env.errors); - saved_comments = !(env.comments); + saved_errors = !(env.errors); + saved_comments = !(env.comments); saved_last_lex_result = !(env.last_lex_result); - saved_lex_mode_stack = !(env.lex_mode_stack); - saved_lex_env = !(env.lex_env); + saved_lex_mode_stack = !(env.lex_mode_stack); + saved_lex_env = !(env.lex_env); token_buffer; } let reset_token_sink ~flush env token_buffer_info = match token_buffer_info with | None -> () - | Some(orig_token_sink, token_buffer) -> - env.token_sink := Some orig_token_sink; - if flush then Queue.iter orig_token_sink token_buffer + | Some (orig_token_sink, token_buffer) -> + env.token_sink := Some orig_token_sink; + if flush then Queue.iter orig_token_sink token_buffer let rollback_state env saved_state = reset_token_sink ~flush:false env saved_state.token_buffer; @@ -869,8 +1001,7 @@ module Try = struct let to_parse env parse = let saved_state = save_state env in - try success env saved_state (parse env) - with Rollback -> rollback_state env saved_state + (try success env saved_state (parse env) with Rollback -> rollback_state env saved_state) let or_else env ~fallback parse = match to_parse env parse 
with diff --git a/src/parser/parser_env.mli b/src/parser/parser_env.mli index dbd657958e4..36a56e8d70b 100644 --- a/src/parser/parser_env.mli +++ b/src/parser/parser_env.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -18,7 +18,8 @@ module Lex_mode : sig | JSX_CHILD | TEMPLATE | REGEXP - val debug_string_of_lex_mode: t -> string + + val debug_string_of_lex_mode : t -> string end type token_sink_result = { @@ -28,6 +29,7 @@ type token_sink_result = { } type parse_options = { + enums: bool; esproposal_class_instance_fields: bool; esproposal_class_static_fields: bool; esproposal_decorators: bool; @@ -37,6 +39,7 @@ type parse_options = { types: bool; use_strict: bool; } + val default_parse_options : parse_options type env @@ -48,118 +51,199 @@ type allowed_super = (* constructor: *) val init_env : - ?token_sink:(token_sink_result -> unit) option - -> ?parse_options:parse_options option - -> File_key.t option - -> string - -> env + ?token_sink:(token_sink_result -> unit) option -> + ?parse_options:parse_options option -> + File_key.t option -> + string -> + env (* getters: *) val in_strict_mode : env -> bool + val last_loc : env -> Loc.t option + val last_token : env -> Token.t option + val in_export : env -> bool + val labels : env -> SSet.t + val comments : env -> Loc.t Flow_ast.Comment.t list + val in_loop : env -> bool + val in_switch : env -> bool + val in_formal_parameters : env -> bool + val in_function : env -> bool + val allow_yield : env -> bool -val allow_await: env -> bool + +val allow_await : env -> bool + val allow_directive : env -> bool + val allow_super : env -> allowed_super + val no_in : env -> bool + val no_call : env -> bool + val no_let : env -> bool + val no_anon_function_type : env -> bool + val no_new : env -> bool + val errors : env -> (Loc.t * Parse_error.t) list + val parse_options : env -> parse_options + val source : env -> File_key.t option + val should_parse_types : env -> bool (* mutators: *) val error_at : env -> Loc.t * Parse_error.t -> unit + val error : env -> Parse_error.t -> unit -val error_unexpected : env -> unit + +val error_unexpected : ?expected:string -> env -> unit + val error_on_decorators : env -> (Loc.t * 'a) list -> unit + val strict_error : env -> Parse_error.t -> unit + val strict_error_at : env -> Loc.t * Parse_error.t -> unit + val function_as_statement_error_at : env -> Loc.t -> unit + val error_list : env -> (Loc.t * Parse_error.t) list -> unit -val record_export: env -> Loc.t * string -> unit + +val record_export : env -> (Loc.t, Loc.t) Flow_ast.Identifier.t -> unit + val enter_class : env -> unit + val exit_class : env -> unit + val add_declared_private : env -> string -> unit + val add_used_private : env -> string -> Loc.t -> unit (* functional operations -- these return shallow copies, so future mutations to * the returned env will also affect the original: *) val with_strict : bool -> env -> env + val with_in_formal_parameters : bool -> env -> env + val with_in_function : bool -> env -> env + val with_allow_yield : bool -> env -> env + val with_allow_await : bool -> env -> env + val with_allow_directive : bool -> env -> env + val with_allow_super : allowed_super -> env -> env + val with_no_let : bool -> env -> env + val with_in_loop : bool -> env -> env + val with_no_in : bool -> env -> env + val with_no_anon_function_type : bool -> 
env -> env + val with_no_new : bool -> env -> env + val with_in_switch : bool -> env -> env + val with_in_export : bool -> env -> env + val with_no_call : bool -> env -> env + val with_error_callback : (env -> Parse_error.t -> unit) -> env -> env val without_error_callback : env -> env val add_label : env -> string -> env + val enter_function : env -> async:bool -> generator:bool -> env val is_reserved : string -> bool + val is_future_reserved : string -> bool + val is_strict_reserved : string -> bool + val token_is_strict_reserved : Token.t -> bool + val is_restricted : string -> bool + val is_reserved_type : string -> bool + val token_is_restricted : Token.t -> bool module Peek : sig val token : env -> Token.t + val loc : env -> Loc.t - val loc_skip_lookeahead : env -> Loc.t + + val loc_skip_lookahead : env -> Loc.t + val errors : env -> (Loc.t * Parse_error.t) list + val comments : env -> Loc.t Flow_ast.Comment.t list + val is_line_terminator : env -> bool + val is_implicit_semicolon : env -> bool + val is_identifier : env -> bool + val is_type_identifier : env -> bool + val is_identifier_name : env -> bool + val is_function : env -> bool + val is_class : env -> bool val ith_token : i:int -> env -> Token.t + val ith_loc : i:int -> env -> Loc.t + val ith_errors : i:int -> env -> (Loc.t * Parse_error.t) list + val ith_comments : i:int -> env -> Loc.t Flow_ast.Comment.t list + + val ith_is_line_terminator : i:int -> env -> bool + val ith_is_identifier : i:int -> env -> bool + val ith_is_identifier_name : i:int -> env -> bool + val ith_is_type_identifier : i:int -> env -> bool end module Eat : sig val token : env -> unit + val push_lex_mode : env -> Lex_mode.t -> unit + val pop_lex_mode : env -> unit + val double_pop_lex_mode : env -> unit - val semicolon : env -> unit + + val semicolon : ?expected:string -> env -> unit end module Expect : sig val token : env -> Token.t -> unit + val identifier : env -> string -> unit + val maybe : env -> Token.t -> bool end @@ -170,6 +254,7 @@ module Try : sig exception Rollback - val to_parse: env -> (env -> 'a) -> 'a parse_result - val or_else: env -> fallback:'a -> (env -> 'a) -> 'a + val to_parse : env -> (env -> 'a) -> 'a parse_result + + val or_else : env -> fallback:'a -> (env -> 'a) -> 'a end diff --git a/src/parser/parser_flow.ml b/src/parser/parser_flow.ml index b87a8dfe895..3a794872e95 100644 --- a/src/parser/parser_flow.ml +++ b/src/parser/parser_flow.ml @@ -1,15 +1,13 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Token open Parser_env -module Error = Parse_error open Parser_common (* Sometimes we add the same error for multiple different reasons. This is hard @@ -17,20 +15,28 @@ open Parser_common a reversed list of errors and returns the list in forward order with dupes removed. This differs from a set because the original order is preserved. 
*) let filter_duplicate_errors = - let module ErrorSet = Set.Make(struct - type t = Loc.t * Error.t + let module PrintableErrorSet = Set.Make (struct + type t = Loc.t * Parse_error.t + let compare (a_loc, a_error) (b_loc, b_error) = let loc = Loc.compare a_loc b_loc in - if loc = 0 - then Pervasives.compare a_error b_error - else loc + if loc = 0 then + Pervasives.compare a_error b_error + else + loc end) in fun errs -> let errs = List.rev errs in - let _, deduped = List.fold_left (fun (set, deduped) err -> - if ErrorSet.mem err set then (set, deduped) - else (ErrorSet.add err set, err::deduped) - ) (ErrorSet.empty, []) errs in + let (_, deduped) = + List.fold_left + (fun (set, deduped) err -> + if PrintableErrorSet.mem err set then + (set, deduped) + else + (PrintableErrorSet.add err set, err :: deduped)) + (PrintableErrorSet.empty, []) + errs + in List.rev deduped module rec Parse : PARSER = struct @@ -39,7 +45,8 @@ module rec Parse : PARSER = struct module Pattern_cover = Pattern_cover.Cover (Parse) module Expression = Expression_parser.Expression (Parse) (Type) (Declaration) (Pattern_cover) module Object = Object_parser.Object (Parse) (Type) (Declaration) (Expression) (Pattern_cover) - module Statement = Statement_parser.Statement (Parse) (Type) (Declaration) (Object) (Pattern_cover) + module Statement = + Statement_parser.Statement (Parse) (Type) (Declaration) (Object) (Pattern_cover) module Pattern = Pattern_parser.Pattern (Parse) (Type) module JSX = Jsx_parser.JSX (Parse) @@ -47,50 +54,44 @@ module rec Parse : PARSER = struct let stmts = module_body_with_directives env (fun _ -> false) in let end_loc = Peek.loc env in Expect.token env T_EOF; - let loc = match stmts with - | [] -> end_loc - | _ -> Loc.btwn (fst (List.hd stmts)) (fst (List.hd (List.rev stmts))) in + let loc = + match stmts with + | [] -> end_loc + | _ -> Loc.btwn (fst (List.hd stmts)) (fst (List.hd (List.rev stmts))) + in let comments = List.rev (comments env) in - loc, stmts, comments + (loc, stmts, comments) and directives = - let check env token = - match token with - | T_STRING (loc, _, _, octal) -> - if octal then strict_error_at env (loc, Error.StrictOctalLiteral) - | _ -> failwith ("Nooo: "^(token_to_string token)^"\n") - - in let rec statement_list env term_fn item_fn (string_tokens, stmts) = - match Peek.token env with - | T_EOF -> env, string_tokens, stmts - | t when term_fn t -> env, string_tokens, stmts - | T_STRING _ as string_token -> - let possible_directive = item_fn env in - let stmts = possible_directive::stmts in - (match possible_directive with - | _, Ast.Statement.Expression { - Ast.Statement.Expression.directive = Some raw; _ - } -> - (* 14.1.1 says that it has to be "use strict" without any + let check env token = + match token with + | T_STRING (loc, _, _, octal) -> + if octal then strict_error_at env (loc, Parse_error.StrictOctalLiteral) + | _ -> failwith ("Nooo: " ^ token_to_string token ^ "\n") + in + let rec statement_list env term_fn item_fn (string_tokens, stmts) = + match Peek.token env with + | T_EOF -> (env, string_tokens, stmts) + | t when term_fn t -> (env, string_tokens, stmts) + | T_STRING _ as string_token -> + let possible_directive = item_fn env in + let stmts = possible_directive :: stmts in + (match possible_directive with + | (_, Ast.Statement.Expression { Ast.Statement.Expression.directive = Some raw; _ }) -> + (* 14.1.1 says that it has to be "use strict" without any escapes, so "use\x20strict" is disallowed. 
*) - let strict = (in_strict_mode env) || (raw = "use strict") in - let string_tokens = string_token::string_tokens in - statement_list - (env |> with_strict strict) - term_fn - item_fn - (string_tokens, stmts) - | _ -> - env, string_tokens, stmts) - | _ -> - env, string_tokens, stmts - - in fun env term_fn item_fn -> - let env = with_allow_directive true env in - let env, string_tokens, stmts = statement_list env term_fn item_fn ([], []) in - let env = with_allow_directive false env in - List.iter (check env) (List.rev string_tokens); - env, stmts + let strict = in_strict_mode env || raw = "use strict" in + let string_tokens = string_token :: string_tokens in + statement_list (env |> with_strict strict) term_fn item_fn (string_tokens, stmts) + | _ -> (env, string_tokens, stmts)) + | _ -> (env, string_tokens, stmts) + in + fun env term_fn item_fn -> + let env = with_allow_directive true env in + let (env, string_tokens, stmts) = statement_list env term_fn item_fn ([], []) in + let env = with_allow_directive false env in + List.iter (check env) (List.rev string_tokens); + (env, stmts) (* 15.2 *) and module_item env = @@ -98,142 +99,142 @@ module rec Parse : PARSER = struct match Peek.token env with | T_EXPORT -> Statement.export_declaration ~decorators env | T_IMPORT -> - error_on_decorators env decorators; - let statement = match Peek.ith_token ~i:1 env with - | T_LPAREN | T_PERIOD -> Statement.expression env - | _ -> Statement.import_declaration env in - statement + error_on_decorators env decorators; + let statement = + match Peek.ith_token ~i:1 env with + | T_LPAREN | T_PERIOD -> Statement.expression env + | _ -> Statement.import_declaration env + in + statement | T_DECLARE when Peek.ith_token ~i:1 env = T_EXPORT -> - error_on_decorators env decorators; - Statement.declare_export_declaration env + error_on_decorators env decorators; + Statement.declare_export_declaration env | _ -> statement_list_item env ~decorators and module_body_with_directives env term_fn = - let env, directives = directives env term_fn module_item in + let (env, directives) = directives env term_fn module_item in let stmts = module_body ~term_fn env in (* Prepend the directives *) - List.fold_left (fun acc stmt -> stmt::acc) stmts directives + List.fold_left (fun acc stmt -> stmt :: acc) stmts directives and module_body = let rec module_item_list env term_fn acc = match Peek.token env with | T_EOF -> List.rev acc | t when term_fn t -> List.rev acc - | _ -> module_item_list env term_fn (module_item env::acc) - - in fun ~term_fn env -> - module_item_list env term_fn [] + | _ -> module_item_list env term_fn (module_item env :: acc) + in + (fun ~term_fn env -> module_item_list env term_fn []) and statement_list_with_directives ~term_fn env = - let env, directives = directives env term_fn statement_list_item in + let (env, directives) = directives env term_fn statement_list_item in let stmts = statement_list ~term_fn env in (* Prepend the directives *) - let stmts = List.fold_left (fun acc stmt -> stmt::acc) stmts directives in - stmts, (in_strict_mode env) + let stmts = List.fold_left (fun acc stmt -> stmt :: acc) stmts directives in + (stmts, in_strict_mode env) and statement_list = let rec statements env term_fn acc = match Peek.token env with | T_EOF -> List.rev acc | t when term_fn t -> List.rev acc - | _ -> statements env term_fn ((statement_list_item env)::acc) + | _ -> statements env term_fn (statement_list_item env :: acc) + in + (fun ~term_fn env -> statements env term_fn []) - in fun ~term_fn env -> 
statements env term_fn [] - - - and statement_list_item ?(decorators=[]) env = - if not (Peek.is_class env) - then error_on_decorators env decorators; - Statement.(match Peek.token env with - (* Remember kids, these look like statements but they're not + and statement_list_item ?(decorators = []) env = + if not (Peek.is_class env) then error_on_decorators env decorators; + Statement.( + match Peek.token env with + (* Remember kids, these look like statements but they're not * statements... (see section 13) *) - | T_LET -> let_ env - | T_CONST -> var_or_const env - | _ when Peek.is_function env -> Declaration._function env - | _ when Peek.is_class env -> class_declaration env decorators - | T_INTERFACE -> interface env - | T_DECLARE -> declare env - | T_TYPE -> type_alias env - | T_OPAQUE -> opaque_type env - | _ -> statement env) + | T_LET -> let_ env + | T_CONST -> const env + | _ when Peek.is_function env -> Declaration._function env + | _ when Peek.is_class env -> class_declaration env decorators + | T_INTERFACE -> interface env + | T_DECLARE -> declare env + | T_TYPE -> type_alias env + | T_OPAQUE -> opaque_type env + | T_ENUM when (parse_options env).enums -> Declaration.enum_declaration env + | _ -> statement env) and statement env = - Statement.(match Peek.token env with - | T_EOF -> - error_unexpected env; - Peek.loc env, Ast.Statement.Empty - | T_SEMICOLON -> empty env - | T_LCURLY -> block env - | T_VAR -> var_or_const env - | T_BREAK -> break env - | T_CONTINUE -> continue env - | T_DEBUGGER -> debugger env - | T_DO -> do_while env - | T_FOR -> for_ env - | T_IF -> if_ env - | T_RETURN -> return env - | T_SWITCH -> switch env - | T_THROW -> throw env - | T_TRY -> try_ env - | T_WHILE -> while_ env - | T_WITH -> with_ env - (* If we see an else then it's definitely an error, but we can probably - * assume that this is a malformed if statement that is missing the if *) - | T_ELSE -> if_ env - (* There are a bunch of tokens that aren't the start of any valid - * statement. We list them here in order to skip over them, rather than - * getting stuck *) - | T_COLON - | T_RPAREN - | T_RCURLY - | T_RBRACKET - | T_COMMA - | T_PERIOD - | T_PLING_PERIOD - | T_ARROW - | T_IN - | T_INSTANCEOF - | T_CATCH - | T_FINALLY - | T_CASE - | T_DEFAULT - | T_EXTENDS - | T_STATIC - | T_EXPORT (* TODO *) - | T_ELLIPSIS -> - error_unexpected env; + Statement.( + match Peek.token env with + | T_EOF -> + error_unexpected ~expected:"the start of a statement" env; + (Peek.loc env, Ast.Statement.Empty) + | T_SEMICOLON -> empty env + | T_LCURLY -> block env + | T_VAR -> var env + | T_BREAK -> break env + | T_CONTINUE -> continue env + | T_DEBUGGER -> debugger env + | T_DO -> do_while env + | T_FOR -> for_ env + | T_IF -> if_ env + | T_RETURN -> return env + | T_SWITCH -> switch env + | T_THROW -> throw env + | T_TRY -> try_ env + | T_WHILE -> while_ env + | T_WITH -> with_ env + (* If we see an else then it's definitely an error, but we can probably + * assume that this is a malformed if statement that is missing the if *) + | T_ELSE -> if_ env + (* There are a bunch of tokens that aren't the start of any valid + * statement. 
We list them here in order to skip over them, rather than + * getting stuck *) + | T_COLON + | T_RPAREN + | T_RCURLY + | T_RBRACKET + | T_COMMA + | T_PERIOD + | T_PLING_PERIOD + | T_ARROW + | T_IN + | T_INSTANCEOF + | T_CATCH + | T_FINALLY + | T_CASE + | T_DEFAULT + | T_EXTENDS + | T_STATIC + | T_EXPORT + (* TODO *) + + | T_ELLIPSIS -> + error_unexpected ~expected:"the start of a statement" env; Eat.token env; statement env - - (* The rest of these patterns handle ExpressionStatement and its negative + (* The rest of these patterns handle ExpressionStatement and its negative lookaheads, which prevent ambiguities. See https://tc39.github.io/ecma262/#sec-expression-statement *) - - | _ when Peek.is_function env -> + | _ when Peek.is_function env -> let func = Declaration._function env in function_as_statement_error_at env (fst func); func - | T_LET when Peek.ith_token ~i:1 env = T_LBRACKET -> + | T_LET when Peek.ith_token ~i:1 env = T_LBRACKET -> (* `let [foo]` is ambiguous: either a let binding pattern, or a member expression, so it is banned. *) let loc = Loc.btwn (Peek.loc env) (Peek.ith_loc ~i:1 env) in error_at env (loc, Parse_error.AmbiguousLetBracket); - Statement.expression env (* recover as a member expression *) - | _ when Peek.is_identifier env -> maybe_labeled env - | _ when Peek.is_class env -> + Statement.expression env + (* recover as a member expression *) + | _ when Peek.is_identifier env -> maybe_labeled env + | _ when Peek.is_class env -> error_unexpected env; Eat.token env; Statement.expression env - | _ -> - Statement.expression env - ) + | _ -> Statement.expression env) and expression env = let expr = Expression.assignment env in match Peek.token env with | T_COMMA -> Expression.sequence env [expr] - | _ -> - expr + | _ -> expr and expression_or_pattern env = let expr_or_pattern = Expression.assignment_cover env in @@ -242,47 +243,57 @@ module rec Parse : PARSER = struct let expr = Pattern_cover.as_expression env expr_or_pattern in let seq = Expression.sequence env [expr] in Cover_expr seq - | _ -> - expr_or_pattern + | _ -> expr_or_pattern and conditional = Expression.conditional + and assignment = Expression.assignment + and left_hand_side = Expression.left_hand_side + and object_initializer = Object._initializer + and object_key = Object.key + and class_declaration = Object.class_declaration + and class_expression = Object.class_expression and is_assignable_lhs = Expression.is_assignable_lhs - and assert_identifier_name_is_identifier ?restricted_error env (loc, name) = + and number = Expression.number + + and assert_identifier_name_is_identifier + ?restricted_error env (loc, { Ast.Identifier.name; comments = _ }) = match name with | "let" -> (* "let" is disallowed as an identifier in a few situations. 11.6.2.1 * lists them out. It is always disallowed in strict mode *) if in_strict_mode env then - strict_error_at env (loc, Error.StrictReservedWord) + strict_error_at env (loc, Parse_error.StrictReservedWord) else if no_let env then - error_at env (loc, Error.UnexpectedToken name) + error_at env (loc, Parse_error.Unexpected (Token.quote_token_value name)) | "await" -> (* `allow_await` means that `await` is allowed to be a keyword, which makes it illegal to use as an identifier. 
https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *) - if allow_await env then error_at env (loc, Error.UnexpectedReserved) + if allow_await env then error_at env (loc, Parse_error.UnexpectedReserved) | "yield" -> (* `allow_yield` means that `yield` is allowed to be a keyword, which makes it illegal to use as an identifier. https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *) - if allow_yield env then error_at env (loc, Error.UnexpectedReserved) - else strict_error_at env (loc, Error.StrictReservedWord) - | _ when is_strict_reserved name -> - strict_error_at env (loc, Error.StrictReservedWord) + if allow_yield env then + error_at env (loc, Parse_error.UnexpectedReserved) + else + strict_error_at env (loc, Parse_error.StrictReservedWord) + | _ when is_strict_reserved name -> strict_error_at env (loc, Parse_error.StrictReservedWord) | _ when is_reserved name -> - error_at env (loc, Error.UnexpectedToken name) + error_at env (loc, Parse_error.Unexpected (Token.quote_token_value name)) | _ -> - begin match restricted_error with - | Some err when is_restricted name -> strict_error_at env (loc, err) - | _ -> () + begin + match restricted_error with + | Some err when is_restricted name -> strict_error_at env (loc, err) + | _ -> () end and identifier ?restricted_error env = @@ -293,46 +304,39 @@ module rec Parse : PARSER = struct and identifier_with_type = let with_loc_helper no_optional restricted_error env = let name = identifier ~restricted_error env in - let optional = not no_optional && Peek.token env = T_PLING in - if optional then begin - if not (should_parse_types env) - then error env Error.UnexpectedTypeAnnotation; + let optional = (not no_optional) && Peek.token env = T_PLING in + if optional then ( + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation; Expect.token env T_PLING - end; - let annot = - if Peek.token env = T_COLON - then Some (Type.annotation env) - else None in - Ast.Pattern.Identifier.({ - name; - optional; - annot; - }) - - in fun env ?(no_optional=false) restricted_error -> + ); + let annot = Type.annotation_opt env in + Ast.Pattern.Identifier.{ name; optional; annot } + in + fun env ?(no_optional = false) restricted_error -> with_loc (with_loc_helper no_optional restricted_error) env and block_body env = let start_loc = Peek.loc env in Expect.token env T_LCURLY; - let term_fn = fun t -> t = T_RCURLY in + let term_fn t = t = T_RCURLY in let body = statement_list ~term_fn env in let end_loc = Peek.loc env in Expect.token env T_RCURLY; - Loc.btwn start_loc end_loc, { Ast.Statement.Block.body; } + (Loc.btwn start_loc end_loc, { Ast.Statement.Block.body }) and function_block_body env = let start_loc = Peek.loc env in Expect.token env T_LCURLY; - let term_fn = fun t -> t = T_RCURLY in - let body, strict = statement_list_with_directives ~term_fn env in + let term_fn t = t = T_RCURLY in + let (body, strict) = statement_list_with_directives ~term_fn env in let end_loc = Peek.loc env in Expect.token env T_RCURLY; - Loc.btwn start_loc end_loc, { Ast.Statement.Block.body; }, strict + (Loc.btwn start_loc end_loc, { Ast.Statement.Block.body }, strict) and jsx_element_or_fragment = JSX.element_or_fragment and pattern = Pattern.pattern + and pattern_from_expr = Pattern.from_expr end @@ -342,23 +346,33 @@ end let do_parse env parser fail = let ast = parser env in let error_list = filter_duplicate_errors (errors env) in - if fail && error_list <> [] - then raise (Error.Error error_list); - ast, 
error_list + if fail && error_list <> [] then raise (Parse_error.Error error_list); + (ast, error_list) + +(* Makes the input parser expect EOF at the end. Use this to error on trailing + * junk when parsing non-Program nodes. *) +let with_eof parser env = + let ast = parser env in + Expect.token env T_EOF; + ast -let parse_program fail ?(token_sink=None) ?(parse_options=None) filename content = +let parse_statement env fail = do_parse env (with_eof Parse.statement_list_item) fail + +let parse_expression env fail = do_parse env (with_eof Parse.expression) fail + +let parse_program fail ?(token_sink = None) ?(parse_options = None) filename content = let env = init_env ~token_sink ~parse_options filename content in do_parse env Parse.program fail -let program ?(fail=true) ?(token_sink=None) ?(parse_options=None) content = +let program ?(fail = true) ?(token_sink = None) ?(parse_options = None) content = parse_program fail ~token_sink ~parse_options None content -let program_file ?(fail=true) ?(token_sink=None) ?(parse_options=None) content filename = +let program_file ?(fail = true) ?(token_sink = None) ?(parse_options = None) content filename = parse_program fail ~token_sink ~parse_options filename content (* even if fail=false, still raises an error on a totally invalid token, since there's no legitimate fallback. *) -let json_file ?(fail=true) ?(token_sink=None) ?(parse_options=None) content filename = +let json_file ?(fail = true) ?(token_sink = None) ?(parse_options = None) content filename = let env = init_env ~token_sink ~parse_options filename content in match Peek.token env with | T_LBRACKET @@ -371,21 +385,20 @@ let json_file ?(fail=true) ?(token_sink=None) ?(parse_options=None) content file do_parse env Parse.expression fail | T_MINUS -> (match Peek.ith_token ~i:1 env with - | T_NUMBER _ -> - do_parse env Parse.expression fail + | T_NUMBER _ -> do_parse env Parse.expression fail | _ -> - error_unexpected env; - raise (Error.Error (errors env))) + error_unexpected ~expected:"a number" env; + raise (Parse_error.Error (errors env))) | _ -> - error_unexpected env; - raise (Error.Error (errors env)) + error_unexpected ~expected:"a valid JSON value" env; + raise (Parse_error.Error (errors env)) let jsx_pragma_expression = let left_hand_side env = let ast = Parse.left_hand_side (with_no_new true env) in Expect.token env T_EOF; ast - - in fun content filename -> + in + fun content filename -> let env = init_env ~token_sink:None ~parse_options:None filename content in do_parse env left_hand_side true diff --git a/src/parser/pattern_cover.ml b/src/parser/pattern_cover.ml index a613bf67728..b9eb6ec25b8 100644 --- a/src/parser/pattern_cover.ml +++ b/src/parser/pattern_cover.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
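A quick illustration of the parser entry points above: the new `with_eof` wrapper makes `parse_statement` and `parse_expression` report trailing junk after the parsed node, and `do_parse` returns the recovered AST together with the de-duplicated error list (raising `Parse_error.Error` only when `fail` is true). The sketch below is hypothetical caller code, under the assumption that these functions are exposed to consumers as `Parser_flow` (only `program`, `Printf`, and `List` from the standard library are used; the snippet and function name are illustrative, not part of this patch):

  (* Hypothetical caller: parse a snippet without raising on errors and report
     how many parse errors were recovered alongside the AST. *)
  let _count_recovered_errors () =
    let source = "let x = ; foo(x)" in
    (* [program] defaults to [fail = true]; [~fail:false] suppresses the
       Parse_error.Error exception and yields the error list with the AST. *)
    let (_ast, errors) = Parser_flow.program ~fail:false source in
    Printf.printf "recovered %d parse error(s)\n" (List.length errors)
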
@@ -11,44 +11,48 @@ open Parser_env module type COVER = sig val as_expression : env -> pattern_cover -> (Loc.t, Loc.t) Expression.t + val as_pattern : ?err:Parse_error.t -> env -> pattern_cover -> (Loc.t, Loc.t) Pattern.t + val empty_errors : pattern_errors + val rev_append_errors : pattern_errors -> pattern_errors -> pattern_errors + val rev_errors : pattern_errors -> pattern_errors end -module Cover - (Parse: PARSER) -: COVER = struct +module Cover (Parse : PARSER) : COVER = struct let as_expression env = function | Cover_expr expr -> expr | Cover_patt (expr, { if_expr; if_patt = _ }) -> - List.iter (error_at env) if_expr; - expr + List.iter (error_at env) if_expr; + expr let as_pattern ?(err = Parse_error.InvalidLHSInAssignment) env cover = - let expr = match cover with - | Cover_expr expr -> expr - | Cover_patt (expr, { if_expr = _; if_patt }) -> + let expr = + match cover with + | Cover_expr expr -> expr + | Cover_patt (expr, { if_expr = _; if_patt }) -> List.iter (error_at env) if_patt; expr in - - if not (Parse.is_assignable_lhs expr) - then error_at env (fst expr, err); + if not (Parse.is_assignable_lhs expr) then error_at env (fst expr, err); (match expr with - | loc, Flow_ast.Expression.Identifier (_, name) + | (loc, Flow_ast.Expression.Identifier (_, { Flow_ast.Identifier.name; comments = _ })) when is_restricted name -> - strict_error_at env (loc, Error.StrictLHSAssignment) + strict_error_at env (loc, Parse_error.StrictLHSAssignment) | _ -> ()); Parse.pattern_from_expr env expr let empty_errors = { if_patt = []; if_expr = [] } + let rev_append_errors a b = - { if_patt = List.rev_append a.if_patt b.if_patt; - if_expr = List.rev_append a.if_expr b.if_expr; } - let rev_errors a = - { if_patt = List.rev a.if_patt; if_expr = List.rev a.if_expr } + { + if_patt = List.rev_append a.if_patt b.if_patt; + if_expr = List.rev_append a.if_expr b.if_expr; + } + + let rev_errors a = { if_patt = List.rev a.if_patt; if_expr = List.rev a.if_expr } end diff --git a/src/parser/pattern_parser.ml b/src/parser/pattern_parser.ml index 245e1870c5b..a5b4161ff4f 100644 --- a/src/parser/pattern_parser.ml +++ b/src/parser/pattern_parser.ml @@ -1,68 +1,73 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Token open Parser_common open Parser_env open Flow_ast -module Pattern - (Parse: Parser_common.PARSER) - (Type: Type_parser.TYPE) - = struct +let missing_annot env = Ast.Type.Missing (Peek.loc_skip_lookahead env) + +module Pattern (Parse : Parser_common.PARSER) (Type : Type_parser.TYPE) = struct (* Reinterpret various expressions as patterns. * This is not the correct thing to do and is only used for assignment * expressions. This should be removed and replaced ASAP. 
*) let rec object_from_expr = - let rec properties env acc = Ast.Expression.Object.(function - | [] -> List.rev acc - | Property (loc, prop)::remaining -> - let key, pattern, shorthand = match prop with - | Property.Init { key; value; shorthand } -> - key, from_expr env value, shorthand - | Property.Method { key; value = (loc, f) } -> - error_at env (loc, Parse_error.MethodInDestructuring); - key, (loc, Pattern.Expression (loc, Ast.Expression.Function f)), false - | Property.Get { key; value = (loc, f) } - | Property.Set { key; value = (loc, f) } -> - (* these should never happen *) - error_at env (loc, Parse_error.UnexpectedIdentifier); - key, (loc, Pattern.Expression (loc, Ast.Expression.Function f)), false + let rec properties env acc = + Ast.Expression.Object.( + function + | [] -> List.rev acc + | Property (loc, prop) :: remaining -> + let acc = + match prop with + | Property.Init { key; value; shorthand } -> + Ast.Expression.( + let key = + match key with + | Property.Literal lit -> Pattern.Object.Property.Literal lit + | Property.Identifier id -> Pattern.Object.Property.Identifier id + | Property.PrivateName _ -> failwith "Internal Error: Found object private prop" + | Property.Computed expr -> Pattern.Object.Property.Computed expr + in + let (pattern, default) = + match value with + | (_loc, Assignment { Assignment.operator = None; left; right }) -> + (left, Some right) + | _ -> (from_expr env value, None) + in + Pattern.Object.Property + (loc, { Pattern.Object.Property.key; pattern; default; shorthand }) + :: acc) + | Property.Method { key = _; value = (loc, _) } -> + error_at env (loc, Parse_error.MethodInDestructuring); + acc + | Property.Get { key = _; value = (loc, _) } + | Property.Set { key = _; value = (loc, _) } -> + (* these should never happen *) + error_at env (loc, Parse_error.Unexpected "identifier"); + acc in - let key = match key with - | Property.Literal lit -> Pattern.Object.Property.Literal lit - | Property.Identifier id -> Pattern.Object.Property.Identifier id - | Property.PrivateName _ -> failwith "Internal Error: Found object private prop" - | Property.Computed expr -> Pattern.Object.Property.Computed expr - in - let acc = Pattern.(Object.Property (loc, { Object.Property. - key; - pattern; - shorthand; - })) :: acc in properties env acc remaining - | SpreadProperty (loc, { SpreadProperty.argument; })::[] -> - let acc = Pattern.Object.(RestProperty (loc, { RestProperty. - argument = from_expr env argument; - })) :: acc in + | [SpreadProperty (loc, { SpreadProperty.argument })] -> + let acc = + Pattern.Object.(RestProperty (loc, { RestProperty.argument = from_expr env argument })) + :: acc + in properties env acc [] - | SpreadProperty (loc, _)::remaining -> + | SpreadProperty (loc, _) :: remaining -> error_at env (loc, Parse_error.PropertyAfterRestProperty); - properties env acc remaining - ) in - - fun env (loc, { Ast.Expression.Object.properties = props }) -> - loc, Pattern.(Object { Object. 
- properties = properties env [] props; - annot = None; - }) + properties env acc remaining) + in + fun env (loc, { Ast.Expression.Object.properties = props; comments = _ (* TODO *) }) -> + ( loc, + Pattern.(Object { Object.properties = properties env [] props; annot = missing_annot env }) + ) and array_from_expr = (* Convert an Expression to a Pattern if it is a valid @@ -72,58 +77,67 @@ module Pattern let assignment_target env ((loc, _) as expr) = if Parse.is_assignable_lhs expr then Some (from_expr env expr) - else begin + else ( error_at env (loc, Parse_error.InvalidLHSInAssignment); None - end + ) in - - let rec elements env acc = Ast.Expression.(function - | [] -> List.rev acc - | Some (Spread (loc, { SpreadElement.argument }))::[] -> + let rec elements env acc = + Ast.Expression.( + function + | [] -> List.rev acc + | [Some (Spread (loc, { SpreadElement.argument }))] -> (* AssignmentRestElement is a DestructuringAssignmentTarget, see #prod-AssignmentRestElement *) - let acc = match assignment_target env argument with - | Some argument -> - (Some Pattern.Array.(RestElement (loc, { RestElement.argument; }))) :: acc - | None -> - acc + let acc = + match assignment_target env argument with + | Some argument -> + Some Pattern.Array.(RestElement (loc, { RestElement.argument })) :: acc + | None -> acc in elements env acc [] - | Some (Spread (loc, _))::remaining -> + | Some (Spread (loc, _)) :: remaining -> error_at env (loc, Parse_error.ElementAfterRestElement); elements env acc remaining - | Some (Expression (_, Assignment { Assignment. - operator = Assignment.Assign; _ - } as expr))::remaining -> + | Some (Expression (loc, Assignment { Assignment.operator = None; left; right })) + :: remaining -> (* AssignmentElement is a `DestructuringAssignmentTarget Initializer`, see #prod-AssignmentElement *) - let acc = Some (Pattern.Array.Element (from_expr env expr)) :: acc in + let acc = + Some + (Pattern.Array.Element + (loc, { Pattern.Array.Element.argument = left; default = Some right })) + :: acc + in elements env acc remaining - | Some (Expression expr)::remaining -> + | Some (Expression expr) :: remaining -> (* AssignmentElement is a DestructuringAssignmentTarget, see #prod-AssignmentElement *) - let acc = match assignment_target env expr with - | Some expr -> (Some (Pattern.Array.Element expr)) :: acc - | None -> acc + let acc = + match assignment_target env expr with + | Some ((loc, _) as expr) -> + let element = + Pattern.Array.Element + (loc, { Pattern.Array.Element.argument = expr; default = None }) + in + Some element :: acc + | None -> acc in elements env acc remaining - | None::remaining -> - elements env (None::acc) remaining - ) + | None :: remaining -> elements env (None :: acc) remaining) in - - fun env (loc, { Ast.Expression.Array.elements = elems }) -> - loc, Pattern.Array { Pattern.Array. 
- elements = elements env [] elems; - annot = None; - } + fun env (loc, { Ast.Expression.Array.elements = elems; comments }) -> + ( loc, + Pattern.Array + { Pattern.Array.elements = elements env [] elems; annot = missing_annot env; comments } + ) and from_expr env (loc, expr) = - Ast.Expression.(match expr with - | Object obj -> object_from_expr env (loc, obj) - | Array arr -> array_from_expr env (loc, arr) - | Identifier ((id_loc, string_val) as name) -> + Ast.Expression.( + match expr with + | Object obj -> object_from_expr env (loc, obj) + | Array arr -> array_from_expr env (loc, arr) + | Identifier ((id_loc, { Identifier.name = string_val; comments = _ }) as name) -> (* per #sec-destructuring-assignment-static-semantics-early-errors, it is a syntax error if IsValidSimpleAssignmentTarget of this IdentifierReference is false. That happens when `string_val` is @@ -136,109 +150,99 @@ module Pattern they are already invalid in strict mode, which we should have already errored about when parsing the expression that we're now converting into a pattern. *) - else if not (in_strict_mode env) then begin + else if not (in_strict_mode env) then if allow_yield env && string_val = "yield" then error_at env (id_loc, Parse_error.YieldAsIdentifierReference) else if allow_await env && string_val = "await" then - error_at env (id_loc, Parse_error.AwaitAsIdentifierReference) - end; - loc, Pattern.Identifier { Pattern.Identifier. - name; - annot = None; - optional = false; - } - | Assignment { Assignment.operator = Assignment.Assign; left; right } -> - loc, Pattern.Assignment { Pattern.Assignment.left; right } - | expr -> loc, Pattern.Expression (loc, expr)) + error_at env (id_loc, Parse_error.AwaitAsIdentifierReference); + ( loc, + Pattern.Identifier + { Pattern.Identifier.name; annot = missing_annot env; optional = false } ) + | expr -> (loc, Pattern.Expression (loc, expr))) (* Parse object destructuring pattern *) let rec object_ restricted_error = let rest_property env = - let loc, argument = with_loc (fun env -> - Expect.token env T_ELLIPSIS; - pattern env restricted_error - ) env in - Pattern.Object.(RestProperty (loc, { RestProperty. 
- argument - })) + let (loc, argument) = + with_loc + (fun env -> + Expect.token env T_ELLIPSIS; + pattern env restricted_error) + env + in + Pattern.Object.(RestProperty (loc, { RestProperty.argument })) in - let property_default env = match Peek.token env with | T_ASSIGN -> Expect.token env T_ASSIGN; Some (Parse.assignment env) - | _ -> - None + | _ -> None in - - let property_with_default env prop = - match property_default env with - | Some default -> - let loc = Loc.btwn (fst prop) (fst default) in - loc, Pattern.(Assignment Assignment.({ - left = prop; - right = default; - })); - | None -> prop - in - let rec property env = - if Peek.token env = T_ELLIPSIS then begin + if Peek.token env = T_ELLIPSIS then Some (rest_property env) - end else begin + else let start_loc = Peek.loc env in let raw_key = Parse.object_key env in match Peek.token env with | T_COLON -> Expect.token env T_COLON; - let pattern = pattern env restricted_error in - let pattern = property_with_default env pattern in - let loc = Loc.btwn start_loc (fst pattern) in - let key = Ast.Expression.Object.Property.( - match raw_key with - | _, Literal lit -> Pattern.Object.Property.Literal lit - | _, Identifier id -> Pattern.Object.Property.Identifier id - | _, PrivateName _ -> failwith "Internal Error: Found object private prop" - | _, Computed expr -> Pattern.Object.Property.Computed expr - ) in - Some Pattern.Object.(Property (loc, Property.({ - key; - pattern; - shorthand = false; - }))) - + let (loc, (pattern, default)) = + with_loc + ~start_loc + (fun env -> + let pattern = pattern env restricted_error in + let default = property_default env in + (pattern, default)) + env + in + let key = + Ast.Expression.Object.Property.( + match raw_key with + | (_, Literal lit) -> Pattern.Object.Property.Literal lit + | (_, Identifier id) -> Pattern.Object.Property.Identifier id + | (_, PrivateName _) -> failwith "Internal Error: Found object private prop" + | (_, Computed expr) -> Pattern.Object.Property.Computed expr) + in + Some + Pattern.Object.(Property (loc, Property.{ key; pattern; default; shorthand = false })) | _ -> (match raw_key with - | _, Ast.Expression.Object.Property.Identifier ((id_loc, string_val) as name) -> + | ( _, + Ast.Expression.Object.Property.Identifier + ((id_loc, { Identifier.name = string_val; comments = _ }) as name) ) -> (* #sec-identifiers-static-semantics-early-errors *) - begin - if is_reserved string_val && string_val <> "yield" && string_val <> "await" then - (* it is a syntax error if `name` is a reserved word other than await or yield *) - error_at env (id_loc, Parse_error.UnexpectedReserved) - else if is_strict_reserved string_val then - (* it is a syntax error if `name` is a strict reserved word, in strict mode *) - strict_error_at env (id_loc, Parse_error.StrictReservedWord) - end; - let pattern = (id_loc, Pattern.Identifier { Pattern.Identifier. - name; - annot = None; - optional = false; - }) in - let pattern = property_with_default env pattern in - let loc = Loc.btwn start_loc (fst pattern) in - Some Pattern.Object.(Property (loc, { Property. 
- key = Property.Identifier name; - pattern; - shorthand = true; - })) - + if is_reserved string_val && string_val <> "yield" && string_val <> "await" then + (* it is a syntax error if `name` is a reserved word other than await or yield *) + error_at env (id_loc, Parse_error.UnexpectedReserved) + else if is_strict_reserved string_val then + (* it is a syntax error if `name` is a strict reserved word, in strict mode *) + strict_error_at env (id_loc, Parse_error.StrictReservedWord); + let (loc, (pattern, default)) = + with_loc + ~start_loc + (fun env -> + let pattern = + ( id_loc, + Pattern.Identifier + { Pattern.Identifier.name; annot = missing_annot env; optional = false } ) + in + let default = property_default env in + (pattern, default)) + env + in + Some + Pattern.Object.( + Property + ( loc, + { Property.key = Property.Identifier name; pattern; default; shorthand = true } + )) | _ -> - error_unexpected env; (* invalid shorthand destructuring *) - None - ) - end + error_unexpected ~expected:"an identifier" env; + (* invalid shorthand destructuring *) + None) (* seen_rest is true when we've seen a rest element. rest_trailing_comma is the location of * the rest element's trailing command * Trailing comma: `let { ...rest, } = obj` @@ -247,102 +251,115 @@ module Pattern match Peek.token env with | T_EOF | T_RCURLY -> - begin match rest_trailing_comma with - | Some loc -> error_at env (loc, Parse_error.TrailingCommaAfterRestElement) - | None -> () + begin + match rest_trailing_comma with + | Some loc -> error_at env (loc, Parse_error.TrailingCommaAfterRestElement) + | None -> () end; List.rev acc | _ -> (match property env with - | Some (Pattern.Object.Property (loc, _) | Pattern.Object.RestProperty (loc, _) as prop) -> + | Some ((Pattern.Object.Property (loc, _) | Pattern.Object.RestProperty (loc, _)) as prop) + -> let rest_trailing_comma = - if seen_rest - then begin + if seen_rest then ( error_at env (loc, Parse_error.PropertyAfterRestProperty); None - end - else rest_trailing_comma + ) else + rest_trailing_comma + in + let (seen_rest, rest_trailing_comma) = + match prop with + | Pattern.Object.RestProperty _ -> + ( true, + if Peek.token env = T_COMMA then + Some (Peek.loc env) + else + None ) + | _ -> (seen_rest, rest_trailing_comma) in - let seen_rest, rest_trailing_comma = begin match prop with - | Pattern.Object.RestProperty _ -> - true, if Peek.token env = T_COMMA then Some (Peek.loc env) else None - | _ -> seen_rest, rest_trailing_comma end in - if Peek.token env <> T_RCURLY - then Expect.token env T_COMMA; - properties env ~seen_rest ~rest_trailing_comma (prop::acc) + if Peek.token env <> T_RCURLY then Expect.token env T_COMMA; + properties env ~seen_rest ~rest_trailing_comma (prop :: acc) | None -> properties env ~seen_rest ~rest_trailing_comma acc) in with_loc (fun env -> - Expect.token env T_LCURLY; - let properties = properties env ~seen_rest:false ~rest_trailing_comma:None [] in - Expect.token env T_RCURLY; - let annot = - if Peek.token env = T_COLON then Some (Type.annotation env) - else None - in - Pattern.Object { Pattern.Object.properties; annot; } - ) + Expect.token env T_LCURLY; + let properties = properties env ~seen_rest:false ~rest_trailing_comma:None [] in + Expect.token env T_RCURLY; + let annot = + if Peek.token env = T_COLON then + Ast.Type.Available (Type.annotation env) + else + missing_annot env + in + Pattern.Object { Pattern.Object.properties; annot }) (* Parse array destructuring pattern *) and array_ restricted_error = let rec elements env acc = match 
Peek.token env with | T_EOF - | T_RBRACKET -> List.rev acc + | T_RBRACKET -> + List.rev acc | T_COMMA -> Expect.token env T_COMMA; - elements env (None::acc) + elements env (None :: acc) | T_ELLIPSIS -> - let loc, argument = with_loc (fun env -> - Expect.token env T_ELLIPSIS; - pattern env restricted_error - ) env in - let element = Pattern.Array.(RestElement (loc, { RestElement. - argument; - })) in + let (loc, argument) = + with_loc + (fun env -> + Expect.token env T_ELLIPSIS; + pattern env restricted_error) + env + in + let element = Pattern.Array.(RestElement (loc, { RestElement.argument })) in (* rest elements are always last, the closing ] should be next. but if not, error and keep going so we recover gracefully by parsing the rest of the elements. *) - if Peek.token env <> T_RBRACKET then begin + if Peek.token env <> T_RBRACKET then ( error_at env (loc, Parse_error.ElementAfterRestElement); if Peek.token env = T_COMMA then Eat.token env - end; - elements env ((Some element)::acc) + ); + elements env (Some element :: acc) | _ -> - let pattern = pattern env restricted_error in - let pattern = match Peek.token env with - | T_ASSIGN -> - Expect.token env T_ASSIGN; - let default = Parse.assignment env in - let loc = Loc.btwn (fst pattern) (fst default) in - loc, Pattern.(Assignment Assignment.({ - left = pattern; - right = default; - })) - | _ -> pattern + let (loc, (pattern, default)) = + with_loc + (fun env -> + let pattern = pattern env restricted_error in + let default = + match Peek.token env with + | T_ASSIGN -> + Expect.token env T_ASSIGN; + Some (Parse.assignment env) + | _ -> None + in + (pattern, default)) + env in - let element = Pattern.Array.(Element pattern) in + let element = Pattern.Array.(Element (loc, { Element.argument = pattern; default })) in if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA; - elements env ((Some element)::acc) + elements env (Some element :: acc) in with_loc (fun env -> - Expect.token env T_LBRACKET; - let elements = elements env [] in - Expect.token env T_RBRACKET; - let annot = - if Peek.token env = T_COLON then Some (Type.annotation env) - else None - in - Pattern.Array { Pattern.Array.elements; annot; } - ) + let leading = Peek.comments env in + Expect.token env T_LBRACKET; + let elements = elements env [] in + Expect.token env T_RBRACKET; + let annot = + if Peek.token env = T_COLON then + Ast.Type.Available (Type.annotation env) + else + missing_annot env + in + let trailing = Peek.comments env in + let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in + Pattern.Array { Pattern.Array.elements; annot; comments }) and pattern env restricted_error = match Peek.token env with - | T_LCURLY -> - object_ restricted_error env - | T_LBRACKET -> - array_ restricted_error env + | T_LCURLY -> object_ restricted_error env + | T_LBRACKET -> array_ restricted_error env | _ -> - let loc, id = Parse.identifier_with_type env restricted_error in - loc, Pattern.Identifier id + let (loc, id) = Parse.identifier_with_type env restricted_error in + (loc, Pattern.Identifier id) end diff --git a/src/parser/relativeLoc.ml b/src/parser/relativeLoc.ml new file mode 100644 index 00000000000..04b1bdfb579 --- /dev/null +++ b/src/parser/relativeLoc.ml @@ -0,0 +1,48 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type relative_position = { + line_offset: int; + (* If line_offset is 0, this is the column offset. If line_offset is nonzero, this is the absolute + * column. *) + column_or_offset: int; +} + +type t = { + start: Loc.position; + end_: relative_position; +} + +let relative_position_of_position base_pos pos = + Loc.( + let line_offset = pos.line - base_pos.line in + let column_or_offset = + if line_offset = 0 then + pos.column - base_pos.column + else + pos.column + in + { line_offset; column_or_offset }) + +let position_of_relative_position base_pos relative_pos = + Loc.( + let line = base_pos.line + relative_pos.line_offset in + let column = + if relative_pos.line_offset = 0 then + base_pos.column + relative_pos.column_or_offset + else + relative_pos.column_or_offset + in + { line; column }) + +let of_loc loc = + let end_ = relative_position_of_position loc.Loc.start loc.Loc._end in + { start = loc.Loc.start; end_ } + +let to_loc relative_loc source = + let end_ = position_of_relative_position relative_loc.start relative_loc.end_ in + { Loc.source; start = relative_loc.start; _end = end_ } diff --git a/src/parser/relativeLoc.mli b/src/parser/relativeLoc.mli new file mode 100644 index 00000000000..43489f27555 --- /dev/null +++ b/src/parser/relativeLoc.mli @@ -0,0 +1,27 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* + * When we store data to the shared heap, we first marshal it using OCaml's marshaller, then we + * compress it. OCaml's marshaling algorithm uses a more compact representation for smaller + * integers, so it is advantageous to use small integers rather than large ones when serializing to + * the shared heap. + * + * To that end, this utility converts locations so that the end position is stored relative to the + * start position, rather than storing it in absolute terms. The intuition is that the end location + * will always be closer to (or as close as) the start position than to the start of the file, so + * the numbers stored will be smaller and therefore have a more compact representation, on average. + * + * This does not change the in-memory size of the location. It does, however make it smaller to + * serialize. + * *) + +type t + +val of_loc : Loc.t -> t + +val to_loc : t -> File_key.t option -> Loc.t diff --git a/src/parser/statement_parser.ml b/src/parser/statement_parser.ml index 2410d57092f..81bcf1aec79 100644 --- a/src/parser/statement_parser.ml +++ b/src/parser/statement_parser.ml @@ -1,56 +1,80 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) module Ast = Flow_ast - open Token open Parser_env open Flow_ast -module Error = Parse_error -module SSet = Set.Make(String) - +module SSet = Set.Make (String) open Parser_common module type STATEMENT = sig - val for_: env -> (Loc.t, Loc.t) Statement.t - val if_: env -> (Loc.t, Loc.t) Statement.t - val let_: env -> (Loc.t, Loc.t) Statement.t - val try_: env -> (Loc.t, Loc.t) Statement.t - val while_: env -> (Loc.t, Loc.t) Statement.t - val with_: env -> (Loc.t, Loc.t) Statement.t - val block: env -> (Loc.t, Loc.t) Statement.t - val break: env -> (Loc.t, Loc.t) Statement.t - val continue: env -> (Loc.t, Loc.t) Statement.t - val debugger: env -> (Loc.t, Loc.t) Statement.t - val declare: ?in_module:bool -> env -> (Loc.t, Loc.t) Statement.t - val declare_export_declaration: ?allow_export_type:bool -> env -> (Loc.t, Loc.t) Statement.t - val declare_opaque_type : env -> (Loc.t, Loc.t) Statement.t - val do_while: env -> (Loc.t, Loc.t) Statement.t - val empty: env -> (Loc.t, Loc.t) Statement.t - val export_declaration: decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t - val expression: env -> (Loc.t, Loc.t) Statement.t - val import_declaration: env -> (Loc.t, Loc.t) Statement.t - val interface: env -> (Loc.t, Loc.t) Statement.t - val maybe_labeled: env -> (Loc.t, Loc.t) Statement.t - val opaque_type : env -> (Loc.t, Loc.t) Statement.t - val return: env -> (Loc.t, Loc.t) Statement.t - val switch: env -> (Loc.t, Loc.t) Statement.t - val throw: env -> (Loc.t, Loc.t) Statement.t - val type_alias: env -> (Loc.t, Loc.t) Statement.t - val var_or_const: env -> (Loc.t, Loc.t) Statement.t + val for_ : env -> (Loc.t, Loc.t) Statement.t + + val if_ : env -> (Loc.t, Loc.t) Statement.t + + val let_ : env -> (Loc.t, Loc.t) Statement.t + + val try_ : env -> (Loc.t, Loc.t) Statement.t + + val while_ : env -> (Loc.t, Loc.t) Statement.t + + val with_ : env -> (Loc.t, Loc.t) Statement.t + + val block : env -> (Loc.t, Loc.t) Statement.t + + val break : env -> (Loc.t, Loc.t) Statement.t + + val continue : env -> (Loc.t, Loc.t) Statement.t + + val debugger : env -> (Loc.t, Loc.t) Statement.t + + val declare : ?in_module:bool -> env -> (Loc.t, Loc.t) Statement.t + + val declare_export_declaration : ?allow_export_type:bool -> env -> (Loc.t, Loc.t) Statement.t + + val declare_opaque_type : env -> (Loc.t, Loc.t) Statement.t + + val do_while : env -> (Loc.t, Loc.t) Statement.t + + val empty : env -> (Loc.t, Loc.t) Statement.t + + val export_declaration : + decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t + + val expression : env -> (Loc.t, Loc.t) Statement.t + + val import_declaration : env -> (Loc.t, Loc.t) Statement.t + + val interface : env -> (Loc.t, Loc.t) Statement.t + + val maybe_labeled : env -> (Loc.t, Loc.t) Statement.t + + val opaque_type : env -> (Loc.t, Loc.t) Statement.t + + val return : env -> (Loc.t, Loc.t) Statement.t + + val switch : env -> (Loc.t, Loc.t) Statement.t + + val throw : env -> (Loc.t, Loc.t) Statement.t + + val type_alias : env -> (Loc.t, Loc.t) Statement.t + + val var : env -> (Loc.t, Loc.t) Statement.t + + val const : env -> (Loc.t, Loc.t) Statement.t end module Statement - (Parse: PARSER) - (Type: Type_parser.TYPE) - (Declaration: Declaration_parser.DECLARATION) - (Object: Object_parser.OBJECT) - (Pattern_cover : Pattern_cover.COVER) -: STATEMENT = struct + (Parse : PARSER) + (Type : Type_parser.TYPE) + (Declaration : Declaration_parser.DECLARATION) + (Object : Object_parser.OBJECT) + (Pattern_cover : 
Pattern_cover.COVER) : STATEMENT = struct type for_lhs = | For_expression of pattern_cover | For_declaration of (Loc.t * (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) @@ -61,218 +85,229 @@ module Statement recover gracefully. *) let function_as_statement env = let func = Declaration._function env in - if in_strict_mode env then + ( if in_strict_mode env then function_as_statement_error_at env (fst func) - else begin match func with - | _, Ast.Statement.FunctionDeclaration { Ast.Function.async = true; _ } -> - error_at env (fst func, Parse_error.AsyncFunctionAsStatement) - | _, Ast.Statement.FunctionDeclaration { Ast.Function.generator = true; _ } -> - error_at env (fst func, Parse_error.GeneratorFunctionAsStatement) - | _ -> () - end; + else + Ast.Statement.( + match func with + | (loc, FunctionDeclaration { Ast.Function.async = true; _ }) -> + error_at env (loc, Parse_error.AsyncFunctionAsStatement) + | (loc, FunctionDeclaration { Ast.Function.generator = true; _ }) -> + error_at env (loc, Parse_error.GeneratorFunctionAsStatement) + | _ -> ()) ); func let rec empty env = let loc = Peek.loc env in Expect.token env T_SEMICOLON; - loc, Statement.Empty + (loc, Statement.Empty) and break env = - let loc, label = with_loc (fun env -> - Expect.token env T_BREAK; - let label = - if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env - then None - else begin - let (_, name) as label = - Parse.identifier env in - if not (SSet.mem name (labels env)) - then error env (Error.UnknownLabel name); - Some label - end - in - Eat.semicolon env; - label - ) env in - if label = None && not (in_loop env || in_switch env) - then error_at env (loc, Error.IllegalBreak); - loc, Statement.Break { Statement.Break.label } + let leading = Peek.comments env in + let (loc, (label, trailing)) = + with_loc + (fun env -> + Expect.token env T_BREAK; + let label = + if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then + None + else + let ((_, { Identifier.name; comments = _ }) as label) = Parse.identifier env in + if not (SSet.mem name (labels env)) then error env (Parse_error.UnknownLabel name); + Some label + in + let trailingComments = Peek.comments env in + Eat.semicolon env; + (label, trailingComments)) + env + in + if label = None && not (in_loop env || in_switch env) then + error_at env (loc, Parse_error.IllegalBreak); + let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in + (loc, Statement.Break { Statement.Break.label; comments }) and continue env = - let loc, label = with_loc (fun env -> - Expect.token env T_CONTINUE; - let label = - if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env - then None - else begin - let (_, name) as label = - Parse.identifier env in - if not (SSet.mem name (labels env)) - then error env (Error.UnknownLabel name); - Some label - end in - Eat.semicolon env; - label - ) env in - if not (in_loop env) then error_at env (loc, Error.IllegalContinue); - loc, Statement.Continue { Statement.Continue.label } - - and debugger = with_loc (fun env -> - Expect.token env T_DEBUGGER; - Eat.semicolon env; - Statement.Debugger - ) - - and do_while = with_loc (fun env -> - Expect.token env T_DO; - let body = Parse.statement (env |> with_in_loop true) in + let leading = Peek.comments env in + let trailingComments = ref [] in + let (loc, label) = + with_loc + (fun env -> + Expect.token env T_CONTINUE; + trailingComments := Peek.comments env; + let label = + if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then + None + else + 
let ((_, { Identifier.name; comments = _ }) as label) = Parse.identifier env in + if not (SSet.mem name (labels env)) then error env (Parse_error.UnknownLabel name); + Some label + in + Eat.semicolon env; + label) + env + in + if not (in_loop env) then error_at env (loc, Parse_error.IllegalContinue); + let trailing = !trailingComments in + ( loc, + Statement.Continue + { + Statement.Continue.label; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + } ) + + and debugger = + with_loc (fun env -> + Expect.token env T_DEBUGGER; + Eat.semicolon env; + Statement.Debugger) - (* Annex B allows labelled FunctionDeclarations (see + and do_while = + with_loc (fun env -> + let leading = Peek.comments env in + Expect.token env T_DO; + let body = Parse.statement (env |> with_in_loop true) in + (* Annex B allows labelled FunctionDeclarations (see sec-labelled-function-declarations), but not in IterationStatement (see sec-semantics-static-semantics-early-errors). *) - if not (in_strict_mode env) && is_labelled_function body - then function_as_statement_error_at env (fst body); - - Expect.token env T_WHILE; - Expect.token env T_LPAREN; - let test = Parse.expression env in - Expect.token env T_RPAREN; - (* The rules of automatic semicolon insertion in ES5 don't mention this, - * but the semicolon after a do-while loop is optional. This is properly - * specified in ES6 *) - if Peek.token env = T_SEMICOLON - then Eat.semicolon env; - Statement.DoWhile { Statement.DoWhile. - body; - test; - } - ) + if (not (in_strict_mode env)) && is_labelled_function body then + function_as_statement_error_at env (fst body); + let pre_keyword_trailing = Peek.comments env in + Expect.token env T_WHILE; + let pre_cond_trailing = Peek.comments env in + Expect.token env T_LPAREN; + let test = Parse.expression env in + Expect.token env T_RPAREN; + let past_cond_trailing = Peek.comments env in + (* The rules of automatic semicolon insertion in ES5 don't mention this, + * but the semicolon after a do-while loop is optional. This is properly + * specified in ES6 *) + if Peek.token env = T_SEMICOLON then Eat.semicolon env; + let trailing = pre_keyword_trailing @ pre_cond_trailing @ past_cond_trailing in + Statement.DoWhile + { + Statement.DoWhile.body; + test; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) and for_ = let assert_can_be_forin_or_forof env err = function - | loc, { Statement.VariableDeclaration.declarations; _; } -> - (* Only a single declarator is allowed, without an init. So - * something like - * - * for (var x in y) {} - * - * is allowed, but we disallow - * - * for (var x, y in z) {} - * for (var x = 42 in y) {} - *) - (match declarations with - | [ (_, { Statement.VariableDeclaration.Declarator.init = None; _; }) ] -> () - | _ -> error_at env (loc, err)) + | (loc, { Statement.VariableDeclaration.declarations; _ }) -> + (* Only a single declarator is allowed, without an init. So + * something like + * + * for (var x in y) {} + * + * is allowed, but we disallow + * + * for (var x, y in z) {} + * for (var x = 42 in y) {} + *) + (match declarations with + | [(_, { Statement.VariableDeclaration.Declarator.init = None; _ })] -> () + | _ -> error_at env (loc, err)) in - (* Annex B allows labelled FunctionDeclarations (see sec-labelled-function-declarations), but not in IterationStatement (see sec-semantics-static-semantics-early-errors). 
*) let assert_not_labelled_function env body = - if not (in_strict_mode env) && is_labelled_function body - then function_as_statement_error_at env (fst body) - else () + if (not (in_strict_mode env)) && is_labelled_function body then + function_as_statement_error_at env (fst body) + else + () in - with_loc (fun env -> - Expect.token env T_FOR; - let async = allow_await env && Expect.maybe env T_AWAIT in - Expect.token env T_LPAREN; + Expect.token env T_FOR; + let async = allow_await env && Expect.maybe env T_AWAIT in + Expect.token env T_LPAREN; - let init, errs = - let env = env |> with_no_in true in - match Peek.token env with - | T_SEMICOLON -> None, [] - | T_LET -> - let loc, (decl, errs) = with_loc Declaration.let_ env in - Some (For_declaration (loc, decl)), errs - | T_CONST -> - let loc, (decl, errs) = with_loc Declaration.const env in - Some (For_declaration (loc, decl)), errs - | T_VAR -> - let loc, (decl, errs) = with_loc Declaration.var env in - Some (For_declaration (loc, decl)), errs - | _ -> + let (init, errs) = + let env = env |> with_no_in true in + match Peek.token env with + | T_SEMICOLON -> (None, []) + | T_LET -> + let (loc, (decl, errs)) = with_loc Declaration.let_ env in + (Some (For_declaration (loc, decl)), errs) + | T_CONST -> + let (loc, (decl, errs)) = with_loc Declaration.const env in + (Some (For_declaration (loc, decl)), errs) + | T_VAR -> + let (loc, (decl, errs)) = with_loc Declaration.var env in + (Some (For_declaration (loc, decl)), errs) + | _ -> let expr = Parse.expression_or_pattern (env |> with_no_let true) in - Some (For_expression expr), [] - in - - match Peek.token env with - (* If `async` is true, this must be a for-await-of loop. *) - | t when t = T_OF || async -> - let left = Statement.(match init with - | Some (For_declaration decl) -> - assert_can_be_forin_or_forof env Error.InvalidLHSInForOf decl; - ForOf.LeftDeclaration decl - | Some (For_expression expr) -> - (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *) - let patt = Pattern_cover.as_pattern ~err:Error.InvalidLHSInForOf env expr in - ForOf.LeftPattern patt - | None -> assert false) in + (Some (For_expression expr), []) + in + match Peek.token env with + (* If `async` is true, this must be a for-await-of loop. *) + | t when t = T_OF || async -> + let left = + Statement.( + match init with + | Some (For_declaration decl) -> + assert_can_be_forin_or_forof env Parse_error.InvalidLHSInForOf decl; + ForOf.LeftDeclaration decl + | Some (For_expression expr) -> + (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *) + let patt = Pattern_cover.as_pattern ~err:Parse_error.InvalidLHSInForOf env expr in + ForOf.LeftPattern patt + | None -> assert false) + in (* This is a for of loop *) Expect.token env T_OF; let right = Parse.assignment env in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in assert_not_labelled_function env body; - Statement.ForOf { Statement.ForOf. 
- left; - right; - body; - async; - } - | T_IN -> - let left = match init with - | Some (For_declaration decl) -> - assert_can_be_forin_or_forof env Error.InvalidLHSInForIn decl; - Statement.ForIn.LeftDeclaration decl - | Some (For_expression expr) -> - (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *) - let patt = Pattern_cover.as_pattern ~err:Error.InvalidLHSInForIn env expr in - Statement.ForIn.LeftPattern patt - | None -> assert false in + Statement.ForOf { Statement.ForOf.left; right; body; async } + | T_IN -> + let left = + match init with + | Some (For_declaration decl) -> + assert_can_be_forin_or_forof env Parse_error.InvalidLHSInForIn decl; + Statement.ForIn.LeftDeclaration decl + | Some (For_expression expr) -> + (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *) + let patt = Pattern_cover.as_pattern ~err:Parse_error.InvalidLHSInForIn env expr in + Statement.ForIn.LeftPattern patt + | None -> assert false + in (* This is a for in loop *) Expect.token env T_IN; let right = Parse.expression env in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in assert_not_labelled_function env body; - Statement.ForIn { Statement.ForIn. - left; - right; - body; - each = false; - } - | _ -> + Statement.ForIn { Statement.ForIn.left; right; body; each = false } + | _ -> (* This is a for loop *) errs |> List.iter (error_at env); Expect.token env T_SEMICOLON; - let init = match init with - | Some (For_declaration decl) -> Some (Statement.For.InitDeclaration decl) - | Some (For_expression expr) -> - Some (Statement.For.InitExpression (Pattern_cover.as_expression env expr)) - | None -> None in - let test = match Peek.token env with - | T_SEMICOLON -> None - | _ -> Some (Parse.expression env) in + let init = + match init with + | Some (For_declaration decl) -> Some (Statement.For.InitDeclaration decl) + | Some (For_expression expr) -> + Some (Statement.For.InitExpression (Pattern_cover.as_expression env expr)) + | None -> None + in + let test = + match Peek.token env with + | T_SEMICOLON -> None + | _ -> Some (Parse.expression env) + in Expect.token env T_SEMICOLON; - let update = match Peek.token env with - | T_RPAREN -> None - | _ -> Some (Parse.expression env) in + let update = + match Peek.token env with + | T_RPAREN -> None + | _ -> Some (Parse.expression env) + in Expect.token env T_RPAREN; let body = Parse.statement (env |> with_in_loop true) in assert_not_labelled_function env body; - Statement.For { Statement.For. - init; - test; - update; - body; - } - ) + Statement.For { Statement.For.init; test; update; body }) and if_ = - (** + (* * Either the consequent or alternate of an if statement *) let if_branch env = @@ -285,390 +320,399 @@ module Statement else Parse.statement env in - (* Annex B allows labelled FunctionDeclarations in non-strict mode (see sec-labelled-function-declarations), but not in IfStatement (see sec-if-statement-static-semantics-early-errors). 
*) - if not (in_strict_mode env) && is_labelled_function stmt - then function_as_statement_error_at env (fst stmt); + if (not (in_strict_mode env)) && is_labelled_function stmt then + function_as_statement_error_at env (fst stmt); stmt in - with_loc (fun env -> - Expect.token env T_IF; - Expect.token env T_LPAREN; - let test = Parse.expression env in - Expect.token env T_RPAREN; - let consequent = if_branch env in - let alternate = if Peek.token env = T_ELSE - then begin - Expect.token env T_ELSE; - Some (if_branch env) - end else None in - Statement.If { Statement.If. - test; - consequent; - alternate; - } - ) - - and return = with_loc (fun env -> - if not (in_function env) - then error env Error.IllegalReturn; - Expect.token env T_RETURN; - let argument = - if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env - then None - else Some (Parse.expression env) in - Eat.semicolon env; - Statement.Return { Statement.Return. - argument; - } - ) + let pre_if_leading = Peek.comments env in + Expect.token env T_IF; + let pre_cond_leading = Peek.comments env in + let leading = pre_if_leading @ pre_cond_leading in + Expect.token env T_LPAREN; + let test = Parse.expression env in + Expect.token env T_RPAREN; + let trailing = Peek.comments env in + let consequent = if_branch env in + let alternate = + if Peek.token env = T_ELSE then ( + Expect.token env T_ELSE; + Some (if_branch env) + ) else + None + in + Statement.If + { + Statement.If.test; + consequent; + alternate; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) + + and return = + with_loc (fun env -> + if not (in_function env) then error env Parse_error.IllegalReturn; + let leading = Peek.comments env in + Expect.token env T_RETURN; + let (argument, trailing) = + if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then + (None, Peek.comments env) + else + (Some (Parse.expression env), []) + in + Eat.semicolon env; + Statement.Return + { + Statement.Return.argument; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) and switch = let rec case_list env (seen_default, acc) = match Peek.token env with | T_EOF - | T_RCURLY -> List.rev acc + | T_RCURLY -> + List.rev acc | _ -> let start_loc = Peek.loc env in - let test = match Peek.token env with - | T_DEFAULT -> - if seen_default - then error env Error.MultipleDefaultsInSwitch; - Expect.token env T_DEFAULT; None - | _ -> + let test = + match Peek.token env with + | T_DEFAULT -> + if seen_default then error env Parse_error.MultipleDefaultsInSwitch; + Expect.token env T_DEFAULT; + None + | _ -> Expect.token env T_CASE; - Some (Parse.expression env) in + Some (Parse.expression env) + in let seen_default = seen_default || test = None in let end_loc = Peek.loc env in Expect.token env T_COLON; let term_fn = function - | T_RCURLY | T_DEFAULT | T_CASE -> true - | _ -> false in - let consequent = - Parse.statement_list ~term_fn (env |> with_in_switch true) in - let end_loc = match List.rev consequent with - | last_stmt::_ -> fst last_stmt - | _ -> end_loc in - let acc = (Loc.btwn start_loc end_loc, Statement.Switch.Case.({ - test; - consequent; - }))::acc in + | T_RCURLY + | T_DEFAULT + | T_CASE -> + true + | _ -> false + in + let consequent = Parse.statement_list ~term_fn (env |> with_in_switch true) in + let end_loc = + match List.rev consequent with + | last_stmt :: _ -> fst last_stmt + | _ -> end_loc + in + let acc = + (Loc.btwn start_loc end_loc, Statement.Switch.Case.{ test; consequent }) :: acc + in case_list env (seen_default, acc) 
+ in + with_loc (fun env -> + Expect.token env T_SWITCH; + Expect.token env T_LPAREN; + let discriminant = Parse.expression env in + Expect.token env T_RPAREN; + Expect.token env T_LCURLY; + let cases = case_list env (false, []) in + Expect.token env T_RCURLY; + Statement.Switch { Statement.Switch.discriminant; cases }) - in with_loc (fun env -> - Expect.token env T_SWITCH; - Expect.token env T_LPAREN; - let discriminant = Parse.expression env in - Expect.token env T_RPAREN; - Expect.token env T_LCURLY; - let cases = case_list env (false, []) in - Expect.token env T_RCURLY; - Statement.Switch { Statement.Switch. - discriminant; - cases; - } - ) - - and throw = with_loc (fun env -> - let start_loc = Peek.loc env in - Expect.token env T_THROW; - if Peek.is_line_terminator env - then error_at env (start_loc, Error.NewlineAfterThrow); - let argument = Parse.expression env in - Eat.semicolon env; - Statement.(Throw { Throw.argument; }) - ) - - and try_ = with_loc (fun env -> - Expect.token env T_TRY; - let block = Parse.block_body env in - let handler = match Peek.token env with - | T_CATCH -> - let catch = with_loc (fun env -> - Expect.token env T_CATCH; - let param = if Peek.token env = T_LPAREN - then begin - Expect.token env T_LPAREN; - let p = Some (Parse.pattern env Error.StrictCatchVariable) in - Expect.token env T_RPAREN; - p - end else - None - in - let body = Parse.block_body env in - { Ast.Statement.Try.CatchClause. - param; - body; - } - ) env in - Some catch - | _ -> None in - let finalizer = match Peek.token env with - | T_FINALLY -> - Expect.token env T_FINALLY; - Some (Parse.block_body env) - | _ -> None in - - (* No catch or finally? That's an error! *) - if handler = None && finalizer = None then - error_at env (fst block, Error.NoCatchOrFinally); - - Statement.Try { Statement.Try. 
- block; - handler; - finalizer; - } - ) - - and var_or_const = with_loc (fun env -> - let (_loc, declaration), errs = Declaration.variable env in - Eat.semicolon env; - errs |> List.iter (error_at env); - declaration - ) - - and let_ = with_loc (fun env -> - Expect.token env T_LET; - (* Let declaration *) - let declarations, errs = Declaration.variable_declaration_list (env |> with_no_let true) in - let declaration = - Ast.(Statement.VariableDeclaration Statement.VariableDeclaration.({ - declarations; - kind = Let; - })) in - Eat.semicolon env; - errs |> List.iter (error_at env); - declaration - ) - - and while_ = with_loc (fun env -> - Expect.token env T_WHILE; - Expect.token env T_LPAREN; - let test = Parse.expression env in - Expect.token env T_RPAREN; - let body = Parse.statement (env |> with_in_loop true) in - - (* Annex B allows labelled FunctionDeclarations in non-strict mode + and throw = + with_loc (fun env -> + let start_loc = Peek.loc env in + Expect.token env T_THROW; + if Peek.is_line_terminator env then error_at env (start_loc, Parse_error.NewlineAfterThrow); + let argument = Parse.expression env in + Eat.semicolon env; + Statement.(Throw { Throw.argument })) + + and try_ = + with_loc (fun env -> + let leading = Peek.comments env in + Expect.token env T_TRY; + let trailing = Peek.comments env in + let block = Parse.block_body env in + let handler = + match Peek.token env with + | T_CATCH -> + let catch = + with_loc + (fun env -> + Expect.token env T_CATCH; + let param = + if Peek.token env = T_LPAREN then ( + Expect.token env T_LPAREN; + let p = Some (Parse.pattern env Parse_error.StrictCatchVariable) in + Expect.token env T_RPAREN; + p + ) else + None + in + let body = Parse.block_body env in + { Ast.Statement.Try.CatchClause.param; body }) + env + in + Some catch + | _ -> None + in + let finalizer = + match Peek.token env with + | T_FINALLY -> + Expect.token env T_FINALLY; + Some (Parse.block_body env) + | _ -> None + in + (* No catch or finally? That's an error! *) + if handler = None && finalizer = None then + error_at env (fst block, Parse_error.NoCatchOrFinally); + + Statement.Try + { + Statement.Try.block; + handler; + finalizer; + comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing (); + }) + + and var = + with_loc (fun env -> + let (declaration, errs) = Declaration.var env in + Eat.semicolon env; + errs |> List.iter (error_at env); + Statement.VariableDeclaration declaration) + + and const = + with_loc (fun env -> + let (declaration, errs) = Declaration.const env in + Eat.semicolon env; + errs |> List.iter (error_at env); + Statement.VariableDeclaration declaration) + + and let_ = + with_loc (fun env -> + let (declaration, errs) = Declaration.let_ env in + Eat.semicolon env; + errs |> List.iter (error_at env); + Statement.VariableDeclaration declaration) + + and while_ = + with_loc (fun env -> + Expect.token env T_WHILE; + Expect.token env T_LPAREN; + let test = Parse.expression env in + Expect.token env T_RPAREN; + let body = Parse.statement (env |> with_in_loop true) in + (* Annex B allows labelled FunctionDeclarations in non-strict mode (see sec-labelled-function-declarations), but not in IterationStatement (see sec-semantics-static-semantics-early-errors). *) - if not (in_strict_mode env) && is_labelled_function body - then function_as_statement_error_at env (fst body); + if (not (in_strict_mode env)) && is_labelled_function body then + function_as_statement_error_at env (fst body); - Statement.While { Statement.While. 
- test; - body; - } - ) + Statement.While { Statement.While.test; body }) and with_ env = - let loc, stmt = with_loc (fun env -> - Expect.token env T_WITH; - Expect.token env T_LPAREN; - let _object = Parse.expression env in - Expect.token env T_RPAREN; - let body = Parse.statement env in - - (* Annex B allows labelled FunctionDeclarations in non-strict mode + let (loc, stmt) = + with_loc + (fun env -> + Expect.token env T_WITH; + Expect.token env T_LPAREN; + let _object = Parse.expression env in + Expect.token env T_RPAREN; + let body = Parse.statement env in + (* Annex B allows labelled FunctionDeclarations in non-strict mode (see sec-labelled-function-declarations), but not in WithStatement (see sec-with-statement-static-semantics-early-errors). *) - if not (in_strict_mode env) && is_labelled_function body - then function_as_statement_error_at env (fst body); + if (not (in_strict_mode env)) && is_labelled_function body then + function_as_statement_error_at env (fst body); - Statement.With { Statement.With. - _object; - body; - } - ) env in - strict_error_at env (loc, Error.StrictModeWith); - loc, stmt + Statement.With { Statement.With._object; body }) + env + in + strict_error_at env (loc, Parse_error.StrictModeWith); + (loc, stmt) and block env = - let loc, block = Parse.block_body env in - loc, Statement.Block block + let (loc, block) = Parse.block_body env in + (loc, Statement.Block block) - and maybe_labeled = with_loc (fun env -> - match (Parse.expression env, Peek.token env) with - | ((loc, Ast.Expression.Identifier label), T_COLON) -> - let _, name = label in - Expect.token env T_COLON; - if SSet.mem name (labels env) - then error_at env (loc, Error.Redeclaration ("Label", name)); - let env = add_label env name in - let body = - (* labelled FunctionDeclarations are allowed in non-strict mode + and maybe_labeled = + with_loc (fun env -> + match (Parse.expression env, Peek.token env) with + | ((loc, Ast.Expression.Identifier label), T_COLON) -> + let (_, { Identifier.name; comments = _ }) = label in + Expect.token env T_COLON; + if SSet.mem name (labels env) then + error_at env (loc, Parse_error.Redeclaration ("Label", name)); + let env = add_label env name in + let body = + (* labelled FunctionDeclarations are allowed in non-strict mode (see #sec-labelled-function-declarations) *) - if Peek.is_function env then function_as_statement env - else Parse.statement env - in - Statement.Labeled { Statement.Labeled.label; body; } - | expression, _ -> - Eat.semicolon env; - Statement.(Expression { Expression.expression; directive = None; }) - ) + if Peek.is_function env then + function_as_statement env + else + Parse.statement env + in + Statement.Labeled { Statement.Labeled.label; body } + | (expression, _) -> + Eat.semicolon ~expected:"the end of an expression statement (`;`)" env; + Statement.(Expression { Expression.expression; directive = None })) - and expression = with_loc (fun env -> - let expression = Parse.expression env in - Eat.semicolon env; - let directive = if allow_directive env - then match expression with - | _, Ast.Expression.Literal { Ast.Literal. - value = Ast.Literal.String _; - raw; - _ - } -> Some (String.sub raw 1 (String.length raw - 2)) - | _ -> None - else None - in - Statement.Expression { Statement.Expression. 
- expression; - directive; - } - ) + and expression = + with_loc (fun env -> + let expression = Parse.expression env in + Eat.semicolon ~expected:"the end of an expression statement (`;`)" env; + let directive = + if allow_directive env then + match expression with + | (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String _; raw; _ }) -> + Some (String.sub raw 1 (String.length raw - 2)) + | _ -> None + else + None + in + Statement.Expression { Statement.Expression.expression; directive }) and type_alias_helper env = - if not (should_parse_types env) - then error env Error.UnexpectedTypeAlias; + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAlias; Expect.token env T_TYPE; Eat.push_lex_mode env Lex_mode.TYPE; let id = Type.type_identifier env in - let tparams = Type.type_parameter_declaration_with_defaults env in + let tparams = Type.type_parameter_declaration env in Expect.token env T_ASSIGN; let right = Type._type env in Eat.semicolon env; Eat.pop_lex_mode env; - Statement.TypeAlias.({ - id; - tparams; - right; - }) - - and declare_type_alias env = with_loc (fun env -> - Expect.token env T_DECLARE; - let type_alias = type_alias_helper env in - Statement.DeclareTypeAlias type_alias - ) env + Statement.TypeAlias.{ id; tparams; right } + + and declare_type_alias env = + with_loc + (fun env -> + Expect.token env T_DECLARE; + let type_alias = type_alias_helper env in + Statement.DeclareTypeAlias type_alias) + env and type_alias env = - if Peek.ith_is_identifier ~i:1 env - then - let loc, type_alias = with_loc type_alias_helper env in - loc, Statement.TypeAlias type_alias + if Peek.ith_is_identifier ~i:1 env then + let (loc, type_alias) = with_loc type_alias_helper env in + (loc, Statement.TypeAlias type_alias) else Parse.statement env - and opaque_type_helper ?(declare=false) env = - if not (should_parse_types env) - then error env Error.UnexpectedOpaqueTypeAlias; + and opaque_type_helper ?(declare = false) env = + if not (should_parse_types env) then error env Parse_error.UnexpectedOpaqueTypeAlias; Expect.token env T_OPAQUE; Expect.token env T_TYPE; Eat.push_lex_mode env Lex_mode.TYPE; let id = Type.type_identifier env in - let tparams = Type.type_parameter_declaration_with_defaults env in - let supertype = match Peek.token env with - | T_COLON -> + let tparams = Type.type_parameter_declaration env in + let supertype = + match Peek.token env with + | T_COLON -> Expect.token env T_COLON; Some (Type._type env) - | _ -> None in + | _ -> None + in let impltype = - if not declare then - (Expect.token env T_ASSIGN; - Some (Type._type env)) - else None in + if not declare then ( + Expect.token env T_ASSIGN; + Some (Type._type env) + ) else + None + in Eat.semicolon env; Eat.pop_lex_mode env; - Statement.OpaqueType.({ - id; - tparams; - impltype; - supertype; - }) - - and declare_opaque_type env = with_loc (fun env -> - Expect.token env T_DECLARE; - let opaque_t = opaque_type_helper ~declare:true env in - Statement.DeclareOpaqueType opaque_t - ) env + Statement.OpaqueType.{ id; tparams; impltype; supertype } + + and declare_opaque_type env = + with_loc + (fun env -> + Expect.token env T_DECLARE; + let opaque_t = opaque_type_helper ~declare:true env in + Statement.DeclareOpaqueType opaque_t) + env and opaque_type env = match Peek.ith_token ~i:1 env with - T_TYPE -> - let loc, opaque_t = with_loc (opaque_type_helper ~declare:false) env in - loc, Statement.OpaqueType opaque_t + | T_TYPE -> + let (loc, opaque_t) = with_loc (opaque_type_helper ~declare:false) env in 
+ (loc, Statement.OpaqueType opaque_t) | _ -> Parse.statement env and interface_helper env = - if not (should_parse_types env) - then error env Error.UnexpectedTypeInterface; + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface; Expect.token env T_INTERFACE; let id = Type.type_identifier env in - let tparams = Type.type_parameter_declaration_with_defaults env in + let tparams = Type.type_parameter_declaration env in let { Ast.Type.Interface.extends; body } = Type.interface_helper env in - Statement.Interface.({ - id; - tparams; - body; - extends; - }) - - and declare_interface env = with_loc (fun env -> - Expect.token env T_DECLARE; - let iface = interface_helper env in - Statement.DeclareInterface iface - ) env + Statement.Interface.{ id; tparams; body; extends } + + and declare_interface env = + with_loc + (fun env -> + Expect.token env T_DECLARE; + let iface = interface_helper env in + Statement.DeclareInterface iface) + env and interface env = (* disambiguate between a value named `interface`, like `var interface = 1; interface++`, and an interface declaration like `interface Foo {}`.` *) - if Peek.ith_is_identifier_name ~i:1 env - then - let loc, iface = with_loc interface_helper env in - loc, Statement.InterfaceDeclaration iface - else expression env + if Peek.ith_is_identifier_name ~i:1 env then + let (loc, iface) = with_loc interface_helper env in + (loc, Statement.InterfaceDeclaration iface) + else + expression env and declare_class = let rec mixins env acc = let super = Type.generic env in - let acc = super::acc in + let acc = super :: acc in match Peek.token env with | T_COMMA -> Expect.token env T_COMMA; mixins env acc | _ -> List.rev acc - - (* This is identical to `interface`, except that mixins are allowed *) - in fun env -> + (* This is identical to `interface`, except that mixins are allowed *) + in + fun env -> let env = env |> with_strict true in Expect.token env T_CLASS; let id = Parse.identifier env in - let tparams = Type.type_parameter_declaration_with_defaults env in - let extends = if Expect.maybe env T_EXTENDS then Some (Type.generic env) else None in - let mixins = match Peek.token env with - | T_IDENTIFIER { raw = "mixins"; _ } -> Eat.token env; mixins env [] - | _ -> [] + let tparams = Type.type_parameter_declaration env in + let extends = + if Expect.maybe env T_EXTENDS then + Some (Type.generic env) + else + None + in + let mixins = + match Peek.token env with + | T_IDENTIFIER { raw = "mixins"; _ } -> + Eat.token env; + mixins env [] + | _ -> [] in - let implements = match Peek.token env with - | T_IMPLEMENTS -> Eat.token env; Object.class_implements env [] - | _ -> [] + let implements = + match Peek.token env with + | T_IMPLEMENTS -> + Eat.token env; + Object.class_implements env [] + | _ -> [] in let body = Type._object ~is_class:true env in - Statement.DeclareClass.({ - id; - tparams; - body; - extends; - mixins; - implements; - }) - - and declare_class_statement env = with_loc (fun env -> - Expect.token env T_DECLARE; - let fn = declare_class env in - Statement.DeclareClass fn - ) env + Statement.DeclareClass.{ id; tparams; body; extends; mixins; implements } + + and declare_class_statement env = + with_loc + (fun env -> + Expect.token env T_DECLARE; + let fn = declare_class env in + Statement.DeclareClass fn) + env and declare_function env = Expect.token env T_FUNCTION; @@ -680,143 +724,134 @@ module Statement let return = Type._type env in let end_loc = fst return in let loc = Loc.btwn start_sig_loc end_loc in - let annot 
= loc, Ast.Type.(Function {Function. - params; - return; - tparams; - }) in - let annot = fst annot, annot in + let annot = (loc, Ast.Type.(Function { Function.params; return; tparams })) in + let annot = (fst annot, annot) in let predicate = Type.predicate_opt env in Eat.semicolon env; - Statement.DeclareFunction.({ - id; - annot; - predicate; - }) - - and declare_function_statement env = with_loc (fun env -> - Expect.token env T_DECLARE; - begin match Peek.token env with - | T_ASYNC -> - error env Error.DeclareAsync; - Expect.token env T_ASYNC - | _ -> () - end; - let fn = declare_function env in - Statement.DeclareFunction fn - ) env + Statement.DeclareFunction.{ id; annot; predicate } + + and declare_function_statement env = + with_loc + (fun env -> + Expect.token env T_DECLARE; + begin + match Peek.token env with + | T_ASYNC -> + error env Parse_error.DeclareAsync; + Expect.token env T_ASYNC + | _ -> () + end; + let fn = declare_function env in + Statement.DeclareFunction fn) + env and declare_var env = Expect.token env T_VAR; - let _loc, { Pattern.Identifier.name; annot; _; } = - Parse.identifier_with_type env ~no_optional:true Error.StrictVarName in + let (_loc, { Pattern.Identifier.name; annot; _ }) = + Parse.identifier_with_type env ~no_optional:true Parse_error.StrictVarName + in Eat.semicolon env; - Statement.DeclareVariable.({ id=name; annot; }) + Statement.DeclareVariable.{ id = name; annot } - and declare_var_statement env = with_loc (fun env -> - Expect.token env T_DECLARE; - let var = declare_var env in - Statement.DeclareVariable var - ) env + and declare_var_statement env = + with_loc + (fun env -> + Expect.token env T_DECLARE; + let var = declare_var env in + Statement.DeclareVariable var) + env and declare_module = let rec module_items env ~module_kind acc = match Peek.token env with | T_EOF - | T_RCURLY -> (module_kind, List.rev acc) + | T_RCURLY -> + (module_kind, List.rev acc) | _ -> let stmt = declare ~in_module:true env in (* TODO: This is a semantic analysis and shouldn't be in the parser *) - let module_kind = Statement.( - let (loc, stmt) = stmt in - match (module_kind, stmt) with - (** - * The first time we see either a `declare export` or a - * `declare module.exports`, we lock in the kind of the module. - * - * `declare export type` and `declare export interface` are the two - * exceptions to this rule because they are valid in both CommonJS - * and ES modules (and thus do not indicate an intent for either). - *) - | None, DeclareModuleExports _ -> Some (DeclareModule.CommonJS loc) - | None, DeclareExportDeclaration { - DeclareExportDeclaration.declaration; - _; - } -> - (match declaration with + let module_kind = + Statement.( + let (loc, stmt) = stmt in + match (module_kind, stmt) with + (* + * The first time we see either a `declare export` or a + * `declare module.exports`, we lock in the kind of the module. + * + * `declare export type` and `declare export interface` are the two + * exceptions to this rule because they are valid in both CommonJS + * and ES modules (and thus do not indicate an intent for either). 
+ *) + | (None, DeclareModuleExports _) -> Some (DeclareModule.CommonJS loc) + | (None, DeclareExportDeclaration { DeclareExportDeclaration.declaration; _ }) -> + (match declaration with | Some (DeclareExportDeclaration.NamedType _) - | Some (DeclareExportDeclaration.Interface _) - -> module_kind - | _ -> Some (DeclareModule.ES loc) - ) - - (** - * There should never be more than one `declare module.exports` - * statement *) - | Some (DeclareModule.CommonJS _), DeclareModuleExports _ -> - error env Parse_error.DuplicateDeclareModuleExports; - module_kind - - (** - * It's never ok to mix and match `declare export` and - * `declare module.exports` in the same module because it leaves the - * kind of the module (CommonJS vs ES) ambiguous. - * - * The 1 exception to this rule is that `export type/interface` are - * both ok in CommonJS modules. - *) - | Some (DeclareModule.ES _), DeclareModuleExports _ -> - error env Parse_error.AmbiguousDeclareModuleKind; - module_kind - | Some (DeclareModule.CommonJS _), DeclareExportDeclaration { - DeclareExportDeclaration.declaration; - _; - } -> + | Some (DeclareExportDeclaration.Interface _) -> + module_kind + | _ -> Some (DeclareModule.ES loc)) + (* + * There should never be more than one `declare module.exports` + * statement *) + | (Some (DeclareModule.CommonJS _), DeclareModuleExports _) -> + error env Parse_error.DuplicateDeclareModuleExports; + module_kind + (* + * It's never ok to mix and match `declare export` and + * `declare module.exports` in the same module because it leaves the + * kind of the module (CommonJS vs ES) ambiguous. + * + * The 1 exception to this rule is that `export type/interface` are + * both ok in CommonJS modules. + *) + | (Some (DeclareModule.ES _), DeclareModuleExports _) -> + error env Parse_error.AmbiguousDeclareModuleKind; + module_kind + | ( Some (DeclareModule.CommonJS _), + DeclareExportDeclaration { DeclareExportDeclaration.declaration; _ } ) -> (match declaration with - | Some (DeclareExportDeclaration.NamedType _) - | Some (DeclareExportDeclaration.Interface _) - -> () - | _ -> error env Parse_error.AmbiguousDeclareModuleKind - ); + | Some (DeclareExportDeclaration.NamedType _) + | Some (DeclareExportDeclaration.Interface _) -> + () + | _ -> error env Parse_error.AmbiguousDeclareModuleKind); module_kind - - | _ -> module_kind - ) in - module_items env ~module_kind (stmt::acc) + | _ -> module_kind) + in + module_items env ~module_kind (stmt :: acc) in - let declare_module_ env start_loc = - let id = match Peek.token env with - | T_STRING (loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; + let id = + match Peek.token env with + | T_STRING (loc, value, raw, octal) -> + if octal then strict_error env Parse_error.StrictOctalLiteral; Expect.token env (T_STRING (loc, value, raw, octal)); - Statement.DeclareModule.Literal (loc, { StringLiteral.value; raw; }) - | _ -> - Statement.DeclareModule.Identifier (Parse.identifier env) in - let body_loc, (module_kind, body) = with_loc (fun env -> - Expect.token env T_LCURLY; - let res = module_items env ~module_kind:None [] in - Expect.token env T_RCURLY; - res - ) env in - let body = body_loc, { Statement.Block.body; } in + Statement.DeclareModule.Literal (loc, { StringLiteral.value; raw }) + | _ -> Statement.DeclareModule.Identifier (Parse.identifier env) + in + let (body_loc, (module_kind, body)) = + with_loc + (fun env -> + Expect.token env T_LCURLY; + let res = module_items env ~module_kind:None [] in + Expect.token env T_RCURLY; + res) + 
env + in + let body = (body_loc, { Statement.Block.body }) in let loc = Loc.btwn start_loc body_loc in let kind = match module_kind with | Some k -> k | None -> Statement.DeclareModule.CommonJS loc in - loc, - Statement.(DeclareModule DeclareModule.({ id; body; kind; })) + (loc, Statement.(DeclareModule DeclareModule.{ id; body; kind })) in - fun ?(in_module=false) env -> + fun ?(in_module = false) env -> let start_loc = Peek.loc env in Expect.token env T_DECLARE; Expect.identifier env "module"; - if in_module || Peek.token env = T_PERIOD - then - let loc, exports = with_loc declare_module_exports env in - Loc.btwn start_loc loc, exports + if in_module || Peek.token env = T_PERIOD then + let (loc, exports) = with_loc declare_module_exports env in + (Loc.btwn start_loc loc, exports) else declare_module_ env start_loc @@ -827,505 +862,506 @@ module Statement Eat.semicolon env; Statement.DeclareModuleExports type_annot - and declare ?(in_module=false) env = - if not (should_parse_types env) - then error env Error.UnexpectedTypeDeclaration; + and declare ?(in_module = false) env = + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeDeclaration; + (* eventually, just emit a wrapper AST node *) - (match Peek.ith_token ~i:1 env with - | T_CLASS -> - declare_class_statement env - | T_INTERFACE -> - declare_interface env - | T_TYPE -> ( - match Peek.token env with - | T_IMPORT when in_module -> - import_declaration env - | _ -> - declare_type_alias env - ) - | T_OPAQUE -> - declare_opaque_type env - | T_TYPEOF when (Peek.token env) = T_IMPORT -> - import_declaration env - | T_FUNCTION - | T_ASYNC -> - declare_function_statement env - | T_VAR -> - declare_var_statement env - | T_EXPORT when in_module -> - declare_export_declaration ~allow_export_type:in_module env - | T_IDENTIFIER { raw = "module"; _ } -> - declare_module ~in_module env - | _ when in_module -> ( - match Peek.token env with - | T_IMPORT -> - error env Error.InvalidNonTypeImportInDeclareModule; - Parse.statement env - | _ -> - (* Oh boy, found some bad stuff in a declare module. Let's just - * pretend it's a declare var (arbitrary choice) *) - declare_var_statement env - ) + match Peek.ith_token ~i:1 env with + | T_CLASS -> declare_class_statement env + | T_INTERFACE -> declare_interface env + | T_TYPE -> + (match Peek.token env with + | T_IMPORT when in_module -> import_declaration env + | _ -> declare_type_alias env) + | T_OPAQUE -> declare_opaque_type env + | T_TYPEOF when Peek.token env = T_IMPORT -> import_declaration env + | T_FUNCTION + | T_ASYNC -> + declare_function_statement env + | T_VAR -> declare_var_statement env + | T_EXPORT when in_module -> declare_export_declaration ~allow_export_type:in_module env + | T_IDENTIFIER { raw = "module"; _ } -> declare_module ~in_module env + | _ when in_module -> + (match Peek.token env with + | T_IMPORT -> + error env Parse_error.InvalidNonTypeImportInDeclareModule; + Parse.statement env | _ -> - Parse.statement env - ) + (* Oh boy, found some bad stuff in a declare module. 
Let's just + * pretend it's a declare var (arbitrary choice) *) + declare_var_statement env) + | _ -> Parse.statement env and export_source env = Expect.identifier env "from"; match Peek.token env with | T_STRING (loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; - Expect.token env (T_STRING (loc, value, raw, octal)); - loc, { StringLiteral.value; raw; } + if octal then strict_error env Parse_error.StrictOctalLiteral; + Expect.token env (T_STRING (loc, value, raw, octal)); + (loc, { StringLiteral.value; raw }) | _ -> - (* Just make up a string for the error case *) - let ret = Peek.loc env, { StringLiteral.value = ""; raw = ""; } in - error_unexpected env; - ret + (* Just make up a string for the error case *) + let ret = (Peek.loc env, { StringLiteral.value = ""; raw = "" }) in + error_unexpected ~expected:"a string" env; + ret and extract_pattern_binding_names = - let rec fold acc = Pattern.(function - | (_, Object {Object.properties; _;}) -> - List.fold_left (fun acc prop -> - match prop with - | Object.Property (_, {Object.Property.pattern; _;}) - | Object.RestProperty (_, {Object.RestProperty.argument = pattern;}) - -> fold acc pattern - ) acc properties - | (_, Array {Array.elements; _;}) -> - List.fold_left Array.(fun acc elem -> - match elem with - | Some (Element pattern) - | Some (RestElement (_, {RestElement.argument = pattern;})) - -> fold acc pattern - | None -> acc - ) acc elements - | (_, Assignment {Assignment.left;_;}) -> fold acc left - | (_, Identifier {Pattern.Identifier.name; _; }) -> - name::acc - | (_, Expression _) -> - failwith "Parser error: No such thing as an expression pattern!" - ) in + let rec fold acc = + Pattern.( + function + | (_, Object { Object.properties; _ }) -> + List.fold_left + (fun acc prop -> + match prop with + | Object.Property (_, { Object.Property.pattern; _ }) + | Object.RestProperty (_, { Object.RestProperty.argument = pattern }) -> + fold acc pattern) + acc + properties + | (_, Array { Array.elements; _ }) -> + List.fold_left + Array.( + fun acc elem -> + match elem with + | Some (Element (_, { Element.argument = pattern; default = _ })) + | Some (RestElement (_, { RestElement.argument = pattern })) -> + fold acc pattern + | None -> acc) + acc + elements + | (_, Identifier { Pattern.Identifier.name; _ }) -> name :: acc + | (_, Expression _) -> failwith "Parser error: No such thing as an expression pattern!") + in List.fold_left fold - and extract_ident_name (_, name) = name + and extract_ident_name (_, { Identifier.name; comments = _ }) = name - and export_specifiers ?(preceding_comma=true) env specifiers = + and export_specifiers ?(preceding_comma = true) env specifiers = match Peek.token env with | T_EOF | T_RCURLY -> - List.rev specifiers + List.rev specifiers | _ -> - if not preceding_comma then error env Error.ExportSpecifierMissingComma; - let specifier = with_loc (fun env -> - let local = identifier_name env in - let exported = - match Peek.token env with - | T_IDENTIFIER { raw = "as"; _ } -> - Eat.token env; - let exported = identifier_name env in - record_export env exported; - Some exported - | _ -> - record_export env local; - None - in - { Statement.ExportNamedDeclaration.ExportSpecifier.local; exported; } - ) env in - let preceding_comma = Expect.maybe env T_COMMA in - export_specifiers ~preceding_comma env (specifier::specifiers) + if not preceding_comma then error env Parse_error.ExportSpecifierMissingComma; + let specifier = + with_loc + (fun env -> + let local = identifier_name env in + 
let exported = + match Peek.token env with + | T_IDENTIFIER { raw = "as"; _ } -> + Eat.token env; + let exported = identifier_name env in + record_export env exported; + Some exported + | _ -> + record_export env local; + None + in + { Statement.ExportNamedDeclaration.ExportSpecifier.local; exported }) + env + in + let preceding_comma = Expect.maybe env T_COMMA in + export_specifiers ~preceding_comma env (specifier :: specifiers) and assert_export_specifier_identifiers env specifiers = - let open Statement.ExportNamedDeclaration.ExportSpecifier in - List.iter (function - | _, { local = id; exported = None; } -> - Parse.assert_identifier_name_is_identifier - ~restricted_error:Parse_error.StrictVarName env id - | _ -> - () - ) specifiers - - and export_declaration ~decorators = with_loc (fun env -> - let env = env |> with_strict true |> with_in_export true in - let start_loc = Peek.loc env in - Expect.token env T_EXPORT; - match Peek.token env with - | T_DEFAULT -> - (* export default ... *) - let open Statement.ExportDefaultDeclaration in - let default, () = with_loc (fun env -> - Expect.token env T_DEFAULT - ) env in - record_export env (Loc.btwn start_loc (Peek.loc env), "default"); - let declaration = - if Peek.is_function env then - (* export default [async] function [foo] (...) { ... } *) - let fn = Declaration._function env in - Declaration fn - else if Peek.is_class env then - (* export default class foo { ... } *) - let _class = Object.class_declaration env decorators in - Declaration _class - else - (* export default [assignment expression]; *) - let expr = Parse.assignment env in - Eat.semicolon env; - Expression expr - in - Statement.ExportDefaultDeclaration { - default; - declaration; - } - | T_TYPE when (Peek.ith_token ~i:1 env) <> T_LCURLY -> - (* export type ... *) - let open Statement.ExportNamedDeclaration in - if not (should_parse_types env) - then error env Error.UnexpectedTypeExport; - (match Peek.ith_token ~i:1 env with - | T_MULT -> - Expect.token env T_TYPE; - let specifier_loc = Peek.loc env in - Expect.token env T_MULT; - let source = export_source env in - Eat.semicolon env; - Statement.ExportNamedDeclaration { - declaration = None; - specifiers = Some (ExportBatchSpecifier (specifier_loc, None)); - source = Some source; - exportKind = Statement.ExportType; - } - | _ -> - let loc, type_alias = with_loc type_alias_helper env in - record_export env (loc, extract_ident_name type_alias.Statement.TypeAlias.id); - let type_alias = (loc, Statement.TypeAlias type_alias) in - Statement.ExportNamedDeclaration { - declaration = Some type_alias; - specifiers = None; - source = None; - exportKind = Statement.ExportType; - } - ) - | T_OPAQUE -> - (* export opaque type ... *) - let open Statement.ExportNamedDeclaration in - let loc, opaque_t = with_loc opaque_type_helper env in - record_export env (loc, extract_ident_name opaque_t.Statement.OpaqueType.id); - let opaque_t = (loc, Statement.OpaqueType opaque_t) in - Statement.ExportNamedDeclaration { - declaration = Some opaque_t; - specifiers = None; - source = None; - exportKind = Statement.ExportType; - } - | T_INTERFACE -> - (* export interface I { ... } *) - let open Statement.ExportNamedDeclaration in - if not (should_parse_types env) - then error env Error.UnexpectedTypeExport; - let interface = interface env in - (match interface with - | (loc, Statement.InterfaceDeclaration {Statement.Interface.id; _;}) -> - record_export env (loc, extract_ident_name id) - | _ -> failwith ( - "Internal Flow Error! 
Parsed `export interface` into something " ^ - "other than an interface declaration!" - ) - ); - Statement.ExportNamedDeclaration { - declaration = Some interface; - specifiers = None; - source = None; - exportKind = Statement.ExportType; - } - | T_LET - | T_CONST - | T_VAR - (* not using Peek.is_class here because it would guard all of the + Statement.ExportNamedDeclaration.ExportSpecifier.( + List.iter + (function + | (_, { local = id; exported = None }) -> + Parse.assert_identifier_name_is_identifier + ~restricted_error:Parse_error.StrictVarName + env + id + | _ -> ()) + specifiers) + + and export_declaration ~decorators = + with_loc (fun env -> + let env = env |> with_strict true |> with_in_export true in + let start_loc = Peek.loc env in + Expect.token env T_EXPORT; + match Peek.token env with + | T_DEFAULT -> + (* export default ... *) + Statement.ExportDefaultDeclaration.( + let (default, ()) = with_loc (fun env -> Expect.token env T_DEFAULT) env in + record_export + env + (Flow_ast_utils.ident_of_source (Loc.btwn start_loc (Peek.loc env), "default")); + let declaration = + if Peek.is_function env then + (* export default [async] function [foo] (...) { ... } *) + let fn = Declaration._function env in + Declaration fn + else if Peek.is_class env then + (* export default class foo { ... } *) + let _class = Object.class_declaration env decorators in + Declaration _class + else if Peek.token env = T_ENUM then + (* export default enum foo { ... } *) + Declaration (Declaration.enum_declaration env) + else + (* export default [assignment expression]; *) + let expr = Parse.assignment env in + Eat.semicolon env; + Expression expr + in + Statement.ExportDefaultDeclaration { default; declaration }) + | T_TYPE when Peek.ith_token ~i:1 env <> T_LCURLY -> + (* export type ... *) + Statement.ExportNamedDeclaration.( + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeExport; + (match Peek.ith_token ~i:1 env with + | T_MULT -> + Expect.token env T_TYPE; + let specifier_loc = Peek.loc env in + Expect.token env T_MULT; + let source = export_source env in + Eat.semicolon env; + Statement.ExportNamedDeclaration + { + declaration = None; + specifiers = Some (ExportBatchSpecifier (specifier_loc, None)); + source = Some source; + exportKind = Statement.ExportType; + } + | _ -> + let (loc, type_alias) = with_loc type_alias_helper env in + record_export + env + (Flow_ast_utils.ident_of_source + (loc, extract_ident_name type_alias.Statement.TypeAlias.id)); + let type_alias = (loc, Statement.TypeAlias type_alias) in + Statement.ExportNamedDeclaration + { + declaration = Some type_alias; + specifiers = None; + source = None; + exportKind = Statement.ExportType; + })) + | T_OPAQUE -> + (* export opaque type ... *) + Statement.ExportNamedDeclaration.( + let (loc, opaque_t) = with_loc opaque_type_helper env in + record_export + env + (Flow_ast_utils.ident_of_source + (loc, extract_ident_name opaque_t.Statement.OpaqueType.id)); + let opaque_t = (loc, Statement.OpaqueType opaque_t) in + Statement.ExportNamedDeclaration + { + declaration = Some opaque_t; + specifiers = None; + source = None; + exportKind = Statement.ExportType; + }) + | T_INTERFACE -> + (* export interface I { ... 
} *) + Statement.ExportNamedDeclaration.( + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeExport; + let interface = interface env in + (match interface with + | (loc, Statement.InterfaceDeclaration { Statement.Interface.id; _ }) -> + record_export env (Flow_ast_utils.ident_of_source (loc, extract_ident_name id)) + | _ -> + failwith + ( "Internal Flow Error! Parsed `export interface` into something " + ^ "other than an interface declaration!" )); + Statement.ExportNamedDeclaration + { + declaration = Some interface; + specifiers = None; + source = None; + exportKind = Statement.ExportType; + }) + | T_LET + | T_CONST + | T_VAR + (* not using Peek.is_class here because it would guard all of the * cases *) - | T_AT - | T_CLASS - (* not using Peek.is_function here because it would guard all of the + + | T_AT + | T_CLASS + (* not using Peek.is_function here because it would guard all of the * cases *) - | T_ASYNC - | T_FUNCTION -> - let open Statement.ExportNamedDeclaration in - let stmt = Parse.statement_list_item env ~decorators:decorators in - let names = Statement.( - match stmt with - | (_, VariableDeclaration { VariableDeclaration.declarations; _; }) -> - List.fold_left (fun names (_, declaration) -> - let id = declaration.VariableDeclaration.Declarator.id in - extract_pattern_binding_names names [id] - ) [] declarations - | (loc, ClassDeclaration { Class.id = Some id; _; }) - | (loc, FunctionDeclaration { Function.id = Some id; _; }) - -> [(loc, extract_ident_name id)] - | (loc, ClassDeclaration { Class.id = None; _; }) -> - error_at env (loc, Error.ExportNamelessClass); - [] - | (loc, FunctionDeclaration { Function.id = None; _; }) -> - error_at env (loc, Error.ExportNamelessFunction); - [] - | _ -> failwith "Internal Flow Error! Unexpected export statement declaration!" 
- ) in - List.iter (record_export env) names; - Statement.ExportNamedDeclaration { - declaration = Some stmt; - specifiers = None; - source = None; - exportKind = Statement.ExportValue; - } - | T_MULT -> - let open Statement.ExportNamedDeclaration in - let loc = Peek.loc env in - Expect.token env T_MULT; - let local_name = - let parse_export_star_as = - (parse_options env).esproposal_export_star_as - in - match Peek.token env with - | T_IDENTIFIER { raw = "as"; _ } -> - Eat.token env; - if parse_export_star_as - then Some (Parse.identifier env) - else (error env Error.UnexpectedTypeDeclaration; None) - | _ -> - None - in - let specifiers = - Some (ExportBatchSpecifier (loc, local_name)) - in - let source = export_source env in - let source = Some source in - Eat.semicolon env; - Statement.ExportNamedDeclaration { - declaration = None; - specifiers; - source; - exportKind = Statement.ExportValue; - } - | _ -> - let open Statement.ExportNamedDeclaration in - let exportKind = ( - match Peek.token env with - | T_TYPE -> Eat.token env; Statement.ExportType - | _ -> Statement.ExportValue - ) in - Expect.token env T_LCURLY; - let specifiers = export_specifiers env [] in - Expect.token env T_RCURLY; - let source = - match Peek.token env with - | T_IDENTIFIER { raw = "from"; _ } -> - Some (export_source env) - | _ -> - assert_export_specifier_identifiers env specifiers; - None - in - Eat.semicolon env; - Statement.ExportNamedDeclaration { - declaration = None; - specifiers = Some (ExportSpecifiers specifiers); - source; - exportKind; - } - ) - - and declare_export_declaration ?(allow_export_type=false) = with_loc (fun env -> - if not (should_parse_types env) - then error env Error.UnexpectedTypeDeclaration; - Expect.token env T_DECLARE; - - let env = env |> with_strict true |> with_in_export true in - Expect.token env T_EXPORT; - Statement.DeclareExportDeclaration.(match Peek.token env with - | T_DEFAULT -> - (* declare export default ... *) - let default, () = with_loc (fun env -> - Expect.token env T_DEFAULT - ) env in - let declaration = match Peek.token env with - | T_FUNCTION -> - (* declare export default function foo (...): ... *) - let fn = with_loc declare_function env in - Some (Function fn) - | T_CLASS -> - (* declare export default class foo { ... } *) - let class_ = with_loc declare_class env in - Some (Class class_) + + | T_ASYNC + | T_FUNCTION + | T_ENUM -> + Statement.ExportNamedDeclaration.( + let stmt = Parse.statement_list_item env ~decorators in + let names = + Statement.( + match stmt with + | (_, VariableDeclaration { VariableDeclaration.declarations; _ }) -> + List.fold_left + (fun names (_, declaration) -> + let id = declaration.VariableDeclaration.Declarator.id in + extract_pattern_binding_names names [id]) + [] + declarations + | (loc, ClassDeclaration { Class.id = Some id; _ }) + | (loc, FunctionDeclaration { Function.id = Some id; _ }) + | (loc, EnumDeclaration { EnumDeclaration.id; _ }) -> + [Flow_ast_utils.ident_of_source (loc, extract_ident_name id)] + | (loc, ClassDeclaration { Class.id = None; _ }) -> + error_at env (loc, Parse_error.ExportNamelessClass); + [] + | (loc, FunctionDeclaration { Function.id = None; _ }) -> + error_at env (loc, Parse_error.ExportNamelessFunction); + [] + | _ -> failwith "Internal Flow Error! 
Unexpected export statement declaration!") + in + List.iter (record_export env) names; + Statement.ExportNamedDeclaration + { + declaration = Some stmt; + specifiers = None; + source = None; + exportKind = Statement.ExportValue; + }) + | T_MULT -> + Statement.ExportNamedDeclaration.( + let loc = Peek.loc env in + Expect.token env T_MULT; + let local_name = + let parse_export_star_as = (parse_options env).esproposal_export_star_as in + match Peek.token env with + | T_IDENTIFIER { raw = "as"; _ } -> + Eat.token env; + if parse_export_star_as then + Some (Parse.identifier env) + else ( + error env Parse_error.UnexpectedTypeDeclaration; + None + ) + | _ -> None + in + let specifiers = Some (ExportBatchSpecifier (loc, local_name)) in + let source = export_source env in + let source = Some source in + Eat.semicolon env; + Statement.ExportNamedDeclaration + { declaration = None; specifiers; source; exportKind = Statement.ExportValue }) | _ -> - (* declare export default [type]; *) - let type_ = Type._type env in + Statement.ExportNamedDeclaration.( + let exportKind = + match Peek.token env with + | T_TYPE -> + Eat.token env; + Statement.ExportType + | _ -> Statement.ExportValue + in + Expect.token env T_LCURLY; + let specifiers = export_specifiers env [] in + Expect.token env T_RCURLY; + let source = + match Peek.token env with + | T_IDENTIFIER { raw = "from"; _ } -> Some (export_source env) + | _ -> + assert_export_specifier_identifiers env specifiers; + None + in Eat.semicolon env; - Some (DefaultType type_) - in - Statement.DeclareExportDeclaration { - default = Some default; - declaration; - specifiers = None; - source = None; - } - | T_LET - | T_CONST - | T_VAR - | T_CLASS - | T_FUNCTION -> - let declaration = match Peek.token env with - | T_FUNCTION -> - (* declare export function foo (...): ... *) - let fn = with_loc declare_function env in - Some (Function fn) - | T_CLASS -> - (* declare export class foo { ... } *) - let class_ = with_loc declare_class env in - Some (Class class_) - | T_LET - | T_CONST - | T_VAR as token -> - (match token with - | T_LET -> error env Error.DeclareExportLet - | T_CONST -> error env Error.DeclareExportConst - | _ -> ()); - (* declare export var foo: ... *) - let var = with_loc declare_var env in - Some (Variable var) - | _ -> assert false in - Statement.DeclareExportDeclaration { - default = None; - declaration; - specifiers = None; - source = None; - } - | T_MULT -> - (* declare export * from 'foo' *) - let loc = Peek.loc env in - Expect.token env T_MULT; - let parse_export_star_as = - (parse_options env).esproposal_export_star_as - in - let local_name = - match Peek.token env with - | T_IDENTIFIER { raw = "as"; _ } -> - Eat.token env; - if parse_export_star_as - then Some (Parse.identifier env) - else (error env Error.UnexpectedTypeDeclaration; None) - | _ -> - None - in - let specifiers = Statement.ExportNamedDeclaration.( - Some (ExportBatchSpecifier (loc, local_name)) - ) in - let source = export_source env in - Eat.semicolon env; - Statement.DeclareExportDeclaration { - default = None; - declaration = None; - specifiers; - source = Some source; - } - | T_TYPE when allow_export_type -> - (* declare export type = ... *) - let alias = with_loc type_alias_helper env in - Statement.DeclareExportDeclaration { - default = None; - declaration = Some (NamedType alias); - specifiers = None; - source = None; - } - | T_OPAQUE -> - (* declare export opaque type = ... 
*) - let opaque = with_loc (opaque_type_helper ~declare:true) env in - Statement.DeclareExportDeclaration { - default = None; - declaration = Some (NamedOpaqueType opaque); - specifiers = None; - source = None; - } - | T_INTERFACE when allow_export_type -> - (* declare export interface ... *) - let iface = with_loc interface_helper env in - Statement.DeclareExportDeclaration { - default = None; - declaration = Some (Interface iface); - specifiers = None; - source = None; - } - | _ -> - (match Peek.token env with - | T_TYPE -> error env Error.DeclareExportType - | T_INTERFACE -> error env Error.DeclareExportInterface - | _ -> () - ); - Expect.token env T_LCURLY; - let specifiers = export_specifiers env [] in - Expect.token env T_RCURLY; - let source = + Statement.ExportNamedDeclaration + { + declaration = None; + specifiers = Some (ExportSpecifiers specifiers); + source; + exportKind; + })) + + and declare_export_declaration ?(allow_export_type = false) = + with_loc (fun env -> + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeDeclaration; + Expect.token env T_DECLARE; + + let env = env |> with_strict true |> with_in_export true in + Expect.token env T_EXPORT; + Statement.DeclareExportDeclaration.( match Peek.token env with - | T_IDENTIFIER { raw = "from"; _ } -> - Some (export_source env) + | T_DEFAULT -> + (* declare export default ... *) + let (default, ()) = with_loc (fun env -> Expect.token env T_DEFAULT) env in + let declaration = + match Peek.token env with + | T_FUNCTION -> + (* declare export default function foo (...): ... *) + let fn = with_loc declare_function env in + Some (Function fn) + | T_CLASS -> + (* declare export default class foo { ... } *) + let class_ = with_loc declare_class env in + Some (Class class_) + | _ -> + (* declare export default [type]; *) + let type_ = Type._type env in + Eat.semicolon env; + Some (DefaultType type_) + in + Statement.DeclareExportDeclaration + { default = Some default; declaration; specifiers = None; source = None } + | T_LET + | T_CONST + | T_VAR + | T_CLASS + | T_FUNCTION -> + let declaration = + match Peek.token env with + | T_FUNCTION -> + (* declare export function foo (...): ... *) + let fn = with_loc declare_function env in + Some (Function fn) + | T_CLASS -> + (* declare export class foo { ... } *) + let class_ = with_loc declare_class env in + Some (Class class_) + | (T_LET | T_CONST | T_VAR) as token -> + (match token with + | T_LET -> error env Parse_error.DeclareExportLet + | T_CONST -> error env Parse_error.DeclareExportConst + | _ -> ()); + + (* declare export var foo: ... 
*) + let var = with_loc declare_var env in + Some (Variable var) + | _ -> assert false + in + Statement.DeclareExportDeclaration + { default = None; declaration; specifiers = None; source = None } + | T_MULT -> + (* declare export * from 'foo' *) + let loc = Peek.loc env in + Expect.token env T_MULT; + let parse_export_star_as = (parse_options env).esproposal_export_star_as in + let local_name = + match Peek.token env with + | T_IDENTIFIER { raw = "as"; _ } -> + Eat.token env; + if parse_export_star_as then + Some (Parse.identifier env) + else ( + error env Parse_error.UnexpectedTypeDeclaration; + None + ) + | _ -> None + in + let specifiers = + Statement.ExportNamedDeclaration.(Some (ExportBatchSpecifier (loc, local_name))) + in + let source = export_source env in + Eat.semicolon env; + Statement.DeclareExportDeclaration + { default = None; declaration = None; specifiers; source = Some source } + | T_TYPE when allow_export_type -> + (* declare export type = ... *) + let alias = with_loc type_alias_helper env in + Statement.DeclareExportDeclaration + { + default = None; + declaration = Some (NamedType alias); + specifiers = None; + source = None; + } + | T_OPAQUE -> + (* declare export opaque type = ... *) + let opaque = with_loc (opaque_type_helper ~declare:true) env in + Statement.DeclareExportDeclaration + { + default = None; + declaration = Some (NamedOpaqueType opaque); + specifiers = None; + source = None; + } + | T_INTERFACE when allow_export_type -> + (* declare export interface ... *) + let iface = with_loc interface_helper env in + Statement.DeclareExportDeclaration + { + default = None; + declaration = Some (Interface iface); + specifiers = None; + source = None; + } | _ -> - assert_export_specifier_identifiers env specifiers; - None - in - Eat.semicolon env; - Statement.DeclareExportDeclaration { - default = None; - declaration = None; - specifiers = Some (Statement.ExportNamedDeclaration.ExportSpecifiers specifiers); - source; - } - ) - ) + (match Peek.token env with + | T_TYPE -> error env Parse_error.DeclareExportType + | T_INTERFACE -> error env Parse_error.DeclareExportInterface + | _ -> ()); + Expect.token env T_LCURLY; + let specifiers = export_specifiers env [] in + Expect.token env T_RCURLY; + let source = + match Peek.token env with + | T_IDENTIFIER { raw = "from"; _ } -> Some (export_source env) + | _ -> + assert_export_specifier_identifiers env specifiers; + None + in + Eat.semicolon env; + Statement.DeclareExportDeclaration + { + default = None; + declaration = None; + specifiers = Some (Statement.ExportNamedDeclaration.ExportSpecifiers specifiers); + source; + })) and import_declaration = - let open Statement.ImportDeclaration in - - let source env = - Expect.identifier env "from"; - match Peek.token env with - | T_STRING (loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; + Statement.ImportDeclaration.( + let source env = + Expect.identifier env "from"; + match Peek.token env with + | T_STRING (loc, value, raw, octal) -> + if octal then strict_error env Parse_error.StrictOctalLiteral; Expect.token env (T_STRING (loc, value, raw, octal)); - loc, { StringLiteral.value; raw; } - | _ -> + (loc, { StringLiteral.value; raw }) + | _ -> (* Just make up a string for the error case *) - let ret = Peek.loc env, { StringLiteral.value = ""; raw = ""; } in - error_unexpected env; + let ret = (Peek.loc env, { StringLiteral.value = ""; raw = "" }) in + error_unexpected ~expected:"a string" env; ret - - in let is_type_import = function - | 
T_TYPE - | T_TYPEOF -> true - | _ -> false - - (* `x` or `x as y` in a specifier *) - in let with_maybe_as ~for_type ?error_if_type env = - let identifier env = - if for_type then Type.type_identifier env else Parse.identifier env in - match Peek.ith_token ~i:1 env with - | T_IDENTIFIER { raw = "as"; _ } -> - let remote = identifier_name env in - Eat.token env; (* as *) - let local = Some (identifier env) in - remote, local - | T_EOF - | T_COMMA - | T_RCURLY -> - identifier env, None - | _ -> - begin match error_if_type, Peek.token env with - | Some error_if_type, T_TYPE - | Some error_if_type, T_TYPEOF -> - error env error_if_type; - Eat.token env; (* consume `type` or `typeof` *) - Type.type_identifier env, None + let is_type_import = function + | T_TYPE + | T_TYPEOF -> + true + | _ -> false + (* `x` or `x as y` in a specifier *) + in + let with_maybe_as ~for_type ?error_if_type env = + let identifier env = + if for_type then + Type.type_identifier env + else + Parse.identifier env + in + match Peek.ith_token ~i:1 env with + | T_IDENTIFIER { raw = "as"; _ } -> + let remote = identifier_name env in + Eat.token env; + + (* as *) + let local = Some (identifier env) in + (remote, local) + | T_EOF + | T_COMMA + | T_RCURLY -> + (identifier env, None) | _ -> - identifier env, None - end + begin + match (error_if_type, Peek.token env) with + | (Some error_if_type, T_TYPE) + | (Some error_if_type, T_TYPEOF) -> + error env error_if_type; + Eat.token env; - (* + (* consume `type` or `typeof` *) + (Type.type_identifier env, None) + | _ -> (identifier env, None) + end + (* ImportSpecifier[Type]: [~Type] ImportedBinding [~Type] IdentifierName ImportedTypeBinding @@ -1346,200 +1382,213 @@ module Statement - It is a Syntax Error if the first IdentifierName's StringValue is not "type" or "typeof", and the third IdentifierName's StringValue is not "as" *) - in let specifier env = - let kind = match Peek.token env with - | T_TYPE -> Some ImportType - | T_TYPEOF -> Some ImportTypeof - | _ -> None in - - if is_type_import (Peek.token env) then begin - (* consume `type`, but we don't know yet whether this is `type foo` or + let specifier env = + let kind = + match Peek.token env with + | T_TYPE -> Some ImportType + | T_TYPEOF -> Some ImportTypeof + | _ -> None + in + if is_type_import (Peek.token env) then + (* consume `type`, but we don't know yet whether this is `type foo` or `type as foo`. 
*) - let type_keyword_or_remote = identifier_name env in - match Peek.token env with - (* `type` (a value) *) - | T_EOF - | T_RCURLY - | T_COMMA -> - let remote = type_keyword_or_remote in (* `type` becomes a value *) - Parse.assert_identifier_name_is_identifier env remote; - { remote; local = None; kind = None } - - (* `type as foo` (value named `type`) or `type as,` (type named `as`) *) - | T_IDENTIFIER { raw = "as"; _ } -> - begin match Peek.ith_token ~i:1 env with + let type_keyword_or_remote = identifier_name env in + match Peek.token env with + (* `type` (a value) *) | T_EOF | T_RCURLY | T_COMMA -> - (* `type as` *) - { remote = Type.type_identifier env; local = None; kind } + let remote = type_keyword_or_remote in + (* `type` becomes a value *) + Parse.assert_identifier_name_is_identifier env remote; + { remote; local = None; kind = None } + (* `type as foo` (value named `type`) or `type as,` (type named `as`) *) | T_IDENTIFIER { raw = "as"; _ } -> - (* `type as as foo` *) - let remote = identifier_name env in (* first `as` *) - Eat.token env; (* second `as` *) - let local = Some (Type.type_identifier env) in (* `foo` *) - { remote; local; kind } + begin + match Peek.ith_token ~i:1 env with + | T_EOF + | T_RCURLY + | T_COMMA -> + (* `type as` *) + { remote = Type.type_identifier env; local = None; kind } + | T_IDENTIFIER { raw = "as"; _ } -> + (* `type as as foo` *) + let remote = identifier_name env in + (* first `as` *) + Eat.token env; + + (* second `as` *) + let local = Some (Type.type_identifier env) in + (* `foo` *) + { remote; local; kind } + | _ -> + (* `type as foo` *) + let remote = type_keyword_or_remote in + (* `type` becomes a value *) + Parse.assert_identifier_name_is_identifier env remote; + Eat.token env; + + (* `as` *) + let local = Some (Parse.identifier env) in + { remote; local; kind = None } + end + (* `type x`, or `type x as y` *) | _ -> - (* `type as foo` *) - let remote = type_keyword_or_remote in (* `type` becomes a value *) - Parse.assert_identifier_name_is_identifier env remote; - Eat.token env; (* `as` *) - let local = Some (Parse.identifier env) in - { remote; local; kind = None } - end - - (* `type x`, or `type x as y` *) - | _ -> - let remote, local = with_maybe_as ~for_type:true env in - { remote; local; kind } - end else - (* standard `x` or `x as y` *) - let remote, local = with_maybe_as ~for_type:false env in - { remote; local; kind = None } - - (* specifier in an `import type { ... }` *) - in let type_specifier env = - let remote, local = with_maybe_as env - ~for_type:true - ~error_if_type:Error.ImportTypeShorthandOnlyInPureImport + let (remote, local) = with_maybe_as ~for_type:true env in + { remote; local; kind } + else + (* standard `x` or `x as y` *) + let (remote, local) = with_maybe_as ~for_type:false env in + { remote; local; kind = None } + (* specifier in an `import type { ... }` *) in - { remote; local; kind = None } - - (* specifier in an `import typeof { ... }` *) - in let typeof_specifier env = - let remote, local = with_maybe_as env - ~for_type:true - ~error_if_type:Error.ImportTypeShorthandOnlyInPureImport + let type_specifier env = + let (remote, local) = + with_maybe_as + env + ~for_type:true + ~error_if_type:Parse_error.ImportTypeShorthandOnlyInPureImport + in + { remote; local; kind = None } + (* specifier in an `import typeof { ... 
}` *) in - { remote; local; kind = None } - - in let rec specifier_list ?(preceding_comma=true) env statement_kind acc = - match Peek.token env with - | T_EOF - | T_RCURLY -> List.rev acc - | _ -> - if not preceding_comma then error env Error.ImportSpecifierMissingComma; - let specifier = match statement_kind with - | ImportType -> type_specifier env - | ImportTypeof -> typeof_specifier env - | ImportValue -> specifier env + let typeof_specifier env = + let (remote, local) = + with_maybe_as + env + ~for_type:true + ~error_if_type:Parse_error.ImportTypeShorthandOnlyInPureImport in - let preceding_comma = Expect.maybe env T_COMMA in - specifier_list ~preceding_comma env statement_kind (specifier::acc) - - in let named_or_namespace_specifier env import_kind = - let start_loc = Peek.loc env in - match Peek.token env with - | T_MULT -> + { remote; local; kind = None } + in + let rec specifier_list ?(preceding_comma = true) env statement_kind acc = + match Peek.token env with + | T_EOF + | T_RCURLY -> + List.rev acc + | _ -> + if not preceding_comma then error env Parse_error.ImportSpecifierMissingComma; + let specifier = + match statement_kind with + | ImportType -> type_specifier env + | ImportTypeof -> typeof_specifier env + | ImportValue -> specifier env + in + let preceding_comma = Expect.maybe env T_COMMA in + specifier_list ~preceding_comma env statement_kind (specifier :: acc) + in + let named_or_namespace_specifier env import_kind = + let start_loc = Peek.loc env in + match Peek.token env with + | T_MULT -> Expect.token env T_MULT; Expect.identifier env "as"; - let id = match import_kind with - | ImportType - | ImportTypeof -> Type.type_identifier env - | ImportValue -> Parse.identifier env + let id = + match import_kind with + | ImportType + | ImportTypeof -> + Type.type_identifier env + | ImportValue -> Parse.identifier env in ImportNamespaceSpecifier (Loc.btwn start_loc (fst id), id) - | _ -> + | _ -> Expect.token env T_LCURLY; let specifiers = specifier_list env import_kind [] in Expect.token env T_RCURLY; ImportNamedSpecifiers specifiers - - in let with_specifiers importKind env = - let specifiers = Some (named_or_namespace_specifier env importKind) in - let source = source env in - Eat.semicolon env; - Statement.ImportDeclaration { - importKind; - source; - specifiers; - default = None; - } - - in let with_default importKind env = - let default_specifier = match importKind with - | ImportType - | ImportTypeof -> Type.type_identifier env - | ImportValue -> Parse.identifier env in - - let additional_specifiers = - match Peek.token env with - | T_COMMA -> (* `import Foo, ...` *) + let with_specifiers importKind env = + let specifiers = Some (named_or_namespace_specifier env importKind) in + let source = source env in + Eat.semicolon env; + Statement.ImportDeclaration { importKind; source; specifiers; default = None } + in + let with_default importKind env = + let default_specifier = + match importKind with + | ImportType + | ImportTypeof -> + Type.type_identifier env + | ImportValue -> Parse.identifier env + in + let additional_specifiers = + match Peek.token env with + | T_COMMA -> + (* `import Foo, ...` *) Expect.token env T_COMMA; Some (named_or_namespace_specifier env importKind) - | _ -> None + | _ -> None + in + let source = source env in + Eat.semicolon env; + Statement.ImportDeclaration + { + importKind; + source; + specifiers = additional_specifiers; + default = Some default_specifier; + } in + with_loc (fun env -> + let env = env |> with_strict true in + Expect.token env 
T_IMPORT; - let source = source env in - Eat.semicolon env; - Statement.ImportDeclaration { - importKind; - source; - specifiers = additional_specifiers; - default = Some default_specifier; - } - - in with_loc (fun env -> - let env = env |> with_strict true in - Expect.token env T_IMPORT; - - match Peek.token env with - (* `import * as ns from "ModuleName";` *) - | T_MULT -> - with_specifiers ImportValue env - - (* `import { ... } from "ModuleName";` *) - | T_LCURLY -> - with_specifiers ImportValue env - - (* `import "ModuleName";` *) - | T_STRING (str_loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; - Expect.token env (T_STRING (str_loc, value, raw, octal)); - let source = (str_loc, { StringLiteral.value; raw; }) in - Eat.semicolon env; - Statement.ImportDeclaration { - importKind = ImportValue; - source; - specifiers = None; - default = None; - } - - (* `import type [...] from "ModuleName";` + match Peek.token env with + (* `import * as ns from "ModuleName";` *) + | T_MULT -> with_specifiers ImportValue env + (* `import { ... } from "ModuleName";` *) + | T_LCURLY -> with_specifiers ImportValue env + (* `import "ModuleName";` *) + | T_STRING (str_loc, value, raw, octal) -> + if octal then strict_error env Parse_error.StrictOctalLiteral; + Expect.token env (T_STRING (str_loc, value, raw, octal)); + let source = (str_loc, { StringLiteral.value; raw }) in + Eat.semicolon env; + Statement.ImportDeclaration + { importKind = ImportValue; source; specifiers = None; default = None } + (* `import type [...] from "ModuleName";` note that if [...] is missing, we're importing a value named `type`! *) - | T_TYPE when should_parse_types env -> - begin match Peek.ith_token ~i:1 env with - (* `import type, { other, names } from "ModuleName";` *) - | T_COMMA - (* `import type from "ModuleName";` *) - | T_IDENTIFIER { raw = "from"; _ } -> - with_default ImportValue env - | T_MULT -> - (* `import type *` is invalid, since the namespace can't be a type *) - Eat.token env; (* consume `type` *) - error_unexpected env; (* unexpected `*` *) - with_specifiers ImportType env - | T_LCURLY -> - Eat.token env; (* consume `type` *) - with_specifiers ImportType env - | _ -> - Eat.token env; (* consume `type` *) - with_default ImportType env - end - - (* `import typeof ... from "ModuleName";` *) - | T_TYPEOF when should_parse_types env -> - Expect.token env T_TYPEOF; - begin match Peek.token env with - | T_MULT - | T_LCURLY -> with_specifiers ImportTypeof env - | _ -> with_default ImportTypeof env - end - - (* import Foo from "ModuleName"; *) - | _ -> - with_default ImportValue env - ) + | T_TYPE when should_parse_types env -> + begin + match Peek.ith_token ~i:1 env with + (* `import type, { other, names } from "ModuleName";` *) + | T_COMMA + (* Importing the exported value named "type." This is not a type-import. + * `import type from "ModuleName";` *) + + | T_IDENTIFIER { raw = "from"; _ } -> + with_default ImportValue env + | T_MULT -> + (* `import type *` is invalid, since the namespace can't be a type *) + Eat.token env; + + (* consume `type` *) + error_unexpected env; + + (* unexpected `*` *) + with_specifiers ImportType env + | T_LCURLY -> + Eat.token env; + + (* consume `type` *) + with_specifiers ImportType env + | _ -> + Eat.token env; + + (* consume `type` *) + with_default ImportType env + end + (* `import typeof ... 
from "ModuleName";` *) + | T_TYPEOF when should_parse_types env -> + Expect.token env T_TYPEOF; + begin + match Peek.token env with + | T_MULT + | T_LCURLY -> + with_specifiers ImportTypeof env + | _ -> with_default ImportTypeof env + end + (* import Foo from "ModuleName"; *) + | _ -> with_default ImportValue env)) end diff --git a/src/parser/test/esprima/ES6/identifier/dakuten_handakuten.tree.json b/src/parser/test/esprima/ES6/identifier/dakuten_handakuten.tree.json index 87b7b7b218b..6a80e8597f1 100644 --- a/src/parser/test/esprima/ES6/identifier/dakuten_handakuten.tree.json +++ b/src/parser/test/esprima/ES6/identifier/dakuten_handakuten.tree.json @@ -4,7 +4,7 @@ { "range": [ 0, - 4 + 8 ], "loc": { "start": { @@ -20,7 +20,7 @@ "expression": { "range": [ 0, - 4 + 8 ], "loc": { "start": { @@ -37,7 +37,7 @@ "left": { "range": [ 0, - 1 + 3 ], "loc": { "start": { @@ -54,8 +54,8 @@ }, "right": { "range": [ - 3, - 4 + 5, + 8 ], "loc": { "start": { @@ -132,7 +132,7 @@ ], "range": [ 0, - 4 + 8 ], "loc": { "start": { diff --git a/src/parser/test/esprima/ES6/identifier/estimated.tree.json b/src/parser/test/esprima/ES6/identifier/estimated.tree.json index fe9f542c5f5..942d2fd3fbe 100644 --- a/src/parser/test/esprima/ES6/identifier/estimated.tree.json +++ b/src/parser/test/esprima/ES6/identifier/estimated.tree.json @@ -4,7 +4,7 @@ { "range": [ 0, - 5 + 7 ], "loc": { "start": { @@ -21,7 +21,7 @@ { "range": [ 4, - 5 + 7 ], "loc": { "start": { @@ -37,7 +37,7 @@ "id": { "range": [ 4, - 5 + 7 ], "loc": { "start": { @@ -99,7 +99,7 @@ ], "range": [ 0, - 5 + 7 ], "loc": { "start": { diff --git a/src/parser/test/esprima/ES6/identifier/ethiopic_digits.tree.json b/src/parser/test/esprima/ES6/identifier/ethiopic_digits.tree.json index 7a2dcd4e2b3..6197e576c7c 100644 --- a/src/parser/test/esprima/ES6/identifier/ethiopic_digits.tree.json +++ b/src/parser/test/esprima/ES6/identifier/ethiopic_digits.tree.json @@ -4,7 +4,7 @@ { "range": [ 0, - 14 + 32 ], "loc": { "start": { @@ -21,7 +21,7 @@ { "range": [ 4, - 14 + 32 ], "loc": { "start": { @@ -37,7 +37,7 @@ "id": { "range": [ 4, - 14 + 32 ], "loc": { "start": { @@ -99,7 +99,7 @@ ], "range": [ 0, - 14 + 32 ], "loc": { "start": { diff --git a/src/parser/test/esprima/ES6/identifier/weierstrass.tree.json b/src/parser/test/esprima/ES6/identifier/weierstrass.tree.json index b0f2a8672a8..90359b8ea9c 100644 --- a/src/parser/test/esprima/ES6/identifier/weierstrass.tree.json +++ b/src/parser/test/esprima/ES6/identifier/weierstrass.tree.json @@ -4,7 +4,7 @@ { "range": [ 0, - 6 + 8 ], "loc": { "start": { @@ -21,7 +21,7 @@ { "range": [ 4, - 5 + 7 ], "loc": { "start": { @@ -37,7 +37,7 @@ "id": { "range": [ 4, - 5 + 7 ], "loc": { "start": { @@ -117,7 +117,7 @@ ], "range": [ 0, - 6 + 8 ], "loc": { "start": { diff --git a/src/parser/test/esprima/ES6/identifier/weierstrass_weierstrass.tree.json b/src/parser/test/esprima/ES6/identifier/weierstrass_weierstrass.tree.json index 32944ac0cef..39ad824a4c4 100644 --- a/src/parser/test/esprima/ES6/identifier/weierstrass_weierstrass.tree.json +++ b/src/parser/test/esprima/ES6/identifier/weierstrass_weierstrass.tree.json @@ -4,7 +4,7 @@ { "range": [ 0, - 11 + 13 ], "loc": { "start": { @@ -21,7 +21,7 @@ { "range": [ 4, - 11 + 13 ], "loc": { "start": { @@ -37,7 +37,7 @@ "id": { "range": [ 4, - 11 + 13 ], "loc": { "start": { @@ -99,7 +99,7 @@ ], "range": [ 0, - 11 + 13 ], "loc": { "start": { diff --git a/src/parser/test/esprima/expression/primary/array/migrated_0007.tree.json 
b/src/parser/test/esprima/expression/primary/array/migrated_0007.tree.json index 2b918952f1e..68c2ce817c0 100644 --- a/src/parser/test/esprima/expression/primary/array/migrated_0007.tree.json +++ b/src/parser/test/esprima/expression/primary/array/migrated_0007.tree.json @@ -11,7 +11,7 @@ "name": "日本語", "range": [ 0, - 3 + 9 ], "loc": { "start": { @@ -28,8 +28,8 @@ "type": "ArrayExpression", "elements": [], "range": [ - 6, - 8 + 12, + 14 ], "loc": { "start": { @@ -44,7 +44,7 @@ }, "range": [ 0, - 8 + 14 ], "loc": { "start": { @@ -59,7 +59,7 @@ }, "range": [ 0, - 8 + 14 ], "loc": { "start": { @@ -150,7 +150,7 @@ ], "range": [ 0, - 8 + 14 ], "loc": { "start": { diff --git a/src/parser/test/esprima/expression/primary/literal/numeric/migrated_0022.skip b/src/parser/test/esprima/expression/primary/literal/numeric/migrated_0022.skip deleted file mode 100644 index 1333ed77b7e..00000000000 --- a/src/parser/test/esprima/expression/primary/literal/numeric/migrated_0022.skip +++ /dev/null @@ -1 +0,0 @@ -TODO diff --git a/src/parser/test/esprima/tolerant-parse/for-in-missing-parenthesis.diff b/src/parser/test/esprima/tolerant-parse/for-in-missing-parenthesis.diff index cc8aeac4166..f31cdae92dc 100644 --- a/src/parser/test/esprima/tolerant-parse/for-in-missing-parenthesis.diff +++ b/src/parser/test/esprima/tolerant-parse/for-in-missing-parenthesis.diff @@ -7,7 +7,15 @@ "loc": { "start": { "line": 3, "column": 0 }, "end": { "line": 3, "column": 0 } - } + }, + "message":"Unexpected end of input, expected the token `)`" + }, + "1": { + "loc": { + "start": { "line": 3, "column": 0 }, + "end": { "line": 3, "column": 0 } + }, + "message":"Unexpected end of input, expected the start of a statement" } } } diff --git a/src/parser/test/esprima/tolerant-parse/for-missing-parenthesis.diff b/src/parser/test/esprima/tolerant-parse/for-missing-parenthesis.diff index f298ce74cc4..50cb11ea982 100644 --- a/src/parser/test/esprima/tolerant-parse/for-missing-parenthesis.diff +++ b/src/parser/test/esprima/tolerant-parse/for-missing-parenthesis.diff @@ -7,7 +7,15 @@ "loc": { "start": { "line": 2, "column": 0 }, "end": { "line": 2, "column": 0 } - } + }, + "message":"Unexpected end of input, expected the token `)`" + }, + "1": { + "loc": { + "start": { "line": 2, "column": 0 }, + "end": { "line": 2, "column": 0 } + }, + "message":"Unexpected end of input, expected the start of a statement" } } } diff --git a/src/parser/test/esprima/tolerant-parse/for-of-missing-parenthesis.diff b/src/parser/test/esprima/tolerant-parse/for-of-missing-parenthesis.diff index cc8aeac4166..f31cdae92dc 100644 --- a/src/parser/test/esprima/tolerant-parse/for-of-missing-parenthesis.diff +++ b/src/parser/test/esprima/tolerant-parse/for-of-missing-parenthesis.diff @@ -7,7 +7,15 @@ "loc": { "start": { "line": 3, "column": 0 }, "end": { "line": 3, "column": 0 } - } + }, + "message":"Unexpected end of input, expected the token `)`" + }, + "1": { + "loc": { + "start": { "line": 3, "column": 0 }, + "end": { "line": 3, "column": 0 } + }, + "message":"Unexpected end of input, expected the start of a statement" } } } diff --git a/src/parser/test/esprima/tolerant-parse/if-missing-parenthesis.diff b/src/parser/test/esprima/tolerant-parse/if-missing-parenthesis.diff index f298ce74cc4..50cb11ea982 100644 --- a/src/parser/test/esprima/tolerant-parse/if-missing-parenthesis.diff +++ b/src/parser/test/esprima/tolerant-parse/if-missing-parenthesis.diff @@ -7,7 +7,15 @@ "loc": { "start": { "line": 2, "column": 0 }, "end": { "line": 2, "column": 0 } - } + }, + 
"message":"Unexpected end of input, expected the token `)`" + }, + "1": { + "loc": { + "start": { "line": 2, "column": 0 }, + "end": { "line": 2, "column": 0 } + }, + "message":"Unexpected end of input, expected the start of a statement" } } } diff --git a/src/parser/test/esprima/tolerant-parse/migrated_0012.diff b/src/parser/test/esprima/tolerant-parse/migrated_0012.diff index fae1d9164a1..125d81c1dd4 100644 --- a/src/parser/test/esprima/tolerant-parse/migrated_0012.diff +++ b/src/parser/test/esprima/tolerant-parse/migrated_0012.diff @@ -7,7 +7,8 @@ "loc": { "start": { "line": 1, "column": 10 }, "end": { "line": 1, "column": 11 } - } + }, + "message":"Unexpected token `;`, expected the token `,`" } } } diff --git a/src/parser/test/esprima/tolerant-parse/while-missing-parenthesis.diff b/src/parser/test/esprima/tolerant-parse/while-missing-parenthesis.diff index f298ce74cc4..50cb11ea982 100644 --- a/src/parser/test/esprima/tolerant-parse/while-missing-parenthesis.diff +++ b/src/parser/test/esprima/tolerant-parse/while-missing-parenthesis.diff @@ -7,7 +7,15 @@ "loc": { "start": { "line": 2, "column": 0 }, "end": { "line": 2, "column": 0 } - } + }, + "message":"Unexpected end of input, expected the token `)`" + }, + "1": { + "loc": { + "start": { "line": 2, "column": 0 }, + "end": { "line": 2, "column": 0 } + }, + "message":"Unexpected end of input, expected the start of a statement" } } } diff --git a/src/parser/test/esprima_tests.js b/src/parser/test/esprima_tests.js index 886557fd62c..1b830ee3c57 100644 --- a/src/parser/test/esprima_tests.js +++ b/src/parser/test/esprima_tests.js @@ -10,9 +10,28 @@ module.exports = { ], 'Invalid syntax': [ - - '{', - '}', + { + content: '{', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `}`' + } + } + }, + { + content: '}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token }', + actual: 'Unexpected token `}`, expected the start of a statement' + } + } + }, '3ea', '3in []', '3e', @@ -52,7 +71,6 @@ module.exports = { }, } }, - '018', '01a', '3in[]', '0x3in[]', @@ -160,14 +178,74 @@ module.exports = { }, } }, - '[', - '[,', - '1 + {', - '1 + { t:t ', - '1 + { t:t,', + { + content: '[', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `]`' + } + } + }, + { + content: '[,', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `]`' + } + } + }, + { + content: '1 + {', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `}`' + } + } + }, + { + content: '1 + { t:t ', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `,`' + } + } + }, + { + content: '1 + { t:t,', + explanation: "Improved error message", + 
expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `}`' + } + } + }, 'var x = /\n/', 'var x = "\n', - 'var if = 42', + { + content: 'var if = 42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token if', + actual: 'Unexpected token `if`' + } + } + }, { content: 'i #= 42', explanation: "# is no longer illegal in Flow, since we support private class fields. " + @@ -177,19 +255,79 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Unexpected token ILLEGAL', - actual: 'Unexpected token #' + actual: 'Unexpected token `#`, expected the end of an expression statement (`;`)' }, } }, 'i + 2 = 42', '+i = 42', '1 + (', - '\n\n\n{', - '\n/* Some multiline\ncomment */\n)', - '{ set 1 }', - '{ get 2 }', - '({ set: s(if) { } })', - '({ set s(.) { } })', + { + content: '\n\n\n{', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `}`' + } + } + }, + { + content: '\n/* Some multiline\ncomment */\n)', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token )', + actual: 'Unexpected token `)`, expected the start of a statement' + } + } + }, + { + content: '{ set 1 }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected number', + actual: 'Unexpected number, expected the end of an expression statement (`;`)' + } + } + }, + { + content: '{ get 2 }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected number', + actual: 'Unexpected number, expected the end of an expression statement (`;`)' + } + } + }, + { + content: '({ set: s(if) { } })', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token if', + actual: 'Unexpected token `if`' + } + } + }, + { + content: '({ set s(.) 
{ } })', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token .', + actual: 'Unexpected token `.`, expected an identifier' + } + } + }, { content: '({ set s() { } })', explanation: "Esprima error isn't great", @@ -206,9 +344,39 @@ module.exports = { } } }, - '({ set: s() { } })', - '({ set: s(a, b) { } })', - '({ get: g(d) { } })', + { + content: '({ set: s() { } })', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected the token `,`' + } + } + }, + { + content: '({ set: s(a, b) { } })', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected the token `,`' + } + } + }, + { + content: '({ get: g(d) { } })', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected the token `,`' + } + } + }, { content: '({ get i() { }, i: 42 })', explanation: 'Esprima-fb is wrong, ES6 allows duplicates', @@ -275,8 +443,28 @@ module.exports = { } } }, - '((a)) => 42', - '(a, (b)) => 42', + { + content: '((a)) => 42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token =>', + actual: 'Unexpected token `=>`, expected the end of an expression statement (`;`)' + } + } + }, + { + content: '(a, (b)) => 42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token =>', + actual: 'Unexpected token `=>`, expected the end of an expression statement (`;`)' + } + } + }, { content: '"use strict"; (eval = 10) => 42', explanation: "This is an arrow function error, not an assignment "+ @@ -349,14 +537,94 @@ module.exports = { } }, '"use strict"; (a) => 00', - '() <= 42', - '() ? 42', - '() + 42', - '(...x) + 42', - '()++', - '()()', - '(10) => 00', - '(10, 20) => 00', + { + content: '() <= 42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token <=', + actual: 'Unexpected token `<=`, expected the token `=>`' + } + } + }, + { + content: '() ? 
42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ?', + actual: 'Unexpected token `?`, expected the token `=>`' + } + } + }, + { + content: '() + 42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token +', + actual: 'Unexpected token `+`, expected the token `=>`' + } + } + }, + { + content: '(...x) + 42', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token +', + actual: 'Unexpected token `+`, expected the token `=>`' + } + } + }, + { + content: '()++', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ++', + actual: 'Unexpected token `++`, expected the token `=>`' + } + } + }, + { + content: '()()', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token (', + actual: 'Unexpected token `(`, expected the token `=>`' + } + } + }, + { + content: '(10) => 00', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token =>', + actual: 'Unexpected token `=>`, expected the end of an expression statement (`;`)' + } + } + }, + { + content: '(10, 20) => 00', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token =>', + actual: 'Unexpected token `=>`, expected the end of an expression statement (`;`)' + } + } + }, { content: '"use strict"; (eval) => 42', explanation: "Esprima error loc is crazy here", @@ -379,17 +647,127 @@ module.exports = { }, } }, - 'function t(if) { }', - 'function t(true) { }', - 'function t(false) { }', - 'function t(null) { }', - 'function null() { }', - 'function true() { }', - 'function false() { }', - 'function if() { }', - 'a b;', - 'if.a;', - 'a if;', + { + content: 'function t(if) { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token if', + actual: 'Unexpected token `if`' + } + } + }, + { + content: 'function t(true) { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token true', + actual: 'Unexpected token `true`' + } + } + }, + { + content: 'function t(false) { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token false', + actual: 'Unexpected token `false`' + } + } + }, + { + content: 'function t(null) { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token null', + actual: 'Unexpected token `null`' + } + } + }, + { + content: 'function null() { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token null', + actual: 'Unexpected token `null`' + } + } + }, + { + content: 'function true() { }', + explanation: "Improved error 
message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token true', + actual: 'Unexpected token `true`' + } + } + }, + { + content: 'function false() { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token false', + actual: 'Unexpected token `false`' + } + } + }, + { + content: 'function if() { }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token if', + actual: 'Unexpected token `if`' + } + } + }, + { + content: 'a b;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected identifier', + actual: 'Unexpected identifier, expected the end of an expression statement (`;`)' + } + } + }, + { + content: 'if.a;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token .', + actual: 'Unexpected token `.`, expected the token `(`' + } + } + }, + { + content: 'a if;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token if', + actual: 'Unexpected token `if`, expected the end of an expression statement (`;`)' + } + } + }, { content: 'a class;', explanation: 'class is no longer a future reserved word', @@ -397,16 +775,46 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Unexpected reserved word', - actual: 'Unexpected token class', + actual: 'Unexpected token `class`, expected the end of an expression statement (`;`)', }, }, }, 'break\n', - 'break 1;', + { + content: 'break 1;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected number', + actual: 'Unexpected number, expected an identifier' + } + } + }, 'continue\n', - 'continue 2;', + { + content: 'continue 2;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected number', + actual: 'Unexpected number, expected an identifier' + } + } + }, 'throw', - 'throw;', + { + content: 'throw;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ;', + actual: 'Unexpected token `;`' + } + } + }, 'throw\n', { content: 'for (var i, i2 in {});', @@ -424,15 +832,85 @@ module.exports = { }, } }, - 'for ((i in {}));', + { + content: 'for ((i in {}));', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token )', + actual: 'Unexpected token `)`, expected the token `;`' + } + } + }, 'for (i + 1 in {});', 'for (+i in {});', - 'if(false)', - 'if(false) doThis(); else', - 'do', - 'while(false)', - 'for(;;)', - 'with(x)', + { + content: 'if(false)', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the start of a statement' + } + } + }, + { + content: 'if(false) doThis(); else', + explanation: "Improved error 
message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the start of a statement' + } + } + }, + { + content: 'do', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the start of a statement' + } + } + }, + { + content: 'while(false)', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the start of a statement' + } + } + }, + { + content: 'for(;;)', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the start of a statement' + } + } + }, + { + content: 'with(x)', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the start of a statement' + } + } + }, 'try { }', 'const x = 12, y;', @@ -451,11 +929,21 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Unexpected token let', - actual: 'Unexpected identifier', + actual: 'Unexpected identifier, expected the end of an expression statement (`;`)', }, } }, - 'if(true) const a = 1;', + { + content: 'if(true) const a = 1;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token const', + actual: 'Unexpected token `const`' + } + } + }, { content: 'switch (c) { default: default: }', explanation: "Esprima points after the duplicate default", @@ -467,24 +955,134 @@ module.exports = { }, } }, - 'new X()."s"', - '/*', - '/*\n\n\n', - '/**', - '/*\n\n*', - '/*hello', - '/*hello *', - '\n]', - '\r]', - '\r\n]', - '\n\r]', - '//\r\n]', - '//\n\r]', - '/a\\\n/', - '//\r \n]', - '/*\r\n*/]', - '/*\n\r*/]', - '/*\r \n*/]', + { + content: 'new X()."s"', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected string', + actual: 'Unexpected string, expected an identifier' + } + } + }, + '/*', + '/*\n\n\n', + '/**', + '/*\n\n*', + '/*hello', + '/*hello *', + { + content: '\n]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '\r]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '\r\n]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '\n\r]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', 
+ expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '//\r\n]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '//\n\r]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + '/a\\\n/', + { + content: '//\r \n]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '/*\r\n*/]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '/*\n\r*/]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, + { + content: '/*\r \n*/]', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ]', + actual: 'Unexpected token `]`, expected the start of a statement' + } + } + }, '\\\\', '\\x', { @@ -499,12 +1097,32 @@ module.exports = { } }, '"\\u', - 'try { } catch() {}', + { + content: 'try { } catch() {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token )', + actual: 'Unexpected token `)`, expected an identifier' + } + } + }, 'return', 'break', 'continue', 'switch (x) { default: continue; }', - 'do { x } *', + { + content: 'do { x } *', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token *', + actual: 'Unexpected token `*`, expected the token `while`' + } + } + }, 'while (true) { break x; }', 'while (true) { continue x; }', 'x: while (true) { (function () { break x; }); }', @@ -703,16 +1321,116 @@ module.exports = { } } }, - 'var', - 'let', - 'const', - '{ ; ; ', - 'function t() { ; ; ', - 'let let', - 'const let=4', - 'for (let let=4;;) {}', - 'for (let in arr) {}', - 'for (let let in arr) {}', + { + content: 'var', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected an identifier' + } + } + }, + { + content: 'let', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected an identifier' + } + } + }, + { + content: 'const', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected an 
identifier' + } + } + }, + { + content: '{ ; ; ', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `}`' + } + } + }, + { + content: 'function t() { ; ; ', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected end of input', + actual: 'Unexpected end of input, expected the token `}`' + } + } + }, + { + content: 'let let', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token let', + actual: 'Unexpected token `let`' + } + } + }, + { + content: 'const let=4', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token let', + actual: 'Unexpected token `let`' + } + } + }, + { + content: 'for (let let=4;;) {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token let', + actual: 'Unexpected token `let`' + } + } + }, + { + content: 'for (let in arr) {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token in', + actual: 'Unexpected token `in`' + } + } + }, + { + content: 'for (let let in arr) {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token let', + actual: 'Unexpected token `let`' + } + } + }, { content: 'class let { }', explanation: "Esprima counts comments in its loc, Flow doesn't", @@ -737,7 +1455,7 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Invalid left-hand side in formals list', - actual: 'Unexpected token .', + actual: 'Unexpected token `.`, expected the token `,`', } }, }, @@ -753,7 +1471,7 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Invalid left-hand side in formals list', - actual: 'Unexpected token .', + actual: 'Unexpected token `.`, expected the token `,`', } }, }, @@ -762,7 +1480,57 @@ module.exports = { '', ' {value} ', '', - '<日本語>', + { + content: '<日本語>', + explanation: 'Flow reports offsets based on bytes', + expected_differences: { + 'root.body.0.expression.openingElement.name.range.1': { + type: 'Wrong number', + expected: 4, + actual: 10, + }, + 'root.body.0.expression.openingElement.range.1': { + type: 'Wrong number', + expected: 5, + actual: 11, + }, + 'root.body.0.expression.closingElement.name.range.0': { + type: 'Wrong number', + expected: 7, + actual: 13, + }, + 'root.body.0.expression.closingElement.name.range.1': { + type: 'Wrong number', + expected: 10, + actual: 22, + }, + 'root.body.0.expression.closingElement.range.0': { + type: 'Wrong number', + expected: 5, + actual: 11, + }, + 'root.body.0.expression.closingElement.range.1': { + type: 'Wrong number', + expected: 11, + actual: 23, + }, + 'root.body.0.expression.range.1': { + type: 'Wrong number', + expected: 11, + actual: 23, + }, + 'root.body.0.range.1': { + type: 'Wrong number', + expected: 11, + actual: 23, + }, + 'root.range.1': { + type: 'Wrong number', + expected: 11, + actual: 23, + }, + }, + }, '\nbar\nbaz\n', ' : } />', '
@test content
', @@ -776,19 +1544,129 @@ module.exports = { '
', '', '', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token :', + actual: 'Unexpected token `:`, expected an identifier' + } + } + }, '', - '<.a>', - '', - '', + { + content: '<.a>', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token .', + actual: 'Unexpected token `.`, expected an identifier' + } + } + }, + { + content: '', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token [', + actual: 'Unexpected token `[`, expected an identifier' + } + } + }, + { + content: '', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token [', + actual: 'Unexpected token `[`, expected an identifier' + } + } + }, '', - '{"str";}', - '', - '
', - '
', - '
stuff
', - '
stuff
', + { + content: '{"str";}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ;', + actual: 'Unexpected token `;`, expected the token `}`' + } + } + }, + { + content: '', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ,', + actual: 'Unexpected token `,`, expected an identifier' + } + } + }, + { + content: '
', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected string', + actual: 'Unexpected string, expected an identifier' + } + } + }, + { + content: '
', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected identifier', + actual: 'Unexpected identifier, expected the token `...`' + } + } + }, + { + content: '
stuff
', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected the token `>`' + } + } + }, + { + content: '
stuff
', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected the token `>`' + } + } + }, '
', ], @@ -827,7 +1705,7 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Invalid left-hand side in formals list', - actual: 'Unexpected token =>', + actual: 'Unexpected token `=>`, expected the end of an expression statement (`;`)', } }, }, @@ -2008,7 +2886,17 @@ module.exports = { /* Esprima doesn't parse nameless exported classes yet 'export class {}', */ - 'export function {}', + { + content: 'export function {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected an identifier' + } + } + }, /* Esprima parses default exports wrong 'export default class A {}', */ @@ -2099,7 +2987,17 @@ module.exports = { 'declare function foo(x: number, y: string): void;', ], 'Invalid Declare Statements': [ - 'declare function foo();', + { + content: 'declare function foo();', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ;', + actual: 'Unexpected token `;`, expected the token `:`' + } + } + }, ], 'Declare Module': [ 'declare module A {}', @@ -2108,8 +3006,28 @@ module.exports = { 'declare module A { declare function foo(): number; }', ], 'Invalid Declare Module': [ - 'declare Module A {}', - 'declare module {}', + { + content: 'declare Module A {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected identifier', + actual: 'Unexpected identifier, expected the end of an expression statement (`;`)' + } + } + }, + { + content: 'declare module {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token {', + actual: 'Unexpected token `{`, expected an identifier' + } + } + }, '"use strict"; declare module "\\01" {}', { content: 'declare module A { declare module B {} }', @@ -2119,6 +3037,11 @@ module.exports = { type: 'Wrong error column', expected: 19, actual: '34-35' + }, + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected identifier', + actual: 'Unexpected identifier, expected the token `.`' } } }, @@ -2129,7 +3052,7 @@ module.exports = { 'root.errors.0.message': { type: 'Wrong error message', expected: 'Unexpected reserved word', - actual: 'Unexpected token export', + actual: 'Unexpected token `export`, expected the token `declare`', }, }, }, @@ -2196,13 +3119,43 @@ module.exports = { ], 'Invalid Typecasts': [ // Must be parenthesized - 'var x: number = 0: number;', + { + content: 'var x: number = 0: number;', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token :', + actual: 'Unexpected token `:`, expected the token `;`' + } + } + }, // ...even within groups - '(xxx: number, yyy: string)' + { + content: '(xxx: number, yyy: string)', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ,', + actual: 'Unexpected token `,`, expected the token `)`' + } + } + }, ], 'Bounded Polymorphism': [ 'function foo() {}', - 'class Foo() {}', + { + content: 'class Foo() {}', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error 
message', + expected: 'Unexpected token (', + actual: 'Unexpected token `(`, expected the token `{`' + } + } + }, ], 'Async/Await': [ 'try { foo(); } catch (async) { bar(); }', @@ -2428,7 +3381,17 @@ module.exports = { }, }, 'var x = async function bar() { await foo; }', - 'async function foo() { return await; }', + { + content: 'async function foo() { return await; }', + explanation: "Improved error message", + expected_differences: { + 'root.errors.0.message': { + type: 'Wrong error message', + expected: 'Unexpected token ;', + actual: 'Unexpected token `;`' + } + } + }, 'var x = async (a, b) => await a;', 'var x = async a => await a;', 'foo(async () => await bar);', diff --git a/src/parser/test/file_utils.ml b/src/parser/test/file_utils.ml index 200245c6285..7990ddcfe62 100644 --- a/src/parser/test/file_utils.ml +++ b/src/parser/test/file_utils.ml @@ -1,58 +1,70 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type file_kind = -| Dir of string -| File of string + | Dir of string + | File of string let lstat_kind file = - let open Unix in - try Some (lstat file).st_kind - with Unix_error (ENOENT, _, _) -> - prerr_endline ("File not found: "^file); - None + Unix.( + try Some (lstat file).st_kind + with Unix_error (ENOENT, _, _) -> + prerr_endline ("File not found: " ^ file); + None) -module FileSet = Set.Make(struct +module FileSet = Set.Make (struct type t = file_kind + let compare a b = - match a, b with - | Dir a', Dir b' - | File a', File b' -> String.compare a' b' - | Dir _, File _ -> 1 - | File _, Dir _ -> -1 + match (a, b) with + | (Dir a', Dir b') + | (File a', File b') -> + String.compare a' b' + | (Dir _, File _) -> 1 + | (File _, Dir _) -> -1 end) -let fold_files (type t) - ?max_depth ?(filter=(fun _ -> true)) ?(file_only = false) - (paths: string list) (action: file_kind -> t -> t) (init: t) = +let fold_files + (type t) + ?max_depth + ?(filter = (fun _ -> true)) + ?(file_only = false) + (paths : string list) + (action : file_kind -> t -> t) + (init : t) = let rec fold depth acc dir = - let acc = if not file_only && filter dir - then action (Dir dir) acc - else acc + let acc = + if (not file_only) && filter dir then + action (Dir dir) acc + else + acc in if max_depth = Some depth then acc else let files = Sys.readdir dir - |> Array.fold_left (fun acc file -> - let open Unix in - let abs = Filename.concat dir file in - match lstat_kind abs with - | Some S_REG -> FileSet.add (File abs) acc - | Some S_DIR -> FileSet.add (Dir abs) acc - | _ -> acc - ) FileSet.empty + |> Array.fold_left + (fun acc file -> + Unix.( + let abs = Filename.concat dir file in + match lstat_kind abs with + | Some S_REG -> FileSet.add (File abs) acc + | Some S_DIR -> FileSet.add (Dir abs) acc + | _ -> acc)) + FileSet.empty in FileSet.fold (fun entry acc -> - match entry with - | File file when filter file -> action (File file) acc - | Dir file -> fold (depth+1) acc file - | _ -> acc) - files acc in + match entry with + | File file when filter file -> action (File file) acc + | Dir file -> fold (depth + 1) acc file + | _ -> acc) + files + acc + in List.fold_left (fold 0) init paths diff --git a/src/parser/test/file_utils.mli b/src/parser/test/file_utils.mli index b6220923a44..673a18238f6 100644 --- a/src/parser/test/file_utils.mli +++ b/src/parser/test/file_utils.mli @@ -1,16 +1,18 @@ (** - * Copyright (c) 2013-present, 
Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type file_kind = -| Dir of string -| File of string + | Dir of string + | File of string -val fold_files: - ?max_depth:int -> ?filter:(string -> bool) -> ?file_only:bool -> +val fold_files : + ?max_depth:int -> + ?filter:(string -> bool) -> + ?file_only:bool -> string list -> (file_kind -> 'a -> 'a) -> 'a -> diff --git a/src/parser/test/flow/ES6/binding-pattern/object-pattern/methods_are_invalid.tree.json b/src/parser/test/flow/ES6/binding-pattern/object-pattern/methods_are_invalid.tree.json index 04dc8942a8e..720364ea19d 100644 --- a/src/parser/test/flow/ES6/binding-pattern/object-pattern/methods_are_invalid.tree.json +++ b/src/parser/test/flow/ES6/binding-pattern/object-pattern/methods_are_invalid.tree.json @@ -22,44 +22,7 @@ "type":"ObjectPattern", "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":11}}, "range":[1,11], - "properties":[ - { - "type":"Property", - "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":9}}, - "range":[3,9], - "key":{ - "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":4}}, - "range":[3,4], - "name":"m", - "typeAnnotation":null, - "optional":false - }, - "value":{ - "type":"FunctionExpression", - "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":9}}, - "range":[4,9], - "id":null, - "params":[], - "body":{ - "type":"BlockStatement", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, - "range":[7,9], - "body":[] - }, - "async":false, - "generator":false, - "predicate":null, - "expression":false, - "returnType":null, - "typeParameters":null - }, - "kind":"init", - "method":false, - "shorthand":false, - "computed":false - } - ], + "properties":[], "typeAnnotation":null }, "right":{ diff --git a/src/parser/test/flow/ES6/yield/invalid-yield-expression.tree.json b/src/parser/test/flow/ES6/yield/invalid-yield-expression.tree.json index 332a7b3400b..db674ae3861 100644 --- a/src/parser/test/flow/ES6/yield/invalid-yield-expression.tree.json +++ b/src/parser/test/flow/ES6/yield/invalid-yield-expression.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected number" + "message":"Unexpected number, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/ES6/yield/migrated_0005.tree.json b/src/parser/test/flow/ES6/yield/migrated_0005.tree.json index db0ce0f2524..9a457eb1f51 100644 --- a/src/parser/test/flow/ES6/yield/migrated_0005.tree.json +++ b/src/parser/test/flow/ES6/yield/migrated_0005.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`" } ], "type":"Program", diff --git a/src/parser/test/flow/JSX/html_entity_at_start_of_child.js b/src/parser/test/flow/JSX/html_entity_at_start_of_child.js new file mode 100644 index 00000000000..a1ed94daf4b --- /dev/null +++ b/src/parser/test/flow/JSX/html_entity_at_start_of_child.js @@ -0,0 +1 @@ +(
 
); diff --git a/src/parser/test/flow/JSX/html_entity_at_start_of_child.tree.json b/src/parser/test/flow/JSX/html_entity_at_start_of_child.tree.json new file mode 100644 index 00000000000..410d17178aa --- /dev/null +++ b/src/parser/test/flow/JSX/html_entity_at_start_of_child.tree.json @@ -0,0 +1,52 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":20}}, + "range":[0,20], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":20}}, + "range":[0,20], + "expression":{ + "type":"JSXElement", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":18}}, + "range":[1,18], + "openingElement":{ + "type":"JSXOpeningElement", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":6}}, + "range":[1,6], + "name":{ + "type":"JSXIdentifier", + "loc":{"source":null,"start":{"line":1,"column":2},"end":{"line":1,"column":5}}, + "range":[2,5], + "name":"div" + }, + "attributes":[], + "selfClosing":false + }, + "closingElement":{ + "type":"JSXClosingElement", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":18}}, + "range":[12,18], + "name":{ + "type":"JSXIdentifier", + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":17}}, + "range":[14,17], + "name":"div" + } + }, + "children":[ + { + "type":"JSXText", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":12}}, + "range":[6,12], + "value":" ", + "raw":" " + } + ] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/JSX_invalid/migrated_0001.tree.json b/src/parser/test/flow/JSX_invalid/migrated_0001.tree.json index 127c8dc39c1..a555323a069 100644 --- a/src/parser/test/flow/JSX_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/JSX_invalid/migrated_0001.tree.json @@ -10,17 +10,17 @@ } ], "type":"Program", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":38}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,38], "body":[ { "type":"VariableDeclaration", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":38}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,38], "declarations":[ { "type":"VariableDeclarator", - "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":38}}, + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":2,"column":0}}, "range":[4,38], "id":{ "type":"Identifier", @@ -32,7 +32,7 @@ }, "init":{ "type":"BinaryExpression", - "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":38}}, + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":2,"column":0}}, "range":[8,38], "operator":"<", "left":{ @@ -103,7 +103,7 @@ }, "right":{ "type":"Literal", - "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":38}}, + "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":2,"column":0}}, "range":[31,38], "value":null, "raw":"/div>;/", diff --git a/src/parser/test/flow/JSX_invalid/migrated_0002.tree.json b/src/parser/test/flow/JSX_invalid/migrated_0002.tree.json index db791a243a2..4249be80d12 100644 --- a/src/parser/test/flow/JSX_invalid/migrated_0002.tree.json +++ b/src/parser/test/flow/JSX_invalid/migrated_0002.tree.json @@ -10,17 +10,17 @@ } ], "type":"Program", - 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":65}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,65], "body":[ { "type":"VariableDeclaration", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":65}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,65], "declarations":[ { "type":"VariableDeclarator", - "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":65}}, + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":2,"column":0}}, "range":[4,65], "id":{ "type":"Identifier", @@ -32,7 +32,7 @@ }, "init":{ "type":"BinaryExpression", - "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":65}}, + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":2,"column":0}}, "range":[8,65], "operator":"<", "left":{ @@ -103,7 +103,7 @@ }, "right":{ "type":"Literal", - "loc":{"source":null,"start":{"line":1,"column":58},"end":{"line":1,"column":65}}, + "loc":{"source":null,"start":{"line":1,"column":58},"end":{"line":2,"column":0}}, "range":[58,65], "value":null, "raw":"/div>;/", diff --git a/src/parser/test/flow/arrow_function/tuple_return_type.js b/src/parser/test/flow/arrow_function/tuple_return_type.js new file mode 100644 index 00000000000..c730addebed --- /dev/null +++ b/src/parser/test/flow/arrow_function/tuple_return_type.js @@ -0,0 +1 @@ +(): [number => void] => {} diff --git a/src/parser/test/flow/arrow_function/tuple_return_type.tree.json b/src/parser/test/flow/arrow_function/tuple_return_type.tree.json new file mode 100644 index 00000000000..d528bcc935b --- /dev/null +++ b/src/parser/test/flow/arrow_function/tuple_return_type.tree.json @@ -0,0 +1,70 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":26}}, + "range":[0,26], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":26}}, + "range":[0,26], + "expression":{ + "type":"ArrowFunctionExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":26}}, + "range":[0,26], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":26}}, + "range":[24,26], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":2},"end":{"line":1,"column":20}}, + "range":[2,20], + "typeAnnotation":{ + "type":"TupleTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":20}}, + "range":[4,20], + "types":[ + { + "type":"FunctionTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":19}}, + "range":[5,19], + "params":[ + { + "type":"FunctionTypeParam", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "name":null, + "typeAnnotation":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11] + }, + "optional":false + } + ], + "returnType":{ + "type":"VoidTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, + "range":[15,19] + }, + "rest":null, + "typeParameters":null + } + ] + } + }, + "typeParameters":null + }, + "directive":null 
+ } + ], + "comments":[] +} diff --git a/src/parser/test/flow/arrow_function_invalid/migrated_0000.tree.json b/src/parser/test/flow/arrow_function_invalid/migrated_0000.tree.json index fa229370cc8..52bdecc03a5 100644 --- a/src/parser/test/flow/arrow_function_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/arrow_function_invalid/migrated_0000.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, + "message":"Unexpected token `:`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/arrow_function_invalid/migrated_0003.tree.json b/src/parser/test/flow/arrow_function_invalid/migrated_0003.tree.json index 2158e6b42f4..9b68787d839 100644 --- a/src/parser/test/flow/arrow_function_invalid/migrated_0003.tree.json +++ b/src/parser/test/flow/arrow_function_invalid/migrated_0003.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`" }, { "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":5}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":5}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/arrow_function_invalid/migrated_0004.tree.json b/src/parser/test/flow/arrow_function_invalid/migrated_0004.tree.json index 918d949467a..b0631cae519 100644 --- a/src/parser/test/flow/arrow_function_invalid/migrated_0004.tree.json +++ b/src/parser/test/flow/arrow_function_invalid/migrated_0004.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`" }, { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":7}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":7}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/async_arrow_functions/migrated_0008.tree.json b/src/parser/test/flow/async_arrow_functions/migrated_0008.tree.json index 38bacf60ec1..62be93cb20a 100644 --- a/src/parser/test/flow/async_arrow_functions/migrated_0008.tree.json +++ b/src/parser/test/flow/async_arrow_functions/migrated_0008.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" } ], "type":"Program", diff --git a/src/parser/test/flow/async_await/migrated_0007.tree.json b/src/parser/test/flow/async_await/migrated_0007.tree.json index d2ac987d01b..2212d0138e3 100644 --- a/src/parser/test/flow/async_await/migrated_0007.tree.json +++ b/src/parser/test/flow/async_await/migrated_0007.tree.json @@ -1,4 +1,10 @@ { + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":29}}, + 
"message":"Unexpected reserved word" + } + ], "type":"Program", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":65}}, "range":[0,65], diff --git a/src/parser/test/flow/async_await/migrated_0009.tree.json b/src/parser/test/flow/async_await/migrated_0009.tree.json index e38ebb08790..1fd11889ab0 100644 --- a/src/parser/test/flow/async_await/migrated_0009.tree.json +++ b/src/parser/test/flow/async_await/migrated_0009.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":33},"end":{"line":2,"column":34}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`" } ], "type":"Program", diff --git a/src/parser/test/flow/async_await/migrated_0018.tree.json b/src/parser/test/flow/async_await/migrated_0018.tree.json index 4c8639a29ee..5c599209bdd 100644 --- a/src/parser/test/flow/async_await/migrated_0018.tree.json +++ b/src/parser/test/flow/async_await/migrated_0018.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":50},"end":{"line":1,"column":51}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`" } ], "type":"Program", diff --git a/src/parser/test/flow/bigint/binary-invalid-digit.js b/src/parser/test/flow/bigint/binary-invalid-digit.js new file mode 100644 index 00000000000..34c5497eee3 --- /dev/null +++ b/src/parser/test/flow/bigint/binary-invalid-digit.js @@ -0,0 +1 @@ +0b2n; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/binary-invalid-digit.tree.json b/src/parser/test/flow/bigint/binary-invalid-digit.tree.json new file mode 100644 index 00000000000..d83ba07ba0d --- /dev/null +++ b/src/parser/test/flow/bigint/binary-invalid-digit.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":4}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "value":0, + "raw":"0" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":5}}, + "range":[1,5], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":4}}, + "range":[1,4], + "name":"b2n", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/binary-invalid-word.js b/src/parser/test/flow/bigint/binary-invalid-word.js new file mode 100644 index 00000000000..9960053939c --- /dev/null +++ b/src/parser/test/flow/bigint/binary-invalid-word.js @@ -0,0 +1 @@ +0b101011101nhello; diff --git a/src/parser/test/flow/bigint/binary-invalid-word.tree.json b/src/parser/test/flow/bigint/binary-invalid-word.tree.json new file mode 100644 index 00000000000..be1cb17591e --- /dev/null +++ b/src/parser/test/flow/bigint/binary-invalid-word.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":17}}, + 
"message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":17}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":18}}, + "range":[0,18], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "value":null, + "bigint":"0b101011101n" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":18}}, + "range":[12,18], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":17}}, + "range":[12,17], + "name":"hello", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/exponent-part.js b/src/parser/test/flow/bigint/exponent-part.js new file mode 100644 index 00000000000..3f17a453509 --- /dev/null +++ b/src/parser/test/flow/bigint/exponent-part.js @@ -0,0 +1 @@ +0e0n; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/exponent-part.tree.json b/src/parser/test/flow/bigint/exponent-part.tree.json new file mode 100644 index 00000000000..c5ea1752f80 --- /dev/null +++ b/src/parser/test/flow/bigint/exponent-part.tree.json @@ -0,0 +1,27 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "message":"A bigint literal cannot use exponential notation" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "value":null, + "bigint":"0e0n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/float-invalid-dot-dot.js b/src/parser/test/flow/bigint/float-invalid-dot-dot.js new file mode 100644 index 00000000000..5a5bec5778c --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid-dot-dot.js @@ -0,0 +1 @@ +1..n; diff --git a/src/parser/test/flow/bigint/float-invalid-dot-dot.tree.json b/src/parser/test/flow/bigint/float-invalid-dot-dot.tree.json new file mode 100644 index 00000000000..085bde5195d --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid-dot-dot.tree.json @@ -0,0 +1,35 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "expression":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "object":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":2}}, + "range":[0,2], + "value":1, + "raw":"1." 
+ }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":4}}, + "range":[3,4], + "name":"n", + "typeAnnotation":null, + "optional":false + }, + "computed":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/float-invalid-dot.js b/src/parser/test/flow/bigint/float-invalid-dot.js new file mode 100644 index 00000000000..b70823f7ba7 --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid-dot.js @@ -0,0 +1 @@ +.1n; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/float-invalid-dot.tree.json b/src/parser/test/flow/bigint/float-invalid-dot.tree.json new file mode 100644 index 00000000000..27d5b6ea889 --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid-dot.tree.json @@ -0,0 +1,27 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "message":"A bigint literal must be an integer" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "range":[0,3], + "value":null, + "bigint":".1n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/float-invalid-without-fractional-digits.js b/src/parser/test/flow/bigint/float-invalid-without-fractional-digits.js new file mode 100644 index 00000000000..8bd0646f4f7 --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid-without-fractional-digits.js @@ -0,0 +1 @@ +1.n; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/float-invalid-without-fractional-digits.tree.json b/src/parser/test/flow/bigint/float-invalid-without-fractional-digits.tree.json new file mode 100644 index 00000000000..cb68c632362 --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid-without-fractional-digits.tree.json @@ -0,0 +1,27 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "message":"A bigint literal must be an integer" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "range":[0,3], + "value":null, + "bigint":"1.n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/float-invalid.js b/src/parser/test/flow/bigint/float-invalid.js new file mode 100644 index 00000000000..6a7194c2f7c --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid.js @@ -0,0 +1 @@ +1.0n; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/float-invalid.tree.json b/src/parser/test/flow/bigint/float-invalid.tree.json new file mode 100644 index 00000000000..ca7f202ee9e --- /dev/null +++ b/src/parser/test/flow/bigint/float-invalid.tree.json @@ -0,0 +1,27 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "message":"A bigint literal must be an integer" + } + ], + "type":"Program", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "value":null, + "bigint":"1.0n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/hex-invalid-word.js b/src/parser/test/flow/bigint/hex-invalid-word.js new file mode 100644 index 00000000000..bc999b4802e --- /dev/null +++ b/src/parser/test/flow/bigint/hex-invalid-word.js @@ -0,0 +1 @@ +0xfff123nhello; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/hex-invalid-word.tree.json b/src/parser/test/flow/bigint/hex-invalid-word.tree.json new file mode 100644 index 00000000000..ef328df8651 --- /dev/null +++ b/src/parser/test/flow/bigint/hex-invalid-word.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":14}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":14}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":15}}, + "range":[0,15], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "range":[0,9], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "range":[0,9], + "value":null, + "bigint":"0xfff123n" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, + "range":[9,15], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":14}}, + "range":[9,14], + "name":"hello", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/hexadecimal-invalid-digit.js b/src/parser/test/flow/bigint/hexadecimal-invalid-digit.js new file mode 100644 index 00000000000..8227cd1e768 --- /dev/null +++ b/src/parser/test/flow/bigint/hexadecimal-invalid-digit.js @@ -0,0 +1 @@ +0xgn; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/hexadecimal-invalid-digit.tree.json b/src/parser/test/flow/bigint/hexadecimal-invalid-digit.tree.json new file mode 100644 index 00000000000..27225240c6a --- /dev/null +++ b/src/parser/test/flow/bigint/hexadecimal-invalid-digit.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":4}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "expression":{ + "type":"Literal", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "value":0, + "raw":"0" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":5}}, + "range":[1,5], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":4}}, + "range":[1,4], + "name":"xgn", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/legacy-octal-invalid.js b/src/parser/test/flow/bigint/legacy-octal-invalid.js new file mode 100644 index 00000000000..213d49ab4fd --- /dev/null +++ b/src/parser/test/flow/bigint/legacy-octal-invalid.js @@ -0,0 +1 @@ +0123n; diff --git a/src/parser/test/flow/bigint/legacy-octal-invalid.tree.json b/src/parser/test/flow/bigint/legacy-octal-invalid.tree.json new file mode 100644 index 00000000000..e4450549319 --- /dev/null +++ b/src/parser/test/flow/bigint/legacy-octal-invalid.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":5}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, + "range":[0,6], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "value":83, + "raw":"0123" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":6}}, + "range":[4,6], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":5}}, + "range":[4,5], + "name":"n", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/octal-new-invalid-word.js b/src/parser/test/flow/bigint/octal-new-invalid-word.js new file mode 100644 index 00000000000..2fe49079e7f --- /dev/null +++ b/src/parser/test/flow/bigint/octal-new-invalid-word.js @@ -0,0 +1 @@ +0o16432nhello; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/octal-new-invalid-word.tree.json b/src/parser/test/flow/bigint/octal-new-invalid-word.tree.json new file mode 100644 index 00000000000..d169955b039 --- /dev/null +++ b/src/parser/test/flow/bigint/octal-new-invalid-word.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":13}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":14}}, + "range":[0,14], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":8}}, + "range":[0,8], + "expression":{ + "type":"BigIntLiteral", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":8}}, + "range":[0,8], + "value":null, + "bigint":"0o16432n" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":14}}, + "range":[8,14], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":13}}, + "range":[8,13], + "name":"hello", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/small-invalid-word.js b/src/parser/test/flow/bigint/small-invalid-word.js new file mode 100644 index 00000000000..b9f17064e1f --- /dev/null +++ b/src/parser/test/flow/bigint/small-invalid-word.js @@ -0,0 +1 @@ +100nhello; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/small-invalid-word.tree.json b/src/parser/test/flow/bigint/small-invalid-word.tree.json new file mode 100644 index 00000000000..e1c37d6868b --- /dev/null +++ b/src/parser/test/flow/bigint/small-invalid-word.tree.json @@ -0,0 +1,45 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":9}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "value":null, + "bigint":"100n" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":10}}, + "range":[4,10], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":9}}, + "range":[4,9], + "name":"hello", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/valid-binary.js b/src/parser/test/flow/bigint/valid-binary.js new file mode 100644 index 00000000000..0283f39c573 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-binary.js @@ -0,0 +1 @@ +0b101011101n; diff --git a/src/parser/test/flow/bigint/valid-binary.tree.json b/src/parser/test/flow/bigint/valid-binary.tree.json new file mode 100644 index 00000000000..b2fdecb91ab --- /dev/null +++ b/src/parser/test/flow/bigint/valid-binary.tree.json @@ -0,0 +1,21 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "value":null, + "bigint":"0b101011101n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/valid-hex.js b/src/parser/test/flow/bigint/valid-hex.js new file mode 100644 index 00000000000..e475c4836d9 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-hex.js @@ 
-0,0 +1 @@ +0xfff123n; diff --git a/src/parser/test/flow/bigint/valid-hex.tree.json b/src/parser/test/flow/bigint/valid-hex.tree.json new file mode 100644 index 00000000000..4f42c3fd156 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-hex.tree.json @@ -0,0 +1,21 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "range":[0,9], + "value":null, + "bigint":"0xfff123n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/valid-large.js b/src/parser/test/flow/bigint/valid-large.js new file mode 100644 index 00000000000..0fbe10e825d --- /dev/null +++ b/src/parser/test/flow/bigint/valid-large.js @@ -0,0 +1 @@ +9223372036854775807n; \ No newline at end of file diff --git a/src/parser/test/flow/bigint/valid-large.tree.json b/src/parser/test/flow/bigint/valid-large.tree.json new file mode 100644 index 00000000000..4e46c9954fe --- /dev/null +++ b/src/parser/test/flow/bigint/valid-large.tree.json @@ -0,0 +1,21 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":21}}, + "range":[0,21], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":21}}, + "range":[0,21], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":20}}, + "range":[0,20], + "value":null, + "bigint":"9223372036854775807n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/valid-octal-new.js b/src/parser/test/flow/bigint/valid-octal-new.js new file mode 100644 index 00000000000..115f40593f7 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-octal-new.js @@ -0,0 +1 @@ +0o16432n; diff --git a/src/parser/test/flow/bigint/valid-octal-new.tree.json b/src/parser/test/flow/bigint/valid-octal-new.tree.json new file mode 100644 index 00000000000..1d3ea1a1fe9 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-octal-new.tree.json @@ -0,0 +1,21 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "range":[0,9], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "range":[0,9], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":8}}, + "range":[0,8], + "value":null, + "bigint":"0o16432n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/bigint/valid-small.js b/src/parser/test/flow/bigint/valid-small.js new file mode 100644 index 00000000000..e96cca59c70 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-small.js @@ -0,0 +1 @@ +100n; diff --git a/src/parser/test/flow/bigint/valid-small.tree.json b/src/parser/test/flow/bigint/valid-small.tree.json new file mode 100644 index 00000000000..3871b911bf2 --- /dev/null +++ b/src/parser/test/flow/bigint/valid-small.tree.json @@ -0,0 +1,21 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "body":[ + { + "type":"ExpressionStatement", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":5}}, + "range":[0,5], + "expression":{ + "type":"BigIntLiteral", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "value":null, + "bigint":"100n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/call_properties/migrated_0000.tree.json b/src/parser/test/flow/call_properties/migrated_0000.tree.json index 290a2b0b69a..4888729fca7 100644 --- a/src/parser/test/flow/call_properties/migrated_0000.tree.json +++ b/src/parser/test/flow/call_properties/migrated_0000.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":22}}, "range":[8,22], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/call_properties/migrated_0001.tree.json b/src/parser/test/flow/call_properties/migrated_0001.tree.json index c681699c90f..589336727eb 100644 --- a/src/parser/test/flow/call_properties/migrated_0001.tree.json +++ b/src/parser/test/flow/call_properties/migrated_0001.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":23}}, "range":[8,23], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/call_properties/migrated_0002.tree.json b/src/parser/test/flow/call_properties/migrated_0002.tree.json index a8c411bff11..a60ed9247cb 100644 --- a/src/parser/test/flow/call_properties/migrated_0002.tree.json +++ b/src/parser/test/flow/call_properties/migrated_0002.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":54}}, "range":[8,54], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/call_properties/migrated_0003.tree.json b/src/parser/test/flow/call_properties/migrated_0003.tree.json index 45f984fa6e3..af4b0a31e8c 100644 --- a/src/parser/test/flow/call_properties/migrated_0003.tree.json +++ b/src/parser/test/flow/call_properties/migrated_0003.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":30}}, "range":[8,30], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/call_properties_invalid/migrated_0000.tree.json b/src/parser/test/flow/call_properties_invalid/migrated_0000.tree.json index dab41ea0d5e..6472942af56 100644 --- a/src/parser/test/flow/call_properties_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/call_properties_invalid/migrated_0000.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, "message":"Unexpected end of input" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -35,6 +39,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":2,"column":0}}, "range":[8,15], + "inexact":false, "exact":false, "properties":[], "indexers":[], diff --git a/src/parser/test/flow/class_expression/anonymous_implements.js 
b/src/parser/test/flow/class_expression/anonymous_implements.js new file mode 100644 index 00000000000..daa31add351 --- /dev/null +++ b/src/parser/test/flow/class_expression/anonymous_implements.js @@ -0,0 +1 @@ +(class implements C {}) diff --git a/src/parser/test/flow/class_expression/anonymous_implements.tree.json b/src/parser/test/flow/class_expression/anonymous_implements.tree.json new file mode 100644 index 00000000000..ff4bd183bac --- /dev/null +++ b/src/parser/test/flow/class_expression/anonymous_implements.tree.json @@ -0,0 +1,46 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":23}}, + "range":[0,23], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":23}}, + "range":[0,23], + "expression":{ + "type":"ClassExpression", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":22}}, + "range":[1,22], + "id":null, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":22}}, + "range":[20,22], + "body":[] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[ + { + "type":"ClassImplements", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":19}}, + "range":[18,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":19}}, + "range":[18,19], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ], + "decorators":[] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_method_kinds/async_as_id_tparams.js b/src/parser/test/flow/class_method_kinds/async_as_id_tparams.js new file mode 100644 index 00000000000..3c15dcbd1eb --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/async_as_id_tparams.js @@ -0,0 +1,3 @@ +class C { + async<T>() {} +} diff --git a/src/parser/test/flow/class_method_kinds/async_as_id_tparams.tree.json b/src/parser/test/flow/class_method_kinds/async_as_id_tparams.tree.json new file mode 100644 index 00000000000..f7f4635d282 --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/async_as_id_tparams.tree.json @@ -0,0 +1,84 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,27], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,27], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,27], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":15}}, + "range":[12,25], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "name":"async", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":15}}, + "range":[17,25], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":15}}, + 
"range":[23,25], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":{ + "type":"TypeParameterDeclaration", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":10}}, + "range":[17,20], + "params":[ + { + "type":"TypeParameter", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "range":[18,19], + "name":"T", + "bound":null, + "variance":null, + "default":null + } + ] + } + }, + "kind":"method", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_method_kinds/async_tparams.js b/src/parser/test/flow/class_method_kinds/async_tparams.js new file mode 100644 index 00000000000..01998a7ccd3 --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/async_tparams.js @@ -0,0 +1,3 @@ +class C { + async m() {} +} diff --git a/src/parser/test/flow/class_method_kinds/async_tparams.tree.json b/src/parser/test/flow/class_method_kinds/async_tparams.tree.json new file mode 100644 index 00000000000..e955fcc6c4f --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/async_tparams.tree.json @@ -0,0 +1,84 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,29], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":17}}, + "range":[12,27], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "range":[18,19], + "name":"m", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":17}}, + "range":[19,27], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":15},"end":{"line":2,"column":17}}, + "range":[25,27], + "body":[] + }, + "async":true, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":{ + "type":"TypeParameterDeclaration", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":12}}, + "range":[19,22], + "params":[ + { + "type":"TypeParameter", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "range":[20,21], + "name":"T", + "bound":null, + "variance":null, + "default":null + } + ] + } + }, + "kind":"method", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_method_kinds/get_as_id.js b/src/parser/test/flow/class_method_kinds/get_as_id.js new file mode 100644 index 00000000000..79add1d8401 --- /dev/null +++ 
b/src/parser/test/flow/class_method_kinds/get_as_id.js @@ -0,0 +1,3 @@ +class C { + get() {} +} diff --git a/src/parser/test/flow/class_method_kinds/get_as_id.tree.json b/src/parser/test/flow/class_method_kinds/get_as_id.tree.json new file mode 100644 index 00000000000..92a267a1b86 --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/get_as_id.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,22], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,22], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,22], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":10}}, + "range":[12,20], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "name":"get", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":10}}, + "range":[15,20], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":10}}, + "range":[18,20], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"method", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_method_kinds/get_as_id_tparams.js b/src/parser/test/flow/class_method_kinds/get_as_id_tparams.js new file mode 100644 index 00000000000..1286c365623 --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/get_as_id_tparams.js @@ -0,0 +1,3 @@ +class C { + get<T>() {} +} diff --git a/src/parser/test/flow/class_method_kinds/get_as_id_tparams.tree.json b/src/parser/test/flow/class_method_kinds/get_as_id_tparams.tree.json new file mode 100644 index 00000000000..8921e663938 --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/get_as_id_tparams.tree.json @@ -0,0 +1,84 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,25], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":13}}, + "range":[12,23], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "name":"get", + 
"typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":13}}, + "range":[15,23], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":11},"end":{"line":2,"column":13}}, + "range":[21,23], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":{ + "type":"TypeParameterDeclaration", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":8}}, + "range":[15,18], + "params":[ + { + "type":"TypeParameter", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "range":[16,17], + "name":"T", + "bound":null, + "variance":null, + "default":null + } + ] + } + }, + "kind":"method", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_method_kinds/multiple_constructors.tree.json b/src/parser/test/flow/class_method_kinds/multiple_constructors.tree.json index f7f5aa2e231..72a7847b934 100644 --- a/src/parser/test/flow/class_method_kinds/multiple_constructors.tree.json +++ b/src/parser/test/flow/class_method_kinds/multiple_constructors.tree.json @@ -5,15 +5,15 @@ "message":"Classes may only have one constructor" }, { - "loc":{"source":null,"start":{"line":7,"column":2},"end":{"line":7,"column":28}}, - "message":"Classes may not have fields named `constructor`." + "loc":{"source":null,"start":{"line":7,"column":9},"end":{"line":7,"column":20}}, + "message":"Classes may not have static fields named `constructor`." }, { - "loc":{"source":null,"start":{"line":11,"column":2},"end":{"line":11,"column":21}}, + "loc":{"source":null,"start":{"line":11,"column":2},"end":{"line":11,"column":13}}, "message":"Classes may not have fields named `constructor`." }, { - "loc":{"source":null,"start":{"line":24,"column":2},"end":{"line":24,"column":26}}, + "loc":{"source":null,"start":{"line":24,"column":9},"end":{"line":24,"column":18}}, "message":"Classes may not have static fields named `prototype`." 
} ], @@ -112,8 +112,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -164,8 +164,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -216,8 +216,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -313,8 +313,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -365,8 +365,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -417,8 +417,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_method_kinds/polymorphic_getter.tree.json b/src/parser/test/flow/class_method_kinds/polymorphic_getter.tree.json index 94d7166ef00..aa7b496d1cd 100644 --- a/src/parser/test/flow/class_method_kinds/polymorphic_getter.tree.json +++ b/src/parser/test/flow/class_method_kinds/polymorphic_getter.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":20}}, - "message":"Unexpected token <" + "message":"Unexpected token `<`, expected the token `(`" }, { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "message":"Unexpected token >" + "message":"Unexpected token `>`, expected the token `,`" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":23}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":19}}, @@ -93,8 +93,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_method_kinds/static_async.js b/src/parser/test/flow/class_method_kinds/static_async.js new file mode 100644 index 00000000000..7425e4ed9f4 --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/static_async.js @@ -0,0 +1,3 @@ +class C { + static async m() {} +} diff --git a/src/parser/test/flow/class_method_kinds/static_async.tree.json b/src/parser/test/flow/class_method_kinds/static_async.tree.json new file mode 100644 index 00000000000..fbf4bbc626b --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/static_async.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,33], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":21}}, + "range":[12,31], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":15},"end":{"line":2,"column":16}}, + "range":[25,26], + 
"name":"m", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":21}}, + "range":[26,31], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":19},"end":{"line":2,"column":21}}, + "range":[29,31], + "body":[] + }, + "async":true, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"method", + "static":true, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_method_kinds/static_async_as_id.js b/src/parser/test/flow/class_method_kinds/static_async_as_id.js new file mode 100644 index 00000000000..05fc9c10d0c --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/static_async_as_id.js @@ -0,0 +1,3 @@ +class C { + static async() {} +} diff --git a/src/parser/test/flow/class_method_kinds/static_async_as_id.tree.json b/src/parser/test/flow/class_method_kinds/static_async_as_id.tree.json new file mode 100644 index 00000000000..03468e473ed --- /dev/null +++ b/src/parser/test/flow/class_method_kinds/static_async_as_id.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,31], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,31], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,31], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":19}}, + "range":[12,29], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":14}}, + "range":[19,24], + "name":"async", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":14},"end":{"line":2,"column":19}}, + "range":[24,29], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":17},"end":{"line":2,"column":19}}, + "range":[27,29], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"method", + "static":true, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/async.js b/src/parser/test/flow/class_properties/async.js new file mode 100644 index 00000000000..e08f7f825bc --- /dev/null +++ b/src/parser/test/flow/class_properties/async.js @@ -0,0 +1,3 @@ +class C { + async +} diff --git a/src/parser/test/flow/class_properties/async.tree.json b/src/parser/test/flow/class_properties/async.tree.json new file mode 100644 index 00000000000..f9c5fc068d6 --- /dev/null +++ b/src/parser/test/flow/class_properties/async.tree.json @@ -0,0 +1,51 
@@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,19], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,19], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "name":"async", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/async_annotated.js b/src/parser/test/flow/class_properties/async_annotated.js new file mode 100644 index 00000000000..5fa04ea409b --- /dev/null +++ b/src/parser/test/flow/class_properties/async_annotated.js @@ -0,0 +1,3 @@ +class C { + async: boolean +} diff --git a/src/parser/test/flow/class_properties/async_annotated.tree.json b/src/parser/test/flow/class_properties/async_annotated.tree.json new file mode 100644 index 00000000000..f6d034730ac --- /dev/null +++ b/src/parser/test/flow/class_properties/async_annotated.tree.json @@ -0,0 +1,60 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,28], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,28], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,28], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":16}}, + "range":[12,26], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "name":"async", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":16}}, + "range":[17,26], + "typeAnnotation":{ + "type":"BooleanTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":16}}, + "range":[19,26] + } + }, + "computed":false, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/async_asi.js b/src/parser/test/flow/class_properties/async_asi.js new file mode 100644 index 00000000000..a21bfeb48e6 --- /dev/null +++ b/src/parser/test/flow/class_properties/async_asi.js @@ -0,0 +1,4 @@ +class C { + 
async + foo() {} // not async +} diff --git a/src/parser/test/flow/class_properties/async_asi.tree.json b/src/parser/test/flow/class_properties/async_asi.tree.json new file mode 100644 index 00000000000..c659f4600f8 --- /dev/null +++ b/src/parser/test/flow/class_properties/async_asi.tree.json @@ -0,0 +1,94 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,43], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,43], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,43], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "name":"async", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":10}}, + "range":[20,28], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "range":[20,23], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":5},"end":{"line":3,"column":10}}, + "range":[23,28], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":10}}, + "range":[26,28], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"method", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":11},"end":{"line":3,"column":23}}, + "range":[29,41], + "value":" not async" + } + ] +} diff --git a/src/parser/test/flow/class_properties/async_initializer.js b/src/parser/test/flow/class_properties/async_initializer.js new file mode 100644 index 00000000000..b8125015a7f --- /dev/null +++ b/src/parser/test/flow/class_properties/async_initializer.js @@ -0,0 +1,3 @@ +class C { + async = false +} diff --git a/src/parser/test/flow/class_properties/async_initializer.options.json b/src/parser/test/flow/class_properties/async_initializer.options.json new file mode 100644 index 00000000000..a3276561cc7 --- /dev/null +++ b/src/parser/test/flow/class_properties/async_initializer.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_class_instance_fields": true +} diff --git a/src/parser/test/flow/class_properties/async_initializer.tree.json b/src/parser/test/flow/class_properties/async_initializer.tree.json new file mode 100644 index 00000000000..06bd9e391ba --- /dev/null +++ b/src/parser/test/flow/class_properties/async_initializer.tree.json @@ -0,0 +1,57 @@ +{ + "type":"Program", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,27], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,27], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,27], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":15}}, + "range":[12,25], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[12,17], + "name":"async", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":15}}, + "range":[20,25], + "value":false, + "raw":"false" + }, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/computed.js b/src/parser/test/flow/class_properties/computed.js new file mode 100644 index 00000000000..45425533da7 --- /dev/null +++ b/src/parser/test/flow/class_properties/computed.js @@ -0,0 +1,3 @@ +class C { + ["foo"] +} diff --git a/src/parser/test/flow/class_properties/computed.tree.json b/src/parser/test/flow/class_properties/computed.tree.json new file mode 100644 index 00000000000..e8643ddfa4d --- /dev/null +++ b/src/parser/test/flow/class_properties/computed.tree.json @@ -0,0 +1,50 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,21], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,21], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,21], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":9}}, + "range":[12,19], + "key":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":2,"column":8}}, + "range":[13,18], + "value":"foo", + "raw":"\"foo\"" + }, + "value":null, + "typeAnnotation":null, + "computed":true, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/get.js b/src/parser/test/flow/class_properties/get.js new file mode 100644 index 00000000000..fbb3726d6a4 --- /dev/null +++ b/src/parser/test/flow/class_properties/get.js @@ -0,0 +1,3 @@ +class C { + get +} diff --git a/src/parser/test/flow/class_properties/get.tree.json b/src/parser/test/flow/class_properties/get.tree.json new file mode 100644 index 00000000000..1311a9ab052 --- /dev/null +++ b/src/parser/test/flow/class_properties/get.tree.json @@ -0,0 
+1,51 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,17], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,17], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "name":"get", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/migrated_0003.tree.json b/src/parser/test/flow/class_properties/migrated_0003.tree.json index 12fd7cc4ca2..0ba85e4ada2 100644 --- a/src/parser/test/flow/class_properties/migrated_0003.tree.json +++ b/src/parser/test/flow/class_properties/migrated_0003.tree.json @@ -2,19 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":31}}, - "message":"Unexpected string" + "message":"Unexpected string, expected the token `(`" }, { "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":32}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":34}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `,`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, @@ -22,7 +22,15 @@ }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `)`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -111,8 +119,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_properties/migrated_0008.tree.json b/src/parser/test/flow/class_properties/migrated_0008.tree.json index 4867ed9b99e..7d1727e9c5a 100644 --- a/src/parser/test/flow/class_properties/migrated_0008.tree.json +++ b/src/parser/test/flow/class_properties/migrated_0008.tree.json @@ -2,19 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":29}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected an identifier" }, { 
"loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":38}}, - "message":"Unexpected string" + "message":"Unexpected string, expected the token `(`" }, { "loc":{"source":null,"start":{"line":1,"column":38},"end":{"line":1,"column":39}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":40},"end":{"line":1,"column":41}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `,`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, @@ -22,7 +22,15 @@ }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `)`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -111,8 +119,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_properties/migrated_0025.tree.json b/src/parser/test/flow/class_properties/migrated_0025.tree.json index ba4af9ab403..2983b8f0ec1 100644 --- a/src/parser/test/flow/class_properties/migrated_0025.tree.json +++ b/src/parser/test/flow/class_properties/migrated_0025.tree.json @@ -2,23 +2,35 @@ "errors":[ { "loc":{"source":null,"start":{"line":3,"column":12},"end":{"line":3,"column":13}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":3,"column":14},"end":{"line":3,"column":20}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `(`" }, { "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":1}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected an identifier" }, { "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `,`" }, { "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, "message":"Rest parameter must be final parameter of an argument list" + }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `)`" + }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -127,8 +139,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_properties/migrated_0026.tree.json b/src/parser/test/flow/class_properties/migrated_0026.tree.json index 76e5d1737ca..8ebd0b8f2b9 100644 --- a/src/parser/test/flow/class_properties/migrated_0026.tree.json +++ b/src/parser/test/flow/class_properties/migrated_0026.tree.json @@ -2,7 +2,7 @@ "errors":[ { 
"loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, - "message":"Unexpected token [" + "message":"Unexpected token `[`" } ], "type":"Program", @@ -82,8 +82,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_properties/migrated_0029.tree.json b/src/parser/test/flow/class_properties/migrated_0029.tree.json index 37675908cd7..d3856b5951d 100644 --- a/src/parser/test/flow/class_properties/migrated_0029.tree.json +++ b/src/parser/test/flow/class_properties/migrated_0029.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, - "message":"Unexpected token ?" + "message":"Unexpected token `?`" } ], "type":"Program", @@ -64,8 +64,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_properties/migrated_0030.tree.json b/src/parser/test/flow/class_properties/migrated_0030.tree.json index afff8b9a3f0..6f05bff6d9d 100644 --- a/src/parser/test/flow/class_properties/migrated_0030.tree.json +++ b/src/parser/test/flow/class_properties/migrated_0030.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, - "message":"Unexpected token ?" + "message":"Unexpected token `?`" } ], "type":"Program", @@ -69,8 +69,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/class_properties/unannotated.js b/src/parser/test/flow/class_properties/unannotated.js new file mode 100644 index 00000000000..58a224ed9ee --- /dev/null +++ b/src/parser/test/flow/class_properties/unannotated.js @@ -0,0 +1,3 @@ +class C { + foo +} diff --git a/src/parser/test/flow/class_properties/unannotated.tree.json b/src/parser/test/flow/class_properties/unannotated.tree.json new file mode 100644 index 00000000000..459e7c286c2 --- /dev/null +++ b/src/parser/test/flow/class_properties/unannotated.tree.json @@ -0,0 +1,51 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,17], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,17], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/class_properties/unannotated_asi.js b/src/parser/test/flow/class_properties/unannotated_asi.js new file 
mode 100644 index 00000000000..7eb4df59810 --- /dev/null +++ b/src/parser/test/flow/class_properties/unannotated_asi.js @@ -0,0 +1,4 @@ +class C { + foo + bar +} diff --git a/src/parser/test/flow/class_properties/unannotated_asi.tree.json b/src/parser/test/flow/class_properties/unannotated_asi.tree.json new file mode 100644 index 00000000000..6cae45c3b44 --- /dev/null +++ b/src/parser/test/flow/class_properties/unannotated_asi.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,23], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,23], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,23], + "body":[ + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + }, + { + "type":"ClassProperty", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "range":[18,21], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "range":[18,21], + "name":"bar", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "computed":false, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/comment_interning/array.js b/src/parser/test/flow/comment_interning/array.js new file mode 100644 index 00000000000..aa53dccd5ec --- /dev/null +++ b/src/parser/test/flow/comment_interning/array.js @@ -0,0 +1,6 @@ + +const arr1 = /* pre */ [1, 2, 3] /* post */; + +const arr2 = /* pre */ [1, 2, 3]; + +const arr3 = [1, 2, 3] /* post */; diff --git a/src/parser/test/flow/comment_interning/array.options.json b/src/parser/test/flow/comment_interning/array.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/array.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/array.tree.json b/src/parser/test/flow/comment_interning/array.tree.json new file mode 100644 index 00000000000..9fc2472f082 --- /dev/null +++ b/src/parser/test/flow/comment_interning/array.tree.json @@ -0,0 +1,212 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":6,"column":34}}, + "range":[1,116], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":44}}, + "range":[1,45], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":32}}, + "range":[7,33], + "id":{ + "type":"Identifier", + 
"loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":10}}, + "range":[7,11], + "name":"arr1", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"ArrayExpression", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":33},"end":{"line":2,"column":43}}, + "range":[34,44], + "value":" post " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":22}}, + "range":[14,23], + "value":" pre " + } + ], + "loc":{"source":null,"start":{"line":2,"column":23},"end":{"line":2,"column":32}}, + "range":[24,33], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":24},"end":{"line":2,"column":25}}, + "range":[25,26], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":27},"end":{"line":2,"column":28}}, + "range":[28,29], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":30},"end":{"line":2,"column":31}}, + "range":[31,32], + "value":3, + "raw":"3" + } + ] + } + } + ], + "kind":"const" + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":33}}, + "range":[47,80], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":4,"column":6},"end":{"line":4,"column":32}}, + "range":[53,79], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":6},"end":{"line":4,"column":10}}, + "range":[53,57], + "name":"arr2", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"ArrayExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":13},"end":{"line":4,"column":22}}, + "range":[60,69], + "value":" pre " + } + ], + "loc":{"source":null,"start":{"line":4,"column":23},"end":{"line":4,"column":32}}, + "range":[70,79], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":24},"end":{"line":4,"column":25}}, + "range":[71,72], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":27},"end":{"line":4,"column":28}}, + "range":[74,75], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":30},"end":{"line":4,"column":31}}, + "range":[77,78], + "value":3, + "raw":"3" + } + ] + } + } + ], + "kind":"const" + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":6,"column":0},"end":{"line":6,"column":34}}, + "range":[82,116], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":6,"column":6},"end":{"line":6,"column":22}}, + "range":[88,104], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":6},"end":{"line":6,"column":10}}, + "range":[88,92], + "name":"arr3", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"ArrayExpression", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":6,"column":23},"end":{"line":6,"column":33}}, + "range":[105,115], + "value":" post " + } + ], + "loc":{"source":null,"start":{"line":6,"column":13},"end":{"line":6,"column":22}}, + "range":[95,104], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":14},"end":{"line":6,"column":15}}, + "range":[96,97], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + 
"loc":{"source":null,"start":{"line":6,"column":17},"end":{"line":6,"column":18}}, + "range":[99,100], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":20},"end":{"line":6,"column":21}}, + "range":[102,103], + "value":3, + "raw":"3" + } + ] + } + } + ], + "kind":"const" + } + ], + "comments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":22}}, + "range":[14,23], + "value":" pre " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":33},"end":{"line":2,"column":43}}, + "range":[34,44], + "value":" post " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":13},"end":{"line":4,"column":22}}, + "range":[60,69], + "value":" pre " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":6,"column":23},"end":{"line":6,"column":33}}, + "range":[105,115], + "value":" post " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/array_pattern.js b/src/parser/test/flow/comment_interning/array_pattern.js new file mode 100644 index 00000000000..5a885198075 --- /dev/null +++ b/src/parser/test/flow/comment_interning/array_pattern.js @@ -0,0 +1,6 @@ + +const /* pre */ [x1, x2] /* post */ = [1, 2]; + +const /* pre */ [y1, y2] = [1, 2]; + +const [z1, z2] /* post */ = [1, 2]; diff --git a/src/parser/test/flow/comment_interning/array_pattern.options.json b/src/parser/test/flow/comment_interning/array_pattern.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/array_pattern.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/array_pattern.tree.json b/src/parser/test/flow/comment_interning/array_pattern.tree.json new file mode 100644 index 00000000000..a359bce17b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/array_pattern.tree.json @@ -0,0 +1,239 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":6,"column":35}}, + "range":[1,119], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":45}}, + "range":[1,46], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":44}}, + "range":[17,45], + "id":{ + "type":"ArrayPattern", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":25},"end":{"line":2,"column":35}}, + "range":[26,36], + "value":" post " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":15}}, + "range":[7,16], + "value":" pre " + } + ], + "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":24}}, + "range":[17,25], + "elements":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":17},"end":{"line":2,"column":19}}, + "range":[18,20], + "name":"x1", + "typeAnnotation":null, + "optional":false + }, + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":21},"end":{"line":2,"column":23}}, + "range":[22,24], + "name":"x2", + "typeAnnotation":null, + "optional":false + } + ], + "typeAnnotation":null + }, + "init":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":2,"column":38},"end":{"line":2,"column":44}}, + "range":[39,45], + "elements":[ + { + "type":"Literal", + 
"loc":{"source":null,"start":{"line":2,"column":39},"end":{"line":2,"column":40}}, + "range":[40,41], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":42},"end":{"line":2,"column":43}}, + "range":[43,44], + "value":2, + "raw":"2" + } + ] + } + } + ], + "kind":"const" + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":34}}, + "range":[48,82], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":4,"column":16},"end":{"line":4,"column":33}}, + "range":[64,81], + "id":{ + "type":"ArrayPattern", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":6},"end":{"line":4,"column":15}}, + "range":[54,63], + "value":" pre " + } + ], + "loc":{"source":null,"start":{"line":4,"column":16},"end":{"line":4,"column":24}}, + "range":[64,72], + "elements":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":17},"end":{"line":4,"column":19}}, + "range":[65,67], + "name":"y1", + "typeAnnotation":null, + "optional":false + }, + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":21},"end":{"line":4,"column":23}}, + "range":[69,71], + "name":"y2", + "typeAnnotation":null, + "optional":false + } + ], + "typeAnnotation":null + }, + "init":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":4,"column":27},"end":{"line":4,"column":33}}, + "range":[75,81], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":28},"end":{"line":4,"column":29}}, + "range":[76,77], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":31},"end":{"line":4,"column":32}}, + "range":[79,80], + "value":2, + "raw":"2" + } + ] + } + } + ], + "kind":"const" + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":6,"column":0},"end":{"line":6,"column":35}}, + "range":[84,119], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":6,"column":6},"end":{"line":6,"column":34}}, + "range":[90,118], + "id":{ + "type":"ArrayPattern", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":6,"column":15},"end":{"line":6,"column":25}}, + "range":[99,109], + "value":" post " + } + ], + "loc":{"source":null,"start":{"line":6,"column":6},"end":{"line":6,"column":14}}, + "range":[90,98], + "elements":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":7},"end":{"line":6,"column":9}}, + "range":[91,93], + "name":"z1", + "typeAnnotation":null, + "optional":false + }, + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":11},"end":{"line":6,"column":13}}, + "range":[95,97], + "name":"z2", + "typeAnnotation":null, + "optional":false + } + ], + "typeAnnotation":null + }, + "init":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":6,"column":28},"end":{"line":6,"column":34}}, + "range":[112,118], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":29},"end":{"line":6,"column":30}}, + "range":[113,114], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":32},"end":{"line":6,"column":33}}, + "range":[116,117], + "value":2, + "raw":"2" + } + ] + } + } + ], + "kind":"const" + } + ], + "comments":[ + { + "type":"Block", + 
"loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":15}}, + "range":[7,16], + "value":" pre " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":25},"end":{"line":2,"column":35}}, + "range":[26,36], + "value":" post " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":6},"end":{"line":4,"column":15}}, + "range":[54,63], + "value":" pre " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":6,"column":15},"end":{"line":6,"column":25}}, + "range":[99,109], + "value":" post " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/break.js b/src/parser/test/flow/comment_interning/break.js new file mode 100644 index 00000000000..1cd514d875b --- /dev/null +++ b/src/parser/test/flow/comment_interning/break.js @@ -0,0 +1,15 @@ +function break_with_label() { + foo: for (var i = 0; i < 10; i++) { + /* 1.1 leading on break */ + break /* 1.2 leading on label */ foo /* 1.3 trailing */; + /* 1.4 trailing */ + } +} + +function break_without_label() { + for (var i = 0; i < 10; i++) { + /* 2.1 leading */ + break; + /* 2.2 trailing */ + } +} diff --git a/src/parser/test/flow/comment_interning/break.options.json b/src/parser/test/flow/comment_interning/break.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/break.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/break.tree.json b/src/parser/test/flow/comment_interning/break.tree.json new file mode 100644 index 00000000000..a08900e1bfe --- /dev/null +++ b/src/parser/test/flow/comment_interning/break.tree.json @@ -0,0 +1,323 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":15,"column":1}}, + "range":[0,317], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":7,"column":1}}, + "range":[0,188], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":25}}, + "range":[9,25], + "name":"break_with_label", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":7,"column":1}}, + "range":[28,188], + "body":[ + { + "type":"LabeledStatement", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":6,"column":3}}, + "range":[32,186], + "label":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[32,35], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ForStatement", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":6,"column":3}}, + "range":[37,186], + "init":{ + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":21}}, + "range":[42,51], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":21}}, + "range":[46,51], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":17}}, + "range":[46,47], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":20},"end":{"line":2,"column":21}}, + "range":[50,51], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + 
"test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":2,"column":23},"end":{"line":2,"column":29}}, + "range":[53,59], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":23},"end":{"line":2,"column":24}}, + "range":[53,54], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":27},"end":{"line":2,"column":29}}, + "range":[57,59], + "value":10, + "raw":"10" + } + }, + "update":{ + "type":"UpdateExpression", + "loc":{"source":null,"start":{"line":2,"column":31},"end":{"line":2,"column":34}}, + "range":[61,64], + "operator":"++", + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":31},"end":{"line":2,"column":32}}, + "range":[61,62], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "prefix":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":36},"end":{"line":6,"column":3}}, + "range":[66,186], + "body":[ + { + "type":"BreakStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":41},"end":{"line":4,"column":59}}, + "range":[140,158], + "value":" 1.3 trailing " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":30}}, + "range":[72,98], + "value":" 1.1 leading on break " + } + ], + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":60}}, + "range":[103,159], + "label":{ + "type":"Identifier", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":41},"end":{"line":4,"column":59}}, + "range":[140,158], + "value":" 1.3 trailing " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":10},"end":{"line":4,"column":36}}, + "range":[109,135], + "value":" 1.2 leading on label " + } + ], + "loc":{"source":null,"start":{"line":4,"column":37},"end":{"line":4,"column":40}}, + "range":[136,139], + "name":"foo", + "typeAnnotation":null, + "optional":false + } + } + ] + } + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":9,"column":0},"end":{"line":15,"column":1}}, + "range":[190,317], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":9,"column":9},"end":{"line":9,"column":28}}, + "range":[199,218], + "name":"break_without_label", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":9,"column":31},"end":{"line":15,"column":1}}, + "range":[221,317], + "body":[ + { + "type":"ForStatement", + "loc":{"source":null,"start":{"line":10,"column":2},"end":{"line":14,"column":3}}, + "range":[225,315], + "init":{ + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":10,"column":7},"end":{"line":10,"column":16}}, + "range":[230,239], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":10,"column":11},"end":{"line":10,"column":16}}, + "range":[234,239], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":11},"end":{"line":10,"column":12}}, + "range":[234,235], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + 
"loc":{"source":null,"start":{"line":10,"column":15},"end":{"line":10,"column":16}}, + "range":[238,239], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":10,"column":18},"end":{"line":10,"column":24}}, + "range":[241,247], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":18},"end":{"line":10,"column":19}}, + "range":[241,242], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":10,"column":22},"end":{"line":10,"column":24}}, + "range":[245,247], + "value":10, + "raw":"10" + } + }, + "update":{ + "type":"UpdateExpression", + "loc":{"source":null,"start":{"line":10,"column":26},"end":{"line":10,"column":29}}, + "range":[249,252], + "operator":"++", + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":26},"end":{"line":10,"column":27}}, + "range":[249,250], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "prefix":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":10,"column":31},"end":{"line":14,"column":3}}, + "range":[254,315], + "body":[ + { + "type":"BreakStatement", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":4},"end":{"line":11,"column":21}}, + "range":[260,277], + "value":" 2.1 leading " + } + ], + "loc":{"source":null,"start":{"line":12,"column":4},"end":{"line":12,"column":10}}, + "range":[282,288], + "label":null + } + ] + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":30}}, + "range":[72,98], + "value":" 1.1 leading on break " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":10},"end":{"line":4,"column":36}}, + "range":[109,135], + "value":" 1.2 leading on label " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":41},"end":{"line":4,"column":59}}, + "range":[140,158], + "value":" 1.3 trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":5,"column":4},"end":{"line":5,"column":22}}, + "range":[164,182], + "value":" 1.4 trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":4},"end":{"line":11,"column":21}}, + "range":[260,277], + "value":" 2.1 leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":13,"column":4},"end":{"line":13,"column":22}}, + "range":[293,311], + "value":" 2.2 trailing " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/continue.js b/src/parser/test/flow/comment_interning/continue.js new file mode 100644 index 00000000000..3456a8b7425 --- /dev/null +++ b/src/parser/test/flow/comment_interning/continue.js @@ -0,0 +1,4 @@ +function f() { + /* leading */ + continue /* trailing */; +} diff --git a/src/parser/test/flow/comment_interning/continue.options.json b/src/parser/test/flow/comment_interning/continue.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/continue.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/continue.tree.json b/src/parser/test/flow/comment_interning/continue.tree.json new file mode 100644 index 
00000000000..faf725be5a1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/continue.tree.json @@ -0,0 +1,76 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":26}}, + "message":"Illegal continue statement" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,59], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,59], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, + "range":[9,10], + "name":"f", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":4,"column":1}}, + "range":[13,59], + "body":[ + { + "type":"ContinueStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":11},"end":{"line":3,"column":25}}, + "range":[42,56], + "value":" trailing " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":15}}, + "range":[17,30], + "value":" leading " + } + ], + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":26}}, + "range":[33,57], + "label":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":15}}, + "range":[17,30], + "value":" leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":11},"end":{"line":3,"column":25}}, + "range":[42,56], + "value":" trailing " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/do_while.js b/src/parser/test/flow/comment_interning/do_while.js new file mode 100644 index 00000000000..997ac467b86 --- /dev/null +++ b/src/parser/test/flow/comment_interning/do_while.js @@ -0,0 +1,27 @@ +function only_leading () { + // 1.0. unreachable leading comment + var i = 0; // 1.1. leading comment + // 1.2. another leading comment + do { + i += 1; + } while (i < 3); +} + +function leading_n_trailing() { + var i = 0; + // 2.1. leading comment + do { + i += 1; + } while (i < 3) /* 2.2. trailing comment */; + + // 2.3. unreachable trailing comment +} + +function only_trailing() { + var i = 0; + do { + i += 1; + } /* 3.0. pre-keyword trailing comment */ while /* 3.1. pre-cond trailing comment */ (i< 3) /* 3.2. past-cond trailing comment */; + + // 3.3. 
unreachable trailing comment +} diff --git a/src/parser/test/flow/comment_interning/do_while.options.json b/src/parser/test/flow/comment_interning/do_while.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/do_while.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/do_while.tree.json b/src/parser/test/flow/comment_interning/do_while.tree.json new file mode 100644 index 00000000000..8b7b5146690 --- /dev/null +++ b/src/parser/test/flow/comment_interning/do_while.tree.json @@ -0,0 +1,464 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":27,"column":1}}, + "range":[0,631], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":8,"column":1}}, + "range":[0,190], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":21}}, + "range":[9,21], + "name":"only_leading", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":8,"column":1}}, + "range":[25,190], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":14}}, + "range":[71,81], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":13}}, + "range":[75,80], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":9}}, + "range":[75,76], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":12},"end":{"line":3,"column":13}}, + "range":[79,80], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + { + "type":"DoWhileStatement", + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":16},"end":{"line":3,"column":39}}, + "range":[83,106], + "value":" 1.1. leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":35}}, + "range":[111,142], + "value":" 1.2. 
another leading comment" + } + ], + "loc":{"source":null,"start":{"line":5,"column":4},"end":{"line":7,"column":20}}, + "range":[147,188], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":5,"column":7},"end":{"line":7,"column":5}}, + "range":[150,173], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":6,"column":8},"end":{"line":6,"column":15}}, + "range":[160,167], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":6,"column":8},"end":{"line":6,"column":14}}, + "range":[160,166], + "operator":"+=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":8},"end":{"line":6,"column":9}}, + "range":[160,161], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":13},"end":{"line":6,"column":14}}, + "range":[165,166], + "value":1, + "raw":"1" + } + }, + "directive":null + } + ] + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":7,"column":13},"end":{"line":7,"column":18}}, + "range":[181,186], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":7,"column":13},"end":{"line":7,"column":14}}, + "range":[181,182], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":7,"column":17},"end":{"line":7,"column":18}}, + "range":[185,186], + "value":3, + "raw":"3" + } + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":10,"column":0},"end":{"line":18,"column":1}}, + "range":[192,384], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":9},"end":{"line":10,"column":27}}, + "range":[201,219], + "name":"leading_n_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":10,"column":30},"end":{"line":18,"column":1}}, + "range":[222,384], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":11,"column":4},"end":{"line":11,"column":14}}, + "range":[228,238], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":11,"column":8},"end":{"line":11,"column":13}}, + "range":[232,237], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":11,"column":8},"end":{"line":11,"column":9}}, + "range":[232,233], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":11,"column":12},"end":{"line":11,"column":13}}, + "range":[236,237], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + { + "type":"DoWhileStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":15,"column":20},"end":{"line":15,"column":47}}, + "range":[312,339], + "value":" 2.2. trailing comment " + } + ], + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":12,"column":4},"end":{"line":12,"column":27}}, + "range":[243,266], + "value":" 2.1. 
leading comment" + } + ], + "loc":{"source":null,"start":{"line":13,"column":4},"end":{"line":15,"column":48}}, + "range":[271,340], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":13,"column":7},"end":{"line":15,"column":5}}, + "range":[274,297], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":14,"column":8},"end":{"line":14,"column":15}}, + "range":[284,291], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":14,"column":8},"end":{"line":14,"column":14}}, + "range":[284,290], + "operator":"+=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":14,"column":8},"end":{"line":14,"column":9}}, + "range":[284,285], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":14,"column":13},"end":{"line":14,"column":14}}, + "range":[289,290], + "value":1, + "raw":"1" + } + }, + "directive":null + } + ] + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":15,"column":13},"end":{"line":15,"column":18}}, + "range":[305,310], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":15,"column":13},"end":{"line":15,"column":14}}, + "range":[305,306], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":15,"column":17},"end":{"line":15,"column":18}}, + "range":[309,310], + "value":3, + "raw":"3" + } + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":20,"column":0},"end":{"line":27,"column":1}}, + "range":[386,631], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":20,"column":9},"end":{"line":20,"column":22}}, + "range":[395,408], + "name":"only_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":20,"column":25},"end":{"line":27,"column":1}}, + "range":[411,631], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":21,"column":4},"end":{"line":21,"column":14}}, + "range":[417,427], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":21,"column":8},"end":{"line":21,"column":13}}, + "range":[421,426], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":21,"column":8},"end":{"line":21,"column":9}}, + "range":[421,422], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":21,"column":12},"end":{"line":21,"column":13}}, + "range":[425,426], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + { + "type":"DoWhileStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":6},"end":{"line":24,"column":45}}, + "range":[459,498], + "value":" 3.0. pre-keyword trailing comment " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":52},"end":{"line":24,"column":88}}, + "range":[505,541], + "value":" 3.1. pre-cond trailing comment " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":96},"end":{"line":24,"column":133}}, + "range":[549,586], + "value":" 3.2. 
past-cond trailing comment " + } + ], + "loc":{"source":null,"start":{"line":22,"column":4},"end":{"line":24,"column":134}}, + "range":[432,587], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":22,"column":7},"end":{"line":24,"column":5}}, + "range":[435,458], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":23,"column":8},"end":{"line":23,"column":15}}, + "range":[445,452], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":23,"column":8},"end":{"line":23,"column":14}}, + "range":[445,451], + "operator":"+=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":23,"column":8},"end":{"line":23,"column":9}}, + "range":[445,446], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":23,"column":13},"end":{"line":23,"column":14}}, + "range":[450,451], + "value":1, + "raw":"1" + } + }, + "directive":null + } + ] + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":24,"column":90},"end":{"line":24,"column":94}}, + "range":[543,547], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":90},"end":{"line":24,"column":91}}, + "range":[543,544], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":24,"column":93},"end":{"line":24,"column":94}}, + "range":[546,547], + "value":3, + "raw":"3" + } + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":39}}, + "range":[31,66], + "value":" 1.0. unreachable leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":16},"end":{"line":3,"column":39}}, + "range":[83,106], + "value":" 1.1. leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":35}}, + "range":[111,142], + "value":" 1.2. another leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":12,"column":4},"end":{"line":12,"column":27}}, + "range":[243,266], + "value":" 2.1. leading comment" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":15,"column":20},"end":{"line":15,"column":47}}, + "range":[312,339], + "value":" 2.2. trailing comment " + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":17,"column":4},"end":{"line":17,"column":40}}, + "range":[346,382], + "value":" 2.3. unreachable trailing comment" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":6},"end":{"line":24,"column":45}}, + "range":[459,498], + "value":" 3.0. pre-keyword trailing comment " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":52},"end":{"line":24,"column":88}}, + "range":[505,541], + "value":" 3.1. pre-cond trailing comment " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":96},"end":{"line":24,"column":133}}, + "range":[549,586], + "value":" 3.2. past-cond trailing comment " + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":26,"column":4},"end":{"line":26,"column":40}}, + "range":[593,629], + "value":" 3.3. 
unreachable trailing comment" + } + ] +} diff --git a/src/parser/test/flow/comment_interning/if.js b/src/parser/test/flow/comment_interning/if.js new file mode 100644 index 00000000000..ac203158d09 --- /dev/null +++ b/src/parser/test/flow/comment_interning/if.js @@ -0,0 +1,28 @@ +function if_leading () { + // not leading 1 + var pred = true; // pre if leading 1 + /* pre if leading 2 */ + if /* pre cond leading 1 */ (pred) { + pred = false; + } +} + +function if_trailing() { + var pred = false; + if (!pred) /* trailing 1 */ { + pred = true; + } + // not trailing 1 +} + +function if_leading_and_trailing() { + // not leading 2 + var pred = true; // pre if leading 3 + /* pre if leading 4 */ + if /* pre cond leading 2 */ (pred) /* trailing 2 */ { + pred = false; + } /* not trailing 2 */ else /* pre if leading 5 */ if /* pre cond leading 3 */ (pred) /* trailing 3 */ { + pref = true; + } + // not trailing 3 +} diff --git a/src/parser/test/flow/comment_interning/if.options.json b/src/parser/test/flow/comment_interning/if.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/if.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/if.tree.json b/src/parser/test/flow/comment_interning/if.tree.json new file mode 100644 index 00000000000..0ce3e3bad71 --- /dev/null +++ b/src/parser/test/flow/comment_interning/if.tree.json @@ -0,0 +1,546 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":28,"column":1}}, + "range":[0,686], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":8,"column":1}}, + "range":[0,185], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":19}}, + "range":[9,19], + "name":"if_leading", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":8,"column":1}}, + "range":[23,185], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":20}}, + "range":[50,66], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":19}}, + "range":[54,65], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":12}}, + "range":[54,58], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":15},"end":{"line":3,"column":19}}, + "range":[61,65], + "value":true, + "raw":"true" + } + } + ], + "kind":"var" + }, + { + "type":"IfStatement", + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":22},"end":{"line":3,"column":41}}, + "range":[68,87], + "value":" pre if leading 1" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":26}}, + "range":[92,114], + "value":" pre if leading 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":5,"column":7},"end":{"line":5,"column":31}}, + "range":[122,146], + "value":" pre cond leading 1 " + } + ], + "loc":{"source":null,"start":{"line":5,"column":4},"end":{"line":7,"column":5}}, + "range":[119,183], + "test":{ + "type":"Identifier", + 
"loc":{"source":null,"start":{"line":5,"column":33},"end":{"line":5,"column":37}}, + "range":[148,152], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "consequent":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":5,"column":39},"end":{"line":7,"column":5}}, + "range":[154,183], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":6,"column":8},"end":{"line":6,"column":21}}, + "range":[164,177], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":6,"column":8},"end":{"line":6,"column":20}}, + "range":[164,176], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":8},"end":{"line":6,"column":12}}, + "range":[164,168], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":15},"end":{"line":6,"column":20}}, + "range":[171,176], + "value":false, + "raw":"false" + } + }, + "directive":null + } + ] + }, + "alternate":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":10,"column":0},"end":{"line":16,"column":1}}, + "range":[187,318], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":9},"end":{"line":10,"column":20}}, + "range":[196,207], + "name":"if_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":10,"column":23},"end":{"line":16,"column":1}}, + "range":[210,318], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":11,"column":4},"end":{"line":11,"column":21}}, + "range":[216,233], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":11,"column":8},"end":{"line":11,"column":20}}, + "range":[220,232], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":11,"column":8},"end":{"line":11,"column":12}}, + "range":[220,224], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":11,"column":15},"end":{"line":11,"column":20}}, + "range":[227,232], + "value":false, + "raw":"false" + } + } + ], + "kind":"var" + }, + { + "type":"IfStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":12,"column":15},"end":{"line":12,"column":31}}, + "range":[249,265], + "value":" trailing 1 " + } + ], + "loc":{"source":null,"start":{"line":12,"column":4},"end":{"line":14,"column":5}}, + "range":[238,294], + "test":{ + "type":"UnaryExpression", + "loc":{"source":null,"start":{"line":12,"column":8},"end":{"line":12,"column":13}}, + "range":[242,247], + "operator":"!", + "prefix":true, + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":12,"column":9},"end":{"line":12,"column":13}}, + "range":[243,247], + "name":"pred", + "typeAnnotation":null, + "optional":false + } + }, + "consequent":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":12,"column":32},"end":{"line":14,"column":5}}, + "range":[266,294], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":13,"column":8},"end":{"line":13,"column":20}}, + "range":[276,288], + "expression":{ + "type":"AssignmentExpression", + 
"loc":{"source":null,"start":{"line":13,"column":8},"end":{"line":13,"column":19}}, + "range":[276,287], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":13,"column":8},"end":{"line":13,"column":12}}, + "range":[276,280], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":13,"column":15},"end":{"line":13,"column":19}}, + "range":[283,287], + "value":true, + "raw":"true" + } + }, + "directive":null + } + ] + }, + "alternate":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":18,"column":0},"end":{"line":28,"column":1}}, + "range":[320,686], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":18,"column":9},"end":{"line":18,"column":32}}, + "range":[329,352], + "name":"if_leading_and_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":18,"column":35},"end":{"line":28,"column":1}}, + "range":[355,686], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":20,"column":4},"end":{"line":20,"column":20}}, + "range":[382,398], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":20,"column":8},"end":{"line":20,"column":19}}, + "range":[386,397], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":20,"column":8},"end":{"line":20,"column":12}}, + "range":[386,390], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":20,"column":15},"end":{"line":20,"column":19}}, + "range":[393,397], + "value":true, + "raw":"true" + } + } + ], + "kind":"var" + }, + { + "type":"IfStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":22,"column":39},"end":{"line":22,"column":55}}, + "range":[486,502], + "value":" trailing 2 " + } + ], + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":20,"column":22},"end":{"line":20,"column":41}}, + "range":[400,419], + "value":" pre if leading 3" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":21,"column":4},"end":{"line":21,"column":26}}, + "range":[424,446], + "value":" pre if leading 4 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":22,"column":7},"end":{"line":22,"column":31}}, + "range":[454,478], + "value":" pre cond leading 2 " + } + ], + "loc":{"source":null,"start":{"line":22,"column":4},"end":{"line":26,"column":5}}, + "range":[451,662], + "test":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":22,"column":33},"end":{"line":22,"column":37}}, + "range":[480,484], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "consequent":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":22,"column":56},"end":{"line":24,"column":5}}, + "range":[503,532], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":23,"column":8},"end":{"line":23,"column":21}}, + "range":[513,526], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":23,"column":8},"end":{"line":23,"column":20}}, + "range":[513,525], + "operator":"=", + "left":{ + "type":"Identifier", + 
"loc":{"source":null,"start":{"line":23,"column":8},"end":{"line":23,"column":12}}, + "range":[513,517], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":23,"column":15},"end":{"line":23,"column":20}}, + "range":[520,525], + "value":false, + "raw":"false" + } + }, + "directive":null + } + ] + }, + "alternate":{ + "type":"IfStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":90},"end":{"line":24,"column":106}}, + "range":[617,633], + "value":" trailing 3 " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":32},"end":{"line":24,"column":54}}, + "range":[559,581], + "value":" pre if leading 5 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":58},"end":{"line":24,"column":82}}, + "range":[585,609], + "value":" pre cond leading 3 " + } + ], + "loc":{"source":null,"start":{"line":24,"column":55},"end":{"line":26,"column":5}}, + "range":[582,662], + "test":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":84},"end":{"line":24,"column":88}}, + "range":[611,615], + "name":"pred", + "typeAnnotation":null, + "optional":false + }, + "consequent":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":24,"column":107},"end":{"line":26,"column":5}}, + "range":[634,662], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":25,"column":8},"end":{"line":25,"column":20}}, + "range":[644,656], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":25,"column":8},"end":{"line":25,"column":19}}, + "range":[644,655], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":25,"column":8},"end":{"line":25,"column":12}}, + "range":[644,648], + "name":"pref", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":25,"column":15},"end":{"line":25,"column":19}}, + "range":[651,655], + "value":true, + "raw":"true" + } + }, + "directive":null + } + ] + }, + "alternate":null + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":20}}, + "range":[29,45], + "value":" not leading 1" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":22},"end":{"line":3,"column":41}}, + "range":[68,87], + "value":" pre if leading 1" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":26}}, + "range":[92,114], + "value":" pre if leading 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":5,"column":7},"end":{"line":5,"column":31}}, + "range":[122,146], + "value":" pre cond leading 1 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":12,"column":15},"end":{"line":12,"column":31}}, + "range":[249,265], + "value":" trailing 1 " + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":15,"column":4},"end":{"line":15,"column":21}}, + "range":[299,316], + "value":" not trailing 1" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":19,"column":4},"end":{"line":19,"column":20}}, + "range":[361,377], + "value":" not leading 2" + }, + { + "type":"Line", + 
"loc":{"source":null,"start":{"line":20,"column":22},"end":{"line":20,"column":41}}, + "range":[400,419], + "value":" pre if leading 3" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":21,"column":4},"end":{"line":21,"column":26}}, + "range":[424,446], + "value":" pre if leading 4 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":22,"column":7},"end":{"line":22,"column":31}}, + "range":[454,478], + "value":" pre cond leading 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":22,"column":39},"end":{"line":22,"column":55}}, + "range":[486,502], + "value":" trailing 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":6},"end":{"line":24,"column":26}}, + "range":[533,553], + "value":" not trailing 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":32},"end":{"line":24,"column":54}}, + "range":[559,581], + "value":" pre if leading 5 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":58},"end":{"line":24,"column":82}}, + "range":[585,609], + "value":" pre cond leading 3 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":90},"end":{"line":24,"column":106}}, + "range":[617,633], + "value":" trailing 3 " + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":27,"column":4},"end":{"line":27,"column":21}}, + "range":[667,684], + "value":" not trailing 3" + } + ] +} diff --git a/src/parser/test/flow/comment_interning/literal.js b/src/parser/test/flow/comment_interning/literal.js new file mode 100644 index 00000000000..cea4e8c1db2 --- /dev/null +++ b/src/parser/test/flow/comment_interning/literal.js @@ -0,0 +1 @@ +const id = /*pre*/1/*post*/; diff --git a/src/parser/test/flow/comment_interning/literal.options.json b/src/parser/test/flow/comment_interning/literal.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/literal.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/literal.tree.json b/src/parser/test/flow/comment_interning/literal.tree.json new file mode 100644 index 00000000000..94a848b3bc3 --- /dev/null +++ b/src/parser/test/flow/comment_interning/literal.tree.json @@ -0,0 +1,65 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":28}}, + "range":[0,28], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":28}}, + "range":[0,28], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":19}}, + "range":[6,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":8}}, + "range":[6,8], + "name":"id", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":27}}, + "range":[19,27], + "value":"post" + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":18}}, + "range":[11,18], + "value":"pre" + } + ], + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":19}}, + "range":[18,19], + "value":1, + "raw":"1" + } + } + ], + "kind":"const" + } + ], + "comments":[ + { + "type":"Block", + 
"loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":18}}, + "range":[11,18], + "value":"pre" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":27}}, + "range":[19,27], + "value":"post" + } + ] +} diff --git a/src/parser/test/flow/comment_interning/new.js b/src/parser/test/flow/comment_interning/new.js new file mode 100644 index 00000000000..c4cef353eb1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/new.js @@ -0,0 +1,21 @@ +function leading() { + /* 1.0 unreachable leading */ + var F = function(){}; /* 1.1 leading */ + /* 1.2 leading */ + new F(); +} + +function trailing() { + var F = function(){}; + + new F() /* 2.0 trailing */; + /* 2.1 unreachable trailing */ +} + +function leading_and_trailing() { + /* 3.0 unreachable leading */ + var F = function(){}; /* 3.1 leading */ + /* 3.2 leading */ + new /* 3.3 unreachable by New node */ F() /* 3.4 trailing */; + /* 3.5 unreachable trailing */ +} diff --git a/src/parser/test/flow/comment_interning/new.options.json b/src/parser/test/flow/comment_interning/new.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/new.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/new.tree.json b/src/parser/test/flow/comment_interning/new.tree.json new file mode 100644 index 00000000000..b4693cf41fd --- /dev/null +++ b/src/parser/test/flow/comment_interning/new.tree.json @@ -0,0 +1,391 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":21,"column":1}}, + "range":[0,468], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":6,"column":1}}, + "range":[0,127], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, + "range":[9,16], + "name":"leading", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":6,"column":1}}, + "range":[19,127], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":23}}, + "range":[55,76], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":22}}, + "range":[59,75], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":7}}, + "range":[59,60], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":22}}, + "range":[63,75], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":20},"end":{"line":3,"column":22}}, + "range":[73,75], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":5,"column":2},"end":{"line":5,"column":10}}, + "range":[117,125], + "expression":{ + "type":"NewExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":24},"end":{"line":3,"column":41}}, + "range":[77,94], + "value":" 1.1 leading " + 
}, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":19}}, + "range":[97,114], + "value":" 1.2 leading " + } + ], + "loc":{"source":null,"start":{"line":5,"column":2},"end":{"line":5,"column":9}}, + "range":[117,124], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":5,"column":6},"end":{"line":5,"column":7}}, + "range":[121,122], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":null, + "arguments":[] + }, + "directive":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":8,"column":0},"end":{"line":13,"column":1}}, + "range":[129,240], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":8,"column":9},"end":{"line":8,"column":17}}, + "range":[138,146], + "name":"trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":8,"column":20},"end":{"line":13,"column":1}}, + "range":[149,240], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":9,"column":2},"end":{"line":9,"column":23}}, + "range":[153,174], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":9,"column":6},"end":{"line":9,"column":22}}, + "range":[157,173], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":9,"column":6},"end":{"line":9,"column":7}}, + "range":[157,158], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":9,"column":10},"end":{"line":9,"column":22}}, + "range":[161,173], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":9,"column":20},"end":{"line":9,"column":22}}, + "range":[171,173], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":11,"column":2},"end":{"line":11,"column":29}}, + "range":[178,205], + "expression":{ + "type":"NewExpression", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":10},"end":{"line":11,"column":28}}, + "range":[186,204], + "value":" 2.0 trailing " + } + ], + "loc":{"source":null,"start":{"line":11,"column":2},"end":{"line":11,"column":9}}, + "range":[178,185], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":11,"column":6},"end":{"line":11,"column":7}}, + "range":[182,183], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":null, + "arguments":[] + }, + "directive":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":15,"column":0},"end":{"line":21,"column":1}}, + "range":[242,468], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":15,"column":9},"end":{"line":15,"column":29}}, + "range":[251,271], + "name":"leading_and_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + 
"loc":{"source":null,"start":{"line":15,"column":32},"end":{"line":21,"column":1}}, + "range":[274,468], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":17,"column":2},"end":{"line":17,"column":23}}, + "range":[310,331], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":17,"column":6},"end":{"line":17,"column":22}}, + "range":[314,330], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":6},"end":{"line":17,"column":7}}, + "range":[314,315], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":17,"column":10},"end":{"line":17,"column":22}}, + "range":[318,330], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":17,"column":20},"end":{"line":17,"column":22}}, + "range":[328,330], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":19,"column":2},"end":{"line":19,"column":63}}, + "range":[372,433], + "expression":{ + "type":"NewExpression", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":19,"column":44},"end":{"line":19,"column":62}}, + "range":[414,432], + "value":" 3.4 trailing " + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":17,"column":24},"end":{"line":17,"column":41}}, + "range":[332,349], + "value":" 3.1 leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":18,"column":2},"end":{"line":18,"column":19}}, + "range":[352,369], + "value":" 3.2 leading " + } + ], + "loc":{"source":null,"start":{"line":19,"column":2},"end":{"line":19,"column":43}}, + "range":[372,413], + "callee":{ + "type":"Identifier", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":19,"column":6},"end":{"line":19,"column":39}}, + "range":[376,409], + "value":" 3.3 unreachable by New node " + } + ], + "loc":{"source":null,"start":{"line":19,"column":40},"end":{"line":19,"column":41}}, + "range":[410,411], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":null, + "arguments":[] + }, + "directive":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":31}}, + "range":[23,52], + "value":" 1.0 unreachable leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":24},"end":{"line":3,"column":41}}, + "range":[77,94], + "value":" 1.1 leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":19}}, + "range":[97,114], + "value":" 1.2 leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":10},"end":{"line":11,"column":28}}, + "range":[186,204], + "value":" 2.0 trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":12,"column":2},"end":{"line":12,"column":32}}, + "range":[208,238], + "value":" 2.1 unreachable trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":16,"column":2},"end":{"line":16,"column":31}}, + "range":[278,307], + "value":" 3.0 
unreachable leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":17,"column":24},"end":{"line":17,"column":41}}, + "range":[332,349], + "value":" 3.1 leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":18,"column":2},"end":{"line":18,"column":19}}, + "range":[352,369], + "value":" 3.2 leading " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":19,"column":6},"end":{"line":19,"column":39}}, + "range":[376,409], + "value":" 3.3 unreachable by New node " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":19,"column":44},"end":{"line":19,"column":62}}, + "range":[414,432], + "value":" 3.4 trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":20,"column":2},"end":{"line":20,"column":32}}, + "range":[436,466], + "value":" 3.5 unreachable trailing " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/object.js b/src/parser/test/flow/comment_interning/object.js new file mode 100644 index 00000000000..145033e2394 --- /dev/null +++ b/src/parser/test/flow/comment_interning/object.js @@ -0,0 +1,3 @@ +const id = /*pre*/ { + a: 1, +}/*post*/; diff --git a/src/parser/test/flow/comment_interning/object.options.json b/src/parser/test/flow/comment_interning/object.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/object.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/object.tree.json b/src/parser/test/flow/comment_interning/object.tree.json new file mode 100644 index 00000000000..cb5cdbcd360 --- /dev/null +++ b/src/parser/test/flow/comment_interning/object.tree.json @@ -0,0 +1,89 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":10}}, + "range":[0,39], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":10}}, + "range":[0,39], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":3,"column":1}}, + "range":[6,30], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":8}}, + "range":[6,8], + "name":"id", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"ObjectExpression", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":1},"end":{"line":3,"column":9}}, + "range":[30,38], + "value":"post" + } + ], + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":18}}, + "range":[11,18], + "value":"pre" + } + ], + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":3,"column":1}}, + "range":[19,30], + "properties":[ + { + "type":"Property", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":6}}, + "range":[23,27], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[23,24], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":6}}, + "range":[26,27], + "value":1, + "raw":"1" + }, + "kind":"init", + "method":false, + "shorthand":false, + "computed":false + } + ] + } + } + ], + "kind":"const" + } + ], + "comments":[ + { + "type":"Block", + 
"loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":18}}, + "range":[11,18], + "value":"pre" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":1},"end":{"line":3,"column":9}}, + "range":[30,38], + "value":"post" + } + ] +} diff --git a/src/parser/test/flow/comment_interning/return.js b/src/parser/test/flow/comment_interning/return.js new file mode 100644 index 00000000000..3f5b64971eb --- /dev/null +++ b/src/parser/test/flow/comment_interning/return.js @@ -0,0 +1,9 @@ +function t0() { + t1(); // a leading comment + // another leading comment + return 42; // should not be a trailing comment +} + +function t1() { + return /* trailing comment */; +} diff --git a/src/parser/test/flow/comment_interning/return.options.json b/src/parser/test/flow/comment_interning/return.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/return.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/return.tree.json b/src/parser/test/flow/comment_interning/return.tree.json new file mode 100644 index 00000000000..b548eee914f --- /dev/null +++ b/src/parser/test/flow/comment_interning/return.tree.json @@ -0,0 +1,148 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":9,"column":1}}, + "range":[0,193], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,142], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":11}}, + "range":[9,11], + "name":"t0", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":5,"column":1}}, + "range":[14,142], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":13}}, + "range":[24,29], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":12}}, + "range":[24,28], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":10}}, + "range":[24,26], + "name":"t1", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":null, + "arguments":[] + }, + "directive":null + }, + { + "type":"ReturnStatement", + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":2,"column":14},"end":{"line":2,"column":34}}, + "range":[30,50], + "value":" a leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":34}}, + "range":[59,85], + "value":" another leading comment" + } + ], + "loc":{"source":null,"start":{"line":4,"column":8},"end":{"line":4,"column":18}}, + "range":[94,104], + "argument":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":15},"end":{"line":4,"column":17}}, + "range":[101,103], + "value":42, + "raw":"42" + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":7,"column":0},"end":{"line":9,"column":1}}, + "range":[144,193], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":7,"column":9},"end":{"line":7,"column":11}}, + 
"range":[153,155], + "name":"t1", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":7,"column":14},"end":{"line":9,"column":1}}, + "range":[158,193], + "body":[ + { + "type":"ReturnStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":8,"column":8},"end":{"line":8,"column":30}}, + "range":[168,190], + "value":" trailing comment " + } + ], + "loc":{"source":null,"start":{"line":8,"column":1},"end":{"line":8,"column":31}}, + "range":[161,191], + "argument":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":2,"column":14},"end":{"line":2,"column":34}}, + "range":[30,50], + "value":" a leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":34}}, + "range":[59,85], + "value":" another leading comment" + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":4,"column":19},"end":{"line":4,"column":54}}, + "range":[105,140], + "value":" should not be a trailing comment" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":8,"column":8},"end":{"line":8,"column":30}}, + "range":[168,190], + "value":" trailing comment " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/try.js b/src/parser/test/flow/comment_interning/try.js new file mode 100644 index 00000000000..3dddb62ff13 --- /dev/null +++ b/src/parser/test/flow/comment_interning/try.js @@ -0,0 +1,31 @@ +function onlyLeading() { + /* Not leading */ + const z = 0; + // 1.1 Leading A + /* 1.2 Leading B */ try { + const x = 1; + } catch (e1) { + console.log(1); + } +} + +function onlyTrailing() { + try /* 2.1 Trailing */ { + /* 2.2 Not trailing */ + const y = 2; + } catch (e2) { + console.log(2); + } + /* 2.3 Not try Trailing B */ +} + +function leadingAndTrailing() { + // 3.1 Leading + try /* 3.2 Trailing */ { + /* 3.3 Not trailing */ + const y = 2; + } finally { + console.log(3); + } + /* 3.4 Not Trailing */ +} diff --git a/src/parser/test/flow/comment_interning/try.options.json b/src/parser/test/flow/comment_interning/try.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/try.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/try.tree.json b/src/parser/test/flow/comment_interning/try.tree.json new file mode 100644 index 00000000000..4527e9b80ef --- /dev/null +++ b/src/parser/test/flow/comment_interning/try.tree.json @@ -0,0 +1,515 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":31,"column":1}}, + "range":[0,524], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":10,"column":1}}, + "range":[0,166], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":20}}, + "range":[9,20], + "name":"onlyLeading", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":10,"column":1}}, + "range":[23,166], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":14}}, + "range":[47,59], + 
"declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":13}}, + "range":[53,58], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":9}}, + "range":[53,54], + "name":"z", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":12},"end":{"line":3,"column":13}}, + "range":[57,58], + "value":0, + "raw":"0" + } + } + ], + "kind":"const" + }, + { + "type":"TryStatement", + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":18}}, + "range":[62,78], + "value":" 1.1 Leading A" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":5,"column":2},"end":{"line":5,"column":21}}, + "range":[81,100], + "value":" 1.2 Leading B " + } + ], + "loc":{"source":null,"start":{"line":5,"column":22},"end":{"line":9,"column":3}}, + "range":[101,164], + "block":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":5,"column":26},"end":{"line":7,"column":3}}, + "range":[105,127], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":6,"column":4},"end":{"line":6,"column":16}}, + "range":[111,123], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":6,"column":10},"end":{"line":6,"column":15}}, + "range":[117,122], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":10},"end":{"line":6,"column":11}}, + "range":[117,118], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":6,"column":14},"end":{"line":6,"column":15}}, + "range":[121,122], + "value":1, + "raw":"1" + } + } + ], + "kind":"const" + } + ] + }, + "handler":{ + "type":"CatchClause", + "loc":{"source":null,"start":{"line":7,"column":4},"end":{"line":9,"column":3}}, + "range":[128,164], + "param":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":7,"column":11},"end":{"line":7,"column":13}}, + "range":[135,137], + "name":"e1", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":7,"column":15},"end":{"line":9,"column":3}}, + "range":[139,164], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":8,"column":4},"end":{"line":8,"column":19}}, + "range":[145,160], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":8,"column":4},"end":{"line":8,"column":18}}, + "range":[145,159], + "callee":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":8,"column":4},"end":{"line":8,"column":15}}, + "range":[145,156], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":8,"column":4},"end":{"line":8,"column":11}}, + "range":[145,152], + "name":"console", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":8,"column":12},"end":{"line":8,"column":15}}, + "range":[153,156], + "name":"log", + "typeAnnotation":null, + "optional":false + }, + "computed":false + }, + "typeArguments":null, + "arguments":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":8,"column":16},"end":{"line":8,"column":17}}, + "range":[157,158], + "value":1, + "raw":"1" + } + ] + }, + "directive":null + } + ] + } + }, + "finalizer":null + } + ] + }, + 
"async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":12,"column":0},"end":{"line":20,"column":1}}, + "range":[168,338], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":12,"column":9},"end":{"line":12,"column":21}}, + "range":[177,189], + "name":"onlyTrailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":12,"column":24},"end":{"line":20,"column":1}}, + "range":[192,338], + "body":[ + { + "type":"TryStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":13,"column":6},"end":{"line":13,"column":24}}, + "range":[200,218], + "value":" 2.1 Trailing " + } + ], + "loc":{"source":null,"start":{"line":13,"column":2},"end":{"line":18,"column":3}}, + "range":[196,305], + "block":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":13,"column":25},"end":{"line":16,"column":3}}, + "range":[219,268], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":15,"column":4},"end":{"line":15,"column":16}}, + "range":[252,264], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":15,"column":10},"end":{"line":15,"column":15}}, + "range":[258,263], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":15,"column":10},"end":{"line":15,"column":11}}, + "range":[258,259], + "name":"y", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":15,"column":14},"end":{"line":15,"column":15}}, + "range":[262,263], + "value":2, + "raw":"2" + } + } + ], + "kind":"const" + } + ] + }, + "handler":{ + "type":"CatchClause", + "loc":{"source":null,"start":{"line":16,"column":4},"end":{"line":18,"column":3}}, + "range":[269,305], + "param":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":16,"column":11},"end":{"line":16,"column":13}}, + "range":[276,278], + "name":"e2", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":16,"column":15},"end":{"line":18,"column":3}}, + "range":[280,305], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":17,"column":4},"end":{"line":17,"column":19}}, + "range":[286,301], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":17,"column":4},"end":{"line":17,"column":18}}, + "range":[286,300], + "callee":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":17,"column":4},"end":{"line":17,"column":15}}, + "range":[286,297], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":4},"end":{"line":17,"column":11}}, + "range":[286,293], + "name":"console", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":12},"end":{"line":17,"column":15}}, + "range":[294,297], + "name":"log", + "typeAnnotation":null, + "optional":false + }, + "computed":false + }, + "typeArguments":null, + "arguments":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":17,"column":16},"end":{"line":17,"column":17}}, + "range":[298,299], + "value":2, + "raw":"2" + } + ] + }, + "directive":null + } + ] + } + }, + "finalizer":null + } + ] + }, + "async":false, + 
"generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":22,"column":0},"end":{"line":31,"column":1}}, + "range":[340,524], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":22,"column":9},"end":{"line":22,"column":27}}, + "range":[349,367], + "name":"leadingAndTrailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":22,"column":30},"end":{"line":31,"column":1}}, + "range":[370,524], + "body":[ + { + "type":"TryStatement", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":6},"end":{"line":24,"column":24}}, + "range":[395,413], + "value":" 3.2 Trailing " + } + ], + "leadingComments":[ + { + "type":"Line", + "loc":{"source":null,"start":{"line":23,"column":2},"end":{"line":23,"column":16}}, + "range":[374,388], + "value":" 3.1 Leading" + } + ], + "loc":{"source":null,"start":{"line":24,"column":2},"end":{"line":29,"column":3}}, + "range":[391,497], + "block":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":24,"column":25},"end":{"line":27,"column":3}}, + "range":[414,463], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":26,"column":4},"end":{"line":26,"column":16}}, + "range":[447,459], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":26,"column":10},"end":{"line":26,"column":15}}, + "range":[453,458], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":26,"column":10},"end":{"line":26,"column":11}}, + "range":[453,454], + "name":"y", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":26,"column":14},"end":{"line":26,"column":15}}, + "range":[457,458], + "value":2, + "raw":"2" + } + } + ], + "kind":"const" + } + ] + }, + "handler":null, + "finalizer":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":27,"column":12},"end":{"line":29,"column":3}}, + "range":[472,497], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":28,"column":4},"end":{"line":28,"column":19}}, + "range":[478,493], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":28,"column":4},"end":{"line":28,"column":18}}, + "range":[478,492], + "callee":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":28,"column":4},"end":{"line":28,"column":15}}, + "range":[478,489], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":28,"column":4},"end":{"line":28,"column":11}}, + "range":[478,485], + "name":"console", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":28,"column":12},"end":{"line":28,"column":15}}, + "range":[486,489], + "name":"log", + "typeAnnotation":null, + "optional":false + }, + "computed":false + }, + "typeArguments":null, + "arguments":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":28,"column":16},"end":{"line":28,"column":17}}, + "range":[490,491], + "value":3, + "raw":"3" + } + ] + }, + "directive":null + } + ] + } + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Block", + 
"loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":19}}, + "range":[27,44], + "value":" Not leading " + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":18}}, + "range":[62,78], + "value":" 1.1 Leading A" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":5,"column":2},"end":{"line":5,"column":21}}, + "range":[81,100], + "value":" 1.2 Leading B " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":13,"column":6},"end":{"line":13,"column":24}}, + "range":[200,218], + "value":" 2.1 Trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":14,"column":4},"end":{"line":14,"column":26}}, + "range":[225,247], + "value":" 2.2 Not trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":19,"column":2},"end":{"line":19,"column":30}}, + "range":[308,336], + "value":" 2.3 Not try Trailing B " + }, + { + "type":"Line", + "loc":{"source":null,"start":{"line":23,"column":2},"end":{"line":23,"column":16}}, + "range":[374,388], + "value":" 3.1 Leading" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":24,"column":6},"end":{"line":24,"column":24}}, + "range":[395,413], + "value":" 3.2 Trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":25,"column":4},"end":{"line":25,"column":26}}, + "range":[420,442], + "value":" 3.3 Not trailing " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":30,"column":2},"end":{"line":30,"column":24}}, + "range":[500,522], + "value":" 3.4 Not Trailing " + } + ] +} diff --git a/src/parser/test/flow/comment_interning/unary.js b/src/parser/test/flow/comment_interning/unary.js new file mode 100644 index 00000000000..15ccc24053e --- /dev/null +++ b/src/parser/test/flow/comment_interning/unary.js @@ -0,0 +1,3 @@ +const a = /*pre*/ ! 
/*post*/ false; +const b = /*pre*/ typeof /*post*/ 5; +const c = /*pre*/ delete /*post*/ a; diff --git a/src/parser/test/flow/comment_interning/unary.options.json b/src/parser/test/flow/comment_interning/unary.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/unary.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/unary.tree.json b/src/parser/test/flow/comment_interning/unary.tree.json new file mode 100644 index 00000000000..911c1ed926a --- /dev/null +++ b/src/parser/test/flow/comment_interning/unary.tree.json @@ -0,0 +1,199 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":36}}, + "range":[0,109], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":35}}, + "range":[0,35], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":34}}, + "range":[6,34], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"UnaryExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":17}}, + "range":[10,17], + "value":"pre" + } + ], + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":34}}, + "range":[18,34], + "operator":"!", + "prefix":true, + "argument":{ + "type":"Literal", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":28}}, + "range":[20,28], + "value":"post" + } + ], + "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":34}}, + "range":[29,34], + "value":false, + "raw":"false" + } + } + } + ], + "kind":"const" + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":36}}, + "range":[36,72], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":35}}, + "range":[42,71], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "range":[42,43], + "name":"b", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"UnaryExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":17}}, + "range":[46,53], + "value":"pre" + } + ], + "loc":{"source":null,"start":{"line":2,"column":18},"end":{"line":2,"column":35}}, + "range":[54,71], + "operator":"typeof", + "prefix":true, + "argument":{ + "type":"Literal", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":25},"end":{"line":2,"column":33}}, + "range":[61,69], + "value":"post" + } + ], + "loc":{"source":null,"start":{"line":2,"column":34},"end":{"line":2,"column":35}}, + "range":[70,71], + "value":5, + "raw":"5" + } + } + } + ], + "kind":"const" + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":36}}, + "range":[73,109], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":35}}, + 
"range":[79,108], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":7}}, + "range":[79,80], + "name":"c", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"UnaryExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":17}}, + "range":[83,90], + "value":"pre" + } + ], + "loc":{"source":null,"start":{"line":3,"column":18},"end":{"line":3,"column":35}}, + "range":[91,108], + "operator":"delete", + "prefix":true, + "argument":{ + "type":"Identifier", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":25},"end":{"line":3,"column":33}}, + "range":[98,106], + "value":"post" + } + ], + "loc":{"source":null,"start":{"line":3,"column":34},"end":{"line":3,"column":35}}, + "range":[107,108], + "name":"a", + "typeAnnotation":null, + "optional":false + } + } + } + ], + "kind":"const" + } + ], + "comments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":17}}, + "range":[10,17], + "value":"pre" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":28}}, + "range":[20,28], + "value":"post" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":17}}, + "range":[46,53], + "value":"pre" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":2,"column":25},"end":{"line":2,"column":33}}, + "range":[61,69], + "value":"post" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":17}}, + "range":[83,90], + "value":"pre" + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":25},"end":{"line":3,"column":33}}, + "range":[98,106], + "value":"post" + } + ] +} diff --git a/src/parser/test/flow/comment_interning/yield.js b/src/parser/test/flow/comment_interning/yield.js new file mode 100644 index 00000000000..1669c36b9b7 --- /dev/null +++ b/src/parser/test/flow/comment_interning/yield.js @@ -0,0 +1,31 @@ +function *yield_trailing() { + cont arr = [1, 2, 3] + for (var i = 0; i < arr.length; i++) { + yield arr[i] /* trailing yield val 1 */; + } +} + +function *yield_trailing() { + cont arr = [1, 2, 3] + for (var i = 0; i < arr.length; i++) { + /* leading yield 1 */ yield arr[i] /* trailing yield val 2 */; + } +} + +function *yield_trailing() { + cont arr = [1, 2, 3] + for (var i = 0; i < arr.length; i++) { + /* leading yield 2 */ yield arr[i]; + } +} + +function *yield_trailing() { + cont arr = [1, 2, 3] + for (var i = 0; i < arr.length; i++) { + yield /* leading yield val */ arr[i] /* trailing yield val 3 */; + } +} + +function *yield_trailing() { + yield /* trailing no yield val */; +} diff --git a/src/parser/test/flow/comment_interning/yield.options.json b/src/parser/test/flow/comment_interning/yield.options.json new file mode 100644 index 00000000000..0d0a7de49b1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/yield.options.json @@ -0,0 +1,3 @@ +{ + "intern_comments": true +} diff --git a/src/parser/test/flow/comment_interning/yield.tree.json b/src/parser/test/flow/comment_interning/yield.tree.json new file mode 100644 index 00000000000..7601e0f58f1 --- /dev/null +++ b/src/parser/test/flow/comment_interning/yield.tree.json @@ -0,0 +1,997 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":12}}, + "message":"Unexpected 
identifier, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":9,"column":9},"end":{"line":9,"column":12}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":16,"column":9},"end":{"line":16,"column":12}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":23,"column":9},"end":{"line":23,"column":12}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":31,"column":1}}, + "range":[0,730], + "body":[ + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":6,"column":1}}, + "range":[0,153], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":24}}, + "range":[10,24], + "name":"yield_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":6,"column":1}}, + "range":[27,153], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":8}}, + "range":[33,37], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":8}}, + "range":[33,37], + "name":"cont", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":24}}, + "range":[38,53], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":24}}, + "range":[38,53], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":12}}, + "range":[38,41], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":2,"column":15},"end":{"line":2,"column":24}}, + "range":[44,53], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":17}}, + "range":[45,46], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":19},"end":{"line":2,"column":20}}, + "range":[48,49], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":22},"end":{"line":2,"column":23}}, + "range":[51,52], + "value":3, + "raw":"3" + } + ] + } + }, + "directive":null + }, + { + "type":"ForStatement", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":5,"column":5}}, + "range":[58,151], + "init":{ + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":3,"column":18}}, + "range":[63,72], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":13},"end":{"line":3,"column":18}}, + "range":[67,72], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":13},"end":{"line":3,"column":14}}, + "range":[67,68], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + 
"loc":{"source":null,"start":{"line":3,"column":17},"end":{"line":3,"column":18}}, + "range":[71,72], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":3,"column":20},"end":{"line":3,"column":34}}, + "range":[74,88], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":20},"end":{"line":3,"column":21}}, + "range":[74,75], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":3,"column":24},"end":{"line":3,"column":34}}, + "range":[78,88], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":24},"end":{"line":3,"column":27}}, + "range":[78,81], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":28},"end":{"line":3,"column":34}}, + "range":[82,88], + "name":"length", + "typeAnnotation":null, + "optional":false + }, + "computed":false + } + }, + "update":{ + "type":"UpdateExpression", + "loc":{"source":null,"start":{"line":3,"column":36},"end":{"line":3,"column":39}}, + "range":[90,93], + "operator":"++", + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":36},"end":{"line":3,"column":37}}, + "range":[90,91], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "prefix":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":41},"end":{"line":5,"column":5}}, + "range":[95,151], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":4,"column":8},"end":{"line":4,"column":48}}, + "range":[105,145], + "expression":{ + "type":"YieldExpression", + "loc":{"source":null,"start":{"line":4,"column":8},"end":{"line":4,"column":20}}, + "range":[105,117], + "argument":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":4,"column":14},"end":{"line":4,"column":20}}, + "range":[111,117], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":14},"end":{"line":4,"column":17}}, + "range":[111,114], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":18},"end":{"line":4,"column":19}}, + "range":[115,116], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "computed":true + }, + "delegate":false + }, + "directive":null + } + ] + } + } + ] + }, + "async":false, + "generator":true, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":8,"column":0},"end":{"line":13,"column":1}}, + "range":[155,330], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":8,"column":10},"end":{"line":8,"column":24}}, + "range":[165,179], + "name":"yield_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":8,"column":27},"end":{"line":13,"column":1}}, + "range":[182,330], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":9,"column":4},"end":{"line":9,"column":8}}, + "range":[188,192], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":9,"column":4},"end":{"line":9,"column":8}}, + "range":[188,192], + 
"name":"cont", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":9,"column":9},"end":{"line":9,"column":24}}, + "range":[193,208], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":9,"column":9},"end":{"line":9,"column":24}}, + "range":[193,208], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":9,"column":9},"end":{"line":9,"column":12}}, + "range":[193,196], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":9,"column":15},"end":{"line":9,"column":24}}, + "range":[199,208], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":9,"column":16},"end":{"line":9,"column":17}}, + "range":[200,201], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":9,"column":19},"end":{"line":9,"column":20}}, + "range":[203,204], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":9,"column":22},"end":{"line":9,"column":23}}, + "range":[206,207], + "value":3, + "raw":"3" + } + ] + } + }, + "directive":null + }, + { + "type":"ForStatement", + "loc":{"source":null,"start":{"line":10,"column":4},"end":{"line":12,"column":5}}, + "range":[213,328], + "init":{ + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":10,"column":9},"end":{"line":10,"column":18}}, + "range":[218,227], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":10,"column":13},"end":{"line":10,"column":18}}, + "range":[222,227], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":13},"end":{"line":10,"column":14}}, + "range":[222,223], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":10,"column":17},"end":{"line":10,"column":18}}, + "range":[226,227], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":10,"column":20},"end":{"line":10,"column":34}}, + "range":[229,243], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":20},"end":{"line":10,"column":21}}, + "range":[229,230], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":10,"column":24},"end":{"line":10,"column":34}}, + "range":[233,243], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":24},"end":{"line":10,"column":27}}, + "range":[233,236], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":28},"end":{"line":10,"column":34}}, + "range":[237,243], + "name":"length", + "typeAnnotation":null, + "optional":false + }, + "computed":false + } + }, + "update":{ + "type":"UpdateExpression", + "loc":{"source":null,"start":{"line":10,"column":36},"end":{"line":10,"column":39}}, + "range":[245,248], + "operator":"++", + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":36},"end":{"line":10,"column":37}}, + "range":[245,246], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "prefix":false + }, + "body":{ + "type":"BlockStatement", + 
"loc":{"source":null,"start":{"line":10,"column":41},"end":{"line":12,"column":5}}, + "range":[250,328], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":11,"column":30},"end":{"line":11,"column":70}}, + "range":[282,322], + "expression":{ + "type":"YieldExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":8},"end":{"line":11,"column":29}}, + "range":[260,281], + "value":" leading yield 1 " + } + ], + "loc":{"source":null,"start":{"line":11,"column":30},"end":{"line":11,"column":42}}, + "range":[282,294], + "argument":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":11,"column":36},"end":{"line":11,"column":42}}, + "range":[288,294], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":11,"column":36},"end":{"line":11,"column":39}}, + "range":[288,291], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":11,"column":40},"end":{"line":11,"column":41}}, + "range":[292,293], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "computed":true + }, + "delegate":false + }, + "directive":null + } + ] + } + } + ] + }, + "async":false, + "generator":true, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":15,"column":0},"end":{"line":20,"column":1}}, + "range":[332,480], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":15,"column":10},"end":{"line":15,"column":24}}, + "range":[342,356], + "name":"yield_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":15,"column":27},"end":{"line":20,"column":1}}, + "range":[359,480], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":16,"column":4},"end":{"line":16,"column":8}}, + "range":[365,369], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":16,"column":4},"end":{"line":16,"column":8}}, + "range":[365,369], + "name":"cont", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":16,"column":9},"end":{"line":16,"column":24}}, + "range":[370,385], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":16,"column":9},"end":{"line":16,"column":24}}, + "range":[370,385], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":16,"column":9},"end":{"line":16,"column":12}}, + "range":[370,373], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":16,"column":15},"end":{"line":16,"column":24}}, + "range":[376,385], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":16,"column":16},"end":{"line":16,"column":17}}, + "range":[377,378], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":16,"column":19},"end":{"line":16,"column":20}}, + "range":[380,381], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":16,"column":22},"end":{"line":16,"column":23}}, + "range":[383,384], + "value":3, + "raw":"3" + } + ] + } + }, + "directive":null + }, + { + "type":"ForStatement", + 
"loc":{"source":null,"start":{"line":17,"column":4},"end":{"line":19,"column":5}}, + "range":[390,478], + "init":{ + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":17,"column":9},"end":{"line":17,"column":18}}, + "range":[395,404], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":17,"column":13},"end":{"line":17,"column":18}}, + "range":[399,404], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":13},"end":{"line":17,"column":14}}, + "range":[399,400], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":17,"column":17},"end":{"line":17,"column":18}}, + "range":[403,404], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":17,"column":20},"end":{"line":17,"column":34}}, + "range":[406,420], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":20},"end":{"line":17,"column":21}}, + "range":[406,407], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":17,"column":24},"end":{"line":17,"column":34}}, + "range":[410,420], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":24},"end":{"line":17,"column":27}}, + "range":[410,413], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":28},"end":{"line":17,"column":34}}, + "range":[414,420], + "name":"length", + "typeAnnotation":null, + "optional":false + }, + "computed":false + } + }, + "update":{ + "type":"UpdateExpression", + "loc":{"source":null,"start":{"line":17,"column":36},"end":{"line":17,"column":39}}, + "range":[422,425], + "operator":"++", + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":17,"column":36},"end":{"line":17,"column":37}}, + "range":[422,423], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "prefix":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":17,"column":41},"end":{"line":19,"column":5}}, + "range":[427,478], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":18,"column":30},"end":{"line":18,"column":43}}, + "range":[459,472], + "expression":{ + "type":"YieldExpression", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":18,"column":8},"end":{"line":18,"column":29}}, + "range":[437,458], + "value":" leading yield 2 " + } + ], + "loc":{"source":null,"start":{"line":18,"column":30},"end":{"line":18,"column":42}}, + "range":[459,471], + "argument":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":18,"column":36},"end":{"line":18,"column":42}}, + "range":[465,471], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":18,"column":36},"end":{"line":18,"column":39}}, + "range":[465,468], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":18,"column":40},"end":{"line":18,"column":41}}, + "range":[469,470], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "computed":true + }, + "delegate":false + }, + "directive":null + } + ] + } + } + ] + }, + "async":false, + "generator":true, + "predicate":null, + 
"expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":22,"column":0},"end":{"line":27,"column":1}}, + "range":[482,659], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":22,"column":10},"end":{"line":22,"column":24}}, + "range":[492,506], + "name":"yield_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":22,"column":27},"end":{"line":27,"column":1}}, + "range":[509,659], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":23,"column":4},"end":{"line":23,"column":8}}, + "range":[515,519], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":23,"column":4},"end":{"line":23,"column":8}}, + "range":[515,519], + "name":"cont", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":23,"column":9},"end":{"line":23,"column":24}}, + "range":[520,535], + "expression":{ + "type":"AssignmentExpression", + "loc":{"source":null,"start":{"line":23,"column":9},"end":{"line":23,"column":24}}, + "range":[520,535], + "operator":"=", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":23,"column":9},"end":{"line":23,"column":12}}, + "range":[520,523], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":23,"column":15},"end":{"line":23,"column":24}}, + "range":[526,535], + "elements":[ + { + "type":"Literal", + "loc":{"source":null,"start":{"line":23,"column":16},"end":{"line":23,"column":17}}, + "range":[527,528], + "value":1, + "raw":"1" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":23,"column":19},"end":{"line":23,"column":20}}, + "range":[530,531], + "value":2, + "raw":"2" + }, + { + "type":"Literal", + "loc":{"source":null,"start":{"line":23,"column":22},"end":{"line":23,"column":23}}, + "range":[533,534], + "value":3, + "raw":"3" + } + ] + } + }, + "directive":null + }, + { + "type":"ForStatement", + "loc":{"source":null,"start":{"line":24,"column":4},"end":{"line":26,"column":5}}, + "range":[540,657], + "init":{ + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":24,"column":9},"end":{"line":24,"column":18}}, + "range":[545,554], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":24,"column":13},"end":{"line":24,"column":18}}, + "range":[549,554], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":13},"end":{"line":24,"column":14}}, + "range":[549,550], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":24,"column":17},"end":{"line":24,"column":18}}, + "range":[553,554], + "value":0, + "raw":"0" + } + } + ], + "kind":"var" + }, + "test":{ + "type":"BinaryExpression", + "loc":{"source":null,"start":{"line":24,"column":20},"end":{"line":24,"column":34}}, + "range":[556,570], + "operator":"<", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":20},"end":{"line":24,"column":21}}, + "range":[556,557], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":24,"column":24},"end":{"line":24,"column":34}}, + "range":[560,570], + 
"object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":24},"end":{"line":24,"column":27}}, + "range":[560,563], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":28},"end":{"line":24,"column":34}}, + "range":[564,570], + "name":"length", + "typeAnnotation":null, + "optional":false + }, + "computed":false + } + }, + "update":{ + "type":"UpdateExpression", + "loc":{"source":null,"start":{"line":24,"column":36},"end":{"line":24,"column":39}}, + "range":[572,575], + "operator":"++", + "argument":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":24,"column":36},"end":{"line":24,"column":37}}, + "range":[572,573], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "prefix":false + }, + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":24,"column":41},"end":{"line":26,"column":5}}, + "range":[577,657], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":25,"column":8},"end":{"line":25,"column":72}}, + "range":[587,651], + "expression":{ + "type":"YieldExpression", + "loc":{"source":null,"start":{"line":25,"column":8},"end":{"line":25,"column":44}}, + "range":[587,623], + "argument":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":25,"column":38},"end":{"line":25,"column":44}}, + "range":[617,623], + "object":{ + "type":"Identifier", + "leadingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":25,"column":14},"end":{"line":25,"column":37}}, + "range":[593,616], + "value":" leading yield val " + } + ], + "loc":{"source":null,"start":{"line":25,"column":38},"end":{"line":25,"column":41}}, + "range":[617,620], + "name":"arr", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":25,"column":42},"end":{"line":25,"column":43}}, + "range":[621,622], + "name":"i", + "typeAnnotation":null, + "optional":false + }, + "computed":true + }, + "delegate":false + }, + "directive":null + } + ] + } + } + ] + }, + "async":false, + "generator":true, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + { + "type":"FunctionDeclaration", + "loc":{"source":null,"start":{"line":29,"column":0},"end":{"line":31,"column":1}}, + "range":[661,730], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":29,"column":10},"end":{"line":29,"column":24}}, + "range":[671,685], + "name":"yield_trailing", + "typeAnnotation":null, + "optional":false + }, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":29,"column":27},"end":{"line":31,"column":1}}, + "range":[688,730], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":30,"column":4},"end":{"line":30,"column":38}}, + "range":[694,728], + "expression":{ + "type":"YieldExpression", + "trailingComments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":30,"column":10},"end":{"line":30,"column":37}}, + "range":[700,727], + "value":" trailing no yield val " + } + ], + "loc":{"source":null,"start":{"line":30,"column":4},"end":{"line":30,"column":9}}, + "range":[694,699], + "argument":null, + "delegate":false + }, + "directive":null + } + ] + }, + "async":false, + "generator":true, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + } + ], + "comments":[ + { + "type":"Block", + 
"loc":{"source":null,"start":{"line":4,"column":21},"end":{"line":4,"column":47}}, + "range":[118,144], + "value":" trailing yield val 1 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":8},"end":{"line":11,"column":29}}, + "range":[260,281], + "value":" leading yield 1 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":11,"column":43},"end":{"line":11,"column":69}}, + "range":[295,321], + "value":" trailing yield val 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":18,"column":8},"end":{"line":18,"column":29}}, + "range":[437,458], + "value":" leading yield 2 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":25,"column":14},"end":{"line":25,"column":37}}, + "range":[593,616], + "value":" leading yield val " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":25,"column":45},"end":{"line":25,"column":71}}, + "range":[624,650], + "value":" trailing yield val 3 " + }, + { + "type":"Block", + "loc":{"source":null,"start":{"line":30,"column":10},"end":{"line":30,"column":37}}, + "range":[700,727], + "value":" trailing no yield val " + } + ] +} diff --git a/src/parser/test/flow/decorators_invalid/migrated_0000.tree.json b/src/parser/test/flow/decorators_invalid/migrated_0000.tree.json index ef0945a4b3d..4c97e096d11 100644 --- a/src/parser/test/flow/decorators_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/decorators_invalid/migrated_0000.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, - "message":"Unexpected token @" + "message":"Unexpected token `@`, expected the token `class`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -31,8 +35,8 @@ "range":[27,27], "body":[] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/decorators_invalid/migrated_0001.tree.json b/src/parser/test/flow/decorators_invalid/migrated_0001.tree.json index e7489ee5efd..290459919e4 100644 --- a/src/parser/test/flow/decorators_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/decorators_invalid/migrated_0001.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/decorators_invalid/migrated_0002.tree.json b/src/parser/test/flow/decorators_invalid/migrated_0002.tree.json index 177ca4a424b..9c47f1f9d69 100644 --- a/src/parser/test/flow/decorators_invalid/migrated_0002.tree.json +++ b/src/parser/test/flow/decorators_invalid/migrated_0002.tree.json @@ -6,15 +6,19 @@ }, { "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":13}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the identifier `as`" }, { "loc":{"source":null,"start":{"line":2,"column":14},"end":{"line":2,"column":19}}, - "message":"Unexpected string" + "message":"Unexpected string, expected an identifier" }, { 
"loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the identifier `from`" + }, + { + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, + "message":"Unexpected end of input, expected a string" } ], "type":"Program", diff --git a/src/parser/test/flow/destructuring_with_default_values/obj_assignment_pattern_with_literal_key.tree.json b/src/parser/test/flow/destructuring_with_default_values/obj_assignment_pattern_with_literal_key.tree.json index 97d031eeecd..6841bc7cac2 100644 --- a/src/parser/test/flow/destructuring_with_default_values/obj_assignment_pattern_with_literal_key.tree.json +++ b/src/parser/test/flow/destructuring_with_default_values/obj_assignment_pattern_with_literal_key.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":8}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `:`" } ], "type":"Program", diff --git a/src/parser/test/flow/dynamic_import/migrated_0005.tree.json b/src/parser/test/flow/dynamic_import/migrated_0005.tree.json index fb228d74eac..eb3081a4da7 100644 --- a/src/parser/test/flow/dynamic_import/migrated_0005.tree.json +++ b/src/parser/test/flow/dynamic_import/migrated_0005.tree.json @@ -2,15 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":24}}, - "message":"Unexpected token ," + "message":"Unexpected token `,`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":29}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, + "message":"Unexpected token `)`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/dynamic_import/migrated_0006.tree.json b/src/parser/test/flow/dynamic_import/migrated_0006.tree.json index 477820beb15..775369abcdc 100644 --- a/src/parser/test/flow/dynamic_import/migrated_0006.tree.json +++ b/src/parser/test/flow/dynamic_import/migrated_0006.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, - "message":"Unexpected string" + "message":"Unexpected string, expected the token `(`" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, + "message":"Unexpected token `;`, expected the token `)`" } ], "type":"Program", diff --git a/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.js b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.js new file mode 100644 index 00000000000..4fecb537fb2 --- /dev/null +++ b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.js @@ -0,0 +1,3 @@ +enum E of boolean { + A, +} diff --git a/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.options.json b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.options.json new file mode 
100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.tree.json b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.tree.json new file mode 100644 index 00000000000..dc98a2c6945 --- /dev/null +++ b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-explicit.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "message":"Boolean enum members need to be initialized. Use either `A = true,` or `A = false,` in enum `E`." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,26], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,26], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,26], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.js b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.js new file mode 100644 index 00000000000..73704345dc4 --- /dev/null +++ b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.js @@ -0,0 +1,4 @@ +enum E { + A, + B = true, +} diff --git a/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.options.json b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.tree.json b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.tree.json new file mode 100644 index 00000000000..ee47cda650f --- /dev/null +++ b/src/parser/test/flow/enums/enum-boolean-member-not-initialized-implicit.tree.json @@ -0,0 +1,49 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "message":"Boolean enum members need to be initialized. Use either `A = true,` or `A = false,` in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,27], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,27], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,27], + "members":[ + { + "type":"EnumBooleanMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":10}}, + "range":[16,24], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":true + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-duplicate-member-name.js b/src/parser/test/flow/enums/enum-duplicate-member-name.js new file mode 100644 index 00000000000..c9fde50253a --- /dev/null +++ b/src/parser/test/flow/enums/enum-duplicate-member-name.js @@ -0,0 +1,4 @@ +enum E { + A, + A, +} diff --git a/src/parser/test/flow/enums/enum-duplicate-member-name.options.json b/src/parser/test/flow/enums/enum-duplicate-member-name.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-duplicate-member-name.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-duplicate-member-name.tree.json b/src/parser/test/flow/enums/enum-duplicate-member-name.tree.json new file mode 100644 index 00000000000..b94e4c6c652 --- /dev/null +++ b/src/parser/test/flow/enums/enum-duplicate-member-name.tree.json @@ -0,0 +1,61 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "message":"Enum member names need to be unique, but the name `A` has already been used before in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "name":"A", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-end-of-file.js b/src/parser/test/flow/enums/enum-end-of-file.js new file mode 100644 index 00000000000..e3caefb45c4 --- /dev/null +++ b/src/parser/test/flow/enums/enum-end-of-file.js @@ -0,0 +1 @@ +enum diff --git a/src/parser/test/flow/enums/enum-end-of-file.options.json b/src/parser/test/flow/enums/enum-end-of-file.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-end-of-file.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-end-of-file.tree.json b/src/parser/test/flow/enums/enum-end-of-file.tree.json new file mode 100644 index 00000000000..51aaf720b80 --- /dev/null +++ b/src/parser/test/flow/enums/enum-end-of-file.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected an identifier" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, + "range":[0,5], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, + "range":[0,5], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "range":[5,5], + "name":"", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, + "range":[0,5], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-export.js b/src/parser/test/flow/enums/enum-export.js new file mode 100644 index 00000000000..ebdc5a4a8bb --- /dev/null +++ b/src/parser/test/flow/enums/enum-export.js @@ -0,0 +1,3 @@ +export 
enum A {} + +export default enum B {} diff --git a/src/parser/test/flow/enums/enum-export.options.json b/src/parser/test/flow/enums/enum-export.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-export.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-export.tree.json b/src/parser/test/flow/enums/enum-export.tree.json new file mode 100644 index 00000000000..ab715c60e20 --- /dev/null +++ b/src/parser/test/flow/enums/enum-export.tree.json @@ -0,0 +1,62 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":24}}, + "range":[0,42], + "body":[ + { + "type":"ExportNamedDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "declaration":{ + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":16}}, + "range":[7,16], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":13}}, + "range":[12,13], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":16}}, + "range":[7,16], + "members":[], + "explicitType":false + } + }, + "specifiers":[], + "source":null, + "exportKind":"value" + }, + { + "type":"ExportDefaultDeclaration", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":24}}, + "range":[18,42], + "declaration":{ + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":3,"column":15},"end":{"line":3,"column":24}}, + "range":[33,42], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":20},"end":{"line":3,"column":21}}, + "range":[38,39], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":3,"column":15},"end":{"line":3,"column":24}}, + "range":[33,42], + "members":[], + "explicitType":false + } + }, + "exportKind":"value" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.js b/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.js new file mode 100644 index 00000000000..5a639f10711 --- /dev/null +++ b/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.js @@ -0,0 +1,5 @@ +enum E { + A, + B, + C = 3, +} diff --git a/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.options.json b/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.tree.json b/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.tree.json new file mode 100644 index 00000000000..6b54dc964a6 --- /dev/null +++ b/src/parser/test/flow/enums/enum-inconsistent-member-values-majority-defaulted.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "message":"Enum `E` has inconsistent member initializers. 
Either use no initializers, or consistently use literals (either booleans, numbers, or strings) for all member initializers." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,29], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,29], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,29], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.js b/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.js new file mode 100644 index 00000000000..b0b10ec4d43 --- /dev/null +++ b/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.js @@ -0,0 +1,4 @@ +enum E { + A = 1, + B = true, +} diff --git a/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.options.json b/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.tree.json b/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.tree.json new file mode 100644 index 00000000000..90042e8fa9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-inconsistent-member-values-mixed-initializers.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "message":"Enum `E` has inconsistent member initializers. Either use no initializers, or consistently use literals (either booleans, numbers, or strings) for all member initializers." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,31], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,31], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,31], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.js b/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.js new file mode 100644 index 00000000000..e3353ef7887 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.js @@ -0,0 +1,2 @@ +enum E of [] { +} diff --git a/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.options.json b/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.tree.json b/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.tree.json new file mode 100644 index 00000000000..e0a6c3924e0 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-explicit-type-expression.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "message":"Supplied enum type is not valid. Use one of `boolean`, `number`, `string`, or `symbol` in enum `E`." 
+ }, + { + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "message":"Unexpected token `]`, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, + "message":"Unexpected token `{`, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,16], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,16], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,16], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.js b/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.js new file mode 100644 index 00000000000..25bbeb7024e --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.js @@ -0,0 +1,2 @@ +enum E of xxx { +} diff --git a/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.options.json b/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.tree.json b/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.tree.json new file mode 100644 index 00000000000..8ff60faefb2 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-explicit-type-identifier.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "message":"Enum type `xxx` is not valid. Use one of `boolean`, `number`, `string`, or `symbol` in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,17], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,17], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.js new file mode 100644 index 00000000000..31e79ef9f11 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.js @@ -0,0 +1,3 @@ +enum E of string { + A = true, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.tree.json new file mode 100644 index 00000000000..fa6fd0c3ca1 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-boolean-explicit-string.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":10}}, + "message":"Enum `E` has type `string`, so the initializer of `A` needs to be a string literal." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,32], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,32], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.js new file mode 100644 index 00000000000..d4f1afa92f0 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.js @@ -0,0 +1,3 @@ +enum E of boolean { + A = 1 + 2, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.tree.json new file mode 100644 index 00000000000..4bc32768d85 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-boolean.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Enum `E` has type `boolean`, so the initializer of `A` needs to be a boolean literal." 
+ }, + { + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "message":"Unexpected token `+`, expected the token `,`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "message":"Unexpected number, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,34], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,34], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,34], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.js new file mode 100644 index 00000000000..a6f7ed2152c --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.js @@ -0,0 +1,3 @@ +enum E of number { + A = 1 + 2, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.tree.json new file mode 100644 index 00000000000..2a5aa5a6135 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-number.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Enum `E` has type `number`, so the initializer of `A` needs to be a number literal." 
+ }, + { + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "message":"Unexpected token `+`, expected the token `,`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "message":"Unexpected number, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumNumberBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.js new file mode 100644 index 00000000000..704efcf615b --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.js @@ -0,0 +1,3 @@ +enum E of string { + A = 1 + 2, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.tree.json new file mode 100644 index 00000000000..b476733ac91 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-string.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Enum `E` has type `string`, so the initializer of `A` needs to be a string literal." 
+ }, + { + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "message":"Unexpected token `+`, expected the token `,`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "message":"Unexpected number, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.js new file mode 100644 index 00000000000..d4acb6dfd23 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.js @@ -0,0 +1,3 @@ +enum E of symbol { + A = 1 + 2, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.tree.json new file mode 100644 index 00000000000..966997c472e --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-explicit-symbol.tree.json @@ -0,0 +1,41 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Symbol enum members cannot be initialized. Use `A,` in enum `E`." 
+ }, + { + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "message":"Unexpected token `+`, expected the token `,`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "message":"Unexpected number, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumSymbolBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "members":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.js new file mode 100644 index 00000000000..bed31f7fdc1 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.js @@ -0,0 +1,3 @@ +enum E { + A = 1 + 2, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.tree.json new file mode 100644 index 00000000000..ad972313ff9 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-expression-implicit.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"The enum member initializer for `A` needs to be a literal (either a boolean, number, or string) in enum `E`." 
+ }, + { + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "message":"Unexpected token `+`, expected the token `,`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "message":"Unexpected number, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,23], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,23], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,23], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.js new file mode 100644 index 00000000000..dc8429b7534 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.js @@ -0,0 +1,3 @@ +enum E of symbol { + A = 1, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.tree.json new file mode 100644 index 00000000000..41a49786be5 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-literal-explicit-symbol.tree.json @@ -0,0 +1,33 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Symbol enum members cannot be initialized. Use `A,` in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumSymbolBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "members":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.js new file mode 100644 index 00000000000..81e1097efdd --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.js @@ -0,0 +1,3 @@ +enum E of boolean { + A = 1, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.tree.json new file mode 100644 index 00000000000..5833a0751a5 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-boolean.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Enum `E` has type `boolean`, so the initializer of `A` needs to be a boolean literal." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,30], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,30], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,30], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.js new file mode 100644 index 00000000000..708914906e1 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.js @@ -0,0 +1,3 @@ +enum E of string { + A = 1, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.tree.json new file mode 100644 index 00000000000..bff49f19a3f --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-number-explicit-string.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"Enum `E` has type `string`, so the initializer of `A` needs to be a string literal." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,29], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.js new file mode 100644 index 00000000000..92cc2012d90 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.js @@ -0,0 +1,3 @@ +enum E { + A = (1), +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.tree.json new file mode 100644 index 00000000000..563e7b9975d --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-parenthesized.tree.json @@ -0,0 +1,42 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "message":"The enum member initializer for `A` needs to be a literal (either a boolean, number, or string) in enum `E`." 
+ }, + { + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":8}}, + "message":"Unexpected number, expected the token `,`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "message":"Unexpected token `)`, expected an identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,21], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,21], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,21], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.js new file mode 100644 index 00000000000..86674e7f205 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.js @@ -0,0 +1,3 @@ +enum E of boolean { + A = "hi", +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.tree.json new file mode 100644 index 00000000000..80a181e85a3 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-boolean.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":10}}, + "message":"Enum `E` has type `boolean`, so the initializer of `A` needs to be a boolean literal." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,33], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.js b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.js new file mode 100644 index 00000000000..5b0a7937af0 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.js @@ -0,0 +1,3 @@ +enum E of number { + A = "hi", +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.options.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.tree.json b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.tree.json new file mode 100644 index 00000000000..4a37fef7d09 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-initializer-string-explicit-number.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":10}}, + "message":"Enum `E` has type `number`, so the initializer of `A` needs to be a number literal." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,32], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumNumberBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,32], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-name.js b/src/parser/test/flow/enums/enum-invalid-member-name.js new file mode 100644 index 00000000000..3604a85bb2d --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-name.js @@ -0,0 +1,4 @@ +enum E { + foo, + bar, +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-name.options.json b/src/parser/test/flow/enums/enum-invalid-member-name.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-name.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-invalid-member-name.tree.json b/src/parser/test/flow/enums/enum-invalid-member-name.tree.json new file mode 100644 index 00000000000..1b284b6e832 --- /dev/null +++ b/src/parser/test/flow/enums/enum-invalid-member-name.tree.json @@ -0,0 +1,65 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "message":"Enum member names cannot start with lowercase 'a' through 'z'. Instead of using `foo`, consider using `Foo`, in enum `E`." + }, + { + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "message":"Enum member names cannot start with lowercase 'a' through 'z'. Instead of using `bar`, consider using `Bar`, in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,24], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,24], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,24], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[11,14], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[11,14], + "name":"foo", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "range":[18,21], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "range":[18,21], + "name":"bar", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.js b/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.js new file mode 100644 index 00000000000..7f4f9cb05bb --- /dev/null +++ b/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.js @@ -0,0 +1,3 @@ +enum E of number { + A, +} diff --git a/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.options.json b/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.tree.json b/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.tree.json new file mode 100644 index 00000000000..a5cbafb0c2b --- /dev/null +++ b/src/parser/test/flow/enums/enum-number-member-not-initialized-explicit.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "message":"Number enum members need to be initialized, e.g. `A = 1,` in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumNumberBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "members":[], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.js b/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.js new file mode 100644 index 00000000000..a435e44e3da --- /dev/null +++ b/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.js @@ -0,0 +1,4 @@ +enum E { + A, + B = 1, +} diff --git a/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.options.json b/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.tree.json b/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.tree.json new file mode 100644 index 00000000000..f09a1099d78 --- /dev/null +++ b/src/parser/test/flow/enums/enum-number-member-not-initialized-implicit.tree.json @@ -0,0 +1,55 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "message":"Number enum members need to be initialized, e.g. `A = 1,` in enum `E`." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,24], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,24], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumNumberBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,24], + "members":[ + { + "type":"EnumNumberMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":7}}, + "range":[16,21], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":7}}, + "range":[20,21], + "value":1, + "raw":"1" + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-option-off.js b/src/parser/test/flow/enums/enum-option-off.js new file mode 100644 index 00000000000..b74648fd612 --- /dev/null +++ b/src/parser/test/flow/enums/enum-option-off.js @@ -0,0 +1,4 @@ +enum E { + A, + B, +} diff --git a/src/parser/test/flow/enums/enum-option-off.options.json b/src/parser/test/flow/enums/enum-option-off.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-option-off.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-option-off.tree.json b/src/parser/test/flow/enums/enum-option-off.tree.json new file mode 100644 index 00000000000..1872bb5c80a --- /dev/null +++ b/src/parser/test/flow/enums/enum-option-off.tree.json @@ -0,0 +1,55 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "name":"B", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-reserved-word-enum-name.js b/src/parser/test/flow/enums/enum-reserved-word-enum-name.js new file mode 100644 index 00000000000..95350dba2f8 --- /dev/null 
+++ b/src/parser/test/flow/enums/enum-reserved-word-enum-name.js @@ -0,0 +1,2 @@ +enum class { +} diff --git a/src/parser/test/flow/enums/enum-reserved-word-enum-name.options.json b/src/parser/test/flow/enums/enum-reserved-word-enum-name.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-reserved-word-enum-name.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-reserved-word-enum-name.tree.json b/src/parser/test/flow/enums/enum-reserved-word-enum-name.tree.json new file mode 100644 index 00000000000..9dc947a665e --- /dev/null +++ b/src/parser/test/flow/enums/enum-reserved-word-enum-name.tree.json @@ -0,0 +1,34 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":10}}, + "message":"Unexpected token `class`" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,14], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,14], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":10}}, + "range":[5,10], + "name":"class", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,14], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.js b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.js new file mode 100644 index 00000000000..279605f466c --- /dev/null +++ b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.js @@ -0,0 +1,5 @@ +enum E of string { + A = "a", + B, + C, +} diff --git a/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.options.json b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.tree.json b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.tree.json new file mode 100644 index 00000000000..3081294d468 --- /dev/null +++ b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-defaulted.tree.json @@ -0,0 +1,61 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":9}}, + "message":"String enum members need to consistently either all use initializers, or use no initializers, in enum E." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,41], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,41], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,41], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[32,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[32,33], + "name":"B", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":3}}, + "range":[37,38], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":3}}, + "range":[37,38], + "name":"C", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.js b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.js new file mode 100644 index 00000000000..e8db50effe4 --- /dev/null +++ b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.js @@ -0,0 +1,5 @@ +enum E of string { + A, + B = "b", + C = "c", +} diff --git a/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.options.json b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.tree.json b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.tree.json new file mode 100644 index 00000000000..a5d44413d3a --- /dev/null +++ b/src/parser/test/flow/enums/enum-string-member-inconsistently-initialized-majority-initialized.tree.json @@ -0,0 +1,75 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "message":"String enum members need to consistently either all use initializers, or use no initializers, in enum E." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,47], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,47], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":5,"column":1}}, + "range":[0,47], + "members":[ + { + "type":"EnumStringMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":9}}, + "range":[26,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[26,27], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":9}}, + "range":[30,33], + "value":"b", + "raw":"\"b\"" + } + }, + { + "type":"EnumStringMember", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":9}}, + "range":[37,44], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":3}}, + "range":[37,38], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":6},"end":{"line":4,"column":9}}, + "range":[41,44], + "value":"c", + "raw":"\"c\"" + } + } + ], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-boolean-explicit.js b/src/parser/test/flow/enums/enum-valid-boolean-explicit.js new file mode 100644 index 00000000000..7b63f4eb296 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-boolean-explicit.js @@ -0,0 +1,4 @@ +enum E of boolean { + A = true, + B = false, +} diff --git a/src/parser/test/flow/enums/enum-valid-boolean-explicit.options.json b/src/parser/test/flow/enums/enum-valid-boolean-explicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-boolean-explicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-boolean-explicit.tree.json b/src/parser/test/flow/enums/enum-valid-boolean-explicit.tree.json new file mode 100644 index 00000000000..079ca091e9b --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-boolean-explicit.tree.json @@ -0,0 +1,57 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,46], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,46], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,46], + "members":[ + { + "type":"EnumBooleanMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":10}}, + "range":[22,30], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[22,23], + "name":"A", + 
"typeAnnotation":null, + "optional":false + }, + "init":true + }, + { + "type":"EnumBooleanMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":11}}, + "range":[34,43], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[34,35], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":false + } + ], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-boolean-implicit.js b/src/parser/test/flow/enums/enum-valid-boolean-implicit.js new file mode 100644 index 00000000000..6ab49ea7b2b --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-boolean-implicit.js @@ -0,0 +1,4 @@ +enum E { + A = true, + B = false, +} diff --git a/src/parser/test/flow/enums/enum-valid-boolean-implicit.options.json b/src/parser/test/flow/enums/enum-valid-boolean-implicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-boolean-implicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-boolean-implicit.tree.json b/src/parser/test/flow/enums/enum-valid-boolean-implicit.tree.json new file mode 100644 index 00000000000..51f2d51af93 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-boolean-implicit.tree.json @@ -0,0 +1,57 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,35], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,35], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumBooleanBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,35], + "members":[ + { + "type":"EnumBooleanMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":10}}, + "range":[11,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "init":true + }, + { + "type":"EnumBooleanMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":11}}, + "range":[23,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[23,24], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":false + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-empty.js b/src/parser/test/flow/enums/enum-valid-empty.js new file mode 100644 index 00000000000..714f607bf33 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-empty.js @@ -0,0 +1 @@ +enum E { } diff --git a/src/parser/test/flow/enums/enum-valid-empty.options.json b/src/parser/test/flow/enums/enum-valid-empty.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-empty.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-empty.tree.json b/src/parser/test/flow/enums/enum-valid-empty.tree.json new file mode 100644 index 00000000000..57593f89e02 --- /dev/null +++ 
b/src/parser/test/flow/enums/enum-valid-empty.tree.json @@ -0,0 +1,28 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-name.js b/src/parser/test/flow/enums/enum-valid-name.js new file mode 100644 index 00000000000..8d057902fa7 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-name.js @@ -0,0 +1,2 @@ +enum type { +} diff --git a/src/parser/test/flow/enums/enum-valid-name.options.json b/src/parser/test/flow/enums/enum-valid-name.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-name.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-name.tree.json b/src/parser/test/flow/enums/enum-valid-name.tree.json new file mode 100644 index 00000000000..8b26e8113cb --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-name.tree.json @@ -0,0 +1,28 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,13], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,13], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":9}}, + "range":[5,9], + "name":"type", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":1}}, + "range":[0,13], + "members":[], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-no-trailing-comma.js b/src/parser/test/flow/enums/enum-valid-no-trailing-comma.js new file mode 100644 index 00000000000..228a928299d --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-no-trailing-comma.js @@ -0,0 +1,3 @@ +enum E { + A +} diff --git a/src/parser/test/flow/enums/enum-valid-no-trailing-comma.options.json b/src/parser/test/flow/enums/enum-valid-no-trailing-comma.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-no-trailing-comma.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-no-trailing-comma.tree.json b/src/parser/test/flow/enums/enum-valid-no-trailing-comma.tree.json new file mode 100644 index 00000000000..1abf2e5d4bc --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-no-trailing-comma.tree.json @@ -0,0 +1,42 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,14], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,14], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + 
"range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,14], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-number-explicit.js b/src/parser/test/flow/enums/enum-valid-number-explicit.js new file mode 100644 index 00000000000..f597c5898aa --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-number-explicit.js @@ -0,0 +1,4 @@ +enum E of number { + A = 1, + B = 2, +} diff --git a/src/parser/test/flow/enums/enum-valid-number-explicit.options.json b/src/parser/test/flow/enums/enum-valid-number-explicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-number-explicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-number-explicit.tree.json b/src/parser/test/flow/enums/enum-valid-number-explicit.tree.json new file mode 100644 index 00000000000..4d38f52d0bc --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-number-explicit.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,38], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,38], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumNumberBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,38], + "members":[ + { + "type":"EnumNumberMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[21,26], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[21,22], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "range":[25,26], + "value":1, + "raw":"1" + } + }, + { + "type":"EnumNumberMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":7}}, + "range":[30,35], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[30,31], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":7}}, + "range":[34,35], + "value":2, + "raw":"2" + } + } + ], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-number-implicit.js b/src/parser/test/flow/enums/enum-valid-number-implicit.js new file mode 100644 index 00000000000..29b4595e148 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-number-implicit.js @@ -0,0 +1,4 @@ +enum E { + A = 0, + B = 1, +} diff --git 
a/src/parser/test/flow/enums/enum-valid-number-implicit.options.json b/src/parser/test/flow/enums/enum-valid-number-implicit.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-number-implicit.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-number-implicit.tree.json b/src/parser/test/flow/enums/enum-valid-number-implicit.tree.json new file mode 100644 index 00000000000..561924ad081 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-number-implicit.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,28], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,28], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumNumberBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,28], + "members":[ + { + "type":"EnumNumberMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":7}}, + "range":[11,16], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":7}}, + "range":[15,16], + "value":0, + "raw":"0" + } + }, + { + "type":"EnumNumberMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":7}}, + "range":[20,25], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[20,21], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":7}}, + "range":[24,25], + "value":1, + "raw":"1" + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.js b/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.js new file mode 100644 index 00000000000..0d3f7fed837 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.js @@ -0,0 +1,4 @@ +enum E of string { + A, + B, +} diff --git a/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.options.json b/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.tree.json b/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.tree.json new file mode 100644 index 00000000000..fc2899cc786 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-explicit-defaulted.tree.json @@ -0,0 +1,55 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,30], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + 
"range":[0,30], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,30], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[21,22], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[21,22], + "name":"A", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[26,27], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[26,27], + "name":"B", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.js b/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.js new file mode 100644 index 00000000000..4d27120ac56 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.js @@ -0,0 +1,4 @@ +enum E of string { + A = "a", + B = "b", +} diff --git a/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.options.json b/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.tree.json b/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.tree.json new file mode 100644 index 00000000000..ef9d2a5bf19 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-explicit-initialized.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,42], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,42], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,42], + "members":[ + { + "type":"EnumStringMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":9}}, + "range":[21,28], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[21,22], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":9}}, + "range":[25,28], + "value":"a", + "raw":"\"a\"" + } + }, + { + "type":"EnumStringMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":9}}, + "range":[32,39], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[32,33], + "name":"B", + 
"typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":9}}, + "range":[36,39], + "value":"b", + "raw":"\"b\"" + } + } + ], + "explicitType":true + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.js b/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.js new file mode 100644 index 00000000000..b74648fd612 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.js @@ -0,0 +1,4 @@ +enum E { + A, + B, +} diff --git a/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.options.json b/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.tree.json b/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.tree.json new file mode 100644 index 00000000000..1872bb5c80a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-implicit-defaulted.tree.json @@ -0,0 +1,55 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,20], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[16,17], + "name":"B", + "typeAnnotation":null, + "optional":false + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.js b/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.js new file mode 100644 index 00000000000..d1b1c29fc8a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.js @@ -0,0 +1,4 @@ +enum E { + A = "a", + B = "b", +} diff --git a/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.options.json b/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.tree.json b/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.tree.json new file mode 100644 index 
00000000000..5aba7eb05d9 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-string-implicit-initialized.tree.json @@ -0,0 +1,69 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,32], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumStringBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,32], + "members":[ + { + "type":"EnumStringMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":9}}, + "range":[11,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[11,12], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":9}}, + "range":[15,18], + "value":"a", + "raw":"\"a\"" + } + }, + { + "type":"EnumStringMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":9}}, + "range":[22,29], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[22,23], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":9}}, + "range":[26,29], + "value":"b", + "raw":"\"b\"" + } + } + ], + "explicitType":false + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/enums/enum-valid-symbol.js b/src/parser/test/flow/enums/enum-valid-symbol.js new file mode 100644 index 00000000000..26e5377f5e7 --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-symbol.js @@ -0,0 +1,4 @@ +enum E of symbol { + A, + B, +} diff --git a/src/parser/test/flow/enums/enum-valid-symbol.options.json b/src/parser/test/flow/enums/enum-valid-symbol.options.json new file mode 100644 index 00000000000..85dd67a4f9a --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-symbol.options.json @@ -0,0 +1,3 @@ +{ + "enums": true +} diff --git a/src/parser/test/flow/enums/enum-valid-symbol.tree.json b/src/parser/test/flow/enums/enum-valid-symbol.tree.json new file mode 100644 index 00000000000..31564ac75df --- /dev/null +++ b/src/parser/test/flow/enums/enum-valid-symbol.tree.json @@ -0,0 +1,54 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,30], + "body":[ + { + "type":"EnumDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,30], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"EnumSymbolBody", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,30], + "members":[ + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[21,22], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + 
"range":[21,22], + "name":"A", + "typeAnnotation":null, + "optional":false + } + }, + { + "type":"EnumDefaultedMember", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[26,27], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[26,27], + "name":"B", + "typeAnnotation":null, + "optional":false + } + } + ] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/export_import_reserved_words/migrated_0005.tree.json b/src/parser/test/flow/export_import_reserved_words/migrated_0005.tree.json index 2be8faf7af4..61e4129ba25 100644 --- a/src/parser/test/flow/export_import_reserved_words/migrated_0005.tree.json +++ b/src/parser/test/flow/export_import_reserved_words/migrated_0005.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`" } ], "type":"Program", diff --git a/src/parser/test/flow/export_import_reserved_words/migrated_0006.tree.json b/src/parser/test/flow/export_import_reserved_words/migrated_0006.tree.json index 098c59b61bd..d09436548ea 100644 --- a/src/parser/test/flow/export_import_reserved_words/migrated_0006.tree.json +++ b/src/parser/test/flow/export_import_reserved_words/migrated_0006.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`" } ], "type":"Program", diff --git a/src/parser/test/flow/export_import_reserved_words/migrated_0007.tree.json b/src/parser/test/flow/export_import_reserved_words/migrated_0007.tree.json index 6e3008e59ee..3ae3ef6a6c9 100644 --- a/src/parser/test/flow/export_import_reserved_words/migrated_0007.tree.json +++ b/src/parser/test/flow/export_import_reserved_words/migrated_0007.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":23}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`" } ], "type":"Program", diff --git a/src/parser/test/flow/export_statements/export_default_async_id_and_function.tree.json b/src/parser/test/flow/export_statements/export_default_async_id_and_function.tree.json index 64e0a84ee64..f6143b97304 100644 --- a/src/parser/test/flow/export_statements/export_default_async_id_and_function.tree.json +++ b/src/parser/test/flow/export_statements/export_default_async_id_and_function.tree.json @@ -2,19 +2,31 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":10}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the token `(`" }, { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `,`" }, { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, "message":"Rest parameter must be final parameter of an argument list" + }, + { + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, + "message":"Unexpected end of input, expected the token `)`" + }, + { + 
"loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/expression/binary-logical/and_on_lhs_of_or.js b/src/parser/test/flow/expression/binary-logical/and_on_lhs_of_or.js new file mode 100644 index 00000000000..6f2cf96a0e1 --- /dev/null +++ b/src/parser/test/flow/expression/binary-logical/and_on_lhs_of_or.js @@ -0,0 +1 @@ +x && y || z diff --git a/src/parser/test/flow/expression/binary-logical/and_on_lhs_of_or.tree.json b/src/parser/test/flow/expression/binary-logical/and_on_lhs_of_or.tree.json new file mode 100644 index 00000000000..8db6e07ebbb --- /dev/null +++ b/src/parser/test/flow/expression/binary-logical/and_on_lhs_of_or.tree.json @@ -0,0 +1,50 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "operator":"||", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, + "range":[0,6], + "operator":"&&", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"y", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"z", + "typeAnnotation":null, + "optional":false + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/for_await_loops/migrated_0000.tree.json b/src/parser/test/flow/for_await_loops/migrated_0000.tree.json index 25d7b27baf7..86d39ec76c9 100644 --- a/src/parser/test/flow/for_await_loops/migrated_0000.tree.json +++ b/src/parser/test/flow/for_await_loops/migrated_0000.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":9}}, - "message":"Unexpected token await" + "message":"Unexpected token `await`, expected the token `(`" }, { "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":14}}, - "message":"Unexpected token let" + "message":"Unexpected token `let`" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `)`" } ], "type":"Program", diff --git a/src/parser/test/flow/for_await_loops/migrated_0002.tree.json b/src/parser/test/flow/for_await_loops/migrated_0002.tree.json index b4f44ff22bc..5b520c04dd9 100644 --- a/src/parser/test/flow/for_await_loops/migrated_0002.tree.json +++ b/src/parser/test/flow/for_await_loops/migrated_0002.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":33}}, - "message":"Unexpected token 
in" + "message":"Unexpected token `in`, expected the token `of`" } ], "type":"Program", diff --git a/src/parser/test/flow/for_await_loops/migrated_0003.tree.json b/src/parser/test/flow/for_await_loops/migrated_0003.tree.json index c9590c46315..40ba061b563 100644 --- a/src/parser/test/flow/for_await_loops/migrated_0003.tree.json +++ b/src/parser/test/flow/for_await_loops/migrated_0003.tree.json @@ -6,15 +6,19 @@ }, { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected the token `of`" }, { "loc":{"source":null,"start":{"line":1,"column":41},"end":{"line":1,"column":42}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":46},"end":{"line":1,"column":47}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":46},"end":{"line":1,"column":47}}, + "message":"Unexpected token `)`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/internal_slot/declare_class_static.tree.json b/src/parser/test/flow/internal_slot/declare_class_static.tree.json index f8052cbe21e..b767811873c 100644 --- a/src/parser/test/flow/internal_slot/declare_class_static.tree.json +++ b/src/parser/test/flow/internal_slot/declare_class_static.tree.json @@ -44,13 +44,15 @@ "type":"GenericTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, "range":[34,35], - "typeParameters":null, "id":{ "type":"Identifier", "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, "range":[34,35], - "name":"T" - } + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null } } ] diff --git a/src/parser/test/flow/internal_slot/object.tree.json b/src/parser/test/flow/internal_slot/object.tree.json index 88de631ec1f..8e46960b13a 100644 --- a/src/parser/test/flow/internal_slot/object.tree.json +++ b/src/parser/test/flow/internal_slot/object.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":23}}, "range":[9,23], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[], @@ -44,12 +45,15 @@ "type":"GenericTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, "range":[20,21], - "typeParameters":null, "id":{ "type":"Identifier", "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "name":"X" - } + "range":[20,21], + "name":"X", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null } } ] diff --git a/src/parser/test/flow/internal_slot/object_method.tree.json b/src/parser/test/flow/internal_slot/object_method.tree.json index 78215bb7e7b..c174055ea1d 100644 --- a/src/parser/test/flow/internal_slot/object_method.tree.json +++ b/src/parser/test/flow/internal_slot/object_method.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":25}}, "range":[9,25], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[], diff --git a/src/parser/test/flow/internal_slot/object_optional.tree.json b/src/parser/test/flow/internal_slot/object_optional.tree.json index b15ea6a9875..9535d816f1a 100644 --- a/src/parser/test/flow/internal_slot/object_optional.tree.json 
+++ b/src/parser/test/flow/internal_slot/object_optional.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":24}}, "range":[9,24], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[], diff --git a/src/parser/test/flow/internal_slot/object_variance.tree.json b/src/parser/test/flow/internal_slot/object_variance.tree.json index b507f8e7025..9d51f39d253 100644 --- a/src/parser/test/flow/internal_slot/object_variance.tree.json +++ b/src/parser/test/flow/internal_slot/object_variance.tree.json @@ -27,6 +27,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":24}}, "range":[9,24], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[], diff --git a/src/parser/test/flow/invalid_syntax/migrated_0005.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0005.tree.json index aba854247b3..ec603ff4f39 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0005.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0005.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0006.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0006.tree.json index 048905f6ce5..e9e515c9c21 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0006.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0006.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":16}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":20}}, - "message":"Unexpected number" + "message":"Unexpected number, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, + "message":"Unexpected token `)`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0007.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0007.tree.json index 7a43958fa7e..169e0f1799f 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0007.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0007.tree.json @@ -2,19 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":15}}, - "message":"Unexpected token -" + "message":"Unexpected token `-`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `)`" }, { 
"loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0008.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0008.tree.json index aea55e812e0..ed1e82dd936 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0008.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0008.tree.json @@ -2,19 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":23}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0009.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0009.tree.json index 9e280d823bd..c7856a27996 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0009.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0009.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":16}}, - "message":"Unexpected number" + "message":"Unexpected number, expected an identifier" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0010.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0010.tree.json index c842dbaf72e..6c6644b6644 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0010.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0010.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":16},"end":{"line":2,"column":22}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0011.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0011.tree.json index 28457cdd254..d7cddc1dadd 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0011.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0011.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, - "message":"Unexpected token <" + "message":"Unexpected token `<`, expected the token `(`" }, { "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, - "message":"Unexpected token >" + "message":"Unexpected token `>`, expected the token `,`" }, { "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":14}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":10}}, diff --git 
a/src/parser/test/flow/invalid_syntax/migrated_0012.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0012.tree.json index 846f8d8da9c..5edb97f5dbb 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0012.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0012.tree.json @@ -2,19 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, - "message":"Unexpected token <" + "message":"Unexpected token `<`, expected the token `(`" }, { "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, - "message":"Unexpected token >" + "message":"Unexpected token `>`, expected the token `,`" }, { "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":14}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":14},"end":{"line":2,"column":20}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `,`" }, { "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":10}}, diff --git a/src/parser/test/flow/invalid_syntax/migrated_0013.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0013.tree.json index 9a4e2f5091d..fdc27deafbc 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0013.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0013.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":8}}, - "message":"Unexpected token enum" + "message":"Unexpected token `enum`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0014.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0014.tree.json index 25a443e78f4..2d8fe4f144f 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0014.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0014.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, - "message":"Unexpected token enum" + "message":"Unexpected token `enum`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0015.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0015.tree.json index 584294871aa..e3b4fb38b3f 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0015.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0015.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":8}}, - "message":"Unexpected token enum" + "message":"Unexpected token `enum`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0016.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0016.tree.json index 632cbbc19c0..562b4a31029 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0016.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0016.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":37},"end":{"line":1,"column":41}}, - "message":"Unexpected token enum" + "message":"Unexpected token `enum`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0017.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0017.tree.json index ff2b1a3b4ce..563f1c059b3 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0017.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0017.tree.json @@ -2,7 +2,7 @@ "errors":[ { 
"loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":20}}, - "message":"Unexpected string" + "message":"Unexpected string, expected the identifier `from`" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/migrated_0018.tree.json b/src/parser/test/flow/invalid_syntax/migrated_0018.tree.json index 58c3ebe94b6..7f5fc4ac90b 100644 --- a/src/parser/test/flow/invalid_syntax/migrated_0018.tree.json +++ b/src/parser/test/flow/invalid_syntax/migrated_0018.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":16}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the identifier `from`" }, { "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":21}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected a string" + }, + { + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":21}}, + "message":"Unexpected identifier, expected the token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":26}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":32}}, - "message":"Unexpected string" + "message":"Unexpected string, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/invalid_syntax/string_newline.js b/src/parser/test/flow/invalid_syntax/string_newline.js new file mode 100644 index 00000000000..7325139a82c --- /dev/null +++ b/src/parser/test/flow/invalid_syntax/string_newline.js @@ -0,0 +1,2 @@ +"foo +bar" diff --git a/src/parser/test/flow/invalid_syntax/string_newline.tree.json b/src/parser/test/flow/invalid_syntax/string_newline.tree.json new file mode 100644 index 00000000000..3f3ad3d9bdb --- /dev/null +++ b/src/parser/test/flow/invalid_syntax/string_newline.tree.json @@ -0,0 +1,62 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":5}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":5}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":3,"column":0}}, + "message":"Unexpected string, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":0}}, + "range":[0,10], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, + "range":[0,5], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, + "range":[0,5], + "value":"foo\n", + "raw":"\"foo\n" + }, + "directive":"foo" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":3}}, + "range":[5,8], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":3}}, + "range":[5,8], + "name":"bar", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":3,"column":0}}, + "range":[8,10], + "expression":{ + "type":"Literal", + 
"loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":3,"column":0}}, + "range":[8,10], + "value":"\n", + "raw":"\"\n" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and.js b/src/parser/test/flow/nullish_coalescing/precedence_and.js index a8d594d41ec..914ff8445d9 100644 --- a/src/parser/test/flow/nullish_coalescing/precedence_and.js +++ b/src/parser/test/flow/nullish_coalescing/precedence_and.js @@ -1 +1 @@ -a ?? b && c +a ?? (b && c) diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_and.tree.json index 38af84cc313..7b6590ca807 100644 --- a/src/parser/test/flow/nullish_coalescing/precedence_and.tree.json +++ b/src/parser/test/flow/nullish_coalescing/precedence_and.tree.json @@ -1,16 +1,16 @@ { "type":"Program", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, - "range":[0,11], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], "body":[ { "type":"ExpressionStatement", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, - "range":[0,11], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], "expression":{ "type":"LogicalExpression", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, - "range":[0,11], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], "operator":"??", "left":{ "type":"Identifier", @@ -22,21 +22,21 @@ }, "right":{ "type":"LogicalExpression", - "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, - "range":[5,11], + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":12}}, + "range":[6,12], "operator":"&&", "left":{ "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, - "range":[5,6], + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], "name":"b", "typeAnnotation":null, "optional":false }, "right":{ "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, - "range":[10,11], + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "range":[11,12], "name":"c", "typeAnnotation":null, "optional":false diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.js b/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.js new file mode 100644 index 00000000000..a8d594d41ec --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.js @@ -0,0 +1 @@ +a ?? 
b && c diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.options.json b/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.tree.json new file mode 100644 index 00000000000..be9c17d853a --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_lhs_no_parens.tree.json @@ -0,0 +1,56 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `&&`. Parentheses are required to combine `??` with `&&` or `||` expressions." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "operator":"??", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "operator":"&&", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"b", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"c", + "typeAnnotation":null, + "optional":false + } + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.js b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.js new file mode 100644 index 00000000000..a019353b5a0 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.js @@ -0,0 +1 @@ +(a ?? 
b) && c diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.options.json b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.tree.json new file mode 100644 index 00000000000..56603acb84f --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_lhs.tree.json @@ -0,0 +1,50 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "operator":"&&", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":7}}, + "range":[1,7], + "operator":"??", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":2}}, + "range":[1,2], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"b", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":13}}, + "range":[12,13], + "name":"c", + "typeAnnotation":null, + "optional":false + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.js b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.js new file mode 100644 index 00000000000..d322f5e0d86 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.js @@ -0,0 +1 @@ +a && (b ?? 
c) diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.options.json b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.tree.json new file mode 100644 index 00000000000..e3e826a152a --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_nested_rhs.tree.json @@ -0,0 +1,50 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "operator":"&&", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":12}}, + "range":[6,12], + "operator":"??", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"b", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "range":[11,12], + "name":"c", + "typeAnnotation":null, + "optional":false + } + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.js b/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.js new file mode 100644 index 00000000000..7f9e51ef7a2 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.js @@ -0,0 +1 @@ +a && b ?? c diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.options.json b/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.tree.json new file mode 100644 index 00000000000..8369e3964f2 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_and_rhs_no_parens.tree.json @@ -0,0 +1,56 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `??`. Parentheses are required to combine `??` with `&&` or `||` expressions." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "operator":"??", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, + "range":[0,6], + "operator":"&&", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"b", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"c", + "typeAnnotation":null, + "optional":false + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.js b/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.js new file mode 100644 index 00000000000..79fa14dfc9b --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.js @@ -0,0 +1 @@ +a || b ?? c ?? d diff --git a/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.options.json b/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.tree.json new file mode 100644 index 00000000000..171461b801d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_multiple_on_or_rhs_no_parens.tree.json @@ -0,0 +1,70 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `??`. Parentheses are required to combine `??` with `&&` or `||` expressions." 
+ } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "operator":"??", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "operator":"??", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, + "range":[0,6], + "operator":"||", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"b", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"c", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, + "range":[15,16], + "name":"d", + "typeAnnotation":null, + "optional":false + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or.js b/src/parser/test/flow/nullish_coalescing/precedence_or.js index dc9a04b0e29..943182e0d5c 100644 --- a/src/parser/test/flow/nullish_coalescing/precedence_or.js +++ b/src/parser/test/flow/nullish_coalescing/precedence_or.js @@ -1 +1 @@ -a ?? b || c +a ?? 
(b || c) diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_or.tree.json index efdc4362b56..a7934882786 100644 --- a/src/parser/test/flow/nullish_coalescing/precedence_or.tree.json +++ b/src/parser/test/flow/nullish_coalescing/precedence_or.tree.json @@ -1,46 +1,46 @@ { "type":"Program", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, - "range":[0,11], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], "body":[ { "type":"ExpressionStatement", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, - "range":[0,11], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], "expression":{ "type":"LogicalExpression", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, - "range":[0,11], - "operator":"||", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "operator":"??", "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ "type":"LogicalExpression", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, - "range":[0,6], - "operator":"??", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":12}}, + "range":[6,12], + "operator":"||", "left":{ "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, - "range":[0,1], - "name":"a", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"b", "typeAnnotation":null, "optional":false }, "right":{ "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, - "range":[5,6], - "name":"b", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "range":[11,12], + "name":"c", "typeAnnotation":null, "optional":false } - }, - "right":{ - "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, - "range":[10,11], - "name":"c", - "typeAnnotation":null, - "optional":false } }, "directive":null diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.js b/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.js new file mode 100644 index 00000000000..dc9a04b0e29 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.js @@ -0,0 +1 @@ +a ?? 
b || c diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.options.json b/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.tree.json new file mode 100644 index 00000000000..38615ec2926 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_lhs_no_parens.tree.json @@ -0,0 +1,56 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `||`. Parentheses are required to combine `??` with `&&` or `||` expressions." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "operator":"??", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "operator":"||", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"b", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"c", + "typeAnnotation":null, + "optional":false + } + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.js b/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.js new file mode 100644 index 00000000000..a751dedebb9 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.js @@ -0,0 +1,14 @@ +a || b ?? c || d + +/** + * `||` cannot be combined with `??` without parens. We assume `??` is lower + * precedence and recover by pretending the parens were there: + * + * a || b ... + * ^^^^^^ ok, a LogicalOrExpression + * a || b ?? ... + * ^^ error. to recover, give ?? lower precedence than ||: + * (a || b) ?? c || ... + * ^^ error. to recover, give || higher precedence than ??: + * (a || b) ?? 
(c || d) + */ diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.options.json b/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.tree.json new file mode 100644 index 00000000000..38ea02e7b18 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_no_parens.tree.json @@ -0,0 +1,81 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `??`. Parentheses are required to combine `??` with `&&` or `||` expressions." + }, + { + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":14}}, + "message":"Unexpected token `||`. Parentheses are required to combine `??` with `&&` or `||` expressions." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":16}}, + "range":[0,16], + "operator":"??", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, + "range":[0,6], + "operator":"||", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"b", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":16}}, + "range":[10,16], + "operator":"||", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"c", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, + "range":[15,16], + "name":"d", + "typeAnnotation":null, + "optional":false + } + } + }, + "directive":null + } + ], + "comments":[ + { + "type":"Block", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":14,"column":3}}, + "range":[18,437], + "value":"*\n * `||` cannot be combined with `??` without parens. We assume `??` is lower\n * precedence and recover by pretending the parens were there:\n *\n * a || b ...\n * ^^^^^^ ok, a LogicalOrExpression\n * a || b ?? ...\n * ^^ error. to recover, give ?? lower precedence than ||:\n * (a || b) ?? c || ...\n * ^^ error. to recover, give || higher precedence than ??:\n * (a || b) ?? 
(c || d)\n " + } + ] +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.js b/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.js new file mode 100644 index 00000000000..2d12bd6d1f1 --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.js @@ -0,0 +1 @@ +a || b ?? c diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.options.json b/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.options.json new file mode 100644 index 00000000000..d1468c9c54d --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.options.json @@ -0,0 +1,3 @@ +{ + "esproposal_nullish_coalescing": true +} diff --git a/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.tree.json b/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.tree.json new file mode 100644 index 00000000000..0e0932c63de --- /dev/null +++ b/src/parser/test/flow/nullish_coalescing/precedence_or_rhs_no_parens.tree.json @@ -0,0 +1,56 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":9}}, + "message":"Unexpected token `??`. Parentheses are required to combine `??` with `&&` or `||` expressions." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "expression":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "operator":"??", + "left":{ + "type":"LogicalExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":6}}, + "range":[0,6], + "operator":"||", + "left":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "name":"a", + "typeAnnotation":null, + "optional":false + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"b", + "typeAnnotation":null, + "optional":false + } + }, + "right":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"c", + "typeAnnotation":null, + "optional":false + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/numbers/binnumber_word_invalid.js b/src/parser/test/flow/numbers/binnumber_word_invalid.js new file mode 100644 index 00000000000..46c4fe3a463 --- /dev/null +++ b/src/parser/test/flow/numbers/binnumber_word_invalid.js @@ -0,0 +1 @@ +0b123hello; \ No newline at end of file diff --git a/src/parser/test/flow/numbers/binnumber_word_invalid.tree.json b/src/parser/test/flow/numbers/binnumber_word_invalid.tree.json new file mode 100644 index 00000000000..c564b6f6e9b --- /dev/null +++ b/src/parser/test/flow/numbers/binnumber_word_invalid.tree.json @@ -0,0 +1,66 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":10}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":5}}, + "message":"Unexpected number, 
expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":10}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "range":[0,3], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "range":[0,3], + "value":1, + "raw":"0b1" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":5}}, + "range":[3,5], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":5}}, + "range":[3,5], + "value":23, + "raw":"23" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":10}}, + "range":[5,10], + "name":"hello", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/numbers/non_octal_decimal_integer_literal.js b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal.js new file mode 100644 index 00000000000..3527e1f8f68 --- /dev/null +++ b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal.js @@ -0,0 +1,3 @@ +08; +018; +088; diff --git a/src/parser/test/flow/numbers/non_octal_decimal_integer_literal.tree.json b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal.tree.json new file mode 100644 index 00000000000..3e5c3cd34db --- /dev/null +++ b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal.tree.json @@ -0,0 +1,47 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":4}}, + "range":[0,13], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "range":[0,3], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":2}}, + "range":[0,2], + "value":8, + "raw":"08" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":4}}, + "range":[4,8], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":3}}, + "range":[4,7], + "value":18, + "raw":"018" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":4}}, + "range":[9,13], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":3}}, + "range":[9,12], + "value":88, + "raw":"088" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/numbers/non_octal_decimal_integer_literal_strict.js b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal_strict.js new file mode 100644 index 00000000000..6b93d339fcb --- /dev/null +++ b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal_strict.js @@ -0,0 +1,4 @@ +"use strict"; 
+08; +018; +088; diff --git a/src/parser/test/flow/numbers/non_octal_decimal_integer_literal_strict.tree.json b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal_strict.tree.json new file mode 100644 index 00000000000..24e3a063406 --- /dev/null +++ b/src/parser/test/flow/numbers/non_octal_decimal_integer_literal_strict.tree.json @@ -0,0 +1,74 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":2}}, + "message":"Number literals with leading zeros are not allowed in strict mode." + }, + { + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":3}}, + "message":"Number literals with leading zeros are not allowed in strict mode." + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":3}}, + "message":"Number literals with leading zeros are not allowed in strict mode." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":4}}, + "range":[0,27], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "value":"use strict", + "raw":"\"use strict\"" + }, + "directive":"use strict" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":3}}, + "range":[14,17], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":2}}, + "range":[14,16], + "value":8, + "raw":"08" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":4}}, + "range":[18,22], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":3}}, + "range":[18,21], + "value":18, + "raw":"018" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":4}}, + "range":[23,27], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":3}}, + "range":[23,26], + "value":88, + "raw":"088" + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/numbers/underscored_float_err_lead.tree.json b/src/parser/test/flow/numbers/underscored_float_err_lead.tree.json index 9bc0bb3c8ec..8b3b28b77e6 100644 --- a/src/parser/test/flow/numbers/underscored_float_err_lead.tree.json +++ b/src/parser/test/flow/numbers/underscored_float_err_lead.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":7}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/numbers/underscored_float_err_trail.tree.json b/src/parser/test/flow/numbers/underscored_float_err_trail.tree.json index 9c28794a310..5b662379910 100644 --- a/src/parser/test/flow/numbers/underscored_float_err_trail.tree.json +++ b/src/parser/test/flow/numbers/underscored_float_err_trail.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":4}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression 
statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":7}}, - "message":"Unexpected number" + "message":"Unexpected number, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/numbers/underscored_legacy_octal.js b/src/parser/test/flow/numbers/underscored_legacy_octal.js new file mode 100644 index 00000000000..e264e62f72d --- /dev/null +++ b/src/parser/test/flow/numbers/underscored_legacy_octal.js @@ -0,0 +1,3 @@ +0_123; +01_23; +0123_; diff --git a/src/parser/test/flow/numbers/underscored_legacy_octal.tree.json b/src/parser/test/flow/numbers/underscored_legacy_octal.tree.json new file mode 100644 index 00000000000..7e402feb71c --- /dev/null +++ b/src/parser/test/flow/numbers/underscored_legacy_octal.tree.json @@ -0,0 +1,103 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":5}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":5}}, + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":6}}, + "range":[0,20], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, + "range":[0,1], + "value":0, + "raw":"0" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":6}}, + "range":[1,6], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":5}}, + "range":[1,5], + "name":"_123", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":2}}, + "range":[7,9], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":2}}, + "range":[7,9], + "value":1, + "raw":"01" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":6}}, + "range":[9,13], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[9,12], + "name":"_23", + "typeAnnotation":null, + "optional":false + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":4}}, + "range":[14,18], + "expression":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":4}}, + "range":[14,18], + "value":83, + "raw":"0123" + }, + "directive":null + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":6}}, + "range":[18,20], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":5}}, + "range":[18,19], + "name":"_", + 
"typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/numbers/underscored_number_err_double.tree.json b/src/parser/test/flow/numbers/underscored_number_err_double.tree.json index 49e0bfa351e..3500a17751b 100644 --- a/src/parser/test/flow/numbers/underscored_number_err_double.tree.json +++ b/src/parser/test/flow/numbers/underscored_number_err_double.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":7}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/numbers/underscored_number_err_leading_zero.tree.json b/src/parser/test/flow/numbers/underscored_number_err_leading_zero.tree.json index 88fe624b74c..2bddb51c2cc 100644 --- a/src/parser/test/flow/numbers/underscored_number_err_leading_zero.tree.json +++ b/src/parser/test/flow/numbers/underscored_number_err_leading_zero.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":6}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/numbers/underscored_number_err_trail.tree.json b/src/parser/test/flow/numbers/underscored_number_err_trail.tree.json index 44f7c4e6320..7dcdb17b4ce 100644 --- a/src/parser/test/flow/numbers/underscored_number_err_trail.tree.json +++ b/src/parser/test/flow/numbers/underscored_number_err_trail.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/optional_chaining/conditional-decimal.tree.json b/src/parser/test/flow/optional_chaining/conditional-decimal.tree.json index 1fe093d2bbb..c556756f2f0 100644 --- a/src/parser/test/flow/optional_chaining/conditional-decimal.tree.json +++ b/src/parser/test/flow/optional_chaining/conditional-decimal.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":5,"column":8},"end":{"line":5,"column":9}}, - "message":"Unexpected number" + "message":"Unexpected number, expected an identifier" }, { "loc":{"source":null,"start":{"line":5,"column":10},"end":{"line":5,"column":11}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":5,"column":10},"end":{"line":5,"column":11}}, + "message":"Unexpected token `:`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/optional_chaining/template-literals.tree.json b/src/parser/test/flow/optional_chaining/template-literals.tree.json index 2eac5ca8d9b..d2093e45dd3 100644 --- a/src/parser/test/flow/optional_chaining/template-literals.tree.json +++ b/src/parser/test/flow/optional_chaining/template-literals.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":8}}, - "message":"Unexpected token `{b}`" + "message":"Unexpected template literal part, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":9}}, @@ -132,6 +132,7 @@ "computed":false, "optional":true }, + 
"typeArguments":null, "arguments":[ { "type":"TaggedTemplateExpression", diff --git a/src/parser/test/flow/private_class_properties/constructor.tree.json b/src/parser/test/flow/private_class_properties/constructor.tree.json index 85d1b00c853..43be1bec754 100644 --- a/src/parser/test/flow/private_class_properties/constructor.tree.json +++ b/src/parser/test/flow/private_class_properties/constructor.tree.json @@ -7,6 +7,14 @@ { "loc":{"source":null,"start":{"line":6,"column":2},"end":{"line":6,"column":26}}, "message":"Classes may not have private methods." + }, + { + "loc":{"source":null,"start":{"line":10,"column":2},"end":{"line":10,"column":14}}, + "message":"Classes may not have fields named `#constructor`." + }, + { + "loc":{"source":null,"start":{"line":14,"column":9},"end":{"line":14,"column":21}}, + "message":"Classes may not have fields named `#constructor`." } ], "type":"Program", @@ -73,8 +81,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -139,8 +147,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -190,8 +198,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] @@ -241,8 +249,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/private_class_properties/getter.js b/src/parser/test/flow/private_class_properties/getter.js new file mode 100644 index 00000000000..d49b9e9adfe --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter.js @@ -0,0 +1,3 @@ +class C { + get #m() {} +} diff --git a/src/parser/test/flow/private_class_properties/getter.tree.json b/src/parser/test/flow/private_class_properties/getter.tree.json new file mode 100644 index 00000000000..f8cac0f755d --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter.tree.json @@ -0,0 +1,74 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,25], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,25], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":13}}, + "range":[12,23], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":8}}, + "range":[16,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":8}}, + "range":[17,18], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":13}}, + "range":[18,23], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":11},"end":{"line":2,"column":13}}, + "range":[21,23], + "body":[] + }, + "async":false, + 
"generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"get", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/getter_and_field.js b/src/parser/test/flow/private_class_properties/getter_and_field.js new file mode 100644 index 00000000000..ca9573f565d --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_and_field.js @@ -0,0 +1,4 @@ +class C { + #m; + get #m() {} +} diff --git a/src/parser/test/flow/private_class_properties/getter_and_field.tree.json b/src/parser/test/flow/private_class_properties/getter_and_field.tree.json new file mode 100644 index 00000000000..1395dfdc16e --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_and_field.tree.json @@ -0,0 +1,97 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "message":"Private fields may only be declared once. `#m` is declared more than once." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,31], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,31], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,31], + "body":[ + { + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":2,"column":4}}, + "range":[13,14], + "name":"m", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":13}}, + "range":[18,29], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "range":[22,24], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[23,24], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":13}}, + "range":[24,29], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":11},"end":{"line":3,"column":13}}, + "range":[27,29], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"get", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/getter_and_setter.js b/src/parser/test/flow/private_class_properties/getter_and_setter.js new file mode 100644 
index 00000000000..2facd0fcf5d --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_and_setter.js @@ -0,0 +1,4 @@ +class C { + get #m() {} + set #m(x) {} +} diff --git a/src/parser/test/flow/private_class_properties/getter_and_setter.tree.json b/src/parser/test/flow/private_class_properties/getter_and_setter.tree.json new file mode 100644 index 00000000000..841f3c6544d --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_and_setter.tree.json @@ -0,0 +1,124 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,40], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,40], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,40], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":13}}, + "range":[12,23], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":8}}, + "range":[16,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":8}}, + "range":[17,18], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":13}}, + "range":[18,23], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":11},"end":{"line":2,"column":13}}, + "range":[21,23], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"get", + "static":false, + "computed":false, + "decorators":[] + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":14}}, + "range":[26,38], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "range":[30,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[31,32], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":14}}, + "range":[32,38], + "id":null, + "params":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":3,"column":10}}, + "range":[33,34], + "name":"x", + "typeAnnotation":null, + "optional":false + } + ], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":12},"end":{"line":3,"column":14}}, + "range":[36,38], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"set", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git 
a/src/parser/test/flow/private_class_properties/getter_duplicate.js b/src/parser/test/flow/private_class_properties/getter_duplicate.js new file mode 100644 index 00000000000..61957772a69 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_duplicate.js @@ -0,0 +1,4 @@ +class C { + get #m() {} + get #m() {} +} diff --git a/src/parser/test/flow/private_class_properties/getter_duplicate.tree.json b/src/parser/test/flow/private_class_properties/getter_duplicate.tree.json new file mode 100644 index 00000000000..ec1b346df56 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_duplicate.tree.json @@ -0,0 +1,121 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "message":"Private fields may only be declared once. `#m` is declared more than once." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,39], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,39], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,39], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":13}}, + "range":[12,23], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":8}}, + "range":[16,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":8}}, + "range":[17,18], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":13}}, + "range":[18,23], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":11},"end":{"line":2,"column":13}}, + "range":[21,23], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"get", + "static":false, + "computed":false, + "decorators":[] + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":13}}, + "range":[26,37], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "range":[30,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[31,32], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":13}}, + "range":[32,37], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":11},"end":{"line":3,"column":13}}, + "range":[35,37], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"get", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + 
"superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/getter_whitespace.js b/src/parser/test/flow/private_class_properties/getter_whitespace.js new file mode 100644 index 00000000000..db51e4754fb --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_whitespace.js @@ -0,0 +1,3 @@ +class C { + get # m () {} +} diff --git a/src/parser/test/flow/private_class_properties/getter_whitespace.tree.json b/src/parser/test/flow/private_class_properties/getter_whitespace.tree.json new file mode 100644 index 00000000000..4752a7c11f4 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/getter_whitespace.tree.json @@ -0,0 +1,80 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":9}}, + "message":"Unexpected whitespace between `#` and identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,27], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,27], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,27], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":15}}, + "range":[12,25], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":9}}, + "range":[16,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":9}}, + "range":[18,19], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":15}}, + "range":[20,25], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":15}}, + "range":[23,25], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"get", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/member.tree.json b/src/parser/test/flow/private_class_properties/member.tree.json index a05f3450c5b..cd765404abd 100644 --- a/src/parser/test/flow/private_class_properties/member.tree.json +++ b/src/parser/test/flow/private_class_properties/member.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":5}}, - "message":"Unexpected token #" + "message":"Unexpected token `#`, expected an identifier" }, { "loc":{"source":null,"start":{"line":4,"column":5},"end":{"line":4,"column":6}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", @@ -197,8 +197,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, 
"superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/private_class_properties/member_whitespace.js b/src/parser/test/flow/private_class_properties/member_whitespace.js new file mode 100644 index 00000000000..13f4467af94 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/member_whitespace.js @@ -0,0 +1,6 @@ +class C { + #m; + foo() { + this.# m; + } +} diff --git a/src/parser/test/flow/private_class_properties/member_whitespace.tree.json b/src/parser/test/flow/private_class_properties/member_whitespace.tree.json new file mode 100644 index 00000000000..6ebe005f64c --- /dev/null +++ b/src/parser/test/flow/private_class_properties/member_whitespace.tree.json @@ -0,0 +1,123 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":4,"column":9},"end":{"line":4,"column":12}}, + "message":"Unexpected whitespace between `#` and identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":6,"column":1}}, + "range":[0,45], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":6,"column":1}}, + "range":[0,45], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":6,"column":1}}, + "range":[8,45], + "body":[ + { + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":2,"column":4}}, + "range":[13,14], + "name":"m", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":5,"column":3}}, + "range":[18,43], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":5}}, + "range":[18,21], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":5},"end":{"line":5,"column":3}}, + "range":[21,43], + "id":null, + "params":[], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":5,"column":3}}, + "range":[24,43], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":13}}, + "range":[30,39], + "expression":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":12}}, + "range":[30,38], + "object":{ + "type":"ThisExpression", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":8}}, + "range":[30,34] + }, + "property":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":4,"column":9},"end":{"line":4,"column":12}}, + "range":[35,38], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":11},"end":{"line":4,"column":12}}, + "range":[37,38], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "computed":false + }, + "directive":null + } + ] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + 
"typeParameters":null + }, + "kind":"method", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/nested.tree.json b/src/parser/test/flow/private_class_properties/nested.tree.json index dda698d6bd8..65de6f20d2f 100644 --- a/src/parser/test/flow/private_class_properties/nested.tree.json +++ b/src/parser/test/flow/private_class_properties/nested.tree.json @@ -48,6 +48,7 @@ "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":18}}, "range":[16,28], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/private_class_properties/object_type.tree.json b/src/parser/test/flow/private_class_properties/object_type.tree.json index ed80010e965..004a37ae7ee 100644 --- a/src/parser/test/flow/private_class_properties/object_type.tree.json +++ b/src/parser/test/flow/private_class_properties/object_type.tree.json @@ -27,6 +27,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":3,"column":1}}, "range":[9,72], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/private_class_properties/setter.js b/src/parser/test/flow/private_class_properties/setter.js new file mode 100644 index 00000000000..bc6b5b08005 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/setter.js @@ -0,0 +1,3 @@ +class C { + set #m(x) {} +} diff --git a/src/parser/test/flow/private_class_properties/setter.tree.json b/src/parser/test/flow/private_class_properties/setter.tree.json new file mode 100644 index 00000000000..af4cefc25c6 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/setter.tree.json @@ -0,0 +1,83 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,26], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,26], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,26], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":14}}, + "range":[12,24], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":8}}, + "range":[16,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":8}}, + "range":[17,18], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":14}}, + "range":[18,24], + "id":null, + "params":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":10}}, + "range":[19,20], + "name":"x", + "typeAnnotation":null, + "optional":false + } + ], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":14}}, + "range":[22,24], + "body":[] + }, + "async":false, + "generator":false, + 
"predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"set", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/setter_and_field.js b/src/parser/test/flow/private_class_properties/setter_and_field.js new file mode 100644 index 00000000000..b70a4621543 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/setter_and_field.js @@ -0,0 +1,4 @@ +class C { + #m; + set #m(x) {} +} diff --git a/src/parser/test/flow/private_class_properties/setter_and_field.tree.json b/src/parser/test/flow/private_class_properties/setter_and_field.tree.json new file mode 100644 index 00000000000..8b8a65892e3 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/setter_and_field.tree.json @@ -0,0 +1,106 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "message":"Private fields may only be declared once. `#m` is declared more than once." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,32], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,32], + "body":[ + { + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "range":[12,15], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":2,"column":4}}, + "range":[13,14], + "name":"m", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":14}}, + "range":[18,30], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "range":[22,24], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[23,24], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":14}}, + "range":[24,30], + "id":null, + "params":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":3,"column":10}}, + "range":[25,26], + "name":"x", + "typeAnnotation":null, + "optional":false + } + ], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":12},"end":{"line":3,"column":14}}, + "range":[28,30], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"set", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + 
"comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/setter_duplicate.js b/src/parser/test/flow/private_class_properties/setter_duplicate.js new file mode 100644 index 00000000000..6dfc4100acd --- /dev/null +++ b/src/parser/test/flow/private_class_properties/setter_duplicate.js @@ -0,0 +1,4 @@ +class C { + set #m(x) {} + set #m(x) {} +} diff --git a/src/parser/test/flow/private_class_properties/setter_duplicate.tree.json b/src/parser/test/flow/private_class_properties/setter_duplicate.tree.json new file mode 100644 index 00000000000..b8de527b1c5 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/setter_duplicate.tree.json @@ -0,0 +1,139 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "message":"Private fields may only be declared once. `#m` is declared more than once." + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,41], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,41], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,41], + "body":[ + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":14}}, + "range":[12,24], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":2,"column":6},"end":{"line":2,"column":8}}, + "range":[16,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":7},"end":{"line":2,"column":8}}, + "range":[17,18], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":2,"column":8},"end":{"line":2,"column":14}}, + "range":[18,24], + "id":null, + "params":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":10}}, + "range":[19,20], + "name":"x", + "typeAnnotation":null, + "optional":false + } + ], + "body":{ + "type":"BlockStatement", + "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":14}}, + "range":[22,24], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"set", + "static":false, + "computed":false, + "decorators":[] + }, + { + "type":"MethodDefinition", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":14}}, + "range":[27,39], + "key":{ + "type":"PrivateName", + "loc":{"source":null,"start":{"line":3,"column":6},"end":{"line":3,"column":8}}, + "range":[31,33], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[32,33], + "name":"m", + "typeAnnotation":null, + "optional":false + } + }, + "value":{ + "type":"FunctionExpression", + "loc":{"source":null,"start":{"line":3,"column":8},"end":{"line":3,"column":14}}, + "range":[33,39], + "id":null, + "params":[ + { + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":3,"column":10}}, + "range":[34,35], + "name":"x", + "typeAnnotation":null, + "optional":false + } + ], + "body":{ 
+ "type":"BlockStatement", + "loc":{"source":null,"start":{"line":3,"column":12},"end":{"line":3,"column":14}}, + "range":[37,39], + "body":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":false, + "returnType":null, + "typeParameters":null + }, + "kind":"set", + "static":false, + "computed":false, + "decorators":[] + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/unannotated.js b/src/parser/test/flow/private_class_properties/unannotated.js new file mode 100644 index 00000000000..f2a45da7868 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/unannotated.js @@ -0,0 +1,3 @@ +class C { + #foo +} diff --git a/src/parser/test/flow/private_class_properties/unannotated.tree.json b/src/parser/test/flow/private_class_properties/unannotated.tree.json new file mode 100644 index 00000000000..6ede2bf0200 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/unannotated.tree.json @@ -0,0 +1,50 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,18], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,18], + "body":[ + { + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":6}}, + "range":[12,16], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":2,"column":6}}, + "range":[13,16], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/unannotated_asi.js b/src/parser/test/flow/private_class_properties/unannotated_asi.js new file mode 100644 index 00000000000..d8789bdcd85 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/unannotated_asi.js @@ -0,0 +1,4 @@ +class C { + #foo + #bar +} diff --git a/src/parser/test/flow/private_class_properties/unannotated_asi.tree.json b/src/parser/test/flow/private_class_properties/unannotated_asi.tree.json new file mode 100644 index 00000000000..3650df74527 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/unannotated_asi.tree.json @@ -0,0 +1,67 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,25], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":4,"column":1}}, + "range":[0,25], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":4,"column":1}}, + "range":[8,25], + "body":[ + 
{ + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":6}}, + "range":[12,16], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":3},"end":{"line":2,"column":6}}, + "range":[13,16], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + }, + { + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":6}}, + "range":[19,23], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":3},"end":{"line":3,"column":6}}, + "range":[20,23], + "name":"bar", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/private_class_properties/whitespace.js b/src/parser/test/flow/private_class_properties/whitespace.js new file mode 100644 index 00000000000..5c0f016dcd9 --- /dev/null +++ b/src/parser/test/flow/private_class_properties/whitespace.js @@ -0,0 +1,3 @@ +class C { + # m; +} diff --git a/src/parser/test/flow/private_class_properties/whitespace.tree.json b/src/parser/test/flow/private_class_properties/whitespace.tree.json new file mode 100644 index 00000000000..d5884deb2aa --- /dev/null +++ b/src/parser/test/flow/private_class_properties/whitespace.tree.json @@ -0,0 +1,56 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "message":"Unexpected whitespace between `#` and identifier" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,18], + "body":[ + { + "type":"ClassDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,18], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":6},"end":{"line":1,"column":7}}, + "range":[6,7], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "body":{ + "type":"ClassBody", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":3,"column":1}}, + "range":[8,18], + "body":[ + { + "type":"ClassPrivateProperty", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":6}}, + "range":[12,16], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":5}}, + "range":[14,15], + "name":"m", + "typeAnnotation":null, + "optional":false + }, + "value":null, + "typeAnnotation":null, + "static":false, + "variance":null + } + ] + }, + "typeParameters":null, + "superClass":null, + "superTypeParameters":null, + "implements":[], + "decorators":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/trailing_commas_invalid/migrated_0000.tree.json b/src/parser/test/flow/trailing_commas_invalid/migrated_0000.tree.json index 53e201c4bb8..51725146f0d 100644 --- a/src/parser/test/flow/trailing_commas_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/trailing_commas_invalid/migrated_0000.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`" } ], "type":"Program", @@ -25,6 +25,7 @@ "typeAnnotation":null, "optional":false }, + 
"typeArguments":null, "arguments":[ { "type":"Identifier", diff --git a/src/parser/test/flow/trailing_commas_invalid/migrated_0001.tree.json b/src/parser/test/flow/trailing_commas_invalid/migrated_0001.tree.json index e036b015090..5215dd4c142 100644 --- a/src/parser/test/flow/trailing_commas_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/trailing_commas_invalid/migrated_0001.tree.json @@ -6,15 +6,15 @@ }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token ," + "message":"Unexpected token `,`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -96,6 +96,7 @@ }, "computed":false }, + "typeArguments":null, "arguments":[ { "type":"Identifier", diff --git a/src/parser/test/flow/trailing_commas_invalid/migrated_0002.tree.json b/src/parser/test/flow/trailing_commas_invalid/migrated_0002.tree.json index b356dec8ef1..845361687f8 100644 --- a/src/parser/test/flow/trailing_commas_invalid/migrated_0002.tree.json +++ b/src/parser/test/flow/trailing_commas_invalid/migrated_0002.tree.json @@ -6,15 +6,15 @@ }, { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, - "message":"Unexpected token ," + "message":"Unexpected token `,`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":26}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -106,6 +106,7 @@ }, "computed":false }, + "typeArguments":null, "arguments":[ { "type":"Identifier", diff --git a/src/parser/test/flow/trailing_commas_invalid/migrated_0003.tree.json b/src/parser/test/flow/trailing_commas_invalid/migrated_0003.tree.json index 887f2eafa73..906162df5e2 100644 --- a/src/parser/test/flow/trailing_commas_invalid/migrated_0003.tree.json +++ b/src/parser/test/flow/trailing_commas_invalid/migrated_0003.tree.json @@ -2,19 +2,31 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":15}}, - "message":"Unexpected token ..." 
+ "message":"Unexpected token `...`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":15}}, + "message":"Unexpected token `...`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, + "message":"Unexpected token `)`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":21}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", @@ -122,6 +134,7 @@ }, "computed":false }, + "typeArguments":null, "arguments":[ { "type":"Identifier", diff --git a/src/parser/test/flow/types/aliases/keyword.tree.json b/src/parser/test/flow/types/aliases/keyword.tree.json index a0f98cf2065..1498d8bfcd9 100644 --- a/src/parser/test/flow/types/aliases/keyword.tree.json +++ b/src/parser/test/flow/types/aliases/keyword.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":12}}, - "message":"Unexpected token extends" + "message":"Unexpected token `extends`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":12}}, + "message":"Unexpected token `extends`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_disallowed_in_non_objects.js b/src/parser/test/flow/types/annotations/explicit_inexact_disallowed_in_non_objects.js new file mode 100644 index 00000000000..7a197b14c98 --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_disallowed_in_non_objects.js @@ -0,0 +1,39 @@ +declare class A { + ...; +} + +declare class B { + foo: number; + ...; +} + +declare class C { + ...; + foo: number; +} + +declare class D { + foo: number; + ...; + bar: number; +} + +interface E { + ...; +} + +interface F { + foo: number; + ...; +} + +interface G { + ...; + foo: number; +} + +interface H { + foo: number; + ...; + bar: number; +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_disallowed_in_non_objects.tree.json b/src/parser/test/flow/types/annotations/explicit_inexact_disallowed_in_non_objects.tree.json new file mode 100644 index 00000000000..1663b140d6a --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_disallowed_in_non_objects.tree.json @@ -0,0 +1,448 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + }, + { + "loc":{"source":null,"start":{"line":7,"column":2},"end":{"line":7,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an 
object type" + }, + { + "loc":{"source":null,"start":{"line":11,"column":2},"end":{"line":11,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + }, + { + "loc":{"source":null,"start":{"line":17,"column":2},"end":{"line":17,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + }, + { + "loc":{"source":null,"start":{"line":22,"column":2},"end":{"line":22,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + }, + { + "loc":{"source":null,"start":{"line":27,"column":2},"end":{"line":27,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + }, + { + "loc":{"source":null,"start":{"line":31,"column":2},"end":{"line":31,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + }, + { + "loc":{"source":null,"start":{"line":37,"column":2},"end":{"line":37,"column":5}}, + "message":"Explicit inexact syntax can only appear inside an object type" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":39,"column":1}}, + "range":[0,326], + "body":[ + { + "type":"DeclareClass", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":1}}, + "range":[0,26], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":15}}, + "range":[14,15], + "name":"A", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":3,"column":1}}, + "range":[16,26], + "exact":false, + "properties":[], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[], + "implements":[], + "mixins":[] + }, + { + "type":"DeclareClass", + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":8,"column":1}}, + "range":[28,69], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":5,"column":14},"end":{"line":5,"column":15}}, + "range":[42,43], + "name":"B", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":5,"column":16},"end":{"line":8,"column":1}}, + "range":[44,69], + "exact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":6,"column":2},"end":{"line":6,"column":13}}, + "range":[48,59], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":2},"end":{"line":6,"column":5}}, + "range":[48,51], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":6,"column":7},"end":{"line":6,"column":13}}, + "range":[53,59] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[], + "implements":[], + "mixins":[] + }, + { + "type":"DeclareClass", + "loc":{"source":null,"start":{"line":10,"column":0},"end":{"line":13,"column":1}}, + "range":[71,112], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":10,"column":14},"end":{"line":10,"column":15}}, + "range":[85,86], + "name":"C", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + 
"loc":{"source":null,"start":{"line":10,"column":16},"end":{"line":13,"column":1}}, + "range":[87,112], + "exact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":12,"column":2},"end":{"line":12,"column":13}}, + "range":[98,109], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":12,"column":2},"end":{"line":12,"column":5}}, + "range":[98,101], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":12,"column":7},"end":{"line":12,"column":13}}, + "range":[103,109] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[], + "implements":[], + "mixins":[] + }, + { + "type":"DeclareClass", + "loc":{"source":null,"start":{"line":15,"column":0},"end":{"line":19,"column":1}}, + "range":[114,170], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":15,"column":14},"end":{"line":15,"column":15}}, + "range":[128,129], + "name":"D", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":15,"column":16},"end":{"line":19,"column":1}}, + "range":[130,170], + "exact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":16,"column":2},"end":{"line":16,"column":13}}, + "range":[134,145], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":16,"column":2},"end":{"line":16,"column":5}}, + "range":[134,137], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":16,"column":7},"end":{"line":16,"column":13}}, + "range":[139,145] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + }, + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":18,"column":2},"end":{"line":18,"column":13}}, + "range":[156,167], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":18,"column":2},"end":{"line":18,"column":5}}, + "range":[156,159], + "name":"bar", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":18,"column":7},"end":{"line":18,"column":13}}, + "range":[161,167] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[], + "implements":[], + "mixins":[] + }, + { + "type":"InterfaceDeclaration", + "loc":{"source":null,"start":{"line":21,"column":0},"end":{"line":23,"column":1}}, + "range":[172,194], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":21,"column":10},"end":{"line":21,"column":11}}, + "range":[182,183], + "name":"E", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":21,"column":12},"end":{"line":23,"column":1}}, + "range":[184,194], + "exact":false, + "properties":[], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[] + }, + { + "type":"InterfaceDeclaration", + "loc":{"source":null,"start":{"line":25,"column":0},"end":{"line":28,"column":1}}, + 
"range":[196,233], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":25,"column":10},"end":{"line":25,"column":11}}, + "range":[206,207], + "name":"F", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":25,"column":12},"end":{"line":28,"column":1}}, + "range":[208,233], + "exact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":26,"column":2},"end":{"line":26,"column":13}}, + "range":[212,223], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":26,"column":2},"end":{"line":26,"column":5}}, + "range":[212,215], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":26,"column":7},"end":{"line":26,"column":13}}, + "range":[217,223] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[] + }, + { + "type":"InterfaceDeclaration", + "loc":{"source":null,"start":{"line":30,"column":0},"end":{"line":33,"column":1}}, + "range":[235,272], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":30,"column":10},"end":{"line":30,"column":11}}, + "range":[245,246], + "name":"G", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":30,"column":12},"end":{"line":33,"column":1}}, + "range":[247,272], + "exact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":32,"column":2},"end":{"line":32,"column":13}}, + "range":[258,269], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":32,"column":2},"end":{"line":32,"column":5}}, + "range":[258,261], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":32,"column":7},"end":{"line":32,"column":13}}, + "range":[263,269] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[] + }, + { + "type":"InterfaceDeclaration", + "loc":{"source":null,"start":{"line":35,"column":0},"end":{"line":39,"column":1}}, + "range":[274,326], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":35,"column":10},"end":{"line":35,"column":11}}, + "range":[284,285], + "name":"H", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "body":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":35,"column":12},"end":{"line":39,"column":1}}, + "range":[286,326], + "exact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":36,"column":2},"end":{"line":36,"column":13}}, + "range":[290,301], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":36,"column":2},"end":{"line":36,"column":5}}, + "range":[290,293], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":36,"column":7},"end":{"line":36,"column":13}}, + "range":[295,301] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + 
"kind":"init" + }, + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":38,"column":2},"end":{"line":38,"column":13}}, + "range":[312,323], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":38,"column":2},"end":{"line":38,"column":5}}, + "range":[312,315], + "name":"bar", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":38,"column":7},"end":{"line":38,"column":13}}, + "range":[317,323] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + }, + "extends":[] + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_forbidden_in_exact.js b/src/parser/test/flow/types/annotations/explicit_inexact_forbidden_in_exact.js new file mode 100644 index 00000000000..06abe766a3b --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_forbidden_in_exact.js @@ -0,0 +1 @@ +type T = {| foo: number, ... |}; diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_forbidden_in_exact.tree.json b/src/parser/test/flow/types/annotations/explicit_inexact_forbidden_in_exact.tree.json new file mode 100644 index 00000000000..4691a15a2f4 --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_forbidden_in_exact.tree.json @@ -0,0 +1,64 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":28}}, + "message":"Explicit inexact syntax cannot appear inside an explicit exact object type" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":32}}, + "range":[0,32], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":32}}, + "range":[0,32], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":31}}, + "range":[9,31], + "exact":true, + "inexact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":23}}, + "range":[12,23], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":15}}, + "range":[12,15], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":23}}, + "range":[17,23] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_must_appear_last.js b/src/parser/test/flow/types/annotations/explicit_inexact_must_appear_last.js new file mode 100644 index 00000000000..938fa7d7fe6 --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_must_appear_last.js @@ -0,0 +1 @@ +type T = {..., foo: number}; diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_must_appear_last.tree.json 
b/src/parser/test/flow/types/annotations/explicit_inexact_must_appear_last.tree.json new file mode 100644 index 00000000000..181146d7260 --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_must_appear_last.tree.json @@ -0,0 +1,64 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "message":"Explicit inexact syntax must come at the end of an object type" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":28}}, + "range":[0,28], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":28}}, + "range":[0,28], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":27}}, + "range":[9,27], + "exact":false, + "inexact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":26}}, + "range":[15,26], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":18}}, + "range":[15,18], + "name":"foo", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":26}}, + "range":[20,26] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_object.js b/src/parser/test/flow/types/annotations/explicit_inexact_object.js new file mode 100644 index 00000000000..0e5d25f7a61 --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_object.js @@ -0,0 +1,3 @@ +type T = {...}; +type U = {x: number, ...}; +type V = {x: number, ...V, ...U, ...}; diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_object.tree.json b/src/parser/test/flow/types/annotations/explicit_inexact_object.tree.json new file mode 100644 index 00000000000..333b331430d --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_object.tree.json @@ -0,0 +1,171 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":38}}, + "range":[0,81], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":15}}, + "range":[0,15], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":14}}, + "range":[9,14], + "exact":false, + "inexact":true, + "properties":[], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":26}}, + "range":[16,42], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":6}}, + 
"range":[21,22], + "name":"U", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":25}}, + "range":[25,41], + "exact":false, + "inexact":true, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":19}}, + "range":[26,35], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "range":[26,27], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":19}}, + "range":[29,35] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":38}}, + "range":[43,81], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":5},"end":{"line":3,"column":6}}, + "range":[48,49], + "name":"V", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":3,"column":37}}, + "range":[52,80], + "exact":false, + "inexact":true, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":19}}, + "range":[53,62], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":11}}, + "range":[53,54], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":13},"end":{"line":3,"column":19}}, + "range":[56,62] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + }, + { + "type":"ObjectTypeSpreadProperty", + "loc":{"source":null,"start":{"line":3,"column":21},"end":{"line":3,"column":25}}, + "range":[64,68], + "argument":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":24},"end":{"line":3,"column":25}}, + "range":[67,68], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":24},"end":{"line":3,"column":25}}, + "range":[67,68], + "name":"V", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + }, + { + "type":"ObjectTypeSpreadProperty", + "loc":{"source":null,"start":{"line":3,"column":27},"end":{"line":3,"column":31}}, + "range":[70,74], + "argument":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":30},"end":{"line":3,"column":31}}, + "range":[73,74], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":30},"end":{"line":3,"column":31}}, + "range":[73,74], + "name":"U", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_object_invalid.js b/src/parser/test/flow/types/annotations/explicit_inexact_object_invalid.js new file mode 100644 index 00000000000..1a1b50ee640 --- /dev/null +++ 
b/src/parser/test/flow/types/annotations/explicit_inexact_object_invalid.js @@ -0,0 +1,3 @@ +type T = {x: number, ..., y: number}; +type U = {x: number, ..., ...}; +type V = {x: number, ..., ...X}; diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_object_invalid.tree.json b/src/parser/test/flow/types/annotations/explicit_inexact_object_invalid.tree.json new file mode 100644 index 00000000000..7962232a81b --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_object_invalid.tree.json @@ -0,0 +1,215 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":24}}, + "message":"Explicit inexact syntax must come at the end of an object type" + }, + { + "loc":{"source":null,"start":{"line":2,"column":21},"end":{"line":2,"column":24}}, + "message":"Explicit inexact syntax must come at the end of an object type" + }, + { + "loc":{"source":null,"start":{"line":3,"column":21},"end":{"line":3,"column":24}}, + "message":"Explicit inexact syntax must come at the end of an object type" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":3,"column":32}}, + "range":[0,102], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":37}}, + "range":[0,37], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":36}}, + "range":[9,36], + "exact":false, + "inexact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":19}}, + "range":[10,19], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "range":[10,11], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":19}}, + "range":[13,19] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + }, + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":35}}, + "range":[26,35], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":27}}, + "range":[26,27], + "name":"y", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":35}}, + "range":[29,35] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":31}}, + "range":[38,69], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":6}}, + "range":[43,44], + "name":"U", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":30}}, + "range":[47,68], + 
"exact":false, + "inexact":true, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":19}}, + "range":[48,57], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":10},"end":{"line":2,"column":11}}, + "range":[48,49], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":13},"end":{"line":2,"column":19}}, + "range":[51,57] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":32}}, + "range":[70,102], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":5},"end":{"line":3,"column":6}}, + "range":[75,76], + "name":"V", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":3,"column":31}}, + "range":[79,101], + "exact":false, + "inexact":false, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":19}}, + "range":[80,89], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":10},"end":{"line":3,"column":11}}, + "range":[80,81], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":13},"end":{"line":3,"column":19}}, + "range":[83,89] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + }, + { + "type":"ObjectTypeSpreadProperty", + "loc":{"source":null,"start":{"line":3,"column":26},"end":{"line":3,"column":30}}, + "range":[96,100], + "argument":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":29},"end":{"line":3,"column":30}}, + "range":[99,100], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":29},"end":{"line":3,"column":30}}, + "range":[99,100], + "name":"X", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_trailing_comma.js b/src/parser/test/flow/types/annotations/explicit_inexact_trailing_comma.js new file mode 100644 index 00000000000..cb18079ee73 --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_trailing_comma.js @@ -0,0 +1,10 @@ +type T = { ..., } +type U = { ...; } +type V = { + x: number, + ..., +} +type W = { + x: number; + ...; +} diff --git a/src/parser/test/flow/types/annotations/explicit_inexact_trailing_comma.tree.json b/src/parser/test/flow/types/annotations/explicit_inexact_trailing_comma.tree.json new file mode 100644 index 00000000000..2de76cf9b1c --- /dev/null +++ b/src/parser/test/flow/types/annotations/explicit_inexact_trailing_comma.tree.json @@ -0,0 +1,158 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":10,"column":1}}, + "range":[0,101], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":17}}, + 
"range":[0,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":17}}, + "range":[9,17], + "exact":false, + "inexact":true, + "properties":[], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":17}}, + "range":[18,35], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":5},"end":{"line":2,"column":6}}, + "range":[23,24], + "name":"U", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":17}}, + "range":[27,35], + "exact":false, + "inexact":true, + "properties":[], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":6,"column":1}}, + "range":[36,68], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":5},"end":{"line":3,"column":6}}, + "range":[41,42], + "name":"V", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":9},"end":{"line":6,"column":1}}, + "range":[45,68], + "exact":false, + "inexact":true, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":11}}, + "range":[49,58], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":3}}, + "range":[49,50], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":4,"column":5},"end":{"line":4,"column":11}}, + "range":[52,58] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + "internalSlots":[] + } + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":7,"column":0},"end":{"line":10,"column":1}}, + "range":[69,101], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":7,"column":5},"end":{"line":7,"column":6}}, + "range":[74,75], + "name":"W", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"ObjectTypeAnnotation", + "loc":{"source":null,"start":{"line":7,"column":9},"end":{"line":10,"column":1}}, + "range":[78,101], + "exact":false, + "inexact":true, + "properties":[ + { + "type":"ObjectTypeProperty", + "loc":{"source":null,"start":{"line":8,"column":2},"end":{"line":8,"column":11}}, + "range":[82,91], + "key":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":8,"column":2},"end":{"line":8,"column":3}}, + "range":[82,83], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "value":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":8,"column":5},"end":{"line":8,"column":11}}, + "range":[85,91] + }, + "method":false, + "optional":false, + "static":false, + "proto":false, + "variance":null, + "kind":"init" + } + ], + "indexers":[], + "callProperties":[], + 
"internalSlots":[] + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/migrated_0013.tree.json b/src/parser/test/flow/types/annotations/migrated_0013.tree.json index 90b71822ba0..0c039cb84b1 100644 --- a/src/parser/test/flow/types/annotations/migrated_0013.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0013.tree.json @@ -35,6 +35,7 @@ "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":18}}, "range":[16,18], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[], diff --git a/src/parser/test/flow/types/annotations/migrated_0021.tree.json b/src/parser/test/flow/types/annotations/migrated_0021.tree.json index e3536168be0..28160a55da2 100644 --- a/src/parser/test/flow/types/annotations/migrated_0021.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0021.tree.json @@ -30,6 +30,7 @@ "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":24}}, "range":[8,24], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0022.tree.json b/src/parser/test/flow/types/annotations/migrated_0022.tree.json index a7c93ee710b..c60afe1d6ee 100644 --- a/src/parser/test/flow/types/annotations/migrated_0022.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0022.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":23}}, "range":[7,23], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0023.tree.json b/src/parser/test/flow/types/annotations/migrated_0023.tree.json index 39503593ecd..3204fa20716 100644 --- a/src/parser/test/flow/types/annotations/migrated_0023.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0023.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":48}}, "range":[7,48], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0024.tree.json b/src/parser/test/flow/types/annotations/migrated_0024.tree.json index 7646d819240..97c394f0341 100644 --- a/src/parser/test/flow/types/annotations/migrated_0024.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0024.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":58}}, "range":[7,58], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/annotations/migrated_0025.tree.json b/src/parser/test/flow/types/annotations/migrated_0025.tree.json index 49a9db30973..a14dcd0caf0 100644 --- a/src/parser/test/flow/types/annotations/migrated_0025.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0025.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":33}}, "range":[7,33], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", @@ -44,6 +45,7 @@ "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":32}}, "range":[16,32], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0026.tree.json b/src/parser/test/flow/types/annotations/migrated_0026.tree.json index ceaf87d0c8d..56dc6f82b5c 100644 --- 
a/src/parser/test/flow/types/annotations/migrated_0026.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0026.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":39}}, "range":[7,39], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0027.tree.json b/src/parser/test/flow/types/annotations/migrated_0027.tree.json index 495e58dc67e..cec1aec75fe 100644 --- a/src/parser/test/flow/types/annotations/migrated_0027.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0027.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":40}}, "range":[7,40], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0030.tree.json b/src/parser/test/flow/types/annotations/migrated_0030.tree.json index 7c8d8784636..78bf191b968 100644 --- a/src/parser/test/flow/types/annotations/migrated_0030.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0030.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":46}}, "range":[7,46], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0031.tree.json b/src/parser/test/flow/types/annotations/migrated_0031.tree.json index 369489082d3..4e7a8cf0830 100644 --- a/src/parser/test/flow/types/annotations/migrated_0031.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0031.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":29}}, "range":[7,29], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0032.tree.json b/src/parser/test/flow/types/annotations/migrated_0032.tree.json index d53842e1e66..ed0d0b2c24d 100644 --- a/src/parser/test/flow/types/annotations/migrated_0032.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0032.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":36}}, "range":[7,36], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0033.tree.json b/src/parser/test/flow/types/annotations/migrated_0033.tree.json index 85682ee63d7..ab8a641fa30 100644 --- a/src/parser/test/flow/types/annotations/migrated_0033.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0033.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":55}}, "range":[7,55], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0034.tree.json b/src/parser/test/flow/types/annotations/migrated_0034.tree.json index ecb5e69d5e3..1ed6927689e 100644 --- a/src/parser/test/flow/types/annotations/migrated_0034.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0034.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":35}}, "range":[7,35], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0035.tree.json b/src/parser/test/flow/types/annotations/migrated_0035.tree.json index 
d5e3eda7cc5..fdbf0d58aa6 100644 --- a/src/parser/test/flow/types/annotations/migrated_0035.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0035.tree.json @@ -32,6 +32,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":38}}, "range":[7,38], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0036.tree.json b/src/parser/test/flow/types/annotations/migrated_0036.tree.json index 57875a13e51..9cdb7d57da2 100644 --- a/src/parser/test/flow/types/annotations/migrated_0036.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0036.tree.json @@ -32,6 +32,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":27}}, "range":[7,27], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0037.tree.json b/src/parser/test/flow/types/annotations/migrated_0037.tree.json index 00c22eb222a..99daf3726cb 100644 --- a/src/parser/test/flow/types/annotations/migrated_0037.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0037.tree.json @@ -32,6 +32,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":29}}, "range":[7,29], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0038.tree.json b/src/parser/test/flow/types/annotations/migrated_0038.tree.json index 795d43f230f..8f470e0a68f 100644 --- a/src/parser/test/flow/types/annotations/migrated_0038.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0038.tree.json @@ -32,6 +32,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":36}}, "range":[7,36], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0057.tree.json b/src/parser/test/flow/types/annotations/migrated_0057.tree.json index c5a2b63621c..47729d414ea 100644 --- a/src/parser/test/flow/types/annotations/migrated_0057.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0057.tree.json @@ -29,8 +29,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":13}}, - "range":[11,13], + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "range":[10,13], "name":"T1", "bound":null, "variance":{ @@ -43,8 +43,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":17}}, - "range":[15,17], + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":17}}, + "range":[14,17], "name":"T2", "bound":null, "variance":{ diff --git a/src/parser/test/flow/types/annotations/migrated_0058.tree.json b/src/parser/test/flow/types/annotations/migrated_0058.tree.json index 8e4c3d2a488..7083c6092ea 100644 --- a/src/parser/test/flow/types/annotations/migrated_0058.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0058.tree.json @@ -52,6 +52,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":22}}, "range":[9,22], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0060.tree.json b/src/parser/test/flow/types/annotations/migrated_0060.tree.json index 79ba6a518de..0efe8402343 100644 --- 
a/src/parser/test/flow/types/annotations/migrated_0060.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0060.tree.json @@ -56,6 +56,7 @@ "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":32}}, "range":[18,32], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/migrated_0073.tree.json b/src/parser/test/flow/types/annotations/migrated_0073.tree.json index 41b67ebcc15..fd0f692e2f0 100644 --- a/src/parser/test/flow/types/annotations/migrated_0073.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0073.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":25}}, "range":[7,25], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/annotations/migrated_0079.tree.json b/src/parser/test/flow/types/annotations/migrated_0079.tree.json index e5bce94dcb2..58bc556a77f 100644 --- a/src/parser/test/flow/types/annotations/migrated_0079.tree.json +++ b/src/parser/test/flow/types/annotations/migrated_0079.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":33}}, "range":[9,33], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations/underscore_is_allowed_trailing_commas.js b/src/parser/test/flow/types/annotations/underscore_is_allowed_trailing_commas.js new file mode 100644 index 00000000000..0da470d2b96 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_allowed_trailing_commas.js @@ -0,0 +1,7 @@ +test< + _, + _, + number, + _, + _, +>(); diff --git a/src/parser/test/flow/types/annotations/underscore_is_allowed_trailing_commas.tree.json b/src/parser/test/flow/types/annotations/underscore_is_allowed_trailing_commas.tree.json new file mode 100644 index 00000000000..35433da0c5f --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_allowed_trailing_commas.tree.json @@ -0,0 +1,96 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":7,"column":4}}, + "range":[0,40], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":7,"column":4}}, + "range":[0,40], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":7,"column":3}}, + "range":[0,39], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "name":"test", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":7,"column":1}}, + "range":[4,37], + "params":[ + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[8,9], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":3}}, + "range":[8,9], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[13,14], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":2},"end":{"line":3,"column":3}}, + "range":[13,14], + "name":"_", + 
"typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":8}}, + "range":[18,24] + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":5,"column":2},"end":{"line":5,"column":3}}, + "range":[28,29], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":5,"column":2},"end":{"line":5,"column":3}}, + "range":[28,29], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":6,"column":2},"end":{"line":6,"column":3}}, + "range":[33,34], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":2},"end":{"line":6,"column":3}}, + "range":[33,34], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + }, + "arguments":[] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_anywhere_in_list.js b/src/parser/test/flow/types/annotations/underscore_is_implicit_anywhere_in_list.js new file mode 100644 index 00000000000..0515bd549e5 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_anywhere_in_list.js @@ -0,0 +1 @@ +test(); diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_anywhere_in_list.tree.json b/src/parser/test/flow/types/annotations/underscore_is_implicit_anywhere_in_list.tree.json new file mode 100644 index 00000000000..c263fe570ca --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_anywhere_in_list.tree.json @@ -0,0 +1,143 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":50}}, + "range":[0,50], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":50}}, + "range":[0,50], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":49}}, + "range":[0,49], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "name":"test", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":47}}, + "range":[4,47], + "params":[ + { + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11] + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, + "range":[13,14], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, + "range":[13,14], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":22}}, + "range":[16,22] + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, + "range":[24,25], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, + "range":[24,25], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + 
"typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":28}}, + "range":[27,28], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":28}}, + "range":[27,28], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":31}}, + "range":[30,31], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":31}}, + "range":[30,31], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":36}}, + "range":[33,36], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":36}}, + "range":[33,36], + "name":"Foo", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":38},"end":{"line":1,"column":41}}, + "range":[38,41], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":38},"end":{"line":1,"column":41}}, + "range":[38,41], + "name":"Bar", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + }, + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":43},"end":{"line":1,"column":46}}, + "range":[43,46], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":43},"end":{"line":1,"column":46}}, + "range":[43,46], + "name":"Baz", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + }, + "arguments":[] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_in_calls.js b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_calls.js new file mode 100644 index 00000000000..dbcf0ea1d66 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_calls.js @@ -0,0 +1 @@ +test<_>(); diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_in_calls.tree.json b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_calls.tree.json new file mode 100644 index 00000000000..f4a4ad06060 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_calls.tree.json @@ -0,0 +1,49 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":9}}, + "range":[0,9], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":4}}, + "range":[0,4], + "name":"test", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":7}}, + "range":[4,7], + "params":[ + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, 
+ "range":[5,6], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + }, + "arguments":[] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_in_constructor_calls.js b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_constructor_calls.js new file mode 100644 index 00000000000..1aeca6402db --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_constructor_calls.js @@ -0,0 +1 @@ +new test<_>(); diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_in_constructor_calls.tree.json b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_constructor_calls.tree.json new file mode 100644 index 00000000000..194caf5a759 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_constructor_calls.tree.json @@ -0,0 +1,49 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":14}}, + "range":[0,14], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":14}}, + "range":[0,14], + "expression":{ + "type":"NewExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "callee":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":8}}, + "range":[4,8], + "name":"test", + "typeAnnotation":null, + "optional":false + }, + "typeArguments":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":11}}, + "range":[8,11], + "params":[ + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, + "range":[9,10], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, + "range":[9,10], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + }, + "arguments":[] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_in_methods.js b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_methods.js new file mode 100644 index 00000000000..fac16db204a --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_methods.js @@ -0,0 +1 @@ +instance.method()<_>(); diff --git a/src/parser/test/flow/types/annotations/underscore_is_implicit_in_methods.tree.json b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_methods.tree.json new file mode 100644 index 00000000000..67761bf3b90 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_implicit_in_methods.tree.json @@ -0,0 +1,70 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":23}}, + "range":[0,23], + "body":[ + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":23}}, + "range":[0,23], + "expression":{ + "type":"CallExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":22}}, + "range":[0,22], + "callee":{ + "type":"CallExpression", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":17}}, + "range":[0,17], + "callee":{ + "type":"MemberExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":15}}, + "range":[0,15], + "object":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":8}}, + "range":[0,8], + "name":"instance", + "typeAnnotation":null, + "optional":false + }, + "property":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, + "range":[9,15], + "name":"method", + "typeAnnotation":null, + "optional":false + }, + "computed":false + }, + "typeArguments":null, + "arguments":[] + }, + "typeArguments":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":20}}, + "range":[17,20], + "params":[ + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":19}}, + "range":[18,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":19}}, + "range":[18,19], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + }, + "arguments":[] + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/underscore_is_reserved_elsewhere.js b/src/parser/test/flow/types/annotations/underscore_is_reserved_elsewhere.js new file mode 100644 index 00000000000..076632f9307 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_reserved_elsewhere.js @@ -0,0 +1,7 @@ +var x: Generic<_> = 3; +(x: Generic<_>); +var y: _ = 3; +(y: _); + +type _ = number; +opaque type _ = number; diff --git a/src/parser/test/flow/types/annotations/underscore_is_reserved_elsewhere.tree.json b/src/parser/test/flow/types/annotations/underscore_is_reserved_elsewhere.tree.json new file mode 100644 index 00000000000..625b3742ff2 --- /dev/null +++ b/src/parser/test/flow/types/annotations/underscore_is_reserved_elsewhere.tree.json @@ -0,0 +1,281 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, + "message":"Unexpected reserved type" + }, + { + "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, + "message":"Unexpected reserved type" + }, + { + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "message":"Unexpected reserved type" + }, + { + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":5}}, + "message":"Unexpected reserved type" + }, + { + "loc":{"source":null,"start":{"line":6,"column":5},"end":{"line":6,"column":6}}, + "message":"Unexpected reserved type" + }, + { + "loc":{"source":null,"start":{"line":7,"column":12},"end":{"line":7,"column":13}}, + "message":"Unexpected reserved type" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":7,"column":23}}, + "range":[0,103], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":22}}, + "range":[0,22], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":21}}, + "range":[4,21], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":17}}, + "range":[4,17], + "name":"x", + "typeAnnotation":{ + 
"type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":17}}, + "range":[5,17], + "typeAnnotation":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, + "range":[7,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":14}}, + "range":[7,14], + "name":"Generic", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":17}}, + "range":[14,17], + "params":[ + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, + "range":[15,16], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, + "range":[15,16], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + } + } + }, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, + "range":[20,21], + "value":3, + "raw":"3" + } + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":16}}, + "range":[23,39], + "expression":{ + "type":"TypeCastExpression", + "loc":{"source":null,"start":{"line":2,"column":1},"end":{"line":2,"column":14}}, + "range":[24,37], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":1},"end":{"line":2,"column":2}}, + "range":[24,25], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":2},"end":{"line":2,"column":14}}, + "range":[25,37], + "typeAnnotation":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":14}}, + "range":[27,37], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":4},"end":{"line":2,"column":11}}, + "range":[27,34], + "name":"Generic", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":2,"column":11},"end":{"line":2,"column":14}}, + "range":[34,37], + "params":[ + { + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, + "range":[35,36], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, + "range":[35,36], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + ] + } + } + } + }, + "directive":null + }, + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":13}}, + "range":[40,53], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":12}}, + "range":[44,52], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":4},"end":{"line":3,"column":8}}, + "range":[44,48], + "name":"y", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":3,"column":5},"end":{"line":3,"column":8}}, + "range":[45,48], + "typeAnnotation":{ + "type":"GenericTypeAnnotation", + 
"loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[47,48], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":3,"column":7},"end":{"line":3,"column":8}}, + "range":[47,48], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + }, + "optional":false + }, + "init":{ + "type":"Literal", + "loc":{"source":null,"start":{"line":3,"column":11},"end":{"line":3,"column":12}}, + "range":[51,52], + "value":3, + "raw":"3" + } + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":7}}, + "range":[54,61], + "expression":{ + "type":"TypeCastExpression", + "loc":{"source":null,"start":{"line":4,"column":1},"end":{"line":4,"column":5}}, + "range":[55,59], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":1},"end":{"line":4,"column":2}}, + "range":[55,56], + "name":"y", + "typeAnnotation":null, + "optional":false + }, + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":4,"column":2},"end":{"line":4,"column":5}}, + "range":[56,59], + "typeAnnotation":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":5}}, + "range":[58,59], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":4,"column":4},"end":{"line":4,"column":5}}, + "range":[58,59], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null + } + } + }, + "directive":null + }, + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":6,"column":0},"end":{"line":6,"column":16}}, + "range":[63,79], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":6,"column":5},"end":{"line":6,"column":6}}, + "range":[68,69], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":6,"column":9},"end":{"line":6,"column":15}}, + "range":[72,78] + } + }, + { + "type":"OpaqueType", + "loc":{"source":null,"start":{"line":7,"column":0},"end":{"line":7,"column":23}}, + "range":[80,103], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":7,"column":12},"end":{"line":7,"column":13}}, + "range":[92,93], + "name":"_", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "impltype":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":7,"column":16},"end":{"line":7,"column":22}}, + "range":[96,102] + }, + "supertype":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/annotations/unfinished_colon_method.tree.json b/src/parser/test/flow/types/annotations/unfinished_colon_method.tree.json index be5d318a094..488151b40d6 100644 --- a/src/parser/test/flow/types/annotations/unfinished_colon_method.tree.json +++ b/src/parser/test/flow/types/annotations/unfinished_colon_method.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":23}}, - "message":"Unexpected token 42" + "message":"Unexpected number, expected the token `:`" }, { "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":24}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":27}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected 
the token `{`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `,`" } ], "type":"Program", @@ -79,6 +83,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":26}}, "range":[12,26], + "inexact":false, "exact":false, "properties":[ { diff --git a/src/parser/test/flow/types/annotations/unfinished_colon_param.tree.json b/src/parser/test/flow/types/annotations/unfinished_colon_param.tree.json index 37c397d18a5..398c563e440 100644 --- a/src/parser/test/flow/types/annotations/unfinished_colon_param.tree.json +++ b/src/parser/test/flow/types/annotations/unfinished_colon_param.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":23}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations/unfinished_colon_return.tree.json b/src/parser/test/flow/types/annotations/unfinished_colon_return.tree.json index d181f59bc6c..6ba264ddefb 100644 --- a/src/parser/test/flow/types/annotations/unfinished_colon_return.tree.json +++ b/src/parser/test/flow/types/annotations/unfinished_colon_return.tree.json @@ -2,11 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, "message":"Unexpected end of input" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" } ], "type":"Program", @@ -44,6 +52,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":2,"column":0}}, "range":[16,26], + "inexact":false, "exact":false, "properties":[ { diff --git a/src/parser/test/flow/types/annotations/unfinished_function_body.tree.json b/src/parser/test/flow/types/annotations/unfinished_function_body.tree.json index e294c170f31..5d690ebfdba 100644 --- a/src/parser/test/flow/types/annotations/unfinished_function_body.tree.json +++ b/src/parser/test/flow/types/annotations/unfinished_function_body.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `{`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", @@ -40,6 +44,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":18}}, "range":[16,18], + "inexact":false, "exact":false, "properties":[], "indexers":[], diff --git a/src/parser/test/flow/types/annotations/void_is_reserved_param.tree.json b/src/parser/test/flow/types/annotations/void_is_reserved_param.tree.json index 51d8e8bb79c..b5cfe0381e9 100644 --- 
a/src/parser/test/flow/types/annotations/void_is_reserved_param.tree.json +++ b/src/parser/test/flow/types/annotations/void_is_reserved_param.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":14}}, - "message":"Unexpected token void" + "message":"Unexpected token `void`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations_in_comments/migrated_0012.tree.json b/src/parser/test/flow/types/annotations_in_comments/migrated_0012.tree.json index be9191133b7..d159ac7e036 100644 --- a/src/parser/test/flow/types/annotations_in_comments/migrated_0012.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments/migrated_0012.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":4,"column":1}}, "range":[17,39], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations_in_comments/migrated_0013.tree.json b/src/parser/test/flow/types/annotations_in_comments/migrated_0013.tree.json index efebbe92611..eb6086b965c 100644 --- a/src/parser/test/flow/types/annotations_in_comments/migrated_0013.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments/migrated_0013.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":4,"column":1}}, "range":[27,49], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0000.tree.json index ad4aac65348..5afcb336735 100644 --- a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0000.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0001.tree.json b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0001.tree.json index c95c532ec0c..714f08cd78f 100644 --- a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0001.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":8}}, - "message":"Unexpected token /*:" + "message":"Unexpected token `/*:`" }, { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":8}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0002.tree.json b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0002.tree.json index e80075ba61f..59e7dffcd7d 100644 --- 
a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0002.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0002.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":9}}, - "message":"Unexpected token /* :" + "message":"Unexpected token `/* :`" }, { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":9}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0003.tree.json b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0003.tree.json index f55c000899a..b38b403e835 100644 --- a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0003.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0003.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":9}}, - "message":"Unexpected token /*::" + "message":"Unexpected token `/*::`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0004.tree.json b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0004.tree.json index 44a150ff03f..3c70ff35c47 100644 --- a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0004.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0004.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":19}}, - "message":"Unexpected token /*flow-include" + "message":"Unexpected token `/*flow-include`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0005.tree.json b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0005.tree.json index 4e28638a1f9..884dbf1bb6b 100644 --- a/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0005.tree.json +++ b/src/parser/test/flow/types/annotations_in_comments_invalid/migrated_0005.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":1}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`" }, { "loc":{"source":null,"start":{"line":1,"column":2},"end":{"line":1,"column":3}}, @@ -10,16 +10,16 @@ } ], "type":"Program", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,3], "body":[ { "type":"ExpressionStatement", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,3], "expression":{ "type":"BinaryExpression", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":3}}, + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, "range":[0,3], "operator":"*", "left":{ @@ -31,7 +31,7 @@ }, "right":{ "type":"Literal", - "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":3}}, + 
"loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":2,"column":0}}, "range":[1,3], "value":null, "raw":"//", diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0000.js b/src/parser/test/flow/types/bigint_literal/migrated_0000.js new file mode 100644 index 00000000000..db40a8a5943 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0000.js @@ -0,0 +1 @@ +var a: 123n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0000.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0000.tree.json new file mode 100644 index 00000000000..7ea1a79b402 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0000.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":11}}, + "range":[4,11], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":11}}, + "range":[4,11], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":11}}, + "range":[7,11], + "value":null, + "raw":"123n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0002.js b/src/parser/test/flow/types/bigint_literal/migrated_0002.js new file mode 100644 index 00000000000..ca902f13d19 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0002.js @@ -0,0 +1 @@ +var a: 0x7Bn diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0002.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0002.tree.json new file mode 100644 index 00000000000..244f004235b --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0002.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":12}}, + "range":[4,12], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":12}}, + "range":[4,12], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":12}}, + "range":[5,12], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":12}}, + "range":[7,12], + "value":null, + "raw":"0x7Bn" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0003.js b/src/parser/test/flow/types/bigint_literal/migrated_0003.js new file mode 100644 index 00000000000..0472121dffd --- /dev/null +++ 
b/src/parser/test/flow/types/bigint_literal/migrated_0003.js @@ -0,0 +1 @@ +var a: 0b1111011n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0003.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0003.tree.json new file mode 100644 index 00000000000..2786bc08cf4 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0003.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":17}}, + "range":[0,17], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":17}}, + "range":[0,17], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":17}}, + "range":[4,17], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":17}}, + "range":[4,17], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":17}}, + "range":[5,17], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, + "range":[7,17], + "value":null, + "raw":"0b1111011n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0004.js b/src/parser/test/flow/types/bigint_literal/migrated_0004.js new file mode 100644 index 00000000000..319d239920a --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0004.js @@ -0,0 +1 @@ +var a: 0o173n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0004.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0004.tree.json new file mode 100644 index 00000000000..bb83968e9ea --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0004.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":13}}, + "range":[4,13], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":13}}, + "range":[4,13], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":13}}, + "range":[5,13], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":13}}, + "range":[7,13], + "value":null, + "raw":"0o173n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0005.js b/src/parser/test/flow/types/bigint_literal/migrated_0005.js new file mode 100644 index 00000000000..48ec2c00b2a --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0005.js @@ -0,0 +1 @@ +var a: -123n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0005.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0005.tree.json new file mode 100644 index 00000000000..cd2e8d32cd6 --- /dev/null +++ 
b/src/parser/test/flow/types/bigint_literal/migrated_0005.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":12}}, + "range":[4,12], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":12}}, + "range":[4,12], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":12}}, + "range":[5,12], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":12}}, + "range":[7,12], + "value":null, + "raw":"-123n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0006.js b/src/parser/test/flow/types/bigint_literal/migrated_0006.js new file mode 100644 index 00000000000..56a2d783ee8 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0006.js @@ -0,0 +1 @@ +var a: - 123n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0006.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0006.tree.json new file mode 100644 index 00000000000..41f65964e8a --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0006.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":13}}, + "range":[4,13], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":13}}, + "range":[4,13], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":13}}, + "range":[5,13], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":13}}, + "range":[7,13], + "value":null, + "raw":"- 123n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0007.js b/src/parser/test/flow/types/bigint_literal/migrated_0007.js new file mode 100644 index 00000000000..8114a44da2c --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0007.js @@ -0,0 +1 @@ +var a: 25257156155n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0007.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0007.tree.json new file mode 100644 index 00000000000..3035a48b713 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0007.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":19}}, + "range":[0,19], + "body":[ + { + "type":"VariableDeclaration", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":19}}, + "range":[0,19], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":19}}, + "range":[4,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":19}}, + "range":[4,19], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":19}}, + "range":[5,19], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":19}}, + "range":[7,19], + "value":null, + "raw":"25257156155n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0008.js b/src/parser/test/flow/types/bigint_literal/migrated_0008.js new file mode 100644 index 00000000000..9cc9435b2c7 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0008.js @@ -0,0 +1 @@ +var a: 0x5E1719E3Bn diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0008.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0008.tree.json new file mode 100644 index 00000000000..c8f1078f9e5 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0008.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":19}}, + "range":[0,19], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":19}}, + "range":[0,19], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":19}}, + "range":[4,19], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":19}}, + "range":[4,19], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":19}}, + "range":[5,19], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":19}}, + "range":[7,19], + "value":null, + "raw":"0x5E1719E3Bn" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0009.js b/src/parser/test/flow/types/bigint_literal/migrated_0009.js new file mode 100644 index 00000000000..d951a7ea1c0 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0009.js @@ -0,0 +1 @@ +var a: 0o274134317073n diff --git a/src/parser/test/flow/types/bigint_literal/migrated_0009.tree.json b/src/parser/test/flow/types/bigint_literal/migrated_0009.tree.json new file mode 100644 index 00000000000..443cf1e21b5 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal/migrated_0009.tree.json @@ -0,0 +1,41 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":22}}, + "range":[0,22], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":22}}, + "range":[0,22], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":22}}, + "range":[4,22], + "id":{ + "type":"Identifier", + 
"loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":22}}, + "range":[4,22], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":22}}, + "range":[5,22], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":22}}, + "range":[7,22], + "value":null, + "raw":"0o274134317073n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0000.js b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0000.js new file mode 100644 index 00000000000..a9fa30e3b0e --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0000.js @@ -0,0 +1 @@ +var a: 0173n diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0000.tree.json new file mode 100644 index 00000000000..3fa63f2d9bc --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0000.tree.json @@ -0,0 +1,69 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":12}}, + "message":"Unexpected token ILLEGAL" + }, + { + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "message":"Octal literals are not allowed in strict mode." + }, + { + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "message":"Unexpected identifier, expected the token `;`" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, + "range":[0,12], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":11}}, + "range":[4,11], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":11}}, + "range":[4,11], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "typeAnnotation":{ + "type":"NumberLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":11}}, + "range":[7,11], + "value":123, + "raw":"0173" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "range":[11,12], + "expression":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":12}}, + "range":[11,12], + "name":"n", + "typeAnnotation":null, + "optional":false + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0001.js b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0001.js new file mode 100644 index 00000000000..02d4d969dc8 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0001.js @@ -0,0 +1 @@ +var a: 123.0n diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0001.tree.json b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0001.tree.json new file mode 
100644 index 00000000000..09bcf2ce18c --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0001.tree.json @@ -0,0 +1,47 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":13}}, + "message":"A bigint literal must be an integer" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":13}}, + "range":[4,13], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":13}}, + "range":[4,13], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":13}}, + "range":[5,13], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":13}}, + "range":[7,13], + "value":null, + "raw":"123.0n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0002.js b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0002.js new file mode 100644 index 00000000000..8c345309cb7 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0002.js @@ -0,0 +1 @@ +var a: 123-3n diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0002.tree.json b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0002.tree.json new file mode 100644 index 00000000000..9d08dc5d2b5 --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0002.tree.json @@ -0,0 +1,67 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, + "message":"Unexpected token `-`, expected the token `;`" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":13}}, + "range":[0,13], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":10}}, + "range":[0,10], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":10}}, + "range":[4,10], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":10}}, + "range":[4,10], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":10}}, + "range":[5,10], + "typeAnnotation":{ + "type":"NumberLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":10}}, + "range":[7,10], + "value":123, + "raw":"123" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + }, + { + "type":"ExpressionStatement", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "range":[10,13], + "expression":{ + "type":"UnaryExpression", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "range":[10,13], + "operator":"-", + "prefix":true, + "argument":{ + "type":"BigIntLiteral", + 
"loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":13}}, + "range":[11,13], + "value":null, + "bigint":"3n" + } + }, + "directive":null + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0010.js b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0010.js new file mode 100644 index 00000000000..116efc7277c --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0010.js @@ -0,0 +1 @@ +var a: 1e5n diff --git a/src/parser/test/flow/types/bigint_literal_invalid/migrated_0010.tree.json b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0010.tree.json new file mode 100644 index 00000000000..3614c19330f --- /dev/null +++ b/src/parser/test/flow/types/bigint_literal_invalid/migrated_0010.tree.json @@ -0,0 +1,47 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":11}}, + "message":"A bigint literal cannot use exponential notation" + } + ], + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "body":[ + { + "type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":11}}, + "range":[0,11], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":11}}, + "range":[4,11], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":11}}, + "range":[4,11], + "name":"a", + "typeAnnotation":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, + "range":[5,11], + "typeAnnotation":{ + "type":"BigIntLiteralTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":11}}, + "range":[7,11], + "value":null, + "raw":"1e5n" + } + }, + "optional":false + }, + "init":null + } + ], + "kind":"var" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/declare_class/invalid_quoted_static_modifier.tree.json b/src/parser/test/flow/types/declare_class/invalid_quoted_static_modifier.tree.json index 6a7440a82f3..f5dd171426b 100644 --- a/src/parser/test/flow/types/declare_class/invalid_quoted_static_modifier.tree.json +++ b/src/parser/test/flow/types/declare_class/invalid_quoted_static_modifier.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":30}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `=>`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_class/multiple_extends.tree.json b/src/parser/test/flow/types/declare_class/multiple_extends.tree.json index 8b06434cd15..ff5fce5f6b9 100644 --- a/src/parser/test/flow/types/declare_class/multiple_extends.tree.json +++ b/src/parser/test/flow/types/declare_class/multiple_extends.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":26}}, - "message":"Unexpected token ," + "message":"Unexpected token `,`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected the token `:`" 
}, { "loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":31}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_class/static_proto.tree.json b/src/parser/test/flow/types/declare_class/static_proto.tree.json index fa666584649..b7e387f5ca7 100644 --- a/src/parser/test/flow/types/declare_class/static_proto.tree.json +++ b/src/parser/test/flow/types/declare_class/static_proto.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":32}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_class/truncated0.tree.json b/src/parser/test/flow/types/declare_class/truncated0.tree.json index e6f2806d095..72ca3cccb18 100644 --- a/src/parser/test/flow/types/declare_class/truncated0.tree.json +++ b/src/parser/test/flow/types/declare_class/truncated0.tree.json @@ -2,11 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":1}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected an identifier" + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, + "message":"Unexpected end of input, expected the token `:`" }, { "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, "message":"Unexpected end of input" + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_class/truncated1.tree.json b/src/parser/test/flow/types/declare_class/truncated1.tree.json index 8e278eff9a8..58c4b0b0410 100644 --- a/src/parser/test/flow/types/declare_class/truncated1.tree.json +++ b/src/parser/test/flow/types/declare_class/truncated1.tree.json @@ -2,11 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":1}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected an identifier" + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, + "message":"Unexpected end of input, expected the token `:`" }, { "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, "message":"Unexpected end of input" + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_class/truncated2.tree.json b/src/parser/test/flow/types/declare_class/truncated2.tree.json index 04ff85b423c..7754cb8e482 100644 --- 
a/src/parser/test/flow/types/declare_class/truncated2.tree.json +++ b/src/parser/test/flow/types/declare_class/truncated2.tree.json @@ -2,11 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":1}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected an identifier" + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, + "message":"Unexpected end of input, expected the token `:`" }, { "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, "message":"Unexpected end of input" + }, + { + "loc":{"source":null,"start":{"line":4,"column":0},"end":{"line":4,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_non_default_type.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_non_default_type.tree.json index 640e84e49a4..981855502c7 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_non_default_type.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_non_default_type.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":15},"end":{"line":2,"column":21}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `{`" }, { "loc":{"source":null,"start":{"line":2,"column":21},"end":{"line":2,"column":22}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0000.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0000.tree.json index 20c207067ee..522fc6f42c9 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0000.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0000.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, @@ -14,7 +14,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":32}}, @@ -26,11 +26,11 @@ }, { "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0001.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0001.tree.json index 8655b338a6d..0ead274553e 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0001.tree.json +++ 
b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0001.tree.json @@ -6,11 +6,11 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":23}}, diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0002.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0002.tree.json index 2408e63de9d..3e24fc55c33 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0002.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0002.tree.json @@ -6,11 +6,11 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":26}}, @@ -22,7 +22,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":32}}, - "message":"Unexpected string" + "message":"Unexpected string, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, @@ -30,11 +30,11 @@ }, { "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0003.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0003.tree.json index da9c9102da5..77e1c332d76 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0003.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0003.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, @@ -18,7 +18,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":26}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":33}}, @@ -30,15 +30,15 @@ }, { "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":34}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" }, { 
"loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":23}}, - "message":"Unexpected token var" + "message":"Unexpected token `var`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0004.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0004.tree.json index 68b6a432ed9..bc7392bd15c 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0004.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0004.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":32}}, @@ -18,7 +18,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":32},"end":{"line":1,"column":33}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":34}}, @@ -26,7 +26,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":34}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, @@ -34,7 +34,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":40}}, @@ -42,15 +42,15 @@ }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":28}}, - "message":"Unexpected token function" + "message":"Unexpected token `function`" }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":40}}, - "message":"Unexpected token void" + "message":"Unexpected token `void`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0005.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0005.tree.json index 34c7a9f2d34..a592735a14a 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0005.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0005.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":27}}, @@ -18,11 +18,11 @@ }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":29}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":25}}, - "message":"Unexpected token class" + "message":"Unexpected token `class`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0006.tree.json 
b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0006.tree.json index 9c62e0fcd9f..dfa2cde0e71 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0006.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0006.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":34}}, @@ -18,15 +18,15 @@ }, { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":27}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0007.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0007.tree.json index d497f251e8f..0bb2dd7bb8d 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0007.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0007.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":36}}, @@ -22,7 +22,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":40},"end":{"line":1,"column":41}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":41},"end":{"line":1,"column":42}}, @@ -30,7 +30,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":41},"end":{"line":1,"column":42}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":42},"end":{"line":1,"column":43}}, @@ -38,7 +38,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":42},"end":{"line":1,"column":43}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":44},"end":{"line":1,"column":48}}, @@ -46,19 +46,19 @@ }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":27}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`" }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":36}}, - "message":"Unexpected token function" + "message":"Unexpected token `function`" }, { "loc":{"source":null,"start":{"line":1,"column":44},"end":{"line":1,"column":48}}, - "message":"Unexpected token void" + "message":"Unexpected token `void`" } ], "type":"Program", diff --git 
a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0008.tree.json b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0008.tree.json index 74abac31815..0f185d4447a 100644 --- a/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0008.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/export_type/migrated_0008.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":19}}, - "message":"Unexpected token type" + "message":"Unexpected token `type`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":33}}, @@ -22,15 +22,15 @@ }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":27}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`" }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":33}}, - "message":"Unexpected token class" + "message":"Unexpected token `class`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0009.tree.json b/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0009.tree.json index e533267cc04..13576e51bfd 100644 --- a/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0009.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0009.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":18}}, - "message":"Unexpected token let" + "message":"Unexpected token `let`, expected the token `var`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0010.tree.json b/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0010.tree.json index 44933ed6183..d7cb819b664 100644 --- a/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0010.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/let_const/migrated_0010.tree.json @@ -6,7 +6,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":20}}, - "message":"Unexpected token const" + "message":"Unexpected token `const`, expected the token `var`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_export_invalid/migrated_0012.tree.json b/src/parser/test/flow/types/declare_export_invalid/migrated_0012.tree.json index 02e7cefd855..c75f6c3de28 100644 --- a/src/parser/test/flow/types/declare_export_invalid/migrated_0012.tree.json +++ b/src/parser/test/flow/types/declare_export_invalid/migrated_0012.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":29},"end":{"line":2,"column":30}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, diff --git a/src/parser/test/flow/types/declare_module_exports_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/declare_module_exports_invalid/migrated_0000.tree.json index 6d1da9bd5b7..72c0c885ef0 100644 --- a/src/parser/test/flow/types/declare_module_exports_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/types/declare_module_exports_invalid/migrated_0000.tree.json @@ -1,5 
+1,9 @@ { "errors":[ + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the token `:`" + }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, "message":"Unexpected end of input" diff --git a/src/parser/test/flow/types/declare_module_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/declare_module_invalid/migrated_0000.tree.json index 280fec437a5..c05e41c060f 100644 --- a/src/parser/test/flow/types/declare_module_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/types/declare_module_invalid/migrated_0000.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":14}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_module_invalid/migrated_0001.tree.json b/src/parser/test/flow/types/declare_module_invalid/migrated_0001.tree.json index f93b274ffd8..fe5455a21c1 100644 --- a/src/parser/test/flow/types/declare_module_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/types/declare_module_invalid/migrated_0001.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_module_invalid/migrated_0002.tree.json b/src/parser/test/flow/types/declare_module_invalid/migrated_0002.tree.json index d0feb8f2372..3a9738a5c91 100644 --- a/src/parser/test/flow/types/declare_module_invalid/migrated_0002.tree.json +++ b/src/parser/test/flow/types/declare_module_invalid/migrated_0002.tree.json @@ -2,19 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":35}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `.`" }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected the identifier `exports`" }, { "loc":{"source":null,"start":{"line":1,"column":37},"end":{"line":1,"column":38}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":1,"column":39},"end":{"line":1,"column":40}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_module_invalid/migrated_0003.tree.json 
b/src/parser/test/flow/types/declare_module_invalid/migrated_0003.tree.json index e0ce38d170d..fe6418387a5 100644 --- a/src/parser/test/flow/types/declare_module_invalid/migrated_0003.tree.json +++ b/src/parser/test/flow/types/declare_module_invalid/migrated_0003.tree.json @@ -2,43 +2,51 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":25}}, - "message":"Unexpected token export" + "message":"Unexpected token `export`, expected the token `declare`" }, { "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":33}}, - "message":"Unexpected token default" + "message":"Unexpected token `default`, expected the token `var`" }, { "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":42}}, - "message":"Unexpected token function" + "message":"Unexpected token `function`" }, { "loc":{"source":null,"start":{"line":1,"column":43},"end":{"line":1,"column":46}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":43},"end":{"line":1,"column":46}}, + "message":"Unexpected identifier, expected the token `declare`" }, { "loc":{"source":null,"start":{"line":1,"column":46},"end":{"line":1,"column":47}}, - "message":"Unexpected token (" + "message":"Unexpected token `(`, expected the token `var`" }, { "loc":{"source":null,"start":{"line":1,"column":47},"end":{"line":1,"column":48}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected an identifier" + }, + { + "loc":{"source":null,"start":{"line":1,"column":49},"end":{"line":1,"column":50}}, + "message":"Unexpected token `{`, expected the token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":49},"end":{"line":1,"column":50}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected the token `declare`" }, { "loc":{"source":null,"start":{"line":1,"column":50},"end":{"line":1,"column":51}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `var`" }, { "loc":{"source":null,"start":{"line":1,"column":52},"end":{"line":1,"column":53}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_module_invalid/migrated_0005.tree.json b/src/parser/test/flow/types/declare_module_invalid/migrated_0005.tree.json index 2694ea78c6d..2b86113df7d 100644 --- a/src/parser/test/flow/types/declare_module_invalid/migrated_0005.tree.json +++ b/src/parser/test/flow/types/declare_module_invalid/migrated_0005.tree.json @@ -6,11 +6,11 @@ }, { "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":29}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `(`" }, { "loc":{"source":null,"start":{"line":1,"column":35},"end":{"line":1,"column":39}}, - "message":"Unexpected string" + "message":"Unexpected string, expected the token `)`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/declare_statements_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/declare_statements_invalid/migrated_0000.tree.json index 6eebe34d2b6..a550bd9f0f9 100644 --- a/src/parser/test/flow/types/declare_statements_invalid/migrated_0000.tree.json +++ 
b/src/parser/test/flow/types/declare_statements_invalid/migrated_0000.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":22},"end":{"line":2,"column":23}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, diff --git a/src/parser/test/flow/types/exact_objects/migrated_0000.tree.json b/src/parser/test/flow/types/exact_objects/migrated_0000.tree.json index b12363cbf86..ff3032c5b51 100644 --- a/src/parser/test/flow/types/exact_objects/migrated_0000.tree.json +++ b/src/parser/test/flow/types/exact_objects/migrated_0000.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":35}}, "range":[9,35], "exact":true, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/exact_objects/migrated_0001.tree.json b/src/parser/test/flow/types/exact_objects/migrated_0001.tree.json index 6c3d61a3071..f67b0c82a22 100644 --- a/src/parser/test/flow/types/exact_objects/migrated_0001.tree.json +++ b/src/parser/test/flow/types/exact_objects/migrated_0001.tree.json @@ -26,6 +26,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":36}}, "range":[9,36], "exact":true, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/function_predicates/migrated_0005.tree.json b/src/parser/test/flow/types/function_predicates/migrated_0005.tree.json index 2f793cba166..7db82c792cb 100644 --- a/src/parser/test/flow/types/function_predicates/migrated_0005.tree.json +++ b/src/parser/test/flow/types/function_predicates/migrated_0005.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":31}}, - "message":"Unexpected token %" + "message":"Unexpected token `%`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":31}}, + "message":"Unexpected token `%`" } ], "type":"Program", @@ -96,6 +100,7 @@ "typeAnnotation":null, "optional":false }, + "typeArguments":null, "arguments":[ { "type":"BinaryExpression", diff --git a/src/parser/test/flow/types/function_predicates/migrated_0006.tree.json b/src/parser/test/flow/types/function_predicates/migrated_0006.tree.json index ffb40922d72..52a671b00e9 100644 --- a/src/parser/test/flow/types/function_predicates/migrated_0006.tree.json +++ b/src/parser/test/flow/types/function_predicates/migrated_0006.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":46},"end":{"line":1,"column":49}}, - "message":"Unexpected token var" + "message":"Unexpected token `var`" }, { "loc":{"source":null,"start":{"line":1,"column":46},"end":{"line":1,"column":49}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":50},"end":{"line":1,"column":51}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":77},"end":{"line":1,"column":78}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":77},"end":{"line":1,"column":78}}, + "message":"Unexpected token `)`, expected the start of a statement" } ], "type":"Program", diff --git 
a/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics.js b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics.js new file mode 100644 index 00000000000..70b1d5fb64b --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics.js @@ -0,0 +1 @@ +type T = Array<(string) => number> diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics.tree.json new file mode 100644 index 00000000000..ae6c5554913 --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics.tree.json @@ -0,0 +1,68 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":34}}, + "range":[0,34], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":34}}, + "range":[0,34], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":34}}, + "range":[9,34], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":14}}, + "range":[9,14], + "name":"Array", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":34}}, + "range":[14,34], + "params":[ + { + "type":"FunctionTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":33}}, + "range":[15,33], + "params":[ + { + "type":"FunctionTypeParam", + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":22}}, + "range":[16,22], + "name":null, + "typeAnnotation":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":22}}, + "range":[16,22] + }, + "optional":false + } + ], + "returnType":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":33}}, + "range":[27,33] + }, + "rest":null, + "typeParameters":null + } + ] + } + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics_inside_return.js b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics_inside_return.js new file mode 100644 index 00000000000..2797adfd267 --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics_inside_return.js @@ -0,0 +1 @@ +let x = (): Array<(string) => number> => [] diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics_inside_return.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics_inside_return.tree.json new file mode 100644 index 00000000000..a22497067e7 --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/inside_generics_inside_return.tree.json @@ -0,0 +1,98 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":43}}, + "range":[0,43], + "body":[ + { + 
"type":"VariableDeclaration", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":43}}, + "range":[0,43], + "declarations":[ + { + "type":"VariableDeclarator", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":43}}, + "range":[4,43], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":4},"end":{"line":1,"column":5}}, + "range":[4,5], + "name":"x", + "typeAnnotation":null, + "optional":false + }, + "init":{ + "type":"ArrowFunctionExpression", + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":43}}, + "range":[8,43], + "id":null, + "params":[], + "body":{ + "type":"ArrayExpression", + "loc":{"source":null,"start":{"line":1,"column":41},"end":{"line":1,"column":43}}, + "range":[41,43], + "elements":[] + }, + "async":false, + "generator":false, + "predicate":null, + "expression":true, + "returnType":{ + "type":"TypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":37}}, + "range":[10,37], + "typeAnnotation":{ + "type":"GenericTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":37}}, + "range":[12,37], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":17}}, + "range":[12,17], + "name":"Array", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":{ + "type":"TypeParameterInstantiation", + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":37}}, + "range":[17,37], + "params":[ + { + "type":"FunctionTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":36}}, + "range":[18,36], + "params":[ + { + "type":"FunctionTypeParam", + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":25}}, + "range":[19,25], + "name":null, + "typeAnnotation":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":25}}, + "range":[19,25] + }, + "optional":false + } + ], + "returnType":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":30},"end":{"line":1,"column":36}}, + "range":[30,36] + }, + "rest":null, + "typeParameters":null + } + ] + } + } + }, + "typeParameters":null + } + } + ], + "kind":"let" + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/invalid_some_named_some_anonymous.js b/src/parser/test/flow/types/function_types_with_anonymous_parameters/invalid_some_named_some_anonymous.js new file mode 100644 index 00000000000..8c6fc6b2a6c --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/invalid_some_named_some_anonymous.js @@ -0,0 +1 @@ +type T = (arg: string number) => void diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/invalid_some_named_some_anonymous.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/invalid_some_named_some_anonymous.tree.json new file mode 100644 index 00000000000..8b1be244c84 --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/invalid_some_named_some_anonymous.tree.json @@ -0,0 +1,61 @@ +{ + "errors":[ + { + "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":28}}, + "message":"Unexpected token `number`, expected the token `,`" + } + ], + "type":"Program", + 
"loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":37}}, + "range":[0,37], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":37}}, + "range":[0,37], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"FunctionTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":37}}, + "range":[9,37], + "params":[ + { + "type":"FunctionTypeParam", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":21}}, + "range":[10,21], + "name":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "range":[10,13], + "name":"arg", + "typeAnnotation":null, + "optional":false + }, + "typeAnnotation":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":21}}, + "range":[15,21] + }, + "optional":false + } + ], + "returnType":{ + "type":"VoidTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":33},"end":{"line":1,"column":37}}, + "range":[33,37] + }, + "rest":null, + "typeParameters":null + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0011.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0011.tree.json index 6d624fff092..9df2ad7d0c4 100644 --- a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0011.tree.json +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0011.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":29},"end":{"line":2,"column":31}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":29},"end":{"line":2,"column":31}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0012.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0012.tree.json index eea7a782551..ce644d2b035 100644 --- a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0012.tree.json +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0012.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":38},"end":{"line":2,"column":40}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":38},"end":{"line":2,"column":40}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0013.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0013.tree.json index ffc35143ea8..73cd5d4a391 100644 --- a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0013.tree.json +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0013.tree.json @@ -2,7 +2,11 @@ "errors":[ { 
"loc":{"source":null,"start":{"line":2,"column":30},"end":{"line":2,"column":32}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":30},"end":{"line":2,"column":32}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0026.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0026.tree.json index 6f6c491cc95..39fc22cc868 100644 --- a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0026.tree.json +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0026.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":27},"end":{"line":2,"column":29}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":27},"end":{"line":2,"column":29}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0027.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0027.tree.json index 5e18f04987a..ddf5a47bd50 100644 --- a/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0027.tree.json +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/migrated_0027.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":36},"end":{"line":2,"column":38}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":36},"end":{"line":2,"column":38}}, + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/some_named_some_anonymous.js b/src/parser/test/flow/types/function_types_with_anonymous_parameters/some_named_some_anonymous.js new file mode 100644 index 00000000000..ee6c187e843 --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/some_named_some_anonymous.js @@ -0,0 +1 @@ +type T = (arg: string, number) => void diff --git a/src/parser/test/flow/types/function_types_with_anonymous_parameters/some_named_some_anonymous.tree.json b/src/parser/test/flow/types/function_types_with_anonymous_parameters/some_named_some_anonymous.tree.json new file mode 100644 index 00000000000..553efc8aa6f --- /dev/null +++ b/src/parser/test/flow/types/function_types_with_anonymous_parameters/some_named_some_anonymous.tree.json @@ -0,0 +1,67 @@ +{ + "type":"Program", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":38}}, + "range":[0,38], + "body":[ + { + "type":"TypeAlias", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":38}}, + "range":[0,38], + "id":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":6}}, + "range":[5,6], + "name":"T", + "typeAnnotation":null, + "optional":false + }, + "typeParameters":null, + "right":{ + "type":"FunctionTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":38}}, + "range":[9,38], + "params":[ + { + "type":"FunctionTypeParam", + 
"loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":21}}, + "range":[10,21], + "name":{ + "type":"Identifier", + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":13}}, + "range":[10,13], + "name":"arg", + "typeAnnotation":null, + "optional":false + }, + "typeAnnotation":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":21}}, + "range":[15,21] + }, + "optional":false + }, + { + "type":"FunctionTypeParam", + "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":29}}, + "range":[23,29], + "name":null, + "typeAnnotation":{ + "type":"NumberTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":29}}, + "range":[23,29] + }, + "optional":false + } + ], + "returnType":{ + "type":"VoidTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":34},"end":{"line":1,"column":38}}, + "range":[34,38] + }, + "rest":null, + "typeParameters":null + } + } + ], + "comments":[] +} diff --git a/src/parser/test/flow/types/grouping/method.tree.json b/src/parser/test/flow/types/grouping/method.tree.json index 6b60dd1c4af..ae2719d69b3 100644 --- a/src/parser/test/flow/types/grouping/method.tree.json +++ b/src/parser/test/flow/types/grouping/method.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":3,"column":1}}, "range":[9,23], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/grouping/object_type_call_property.tree.json b/src/parser/test/flow/types/grouping/object_type_call_property.tree.json index 0afafa16a61..14f62ae1420 100644 --- a/src/parser/test/flow/types/grouping/object_type_call_property.tree.json +++ b/src/parser/test/flow/types/grouping/object_type_call_property.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":3,"column":1}}, "range":[9,22], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/types/grouping/object_type_indexer.tree.json b/src/parser/test/flow/types/grouping/object_type_indexer.tree.json index a757b24eaa4..4e2100caac3 100644 --- a/src/parser/test/flow/types/grouping/object_type_indexer.tree.json +++ b/src/parser/test/flow/types/grouping/object_type_indexer.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":3,"column":1}}, "range":[9,23], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/grouping/object_type_property.tree.json b/src/parser/test/flow/types/grouping/object_type_property.tree.json index 399b0fff986..b09672f79ec 100644 --- a/src/parser/test/flow/types/grouping/object_type_property.tree.json +++ b/src/parser/test/flow/types/grouping/object_type_property.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":3,"column":1}}, "range":[9,21], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/grouping/object_type_spread_property.tree.json b/src/parser/test/flow/types/grouping/object_type_spread_property.tree.json index a4db672cc04..b846627eee1 100644 --- a/src/parser/test/flow/types/grouping/object_type_spread_property.tree.json +++ b/src/parser/test/flow/types/grouping/object_type_spread_property.tree.json @@ -21,6 +21,7 @@ 
"loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":3,"column":1}}, "range":[9,21], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeSpreadProperty", diff --git a/src/parser/test/flow/types/grouping_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/grouping_invalid/migrated_0000.tree.json index 3c9297dd271..911be33f5e7 100644 --- a/src/parser/test/flow/types/grouping_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/types/grouping_invalid/migrated_0000.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":13}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":20}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, + "message":"Unexpected token `)`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":24}}, - "message":"Unexpected token =>" + "message":"Unexpected token `=>`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/import_type_shorthand/migrated_0007.tree.json b/src/parser/test/flow/types/import_type_shorthand/migrated_0007.tree.json index 040c9e3023d..d864a118937 100644 --- a/src/parser/test/flow/types/import_type_shorthand/migrated_0007.tree.json +++ b/src/parser/test/flow/types/import_type_shorthand/migrated_0007.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":23}}, @@ -14,7 +14,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":29}}, - "message":"Unexpected string" + "message":"Unexpected string, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, @@ -22,11 +22,19 @@ }, { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the token `}`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected the identifier `from`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected a string" } ], "type":"Program", diff --git a/src/parser/test/flow/types/import_type_shorthand/migrated_0008.tree.json b/src/parser/test/flow/types/import_type_shorthand/migrated_0008.tree.json index c2ca9363028..e0b798f887b 100644 --- a/src/parser/test/flow/types/import_type_shorthand/migrated_0008.tree.json +++ b/src/parser/test/flow/types/import_type_shorthand/migrated_0008.tree.json @@ -2,7 +2,7 @@ "errors":[ { 
"loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":14}}, - "message":"Unexpected token typeof" + "message":"Unexpected token `typeof`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/import_types/migrated_0000.tree.json b/src/parser/test/flow/types/import_types/migrated_0000.tree.json index 02484f167bc..f51405a191b 100644 --- a/src/parser/test/flow/types/import_types/migrated_0000.tree.json +++ b/src/parser/test/flow/types/import_types/migrated_0000.tree.json @@ -2,11 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":22}}, - "message":"Unexpected string" + "message":"Unexpected string, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the identifier `from`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, + "message":"Unexpected end of input, expected a string" } ], "type":"Program", diff --git a/src/parser/test/flow/types/import_types/namespace.tree.json b/src/parser/test/flow/types/import_types/namespace.tree.json index 89502b4f2f7..8449840cd7d 100644 --- a/src/parser/test/flow/types/import_types/namespace.tree.json +++ b/src/parser/test/flow/types/import_types/namespace.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":13}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/import_types/namespace_reserved_type.tree.json b/src/parser/test/flow/types/import_types/namespace_reserved_type.tree.json index 0a6b2c42569..ef30c749cd2 100644 --- a/src/parser/test/flow/types/import_types/namespace_reserved_type.tree.json +++ b/src/parser/test/flow/types/import_types/namespace_reserved_type.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`" }, { "loc":{"source":null,"start":{"line":2,"column":17},"end":{"line":2,"column":23}}, diff --git a/src/parser/test/flow/types/import_types/namespace_reserved_value.tree.json b/src/parser/test/flow/types/import_types/namespace_reserved_value.tree.json index 4e167c063f4..8e14309ac61 100644 --- a/src/parser/test/flow/types/import_types/namespace_reserved_value.tree.json +++ b/src/parser/test/flow/types/import_types/namespace_reserved_value.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, - "message":"Unexpected token *" + "message":"Unexpected token `*`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/instance_spread_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/instance_spread_invalid/migrated_0000.tree.json index 6e210d0e82b..403d3adf36b 100644 --- a/src/parser/test/flow/types/instance_spread_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/types/instance_spread_invalid/migrated_0000.tree.json @@ -2,15 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":16}}, - "message":"Unexpected token ..." 
- }, - { - "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, - "message":"Unexpected identifier" - }, - { - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, - "message":"Unexpected token }" + "message":"Spreading a type is only allowed inside an object type" } ], "type":"Program", @@ -35,32 +27,7 @@ "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":18}}, "range":[12,18], "exact":false, - "properties":[ - { - "type":"ObjectTypeProperty", - "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":17}}, - "range":[13,17], - "key":{ - "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":16}}, - "range":[13,16], - "name":"", - "typeAnnotation":null, - "optional":false - }, - "value":{ - "type":"AnyTypeAnnotation", - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, - "range":[17,18] - }, - "method":false, - "optional":false, - "static":false, - "proto":false, - "variance":null, - "kind":"init" - } - ], + "properties":[], "indexers":[], "callProperties":[], "internalSlots":[] diff --git a/src/parser/test/flow/types/instance_spread_invalid/migrated_0001.tree.json b/src/parser/test/flow/types/instance_spread_invalid/migrated_0001.tree.json index 3282b411b5a..be731730a6f 100644 --- a/src/parser/test/flow/types/instance_spread_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/types/instance_spread_invalid/migrated_0001.tree.json @@ -2,15 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":20}}, - "message":"Unexpected token ..." - }, - { - "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected identifier" - }, - { - "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "message":"Unexpected token }" + "message":"Spreading a type is only allowed inside an object type" } ], "type":"Program", @@ -35,32 +27,7 @@ "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":22}}, "range":[16,22], "exact":false, - "properties":[ - { - "type":"ObjectTypeProperty", - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":21}}, - "range":[17,21], - "key":{ - "type":"Identifier", - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":20}}, - "range":[17,20], - "name":"", - "typeAnnotation":null, - "optional":false - }, - "value":{ - "type":"AnyTypeAnnotation", - "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "range":[21,22] - }, - "method":false, - "optional":false, - "static":false, - "proto":false, - "variance":null, - "kind":"init" - } - ], + "properties":[], "indexers":[], "callProperties":[], "internalSlots":[] diff --git a/src/parser/test/flow/types/interfaces/inline.tree.json b/src/parser/test/flow/types/interfaces/inline.tree.json index 18dc29fd8b2..d685d1b0712 100644 --- a/src/parser/test/flow/types/interfaces/inline.tree.json +++ b/src/parser/test/flow/types/interfaces/inline.tree.json @@ -2,23 +2,39 @@ "errors":[ { "loc":{"source":null,"start":{"line":4,"column":23},"end":{"line":4,"column":24}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the token `{`" }, { "loc":{"source":null,"start":{"line":4,"column":25},"end":{"line":4,"column":26}}, - "message":"Unexpected token {" + "message":"Unexpected token `{`, expected an identifier" }, { 
"loc":{"source":null,"start":{"line":4,"column":26},"end":{"line":4,"column":27}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, "message":"Unexpected end of input" }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `}`" + }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `,`" + }, { "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, "message":"Rest parameter must be final parameter of an argument list" + }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `)`" + }, + { + "loc":{"source":null,"start":{"line":5,"column":0},"end":{"line":5,"column":0}}, + "message":"Unexpected end of input, expected the token `{`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/number_literal_invalid/migrated_0001.tree.json b/src/parser/test/flow/types/number_literal_invalid/migrated_0001.tree.json index c8a36c475f9..3e78ced846c 100644 --- a/src/parser/test/flow/types/number_literal_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/types/number_literal_invalid/migrated_0001.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":11}}, - "message":"Unexpected token -" + "message":"Unexpected token `-`, expected the token `;`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/object/indexers/reserved_type_indexer_name.tree.json b/src/parser/test/flow/types/object/indexers/reserved_type_indexer_name.tree.json index 5d667909abc..a565e39697d 100644 --- a/src/parser/test/flow/types/object/indexers/reserved_type_indexer_name.tree.json +++ b/src/parser/test/flow/types/object/indexers/reserved_type_indexer_name.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":37}}, "range":[9,37], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/object/indexers/reserved_word_indexer_name.tree.json b/src/parser/test/flow/types/object/indexers/reserved_word_indexer_name.tree.json index 1141c566c0e..2d4f09b50f0 100644 --- a/src/parser/test/flow/types/object/indexers/reserved_word_indexer_name.tree.json +++ b/src/parser/test/flow/types/object/indexers/reserved_word_indexer_name.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":37}}, "range":[9,37], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/object/methods/generic_method.tree.json b/src/parser/test/flow/types/object/methods/generic_method.tree.json index b97f26f2e3f..650902c9925 100644 --- a/src/parser/test/flow/types/object/methods/generic_method.tree.json +++ b/src/parser/test/flow/types/object/methods/generic_method.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":34}}, "range":[9,34], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object/methods/method.tree.json b/src/parser/test/flow/types/object/methods/method.tree.json index 59e348740a5..e2bf5a51084 100644 --- 
a/src/parser/test/flow/types/object/methods/method.tree.json +++ b/src/parser/test/flow/types/object/methods/method.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":26}}, "range":[9,26], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object/unexpected_proto.tree.json b/src/parser/test/flow/types/object/unexpected_proto.tree.json index ff14f8ccb27..cb5a8954c01 100644 --- a/src/parser/test/flow/types/object/unexpected_proto.tree.json +++ b/src/parser/test/flow/types/object/unexpected_proto.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":17},"end":{"line":2,"column":18}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":2,"column":18},"end":{"line":2,"column":19}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":18},"end":{"line":2,"column":19}}, + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":20},"end":{"line":2,"column":21}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":2,"column":22},"end":{"line":2,"column":23}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" } ], "type":"Program", @@ -38,6 +42,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":23}}, "range":[56,70], + "inexact":false, "exact":false, "properties":[ { @@ -111,6 +116,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":5,"column":9},"end":{"line":5,"column":21}}, "range":[105,117], + "inexact":false, "exact":false, "properties":[ { @@ -169,6 +175,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":8,"column":9},"end":{"line":8,"column":23}}, "range":[166,180], + "inexact":false, "exact":false, "properties":[ { diff --git a/src/parser/test/flow/types/object/unexpected_static.tree.json b/src/parser/test/flow/types/object/unexpected_static.tree.json index 9e84b6a142f..57a04acfbe3 100644 --- a/src/parser/test/flow/types/object/unexpected_static.tree.json +++ b/src/parser/test/flow/types/object/unexpected_static.tree.json @@ -2,31 +2,39 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":18},"end":{"line":2,"column":19}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":2,"column":19},"end":{"line":2,"column":20}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":19},"end":{"line":2,"column":20}}, + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":2,"column":21},"end":{"line":2,"column":22}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":2,"column":23},"end":{"line":2,"column":24}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" }, { "loc":{"source":null,"start":{"line":5,"column":18},"end":{"line":5,"column":19}}, - "message":"Unexpected token [" + "message":"Unexpected token `[`, expected the token `:`" }, { 
"loc":{"source":null,"start":{"line":5,"column":20},"end":{"line":5,"column":21}}, - "message":"Unexpected token ]" + "message":"Unexpected token `]`" + }, + { + "loc":{"source":null,"start":{"line":5,"column":20},"end":{"line":5,"column":21}}, + "message":"Unexpected token `]`, expected an identifier" }, { "loc":{"source":null,"start":{"line":8,"column":18},"end":{"line":8,"column":21}}, - "message":"Unexpected token ..." + "message":"Unexpected token `...`, expected the token `:`" }, { "loc":{"source":null,"start":{"line":14,"column":18},"end":{"line":14,"column":24}}, @@ -34,15 +42,19 @@ }, { "loc":{"source":null,"start":{"line":14,"column":24},"end":{"line":14,"column":25}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`" + }, + { + "loc":{"source":null,"start":{"line":14,"column":24},"end":{"line":14,"column":25}}, + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":14,"column":26},"end":{"line":14,"column":27}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":14,"column":28},"end":{"line":14,"column":29}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" }, { "loc":{"source":null,"start":{"line":23,"column":18},"end":{"line":23,"column":24}}, @@ -50,7 +62,7 @@ }, { "loc":{"source":null,"start":{"line":23,"column":26},"end":{"line":23,"column":27}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `=>`" }, { "loc":{"source":null,"start":{"line":26,"column":18},"end":{"line":26,"column":24}}, @@ -58,7 +70,7 @@ }, { "loc":{"source":null,"start":{"line":26,"column":29},"end":{"line":26,"column":30}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `=>`" } ], "type":"Program", @@ -82,6 +94,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":2,"column":9},"end":{"line":2,"column":24}}, "range":[57,72], + "inexact":false, "exact":false, "properties":[ { @@ -155,6 +168,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":5,"column":9},"end":{"line":5,"column":26}}, "range":[152,169], + "inexact":false, "exact":false, "properties":[ { @@ -246,6 +260,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":8,"column":9},"end":{"line":8,"column":24}}, "range":[247,262], + "inexact":false, "exact":false, "properties":[ { @@ -304,6 +319,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":11,"column":9},"end":{"line":11,"column":22}}, "range":[298,311], + "inexact":false, "exact":false, "properties":[ { @@ -362,6 +378,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":14,"column":9},"end":{"line":14,"column":29}}, "range":[357,377], + "inexact":false, "exact":false, "properties":[ { @@ -435,6 +452,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":17,"column":9},"end":{"line":17,"column":24}}, "range":[427,442], + "inexact":false, "exact":false, "properties":[ { @@ -501,6 +519,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":20,"column":9},"end":{"line":20,"column":27}}, "range":[497,515], + "inexact":false, "exact":false, "properties":[ { @@ -582,6 +601,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":23,"column":9},"end":{"line":23,"column":31}}, "range":[563,585], + "inexact":false, "exact":false, "properties":[ { @@ -648,6 +668,7 @@ "type":"ObjectTypeAnnotation", 
"loc":{"source":null,"start":{"line":26,"column":9},"end":{"line":26,"column":34}}, "range":[638,663], + "inexact":false, "exact":false, "properties":[ { diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0000.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0000.tree.json index c9abf81158e..ecae131475c 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0000.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0000.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":14}}, "range":[9,14], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0001.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0001.tree.json index d81eaf96457..70175641814 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0001.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0001.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, "range":[9,15], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0002.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0002.tree.json index 9a4103c61ad..abba0ebc584 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0002.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0002.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, "range":[9,15], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0003.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0003.tree.json index dfa2e64fa15..d4064e44e48 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0003.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0003.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, "range":[9,16], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0004.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0004.tree.json index f4da11da0b9..8fa99f65eca 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0004.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0004.tree.json @@ -27,6 +27,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":17}}, "range":[9,17], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0005.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0005.tree.json index f4da11da0b9..8fa99f65eca 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0005.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0005.tree.json @@ -27,6 +27,7 @@ 
"loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":17}}, "range":[9,17], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0006.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0006.tree.json index 3faa6614c4e..7ba70945907 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0006.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0006.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":18}}, "range":[9,18], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0007.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0007.tree.json index dfd86301c14..c73a44083d0 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0007.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0007.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":19}}, "range":[9,19], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0008.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0008.tree.json index 21bf6d6c6b0..f4b28505bee 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0008.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0008.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":19}}, "range":[9,19], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0009.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0009.tree.json index c3873e8e821..abf8e72e090 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0009.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0009.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, "range":[9,15], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0010.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0010.tree.json index d54b13c340d..9f8dd2bb8b6 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0010.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0010.tree.json @@ -27,6 +27,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, "range":[9,16], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/types/object_type_property_variance/migrated_0011.tree.json b/src/parser/test/flow/types/object_type_property_variance/migrated_0011.tree.json index d54b13c340d..9f8dd2bb8b6 100644 --- a/src/parser/test/flow/types/object_type_property_variance/migrated_0011.tree.json +++ b/src/parser/test/flow/types/object_type_property_variance/migrated_0011.tree.json @@ -27,6 +27,7 @@ 
"loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, "range":[9,16], "exact":false, + "inexact":false, "properties":[], "indexers":[], "callProperties":[ diff --git a/src/parser/test/flow/types/object_type_spread/migrated_0000.tree.json b/src/parser/test/flow/types/object_type_spread/migrated_0000.tree.json index 8152e6ca783..f0c16cea858 100644 --- a/src/parser/test/flow/types/object_type_spread/migrated_0000.tree.json +++ b/src/parser/test/flow/types/object_type_spread/migrated_0000.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":15}}, "range":[9,15], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeSpreadProperty", diff --git a/src/parser/test/flow/types/object_type_spread/migrated_0001.tree.json b/src/parser/test/flow/types/object_type_spread/migrated_0001.tree.json index 42a644bcc2e..e20ad15f355 100644 --- a/src/parser/test/flow/types/object_type_spread/migrated_0001.tree.json +++ b/src/parser/test/flow/types/object_type_spread/migrated_0001.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, "range":[9,16], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeSpreadProperty", diff --git a/src/parser/test/flow/types/object_type_spread/migrated_0002.tree.json b/src/parser/test/flow/types/object_type_spread/migrated_0002.tree.json index 7a44838c53d..6a0678eede5 100644 --- a/src/parser/test/flow/types/object_type_spread/migrated_0002.tree.json +++ b/src/parser/test/flow/types/object_type_spread/migrated_0002.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":20}}, "range":[9,20], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/object_type_spread/migrated_0003.tree.json b/src/parser/test/flow/types/object_type_spread/migrated_0003.tree.json index 3f130bf9939..b5679244d4a 100644 --- a/src/parser/test/flow/types/object_type_spread/migrated_0003.tree.json +++ b/src/parser/test/flow/types/object_type_spread/migrated_0003.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":20}}, "range":[9,20], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeSpreadProperty", diff --git a/src/parser/test/flow/types/object_type_spread/migrated_0004.tree.json b/src/parser/test/flow/types/object_type_spread/migrated_0004.tree.json index 3543b08f3fe..1b0117a5d79 100644 --- a/src/parser/test/flow/types/object_type_spread/migrated_0004.tree.json +++ b/src/parser/test/flow/types/object_type_spread/migrated_0004.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":23}}, "range":[9,23], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeSpreadProperty", diff --git a/src/parser/test/flow/types/object_type_spread/migrated_0005.tree.json b/src/parser/test/flow/types/object_type_spread/migrated_0005.tree.json index fe2bacf39d0..dbdd0144bf4 100644 --- a/src/parser/test/flow/types/object_type_spread/migrated_0005.tree.json +++ b/src/parser/test/flow/types/object_type_spread/migrated_0005.tree.json @@ -2,15 +2,15 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":14}}, - "message":"Unexpected token ..." 
+ "message":"Unexpected token `...`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":15}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `:`" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`" } ], "type":"Program", @@ -34,6 +34,7 @@ "type":"ObjectTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":16}}, "range":[9,16], + "inexact":false, "exact":false, "properties":[ { diff --git a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_and_st.tree.json b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_and_st.tree.json index 506096ff624..6fb6b5a75c9 100644 --- a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_and_st.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_and_st.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, diff --git a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_no_st.tree.json b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_no_st.tree.json index be85acc38bd..cdcaf1566dc 100644 --- a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_no_st.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_export_t_no_st.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":32}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":32}}, + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":31},"end":{"line":1,"column":32}}, diff --git a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_and_st.tree.json b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_and_st.tree.json index 8dfa668518b..97fe7f3b2c7 100644 --- a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_and_st.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_and_st.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":29},"end":{"line":1,"column":30}}, diff --git a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_no_st.tree.json b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_no_st.tree.json index 035afe1fa13..db98a050d5e 100644 --- a/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_no_st.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/declare/opaque_declare_t_no_st.tree.json @@ -2,7 +2,11 
@@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid1.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid1.tree.json index 6eef6ff0616..267510941d6 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid1.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid1.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":20}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `type`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid2.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid2.tree.json index 07e18dda46e..7ca78366dee 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid2.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_invalid2.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":13}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":18}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_allow_export.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_allow_export.tree.json index 8d1516e69ab..d16805eb59b 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_allow_export.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_allow_export.tree.json @@ -2,15 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":44},"end":{"line":2,"column":45}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":44},"end":{"line":2,"column":45}}, + "message":"Unexpected token `=`, expected the token `declare`" }, { "loc":{"source":null,"start":{"line":2,"column":46},"end":{"line":2,"column":52}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `var`" }, { "loc":{"source":null,"start":{"line":2,"column":52},"end":{"line":2,"column":53}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" } ], "type":"Program", diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid1.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid1.tree.json index e0909443cb5..b34f924a381 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid1.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid1.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":12}}, - 
"message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" } ], "type":"Program", diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid2.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid2.tree.json index ec0b18991b7..fcbec1b32de 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid2.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid2.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":20}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `type`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":26}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":28}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":28}}, + "message":"Unexpected token `=`, expected the token `;`" }, { "loc":{"source":null,"start":{"line":1,"column":27},"end":{"line":1,"column":28}}, diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid3.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid3.tree.json index 80b54253bd1..5216a49fe5a 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid3.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_subtype_invalid3.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":12}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":20}}, diff --git a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_type_allow_export.tree.json b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_type_allow_export.tree.json index 9e581711fda..2bb7804da91 100644 --- a/src/parser/test/flow/types/opaque_aliases/invalid/opaque_type_allow_export.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/invalid/opaque_type_allow_export.tree.json @@ -2,15 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":36},"end":{"line":2,"column":37}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":36},"end":{"line":2,"column":37}}, + "message":"Unexpected token `=`, expected the token `declare`" }, { "loc":{"source":null,"start":{"line":2,"column":38},"end":{"line":2,"column":44}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the token `var`" }, { "loc":{"source":null,"start":{"line":2,"column":44},"end":{"line":2,"column":45}}, - "message":"Unexpected token ;" + "message":"Unexpected token `;`, expected an identifier" } ], 
"type":"Program", diff --git a/src/parser/test/flow/types/opaque_aliases/valid/declare_opaque.tree.json b/src/parser/test/flow/types/opaque_aliases/valid/declare_opaque.tree.json index bc06c61758d..9f67a78e864 100644 --- a/src/parser/test/flow/types/opaque_aliases/valid/declare_opaque.tree.json +++ b/src/parser/test/flow/types/opaque_aliases/valid/declare_opaque.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":26}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":26}}, + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":26}}, diff --git a/src/parser/test/flow/types/optional_indexer_name/migrated_0000.tree.json b/src/parser/test/flow/types/optional_indexer_name/migrated_0000.tree.json index f806f657261..ed404f50fcb 100644 --- a/src/parser/test/flow/types/optional_indexer_name/migrated_0000.tree.json +++ b/src/parser/test/flow/types/optional_indexer_name/migrated_0000.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":29}}, "range":[9,29], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/optional_indexer_name/migrated_0001.tree.json b/src/parser/test/flow/types/optional_indexer_name/migrated_0001.tree.json index e637c14daf2..b8fbaebeb38 100644 --- a/src/parser/test/flow/types/optional_indexer_name/migrated_0001.tree.json +++ b/src/parser/test/flow/types/optional_indexer_name/migrated_0001.tree.json @@ -21,6 +21,7 @@ "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":39}}, "range":[9,39], "exact":false, + "inexact":false, "properties":[], "indexers":[ { diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0000.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0000.tree.json index c45fd9704ae..f1ca467f5f8 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0000.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0000.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":8}}, - "range":[7,8], + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, + "range":[7,17], "name":"T", "bound":null, "variance":null, diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0001.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0001.tree.json index 31b9a42a82b..0653a64c90d 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0001.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0001.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, - "range":[7,17], + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":26}}, + "range":[7,26], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0002.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0002.tree.json index b7fa6228cb6..abdeaedc365 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0002.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0002.tree.json @@ -31,8 +31,8 @@ }, { 
"type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":20}}, - "range":[10,20], + "loc":{"source":null,"start":{"line":1,"column":10},"end":{"line":1,"column":29}}, + "range":[10,29], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0003.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0003.tree.json index 0579b0c0b67..70a7226a805 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0003.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0003.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":8}}, - "range":[7,8], + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, + "range":[7,17], "name":"S", "bound":null, "variance":null, @@ -35,8 +35,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":29}}, - "range":[19,29], + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":38}}, + "range":[19,38], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0004.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0004.tree.json index 010e3a97092..783737cde33 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0004.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0004.tree.json @@ -21,7 +21,6 @@ "range":[20,22], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":19}}, @@ -29,8 +28,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":9}}, - "range":[8,9], + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":18}}, + "range":[8,18], "name":"T", "bound":null, "variance":null, @@ -42,6 +41,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0005.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0005.tree.json index e308e460fac..b859cc8eb9a 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0005.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0005.tree.json @@ -21,7 +21,6 @@ "range":[29,31], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":28}}, @@ -29,8 +28,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":18}}, - "range":[8,18], + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":27}}, + "range":[8,27], "name":"T", "bound":{ "type":"TypeAnnotation", @@ -56,6 +55,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0006.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0006.tree.json index ba916399093..65d073fb5ab 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0006.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0006.tree.json @@ -21,7 +21,6 @@ "range":[32,34], "body":[] }, - 
"superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":31}}, @@ -38,8 +37,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":21}}, - "range":[11,21], + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":30}}, + "range":[11,30], "name":"T", "bound":{ "type":"TypeAnnotation", @@ -65,6 +64,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0007.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0007.tree.json index 8b73375e95d..a2a168f289f 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0007.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0007.tree.json @@ -21,7 +21,6 @@ "range":[41,43], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":40}}, @@ -29,8 +28,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":9}}, - "range":[8,9], + "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":18}}, + "range":[8,18], "name":"S", "bound":null, "variance":null, @@ -42,8 +41,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":30}}, - "range":[20,30], + "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":39}}, + "range":[20,39], "name":"T", "bound":{ "type":"TypeAnnotation", @@ -69,6 +68,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0008.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0008.tree.json index 0aa0091b6da..ddad297b34e 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0008.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0008.tree.json @@ -25,7 +25,6 @@ "range":[21,23], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":20}}, @@ -33,8 +32,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, - "range":[9,10], + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":19}}, + "range":[9,19], "name":"T", "bound":null, "variance":null, @@ -46,6 +45,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0009.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0009.tree.json index f315fe4a101..bd721b0daae 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0009.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0009.tree.json @@ -25,7 +25,6 @@ "range":[30,32], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":29}}, @@ -33,8 +32,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":19}}, - "range":[9,19], + 
"loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":28}}, + "range":[9,28], "name":"T", "bound":{ "type":"TypeAnnotation", @@ -60,6 +59,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0010.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0010.tree.json index dc17cc81aab..577e99d021d 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0010.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0010.tree.json @@ -25,7 +25,6 @@ "range":[33,35], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":32}}, @@ -42,8 +41,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":22}}, - "range":[12,22], + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":31}}, + "range":[12,31], "name":"T", "bound":{ "type":"TypeAnnotation", @@ -69,6 +68,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0011.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0011.tree.json index 6fa4a434c41..bda2a7add62 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0011.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0011.tree.json @@ -25,7 +25,6 @@ "range":[42,44], "body":[] }, - "superClass":null, "typeParameters":{ "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":8},"end":{"line":1,"column":41}}, @@ -33,8 +32,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, - "range":[9,10], + "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":19}}, + "range":[9,19], "name":"S", "bound":null, "variance":null, @@ -46,8 +45,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":31}}, - "range":[21,31], + "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":40}}, + "range":[21,40], "name":"T", "bound":{ "type":"TypeAnnotation", @@ -73,6 +72,7 @@ } ] }, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0012.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0012.tree.json index 165d00c5295..5f2c773216e 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0012.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0012.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, - "range":[16,17], + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":26}}, + "range":[16,26], "name":"T", "bound":null, "variance":null, diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0013.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0013.tree.json index 98bebff724e..389bed90445 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0013.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0013.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - 
"loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":26}}, - "range":[16,26], + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":35}}, + "range":[16,35], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0014.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0014.tree.json index cfb051639c3..00188cb7a66 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0014.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0014.tree.json @@ -31,8 +31,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":29}}, - "range":[19,29], + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":38}}, + "range":[19,38], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0015.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0015.tree.json index 94091fb9fdb..84569df40e5 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0015.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0015.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, - "range":[16,17], + "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":26}}, + "range":[16,26], "name":"S", "bound":null, "variance":null, @@ -35,8 +35,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":38}}, - "range":[28,38], + "loc":{"source":null,"start":{"line":1,"column":28},"end":{"line":1,"column":47}}, + "range":[28,47], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0016.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0016.tree.json index bda4dbb4fcf..04daa92a879 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0016.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0016.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":13}}, - "range":[12,13], + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":22}}, + "range":[12,22], "name":"T", "bound":null, "variance":null, diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0017.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0017.tree.json index b574f8d2a59..71d1d38915f 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0017.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0017.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":22}}, - "range":[12,22], + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":31}}, + "range":[12,31], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0018.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0018.tree.json index d80492e75f0..1e9a4f377bc 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0018.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0018.tree.json @@ -31,8 +31,8 @@ }, { "type":"TypeParameter", - 
"loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":25}}, - "range":[15,25], + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":34}}, + "range":[15,34], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0019.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0019.tree.json index 7f935b38146..6767c8b03bb 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0019.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0019.tree.json @@ -22,8 +22,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":13}}, - "range":[12,13], + "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":1,"column":22}}, + "range":[12,22], "name":"S", "bound":null, "variance":null, @@ -35,8 +35,8 @@ }, { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":34}}, - "range":[24,34], + "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":43}}, + "range":[24,43], "name":"T", "bound":{ "type":"TypeAnnotation", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0022.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0022.tree.json index 6f1c725d3ef..95cf45acb52 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0022.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0022.tree.json @@ -2,11 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":19}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `,`" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token ?" 
+ "message":"Unexpected token `?`, expected an identifier" }, { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, @@ -14,7 +14,7 @@ }, { "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":27}}, - "message":"Unexpected token string" + "message":"Unexpected token `string`, expected the token `,`" } ], "type":"Program", @@ -40,8 +40,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":8}}, - "range":[7,8], + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, + "range":[7,17], "name":"T", "bound":null, "variance":null, diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0023.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0023.tree.json index 46925455a1d..bbe9d2fd3fb 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0023.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0023.tree.json @@ -28,8 +28,8 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, - "range":[7,17], + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":26}}, + "range":[7,26], "name":"HasDefault", "bound":null, "variance":null, diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0024.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0024.tree.json index fd20702730a..cd6392c63f4 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0024.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0024.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":20},"end":{"line":1,"column":21}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `,`" } ], "type":"Program", @@ -27,6 +27,7 @@ "range":[30,32], "body":[] }, + "typeParameters":null, "superClass":{ "type":"Identifier", "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, @@ -35,7 +36,6 @@ "typeAnnotation":null, "optional":false }, - "typeParameters":null, "superTypeParameters":{ "type":"TypeParameterInstantiation", "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":29}}, diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0025.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0025.tree.json index bb5310e3702..b892d634424 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0025.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0025.tree.json @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`, expected the token `,`" } ], "type":"Program", diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0026.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0026.tree.json index 12635f2f14a..5f42722e2fc 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0026.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0026.tree.json @@ -1,14 +1,4 @@ { - "errors":[ - { - "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":23}}, - "message":"Unexpected reserved type" - } - ], 
"type":"Program", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":29}}, "range":[0,29], @@ -44,21 +34,16 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":14}}, - "range":[13,14], + "loc":{"source":null,"start":{"line":1,"column":13},"end":{"line":1,"column":23}}, + "range":[13,23], "name":"T", "bound":null, "variance":null, - "default":null - }, - { - "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":23}}, - "range":[17,23], - "name":"string", - "bound":null, - "variance":null, - "default":null + "default":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":23}}, + "range":[17,23] + } } ] } diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0027.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0027.tree.json index d94c2059d2d..2ee1c750c28 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0027.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0027.tree.json @@ -1,24 +1,20 @@ { "errors":[ { - "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":24}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":31}}, - "message":"Unexpected reserved type" + "loc":{"source":null,"start":{"line":1,"column":35},"end":{"line":1,"column":36}}, + "message":"Unexpected token `{`, expected the token `:`" }, { - "loc":{"source":null,"start":{"line":1,"column":35},"end":{"line":1,"column":36}}, - "message":"Unexpected token {" + "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, + "message":"Unexpected token `}`" }, { "loc":{"source":null,"start":{"line":1,"column":36},"end":{"line":1,"column":37}}, - "message":"Unexpected token }" + "message":"Unexpected token `}`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", @@ -56,21 +52,16 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":22}}, - "range":[21,22], + "loc":{"source":null,"start":{"line":1,"column":21},"end":{"line":1,"column":31}}, + "range":[21,31], "name":"T", "bound":null, "variance":null, - "default":null - }, - { - "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":31}}, - "range":[25,31], - "name":"string", - "bound":null, - "variance":null, - "default":null + "default":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":25},"end":{"line":1,"column":31}}, + "range":[25,31] + } } ] } diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0028.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0028.tree.json index e614aed9d12..373e67ae897 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0028.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0028.tree.json @@ -1,14 +1,4 @@ { - "errors":[ - { - "loc":{"source":null,"start":{"line":1,"column":9},"end":{"line":1,"column":10}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":17}}, - "message":"Unexpected 
reserved type" - } - ], "type":"Program", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":26}}, "range":[0,26], @@ -58,21 +48,16 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":8}}, - "range":[7,8], + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":17}}, + "range":[7,17], "name":"T", "bound":null, "variance":null, - "default":null - }, - { - "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":17}}, - "range":[11,17], - "name":"string", - "bound":null, - "variance":null, - "default":null + "default":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":11},"end":{"line":1,"column":17}}, + "range":[11,17] + } } ] } diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0029.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0029.tree.json index b6043211145..68e557023e4 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0029.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0029.tree.json @@ -1,14 +1,4 @@ { - "errors":[ - { - "loc":{"source":null,"start":{"line":1,"column":16},"end":{"line":1,"column":17}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":24}}, - "message":"Unexpected reserved type" - } - ], "type":"Program", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":32}}, "range":[0,32], @@ -66,21 +56,16 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":15}}, - "range":[14,15], + "loc":{"source":null,"start":{"line":1,"column":14},"end":{"line":1,"column":24}}, + "range":[14,24], "name":"T", "bound":null, "variance":null, - "default":null - }, - { - "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":24}}, - "range":[18,24], - "name":"string", - "bound":null, - "variance":null, - "default":null + "default":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":24}}, + "range":[18,24] + } } ] } @@ -92,8 +77,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0030.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0030.tree.json index 4ac5a83af90..2acfbcc6575 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0030.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0030.tree.json @@ -1,14 +1,4 @@ { - "errors":[ - { - "loc":{"source":null,"start":{"line":1,"column":17},"end":{"line":1,"column":18}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":25}}, - "message":"Unexpected reserved type" - } - ], "type":"Program", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":34}}, "range":[0,34], @@ -70,21 +60,16 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "range":[15,16], + "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":25}}, + "range":[15,25], "name":"T", "bound":null, "variance":null, - "default":null - }, - { - "type":"TypeParameter", - 
"loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":25}}, - "range":[19,25], - "name":"string", - "bound":null, - "variance":null, - "default":null + "default":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":19},"end":{"line":1,"column":25}}, + "range":[19,25] + } } ] } @@ -96,8 +81,8 @@ } ] }, - "superClass":null, "typeParameters":null, + "superClass":null, "superTypeParameters":null, "implements":[], "decorators":[] diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0031.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0031.tree.json index 101bfa6e35f..3d268c6b7b3 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0031.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0031.tree.json @@ -1,14 +1,4 @@ { - "errors":[ - { - "loc":{"source":null,"start":{"line":1,"column":24},"end":{"line":1,"column":25}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":32}}, - "message":"Unexpected reserved type" - } - ], "type":"Program", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":43}}, "range":[0,43], @@ -62,21 +52,16 @@ "params":[ { "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":23}}, - "range":[22,23], + "loc":{"source":null,"start":{"line":1,"column":22},"end":{"line":1,"column":32}}, + "range":[22,32], "name":"T", "bound":null, "variance":null, - "default":null - }, - { - "type":"TypeParameter", - "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":32}}, - "range":[26,32], - "name":"string", - "bound":null, - "variance":null, - "default":null + "default":{ + "type":"StringTypeAnnotation", + "loc":{"source":null,"start":{"line":1,"column":26},"end":{"line":1,"column":32}}, + "range":[26,32] + } } ] } diff --git a/src/parser/test/flow/types/parameter_defaults/migrated_0032.tree.json b/src/parser/test/flow/types/parameter_defaults/migrated_0032.tree.json index 6be36a70a3f..cda84de6f1a 100644 --- a/src/parser/test/flow/types/parameter_defaults/migrated_0032.tree.json +++ b/src/parser/test/flow/types/parameter_defaults/migrated_0032.tree.json @@ -1,78 +1,50 @@ { - "errors":[ - { - "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":4}}, - "message":"Unexpected token =" - }, - { - "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected token ILLEGAL" - }, - { - "loc":{"source":null,"start":{"line":2,"column":0},"end":{"line":2,"column":0}}, - "message":"Unexpected end of input" - } - ], "type":"Program", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, - "range":[0,22], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":21}}, + "range":[0,21], "body":[ { "type":"ExpressionStatement", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":2,"column":0}}, - "range":[0,22], + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":21}}, + "range":[0,21], "expression":{ - "type":"JSXElement", - "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, - "range":[0,12], - "openingElement":{ - "type":"JSXOpeningElement", + "type":"ArrowFunctionExpression", + "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":21}}, + "range":[0,21], + "id":null, + "params":[], + "body":{ + 
"type":"Literal", + "loc":{"source":null,"start":{"line":1,"column":18},"end":{"line":1,"column":21}}, + "range":[18,21], + "value":123, + "raw":"123" + }, + "async":false, + "generator":false, + "predicate":null, + "expression":true, + "returnType":null, + "typeParameters":{ + "type":"TypeParameterDeclaration", "loc":{"source":null,"start":{"line":1,"column":0},"end":{"line":1,"column":12}}, "range":[0,12], - "name":{ - "type":"JSXIdentifier", - "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":2}}, - "range":[1,2], - "name":"T" - }, - "attributes":[ + "params":[ { - "type":"JSXAttribute", - "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":4}}, - "range":[3,4], - "name":{ - "type":"JSXIdentifier", - "loc":{"source":null,"start":{"line":1,"column":3},"end":{"line":1,"column":4}}, - "range":[3,4], - "name":"" - }, - "value":null - }, - { - "type":"JSXAttribute", - "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, - "range":[5,11], - "name":{ - "type":"JSXIdentifier", + "type":"TypeParameter", + "loc":{"source":null,"start":{"line":1,"column":1},"end":{"line":1,"column":11}}, + "range":[1,11], + "name":"T", + "bound":null, + "variance":null, + "default":{ + "type":"StringTypeAnnotation", "loc":{"source":null,"start":{"line":1,"column":5},"end":{"line":1,"column":11}}, - "range":[5,11], - "name":"string" - }, - "value":null + "range":[5,11] + } } - ], - "selfClosing":false - }, - "closingElement":null, - "children":[ - { - "type":"JSXText", - "loc":{"source":null,"start":{"line":1,"column":12},"end":{"line":2,"column":0}}, - "range":[12,22], - "value":"() => 123\n", - "raw":"() => 123\n" - } - ] + ] + } }, "directive":null } diff --git a/src/parser/test/flow/types/reserved/reserved.tree.json b/src/parser/test/flow/types/reserved/reserved.tree.json index 0bc88a243c1..d7936f4e643 100644 --- a/src/parser/test/flow/types/reserved/reserved.tree.json +++ b/src/parser/test/flow/types/reserved/reserved.tree.json @@ -2,11 +2,19 @@ "errors":[ { "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":14}}, - "message":"Unexpected token extends" + "message":"Unexpected token `extends`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":14}}, + "message":"Unexpected token `extends`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":1,"column":7},"end":{"line":1,"column":14}}, + "message":"Unexpected token `extends`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, - "message":"Unexpected token =" + "message":"Unexpected token `=`" }, { "loc":{"source":null,"start":{"line":1,"column":15},"end":{"line":1,"column":16}}, diff --git a/src/parser/test/flow/types/typecasts/migrated_0001.tree.json b/src/parser/test/flow/types/typecasts/migrated_0001.tree.json index 09396a0fdf0..ec3d03099c8 100644 --- a/src/parser/test/flow/types/typecasts/migrated_0001.tree.json +++ b/src/parser/test/flow/types/typecasts/migrated_0001.tree.json @@ -75,6 +75,7 @@ "loc":{"source":null,"start":{"line":1,"column":23},"end":{"line":1,"column":49}}, "range":[23,49], "exact":false, + "inexact":false, "properties":[ { "type":"ObjectTypeProperty", diff --git a/src/parser/test/flow/types/typecasts_invalid/migrated_0000.tree.json b/src/parser/test/flow/types/typecasts_invalid/migrated_0000.tree.json index f409b5d04f7..550ec8dd84b 100644 --- 
a/src/parser/test/flow/types/typecasts_invalid/migrated_0000.tree.json +++ b/src/parser/test/flow/types/typecasts_invalid/migrated_0000.tree.json @@ -2,7 +2,11 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":17},"end":{"line":2,"column":18}}, - "message":"Unexpected token :" + "message":"Unexpected token `:`, expected the token `;`" + }, + { + "loc":{"source":null,"start":{"line":2,"column":17},"end":{"line":2,"column":18}}, + "message":"Unexpected token `:`, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/flow/types/typecasts_invalid/migrated_0001.tree.json b/src/parser/test/flow/types/typecasts_invalid/migrated_0001.tree.json index d2400e24487..67db4fa0bc8 100644 --- a/src/parser/test/flow/types/typecasts_invalid/migrated_0001.tree.json +++ b/src/parser/test/flow/types/typecasts_invalid/migrated_0001.tree.json @@ -2,19 +2,23 @@ "errors":[ { "loc":{"source":null,"start":{"line":2,"column":12},"end":{"line":2,"column":13}}, - "message":"Unexpected token ," + "message":"Unexpected token `,`, expected the token `)`" }, { "loc":{"source":null,"start":{"line":2,"column":14},"end":{"line":2,"column":17}}, - "message":"Unexpected identifier" + "message":"Unexpected identifier, expected the end of an expression statement (`;`)" }, { "loc":{"source":null,"start":{"line":2,"column":25},"end":{"line":2,"column":26}}, - "message":"Unexpected token )" + "message":"Unexpected token `)`, expected the end of an expression statement (`;`)" + }, + { + "loc":{"source":null,"start":{"line":2,"column":25},"end":{"line":2,"column":26}}, + "message":"Unexpected token `)`, expected the start of a statement" }, { "loc":{"source":null,"start":{"line":3,"column":0},"end":{"line":3,"column":0}}, - "message":"Unexpected end of input" + "message":"Unexpected end of input, expected the start of a statement" } ], "type":"Program", diff --git a/src/parser/test/parse_test262.ml b/src/parser/test/parse_test262.ml index 29219060e4d..da5acf10381 100644 --- a/src/parser/test/parse_test262.ml +++ b/src/parser/test/parse_test262.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
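(* A minimal sketch, assuming only APIs that appear elsewhere in this patch
   (Parser_flow.program_file, Parser_env.default_parse_options,
   Parse_error.PP.error, Loc.debug_to_string), of how the new
   "Unexpected token `=`, expected the token `;`" style messages recorded in
   the .tree.json fixtures above can be printed for an arbitrary input.  The
   input string and file name are illustrative, not the literal fixture
   sources. *)
let print_parse_errors content =
  let (_ast, errors) =
    Parser_flow.program_file
      ~fail:false
      ~parse_options:(Some Parser_env.default_parse_options)
      content
      (Some (File_key.SourceFile "example.js"))
  in
  (* errors is a list of (Loc.t * Parse_error.t) pairs, matching the
     Unexpected_parse_error payload used below in this file. *)
  List.iter
    (fun (loc, err) ->
      Printf.printf "%s at %s\n" (Parse_error.PP.error err) (Loc.debug_to_string loc))
    errors

let () = print_parse_errors "declare opaque type ID = string"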
@@ -12,7 +12,7 @@ type verbose_mode = type error_reason = | Missing_parse_error - | Unexpected_parse_error of (Loc.t * Parser_common.Error.t) + | Unexpected_parse_error of (Loc.t * Parse_error.t) type test_name = string * bool (* filename * strict *) @@ -21,7 +21,7 @@ type test_result = { result: (unit, error_reason) result; } -module SMap = Map.Make(String) +module SMap = Map.Make (String) module Progress_bar = struct type t = { @@ -32,35 +32,41 @@ module Progress_bar = struct frequency: float; } - let percentage bar = - Printf.sprintf "%3d%%" (bar.count * 100 / bar.total) + let percentage bar = Printf.sprintf "%3d%%" (bar.count * 100 / bar.total) let meter bar = let chunks = bar.chunks in let c = bar.count * chunks / bar.total in - if c = 0 then String.make chunks ' ' - else if c = chunks then String.make chunks '=' - else (String.make (c - 1) '=')^">"^(String.make (chunks - c) ' ') + if c = 0 then + String.make chunks ' ' + else if c = chunks then + String.make chunks '=' + else + String.make (c - 1) '=' ^ ">" ^ String.make (chunks - c) ' ' - let incr bar = - bar.count <- succ bar.count + let incr bar = bar.count <- succ bar.count let to_string (passed, failed) bar = let total = float_of_int (passed + failed) in - Printf.sprintf "\r%s [%s] %d/%d -- Passed: %d (%.2f%%), Failed: %d (%.2f%%)%!" - (percentage bar) (meter bar) bar.count bar.total - passed ((float_of_int passed) /. total *. 100.) - failed ((float_of_int failed) /. total *. 100.) - - let print (passed, failed) bar = - Printf.printf "\r%s%!" (to_string (passed, failed) bar) + Printf.sprintf + "\r%s [%s] %d/%d -- Passed: %d (%.2f%%), Failed: %d (%.2f%%)%!" + (percentage bar) + (meter bar) + bar.count + bar.total + passed + (float_of_int passed /. total *. 100.) + failed + (float_of_int failed /. total *. 100.) + + let print (passed, failed) bar = Printf.printf "\r%s%!" (to_string (passed, failed) bar) let print_throttled (passed, failed) bar = let now = Unix.gettimeofday () in - if now -. bar.last_update > bar.frequency then begin + if now -. bar.last_update > bar.frequency then ( bar.last_update <- now; print (passed, failed) bar - end + ) let clear (passed, failed) bar = let len = String.length (to_string (passed, failed) bar) in @@ -71,14 +77,7 @@ module Progress_bar = struct print (passed, failed) bar; Printf.printf "\n%!" 
- let make ~chunks ~frequency total = - { - count = 0; - last_update = 0.; - total; - chunks; - frequency; - } + let make ~chunks ~frequency total = { count = 0; last_update = 0.; total; chunks; frequency } end let is_fixture = @@ -90,30 +89,44 @@ let is_fixture = let files_of_path path = let filter fn = not (is_fixture fn) in - File_utils.fold_files ~file_only:true ~filter [path] (fun kind acc -> - match kind with - | File_utils.Dir _dir -> assert false - | File_utils.File filename -> filename::acc - ) [] + File_utils.fold_files + ~file_only:true + ~filter + [path] + (fun kind acc -> + match kind with + | File_utils.Dir _dir -> assert false + | File_utils.File filename -> filename :: acc) + [] |> List.rev let print_name ~strip_root (filename, use_strict) = - let filename = match strip_root with - | Some root -> - let len = String.length root in - String.sub filename len (String.length filename - len) - | None -> filename + let filename = + match strip_root with + | Some root -> + let len = String.length root in + String.sub filename len (String.length filename - len) + | None -> filename + in + let strict = + if use_strict then + "(strict mode)" + else + "(default)" + in + let cr = + if Unix.isatty Unix.stdout then + "\r" + else + "" in - let strict = if use_strict then "(strict mode)" else "(default)" in - let cr = if Unix.isatty Unix.stdout then "\r" else "" in Printf.printf "%s%s %s\n%!" cr filename strict let print_error err = match err with - | Missing_parse_error -> - Printf.printf " Missing parse error\n%!" + | Missing_parse_error -> Printf.printf " Missing parse error\n%!" | Unexpected_parse_error (loc, err) -> - Printf.printf " %s at %s\n%!" (Parse_error.PP.error err) (Loc.to_string loc) + Printf.printf " %s at %s\n%!" (Parse_error.PP.error err) (Loc.debug_to_string loc) module Frontmatter = struct type t = { @@ -124,10 +137,12 @@ module Frontmatter = struct flags: strictness; negative: negative option; } + and negative = { phase: string; type_: string; } + and strictness = | Raw | Only_strict @@ -154,7 +169,7 @@ module Frontmatter = struct Str.matched_group 1 str in let opt_matched_group regex str = - try Some (matched_group regex str) with Not_found -> None + (try Some (matched_group regex str) with Not_found -> None) in let contains needle haystack = try @@ -164,7 +179,7 @@ module Frontmatter = struct in fun source -> try - let start_idx = (Str.search_forward start_regexp source 0) + 5 in + let start_idx = Str.search_forward start_regexp source 0 + 5 in let end_idx = Str.search_forward end_regexp source start_idx in let text = String.sub source start_idx (end_idx - start_idx) in let text = Str.global_replace cr_regexp "\n" text in @@ -182,37 +197,29 @@ module Frontmatter = struct let only_strict = contains only_strict_regexp flags in let no_strict = contains no_strict_regexp flags in let raw = contains raw_regexp flags in - begin match only_strict, no_strict, raw with - | true, false, false -> Only_strict - | false, true, false -> No_strict - | false, false, true -> Raw - | _ -> Both_strictnesses (* validate other nonsense combos? *) + begin + match (only_strict, no_strict, raw) with + | (true, false, false) -> Only_strict + | (false, true, false) -> No_strict + | (false, false, true) -> Raw + | _ -> Both_strictnesses (* validate other nonsense combos? 
*) end | None -> Both_strictnesses in let negative = if contains negative_regexp text then - Some { - phase = matched_group phase_regexp text; - type_ = matched_group type_regexp text; - } - else None + Some + { phase = matched_group phase_regexp text; type_ = matched_group type_regexp text } + else + None in - Some { - es5id; - es6id; - esid; - features; - flags; - negative; - } - with Not_found -> - None + Some { es5id; es6id; esid; features; flags; negative } + with Not_found -> None let to_string fm = let opt_cons key x acc = match x with - | Some x -> (Printf.sprintf "%s: %s" key x)::acc + | Some x -> Printf.sprintf "%s: %s" key x :: acc | None -> acc in [] @@ -220,18 +227,16 @@ module Frontmatter = struct |> opt_cons "es6id" fm.es6id |> opt_cons "esid" fm.esid |> (fun acc -> - let flags = match fm.flags with - | Only_strict -> Some "onlyStrict" - | No_strict -> Some "noStrict" - | Raw -> Some "raw" - | Both_strictnesses -> None - in - match flags with - | Some flags -> - (Printf.sprintf "flags: [%s]" flags)::acc - | None -> - acc - ) + let flags = + match fm.flags with + | Only_strict -> Some "onlyStrict" + | No_strict -> Some "noStrict" + | Raw -> Some "raw" + | Both_strictnesses -> None + in + match flags with + | Some flags -> Printf.sprintf "flags: [%s]" flags :: acc + | None -> acc) |> List.rev |> String.concat "\n" @@ -242,74 +247,87 @@ module Frontmatter = struct end let options_by_strictness frontmatter = - let open Parser_env in - let open Frontmatter in - let o = Parser_env.default_parse_options in - match frontmatter.flags with - | Both_strictnesses -> - failwith "should have been split by split_by_strictness" - | Only_strict -> - { o with use_strict = true } - | No_strict - | Raw -> - { o with use_strict = false } + Parser_env.( + Frontmatter.( + let o = Parser_env.default_parse_options in + match frontmatter.flags with + | Both_strictnesses -> failwith "should have been split by split_by_strictness" + | Only_strict -> { o with use_strict = true } + | No_strict + | Raw -> + { o with use_strict = false })) let split_by_strictness frontmatter = - let open Frontmatter in - match frontmatter.flags with - | Both_strictnesses -> - [ - { frontmatter with flags = Only_strict }; - { frontmatter with flags = No_strict }; - ] - | Only_strict - | No_strict - | Raw -> - [frontmatter] + Frontmatter.( + match frontmatter.flags with + | Both_strictnesses -> + [{ frontmatter with flags = Only_strict }; { frontmatter with flags = No_strict }] + | Only_strict + | No_strict + | Raw -> + [frontmatter]) let parse_test acc filename = let content = Sys_utils.cat filename in match Frontmatter.of_string content with | Some frontmatter -> - List.fold_left (fun acc frontmatter -> - let input = (filename, Frontmatter.(frontmatter.flags = Only_strict)) in - (input, frontmatter, content)::acc - ) acc (split_by_strictness frontmatter) - | None -> - acc + List.fold_left + (fun acc frontmatter -> + let input = (filename, Frontmatter.(frontmatter.flags = Only_strict)) in + (input, frontmatter, content) :: acc) + acc + (split_by_strictness frontmatter) + | None -> acc let run_test (name, frontmatter, content) = let (filename, use_strict) = name in - let parse_options = Parser_env.({ default_parse_options with use_strict }) in - let (_ast, errors) = Parser_flow.program_file - ~fail:false ~parse_options:(Some parse_options) - content (Some (File_key.SourceFile filename)) in - let result = match errors, Frontmatter.negative_phase frontmatter with - | [], Some ("early" | "parse") -> - (* expected a parse error, 
didn't get it *) - Error Missing_parse_error - | _, Some ("early" | "parse") -> - (* expected a parse error, got one *) - Ok () - | [], Some _ - | [], None -> - (* did not expect a parse error, didn't get one *) - Ok () - | err::_, Some _ - | err::_, None -> - (* did not expect a parse error, got one incorrectly *) - Error (Unexpected_parse_error err) + let parse_options = + { + Parser_env.enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = false; + esproposal_export_star_as = false; + esproposal_optional_chaining = false; + esproposal_nullish_coalescing = false; + types = false; + use_strict; + } + in + let (_ast, errors) = + Parser_flow.program_file + ~fail:false + ~parse_options:(Some parse_options) + content + (Some (File_key.SourceFile filename)) + in + let result = + match (errors, Frontmatter.negative_phase frontmatter) with + | ([], Some ("early" | "parse")) -> + (* expected a parse error, didn't get it *) + Error Missing_parse_error + | (_, Some ("early" | "parse")) -> + (* expected a parse error, got one *) + Ok () + | ([], Some _) + | ([], None) -> + (* did not expect a parse error, didn't get one *) + Ok () + | (err :: _, Some _) + | (err :: _, None) -> + (* did not expect a parse error, got one incorrectly *) + Error (Unexpected_parse_error err) in { name; result } let incr_result (passed, failed) did_pass = - if did_pass then succ passed, failed - else passed, succ failed - -let fold_test ~verbose ~strip_root ~bar - (passed_acc, failed_acc, features_acc) - (name, frontmatter, content) = + if did_pass then + (succ passed, failed) + else + (passed, succ failed) +let fold_test + ~verbose ~strip_root ~bar (passed_acc, failed_acc, features_acc) (name, frontmatter, content) = if verbose then print_name ~strip_root name; let passed = let { name; result } = run_test (name, frontmatter, content) in @@ -321,72 +339,97 @@ let fold_test ~verbose ~strip_root ~bar print_error err; false in - let passed_acc, failed_acc = incr_result (passed_acc, failed_acc) passed in - let features_acc = List.fold_left (fun acc name -> - let feature = try SMap.find name acc with Not_found -> (0, 0) in - let feature = incr_result feature passed in - SMap.add name feature acc - ) features_acc frontmatter.Frontmatter.features in - Option.iter ~f:(fun bar -> - Progress_bar.incr bar; - if not passed then Progress_bar.print (passed_acc, failed_acc) bar - else Progress_bar.print_throttled (passed_acc, failed_acc) bar - ) bar; + let (passed_acc, failed_acc) = incr_result (passed_acc, failed_acc) passed in + let features_acc = + List.fold_left + (fun acc name -> + let feature = (try SMap.find name acc with Not_found -> (0, 0)) in + let feature = incr_result feature passed in + SMap.add name feature acc) + features_acc + frontmatter.Frontmatter.features + in + Option.iter + ~f:(fun bar -> + Progress_bar.incr bar; + if not passed then + Progress_bar.print (passed_acc, failed_acc) bar + else + Progress_bar.print_throttled (passed_acc, failed_acc) bar) + bar; (passed_acc, failed_acc, features_acc) let main () = let verbose_ref = ref Normal in let path_ref = ref None in let strip_root_ref = ref false in - let speclist = [ - "-q", Arg.Unit (fun () -> verbose_ref := Quiet), "Enables quiet mode"; - "-v", Arg.Unit (fun () -> verbose_ref := Verbose), "Enables verbose mode"; - "-s", Arg.Set strip_root_ref, "Print paths relative to root directory"; - ] in + let speclist = + [ + ("-q", Arg.Unit (fun () -> verbose_ref := Quiet), "Enables quiet mode"); + 
("-v", Arg.Unit (fun () -> verbose_ref := Verbose), "Enables verbose mode"); + ("-s", Arg.Set strip_root_ref, "Print paths relative to root directory"); + ] + in let usage_msg = "Runs flow parser on test262 tests. Options available:" in Arg.parse speclist (fun anon -> path_ref := Some anon) usage_msg; - let path = match !path_ref with - | Some "" - | None -> prerr_endline "Invalid usage"; exit 1 - | Some path -> if path.[String.length path - 1] <> '/' then (path^"/") else path + let path = + match !path_ref with + | Some "" + | None -> + prerr_endline "Invalid usage"; + exit 1 + | Some path -> + if path.[String.length path - 1] <> '/' then + path ^ "/" + else + path + in + let strip_root = + if !strip_root_ref then + Some path + else + None in - let strip_root = if !strip_root_ref then Some path else None in let quiet = !verbose_ref = Quiet in let verbose = !verbose_ref = Verbose in - let files = files_of_path path |> List.sort String.compare in let tests = List.fold_left parse_test [] files |> List.rev in let test_count = List.length tests in - let bar = - if quiet || not (Unix.isatty Unix.stdout) then None - else Some (Progress_bar.make ~chunks:40 ~frequency:0.1 test_count) in - - let (passed, failed, results_by_feature) = List.fold_left - (fold_test ~verbose ~strip_root ~bar) - (0, 0, SMap.empty) - tests in - - begin match bar with - | Some bar -> Progress_bar.clear (passed, failed) bar - | None -> () + if quiet || not (Unix.isatty Unix.stdout) then + None + else + Some (Progress_bar.make ~chunks:40 ~frequency:0.1 test_count) + in + let (passed, failed, results_by_feature) = + List.fold_left (fold_test ~verbose ~strip_root ~bar) (0, 0, SMap.empty) tests + in + begin + match bar with + | Some bar -> Progress_bar.clear (passed, failed) bar + | None -> () end; let total = float_of_int (passed + failed) in Printf.printf "\n=== Summary ===\n"; - Printf.printf "Passed: %d (%.2f%%)\n" passed ((float_of_int passed) /. total *. 100.); - Printf.printf "Failed: %d (%.2f%%)\n" failed ((float_of_int failed) /. total *. 100.); + Printf.printf "Passed: %d (%.2f%%)\n" passed (float_of_int passed /. total *. 100.); + Printf.printf "Failed: %d (%.2f%%)\n" failed (float_of_int failed /. total *. 100.); - if not (SMap.is_empty results_by_feature) then begin + if not (SMap.is_empty results_by_feature) then ( Printf.printf "\nFeatures:\n"; - SMap.iter (fun name (passed, failed) -> - if failed > 0 || verbose then - let total = passed + failed in - let total_f = float_of_int total in - Printf.printf " %s: %d/%d (%.2f%%)\n" - name passed total ((float_of_int passed) /. total_f *. 100.) - ) results_by_feature; - end; + SMap.iter + (fun name (passed, failed) -> + if failed > 0 || verbose then + let total = passed + failed in + let total_f = float_of_int total in + Printf.printf + " %s: %d/%d (%.2f%%)\n" + name + passed + total + (float_of_int passed /. total_f *. 100.)) + results_by_feature + ); () diff --git a/src/parser/test/run_tests.ml b/src/parser/test/run_tests.ml index 0866d823e84..cee25f78408 100644 --- a/src/parser/test/run_tests.ml +++ b/src/parser/test/run_tests.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -14,46 +14,39 @@ module String_utils = struct try let long = String.sub long 0 (String.length short) in long = short - with Invalid_argument _ -> - false + with Invalid_argument _ -> false let strip_prefix prefix str = let prefix_length = String.length prefix in - if string_starts_with str prefix - then String.sub str prefix_length (String.length str - prefix_length) - else str + if string_starts_with str prefix then + String.sub str prefix_length (String.length str - prefix_length) + else + str let split_extension str = let chopped = Filename.chop_extension str in let len = String.length str in let clen = String.length chopped in let ext = String.sub str (clen + 1) (len - clen - 1) in - chopped, ext + (chopped, ext) end -module Translate = Estree_translator.Translate (Json_of_estree) (struct - let include_comments = true - let include_locs = true -end) - module RunEsprimaTests : sig val main : unit -> unit end = struct open Hh_json open String_utils - - module SMap = Map.Make(String) + module SMap = Map.Make (String) module C = Tty let should_color = Sys.os_type <> "Win32" && Unix.isatty Unix.stdout && Sys.getenv "TERM" <> "dumb" let print to_print = - if should_color - then + if should_color then C.cprint to_print else - let strings = List.map snd to_print in + let strings = Core_list.map ~f:snd to_print in List.iter (Printf.printf "%s") strings type case_expectation = @@ -67,9 +60,11 @@ end = struct | Todo of string (* reason *) | Same + type test_options = { intern_comments: bool } + type case = { source: string option; - options: Parser_env.parse_options option; + options: test_options * Parser_env.parse_options option; expected: case_expectation option; diff: case_diff; skipped: string list; @@ -85,13 +80,14 @@ end = struct | Case_skipped of string option (* reason *) | Case_error of string list - let empty_case = { - source = None; - options = None; - expected = None; - diff = Same; - skipped = []; - } + let empty_case = + { + source = None; + options = ({ intern_comments = false }, None); + expected = None; + diff = Same; + skipped = []; + } type path_part = | Prop of string @@ -102,285 +98,297 @@ end = struct | Prop p :: rest -> spf "%s.%s" (string_of_path rest) p | Index i :: rest -> spf "%s[%s]" (string_of_path rest) (string_of_int i) - let find_case name map = try SMap.find name map with Not_found -> empty_case + let find_case name map = (try SMap.find name map with Not_found -> empty_case) let parse_options content = - let open Core_result in - let get_bool k v = - try return (Hh_json.get_bool_exn v) - with Assert_failure _ -> failf "invalid value for %S, expected bool" k - in - return (Hh_json.json_of_string content) - >>= fun json -> - begin match json with - | Hh_json.JSON_Object props -> return props - | _ -> fail "expected options to be a JSON object" + Core_result.( + let get_bool k v = + try return (Hh_json.get_bool_exn v) + with Assert_failure _ -> failf "invalid value for %S, expected bool" k + in + return (Hh_json.json_of_string content) + >>= fun json -> + begin + match json with + | Hh_json.JSON_Object props -> return props + | _ -> fail "expected options to be a JSON object" end - >>= fun props -> - List.fold_left (fun opts (k, v) -> opts >>= (fun opts -> - match k with - | "esproposal_class_instance_fields" -> get_bool k v >>= fun v -> - return { opts with Parser_env.esproposal_class_instance_fields = v } - - | "esproposal_class_static_fields" -> get_bool k v >>= fun v -> - return { opts with Parser_env.esproposal_class_static_fields = v } - - | 
"esproposal_decorators" -> get_bool k v >>= fun v -> - return { opts with Parser_env.esproposal_decorators = v } - - | "esproposal_export_star_as" -> get_bool k v >>= fun v -> - return { opts with Parser_env.esproposal_export_star_as = v } - - | "esproposal_optional_chaining" -> get_bool k v >>= fun v -> - return { opts with Parser_env.esproposal_optional_chaining = v } - - | "esproposal_nullish_coalescing" -> get_bool k v >>= fun v -> - return { opts with Parser_env.esproposal_nullish_coalescing = v } - - | "types" -> get_bool k v >>= fun v -> - return { opts with Parser_env.types = v } - - | "use_strict" -> get_bool k v >>= fun v -> - return { opts with Parser_env.use_strict = v } - - | _ -> - failf "unknown option %S" k - )) (return Parser_env.default_parse_options) props + >>= fun props -> + List.fold_left + (fun opts (k, v) -> + opts + >>= fun (test_opts, opts) -> + match k with + | "enums" -> + get_bool k v >>= (fun v -> return (test_opts, { opts with Parser_env.enums = v })) + | "esproposal_class_instance_fields" -> + get_bool k v + >>= fun v -> + return (test_opts, { opts with Parser_env.esproposal_class_instance_fields = v }) + | "esproposal_class_static_fields" -> + get_bool k v + >>= fun v -> + return (test_opts, { opts with Parser_env.esproposal_class_static_fields = v }) + | "esproposal_decorators" -> + get_bool k v + >>= (fun v -> return (test_opts, { opts with Parser_env.esproposal_decorators = v })) + | "esproposal_export_star_as" -> + get_bool k v + >>= fun v -> + return (test_opts, { opts with Parser_env.esproposal_export_star_as = v }) + | "esproposal_optional_chaining" -> + get_bool k v + >>= fun v -> + return (test_opts, { opts with Parser_env.esproposal_optional_chaining = v }) + | "esproposal_nullish_coalescing" -> + get_bool k v + >>= fun v -> + return (test_opts, { opts with Parser_env.esproposal_nullish_coalescing = v }) + | "types" -> + get_bool k v >>= (fun v -> return (test_opts, { opts with Parser_env.types = v })) + | "use_strict" -> + get_bool k v >>= (fun v -> return (test_opts, { opts with Parser_env.use_strict = v })) + | "intern_comments" -> get_bool k v >>= (fun v -> return ({ intern_comments = v }, opts)) + | _ -> failf "unknown option %S" k) + (return ({ intern_comments = false }, Parser_env.default_parse_options)) + props) let tests_of_path path = let relativize = strip_prefix path in - File_utils.fold_files ~filter:(fun x -> x <> path) [path] (fun kind acc -> - match kind with - | File_utils.Dir dir -> - let test = { test_name = relativize dir; cases = SMap.empty } in - test::acc - | File_utils.File file -> - begin match acc with - | test::rest -> - let case_name = strip_prefix (test.test_name ^ Filename.dir_sep) (relativize file) in - let case_name, ext = split_extension case_name in - let cases = match ext with - | "js" when Filename.check_suffix case_name ".source" -> - let case_name = Filename.chop_suffix case_name ".source" in - let case = find_case case_name test.cases in - let case = { case with skipped = file::case.skipped } in - SMap.add case_name case test.cases - | "js" -> - let case = find_case case_name test.cases in - let source = Sys_utils.cat file in - let case = { case with source = Some source; } in - SMap.add case_name case test.cases - | "json" -> - let case_name, kind = split_extension case_name in - let case = find_case case_name test.cases in - let content = Sys_utils.cat file in - let case = match kind with - | "module" -> { case with expected = Some (Module content); } - | "tree" -> { case with expected = Some (Tree 
content); } - | "tokens" -> { case with expected = Some (Tokens content); } - | "failure" -> { case with expected = Some (Failure content); } - | "options" -> - (* TODO: propagate errors better *) - let options = Core_result.ok_or_failwith (parse_options content) in - { case with options = Some options; } - | _ -> { case with skipped = file::case.skipped } - in - SMap.add case_name case test.cases - | "diff" -> - let case = find_case case_name test.cases in - let source = Sys_utils.cat file in - let case = { case with diff = Diff source; } in - SMap.add case_name case test.cases - | "skip" -> - let case = find_case case_name test.cases in - let skip = Sys_utils.cat file |> String.trim in - let case = { case with diff = Todo skip; } in - SMap.add case_name case test.cases - | _ -> test.cases - in - ({ test with cases = cases })::rest - | _ -> - acc - end - ) [] + File_utils.fold_files + ~filter:(fun x -> x <> path) + [path] + (fun kind acc -> + match kind with + | File_utils.Dir dir -> + let test = { test_name = relativize dir; cases = SMap.empty } in + test :: acc + | File_utils.File file -> + begin + match acc with + | test :: rest -> + let case_name = strip_prefix (test.test_name ^ Filename.dir_sep) (relativize file) in + let (case_name, ext) = split_extension case_name in + let cases = + match ext with + | "js" when Filename.check_suffix case_name ".source" -> + let case_name = Filename.chop_suffix case_name ".source" in + let case = find_case case_name test.cases in + let case = { case with skipped = file :: case.skipped } in + SMap.add case_name case test.cases + | "js" -> + let case = find_case case_name test.cases in + let source = Sys_utils.cat file in + let case = { case with source = Some source } in + SMap.add case_name case test.cases + | "json" -> + let (case_name, kind) = split_extension case_name in + let case = find_case case_name test.cases in + let content = Sys_utils.cat file in + let case = + match kind with + | "module" -> { case with expected = Some (Module content) } + | "tree" -> { case with expected = Some (Tree content) } + | "tokens" -> { case with expected = Some (Tokens content) } + | "failure" -> { case with expected = Some (Failure content) } + | "options" -> + (* TODO: propagate errors better *) + let options = Core_result.ok_or_failwith (parse_options content) in + { case with options = (fst options, Some (snd options)) } + | _ -> { case with skipped = file :: case.skipped } + in + SMap.add case_name case test.cases + | "diff" -> + let case = find_case case_name test.cases in + let source = Sys_utils.cat file in + let case = { case with diff = Diff source } in + SMap.add case_name case test.cases + | "skip" -> + let case = find_case case_name test.cases in + let skip = Sys_utils.cat file |> String.trim in + let case = { case with diff = Todo skip } in + SMap.add case_name case test.cases + | _ -> test.cases + in + { test with cases } :: rest + | _ -> acc + end) + [] |> List.filter (fun test -> SMap.cardinal test.cases > 0) |> List.rev let expected_error_regex = Str.regexp "^Error: Line [0-9]+: \\(.*\\)$" let expected_different_property path expected actual = - match path, expected, actual with - | (Index _::Prop "errors"::[]), None, Some _ -> + match (path, expected, actual) with + | ([Index _; Prop "errors"], None, Some _) -> (* Don't ignore differences in errors *) false - (* Flow always includes comments and locations *) - | _, None, Some "comments" - | _, None, Some "loc" - | _, None, Some "range" - | _, None, Some "source" - + | (_, None, Some 
"comments") + | (_, None, Some "loc") + | (_, None, Some "range") + | (_, None, Some "source") (* Esprima doesn't support type annotations *) - | _, None, Some "typeAnnotation" - | _, None, Some "typeParameters" - | _, None, Some "superTypeParameters" - | _, None, Some "optional" - | _, None, Some "returnType" - | _, None, Some "predicate" - | _, None, Some "implements" - | _, None, Some "importKind" - | _, None, Some "exportKind" - + + | (_, None, Some "typeAnnotation") + | (_, None, Some "typeParameters") + | (_, None, Some "superTypeParameters") + | (_, None, Some "optional") + | (_, None, Some "returnType") + | (_, None, Some "predicate") + | (_, None, Some "implements") + | (_, None, Some "importKind") + | (_, None, Some "exportKind") (* Esprima doesn't support decorators *) (* https://github.com/estree/estree/blob/master/experimental/decorators.md *) - | _, None, Some "decorators" - + + | (_, None, Some "decorators") (* Esprima doesn't support async functions *) - | _, None, Some "async" - + + | (_, None, Some "async") (* TODO: Flow should include this *) - | [], Some "sourceType", None - + + | ([], Some "sourceType", None) (* TODO: enable this in tests *) - | [], Some "tokens", None - + + | ([], Some "tokens", None) (* Flow doesn't support this *) - | _, Some "leadingComments", None - | _, Some "trailingComments", None - | _, Some "innerComments", None - -> true - + + | (_, Some "leadingComments", None) + | (_, Some "trailingComments", None) + | (_, Some "innerComments", None) -> + true | _ -> false let string_value_matches expected actual = - if expected = actual then true + if expected = actual then + true else if Str.string_match expected_error_regex expected 0 then Str.matched_group 1 expected = actual else false let map_of_pairs pairs = - List.fold_left (fun acc (key, value) -> - SMap.add key value acc - ) SMap.empty pairs + List.fold_left (fun acc (key, value) -> SMap.add key value acc) SMap.empty pairs let map_of_properties props = - let open Ast.Expression in - List.fold_left (fun acc prop -> - match prop with - | Object.Property (_loc, Object.Property.Init { - key = Object.Property.Literal (_, { - Ast.Literal.value = Ast.Literal.String name; raw = _ - }); - value; - shorthand = false; - }) -> - SMap.add name value acc - | _ -> failwith "Invalid JSON" - ) SMap.empty props + Ast.Expression.( + List.fold_left + (fun acc prop -> + match prop with + | Object.Property + ( _loc, + Object.Property.Init + { + key = + Object.Property.Literal + (_, { Ast.Literal.value = Ast.Literal.String name; raw = _; comments = _ }); + value; + shorthand = false; + } ) -> + SMap.add name value acc + | _ -> failwith "Invalid JSON") + SMap.empty + props) let string_of_json_type = function - | JSON_Object _ -> "object" - | JSON_Array _ -> "array" - | JSON_String _ -> "string" - | JSON_Number _ -> "number" - | JSON_Bool _ -> "bool" - | JSON_Null -> "null" + | JSON_Object _ -> "object" + | JSON_Array _ -> "array" + | JSON_String _ -> "string" + | JSON_Number _ -> "number" + | JSON_Bool _ -> "bool" + | JSON_Null -> "null" let rec test_tree - (path: path_part list) - (actual: Hh_json.json) - (expected: (Loc.t, Loc.t) Ast.Expression.t) - (errors: string list) - : string list = - let open Ast.Expression in - match actual, expected with - | JSON_Object aprops, - (_, Object { Object.properties = eprops }) -> - let amap = map_of_pairs aprops in - let emap = map_of_properties eprops in - errors - |> SMap.fold (test_actual_prop path emap) amap - |> SMap.fold (test_expected_prop path amap) emap - | 
JSON_Array aitems, - (_, Array { Array.elements = eitems }) -> - let a_len = List.length aitems in - let e_len = List.length eitems in - if e_len <> a_len then - let path = string_of_path path in - let err = spf "%s: Expected %d elements, got %d." path e_len a_len in - err::errors - else - let _, diffs = List.fold_left2 (fun (i, acc) actual expected -> - let path = (Index i)::path in - let acc = match expected with - | Some (Expression expr) -> - test_tree path actual expr acc - | _ -> (spf "%s: invalid JSON" (string_of_path path))::acc + (path : path_part list) + (actual : Hh_json.json) + (expected : (Loc.t, Loc.t) Ast.Expression.t) + (errors : string list) : string list = + Ast.Expression.( + match (actual, expected) with + | (JSON_Object aprops, (_, Object { Object.properties = eprops; comments = _ })) -> + let amap = map_of_pairs aprops in + let emap = map_of_properties eprops in + errors + |> SMap.fold (test_actual_prop path emap) amap + |> SMap.fold (test_expected_prop path amap) emap + | (JSON_Array aitems, (_, Array { Array.elements = eitems; comments = _ })) -> + let a_len = List.length aitems in + let e_len = List.length eitems in + if e_len <> a_len then + let path = string_of_path path in + let err = spf "%s: Expected %d elements, got %d." path e_len a_len in + err :: errors + else + let (_, diffs) = + List.fold_left2 + (fun (i, acc) actual expected -> + let path = Index i :: path in + let acc = + match expected with + | Some (Expression expr) -> test_tree path actual expr acc + | _ -> spf "%s: invalid JSON" (string_of_path path) :: acc + in + (i + 1, acc)) + (0, errors) + aitems + eitems in - i + 1, acc - ) (0, errors) aitems eitems in - diffs - | JSON_Bool actual, - (_, Literal { Ast.Literal. - value = Ast.Literal.Boolean expected; - raw = _; - }) -> + diffs + | ( JSON_Bool actual, + (_, Literal { Ast.Literal.value = Ast.Literal.Boolean expected; raw = _; comments = _ }) + ) -> if actual <> expected then let path = string_of_path path in - (spf "%s: Expected %b, got %b." path expected actual)::errors + spf "%s: Expected %b, got %b." path expected actual :: errors else errors - | JSON_Number actual, - (_, Literal { Ast.Literal. - value = Ast.Literal.Number _; - raw = expected; - }) -> + | ( JSON_Number actual, + (_, Literal { Ast.Literal.value = Ast.Literal.Number _; raw = expected; comments = _ }) + ) -> if actual <> expected then let path = string_of_path path in - (spf "%s: Expected %s, got %s." path expected actual)::errors + spf "%s: Expected %s, got %s." path expected actual :: errors else errors - | JSON_Number actual, - (_, Unary { Unary. - operator = Unary.Minus; - prefix = _; - argument = (_, Literal { Ast.Literal. - value = Ast.Literal.Number _; - raw = expected; - }) - }) -> + | ( JSON_Number actual, + ( _, + Unary + { + Unary.operator = Unary.Minus; + argument = + ( _, + Literal + { Ast.Literal.value = Ast.Literal.Number _; raw = expected; comments = _ } ); + comments = _; + } ) ) -> let expected = "-" ^ expected in if actual <> expected then let path = string_of_path path in - (spf "%s: Expected %s, got %s." path expected actual)::errors + spf "%s: Expected %s, got %s." path expected actual :: errors else errors - | JSON_String actual, - (_, Literal { Ast.Literal. - value = Ast.Literal.String expected; - raw = _; - }) -> + | ( JSON_String actual, + (_, Literal { Ast.Literal.value = Ast.Literal.String expected; raw = _; comments = _ }) + ) -> if not (string_value_matches expected actual) then let path = string_of_path path in - (spf "%s: Expected %S, got %S." 
path expected actual)::errors + spf "%s: Expected %S, got %S." path expected actual :: errors else errors - | JSON_Null, - (_, Literal { Ast.Literal. - value = Ast.Literal.Null; - raw = _; - }) -> + | (JSON_Null, (_, Literal { Ast.Literal.value = Ast.Literal.Null; raw = _; comments = _ })) + -> errors - | _, _ -> + | (_, _) -> let path = string_of_path path in let act_type = string_of_json_type actual in - (spf "%s: Types do not match, got %s" path act_type)::errors + spf "%s: Types do not match, got %s" path act_type :: errors) and test_actual_prop path expected_map name value acc = if SMap.mem name expected_map then let expected_value = SMap.find name expected_map in - test_tree ((Prop name)::path) value expected_value acc + test_tree (Prop name :: path) value expected_value acc else if value = JSON_Null then (* allow Flow to have extra properties when their values are null. this is a narrower exception than `expected_different_property`; that function @@ -391,166 +399,205 @@ end = struct acc else let path = string_of_path path in - (spf "%s: Unexpected key %S" path name)::acc + spf "%s: Unexpected key %S" path name :: acc and test_expected_prop path actual_map name _ acc = - if SMap.mem name actual_map || - expected_different_property path (Some name) None - then acc + if SMap.mem name actual_map || expected_different_property path (Some name) None then + acc else let path = string_of_path path in - (spf "%s: Missing key %S" path name)::acc - - let prop_name_and_value = Ast.Expression.(function - | Object.Property.Init { - key = Object.Property.Literal (_, { - Ast.Literal.value = Ast.Literal.String name; raw = _ - }); - value; - shorthand = false; - } -> name, value - | _ -> failwith "Invalid JSON" - ) - - let has_prop - (needle: string) - (haystack: (Loc.t, Loc.t) Ast.Expression.Object.property list) = - List.exists Ast.Expression.(function - | Object.Property (_, prop) -> fst (prop_name_and_value prop) = needle - | _ -> false - ) haystack + spf "%s: Missing key %S" path name :: acc + + let prop_name_and_value = + Ast.Expression.( + function + | Object.Property.Init + { + key = + Object.Property.Literal + (_, { Ast.Literal.value = Ast.Literal.String name; raw = _; comments = _ }); + value; + shorthand = false; + } -> + (name, value) + | _ -> failwith "Invalid JSON") + + let has_prop (needle : string) (haystack : (Loc.t, Loc.t) Ast.Expression.Object.property list) = + List.exists + Ast.Expression.( + function + | Object.Property (_, prop) -> fst (prop_name_and_value prop) = needle + | _ -> false) + haystack let rec apply_diff diff expected = - let open Ast.Expression in - match diff with - | (_, Object { Object.properties = diff_props; _ }) -> - begin match expected with - | (loc, Object { Object.properties = expected_props; }) -> - let properties = List.fold_left (fun props diff_prop -> - match diff_prop with - | Object.Property (diff_loc, diff_prop) -> - let diff_name, diff_value = prop_name_and_value diff_prop in - if not (has_prop diff_name props) then - Object.Property (diff_loc, diff_prop)::props - else List.fold_left (fun acc exp -> match exp with - | Object.Property (exp_loc, exp_prop) -> - let exp_key = match exp_prop with - | Object.Property.Init { key; _ } -> key - | _ -> failwith "Invalid JSON" - in - let exp_name, exp_value = prop_name_and_value exp_prop in - if exp_name = diff_name then - (* recursively apply diff *) - match apply_diff diff_value exp_value with - | Some value -> - let prop = Object.Property (exp_loc, Object.Property.Init { - key = exp_key; - value; - 
shorthand = false; - }) in - prop::acc - | None -> - acc - else - let prop = Object.Property (exp_loc, exp_prop) in - prop::acc - | prop -> prop::acc - ) [] props - | _ -> failwith "Invalid JSON" - ) expected_props diff_props in - Some (loc, Object { Object.properties }) - | (loc, Array { Array.elements = expected_elems; }) -> - let expected_length = List.length expected_elems in - let elements = List.fold_left (fun elems diff_prop -> - match diff_prop with - | Object.Property (_, diff_prop) -> - let diff_name, diff_value = prop_name_and_value diff_prop in - let diff_index = int_of_string diff_name in - if diff_index >= expected_length then - (* append the diff *) - (* TODO: this should insert gaps, but I don't expect people to + Ast.Expression.( + match diff with + | (_, Object { Object.properties = diff_props; _ }) -> + begin + match expected with + | (loc, Object { Object.properties = expected_props; comments }) -> + let properties = + List.fold_left + (fun props diff_prop -> + match diff_prop with + | Object.Property (diff_loc, diff_prop) -> + let (diff_name, diff_value) = prop_name_and_value diff_prop in + if not (has_prop diff_name props) then + Object.Property (diff_loc, diff_prop) :: props + else + List.fold_left + (fun acc exp -> + match exp with + | Object.Property (exp_loc, exp_prop) -> + let exp_key = + match exp_prop with + | Object.Property.Init { key; _ } -> key + | _ -> failwith "Invalid JSON" + in + let (exp_name, exp_value) = prop_name_and_value exp_prop in + if exp_name = diff_name then + (* recursively apply diff *) + match apply_diff diff_value exp_value with + | Some value -> + let prop = + Object.Property + ( exp_loc, + Object.Property.Init + { key = exp_key; value; shorthand = false } ) + in + prop :: acc + | None -> acc + else + let prop = Object.Property (exp_loc, exp_prop) in + prop :: acc + | prop -> prop :: acc) + [] + props + | _ -> failwith "Invalid JSON") + expected_props + diff_props + in + Some (loc, Object { Object.properties; comments }) + | (loc, Array { Array.elements = expected_elems; comments }) -> + let expected_length = List.length expected_elems in + let elements = + List.fold_left + (fun elems diff_prop -> + match diff_prop with + | Object.Property (_, diff_prop) -> + let (diff_name, diff_value) = prop_name_and_value diff_prop in + let diff_index = int_of_string diff_name in + if diff_index >= expected_length then + (* append the diff *) + (* TODO: this should insert gaps, but I don't expect people to write diffs that have gaps. 
*) - List.rev ((Some (Expression diff_value))::(List.rev elems)) - else - (* apply the diff *) - List.mapi (fun index elem -> - if index <> diff_index then elem - else match elem with - | None -> - Some (Expression diff_value) - | Some (Expression exp_value) -> - begin match apply_diff diff_value exp_value with - | Some value -> Some (Expression value) - | None -> None - end - | Some (Spread _) -> - failwith "Invalid JSON" - ) elems - | _ -> failwith "Invalid JSON" - ) expected_elems diff_props in - Some (loc, Array { Array.elements }) - | _ -> Some expected - end - | (_, Literal _) -> Some diff - | (_, Identifier (_, "undefined")) -> - None - | _ -> failwith "Invalid diff format" - - let parse_file ?parse_options content = - let (ast, errors) = Parser_flow.program_file - ~fail:false ~parse_options content None in - match Translate.program ast with + List.rev (Some (Expression diff_value) :: List.rev elems) + else + (* apply the diff *) + List.mapi + (fun index elem -> + if index <> diff_index then + elem + else + match elem with + | None -> Some (Expression diff_value) + | Some (Expression exp_value) -> + begin + match apply_diff diff_value exp_value with + | Some value -> Some (Expression value) + | None -> None + end + | Some (Spread _) -> failwith "Invalid JSON") + elems + | _ -> failwith "Invalid JSON") + expected_elems + diff_props + in + Some (loc, Array { Array.elements; comments }) + | _ -> Some expected + end + | (_, Literal _) -> Some diff + | (_, Identifier (_, { Ast.Identifier.name = "undefined"; comments = _ })) -> None + | _ -> failwith "Invalid diff format") + + let parse_file (test_options, parse_options) content = + let (ast, errors) = Parser_flow.program_file ~fail:false ~parse_options content None in + let offset_table = Some (Offset_utils.make content) in + let module Translate = + Estree_translator.Translate + (Json_of_estree) + (struct + let include_interned_comments = test_options.intern_comments + + let include_comments = true + + let include_locs = true + end) + in + match Translate.program offset_table ast with | JSON_Object params -> - let params = - if errors = [] then params - else ("errors", Translate.errors errors)::params in - JSON_Object params + let params = + if errors = [] then + params + else + ("errors", Translate.errors errors) :: params + in + JSON_Object params | _ -> assert false let run_case case : case_result = match case.source with | None -> - if List.length case.skipped = 0 && case.diff = Same - then Case_error ["No source"] - else Case_skipped None + if List.length case.skipped = 0 && case.diff = Same then + Case_error ["No source"] + else + Case_skipped None | Some content -> - let actual = parse_file ?parse_options:case.options content in - let diff, todo = + let actual = parse_file case.options content in + let (diff, todo) = match case.diff with - | Diff str -> Some str, None - | Todo str -> None, Some str - | Same -> None, None + | Diff str -> (Some str, None) + | Todo str -> (None, Some str) + | Same -> (None, None) in - begin match case.expected with - | Some (Module _) -> (* TODO *) Case_skipped None - | Some (Tree tree) -> - let expected, json_errors = Parser_flow.json_file ~fail:false tree None in - if json_errors <> [] then begin + begin + match case.expected with + | Some (Module _) -> (* TODO *) Case_skipped None + | Some (Tree tree) -> + let (expected, json_errors) = Parser_flow.json_file ~fail:false tree None in + if json_errors <> [] then let (loc, err) = List.hd json_errors in - let str = Printf.sprintf "Unable to parse 
.tree.json: %s: %s" - (Loc.to_string loc) (Parse_error.PP.error err) in + let str = + Printf.sprintf + "Unable to parse .tree.json: %s: %s" + (Loc.debug_to_string loc) + (Parse_error.PP.error err) + in Case_error [str] - end else - let expected = match diff with - | Some str -> - let diffs = fst (Parser_flow.json_file ~fail:true str None) in - begin match apply_diff diffs expected with - | Some x -> x - | None -> failwith "unexpected diff: removed everything"; + else + let expected = + match diff with + | Some str -> + let diffs = fst (Parser_flow.json_file ~fail:true str None) in + begin + match apply_diff diffs expected with + | Some x -> x + | None -> failwith "unexpected diff: removed everything" + end + | None -> expected + in + let errors = test_tree [] actual expected [] in + begin + match (errors, todo) with + | ([], None) -> Case_ok + | ([], Some _) -> Case_error ["Skipped test passes"] + | (_, Some reason) -> Case_skipped (Some reason) + | (_, None) -> Case_error errors end - | None -> - expected - in - let errors = test_tree [] actual expected [] in - begin match errors, todo with - | [], None -> Case_ok - | [], Some _ -> Case_error ["Skipped test passes"] - | _, Some reason -> Case_skipped (Some reason) - | _, None -> Case_error errors - end - | Some (Tokens _) -> (* TODO *) Case_skipped None - | Some (Failure _) -> (* TODO *) Case_skipped None - | None -> Case_error ["Nothing to do"] + | Some (Tokens _) -> (* TODO *) Case_skipped None + | Some (Failure _) -> (* TODO *) Case_skipped None + | None -> Case_error ["Nothing to do"] end type test_results = { @@ -567,32 +614,28 @@ end = struct failed_cases: int; } - let empty_suite_results = { - ok_tests = 0; - ok_cases = 0; - skipped_cases = 0; - failed_tests = 0; - failed_cases = 0; - } + let empty_suite_results = + { ok_tests = 0; ok_cases = 0; skipped_cases = 0; failed_tests = 0; failed_cases = 0 } let add_results suite test = - { suite with + { + suite with ok_cases = suite.ok_cases + test.ok; skipped_cases = suite.skipped_cases + test.skipped; failed_cases = suite.failed_cases + test.failed; } let record_tree path test_name case_name case = - match case.source, case.expected with - | Some content, None - | Some content, Some (Tree _) -> - let (/) a b = a ^ Filename.dir_sep ^ b in + match (case.source, case.expected) with + | (Some content, None) + | (Some content, Some (Tree _)) -> + let ( / ) a b = a ^ Filename.dir_sep ^ b in let filename = (path / test_name / case_name) ^ ".tree.json" in - let json = parse_file ?parse_options:case.options content in + let json = parse_file case.options content in let oc = open_out filename in output_string oc (Hh_json.json_to_multiline json); output_char oc '\n'; - close_out oc; + close_out oc | _ -> () type verbose_mode = @@ -604,83 +647,108 @@ end = struct let verbose_ref = ref Normal in let record_ref = ref false in let path_ref = ref None in - let speclist = [ - "-q", Arg.Unit (fun () -> verbose_ref := Quiet), "Enables quiet mode"; - "-v", Arg.Unit (fun () -> verbose_ref := Verbose), "Enables verbose mode"; - "-r", Arg.Set record_ref, "Re-record failing expected trees"; - ] in + let speclist = + [ + ("-q", Arg.Unit (fun () -> verbose_ref := Quiet), "Enables quiet mode"); + ("-v", Arg.Unit (fun () -> verbose_ref := Verbose), "Enables verbose mode"); + ("-r", Arg.Set record_ref, "Re-record failing expected trees"); + ] + in let usage_msg = "Runs flow parser on esprima tests. 
Options available:" in Arg.parse speclist (fun anon -> path_ref := Some anon) usage_msg; - let path = match !path_ref with - | Some path -> path - | None -> prerr_endline "Invalid usage"; exit 1 + let path = + match !path_ref with + | Some path -> path + | None -> + prerr_endline "Invalid usage"; + exit 1 in let quiet = !verbose_ref = Quiet in let verbose = !verbose_ref = Verbose in let record = !record_ref in let tests = tests_of_path path in - let results = List.fold_left (fun results { test_name; cases; } -> - if not quiet then print [C.Bold C.White, spf "=== %s ===\n" test_name]; - let test_results, _ = SMap.fold (fun key case (results, shown_header) -> - (* print [C.Normal C.Default, spf "[ ] %s\r" key]; *) - match run_case case with - | Case_ok -> - if verbose then print [ - C.Normal C.Green, "[\xE2\x9C\x93] PASS"; - C.Normal C.Default, spf ": %s\n" key - ]; - { results with ok = results.ok + 1 }, shown_header - | Case_skipped reason -> - begin match reason with - | Some "" - | None -> () - | Some reason -> - if not quiet then print [ - C.Normal C.Yellow, "[-] SKIP"; - C.Normal C.Default, spf ": %s - %s\n" key reason - ] - end; - { results with skipped = results.skipped + 1 }, shown_header - | Case_error errs -> - if quiet && not shown_header then print [C.Bold C.White, spf "=== %s ===\n" test_name]; - print [ - C.Normal C.Red, "[\xE2\x9C\x97] FAIL"; - C.Normal C.Default, spf ": %s\n" key - ]; - List.iter (fun err -> - print [C.Normal C.Default, spf " %s\n" err]; - ) errs; - flush stdout; - if record then record_tree path test_name key case; - { results with failed = results.failed + 1 }, true - ) cases ({ ok = 0; skipped = 0; failed = 0; }, false) in - if not quiet then print_endline ""; - let results = add_results results test_results in - if test_results.failed > 0 then - { results with failed_tests = results.failed_tests + 1; } - else - { results with ok_tests = results.ok_tests + 1; } - ) empty_suite_results tests in - + let results = + List.fold_left + (fun results { test_name; cases } -> + if not quiet then print [(C.Bold C.White, spf "=== %s ===\n" test_name)]; + let (test_results, _) = + SMap.fold + (fun key case (results, shown_header) -> + (* print [C.Normal C.Default, spf "[ ] %s\r" key]; *) + match run_case case with + | Case_ok -> + if verbose then + print + [ + (C.Normal C.Green, "[\xE2\x9C\x93] PASS"); + (C.Normal C.Default, spf ": %s\n" key); + ]; + ({ results with ok = results.ok + 1 }, shown_header) + | Case_skipped reason -> + begin + match reason with + | Some "" + | None -> + () + | Some reason -> + if not quiet then + print + [ + (C.Normal C.Yellow, "[-] SKIP"); + (C.Normal C.Default, spf ": %s - %s\n" key reason); + ] + end; + ({ results with skipped = results.skipped + 1 }, shown_header) + | Case_error errs -> + if quiet && not shown_header then + print [(C.Bold C.White, spf "=== %s ===\n" test_name)]; + print + [ + (C.Normal C.Red, "[\xE2\x9C\x97] FAIL"); + (C.Normal C.Default, spf ": %s\n" key); + ]; + List.iter (fun err -> print [(C.Normal C.Default, spf " %s\n" err)]) errs; + flush stdout; + if record then record_tree path test_name key case; + ({ results with failed = results.failed + 1 }, true)) + cases + ({ ok = 0; skipped = 0; failed = 0 }, false) + in + if not quiet then print_endline ""; + let results = add_results results test_results in + if test_results.failed > 0 then + { results with failed_tests = results.failed_tests + 1 } + else + { results with ok_tests = results.ok_tests + 1 }) + empty_suite_results + tests + in if results.failed_tests = 0 
then - let _ = print [ - C.Bold C.Default, spf "Passed: %d (%d cases), Failed: %d (%d cases), Skipped: %d cases\n" - results.ok_tests results.ok_cases - results.failed_tests results.failed_cases - results.skipped_cases - ] in + let _ = + print + [ + ( C.Bold C.Default, + spf + "Passed: %d (%d cases), Failed: %d (%d cases), Skipped: %d cases\n" + results.ok_tests + results.ok_cases + results.failed_tests + results.failed_cases + results.skipped_cases ); + ] + in exit 0 else - let _ = print [ - C.Bold C.Default, spf "Passed: %d (%d cases), " - results.ok_tests results.ok_cases; - C.BoldWithBG (C.White, C.Red), spf "Failed: %d (%d cases)" - results.failed_tests results.failed_cases; - C.Bold C.Default, spf ", Skipped: %d cases\n" - results.skipped_cases; - ] in + let _ = + print + [ + (C.Bold C.Default, spf "Passed: %d (%d cases), " results.ok_tests results.ok_cases); + ( C.BoldWithBG (C.White, C.Red), + spf "Failed: %d (%d cases)" results.failed_tests results.failed_cases ); + (C.Bold C.Default, spf ", Skipped: %d cases\n" results.skipped_cases); + ] + in exit 1 - end let _ = RunEsprimaTests.main () diff --git a/src/parser/test/test262_baseline.txt b/src/parser/test/test262_baseline.txt index c5beedf08fb..cc72aed5904 100644 --- a/src/parser/test/test262_baseline.txt +++ b/src/parser/test/test262_baseline.txt @@ -6,6 +6,10 @@ annexB/language/comments/single-line-html-close-asi.js (strict mode) Unexpected token > at (27, 2) to (27, 3) annexB/language/comments/single-line-html-close-asi.js (default) Unexpected token > at (27, 2) to (27, 3) +annexB/language/comments/single-line-html-close-unicode-separators.js (strict mode) + Unexpected token > at (37, 2) to (37, 3) +annexB/language/comments/single-line-html-close-unicode-separators.js (default) + Unexpected token > at (37, 2) to (37, 3) annexB/language/comments/single-line-html-close.js (strict mode) Unexpected token > at (25, 2) to (25, 3) annexB/language/comments/single-line-html-close.js (default) @@ -18,1890 +22,1206 @@ annexB/language/expressions/object/__proto__-duplicate.js (strict mode) Missing parse error annexB/language/expressions/object/__proto__-duplicate.js (default) Missing parse error -annexB/language/literals/numeric/non-octal-decimal-integer.js (default) - Unexpected token ILLEGAL at (33, 17) to (33, 20) annexB/language/statements/for-in/nonstrict-initializer.js (default) Invalid left-hand side in for-in at (11, 7) to (11, 24) -built-ins/BigInt/asIntN/arithmetic.js (strict mode) - Unexpected token ILLEGAL at (15, 35) to (15, 37) -built-ins/BigInt/asIntN/arithmetic.js (default) - Unexpected token ILLEGAL at (15, 35) to (15, 37) -built-ins/BigInt/asIntN/bigint-tobigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (25, 4) to (25, 6) -built-ins/BigInt/asIntN/bigint-tobigint-toprimitive.js (default) - Unexpected token ILLEGAL at (25, 4) to (25, 6) -built-ins/BigInt/asIntN/bigint-tobigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (13, 41) to (13, 43) -built-ins/BigInt/asIntN/bigint-tobigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (13, 41) to (13, 43) -built-ins/BigInt/asIntN/bigint-tobigint.js (strict mode) - Unexpected token ILLEGAL at (13, 34) to (13, 36) -built-ins/BigInt/asIntN/bigint-tobigint.js (default) - Unexpected token ILLEGAL at (13, 34) to (13, 36) -built-ins/BigInt/asIntN/bits-toindex-errors.js (strict mode) - Unexpected token ILLEGAL at (14, 20) to (14, 22) -built-ins/BigInt/asIntN/bits-toindex-errors.js (default) - Unexpected token ILLEGAL at (14, 20) to (14, 22) 
-built-ins/BigInt/asIntN/bits-toindex-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (25, 3) to (25, 5) -built-ins/BigInt/asIntN/bits-toindex-toprimitive.js (default) - Unexpected token ILLEGAL at (25, 3) to (25, 5) -built-ins/BigInt/asIntN/bits-toindex-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (13, 42) to (13, 44) -built-ins/BigInt/asIntN/bits-toindex-wrapped-values.js (default) - Unexpected token ILLEGAL at (13, 42) to (13, 44) -built-ins/BigInt/asIntN/bits-toindex.js (strict mode) - Unexpected token ILLEGAL at (13, 34) to (13, 36) -built-ins/BigInt/asIntN/bits-toindex.js (default) - Unexpected token ILLEGAL at (13, 34) to (13, 36) -built-ins/BigInt/asIntN/order-of-steps.js (strict mode) - Unexpected token ILLEGAL at (27, 11) to (27, 13) -built-ins/BigInt/asIntN/order-of-steps.js (default) - Unexpected token ILLEGAL at (27, 11) to (27, 13) -built-ins/BigInt/asUintN/arithmetic.js (strict mode) - Unexpected token ILLEGAL at (14, 36) to (14, 38) -built-ins/BigInt/asUintN/arithmetic.js (default) - Unexpected token ILLEGAL at (14, 36) to (14, 38) -built-ins/BigInt/asUintN/bigint-tobigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (25, 4) to (25, 6) -built-ins/BigInt/asUintN/bigint-tobigint-toprimitive.js (default) - Unexpected token ILLEGAL at (25, 4) to (25, 6) -built-ins/BigInt/asUintN/bigint-tobigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (13, 42) to (13, 44) -built-ins/BigInt/asUintN/bigint-tobigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (13, 42) to (13, 44) -built-ins/BigInt/asUintN/bigint-tobigint.js (strict mode) - Unexpected token ILLEGAL at (13, 35) to (13, 37) -built-ins/BigInt/asUintN/bigint-tobigint.js (default) - Unexpected token ILLEGAL at (13, 35) to (13, 37) -built-ins/BigInt/asUintN/bits-toindex-errors.js (strict mode) - Unexpected token ILLEGAL at (14, 21) to (14, 23) -built-ins/BigInt/asUintN/bits-toindex-errors.js (default) - Unexpected token ILLEGAL at (14, 21) to (14, 23) -built-ins/BigInt/asUintN/bits-toindex-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (25, 3) to (25, 5) -built-ins/BigInt/asUintN/bits-toindex-toprimitive.js (default) - Unexpected token ILLEGAL at (25, 3) to (25, 5) -built-ins/BigInt/asUintN/bits-toindex-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (13, 43) to (13, 45) -built-ins/BigInt/asUintN/bits-toindex-wrapped-values.js (default) - Unexpected token ILLEGAL at (13, 43) to (13, 45) -built-ins/BigInt/asUintN/bits-toindex.js (strict mode) - Unexpected token ILLEGAL at (13, 35) to (13, 37) -built-ins/BigInt/asUintN/bits-toindex.js (default) - Unexpected token ILLEGAL at (13, 35) to (13, 37) -built-ins/BigInt/asUintN/order-of-steps.js (strict mode) - Unexpected token ILLEGAL at (27, 11) to (27, 13) -built-ins/BigInt/asUintN/order-of-steps.js (default) - Unexpected token ILLEGAL at (27, 11) to (27, 13) -built-ins/BigInt/call-value-of-when-to-string-present.js (strict mode) - Unexpected token ILLEGAL at (24, 28) to (24, 31) -built-ins/BigInt/call-value-of-when-to-string-present.js (default) - Unexpected token ILLEGAL at (24, 28) to (24, 31) -built-ins/BigInt/constructor-empty-string.js (strict mode) - Unexpected token ILLEGAL at (15, 29) to (15, 31) -built-ins/BigInt/constructor-empty-string.js (default) - Unexpected token ILLEGAL at (15, 29) to (15, 31) -built-ins/BigInt/constructor-from-binary-string.js (strict mode) - Unexpected token ILLEGAL at (16, 35) to (16, 38) -built-ins/BigInt/constructor-from-binary-string.js (default) - 
Unexpected token ILLEGAL at (16, 35) to (16, 38) -built-ins/BigInt/constructor-from-decimal-string.js (strict mode) - Unexpected token ILLEGAL at (16, 31) to (16, 34) -built-ins/BigInt/constructor-from-decimal-string.js (default) - Unexpected token ILLEGAL at (16, 31) to (16, 34) -built-ins/BigInt/constructor-from-hex-string.js (strict mode) - Unexpected token ILLEGAL at (16, 32) to (16, 35) -built-ins/BigInt/constructor-from-hex-string.js (default) - Unexpected token ILLEGAL at (16, 32) to (16, 35) -built-ins/BigInt/constructor-from-octal-string.js (strict mode) - Unexpected token ILLEGAL at (16, 32) to (16, 34) -built-ins/BigInt/constructor-from-octal-string.js (default) - Unexpected token ILLEGAL at (16, 32) to (16, 34) -built-ins/BigInt/constructor-trailing-leading-spaces.js (strict mode) - Unexpected token ILLEGAL at (16, 38) to (16, 41) -built-ins/BigInt/constructor-trailing-leading-spaces.js (default) - Unexpected token ILLEGAL at (16, 38) to (16, 41) -built-ins/BigInt/issafeinteger-true.js (strict mode) - Unexpected token ILLEGAL at (19, 50) to (19, 67) -built-ins/BigInt/issafeinteger-true.js (default) - Unexpected token ILLEGAL at (19, 50) to (19, 67) -built-ins/BigInt/prototype/toString/radix-2-to-36.js (strict mode) - Unexpected token ILLEGAL at (21, 20) to (21, 22) -built-ins/BigInt/prototype/toString/radix-2-to-36.js (default) - Unexpected token ILLEGAL at (21, 20) to (21, 22) -built-ins/BigInt/prototype/toString/radix-err.js (strict mode) - Unexpected token ILLEGAL at (19, 5) to (19, 7) -built-ins/BigInt/prototype/toString/radix-err.js (default) - Unexpected token ILLEGAL at (19, 5) to (19, 7) -built-ins/BigInt/prototype/toString/string-is-code-units-of-decimal-digits-only.js (strict mode) - Unexpected token ILLEGAL at (19, 24) to (19, 26) -built-ins/BigInt/prototype/toString/string-is-code-units-of-decimal-digits-only.js (default) - Unexpected token ILLEGAL at (19, 24) to (19, 26) -built-ins/BigInt/prototype/toString/thisbigintvalue-not-valid-throws.js (strict mode) - Unexpected token ILLEGAL at (26, 7) to (26, 9) -built-ins/BigInt/prototype/toString/thisbigintvalue-not-valid-throws.js (default) - Unexpected token ILLEGAL at (26, 7) to (26, 9) -built-ins/BigInt/prototype/valueOf/return.js (strict mode) - Unexpected token ILLEGAL at (17, 30) to (17, 32) -built-ins/BigInt/prototype/valueOf/return.js (default) - Unexpected token ILLEGAL at (17, 30) to (17, 32) -built-ins/DataView/prototype/getBigInt64/return-value-clean-arraybuffer.js (strict mode) - Unexpected token ILLEGAL at (39, 46) to (39, 48) -built-ins/DataView/prototype/getBigInt64/return-value-clean-arraybuffer.js (default) - Unexpected token ILLEGAL at (39, 46) to (39, 48) -built-ins/DataView/prototype/getBigInt64/return-values-custom-offset.js (strict mode) - Unexpected token ILLEGAL at (58, 48) to (58, 67) -built-ins/DataView/prototype/getBigInt64/return-values-custom-offset.js (default) - Unexpected token ILLEGAL at (58, 48) to (58, 67) -built-ins/DataView/prototype/getBigInt64/return-values.js (strict mode) - Unexpected token ILLEGAL at (51, 47) to (51, 66) -built-ins/DataView/prototype/getBigInt64/return-values.js (default) - Unexpected token ILLEGAL at (51, 47) to (51, 66) -built-ins/DataView/prototype/getBigInt64/to-boolean-littleendian.js (strict mode) - Unexpected token ILLEGAL at (39, 40) to (39, 45) -built-ins/DataView/prototype/getBigInt64/to-boolean-littleendian.js (default) - Unexpected token ILLEGAL at (39, 40) to (39, 45) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset-errors.js (strict 
mode) - Unexpected token ILLEGAL at (55, 21) to (55, 23) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset-errors.js (default) - Unexpected token ILLEGAL at (55, 21) to (55, 23) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (48, 4) to (48, 22) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset-toprimitive.js (default) - Unexpected token ILLEGAL at (48, 4) to (48, 22) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (36, 48) to (36, 67) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset-wrapped-values.js (default) - Unexpected token ILLEGAL at (36, 48) to (36, 67) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset.js (strict mode) - Unexpected token ILLEGAL at (36, 40) to (36, 59) -built-ins/DataView/prototype/getBigInt64/toindex-byteoffset.js (default) - Unexpected token ILLEGAL at (36, 40) to (36, 59) -built-ins/DataView/prototype/getBigUint64/return-value-clean-arraybuffer.js (strict mode) - Unexpected token ILLEGAL at (14, 47) to (14, 49) -built-ins/DataView/prototype/getBigUint64/return-value-clean-arraybuffer.js (default) - Unexpected token ILLEGAL at (14, 47) to (14, 49) -built-ins/DataView/prototype/getBigUint64/return-values-custom-offset.js (strict mode) - Unexpected token ILLEGAL at (33, 48) to (33, 67) -built-ins/DataView/prototype/getBigUint64/return-values-custom-offset.js (default) - Unexpected token ILLEGAL at (33, 48) to (33, 67) -built-ins/DataView/prototype/getBigUint64/return-values.js (strict mode) - Unexpected token ILLEGAL at (31, 48) to (31, 67) -built-ins/DataView/prototype/getBigUint64/return-values.js (default) - Unexpected token ILLEGAL at (31, 48) to (31, 67) -built-ins/DataView/prototype/getBigUint64/to-boolean-littleendian.js (strict mode) - Unexpected token ILLEGAL at (12, 41) to (12, 46) -built-ins/DataView/prototype/getBigUint64/to-boolean-littleendian.js (default) - Unexpected token ILLEGAL at (12, 41) to (12, 46) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset-errors.js (strict mode) - Unexpected token ILLEGAL at (43, 22) to (43, 24) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset-errors.js (default) - Unexpected token ILLEGAL at (43, 22) to (43, 24) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (36, 4) to (36, 22) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset-toprimitive.js (default) - Unexpected token ILLEGAL at (36, 4) to (36, 22) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (24, 49) to (24, 68) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset-wrapped-values.js (default) - Unexpected token ILLEGAL at (24, 49) to (24, 68) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset.js (strict mode) - Unexpected token ILLEGAL at (24, 41) to (24, 60) -built-ins/DataView/prototype/getBigUint64/toindex-byteoffset.js (default) - Unexpected token ILLEGAL at (24, 41) to (24, 60) -built-ins/DataView/prototype/setBigInt64/detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (17, 24) to (17, 26) -built-ins/DataView/prototype/setBigInt64/detached-buffer.js (default) - Unexpected token ILLEGAL at (17, 24) to (17, 26) -built-ins/DataView/prototype/setBigInt64/index-is-out-of-range.js (strict mode) - Unexpected token ILLEGAL at (17, 31) to (17, 34) 
-built-ins/DataView/prototype/setBigInt64/index-is-out-of-range.js (default) - Unexpected token ILLEGAL at (17, 31) to (17, 34) -built-ins/DataView/prototype/setBigInt64/negative-byteoffset-throws.js (strict mode) - Unexpected token ILLEGAL at (15, 25) to (15, 28) -built-ins/DataView/prototype/setBigInt64/negative-byteoffset-throws.js (default) - Unexpected token ILLEGAL at (15, 25) to (15, 28) -built-ins/DataView/prototype/setBigInt64/return-abrupt-from-tonumber-byteoffset-symbol.js (strict mode) - Unexpected token ILLEGAL at (17, 24) to (17, 26) -built-ins/DataView/prototype/setBigInt64/return-abrupt-from-tonumber-byteoffset-symbol.js (default) - Unexpected token ILLEGAL at (17, 24) to (17, 26) -built-ins/DataView/prototype/setBigInt64/return-abrupt-from-tonumber-byteoffset.js (strict mode) - Unexpected token ILLEGAL at (26, 26) to (26, 28) -built-ins/DataView/prototype/setBigInt64/return-abrupt-from-tonumber-byteoffset.js (default) - Unexpected token ILLEGAL at (26, 26) to (26, 28) -built-ins/DataView/prototype/setBigInt64/set-values-little-endian-order.js (strict mode) - Unexpected token ILLEGAL at (16, 32) to (16, 43) -built-ins/DataView/prototype/setBigInt64/set-values-little-endian-order.js (default) - Unexpected token ILLEGAL at (16, 32) to (16, 43) -built-ins/DataView/prototype/setBigInt64/to-boolean-littleendian.js (strict mode) - Unexpected token ILLEGAL at (15, 22) to (15, 24) -built-ins/DataView/prototype/setBigInt64/to-boolean-littleendian.js (default) - Unexpected token ILLEGAL at (15, 22) to (15, 24) -built-ins/DataView/prototype/setBigInt64/toindex-byteoffset.js (strict mode) - Unexpected token ILLEGAL at (25, 22) to (25, 24) -built-ins/DataView/prototype/setBigInt64/toindex-byteoffset.js (default) - Unexpected token ILLEGAL at (25, 22) to (25, 24) -built-ins/JSON/stringify/bigint-order.js (strict mode) - Unexpected token ILLEGAL at (30, 22) to (30, 24) -built-ins/JSON/stringify/bigint-order.js (default) - Unexpected token ILLEGAL at (30, 22) to (30, 24) -built-ins/JSON/stringify/bigint-replacer.js (strict mode) - Unexpected token ILLEGAL at (23, 32) to (23, 34) -built-ins/JSON/stringify/bigint-replacer.js (default) - Unexpected token ILLEGAL at (23, 32) to (23, 34) -built-ins/JSON/stringify/bigint-tojson.js (strict mode) - Unexpected token ILLEGAL at (18, 32) to (18, 34) -built-ins/JSON/stringify/bigint-tojson.js (default) - Unexpected token ILLEGAL at (18, 32) to (18, 34) -built-ins/JSON/stringify/bigint.js (strict mode) - Unexpected token ILLEGAL at (10, 46) to (10, 48) -built-ins/JSON/stringify/bigint.js (default) - Unexpected token ILLEGAL at (10, 46) to (10, 48) -built-ins/Number/bigint-conversion.js (strict mode) - Unexpected token ILLEGAL at (10, 24) to (10, 26) -built-ins/Number/bigint-conversion.js (default) - Unexpected token ILLEGAL at (10, 24) to (10, 26) -built-ins/Object/bigint.js (strict mode) - Unexpected token ILLEGAL at (10, 14) to (10, 16) -built-ins/Object/bigint.js (default) - Unexpected token ILLEGAL at (10, 14) to (10, 16) -built-ins/Object/prototype/toString/Object.prototype.toString.call-bigint.js (strict mode) - Unexpected token ILLEGAL at (11, 48) to (11, 50) -built-ins/Object/prototype/toString/Object.prototype.toString.call-bigint.js (default) - Unexpected token ILLEGAL at (11, 48) to (11, 50) -built-ins/Object/setPrototypeOf/bigint.js (strict mode) - Unexpected token ILLEGAL at (11, 11) to (11, 13) -built-ins/Object/setPrototypeOf/bigint.js (default) - Unexpected token ILLEGAL at (11, 11) to (11, 13) 
-built-ins/String/prototype/indexOf/position-tointeger-bigint.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -built-ins/String/prototype/indexOf/position-tointeger-bigint.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -built-ins/String/prototype/indexOf/searchstring-tostring-bigint.js (strict mode) - Unexpected token ILLEGAL at (13, 33) to (13, 35) -built-ins/String/prototype/indexOf/searchstring-tostring-bigint.js (default) - Unexpected token ILLEGAL at (13, 33) to (13, 35) -built-ins/TypedArray/prototype/copyWithin/BigInt/coerced-values-end.js (strict mode) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/coerced-values-end.js (default) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/coerced-values-start.js (strict mode) - Unexpected token ILLEGAL at (31, 14) to (31, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/coerced-values-start.js (default) - Unexpected token ILLEGAL at (31, 14) to (31, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/coerced-values-target.js (strict mode) - Unexpected token ILLEGAL at (31, 14) to (31, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/coerced-values-target.js (default) - Unexpected token ILLEGAL at (31, 14) to (31, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-end.js (strict mode) - Unexpected token ILLEGAL at (34, 14) to (34, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-end.js (default) - Unexpected token ILLEGAL at (34, 14) to (34, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-end.js (strict mode) - Unexpected token ILLEGAL at (34, 14) to (34, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-end.js (default) - Unexpected token ILLEGAL at (34, 14) to (34, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-start.js (strict mode) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-start.js (default) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-target.js (strict mode) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-target.js (default) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-start.js (strict mode) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-start.js (default) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-target.js (strict mode) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/negative-target.js (default) - Unexpected token ILLEGAL at (32, 14) to (32, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-out-of-bounds-end.js (strict mode) - Unexpected token ILLEGAL at (25, 14) to (25, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-out-of-bounds-end.js (default) - Unexpected token ILLEGAL at (25, 14) to (25, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-out-of-bounds-target-and-start.js (strict mode) - Unexpected token ILLEGAL at (25, 14) to (25, 16) -built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-out-of-bounds-target-and-start.js (default) - 
Unexpected token ILLEGAL at (25, 14) to (25, 16)
-built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-target-and-start.js (strict mode) - Unexpected token ILLEGAL at (25, 14) to (25, 16)
-built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-target-and-start.js (default) - Unexpected token ILLEGAL at (25, 14) to (25, 16)
-built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-target-start-and-end.js (strict mode) - Unexpected token ILLEGAL at (25, 14) to (25, 16)
-built-ins/TypedArray/prototype/copyWithin/BigInt/non-negative-target-start-and-end.js (default) - Unexpected token ILLEGAL at (25, 14) to (25, 16)
-built-ins/TypedArray/prototype/copyWithin/BigInt/return-this.js (strict mode) - Unexpected token ILLEGAL at (32, 24) to (32, 26)
-built-ins/TypedArray/prototype/copyWithin/BigInt/return-this.js (default) - Unexpected token ILLEGAL at (32, 24) to (32, 26)
-built-ins/TypedArray/prototype/copyWithin/BigInt/undefined-end.js (strict mode) - Unexpected token ILLEGAL at (32, 14) to (32, 16)
-built-ins/TypedArray/prototype/copyWithin/BigInt/undefined-end.js (default) - Unexpected token ILLEGAL at (32, 14) to (32, 16)
-built-ins/TypedArray/prototype/entries/BigInt/iter-prototype.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 25)
-built-ins/TypedArray/prototype/entries/BigInt/iter-prototype.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 25)
-built-ins/TypedArray/prototype/entries/BigInt/return-itor.js (strict mode) - Unexpected token ILLEGAL at (16, 27) to (16, 29)
-built-ins/TypedArray/prototype/entries/BigInt/return-itor.js (default) - Unexpected token ILLEGAL at (16, 27) to (16, 29)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-arguments-with-thisarg.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-arguments-with-thisarg.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-arguments-without-thisarg.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-arguments-without-thisarg.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-no-interaction-over-non-integer.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 25)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-no-interaction-over-non-integer.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 25)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-set-value-during-interaction.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/callbackfn-set-value-during-interaction.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26)
-built-ins/TypedArray/prototype/every/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26)
-built-ins/TypedArray/prototype/every/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26)
-built-ins/TypedArray/prototype/every/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/every/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/fill/BigInt/coerced-indexes.js (strict mode) - Unexpected token ILLEGAL at (36, 25) to (36, 27)
-built-ins/TypedArray/prototype/fill/BigInt/coerced-indexes.js (default) - Unexpected token ILLEGAL at (36, 25) to (36, 27)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-conversion-once.js (strict mode) - Unexpected token ILLEGAL at (21, 10) to (21, 12)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-conversion-once.js (default) - Unexpected token ILLEGAL at (21, 10) to (21, 12)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-custom-start-and-end.js (strict mode) - Unexpected token ILLEGAL at (37, 30) to (37, 32)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-custom-start-and-end.js (default) - Unexpected token ILLEGAL at (37, 30) to (37, 32)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-non-numeric-throw.js (strict mode) - Unexpected token ILLEGAL at (43, 19) to (43, 22)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-non-numeric-throw.js (default) - Unexpected token ILLEGAL at (43, 19) to (43, 22)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-non-numeric.js (strict mode) - Unexpected token ILLEGAL at (43, 19) to (43, 22)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-non-numeric.js (default) - Unexpected token ILLEGAL at (43, 19) to (43, 22)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-relative-end.js (strict mode) - Unexpected token ILLEGAL at (35, 25) to (35, 27)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-relative-end.js (default) - Unexpected token ILLEGAL at (35, 25) to (35, 27)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-relative-start.js (strict mode) - Unexpected token ILLEGAL at (33, 25) to (33, 27)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values-relative-start.js (default) - Unexpected token ILLEGAL at (33, 25) to (33, 27)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values.js (strict mode) - Unexpected token ILLEGAL at (34, 20) to (34, 22)
-built-ins/TypedArray/prototype/fill/BigInt/fill-values.js (default) - Unexpected token ILLEGAL at (34, 20) to (34, 22)
-built-ins/TypedArray/prototype/fill/BigInt/get-length-ignores-length-prop.js (strict mode) - Unexpected token ILLEGAL at (50, 31) to (50, 33)
-built-ins/TypedArray/prototype/fill/BigInt/get-length-ignores-length-prop.js (default) - Unexpected token ILLEGAL at (50, 31) to (50, 33)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-end-as-symbol.js (strict mode) - Unexpected token ILLEGAL at (36, 16) to (36, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-end-as-symbol.js (default) - Unexpected token ILLEGAL at (36, 16) to (36, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-end.js (strict mode) - Unexpected token ILLEGAL at (40, 16) to (40, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-end.js (default) - Unexpected token ILLEGAL at (40, 16) to (40, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-set-value.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-set-value.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-start-as-symbol.js (strict mode) - Unexpected token ILLEGAL at (35, 16) to (35, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-start-as-symbol.js (default) - Unexpected token ILLEGAL at (35, 16) to (35, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-start.js (strict mode) - Unexpected token ILLEGAL at (39, 16) to (39, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-abrupt-from-start.js (default) - Unexpected token ILLEGAL at (39, 16) to (39, 18)
-built-ins/TypedArray/prototype/fill/BigInt/return-this.js (strict mode) - Unexpected token ILLEGAL at (13, 29) to (13, 31)
-built-ins/TypedArray/prototype/fill/BigInt/return-this.js (default) - Unexpected token ILLEGAL at (13, 29) to (13, 31)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-arguments-with-thisarg.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-arguments-with-thisarg.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-arguments-without-thisarg.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-arguments-without-thisarg.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-no-iteration-over-non-integer.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 25)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-no-iteration-over-non-integer.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 25)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at (14, 15) to (14, 17)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (14, 15) to (14, 17)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-set-value-during-iteration.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/callbackfn-set-value-during-iteration.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/result-does-not-share-buffer.js (strict mode) - Unexpected token ILLEGAL at (19, 23) to (19, 26)
-built-ins/TypedArray/prototype/filter/BigInt/result-does-not-share-buffer.js (default) - Unexpected token ILLEGAL at (19, 23) to (19, 26)
-built-ins/TypedArray/prototype/filter/BigInt/result-full-callbackfn-returns-true.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/result-full-callbackfn-returns-true.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor-abrupt.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor-abrupt.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor-inherited.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor-inherited.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor-returns-throws.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor-returns-throws.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-ctor.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-species-custom-ctor-invocation.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-species-custom-ctor-invocation.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-species-custom-ctor-returns-another-instance.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-species-custom-ctor-returns-another-instance.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-species-custom-ctor.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/filter/BigInt/speciesctor-get-species-custom-ctor.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26)
-built-ins/TypedArray/prototype/filter/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/values-are-set.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/filter/BigInt/values-are-set.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26)
-built-ins/TypedArray/prototype/find/BigInt/get-length-ignores-length-prop.js (strict mode) - Unexpected token ILLEGAL at (41, 23) to (41, 26)
-built-ins/TypedArray/prototype/find/BigInt/get-length-ignores-length-prop.js (default) - Unexpected token ILLEGAL at (41, 23) to (41, 26)
-built-ins/TypedArray/prototype/find/BigInt/predicate-call-changes-value.js (strict mode) - Unexpected token ILLEGAL at (33, 13) to (33, 15)
-built-ins/TypedArray/prototype/find/BigInt/predicate-call-changes-value.js (default) - Unexpected token ILLEGAL at (33, 13) to (33, 15)
-built-ins/TypedArray/prototype/find/BigInt/predicate-call-parameters.js (strict mode) - Unexpected token ILLEGAL at (33, 23) to (33, 26)
-built-ins/TypedArray/prototype/find/BigInt/predicate-call-parameters.js (default) - Unexpected token ILLEGAL at (33, 23) to (33, 26)
-built-ins/TypedArray/prototype/find/BigInt/return-found-value-predicate-result-is-true.js (strict mode) - Unexpected token ILLEGAL at (32, 23) to (32, 26)
-built-ins/TypedArray/prototype/find/BigInt/return-found-value-predicate-result-is-true.js (default) - Unexpected token ILLEGAL at (32, 23) to (32, 26)
-built-ins/TypedArray/prototype/findIndex/BigInt/get-length-ignores-length-prop.js (strict mode) - Unexpected token ILLEGAL at (39, 23) to (39, 26)
-built-ins/TypedArray/prototype/findIndex/BigInt/get-length-ignores-length-prop.js (default) - Unexpected token ILLEGAL at (39, 23) to (39, 26)
-built-ins/TypedArray/prototype/findIndex/BigInt/predicate-call-changes-value.js (strict mode) - Unexpected token ILLEGAL at (29, 13) to (29, 16)
-built-ins/TypedArray/prototype/findIndex/BigInt/predicate-call-changes-value.js
(default) - Unexpected token ILLEGAL at (29, 13) to (29, 16) -built-ins/TypedArray/prototype/findIndex/BigInt/predicate-call-parameters.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/findIndex/BigInt/predicate-call-parameters.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/findIndex/BigInt/return-index-predicate-result-is-true.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/findIndex/BigInt/return-index-predicate-result-is-true.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/findIndex/BigInt/return-negative-one-if-predicate-returns-false-value.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 25) -built-ins/TypedArray/prototype/findIndex/BigInt/return-negative-one-if-predicate-returns-false-value.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 25) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-arguments-with-thisarg.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-arguments-with-thisarg.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-arguments-without-thisarg.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-arguments-without-thisarg.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-no-interaction-over-non-integer.js (strict mode) - Unexpected token ILLEGAL at (23, 23) to (23, 25) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-no-interaction-over-non-integer.js (default) - Unexpected token ILLEGAL at (23, 23) to (23, 25) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at (21, 15) to (21, 17) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (21, 15) to (21, 17) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-set-value-during-interaction.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26) -built-ins/TypedArray/prototype/forEach/BigInt/callbackfn-set-value-during-interaction.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26) -built-ins/TypedArray/prototype/forEach/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (20, 23) to (20, 26) -built-ins/TypedArray/prototype/forEach/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (20, 23) to (20, 26) -built-ins/TypedArray/prototype/includes/BigInt/detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (26, 20) to (26, 22) -built-ins/TypedArray/prototype/includes/BigInt/detached-buffer.js (default) - Unexpected token ILLEGAL at (26, 20) to (26, 22) -built-ins/TypedArray/prototype/includes/BigInt/fromIndex-equal-or-greater-length-returns-false.js (strict mode) - Unexpected token ILLEGAL at (34, 35) to (34, 37) -built-ins/TypedArray/prototype/includes/BigInt/fromIndex-equal-or-greater-length-returns-false.js (default) - Unexpected token ILLEGAL at (34, 35) to (34, 37) -built-ins/TypedArray/prototype/includes/BigInt/fromIndex-infinity.js (strict mode) - Unexpected token ILLEGAL at (33, 23) to (33, 26) 
-built-ins/TypedArray/prototype/includes/BigInt/fromIndex-infinity.js (default) - Unexpected token ILLEGAL at (33, 23) to (33, 26) -built-ins/TypedArray/prototype/includes/BigInt/fromIndex-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (30, 19) to (30, 22) -built-ins/TypedArray/prototype/includes/BigInt/fromIndex-minus-zero.js (default) - Unexpected token ILLEGAL at (30, 19) to (30, 22) -built-ins/TypedArray/prototype/includes/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 25) -built-ins/TypedArray/prototype/includes/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 25) -built-ins/TypedArray/prototype/includes/BigInt/length-zero-returns-false.js (strict mode) - Unexpected token ILLEGAL at (36, 20) to (36, 22) -built-ins/TypedArray/prototype/includes/BigInt/length-zero-returns-false.js (default) - Unexpected token ILLEGAL at (36, 20) to (36, 22) -built-ins/TypedArray/prototype/includes/BigInt/return-abrupt-tointeger-fromindex-symbol.js (strict mode) - Unexpected token ILLEGAL at (28, 23) to (28, 25) -built-ins/TypedArray/prototype/includes/BigInt/return-abrupt-tointeger-fromindex-symbol.js (default) - Unexpected token ILLEGAL at (28, 23) to (28, 25) -built-ins/TypedArray/prototype/includes/BigInt/return-abrupt-tointeger-fromindex.js (strict mode) - Unexpected token ILLEGAL at (32, 23) to (32, 25) -built-ins/TypedArray/prototype/includes/BigInt/return-abrupt-tointeger-fromindex.js (default) - Unexpected token ILLEGAL at (32, 23) to (32, 25) -built-ins/TypedArray/prototype/includes/BigInt/search-found-returns-true.js (strict mode) - Unexpected token ILLEGAL at (33, 23) to (33, 26) -built-ins/TypedArray/prototype/includes/BigInt/search-found-returns-true.js (default) - Unexpected token ILLEGAL at (33, 23) to (33, 26) -built-ins/TypedArray/prototype/includes/BigInt/search-not-found-returns-false.js (strict mode) - Unexpected token ILLEGAL at (35, 19) to (35, 22) -built-ins/TypedArray/prototype/includes/BigInt/search-not-found-returns-false.js (default) - Unexpected token ILLEGAL at (35, 19) to (35, 22) -built-ins/TypedArray/prototype/includes/BigInt/tointeger-fromindex.js (strict mode) - Unexpected token ILLEGAL at (41, 19) to (41, 22) -built-ins/TypedArray/prototype/includes/BigInt/tointeger-fromindex.js (default) - Unexpected token ILLEGAL at (41, 19) to (41, 22) -built-ins/TypedArray/prototype/indexOf/BigInt/detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (26, 19) to (26, 21) -built-ins/TypedArray/prototype/indexOf/BigInt/detached-buffer.js (default) - Unexpected token ILLEGAL at (26, 19) to (26, 21) -built-ins/TypedArray/prototype/indexOf/BigInt/fromIndex-equal-or-greater-length-returns-minus-one.js (strict mode) - Unexpected token ILLEGAL at (28, 34) to (28, 36) -built-ins/TypedArray/prototype/indexOf/BigInt/fromIndex-equal-or-greater-length-returns-minus-one.js (default) - Unexpected token ILLEGAL at (28, 34) to (28, 36) -built-ins/TypedArray/prototype/indexOf/BigInt/fromIndex-infinity.js (strict mode) - Unexpected token ILLEGAL at (35, 23) to (35, 26) -built-ins/TypedArray/prototype/indexOf/BigInt/fromIndex-infinity.js (default) - Unexpected token ILLEGAL at (35, 23) to (35, 26) -built-ins/TypedArray/prototype/indexOf/BigInt/fromIndex-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (27, 19) to (27, 22) -built-ins/TypedArray/prototype/indexOf/BigInt/fromIndex-minus-zero.js (default) - Unexpected token ILLEGAL at (27, 19) to (27, 22) 
-built-ins/TypedArray/prototype/indexOf/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/indexOf/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/indexOf/BigInt/length-zero-returns-minus-one.js (strict mode) - Unexpected token ILLEGAL at (32, 34) to (32, 36) -built-ins/TypedArray/prototype/indexOf/BigInt/length-zero-returns-minus-one.js (default) - Unexpected token ILLEGAL at (32, 34) to (32, 36) -built-ins/TypedArray/prototype/indexOf/BigInt/return-abrupt-tointeger-fromindex-symbol.js (strict mode) - Unexpected token ILLEGAL at (30, 19) to (30, 21) -built-ins/TypedArray/prototype/indexOf/BigInt/return-abrupt-tointeger-fromindex-symbol.js (default) - Unexpected token ILLEGAL at (30, 19) to (30, 21) -built-ins/TypedArray/prototype/indexOf/BigInt/return-abrupt-tointeger-fromindex.js (strict mode) - Unexpected token ILLEGAL at (34, 19) to (34, 21) -built-ins/TypedArray/prototype/indexOf/BigInt/return-abrupt-tointeger-fromindex.js (default) - Unexpected token ILLEGAL at (34, 19) to (34, 21) -built-ins/TypedArray/prototype/indexOf/BigInt/search-found-returns-index.js (strict mode) - Unexpected token ILLEGAL at (35, 23) to (35, 26) -built-ins/TypedArray/prototype/indexOf/BigInt/search-found-returns-index.js (default) - Unexpected token ILLEGAL at (35, 23) to (35, 26) -built-ins/TypedArray/prototype/indexOf/BigInt/search-not-found-returns-minus-one.js (strict mode) - Unexpected token ILLEGAL at (31, 19) to (31, 22) -built-ins/TypedArray/prototype/indexOf/BigInt/search-not-found-returns-minus-one.js (default) - Unexpected token ILLEGAL at (31, 19) to (31, 22) -built-ins/TypedArray/prototype/indexOf/BigInt/tointeger-fromindex.js (strict mode) - Unexpected token ILLEGAL at (33, 19) to (33, 22) -built-ins/TypedArray/prototype/indexOf/BigInt/tointeger-fromindex.js (default) - Unexpected token ILLEGAL at (33, 19) to (33, 22) -built-ins/TypedArray/prototype/join/BigInt/custom-separator-result-from-tostring-on-each-simple-value.js (strict mode) - Unexpected token ILLEGAL at (33, 23) to (33, 25) -built-ins/TypedArray/prototype/join/BigInt/custom-separator-result-from-tostring-on-each-simple-value.js (default) - Unexpected token ILLEGAL at (33, 23) to (33, 25) -built-ins/TypedArray/prototype/join/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArray/prototype/join/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArray/prototype/join/BigInt/result-from-tostring-on-each-simple-value.js (strict mode) - Unexpected token ILLEGAL at (32, 23) to (32, 25) -built-ins/TypedArray/prototype/join/BigInt/result-from-tostring-on-each-simple-value.js (default) - Unexpected token ILLEGAL at (32, 23) to (32, 25) -built-ins/TypedArray/prototype/keys/BigInt/iter-prototype.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/keys/BigInt/iter-prototype.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/keys/BigInt/return-itor.js (strict mode) - Unexpected token ILLEGAL at (16, 14) to (16, 16) -built-ins/TypedArray/prototype/keys/BigInt/return-itor.js (default) - Unexpected token ILLEGAL at (16, 14) to (16, 16) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/detached-buffer.js (strict mode) - 
Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/detached-buffer.js (default) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/fromIndex-infinity.js (strict mode) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/fromIndex-infinity.js (default) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/fromIndex-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (27, 19) to (27, 22) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/fromIndex-minus-zero.js (default) - Unexpected token ILLEGAL at (27, 19) to (27, 22) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/length-zero-returns-minus-one.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/length-zero-returns-minus-one.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/return-abrupt-tointeger-fromindex-symbol.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/return-abrupt-tointeger-fromindex-symbol.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/return-abrupt-tointeger-fromindex.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/return-abrupt-tointeger-fromindex.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 25) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/search-found-returns-index.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/search-found-returns-index.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/search-not-found-returns-minus-one.js (strict mode) - Unexpected token ILLEGAL at (31, 19) to (31, 22) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/search-not-found-returns-minus-one.js (default) - Unexpected token ILLEGAL at (31, 19) to (31, 22) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/tointeger-fromindex.js (strict mode) - Unexpected token ILLEGAL at (33, 19) to (33, 22) -built-ins/TypedArray/prototype/lastIndexOf/BigInt/tointeger-fromindex.js (default) - Unexpected token ILLEGAL at (33, 19) to (33, 22) -built-ins/TypedArray/prototype/map/BigInt/arraylength-internal.js (strict mode) - Unexpected token ILLEGAL at (25, 11) to (25, 13) -built-ins/TypedArray/prototype/map/BigInt/arraylength-internal.js (default) - Unexpected token ILLEGAL at (25, 11) to (25, 13) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-arguments-with-thisarg.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-arguments-with-thisarg.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-arguments-without-thisarg.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) 
-built-ins/TypedArray/prototype/map/BigInt/callbackfn-arguments-without-thisarg.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-detachbuffer.js (strict mode) - Unexpected token ILLEGAL at (31, 13) to (31, 15) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-detachbuffer.js (default) - Unexpected token ILLEGAL at (31, 13) to (31, 15) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-no-interaction-over-non-integer-properties.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 25) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-no-interaction-over-non-integer-properties.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 25) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-return-affects-returned-object.js (strict mode) - Unexpected token ILLEGAL at (23, 23) to (23, 25) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-return-affects-returned-object.js (default) - Unexpected token ILLEGAL at (23, 23) to (23, 25) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at (16, 15) to (16, 17) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (16, 15) to (16, 17) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-return-does-not-copy-non-integer-properties.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-return-does-not-copy-non-integer-properties.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-set-value-during-interaction.js (strict mode) - Unexpected token ILLEGAL at (15, 23) to (15, 26) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-set-value-during-interaction.js (default) - Unexpected token ILLEGAL at (15, 23) to (15, 26) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-this.js (strict mode) - Unexpected token ILLEGAL at (31, 11) to (31, 13) -built-ins/TypedArray/prototype/map/BigInt/callbackfn-this.js (default) - Unexpected token ILLEGAL at (31, 11) to (31, 13) -built-ins/TypedArray/prototype/map/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (15, 23) to (15, 26) -built-ins/TypedArray/prototype/map/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (15, 23) to (15, 26) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-arguments-custom-accumulator.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-arguments-custom-accumulator.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-arguments-default-accumulator.js (strict mode) - Unexpected token ILLEGAL at (37, 23) to (37, 26) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-arguments-default-accumulator.js (default) - Unexpected token ILLEGAL at (37, 23) to (37, 26) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-no-iteration-over-non-integer-properties.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 25) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-no-iteration-over-non-integer-properties.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 25) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at 
(12, 23) to (12, 25) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (12, 23) to (12, 25) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-set-value-during-iteration.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArray/prototype/reduce/BigInt/callbackfn-set-value-during-iteration.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArray/prototype/reduce/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/reduce/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/reduce/BigInt/result-is-last-callbackfn-return.js (strict mode) - Unexpected token ILLEGAL at (41, 19) to (41, 21) -built-ins/TypedArray/prototype/reduce/BigInt/result-is-last-callbackfn-return.js (default) - Unexpected token ILLEGAL at (41, 19) to (41, 21) -built-ins/TypedArray/prototype/reduce/BigInt/result-of-any-type.js (strict mode) - Unexpected token ILLEGAL at (38, 23) to (38, 26) -built-ins/TypedArray/prototype/reduce/BigInt/result-of-any-type.js (default) - Unexpected token ILLEGAL at (38, 23) to (38, 26) -built-ins/TypedArray/prototype/reduce/BigInt/return-first-value-without-callbackfn.js (strict mode) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArray/prototype/reduce/BigInt/return-first-value-without-callbackfn.js (default) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArray/prototype/reduce/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/reduce/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-arguments-custom-accumulator.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-arguments-custom-accumulator.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-arguments-default-accumulator.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-arguments-default-accumulator.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-no-iteration-over-non-integer-properties.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 25) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-no-iteration-over-non-integer-properties.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 25) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at (12, 23) to (12, 25) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (12, 23) to (12, 25) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-set-value-during-iteration.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/callbackfn-set-value-during-iteration.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) 
-built-ins/TypedArray/prototype/reduceRight/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/result-is-last-callbackfn-return.js (strict mode) - Unexpected token ILLEGAL at (43, 19) to (43, 21) -built-ins/TypedArray/prototype/reduceRight/BigInt/result-is-last-callbackfn-return.js (default) - Unexpected token ILLEGAL at (43, 19) to (43, 21) -built-ins/TypedArray/prototype/reduceRight/BigInt/result-of-any-type.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/result-of-any-type.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/return-first-value-without-callbackfn.js (strict mode) - Unexpected token ILLEGAL at (37, 23) to (37, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/return-first-value-without-callbackfn.js (default) - Unexpected token ILLEGAL at (37, 23) to (37, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/reduceRight/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/reverse/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/reverse/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/reverse/BigInt/reverts.js (strict mode) - Unexpected token ILLEGAL at (28, 14) to (28, 17) -built-ins/TypedArray/prototype/reverse/BigInt/reverts.js (default) - Unexpected token ILLEGAL at (28, 14) to (28, 17) -built-ins/TypedArray/prototype/set/BigInt/array-arg-negative-integer-offset-throws.js (strict mode) - Unexpected token ILLEGAL at (25, 16) to (25, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-negative-integer-offset-throws.js (default) - Unexpected token ILLEGAL at (25, 16) to (25, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-offset-tointeger.js (strict mode) - Unexpected token ILLEGAL at (24, 19) to (24, 21) -built-ins/TypedArray/prototype/set/BigInt/array-arg-offset-tointeger.js (default) - Unexpected token ILLEGAL at (24, 19) to (24, 21) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-get-length.js (strict mode) - Unexpected token ILLEGAL at (28, 23) to (28, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-get-length.js (default) - Unexpected token ILLEGAL at (28, 23) to (28, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-get-value.js (strict mode) - Unexpected token ILLEGAL at (28, 11) to (28, 14) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-get-value.js (default) - Unexpected token ILLEGAL at (28, 11) to (28, 14) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-length-symbol.js (strict mode) - Unexpected token ILLEGAL at (25, 23) to (25, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-length-symbol.js (default) - Unexpected token ILLEGAL at (25, 23) to (25, 25) 
-built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-length.js (strict mode) - Unexpected token ILLEGAL at (37, 23) to (37, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-length.js (default) - Unexpected token ILLEGAL at (37, 23) to (37, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-tonumber-value-symbol.js (strict mode) - Unexpected token ILLEGAL at (28, 11) to (28, 14) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-tonumber-value-symbol.js (default) - Unexpected token ILLEGAL at (28, 11) to (28, 14) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-tonumber-value.js (strict mode) - Unexpected token ILLEGAL at (28, 11) to (28, 14) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-src-tonumber-value.js (default) - Unexpected token ILLEGAL at (28, 11) to (28, 14) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-tointeger-offset-symbol.js (strict mode) - Unexpected token ILLEGAL at (25, 16) to (25, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-tointeger-offset-symbol.js (default) - Unexpected token ILLEGAL at (25, 16) to (25, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-toobject-offset.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-return-abrupt-from-toobject-offset.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-set-values-in-order.js (strict mode) - Unexpected token ILLEGAL at (35, 13) to (35, 16) -built-ins/TypedArray/prototype/set/BigInt/array-arg-set-values-in-order.js (default) - Unexpected token ILLEGAL at (35, 13) to (35, 16) -built-ins/TypedArray/prototype/set/BigInt/array-arg-set-values.js (strict mode) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArray/prototype/set/BigInt/array-arg-set-values.js (default) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArray/prototype/set/BigInt/array-arg-src-tonumber-value-type-conversions.js (strict mode) - Unexpected token ILLEGAL at (28, 15) to (28, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-src-tonumber-value-type-conversions.js (default) - Unexpected token ILLEGAL at (28, 15) to (28, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-src-values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (29, 9) to (29, 11) -built-ins/TypedArray/prototype/set/BigInt/array-arg-src-values-are-not-cached.js (default) - Unexpected token ILLEGAL at (29, 9) to (29, 11) -built-ins/TypedArray/prototype/set/BigInt/array-arg-target-arraylength-internal.js (strict mode) - Unexpected token ILLEGAL at (38, 14) to (38, 17) -built-ins/TypedArray/prototype/set/BigInt/array-arg-target-arraylength-internal.js (default) - Unexpected token ILLEGAL at (38, 14) to (38, 17) -built-ins/TypedArray/prototype/set/BigInt/array-arg-targetbuffer-detached-on-get-src-value-throws.js (strict mode) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-targetbuffer-detached-on-get-src-value-throws.js (default) - Unexpected token ILLEGAL at (26, 23) to (26, 25) -built-ins/TypedArray/prototype/set/BigInt/array-arg-targetbuffer-detached-on-tointeger-offset-throws.js (strict mode) - Unexpected token ILLEGAL at (35, 16) to (35, 18) 
-built-ins/TypedArray/prototype/set/BigInt/array-arg-targetbuffer-detached-on-tointeger-offset-throws.js (default) - Unexpected token ILLEGAL at (35, 16) to (35, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-targetbuffer-detached-throws.js (strict mode) - Unexpected token ILLEGAL at (37, 16) to (37, 18) -built-ins/TypedArray/prototype/set/BigInt/array-arg-targetbuffer-detached-throws.js (default) - Unexpected token ILLEGAL at (37, 16) to (37, 18) -built-ins/TypedArray/prototype/set/BigInt/bigint-tobigint64.js (strict mode) - Unexpected token ILLEGAL at (57, 2) to (57, 23) -built-ins/TypedArray/prototype/set/BigInt/bigint-tobigint64.js (default) - Unexpected token ILLEGAL at (57, 2) to (57, 23) -built-ins/TypedArray/prototype/set/BigInt/bigint-tobiguint64.js (strict mode) - Unexpected token ILLEGAL at (57, 2) to (57, 23) -built-ins/TypedArray/prototype/set/BigInt/bigint-tobiguint64.js (default) - Unexpected token ILLEGAL at (57, 2) to (57, 23) -built-ins/TypedArray/prototype/set/BigInt/boolean-tobigint.js (strict mode) - Unexpected token ILLEGAL at (44, 34) to (44, 36) -built-ins/TypedArray/prototype/set/BigInt/boolean-tobigint.js (default) - Unexpected token ILLEGAL at (44, 34) to (44, 36) -built-ins/TypedArray/prototype/set/BigInt/src-typedarray-big.js (strict mode) - Unexpected token ILLEGAL at (24, 16) to (24, 19) -built-ins/TypedArray/prototype/set/BigInt/src-typedarray-big.js (default) - Unexpected token ILLEGAL at (24, 16) to (24, 19) -built-ins/TypedArray/prototype/set/BigInt/string-tobigint.js (strict mode) - Unexpected token ILLEGAL at (47, 34) to (47, 36) -built-ins/TypedArray/prototype/set/BigInt/string-tobigint.js (default) - Unexpected token ILLEGAL at (47, 34) to (47, 36) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-offset-tointeger.js (strict mode) - Unexpected token ILLEGAL at (20, 20) to (20, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-offset-tointeger.js (default) - Unexpected token ILLEGAL at (20, 20) to (20, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-other-type-sab.js (strict mode) - Unexpected token ILLEGAL at (18, 11) to (18, 14) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-other-type-sab.js (default) - Unexpected token ILLEGAL at (18, 11) to (18, 14) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-other-type.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-other-type.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-same-type-sab.js (strict mode) - Unexpected token ILLEGAL at (18, 11) to (18, 14) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-same-type-sab.js (default) - Unexpected token ILLEGAL at (18, 11) to (18, 14) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-same-type.js (strict mode) - Unexpected token ILLEGAL at (34, 20) to (34, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-same-type.js (default) - Unexpected token ILLEGAL at (34, 20) to (34, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-same-buffer-same-type-sab.js (strict mode) - Unexpected token ILLEGAL at (19, 14) to (19, 16) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-same-buffer-same-type-sab.js (default) 
- Unexpected token ILLEGAL at (19, 14) to (19, 16) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-same-buffer-same-type.js (strict mode) - Unexpected token ILLEGAL at (36, 19) to (36, 21) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-same-buffer-same-type.js (default) - Unexpected token ILLEGAL at (36, 19) to (36, 21) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-src-arraylength-internal.js (strict mode) - Unexpected token ILLEGAL at (33, 20) to (33, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-src-arraylength-internal.js (default) - Unexpected token ILLEGAL at (33, 20) to (33, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-src-byteoffset-internal.js (strict mode) - Unexpected token ILLEGAL at (30, 20) to (30, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-src-byteoffset-internal.js (default) - Unexpected token ILLEGAL at (30, 20) to (30, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-target-arraylength-internal.js (strict mode) - Unexpected token ILLEGAL at (34, 20) to (34, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-target-arraylength-internal.js (default) - Unexpected token ILLEGAL at (34, 20) to (34, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-target-byteoffset-internal.js (strict mode) - Unexpected token ILLEGAL at (31, 20) to (31, 23) -built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-target-byteoffset-internal.js (default) - Unexpected token ILLEGAL at (31, 20) to (31, 23) -built-ins/TypedArray/prototype/set/src-typedarray-big-throws.js (strict mode) - Unexpected token ILLEGAL at (27, 27) to (27, 29) -built-ins/TypedArray/prototype/set/src-typedarray-big-throws.js (default) - Unexpected token ILLEGAL at (27, 27) to (27, 29) -built-ins/TypedArray/prototype/slice/BigInt/arraylength-internal.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArray/prototype/slice/BigInt/arraylength-internal.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArray/prototype/slice/BigInt/infinity.js (strict mode) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/infinity.js (default) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/minus-zero.js (strict mode) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/slice/BigInt/minus-zero.js (default) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/slice/BigInt/result-does-not-copy-ordinary-properties.js (strict mode) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/slice/BigInt/result-does-not-copy-ordinary-properties.js (default) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/slice/BigInt/results-with-different-length.js (strict mode) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/results-with-different-length.js (default) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/results-with-empty-length.js (strict mode) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/results-with-empty-length.js (default) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/results-with-same-length.js (strict mode) - Unexpected token ILLEGAL at (11, 23) 
to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/results-with-same-length.js (default) - Unexpected token ILLEGAL at (11, 23) to (11, 26) -built-ins/TypedArray/prototype/slice/BigInt/set-values-from-different-ctor-type.js (strict mode) - Unexpected token ILLEGAL at (33, 11) to (33, 14) -built-ins/TypedArray/prototype/slice/BigInt/set-values-from-different-ctor-type.js (default) - Unexpected token ILLEGAL at (33, 11) to (33, 14) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor-abrupt.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor-abrupt.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor-inherited.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor-inherited.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor-returns-throws.js (strict mode) - Unexpected token ILLEGAL at (32, 23) to (32, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor-returns-throws.js (default) - Unexpected token ILLEGAL at (32, 23) to (32, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-ctor.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-species-custom-ctor-invocation.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-species-custom-ctor-invocation.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-species-custom-ctor-returns-another-instance.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-species-custom-ctor-returns-another-instance.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-species-custom-ctor.js (strict mode) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/slice/BigInt/speciesctor-get-species-custom-ctor.js (default) - Unexpected token ILLEGAL at (40, 23) to (40, 26) -built-ins/TypedArray/prototype/slice/BigInt/tointeger-end.js (strict mode) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArray/prototype/slice/BigInt/tointeger-end.js (default) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArray/prototype/slice/BigInt/tointeger-start.js (strict mode) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArray/prototype/slice/BigInt/tointeger-start.js (default) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-arguments-with-thisarg.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-arguments-with-thisarg.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-arguments-without-thisarg.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-arguments-without-thisarg.js (default) - 
Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-no-interaction-over-non-integer.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 25) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-no-interaction-over-non-integer.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 25) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-return-does-not-change-instance.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-return-does-not-change-instance.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-set-value-during-interaction.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/some/BigInt/callbackfn-set-value-during-interaction.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/some/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/some/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/some/BigInt/values-are-not-cached.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/some/BigInt/values-are-not-cached.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/sort/BigInt/arraylength-internal.js (strict mode) - Unexpected token ILLEGAL at (26, 23) to (26, 26) -built-ins/TypedArray/prototype/sort/BigInt/arraylength-internal.js (default) - Unexpected token ILLEGAL at (26, 23) to (26, 26) -built-ins/TypedArray/prototype/sort/BigInt/comparefn-call-throws.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/sort/BigInt/comparefn-call-throws.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/sort/BigInt/comparefn-calls.js (strict mode) - Unexpected token ILLEGAL at (26, 23) to (26, 26) -built-ins/TypedArray/prototype/sort/BigInt/comparefn-calls.js (default) - Unexpected token ILLEGAL at (26, 23) to (26, 26) -built-ins/TypedArray/prototype/sort/BigInt/comparefn-nonfunction-call-throws.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArray/prototype/sort/BigInt/comparefn-nonfunction-call-throws.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArray/prototype/sort/BigInt/return-same-instance.js (strict mode) - Unexpected token ILLEGAL at (18, 23) to (18, 25) -built-ins/TypedArray/prototype/sort/BigInt/return-same-instance.js (default) - Unexpected token ILLEGAL at (18, 23) to (18, 25) -built-ins/TypedArray/prototype/sort/BigInt/sortcompare-with-no-tostring.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArray/prototype/sort/BigInt/sortcompare-with-no-tostring.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArray/prototype/sort/BigInt/sorted-values.js (strict mode) - Unexpected token ILLEGAL at (20, 19) to (20, 21) -built-ins/TypedArray/prototype/sort/BigInt/sorted-values.js (default) - Unexpected token ILLEGAL at (20, 19) to (20, 21) -built-ins/TypedArray/prototype/subarray/BigInt/infinity.js (strict mode) - Unexpected token ILLEGAL at (13, 23) to (13, 26) 
-built-ins/TypedArray/prototype/subarray/BigInt/infinity.js (default) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/subarray/BigInt/minus-zero.js (strict mode) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/subarray/BigInt/minus-zero.js (default) - Unexpected token ILLEGAL at (13, 23) to (13, 26) -built-ins/TypedArray/prototype/subarray/BigInt/result-does-not-copy-ordinary-properties.js (strict mode) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/result-does-not-copy-ordinary-properties.js (default) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/result-is-new-instance-from-same-ctor.js (strict mode) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/result-is-new-instance-from-same-ctor.js (default) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/result-is-new-instance-with-shared-buffer.js (strict mode) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/result-is-new-instance-with-shared-buffer.js (default) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/results-with-different-length.js (strict mode) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/results-with-different-length.js (default) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/results-with-empty-length.js (strict mode) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/results-with-empty-length.js (default) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/results-with-same-length.js (strict mode) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/results-with-same-length.js (default) - Unexpected token ILLEGAL at (16, 23) to (16, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor-abrupt.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor-abrupt.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor-inherited.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor-inherited.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor-returns-throws.js (strict mode) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor-returns-throws.js (default) - Unexpected token ILLEGAL at (31, 23) to (31, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-ctor.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-species-custom-ctor-invocation.js (strict mode) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-species-custom-ctor-invocation.js 
(default) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-species-custom-ctor-returns-another-instance.js (strict mode) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-species-custom-ctor-returns-another-instance.js (default) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-species-custom-ctor.js (strict mode) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/subarray/BigInt/speciesctor-get-species-custom-ctor.js (default) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/subarray/BigInt/tointeger-begin.js (strict mode) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArray/prototype/subarray/BigInt/tointeger-begin.js (default) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArray/prototype/subarray/BigInt/tointeger-end.js (strict mode) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArray/prototype/subarray/BigInt/tointeger-end.js (default) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/calls-tolocalestring-from-each-value.js (strict mode) - Unexpected token ILLEGAL at (45, 23) to (45, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/calls-tolocalestring-from-each-value.js (default) - Unexpected token ILLEGAL at (45, 23) to (45, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/calls-tostring-from-each-value.js (strict mode) - Unexpected token ILLEGAL at (50, 11) to (50, 14) -built-ins/TypedArray/prototype/toLocaleString/BigInt/calls-tostring-from-each-value.js (default) - Unexpected token ILLEGAL at (50, 11) to (50, 14) -built-ins/TypedArray/prototype/toLocaleString/BigInt/calls-valueof-from-each-value.js (strict mode) - Unexpected token ILLEGAL at (51, 23) to (51, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/calls-valueof-from-each-value.js (default) - Unexpected token ILLEGAL at (51, 23) to (51, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/get-length-uses-internal-arraylength.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/get-length-uses-internal-arraylength.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-firstelement-tolocalestring.js (strict mode) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-firstelement-tolocalestring.js (default) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-firstelement-tostring.js (strict mode) - Unexpected token ILLEGAL at (47, 23) to (47, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-firstelement-tostring.js (default) - Unexpected token ILLEGAL at (47, 23) to (47, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-firstelement-valueof.js (strict mode) - Unexpected token ILLEGAL at (48, 23) to (48, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-firstelement-valueof.js (default) - Unexpected token ILLEGAL at (48, 23) to (48, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-nextelement-tolocalestring.js (strict mode) - 
Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-nextelement-tolocalestring.js (default) - Unexpected token ILLEGAL at (39, 23) to (39, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-nextelement-tostring.js (strict mode) - Unexpected token ILLEGAL at (49, 23) to (49, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-nextelement-tostring.js (default) - Unexpected token ILLEGAL at (49, 23) to (49, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-nextelement-valueof.js (strict mode) - Unexpected token ILLEGAL at (50, 23) to (50, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-abrupt-from-nextelement-valueof.js (default) - Unexpected token ILLEGAL at (50, 23) to (50, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-result.js (strict mode) - Unexpected token ILLEGAL at (37, 23) to (37, 26) -built-ins/TypedArray/prototype/toLocaleString/BigInt/return-result.js (default) - Unexpected token ILLEGAL at (37, 23) to (37, 26) -built-ins/TypedArray/prototype/values/BigInt/iter-prototype.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/values/BigInt/iter-prototype.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 25) -built-ins/TypedArray/prototype/values/BigInt/return-itor.js (strict mode) - Unexpected token ILLEGAL at (17, 27) to (17, 29) -built-ins/TypedArray/prototype/values/BigInt/return-itor.js (default) - Unexpected token ILLEGAL at (17, 27) to (17, 29) -built-ins/TypedArrayConstructors/ctors-bigint/length-arg/init-zeros.js (strict mode) - Unexpected token ILLEGAL at (46, 31) to (46, 33) -built-ins/TypedArrayConstructors/ctors-bigint/length-arg/init-zeros.js (default) - Unexpected token ILLEGAL at (46, 31) to (46, 33) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/as-array-returns.js (strict mode) - Unexpected token ILLEGAL at (20, 27) to (20, 29) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/as-array-returns.js (default) - Unexpected token ILLEGAL at (20, 27) to (20, 29) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/as-generator-iterable-returns.js (strict mode) - Unexpected token ILLEGAL at (21, 10) to (21, 12) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/as-generator-iterable-returns.js (default) - Unexpected token ILLEGAL at (21, 10) to (21, 12) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/bigint-tobigint64.js (strict mode) - Unexpected token ILLEGAL at (67, 2) to (67, 23) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/bigint-tobigint64.js (default) - Unexpected token ILLEGAL at (67, 2) to (67, 23) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/bigint-tobiguint64.js (strict mode) - Unexpected token ILLEGAL at (67, 2) to (67, 23) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/bigint-tobiguint64.js (default) - Unexpected token ILLEGAL at (67, 2) to (67, 23) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/boolean-tobigint.js (strict mode) - Unexpected token ILLEGAL at (50, 34) to (50, 36) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/boolean-tobigint.js (default) - Unexpected token ILLEGAL at (50, 34) to (50, 36) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/new-instance-extensibility.js (strict mode) - Unexpected token ILLEGAL at (34, 9) to (34, 11) 
-built-ins/TypedArrayConstructors/ctors-bigint/object-arg/new-instance-extensibility.js (default) - Unexpected token ILLEGAL at (34, 9) to (34, 11) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/string-tobigint.js (strict mode) - Unexpected token ILLEGAL at (58, 34) to (58, 36) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/string-tobigint.js (default) - Unexpected token ILLEGAL at (58, 34) to (58, 36) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-from-property.js (strict mode) - Unexpected token ILLEGAL at (35, 11) to (35, 13) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-from-property.js (default) - Unexpected token ILLEGAL at (35, 11) to (35, 13) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-to-primitive-typeerror.js (strict mode) - Unexpected token ILLEGAL at (72, 12) to (72, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-to-primitive-typeerror.js (default) - Unexpected token ILLEGAL at (72, 12) to (72, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-to-primitive.js (strict mode) - Unexpected token ILLEGAL at (70, 12) to (70, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-to-primitive.js (default) - Unexpected token ILLEGAL at (70, 12) to (70, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-tostring.js (strict mode) - Unexpected token ILLEGAL at (83, 12) to (83, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-tostring.js (default) - Unexpected token ILLEGAL at (83, 12) to (83, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-valueof-typeerror.js (strict mode) - Unexpected token ILLEGAL at (84, 12) to (84, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-valueof-typeerror.js (default) - Unexpected token ILLEGAL at (84, 12) to (84, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-valueof.js (strict mode) - Unexpected token ILLEGAL at (78, 12) to (78, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-obj-valueof.js (default) - Unexpected token ILLEGAL at (78, 12) to (78, 14) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-property.js (strict mode) - Unexpected token ILLEGAL at (35, 11) to (35, 13) -built-ins/TypedArrayConstructors/ctors-bigint/object-arg/throws-setting-property.js (default) - Unexpected token ILLEGAL at (35, 11) to (35, 13) -built-ins/TypedArrayConstructors/from/BigInt/custom-ctor-returns-other-instance.js (strict mode) - Unexpected token ILLEGAL at (27, 20) to (27, 22) -built-ins/TypedArrayConstructors/from/BigInt/custom-ctor-returns-other-instance.js (default) - Unexpected token ILLEGAL at (27, 20) to (27, 22) -built-ins/TypedArrayConstructors/from/BigInt/custom-ctor-returns-smaller-instance-throws.js (strict mode) - Unexpected token ILLEGAL at (25, 18) to (25, 20) -built-ins/TypedArrayConstructors/from/BigInt/custom-ctor-returns-smaller-instance-throws.js (default) - Unexpected token ILLEGAL at (25, 18) to (25, 20) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-abrupt-completion.js (strict mode) - Unexpected token ILLEGAL at (21, 7) to (21, 10) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-abrupt-completion.js (default) - Unexpected token ILLEGAL at (21, 7) to (21, 10) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-arguments.js (strict mode) - Unexpected 
token ILLEGAL at (30, 11) to (30, 13) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-arguments.js (default) - Unexpected token ILLEGAL at (30, 11) to (30, 13) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-this-with-thisarg.js (strict mode) - Unexpected token ILLEGAL at (29, 11) to (29, 13) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-this-with-thisarg.js (default) - Unexpected token ILLEGAL at (29, 11) to (29, 13) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-this-without-thisarg-non-strict.js (default) - Unexpected token ILLEGAL at (32, 11) to (32, 14) -built-ins/TypedArrayConstructors/from/BigInt/mapfn-this-without-thisarg-strict.js (strict mode) - Unexpected token ILLEGAL at (29, 11) to (29, 13) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-from-ordinary-object.js (strict mode) - Unexpected token ILLEGAL at (12, 7) to (12, 10) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-from-ordinary-object.js (default) - Unexpected token ILLEGAL at (12, 7) to (12, 10) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-from-sparse-array.js (strict mode) - Unexpected token ILLEGAL at (11, 15) to (11, 18) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-from-sparse-array.js (default) - Unexpected token ILLEGAL at (11, 15) to (11, 18) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-using-custom-ctor.js (strict mode) - Unexpected token ILLEGAL at (20, 35) to (20, 38) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-using-custom-ctor.js (default) - Unexpected token ILLEGAL at (20, 35) to (20, 38) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-with-mapfn.js (strict mode) - Unexpected token ILLEGAL at (13, 20) to (13, 22) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-with-mapfn.js (default) - Unexpected token ILLEGAL at (13, 20) to (13, 22) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-without-mapfn.js (strict mode) - Unexpected token ILLEGAL at (12, 24) to (12, 27) -built-ins/TypedArrayConstructors/from/BigInt/new-instance-without-mapfn.js (default) - Unexpected token ILLEGAL at (12, 24) to (12, 27) -built-ins/TypedArrayConstructors/from/BigInt/set-value-abrupt-completion.js (strict mode) - Unexpected token ILLEGAL at (29, 16) to (29, 19) -built-ins/TypedArrayConstructors/from/BigInt/set-value-abrupt-completion.js (default) - Unexpected token ILLEGAL at (29, 16) to (29, 19) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/desc-value-throws.js (strict mode) - Unexpected token ILLEGAL at (35, 23) to (35, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/desc-value-throws.js (default) - Unexpected token ILLEGAL at (35, 23) to (35, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/detached-buffer-realm.js (strict mode) - Unexpected token ILLEGAL at (32, 9) to (32, 11) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/detached-buffer-realm.js (default) - Unexpected token ILLEGAL at (32, 9) to (32, 11) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (30, 9) to (30, 11) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/detached-buffer.js (default) - Unexpected token ILLEGAL at (30, 9) to (30, 11) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-greater-than-last-index.js (strict mode) - Unexpected token ILLEGAL at (24, 23) to (24, 26) 
-built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-greater-than-last-index.js (default) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-lower-than-zero.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-lower-than-zero.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-minus-zero.js (default) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-not-canonical-index.js (strict mode) - Unexpected token ILLEGAL at (40, 13) to (40, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-not-canonical-index.js (default) - Unexpected token ILLEGAL at (40, 13) to (40, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-not-integer.js (strict mode) - Unexpected token ILLEGAL at (24, 13) to (24, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-not-integer.js (default) - Unexpected token ILLEGAL at (24, 13) to (24, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-not-numeric-index.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-not-numeric-index.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-accessor-desc.js (strict mode) - Unexpected token ILLEGAL at (34, 30) to (34, 32) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-accessor-desc.js (default) - Unexpected token ILLEGAL at (34, 30) to (34, 32) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-desc-configurable.js (strict mode) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-desc-configurable.js (default) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-desc-not-enumerable.js (strict mode) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-desc-not-enumerable.js (default) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-desc-not-writable.js (strict mode) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex-desc-not-writable.js (default) - Unexpected token ILLEGAL at (26, 13) to (26, 16) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-numericindex.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-symbol.js (strict mode) - Unexpected token ILLEGAL 
at (19, 23) to (19, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/key-is-symbol.js (default) - Unexpected token ILLEGAL at (19, 23) to (19, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/non-extensible-new-key.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/non-extensible-new-key.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/non-extensible-redefine-key.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/non-extensible-redefine-key.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/set-value.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 25) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/set-value.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 25) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/this-is-not-extensible.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/this-is-not-extensible.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/tonumber-value-detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (33, 19) to (33, 22) -built-ins/TypedArrayConstructors/internals/DefineOwnProperty/BigInt/tonumber-value-detached-buffer.js (default) - Unexpected token ILLEGAL at (33, 19) to (33, 22) -built-ins/TypedArrayConstructors/internals/Get/BigInt/detached-buffer-key-is-not-numeric-index.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/detached-buffer-key-is-not-numeric-index.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/detached-buffer-key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (19, 23) to (19, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/detached-buffer-key-is-symbol.js (default) - Unexpected token ILLEGAL at (19, 23) to (19, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/detached-buffer.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/indexed-value-sab.js (strict mode) - Unexpected token ILLEGAL at (24, 14) to (24, 17) -built-ins/TypedArrayConstructors/internals/Get/BigInt/indexed-value-sab.js (default) - Unexpected token ILLEGAL at (24, 14) to (24, 17) -built-ins/TypedArrayConstructors/internals/Get/BigInt/indexed-value.js (strict mode) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/indexed-value.js (default) - Unexpected token ILLEGAL at (30, 23) to (30, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-not-integer.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-not-integer.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) 
-built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-not-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-not-minus-zero.js (default) - Unexpected token ILLEGAL at (34, 23) to (34, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-not-numeric-index.js (strict mode) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-not-numeric-index.js (default) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-out-of-bounds.js (strict mode) - Unexpected token ILLEGAL at (38, 23) to (38, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-out-of-bounds.js (default) - Unexpected token ILLEGAL at (38, 23) to (38, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/Get/BigInt/key-is-symbol.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/detached-buffer-key-is-not-number.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/detached-buffer-key-is-not-number.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/detached-buffer-key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/detached-buffer-key-is-symbol.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/index-prop-desc.js (strict mode) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/index-prop-desc.js (default) - Unexpected token ILLEGAL at (23, 23) to (23, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (33, 23) to (33, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-minus-zero.js (default) - Unexpected token ILLEGAL at (33, 23) to (33, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-not-canonical-index.js (strict mode) - Unexpected token ILLEGAL at (31, 25) to (31, 28) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-not-canonical-index.js (default) - Unexpected token ILLEGAL at (31, 25) to (31, 28) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-not-integer.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-not-integer.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-not-numeric-index.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-not-numeric-index.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-out-of-bounds.js (strict mode) - Unexpected token ILLEGAL at (28, 23) to (28, 26) 
-built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-out-of-bounds.js (default) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/GetOwnProperty/BigInt/key-is-symbol.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/detached-buffer-key-is-not-number.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/detached-buffer-key-is-not-number.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/detached-buffer-key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/detached-buffer-key-is-symbol.js (default) - Unexpected token ILLEGAL at (24, 23) to (24, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/indexed-value.js (strict mode) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/indexed-value.js (default) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/key-is-not-canonical-index.js (strict mode) - Unexpected token ILLEGAL at (46, 47) to (46, 50) -built-ins/TypedArrayConstructors/internals/HasProperty/BigInt/key-is-not-canonical-index.js (default) - Unexpected token ILLEGAL at (46, 47) to (46, 50) -built-ins/TypedArrayConstructors/internals/OwnPropertyKeys/BigInt/integer-indexes-and-string-and-symbol-keys-.js (strict mode) - Unexpected token ILLEGAL at (27, 24) to (27, 27) -built-ins/TypedArrayConstructors/internals/OwnPropertyKeys/BigInt/integer-indexes-and-string-and-symbol-keys-.js (default) - Unexpected token ILLEGAL at (27, 24) to (27, 27) -built-ins/TypedArrayConstructors/internals/OwnPropertyKeys/BigInt/integer-indexes-and-string-keys.js (strict mode) - Unexpected token ILLEGAL at (24, 24) to (24, 27) -built-ins/TypedArrayConstructors/internals/OwnPropertyKeys/BigInt/integer-indexes-and-string-keys.js (default) - Unexpected token ILLEGAL at (24, 24) to (24, 27) -built-ins/TypedArrayConstructors/internals/OwnPropertyKeys/BigInt/integer-indexes.js (strict mode) - Unexpected token ILLEGAL at (21, 24) to (21, 27) -built-ins/TypedArrayConstructors/internals/OwnPropertyKeys/BigInt/integer-indexes.js (default) - Unexpected token ILLEGAL at (21, 24) to (21, 27) -built-ins/TypedArrayConstructors/internals/Set/BigInt/bigint-tobigint64.js (strict mode) - Unexpected token ILLEGAL at (73, 2) to (73, 23) -built-ins/TypedArrayConstructors/internals/Set/BigInt/bigint-tobigint64.js (default) - Unexpected token ILLEGAL at (73, 2) to (73, 23) -built-ins/TypedArrayConstructors/internals/Set/BigInt/bigint-tobiguint64.js (strict mode) - Unexpected token ILLEGAL at (73, 2) to (73, 23) -built-ins/TypedArrayConstructors/internals/Set/BigInt/bigint-tobiguint64.js (default) - Unexpected token ILLEGAL at (73, 2) to (73, 23) -built-ins/TypedArrayConstructors/internals/Set/BigInt/boolean-tobigint.js (strict mode) - Unexpected token ILLEGAL at (59, 34) to (59, 36) -built-ins/TypedArrayConstructors/internals/Set/BigInt/boolean-tobigint.js (default) - Unexpected token ILLEGAL at (59, 34) to (59, 36) 
-built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer-key-is-not-numeric-index.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer-key-is-not-numeric-index.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer-key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer-key-is-symbol.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer-realm.js (strict mode) - Unexpected token ILLEGAL at (38, 16) to (38, 18) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer-realm.js (default) - Unexpected token ILLEGAL at (38, 16) to (38, 18) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer.js (strict mode) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/detached-buffer.js (default) - Unexpected token ILLEGAL at (29, 23) to (29, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/indexed-value.js (strict mode) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/indexed-value.js (default) - Unexpected token ILLEGAL at (36, 23) to (36, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-minus-zero.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-minus-zero.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-not-canonical-index.js (strict mode) - Unexpected token ILLEGAL at (29, 25) to (29, 28) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-not-canonical-index.js (default) - Unexpected token ILLEGAL at (29, 25) to (29, 28) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-not-integer.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-not-integer.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-not-numeric-index.js (strict mode) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-not-numeric-index.js (default) - Unexpected token ILLEGAL at (21, 23) to (21, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-out-of-bounds.js (strict mode) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-out-of-bounds.js (default) - Unexpected token ILLEGAL at (28, 23) to (28, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-symbol.js (strict mode) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/key-is-symbol.js (default) - Unexpected token ILLEGAL at (22, 23) to (22, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/string-tobigint.js (strict mode) - Unexpected token ILLEGAL at (68, 34) to (68, 36) -built-ins/TypedArrayConstructors/internals/Set/BigInt/string-tobigint.js (default) - Unexpected token ILLEGAL at (68, 34) to (68, 36) -built-ins/TypedArrayConstructors/internals/Set/BigInt/tonumber-value-detached-buffer.js (strict mode) - Unexpected token 
ILLEGAL at (30, 19) to (30, 22) -built-ins/TypedArrayConstructors/internals/Set/BigInt/tonumber-value-detached-buffer.js (default) - Unexpected token ILLEGAL at (30, 19) to (30, 22) -built-ins/TypedArrayConstructors/internals/Set/BigInt/tonumber-value-throws.js (strict mode) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/Set/BigInt/tonumber-value-throws.js (default) - Unexpected token ILLEGAL at (27, 23) to (27, 26) -built-ins/TypedArrayConstructors/internals/Set/bigint-tonumber.js (strict mode) - Unexpected token ILLEGAL at (60, 22) to (60, 24) -built-ins/TypedArrayConstructors/internals/Set/bigint-tonumber.js (default) - Unexpected token ILLEGAL at (60, 22) to (60, 24) -built-ins/TypedArrayConstructors/of/BigInt/argument-number-value-throws.js (strict mode) - Unexpected token ILLEGAL at (25, 13) to (25, 16) -built-ins/TypedArrayConstructors/of/BigInt/argument-number-value-throws.js (default) - Unexpected token ILLEGAL at (25, 13) to (25, 16) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor-does-not-instantiate-ta-throws.js (strict mode) - Unexpected token ILLEGAL at (27, 21) to (27, 24) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor-does-not-instantiate-ta-throws.js (default) - Unexpected token ILLEGAL at (27, 21) to (27, 24) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor-returns-other-instance.js (strict mode) - Unexpected token ILLEGAL at (28, 36) to (28, 38) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor-returns-other-instance.js (default) - Unexpected token ILLEGAL at (28, 36) to (28, 38) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor-returns-smaller-instance-throws.js (strict mode) - Unexpected token ILLEGAL at (26, 29) to (26, 31) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor-returns-smaller-instance-throws.js (default) - Unexpected token ILLEGAL at (26, 29) to (26, 31) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor.js (strict mode) - Unexpected token ILLEGAL at (31, 21) to (31, 24) -built-ins/TypedArrayConstructors/of/BigInt/custom-ctor.js (default) - Unexpected token ILLEGAL at (31, 21) to (31, 24) -built-ins/TypedArrayConstructors/of/BigInt/new-instance-using-custom-ctor.js (strict mode) - Unexpected token ILLEGAL at (21, 32) to (21, 35) -built-ins/TypedArrayConstructors/of/BigInt/new-instance-using-custom-ctor.js (default) - Unexpected token ILLEGAL at (21, 32) to (21, 35) -built-ins/TypedArrayConstructors/of/BigInt/new-instance.js (strict mode) - Unexpected token ILLEGAL at (27, 21) to (27, 24) -built-ins/TypedArrayConstructors/of/BigInt/new-instance.js (default) - Unexpected token ILLEGAL at (27, 21) to (27, 24) -built-ins/TypedArrayConstructors/of/BigInt/this-is-not-constructor.js (strict mode) - Unexpected token ILLEGAL at (22, 18) to (22, 20) -built-ins/TypedArrayConstructors/of/BigInt/this-is-not-constructor.js (default) - Unexpected token ILLEGAL at (22, 18) to (22, 20) -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_F-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_F-negated.js (default) Missing parse error 
-language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_F.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_F.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Invalid-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Invalid-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Invalid.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Invalid.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_N-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_N-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_N.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_N.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_No-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-let-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_No-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_No.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_No.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) 
+built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_T-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_T-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_T.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_T.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Y-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Y-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Y.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Y.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Yes-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Yes-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Yes.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/binary-property-with-value-ASCII_-_Yes.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/character-class-range-end.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-let-declaration.js (default) +built-ins/RegExp/property-escapes/character-class-range-end.js (default) Missing parse error -language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/character-class-range-no-dash-end.js (strict mode) Missing parse error 
-language/block-scope/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/character-class-range-no-dash-end.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/character-class-range-no-dash-start.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/character-class-range-no-dash-start.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/character-class-range-start.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/character-class-range-start.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Block-implicit-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Block-implicit-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Block-implicit.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Block-implicit.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script-implicit-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script-implicit-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script-implicit.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script-implicit.js (default) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-let-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script-negated.js (default) Missing parse error 
-language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-In-prefix-Script.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-Is-prefix-Script-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-Is-prefix-Script-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-Is-prefix-Script.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-Is-prefix-Script.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-circumflex-negation-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-circumflex-negation-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-circumflex-negation.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-circumflex-negation.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-empty-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-empty-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-empty.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-empty.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-invalid-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-let-declaration.js 
(default) +built-ins/RegExp/property-escapes/grammar-extension-invalid-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-invalid.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-invalid.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-no-braces-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-no-braces-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-no-braces-value-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-no-braces-value-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-no-braces-value.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-no-braces-value.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-no-braces.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-no-braces.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator-and-value-only-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator-and-value-only-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-separator-and-value-only.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator-and-value-only.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-let-declaration.js (default) 
+built-ins/RegExp/property-escapes/grammar-extension-separator-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-separator-only-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator-only-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-separator-only.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator-only.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-separator.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-separator.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-unclosed-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-unclosed-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-unclosed.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-unclosed.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-unopened-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-unopened-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/grammar-extension-unopened.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/grammar-extension-unopened.js (default) Missing parse error 
-language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-let-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-01-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-01-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-01.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-01.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-02-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-02-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-02.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-02.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-03-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-03-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-03.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-03.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-04-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-04-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-04.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-04.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-let-declaration.js (default) 
+built-ins/RegExp/property-escapes/loose-matching-05-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-05-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-var-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-05.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-05.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-06-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-06-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-06.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-06.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-class-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-07-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-07-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-const-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-07.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-07.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-function-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-08-negated.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-08-negated.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-08.js (strict mode) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +built-ins/RegExp/property-escapes/loose-matching-08.js (default) Missing parse error -language/block-scope/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-let-declaration.js (default) +built-ins/RegExp/property-escapes/loose-matching-09-negated.js (strict mode) + Missing parse error 
+built-ins/RegExp/property-escapes/loose-matching-09-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-09.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-09.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-10-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-10-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-10.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-10.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-11-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-11-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-11.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-11.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-12-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-12-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-12.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-12.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-13-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-13-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-13.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-13.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-14-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-14-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-14.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/loose-matching-14.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category-equals-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category-equals-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category-equals.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category-equals.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-General_Category.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script-equals-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script-equals-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script-equals.js (strict 
mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script-equals.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions-equals-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions-equals-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions-equals.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions-equals.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-binary-property-without-value-Script_Extensions.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-binary-property-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-binary-property-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-binary-property.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-binary-property.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-and-value-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-and-value-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-and-value.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-and-value.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-existing-value-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-existing-value-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-existing-value.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-existing-value.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-General_Category-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-General_Category-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script.js (strict mode) + Missing parse 
error +built-ins/RegExp/property-escapes/non-existent-property-value-Script.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script_Extensions-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script_Extensions-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script_Extensions.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-Script_Extensions.js (default) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-general-category.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/non-existent-property-value-general-category.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Composition_Exclusion-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Composition_Exclusion-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Composition_Exclusion.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Composition_Exclusion.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFC-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFC-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFC.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFC.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFD-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFD-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFD.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFD.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKC-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKC-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKC.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKC.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKD-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKD-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKD.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Expands_On_NFKD.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-FC_NFKC_Closure-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-FC_NFKC_Closure-negated.js (default) + Missing parse error 
+built-ins/RegExp/property-escapes/unsupported-binary-property-FC_NFKC_Closure.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-FC_NFKC_Closure.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Full_Composition_Exclusion-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Full_Composition_Exclusion-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Full_Composition_Exclusion.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Full_Composition_Exclusion.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Grapheme_Link-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Grapheme_Link-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Grapheme_Link.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Grapheme_Link.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Hyphen-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Hyphen-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Hyphen.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Hyphen.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Alphabetic-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Alphabetic-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Alphabetic.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Alphabetic.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Default_Ignorable_Code_Point-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Default_Ignorable_Code_Point-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Default_Ignorable_Code_Point.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Default_Ignorable_Code_Point.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Grapheme_Extend-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Grapheme_Extend-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Grapheme_Extend.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Grapheme_Extend.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Continue-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Continue-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Continue.js 
(strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Continue.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Start-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Start-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Start.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_ID_Start.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Lowercase-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Lowercase-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Lowercase.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Lowercase.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Math-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Math-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Math.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Math.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Uppercase-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Uppercase-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Uppercase.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Other_Uppercase.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Prepended_Concatenation_Mark-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Prepended_Concatenation_Mark-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Prepended_Concatenation_Mark.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-binary-property-Prepended_Concatenation_Mark.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Block-with-value-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Block-with-value-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Block-with-value.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Block-with-value.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-FC_NFKC_Closure-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-FC_NFKC_Closure-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-FC_NFKC_Closure.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-FC_NFKC_Closure.js (default) + Missing parse error 
+built-ins/RegExp/property-escapes/unsupported-property-Line_Break-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break-with-value-negated.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break-with-value-negated.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break-with-value.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break-with-value.js (default) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break.js (strict mode) + Missing parse error +built-ins/RegExp/property-escapes/unsupported-property-Line_Break.js (default) + Missing parse error +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-null.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (62, 2) to (69, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (62, 2) to (69, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/arguments-object/cls-decl-async-private-gen-meth-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (41, 2) to (46, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (41, 2) to (46, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (41, 2) to (46, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-null.js (default) + Classes may not have private methods. at (41, 2) to (46, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. 
at (41, 2) to (45, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (41, 2) to (45, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (43, 2) to (50, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (43, 2) to (50, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (41, 2) to (46, 3) +language/arguments-object/cls-decl-async-private-gen-meth-static-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (41, 2) to (46, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-single-args.js (default) + Classes may not have private methods. 
at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-gen-meth-static-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. 
at (38, 2) to (45, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-decl-private-meth-static-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-null.js (default) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (64, 2) to (71, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (64, 2) to (71, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-null.js (default) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (64, 2) to (71, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (64, 2) to (71, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. 
at (62, 2) to (67, 3) +language/arguments-object/cls-expr-async-private-gen-meth-static-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (62, 2) to (67, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-gen-meth-static-args-trailing-comma-undefined.js (default) + Classes may not have private methods. 
at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-args-trailing-comma-undefined.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-multiple.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-multiple.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-null.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-null.js (default) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-single-args.js (strict mode) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-single-args.js (default) + Classes may not have private methods. at (36, 2) to (40, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-spread-operator.js (strict mode) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-spread-operator.js (default) + Classes may not have private methods. at (38, 2) to (45, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-undefined.js (strict mode) + Classes may not have private methods. at (36, 2) to (41, 3) +language/arguments-object/cls-expr-private-meth-static-args-trailing-comma-undefined.js (default) + Classes may not have private methods. 
at (36, 2) to (41, 3) +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-function-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-function.js 
(default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/class-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error 
+language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/const-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/fn-scope-var-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-var-declaration-nested-in-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-var-declaration-nested-in-function.js (default) + 
Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/function-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-generator.js (default) + Missing parse 
error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/generator-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-const.js (default) + Missing parse error 
+language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/inner-block-var-redeclaration-attempt-after-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-var.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/let-name-redeclaration-attempt-with-var.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-class.js (default) + Missing parse 
error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-name-redeclaration-attempt-with-let.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-async-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-async-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-async-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-async-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-class.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-class.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-const.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-const.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-function.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-function.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-generator.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-generator.js (default) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-let.js (strict mode) + Missing parse error +language/block-scope/syntax/redeclaration/var-redeclaration-attempt-after-let.js (default) + Missing parse error +language/expressions/arrow-function/params-duplicate.js (default) Missing parse error -language/expressions/addition/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (16, 38) to (16, 40) -language/expressions/addition/bigint-and-number.js (default) - Unexpected token ILLEGAL at (16, 38) to (16, 40) -language/expressions/addition/bigint-arithmetic.js (strict mode) - Unexpected token ILLEGAL at (15, 8) to (15, 27) -language/expressions/addition/bigint-arithmetic.js (default) - Unexpected token ILLEGAL at (15, 8) to (15, 27) -language/expressions/addition/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/addition/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/addition/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL 
at (17, 11) to (17, 13) -language/expressions/addition/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/addition/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/addition/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/addition/coerce-bigint-to-string.js (strict mode) - Unexpected token ILLEGAL at (26, 18) to (26, 20) -language/expressions/addition/coerce-bigint-to-string.js (default) - Unexpected token ILLEGAL at (26, 18) to (26, 20) language/expressions/arrow-function/syntax/early-errors/arrowparameters-cover-no-duplicates.js (default) Missing parse error language/expressions/arrow-function/syntax/early-errors/use-strict-with-non-simple-param.js (strict mode) Missing parse error language/expressions/arrow-function/syntax/early-errors/use-strict-with-non-simple-param.js (default) Missing parse error +language/expressions/async-arrow-function/await-as-param-ident-nested-arrow-parameter-position.js (strict mode) + Missing parse error +language/expressions/async-arrow-function/await-as-param-ident-nested-arrow-parameter-position.js (default) + Missing parse error +language/expressions/async-arrow-function/await-as-param-nested-arrow-body-position.js (strict mode) + Missing parse error +language/expressions/async-arrow-function/await-as-param-nested-arrow-body-position.js (default) + Missing parse error +language/expressions/async-arrow-function/await-as-param-nested-arrow-parameter-position.js (strict mode) + Missing parse error +language/expressions/async-arrow-function/await-as-param-nested-arrow-parameter-position.js (default) + Missing parse error +language/expressions/async-arrow-function/await-as-param-rest-nested-arrow-parameter-position.js (strict mode) + Missing parse error +language/expressions/async-arrow-function/await-as-param-rest-nested-arrow-parameter-position.js (default) + Missing parse error language/expressions/async-arrow-function/early-errors-arrow-NSPL-with-USD.js (strict mode) Missing parse error language/expressions/async-arrow-function/early-errors-arrow-NSPL-with-USD.js (default) @@ -1914,14 +1234,12 @@ language/expressions/async-arrow-function/early-errors-arrow-await-in-formals.js Missing parse error language/expressions/async-arrow-function/early-errors-arrow-await-in-formals.js (default) Missing parse error +language/expressions/async-arrow-function/early-errors-arrow-duplicate-parameters.js (default) + Missing parse error language/expressions/async-arrow-function/early-errors-arrow-formals-body-duplicate.js (strict mode) Missing parse error language/expressions/async-arrow-function/early-errors-arrow-formals-body-duplicate.js (default) Missing parse error -language/expressions/async-arrow-function/early-errors-arrow-formals-lineterminator.js (strict mode) - Missing parse error -language/expressions/async-arrow-function/early-errors-arrow-formals-lineterminator.js (default) - Missing parse error language/expressions/async-function/early-errors-expression-NSPL-with-USD.js (strict mode) Missing parse error language/expressions/async-function/early-errors-expression-NSPL-with-USD.js (default) @@ -1946,630 +1264,4454 @@ language/expressions/async-generator/early-errors-expression-formals-contains-aw Missing parse error language/expressions/async-generator/early-errors-expression-formals-contains-await-expr.js (default) Missing parse error -language/expressions/bitwise-and/bigint-and-number.js 
(strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/bitwise-and/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/bitwise-and/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/bitwise-and/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/bitwise-and/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (17, 24) to (17, 30) -language/expressions/bitwise-and/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (17, 24) to (17, 30) -language/expressions/bitwise-and/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/bitwise-and/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/bitwise-and/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/bitwise-and/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/bitwise-and/bigint.js (strict mode) - Unexpected token ILLEGAL at (26, 17) to (26, 22) -language/expressions/bitwise-and/bigint.js (default) - Unexpected token ILLEGAL at (26, 17) to (26, 22) -language/expressions/bitwise-not/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (19, 25) to (19, 27) -language/expressions/bitwise-not/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (19, 25) to (19, 27) -language/expressions/bitwise-not/bigint.js (strict mode) - Unexpected token ILLEGAL at (15, 18) to (15, 20) -language/expressions/bitwise-not/bigint.js (default) - Unexpected token ILLEGAL at (15, 18) to (15, 20) -language/expressions/bitwise-or/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/bitwise-or/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/bitwise-or/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/bitwise-or/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/bitwise-or/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (18, 24) to (18, 30) -language/expressions/bitwise-or/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (18, 24) to (18, 30) -language/expressions/bitwise-or/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/bitwise-or/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/bitwise-or/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/bitwise-or/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/bitwise-or/bigint.js (strict mode) - Unexpected token ILLEGAL at (26, 17) to (26, 22) -language/expressions/bitwise-or/bigint.js (default) - Unexpected token ILLEGAL at (26, 17) to (26, 22) -language/expressions/bitwise-xor/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/bitwise-xor/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/bitwise-xor/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) 
-language/expressions/bitwise-xor/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/bitwise-xor/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (18, 24) to (18, 30) -language/expressions/bitwise-xor/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (18, 24) to (18, 30) -language/expressions/bitwise-xor/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/bitwise-xor/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/bitwise-xor/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/bitwise-xor/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/bitwise-xor/bigint.js (strict mode) - Unexpected token ILLEGAL at (26, 17) to (26, 22) -language/expressions/bitwise-xor/bigint.js (default) - Unexpected token ILLEGAL at (26, 17) to (26, 22) -language/expressions/class/constructor-this-tdz-during-initializers.js (strict mode) - Unexpected token = at (30, 8) to (30, 9) -language/expressions/class/constructor-this-tdz-during-initializers.js (default) - Unexpected token = at (30, 8) to (30, 9) -language/expressions/class/fields-after-same-line-gen-computed-names.js (strict mode) - Unexpected token = at (27, 26) to (27, 27) -language/expressions/class/fields-after-same-line-gen-computed-names.js (default) - Unexpected token = at (27, 26) to (27, 27) -language/expressions/class/fields-after-same-line-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 31) to (28, 32) -language/expressions/class/fields-after-same-line-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 31) to (28, 32) -language/expressions/class/fields-after-same-line-gen-literal-names.js (strict mode) - Unexpected token = at (27, 27) to (27, 28) -language/expressions/class/fields-after-same-line-gen-literal-names.js (default) - Unexpected token = at (27, 27) to (27, 28) -language/expressions/class/fields-after-same-line-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 36) to (25, 37) -language/expressions/class/fields-after-same-line-gen-string-literal-names.js (default) - Unexpected token = at (25, 36) to (25, 37) -language/expressions/class/fields-after-same-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 25) to (27, 26) -language/expressions/class/fields-after-same-line-method-computed-names.js (default) - Unexpected token = at (27, 25) to (27, 26) -language/expressions/class/fields-after-same-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 30) to (28, 31) -language/expressions/class/fields-after-same-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 30) to (28, 31) -language/expressions/class/fields-after-same-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 26) to (27, 27) -language/expressions/class/fields-after-same-line-method-literal-names.js (default) - Unexpected token = at (27, 26) to (27, 27) -language/expressions/class/fields-after-same-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 35) to (25, 36) -language/expressions/class/fields-after-same-line-method-string-literal-names.js (default) - Unexpected token = at (25, 35) to (25, 36) -language/expressions/class/fields-after-same-line-static-async-gen-computed-names.js (strict mode) - 
Unexpected token = at (27, 39) to (27, 40) -language/expressions/class/fields-after-same-line-static-async-gen-computed-names.js (default) - Unexpected token = at (27, 39) to (27, 40) -language/expressions/class/fields-after-same-line-static-async-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 44) to (28, 45) -language/expressions/class/fields-after-same-line-static-async-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 44) to (28, 45) -language/expressions/class/fields-after-same-line-static-async-gen-literal-names.js (strict mode) - Unexpected token = at (27, 40) to (27, 41) -language/expressions/class/fields-after-same-line-static-async-gen-literal-names.js (default) - Unexpected token = at (27, 40) to (27, 41) -language/expressions/class/fields-after-same-line-static-async-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 49) to (25, 50) -language/expressions/class/fields-after-same-line-static-async-gen-string-literal-names.js (default) - Unexpected token = at (25, 49) to (25, 50) -language/expressions/class/fields-after-same-line-static-async-method-computed-names.js (strict mode) - Unexpected token = at (27, 38) to (27, 39) -language/expressions/class/fields-after-same-line-static-async-method-computed-names.js (default) - Unexpected token = at (27, 38) to (27, 39) -language/expressions/class/fields-after-same-line-static-async-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 43) to (28, 44) -language/expressions/class/fields-after-same-line-static-async-method-computed-symbol-names.js (default) - Unexpected token = at (28, 43) to (28, 44) -language/expressions/class/fields-after-same-line-static-async-method-literal-names.js (strict mode) - Unexpected token = at (27, 39) to (27, 40) -language/expressions/class/fields-after-same-line-static-async-method-literal-names.js (default) - Unexpected token = at (27, 39) to (27, 40) -language/expressions/class/fields-after-same-line-static-async-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 48) to (25, 49) -language/expressions/class/fields-after-same-line-static-async-method-string-literal-names.js (default) - Unexpected token = at (25, 48) to (25, 49) -language/expressions/class/fields-after-same-line-static-gen-computed-names.js (strict mode) - Unexpected token = at (27, 33) to (27, 34) -language/expressions/class/fields-after-same-line-static-gen-computed-names.js (default) - Unexpected token = at (27, 33) to (27, 34) -language/expressions/class/fields-after-same-line-static-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 38) to (28, 39) -language/expressions/class/fields-after-same-line-static-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 38) to (28, 39) -language/expressions/class/fields-after-same-line-static-gen-literal-names.js (strict mode) - Unexpected token = at (27, 34) to (27, 35) -language/expressions/class/fields-after-same-line-static-gen-literal-names.js (default) - Unexpected token = at (27, 34) to (27, 35) -language/expressions/class/fields-after-same-line-static-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 43) to (25, 44) -language/expressions/class/fields-after-same-line-static-gen-string-literal-names.js (default) - Unexpected token = at (25, 43) to (25, 44) -language/expressions/class/fields-after-same-line-static-method-computed-names.js (strict mode) - Unexpected token = at (27, 32) to (27, 33) 
-language/expressions/class/fields-after-same-line-static-method-computed-names.js (default) - Unexpected token = at (27, 32) to (27, 33) -language/expressions/class/fields-after-same-line-static-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 37) to (28, 38) -language/expressions/class/fields-after-same-line-static-method-computed-symbol-names.js (default) - Unexpected token = at (28, 37) to (28, 38) -language/expressions/class/fields-after-same-line-static-method-literal-names.js (strict mode) - Unexpected token = at (27, 33) to (27, 34) -language/expressions/class/fields-after-same-line-static-method-literal-names.js (default) - Unexpected token = at (27, 33) to (27, 34) -language/expressions/class/fields-after-same-line-static-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 42) to (25, 43) -language/expressions/class/fields-after-same-line-static-method-string-literal-names.js (default) - Unexpected token = at (25, 42) to (25, 43) -language/expressions/class/fields-asi-1.js (strict mode) - Unexpected token = at (12, 4) to (12, 5) -language/expressions/class/fields-asi-1.js (default) - Unexpected token = at (12, 4) to (12, 5) -language/expressions/class/fields-asi-2.js (strict mode) - Unexpected token = at (11, 4) to (11, 5) -language/expressions/class/fields-asi-2.js (default) - Unexpected token = at (11, 4) to (11, 5) -language/expressions/class/fields-asi-5.js (strict mode) - Unexpected token = at (15, 4) to (15, 5) -language/expressions/class/fields-asi-5.js (default) - Unexpected token = at (15, 4) to (15, 5) -language/expressions/class/fields-computed-name-toprimitive-symbol.js (strict mode) - Unexpected token = at (68, 9) to (68, 10) -language/expressions/class/fields-computed-name-toprimitive-symbol.js (default) - Unexpected token = at (68, 9) to (68, 10) -language/expressions/class/fields-computed-name-toprimitive.js (strict mode) - Unexpected token = at (65, 9) to (65, 10) -language/expressions/class/fields-computed-name-toprimitive.js (default) - Unexpected token = at (65, 9) to (65, 10) -language/expressions/class/fields-ctor-called-after-fields-init.js (strict mode) - Unexpected token = at (28, 6) to (28, 7) -language/expressions/class/fields-ctor-called-after-fields-init.js (default) - Unexpected token = at (28, 6) to (28, 7) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-supercall-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-supercall-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-supercall-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-supercall-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-supercall.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-supercall.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-superproperty-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-superproperty-1.js (default) - Unexpected token = at (30, 4) to (30, 5) 
-language/expressions/class/fields-derived-cls-direct-eval-err-contains-superproperty-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-direct-eval-err-contains-superproperty-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-supercall-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-supercall-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-supercall-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-supercall-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-supercall.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-supercall.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-superproperty-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-superproperty-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-superproperty-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-derived-cls-indirect-eval-err-contains-superproperty-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/expressions/class/fields-direct-eval-err-contains-arguments.js (strict mode) - Unexpected token = at (29, 4) to (29, 5) -language/expressions/class/fields-direct-eval-err-contains-arguments.js (default) - Unexpected token = at (29, 4) to (29, 5) -language/expressions/class/fields-direct-eval-err-contains-newtarget.js (strict mode) - Unexpected token = at (28, 4) to (28, 5) -language/expressions/class/fields-direct-eval-err-contains-newtarget.js (default) - Unexpected token = at (28, 4) to (28, 5) -language/expressions/class/fields-evaluation-error-computed-name-referenceerror.js (strict mode) - Unexpected token = at (52, 12) to (52, 13) -language/expressions/class/fields-evaluation-error-computed-name-referenceerror.js (default) - Unexpected token = at (52, 12) to (52, 13) -language/expressions/class/fields-evaluation-error-computed-name-toprimitive-err.js (strict mode) - Unexpected token } at (55, 2) to (55, 3) -language/expressions/class/fields-evaluation-error-computed-name-toprimitive-err.js (default) - Unexpected token } at (55, 2) to (55, 3) -language/expressions/class/fields-evaluation-error-computed-name-tostring-err.js (strict mode) - Unexpected token } at (55, 2) to (55, 3) -language/expressions/class/fields-evaluation-error-computed-name-tostring-err.js (default) - Unexpected token } at (55, 2) to (55, 3) -language/expressions/class/fields-evaluation-error-computed-name-valueof-err.js (strict mode) - Unexpected token } at (56, 2) to (56, 3) -language/expressions/class/fields-evaluation-error-computed-name-valueof-err.js (default) - Unexpected token } at (56, 2) to (56, 3) -language/expressions/class/fields-indirect-eval-err-contains-arguments.js (strict mode) - Unexpected token = at (29, 4) to (29, 5) 
-language/expressions/class/fields-indirect-eval-err-contains-arguments.js (default) - Unexpected token = at (29, 4) to (29, 5) -language/expressions/class/fields-indirect-eval-err-contains-newtarget.js (strict mode) - Unexpected token = at (28, 4) to (28, 5) -language/expressions/class/fields-indirect-eval-err-contains-newtarget.js (default) - Unexpected token = at (28, 4) to (28, 5) -language/expressions/class/fields-init-err-evaluation.js (strict mode) - Unexpected token = at (27, 4) to (27, 5) -language/expressions/class/fields-init-err-evaluation.js (default) - Unexpected token = at (27, 4) to (27, 5) -language/expressions/class/fields-init-value-defined-after-class.js (strict mode) - Unexpected token = at (26, 6) to (26, 7) -language/expressions/class/fields-init-value-defined-after-class.js (default) - Unexpected token = at (26, 6) to (26, 7) -language/expressions/class/fields-init-value-incremental.js (strict mode) - Unexpected token = at (35, 8) to (35, 9) -language/expressions/class/fields-init-value-incremental.js (default) - Unexpected token = at (35, 8) to (35, 9) -language/expressions/class/fields-literal-name-static-private-fields-forbidden.js (strict mode) - Missing parse error -language/expressions/class/fields-literal-name-static-private-fields-forbidden.js (default) - Missing parse error -language/expressions/class/fields-literal-name-static-public-fields-forbidden.js (strict mode) - Missing parse error -language/expressions/class/fields-literal-name-static-public-fields-forbidden.js (default) - Missing parse error -language/expressions/class/fields-multiple-definitions-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-multiple-definitions-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-multiple-definitions-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 6) to (28, 7) -language/expressions/class/fields-multiple-definitions-computed-symbol-names.js (default) - Unexpected token = at (28, 6) to (28, 7) -language/expressions/class/fields-multiple-definitions-literal-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-multiple-definitions-literal-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-multiple-definitions-private-names.js (strict mode) - Unexpected token = at (28, 6) to (28, 7) -language/expressions/class/fields-multiple-definitions-private-names.js (default) - Unexpected token = at (28, 6) to (28, 7) -language/expressions/class/fields-multiple-definitions-string-literal-names.js (strict mode) - Unexpected token = at (25, 6) to (25, 7) -language/expressions/class/fields-multiple-definitions-string-literal-names.js (default) - Unexpected token = at (25, 6) to (25, 7) -language/expressions/class/fields-multiple-stacked-definitions-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-multiple-stacked-definitions-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-multiple-stacked-definitions-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-multiple-stacked-definitions-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-multiple-stacked-definitions-literal-names.js (strict mode) - Unexpected token = 
at (27, 7) to (27, 8) -language/expressions/class/fields-multiple-stacked-definitions-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-multiple-stacked-definitions-private-names.js (strict mode) - Unexpected identifier at (29, 2) to (29, 5) -language/expressions/class/fields-multiple-stacked-definitions-private-names.js (default) - Unexpected identifier at (29, 2) to (29, 5) -language/expressions/class/fields-multiple-stacked-definitions-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-multiple-stacked-definitions-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-new-no-sc-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-new-no-sc-line-method-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-new-no-sc-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-new-no-sc-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-new-no-sc-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-new-no-sc-line-method-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-new-no-sc-line-method-private-names.js (strict mode) - Unexpected identifier at (29, 2) to (29, 3) -language/expressions/class/fields-new-no-sc-line-method-private-names.js (default) - Unexpected identifier at (29, 2) to (29, 3) -language/expressions/class/fields-new-no-sc-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-new-no-sc-line-method-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-new-sc-line-gen-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-new-sc-line-gen-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-new-sc-line-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-new-sc-line-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-new-sc-line-gen-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-new-sc-line-gen-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-new-sc-line-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-new-sc-line-gen-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-new-sc-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-new-sc-line-method-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-new-sc-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-new-sc-line-method-computed-symbol-names.js 
(default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-new-sc-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-new-sc-line-method-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-new-sc-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-new-sc-line-method-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-privatename-constructor-err.js (strict mode) - Missing parse error -language/expressions/class/fields-privatename-constructor-err.js (default) - Missing parse error -language/expressions/class/fields-redeclaration-symbol.js (strict mode) - Unexpected token = at (38, 6) to (38, 7) -language/expressions/class/fields-redeclaration-symbol.js (default) - Unexpected token = at (38, 6) to (38, 7) -language/expressions/class/fields-redeclaration.js (strict mode) - Unexpected token = at (37, 4) to (37, 5) -language/expressions/class/fields-redeclaration.js (default) - Unexpected token = at (37, 4) to (37, 5) -language/expressions/class/fields-regular-definitions-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-regular-definitions-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-regular-definitions-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-regular-definitions-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-regular-definitions-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-regular-definitions-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-regular-definitions-private-names.js (strict mode) - Unexpected identifier at (28, 0) to (28, 1) -language/expressions/class/fields-regular-definitions-private-names.js (default) - Unexpected identifier at (28, 0) to (28, 1) -language/expressions/class/fields-regular-definitions-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-regular-definitions-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-run-once-on-double-super.js (strict mode) - Unexpected token = at (27, 8) to (27, 9) -language/expressions/class/fields-run-once-on-double-super.js (default) - Unexpected token = at (27, 8) to (27, 9) -language/expressions/class/fields-same-line-async-gen-computed-names.js (strict mode) - Unexpected token = at (27, 32) to (27, 33) -language/expressions/class/fields-same-line-async-gen-computed-names.js (default) - Unexpected token = at (27, 32) to (27, 33) -language/expressions/class/fields-same-line-async-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 37) to (28, 38) -language/expressions/class/fields-same-line-async-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 37) to (28, 38) -language/expressions/class/fields-same-line-async-gen-literal-names.js (strict mode) - Unexpected token = at (27, 33) to (27, 34) -language/expressions/class/fields-same-line-async-gen-literal-names.js (default) - Unexpected token = at (27, 33) to 
(27, 34) -language/expressions/class/fields-same-line-async-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 42) to (25, 43) -language/expressions/class/fields-same-line-async-gen-string-literal-names.js (default) - Unexpected token = at (25, 42) to (25, 43) -language/expressions/class/fields-same-line-async-method-computed-names.js (strict mode) - Unexpected token = at (27, 31) to (27, 32) -language/expressions/class/fields-same-line-async-method-computed-names.js (default) - Unexpected token = at (27, 31) to (27, 32) -language/expressions/class/fields-same-line-async-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 36) to (28, 37) -language/expressions/class/fields-same-line-async-method-computed-symbol-names.js (default) - Unexpected token = at (28, 36) to (28, 37) -language/expressions/class/fields-same-line-async-method-literal-names.js (strict mode) - Unexpected token = at (27, 32) to (27, 33) -language/expressions/class/fields-same-line-async-method-literal-names.js (default) - Unexpected token = at (27, 32) to (27, 33) -language/expressions/class/fields-same-line-async-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 41) to (25, 42) -language/expressions/class/fields-same-line-async-method-string-literal-names.js (default) - Unexpected token = at (25, 41) to (25, 42) -language/expressions/class/fields-same-line-gen-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-same-line-gen-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-same-line-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-same-line-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-same-line-gen-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-same-line-gen-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-same-line-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-same-line-gen-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-same-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-same-line-method-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/expressions/class/fields-same-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-same-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/expressions/class/fields-same-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-same-line-method-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/expressions/class/fields-same-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-same-line-method-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/expressions/class/fields-string-name-propname-constructor.js (strict mode) - Missing parse error 
-language/expressions/class/fields-string-name-propname-constructor.js (default) - Missing parse error -language/expressions/class/fields-wrapped-in-sc-computed-names.js (strict mode) - Unexpected token = at (28, 12) to (28, 13) -language/expressions/class/fields-wrapped-in-sc-computed-names.js (default) - Unexpected token = at (28, 12) to (28, 13) -language/expressions/class/fields-wrapped-in-sc-computed-symbol-names.js (strict mode) - Unexpected token = at (29, 17) to (29, 18) -language/expressions/class/fields-wrapped-in-sc-computed-symbol-names.js (default) - Unexpected token = at (29, 17) to (29, 18) -language/expressions/class/fields-wrapped-in-sc-literal-names.js (strict mode) - Unexpected token = at (28, 13) to (28, 14) -language/expressions/class/fields-wrapped-in-sc-literal-names.js (default) - Unexpected token = at (28, 13) to (28, 14) -language/expressions/class/fields-wrapped-in-sc-string-literal-names.js (strict mode) - Unexpected token = at (26, 22) to (26, 23) -language/expressions/class/fields-wrapped-in-sc-string-literal-names.js (default) - Unexpected token = at (26, 22) to (26, 23) -language/expressions/division/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/division/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/division/bigint-arithmetic.js (strict mode) - Unexpected token ILLEGAL at (14, 8) to (14, 27) -language/expressions/division/bigint-arithmetic.js (default) - Unexpected token ILLEGAL at (14, 8) to (14, 27) -language/expressions/division/bigint-complex-infinity.js (strict mode) - Unexpected token ILLEGAL at (24, 2) to (24, 4) -language/expressions/division/bigint-complex-infinity.js (default) - Unexpected token ILLEGAL at (24, 2) to (24, 4) -language/expressions/division/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/division/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/division/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/division/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/division/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/division/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/does-not-equals/bigint-and-bigint.js (strict mode) - Unexpected token ILLEGAL at (19, 17) to (19, 19) -language/expressions/does-not-equals/bigint-and-bigint.js (default) - Unexpected token ILLEGAL at (19, 17) to (19, 19) -language/expressions/does-not-equals/bigint-and-boolean.js (strict mode) - Unexpected token ILLEGAL at (18, 18) to (18, 20) -language/expressions/does-not-equals/bigint-and-boolean.js (default) - Unexpected token ILLEGAL at (18, 18) to (18, 20) -language/expressions/does-not-equals/bigint-and-incomparable-primitive.js (strict mode) - Unexpected token ILLEGAL at (16, 17) to (16, 19) -language/expressions/does-not-equals/bigint-and-incomparable-primitive.js (default) - Unexpected token ILLEGAL at (16, 17) to (16, 19) -language/expressions/does-not-equals/bigint-and-non-finite.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/does-not-equals/bigint-and-non-finite.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) 
-language/expressions/does-not-equals/bigint-and-number-extremes.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/does-not-equals/bigint-and-number-extremes.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/does-not-equals/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/does-not-equals/bigint-and-number.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/does-not-equals/bigint-and-object.js (strict mode) - Unexpected token ILLEGAL at (25, 17) to (25, 19) -language/expressions/does-not-equals/bigint-and-object.js (default) - Unexpected token ILLEGAL at (25, 17) to (25, 19) -language/expressions/does-not-equals/bigint-and-string.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/does-not-equals/bigint-and-string.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/equals/bigint-and-bigint.js (strict mode) - Unexpected token ILLEGAL at (19, 17) to (19, 19) -language/expressions/equals/bigint-and-bigint.js (default) - Unexpected token ILLEGAL at (19, 17) to (19, 19) -language/expressions/equals/bigint-and-boolean.js (strict mode) - Unexpected token ILLEGAL at (18, 18) to (18, 20) -language/expressions/equals/bigint-and-boolean.js (default) - Unexpected token ILLEGAL at (18, 18) to (18, 20) -language/expressions/equals/bigint-and-incomparable-primitive.js (strict mode) - Unexpected token ILLEGAL at (15, 17) to (15, 19) -language/expressions/equals/bigint-and-incomparable-primitive.js (default) - Unexpected token ILLEGAL at (15, 17) to (15, 19) -language/expressions/equals/bigint-and-non-finite.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/equals/bigint-and-non-finite.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/equals/bigint-and-number-extremes.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/equals/bigint-and-number-extremes.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/equals/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/equals/bigint-and-number.js (default) - Unexpected token ILLEGAL at (14, 17) to (14, 19) -language/expressions/equals/bigint-and-object.js (strict mode) - Unexpected token ILLEGAL at (25, 17) to (25, 19) -language/expressions/equals/bigint-and-object.js (default) - Unexpected token ILLEGAL at (25, 17) to (25, 19) -language/expressions/equals/bigint-and-string.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/equals/bigint-and-string.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/exponentiation/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/exponentiation/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/exponentiation/bigint-arithmetic.js (strict mode) - Unexpected token ILLEGAL at (14, 9) to (14, 15) -language/expressions/exponentiation/bigint-arithmetic.js (default) - Unexpected token ILLEGAL at (14, 9) to (14, 15) -language/expressions/exponentiation/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 17) to (10, 19) -language/expressions/exponentiation/bigint-errors.js (default) - Unexpected token ILLEGAL 
at (10, 17) to (10, 19) -language/expressions/exponentiation/bigint-exp-operator-negative-throws.js (strict mode) - Unexpected token ILLEGAL at (23, 2) to (23, 4) -language/expressions/exponentiation/bigint-exp-operator-negative-throws.js (default) - Unexpected token ILLEGAL at (23, 2) to (23, 4) -language/expressions/exponentiation/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/exponentiation/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/exponentiation/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/exponentiation/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/exponentiation/bigint-zero-base-zero-exponent.js (strict mode) - Unexpected token ILLEGAL at (22, 17) to (22, 19) -language/expressions/exponentiation/bigint-zero-base-zero-exponent.js (default) - Unexpected token ILLEGAL at (22, 17) to (22, 19) +language/expressions/class/class-name-ident-await-escaped-module.js (strict mode) + Missing parse error +language/expressions/class/class-name-ident-await-escaped-module.js (default) + Missing parse error +language/expressions/class/class-name-ident-await-module.js (strict mode) + Missing parse error +language/expressions/class/class-name-ident-await-module.js (default) + Missing parse error +language/expressions/class/dstr/async-private-gen-meth-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-init-iter-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-name-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. 
at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. 
at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. 
at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-elision.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-empty.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. 
at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. 
at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. 
at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. 
at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. 
at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. 
at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-empty.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. 
at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-id.js (default) + Classes may not have private methods. 
at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-init-iter-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-name-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. 
at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. 
at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-elision.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-empty.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id.js (default) + Classes may not have private methods. 
at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (67, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (65, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. 
at (63, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (60, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (62, 2) to (71, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (61, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (61, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. 
at (55, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (61, 2) to (68, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (63, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (67, 2) to (72, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (59, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. 
at (66, 2) to (69, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (87, 2) to (91, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (58, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (58, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (55, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. 
at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. 
at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-empty.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (60, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (57, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. 
at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (59, 2) to (79, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (54, 2) to (61, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (54, 2) to (60, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (57, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (55, 2) to (64, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (49, 2) to (59, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (50, 2) to (67, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (48, 2) to (66, 3) +language/expressions/class/dstr/private-gen-meth-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-ary-init-iter-close.js (default) + Classes may not have private methods. 
at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-ary-name-iter-val.js (default) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (86, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (86, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (84, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (84, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (79, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (79, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (81, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (81, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. 
at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (81, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (81, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (80, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (80, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. 
at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (93, 2) to (97, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-elision.js (default) + Classes may not have private methods. at (93, 2) to (97, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-empty.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (99, 2) to (104, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (99, 2) to (104, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (106, 2) to (110, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (106, 2) to (110, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (90, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (90, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. 
at (77, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (77, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (86, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (86, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (84, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. 
at (84, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (79, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (79, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (81, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (81, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (81, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (81, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. 
at (74, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (80, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (80, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. 
at (93, 2) to (97, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (93, 2) to (97, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (99, 2) to (104, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (99, 2) to (104, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (106, 2) to (110, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (106, 2) to (110, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (90, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (90, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (77, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (77, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. 
at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (79, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (79, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (78, 2) to (98, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. 
at (78, 2) to (98, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (73, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (73, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (68, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (68, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (69, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (69, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (67, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (67, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-empty.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. 
at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (79, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (79, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (78, 2) to (98, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (78, 2) to (98, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (73, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (73, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. 
at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (73, 2) to (79, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (68, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (68, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (69, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (69, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (67, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (67, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-init-iter-close.js (default) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (89, 2) to (92, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-name-iter-val.js (default) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (79, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (86, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. 
at (86, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (84, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (84, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (79, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (79, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (81, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (81, 2) to (90, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (80, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. 
at (81, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (81, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (80, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (80, 2) to (87, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (86, 2) to (91, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (78, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. 
at (85, 2) to (88, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (93, 2) to (97, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-elision.js (default) + Classes may not have private methods. at (93, 2) to (97, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-empty.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (99, 2) to (104, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (99, 2) to (104, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (106, 2) to (110, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (106, 2) to (110, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (90, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (90, 2) to (93, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (80, 2) to (89, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (77, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (77, 2) to (85, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (80, 2) to (89, 3)
+language/expressions/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (80, 2) to (89, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (89, 2) to (92, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (89, 2) to (92, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (89, 2) to (92, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (89, 2) to (92, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (91, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (86, 2) to (91, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (90, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (86, 2) to (90, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (87, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (84, 2) to (87, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (82, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (87, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (79, 2) to (87, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (81, 2) to (90, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (81, 2) to (90, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (80, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (81, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (81, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (78, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (74, 2) to (78, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (87, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (80, 2) to (87, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (91, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (86, 2) to (91, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (93, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (78, 2) to (93, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (93, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (78, 2) to (93, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (85, 2) to (88, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (85, 2) to (88, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (93, 2) to (97, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (93, 2) to (97, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (99, 2) to (104, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (99, 2) to (104, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (106, 2) to (110, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (106, 2) to (110, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (90, 2) to (93, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (90, 2) to (93, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (89, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (80, 2) to (89, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (77, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (89, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (80, 2) to (89, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (79, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (98, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (78, 2) to (98, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (73, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (68, 2) to (78, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (68, 2) to (78, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (69, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (69, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (67, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (67, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (79, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (73, 2) to (76, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (98, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (78, 2) to (98, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (73, 2) to (80, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (73, 2) to (79, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (76, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (68, 2) to (78, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (68, 2) to (78, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (69, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (69, 2) to (86, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (67, 2) to (85, 3)
+language/expressions/class/dstr/private-gen-meth-static-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (67, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (83, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (81, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (81, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (76, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (71, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (90, 2) to (94, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (90, 2) to (94, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (96, 2) to (101, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (96, 2) to (101, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (103, 2) to (107, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (103, 2) to (107, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (74, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (71, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (83, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (81, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (81, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (76, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (71, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (90, 2) to (94, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (90, 2) to (94, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (96, 2) to (101, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (96, 2) to (101, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (103, 2) to (107, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (103, 2) to (107, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (74, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (71, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (76, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (95, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (75, 2) to (95, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (70, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (65, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (66, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (64, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-dflt-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (64, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (76, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (76, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (95, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (75, 2) to (95, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (70, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (70, 2) to (76, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (73, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (71, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (65, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (66, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (64, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (64, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-static-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-static-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-static-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (86, 2) to (89, 3)
+language/expressions/class/dstr/private-meth-static-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-static-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (83, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (81, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (81, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (76, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (71, 2) to (75, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (79, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (83, 2) to (88, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (75, 2) to (80, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (75, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (90, 2) to (94, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (90, 2) to (94, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (74, 2) to (77, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (96, 2) to (101, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (96, 2) to (101, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (103, 2) to (107, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (103, 2) to (107, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (77, 2) to (86, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (74, 2) to (82, 3)
+language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods.
at (74, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (71, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (71, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/expressions/class/dstr/private-meth-static-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (77, 2) to (86, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (86, 2) to (89, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (86, 2) to (89, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (86, 2) to (89, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (86, 2) to (89, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (83, 2) to (88, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (83, 2) to (88, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (83, 2) to (87, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (83, 2) to (87, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (81, 2) to (84, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (81, 2) to (84, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. 
at (79, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (79, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (84, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (76, 2) to (84, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (78, 2) to (87, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (78, 2) to (87, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (77, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (77, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (71, 2) to (75, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. 
at (71, 2) to (75, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (77, 2) to (84, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (77, 2) to (84, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (79, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (83, 2) to (88, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (83, 2) to (88, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (90, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (75, 2) to (90, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (75, 2) to (90, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (75, 2) to (90, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (82, 2) to (85, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (90, 2) to (94, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. 
at (90, 2) to (94, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (96, 2) to (101, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (96, 2) to (101, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (103, 2) to (107, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (103, 2) to (107, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (87, 2) to (90, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (87, 2) to (90, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (77, 2) to (86, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (74, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (74, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (71, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (71, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/expressions/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (77, 2) to (86, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. 
at (75, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. 
at (75, 2) to (95, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (75, 2) to (95, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (70, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (70, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (65, 2) to (75, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (65, 2) to (75, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (66, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (66, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (64, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (64, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-empty.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. 
at (76, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (75, 2) to (95, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (75, 2) to (95, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. 
at (70, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (70, 2) to (77, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (70, 2) to (76, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (73, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (71, 2) to (80, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (65, 2) to (75, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (65, 2) to (75, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (66, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (66, 2) to (83, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (64, 2) to (82, 3) +language/expressions/class/dstr/private-meth-static-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (64, 2) to (82, 3) +language/expressions/class/elements/after-same-line-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 22) to (26, 48) +language/expressions/class/elements/after-same-line-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 22) to (26, 48) +language/expressions/class/elements/after-same-line-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. 
at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 22) to (62, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 22) to (62, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 22) to (62, 3) +language/expressions/class/elements/after-same-line-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 22) to (62, 3) +language/expressions/class/elements/after-same-line-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 21) to (26, 47) +language/expressions/class/elements/after-same-line-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 21) to (26, 47) +language/expressions/class/elements/after-same-line-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. 
at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/expressions/class/elements/after-same-line-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 21) to (62, 3) +language/expressions/class/elements/after-same-line-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 21) to (62, 3) +language/expressions/class/elements/after-same-line-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 21) to (62, 3) +language/expressions/class/elements/after-same-line-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. 
at (60, 21) to (62, 3) +language/expressions/class/elements/after-same-line-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-async-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 35) to (26, 61) +language/expressions/class/elements/after-same-line-static-async-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 35) to (26, 61) +language/expressions/class/elements/after-same-line-static-async-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 35) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 35) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 35) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 35) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-async-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-async-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-async-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-async-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 34) to (26, 60) +language/expressions/class/elements/after-same-line-static-async-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 34) to (26, 60) +language/expressions/class/elements/after-same-line-static-async-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. 
at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 34) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 34) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 34) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 34) to (62, 3) +language/expressions/class/elements/after-same-line-static-async-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-async-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-async-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-async-method-static-private-methods.js (default) + Classes may not have private methods. 
at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 29) to (26, 55) +language/expressions/class/elements/after-same-line-static-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 29) to (26, 55) +language/expressions/class/elements/after-same-line-static-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (60, 29) to (62, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 29) to (62, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 29) to (62, 3) +language/expressions/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 29) to (62, 3) +language/expressions/class/elements/after-same-line-static-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 28) to (26, 54) +language/expressions/class/elements/after-same-line-static-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 28) to (26, 54) +language/expressions/class/elements/after-same-line-static-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. 
at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/after-same-line-static-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/after-same-line-static-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/after-same-line-static-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/arrow-fnc-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/arrow-fnc-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-catch.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-catch.js (default) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-async-iterator.js (strict mode) + Classes may not have private methods. 
at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (36, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (36, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-promise-reject-next.js (default) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-spread-arr-multiple.js (strict mode) + Classes may not have private methods. at (36, 4) to (39, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-spread-arr-multiple.js (default) + Classes may not have private methods. at (36, 4) to (39, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-spread-arr-single.js (strict mode) + Classes may not have private methods. at (34, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-spread-arr-single.js (default) + Classes may not have private methods. at (34, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-spread-obj.js (strict mode) + Classes may not have private methods. at (35, 4) to (42, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-spread-obj.js (default) + Classes may not have private methods. at (35, 4) to (42, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-async-next.js (strict mode) + Classes may not have private methods. at (139, 4) to (149, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-async-next.js (default) + Classes may not have private methods. at (139, 4) to (149, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-async-return.js (strict mode) + Classes may not have private methods. at (158, 4) to (163, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-async-return.js (default) + Classes may not have private methods. 
at (158, 4) to (163, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-async-throw.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-async-throw.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-expr-abrupt.js (strict mode) + Classes may not have private methods. at (39, 4) to (44, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-expr-abrupt.js (default) + Classes may not have private methods. at (39, 4) to (44, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-get-abrupt.js (strict mode) + Classes may not have private methods. at (59, 4) to (64, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-get-abrupt.js (default) + Classes may not have private methods. at (59, 4) to (64, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-number-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-object-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-string-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-null-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-null-sync-get-abrupt.js (default) + Classes may not have private methods. 
at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-abrupt.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-abrupt.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-boolean-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-boolean-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-null-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-null-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-number-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-number-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-string-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-string-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-symbol-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-symbol-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-undefined-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-undefined-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-undefined-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-async-undefined-sync-get-abrupt.js (default) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-get-abrupt.js (default) + Classes may not have private methods. 
at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-number-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-object-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-string-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-abrupt.js (strict mode) + Classes may not have private methods. at (52, 4) to (57, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-abrupt.js (default) + Classes may not have private methods. at (52, 4) to (57, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-boolean-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-boolean-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-null-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-null-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-number-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-number-throw.js (default) + Classes may not have private methods. 
at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-string-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-string-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-symbol-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-symbol-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-undefined-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-undefined-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-call-done-get-abrupt.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-call-done-get-abrupt.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-call-returns-abrupt.js (strict mode) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-call-returns-abrupt.js (default) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-call-value-get-abrupt.js (strict mode) + Classes may not have private methods. at (60, 4) to (65, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-call-value-get-abrupt.js (default) + Classes may not have private methods. at (60, 4) to (65, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-get-abrupt.js (strict mode) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-get-abrupt.js (default) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-non-object-ignores-then.js (strict mode) + Classes may not have private methods. at (70, 4) to (75, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-non-object-ignores-then.js (default) + Classes may not have private methods. at (70, 4) to (75, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-null-throw.js (strict mode) + Classes may not have private methods. 
at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-null-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-number-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-object-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-string-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-undefined-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-not-callable-undefined-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-get-abrupt.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-get-abrupt.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-boolean-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-boolean-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-null-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-null-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-number-fulfillpromise.js (strict mode) + Classes may not have private methods. 
at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-number-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-object-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-object-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-string-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-string-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-symbol-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-symbol-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-undefined-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-undefined-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-returns-abrupt.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-next-then-returns-abrupt.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-sync-next.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-sync-next.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-sync-return.js (strict mode) + Classes may not have private methods. at (138, 4) to (143, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-sync-return.js (default) + Classes may not have private methods. at (138, 4) to (143, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-sync-throw.js (strict mode) + Classes may not have private methods. at (136, 4) to (146, 5) +language/expressions/class/elements/async-gen-private-method-static/yield-star-sync-throw.js (default) + Classes may not have private methods. at (136, 4) to (146, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-catch.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-catch.js (default) + Classes may not have private methods. 
at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (36, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (36, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method/yield-promise-reject-next.js (default) + Classes may not have private methods. at (28, 4) to (32, 5) +language/expressions/class/elements/async-gen-private-method/yield-spread-arr-multiple.js (strict mode) + Classes may not have private methods. at (36, 4) to (39, 5) +language/expressions/class/elements/async-gen-private-method/yield-spread-arr-multiple.js (default) + Classes may not have private methods. at (36, 4) to (39, 5) +language/expressions/class/elements/async-gen-private-method/yield-spread-arr-single.js (strict mode) + Classes may not have private methods. at (34, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method/yield-spread-arr-single.js (default) + Classes may not have private methods. at (34, 4) to (37, 5) +language/expressions/class/elements/async-gen-private-method/yield-spread-obj.js (strict mode) + Classes may not have private methods. at (35, 4) to (42, 5) +language/expressions/class/elements/async-gen-private-method/yield-spread-obj.js (default) + Classes may not have private methods. at (35, 4) to (42, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-async-next.js (strict mode) + Classes may not have private methods. at (139, 4) to (149, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-async-next.js (default) + Classes may not have private methods. at (139, 4) to (149, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-async-return.js (strict mode) + Classes may not have private methods. at (158, 4) to (163, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-async-return.js (default) + Classes may not have private methods. 
at (158, 4) to (163, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-async-throw.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-async-throw.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-expr-abrupt.js (strict mode) + Classes may not have private methods. at (39, 4) to (44, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-expr-abrupt.js (default) + Classes may not have private methods. at (39, 4) to (44, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-get-abrupt.js (strict mode) + Classes may not have private methods. at (59, 4) to (64, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-get-abrupt.js (default) + Classes may not have private methods. at (59, 4) to (64, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-number-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-object-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-string-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-null-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-null-sync-get-abrupt.js (default) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-abrupt.js (strict mode) + Classes may not have private methods. 
at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-abrupt.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-boolean-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-boolean-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-null-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-null-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-number-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-number-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-string-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-string-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-symbol-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-symbol-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-undefined-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-returns-undefined-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-undefined-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-async-undefined-sync-get-abrupt.js (default) + Classes may not have private methods. at (63, 4) to (68, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-get-abrupt.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-boolean-throw.js (default) + Classes may not have private methods. 
at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-number-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-object-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-string-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-abrupt.js (strict mode) + Classes may not have private methods. at (52, 4) to (57, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-abrupt.js (default) + Classes may not have private methods. at (52, 4) to (57, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-boolean-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-boolean-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-null-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-null-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-number-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-number-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-string-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-string-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-symbol-throw.js (strict mode) + Classes may not have private methods. 
at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-symbol-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-undefined-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-undefined-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-call-done-get-abrupt.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-call-done-get-abrupt.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-call-returns-abrupt.js (strict mode) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-call-returns-abrupt.js (default) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-call-value-get-abrupt.js (strict mode) + Classes may not have private methods. at (60, 4) to (65, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-call-value-get-abrupt.js (default) + Classes may not have private methods. at (60, 4) to (65, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-get-abrupt.js (strict mode) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-get-abrupt.js (default) + Classes may not have private methods. at (50, 4) to (55, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-non-object-ignores-then.js (strict mode) + Classes may not have private methods. at (70, 4) to (75, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-non-object-ignores-then.js (default) + Classes may not have private methods. at (70, 4) to (75, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-null-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-null-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-number-throw.js (default) + Classes may not have private methods. 
at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-object-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-string-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-undefined-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-not-callable-undefined-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-get-abrupt.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-get-abrupt.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-boolean-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-boolean-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-null-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-null-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-number-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-number-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-object-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-object-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-string-fulfillpromise.js (strict mode) + Classes may not have private methods. 
at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-string-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-symbol-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-symbol-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-undefined-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-non-callable-undefined-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-returns-abrupt.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-next-then-returns-abrupt.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-sync-next.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-sync-next.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-sync-return.js (strict mode) + Classes may not have private methods. at (138, 4) to (143, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-sync-return.js (default) + Classes may not have private methods. at (138, 4) to (143, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-sync-throw.js (strict mode) + Classes may not have private methods. at (136, 4) to (146, 5) +language/expressions/class/elements/async-gen-private-method/yield-star-sync-throw.js (default) + Classes may not have private methods. at (136, 4) to (146, 5) +language/expressions/class/elements/comp-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/comp-name-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/equality-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/equality-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/fields-asi-same-line-2.js (strict mode) + Missing parse error +language/expressions/class/elements/fields-asi-same-line-2.js (default) + Missing parse error +language/expressions/class/elements/fields-multiple-definitions-static-private-methods-proxy.js (strict mode) + Classes may not have private methods. at (27, 2) to (29, 3) +language/expressions/class/elements/fields-multiple-definitions-static-private-methods-proxy.js (default) + Classes may not have private methods. at (27, 2) to (29, 3) +language/expressions/class/elements/gen-private-method-static/yield-spread-arr-multiple.js (strict mode) + Classes may not have private methods. 
at (35, 4) to (38, 5) +language/expressions/class/elements/gen-private-method-static/yield-spread-arr-multiple.js (default) + Classes may not have private methods. at (35, 4) to (38, 5) +language/expressions/class/elements/gen-private-method-static/yield-spread-arr-single.js (strict mode) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/gen-private-method-static/yield-spread-arr-single.js (default) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/gen-private-method-static/yield-spread-obj.js (strict mode) + Classes may not have private methods. at (34, 4) to (41, 5) +language/expressions/class/elements/gen-private-method-static/yield-spread-obj.js (default) + Classes may not have private methods. at (34, 4) to (41, 5) +language/expressions/class/elements/gen-private-method/yield-spread-arr-multiple.js (strict mode) + Classes may not have private methods. at (35, 4) to (38, 5) +language/expressions/class/elements/gen-private-method/yield-spread-arr-multiple.js (default) + Classes may not have private methods. at (35, 4) to (38, 5) +language/expressions/class/elements/gen-private-method/yield-spread-arr-single.js (strict mode) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/gen-private-method/yield-spread-arr-single.js (default) + Classes may not have private methods. at (32, 4) to (35, 5) +language/expressions/class/elements/gen-private-method/yield-spread-obj.js (strict mode) + Classes may not have private methods. at (34, 4) to (41, 5) +language/expressions/class/elements/gen-private-method/yield-spread-obj.js (default) + Classes may not have private methods. at (34, 4) to (41, 5) +language/expressions/class/elements/literal-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/literal-name-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/multiple-definitions-private-method-usage.js (strict mode) + Classes may not have private methods. at (28, 2) to (28, 28) +language/expressions/class/elements/multiple-definitions-private-method-usage.js (default) + Classes may not have private methods. at (28, 2) to (28, 28) +language/expressions/class/elements/multiple-definitions-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (61, 2) to (63, 3) +language/expressions/class/elements/multiple-definitions-rs-private-method-alt.js (default) + Classes may not have private methods. at (61, 2) to (63, 3) +language/expressions/class/elements/multiple-definitions-rs-private-method.js (strict mode) + Classes may not have private methods. at (61, 2) to (63, 3) +language/expressions/class/elements/multiple-definitions-rs-private-method.js (default) + Classes may not have private methods. at (61, 2) to (63, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. 
at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (65, 2) to (67, 3) +language/expressions/class/elements/multiple-definitions-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/multiple-definitions-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/multiple-definitions-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/multiple-definitions-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/multiple-definitions-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (33, 2) to (36, 3) +language/expressions/class/elements/multiple-definitions-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (33, 2) to (36, 3) +language/expressions/class/elements/multiple-definitions-static-private-methods.js (strict mode) + Classes may not have private methods. at (33, 2) to (35, 3) +language/expressions/class/elements/multiple-definitions-static-private-methods.js (default) + Classes may not have private methods. at (33, 2) to (35, 3) +language/expressions/class/elements/multiple-stacked-definitions-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/multiple-stacked-definitions-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/multiple-stacked-definitions-rs-private-method-alt.js (strict mode) + Classes may not have private methods. 
at (59, 2) to (61, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. 
at (60, 2) to (62, 3) +language/expressions/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/multiple-stacked-definitions-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (31, 2) to (34, 3) +language/expressions/class/elements/multiple-stacked-definitions-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (31, 2) to (34, 3) +language/expressions/class/elements/multiple-stacked-definitions-static-private-methods.js (strict mode) + Classes may not have private methods. at (31, 2) to (33, 3) +language/expressions/class/elements/multiple-stacked-definitions-static-private-methods.js (default) + Classes may not have private methods. at (31, 2) to (33, 3) +language/expressions/class/elements/new-no-sc-line-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/new-no-sc-line-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/new-no-sc-line-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-no-sc-line-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (30, 2) to (33, 3) +language/expressions/class/elements/new-no-sc-line-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (30, 2) to (33, 3) +language/expressions/class/elements/new-no-sc-line-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (30, 2) to (32, 3) +language/expressions/class/elements/new-no-sc-line-method-static-private-methods.js (default) + Classes may not have private methods. at (30, 2) to (32, 3) +language/expressions/class/elements/new-sc-line-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/new-sc-line-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/new-sc-line-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. 
at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (30, 2) to (33, 3) +language/expressions/class/elements/new-sc-line-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (30, 2) to (33, 3) +language/expressions/class/elements/new-sc-line-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (30, 2) to (32, 3) +language/expressions/class/elements/new-sc-line-gen-static-private-methods.js (default) + Classes may not have private methods. at (30, 2) to (32, 3) +language/expressions/class/elements/new-sc-line-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/new-sc-line-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/new-sc-line-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. 
at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/new-sc-line-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. 
at (30, 2) to (33, 3) +language/expressions/class/elements/new-sc-line-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (30, 2) to (33, 3) +language/expressions/class/elements/new-sc-line-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (30, 2) to (32, 3) +language/expressions/class/elements/new-sc-line-method-static-private-methods.js (default) + Classes may not have private methods. at (30, 2) to (32, 3) +language/expressions/class/elements/private-arrow-fnc-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/private-arrow-fnc-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/private-literal-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/private-literal-name-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/private-methods/prod-private-async-generator.js (strict mode) + Classes may not have private methods. at (89, 2) to (89, 29) +language/expressions/class/elements/private-methods/prod-private-async-generator.js (default) + Classes may not have private methods. at (89, 2) to (89, 29) +language/expressions/class/elements/private-methods/prod-private-async-method.js (strict mode) + Classes may not have private methods. at (89, 2) to (89, 27) +language/expressions/class/elements/private-methods/prod-private-async-method.js (default) + Classes may not have private methods. at (89, 2) to (89, 27) +language/expressions/class/elements/private-methods/prod-private-generator.js (strict mode) + Classes may not have private methods. at (87, 2) to (87, 23) +language/expressions/class/elements/private-methods/prod-private-generator.js (default) + Classes may not have private methods. at (87, 2) to (87, 23) +language/expressions/class/elements/private-methods/prod-private-method-initialize-order.js (strict mode) + Classes may not have private methods. at (104, 2) to (104, 21) +language/expressions/class/elements/private-methods/prod-private-method-initialize-order.js (default) + Classes may not have private methods. at (104, 2) to (104, 21) +language/expressions/class/elements/private-methods/prod-private-method.js (strict mode) + Classes may not have private methods. at (87, 2) to (87, 21) +language/expressions/class/elements/private-methods/prod-private-method.js (default) + Classes may not have private methods. at (87, 2) to (87, 21) +language/expressions/class/elements/private-ternary-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/private-ternary-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/private-typeof-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/private-typeof-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/regular-definitions-private-method-usage.js (strict mode) + Classes may not have private methods. at (25, 2) to (25, 28) +language/expressions/class/elements/regular-definitions-private-method-usage.js (default) + Classes may not have private methods. at (25, 2) to (25, 28) +language/expressions/class/elements/regular-definitions-rs-private-method-alt.js (strict mode) + Classes may not have private methods. 
at (58, 2) to (60, 3) +language/expressions/class/elements/regular-definitions-rs-private-method-alt.js (default) + Classes may not have private methods. at (58, 2) to (60, 3) +language/expressions/class/elements/regular-definitions-rs-private-method.js (strict mode) + Classes may not have private methods. at (58, 2) to (60, 3) +language/expressions/class/elements/regular-definitions-rs-private-method.js (default) + Classes may not have private methods. at (58, 2) to (60, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (62, 2) to (64, 3) +language/expressions/class/elements/regular-definitions-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/regular-definitions-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/regular-definitions-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/regular-definitions-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. 
at (59, 2) to (61, 3) +language/expressions/class/elements/regular-definitions-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (28, 2) to (31, 3) +language/expressions/class/elements/regular-definitions-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (28, 2) to (31, 3) +language/expressions/class/elements/regular-definitions-static-private-methods.js (strict mode) + Classes may not have private methods. at (28, 2) to (30, 3) +language/expressions/class/elements/regular-definitions-static-private-methods.js (default) + Classes may not have private methods. at (28, 2) to (30, 3) +language/expressions/class/elements/same-line-async-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 28) to (26, 54) +language/expressions/class/elements/same-line-async-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 28) to (26, 54) +language/expressions/class/elements/same-line-async-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. 
at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/same-line-async-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 28) to (62, 3) +language/expressions/class/elements/same-line-async-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-async-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-async-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-async-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-async-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 27) to (26, 53) +language/expressions/class/elements/same-line-async-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 27) to (26, 53) +language/expressions/class/elements/same-line-async-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. 
at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 27) to (65, 3) +language/expressions/class/elements/same-line-async-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 27) to (62, 3) +language/expressions/class/elements/same-line-async-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 27) to (62, 3) +language/expressions/class/elements/same-line-async-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 27) to (62, 3) +language/expressions/class/elements/same-line-async-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 27) to (62, 3) +language/expressions/class/elements/same-line-async-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-async-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-async-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-async-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/same-line-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/same-line-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-gen-rs-private-method-alt.js (default) + Classes may not have private methods. 
at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. 
at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/same-line-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/expressions/class/elements/same-line-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. 
at (63, 2) to (65, 3) +language/expressions/class/elements/same-line-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/expressions/class/elements/same-line-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/expressions/class/elements/same-line-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/same-line-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/expressions/class/elements/static-comp-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/static-comp-name-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/static-literal-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/static-literal-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/static-private-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/static-private-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/static-private-methods-proxy-default-handler-throws.js (strict mode) + Classes may not have private methods. at (18, 2) to (20, 3) +language/expressions/class/elements/static-private-methods-proxy-default-handler-throws.js (default) + Classes may not have private methods. 
at (18, 2) to (20, 3) +language/expressions/class/elements/static-string-literal-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/static-string-literal-name-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/string-literal-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/string-literal-name-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-ctor-super-no-heritage.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-ctor-super-no-heritage.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwj-error.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwj-error.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwnj-error.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwnj-error.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwj-error.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwj-error.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwnj-error.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwnj-error.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-gen.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-gen.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-meth.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-meth.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-gen.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-gen.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-get.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-get.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-set.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-special-meth-ctor-set.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-async-gen-meth-prototype.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-async-gen-meth-prototype.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-async-meth-prototype.js (strict mode) + Missing 
parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-async-meth-prototype.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-gen-meth-prototype.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-gen-meth-prototype.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-get-meth-prototype.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-get-meth-prototype.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-meth-prototype.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-meth-prototype.js (default) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-set-meth-prototype.js (strict mode) + Missing parse error +language/expressions/class/elements/syntax/early-errors/grammar-static-set-meth-prototype.js (default) + Missing parse error +language/expressions/class/elements/syntax/valid/grammar-privatemeth-duplicate-meth-nestedclassmeth.js (strict mode) + Classes may not have private methods. at (21, 6) to (21, 13) +language/expressions/class/elements/syntax/valid/grammar-privatemeth-duplicate-meth-nestedclassmeth.js (default) + Classes may not have private methods. at (21, 6) to (21, 13) +language/expressions/class/elements/syntax/valid/grammar-static-private-async-gen-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 32) +language/expressions/class/elements/syntax/valid/grammar-static-private-async-gen-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 32) +language/expressions/class/elements/syntax/valid/grammar-static-private-async-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 30) +language/expressions/class/elements/syntax/valid/grammar-static-private-async-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 30) +language/expressions/class/elements/syntax/valid/grammar-static-private-gen-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 26) +language/expressions/class/elements/syntax/valid/grammar-static-private-gen-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 26) +language/expressions/class/elements/syntax/valid/grammar-static-private-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 24) +language/expressions/class/elements/syntax/valid/grammar-static-private-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 24) +language/expressions/class/elements/ternary-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/ternary-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/typeof-init-err-contains-arguments.js (strict mode) + Missing parse error +language/expressions/class/elements/typeof-init-err-contains-arguments.js (default) + Missing parse error +language/expressions/class/elements/wrapped-in-sc-private-method-usage.js (strict mode) + Classes may not have private methods. 
at (26, 8) to (26, 34) +language/expressions/class/elements/wrapped-in-sc-private-method-usage.js (default) + Classes may not have private methods. at (26, 8) to (26, 34) +language/expressions/class/elements/wrapped-in-sc-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/wrapped-in-sc-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/wrapped-in-sc-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/wrapped-in-sc-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 8) to (62, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 8) to (62, 3) +language/expressions/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. 
at (60, 8) to (62, 3)
+language/expressions/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 8) to (62, 3)
+language/expressions/class/elements/wrapped-in-sc-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/expressions/class/elements/wrapped-in-sc-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/expressions/class/elements/wrapped-in-sc-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/expressions/class/elements/wrapped-in-sc-static-private-methods.js (default)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/expressions/dynamic-import/assignment-expression/import-meta.js (strict mode)
+ Unexpected token . at (22, 23) to (22, 24)
+language/expressions/dynamic-import/assignment-expression/import-meta.js (default)
+ Unexpected token . at (22, 23) to (22, 24)
language/expressions/function/use-strict-with-non-simple-param.js (strict mode)
  Missing parse error
language/expressions/function/use-strict-with-non-simple-param.js (default)
@@ -2578,150 +5720,32 @@ language/expressions/generators/use-strict-with-non-simple-param.js (strict mode
  Missing parse error
language/expressions/generators/use-strict-with-non-simple-param.js (default)
  Missing parse error
-language/expressions/greater-than-or-equal/bigint-and-bigint.js (strict mode)
- Unexpected token ILLEGAL at (24, 17) to (24, 19)
-language/expressions/greater-than-or-equal/bigint-and-bigint.js (default)
- Unexpected token ILLEGAL at (24, 17) to (24, 19)
-language/expressions/greater-than-or-equal/bigint-and-non-finite.js (strict mode)
- Unexpected token ILLEGAL at (22, 17) to (22, 19)
-language/expressions/greater-than-or-equal/bigint-and-non-finite.js (default)
- Unexpected token ILLEGAL at (22, 17) to (22, 19)
-language/expressions/greater-than-or-equal/bigint-and-number-extremes.js (strict mode)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than-or-equal/bigint-and-number-extremes.js (default)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than-or-equal/bigint-and-number.js (strict mode)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than-or-equal/bigint-and-number.js (default)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than/bigint-and-bigint.js (strict mode)
- Unexpected token ILLEGAL at (24, 17) to (24, 19)
-language/expressions/greater-than/bigint-and-bigint.js (default)
- Unexpected token ILLEGAL at (24, 17) to (24, 19)
-language/expressions/greater-than/bigint-and-non-finite.js (strict mode)
- Unexpected token ILLEGAL at (22, 17) to (22, 19)
-language/expressions/greater-than/bigint-and-non-finite.js (default)
- Unexpected token ILLEGAL at (22, 17) to (22, 19)
-language/expressions/greater-than/bigint-and-number-extremes.js (strict mode)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than/bigint-and-number-extremes.js (default)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than/bigint-and-number.js (strict mode)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/greater-than/bigint-and-number.js (default)
- Unexpected token ILLEGAL at (23, 17) to (23, 19)
-language/expressions/left-shift/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/left-shift/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/left-shift/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 17) to (10, 19) -language/expressions/left-shift/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 17) to (10, 19) -language/expressions/left-shift/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (23, 24) to (23, 30) -language/expressions/left-shift/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (23, 24) to (23, 30) -language/expressions/left-shift/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/left-shift/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/left-shift/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/left-shift/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/left-shift/bigint.js (strict mode) - Unexpected token ILLEGAL at (21, 17) to (21, 19) -language/expressions/left-shift/bigint.js (default) - Unexpected token ILLEGAL at (21, 17) to (21, 19) -language/expressions/less-than-or-equal/bigint-and-bigint.js (strict mode) - Unexpected token ILLEGAL at (24, 17) to (24, 19) -language/expressions/less-than-or-equal/bigint-and-bigint.js (default) - Unexpected token ILLEGAL at (24, 17) to (24, 19) -language/expressions/less-than-or-equal/bigint-and-non-finite.js (strict mode) - Unexpected token ILLEGAL at (22, 17) to (22, 19) -language/expressions/less-than-or-equal/bigint-and-non-finite.js (default) - Unexpected token ILLEGAL at (22, 17) to (22, 19) -language/expressions/less-than-or-equal/bigint-and-number-extremes.js (strict mode) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than-or-equal/bigint-and-number-extremes.js (default) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than-or-equal/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than-or-equal/bigint-and-number.js (default) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than/bigint-and-bigint.js (strict mode) - Unexpected token ILLEGAL at (24, 17) to (24, 19) -language/expressions/less-than/bigint-and-bigint.js (default) - Unexpected token ILLEGAL at (24, 17) to (24, 19) -language/expressions/less-than/bigint-and-non-finite.js (strict mode) - Unexpected token ILLEGAL at (22, 17) to (22, 19) -language/expressions/less-than/bigint-and-non-finite.js (default) - Unexpected token ILLEGAL at (22, 17) to (22, 19) -language/expressions/less-than/bigint-and-number-extremes.js (strict mode) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than/bigint-and-number-extremes.js (default) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/less-than/bigint-and-number.js (default) - Unexpected token ILLEGAL at (23, 17) to (23, 19) -language/expressions/logical-not/bigint.js (strict mode) - Unexpected token ILLEGAL at (21, 18) to (21, 20) -language/expressions/logical-not/bigint.js (default) - Unexpected token 
ILLEGAL at (21, 18) to (21, 20) -language/expressions/modulus/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/modulus/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/modulus/bigint-arithmetic.js (strict mode) - Unexpected token ILLEGAL at (14, 8) to (14, 27) -language/expressions/modulus/bigint-arithmetic.js (default) - Unexpected token ILLEGAL at (14, 8) to (14, 27) -language/expressions/modulus/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/modulus/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/modulus/bigint-modulo-zero.js (strict mode) - Unexpected token ILLEGAL at (24, 2) to (24, 4) -language/expressions/modulus/bigint-modulo-zero.js (default) - Unexpected token ILLEGAL at (24, 2) to (24, 4) -language/expressions/modulus/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/modulus/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/modulus/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/modulus/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/multiplication/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/multiplication/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/multiplication/bigint-arithmetic.js (strict mode) - Unexpected token ILLEGAL at (15, 8) to (15, 27) -language/expressions/multiplication/bigint-arithmetic.js (default) - Unexpected token ILLEGAL at (15, 8) to (15, 27) -language/expressions/multiplication/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/multiplication/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/multiplication/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/multiplication/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/multiplication/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/multiplication/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) +language/expressions/import.meta/distinct-for-each-module.js (strict mode) + Unexpected token . at (28, 26) to (28, 27) +language/expressions/import.meta/distinct-for-each-module.js (default) + Unexpected token . at (28, 26) to (28, 27) +language/expressions/import.meta/import-meta-is-an-ordinary-object.js (strict mode) + Unexpected token . at (27, 30) to (27, 31) +language/expressions/import.meta/import-meta-is-an-ordinary-object.js (default) + Unexpected token . at (27, 30) to (27, 31) +language/expressions/import.meta/same-object-returned.js (strict mode) + Unexpected token . at (27, 14) to (27, 15) +language/expressions/import.meta/same-object-returned.js (default) + Unexpected token . at (27, 14) to (27, 15) +language/expressions/import.meta/syntax/goal-module-nested-function.js (strict mode) + Unexpected token . 
at (15, 8) to (15, 9) +language/expressions/import.meta/syntax/goal-module-nested-function.js (default) + Unexpected token . at (15, 8) to (15, 9) +language/expressions/import.meta/syntax/goal-module.js (strict mode) + Unexpected token . at (14, 6) to (14, 7) +language/expressions/import.meta/syntax/goal-module.js (default) + Unexpected token . at (14, 6) to (14, 7) +language/expressions/object/method-definition/early-errors-object-async-method-duplicate-parameters.js (default) + Missing parse error language/expressions/object/method-definition/early-errors-object-method-NSPL-with-USD.js (strict mode) Missing parse error language/expressions/object/method-definition/early-errors-object-method-NSPL-with-USD.js (default) Missing parse error -language/expressions/object/method-definition/early-errors-object-method-async-lineterminator.js (strict mode) - Missing parse error -language/expressions/object/method-definition/early-errors-object-method-async-lineterminator.js (default) - Missing parse error language/expressions/object/method-definition/early-errors-object-method-await-in-formals-default.js (strict mode) Missing parse error language/expressions/object/method-definition/early-errors-object-method-await-in-formals-default.js (default) @@ -2756,130 +5780,6 @@ language/expressions/object/method-definition/use-strict-with-non-simple-param.j Missing parse error language/expressions/object/method-definition/use-strict-with-non-simple-param.js (default) Missing parse error -language/expressions/postfix-decrement/bigint.js (strict mode) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/postfix-decrement/bigint.js (default) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/postfix-increment/bigint.js (strict mode) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/postfix-increment/bigint.js (default) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/prefix-decrement/bigint.js (strict mode) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/prefix-decrement/bigint.js (default) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/prefix-increment/bigint.js (strict mode) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/prefix-increment/bigint.js (default) - Unexpected token ILLEGAL at (17, 8) to (17, 10) -language/expressions/right-shift/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/right-shift/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/right-shift/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 17) to (10, 19) -language/expressions/right-shift/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 17) to (10, 19) -language/expressions/right-shift/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (23, 24) to (23, 30) -language/expressions/right-shift/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (23, 24) to (23, 30) -language/expressions/right-shift/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/right-shift/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/right-shift/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/right-shift/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at 
(9, 24) to (9, 26) -language/expressions/right-shift/bigint.js (strict mode) - Unexpected token ILLEGAL at (28, 17) to (28, 19) -language/expressions/right-shift/bigint.js (default) - Unexpected token ILLEGAL at (28, 17) to (28, 19) -language/expressions/strict-does-not-equals/bigint-and-bigint.js (strict mode) - Unexpected token ILLEGAL at (20, 17) to (20, 19) -language/expressions/strict-does-not-equals/bigint-and-bigint.js (default) - Unexpected token ILLEGAL at (20, 17) to (20, 19) -language/expressions/strict-does-not-equals/bigint-and-boolean.js (strict mode) - Unexpected token ILLEGAL at (13, 18) to (13, 20) -language/expressions/strict-does-not-equals/bigint-and-boolean.js (default) - Unexpected token ILLEGAL at (13, 18) to (13, 20) -language/expressions/strict-does-not-equals/bigint-and-incomparable-primitive.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-incomparable-primitive.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-non-finite.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-non-finite.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-number-extremes.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-number-extremes.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-object.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-object.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-string.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-does-not-equals/bigint-and-string.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-bigint.js (strict mode) - Unexpected token ILLEGAL at (20, 17) to (20, 19) -language/expressions/strict-equals/bigint-and-bigint.js (default) - Unexpected token ILLEGAL at (20, 17) to (20, 19) -language/expressions/strict-equals/bigint-and-boolean.js (strict mode) - Unexpected token ILLEGAL at (13, 18) to (13, 20) -language/expressions/strict-equals/bigint-and-boolean.js (default) - Unexpected token ILLEGAL at (13, 18) to (13, 20) -language/expressions/strict-equals/bigint-and-incomparable-primitive.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-incomparable-primitive.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-non-finite.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-non-finite.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-number-extremes.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) 
-language/expressions/strict-equals/bigint-and-number-extremes.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-object.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-object.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-string.js (strict mode) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/strict-equals/bigint-and-string.js (default) - Unexpected token ILLEGAL at (13, 17) to (13, 19) -language/expressions/subtraction/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/subtraction/bigint-and-number.js (default) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/subtraction/bigint-arithmetic.js (strict mode) - Unexpected token ILLEGAL at (14, 8) to (14, 27) -language/expressions/subtraction/bigint-arithmetic.js (default) - Unexpected token ILLEGAL at (14, 8) to (14, 27) -language/expressions/subtraction/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/subtraction/bigint-errors.js (default) - Unexpected token ILLEGAL at (10, 16) to (10, 18) -language/expressions/subtraction/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/subtraction/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (17, 11) to (17, 13) -language/expressions/subtraction/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (9, 24) to (9, 26) -language/expressions/subtraction/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (9, 24) to (9, 26) language/expressions/tagged-template/invalid-escape-sequences.js (strict mode) Unexpected token ILLEGAL at (21, 4) to (21, 5) language/expressions/tagged-template/invalid-escape-sequences.js (default) @@ -2888,46 +5788,6 @@ language/expressions/template-literal/invalid-legacy-octal-escape-sequence.js (s Missing parse error language/expressions/template-literal/invalid-legacy-octal-escape-sequence.js (default) Missing parse error -language/expressions/typeof/bigint.js (strict mode) - Unexpected token ILLEGAL at (24, 9) to (24, 11) -language/expressions/typeof/bigint.js (default) - Unexpected token ILLEGAL at (24, 9) to (24, 11) -language/expressions/unary-minus/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (19, 25) to (19, 27) -language/expressions/unary-minus/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (19, 25) to (19, 27) -language/expressions/unary-minus/bigint.js (strict mode) - Unexpected token ILLEGAL at (17, 18) to (17, 20) -language/expressions/unary-minus/bigint.js (default) - Unexpected token ILLEGAL at (17, 18) to (17, 20) -language/expressions/unary-plus/bigint-throws.js (strict mode) - Unexpected token ILLEGAL at (19, 39) to (19, 41) -language/expressions/unary-plus/bigint-throws.js (default) - Unexpected token ILLEGAL at (19, 39) to (19, 41) -language/expressions/unsigned-right-shift/bigint-and-number.js (strict mode) - Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/unsigned-right-shift/bigint-and-number.js (default) - 
Unexpected token ILLEGAL at (13, 38) to (13, 40) -language/expressions/unsigned-right-shift/bigint-errors.js (strict mode) - Unexpected token ILLEGAL at (11, 18) to (11, 20) -language/expressions/unsigned-right-shift/bigint-errors.js (default) - Unexpected token ILLEGAL at (11, 18) to (11, 20) -language/expressions/unsigned-right-shift/bigint-non-primitive.js (strict mode) - Unexpected token ILLEGAL at (26, 22) to (26, 28) -language/expressions/unsigned-right-shift/bigint-non-primitive.js (default) - Unexpected token ILLEGAL at (26, 22) to (26, 28) -language/expressions/unsigned-right-shift/bigint-toprimitive.js (strict mode) - Unexpected token ILLEGAL at (19, 13) to (19, 15) -language/expressions/unsigned-right-shift/bigint-toprimitive.js (default) - Unexpected token ILLEGAL at (19, 13) to (19, 15) -language/expressions/unsigned-right-shift/bigint-wrapped-values.js (strict mode) - Unexpected token ILLEGAL at (11, 9) to (11, 11) -language/expressions/unsigned-right-shift/bigint-wrapped-values.js (default) - Unexpected token ILLEGAL at (11, 9) to (11, 11) -language/expressions/unsigned-right-shift/bigint.js (strict mode) - Unexpected token ILLEGAL at (17, 38) to (17, 40) -language/expressions/unsigned-right-shift/bigint.js (default) - Unexpected token ILLEGAL at (17, 38) to (17, 40) language/global-code/export.js (strict mode) Missing parse error language/global-code/export.js (default) @@ -2940,15 +5800,13 @@ language/global-code/new.target-arrow.js (strict mode) Missing parse error language/global-code/new.target-arrow.js (default) Missing parse error -language/global-code/new.target.js (strict mode) - Unexpected token . at (19, 3) to (19, 4) -language/global-code/new.target.js (default) - Unexpected token . at (19, 3) to (19, 4) language/import/dup-bound-names.js (strict mode) Missing parse error language/import/dup-bound-names.js (default) Missing parse error -language/literals/numeric/non-octal-decimal-integer-strict.js (strict mode) +language/line-terminators/invalid-string-cr.js (strict mode) + Missing parse error +language/line-terminators/invalid-string-cr.js (default) Missing parse error language/literals/regexp/early-err-dup-flag.js (strict mode) Missing parse error @@ -2986,6 +5844,230 @@ language/literals/regexp/invalid-range-negative-lookbehind.js (strict mode) Missing parse error language/literals/regexp/invalid-range-negative-lookbehind.js (default) Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-2-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-2-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-2.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-2.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-3-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-3-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-3.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-3.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-4-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-4-u.js (default) + Missing parse error 
+language/literals/regexp/named-groups/invalid-dangling-groupname-4.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-4.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-5.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-5.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-without-group-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname-without-group-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-dangling-groupname.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier-2-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier-2-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier-2.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier-2.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-duplicate-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-empty-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-empty-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-empty-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-empty-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-identity-escape-in-capture-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-identity-escape-in-capture-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-2-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-2-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-2.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-2.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-3-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-3-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-3.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-3.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-4.js 
(strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-4.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-5.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-5.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-6.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-6.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-without-group-2-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-without-group-2-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-without-group-3-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-without-group-3-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-without-group-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname-without-group-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-incomplete-groupname.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-continue-groupspecifier-4-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-continue-groupspecifier-4-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-continue-groupspecifier-4.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-continue-groupspecifier-4.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-continue-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-continue-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-2-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-2-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-2.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-2.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-3.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-3.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-4-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-4-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-4.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-4.js (default) 
+ Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-5-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-5-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-5.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-5.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-6.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-6.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-7.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-7.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-8-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-8-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-8.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-8.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-9-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-9-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-non-id-start-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-numeric-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-numeric-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-numeric-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-numeric-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-starting-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-starting-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-starting-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-starting-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-within-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-within-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-within-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-punctuator-within-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-u-escape-in-groupspecifier-2.js (strict 
mode) + Missing parse error +language/literals/regexp/named-groups/invalid-u-escape-in-groupspecifier-2.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-u-escape-in-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-u-escape-in-groupspecifier.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-unterminated-groupspecifier-u.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-unterminated-groupspecifier-u.js (default) + Missing parse error +language/literals/regexp/named-groups/invalid-unterminated-groupspecifier.js (strict mode) + Missing parse error +language/literals/regexp/named-groups/invalid-unterminated-groupspecifier.js (default) + Missing parse error language/literals/regexp/u-dec-esc.js (strict mode) Missing parse error language/literals/regexp/u-dec-esc.js (default) @@ -3102,6 +6184,22 @@ language/module-code/early-lex-and-var.js (default) Missing parse error language/module-code/early-strict-mode.js (default) Missing parse error +language/module-code/eval-rqstd-once.js (strict mode) + Type declarations are not allowed in untyped mode at (26, 12) to (26, 15) +language/module-code/eval-rqstd-once.js (default) + Type declarations are not allowed in untyped mode at (26, 12) to (26, 15) +language/module-code/eval-rqstd-order.js (strict mode) + Type declarations are not allowed in untyped mode at (40, 12) to (40, 15) +language/module-code/eval-rqstd-order.js (default) + Type declarations are not allowed in untyped mode at (40, 12) to (40, 15) +language/module-code/eval-self-once.js (strict mode) + Type declarations are not allowed in untyped mode at (35, 12) to (35, 15) +language/module-code/eval-self-once.js (default) + Type declarations are not allowed in untyped mode at (35, 12) to (35, 15) +language/module-code/instn-once.js (strict mode) + Type declarations are not allowed in untyped mode at (34, 12) to (34, 15) +language/module-code/instn-once.js (default) + Type declarations are not allowed in untyped mode at (34, 12) to (34, 15) language/module-code/parse-err-hoist-lex-fun.js (strict mode) Missing parse error language/module-code/parse-err-hoist-lex-fun.js (default) @@ -3112,10 +6210,6 @@ language/module-code/parse-err-hoist-lex-gen.js (default) Missing parse error language/module-code/parse-err-yield.js (default) Missing parse error -language/module-code/privatename-valid-no-earlyerr.js (strict mode) - Unexpected token = at (32, 5) to (32, 6) -language/module-code/privatename-valid-no-earlyerr.js (default) - Unexpected token = at (32, 5) to (32, 6) language/reserved-words/await-module.js (strict mode) Missing parse error language/reserved-words/await-module.js (default) @@ -3124,22 +6218,18 @@ language/statements/async-function/early-errors-declaration-NSPL-with-USD.js (st Missing parse error language/statements/async-function/early-errors-declaration-NSPL-with-USD.js (default) Missing parse error -language/statements/async-function/early-errors-declaration-await-in-formals-default.js (strict mode) +language/statements/async-function/early-errors-declaration-formals-body-duplicate.js (strict mode) Missing parse error -language/statements/async-function/early-errors-declaration-await-in-formals-default.js (default) +language/statements/async-function/early-errors-declaration-formals-body-duplicate.js (default) Missing parse error -language/statements/async-function/early-errors-declaration-await-in-formals.js (strict mode) 
+language/statements/class/class-name-ident-await-escaped-module.js (strict mode) Missing parse error -language/statements/async-function/early-errors-declaration-await-in-formals.js (default) +language/statements/class/class-name-ident-await-escaped-module.js (default) Missing parse error -language/statements/async-function/early-errors-declaration-formals-body-duplicate.js (strict mode) +language/statements/class/class-name-ident-await-module.js (strict mode) Missing parse error -language/statements/async-function/early-errors-declaration-formals-body-duplicate.js (default) +language/statements/class/class-name-ident-await-module.js (default) Missing parse error -language/statements/class/classelementname-abrupt-completion.js (strict mode) - Unexpected token } at (41, 2) to (41, 3) -language/statements/class/classelementname-abrupt-completion.js (default) - Unexpected token } at (41, 2) to (41, 3) language/statements/class/definition/early-errors-class-method-NSPL-with-USD.js (strict mode) Missing parse error language/statements/class/definition/early-errors-class-method-NSPL-with-USD.js (default) @@ -3152,518 +6242,4450 @@ language/statements/class/definition/early-errors-class-method-await-in-formals. Missing parse error language/statements/class/definition/early-errors-class-method-await-in-formals.js (default) Missing parse error -language/statements/class/fielddefinition-initializer-abrupt-completion.js (strict mode) - Unexpected token = at (41, 4) to (41, 5) -language/statements/class/fielddefinition-initializer-abrupt-completion.js (default) - Unexpected token = at (41, 4) to (41, 5) -language/statements/class/fields-after-same-line-gen-computed-names.js (strict mode) - Unexpected token = at (27, 26) to (27, 27) -language/statements/class/fields-after-same-line-gen-computed-names.js (default) - Unexpected token = at (27, 26) to (27, 27) -language/statements/class/fields-after-same-line-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 31) to (28, 32) -language/statements/class/fields-after-same-line-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 31) to (28, 32) -language/statements/class/fields-after-same-line-gen-literal-names.js (strict mode) - Unexpected token = at (27, 27) to (27, 28) -language/statements/class/fields-after-same-line-gen-literal-names.js (default) - Unexpected token = at (27, 27) to (27, 28) -language/statements/class/fields-after-same-line-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 36) to (25, 37) -language/statements/class/fields-after-same-line-gen-string-literal-names.js (default) - Unexpected token = at (25, 36) to (25, 37) -language/statements/class/fields-after-same-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 25) to (27, 26) -language/statements/class/fields-after-same-line-method-computed-names.js (default) - Unexpected token = at (27, 25) to (27, 26) -language/statements/class/fields-after-same-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 30) to (28, 31) -language/statements/class/fields-after-same-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 30) to (28, 31) -language/statements/class/fields-after-same-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 26) to (27, 27) -language/statements/class/fields-after-same-line-method-literal-names.js (default) - Unexpected token = at (27, 26) to (27, 27) 
-language/statements/class/fields-after-same-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 35) to (25, 36) -language/statements/class/fields-after-same-line-method-string-literal-names.js (default) - Unexpected token = at (25, 35) to (25, 36) -language/statements/class/fields-after-same-line-static-async-gen-computed-names.js (strict mode) - Unexpected token = at (27, 39) to (27, 40) -language/statements/class/fields-after-same-line-static-async-gen-computed-names.js (default) - Unexpected token = at (27, 39) to (27, 40) -language/statements/class/fields-after-same-line-static-async-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 44) to (28, 45) -language/statements/class/fields-after-same-line-static-async-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 44) to (28, 45) -language/statements/class/fields-after-same-line-static-async-gen-literal-names.js (strict mode) - Unexpected token = at (27, 40) to (27, 41) -language/statements/class/fields-after-same-line-static-async-gen-literal-names.js (default) - Unexpected token = at (27, 40) to (27, 41) -language/statements/class/fields-after-same-line-static-async-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 49) to (25, 50) -language/statements/class/fields-after-same-line-static-async-gen-string-literal-names.js (default) - Unexpected token = at (25, 49) to (25, 50) -language/statements/class/fields-after-same-line-static-async-method-computed-names.js (strict mode) - Unexpected token = at (27, 38) to (27, 39) -language/statements/class/fields-after-same-line-static-async-method-computed-names.js (default) - Unexpected token = at (27, 38) to (27, 39) -language/statements/class/fields-after-same-line-static-async-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 43) to (28, 44) -language/statements/class/fields-after-same-line-static-async-method-computed-symbol-names.js (default) - Unexpected token = at (28, 43) to (28, 44) -language/statements/class/fields-after-same-line-static-async-method-literal-names.js (strict mode) - Unexpected token = at (27, 39) to (27, 40) -language/statements/class/fields-after-same-line-static-async-method-literal-names.js (default) - Unexpected token = at (27, 39) to (27, 40) -language/statements/class/fields-after-same-line-static-async-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 48) to (25, 49) -language/statements/class/fields-after-same-line-static-async-method-string-literal-names.js (default) - Unexpected token = at (25, 48) to (25, 49) -language/statements/class/fields-after-same-line-static-gen-computed-names.js (strict mode) - Unexpected token = at (27, 33) to (27, 34) -language/statements/class/fields-after-same-line-static-gen-computed-names.js (default) - Unexpected token = at (27, 33) to (27, 34) -language/statements/class/fields-after-same-line-static-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 38) to (28, 39) -language/statements/class/fields-after-same-line-static-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 38) to (28, 39) -language/statements/class/fields-after-same-line-static-gen-literal-names.js (strict mode) - Unexpected token = at (27, 34) to (27, 35) -language/statements/class/fields-after-same-line-static-gen-literal-names.js (default) - Unexpected token = at (27, 34) to (27, 35) -language/statements/class/fields-after-same-line-static-gen-string-literal-names.js (strict mode) - 
Unexpected token = at (25, 43) to (25, 44) -language/statements/class/fields-after-same-line-static-gen-string-literal-names.js (default) - Unexpected token = at (25, 43) to (25, 44) -language/statements/class/fields-after-same-line-static-method-computed-names.js (strict mode) - Unexpected token = at (27, 32) to (27, 33) -language/statements/class/fields-after-same-line-static-method-computed-names.js (default) - Unexpected token = at (27, 32) to (27, 33) -language/statements/class/fields-after-same-line-static-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 37) to (28, 38) -language/statements/class/fields-after-same-line-static-method-computed-symbol-names.js (default) - Unexpected token = at (28, 37) to (28, 38) -language/statements/class/fields-after-same-line-static-method-literal-names.js (strict mode) - Unexpected token = at (27, 33) to (27, 34) -language/statements/class/fields-after-same-line-static-method-literal-names.js (default) - Unexpected token = at (27, 33) to (27, 34) -language/statements/class/fields-after-same-line-static-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 42) to (25, 43) -language/statements/class/fields-after-same-line-static-method-string-literal-names.js (default) - Unexpected token = at (25, 42) to (25, 43) -language/statements/class/fields-asi-1.js (strict mode) - Unexpected token = at (12, 4) to (12, 5) -language/statements/class/fields-asi-1.js (default) - Unexpected token = at (12, 4) to (12, 5) -language/statements/class/fields-asi-2.js (strict mode) - Unexpected token = at (11, 4) to (11, 5) -language/statements/class/fields-asi-2.js (default) - Unexpected token = at (11, 4) to (11, 5) -language/statements/class/fields-asi-5.js (strict mode) - Unexpected token = at (15, 4) to (15, 5) -language/statements/class/fields-asi-5.js (default) - Unexpected token = at (15, 4) to (15, 5) -language/statements/class/fields-computed-name-toprimitive-symbol.js (strict mode) - Unexpected token = at (68, 9) to (68, 10) -language/statements/class/fields-computed-name-toprimitive-symbol.js (default) - Unexpected token = at (68, 9) to (68, 10) -language/statements/class/fields-computed-name-toprimitive.js (strict mode) - Unexpected token = at (65, 9) to (65, 10) -language/statements/class/fields-computed-name-toprimitive.js (default) - Unexpected token = at (65, 9) to (65, 10) -language/statements/class/fields-ctor-called-after-fields-init.js (strict mode) - Unexpected token = at (28, 6) to (28, 7) -language/statements/class/fields-ctor-called-after-fields-init.js (default) - Unexpected token = at (28, 6) to (28, 7) -language/statements/class/fields-derived-cls-direct-eval-err-contains-supercall-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-supercall-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-supercall-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-supercall-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-supercall.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-supercall.js (default) - Unexpected token = at (30, 4) to (30, 5) 
-language/statements/class/fields-derived-cls-direct-eval-err-contains-superproperty-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-superproperty-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-superproperty-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-direct-eval-err-contains-superproperty-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-supercall-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-supercall-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-supercall-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-supercall-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-supercall.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-supercall.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-superproperty-1.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-superproperty-1.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-superproperty-2.js (strict mode) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-derived-cls-indirect-eval-err-contains-superproperty-2.js (default) - Unexpected token = at (30, 4) to (30, 5) -language/statements/class/fields-direct-eval-err-contains-arguments.js (strict mode) - Unexpected token = at (29, 4) to (29, 5) -language/statements/class/fields-direct-eval-err-contains-arguments.js (default) - Unexpected token = at (29, 4) to (29, 5) -language/statements/class/fields-direct-eval-err-contains-newtarget.js (strict mode) - Unexpected token = at (28, 4) to (28, 5) -language/statements/class/fields-direct-eval-err-contains-newtarget.js (default) - Unexpected token = at (28, 4) to (28, 5) -language/statements/class/fields-evaluation-error-computed-name-referenceerror.js (strict mode) - Unexpected token = at (52, 12) to (52, 13) -language/statements/class/fields-evaluation-error-computed-name-referenceerror.js (default) - Unexpected token = at (52, 12) to (52, 13) -language/statements/class/fields-evaluation-error-computed-name-toprimitive-err.js (strict mode) - Unexpected token } at (55, 2) to (55, 3) -language/statements/class/fields-evaluation-error-computed-name-toprimitive-err.js (default) - Unexpected token } at (55, 2) to (55, 3) -language/statements/class/fields-evaluation-error-computed-name-tostring-err.js (strict mode) - Unexpected token } at (55, 2) to (55, 3) -language/statements/class/fields-evaluation-error-computed-name-tostring-err.js (default) - Unexpected token } at (55, 2) to (55, 3) -language/statements/class/fields-evaluation-error-computed-name-valueof-err.js (strict mode) - Unexpected token } at (56, 2) to (56, 3) 
-language/statements/class/fields-evaluation-error-computed-name-valueof-err.js (default) - Unexpected token } at (56, 2) to (56, 3) -language/statements/class/fields-indirect-eval-err-contains-arguments.js (strict mode) - Unexpected token = at (29, 4) to (29, 5) -language/statements/class/fields-indirect-eval-err-contains-arguments.js (default) - Unexpected token = at (29, 4) to (29, 5) -language/statements/class/fields-indirect-eval-err-contains-newtarget.js (strict mode) - Unexpected token = at (28, 4) to (28, 5) -language/statements/class/fields-indirect-eval-err-contains-newtarget.js (default) - Unexpected token = at (28, 4) to (28, 5) -language/statements/class/fields-init-err-evaluation.js (strict mode) - Unexpected token = at (27, 4) to (27, 5) -language/statements/class/fields-init-err-evaluation.js (default) - Unexpected token = at (27, 4) to (27, 5) -language/statements/class/fields-init-value-defined-after-class.js (strict mode) - Unexpected token = at (26, 6) to (26, 7) -language/statements/class/fields-init-value-defined-after-class.js (default) - Unexpected token = at (26, 6) to (26, 7) -language/statements/class/fields-init-value-incremental.js (strict mode) - Unexpected token = at (35, 8) to (35, 9) -language/statements/class/fields-init-value-incremental.js (default) - Unexpected token = at (35, 8) to (35, 9) -language/statements/class/fields-literal-name-static-private-fields-forbidden.js (strict mode) - Missing parse error -language/statements/class/fields-literal-name-static-private-fields-forbidden.js (default) - Missing parse error -language/statements/class/fields-literal-name-static-public-fields-forbidden.js (strict mode) - Missing parse error -language/statements/class/fields-literal-name-static-public-fields-forbidden.js (default) - Missing parse error -language/statements/class/fields-multiple-definitions-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-multiple-definitions-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-multiple-definitions-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 6) to (28, 7) -language/statements/class/fields-multiple-definitions-computed-symbol-names.js (default) - Unexpected token = at (28, 6) to (28, 7) -language/statements/class/fields-multiple-definitions-literal-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-multiple-definitions-literal-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-multiple-definitions-private-names.js (strict mode) - Unexpected token = at (28, 6) to (28, 7) -language/statements/class/fields-multiple-definitions-private-names.js (default) - Unexpected token = at (28, 6) to (28, 7) -language/statements/class/fields-multiple-definitions-string-literal-names.js (strict mode) - Unexpected token = at (25, 6) to (25, 7) -language/statements/class/fields-multiple-definitions-string-literal-names.js (default) - Unexpected token = at (25, 6) to (25, 7) -language/statements/class/fields-multiple-stacked-definitions-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-multiple-stacked-definitions-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-multiple-stacked-definitions-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) 
-language/statements/class/fields-multiple-stacked-definitions-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-multiple-stacked-definitions-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-multiple-stacked-definitions-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-multiple-stacked-definitions-private-names.js (strict mode) - Unexpected identifier at (29, 2) to (29, 5) -language/statements/class/fields-multiple-stacked-definitions-private-names.js (default) - Unexpected identifier at (29, 2) to (29, 5) -language/statements/class/fields-multiple-stacked-definitions-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-multiple-stacked-definitions-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-new-no-sc-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-new-no-sc-line-method-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-new-no-sc-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-new-no-sc-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-new-no-sc-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-new-no-sc-line-method-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-new-no-sc-line-method-private-names.js (strict mode) - Unexpected identifier at (29, 2) to (29, 3) -language/statements/class/fields-new-no-sc-line-method-private-names.js (default) - Unexpected identifier at (29, 2) to (29, 3) -language/statements/class/fields-new-no-sc-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-new-no-sc-line-method-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-new-sc-line-gen-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-new-sc-line-gen-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-new-sc-line-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-new-sc-line-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-new-sc-line-gen-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-new-sc-line-gen-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-new-sc-line-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-new-sc-line-gen-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-new-sc-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-new-sc-line-method-computed-names.js (default) - Unexpected token = at 
(27, 6) to (27, 7) -language/statements/class/fields-new-sc-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-new-sc-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-new-sc-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-new-sc-line-method-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-new-sc-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-new-sc-line-method-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-privatename-constructor-err.js (strict mode) - Missing parse error -language/statements/class/fields-privatename-constructor-err.js (default) - Missing parse error -language/statements/class/fields-redeclaration-symbol.js (strict mode) - Unexpected token = at (38, 6) to (38, 7) -language/statements/class/fields-redeclaration-symbol.js (default) - Unexpected token = at (38, 6) to (38, 7) -language/statements/class/fields-redeclaration.js (strict mode) - Unexpected token = at (37, 4) to (37, 5) -language/statements/class/fields-redeclaration.js (default) - Unexpected token = at (37, 4) to (37, 5) -language/statements/class/fields-regular-definitions-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-regular-definitions-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-regular-definitions-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-regular-definitions-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-regular-definitions-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-regular-definitions-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-regular-definitions-private-names.js (strict mode) - Unexpected identifier at (28, 0) to (28, 1) -language/statements/class/fields-regular-definitions-private-names.js (default) - Unexpected identifier at (28, 0) to (28, 1) -language/statements/class/fields-regular-definitions-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-regular-definitions-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-same-line-async-gen-computed-names.js (strict mode) - Unexpected token = at (27, 32) to (27, 33) -language/statements/class/fields-same-line-async-gen-computed-names.js (default) - Unexpected token = at (27, 32) to (27, 33) -language/statements/class/fields-same-line-async-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 37) to (28, 38) -language/statements/class/fields-same-line-async-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 37) to (28, 38) -language/statements/class/fields-same-line-async-gen-literal-names.js (strict mode) - Unexpected token = at (27, 33) to (27, 34) -language/statements/class/fields-same-line-async-gen-literal-names.js (default) - Unexpected token = at (27, 33) to (27, 34) 
-language/statements/class/fields-same-line-async-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 42) to (25, 43) -language/statements/class/fields-same-line-async-gen-string-literal-names.js (default) - Unexpected token = at (25, 42) to (25, 43) -language/statements/class/fields-same-line-async-method-computed-names.js (strict mode) - Unexpected token = at (27, 31) to (27, 32) -language/statements/class/fields-same-line-async-method-computed-names.js (default) - Unexpected token = at (27, 31) to (27, 32) -language/statements/class/fields-same-line-async-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 36) to (28, 37) -language/statements/class/fields-same-line-async-method-computed-symbol-names.js (default) - Unexpected token = at (28, 36) to (28, 37) -language/statements/class/fields-same-line-async-method-literal-names.js (strict mode) - Unexpected token = at (27, 32) to (27, 33) -language/statements/class/fields-same-line-async-method-literal-names.js (default) - Unexpected token = at (27, 32) to (27, 33) -language/statements/class/fields-same-line-async-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 41) to (25, 42) -language/statements/class/fields-same-line-async-method-string-literal-names.js (default) - Unexpected token = at (25, 41) to (25, 42) -language/statements/class/fields-same-line-gen-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-same-line-gen-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-same-line-gen-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-same-line-gen-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-same-line-gen-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-same-line-gen-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-same-line-gen-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-same-line-gen-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-same-line-method-computed-names.js (strict mode) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-same-line-method-computed-names.js (default) - Unexpected token = at (27, 6) to (27, 7) -language/statements/class/fields-same-line-method-computed-symbol-names.js (strict mode) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-same-line-method-computed-symbol-names.js (default) - Unexpected token = at (28, 11) to (28, 12) -language/statements/class/fields-same-line-method-literal-names.js (strict mode) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-same-line-method-literal-names.js (default) - Unexpected token = at (27, 7) to (27, 8) -language/statements/class/fields-same-line-method-string-literal-names.js (strict mode) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-same-line-method-string-literal-names.js (default) - Unexpected token = at (25, 16) to (25, 17) -language/statements/class/fields-string-name-propname-constructor.js (strict mode) - Missing parse error -language/statements/class/fields-string-name-propname-constructor.js 
(default) - Missing parse error -language/statements/class/fields-wrapped-in-sc-computed-names.js (strict mode) - Unexpected token = at (28, 12) to (28, 13) -language/statements/class/fields-wrapped-in-sc-computed-names.js (default) - Unexpected token = at (28, 12) to (28, 13) -language/statements/class/fields-wrapped-in-sc-computed-symbol-names.js (strict mode) - Unexpected token = at (29, 17) to (29, 18) -language/statements/class/fields-wrapped-in-sc-computed-symbol-names.js (default) - Unexpected token = at (29, 17) to (29, 18) -language/statements/class/fields-wrapped-in-sc-literal-names.js (strict mode) - Unexpected token = at (28, 13) to (28, 14) -language/statements/class/fields-wrapped-in-sc-literal-names.js (default) - Unexpected token = at (28, 13) to (28, 14) -language/statements/class/fields-wrapped-in-sc-string-literal-names.js (strict mode) - Unexpected token = at (26, 22) to (26, 23) -language/statements/class/fields-wrapped-in-sc-string-literal-names.js (default) - Unexpected token = at (26, 22) to (26, 23) -language/statements/class/privatefieldget-success-1.js (strict mode) - Unexpected token = at (29, 5) to (29, 6) -language/statements/class/privatefieldget-success-1.js (default) - Unexpected token = at (29, 5) to (29, 6) -language/statements/class/privatefieldget-success-2.js (strict mode) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-2.js (default) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-3.js (strict mode) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-3.js (default) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-4.js (strict mode) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-4.js (default) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-5.js (strict mode) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-success-5.js (default) - Unexpected token = at (30, 5) to (30, 6) -language/statements/class/privatefieldget-typeerror-1.js (strict mode) - Unexpected token = at (35, 4) to (35, 5) -language/statements/class/privatefieldget-typeerror-1.js (default) - Unexpected token = at (35, 4) to (35, 5) -language/statements/class/privatefieldget-typeerror-2.js (strict mode) - Unexpected token = at (35, 5) to (35, 6) -language/statements/class/privatefieldget-typeerror-2.js (default) - Unexpected token = at (35, 5) to (35, 6) -language/statements/class/privatefieldget-typeerror-3.js (strict mode) - Unexpected token = at (36, 5) to (36, 6) -language/statements/class/privatefieldget-typeerror-3.js (default) - Unexpected token = at (36, 5) to (36, 6) -language/statements/class/privatefieldget-typeerror-5.js (strict mode) - Unexpected token = at (35, 5) to (35, 6) -language/statements/class/privatefieldget-typeerror-5.js (default) - Unexpected token = at (35, 5) to (35, 6) -language/statements/class/privatefieldset-typeerror-1.js (strict mode) - Unexpected token = at (35, 4) to (35, 5) -language/statements/class/privatefieldset-typeerror-1.js (default) - Unexpected token = at (35, 4) to (35, 5) -language/statements/class/privatefieldset-typeerror-2.js (strict mode) - Unexpected token = at (35, 5) to (35, 6) -language/statements/class/privatefieldset-typeerror-2.js (default) - Unexpected token = at (35, 5) to (35, 6) 
-language/statements/class/privatefieldset-typeerror-3.js (strict mode) - Unexpected token = at (36, 5) to (36, 6) -language/statements/class/privatefieldset-typeerror-3.js (default) - Unexpected token = at (36, 5) to (36, 6) -language/statements/class/privatefieldset-typeerror-5.js (strict mode) - Unexpected token = at (36, 5) to (36, 6) -language/statements/class/privatefieldset-typeerror-5.js (default) - Unexpected token = at (36, 5) to (36, 6) -language/statements/class/privatename-not-valid-eval-earlyerr-3.js (strict mode) - Unexpected token = at (23, 4) to (23, 5) -language/statements/class/privatename-not-valid-eval-earlyerr-3.js (default) - Unexpected token = at (23, 4) to (23, 5) -language/statements/class/privatename-valid-no-earlyerr.js (strict mode) - Unexpected token = at (34, 5) to (34, 6) -language/statements/class/privatename-valid-no-earlyerr.js (default) - Unexpected token = at (34, 5) to (34, 6) -language/statements/class/super-fielddefinition-initializer-abrupt-completion.js (strict mode) - Unexpected token = at (45, 4) to (45, 5) -language/statements/class/super-fielddefinition-initializer-abrupt-completion.js (default) - Unexpected token = at (45, 4) to (45, 5) -language/statements/class/syntax/early-errors/class-body-has-direct-super-missing-class-heritage.js (strict mode) - Missing parse error -language/statements/class/syntax/early-errors/class-body-has-direct-super-missing-class-heritage.js (default) - Missing parse error -language/statements/class/syntax/early-errors/class-body-special-method-generator-propname-constructor.js (strict mode) - Missing parse error -language/statements/class/syntax/early-errors/class-body-special-method-generator-propname-constructor.js (default) - Missing parse error -language/statements/class/syntax/early-errors/class-body-special-method-get-propname-constructor.js (strict mode) - Missing parse error -language/statements/class/syntax/early-errors/class-body-special-method-get-propname-constructor.js (default) - Missing parse error -language/statements/class/syntax/early-errors/class-body-special-method-set-propname-constructor.js (strict mode) - Missing parse error -language/statements/class/syntax/early-errors/class-body-special-method-set-propname-constructor.js (default) - Missing parse error -language/statements/class/syntax/early-errors/class-body-static-method-get-propname-prototype.js (strict mode) - Missing parse error -language/statements/class/syntax/early-errors/class-body-static-method-get-propname-prototype.js (default) - Missing parse error -language/statements/class/syntax/early-errors/class-body-static-method-propname-prototype.js (strict mode) - Missing parse error -language/statements/class/syntax/early-errors/class-body-static-method-propname-prototype.js (default) +language/statements/class/dstr/async-private-gen-meth-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-ary-init-iter-close.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-ary-name-iter-val.js (strict mode) + Classes may not have private methods. 
at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-ary-name-iter-val.js (default) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (66, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (66, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (64, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (64, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (59, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. 
at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (61, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (54, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (54, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (60, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (60, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (62, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (62, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. 
at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (65, 2) to (68, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (65, 2) to (68, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-elision.js (default) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-empty.js (default) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (79, 2) to (84, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (86, 2) to (90, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (86, 2) to (90, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. 
at (57, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (57, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (57, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (57, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (54, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (54, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (66, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (66, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. 
at (64, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (64, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (59, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (60, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (60, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (61, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. 
at (61, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (54, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (54, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (60, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (60, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (62, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (62, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (58, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. 
at (65, 2) to (68, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (65, 2) to (68, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (57, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (79, 2) to (84, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (79, 2) to (84, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (86, 2) to (90, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (86, 2) to (90, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (70, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (70, 2) to (73, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (57, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (57, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (57, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (57, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (54, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (54, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (60, 2) to (69, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (59, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (59, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. 
at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (58, 2) to (78, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (58, 2) to (78, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (53, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (53, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (48, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (48, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (49, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (49, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (47, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (47, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-empty.js (default) + Classes may not have private methods. 
at (58, 2) to (61, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (59, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (60, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (59, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (59, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (53, 2) to (56, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (58, 2) to (78, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. 
at (58, 2) to (78, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (53, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (53, 2) to (60, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (53, 2) to (59, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (56, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (54, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (48, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (48, 2) to (58, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (49, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (49, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (47, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (47, 2) to (65, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-init-iter-close.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-name-iter-val.js (default) + Classes may not have private methods. 
at (66, 2) to (71, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (58, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (59, 2) to (64, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (66, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (66, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (64, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (64, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (62, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (62, 2) to (66, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (59, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (59, 2) to (67, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (61, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (61, 2) to (70, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (59, 2) to (62, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (60, 2) to (63, 3) +language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. 
at (60, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (60, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (61, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (61, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (54, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (60, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (62, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (71, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (66, 2) to (71, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (68, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (65, 2) to (68, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (77, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (73, 2) to (77, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (90, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (86, 2) to (90, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (57, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (57, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (54, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (69, 2) to (72, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (69, 2) to (72, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (69, 2) to (72, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (69, 2) to (72, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (71, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (66, 2) to (71, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (59, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (70, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (66, 2) to (70, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (64, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (64, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (62, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (60, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (59, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (61, 2) to (70, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (61, 2) to (70, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (60, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (60, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (61, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (61, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (54, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (60, 2) to (67, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (62, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (71, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (66, 2) to (71, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (58, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (58, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (68, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (65, 2) to (68, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (73, 2) to (77, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (73, 2) to (77, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (57, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (79, 2) to (84, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (90, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (86, 2) to (90, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (70, 2) to (73, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (57, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (57, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (57, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (54, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (60, 2) to (69, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (59, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (59, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (78, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (58, 2) to (78, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (53, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (48, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (48, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (49, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (49, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (47, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (47, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (58, 2) to (61, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (59, 2) to (62, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (59, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (59, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (60, 2) to (64, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (59, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (53, 2) to (56, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (78, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (58, 2) to (78, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (53, 2) to (60, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (53, 2) to (59, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (56, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (54, 2) to (63, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (48, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (48, 2) to (58, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (49, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (49, 2) to (66, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (47, 2) to (65, 3)
+language/statements/class/dstr/async-private-gen-meth-static-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (47, 2) to (65, 3)
+language/statements/class/dstr/private-gen-meth-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (84, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (77, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (79, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (72, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (78, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (86, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (83, 2) to (86, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (91, 2) to (95, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (91, 2) to (95, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (97, 2) to (102, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (97, 2) to (102, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (104, 2) to (108, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (104, 2) to (108, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (88, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (88, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (75, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (75, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (72, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-name-iter-val.js (default)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-init.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-init.js (default)
+ Classes may not have private methods. at (84, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (default)
+ Classes may not have private methods. at (82, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-init.js (default)
+ Classes may not have private methods. at (80, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-init.js (default)
+ Classes may not have private methods. at (77, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (default)
+ Classes may not have private methods. at (79, 2) to (88, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (78, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (78, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (79, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-hole.js (default)
+ Classes may not have private methods. at (72, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-skipped.js (default)
+ Classes may not have private methods. at (78, 2) to (85, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-init-undef.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-complete.js (default)
+ Classes may not have private methods. at (80, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-done.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-id-iter-val.js (default)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id-init.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-id.js (default)
+ Classes may not have private methods. at (76, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (default)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elem-obj-prop-id.js (default)
+ Classes may not have private methods. at (76, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (strict mode)
+ Classes may not have private methods. at (83, 2) to (86, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elision-exhausted.js (default)
+ Classes may not have private methods. at (83, 2) to (86, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elision.js (strict mode)
+ Classes may not have private methods. at (91, 2) to (95, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-elision.js (default)
+ Classes may not have private methods. at (91, 2) to (95, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-empty.js (default)
+ Classes may not have private methods. at (75, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (strict mode)
+ Classes may not have private methods. at (97, 2) to (102, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elem.js (default)
+ Classes may not have private methods. at (97, 2) to (102, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (strict mode)
+ Classes may not have private methods. at (104, 2) to (108, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-elision.js (default)
+ Classes may not have private methods. at (104, 2) to (108, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (strict mode)
+ Classes may not have private methods. at (88, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-empty.js (default)
+ Classes may not have private methods. at (88, 2) to (91, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-ary-rest.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-elision.js (default)
+ Classes may not have private methods. at (75, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (strict mode)
+ Classes may not have private methods. at (75, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id-exhausted.js (default)
+ Classes may not have private methods. at (75, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-id.js (default)
+ Classes may not have private methods. at (72, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-id.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-dflt-ary-ptrn-rest-obj-prop-id.js (default)
+ Classes may not have private methods. at (78, 2) to (87, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (96, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (76, 2) to (96, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (71, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (66, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (67, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (67, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-dflt-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (65, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-empty.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-empty.js (default)
+ Classes may not have private methods. at (76, 2) to (79, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-arrow.js (default)
+ Classes may not have private methods. at (77, 2) to (80, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-class.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-class.js (default)
+ Classes may not have private methods. at (77, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-cover.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-fn.js (default)
+ Classes may not have private methods. at (77, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (strict mode)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-fn-name-gen.js (default)
+ Classes may not have private methods. at (78, 2) to (82, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-init-skipped.js (default)
+ Classes may not have private methods. at (77, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-id-trailing-comma.js (default)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-ary-init.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-ary-init.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-ary-trailing-comma.js (default)
+ Classes may not have private methods. at (71, 2) to (74, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-ary.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-ary.js (default)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id-init-skipped.js (strict mode)
+ Classes may not have private methods. at (76, 2) to (96, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id-init-skipped.js (default)
+ Classes may not have private methods. at (76, 2) to (96, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id-init.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id-init.js (default)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id-trailing-comma.js (default)
+ Classes may not have private methods. at (71, 2) to (78, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id.js (strict mode)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-id.js (default)
+ Classes may not have private methods. at (71, 2) to (77, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-obj-init.js (strict mode)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-obj-init.js (default)
+ Classes may not have private methods. at (74, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-obj.js (strict mode)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-prop-obj.js (default)
+ Classes may not have private methods. at (72, 2) to (81, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-rest-getter.js (strict mode)
+ Classes may not have private methods. at (66, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-rest-getter.js (default)
+ Classes may not have private methods. at (66, 2) to (76, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (strict mode)
+ Classes may not have private methods. at (67, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-rest-skip-non-enumerable.js (default)
+ Classes may not have private methods. at (67, 2) to (84, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-rest-val-obj.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-obj-ptrn-rest-val-obj.js (default)
+ Classes may not have private methods. at (65, 2) to (83, 3)
+language/statements/class/dstr/private-gen-meth-static-ary-init-iter-close.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-static-ary-init-iter-close.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-static-ary-init-iter-no-close.js (strict mode)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-static-ary-init-iter-no-close.js (default)
+ Classes may not have private methods. at (87, 2) to (90, 3)
+language/statements/class/dstr/private-gen-meth-static-ary-name-iter-val.js (strict mode)
+ Classes may not have private methods. at (84, 2) to (89, 3)
+language/statements/class/dstr/private-gen-meth-static-ary-name-iter-val.js (default)
+ Classes may not have private methods.
at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (84, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (84, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (82, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (82, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (77, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (77, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (79, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (79, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. 
at (78, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (79, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (79, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (72, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (72, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (78, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (78, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. 
at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (83, 2) to (86, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (83, 2) to (86, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (91, 2) to (95, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-elision.js (default) + Classes may not have private methods. at (91, 2) to (95, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-empty.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (97, 2) to (102, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (97, 2) to (102, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (104, 2) to (108, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (104, 2) to (108, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (88, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (88, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. 
at (75, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (72, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (72, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (87, 2) to (90, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (87, 2) to (90, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (87, 2) to (90, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (87, 2) to (90, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (84, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (84, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (82, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. 
at (82, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (80, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (80, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (77, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (77, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (79, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (79, 2) to (88, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (78, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (78, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (79, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. 
at (79, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (72, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (72, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (78, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (78, 2) to (85, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (84, 2) to (89, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (76, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. 
at (83, 2) to (86, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (83, 2) to (86, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (91, 2) to (95, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (91, 2) to (95, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (97, 2) to (102, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (97, 2) to (102, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (104, 2) to (108, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (104, 2) to (108, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (88, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (88, 2) to (91, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (72, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (72, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (78, 2) to (87, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (77, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (77, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. 
at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (96, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (96, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (71, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (71, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (66, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (66, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (67, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (67, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (65, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (65, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-empty.js (default) + Classes may not have private methods. 
at (76, 2) to (79, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (77, 2) to (80, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (77, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (77, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (77, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (71, 2) to (74, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (96, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. 
at (76, 2) to (96, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (71, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (71, 2) to (78, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (71, 2) to (77, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (74, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (66, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (66, 2) to (76, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (67, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (67, 2) to (84, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (65, 2) to (83, 3) +language/statements/class/dstr/private-gen-meth-static-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (65, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-ary-init-iter-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-ary-name-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. 
at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. 
at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-ary-ptrn-elision.js (default) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-ary-ptrn-empty.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. 
at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. 
at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. 
at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. 
at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. 
at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. 
at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-obj-ptrn-empty.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. 
at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-ary-init-iter-close.js (default) + Classes may not have private methods. 
at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-ary-name-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. 
at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. 
at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-elision.js (default) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-empty.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. 
at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-init-iter-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-init-iter-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-init-iter-no-close.js (strict mode) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-init-iter-no-close.js (default) + Classes may not have private methods. at (85, 2) to (88, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-name-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-name-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elem-iter.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (strict mode) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-init.js (default) + Classes may not have private methods. at (82, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (strict mode) + Classes may not have private methods. 
at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-elision-iter.js (default) + Classes may not have private methods. at (80, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (strict mode) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-init.js (default) + Classes may not have private methods. at (78, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-empty-iter.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (strict mode) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-init.js (default) + Classes may not have private methods. at (75, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (strict mode) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-ary-rest-iter.js (default) + Classes may not have private methods. at (77, 2) to (86, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-exhausted.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (76, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-class.js (default) + Classes may not have private methods. at (76, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-fn-name-gen.js (default) + Classes may not have private methods. 
at (77, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (strict mode) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-hole.js (default) + Classes may not have private methods. at (70, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (strict mode) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-skipped.js (default) + Classes may not have private methods. at (76, 2) to (83, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-init-undef.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (strict mode) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-complete.js (default) + Classes may not have private methods. at (78, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-done.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (strict mode) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-id-iter-val.js (default) + Classes may not have private methods. at (82, 2) to (87, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id-init.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-id.js (default) + Classes may not have private methods. at (74, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id-init.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (strict mode) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elem-obj-prop-id.js (default) + Classes may not have private methods. at (74, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elision-exhausted.js (strict mode) + Classes may not have private methods. at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elision-exhausted.js (default) + Classes may not have private methods. 
at (81, 2) to (84, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elision.js (strict mode) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-elision.js (default) + Classes may not have private methods. at (89, 2) to (93, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-empty.js (strict mode) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-empty.js (default) + Classes may not have private methods. at (73, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elem.js (strict mode) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elem.js (default) + Classes may not have private methods. at (95, 2) to (100, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elision.js (strict mode) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-elision.js (default) + Classes may not have private methods. at (102, 2) to (106, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-empty.js (strict mode) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-empty.js (default) + Classes may not have private methods. at (86, 2) to (89, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-rest.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-ary-rest.js (default) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-elision.js (strict mode) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-elision.js (default) + Classes may not have private methods. at (73, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (strict mode) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id-exhausted.js (default) + Classes may not have private methods. at (73, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id.js (strict mode) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-id.js (default) + Classes may not have private methods. at (70, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-id.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-id.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (strict mode) + Classes may not have private methods. at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-dflt-ary-ptrn-rest-obj-prop-id.js (default) + Classes may not have private methods. 
at (76, 2) to (85, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-empty.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. 
at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-dflt-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-empty.js (strict mode) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-empty.js (default) + Classes may not have private methods. at (74, 2) to (77, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-arrow.js (strict mode) + Classes may not have private methods. at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-arrow.js (default) + Classes may not have private methods. 
at (75, 2) to (78, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-class.js (strict mode) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-class.js (default) + Classes may not have private methods. at (75, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-cover.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-cover.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-fn.js (strict mode) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-fn.js (default) + Classes may not have private methods. at (75, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-gen.js (strict mode) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-fn-name-gen.js (default) + Classes may not have private methods. at (76, 2) to (80, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-skipped.js (strict mode) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-init-skipped.js (default) + Classes may not have private methods. at (75, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-ary-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-ary-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-ary-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-ary-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (72, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-ary.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-ary.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id-init-skipped.js (strict mode) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id-init-skipped.js (default) + Classes may not have private methods. at (74, 2) to (94, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id-init.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id-init.js (default) + Classes may not have private methods. 
at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id-trailing-comma.js (strict mode) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id-trailing-comma.js (default) + Classes may not have private methods. at (69, 2) to (76, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id.js (strict mode) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-id.js (default) + Classes may not have private methods. at (69, 2) to (75, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-obj-init.js (strict mode) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-obj-init.js (default) + Classes may not have private methods. at (72, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-obj.js (strict mode) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-prop-obj.js (default) + Classes may not have private methods. at (70, 2) to (79, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-rest-getter.js (strict mode) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-rest-getter.js (default) + Classes may not have private methods. at (64, 2) to (74, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-rest-skip-non-enumerable.js (strict mode) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-rest-skip-non-enumerable.js (default) + Classes may not have private methods. at (65, 2) to (82, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-rest-val-obj.js (strict mode) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/dstr/private-meth-static-obj-ptrn-rest-val-obj.js (default) + Classes may not have private methods. at (63, 2) to (81, 3) +language/statements/class/elements/after-same-line-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 22) to (26, 48) +language/statements/class/elements/after-same-line-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 22) to (26, 48) +language/statements/class/elements/after-same-line-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. 
at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 22) to (65, 3) +language/statements/class/elements/after-same-line-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 22) to (62, 3) +language/statements/class/elements/after-same-line-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 22) to (62, 3) +language/statements/class/elements/after-same-line-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 22) to (62, 3) +language/statements/class/elements/after-same-line-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 22) to (62, 3) +language/statements/class/elements/after-same-line-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 21) to (26, 47) +language/statements/class/elements/after-same-line-method-private-method-usage.js (default) + Classes may not have private methods. 
at (26, 21) to (26, 47) +language/statements/class/elements/after-same-line-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 21) to (65, 3) +language/statements/class/elements/after-same-line-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 21) to (62, 3) +language/statements/class/elements/after-same-line-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 21) to (62, 3) +language/statements/class/elements/after-same-line-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. 
at (60, 21) to (62, 3) +language/statements/class/elements/after-same-line-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 21) to (62, 3) +language/statements/class/elements/after-same-line-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-async-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 35) to (26, 61) +language/statements/class/elements/after-same-line-static-async-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 35) to (26, 61) +language/statements/class/elements/after-same-line-static-async-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. 
at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 35) to (65, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 35) to (62, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 35) to (62, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 35) to (62, 3) +language/statements/class/elements/after-same-line-static-async-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 35) to (62, 3) +language/statements/class/elements/after-same-line-static-async-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-async-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-async-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-async-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-async-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 34) to (26, 60) +language/statements/class/elements/after-same-line-static-async-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 34) to (26, 60) +language/statements/class/elements/after-same-line-static-async-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 34) to (65, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 34) to (62, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 34) to (62, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 34) to (62, 3) +language/statements/class/elements/after-same-line-static-async-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 34) to (62, 3) +language/statements/class/elements/after-same-line-static-async-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-async-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-async-method-static-private-methods.js (strict mode) + Classes may not have private methods. 
at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-async-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-gen-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 29) to (26, 55) +language/statements/class/elements/after-same-line-static-gen-private-method-usage.js (default) + Classes may not have private methods. at (26, 29) to (26, 55) +language/statements/class/elements/after-same-line-static-gen-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-gen-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-gen-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. 
at (63, 29) to (65, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 29) to (62, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 29) to (62, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 29) to (62, 3) +language/statements/class/elements/after-same-line-static-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 29) to (62, 3) +language/statements/class/elements/after-same-line-static-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-gen-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 28) to (26, 54) +language/statements/class/elements/after-same-line-static-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 28) to (26, 54) +language/statements/class/elements/after-same-line-static-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 28) to (65, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 28) to (62, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 28) to (62, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 28) to (62, 3) +language/statements/class/elements/after-same-line-static-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 28) to (62, 3) +language/statements/class/elements/after-same-line-static-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/after-same-line-static-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/after-same-line-static-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/arrow-fnc-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/arrow-fnc-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-catch.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-catch.js (default) + Classes may not have private methods. 
at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-for-await-of-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (36, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (36, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (35, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next-yield-star-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (35, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method-static/yield-promise-reject-next.js (default) + Classes may not have private methods. at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method-static/yield-spread-arr-multiple.js (strict mode) + Classes may not have private methods. at (36, 4) to (39, 5) +language/statements/class/elements/async-gen-private-method-static/yield-spread-arr-multiple.js (default) + Classes may not have private methods. at (36, 4) to (39, 5) +language/statements/class/elements/async-gen-private-method-static/yield-spread-arr-single.js (strict mode) + Classes may not have private methods. at (34, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method-static/yield-spread-arr-single.js (default) + Classes may not have private methods. at (34, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method-static/yield-spread-obj.js (strict mode) + Classes may not have private methods. at (35, 4) to (42, 5) +language/statements/class/elements/async-gen-private-method-static/yield-spread-obj.js (default) + Classes may not have private methods. at (35, 4) to (42, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-async-next.js (strict mode) + Classes may not have private methods. at (139, 4) to (149, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-async-next.js (default) + Classes may not have private methods. at (139, 4) to (149, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-async-return.js (strict mode) + Classes may not have private methods. 
at (158, 4) to (163, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-async-return.js (default) + Classes may not have private methods. at (158, 4) to (163, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-async-throw.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-async-throw.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-expr-abrupt.js (strict mode) + Classes may not have private methods. at (39, 4) to (44, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-expr-abrupt.js (default) + Classes may not have private methods. at (39, 4) to (44, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-get-abrupt.js (strict mode) + Classes may not have private methods. at (59, 4) to (64, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-get-abrupt.js (default) + Classes may not have private methods. at (59, 4) to (64, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-number-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-object-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-string-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-null-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-null-sync-get-abrupt.js (default) + Classes may not have private methods. 
at (63, 4) to (68, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-abrupt.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-abrupt.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-boolean-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-boolean-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-null-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-null-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-number-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-number-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-string-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-string-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-symbol-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-symbol-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-undefined-throw.js (strict mode) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-returns-undefined-throw.js (default) + Classes may not have private methods. at (55, 4) to (60, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-undefined-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-async-undefined-sync-get-abrupt.js (default) + Classes may not have private methods. at (63, 4) to (68, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-get-abrupt.js (default) + Classes may not have private methods. 
at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-number-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-object-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-string-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-abrupt.js (strict mode) + Classes may not have private methods. at (52, 4) to (57, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-abrupt.js (default) + Classes may not have private methods. at (52, 4) to (57, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-boolean-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-boolean-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-null-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-null-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-number-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-number-throw.js (default) + Classes may not have private methods. 
at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-string-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-string-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-symbol-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-symbol-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-undefined-throw.js (strict mode) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-getiter-sync-returns-undefined-throw.js (default) + Classes may not have private methods. at (57, 4) to (62, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-call-done-get-abrupt.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-call-done-get-abrupt.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-call-returns-abrupt.js (strict mode) + Classes may not have private methods. at (50, 4) to (55, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-call-returns-abrupt.js (default) + Classes may not have private methods. at (50, 4) to (55, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-call-value-get-abrupt.js (strict mode) + Classes may not have private methods. at (60, 4) to (65, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-call-value-get-abrupt.js (default) + Classes may not have private methods. at (60, 4) to (65, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-get-abrupt.js (strict mode) + Classes may not have private methods. at (50, 4) to (55, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-get-abrupt.js (default) + Classes may not have private methods. at (50, 4) to (55, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-non-object-ignores-then.js (strict mode) + Classes may not have private methods. at (70, 4) to (75, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-non-object-ignores-then.js (default) + Classes may not have private methods. at (70, 4) to (75, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-null-throw.js (strict mode) + Classes may not have private methods. 
at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-null-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-number-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-object-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-string-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-undefined-throw.js (strict mode) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-not-callable-undefined-throw.js (default) + Classes may not have private methods. at (47, 4) to (52, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-get-abrupt.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-get-abrupt.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-boolean-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-boolean-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-null-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-null-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-number-fulfillpromise.js (strict mode) + Classes may not have private methods. 
at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-number-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-object-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-object-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-string-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-string-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-symbol-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-symbol-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-undefined-fulfillpromise.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-non-callable-undefined-fulfillpromise.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-returns-abrupt.js (strict mode) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-next-then-returns-abrupt.js (default) + Classes may not have private methods. at (74, 4) to (79, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-sync-next.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-sync-next.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-sync-return.js (strict mode) + Classes may not have private methods. at (138, 4) to (143, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-sync-return.js (default) + Classes may not have private methods. at (138, 4) to (143, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-sync-throw.js (strict mode) + Classes may not have private methods. at (136, 4) to (146, 5) +language/statements/class/elements/async-gen-private-method-static/yield-star-sync-throw.js (default) + Classes may not have private methods. at (136, 4) to (146, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-catch.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-catch.js (default) + Classes may not have private methods. 
at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-for-await-of-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-async-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (36, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-async-iterator.js (default) + Classes may not have private methods. at (32, 4) to (36, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-sync-iterator.js (strict mode) + Classes may not have private methods. at (32, 4) to (35, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next-yield-star-sync-iterator.js (default) + Classes may not have private methods. at (32, 4) to (35, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next.js (strict mode) + Classes may not have private methods. at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method/yield-promise-reject-next.js (default) + Classes may not have private methods. at (28, 4) to (32, 5) +language/statements/class/elements/async-gen-private-method/yield-spread-arr-multiple.js (strict mode) + Classes may not have private methods. at (36, 4) to (39, 5) +language/statements/class/elements/async-gen-private-method/yield-spread-arr-multiple.js (default) + Classes may not have private methods. at (36, 4) to (39, 5) +language/statements/class/elements/async-gen-private-method/yield-spread-arr-single.js (strict mode) + Classes may not have private methods. at (34, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method/yield-spread-arr-single.js (default) + Classes may not have private methods. at (34, 4) to (37, 5) +language/statements/class/elements/async-gen-private-method/yield-spread-obj.js (strict mode) + Classes may not have private methods. at (35, 4) to (42, 5) +language/statements/class/elements/async-gen-private-method/yield-spread-obj.js (default) + Classes may not have private methods. at (35, 4) to (42, 5) +language/statements/class/elements/async-gen-private-method/yield-star-async-next.js (strict mode) + Classes may not have private methods. at (139, 4) to (149, 5) +language/statements/class/elements/async-gen-private-method/yield-star-async-next.js (default) + Classes may not have private methods. at (139, 4) to (149, 5) +language/statements/class/elements/async-gen-private-method/yield-star-async-return.js (strict mode) + Classes may not have private methods. at (158, 4) to (163, 5) +language/statements/class/elements/async-gen-private-method/yield-star-async-return.js (default) + Classes may not have private methods. 
at (158, 4) to (163, 5) +language/statements/class/elements/async-gen-private-method/yield-star-async-throw.js (strict mode) + Classes may not have private methods. at (158, 4) to (168, 5) +language/statements/class/elements/async-gen-private-method/yield-star-async-throw.js (default) + Classes may not have private methods. at (158, 4) to (168, 5) +language/statements/class/elements/async-gen-private-method/yield-star-expr-abrupt.js (strict mode) + Classes may not have private methods. at (39, 4) to (44, 5) +language/statements/class/elements/async-gen-private-method/yield-star-expr-abrupt.js (default) + Classes may not have private methods. at (39, 4) to (44, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-get-abrupt.js (strict mode) + Classes may not have private methods. at (59, 4) to (64, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-get-abrupt.js (default) + Classes may not have private methods. at (59, 4) to (64, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-boolean-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-boolean-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-number-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-number-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-object-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-object-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-string-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-string-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-symbol-throw.js (strict mode) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-not-callable-symbol-throw.js (default) + Classes may not have private methods. at (58, 4) to (63, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-null-sync-get-abrupt.js (strict mode) + Classes may not have private methods. at (63, 4) to (68, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-null-sync-get-abrupt.js (default) + Classes may not have private methods. at (63, 4) to (68, 5) +language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-abrupt.js (strict mode) + Classes may not have private methods. 
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-abrupt.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-boolean-throw.js (strict mode)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-boolean-throw.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-null-throw.js (strict mode)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-null-throw.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-number-throw.js (strict mode)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-number-throw.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-string-throw.js (strict mode)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-string-throw.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-symbol-throw.js (strict mode)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-symbol-throw.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-undefined-throw.js (strict mode)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-returns-undefined-throw.js (default)
+ Classes may not have private methods. at (55, 4) to (60, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-undefined-sync-get-abrupt.js (strict mode)
+ Classes may not have private methods. at (63, 4) to (68, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-async-undefined-sync-get-abrupt.js (default)
+ Classes may not have private methods. at (63, 4) to (68, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-get-abrupt.js (strict mode)
+ Classes may not have private methods. at (58, 4) to (63, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-get-abrupt.js (default)
+ Classes may not have private methods. at (58, 4) to (63, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-boolean-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-boolean-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-number-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-number-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-object-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-object-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-string-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-string-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-symbol-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-not-callable-symbol-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-abrupt.js (strict mode)
+ Classes may not have private methods. at (52, 4) to (57, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-abrupt.js (default)
+ Classes may not have private methods. at (52, 4) to (57, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-boolean-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-boolean-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-null-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-null-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-number-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-number-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-string-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-string-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-symbol-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-symbol-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-undefined-throw.js (strict mode)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-getiter-sync-returns-undefined-throw.js (default)
+ Classes may not have private methods. at (57, 4) to (62, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-call-done-get-abrupt.js (strict mode)
+ Classes may not have private methods. at (58, 4) to (63, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-call-done-get-abrupt.js (default)
+ Classes may not have private methods. at (58, 4) to (63, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-call-returns-abrupt.js (strict mode)
+ Classes may not have private methods. at (50, 4) to (55, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-call-returns-abrupt.js (default)
+ Classes may not have private methods. at (50, 4) to (55, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-call-value-get-abrupt.js (strict mode)
+ Classes may not have private methods. at (60, 4) to (65, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-call-value-get-abrupt.js (default)
+ Classes may not have private methods. at (60, 4) to (65, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-get-abrupt.js (strict mode)
+ Classes may not have private methods. at (50, 4) to (55, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-get-abrupt.js (default)
+ Classes may not have private methods. at (50, 4) to (55, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-non-object-ignores-then.js (strict mode)
+ Classes may not have private methods. at (70, 4) to (75, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-non-object-ignores-then.js (default)
+ Classes may not have private methods. at (70, 4) to (75, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-boolean-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-boolean-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-null-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-null-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-number-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-number-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-object-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-object-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-string-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-string-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-symbol-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-symbol-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-undefined-throw.js (strict mode)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-not-callable-undefined-throw.js (default)
+ Classes may not have private methods. at (47, 4) to (52, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-get-abrupt.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-get-abrupt.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-boolean-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-boolean-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-null-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-null-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-number-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-number-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-object-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-object-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-string-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-string-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-symbol-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-symbol-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-undefined-fulfillpromise.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-non-callable-undefined-fulfillpromise.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-returns-abrupt.js (strict mode)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-next-then-returns-abrupt.js (default)
+ Classes may not have private methods. at (74, 4) to (79, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-sync-next.js (strict mode)
+ Classes may not have private methods. at (158, 4) to (168, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-sync-next.js (default)
+ Classes may not have private methods. at (158, 4) to (168, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-sync-return.js (strict mode)
+ Classes may not have private methods. at (138, 4) to (143, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-sync-return.js (default)
+ Classes may not have private methods. at (138, 4) to (143, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-sync-throw.js (strict mode)
+ Classes may not have private methods. at (136, 4) to (146, 5)
+language/statements/class/elements/async-gen-private-method/yield-star-sync-throw.js (default)
+ Classes may not have private methods. at (136, 4) to (146, 5)
+language/statements/class/elements/comp-name-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/comp-name-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/equality-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/equality-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/fields-asi-same-line-2.js (strict mode)
+ Missing parse error
+language/statements/class/elements/fields-asi-same-line-2.js (default)
+ Missing parse error
+language/statements/class/elements/gen-private-method-static/yield-spread-arr-multiple.js (strict mode)
+ Classes may not have private methods. at (35, 4) to (38, 5)
+language/statements/class/elements/gen-private-method-static/yield-spread-arr-multiple.js (default)
+ Classes may not have private methods. at (35, 4) to (38, 5)
+language/statements/class/elements/gen-private-method-static/yield-spread-arr-single.js (strict mode)
+ Classes may not have private methods. at (32, 4) to (35, 5)
+language/statements/class/elements/gen-private-method-static/yield-spread-arr-single.js (default)
+ Classes may not have private methods. at (32, 4) to (35, 5)
+language/statements/class/elements/gen-private-method-static/yield-spread-obj.js (strict mode)
+ Classes may not have private methods. at (34, 4) to (41, 5)
+language/statements/class/elements/gen-private-method-static/yield-spread-obj.js (default)
+ Classes may not have private methods. at (34, 4) to (41, 5)
+language/statements/class/elements/gen-private-method/yield-spread-arr-multiple.js (strict mode)
+ Classes may not have private methods. at (35, 4) to (38, 5)
+language/statements/class/elements/gen-private-method/yield-spread-arr-multiple.js (default)
+ Classes may not have private methods. at (35, 4) to (38, 5)
+language/statements/class/elements/gen-private-method/yield-spread-arr-single.js (strict mode)
+ Classes may not have private methods. at (32, 4) to (35, 5)
+language/statements/class/elements/gen-private-method/yield-spread-arr-single.js (default)
+ Classes may not have private methods. at (32, 4) to (35, 5)
+language/statements/class/elements/gen-private-method/yield-spread-obj.js (strict mode)
+ Classes may not have private methods. at (34, 4) to (41, 5)
+language/statements/class/elements/gen-private-method/yield-spread-obj.js (default)
+ Classes may not have private methods. at (34, 4) to (41, 5)
+language/statements/class/elements/literal-name-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/literal-name-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/multiple-definitions-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (28, 2) to (28, 28)
+language/statements/class/elements/multiple-definitions-private-method-usage.js (default)
+ Classes may not have private methods. at (28, 2) to (28, 28)
+language/statements/class/elements/multiple-definitions-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (61, 2) to (63, 3)
+language/statements/class/elements/multiple-definitions-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (61, 2) to (63, 3)
+language/statements/class/elements/multiple-definitions-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (61, 2) to (63, 3)
+language/statements/class/elements/multiple-definitions-rs-private-method.js (default)
+ Classes may not have private methods. at (61, 2) to (63, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (65, 2) to (67, 3)
+language/statements/class/elements/multiple-definitions-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/multiple-definitions-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/multiple-definitions-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/multiple-definitions-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/multiple-definitions-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (33, 2) to (36, 3)
+language/statements/class/elements/multiple-definitions-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (33, 2) to (36, 3)
+language/statements/class/elements/multiple-definitions-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (33, 2) to (35, 3)
+language/statements/class/elements/multiple-definitions-static-private-methods.js (default)
+ Classes may not have private methods. at (33, 2) to (35, 3)
+language/statements/class/elements/multiple-stacked-definitions-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/multiple-stacked-definitions-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/multiple-stacked-definitions-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-private-method.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/multiple-stacked-definitions-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/multiple-stacked-definitions-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (31, 2) to (34, 3)
+language/statements/class/elements/multiple-stacked-definitions-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (31, 2) to (34, 3)
+language/statements/class/elements/multiple-stacked-definitions-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (31, 2) to (33, 3)
+language/statements/class/elements/multiple-stacked-definitions-static-private-methods.js (default)
+ Classes may not have private methods. at (31, 2) to (33, 3)
+language/statements/class/elements/new-no-sc-line-method-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/new-no-sc-line-method-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/new-no-sc-line-method-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-private-method.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-no-sc-line-method-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-no-sc-line-method-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/statements/class/elements/new-no-sc-line-method-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/statements/class/elements/new-no-sc-line-method-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/statements/class/elements/new-no-sc-line-method-static-private-methods.js (default)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/statements/class/elements/new-sc-line-gen-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/new-sc-line-gen-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/new-sc-line-gen-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-gen-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-gen-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-gen-rs-private-method.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-gen-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-gen-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/statements/class/elements/new-sc-line-gen-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/statements/class/elements/new-sc-line-gen-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/statements/class/elements/new-sc-line-gen-static-private-methods.js (default)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/statements/class/elements/new-sc-line-method-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/new-sc-line-method-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/new-sc-line-method-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-method-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-method-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-method-rs-private-method.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 2) to (65, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-method-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 2) to (62, 3)
+language/statements/class/elements/new-sc-line-method-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/statements/class/elements/new-sc-line-method-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (30, 2) to (33, 3)
+language/statements/class/elements/new-sc-line-method-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/statements/class/elements/new-sc-line-method-static-private-methods.js (default)
+ Classes may not have private methods. at (30, 2) to (32, 3)
+language/statements/class/elements/private-arrow-fnc-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/private-arrow-fnc-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/private-literal-name-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/private-literal-name-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/private-method-brand-check-multiple-evaluations-of-class.js (strict mode)
+ Classes may not have private methods. at (24, 4) to (24, 30)
+language/statements/class/elements/private-method-brand-check-multiple-evaluations-of-class.js (default)
+ Classes may not have private methods. at (24, 4) to (24, 30)
+language/statements/class/elements/private-method-brand-check-super-class.js (strict mode)
+ Classes may not have private methods. at (26, 2) to (26, 37)
+language/statements/class/elements/private-method-brand-check-super-class.js (default)
+ Classes may not have private methods. at (26, 2) to (26, 37)
+language/statements/class/elements/private-method-brand-check.js (strict mode)
+ Classes may not have private methods. at (31, 2) to (31, 28)
+language/statements/class/elements/private-method-brand-check.js (default)
+ Classes may not have private methods. at (31, 2) to (31, 28)
+language/statements/class/elements/private-methods/prod-private-async-generator.js (strict mode)
+ Classes may not have private methods. at (88, 2) to (88, 29)
+language/statements/class/elements/private-methods/prod-private-async-generator.js (default)
+ Classes may not have private methods. at (88, 2) to (88, 29)
+language/statements/class/elements/private-methods/prod-private-async-method.js (strict mode)
+ Classes may not have private methods. at (88, 2) to (88, 27)
+language/statements/class/elements/private-methods/prod-private-async-method.js (default)
+ Classes may not have private methods. at (88, 2) to (88, 27)
+language/statements/class/elements/private-methods/prod-private-generator.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (86, 23)
+language/statements/class/elements/private-methods/prod-private-generator.js (default)
+ Classes may not have private methods. at (86, 2) to (86, 23)
+language/statements/class/elements/private-methods/prod-private-method-initialize-order.js (strict mode)
+ Classes may not have private methods. at (103, 2) to (103, 21)
+language/statements/class/elements/private-methods/prod-private-method-initialize-order.js (default)
+ Classes may not have private methods. at (103, 2) to (103, 21)
+language/statements/class/elements/private-methods/prod-private-method.js (strict mode)
+ Classes may not have private methods. at (86, 2) to (86, 21)
+language/statements/class/elements/private-methods/prod-private-method.js (default)
+ Classes may not have private methods. at (86, 2) to (86, 21)
+language/statements/class/elements/private-ternary-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/private-ternary-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/private-typeof-init-err-contains-arguments.js (strict mode)
+ Missing parse error
+language/statements/class/elements/private-typeof-init-err-contains-arguments.js (default)
+ Missing parse error
+language/statements/class/elements/privatemethods-on-proxy.js (strict mode)
+ Classes may not have private methods. at (42, 2) to (44, 3)
+language/statements/class/elements/privatemethods-on-proxy.js (default)
+ Classes may not have private methods. at (42, 2) to (44, 3)
+language/statements/class/elements/regular-definitions-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (25, 2) to (25, 28)
+language/statements/class/elements/regular-definitions-private-method-usage.js (default)
+ Classes may not have private methods. at (25, 2) to (25, 28)
+language/statements/class/elements/regular-definitions-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (60, 3)
+language/statements/class/elements/regular-definitions-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (58, 2) to (60, 3)
+language/statements/class/elements/regular-definitions-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (58, 2) to (60, 3)
+language/statements/class/elements/regular-definitions-rs-private-method.js (default)
+ Classes may not have private methods. at (58, 2) to (60, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (62, 2) to (64, 3)
+language/statements/class/elements/regular-definitions-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/regular-definitions-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/regular-definitions-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/regular-definitions-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/regular-definitions-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (28, 2) to (31, 3)
+language/statements/class/elements/regular-definitions-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (28, 2) to (31, 3)
+language/statements/class/elements/regular-definitions-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (28, 2) to (30, 3)
+language/statements/class/elements/regular-definitions-static-private-methods.js (default)
+ Classes may not have private methods. at (28, 2) to (30, 3)
+language/statements/class/elements/same-line-async-gen-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 28) to (26, 54)
+language/statements/class/elements/same-line-async-gen-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 28) to (26, 54)
+language/statements/class/elements/same-line-async-gen-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-gen-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-gen-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-gen-rs-private-method.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 28) to (65, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (60, 28) to (62, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (60, 28) to (62, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (60, 28) to (62, 3)
+language/statements/class/elements/same-line-async-gen-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 28) to (62, 3)
+language/statements/class/elements/same-line-async-gen-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (29, 2) to (32, 3)
+language/statements/class/elements/same-line-async-gen-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (29, 2) to (32, 3)
+language/statements/class/elements/same-line-async-gen-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (29, 2) to (31, 3)
+language/statements/class/elements/same-line-async-gen-static-private-methods.js (default)
+ Classes may not have private methods. at (29, 2) to (31, 3)
+language/statements/class/elements/same-line-async-method-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 27) to (26, 53)
+language/statements/class/elements/same-line-async-method-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 27) to (26, 53)
+language/statements/class/elements/same-line-async-method-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-method-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-method-rs-private-method.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-method-rs-private-method.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-async-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-generator-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (63, 27) to (65, 3)
+language/statements/class/elements/same-line-async-method-rs-static-method-privatename-identifier-alt.js (strict mode)
+ Classes may not have private methods. at (60, 27) to (62, 3)
+language/statements/class/elements/same-line-async-method-rs-static-method-privatename-identifier-alt.js (default)
+ Classes may not have private methods. at (60, 27) to (62, 3)
+language/statements/class/elements/same-line-async-method-rs-static-method-privatename-identifier.js (strict mode)
+ Classes may not have private methods. at (60, 27) to (62, 3)
+language/statements/class/elements/same-line-async-method-rs-static-method-privatename-identifier.js (default)
+ Classes may not have private methods. at (60, 27) to (62, 3)
+language/statements/class/elements/same-line-async-method-static-private-methods-with-fields.js (strict mode)
+ Classes may not have private methods. at (29, 2) to (32, 3)
+language/statements/class/elements/same-line-async-method-static-private-methods-with-fields.js (default)
+ Classes may not have private methods. at (29, 2) to (32, 3)
+language/statements/class/elements/same-line-async-method-static-private-methods.js (strict mode)
+ Classes may not have private methods. at (29, 2) to (31, 3)
+language/statements/class/elements/same-line-async-method-static-private-methods.js (default)
+ Classes may not have private methods. at (29, 2) to (31, 3)
+language/statements/class/elements/same-line-gen-private-method-usage.js (strict mode)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/same-line-gen-private-method-usage.js (default)
+ Classes may not have private methods. at (26, 2) to (26, 28)
+language/statements/class/elements/same-line-gen-rs-private-method-alt.js (strict mode)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-gen-rs-private-method-alt.js (default)
+ Classes may not have private methods. at (59, 2) to (61, 3)
+language/statements/class/elements/same-line-gen-rs-private-method.js (strict mode)
at (59, 2) to (61, 3) +language/statements/class/elements/same-line-gen-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-gen-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-gen-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-gen-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-gen-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-gen-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/same-line-gen-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/same-line-gen-static-private-methods.js (strict mode) + Classes may not have private methods. 
at (29, 2) to (31, 3) +language/statements/class/elements/same-line-gen-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/same-line-method-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 2) to (26, 28) +language/statements/class/elements/same-line-method-private-method-usage.js (default) + Classes may not have private methods. at (26, 2) to (26, 28) +language/statements/class/elements/same-line-method-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/same-line-method-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/same-line-method-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/same-line-method-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 2) to (65, 3) +language/statements/class/elements/same-line-method-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (60, 2) to (62, 3) +language/statements/class/elements/same-line-method-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-method-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-method-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 2) to (62, 3) +language/statements/class/elements/same-line-method-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/same-line-method-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (29, 2) to (32, 3) +language/statements/class/elements/same-line-method-static-private-methods.js (strict mode) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/same-line-method-static-private-methods.js (default) + Classes may not have private methods. at (29, 2) to (31, 3) +language/statements/class/elements/static-comp-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/static-comp-name-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/static-literal-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/static-literal-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/static-private-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/static-private-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/static-string-literal-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/static-string-literal-name-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/string-literal-name-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/string-literal-name-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-ctor-super-no-heritage.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-ctor-super-no-heritage.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwj-error.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwj-error.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwnj-error.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-field-identifier-invalid-zwnj-error.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwj-error.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwj-error.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwnj-error.js 
(strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-privatename-identifier-invalid-zwnj-error.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-gen.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-gen.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-meth.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-async-meth.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-gen.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-gen.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-get.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-get.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-set.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-special-meth-ctor-set.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-async-gen-meth-prototype.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-async-gen-meth-prototype.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-async-meth-prototype.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-async-meth-prototype.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-gen-meth-prototype.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-gen-meth-prototype.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-get-meth-prototype.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-get-meth-prototype.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-meth-prototype.js (strict mode) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-meth-prototype.js (default) + Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-set-meth-prototype.js (strict mode) Missing parse error +language/statements/class/elements/syntax/early-errors/grammar-static-set-meth-prototype.js (default) + Missing parse error +language/statements/class/elements/syntax/valid/grammar-privatemeth-duplicate-meth-nestedclassmeth.js (strict mode) + Classes may not have private methods. at (21, 6) to (21, 13) +language/statements/class/elements/syntax/valid/grammar-privatemeth-duplicate-meth-nestedclassmeth.js (default) + Classes may not have private methods. at (21, 6) to (21, 13) +language/statements/class/elements/syntax/valid/grammar-static-private-async-gen-meth-prototype.js (strict mode) + Classes may not have private methods. 
at (19, 2) to (19, 32) +language/statements/class/elements/syntax/valid/grammar-static-private-async-gen-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 32) +language/statements/class/elements/syntax/valid/grammar-static-private-async-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 30) +language/statements/class/elements/syntax/valid/grammar-static-private-async-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 30) +language/statements/class/elements/syntax/valid/grammar-static-private-gen-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 26) +language/statements/class/elements/syntax/valid/grammar-static-private-gen-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 26) +language/statements/class/elements/syntax/valid/grammar-static-private-meth-prototype.js (strict mode) + Classes may not have private methods. at (19, 2) to (19, 24) +language/statements/class/elements/syntax/valid/grammar-static-private-meth-prototype.js (default) + Classes may not have private methods. at (19, 2) to (19, 24) +language/statements/class/elements/ternary-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/ternary-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/typeof-init-err-contains-arguments.js (strict mode) + Missing parse error +language/statements/class/elements/typeof-init-err-contains-arguments.js (default) + Missing parse error +language/statements/class/elements/wrapped-in-sc-private-method-usage.js (strict mode) + Classes may not have private methods. at (26, 8) to (26, 34) +language/statements/class/elements/wrapped-in-sc-private-method-usage.js (default) + Classes may not have private methods. at (26, 8) to (26, 34) +language/statements/class/elements/wrapped-in-sc-rs-private-method-alt.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/wrapped-in-sc-rs-private-method-alt.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/wrapped-in-sc-rs-private-method.js (strict mode) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/wrapped-in-sc-rs-private-method.js (default) + Classes may not have private methods. at (59, 2) to (61, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. 
at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-async-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-generator-method-privatename-identifier.js (default) + Classes may not have private methods. at (63, 8) to (65, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier-alt.js (strict mode) + Classes may not have private methods. at (60, 8) to (62, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier-alt.js (default) + Classes may not have private methods. at (60, 8) to (62, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier.js (strict mode) + Classes may not have private methods. at (60, 8) to (62, 3) +language/statements/class/elements/wrapped-in-sc-rs-static-method-privatename-identifier.js (default) + Classes may not have private methods. at (60, 8) to (62, 3) +language/statements/class/elements/wrapped-in-sc-static-private-methods-with-fields.js (strict mode) + Classes may not have private methods. at (30, 2) to (33, 3) +language/statements/class/elements/wrapped-in-sc-static-private-methods-with-fields.js (default) + Classes may not have private methods. at (30, 2) to (33, 3) +language/statements/class/elements/wrapped-in-sc-static-private-methods.js (strict mode) + Classes may not have private methods. at (30, 2) to (32, 3) +language/statements/class/elements/wrapped-in-sc-static-private-methods.js (default) + Classes may not have private methods. 
at (30, 2) to (32, 3) language/statements/class/syntax/early-errors/class-definition-evaluation-block-duplicate-binding.js (strict mode) Missing parse error language/statements/class/syntax/early-errors/class-definition-evaluation-block-duplicate-binding.js (default) @@ -3738,308 +10760,298 @@ language/statements/labeled/value-await-module.js (default) Missing parse error language/statements/let/redeclaration-error-from-within-strict-mode-function.js (default) Missing parse error -language/statements/let/syntax/attempt-to-redeclare-let-binding-with-function-declaration.js (strict mode) - Missing parse error -language/statements/let/syntax/attempt-to-redeclare-let-binding-with-function-declaration.js (default) - Missing parse error -language/statements/let/syntax/attempt-to-redeclare-let-binding-with-var.js (strict mode) - Missing parse error -language/statements/let/syntax/attempt-to-redeclare-let-binding-with-var.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) 
+language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-let.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-function-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-function-declaration-attempt-to-redeclare-with-var-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-class.js (default) Missing parse error 
-language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-let.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/async-generator-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/async-generator-declaration-attempt-to-redeclare-with-var-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-async-generator.js (strict mode) 
Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-let.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/class-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/class-declaration-attempt-to-redeclare-with-var-declaration.js (default) 
+language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-let.js (default) Missing parse error 
-language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/const-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/const-declaration-attempt-to-redeclare-with-var-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-generator.js (default) Missing parse error 
-language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-let.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/function-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/function-declaration-attempt-to-redeclare-with-var-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error 
-language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-let.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/generator-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/generator-declaration-attempt-to-redeclare-with-var-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-class-declaration.js (default) 
+language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-let.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-var.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-var-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/let-name-redeclaration-attempt-with-var.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/let-declaration-attempt-to-redeclare-with-var-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-async-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-async-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-async-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-async-generator.js (default) Missing parse error 
-language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-async-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-class.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-class-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-class.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-class-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-const.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-const-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-const.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-const-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-function.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-function-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-function.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-function-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-generator.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-generator-declaration.js (strict mode) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-generator.js (default) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-generator-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-let.js (strict mode) Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-let-declaration.js (strict mode) - Missing parse error -language/statements/switch/syntax/redeclaration/var-declaration-attempt-to-redeclare-with-let-declaration.js (default) +language/statements/switch/syntax/redeclaration/var-name-redeclaration-attempt-with-let.js (default) Missing parse error language/statements/try/early-catch-duplicates.js (strict mode) Missing parse error language/statements/try/early-catch-duplicates.js (default) Missing parse error -language/statements/try/early-catch-lex.js (strict mode) - Missing parse error -language/statements/try/early-catch-lex.js (default) +language/statements/try/early-catch-function.js (strict mode) Missing parse error -language/statements/try/early-catch-var.js (strict mode) +language/statements/try/early-catch-function.js (default) Missing parse error -language/statements/try/early-catch-var.js (default) - Missing parse error -language/white-space/mongolian-vowel-separator.js (strict mode) +language/statements/try/early-catch-lex.js (strict mode) Missing parse error -language/white-space/mongolian-vowel-separator.js (default) +language/statements/try/early-catch-lex.js (default) Missing parse error === 
Summary ===
-Passed: 56968 (96.60%)
-Failed: 2007 (3.40%)
+Passed: 64222 (92.10%)
+Failed: 5512 (7.90%)
 Features:
- Array.prototype.values: 4/6 (66.67%)
- ArrayBuffer: 136/184 (73.91%)
- BigInt: 905/1965 (46.06%)
- DataView: 164/212 (77.36%)
- DataView.prototype.setUint8: 72/100 (72.00%)
- Reflect: 304/376 (80.85%)
- Reflect.set: 14/28 (50.00%)
- SharedArrayBuffer: 390/398 (97.99%)
- Symbol: 1195/1383 (86.41%)
- Symbol.iterator: 2618/2628 (99.62%)
- Symbol.species: 448/468 (95.73%)
- Symbol.toPrimitive: 196/332 (59.04%)
- TypedArray: 2848/3518 (80.96%)
- arrow-function: 142/146 (97.26%)
- async-functions: 165/257 (64.20%)
- async-iteration: 7060/7160 (98.60%)
- class: 396/854 (46.37%)
- class-fields-private: 192/248 (77.42%)
- class-fields-public: 198/626 (31.63%)
- computed-property-names: 16/292 (5.48%)
- cross-realm: 231/235 (98.30%)
- generators: 5341/5479 (97.48%)
- let: 191/195 (97.95%)
+ Proxy: 661/663 (99.70%)
+ Symbol.asyncIterator: 716/1068 (67.04%)
+ Symbol.iterator: 2722/3290 (82.74%)
+ arrow-function: 108/118 (91.53%)
+ async-functions: 719/907 (79.27%)
+ async-iteration: 7688/9388 (81.89%)
+ class: 3562/7578 (47.00%)
+ class-fields-private: 1572/1736 (90.55%)
+ class-fields-public: 2432/3388 (71.78%)
+ class-methods-private: 1098/3082 (35.63%)
+ class-static-fields-private: 568/640 (88.75%)
+ class-static-fields-public: 42/54 (77.78%)
+ class-static-methods-private: 308/2756 (11.18%)
+ computed-property-names: 312/318 (98.11%)
+ default-parameters: 3436/4380 (78.45%)
+ destructuring-binding: 10835/12723 (85.16%)
+ dynamic-import: 1034/1036 (99.81%)
+ export-star-as-namespace-from-module: 28/36 (77.78%)
+ generators: 6021/7653 (78.68%)
+ import.meta: 34/46 (73.91%)
+ let: 131/135 (97.04%)
+ new.target: 92/94 (97.87%)
 numeric-separator-literal: 202/204 (99.02%)
- u180e: 46/48 (95.83%)
+ object-rest: 530/674 (78.64%)
+ object-spread: 216/232 (93.10%)
+ regexp-named-groups: 50/162 (30.86%)
+ regexp-unicode-property-escapes: 792/1076 (73.61%)
*) + raw: string; + } | T_STRING_TYPE | T_VOID_TYPE @@ -151,12 +173,20 @@ and bool_or_boolean = and number_type = | BINARY | LEGACY_OCTAL + | LEGACY_NON_OCTAL (* NonOctalDecimalIntegerLiteral in Annex B *) | OCTAL | NORMAL +and bigint_type = + | BIG_BINARY + | BIG_OCTAL + | BIG_NORMAL + and template_part = { - cooked: string; (* string after processing special chars *) - raw: string; (* string as specified in source *) + cooked: string; + (* string after processing special chars *) + raw: string; + (* string as specified in source *) literal: string; (* same as raw, plus characters like ` and ${ *) } @@ -165,6 +195,7 @@ and template_part = { (*****************************************************************************) let token_to_string = function | T_NUMBER _ -> "T_NUMBER" + | T_BIGINT _ -> "T_BIGINT" | T_STRING _ -> "T_STRING" | T_TEMPLATE_PART _ -> "T_TEMPLATE_PART" | T_IDENTIFIER _ -> "T_IDENTIFIER" @@ -182,7 +213,7 @@ let token_to_string = function | T_WHILE -> "T_WHILE" | T_WITH -> "T_WITH" | T_CONST -> "T_CONST" - | T_LET -> "T_LET" + | T_LET -> "T_LET" | T_NULL -> "T_NULL" | T_FALSE -> "T_FALSE" | T_TRUE -> "T_TRUE" @@ -203,9 +234,9 @@ let token_to_string = function | T_TYPEOF -> "T_TYPEOF" | T_VOID -> "T_VOID" | T_ENUM -> "T_ENUM" - | T_EXPORT -> "T_EXPORT" + | T_EXPORT -> "T_EXPORT" | T_IMPORT -> "T_IMPORT" - | T_SUPER -> "T_SUPER" + | T_SUPER -> "T_SUPER" | T_IMPLEMENTS -> "T_IMPLEMENTS" | T_INTERFACE -> "T_INTERFACE" | T_PACKAGE -> "T_PACKAGE" @@ -290,12 +321,15 @@ let token_to_string = function | T_EMPTY_TYPE -> "T_EMPTY_TYPE" | T_BOOLEAN_TYPE _ -> "T_BOOLEAN_TYPE" | T_NUMBER_TYPE -> "T_NUMBER_TYPE" + | T_BIGINT_TYPE -> "T_BIGINT_TYPE" | T_NUMBER_SINGLETON_TYPE _ -> "T_NUMBER_SINGLETON_TYPE" + | T_BIGINT_SINGLETON_TYPE _ -> "T_BIGINT_SINGLETON_TYPE" | T_STRING_TYPE -> "T_STRING_TYPE" | T_VOID_TYPE -> "T_VOID_TYPE" let value_of_token = function | T_NUMBER { raw; _ } -> raw + | T_BIGINT { raw; _ } -> raw | T_STRING (_, _, raw, _) -> raw | T_TEMPLATE_PART (_, { literal; _ }, _) -> literal | T_IDENTIFIER { raw; _ } -> raw @@ -410,15 +444,51 @@ let value_of_token = function | T_BIT_NOT -> "~" | T_INCR -> "++" | T_DECR -> "--" + (* Extra tokens *) | T_ERROR raw -> raw | T_EOF -> "" | T_JSX_IDENTIFIER { raw } -> raw | T_JSX_TEXT (_, _, raw) -> raw + (* Type primitives *) | T_ANY_TYPE -> "any" | T_MIXED_TYPE -> "mixed" | T_EMPTY_TYPE -> "empty" - | T_BOOLEAN_TYPE kind -> begin match kind with BOOL -> "bool" | BOOLEAN -> "boolean" end + | T_BOOLEAN_TYPE kind -> + begin + match kind with + | BOOL -> "bool" + | BOOLEAN -> "boolean" + end | T_NUMBER_TYPE -> "number" + | T_BIGINT_TYPE -> "bigint" | T_NUMBER_SINGLETON_TYPE { raw; _ } -> raw + | T_BIGINT_SINGLETON_TYPE { raw; _ } -> raw | T_STRING_TYPE -> "string" | T_VOID_TYPE -> "void" + +let quote_token_value value = Printf.sprintf "token `%s`" value + +let explanation_of_token ?(use_article = false) token = + let (value, article) = + match token with + | T_NUMBER_SINGLETON_TYPE _ + | T_NUMBER _ -> + ("number", "a") + | T_BIGINT_SINGLETON_TYPE _ + | T_BIGINT _ -> + ("bigint", "a") + | T_JSX_TEXT _ + | T_STRING _ -> + ("string", "a") + | T_TEMPLATE_PART _ -> ("template literal part", "a") + | T_JSX_IDENTIFIER _ + | T_IDENTIFIER _ -> + ("identifier", "an") + | T_REGEXP _ -> ("regexp", "a") + | T_EOF -> ("end of input", "the") + | _ -> (quote_token_value (value_of_token token), "the") + in + if use_article then + article ^ " " ^ value + else + value diff --git a/src/parser/token_translator.ml b/src/parser/token_translator.ml index 
ac1e937976e..c0fdbc50958 100644 --- a/src/parser/token_translator.ml +++ b/src/parser/token_translator.ml @@ -1,50 +1,60 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - -module Translate (Impl : Translator_intf.S) : (sig +module Translate (Impl : Translator_intf.S) : sig type t - val token: Parser_env.token_sink_result -> t - val token_list: Parser_env.token_sink_result list -> t -end with type t = Impl.t) = struct - type t = Impl.t - let token { Parser_env.token_loc; token; token_context } = - let open Loc in + val token : Offset_utils.t -> Parser_env.token_sink_result -> t - Impl.obj [ - ("type", Impl.string (Token.token_to_string token)); - ("context", Impl.string Parser_env.Lex_mode.( - match token_context with - | NORMAL -> "normal" - | TYPE -> "type" - | JSX_TAG -> "jsxTag" - | JSX_CHILD -> "jsxChild" - | TEMPLATE -> "template" - | REGEXP -> "regexp" - )); - ("loc", Impl.obj [ - ("start", Impl.obj [ - ("line", Impl.number (float token_loc.start.line)); - ("column", Impl.number (float token_loc.start.column)); - ]); - ("end", Impl.obj [ - ("line", Impl.number (float token_loc._end.line)); - ("column", Impl.number (float token_loc._end.column)); - ]); - ]); - ("range", Impl.array [ - Impl.number (float token_loc.start.offset); - Impl.number (float token_loc._end.offset); - ]); - ("value", Impl.string (Token.value_of_token token)); - ] + val token_list : Offset_utils.t -> Parser_env.token_sink_result list -> t +end +with type t = Impl.t = struct + type t = Impl.t - let token_list tokens = - Impl.array (List.rev_map token tokens |> List.rev) + let token offset_table { Parser_env.token_loc; token; token_context } = + Loc.( + Impl.obj + [ + ("type", Impl.string (Token.token_to_string token)); + ( "context", + Impl.string + Parser_env.Lex_mode.( + match token_context with + | NORMAL -> "normal" + | TYPE -> "type" + | JSX_TAG -> "jsxTag" + | JSX_CHILD -> "jsxChild" + | TEMPLATE -> "template" + | REGEXP -> "regexp") ); + ( "loc", + Impl.obj + [ + ( "start", + Impl.obj + [ + ("line", Impl.number (float token_loc.start.line)); + ("column", Impl.number (float token_loc.start.column)); + ] ); + ( "end", + Impl.obj + [ + ("line", Impl.number (float token_loc._end.line)); + ("column", Impl.number (float token_loc._end.column)); + ] ); + ] ); + ( "range", + Impl.array + [ + Impl.number (float (Offset_utils.offset offset_table token_loc.start)); + Impl.number (float (Offset_utils.offset offset_table token_loc._end)); + ] ); + ("value", Impl.string (Token.value_of_token token)); + ]) + let token_list offset_table tokens = + Impl.array (List.rev_map (token offset_table) tokens |> List.rev) end diff --git a/src/parser/translator_intf.ml b/src/parser/translator_intf.ml index fdc0124f7e5..f29e53990fb 100644 --- a/src/parser/translator_intf.ml +++ b/src/parser/translator_intf.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -7,11 +7,20 @@ module type S = sig type t - val string: string -> t - val bool: bool -> t - val obj: (string * t) list -> t - val array: t list -> t - val number: float -> t - val null: t - val regexp: Loc.t -> string -> string -> t + + val string : string -> t + + val bool : bool -> t + + val obj : (string * t) list -> t + + val array : t list -> t + + val number : float -> t + + val int : int -> t + + val null : t + + val regexp : Loc.t -> string -> string -> t end diff --git a/src/parser/type_parser.ml b/src/parser/type_parser.ml index f49b46e42e6..8b06de75e3f 100644 --- a/src/parser/type_parser.ml +++ b/src/parser/type_parser.ml @@ -1,60 +1,68 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Token open Parser_env open Flow_ast open Parser_common -module Error = Parse_error module type TYPE = sig val _type : env -> (Loc.t, Loc.t) Ast.Type.t - val type_identifier : env -> Loc.t * string + + val type_identifier : env -> (Loc.t, Loc.t) Ast.Identifier.t + val type_parameter_declaration : env -> (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option - val type_parameter_declaration_with_defaults : env -> (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option + val type_parameter_instantiation : env -> (Loc.t, Loc.t) Ast.Type.ParameterInstantiation.t option + val generic : env -> Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t + val _object : is_class:bool -> env -> Loc.t * (Loc.t, Loc.t) Type.Object.t + val interface_helper : env -> (Loc.t, Loc.t) Type.Interface.t + val function_param_list : env -> (Loc.t, Loc.t) Type.Function.Params.t + val annotation : env -> (Loc.t, Loc.t) Ast.Type.annotation - val annotation_opt : env -> (Loc.t, Loc.t) Ast.Type.annotation option + + val annotation_opt : env -> (Loc.t, Loc.t) Ast.Type.annotation_or_hint + val predicate_opt : env -> (Loc.t, Loc.t) Ast.Type.Predicate.t option - val annotation_and_predicate_opt : env -> (Loc.t, Loc.t) Ast.Function.return * (Loc.t, Loc.t) Ast.Type.Predicate.t option + + val annotation_and_predicate_opt : + env -> (Loc.t, Loc.t) Ast.Type.annotation_or_hint * (Loc.t, Loc.t) Ast.Type.Predicate.t option end -module Type (Parse: Parser_common.PARSER) : TYPE = struct +module Type (Parse : Parser_common.PARSER) : TYPE = struct type param_list_or_type = | ParamList of (Loc.t, Loc.t) Type.Function.Params.t' | Type of (Loc.t, Loc.t) Type.t + let maybe_variance env = + let loc = Peek.loc env in + match Peek.token env with + | T_PLUS -> + Eat.token env; + Some (loc, Variance.Plus) + | T_MINUS -> + Eat.token env; + Some (loc, Variance.Minus) + | _ -> None + let rec _type env = union env and annotation env = - if not (should_parse_types env) - then error env Error.UnexpectedTypeAnnotation; - with_loc (fun env -> - Expect.token env T_COLON; - _type env - ) env - - and variance env = - let loc = Peek.loc env in - match Peek.token env with - | T_PLUS -> - Eat.token env; - Some (loc, Variance.Plus) - | T_MINUS -> - Eat.token env; - Some (loc, Variance.Minus) - | _ -> - None + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation; + with_loc + (fun env -> + Expect.token env T_COLON; + _type env) + env and union env = let _ = Expect.maybe env T_BIT_OR in @@ -65,16 +73,18 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct let rec unions acc env = match Peek.token env with | T_BIT_OR -> - Expect.token 
env T_BIT_OR; - unions (intersection env::acc) env + Expect.token env T_BIT_OR; + unions (intersection env :: acc) env | _ -> - match List.rev acc with - | t0::t1::ts -> Type.Union (t0, t1, ts) - | _ -> assert false - in fun env left -> - if Peek.token env = T_BIT_OR - then with_loc ~start_loc:(fst left) (unions [left]) env - else left + (match List.rev acc with + | t0 :: t1 :: ts -> Type.Union (t0, t1, ts) + | _ -> assert false) + in + fun env left -> + if Peek.token env = T_BIT_OR then + with_loc ~start_loc:(fst left) (unions [left]) env + else + left and intersection env = let _ = Expect.maybe env T_BIT_AND in @@ -85,17 +95,18 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct let rec intersections acc env = match Peek.token env with | T_BIT_AND -> - Expect.token env T_BIT_AND; - intersections (anon_function_without_parens env::acc) env + Expect.token env T_BIT_AND; + intersections (anon_function_without_parens env :: acc) env | _ -> - match List.rev acc with - | t0::t1::ts -> Type.Intersection (t0, t1, ts) - | _ -> assert false - in fun env left -> - if Peek.token env = T_BIT_AND - then with_loc ~start_loc:(fst left) (intersections [left]) env - else left - + (match List.rev acc with + | t0 :: t1 :: ts -> Type.Intersection (t0, t1, ts) + | _ -> assert false) + in + fun env left -> + if Peek.token env = T_BIT_AND then + with_loc ~start_loc:(fst left) (intersections [left]) env + else + left and anon_function_without_parens env = let param = prefix env in @@ -103,13 +114,10 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct and anon_function_without_parens_with env param = match Peek.token env with - | T_ARROW when not (no_anon_function_type env)-> - let start_loc, tparams, params = + | T_ARROW when not (no_anon_function_type env) -> + let (start_loc, tparams, params) = let param = anonymous_function_param env param in - fst param, None, (fst param, { Ast.Type.Function.Params. 
- params = [param]; - rest = None; - }) + (fst param, None, (fst param, { Ast.Type.Function.Params.params = [param]; rest = None })) in function_with_params env start_loc tparams params | _ -> param @@ -117,82 +125,84 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct and prefix env = match Peek.token env with | T_PLING -> - with_loc (fun env -> + with_loc + (fun env -> Expect.token env T_PLING; - Type.Nullable (prefix env) - ) env - | _ -> - postfix env + Type.Nullable (prefix env)) + env + | _ -> postfix env and postfix env = let t = primary env in postfix_with env t and postfix_with env t = - if not (Peek.is_line_terminator env) && Expect.maybe env T_LBRACKET - then begin - let t = with_loc ~start_loc:(fst t) (fun env -> - Expect.token env T_RBRACKET; - Type.Array t - ) env in + if (not (Peek.is_line_terminator env)) && Expect.maybe env T_LBRACKET then + let t = + with_loc + ~start_loc:(fst t) + (fun env -> + Expect.token env T_RBRACKET; + Type.Array t) + env + in postfix_with env t - end else t + else + t and primary env = let loc = Peek.loc env in match Peek.token env with | T_MULT -> - Expect.token env T_MULT; - loc, Type.Exists + Expect.token env T_MULT; + (loc, Type.Exists) | T_LESS_THAN -> _function env | T_LPAREN -> function_or_group env | T_LCURLY | T_LCURLYBAR -> - let loc, o = _object env - ~is_class:false ~allow_exact:true ~allow_spread:true in - loc, Type.Object o + let (loc, o) = _object env ~is_class:false ~allow_exact:true ~allow_spread:true in + (loc, Type.Object o) | T_INTERFACE -> - with_loc (fun env -> - Expect.token env T_INTERFACE; - Type.Interface (interface_helper env) - ) env + with_loc + (fun env -> + Expect.token env T_INTERFACE; + Type.Interface (interface_helper env)) + env | T_TYPEOF -> - with_loc (fun env -> + with_loc + (fun env -> Expect.token env T_TYPEOF; - Type.Typeof (primary env) - ) env + Type.Typeof (primary env)) + env | T_LBRACKET -> tuple env | T_IDENTIFIER _ | T_STATIC (* `static` is reserved in strict mode, but still an identifier *) -> - let loc, g = generic env in - loc, Type.Generic g - | T_STRING (loc, value, raw, octal) -> - if octal then strict_error env Error.StrictOctalLiteral; - Expect.token env (T_STRING (loc, value, raw, octal)); - loc, Type.StringLiteral { - Ast.StringLiteral.value; - raw; - } + let (loc, g) = generic env in + (loc, Type.Generic g) + | T_STRING (loc, value, raw, octal) -> + if octal then strict_error env Parse_error.StrictOctalLiteral; + Expect.token env (T_STRING (loc, value, raw, octal)); + (loc, Type.StringLiteral { Ast.StringLiteral.value; raw }) | T_NUMBER_SINGLETON_TYPE { kind; value; raw } -> - Expect.token env (T_NUMBER_SINGLETON_TYPE { kind; value; raw }); - if kind = LEGACY_OCTAL - then strict_error env Error.StrictOctalLiteral; - loc, Type.NumberLiteral { - Ast.NumberLiteral.value; - raw; - } + Expect.token env (T_NUMBER_SINGLETON_TYPE { kind; value; raw }); + if kind = LEGACY_OCTAL then strict_error env Parse_error.StrictOctalLiteral; + (loc, Type.NumberLiteral { Ast.NumberLiteral.value; raw }) + | T_BIGINT_SINGLETON_TYPE { kind; approx_value; raw } -> + let bigint = raw in + Expect.token env (T_BIGINT_SINGLETON_TYPE { kind; approx_value; raw }); + (loc, Type.BigIntLiteral { Ast.BigIntLiteral.approx_value; bigint }) | (T_TRUE | T_FALSE) as token -> - Expect.token env token; - let value = token = T_TRUE in - loc, Type.BooleanLiteral value + Expect.token env token; + let value = token = T_TRUE in + (loc, Type.BooleanLiteral value) | token -> - match primitive token with - | Some t -> - 
Expect.token env token; - loc, t - | None -> - error_unexpected env; - loc, Type.Any + (match primitive token with + | Some t -> + Expect.token env token; + (loc, t) + | None -> + error_unexpected env; + (loc, Type.Any)) and primitive = function | T_ANY_TYPE -> Some Type.Any @@ -200,6 +210,7 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct | T_EMPTY_TYPE -> Some Type.Empty | T_BOOLEAN_TYPE _ -> Some Type.Boolean | T_NUMBER_TYPE -> Some Type.Number + | T_BIGINT_TYPE -> Some Type.BigInt | T_STRING_TYPE -> Some Type.String | T_VOID_TYPE -> Some Type.Void | T_NULL -> Some Type.Null @@ -209,84 +220,79 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct let rec types env acc = match Peek.token env with | T_EOF - | T_RBRACKET -> List.rev acc + | T_RBRACKET -> + List.rev acc | _ -> - let acc = (_type env)::acc in - (* Trailing comma support (like [number, string,]) *) - if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA; - types env acc - - in fun env -> - with_loc (fun env -> - Expect.token env T_LBRACKET; - let tl = types env [] in - Expect.token env T_RBRACKET; - Type.Tuple tl - ) env + let acc = _type env :: acc in + (* Trailing comma support (like [number, string,]) *) + if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA; + types env acc + in + fun env -> + with_loc + (fun env -> + Expect.token env T_LBRACKET; + let tl = types (with_no_anon_function_type false env) [] in + Expect.token env T_RBRACKET; + Type.Tuple tl) + env and anonymous_function_param _env annot = - fst annot, Type.Function.Param.({ - name = None; - annot; - optional = false; - }) - - - and function_param_with_id env name = - if not (should_parse_types env) - then error env Error.UnexpectedTypeAnnotation; - with_loc ~start_loc:(fst name) (fun env -> - let optional = Expect.maybe env T_PLING in - Expect.token env T_COLON; - let annot = _type env in - { Type.Function.Param. 
- name = Some name; - annot; - optional; - } - ) env + (fst annot, Type.Function.Param.{ name = None; annot; optional = false }) + + and function_param_with_id env = + with_loc + (fun env -> + let name = Parse.identifier env in + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation; + let optional = Expect.maybe env T_PLING in + Expect.token env T_COLON; + let annot = _type env in + { Type.Function.Param.name = Some name; annot; optional }) + env and function_param_list_without_parens = let param env = match Peek.ith_token ~i:1 env with - | T_COLON | T_PLING -> - let id = Parse.identifier env in - function_param_with_id env id + | T_COLON + | T_PLING -> + function_param_with_id env | _ -> - let annot = _type env in - anonymous_function_param env annot - - in let rec param_list env acc = + let annot = _type env in + anonymous_function_param env annot + in + let rec param_list env acc = match Peek.token env with - | T_EOF - | T_ELLIPSIS - | T_RPAREN as t -> + | (T_EOF | T_ELLIPSIS | T_RPAREN) as t -> let rest = - if t = T_ELLIPSIS then begin - let rest = with_loc (fun env -> - Expect.token env T_ELLIPSIS; - { Type.Function.RestParam.argument = param env } - ) env in + if t = T_ELLIPSIS then + let rest = + with_loc + (fun env -> + Expect.token env T_ELLIPSIS; + { Type.Function.RestParam.argument = param env }) + env + in Some rest - end else + else None in - { Ast.Type.Function.Params.params = List.rev acc; rest; } + { Ast.Type.Function.Params.params = List.rev acc; rest } | _ -> - let acc = (param env)::acc in - if Peek.token env <> T_RPAREN - then Expect.token env T_COMMA; + let acc = param env :: acc in + if Peek.token env <> T_RPAREN then Expect.token env T_COMMA; param_list env acc - - in fun env -> param_list env + in + (fun env -> param_list env) and function_param_list env = - with_loc (fun env -> - Expect.token env T_LPAREN; - let ret = function_param_list_without_parens env [] in - Expect.token env T_RPAREN; - ret - ) env + with_loc + (fun env -> + Expect.token env T_LPAREN; + let ret = function_param_list_without_parens env [] in + Expect.token env T_RPAREN; + ret) + env and param_list_or_type env = Expect.token env T_LPAREN; @@ -295,308 +301,406 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct match Peek.token env with | T_EOF | T_ELLIPSIS -> - (* (... is definitely the beginning of a param list *) - ParamList (function_param_list_without_parens env []) + (* (... is definitely the beginning of a param list *) + ParamList (function_param_list_without_parens env []) | T_RPAREN -> - (* () or is definitely a param list *) - ParamList ({ Ast.Type.Function.Params.params = []; rest = None }) + (* () or is definitely a param list *) + ParamList { Ast.Type.Function.Params.params = []; rest = None } | T_IDENTIFIER _ | T_STATIC (* `static` is reserved in strict mode, but still an identifier *) -> - (* This could be a function parameter or a generic type *) - function_param_or_generic_type env + (* This could be a function parameter or a generic type *) + function_param_or_generic_type env | token -> - (match primitive token with - | None -> - (* All params start with an identifier or `...` *) - Type (_type env) - | Some _ -> - (* Don't know if this is (number) or (number: number). The first - * is a type, the second is a param. 
*) - match Peek.ith_token ~i:1 env with - | T_PLING | T_COLON -> - (* Ok this is definitely a parameter *) - ParamList (function_param_list_without_parens env []) - | _ -> - Type (_type env) - ) + (match primitive token with + | None -> + (* All params start with an identifier or `...` *) + Type (_type env) + | Some _ -> + (* Don't know if this is (number) or (number: number). The first + * is a type, the second is a param. *) + (match Peek.ith_token ~i:1 env with + | T_PLING + | T_COLON -> + (* Ok this is definitely a parameter *) + ParamList (function_param_list_without_parens env []) + | _ -> Type (_type env))) in (* Now that we allow anonymous parameters in function types, we need to * disambiguate a little bit more *) - let ret = match ret with - | ParamList _ -> ret - | Type _ when no_anon_function_type env -> ret - | Type t -> + let ret = + match ret with + | ParamList _ -> ret + | Type _ when no_anon_function_type env -> ret + | Type t -> (match Peek.token env with | T_RPAREN -> - (* Reinterpret `(type) =>` as a ParamList *) - if Peek.ith_token ~i:1 env = T_ARROW - then - let param = anonymous_function_param env t in - ParamList (function_param_list_without_parens env [param]) - else Type t - | T_COMMA -> - (* Reinterpret `(type,` as a ParamList *) - Expect.token env T_COMMA; + (* Reinterpret `(type) =>` as a ParamList *) + if Peek.ith_token ~i:1 env = T_ARROW then let param = anonymous_function_param env t in ParamList (function_param_list_without_parens env [param]) - | _ -> ret) in + else + Type t + | T_COMMA -> + (* Reinterpret `(type,` as a ParamList *) + Expect.token env T_COMMA; + let param = anonymous_function_param env t in + ParamList (function_param_list_without_parens env [param]) + | _ -> ret) + in Expect.token env T_RPAREN; ret and function_param_or_generic_type env = match Peek.ith_token ~i:1 env with - | T_PLING (* optional param *) + | T_PLING + (* optional param *) + | T_COLON -> - let id = Parse.identifier env in - let param = function_param_with_id env id in - ignore (Expect.maybe env T_COMMA); - ParamList (function_param_list_without_parens env [param]) + ParamList (function_param_list_without_parens env []) | _ -> - let id = type_identifier env in - Type ( - generic_type_with_identifier env id - |> postfix_with env - |> anon_function_without_parens_with env - |> intersection_with env - |> union_with env - ) + let id = type_identifier env in + Type + ( generic_type_with_identifier env id + |> postfix_with env + |> anon_function_without_parens_with env + |> intersection_with env + |> union_with env ) and function_or_group env = let start_loc = Peek.loc env in match with_loc param_list_or_type env with - | loc, ParamList params -> function_with_params env start_loc None (loc, params) - | _, Type _type -> _type + | (loc, ParamList params) -> function_with_params env start_loc None (loc, params) + | (_, Type _type) -> _type and _function env = let start_loc = Peek.loc env in - let tparams = type_parameter_declaration ~allow_default:false env in + let tparams = type_parameter_declaration env in let params = function_param_list env in function_with_params env start_loc tparams params - and function_with_params env start_loc tparams (params: (Loc.t, Loc.t) Ast.Type.Function.Params.t) = - with_loc ~start_loc (fun env -> - Expect.token env T_ARROW; - let return = _type env in - Type.(Function { Function.params; return; tparams }) - ) env + and function_with_params + env start_loc tparams (params : (Loc.t, Loc.t) Ast.Type.Function.Params.t) = + with_loc + ~start_loc + 
(fun env -> + Expect.token env T_ARROW; + let return = _type env in + Type.(Function { Function.params; return; tparams })) + env and _object = let methodish env start_loc tparams = - with_loc ~start_loc (fun env -> - let params = function_param_list env in - Expect.token env T_COLON; - let return = _type env in - { Type.Function. - params; - return; - tparams; - } - ) env - - in let method_property env start_loc static key = - let tparams = type_parameter_declaration ~allow_default:false env in + with_loc + ~start_loc + (fun env -> + let params = function_param_list env in + Expect.token env T_COLON; + let return = _type env in + { Type.Function.params; return; tparams }) + env + in + let method_property env start_loc static key = + let tparams = type_parameter_declaration env in let value = methodish env start_loc tparams in - let value = fst value, Type.Function (snd value) in - Type.Object.(Property (fst value, Property.({ - key; - value = Init value; - optional = false; - static = static <> None; - proto = false; - _method = true; - variance = None; - }))) - - in let call_property env start_loc static = - let prop = with_loc ~start_loc (fun env -> - let tparams = type_parameter_declaration ~allow_default:false env in - let value = methodish env (Peek.loc env) tparams in - Type.Object.CallProperty.({ - value; - static = static <> None; - }) - ) env in + let value = (fst value, Type.Function (snd value)) in + Type.Object.( + Property + ( fst value, + Property. + { + key; + value = Init value; + optional = false; + static = static <> None; + proto = false; + _method = true; + variance = None; + } )) + in + let call_property env start_loc static = + let prop = + with_loc + ~start_loc + (fun env -> + let tparams = type_parameter_declaration env in + let value = methodish env (Peek.loc env) tparams in + Type.Object.CallProperty.{ value; static = static <> None }) + env + in Type.Object.CallProperty prop - - in let init_property env start_loc ~variance ~static ~proto key = + in + let init_property env start_loc ~variance ~static ~proto key = ignore proto; - if not (should_parse_types env) - then error env Error.UnexpectedTypeAnnotation; - let prop = with_loc ~start_loc (fun env -> - let optional = Expect.maybe env T_PLING in - Expect.token env T_COLON; - let value = _type env in - Type.Object.Property.({ - key; - value = Init value; - optional; - static = static <> None; - proto = proto <> None; - _method = false; - variance; - }) - ) env in + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation; + let prop = + with_loc + ~start_loc + (fun env -> + let optional = Expect.maybe env T_PLING in + Expect.token env T_COLON; + let value = _type env in + Type.Object.Property. 
+ { + key; + value = Init value; + optional; + static = static <> None; + proto = proto <> None; + _method = false; + variance; + }) + env + in Type.Object.Property prop - - in let getter_or_setter ~is_getter env start_loc static key = - let prop = with_loc ~start_loc (fun env -> - let value = methodish env start_loc None in - let (key_loc, key) = key in - let (_, { Type.Function.params; _ }) = value in - begin match is_getter, params with - | true, (_, { Type.Function.Params.params = []; rest = None }) -> () - | false, (_, { Type.Function.Params.rest = Some _; _ }) -> - (* rest params don't make sense on a setter *) - error_at env (key_loc, Error.SetterArity) - | false, (_, { Type.Function.Params.params = [_]; _ }) -> () - | true, _ -> error_at env (key_loc, Error.GetterArity) - | false, _ -> error_at env (key_loc, Error.SetterArity) - end; - Type.Object.Property.({ - key; - value = if is_getter then Get value else Set value; - optional = false; - static = static <> None; - proto = false; - _method = false; - variance = None; - }) - ) env in + in + let getter_or_setter ~is_getter env start_loc static key = + let prop = + with_loc + ~start_loc + (fun env -> + let value = methodish env start_loc None in + let (key_loc, key) = key in + let (_, { Type.Function.params; _ }) = value in + begin + match (is_getter, params) with + | (true, (_, { Type.Function.Params.params = []; rest = None })) -> () + | (false, (_, { Type.Function.Params.rest = Some _; _ })) -> + (* rest params don't make sense on a setter *) + error_at env (key_loc, Parse_error.SetterArity) + | (false, (_, { Type.Function.Params.params = [_]; _ })) -> () + | (true, _) -> error_at env (key_loc, Parse_error.GetterArity) + | (false, _) -> error_at env (key_loc, Parse_error.SetterArity) + end; + Type.Object.Property. + { + key; + value = + ( if is_getter then + Get value + else + Set value ); + optional = false; + static = static <> None; + proto = false; + _method = false; + variance = None; + }) + env + in Type.Object.Property prop - - in let indexer_property env start_loc static variance = - let indexer = with_loc ~start_loc (fun env -> - (* Note: T_LBRACKET has already been consumed *) - let id = - if Peek.ith_token ~i:1 env = T_COLON - then begin - let id = identifier_name env in + in + let indexer_property env start_loc static variance = + let indexer = + with_loc + ~start_loc + (fun env -> + (* Note: T_LBRACKET has already been consumed *) + let id = + if Peek.ith_token ~i:1 env = T_COLON then ( + let id = identifier_name env in + Expect.token env T_COLON; + Some id + ) else + None + in + let key = _type env in + Expect.token env T_RBRACKET; Expect.token env T_COLON; - Some id - end else None in - let key = _type env in - Expect.token env T_RBRACKET; - Expect.token env T_COLON; - let value = _type env in - { Type.Object.Indexer. 
- id; - key; - value; - static = static <> None; - variance; - } - ) env in + let value = _type env in + { Type.Object.Indexer.id; key; value; static = static <> None; variance }) + env + in Type.Object.Indexer indexer - - in let internal_slot env start_loc static = - let islot = with_loc ~start_loc (fun env -> - (* Note: First T_LBRACKET has already been consumed *) - Expect.token env T_LBRACKET; - let id = identifier_name env in - Expect.token env T_RBRACKET; - Expect.token env T_RBRACKET; - let optional, _method, value = match Peek.token env with - | T_LESS_THAN - | T_LPAREN -> - let tparams = type_parameter_declaration ~allow_default:false env in - let value = - let fn_loc, fn = methodish env start_loc tparams in - fn_loc, Type.Function fn - in - false, true, value - | _ -> - let optional = Expect.maybe env T_PLING in - Expect.token env T_COLON; - let value = _type env in - optional, false, value - in - { Type.Object.InternalSlot. - id; - value; - optional; - static = static <> None; - _method; - } - ) env in + in + let internal_slot env start_loc static = + let islot = + with_loc + ~start_loc + (fun env -> + (* Note: First T_LBRACKET has already been consumed *) + Expect.token env T_LBRACKET; + let id = identifier_name env in + Expect.token env T_RBRACKET; + Expect.token env T_RBRACKET; + let (optional, _method, value) = + match Peek.token env with + | T_LESS_THAN + | T_LPAREN -> + let tparams = type_parameter_declaration env in + let value = + let (fn_loc, fn) = methodish env start_loc tparams in + (fn_loc, Type.Function fn) + in + (false, true, value) + | _ -> + let optional = Expect.maybe env T_PLING in + Expect.token env T_COLON; + let value = _type env in + (optional, false, value) + in + { Type.Object.InternalSlot.id; value; optional; static = static <> None; _method }) + env + in Type.Object.InternalSlot islot - - in let spread_property env start_loc = - let spread = with_loc ~start_loc (fun env -> - Expect.token env T_ELLIPSIS; - { Type.Object.SpreadProperty. 
- argument = _type env; - } - ) env in + (* Expects the T_ELLIPSIS has already been eaten *) + in + let spread_property env start_loc = + let spread = + with_loc ~start_loc (fun env -> { Type.Object.SpreadProperty.argument = _type env }) env + in Type.Object.SpreadProperty spread - - in let semicolon exact env = + in + let semicolon exact env = match Peek.token env with - | T_COMMA | T_SEMICOLON -> Eat.token env + | T_COMMA + | T_SEMICOLON -> + Eat.token env | T_RCURLYBAR when exact -> () | T_RCURLY when not exact -> () | _ -> error_unexpected env - - in let error_unexpected_variance env = function - | Some (loc, _) -> error_at env (loc, Error.UnexpectedVariance) - | None -> () - - in let error_unexpected_proto env = function - | Some loc -> error_at env (loc, Error.UnexpectedProto) - | None -> () - - in let error_invalid_property_name env is_class static key = + in + let error_unexpected_variance env = function + | Some (loc, _) -> error_at env (loc, Parse_error.UnexpectedVariance) + | None -> () + in + let error_unexpected_proto env = function + | Some loc -> error_at env (loc, Parse_error.UnexpectedProto) + | None -> () + in + let error_invalid_property_name env is_class static key = let is_static = static <> None in let is_constructor = String.equal "constructor" in let is_prototype = String.equal "prototype" in match key with - | Expression.Object.Property.Identifier (loc, name) + | Expression.Object.Property.Identifier (loc, { Identifier.name; comments = _ }) when is_class && (is_constructor name || (is_static && is_prototype name)) -> - error_at env (loc, Error.InvalidFieldName (name, is_static, false)) + error_at + env + (loc, Parse_error.InvalidFieldName { name; static = is_static; private_ = false }) | _ -> () + in + let rec properties ~is_class ~allow_inexact ~allow_spread ~exact env ((props, inexact) as acc) + = + assert (not (is_class && allow_spread)); - in let rec properties ~is_class ~allow_spread ~exact env acc = - assert (not (is_class && allow_spread)); (* no `static ...A` *) + (* no `static ...A` *) + assert ((not allow_inexact) || allow_spread); + + (* allow_inexact implies allow_spread *) let start_loc = Peek.loc env in match Peek.token env with - | T_EOF -> List.rev acc - | T_RCURLYBAR when exact -> List.rev acc - | T_RCURLY when not exact -> List.rev acc + | T_EOF -> (List.rev props, inexact) + | T_RCURLYBAR when exact -> (List.rev props, inexact) + | T_RCURLY when not exact -> (List.rev props, inexact) | T_ELLIPSIS when allow_spread -> - let prop = spread_property env start_loc in - semicolon exact env; - properties ~is_class ~allow_spread ~exact env (prop::acc) + Eat.token env; + begin + match Peek.token env with + | T_COMMA + | T_SEMICOLON + | T_RCURLY + | T_RCURLYBAR -> + semicolon exact env; + begin + match Peek.token env with + | T_RCURLY when allow_inexact -> (List.rev props, true) + | T_RCURLYBAR -> + error_at env (start_loc, Parse_error.InexactInsideExact); + (List.rev props, inexact) + | _ -> + error_at env (start_loc, Parse_error.UnexpectedExplicitInexactInObject); + properties ~is_class ~allow_inexact ~allow_spread ~exact env acc + end + | _ -> + let prop = spread_property env start_loc in + semicolon exact env; + properties ~is_class ~allow_inexact ~allow_spread ~exact env (prop :: props, inexact) + end + (* In this case, allow_spread is false, so we may assume allow_inexact is false based on our + * assertion at the top of this function. Thus, any T_ELLIPSIS here is not allowed. 
+ *) + | T_ELLIPSIS -> + Eat.token env; + begin + match Peek.token env with + | T_COMMA + | T_SEMICOLON + | T_RCURLY + | T_RCURLYBAR -> + error_at env (start_loc, Parse_error.InexactInsideNonObject); + semicolon exact env; + properties ~is_class ~allow_inexact ~allow_spread ~exact env acc + | _ -> + error_list env (Peek.errors env); + error_at env (start_loc, Parse_error.UnexpectedSpreadType); + + (* It's likely the user is trying to spread something here, so we can + * eat what they try to spread to try to continue parsing the remaining + * properties. + *) + Eat.token env; + semicolon exact env; + properties ~is_class ~allow_inexact ~allow_spread ~exact env acc + end | _ -> - let prop = property env start_loc ~is_class ~allow_static:is_class - ~allow_proto:is_class ~variance:None ~static:None ~proto:None in + let prop = + property + env + start_loc + ~is_class + ~allow_static:is_class + ~allow_proto:is_class + ~variance:None + ~static:None + ~proto:None + in semicolon exact env; - properties ~is_class ~allow_spread ~exact env (prop::acc) - + properties ~is_class ~allow_inexact ~allow_spread ~exact env (prop :: props, inexact) and property env ~is_class ~allow_static ~allow_proto ~variance ~static ~proto start_loc = match Peek.token env with - | T_PLUS when variance = None -> - let loc = Peek.loc env in - Eat.token env; - let variance = Some (loc, Variance.Plus) in + | T_PLUS + | T_MINUS + when variance = None -> + let variance = maybe_variance env in property - env ~is_class ~allow_static:false ~allow_proto:false ~variance ~static ~proto start_loc - | T_MINUS when variance = None -> - let loc = Peek.loc env in - Eat.token env; - let variance = Some (loc, Variance.Minus) in - property - env ~is_class ~allow_static:false ~allow_proto:false ~variance ~static ~proto start_loc + env + ~is_class + ~allow_static:false + ~allow_proto:false + ~variance + ~static + ~proto + start_loc | T_STATIC when allow_static -> - assert (variance = None); (* if we parsed variance, allow_static = false *) + assert (variance = None); + + (* if we parsed variance, allow_static = false *) let static = Some (Peek.loc env) in Eat.token env; property - env ~is_class ~allow_static:false ~allow_proto:false ~variance ~static ~proto start_loc + env + ~is_class + ~allow_static:false + ~allow_proto:false + ~variance + ~static + ~proto + start_loc | T_IDENTIFIER { raw = "proto"; _ } when allow_proto -> - assert (variance = None); (* if we parsed variance, allow_proto = false *) + assert (variance = None); + + (* if we parsed variance, allow_proto = false *) let proto = Some (Peek.loc env) in Eat.token env; property - env ~is_class ~allow_static:false ~allow_proto:false ~variance ~static ~proto start_loc + env + ~is_class + ~allow_static:false + ~allow_proto:false + ~variance + ~static + ~proto + start_loc | T_LBRACKET -> error_unexpected_proto env proto; Expect.token env T_LBRACKET; @@ -604,8 +708,7 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct | T_LBRACKET -> error_unexpected_variance env variance; internal_slot env start_loc static - | _ -> - indexer_property env start_loc static variance) + | _ -> indexer_property env start_loc static variance) | T_LESS_THAN | T_LPAREN -> (* Note that `static(): void` is a static callable property if we @@ -614,26 +717,26 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct error_unexpected_variance env variance; call_property env start_loc static | token -> - match static, proto, token with - | Some _, Some _, _ -> failwith "Can not have both `static` and `proto`" 
- | Some static_loc, None, (T_PLING | T_COLON) -> + (match (static, proto, token) with + | (Some _, Some _, _) -> failwith "Can not have both `static` and `proto`" + | (Some static_loc, None, (T_PLING | T_COLON)) -> (* We speculatively parsed `static` as a static modifier, but now that we've parsed the next token, we changed our minds and want to parse `static` as the key of a named property. *) - let key = Expression.Object.Property.Identifier ( - static_loc, - "static" - ) in + let key = + Expression.Object.Property.Identifier + (Flow_ast_utils.ident_of_source (static_loc, "static")) + in let static = None in init_property env start_loc ~variance ~static ~proto key - | None, Some proto_loc, (T_PLING | T_COLON) -> + | (None, Some proto_loc, (T_PLING | T_COLON)) -> (* We speculatively parsed `proto` as a proto modifier, but now that we've parsed the next token, we changed our minds and want to parse `proto` as the key of a named property. *) - let key = Expression.Object.Property.Identifier ( - proto_loc, - "proto" - ) in + let key = + Expression.Object.Property.Identifier + (Flow_ast_utils.ident_of_source (proto_loc, "proto")) + in let proto = None in init_property env start_loc ~variance ~static ~proto key | _ -> @@ -643,10 +746,12 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct Eat.pop_lex_mode env; result in - match object_key env with - | _, (Expression.Object.Property.Identifier - (_, ("get" | "set" as name)) as key) -> - begin match Peek.token env with + (match object_key env with + | ( _, + ( Expression.Object.Property.Identifier + (_, { Identifier.name = ("get" | "set") as name; comments = _ }) as key ) ) -> + begin + match Peek.token env with | T_LESS_THAN | T_LPAREN -> error_unexpected_proto env proto; @@ -661,9 +766,10 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct error_unexpected_proto env proto; error_unexpected_variance env variance; getter_or_setter ~is_getter env start_loc static key - end - | _, key -> - begin match Peek.token env with + end + | (_, key) -> + begin + match Peek.token env with | T_LESS_THAN | T_LPAREN -> error_unexpected_proto env proto; @@ -672,159 +778,194 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct | _ -> error_invalid_property_name env is_class static key; init_property env start_loc ~variance ~static ~proto key - end - - in fun ~is_class ~allow_exact ~allow_spread env -> + end)) + in + fun ~is_class ~allow_exact ~allow_spread env -> let exact = allow_exact && Peek.token env = T_LCURLYBAR in - with_loc (fun env -> - Expect.token env (if exact then T_LCURLYBAR else T_LCURLY); - let properties = properties ~is_class ~exact ~allow_spread env [] in - Expect.token env (if exact then T_RCURLYBAR else T_RCURLY); - { Type.Object.exact; properties; } - ) env + let allow_inexact = allow_exact && not exact in + with_loc + (fun env -> + Expect.token + env + ( if exact then + T_LCURLYBAR + else + T_LCURLY ); + let (properties, inexact) = + properties ~is_class ~allow_inexact ~exact ~allow_spread env ([], false) + in + Expect.token + env + ( if exact then + T_RCURLYBAR + else + T_RCURLY ); + + (* inexact = true iff `...` was used to indicate inexactnes *) + { Type.Object.exact; properties; inexact }) + env and interface_helper = let rec supers env acc = let super = generic env in - let acc = super::acc in + let acc = super :: acc in match Peek.token env with | T_COMMA -> - Expect.token env T_COMMA; - supers env acc + Expect.token env T_COMMA; + supers env acc | _ -> List.rev acc - - in fun env -> - let extends = if 
Peek.token env = T_EXTENDS - then begin - Expect.token env T_EXTENDS; - supers env [] - end else [] in - let body = _object env - ~allow_exact:false ~allow_spread:false ~is_class:false + in + fun env -> + let extends = + if Peek.token env = T_EXTENDS then ( + Expect.token env T_EXTENDS; + supers env [] + ) else + [] in + let body = _object env ~allow_exact:false ~allow_spread:false ~is_class:false in { Type.Interface.extends; body } and type_identifier env = - let loc, name = identifier_name env in + let (loc, { Identifier.name; comments }) = identifier_name env in if is_reserved_type name then error_at env (loc, Parse_error.UnexpectedReservedType); - loc, name - - and bounded_type env = with_loc (fun env -> - let name = type_identifier env in - let bound = if Peek.token env = T_COLON then Some (annotation env) else None in - name, bound - ) env + (loc, { Identifier.name; comments }) + + and bounded_type env = + with_loc + (fun env -> + let name = type_identifier env in + let bound = + if Peek.token env = T_COLON then + Ast.Type.Available (annotation env) + else + Ast.Type.Missing (Peek.loc_skip_lookahead env) + in + (name, bound)) + env and type_parameter_declaration = - let rec params env ~allow_default ~require_default acc = Type.ParameterDeclaration.TypeParam.( - let variance = variance env in - let loc, (name, bound) = bounded_type env in - let default, require_default = match allow_default, Peek.token env with - | false, _ -> None, false - | true, T_ASSIGN -> - Eat.token env; - Some (_type env), true - | true, _ -> - if require_default - then error_at env (loc, Error.MissingTypeParamDefault); - None, require_default in - let param = loc, { - name; - bound; - variance; - default; - } in - let acc = param::acc in - match Peek.token env with - | T_EOF - | T_GREATER_THAN -> List.rev acc - | _ -> - Expect.token env T_COMMA; - if Peek.token env = T_GREATER_THAN - then List.rev acc - else params env ~allow_default ~require_default acc - ) - in fun ~allow_default env -> - if Peek.token env = T_LESS_THAN - then begin - if not (should_parse_types env) - then error env Error.UnexpectedTypeAnnotation; - Some (with_loc (fun env -> - Expect.token env T_LESS_THAN; - let params = params env ~allow_default ~require_default:false [] in - Expect.token env T_GREATER_THAN; - params - ) env) - end else None + let rec params env ~require_default acc = + Type.ParameterDeclaration.TypeParam.( + let (loc, (variance, name, bound, default, require_default)) = + with_loc + (fun env -> + let variance = maybe_variance env in + let (loc, (name, bound)) = bounded_type env in + let (default, require_default) = + match Peek.token env with + | T_ASSIGN -> + Eat.token env; + (Some (_type env), true) + | _ -> + if require_default then error_at env (loc, Parse_error.MissingTypeParamDefault); + (None, require_default) + in + (variance, name, bound, default, require_default)) + env + in + let param = (loc, { name; bound; variance; default }) in + let acc = param :: acc in + match Peek.token env with + | T_EOF + | T_GREATER_THAN -> + List.rev acc + | _ -> + Expect.token env T_COMMA; + if Peek.token env = T_GREATER_THAN then + List.rev acc + else + params env ~require_default acc) + in + fun env -> + if Peek.token env = T_LESS_THAN then ( + if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation; + Some + (with_loc + (fun env -> + Expect.token env T_LESS_THAN; + let params = params env ~require_default:false [] in + Expect.token env T_GREATER_THAN; + params) + env) + ) else + None and 
type_parameter_instantiation = let rec args env acc = match Peek.token env with | T_EOF - | T_GREATER_THAN -> List.rev acc + | T_GREATER_THAN -> + List.rev acc | _ -> - let acc = (_type env)::acc in - if Peek.token env <> T_GREATER_THAN - then Expect.token env T_COMMA; + let acc = _type env :: acc in + if Peek.token env <> T_GREATER_THAN then Expect.token env T_COMMA; args env acc - - in fun env -> - if Peek.token env = T_LESS_THAN then - Some (with_loc (fun env -> - Expect.token env T_LESS_THAN; - let args = args env [] in - Expect.token env T_GREATER_THAN; - args - ) env) - else None + in + fun env -> + if Peek.token env = T_LESS_THAN then + Some + (with_loc + (fun env -> + Expect.token env T_LESS_THAN; + let env = with_no_anon_function_type false env in + let args = args env [] in + Expect.token env T_GREATER_THAN; + args) + env) + else + None and generic env = raw_generic_with_identifier env (type_identifier env) and raw_generic_with_identifier = let rec identifier env (q_loc, qualification) = - if Peek.token env = T_PERIOD - then begin - let loc, q = with_loc ~start_loc:q_loc (fun env -> - Expect.token env T_PERIOD; - let id = type_identifier env in - { Type.Generic.Identifier. - qualification; - id; - } - ) env in + if Peek.token env = T_PERIOD then + let (loc, q) = + with_loc + ~start_loc:q_loc + (fun env -> + Expect.token env T_PERIOD; + let id = type_identifier env in + { Type.Generic.Identifier.qualification; id }) + env + in let qualification = Type.Generic.Identifier.Qualified (loc, q) in identifier env (loc, qualification) - end else (q_loc, qualification) - - in fun env id -> - with_loc ~start_loc:(fst id) (fun env -> - let id = fst id, Type.Generic.Identifier.Unqualified id in - let _id_loc, id = identifier env id in - let targs = type_parameter_instantiation env in - { Type.Generic.id; targs } - ) env + else + (q_loc, qualification) + in + fun env id -> + with_loc + ~start_loc:(fst id) + (fun env -> + let id = (fst id, Type.Generic.Identifier.Unqualified id) in + let (_id_loc, id) = identifier env id in + let targs = type_parameter_instantiation env in + { Type.Generic.id; targs }) + env and generic_type_with_identifier env id = - let loc, generic = raw_generic_with_identifier env id in - loc, Type.Generic generic + let (loc, generic) = raw_generic_with_identifier env id in + (loc, Type.Generic generic) and annotation_opt env = match Peek.token env with - | T_COLON -> Some (annotation env) - | _ -> None + | T_COLON -> Type.Available (annotation env) + | _ -> Type.Missing (Peek.loc_skip_lookahead env) - let predicate = with_loc (fun env -> - Expect.token env T_CHECKS; - if Peek.token env = T_LPAREN then begin - Expect.token env T_LPAREN; - Eat.push_lex_mode env Lex_mode.NORMAL; - let exp = Parse.conditional env in - Eat.pop_lex_mode env; - Expect.token env T_RPAREN; - Ast.Type.Predicate.Declared exp - end else - Ast.Type.Predicate.Inferred - ) + let predicate = + with_loc (fun env -> + Expect.token env T_CHECKS; + if Peek.token env = T_LPAREN then ( + Expect.token env T_LPAREN; + Eat.push_lex_mode env Lex_mode.NORMAL; + let exp = Parse.conditional env in + Eat.pop_lex_mode env; + Expect.token env T_RPAREN; + Ast.Type.Predicate.Declared exp + ) else + Ast.Type.Predicate.Inferred) let predicate_opt env = let env = with_no_anon_function_type false env in @@ -833,19 +974,16 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct | _ -> None let annotation_and_predicate_opt env = - let open Ast.Function in - match Peek.token env, Peek.ith_token ~i:1 env with - | T_COLON, 
T_CHECKS -> - Expect.token env T_COLON; - Missing (Peek.loc_skip_lookeahead env), predicate_opt env - | T_COLON, _ -> - let annotation = match annotation_opt env with - | Some annotation -> Available annotation - | None -> Missing (Peek.loc_skip_lookeahead env) - in - let predicate = predicate_opt env in - annotation, predicate - | _ -> Missing (Peek.loc_skip_lookeahead env), None + Ast.Type.( + match (Peek.token env, Peek.ith_token ~i:1 env) with + | (T_COLON, T_CHECKS) -> + Expect.token env T_COLON; + (Missing (Peek.loc_skip_lookahead env), predicate_opt env) + | (T_COLON, _) -> + let annotation = annotation_opt env in + let predicate = predicate_opt env in + (annotation, predicate) + | _ -> (Missing (Peek.loc_skip_lookahead env), None)) let wrap f env = let env = env |> with_strict true in @@ -855,19 +993,26 @@ module Type (Parse: Parser_common.PARSER) : TYPE = struct ret let _type = wrap _type + let type_identifier = wrap type_identifier - let type_parameter_declaration_with_defaults = - wrap (type_parameter_declaration ~allow_default:true) - let type_parameter_declaration = - wrap (type_parameter_declaration ~allow_default:false) + + let type_parameter_declaration = wrap type_parameter_declaration + let type_parameter_instantiation = wrap type_parameter_instantiation - let _object ~is_class env = - wrap (_object ~is_class ~allow_exact:false ~allow_spread:false) env + + let _object ~is_class env = wrap (_object ~is_class ~allow_exact:false ~allow_spread:false) env + let interface_helper = wrap interface_helper + let function_param_list = wrap function_param_list + let annotation = wrap annotation + let annotation_opt = wrap annotation_opt + let predicate_opt = wrap predicate_opt + let annotation_and_predicate_opt = wrap annotation_and_predicate_opt + let generic = wrap generic end diff --git a/src/parser/yarn.lock b/src/parser/yarn.lock deleted file mode 100644 index d1ea139f0f9..00000000000 --- a/src/parser/yarn.lock +++ /dev/null @@ -1,19 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -ast-types@0.8.18: - version "0.8.18" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.8.18.tgz#c8b98574898e8914e9d8de74b947564a9fe929af" - -colors@>=0.6.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" - -esprima-fb@15001.1001.0-dev-harmony-fb: - version "15001.1001.0-dev-harmony-fb" - resolved "https://registry.yarnpkg.com/esprima-fb/-/esprima-fb-15001.1001.0-dev-harmony-fb.tgz#43beb57ec26e8cf237d3dd8b33e42533577f2659" - -minimist@>=0.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" diff --git a/src/parser_utils/.merlin b/src/parser_utils/.merlin deleted file mode 100644 index 00507826db5..00000000000 --- a/src/parser_utils/.merlin +++ /dev/null @@ -1,3 +0,0 @@ -PKG dtoa - -REC diff --git a/src/parser_utils/__tests__/comment_attacher_test.ml b/src/parser_utils/__tests__/comment_attacher_test.ml deleted file mode 100644 index 0382d361150..00000000000 --- a/src/parser_utils/__tests__/comment_attacher_test.ml +++ /dev/null @@ -1,108 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -module Ast = Flow_ast - -open OUnit2 -open Test_utils - -let mk_comments_test - contents - expected_attached_comments - expected_unattached_comments = - begin fun ctxt -> - let info = Comment_attacher.program (parse contents) in - let attached_comments = Utils_js.LocMap.fold - (fun loc comments a -> a @ [ - ( - loc, - List.map (fun (_, (_, comment)) -> - match comment with | Ast.Comment.Block s | Ast.Comment.Line s -> s - ) comments - ) - ] - ) - info.Comment_attacher.attached_comments - [] - in - let unattached_comments = List.map - (fun (_, comment) -> - match comment with | Ast.Comment.Block s | Ast.Comment.Line s -> s - ) - info.Comment_attacher.unattached_comments - in - let printer_attached = print_list (fun (loc, s) -> - Loc.to_string loc ^ " ~ \"" ^ (String.concat "\", \"" s) ^ "\"" - ) in - let printer_unattached = String.concat ", " in - assert_equal ~ctxt - ~cmp:(eq printer_attached) - ~printer:printer_attached - ~msg:"Defs of all uses don't match!" - expected_attached_comments attached_comments; - assert_equal ~ctxt - ~cmp:(eq printer_unattached) - ~printer:printer_unattached - ~msg:"Defs of all uses don't match!" - expected_unattached_comments unattached_comments - end - -let tests = "scope_builder" >::: [ - "simple_fn" >:: mk_comments_test - "//1 - function foo() {}" - [ - mk_loc (2, 4) (2, 21), ["1"]; - ] - []; - - "multi_fn" >:: mk_comments_test - "//1 - //2 - /*3*/ - function foo() {}" - [ - mk_loc (4, 4) (4, 21), ["1"; "2"; "3"]; - ] - []; - - "var_fn" >:: mk_comments_test - "//1 - const A = function() {}" - [ - mk_loc (2, 4) (2, 27), ["1"]; - ] - []; - - "class_element" >:: mk_comments_test - "class A { - //1 - a() { - //2 - a; - } - }" - [ - mk_loc (3, 6) (6, 7), ["1"]; - mk_loc (5, 8) (5, 10), ["2"]; - ] - []; - - "object_properties" >:: mk_comments_test - "const A = { - //1 - a() { - //2 - a; - } - }" - [ - mk_loc (3, 6) (6, 7), ["1"]; - mk_loc (5, 8) (5, 10), ["2"]; - ] - []; -] diff --git a/src/parser_utils/__tests__/file_exports_resolver_test.ml b/src/parser_utils/__tests__/file_exports_resolver_test.ml deleted file mode 100644 index 9a4024ed390..00000000000 --- a/src/parser_utils/__tests__/file_exports_resolver_test.ml +++ /dev/null @@ -1,345 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -open OUnit2 -open File_exports_resolver - -let substring_loc s loc = - let open Loc in - let {start={offset=a; _}; _end={offset=b; _}; _} = loc in - String.sub s a (b - a) - -let assert_singleton_smap ~ctxt key map = - assert_equal ~ctxt 1 (SMap.cardinal map); - SMap.find_unsafe key map - -let assert_substring_equal ~ctxt expected source loc = - assert_equal ~ctxt ~printer:(fun s -> s) expected (substring_loc source loc) - -let assert_exports ?assert_default ?assert_named source = - let parse_options = Some Parser_env.({ - default_parse_options with - Parser_env.esproposal_class_static_fields = true; - }) in - let ast, _ = Parser_flow.program ~parse_options source in - let exports = File_exports_resolver.program ast in - begin match assert_default, exports.default with - | Some _, None -> assert_failure "Default export not resolved" - | None, Some _ -> assert_failure "Unexpected default export resolved" - | Some assert_default, Some default -> assert_default default - | None, None -> () - end; - match assert_named, exports.named with - | Some _, None -> assert_failure "Named exports not resolved" - | None, Some _ -> assert_failure "Unexpected named exports resolved" - | Some assert_named, Some named -> assert_named named - | None, None -> () - -let tests = "require" >::: [ - "empty" >:: begin fun _ctxt -> - let source = "" in - (* Expect nothing *) - assert_exports source; - end; - - "cjs_rebound_module" >:: begin fun _ctxt -> - let source = "var module = {}; module.exports = 0;" in - (* Expect nothing *) - assert_exports source; - end; - - "cjs_rebound_exports" >:: begin fun _ctxt -> - let source = "var exports = {}; exports.foo = 0;" in - (* Expect nothing *) - assert_exports source; - end; - - "cjs_clobber_module" >:: begin fun _ctxt -> - let source = "module = {}; module.exports = 0;" in - (* Expect nothing *) - assert_exports source ~assert_default:(fun _ -> - (* TODO: Stop tracking after clobber *) - () - ); - end; - - "cjs_clobber_exports" >:: begin fun _ctxt -> - let source = "exports = {}; exports.baz = 0;" in - (* Expect nothing *) - assert_exports source ~assert_named:(fun _ -> - (* TODO: Stop tracking after clobber *) - () - ); - end; - - "cjs_default_expression" >:: begin fun ctxt -> - let source = "module.exports = 0;" in - assert_exports source ~assert_default:(function - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_default_expression_var" >:: begin fun ctxt -> - let source_foo = "const foo = 0;" in - let source_exp = "module.exports = foo;" in - let source = source_foo ^ source_exp in - assert_exports source ~assert_default:(function - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_default_function" >:: begin fun ctxt -> - let source = "module.exports = () => {};" in - assert_exports source ~assert_default:(function - | ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt source source line_loc; - assert_substring_equal ~ctxt "() => {}" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_default_function_statement" >:: begin fun ctxt -> - let source_foo = "function foo() {}" in - let source_exp = "module.exports = foo;" in - let source = source_foo ^ source_exp in - 
assert_exports source ~assert_default:(function - | ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt source_foo source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_default_function_statement_statement" >:: begin fun ctxt -> - let source_foo = "function foo() {}" in - let source_bar = "const bar = foo;" in - let source_exp = "module.exports = bar;" in - let source = source_foo ^ source_bar ^ source_exp in - assert_exports source ~assert_default:(function - | ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt source_foo source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_default_class" >:: begin fun ctxt -> - let source = "module.exports = class {};" in - assert_exports source ~assert_default:(function - | ExportClass { line_loc; class_ = (loc, _) } -> - assert_substring_equal ~ctxt source source line_loc; - assert_substring_equal ~ctxt "class {}" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - - "cjs_default_class_statement" >:: begin fun ctxt -> - let source_foo = "class foo {}" in - let source_exp = "module.exports = foo;" in - let source = source_foo ^ source_exp in - assert_exports source ~assert_default:(function - | ExportClass { line_loc; class_ = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt source_foo source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_default_clobber_named_exports" >:: begin fun ctxt -> - let source = "exports.foo = 0; module.exports = 0;" in - (* Expect nothing *) - assert_exports source ~assert_default:(function - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt "module.exports = 0;" source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_assignment_function" >:: begin fun ctxt -> - let source = "module.exports.foo = () => {};" in - assert_exports source ~assert_named:(fun named -> - match assert_singleton_smap ~ctxt "foo" named with - | ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt source source line_loc; - assert_substring_equal ~ctxt "() => {}" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_assignment_class" >:: begin fun ctxt -> - let source = "module.exports.foo = class {};" in - assert_exports source ~assert_named:(fun named -> - match assert_singleton_smap ~ctxt "foo" named with - | ExportClass { line_loc; class_ = (loc, _) } -> - assert_substring_equal ~ctxt source source line_loc; - assert_substring_equal ~ctxt "class {}" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_assignment_expression" >:: begin fun ctxt -> - let source = "module.exports.foo = 0;" in - assert_exports source ~assert_named:(fun named -> - match assert_singleton_smap ~ctxt "foo" named with - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_assignment_expression_statement" >:: begin fun ctxt -> - let source_foo = "const foo = 0;" in - let source_bar = "module.exports.bar = foo;" in - let source = source_foo ^ source_bar in - assert_exports source ~assert_named:(fun named 
-> - match assert_singleton_smap ~ctxt "bar" named with - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_mutli_assignment" >:: begin fun ctxt -> - let source_foo = "module.exports.foo = 0;" in - let source_bar = "module.exports.bar = () => {};" in - let source = source_foo ^ source_bar in - assert_exports source ~assert_named:(fun named -> - SMap.iter (fun k e -> match k, e with - | "foo", ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | "bar", ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt source_bar source line_loc; - assert_substring_equal ~ctxt "() => {}" source loc; - | _ -> assert_failure "Unexpected export" - ) named; - ); - end; - - "cjs_fancy_named_assignment" >:: begin fun ctxt -> - let source_foo = "module.exports['foo'] = 0;" in - let source_bar = "module.exports[bar] = 0;" in (* Not supported *) - let source = source_foo ^ source_bar in - assert_exports source ~assert_named:(fun named -> - SMap.iter (fun k e -> match k, e with - | "foo", ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ) named; - ); - end; - - "cjs_mixed_assignment" >:: begin fun ctxt -> - let source_def = "module.exports = class {};" in - let source_foo = "module.exports.foo = 0;" in - let source = source_def ^ source_foo in - assert_exports source - ~assert_default:(function - | ExportClass { line_loc; class_ = (loc, _) } -> - assert_substring_equal ~ctxt source_def source line_loc; - assert_substring_equal ~ctxt "class {}" source loc; - | _ -> assert_failure "Unexpected export" - ) - ~assert_named:(fun named -> - match assert_singleton_smap ~ctxt "foo" named with - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_object_function" >:: begin fun ctxt -> - let source = "module.exports = { foo() {} };" in - assert_exports source ~assert_named:(fun named -> - match assert_singleton_smap ~ctxt "foo" named with - | ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt "foo() {}" source line_loc; - assert_substring_equal ~ctxt "() {}" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_named_object_expression" >:: begin fun ctxt -> - let source = "module.exports = { foo: 0 };" in - assert_exports source ~assert_named:(fun named -> - match assert_singleton_smap ~ctxt "foo" named with - | ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt "foo: 0" source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ); - end; - - "cjs_fancy_named_object_expression" >:: begin fun ctxt -> - let source = "module.exports = { 'foo': 0, ['bar']: 0 };" in - assert_exports source ~assert_named:(fun named -> - SMap.iter (fun k e -> match k, e with - | "foo", ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt "'foo': 0" source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | "bar", ExportExpression { line_loc; expr = (loc, 
_) } -> - assert_substring_equal ~ctxt "['bar']: 0" source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ) named; - ); - end; - - "cjs_named_object_expression_statement" >:: begin fun ctxt -> - let source_foo = "function foo() {}" in - let source_bar = "const bar = { foo, baz: 0 };" in - let source_exp = "module.exports = bar;" in - let source = source_foo ^ source_bar ^ source_exp in - assert_exports source ~assert_named:(fun named -> - SMap.iter (fun k e -> match k, e with - | "foo", ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt source_foo source loc; - | "baz", ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt "baz: 0" source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ) named; - ); - end; - - "cjs_class_statement_static_props" >:: begin fun ctxt -> - let source_foo = "class foo { static bar() {} static baz = 0; }" in - let source_exp = "module.exports = foo;" in - let source = source_foo ^ source_exp in - assert_exports source - ~assert_default:(function - | ExportClass { line_loc; class_ = (loc, _) } -> - assert_substring_equal ~ctxt source_foo source line_loc; - assert_substring_equal ~ctxt source_foo source loc; - | _ -> assert_failure "Unexpected export" - ) - ~assert_named:(fun named -> - SMap.iter (fun k e -> match k, e with - | "bar", ExportFunction { line_loc; func = (loc, _) } -> - assert_substring_equal ~ctxt "static bar() {}" source line_loc; - assert_substring_equal ~ctxt "() {}" source loc; - | "baz", ExportExpression { line_loc; expr = (loc, _) } -> - assert_substring_equal ~ctxt "static baz = 0;" source line_loc; - assert_substring_equal ~ctxt "0" source loc; - | _ -> assert_failure "Unexpected export" - ) named; - ); - end; -] diff --git a/src/parser_utils/__tests__/file_sig_test.ml b/src/parser_utils/__tests__/file_sig_test.ml index 7f97a736393..8c7cf11e0d0 100644 --- a/src/parser_utils/__tests__/file_sig_test.ml +++ b/src/parser_utils/__tests__/file_sig_test.ml @@ -1,47 +1,51 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) open OUnit2 -open File_sig +open File_sig.With_Loc -let visit ?parse_options source = - let ast, _ = Parser_flow.program ~parse_options source in - match program ast with +let visit ?parse_options ?(module_ref_prefix = None) source = + let (ast, _) = Parser_flow.program ~parse_options source in + match program ~ast ~module_ref_prefix with | Ok fsig -> fsig | Error _ -> assert_failure "Unexpected error" -let visit_err ?parse_options source = - let ast, _ = Parser_flow.program ~parse_options source in - match program ast with +let visit_err ?parse_options ?(module_ref_prefix = None) source = + let (ast, _) = Parser_flow.program ~parse_options source in + match program ~ast ~module_ref_prefix with | Error e -> e | Ok _ -> assert_failure "Unexpected success" let substring_loc s loc = - let open Loc in - let {start={offset=a; _}; _end={offset=b; _}; _} = loc in - String.sub s a (b - a) + Loc.( + let table = Offset_utils.make s in + let a = Offset_utils.offset table loc.start in + let b = Offset_utils.offset table loc._end in + String.sub s a (b - a)) -let call_opt x = function Some f -> f x | None -> () +let call_opt x = function + | Some f -> f x + | None -> () let assert_es ?assert_named ?assert_star = function | ES { named; star } -> call_opt named assert_named; - call_opt star assert_star; - | CommonJS _ -> - assert_failure "Unexpected module kind" + call_opt star assert_star + | CommonJS _ -> assert_failure "Unexpected module kind" -let assert_cjs ?assert_export_loc = function +let assert_cjs ~source ?assert_export_loc = function | CommonJS { mod_exp_loc } -> - let offsets = - Option.map mod_exp_loc ~f:Loc.(fun { start={offset=a;_}; _end={offset=b;_}; _} -> (a, b)) + let table = Offset_utils.make source in + let offset_pair_of_loc loc = + Loc.(Offset_utils.offset table loc.start, Offset_utils.offset table loc._end) in + let offsets = Option.map mod_exp_loc ~f:offset_pair_of_loc in call_opt offsets assert_export_loc - | ES _ -> - assert_failure "Unexpected module kind" + | ES _ -> assert_failure "Unexpected module kind" let assert_some opt ~f = match opt with @@ -54,865 +58,793 @@ let assert_singleton_smap ~ctxt key map = let assert_singleton_nel nel = match nel with - | x, [] -> x + | (x, []) -> x | _ -> assert_failure "Expected singleton nel" +let assert_singleton_assoc ~ctxt key assoc = + match assoc with + | (x, v) :: _ -> + assert_equal ~ctxt key x; + v + | _ -> assert_failure "Expected singleton list" + let assert_substring_equal ~ctxt expected source loc = assert_equal ~ctxt expected (substring_loc source loc) -let assert_substrings_equal ~ctxt expected_remote expected_local source {remote_loc; local_loc} = +let assert_substrings_equal ~ctxt expected_remote expected_local source { remote_loc; local_loc } = assert_substring_equal ~ctxt expected_remote source remote_loc; assert_substring_equal ~ctxt expected_local source local_loc -let tests = "require" >::: [ - "cjs_require" >:: begin fun ctxt -> - let source = "const Foo = require('foo')" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Require { - source = (source_loc, "foo"); - require_loc; - bindings = Some (BindIdent (ident_loc, "Foo")) - }] -> - assert_substring_equal ~ctxt "'foo'" source source_loc; - assert_substring_equal ~ctxt "require('foo')" source require_loc; - assert_substring_equal ~ctxt "Foo" source ident_loc - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_deep_requires" >:: begin fun ctxt -> - let source = "let foo = {x: require('bar')}; func(foo, 
require('baz'));" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [ Require { - source = (baz_loc, "baz"); - require_loc = req_baz_loc; - bindings = None; - } - ; Require { - source = (bar_loc, "bar"); - require_loc = req_bar_loc; - bindings = None; - } - ] -> - assert_substring_equal ~ctxt "'bar'" source bar_loc; - assert_substring_equal ~ctxt "require('bar')" source req_bar_loc; - assert_substring_equal ~ctxt "'baz'" source baz_loc; - assert_substring_equal ~ctxt "require('baz')" source req_baz_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_deep_requires_plus_bindings" >:: begin fun ctxt -> - let source = "const Foo = require('foo'); func(Foo, require('bar'));" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [ Require { - source = (bar_loc, "bar"); - require_loc = req_bar_loc; - bindings = None; - } - ; Require { - source = (foo_loc, "foo"); - require_loc = req_foo_loc; - bindings = Some (BindIdent (foo_id_loc, "Foo")); - } - ] -> - assert_substring_equal ~ctxt "'foo'" source foo_loc; - assert_substring_equal ~ctxt "require('foo')" source req_foo_loc; - assert_substring_equal ~ctxt "Foo" source foo_id_loc; - assert_substring_equal ~ctxt "'bar'" source bar_loc; - assert_substring_equal ~ctxt "require('bar')" source req_bar_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_require_template_literal" >:: begin fun ctxt -> - let source = "const Foo = require(`foo`)" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Require { - source = (source_loc, "foo"); - require_loc; - bindings = Some (BindIdent (ident_loc, "Foo")) - }] -> - assert_substring_equal ~ctxt "`foo`" source source_loc; - assert_substring_equal ~ctxt "require(`foo`)" source require_loc; - assert_substring_equal ~ctxt "Foo" source ident_loc - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_require_named" >:: begin fun ctxt -> - let source = "const {foo, bar: baz} = require('foo');" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Require { - source = (source_loc, "foo"); - require_loc; - bindings = Some (BindNamed map) - }] -> - assert_substring_equal ~ctxt "'foo'" source source_loc; - assert_substring_equal ~ctxt "require('foo')" source require_loc; - assert_equal ~ctxt 2 (SMap.cardinal map); - let foo_loc, foo_loc' = match SMap.find_unsafe "foo" map with - | locals when SMap.mem "foo" locals -> - let { local_loc = loc; remote_loc = loc' } = Nel.hd @@ SMap.find "foo" locals in - loc, loc' - | _ -> assert_failure "Unexpected requires" - in - let baz_loc, bar_loc = match SMap.find_unsafe "bar" map with - | locals when SMap.mem "baz" locals -> - let { local_loc = loc; remote_loc = loc' } = Nel.hd @@ SMap.find "baz" locals in - loc, loc' - | _ -> assert_failure "Unexpected requires" - in - assert_substring_equal ~ctxt "foo" source foo_loc; - assert_substring_equal ~ctxt "foo" source foo_loc'; - assert_substring_equal ~ctxt "bar" source bar_loc; - assert_substring_equal ~ctxt "baz" source baz_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_require_duplicate_remote" >:: begin fun ctxt -> - let source = "const {foo: bar, foo: baz} = require('foo');" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Require { - source = (source_loc, "foo"); - require_loc; - bindings = Some (BindNamed map) - }] -> - assert_substring_equal ~ctxt "'foo'" source source_loc; - assert_substring_equal ~ctxt 
"require('foo')" source require_loc; - assert_equal ~ctxt 1 (SMap.cardinal map); - let bar_loc, foo_loc = match SMap.find_unsafe "foo" map with - | locals when SMap.mem "bar" locals -> - let { local_loc = loc; remote_loc = loc' } = Nel.hd @@ SMap.find "bar" locals in - loc, loc' - | _ -> assert_failure "Unexpected requires" - in - let baz_loc, foo_loc' = match SMap.find_unsafe "foo" map with - | locals when SMap.mem "baz" locals -> - let { local_loc = loc; remote_loc = loc' } = Nel.hd @@ SMap.find "baz" locals in - loc, loc' - | _ -> assert_failure "Unexpected requires" - in - assert_substring_equal ~ctxt "foo" source foo_loc; - assert_substring_equal ~ctxt "foo" source foo_loc'; - assert_substring_equal ~ctxt "bar" source bar_loc; - assert_substring_equal ~ctxt "baz" source baz_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_require_duplicate_local" >:: begin fun ctxt -> - let source = "const {foo: bar, baz: bar} = require('foo');" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Require { - source = (source_loc, "foo"); - require_loc; - bindings = Some (BindNamed map) - }] -> - assert_substring_equal ~ctxt "'foo'" source source_loc; - assert_substring_equal ~ctxt "require('foo')" source require_loc; - assert_equal ~ctxt 2 (SMap.cardinal map); - let bar_loc, baz_loc = match SMap.find_unsafe "baz" map with - | locals when SMap.mem "bar" locals -> - let { local_loc = loc; remote_loc = loc' } = Nel.hd @@ SMap.find "bar" locals in - loc, loc' - | _ -> assert_failure "Unexpected requires" - in - assert_substring_equal ~ctxt "bar" source bar_loc; - assert_substring_equal ~ctxt "baz" source baz_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_require_in_export" >:: begin fun ctxt -> - (* An initial version of the change to ban non-toplevel exports failed to descend into the RHS - * of export statements *) - let source = "module.exports.foo = require('foo');" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Require { - source = (source_loc, "foo"); - require_loc; - bindings = None; - }] -> - assert_substring_equal ~ctxt "'foo'" source source_loc; - assert_substring_equal ~ctxt "require('foo')" source require_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "dynamic_import" >:: begin fun ctxt -> - let source = "import('foo')" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [ImportDynamic {source = (source_loc, "foo"); import_loc}] -> - assert_substring_equal ~ctxt "'foo'" source source_loc; - assert_substring_equal ~ctxt "import('foo')" source import_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "dynamic_import_template_literal" >:: begin fun ctxt -> - let source = "import(`foo`)" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [ImportDynamic {source = (source_loc, "foo"); import_loc}] -> - assert_substring_equal ~ctxt "`foo`" source source_loc; - assert_substring_equal ~ctxt "import(`foo`)" source import_loc; - | _ -> assert_failure "Unexpected requires" - end; - - "es_import" >:: begin fun ctxt -> - let source = "import 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import0 {source = (loc, "foo")}] -> - assert_substring_equal ~ctxt "'foo'" source loc - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_default" >:: begin fun ctxt -> - let source = "import Foo from 'foo'" in - let {module_sig = {requires; _}; _} = visit source 
in - match requires with - | [Import {source = (_, "foo"); named; _}] -> - named - |> assert_singleton_smap ~ctxt "default" - |> assert_singleton_smap ~ctxt "Foo" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "Foo" "Foo" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_named" >:: begin fun ctxt -> - let source = "import {A} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); named; _}] -> - named - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_renamed" >:: begin fun ctxt -> - let source = "import {A as B} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); named; _}] -> - named - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "B" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "B" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_named_type" >:: begin fun ctxt -> - let source = "import {type A} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); types; _}] -> - types - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_named_typeof" >:: begin fun ctxt -> - let source = "import {typeof A} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); typesof; _}] -> - typesof - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_ns" >:: begin fun ctxt -> - let source = "import * as Foo from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); ns = Some (loc, "Foo"); _}] -> - assert_substring_equal ~ctxt "* as Foo" source loc - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_type" >:: begin fun ctxt -> - let source = "import type A from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); types; _}] -> - types - |> assert_singleton_smap ~ctxt "default" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_type_named" >:: begin fun ctxt -> - let source = "import type {A} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); types; _}] -> - types - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_type_renamed" >:: begin fun ctxt -> - let source = "import type {A as B} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); types; _}] -> - types - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "B" - |> 
assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "B" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_typeof" >:: begin fun ctxt -> - let source = "import typeof A from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); typesof; _}] -> - typesof - |> assert_singleton_smap ~ctxt "default" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_typeof_named" >:: begin fun ctxt -> - let source = "import typeof {A} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); typesof; _}] -> - typesof - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "A" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_typeof_renamed" >:: begin fun ctxt -> - let source = "import typeof {A as B} from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); typesof; _}] -> - typesof - |> assert_singleton_smap ~ctxt "A" - |> assert_singleton_smap ~ctxt "B" - |> assert_singleton_nel - |> assert_substrings_equal ~ctxt "A" "B" source - | _ -> assert_failure "Unexpected requires" - end; - - "es_import_typesof_ns" >:: begin fun ctxt -> - let source = "import typeof * as Foo from 'foo'" in - let {module_sig = {requires; _}; _} = visit source in - match requires with - | [Import {source = (_, "foo"); typesof_ns = Some (loc, "Foo"); _}] -> - assert_substring_equal ~ctxt "* as Foo" source loc - | _ -> assert_failure "Unexpected requires" - end; - - "cjs_default" >:: begin fun ctxt -> - let {module_sig = {module_kind; _}; _} = visit "" in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt None) - end; - - "cjs_clobber" >:: begin fun ctxt -> - let source = "module.exports = 0" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) - end; - - "cjs_clobber_rebound" >:: begin fun ctxt -> - let source = "var module = {}; module.exports = 0" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (None)) - end; - - "cjs_exports_named_rebound" >:: begin fun ctxt -> - let source = "var module = {}; module.exports.bar = 0" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (None)) - end; - - "cjs_exports_named_rebound2" >:: begin fun ctxt -> - let source = "var exports = {}; exports.bar = 0" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (None)) - end; - - "cjs_exports" >:: begin fun ctxt -> - let source = "exports = {foo: bar}; exports.baz = qux;" in - let {module_sig = {module_kind; _}; _} = visit source in - (* TODO report an export loc here *) - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (22, 29))) - end; - - "cjs_export_named" >:: begin fun ctxt -> - let source = "module.exports.foo = 0; module.exports.bar = baz;" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) - end; - - "cjs_export_object" >:: begin fun ctxt -> - let 
source = "module.exports = {foo: bar, baz: 0, qux};" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) - end; - - "cjs_export_ident" >:: begin fun ctxt -> - let source = "module.exports = foo;" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) - end; - - "cjs_export_ident_then_props" >:: begin fun ctxt -> - let source = "module.exports = foo; module.exports.bar = baz;" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) - end; - - "cjs_export_props_then_ident" >:: begin fun ctxt -> - let source = "module.exports.foo = bar; module.exports = baz;" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) - end; - - "export_named_type" >:: begin fun ctxt -> - let source = "export type ty = string" in - let {module_sig = {type_exports_named; _}; _} = visit source in - let type_export = assert_singleton_smap ~ctxt "ty" type_exports_named in - match type_export with - | TypeExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "type ty = string" source loc - | _ -> assert_failure "Unexpected type export" - end; - - "export_named_opaque_type" >:: begin fun ctxt -> - let source = "export opaque type ty = string" in - let {module_sig = {type_exports_named; _}; _} = visit source in - let type_export = assert_singleton_smap ~ctxt "ty" type_exports_named in - match type_export with - | TypeExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "opaque type ty = string" source loc - | _ -> assert_failure "Unexpected type export" - end; - - "export_named_interface" >:: begin fun ctxt -> - let source = "export interface I {}" in - let {module_sig = {type_exports_named; _}; _} = visit source in - let type_export = assert_singleton_smap ~ctxt "I" type_exports_named in - match type_export with - | TypeExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "interface I {}" source loc - | _ -> assert_failure "Unexpected type export" - end; - - "export_default_expr" >:: begin fun ctxt -> - let source = "export default 0" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = None } -> - assert_substring_equal ~ctxt "default" source default_loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_default_anon_decl" >:: begin fun ctxt -> - let source = "export default function() {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = None } -> - assert_substring_equal ~ctxt "default" source default_loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_default_named_func" >:: begin fun ctxt -> - let source = "export default function foo() {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = Some (loc, "foo") } -> - 
assert_substring_equal ~ctxt "default" source default_loc; - assert_substring_equal ~ctxt "foo" source loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_default_named_class" >:: begin fun ctxt -> - let source = "export default function C() {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = Some (loc, "C") } -> - assert_substring_equal ~ctxt "default" source default_loc; - assert_substring_equal ~ctxt "C" source loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_named_func" >:: begin fun ctxt -> - let source = "export function foo() {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "foo" named in - match export with - | ExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "foo" source loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_named_class" >:: begin fun ctxt -> - let source = "export class C {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "C" named in - match export with - | ExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "C" source loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_named_vars" >:: begin fun ctxt -> - let source = "export var x, y = 0, [a] = [], {p} = {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 4 (SMap.cardinal named); - let x_loc = match SMap.find_unsafe "x" named with - | ExportNamed { loc; kind = NamedDeclaration } -> loc - | _ -> assert_failure "Unexpected export" - in - let y_loc = match SMap.find_unsafe "y" named with - | ExportNamed { loc; kind = NamedDeclaration } -> loc - | _ -> assert_failure "Unexpected export" - in - let a_loc = match SMap.find_unsafe "a" named with - | ExportNamed { loc; kind = NamedDeclaration } -> loc - | _ -> assert_failure "Unexpected export" - in - let p_loc = match SMap.find_unsafe "p" named with - | ExportNamed { loc; kind = NamedDeclaration } -> loc - | _ -> assert_failure "Unexpected export" - in - assert_substring_equal ~ctxt "x" source x_loc; - assert_substring_equal ~ctxt "y" source y_loc; - assert_substring_equal ~ctxt "a" source a_loc; - assert_substring_equal ~ctxt "p" source p_loc; - ) - end; - - "export_named_specs" >:: begin fun ctxt -> - let source = "export {x, y as z}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 2 (SMap.cardinal named); - let x_loc = match SMap.find_unsafe "x" named with - | ExportNamed { loc; kind = NamedSpecifier ({local = (x_loc, "x"); source = None }) } when - x_loc = loc -> loc - | _ -> assert_failure "Unexpected export" - in - let y_loc, z_loc = match SMap.find_unsafe "z" named with - | ExportNamed { loc; kind = NamedSpecifier ({local = (y_loc, "y"); source = None }) } -> y_loc, loc - | _ -> assert_failure "Unexpected export" - in - assert_substring_equal ~ctxt "x" source x_loc; - assert_substring_equal ~ctxt "y" source y_loc; - assert_substring_equal ~ctxt "z" source z_loc; - ) - end; - - "export_star" >:: begin fun ctxt -> - let source = "export * from 'foo'" in - let 
{module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_star:(function - | [ExportStar { star_loc; source = (source_loc, "foo") }] -> - assert_substring_equal ~ctxt "*" source star_loc; - assert_substring_equal ~ctxt "'foo'" source source_loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "export_ns" >:: begin fun ctxt -> - let source = "export * as ns from 'foo'" in - let parse_options = Parser_env.({ - default_parse_options with - esproposal_export_star_as = true - }) in - let {module_sig = {module_kind; _}; _} = visit ~parse_options source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "ns" named in - match export with - | ExportNs { loc; source = (source_loc, "foo") } -> - assert_substring_equal ~ctxt "ns" source loc; - assert_substring_equal ~ctxt "'foo'" source source_loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_module.exports" >:: begin fun ctxt -> - let source = "declare module.exports: ty" in - let {module_sig = {module_kind; _}; _} = visit source in - (* TODO use just the `module.exports` location *) - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 26))) - end; - - "declare_export_default" >:: begin fun ctxt -> - let source = "declare export default string" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = None } -> - assert_substring_equal ~ctxt "default" source default_loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_default_func" >:: begin fun ctxt -> - let source = "declare export default function foo(): void" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = Some (loc, "foo") } -> - assert_substring_equal ~ctxt "default" source default_loc; - assert_substring_equal ~ctxt "foo" source loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_default_class" >:: begin fun ctxt -> - let source = "declare export default class C {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 1 (SMap.cardinal named); - let export = assert_singleton_smap ~ctxt "default" named in - match export with - | ExportDefault { default_loc; local = Some (loc, "C") } -> - assert_substring_equal ~ctxt "default" source default_loc; - assert_substring_equal ~ctxt "C" source loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_named_func" >:: begin fun ctxt -> - let source = "declare export function foo(): void" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "foo" named in - match export with - | ExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "foo" source loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_named_class" >:: begin fun ctxt -> - let source = "declare export class C {}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "C" named in - match export with - 
| ExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "C" source loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_named_var" >:: begin fun ctxt -> - let source = "declare export var foo: string" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - let export = assert_singleton_smap ~ctxt "foo" named in - match export with - | ExportNamed { loc; kind = NamedDeclaration } -> - assert_substring_equal ~ctxt "foo" source loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_named_specs" >:: begin fun ctxt -> - let source = "declare export {x, y as z}" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 2 (SMap.cardinal named); - let x_loc = match SMap.find_unsafe "x" named with - | ExportNamed { loc; kind = NamedSpecifier ({ local = (x_loc, "x"); source = None }) } - when x_loc = loc -> loc - | _ -> assert_failure "Unexpected export" - in - let y_loc, z_loc = match SMap.find_unsafe "z" named with - | ExportNamed { loc; kind = NamedSpecifier ({ local = (y_loc, "y"); source = None }) } -> y_loc, loc - | _ -> assert_failure "Unexpected export" - in - assert_substring_equal ~ctxt "x" source x_loc; - assert_substring_equal ~ctxt "y" source y_loc; - assert_substring_equal ~ctxt "z" source z_loc; - ) - end; - - "declare_export_star" >:: begin fun ctxt -> - let source = "declare export * from 'foo'" in - let {module_sig = {module_kind; _}; _} = visit source in - assert_es module_kind ~assert_star:(function - | [ExportStar { star_loc; source = (source_loc, "foo") }] -> - assert_substring_equal ~ctxt "*" source star_loc; - assert_substring_equal ~ctxt "'foo'" source source_loc; - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_export_ns" >:: begin fun ctxt -> - let source = "declare export * as ns from 'foo'" in - let parse_options = Parser_env.({ - default_parse_options with - esproposal_export_star_as = true - }) in - let {module_sig = {module_kind; _}; _} = visit ~parse_options source in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 1 (SMap.cardinal named); - let loc, source_loc = match SMap.find_unsafe "ns" named with - | ExportNs { loc; source = (source_loc, "foo") } -> loc, source_loc - | _ -> assert_failure "Unexpected export" - in - assert_substring_equal ~ctxt "ns" source loc; - assert_substring_equal ~ctxt "'foo'" source source_loc; - ) - end; - - "declare_module" >:: begin fun ctxt -> - let source = "declare module foo {}" in - let {declare_modules; _} = visit source in - let modules = declare_modules in - assert_equal ~ctxt 1 (SMap.cardinal modules); - let loc, { requires; module_kind; type_exports_named; type_exports_star; info = () } = - SMap.find_unsafe "foo" modules in - assert_substring_equal ~ctxt source source loc; - assert_equal ~ctxt 0 (List.length requires); - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt None); - assert_equal ~ctxt 0 (SMap.cardinal type_exports_named); - assert_equal ~ctxt 0 (List.length type_exports_star); - end; - - "declare_module_export_type" >:: begin function ctxt -> - let source = "declare module foo { declare export type bar = string }" in - let {declare_modules; _} = visit source in - assert_equal ~ctxt 1 (SMap.cardinal declare_modules); - let _, { type_exports_named; _ } = - SMap.find_unsafe "foo" declare_modules in - assert_equal ~ctxt 1 (SMap.cardinal 
type_exports_named); - let loc = match SMap.find_unsafe "bar" type_exports_named with - | TypeExportNamed { loc; kind = NamedDeclaration } -> loc - | _ -> assert_failure "Unexpected type export" - in - assert_substring_equal ~ctxt "bar" source loc; - end; - - "declare_module_export_default_expr" >:: begin fun ctxt -> - let source = "declare module foo { declare export default ty }" in - let {declare_modules; _} = visit source in - assert_equal ~ctxt 1 (SMap.cardinal declare_modules); - let _, { module_kind; _ } = SMap.find_unsafe "foo" declare_modules in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 1 (SMap.cardinal named); - match SMap.find_unsafe "default" named with - | ExportDefault { default_loc; local = None } -> - assert_substring_equal ~ctxt "default" source default_loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_module_export_default_decl" >:: begin fun ctxt -> - let source = "declare module foo { declare export default function bar(): void }" in - let {declare_modules; _} = visit source in - assert_equal ~ctxt 1 (SMap.cardinal declare_modules); - let _, { module_kind; _ } = SMap.find_unsafe "foo" declare_modules in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 1 (SMap.cardinal named); - match SMap.find_unsafe "default" named with - | ExportDefault { default_loc; local = Some (loc, "bar") } -> - assert_substring_equal ~ctxt "default" source default_loc; - assert_substring_equal ~ctxt "bar" source loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_module_export_name_func" >:: begin fun ctxt -> - let source = "declare module foo { declare export function bar(): void }" in - let {declare_modules; _} = visit source in - assert_equal ~ctxt 1 (SMap.cardinal declare_modules); - let _, { module_kind; _ } = SMap.find_unsafe "foo" declare_modules in - assert_es module_kind ~assert_named:(fun named -> - assert_equal ~ctxt 1 (SMap.cardinal named); - let loc = match SMap.find_unsafe "bar" named with - | ExportNamed { loc; kind = NamedDeclaration } -> loc - | _ -> assert_failure "Unexpected export" - in - assert_substring_equal ~ctxt "bar" source loc - ) - end; - - "declare_module_export_star" >:: begin fun ctxt -> - let source = "declare module foo { declare export * from 'bar' }" in - let {declare_modules; _} = visit source in - assert_equal ~ctxt 1 (SMap.cardinal declare_modules); - let _, { module_kind; _ } = SMap.find_unsafe "foo" declare_modules in - assert_es module_kind ~assert_star:(function - | [ExportStar { star_loc; source = (source_loc, "bar") }] -> - assert_substring_equal ~ctxt "*" source star_loc; - assert_substring_equal ~ctxt "'bar'" source source_loc - | _ -> assert_failure "Unexpected export" - ) - end; - - "declare_module_declare_module.export" >:: begin fun ctxt -> - let source = "declare module foo { declare module.exports: ty }" in - let {declare_modules; _} = visit source in - assert_equal ~ctxt 1 (SMap.cardinal declare_modules); - let _, { module_kind; _ } = SMap.find_unsafe "foo" declare_modules in - (* TODO use o0nly the location of `module.exports` *) - assert_cjs module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (21, 47))) - end; - - "err_indeterminate_clobber_after_export" >:: begin fun ctxt -> - let source = "export default 0; module.exports = 0;" in - match visit_err source with - | IndeterminateModuleType loc -> - assert_substring_equal ~ctxt "module.exports" source loc - end; - - "err_indeterminate_export_after_clobber" >:: begin fun ctxt -> - let source = 
"module.exports = 0; export default 0;" in - match visit_err source with - | IndeterminateModuleType loc -> - assert_substring_equal ~ctxt "export default 0;" source loc - end; -] +let tests = + "require" + >::: [ + ( "cjs_require" + >:: fun ctxt -> + let source = "const Foo = require('foo')" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [ + Require + { + source = (source_loc, "foo"); + require_loc; + bindings = Some (BindIdent (ident_loc, "Foo")); + }; + ] -> + assert_substring_equal ~ctxt "'foo'" source source_loc; + assert_substring_equal ~ctxt "require('foo')" source require_loc; + assert_substring_equal ~ctxt "Foo" source ident_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_deep_requires" + >:: fun ctxt -> + let source = "let foo = {x: require('bar')}; func(foo, require('baz'));" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [ + Require { source = (baz_loc, "baz"); require_loc = req_baz_loc; bindings = None }; + Require { source = (bar_loc, "bar"); require_loc = req_bar_loc; bindings = None }; + ] -> + assert_substring_equal ~ctxt "'bar'" source bar_loc; + assert_substring_equal ~ctxt "require('bar')" source req_bar_loc; + assert_substring_equal ~ctxt "'baz'" source baz_loc; + assert_substring_equal ~ctxt "require('baz')" source req_baz_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_deep_requires_plus_bindings" + >:: fun ctxt -> + let source = "const Foo = require('foo'); func(Foo, require('bar'));" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [ + Require { source = (bar_loc, "bar"); require_loc = req_bar_loc; bindings = None }; + Require + { + source = (foo_loc, "foo"); + require_loc = req_foo_loc; + bindings = Some (BindIdent (foo_id_loc, "Foo")); + }; + ] -> + assert_substring_equal ~ctxt "'foo'" source foo_loc; + assert_substring_equal ~ctxt "require('foo')" source req_foo_loc; + assert_substring_equal ~ctxt "Foo" source foo_id_loc; + assert_substring_equal ~ctxt "'bar'" source bar_loc; + assert_substring_equal ~ctxt "require('bar')" source req_bar_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_require_template_literal" + >:: fun ctxt -> + let source = "const Foo = require(`foo`)" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [ + Require + { + source = (source_loc, "foo"); + require_loc; + bindings = Some (BindIdent (ident_loc, "Foo")); + }; + ] -> + assert_substring_equal ~ctxt "`foo`" source source_loc; + assert_substring_equal ~ctxt "require(`foo`)" source require_loc; + assert_substring_equal ~ctxt "Foo" source ident_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_require_named" + >:: fun ctxt -> + let source = "const {foo, bar: baz} = require('foo');" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Require { source = (source_loc, "foo"); require_loc; bindings = Some (BindNamed map) }] + -> + assert_substring_equal ~ctxt "'foo'" source source_loc; + assert_substring_equal ~ctxt "require('foo')" source require_loc; + assert_equal ~ctxt 2 (List.length map); + let (foo_loc, foo_loc') = + match List.find_all (fun ((_, x), _) -> x = "foo") map with + | [((foo_loc', _), BindIdent (foo_loc, "foo"))] -> (foo_loc, foo_loc') + | _ -> assert_failure "Unexpected requires" + in + let (baz_loc, bar_loc) = + match List.find_all (fun ((_, x), _) -> x = "bar") map with + | [((bar_loc, _), BindIdent (baz_loc, "baz"))] -> 
(baz_loc, bar_loc) + | _ -> assert_failure "Unexpected requires" + in + assert_substring_equal ~ctxt "foo" source foo_loc; + assert_substring_equal ~ctxt "foo" source foo_loc'; + assert_substring_equal ~ctxt "bar" source bar_loc; + assert_substring_equal ~ctxt "baz" source baz_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_require_duplicate_remote" + >:: fun ctxt -> + let source = "const {foo: bar, foo: baz} = require('foo');" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Require { source = (source_loc, "foo"); require_loc; bindings = Some (BindNamed map) }] + -> + assert_substring_equal ~ctxt "'foo'" source source_loc; + assert_substring_equal ~ctxt "require('foo')" source require_loc; + assert_equal ~ctxt 2 (List.length map); + let (bar_loc, foo_loc, baz_loc, foo_loc') = + match List.find_all (fun ((_, x), _) -> x = "foo") map with + | [ + ((foo_loc', _), BindIdent (baz_loc, "baz")); + ((foo_loc, _), BindIdent (bar_loc, "bar")); + ] -> + (bar_loc, foo_loc, baz_loc, foo_loc') + | _ -> assert_failure "Unexpected requires" + in + assert_substring_equal ~ctxt "foo" source foo_loc; + assert_substring_equal ~ctxt "foo" source foo_loc'; + assert_substring_equal ~ctxt "bar" source bar_loc; + assert_substring_equal ~ctxt "baz" source baz_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_require_duplicate_local" + >:: fun ctxt -> + let source = "const {foo: bar, baz: bar} = require('foo');" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Require { source = (source_loc, "foo"); require_loc; bindings = Some (BindNamed map) }] + -> + assert_substring_equal ~ctxt "'foo'" source source_loc; + assert_substring_equal ~ctxt "require('foo')" source require_loc; + assert_equal ~ctxt 2 (List.length map); + let (bar_loc, baz_loc) = + match List.find_all (fun ((_, x), _) -> x = "baz") map with + | [((baz_loc, _), BindIdent (bar_loc, "bar"))] -> (bar_loc, baz_loc) + | _ -> assert_failure "Unexpected requires" + in + assert_substring_equal ~ctxt "bar" source bar_loc; + assert_substring_equal ~ctxt "baz" source baz_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_require_in_export" + >:: fun ctxt -> + (* An initial version of the change to ban non-toplevel exports failed to descend into the RHS + * of export statements *) + let source = "module.exports.foo = require('foo');" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Require { source = (source_loc, "foo"); require_loc; bindings = None }] -> + assert_substring_equal ~ctxt "'foo'" source source_loc; + assert_substring_equal ~ctxt "require('foo')" source require_loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_module_ref" + >:: fun ctxt -> + let source = "moduleRefConsumer('m#foo')" in + let { module_sig = { requires; _ }; _ } = visit source ~module_ref_prefix:(Some "m#") in + match requires with + | [Require { source = (source_loc, "foo"); require_loc; _ }] -> + assert_substring_equal ~ctxt "'m#foo'" source source_loc; + assert_substring_equal ~ctxt "'m#foo'" source require_loc + | _ -> assert_failure "Unexpected requires" ); + ( "dynamic_import" + >:: fun ctxt -> + let source = "import('foo')" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [ImportDynamic { source = (source_loc, "foo"); import_loc }] -> + assert_substring_equal ~ctxt "'foo'" source source_loc; + assert_substring_equal ~ctxt "import('foo')" source import_loc + | _ -> 
assert_failure "Unexpected requires" ); + ( "dynamic_import_template_literal" + >:: fun ctxt -> + let source = "import(`foo`)" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [ImportDynamic { source = (source_loc, "foo"); import_loc }] -> + assert_substring_equal ~ctxt "`foo`" source source_loc; + assert_substring_equal ~ctxt "import(`foo`)" source import_loc + | _ -> assert_failure "Unexpected requires" ); + ( "es_import" + >:: fun ctxt -> + let source = "import 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import0 { source = (loc, "foo") }] -> assert_substring_equal ~ctxt "'foo'" source loc + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_default" + >:: fun ctxt -> + let source = "import Foo from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); named; _ }] -> + named + |> assert_singleton_smap ~ctxt "default" + |> assert_singleton_smap ~ctxt "Foo" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "Foo" "Foo" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_named" + >:: fun ctxt -> + let source = "import {A} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); named; _ }] -> + named + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_renamed" + >:: fun ctxt -> + let source = "import {A as B} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); named; _ }] -> + named + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "B" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "B" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_named_type" + >:: fun ctxt -> + let source = "import {type A} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); types; _ }] -> + types + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_named_typeof" + >:: fun ctxt -> + let source = "import {typeof A} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); typesof; _ }] -> + typesof + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_ns" + >:: fun ctxt -> + let source = "import * as Foo from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); ns = Some (loc, "Foo"); _ }] -> + assert_substring_equal ~ctxt "* as Foo" source loc + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_type" + >:: fun ctxt -> + let source = "import type A from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); types; _ }] -> + types + |> assert_singleton_smap ~ctxt "default" + |> assert_singleton_smap ~ctxt "A" + |> 
assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_type_named" + >:: fun ctxt -> + let source = "import type {A} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); types; _ }] -> + types + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_type_renamed" + >:: fun ctxt -> + let source = "import type {A as B} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); types; _ }] -> + types + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "B" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "B" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_typeof" + >:: fun ctxt -> + let source = "import typeof A from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); typesof; _ }] -> + typesof + |> assert_singleton_smap ~ctxt "default" + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_typeof_named" + >:: fun ctxt -> + let source = "import typeof {A} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); typesof; _ }] -> + typesof + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "A" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_typeof_renamed" + >:: fun ctxt -> + let source = "import typeof {A as B} from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); typesof; _ }] -> + typesof + |> assert_singleton_smap ~ctxt "A" + |> assert_singleton_smap ~ctxt "B" + |> assert_singleton_nel + |> assert_substrings_equal ~ctxt "A" "B" source + | _ -> assert_failure "Unexpected requires" ); + ( "es_import_typesof_ns" + >:: fun ctxt -> + let source = "import typeof * as Foo from 'foo'" in + let { module_sig = { requires; _ }; _ } = visit source in + match requires with + | [Import { source = (_, "foo"); typesof_ns = Some (loc, "Foo"); _ }] -> + assert_substring_equal ~ctxt "* as Foo" source loc + | _ -> assert_failure "Unexpected requires" ); + ( "cjs_default" + >:: fun ctxt -> + let source = "" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt None) ); + ( "cjs_clobber" + >:: fun ctxt -> + let source = "module.exports = 0" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) ); + ( "cjs_clobber_rebound" + >:: fun ctxt -> + let source = "var module = {}; module.exports = 0" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt None) ); + ( "cjs_exports_named_rebound" + >:: fun ctxt -> + let source = "var module = {}; module.exports.bar = 0" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind 
~assert_export_loc:(assert_equal ~ctxt None) ); + ( "cjs_exports_named_rebound2" + >:: fun ctxt -> + let source = "var exports = {}; exports.bar = 0" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt None) ); + ( "cjs_exports" + >:: fun ctxt -> + let source = "exports = {foo: bar}; exports.baz = qux;" in + let { module_sig = { module_kind; _ }; _ } = visit source in + (* TODO report an export loc here *) + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (22, 29))) ); + ( "cjs_export_named" + >:: fun ctxt -> + let source = "module.exports.foo = 0; module.exports.bar = baz;" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) ); + ( "cjs_export_object" + >:: fun ctxt -> + let source = "module.exports = {foo: bar, baz: 0, qux};" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) ); + ( "cjs_export_ident" + >:: fun ctxt -> + let source = "module.exports = foo;" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) ); + ( "cjs_export_ident_then_props" + >:: fun ctxt -> + let source = "module.exports = foo; module.exports.bar = baz;" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) ); + ( "cjs_export_props_then_ident" + >:: fun ctxt -> + let source = "module.exports.foo = bar; module.exports = baz;" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 14))) ); + ( "export_named_type" + >:: fun ctxt -> + let source = "export type ty = string" in + let { module_sig = { type_exports_named; _ }; _ } = visit source in + let type_export = assert_singleton_assoc ~ctxt "ty" type_exports_named in + match type_export with + | (_, TypeExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "type ty = string" source loc + | _ -> assert_failure "Unexpected type export" ); + ( "export_named_opaque_type" + >:: fun ctxt -> + let source = "export opaque type ty = string" in + let { module_sig = { type_exports_named; _ }; _ } = visit source in + let type_export = assert_singleton_assoc ~ctxt "ty" type_exports_named in + match type_export with + | (_, TypeExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "opaque type ty = string" source loc + | _ -> assert_failure "Unexpected type export" ); + ( "export_named_interface" + >:: fun ctxt -> + let source = "export interface I {}" in + let { module_sig = { type_exports_named; _ }; _ } = visit source in + let type_export = assert_singleton_assoc ~ctxt "I" type_exports_named in + match type_export with + | (_, TypeExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "interface I {}" source loc + | _ -> assert_failure "Unexpected type export" ); + ( "export_default_expr" + >:: fun ctxt -> + let source = "export default 0" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = None }) -> + 
assert_substring_equal ~ctxt "default" source default_loc + | _ -> assert_failure "Unexpected export") ); + ( "export_default_anon_decl" + >:: fun ctxt -> + let source = "export default function() {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = None }) -> + assert_substring_equal ~ctxt "default" source default_loc + | _ -> assert_failure "Unexpected export") ); + ( "export_default_named_func" + >:: fun ctxt -> + let source = "export default function foo() {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = Some (loc, "foo") }) -> + assert_substring_equal ~ctxt "default" source default_loc; + assert_substring_equal ~ctxt "foo" source loc + | _ -> assert_failure "Unexpected export") ); + ( "export_default_named_class" + >:: fun ctxt -> + let source = "export default function C() {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = Some (loc, "C") }) -> + assert_substring_equal ~ctxt "default" source default_loc; + assert_substring_equal ~ctxt "C" source loc + | _ -> assert_failure "Unexpected export") ); + ( "export_named_func" + >:: fun ctxt -> + let source = "export function foo() {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "foo" named in + match export with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "foo" source loc + | _ -> assert_failure "Unexpected export") ); + ( "export_named_class" + >:: fun ctxt -> + let source = "export class C {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "C" named in + match export with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "C" source loc + | _ -> assert_failure "Unexpected export") ); + ( "export_named_vars" + >:: fun ctxt -> + let source = "export var x, y = 0, [a] = [], {p} = {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 4 (List.length named); + let x_loc = + match List.assoc "x" named with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> loc + | _ -> assert_failure "Unexpected export" + in + let y_loc = + match List.assoc "y" named with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> loc + | _ -> assert_failure "Unexpected export" + in + let a_loc = + match List.assoc "a" named with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> loc + | _ -> assert_failure "Unexpected export" + in + let p_loc = + match List.assoc "p" named with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> loc + | _ -> assert_failure "Unexpected export" + in + assert_substring_equal ~ctxt "x" source x_loc; + assert_substring_equal ~ctxt "y" source y_loc; + assert_substring_equal ~ctxt "a" source a_loc; + assert_substring_equal ~ctxt "p" source 
p_loc) ); + ( "export_named_specs" + >:: fun ctxt -> + let source = "export {x, y as z}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 2 (List.length named); + let x_loc = + match List.assoc "x" named with + | ( _, + ExportNamed + { loc; kind = NamedSpecifier { local = (x_loc, "x"); source = None } } ) + when x_loc = loc -> + loc + | _ -> assert_failure "Unexpected export" + in + let (y_loc, z_loc) = + match List.assoc "z" named with + | ( _, + ExportNamed + { loc; kind = NamedSpecifier { local = (y_loc, "y"); source = None } } ) -> + (y_loc, loc) + | _ -> assert_failure "Unexpected export" + in + assert_substring_equal ~ctxt "x" source x_loc; + assert_substring_equal ~ctxt "y" source y_loc; + assert_substring_equal ~ctxt "z" source z_loc) ); + ( "export_star" + >:: fun ctxt -> + let source = "export * from 'foo'" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_star:(function + | [(_, ExportStar { star_loc; source = (source_loc, "foo") })] -> + assert_substring_equal ~ctxt "*" source star_loc; + assert_substring_equal ~ctxt "'foo'" source source_loc + | _ -> assert_failure "Unexpected export") ); + ( "export_ns" + >:: fun ctxt -> + let source = "export * as ns from 'foo'" in + let parse_options = + Parser_env.{ default_parse_options with esproposal_export_star_as = true } + in + let { module_sig = { module_kind; _ }; _ } = visit ~parse_options source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "ns" named in + match export with + | (_, ExportNs { loc; source = (source_loc, "foo"); _ }) -> + assert_substring_equal ~ctxt "ns" source loc; + assert_substring_equal ~ctxt "'foo'" source source_loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_module.exports" + >:: fun ctxt -> + let source = "declare module.exports: ty" in + let { module_sig = { module_kind; _ }; _ } = visit source in + (* TODO use just the `module.exports` location *) + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (0, 26))) ); + ( "declare_export_default" + >:: fun ctxt -> + let source = "declare export default string" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = None }) -> + assert_substring_equal ~ctxt "default" source default_loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_default_func" + >:: fun ctxt -> + let source = "declare export default function foo(): void" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = Some (loc, "foo") }) -> + assert_substring_equal ~ctxt "default" source default_loc; + assert_substring_equal ~ctxt "foo" source loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_default_class" + >:: fun ctxt -> + let source = "declare export default class C {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "default" named in + match export with + | (_, ExportDefault { default_loc; local = Some (loc, "C") }) -> + 
assert_substring_equal ~ctxt "default" source default_loc; + assert_substring_equal ~ctxt "C" source loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_named_func" + >:: fun ctxt -> + let source = "declare export function foo(): void" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "foo" named in + match export with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "foo" source loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_named_class" + >:: fun ctxt -> + let source = "declare export class C {}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "C" named in + match export with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "C" source loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_named_var" + >:: fun ctxt -> + let source = "declare export var foo: string" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + let export = assert_singleton_assoc ~ctxt "foo" named in + match export with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> + assert_substring_equal ~ctxt "foo" source loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_named_specs" + >:: fun ctxt -> + let source = "declare export {x, y as z}" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 2 (List.length named); + let x_loc = + match List.assoc "x" named with + | ( _, + ExportNamed + { loc; kind = NamedSpecifier { local = (x_loc, "x"); source = None } } ) + when x_loc = loc -> + loc + | _ -> assert_failure "Unexpected export" + in + let (y_loc, z_loc) = + match List.assoc "z" named with + | ( _, + ExportNamed + { loc; kind = NamedSpecifier { local = (y_loc, "y"); source = None } } ) -> + (y_loc, loc) + | _ -> assert_failure "Unexpected export" + in + assert_substring_equal ~ctxt "x" source x_loc; + assert_substring_equal ~ctxt "y" source y_loc; + assert_substring_equal ~ctxt "z" source z_loc) ); + ( "declare_export_star" + >:: fun ctxt -> + let source = "declare export * from 'foo'" in + let { module_sig = { module_kind; _ }; _ } = visit source in + assert_es module_kind ~assert_star:(function + | [(_, ExportStar { star_loc; source = (source_loc, "foo") })] -> + assert_substring_equal ~ctxt "*" source star_loc; + assert_substring_equal ~ctxt "'foo'" source source_loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_export_ns" + >:: fun ctxt -> + let source = "declare export * as ns from 'foo'" in + let parse_options = + Parser_env.{ default_parse_options with esproposal_export_star_as = true } + in + let { module_sig = { module_kind; _ }; _ } = visit ~parse_options source in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 1 (List.length named); + let (loc, source_loc) = + match List.assoc "ns" named with + | (_, ExportNs { loc; source = (source_loc, "foo"); _ }) -> (loc, source_loc) + | _ -> assert_failure "Unexpected export" + in + assert_substring_equal ~ctxt "ns" source loc; + assert_substring_equal ~ctxt "'foo'" source source_loc) ); + ( "declare_module" + >:: fun ctxt -> + let source = "declare module foo {}" in + let { declare_modules; _ } 
= visit source in + let modules = declare_modules in + assert_equal ~ctxt 1 (SMap.cardinal modules); + let (loc, { requires; module_kind; type_exports_named; type_exports_star; info = () }) = + SMap.find_unsafe "foo" modules + in + assert_substring_equal ~ctxt source source loc; + assert_equal ~ctxt 0 (List.length requires); + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt None); + assert_equal ~ctxt 0 (List.length type_exports_named); + assert_equal ~ctxt 0 (List.length type_exports_star) ); + ( "declare_module_export_type" + >:: function + | ctxt -> + let source = "declare module foo { declare export type bar = string }" in + let { declare_modules; _ } = visit source in + assert_equal ~ctxt 1 (SMap.cardinal declare_modules); + let (_, { type_exports_named; _ }) = SMap.find_unsafe "foo" declare_modules in + assert_equal ~ctxt 1 (List.length type_exports_named); + let loc = + match List.assoc "bar" type_exports_named with + | (_, TypeExportNamed { loc; kind = NamedDeclaration }) -> loc + | _ -> assert_failure "Unexpected type export" + in + assert_substring_equal ~ctxt "bar" source loc ); + ( "declare_module_export_default_expr" + >:: fun ctxt -> + let source = "declare module foo { declare export default ty }" in + let { declare_modules; _ } = visit source in + assert_equal ~ctxt 1 (SMap.cardinal declare_modules); + let (_, { module_kind; _ }) = SMap.find_unsafe "foo" declare_modules in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 1 (List.length named); + match List.assoc "default" named with + | (_, ExportDefault { default_loc; local = None }) -> + assert_substring_equal ~ctxt "default" source default_loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_module_export_default_decl" + >:: fun ctxt -> + let source = "declare module foo { declare export default function bar(): void }" in + let { declare_modules; _ } = visit source in + assert_equal ~ctxt 1 (SMap.cardinal declare_modules); + let (_, { module_kind; _ }) = SMap.find_unsafe "foo" declare_modules in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 1 (List.length named); + match List.assoc "default" named with + | (_, ExportDefault { default_loc; local = Some (loc, "bar") }) -> + assert_substring_equal ~ctxt "default" source default_loc; + assert_substring_equal ~ctxt "bar" source loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_module_export_name_func" + >:: fun ctxt -> + let source = "declare module foo { declare export function bar(): void }" in + let { declare_modules; _ } = visit source in + assert_equal ~ctxt 1 (SMap.cardinal declare_modules); + let (_, { module_kind; _ }) = SMap.find_unsafe "foo" declare_modules in + assert_es module_kind ~assert_named:(fun named -> + assert_equal ~ctxt 1 (List.length named); + let loc = + match List.assoc "bar" named with + | (_, ExportNamed { loc; kind = NamedDeclaration }) -> loc + | _ -> assert_failure "Unexpected export" + in + assert_substring_equal ~ctxt "bar" source loc) ); + ( "declare_module_export_star" + >:: fun ctxt -> + let source = "declare module foo { declare export * from 'bar' }" in + let { declare_modules; _ } = visit source in + assert_equal ~ctxt 1 (SMap.cardinal declare_modules); + let (_, { module_kind; _ }) = SMap.find_unsafe "foo" declare_modules in + assert_es module_kind ~assert_star:(function + | [(_, ExportStar { star_loc; source = (source_loc, "bar") })] -> + assert_substring_equal ~ctxt "*" source star_loc; + assert_substring_equal ~ctxt "'bar'" source 
source_loc + | _ -> assert_failure "Unexpected export") ); + ( "declare_module_declare_module.export" + >:: fun ctxt -> + let source = "declare module foo { declare module.exports: ty }" in + let { declare_modules; _ } = visit source in + assert_equal ~ctxt 1 (SMap.cardinal declare_modules); + let (_, { module_kind; _ }) = SMap.find_unsafe "foo" declare_modules in + (* TODO use only the location of `module.exports` *) + assert_cjs ~source module_kind ~assert_export_loc:(assert_equal ~ctxt (Some (21, 47))) ); + ( "err_indeterminate_clobber_after_export" + >:: fun ctxt -> + let source = "export default 0; module.exports = 0;" in + match visit_err source with + | IndeterminateModuleType loc -> assert_substring_equal ~ctxt "module.exports" source loc + ); + ( "err_indeterminate_export_after_clobber" + >:: fun ctxt -> + let source = "module.exports = 0; export default 0;" in + match visit_err source with + | IndeterminateModuleType loc -> + assert_substring_equal ~ctxt "export default 0;" source loc ); + ] diff --git a/src/parser_utils/__tests__/flow_ast_differ_test.ml b/src/parser_utils/__tests__/flow_ast_differ_test.ml index de95a2320d7..7c0613b8d70 100644 --- a/src/parser_utils/__tests__/flow_ast_differ_test.ml +++ b/src/parser_utils/__tests__/flow_ast_differ_test.ml @@ -1,198 +1,735 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast -module Type = Flow_ast.Type +module Type = Ast.Type open Flow_ast_differ open Utils_js - open OUnit2 -let parse_options = Some Parser_env.({ - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = true; - use_strict = false; -}) - -class useless_mapper = object - inherit Flow_ast_mapper.mapper as super - - method! literal _loc (expr: Ast.Literal.t) = - let open Ast.Literal in - match expr.value with - | Number 4.0 -> - {value=Number 5.0; raw="5"} - | _ -> expr - - method! binary loc (expr: (Loc.t, Loc.t) Ast.Expression.Binary.t) = - let open Ast.Expression.Binary in - let expr = super#binary loc expr in - let { operator; _ } = expr in - match operator with - | Plus -> - { expr with operator=Minus } - | Mult -> - { expr with operator=Plus } - | _ -> expr - - method! unary_expression loc (expr: (Loc.t, Loc.t) Ast.Expression.Unary.t) = - let open Ast.Expression.Unary in - let expr = super#unary_expression loc expr in - let { operator; _ } = expr in - match operator with - | Minus -> expr - | _ -> - { expr with operator=Minus } - - method! identifier id = - let (loc, name) = id in - if name = "rename" then - (loc, "gotRenamed") - else - id - - method! variable_declaration loc (decl: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) = - let open Ast.Statement.VariableDeclaration in - let decl = super#variable_declaration loc decl in - let { declarations; kind } = decl in - if kind = Var then { declarations; kind = Const } - else decl - - method! type_ (annot: (Loc.t, Loc.t) Flow_ast.Type.t) = - let (loc, typ) = annot in - match typ with - | Type.Number -> (loc, Type.String) - | _ -> annot -end - -class insert_end_mapper = object - inherit Flow_ast_mapper.mapper - method! 
statement_list stmts = - let stmt = List.nth stmts (List.length stmts - 1) in - stmts @ [stmt] -end - -class insert_begin_mapper = object - inherit Flow_ast_mapper.mapper - method! statement_list stmts = - let stmt = List.nth stmts (List.length stmts - 1) in - stmt :: stmts -end - -class insert_dup_mapper = object - inherit Flow_ast_mapper.mapper - method! statement_list stmts = - let rec dup = function - | [] -> [] - | h :: t -> h :: h :: (dup t) in - dup stmts -end - -class delete_mapper = object - inherit Flow_ast_mapper.mapper - method! statement_list = List.tl -end - -class delete_annot_mapper = object - inherit Flow_ast_mapper.mapper as super - - method! pattern ?kind expr = - let open Flow_ast.Pattern in - let open Flow_ast.Pattern.Identifier in - let expr = super#pattern ?kind expr in - let (loc, patt) = expr in - match patt with - | Identifier id -> loc, Identifier { id with annot = None } - | _ -> expr - - method! return_type_annotation return = - let open Flow_ast.Function in - match super#return_type_annotation return with - | Available (loc, _) -> Missing loc - | Missing _ -> return -end - -class insert_annot_mapper = object - inherit Flow_ast_mapper.mapper as super - - method! pattern ?kind expr = - let open Flow_ast.Pattern in - let open Flow_ast.Pattern.Identifier in - let expr = super#pattern ?kind expr in - let (loc, patt) = expr in - match patt with - | Identifier id -> loc, Identifier { id with annot = Some (loc, (loc, Type.Number)) } - | _ -> expr - - method! return_type_annotation return = - let open Flow_ast.Function in - match super#return_type_annotation return with - | Available _ -> return - | Missing _loc -> Available (_loc, (_loc, Type.Number)) -end - -class prop_annot_mapper = object - inherit Flow_ast_mapper.mapper as super - - method! class_property _loc (prop: (Loc.t, Loc.t) Flow_ast.Class.Property.t') = - let open Flow_ast.Class.Property in - let prop = super#class_property _loc prop in - let { annot; _ } = prop in - let annot' = match annot with - | Some _ -> annot - | None -> Some (Loc.none, (Loc.none, Type.Number)) in - { prop with annot = annot' } -end - -class insert_typecast_mapper = object - inherit Flow_ast_mapper.mapper - method! expression expression = - let loc, _ = expression in - loc, Flow_ast.Expression.TypeCast - { Flow_ast.Expression.TypeCast.annot=(loc, (loc, Flow_ast.Type.Any)); expression } -end +let parse_options = + Some + Parser_env. + { + enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types = true; + use_strict = false; + } + +class useless_mapper = + object (this) + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! literal _loc (expr : Loc.t Ast.Literal.t) = + Ast.Literal.( + match expr.value with + | Number 4.0 -> + { value = Number 5.0; raw = "5"; comments = Flow_ast_utils.mk_comments_opt () } + | _ -> expr) + + method! string_literal_type _loc (lit : Ast.StringLiteral.t) = + Ast.StringLiteral.( + let { value; _ } = lit in + if String.equal "RenameSL" value then + { value = "\"GotRenamedSL\""; raw = "\"GotRenamedSL\"" } + else + lit) + + method! logical loc (expr : (Loc.t, Loc.t) Ast.Expression.Logical.t) = + Ast.Expression.Logical.( + let expr = super#logical loc expr in + let { operator; _ } = expr in + match operator with + | NullishCoalesce -> { expr with operator = Or } + | _ -> expr) + + method! 
binary loc (expr : (Loc.t, Loc.t) Ast.Expression.Binary.t) = + Ast.Expression.Binary.( + let expr = super#binary loc expr in + let { operator; _ } = expr in + match operator with + | Plus -> { expr with operator = Minus } + | Mult -> { expr with operator = Plus } + | _ -> expr) + + method! unary_expression loc (expr : (Loc.t, Loc.t) Ast.Expression.Unary.t) = + Ast.Expression.Unary.( + let expr = super#unary_expression loc expr in + let { operator; _ } = expr in + match operator with + | Minus -> expr + | _ -> { expr with operator = Minus }) + + method! identifier id = + let (loc, { Ast.Identifier.name; comments = _ }) = id in + match name with + | "rename" -> Flow_ast_utils.ident_of_source (loc, "gotRenamed") + | "Rename" -> Flow_ast_utils.ident_of_source (loc, "GotRenamed") + | "RENAME" -> Flow_ast_utils.ident_of_source (loc, "GOT_RENAMED") + | _ -> id + + method! variable_declaration loc (decl : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) = + Ast.Statement.VariableDeclaration.( + let decl = super#variable_declaration loc decl in + let { declarations; kind } = decl in + if kind = Var then + { declarations; kind = Const } + else + decl) + + method! template_literal_element (elem : 'loc Ast.Expression.TemplateLiteral.Element.t) = + Ast.Expression.TemplateLiteral.Element.( + let (loc, { value; tail }) = elem in + if value.raw = "rename" then + (loc, { value = { raw = "gotRenamed"; cooked = "gotRenamed" }; tail }) + else + elem) + + method! type_ (annot : (Loc.t, Loc.t) Type.t) = + Ast.NumberLiteral.( + let annot = super#type_ annot in + let (loc, typ) = annot in + match typ with + | Type.Number -> (loc, Type.String) + | Type.NumberLiteral _ -> (loc, Type.NumberLiteral { value = 4.0; raw = "4.0" }) + | _ -> annot) + + method! jsx_element _loc (elem : (Loc.t, Loc.t) Ast.JSX.element) = + Ast.JSX.( + let { openingElement = (_, open_elem) as openingElement; closingElement; children } = + elem + in + let openingElement' = this#jsx_opening_element openingElement in + let closingElement' = + let (loc, open_elem') = openingElement' in + if open_elem'.Opening.selfClosing then + None + (* if selfClosing changed from true to false, construct a closing element *) + else if open_elem.Opening.selfClosing then + Some (loc, { Closing.name = open_elem'.Opening.name }) + else + Flow_ast_mapper.map_opt super#jsx_closing_element closingElement + in + let children' = this#jsx_children children in + if + openingElement == openingElement' + && closingElement == closingElement' + && children == children' + then + elem + else + { + openingElement = openingElement'; + closingElement = closingElement'; + children = children'; + }) + + method! jsx_opening_element (elem : (Loc.t, Loc.t) Ast.JSX.Opening.t) = + Ast.JSX.Opening.( + let (loc, { name; selfClosing; attributes }) = elem in + let name' = this#jsx_name name in + let selfClosing' = + match name' with + | Ast.JSX.Identifier (_, { Ast.JSX.Identifier.name = id_name }) -> + if id_name = "selfClosing" then + true + else if id_name = "notSelfClosing" then + false + else + selfClosing + | _ -> selfClosing + in + let attributes' = ListUtils.ident_map super#jsx_opening_attribute attributes in + if name == name' && selfClosing == selfClosing' && attributes == attributes' then + elem + else + (loc, { name = name'; selfClosing = selfClosing'; attributes = attributes' })) + + method! 
jsx_identifier (id : Loc.t Ast.JSX.Identifier.t) = + Ast.JSX.Identifier.( + let (loc, { name }) = id in + match name with + | "rename" -> (loc, { name = "gotRenamed" }) + | "Rename" -> (loc, { name = "GotRenamed" }) + | "RENAME" -> (loc, { name = "GOT_RENAMED" }) + | _ -> id) + + method! jsx_attribute (attr : (Loc.t, Loc.t) Ast.JSX.Attribute.t) = + Ast.JSX.Attribute.( + let (loc, { name; value }) = attr in + let name' = + match name with + | Identifier id -> Identifier (this#jsx_identifier id) + | _ -> name + in + let value' = Flow_ast_mapper.map_opt super#jsx_attribute_value value in + if name == name' && value == value' then + attr + else + (loc, { name = name'; value = value' })) + + method! jsx_child (child : (Loc.t, Loc.t) Ast.JSX.child) = + Ast.JSX.( + match child with + | (loc, Text txt) -> + let { Text.value; _ } = txt in + if value = "rename" then + (loc, Text { Text.value = "gotRenamed"; Text.raw = "gotRenamed" }) + else + child + | _ -> super#jsx_child child) + + method! variance (variance : Loc.t Ast.Variance.t option) = + Ast.Variance.( + match variance with + | Some (loc, Minus) -> Some (loc, Plus) + | _ -> variance) + + method! type_parameter_instantiation_with_implicit (loc, targs) = + Ast.Expression.TypeParameterInstantiation.( + let f targ = + match targ with + | Explicit targ' -> Explicit (this#type_ targ') + | Implicit loc -> Explicit (loc, Ast.Type.Any) + in + (loc, Core_list.map ~f targs)) + + method! function_param_type (fpt : (Loc.t, Loc.t) Ast.Type.Function.Param.t) = + Ast.Type.Function.Param.( + let ((loc, fpt') as fpt) = super#function_param_type fpt in + let { name; _ } = fpt' in + let name' = Flow_ast_mapper.map_opt this#identifier name in + if name' == name then + fpt + else + (loc, { fpt' with name = name' })) + + method! update_expression loc (expr : (Loc.t, Loc.t) Ast.Expression.Update.t) = + Ast.Expression.Update.( + let expr = super#update_expression loc expr in + let { operator; _ } = expr in + match operator with + | Increment -> expr + | _ -> { expr with operator = Increment }) + + method! object_property_type (opt : (Loc.t, Loc.t) Ast.Type.Object.Property.t) = + Ast.Type.Object.Property.( + let ((loc, opt') as opt) = super#object_property_type opt in + let { key; variance; _ } = opt' in + let key' = this#object_key key in + let variance' = this#variance variance in + if key' == key && variance' == variance then + opt + else + (loc, { opt' with key = key'; variance = variance' })) + end + +class literal_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! literal _loc (expr : Loc.t Ast.Literal.t) = + Ast.Literal.( + match expr.value with + | String "rename" -> + { + value = String "gotRenamed"; + raw = "gotRenamed"; + comments = Flow_ast_utils.mk_comments_opt (); + } + | Boolean false -> + { value = Boolean true; raw = "true"; comments = Flow_ast_utils.mk_comments_opt () } + | Null -> + { + value = String "wasNull"; + raw = "wasNull"; + comments = Flow_ast_utils.mk_comments_opt (); + } + | Number 4.0 -> + { value = Number 5.0; raw = "5"; comments = Flow_ast_utils.mk_comments_opt () } + (* TODO: add test for RegExp case? *) + | _ -> expr) + end + +class insert_variance_mapper = + object (this) + inherit useless_mapper as super + + method! 
type_parameter_declaration_type_param + (type_param : (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.TypeParam.t) = + Ast.Type.ParameterDeclaration.TypeParam.( + let ((loc, type_param') as orig) = + super#type_parameter_declaration_type_param type_param + in + let { variance; _ } = type_param' in + let variance' = this#variance_ loc variance in + if variance == variance' then + orig + else + (loc, { type_param' with variance = variance' })) + + (* New variance method with a different type signature that allows us to insert a loc *) + method variance_ (loc : Loc.t) (variance : Loc.t Ast.Variance.t option) = + Ast.Variance.( + match variance with + | None -> Some (loc, Plus) + | _ -> variance) + end + +class delete_variance_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! variance (variance : Loc.t Ast.Variance.t option) = + Ast.Variance.( + match variance with + | Some (_loc, Minus) -> None + | _ -> variance) + end + +class insert_end_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! statement_list stmts = + let stmt = List.nth stmts (List.length stmts - 1) in + stmts @ [stmt] + end + +class insert_begin_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! statement_list stmts = + let stmt = List.nth stmts (List.length stmts - 1) in + stmt :: stmts + end + +class insert_dup_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! statement_list stmts = + let rec dup = function + | [] -> [] + | h :: t -> h :: h :: dup t + in + dup stmts + end + +class first_last_dup_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! statement_list stmts = (List.hd stmts :: stmts) @ [List.hd (List.rev stmts)] + end + +class insert_import_mapper = + object + inherit useless_mapper as super + + method! statement_list stmts = + if List.length stmts > 0 then + Ast.Statement.ImportDeclaration.( + Ast.StringLiteral.( + let stmts = super#statement_list stmts in + let (loc, _) = List.hd stmts in + let imp = + ( loc, + Ast.Statement.ImportDeclaration + { + importKind = Ast.Statement.ImportDeclaration.ImportValue; + source = (loc, { value = "baz"; raw = "\"baz\"" }); + default = None; + specifiers = + Some + (Ast.Statement.ImportDeclaration.ImportNamedSpecifiers + [ + { + kind = None; + local = None; + remote = Flow_ast_utils.ident_of_source (loc, "baz"); + }; + ]); + } ) + in + imp :: stmts)) + else + super#statement_list stmts + end + +class insert_second_import_mapper = + object + inherit useless_mapper as super + + method! statement_list stmts = + if List.length stmts > 0 then + Ast.Statement.ImportDeclaration.( + Ast.StringLiteral.( + let stmts = super#statement_list stmts in + let (loc, _) = List.hd stmts in + let imp = + ( loc, + Ast.Statement.ImportDeclaration + { + importKind = Ast.Statement.ImportDeclaration.ImportValue; + source = (loc, { value = "baz"; raw = "\"baz\"" }); + default = None; + specifiers = + Some + (Ast.Statement.ImportDeclaration.ImportNamedSpecifiers + [ + { + kind = None; + local = None; + remote = Flow_ast_utils.ident_of_source (loc, "baz"); + }; + ]); + } ) + in + List.hd stmts :: imp :: List.tl stmts)) + else + super#statement_list stmts + end + +class insert_second_cjsimport_mapper = + object + inherit useless_mapper as super + + method! 
statement_list stmts = + if List.length stmts > 0 then + Ast.Statement.Expression.( + Ast.Expression.Call.( + Ast.Literal.( + let stmts = super#statement_list stmts in + let (loc, _) = List.hd stmts in + let imp = + ( loc, + Ast.Statement.Expression + { + expression = + ( loc, + Ast.Expression.Call + { + callee = + ( loc, + Ast.Expression.Identifier + (Flow_ast_utils.ident_of_source (loc, "require")) ); + targs = None; + arguments = + [ + Ast.Expression.Expression + ( loc, + Ast.Expression.Literal + { + value = Ast.Literal.String "baz"; + raw = "\"baz\""; + comments = Flow_ast_utils.mk_comments_opt (); + } ); + ]; + } ); + directive = None; + } ) + in + List.hd stmts :: imp :: List.tl stmts))) + else + super#statement_list stmts + end + +class add_body_mapper = + object + inherit useless_mapper as super + + method! statement_list stmts = + if List.length stmts > 0 then + Ast.Statement.Expression.( + Ast.Expression.Call.( + Ast.Literal.( + let stmts = super#statement_list stmts in + let (loc, _) = List.rev stmts |> List.hd in + let imp = + ( loc, + Ast.Statement.Expression + { + expression = + ( loc, + Ast.Expression.Call + { + callee = + ( loc, + Ast.Expression.Identifier + (Flow_ast_utils.ident_of_source (loc, "foo")) ); + targs = None; + arguments = + [ + Ast.Expression.Expression + ( loc, + Ast.Expression.Literal + { + value = Ast.Literal.String "baz"; + raw = "\"baz\""; + comments = Flow_ast_utils.mk_comments_opt (); + } ); + ]; + } ); + directive = None; + } ) + in + stmts @ [imp]))) + else + super#statement_list stmts + end + +class delete_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! statement_list = List.tl + end + +class delete_end_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! statement_list stmt = List.rev stmt |> List.tl |> List.rev + end + +class delete_annot_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! pattern ?kind expr = + Ast.Pattern.( + Ast.Pattern.Identifier.( + let expr = super#pattern ?kind expr in + let (loc, patt) = expr in + match patt with + | Identifier id -> (loc, Identifier { id with annot = Type.Missing Loc.none }) + | _ -> expr)) + + method! type_annotation_hint return = + match super#type_annotation_hint return with + | Type.Available (loc, _) -> Type.Missing loc + | Type.Missing _ -> return + end + +class insert_annot_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! pattern ?kind expr = + Ast.Pattern.( + Ast.Pattern.Identifier.( + let expr = super#pattern ?kind expr in + let (loc, patt) = expr in + match patt with + | Identifier id -> + (loc, Identifier { id with annot = Type.Available (loc, (loc, Type.Number)) }) + | _ -> expr)) + + method! type_annotation_hint return = + match super#type_annotation_hint return with + | Type.Available _ -> return + | Type.Missing _loc -> Type.Available (_loc, (_loc, Type.Number)) + end + +class insert_function_annot_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! type_annotation_hint return = + match super#type_annotation_hint return with + | Type.Available _ -> return + | Type.Missing loc -> + Type.Available + ( loc, + ( loc, + Type.Function + { + Type.Function.tparams = None; + params = (loc, { Type.Function.Params.params = []; rest = None }); + return = (loc, Type.Number); + } ) ) + end + +class insert_import_and_annot_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! 
type_annotation_hint return = + match super#type_annotation_hint return with + | Type.Available _ -> return + | Type.Missing loc -> + Type.Available + ( loc, + ( loc, + Type.Function + { + Type.Function.tparams = None; + params = (loc, { Type.Function.Params.params = []; rest = None }); + return = (loc, Type.Number); + } ) ) + + method! program prog = + let (loc, stmts, comments) = super#program prog in + let import num = + let imp = Printf.sprintf "new_import%d" num in + Ast.Statement. + ( Loc.none, + ImportDeclaration + { + ImportDeclaration.importKind = ImportDeclaration.ImportType; + source = (Loc.none, { Ast.StringLiteral.value = imp; raw = imp }); + default = None; + specifiers = + Some + ImportDeclaration.( + ImportNamedSpecifiers + [ + { + kind = None; + local = Some (Flow_ast_utils.ident_of_source (Loc.none, "here")); + remote = Flow_ast_utils.ident_of_source (Loc.none, "there"); + }; + ]); + } ) + in + (loc, List.hd stmts :: import 1 :: import 2 :: List.tl stmts, comments) + end + +class prop_annot_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! class_property _loc (prop : (Loc.t, Loc.t) Ast.Class.Property.t') = + Ast.Class.Property.( + let prop = super#class_property _loc prop in + let { annot; _ } = prop in + let annot' = + match annot with + | Type.Available _ -> annot + | Type.Missing _ -> Type.Available (Loc.none, (Loc.none, Type.Number)) + in + { prop with annot = annot' }) + end + +class insert_typecast_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! expression expression = + let (loc, _) = expression in + ( loc, + Ast.Expression.TypeCast + { Ast.Expression.TypeCast.annot = (loc, (loc, Type.Any)); expression } ) + end + +class insert_type_param_instantiation = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! type_parameter_instantiation_with_implicit (loc, targs) = + Ast.Expression.TypeParameterInstantiation.(loc, Explicit (loc, Ast.Type.Any) :: targs) + end + +class add_comment_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! identifier (loc, i) = + Flow_ast.Syntax. + ( loc, + { + i with + Flow_ast.Identifier.comments = + Some + { + leading = [(Loc.none, Flow_ast.Comment.Block "hello")]; + trailing = [(Loc.none, Flow_ast.Comment.Block "bye")]; + internal = (); + }; + } ) + end + +class true_to_false_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! literal _loc (expr : Loc.t Ast.Literal.t) = + Ast.Literal.( + match expr.value with + | Boolean true -> + { value = Boolean false; raw = "false"; comments = Flow_ast_utils.mk_comments_opt () } + | _ -> expr) + + method! type_annotation (annot : (Loc.t, Loc.t) Ast.Type.annotation) = + Ast.Type.( + let (t1, a) = annot in + let (t2, right_var) = a in + match right_var with + | BooleanLiteral true -> (t1, (t2, BooleanLiteral false)) + | _ -> annot) + end + +class remove_annotation_rest_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! type_ (annot : (Loc.t, Loc.t) Type.t) = + let annot = super#type_ annot in + let (loc, typ) = annot in + match typ with + | Type.Intersection (t, t', _) -> (loc, Type.Intersection (t, t', [])) + | Type.Union (t, t', _) -> (loc, Type.Union (t, t', [])) + | _ -> annot + end + +class double_sequence_mapper = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! 
sequence _loc { Ast.Expression.Sequence.expressions } = + { Ast.Expression.Sequence.expressions = expressions @ expressions } + end let edits_of_source algo source mapper = - let ast, _ = Parser_flow.program source ~parse_options in + let (ast, _) = Parser_flow.program source ~parse_options in let new_ast = mapper#program ast in - let edits = - program algo ast new_ast - |> Ast_diff_printer.edits_of_changes None - in + let edits = program algo ast new_ast |> Ast_diff_printer.edits_of_changes None in (* Extract columns from the locs *) - List.map (fun (loc, text) -> Loc.((loc.start.column, loc._end.column), text)) edits + Core_list.map ~f:(fun (loc, text) -> Loc.((loc.start.column, loc._end.column), text)) edits -let debug_string_of_edit ((start, end_), text) = - Printf.sprintf "((%d, %d), %s)" start end_ text +let debug_string_of_edit ((start, end_), text) = Printf.sprintf "((%d, %d), %s)" start end_ text -let debug_string_of_edits = - List.map debug_string_of_edit - %> String.concat ", " +let debug_string_of_edits = Core_list.map ~f:debug_string_of_edit %> String.concat ", " let debug_print_string_script script = let print_string_result (i, chg) = match chg with | Replace (ol, ne) -> print_endline (Utils_js.spf "Replace %s with %s at %d" ol ne i) | Insert (_, ins) -> print_endline (Utils_js.spf "Insert %s at %d" (String.concat ", " ins) i) - | Delete d -> print_endline (Utils_js.spf "Delete %s at %d" d i) in + | Delete d -> print_endline (Utils_js.spf "Delete %s at %d" d i) + in match script with | None -> print_endline "no script" | Some sc -> List.iter print_string_result sc let apply_edits source edits = let apply_edit acc ((_begin, _end), str) = - let before = Str.string_before acc (_begin) in - let after = Str.string_after acc (_end) in - before ^ str ^ after in + let before = Str.string_before acc _begin in + let after = Str.string_after acc _end in + before ^ str ^ after + in List.fold_left apply_edit source (List.rev edits) let print_debug_info source edits_trivial edits_standard = @@ -210,8 +747,8 @@ let assert_edits_equal ctxt ~edits ~source ~expected ~mapper = assert_equal ~ctxt expected (apply_edits source edits_trivial); assert_equal ~ctxt expected (apply_edits source edits_standard) -let assert_edits_differ ctxt ~edits_trivial ~edits_standard ~source - ~trivial_expected ~standard_expected ~mapper = +let assert_edits_differ + ctxt ~edits_trivial ~edits_standard ~source ~trivial_expected ~standard_expected ~mapper = let edits_trivial' = edits_of_source Trivial source mapper in let edits_standard' = edits_of_source Standard source mapper in assert_equal ~ctxt edits_trivial edits_trivial'; @@ -219,481 +756,2415 @@ let assert_edits_differ ctxt ~edits_trivial ~edits_standard ~source assert_equal ~ctxt trivial_expected (apply_edits source edits_trivial'); assert_equal ~ctxt standard_expected (apply_edits source edits_standard') -let tests = "ast_differ" >::: [ - "simple" >:: begin fun ctxt -> - let source = "function foo() { (5 - 3); 4; (6 + 4); }" in - assert_edits_equal ctxt ~edits:[((26, 27), "(5)"); ((30, 35), "(6 - 5)")] - ~source ~expected:"function foo() { (5 - 3); (5); ((6 - 5)); }" ~mapper:(new useless_mapper) - end; - "class" >:: begin fun ctxt -> - let source = "class Foo { bar() { 4; } }" in - assert_edits_equal ctxt ~edits:[((20, 21), "(5)")] ~source - ~expected:"class Foo { bar() { (5); } }" ~mapper:(new useless_mapper) - end; - "class2" >:: begin fun ctxt -> - let source = "class Foo { bar = 4; }" in - assert_edits_equal ctxt ~edits:[((18, 19), "(5)")] ~source - 
~expected:"class Foo { bar = (5); }" ~mapper:(new useless_mapper) - end; - "class_prop_annot" >:: begin fun ctxt -> - let source = "class A { f = (x: string) => x; }" in - assert_edits_equal ctxt ~edits:[(10, 31), "f: number = (x: string) => x;"] ~source - ~expected:"class A { f: number = (x: string) => x; }" - ~mapper:(new prop_annot_mapper) - end; - "obj_prop" >:: begin fun ctxt -> - let source = "let x = { rename : 4 }" in - assert_edits_equal ctxt ~edits:[((10, 16), "gotRenamed"); ((19, 20), "(5)")] ~source - ~expected:"let x = { gotRenamed : (5) }" - ~mapper:(new useless_mapper) - end; - "obj_prop2" >:: begin fun ctxt -> - let source = "let x = { bar() { rename; } }" in - assert_edits_equal ctxt ~edits:[(18, 24), "gotRenamed"] ~source - ~expected:"let x = { bar() { gotRenamed; } }" - ~mapper:(new useless_mapper) - end; - "obj_prop3" >:: begin fun ctxt -> - let source = "let x = { 4 : 3 }" in - assert_edits_equal ctxt ~edits:[(10, 15), "5: 3"] ~source - ~expected:"let x = { 5: 3 }" - ~mapper:(new useless_mapper) - end; - "obj_spread_prop" >:: begin fun ctxt -> - let source = "let x = { ...rename, x : 4}" in - assert_edits_equal ctxt ~edits:[((13, 19), "gotRenamed"); ((25, 26), "(5)")] ~source - ~expected:"let x = { ...gotRenamed, x : (5)}" - ~mapper:(new useless_mapper) - end; - "precedence" >:: begin fun ctxt -> - let source = "5 - 3 * 3" in - (* It is mandatory to insert the parens here *) - assert_edits_equal ctxt ~edits:[((4, 9), "(3 + 3)")] ~source - ~expected:"5 - (3 + 3)" ~mapper:(new useless_mapper) - end; - "identifier" >:: begin fun ctxt -> - let source = "5 - rename" in - assert_edits_equal ctxt ~edits:[((4, 10), "gotRenamed")] ~source - ~expected:"5 - gotRenamed" ~mapper:(new useless_mapper) - end; - "new" >:: begin fun ctxt -> - let source = "new rename()" in - assert_edits_equal ctxt ~edits:[((4, 10), "gotRenamed")] ~source ~expected:"new gotRenamed()" - ~mapper:(new useless_mapper) - end; - "unary_same_op" >:: begin fun ctxt -> - let source = "-rename" in - assert_edits_equal ctxt ~edits:[((1, 7), "gotRenamed")] ~source ~expected:"-gotRenamed" - ~mapper:(new useless_mapper) - end; - "unary_diff_op" >:: begin fun ctxt -> - let source = "+rename" in - assert_edits_equal ctxt ~edits:[((0, 7), "(-gotRenamed)")] ~source ~expected:"(-gotRenamed)" - ~mapper:(new useless_mapper) - end; - "block" >:: begin fun ctxt -> - let source = "{ 2; 4; 10; rename; }" in - assert_edits_equal ctxt ~edits:[((5, 6), "(5)"); ((12, 18), "gotRenamed")] ~source - ~expected:"{ 2; (5); 10; gotRenamed; }" ~mapper:(new useless_mapper) - end; - "if_nochange" >:: begin fun ctxt -> - let source = "if (true) { false; } else { true; }" in - assert_edits_equal ctxt ~edits:[] ~source - ~expected:"if (true) { false; } else { true; }" ~mapper:(new useless_mapper) - end; - "if_noblock" >:: begin fun ctxt -> - let source = "if (4) rename;" in - assert_edits_equal ctxt ~edits:[((4, 5), "(5)"); ((7, 13), "gotRenamed");] ~source - ~expected:"if ((5)) gotRenamed;" ~mapper:(new useless_mapper) - end; - "if_partial" >:: begin fun ctxt -> - let source = "if (4) { rename; }" in - assert_edits_equal ctxt ~edits:[((4, 5), "(5)"); ((9, 15), "gotRenamed");] ~source - ~expected:"if ((5)) { gotRenamed; }" ~mapper:(new useless_mapper) - end; - "if_full" >:: begin fun ctxt -> - let source = "if (4) { 4; } else { rename }" in - assert_edits_equal ctxt ~edits:[((4, 5), "(5)"); ((9, 10), "(5)"); ((21, 27), "gotRenamed")] - ~source ~expected:"if ((5)) { (5); } else { gotRenamed }" ~mapper:(new useless_mapper) - end; - 
"with_nochange" >:: begin fun ctxt -> - let source = "with (object) { foo = true; }" in - assert_edits_equal ctxt ~edits:[] ~source - ~expected:"with (object) { foo = true; }" ~mapper:(new useless_mapper) - end; - "with_object" >:: begin fun ctxt -> - let source = "with (rename) { foo = true; };" in - assert_edits_equal ctxt ~edits:[(6, 12), "gotRenamed"] ~source - ~expected:"with (gotRenamed) { foo = true; };" ~mapper:(new useless_mapper) - end; - "with_body" >:: begin fun ctxt -> - let source = "with (objct) { rename; };" in - assert_edits_equal ctxt ~edits:[(15, 21), "gotRenamed"] ~source - ~expected:"with (objct) { gotRenamed; };" ~mapper:(new useless_mapper) - end; - "function_expression" >:: begin fun ctxt -> - let source = "(function() { 4; })" in - assert_edits_equal ctxt ~edits:[((14, 15), "(5)")] ~source ~expected:"(function() { (5); })" - ~mapper:(new useless_mapper) - end; - "arrow_function" >:: begin fun ctxt -> - let source = "let bar = (x) => 4;" in - assert_edits_equal ctxt ~edits:[(17, 18), "(5)"] ~source - ~expected:"let bar = (x) => (5);" - ~mapper:(new useless_mapper) - end; - "call" >:: begin fun ctxt -> - let source = "rename()" in - assert_edits_equal ctxt ~edits:[((0, 6), "gotRenamed")] ~source ~expected:"gotRenamed()" - ~mapper:(new useless_mapper) - end; - "variable_declaration_kind" >:: begin fun ctxt -> - let source = "var x = 5;" in - assert_edits_equal ctxt ~edits:[((0, 10), "const x = 5;")] ~source ~expected:"const x = 5;" - ~mapper:(new useless_mapper) - end; - "variable_declaration_expression" >:: begin fun ctxt -> - let source = "let x = 4;" in - assert_edits_equal ctxt ~edits:[((8, 9), "(5)")] ~source ~expected:"let x = (5);" - ~mapper:(new useless_mapper) - end; - "variable_declaration_kind_expression" >:: begin fun ctxt -> - let source = "var x = 4;" in - assert_edits_equal ctxt ~edits:[((0, 10), "const x = 5;")] ~source ~expected:"const x = 5;" - ~mapper:(new useless_mapper) - end; - "for" >:: begin fun ctxt -> - let source = "for (i = 7; i < rename; i++) {}" in - assert_edits_equal ctxt ~edits:[(16, 22) , "gotRenamed"] ~source - ~expected:"for (i = 7; i < gotRenamed; i++) {}" ~mapper:(new useless_mapper) - end; - "for_init" >:: begin fun ctxt -> - let source = "for (let i = 4; i < 10; i++) {}" in - assert_edits_equal ctxt ~edits:[(13, 14), "(5)"] ~source - ~expected:"for (let i = (5); i < 10; i++) {}" ~mapper:(new useless_mapper) - end; - "for_body" >:: begin fun ctxt -> - let source = "for (i = 7; i < top; i++) { rename; }" in - assert_edits_equal ctxt ~edits:[(28, 34), "gotRenamed"] ~source - ~expected:"for (i = 7; i < top; i++) { gotRenamed; }" ~mapper:(new useless_mapper) - end; - "for_in_left" >:: begin fun ctxt -> - let source = "for (var x in xs) { continue; }" in - assert_edits_equal ctxt ~edits:[(0, 31), "for (const x in xs) {\n continue;\n}"] - ~source ~expected:"for (const x in xs) {\n continue;\n}" - ~mapper:(new useless_mapper) - end; - "for_in_right" >:: begin fun ctxt -> - let source = "for (let x in rename) { continue; }" in - assert_edits_equal ctxt ~edits:[(14, 20), "gotRenamed"] ~source - ~expected:"for (let x in gotRenamed) { continue; }" ~mapper:(new useless_mapper) - end; - "for_in_body" >:: begin fun ctxt -> - let source = "for (let x in xs) { rename; }" in - assert_edits_equal ctxt ~edits:[(20, 26), "gotRenamed"] ~source - ~expected:"for (let x in xs) { gotRenamed; }" ~mapper:(new useless_mapper) - end; - "while_test" >:: begin fun ctxt -> - let source = "while (rename) { break; };" in - assert_edits_equal ctxt ~edits:[(7, 13), 
"gotRenamed"] ~source - ~expected:"while (gotRenamed) { break; };" ~mapper:(new useless_mapper) - end; - "while_body" >:: begin fun ctxt -> - let source = "while (true) { rename; };" in - assert_edits_equal ctxt ~edits:[(15, 21), "gotRenamed"] ~source - ~expected:"while (true) { gotRenamed; };" ~mapper:(new useless_mapper) - end; - "for_of_left" >:: begin fun ctxt -> - let source = "for (var x of xs) { continue; }" in - assert_edits_equal ctxt ~edits:[(0, 31), "for (const x of xs) {\n continue;\n}"] - ~source ~expected:"for (const x of xs) {\n continue;\n}" ~mapper:(new useless_mapper) - end; - "for_of_right" >:: begin fun ctxt -> - let source = "for (let x of rename) { continue; }" in - assert_edits_equal ctxt ~edits:[(14, 20), "gotRenamed"] ~source - ~expected:"for (let x of gotRenamed) { continue; }" ~mapper:(new useless_mapper) - end; - "for_of_body" >:: begin fun ctxt -> - let source = "for (let x of xs) { rename; }" in - assert_edits_equal ctxt ~edits:[(20, 26), "gotRenamed"] ~source - ~expected:"for (let x of xs) { gotRenamed; }" ~mapper:(new useless_mapper) - end; - "do_while_body" >:: begin fun ctxt -> - let source = "do { rename; } while (true);" in - assert_edits_equal ctxt ~edits:[(5, 11), "gotRenamed"] ~source - ~expected:"do { gotRenamed; } while (true);" ~mapper:(new useless_mapper) - end; - "do_while_condition" >:: begin fun ctxt -> - let source = "do { continue; } while (rename);" in - assert_edits_equal ctxt ~edits:[(24, 30), "gotRenamed"] ~source - ~expected:"do { continue; } while (gotRenamed);" ~mapper:(new useless_mapper) - end; - "switch_discriminant" >:: begin fun ctxt -> - let source = "switch (rename) { case true: break; }" in - assert_edits_equal ctxt ~edits:[(8, 14), "gotRenamed"] ~source - ~expected:"switch (gotRenamed) { case true: break; }" ~mapper:(new useless_mapper) - end; - "switch_case_test" >:: begin fun ctxt -> - let source = "switch (true) { case rename: break; }" in - assert_edits_equal ctxt ~edits:[(21, 27), "gotRenamed"] ~source - ~expected:"switch (true) { case gotRenamed: break; }" ~mapper:(new useless_mapper) - end; - "switch_case_consequent" >:: begin fun ctxt -> - let source = "switch (true) { case true: rename; }" in - assert_edits_equal ctxt ~edits:[(27, 33), "gotRenamed"] ~source - ~expected:"switch (true) { case true: gotRenamed; }" ~mapper:(new useless_mapper) - end; - "algo_diff_end_insert" >:: begin fun ctxt -> - let source = "var x = 5; var y = 6;" in - assert_edits_differ ctxt ~edits_trivial:[(0, 21), "var x = 5;\nvar y = 6;\nvar y = 6;"] - ~edits_standard:[(21, 21), "var y = 6;"] ~source - ~trivial_expected:"var x = 5;\nvar y = 6;\nvar y = 6;" - ~standard_expected:"var x = 5; var y = 6;var y = 6;" ~mapper:(new insert_end_mapper) - end; - "algo_diff_delete" >:: begin fun ctxt -> - let source = "var x = 5; var y = 6; var z = 7;" in - assert_edits_differ ctxt ~edits_trivial:[(0, 32), "var y = 6;\nvar z = 7;"] - ~edits_standard:[(0, 10), ""] ~source - ~trivial_expected:"var y = 6;\nvar z = 7;" - ~standard_expected:" var y = 6; var z = 7;" ~mapper:(new delete_mapper) - end; - "algo_diff_begin_insert" >:: begin fun ctxt -> - let source = "var x = 5; var y = 6;" in - assert_edits_differ ctxt ~edits_trivial:[(0, 21), "var y = 6;\nvar x = 5;\nvar y = 6;"] - ~edits_standard:[(0, 0), "var y = 6;"] ~source - ~trivial_expected:"var y = 6;\nvar x = 5;\nvar y = 6;" - ~standard_expected:"var y = 6;var x = 5; var y = 6;" ~mapper:(new insert_begin_mapper) - end; - "algo_diff_middle_insert" >:: begin fun ctxt -> - let source = "var x = 5; var y = 6;" 
in - assert_edits_differ ctxt - ~edits_trivial:[(0, 21), "var x = 5;\nvar x = 5;\nvar y = 6;\nvar y = 6;"] - ~edits_standard:[((10, 10), "var x = 5;"); ((21, 21), "var y = 6;");] ~source - ~trivial_expected:"var x = 5;\nvar x = 5;\nvar y = 6;\nvar y = 6;" - ~standard_expected:"var x = 5;var x = 5; var y = 6;var y = 6;" ~mapper:(new insert_dup_mapper) - end; - "algo_diff_empty" >:: begin fun ctxt -> - let source = "" in - let ast_empty, _ = Parser_flow.program source in - let ast_var, _ = Parser_flow.program "var x = 6;" in - let edits_trivial = - program Trivial ast_empty ast_var - |> Ast_diff_printer.edits_of_changes None - |> List.map (fun (loc, text) -> Loc.((loc.start.column, loc._end.column), text)) in - let edits_standard = - program Standard ast_empty ast_var - |> Ast_diff_printer.edits_of_changes None - |> List.map (fun (loc, text) -> Loc.((loc.start.column, loc._end.column), text)) in - assert_equal ~ctxt edits_trivial [(0, 0), "var x = 6;"]; - assert_equal ~ctxt edits_standard [((0, 0), "var x = 6;");]; - assert_equal ~ctxt (apply_edits source edits_trivial) "var x = 6;"; - assert_equal ~ctxt (apply_edits source edits_standard) "var x = 6;" - end; - "unnamed_class_expression" >:: begin fun ctxt -> - let source = "(class { method() { rename; } })" in - assert_edits_equal ctxt ~edits:[(20, 26), "gotRenamed"] ~source - ~expected:"(class { method() { gotRenamed; } })" ~mapper:(new useless_mapper) - end; - "named_class_expression" >:: begin fun ctxt -> - let source = "(class Foo { method() { rename; } })" in - assert_edits_equal ctxt ~edits:[(24, 30), "gotRenamed"] ~source - ~expected:"(class Foo { method() { gotRenamed; } })" ~mapper:(new useless_mapper) - end; - "return_statement_with_expression" >:: begin fun ctxt -> - let source = "function foo() { return rename; }" in - assert_edits_equal ctxt ~edits:[(24, 30), "gotRenamed"] ~source - ~expected:"function foo() { return gotRenamed; }" ~mapper:(new useless_mapper) - end; - "type_annotation_delete" >:: begin fun ctxt -> - let source = "let x : number = 3;" in - assert_edits_equal ctxt ~edits:[(6, 14),""] ~source - ~expected:"let x = 3;" ~mapper:(new delete_annot_mapper) - end; - "type_annotation_insert" >:: begin fun ctxt -> - let source = "let x = 3;" in - assert_edits_equal ctxt ~edits:[(4, 5), "x: number"] ~source - ~expected:"let x: number = 3;" ~mapper:(new insert_annot_mapper) - end; - "type_annotation_replace" >:: begin fun ctxt -> - let source = "let x : number = 3;" in - assert_edits_equal ctxt ~edits:[(6, 14),": string"] ~source - ~expected:"let x : string = 3;" ~mapper:(new useless_mapper) - end; - "return_type_replace" >:: begin fun ctxt -> - let source = "function foo() : number { return 1; }" in - assert_edits_equal ctxt ~edits:[(15, 23),": string"] ~source - ~expected:"function foo() : string { return 1; }" ~mapper:(new useless_mapper) - end; - "return_type_delete" >:: begin fun ctxt -> - let source = "function foo() : number { return 1; }" in - assert_edits_equal ctxt ~edits:[(15, 23),""] ~source - ~expected:"function foo() { return 1; }" ~mapper:(new delete_annot_mapper) - end; - "return_type_insert" >:: begin fun ctxt -> - let source = "function foo() { return 1; }" in - assert_edits_equal ctxt ~edits:[(15, 15),": number"] ~source - ~expected:"function foo() : number{ return 1; }" ~mapper:(new insert_annot_mapper) - end; - "comments" >:: begin fun ctxt -> - let source = "function foo() { /* comment */ (5 - 3); 4; (6 + 4); /* comment */}" in - assert_edits_equal ctxt ~edits:[((40, 41), "(5)"); ((44, 49), "(6 - 5)")] 
~source - ~expected:"function foo() { /* comment */ (5 - 3); (5); ((6 - 5)); /* comment */}" - ~mapper:(new useless_mapper) - end; - "fn_default_export" >:: begin fun ctxt -> - let source = "export default function foo() { let x = rename; }" in - assert_edits_equal ctxt ~edits:[(40, 46), "gotRenamed"] ~source - ~expected:"export default function foo() { let x = gotRenamed; }" - ~mapper:(new useless_mapper) - end; - "fn_export_named" >:: begin fun ctxt -> - let source = "export function foo() { let x = rename; }" in - assert_edits_equal ctxt ~edits:[(32, 38), "gotRenamed"] ~source - ~expected:"export function foo() { let x = gotRenamed; }" - ~mapper:(new useless_mapper) - end; - "assignment_left" >:: begin fun ctxt -> - let source = "rename = 6;" in - assert_edits_equal ctxt ~edits:[(0, 6), "gotRenamed"] ~source - ~expected:"gotRenamed = 6;" - ~mapper:(new useless_mapper) - end; - "assignment_right" >:: begin fun ctxt -> - let source = "x = rename;" in - assert_edits_equal ctxt ~edits:[(4, 10), "gotRenamed"] ~source - ~expected:"x = gotRenamed;" - ~mapper:(new useless_mapper) - end; - "list_diff_simple" >:: begin fun ctxt -> - let a = "a" in - let b = "b" in - let old_list = [a] in - let new_list = [b] in - let edits = [(0, Replace (a, b))] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "list_diff_simple2" >:: begin fun ctxt -> - let a = "a" in - let b = "b" in - let old_list = [a;a] in - let new_list = [b;b] in - let edits = [(0, Replace (a, b));(1, Replace (a, b))] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "list_diff_simple3" >:: begin fun ctxt -> - let a = "a" in - let b = "b" in - let old_list = [a;a] in - let new_list = [b;b;b;b] in - let edits = [(0, Replace (a, b));(1, Replace (a, b)); - (1, Insert (None, [b;b]))] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "list_diff_simple4" >:: begin fun ctxt -> - let a = "a" in - let b = "b" in - let old_list = [a;a;a;a] in - let new_list = [b;b] in - let edits = [(0, Replace (a, b));(1, Replace (a,b)); - (2, Delete a);(3, Delete a)] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "list_diff_paper" >:: begin fun ctxt -> - let a = "a" in - let b = "b" in - let c = "c" in - let old_list = [a;b;c;a;b;b;a] in - let new_list = [c;b;a;b;a;c] in - let edits = [(0, Delete a); (1, Delete b); (3, Delete a) - ; (4, Insert (None, [a])); (6, Insert (None, [c]))] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "list_diff_flip" >:: begin fun ctxt -> - let x = "x" in - let y = "y" in - let old_list = [x;x;x;y;y;y] in - let new_list = [y;y;y;x;x;x] in - let edits = [(0, Delete x); (1, Delete x); (2, Delete x); (5, Insert (None, [x;x;x]))] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "list_diff_sentence" >:: begin fun ctxt -> - let (t', h, i, s, space, e, n, t, c, o, pd, d) = - "T", "h", "i", "s", " ", "e", "n", "t", "c", "o", ".", "d" in - (*"This is sentence one."*) - let old_list = [t';h;i;s;space;i;s;space;s;e;n;t;e;n;c;e;space;o;n;e;pd] in - (*"This is the second sentence"*) - let new_list = [t';h;i;s;space;i;s;space;t;h;e;space;s;e;c;o;n;d;space;s;e;n;t;e;n;c;e;pd] in - let edits = [(7, Insert (None, [t;h;e;space])); (9, Insert (None, [c;o])); (11, Replace (t,d)); - (11, Insert (None, 
[space;s])); (14, Replace (c,t)); (16, Delete space); - (17, Delete o); (18, Insert (None, [c]))] in - let script = list_diff Standard old_list new_list in - debug_print_string_script script; - assert_equal ~ctxt (Some edits) script - end; - "list_diff_simple5" >:: begin fun ctxt -> - let a = "a" in - let b = "b" in - let old_list = [a;b] in - let new_list = [b] in - let edits = [(0, Delete a)] in - let script = list_diff Standard old_list new_list in - assert_equal ~ctxt (Some edits) script - end; - "pattern_identifier" >:: begin fun ctxt -> - let source = "let rename = 0" in - assert_edits_equal ctxt ~edits:[(4,10), "gotRenamed"] ~source - ~expected:"let gotRenamed = 0" ~mapper:(new useless_mapper) - end; - "pattern_array" >:: begin fun ctxt -> - let source = "let [rename,rename] = [0]" in - assert_edits_equal ctxt ~edits:[(5,11), "gotRenamed"; (12,18), "gotRenamed"] ~source - ~expected:"let [gotRenamed,gotRenamed] = [0]" ~mapper:(new useless_mapper) - end; - "pattern_array_nested" >:: begin fun ctxt -> - let source = "let [[[rename]]] = 0" in - assert_edits_equal ctxt ~edits:[(7,13), "gotRenamed"] ~source - ~expected:"let [[[gotRenamed]]] = 0" ~mapper:(new useless_mapper) - end; - "pattern_array_rest" >:: begin fun ctxt -> - let source = "let [a,b,...rename] = 0" in - assert_edits_equal ctxt ~edits:[(12,18), "gotRenamed"] ~source - ~expected:"let [a,b,...gotRenamed] = 0" ~mapper:(new useless_mapper) - end; - "pattern_object_longhand" >:: begin fun ctxt -> - let source = "let {rename: rename} = 0" in - assert_edits_equal ctxt ~edits:[(5,11), "gotRenamed"; (13,19), "gotRenamed"] ~source - ~expected:"let {gotRenamed: gotRenamed} = 0" ~mapper:(new useless_mapper) - end; - "pattern_object_rest" >:: begin fun ctxt -> - let source = "let {a,b,...rename} = 0" in - assert_edits_equal ctxt ~edits:[(12,18), "gotRenamed"] ~source - ~expected:"let {a,b,...gotRenamed} = 0" ~mapper:(new useless_mapper) - end; - "pattern_assignment" >:: begin fun ctxt -> - let source = "let [a=rename] = 0" in - assert_edits_equal ctxt ~edits:[(7,13), "gotRenamed"] ~source - ~expected:"let [a=gotRenamed] = 0" ~mapper:(new useless_mapper) - end; - "type_cast_expr" >:: begin fun ctxt -> - let source = "(rename: string)" in - assert_edits_equal ctxt ~edits:[(1,7), "gotRenamed"] ~source - ~expected:"(gotRenamed: string)" ~mapper:(new useless_mapper) - end; - "type_cast_type" >:: begin fun ctxt -> - let source = "(dontrename: number)" in - assert_edits_equal ctxt ~edits:[(11,19), ": string"] ~source - ~expected:"(dontrename: string)" ~mapper:(new useless_mapper) - end; - "type_cast_add" >:: begin fun ctxt -> - let source = "const dontrename = call( /* preserve spaces */ )" in - assert_edits_equal ctxt ~edits:[(19, 19), "("; (49, 49), ": any)"] - ~source ~mapper:(new insert_typecast_mapper) - ~expected:"const dontrename = (call( /* preserve spaces */ ): any)" - end; -] +let assert_edits_equal_standard_only ctxt ~edits ~source ~expected ~mapper = + let edits_standard = edits_of_source Standard source mapper in + assert_equal ~ctxt edits edits_standard; + assert_equal ~ctxt expected (apply_edits source edits_standard) + +let tests = + "ast_differ" + >::: [ + ( "literal_number" + >:: fun ctxt -> + let source = "4" in + assert_edits_equal + ctxt + ~edits:[((0, 1), "5")] + ~source + ~expected:"5" + ~mapper:(new literal_mapper) ); + ( "literal_string" + >:: fun ctxt -> + let source = "\"rename\"" in + assert_edits_equal + ctxt + ~edits:[((0, 8), "\"gotRenamed\"")] + ~source + ~expected:"\"gotRenamed\"" + ~mapper:(new 
literal_mapper) ); + ( "literal_bool" + >:: fun ctxt -> + let source = "false" in + assert_edits_equal + ctxt + ~edits:[((0, 5), "true")] + ~source + ~expected:"true" + ~mapper:(new literal_mapper) ); + ( "literal_null" + >:: fun ctxt -> + let source = "null" in + assert_edits_equal + ctxt + ~edits:[((0, 4), "\"wasNull\"")] + ~source + ~expected:"\"wasNull\"" + ~mapper:(new literal_mapper) ); + ( "string_literal_type" + >:: fun ctxt -> + let source = "(foo: \"RenameSL\")" in + assert_edits_equal + ctxt + ~edits:[((6, 16), "\"GotRenamedSL\"")] + ~source + ~expected:"(foo: \"GotRenamedSL\")" + ~mapper:(new useless_mapper) ); + ( "simple" + >:: fun ctxt -> + let source = "function foo() { (5 - 3); 4; (6 + 4); }" in + assert_edits_equal + ctxt + ~edits:[((26, 27), "5"); ((30, 35), "(6 - 5)")] + ~source + ~expected:"function foo() { (5 - 3); 5; ((6 - 5)); }" + ~mapper:(new useless_mapper) ); + ( "class" + >:: fun ctxt -> + let source = "class Foo { bar() { 4; } }" in + assert_edits_equal + ctxt + ~edits:[((20, 21), "5")] + ~source + ~expected:"class Foo { bar() { 5; } }" + ~mapper:(new useless_mapper) ); + ( "class2" + >:: fun ctxt -> + let source = "class Foo { bar = 4; }" in + assert_edits_equal + ctxt + ~edits:[((18, 19), "5")] + ~source + ~expected:"class Foo { bar = 5; }" + ~mapper:(new useless_mapper) ); + ( "class_prop_annot" + >:: fun ctxt -> + let source = "class A { f = (x: string) => x; }" in + assert_edits_equal + ctxt + ~edits:[((11, 11), ": number")] + ~source + ~expected:"class A { f: number = (x: string) => x; }" + ~mapper:(new prop_annot_mapper) ); + ( "class_extends" + >:: fun ctxt -> + let source = "class A extends rename { }" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "gotRenamed"); ((23, 29), "gotRenamed")] + ~source + ~expected:"class A extends gotRenamed { }" + ~mapper:(new useless_mapper) ); + ( "class_extends_integration" + >:: fun ctxt -> + let source = "class A extends rename { bar = 4 }" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "gotRenamed"); ((31, 32), "5")] + ~source + ~expected:"class A extends gotRenamed { bar = 5 }" + ~mapper:(new useless_mapper) ); + ( "interface_id" + >:: fun ctxt -> + let source = "interface Rename { }" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "GotRenamed")] + ~source + ~expected:"interface GotRenamed { }" + ~mapper:(new useless_mapper) ); + ( "interface_tparams" + >:: fun ctxt -> + let source = "interface Foo { }" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "GOT_RENAMED")] + ~source + ~expected:"interface Foo { }" + ~mapper:(new useless_mapper) ); + ( "interface_extends_id" + >:: fun ctxt -> + let source = "interface Foo extends Rename { }" in + assert_edits_equal + ctxt + ~edits:[((22, 28), "GotRenamed")] + ~source + ~expected:"interface Foo extends GotRenamed { }" + ~mapper:(new useless_mapper) ); + ( "interface_extends_targ_simple" + >:: fun ctxt -> + let source = "interface Foo extends Bar { }" in + assert_edits_equal + ctxt + ~edits:[((26, 32), "GOT_RENAMED")] + ~source + ~expected:"interface Foo extends Bar { }" + ~mapper:(new useless_mapper) ); + ( "interface_extends_targs" + >:: fun ctxt -> + let source = "interface Foo extends Bar { }" in + assert_edits_equal + ctxt + ~edits:[((26, 32), "GOT_RENAMED"); ((34, 40), "GOT_RENAMED")] + ~source + ~expected:"interface Foo extends Bar { }" + ~mapper:(new useless_mapper) ); + ( "interface_combo" + >:: fun ctxt -> + let source = "interface Rename extends Rename { }" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "GotRenamed"); ((25, 31), 
"GotRenamed"); ((32, 38), "GOT_RENAMED")] + ~source + ~expected:"interface GotRenamed extends GotRenamed { }" + ~mapper:(new useless_mapper) ); + ( "interface_body_object_property_key" + >:: fun ctxt -> + let source = "interface Foo { rename: string }" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "gotRenamed")] + ~source + ~expected:"interface Foo { gotRenamed: string }" + ~mapper:(new useless_mapper) ); + ( "interface_body_object_property_value_init" + >:: fun ctxt -> + let source = "interface Foo { bar: number }" in + assert_edits_equal + ctxt + ~edits:[((21, 27), "string")] + ~source + ~expected:"interface Foo { bar: string }" + ~mapper:(new useless_mapper) ); + ( "obj_prop" + >:: fun ctxt -> + let source = "let x = { rename : 4 }" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed"); ((19, 20), "5")] + ~source + ~expected:"let x = { gotRenamed : 5 }" + ~mapper:(new useless_mapper) ); + ( "obj_prop2" + >:: fun ctxt -> + let source = "let x = { bar() { rename; } }" in + assert_edits_equal + ctxt + ~edits:[((18, 24), "gotRenamed")] + ~source + ~expected:"let x = { bar() { gotRenamed; } }" + ~mapper:(new useless_mapper) ); + ( "obj_prop3" + >:: fun ctxt -> + let source = "let x = { 4 : 3 }" in + assert_edits_equal + ctxt + ~edits:[((10, 11), "5")] + ~source + ~expected:"let x = { 5 : 3 }" + ~mapper:(new useless_mapper) ); + ( "obj_spread_prop" + >:: fun ctxt -> + let source = "let x = { ...rename, x : 4}" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "gotRenamed"); ((25, 26), "5")] + ~source + ~expected:"let x = { ...gotRenamed, x : 5}" + ~mapper:(new useless_mapper) ); + ( "precedence" + >:: fun ctxt -> + let source = "5 - 3 * 3" in + (* It is mandatory to insert the parens here *) + assert_edits_equal + ctxt + ~edits:[((4, 9), "(3 + 3)")] + ~source + ~expected:"5 - (3 + 3)" + ~mapper:(new useless_mapper) ); + ( "tuple" + >:: fun ctxt -> + let source = "type Foo = [number, number];" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "string"); ((20, 26), "string")] + ~source + ~expected:"type Foo = [string, string];" + ~mapper:(new useless_mapper) ); + ( "identifier" + >:: fun ctxt -> + let source = "5 - rename" in + assert_edits_equal + ctxt + ~edits:[((4, 10), "gotRenamed")] + ~source + ~expected:"5 - gotRenamed" + ~mapper:(new useless_mapper) ); + ( "interface_type" + >:: fun ctxt -> + let source = "type Foo = interface { rename() : string }" in + assert_edits_equal + ctxt + ~edits:[((23, 29), "gotRenamed")] + ~source + ~expected:"type Foo = interface { gotRenamed() : string }" + ~mapper:(new useless_mapper) ); + ( "new" + >:: fun ctxt -> + let source = "new rename()" in + assert_edits_equal + ctxt + ~edits:[((4, 10), "gotRenamed")] + ~source + ~expected:"new gotRenamed()" + ~mapper:(new useless_mapper) ); + ( "typeof_type" + >:: fun ctxt -> + let source = "type Foo = typeof number" in + assert_edits_equal + ctxt + ~edits:[((18, 24), "string")] + ~source + ~expected:"type Foo = typeof string" + ~mapper:(new useless_mapper) ); + ( "new_type_param" + >:: fun ctxt -> + let source = "new foo()" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "GOT_RENAMED")] + ~source + ~expected:"new foo()" + ~mapper:(new useless_mapper) ); + ( "new_type_param_multiple" + >:: fun ctxt -> + let source = "new foo()" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "GOT_RENAMED"); ((16, 22), "GOT_RENAMED")] + ~source + ~expected:"new foo()" + ~mapper:(new useless_mapper) ); + ( "new_type_param_insert" + >:: fun ctxt -> + let source = "new foo<>()" in + assert_edits_equal + ctxt 
+ ~edits:[((0, 11), "(new foo())")] + ~source + ~expected:"(new foo())" + ~mapper:(new insert_type_param_instantiation) ); + ( "new_type_param_implicit" + >:: fun ctxt -> + let source = "new foo<_>()" in + assert_edits_equal + ctxt + ~edits:[((0, 12), "(new foo())")] + ~source + ~expected:"(new foo())" + ~mapper:(new useless_mapper) ); + ( "member" + >:: fun ctxt -> + let source = "rename.a" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed.a" + ~mapper:(new useless_mapper) ); + ( "member_identifier" + >:: fun ctxt -> + let source = "rename.rename" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed"); ((7, 13), "gotRenamed")] + ~source + ~expected:"gotRenamed.gotRenamed" + ~mapper:(new useless_mapper) ); + ( "member_expression" + >:: fun ctxt -> + let source = "obj[4]" in + assert_edits_equal + ctxt + ~edits:[((4, 5), "5")] + ~source + ~expected:"obj[5]" + ~mapper:(new useless_mapper) ); + ( "unary_same_op" + >:: fun ctxt -> + let source = "-rename" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "gotRenamed")] + ~source + ~expected:"-gotRenamed" + ~mapper:(new useless_mapper) ); + ( "unary_diff_op" + >:: fun ctxt -> + let source = "+rename" in + assert_edits_equal + ctxt + ~edits:[((0, 7), "(-gotRenamed)")] + ~source + ~expected:"(-gotRenamed)" + ~mapper:(new useless_mapper) ); + ( "block" + >:: fun ctxt -> + let source = "{ 2; 4; 10; rename; }" in + assert_edits_equal + ctxt + ~edits:[((5, 6), "5"); ((12, 18), "gotRenamed")] + ~source + ~expected:"{ 2; 5; 10; gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "if_nochange" + >:: fun ctxt -> + let source = "if (true) { false; } else { true; }" in + assert_edits_equal + ctxt + ~edits:[] + ~source + ~expected:"if (true) { false; } else { true; }" + ~mapper:(new useless_mapper) ); + ( "if_noblock" + >:: fun ctxt -> + let source = "if (4) rename;" in + assert_edits_equal + ctxt + ~edits:[((4, 5), "5"); ((7, 13), "gotRenamed")] + ~source + ~expected:"if (5) gotRenamed;" + ~mapper:(new useless_mapper) ); + ( "if_partial" + >:: fun ctxt -> + let source = "if (4) { rename; }" in + assert_edits_equal + ctxt + ~edits:[((4, 5), "5"); ((9, 15), "gotRenamed")] + ~source + ~expected:"if (5) { gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "if_full" + >:: fun ctxt -> + let source = "if (4) { 4; } else { rename }" in + assert_edits_equal + ctxt + ~edits:[((4, 5), "5"); ((9, 10), "5"); ((21, 27), "gotRenamed")] + ~source + ~expected:"if (5) { 5; } else { gotRenamed }" + ~mapper:(new useless_mapper) ); + ( "conditional_nochange" + >:: fun ctxt -> + let source = "1 > 0 ? false : true" in + assert_edits_equal + ctxt + ~edits:[] + ~source + ~expected:"1 > 0 ? false : true" + ~mapper:(new useless_mapper) ); + ( "conditional_test" + >:: fun ctxt -> + let source = "rename ? false : true" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed ? false : true" + ~mapper:(new useless_mapper) ); + ( "conditional_consequent" + >:: fun ctxt -> + let source = "1 > 0 ? rename : true" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "gotRenamed")] + ~source + ~expected:"1 > 0 ? gotRenamed : true" + ~mapper:(new useless_mapper) ); + ( "conditional_alternate" + >:: fun ctxt -> + let source = "1 > 0 ? false : rename" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "gotRenamed")] + ~source + ~expected:"1 > 0 ? false : gotRenamed" + ~mapper:(new useless_mapper) ); + ( "conditional_cons_and_alt" + >:: fun ctxt -> + let source = "1 > 0 ? 
4 : rename" in + assert_edits_equal + ctxt + ~edits:[((8, 9), "5"); ((12, 18), "gotRenamed")] + ~source + ~expected:"1 > 0 ? 5 : gotRenamed" + ~mapper:(new useless_mapper) ); + ( "with_nochange" + >:: fun ctxt -> + let source = "with (object) { foo = true; }" in + assert_edits_equal + ctxt + ~edits:[] + ~source + ~expected:"with (object) { foo = true; }" + ~mapper:(new useless_mapper) ); + ( "with_object" + >:: fun ctxt -> + let source = "with (rename) { foo = true; };" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed")] + ~source + ~expected:"with (gotRenamed) { foo = true; };" + ~mapper:(new useless_mapper) ); + ( "with_body" + >:: fun ctxt -> + let source = "with (objct) { rename; };" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"with (objct) { gotRenamed; };" + ~mapper:(new useless_mapper) ); + ( "function_expression" + >:: fun ctxt -> + let source = "(function() { 4; })" in + assert_edits_equal + ctxt + ~edits:[((14, 15), "5")] + ~source + ~expected:"(function() { 5; })" + ~mapper:(new useless_mapper) ); + ( "function_id" + >:: fun ctxt -> + let source = "(function rename() { return; })" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed")] + ~source + ~expected:"(function gotRenamed() { return; })" + ~mapper:(new useless_mapper) ); + ( "function_rest" + >:: fun ctxt -> + let source = "(function(...rename) { return; })" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "gotRenamed")] + ~source + ~expected:"(function(...gotRenamed) { return; })" + ~mapper:(new useless_mapper) ); + ( "function_param" + >:: fun ctxt -> + let source = "(function(rename, ...dontRename) { return; })" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed")] + ~source + ~expected:"(function(gotRenamed, ...dontRename) { return; })" + ~mapper:(new useless_mapper) ); + ( "function_params" + >:: fun ctxt -> + let source = "(function(rename, dontRename, rename) { return; })" in + assert_edits_equal + ctxt + ~source + ~edits:[((10, 16), "gotRenamed"); ((30, 36), "gotRenamed")] + ~expected:"(function(gotRenamed, dontRename, gotRenamed) { return; })" + ~mapper:(new useless_mapper) ); + ( "function_type_params" + >:: fun ctxt -> + let source = "(function() { return; })" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "GOT_RENAMED")] + ~source + ~expected:"(function() { return; })" + ~mapper:(new useless_mapper) ); + ( "function_combo" + >:: fun ctxt -> + let source = "(function rename(rename): Rename { return 4; })" in + assert_edits_equal + ctxt + ~source + ~edits: + [ + ((10, 16), "gotRenamed"); + ((17, 23), "GOT_RENAMED"); + ((25, 31), "gotRenamed"); + ((34, 40), "GotRenamed"); + ((50, 51), "5"); + ] + ~expected:"(function gotRenamed(gotRenamed): GotRenamed { return 5; })" + ~mapper:(new useless_mapper) ); + ( "arrow_function" + >:: fun ctxt -> + let source = "let bar = (x) => 4;" in + assert_edits_equal + ctxt + ~edits:[((17, 18), "5")] + ~source + ~expected:"let bar = (x) => 5;" + ~mapper:(new useless_mapper) ); + ( "call" + >:: fun ctxt -> + let source = "rename()" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed()" + ~mapper:(new useless_mapper) ); + ( "call_type_param" + >:: fun ctxt -> + let source = "rename()" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "GOT_RENAMED")] + ~source + ~expected:"rename()" + ~mapper:(new useless_mapper) ); + ( "variable_declaration_kind" + >:: fun ctxt -> + let source = "var x = 5;" in + assert_edits_equal + ctxt + ~edits:[((0, 10), 
"const x = 5;")] + ~source + ~expected:"const x = 5;" + ~mapper:(new useless_mapper) ); + ( "variable_declaration_expression" + >:: fun ctxt -> + let source = "let x = 4;" in + assert_edits_equal + ctxt + ~edits:[((8, 9), "5")] + ~source + ~expected:"let x = 5;" + ~mapper:(new useless_mapper) ); + ( "variable_declaration_kind_expression" + >:: fun ctxt -> + let source = "var x = 4;" in + assert_edits_equal + ctxt + ~edits:[((0, 10), "const x = 5;")] + ~source + ~expected:"const x = 5;" + ~mapper:(new useless_mapper) ); + ( "for" + >:: fun ctxt -> + let source = "for (i = 7; i < rename; i++) {}" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "gotRenamed")] + ~source + ~expected:"for (i = 7; i < gotRenamed; i++) {}" + ~mapper:(new useless_mapper) ); + ( "for_init" + >:: fun ctxt -> + let source = "for (let i = 4; i < 10; i++) {}" in + assert_edits_equal + ctxt + ~edits:[((13, 14), "5")] + ~source + ~expected:"for (let i = 5; i < 10; i++) {}" + ~mapper:(new useless_mapper) ); + ( "for_body" + >:: fun ctxt -> + let source = "for (i = 7; i < top; i++) { rename; }" in + assert_edits_equal + ctxt + ~edits:[((28, 34), "gotRenamed")] + ~source + ~expected:"for (i = 7; i < top; i++) { gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "for_in_left" + >:: fun ctxt -> + let source = "for (var x in xs) { continue; }" in + assert_edits_equal + ctxt + ~edits:[((0, 31), "for (const x in xs) {\n continue;\n}")] + ~source + ~expected:"for (const x in xs) {\n continue;\n}" + ~mapper:(new useless_mapper) ); + ( "for_in_right" + >:: fun ctxt -> + let source = "for (let x in rename) { continue; }" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "gotRenamed")] + ~source + ~expected:"for (let x in gotRenamed) { continue; }" + ~mapper:(new useless_mapper) ); + ( "for_in_body" + >:: fun ctxt -> + let source = "for (let x in xs) { rename; }" in + assert_edits_equal + ctxt + ~edits:[((20, 26), "gotRenamed")] + ~source + ~expected:"for (let x in xs) { gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "while_test" + >:: fun ctxt -> + let source = "while (rename) { break; };" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"while (gotRenamed) { break; };" + ~mapper:(new useless_mapper) ); + ( "while_body" + >:: fun ctxt -> + let source = "while (true) { rename; };" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"while (true) { gotRenamed; };" + ~mapper:(new useless_mapper) ); + ( "for_of_left" + >:: fun ctxt -> + let source = "for (var x of xs) { continue; }" in + assert_edits_equal + ctxt + ~edits:[((0, 31), "for (const x of xs) {\n continue;\n}")] + ~source + ~expected:"for (const x of xs) {\n continue;\n}" + ~mapper:(new useless_mapper) ); + ( "for_of_right" + >:: fun ctxt -> + let source = "for (let x of rename) { continue; }" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "gotRenamed")] + ~source + ~expected:"for (let x of gotRenamed) { continue; }" + ~mapper:(new useless_mapper) ); + ( "for_of_body" + >:: fun ctxt -> + let source = "for (let x of xs) { rename; }" in + assert_edits_equal + ctxt + ~edits:[((20, 26), "gotRenamed")] + ~source + ~expected:"for (let x of xs) { gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "do_while_body" + >:: fun ctxt -> + let source = "do { rename; } while (true);" in + assert_edits_equal + ctxt + ~edits:[((5, 11), "gotRenamed")] + ~source + ~expected:"do { gotRenamed; } while (true);" + ~mapper:(new useless_mapper) ); + ( "do_while_condition" + >:: fun ctxt -> + let source = 
"do { continue; } while (rename);" in + assert_edits_equal + ctxt + ~edits:[((24, 30), "gotRenamed")] + ~source + ~expected:"do { continue; } while (gotRenamed);" + ~mapper:(new useless_mapper) ); + ( "try_stmt_body" + >:: fun ctxt -> + let source = "try { rename; } catch(e) { other; };" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed")] + ~source + ~expected:"try { gotRenamed; } catch(e) { other; };" + ~mapper:(new useless_mapper) ); + ( "try_stmt_catch" + >:: fun ctxt -> + let source = "try { thing; } catch(rename) { other; };" in + assert_edits_equal + ctxt + ~edits:[((21, 27), "gotRenamed")] + ~source + ~expected:"try { thing; } catch(gotRenamed) { other; };" + ~mapper:(new useless_mapper) ); + ( "try_stmt_handler" + >:: fun ctxt -> + let source = "try { thing; } catch(e) { rename; };" in + assert_edits_equal + ctxt + ~edits:[((26, 32), "gotRenamed")] + ~source + ~expected:"try { thing; } catch(e) { gotRenamed; };" + ~mapper:(new useless_mapper) ); + ( "try_stmt_finalizer" + >:: fun ctxt -> + let source = "try { thing; } finally { rename; };" in + assert_edits_equal + ctxt + ~edits:[((25, 31), "gotRenamed")] + ~source + ~expected:"try { thing; } finally { gotRenamed; };" + ~mapper:(new useless_mapper) ); + ( "labeled_label" + >:: fun ctxt -> + let source = "rename: while (true) { }" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed: while (true) { }" + ~mapper:(new useless_mapper) ); + ( "labeled_body" + >:: fun ctxt -> + let source = "foo: while (rename) { }" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "gotRenamed")] + ~source + ~expected:"foo: while (gotRenamed) { }" + ~mapper:(new useless_mapper) ); + ( "switch_discriminant" + >:: fun ctxt -> + let source = "switch (rename) { case true: break; }" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "gotRenamed")] + ~source + ~expected:"switch (gotRenamed) { case true: break; }" + ~mapper:(new useless_mapper) ); + ( "switch_case_test" + >:: fun ctxt -> + let source = "switch (true) { case rename: break; }" in + assert_edits_equal + ctxt + ~edits:[((21, 27), "gotRenamed")] + ~source + ~expected:"switch (true) { case gotRenamed: break; }" + ~mapper:(new useless_mapper) ); + ( "switch_case_consequent" + >:: fun ctxt -> + let source = "switch (true) { case true: rename; }" in + assert_edits_equal + ctxt + ~edits:[((27, 33), "gotRenamed")] + ~source + ~expected:"switch (true) { case true: gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "algo_diff_end_insert" + >:: fun ctxt -> + let source = "var x = 5; var y = 6;" in + assert_edits_differ + ctxt + ~edits_trivial:[((0, 21), "var x = 5;\nvar y = 6;\nvar y = 6;")] + ~edits_standard:[((21, 21), "var y = 6;")] + ~source + ~trivial_expected:"var x = 5;\nvar y = 6;\nvar y = 6;" + ~standard_expected:"var x = 5; var y = 6;var y = 6;" + ~mapper:(new insert_end_mapper) ); + ( "algo_diff_delete" + >:: fun ctxt -> + let source = "var x = 5; var y = 6; var z = 7;" in + assert_edits_differ + ctxt + ~edits_trivial:[((0, 32), "var y = 6;\nvar z = 7;")] + ~edits_standard:[((0, 10), "")] + ~source + ~trivial_expected:"var y = 6;\nvar z = 7;" + ~standard_expected:" var y = 6; var z = 7;" + ~mapper:(new delete_mapper) ); + ( "algo_diff_begin_insert" + >:: fun ctxt -> + let source = "var x = 5; var y = 6;" in + assert_edits_differ + ctxt + ~edits_trivial:[((0, 21), "var y = 6;\nvar x = 5;\nvar y = 6;")] + ~edits_standard:[((0, 0), "var y = 6;")] + ~source + ~trivial_expected:"var y = 6;\nvar x = 5;\nvar y = 6;" + ~standard_expected:"var 
y = 6;var x = 5; var y = 6;" + ~mapper:(new insert_begin_mapper) ); + ( "algo_diff_middle_insert" + >:: fun ctxt -> + let source = "var x = 5; var y = 6;" in + assert_edits_differ + ctxt + ~edits_trivial:[((0, 21), "var x = 5;\nvar x = 5;\nvar y = 6;\nvar y = 6;")] + ~edits_standard:[((10, 10), "var x = 5;"); ((21, 21), "var y = 6;")] + ~source + ~trivial_expected:"var x = 5;\nvar x = 5;\nvar y = 6;\nvar y = 6;" + ~standard_expected:"var x = 5;var x = 5; var y = 6;var y = 6;" + ~mapper:(new insert_dup_mapper) ); + ( "algo_diff_empty" + >:: fun ctxt -> + let source = "" in + let (ast_empty, _) = Parser_flow.program source in + let (ast_var, _) = Parser_flow.program "var x = 6;" in + let edits_trivial = + program Trivial ast_empty ast_var + |> Ast_diff_printer.edits_of_changes None + |> Core_list.map ~f:(fun (loc, text) -> Loc.((loc.start.column, loc._end.column), text)) + in + let edits_standard = + program Standard ast_empty ast_var + |> Ast_diff_printer.edits_of_changes None + |> Core_list.map ~f:(fun (loc, text) -> Loc.((loc.start.column, loc._end.column), text)) + in + assert_equal ~ctxt edits_trivial [((0, 0), "var x = 6;")]; + assert_equal ~ctxt edits_standard [((0, 0), "var x = 6;")]; + assert_equal ~ctxt (apply_edits source edits_trivial) "var x = 6;"; + assert_equal ~ctxt (apply_edits source edits_standard) "var x = 6;" ); + ( "unnamed_class_expression" + >:: fun ctxt -> + let source = "(class { method() { rename; } })" in + assert_edits_equal + ctxt + ~edits:[((20, 26), "gotRenamed")] + ~source + ~expected:"(class { method() { gotRenamed; } })" + ~mapper:(new useless_mapper) ); + ( "named_class_expression" + >:: fun ctxt -> + let source = "(class Foo { method() { rename; } })" in + assert_edits_equal + ctxt + ~edits:[((24, 30), "gotRenamed")] + ~source + ~expected:"(class Foo { method() { gotRenamed; } })" + ~mapper:(new useless_mapper) ); + ( "return_statement_with_expression" + >:: fun ctxt -> + let source = "function foo() { return rename; }" in + assert_edits_equal + ctxt + ~edits:[((24, 30), "gotRenamed")] + ~source + ~expected:"function foo() { return gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "type_annotation_delete" + >:: fun ctxt -> + let source = "let x : number = 3;" in + assert_edits_equal + ctxt + ~edits:[((6, 14), "")] + ~source + ~expected:"let x = 3;" + ~mapper:(new delete_annot_mapper) ); + ( "type_annotation_insert" + >:: fun ctxt -> + let source = "let x = 3;" in + assert_edits_equal + ctxt + ~edits:[((5, 5), ": number")] + ~source + ~expected:"let x: number = 3;" + ~mapper:(new insert_annot_mapper) ); + ( "type_annotation_replace" + >:: fun ctxt -> + let source = "let x : number = 3;" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "string")] + ~source + ~expected:"let x : string = 3;" + ~mapper:(new useless_mapper) ); + ( "type_annotation_rename_type_arg" + >:: fun ctxt -> + let source = "(foo: bar);" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed")] + ~source + ~expected:"(foo: bar);" + ~mapper:(new useless_mapper) ); + ( "type_annotation_rename_type" + >:: fun ctxt -> + let source = "(foo: rename);" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed")] + ~source + ~expected:"(foo: gotRenamed);" + ~mapper:(new useless_mapper) ); + ( "type_annotation_rename_type_and_typearg" + >:: fun ctxt -> + let source = "(foo: rename);" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed"); ((13, 19), "gotRenamed")] + ~source + ~expected:"(foo: gotRenamed);" + ~mapper:(new useless_mapper) ); + ( 
"type_annotation_rename_qualified_type" + >:: fun ctxt -> + let source = "(foo: Foo.rename);" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed")] + ~source + ~expected:"(foo: Foo.gotRenamed);" + ~mapper:(new useless_mapper) ); + ( "type_annotation_rename_qualified_typearg" + >:: fun ctxt -> + let source = "(foo: Foo.Bar);" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "gotRenamed")] + ~source + ~expected:"(foo: Foo.Bar);" + ~mapper:(new useless_mapper) ); + ( "type_annotation_rename_qualified_type_and_typearg" + >:: fun ctxt -> + let source = "(foo: Foo.rename);" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed"); ((17, 23), "gotRenamed")] + ~source + ~expected:"(foo: Foo.gotRenamed);" + ~mapper:(new useless_mapper) ); + ( "return_type_replace" + >:: fun ctxt -> + let source = "function foo() : number { return 1; }" in + assert_edits_equal + ctxt + ~edits:[((17, 23), "string")] + ~source + ~expected:"function foo() : string { return 1; }" + ~mapper:(new useless_mapper) ); + ( "return_type_delete" + >:: fun ctxt -> + let source = "function foo() : number { return 1; }" in + assert_edits_equal + ctxt + ~edits:[((15, 23), "")] + ~source + ~expected:"function foo() { return 1; }" + ~mapper:(new delete_annot_mapper) ); + ( "return_type_insert" + >:: fun ctxt -> + let source = "function foo() { return 1; }" in + assert_edits_equal + ctxt + ~edits:[((14, 14), ": number")] + ~source + ~expected:"function foo(): number { return 1; }" + ~mapper:(new insert_annot_mapper) ); + ( "comments" + >:: fun ctxt -> + let source = "function foo() { /* comment */ (5 - 3); 4; (6 + 4); /* comment */}" in + assert_edits_equal + ctxt + ~edits:[((40, 41), "5"); ((44, 49), "(6 - 5)")] + ~source + ~expected:"function foo() { /* comment */ (5 - 3); 5; ((6 - 5)); /* comment */}" + ~mapper:(new useless_mapper) ); + ( "fn_default_export" + >:: fun ctxt -> + let source = "export default function foo() { let x = rename; }" in + assert_edits_equal + ctxt + ~edits:[((40, 46), "gotRenamed")] + ~source + ~expected:"export default function foo() { let x = gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "fn_export_named" + >:: fun ctxt -> + let source = "export function foo() { let x = rename; }" in + assert_edits_equal + ctxt + ~edits:[((32, 38), "gotRenamed")] + ~source + ~expected:"export function foo() { let x = gotRenamed; }" + ~mapper:(new useless_mapper) ); + ( "assignment_left" + >:: fun ctxt -> + let source = "rename = 6;" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed = 6;" + ~mapper:(new useless_mapper) ); + ( "assignment_right" + >:: fun ctxt -> + let source = "x = rename;" in + assert_edits_equal + ctxt + ~edits:[((4, 10), "gotRenamed")] + ~source + ~expected:"x = gotRenamed;" + ~mapper:(new useless_mapper) ); + ( "list_diff_simple" + >:: fun ctxt -> + let a = "a" in + let b = "b" in + let old_list = [a] in + let new_list = [b] in + let edits = [(0, Replace (a, b))] in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_simple2" + >:: fun ctxt -> + let a = "a" in + let b = "b" in + let old_list = [a; a] in + let new_list = [b; b] in + let edits = [(0, Replace (a, b)); (1, Replace (a, b))] in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_simple3" + >:: fun ctxt -> + let a = "a" in + let b = "b" in + let old_list = [a; a] in + let new_list = [b; b; b; b] in + let edits = [(0, Replace (a, b)); (1, 
Replace (a, b)); (1, Insert (None, [b; b]))] in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_simple4" + >:: fun ctxt -> + let a = "a" in + let b = "b" in + let old_list = [a; a; a; a] in + let new_list = [b; b] in + let edits = [(0, Replace (a, b)); (1, Replace (a, b)); (2, Delete a); (3, Delete a)] in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_paper" + >:: fun ctxt -> + let a = "a" in + let b = "b" in + let c = "c" in + let old_list = [a; b; c; a; b; b; a] in + let new_list = [c; b; a; b; a; c] in + let edits = + [ + (0, Delete a); + (1, Delete b); + (3, Delete a); + (4, Insert (None, [a])); + (6, Insert (None, [c])); + ] + in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_flip" + >:: fun ctxt -> + let x = "x" in + let y = "y" in + let old_list = [x; x; x; y; y; y] in + let new_list = [y; y; y; x; x; x] in + let edits = + [(0, Delete x); (1, Delete x); (2, Delete x); (5, Insert (None, [x; x; x]))] + in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_sentence" + >:: fun ctxt -> + let (t', h, i, s, space, e, n, t, c, o, pd, d) = + ("T", "h", "i", "s", " ", "e", "n", "t", "c", "o", ".", "d") + in + (*"This is sentence one."*) + let old_list = + [t'; h; i; s; space; i; s; space; s; e; n; t; e; n; c; e; space; o; n; e; pd] + in + (*"This is the second sentence"*) + let new_list = + [ + t'; + h; + i; + s; + space; + i; + s; + space; + t; + h; + e; + space; + s; + e; + c; + o; + n; + d; + space; + s; + e; + n; + t; + e; + n; + c; + e; + pd; + ] + in + let edits = + [ + (7, Insert (None, [t; h; e; space])); + (9, Insert (None, [c; o])); + (11, Replace (t, d)); + (11, Insert (None, [space; s])); + (14, Replace (c, t)); + (16, Delete space); + (17, Delete o); + (18, Insert (None, [c])); + ] + in + let script = list_diff Standard old_list new_list in + debug_print_string_script script; + assert_equal ~ctxt (Some edits) script ); + ( "list_diff_simple5" + >:: fun ctxt -> + let a = "a" in + let b = "b" in + let old_list = [a; b] in + let new_list = [b] in + let edits = [(0, Delete a)] in + let script = list_diff Standard old_list new_list in + assert_equal ~ctxt (Some edits) script ); + ( "pattern_identifier" + >:: fun ctxt -> + let source = "let rename = 0" in + assert_edits_equal + ctxt + ~edits:[((4, 10), "gotRenamed")] + ~source + ~expected:"let gotRenamed = 0" + ~mapper:(new useless_mapper) ); + ( "pattern_array" + >:: fun ctxt -> + let source = "let [rename,rename] = [0]" in + assert_edits_equal + ctxt + ~edits:[((5, 11), "gotRenamed"); ((12, 18), "gotRenamed")] + ~source + ~expected:"let [gotRenamed,gotRenamed] = [0]" + ~mapper:(new useless_mapper) ); + ( "pattern_array_nested" + >:: fun ctxt -> + let source = "let [[[rename]]] = 0" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"let [[[gotRenamed]]] = 0" + ~mapper:(new useless_mapper) ); + ( "pattern_array_rest" + >:: fun ctxt -> + let source = "let [a,b,...rename] = 0" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "gotRenamed")] + ~source + ~expected:"let [a,b,...gotRenamed] = 0" + ~mapper:(new useless_mapper) ); + ( "pattern_array_annot" + >:: fun ctxt -> + let source = "let [foo,bar]: rename = [0]" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"let [foo,bar]: gotRenamed = [0]" + 
~mapper:(new useless_mapper) ); + ( "pattern_object_longhand" + >:: fun ctxt -> + let source = "let {rename: rename} = 0" in + assert_edits_equal + ctxt + ~edits:[((5, 11), "gotRenamed"); ((13, 19), "gotRenamed")] + ~source + ~expected:"let {gotRenamed: gotRenamed} = 0" + ~mapper:(new useless_mapper) ); + ( "pattern_object_rest" + >:: fun ctxt -> + let source = "let {a,b,...rename} = 0" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "gotRenamed")] + ~source + ~expected:"let {a,b,...gotRenamed} = 0" + ~mapper:(new useless_mapper) ); + ( "pattern_object_annot" + >:: fun ctxt -> + let source = "let {foo: bar}: rename = 0" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "gotRenamed")] + ~source + ~expected:"let {foo: bar}: gotRenamed = 0" + ~mapper:(new useless_mapper) ); + ( "pattern_assignment" + >:: fun ctxt -> + let source = "let [a=rename] = 0" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"let [a=gotRenamed] = 0" + ~mapper:(new useless_mapper) ); + ( "type_cast_expr" + >:: fun ctxt -> + let source = "(rename: string)" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "gotRenamed")] + ~source + ~expected:"(gotRenamed: string)" + ~mapper:(new useless_mapper) ); + ( "type_cast_type" + >:: fun ctxt -> + let source = "(dontrename: number)" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "string")] + ~source + ~expected:"(dontrename: string)" + ~mapper:(new useless_mapper) ); + ( "type_cast_assign" + >:: fun ctxt -> + let source = "const x : number = (dontrename: number)" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "string"); ((32, 38), "string")] + ~source + ~expected:"const x : string = (dontrename: string)" + ~mapper:(new useless_mapper) ); + ( "type_cast_add" + >:: fun ctxt -> + let source = "const dontrename = call( /* preserve spaces */ )" in + assert_edits_equal + ctxt + ~edits:[((19, 19), "("); ((49, 49), ": any)")] + ~source + ~mapper:(new insert_typecast_mapper) + ~expected:"const dontrename = (call( /* preserve spaces */ ): any)" ); + ( "class_type_param_instantiation" + >:: fun ctxt -> + let source = "class A extends B<{}> { m(): rename {} }" in + assert_edits_equal + ctxt + ~edits:[((29, 35), "gotRenamed")] + ~source + ~expected:"class A extends B<{}> { m(): gotRenamed {} }" + ~mapper:(new useless_mapper) ); + ( "logical_operator_left" + >:: fun ctxt -> + let source = "rename && b" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed && b" + ~mapper:(new useless_mapper) ); + ( "logical_operator_right" + >:: fun ctxt -> + let source = "a || rename" in + assert_edits_equal + ctxt + ~edits:[((5, 11), "gotRenamed")] + ~source + ~expected:"a || gotRenamed" + ~mapper:(new useless_mapper) ); + ( "logical_operator_changed" + >:: fun ctxt -> + let source = "a ?? 
b" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "(a || b)")] + ~source + ~expected:"(a || b)" + ~mapper:(new useless_mapper) ); + ( "insert_import_split" + >:: fun ctxt -> + let source = "5 - (2 + 2)" in + assert_edits_equal_standard_only + ctxt + ~edits:[((0, 0), "import {baz} from \"baz\";"); ((5, 10), "(2 - 2)")] + ~source + ~expected:"import {baz} from \"baz\";5 - ((2 - 2))" + ~mapper:(new insert_import_mapper) ); + ( "insert_import_existing_split" + >:: fun ctxt -> + let source = "foo; 5 - (2 + 2)" in + assert_edits_equal_standard_only + ctxt + ~edits:[((0, 0), "import {baz} from \"baz\";"); ((10, 15), "(2 - 2)")] + ~source + ~expected:"import {baz} from \"baz\";foo; 5 - ((2 - 2))" + ~mapper:(new insert_import_mapper) ); + ( "insert_import_second_split" + >:: fun ctxt -> + let source = "import bing from 'bing'; 5 - (2 + 2)" in + assert_edits_equal_standard_only + ctxt + ~edits:[((24, 24), "import {baz} from \"baz\";"); ((30, 35), "(2 - 2)")] + ~source + ~expected:"import bing from 'bing';import {baz} from \"baz\"; 5 - ((2 - 2))" + ~mapper:(new insert_second_import_mapper) ); + ( "existing_cjs_import_split" + >:: fun ctxt -> + let source = "const x = require('bing'); 5 - (2 + 2)" in + assert_edits_equal_standard_only + ctxt + ~edits:[((26, 26), "import {baz} from \"baz\";"); ((32, 37), "(2 - 2)")] + ~source + ~expected:"const x = require('bing');import {baz} from \"baz\"; 5 - ((2 - 2))" + ~mapper:(new insert_second_import_mapper) ); + ( "insert_cjs_import_split" + >:: fun ctxt -> + let source = "import 'bing'; 5 - (2 + 2)" in + assert_edits_equal_standard_only + ctxt + ~edits:[((14, 14), "require(\"baz\");"); ((20, 25), "(2 - 2)")] + ~source + ~expected:"import 'bing';require(\"baz\"); 5 - ((2 - 2))" + ~mapper:(new insert_second_cjsimport_mapper) ); + ( "pathological_import_split" + >:: fun ctxt -> + let source = "import 'baz'; import 'bing'; 5 - (2 + 2);" in + assert_edits_equal_standard_only + ctxt + ~edits:[((0, 0), "5 - (2 + 2);")] + ~source + ~expected:"5 - (2 + 2);import 'baz'; import 'bing'; 5 - (2 + 2);" + ~mapper:(new insert_begin_mapper) ); + ( "remove_import_split" + >:: fun ctxt -> + let source = "import 'baz';5 - (2 + 2);" in + assert_edits_equal_standard_only + ctxt + ~edits:[((0, 13), "")] + ~source + ~expected:"5 - (2 + 2);" + ~mapper:(new delete_mapper) ); + ( "add_body_split" + >:: fun ctxt -> + let source = "import 'baz';" in + assert_edits_equal_standard_only + ctxt + ~edits:[((13, 13), "foo(\"baz\");")] + ~source + ~expected:"import 'baz';foo(\"baz\");" + ~mapper:(new add_body_mapper) ); + ( "add_to_body_split" + >:: fun ctxt -> + let source = "import 'baz'; bar(qux);" in + assert_edits_equal_standard_only + ctxt + ~edits:[((23, 23), "foo(\"baz\");")] + ~source + ~expected:"import 'baz'; bar(qux);foo(\"baz\");" + ~mapper:(new add_body_mapper) ); + ( "remove_body_split" + >:: fun ctxt -> + let source = "import 'baz';5 - (2 + 2);" in + assert_edits_equal_standard_only + ctxt + ~edits:[((13, 25), "")] + ~source + ~expected:"import 'baz';" + ~mapper:(new delete_end_mapper) ); + ( "spread_simple" + >:: fun ctxt -> + let source = "[...rename]" in + assert_edits_equal + ctxt + ~edits:[((4, 10), "gotRenamed")] + ~source + ~expected:"[...gotRenamed]" + ~mapper:(new useless_mapper) ); + ( "tagged_template_tag" + >:: fun ctxt -> + let source = "rename`dontRename`" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed`dontRename`" + ~mapper:(new useless_mapper) ); + ( "tagged_template_literal" + >:: fun ctxt -> + let source = 
"dontRename`rename`" in + assert_edits_equal + ctxt + ~edits:[((10, 18), "`gotRenamed`")] + ~source + ~expected:"dontRename`gotRenamed`" + ~mapper:(new useless_mapper) ); + ( "template_literal_simple" + >:: fun ctxt -> + let source = "`rename`" in + assert_edits_equal + ctxt + ~edits:[((0, 8), "`gotRenamed`")] + ~source + ~expected:"`gotRenamed`" + ~mapper:(new useless_mapper) ); + ( "template_literal_expr" + >:: fun ctxt -> + let source = "`foo ${rename} bar`" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"`foo ${gotRenamed} bar`" + ~mapper:(new useless_mapper) ); + ( "template_literal_expr_multiple" + >:: fun ctxt -> + let source = "let test = `${rename} ${foo} bar ${rename}`" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "gotRenamed"); ((35, 41), "gotRenamed")] + ~source + ~expected:"let test = `${gotRenamed} ${foo} bar ${gotRenamed}`" + ~mapper:(new useless_mapper) ); + ( "jsx_element_self_closing_simple" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_self_closing_namespaced_namespace" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "GOT_RENAMED")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_self_closing_namespaced_name" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_self_closing_member_expr_object" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "GotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_self_closing_member_expr_name" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_self_closing_member_expr_nested_object" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "GotRenamed"); ((19, 25), "GotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_simple" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((1, 7), "gotRenamed"); ((10, 16), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_member_expr_nested" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits: + [ + ((1, 7), "GotRenamed"); + ((19, 25), "gotRenamed"); + ((28, 34), "GotRenamed"); + ((46, 52), "gotRenamed"); + ] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_to_self_closing" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((0, 27), "()")] + ~source + ~expected:"()" + ~mapper:(new useless_mapper) ); + ( "jsx_element_from_self_closing" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((0, 18), "()")] + ~source + ~expected:"()" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_name" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((11, 17), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_value_expression_literal" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((21, 22), "5")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_value_expression_binop" + 
>:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((21, 26), "(5 - 5)")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_name_and_value" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((11, 17), "gotRenamed"); ((19, 20), "5")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_list_name" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((26, 32), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_list_expression_literal" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((21, 22), "5"); ((37, 38), "5")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_spread_attribute" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_spread_attribute_list_mixed" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed"); ((23, 29), "gotRenamed"); ((31, 32), "5")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_attribute_list_name_and_value" + >:: fun ctxt -> + let source = "" in + assert_edits_equal + ctxt + ~edits:[((11, 17), "gotRenamed"); ((34, 35), "5")] + ~source + ~expected:"" + ~mapper:(new useless_mapper) ); + ( "jsx_element_child_element" + >:: fun ctxt -> + let source = "
" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed")] + ~source + ~expected:"
" + ~mapper:(new useless_mapper) ); + ( "jsx_element_child_fragment" + >:: fun ctxt -> + let source = "
<>rename
" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"
<>gotRenamed
" + ~mapper:(new useless_mapper) ); + ( "jsx_element_child_expr" + >:: fun ctxt -> + let source = "
{rename}
" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed")] + ~source + ~expected:"
{gotRenamed}
" + ~mapper:(new useless_mapper) ); + ( "jsx_element_child_spread" + >:: fun ctxt -> + let source = "
{...rename}
" in + assert_edits_equal + ctxt + ~edits:[((9, 15), "gotRenamed")] + ~source + ~expected:"
{...gotRenamed}
" + ~mapper:(new useless_mapper) ); + ( "jsx_element_child_text" + >:: fun ctxt -> + let source = "
rename
" in + assert_edits_equal + ctxt + ~edits:[((5, 11), "gotRenamed")] + ~source + ~expected:"
gotRenamed
" + ~mapper:(new useless_mapper) ); + ( "jsx_element_children" + >:: fun ctxt -> + let source = "
{rename}
" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed"); ((15, 21), "gotRenamed")] + ~source + ~expected:"
{gotRenamed}
" + ~mapper:(new useless_mapper) ); + ( "jsx_element_children_nested" + >:: fun ctxt -> + let source = "
<>
" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed"); ((16, 22), "gotRenamed"); ((30, 36), "gotRenamed")] + ~source + ~expected:"
<>
" + ~mapper:(new useless_mapper) ); + ( "jsx_fragment_expr" + >:: fun ctxt -> + let source = "<>{rename}" in + assert_edits_equal + ctxt + ~edits:[((3, 9), "gotRenamed")] + ~source + ~expected:"<>{gotRenamed}" + ~mapper:(new useless_mapper) ); + ( "declare_type_alias_id" + >:: fun ctxt -> + let source = "declare type Rename = string" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "GotRenamed")] + ~source + ~expected:"declare type GotRenamed = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_id" + >:: fun ctxt -> + let source = "type Rename = string" in + assert_edits_equal + ctxt + ~edits:[((5, 11), "GotRenamed")] + ~source + ~expected:"type GotRenamed = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_intersection_left" + >:: fun ctxt -> + let source = "type foo = number & bar" in + assert_edits_equal + ctxt + ~edits:[((11, 17), "string")] + ~source + ~expected:"type foo = string & bar" + ~mapper:(new useless_mapper) ); + ( "type_alias_intersection_right" + >:: fun ctxt -> + let source = "type foo = bar & number" in + assert_edits_equal + ctxt + ~edits:[((17, 23), "string")] + ~source + ~expected:"type foo = bar & string" + ~mapper:(new useless_mapper) ); + ( "type_alias_intersection_rest" + >:: fun ctxt -> + let source = "type foo = bar & baz & number & number" in + assert_edits_equal + ctxt + ~edits:[((23, 29), "string"); ((32, 38), "string")] + ~source + ~expected:"type foo = bar & baz & string & string" + ~mapper:(new useless_mapper) ); + ( "type_alias_intersection_argument_mismatch" + >:: fun ctxt -> + let source = "type foo = bar & true & boolean" in + assert_edits_equal + ctxt + ~edits:[((11, 31), "bar & true")] + ~source + ~expected:"type foo = bar & true" + ~mapper:(new remove_annotation_rest_mapper) ); + ( "type_alias_nullable" + >:: fun ctxt -> + let source = "type foo = ?number" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "string")] + ~source + ~expected:"type foo = ?string" + ~mapper:(new useless_mapper) ); + ( "type_alias_number_literal" + >:: fun ctxt -> + let source = "type foo = 5.0" in + assert_edits_equal + ctxt + ~edits:[((11, 14), "4.0")] + ~source + ~expected:"type foo = 4.0" + ~mapper:(new useless_mapper) ); + ( "type_alias_param_name" + >:: fun ctxt -> + let source = "type alias = string" in + assert_edits_equal + ctxt + ~edits:[((11, 17), "GOT_RENAMED")] + ~source + ~expected:"type alias = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_param_bound" + >:: fun ctxt -> + let source = "type alias = string" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "string")] + ~source + ~expected:"type alias = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_param_variance" + >:: fun ctxt -> + let source = "type alias<-A> = string" in + assert_edits_equal + ctxt + ~edits:[((11, 12), "+")] + ~source + ~expected:"type alias<+A> = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_param_insert_variance" + >:: fun ctxt -> + let source = "type alias
= string" in + assert_edits_equal + ctxt + ~edits:[((11, 12), "+A")] + ~source + ~expected:"type alias<+A> = string" + ~mapper:(new insert_variance_mapper) ); + ( "type_alias_param_bound_insert_variance" + >:: fun ctxt -> + let source = "type alias = string" in + assert_edits_equal + ctxt + ~edits:[((11, 20), "+A: string")] + ~source + ~expected:"type alias<+A: string> = string" + ~mapper:(new insert_variance_mapper) ); + ( "type_alias_param_delete_variance" + >:: fun ctxt -> + let source = "type alias<-A> = string" in + assert_edits_equal + ctxt + ~edits:[((11, 12), "")] + ~source + ~expected:"type alias = string" + ~mapper:(new delete_variance_mapper) ); + ( "type_alias_param_default" + >:: fun ctxt -> + let source = "type alias = string" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "string")] + ~source + ~expected:"type alias = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_param_combo" + >:: fun ctxt -> + let source = "type alias<-RENAME: number = number> = string" in + assert_edits_equal + ctxt + ~edits: + [ + ((11, 12), "+"); + ((12, 18), "GOT_RENAMED"); + ((20, 26), "string"); + ((29, 35), "string"); + ] + ~source + ~expected:"type alias<+GOT_RENAMED: string = string> = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_param_list" + >:: fun ctxt -> + let source = "type alias<-RENAME, RENAME: number> = string" in + assert_edits_equal + ctxt + ~edits: + [ + ((11, 12), "+"); + ((12, 18), "GOT_RENAMED"); + ((20, 26), "GOT_RENAMED"); + ((28, 34), "string"); + ] + ~source + ~expected:"type alias<+GOT_RENAMED, GOT_RENAMED: string> = string" + ~mapper:(new useless_mapper) ); + ( "declare_type_alias_right" + >:: fun ctxt -> + let source = "declare type alias = number" in + assert_edits_equal + ctxt + ~edits:[((21, 27), "string")] + ~source + ~expected:"declare type alias = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_right" + >:: fun ctxt -> + let source = "type alias = number" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "string")] + ~source + ~expected:"type alias = string" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_function_type_params" + >:: fun ctxt -> + let source = "type alias = (rename: string, bar: number, ...rename: string) => string" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "gotRenamed"); ((35, 41), "string"); ((46, 52), "gotRenamed")] + ~source + ~expected: + "type alias = (gotRenamed: string, bar: string, ...gotRenamed: string) => string" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_function_type_tparams" + >:: fun ctxt -> + let source = "type alias = (param: string) => string" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "GOT_RENAMED")] + ~source + ~expected:"type alias = (param: string) => string" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_function_type_return" + >:: fun ctxt -> + let source = "type alias = string => number" in + assert_edits_equal + ctxt + ~edits:[((23, 29), "string")] + ~source + ~expected:"type alias = string => string" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_object_type" + >:: fun ctxt -> + let source = "type alias = { rename: string }" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"type alias = { gotRenamed: string }" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_object_property_value_get" + >:: fun ctxt -> + let source = "type alias = { get rename(): void; }" in + assert_edits_equal + ctxt + ~edits:[((19, 25), "gotRenamed")] + ~source + ~expected:"type alias = { get gotRenamed(): void; 
}" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_object_property_value_set" + >:: fun ctxt -> + let source = "type alias = { set foo(value: number): void; }" in + assert_edits_equal + ctxt + ~edits:[((30, 36), "string")] + ~source + ~expected:"type alias = { set foo(value: string): void; }" + ~mapper:(new useless_mapper) ); + ( "type_alias_right_object_variance" + >:: fun ctxt -> + let source = "type alias = { -foo: string }" in + assert_edits_equal + ctxt + ~edits:[((15, 16), "+")] + ~source + ~expected:"type alias = { +foo: string }" + ~mapper:(new useless_mapper) ); + ( "opaque_type_id" + >:: fun ctxt -> + let source = "opaque type Rename = string" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "GotRenamed")] + ~source + ~expected:"opaque type GotRenamed = string" + ~mapper:(new useless_mapper) ); + ( "opaque_type_param" + >:: fun ctxt -> + let source = "opaque type foo = string" in + assert_edits_equal + ctxt + ~edits:[((16, 22), "GOT_RENAMED")] + ~source + ~expected:"opaque type foo = string" + ~mapper:(new useless_mapper) ); + ( "opaque_type_impl" + >:: fun ctxt -> + let source = "opaque type foo = number" in + assert_edits_equal + ctxt + ~edits:[((21, 27), "string")] + ~source + ~expected:"opaque type foo = string" + ~mapper:(new useless_mapper) ); + ( "opaque_type_super" + >:: fun ctxt -> + let source = "opaque type foo: number = string" in + assert_edits_equal + ctxt + ~edits:[((17, 23), "string")] + ~source + ~expected:"opaque type foo: string = string" + ~mapper:(new useless_mapper) ); + ( "opaque_type_combo" + >:: fun ctxt -> + let source = "opaque type Rename: number = number" in + assert_edits_equal + ctxt + ~edits: + [ + ((12, 18), "GotRenamed"); + ((19, 25), "GOT_RENAMED"); + ((28, 34), "string"); + ((37, 43), "string"); + ] + ~source + ~expected:"opaque type GotRenamed: string = string" + ~mapper:(new useless_mapper) ); + ( "call_insert" + >:: fun ctxt -> + let source = "callFunction(class A { f = (x: string) => x; });" in + assert_edits_equal + ctxt + ~edits:[((24, 24), ": number")] + ~source + ~expected:"callFunction(class A { f: number = (x: string) => x; });" + ~mapper:(new prop_annot_mapper) ); + ( "new_insert" + >:: fun ctxt -> + let source = "new MyClass(class A { f = (x: string) => x; });" in + assert_edits_equal + ctxt + ~edits:[((23, 23), ": number")] + ~source + ~expected:"new MyClass(class A { f: number = (x: string) => x; });" + ~mapper:(new prop_annot_mapper) ); + ( "insert_inside_array" + >:: fun ctxt -> + let source = "[{ render() { class A { f = (x: string) => x; } return new A() } }]" in + assert_edits_equal + ctxt + ~edits:[((25, 25), ": number")] + ~source + ~expected:"[{ render() { class A { f: number = (x: string) => x; } return new A() } }]" + ~mapper:(new prop_annot_mapper) ); + ( "update_same_op" + >:: fun ctxt -> + let source = "++rename" in + assert_edits_equal + ctxt + ~edits:[((2, 8), "gotRenamed")] + ~source + ~expected:"++gotRenamed" + ~mapper:(new useless_mapper) ); + ( "update_diff_op" + >:: fun ctxt -> + let source = "--rename" in + assert_edits_equal + ctxt + ~edits:[((0, 8), "(++gotRenamed)")] + ~source + ~expected:"(++gotRenamed)" + ~mapper:(new useless_mapper) ); + ( "update_arrow_function_single_param" + >:: fun ctxt -> + let source = "const x = bla => { return 0; };" in + assert_edits_equal + ctxt + ~edits:[((7, 7), ": number"); ((10, 13), "(bla: number)"); ((13, 13), ": number")] + ~source + ~expected:"const x: number = (bla: number): number => { return 0; };" + ~mapper:(new insert_annot_mapper) ); + ( 
"update_arrow_function_function_return" + >:: fun ctxt -> + let source = "const x = bla => { return 0; };" in + assert_edits_equal + ctxt + ~edits: + [ + ((7, 7), ": (() => number)"); + ((10, 13), "(bla: () => number)"); + ((13, 13), ": (() => number)"); + ] + ~source + ~expected: + "const x: (() => number) = (bla: () => number): (() => number) => { return 0; };" + ~mapper:(new insert_function_annot_mapper) ); + ( "new_imports_after_directive_dont_reprint_the_file" + >:: fun ctxt -> + let source = "'use strict';const x = bla => { return 0; };" in + assert_edits_equal_standard_only + ctxt + ~edits: + [ + ( (13, 13), + "import type {there as here} from \"new_import1\"; +import type {there as here} from \"new_import2\";" + ); + ((20, 20), ": (() => number)"); + ((23, 26), "(bla: () => number)"); + ((26, 26), ": (() => number)"); + ] + ~source + ~expected: + "'use strict';import type {there as here} from \"new_import1\"; +import type {there as here} from \"new_import2\";const x: (() => number) = (bla: () => number): (() => number) => { return 0; };" + ~mapper:(new insert_import_and_annot_mapper) ); + ( "import_renamed_simple" + >:: fun ctxt -> + let source = "import rename from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"import gotRenamed from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_renamed_simple_multiple1" + >:: fun ctxt -> + let source = "import rename, {bar} from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"import gotRenamed, {bar} from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_renamed_simple_multiple2" + >:: fun ctxt -> + let source = "import bar, {rename} from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "gotRenamed")] + ~source + ~expected:"import bar, {gotRenamed} from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_renamed_simple1" + >:: fun ctxt -> + let source = "import {rename} from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "gotRenamed")] + ~source + ~expected:"import {gotRenamed} from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_renamed_multiple" + >:: fun ctxt -> + let source = "import {rename, bar} from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "gotRenamed")] + ~source + ~expected:"import {gotRenamed, bar} from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_renamed_whole_module" + >:: fun ctxt -> + let source = "import * as rename from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "gotRenamed")] + ~source + ~expected:"import * as gotRenamed from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_type1" + >:: fun ctxt -> + let source = "import type rename from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((12, 18), "gotRenamed")] + ~source + ~expected:"import type gotRenamed from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_type_and_fn2" + >:: fun ctxt -> + let source = "import rename, {type foo} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"import gotRenamed, {type foo} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_multiple_names1" + >:: fun ctxt -> + let source = "import rename, {myBar, myBaz} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"import gotRenamed, {myBar, myBaz} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_multiple_names2" + >:: fun ctxt -> + let source = "import 
myBar, {rename, myBaz} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"import myBar, {gotRenamed, myBaz} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_type2" + >:: fun ctxt -> + let source = "import type {rename} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "gotRenamed")] + ~source + ~expected:"import type {gotRenamed} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_fn_and_rename_module1" + >:: fun ctxt -> + let source = "import rename, * as myModule from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((7, 13), "gotRenamed")] + ~source + ~expected:"import gotRenamed, * as myModule from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_fn_and_rename_module2" + >:: fun ctxt -> + let source = "import foo, * as rename from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((17, 23), "gotRenamed")] + ~source + ~expected:"import foo, * as gotRenamed from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_rename" + >:: fun ctxt -> + let source = "import {rename as bar} from \"foo\";" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "gotRenamed")] + ~source + ~expected:"import {gotRenamed as bar} from \"foo\";" + ~mapper:(new useless_mapper) ); + ( "import_type_and_fn1" + >:: fun ctxt -> + let source = "import foo, {type rename} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((18, 24), "gotRenamed")] + ~source + ~expected:"import foo, {type gotRenamed} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_type3" + >:: fun ctxt -> + let source = "import {type rename} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((13, 19), "gotRenamed")] + ~source + ~expected:"import {type gotRenamed} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_typeof1" + >:: fun ctxt -> + let source = "import typeof rename from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((14, 20), "gotRenamed")] + ~source + ~expected:"import typeof gotRenamed from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "import_typeof2" + >:: fun ctxt -> + let source = "import typeof {rename} from \"bar\";" in + assert_edits_equal + ctxt + ~edits:[((15, 21), "gotRenamed")] + ~source + ~expected:"import typeof {gotRenamed} from \"bar\";" + ~mapper:(new useless_mapper) ); + ( "throw" + >:: fun ctxt -> + let source = "throw \"rename\";" in + assert_edits_equal + ctxt + ~edits:[((6, 14), "\"gotRenamed\"")] + ~source + ~expected:"throw \"gotRenamed\";" + ~mapper:(new literal_mapper) ); + ( "bool1" + >:: fun ctxt -> + let source = "rename = true;" in + assert_edits_equal + ctxt + ~edits:[((0, 6), "gotRenamed")] + ~source + ~expected:"gotRenamed = true;" + ~mapper:(new useless_mapper) ); + ( "bool2" + >:: fun ctxt -> + let source = "const rename = 0; Boolean(rename);" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed"); ((26, 32), "gotRenamed")] + ~source + ~expected:"const gotRenamed = 0; Boolean(gotRenamed);" + ~mapper:(new useless_mapper) ); + ( "bool3" + >:: fun ctxt -> + let source = "const rename = true; Boolean((false || rename));" in + assert_edits_equal + ctxt + ~edits:[((6, 12), "gotRenamed"); ((39, 45), "gotRenamed")] + ~source + ~expected:"const gotRenamed = true; Boolean((false || gotRenamed));" + ~mapper:(new useless_mapper) ); + ( "bool_change" + >:: fun ctxt -> + let source = "const x = true; Boolean(true);" in + assert_edits_equal + ctxt + ~edits:[((10, 14), "false"); ((24, 28), "false")] + ~source + ~expected:"const x = false; Boolean(false);" + ~mapper:(new 
true_to_false_mapper) ); + ( "bool_type_change" + >:: fun ctxt -> + let source = "const x: true = 'garbage';" in + assert_edits_equal + ctxt + ~edits:[((9, 13), "false")] + ~source + ~expected:"const x: false = 'garbage';" + ~mapper:(new true_to_false_mapper) ); + ( "comment_add" + >:: fun ctxt -> + let source = "bla" in + assert_edits_equal + ctxt + ~edits:[((0, 0), "/*hello*/"); ((3, 3), "/*bye*/")] + ~source + ~expected:"/*hello*/bla/*bye*/" + ~mapper:(new add_comment_mapper) ); + ( "comment_modify" + >:: fun ctxt -> + let source = "/*MAL*/bla/*WRONG*/" in + assert_edits_equal + ctxt + ~edits:[((0, 7), "/*hello*/"); ((10, 19), "/*bye*/")] + ~source + ~expected:"/*hello*/bla/*bye*/" + ~mapper:(new add_comment_mapper) ); + ( "comment_annot_generic_deep" + >:: fun ctxt -> + let source = "const a: Box<Bar> = {}" in + assert_edits_equal + ctxt + ~source + ~mapper:(new add_comment_mapper) + ~edits: + [ + ((6, 6), "/*hello*/"); + ((7, 7), "/*bye*/"); + ((9, 9), "/*hello*/"); + ((12, 12), "/*bye*/"); + ((13, 13), "/*hello*/"); + ((16, 16), "/*bye*/"); + ] + ~expected:"const /*hello*/a/*bye*/: /*hello*/Box/*bye*/</*hello*/Bar/*bye*/> = {}" ); + ( "let_union_first" + >:: fun ctxt -> + let source = "let x : number | void = 42;" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "string")] + ~source + ~expected:"let x : string | void = 42;" + ~mapper:(new useless_mapper) ); + ( "let_union_second" + >:: fun ctxt -> + let source = "let x : boolean | number = 42;" in + assert_edits_equal + ctxt + ~edits:[((18, 24), "string")] + ~source + ~expected:"let x : boolean | string = 42;" + ~mapper:(new useless_mapper) ); + ( "let_union_rest" + >:: fun ctxt -> + let source = "let x : boolean | void | number = 42;" in + assert_edits_equal + ctxt + ~edits:[((25, 31), "string")] + ~source + ~expected:"let x : boolean | void | string = 42;" + ~mapper:(new useless_mapper) ); + ( "type_alias_union_argument_mismatch" + >:: fun ctxt -> + let source = "type foo = bar | true | boolean" in + assert_edits_equal + ctxt + ~edits:[((11, 31), "bar | true")] + ~source + ~expected:"type foo = bar | true" + ~mapper:(new remove_annotation_rest_mapper) ); + ( "array_type" + >:: fun ctxt -> + let source = "let x : rename[] = []" in + assert_edits_equal + ctxt + ~edits:[((8, 14), "gotRenamed")] + ~source + ~expected:"let x : gotRenamed[] = []" + ~mapper:(new useless_mapper) ); + ( "sequence1" + >:: fun ctxt -> + let source = "(a, b, c, rename, d)" in + assert_edits_equal + ctxt + ~edits:[((10, 16), "gotRenamed")] + ~source + ~expected:"(a, b, c, gotRenamed, d)" + ~mapper:(new useless_mapper) ); + ( "sequence2" + >:: fun ctxt -> + let source = "(a, b, c, d)" in + assert_edits_equal + ctxt + ~edits:[((1, 11), "(a, b, c, d, a, b, c, d)")] + ~source + ~expected:"((a, b, c, d, a, b, c, d))" + ~mapper:(new double_sequence_mapper) ); + ] diff --git a/src/parser_utils/__tests__/flow_polymorphic_ast_mapper_test.ml b/src/parser_utils/__tests__/flow_polymorphic_ast_mapper_test.ml index a0a9af9ab2b..a52d099e9bb 100644 --- a/src/parser_utils/__tests__/flow_polymorphic_ast_mapper_test.ml +++ b/src/parser_utils/__tests__/flow_polymorphic_ast_mapper_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -7,23 +7,29 @@ open OUnit2 -class ['a] mapper = object(_) - inherit [Loc.t, 'a, Loc.t, 'a] Flow_polymorphic_ast_mapper.mapper - method on_loc_annot (x: Loc.t) = x - method on_type_annot (x: 'a) = x -end +class ['a] mapper = + object + inherit [Loc.t, 'a, Loc.t, 'a] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot (x : Loc.t) = x + + method on_type_annot (x : 'a) = x + end (* these tests don't do much other than check that the mapper doesn't raise exceptions *) let run_mapper source = - let ast, _ = Parser_flow.program source in + let (ast, _) = Parser_flow.program source in let mapper = new mapper in let _ = mapper#program ast in () -let tests = "polymorphic ast mapper" >::: [ - "simple" >:: (fun _ -> - let source = "function foo() { (5 * 3); 4; (6 + 4); }" in - run_mapper source; -)] +let tests = + "polymorphic ast mapper" + >::: [ + ( "simple" + >:: fun _ -> + let source = "function foo() { (5 * 3); 4; (6 + 4); }" in + run_mapper source ); + ] diff --git a/src/parser_utils/__tests__/parser_utils_tests.ml b/src/parser_utils/__tests__/parser_utils_tests.ml new file mode 100644 index 00000000000..a94f8ded3cb --- /dev/null +++ b/src/parser_utils/__tests__/parser_utils_tests.ml @@ -0,0 +1,19 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = + "parser_utils" + >::: [ + Scope_builder_test.tests; + Ssa_builder_test.tests; + File_sig_test.tests; + Flow_ast_differ_test.tests; + ] + +let () = run_test_tt_main tests diff --git a/src/parser_utils/__tests__/scope_builder_test.ml b/src/parser_utils/__tests__/scope_builder_test.ml index 061127bd733..3acff5f245d 100644 --- a/src/parser_utils/__tests__/scope_builder_test.ml +++ b/src/parser_utils/__tests__/scope_builder_test.ml @@ -1,222 +1,275 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - open OUnit2 open Test_utils +module Scope_api = Scope_api.With_Loc -let mk_scope_builder_all_uses_test contents expected_all_uses = - begin fun ctxt -> - let info = Scope_builder.program (parse contents) in - let all_uses = Utils_js.LocSet.elements @@ Scope_api.all_uses info in - let printer = print_list Loc.to_string in - assert_equal ~ctxt - ~cmp:(eq printer) - ~printer - ~msg:"All uses don't match!" - expected_all_uses all_uses - end +let mk_scope_builder_all_uses_test contents expected_all_uses ctxt = + let info = Scope_builder.program (parse contents) in + let all_uses = Loc_collections.LocSet.elements @@ Scope_api.all_uses info in + let printer = print_list Loc.debug_to_string in + assert_equal + ~ctxt + ~cmp:(eq printer) + ~printer + ~msg:"All uses don't match!" + expected_all_uses + all_uses -let mk_scope_builder_locs_of_defs_of_all_uses_test contents expected_locs_of_defs = - begin fun ctxt -> - let info = Scope_builder.program (parse contents) in - let all_uses = Utils_js.LocSet.elements @@ Scope_api.all_uses info in - let defs = List.map (Scope_api.def_of_use info) all_uses in - let locs_of_defs = List.map ( - fun { Scope_api.Def.locs; _ } -> Nel.to_list locs - ) defs in - let printer = print_list @@ print_list Loc.to_string in - assert_equal ~ctxt - ~cmp:(eq printer) - ~printer - ~msg:"Defs of all uses don't match!" 
- expected_locs_of_defs locs_of_defs - end +let mk_scope_builder_locs_of_defs_of_all_uses_test contents expected_locs_of_defs ctxt = + let info = Scope_builder.program (parse contents) in + let all_uses = Loc_collections.LocSet.elements @@ Scope_api.all_uses info in + let defs = Core_list.map ~f:(Scope_api.def_of_use info) all_uses in + let locs_of_defs = Core_list.map ~f:(fun { Scope_api.Def.locs; _ } -> Nel.to_list locs) defs in + let printer = print_list @@ print_list Loc.debug_to_string in + assert_equal + ~ctxt + ~cmp:(eq printer) + ~printer + ~msg:"Defs of all uses don't match!" + expected_locs_of_defs + locs_of_defs -let mk_scope_builder_uses_of_all_uses_test contents expected_uses = - begin fun ctxt -> - let info = Scope_builder.program (parse contents) in - let all_uses = Utils_js.LocSet.elements @@ Scope_api.all_uses info in - let uses = List.map (fun use -> - Utils_js.LocSet.elements @@ Scope_api.uses_of_use ~exclude_def:true info use - ) all_uses in - let printer = print_list @@ (fun list -> - Printf.sprintf "[%s]" (print_list Loc.to_string list) - ) in - assert_equal ~ctxt - ~cmp:(eq printer) - ~printer - ~msg:"Uses of all uses don't match!" - expected_uses uses - end +let mk_scope_builder_uses_of_all_uses_test contents expected_uses ctxt = + let info = Scope_builder.program (parse contents) in + let all_uses = Loc_collections.LocSet.elements @@ Scope_api.all_uses info in + let uses = + Core_list.map + ~f:(fun use -> + Loc_collections.LocSet.elements @@ Scope_api.uses_of_use ~exclude_def:true info use) + all_uses + in + let printer = + print_list @@ (fun list -> Printf.sprintf "[%s]" (print_list Loc.debug_to_string list)) + in + assert_equal + ~ctxt + ~cmp:(eq printer) + ~printer + ~msg:"Uses of all uses don't match!" + expected_uses + uses -let mk_scope_builder_scope_loc_test contents expected_scope_locs = - begin fun ctxt -> - let info = Scope_builder.program (parse contents) in - let scope_locs = IMap.elements ( - IMap.map - (fun scope -> scope.Scope_api.Scope.loc) - info.Scope_api.scopes - ) - in - let scope_locs = List.rev scope_locs in - let printer = (fun list -> - Printf.sprintf "[%s]" (print_list (fun (id, loc) -> Printf.sprintf "%d: %s" id (Loc.to_string loc)) list) - ) in - assert_equal ~ctxt - ~cmp:(eq printer) - ~printer - ~msg:"Uses of all uses don't match!" - expected_scope_locs scope_locs - end +let mk_scope_builder_scope_loc_test contents expected_scope_locs ctxt = + let info = Scope_builder.program (parse contents) in + let scope_locs = + IMap.elements (IMap.map (fun scope -> scope.Scope_api.Scope.loc) info.Scope_api.scopes) + in + let scope_locs = List.rev scope_locs in + let printer list = + Printf.sprintf + "[%s]" + (print_list (fun (id, loc) -> Printf.sprintf "%d: %s" id (Loc.debug_to_string loc)) list) + in + assert_equal + ~ctxt + ~cmp:(eq printer) + ~printer + ~msg:"Uses of all uses don't match!" 
+ expected_scope_locs + scope_locs -let tests = "scope_builder" >::: [ - "let_all_uses" >:: mk_scope_builder_all_uses_test - "function foo() { \ - let x = 0; \ - return x; \ - }" - [mk_loc (1, 9) (1, 12); - mk_loc (1, 21) (1, 22); - mk_loc (1, 35) (1, 36)]; - "let_locs_of_defs_of_all_uses" >:: mk_scope_builder_locs_of_defs_of_all_uses_test - "function foo() { \ - let x = 0; \ - return x; \ - }" - [[mk_loc (1, 9) (1, 12)]; - [mk_loc (1, 21) (1, 22)]; - [mk_loc (1, 21) (1, 22)]]; - "let_uses_of_all_uses" >:: mk_scope_builder_uses_of_all_uses_test - "function foo() { \ - let x = 0; \ - return x; \ - }" - [[]; - [mk_loc (1, 35) (1, 36)]; - [mk_loc (1, 35) (1, 36)]]; - "var_locs_of_defs_of_all_uses" >:: mk_scope_builder_locs_of_defs_of_all_uses_test - "function foo({y}) { \ - var {x} = y; \ - return x; \ - }" - [[mk_loc (1, 9) (1, 12)]; - [mk_loc (1, 14) (1, 15)]; - [mk_loc (1, 25) (1, 26)]; - [mk_loc (1, 14) (1, 15)]; - [mk_loc (1, 25) (1, 26)]]; - "var_uses_of_all_uses" >:: mk_scope_builder_uses_of_all_uses_test - "function foo({y}) { \ - var {x} = y; \ - return x; \ - }" - [[]; - [mk_loc (1, 30) (1, 31)]; - [mk_loc (1, 40) (1, 41)]; - [mk_loc (1, 30) (1, 31)]; - [mk_loc (1, 40) (1, 41)]]; - "var_locs_of_defs_of_all_uses2" >:: mk_scope_builder_locs_of_defs_of_all_uses_test - "function foo() { \ - var { x, y } = { x: 0, y: 0 }; \ - var { x: _x, y: _y } = { x, y }; \ - return ({ x: _x, y: _y }); \ - }" - [[mk_loc (1, 9) (1, 12)]; - [mk_loc (1, 23) (1, 24)]; - [mk_loc (1, 26) (1, 27)]; - [mk_loc (1, 57) (1, 59)]; - [mk_loc (1, 64) (1, 66)]; - [mk_loc (1, 23) (1, 24)]; - [mk_loc (1, 26) (1, 27)]; - [mk_loc (1, 57) (1, 59)]; - [mk_loc (1, 64) (1, 66)]]; - "let_uses_of_all_uses2" >:: mk_scope_builder_uses_of_all_uses_test - "function foo() { \ - let { x, y } = { x: 0, y: 0 }; \ - let { x: _x, y: _y } = { x, y }; \ - return ({ x: _x, y: _y }); \ - }" - [[]; - [mk_loc (1, 73) (1, 74)]; - [mk_loc (1, 76) (1, 77)]; - [mk_loc (1, 94) (1, 96)]; - [mk_loc (1, 101) (1, 103)]; - [mk_loc (1, 73) (1, 74)]; - [mk_loc (1, 76) (1, 77)]; - [mk_loc (1, 94) (1, 96)]; - [mk_loc (1, 101) (1, 103)]]; - "jsx_uses_of_all_uses" >:: mk_scope_builder_all_uses_test - "class Foo {}; ; " - [mk_loc (1, 6) (1, 9); - mk_loc (1, 15) (1, 18); - mk_loc (1, 21) (1, 24); - mk_loc (1, 28) (1, 31)]; - "declare_var" >:: mk_scope_builder_all_uses_test - "declare var foo: number; foo" - [mk_loc (1, 12) (1, 15); - mk_loc (1, 25) (1, 28)]; - "declare_class" >:: mk_scope_builder_all_uses_test - "declare class Foo {}; new Foo()" - [mk_loc (1, 14) (1, 17); - mk_loc (1, 26) (1, 29)]; - "declare_function" >:: mk_scope_builder_all_uses_test - "declare function foo(): void; foo()" - [mk_loc (1, 17) (1, 20); - mk_loc (1, 30) (1, 33)]; - "export_named_function" >:: mk_scope_builder_all_uses_test - "export function foo() {}; foo()" - [mk_loc (1, 16) (1, 19); - mk_loc (1, 26) (1, 29)]; - "export_named_class" >:: mk_scope_builder_all_uses_test - "export class Foo {}; new Foo()" - [mk_loc (1, 13) (1, 16); - mk_loc (1, 25) (1, 28)]; - "export_named_binding" >:: mk_scope_builder_all_uses_test - "export const foo = () => {}; foo()" - [mk_loc (1, 13) (1, 16); - mk_loc (1, 29) (1, 32)]; - "export_default_function" >:: mk_scope_builder_all_uses_test - "export default function foo() {}; foo()" - [mk_loc (1, 24) (1, 27); - mk_loc (1, 34) (1, 37)]; - "export_default_class" >:: mk_scope_builder_all_uses_test - "export default class Foo {} new Foo()" - [mk_loc (1, 21) (1, 24); - mk_loc (1, 32) (1, 35)]; - "computed_property_destructuring" >:: 
mk_scope_builder_all_uses_test - "const x = {}; const foo = ''; const {[foo]: bar} = x;" - [mk_loc (1, 6) (1, 7); - mk_loc (1, 20) (1, 23); - mk_loc (1, 38) (1, 41); - mk_loc (1, 44) (1, 47); - mk_loc (1, 51) (1, 52)]; - "scope_loc_function_declaration" >:: mk_scope_builder_scope_loc_test - "function a() {};" - [0, mk_loc (1, 0) (1, 16); (* program *) - 1, mk_loc (1, 0) (1, 16); (* program (lexical) *) - 2, mk_loc (1, 0) (1, 15); (* function params and body *) - 3, mk_loc (1, 13) (1, 15)]; (* block (lexical) *) - "scope_loc_function_expression" >:: mk_scope_builder_scope_loc_test - "const x = function() {};" - [0, mk_loc (1, 0) (1, 24); (* program *) - 1, mk_loc (1, 0) (1, 24); (* program (lexical) *) - 2, mk_loc (1, 10) (1, 23); (* function name (lexical) *) - 3, mk_loc (1, 10) (1, 23); (* function params and body *) - 4, mk_loc (1, 21) (1, 23)]; (* block (lexical) *) - "scope_loc_arrow_function" >:: mk_scope_builder_scope_loc_test - "const x = () => 1;" - [0, mk_loc (1, 0) (1, 18); (* program *) - 1, mk_loc (1, 0) (1, 18); (* program (lexical) *) - 2, mk_loc (1, 10) (1, 17); (* function name (lexical) *) - 3, mk_loc (1, 10) (1, 17)]; (* function params and body *) - "scope_loc_for_in" >:: mk_scope_builder_scope_loc_test - "for (let a in b) {}" - [0, mk_loc (1, 0) (1, 19); (* program *) - 1, mk_loc (1, 0) (1, 19); (* program (lexical) *) - 2, mk_loc (1, 0) (1, 19); (* for in (lexical) *) - 3, mk_loc (1, 17) (1, 19)]; (* block (lexical) *) -] +let tests = + "scope_builder" + >::: [ + "let_all_uses" + >:: mk_scope_builder_all_uses_test + ( "function foo(x, ...y) {\n" + ^ " let z = 0;\n" + ^ " x, y;\n" + ^ " return z;\n" + ^ "}" ) + [ + mk_loc (1, 9) (1, 12); + (* foo *) + mk_loc (1, 13) (1, 14); + (* x def *) + mk_loc (1, 19) (1, 20); + (* y def *) + mk_loc (2, 6) (2, 7); + (* z def *) + mk_loc (3, 2) (3, 3); + (* x use *) + mk_loc (3, 5) (3, 6); + (* y use *) + mk_loc (4, 9) (4, 10); + ]; + (* z use *) + "let_locs_of_defs_of_all_uses" + >:: mk_scope_builder_locs_of_defs_of_all_uses_test + "function foo() { let x = 0; return x; }" + [[mk_loc (1, 9) (1, 12)]; [mk_loc (1, 21) (1, 22)]; [mk_loc (1, 21) (1, 22)]]; + "let_uses_of_all_uses" + >:: mk_scope_builder_uses_of_all_uses_test + "function foo() { let x = 0; return x; }" + [[]; [mk_loc (1, 35) (1, 36)]; [mk_loc (1, 35) (1, 36)]]; + "var_locs_of_defs_of_all_uses" + >:: mk_scope_builder_locs_of_defs_of_all_uses_test + "function foo({y}) { var {x} = y; return x; }" + [ + [mk_loc (1, 9) (1, 12)]; + [mk_loc (1, 14) (1, 15)]; + [mk_loc (1, 25) (1, 26)]; + [mk_loc (1, 14) (1, 15)]; + [mk_loc (1, 25) (1, 26)]; + ]; + "var_uses_of_all_uses" + >:: mk_scope_builder_uses_of_all_uses_test + "function foo({y}) { var {x} = y; return x; }" + [ + []; + [mk_loc (1, 30) (1, 31)]; + [mk_loc (1, 40) (1, 41)]; + [mk_loc (1, 30) (1, 31)]; + [mk_loc (1, 40) (1, 41)]; + ]; + "var_locs_of_defs_of_all_uses2" + >:: mk_scope_builder_locs_of_defs_of_all_uses_test + "function foo() { var { x, y } = { x: 0, y: 0 }; var { x: _x, y: _y } = { x, y }; return ({ x: _x, y: _y }); }" + [ + [mk_loc (1, 9) (1, 12)]; + [mk_loc (1, 23) (1, 24)]; + [mk_loc (1, 26) (1, 27)]; + [mk_loc (1, 57) (1, 59)]; + [mk_loc (1, 64) (1, 66)]; + [mk_loc (1, 23) (1, 24)]; + [mk_loc (1, 26) (1, 27)]; + [mk_loc (1, 57) (1, 59)]; + [mk_loc (1, 64) (1, 66)]; + ]; + "let_uses_of_all_uses2" + >:: mk_scope_builder_uses_of_all_uses_test + "function foo() { let { x, y } = { x: 0, y: 0 }; let { x: _x, y: _y } = { x, y }; return ({ x: _x, y: _y }); }" + [ + []; + [mk_loc (1, 73) (1, 74)]; + [mk_loc (1, 76) 
(1, 77)]; + [mk_loc (1, 94) (1, 96)]; + [mk_loc (1, 101) (1, 103)]; + [mk_loc (1, 73) (1, 74)]; + [mk_loc (1, 76) (1, 77)]; + [mk_loc (1, 94) (1, 96)]; + [mk_loc (1, 101) (1, 103)]; + ]; + "jsx_uses_of_all_uses" + >:: mk_scope_builder_all_uses_test + "class Foo {}; ; " + [ + mk_loc (1, 6) (1, 9); + mk_loc (1, 15) (1, 18); + mk_loc (1, 21) (1, 24); + mk_loc (1, 28) (1, 31); + ]; + "declare_var" + >:: mk_scope_builder_all_uses_test + "declare var foo: number; foo" + [mk_loc (1, 12) (1, 15); mk_loc (1, 25) (1, 28)]; + "declare_export_var" + >:: mk_scope_builder_all_uses_test + "declare export var bar; bar" + [mk_loc (1, 19) (1, 22); mk_loc (1, 24) (1, 27)]; + "declare_class" + >:: mk_scope_builder_all_uses_test + "declare class Foo {}; new Foo()" + [mk_loc (1, 14) (1, 17); mk_loc (1, 26) (1, 29)]; + "declare_function" + >:: mk_scope_builder_all_uses_test + "declare function foo(): void; foo()" + [mk_loc (1, 17) (1, 20); mk_loc (1, 30) (1, 33)]; + "export_named_function" + >:: mk_scope_builder_all_uses_test + "export function foo() {}; foo()" + [mk_loc (1, 16) (1, 19); mk_loc (1, 26) (1, 29)]; + "export_named_class" + >:: mk_scope_builder_all_uses_test + "export class Foo {}; new Foo()" + [mk_loc (1, 13) (1, 16); mk_loc (1, 25) (1, 28)]; + "export_named_binding" + >:: mk_scope_builder_all_uses_test + "export const foo = () => {}; foo()" + [mk_loc (1, 13) (1, 16); mk_loc (1, 29) (1, 32)]; + "export_default_function" + >:: mk_scope_builder_all_uses_test + "export default function foo() {}; foo()" + [mk_loc (1, 24) (1, 27); mk_loc (1, 34) (1, 37)]; + "export_default_class" + >:: mk_scope_builder_all_uses_test + "export default class Foo {} new Foo()" + [mk_loc (1, 21) (1, 24); mk_loc (1, 32) (1, 35)]; + "computed_property_destructuring" + >:: mk_scope_builder_all_uses_test + "const x = {}; const foo = ''; const {[foo]: bar} = x;" + [ + mk_loc (1, 6) (1, 7); + mk_loc (1, 20) (1, 23); + mk_loc (1, 38) (1, 41); + mk_loc (1, 44) (1, 47); + mk_loc (1, 51) (1, 52); + ]; + "switch" + >:: mk_scope_builder_all_uses_test "switch ('') { case '': const foo = ''; foo; };" []; + (* TODO this should be the output, but there is a bug: + [mk_loc (1, 29) (1, 32); + mk_loc (1, 39) (1, 42)]; + *) + "scope_loc_function_declaration" + >:: mk_scope_builder_scope_loc_test + "function a() {};" + [ + (0, mk_loc (1, 0) (1, 16)); + (* program *) + (1, mk_loc (1, 0) (1, 16)); + (* program (lexical) *) + (2, mk_loc (1, 0) (1, 15)); + (* function params and body *) + (3, mk_loc (1, 13) (1, 15)); + ]; + (* block (lexical) *) + "scope_loc_function_expression" + >:: mk_scope_builder_scope_loc_test + "const x = function() {};" + [ + (0, mk_loc (1, 0) (1, 24)); + (* program *) + (1, mk_loc (1, 0) (1, 24)); + (* program (lexical) *) + (2, mk_loc (1, 10) (1, 23)); + (* function name (lexical) *) + (3, mk_loc (1, 10) (1, 23)); + (* function params and body *) + (4, mk_loc (1, 21) (1, 23)); + ]; + (* block (lexical) *) + "scope_loc_arrow_function" + >:: mk_scope_builder_scope_loc_test + "const x = () => 1;" + [ + (0, mk_loc (1, 0) (1, 18)); + (* program *) + (1, mk_loc (1, 0) (1, 18)); + (* program (lexical) *) + (2, mk_loc (1, 10) (1, 17)); + (* function name (lexical) *) + (3, mk_loc (1, 10) (1, 17)); + ]; + (* function params and body *) + "scope_loc_for_in" + >:: mk_scope_builder_scope_loc_test + "for (let a in b) {}" + [ + (0, mk_loc (1, 0) (1, 19)); + (* program *) + (1, mk_loc (1, 0) (1, 19)); + (* program (lexical) *) + (2, mk_loc (1, 0) (1, 19)); + (* for in (lexical) *) + (3, mk_loc (1, 17) (1, 19)); + ]; + (* block (lexical) 
*) + + ] diff --git a/src/parser_utils/__tests__/signature_verifier_test.ml b/src/parser_utils/__tests__/signature_verifier_test.ml deleted file mode 100644 index 2ead24caea9..00000000000 --- a/src/parser_utils/__tests__/signature_verifier_test.ml +++ /dev/null @@ -1,357 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - - -open OUnit2 -open Test_utils - -let mk_signature_verifier_test ?(prevent_munge = false) contents expected_msgs = - begin fun ctxt -> - let contents = String.concat "\n" contents in - let signature = match Signature_builder.program (parse contents) with - | Ok signature -> signature - | Error _ -> failwith "Signature builder failure!" in - let errors, remote_dependencies = - Signature_builder.Signature.verify ~prevent_munge signature - in - let error_msgs = List.map Signature_builder_deps.Error.to_string @@ - Signature_builder_deps.ErrorSet.elements errors in - let remote_dependency_msgs = List.map Signature_builder_deps.Dep.to_string @@ - Signature_builder_deps.DepSet.elements remote_dependencies in - let msgs = error_msgs @ remote_dependency_msgs in - let printer = String.concat "; " in - assert_equal ~ctxt - ~cmp:(eq printer) - ~printer - ~msg:"Results don't match!" - expected_msgs msgs - end - -let tests = "signature_verifier" >::: [ - "export_number_literal" >:: mk_signature_verifier_test - ["export default 0;"] - []; - - "export_function_literal" >:: mk_signature_verifier_test - ["export default function(x: number): number { return x };"] - []; - - "export_function_literal_check1" >:: mk_signature_verifier_test - ["export default function(x): number { return x };"] - ["Expected annotation @ (1, 24) to (1, 25)"]; - - "export_function_literal_check2" >:: mk_signature_verifier_test - ["export default function(x: number) { return x };"] - ["Expected annotation @ (1, 15) to (1, 47)"]; - - "export_function_reference" >:: mk_signature_verifier_test - ["function foo(x: number): number { return x }"; - "export default foo;"] - []; - - "export_function_reference_check1" >:: mk_signature_verifier_test - ["function foo(x): number { return x }"; - "export default foo;"] - ["Expected annotation @ (1, 13) to (1, 14)"]; - - "export_function_reference_check2" >:: mk_signature_verifier_test - ["function foo(x: number) { return x }"; - "export default foo;"] - ["Expected annotation @ (1, 9) to (1, 12)"]; - - "export_object_literal_property_literal" >:: mk_signature_verifier_test - ["export default { p: 0 };"] - []; - - "export_object_literal_property_reference" >:: mk_signature_verifier_test - ["var x: number = 0;"; - "export default { p: x };"] - []; - - "export_object_literal_property_reference_check" >:: mk_signature_verifier_test - ["var x = 0;"; - "export default { p: x };"] - ["Expected annotation @ (1, 4) to (1, 5)"]; - - "export_class_reference" >:: mk_signature_verifier_test - ["class C {"; - " f: number = 0;"; - " m(x: number): number { return x; }"; - "}"; - "export default C;"] - []; - - "export_class_reference_check1" >:: mk_signature_verifier_test - ["class C {"; - " f = 0;"; - " m(x: number): number { return x; }"; - "}"; - "export default C;"] - ["Expected annotation @ (2, 2) to (2, 8)"]; - - "export_class_reference_check2" >:: mk_signature_verifier_test - ["class C {"; - " f: number = 0;"; - " m(x): number { return x; }"; - "}"; - "export default C;"] - ["Expected annotation @ (3, 4) to (3, 5)"]; - - 
"export_class_reference_check3" >:: mk_signature_verifier_test - ["class C {"; - " f: number = 0;"; - " m(x: number) { return x; }"; - "}"; - "export default C;"] - ["Expected annotation @ (3, 3) to (3, 28)"]; - - "type_alias_dependencies" >:: mk_signature_verifier_test - ["type T1 = number;"; - "type T2 = number;"; - "type T3 = number;"; - "class C {"; - " f: T1 = 0;"; - " m(x: T2): T3 { return x; }"; - "}"; - "export default C;"] - []; - - "class_dependencies" >:: mk_signature_verifier_test - ["class D { f: number = 0; }"; - "class C {"; - " f: D = new D;"; - " m(x: D): D { return x; }"; - "}"; - "export default C;"] - []; - - "class_dependencies_check" >:: mk_signature_verifier_test - ["class D { f = 0; }"; - "class C {"; - " f: D = new D;"; - " m(x: D): D { return x; }"; - "}"; - "export default C;"] - ["Expected annotation @ (1, 10) to (1, 16)"]; - - "export_new_typecast" >:: mk_signature_verifier_test - ["class D { f: number = 0; }"; - "class C {"; - " f: D = new D;"; - " m(x: D): D { return x; }"; - "}"; - "export default (new C: C);"] - []; - - "export_new_typecast_check" >:: mk_signature_verifier_test - ["class D { f = 0; }"; - "class C {"; - " f: D = new D;"; - " m(x: D): D { return x; }"; - "}"; - "export default (new C: C);"] - ["Expected annotation @ (1, 10) to (1, 16)"]; - - "recursive_dependencies" >:: mk_signature_verifier_test - ["class C {"; - " f: C = new C;"; - " m(x: C): C { return x; }"; - "}"; - "export default C;"] - []; - - "recursive_dependencies_check" >:: mk_signature_verifier_test - ["class C {"; - " f = new C;"; - " m(x: C): C { return x; }"; - "}"; - "export default C;"] - ["Expected annotation @ (2, 2) to (2, 12)"]; - - "typeof_dependencies" >:: mk_signature_verifier_test - ["var x: number = 0"; - "class C {"; - " p: typeof x = 0"; - "}"; - "export default (new C: C);"] - []; - - "typeof_dependencies_check" >:: mk_signature_verifier_test - ["var x = 0"; - "class C {"; - " p: typeof x = 0"; - "}"; - "export default (new C: C);"] - ["Expected annotation @ (1, 4) to (1, 5)"]; - - "const_initializer" >:: mk_signature_verifier_test - ["const x = 0"; - "export default { x };"] - []; - - "array_literal" >:: mk_signature_verifier_test - ["const x = 0"; - "var y = false"; - "export default [ x, y ];"] - ["Expected annotation @ (2, 4) to (2, 5)"]; - - "void_function" >:: mk_signature_verifier_test - ["function foo() {}"; - "export default foo;"] - []; - - "void_generator" >:: mk_signature_verifier_test - ["function* foo() { yield 0; }"; - "export default foo;"] - ["Expected annotation @ (1, 10) to (1, 13)"]; - - "import_default_dependencies" >:: mk_signature_verifier_test - ["import x from './import_default_dependencies_helper';"; - "class C {"; - " p: typeof x = 0"; - "}"; - "export default (new C: C);"] - ["import { default } from './import_default_dependencies_helper'"]; - - "import_type_dependencies" >:: mk_signature_verifier_test - ["import type { T1, T2, T3 } from './import_type_dependencies_helper';"; - "class C {"; - " f: T1 = 0;"; - " m(x: T2): T3 { return x; }"; - "}"; - "export default C;"] - ["import type { T1 } from './import_type_dependencies_helper'"; - "import type { T2 } from './import_type_dependencies_helper'"; - "import type { T3 } from './import_type_dependencies_helper'"]; - - "qualified_references" >:: mk_signature_verifier_test - ["import M1 from './qualified_references_helper';"; - "import type M2 from './qualified_references_helper';"; - "class C {"; - " m(x: M1.T): M2.T { return x; }"; - "}"; - "export default C;"] - ["import type { default 
} from './qualified_references_helper'"; - "import { default } from './qualified_references_helper'"]; - - "hoisted_requires" >:: mk_signature_verifier_test - ["const M = require('./hoisted_requires_helper');"; - "if (Math.random() < 0.5) {"; - " var { D } = require('./hoisted_requires_helper');"; - "} else {"; - " var { D } = require('./hoisted_requires_helper');"; - "}"; - "var D = 0;"; - "class C extends M.D {"; - " f: D = 0;"; - "}"; - "module.exports = C;"] - ["Expected annotation @ (7, 4) to (7, 5)"; - "import { D } from './hoisted_requires_helper'"; - "import { D } from './hoisted_requires_helper'"; - "require('./hoisted_requires_helper')"]; - - "hoisted_locals" >:: mk_signature_verifier_test - ["const M = require('./hoisted_locals_helper');"; - "if (Math.random() < 0.5) {"; - " var D = 0;"; - "} else {"; - " var D = false;"; - "}"; - "class C extends M.D {"; - " f: D = 0;"; - "}"; - "module.exports = C;"] - ["Unexpected toplevel definition that needs hoisting @ (3, 6) to (3, 7)"; - "Unexpected toplevel definition that needs hoisting @ (5, 6) to (5, 7)"; - "require('./hoisted_locals_helper')"]; - - "dynamic_requires" >:: mk_signature_verifier_test - ["module.exports = require('./dynamic_requires_helper');"] - ["require('./dynamic_requires_helper')"]; - - "scope_extrusion" >:: mk_signature_verifier_test - ["{"; - " class C {}"; - " var x: C = new C;"; - "}"; - "class C {"; - " f = 0;"; - "}"; - "module.exports = x;"] - ["Unexpected toplevel definition that needs hoisting @ (3, 6) to (3, 7)"]; - - "scope_extrusion_nested" >:: mk_signature_verifier_test - ["{"; - " class C {}"; - " let y = 0;"; - " if (b) {"; - " var x: C = new C;"; - " }"; - "}"; - "class C {"; - " f = 0;"; - "}"; - "module.exports = { x, y };"] - ["Unexpected toplevel definition that needs hoisting @ (5, 8) to (5, 9)"; - "global value: y"]; - - "report_all_errors" >:: mk_signature_verifier_test - ["class A {"; - " f = (x: number) => x; // C"; - "}"; - "module.exports = {"; - " a: A, // A"; - " b: (x: string) => x, // B"; - "};"] - ["Expected annotation @ (2, 2) to (2, 23)"; - "Expected annotation @ (6, 5) to (6, 21)"]; - - "munged_methods_ignored" >:: mk_signature_verifier_test - ["class C {"; - " _method() { return 1; }"; - "}"; - "export default C;"] - []; - - "munged_methods_not_ignored_if_directive" >:: mk_signature_verifier_test - ~prevent_munge:true - ["class C {"; - " _method() { return 1; }"; - "}"; - "export default C;"] - ["Expected annotation @ (2, 8) to (2, 24)"]; - - "munged_fields_ignored" >:: mk_signature_verifier_test - ["class C {"; - " _method = () => { return 1; }"; - "}"; - "export default C;"] - []; - - "munged_fields_not_ignored_if_directive" >:: mk_signature_verifier_test - ~prevent_munge:true - ["class C {"; - " _method = () => { return 1; }"; - "}"; - "export default C;"] - ["Expected annotation @ (2, 1) to (2, 30)"]; - - "propTypes_static_ignored" >:: mk_signature_verifier_test - ["class C {"; - " static propTypes = {}"; - "}"; - "export default C;"] - []; - - "propTypes_member_failure" >:: mk_signature_verifier_test - ["class C {"; - " propTypes = {}"; - "}"; - "export default C;"] - ["Expected annotation @ (2, 1) to (2, 15)"]; -] diff --git a/src/parser_utils/__tests__/ssa_builder_test.ml b/src/parser_utils/__tests__/ssa_builder_test.ml index eb85952b0f2..7e0670f025a 100644 --- a/src/parser_utils/__tests__/ssa_builder_test.ml +++ b/src/parser_utils/__tests__/ssa_builder_test.ml @@ -1,749 +1,388 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. 
and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - open OUnit2 open Test_utils +module LocMap = Loc_collections.LocMap -module LocMap = Utils_js.LocMap - -let mk_ssa_builder_test contents expected_values = - begin fun ctxt -> - let values = Ssa_builder.program (parse contents) in - let printer = Ssa_api.print_values in - assert_equal ~ctxt - ~cmp:(eq printer) - ~printer - ~msg:"SSA values don't match!" - expected_values values - end +let mk_ssa_builder_test contents expected_values ctxt = + let values = Ssa_builder.program (parse contents) in + let printer = Ssa_api.print_values in + assert_equal + ~ctxt + ~cmp:(eq printer) + ~printer + ~msg:"SSA values don't match!" + expected_values + values -let mk_write pos1 pos2 = - Ssa_api.Write (mk_loc pos1 pos2) +let mk_write pos1 pos2 = Ssa_api.Write (mk_loc pos1 pos2) -let tests = "ssa_builder" >::: [ - "var" >:: mk_ssa_builder_test - "function foo(x) { +let tests = + "ssa_builder" + >::: [ + "var" + >:: mk_ssa_builder_test + "function foo(x) { var y; if (x) y = 123; return y; }" - LocMap.( - empty |> - add (mk_loc (3, 11) (3, 12)) [ (* x *) - mk_write (1, 13) (1, 14); - ] |> - add (mk_loc (4, 14) (4, 15)) [ (* y *) - Ssa_api.uninitialized; - mk_write (3, 14) (3, 15); - ] - ); - "var_hoist" >:: mk_ssa_builder_test - "function foo(x) { + LocMap.( + empty + |> add (mk_loc (3, 11) (3, 12)) [(* x *) mk_write (1, 13) (1, 14)] + |> add + (mk_loc (4, 14) (4, 15)) + [(* y *) Ssa_api.uninitialized; mk_write (3, 14) (3, 15)]); + "var_hoist" + >:: mk_ssa_builder_test + "function foo(x) { y = x; var y; return y; }" - LocMap.( - empty |> - add (mk_loc (2, 11) (2, 12)) [ (* x *) - mk_write (1, 13) (1, 14); - ] |> - add (mk_loc (4, 14) (4, 15)) [ (* y *) - mk_write (2, 7) (2, 8); - ] - ); - "let" >:: mk_ssa_builder_test - "function foo() { \ - let x = 0; \ - return x; \ - }" - LocMap.( - empty |> - add (mk_loc (1, 35) (1, 36)) [ - mk_write (1, 21) (1, 22); - ] - ); - "let_update" >:: mk_ssa_builder_test - "function foo() { \ - let x = 0; \ - x++; \ - return x; \ - }" - LocMap.( - empty |> - add (mk_loc (1, 28) (1, 29)) [ - mk_write (1, 21) (1, 22); - ] |> - add (mk_loc (1, 40) (1, 41)) [ - mk_write (1, 28) (1, 29); - ] - ); - "if" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - if (yyy) { \ - yyy = 2; \ - } else { } \ - xxx = yyy; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 44) (1, 47)) [ - mk_write (1, 31) (1, 34); - ] |> - add (mk_loc (1, 77) (1, 80)) [ - mk_write (1, 31) (1, 34); - mk_write (1, 51) (1, 54); - ] - ); - "if_let" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - if (yyy) { \ - let yyy = 2; \ - } else { } \ - xxx = yyy; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 44) (1, 47)) [ - mk_write (1, 31) (1, 34); - ] |> - add (mk_loc (1, 81) (1, 84)) [ - mk_write (1, 31) (1, 34); - ] - ); - "while" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - while (yyy) { \ - yyy = 2; \ - } \ - xxx = yyy; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 47) (1, 50)) [ - mk_write (1, 31) (1, 34); - mk_write (1, 54) (1, 57); - ] |> - add (mk_loc (1, 71) (1, 74)) [ - mk_write (1, 31) (1, 34); - mk_write (1, 54) (1, 57); - ] - ); - "while_let" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - while (yyy) { \ - let yyy = 2; \ - } \ - xxx = yyy; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 47) (1, 50)) [ - mk_write (1, 31) (1, 34); - ] |> - add 
(mk_loc (1, 75) (1, 78)) [ - mk_write (1, 31) (1, 34); - ] - ); - "for" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - for (var zzz = 0; zzz < 3; zzz = zzz + 1) { \ - yyy = 2; \ - } \ - xxx = yyy; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 58) (1, 61)) [ - mk_write (1, 49) (1, 52); - mk_write (1, 67) (1, 70); - ] |> - add (mk_loc (1, 73) (1, 76)) [ - mk_write (1, 49) (1, 52); - mk_write (1, 67) (1, 70); - ] |> - add (mk_loc (1, 101) (1, 104)) [ - mk_write (1, 31) (1, 34); - mk_write (1, 84) (1, 87); - ] - ); - "for_let" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - for (let zzz = 0; zzz < 3; zzz = zzz + 1) { \ - yyy = 2; \ - } \ - xxx = yyy; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 58) (1, 61)) [ - mk_write (1, 49) (1, 52); - mk_write (1, 67) (1, 70); - ] |> - add (mk_loc (1, 73) (1, 76)) [ - mk_write (1, 49) (1, 52); - mk_write (1, 67) (1, 70); - ] |> - add (mk_loc (1, 101) (1, 104)) [ - mk_write (1, 31) (1, 34); - mk_write (1, 84) (1, 87); - ] - ); - "switch" >:: mk_ssa_builder_test - "(function() { \ - var a = 0; \ - switch (a + 1) { \ - case a: \ - a = a + 1; \ - case a + 1: \ - a = a + 1; \ - default: \ - a = a + 1; \ - } \ - return a; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 33) (1, 34)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 47) (1, 48)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 54) (1, 55)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 66) (1, 67)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 77) (1, 78)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 50) (1, 51); - ] |> - add (mk_loc (1, 97) (1, 98)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 73) (1, 74); - ] |> - add (mk_loc (1, 113) (1, 114)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 93) (1, 94); - ] - ); - "try" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - try { \ - yyy = 2; \ - } catch (e) { \ - xxx = yyy; \ - } \ - return xxx; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 75) (1, 78)) [ - mk_write (1, 31) (1, 34); - mk_write (1, 46) (1, 49); - ] |> - add (mk_loc (1, 89) (1, 92)) [ - mk_write (1, 18) (1, 21); - mk_write (1, 69) (1, 72); - ] - ); - "closure" >:: mk_ssa_builder_test - "(function() { \ - var xxx = 0; \ - let yyy = 1; \ - function foo() { \ - xxx = yyy; \ - } \ - yyy = 2; \ - foo(); \ - return xxx; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 63) (1, 66)) [ - Ssa_api.uninitialized; - mk_write (1, 31) (1, 34); - mk_write (1, 70) (1, 73) - ] |> - add (mk_loc (1, 79) (1, 82)) [ - mk_write (1, 49) (1, 52); - ] |> - add (mk_loc (1, 93) (1, 96)) [ - mk_write (1, 18) (1, 21); - mk_write (1, 57) (1, 60); - ] - ); - "break_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - while (x) { \ - x = 1; \ - break; \ - x; \ - x = 2; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 32) (1, 33)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 70) (1, 71)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - ] - ); - "continue_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - while (x) { \ - x = 1; \ - continue; \ - x; \ - x = 2; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 32) (1, 33)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - ] |> - add (mk_loc (1, 73) (1, 74)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - ] - ); - "break_for" >:: mk_ssa_builder_test - "(function() { \ - for (var x = 0; x < 10; x++) { \ - x = 1; \ - break; \ - x; \ - x = 2; \ 
- } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 30) (1, 31)) [ - mk_write (1, 23) (1, 24); - ] |> - add (mk_loc (1, 78) (1, 79)) [ - mk_write (1, 23) (1, 24); - mk_write (1, 45) (1, 46); - ] - ); - "continue_for" >:: mk_ssa_builder_test - "(function() { \ - for (var x = 0; x < 10; x++) { \ - x = 1; \ - continue; \ - x; \ - x = 2; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 30) (1, 31)) [ - mk_write (1, 23) (1, 24); - mk_write (1, 38) (1, 39); - ] |> - add (mk_loc (1, 38) (1, 39)) [ - mk_write (1, 45) (1, 46); - ] |> - add (mk_loc (1, 81) (1, 82)) [ - mk_write (1, 23) (1, 24); - mk_write (1, 38) (1, 39); - ] - ); - "break_switch" >:: mk_ssa_builder_test - "(function() { \ - var a = 0; \ - switch (a + 1) { \ - case a: \ - a = a + 1; \ - break; \ - case a + 1: \ - a = a + 1; \ - default: \ - a = a + 1; \ - } \ - return a; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 33) (1, 34)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 47) (1, 48)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 54) (1, 55)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 73) (1, 74)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 84) (1, 85)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 104) (1, 105)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 80) (1, 81); - ] |> - add (mk_loc (1, 120) (1, 121)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 50) (1, 51); - mk_write (1, 100) (1, 101); - ] - ); - "break_labeled" >:: mk_ssa_builder_test - "(function() { \ - var a = 0; \ - L: { \ - a = a + 1; \ - break L; \ - a = a + 1; \ - } \ - return a; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 34) (1, 35)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 70) (1, 71)) [ - mk_write (1, 30) (1, 31); - ] - ); - "break_labeled_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - L: while (x) { \ - x = 1; \ - break L; \ - x; \ - x = 2; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 35) (1, 36)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 75) (1, 76)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 40) (1, 41); - ] - ); - "break_if" >:: mk_ssa_builder_test - "(function() { \ - var a = 0; \ - L: { \ - a = a + 1; \ - if (a) break L; \ - else break L; \ - a = a + 1; \ - } \ - return a; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 34) (1, 35)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 45) (1, 46)) [ - mk_write (1, 30) (1, 31); - ] |> - add (mk_loc (1, 91) (1, 92)) [ - mk_write (1, 30) (1, 31); - ] - ); - "break_if_partial" >:: mk_ssa_builder_test - "(function() { \ - var a = 0; \ - L: { \ - a = a + 1; \ - if (a) break L; \ - a = a + 1; \ - } \ - return a; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 34) (1, 35)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 45) (1, 46)) [ - mk_write (1, 30) (1, 31); - ] |> - add (mk_loc (1, 61) (1, 62)) [ - mk_write (1, 30) (1, 31); - ] |> - add (mk_loc (1, 77) (1, 78)) [ - mk_write (1, 30) (1, 31); - mk_write (1, 57) (1, 58); - ] - ); - "continue_if_partial" >:: mk_ssa_builder_test - "(function() { \ - var a = 0; \ - while (a) { \ - a = a + 1; \ - if (a) continue; \ - a = a + 1; \ - } \ - return a; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 32) (1, 33)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - mk_write (1, 65) (1, 66); - ] |> - add (mk_loc (1, 41) (1, 42)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - mk_write (1, 65) (1, 66); - ] |> - add (mk_loc (1, 52) (1, 53)) [ - mk_write (1, 37) (1, 38); - ] |> - add 
(mk_loc (1, 69) (1, 70)) [ - mk_write (1, 37) (1, 38); - ] |> - add (mk_loc (1, 85) (1, 86)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - mk_write (1, 65) (1, 66); - ] - ); - "continue_labeled_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - L: while (x) { \ - x = 1; \ - continue L; \ - x; \ - x = 2; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 35) (1, 36)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 40) (1, 41); - ] |> - add (mk_loc (1, 78) (1, 79)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 40) (1, 41); - ] - ); - "continue_labeled_do_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - L: do { \ - x = 1; \ - continue L; \ - x; \ - x = 2; \ - } while (x) \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 71) (1, 72)) [ - mk_write (1, 33) (1, 34); - ] |> - add (mk_loc (1, 81) (1, 82)) [ - mk_write (1, 33) (1, 34); - ] - ); - "labeled_break_do_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - L: { \ - do { \ - x = 1; \ - break L; \ - x = 2; \ - } while (true); \ - x = 3; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 90) (1, 91)) [ - mk_write (1, 35) (1, 36); - ] - ); - "labeled_break_try_catch" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - L: \ - try { \ - x = x + 1; \ - } catch (e) { \ - x = e + 1; \ - break L; \ - } finally { \ - x = x + 1; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 38) (1, 39)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 63) (1, 64)) [ - mk_write (1, 54) (1, 55); - ] |> - add (mk_loc (1, 95) (1, 96)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 34) (1, 35); - mk_write (1, 59) (1, 60); - mk_write (1, 91) (1, 92); - ] |> - add (mk_loc (1, 111) (1, 112)) [ - mk_write (1, 91) (1, 92); - ] - ); - "nested_labeled_break_try_catch" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - M: { \ - L: { \ - try { \ - x = x + 1; \ - } catch (e) { \ - x = e + 1; \ - break L; \ - } finally { \ - x = x + 1; \ - break M; \ - } \ - } \ - x = x + 1; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 45) (1, 46)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 70) (1, 71)) [ - mk_write (1, 61) (1, 62); - ] |> - add (mk_loc (1, 102) (1, 103)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 41) (1, 42); - mk_write (1, 66) (1, 67); - mk_write (1, 98) (1, 99); - ] |> - add (mk_loc (1, 142) (1, 143)) [ - mk_write (1, 98) (1, 99); - ] - ); - "throw" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - if (x) { \ - x = 1; \ - throw x; \ - x = 2; \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 29) (1, 30)) [ - mk_write (1, 18) (1, 19); - ] |> - add (mk_loc (1, 47) (1, 48)) [ - mk_write (1, 34) (1, 35); - ] |> - add (mk_loc (1, 66) (1, 67)) [ - mk_write (1, 18) (1, 19); - ] - ); - "nested_while" >:: mk_ssa_builder_test - "(function() { \ - var x = 0; \ - while (x) { \ - x = 1; \ - if (x) { break; } \ - x = 2; \ - while (x) { \ - break; \ - } \ - x = x + 1 \ - } \ - return x; \ - })" - LocMap.( - empty |> - add (mk_loc (1, 32) (1, 33)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 90) (1, 91); - ] |> - add (mk_loc (1, 48) (1, 49)) [ - mk_write (1, 37) (1, 38); - ] |> - add (mk_loc (1, 76) (1, 77)) [ - mk_write (1, 62) (1, 63); - ] |> - add (mk_loc (1, 94) (1, 95)) [ - mk_write (1, 62) (1, 63); - ] |> - add (mk_loc (1, 109) (1, 110)) [ - mk_write (1, 18) (1, 19); - mk_write (1, 37) (1, 38); - mk_write (1, 90) (1, 91); - ] - ); - "JSX" >:: mk_ssa_builder_test - "class Foo {}; ; " 
- LocMap.( - empty |> - add (mk_loc (1, 15) (1, 18)) [ - mk_write (1, 6) (1, 9); - ] |> - add (mk_loc (1, 21) (1, 24)) [ - mk_write (1, 6) (1, 9); - ] |> - add (mk_loc (1, 28) (1, 31)) [ - mk_write (1, 6) (1, 9); - ] - ); -] + LocMap.( + empty + |> add (mk_loc (2, 11) (2, 12)) [(* x *) mk_write (1, 13) (1, 14)] + |> add (mk_loc (4, 14) (4, 15)) [(* y *) mk_write (2, 7) (2, 8)]); + "let" + >:: mk_ssa_builder_test + "function foo() { let x = 0; return x; }" + LocMap.(empty |> add (mk_loc (1, 35) (1, 36)) [mk_write (1, 21) (1, 22)]); + "let_update" + >:: mk_ssa_builder_test + "function foo() { let x = 0; x++; return x; }" + LocMap.( + empty + |> add (mk_loc (1, 28) (1, 29)) [mk_write (1, 21) (1, 22)] + |> add (mk_loc (1, 40) (1, 41)) [mk_write (1, 28) (1, 29)]); + "if" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; if (yyy) { yyy = 2; } else { } xxx = yyy; })" + LocMap.( + empty + |> add (mk_loc (1, 44) (1, 47)) [mk_write (1, 31) (1, 34)] + |> add + (mk_loc (1, 77) (1, 80)) + [mk_write (1, 31) (1, 34); mk_write (1, 51) (1, 54)]); + "if_let" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; if (yyy) { let yyy = 2; } else { } xxx = yyy; })" + LocMap.( + empty + |> add (mk_loc (1, 44) (1, 47)) [mk_write (1, 31) (1, 34)] + |> add (mk_loc (1, 81) (1, 84)) [mk_write (1, 31) (1, 34)]); + "while" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; while (yyy) { yyy = 2; } xxx = yyy; })" + LocMap.( + empty + |> add + (mk_loc (1, 47) (1, 50)) + [mk_write (1, 31) (1, 34); mk_write (1, 54) (1, 57)] + |> add + (mk_loc (1, 71) (1, 74)) + [mk_write (1, 31) (1, 34); mk_write (1, 54) (1, 57)]); + "while_let" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; while (yyy) { let yyy = 2; } xxx = yyy; })" + LocMap.( + empty + |> add (mk_loc (1, 47) (1, 50)) [mk_write (1, 31) (1, 34)] + |> add (mk_loc (1, 75) (1, 78)) [mk_write (1, 31) (1, 34)]); + "for" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; for (var zzz = 0; zzz < 3; zzz = zzz + 1) { yyy = 2; } xxx = yyy; })" + LocMap.( + empty + |> add + (mk_loc (1, 58) (1, 61)) + [mk_write (1, 49) (1, 52); mk_write (1, 67) (1, 70)] + |> add + (mk_loc (1, 73) (1, 76)) + [mk_write (1, 49) (1, 52); mk_write (1, 67) (1, 70)] + |> add + (mk_loc (1, 101) (1, 104)) + [mk_write (1, 31) (1, 34); mk_write (1, 84) (1, 87)]); + "for_let" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; for (let zzz = 0; zzz < 3; zzz = zzz + 1) { yyy = 2; } xxx = yyy; })" + LocMap.( + empty + |> add + (mk_loc (1, 58) (1, 61)) + [mk_write (1, 49) (1, 52); mk_write (1, 67) (1, 70)] + |> add + (mk_loc (1, 73) (1, 76)) + [mk_write (1, 49) (1, 52); mk_write (1, 67) (1, 70)] + |> add + (mk_loc (1, 101) (1, 104)) + [mk_write (1, 31) (1, 34); mk_write (1, 84) (1, 87)]); + "switch" + >:: mk_ssa_builder_test + "(function() { var a = 0; switch (a + 1) { case a: a = a + 1; case a + 1: a = a + 1; default: a = a + 1; } return a; })" + LocMap.( + empty + |> add (mk_loc (1, 33) (1, 34)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 47) (1, 48)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 54) (1, 55)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 66) (1, 67)) [mk_write (1, 18) (1, 19)] + |> add + (mk_loc (1, 77) (1, 78)) + [mk_write (1, 18) (1, 19); mk_write (1, 50) (1, 51)] + |> add + (mk_loc (1, 97) (1, 98)) + [mk_write (1, 18) (1, 19); mk_write (1, 73) (1, 74)] + |> add + (mk_loc (1, 113) (1, 114)) + [mk_write (1, 18) (1, 19); mk_write (1, 93) (1, 94)]); + "try" + >:: mk_ssa_builder_test 
+ "(function() { var xxx = 0; let yyy = 1; try { yyy = 2; } catch (e) { xxx = yyy; } return xxx; })" + LocMap.( + empty + |> add + (mk_loc (1, 75) (1, 78)) + [mk_write (1, 31) (1, 34); mk_write (1, 46) (1, 49)] + |> add + (mk_loc (1, 89) (1, 92)) + [mk_write (1, 18) (1, 21); mk_write (1, 69) (1, 72)]); + "closure" + >:: mk_ssa_builder_test + "(function() { var xxx = 0; let yyy = 1; function foo() { xxx = yyy; } yyy = 2; foo(); return xxx; })" + LocMap.( + empty + |> add + (mk_loc (1, 63) (1, 66)) + [Ssa_api.uninitialized; mk_write (1, 31) (1, 34); mk_write (1, 70) (1, 73)] + |> add (mk_loc (1, 79) (1, 82)) [mk_write (1, 49) (1, 52)] + |> add + (mk_loc (1, 93) (1, 96)) + [mk_write (1, 18) (1, 21); mk_write (1, 57) (1, 60)]); + "break_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; while (x) { x = 1; break; x; x = 2; } return x; })" + LocMap.( + empty + |> add (mk_loc (1, 32) (1, 33)) [mk_write (1, 18) (1, 19)] + |> add + (mk_loc (1, 70) (1, 71)) + [mk_write (1, 18) (1, 19); mk_write (1, 37) (1, 38)]); + "continue_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; while (x) { x = 1; continue; x; x = 2; } return x; })" + LocMap.( + empty + |> add + (mk_loc (1, 32) (1, 33)) + [mk_write (1, 18) (1, 19); mk_write (1, 37) (1, 38)] + |> add + (mk_loc (1, 73) (1, 74)) + [mk_write (1, 18) (1, 19); mk_write (1, 37) (1, 38)]); + "break_for" + >:: mk_ssa_builder_test + "(function() { for (var x = 0; x < 10; x++) { x = 1; break; x; x = 2; } return x; })" + LocMap.( + empty + |> add (mk_loc (1, 30) (1, 31)) [mk_write (1, 23) (1, 24)] + |> add + (mk_loc (1, 78) (1, 79)) + [mk_write (1, 23) (1, 24); mk_write (1, 45) (1, 46)]); + "continue_for" + >:: mk_ssa_builder_test + "(function() { for (var x = 0; x < 10; x++) { x = 1; continue; x; x = 2; } return x; })" + LocMap.( + empty + |> add + (mk_loc (1, 30) (1, 31)) + [mk_write (1, 23) (1, 24); mk_write (1, 38) (1, 39)] + |> add (mk_loc (1, 38) (1, 39)) [mk_write (1, 45) (1, 46)] + |> add + (mk_loc (1, 81) (1, 82)) + [mk_write (1, 23) (1, 24); mk_write (1, 38) (1, 39)]); + "break_switch" + >:: mk_ssa_builder_test + "(function() { var a = 0; switch (a + 1) { case a: a = a + 1; break; case a + 1: a = a + 1; default: a = a + 1; } return a; })" + LocMap.( + empty + |> add (mk_loc (1, 33) (1, 34)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 47) (1, 48)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 54) (1, 55)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 73) (1, 74)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 84) (1, 85)) [mk_write (1, 18) (1, 19)] + |> add + (mk_loc (1, 104) (1, 105)) + [mk_write (1, 18) (1, 19); mk_write (1, 80) (1, 81)] + |> add + (mk_loc (1, 120) (1, 121)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 50) (1, 51); + mk_write (1, 100) (1, 101); + ]); + "break_labeled" + >:: mk_ssa_builder_test + "(function() { var a = 0; L: { a = a + 1; break L; a = a + 1; } return a; })" + LocMap.( + empty + |> add (mk_loc (1, 34) (1, 35)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 70) (1, 71)) [mk_write (1, 30) (1, 31)]); + "break_labeled_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; L: while (x) { x = 1; break L; x; x = 2; } return x; })" + LocMap.( + empty + |> add (mk_loc (1, 35) (1, 36)) [mk_write (1, 18) (1, 19)] + |> add + (mk_loc (1, 75) (1, 76)) + [mk_write (1, 18) (1, 19); mk_write (1, 40) (1, 41)]); + "break_if" + >:: mk_ssa_builder_test + "(function() { var a = 0; L: { a = a + 1; if (a) break L; else break L; a = a + 1; } return a; })" + LocMap.( + empty + |> add (mk_loc (1, 34) (1, 35)) 
[mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 45) (1, 46)) [mk_write (1, 30) (1, 31)] + |> add (mk_loc (1, 91) (1, 92)) [mk_write (1, 30) (1, 31)]); + "break_if_partial" + >:: mk_ssa_builder_test + "(function() { var a = 0; L: { a = a + 1; if (a) break L; a = a + 1; } return a; })" + LocMap.( + empty + |> add (mk_loc (1, 34) (1, 35)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 45) (1, 46)) [mk_write (1, 30) (1, 31)] + |> add (mk_loc (1, 61) (1, 62)) [mk_write (1, 30) (1, 31)] + |> add + (mk_loc (1, 77) (1, 78)) + [mk_write (1, 30) (1, 31); mk_write (1, 57) (1, 58)]); + "continue_if_partial" + >:: mk_ssa_builder_test + "(function() { var a = 0; while (a) { a = a + 1; if (a) continue; a = a + 1; } return a; })" + LocMap.( + empty + |> add + (mk_loc (1, 32) (1, 33)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 37) (1, 38); + mk_write (1, 65) (1, 66); + ] + |> add + (mk_loc (1, 41) (1, 42)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 37) (1, 38); + mk_write (1, 65) (1, 66); + ] + |> add (mk_loc (1, 52) (1, 53)) [mk_write (1, 37) (1, 38)] + |> add (mk_loc (1, 69) (1, 70)) [mk_write (1, 37) (1, 38)] + |> add + (mk_loc (1, 85) (1, 86)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 37) (1, 38); + mk_write (1, 65) (1, 66); + ]); + "continue_labeled_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; L: while (x) { x = 1; continue L; x; x = 2; } return x; })" + LocMap.( + empty + |> add + (mk_loc (1, 35) (1, 36)) + [mk_write (1, 18) (1, 19); mk_write (1, 40) (1, 41)] + |> add + (mk_loc (1, 78) (1, 79)) + [mk_write (1, 18) (1, 19); mk_write (1, 40) (1, 41)]); + "continue_labeled_do_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; L: do { x = 1; continue L; x; x = 2; } while (x) return x; })" + LocMap.( + empty + |> add (mk_loc (1, 71) (1, 72)) [mk_write (1, 33) (1, 34)] + |> add (mk_loc (1, 81) (1, 82)) [mk_write (1, 33) (1, 34)]); + "labeled_break_do_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; L: { do { x = 1; break L; x = 2; } while (true); x = 3; } return x; })" + LocMap.(empty |> add (mk_loc (1, 90) (1, 91)) [mk_write (1, 35) (1, 36)]); + "labeled_break_try_catch" + >:: mk_ssa_builder_test + "(function() { var x = 0; L: try { x = x + 1; } catch (e) { x = e + 1; break L; } finally { x = x + 1; } return x; })" + LocMap.( + empty + |> add (mk_loc (1, 38) (1, 39)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 63) (1, 64)) [mk_write (1, 54) (1, 55)] + |> add + (mk_loc (1, 95) (1, 96)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 34) (1, 35); + mk_write (1, 59) (1, 60); + mk_write (1, 91) (1, 92); + ] + |> add (mk_loc (1, 111) (1, 112)) [mk_write (1, 91) (1, 92)]); + "nested_labeled_break_try_catch" + >:: mk_ssa_builder_test + "(function() { var x = 0; M: { L: { try { x = x + 1; } catch (e) { x = e + 1; break L; } finally { x = x + 1; break M; } } x = x + 1; } return x; })" + LocMap.( + empty + |> add (mk_loc (1, 45) (1, 46)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 70) (1, 71)) [mk_write (1, 61) (1, 62)] + |> add + (mk_loc (1, 102) (1, 103)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 41) (1, 42); + mk_write (1, 66) (1, 67); + mk_write (1, 98) (1, 99); + ] + |> add (mk_loc (1, 142) (1, 143)) [mk_write (1, 98) (1, 99)]); + "throw" + >:: mk_ssa_builder_test + "(function() { var x = 0; if (x) { x = 1; throw x; x = 2; } return x; })" + LocMap.( + empty + |> add (mk_loc (1, 29) (1, 30)) [mk_write (1, 18) (1, 19)] + |> add (mk_loc (1, 47) (1, 48)) [mk_write (1, 34) (1, 35)] + |> add (mk_loc (1, 66) (1, 67)) [mk_write (1, 18) (1, 19)]); + 
"nested_while" + >:: mk_ssa_builder_test + "(function() { var x = 0; while (x) { x = 1; if (x) { break; } x = 2; while (x) { break; } x = x + 1 } return x; })" + LocMap.( + empty + |> add + (mk_loc (1, 32) (1, 33)) + [mk_write (1, 18) (1, 19); mk_write (1, 90) (1, 91)] + |> add (mk_loc (1, 48) (1, 49)) [mk_write (1, 37) (1, 38)] + |> add (mk_loc (1, 76) (1, 77)) [mk_write (1, 62) (1, 63)] + |> add (mk_loc (1, 94) (1, 95)) [mk_write (1, 62) (1, 63)] + |> add + (mk_loc (1, 109) (1, 110)) + [ + mk_write (1, 18) (1, 19); + mk_write (1, 37) (1, 38); + mk_write (1, 90) (1, 91); + ]); + "JSX" + >:: mk_ssa_builder_test + "class Foo {}; ; " + LocMap.( + empty + |> add (mk_loc (1, 15) (1, 18)) [mk_write (1, 6) (1, 9)] + |> add (mk_loc (1, 21) (1, 24)) [mk_write (1, 6) (1, 9)] + |> add (mk_loc (1, 28) (1, 31)) [mk_write (1, 6) (1, 9)]); + "new" + >:: mk_ssa_builder_test + "(function() { var x; new Y(x = 1); return x; })" + LocMap.(empty |> add (mk_loc (1, 42) (1, 43)) [mk_write (1, 27) (1, 28)]); + "new_closure" + >:: mk_ssa_builder_test + "(function() { var x; new Y(function() { x = 1; }); return x; })" + LocMap.( + empty + |> add (mk_loc (1, 58) (1, 59)) [Ssa_api.uninitialized; mk_write (1, 40) (1, 41)]); + ] diff --git a/src/parser_utils/__tests__/test.ml b/src/parser_utils/__tests__/test.ml deleted file mode 100644 index 07e87c072ba..00000000000 --- a/src/parser_utils/__tests__/test.ml +++ /dev/null @@ -1,20 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "parser_utils" >::: [ - Scope_builder_test.tests; - Comment_attacher_test.tests; - Ssa_builder_test.tests; - File_sig_test.tests; - File_exports_resolver_test.tests; - Flow_ast_differ_test.tests; - Signature_verifier_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/parser_utils/__tests__/test_utils.ml b/src/parser_utils/__tests__/test_utils.ml index 6a26907e73f..61828ebbf4d 100644 --- a/src/parser_utils/__tests__/test_utils.ml +++ b/src/parser_utils/__tests__/test_utils.ml @@ -1,27 +1,31 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) let parse contents = - let parse_options = Some { Parser_env.default_parse_options with - Parser_env.esproposal_class_instance_fields = true; - Parser_env.esproposal_class_static_fields = true; - Parser_env.esproposal_export_star_as = true; - } in - let ast, _errors = Parser_flow.program ~parse_options contents in + let parse_options = + Some + { + Parser_env.default_parse_options with + Parser_env.esproposal_class_instance_fields = true; + Parser_env.esproposal_class_static_fields = true; + Parser_env.esproposal_export_star_as = true; + } + in + let (ast, _errors) = Parser_flow.program ~parse_options contents in ast -let mk_loc (line1, column1) (line2, column2) = Loc.{ - source = None; - start = { line = line1; column = column1; offset = 0; }; - _end = { line = line2; column = column2; offset = 0; }; -} +let mk_loc (line1, column1) (line2, column2) = + Loc. 
+ { + source = None; + start = { line = line1; column = column1 }; + _end = { line = line2; column = column2 }; + } -let print_list printer list = - String.concat ", " @@ List.map printer list +let print_list printer list = String.concat ", " @@ Core_list.map ~f:printer list -let eq printer v1 v2 = - printer v1 = printer v2 +let eq printer v1 v2 = printer v1 = printer v2 diff --git a/src/parser_utils/aloc/aLoc.ml b/src/parser_utils/aloc/aLoc.ml new file mode 100644 index 00000000000..f5236f0bafe --- /dev/null +++ b/src/parser_utils/aloc/aLoc.ml @@ -0,0 +1,334 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type table = { + (* This is not strictly necessary, but it allows us to check that the location source matches the + * table source, to avoid confusing issues if we try a lookup with the wrong table. *) + file: File_key.t; + map: RelativeLoc.t ResizableArray.t; +} + +let make_table file = + { + file; + (* TODO maybe start with a rough estimate of the number of locations? *) + map = ResizableArray.make 32; + } + +let shrink_table table = ResizableArray.shrink table.map + +type key = int + +let compare_key : key -> key -> int = Pervasives.compare + +let string_of_key = string_of_int + +type reverse_table = (RelativeLoc.t, key) Hashtbl.t + +let make_empty_reverse_table () = Hashtbl.create 0 + +module Repr : sig + type t + + type kind = + | ALocNone + | Abstract + | Concrete + + val of_loc : Loc.t -> t + + val of_key : File_key.t option -> key -> t + + val source : t -> File_key.t option + + val update_source : (File_key.t option -> File_key.t option) -> t -> t + + (* `is_abstract x` is equivalent to `kind x = Abstract` *) + val is_abstract : t -> bool + + val kind : t -> kind + + (* Raises unless `kind` returns `Abstract` *) + val get_key_exn : t -> key + + (* Raises if `kind` returns `Abstract` *) + val to_loc_exn : t -> Loc.t +end = struct + (* This module uses `Obj.magic` to achieve zero-cost conversions between `Loc.t` and an `ALoc.t` + * which has a concrete underlying representation. Don't modify this unless you understand how + * OCaml lays out values in memory. + * + * In order to achieve the zero-cost conversion, we have to use the same representation as `Loc.t` + * for each `ALoc.t` that has a concrete underlying representation. By doing so, we can avoid any + * new allocations, and simply use `Obj.magic` to return a pointer to the same heap-allocated + * object with a different type. + * + * This also means that we must have a representation for actually-abstract `ALoc.t`s which is + * distinct from every possible value of `Loc.t`. Fortunately, the start and end fields are both + * pointers, and therefore cannot share a representation with any integer. We can take advantage + * of this in order to differentiate between the kinds of `ALoc.t`s. + *) + type kind = + | ALocNone + | Abstract + | Concrete + + type t = Loc.t + + type abstract_t = { + (* This field has the same type in Loc.t *) + abstract_source: File_key.t option; + (* In Loc.t, this is the `start` field. We will use it as an integer key here. 
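+ Editorial note, not in the original source: because this field is an immediate integer while Loc.t's start is a pointer to a position record, calling Obj.is_int on it is what lets is_abstract below distinguish the two layouts at runtime.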
*) + key: key; + } + + let of_loc : Loc.t -> t = Obj.magic + + let of_key (source : File_key.t option) (key : key) : t = + let loc : abstract_t = { abstract_source = source; key } in + Obj.magic loc + + let source { Loc.source; _ } = source + + (* The `key` field is an integer in `abstract_t`, but the field in the corresponding location in + * `Loc.t` is `start`, which is a pointer. We can use this fact to determine whether we have an + * `abstract_t` or a `t` here, since OCaml keeps track of whether a value is an integer or a + * pointer. If it's an integer, the value is abstract. *) + let is_abstract (loc : t) : bool = (Obj.magic loc).key |> Obj.repr |> Obj.is_int + + let kind (loc : t) : kind = + if is_abstract loc then + Abstract + else if Obj.magic loc = Loc.none then + ALocNone + else + Concrete + + let get_key_exn (loc : t) : key = + if is_abstract loc then + let ({ key; _ } : abstract_t) = Obj.magic loc in + key + else + invalid_arg "Can only get the key from an abstract location" + + let to_loc_exn (loc : t) : Loc.t = + if is_abstract loc then + invalid_arg "loc must be concrete" + else + (Obj.magic loc : Loc.t) + + let update_source f loc = + if is_abstract loc then + let loc : abstract_t = Obj.magic loc in + let updated_loc : abstract_t = { loc with abstract_source = f loc.abstract_source } in + (Obj.magic updated_loc : t) + else + Loc.{ loc with source = f loc.source } +end + +type t = Repr.t + +let of_loc = Repr.of_loc + +let abstractify table loc = + match Repr.kind loc with + | Repr.Abstract -> failwith "Cannot abstractify a location which is already abstract" + | Repr.ALocNone -> loc + | Repr.Concrete -> + let underlying_loc = Repr.to_loc_exn loc in + let source = Repr.source loc in + if source <> Some table.file then + failwith "abstractify: File mismatch between location and table" + else + ResizableArray.push table.map (RelativeLoc.of_loc underlying_loc); + let key = ResizableArray.size table.map - 1 in + Repr.of_key source key + +let to_loc_exn = Repr.to_loc_exn + +let to_loc table loc = + if Repr.is_abstract loc then + let source = Repr.source loc in + let key = Repr.get_key_exn loc in + let table = Lazy.force table in + if Some table.file <> source then + failwith "to_loc_safe: File mismatch between location and table" + else + match ResizableArray.get table.map key with + | Some loc -> RelativeLoc.to_loc loc source + | None -> + failwith + (Printf.sprintf + "Unable to look up location with key %S for file %S" + (string_of_key key) + (File_key.to_string table.file)) + else + Repr.to_loc_exn loc + +let to_loc_with_tables tables loc = + let aloc_table = + lazy + (let source = + match Repr.source loc with + | Some x -> x + | None -> failwith "Unexpectedly encountered a location without a source" + in + Lazy.force (Utils_js.FilenameMap.find_unsafe source tables)) + in + to_loc aloc_table loc + +let none = Repr.of_loc Loc.none + +let source = Repr.source + +let update_source = Repr.update_source + +let debug_to_string ?(include_source = false) loc = + if Repr.is_abstract loc then + let source = Repr.source loc in + let key = Repr.get_key_exn loc in + let source = + if include_source then + Printf.sprintf + "%S: " + (match source with + | Some src -> File_key.to_string src + | None -> "") + else + "" + in + let key = string_of_key key in + source ^ key + else + let loc = Repr.to_loc_exn loc in + Loc.debug_to_string ~include_source loc + +let compare loc1 loc2 = + let source_compare = File_key.compare_opt (Repr.source loc1) (Repr.source loc2) in + if source_compare = 0 then 
+ match (Repr.kind loc1, Repr.kind loc2) with + | (Repr.Abstract, Repr.Abstract) -> + let k1 = Repr.get_key_exn loc1 in + let k2 = Repr.get_key_exn loc2 in + compare_key k1 k2 + | (Repr.Concrete, Repr.Concrete) -> + let l1 = Repr.to_loc_exn loc1 in + let l2 = Repr.to_loc_exn loc2 in + let k = Loc.pos_cmp l1.Loc.start l2.Loc.start in + if k = 0 then + Loc.pos_cmp l1.Loc._end l2.Loc._end + else + k + | (Repr.ALocNone, Repr.ALocNone) -> 0 + | (Repr.ALocNone, (Repr.Abstract | Repr.Concrete)) -> -1 + | ((Repr.Abstract | Repr.Concrete), Repr.ALocNone) -> 1 + (* This might be too aggressive. For example, we might sort errors by location, and some errors + * generated about a file might use concrete locations, and others might use abstract ones. For + * now let's wait and see, and if this is too aggressive we can relax it. *) + | (Repr.Abstract, Repr.Concrete) + | (Repr.Concrete, Repr.Abstract) -> + invalid_arg + (Printf.sprintf + "Unable to compare an abstract location with a concrete one. loc1: %s, loc2: %s" + (debug_to_string ~include_source:true loc1) + (debug_to_string ~include_source:true loc2)) + else + source_compare + +let quick_compare loc1 loc2 = + (* String comparisons are expensive, so we should only evaluate this lambda if + * the other information we have ties *) + let source_compare () = File_key.compare_opt (Repr.source loc1) (Repr.source loc2) in + match (Repr.kind loc1, Repr.kind loc2) with + | (Repr.Abstract, Repr.Abstract) -> + let k1 = Repr.get_key_exn loc1 in + let k2 = Repr.get_key_exn loc2 in + let key_compare = compare_key k1 k2 in + if key_compare = 0 then + source_compare () + else + key_compare + | (Repr.Concrete, Repr.Concrete) -> + let l1 = Repr.to_loc_exn loc1 in + let l2 = Repr.to_loc_exn loc2 in + let start_compare = Loc.pos_cmp l1.Loc.start l2.Loc.start in + if start_compare = 0 then + let end_compare = Loc.pos_cmp l1.Loc._end l2.Loc._end in + if end_compare = 0 then + source_compare () + else + end_compare + else + start_compare + | (Repr.ALocNone, Repr.ALocNone) -> 0 + | (Repr.ALocNone, (Repr.Abstract | Repr.Concrete)) -> -1 + | ((Repr.Abstract | Repr.Concrete), Repr.ALocNone) -> 1 + | (Repr.Abstract, Repr.Concrete) -> 1 + | (Repr.Concrete, Repr.Abstract) -> -1 + +let equal loc1 loc2 = compare loc1 loc2 = 0 + +let concretize_if_possible available_tables loc = + if Repr.is_abstract loc then + match Repr.source loc with + (* We shouldn't end up with a location with no source and an abstract representation. It may be + * worth asserting here at some point. 
*) + | None -> loc + | Some source -> + begin + match Utils_js.FilenameMap.find_opt source available_tables with + (* We don't have the right table, so just return the loc *) + | None -> loc + | Some table -> + (* Concretize by converting to a Loc.t, then back to an ALoc.t *) + of_loc (to_loc table loc) + end + else + loc + +let concretize_compare available_tables loc1 loc2 = + if Repr.source loc1 = Repr.source loc2 && Repr.is_abstract loc1 <> Repr.is_abstract loc2 then + let loc1 = concretize_if_possible available_tables loc1 in + let loc2 = concretize_if_possible available_tables loc2 in + compare loc1 loc2 + else + compare loc1 loc2 + +let concretize_equal table loc1 loc2 = concretize_compare table loc1 loc2 = 0 + +let to_string_no_source loc = + if Repr.is_abstract loc then + let key = Repr.get_key_exn loc in + string_of_key key + else + Loc.to_string_no_source (Repr.to_loc_exn loc) + +let lookup_key_if_possible rev_table loc = + match Repr.kind loc with + | Repr.Abstract + | Repr.ALocNone -> + loc + | Repr.Concrete -> + let underlying_loc = Repr.to_loc_exn loc |> RelativeLoc.of_loc in + (match Hashtbl.find_opt (Lazy.force rev_table) underlying_loc with + | Some key -> + begin + match Repr.source loc with + | Some source -> Repr.of_key (Some source) key + | None -> failwith "Unexpectedly encountered a location without a source" + end + | None -> loc) + +let reverse_table table = ResizableArray.to_hashtbl table.map + +module ALocRepresentationDoNotUse = struct + let is_abstract = Repr.is_abstract + + let get_key_exn = Repr.get_key_exn + + let string_of_key = string_of_key +end diff --git a/src/parser_utils/aloc/aLoc.mli b/src/parser_utils/aloc/aLoc.mli new file mode 100644 index 00000000000..20c3ed6fb47 --- /dev/null +++ b/src/parser_utils/aloc/aLoc.mli @@ -0,0 +1,101 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type table + +val make_table : File_key.t -> table + +val shrink_table : table -> unit + +type reverse_table + +type key + +type t + +(* Creates an ALoc.t with a concrete underlying representation *) +val of_loc : Loc.t -> t + +(* Takes an ALoc.t with a concrete underlying representation and makes it abstract. + * + * Preconditions: + * - The given location cannot have already been abstractified. + * - The file key with which the table was created must match the `source` of the given location. + * - This also implies that locations with `None` as the source cannot be abstractified. This + * could be relaxed in the future if necessary. + * *) +val abstractify : table -> t -> t + +(* Takes an ALoc.t with a concrete underlying representation and finds + * the existing abstract representation for it from a reverse table + * + * Preconditions: + * - The file key with which the table was created must match the `source` of the given location. + * *) +val lookup_key_if_possible : reverse_table Lazy.t -> t -> t + +(* Converts an ALoc.t back to a Loc.t, looking up the underlying location in the given table if + * necessary. We will have to look up tables in the shared heap at some point, so making it lazy + * allows us to avoid fetching the table if the underlying location is actually concrete. 
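+ * + * Editorial sketch, not part of the original change: a caller would typically keep the table fetch inside the lazy so it only runs when the location is actually abstract, e.g. "let loc = ALoc.to_loc (lazy (fetch_table file)) aloc", where "fetch_table" is a hypothetical shared-heap accessor.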
*) +val to_loc : table Lazy.t -> t -> Loc.t + +(* Like to_loc, but conveniently picks the correct table for the conversion out of the map *) +val to_loc_with_tables : table Lazy.t Utils_js.FilenameMap.t -> t -> Loc.t + +(* TODO move to ALocRepresentationDoNotUse *) +(* Unsafe: fails if the location has an abstract underlying representation. *) +val to_loc_exn : t -> Loc.t + +(* The specific contents of this string should not be used to influence typechecking, but it can be + * used as a unique identifier within a given source file. *) +val to_string_no_source : t -> string + +val none : t + +val source : t -> File_key.t option + +val update_source : (File_key.t option -> File_key.t option) -> t -> t + +val compare : t -> t -> int + +(* Only does the expensive source compare if positional comparisons tie. + * This is useful for data structures that do not need equal files to be + * sorted closely to each other. + * + * This comparison also does not throw an error when concrete and abstract + * locations are compared. + *) +val quick_compare : t -> t -> int + +val equal : t -> t -> bool + +(* If one of the provided locations has an abstract underlying representation, and the other is + * concrete, attempt to concretize the abstract one using the given table, before comparing *) +val concretize_compare : table Lazy.t Utils_js.FilenameMap.t -> t -> t -> int + +val concretize_equal : table Lazy.t Utils_js.FilenameMap.t -> t -> t -> bool + +(* Stringifies the underlying representation of the ALoc.t, without concretizing it, for debugging + * purposes. If you make any typechecking behavior depend on the result of this function you are a + * bad person. *) +val debug_to_string : ?include_source:bool -> t -> string + +val reverse_table : table -> reverse_table + +val make_empty_reverse_table : unit -> reverse_table + +(* Exposes the internal representation of an ALoc.t. Typechecking behavior should not be + * made to depend on this module. If you find yourself tempted to use anything here, really think + * through your options. *) +module ALocRepresentationDoNotUse : sig + val is_abstract : t -> bool + + (* Should only be called if `is_abstract` returns `true`. Otherwise it will raise *) + val get_key_exn : t -> key + + val string_of_key : key -> string +end diff --git a/src/parser_utils/aloc/dune b/src/parser_utils/aloc/dune new file mode 100644 index 00000000000..258f510a37a --- /dev/null +++ b/src/parser_utils/aloc/dune @@ -0,0 +1,6 @@ +(library + (name flow_parser_utils_aloc) + (wrapped false) + (libraries + flow_common_utils + flow_parser)) diff --git a/src/parser_utils/ast_builder.ml b/src/parser_utils/ast_builder.ml index 315fc7ae1ca..5f838622eb5 100644 --- a/src/parser_utils/ast_builder.ml +++ b/src/parser_utils/ast_builder.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -7,95 +7,180 @@ module Ast = Flow_ast +module Identifiers = struct + let identifier ?(loc = Loc.none) name = Flow_ast_utils.ident_of_source (loc, name) +end + +module Types = struct + module Functions = struct + let params ?(loc = Loc.none) ?rest params = (loc, { Ast.Type.Function.Params.params; rest }) + + let make ?tparams params return = { Ast.Type.Function.tparams; params; return } + end + + module Objects = struct + let make ?(exact = true) ?(inexact = false) properties = + { Ast.Type.Object.exact; inexact; properties } + + let property + ?(loc = Loc.none) + ?(optional = false) + ?(static = false) + ?(proto = false) + ?(_method = false) + ?(variance = None) + key + value = + (loc, { Ast.Type.Object.Property.key; value; optional; static; proto; _method; variance }) + + let getter ?(loc = Loc.none) ?optional ?static ?proto ?_method ?variance key func = + let value = Ast.Type.Object.Property.Get (loc, func) in + let prop = property ~loc ?optional ?static ?proto ?_method ?variance key value in + Ast.Type.Object.Property prop + + let setter ?(loc = Loc.none) ?optional ?static ?proto ?_method ?variance key func = + let value = Ast.Type.Object.Property.Set (loc, func) in + let prop = property ~loc ?optional ?static ?proto ?_method ?variance key value in + Ast.Type.Object.Property prop + end + + let mixed = (Loc.none, Ast.Type.Mixed) + + let annotation t = (Loc.none, t) + + let object_ ?(loc = Loc.none) ?exact ?inexact properties = + (loc, Ast.Type.Object (Objects.make ?exact ?inexact properties)) +end + +let string_literal value = { Ast.StringLiteral.value; raw = Printf.sprintf "%S" value } + +let number_literal value raw = { Ast.NumberLiteral.value; raw } + module Literals = struct open Ast.Literal - let string value = - { value = String value; raw = Printf.sprintf "%S" value; } + let string ?(comments = None) value = + { value = String value; raw = Printf.sprintf "%S" value; comments } + + let number ?(comments = None) value raw = { value = Number value; raw; comments } - let number value raw = - { value = Number value; raw; } + let int value = number (float_of_int value) (Printf.sprintf "%d" value) - let bool is_true = - { value = Boolean is_true; raw = if is_true then "true" else "false" } + let bool ?(comments = None) is_true = + { + value = Boolean is_true; + raw = + ( if is_true then + "true" + else + "false" ); + comments; + } end module Patterns = struct open Ast.Pattern - let identifier str = - Loc.none, Identifier { Identifier. - name = Loc.none, str; - annot = None; - optional = false; - } - - let array str = - Loc.none, Array { Array. - elements = [Some (Array.Element (identifier str))]; - annot = None; - } - - let assignment str expr = - Loc.none, Assignment { Assignment. - left = identifier str; - right = expr; - } + let identifier ?(loc = Loc.none) str = + ( loc, + Identifier + { + Identifier.name = Flow_ast_utils.ident_of_source (loc, str); + annot = Ast.Type.Missing loc; + optional = false; + } ) + + let array elements = + let elements = + Core_list.map + ~f:(function + | Some i -> + Some (Array.Element (Loc.none, { Array.Element.argument = i; default = None })) + | None -> None) + elements + in + ( Loc.none, + Array + { + Array.elements; + annot = Ast.Type.Missing Loc.none; + comments = Flow_ast_utils.mk_comments_opt (); + } ) let object_ str = - let open Object in - Loc.none, Object { - properties = [Property (Loc.none, { Property. - key = Property.Identifier (Loc.none, str); - pattern = identifier str; - shorthand = true; - })]; - annot = None; - } + Object. 
+ ( Loc.none, + Object + { + properties = + [ + Property + ( Loc.none, + { + Property.key = + Property.Identifier (Flow_ast_utils.ident_of_source (Loc.none, str)); + pattern = identifier str; + default = None; + shorthand = true; + } ); + ]; + annot = Ast.Type.Missing Loc.none; + } ) end module Functions = struct open Ast.Function - let body_block stmts = - BodyBlock (Loc.none, { Ast.Statement.Block. - body = stmts; - }) + let params ?(loc = Loc.none) ?rest ps = (loc, { Ast.Function.Params.params = ps; rest }) - let body_expression expr = - BodyExpression expr + let param ?(loc = Loc.none) ?default argument = (loc, { Ast.Function.Param.argument; default }) - let make ~id ~expression ~params ?(generator=false) ?(async=false) ?body () = - let body = match body with - | Some body -> body - | None -> body_block [] + let body ?(loc = Loc.none) stmts = BodyBlock (loc, { Ast.Statement.Block.body = stmts }) + + let body_expression expr = BodyExpression expr + + let make ~id ?params:params_ ?(generator = false) ?(async = false) ?body:body_ () = + let params = + match params_ with + | Some ps -> ps + | None -> params [] + in + let body = + match body_ with + | Some body_ -> body_ + | None -> body [] in - { id; - params = Loc.none, { Ast.Function.Params.params; rest = None }; + { + id; + params; body; async; - generator = generator; + generator; predicate = None; - expression; - return = Missing Loc.none; + return = Ast.Type.Missing Loc.none; tparams = None; + sig_loc = Loc.none; } end module Classes = struct open Ast.Class + let implements ?targs id = (Loc.none, { Implements.id; targs }) + (* TODO: add method_ and property *) - let make ?super ?id elements = - let extends = match super with - | None -> None - | Some expr -> Some (Loc.none, { Extends.expr; targs = None }) + let make ?super ?(implements = []) ?id elements = + let extends = + match super with + | None -> None + | Some expr -> Some (Loc.none, { Extends.expr; targs = None }) in - { id; - body = Loc.none, { Body.body = elements }; + { + id; + body = (Loc.none, { Body.body = elements }); tparams = None; extends; - implements = []; + implements; classDecorators = []; } end @@ -105,268 +190,312 @@ module JSXs = struct let identifier name = Identifier (Loc.none, { Identifier.name }) - let attr_identifier name = Attribute.Identifier (Loc.none, { Identifier.name }) + let attr_identifier ?(loc = Loc.none) name = Attribute.Identifier (loc, { Identifier.name }) let attr_literal lit = Attribute.Literal (Loc.none, lit) - let attr name value = Opening.Attribute (Loc.none, { Attribute.name; value }) - - let element ?selfclosing:(selfClosing=false) ?attrs:(attributes=[]) ?(children=[]) name = - { openingElement = Loc.none, { Opening.name; selfClosing; attributes }; - closingElement = if selfClosing then None else Some (Loc.none, { Closing.name }); - children; + let attr ?(loc = Loc.none) name value = Opening.Attribute (loc, { Attribute.name; value }) + + let element ?selfclosing:(selfClosing = false) ?attrs:(attributes = []) ?(children = []) name = + { + openingElement = (Loc.none, { Opening.name; selfClosing; attributes }); + closingElement = + ( if selfClosing then + None + else + Some (Loc.none, { Closing.name }) ); + children = (Loc.none, children); } - let child_element ?(loc=Loc.none) ?selfclosing ?attrs ?children name = - loc, Element (element ?selfclosing ?attrs ?children name) + let child_element ?(loc = Loc.none) ?selfclosing ?attrs ?children name = + (loc, Element (element ?selfclosing ?attrs ?children name)) end module Statements = struct 
open Ast.Statement - let empty () = Loc.none, Empty + let empty () = (Loc.none, Empty) - let block children = - Loc.none, Block { Block.body = children } + let block children = (Loc.none, Block { Block.body = children }) - let while_ test body = - Loc.none, While { While.test; body } + let while_ test body = (Loc.none, While { While.test; body }) - let do_while body test = - Loc.none, DoWhile { DoWhile.body; test } + let do_while body ?comments test = (Loc.none, DoWhile { DoWhile.body; test; comments }) let for_ init test update body = - Loc.none, For { For. - init = Some (For.InitExpression init); - test; - update; - body; - } + (Loc.none, For { For.init = Some (For.InitExpression init); test; update; body }) - let expression ?directive expression = - Loc.none, Expression { Expression.expression; directive } + let for_in ?(each = false) left right body = (Loc.none, ForIn { ForIn.left; right; body; each }) - let labeled label body = - Loc.none, Labeled { Labeled.label; body } + let for_in_declarator ?(kind = Ast.Statement.VariableDeclaration.Var) declarations = + ForIn.LeftDeclaration (Loc.none, { VariableDeclaration.declarations; kind }) - let variable_declarator_generic id init = - Loc.none, { VariableDeclaration.Declarator. - id; - init; - } + let for_in_pattern patt = ForIn.LeftPattern patt - let variable_declarator ?init str = - Loc.none, { VariableDeclaration.Declarator. - id = Patterns.identifier str; - init; - } + let for_of ?(async = false) left right body = (Loc.none, ForOf { ForOf.left; right; body; async }) + + let for_of_declarator ?(kind = Ast.Statement.VariableDeclaration.Var) declarations = + ForOf.LeftDeclaration (Loc.none, { VariableDeclaration.declarations; kind }) + + let for_of_pattern patt = ForOf.LeftPattern patt + + let expression ?(loc = Loc.none) ?directive expression = + (loc, Expression { Expression.expression; directive }) + + let labeled label body = (Loc.none, Labeled { Labeled.label; body }) + + let variable_declarator_generic id init = (Loc.none, { VariableDeclaration.Declarator.id; init }) + + let variable_declarator ?init ?(loc = Loc.none) str = + (loc, { VariableDeclaration.Declarator.id = Patterns.identifier ~loc str; init }) let variable_declaration - ?(kind = Ast.Statement.VariableDeclaration.Var) - declarations = - Loc.none, VariableDeclaration { VariableDeclaration.kind; declarations; } - - let function_declaration ?(loc=Loc.none) ?(params=[]) ?body id = - let body = match body with - | Some stmts -> Some (Functions.body_block stmts) - | None -> None - in - let fn = Functions.make ~params ~id:(Some id) ?body ~expression:false () in - loc, FunctionDeclaration fn + ?(kind = Ast.Statement.VariableDeclaration.Var) ?(loc = Loc.none) declarations = + (loc, VariableDeclaration { VariableDeclaration.kind; declarations }) + + let let_declaration declarations = + variable_declaration ~kind:Ast.Statement.VariableDeclaration.Let declarations - let class_declaration ?super ?id elements = - Loc.none, ClassDeclaration (Classes.make ?super ?id elements) + let const_declaration declarations = + variable_declaration ~kind:Ast.Statement.VariableDeclaration.Const declarations - let if_ test consequent alternate = - Loc.none, If { If.test; consequent; alternate } + let function_declaration ?(loc = Loc.none) ?(async = false) ?(generator = false) ?params ?body id + = + let fn = Functions.make ?params ~id:(Some id) ~async ~generator ?body () in + (loc, FunctionDeclaration fn) - let return expr = - Loc.none, Return { Return.argument = expr } + let class_declaration ?super 
?implements ?id elements = + (Loc.none, ClassDeclaration (Classes.make ?super ?implements ?id elements)) + + let if_ ?comments test consequent alternate = + (Loc.none, If { If.test; consequent; alternate; comments }) + + let return ?(loc = Loc.none) ?comments expr = (loc, Return { Return.argument = expr; comments }) let directive txt = - let expr = Loc.none, Ast.Expression.Literal (Literals.string txt) in + let expr = (Loc.none, Ast.Expression.Literal (Literals.string txt)) in expression ~directive:txt expr + + let switch discriminant cases = (Loc.none, Switch { Switch.discriminant; cases }) + + let switch_case ?(loc = Loc.none) ?test consequent = (loc, { Switch.Case.test; consequent }) + + let break ?comments ?label () = (Loc.none, Break { Break.label; comments }) + + let with_ _object body = (Loc.none, With { With._object; body }) + + let enum_declaration ?(loc = Loc.none) id body = + (loc, EnumDeclaration { EnumDeclaration.id; body }) + + module EnumDeclarations = struct + open EnumDeclaration + + let initialized_member ?(loc = Loc.none) id init_value = + (loc, { InitializedMember.id; init = (Loc.none, init_value) }) + + let defaulted_member ?(loc = Loc.none) id = (loc, { DefaultedMember.id }) + + let boolean_body ?(explicit_type = false) members = + BooleanBody { BooleanBody.members; explicitType = explicit_type } + + let number_body ?(explicit_type = false) members = + NumberBody { NumberBody.members; explicitType = explicit_type } + + let string_defaulted_body ?(explicit_type = false) members = + let members = StringBody.Defaulted members in + StringBody { StringBody.members; explicitType = explicit_type } + + let string_initialized_body ?(explicit_type = false) members = + let members = StringBody.Initialized members in + StringBody { StringBody.members; explicitType = explicit_type } + + let symbol_body members = SymbolBody { SymbolBody.members } + end end module Expressions = struct open Ast.Expression - let identifier name = - Loc.none, Identifier (Loc.none, name) + let identifier ?(loc = Loc.none) ?(comments = None) name = + (loc, Identifier (loc, { Ast.Identifier.name; comments })) - let call_node ?targs ?(args=[]) callee = { Call.callee; targs; arguments = args } + let array ?comments elements = (Loc.none, Array { Array.elements; comments }) - let call ?(args=[]) callee = - Loc.none, Call (call_node ~args callee) + let call_node ?targs ?(args = []) callee = { Call.callee; targs; arguments = args } - let optional_call ~optional ?(args=[]) callee = - Loc.none, OptionalCall { OptionalCall. 
- call = call_node ~args callee; - optional; - } + let call ?(args = []) callee = (Loc.none, Call (call_node ~args callee)) + + let optional_call ~optional ?(args = []) callee = + (Loc.none, OptionalCall { OptionalCall.call = call_node ~args callee; optional }) + + let function_ ?(loc = Loc.none) ?(async = false) ?(generator = false) ?params ?id ?body () = + let fn = Functions.make ~async ~generator ?params ~id ?body () in + (loc, Function fn) + + let arrow_function ?(loc = Loc.none) ?(async = false) ?params ?body () = + let fn = Functions.make ~async ~generator:false ?params ~id:None ?body () in + (loc, ArrowFunction fn) + + let class_ ?super ?id elements = (Loc.none, Class (Classes.make ?super ?id elements)) - let function_ ?(generator=false) ?(params=[]) ?body () = - let fn = Functions.make ~generator ~params ~id:None ?body ~expression:true () in - Loc.none, Function fn + let literal ?(loc = Loc.none) lit = (loc, Literal lit) - let arrow_function ?(params=[]) ?body () = - let fn = Functions.make ~params ~id:None ?body ~expression:true () in - Loc.none, ArrowFunction fn + let assignment left ?operator right = (Loc.none, Assignment { Assignment.operator; left; right }) - let class_ ?super ?id elements = - Loc.none, Class (Classes.make ?super ?id elements) + let binary ~op left right = (Loc.none, Binary { Binary.operator = op; left; right }) - let literal lit = - Loc.none, Literal lit + let plus left right = binary ~op:Binary.Plus left right - let assignment left ?(operator=Ast.Expression.Assignment.Assign) right = - Loc.none, Assignment { Assignment.operator; left; right; } + let minus left right = binary ~op:Binary.Minus left right - let binary ~op left right = - Loc.none, Binary { Binary.operator = op; left; right } + let mult left right = binary ~op:Binary.Mult left right + + let instanceof left right = binary ~op:Binary.Instanceof left right + + let in_ left right = binary ~op:Binary.In left right + + let equal left right = binary ~op:Binary.Equal left right let conditional test consequent alternate = - Loc.none, Conditional { Conditional.test; consequent; alternate } + (Loc.none, Conditional { Conditional.test; consequent; alternate }) - let logical ~op left right = - Loc.none, Logical { Logical.operator = op; left; right } + let logical ~op left right = (Loc.none, Logical { Logical.operator = op; left; right }) - let unary ~op argument = - Loc.none, Unary { Unary. - operator = op; - prefix = true; - argument; - } + let unary ?(comments = None) ~op argument = + (Loc.none, Unary { Unary.operator = op; argument; comments }) - let update ~op ~prefix argument = - Loc.none, Update { Update. 
- operator = op; - prefix; - argument; - } + let unary_plus (b : (Loc.t, Loc.t) Ast.Expression.t) = unary ~op:Unary.Plus b - let object_property_key (k: string) = - Object.Property.Identifier (Loc.none, k) + let unary_minus (b : (Loc.t, Loc.t) Ast.Expression.t) = unary ~op:Unary.Minus b - let object_property_key_literal k = - Object.Property.Literal (Loc.none, k) + let unary_not (b : (Loc.t, Loc.t) Ast.Expression.t) = unary ~op:Unary.Not b - let object_property_key_literal_from_string (k: string) = - Object.Property.Literal (Loc.none, Literals.string k) + let update ~op ~prefix argument = (Loc.none, Update { Update.operator = op; prefix; argument }) - let object_method ?body ?(params=[]) ?(generator=false) ?(async=false) key = - let fn = Functions.make ~id:None ~expression:true ~params ~generator ~async ?body () in + let increment ~prefix argument = update ~op:Update.Increment ~prefix argument + + let decrement ~prefix argument = update ~op:Update.Decrement ~prefix argument + + let object_property_key ?(loc = Loc.none) (k : string) = + Object.Property.Identifier (Flow_ast_utils.ident_of_source (loc, k)) + + let object_property_key_literal ?(loc = Loc.none) k = Object.Property.Literal (loc, k) + + let object_property_key_literal_from_string ?(loc = Loc.none) (k : string) = + Object.Property.Literal (loc, Literals.string k) + + let object_property_computed_key k = Object.Property.Computed k + + let object_method ?body ?params ?(generator = false) ?(async = false) key = + let fn = Functions.make ~id:None ?params ~generator ~async ?body () in let prop = Object.Property.Method { key; value = (Loc.none, fn) } in Object.Property (Loc.none, prop) - let object_property ?(shorthand=false) key value = + let object_property ?(shorthand = false) ?(loc = Loc.none) key value = let module Prop = Object.Property in let prop = Prop.Init { key; value; shorthand } in - Object.Property (Loc.none, prop) + Object.Property (loc, prop) - let object_property_with_literal ?(shorthand=false) k v = - object_property ~shorthand (object_property_key_literal k) v + let object_property_with_literal ?(shorthand = false) ?(loc = Loc.none) k v = + object_property ~shorthand ~loc (object_property_key_literal ~loc k) v - let object_ properties = - Loc.none, Object { Object.properties } + let object_ ?comments ?(loc = Loc.none) properties = (loc, Object { Object.properties; comments }) (* _object.property *) let member ~property _object = - { Member. - _object; - property = Member.PropertyIdentifier (Loc.none, property); - computed = false; + { + Member._object; + property = Member.PropertyIdentifier (Flow_ast_utils.ident_of_source (Loc.none, property)); } (* _object[property] *) let member_computed ~property _object = - { Member. - _object; - property = Member.PropertyIdentifier (Loc.none, property); - computed = true; - } - - let member_computed_expr ~property _object = - { Member. 
- _object; - property = Member.PropertyExpression property; - computed = true; - } + { Member._object; property = Member.PropertyExpression property } - let member_expression expr = - Loc.none, Ast.Expression.Member expr + let member_expression expr = (Loc.none, Ast.Expression.Member expr) - let member_expression_ident_by_name obj (name: string) = - member_expression (member obj ~property: name) + let member_expression_ident_by_name obj (name : string) = + member_expression (member obj ~property:name) - let member_expression_computed_string obj (str: string) = - member_expression (member_computed_expr obj - ~property:(literal (Literals.string str))) + let member_expression_computed_string obj (str : string) = + member_expression (member_computed obj ~property:(literal (Literals.string str))) let optional_member_expression ~optional expr = - Loc.none, OptionalMember { OptionalMember. - member = expr; - optional; - } + (Loc.none, OptionalMember { OptionalMember.member = expr; optional }) + + let new_ ?comments ?targs ?(args = []) callee = + (Loc.none, New { New.callee; targs; arguments = args; comments }) + + let sequence exprs = (Loc.none, Sequence { Sequence.expressions = exprs }) + + let expression expr = Expression expr + + let spread expr = Spread (Loc.none, { SpreadElement.argument = expr }) - let new_ ?targs ?(args=[]) callee = - Loc.none, New { New.callee; targs; arguments = args } + let jsx_element ?(loc = Loc.none) elem = (loc, JSXElement elem) - let sequence exprs = - Loc.none, Sequence { Sequence.expressions = exprs } + let true_ () = literal (Literals.bool true) - let spread expr = - Spread (Loc.none, { SpreadElement.argument = expr }) + let false_ () = literal (Literals.bool false) - let jsx_element ?(loc=Loc.none) elem = - loc, JSXElement elem + let logical_and (l : (Loc.t, Loc.t) Ast.Expression.t) r = logical ~op:Logical.And l r - let true_ () = - literal (Literals.bool true) + let logical_or (l : (Loc.t, Loc.t) Ast.Expression.t) r = logical ~op:Logical.Or l r - let false_ () = - literal (Literals.bool false) + let typecast expression annotation = + (Loc.none, TypeCast { TypeCast.expression; annot = Types.annotation annotation }) - let logical_and (l: (Loc.t, Loc.t) Ast.Expression.t) r = logical ~op:Logical.And l r - let logical_or (l: (Loc.t, Loc.t) Ast.Expression.t) r = logical ~op:Logical.Or l r - let unary_not (b: (Loc.t, Loc.t) Ast.Expression.t) = unary ~op:Unary.Not b + module Literals = struct + let string ?loc value = literal ?loc (Literals.string value) + + let number ?loc value raw = literal ?loc (Literals.number value raw) + + let int ?loc value = literal ?loc (Literals.int value) + + let bool ?loc is_true = literal ?loc (Literals.bool is_true) + end end module Comments = struct - let block txt = Loc.none, Ast.Comment.Block txt - let line txt = Loc.none, Ast.Comment.Line txt + let block ?(loc = Loc.none) txt = (loc, Ast.Comment.Block txt) + + let line ?(loc = Loc.none) txt = (loc, Ast.Comment.Line txt) end -let mk_program stmts = - Loc.none, stmts, [] +let mk_program ?(comments = []) stmts = (Loc.none, stmts, comments) let ast_of_string ~parser str = - let parse_options = Some Parser_env.({ - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = true; - use_strict = false; - }) in + let parse_options = + Some + Parser_env. 
+ { + enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types = true; + use_strict = false; + } + in let env = Parser_env.init_env ~token_sink:None ~parse_options None str in - let (ast, _) = Parser_flow.do_parse - env parser false in + let (ast, _) = Parser_flow.do_parse env parser false in ast -let expression_of_string str = - ast_of_string ~parser:Parser_flow.Parse.expression str +let expression_of_string str = ast_of_string ~parser:Parser_flow.Parse.expression str let statement_of_string str = - let ast_list = ast_of_string - ~parser:(Parser_flow.Parse.module_body ~term_fn:(fun _ -> false)) str in + let ast_list = + ast_of_string ~parser:(Parser_flow.Parse.module_body ~term_fn:(fun _ -> false)) str + in match ast_list with | [ast] -> ast | _ -> failwith "Multiple statements found" let program_of_string str = - let stmts = ast_of_string - ~parser:(Parser_flow.Parse.module_body ~term_fn:(fun _ -> false)) str in + let stmts = + ast_of_string ~parser:(Parser_flow.Parse.module_body ~term_fn:(fun _ -> false)) str + in (Loc.none, stmts, []) diff --git a/src/parser_utils/ast_loc_utils.ml b/src/parser_utils/ast_loc_utils.ml new file mode 100644 index 00000000000..b43b5f3a380 --- /dev/null +++ b/src/parser_utils/ast_loc_utils.ml @@ -0,0 +1,42 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Provides utilities for converting AST locations between ALoc.t and Loc.t *) + +let loc_to_aloc_mapper : (Loc.t, Loc.t, ALoc.t, ALoc.t) Flow_polymorphic_ast_mapper.mapper = + object + inherit [Loc.t, Loc.t, ALoc.t, ALoc.t] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot = ALoc.of_loc + + method on_type_annot = ALoc.of_loc + end + +class abstractifier filename = + object (this) + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t] Flow_polymorphic_ast_mapper.mapper + + val table = ALoc.make_table filename + + method get_table = table + + method abstractify aloc = ALoc.abstractify table aloc + + method on_loc_annot = this#abstractify + + method on_type_annot = this#abstractify + + (* We don't need the comment locations to be abstract *) + method! comment (cmt : ALoc.t Flow_ast.Comment.t) : ALoc.t Flow_ast.Comment.t = cmt + end + +let abstractify_alocs filename ast = + let mapper = new abstractifier filename in + let ast' = mapper#program ast in + let table = mapper#get_table in + ALoc.shrink_table table; + (table, ast') diff --git a/src/parser_utils/ast_loc_utils.mli b/src/parser_utils/ast_loc_utils.mli new file mode 100644 index 00000000000..7c0312d619c --- /dev/null +++ b/src/parser_utils/ast_loc_utils.mli @@ -0,0 +1,15 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Converts a (Loc.t, Loc.t) AST into a (ALoc.t, ALoc.t) AST. Leaves the underlying representation + * of the contained ALoc.ts concrete. *) +val loc_to_aloc_mapper : (Loc.t, Loc.t, ALoc.t, ALoc.t) Flow_polymorphic_ast_mapper.mapper + +(* Converts an (ALoc.t, ALoc.t) AST where the ALoc.ts have an concrete underlying representation to + * one where the ALoc.ts have an abstract underlying representation. 
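+ * + * Editorial sketch, not part of the original diff, of how the two helpers compose: "let (table, aloc_ast) = abstractify_alocs file_key (loc_to_aloc_mapper#program loc_ast)" goes from a (Loc.t, Loc.t) AST to an abstract (ALoc.t, ALoc.t) AST plus the table needed to map its locations back with ALoc.to_loc; "file_key" and "loc_ast" are placeholder names.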
*) +val abstractify_alocs : + File_key.t -> (ALoc.t, ALoc.t) Flow_ast.program -> ALoc.table * (ALoc.t, ALoc.t) Flow_ast.program diff --git a/src/parser_utils/comment_attacher.ml b/src/parser_utils/comment_attacher.ml deleted file mode 100644 index fba4c418f6a..00000000000 --- a/src/parser_utils/comment_attacher.ml +++ /dev/null @@ -1,95 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -module Ast = Flow_ast - -open Flow_ast_visitor - -type attachment_pos = - | LeadingLine - (* NOT IMPLEMENTED *) - (* TODO: Add support for these attachment types *) - | InlineLeft (* On the same line immediately to the left *) - | InlineRight (* On the same line immediately to the right *) - | InlineAtEnd (* Last item on line, takes precedence over `InlineRight` *) - | TrailingLine (* Any non statement level comments, not attachable as `LeadingLine` *) - | TrailingBlock (* Any comments after the last statement within a block *) - -type info = { - (* Resulting map of node loc to list of comments and attachment type *) - attached_comments: ((attachment_pos * Loc.t Ast.Comment.t) list) Utils_js.LocMap.t; - unattached_comments: Loc.t Ast.Comment.t list; -} -module Acc = struct - type t = info - let init (comments: Loc.t Ast.Comment.t list) = { - attached_comments = Utils_js.LocMap.empty; - (* Sort comments into *) - unattached_comments = List.sort - (fun (loc1, _) (loc2, _) -> Loc.compare loc1 loc2) - comments; - } -end - -class comment_attacher ~comments = object(this) - inherit [Acc.t] visitor ~init:(Acc.init comments) as super - - method private check_loc node_loc = - match acc.unattached_comments with - | [] -> () - | (comment_loc, _) as comment::rest -> - let open Loc in - (* Check if comment is on line before node *) - if comment_loc.start.line < node_loc.start.line then begin - this#update_acc (fun acc -> - let existing = match Utils_js.LocMap.get node_loc acc.attached_comments with - | Some e -> e | None -> [] - in - { - attached_comments = Utils_js.LocMap.add node_loc - (existing @ [(LeadingLine, comment)]) - acc.attached_comments; - unattached_comments = rest - }); - this#check_loc node_loc - end - - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let (loc, _) = stmt in - this#check_loc loc; - super#statement stmt - - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let (loc, _) = expr in - this#check_loc loc; - super#expression expr - - method! identifier (expr: Loc.t Ast.Identifier.t) = - let (loc, _) = expr in - this#check_loc loc; - super#identifier expr - - method! object_property (prop: (Loc.t, Loc.t) Ast.Expression.Object.Property.t) = - let (loc, _) = prop in - this#check_loc loc; - super#object_property prop - - method! class_element (elem: (Loc.t, Loc.t) Ast.Class.Body.element) = - let open Ast.Class.Body in - begin match elem with - | Method (loc, _) - | Property (loc, _) - | PrivateField (loc, _) -> this#check_loc loc - end; - super#class_element elem - -end - -let program ~ast = - let (_, _, comments) = ast in - let walk = new comment_attacher ~comments in - walk#eval walk#program ast diff --git a/src/parser_utils/comment_utils.ml b/src/parser_utils/comment_utils.ml new file mode 100644 index 00000000000..4d17e092917 --- /dev/null +++ b/src/parser_utils/comment_utils.ml @@ -0,0 +1,18 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +class ['loc] comments_stripper = + object + inherit ['loc] Flow_ast_mapper.mapper + + method! syntax_opt + : 'internal. ('loc, 'internal) Flow_ast.Syntax.t option -> + ('loc, 'internal) Flow_ast.Syntax.t option = + (fun _ -> None) + end + +let strip_inlined_comments p = (new comments_stripper)#program p diff --git a/src/parser_utils/dep_mapper.ml b/src/parser_utils/dep_mapper.ml deleted file mode 100644 index 90072810ca4..00000000000 --- a/src/parser_utils/dep_mapper.ml +++ /dev/null @@ -1,630 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -module Ast = Flow_ast - -(* -Implements a basic dependence tracker for both values and types (tvars). -It collects, for all the variable declarations, the assignments -to the variable. For tvar dependence, if the variable declaration has -an annotation, then we do not need to track any assignments to it. -Also, we collect dependences for each AST node, which we can think of -as an implicit temporary variable. - -Value flows through heap and function parameters are ignored for now, though -we do introduce HeapLocs for future extension. -*) - -(* TODO Ensure physical equality optimization? *) - -module LocMap = Utils_js.LocMap - -module DepKey = struct - type t = - | Id of Loc.t (* uniquely resolved name def_loc, id *) - | Temp of Loc.t (* expression node in AST *) - | HeapLoc of Loc.t * string (* locations in a heap object, - loc=allocation site, string=prop *) - - let same_file_loc_compare = - let open Loc in - fun loc1 loc2 -> - let k = Loc.pos_cmp loc1.start loc2.start in - if k = 0 then Loc.pos_cmp loc1._end loc2._end - else k - - (* ID < Temp < HeapLoc, and within those based on sub-structure comparison *) - let compare = - fun k1 k2 -> - (match k1, k2 with - | Id l1, Id l2 -> same_file_loc_compare l1 l2 - | Temp l1, Temp l2 -> same_file_loc_compare l1 l2 - | Id _, Temp _ -> -1 - | Temp _, Id _ -> 1 - | Id _, HeapLoc _ -> -1 - | HeapLoc _, Id _ -> 1 - | Temp _, HeapLoc _ -> -1 - | HeapLoc _, Temp _ -> 1 - | HeapLoc (l1, s1), HeapLoc (l2, s2) -> - let lcmp = (same_file_loc_compare l1 l2) in - if (lcmp = 0) then - String.compare s1 s2 - else lcmp - ) -end - -module DepMap = MyMap.Make (DepKey) - -module Dep = struct - type depkind = - | Annotation of (Loc.t, Loc.t) Ast.Type.annotation * string list - | Primitive (* of Ast.Literal.t *) - | Object (* | Function *) - | Depends of DepKey.t list (* TODO set, not list *) - | Incomplete - | NoInfo - | Destructure of DepKey.t * string - type t = { - typeDep: depkind; (* for tvar dependence - not all cases of depkind apply! *) - valDep: depkind; (* for value dependence - not all cases of depkind apply! 
*) - } - - let merge_dep = - let merge_typeDep key type_dep_cur type_dep_new = - match type_dep_cur, type_dep_new with - | _, Incomplete -> Incomplete - | Annotation _, _ -> type_dep_cur - | NoInfo, _ -> type_dep_new - | Incomplete, _ -> type_dep_cur - | Depends tlist, _ -> Depends (List.cons key tlist) - | Primitive, Primitive -> Primitive - | Primitive, _ -> Incomplete - | _,_ -> type_dep_cur - in - let merge_valDep key val_dep_cur val_dep_new = - match val_dep_cur, val_dep_new with - | _, Incomplete -> Incomplete - | Incomplete, _ -> val_dep_cur - | NoInfo, NoInfo -> NoInfo - | NoInfo, Destructure (k,s) -> Destructure (k,s) - | NoInfo, Primitive -> Depends [key] - | NoInfo, _ -> Depends [key] - | Destructure _, _ -> Incomplete - | Primitive, _ -> Incomplete - | Depends tlist, _ -> Depends (List.cons key tlist) - | _, _ -> val_dep_cur - in - fun key cur_t new_t -> - let { typeDep=type_dep_new; valDep=val_dep_new} = new_t in - let { typeDep=type_dep_cur; valDep=val_dep_cur} = cur_t in - { typeDep = merge_typeDep key type_dep_cur type_dep_new; - valDep = merge_valDep key val_dep_cur val_dep_new } - - let print_dep = - let key_to_string key = - match key with - | DepKey.Id d -> - String.concat " " - ["{"; "DEFLOC"; Loc.to_string d; "}"] - | DepKey.Temp l -> - String.concat " " - ["{"; "LOC"; Loc.to_string l; "}"] - | DepKey.HeapLoc (l, s) -> - String.concat " " - ["{"; "HEAPLOC"; Loc.to_string l; s; "}"] - in - let kind_to_string kind = - match kind with - | NoInfo -> "noinfo" - | Depends slist -> - String.concat " " - ["depends"; String.concat ", " (List.map key_to_string slist)] - | Annotation _ -> "annot" - | Incomplete -> "incomplete" - | Object -> "object" - | Primitive -> "prim" - | Destructure (k, _) -> - String.concat " " - ["destruct"; (key_to_string k)] - in - fun key dep -> - let { typeDep; valDep } = dep in - String.concat " " - [key_to_string key; - "->"; - "T ="; kind_to_string typeDep; - "V =" ; kind_to_string valDep] -end - -class mapper = object(this) - inherit Flow_ast_mapper.mapper as super - - val mutable use_def_map = LocMap.empty - method use_def_map = use_def_map - - val mutable dep_map = DepMap.empty - method dep_map = dep_map - - (* TODO: Move this out of the class, and pass the relevant maps to it? *) - method has_single_value_dep (loc: Loc.t) = - let open Dep in - try - let d = LocMap.find loc use_def_map in - let { typeDep = _; valDep } = - DepMap.find (DepKey.Id d) dep_map in - match valDep with - | Depends l -> (List.length l) = 1 - | Primitive -> true - | Destructure _ -> true - | _ -> false - with _ -> false - - method has_no_open_type_dep (loc: Loc.t) = - let open Dep in - try - let d = LocMap.find loc use_def_map in - let { typeDep; valDep = _ } = - DepMap.find (DepKey.Id d) dep_map in - match typeDep with - | Annotation _ -> true - | Primitive -> true - | Object -> true - | _ -> false - (* TODO: recurse in the Depends list *) - with _ -> false - - val merge_dep = Dep.merge_dep - - method! 
program (program: (Loc.t, Loc.t) Ast.program) = - let { Scope_api.scopes; max_distinct=_ } = - Scope_builder.program ~ignore_toplevel:true program in - use_def_map <- IMap.fold (fun _ scope acc -> - LocMap.fold (fun loc { Scope_api.Def.locs; _ } acc -> - (* TODO: investigate whether picking the first location where there could - be multiple is fine in principle *) - LocMap.add loc (Nel.hd locs) acc - ) scope.Scope_api.Scope.locals acc - ) scopes LocMap.empty; - LocMap.iter - (fun _ def_loc -> - let open Dep in - let dep = { typeDep = NoInfo; - valDep = NoInfo } in - dep_map <- DepMap.add (DepKey.Id def_loc) dep dep_map) - use_def_map; - super#program program - - method! function_param_pattern (expr: (Loc.t, Loc.t) Ast.Pattern.t) = - let open Dep in - (match expr with - | _, Ast.Pattern.Identifier id -> - let open Ast.Pattern.Identifier in - let { name = (loc, _); annot; _ } = id in - (try - let d = LocMap.find loc use_def_map in - let key = DepKey.Id d in - match annot with - | None -> - (* Currently, we pretend we don't know anything about caller/callee *) - let dep = { typeDep = Incomplete; - valDep = Incomplete } in - dep_map <- - DepMap.add ~combine:(merge_dep key) (* update_dep *) - key dep dep_map - | Some some_annot -> - let dep = { typeDep = Annotation (some_annot, []); - valDep = Incomplete } in - dep_map <- - DepMap.add ~combine:(merge_dep key) (* update_dep *) - key dep dep_map - with _ -> ()) - | _, _ -> ()); (* Other interesting cases in Pattern applicable here? *) - super#function_param_pattern expr - - method! variable_declarator_pattern ~kind (expr: (Loc.t, Loc.t) Ast.Pattern.t) = - let open Dep in - (match expr with - | _, Ast.Pattern.Identifier id -> - let open Ast.Pattern.Identifier in - let { name = (loc, _); annot; _ } = id in - (try - let d = LocMap.find loc use_def_map in - let key = DepKey.Id d in - match annot with - | None -> - let dep = { typeDep = NoInfo; - valDep = NoInfo } in - (* different from fun param!! 
*) - dep_map <- - DepMap.add ~combine:(merge_dep key) (* update_dep *) - key dep dep_map - | Some some_annot -> (* annotation *) - let dep = { typeDep = Annotation (some_annot, []); - valDep = NoInfo } in - dep_map <- - DepMap.add ~combine:(merge_dep key) (* update_dep *) - key dep dep_map - with _ -> ()) - | _, _ -> ()) - ; - super#variable_declarator_pattern ~kind expr - - (* In DepMap, map id @ loc to Incomplete.*) - method map_id_to_incomplete - (loc: Loc.t) = - let open Dep in - try - let d = LocMap.find loc use_def_map in - let key = DepKey.Id d in - let dep_right = { typeDep = Dep.Incomplete; - valDep = Dep.Incomplete } in - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_right dep_map - with _ -> () - (* Non-renamable vars such as globals do not exist in the scope builder *) - - (* In DepMap, map id @ Loc to Destructure (Temp expr's loc), key *) - method map_id_to_destructure - (loc: Loc.t) - (key: (Loc.t, Loc.t) Ast.Pattern.Object.Property.key) - (expr: (Loc.t, Loc.t) Ast.Expression.t option) = - let open Dep in - try - let d = LocMap.find loc use_def_map in - (* syntax guarantees that in destructuring, rhs is not optional *) - let (loc_e,_) = (match expr with | Some e -> e | None -> raise Not_found) in - match key with - | Ast.Pattern.Object.Property.Identifier iden -> - let _, real_name = iden in - let key = DepKey.Id d in - let dep_right = { typeDep = Incomplete; - valDep = Destructure (DepKey.Temp loc_e, real_name) } in - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_right dep_map - (* Remark: we cannot really pretend rhs is a Destructure expr and - 'depend' on that Destructure. Here is no syntactic representation - such as *s or s->p. Also, we destructure into multiple names at - once, var {a:c,b:d} = s; So, we just say c |-> Destructure s,a, and d - |-> Destructure s,b. *) - | _ -> - (* If the key is not an identifier, then we cannot do a Destructure kind *) - this#map_id_to_incomplete loc - with _ -> () - - method assign_to_variable_declarator_pattern - (pat: (Loc.t, Loc.t) Ast.Pattern.t) - (expr: (Loc.t, Loc.t) Ast.Expression.t option) = - match pat with - | _, Ast.Pattern.Identifier id -> - let open Ast.Pattern.Identifier in - let open Dep in - let { name = (loc, _); annot = _; _ } = id in - (try - let d = LocMap.find loc use_def_map in - let key = DepKey.Id d in - (match expr with - | Some expr' -> - let (loc_e, _) = expr' in - let dep_right = DepMap.find (DepKey.Temp loc_e) dep_map in - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_right dep_map - | None -> - (* treat no-rhs-expression as uninitialized *) - let dep_right = { typeDep = Dep.NoInfo; - valDep = Dep.NoInfo } in - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_right dep_map - ) - with _ -> - ()) - | _, Ast.Pattern.Object o-> - (* Dealing with real destructing depends on actual heap analysis *) - (* For now, we can just map each of these properties to Destructure *) - let open Ast.Pattern.Object in - let { properties; annot = _} = o in - let process_prop = fun p -> - (match p with - | Property (loc,{Property.key=key; pattern; Property.shorthand=shorthand}) -> - (* Note that if pattern is present then shorthand=false, and we - use the pattern as the "name" being declared, or otherwise use - the key. *) - if shorthand then - (* The key could be complex, but we only care if it is in the - use_def_map table. If it is not an identifier, it will not be in - the use_def_map table anyway. The loc here is the location of the - key. 
*) - this#map_id_to_destructure loc key expr - else - (match pattern with - | loc, Ast.Pattern.Identifier _ -> this#map_id_to_destructure loc key expr - | _, _ -> ()) - | RestProperty (_,_) -> ()) - in - List.iter process_prop properties - | _, Ast.Pattern.Array a -> - let open Ast.Pattern.Array in - let { elements; annot = _} = a in - let process_elem = fun e -> - (match e with - | Element (loc, _) -> this#map_id_to_incomplete loc - | RestElement (_,_) -> ()) - in - let process_elem_opt = fun e_opt -> - (match e_opt with - | Some e -> process_elem e - | None -> ()) in - List.iter process_elem_opt elements - | _, _ -> () (* Deal with other names getting values? *) - - method! variable_declarator ~kind - (decl: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) = - let open Ast.Statement.VariableDeclaration.Declarator in - let decl' = super#variable_declarator ~kind decl in (* calls var_decl_pattern *) - let (_, { id = patt ; init = e }) = decl' in - this#assign_to_variable_declarator_pattern patt e; - decl' - - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - let open Dep in - match expr with - - | loc, Identifier id -> - let open Dep in - let id' = this#identifier id in - (try - let d = LocMap.find loc use_def_map in - let dep = { typeDep = Depends [DepKey.Id d]; - valDep = Depends [DepKey.Id d] } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map - with _ -> - let dep = { typeDep = Incomplete ; - valDep = Incomplete } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map - ) - ; - if id == id' then expr else loc, Identifier id' - - | loc, Literal l -> - let l' = this#literal loc l in - let dep = { typeDep = Dep.Primitive; - valDep = Dep.Primitive } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map - ; - loc, Literal l' - - | loc, Ast.Expression.Object o -> - let open Dep in - let o' = super#object_ loc o in - (* Initialize dependence info for the AST node *) - let dep = { typeDep = Dep.Object; - valDep = Dep.Object } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map - ; - (* Initialize dependence info for HeapLocs implied by the obj literal *) - let open Ast.Expression.Object in - let { properties=properties } = o' in - List.iter (function - | Property (_, Property.Init { - key = Property.Identifier (_, name); - value = (eloc, _); - shorthand = _; - }) -> - let dkey = DepKey.HeapLoc (loc, name) in - let dep = { - typeDep = Dep.Depends [DepKey.Temp eloc]; - valDep = Dep.Depends [DepKey.Temp eloc] - } in - dep_map <- DepMap.add dkey dep dep_map - (* TODO *) - | Property (_, Property.Init { - key = Property.Literal _ | Property.PrivateName _ | Property.Computed _; - _; - }) - | Property (_, Property.Method _) - | Property (_, Property.Get _) - | Property (_, Property.Set _) -> () - | SpreadProperty _ -> () - ) properties; - loc, Ast.Expression.Object o' - - | loc, Assignment a -> - let a' = this#assignment loc a in - loc, Assignment a' - - | loc, Binary b -> - let open Ast.Expression.Binary in - let open Dep in - let { operator = o; left; right } = b in - let left' = this#expression left in - let right' = this#expression right in - let left_loc, _ = left' in - let right_loc, _ = right' in - let dep = { typeDep = Depends [DepKey.Temp left_loc; DepKey.Temp right_loc]; - valDep = Depends [DepKey.Temp left_loc; DepKey.Temp right_loc] } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map - ; - if left == left' && right == right' then expr - else loc, Binary { operator = o; left = left'; right = right' } - - | loc, 
TypeCast x -> - let open Ast.Expression.TypeCast in - let open Dep in - let { expression=e; annot } = x in - let e' = this#expression e in - let loc_e',_ = e' in - let dep = { typeDep = Annotation (annot, []); - valDep = Depends [DepKey.Temp loc_e'] } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map - ; - if e' == e then expr - else loc, TypeCast { expression = e'; annot } - - (* TODO Member: in the best case, we can retrieve the right HeapLocs *) - - | loc, Update x -> - let open Ast.Expression.Update in - let x' = this#update_expression loc x in - let { argument; operator = _; prefix = _ } = x' in - (match argument with - | loc_a, Ast.Expression.Identifier _ -> - (try - let d = LocMap.find loc_a use_def_map in - let key = DepKey.Temp loc in - let dep_expr = { valDep = Depends [DepKey.Id d]; - typeDep = Depends [DepKey.Id d] } in (* number? *) - (* v++ augments the dependence of v onto itself as expr *) - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_expr dep_map; - (* the expression v++ depends on v *) - dep_map <- DepMap.add (DepKey.Temp loc) dep_expr dep_map - with _ -> ()) - (* TODO: deal with heap locations: they should not become incomplete *) - | loc_e, _ -> - let open Dep in - let dep = { valDep = Incomplete; - typeDep = Incomplete } in - dep_map <- DepMap.add (DepKey.Temp loc_e) dep dep_map - ) - ; - loc, Update x' - - | loc,_ -> - let open Dep in - let dep = { valDep = Incomplete; - typeDep = Incomplete } in - dep_map <- DepMap.add (DepKey.Temp loc) dep dep_map; - super#expression expr - - method assign_to_assignment_pattern - (pat: (Loc.t, Loc.t) Ast.Pattern.t) - (expr: (Loc.t, Loc.t) Ast.Expression.t) - (op: Ast.Expression.Assignment.operator) = - match pat with - | _, Ast.Pattern.Identifier id -> - (* Similar, but not identical to the corresponding case in - * assign_to_variable_declarator_pattern. 1. The rhs expression is not - * optional. 2. The += syntax can occur here. 3. We ignore the type - * annotation here. *) - let open Ast.Pattern.Identifier in - let { name = (loc, _); annot = _; _} = id in - (try - let d = LocMap.find loc use_def_map in - let key = DepKey.Id d in - let dep_left = - DepMap.find (DepKey.Id d) dep_map in - let (loc_e, _) = expr in - let dep_right = - DepMap.find (DepKey.Temp loc_e) dep_map in - match op with - | Ast.Expression.Assignment.Assign -> - (* treat as = assignment *) - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_right dep_map - | _ -> - (* treat as += assignment *) - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_left dep_map; - dep_map <- - DepMap.add ~combine:(merge_dep key) - key dep_right dep_map - with _ -> ()) - - | _, Ast.Pattern.Array a -> - (* This is identical to the corresponding case in - * assign_to_variable_declarator_pattern. TODO - refactor. *) - let open Ast.Pattern.Array in - let { elements; annot = _} = a in - let process_elem = fun e -> - (match e with - | Element (loc, _) -> this#map_id_to_incomplete loc - | RestElement (_,_) -> ()) - in - let process_elem_opt = fun e_opt -> - (match e_opt with - | Some e -> process_elem e - | None -> ()) in - List.iter process_elem_opt elements - - | _, _ -> () (* TODO deal with the case e.p = e'. Update or havoc *) - - method! 
assignment _loc (expr: (Loc.t, Loc.t) Ast.Expression.Assignment.t) = - let open Ast.Expression.Assignment in - let { operator = op; left; right } = expr in - let left' = this#assignment_pattern left in - let right' = this#expression right in - this#assign_to_assignment_pattern left' right' op - ; - if left == left' && right == right' then expr - else { expr with left = left'; right = right' } - - method! for_of_statement_lhs (left: (Loc.t, Loc.t) Ast.Statement.ForOf.left) = - let open Ast.Statement.ForOf in - let open Ast.Statement.VariableDeclaration in - match left with - | LeftDeclaration (loc, decl) -> - let decl' = super#variable_declaration loc decl in - (* Even though variable_declarator is handled elsewhere, For the For-of - case, it does not see the rhs assignment. So we need to havoc that - declarator here. *) - (try - let {declarations; _} = decl' in - let var_locs = List.map (fun (l,_) -> l) declarations in - (* For this decl, let the variable become Incomplete *) - List.iter this#map_id_to_incomplete var_locs - with _ -> ()) - ; - if decl == decl' - then left - else LeftDeclaration (loc, decl') - | LeftPattern patt -> - let patt' = this#for_of_assignment_pattern patt in - (match patt' with - | loc, Ast.Pattern.Identifier _ -> this#map_id_to_incomplete loc - | _ -> ()) (* TODO: object and array patterns can happen here *) - ; - if patt == patt' - then left - else LeftPattern patt' - - method! for_in_statement_lhs (left: (Loc.t, Loc.t) Ast.Statement.ForIn.left) = - (* Almost identical to the for-of case *) - let open Ast.Statement.ForIn in - let open Ast.Statement.VariableDeclaration in - match left with - | LeftDeclaration (loc, decl) -> - let decl' = super#variable_declaration loc decl in - (* Even though variable_declarator is handled elsewhere, For the For-In - case, it does not see the rhs assignment. So we need to havoc that - declarator here. *) - (try - let {declarations; _} = decl' in - let var_locs = List.map (fun (l,_) -> l) declarations in - (* For this decl, let the variable become Incomplete *) - List.iter this#map_id_to_incomplete var_locs - with _ -> ()) - ; - if decl == decl' - then left - else LeftDeclaration (loc, decl') - | LeftPattern patt -> - let patt' = this#for_in_assignment_pattern patt in - (match patt' with - | loc, Ast.Pattern.Identifier _ -> this#map_id_to_incomplete loc - | _ -> ()) (* TODO: object and array patterns can happen here *) - ; - if patt == patt' - then left - else LeftPattern patt' -end diff --git a/src/parser_utils/dune b/src/parser_utils/dune new file mode 100644 index 00000000000..83ed16be2f9 --- /dev/null +++ b/src/parser_utils/dune @@ -0,0 +1,12 @@ +(library + (name flow_parser_utils) + (wrapped false) + (flags -w +a-4-6-29-35-44-48-50) + (libraries + dtoa + flow_common + flow_common_utils + flow_common_utils_loc_utils + flow_parser + hh_json ; hack + )) diff --git a/src/parser_utils/file_exports_resolver.ml b/src/parser_utils/file_exports_resolver.ml deleted file mode 100644 index 8988f73474a..00000000000 --- a/src/parser_utils/file_exports_resolver.ml +++ /dev/null @@ -1,330 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -module Ast = Flow_ast - -open Flow_ast_visitor - -(* Resolve the AST nodes of CommonJS exported values, it mimics the ES modules interface - * by differentiating between named and default values. 
- * - * NOTE: This logic is temporary and is intended to be replaced by `File_sig`, avoid - * using this module since any coding relying on it will need to be rewritten. - *) - -type export_values = - | ExportFunction of { line_loc: Loc.t; func: (Loc.t * (Loc.t, Loc.t) Ast.Function.t) } - | ExportClass of { line_loc: Loc.t; class_: (Loc.t * (Loc.t, Loc.t) Ast.Class.t) } - | ExportExpression of { line_loc: Loc.t; expr: (Loc.t, Loc.t) Ast.Expression.t } - -type module_exports = { - default: export_values option; - named: export_values SMap.t option -} - -type non_resolveable_export_reason = - (* The name for a named property could not be resolved, it was likely a - non static computed property *) - | NonResolveableName of { line_loc: Loc.t; loc: Loc.t } - (* A non-analyzable export value, e.g. Getter, Setter *) - | DynamicExport of { line_loc: Loc.t; name: string option } - (* The global `export` or `module` object has been overridden *) - | ClobberedExport of { line_loc: Loc.t; } - -type ast_node = - | DefFunction of { line_loc: Loc.t; func: (Loc.t * (Loc.t, Loc.t) Ast.Function.t) } - | DefClass of { line_loc: Loc.t; class_: (Loc.t * (Loc.t, Loc.t) Ast.Class.t) } - | DefExpression of { line_loc: Loc.t; expr: (Loc.t, Loc.t) Ast.Expression.t } - -let empty_module_exports = { - default = None; - named = None; -} - -(* TODO: Create a generic LOC to AST map *) -class ast_of_def_loc_visitor ~loc = object(this) - inherit [ast_node option] visitor ~init:None as super - - method! statement stmt = - let open Ast.Statement in - if acc = None then match stmt with - | line_loc, FunctionDeclaration ({ Ast.Function.id = Some (local_loc, _); _ } as func) - when local_loc = loc -> - this#update_acc (fun _ -> Some (DefFunction { line_loc; func = (line_loc, func) })); - stmt - | line_loc, ClassDeclaration ({ Ast.Class.id = Some (local_loc, _); _ } as class_) - when local_loc = loc -> - this#update_acc (fun _ -> Some (DefClass { line_loc; class_ = (line_loc, class_) })); - stmt - | line_loc, VariableDeclaration { VariableDeclaration.declarations; _ } -> - List.iter (function - | _, { VariableDeclaration.Declarator. - id = (_, Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = (local_loc, _); _ - }); - init = Some expr - } when local_loc = loc -> - this#update_acc (fun _ -> Some (DefExpression { line_loc; expr })); - | _ -> () - ) declarations; - stmt - | _ -> super#statement stmt - else stmt -end - -let ast_of_def_loc ~ast ~loc = - let walk = new ast_of_def_loc_visitor ~loc in - walk#eval walk#program ast - -class exports_resolver ~ast = object(this) - inherit [module_exports] visitor ~init:empty_module_exports as _super - - val scope_info = Scope_builder.program ast - - method private reset_cjs_exports () = - this#update_acc (fun _ -> empty_module_exports) - - method private set_cjs_default_export export = - (* Setting a default export will always clobber the named properties *) - this#update_acc (fun _ -> { default = Some export; named = None }) - - method private add_cjs_named_export name export = - match name with - | Some name -> - this#update_acc (fun exports -> - let named = Option.value exports.named ~default:SMap.empty in - let named = SMap.add name export named in - { exports with named = Some named } - ) - (* TODO: Report non-resolvable exported names, currently they are dropped *) - | None -> - begin match export with - | ExportFunction { line_loc; func = (loc, _) } - | ExportClass { line_loc; class_ = (loc, _) } - | ExportExpression { line_loc; expr = (loc, _) } -> - this#add_non_resolveable_export (NonResolveableName { line_loc; loc }) - end - - method private add_non_resolveable_export (_reason: non_resolveable_export_reason) = - (* TODO: Report non-resolvable exports, currently they are dropped *) - () - - method! declare_module _loc (m: (Loc.t, Loc.t) Ast.Statement.DeclareModule.t) = - (* Don't walk into declare modules since they can define their own exports *) - m - - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let open Ast.Statement in - match stmt with - | loc, Expression { Expression. - expression = (_, Ast.Expression.Assignment expr); - directive = None - } -> - this#assignment_with_loc loc expr; - stmt - (* No need to walk anything else *) - | _ -> stmt - - method private assignment_with_loc (line_loc: Loc.t) (expr: (Loc.t, Loc.t) Ast.Expression.Assignment.t) = - let open Ast.Expression in - let { Assignment.operator; left; right } = expr in - - (* Handle exports *) - match operator, left with - (* exports = ... *) - | Assignment.Assign, (module_loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = (_, ("module" | "exports")); _ - }) - (* We only care about global scope *) - when not (Scope_api.is_local_use scope_info module_loc) -> - - (* The assignment will override the global object referance, making it no longer exported *) - this#reset_cjs_exports (); - this#add_non_resolveable_export (ClobberedExport { line_loc }) - - (* module.exports = ... *) - | Assignment.Assign, (_, Ast.Pattern.Expression (_, Member { Member. - _object = module_loc, Identifier (_, "module"); - property = Member.PropertyIdentifier (_, "exports"); _ - })) - (* We only care about global scope *) - when not (Scope_api.is_local_use scope_info module_loc) -> - - (* The assignment will clobber exports *) - this#reset_cjs_exports (); - this#process_default_export line_loc right; - - (* exports.foo = ... *) - | Assignment.Assign, (_, Ast.Pattern.Expression (_, Member { Member. - _object = module_loc, Identifier (_, "exports"); - property; - computed = _; - })) - (* module.exports.foo = ... *) - | Assignment.Assign, (_, Ast.Pattern.Expression (_, Member { Member. - _object = _, Member { Member. 
- _object = module_loc, Identifier (_, "module"); - property = Member.PropertyIdentifier (_, "exports"); _ - }; - property; - computed = _; - })) - (* We only care about global scope *) - when not (Scope_api.is_local_use scope_info module_loc) -> - - this#process_named_export - line_loc - (this#get_member_property_name property) - right - - | _ -> () - - method private process_default_export (line_loc: Loc.t) (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - match expr with - | _, Identifier (loc, _) -> - begin match Scope_api.def_of_use scope_info loc with - | { Scope_api.Def.locs = (loc, []); _ } -> - begin match ast_of_def_loc ~ast ~loc with - | Some (DefFunction { line_loc; func }) -> - this#set_cjs_default_export (ExportFunction { line_loc; func }) - | Some (DefClass { line_loc; class_ }) -> - this#set_cjs_default_export (ExportClass { line_loc; class_ }); - let (_, { Ast.Class.body = (_, { Ast.Class.Body.body }); _ }) = class_ in - List.iter this#process_class_static_property_export body - | Some (DefExpression { line_loc; expr }) -> - this#process_default_export line_loc expr; - | None -> - (* Definition not found *) - this#set_cjs_default_export (ExportExpression { line_loc; expr }) - end - | _ -> - (* Definition not found *) - this#set_cjs_default_export (ExportExpression { line_loc; expr }) - end - | _, Object { Object.properties; _ } -> - List.iter this#process_object_property_export properties - | loc, Function func - | loc, ArrowFunction func -> - this#set_cjs_default_export (ExportFunction { line_loc; func = (loc, func) }) - (* TODO: walk static properties, watch out for mutations *) - | loc, Class ({ Ast.Class.body = (_, { Ast.Class.Body.body }); _ } as class_) -> - this#set_cjs_default_export (ExportClass { line_loc; class_ = (loc, class_) }); - List.iter this#process_class_static_property_export body - | _ -> - this#set_cjs_default_export (ExportExpression { line_loc; expr }) - - method private process_named_export (line_loc: Loc.t) (name: string option) (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - match expr with - | _, Identifier (loc, _) -> - begin match Scope_api.def_of_use scope_info loc with - | { Scope_api.Def.locs = (loc, []); _ } -> - begin match ast_of_def_loc ~ast ~loc with - | Some (DefFunction { line_loc; func }) -> - this#add_cjs_named_export name (ExportFunction { line_loc; func }) - | Some (DefClass { line_loc; class_ }) -> - this#add_cjs_named_export name (ExportClass { line_loc; class_ }) - | Some (DefExpression { line_loc; expr }) -> - this#process_named_export line_loc name expr; - | None -> - (* Definition not found *) - this#add_cjs_named_export name (ExportExpression { line_loc; expr }) - end - | _ -> - (* Definition not found *) - this#add_cjs_named_export name (ExportExpression { line_loc; expr }) - end - | loc, Function func - | loc, ArrowFunction func -> - this#add_cjs_named_export name (ExportFunction { line_loc; func = (loc, func) }) - | loc, Class class_ -> - this#add_cjs_named_export name (ExportClass { line_loc; class_ = (loc, class_) }) - | _ -> - this#add_cjs_named_export name (ExportExpression { line_loc; expr }) - - method private process_object_property_export (prop: (Loc.t, Loc.t) Ast.Expression.Object.property) = - let open Ast.Expression.Object in - match prop with - | Property (line_loc, Property.Init { key; value; shorthand = _ }) -> - this#process_named_export line_loc (this#get_object_property_key_name key) value - - | Property (line_loc, Property.Method { key; value = func }) -> 
- this#add_cjs_named_export - (this#get_object_property_key_name key) - (ExportFunction { line_loc; func }) - - (* TODO: What do we do with setters or getters? *) - | Property (line_loc, Property.Get { key; _ }) - | Property (line_loc, Property.Set { key; _ }) -> - this#add_non_resolveable_export (DynamicExport { - line_loc; - name = (this#get_object_property_key_name key); - }) - - (* TODO: Follow local spread identifiers *) - | SpreadProperty (line_loc, _) -> - this#add_non_resolveable_export (DynamicExport { line_loc; name = None }) - - method private process_class_static_property_export (el: (Loc.t, Loc.t) Ast.Class.Body.element) = - let open Ast.Class in - match el with - | Body.Method (line_loc, { Method.static = true; key; value = func; kind = Method.Method; _ }) -> - this#add_cjs_named_export - (this#get_object_property_key_name key) - (ExportFunction { line_loc; func }) - - (* TODO: What do we do with setters or getters? *) - | Body.Method (line_loc, { Method.static = true; key; _ }) -> - this#add_non_resolveable_export (DynamicExport { - line_loc; - name = (this#get_object_property_key_name key); - }) - - | Body.Property (line_loc, { Property.static = true; key; value = Some value; _ }) -> - this#process_named_export line_loc (this#get_object_property_key_name key) value - - | _ -> () - - method private get_object_property_key_name (key: (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = - let open Ast.Expression.Object.Property in - match key with - (* Normal key *) - | Identifier (_, id) -> Some id - - (* Is computed but just defined as a string *) - | Literal (_, { Ast.Literal.value = Ast.Literal.String value; _ }) - | Computed (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String value; _ }) - -> Some value - - (* Not exported *) - | PrivateName _ -> None - - (* Anything else *) - | _ -> None - - method private get_member_property_name (key: (Loc.t, Loc.t) Ast.Expression.Member.property) = - let open Ast.Expression.Member in - match key with - (* Normal key *) - | PropertyIdentifier (_, id) -> Some id - - (* Is computed but just defined as a string *) - | PropertyExpression (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String value; _ }) - -> Some value - - (* Not exported *) - | PropertyPrivateName _ -> None - - (* Anything else *) - | _ -> None - -end - -let program ~ast = - let walk = new exports_resolver ~ast in - walk#eval walk#program ast diff --git a/src/parser_utils/file_sig.ml b/src/parser_utils/file_sig.ml index 2ec0dd8a574..1535ce0c02f 100644 --- a/src/parser_utils/file_sig.ml +++ b/src/parser_utils/file_sig.ml @@ -1,1160 +1,1632 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) +module Ast_utils = Flow_ast_utils module Ast = Flow_ast - module Result = Core_result open Flow_ast_visitor -type 'info t' = { - module_sig: 'info module_sig'; - declare_modules: (Loc.t * 'info module_sig') SMap.t; - tolerable_errors: tolerable_error list; -} - -and 'info module_sig' = { - requires: require list; - module_kind: module_kind; - type_exports_named: type_export SMap.t; - type_exports_star: export_star list; - info: 'info; -} - -and require = - | Require of { - source: Ast_utils.source; - require_loc: Loc.t; - bindings: require_bindings option; - } - | ImportDynamic of { source: Ast_utils.source; import_loc: Loc.t } - | Import0 of { source: Ast_utils.source } - | Import of { - source: Ast_utils.source; - named: imported_locs Nel.t SMap.t SMap.t; - ns: Ast_utils.ident option; - types: imported_locs Nel.t SMap.t SMap.t; - typesof: imported_locs Nel.t SMap.t SMap.t; - typesof_ns: Ast_utils.ident option; - } +module Make + (L : Loc_sig.S) + (Scope_api : Scope_api_sig.S with module L = L) + (Scope_builder : Scope_builder_sig.S with module L = L and module Api = Scope_api) + (Signature_builder_deps : Signature_builder_deps_sig.S with module L = L) = +struct + module L = L + module Signature_builder_deps = Signature_builder_deps + + type 'info t' = { + module_sig: 'info module_sig'; + declare_modules: (L.t * 'info module_sig') SMap.t; + tolerable_errors: tolerable_error list; + } -and imported_locs = { - remote_loc: Loc.t; - local_loc: Loc.t; -} + and 'info module_sig' = { + requires: require list; + module_kind: module_kind; + type_exports_named: (string * (L.t * type_export)) list; + type_exports_star: (L.t * export_star) list; + info: 'info; + } -and require_bindings = - | BindIdent of Ast_utils.ident - | BindNamed of imported_locs Nel.t SMap.t SMap.t + and require = + | Require of { + source: L.t Ast_utils.source; + require_loc: L.t; + bindings: require_bindings option; + } + | ImportDynamic of { + source: L.t Ast_utils.source; + import_loc: L.t; + } + | Import0 of { source: L.t Ast_utils.source } + | Import of { + import_loc: L.t; + source: L.t Ast_utils.source; + named: imported_locs Nel.t SMap.t SMap.t; + ns: L.t Ast_utils.ident option; + types: imported_locs Nel.t SMap.t SMap.t; + typesof: imported_locs Nel.t SMap.t SMap.t; + typesof_ns: L.t Ast_utils.ident option; + } -and module_kind = - | CommonJS of { - mod_exp_loc: Loc.t option; - } - | ES of { - named: export SMap.t; - star: export_star list; - } + and imported_locs = { + remote_loc: L.t; + local_loc: L.t; + } -and export = - | ExportDefault of { - default_loc: Loc.t; - local: Ast_utils.ident option; - } - | ExportNamed of { - loc: Loc.t; - kind: named_export_kind; - } - | ExportNs of { - loc: Loc.t; - source: Ast_utils.source; - } + and require_bindings = + | BindIdent of L.t Ast_utils.ident + | BindNamed of (L.t Ast_utils.ident * require_bindings) list -and named_export_kind = - | NamedDeclaration - | NamedSpecifier of { local: Ast_utils.ident; source: Ast_utils.source option } + and module_kind = + | CommonJS of { mod_exp_loc: L.t option } + | ES of { + named: (string * (L.t * export)) list; + star: (L.t * export_star) list; + } -and export_star = - | ExportStar of { star_loc: Loc.t; source: Ast_utils.source } + and export = + | ExportDefault of { + default_loc: L.t; + local: L.t Ast_utils.ident option; + } + | ExportNamed of { + loc: L.t; + kind: named_export_kind; + } + | ExportNs of { + loc: L.t; + star_loc: L.t; + source: L.t Ast_utils.source; + } + + and named_export_kind = + | NamedDeclaration + | 
NamedSpecifier of { + local: L.t Ast_utils.ident; + source: L.t Ast_utils.source option; + } + + and export_star = + | ExportStar of { + star_loc: L.t; + source: L.t Ast_utils.source; + } -and type_export = - | TypeExportNamed of { - loc: Loc.t; - kind: named_export_kind + and type_export = + | TypeExportNamed of { + loc: L.t; + kind: named_export_kind; + } + + and tolerable_error = + (* e.g. `module.exports.foo = 4` when not at the top level *) + | BadExportPosition of L.t + (* e.g. `foo(module)`, dangerous because `module` is aliased *) + | BadExportContext of string (* offending identifier *) * L.t + | SignatureVerificationError of Signature_builder_deps.Error.t + + type exports_info = { + module_kind_info: module_kind_info; + type_exports_named_info: es_export_def list; + } + + and module_kind_info = + | CommonJSInfo of cjs_exports_def list + | ESInfo of es_export_def list + + and cjs_exports_def = + | DeclareModuleExportsDef of (L.t, L.t) Ast.Type.annotation + | SetModuleExportsDef of (L.t, L.t) Ast.Expression.t + | AddModuleExportsDef of L.t Ast_utils.ident * (L.t, L.t) Ast.Expression.t + + and es_export_def = + | DeclareExportDef of (L.t, L.t) Ast.Statement.DeclareExportDeclaration.declaration + | ExportDefaultDef of (L.t, L.t) Ast.Statement.ExportDefaultDeclaration.declaration + | ExportNamedDef of (L.t, L.t) Ast.Statement.t + + type error = IndeterminateModuleType of L.t + + let mk_module_sig info = + { + requires = []; + module_kind = CommonJS { mod_exp_loc = None }; + type_exports_named = []; + type_exports_star = []; + info; } -and tolerable_error = - (* e.g. `module.exports.foo = 4` when not at the top level *) - | BadExportPosition of Loc.t - (* e.g. `foo(module)`, dangerous because `module` is aliased *) - | BadExportContext of string (* offending identifier *) * Loc.t - | SignatureVerificationError of Signature_builder_deps.Error.t - -type exports_info = { - module_kind_info: module_kind_info; - type_exports_named_info: es_export_def SMap.t; -} - -and module_kind_info = - | CommonJSInfo of cjs_exports_def list - | ESInfo of es_export_def SMap.t - -and cjs_exports_def = - | DeclareModuleExportsDef of (Loc.t, Loc.t) Ast.Type.annotation - | SetModuleExportsDef of (Loc.t, Loc.t) Ast.Expression.t - | AddModuleExportsDef of Ast_utils.ident * (Loc.t, Loc.t) Ast.Expression.t - -and es_export_def = - | DeclareExportDef of (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.declaration - | ExportDefaultDef of (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.declaration - | ExportNamedDef of (Loc.t, Loc.t) Ast.Statement.t - -type error = - | IndeterminateModuleType of Loc.t - -let mk_module_sig info = { - requires = []; - module_kind = CommonJS { mod_exp_loc = None }; - type_exports_named = SMap.empty; - type_exports_star = []; - info; -} - -let mk_file_sig info = { - module_sig = mk_module_sig info; - declare_modules = SMap.empty; - tolerable_errors = []; -} - -let init_exports_info = { - module_kind_info = CommonJSInfo []; - type_exports_named_info = SMap.empty; -} - -module PP = struct - let string_of_option f = function - | None -> "None" - | Some x -> Printf.sprintf "Some (%s)" (f x) - - let items_to_collection_string indent open_ close items = - let indent_str = String.make (indent * 2) ' ' in - let items_str = - items - |> List.map (Printf.sprintf "%s%s;\n" (indent_str ^ " ")) - |> String.concat "" - in - Printf.sprintf "%s\n%s%s%s" open_ items_str indent_str close + let mk_file_sig info = + { module_sig = mk_module_sig info; declare_modules = SMap.empty; 
tolerable_errors = [] } - let items_to_list_string indent items = - items_to_collection_string indent "[" "]" items + let init_exports_info = { module_kind_info = CommonJSInfo []; type_exports_named_info = [] } - let items_to_record_string indent items = - let items = items |> List.map (fun (label, value) -> - Printf.sprintf "%s: %s" label value - ) in - items_to_collection_string indent "{" "}" items -end + module PP = struct + let string_of_option f = function + | None -> "None" + | Some x -> Printf.sprintf "Some (%s)" (f x) -let exports_info_to_string exports_info = - let string_of_es_export_def = function - | DeclareExportDef _ -> "DeclareExportDef" - | ExportDefaultDef _ -> "ExportDefaultDef" - | ExportNamedDef _ -> "ExportNamedDef" - in - let string_of_module_kind_info = function - | CommonJSInfo _ -> "CommonJSInfo" - | ESInfo named -> - PP.items_to_record_string 2 @@ SMap.bindings @@ SMap.map string_of_es_export_def named - in - PP.items_to_record_string 1 [ - "module_kind_info", string_of_module_kind_info exports_info.module_kind_info; - "type_exports_named_info", PP.items_to_record_string 2 @@ SMap.bindings @@ SMap.map string_of_es_export_def exports_info.type_exports_named_info; - ] - -(* Applications may not care about the info carried by signatures. *) -type module_sig = unit module_sig' -type t = unit t' - -let init = mk_file_sig () - -let to_string t = - let string_of_module_sig module_sig = - let string_of_require_list require_list = - let string_of_require_bindings = function - | BindIdent (_, name) -> "BindIdent: " ^ name - | BindNamed _ -> "BindNamed" + let items_to_collection_string indent open_ close items = + let indent_str = String.make (indent * 2) ' ' in + let items_str = + items + |> Core_list.map ~f:(Printf.sprintf "%s%s;\n" (indent_str ^ " ")) + |> String.concat "" in - let string_of_require = function - | Require {source=(_, name); bindings; _} -> - Printf.sprintf "Require (%s, %s)" - name - (PP.string_of_option string_of_require_bindings bindings) - | ImportDynamic _ -> "ImportDynamic" - | Import0 _ -> "Import0" - | Import _ -> "Import" + Printf.sprintf "%s\n%s%s%s" open_ items_str indent_str close + + let items_to_list_string indent items = items_to_collection_string indent "[" "]" items + + let items_to_record_string indent items = + let items = + items |> Core_list.map ~f:(fun (label, value) -> Printf.sprintf "%s: %s" label value) in - PP.items_to_list_string 2 (List.map string_of_require require_list) + items_to_collection_string indent "{" "}" items + end + + let exports_info_to_string exports_info = + let string_of_es_export_def = function + | DeclareExportDef _ -> "DeclareExportDef" + | ExportDefaultDef _ -> "ExportDefaultDef" + | ExportNamedDef _ -> "ExportNamedDef" in - let string_of_named_export_kind = function - | NamedDeclaration -> "NamedDeclaration" - | NamedSpecifier { local; _ } -> - let _, x = local in - Printf.sprintf "NamedSpecifier(%s)" x + let string_of_module_kind_info = function + | CommonJSInfo _ -> "CommonJSInfo" + | ESInfo named -> PP.items_to_list_string 2 @@ Core_list.map ~f:string_of_es_export_def named + in + PP.items_to_record_string + 1 + [ + ("module_kind_info", string_of_module_kind_info exports_info.module_kind_info); + ( "type_exports_named_info", + PP.items_to_list_string 2 + @@ Core_list.map ~f:string_of_es_export_def exports_info.type_exports_named_info ); + ] + + (* Applications may not care about the info carried by signatures. 
*) + type module_sig = unit module_sig' + + type t = unit t' + + let init = mk_file_sig () + + let to_string t = + let string_of_module_sig module_sig = + let string_of_require_list require_list = + let string_of_require_bindings = function + | BindIdent (_, name) -> Printf.sprintf "BindIdent: %s" name + | BindNamed named -> + Printf.sprintf + "BindNamed: %s" + (String.concat ", " @@ Core_list.map ~f:(fun ((_, name), _) -> name) named) + in + let string_of_require = function + | Require { source = (_, name); bindings; _ } -> + Printf.sprintf + "Require (%s, %s)" + name + (PP.string_of_option string_of_require_bindings bindings) + | ImportDynamic _ -> "ImportDynamic" + | Import0 _ -> "Import0" + | Import _ -> "Import" + in + PP.items_to_list_string 2 (Core_list.map ~f:string_of_require require_list) + in + let string_of_named_export_kind = function + | NamedDeclaration -> "NamedDeclaration" + | NamedSpecifier { local; _ } -> + let (_, x) = local in + Printf.sprintf "NamedSpecifier(%s)" x + in + let string_of_export (n, export) = + ( n, + match export with + | (_, ExportDefault { local; _ }) -> + Printf.sprintf "ExportDefault (%s)" @@ PP.string_of_option (fun (_, x) -> x) local + | (_, ExportNamed { kind; _ }) -> + Printf.sprintf "ExportNamed (%s)" @@ string_of_named_export_kind kind + | (_, ExportNs _) -> "ExportNs" ) + in + let string_of_type_export (n, type_export) = + ( n, + match type_export with + | (_, TypeExportNamed { kind; _ }) -> + Printf.sprintf "TypeExportNamed (%s)" @@ string_of_named_export_kind kind ) + in + let string_of_export_star = function + | (_, ExportStar _) -> "ExportStar" + in + let string_of_module_kind = function + | CommonJS _ -> "CommonJS" + | ES { named; star } -> + PP.items_to_record_string + 2 + [ + ("named", PP.items_to_record_string 3 @@ Core_list.map ~f:string_of_export named); + ("star", PP.items_to_list_string 3 @@ Core_list.map ~f:string_of_export_star star); + ] + in + PP.items_to_record_string + 1 + [ + ("requires", string_of_require_list module_sig.requires); + ("module_kind", string_of_module_kind module_sig.module_kind); + ( "type_exports_named", + PP.items_to_record_string 2 + @@ Core_list.map ~f:string_of_type_export module_sig.type_exports_named ); + ( "type_exports_star", + PP.items_to_list_string 2 + @@ Core_list.map ~f:string_of_export_star module_sig.type_exports_star ); + ] in - let string_of_export = function - | ExportDefault { local; _ } -> - Printf.sprintf "ExportDefault (%s)" @@ - PP.string_of_option (fun (_, x) -> x) local - | ExportNamed { kind; _ } -> - Printf.sprintf "ExportNamed (%s)" @@ - string_of_named_export_kind kind - | ExportNs _ -> "ExportNs" + PP.items_to_record_string 0 [("module_sig", string_of_module_sig t.module_sig)] + + let combine_nel _ a b = Some (Nel.concat (a, [b])) + + let require_loc_map msig = + let acc = SMap.empty in + (* requires *) + let acc = + List.fold_left + (fun acc require -> + match require with + | Require { source = (loc, mref); _ } + | ImportDynamic { source = (loc, mref); _ } + | Import0 { source = (loc, mref) } + | Import { source = (loc, mref); _ } -> + SMap.add mref (Nel.one loc) acc ~combine:Nel.rev_append) + acc + msig.requires in - let string_of_type_export = function - | TypeExportNamed { kind; _ } -> - Printf.sprintf "TypeExportNamed (%s)" @@ - string_of_named_export_kind kind + (* export type {...} from 'foo' *) + let acc = + List.fold_left + (fun acc (_, type_export) -> + match type_export with + | (_, TypeExportNamed { kind = NamedSpecifier { source = Some (loc, mref); _ }; _ }) -> + 
SMap.add mref (Nel.one loc) acc ~combine:Nel.rev_append + | _ -> acc) + acc + msig.type_exports_named in - let string_of_export_star = function - | ExportStar _ -> "ExportStar" + (* export type * from 'foo' *) + let acc = + List.fold_left + (fun acc export_star -> + match export_star with + | (_, ExportStar { source = (source_loc, mref); _ }) -> + SMap.add mref (Nel.one source_loc) acc ~combine:Nel.rev_append) + acc + msig.type_exports_star in - let string_of_module_kind = function - | CommonJS _ -> "CommonJS" + let acc = + match msig.module_kind with + | CommonJS _ -> acc | ES { named; star } -> - PP.items_to_record_string 2 [ - "named", PP.items_to_record_string 3 @@ SMap.bindings @@ SMap.map string_of_export named; - "star", PP.items_to_list_string 3 @@ List.map string_of_export_star star; - ] + (* export {...} from 'foo' *) + let acc = + List.fold_left + (fun acc (_, export) -> + match export with + | (_, ExportNamed { kind = NamedSpecifier { source = Some (loc, mref); _ }; _ }) + | (_, ExportNs { source = (loc, mref); _ }) -> + SMap.add mref (Nel.one loc) acc ~combine:Nel.rev_append + | _ -> acc) + acc + named + in + (* export * from 'foo' *) + let acc = + List.fold_left + (fun acc export_star -> + match export_star with + | (_, ExportStar { source = (source_loc, mref); _ }) -> + SMap.add mref (Nel.one source_loc) acc ~combine:Nel.rev_append) + acc + star + in + acc in - PP.items_to_record_string 1 [ - "requires", string_of_require_list module_sig.requires; - "module_kind", string_of_module_kind module_sig.module_kind; - "type_exports_named", PP.items_to_record_string 2 @@ SMap.bindings @@ SMap.map string_of_type_export module_sig.type_exports_named; - "type_exports_star", PP.items_to_list_string 2 @@ List.map string_of_export_star module_sig.type_exports_star; - ] - in - let string_of_declare_modules _ = "TODO" in - let string_of_tolerable_errors _ = "TODO" in - PP.items_to_record_string 0 [ - "module_sig", string_of_module_sig t.module_sig; - "declare_modules", string_of_declare_modules t.declare_modules; - "tolerable_errors", string_of_tolerable_errors t.tolerable_errors; - ] - -let combine_nel _ a b = Some (Nel.concat (a, [b])) - -let require_loc_map msig = - let acc = SMap.empty in - (* requires *) - let acc = List.fold_left (fun acc require -> - match require with - | Require { source = (loc, mref); _ } - | ImportDynamic { source = (loc, mref); _ } - | Import0 { source = (loc, mref) } - | Import { source = (loc, mref); _ } -> - SMap.add mref (Nel.one loc) acc ~combine:Nel.rev_append - ) acc msig.requires in - (* export type {...} from 'foo' *) - let acc = SMap.fold (fun _ type_export acc -> - match type_export with - | TypeExportNamed { kind = NamedSpecifier ({source = Some (loc, mref); _ }); _ } -> - SMap.add mref (Nel.one loc) acc ~combine:Nel.rev_append - | _ -> acc - ) msig.type_exports_named acc in - (* export type * from 'foo' *) - let acc = List.fold_left (fun acc export_star -> - match export_star with - | ExportStar { source = (source_loc, mref); _ } -> - SMap.add mref (Nel.one source_loc) acc ~combine:Nel.rev_append - ) acc msig.type_exports_star in - let acc = match msig.module_kind with - | CommonJS _ -> acc - | ES { named; star } -> - (* export {...} from 'foo' *) - let acc = SMap.fold (fun _ export acc -> - match export with - | ExportNamed { kind = NamedSpecifier ({source = Some (loc, mref); _ }); _ } - | ExportNs { source = (loc, mref); _ } -> - SMap.add mref (Nel.one loc) acc ~combine:Nel.rev_append - | _ -> acc - ) named acc in - (* export * from 'foo' *) - let 
acc = List.fold_left (fun acc export_star -> - match export_star with - | ExportStar { source = (source_loc, mref); _ } -> - SMap.add mref (Nel.one source_loc) acc ~combine:Nel.rev_append - ) acc star in acc - in - acc - -let add_declare_module name m loc fsig = { - fsig with - declare_modules = SMap.add name (loc, m) fsig.declare_modules; -} - -let add_require require msig = - let requires = require :: msig.requires in - Ok ({ msig with requires }) - -let add_type_exports named named_info star msig = - let type_exports_named = List.fold_left (fun acc (export, name) -> - let type_export = match export with - | ExportNamed { loc; kind; } -> TypeExportNamed { loc; kind; } - | ExportDefault _ -> failwith "export default type" - | ExportNs _ -> failwith "export type * as X" - in - SMap.add name type_export acc - ) msig.type_exports_named named in - let info = msig.info in - let type_exports_named_info = List.fold_left (fun acc (export_info, name) -> - SMap.add name export_info acc - ) info.type_exports_named_info named_info in - let type_exports_star = Option.fold ~f:(fun acc export_star -> - export_star :: acc - ) ~init:msig.type_exports_star star in - Ok { msig with - type_exports_named; - type_exports_star; - info = { info with - type_exports_named_info - } - } -let add_es_exports loc named named_info star msig = - let info = msig.info in - let result = match msig.module_kind, info.module_kind_info with - | CommonJS { mod_exp_loc = Some _ }, CommonJSInfo _ -> Error (IndeterminateModuleType loc) - | CommonJS { mod_exp_loc = None }, CommonJSInfo _ -> Ok (SMap.empty, SMap.empty, []) - | ES { named; star }, ESInfo named_info -> Ok (named, named_info, star) - | _ -> failwith "unreachable" - in - match result with - | Error e -> Error e - | Ok (named0, named_info0, star0) -> - let named = List.fold_left (fun acc (export, name) -> - SMap.add name export acc - ) named0 named in - let named_info = List.fold_left (fun acc (export, name) -> - SMap.add name export acc - ) named_info0 named_info in - let star = Option.fold ~f:(fun acc export_star -> - export_star :: acc - ) ~init:star0 star in - let module_kind = ES { named; star } in - let module_kind_info = ESInfo named_info in - Ok ({ msig with module_kind; info = { info with module_kind_info } }) - -let set_cjs_exports mod_exp_loc cjs_exports_def msig = - let info = msig.info in - match msig.module_kind, info.module_kind_info with - | CommonJS { mod_exp_loc = original_mod_exp_loc }, CommonJSInfo def -> - let mod_exp_loc = Option.first_some original_mod_exp_loc (Some mod_exp_loc) in - let module_kind = CommonJS { mod_exp_loc } in - let module_kind_info = CommonJSInfo (cjs_exports_def::def) in - Ok { msig with module_kind; info = { info with module_kind_info } } - | ES _, ESInfo _ -> Error (IndeterminateModuleType mod_exp_loc) - | _ -> failwith "unreachable" - -(* Subclass of the AST visitor class that calculates requires and exports. Initializes with the - scope builder class. -*) -class requires_exports_calculator ~ast = object(this) - inherit [(exports_info t', error) result] visitor ~init:(Ok (mk_file_sig init_exports_info)) as super - - val scope_info = Scope_builder.program ast - method toplevel_names = - Scope_api.toplevel_names scope_info - - val mutable curr_declare_module: exports_info module_sig' option = None; - - (* This ensures that we do not add `require`s to `module_sig.requires` twice: - * once in `variable_declarator`/`assignment` and once in `call`. 
*) - val mutable visited_requires: Utils_js.LocSet.t = Utils_js.LocSet.empty; - - method private update_module_sig f = - match curr_declare_module with - | Some m -> - (match f m with - | Error e -> this#set_acc (Error e) - | Ok msig -> curr_declare_module <- Some msig) - | None -> - this#update_acc (function - | Error _ as acc -> acc - | Ok fsig -> - match f fsig.module_sig with - | Error e -> Error e - | Ok module_sig -> Ok ({ fsig with module_sig }) - ) - - method private add_require require = - this#update_module_sig (add_require require) - - method private add_exports loc kind named named_info batch = - let add = Ast.Statement.(match kind with - | ExportType -> add_type_exports - | ExportValue -> (add_es_exports loc) - ) in - this#update_module_sig (add named named_info batch) - - method private set_cjs_exports mod_exp_loc cjs_exports_def = - this#update_module_sig (set_cjs_exports mod_exp_loc cjs_exports_def) - - method private add_cjs_export mod_exp_loc cjs_exports_def = - this#update_module_sig (set_cjs_exports mod_exp_loc cjs_exports_def) - - method private add_tolerable_error (err: tolerable_error) = - this#update_acc (Result.map ~f:(fun fsig -> - { fsig with tolerable_errors=err::fsig.tolerable_errors } - )) - - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - begin match expr with - (* Disallow expressions consisting of `module` or `exports`. These are dangerous because they - * can allow aliasing and mutation. *) - | _, Identifier (loc, (("module" | "exports") as name)) - when not (Scope_api.is_local_use scope_info loc) -> - this#add_tolerable_error (BadExportContext (name, loc)) - | _ -> () - end; - super#expression expr - - method! binary loc (expr: (Loc.t, Loc.t) Ast.Expression.Binary.t) = - let open Ast.Expression in - let open Ast.Expression.Binary in - let is_module_or_exports = function - | _, Identifier (_, ("module" | "exports")) -> true - | _ -> false + let require_set msig = + let map = require_loc_map msig in + SMap.fold (fun key _ acc -> SSet.add key acc) map SSet.empty + + let add_declare_module name m loc fsig = + { fsig with declare_modules = SMap.add name (loc, m) fsig.declare_modules } + + let add_require require msig = + let requires = require :: msig.requires in + Ok { msig with requires } + + let add_type_exports named named_info star msig = + let named = + Core_list.map + ~f:(fun (name, export) -> + let type_export = + match export with + | (export_loc, ExportNamed { loc; kind }) -> (export_loc, TypeExportNamed { loc; kind }) + | (_, ExportDefault _) -> failwith "export default type" + | (_, ExportNs _) -> failwith "export type * as X" + in + (name, type_export)) + named in - let is_legal_operator = function - | StrictEqual | StrictNotEqual -> true - | _ -> false + let type_exports_named = List.rev_append named msig.type_exports_named in + let info = msig.info in + let type_exports_named_info = List.rev_append named_info info.type_exports_named_info in + let type_exports_star = + Option.fold ~f:(fun acc export_star -> export_star :: acc) ~init:msig.type_exports_star star in - let identify_or_recurse subexpr = - if not (is_module_or_exports subexpr) then - ignore (this#expression subexpr) - in - let { operator; left; right } = expr in - (* Whitelist e.g. `require.main === module` by avoiding the recursive calls (where the errors - * are generated) if the AST matches specific patterns. 
*) - if is_legal_operator operator then begin - identify_or_recurse left; - identify_or_recurse right; - expr - end else - super#binary loc expr - - method! member loc (expr: (Loc.t, Loc.t) Ast.Expression.Member.t) = - let open Ast.Expression in - let open Ast.Expression.Member in - let { _object; property; computed = _ } = expr in - (* Strip the loc to simplify the patterns *) - let _, _object = _object in - (* This gets called when patterns like `module.id` appear on the LHS of an - * assignment, in addition to when they appear in ordinary expression - * locations. Therefore we have to prevent anything that would be dangerous - * if it appeared on the LHS side of an assignment. Ordinary export - * statements are handled by handle_assignment, which stops recursion so we - * don't arrive here in those cases. *) - begin match _object, property with - (* Allow `module.anythingButExports` *) - | Identifier (_, "module"), PropertyIdentifier (_, prop) when prop <> "exports" -> () - (* Allow `module.exports.whatever` -- this is safe because handle_assignment has already - * looked for assignments to it before recursing down here. *) - | Member { - _object=(_, Identifier (_, "module")); - property = PropertyIdentifier (_, "exports"); - _; - }, - PropertyIdentifier _ - (* Allow `exports.whatever`, for the same reason as above *) - | Identifier (_, "exports"), PropertyIdentifier _ -> - (* In these cases we don't know much about the property so we should recurse *) - ignore (this#member_property property) - | _ -> ignore (super#member loc expr) - end; - expr - - method! call call_loc (expr: (Loc.t, Loc.t) Ast.Expression.Call.t) = - let open Ast.Expression in - let { Call.callee; targs = _; arguments } = expr in - this#handle_call call_loc callee arguments None; - super#call call_loc expr - - method! import import_loc (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - begin match expr with - | loc, ( - Literal { Ast.Literal.value = Ast.Literal.String name; _ } | - TemplateLiteral { TemplateLiteral. - quasis = [_, { TemplateLiteral.Element. - value = { TemplateLiteral.Element.cooked = name; _ }; _ - }]; _ - } - ) -> - this#add_require (ImportDynamic { - source = (loc, name); - import_loc; - }) - | _ -> () - end; - super#expression expr - - method! 
import_declaration stmt_loc (decl: (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.t) = - let open Ast.Statement.ImportDeclaration in - let { importKind; source; specifiers; default } = decl in - let source = match source with - | loc, { Ast.StringLiteral.value = name; _ } -> loc, name - in - let import = match default, specifiers with - | None, None -> Import0 { source } - | _ -> - let named = ref SMap.empty in - let ns = ref None in - let types = ref SMap.empty in - let typesof = ref SMap.empty in - let typesof_ns = ref None in - let ref_of_kind = function - | ImportType -> types - | ImportTypeof -> typesof - | ImportValue -> named - in - let add_named remote local loc ref = - let locals = SMap.singleton local (Nel.one loc) in - let combine_nel_smap a b = SMap.union a b ~combine:combine_nel in - ref := SMap.add remote locals !ref ~combine:combine_nel_smap - in - let set_ns local loc ref = - if !ref = None then ref := Some (loc, local) - else failwith "unreachable" - in - Option.iter ~f:(fun (loc, local) -> - add_named "default" local {remote_loc=loc; local_loc=loc} (ref_of_kind importKind) - ) default; - Option.iter ~f:(function - | ImportNamespaceSpecifier (loc, (_, local)) -> - (match importKind with - | ImportType -> failwith "import type * is a parse error" - | ImportTypeof -> set_ns local loc typesof_ns - | ImportValue -> set_ns local loc ns) - | ImportNamedSpecifiers named_specifiers -> - List.iter (function {local; remote; kind} -> - let importKind = match kind with Some k -> k | None -> importKind in - let local_loc, local_name = match local with Some x -> x | None -> remote in - let remote_loc, remote_name = remote in - add_named remote_name local_name {remote_loc; local_loc} (ref_of_kind importKind) - ) named_specifiers - ) specifiers; - Import { - source; - named = !named; - ns = !ns; - types = !types; - typesof = !typesof; - typesof_ns = !typesof_ns; + Ok + { + msig with + type_exports_named; + type_exports_star; + info = { info with type_exports_named_info }; } + + let add_es_exports loc named named_info star msig = + let info = msig.info in + let result = + match (msig.module_kind, info.module_kind_info) with + | (CommonJS { mod_exp_loc = Some _ }, CommonJSInfo _) -> Error (IndeterminateModuleType loc) + | (CommonJS { mod_exp_loc = None }, CommonJSInfo _) -> Ok ([], [], []) + | (ES { named; star }, ESInfo named_info) -> Ok (named, named_info, star) + | _ -> failwith "unreachable" in - this#add_require import; - super#import_declaration stmt_loc decl - - method! export_default_declaration stmt_loc (decl: (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) = - let open Ast.Statement in - let open Ast.Statement.ExportDefaultDeclaration in - let { default = default_loc; declaration } = decl in - let local = match declaration with - | Declaration (_, FunctionDeclaration { Ast.Function.id; _ }) -> id - | Declaration (_, ClassDeclaration { Ast.Class.id; _ }) -> id - (* There's some ambiguity about the name Expression. This satisfies the compiler. *) - | ExportDefaultDeclaration.Expression (_, Ast.Expression.Function { Ast.Function.id; _ }) -> id - | _ -> None - in - let export = ExportDefault { default_loc; local } in - let export_info = ExportDefaultDef declaration in - this#add_exports stmt_loc ExportValue [export, "default"] [export_info, "default"] None; - super#export_default_declaration stmt_loc decl - - method! 
export_named_declaration stmt_loc (decl: (Loc.t, Loc.t) Ast.Statement.ExportNamedDeclaration.t) = - let open Ast.Statement.ExportNamedDeclaration in - let { exportKind; source; specifiers; declaration} = decl in - let source = match source with - | Some (loc, { Ast.StringLiteral.value = mref; raw = _ }) -> Some (loc, mref) - | None -> None - in - begin match declaration with - | None -> () (* assert specifiers <> None *) - | Some (loc, stmt) -> - let open Ast.Statement in - assert (source = None); - let kind = NamedDeclaration in - let export_info = ExportNamedDef (loc, stmt) in - match stmt with - | FunctionDeclaration { Ast.Function.id = Some (loc, name); _ } - | ClassDeclaration { Ast.Class.id = Some (loc, name); _ } -> - let export = ExportNamed { loc; kind; } in - this#add_exports stmt_loc ExportValue [export, name] [export_info, name] None - | VariableDeclaration { VariableDeclaration.declarations = decls; _ } -> - let bindings = Ast_utils.bindings_of_variable_declarations decls in - let bindings = List.map (fun (loc, name) -> - let export = ExportNamed { loc; kind } in - (export, name), (export_info, name) - ) bindings in - let bindings1, bindings2 = List.split bindings in - this#add_exports stmt_loc ExportValue bindings1 bindings2 None - | TypeAlias { TypeAlias.id; _ } - | OpaqueType { OpaqueType.id; _ } - | InterfaceDeclaration { Interface.id; _ } -> - let export = ExportNamed { loc; kind } in - this#add_exports stmt_loc ExportType [export, snd id] [export_info, snd id] None; - | _ -> failwith "unsupported declaration" - end; - begin match specifiers with - | None -> () (* assert declaration <> None *) - | Some specifiers -> - this#export_specifiers stmt_loc exportKind source specifiers - end; - super#export_named_declaration stmt_loc decl - - method! declare_module_exports loc (annot: (Loc.t, Loc.t) Ast.Type.annotation) = - this#set_cjs_exports loc (DeclareModuleExportsDef annot); - super#declare_module_exports loc annot - - method! 
declare_export_declaration stmt_loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.t) = - let open Ast.Statement.DeclareExportDeclaration in - let { default; source; specifiers; declaration } = decl in - let source = match source with - | Some (loc, { Ast.StringLiteral.value = mref; raw = _ }) -> - assert (Option.is_none default); (* declare export default from not supported *) - Some (loc, mref) - | _ -> None - in - begin match declaration with - | None -> () (* assert specifiers <> None *) - | Some declaration -> - let open Ast.Statement in - assert (source = None); - let kind = NamedDeclaration in - let export_info = DeclareExportDef declaration in - match declaration with - | Variable (_, { DeclareVariable.id; _ }) - | Function (_, { DeclareFunction.id; _ }) - | Class (_, { DeclareClass.id; _ }) -> - let name, export = match default with - | Some default_loc -> "default", ExportDefault { default_loc; local = Some id } - | None -> snd id, ExportNamed { loc = fst id; kind } + match result with + | Error e -> Error e + | Ok (named0, named_info0, star0) -> + let named = List.rev_append named named0 in + let named_info = List.rev_append named_info named_info0 in + let star = Option.fold ~f:(fun acc export_star -> export_star :: acc) ~init:star0 star in + let module_kind = ES { named; star } in + let module_kind_info = ESInfo named_info in + Ok { msig with module_kind; info = { info with module_kind_info } } + + let set_cjs_exports mod_exp_loc cjs_exports_def msig = + let info = msig.info in + match (msig.module_kind, info.module_kind_info) with + | (CommonJS { mod_exp_loc = original_mod_exp_loc }, CommonJSInfo def) -> + let mod_exp_loc = Option.first_some original_mod_exp_loc (Some mod_exp_loc) in + let module_kind = CommonJS { mod_exp_loc } in + let module_kind_info = CommonJSInfo (cjs_exports_def :: def) in + Ok { msig with module_kind; info = { info with module_kind_info } } + | (ES _, ESInfo _) -> Error (IndeterminateModuleType mod_exp_loc) + | _ -> failwith "unreachable" + + (* Subclass of the AST visitor class that calculates requires and exports. Initializes with the + scope builder class. + *) + class requires_exports_calculator ~ast ~module_ref_prefix = + object (this) + inherit + [(exports_info t', error) result, L.t] visitor ~init:(Ok (mk_file_sig init_exports_info)) as super + + val scope_info = Scope_builder.program ast + + method toplevel_names = Scope_api.toplevel_names scope_info + + val mutable curr_declare_module : exports_info module_sig' option = None + + (* This ensures that we do not add a `require` with no bindings to `module_sig.requires` (when + * processing a `call`) when we have already added that `require` with bindings (when processing + * a `variable_declarator`). 
*) + val mutable visited_requires_with_bindings : L.LSet.t = L.LSet.empty + + method private visited_requires_with_bindings loc bindings = + bindings = None && L.LSet.mem loc visited_requires_with_bindings + + method private visit_requires_with_bindings loc bindings = + if bindings <> None then + visited_requires_with_bindings <- L.LSet.add loc visited_requires_with_bindings + + method private update_module_sig f = + match curr_declare_module with + | Some m -> + (match f m with + | Error e -> this#set_acc (Error e) + | Ok msig -> curr_declare_module <- Some msig) + | None -> + this#update_acc (function + | Error _ as acc -> acc + | Ok fsig -> + (match f fsig.module_sig with + | Error e -> Error e + | Ok module_sig -> Ok { fsig with module_sig })) + + method private add_require require = this#update_module_sig (add_require require) + + method private add_exports loc kind named named_info batch = + let add = + Ast.Statement.( + match kind with + | ExportType -> add_type_exports + | ExportValue -> add_es_exports loc) in - this#add_exports stmt_loc ExportValue [export, name] [export_info, name] None - | DefaultType _ -> - let default_loc = match default with - | Some loc -> loc - | None -> failwith "declare export default must have a default loc" + this#update_module_sig (add named named_info batch) + + method private set_cjs_exports mod_exp_loc cjs_exports_def = + this#update_module_sig (set_cjs_exports mod_exp_loc cjs_exports_def) + + method private add_cjs_export mod_exp_loc cjs_exports_def = + this#update_module_sig (set_cjs_exports mod_exp_loc cjs_exports_def) + + method private add_tolerable_error (err : tolerable_error) = + this#update_acc + (Result.map ~f:(fun fsig -> + { fsig with tolerable_errors = err :: fsig.tolerable_errors })) + + method! expression (expr : (L.t, L.t) Ast.Expression.t) = + Ast.Expression.( + begin + match expr with + (* Disallow expressions consisting of `module` or `exports`. These are dangerous because they + * can allow aliasing and mutation. *) + | ( _, + Identifier + (loc, { Ast.Identifier.name = ("module" | "exports") as name; comments = _ }) ) + when not (Scope_api.is_local_use scope_info loc) -> + this#add_tolerable_error (BadExportContext (name, loc)) + | _ -> () + end; + super#expression expr) + + method! binary loc (expr : (L.t, L.t) Ast.Expression.Binary.t) = + Ast.Expression.( + Ast.Expression.Binary.( + let is_module_or_exports = function + | (_, Identifier (_, { Ast.Identifier.name = "module" | "exports"; comments = _ })) + -> + true + | _ -> false + in + let is_legal_operator = function + | StrictEqual + | StrictNotEqual -> + true + | _ -> false + in + let identify_or_recurse subexpr = + if not (is_module_or_exports subexpr) then ignore (this#expression subexpr) + in + let { operator; left; right } = expr in + (* Whitelist e.g. `require.main === module` by avoiding the recursive calls (where the errors + * are generated) if the AST matches specific patterns. *) + if is_legal_operator operator then ( + identify_or_recurse left; + identify_or_recurse right; + expr + ) else + super#binary loc expr)) + + method! member loc (expr : (L.t, L.t) Ast.Expression.Member.t) = + Ast.Expression.( + Ast.Expression.Member.( + let { _object; property } = expr in + (* Strip the loc to simplify the patterns *) + let (_, _object) = _object in + (* This gets called when patterns like `module.id` appear on the LHS of an + * assignment, in addition to when they appear in ordinary expression + * locations. 
Therefore we have to prevent anything that would be dangerous + * if it appeared on the LHS side of an assignment. Ordinary export + * statements are handled by handle_assignment, which stops recursion so we + * don't arrive here in those cases. *) + begin + match (_object, property) with + (* Allow `module.anythingButExports` *) + | ( Identifier (_, { Ast.Identifier.name = "module"; comments = _ }), + PropertyIdentifier (_, { Ast.Identifier.name = prop; comments = _ }) ) + when prop <> "exports" -> + () + (* Allow `module.exports.whatever` -- this is safe because handle_assignment has already + * looked for assignments to it before recursing down here. *) + | ( Member + { + _object = + (_, Identifier (_, { Ast.Identifier.name = "module"; comments = _ })); + property = + PropertyIdentifier (_, { Ast.Identifier.name = "exports"; comments = _ }); + _; + }, + PropertyIdentifier _ ) + (* Allow `exports.whatever`, for the same reason as above *) + + | ( Identifier (_, { Ast.Identifier.name = "exports"; comments = _ }), + PropertyIdentifier _ ) -> + (* In these cases we don't know much about the property so we should recurse *) + ignore (this#member_property property) + | _ -> ignore (super#member loc expr) + end; + expr)) + + method! call call_loc (expr : (L.t, L.t) Ast.Expression.Call.t) = + Ast.Expression.( + let { Call.callee; targs = _; arguments } = expr in + this#handle_call call_loc callee arguments None; + super#call call_loc expr) + + method! literal loc (expr : L.t Ast.Literal.t) = + Ast.Literal.( + this#handle_literal loc expr.value; + super#literal loc expr) + + method! import import_loc (expr : (L.t, L.t) Ast.Expression.t) = + Ast.Expression.( + begin + match expr with + | ( loc, + ( Literal { Ast.Literal.value = Ast.Literal.String name; _ } + | TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = name; _ }; + _; + } ); + ]; + _; + } ) ) -> + this#add_require (ImportDynamic { source = (loc, name); import_loc }) + | _ -> () + end; + super#expression expr) + + method! 
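The dynamic `import()` handler above and the `import_declaration` / `handle_call` handlers that follow all feed `this#add_require` with one of the `require` constructors used in this file. A minimal sketch of how a consumer might discriminate those variants; the constructor shapes are taken from this file, while the helper name and the message strings are illustrative assumptions, not part of this change:

    let describe_require = function
      | Require { source = (_, mref); bindings; _ } ->
        (* bare or bound require("..."); [bindings] is None for a bare call *)
        Printf.sprintf "cjs require of %S (%s bindings)" mref
          (if bindings = None then "without" else "with")
      | ImportDynamic { source = (_, mref); _ } -> Printf.sprintf "dynamic import(%S)" mref
      | Import0 { source = (_, mref) } -> Printf.sprintf "side-effect-only import of %S" mref
      | Import { source = (_, mref); _ } -> Printf.sprintf "es import from %S" mref
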
import_declaration import_loc (decl : (L.t, L.t) Ast.Statement.ImportDeclaration.t) = + Ast.Statement.ImportDeclaration.( + let { importKind; source; specifiers; default } = decl in + let source = + match source with + | (loc, { Ast.StringLiteral.value = name; _ }) -> (loc, name) + in + let import = + match (default, specifiers) with + | (None, None) -> Import0 { source } + | _ -> + let named = ref SMap.empty in + let ns = ref None in + let types = ref SMap.empty in + let typesof = ref SMap.empty in + let typesof_ns = ref None in + let ref_of_kind = function + | ImportType -> types + | ImportTypeof -> typesof + | ImportValue -> named + in + let add_named remote local loc ref = + let locals = SMap.singleton local (Nel.one loc) in + let combine_nel_smap a b = SMap.union a b ~combine:combine_nel in + ref := SMap.add remote locals !ref ~combine:combine_nel_smap + in + let set_ns local loc ref = + if !ref = None then + ref := Some (loc, local) + else + failwith "unreachable" + in + Option.iter + ~f:(fun (loc, { Ast.Identifier.name = local; comments = _ }) -> + add_named + "default" + local + { remote_loc = loc; local_loc = loc } + (ref_of_kind importKind)) + default; + Option.iter + ~f:(function + | ImportNamespaceSpecifier + (loc, (_, { Ast.Identifier.name = local; comments = _ })) -> + (match importKind with + | ImportType -> failwith "import type * is a parse error" + | ImportTypeof -> set_ns local loc typesof_ns + | ImportValue -> set_ns local loc ns) + | ImportNamedSpecifiers named_specifiers -> + List.iter + (function + | { local; remote; kind } -> + let importKind = + match kind with + | Some k -> k + | None -> importKind + in + let (local_loc, { Ast.Identifier.name = local_name; comments = _ }) = + match local with + | Some x -> x + | None -> remote + in + let (remote_loc, { Ast.Identifier.name = remote_name; comments = _ }) = + remote + in + add_named + remote_name + local_name + { remote_loc; local_loc } + (ref_of_kind importKind)) + named_specifiers) + specifiers; + Import + { + import_loc; + source; + named = !named; + ns = !ns; + types = !types; + typesof = !typesof; + typesof_ns = !typesof_ns; + } + in + this#add_require import; + super#import_declaration import_loc decl) + + method! export_default_declaration + stmt_loc (decl : (L.t, L.t) Ast.Statement.ExportDefaultDeclaration.t) = + Ast.Statement.ExportDefaultDeclaration.( + let { default = default_loc; declaration } = decl in + let local = + match declaration with + | Declaration (_, Ast.Statement.FunctionDeclaration { Ast.Function.id; _ }) -> id + | Declaration (_, Ast.Statement.ClassDeclaration { Ast.Class.id; _ }) -> id + | Expression (_, Ast.Expression.Function { Ast.Function.id; _ }) -> id + | _ -> None + in + let local = Option.map ~f:Flow_ast_utils.source_of_ident local in + let export = (stmt_loc, ExportDefault { default_loc; local }) in + let export_info = ExportDefaultDef declaration in + this#add_exports + stmt_loc + Ast.Statement.ExportValue + [("default", export)] + [export_info] + None; + super#export_default_declaration stmt_loc decl) + + method! 
export_named_declaration + stmt_loc (decl : (L.t, L.t) Ast.Statement.ExportNamedDeclaration.t) = + Ast.Statement.ExportNamedDeclaration.( + let { exportKind; source; specifiers; declaration } = decl in + let source = + match source with + | Some (loc, { Ast.StringLiteral.value = mref; raw = _ }) -> Some (loc, mref) + | None -> None + in + begin + match declaration with + | None -> () (* assert specifiers <> None *) + | Some (loc, stmt) -> + Ast.Statement.( + assert (source = None); + let kind = NamedDeclaration in + let export_info = ExportNamedDef (loc, stmt) in + (match stmt with + | FunctionDeclaration + { Ast.Function.id = Some (loc, { Ast.Identifier.name; comments = _ }); _ } + | ClassDeclaration + { Ast.Class.id = Some (loc, { Ast.Identifier.name; comments = _ }); _ } -> + let export = (stmt_loc, ExportNamed { loc; kind }) in + this#add_exports stmt_loc ExportValue [(name, export)] [export_info] None + | VariableDeclaration { VariableDeclaration.declarations = decls; _ } -> + let (rev_named, rev_info) = + Ast_utils.fold_bindings_of_variable_declarations + (fun (named, infos) (loc, { Ast.Identifier.name; comments = _ }) -> + let export = (stmt_loc, ExportNamed { loc; kind }) in + ((name, export) :: named, export_info :: infos)) + ([], []) + decls + in + this#add_exports + stmt_loc + ExportValue + (List.rev rev_named) + (List.rev rev_info) + None + | TypeAlias { TypeAlias.id; _ } + | OpaqueType { OpaqueType.id; _ } + | InterfaceDeclaration { Interface.id; _ } -> + let export = (stmt_loc, ExportNamed { loc; kind }) in + this#add_exports + stmt_loc + ExportType + [(Flow_ast_utils.name_of_ident id, export)] + [export_info] + None + | _ -> failwith "unsupported declaration")) + end; + begin + match specifiers with + | None -> () (* assert declaration <> None *) + | Some specifiers -> this#export_specifiers stmt_loc exportKind source specifiers + end; + super#export_named_declaration stmt_loc decl) + + method! declare_module_exports loc (annot : (L.t, L.t) Ast.Type.annotation) = + this#set_cjs_exports loc (DeclareModuleExportsDef annot); + super#declare_module_exports loc annot + + method! 
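The export handlers around this point record each named export as a `(name, (stmt_loc, export))` pair, with a parallel `named_info` list, via `this#add_exports`. A minimal sketch of the `export` variants they construct; the variant shapes come from this file, while the helper name and the description strings are illustrative assumptions, not part of this change:

    let describe_export = function
      | ExportDefault { local = Some (_, name); _ } -> "export default of local binding " ^ name
      | ExportDefault { local = None; _ } -> "export default of an anonymous expression"
      | ExportNamed { kind = NamedDeclaration; _ } -> "export of a declaration in this module"
      | ExportNamed { kind = NamedSpecifier { source = None; _ }; _ } -> "export { local as exported }"
      | ExportNamed { kind = NamedSpecifier { source = Some (_, mref); _ }; _ } -> "re-export from " ^ mref
      | ExportNs { source = (_, mref); _ } -> "export * as ns from " ^ mref
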
declare_export_declaration + stmt_loc (decl : (L.t, L.t) Ast.Statement.DeclareExportDeclaration.t) = + Ast.Statement.DeclareExportDeclaration.( + let { default; source; specifiers; declaration } = decl in + let source = + match source with + | Some (loc, { Ast.StringLiteral.value = mref; raw = _ }) -> + assert (Option.is_none default); + + (* declare export default from not supported *) + Some (loc, mref) + | _ -> None + in + begin + match declaration with + | None -> () (* assert specifiers <> None *) + | Some declaration -> + Ast.Statement.( + assert (source = None); + let kind = NamedDeclaration in + let export_info = DeclareExportDef declaration in + (match declaration with + | Variable (_, { DeclareVariable.id; _ }) + | Function (_, { DeclareFunction.id; _ }) + | Class (_, { DeclareClass.id; _ }) -> + let (name, export) = + match default with + | Some default_loc -> + ( "default", + ( stmt_loc, + ExportDefault + { default_loc; local = Some (Flow_ast_utils.source_of_ident id) } ) ) + | None -> + ( Flow_ast_utils.name_of_ident id, + (stmt_loc, ExportNamed { loc = fst id; kind }) ) + in + this#add_exports stmt_loc ExportValue [(name, export)] [export_info] None + | DefaultType _ -> + let default_loc = + match default with + | Some loc -> loc + | None -> failwith "declare export default must have a default loc" + in + let export = (stmt_loc, ExportDefault { default_loc; local = None }) in + this#add_exports stmt_loc ExportValue [("default", export)] [export_info] None + | NamedType (_, { TypeAlias.id; _ }) + | NamedOpaqueType (_, { OpaqueType.id; _ }) + | Interface (_, { Interface.id; _ }) -> + assert (Option.is_none default); + let export = (stmt_loc, ExportNamed { loc = fst id; kind }) in + this#add_exports + stmt_loc + ExportType + [(Flow_ast_utils.name_of_ident id, export)] + [export_info] + None)) + end; + begin + match specifiers with + | None -> () (* assert declaration <> None *) + | Some specifiers -> + assert (Option.is_none default); + + (* declare export type unsupported *) + let exportKind = Ast.Statement.ExportValue in + this#export_specifiers stmt_loc exportKind source specifiers + end; + super#declare_export_declaration stmt_loc decl) + + method! assignment loc (expr : (L.t, L.t) Ast.Expression.Assignment.t) = + this#handle_assignment ~is_toplevel:false loc expr; + expr + + method handle_assignment + ~(is_toplevel : bool) loc (expr : (L.t, L.t) Ast.Expression.Assignment.t) = + Ast.Expression.( + Ast.Expression.Assignment.( + let { operator; left; right } = expr in + (* Handle exports *) + match (operator, left) with + (* module.exports = ... *) + | ( None, + ( mod_exp_loc, + Ast.Pattern.Expression + ( _, + Member + { + Member._object = + ( module_loc, + Identifier (_, { Ast.Identifier.name = "module"; comments = _ }) ); + property = + Member.PropertyIdentifier + (_, { Ast.Identifier.name = "exports"; comments = _ }); + _; + } ) ) ) + when not (Scope_api.is_local_use scope_info module_loc) -> + this#handle_cjs_default_export module_loc mod_exp_loc (SetModuleExportsDef right); + ignore (this#expression right); + if not is_toplevel then this#add_tolerable_error (BadExportPosition mod_exp_loc) + (* exports.foo = ... *) + | ( None, + ( _, + Ast.Pattern.Expression + ( _, + Member + { + Member._object = + ( (mod_exp_loc as module_loc), + Identifier (_, { Ast.Identifier.name = "exports"; comments = _ }) ); + property = Member.PropertyIdentifier id; + _; + } ) ) ) + (* module.exports.foo = ... 
*) + + | ( None, + ( _, + Ast.Pattern.Expression + ( _, + Member + { + Member._object = + ( mod_exp_loc, + Member + { + Member._object = + ( module_loc, + Identifier + (_, { Ast.Identifier.name = "module"; comments = _ }) ); + property = + Member.PropertyIdentifier + (_, { Ast.Identifier.name = "exports"; comments = _ }); + _; + } ); + property = Member.PropertyIdentifier id; + _; + } ) ) ) + when not (Scope_api.is_local_use scope_info module_loc) -> + (* expressions not allowed in declare module body *) + assert (curr_declare_module = None); + this#add_cjs_export + mod_exp_loc + (AddModuleExportsDef (Flow_ast_utils.source_of_ident id, right)); + ignore (this#expression right); + if not is_toplevel then this#add_tolerable_error (BadExportPosition mod_exp_loc) + (* module = ... *) + | ( None, + ( _, + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = + (loc, { Ast.Identifier.name = ("exports" | "module") as id; comments = _ }); + _; + } ) ) + when not (Scope_api.is_local_use scope_info loc) -> + ignore (this#expression right); + this#add_tolerable_error (BadExportContext (id, loc)) + | _ -> ignore (super#assignment loc expr))) + + method private handle_cjs_default_export module_loc mod_exp_loc cjs_exports_def = + (* expressions not allowed in declare module body *) + assert (curr_declare_module = None); + if not (Scope_api.is_local_use scope_info module_loc) then + this#set_cjs_exports mod_exp_loc cjs_exports_def + + method! variable_declarator + ~kind (decl : (L.t, L.t) Ast.Statement.VariableDeclaration.Declarator.t) = + begin + match decl with + | (_, { Ast.Statement.VariableDeclaration.Declarator.id; init = Some init }) -> + this#handle_require id init + | _ -> () + end; + super#variable_declarator ~kind decl + + method private require_pattern (pattern : (L.t, L.t) Ast.Pattern.t) = + match pattern with + | (_, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name; _ }) -> + Some (BindIdent (Flow_ast_utils.source_of_ident name)) + | (_, Ast.Pattern.Object { Ast.Pattern.Object.properties; _ }) -> + let named_opt = + ListUtils.fold_left_opt + (fun named prop -> + match prop with + | Ast.Pattern.Object.Property + ( _, + { + Ast.Pattern.Object.Property.key = + Ast.Pattern.Object.Property.Identifier remote; + pattern; + _; + } ) -> + let bindings = this#require_pattern pattern in + Option.map bindings (fun bindings -> (remote, bindings) :: named) + | _ -> None) + [] + properties + in + Option.map named_opt (fun named -> + let named_bind = + List.map (fun (id, bind) -> (Flow_ast_utils.source_of_ident id, bind)) named + in + BindNamed named_bind) + | _ -> None + + method private handle_require + (left : (L.t, L.t) Ast.Pattern.t) (right : (L.t, L.t) Ast.Expression.t) = + Ast.Expression.( + let bindings = this#require_pattern left in + match right with + | (call_loc, Call { Call.callee; targs = _; arguments }) -> + this#handle_call call_loc callee arguments bindings + | _ -> ()) + + method private handle_call call_loc callee arguments bindings = + Ast.Expression.( + if not (this#visited_requires_with_bindings call_loc bindings) then ( + this#visit_requires_with_bindings call_loc bindings; + match (callee, arguments) with + | ( (_, Identifier (loc, { Ast.Identifier.name = "require"; comments = _ })), + [ + Expression + ( source_loc, + ( Literal { Ast.Literal.value = Ast.Literal.String name; _ } + | TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = name; _ }; + _; + } ); + ]; + _; + } ) ); + ] ) -> + if not 
(Scope_api.is_local_use scope_info loc) then + this#add_require + (Require { source = (source_loc, name); require_loc = call_loc; bindings }) + | ( (_, Identifier (loc, { Ast.Identifier.name = "requireLazy"; comments = _ })), + [Expression (_, Array { Array.elements; comments = _ }); Expression _] ) -> + let element = function + | Some + (Expression + (source_loc, Literal { Ast.Literal.value = Ast.Literal.String name; _ })) -> + if not (Scope_api.is_local_use scope_info loc) then + this#add_require + (Require { source = (source_loc, name); require_loc = call_loc; bindings }) + | _ -> () + in + List.iter element elements + | _ -> () + )) + + method private handle_literal loc lit = + Ast.Literal.( + match module_ref_prefix with + | Some prefix -> + begin + match lit with + | String s when String_utils.string_starts_with s prefix -> + this#add_require + (Require + { + source = (loc, String_utils.lstrip s prefix); + require_loc = loc; + bindings = None; + }) + | _ -> () + end + | None -> ()) + + method! declare_module loc (m : (L.t, L.t) Ast.Statement.DeclareModule.t) = + let name = + Ast.Statement.DeclareModule.( + match m.id with + | Identifier (_, { Ast.Identifier.name; comments = _ }) -> name + | Literal (_, { Ast.StringLiteral.value; _ }) -> value) in - let export = ExportDefault { default_loc; local = None } in - this#add_exports stmt_loc ExportValue [export, "default"] [export_info, "default"] None - | NamedType (_, { TypeAlias.id; _ }) - | NamedOpaqueType (_, { OpaqueType.id; _ }) - | Interface (_, { Interface.id; _ }) -> - assert (Option.is_none default); - let export = ExportNamed { loc = fst id; kind } in - this#add_exports stmt_loc ExportType [export, snd id] [export_info, snd id] None - end; - begin match specifiers with - | None -> () (* assert declaration <> None *) - | Some specifiers -> - assert (Option.is_none default); - (* declare export type unsupported *) - let exportKind = Ast.Statement.ExportValue in - this#export_specifiers stmt_loc exportKind source specifiers - end; - super#declare_export_declaration stmt_loc decl - - method! assignment loc (expr: (Loc.t, Loc.t) Ast.Expression.Assignment.t) = - this#handle_assignment ~is_toplevel:false loc expr; - expr - - method handle_assignment ~(is_toplevel: bool) loc (expr: (Loc.t, Loc.t) Ast.Expression.Assignment.t) = - let open Ast.Expression in - let open Ast.Expression.Assignment in - let { operator; left; right } = expr in - - (* Handle exports *) - begin match operator, left with - (* module.exports = ... *) - | Assign, (mod_exp_loc, Ast.Pattern.Expression (_, Member { Member. - _object = module_loc, Identifier (_, "module"); - property = Member.PropertyIdentifier (_, "exports"); _ - })) when not (Scope_api.is_local_use scope_info module_loc) -> - this#handle_cjs_default_export module_loc mod_exp_loc (SetModuleExportsDef right); - ignore (this#expression right); - if not is_toplevel then - this#add_tolerable_error (BadExportPosition mod_exp_loc) - (* exports.foo = ... *) - | Assign, (_, Ast.Pattern.Expression (_, Member { Member. - _object = mod_exp_loc as module_loc, Identifier (_, "exports"); - property = Member.PropertyIdentifier id; _ - })) - (* module.exports.foo = ... *) - | Assign, (_, Ast.Pattern.Expression (_, Member { Member. - _object = mod_exp_loc, Member { Member. 
- _object = module_loc, Identifier (_, "module"); - property = Member.PropertyIdentifier (_, "exports"); _ - }; - property = Member.PropertyIdentifier id; _ - })) when not (Scope_api.is_local_use scope_info module_loc) -> - (* expressions not allowed in declare module body *) - assert (curr_declare_module = None); - this#add_cjs_export mod_exp_loc (AddModuleExportsDef (id, right)); - ignore (this#expression right); - if not is_toplevel then - this#add_tolerable_error (BadExportPosition mod_exp_loc) - (* module = ... *) - | Assign, (_, Ast.Pattern.Identifier { - Ast.Pattern.Identifier.name=(loc, ("exports" | "module" as id)); _ - }) when not (Scope_api.is_local_use scope_info loc) -> - ignore (this#expression right); - this#add_tolerable_error (BadExportContext (id, loc)) - | _ -> - ignore (super#assignment loc expr) - end; - - (* Handle imports *) - begin match operator with - | Assign -> this#handle_require left right - | _ -> () - end - - method private handle_cjs_default_export module_loc mod_exp_loc cjs_exports_def = - (* expressions not allowed in declare module body *) - assert (curr_declare_module = None); - if not (Scope_api.is_local_use scope_info module_loc) - then this#set_cjs_exports mod_exp_loc cjs_exports_def - - method! variable_declarator ~kind (decl: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) = - begin match decl with - | _, { Ast.Statement.VariableDeclaration.Declarator.id; init = Some init } -> - this#handle_require id init - | _ -> () - end; - super#variable_declarator ~kind decl - - method private handle_require (left: (Loc.t, Loc.t) Ast.Pattern.t) (right: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - let bindings = begin match left with - | _, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name; _ } -> Some (BindIdent name) - | _, Ast.Pattern.Object { Ast.Pattern.Object.properties; _ } -> - let add_named remote local loc acc = - let locals = SMap.singleton local (Nel.one loc) in - let combine_nel_smap a b = SMap.union a b ~combine:combine_nel in - SMap.add remote locals acc ~combine:combine_nel_smap - in - Some (BindNamed (List.fold_left (fun acc prop -> - match prop with - | Ast.Pattern.Object.Property (_, { - Ast.Pattern.Object.Property.key = Ast.Pattern.Object.Property.Identifier (remote_loc, remote_name); - pattern = _, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name = (local_loc, local_name); _ }; - _ - }) -> - add_named remote_name local_name { local_loc; remote_loc } acc - | _ -> acc - ) SMap.empty properties)) - | _ -> None - end in - begin match right with - | call_loc, Call { Call.callee; targs = _; arguments } -> - this#handle_call call_loc callee arguments bindings - | _ -> () + curr_declare_module <- Some (mk_module_sig init_exports_info); + let ret = super#declare_module loc m in + begin + match curr_declare_module with + | None -> failwith "lost curr_declare_module" + | Some m -> + this#update_acc (function + | Error _ as acc -> acc + | Ok fsig -> Ok (add_declare_module name m loc fsig)) + end; + curr_declare_module <- None; + ret + + method private export_specifiers stmt_loc kind source = + Ast.Statement.ExportNamedDeclaration.( + function + | ExportBatchSpecifier (star_loc, Some (loc, { Ast.Identifier.name; comments = _ })) -> + (* export type * as X from "foo" unsupported *) + assert (kind = Ast.Statement.ExportValue); + let mref = + match source with + | Some mref -> mref + | None -> failwith "export batch without source" + in + let export = (stmt_loc, ExportNs { loc; star_loc; source = mref }) in + 
this#add_exports stmt_loc kind [(name, export)] [] None + | ExportBatchSpecifier (star_loc, None) -> + let mref = + match source with + | Some mref -> mref + | _ -> failwith "batch export missing source" + in + let export = (stmt_loc, ExportStar { star_loc; source = mref }) in + this#add_exports stmt_loc kind [] [] (Some export) + | ExportSpecifiers specs -> + let bindings = + List.fold_left + ExportSpecifier.( + fun acc (_, spec) -> + let ({ Ast.Identifier.name; comments = _ }, loc) = + match spec.exported with + | None -> (snd spec.local, fst spec.local) + | Some remote -> (snd remote, fst remote) + in + let export = + ( stmt_loc, + ExportNamed + { + loc; + kind = + NamedSpecifier + { local = Flow_ast_utils.source_of_ident spec.local; source }; + } ) + in + (name, export) :: acc) + [] + specs + in + this#add_exports stmt_loc kind bindings [] None) + + method! toplevel_statement_list (stmts : (L.t, L.t) Ast.Statement.t list) = + Ast.( + let id = Flow_ast_mapper.id in + let map_expression (expr : (L.t, L.t) Expression.t) = + Expression.( + match expr with + | (loc, Assignment assg) -> + this#handle_assignment ~is_toplevel:true loc assg; + expr + | _ -> this#expression expr) + in + let map_expression_statement (stmt : (L.t, L.t) Statement.Expression.t) = + Statement.Expression.( + let { expression; _ } = stmt in + id map_expression expression stmt (fun expr -> { stmt with expression = expr })) + in + let map_statement (stmt : (L.t, L.t) Statement.t) = + Statement.( + match stmt with + | (loc, Expression expr) -> + id map_expression_statement expr stmt (fun expr -> (loc, Expression expr)) + | _ -> this#statement stmt) + in + ListUtils.ident_map map_statement stmts) end - method private handle_call call_loc callee arguments bindings = - let open Ast.Expression in - if not (Utils_js.LocSet.mem call_loc visited_requires) then begin - visited_requires <- Utils_js.LocSet.add call_loc visited_requires; - match callee, arguments with - | ((_, Identifier (loc, "require")), [Expression (source_loc, ( - Literal { Ast.Literal.value = Ast.Literal.String name; _ } | - TemplateLiteral { TemplateLiteral. - quasis = [_, { TemplateLiteral.Element. - value = { TemplateLiteral.Element.cooked = name; _ }; _ - }]; _ - } - ))]) -> - if not (Scope_api.is_local_use scope_info loc) - then - this#add_require (Require { - source = (source_loc, name); - require_loc = call_loc; - bindings; - }) - | ((_, Identifier (loc, "requireLazy")), - [Expression (_, Array ({ Array.elements })); Expression (_);]) - -> - let element = function - | Some (Expression (source_loc, Literal { Ast.Literal.value = Ast.Literal.String name; _ })) -> - if not (Scope_api.is_local_use scope_info loc) - then - this#add_require (Require { - source = (source_loc, name); - require_loc = call_loc; - bindings; - }) - | _ -> () in - List.iter element elements - | _ -> () - end - - method! 
declare_module loc (m: (Loc.t, Loc.t) Ast.Statement.DeclareModule.t) = - let name = Ast.Statement.DeclareModule.(match m.id with - | Identifier (_, name) -> name - | Literal (_, { Ast.StringLiteral.value; _ }) -> value - ) in - curr_declare_module <- Some (mk_module_sig init_exports_info); - let ret = super#declare_module loc m in - begin match curr_declare_module with - | None -> failwith "lost curr_declare_module" - | Some m -> - this#update_acc (function - | Error _ as acc -> acc - | Ok fsig -> Ok (add_declare_module name m loc fsig) - ) - end; - curr_declare_module <- None; - ret - - method private export_specifiers stmt_loc kind source = - let open Ast.Statement.ExportNamedDeclaration in - function - | ExportBatchSpecifier (_, Some (loc, name)) -> - (* export type * as X from "foo" unsupported *) - assert (kind = Ast.Statement.ExportValue); - let mref = match source with - | Some mref -> mref - | None -> failwith "export batch without source" - in - this#add_exports stmt_loc kind [ExportNs { loc; source = mref }, name] [] None - | ExportBatchSpecifier (star_loc, None) -> - let mref = match source with - | Some mref -> mref - | _ -> failwith "batch export missing source" - in - this#add_exports stmt_loc kind [] [] (Some (ExportStar { star_loc; source = mref })) - | ExportSpecifiers specs -> - let bindings = List.fold_left ExportSpecifier.(fun acc (_, spec) -> - let name, loc = match spec.exported with - | None -> snd spec.local, fst spec.local - | Some remote -> snd remote, fst remote - in - let export = ExportNamed { loc; kind = NamedSpecifier { local = spec.local; source } } in - (export, name) :: acc - ) [] specs in - this#add_exports stmt_loc kind bindings [] None - - method! toplevel_statement_list (stmts: (Loc.t, Loc.t) Ast.Statement.t list) = - let open Ast in - let id = Flow_ast_mapper.id in - let map_expression (expr: (Loc.t, Loc.t) Expression.t) = - let open Expression in - match expr with - | loc, Assignment assg -> - this#handle_assignment ~is_toplevel:true loc assg; - expr - | _ -> this#expression expr - in - let map_expression_statement (stmt: (Loc.t, Loc.t) Statement.Expression.t) = - let open Statement.Expression in - let {expression; _} = stmt in - id map_expression expression stmt (fun expr -> { stmt with expression=expr }) - in - let map_statement (stmt: (Loc.t, Loc.t) Statement.t) = - let open Statement in - match stmt with - | loc, Expression expr -> - id map_expression_statement expr stmt (fun expr -> loc, Expression expr) - | _ -> this#statement stmt - in - ListUtils.ident_map map_statement stmts -end - -type toplevel_names_and_exports_info = { - toplevel_names: SSet.t; - exports_info: (exports_info t', error) result -} - -let program_with_toplevel_names_and_exports_info ~ast = - let walk = new requires_exports_calculator ~ast in - { - toplevel_names = walk#toplevel_names; - exports_info = walk#eval walk#program ast + type toplevel_names_and_exports_info = { + toplevel_names: SSet.t; + exports_info: (exports_info t', error) result; } -let map_unit_file_sig = - let map_unit_module_sig module_sig = - { module_sig with info = () } - in fun file_sig -> - let { module_sig; declare_modules; _ } = file_sig in - let module_sig' = map_unit_module_sig module_sig in - let declare_modules' = SMap.map (fun (loc, module_sig) -> + let program_with_toplevel_names_and_exports_info ~ast ~module_ref_prefix = + let walk = new requires_exports_calculator ~ast ~module_ref_prefix in + { toplevel_names = walk#toplevel_names; exports_info = walk#eval walk#program ast } + + let 
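`program_with_toplevel_names_and_exports_info` above, together with the `program` and `map_unit_file_sig` helpers defined just after this point, forms the public entry point of the functorized module. A minimal usage sketch, assuming a parsed `ast` is already in scope and leaving the module-reference prefix disabled; the function name is an illustrative assumption, not part of this change:

    let _count_declare_modules ast =
      match program ~ast ~module_ref_prefix:None with
      | Ok file_sig ->
        (* declare_modules is keyed by module name; module_sig holds requires/exports *)
        SMap.cardinal file_sig.declare_modules
      | Error (IndeterminateModuleType _loc) -> 0
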
map_unit_file_sig = + let map_unit_module_sig module_sig = { module_sig with info = () } in + fun file_sig -> + let { module_sig; declare_modules; _ } = file_sig in let module_sig' = map_unit_module_sig module_sig in - (loc, module_sig') - ) declare_modules in - { file_sig with - module_sig = module_sig'; - declare_modules = declare_modules'; - } + let declare_modules' = + SMap.map + (fun (loc, module_sig) -> + let module_sig' = map_unit_module_sig module_sig in + (loc, module_sig')) + declare_modules + in + { file_sig with module_sig = module_sig'; declare_modules = declare_modules' } -let program ~ast = - match program_with_toplevel_names_and_exports_info ~ast with + let program ~ast ~module_ref_prefix = + match program_with_toplevel_names_and_exports_info ~ast ~module_ref_prefix with | { exports_info = Ok file_sig; _ } -> Ok (map_unit_file_sig file_sig) | { exports_info = Error e; _ } -> Error e -let verified errors file_sig = - let file_sig = map_unit_file_sig file_sig in - { file_sig with - tolerable_errors = Signature_builder_deps.ErrorSet.fold (fun error acc -> - (SignatureVerificationError error):: acc - ) errors file_sig.tolerable_errors - } - -class mapper = object(this) - method file_sig (file_sig: t) = - let { module_sig; declare_modules; tolerable_errors } = file_sig in - let module_sig' = this#module_sig module_sig in - let declare_modules' = SMapUtils.ident_map (fun (loc, module_sig) -> - let loc = this#loc loc in - let module_sig = this#module_sig module_sig in - (loc, module_sig) - ) declare_modules in - let tolerable_errors' = ListUtils.ident_map this#tolerable_error tolerable_errors in - if module_sig == module_sig' && - declare_modules == declare_modules' && - tolerable_errors == tolerable_errors' - then file_sig - else { - module_sig = module_sig'; - declare_modules = declare_modules'; - tolerable_errors = tolerable_errors'; + let verified errors file_sig = + let file_sig = map_unit_file_sig file_sig in + { + file_sig with + tolerable_errors = + Signature_builder_deps.PrintableErrorSet.fold + (fun error acc -> SignatureVerificationError error :: acc) + errors + file_sig.tolerable_errors; } - method module_sig (module_sig: module_sig) = - let { requires; module_kind; type_exports_named; type_exports_star; info = () } = module_sig in - let requires' = ListUtils.ident_map this#require requires in - let module_kind' = this#module_kind module_kind in - let type_exports_named' = SMapUtils.ident_map this#type_export type_exports_named in - let type_exports_star' = ListUtils.ident_map this#export_star type_exports_star in - if requires == requires' && - module_kind == module_kind' && - type_exports_named == type_exports_named' && - type_exports_star == type_exports_star' - then module_sig - else { module_sig with - requires = requires'; - module_kind = module_kind'; - type_exports_named = type_exports_named'; - type_exports_star = type_exports_star'; - } + class mapper = + object (this) + method file_sig (file_sig : t) = + let { module_sig; declare_modules; tolerable_errors } = file_sig in + let module_sig' = this#module_sig module_sig in + let declare_modules' = + SMapUtils.ident_map + (fun (loc, module_sig) -> + let loc = this#loc loc in + let module_sig = this#module_sig module_sig in + (loc, module_sig)) + declare_modules + in + let tolerable_errors' = ListUtils.ident_map this#tolerable_error tolerable_errors in + if + module_sig == module_sig' + && declare_modules == declare_modules' + && tolerable_errors == tolerable_errors' + then + file_sig + else + { + module_sig = 
module_sig'; + declare_modules = declare_modules'; + tolerable_errors = tolerable_errors'; + } - method require (require: require) = - match require with - | Require { source; require_loc; bindings; } -> - let source' = this#source source in - let require_loc' = this#loc require_loc in - let bindings' = OptionUtils.ident_map this#require_bindings bindings in - if source == source' && require_loc == require_loc' && bindings == bindings' - then require - else Require { source = source'; require_loc = require_loc'; bindings = bindings'; } - | ImportDynamic { source; import_loc } -> - let source' = this#source source in - let import_loc' = this#loc import_loc in - if source == source' && import_loc == import_loc' - then require - else ImportDynamic { source = source'; import_loc = import_loc'; } - | Import0 { source } -> - let source' = this#source source in - if source == source' - then require - else Import0 { source = source' } - | Import { source; named; ns; types; typesof; typesof_ns; } -> - let source' = this#source source in - let named' = SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) named in - let ns' = OptionUtils.ident_map this#ident ns in - let types' = SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) types in - let typesof' = SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) typesof in - let typesof_ns' = OptionUtils.ident_map this#ident typesof_ns in - if source == source' && - named == named' && - ns == ns' && - types == types' && - typesof == typesof' && - typesof_ns == typesof_ns' - then require - else Import { - source = source'; - named = named'; - ns = ns'; - types = types'; - typesof = typesof'; - typesof_ns = typesof_ns'; - } + method module_sig (module_sig : module_sig) = + let { requires; module_kind; type_exports_named; type_exports_star; info = () } = + module_sig + in + let requires' = ListUtils.ident_map this#require requires in + let module_kind' = this#module_kind module_kind in + let type_exports_named' = ListUtils.ident_map this#type_export type_exports_named in + let type_exports_star' = ListUtils.ident_map this#export_star type_exports_star in + if + requires == requires' + && module_kind == module_kind' + && type_exports_named == type_exports_named' + && type_exports_star == type_exports_star' + then + module_sig + else + { + module_sig with + requires = requires'; + module_kind = module_kind'; + type_exports_named = type_exports_named'; + type_exports_star = type_exports_star'; + } - method imported_locs (imported_locs: imported_locs) = - let {remote_loc; local_loc} = imported_locs in - let remote_loc' = this#loc remote_loc in - let local_loc' = this#loc local_loc in - if remote_loc == remote_loc' && local_loc == local_loc' - then imported_locs - else {remote_loc = remote_loc'; local_loc = local_loc'} - - method require_bindings (require_bindings: require_bindings) = - match require_bindings with - | BindIdent ident -> - let ident' = this#ident ident in - if ident == ident' - then require_bindings - else BindIdent ident' - | BindNamed named -> - let named' = SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) named in - if named == named' - then require_bindings - else BindNamed named' - - method module_kind (module_kind: module_kind) = - match module_kind with - | CommonJS { mod_exp_loc } -> - let mod_exp_loc' = OptionUtils.ident_map this#loc mod_exp_loc in - if mod_exp_loc == mod_exp_loc' - then module_kind - else CommonJS { mod_exp_loc = 
mod_exp_loc' } - | ES { named; star } -> - let named' = SMapUtils.ident_map this#export named in - let star' = ListUtils.ident_map this#export_star star in - if named == named' && star == star' - then module_kind - else ES { named = named'; star = star' } - - method named_export_kind (kind: named_export_kind) = - match kind with - | NamedDeclaration -> kind - | NamedSpecifier { local; source } -> - let local' = this#ident local in - let source' = OptionUtils.ident_map this#source source in - if local == local' && source == source' - then kind - else NamedSpecifier { local = local'; source = source' } - - method export (export: export) = - match export with - | ExportDefault { default_loc; local } -> - let default_loc' = this#loc default_loc in - let local' = OptionUtils.ident_map this#ident local in - if default_loc == default_loc' && local == local' - then export - else ExportDefault { default_loc = default_loc'; local = local' } - | ExportNamed { loc; kind } -> - let loc' = this#loc loc in - let kind' = this#named_export_kind kind in - if loc == loc' && kind == kind' - then export - else ExportNamed { loc = loc'; kind = kind' } - | ExportNs { loc; source; } -> - let loc' = this#loc loc in - let source' = this#source source in - if loc == loc' && source == source' - then export - else ExportNs { loc = loc'; source = source'; } - - method export_star (export_star: export_star) = - match export_star with - | ExportStar { star_loc; source } -> - let star_loc' = this#loc star_loc in - let source' = this#source source in - if star_loc == star_loc' && source == source' - then export_star - else ExportStar { star_loc = star_loc'; source = source'; } - - method type_export (type_export: type_export) = - match type_export with - | TypeExportNamed { loc; kind } -> - let loc' = this#loc loc in - let kind' = this#named_export_kind kind in - if loc == loc' && kind == kind' - then type_export - else TypeExportNamed { loc = loc'; kind = kind' } - - method ident (ident: Ast_utils.ident) = - let (loc, str) = ident in - let loc' = this#loc loc in - if loc == loc' - then ident - else (loc', str) - - method source (source: Ast_utils.source) = - let (loc, str) = source in - let loc' = this#loc loc in - if loc == loc' - then source - else (loc', str) - - method tolerable_error (tolerable_error: tolerable_error) = - match tolerable_error with - | BadExportPosition loc -> - let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else BadExportPosition loc' - | BadExportContext (str, loc) -> - let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else BadExportContext (str, loc') - | SignatureVerificationError sve -> - let open Signature_builder_deps.Error in - begin match sve with - | ExpectedSort (sort, x, loc) -> - let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (ExpectedSort (sort, x, loc')) - | ExpectedAnnotation loc -> + method require (require : require) = + match require with + | Require { source; require_loc; bindings } -> + let source' = this#source source in + let require_loc' = this#loc require_loc in + let bindings' = OptionUtils.ident_map this#require_bindings bindings in + if source == source' && require_loc == require_loc' && bindings == bindings' then + require + else + Require { source = source'; require_loc = require_loc'; bindings = bindings' } + | ImportDynamic { source; import_loc } -> + let source' = this#source source in + let import_loc' = this#loc import_loc in + if source == source' && import_loc == 
import_loc' then + require + else + ImportDynamic { source = source'; import_loc = import_loc' } + | Import0 { source } -> + let source' = this#source source in + if source == source' then + require + else + Import0 { source = source' } + | Import { import_loc; source; named; ns; types; typesof; typesof_ns } -> + let import_loc' = this#loc import_loc in + let source' = this#source source in + let named' = + SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) named + in + let ns' = OptionUtils.ident_map this#ident ns in + let types' = + SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) types + in + let typesof' = + SMapUtils.ident_map (SMapUtils.ident_map (Nel.ident_map this#imported_locs)) typesof + in + let typesof_ns' = OptionUtils.ident_map this#ident typesof_ns in + if + import_loc == import_loc' + && source == source' + && named == named' + && ns == ns' + && types == types' + && typesof == typesof' + && typesof_ns == typesof_ns' + then + require + else + Import + { + import_loc = import_loc'; + source = source'; + named = named'; + ns = ns'; + types = types'; + typesof = typesof'; + typesof_ns = typesof_ns'; + } + + method imported_locs (imported_locs : imported_locs) = + let { remote_loc; local_loc } = imported_locs in + let remote_loc' = this#loc remote_loc in + let local_loc' = this#loc local_loc in + if remote_loc == remote_loc' && local_loc == local_loc' then + imported_locs + else + { remote_loc = remote_loc'; local_loc = local_loc' } + + method require_bindings (require_bindings : require_bindings) = + match require_bindings with + | BindIdent ident -> + let ident' = this#ident ident in + if ident == ident' then + require_bindings + else + BindIdent ident' + | BindNamed named -> + let named' = + ListUtils.ident_map + (fun ((remote, require_bindings) as x) -> + let remote' = this#ident remote in + let require_bindings' = this#require_bindings require_bindings in + if remote == remote' && require_bindings == require_bindings' then + x + else + (remote', require_bindings')) + named + in + if named == named' then + require_bindings + else + BindNamed named' + + method module_kind (module_kind : module_kind) = + match module_kind with + | CommonJS { mod_exp_loc } -> + let mod_exp_loc' = OptionUtils.ident_map this#loc mod_exp_loc in + if mod_exp_loc == mod_exp_loc' then + module_kind + else + CommonJS { mod_exp_loc = mod_exp_loc' } + | ES { named; star } -> + let named' = ListUtils.ident_map this#export named in + let star' = ListUtils.ident_map this#export_star star in + if named == named' && star == star' then + module_kind + else + ES { named = named'; star = star' } + + method named_export_kind (kind : named_export_kind) = + match kind with + | NamedDeclaration -> kind + | NamedSpecifier { local; source } -> + let local' = this#ident local in + let source' = OptionUtils.ident_map this#source source in + if local == local' && source == source' then + kind + else + NamedSpecifier { local = local'; source = source' } + + method export (export : string * (L.t * export)) = + match export with + | (n, (export_loc, ExportDefault { default_loc; local })) -> + let export_loc' = this#loc export_loc in + let default_loc' = this#loc default_loc in + let local' = OptionUtils.ident_map this#ident local in + if export_loc == export_loc' && default_loc == default_loc' && local == local' then + export + else + (n, (export_loc', ExportDefault { default_loc = default_loc'; local = local' })) + | (n, (export_loc, ExportNamed { loc; kind })) -> + 
let export_loc' = this#loc export_loc in let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (ExpectedAnnotation loc') - | InvalidTypeParamUse loc -> + let kind' = this#named_export_kind kind in + if export_loc == export_loc' && loc == loc' && kind == kind' then + export + else + (n, (export_loc', ExportNamed { loc = loc'; kind = kind' })) + | (n, (export_loc, ExportNs { loc; star_loc; source })) -> + let export_loc' = this#loc export_loc in let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (InvalidTypeParamUse loc') - | UnexpectedObjectKey loc -> + let star_loc' = this#loc star_loc in + let source' = this#source source in + if export_loc == export_loc' && loc == loc' && star_loc == star_loc' && source == source' + then + export + else + (n, (export_loc', ExportNs { loc = loc'; star_loc = star_loc'; source = source' })) + + method export_star (export_star : L.t * export_star) = + match export_star with + | (export_loc, ExportStar { star_loc; source }) -> + let export_loc' = this#loc export_loc in + let star_loc' = this#loc star_loc in + let source' = this#source source in + if export_loc == export_loc' && star_loc == star_loc' && source == source' then + export_star + else + (export_loc', ExportStar { star_loc = star_loc'; source = source' }) + + method type_export (type_export : string * (L.t * type_export)) = + match type_export with + | (n, (export_loc, TypeExportNamed { loc; kind })) -> + let export_loc' = this#loc export_loc in let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (UnexpectedObjectKey loc') - | UnexpectedExpression (loc, esort) -> + let kind' = this#named_export_kind kind in + if export_loc == export_loc' && loc == loc' && kind == kind' then + type_export + else + (n, (export_loc', TypeExportNamed { loc = loc'; kind = kind' })) + + method ident (ident : L.t Ast_utils.ident) = + let (loc, str) = ident in + let loc' = this#loc loc in + if loc == loc' then + ident + else + (loc', str) + + method source (source : L.t Ast_utils.source) = + let (loc, str) = source in + let loc' = this#loc loc in + if loc == loc' then + source + else + (loc', str) + + method tolerable_error (tolerable_error : tolerable_error) = + match tolerable_error with + | BadExportPosition loc -> let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (UnexpectedExpression (loc', esort)) - | SketchyToplevelDef loc -> + if loc == loc' then + tolerable_error + else + BadExportPosition loc' + | BadExportContext (str, loc) -> let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (SketchyToplevelDef loc') - | TODO (msg, loc) -> + if loc == loc' then + tolerable_error + else + BadExportContext (str, loc') + | SignatureVerificationError sve -> + Signature_builder_deps.Error.( + begin + match sve with + | ExpectedSort (sort, x, loc) -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (ExpectedSort (sort, x, loc')) + | ExpectedAnnotation (loc, sort) -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (ExpectedAnnotation (loc', sort)) + | InvalidTypeParamUse loc -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (InvalidTypeParamUse loc') + | UnexpectedObjectKey (loc, key_loc) -> + let loc' = this#loc loc in + 
let key_loc' = this#loc key_loc in + if loc == loc' && key_loc == key_loc' then + tolerable_error + else + SignatureVerificationError (UnexpectedObjectKey (loc', key_loc')) + | UnexpectedObjectSpread (loc, spread_loc) -> + let loc' = this#loc loc in + let spread_loc' = this#loc spread_loc in + if loc == loc' && spread_loc == spread_loc' then + tolerable_error + else + SignatureVerificationError (UnexpectedObjectSpread (loc', spread_loc')) + | UnexpectedArraySpread (loc, spread_loc) -> + let loc' = this#loc loc in + let spread_loc' = this#loc spread_loc in + if loc == loc' && spread_loc == spread_loc' then + tolerable_error + else + SignatureVerificationError (UnexpectedArraySpread (loc', spread_loc')) + | UnexpectedArrayHole loc -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (UnexpectedArrayHole loc') + | EmptyArray loc -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (EmptyArray loc') + | EmptyObject loc -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (EmptyObject loc') + | UnexpectedExpression (loc, esort) -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (UnexpectedExpression (loc', esort)) + | SketchyToplevelDef loc -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (SketchyToplevelDef loc') + | UnsupportedPredicateExpression loc -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (UnsupportedPredicateExpression loc') + | TODO (msg, loc) -> + let loc' = this#loc loc in + if loc == loc' then + tolerable_error + else + SignatureVerificationError (TODO (msg, loc')) + end) + + method error (error : error) = + match error with + | IndeterminateModuleType loc -> let loc' = this#loc loc in - if loc == loc' - then tolerable_error - else SignatureVerificationError (TODO (msg, loc')) - end - - method error (error: error) = - match error with - | IndeterminateModuleType loc -> - let loc' = this#loc loc in - if loc == loc' - then error - else IndeterminateModuleType loc' - - method loc (loc: Loc.t) = loc + if loc == loc' then + error + else + IndeterminateModuleType loc' + + method loc (loc : L.t) = loc + end end + +module With_Loc = + Make (Loc_sig.LocS) (Scope_api.With_Loc) (Scope_builder.With_Loc) + (Signature_builder_deps.With_Loc) +module With_ALoc = + Make (Loc_sig.ALocS) (Scope_api.With_ALoc) (Scope_builder.With_ALoc) + (Signature_builder_deps.With_ALoc) + +let abstractify_tolerable_errors = + let module WL = With_Loc in + let module WA = With_ALoc in + let abstractify_tolerable_error = function + | WL.BadExportPosition loc -> WA.BadExportPosition (ALoc.of_loc loc) + | WL.BadExportContext (name, loc) -> WA.BadExportContext (name, ALoc.of_loc loc) + | WL.SignatureVerificationError err -> + WA.SignatureVerificationError (Signature_builder_deps.abstractify_error err) + in + Core_list.map ~f:abstractify_tolerable_error + +let abstractify_locs : With_Loc.t -> With_ALoc.t = + let module WL = With_Loc in + let module WA = With_ALoc in + let abstractify_fst (loc, x) = (ALoc.of_loc loc, x) in + let abstractify_imported_locs { WL.remote_loc; local_loc } = + { WA.remote_loc = ALoc.of_loc remote_loc; local_loc = ALoc.of_loc local_loc } + in + let abstractify_imported_locs_map = SMap.map (SMap.map (Nel.map abstractify_imported_locs)) in + let rec 
abstractify_require_bindings = function + | WL.BindIdent x -> WA.BindIdent (abstractify_fst x) + | WL.BindNamed named -> + WA.BindNamed + (Core_list.map + ~f:(fun (remote, require_bindings) -> + (abstractify_fst remote, abstractify_require_bindings require_bindings)) + named) + in + let abstractify_require = function + | WL.Require { source; require_loc; bindings } -> + WA.Require + { + source = abstractify_fst source; + require_loc = ALoc.of_loc require_loc; + bindings = Option.map ~f:abstractify_require_bindings bindings; + } + | WL.ImportDynamic { source; import_loc } -> + WA.ImportDynamic { source = abstractify_fst source; import_loc = ALoc.of_loc import_loc } + | WL.Import0 { source } -> WA.Import0 { source = abstractify_fst source } + | WL.Import { import_loc; source; named; ns; types; typesof; typesof_ns } -> + WA.Import + { + import_loc = ALoc.of_loc import_loc; + source = abstractify_fst source; + named = abstractify_imported_locs_map named; + ns = Option.map ~f:abstractify_fst ns; + types = abstractify_imported_locs_map types; + typesof = abstractify_imported_locs_map typesof; + typesof_ns = Option.map ~f:abstractify_fst typesof_ns; + } + in + let abstractify_requires = Core_list.map ~f:abstractify_require in + let abstractify_named_export_kind = function + | WL.NamedDeclaration -> WA.NamedDeclaration + | WL.NamedSpecifier { local; source } -> + WA.NamedSpecifier + { local = abstractify_fst local; source = Option.map ~f:abstractify_fst source } + in + let abstractify_export = function + | WL.ExportDefault { default_loc; local } -> + WA.ExportDefault + { default_loc = ALoc.of_loc default_loc; local = Option.map ~f:abstractify_fst local } + | WL.ExportNamed { loc; kind } -> + WA.ExportNamed { loc = ALoc.of_loc loc; kind = abstractify_named_export_kind kind } + | WL.ExportNs { loc; star_loc; source } -> + WA.ExportNs + { loc = ALoc.of_loc loc; star_loc = ALoc.of_loc star_loc; source = abstractify_fst source } + in + let abstractify_named_export (name, (loc, export)) = + (name, (ALoc.of_loc loc, abstractify_export export)) + in + let abstractify_named_exports = Core_list.map ~f:abstractify_named_export in + let abstractify_export_star = function + | WL.ExportStar { star_loc; source } -> + WA.ExportStar { star_loc = ALoc.of_loc star_loc; source = abstractify_fst source } + in + let abstractify_es_star = + Core_list.map ~f:(fun (loc, export_star) -> + (ALoc.of_loc loc, abstractify_export_star export_star)) + in + let abstractify_module_kind = function + | WL.CommonJS { mod_exp_loc } -> + WA.CommonJS { mod_exp_loc = Option.map ~f:ALoc.of_loc mod_exp_loc } + | WL.ES { named; star } -> + WA.ES { named = abstractify_named_exports named; star = abstractify_es_star star } + in + let abstractify_type_export = function + | WL.TypeExportNamed { loc; kind } -> + WA.TypeExportNamed { loc = ALoc.of_loc loc; kind = abstractify_named_export_kind kind } + in + let abstractify_type_exports_named = + Core_list.map ~f:(fun (name, (loc, type_export)) -> + (name, (ALoc.of_loc loc, abstractify_type_export type_export))) + in + let abstractify_type_exports_star = abstractify_es_star in + let abstractify_module_sig + { WL.requires; module_kind; type_exports_named; type_exports_star; info } = + { + WA.requires = abstractify_requires requires; + module_kind = abstractify_module_kind module_kind; + type_exports_named = abstractify_type_exports_named type_exports_named; + type_exports_star = abstractify_type_exports_star type_exports_star; + info; + } + in + let abstractify_declare_modules = + SMap.map (fun 
(loc, module_sig) -> (ALoc.of_loc loc, abstractify_module_sig module_sig)) + in + fun { WL.module_sig; declare_modules; tolerable_errors } -> + { + WA.module_sig = abstractify_module_sig module_sig; + declare_modules = abstractify_declare_modules declare_modules; + tolerable_errors = abstractify_tolerable_errors tolerable_errors; + } diff --git a/src/parser_utils/file_sig.mli b/src/parser_utils/file_sig.mli index 18837f09bfd..6a29d07b24d 100644 --- a/src/parser_utils/file_sig.mli +++ b/src/parser_utils/file_sig.mli @@ -1,238 +1,20 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -(* In Flow, every file creates a single module, but may also include declared - * modules. This data structure describes all such modules. - * - * If a declared module with the same name appears twice, the last one will be - * represented here. - * - * This representation is a bit broad, because implementation files generally - * should not contain declare modules and declaration files (libdefs) are all - * coalesced into a single module (builtins). *) -type 'info t' = { - module_sig: 'info module_sig'; - declare_modules: (Loc.t * 'info module_sig') SMap.t; - tolerable_errors: tolerable_error list; -} - -(* We can extract the observable interface of a module by extracting information - * about what it requires and what it exports. *) -and 'info module_sig' = { - requires: require list; - module_kind: module_kind; - type_exports_named: type_export SMap.t; (* export type {A, B as C} [from x] *) - type_exports_star: export_star list; (* export type * from "foo" *) - info: 'info; (* useful to carry information that might eventually be erased *) -} - -(* We track information about dependencies for each unique module reference in a - * file. For example, `import X from "foo"` and `require("foo")` both induce - * dependencies on the same module and have the same module ref. - * - * Note that different refs can point to the same module, but we haven't - * resolved modules yet, so we don't know where the ref actually points. - *) -and require = - (* require('foo'); *) - | Require of { - (* location of module ref *) - source: Ast_utils.source; - - require_loc: Loc.t; - - (* Note: These are best-effort. - * DO NOT use these for typechecking. *) - bindings: require_bindings option; - - } - - (* import('foo').then(...) *) - | ImportDynamic of { source: Ast_utils.source; import_loc: Loc.t } - - (* import declaration without specifiers - * - * Note that this is equivalent to the Import variant below with all fields - * empty, but modeled as a separate variant to ensure use sites handle this - * case if necessary. *) - | Import0 of { source: Ast_utils.source } - - (* import declaration with specifiers *) - | Import of { - (* location of module ref *) - source: Ast_utils.source; - - (* map from remote name to local names of value imports - * source: import {A, B as C} from "foo"; - * result: {A:{A:{[ImportedLocs {_}]}}, B:{C:{[ImportedLocs {_}]}}} - * - * Multiple locations for a given (remoteName, localName) pair are not typical, but they can - * occur e.g. with the code `import {foo, foo} from 'bar';`. This code would cause an error - * later because of the duplicate local name, but we should handle it here since it does parse. 
- *) - named: imported_locs Nel.t SMap.t SMap.t; - - (* optional pair of location of namespace import and local name - * source: import * as X from "foo"; - * result: loc, X *) - ns: Ast_utils.ident option; - - (* map from remote name to local names of type imports - * source: import type {A, B as C} from "foo"; - * source: import {type A, type B as C} from "foo"; - * result: {A:{A:{[ImportedLocs {_}]}}, B:{C:{[ImportedLocs {_}]}}} *) - types: imported_locs Nel.t SMap.t SMap.t; - - (* map from remote name to local names of typeof imports - * source: import typeof {A, B as C} from "foo"; - * source: import {typeof A, typeof B as C} from "foo"; - * result: {A:{A:{[ImportedLocs {_}]}}, B:{C:{[ImportedLocs {_}]}}} *) - typesof: imported_locs Nel.t SMap.t SMap.t; - - (* optional pair of location of namespace typeof import and local name - * source: import typeof * as X from "foo"; - * result: loc, X *) - typesof_ns: Ast_utils.ident option - } - -and imported_locs = { - remote_loc: Loc.t; - local_loc: Loc.t; -} - -and require_bindings = - (* source: const bar = require('./foo'); - * result: bar *) - | BindIdent of Ast_utils.ident - (* map from remote name to local names of requires - * source: const {a, b: c} = require('./foo'); - * result: {a: {a: [a_loc]}, b: {c: [c_loc]}} *) - | BindNamed of imported_locs Nel.t SMap.t SMap.t - -(* All modules are assumed to be CommonJS to start with, but if we see an ES - * module-style export, we switch to ES. *) -and module_kind = - | CommonJS of { - mod_exp_loc: Loc.t option; - } - | ES of { - (* map from exported name to export data *) - named: export SMap.t; - (* map from module reference to location of `export *` *) - star: export_star list; - } - -and export = - | ExportDefault of { - (* location of the `default` keyword *) - default_loc: Loc.t; - (* may have local name, e.g., `export default function foo {}` *) - (** NOTE: local = Some id if and only if id introduces a local binding **) - local: Ast_utils.ident option; - } - | ExportNamed of { - (* loc of remote name *) - loc: Loc.t; - kind: named_export_kind; - } - | ExportNs of { - (* loc of remote name *) - loc: Loc.t; - (* module reference of exported namespace *) - source: Ast_utils.source; - } - -and named_export_kind = - | NamedDeclaration - | NamedSpecifier of { - (* local name, e.g., `export {foo as bar}`, `export type {T as U}` *) - local: Ast_utils.ident; - (* module reference for re-exports, e.g., `export {foo} from 'bar'`,`export type {T} from 'bar'` *) - source: Ast_utils.source option - } - -and export_star = - | ExportStar of { star_loc: Loc.t; source: Ast_utils.source } - -and type_export = - | TypeExportNamed of { - (* loc of remote name *) - loc: Loc.t; - kind: named_export_kind; - } - -and tolerable_error = - (* e.g. `module.exports.foo = 4` when not at the top level *) - | BadExportPosition of Loc.t - (* e.g. 
`foo(module)`, dangerous because `module` is aliased *) - | BadExportContext of string (* offending identifier *) * Loc.t - | SignatureVerificationError of Signature_builder_deps.Error.t - -type exports_info = { - module_kind_info: module_kind_info; - type_exports_named_info: es_export_def SMap.t; -} - -and module_kind_info = - | CommonJSInfo of cjs_exports_def list - | ESInfo of es_export_def SMap.t - -and cjs_exports_def = - | DeclareModuleExportsDef of (Loc.t, Loc.t) Flow_ast.Type.annotation - | SetModuleExportsDef of (Loc.t, Loc.t) Flow_ast.Expression.t - | AddModuleExportsDef of Ast_utils.ident * (Loc.t, Loc.t) Flow_ast.Expression.t - -and es_export_def = - | DeclareExportDef of (Loc.t, Loc.t) Flow_ast.Statement.DeclareExportDeclaration.declaration - | ExportDefaultDef of (Loc.t, Loc.t) Flow_ast.Statement.ExportDefaultDeclaration.declaration - | ExportNamedDef of (Loc.t, Loc.t) Flow_ast.Statement.t - -type error = - | IndeterminateModuleType of Loc.t - -type toplevel_names_and_exports_info = { - toplevel_names: SSet.t; - exports_info: (exports_info t', error) result -} - -val program_with_toplevel_names_and_exports_info: ast:(Loc.t, Loc.t) Flow_ast.program -> - toplevel_names_and_exports_info - -(* Use for debugging; not for exposing info the the end user *) -val exports_info_to_string: exports_info -> string - -(* Applications may not care about the info carried by signatures. *) -type module_sig = unit module_sig' -type t = unit t' - -val init: t - -val program: ast:(Loc.t, Loc.t) Flow_ast.program -> (t, error) result -val verified: Signature_builder_deps.ErrorSet.t -> exports_info t' -> t +module With_Loc : + File_sig_sig.S + with module L = Loc_sig.LocS + and module Signature_builder_deps = Signature_builder_deps.With_Loc -(* Use for debugging; not for exposing info the the end user *) -val to_string: t -> string +module With_ALoc : + File_sig_sig.S + with module L = Loc_sig.ALocS + and module Signature_builder_deps = Signature_builder_deps.With_ALoc -val require_loc_map: module_sig -> Loc.t Nel.t SMap.t +val abstractify_locs : With_Loc.t -> With_ALoc.t -class mapper : object - method error: error -> error - method export: export -> export - method export_star: export_star -> export_star - method file_sig: t -> t - method ident: Ast_utils.ident -> Ast_utils.ident - method source: Ast_utils.source -> Ast_utils.source - method named_export_kind: named_export_kind -> named_export_kind - method imported_locs: imported_locs -> imported_locs - method loc: Loc.t -> Loc.t - method module_kind: module_kind -> module_kind - method module_sig: module_sig -> module_sig - method require: require -> require - method require_bindings: require_bindings -> require_bindings - method tolerable_error: tolerable_error -> tolerable_error - method type_export: type_export -> type_export -end +val abstractify_tolerable_errors : With_Loc.tolerable_error list -> With_ALoc.tolerable_error list diff --git a/src/parser_utils/file_sig_sig.ml b/src/parser_utils/file_sig_sig.ml new file mode 100644 index 00000000000..c8db510817e --- /dev/null +++ b/src/parser_utils/file_sig_sig.ml @@ -0,0 +1,263 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module type S = sig + module L : Loc_sig.S + + module Signature_builder_deps : Signature_builder_deps_sig.S + + (* In Flow, every file creates a single module, but may also include declared + * modules. 
This data structure describes all such modules. + * + * If a declared module with the same name appears twice, the last one will be + * represented here. + * + * This representation is a bit broad, because implementation files generally + * should not contain declare modules and declaration files (libdefs) are all + * coalesced into a single module (builtins). *) + type 'info t' = { + module_sig: 'info module_sig'; + declare_modules: (L.t * 'info module_sig') SMap.t; + tolerable_errors: tolerable_error list; + } + + (* We can extract the observable interface of a module by extracting information + * about what it requires and what it exports. *) + and 'info module_sig' = { + requires: require list; + module_kind: module_kind; + type_exports_named: (string * (L.t * type_export)) list; + (* export type {A, B as C} [from x] *) + type_exports_star: (L.t * export_star) list; + (* export type * from "foo" *) + info: 'info; (* useful to carry information that might eventually be erased *) + } + + (* We track information about dependencies for each unique module reference in a + * file. For example, `import X from "foo"` and `require("foo")` both induce + * dependencies on the same module and have the same module ref. + * + * Note that different refs can point to the same module, but we haven't + * resolved modules yet, so we don't know where the ref actually points. + *) + and require = + (* require('foo'); *) + | Require of { + (* location of module ref *) + source: L.t Flow_ast_utils.source; + require_loc: L.t; + (* Note: These are best-effort. + * DO NOT use these for typechecking. *) + bindings: require_bindings option; + } + (* import('foo').then(...) *) + | ImportDynamic of { + source: L.t Flow_ast_utils.source; + import_loc: L.t; + } + (* import declaration without specifiers + * + * Note that this is equivalent to the Import variant below with all fields + * empty, but modeled as a separate variant to ensure use sites handle this + * case if necessary. *) + | Import0 of { source: L.t Flow_ast_utils.source } + (* import declaration with specifiers *) + | Import of { + import_loc: L.t; + (* location of module ref *) + source: L.t Flow_ast_utils.source; + (* map from remote name to local names of value imports + * source: import {A, B as C} from "foo"; + * result: {A:{A:{[ImportedLocs {_}]}}, B:{C:{[ImportedLocs {_}]}}} + * + * Multiple locations for a given (remoteName, localName) pair are not typical, but they can + * occur e.g. with the code `import {foo, foo} from 'bar';`. This code would cause an error + * later because of the duplicate local name, but we should handle it here since it does parse. 
+ *) + named: imported_locs Nel.t SMap.t SMap.t; + (* optional pair of location of namespace import and local name + * source: import * as X from "foo"; + * result: loc, X *) + ns: L.t Flow_ast_utils.ident option; + (* map from remote name to local names of type imports + * source: import type {A, B as C} from "foo"; + * source: import {type A, type B as C} from "foo"; + * result: {A:{A:{[ImportedLocs {_}]}}, B:{C:{[ImportedLocs {_}]}}} *) + types: imported_locs Nel.t SMap.t SMap.t; + (* map from remote name to local names of typeof imports + * source: import typeof {A, B as C} from "foo"; + * source: import {typeof A, typeof B as C} from "foo"; + * result: {A:{A:{[ImportedLocs {_}]}}, B:{C:{[ImportedLocs {_}]}}} *) + typesof: imported_locs Nel.t SMap.t SMap.t; + (* optional pair of location of namespace typeof import and local name + * source: import typeof * as X from "foo"; + * result: loc, X *) + typesof_ns: L.t Flow_ast_utils.ident option; + } + + and imported_locs = { + remote_loc: L.t; + local_loc: L.t; + } + + and require_bindings = + (* source: const bar = require('./foo'); + * result: bar *) + | BindIdent of L.t Flow_ast_utils.ident + (* map from remote name to local names of requires + * source: const {a, b: c} = require('./foo'); + * result: {a: (a_loc, a), b: (c_loc, c)} *) + | BindNamed of (L.t Flow_ast_utils.ident * require_bindings) list + + (* All modules are assumed to be CommonJS to start with, but if we see an ES + * module-style export, we switch to ES. *) + and module_kind = + | CommonJS of { mod_exp_loc: L.t option } + | ES of { + (* map from exported name to export data *) + named: (string * (L.t * export)) list; + (* map from module reference to location of `export *` *) + star: (L.t * export_star) list; + } + + and export = + | ExportDefault of { + (* location of the `default` keyword *) + default_loc: L.t; + (* may have local name, e.g., `export default function foo {}` *) + (** NOTE: local = Some id if and only if id introduces a local binding **) + local: L.t Flow_ast_utils.ident option; + } + | ExportNamed of { + (* loc of remote name *) + loc: L.t; + kind: named_export_kind; + } + | ExportNs of { + (* loc of remote name *) + loc: L.t; + star_loc: L.t; + (* module reference of exported namespace *) + source: L.t Flow_ast_utils.source; + } + + and named_export_kind = + | NamedDeclaration + | NamedSpecifier of { + (* local name, e.g., `export {foo as bar}`, `export type {T as U}` *) + local: L.t Flow_ast_utils.ident; + (* module reference for re-exports, e.g., `export {foo} from 'bar'`,`export type {T} from 'bar'` *) + source: L.t Flow_ast_utils.source option; + } + + and export_star = + | ExportStar of { + star_loc: L.t; + source: L.t Flow_ast_utils.source; + } + + and type_export = + | TypeExportNamed of { + (* loc of remote name *) + loc: L.t; + kind: named_export_kind; + } + + and tolerable_error = + (* e.g. `module.exports.foo = 4` when not at the top level *) + | BadExportPosition of L.t + (* e.g. 
`foo(module)`, dangerous because `module` is aliased *) + | BadExportContext of string (* offending identifier *) * L.t + | SignatureVerificationError of Signature_builder_deps.Error.t + + type exports_info = { + module_kind_info: module_kind_info; + type_exports_named_info: es_export_def list; + } + + and module_kind_info = + | CommonJSInfo of cjs_exports_def list + | ESInfo of es_export_def list + + and cjs_exports_def = + | DeclareModuleExportsDef of (L.t, L.t) Flow_ast.Type.annotation + | SetModuleExportsDef of (L.t, L.t) Flow_ast.Expression.t + | AddModuleExportsDef of L.t Flow_ast_utils.ident * (L.t, L.t) Flow_ast.Expression.t + + and es_export_def = + | DeclareExportDef of (L.t, L.t) Flow_ast.Statement.DeclareExportDeclaration.declaration + | ExportDefaultDef of (L.t, L.t) Flow_ast.Statement.ExportDefaultDeclaration.declaration + | ExportNamedDef of (L.t, L.t) Flow_ast.Statement.t + + type error = IndeterminateModuleType of L.t + + type toplevel_names_and_exports_info = { + toplevel_names: SSet.t; + exports_info: (exports_info t', error) result; + } + + val program_with_toplevel_names_and_exports_info : + ast:(L.t, L.t) Flow_ast.program -> + module_ref_prefix:string option -> + toplevel_names_and_exports_info + + (* Use for debugging; not for exposing info the the end user *) + val exports_info_to_string : exports_info -> string + + (* Applications may not care about the info carried by signatures. *) + type module_sig = unit module_sig' + + type t = unit t' + + val init : t + + val program : + ast:(L.t, L.t) Flow_ast.program -> module_ref_prefix:string option -> (t, error) result + + val verified : Signature_builder_deps.PrintableErrorSet.t -> exports_info t' -> t + + (* Use for debugging; not for exposing info the the end user *) + val to_string : t -> string + + val require_loc_map : module_sig -> L.t Nel.t SMap.t + + (* Only the keys returned by `require_loc_map` *) + val require_set : module_sig -> SSet.t + + class mapper : + object + method error : error -> error + + method export : string * (L.t * export) -> string * (L.t * export) + + method export_star : L.t * export_star -> L.t * export_star + + method file_sig : t -> t + + method ident : L.t Flow_ast_utils.ident -> L.t Flow_ast_utils.ident + + method source : L.t Flow_ast_utils.source -> L.t Flow_ast_utils.source + + method named_export_kind : named_export_kind -> named_export_kind + + method imported_locs : imported_locs -> imported_locs + + method loc : L.t -> L.t + + method module_kind : module_kind -> module_kind + + method module_sig : module_sig -> module_sig + + method require : require -> require + + method require_bindings : require_bindings -> require_bindings + + method tolerable_error : tolerable_error -> tolerable_error + + method type_export : string * (L.t * type_export) -> string * (L.t * type_export) + end +end diff --git a/src/parser_utils/flow_ast_contains_mapper.ml b/src/parser_utils/flow_ast_contains_mapper.ml new file mode 100644 index 00000000000..cdf45c786fd --- /dev/null +++ b/src/parser_utils/flow_ast_contains_mapper.ml @@ -0,0 +1,41 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* This mapper prunes expression and statements that are not relevent + to a target from being mapped over and rebuilt. It doesn't prune the + space as much as is possible, but doing so results in a much less + maintainable piece of code. 
*) +class virtual ['L] mapper = + object (this) + inherit ['L] Flow_ast_mapper.mapper as super + + method virtual loc_annot_contains_target : 'L -> bool + + method! program ((l, _, _) as x) = + if this#loc_annot_contains_target l then + super#program x + else + x + + method! statement ((l, _) as x) = + if this#loc_annot_contains_target l then + super#statement x + else + x + + method! comment ((l, _) as x) = + if this#loc_annot_contains_target l then + super#comment x + else + x + + method! expression ((l, _) as x) = + if this#loc_annot_contains_target l then + super#expression x + else + x + end diff --git a/src/parser_utils/flow_ast_differ.ml b/src/parser_utils/flow_ast_differ.ml index faf38348cd1..1522f2da6c2 100644 --- a/src/parser_utils/flow_ast_differ.ml +++ b/src/parser_utils/flow_ast_differ.ml @@ -1,10 +1,11 @@ (** - * Copyright (c) 2014, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module Ast_utils = Flow_ast_utils module Ast = Flow_ast open Utils_js @@ -13,85 +14,112 @@ type 'a change' = | Insert of (* separator. Defaults to \n *) string option * 'a list | Delete of 'a -type 'a change = (Loc.t * 'a change') +type 'a change = Loc.t * 'a change' -type diff_algorithm = Trivial | Standard +type diff_algorithm = + | Trivial + | Standard (* Position in the list is necessary to figure out what Loc.t to assign to insertions. *) -type 'a diff_result = (int (* position *) * 'a change') +type 'a diff_result = int (* position *) * 'a change' + +(* Compares changes based on location. *) +let change_compare (pos1, chg1) (pos2, chg2) = + if pos1 <> pos2 then + compare pos1 pos2 + else + (* Orders the change types alphabetically. This puts same-indexed inserts before deletes *) + match (chg1, chg2) with + | (Insert _, Delete _) + | (Delete _, Replace _) + | (Insert _, Replace _) -> + -1 + | (Delete _, Insert _) + | (Replace _, Delete _) + | (Replace _, Insert _) -> + 1 + | _ -> 0 (* diffs based on identity *) (* return None if no good diff was found (max edit distance exceeded, etc.) *) -let trivial_list_diff (old_list : 'a list) (new_list : 'a list) : ('a diff_result list) option = - (* inspect the lists pairwise and record any items which are different as replacements. Give up if - * the lists have different lengths.*) +let trivial_list_diff (old_list : 'a list) (new_list : 'a list) : 'a diff_result list option = + (* inspect the lists pairwise and record any items which are different as replacements. 
Give up if + * the lists have different lengths.*) let rec helper i lst1 lst2 = - match lst1, lst2 with - | [], [] -> Some [] - | hd1::tl1, hd2::tl2 -> + match (lst1, lst2) with + | ([], []) -> Some [] + | (hd1 :: tl1, hd2 :: tl2) -> let rest = helper (i + 1) tl1 tl2 in if hd1 != hd2 then Option.map rest ~f:(List.cons (i, Replace (hd1, hd2))) else rest - | _, [] - | [], _ -> + | (_, []) + | ([], _) -> None in - if old_list == new_list then Some [] - else helper 0 old_list new_list + if old_list == new_list then + Some [] + else + helper 0 old_list new_list (* diffs based on http://www.xmailserver.org/diff2.pdf on page 6 *) -let standard_list_diff (old_list : 'a list) (new_list : 'a list) : ('a diff_result list) option = +let standard_list_diff (old_list : 'a list) (new_list : 'a list) : 'a diff_result list option = (* Lots of acccesses in this algorithm so arrays are faster *) - let (old_arr, new_arr) = Array.of_list old_list, Array.of_list new_list in - let (n, m) = Array.length old_arr, Array.length new_arr in - + let (old_arr, new_arr) = (Array.of_list old_list, Array.of_list new_list) in + let (n, m) = (Array.length old_arr, Array.length new_arr) in (* The shortest edit sequence problem is equivalent to finding the longest common subsequence, or equivalently the longest trace *) let longest_trace max_distance : (int * int) list option = (* adds the match points in this snake to the trace and produces the endpoint along with the new trace *) let rec follow_snake x y trace = - if x >= n || y >= m then x, y, trace else - if old_arr.(x) == new_arr.(y) then follow_snake (x + 1) (y + 1) ((x,y) :: trace) else - x, y, trace in - - let rec build_trace dist frontier visited = - if Hashtbl.mem visited (n, m) then () else - let new_frontier = Queue.create () in - if dist > max_distance then () else - - let follow_trace (x, y) : unit = - let trace = Hashtbl.find visited (x,y) in - let x_old, y_old, advance_in_old_list = follow_snake (x + 1) y trace in - let x_new, y_new, advance_in_new_list = follow_snake x (y + 1) trace in - (* if we have already visited this location, there is a shorter path to it, so we don't + if x >= n || y >= m then + (x, y, trace) + else if old_arr.(x) == new_arr.(y) then + follow_snake (x + 1) (y + 1) ((x, y) :: trace) + else + (x, y, trace) + in + let rec build_trace dist frontier visited = + if Hashtbl.mem visited (n, m) then + () + else + let new_frontier = Queue.create () in + if dist > max_distance then + () + else + let follow_trace (x, y) : unit = + let trace = Hashtbl.find visited (x, y) in + let (x_old, y_old, advance_in_old_list) = follow_snake (x + 1) y trace in + let (x_new, y_new, advance_in_new_list) = follow_snake x (y + 1) trace in + (* if we have already visited this location, there is a shorter path to it, so we don't store this trace *) - let () = if Hashtbl.mem visited (x_old, y_old) |> not then - let () = Queue.add (x_old, y_old) new_frontier in - Hashtbl.add visited (x_old, y_old) advance_in_old_list in - if Hashtbl.mem visited (x_new, y_new) |> not then - let () = Queue.add (x_new, y_new) new_frontier in - Hashtbl.add visited (x_new, y_new) advance_in_new_list in - - Queue.iter follow_trace frontier; - build_trace (dist + 1) new_frontier visited in - + let () = + if Hashtbl.mem visited (x_old, y_old) |> not then + let () = Queue.add (x_old, y_old) new_frontier in + Hashtbl.add visited (x_old, y_old) advance_in_old_list + in + if Hashtbl.mem visited (x_new, y_new) |> not then + let () = Queue.add (x_new, y_new) new_frontier in + Hashtbl.add visited 
(x_new, y_new) advance_in_new_list + in + Queue.iter follow_trace frontier; + build_trace (dist + 1) new_frontier visited + in (* Keep track of all visited string locations so we don't duplicate work *) let visited = Hashtbl.create (n * m) in let frontier = Queue.create () in (* Start with the basic trace, but follow a starting snake to a non-match point *) - let x,y,trace = follow_snake 0 0 [] in - Queue.add (x,y) frontier; - Hashtbl.add visited (x,y) trace; + let (x, y, trace) = follow_snake 0 0 [] in + Queue.add (x, y) frontier; + Hashtbl.add visited (x, y) trace; build_trace 0 frontier visited; - Hashtbl.find_opt visited (n,m) in - + Hashtbl.find_opt visited (n, m) + in (* Produces an edit script from a trace via the procedure described on page 4 of the paper. Assumes the trace is ordered by the x coordinate *) let build_script_from_trace (trace : (int * int) list) : 'a diff_result list = - (* adds inserts at position x_k for values in new_list from y_k + 1 to y_(k + 1) - 1 for k such that y_k + 1 < y_(k + 1) *) let rec add_inserts k script = @@ -99,57 +127,66 @@ let standard_list_diff (old_list : 'a list) (new_list : 'a list) : ('a diff_resu let trace_array = Array.of_list trace in let gen_inserts first last = let len = last - first in - Core_list.sub new_list ~pos:first ~len:len in - if k > trace_len - 1 then script else - (* The algorithm treats the trace as though (-1,-1) were the (-1)th match point + Core_list.sub new_list ~pos:first ~len + in + if k > trace_len - 1 then + script + else + (* The algorithm treats the trace as though (-1,-1) were the (-1)th match point in the list and (n,m) were the (len+1)th *) - let first = if k = -1 then 0 else (trace_array.(k) |> snd) + 1 in - let last = if k = trace_len - 1 then m else trace_array.(k + 1) |> snd in - if first < last then - let start = if k = -1 then -1 else trace_array.(k) |> fst in - (start, Insert (None, (gen_inserts first last))) :: script - |> add_inserts (k + 1) - else add_inserts (k + 1) script in - - let change_compare (pos1, chg1) (pos2, chg2) = - if pos1 <> pos2 then compare pos1 pos2 else - (* Orders the change types alphabetically. This puts same-indexed inserts before deletes *) - match chg1, chg2 with - | Insert _, Delete _ | Delete _, Replace _ | Insert _, Replace _ -> -1 - | Delete _, Insert _ | Replace _, Delete _ | Replace _, Insert _ -> 1 - | _ -> 0 in - + let first = + if k = -1 then + 0 + else + (trace_array.(k) |> snd) + 1 + in + let last = + if k = trace_len - 1 then + m + else + trace_array.(k + 1) |> snd + in + if first < last then + let start = + if k = -1 then + -1 + else + trace_array.(k) |> fst + in + (start, Insert (None, gen_inserts first last)) :: script |> add_inserts (k + 1) + else + add_inserts (k + 1) script + in (* Convert like-indexed deletes and inserts into a replacement. 
This relies on the fact that sorting the script with our change_compare function will order all Insert nodes before Deletes *) let rec convert_to_replace script = match script with - | [] | [_] -> script - | (i1, Insert (_, x :: [])) :: (i2, Delete y) :: t when i1 = i2 - 1 -> - (i2, Replace (y, x)) :: (convert_to_replace t) + | [] + | [_] -> + script + | (i1, Insert (_, [x])) :: (i2, Delete y) :: t when i1 = i2 - 1 -> + (i2, Replace (y, x)) :: convert_to_replace t | (i1, Insert (break, x :: rst)) :: (i2, Delete y) :: t when i1 = i2 - 1 -> - (* We are only removing the first element of the insertion *) - (i2, Replace (y, x)) :: (convert_to_replace ((i2, Insert (break, rst)) :: t)) - | h :: t -> h :: (convert_to_replace t) in - + (* We are only removing the first element of the insertion *) + (i2, Replace (y, x)) :: convert_to_replace ((i2, Insert (break, rst)) :: t) + | h :: t -> h :: convert_to_replace t + in (* Deletes are added for every element of old_list that does not have a match point with new_list *) let deletes = - List.map fst trace + Core_list.map ~f:fst trace |> ISet.of_list |> ISet.diff (ListUtils.range 0 n |> ISet.of_list) |> ISet.elements - |> List.map (fun pos -> (pos, Delete (old_arr.(pos)))) in - - deletes - |> add_inserts (-1) - |> List.sort change_compare - |> convert_to_replace in - - let open Option in - longest_trace (n + m) - >>| List.rev (* trace is built backwards for efficiency *) - >>| build_script_from_trace + |> Core_list.map ~f:(fun pos -> (pos, Delete old_arr.(pos))) + in + deletes |> add_inserts (-1) |> List.sort change_compare |> convert_to_replace + in + Option.( + longest_trace (n + m) + >>| List.rev (* trace is built backwards for efficiency *) + >>| build_script_from_trace) let list_diff = function | Trivial -> trivial_list_diff @@ -159,49 +196,119 @@ let list_diff = function * have here, the more granularly we can diff. *) type node = | Raw of string + | Comment of Loc.t Flow_ast.Comment.t + | NumberLiteralNode of Ast.NumberLiteral.t + | Literal of Loc.t Ast.Literal.t + | StringLiteral of Ast.StringLiteral.t | Statement of (Loc.t, Loc.t) Ast.Statement.t | Program of (Loc.t, Loc.t) Ast.program | Expression of (Loc.t, Loc.t) Ast.Expression.t - | Identifier of Loc.t Ast.Identifier.t | Pattern of (Loc.t, Loc.t) Ast.Pattern.t + | Params of (Loc.t, Loc.t) Ast.Function.Params.t + | Variance of Loc.t Ast.Variance.t + | Type of (Loc.t, Loc.t) Flow_ast.Type.t + | TypeParam of (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.TypeParam.t | TypeAnnotation of (Loc.t, Loc.t) Flow_ast.Type.annotation + | FunctionTypeAnnotation of (Loc.t, Loc.t) Flow_ast.Type.annotation | ClassProperty of (Loc.t, Loc.t) Flow_ast.Class.Property.t | ObjectProperty of (Loc.t, Loc.t) Flow_ast.Expression.Object.property + | TemplateLiteral of (Loc.t, Loc.t) Ast.Expression.TemplateLiteral.t + | JSXChild of (Loc.t, Loc.t) Ast.JSX.child + | JSXIdentifier of Loc.t Ast.JSX.Identifier.t (* This is needed because all of the functions assume that if they are called, there is some * difference between their arguments and they will often report that even if no difference actually * exists. This allows us to easily avoid calling the diffing function if there is no difference. 
*) let diff_if_changed f x1 x2 = - if x1 == x2 then [] else f x1 x2 + if x1 == x2 then + [] + else + f x1 x2 -let diff_if_changed_opt f opt1 opt2: node change list option = - match opt1, opt2 with - | Some x1, Some x2 -> - if x1 == x2 then Some [] else f x1 x2 - | None, None -> +let diff_if_changed_ret_opt f x1 x2 = + if x1 == x2 then Some [] - | _ -> - None + else + f x1 x2 + +let diff_if_changed_opt f opt1 opt2 : node change list option = + match (opt1, opt2) with + | (Some x1, Some x2) -> + if x1 == x2 then + Some [] + else + f x1 x2 + | (None, None) -> Some [] + | _ -> None + +let diff_or_add_opt f add opt1 opt2 : node change list option = + match (opt1, opt2) with + | (Some x1, Some x2) -> + if x1 == x2 then + Some [] + else + f x1 x2 + | (None, None) -> Some [] + | (None, Some x2) -> Some (add x2) + | _ -> None (* This is needed if the function f takes its arguments as options and produces an optional node change list (for instance, type annotation). In this case it is not sufficient just to give up and return None if only one of the options is present *) -let diff_if_changed_opt_arg f opt1 opt2: node change list option = - match opt1, opt2 with - | None, None -> Some [] - | Some x1, Some x2 when x1 == x2 -> Some [] +let _diff_if_changed_opt_arg f opt1 opt2 : node change list option = + match (opt1, opt2) with + | (None, None) -> Some [] + | (Some x1, Some x2) when x1 == x2 -> Some [] | _ -> f opt1 opt2 (* This is needed if the function for the given node returns a node change * list instead of a node change list option (for instance, expression) *) -let diff_if_changed_nonopt_fn f opt1 opt2: node change list option = - match opt1, opt2 with - | Some x1, Some x2 -> - if x1 == x2 then Some [] else Some (f x1 x2) - | None, None -> - Some [] - | _ -> - None +let diff_if_changed_nonopt_fn f opt1 opt2 : node change list option = + match (opt1, opt2) with + | (Some x1, Some x2) -> + if x1 == x2 then + Some [] + else + Some (f x1 x2) + | (None, None) -> Some [] + | _ -> None + +(* Is an RHS expression an import expression? *) +let is_import_expr (expr : (Loc.t, Loc.t) Ast.Expression.t) = + Ast.Expression.Call.( + match expr with + | (_, Ast.Expression.Import _) -> true + | ( _, + Ast.Expression.Call + { callee = (_, Ast.Expression.Identifier (_, { Ast.Identifier.name; comments = _ })); _ } + ) -> + name = "require" + | _ -> false) + +(* Guess whether a statement is an import or not *) +let is_import_or_directive_stmt (stmt : (Loc.t, Loc.t) Ast.Statement.t) = + Ast.Statement.Expression.( + Ast.Statement.VariableDeclaration.( + Ast.Statement.VariableDeclaration.Declarator.( + match stmt with + | (_, Ast.Statement.Expression { directive = Some _; _ }) + | (_, Ast.Statement.ImportDeclaration _) -> + true + | (_, Ast.Statement.Expression { expression = expr; _ }) -> is_import_expr expr + | (_, Ast.Statement.VariableDeclaration { declarations = decs; _ }) -> + List.exists + (fun (_, { init; _ }) -> Option.value_map init ~default:false ~f:is_import_expr) + decs + | _ -> false))) + +let partition_imports (stmts : (Loc.t, Loc.t) Ast.Statement.t list) = + let rec partition_import_helper rec_stmts top = + match rec_stmts with + | [] -> (List.rev top, []) + | hd :: tl when is_import_or_directive_stmt hd -> partition_import_helper tl (hd :: top) + | _ -> (List.rev top, rec_stmts) + in + partition_import_helper stmts [] (* Outline: * - There is a function for every AST node that we want to be able to recurse into. 
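(* Editor's illustrative aside, not part of the patch above: a minimal,
 * self-contained sketch of the prefix-partitioning pattern that
 * `partition_imports` uses, and that the next hunk's
 * `toplevel_statement_list` relies on to diff the leading imports and the
 * body independently. Everything here is hypothetical stand-in code:
 * `partition_prefix` and `is_import` are invented names, and plain strings
 * stand in for AST statements and for `is_import_or_directive_stmt`. *)
let partition_prefix (is_import : string -> bool) (stmts : string list) :
    string list * string list =
  (* Peel off the leading run of elements that satisfy the predicate,
   * preserving their order, and return (imports, rest). *)
  let rec go acc = function
    | hd :: tl when is_import hd -> go (hd :: acc) tl
    | rest -> (List.rev acc, rest)
  in
  go [] stmts

(* Usage sketch: only the leading run is split off, so an import-looking
 * statement that appears after the body has started stays in the body,
 * mirroring how the real partition stops at the first non-import. *)
let () =
  let is_import s = String.length s >= 6 && String.sub s 0 6 = "import" in
  let (imports, body) =
    partition_prefix is_import
      ["import a"; "import b"; "const x = 1"; "import late"]
  in
  assert (imports = ["import a"; "import b"]);
  assert (body = ["const x = 1"; "import late"])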
@@ -223,765 +330,1658 @@ let diff_if_changed_nonopt_fn f opt1 opt2: node change list option = * would not know what Loc.t to give to the insertion. *) (* Entry point *) -let program (algo : diff_algorithm) - (program1: (Loc.t, Loc.t) Ast.program) - (program2: (Loc.t, Loc.t) Ast.program) : node change list = - +let program + (algo : diff_algorithm) + (program1 : (Loc.t, Loc.t) Ast.program) + (program2 : (Loc.t, Loc.t) Ast.program) : node change list = + (* Assuming a diff has already been generated, recurse into it. + This function is passed the old_list and index_offset parameters + in order to correctly insert new statements WITHOUT assuming that + the entire statement list is being processed with a single call + to this function. When an Insert diff is detected, we need to find + a Loc.t that represents where in the original program they will be inserted. + To do so, we find the statement in the old statement list that they will + be inserted after, and get its end_loc. The index_offset parameter represents how + many statements in the old statement list are NOT represented in this diff-- + for example, if we separated the statement lists into a list of initial imports + and a list of body statements and generated diffs for them separately + (cf. toplevel_statement_list), when recursing into the body diffs, the + length of the imports in the old statement list should be passed in to + index_offset so that insertions into the body section are given the right index. + *) + let recurse_into_diff + (type a b) + (f : a -> a -> b change list option) + (trivial : a -> (Loc.t * b) option) + (old_list : a list) + (index_offset : int) + (diffs : a diff_result list) : b change list option = + Option.( + let recurse_into_change = function + | (_, Replace (x1, x2)) -> f x1 x2 + | (index, Insert (break, lst)) -> + let index = index + index_offset in + let loc = + if List.length old_list = 0 then + None + else if + (* To insert at the start of the list, insert before the first element *) + index = -1 + then + List.hd old_list |> trivial >>| fst >>| Loc.start_loc + (* Otherwise insert it after the current element *) + else + List.nth old_list index |> trivial >>| fst >>| Loc.end_loc + in + Core_list.map ~f:trivial lst + |> all + >>| Core_list.map ~f:snd (* drop the loc *) + >>| (fun x -> Insert (break, x)) + |> both loc + >>| Core_list.return + | (_, Delete x) -> trivial x >>| (fun (loc, y) -> (loc, Delete y)) >>| Core_list.return + in + let recurse_into_changes = + Core_list.map ~f:recurse_into_change %> all %> map ~f:List.concat + in + recurse_into_changes diffs) + in (* Runs `list_diff` and then recurses into replacements (using `f`) to get more granular diffs. 
For inserts and deletes, it uses `trivial` to produce a Loc.t and a b for the change *) - let diff_and_recurse (type a) (type b) - (f: a -> a -> b change list option) + let diff_and_recurse + (type a b) + (f : a -> a -> b change list option) (trivial : a -> (Loc.t * b) option) - (old_list: a list) - (new_list: a list) - : b change list option = - let open Option in - - let recurse_into_change = function - | _, Replace (x1, x2) -> f x1 x2 - | index, Insert (break, lst) -> - let loc = - if List.length old_list = 0 then None else - (* To insert at the start of the list, insert before the first element *) - if index = -1 then List.hd old_list |> trivial >>| fst >>| Loc.start_loc - (* Otherwise insert it after the current element *) - else List.nth old_list index |> trivial >>| fst >>| Loc.end_loc in - List.map trivial lst - |> all - >>| List.map snd (* drop the loc *) - >>| (fun x -> Insert (break, x)) - |> both loc - >>| Core_list.return - | _, Delete x -> - trivial x - >>| (fun (loc, y) -> loc, Delete y) - >>| Core_list.return in - - let recurse_into_changes = - List.map recurse_into_change - %> all - %> map ~f:List.concat in - - list_diff algo old_list new_list - >>= recurse_into_changes in - - (* diff_and_recurse for when there is no way to get a trivial transfomation from a to b*) + (old_list : a list) + (new_list : a list) : b change list option = + Option.(list_diff algo old_list new_list >>= recurse_into_diff f trivial old_list 0) + in + (* Same as diff_and_recurse but takes in a function `f` that doesn't return an option *) + let diff_and_recurse_nonopt (type a b) (f : a -> a -> b change list) = + diff_and_recurse (fun x y -> f x y |> Option.return) + in + (* diff_and_recurse for when there is no way to get a trivial transformation from a to b*) let diff_and_recurse_no_trivial f = diff_and_recurse f (fun _ -> None) in - - let rec program' (program1: (Loc.t, Loc.t) Ast.program) (program2: (Loc.t, Loc.t) Ast.program) : node change list = + let diff_and_recurse_nonopt_no_trivial f = diff_and_recurse_nonopt f (fun _ -> None) in + let join_diff_list = Some [] |> List.fold_left (Option.map2 ~f:List.append) in + let rec syntax_opt + (loc : Loc.t) + (s1 : (Loc.t, unit) Ast.Syntax.t option) + (s2 : (Loc.t, unit) Ast.Syntax.t option) = + let add_comments { Ast.Syntax.leading; trailing; internal = _ } = + Loc.( + let fold_comment acc cmt = Comment cmt :: acc in + let leading = List.fold_left fold_comment [] leading in + let leading_inserts = + match leading with + | [] -> [] + | leading -> [({ loc with _end = loc.start }, Insert (None, List.rev leading))] + in + let trailing = List.fold_left fold_comment [] trailing in + let trailing_inserts = + match trailing with + | [] -> [] + | trailing -> [({ loc with start = loc._end }, Insert (None, List.rev trailing))] + in + leading_inserts @ trailing_inserts) + in + diff_or_add_opt syntax add_comments s1 s2 + and syntax (s1 : (Loc.t, unit) Ast.Syntax.t) (s2 : (Loc.t, unit) Ast.Syntax.t) = + let { Ast.Syntax.leading = leading1; trailing = trailing1; internal = _ } = s1 in + let { Ast.Syntax.leading = leading2; trailing = trailing2; internal = _ } = s2 in + let add_comment ((loc, _) as cmt) = Some (loc, Comment cmt) in + let leading = diff_and_recurse comment add_comment leading1 leading2 in + let trailing = diff_and_recurse comment add_comment trailing1 trailing2 in + match (leading, trailing) with + | (Some l, Some t) -> Some (l @ t) + | (Some l, None) -> Some l + | (None, Some t) -> Some t + | (None, None) -> None + and comment + ((loc1, comment1) as 
cmt1 : Loc.t Ast.Comment.t) + ((_loc2, comment2) as cmt2 : Loc.t Ast.Comment.t) = + Ast.Comment.( + match (comment1, comment2) with + | (Line _, Block _) -> Some [(loc1, Replace (Comment cmt1, Comment cmt2))] + | (Block _, Line _) -> Some [(loc1, Replace (Comment cmt1, Comment cmt2))] + | (Line c1, Line c2) + | (Block c1, Block c2) + when not (String.equal c1 c2) -> + Some [(loc1, Replace (Comment cmt1, Comment cmt2))] + | _ -> None) + and program' (program1 : (Loc.t, Loc.t) Ast.program) (program2 : (Loc.t, Loc.t) Ast.program) : + node change list = let (program_loc, statements1, _) = program1 in let (_, statements2, _) = program2 in - statement_list statements1 statements2 + toplevel_statement_list statements1 statements2 |> Option.value ~default:[(program_loc, Replace (Program program1, Program program2))] - - and statement_list (stmts1: (Loc.t, Loc.t) Ast.Statement.t list) (stmts2: (Loc.t, Loc.t) Ast.Statement.t list) + and toplevel_statement_list + (stmts1 : (Loc.t, Loc.t) Ast.Statement.t list) (stmts2 : (Loc.t, Loc.t) Ast.Statement.t list) + = + Option.( + let (imports1, body1) = partition_imports stmts1 in + let (imports2, body2) = partition_imports stmts2 in + let imports_diff = list_diff algo imports1 imports2 in + let body_diff = list_diff algo body1 body2 in + let whole_program_diff = list_diff algo stmts1 stmts2 in + let split_len = + all [imports_diff; body_diff] + >>| Core_list.map ~f:List.length + >>| List.fold_left ( + ) 0 + |> value ~default:max_int + in + let whole_len = value_map ~default:max_int whole_program_diff ~f:List.length in + if split_len > whole_len then + whole_program_diff + >>= recurse_into_diff + (fun x y -> Some (statement x y)) + (fun s -> Some (Ast_utils.loc_of_statement s, Statement s)) + stmts1 + 0 + else + imports_diff + >>= recurse_into_diff + (fun x y -> Some (statement x y)) + (fun s -> Some (Ast_utils.loc_of_statement s, Statement s)) + stmts1 + 0 + >>= fun import_recurse -> + body_diff + >>= ( List.length imports1 + |> recurse_into_diff + (fun x y -> Some (statement x y)) + (fun s -> Some (Ast_utils.loc_of_statement s, Statement s)) + stmts1 ) + >>| (fun body_recurse -> import_recurse @ body_recurse)) + and statement_list + (stmts1 : (Loc.t, Loc.t) Ast.Statement.t list) (stmts2 : (Loc.t, Loc.t) Ast.Statement.t list) : node change list option = - diff_and_recurse (fun x y -> Some (statement x y)) - (fun s -> Some (Ast_utils.loc_of_statement s, Statement s)) stmts1 stmts2 - - and statement (stmt1: (Loc.t, Loc.t) Ast.Statement.t) (stmt2: (Loc.t, Loc.t) Ast.Statement.t) - : node change list = - let open Ast.Statement in - let changes = match stmt1, stmt2 with - | (_, VariableDeclaration var1), (_, VariableDeclaration var2) -> - variable_declaration var1 var2 - | (_, FunctionDeclaration func1), (_, FunctionDeclaration func2) -> - function_declaration func1 func2 - | (_, ClassDeclaration class1), (_, ClassDeclaration class2) -> - class_ class1 class2 - | (_, Ast.Statement.If if1), (_, Ast.Statement.If if2) -> - if_statement if1 if2 - | (_, Ast.Statement.Expression expr1), (_, Ast.Statement.Expression expr2) -> - expression_statement expr1 expr2 - | (_, Ast.Statement.Block block1), (_, Ast.Statement.Block block2) -> - block block1 block2 - | (_, Ast.Statement.For for1), (_, Ast.Statement.For for2) -> - for_statement for1 for2 - | (_, Ast.Statement.ForIn for_in1), (_, Ast.Statement.ForIn for_in2) -> - for_in_statement for_in1 for_in2 - | (_, Ast.Statement.While while1), (_, Ast.Statement.While while2) -> - Some (while_statement while1 while2) - | (_, 
Ast.Statement.ForOf for_of1), (_, Ast.Statement.ForOf for_of2) -> - for_of_statement for_of1 for_of2 - | (_, Ast.Statement.DoWhile do_while1), (_, Ast.Statement.DoWhile do_while2) -> - Some (do_while_statement do_while1 do_while2) - | (_, Ast.Statement.Switch switch1), (_, Ast.Statement.Switch switch2) -> - switch_statement switch1 switch2 - | (_, Ast.Statement.Return return1), (_, Ast.Statement.Return return2) -> - return_statement return1 return2 - | (_, Ast.Statement.With with1), (_, Ast.Statement.With with2) -> - Some (with_statement with1 with2) - | (_, Ast.Statement.ExportDefaultDeclaration export1), - (_, Ast.Statement.ExportDefaultDeclaration export2) -> - export_default_declaration export1 export2 - | (_, Ast.Statement.DeclareExportDeclaration export1), - (_, Ast.Statement.DeclareExportDeclaration export2) -> - declare_export export1 export2 - | (_, Ast.Statement.ExportNamedDeclaration export1), - (_, Ast.Statement.ExportNamedDeclaration export2) -> - export_named_declaration export1 export2 - | _, _ -> - None - in - let old_loc = Ast_utils.loc_of_statement stmt1 in - Option.value changes ~default:[(old_loc, Replace (Statement stmt1, Statement stmt2))] - + diff_and_recurse_nonopt + statement + (fun s -> Some (Ast_utils.loc_of_statement s, Statement s)) + stmts1 + stmts2 + and statement (stmt1 : (Loc.t, Loc.t) Ast.Statement.t) (stmt2 : (Loc.t, Loc.t) Ast.Statement.t) : + node change list = + Ast.Statement.( + let changes = + match (stmt1, stmt2) with + | ((_, VariableDeclaration var1), (_, VariableDeclaration var2)) -> + variable_declaration var1 var2 + | ((_, FunctionDeclaration func1), (_, FunctionDeclaration func2)) -> + function_declaration func1 func2 + | ((_, ClassDeclaration class1), (_, ClassDeclaration class2)) -> class_ class1 class2 + | ((_, InterfaceDeclaration intf1), (_, InterfaceDeclaration intf2)) -> + interface intf1 intf2 + | ((loc, If if1), (_, If if2)) -> if_statement loc if1 if2 + | ((_, Ast.Statement.Expression expr1), (_, Ast.Statement.Expression expr2)) -> + expression_statement expr1 expr2 + | ((_, Block block1), (_, Block block2)) -> block block1 block2 + | ((_, For for1), (_, For for2)) -> for_statement for1 for2 + | ((_, ForIn for_in1), (_, ForIn for_in2)) -> for_in_statement for_in1 for_in2 + | ((_, While while1), (_, While while2)) -> Some (while_statement while1 while2) + | ((_, ForOf for_of1), (_, ForOf for_of2)) -> for_of_statement for_of1 for_of2 + | ((loc, DoWhile do_while1), (_, DoWhile do_while2)) -> + Some (do_while_statement loc do_while1 do_while2) + | ((_, Switch switch1), (_, Switch switch2)) -> switch_statement switch1 switch2 + | ((loc, Return return1), (_, Return return2)) -> return_statement loc return1 return2 + | ((_, Labeled labeled1), (_, Labeled labeled2)) -> + Some (labeled_statement labeled1 labeled2) + | ((_, With with1), (_, With with2)) -> Some (with_statement with1 with2) + | ((_, ExportDefaultDeclaration export1), (_, ExportDefaultDeclaration export2)) -> + export_default_declaration export1 export2 + | ((_, DeclareExportDeclaration export1), (_, DeclareExportDeclaration export2)) -> + declare_export export1 export2 + | ((_, ImportDeclaration import1), (_, ImportDeclaration import2)) -> + import_declaration import1 import2 + | ((_, ExportNamedDeclaration export1), (_, ExportNamedDeclaration export2)) -> + export_named_declaration export1 export2 + | ((loc, Try try1), (_, Try try2)) -> try_ loc try1 try2 + | ((_, Throw throw1), (_, Throw throw2)) -> Some (throw_statement throw1 throw2) + | ((_, DeclareTypeAlias d_t_alias1), 
(_, DeclareTypeAlias d_t_alias2)) -> + type_alias d_t_alias1 d_t_alias2 + | ((_, TypeAlias t_alias1), (_, TypeAlias t_alias2)) -> type_alias t_alias1 t_alias2 + | ((_, OpaqueType o_type1), (_, OpaqueType o_type2)) -> opaque_type o_type1 o_type2 + | (_, _) -> None + in + let old_loc = Ast_utils.loc_of_statement stmt1 in + Option.value changes ~default:[(old_loc, Replace (Statement stmt1, Statement stmt2))]) and export_named_declaration export1 export2 = - let open Ast.Statement.ExportNamedDeclaration in - let { declaration = decl1; specifiers = specs1; source = src1; exportKind = kind1 } = export1 in - let { declaration = decl2; specifiers = specs2; source = src2; exportKind = kind2 } = export2 in - if src1 != src2 || kind1 != kind2 then None else - let decls = diff_if_changed_nonopt_fn statement decl1 decl2 in - let specs = diff_if_changed_opt export_named_declaration_specifier specs1 specs2 in - Option.(all [decls; specs] >>| List.concat) - - and export_default_declaration (export1 : (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) - (export2 : (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) - : node change list option = - let open Ast.Statement.ExportDefaultDeclaration in - let { declaration = declaration1; default = default1 } = export1 in - let { declaration = declaration2; default = default2 } = export2 in - if default1 != default2 then None else - match declaration1, declaration2 with - | Declaration s1, Declaration s2 -> statement s1 s2 |> Option.return - | Ast.Statement.ExportDefaultDeclaration.Expression e1, - Ast.Statement.ExportDefaultDeclaration.Expression e2 -> - expression e1 e2 |> Option.return - | _ -> None - - and export_specifier (spec1 : Loc.t Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) - (spec2 : Loc.t Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) - : node change list option = - let open Ast.Statement.ExportNamedDeclaration.ExportSpecifier in - let _, { local = local1; exported = exported1 } = spec1 in - let _, { local = local2; exported = exported2 } = spec2 in - let locals = diff_if_changed identifier local1 local2 in - let exporteds = diff_if_changed_nonopt_fn identifier exported1 exported2 in - Option.(all [return locals; exporteds] >>| List.concat) - + Ast.Statement.ExportNamedDeclaration.( + let { declaration = decl1; specifiers = specs1; source = src1; exportKind = kind1 } = + export1 + in + let { declaration = decl2; specifiers = specs2; source = src2; exportKind = kind2 } = + export2 + in + if src1 != src2 || kind1 != kind2 then + None + else + let decls = diff_if_changed_nonopt_fn statement decl1 decl2 in + let specs = diff_if_changed_opt export_named_declaration_specifier specs1 specs2 in + join_diff_list [decls; specs]) + and export_default_declaration + (export1 : (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) + (export2 : (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) : node change list option + = + Ast.Statement.ExportDefaultDeclaration.( + let { declaration = declaration1; default = default1 } = export1 in + let { declaration = declaration2; default = default2 } = export2 in + if default1 != default2 then + None + else + match (declaration1, declaration2) with + | (Declaration s1, Declaration s2) -> statement s1 s2 |> Option.return + | ( Ast.Statement.ExportDefaultDeclaration.Expression e1, + Ast.Statement.ExportDefaultDeclaration.Expression e2 ) -> + expression e1 e2 |> Option.return + | _ -> None) + and export_specifier + (spec1 : Loc.t Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) + 
(spec2 : Loc.t Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) : + node change list option = + Ast.Statement.ExportNamedDeclaration.ExportSpecifier.( + let (_, { local = local1; exported = exported1 }) = spec1 in + let (_, { local = local2; exported = exported2 }) = spec2 in + let locals = diff_if_changed identifier local1 local2 |> Option.return in + let exporteds = diff_if_changed_nonopt_fn identifier exported1 exported2 in + join_diff_list [locals; exporteds]) and export_named_declaration_specifier (specs1 : Loc.t Ast.Statement.ExportNamedDeclaration.specifier) (specs2 : Loc.t Ast.Statement.ExportNamedDeclaration.specifier) = - let open Ast.Statement.ExportNamedDeclaration in - match specs1, specs2 with - | ExportSpecifiers es1, ExportSpecifiers es2 -> - diff_and_recurse_no_trivial export_specifier es1 es2 - | ExportBatchSpecifier (_, ebs1), ExportBatchSpecifier (_, ebs2) -> - diff_if_changed_nonopt_fn identifier ebs1 ebs2 - | _ -> None - - and declare_export (export1 : (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.t) - (export2 : (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.t) - : node change list option = - let open Ast.Statement.DeclareExportDeclaration in - let { default = default1; declaration = decl1; specifiers = specs1; source = src1 } = export1 in - let { default = default2; declaration = decl2; specifiers = specs2; source = src2 } = export2 in - if default1 != default2 || src1 != src2 || decl1 != decl2 then None else - diff_if_changed_opt export_named_declaration_specifier specs1 specs2 - + Ast.Statement.ExportNamedDeclaration.( + match (specs1, specs2) with + | (ExportSpecifiers es1, ExportSpecifiers es2) -> + diff_and_recurse_no_trivial export_specifier es1 es2 + | (ExportBatchSpecifier (_, ebs1), ExportBatchSpecifier (_, ebs2)) -> + diff_if_changed_nonopt_fn identifier ebs1 ebs2 + | _ -> None) + and declare_export + (export1 : (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.t) + (export2 : (Loc.t, Loc.t) Ast.Statement.DeclareExportDeclaration.t) : node change list option + = + Ast.Statement.DeclareExportDeclaration.( + let { default = default1; declaration = decl1; specifiers = specs1; source = src1 } = + export1 + in + let { default = default2; declaration = decl2; specifiers = specs2; source = src2 } = + export2 + in + if default1 != default2 || src1 != src2 || decl1 != decl2 then + None + else + diff_if_changed_opt export_named_declaration_specifier specs1 specs2) + and import_default_specifier + (ident1 : (Loc.t, Loc.t) Ast.Identifier.t option) + (ident2 : (Loc.t, Loc.t) Ast.Identifier.t option) : node change list option = + diff_if_changed_nonopt_fn identifier ident1 ident2 + and import_namespace_specifier + (ident1 : (Loc.t, Loc.t) Ast.Identifier.t) (ident2 : (Loc.t, Loc.t) Ast.Identifier.t) : + node change list option = + diff_if_changed identifier ident1 ident2 |> Option.return + and import_named_specifier + (nm_spec1 : (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.named_specifier) + (nm_spec2 : (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.named_specifier) : + node change list option = + Ast.Statement.ImportDeclaration.( + let { kind = kind1; local = local1; remote = remote1 } = nm_spec1 in + let { kind = kind2; local = local2; remote = remote2 } = nm_spec2 in + if kind1 != kind2 then + None + else + let locals = diff_if_changed_nonopt_fn identifier local1 local2 in + let remotes = diff_if_changed identifier remote1 remote2 |> Option.return in + join_diff_list [locals; remotes]) + and import_specifier + (spec1 : (Loc.t, Loc.t) 
Ast.Statement.ImportDeclaration.specifier) + (spec2 : (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.specifier) : node change list option + = + Ast.Statement.ImportDeclaration.( + match (spec1, spec2) with + | (ImportNamedSpecifiers nm_specs1, ImportNamedSpecifiers nm_specs2) -> + diff_and_recurse_no_trivial import_named_specifier nm_specs1 nm_specs2 + | (ImportNamespaceSpecifier (_, ident1), ImportNamespaceSpecifier (_, ident2)) -> + diff_if_changed_ret_opt import_namespace_specifier ident1 ident2 + | _ -> None) + and import_declaration + (import1 : (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.t) + (import2 : (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.t) : node change list option = + Ast.Statement.ImportDeclaration.( + let { importKind = imprt_knd1; source = src1; default = dflt1; specifiers = spec1 } = + import1 + in + let { importKind = imprt_knd2; source = src2; default = dflt2; specifiers = spec2 } = + import2 + in + if imprt_knd1 != imprt_knd2 || src1 != src2 then + None + else + let dflt_diff = import_default_specifier dflt1 dflt2 in + let spec_diff = diff_if_changed_opt import_specifier spec1 spec2 in + join_diff_list [dflt_diff; spec_diff]) and function_declaration func1 func2 = function_ func1 func2 - - and function_ (func1: (Loc.t, Loc.t) Ast.Function.t) (func2: (Loc.t, Loc.t) Ast.Function.t) - : node change list option = - let open Ast.Function in - let { - id = id1; params = params1; body = body1; async = async1; generator = generator1; - expression = expression1; predicate = predicate1; return = return1; tparams = tparams1; - } = func1 in - let { - id = id2; params = params2; body = body2; async = async2; generator = generator2; - expression = expression2; predicate = predicate2; return = return2; tparams = tparams2; - } = func2 in - - if id1 != id2 || params1 != params2 || (* body handled below *) async1 != async2 - || generator1 != generator2 || expression1 != expression2 || predicate1 != predicate2 - || tparams1 != tparams2 - then - None - else - let fnbody = diff_if_changed_opt function_body_any (Some body1) (Some body2) in - let returns = diff_if_changed return_type_annotation return1 return2 |> Option.return in - Option.(all [fnbody; returns] >>| List.concat) - - and function_body_any (body1 : (Loc.t, Loc.t) Ast.Function.body) - (body2 : (Loc.t, Loc.t) Ast.Function.body) - : node change list option = - let open Ast.Function in - match body1, body2 with - | BodyExpression e1, BodyExpression e2 -> expression e1 e2 |> Option.return - | BodyBlock (_, block1), BodyBlock (_, block2) -> block block1 block2 - | _ -> None - - and return_type_annotation (return1: (Loc.t, Loc.t) Ast.Function.return) - (return2: (Loc.t, Loc.t) Ast.Function.return) - : node change list = - let open Ast.Function in - match return1, return2 with - | Missing _, Missing _ -> [] - | Missing loc1, Available (loc2, typ) -> [loc1, Insert (None, [TypeAnnotation (loc2, typ)])] - | Available (loc1, typ), Missing _ -> [loc1, Delete (TypeAnnotation (loc1, typ))] - | Available (loc1, typ1), Available (loc2, typ2) -> - [loc1, Replace (TypeAnnotation (loc1, typ1), TypeAnnotation (loc2, typ2))] - - and variable_declarator (decl1: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) (decl2: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) - : node change list option = - let open Ast.Statement.VariableDeclaration.Declarator in - let (_, { id = id1; init = init1 }) = decl1 in - let (_, { id = id2; init = init2 }) = decl2 in - if id1 != id2 then - Some (pattern id1 id2) - else - 
diff_if_changed_nonopt_fn expression init1 init2 - - and variable_declaration (var1: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) (var2: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) - : node change list option = - let open Ast.Statement.VariableDeclaration in - let { declarations = declarations1; kind = kind1 } = var1 in - let { declarations = declarations2; kind = kind2 } = var2 in - if kind1 != kind2 then - None - else if declarations1 != declarations2 then - diff_and_recurse_no_trivial variable_declarator declarations1 declarations2 - else - Some [] - - and if_statement (if1: (Loc.t, Loc.t) Ast.Statement.If.t) (if2: (Loc.t, Loc.t) Ast.Statement.If.t) + and function_ + ?(is_arrow = false) + (func1 : (Loc.t, Loc.t) Ast.Function.t) + (func2 : (Loc.t, Loc.t) Ast.Function.t) : node change list option = + Ast.Function.( + let { + id = id1; + params = params1; + body = body1; + async = async1; + generator = generator1; + predicate = predicate1; + return = return1; + tparams = tparams1; + sig_loc = _; + } = + func1 + in + let { + id = id2; + params = params2; + body = body2; + async = async2; + generator = generator2; + predicate = predicate2; + return = return2; + tparams = tparams2; + sig_loc = _; + } = + func2 + in + if async1 != async2 || generator1 != generator2 || predicate1 != predicate2 then + None + else + let id = diff_if_changed_nonopt_fn identifier id1 id2 in + let tparams = diff_if_changed_opt type_parameter_declaration tparams1 tparams2 in + let params = diff_if_changed_ret_opt function_params params1 params2 in + let params = + match (is_arrow, params1, params2, params) with + | ( true, + (l, { Params.params = [_p1]; rest = None }), + (_, { Params.params = [_p2]; rest = None }), + Some [_] ) -> + (* reprint the parameter if it's the single parameter of a lambda to add () *) + Some [(l, Replace (Params params1, Params params2))] + | _ -> params + in + let returns = diff_if_changed type_annotation_hint return1 return2 |> Option.return in + let fnbody = diff_if_changed_ret_opt function_body_any body1 body2 in + join_diff_list [id; tparams; params; returns; fnbody]) + and function_params + (params1 : (Loc.t, Loc.t) Ast.Function.Params.t) + (params2 : (Loc.t, Loc.t) Ast.Function.Params.t) : node change list option = + Ast.Function.Params.( + let (_, { params = param_lst1; rest = rest1 }) = params1 in + let (_, { params = param_lst2; rest = rest2 }) = params2 in + let params_diff = diff_and_recurse_no_trivial function_param param_lst1 param_lst2 in + let rest_diff = diff_if_changed_nonopt_fn function_rest_param rest1 rest2 in + join_diff_list [params_diff; rest_diff]) + and function_param + (param1 : (Loc.t, Loc.t) Ast.Function.Param.t) (param2 : (Loc.t, Loc.t) Ast.Function.Param.t) : node change list option = - let open Ast.Statement.If in - let { - test = test1; - consequent = consequent1; - alternate = alternate1 - } = if1 in - let { - test = test2; - consequent = consequent2; - alternate = alternate2 - } = if2 in - - let expr_diff = Some (diff_if_changed expression test1 test2) in - let cons_diff = Some (diff_if_changed statement consequent1 consequent2) in - let alt_diff = match alternate1, alternate2 with - | None, None -> Some ([]) - | Some _, None - | None, Some _ -> None - | Some a1, Some a2 -> Some (diff_if_changed statement a1 a2) in - let result_list = [expr_diff; cons_diff; alt_diff] in - Option.all result_list |> Option.map ~f:List.concat - - and with_statement (with1: (Loc.t, Loc.t) Ast.Statement.With.t) - (with2: (Loc.t, Loc.t) Ast.Statement.With.t) - : 
node change list = - let open Ast.Statement.With in - let {_object = _object1; body = body1;} = with1 in - let {_object = _object2; body = body2;} = with2 in + let (_, { Ast.Function.Param.argument = arg1; default = def1 }) = param1 in + let (_, { Ast.Function.Param.argument = arg2; default = def2 }) = param2 in + let param_diff = diff_if_changed function_param_pattern arg1 arg2 |> Option.return in + let default_diff = diff_if_changed_nonopt_fn expression def1 def2 in + join_diff_list [param_diff; default_diff] + and function_body_any + (body1 : (Loc.t, Loc.t) Ast.Function.body) (body2 : (Loc.t, Loc.t) Ast.Function.body) : + node change list option = + Ast.Function.( + match (body1, body2) with + | (BodyExpression e1, BodyExpression e2) -> expression e1 e2 |> Option.return + | (BodyBlock (_, block1), BodyBlock (_, block2)) -> block block1 block2 + | _ -> None) + and variable_declarator + (decl1 : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) + (decl2 : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) : + node change list option = + Ast.Statement.VariableDeclaration.Declarator.( + let (_, { id = id1; init = init1 }) = decl1 in + let (_, { id = id2; init = init2 }) = decl2 in + let id_diff = diff_if_changed pattern id1 id2 |> Option.return in + let expr_diff = diff_if_changed_nonopt_fn expression init1 init2 in + join_diff_list [id_diff; expr_diff]) + and variable_declaration + (var1 : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) + (var2 : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) : node change list option = + Ast.Statement.VariableDeclaration.( + let { declarations = declarations1; kind = kind1 } = var1 in + let { declarations = declarations2; kind = kind2 } = var2 in + if kind1 != kind2 then + None + else if declarations1 != declarations2 then + diff_and_recurse_no_trivial variable_declarator declarations1 declarations2 + else + Some []) + and if_statement + loc (if1 : (Loc.t, Loc.t) Ast.Statement.If.t) (if2 : (Loc.t, Loc.t) Ast.Statement.If.t) : + node change list option = + Ast.Statement.If.( + let { test = test1; consequent = consequent1; alternate = alternate1; comments = comments1 } + = + if1 + in + let { test = test2; consequent = consequent2; alternate = alternate2; comments = comments2 } + = + if2 + in + let expr_diff = Some (diff_if_changed expression test1 test2) in + let cons_diff = Some (diff_if_changed statement consequent1 consequent2) in + let alt_diff = + match (alternate1, alternate2) with + | (None, None) -> Some [] + | (Some _, None) + | (None, Some _) -> + None + | (Some a1, Some a2) -> Some (diff_if_changed statement a1 a2) + in + let comments = syntax_opt loc comments1 comments2 in + join_diff_list [comments; expr_diff; cons_diff; alt_diff]) + and with_statement + (with1 : (Loc.t, Loc.t) Ast.Statement.With.t) (with2 : (Loc.t, Loc.t) Ast.Statement.With.t) : + node change list = + Ast.Statement.With.( + let { _object = _object1; body = body1 } = with1 in + let { _object = _object2; body = body2 } = with2 in let _object_diff = diff_if_changed expression _object1 _object2 in - let body_diff = diff_if_changed statement body1 body2 in - _object_diff @ body_diff - - and class_ (class1: (Loc.t, Loc.t) Ast.Class.t) (class2: (Loc.t, Loc.t) Ast.Class.t) = - let open Ast.Class in - let { - id=id1; body=body1; tparams=tparams1; extends=extends1; - implements=implements1; classDecorators=classDecorators1; - } = class1 in - let { - id=id2; body=body2; tparams=tparams2; extends=extends2; - implements=implements2; 
classDecorators=classDecorators2; - } = class2 in - if id1 != id2 || (* body handled below *) tparams1 != tparams2 || extends1 != extends2 || - implements1 != implements2 || classDecorators1 != classDecorators2 - then - None - else - (* just body changed *) - class_body body1 body2 - - and class_body (class_body1: (Loc.t, Loc.t) Ast.Class.Body.t) (class_body2: (Loc.t, Loc.t) Ast.Class.Body.t) - : node change list option = - let open Ast.Class.Body in - let _, { body=body1 } = class_body1 in - let _, { body=body2 } = class_body2 in - diff_and_recurse_no_trivial class_element body1 body2 - - and class_element (elem1: (Loc.t, Loc.t) Ast.Class.Body.element) (elem2: (Loc.t, Loc.t) Ast.Class.Body.element) - : node change list option = - let open Ast.Class.Body in - match elem1, elem2 with - | Method (_, m1), Method (_, m2) -> - class_method m1 m2 - | Property p1, Property p2 -> - class_property p1 p2 |> Option.return - | _ -> None (* TODO *) - + let body_diff = diff_if_changed statement body1 body2 in + _object_diff @ body_diff) + and try_ + loc (try1 : (Loc.t, Loc.t) Ast.Statement.Try.t) (try2 : (Loc.t, Loc.t) Ast.Statement.Try.t) = + Ast.Statement.Try.( + let { block = (_, block1); handler = handler1; finalizer = finalizer1; comments = comments1 } + = + try1 + in + let { block = (_, block2); handler = handler2; finalizer = finalizer2; comments = comments2 } + = + try2 + in + let comments = syntax_opt loc comments1 comments2 in + let block_diff = diff_if_changed_ret_opt block block1 block2 in + let finalizer_diff = + diff_if_changed_opt block (Option.map ~f:snd finalizer1) (Option.map ~f:snd finalizer2) + in + let handler_diff = diff_if_changed_opt handler handler1 handler2 in + join_diff_list [comments; block_diff; finalizer_diff; handler_diff]) + and handler + (hand1 : (Loc.t, Loc.t) Ast.Statement.Try.CatchClause.t) + (hand2 : (Loc.t, Loc.t) Ast.Statement.Try.CatchClause.t) = + Ast.Statement.Try.CatchClause.( + let (_, { body = (_, block1); param = param1 }) = hand1 in + let (_, { body = (_, block2); param = param2 }) = hand2 in + let body_diff = diff_if_changed_ret_opt block block1 block2 in + let param_diff = diff_if_changed_nonopt_fn pattern param1 param2 in + join_diff_list [body_diff; param_diff]) + and class_ (class1 : (Loc.t, Loc.t) Ast.Class.t) (class2 : (Loc.t, Loc.t) Ast.Class.t) = + Ast.Class.( + let { + id = id1; + body = body1; + tparams = tparams1; + extends = extends1; + implements = implements1; + classDecorators = classDecorators1; + } = + class1 + in + let { + id = id2; + body = body2; + tparams = tparams2; + extends = extends2; + implements = implements2; + classDecorators = classDecorators2; + } = + class2 + in + if + id1 != id2 + (* body handled below *) + || tparams1 != tparams2 + (* extends handled below *) + || implements1 != implements2 + || classDecorators1 != classDecorators2 + then + None + else + let extends_diff = diff_if_changed_opt class_extends extends1 extends2 in + let body_diff = diff_if_changed_ret_opt class_body body1 body2 in + join_diff_list [extends_diff; body_diff]) + and class_extends + ((_loc, extends1) : (Loc.t, Loc.t) Ast.Class.Extends.t) + ((_loc, extends2) : (Loc.t, Loc.t) Ast.Class.Extends.t) = + Ast.Class.Extends.( + let { expr = expr1; targs = targs1 } = extends1 in + let { expr = expr2; targs = targs2 } = extends2 in + let expr_diff = diff_if_changed expression expr1 expr2 |> Option.return in + let targs_diff = diff_if_changed_opt type_parameter_instantiation targs1 targs2 in + join_diff_list [expr_diff; targs_diff]) + and interface + (intf1 
: (Loc.t, Loc.t) Ast.Statement.Interface.t) + (intf2 : (Loc.t, Loc.t) Ast.Statement.Interface.t) : node change list option = + Ast.Statement.Interface.( + let { id = id1; tparams = tparams1; extends = extends1; body = (_loc1, body1) } = intf1 in + let { id = id2; tparams = tparams2; extends = extends2; body = (_loc2, body2) } = intf2 in + let id_diff = diff_if_changed identifier id1 id2 |> Option.return in + let tparams_diff = diff_if_changed_opt type_parameter_declaration tparams1 tparams2 in + let extends_diff = diff_and_recurse_no_trivial generic_type_with_loc extends1 extends2 in + let body_diff = diff_if_changed_ret_opt object_type body1 body2 in + join_diff_list [id_diff; tparams_diff; extends_diff; body_diff]) + and class_body + (class_body1 : (Loc.t, Loc.t) Ast.Class.Body.t) + (class_body2 : (Loc.t, Loc.t) Ast.Class.Body.t) : node change list option = + Ast.Class.Body.( + let (_, { body = body1 }) = class_body1 in + let (_, { body = body2 }) = class_body2 in + diff_and_recurse_no_trivial class_element body1 body2) + and class_element + (elem1 : (Loc.t, Loc.t) Ast.Class.Body.element) + (elem2 : (Loc.t, Loc.t) Ast.Class.Body.element) : node change list option = + Ast.Class.Body.( + match (elem1, elem2) with + | (Method (_, m1), Method (_, m2)) -> class_method m1 m2 + | (Property p1, Property p2) -> class_property p1 p2 |> Option.return + | _ -> None) + (* TODO *) and class_property prop1 prop2 : node change list = - let open Ast.Class.Property in - let loc1, { key = key1; value = val1; annot = annot1; static = s1; variance = var1} = prop1 in - let _, { key = key2; value = val2; annot = annot2; static = s2; variance = var2} = prop2 in - (if key1 != key2 || s1 != s2 || var1 != var2 then None else - let vals = diff_if_changed_nonopt_fn expression val1 val2 in - let annots = diff_if_changed_opt_arg type_annotation_opt annot1 annot2 in - Option.(all [vals; annots] >>| List.concat)) - |> Option.value ~default:[(loc1, Replace (ClassProperty prop1, ClassProperty prop2))] - + Ast.Class.Property.( + let (loc1, { key = key1; value = val1; annot = annot1; static = s1; variance = var1 }) = + prop1 + in + let (_, { key = key2; value = val2; annot = annot2; static = s2; variance = var2 }) = + prop2 + in + ( if key1 != key2 || s1 != s2 || var1 != var2 then + None + else + let vals = diff_if_changed_nonopt_fn expression val1 val2 in + let annots = Some (diff_if_changed type_annotation_hint annot1 annot2) in + join_diff_list [vals; annots] ) + |> Option.value ~default:[(loc1, Replace (ClassProperty prop1, ClassProperty prop2))]) and class_method - (m1: (Loc.t, Loc.t) Ast.Class.Method.t') - (m2: (Loc.t, Loc.t) Ast.Class.Method.t') - : node change list option = - let open Ast.Class.Method in - let { kind = kind1; key = key1; value = (_loc, value1); static = static1; decorators = decorators1 } = - m1 - in - let { kind = kind2; key = key2; value = (_loc, value2); static = static2; decorators = decorators2 } = - m2 - in - if kind1 != kind2 || key1 != key2 || (* value handled below *) static1 != static2 || - decorators1 != decorators2 - then - None - else - function_ value1 value2 - - and block (block1: (Loc.t, Loc.t) Ast.Statement.Block.t) (block2: (Loc.t, Loc.t) Ast.Statement.Block.t) - : node change list option = - let open Ast.Statement.Block in - let { body = body1 } = block1 in - let { body = body2 } = block2 in - statement_list body1 body2 - + (m1 : (Loc.t, Loc.t) Ast.Class.Method.t') (m2 : (Loc.t, Loc.t) Ast.Class.Method.t') : + node change list option = + Ast.Class.Method.( + let { + kind = kind1; 
+ key = key1; + value = (_loc, value1); + static = static1; + decorators = decorators1; + } = + m1 + in + let { + kind = kind2; + key = key2; + value = (_loc, value2); + static = static2; + decorators = decorators2; + } = + m2 + in + if + kind1 != kind2 + || key1 != key2 + (* value handled below *) + || static1 != static2 + || decorators1 != decorators2 + then + None + else + function_ value1 value2) + and block + (block1 : (Loc.t, Loc.t) Ast.Statement.Block.t) + (block2 : (Loc.t, Loc.t) Ast.Statement.Block.t) : node change list option = + Ast.Statement.Block.( + let { body = body1 } = block1 in + let { body = body2 } = block2 in + statement_list body1 body2) and expression_statement - (stmt1: (Loc.t, Loc.t) Ast.Statement.Expression.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.Expression.t) - : node change list option = - let open Ast.Statement.Expression in - let { expression = expr1; directive = dir1 } = stmt1 in - let { expression = expr2; directive = dir2 } = stmt2 in - if dir1 != dir2 then - None - else - Some (expression expr1 expr2) - - and expression (expr1: (Loc.t, Loc.t) Ast.Expression.t) (expr2: (Loc.t, Loc.t) Ast.Expression.t) - : node change list = + (stmt1 : (Loc.t, Loc.t) Ast.Statement.Expression.t) + (stmt2 : (Loc.t, Loc.t) Ast.Statement.Expression.t) : node change list option = + Ast.Statement.Expression.( + let { expression = expr1; directive = dir1 } = stmt1 in + let { expression = expr2; directive = dir2 } = stmt2 in + if dir1 != dir2 then + None + else + Some (expression expr1 expr2)) + and expression + (expr1 : (Loc.t, Loc.t) Ast.Expression.t) (expr2 : (Loc.t, Loc.t) Ast.Expression.t) : + node change list = let changes = (* The open is here to avoid ambiguity with the use of the local `Expression` constructor * below *) - let open Ast.Expression in - match expr1, expr2 with - | (_, Binary b1), (_, Binary b2) -> - binary b1 b2 - | (_, Unary u1), (_, Unary u2) -> - unary u1 u2 - | (_, Ast.Expression.Identifier id1), (_, Ast.Expression.Identifier id2) -> - identifier id1 id2 |> Option.return - | (_, New new1), (_, New new2) -> - new_ new1 new2 - | (_, Call call1), (_, Call call2) -> - call_ call1 call2 - | (_, Function f1), (_, Function f2) | (_, ArrowFunction f1), (_, ArrowFunction f2) -> - function_ f1 f2 - | (_, Class class1), (_, Class class2) -> - class_ class1 class2 - | (_, Assignment assn1), (_, Assignment assn2) -> - assignment_ assn1 assn2 - | (_, Object obj1), (_, Object obj2) -> - _object obj1 obj2 - | (_, TypeCast t1), (_, TypeCast t2) -> - Some (type_cast t1 t2) - | expr, (loc, TypeCast t2) -> - Some (type_cast_added expr loc t2) - | _, _ -> - None + Ast.Expression.( + match (expr1, expr2) with + | ((loc, Ast.Expression.Literal lit1), (_, Ast.Expression.Literal lit2)) -> + Some (literal loc lit1 lit2) + | ((_, Binary b1), (_, Binary b2)) -> binary b1 b2 + | ((loc, Unary u1), (_, Unary u2)) -> unary loc u1 u2 + | ((_, Ast.Expression.Identifier id1), (_, Ast.Expression.Identifier id2)) -> + identifier id1 id2 |> Option.return + | ((_, Conditional c1), (_, Conditional c2)) -> conditional c1 c2 |> Option.return + | ((loc, New new1), (_, New new2)) -> new_ loc new1 new2 + | ((_, Member member1), (_, Member member2)) -> member member1 member2 + | ((_, Call call1), (_, Call call2)) -> call call1 call2 + | ((_, ArrowFunction f1), (_, ArrowFunction f2)) -> function_ ~is_arrow:true f1 f2 + | ((_, Function f1), (_, Function f2)) -> function_ f1 f2 + | ((_, Class class1), (_, Class class2)) -> class_ class1 class2 + | ((_, Assignment assn1), (_, Assignment assn2)) -> 
assignment assn1 assn2 + | ((loc, Object obj1), (_, Object obj2)) -> object_ loc obj1 obj2 + | ((_, TaggedTemplate t_tmpl1), (_, TaggedTemplate t_tmpl2)) -> + Some (tagged_template t_tmpl1 t_tmpl2) + | ((loc, Ast.Expression.TemplateLiteral t_lit1), (_, Ast.Expression.TemplateLiteral t_lit2)) + -> + Some (template_literal loc t_lit1 t_lit2) + | ((_, JSXElement jsx_elem1), (_, JSXElement jsx_elem2)) -> jsx_element jsx_elem1 jsx_elem2 + | ((_, JSXFragment frag1), (_, JSXFragment frag2)) -> jsx_fragment frag1 frag2 + | ((_, TypeCast t1), (_, TypeCast t2)) -> Some (type_cast t1 t2) + | ((_, Logical l1), (_, Logical l2)) -> logical l1 l2 + | ((loc, Array arr1), (_, Array arr2)) -> array loc arr1 arr2 + | (expr, (loc, TypeCast t2)) -> Some (type_cast_added expr loc t2) + | ((_, Update update1), (_, Update update2)) -> update update1 update2 + | ((_, Sequence seq1), (_, Sequence seq2)) -> sequence seq1 seq2 + | (_, _) -> None) in let old_loc = Ast_utils.loc_of_expression expr1 in Option.value changes ~default:[(old_loc, Replace (Expression expr1, Expression expr2))] - - and assignment_ (assn1: (Loc.t, Loc.t) Ast.Expression.Assignment.t) - (assn2: (Loc.t, Loc.t) Ast.Expression.Assignment.t) - : node change list option = - let open Ast.Expression.Assignment in - let { operator = op1; left = pat1; right = exp1 } = assn1 in - let { operator = op2; left = pat2; right = exp2 } = assn2 in - if op1 != op2 then None else - diff_if_changed pattern pat1 pat2 @ diff_if_changed expression exp1 exp2 |> Option.return - + and literal (loc : Loc.t) (lit1 : Loc.t Ast.Literal.t) (lit2 : Loc.t Ast.Literal.t) : + node change list = + [(loc, Replace (Literal lit1, Literal lit2))] + and number_literal_type (loc : Loc.t) (nlit1 : Ast.NumberLiteral.t) (nlit2 : Ast.NumberLiteral.t) + : node change list = + [(loc, Replace (NumberLiteralNode nlit1, NumberLiteralNode nlit2))] + and string_literal (loc : Loc.t) (lit1 : Ast.StringLiteral.t) (lit2 : Ast.StringLiteral.t) : + node change list option = + let { Ast.StringLiteral.value = val1; raw = raw1 } = lit1 in + let { Ast.StringLiteral.value = val2; raw = raw2 } = lit2 in + if String.equal val1 val2 && String.equal raw1 raw2 then + Some [] + else + Some [(loc, Replace (StringLiteral lit1, StringLiteral lit2))] + and tagged_template + (t_tmpl1 : (Loc.t, Loc.t) Ast.Expression.TaggedTemplate.t) + (t_tmpl2 : (Loc.t, Loc.t) Ast.Expression.TaggedTemplate.t) : node change list = + Ast.Expression.TaggedTemplate.( + let { tag = tag1; quasi = (loc, quasi1) } = t_tmpl1 in + let { tag = tag2; quasi = (_, quasi2) } = t_tmpl2 in + let tag_diff = diff_if_changed expression tag1 tag2 in + let quasi_diff = diff_if_changed (template_literal loc) quasi1 quasi2 in + tag_diff @ quasi_diff) + and template_literal + (loc : Loc.t) + (* Need to pass in loc because TemplateLiteral doesn't have a loc attached *) + (t_lit1 : (Loc.t, Loc.t) Ast.Expression.TemplateLiteral.t) + (t_lit2 : (Loc.t, Loc.t) Ast.Expression.TemplateLiteral.t) : node change list = + Ast.Expression.TemplateLiteral.( + let { quasis = quasis1; expressions = exprs1 } = t_lit1 in + let { quasis = quasis2; expressions = exprs2 } = t_lit2 in + let quasis_diff = diff_and_recurse_no_trivial template_literal_element quasis1 quasis2 in + let exprs_diff = diff_and_recurse_nonopt_no_trivial expression exprs1 exprs2 in + let result = join_diff_list [quasis_diff; exprs_diff] in + Option.value + result + ~default:[(loc, Replace (TemplateLiteral t_lit1, TemplateLiteral t_lit2))]) + and template_literal_element + (tl_elem1 : Loc.t 
Ast.Expression.TemplateLiteral.Element.t) + (tl_elem2 : Loc.t Ast.Expression.TemplateLiteral.Element.t) : node change list option = + Ast.Expression.TemplateLiteral.Element.( + let (_, { value = value1; tail = tail1 }) = tl_elem1 in + let (_, { value = value2; tail = tail2 }) = tl_elem2 in + (* These are primitives, so structural equality is fine *) + if value1.raw <> value2.raw || value1.cooked <> value2.cooked || tail1 <> tail2 then + None + else + Some []) + and jsx_element + (jsx_elem1 : (Loc.t, Loc.t) Ast.JSX.element) (jsx_elem2 : (Loc.t, Loc.t) Ast.JSX.element) : + node change list option = + Ast.JSX.( + let { openingElement = open_elem1; closingElement = close_elem1; children = (_, children1) } + = + jsx_elem1 + in + let { openingElement = open_elem2; closingElement = close_elem2; children = (_, children2) } + = + jsx_elem2 + in + let opening_diff = diff_if_changed_ret_opt jsx_opening_element open_elem1 open_elem2 in + let children_diff = diff_and_recurse_nonopt_no_trivial jsx_child children1 children2 in + let closing_diff = diff_if_changed_opt jsx_closing_element close_elem1 close_elem2 in + join_diff_list [opening_diff; children_diff; closing_diff]) + and jsx_fragment + (frag1 : (Loc.t, Loc.t) Ast.JSX.fragment) (frag2 : (Loc.t, Loc.t) Ast.JSX.fragment) : + node change list option = + Ast.JSX.( + (* Opening and closing elements contain no information besides loc, so we + * ignore them for the diff *) + let { frag_openingElement = _; frag_children = (_, children1); frag_closingElement = _ } = + frag1 + in + let { frag_openingElement = _; frag_children = (_, children2); frag_closingElement = _ } = + frag2 + in + diff_and_recurse_nonopt_no_trivial jsx_child children1 children2) + and jsx_opening_element + (elem1 : (Loc.t, Loc.t) Ast.JSX.Opening.t) (elem2 : (Loc.t, Loc.t) Ast.JSX.Opening.t) : + node change list option = + Ast.JSX.Opening.( + let (_, { name = name1; selfClosing = self_close1; attributes = attrs1 }) = elem1 in + let (_, { name = name2; selfClosing = self_close2; attributes = attrs2 }) = elem2 in + if self_close1 != self_close2 then + None + else + let name_diff = diff_if_changed_ret_opt jsx_name name1 name2 in + let attrs_diff = diff_and_recurse_no_trivial jsx_opening_attribute attrs1 attrs2 in + join_diff_list [name_diff; attrs_diff]) + and jsx_name (name1 : (Loc.t, Loc.t) Ast.JSX.name) (name2 : (Loc.t, Loc.t) Ast.JSX.name) : + node change list option = + Ast.JSX.( + match (name1, name2) with + | (Ast.JSX.Identifier id1, Ast.JSX.Identifier id2) -> + Some (diff_if_changed jsx_identifier id1 id2) + | (NamespacedName namespaced_name1, NamespacedName namespaced_name2) -> + Some (diff_if_changed jsx_namespaced_name namespaced_name1 namespaced_name2) + | (MemberExpression member_expr1, MemberExpression member_expr2) -> + diff_if_changed_ret_opt jsx_member_expression member_expr1 member_expr2 + | _ -> None) + and jsx_identifier (id1 : Loc.t Ast.JSX.Identifier.t) (id2 : Loc.t Ast.JSX.Identifier.t) : + node change list = + Ast.JSX.Identifier.( + let (old_loc, { name = name1 }) = id1 in + let (_, { name = name2 }) = id2 in + if name1 = name2 then + [] + else + [(old_loc, Replace (JSXIdentifier id1, JSXIdentifier id2))]) + and jsx_namespaced_name + (namespaced_name1 : (Loc.t, Loc.t) Ast.JSX.NamespacedName.t) + (namespaced_name2 : (Loc.t, Loc.t) Ast.JSX.NamespacedName.t) : node change list = + Ast.JSX.NamespacedName.( + let (_, { namespace = namespace1; name = name1 }) = namespaced_name1 in + let (_, { namespace = namespace2; name = name2 }) = namespaced_name2 in + let 
namespace_diff = diff_if_changed jsx_identifier namespace1 namespace2 in + let name_diff = diff_if_changed jsx_identifier name1 name2 in + namespace_diff @ name_diff) + and jsx_member_expression + (member_expr1 : (Loc.t, Loc.t) Ast.JSX.MemberExpression.t) + (member_expr2 : (Loc.t, Loc.t) Ast.JSX.MemberExpression.t) : node change list option = + Ast.JSX.MemberExpression.( + let (_, { _object = object1; property = prop1 }) = member_expr1 in + let (_, { _object = object2; property = prop2 }) = member_expr2 in + let obj_diff = + match (object1, object2) with + | (Ast.JSX.MemberExpression.Identifier id1, Ast.JSX.MemberExpression.Identifier id2) -> + Some (diff_if_changed jsx_identifier id1 id2) + | (MemberExpression member_expr1', MemberExpression member_expr2') -> + diff_if_changed_ret_opt jsx_member_expression member_expr1' member_expr2' + | _ -> None + in + let prop_diff = diff_if_changed jsx_identifier prop1 prop2 |> Option.return in + join_diff_list [obj_diff; prop_diff]) + and jsx_closing_element + (elem1 : (Loc.t, Loc.t) Ast.JSX.Closing.t) (elem2 : (Loc.t, Loc.t) Ast.JSX.Closing.t) : + node change list option = + Ast.JSX.Closing.( + let (_, { name = name1 }) = elem1 in + let (_, { name = name2 }) = elem2 in + diff_if_changed_ret_opt jsx_name name1 name2) + and jsx_opening_attribute + (jsx_attr1 : (Loc.t, Loc.t) Ast.JSX.Opening.attribute) + (jsx_attr2 : (Loc.t, Loc.t) Ast.JSX.Opening.attribute) : node change list option = + Ast.JSX.Opening.( + match (jsx_attr1, jsx_attr2) with + | (Attribute attr1, Attribute attr2) -> diff_if_changed_ret_opt jsx_attribute attr1 attr2 + | (SpreadAttribute attr1, SpreadAttribute attr2) -> + diff_if_changed jsx_spread_attribute attr1 attr2 |> Option.return + | _ -> None) + and jsx_spread_attribute + (attr1 : (Loc.t, Loc.t) Ast.JSX.SpreadAttribute.t) + (attr2 : (Loc.t, Loc.t) Ast.JSX.SpreadAttribute.t) : node change list = + Flow_ast.JSX.SpreadAttribute.( + let (_, { argument = arg1 }) = attr1 in + let (_, { argument = arg2 }) = attr2 in + diff_if_changed expression arg1 arg2) + and jsx_attribute + (attr1 : (Loc.t, Loc.t) Ast.JSX.Attribute.t) (attr2 : (Loc.t, Loc.t) Ast.JSX.Attribute.t) : + node change list option = + Ast.JSX.Attribute.( + let (_, { name = name1; value = value1 }) = attr1 in + let (_, { name = name2; value = value2 }) = attr2 in + let name_diff = + match (name1, name2) with + | (Ast.JSX.Attribute.Identifier id1, Ast.JSX.Attribute.Identifier id2) -> + Some (diff_if_changed jsx_identifier id1 id2) + | (NamespacedName namespaced_name1, NamespacedName namespaced_name2) -> + Some (diff_if_changed jsx_namespaced_name namespaced_name1 namespaced_name2) + | _ -> None + in + let value_diff = + match (value1, value2) with + | (Some (Ast.JSX.Attribute.Literal (loc, lit1)), Some (Ast.JSX.Attribute.Literal (_, lit2))) + -> + diff_if_changed (literal loc) lit1 lit2 |> Option.return + | (Some (ExpressionContainer (_, expr1)), Some (ExpressionContainer (_, expr2))) -> + diff_if_changed_ret_opt jsx_expression expr1 expr2 + | _ -> None + in + join_diff_list [name_diff; value_diff]) + and jsx_child (child1 : (Loc.t, Loc.t) Ast.JSX.child) (child2 : (Loc.t, Loc.t) Ast.JSX.child) : + node change list = + Ast.JSX.( + let (old_loc, child1') = child1 in + let (_, child2') = child2 in + if child1' == child2' then + [] + else + let changes = + match (child1', child2') with + | (Element elem1, Element elem2) -> diff_if_changed_ret_opt jsx_element elem1 elem2 + | (Fragment frag1, Fragment frag2) -> diff_if_changed_ret_opt jsx_fragment frag1 frag2 + | (ExpressionContainer 
expr1, ExpressionContainer expr2) -> + diff_if_changed_ret_opt jsx_expression expr1 expr2 + | (SpreadChild expr1, SpreadChild expr2) -> + diff_if_changed expression expr1 expr2 |> Option.return + | (Text _, Text _) -> None + | _ -> None + in + Option.value changes ~default:[(old_loc, Replace (JSXChild child1, JSXChild child2))]) + and jsx_expression + (jsx_expr1 : (Loc.t, Loc.t) Ast.JSX.ExpressionContainer.t) + (jsx_expr2 : (Loc.t, Loc.t) Ast.JSX.ExpressionContainer.t) : node change list option = + Ast.JSX.( + let { ExpressionContainer.expression = expr1 } = jsx_expr1 in + let { ExpressionContainer.expression = expr2 } = jsx_expr2 in + match (expr1, expr2) with + | (ExpressionContainer.Expression expr1', ExpressionContainer.Expression expr2') -> + Some (diff_if_changed expression expr1' expr2') + | (ExpressionContainer.EmptyExpression, ExpressionContainer.EmptyExpression) -> Some [] + | _ -> None) + and assignment + (assn1 : (Loc.t, Loc.t) Ast.Expression.Assignment.t) + (assn2 : (Loc.t, Loc.t) Ast.Expression.Assignment.t) : node change list option = + Ast.Expression.Assignment.( + let { operator = op1; left = pat1; right = exp1 } = assn1 in + let { operator = op2; left = pat2; right = exp2 } = assn2 in + if op1 != op2 then + None + else + diff_if_changed pattern pat1 pat2 @ diff_if_changed expression exp1 exp2 |> Option.return) and object_spread_property prop1 prop2 = - let open Ast.Expression.Object.SpreadProperty in - let { argument = arg1 } = prop1 in - let { argument = arg2 } = prop2 in - expression arg1 arg2 - + Ast.Expression.Object.SpreadProperty.( + let { argument = arg1 } = prop1 in + let { argument = arg2 } = prop2 in + expression arg1 arg2) and object_key key1 key2 = - let open Ast.Expression.Object.Property in - match key1, key2 with - | Literal _, Literal _ -> (* TODO: recurse into literals *) None - | Ast.Expression.Object.Property.Identifier i1, Ast.Expression.Object.Property.Identifier i2 -> - identifier i1 i2 |> Option.return - | Computed e1, Computed e2 -> expression e1 e2 |> Option.return - | _, _ -> None - + let module EOP = Ast.Expression.Object.Property in + match (key1, key2) with + | (EOP.Literal (loc, l1), EOP.Literal (_, l2)) -> + diff_if_changed (literal loc) l1 l2 |> Option.return + | (EOP.Identifier i1, EOP.Identifier i2) -> diff_if_changed identifier i1 i2 |> Option.return + | (EOP.Computed e1, EOP.Computed e2) -> diff_if_changed expression e1 e2 |> Option.return + | (_, _) -> None and object_regular_property (_, prop1) (_, prop2) = - let open Ast.Expression.Object.Property in - match prop1, prop2 with - | Init { shorthand = sh1; value = val1; key = key1 }, - Init { shorthand = sh2; value = val2; key = key2 } -> - if sh1 != sh2 then None else - let values = diff_if_changed expression val1 val2 |> Option.return in - let keys = diff_if_changed_opt object_key (Some key1) (Some key2) in - Option.(all [keys; values] >>| List.concat) - | Set {value = val1; key = key1 }, Set { value = val2; key = key2 } - | Method {value = val1; key = key1 }, Method { value = val2; key = key2 } - | Get {value = val1; key = key1 }, Get { value = val2; key = key2 } -> - let values = diff_if_changed_opt function_ (Some (snd val1)) (Some (snd val2)) in - let keys = diff_if_changed_opt object_key (Some key1) (Some key2) in - Option.(all [keys; values] >>| List.concat) - | _ -> None - + Ast.Expression.Object.Property.( + match (prop1, prop2) with + | ( Init { shorthand = sh1; value = val1; key = key1 }, + Init { shorthand = sh2; value = val2; key = key2 } ) -> + if sh1 != sh2 then + None + 
else + let values = diff_if_changed expression val1 val2 |> Option.return in + let keys = diff_if_changed_ret_opt object_key key1 key2 in + join_diff_list [keys; values] + | (Set { value = val1; key = key1 }, Set { value = val2; key = key2 }) + | (Method { value = val1; key = key1 }, Method { value = val2; key = key2 }) + | (Get { value = val1; key = key1 }, Get { value = val2; key = key2 }) -> + let values = diff_if_changed_ret_opt function_ (snd val1) (snd val2) in + let keys = diff_if_changed_ret_opt object_key key1 key2 in + join_diff_list [keys; values] + | _ -> None) and object_property prop1 prop2 = - let open Ast.Expression.Object in - match prop1, prop2 with - | Property (loc, p1), Property p2 -> - object_regular_property (loc, p1) p2 - |> Option.value ~default:[(loc, Replace (ObjectProperty prop1, ObjectProperty prop2))] - |> Option.return - | SpreadProperty (_, p1), SpreadProperty (_, p2) -> + Ast.Expression.Object.( + match (prop1, prop2) with + | (Property (loc, p1), Property p2) -> + object_regular_property (loc, p1) p2 + |> Option.value ~default:[(loc, Replace (ObjectProperty prop1, ObjectProperty prop2))] + |> Option.return + | (SpreadProperty (_, p1), SpreadProperty (_, p2)) -> object_spread_property p1 p2 |> Option.return - | _ -> None - - and _object obj1 obj2 = - let open Ast.Expression.Object in - let { properties = properties1 } = obj1 in - let { properties = properties2 } = obj2 in - diff_and_recurse_no_trivial object_property properties1 properties2 - - and binary (b1: (Loc.t, Loc.t) Ast.Expression.Binary.t) (b2: (Loc.t, Loc.t) Ast.Expression.Binary.t): node change list option = - let open Ast.Expression.Binary in - let { operator = op1; left = left1; right = right1 } = b1 in - let { operator = op2; left = left2; right = right2 } = b2 in - if op1 != op2 then - None - else - Some (diff_if_changed expression left1 left2 @ diff_if_changed expression right1 right2) - - and unary (u1: (Loc.t, Loc.t) Ast.Expression.Unary.t) (u2: (Loc.t, Loc.t) Ast.Expression.Unary.t): node change list option = - let open Ast.Expression.Unary in - let { operator = op1; argument = arg1; prefix = prefix1 } = u1 in - let { operator = op2; argument = arg2; prefix = prefix2 } = u2 in - if op1 != op2 || prefix1 != prefix2 then - None - else - Some (expression arg1 arg2) - - and identifier (id1: Loc.t Ast.Identifier.t) (id2: Loc.t Ast.Identifier.t): node change list = - let (old_loc, _) = id1 in - [(old_loc, Replace (Identifier id1, Identifier id2))] - - and new_ (new1: (Loc.t, Loc.t) Ast.Expression.New.t) (new2: (Loc.t, Loc.t) Ast.Expression.New.t): node change list option = - let open Ast.Expression.New in - let { callee = callee1; targs = targs1; arguments = arguments1 } = new1 in - let { callee = callee2; targs = targs2; arguments = arguments2 } = new2 in - if targs1 != targs2 || arguments1 != arguments2 then - (* TODO(nmote) recurse into targs and arguments *) - None - else - Some (diff_if_changed expression callee1 callee2) - - and call_ (call1: (Loc.t, Loc.t) Ast.Expression.Call.t) (call2: (Loc.t, Loc.t) Ast.Expression.Call.t): node change list option = - let open Ast.Expression.Call in - let { callee = callee1; targs = targs1; arguments = arguments1 } = call1 in - let { callee = callee2; targs = targs2; arguments = arguments2 } = call2 in - if targs1 != targs2 || arguments1 != arguments2 then - (* TODO(nmote) recurse into targs and arguments *) - None - else - Some (diff_if_changed expression callee1 callee2) - - and for_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.For.t) - (stmt2: 
(Loc.t, Loc.t) Ast.Statement.For.t) - : node change list option = - let open Ast.Statement.For in - let { init = init1; test = test1; update = update1; body = body1 } = stmt1 in - let { init = init2; test = test2; update = update2; body = body2 } = stmt2 in - let init = diff_if_changed_opt for_statement_init init1 init2 in - let test = diff_if_changed_nonopt_fn expression test1 test2 in - let update = diff_if_changed_nonopt_fn expression update1 update2 in - let body = Some (diff_if_changed statement body1 body2) in - Option.all [init; test; update; body] |> Option.map ~f:List.concat - - and for_statement_init(init1: (Loc.t, Loc.t) Ast.Statement.For.init) - (init2: (Loc.t, Loc.t) Ast.Statement.For.init) + | _ -> None) + and object_ loc obj1 obj2 = + Ast.Expression.Object.( + let { properties = properties1; comments = comments1 } = obj1 in + let { properties = properties2; comments = comments2 } = obj2 in + let comments = syntax_opt loc comments1 comments2 in + join_diff_list + [comments; diff_and_recurse_no_trivial object_property properties1 properties2]) + and binary + (b1 : (Loc.t, Loc.t) Ast.Expression.Binary.t) (b2 : (Loc.t, Loc.t) Ast.Expression.Binary.t) : + node change list option = + Ast.Expression.Binary.( + let { operator = op1; left = left1; right = right1 } = b1 in + let { operator = op2; left = left2; right = right2 } = b2 in + if op1 != op2 then + None + else + Some (diff_if_changed expression left1 left2 @ diff_if_changed expression right1 right2)) + and unary + loc (u1 : (Loc.t, Loc.t) Ast.Expression.Unary.t) (u2 : (Loc.t, Loc.t) Ast.Expression.Unary.t) : node change list option = - let open Ast.Statement.For in - match (init1, init2) with - | (InitDeclaration(_, decl1), InitDeclaration(_, decl2)) -> - variable_declaration decl1 decl2 - | (InitExpression expr1, InitExpression expr2) -> - Some (diff_if_changed expression expr1 expr2) - | (InitDeclaration _, InitExpression _) - | (InitExpression _, InitDeclaration _) -> - None - - and for_in_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.ForIn.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.ForIn.t) - : node change list option = - let open Ast.Statement.ForIn in - let { left = left1; right = right1; body = body1; each = each1 } = stmt1 in - let { left = left2; right = right2; body = body2; each = each2 } = stmt2 in - let left = if left1 == left2 then Some [] else for_in_statement_lhs left1 left2 in - let body = Some (diff_if_changed statement body1 body2) in - let right = Some (diff_if_changed expression right1 right2) in - let each = if each1 != each2 then None else Some [] in - Option.all [left; right; body; each] |> Option.map ~f:List.concat - - and for_in_statement_lhs (left1: (Loc.t, Loc.t) Ast.Statement.ForIn.left) - (left2: (Loc.t, Loc.t) Ast.Statement.ForIn.left) + Ast.Expression.Unary.( + let { operator = op1; argument = arg1; comments = comments1 } = u1 in + let { operator = op2; argument = arg2; comments = comments2 } = u2 in + let comments = syntax_opt loc comments1 comments2 |> Option.value ~default:[] in + if op1 != op2 then + None + else + Some (comments @ expression arg1 arg2)) + and identifier (id1 : (Loc.t, Loc.t) Ast.Identifier.t) (id2 : (Loc.t, Loc.t) Ast.Identifier.t) : + node change list = + let (old_loc, { Ast.Identifier.name = name1; comments = comments1 }) = id1 in + let (_new_loc, { Ast.Identifier.name = name2; comments = comments2 }) = id2 in + let name = + if String.equal name1 name2 then + [] + else + [(old_loc, Replace (Raw name1, Raw name2))] + in + let comments = syntax_opt old_loc comments1 
comments2 |> Option.value ~default:[] in + comments @ name + and conditional + (c1 : (Loc.t, Loc.t) Ast.Expression.Conditional.t) + (c2 : (Loc.t, Loc.t) Ast.Expression.Conditional.t) : node change list = + Ast.Expression.Conditional.( + let { test = test1; consequent = cons1; alternate = alt1 } = c1 in + let { test = test2; consequent = cons2; alternate = alt2 } = c2 in + let test_diff = diff_if_changed expression test1 test2 in + let cons_diff = diff_if_changed expression cons1 cons2 in + let alt_diff = diff_if_changed expression alt1 alt2 in + List.concat [test_diff; cons_diff; alt_diff]) + and new_ + loc (new1 : (Loc.t, Loc.t) Ast.Expression.New.t) (new2 : (Loc.t, Loc.t) Ast.Expression.New.t) : node change list option = - let open Ast.Statement.ForIn in - match (left1, left2) with - | (LeftDeclaration(_, decl1), LeftDeclaration(_, decl2)) -> - variable_declaration decl1 decl2 - | (LeftPattern p1, LeftPattern p2) -> - Some (pattern p1 p2) - | (LeftDeclaration _, LeftPattern _) - | (LeftPattern _, LeftDeclaration _) -> - None - - and while_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.While.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.While.t) - : node change list = - let open Ast.Statement.While in - let { test = test1; body = body1 } = stmt1 in - let { test = test2; body = body2 } = stmt2 in - let test = diff_if_changed expression test1 test2 in - let body = diff_if_changed statement body1 body2 in - test @ body - - and for_of_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.ForOf.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.ForOf.t) + Ast.Expression.New.( + let { callee = callee1; targs = targs1; arguments = arguments1; comments = comments1 } = + new1 + in + let { callee = callee2; targs = targs2; arguments = arguments2; comments = comments2 } = + new2 + in + let comments = syntax_opt loc comments1 comments2 in + let args_diff_list = + if targs1 != targs2 then + let targs = + diff_if_changed_opt type_parameter_instantiation_with_implicit targs1 targs2 + in + [targs] + else + let args = diff_and_recurse_no_trivial expression_or_spread arguments1 arguments2 in + let callee = Some (diff_if_changed expression callee1 callee2) in + [args; callee] + in + join_diff_list ([comments] @ args_diff_list)) + and member + (member1 : (Loc.t, Loc.t) Ast.Expression.Member.t) + (member2 : (Loc.t, Loc.t) Ast.Expression.Member.t) : node change list option = + Ast.Expression.Member.( + let { _object = obj1; property = prop1 } = member1 in + let { _object = obj2; property = prop2 } = member2 in + let obj = Some (diff_if_changed expression obj1 obj2) in + let prop = diff_if_changed_ret_opt member_property prop1 prop2 in + join_diff_list [obj; prop]) + and member_property + (prop1 : (Loc.t, Loc.t) Ast.Expression.Member.property) + (prop2 : (Loc.t, Loc.t) Ast.Expression.Member.property) : node change list option = + Ast.Expression.Member.( + match (prop1, prop2) with + | (PropertyExpression exp1, PropertyExpression exp2) -> + Some (diff_if_changed expression exp1 exp2) + | (PropertyIdentifier id1, PropertyIdentifier id2) + | (PropertyPrivateName (_, id1), PropertyPrivateName (_, id2)) -> + Some (diff_if_changed identifier id1 id2) + | (_, _) -> None) + and call + (call1 : (Loc.t, Loc.t) Ast.Expression.Call.t) (call2 : (Loc.t, Loc.t) Ast.Expression.Call.t) : node change list option = - let open Ast.Statement.ForOf in - let { left = left1; right = right1; body = body1; async = async1 } = stmt1 in - let { left = left2; right = right2; body = body2; async = async2 } = stmt2 in - let left = if left1 == left2 then Some [] 
else for_of_statement_lhs left1 left2 in - let body = Some (diff_if_changed statement body1 body2) in - let right = Some (diff_if_changed expression right1 right2) in - let async = if async1 != async2 then None else Some [] in - Option.all [left; right; body; async] |> Option.map ~f:List.concat - - and for_of_statement_lhs (left1: (Loc.t, Loc.t) Ast.Statement.ForOf.left) - (left2: (Loc.t, Loc.t) Ast.Statement.ForOf.left) + Ast.Expression.Call.( + let { callee = callee1; targs = targs1; arguments = arguments1 } = call1 in + let { callee = callee2; targs = targs2; arguments = arguments2 } = call2 in + if targs1 != targs2 then + diff_if_changed_opt type_parameter_instantiation_with_implicit targs1 targs2 + else + let args = diff_and_recurse_no_trivial expression_or_spread arguments1 arguments2 in + let callee = Some (diff_if_changed expression callee1 callee2) in + join_diff_list [args; callee]) + and expression_or_spread + (expr1 : (Loc.t, Loc.t) Ast.Expression.expression_or_spread) + (expr2 : (Loc.t, Loc.t) Ast.Expression.expression_or_spread) : node change list option = + match (expr1, expr2) with + | (Ast.Expression.Expression e1, Ast.Expression.Expression e2) -> + Some (diff_if_changed expression e1 e2) + | (Ast.Expression.Spread spread1, Ast.Expression.Spread spread2) -> + Some (diff_if_changed spread_element spread1 spread2) + | (_, _) -> None + and spread_element + (spread1 : (Loc.t, Loc.t) Ast.Expression.SpreadElement.t) + (spread2 : (Loc.t, Loc.t) Ast.Expression.SpreadElement.t) : node change list = + Ast.Expression.SpreadElement.( + let (_, { argument = arg1 }) = spread1 in + let (_, { argument = arg2 }) = spread2 in + diff_if_changed expression arg1 arg2) + and logical expr1 expr2 = + Ast.Expression.Logical.( + let { left = left1; right = right1; operator = operator1 } = expr1 in + let { left = left2; right = right2; operator = operator2 } = expr2 in + if operator1 == operator2 then + let left = diff_if_changed expression left1 left2 in + let right = diff_if_changed expression right1 right2 in + Some (List.concat [left; right]) + else + None) + and array loc arr1 arr2 : node change list option = + Ast.Expression.Array.( + let { elements = elems1; comments = comments1 } = arr1 in + let { elements = elems2; comments = comments2 } = arr2 in + let comments = syntax_opt loc comments1 comments2 in + let elements = + diff_and_recurse_no_trivial (diff_if_changed_opt expression_or_spread) elems1 elems2 + in + join_diff_list [comments; elements]) + and sequence seq1 seq2 : node change list option = + Ast.Expression.Sequence.( + let { expressions = exps1 } = seq1 in + let { expressions = exps2 } = seq2 in + diff_and_recurse_nonopt_no_trivial expression exps1 exps2) + and for_statement + (stmt1 : (Loc.t, Loc.t) Ast.Statement.For.t) (stmt2 : (Loc.t, Loc.t) Ast.Statement.For.t) : + node change list option = + Ast.Statement.For.( + let { init = init1; test = test1; update = update1; body = body1 } = stmt1 in + let { init = init2; test = test2; update = update2; body = body2 } = stmt2 in + let init = diff_if_changed_opt for_statement_init init1 init2 in + let test = diff_if_changed_nonopt_fn expression test1 test2 in + let update = diff_if_changed_nonopt_fn expression update1 update2 in + let body = Some (diff_if_changed statement body1 body2) in + join_diff_list [init; test; update; body]) + and for_statement_init + (init1 : (Loc.t, Loc.t) Ast.Statement.For.init) + (init2 : (Loc.t, Loc.t) Ast.Statement.For.init) : node change list option = + Ast.Statement.For.( + match (init1, init2) with + | 
(InitDeclaration (_, decl1), InitDeclaration (_, decl2)) -> + variable_declaration decl1 decl2 + | (InitExpression expr1, InitExpression expr2) -> + Some (diff_if_changed expression expr1 expr2) + | (InitDeclaration _, InitExpression _) + | (InitExpression _, InitDeclaration _) -> + None) + and for_in_statement + (stmt1 : (Loc.t, Loc.t) Ast.Statement.ForIn.t) (stmt2 : (Loc.t, Loc.t) Ast.Statement.ForIn.t) : node change list option = - let open Ast.Statement.ForOf in - match (left1, left2) with - | (LeftDeclaration(_, decl1), LeftDeclaration(_, decl2)) -> - variable_declaration decl1 decl2 - | (LeftPattern p1, LeftPattern p2) -> - Some (pattern p1 p2) - | (LeftDeclaration _, LeftPattern _) - | (LeftPattern _, LeftDeclaration _) -> - None - - and do_while_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.DoWhile.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.DoWhile.t) + Ast.Statement.ForIn.( + let { left = left1; right = right1; body = body1; each = each1 } = stmt1 in + let { left = left2; right = right2; body = body2; each = each2 } = stmt2 in + let left = + if left1 == left2 then + Some [] + else + for_in_statement_lhs left1 left2 + in + let body = Some (diff_if_changed statement body1 body2) in + let right = Some (diff_if_changed expression right1 right2) in + let each = + if each1 != each2 then + None + else + Some [] + in + join_diff_list [left; right; body; each]) + and for_in_statement_lhs + (left1 : (Loc.t, Loc.t) Ast.Statement.ForIn.left) + (left2 : (Loc.t, Loc.t) Ast.Statement.ForIn.left) : node change list option = + Ast.Statement.ForIn.( + match (left1, left2) with + | (LeftDeclaration (_, decl1), LeftDeclaration (_, decl2)) -> + variable_declaration decl1 decl2 + | (LeftPattern p1, LeftPattern p2) -> Some (pattern p1 p2) + | (LeftDeclaration _, LeftPattern _) + | (LeftPattern _, LeftDeclaration _) -> + None) + and while_statement + (stmt1 : (Loc.t, Loc.t) Ast.Statement.While.t) (stmt2 : (Loc.t, Loc.t) Ast.Statement.While.t) : node change list = - let open Ast.Statement.DoWhile in - let { body = body1; test = test1 } = stmt1 in - let { body = body2; test = test2 } = stmt2 in - let body = diff_if_changed statement body1 body2 in - let test = diff_if_changed expression test1 test2 in - List.concat [body; test] - - and return_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.Return.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.Return.t) - : node change list option = - let open Ast.Statement.Return in - let { argument = argument1; } = stmt1 in - let { argument = argument2; } = stmt2 in - diff_if_changed_nonopt_fn expression argument1 argument2 - - and switch_statement (stmt1: (Loc.t, Loc.t) Ast.Statement.Switch.t) - (stmt2: (Loc.t, Loc.t) Ast.Statement.Switch.t) - : node change list option = - let open Ast.Statement.Switch in - let { discriminant = discriminant1; cases = cases1} = stmt1 in - let { discriminant = discriminant2; cases = cases2} = stmt2 in - let discriminant = Some (diff_if_changed expression discriminant1 discriminant2) in - let cases = diff_and_recurse_no_trivial switch_case cases1 cases2 in - Option.all [discriminant; cases] |> Option.map ~f:List.concat - - and switch_case ((_, s1): (Loc.t, Loc.t) Ast.Statement.Switch.Case.t) - ((_, s2): (Loc.t, Loc.t) Ast.Statement.Switch.Case.t) + Ast.Statement.While.( + let { test = test1; body = body1 } = stmt1 in + let { test = test2; body = body2 } = stmt2 in + let test = diff_if_changed expression test1 test2 in + let body = diff_if_changed statement body1 body2 in + test @ body) + and for_of_statement + (stmt1 : (Loc.t, Loc.t) 
Ast.Statement.ForOf.t) (stmt2 : (Loc.t, Loc.t) Ast.Statement.ForOf.t) : node change list option = - let open Ast.Statement.Switch.Case in - let { test = test1; consequent = consequent1} = s1 in - let { test = test2; consequent = consequent2} = s2 in - let test = diff_if_changed_nonopt_fn expression test1 test2 in - let consequent = statement_list consequent1 consequent2 in - Option.all [test; consequent] |> Option.map ~f:List.concat - - and pattern (p1: (Loc.t, Loc.t) Ast.Pattern.t) - (p2: (Loc.t, Loc.t) Ast.Pattern.t) - : node change list = - let changes = match p1, p2 with - | (_, Ast.Pattern.Identifier i1), (_, Ast.Pattern.Identifier i2) -> - pattern_identifier i1 i2 - | (_, Ast.Pattern.Array a1), (_, Ast.Pattern.Array a2) -> - pattern_array a1 a2 - | (_, Ast.Pattern.Object o1), (_, Ast.Pattern.Object o2) -> - pattern_object o1 o2 - | (_, Ast.Pattern.Assignment a1), (_, Ast.Pattern.Assignment a2) -> - Some (pattern_assignment a1 a2) - | (_, Ast.Pattern.Expression e1), (_, Ast.Pattern.Expression e2) -> - Some (expression e1 e2) - | _, _ -> + Ast.Statement.ForOf.( + let { left = left1; right = right1; body = body1; async = async1 } = stmt1 in + let { left = left2; right = right2; body = body2; async = async2 } = stmt2 in + let left = + if left1 == left2 then + Some [] + else + for_of_statement_lhs left1 left2 + in + let body = Some (diff_if_changed statement body1 body2) in + let right = Some (diff_if_changed expression right1 right2) in + let async = + if async1 != async2 then None - in - let old_loc = Ast_utils.loc_of_pattern p1 in - Option.value changes ~default:[(old_loc, Replace (Pattern p1, Pattern p2))] - - and pattern_assignment (a1: (Loc.t, Loc.t) Ast.Pattern.Assignment.t) - (a2: (Loc.t, Loc.t) Ast.Pattern.Assignment.t) + else + Some [] + in + join_diff_list [left; right; body; async]) + and for_of_statement_lhs + (left1 : (Loc.t, Loc.t) Ast.Statement.ForOf.left) + (left2 : (Loc.t, Loc.t) Ast.Statement.ForOf.left) : node change list option = + Ast.Statement.ForOf.( + match (left1, left2) with + | (LeftDeclaration (_, decl1), LeftDeclaration (_, decl2)) -> + variable_declaration decl1 decl2 + | (LeftPattern p1, LeftPattern p2) -> Some (pattern p1 p2) + | (LeftDeclaration _, LeftPattern _) + | (LeftPattern _, LeftDeclaration _) -> + None) + and do_while_statement + loc + (stmt1 : (Loc.t, Loc.t) Ast.Statement.DoWhile.t) + (stmt2 : (Loc.t, Loc.t) Ast.Statement.DoWhile.t) : node change list = + Ast.Statement.DoWhile.( + let { body = body1; test = test1; comments = comments1 } = stmt1 in + let { body = body2; test = test2; comments = comments2 } = stmt2 in + let body = diff_if_changed statement body1 body2 in + let test = diff_if_changed expression test1 test2 in + let comments = syntax_opt loc comments1 comments2 |> Option.value ~default:[] in + List.concat [body; test; comments]) + and return_statement + loc + (stmt1 : (Loc.t, Loc.t) Ast.Statement.Return.t) + (stmt2 : (Loc.t, Loc.t) Ast.Statement.Return.t) : node change list option = + Ast.Statement.Return.( + let { argument = argument1; comments = comments1 } = stmt1 in + let { argument = argument2; comments = comments2 } = stmt2 in + let comments = syntax_opt loc comments1 comments2 in + join_diff_list [comments; diff_if_changed_nonopt_fn expression argument1 argument2]) + and throw_statement + (stmt1 : (Loc.t, Loc.t) Ast.Statement.Throw.t) (stmt2 : (Loc.t, Loc.t) Ast.Statement.Throw.t) : node change list = - let open Ast.Pattern.Assignment in - let { left = left1; right = right1 } = a1 in - let { left = left2; right = right2 } = 
a2 in - let left_diffs = diff_if_changed pattern left1 left2 in - let right_diffs = diff_if_changed expression right1 right2 in - left_diffs @ right_diffs - - and pattern_object (o1: (Loc.t, Loc.t) Ast.Pattern.Object.t) - (o2: (Loc.t, Loc.t) Ast.Pattern.Object.t) - : node change list option = - let open Ast.Pattern.Object in - let { properties = properties1; annot = annot1 } = o1 in - let { properties = properties2; annot = annot2 } = o2 in - if annot1 != annot2 then - None - else + Ast.Statement.Throw.( + let { argument = argument1 } = stmt1 in + let { argument = argument2 } = stmt2 in + diff_if_changed expression argument1 argument2) + and labeled_statement + (labeled1 : (Loc.t, Loc.t) Ast.Statement.Labeled.t) + (labeled2 : (Loc.t, Loc.t) Ast.Statement.Labeled.t) : node change list = + Ast.Statement.Labeled.( + let { label = label1; body = body1 } = labeled1 in + let { label = label2; body = body2 } = labeled2 in + let label_diff = diff_if_changed identifier label1 label2 in + let body_diff = diff_if_changed statement body1 body2 in + label_diff @ body_diff) + and switch_statement + (stmt1 : (Loc.t, Loc.t) Ast.Statement.Switch.t) + (stmt2 : (Loc.t, Loc.t) Ast.Statement.Switch.t) : node change list option = + Ast.Statement.Switch.( + let { discriminant = discriminant1; cases = cases1 } = stmt1 in + let { discriminant = discriminant2; cases = cases2 } = stmt2 in + let discriminant = Some (diff_if_changed expression discriminant1 discriminant2) in + let cases = diff_and_recurse_no_trivial switch_case cases1 cases2 in + join_diff_list [discriminant; cases]) + and switch_case + ((_, s1) : (Loc.t, Loc.t) Ast.Statement.Switch.Case.t) + ((_, s2) : (Loc.t, Loc.t) Ast.Statement.Switch.Case.t) : node change list option = + Ast.Statement.Switch.Case.( + let { test = test1; consequent = consequent1 } = s1 in + let { test = test2; consequent = consequent2 } = s2 in + let test = diff_if_changed_nonopt_fn expression test1 test2 in + let consequent = statement_list consequent1 consequent2 in + join_diff_list [test; consequent]) + and function_param_pattern + (pat1 : (Loc.t, Loc.t) Ast.Pattern.t) (pat2 : (Loc.t, Loc.t) Ast.Pattern.t) : + node change list = + binding_pattern pat1 pat2 + and binding_pattern (pat1 : (Loc.t, Loc.t) Ast.Pattern.t) (pat2 : (Loc.t, Loc.t) Ast.Pattern.t) : + node change list = + pattern pat1 pat2 + and pattern (p1 : (Loc.t, Loc.t) Ast.Pattern.t) (p2 : (Loc.t, Loc.t) Ast.Pattern.t) : + node change list = + let changes = + match (p1, p2) with + | ((_, Ast.Pattern.Identifier i1), (_, Ast.Pattern.Identifier i2)) -> + pattern_identifier i1 i2 + | ((loc, Ast.Pattern.Array a1), (_, Ast.Pattern.Array a2)) -> pattern_array loc a1 a2 + | ((_, Ast.Pattern.Object o1), (_, Ast.Pattern.Object o2)) -> pattern_object o1 o2 + | ((_, Ast.Pattern.Expression e1), (_, Ast.Pattern.Expression e2)) -> Some (expression e1 e2) + | (_, _) -> None + in + let old_loc = Ast_utils.loc_of_pattern p1 in + Option.value changes ~default:[(old_loc, Replace (Pattern p1, Pattern p2))] + and pattern_object + (o1 : (Loc.t, Loc.t) Ast.Pattern.Object.t) (o2 : (Loc.t, Loc.t) Ast.Pattern.Object.t) : + node change list option = + Ast.Pattern.Object.( + let { properties = properties1; annot = annot1 } = o1 in + let { properties = properties2; annot = annot2 } = o2 in + let properties_diff = diff_and_recurse_no_trivial pattern_object_property properties1 properties2 - - and pattern_object_property (p1: (Loc.t, Loc.t) Ast.Pattern.Object.property) - (p2: (Loc.t, Loc.t) Ast.Pattern.Object.property) + in + let annot_diff = 
diff_if_changed type_annotation_hint annot1 annot2 |> Option.return in + join_diff_list [properties_diff; annot_diff]) + and pattern_object_property + (p1 : (Loc.t, Loc.t) Ast.Pattern.Object.property) + (p2 : (Loc.t, Loc.t) Ast.Pattern.Object.property) : node change list option = + Ast.Pattern.Object.( + match (p1, p2) with + | (Property (_, p3), Property (_, p4)) -> + Ast.Pattern.Object.Property.( + let { key = key1; pattern = pattern1; default = default1; shorthand = shorthand1 } = + p3 + in + let { key = key2; pattern = pattern2; default = default2; shorthand = shorthand2 } = + p4 + in + let keys = diff_if_changed_ret_opt pattern_object_property_key key1 key2 in + let pats = Some (diff_if_changed pattern pattern1 pattern2) in + let defaults = diff_if_changed_nonopt_fn expression default1 default2 in + (match (shorthand1, shorthand2) with + | (false, false) -> join_diff_list [keys; pats; defaults] + | (_, _) -> None)) + | (RestProperty (_, rp1), RestProperty (_, rp2)) -> + Ast.Pattern.Object.RestProperty.( + let { argument = argument1 } = rp1 in + let { argument = argument2 } = rp2 in + Some (diff_if_changed pattern argument1 argument2)) + | (_, _) -> None) + and pattern_object_property_key + (k1 : (Loc.t, Loc.t) Ast.Pattern.Object.Property.key) + (k2 : (Loc.t, Loc.t) Ast.Pattern.Object.Property.key) : node change list option = + let module POP = Ast.Pattern.Object.Property in + match (k1, k2) with + | (POP.Literal (loc, l1), POP.Literal (_, l2)) -> + diff_if_changed (literal loc) l1 l2 |> Option.return + | (POP.Identifier i1, POP.Identifier i2) -> diff_if_changed identifier i1 i2 |> Option.return + | (POP.Computed e1, POP.Computed e2) -> diff_if_changed expression e1 e2 |> Option.return + | (_, _) -> None + and pattern_array + loc (a1 : (Loc.t, Loc.t) Ast.Pattern.Array.t) (a2 : (Loc.t, Loc.t) Ast.Pattern.Array.t) : + node change list option = + Ast.Pattern.Array.( + let { elements = elements1; annot = annot1; comments = comments1 } = a1 in + let { elements = elements2; annot = annot2; comments = comments2 } = a2 in + let elements_diff = diff_and_recurse_no_trivial pattern_array_e elements1 elements2 in + let annot_diff = diff_if_changed type_annotation_hint annot1 annot2 |> Option.return in + let comments_diff = syntax_opt loc comments1 comments2 in + join_diff_list [comments_diff; elements_diff; annot_diff]) + and pattern_array_e + (eo1 : (Loc.t, Loc.t) Ast.Pattern.Array.element option) + (eo2 : (Loc.t, Loc.t) Ast.Pattern.Array.element option) : node change list option = + Ast.Pattern.Array.( + match (eo1, eo2) with + | (Some (Element p1), Some (Element p2)) -> pattern_array_element p1 p2 + | (Some (RestElement re1), Some (RestElement re2)) -> Some (pattern_array_rest re1 re2) + | (None, None) -> Some [] (* Both elements elided *) + | (_, _) -> None) + (* one element is elided and another is not *) + and pattern_array_element + ((_, e1) : (Loc.t, Loc.t) Ast.Pattern.Array.Element.t) + ((_, e2) : (Loc.t, Loc.t) Ast.Pattern.Array.Element.t) : node change list option = + Ast.Pattern.Array.Element.( + let { argument = argument1; default = default1 } = e1 in + let { argument = argument2; default = default2 } = e2 in + let args = Some (diff_if_changed pattern argument1 argument2) in + let defaults = diff_if_changed_nonopt_fn expression default1 default2 in + join_diff_list [args; defaults]) + and pattern_array_rest + ((_, r1) : (Loc.t, Loc.t) Ast.Pattern.Array.RestElement.t) + ((_, r2) : (Loc.t, Loc.t) Ast.Pattern.Array.RestElement.t) : node change list = + Ast.Pattern.Array.RestElement.( + 
let { argument = argument1 } = r1 in + let { argument = argument2 } = r2 in + pattern argument1 argument2) + and pattern_identifier + (i1 : (Loc.t, Loc.t) Ast.Pattern.Identifier.t) (i2 : (Loc.t, Loc.t) Ast.Pattern.Identifier.t) : node change list option = - let open Ast.Pattern.Object in - match p1, p2 with - | (Property (_, p3), Property (_, p4)) -> - let open Ast.Pattern.Object.Property in - let { key = key1; pattern = pattern1; shorthand = shorthand1; } = p3 in - let { key = key2; pattern = pattern2; shorthand = shorthand2; } = p4 in - let keys = diff_if_changed_opt pattern_object_property_key (Some key1) (Some key2) in - let pats = Some (diff_if_changed pattern pattern1 pattern2) in - (match shorthand1, shorthand2 with - | false, false -> - Option.all [keys; pats] |> Option.map ~f:List.concat - | _, _ -> - None) - | (RestProperty (_, rp1) ,RestProperty (_, rp2)) -> - let open Ast.Pattern.Object.RestProperty in - let { argument = argument1 } = rp1 in - let { argument = argument2 } = rp2 in - Some (diff_if_changed pattern argument1 argument2) - | _, _ -> + Ast.Pattern.Identifier.( + let { name = name1; annot = annot1; optional = optional1 } = i1 in + let { name = name2; annot = annot2; optional = optional2 } = i2 in + if optional1 != optional2 then None - - and pattern_object_property_key (k1: (Loc.t, Loc.t) Ast.Pattern.Object.Property.key) - (k2: (Loc.t, Loc.t) Ast.Pattern.Object.Property.key) - : node change list option = - let open Ast.Pattern.Object.Property in - match k1, k2 with - | Literal _, Literal _ -> - (* TODO: recurse into literals *) - None - | Ast.Pattern.Object.Property.Identifier i1, Ast.Pattern.Object.Property.Identifier i2 -> - identifier i1 i2 |> Option.return - | Computed e1, Computed e2 -> - Some (expression e1 e2) - | _, _ -> - None - - and pattern_array (a1: (Loc.t, Loc.t) Ast.Pattern.Array.t) - (a2: (Loc.t, Loc.t) Ast.Pattern.Array.t) + else + let ids = diff_if_changed identifier name1 name2 |> Option.return in + let annots = Some (diff_if_changed type_annotation_hint annot1 annot2) in + join_diff_list [ids; annots]) + and function_rest_param + (elem1 : (Loc.t, Loc.t) Ast.Function.RestParam.t) + (elem2 : (Loc.t, Loc.t) Ast.Function.RestParam.t) : node change list = + Ast.Function.RestParam.( + let (_, { argument = arg1 }) = elem1 in + let (_, { argument = arg2 }) = elem2 in + binding_pattern arg1 arg2) + and type_ + ((loc1, type1) : (Loc.t, Loc.t) Ast.Type.t) ((_loc2, type2) : (Loc.t, Loc.t) Ast.Type.t) : + node change list = + Ast.Type.( + let type_diff = + match (type1, type2) with + | (NumberLiteral n1, NumberLiteral n2) -> + Some (diff_if_changed (number_literal_type loc1) n1 n2) + | (Function fn1, Function fn2) -> diff_if_changed_ret_opt function_type fn1 fn2 + | (Interface i1, Interface i2) -> interface_type i1 i2 + | (Generic g1, Generic g2) -> generic_type g1 g2 + | (Intersection (t0, t1, ts), Intersection (t0', t1', ts')) + | (Union (t0, t1, ts), Union (t0', t1', ts')) -> + diff_and_recurse_nonopt_no_trivial type_ (t0 :: t1 :: ts) (t0' :: t1' :: ts') + | (Nullable (t1_loc, t1), Nullable (t2_loc, t2)) -> Some (type_ (t1_loc, t1) (t2_loc, t2)) + | (Object obj1, Object obj2) -> diff_if_changed_ret_opt object_type obj1 obj2 + | (Ast.Type.StringLiteral s1, Ast.Type.StringLiteral s2) -> (string_literal loc1) s1 s2 + | (Typeof (t1_loc, t1), Typeof (t2_loc, t2)) -> Some (type_ (t1_loc, t1) (t2_loc, t2)) + | (Tuple t1, Tuple t2) -> diff_if_changed_ret_opt tuple_type t1 t2 + | (Array t1, Array t2) -> Some (type_ t1 t2) + | _ -> None + in + Option.value type_diff 
~default:[(loc1, Replace (Type (loc1, type1), Type (loc1, type2)))]) + and interface_type + (it1 : (Loc.t, Loc.t) Ast.Type.Interface.t) (it2 : (Loc.t, Loc.t) Ast.Type.Interface.t) : + node change list option = + Ast.Type.Interface.( + let { extends = extends1; body = (_loc1, body1) } = it1 in + let { extends = extends2; body = (_loc2, body2) } = it2 in + let extends_diff = diff_and_recurse_no_trivial generic_type_with_loc extends1 extends2 in + let body_diff = diff_if_changed_ret_opt object_type body1 body2 in + join_diff_list [extends_diff; body_diff]) + and generic_type + (gt1 : (Loc.t, Loc.t) Ast.Type.Generic.t) (gt2 : (Loc.t, Loc.t) Ast.Type.Generic.t) : + node change list option = + Ast.Type.Generic.( + let { id = id1; targs = targs1 } = gt1 in + let { id = id2; targs = targs2 } = gt2 in + let id_diff = diff_if_changed_ret_opt generic_identifier_type id1 id2 in + let targs_diff = diff_if_changed_opt type_parameter_instantiation targs1 targs2 in + join_diff_list [id_diff; targs_diff]) + and generic_type_with_loc + ((_loc1, gt1) : Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t) + ((_loc2, gt2) : Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t) : node change list option = + generic_type gt1 gt2 + and generic_identifier_type + (git1 : (Loc.t, Loc.t) Ast.Type.Generic.Identifier.t) + (git2 : (Loc.t, Loc.t) Ast.Type.Generic.Identifier.t) : node change list option = + Ast.Type.Generic.Identifier.( + match (git1, git2) with + | (Unqualified id1, Unqualified id2) -> diff_if_changed identifier id1 id2 |> Option.return + | ( Qualified (_loc1, { qualification = q1; id = id1 }), + Qualified (_loc2, { qualification = q2; id = id2 }) ) -> + let qualification_diff = diff_if_changed_ret_opt generic_identifier_type q1 q2 in + let id_diff = diff_if_changed identifier id1 id2 |> Option.return in + join_diff_list [qualification_diff; id_diff] + | _ -> None) + and object_type (ot1 : (Loc.t, Loc.t) Ast.Type.Object.t) (ot2 : (Loc.t, Loc.t) Ast.Type.Object.t) : node change list option = - let open Ast.Pattern.Array in - let { elements = elements1; annot = annot1 } = a1 in - let { elements = elements2; annot = annot2 } = a2 in - if annot1 != annot2 then + Ast.Type.Object.( + let { properties = props1; exact = exact1; inexact = inexact1 } = ot1 in + let { properties = props2; exact = exact2; inexact = inexact2 } = ot2 in + (* These are boolean literals, so structural equality is ok *) + let exact_diff = + if exact1 = exact2 then + Some [] + else + None + in + let inexact_diff = + if inexact1 = inexact2 then + Some [] + else + None + in + let properties_diff = diff_and_recurse_no_trivial object_type_property props1 props2 in + join_diff_list [exact_diff; inexact_diff; properties_diff]) + and object_type_property + (prop1 : (Loc.t, Loc.t) Ast.Type.Object.property) + (prop2 : (Loc.t, Loc.t) Ast.Type.Object.property) : node change list option = + Ast.Type.Object.( + match (prop1, prop2) with + | (Property p1, Property p2) -> diff_if_changed_ret_opt object_property_type p1 p2 + | (SpreadProperty _p1, SpreadProperty _p2) -> None (* TODO *) + | (Indexer _p1, Indexer _p2) -> None (* TODO *) + | (CallProperty _p1, CallProperty _p2) -> None (* TODO *) + | (InternalSlot _s1, InternalSlot _s2) -> None (* TODO *) + | _ -> None) + and object_property_type + (optype1 : (Loc.t, Loc.t) Ast.Type.Object.Property.t) + (optype2 : (Loc.t, Loc.t) Ast.Type.Object.Property.t) : node change list option = + Ast.Type.Object.Property.( + let ( _loc1, + { + key = key1; + value = value1; + optional = opt1; + static = static1; + proto = proto1; + _method 
= method1; + variance = var1; + } ) = + optype1 + in + let ( _loc2, + { + key = key2; + value = value2; + optional = opt2; + static = static2; + proto = proto2; + _method = method2; + variance = var2; + } ) = + optype2 + in + if opt1 != opt2 || static1 != static2 || proto1 != proto2 || method1 != method2 then None - else - diff_and_recurse_no_trivial pattern_array_element elements1 elements2 - - and pattern_array_element (eo1: (Loc.t, Loc.t) Ast.Pattern.Array.element option) - (eo2: (Loc.t, Loc.t) Ast.Pattern.Array.element option) - : node change list option = - let open Ast.Pattern.Array in - match eo1, eo2 with - | Some (Element p1), Some (Element p2) -> - Some (pattern p1 p2) - | Some (RestElement re1), Some (RestElement re2) -> - Some (pattern_array_rest re1 re2) - | None, None -> - Some [] (* Both elements elided *) - | _, _ -> - None (* one element is elided and another is not *) - - and pattern_array_rest ((_, r1): (Loc.t, Loc.t) Ast.Pattern.Array.RestElement.t) - ((_, r2): (Loc.t, Loc.t) Ast.Pattern.Array.RestElement.t) - : node change list = - let open Ast.Pattern.Array.RestElement in - let { argument = argument1 } = r1 in - let { argument = argument2 } = r2 in - pattern argument1 argument2 - - and pattern_identifier (i1: (Loc.t, Loc.t) Ast.Pattern.Identifier.t) - (i2: (Loc.t, Loc.t) Ast.Pattern.Identifier.t) - : node change list option = - let open Ast.Pattern.Identifier in - let { name = name1; annot = annot1; optional = optional1 } = i1 in - let { name = name2; annot = annot2; optional = optional2 } = i2 in - if optional1 != optional2 then - None - else - let ids = diff_if_changed identifier name1 name2 |> Option.return in - let annots = diff_if_changed_opt_arg type_annotation_opt annot1 annot2 in - Option.(all [ids; annots] >>| List.concat) - - and type_annotation_opt (annot1: (Loc.t, Loc.t) Ast.Type.annotation option) - (annot2: (Loc.t, Loc.t) Ast.Type.annotation option) + else + let variance_diff = diff_if_changed_ret_opt variance var1 var2 in + let key_diff = diff_if_changed_ret_opt object_key key1 key2 in + let value_diff = diff_if_changed_ret_opt object_property_value_type value1 value2 in + join_diff_list [variance_diff; key_diff; value_diff]) + and object_property_value_type + (opvt1 : (Loc.t, Loc.t) Ast.Type.Object.Property.value) + (opvt2 : (Loc.t, Loc.t) Ast.Type.Object.Property.value) : node change list option = + Ast.Type.Object.Property.( + match (opvt1, opvt2) with + | (Init t1, Init t2) -> diff_if_changed type_ t1 t2 |> Option.return + | (Get (_loc1, ft1), Get (_loc2, ft2)) + | (Set (_loc1, ft1), Set (_loc2, ft2)) -> + diff_if_changed_ret_opt function_type ft1 ft2 + | _ -> None) + and tuple_type (tp1 : (Loc.t, Loc.t) Ast.Type.t list) (tp2 : (Loc.t, Loc.t) Ast.Type.t list) : + node change list option = + diff_and_recurse_nonopt_no_trivial type_ tp1 tp2 + and type_or_implicit + (t1 : (Loc.t, Loc.t) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation) + (t2 : (Loc.t, Loc.t) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation) : node change list option = - match annot1, annot2 with - | None, None -> Some [] - | Some (loc, typ), None -> Some [loc, Delete (TypeAnnotation (loc, typ))] - | None, Some _ -> None (* Nowhere in the original program to insert the annotation *) - | Some annot1, Some annot2 -> - Some (type_annotation annot1 annot2) - - and type_annotation ((loc1, typ1): (Loc.t, Loc.t) Ast.Type.annotation) - ((loc2, typ2): (Loc.t, Loc.t) Ast.Type.annotation) - : node change list = - [loc1, Replace (TypeAnnotation (loc1, 
typ1), TypeAnnotation (loc2, typ2))] - + Ast.Expression.TypeParameterInstantiation.( + match (t1, t2) with + | (Explicit type1, Explicit type2) -> Some (diff_if_changed type_ type1 type2) + | (Implicit _, Implicit _) -> Some [] + | _ -> None) + and type_parameter_instantiation_with_implicit + (pi1 : (Loc.t, Loc.t) Ast.Expression.TypeParameterInstantiation.t) + (pi2 : (Loc.t, Loc.t) Ast.Expression.TypeParameterInstantiation.t) : node change list option + = + let (_, t_args1) = pi1 in + let (_, t_args2) = pi2 in + diff_and_recurse_no_trivial type_or_implicit t_args1 t_args2 + and type_parameter_instantiation + (pi1 : (Loc.t, Loc.t) Ast.Type.ParameterInstantiation.t) + (pi2 : (Loc.t, Loc.t) Ast.Type.ParameterInstantiation.t) : node change list option = + let (_, t_args1) = pi1 in + let (_, t_args2) = pi2 in + diff_and_recurse_nonopt_no_trivial type_ t_args1 t_args2 + and function_param_type + (fpt1 : (Loc.t, Loc.t) Ast.Type.Function.Param.t) + (fpt2 : (Loc.t, Loc.t) Ast.Type.Function.Param.t) : node change list option = + Ast.Type.Function.Param.( + let (_loc1, { annot = annot1; name = name1; optional = opt1 }) = fpt1 in + let (_loc2, { annot = annot2; name = name2; optional = opt2 }) = fpt2 in + (* These are boolean literals, so structural equality is ok *) + let optional_diff = + if opt1 = opt2 then + Some [] + else + None + in + let name_diff = diff_if_changed_nonopt_fn identifier name1 name2 in + let annot_diff = diff_if_changed type_ annot1 annot2 |> Option.return in + join_diff_list [optional_diff; name_diff; annot_diff]) + and function_rest_param_type + (frpt1 : (Loc.t, Loc.t) Ast.Type.Function.RestParam.t) + (frpt2 : (Loc.t, Loc.t) Ast.Type.Function.RestParam.t) : node change list option = + Ast.Type.Function.RestParam.( + let (_loc1, { argument = arg1 }) = frpt1 in + let (_loc2, { argument = arg2 }) = frpt2 in + diff_if_changed_ret_opt function_param_type arg1 arg2) + and function_type + (ft1 : (Loc.t, Loc.t) Ast.Type.Function.t) (ft2 : (Loc.t, Loc.t) Ast.Type.Function.t) : + node change list option = + Ast.Type.Function.( + let { + params = (_params_loc1, { Params.params = params1; rest = rest1 }); + return = return1; + tparams = tparams1; + } = + ft1 + in + let { + params = (_params_loc2, { Params.params = params2; rest = rest2 }); + return = return2; + tparams = tparams2; + } = + ft2 + in + let tparams_diff = diff_if_changed_opt type_parameter_declaration tparams1 tparams2 in + let params_diff = diff_and_recurse_no_trivial function_param_type params1 params2 in + let rest_diff = diff_if_changed_opt function_rest_param_type rest1 rest2 in + let return_diff = diff_if_changed type_ return1 return2 |> Option.return in + join_diff_list [tparams_diff; params_diff; rest_diff; return_diff]) + and type_alias + (t_alias1 : (Loc.t, Loc.t) Ast.Statement.TypeAlias.t) + (t_alias2 : (Loc.t, Loc.t) Ast.Statement.TypeAlias.t) : node change list option = + Ast.Statement.TypeAlias.( + let { id = id1; tparams = t_params1; right = right1 } = t_alias1 in + let { id = id2; tparams = t_params2; right = right2 } = t_alias2 in + let id_diff = diff_if_changed identifier id1 id2 |> Option.return in + let t_params_diff = diff_if_changed_opt type_parameter_declaration t_params1 t_params2 in + let right_diff = diff_if_changed type_ right1 right2 |> Option.return in + join_diff_list [id_diff; t_params_diff; right_diff]) + and opaque_type + (o_type1 : (Loc.t, Loc.t) Ast.Statement.OpaqueType.t) + (o_type2 : (Loc.t, Loc.t) Ast.Statement.OpaqueType.t) : node change list option = + Ast.Statement.OpaqueType.( + let { 
id = id1; tparams = t_params1; impltype = impltype1; supertype = supertype1 } = + o_type1 + in + let { id = id2; tparams = t_params2; impltype = impltype2; supertype = supertype2 } = + o_type2 + in + let id_diff = diff_if_changed identifier id1 id2 |> Option.return in + let t_params_diff = diff_if_changed_opt type_parameter_declaration t_params1 t_params2 in + let supertype_diff = diff_if_changed_nonopt_fn type_ supertype1 supertype2 in + let impltype_diff = diff_if_changed_nonopt_fn type_ impltype1 impltype2 in + join_diff_list [id_diff; t_params_diff; supertype_diff; impltype_diff]) + and type_parameter_declaration + (pd1 : (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t) + (pd2 : (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t) : node change list option = + let (_, t_params1) = pd1 in + let (_, t_params2) = pd2 in + diff_and_recurse_nonopt_no_trivial type_parameter_declaration_type_param t_params1 t_params2 + and type_parameter_declaration_type_param + ((loc1, t_param1) : (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.TypeParam.t) + ((_, t_param2) : (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.TypeParam.t) : node change list + = + Ast.Type.ParameterDeclaration.TypeParam.( + let { name = name1; bound = bound1; variance = variance1; default = default1 } = t_param1 in + let { name = name2; bound = bound2; variance = variance2; default = default2 } = t_param2 in + let variance_diff = diff_if_changed_ret_opt variance variance1 variance2 in + let name_diff = diff_if_changed identifier name1 name2 |> Option.return in + let bound_diff = diff_if_changed type_annotation_hint bound1 bound2 |> Option.return in + let default_diff = diff_if_changed_nonopt_fn type_ default1 default2 in + let result = join_diff_list [variance_diff; name_diff; bound_diff; default_diff] in + Option.value + result + ~default:[(loc1, Replace (TypeParam (loc1, t_param1), TypeParam (loc1, t_param2)))]) + and variance (var1 : Loc.t Ast.Variance.t option) (var2 : Loc.t Ast.Variance.t option) : + node change list option = + match (var1, var2) with + | (Some (loc1, var1), Some (_, var2)) -> + Some [(loc1, Replace (Variance (loc1, var1), Variance (loc1, var2)))] + | (Some (loc1, var1), None) -> Some [(loc1, Delete (Variance (loc1, var1)))] + | (None, None) -> Some [] + | _ -> None + and type_annotation_hint + (return1 : (Loc.t, Loc.t) Ast.Type.annotation_or_hint) + (return2 : (Loc.t, Loc.t) Ast.Type.annotation_or_hint) : node change list = + Ast.Type.( + let annot_change typ = + match return2 with + | Available (_, (_, Function _)) -> FunctionTypeAnnotation typ + | _ -> TypeAnnotation typ + in + match (return1, return2) with + | (Missing _, Missing _) -> [] + | (Available (loc1, typ), Missing _) -> [(loc1, Delete (TypeAnnotation (loc1, typ)))] + | (Missing loc1, Available annot) -> [(loc1, Insert (None, [annot_change annot]))] + | (Available annot1, Available annot2) -> type_annotation annot1 annot2) + and type_annotation + ((loc1, typ1) : (Loc.t, Loc.t) Ast.Type.annotation) + ((loc2, typ2) : (Loc.t, Loc.t) Ast.Type.annotation) : node change list = + Ast.Type.( + match (typ1, typ2) with + | (_, (_, Function _)) -> + [(loc1, Replace (TypeAnnotation (loc1, typ1), FunctionTypeAnnotation (loc2, typ2)))] + | (_, _) -> type_ typ1 typ2) and type_cast - (type_cast1: (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t) - (type_cast2: (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t): node change list = - let open Flow_ast.Expression.TypeCast in - let { expression=expr1; annot=annot1; } = type_cast1 in - let { expression=expr2; annot=annot2; } = 
type_cast2 in - let expr = diff_if_changed expression expr1 expr2 in - let annot = diff_if_changed type_annotation annot1 annot2 in - expr @ annot - + (type_cast1 : (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t) + (type_cast2 : (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t) : node change list = + Flow_ast.Expression.TypeCast.( + let { expression = expr1; annot = annot1 } = type_cast1 in + let { expression = expr2; annot = annot2 } = type_cast2 in + let expr = diff_if_changed expression expr1 expr2 in + let annot = diff_if_changed type_annotation annot1 annot2 in + expr @ annot) and type_cast_added - (expr: (Loc.t, Loc.t) Flow_ast.Expression.t) - (loc: Loc.t) - (type_cast: (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t): node change list = - let open Flow_ast.Expression.TypeCast in - let open Loc in - let { expression=expr2; annot=annot2; } = type_cast in - let expr_diff_rev = diff_if_changed expression expr expr2 |> List.rev in - let append_annot_rev = - ({loc with start = loc._end }, Insert (Some "", [TypeAnnotation annot2; Raw ")"])) - :: expr_diff_rev in - ({loc with _end = loc.start}, Insert (Some "", [Raw "("])) :: (List.rev append_annot_rev) -in - -program' program1 program2 + (expr : (Loc.t, Loc.t) Flow_ast.Expression.t) + (loc : Loc.t) + (type_cast : (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t) : node change list = + Flow_ast.Expression.TypeCast.( + Loc.( + let { expression = expr2; annot = annot2 } = type_cast in + let expr_diff_rev = diff_if_changed expression expr expr2 |> List.rev in + let append_annot_rev = + ({ loc with start = loc._end }, Insert (Some "", [TypeAnnotation annot2; Raw ")"])) + :: expr_diff_rev + in + ({ loc with _end = loc.start }, Insert (Some "", [Raw "("])) :: List.rev append_annot_rev)) + and update + (update1 : (Loc.t, Loc.t) Ast.Expression.Update.t) + (update2 : (Loc.t, Loc.t) Ast.Expression.Update.t) : node change list option = + Ast.Expression.Update.( + let { operator = op1; argument = arg1; prefix = p1 } = update1 in + let { operator = op2; argument = arg2; prefix = p2 } = update2 in + if op1 != op2 || p1 != p2 then + None + else + Some (expression arg1 arg2)) + in + program' program1 program2 |> List.sort change_compare diff --git a/src/parser_utils/flow_ast_differ.mli b/src/parser_utils/flow_ast_differ.mli index e6622ae30d4..53f2746bd92 100644 --- a/src/parser_utils/flow_ast_differ.mli +++ b/src/parser_utils/flow_ast_differ.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2014, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,31 +10,47 @@ type 'a change' = | Insert of (* separator. Defaults to \n *) string option * 'a list | Delete of 'a -type 'a change = (Loc.t * 'a change') +type 'a change = Loc.t * 'a change' (* Algorithm to use to compute diff. Trivial algorithm just compares lists pairwise and generates replacements to convert one to the other. Standard is more computationally intensive but will generate the minimal edit script to convert one list to the other. 
*) -type diff_algorithm = Trivial | Standard +type diff_algorithm = + | Trivial + | Standard type node = | Raw of string + | Comment of Loc.t Flow_ast.Comment.t + | NumberLiteralNode of Flow_ast.NumberLiteral.t + | Literal of Loc.t Flow_ast.Literal.t + | StringLiteral of Flow_ast.StringLiteral.t | Statement of (Loc.t, Loc.t) Flow_ast.Statement.t | Program of (Loc.t, Loc.t) Flow_ast.program | Expression of (Loc.t, Loc.t) Flow_ast.Expression.t - | Identifier of Loc.t Flow_ast.Identifier.t | Pattern of (Loc.t, Loc.t) Flow_ast.Pattern.t + | Params of (Loc.t, Loc.t) Flow_ast.Function.Params.t + | Variance of Loc.t Flow_ast.Variance.t + | Type of (Loc.t, Loc.t) Flow_ast.Type.t + | TypeParam of (Loc.t, Loc.t) Flow_ast.Type.ParameterDeclaration.TypeParam.t | TypeAnnotation of (Loc.t, Loc.t) Flow_ast.Type.annotation + | FunctionTypeAnnotation of (Loc.t, Loc.t) Flow_ast.Type.annotation | ClassProperty of (Loc.t, Loc.t) Flow_ast.Class.Property.t | ObjectProperty of (Loc.t, Loc.t) Flow_ast.Expression.Object.property + | TemplateLiteral of (Loc.t, Loc.t) Flow_ast.Expression.TemplateLiteral.t + | JSXChild of (Loc.t, Loc.t) Flow_ast.JSX.child + | JSXIdentifier of Loc.t Flow_ast.JSX.Identifier.t (* Diffs the given ASTs using referential equality to determine whether two nodes are different. * This works well for transformations based on Flow_ast_mapper, which preserves identity, but it * does not work well for e.g. parsing two programs and determining their differences. *) -val program: diff_algorithm -> (Loc.t, Loc.t) Flow_ast.program -> - (Loc.t, Loc.t) Flow_ast.program -> node change list +val program : + diff_algorithm -> + (Loc.t, Loc.t) Flow_ast.program -> + (Loc.t, Loc.t) Flow_ast.program -> + node change list (* Diffs two lists and produces an edit script. This is exposed only for testing purposes *) type 'a diff_result = int * 'a change' -val list_diff: diff_algorithm -> 'a list -> 'a list -> ('a diff_result list) option +val list_diff : diff_algorithm -> 'a list -> 'a list -> 'a diff_result list option diff --git a/src/parser_utils/flow_ast_mapper.ml b/src/parser_utils/flow_ast_mapper.ml index 610c6d14305..6aa43ab7efe 100644 --- a/src/parser_utils/flow_ast_mapper.ml +++ b/src/parser_utils/flow_ast_mapper.ml @@ -1,1371 +1,1830 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -let map_opt: 'node. ('node -> 'node) -> 'node option -> 'node option = - fun map opt -> - match opt with - | Some item -> - let item' = map item in - if item == item' then opt else Some item' - | None -> opt - -let id_loc: 'node 'a. (Loc.t -> 'node -> 'node) -> Loc.t -> 'node -> 'a -> ('node -> 'a) -> 'a = - fun map loc item same diff -> - let item' = map loc item in - if item == item' then same else diff item' - -let id: 'node 'a. ('node -> 'node) -> 'node -> 'a -> ('node -> 'a) -> 'a = - fun map item same diff -> - let item' = map item in - if item == item' then same else diff item' - -let map_loc: 'node. 
(Loc.t -> 'node -> 'node) -> (Loc.t * 'node) -> (Loc.t * 'node) = - fun map same -> - let loc, item = same in - id_loc map loc item same (fun diff -> (loc, diff)) - -class mapper = object(this) - method program (program: (Loc.t, Loc.t) Flow_ast.program) = - let (loc, statements, comments) = program in - let statements' = this#toplevel_statement_list statements in - let comments' = ListUtils.ident_map (this#comment) comments in - if statements == statements' && comments == comments' then program - else loc, statements', comments' - - method statement (stmt: (Loc.t, Loc.t) Flow_ast.Statement.t) = - let open Flow_ast.Statement in - match stmt with - | (loc, Block block) -> - id_loc this#block loc block stmt (fun block -> loc, Block block) - - | (loc, Break break) -> - id_loc this#break loc break stmt (fun break -> loc, Break break) - - | (loc, ClassDeclaration cls) -> - id_loc this#class_ loc cls stmt (fun cls -> loc, ClassDeclaration cls) - - | (loc, Continue cont) -> - id_loc this#continue loc cont stmt (fun cont -> loc, Continue cont) - - | (loc, Debugger) -> - this#debugger loc; - stmt - - | (loc, DeclareClass stuff) -> - id_loc this#declare_class loc stuff stmt (fun stuff -> loc, DeclareClass stuff) - - | (loc, DeclareExportDeclaration decl) -> - id_loc this#declare_export_declaration loc decl stmt (fun decl -> loc, DeclareExportDeclaration decl) - - | (loc, DeclareFunction stuff) -> - id_loc this#declare_function loc stuff stmt (fun stuff -> loc, DeclareFunction stuff) - - | (loc, DeclareInterface stuff) -> - id_loc this#declare_interface loc stuff stmt (fun stuff -> loc, DeclareInterface stuff) +module Ast = Flow_ast - | (loc, DeclareModule m) -> - id_loc this#declare_module loc m stmt (fun m -> loc, DeclareModule m) - - | (loc, DeclareTypeAlias stuff) -> - id_loc this#declare_type_alias loc stuff stmt (fun stuff -> loc, DeclareTypeAlias stuff) - - | (loc, DeclareVariable stuff) -> - id_loc this#declare_variable loc stuff stmt (fun stuff -> loc, DeclareVariable stuff) - - | (loc, DeclareModuleExports annot) -> - id_loc this#declare_module_exports loc annot stmt (fun annot -> loc, DeclareModuleExports annot) - - | (loc, DoWhile stuff) -> - id_loc this#do_while loc stuff stmt (fun stuff -> loc, DoWhile stuff) - - | (loc, Empty) -> - this#empty loc; - stmt - - | (loc, ExportDefaultDeclaration decl) -> - id_loc this#export_default_declaration loc decl stmt (fun decl -> loc, ExportDefaultDeclaration decl) - - | (loc, ExportNamedDeclaration decl) -> - id_loc this#export_named_declaration loc decl stmt (fun decl -> loc, ExportNamedDeclaration decl) - - | (loc, Expression expr) -> - id_loc this#expression_statement loc expr stmt (fun expr -> loc, Expression expr) - - | (loc, For for_stmt) -> - id_loc this#for_statement loc for_stmt stmt (fun for_stmt -> loc, For for_stmt) - - | (loc, ForIn stuff) -> - id_loc this#for_in_statement loc stuff stmt (fun stuff -> loc, ForIn stuff) - - | (loc, ForOf stuff) -> - id_loc this#for_of_statement loc stuff stmt (fun stuff -> loc, ForOf stuff) - - | (loc, FunctionDeclaration func) -> - id_loc this#function_declaration loc func stmt (fun func -> loc, FunctionDeclaration func) - - | (loc, If if_stmt) -> - id_loc this#if_statement loc if_stmt stmt (fun if_stmt -> loc, If if_stmt) - - | (loc, ImportDeclaration decl) -> - id_loc this#import_declaration loc decl stmt (fun decl -> loc, ImportDeclaration decl) - - | (loc, InterfaceDeclaration stuff) -> - id_loc this#interface_declaration loc stuff stmt (fun stuff -> loc, InterfaceDeclaration stuff) - - | (loc, 
Labeled label) -> - id_loc this#labeled_statement loc label stmt (fun label -> loc, Labeled label) - - | (loc, OpaqueType otype) -> - id_loc this#opaque_type loc otype stmt (fun otype -> loc, OpaqueType otype) - - | (loc, Return ret) -> - id_loc this#return loc ret stmt (fun ret -> loc, Return ret) - - | (loc, Switch switch) -> - id_loc this#switch loc switch stmt (fun switch -> loc, Switch switch) - - | (loc, Throw throw) -> - id_loc this#throw loc throw stmt (fun throw -> loc, Throw throw) - - | (loc, Try try_stmt) -> - id_loc this#try_catch loc try_stmt stmt (fun try_stmt -> loc, Try try_stmt) - - | (loc, VariableDeclaration decl) -> - id_loc this#variable_declaration loc decl stmt (fun decl -> loc, VariableDeclaration decl) - - | (loc, While stuff) -> - id_loc this#while_ loc stuff stmt (fun stuff -> loc, While stuff) - - | (loc, With stuff) -> - id_loc this#with_ loc stuff stmt (fun stuff -> loc, With stuff) - - | (loc, TypeAlias stuff) -> - id_loc this#type_alias loc stuff stmt (fun stuff -> loc, TypeAlias stuff) - - (* TODO: Flow specific stuff *) - | (_loc, DeclareOpaqueType _) -> stmt - - method comment (c: Loc.t Flow_ast.Comment.t) = c - - method expression (expr: (Loc.t, Loc.t) Flow_ast.Expression.t) = - let open Flow_ast.Expression in - match expr with - | _, This -> expr - | _, Super -> expr - | loc, Array x -> id_loc this#array loc x expr (fun x -> loc, Array x) - | loc, ArrowFunction x -> id_loc this#arrow_function loc x expr (fun x -> loc, ArrowFunction x) - | loc, Assignment x -> id_loc this#assignment loc x expr (fun x -> loc, Assignment x) - | loc, Binary x -> id_loc this#binary loc x expr (fun x -> loc, Binary x) - | loc, Call x -> id_loc this#call loc x expr (fun x -> loc, Call x) - | loc, Class x -> id_loc this#class_ loc x expr (fun x -> loc, Class x) - | loc, Comprehension x -> id_loc this#comprehension loc x expr (fun x -> loc, Comprehension x) - | loc, Conditional x -> id_loc this#conditional loc x expr (fun x -> loc, Conditional x) - | loc, Function x -> id_loc this#function_ loc x expr (fun x -> loc, Function x) - | loc, Generator x -> id_loc this#generator loc x expr (fun x -> loc, Generator x) - | loc, Identifier x -> id this#identifier x expr (fun x -> loc, Identifier x) - | loc, Import x -> id (this#import loc) x expr (fun x -> loc, Import x) - | loc, JSXElement x -> id_loc this#jsx_element loc x expr (fun x -> loc, JSXElement x) - | loc, JSXFragment x -> id_loc this#jsx_fragment loc x expr (fun x -> loc, JSXFragment x) - | loc, Literal x -> id_loc this#literal loc x expr (fun x -> loc, Literal x) - | loc, Logical x -> id_loc this#logical loc x expr (fun x -> loc, Logical x) - | loc, Member x -> id_loc this#member loc x expr (fun x -> loc, Member x) - | loc, MetaProperty x -> id_loc this#meta_property loc x expr (fun x -> loc, MetaProperty x) - | loc, New x -> id_loc this#new_ loc x expr (fun x -> loc, New x) - | loc, Object x -> id_loc this#object_ loc x expr (fun x -> loc, Object x) - | loc, OptionalCall x -> id (this#optional_call loc) x expr (fun x -> loc, OptionalCall x) - | loc, OptionalMember x -> id_loc this#optional_member loc x expr (fun x -> loc, OptionalMember x) - | loc, Sequence x -> id_loc this#sequence loc x expr (fun x -> loc, Sequence x) - | loc, TaggedTemplate x -> id_loc this#tagged_template loc x expr (fun x -> loc, TaggedTemplate x) - | loc, TemplateLiteral x -> id_loc this#template_literal loc x expr (fun x -> loc, TemplateLiteral x) - | loc, TypeCast x -> id_loc this#type_cast loc x expr (fun x -> loc, TypeCast x) - | loc, Unary x -> 
id_loc this#unary_expression loc x expr (fun x -> loc, Unary x) - | loc, Update x -> id_loc this#update_expression loc x expr (fun x -> loc, Update x) - | loc, Yield x -> id_loc this#yield loc x expr (fun x -> loc, Yield x) - - method array _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Array.t) = - let open Flow_ast.Expression in - let { Array.elements } = expr in - let elements' = ListUtils.ident_map (map_opt this#expression_or_spread) elements in - if elements == elements' then expr - else { Array.elements = elements' } - - method arrow_function loc (expr: (Loc.t, Loc.t) Flow_ast.Function.t) = - this#function_ loc expr - - method assignment _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Assignment.t) = - let open Flow_ast.Expression.Assignment in - let { operator = _; left; right } = expr in - let left' = this#assignment_pattern left in - let right' = this#expression right in - if left == left' && right == right' then expr - else { expr with left = left'; right = right' } - - method binary _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Binary.t) = - let open Flow_ast.Expression.Binary in - let { operator = _; left; right } = expr in - let left' = this#expression left in - let right' = this#expression right in - if left == left' && right == right' then expr - else { expr with left = left'; right = right' } - - method block _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Block.t) = - let open Flow_ast.Statement.Block in - let { body } = stmt in - let body' = this#statement_list body in - if body == body' then stmt else { body = body' } - - method break _loc (break: Loc.t Flow_ast.Statement.Break.t) = - let open Flow_ast.Statement.Break in - let { label } = break in - let label' = map_opt this#label_identifier label in - if label == label' then break else { label = label' } - - method call _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Call.t) = - let open Flow_ast.Expression.Call in - let { callee; targs; arguments } = expr in - let callee' = this#expression callee in - let targs' = map_opt this#type_parameter_instantiation targs in - let arguments' = ListUtils.ident_map this#expression_or_spread arguments in - if callee == callee' && targs == targs' && arguments == arguments' then expr - else { callee = callee'; targs = targs'; arguments = arguments' } - - method optional_call loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.OptionalCall.t) = - let open Flow_ast.Expression.OptionalCall in - let { call; optional = _ } = expr in - let call' = this#call loc call in - if call == call' then expr - else { expr with call = call' } - - method catch_clause _loc (clause: (Loc.t, Loc.t) Flow_ast.Statement.Try.CatchClause.t') = - let open Flow_ast.Statement.Try.CatchClause in - let { param; body } = clause in - - let param' = map_opt this#catch_clause_pattern param in - let body' = map_loc this#block body in - if param == param' && body == body' then clause - else { param = param'; body = body' } - - method class_ _loc (cls: (Loc.t, Loc.t) Flow_ast.Class.t) = - let open Flow_ast.Class in - let { - id; body; tparams = _; - extends; - implements = _; classDecorators = _; - } = cls in - let id' = map_opt this#class_identifier id in - let body' = this#class_body body in - let extends' = map_opt (map_loc this#class_extends) extends in - if id == id' && body == body' && extends == extends' then cls - else { cls with id = id'; body = body'; extends = extends' } - - method class_extends _loc (extends: (Loc.t, Loc.t) Flow_ast.Class.Extends.t') = - let open Flow_ast.Class.Extends in - let { expr; targs } = extends 
in - let expr' = this#expression expr in - let targs' = map_opt this#type_parameter_instantiation targs in - if expr == expr' && targs == targs' then extends - else { expr = expr'; targs = targs' } - - method class_identifier (ident: Loc.t Flow_ast.Identifier.t) = - this#pattern_identifier ~kind:Flow_ast.Statement.VariableDeclaration.Let ident - - method class_body (cls_body: (Loc.t, Loc.t) Flow_ast.Class.Body.t) = - let open Flow_ast.Class.Body in - let loc, { body } = cls_body in - let body' = ListUtils.ident_map this#class_element body in - if body == body' then cls_body - else loc, { body = body' } - - method class_element (elem: (Loc.t, Loc.t) Flow_ast.Class.Body.element) = - let open Flow_ast.Class.Body in - match elem with - | Method (loc, meth) -> id_loc this#class_method loc meth elem (fun meth -> Method (loc, meth)) - | Property (loc, prop) -> id_loc this#class_property loc prop elem (fun prop -> Property (loc, prop)) - | PrivateField (loc, field) -> id_loc this#class_private_field loc field elem - (fun field -> PrivateField (loc, field)) - - method class_method _loc (meth: (Loc.t, Loc.t) Flow_ast.Class.Method.t') = - let open Flow_ast.Class.Method in - let { kind = _; key; value; static = _; decorators = _; } = meth in - let key' = this#object_key key in - let value' = map_loc this#function_ value in - if key == key' && value == value' then meth - else { meth with key = key'; value = value' } - - method class_property _loc (prop: (Loc.t, Loc.t) Flow_ast.Class.Property.t') = - let open Flow_ast.Class.Property in - let { key; value; annot; static = _; variance = _; } = prop in - let key' = this#object_key key in - let value' = map_opt this#expression value in - let annot' = map_opt this#type_annotation annot in - if key == key' && value == value' && annot' == annot then prop - else { prop with key = key'; value = value'; annot = annot' } - - method class_private_field _loc (prop: (Loc.t, Loc.t) Flow_ast.Class.PrivateField.t') = - let open Flow_ast.Class.PrivateField in - let { key; value; annot; static = _; variance = _; } = prop in - let key' = this#private_name key in - let value' = map_opt this#expression value in - let annot' = map_opt this#type_annotation annot in - if key == key' && value == value' && annot' == annot then prop - else { prop with key = key'; value = value'; annot = annot' } - - (* TODO *) - method comprehension _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Comprehension.t) = expr - - method conditional _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Conditional.t) = - let open Flow_ast.Expression.Conditional in - let { test; consequent; alternate } = expr in - let test' = this#predicate_expression test in - let consequent' = this#expression consequent in - let alternate' = this#expression alternate in - if test == test' && consequent == consequent' && alternate == alternate' - then expr - else { test = test'; consequent = consequent'; alternate = alternate' } - - method continue _loc (cont: Loc.t Flow_ast.Statement.Continue.t) = - let open Flow_ast.Statement.Continue in - let { label } = cont in - let label' = map_opt this#label_identifier label in - if label == label' then cont else { label = label' } - - method debugger _loc = - () - - method declare_class _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.DeclareClass.t) = - let open Flow_ast.Statement.DeclareClass in - let { id = ident; tparams; body; extends; mixins; implements } = decl in - let id' = this#class_identifier ident in - let tparams' = map_opt this#type_parameter_declaration tparams in - let 
body' = map_loc this#object_type body in - let extends' = map_opt (map_loc this#generic_type) extends in - let mixins' = ListUtils.ident_map (map_loc this#generic_type) mixins in - if id' == ident && tparams' == tparams && body' == body && extends' == extends - && mixins' == mixins then decl - else { id = id'; tparams = tparams'; body = body'; extends = extends'; - mixins = mixins'; implements } - - method declare_export_declaration _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.DeclareExportDeclaration.t) = - let open Flow_ast.Statement.DeclareExportDeclaration in - let { default; source; specifiers; declaration } = decl in - let specifiers' = map_opt this#export_named_specifier specifiers in - let declaration' = map_opt this#declare_export_declaration_decl declaration in - if specifiers == specifiers' && declaration == declaration' then decl - else { default; source; specifiers = specifiers'; declaration = declaration' } - - (* TODO(T22777134): Implement this when the mapper supports OpaqueType. *) - method declare_export_declaration_decl (decl: (Loc.t, Loc.t) Flow_ast.Statement.DeclareExportDeclaration.declaration) = - decl - - method declare_function _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.DeclareFunction.t) = - let open Flow_ast.Statement.DeclareFunction in - let { id = ident; annot; predicate } = decl in - let id' = this#function_identifier ident in - let annot' = this#type_annotation annot in - (* TODO: walk predicate *) - if id' == ident && annot' == annot then decl - else { id = id'; annot = annot'; predicate } - - method declare_interface _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.Interface.t) = - this#interface decl - - method declare_module _loc (m: (Loc.t, Loc.t) Flow_ast.Statement.DeclareModule.t) = - let open Flow_ast.Statement.DeclareModule in - let { id; body; kind } = m in - let body' = map_loc this#block body in - if body' == body then m - else { id; body = body'; kind } - - (* TODO *) - method declare_module_exports _loc (annot: (Loc.t, Loc.t) Flow_ast.Type.annotation) = - annot - - method declare_type_alias loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.TypeAlias.t) = - this#type_alias loc decl - - method declare_variable _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.DeclareVariable.t) = - let open Flow_ast.Statement.DeclareVariable in - let { id = ident; annot } = decl in - let id' = this#pattern_identifier ~kind:Flow_ast.Statement.VariableDeclaration.Var ident in - let annot' = map_opt this#type_annotation annot in - if id' == ident && annot' == annot then decl - else { id = id'; annot = annot' } - - method do_while _loc (stuff: (Loc.t, Loc.t) Flow_ast.Statement.DoWhile.t) = - let open Flow_ast.Statement.DoWhile in - let { body; test } = stuff in - let body' = this#statement body in - let test' = this#predicate_expression test in - if body == body' && test == test' then stuff - else { body = body'; test = test' } - - method empty _loc = - () - - method export_default_declaration _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.ExportDefaultDeclaration.t) = - let open Flow_ast.Statement.ExportDefaultDeclaration in - let { default; declaration } = decl in - let declaration' = this#export_default_declaration_decl declaration in - if declaration' = declaration then decl - else { default; declaration = declaration' } - - method export_default_declaration_decl (decl: (Loc.t, Loc.t) Flow_ast.Statement.ExportDefaultDeclaration.declaration) = - let open Flow_ast.Statement.ExportDefaultDeclaration in - match decl with - | Declaration stmt -> id this#statement stmt decl (fun 
stmt -> Declaration stmt) - | Expression expr -> id this#expression expr decl (fun expr -> Expression expr) - - method export_named_declaration _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.ExportNamedDeclaration.t) = - let open Flow_ast.Statement.ExportNamedDeclaration in - let { exportKind; source; specifiers; declaration } = decl in - let specifiers' = map_opt this#export_named_specifier specifiers in - let declaration' = map_opt this#statement declaration in - if specifiers == specifiers' && declaration == declaration' then decl - else { exportKind; source; specifiers = specifiers'; declaration = declaration' } - - (* TODO *) - method export_named_specifier (spec: Loc.t Flow_ast.Statement.ExportNamedDeclaration.specifier) = - spec - - method expression_statement _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Expression.t) = - let open Flow_ast.Statement.Expression in - let { expression = expr; directive = _ } = stmt in - id this#expression expr stmt (fun expression -> { stmt with expression }) - - method expression_or_spread expr_or_spread = - let open Flow_ast.Expression in - match expr_or_spread with - | Expression expr -> - id this#expression expr expr_or_spread (fun expr -> Expression expr) - | Spread spread -> - id this#spread_element spread expr_or_spread (fun spread -> Spread spread) - - method for_in_statement _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.ForIn.t) = - let open Flow_ast.Statement.ForIn in - let { left; right; body; each } = stmt in - let left' = this#for_in_statement_lhs left in - let right' = this#expression right in - let body' = this#statement body in - if left == left' && right == right' && body == body' then stmt - else { left = left'; right = right'; body = body'; each } - - method for_in_statement_lhs (left: (Loc.t, Loc.t) Flow_ast.Statement.ForIn.left) = - let open Flow_ast.Statement.ForIn in - match left with - | LeftDeclaration (loc, decl) -> - id_loc this#variable_declaration loc decl left (fun decl -> LeftDeclaration (loc, decl)) - | LeftPattern patt -> - id this#for_in_assignment_pattern patt left (fun patt -> LeftPattern patt) - - method for_of_statement _loc (stuff: (Loc.t, Loc.t) Flow_ast.Statement.ForOf.t) = - let open Flow_ast.Statement.ForOf in - let { left; right; body; async } = stuff in - let left' = this#for_of_statement_lhs left in - let right' = this#expression right in - let body' = this#statement body in - if left == left' && right == right' && body == body' then stuff - else { left = left'; right = right'; body = body'; async } - - method for_of_statement_lhs (left: (Loc.t, Loc.t) Flow_ast.Statement.ForOf.left) = - let open Flow_ast.Statement.ForOf in - match left with - | LeftDeclaration (loc, decl) -> - id_loc this#variable_declaration loc decl left (fun decl -> LeftDeclaration (loc, decl)) - | LeftPattern patt -> - id this#for_of_assignment_pattern patt left (fun patt -> LeftPattern patt) - - method for_statement _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.For.t) = - let open Flow_ast.Statement.For in - let { init; test; update; body } = stmt in - let init' = map_opt this#for_statement_init init in - let test' = map_opt this#predicate_expression test in - let update' = map_opt this#expression update in - let body' = this#statement body in - if init == init' && - test == test' && - update == update' && - body == body' - then stmt - else { init = init'; test = test'; update = update'; body = body' } - - method for_statement_init (init: (Loc.t, Loc.t) Flow_ast.Statement.For.init) = - let open Flow_ast.Statement.For in - match init 
with - | InitDeclaration (loc, decl) -> - id_loc this#variable_declaration loc decl init - (fun decl -> InitDeclaration (loc, decl)) - | InitExpression expr -> - id this#expression expr init (fun expr -> InitExpression expr) - - method function_param_type (fpt: (Loc.t, Loc.t) Flow_ast.Type.Function.Param.t) = - let open Flow_ast.Type.Function.Param in - let loc, { annot; name; optional; } = fpt in - let annot' = this#type_ annot in - if annot' == annot then fpt - else loc, { annot = annot'; name; optional } - - method function_rest_param_type (frpt: (Loc.t, Loc.t) Flow_ast.Type.Function.RestParam.t) = - let open Flow_ast.Type.Function.RestParam in - let loc, { argument } = frpt in - let argument' = this#function_param_type argument in - if argument' == argument then frpt - else loc, { argument = argument' } - - method function_type _loc (ft: (Loc.t, Loc.t) Flow_ast.Type.Function.t) = - let open Flow_ast.Type.Function in - let { - params = (params_loc, { Params.params = ps; rest = rpo }); - return; - tparams; - } = ft in - let ps' = ListUtils.ident_map this#function_param_type ps in - let rpo' = map_opt this#function_rest_param_type rpo in - let return' = this#type_ return in - if ps' == ps && rpo' == rpo && return' == return then ft - else { - params = (params_loc, { Params.params = ps'; rest = rpo' }); - return = return'; - tparams - } - - method label_identifier (ident: Loc.t Flow_ast.Identifier.t) = - this#identifier ident - - method object_property_value_type (opvt: (Loc.t, Loc.t) Flow_ast.Type.Object.Property.value) = - let open Flow_ast.Type.Object.Property in - match opvt with - | Init t -> id this#type_ t opvt (fun t -> Init t) - | Get (loc, ft) -> id_loc this#function_type loc ft opvt (fun ft -> Get (loc, ft)) - | Set (loc, ft) -> id_loc this#function_type loc ft opvt (fun ft -> Set (loc, ft)) - - method object_property_type (opt: (Loc.t, Loc.t) Flow_ast.Type.Object.Property.t) = - let open Flow_ast.Type.Object.Property in - let loc, { key; value; optional; static; proto; _method; variance; } = opt in - let value' = this#object_property_value_type value in - if value' == value then opt - else loc, { key; value = value'; optional; static; proto; _method; variance } - - method object_spread_property_type (opt: (Loc.t, Loc.t) Flow_ast.Type.Object.SpreadProperty.t) = - let open Flow_ast.Type.Object.SpreadProperty in - let loc, { argument; } = opt in - let argument' = this#type_ argument in - if argument' == argument then opt - else loc, { argument = argument'; } - - method object_indexer_property_type (opt: (Loc.t, Loc.t) Flow_ast.Type.Object.Indexer.t) = - let open Flow_ast.Type.Object.Indexer in - let loc, { id; key; value; static; variance; } = opt in - let key' = this#type_ key in - let value' = this#type_ value in - if key' == key && value' == value then opt - else loc, { id; key = key'; value = value'; static; variance; } - - method object_type _loc (ot: (Loc.t, Loc.t) Flow_ast.Type.Object.t) = - let open Flow_ast.Type.Object in - let { properties ; exact; } = ot in - let properties' = ListUtils.ident_map (fun p -> match p with - | Property p' -> id this#object_property_type p' p (fun p' -> Property p') - | SpreadProperty p' -> id this#object_spread_property_type p' p (fun p' -> SpreadProperty p') - | Indexer p' -> id this#object_indexer_property_type p' p (fun p' -> Indexer p') - | CallProperty _ - | InternalSlot _ -> p (* TODO *) - ) properties in - if properties' == properties then ot - else { properties = properties'; exact } - - method interface_type _loc (i: (Loc.t, Loc.t) 
Flow_ast.Type.Interface.t) = - let open Flow_ast.Type.Interface in - let { extends; body } = i in - let extends' = ListUtils.ident_map (map_loc this#generic_type) extends in - let body' = map_loc this#object_type body in - if extends' == extends && body' == body then i - else { extends = extends'; body = body' } - - method generic_identifier_type (git: (Loc.t, Loc.t) Flow_ast.Type.Generic.Identifier.t) = - let open Flow_ast.Type.Generic.Identifier in - match git with - | Unqualified i -> id this#identifier i git (fun i -> Unqualified i) - | _ -> git (* TODO *) - - method type_parameter_instantiation (pi: (Loc.t, Loc.t) Flow_ast.Type.ParameterInstantiation.t) = - let loc, targs = pi in - let targs' = ListUtils.ident_map this#type_ targs in - if targs' == targs then pi - else loc, targs' - - method type_parameter_declaration (pd: (Loc.t, Loc.t) Flow_ast.Type.ParameterDeclaration.t) = - let loc, type_params = pd in - let type_params' = ListUtils.ident_map this#type_parameter_declaration_type_param type_params in - if type_params' == type_params then pd - else loc, type_params' - - method type_parameter_declaration_type_param (type_param: (Loc.t, Loc.t) Flow_ast.Type.ParameterDeclaration.TypeParam.t) = - let open Flow_ast.Type.ParameterDeclaration.TypeParam in - let loc, { name; bound; variance; default; } = type_param in - let bound' = map_opt this#type_annotation bound in - let default' = map_opt this#type_ default in - if bound' == bound && default' == default then type_param - else loc, { name; bound = bound'; variance; default = default'; } - - method generic_type _loc (gt: (Loc.t, Loc.t) Flow_ast.Type.Generic.t) = - let open Flow_ast.Type.Generic in - let { id; targs; } = gt in - let id' = this#generic_identifier_type id in - let targs' = map_opt this#type_parameter_instantiation targs in - if id' == id && targs' == targs then gt - else { id = id'; targs = targs' } - - method type_ (t: (Loc.t, Loc.t) Flow_ast.Type.t) = - let open Flow_ast.Type in - match t with - | _, Any - | _, Mixed - | _, Empty - | _, Void - | _, Null - | _, Number - | _, String - | _, Boolean - | _, StringLiteral _ - | _, NumberLiteral _ - | _, BooleanLiteral _ - | _, Exists - -> t - | loc, Nullable t' -> id this#type_ t' t (fun t' -> loc, Nullable t') - | loc, Array t' -> id this#type_ t' t (fun t' -> loc, Array t') - | loc, Typeof t' -> id this#type_ t' t (fun t' -> loc, Typeof t') - | loc, Function ft -> id_loc this#function_type loc ft t (fun ft -> loc, Function ft) - | loc, Object ot -> id_loc this#object_type loc ot t (fun ot -> loc, Object ot) - | loc, Interface i -> id_loc this#interface_type loc i t (fun i -> loc, Interface i) - | loc, Generic gt -> id_loc this#generic_type loc gt t (fun gt -> loc, Generic gt) - | loc, Union (t0, t1, ts) -> - let t0' = this#type_ t0 in - let t1' = this#type_ t1 in - let ts' = ListUtils.ident_map this#type_ ts in - if t0' == t0 && t1' == t1 && ts' == ts then t - else loc, Union (t0', t1', ts') - | loc, Intersection (t0, t1, ts) -> - let t0' = this#type_ t0 in - let t1' = this#type_ t1 in - let ts' = ListUtils.ident_map this#type_ ts in - if t0' == t0 && t1' == t1 && ts' == ts then t - else loc, Intersection (t0', t1', ts') - | loc, Tuple ts -> - let ts' = ListUtils.ident_map this#type_ ts in - if ts' == ts then t - else loc, Tuple ts' - - method type_annotation (annot: (Loc.t, Loc.t) Flow_ast.Type.annotation) = - let loc, a = annot in - id this#type_ a annot (fun a -> (loc, a)) - - method return_type_annotation (return: ('M, 'T) Flow_ast.Function.return) = - let open 
Flow_ast.Function in - match return with - | Available annot -> - let annot' = this#type_annotation annot in - if annot' == annot then return - else Available annot' - | Missing _loc -> return - - method function_ _loc (expr: (Loc.t, Loc.t) Flow_ast.Function.t) = - let open Flow_ast.Function in - let { - id = ident; params; body; async; generator; expression; - predicate; return; tparams; - } = expr in - let ident' = map_opt this#function_identifier ident in - let params' = this#function_params params in - let return' = this#return_type_annotation return in - let body' = this#function_body_any body in - (* TODO: walk predicate *) - let tparams' = map_opt this#type_parameter_declaration tparams in - if ident == ident' && params == params' && body == body' && return == return' - && tparams == tparams' then expr - else { - id = ident'; params = params'; return = return'; body = body'; - async; generator; expression; predicate; tparams = tparams'; - } - - method function_params (params: (Loc.t, Loc.t) Flow_ast.Function.Params.t) = - let open Flow_ast.Function in - let (loc, { Params.params = params_list; rest }) = params in - let params_list' = this#function_param_patterns params_list in - let rest' = map_opt this#function_rest_element rest in - if params_list == params_list' && rest == rest' then params - else (loc, { Params.params = params_list'; rest = rest' }) - - method function_param_patterns (params_list: (Loc.t, Loc.t) Flow_ast.Pattern.t list) = - ListUtils.ident_map this#function_param_pattern params_list - - method function_body_any (body: (Loc.t, Loc.t) Flow_ast.Function.body) = - match body with - | Flow_ast.Function.BodyBlock (loc, block) -> - id_loc this#function_body loc block body (fun block -> Flow_ast.Function.BodyBlock (loc, block)) - | Flow_ast.Function.BodyExpression expr -> - id this#expression expr body (fun expr -> Flow_ast.Function.BodyExpression expr) - - method function_body loc (block: (Loc.t, Loc.t) Flow_ast.Statement.Block.t) = - this#block loc block - - method function_identifier (ident: Loc.t Flow_ast.Identifier.t) = - this#pattern_identifier ~kind:Flow_ast.Statement.VariableDeclaration.Var ident - - method function_declaration loc (stmt: (Loc.t, Loc.t) Flow_ast.Function.t) = - this#function_ loc stmt - - (* TODO *) - method generator _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Generator.t) = expr - - method identifier (expr: Loc.t Flow_ast.Identifier.t) = expr - - method interface (interface: (Loc.t, Loc.t) Flow_ast.Statement.Interface.t) = - let open Flow_ast.Statement.Interface in - let { id = ident; tparams; extends; body } = interface in - let id' = this#class_identifier ident in - let tparams' = map_opt this#type_parameter_declaration tparams in - let extends' = ListUtils.ident_map (map_loc this#generic_type) extends in - let body' = map_loc this#object_type body in - if id' == ident && tparams' == tparams && extends' == extends && body' == body - then interface - else { id = id'; tparams = tparams'; extends = extends'; body = body' } - - method interface_declaration _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.Interface.t) = - this#interface decl - - method private_name (expr: Loc.t Flow_ast.PrivateName.t) = expr - - method import _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.t) = expr - - method if_consequent_statement ~has_else (stmt: (Loc.t, Loc.t) Flow_ast.Statement.t) = - ignore has_else; - this#statement stmt - - method if_statement _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.If.t) = - let open Flow_ast.Statement.If in - let { test; 
consequent; alternate } = stmt in - let test' = this#predicate_expression test in - let consequent' = - this#if_consequent_statement ~has_else:(alternate <> None) consequent in - let alternate' = map_opt this#statement alternate in - if test == test' && consequent == consequent' && alternate == alternate' - then stmt - else { test = test'; consequent = consequent'; alternate = alternate' } - - method import_declaration _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.ImportDeclaration.t) = - let open Flow_ast.Statement.ImportDeclaration in - let { importKind; source; specifiers; default } = decl in - match importKind with - | ImportValue - | ImportType -> - let specifiers' = map_opt this#import_specifier specifiers in - let default' = map_opt this#import_default_specifier default in - if specifiers == specifiers' && default == default' then decl - else { importKind; source; specifiers = specifiers'; default = default' } - | ImportTypeof -> decl (* TODO *) - - method import_specifier (specifier: (Loc.t, Loc.t) Flow_ast.Statement.ImportDeclaration.specifier) = - let open Flow_ast.Statement.ImportDeclaration in - match specifier with - | ImportNamedSpecifiers named_specifiers -> - let named_specifiers' = ListUtils.ident_map this#import_named_specifier named_specifiers in - if named_specifiers == named_specifiers' then specifier - else ImportNamedSpecifiers named_specifiers' - | ImportNamespaceSpecifier (loc, ident) -> - id_loc this#import_namespace_specifier loc ident specifier - (fun ident -> ImportNamespaceSpecifier (loc, ident)) - - method import_named_specifier (specifier: Loc.t Flow_ast.Statement.ImportDeclaration.named_specifier) = - let open Flow_ast.Statement.ImportDeclaration in - let { kind; local; remote } = specifier in - begin match kind with - | None -> - let ident = match local with - | None -> remote - | Some ident -> ident - in - let local' = id (this#pattern_identifier ~kind:Flow_ast.Statement.VariableDeclaration.Let) - ident local (fun ident -> Some ident) - in - if local == local' then specifier - else { kind; local = local'; remote } - | Some _importKind -> specifier (* TODO *) - end - - method import_default_specifier (id: Loc.t Flow_ast.Identifier.t) = - this#pattern_identifier ~kind:Flow_ast.Statement.VariableDeclaration.Let id - - method import_namespace_specifier _loc (id: Loc.t Flow_ast.Identifier.t) = - this#pattern_identifier ~kind:Flow_ast.Statement.VariableDeclaration.Let id - - method jsx_element _loc (expr: (Loc.t, Loc.t) Flow_ast.JSX.element) = - let open Flow_ast.JSX in - let { openingElement; closingElement; children } = expr in - let openingElement' = this#jsx_opening_element openingElement in - let closingElement' = map_opt this#jsx_closing_element closingElement in - let children' = ListUtils.ident_map this#jsx_child children in - if openingElement == openingElement' && closingElement == closingElement' && children == children' then expr - else { openingElement = openingElement'; closingElement = closingElement'; children = children' } - - method jsx_fragment _loc (expr: (Loc.t, Loc.t) Flow_ast.JSX.fragment) = - let open Flow_ast.JSX in - let { frag_children; _ } = expr in - let children' = ListUtils.ident_map this#jsx_child frag_children in - { expr with frag_children = children' } - - method jsx_opening_element (elem: (Loc.t, Loc.t) Flow_ast.JSX.Opening.t) = - let open Flow_ast.JSX.Opening in - let loc, { name; selfClosing; attributes } = elem in - let name' = this#jsx_name name in - let attributes' = ListUtils.ident_map this#jsx_opening_attribute 
attributes in - if name == name' && attributes == attributes' then elem - else loc, { name; selfClosing; attributes = attributes' } - - method jsx_closing_element (elem: (Loc.t, Loc.t) Flow_ast.JSX.Closing.t) = - let open Flow_ast.JSX.Closing in - let loc, {name} = elem in - let name' = this#jsx_name name in - if name == name' then elem else loc, {name=name'} - - method jsx_opening_attribute (jsx_attr: (Loc.t, Loc.t) Flow_ast.JSX.Opening.attribute) = - let open Flow_ast.JSX.Opening in - match jsx_attr with - | Attribute attr -> - id this#jsx_attribute attr jsx_attr (fun attr -> Attribute attr) - | SpreadAttribute (loc, attr) -> - id_loc this#jsx_spread_attribute loc attr jsx_attr (fun attr -> SpreadAttribute (loc, attr)) - - method jsx_spread_attribute _loc (attr: (Loc.t, Loc.t) Flow_ast.JSX.SpreadAttribute.t') = - let open Flow_ast.JSX.SpreadAttribute in - let { argument } = attr in - id this#expression argument attr (fun argument -> { argument }) - - method jsx_attribute (attr: (Loc.t, Loc.t) Flow_ast.JSX.Attribute.t) = - let open Flow_ast.JSX.Attribute in - let loc, { name; value } = attr in - let value' = map_opt this#jsx_attribute_value value in - if value == value' then attr - else loc, { name; value = value' } - - method jsx_attribute_value (value: (Loc.t, Loc.t) Flow_ast.JSX.Attribute.value) = - let open Flow_ast.JSX.Attribute in - match value with - | Literal _ -> value - | ExpressionContainer (loc, expr) -> - id_loc this#jsx_expression loc expr value (fun expr -> ExpressionContainer (loc, expr)) - - method jsx_child (child: (Loc.t, Loc.t) Flow_ast.JSX.child) = - let open Flow_ast.JSX in - match child with - | loc, Element elem -> - id_loc this#jsx_element loc elem child (fun elem -> loc, Element elem) - | loc, Fragment frag -> - id_loc this#jsx_fragment loc frag child (fun frag -> loc, Fragment frag) - | loc, ExpressionContainer expr -> - id_loc this#jsx_expression loc expr child (fun expr -> loc, ExpressionContainer expr) - | loc, SpreadChild expr -> - id this#expression expr child (fun expr -> loc, SpreadChild expr) - | _loc, Text _ -> child - - method jsx_expression _loc (jsx_expr: (Loc.t, Loc.t) Flow_ast.JSX.ExpressionContainer.t) = - let open Flow_ast.JSX.ExpressionContainer in - let { expression } = jsx_expr in - match expression with - | Expression expr -> - id this#expression expr jsx_expr (fun expr -> { expression = Expression expr}) - | EmptyExpression _ -> jsx_expr - - method jsx_name (name: (Loc.t, Loc.t) Flow_ast.JSX.name) = - let open Flow_ast.JSX in - let name' = match name with - | Identifier id -> Identifier (this#jsx_identifier id) - | NamespacedName namespaced_name -> - NamespacedName (this#jsx_namespaced_name namespaced_name) - | MemberExpression member_exp -> - MemberExpression (this#jsx_member_expression member_exp) - in - (* structural equality since it's easier than checking equality in each branch of the match - * above *) - if name = name' then name else name' - - method jsx_namespaced_name (namespaced_name: (Loc.t, Loc.t) Flow_ast.JSX.NamespacedName.t) = - let open Flow_ast.JSX in - let open NamespacedName in - let loc, {namespace; name} = namespaced_name in - let namespace' = this#jsx_identifier namespace in - let name' = this#jsx_identifier name in - if namespace == namespace' && name == name' then - namespaced_name +let map_opt : 'node. 
('node -> 'node) -> 'node option -> 'node option = + fun map opt -> + match opt with + | Some item -> + let item' = map item in + if item == item' then + opt else - loc, {namespace=namespace'; name=name'} - - method jsx_member_expression (member_exp: (Loc.t, Loc.t) Flow_ast.JSX.MemberExpression.t) = - let open Flow_ast.JSX in - let loc, {MemberExpression._object; MemberExpression.property} = member_exp in - let _object' = match _object with - | MemberExpression.Identifier id -> - let id' = this#jsx_identifier id in - if id' == id then _object else MemberExpression.Identifier id' - | MemberExpression.MemberExpression nested_exp -> - let nested_exp' = this#jsx_member_expression nested_exp in - if nested_exp' == nested_exp then - _object + Some item' + | None -> opt + +let id_loc : 'node 'a. ('loc -> 'node -> 'node) -> 'loc -> 'node -> 'a -> ('node -> 'a) -> 'a = + fun map loc item same diff -> + let item' = map loc item in + if item == item' then + same + else + diff item' + +let id : 'node 'a. ('node -> 'node) -> 'node -> 'a -> ('node -> 'a) -> 'a = + fun map item same diff -> + let item' = map item in + if item == item' then + same + else + diff item' + +let map_loc : 'node. ('loc -> 'node -> 'node) -> 'loc * 'node -> 'loc * 'node = + fun map same -> + let (loc, item) = same in + id_loc map loc item same (fun diff -> (loc, diff)) + +class ['loc] mapper = + object (this) + method program (program : ('loc, 'loc) Ast.program) = + let (loc, statements, comments) = program in + let statements' = this#toplevel_statement_list statements in + let comments' = ListUtils.ident_map this#comment comments in + if statements == statements' && comments == comments' then + program + else + (loc, statements', comments') + + method statement (stmt : ('loc, 'loc) Ast.Statement.t) = + Ast.Statement.( + match stmt with + | (loc, Block block) -> id_loc this#block loc block stmt (fun block -> (loc, Block block)) + | (loc, Break break) -> id_loc this#break loc break stmt (fun break -> (loc, Break break)) + | (loc, ClassDeclaration cls) -> + id_loc this#class_ loc cls stmt (fun cls -> (loc, ClassDeclaration cls)) + | (loc, Continue cont) -> + id_loc this#continue loc cont stmt (fun cont -> (loc, Continue cont)) + | (loc, Debugger) -> + this#debugger loc; + stmt + | (loc, DeclareClass stuff) -> + id_loc this#declare_class loc stuff stmt (fun stuff -> (loc, DeclareClass stuff)) + | (loc, DeclareExportDeclaration decl) -> + id_loc this#declare_export_declaration loc decl stmt (fun decl -> + (loc, DeclareExportDeclaration decl)) + | (loc, DeclareFunction stuff) -> + id_loc this#declare_function loc stuff stmt (fun stuff -> (loc, DeclareFunction stuff)) + | (loc, DeclareInterface stuff) -> + id_loc this#declare_interface loc stuff stmt (fun stuff -> (loc, DeclareInterface stuff)) + | (loc, DeclareModule m) -> + id_loc this#declare_module loc m stmt (fun m -> (loc, DeclareModule m)) + | (loc, DeclareTypeAlias stuff) -> + id_loc this#declare_type_alias loc stuff stmt (fun stuff -> + (loc, DeclareTypeAlias stuff)) + | (loc, DeclareVariable stuff) -> + id_loc this#declare_variable loc stuff stmt (fun stuff -> (loc, DeclareVariable stuff)) + | (loc, DeclareModuleExports annot) -> + id_loc this#declare_module_exports loc annot stmt (fun annot -> + (loc, DeclareModuleExports annot)) + | (loc, DoWhile stuff) -> + id_loc this#do_while loc stuff stmt (fun stuff -> (loc, DoWhile stuff)) + | (loc, Empty) -> + this#empty loc; + stmt + | (loc, EnumDeclaration enum) -> + id_loc this#enum_declaration loc enum stmt (fun enum -> (loc, 
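(* Illustrative sketch, not part of the patch: the map_opt / id / id_loc /
 * map_loc helpers introduced above all answer "did anything change?" with
 * physical equality (==), so an untouched node is handed back as-is and
 * unchanged subtrees stay shared instead of being copied. Using map_opt on
 * a made-up function and values: *)
let bump_positive (o : int option) : int option =
  map_opt (fun n -> if n > 0 then n + 1 else n) o

let () =
  let unchanged = Some 0 in
  (* nothing changed, so the very same option cell comes back *)
  assert (bump_positive unchanged == unchanged);
  (* a real change allocates a fresh Some *)
  assert (bump_positive (Some 3) = Some 4)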
EnumDeclaration enum)) + | (loc, ExportDefaultDeclaration decl) -> + id_loc this#export_default_declaration loc decl stmt (fun decl -> + (loc, ExportDefaultDeclaration decl)) + | (loc, ExportNamedDeclaration decl) -> + id_loc this#export_named_declaration loc decl stmt (fun decl -> + (loc, ExportNamedDeclaration decl)) + | (loc, Expression expr) -> + id_loc this#expression_statement loc expr stmt (fun expr -> (loc, Expression expr)) + | (loc, For for_stmt) -> + id_loc this#for_statement loc for_stmt stmt (fun for_stmt -> (loc, For for_stmt)) + | (loc, ForIn stuff) -> + id_loc this#for_in_statement loc stuff stmt (fun stuff -> (loc, ForIn stuff)) + | (loc, ForOf stuff) -> + id_loc this#for_of_statement loc stuff stmt (fun stuff -> (loc, ForOf stuff)) + | (loc, FunctionDeclaration func) -> + id_loc this#function_declaration loc func stmt (fun func -> + (loc, FunctionDeclaration func)) + | (loc, If if_stmt) -> + id_loc this#if_statement loc if_stmt stmt (fun if_stmt -> (loc, If if_stmt)) + | (loc, ImportDeclaration decl) -> + id_loc this#import_declaration loc decl stmt (fun decl -> (loc, ImportDeclaration decl)) + | (loc, InterfaceDeclaration stuff) -> + id_loc this#interface_declaration loc stuff stmt (fun stuff -> + (loc, InterfaceDeclaration stuff)) + | (loc, Labeled label) -> + id_loc this#labeled_statement loc label stmt (fun label -> (loc, Labeled label)) + | (loc, OpaqueType otype) -> + id_loc this#opaque_type loc otype stmt (fun otype -> (loc, OpaqueType otype)) + | (loc, Return ret) -> id_loc this#return loc ret stmt (fun ret -> (loc, Return ret)) + | (loc, Switch switch) -> + id_loc this#switch loc switch stmt (fun switch -> (loc, Switch switch)) + | (loc, Throw throw) -> id_loc this#throw loc throw stmt (fun throw -> (loc, Throw throw)) + | (loc, Try try_stmt) -> + id_loc this#try_catch loc try_stmt stmt (fun try_stmt -> (loc, Try try_stmt)) + | (loc, VariableDeclaration decl) -> + id_loc this#variable_declaration loc decl stmt (fun decl -> + (loc, VariableDeclaration decl)) + | (loc, While stuff) -> id_loc this#while_ loc stuff stmt (fun stuff -> (loc, While stuff)) + | (loc, With stuff) -> id_loc this#with_ loc stuff stmt (fun stuff -> (loc, With stuff)) + | (loc, TypeAlias stuff) -> + id_loc this#type_alias loc stuff stmt (fun stuff -> (loc, TypeAlias stuff)) + (* TODO: Flow specific stuff *) + | (_loc, DeclareOpaqueType _) -> stmt) + + method comment (c : 'loc Ast.Comment.t) = c + + method syntax_opt + : 'internal. ('loc, 'internal) Ast.Syntax.t option -> ('loc, 'internal) Ast.Syntax.t option + = + map_opt this#syntax + + method syntax : 'internal. 
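(* ListUtils.ident_map, used throughout this class, is defined elsewhere in
 * the repo; the sketch below only illustrates the behaviour the call sites
 * rely on (a List.map that hands back the original list when no element
 * changed) and is not the real implementation. *)
let ident_map_sketch (f : 'a -> 'a) (lst : 'a list) : 'a list =
  let changed = ref false in
  let lst' =
    List.map
      (fun x ->
        let x' = f x in
        if x' != x then changed := true;
        x')
      lst
  in
  if !changed then lst' else lst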
('loc, 'internal) Ast.Syntax.t -> ('loc, 'internal) Ast.Syntax.t = + fun attached -> + Ast.Syntax.( + let { leading; trailing; internal } = attached in + let leading' = ListUtils.ident_map this#comment leading in + let trailing' = ListUtils.ident_map this#comment trailing in + if leading == leading' && trailing == trailing' then + attached else - MemberExpression.MemberExpression nested_exp' - in - let property' = this#jsx_identifier property in - if _object == _object' && property == property' then - member_exp - else - loc, MemberExpression.({_object=_object'; property=property'}) - - method jsx_identifier (id: Loc.t Flow_ast.JSX.Identifier.t) = id - - method labeled_statement _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Labeled.t) = - let open Flow_ast.Statement.Labeled in - let { label; body } = stmt in - let label' = this#label_identifier label in - let body' = this#statement body in - if label == label' && body == body' then stmt - else { label = label'; body = body' } - - method literal _loc (expr: Flow_ast.Literal.t) = expr - - method logical _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Logical.t) = - let open Flow_ast.Expression.Logical in - let { operator = _; left; right } = expr in - let left' = this#expression left in - let right' = this#expression right in - if left == left' && right == right' then expr - else { expr with left = left'; right = right' } - - method member _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Member.t) = - let open Flow_ast.Expression.Member in - let { _object; property; computed = _ } = expr in - let _object' = this#expression _object in - let property' = this#member_property property in - if _object == _object' && property == property' then expr - else { expr with _object = _object'; property = property' } - - method optional_member loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.OptionalMember.t) = - let open Flow_ast.Expression.OptionalMember in - let { member; optional = _ } = expr in - let member' = this#member loc member in - if member == member' then expr - else { expr with member = member' } - - method member_property (expr: (Loc.t, Loc.t) Flow_ast.Expression.Member.property) = - let open Flow_ast.Expression.Member in - match expr with - | PropertyIdentifier ident -> - id this#member_property_identifier ident expr - (fun ident -> PropertyIdentifier ident) - | PropertyPrivateName ident -> - id this#member_private_name ident expr - (fun ident -> PropertyPrivateName ident) - | PropertyExpression e -> - id this#member_property_expression e expr (fun e -> PropertyExpression e) - - method member_property_identifier (ident: Loc.t Flow_ast.Identifier.t) = - this#identifier ident - - method member_private_name (name: Loc.t Flow_ast.PrivateName.t) = - this#private_name name - - method member_property_expression (expr: (Loc.t, Loc.t) Flow_ast.Expression.t) = - this#expression expr - - (* TODO *) - method meta_property _loc (expr: Loc.t Flow_ast.Expression.MetaProperty.t) = expr - - method new_ _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.New.t) = - let open Flow_ast.Expression.New in - let { callee; targs; arguments } = expr in - let callee' = this#expression callee in - let targs' = map_opt this#type_parameter_instantiation targs in - let arguments' = ListUtils.ident_map this#expression_or_spread arguments in - if callee == callee' && targs == targs' && arguments == arguments' then expr - else { callee = callee'; targs = targs'; arguments = arguments' } - - method object_ _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Object.t) = - let open 
Flow_ast.Expression.Object in - let { properties } = expr in - let properties' = ListUtils.ident_map (fun prop -> - match prop with - | Property p -> - let p' = this#object_property p in - if p == p' then prop else Property p' - | SpreadProperty s -> - let s' = this#spread_property s in - if s == s' then prop else SpreadProperty s' - ) properties in - if properties == properties' then expr - else { properties = properties' } - - method object_property (prop: (Loc.t, Loc.t) Flow_ast.Expression.Object.Property.t) = - let open Flow_ast.Expression.Object.Property in - match prop with - | loc, Init { key; value; shorthand } -> - let key' = this#object_key key in - let value' = this#expression value in - if key == key' && value == value' then prop - else (loc, Init { key = key'; value = value'; shorthand }) - - | loc, Method { key; value = fn } -> - let key' = this#object_key key in - let fn' = map_loc this#function_ fn in - if key == key' && fn == fn' then prop - else (loc, Method { key = key'; value = fn' }) - - | loc, Get { key; value = fn } -> - let key' = this#object_key key in - let fn' = map_loc this#function_ fn in - if key == key' && fn == fn' then prop - else (loc, Get { key = key'; value = fn' }) - - | loc, Set { key; value = fn } -> - let key' = this#object_key key in - let fn' = map_loc this#function_ fn in - if key == key' && fn == fn' then prop - else (loc, Set { key = key'; value = fn' }) - - method object_key (key: (Loc.t, Loc.t) Flow_ast.Expression.Object.Property.key) = - let open Flow_ast.Expression.Object.Property in - match key with - | Literal (loc, lit) -> - id_loc this#literal loc lit key (fun lit -> Literal (loc, lit)) - | Identifier ident -> - id this#object_key_identifier ident key (fun ident -> Identifier ident) - | PrivateName ident -> - id this#private_name ident key (fun ident -> PrivateName ident) - | Computed expr -> - id this#expression expr key (fun expr -> Computed expr) - - method object_key_identifier (ident: Loc.t Flow_ast.Identifier.t) = - this#identifier ident - - method opaque_type _loc (otype: (Loc.t, Loc.t) Flow_ast.Statement.OpaqueType.t) = - let open Flow_ast.Statement.OpaqueType in - let { id; tparams; impltype; supertype } = otype in - let id' = this#identifier id in - let tparams' = map_opt this#type_parameter_declaration tparams in - let impltype' = map_opt this#type_ impltype in - let supertype' = map_opt this#type_ supertype in - if id == id' && - impltype == impltype' && - tparams == tparams' && - impltype == impltype' && - supertype == supertype' - then otype - else { - id = id'; - tparams = tparams'; - impltype = impltype'; - supertype = supertype' - } - - method function_param_pattern (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#binding_pattern expr - - method variable_declarator_pattern ~kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#binding_pattern ~kind expr - - method catch_clause_pattern (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#binding_pattern ~kind:Flow_ast.Statement.VariableDeclaration.Let expr - - method for_in_assignment_pattern (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#assignment_pattern expr - - method for_of_assignment_pattern (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#assignment_pattern expr - - method binding_pattern ?(kind=Flow_ast.Statement.VariableDeclaration.Var) (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern ~kind expr - - method assignment_pattern (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern expr - - (* NOTE: Patterns are highly overloaded. 
A pattern can be a binding pattern, + { leading = leading'; trailing = trailing'; internal }) + + method expression (expr : ('loc, 'loc) Ast.Expression.t) = + Ast.Expression.( + match expr with + | (_, This) -> expr + | (_, Super) -> expr + | (loc, Array x) -> id_loc this#array loc x expr (fun x -> (loc, Array x)) + | (loc, ArrowFunction x) -> + id_loc this#arrow_function loc x expr (fun x -> (loc, ArrowFunction x)) + | (loc, Assignment x) -> id_loc this#assignment loc x expr (fun x -> (loc, Assignment x)) + | (loc, Binary x) -> id_loc this#binary loc x expr (fun x -> (loc, Binary x)) + | (loc, Call x) -> id_loc this#call loc x expr (fun x -> (loc, Call x)) + | (loc, Class x) -> id_loc this#class_ loc x expr (fun x -> (loc, Class x)) + | (loc, Comprehension x) -> + id_loc this#comprehension loc x expr (fun x -> (loc, Comprehension x)) + | (loc, Conditional x) -> + id_loc this#conditional loc x expr (fun x -> (loc, Conditional x)) + | (loc, Function x) -> + id_loc this#function_expression loc x expr (fun x -> (loc, Function x)) + | (loc, Generator x) -> id_loc this#generator loc x expr (fun x -> (loc, Generator x)) + | (loc, Identifier x) -> id this#identifier x expr (fun x -> (loc, Identifier x)) + | (loc, Import x) -> id (this#import loc) x expr (fun x -> (loc, Import x)) + | (loc, JSXElement x) -> id_loc this#jsx_element loc x expr (fun x -> (loc, JSXElement x)) + | (loc, JSXFragment x) -> + id_loc this#jsx_fragment loc x expr (fun x -> (loc, JSXFragment x)) + | (loc, Literal x) -> id_loc this#literal loc x expr (fun x -> (loc, Literal x)) + | (loc, Logical x) -> id_loc this#logical loc x expr (fun x -> (loc, Logical x)) + | (loc, Member x) -> id_loc this#member loc x expr (fun x -> (loc, Member x)) + | (loc, MetaProperty x) -> + id_loc this#meta_property loc x expr (fun x -> (loc, MetaProperty x)) + | (loc, New x) -> id_loc this#new_ loc x expr (fun x -> (loc, New x)) + | (loc, Object x) -> id_loc this#object_ loc x expr (fun x -> (loc, Object x)) + | (loc, OptionalCall x) -> + id (this#optional_call loc) x expr (fun x -> (loc, OptionalCall x)) + | (loc, OptionalMember x) -> + id_loc this#optional_member loc x expr (fun x -> (loc, OptionalMember x)) + | (loc, Sequence x) -> id_loc this#sequence loc x expr (fun x -> (loc, Sequence x)) + | (loc, TaggedTemplate x) -> + id_loc this#tagged_template loc x expr (fun x -> (loc, TaggedTemplate x)) + | (loc, TemplateLiteral x) -> + id_loc this#template_literal loc x expr (fun x -> (loc, TemplateLiteral x)) + | (loc, TypeCast x) -> id_loc this#type_cast loc x expr (fun x -> (loc, TypeCast x)) + | (loc, Unary x) -> id_loc this#unary_expression loc x expr (fun x -> (loc, Unary x)) + | (loc, Update x) -> id_loc this#update_expression loc x expr (fun x -> (loc, Update x)) + | (loc, Yield x) -> id_loc this#yield loc x expr (fun x -> (loc, Yield x))) + + method array _loc (expr : ('loc, 'loc) Ast.Expression.Array.t) = + Ast.Expression.( + let { Array.elements; comments } = expr in + let elements' = ListUtils.ident_map (map_opt this#expression_or_spread) elements in + let comments' = this#syntax_opt comments in + if elements == elements' && comments == comments' then + expr + else + { Array.elements = elements'; comments = comments' }) + + method arrow_function loc (expr : ('loc, 'loc) Ast.Function.t) = this#function_ loc expr + + method assignment _loc (expr : ('loc, 'loc) Ast.Expression.Assignment.t) = + Ast.Expression.Assignment.( + let { operator = _; left; right } = expr in + let left' = this#assignment_pattern left in + let right' = this#expression 
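(* The dispatch methods above all share one shape: visit the payload, then
 * re-apply the constructor (the closure passed last) only if the visitor
 * produced a new value. A toy instance of that shape using the id_loc
 * helper from the top of this file; the type and function are invented
 * for the example: *)
type pos = int
type toy_stmt = Print of pos * string

(* appends "!" to long messages, returns short ones untouched *)
let emphasize _pos msg = if String.length msg > 3 then msg ^ "!" else msg

let map_print (s : toy_stmt) =
  match s with
  | Print (pos, msg) -> id_loc emphasize pos msg s (fun msg -> Print (pos, msg))

let () =
  let short = Print (0, "ok") in
  assert (map_print short == short);                        (* node reused *)
  assert (map_print (Print (0, "hello")) = Print (0, "hello!"))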
right in + if left == left' && right == right' then + expr + else + { expr with left = left'; right = right' }) + + method binary _loc (expr : ('loc, 'loc) Ast.Expression.Binary.t) = + Ast.Expression.Binary.( + let { operator = _; left; right } = expr in + let left' = this#expression left in + let right' = this#expression right in + if left == left' && right == right' then + expr + else + { expr with left = left'; right = right' }) + + method block _loc (stmt : ('loc, 'loc) Ast.Statement.Block.t) = + Ast.Statement.Block.( + let { body } = stmt in + let body' = this#statement_list body in + if body == body' then + stmt + else + { body = body' }) + + method break _loc (break : 'loc Ast.Statement.Break.t) = + Ast.Statement.Break.( + let { label; comments } = break in + let label' = map_opt this#label_identifier label in + let comments' = this#syntax_opt comments in + if label == label' && comments == comments' then + break + else + { label = label'; comments = comments' }) + + method call _loc (expr : ('loc, 'loc) Ast.Expression.Call.t) = + Ast.Expression.Call.( + let { callee; targs; arguments } = expr in + let callee' = this#expression callee in + let targs' = map_opt this#type_parameter_instantiation_with_implicit targs in + let arguments' = this#call_arguments arguments in + if callee == callee' && targs == targs' && arguments == arguments' then + expr + else + { callee = callee'; targs = targs'; arguments = arguments' }) + + method call_arguments (arguments : ('loc, 'loc) Ast.Expression.expression_or_spread list) = + ListUtils.ident_map this#expression_or_spread arguments + + method optional_call loc (expr : ('loc, 'loc) Ast.Expression.OptionalCall.t) = + Ast.Expression.OptionalCall.( + let { call; optional = _ } = expr in + let call' = this#call loc call in + if call == call' then + expr + else + { expr with call = call' }) + + method catch_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) = map_loc this#block body + + method catch_clause _loc (clause : ('loc, 'loc) Ast.Statement.Try.CatchClause.t') = + Ast.Statement.Try.CatchClause.( + let { param; body } = clause in + let param' = map_opt this#catch_clause_pattern param in + let body' = this#catch_body body in + if param == param' && body == body' then + clause + else + { param = param'; body = body' }) + + method class_ _loc (cls : ('loc, 'loc) Ast.Class.t) = + Ast.Class.( + let { id; body; tparams = _; extends; implements = _; classDecorators = _ } = cls in + let id' = map_opt this#class_identifier id in + let body' = this#class_body body in + let extends' = map_opt (map_loc this#class_extends) extends in + if id == id' && body == body' && extends == extends' then + cls + else + { cls with id = id'; body = body'; extends = extends' }) + + method class_extends _loc (extends : ('loc, 'loc) Ast.Class.Extends.t') = + Ast.Class.Extends.( + let { expr; targs } = extends in + let expr' = this#expression expr in + let targs' = map_opt this#type_parameter_instantiation targs in + if expr == expr' && targs == targs' then + extends + else + { expr = expr'; targs = targs' }) + + method class_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = + this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let ident + + method class_body (cls_body : ('loc, 'loc) Ast.Class.Body.t) = + Ast.Class.Body.( + let (loc, { body }) = cls_body in + let body' = ListUtils.ident_map this#class_element body in + if body == body' then + cls_body + else + (loc, { body = body' })) + + method class_element (elem : ('loc, 'loc) Ast.Class.Body.element) = + 
Ast.Class.Body.( + match elem with + | Method (loc, meth) -> + id_loc this#class_method loc meth elem (fun meth -> Method (loc, meth)) + | Property (loc, prop) -> + id_loc this#class_property loc prop elem (fun prop -> Property (loc, prop)) + | PrivateField (loc, field) -> + id_loc this#class_private_field loc field elem (fun field -> PrivateField (loc, field))) + + method class_method _loc (meth : ('loc, 'loc) Ast.Class.Method.t') = + Ast.Class.Method.( + let { kind = _; key; value; static = _; decorators = _ } = meth in + let key' = this#object_key key in + let value' = map_loc this#function_expression value in + if key == key' && value == value' then + meth + else + { meth with key = key'; value = value' }) + + method class_property _loc (prop : ('loc, 'loc) Ast.Class.Property.t') = + Ast.Class.Property.( + let { key; value; annot; static = _; variance = _ } = prop in + let key' = this#object_key key in + let value' = map_opt this#expression value in + let annot' = this#type_annotation_hint annot in + if key == key' && value == value' && annot' == annot then + prop + else + { prop with key = key'; value = value'; annot = annot' }) + + method class_private_field _loc (prop : ('loc, 'loc) Ast.Class.PrivateField.t') = + Ast.Class.PrivateField.( + let { key; value; annot; static = _; variance = _ } = prop in + let key' = this#private_name key in + let value' = map_opt this#expression value in + let annot' = this#type_annotation_hint annot in + if key == key' && value == value' && annot' == annot then + prop + else + { prop with key = key'; value = value'; annot = annot' }) + + (* TODO *) + method comprehension _loc (expr : ('loc, 'loc) Ast.Expression.Comprehension.t) = expr + + method conditional _loc (expr : ('loc, 'loc) Ast.Expression.Conditional.t) = + Ast.Expression.Conditional.( + let { test; consequent; alternate } = expr in + let test' = this#predicate_expression test in + let consequent' = this#expression consequent in + let alternate' = this#expression alternate in + if test == test' && consequent == consequent' && alternate == alternate' then + expr + else + { test = test'; consequent = consequent'; alternate = alternate' }) + + method continue _loc (cont : 'loc Ast.Statement.Continue.t) = + Ast.Statement.Continue.( + let { label; comments } = cont in + let label' = map_opt this#label_identifier label in + let comments' = this#syntax_opt comments in + if label == label' then + cont + else + { label = label'; comments = comments' }) + + method debugger _loc = () + + method declare_class _loc (decl : ('loc, 'loc) Ast.Statement.DeclareClass.t) = + Ast.Statement.DeclareClass.( + let { id = ident; tparams; body; extends; mixins; implements } = decl in + let id' = this#class_identifier ident in + let tparams' = map_opt this#type_parameter_declaration tparams in + let body' = map_loc this#object_type body in + let extends' = map_opt (map_loc this#generic_type) extends in + let mixins' = ListUtils.ident_map (map_loc this#generic_type) mixins in + if + id' == ident + && tparams' == tparams + && body' == body + && extends' == extends + && mixins' == mixins + then + decl + else + { + id = id'; + tparams = tparams'; + body = body'; + extends = extends'; + mixins = mixins'; + implements; + }) + + method declare_export_declaration + _loc (decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.t) = + Ast.Statement.DeclareExportDeclaration.( + let { default; source; specifiers; declaration } = decl in + let specifiers' = map_opt this#export_named_specifier specifiers in + let declaration' = 
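(* map_loc (from the top of this file) lifts a visitor over a (loc, node)
 * pair and reuses the pair whenever the node itself is reused; it is what
 * the methods above apply to located bodies such as blocks, object types
 * and generic extends clauses. Toy use with an invented visitor: *)
let clamp_at _loc n = if n < 0 then 0 else n

let () =
  let pair = (42, 7) in
  assert (map_loc clamp_at pair == pair);       (* untouched pair is shared *)
  assert (map_loc clamp_at (42, -1) = (42, 0))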
map_opt this#declare_export_declaration_decl declaration in + if specifiers == specifiers' && declaration == declaration' then + decl + else + { default; source; specifiers = specifiers'; declaration = declaration' }) + + method declare_export_declaration_decl + (decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.declaration) = + Ast.Statement.DeclareExportDeclaration.( + match decl with + | Variable (loc, dv) -> + let dv' = this#declare_variable loc dv in + if dv' == dv then + decl + else + Variable (loc, dv') + | Function (loc, df) -> + let df' = this#declare_function loc df in + if df' == df then + decl + else + Function (loc, df') + | Class (loc, dc) -> + let dc' = this#declare_class loc dc in + if dc' == dc then + decl + else + Class (loc, dc') + | DefaultType t -> + let t' = this#type_ t in + if t' == t then + decl + else + DefaultType t' + | NamedType (loc, ta) -> + let ta' = this#type_alias loc ta in + if ta' == ta then + decl + else + NamedType (loc, ta') + | NamedOpaqueType (loc, ot) -> + let ot' = this#opaque_type loc ot in + if ot' == ot then + decl + else + NamedOpaqueType (loc, ot') + | Interface (loc, i) -> + let i' = this#interface loc i in + if i' == i then + decl + else + Interface (loc, i')) + + method declare_function _loc (decl : ('loc, 'loc) Ast.Statement.DeclareFunction.t) = + Ast.Statement.DeclareFunction.( + let { id = ident; annot; predicate } = decl in + let id' = this#function_identifier ident in + let annot' = this#type_annotation annot in + (* TODO: walk predicate *) + if id' == ident && annot' == annot then + decl + else + { id = id'; annot = annot'; predicate }) + + method declare_interface loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) = + this#interface loc decl + + method declare_module _loc (m : ('loc, 'loc) Ast.Statement.DeclareModule.t) = + Ast.Statement.DeclareModule.( + let { id; body; kind } = m in + let body' = map_loc this#block body in + if body' == body then + m + else + { id; body = body'; kind }) + + (* TODO *) + method declare_module_exports _loc (annot : ('loc, 'loc) Ast.Type.annotation) = annot + + method declare_type_alias loc (decl : ('loc, 'loc) Ast.Statement.TypeAlias.t) = + this#type_alias loc decl + + method declare_variable _loc (decl : ('loc, 'loc) Ast.Statement.DeclareVariable.t) = + Ast.Statement.DeclareVariable.( + let { id = ident; annot } = decl in + let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident in + let annot' = this#type_annotation_hint annot in + if id' == ident && annot' == annot then + decl + else + { id = id'; annot = annot' }) + + method do_while _loc (stuff : ('loc, 'loc) Ast.Statement.DoWhile.t) = + Ast.Statement.DoWhile.( + let { body; test; comments } = stuff in + let body' = this#statement body in + let test' = this#predicate_expression test in + let comments' = this#syntax_opt comments in + if body == body' && test == test' && comments == comments' then + stuff + else + { body = body'; test = test'; comments = comments' }) + + method empty _loc = () + + method enum_declaration _loc (enum : ('loc, 'loc) Ast.Statement.EnumDeclaration.t) = + Ast.Statement.EnumDeclaration.( + let { id = ident; body } = enum in + let id' = this#identifier ident in + let body' = + match body with + | BooleanBody boolean_body -> + id this#enum_boolean_body boolean_body body (fun body -> BooleanBody body) + | NumberBody number_body -> + id this#enum_number_body number_body body (fun body -> NumberBody body) + | StringBody string_body -> + id this#enum_string_body string_body body (fun body 
-> StringBody body) + | SymbolBody symbol_body -> + id this#enum_symbol_body symbol_body body (fun body -> SymbolBody body) + in + if ident == id' && body == body' then + enum + else + { id = id'; body = body' }) + + method enum_boolean_body (body : 'loc Ast.Statement.EnumDeclaration.BooleanBody.t) = + Ast.Statement.EnumDeclaration.BooleanBody.( + let { members; explicitType = _ } = body in + let members' = ListUtils.ident_map this#enum_boolean_member members in + if members == members' then + body + else + { body with members = members' }) + + method enum_number_body (body : 'loc Ast.Statement.EnumDeclaration.NumberBody.t) = + Ast.Statement.EnumDeclaration.NumberBody.( + let { members; explicitType = _ } = body in + let members' = ListUtils.ident_map this#enum_number_member members in + if members == members' then + body + else + { body with members = members' }) + + method enum_string_body (body : 'loc Ast.Statement.EnumDeclaration.StringBody.t) = + Ast.Statement.EnumDeclaration.StringBody.( + let { members; explicitType = _ } = body in + let members' = + match members with + | Defaulted members -> Defaulted (ListUtils.ident_map this#enum_defaulted_member members) + | Initialized members -> + Initialized (ListUtils.ident_map this#enum_string_member members) + in + if members == members' then + body + else + { body with members = members' }) + + method enum_symbol_body (body : 'loc Ast.Statement.EnumDeclaration.SymbolBody.t) = + Ast.Statement.EnumDeclaration.SymbolBody.( + let { members } = body in + let members' = ListUtils.ident_map this#enum_defaulted_member members in + if members == members' then + body + else + { members = members' }) + + method enum_defaulted_member (member : 'loc Ast.Statement.EnumDeclaration.DefaultedMember.t) = + Ast.Statement.EnumDeclaration.DefaultedMember.( + let (loc, { id = ident }) = member in + let id' = this#identifier ident in + if ident = id' then + member + else + (loc, { id = id' })) + + method enum_boolean_member + (member : (bool, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t) = + Ast.Statement.EnumDeclaration.InitializedMember.( + let (loc, { id = ident; init }) = member in + let id' = this#identifier ident in + if ident = id' then + member + else + (loc, { id = id'; init })) + + method enum_number_member + (member : (Ast.NumberLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t) = + Ast.Statement.EnumDeclaration.InitializedMember.( + let (loc, { id = ident; init }) = member in + let id' = this#identifier ident in + if ident = id' then + member + else + (loc, { id = id'; init })) + + method enum_string_member + (member : (Ast.StringLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t) = + Ast.Statement.EnumDeclaration.InitializedMember.( + let (loc, { id = ident; init }) = member in + let id' = this#identifier ident in + if ident = id' then + member + else + (loc, { id = id'; init })) + + method export_default_declaration + _loc (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.t) = + Ast.Statement.ExportDefaultDeclaration.( + let { default; declaration } = decl in + let declaration' = this#export_default_declaration_decl declaration in + if declaration' = declaration then + decl + else + { default; declaration = declaration' }) + + method export_default_declaration_decl + (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.declaration) = + Ast.Statement.ExportDefaultDeclaration.( + match decl with + | Declaration stmt -> id this#statement stmt decl (fun stmt -> Declaration stmt) + | 
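(* Note on the two equality operators in this class: (==) is physical
 * equality, (=) is structural. Most "did anything change?" checks use (==),
 * so the answer is a cheap pointer comparison and untouched subtrees stay
 * shared; the enum-member checks just above use (=), which still returns
 * the original node when nothing changed but pays for a deep comparison.
 * A small reminder of the difference: *)
let () =
  let xs = [ 1; 2; 3 ] in
  let ys = List.map (fun x -> x) xs in
  assert (xs = ys);         (* same contents *)
  assert (not (xs == ys))   (* but freshly allocated cons cells *)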
Expression expr -> id this#expression expr decl (fun expr -> Expression expr)) + + method export_named_declaration + _loc (decl : ('loc, 'loc) Ast.Statement.ExportNamedDeclaration.t) = + Ast.Statement.ExportNamedDeclaration.( + let { exportKind; source; specifiers; declaration } = decl in + let specifiers' = map_opt this#export_named_specifier specifiers in + let declaration' = map_opt this#statement declaration in + if specifiers == specifiers' && declaration == declaration' then + decl + else + { exportKind; source; specifiers = specifiers'; declaration = declaration' }) + + (* TODO *) + method export_named_specifier (spec : 'loc Ast.Statement.ExportNamedDeclaration.specifier) = + spec + + method expression_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Expression.t) = + Ast.Statement.Expression.( + let { expression = expr; directive = _ } = stmt in + id this#expression expr stmt (fun expression -> { stmt with expression })) + + method expression_or_spread expr_or_spread = + Ast.Expression.( + match expr_or_spread with + | Expression expr -> id this#expression expr expr_or_spread (fun expr -> Expression expr) + | Spread spread -> + id this#spread_element spread expr_or_spread (fun spread -> Spread spread)) + + method for_in_statement _loc (stmt : ('loc, 'loc) Ast.Statement.ForIn.t) = + Ast.Statement.ForIn.( + let { left; right; body; each } = stmt in + let left' = this#for_in_statement_lhs left in + let right' = this#expression right in + let body' = this#statement body in + if left == left' && right == right' && body == body' then + stmt + else + { left = left'; right = right'; body = body'; each }) + + method for_in_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForIn.left) = + Ast.Statement.ForIn.( + match left with + | LeftDeclaration (loc, decl) -> + id_loc this#variable_declaration loc decl left (fun decl -> LeftDeclaration (loc, decl)) + | LeftPattern patt -> + id this#for_in_assignment_pattern patt left (fun patt -> LeftPattern patt)) + + method for_of_statement _loc (stuff : ('loc, 'loc) Ast.Statement.ForOf.t) = + Ast.Statement.ForOf.( + let { left; right; body; async } = stuff in + let left' = this#for_of_statement_lhs left in + let right' = this#expression right in + let body' = this#statement body in + if left == left' && right == right' && body == body' then + stuff + else + { left = left'; right = right'; body = body'; async }) + + method for_of_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForOf.left) = + Ast.Statement.ForOf.( + match left with + | LeftDeclaration (loc, decl) -> + id_loc this#variable_declaration loc decl left (fun decl -> LeftDeclaration (loc, decl)) + | LeftPattern patt -> + id this#for_of_assignment_pattern patt left (fun patt -> LeftPattern patt)) + + method for_statement _loc (stmt : ('loc, 'loc) Ast.Statement.For.t) = + Ast.Statement.For.( + let { init; test; update; body } = stmt in + let init' = map_opt this#for_statement_init init in + let test' = map_opt this#predicate_expression test in + let update' = map_opt this#expression update in + let body' = this#statement body in + if init == init' && test == test' && update == update' && body == body' then + stmt + else + { init = init'; test = test'; update = update'; body = body' }) + + method for_statement_init (init : ('loc, 'loc) Ast.Statement.For.init) = + Ast.Statement.For.( + match init with + | InitDeclaration (loc, decl) -> + id_loc this#variable_declaration loc decl init (fun decl -> InitDeclaration (loc, decl)) + | InitExpression expr -> id this#expression expr init (fun expr -> 
InitExpression expr)) + + method function_param_type (fpt : ('loc, 'loc) Ast.Type.Function.Param.t) = + Ast.Type.Function.Param.( + let (loc, { annot; name; optional }) = fpt in + let annot' = this#type_ annot in + if annot' == annot then + fpt + else + (loc, { annot = annot'; name; optional })) + + method function_rest_param_type (frpt : ('loc, 'loc) Ast.Type.Function.RestParam.t) = + Ast.Type.Function.RestParam.( + let (loc, { argument }) = frpt in + let argument' = this#function_param_type argument in + if argument' == argument then + frpt + else + (loc, { argument = argument' })) + + method function_type _loc (ft : ('loc, 'loc) Ast.Type.Function.t) = + Ast.Type.Function.( + let { params = (params_loc, { Params.params = ps; rest = rpo }); return; tparams } = ft in + let ps' = ListUtils.ident_map this#function_param_type ps in + let rpo' = map_opt this#function_rest_param_type rpo in + let return' = this#type_ return in + let tparams' = map_opt this#type_parameter_declaration tparams in + if ps' == ps && rpo' == rpo && return' == return && tparams' == tparams then + ft + else + { + params = (params_loc, { Params.params = ps'; rest = rpo' }); + return = return'; + tparams = tparams'; + }) + + method label_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident + + method object_property_value_type (opvt : ('loc, 'loc) Ast.Type.Object.Property.value) = + Ast.Type.Object.Property.( + match opvt with + | Init t -> id this#type_ t opvt (fun t -> Init t) + | Get (loc, ft) -> id_loc this#function_type loc ft opvt (fun ft -> Get (loc, ft)) + | Set (loc, ft) -> id_loc this#function_type loc ft opvt (fun ft -> Set (loc, ft))) + + method object_property_type (opt : ('loc, 'loc) Ast.Type.Object.Property.t) = + Ast.Type.Object.Property.( + let (loc, { key; value; optional; static; proto; _method; variance }) = opt in + let value' = this#object_property_value_type value in + if value' == value then + opt + else + (loc, { key; value = value'; optional; static; proto; _method; variance })) + + method object_spread_property_type (opt : ('loc, 'loc) Ast.Type.Object.SpreadProperty.t) = + Ast.Type.Object.SpreadProperty.( + let (loc, { argument }) = opt in + let argument' = this#type_ argument in + if argument' == argument then + opt + else + (loc, { argument = argument' })) + + method object_indexer_property_type (opt : ('loc, 'loc) Ast.Type.Object.Indexer.t) = + Ast.Type.Object.Indexer.( + let (loc, { id; key; value; static; variance }) = opt in + let key' = this#type_ key in + let value' = this#type_ value in + if key' == key && value' == value then + opt + else + (loc, { id; key = key'; value = value'; static; variance })) + + method object_type _loc (ot : ('loc, 'loc) Ast.Type.Object.t) = + Ast.Type.Object.( + let { properties; exact; inexact } = ot in + let properties' = + ListUtils.ident_map + (fun p -> + match p with + | Property p' -> id this#object_property_type p' p (fun p' -> Property p') + | SpreadProperty p' -> + id this#object_spread_property_type p' p (fun p' -> SpreadProperty p') + | Indexer p' -> id this#object_indexer_property_type p' p (fun p' -> Indexer p') + | CallProperty _ + | InternalSlot _ -> + p) (* TODO *) + properties + in + if properties' == properties then + ot + else + { properties = properties'; exact; inexact }) + + method interface_type _loc (i : ('loc, 'loc) Ast.Type.Interface.t) = + Ast.Type.Interface.( + let { extends; body } = i in + let extends' = ListUtils.ident_map (map_loc this#generic_type) extends in + let body' = map_loc this#object_type body 
in + if extends' == extends && body' == body then + i + else + { extends = extends'; body = body' }) + + method generic_identifier_type (git : ('loc, 'loc) Ast.Type.Generic.Identifier.t) = + Ast.Type.Generic.Identifier.( + match git with + | Unqualified i -> id this#identifier i git (fun i -> Unqualified i) + | Qualified (loc, { qualification; id }) -> + let qualification' = this#generic_identifier_type qualification in + let id' = this#identifier id in + if qualification' == qualification && id' == id then + git + else + Qualified (loc, { qualification = qualification'; id = id' })) + + method variance (variance : 'loc Ast.Variance.t option) = variance + + method type_parameter_instantiation_with_implicit + (pi : ('loc, 'loc) Ast.Expression.TypeParameterInstantiation.t) = + let (loc, targs) = pi in + let targs' = ListUtils.ident_map this#type_or_implicit targs in + if targs' == targs then + pi + else + (loc, targs') + + method type_parameter_instantiation (pi : ('loc, 'loc) Ast.Type.ParameterInstantiation.t) = + let (loc, targs) = pi in + let targs' = ListUtils.ident_map this#type_ targs in + if targs' == targs then + pi + else + (loc, targs') + + method type_parameter_declaration (pd : ('loc, 'loc) Ast.Type.ParameterDeclaration.t) = + let (loc, type_params) = pd in + let type_params' = + ListUtils.ident_map this#type_parameter_declaration_type_param type_params + in + if type_params' == type_params then + pd + else + (loc, type_params') + + method type_parameter_declaration_type_param + (type_param : ('loc, 'loc) Ast.Type.ParameterDeclaration.TypeParam.t) = + Ast.Type.ParameterDeclaration.TypeParam.( + let (loc, { name; bound; variance; default }) = type_param in + let name' = this#identifier name in + let bound' = this#type_annotation_hint bound in + let variance' = this#variance variance in + let default' = map_opt this#type_ default in + if name' == name && bound' == bound && variance' == variance && default' == default then + type_param + else + (loc, { name = name'; bound = bound'; variance = variance'; default = default' })) + + method generic_type _loc (gt : ('loc, 'loc) Ast.Type.Generic.t) = + Ast.Type.Generic.( + let { id; targs } = gt in + let id' = this#generic_identifier_type id in + let targs' = map_opt this#type_parameter_instantiation targs in + if id' == id && targs' == targs then + gt + else + { id = id'; targs = targs' }) + + method string_literal_type _loc (lit : Ast.StringLiteral.t) = lit + + method type_ (t : ('loc, 'loc) Ast.Type.t) = + Ast.Type.( + match t with + | (_, Any) + | (_, Mixed) + | (_, Empty) + | (_, Void) + | (_, Null) + | (_, Number) + | (_, BigInt) + | (_, String) + | (_, Boolean) + | (_, NumberLiteral _) + | (_, BigIntLiteral _) + | (_, BooleanLiteral _) + | (_, Exists) -> + t + | (loc, Nullable t') -> id this#type_ t' t (fun t' -> (loc, Nullable t')) + | (loc, Array t') -> id this#type_ t' t (fun t' -> (loc, Array t')) + | (loc, Typeof t') -> id this#type_ t' t (fun t' -> (loc, Typeof t')) + | (loc, Function ft) -> id_loc this#function_type loc ft t (fun ft -> (loc, Function ft)) + | (loc, Object ot) -> id_loc this#object_type loc ot t (fun ot -> (loc, Object ot)) + | (loc, Interface i) -> id_loc this#interface_type loc i t (fun i -> (loc, Interface i)) + | (loc, Generic gt) -> id_loc this#generic_type loc gt t (fun gt -> (loc, Generic gt)) + | (loc, StringLiteral lit) -> + id_loc this#string_literal_type loc lit t (fun lit -> (loc, StringLiteral lit)) + | (loc, Union (t0, t1, ts)) -> + let t0' = this#type_ t0 in + let t1' = this#type_ t1 in + let 
ts' = ListUtils.ident_map this#type_ ts in + if t0' == t0 && t1' == t1 && ts' == ts then + t + else + (loc, Union (t0', t1', ts')) + | (loc, Intersection (t0, t1, ts)) -> + let t0' = this#type_ t0 in + let t1' = this#type_ t1 in + let ts' = ListUtils.ident_map this#type_ ts in + if t0' == t0 && t1' == t1 && ts' == ts then + t + else + (loc, Intersection (t0', t1', ts')) + | (loc, Tuple ts) -> + let ts' = ListUtils.ident_map this#type_ ts in + if ts' == ts then + t + else + (loc, Tuple ts')) + + method type_or_implicit t = + Ast.Expression.TypeParameterInstantiation.( + match t with + | Explicit x -> + let x' = this#type_ x in + if x' == x then + t + else + Explicit x' + | Implicit _ -> t) + + method type_annotation (annot : ('loc, 'loc) Ast.Type.annotation) = + let (loc, a) = annot in + id this#type_ a annot (fun a -> (loc, a)) + + method type_annotation_hint (return : ('M, 'T) Ast.Type.annotation_or_hint) = + Ast.Type.( + match return with + | Available annot -> + let annot' = this#type_annotation annot in + if annot' == annot then + return + else + Available annot' + | Missing _loc -> return) + + method function_declaration loc (stmt : ('loc, 'loc) Ast.Function.t) = this#function_ loc stmt + + method function_expression loc (stmt : ('loc, 'loc) Ast.Function.t) = this#function_ loc stmt + + (* Internal helper for function declarations, function expressions and arrow functions *) + method function_ _loc (expr : ('loc, 'loc) Ast.Function.t) = + Ast.Function.( + let { id = ident; params; body; async; generator; predicate; return; tparams; sig_loc } = + expr + in + let ident' = map_opt this#function_identifier ident in + let params' = this#function_params params in + let return' = this#type_annotation_hint return in + let body' = this#function_body_any body in + (* TODO: walk predicate *) + let tparams' = map_opt this#type_parameter_declaration tparams in + if + ident == ident' + && params == params' + && body == body' + && return == return' + && tparams == tparams' + then + expr + else + { + id = ident'; + params = params'; + return = return'; + body = body'; + async; + generator; + predicate; + tparams = tparams'; + sig_loc; + }) + + method function_params (params : ('loc, 'loc) Ast.Function.Params.t) = + Ast.Function.( + let (loc, { Params.params = params_list; rest }) = params in + let params_list' = ListUtils.ident_map this#function_param params_list in + let rest' = map_opt this#function_rest_param rest in + if params_list == params_list' && rest == rest' then + params + else + (loc, { Params.params = params_list'; rest = rest' })) + + method function_param (param : ('loc, 'loc) Ast.Function.Param.t) = + Ast.Function.Param.( + let (loc, { argument; default }) = param in + let argument' = this#function_param_pattern argument in + let default' = map_opt this#expression default in + if argument == argument' && default == default' then + param + else + (loc, { argument = argument'; default = default' })) + + method function_body_any (body : ('loc, 'loc) Ast.Function.body) = + match body with + | Ast.Function.BodyBlock (loc, block) -> + id_loc this#function_body loc block body (fun block -> Ast.Function.BodyBlock (loc, block)) + | Ast.Function.BodyExpression expr -> + id this#expression expr body (fun expr -> Ast.Function.BodyExpression expr) + + method function_body loc (block : ('loc, 'loc) Ast.Statement.Block.t) = this#block loc block + + method function_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = + this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident + + (* 
TODO *) + method generator _loc (expr : ('loc, 'loc) Ast.Expression.Generator.t) = expr + + method identifier (id : ('loc, 'loc) Ast.Identifier.t) = + Ast.Identifier.( + let (loc, { name; comments }) = id in + let comments' = this#syntax_opt comments in + if comments == comments' then + id + else + (loc, { name; comments = comments' })) + + method interface _loc (interface : ('loc, 'loc) Ast.Statement.Interface.t) = + Ast.Statement.Interface.( + let { id = ident; tparams; extends; body } = interface in + let id' = this#class_identifier ident in + let tparams' = map_opt this#type_parameter_declaration tparams in + let extends' = ListUtils.ident_map (map_loc this#generic_type) extends in + let body' = map_loc this#object_type body in + if id' == ident && tparams' == tparams && extends' == extends && body' == body then + interface + else + { id = id'; tparams = tparams'; extends = extends'; body = body' }) + + method interface_declaration loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) = + this#interface loc decl + + method private_name (expr : 'loc Ast.PrivateName.t) = expr + + method import _loc (expr : ('loc, 'loc) Ast.Expression.t) = expr + + method if_consequent_statement ~has_else (stmt : ('loc, 'loc) Ast.Statement.t) = + ignore has_else; + this#statement stmt + + method if_alternate_statement (stmt : ('loc, 'loc) Ast.Statement.t) = this#statement stmt + + method if_statement _loc (stmt : ('loc, 'loc) Ast.Statement.If.t) = + Ast.Statement.If.( + let { test; consequent; alternate; comments } = stmt in + let test' = this#predicate_expression test in + let consequent' = this#if_consequent_statement ~has_else:(alternate <> None) consequent in + let alternate' = map_opt this#if_alternate_statement alternate in + let comments' = this#syntax_opt comments in + if + test == test' + && consequent == consequent' + && alternate == alternate' + && comments == comments' + then + stmt + else + { test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }) + + method import_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ImportDeclaration.t) = + Ast.Statement.ImportDeclaration.( + let { importKind; source; specifiers; default } = decl in + let specifiers' = map_opt this#import_specifier specifiers in + let default' = map_opt this#import_default_specifier default in + if specifiers == specifiers' && default == default' then + decl + else + { importKind; source; specifiers = specifiers'; default = default' }) + + method import_specifier (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.specifier) = + Ast.Statement.ImportDeclaration.( + match specifier with + | ImportNamedSpecifiers named_specifiers -> + let named_specifiers' = + ListUtils.ident_map this#import_named_specifier named_specifiers + in + if named_specifiers == named_specifiers' then + specifier + else + ImportNamedSpecifiers named_specifiers' + | ImportNamespaceSpecifier (loc, ident) -> + id_loc this#import_namespace_specifier loc ident specifier (fun ident -> + ImportNamespaceSpecifier (loc, ident))) + + method import_named_specifier + (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.named_specifier) = + Ast.Statement.ImportDeclaration.( + let { kind; local; remote } = specifier in + let remote' = this#identifier remote in + let local' = + match local with + | None -> None + | Some ident -> + id + (this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let) + ident + local + (fun ident -> Some ident) + in + if local == local' && remote == remote' then + specifier + else + { kind; 
local = local'; remote = remote' }) + + method import_default_specifier (id : ('loc, 'loc) Ast.Identifier.t) = + this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let id + + method import_namespace_specifier _loc (id : ('loc, 'loc) Ast.Identifier.t) = + this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let id + + method jsx_element _loc (expr : ('loc, 'loc) Ast.JSX.element) = + Ast.JSX.( + let { openingElement; closingElement; children } = expr in + let openingElement' = this#jsx_opening_element openingElement in + let closingElement' = map_opt this#jsx_closing_element closingElement in + let children' = this#jsx_children children in + if + openingElement == openingElement' + && closingElement == closingElement' + && children == children' + then + expr + else + { + openingElement = openingElement'; + closingElement = closingElement'; + children = children'; + }) + + method jsx_fragment _loc (expr : ('loc, 'loc) Ast.JSX.fragment) = + Ast.JSX.( + let { frag_children; _ } = expr in + let children' = this#jsx_children frag_children in + if frag_children == children' then + expr + else + { expr with frag_children = children' }) + + method jsx_opening_element (elem : ('loc, 'loc) Ast.JSX.Opening.t) = + Ast.JSX.Opening.( + let (loc, { name; selfClosing; attributes }) = elem in + let name' = this#jsx_name name in + let attributes' = ListUtils.ident_map this#jsx_opening_attribute attributes in + if name == name' && attributes == attributes' then + elem + else + (loc, { name = name'; selfClosing; attributes = attributes' })) + + method jsx_closing_element (elem : ('loc, 'loc) Ast.JSX.Closing.t) = + Ast.JSX.Closing.( + let (loc, { name }) = elem in + let name' = this#jsx_name name in + if name == name' then + elem + else + (loc, { name = name' })) + + method jsx_opening_attribute (jsx_attr : ('loc, 'loc) Ast.JSX.Opening.attribute) = + Ast.JSX.Opening.( + match jsx_attr with + | Attribute attr -> id this#jsx_attribute attr jsx_attr (fun attr -> Attribute attr) + | SpreadAttribute (loc, attr) -> + id_loc this#jsx_spread_attribute loc attr jsx_attr (fun attr -> + SpreadAttribute (loc, attr))) + + method jsx_spread_attribute _loc (attr : ('loc, 'loc) Ast.JSX.SpreadAttribute.t') = + Ast.JSX.SpreadAttribute.( + let { argument } = attr in + id this#expression argument attr (fun argument -> { argument })) + + method jsx_attribute (attr : ('loc, 'loc) Ast.JSX.Attribute.t) = + Ast.JSX.Attribute.( + let (loc, { name; value }) = attr in + let value' = map_opt this#jsx_attribute_value value in + if value == value' then + attr + else + (loc, { name; value = value' })) + + method jsx_attribute_value (value : ('loc, 'loc) Ast.JSX.Attribute.value) = + Ast.JSX.Attribute.( + match value with + | Literal _ -> value + | ExpressionContainer (loc, expr) -> + id_loc this#jsx_expression loc expr value (fun expr -> ExpressionContainer (loc, expr))) + + method jsx_children ((loc, children) as orig : 'loc * ('loc, 'loc) Ast.JSX.child list) = + let children' = ListUtils.ident_map this#jsx_child children in + if children == children' then + orig + else + (loc, children') + + method jsx_child (child : ('loc, 'loc) Ast.JSX.child) = + Ast.JSX.( + match child with + | (loc, Element elem) -> + id_loc this#jsx_element loc elem child (fun elem -> (loc, Element elem)) + | (loc, Fragment frag) -> + id_loc this#jsx_fragment loc frag child (fun frag -> (loc, Fragment frag)) + | (loc, ExpressionContainer expr) -> + id_loc this#jsx_expression loc expr child (fun expr -> (loc, ExpressionContainer expr)) + | (loc, 
SpreadChild expr) -> + id this#expression expr child (fun expr -> (loc, SpreadChild expr)) + | (_loc, Text _) -> child) + + method jsx_expression _loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t) = + Ast.JSX.ExpressionContainer.( + let { expression } = jsx_expr in + match expression with + | Expression expr -> + id this#expression expr jsx_expr (fun expr -> { expression = Expression expr }) + | EmptyExpression -> jsx_expr) + + method jsx_name (name : ('loc, 'loc) Ast.JSX.name) = + Ast.JSX.( + let name' = + match name with + | Identifier id -> Identifier (this#jsx_identifier id) + | NamespacedName namespaced_name -> + NamespacedName (this#jsx_namespaced_name namespaced_name) + | MemberExpression member_exp -> MemberExpression (this#jsx_member_expression member_exp) + in + (* structural equality since it's easier than checking equality in each branch of the match + * above *) + if name = name' then + name + else + name') + + method jsx_namespaced_name (namespaced_name : ('loc, 'loc) Ast.JSX.NamespacedName.t) = + Ast.JSX.( + NamespacedName.( + let (loc, { namespace; name }) = namespaced_name in + let namespace' = this#jsx_identifier namespace in + let name' = this#jsx_identifier name in + if namespace == namespace' && name == name' then + namespaced_name + else + (loc, { namespace = namespace'; name = name' }))) + + method jsx_member_expression (member_exp : ('loc, 'loc) Ast.JSX.MemberExpression.t) = + Ast.JSX.( + let (loc, { MemberExpression._object; MemberExpression.property }) = member_exp in + let _object' = + match _object with + | MemberExpression.Identifier id -> + let id' = this#jsx_identifier id in + if id' == id then + _object + else + MemberExpression.Identifier id' + | MemberExpression.MemberExpression nested_exp -> + let nested_exp' = this#jsx_member_expression nested_exp in + if nested_exp' == nested_exp then + _object + else + MemberExpression.MemberExpression nested_exp' + in + let property' = this#jsx_identifier property in + if _object == _object' && property == property' then + member_exp + else + (loc, MemberExpression.{ _object = _object'; property = property' })) + + method jsx_identifier (id : 'loc Ast.JSX.Identifier.t) = id + + method labeled_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Labeled.t) = + Ast.Statement.Labeled.( + let { label; body } = stmt in + let label' = this#label_identifier label in + let body' = this#statement body in + if label == label' && body == body' then + stmt + else + { label = label'; body = body' }) + + method literal _loc (expr : 'loc Ast.Literal.t) = expr + + method logical _loc (expr : ('loc, 'loc) Ast.Expression.Logical.t) = + Ast.Expression.Logical.( + let { operator = _; left; right } = expr in + let left' = this#expression left in + let right' = this#expression right in + if left == left' && right == right' then + expr + else + { expr with left = left'; right = right' }) + + method member _loc (expr : ('loc, 'loc) Ast.Expression.Member.t) = + Ast.Expression.Member.( + let { _object; property } = expr in + let _object' = this#expression _object in + let property' = this#member_property property in + if _object == _object' && property == property' then + expr + else + { _object = _object'; property = property' }) + + method optional_member loc (expr : ('loc, 'loc) Ast.Expression.OptionalMember.t) = + Ast.Expression.OptionalMember.( + let { member; optional = _ } = expr in + let member' = this#member loc member in + if member == member' then + expr + else + { expr with member = member' }) + + method member_property 
(expr : ('loc, 'loc) Ast.Expression.Member.property) = + Ast.Expression.Member.( + match expr with + | PropertyIdentifier ident -> + id this#member_property_identifier ident expr (fun ident -> PropertyIdentifier ident) + | PropertyPrivateName ident -> + id this#member_private_name ident expr (fun ident -> PropertyPrivateName ident) + | PropertyExpression e -> + id this#member_property_expression e expr (fun e -> PropertyExpression e)) + + method member_property_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = + this#identifier ident + + method member_private_name (name : 'loc Ast.PrivateName.t) = this#private_name name + + method member_property_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr + + (* TODO *) + method meta_property _loc (expr : 'loc Ast.Expression.MetaProperty.t) = expr + + method new_ _loc (expr : ('loc, 'loc) Ast.Expression.New.t) = + Ast.Expression.New.( + let { callee; targs; arguments; comments } = expr in + let callee' = this#expression callee in + let targs' = map_opt this#type_parameter_instantiation_with_implicit targs in + let arguments' = ListUtils.ident_map this#expression_or_spread arguments in + let comments' = this#syntax_opt comments in + if callee == callee' && targs == targs' && arguments == arguments' && comments == comments' + then + expr + else + { callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }) + + method object_ _loc (expr : ('loc, 'loc) Ast.Expression.Object.t) = + Ast.Expression.Object.( + let { properties; comments } = expr in + let properties' = + ListUtils.ident_map + (fun prop -> + match prop with + | Property p -> + let p' = this#object_property p in + if p == p' then + prop + else + Property p' + | SpreadProperty s -> + let s' = this#spread_property s in + if s == s' then + prop + else + SpreadProperty s') + properties + in + let comments' = this#syntax_opt comments in + if properties == properties' && comments == comments' then + expr + else + { properties = properties'; comments = comments' }) + + method object_property (prop : ('loc, 'loc) Ast.Expression.Object.Property.t) = + Ast.Expression.Object.Property.( + match prop with + | (loc, Init { key; value; shorthand }) -> + let key' = this#object_key key in + let value' = this#expression value in + if key == key' && value == value' then + prop + else + (loc, Init { key = key'; value = value'; shorthand }) + | (loc, Method { key; value = fn }) -> + let key' = this#object_key key in + let fn' = map_loc this#function_expression fn in + if key == key' && fn == fn' then + prop + else + (loc, Method { key = key'; value = fn' }) + | (loc, Get { key; value = fn }) -> + let key' = this#object_key key in + let fn' = map_loc this#function_expression fn in + if key == key' && fn == fn' then + prop + else + (loc, Get { key = key'; value = fn' }) + | (loc, Set { key; value = fn }) -> + let key' = this#object_key key in + let fn' = map_loc this#function_expression fn in + if key == key' && fn == fn' then + prop + else + (loc, Set { key = key'; value = fn' })) + + method object_key (key : ('loc, 'loc) Ast.Expression.Object.Property.key) = + Ast.Expression.Object.Property.( + match key with + | Literal (loc, lit) -> id_loc this#literal loc lit key (fun lit -> Literal (loc, lit)) + | Identifier ident -> + id this#object_key_identifier ident key (fun ident -> Identifier ident) + | PrivateName ident -> id this#private_name ident key (fun ident -> PrivateName ident) + | Computed expr -> id this#expression expr key (fun expr -> Computed expr)) + + 
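(* Editorial sketch, not part of the patch: the methods in this hunk (for
 * example object_key and object_property just above) lean on a handful of
 * rewrite helpers -- id, id_loc, map_opt, map_loc and ListUtils.ident_map --
 * whose definitions sit outside this hunk. The OCaml below is a plausible
 * reconstruction inferred only from the call sites shown here, not the
 * verbatim library source. The point is the convention they encode: when the
 * mapped child is physically equal to the original, the original node is
 * returned, so untouched subtrees keep their identity and callers can use ==
 * to detect "nothing changed". *)
let id map x same diff =
  let x' = map x in
  if x' == x then same else diff x'

let id_loc map loc x same diff =
  let x' = map loc x in
  if x' == x then same else diff x'

let map_opt map opt =
  match opt with
  | Some x ->
    let x' = map x in
    if x' == x then opt else Some x'
  | None -> opt

let map_loc map ((loc, x) as annotated) =
  let x' = map loc x in
  if x' == x then annotated else (loc, x')

(* A plausible ListUtils.ident_map under the same assumption: the input list
 * is reused whenever no element changes. *)
let ident_map f lst =
  let (rev, changed) =
    List.fold_left
      (fun (acc, changed) x ->
        let x' = f x in
        (x' :: acc, changed || x' != x))
      ([], false)
      lst
  in
  if changed then List.rev rev else lst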
method object_key_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident + + method opaque_type _loc (otype : ('loc, 'loc) Ast.Statement.OpaqueType.t) = + Ast.Statement.OpaqueType.( + let { id; tparams; impltype; supertype } = otype in + let id' = this#identifier id in + let tparams' = map_opt this#type_parameter_declaration tparams in + let impltype' = map_opt this#type_ impltype in + let supertype' = map_opt this#type_ supertype in + if + id == id' + && impltype == impltype' + && tparams == tparams' + && impltype == impltype' + && supertype == supertype' + then + otype + else + { id = id'; tparams = tparams'; impltype = impltype'; supertype = supertype' }) + + method function_param_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = this#binding_pattern expr + + method variable_declarator_pattern ~kind (expr : ('loc, 'loc) Ast.Pattern.t) = + this#binding_pattern ~kind expr + + method catch_clause_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = + this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr + + method for_in_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = + this#assignment_pattern expr + + method for_of_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = + this#assignment_pattern expr + + method binding_pattern + ?(kind = Ast.Statement.VariableDeclaration.Var) (expr : ('loc, 'loc) Ast.Pattern.t) = + this#pattern ~kind expr + + method assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = this#pattern expr + + (* NOTE: Patterns are highly overloaded. A pattern can be a binding pattern, which has a kind (Var/Let/Const, with Var being the default for all pre-ES5 bindings), or an assignment pattern, which has no kind. Subterms that are patterns inherit the kind (or lack thereof). *) - method pattern ?kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - let open Flow_ast.Pattern in - let (loc, patt) = expr in - let patt' = match patt with - | Object { Object.properties; annot } -> - let properties' = ListUtils.ident_map (this#pattern_object_p ?kind) properties in - let annot' = map_opt this#type_annotation annot in - if properties' == properties && annot' == annot then patt - else Object { Object.properties = properties'; annot = annot' } - | Array { Array.elements; annot } -> - let elements' = ListUtils.ident_map (map_opt (this#pattern_array_e ?kind)) elements in - let annot' = map_opt this#type_annotation annot in - if elements' == elements && annot' == annot then patt - else Array { Array.elements = elements'; annot = annot' } - | Assignment { Assignment.left; right } -> - let left' = this#pattern_assignment_pattern ?kind left in - let right' = this#expression right in - if left == left' && right == right' then patt - else Assignment { Assignment.left = left'; right = right' } - | Identifier { Identifier.name; annot; optional } -> - let name' = this#pattern_identifier ?kind name in - let annot' = map_opt this#type_annotation annot in - if name == name' && annot == annot' then patt - else Identifier { Identifier.name = name'; annot = annot'; optional } - | Expression e -> - id this#pattern_expression e patt (fun e -> Expression e) - in - if patt == patt' then expr else (loc, patt') - - method pattern_identifier ?kind (ident: Loc.t Flow_ast.Identifier.t) = - ignore kind; - this#identifier ident - - method pattern_literal ?kind loc (expr: Flow_ast.Literal.t) = - ignore kind; - this#literal loc expr - - method pattern_object_p ?kind (p: (Loc.t, Loc.t) Flow_ast.Pattern.Object.property) = - let open Flow_ast.Pattern.Object in - match p with - | 
Property (loc, prop) -> - id (this#pattern_object_property ?kind) prop p (fun prop -> Property (loc, prop)) - | RestProperty (loc, prop) -> - id (this#pattern_object_rest_property ?kind) prop p (fun prop -> RestProperty (loc, prop)) - - method pattern_object_property ?kind (prop: (Loc.t, Loc.t) Flow_ast.Pattern.Object.Property.t') = - let open Flow_ast.Pattern.Object.Property in - let { key; pattern; shorthand = _ } = prop in - let key' = this#pattern_object_property_key ?kind key in - let pattern' = this#pattern_object_property_pattern ?kind pattern in - if key' == key && pattern' == pattern then prop - else { key = key'; pattern = pattern'; shorthand = false } - - method pattern_object_property_key ?kind (key: (Loc.t, Loc.t) Flow_ast.Pattern.Object.Property.key) = - let open Flow_ast.Pattern.Object.Property in - match key with - | Literal (loc, lit) -> - id_loc (this#pattern_object_property_literal_key ?kind) loc lit key (fun lit' -> Literal (loc, lit')) - | Identifier identifier -> - id (this#pattern_object_property_identifier_key ?kind) identifier key (fun id' -> Identifier id') - | Computed expr -> - id (this#pattern_object_property_computed_key ?kind) expr key (fun expr' -> Computed expr') - - method pattern_object_property_literal_key ?kind loc (key: Flow_ast.Literal.t) = - this#pattern_literal ?kind loc key - - method pattern_object_property_identifier_key ?kind (key: Loc.t Flow_ast.Identifier.t) = - this#pattern_identifier ?kind key - - method pattern_object_property_computed_key ?kind (key: (Loc.t, Loc.t) Flow_ast.Expression.t) = - ignore kind; - this#pattern_expression key - - method pattern_object_rest_property ?kind (prop: (Loc.t, Loc.t) Flow_ast.Pattern.Object.RestProperty.t') = - let open Flow_ast.Pattern.Object.RestProperty in - let { argument } = prop in - let argument' = this#pattern_object_rest_property_pattern ?kind argument in - if argument' == argument then prop - else { argument = argument' } - - method pattern_object_property_pattern ?kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern ?kind expr - - method pattern_object_rest_property_pattern ?kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern ?kind expr - - method pattern_array_e ?kind (e: (Loc.t, Loc.t) Flow_ast.Pattern.Array.element) = - let open Flow_ast.Pattern.Array in - match e with - | Element elem -> - id (this#pattern_array_element_pattern ?kind) elem e (fun elem -> Element elem) - | RestElement (loc, elem) -> - id (this#pattern_array_rest_element ?kind) elem e (fun elem -> RestElement (loc, elem)) - - method pattern_array_element_pattern ?kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern ?kind expr - - method pattern_array_rest_element ?kind (elem: (Loc.t, Loc.t) Flow_ast.Pattern.Array.RestElement.t') = - let open Flow_ast.Pattern.Array.RestElement in - let { argument } = elem in - let argument' = this#pattern_array_rest_element_pattern ?kind argument in - if argument' == argument then elem - else { argument = argument' } - - method pattern_array_rest_element_pattern ?kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern ?kind expr - - method pattern_assignment_pattern ?kind (expr: (Loc.t, Loc.t) Flow_ast.Pattern.t) = - this#pattern ?kind expr - - method pattern_expression (expr: (Loc.t, Loc.t) Flow_ast.Expression.t) = - this#expression expr - - method predicate_expression (expr: (Loc.t, Loc.t) Flow_ast.Expression.t) = - this#expression expr - - (* TODO *) - method function_rest_element (expr: (Loc.t, Loc.t) Flow_ast.Function.RestElement.t) = expr - - 
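(* Editorial sketch, not part of the patch: the ?kind parameter threaded
 * through the pattern_* methods (see the NOTE on binding vs. assignment
 * patterns above) is what lets a subclass tell binding positions apart. A
 * minimal, hypothetical override, assuming the new ['loc] mapper class is
 * instantiated at Loc.t; the class name and the print are illustration only.
 * Returning super's result leaves the node physically unchanged, so every
 * enclosing node is reused as well. *)
class log_let_bindings =
  object
    inherit [Loc.t] Flow_ast_mapper.mapper as super

    method! pattern_identifier ?kind ident =
      let open Flow_ast.Identifier in
      let (_, { name; _ }) = ident in
      (match kind with
      | Some Flow_ast.Statement.VariableDeclaration.Let ->
        print_endline ("let-bound: " ^ name)
      | _ -> ());
      super#pattern_identifier ?kind ident
  end

(* Possible usage: ignore ((new log_let_bindings)#program ast) *)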
method return _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Return.t) = - let open Flow_ast.Statement.Return in - let { argument } = stmt in - let argument' = map_opt this#expression argument in - if argument == argument' then stmt else { argument = argument' } - - method sequence _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Sequence.t) = - let open Flow_ast.Expression.Sequence in - let { expressions } = expr in - let expressions' = ListUtils.ident_map this#expression expressions in - if expressions == expressions' then expr else { expressions = expressions' } - - method toplevel_statement_list (stmts: (Loc.t, Loc.t) Flow_ast.Statement.t list) = - this#statement_list stmts - - method statement_list (stmts: (Loc.t, Loc.t) Flow_ast.Statement.t list) = - ListUtils.ident_map this#statement stmts - - method spread_element (expr: (Loc.t, Loc.t) Flow_ast.Expression.SpreadElement.t) = - let open Flow_ast.Expression.SpreadElement in - let loc, { argument } = expr in - id this#expression argument expr (fun argument -> loc, { argument }) - - method spread_property (expr: (Loc.t, Loc.t) Flow_ast.Expression.Object.SpreadProperty.t) = - let open Flow_ast.Expression.Object.SpreadProperty in - let (loc, { argument }) = expr in - id this#expression argument expr (fun argument -> loc, { argument }) - - method switch _loc (switch: (Loc.t, Loc.t) Flow_ast.Statement.Switch.t) = - let open Flow_ast.Statement.Switch in - let { discriminant; cases } = switch in - let discriminant' = this#expression discriminant in - let cases' = ListUtils.ident_map (map_loc this#switch_case) cases in - if discriminant == discriminant' && cases == cases' then switch - else { discriminant = discriminant'; cases = cases' } - - method switch_case _loc (case: (Loc.t, Loc.t) Flow_ast.Statement.Switch.Case.t') = - let open Flow_ast.Statement.Switch.Case in - let { test; consequent } = case in - let test' = map_opt this#expression test in - let consequent' = this#statement_list consequent in - if test == test' && consequent == consequent' then case - else { test = test'; consequent = consequent' } - - method tagged_template _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.TaggedTemplate.t) = - let open Flow_ast.Expression.TaggedTemplate in - let { tag; quasi } = expr in - let tag' = this#expression tag in - let quasi' = map_loc this#template_literal quasi in - if tag == tag' && quasi == quasi' then expr - else { tag = tag'; quasi = quasi' } - - method template_literal _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.TemplateLiteral.t) = - let open Flow_ast.Expression.TemplateLiteral in - let { quasis; expressions } = expr in - let quasis' = ListUtils.ident_map this#template_literal_element quasis in - let expressions' = ListUtils.ident_map this#expression expressions in - if quasis == quasis' && expressions == expressions' then expr - else { quasis = quasis'; expressions = expressions' } - - (* TODO *) - method template_literal_element (elem: Loc.t Flow_ast.Expression.TemplateLiteral.Element.t) = - elem - - method throw _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Throw.t) = - let open Flow_ast.Statement.Throw in - let { argument } = stmt in - id this#expression argument stmt (fun argument -> { argument }) - - method try_catch _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Try.t) = - let open Flow_ast.Statement.Try in - let { block; handler; finalizer } = stmt in - let block' = map_loc this#block block in - let handler' = match handler with - | Some (loc, clause) -> - id_loc this#catch_clause loc clause handler (fun clause -> Some (loc, 
clause)) - | None -> handler - in - let finalizer' = match finalizer with - | Some (finalizer_loc, block) -> - id_loc this#block finalizer_loc block finalizer (fun block -> Some (finalizer_loc, block)) - | None -> finalizer - in - if block == block' && handler == handler' && finalizer == finalizer' - then stmt - else { - block = block'; - handler = handler'; - finalizer = finalizer' - } - - method type_cast _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.TypeCast.t) = - let open Flow_ast.Expression.TypeCast in - let { expression; annot; } = expr in - let expression' = this#expression expression in - let annot' = this#type_annotation annot in - if expression' == expression && annot' == annot then expr - else { expression = expression'; annot = annot' } - - method unary_expression _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Unary.t) = - let open Flow_ast.Expression in - let { Unary.argument; operator = _; prefix = _ } = expr in - id this#expression argument expr - (fun argument -> { expr with Unary.argument }) - - method update_expression _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Update.t) = - let open Flow_ast.Expression.Update in - let { argument; operator = _; prefix = _ } = expr in - id this#expression argument expr (fun argument -> { expr with argument }) - - method variable_declaration _loc (decl: (Loc.t, Loc.t) Flow_ast.Statement.VariableDeclaration.t) = - let open Flow_ast.Statement.VariableDeclaration in - let { declarations; kind } = decl in - let decls' = ListUtils.ident_map (this#variable_declarator ~kind) declarations in - if declarations == decls' then decl - else { declarations = decls'; kind } - - method variable_declarator ~kind (decl: (Loc.t, Loc.t) Flow_ast.Statement.VariableDeclaration.Declarator.t) = - let open Flow_ast.Statement.VariableDeclaration.Declarator in - let (loc, { id; init }) = decl in - let id' = this#variable_declarator_pattern ~kind id in - let init' = map_opt this#expression init in - if id == id' && init == init' then decl - else (loc, { id = id'; init = init' }) - - method while_ _loc (stuff: (Loc.t, Loc.t) Flow_ast.Statement.While.t) = - let open Flow_ast.Statement.While in - let { test; body } = stuff in - let test' = this#predicate_expression test in - let body' = this#statement body in - if test == test' && body == body' then stuff - else { test = test'; body = body' } - - method with_ _loc (stuff: (Loc.t, Loc.t) Flow_ast.Statement.With.t) = - let open Flow_ast.Statement.With in - let { _object; body } = stuff in - let _object' = this#expression _object in - let body' = this#statement body in - if _object == _object' && body == body' then stuff - else { _object = _object'; body = body' } - - method type_alias _loc (stuff: (Loc.t, Loc.t) Flow_ast.Statement.TypeAlias.t) = - let open Flow_ast.Statement.TypeAlias in - let { id; tparams; right } = stuff in - let id' = this#identifier id in - let tparams' = map_opt this#type_parameter_declaration tparams in - let right' = this#type_ right in - if id == id' && right == right' && tparams == tparams' then stuff - else { id = id'; tparams = tparams'; right = right' } - - (* TODO *) - method yield _loc (expr: (Loc.t, Loc.t) Flow_ast.Expression.Yield.t) = expr - -end - -let fold_program mappers ast = - List.fold_left (fun ast (m: mapper) -> m#program ast) ast mappers + method pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + Ast.Pattern.( + let (loc, patt) = expr in + let patt' = + match patt with + | Object { Object.properties; annot } -> + let properties' = ListUtils.ident_map 
(this#pattern_object_p ?kind) properties in + let annot' = this#type_annotation_hint annot in + if properties' == properties && annot' == annot then + patt + else + Object { Object.properties = properties'; annot = annot' } + | Array { Array.elements; annot; comments } -> + let elements' = ListUtils.ident_map (map_opt (this#pattern_array_e ?kind)) elements in + let annot' = this#type_annotation_hint annot in + let comments' = this#syntax_opt comments in + if comments == comments' && elements' == elements && annot' == annot then + patt + else + Array { Array.elements = elements'; annot = annot'; comments = comments' } + | Identifier { Identifier.name; annot; optional } -> + let name' = this#pattern_identifier ?kind name in + let annot' = this#type_annotation_hint annot in + if name == name' && annot == annot' then + patt + else + Identifier { Identifier.name = name'; annot = annot'; optional } + | Expression e -> id this#pattern_expression e patt (fun e -> Expression e) + in + if patt == patt' then + expr + else + (loc, patt')) + + method pattern_identifier ?kind (ident : ('loc, 'loc) Ast.Identifier.t) = + ignore kind; + this#identifier ident + + method pattern_literal ?kind loc (expr : 'loc Ast.Literal.t) = + ignore kind; + this#literal loc expr + + method pattern_object_p ?kind (p : ('loc, 'loc) Ast.Pattern.Object.property) = + Ast.Pattern.Object.( + match p with + | Property (loc, prop) -> + id (this#pattern_object_property ?kind) prop p (fun prop -> Property (loc, prop)) + | RestProperty (loc, prop) -> + id (this#pattern_object_rest_property ?kind) prop p (fun prop -> + RestProperty (loc, prop))) + + method pattern_object_property ?kind (prop : ('loc, 'loc) Ast.Pattern.Object.Property.t') = + Ast.Pattern.Object.Property.( + let { key; pattern; default; shorthand = _ } = prop in + let key' = this#pattern_object_property_key ?kind key in + let pattern' = this#pattern_object_property_pattern ?kind pattern in + let default' = map_opt this#expression default in + if key' == key && pattern' == pattern && default' == default then + prop + else + { key = key'; pattern = pattern'; default = default'; shorthand = false }) + + method pattern_object_property_key ?kind (key : ('loc, 'loc) Ast.Pattern.Object.Property.key) = + Ast.Pattern.Object.Property.( + match key with + | Literal (loc, lit) -> + id_loc (this#pattern_object_property_literal_key ?kind) loc lit key (fun lit' -> + Literal (loc, lit')) + | Identifier identifier -> + id (this#pattern_object_property_identifier_key ?kind) identifier key (fun id' -> + Identifier id') + | Computed expr -> + id (this#pattern_object_property_computed_key ?kind) expr key (fun expr' -> + Computed expr')) + + method pattern_object_property_literal_key ?kind loc (key : 'loc Ast.Literal.t) = + this#pattern_literal ?kind loc key + + method pattern_object_property_identifier_key ?kind (key : ('loc, 'loc) Ast.Identifier.t) = + this#pattern_identifier ?kind key + + method pattern_object_property_computed_key ?kind (key : ('loc, 'loc) Ast.Expression.t) = + ignore kind; + this#pattern_expression key + + method pattern_object_rest_property + ?kind (prop : ('loc, 'loc) Ast.Pattern.Object.RestProperty.t') = + Ast.Pattern.Object.RestProperty.( + let { argument } = prop in + let argument' = this#pattern_object_rest_property_pattern ?kind argument in + if argument' == argument then + prop + else + { argument = argument' }) + + method pattern_object_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + this#pattern ?kind expr + + method 
pattern_object_rest_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + this#pattern ?kind expr + + method pattern_array_e ?kind (e : ('loc, 'loc) Ast.Pattern.Array.element) = + Ast.Pattern.Array.( + match e with + | Element (loc, elem) -> + id (this#pattern_array_element ?kind) elem e (fun elem -> Element (loc, elem)) + | RestElement (loc, elem) -> + id (this#pattern_array_rest_element ?kind) elem e (fun elem -> RestElement (loc, elem))) + + method pattern_array_element ?kind (elem : ('loc, 'loc) Ast.Pattern.Array.Element.t') = + Ast.Pattern.Array.Element.( + let { argument; default } = elem in + let argument' = this#pattern_array_element_pattern ?kind argument in + let default' = map_opt this#expression default in + if argument == argument' && default == default' then + elem + else + { argument = argument'; default = default' }) + + method pattern_array_element_pattern ?kind (patt : ('loc, 'loc) Ast.Pattern.t) = + this#pattern ?kind patt + + method pattern_array_rest_element ?kind (elem : ('loc, 'loc) Ast.Pattern.Array.RestElement.t') + = + Ast.Pattern.Array.RestElement.( + let { argument } = elem in + let argument' = this#pattern_array_rest_element_pattern ?kind argument in + if argument' == argument then + elem + else + { argument = argument' }) + + method pattern_array_rest_element_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + this#pattern ?kind expr + + method pattern_assignment_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + this#pattern ?kind expr + + method pattern_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr + + method predicate_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr + + method function_rest_param (expr : ('loc, 'loc) Ast.Function.RestParam.t) = + Ast.Function.RestParam.( + let (loc, { argument }) = expr in + id this#binding_pattern argument expr (fun argument -> (loc, { argument }))) + + method return _loc (stmt : ('loc, 'loc) Ast.Statement.Return.t) = + Ast.Statement.Return.( + let { argument; comments } = stmt in + let argument' = map_opt this#expression argument in + let comments' = this#syntax_opt comments in + if argument == argument' && comments == comments' then + stmt + else + { argument = argument'; comments = comments' }) + + method sequence _loc (expr : ('loc, 'loc) Ast.Expression.Sequence.t) = + Ast.Expression.Sequence.( + let { expressions } = expr in + let expressions' = ListUtils.ident_map this#expression expressions in + if expressions == expressions' then + expr + else + { expressions = expressions' }) + + method toplevel_statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) = + this#statement_list stmts + + method statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) = + ListUtils.ident_map_multiple this#statement_fork_point stmts + + method statement_fork_point (stmt : ('loc, 'loc) Ast.Statement.t) = [this#statement stmt] + + method spread_element (expr : ('loc, 'loc) Ast.Expression.SpreadElement.t) = + Ast.Expression.SpreadElement.( + let (loc, { argument }) = expr in + id this#expression argument expr (fun argument -> (loc, { argument }))) + + method spread_property (expr : ('loc, 'loc) Ast.Expression.Object.SpreadProperty.t) = + Ast.Expression.Object.SpreadProperty.( + let (loc, { argument }) = expr in + id this#expression argument expr (fun argument -> (loc, { argument }))) + + method switch _loc (switch : ('loc, 'loc) Ast.Statement.Switch.t) = + Ast.Statement.Switch.( + let { discriminant; cases } = switch in + let discriminant' = this#expression discriminant 
in + let cases' = ListUtils.ident_map (map_loc this#switch_case) cases in + if discriminant == discriminant' && cases == cases' then + switch + else + { discriminant = discriminant'; cases = cases' }) + + method switch_case _loc (case : ('loc, 'loc) Ast.Statement.Switch.Case.t') = + Ast.Statement.Switch.Case.( + let { test; consequent } = case in + let test' = map_opt this#expression test in + let consequent' = this#statement_list consequent in + if test == test' && consequent == consequent' then + case + else + { test = test'; consequent = consequent' }) + + method tagged_template _loc (expr : ('loc, 'loc) Ast.Expression.TaggedTemplate.t) = + Ast.Expression.TaggedTemplate.( + let { tag; quasi } = expr in + let tag' = this#expression tag in + let quasi' = map_loc this#template_literal quasi in + if tag == tag' && quasi == quasi' then + expr + else + { tag = tag'; quasi = quasi' }) + + method template_literal _loc (expr : ('loc, 'loc) Ast.Expression.TemplateLiteral.t) = + Ast.Expression.TemplateLiteral.( + let { quasis; expressions } = expr in + let quasis' = ListUtils.ident_map this#template_literal_element quasis in + let expressions' = ListUtils.ident_map this#expression expressions in + if quasis == quasis' && expressions == expressions' then + expr + else + { quasis = quasis'; expressions = expressions' }) + + (* TODO *) + method template_literal_element (elem : 'loc Ast.Expression.TemplateLiteral.Element.t) = elem + + method throw _loc (stmt : ('loc, 'loc) Ast.Statement.Throw.t) = + Ast.Statement.Throw.( + let { argument } = stmt in + id this#expression argument stmt (fun argument -> { argument })) + + method try_catch _loc (stmt : ('loc, 'loc) Ast.Statement.Try.t) = + Ast.Statement.Try.( + let { block; handler; finalizer; comments } = stmt in + let block' = map_loc this#block block in + let handler' = + match handler with + | Some (loc, clause) -> + id_loc this#catch_clause loc clause handler (fun clause -> Some (loc, clause)) + | None -> handler + in + let finalizer' = + match finalizer with + | Some (finalizer_loc, block) -> + id_loc this#block finalizer_loc block finalizer (fun block -> + Some (finalizer_loc, block)) + | None -> finalizer + in + let comments' = this#syntax_opt comments in + if + block == block' + && handler == handler' + && finalizer == finalizer' + && comments == comments' + then + stmt + else + { block = block'; handler = handler'; finalizer = finalizer'; comments = comments' }) + + method type_cast _loc (expr : ('loc, 'loc) Ast.Expression.TypeCast.t) = + Ast.Expression.TypeCast.( + let { expression; annot } = expr in + let expression' = this#expression expression in + let annot' = this#type_annotation annot in + if expression' == expression && annot' == annot then + expr + else + { expression = expression'; annot = annot' }) + + method unary_expression _loc (expr : ('loc, 'loc) Flow_ast.Expression.Unary.t) = + Flow_ast.Expression.Unary.( + let { argument; operator = _; comments } = expr in + let argument' = this#expression argument in + let comments' = this#syntax_opt comments in + if argument == argument' && comments == comments' then + expr + else + { expr with argument = argument'; comments = comments' }) + + method update_expression _loc (expr : ('loc, 'loc) Ast.Expression.Update.t) = + Ast.Expression.Update.( + let { argument; operator = _; prefix = _ } = expr in + id this#expression argument expr (fun argument -> { expr with argument })) + + method variable_declaration _loc (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.t) = + 
Ast.Statement.VariableDeclaration.( + let { declarations; kind } = decl in + let decls' = ListUtils.ident_map (this#variable_declarator ~kind) declarations in + if declarations == decls' then + decl + else + { declarations = decls'; kind }) + + method variable_declarator + ~kind (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.Declarator.t) = + Ast.Statement.VariableDeclaration.Declarator.( + let (loc, { id; init }) = decl in + let id' = this#variable_declarator_pattern ~kind id in + let init' = map_opt this#expression init in + if id == id' && init == init' then + decl + else + (loc, { id = id'; init = init' })) + + method while_ _loc (stuff : ('loc, 'loc) Ast.Statement.While.t) = + Ast.Statement.While.( + let { test; body } = stuff in + let test' = this#predicate_expression test in + let body' = this#statement body in + if test == test' && body == body' then + stuff + else + { test = test'; body = body' }) + + method with_ _loc (stuff : ('loc, 'loc) Ast.Statement.With.t) = + Ast.Statement.With.( + let { _object; body } = stuff in + let _object' = this#expression _object in + let body' = this#statement body in + if _object == _object' && body == body' then + stuff + else + { _object = _object'; body = body' }) + + method type_alias _loc (stuff : ('loc, 'loc) Ast.Statement.TypeAlias.t) = + Ast.Statement.TypeAlias.( + let { id; tparams; right } = stuff in + let id' = this#identifier id in + let tparams' = map_opt this#type_parameter_declaration tparams in + let right' = this#type_ right in + if id == id' && right == right' && tparams == tparams' then + stuff + else + { id = id'; tparams = tparams'; right = right' }) + + method yield _loc (expr : ('loc, 'loc) Ast.Expression.Yield.t) = + Ast.Expression.Yield.( + let { argument; delegate; comments } = expr in + let argument' = map_opt this#expression argument in + let comments' = this#syntax_opt comments in + if comments = comments' && argument == argument' then + expr + else + { argument = argument'; delegate; comments = comments' }) + end + +let fold_program (mappers : 'a mapper list) ast = + List.fold_left (fun ast (m : 'a mapper) -> m#program ast) ast mappers diff --git a/src/parser_utils/flow_ast_visitor.ml b/src/parser_utils/flow_ast_visitor.ml index 4af42be8d7f..2356681cb5c 100644 --- a/src/parser_utils/flow_ast_visitor.ml +++ b/src/parser_utils/flow_ast_visitor.ml @@ -1,29 +1,31 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -let run: 'node. ('node -> 'node) -> 'node -> unit = - fun visit node -> ignore @@ (visit node) +let run : 'node. ('node -> 'node) -> 'node -> unit = (fun visit node -> ignore @@ visit node) -let run_opt: 'node. ('node -> 'node) -> 'node option -> unit = - fun visit -> Option.iter ~f:(run visit) +let run_opt : 'node. ('node -> 'node) -> 'node option -> unit = + (fun visit -> Option.iter ~f:(run visit)) -let run_list: 'node. ('node -> 'node) -> 'node list -> unit = - fun visit -> List.iter (run visit) +let run_list : 'node. 
('node -> 'node) -> 'node list -> unit = (fun visit -> List.iter (run visit)) -class ['acc] visitor ~init = object(this) - inherit Flow_ast_mapper.mapper +class ['acc, 'loc] visitor ~init = + object (this) + inherit ['loc] Flow_ast_mapper.mapper - val mutable acc: 'acc = init - method acc = acc - method set_acc x = acc <- x - method update_acc f = acc <- f acc + val mutable acc : 'acc = init - method eval: 'node. ('node -> 'node) -> 'node -> 'acc = - fun visit node -> - run visit node; - this#acc -end + method acc = acc + + method set_acc x = acc <- x + + method update_acc f = acc <- f acc + + method eval : 'node. ('node -> 'node) -> 'node -> 'acc = + fun visit node -> + run visit node; + this#acc + end diff --git a/src/parser_utils/flow_polymorphic_ast_mapper.ml b/src/parser_utils/flow_polymorphic_ast_mapper.ml index 8c0f4622ca6..370069ca6f1 100644 --- a/src/parser_utils/flow_polymorphic_ast_mapper.ml +++ b/src/parser_utils/flow_polymorphic_ast_mapper.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,1434 +7,1530 @@ module Ast = Flow_ast -let ( * ) : 'a 'b 'c 'd . ('a -> 'c) -> ('b -> 'd) -> ('a * 'b) -> ('c * 'd) = - fun f g (x, y) -> (f x, g y) +let ( * ) : 'a 'b 'c 'd. ('a -> 'c) -> ('b -> 'd) -> 'a * 'b -> 'c * 'd = + (fun f g (x, y) -> (f x, g y)) -let id : 'a . 'a -> 'a = fun x -> x +let id : 'a. 'a -> 'a = (fun x -> x) -class virtual ['M, 'T, 'N, 'U] mapper = object(this) +class virtual ['M, 'T, 'N, 'U] mapper = + object (this) + method virtual on_loc_annot : 'M -> 'N - method virtual on_loc_annot : 'M -> 'N - method virtual on_type_annot : 'T -> 'U + method virtual on_type_annot : 'T -> 'U - method program (program: ('M, 'T) Ast.program) : ('N, 'U) Ast.program = - let (annot, statements, comments) = program in - let annot' = this#on_loc_annot annot in - let statements' = this#toplevel_statement_list statements in - let comments' = List.map (this#comment) comments in - annot', statements', comments' - - method statement ((annot, stmt): ('M, 'T) Ast.Statement.t) : ('N, 'U) Ast.Statement.t = - let open Ast.Statement in - this#on_loc_annot annot, - match stmt with - | Block block -> Block (this#block block) - - | Break break -> Break (this#break break) - - | ClassDeclaration cls -> ClassDeclaration (this#class_ cls) - - | Continue cont -> Continue (this#continue cont) - - | Debugger -> Debugger - - | DeclareClass stuff -> DeclareClass (this#declare_class stuff) - - | DeclareExportDeclaration decl -> - DeclareExportDeclaration (this#declare_export_declaration annot decl) - - | DeclareFunction stuff -> DeclareFunction (this#declare_function stuff) - - | DeclareInterface stuff -> DeclareInterface (this#declare_interface stuff) - - | DeclareModule m -> DeclareModule (this#declare_module annot m) - - | DeclareTypeAlias stuff -> DeclareTypeAlias (this#declare_type_alias stuff) - - | DeclareVariable stuff -> DeclareVariable (this#declare_variable stuff) - - | DeclareModuleExports t_annot -> - DeclareModuleExports (this#declare_module_exports annot t_annot) - - | DoWhile stuff -> DoWhile (this#do_while stuff) - - | Empty -> - this#empty (); - Empty - - | ExportDefaultDeclaration decl -> - ExportDefaultDeclaration (this#export_default_declaration annot decl) - - | ExportNamedDeclaration decl -> - ExportNamedDeclaration (this#export_named_declaration annot decl) - - | Expression expr -> 
Expression (this#expression_statement expr) - - | For for_stmt -> For (this#for_statement for_stmt) - - | ForIn stuff -> ForIn (this#for_in_statement stuff) - - | ForOf stuff -> ForOf (this#for_of_statement stuff) - - | FunctionDeclaration func -> - FunctionDeclaration (this#function_declaration func) - - | If if_stmt -> If (this#if_statement if_stmt) - - | ImportDeclaration decl -> - ImportDeclaration (this#import_declaration annot decl) - - | InterfaceDeclaration stuff -> - InterfaceDeclaration (this#interface_declaration stuff) - - | Labeled label -> Labeled (this#labeled_statement label) - - | OpaqueType otype -> OpaqueType (this#opaque_type otype) - - | Return ret -> Return (this#return ret) - - | Switch switch -> Switch (this#switch switch) - - | Throw throw -> Throw (this#throw throw) - - | Try try_stmt -> Try (this#try_catch try_stmt) - - | VariableDeclaration decl -> - VariableDeclaration (this#variable_declaration decl) - - | While stuff -> While (this#while_ stuff) - - | With stuff -> With (this#with_ stuff) - - | TypeAlias stuff -> TypeAlias (this#type_alias stuff) - - | DeclareOpaqueType otype -> - DeclareOpaqueType (this#declare_opaque_type otype) - - method comment ((annot, c): 'M Ast.Comment.t) : 'N Ast.Comment.t = - this#on_loc_annot annot, c - - method expression ((annot, expr'): ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = - let open Ast.Expression in - this#on_type_annot annot, - match expr' with - | This -> This - | Super -> Super - | Array x -> Array (this#array x) - | ArrowFunction x -> ArrowFunction (this#arrow_function x) - | Assignment x -> Assignment (this#assignment x) - | Binary x -> Binary (this#binary x) - | Call x -> Call (this#call annot x) - | Class x -> Class (this#class_ x) - | Comprehension x -> Comprehension (this#comprehension x) - | Conditional x -> Conditional (this#conditional x) - | Function x -> Function (this#function_ x) - | Generator x -> Generator (this#generator x) - | Identifier x -> Identifier (this#t_identifier x) - | Import x -> Import (this#import annot x) - | JSXElement x -> JSXElement (this#jsx_element x) - | JSXFragment x -> JSXFragment (this#jsx_fragment x) - | Literal x -> Literal (this#literal x) - | Logical x -> Logical (this#logical x) - | Member x -> Member (this#member x) - | MetaProperty x -> MetaProperty (this#meta_property x) - | New x -> New (this#new_ x) - | Object x -> Object (this#object_ x) - | OptionalCall x -> OptionalCall (this#optional_call annot x) - | OptionalMember x -> OptionalMember (this#optional_member x) - | Sequence x -> Sequence (this#sequence x) - | TaggedTemplate x -> TaggedTemplate (this#tagged_template x) - | TemplateLiteral x -> TemplateLiteral (this#template_literal x) - | TypeCast x -> TypeCast (this#type_cast x) - | Unary x -> Unary (this#unary_expression x) - | Update x -> Update (this#update_expression x) - | Yield x -> Yield (this#yield x) - - method array (expr: ('M, 'T) Ast.Expression.Array.t) : ('N, 'U) Ast.Expression.Array.t = - let open Ast.Expression in - let { Array.elements } = expr in - let elements' = List.map (Option.map ~f:this#expression_or_spread) elements in - { Array.elements = elements' } - - method arrow_function (expr: ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = - this#function_ expr - - method assignment (expr: ('M, 'T) Ast.Expression.Assignment.t) - : ('N, 'U) Ast.Expression.Assignment.t = - let open Ast.Expression.Assignment in - let { operator; left; right } = expr in - let left' = this#assignment_pattern left in - let right' = this#expression right 
in - { operator; left = left'; right = right' } - - method binary (expr: ('M, 'T) Ast.Expression.Binary.t) : ('N, 'U) Ast.Expression.Binary.t = - let open Ast.Expression.Binary in - let { operator; left; right } = expr in - let left' = this#expression left in - let right' = this#expression right in - { operator; left = left'; right = right' } - - method block (stmt: ('M, 'T) Ast.Statement.Block.t) : ('N, 'U) Ast.Statement.Block.t = - let open Ast.Statement.Block in - let { body } = stmt in - let body' = this#statement_list body in - { body = body' } - - method break (break: 'M Ast.Statement.Break.t) : 'N Ast.Statement.Break.t = - let open Ast.Statement.Break in - let { label } = break in - let label' = Option.map ~f:this#label_identifier label in - { label = label' } - - method call _annot (expr: ('M, 'T) Ast.Expression.Call.t) : ('N, 'U) Ast.Expression.Call.t = - let open Ast.Expression.Call in - let { callee; targs; arguments } = expr in - let callee' = this#expression callee in - let targs' = Option.map ~f:this#type_parameter_instantiation targs in - let arguments' = List.map this#expression_or_spread arguments in - { callee = callee'; targs = targs'; arguments = arguments' } - - method optional_call annot (expr: ('M, 'T) Ast.Expression.OptionalCall.t) - : ('N, 'U) Ast.Expression.OptionalCall.t = - let open Ast.Expression.OptionalCall in - let { call; optional; } = expr in - let call' = this#call annot call in - { call = call'; optional } - - method catch_clause (clause: ('M, 'T) Ast.Statement.Try.CatchClause.t') - : ('N, 'U) Ast.Statement.Try.CatchClause.t' = - let open Ast.Statement.Try.CatchClause in - let { param; body } = clause in - let param' = Option.map ~f:this#catch_clause_pattern param in - let body' = (this#on_loc_annot * this#block) body in - { param = param'; body = body' } - - method class_ (cls: ('M, 'T) Ast.Class.t) : ('N, 'U) Ast.Class.t = - let open Ast.Class in - let { id; body; tparams; extends; implements; classDecorators; } = cls in - let id' = Option.map ~f:this#class_identifier id in - this#type_parameter_declaration_opt tparams (fun tparams' -> - let extends' = Option.map ~f:this#class_extends extends in - let body' = this#class_body body in - let implements' = List.map this#class_implements implements in - let classDecorators' = List.map this#class_decorator classDecorators in - { - id = id'; - body = body'; - tparams = tparams'; - extends = extends'; - implements = implements'; - classDecorators = classDecorators'; - } - ) - - method class_extends (extends : ('M, 'T) Ast.Class.Extends.t) : ('N, 'U) Ast.Class.Extends.t = - let open Ast.Class.Extends in - let annot, { expr; targs } = extends in - let expr' = this#expression expr in - let targs' = Option.map ~f:this#type_parameter_instantiation targs in - this#on_loc_annot annot, { expr = expr'; targs = targs' } - - method class_decorator (dec : ('M, 'T) Ast.Class.Decorator.t) : ('N, 'U) Ast.Class.Decorator.t = - let open Ast.Class.Decorator in - let annot, { expression } = dec in - let expression' = this#expression expression in - this#on_loc_annot annot, { expression = expression' } - - method class_identifier (ident: 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let ident - - method class_body (cls_body: ('M, 'T) Ast.Class.Body.t) : ('N, 'U) Ast.Class.Body.t = - let open Ast.Class.Body in - let annot, { body } = cls_body in - let body' = List.map this#class_element body in - this#on_type_annot annot, { body = body' } - - method class_element 
(elem: ('M, 'T) Ast.Class.Body.element) : ('N, 'U) Ast.Class.Body.element = - let open Ast.Class.Body in - match elem with - | Method (annot, meth) -> - Method (this#on_type_annot annot, this#class_method meth) - | Property (annot, prop) -> - Property (this#on_type_annot annot, this#class_property prop) - | PrivateField (annot, field) -> - PrivateField (this#on_type_annot annot, this#class_private_field field) - - method class_method (meth: ('M, 'T) Ast.Class.Method.t') : ('N, 'U) Ast.Class.Method.t' = - let open Ast.Class.Method in - let { kind; key; value; static; decorators; } = meth in - let key' = this#object_key key in - let value' = (this#on_loc_annot * this#function_) value in - let decorators' = List.map this#class_decorator decorators in - { kind; key = key'; value = value'; static; decorators = decorators' } - - method class_property (prop: ('M, 'T) Ast.Class.Property.t') : ('N, 'U) Ast.Class.Property.t' = - let open Ast.Class.Property in - let { key; value; annot; static; variance; } = prop in - let key' = this#object_key key in - let value' = Option.map ~f:this#expression value in - let annot' = Option.map ~f:this#type_annotation annot in - let variance' = Option.map ~f:(this#on_loc_annot * id) variance in - { key = key'; value = value'; annot = annot'; static; variance = variance'; } - - method class_private_field (prop: ('M, 'T) Ast.Class.PrivateField.t') = - let open Ast.Class.PrivateField in - let { key; value; annot; static; variance; } = prop in - let key' = this#private_name key in - let value' = Option.map ~f:this#expression value in - let annot' = Option.map ~f:this#type_annotation annot in - let variance' = Option.map ~f:(this#on_loc_annot * id) variance in - { key = key'; value = value'; annot = annot'; static; variance = variance' } - - method comprehension (expr: ('M, 'T) Ast.Expression.Comprehension.t) - : ('N, 'U) Ast.Expression.Comprehension.t = - let open Ast.Expression.Comprehension in - let { blocks; filter } = expr in - let blocks' = List.map this#comprehension_block blocks in - let filter' = Option.map ~f:this#expression filter in - { blocks = blocks'; filter = filter' } - - method comprehension_block (block : ('M, 'T) Ast.Expression.Comprehension.Block.t) - : ('N, 'U) Ast.Expression.Comprehension.Block.t = - let open Ast.Expression.Comprehension.Block in - let annot, { left; right; each } = block in - let left' = this#pattern left in - let right' = this#expression right in - this#on_loc_annot annot, { left = left'; right = right'; each } - - method conditional (expr: ('M, 'T) Ast.Expression.Conditional.t) - : ('N, 'U) Ast.Expression.Conditional.t = - let open Ast.Expression.Conditional in - let { test; consequent; alternate } = expr in - let test' = this#predicate_expression test in - let consequent' = this#expression consequent in - let alternate' = this#expression alternate in - { test = test'; consequent = consequent'; alternate = alternate' } - - method continue (cont: 'M Ast.Statement.Continue.t) : 'N Ast.Statement.Continue.t = - let open Ast.Statement.Continue in - let { label } = cont in - let label' = Option.map ~f:this#label_identifier label in - { label = label' } - - method debugger () = - () - - method declare_class (decl: ('M, 'T) Ast.Statement.DeclareClass.t) - : ('N, 'U) Ast.Statement.DeclareClass.t = - let open Ast.Statement.DeclareClass in - let { id = ident; tparams; body; extends; mixins; implements } = decl in - let id' = this#class_identifier ident in - this#type_parameter_declaration_opt tparams (fun tparams' -> - let body' = - let 
a, b = body in - this#on_loc_annot a, this#object_type b in - let extends' = Option.map ~f:(this#on_loc_annot * this#generic_type) extends in - let mixins' = List.map (this#on_loc_annot * this#generic_type) mixins in - let implements' = List.map this#class_implements implements in - { - id = id'; - tparams = tparams'; - body = body'; - extends = extends'; - mixins = mixins'; - implements = implements'; - } - ) - - method class_implements (implements: ('M, 'T) Ast.Class.Implements.t) - : ('N, 'U) Ast.Class.Implements.t = - let open Ast.Class.Implements in - let annot, { id = id_; targs } = implements in - let id' = this#t_identifier id_ in - let targs' = Option.map ~f:this#type_parameter_instantiation targs in - this#on_loc_annot annot, { id = id'; targs = targs' } - - method declare_export_declaration _annot - (decl: ('M, 'T) Ast.Statement.DeclareExportDeclaration.t) - : ('N, 'U) Ast.Statement.DeclareExportDeclaration.t = - let open Ast.Statement.DeclareExportDeclaration in - let { default; source; specifiers; declaration } = decl in - let default' = Option.map ~f:this#on_loc_annot default in - let source' = Option.map ~f:(this#on_loc_annot * id) source in - let specifiers' = Option.map ~f:this#export_named_specifier specifiers in - let declaration' = Option.map ~f:this#declare_export_declaration_decl declaration in - { - default = default'; - source = source'; - specifiers = specifiers'; - declaration = declaration' - } - - method declare_export_declaration_decl - (decl: ('M, 'T) Ast.Statement.DeclareExportDeclaration.declaration) - : ('N, 'U) Ast.Statement.DeclareExportDeclaration.declaration = - let open Ast.Statement.DeclareExportDeclaration in - match decl with - | Variable (annot, decl_var) -> - Variable (this#on_loc_annot annot, this#declare_variable decl_var) - | Function (annot, decl_func) -> - Function (this#on_loc_annot annot, this#declare_function decl_func) - | Class (annot, decl_class) -> - Class (this#on_loc_annot annot, this#declare_class decl_class) - | DefaultType t -> - DefaultType (this#type_ t) - | NamedType (annot, alias) -> - NamedType (this#on_loc_annot annot, this#type_alias alias) - | NamedOpaqueType (annot, ot) -> - NamedOpaqueType (this#on_loc_annot annot, this#opaque_type ot) - | Interface (annot, iface) -> - Interface (this#on_loc_annot annot, this#interface iface) - - method declare_function (decl: ('M, 'T) Ast.Statement.DeclareFunction.t) - : ('N, 'U) Ast.Statement.DeclareFunction.t = - let open Ast.Statement.DeclareFunction in - let { id = ident; annot; predicate } = decl in - let id' = this#function_identifier ident in - let annot' = this#type_annotation annot in - let predicate' = Option.map ~f:this#type_predicate predicate in - { id = id'; annot = annot'; predicate = predicate' } - - method declare_interface (decl: ('M, 'T) Ast.Statement.Interface.t) - : ('N, 'U) Ast.Statement.Interface.t = - this#interface decl - - method declare_module _annot (m: ('M, 'T) Ast.Statement.DeclareModule.t) - : ('N, 'U) Ast.Statement.DeclareModule.t = - let open Ast.Statement.DeclareModule in - let { id; body; kind } = m in - let id' = match id with - | Identifier (annot, name) -> Identifier (this#on_type_annot annot, name) - | Literal (annot, name) -> Literal (this#on_type_annot annot, name) - in - let kind' = match kind with - | CommonJS annot -> CommonJS (this#on_loc_annot annot) - | ES annot -> ES (this#on_loc_annot annot) - in - let body' = (this#on_loc_annot * this#block) body in - { id = id'; body = body'; kind = kind' } - - method declare_module_exports _annot 
(t_annot: ('M, 'T) Ast.Type.annotation)
-    : ('N, 'U) Ast.Type.annotation =
-    this#type_annotation t_annot
-
-  method declare_type_alias (decl: ('M, 'T) Ast.Statement.TypeAlias.t)
-    : ('N, 'U) Ast.Statement.TypeAlias.t =
-    this#type_alias decl
-
-  method declare_variable (decl: ('M, 'T) Ast.Statement.DeclareVariable.t)
-    : ('N, 'U) Ast.Statement.DeclareVariable.t =
-    let open Ast.Statement.DeclareVariable in
-    let { id = ident; annot } = decl in
-    let id' = this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident in
-    let annot' = Option.map ~f:this#type_annotation annot in
-    { id = id'; annot = annot' }
-
-  method do_while (stuff: ('M, 'T) Ast.Statement.DoWhile.t) : ('N, 'U) Ast.Statement.DoWhile.t =
-    let open Ast.Statement.DoWhile in
-    let { body; test } = stuff in
-    let body' = this#statement body in
-    let test' = this#predicate_expression test in
-    { body = body'; test = test' }
-
-  method empty () =
-    ()
-
-  method export_default_declaration _loc (decl: ('M, 'T) Ast.Statement.ExportDefaultDeclaration.t)
-    : ('N, 'U) Ast.Statement.ExportDefaultDeclaration.t =
-    let open Ast.Statement.ExportDefaultDeclaration in
-    let { default; declaration } = decl in
-    let default' = this#on_loc_annot default in
-    let declaration' = this#export_default_declaration_decl declaration in
-    { default = default'; declaration = declaration' }
-
-  method export_default_declaration_decl
-    (decl: ('M, 'T) Ast.Statement.ExportDefaultDeclaration.declaration)
-    : ('N, 'U) Ast.Statement.ExportDefaultDeclaration.declaration =
-    let open Ast.Statement.ExportDefaultDeclaration in
-    match decl with
-    | Declaration stmt -> Declaration (this#statement stmt)
-    | Expression expr -> Expression (this#expression expr)
-
-  method export_named_declaration _loc (decl: ('M, 'T) Ast.Statement.ExportNamedDeclaration.t)
-    : ('N, 'U) Ast.Statement.ExportNamedDeclaration.t =
-    let open Ast.Statement.ExportNamedDeclaration in
-    let { exportKind; source; specifiers; declaration } = decl in
-    let source' = Option.map ~f:(this#on_loc_annot * id) source in
-    let specifiers' = Option.map ~f:this#export_named_specifier specifiers in
-    let declaration' = Option.map ~f:this#statement declaration in
-    { exportKind; source = source'; specifiers = specifiers'; declaration = declaration' }
-
-  method export_named_specifier (spec: 'M Ast.Statement.ExportNamedDeclaration.specifier)
-    : 'N Ast.Statement.ExportNamedDeclaration.specifier =
-    let open Ast.Statement.ExportNamedDeclaration in
-    match spec with
-    | ExportSpecifiers specs -> ExportSpecifiers (List.map this#export_specifier specs)
-    | ExportBatchSpecifier (annot, name) ->
+  method program (program : ('M, 'T) Ast.program) : ('N, 'U) Ast.program =
+    let (annot, statements, comments) = program in
     let annot' = this#on_loc_annot annot in
-      let name' = Option.map ~f:(this#on_loc_annot * id) name in
-      ExportBatchSpecifier (annot', name')
-
-  method export_specifier (spec : 'M Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t)
-    : 'N Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t =
-    let open Ast.Statement.ExportNamedDeclaration.ExportSpecifier in
-    let annot, { local; exported } = spec in
-    let local' = (this#on_loc_annot * id) local in
-    let exported' = Option.map ~f:(this#on_loc_annot * id) exported in
-    this#on_loc_annot annot, { local = local'; exported = exported' }
-
-  method expression_statement (stmt: ('M, 'T) Ast.Statement.Expression.t)
-    : ('N, 'U) Ast.Statement.Expression.t =
-    let open Ast.Statement.Expression in
-    let { expression = expr; directive; } = stmt in
-    { expression = this#expression expr; directive; }
-
-  method expression_or_spread expr_or_spread =
-    let open Ast.Expression in
-    match expr_or_spread with
-    | Expression expr ->
-      Expression (this#expression expr)
-    | Spread spread ->
-      Spread (this#spread_element spread)
-
-  method for_in_statement (stmt: ('M, 'T) Ast.Statement.ForIn.t) : ('N, 'U) Ast.Statement.ForIn.t =
-    let open Ast.Statement.ForIn in
-    let { left; right; body; each } = stmt in
-    let left' = this#for_in_statement_lhs left in
-    let right' = this#expression right in
-    let body' = this#statement body in
-    { left = left'; right = right'; body = body'; each }
-
-  method for_in_statement_lhs (left: ('M, 'T) Ast.Statement.ForIn.left)
-    : ('N, 'U) Ast.Statement.ForIn.left =
-    let open Ast.Statement.ForIn in
-    match left with
-    | LeftDeclaration (annot, decl) ->
-      LeftDeclaration (this#on_loc_annot annot, this#variable_declaration decl)
-    | LeftPattern patt -> LeftPattern (this#for_in_assignment_pattern patt)
-
-  method for_of_statement (stuff: ('M, 'T) Ast.Statement.ForOf.t) : ('N, 'U) Ast.Statement.ForOf.t =
-    let open Ast.Statement.ForOf in
-    let { left; right; body; async } = stuff in
-    let left' = this#for_of_statement_lhs left in
-    let right' = this#expression right in
-    let body' = this#statement body in
-    { left = left'; right = right'; body = body'; async }
-
-  method for_of_statement_lhs (left: ('M, 'T) Ast.Statement.ForOf.left) =
-    let open Ast.Statement.ForOf in
-    match left with
-    | LeftDeclaration (annot, decl) ->
-      LeftDeclaration (this#on_loc_annot annot, this#variable_declaration decl)
-    | LeftPattern patt ->
-      LeftPattern (this#for_of_assignment_pattern patt)
-
-  method for_statement (stmt: ('M, 'T) Ast.Statement.For.t) : ('N, 'U) Ast.Statement.For.t =
-    let open Ast.Statement.For in
-    let { init; test; update; body } = stmt in
-    let init' = Option.map ~f:this#for_statement_init init in
-    let test' = Option.map ~f:this#predicate_expression test in
-    let update' = Option.map ~f:this#expression update in
-    let body' = this#statement body in
-    { init = init'; test = test'; update = update'; body = body' }
-
-  method for_statement_init (init: ('M, 'T) Ast.Statement.For.init)
-    : ('N, 'U) Ast.Statement.For.init =
-    let open Ast.Statement.For in
-    match init with
-    | InitDeclaration (annot, decl) ->
-      InitDeclaration (this#on_loc_annot annot, this#variable_declaration decl)
-    | InitExpression expr ->
-      InitExpression (this#expression expr)
-
-  method function_param_type (fpt: ('M, 'T) Ast.Type.Function.Param.t)
-    : ('N, 'U) Ast.Type.Function.Param.t =
-    let open Ast.Type.Function.Param in
-    let annot, { annot = t_annot; name; optional; } = fpt in
-    let t_annot' = this#type_ t_annot in
-    let name' = Option.map ~f:this#t_identifier name in
-    this#on_loc_annot annot, { annot = t_annot'; name = name'; optional }
-
-  method function_rest_param_type (frpt: ('M, 'T) Ast.Type.Function.RestParam.t)
-    : ('N, 'U) Ast.Type.Function.RestParam.t =
-    let open Ast.Type.Function.RestParam in
-    let annot, { argument } = frpt in
-    let argument' = this#function_param_type argument in
-    this#on_loc_annot annot, { argument = argument' }
-
-  method function_type (ft: ('M, 'T) Ast.Type.Function.t) : ('N, 'U) Ast.Type.Function.t =
-    let open Ast.Type.Function in
-    let {
-      params = (params_annot, { Params.params = ps; rest = rpo });
-      return;
-      tparams;
-    } = ft in
-    this#type_parameter_declaration_opt tparams (fun tparams' ->
-      let ps' = List.map this#function_param_type ps in
-      let rpo' = Option.map
~f:this#function_rest_param_type rpo in - let return' = this#type_ return in - { - params = (this#on_loc_annot params_annot, { Params.params = ps'; rest = rpo' }); - return = return'; - tparams = tparams'; - } - ) - - method label_identifier (ident: 'M Ast.Identifier.t) : 'N Ast.Identifier.t = - this#identifier ident - - method object_property_value_type (opvt: ('M, 'T) Ast.Type.Object.Property.value) - : ('N, 'U) Ast.Type.Object.Property.value = - let open Ast.Type.Object.Property in - match opvt with - | Init t -> Init (this#type_ t) - | Get (annot, ft) -> - Get (this#on_loc_annot annot, this#function_type ft) - | Set (annot, ft) -> - Set (this#on_loc_annot annot, this#function_type ft) - - method object_property_type (opt: ('M, 'T) Ast.Type.Object.Property.t) - : ('N, 'U) Ast.Type.Object.Property.t = - let open Ast.Type.Object.Property in - let annot, { key; value; optional; static; proto; _method; variance; } = opt in - let key' = this#object_key key in - let value' = this#object_property_value_type value in - let variance' = Option.map ~f:(this#on_loc_annot * id) variance in - this#on_loc_annot annot, { key = key'; value = value'; optional; static; proto; _method; variance = variance' } - - method object_type (ot: ('M, 'T) Ast.Type.Object.t) : ('N, 'U) Ast.Type.Object.t = - let open Ast.Type.Object in - let { properties ; exact; } = ot in - let properties' = List.map this#object_type_property properties in - { properties = properties'; exact } - - method object_type_property (prop : ('M, 'T) Ast.Type.Object.property) - : ('N, 'U) Ast.Type.Object.property = - let open Ast.Type.Object in - match prop with - | Property prop -> Property (this#object_property_type prop) - | SpreadProperty (annot, { SpreadProperty.argument }) -> - let argument' = this#type_ argument in - SpreadProperty (this#on_loc_annot annot, { SpreadProperty.argument = argument' }) - | Indexer (annot, indexer) -> - let open Indexer in - let { id = id_; key; value; static; variance } = indexer in - let id' = Option.map ~f:(this#on_loc_annot * id) id_ in - let key' = this#type_ key in - let value' = this#type_ value in - let variance' = Option.map ~f:(this#on_loc_annot * id) variance in - Indexer (this#on_loc_annot annot, { id = id'; key = key'; value = value'; static; variance = variance' }) - | CallProperty (annot, { CallProperty.value; static }) -> - let open CallProperty in - let value' = (this#on_loc_annot * this#function_type) value in - CallProperty (this#on_loc_annot annot, { value = value'; static }) - | InternalSlot (annot, islot) -> - let open InternalSlot in - let { id = id_; value; _ } = islot in - let id' = (this#on_loc_annot * id) id_ in - let value' = this#type_ value in - InternalSlot (this#on_loc_annot annot, { islot with id = id'; value = value' }) - - method interface_type (i: ('M, 'T) Ast.Type.Interface.t) : ('N, 'U) Ast.Type.Interface.t = - let open Ast.Type.Interface in - let { extends; body } = i in - let extends' = List.map (this#on_loc_annot * this#generic_type) extends in - let body' = (this#on_loc_annot * this#object_type) body in - { extends = extends'; body = body' } - - method generic_identifier_type (git: ('M, 'T) Ast.Type.Generic.Identifier.t) - : ('N, 'U) Ast.Type.Generic.Identifier.t = - let open Ast.Type.Generic.Identifier in - match git with - | Unqualified i -> Unqualified (this#t_identifier i) - | Qualified (annot, { qualification; id = id_ }) -> - let qualification' = this#generic_identifier_type qualification in - let id' = this#t_identifier id_ in - Qualified (this#on_loc_annot 
annot, { qualification = qualification'; id = id' }) - - method type_parameter_instantiation (pi: ('M, 'T) Ast.Type.ParameterInstantiation.t) - : ('N, 'U) Ast.Type.ParameterInstantiation.t = - let annot, targs = pi in - let targs' = List.map this#type_ targs in - this#on_loc_annot annot, targs' - - method type_parameter_declaration_opt : - 'a . ('M, 'T) Ast.Type.ParameterDeclaration.t option -> - (('N, 'U) Ast.Type.ParameterDeclaration.t option -> 'a) -> 'a = + let statements' = this#toplevel_statement_list statements in + let comments' = Core_list.map ~f:this#comment comments in + (annot', statements', comments') + + method statement ((annot, stmt) : ('M, 'T) Ast.Statement.t) : ('N, 'U) Ast.Statement.t = + Ast.Statement. + ( this#on_loc_annot annot, + match stmt with + | Block block -> Block (this#block block) + | Break break -> Break (this#break break) + | ClassDeclaration cls -> ClassDeclaration (this#class_ cls) + | Continue cont -> Continue (this#continue cont) + | Debugger -> Debugger + | DeclareClass stuff -> DeclareClass (this#declare_class stuff) + | DeclareExportDeclaration decl -> + DeclareExportDeclaration (this#declare_export_declaration annot decl) + | DeclareFunction stuff -> DeclareFunction (this#declare_function stuff) + | DeclareInterface stuff -> DeclareInterface (this#declare_interface stuff) + | DeclareModule m -> DeclareModule (this#declare_module annot m) + | DeclareTypeAlias stuff -> DeclareTypeAlias (this#declare_type_alias stuff) + | DeclareVariable stuff -> DeclareVariable (this#declare_variable stuff) + | DeclareModuleExports t_annot -> + DeclareModuleExports (this#declare_module_exports annot t_annot) + | DoWhile stuff -> DoWhile (this#do_while stuff) + | Empty -> + this#empty (); + Empty + | EnumDeclaration enum -> EnumDeclaration (this#enum_declaration enum) + | ExportDefaultDeclaration decl -> + ExportDefaultDeclaration (this#export_default_declaration annot decl) + | ExportNamedDeclaration decl -> + ExportNamedDeclaration (this#export_named_declaration annot decl) + | Expression expr -> Expression (this#expression_statement expr) + | For for_stmt -> For (this#for_statement for_stmt) + | ForIn stuff -> ForIn (this#for_in_statement stuff) + | ForOf stuff -> ForOf (this#for_of_statement stuff) + | FunctionDeclaration func -> FunctionDeclaration (this#function_declaration func) + | If if_stmt -> If (this#if_statement if_stmt) + | ImportDeclaration decl -> ImportDeclaration (this#import_declaration annot decl) + | InterfaceDeclaration stuff -> InterfaceDeclaration (this#interface_declaration stuff) + | Labeled label -> Labeled (this#labeled_statement label) + | OpaqueType otype -> OpaqueType (this#opaque_type otype) + | Return ret -> Return (this#return ret) + | Switch switch -> Switch (this#switch switch) + | Throw throw -> Throw (this#throw throw) + | Try try_stmt -> Try (this#try_catch try_stmt) + | VariableDeclaration decl -> VariableDeclaration (this#variable_declaration decl) + | While stuff -> While (this#while_ stuff) + | With stuff -> With (this#with_ stuff) + | TypeAlias stuff -> TypeAlias (this#type_alias stuff) + | DeclareOpaqueType otype -> DeclareOpaqueType (this#declare_opaque_type otype) ) + + method comment ((annot, c) : 'M Ast.Comment.t) : 'N Ast.Comment.t = (this#on_loc_annot annot, c) + + method t_comment ((annot, c) : 'T Ast.Comment.t) : 'U Ast.Comment.t = + (this#on_type_annot annot, c) + + method syntax : 'internal. 
('M, 'internal) Ast.Syntax.t -> ('N, 'internal) Ast.Syntax.t = + fun attached -> + Ast.Syntax.( + let { leading; trailing; internal } = attached in + let leading' = List.map this#comment leading in + let trailing' = List.map this#comment trailing in + { leading = leading'; trailing = trailing'; internal }) + + method expression ((annot, expr') : ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = + Ast.Expression. + ( this#on_type_annot annot, + match expr' with + | This -> This + | Super -> Super + | Array x -> Array (this#array x) + | ArrowFunction x -> ArrowFunction (this#arrow_function x) + | Assignment x -> Assignment (this#assignment x) + | Binary x -> Binary (this#binary x) + | Call x -> Call (this#call annot x) + | Class x -> Class (this#class_ x) + | Comprehension x -> Comprehension (this#comprehension x) + | Conditional x -> Conditional (this#conditional x) + | Function x -> Function (this#function_expression x) + | Generator x -> Generator (this#generator x) + | Identifier x -> Identifier (this#t_identifier x) + | Import x -> Import (this#import annot x) + | JSXElement x -> JSXElement (this#jsx_element x) + | JSXFragment x -> JSXFragment (this#jsx_fragment x) + | Literal x -> Literal (this#literal x) + | Logical x -> Logical (this#logical x) + | Member x -> Member (this#member x) + | MetaProperty x -> MetaProperty (this#meta_property x) + | New x -> New (this#new_ x) + | Object x -> Object (this#object_ x) + | OptionalCall x -> OptionalCall (this#optional_call annot x) + | OptionalMember x -> OptionalMember (this#optional_member x) + | Sequence x -> Sequence (this#sequence x) + | TaggedTemplate x -> TaggedTemplate (this#tagged_template x) + | TemplateLiteral x -> TemplateLiteral (this#template_literal x) + | TypeCast x -> TypeCast (this#type_cast x) + | Unary x -> Unary (this#unary_expression x) + | Update x -> Update (this#update_expression x) + | Yield x -> Yield (this#yield x) ) + + method array (expr : ('M, 'T) Ast.Expression.Array.t) : ('N, 'U) Ast.Expression.Array.t = + Ast.Expression.( + let { Array.elements; Array.comments } = expr in + let elements' = Core_list.map ~f:(Option.map ~f:this#expression_or_spread) elements in + let comments' = Option.map ~f:this#syntax comments in + { Array.elements = elements'; comments = comments' }) + + method arrow_function (expr : ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = + this#function_ expr + + method assignment (expr : ('M, 'T) Ast.Expression.Assignment.t) + : ('N, 'U) Ast.Expression.Assignment.t = + Ast.Expression.Assignment.( + let { operator; left; right } = expr in + let left' = this#assignment_pattern left in + let right' = this#expression right in + { operator; left = left'; right = right' }) + + method binary (expr : ('M, 'T) Ast.Expression.Binary.t) : ('N, 'U) Ast.Expression.Binary.t = + Ast.Expression.Binary.( + let { operator; left; right } = expr in + let left' = this#expression left in + let right' = this#expression right in + { operator; left = left'; right = right' }) + + method block (stmt : ('M, 'T) Ast.Statement.Block.t) : ('N, 'U) Ast.Statement.Block.t = + Ast.Statement.Block.( + let { body } = stmt in + let body' = this#statement_list body in + { body = body' }) + + method break (break : 'M Ast.Statement.Break.t) : 'N Ast.Statement.Break.t = + Ast.Statement.Break.( + let { label; comments } = break in + let label' = Option.map ~f:this#label_identifier label in + let comments' = Option.map ~f:this#syntax comments in + { label = label'; comments = comments' }) + + method call _annot (expr : ('M, 'T) 
Ast.Expression.Call.t) : ('N, 'U) Ast.Expression.Call.t = + Ast.Expression.Call.( + let { callee; targs; arguments } = expr in + let callee' = this#expression callee in + let targs' = Option.map ~f:this#type_parameter_instantiation_with_implicit targs in + let arguments' = Core_list.map ~f:this#expression_or_spread arguments in + { callee = callee'; targs = targs'; arguments = arguments' }) + + method optional_call annot (expr : ('M, 'T) Ast.Expression.OptionalCall.t) + : ('N, 'U) Ast.Expression.OptionalCall.t = + Ast.Expression.OptionalCall.( + let { call; optional } = expr in + let call' = this#call annot call in + { call = call'; optional }) + + method catch_clause (clause : ('M, 'T) Ast.Statement.Try.CatchClause.t') + : ('N, 'U) Ast.Statement.Try.CatchClause.t' = + Ast.Statement.Try.CatchClause.( + let { param; body } = clause in + let param' = Option.map ~f:this#catch_clause_pattern param in + let body' = (this#on_loc_annot * this#block) body in + { param = param'; body = body' }) + + method class_ (cls : ('M, 'T) Ast.Class.t) : ('N, 'U) Ast.Class.t = + Ast.Class.( + let { id; body; tparams; extends; implements; classDecorators } = cls in + let id' = Option.map ~f:this#class_identifier id in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let extends' = Option.map ~f:this#class_extends extends in + let body' = this#class_body body in + let implements' = Core_list.map ~f:this#class_implements implements in + let classDecorators' = Core_list.map ~f:this#class_decorator classDecorators in + { + id = id'; + body = body'; + tparams = tparams'; + extends = extends'; + implements = implements'; + classDecorators = classDecorators'; + })) + + method class_extends (extends : ('M, 'T) Ast.Class.Extends.t) : ('N, 'U) Ast.Class.Extends.t = + Ast.Class.Extends.( + let (annot, { expr; targs }) = extends in + let expr' = this#expression expr in + let targs' = Option.map ~f:this#type_parameter_instantiation targs in + (this#on_loc_annot annot, { expr = expr'; targs = targs' })) + + method class_decorator (dec : ('M, 'T) Ast.Class.Decorator.t) : ('N, 'U) Ast.Class.Decorator.t + = + Ast.Class.Decorator.( + let (annot, { expression }) = dec in + let expression' = this#expression expression in + (this#on_loc_annot annot, { expression = expression' })) + + method class_identifier (ident : ('M, 'T) Ast.Identifier.t) : ('N, 'U) Ast.Identifier.t = + this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let ident + + method class_body (cls_body : ('M, 'T) Ast.Class.Body.t) : ('N, 'U) Ast.Class.Body.t = + Ast.Class.Body.( + let (annot, { body }) = cls_body in + let body' = Core_list.map ~f:this#class_element body in + (this#on_type_annot annot, { body = body' })) + + method class_element (elem : ('M, 'T) Ast.Class.Body.element) : ('N, 'U) Ast.Class.Body.element + = + Ast.Class.Body.( + match elem with + | Method (annot, meth) -> Method (this#on_type_annot annot, this#class_method meth) + | Property (annot, prop) -> Property (this#on_type_annot annot, this#class_property prop) + | PrivateField (annot, field) -> + PrivateField (this#on_type_annot annot, this#class_private_field field)) + + method class_method (meth : ('M, 'T) Ast.Class.Method.t') : ('N, 'U) Ast.Class.Method.t' = + Ast.Class.Method.( + let { kind; key; value; static; decorators } = meth in + let key' = this#object_key key in + let value' = (this#on_loc_annot * this#function_expression) value in + let decorators' = Core_list.map ~f:this#class_decorator decorators in + { kind; key = key'; value = value'; static; 
decorators = decorators' }) + + method class_property (prop : ('M, 'T) Ast.Class.Property.t') : ('N, 'U) Ast.Class.Property.t' + = + Ast.Class.Property.( + let { key; value; annot; static; variance } = prop in + let key' = this#object_key key in + let value' = Option.map ~f:this#expression value in + let annot' = this#type_annotation_hint annot in + let variance' = Option.map ~f:(this#on_loc_annot * id) variance in + { key = key'; value = value'; annot = annot'; static; variance = variance' }) + + method class_private_field (prop : ('M, 'T) Ast.Class.PrivateField.t') = + Ast.Class.PrivateField.( + let { key; value; annot; static; variance } = prop in + let key' = this#private_name key in + let value' = Option.map ~f:this#expression value in + let annot' = this#type_annotation_hint annot in + let variance' = Option.map ~f:(this#on_loc_annot * id) variance in + { key = key'; value = value'; annot = annot'; static; variance = variance' }) + + method comprehension (expr : ('M, 'T) Ast.Expression.Comprehension.t) + : ('N, 'U) Ast.Expression.Comprehension.t = + Ast.Expression.Comprehension.( + let { blocks; filter } = expr in + let blocks' = Core_list.map ~f:this#comprehension_block blocks in + let filter' = Option.map ~f:this#expression filter in + { blocks = blocks'; filter = filter' }) + + method comprehension_block (block : ('M, 'T) Ast.Expression.Comprehension.Block.t) + : ('N, 'U) Ast.Expression.Comprehension.Block.t = + Ast.Expression.Comprehension.Block.( + let (annot, { left; right; each }) = block in + let left' = this#pattern left in + let right' = this#expression right in + (this#on_loc_annot annot, { left = left'; right = right'; each })) + + method conditional (expr : ('M, 'T) Ast.Expression.Conditional.t) + : ('N, 'U) Ast.Expression.Conditional.t = + Ast.Expression.Conditional.( + let { test; consequent; alternate } = expr in + let test' = this#predicate_expression test in + let consequent' = this#expression consequent in + let alternate' = this#expression alternate in + { test = test'; consequent = consequent'; alternate = alternate' }) + + method continue (cont : 'M Ast.Statement.Continue.t) : 'N Ast.Statement.Continue.t = + Ast.Statement.Continue.( + let { label; comments } = cont in + let label' = Option.map ~f:this#label_identifier label in + let comments' = Option.map ~f:this#syntax comments in + { label = label'; comments = comments' }) + + method debugger () = () + + method declare_class (decl : ('M, 'T) Ast.Statement.DeclareClass.t) + : ('N, 'U) Ast.Statement.DeclareClass.t = + Ast.Statement.DeclareClass.( + let { id = ident; tparams; body; extends; mixins; implements } = decl in + let id' = this#class_identifier ident in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let body' = + let (a, b) = body in + (this#on_loc_annot a, this#object_type b) + in + let extends' = Option.map ~f:(this#on_loc_annot * this#generic_type) extends in + let mixins' = Core_list.map ~f:(this#on_loc_annot * this#generic_type) mixins in + let implements' = Core_list.map ~f:this#class_implements implements in + { + id = id'; + tparams = tparams'; + body = body'; + extends = extends'; + mixins = mixins'; + implements = implements'; + })) + + method class_implements (implements : ('M, 'T) Ast.Class.Implements.t) + : ('N, 'U) Ast.Class.Implements.t = + Ast.Class.Implements.( + let (annot, { id = id_; targs }) = implements in + let id' = this#t_identifier id_ in + let targs' = Option.map ~f:this#type_parameter_instantiation targs in + (this#on_loc_annot annot, { id = id'; targs = 
targs' })) + + method declare_export_declaration + _annot (decl : ('M, 'T) Ast.Statement.DeclareExportDeclaration.t) + : ('N, 'U) Ast.Statement.DeclareExportDeclaration.t = + Ast.Statement.DeclareExportDeclaration.( + let { default; source; specifiers; declaration } = decl in + let default' = Option.map ~f:this#on_loc_annot default in + let source' = Option.map ~f:(this#on_loc_annot * id) source in + let specifiers' = Option.map ~f:this#export_named_specifier specifiers in + let declaration' = Option.map ~f:this#declare_export_declaration_decl declaration in + { + default = default'; + source = source'; + specifiers = specifiers'; + declaration = declaration'; + }) + + method declare_export_declaration_decl + (decl : ('M, 'T) Ast.Statement.DeclareExportDeclaration.declaration) + : ('N, 'U) Ast.Statement.DeclareExportDeclaration.declaration = + Ast.Statement.DeclareExportDeclaration.( + match decl with + | Variable (annot, decl_var) -> + Variable (this#on_loc_annot annot, this#declare_variable decl_var) + | Function (annot, decl_func) -> + Function (this#on_loc_annot annot, this#declare_function decl_func) + | Class (annot, decl_class) -> + Class (this#on_loc_annot annot, this#declare_class decl_class) + | DefaultType t -> DefaultType (this#type_ t) + | NamedType (annot, alias) -> NamedType (this#on_loc_annot annot, this#type_alias alias) + | NamedOpaqueType (annot, ot) -> + NamedOpaqueType (this#on_loc_annot annot, this#opaque_type ot) + | Interface (annot, iface) -> Interface (this#on_loc_annot annot, this#interface iface)) + + method declare_function (decl : ('M, 'T) Ast.Statement.DeclareFunction.t) + : ('N, 'U) Ast.Statement.DeclareFunction.t = + Ast.Statement.DeclareFunction.( + let { id = ident; annot; predicate } = decl in + let id' = this#t_function_identifier ident in + let annot' = this#type_annotation annot in + let predicate' = Option.map ~f:this#type_predicate predicate in + { id = id'; annot = annot'; predicate = predicate' }) + + method declare_interface (decl : ('M, 'T) Ast.Statement.Interface.t) + : ('N, 'U) Ast.Statement.Interface.t = + this#interface decl + + method declare_module _annot (m : ('M, 'T) Ast.Statement.DeclareModule.t) + : ('N, 'U) Ast.Statement.DeclareModule.t = + Ast.Statement.DeclareModule.( + let { id; body; kind } = m in + let id' = + match id with + | Identifier id -> Identifier (this#t_identifier id) + | Literal (annot, name) -> Literal (this#on_type_annot annot, name) + in + let kind' = + match kind with + | CommonJS annot -> CommonJS (this#on_loc_annot annot) + | ES annot -> ES (this#on_loc_annot annot) + in + let body' = (this#on_loc_annot * this#block) body in + { id = id'; body = body'; kind = kind' }) + + method declare_module_exports _annot (t_annot : ('M, 'T) Ast.Type.annotation) + : ('N, 'U) Ast.Type.annotation = + this#type_annotation t_annot + + method declare_type_alias (decl : ('M, 'T) Ast.Statement.TypeAlias.t) + : ('N, 'U) Ast.Statement.TypeAlias.t = + this#type_alias decl + + method declare_variable (decl : ('M, 'T) Ast.Statement.DeclareVariable.t) + : ('N, 'U) Ast.Statement.DeclareVariable.t = + Ast.Statement.DeclareVariable.( + let { id = ident; annot } = decl in + let id' = this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident in + let annot' = this#type_annotation_hint annot in + { id = id'; annot = annot' }) + + method do_while (stuff : ('M, 'T) Ast.Statement.DoWhile.t) : ('N, 'U) Ast.Statement.DoWhile.t = + Ast.Statement.DoWhile.( + let { body; test; comments } = stuff in + let body' = this#statement body in 
+ let test' = this#predicate_expression test in + let comments' = Option.map ~f:this#syntax comments in + { body = body'; test = test'; comments = comments' }) + + method empty () = () + + method enum_declaration (enum : ('M, 'T) Ast.Statement.EnumDeclaration.t) + : ('N, 'U) Ast.Statement.EnumDeclaration.t = + Ast.Statement.EnumDeclaration.( + let { id; body } = enum in + let body' = + match body with + | BooleanBody boolean_body -> BooleanBody (this#enum_boolean_body boolean_body) + | NumberBody number_body -> NumberBody (this#enum_number_body number_body) + | StringBody string_body -> StringBody (this#enum_string_body string_body) + | SymbolBody symbol_body -> SymbolBody (this#enum_symbol_body symbol_body) + in + { id = this#t_identifier id; body = body' }) + + method enum_boolean_body (body : 'M Ast.Statement.EnumDeclaration.BooleanBody.t) + : 'N Ast.Statement.EnumDeclaration.BooleanBody.t = + Ast.Statement.EnumDeclaration.BooleanBody.( + let { members; explicitType } = body in + { members = Core_list.map ~f:this#enum_boolean_member members; explicitType }) + + method enum_number_body (body : 'M Ast.Statement.EnumDeclaration.NumberBody.t) + : 'N Ast.Statement.EnumDeclaration.NumberBody.t = + Ast.Statement.EnumDeclaration.NumberBody.( + let { members; explicitType } = body in + { members = Core_list.map ~f:this#enum_number_member members; explicitType }) + + method enum_string_body (body : 'M Ast.Statement.EnumDeclaration.StringBody.t) + : 'N Ast.Statement.EnumDeclaration.StringBody.t = + Ast.Statement.EnumDeclaration.StringBody.( + let { members; explicitType } = body in + let members' = + match members with + | Defaulted members -> Defaulted (Core_list.map ~f:this#enum_defaulted_member members) + | Initialized members -> Initialized (Core_list.map ~f:this#enum_string_member members) + in + { members = members'; explicitType }) + + method enum_symbol_body (body : 'M Ast.Statement.EnumDeclaration.SymbolBody.t) + : 'N Ast.Statement.EnumDeclaration.SymbolBody.t = + Ast.Statement.EnumDeclaration.SymbolBody.( + let { members } = body in + { members = Core_list.map ~f:this#enum_defaulted_member members }) + + method enum_defaulted_member (member : 'M Ast.Statement.EnumDeclaration.DefaultedMember.t) + : 'N Ast.Statement.EnumDeclaration.DefaultedMember.t = + Ast.Statement.EnumDeclaration.DefaultedMember.( + let (annot, { id }) = member in + (this#on_loc_annot annot, { id = this#identifier id })) + + method enum_boolean_member + (member : (bool, 'M) Ast.Statement.EnumDeclaration.InitializedMember.t) + : (bool, 'N) Ast.Statement.EnumDeclaration.InitializedMember.t = + Ast.Statement.EnumDeclaration.InitializedMember.( + let (annot, { id; init = (init_annot, init_val) }) = member in + let init' = (this#on_loc_annot init_annot, init_val) in + (this#on_loc_annot annot, { id = this#identifier id; init = init' })) + + method enum_number_member + (member : (Ast.NumberLiteral.t, 'M) Ast.Statement.EnumDeclaration.InitializedMember.t) + : (Ast.NumberLiteral.t, 'N) Ast.Statement.EnumDeclaration.InitializedMember.t = + Ast.Statement.EnumDeclaration.InitializedMember.( + let (annot, { id; init = (init_annot, init_val) }) = member in + let init' = (this#on_loc_annot init_annot, init_val) in + (this#on_loc_annot annot, { id = this#identifier id; init = init' })) + + method enum_string_member + (member : (Ast.StringLiteral.t, 'M) Ast.Statement.EnumDeclaration.InitializedMember.t) + : (Ast.StringLiteral.t, 'N) Ast.Statement.EnumDeclaration.InitializedMember.t = + Ast.Statement.EnumDeclaration.InitializedMember.( 
+ let (annot, { id; init = (init_annot, init_val) }) = member in + let init' = (this#on_loc_annot init_annot, init_val) in + (this#on_loc_annot annot, { id = this#identifier id; init = init' })) + + method export_default_declaration + _loc (decl : ('M, 'T) Ast.Statement.ExportDefaultDeclaration.t) + : ('N, 'U) Ast.Statement.ExportDefaultDeclaration.t = + Ast.Statement.ExportDefaultDeclaration.( + let { default; declaration } = decl in + let default' = this#on_loc_annot default in + let declaration' = this#export_default_declaration_decl declaration in + { default = default'; declaration = declaration' }) + + method export_default_declaration_decl + (decl : ('M, 'T) Ast.Statement.ExportDefaultDeclaration.declaration) + : ('N, 'U) Ast.Statement.ExportDefaultDeclaration.declaration = + Ast.Statement.ExportDefaultDeclaration.( + match decl with + | Declaration stmt -> Declaration (this#statement stmt) + | Expression expr -> Expression (this#expression expr)) + + method export_named_declaration _loc (decl : ('M, 'T) Ast.Statement.ExportNamedDeclaration.t) + : ('N, 'U) Ast.Statement.ExportNamedDeclaration.t = + Ast.Statement.ExportNamedDeclaration.( + let { exportKind; source; specifiers; declaration } = decl in + let source' = Option.map ~f:(this#on_loc_annot * id) source in + let specifiers' = Option.map ~f:this#export_named_specifier specifiers in + let declaration' = Option.map ~f:this#statement declaration in + { exportKind; source = source'; specifiers = specifiers'; declaration = declaration' }) + + method export_named_specifier (spec : 'M Ast.Statement.ExportNamedDeclaration.specifier) + : 'N Ast.Statement.ExportNamedDeclaration.specifier = + Ast.Statement.ExportNamedDeclaration.( + match spec with + | ExportSpecifiers specs -> ExportSpecifiers (Core_list.map ~f:this#export_specifier specs) + | ExportBatchSpecifier (annot, name) -> + let annot' = this#on_loc_annot annot in + let name' = Option.map ~f:this#identifier name in + ExportBatchSpecifier (annot', name')) + + method export_specifier (spec : 'M Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) + : 'N Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t = + Ast.Statement.ExportNamedDeclaration.ExportSpecifier.( + let (annot, { local; exported }) = spec in + let local' = this#identifier local in + let exported' = Option.map ~f:this#identifier exported in + (this#on_loc_annot annot, { local = local'; exported = exported' })) + + method expression_statement (stmt : ('M, 'T) Ast.Statement.Expression.t) + : ('N, 'U) Ast.Statement.Expression.t = + Ast.Statement.Expression.( + let { expression = expr; directive } = stmt in + { expression = this#expression expr; directive }) + + method expression_or_spread (expr_or_spread : ('M, 'T) Ast.Expression.expression_or_spread) + : ('N, 'U) Ast.Expression.expression_or_spread = + Ast.Expression.( + match expr_or_spread with + | Expression expr -> Expression (this#expression expr) + | Spread spread -> Spread (this#spread_element spread)) + + method for_in_statement (stmt : ('M, 'T) Ast.Statement.ForIn.t) + : ('N, 'U) Ast.Statement.ForIn.t = + Ast.Statement.ForIn.( + let { left; right; body; each } = stmt in + let left' = this#for_in_statement_lhs left in + let right' = this#expression right in + let body' = this#statement body in + { left = left'; right = right'; body = body'; each }) + + method for_in_statement_lhs (left : ('M, 'T) Ast.Statement.ForIn.left) + : ('N, 'U) Ast.Statement.ForIn.left = + Ast.Statement.ForIn.( + match left with + | LeftDeclaration (annot, decl) -> + 
LeftDeclaration (this#on_loc_annot annot, this#variable_declaration decl) + | LeftPattern patt -> LeftPattern (this#for_in_assignment_pattern patt)) + + method for_of_statement (stuff : ('M, 'T) Ast.Statement.ForOf.t) + : ('N, 'U) Ast.Statement.ForOf.t = + Ast.Statement.ForOf.( + let { left; right; body; async } = stuff in + let left' = this#for_of_statement_lhs left in + let right' = this#expression right in + let body' = this#statement body in + { left = left'; right = right'; body = body'; async }) + + method for_of_statement_lhs (left : ('M, 'T) Ast.Statement.ForOf.left) = + Ast.Statement.ForOf.( + match left with + | LeftDeclaration (annot, decl) -> + LeftDeclaration (this#on_loc_annot annot, this#variable_declaration decl) + | LeftPattern patt -> LeftPattern (this#for_of_assignment_pattern patt)) + + method for_statement (stmt : ('M, 'T) Ast.Statement.For.t) : ('N, 'U) Ast.Statement.For.t = + Ast.Statement.For.( + let { init; test; update; body } = stmt in + let init' = Option.map ~f:this#for_statement_init init in + let test' = Option.map ~f:this#predicate_expression test in + let update' = Option.map ~f:this#expression update in + let body' = this#statement body in + { init = init'; test = test'; update = update'; body = body' }) + + method for_statement_init (init : ('M, 'T) Ast.Statement.For.init) + : ('N, 'U) Ast.Statement.For.init = + Ast.Statement.For.( + match init with + | InitDeclaration (annot, decl) -> + InitDeclaration (this#on_loc_annot annot, this#variable_declaration decl) + | InitExpression expr -> InitExpression (this#expression expr)) + + method function_param_type (fpt : ('M, 'T) Ast.Type.Function.Param.t) + : ('N, 'U) Ast.Type.Function.Param.t = + Ast.Type.Function.Param.( + let (annot, { annot = t_annot; name; optional }) = fpt in + let t_annot' = this#type_ t_annot in + let name' = Option.map ~f:this#t_identifier name in + (this#on_loc_annot annot, { annot = t_annot'; name = name'; optional })) + + method function_rest_param_type (frpt : ('M, 'T) Ast.Type.Function.RestParam.t) + : ('N, 'U) Ast.Type.Function.RestParam.t = + Ast.Type.Function.RestParam.( + let (annot, { argument }) = frpt in + let argument' = this#function_param_type argument in + (this#on_loc_annot annot, { argument = argument' })) + + method function_type (ft : ('M, 'T) Ast.Type.Function.t) : ('N, 'U) Ast.Type.Function.t = + Ast.Type.Function.( + let { params = (params_annot, { Params.params = ps; rest = rpo }); return; tparams } = + ft + in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let ps' = Core_list.map ~f:this#function_param_type ps in + let rpo' = Option.map ~f:this#function_rest_param_type rpo in + let return' = this#type_ return in + { + params = (this#on_loc_annot params_annot, { Params.params = ps'; rest = rpo' }); + return = return'; + tparams = tparams'; + })) + + method label_identifier (ident : ('M, 'M) Ast.Identifier.t) : ('N, 'N) Ast.Identifier.t = + this#identifier ident + + method object_property_value_type (opvt : ('M, 'T) Ast.Type.Object.Property.value) + : ('N, 'U) Ast.Type.Object.Property.value = + Ast.Type.Object.Property.( + match opvt with + | Init t -> Init (this#type_ t) + | Get (annot, ft) -> Get (this#on_loc_annot annot, this#function_type ft) + | Set (annot, ft) -> Set (this#on_loc_annot annot, this#function_type ft)) + + method object_property_type (opt : ('M, 'T) Ast.Type.Object.Property.t) + : ('N, 'U) Ast.Type.Object.Property.t = + Ast.Type.Object.Property.( + let (annot, { key; value; optional; static; proto; _method; variance }) = opt in 
+ let key' = this#object_key key in + let value' = this#object_property_value_type value in + let variance' = Option.map ~f:(this#on_loc_annot * id) variance in + ( this#on_loc_annot annot, + { key = key'; value = value'; optional; static; proto; _method; variance = variance' } )) + + method object_indexer_type (oit : ('M, 'T) Ast.Type.Object.Indexer.t) + : ('N, 'U) Ast.Type.Object.Indexer.t = + Ast.Type.Object.Indexer.( + let (annot, { id = id_; key; value; static; variance }) = oit in + let id' = Option.map ~f:this#identifier id_ in + let key' = this#type_ key in + let value' = this#type_ value in + let variance' = Option.map ~f:(this#on_loc_annot * id) variance in + ( this#on_loc_annot annot, + { id = id'; key = key'; value = value'; static; variance = variance' } )) + + method object_internal_slot_type (islot : ('M, 'T) Ast.Type.Object.InternalSlot.t) + : ('N, 'U) Ast.Type.Object.InternalSlot.t = + Ast.Type.Object.InternalSlot.( + let (annot, { id = id_; value; optional; static; _method }) = islot in + let id' = this#identifier id_ in + let value' = this#type_ value in + (this#on_loc_annot annot, { id = id'; value = value'; optional; static; _method })) + + method object_type (ot : ('M, 'T) Ast.Type.Object.t) : ('N, 'U) Ast.Type.Object.t = + Ast.Type.Object.( + let { properties; exact; inexact } = ot in + let properties' = Core_list.map ~f:this#object_type_property properties in + { properties = properties'; exact; inexact }) + + method object_type_property (prop : ('M, 'T) Ast.Type.Object.property) + : ('N, 'U) Ast.Type.Object.property = + Ast.Type.Object.( + match prop with + | Property prop -> Property (this#object_property_type prop) + | SpreadProperty (annot, { SpreadProperty.argument }) -> + let argument' = this#type_ argument in + SpreadProperty (this#on_loc_annot annot, { SpreadProperty.argument = argument' }) + | Indexer indexer -> Indexer (this#object_indexer_type indexer) + | CallProperty (annot, { CallProperty.value; static }) -> + CallProperty.( + let value' = (this#on_loc_annot * this#function_type) value in + CallProperty (this#on_loc_annot annot, { value = value'; static })) + | InternalSlot islot -> InternalSlot (this#object_internal_slot_type islot)) + + method interface_type (i : ('M, 'T) Ast.Type.Interface.t) : ('N, 'U) Ast.Type.Interface.t = + Ast.Type.Interface.( + let { extends; body } = i in + let extends' = Core_list.map ~f:(this#on_loc_annot * this#generic_type) extends in + let body' = (this#on_loc_annot * this#object_type) body in + { extends = extends'; body = body' }) + + method generic_identifier_type (git : ('M, 'T) Ast.Type.Generic.Identifier.t) + : ('N, 'U) Ast.Type.Generic.Identifier.t = + Ast.Type.Generic.Identifier.( + match git with + | Unqualified i -> Unqualified (this#t_identifier i) + | Qualified (annot, { qualification; id = id_ }) -> + let qualification' = this#generic_identifier_type qualification in + let id' = this#t_identifier id_ in + Qualified (this#on_loc_annot annot, { qualification = qualification'; id = id' })) + + method type_parameter_instantiation (pi : ('M, 'T) Ast.Type.ParameterInstantiation.t) + : ('N, 'U) Ast.Type.ParameterInstantiation.t = + let (annot, targs) = pi in + let targs' = Core_list.map ~f:this#type_ targs in + (this#on_loc_annot annot, targs') + + method type_parameter_instantiation_with_implicit + (pi : ('M, 'T) Ast.Expression.TypeParameterInstantiation.t) + : ('N, 'U) Ast.Expression.TypeParameterInstantiation.t = + let (annot, targs) = pi in + let targs' = Core_list.map ~f:this#explicit_or_implicit targs in + 
(this#on_loc_annot annot, targs') + + method type_parameter_declaration_opt + : 'a. ('M, 'T) Ast.Type.ParameterDeclaration.t option -> + (('N, 'U) Ast.Type.ParameterDeclaration.t option -> 'a) -> 'a = fun pd f -> let pd' = - Option.map ~f:(fun pd -> - let annot, type_params = pd in - let type_params' = List.map this#type_parameter_declaration_type_param type_params in - this#on_loc_annot annot, type_params' - ) pd + Option.map + ~f:(fun pd -> + let (annot, type_params) = pd in + let type_params' = + Core_list.map ~f:this#type_parameter_declaration_type_param type_params + in + (this#on_loc_annot annot, type_params')) + pd in f pd' - method type_parameter_declaration_type_param - (type_param: ('M, 'T) Ast.Type.ParameterDeclaration.TypeParam.t) - : ('N, 'U) Ast.Type.ParameterDeclaration.TypeParam.t = - let open Ast.Type.ParameterDeclaration.TypeParam in - let annot, { name; bound; variance; default; } = type_param in - let name' = this#t_identifier name in - let bound' = Option.map ~f:this#type_annotation bound in - let variance' = Option.map ~f:(this#on_loc_annot * id) variance in - let default' = Option.map ~f:this#type_ default in - this#on_type_annot annot, { name = name'; bound = bound'; variance = variance'; default = default'; } - - method generic_type (gt: ('M, 'T) Ast.Type.Generic.t) : ('N, 'U) Ast.Type.Generic.t = - let open Ast.Type.Generic in - let { id; targs; } = gt in - let id' = this#generic_identifier_type id in - let targs' = Option.map ~f:this#type_parameter_instantiation targs in - { id = id'; targs = targs' } - - method type_predicate ((annot, pred) : ('M, 'T) Ast.Type.Predicate.t) - : ('N, 'U) Ast.Type.Predicate.t = - let open Ast.Type.Predicate in - this#on_loc_annot annot, - match pred with - | Declared e -> Declared (this#expression e) - | Inferred -> Inferred - - method type_ ((annot, t): ('M, 'T) Ast.Type.t) : ('N, 'U) Ast.Type.t = - let open Ast.Type in - this#on_type_annot annot, - match t with - ( Any - | Mixed - | Empty - | Void - | Null - | Number - | String - | Boolean - | StringLiteral _ - | NumberLiteral _ - | BooleanLiteral _ - | Exists - ) as t -> t - | Nullable t' -> Nullable (this#type_ t') - | Array t' -> Array (this#type_ t') - | Typeof t' -> Typeof (this#type_ t') - | Function ft -> Function (this#function_type ft) - | Object ot -> Object (this#object_type ot) - | Interface i -> Interface (this#interface_type i) - | Generic gt -> Generic (this#generic_type gt) - | Union (t0, t1, ts) -> - let t0' = this#type_ t0 in - let t1' = this#type_ t1 in - let ts' = List.map this#type_ ts in - Union (t0', t1', ts') - | Intersection (t0, t1, ts) -> - let t0' = this#type_ t0 in - let t1' = this#type_ t1 in - let ts' = List.map this#type_ ts in - Intersection (t0', t1', ts') - | Tuple ts -> - let ts' = List.map this#type_ ts in - Tuple ts' - - method type_annotation ((annot, t_annot): ('M, 'T) Ast.Type.annotation) = - this#on_loc_annot annot, this#type_ t_annot - - method return_type_annotation (return: ('M, 'T) Ast.Function.return) - : ('N, 'U) Ast.Function.return = - let open Ast.Function in - match return with - | Available annot -> Available (this#type_annotation annot) - | Missing loc -> Missing (this#on_type_annot loc) - - method function_ (expr: ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = - let open Ast.Function in - let { - id = ident; params; body; async; generator; expression; - predicate; return; tparams; - } = expr in - let ident' = Option.map ~f:this#t_function_identifier ident in - this#type_parameter_declaration_opt tparams (fun tparams' -> 
- let params' = - let annot, { Params.params = params_list; rest } = params in - let params_list' = List.map this#function_param_pattern params_list in - let rest' = Option.map ~f:this#function_rest_element rest in - this#on_loc_annot annot, { Params.params = params_list'; rest = rest' } - in - let return' = this#return_type_annotation return in - let body' = match body with - | BodyBlock (annot, block) -> - BodyBlock (this#on_loc_annot annot, this#function_body block) - | BodyExpression expr -> - BodyExpression (this#expression expr) - in - let predicate' = Option.map ~f:this#type_predicate predicate in - { - id = ident'; params = params'; return = return'; body = body'; - async; generator; expression; predicate = predicate'; tparams = tparams'; - } - ) - - method function_body (block: ('M, 'T) Ast.Statement.Block.t) : ('N, 'U) Ast.Statement.Block.t = - this#block block - - method function_identifier (ident: 'M Ast.Identifier.t) : 'N Ast.Identifier.t = - this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident - - method t_function_identifier (ident: 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident - - method function_declaration (stmt: ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = - this#function_ stmt - - method generator (expr: ('M, 'T) Ast.Expression.Generator.t) - : ('N, 'U) Ast.Expression.Generator.t = - let open Ast.Expression.Generator in - let { blocks; filter } = expr in - let blocks' = List.map this#comprehension_block blocks in - let filter' = Option.map ~f:this#expression filter in - { blocks = blocks'; filter = filter' } - - method identifier ((annot, name): 'M Ast.Identifier.t) : 'N Ast.Identifier.t = - this#on_loc_annot annot, name - - method t_identifier ((annot, name): 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - this#on_type_annot annot, name - - method interface (interface: ('M, 'T) Ast.Statement.Interface.t) - : ('N, 'U) Ast.Statement.Interface.t = - let open Ast.Statement.Interface in - let { id = ident; tparams; extends; body } = interface in - let id' = this#class_identifier ident in - this#type_parameter_declaration_opt tparams (fun tparams' -> - let extends' = List.map (this#on_loc_annot * this#generic_type) extends in - let body' = (this#on_loc_annot * this#object_type) body in - { id = id'; tparams = tparams'; extends = extends'; body = body' } - ) - - method interface_declaration (decl: ('M, 'T) Ast.Statement.Interface.t) - : ('N, 'U) Ast.Statement.Interface.t = - this#interface decl - - method private_name ((annot, ident): 'M Ast.PrivateName.t) : 'N Ast.PrivateName.t = - this#on_loc_annot annot, this#identifier ident - - method import _annot (expr: ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = - this#expression expr - - method if_consequent_statement ~has_else (stmt: ('M, 'T) Ast.Statement.t) - : ('N, 'U) Ast.Statement.t = - ignore has_else; - this#statement stmt - - method if_statement (stmt: ('M, 'T) Ast.Statement.If.t) : ('N, 'U) Ast.Statement.If.t = - let open Ast.Statement.If in - let { test; consequent; alternate } = stmt in - let test' = this#predicate_expression test in - let consequent' = - this#if_consequent_statement ~has_else:(alternate <> None) consequent in - let alternate' = Option.map ~f:this#statement alternate in - { test = test'; consequent = consequent'; alternate = alternate' } - - method import_declaration _loc (decl: ('M, 'T) Ast.Statement.ImportDeclaration.t) - : ('N, 'U) Ast.Statement.ImportDeclaration.t = - let open 
Ast.Statement.ImportDeclaration in - let { importKind; source; specifiers; default } = decl in - let specifiers' = Option.map ~f:this#import_specifier specifiers in - let default' = Option.map ~f:this#import_default_specifier default in - let source' = (this#on_loc_annot * id) source in - { importKind; source = source'; specifiers = specifiers'; default = default' } - - method import_specifier (specifier: ('M, 'T) Ast.Statement.ImportDeclaration.specifier) - : ('N, 'U) Ast.Statement.ImportDeclaration.specifier = - let open Ast.Statement.ImportDeclaration in - match specifier with - | ImportNamedSpecifiers named_specifiers -> - let named_specifiers' = List.map this#import_named_specifier named_specifiers in - ImportNamedSpecifiers named_specifiers' - | ImportNamespaceSpecifier (annot, ident) -> - let ident' = this#import_namespace_specifier ident in - ImportNamespaceSpecifier (this#on_loc_annot annot, ident') - - method import_named_specifier (specifier: 'T Ast.Statement.ImportDeclaration.named_specifier) - : 'U Ast.Statement.ImportDeclaration.named_specifier = - let open Ast.Statement.ImportDeclaration in - let { kind; local; remote } = specifier in - let local' = Option.map ~f:this#t_pattern_identifier local in - let remote' = this#t_pattern_identifier remote in - { kind; local = local'; remote = remote' } - - method import_default_specifier (id: 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let id - - method import_namespace_specifier (id: 'M Ast.Identifier.t) : 'N Ast.Identifier.t = - this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let id - - method jsx_element (expr: ('M, 'T) Ast.JSX.element) = - let open Ast.JSX in - let { openingElement; closingElement; children } = expr in - let openingElement' = this#jsx_opening_element openingElement in - let closingElement' = Option.map ~f:this#jsx_closing_element closingElement in - let children' = List.map this#jsx_child children in - { openingElement = openingElement'; closingElement = closingElement'; children = children' } - - method jsx_fragment (expr: ('M, 'T) Ast.JSX.fragment) : ('N, 'U) Ast.JSX.fragment = - let open Ast.JSX in - let { frag_openingElement; frag_closingElement; frag_children } = expr in - let opening' = this#on_loc_annot frag_openingElement in - let closing' = Option.map ~f:this#on_loc_annot frag_closingElement in - let children' = List.map this#jsx_child frag_children in - { frag_openingElement = opening'; frag_closingElement = closing'; frag_children = children' } - - method jsx_opening_element (elem: ('M, 'T) Ast.JSX.Opening.t) : ('N, 'U) Ast.JSX.Opening.t = - let open Ast.JSX.Opening in - let annot, { name; selfClosing; attributes } = elem in - let name' = this#jsx_name name in - let attributes' = List.map this#jsx_opening_attribute attributes in - this#on_loc_annot annot, { name = name'; selfClosing; attributes = attributes' } - - method jsx_closing_element (elem: ('M, 'T) Ast.JSX.Closing.t) : ('N, 'U) Ast.JSX.Closing.t = - let open Ast.JSX.Closing in - let annot, {name} = elem in - let name' = this#jsx_name name in - this#on_loc_annot annot, {name=name'} - - method jsx_opening_attribute (jsx_attr: ('M, 'T) Ast.JSX.Opening.attribute) - : ('N, 'U) Ast.JSX.Opening.attribute = - let open Ast.JSX.Opening in - match jsx_attr with - | Attribute attr -> - Attribute (this#jsx_attribute attr) - | SpreadAttribute (annot, attr) -> - SpreadAttribute (this#on_loc_annot annot, this#jsx_spread_attribute attr) - - method jsx_spread_attribute (attr: 
('M, 'T) Ast.JSX.SpreadAttribute.t') - : ('N, 'U) Ast.JSX.SpreadAttribute.t' = - let open Ast.JSX.SpreadAttribute in - let { argument } = attr in - { argument = this#expression argument } - - method jsx_attribute (attr: ('M, 'T) Ast.JSX.Attribute.t) : ('N, 'U) Ast.JSX.Attribute.t = - let open Ast.JSX.Attribute in - let annot, { name; value } = attr in - let name' = match name with - | Identifier id -> Identifier (this#jsx_identifier id) - | NamespacedName nname -> NamespacedName (this#jsx_namespaced_name nname) - in - let value' = Option.map ~f:this#jsx_attribute_value value in - this#on_loc_annot annot, { name = name'; value = value' } - - method jsx_attribute_value (value: ('M, 'T) Ast.JSX.Attribute.value) - : ('N, 'U) Ast.JSX.Attribute.value = - let open Ast.JSX.Attribute in - match value with - | Literal (annot, lit) -> Literal (this#on_type_annot annot, lit) - | ExpressionContainer (annot, expr) -> - ExpressionContainer (this#on_type_annot annot, this#jsx_expression expr) - - method jsx_child (child: ('M, 'T) Ast.JSX.child) : ('N, 'U) Ast.JSX.child = - let open Ast.JSX in - let annot, child' = child in - this#on_loc_annot annot, - match child' with - | Element elem -> - Element (this#jsx_element elem) - | Fragment frag -> - Fragment (this#jsx_fragment frag) - | ExpressionContainer expr -> - ExpressionContainer (this#jsx_expression expr) - | SpreadChild expr -> - SpreadChild (this#expression expr) - | Text _ as child' -> child' - - method jsx_expression (jsx_expr: ('M, 'T) Ast.JSX.ExpressionContainer.t) - : ('N, 'U) Ast.JSX.ExpressionContainer.t = - let open Ast.JSX.ExpressionContainer in - let { expression } = jsx_expr in - let expression' = match expression with - | Expression expr -> Expression (this#expression expr) - | EmptyExpression annot -> EmptyExpression (this#on_loc_annot annot) - in - { expression = expression' } - - method jsx_name (name: ('M, 'T) Ast.JSX.name): ('N, 'U) Ast.JSX.name = - let open Ast.JSX in - match name with - | Identifier id -> Identifier (this#jsx_identifier id) - | NamespacedName namespaced_name -> - NamespacedName (this#jsx_namespaced_name namespaced_name) - | MemberExpression member_exp -> - MemberExpression (this#jsx_member_expression member_exp) - - method jsx_namespaced_name (namespaced_name: ('M, 'T) Ast.JSX.NamespacedName.t) - : ('N, 'U) Ast.JSX.NamespacedName.t = - let open Ast.JSX in - let open NamespacedName in - let annot, {namespace; name} = namespaced_name in - let namespace' = this#jsx_identifier namespace in - let name' = this#jsx_identifier name in - this#on_loc_annot annot, {namespace=namespace'; name=name'} - - method jsx_member_expression (member_exp: ('M, 'T) Ast.JSX.MemberExpression.t) - : ('N, 'U) Ast.JSX.MemberExpression.t = - let open Ast.JSX in - let annot, {MemberExpression._object; MemberExpression.property} = member_exp in - let _object' = match _object with - | MemberExpression.Identifier id -> - let id' = this#jsx_identifier id in - MemberExpression.Identifier id' - | MemberExpression.MemberExpression nested_exp -> - let nested_exp' = this#jsx_member_expression nested_exp in - MemberExpression.MemberExpression nested_exp' - in - let property' = this#jsx_identifier property in - this#on_loc_annot annot, MemberExpression.({_object=_object'; property=property'}) - - method jsx_identifier ((annot, name): 'T Ast.JSX.Identifier.t) : 'U Ast.JSX.Identifier.t = - this#on_type_annot annot, name - - method labeled_statement (stmt: ('M, 'T) Ast.Statement.Labeled.t) - : ('N, 'U) Ast.Statement.Labeled.t = - let open Ast.Statement.Labeled 
in - let { label; body } = stmt in - let label' = this#label_identifier label in - let body' = this#statement body in - { label = label'; body = body' } - - method literal (expr: Ast.Literal.t) : Ast.Literal.t = expr - - method logical (expr: ('M, 'T) Ast.Expression.Logical.t) : ('N, 'U) Ast.Expression.Logical.t = - let open Ast.Expression.Logical in - let { operator; left; right } = expr in - let left' = this#expression left in - let right' = this#expression right in - { operator; left = left'; right = right' } - - method member (expr: ('M, 'T) Ast.Expression.Member.t) : ('N, 'U) Ast.Expression.Member.t = - let open Ast.Expression.Member in - let { _object; property; computed } = expr in - let _object' = this#expression _object in - let property' = this#member_property property in - { _object = _object'; property = property'; computed } - - method optional_member (expr: ('M, 'T) Ast.Expression.OptionalMember.t) - : ('N, 'U) Ast.Expression.OptionalMember.t = - let open Ast.Expression.OptionalMember in - let { member; optional } = expr in - let member' = this#member member in - { member = member'; optional } - - method member_property (expr: ('M, 'T) Ast.Expression.Member.property) - : ('N, 'U) Ast.Expression.Member.property = - let open Ast.Expression.Member in - match expr with - | PropertyIdentifier ident -> - PropertyIdentifier (this#member_property_identifier ident) - | PropertyPrivateName ident -> - PropertyPrivateName (this#member_private_name ident) - | PropertyExpression e -> - PropertyExpression (this#member_property_expression e) - - method member_property_identifier (ident: 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - this#t_identifier ident - - method member_private_name (name: 'M Ast.PrivateName.t) : 'N Ast.PrivateName.t = - this#private_name name - - method member_property_expression (expr: ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = - this#expression expr - - method meta_property (expr: 'M Ast.Expression.MetaProperty.t) - : 'N Ast.Expression.MetaProperty.t = - let open Ast.Expression.MetaProperty in - let { meta; property } = expr in - { meta = (this#on_loc_annot * id) meta; property = (this#on_loc_annot * id) property } - - method new_ (expr: ('M, 'T) Ast.Expression.New.t) : ('N, 'U) Ast.Expression.New.t = - let open Ast.Expression.New in - let { callee; targs; arguments } = expr in - let callee' = this#expression callee in - let targs' = Option.map ~f:this#type_parameter_instantiation targs in - let arguments' = List.map this#expression_or_spread arguments in - { callee = callee'; targs = targs'; arguments = arguments' } - - method object_ (expr: ('M, 'T) Ast.Expression.Object.t) : ('N, 'U) Ast.Expression.Object.t = - let open Ast.Expression.Object in - let { properties } = expr in - let properties' = List.map (fun prop -> - match prop with - | Property p -> - let p' = this#object_property p in - Property p' - | SpreadProperty s -> - let s' = this#spread_property s in - SpreadProperty s' - ) properties in - { properties = properties' } - - method object_property (prop: ('M, 'T) Ast.Expression.Object.Property.t) - : ('N, 'U) Ast.Expression.Object.Property.t = - let open Ast.Expression.Object.Property in - let annot, prop' = prop in - this#on_loc_annot annot, - match prop' with - | Init { key; value; shorthand } -> - let key' = this#object_key key in - let value' = this#expression value in - Init { key = key'; value = value'; shorthand } - - | Method { key; value = fn_annot, fn } -> - let key' = this#object_key key in - let fn' = this#function_ fn in - Method 
{ key = key'; value = this#on_loc_annot fn_annot, fn' } - - | Get { key; value = fn_annot, fn } -> - let key' = this#object_key key in - let fn' = this#function_ fn in - Get { key = key'; value = this#on_loc_annot fn_annot, fn' } - - | Set { key; value = fn_annot, fn } -> - let key' = this#object_key key in - let fn' = this#function_ fn in - Set { key = key'; value = this#on_loc_annot fn_annot, fn' } - - method object_key (key: ('M, 'T) Ast.Expression.Object.Property.key) = - let open Ast.Expression.Object.Property in - match key with - | Literal (annot, lit) -> - Literal (this#on_type_annot annot, this#literal lit) - | Identifier ident -> - Identifier (this#object_key_identifier ident) - | PrivateName ident -> - PrivateName (this#private_name ident) - | Computed expr -> - Computed (this#expression expr) - - method object_key_identifier (ident: 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - this#t_identifier ident - - method opaque_type (otype: ('M, 'T) Ast.Statement.OpaqueType.t) - : ('N, 'U) Ast.Statement.OpaqueType.t = - let open Ast.Statement.OpaqueType in - let { id; tparams; impltype; supertype } = otype in - let id' = this#t_identifier id in - this#type_parameter_declaration_opt tparams (fun tparams' -> - let impltype' = Option.map ~f:this#type_ impltype in - let supertype' = Option.map ~f:this#type_ supertype in - { - id = id'; - tparams = tparams'; - impltype = impltype'; - supertype = supertype' - } - ) - - method declare_opaque_type (otype: ('M, 'T) Ast.Statement.OpaqueType.t) - : ('N, 'U) Ast.Statement.OpaqueType.t = - this#opaque_type otype - - method function_param_pattern (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#binding_pattern expr - - method variable_declarator_pattern ~kind (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#binding_pattern ~kind expr - - method catch_clause_pattern (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr - - method for_in_assignment_pattern (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#assignment_pattern expr - - method for_of_assignment_pattern (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#assignment_pattern expr - - method binding_pattern - ?(kind=Ast.Statement.VariableDeclaration.Var) - (expr: ('M, 'T) Ast.Pattern.t) - : ('N, 'U) Ast.Pattern.t = - this#pattern ~kind expr - - method assignment_pattern (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#pattern expr - - (* NOTE: Patterns are highly overloaded. 
A pattern can be a binding pattern, + method type_parameter_declaration_type_param + (type_param : ('M, 'T) Ast.Type.ParameterDeclaration.TypeParam.t) + : ('N, 'U) Ast.Type.ParameterDeclaration.TypeParam.t = + Ast.Type.ParameterDeclaration.TypeParam.( + let (annot, { name; bound; variance; default }) = type_param in + let name' = this#t_identifier name in + let bound' = this#type_annotation_hint bound in + let variance' = Option.map ~f:(this#on_loc_annot * id) variance in + let default' = Option.map ~f:this#type_ default in + ( this#on_type_annot annot, + { name = name'; bound = bound'; variance = variance'; default = default' } )) + + method generic_type (gt : ('M, 'T) Ast.Type.Generic.t) : ('N, 'U) Ast.Type.Generic.t = + Ast.Type.Generic.( + let { id; targs } = gt in + let id' = this#generic_identifier_type id in + let targs' = Option.map ~f:this#type_parameter_instantiation targs in + { id = id'; targs = targs' }) + + method type_predicate ((annot, pred) : ('M, 'T) Ast.Type.Predicate.t) + : ('N, 'U) Ast.Type.Predicate.t = + Ast.Type.Predicate. + ( this#on_loc_annot annot, + match pred with + | Declared e -> Declared (this#expression e) + | Inferred -> Inferred ) + + method type_ ((annot, t) : ('M, 'T) Ast.Type.t) : ('N, 'U) Ast.Type.t = + Ast.Type. + ( this#on_type_annot annot, + match t with + | ( Any | Mixed | Empty | Void | Null | Number | BigInt | String | Boolean + | StringLiteral _ | NumberLiteral _ | BigIntLiteral _ | BooleanLiteral _ | Exists ) as + t -> + t + | Nullable t' -> Nullable (this#type_ t') + | Array t' -> Array (this#type_ t') + | Typeof t' -> Typeof (this#type_ t') + | Function ft -> Function (this#function_type ft) + | Object ot -> Object (this#object_type ot) + | Interface i -> Interface (this#interface_type i) + | Generic gt -> Generic (this#generic_type gt) + | Union (t0, t1, ts) -> + let t0' = this#type_ t0 in + let t1' = this#type_ t1 in + let ts' = Core_list.map ~f:this#type_ ts in + Union (t0', t1', ts') + | Intersection (t0, t1, ts) -> + let t0' = this#type_ t0 in + let t1' = this#type_ t1 in + let ts' = Core_list.map ~f:this#type_ ts in + Intersection (t0', t1', ts') + | Tuple ts -> + let ts' = Core_list.map ~f:this#type_ ts in + Tuple ts' ) + + method implicit (t : 'T) : 'U = this#on_type_annot t + + method explicit_or_implicit + (x : ('M, 'T) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation) + : ('N, 'U) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation = + Ast.Expression.TypeParameterInstantiation.( + match x with + | Explicit t -> Explicit (this#type_ t) + | Implicit t -> Implicit (this#implicit t)) + + method type_annotation ((annot, t_annot) : ('M, 'T) Ast.Type.annotation) = + (this#on_loc_annot annot, this#type_ t_annot) + + method type_annotation_hint (return : ('M, 'T) Ast.Type.annotation_or_hint) + : ('N, 'U) Ast.Type.annotation_or_hint = + Ast.Type.( + match return with + | Available annot -> Available (this#type_annotation annot) + | Missing loc -> Missing (this#on_type_annot loc)) + + method function_declaration (stmt : ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = + this#function_ stmt + + method function_expression (expr : ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = + this#function_ expr + + (* Internal helper for function declarations, function expressions and arrow functions *) + method function_ (expr : ('M, 'T) Ast.Function.t) : ('N, 'U) Ast.Function.t = + Ast.Function.( + let { id = ident; params; body; async; generator; predicate; return; tparams; sig_loc } = + expr + in + let 
ident' = Option.map ~f:this#t_function_identifier ident in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let params' = this#function_params params in + let return' = this#type_annotation_hint return in + let body' = this#function_body body in + let predicate' = Option.map ~f:this#type_predicate predicate in + let sig_loc' = this#on_loc_annot sig_loc in + { + id = ident'; + params = params'; + return = return'; + body = body'; + async; + generator; + predicate = predicate'; + tparams = tparams'; + sig_loc = sig_loc'; + })) + + method function_params (params : ('M, 'T) Ast.Function.Params.t) + : ('N, 'U) Ast.Function.Params.t = + let (annot, { Ast.Function.Params.params = params_list; rest }) = params in + let params_list' = Core_list.map ~f:this#function_param params_list in + let rest' = Option.map ~f:this#function_rest_param rest in + (this#on_loc_annot annot, { Ast.Function.Params.params = params_list'; rest = rest' }) + + method function_param (param : ('M, 'T) Ast.Function.Param.t) : ('N, 'U) Ast.Function.Param.t = + Ast.Function.Param.( + let (annot, { argument; default }) = param in + let annot' = this#on_loc_annot annot in + let argument' = this#function_param_pattern argument in + let default' = Option.map ~f:this#expression default in + (annot', { argument = argument'; default = default' })) + + method function_rest_param (expr : ('M, 'T) Ast.Function.RestParam.t) + : ('N, 'U) Ast.Function.RestParam.t = + Ast.Function.RestParam.( + let (annot, { argument }) = expr in + (this#on_loc_annot annot, { argument = this#function_param_pattern argument })) + + method function_body body = + Ast.Function.( + match body with + | BodyBlock body -> BodyBlock ((this#on_loc_annot * this#block) body) + | BodyExpression expr -> BodyExpression (this#expression expr)) + + method function_identifier (ident : ('M, 'M) Ast.Identifier.t) : ('N, 'N) Ast.Identifier.t = + this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident + + method t_function_identifier (ident : ('M, 'T) Ast.Identifier.t) : ('N, 'U) Ast.Identifier.t = + this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident + + method generator (expr : ('M, 'T) Ast.Expression.Generator.t) + : ('N, 'U) Ast.Expression.Generator.t = + Ast.Expression.Generator.( + let { blocks; filter } = expr in + let blocks' = Core_list.map ~f:this#comprehension_block blocks in + let filter' = Option.map ~f:this#expression filter in + { blocks = blocks'; filter = filter' }) + + method identifier ((annot, { Ast.Identifier.name; comments }) : ('M, 'M) Ast.Identifier.t) + : ('N, 'N) Ast.Identifier.t = + let annot = this#on_loc_annot annot in + let comments = Option.map ~f:this#syntax comments in + (annot, { Ast.Identifier.name; comments }) + + method t_identifier ((annot, { Ast.Identifier.name; comments }) : ('M, 'T) Ast.Identifier.t) + : ('N, 'U) Ast.Identifier.t = + let annot = this#on_type_annot annot in + let comments = Option.map ~f:this#syntax comments in + (annot, { Ast.Identifier.name; comments }) + + method interface (interface : ('M, 'T) Ast.Statement.Interface.t) + : ('N, 'U) Ast.Statement.Interface.t = + Ast.Statement.Interface.( + let { id = ident; tparams; extends; body } = interface in + let id' = this#class_identifier ident in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let extends' = Core_list.map ~f:(this#on_loc_annot * this#generic_type) extends in + let body' = (this#on_loc_annot * this#object_type) body in + { id = id'; tparams = tparams'; extends = extends'; body = body' 
})) + + method interface_declaration (decl : ('M, 'T) Ast.Statement.Interface.t) + : ('N, 'U) Ast.Statement.Interface.t = + this#interface decl + + method private_name ((annot, ident) : 'M Ast.PrivateName.t) : 'N Ast.PrivateName.t = + (this#on_loc_annot annot, this#identifier ident) + + method import _annot (expr : ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = + this#expression expr + + method if_consequent_statement ~has_else (stmt : ('M, 'T) Ast.Statement.t) + : ('N, 'U) Ast.Statement.t = + ignore has_else; + this#statement stmt + + method if_statement (stmt : ('M, 'T) Ast.Statement.If.t) : ('N, 'U) Ast.Statement.If.t = + Ast.Statement.If.( + let { test; consequent; alternate; comments } = stmt in + let test' = this#predicate_expression test in + let consequent' = this#if_consequent_statement ~has_else:(alternate <> None) consequent in + let alternate' = Option.map ~f:this#statement alternate in + let comments' = Option.map ~f:this#syntax comments in + { test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }) + + method import_declaration _loc (decl : ('M, 'T) Ast.Statement.ImportDeclaration.t) + : ('N, 'U) Ast.Statement.ImportDeclaration.t = + Ast.Statement.ImportDeclaration.( + let { importKind; source; specifiers; default } = decl in + let specifiers' = Option.map ~f:this#import_specifier specifiers in + let default' = Option.map ~f:this#import_default_specifier default in + let source' = (this#on_loc_annot * id) source in + { importKind; source = source'; specifiers = specifiers'; default = default' }) + + method import_specifier (specifier : ('M, 'T) Ast.Statement.ImportDeclaration.specifier) + : ('N, 'U) Ast.Statement.ImportDeclaration.specifier = + Ast.Statement.ImportDeclaration.( + match specifier with + | ImportNamedSpecifiers named_specifiers -> + let named_specifiers' = Core_list.map ~f:this#import_named_specifier named_specifiers in + ImportNamedSpecifiers named_specifiers' + | ImportNamespaceSpecifier (annot, ident) -> + let ident' = this#import_namespace_specifier ident in + ImportNamespaceSpecifier (this#on_loc_annot annot, ident')) + + method import_named_specifier + (specifier : ('M, 'T) Ast.Statement.ImportDeclaration.named_specifier) + : ('N, 'U) Ast.Statement.ImportDeclaration.named_specifier = + Ast.Statement.ImportDeclaration.( + let { kind; local; remote } = specifier in + let local' = Option.map ~f:this#t_pattern_identifier local in + let remote' = this#t_pattern_identifier remote in + { kind; local = local'; remote = remote' }) + + method import_default_specifier (id : ('M, 'T) Ast.Identifier.t) : ('N, 'U) Ast.Identifier.t = + this#t_pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let id + + method import_namespace_specifier (id : ('M, 'M) Ast.Identifier.t) : ('N, 'N) Ast.Identifier.t + = + this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let id + + method jsx_element (expr : ('M, 'T) Ast.JSX.element) = + Ast.JSX.( + let { openingElement; closingElement; children } = expr in + let openingElement' = this#jsx_opening_element openingElement in + let closingElement' = Option.map ~f:this#jsx_closing_element closingElement in + let children' = this#jsx_children children in + { + openingElement = openingElement'; + closingElement = closingElement'; + children = children'; + }) + + method jsx_fragment (expr : ('M, 'T) Ast.JSX.fragment) : ('N, 'U) Ast.JSX.fragment = + Ast.JSX.( + let { frag_openingElement; frag_closingElement; frag_children } = expr in + let opening' = this#on_loc_annot 
frag_openingElement in + let closing' = this#on_loc_annot frag_closingElement in + let children' = this#jsx_children frag_children in + { + frag_openingElement = opening'; + frag_closingElement = closing'; + frag_children = children'; + }) + + method jsx_opening_element (elem : ('M, 'T) Ast.JSX.Opening.t) : ('N, 'U) Ast.JSX.Opening.t = + Ast.JSX.Opening.( + let (annot, { name; selfClosing; attributes }) = elem in + let name' = this#jsx_name name in + let attributes' = Core_list.map ~f:this#jsx_opening_attribute attributes in + (this#on_loc_annot annot, { name = name'; selfClosing; attributes = attributes' })) + + method jsx_closing_element (elem : ('M, 'T) Ast.JSX.Closing.t) : ('N, 'U) Ast.JSX.Closing.t = + Ast.JSX.Closing.( + let (annot, { name }) = elem in + let name' = this#jsx_name name in + (this#on_loc_annot annot, { name = name' })) + + method jsx_opening_attribute (jsx_attr : ('M, 'T) Ast.JSX.Opening.attribute) + : ('N, 'U) Ast.JSX.Opening.attribute = + Ast.JSX.Opening.( + match jsx_attr with + | Attribute attr -> Attribute (this#jsx_attribute attr) + | SpreadAttribute (annot, attr) -> + SpreadAttribute (this#on_loc_annot annot, this#jsx_spread_attribute attr)) + + method jsx_spread_attribute (attr : ('M, 'T) Ast.JSX.SpreadAttribute.t') + : ('N, 'U) Ast.JSX.SpreadAttribute.t' = + Ast.JSX.SpreadAttribute.( + let { argument } = attr in + { argument = this#expression argument }) + + method jsx_attribute (attr : ('M, 'T) Ast.JSX.Attribute.t) : ('N, 'U) Ast.JSX.Attribute.t = + Ast.JSX.Attribute.( + let (annot, { name; value }) = attr in + let name' = + match name with + | Identifier id -> Identifier (this#jsx_identifier id) + | NamespacedName nname -> NamespacedName (this#jsx_namespaced_name nname) + in + let value' = Option.map ~f:this#jsx_attribute_value value in + (this#on_loc_annot annot, { name = name'; value = value' })) + + method jsx_attribute_value (value : ('M, 'T) Ast.JSX.Attribute.value) + : ('N, 'U) Ast.JSX.Attribute.value = + Ast.JSX.Attribute.( + match value with + | Literal (annot, lit) -> Literal (this#on_type_annot annot, this#literal lit) + | ExpressionContainer (annot, expr) -> + ExpressionContainer (this#on_type_annot annot, this#jsx_expression expr)) + + method jsx_children (children : 'M * ('M, 'T) Ast.JSX.child list) + : 'N * ('N, 'U) Ast.JSX.child list = + let (annot, children') = children in + (this#on_loc_annot annot, Core_list.map ~f:this#jsx_child children') + + method jsx_child (child : ('M, 'T) Ast.JSX.child) : ('N, 'U) Ast.JSX.child = + Ast.JSX.( + let (annot, child') = child in + ( this#on_loc_annot annot, + match child' with + | Element elem -> Element (this#jsx_element elem) + | Fragment frag -> Fragment (this#jsx_fragment frag) + | ExpressionContainer expr -> ExpressionContainer (this#jsx_expression expr) + | SpreadChild expr -> SpreadChild (this#expression expr) + | Text _ as child' -> child' )) + + method jsx_expression (jsx_expr : ('M, 'T) Ast.JSX.ExpressionContainer.t) + : ('N, 'U) Ast.JSX.ExpressionContainer.t = + Ast.JSX.ExpressionContainer.( + let { expression } = jsx_expr in + let expression' = + match expression with + | Expression expr -> Expression (this#expression expr) + | EmptyExpression -> EmptyExpression + in + { expression = expression' }) + + method jsx_name (name : ('M, 'T) Ast.JSX.name) : ('N, 'U) Ast.JSX.name = + Ast.JSX.( + match name with + | Identifier id -> Identifier (this#jsx_identifier id) + | NamespacedName namespaced_name -> + NamespacedName (this#jsx_namespaced_name namespaced_name) + | MemberExpression member_exp -> 
MemberExpression (this#jsx_member_expression member_exp)) + + method jsx_namespaced_name (namespaced_name : ('M, 'T) Ast.JSX.NamespacedName.t) + : ('N, 'U) Ast.JSX.NamespacedName.t = + Ast.JSX.( + NamespacedName.( + let (annot, { namespace; name }) = namespaced_name in + let namespace' = this#jsx_identifier namespace in + let name' = this#jsx_identifier name in + (this#on_loc_annot annot, { namespace = namespace'; name = name' }))) + + method jsx_member_expression (member_exp : ('M, 'T) Ast.JSX.MemberExpression.t) + : ('N, 'U) Ast.JSX.MemberExpression.t = + Ast.JSX.( + let (annot, { MemberExpression._object; MemberExpression.property }) = member_exp in + let _object' = this#jsx_member_expression_object _object in + let property' = this#jsx_identifier property in + (this#on_loc_annot annot, MemberExpression.{ _object = _object'; property = property' })) + + method jsx_member_expression_object (_object : ('M, 'T) Ast.JSX.MemberExpression._object) + : ('N, 'U) Ast.JSX.MemberExpression._object = + Ast.JSX.MemberExpression.( + match _object with + | Identifier id -> + let id' = this#jsx_identifier id in + Identifier id' + | MemberExpression nested_exp -> + let nested_exp' = this#jsx_member_expression nested_exp in + MemberExpression nested_exp') + + method jsx_identifier ((annot, name) : 'T Ast.JSX.Identifier.t) : 'U Ast.JSX.Identifier.t = + (this#on_type_annot annot, name) + + method labeled_statement (stmt : ('M, 'T) Ast.Statement.Labeled.t) + : ('N, 'U) Ast.Statement.Labeled.t = + Ast.Statement.Labeled.( + let { label; body } = stmt in + let label' = this#label_identifier label in + let body' = this#statement body in + { label = label'; body = body' }) + + method literal (expr : 'M Ast.Literal.t) : 'N Ast.Literal.t = + Ast.Literal.( + let { comments; _ } = expr in + { expr with comments = Option.map ~f:this#syntax comments }) + + method logical (expr : ('M, 'T) Ast.Expression.Logical.t) : ('N, 'U) Ast.Expression.Logical.t = + Ast.Expression.Logical.( + let { operator; left; right } = expr in + let left' = this#expression left in + let right' = this#expression right in + { operator; left = left'; right = right' }) + + method member (expr : ('M, 'T) Ast.Expression.Member.t) : ('N, 'U) Ast.Expression.Member.t = + Ast.Expression.Member.( + let { _object; property } = expr in + let _object' = this#expression _object in + let property' = this#member_property property in + { _object = _object'; property = property' }) + + method optional_member (expr : ('M, 'T) Ast.Expression.OptionalMember.t) + : ('N, 'U) Ast.Expression.OptionalMember.t = + Ast.Expression.OptionalMember.( + let { member; optional } = expr in + let member' = this#member member in + { member = member'; optional }) + + method member_property (expr : ('M, 'T) Ast.Expression.Member.property) + : ('N, 'U) Ast.Expression.Member.property = + Ast.Expression.Member.( + match expr with + | PropertyIdentifier ident -> PropertyIdentifier (this#member_property_identifier ident) + | PropertyPrivateName ident -> PropertyPrivateName (this#member_private_name ident) + | PropertyExpression e -> PropertyExpression (this#member_property_expression e)) + + method member_property_identifier (ident : ('M, 'T) Ast.Identifier.t) + : ('N, 'U) Ast.Identifier.t = + this#t_identifier ident + + method member_private_name (name : 'M Ast.PrivateName.t) : 'N Ast.PrivateName.t = + this#private_name name + + method member_property_expression (expr : ('M, 'T) Ast.Expression.t) + : ('N, 'U) Ast.Expression.t = + this#expression expr + + method meta_property (expr : 'M 
Ast.Expression.MetaProperty.t) + : 'N Ast.Expression.MetaProperty.t = + Ast.Expression.MetaProperty.( + let { meta; property } = expr in + { meta = this#identifier meta; property = this#identifier property }) + + method new_ (expr : ('M, 'T) Ast.Expression.New.t) : ('N, 'U) Ast.Expression.New.t = + Ast.Expression.New.( + let { callee; targs; arguments; comments } = expr in + let callee' = this#expression callee in + let targs' = Option.map ~f:this#type_parameter_instantiation_with_implicit targs in + let arguments' = Core_list.map ~f:this#expression_or_spread arguments in + let comments' = Option.map ~f:this#syntax comments in + { callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }) + + method object_ (expr : ('M, 'T) Ast.Expression.Object.t) : ('N, 'U) Ast.Expression.Object.t = + Ast.Expression.Object.( + let { properties; comments } = expr in + let comments' = Option.map ~f:this#syntax comments in + let properties' = List.map this#object_property_or_spread_property properties in + { properties = properties'; comments = comments' }) + + method object_property_or_spread_property (prop : ('M, 'T) Ast.Expression.Object.property) + : ('N, 'U) Ast.Expression.Object.property = + Ast.Expression.Object.( + match prop with + | Property p -> Property (this#object_property p) + | SpreadProperty s -> SpreadProperty (this#spread_property s)) + + method object_property (prop : ('M, 'T) Ast.Expression.Object.Property.t) + : ('N, 'U) Ast.Expression.Object.Property.t = + Ast.Expression.Object.Property.( + let (annot, prop') = prop in + ( this#on_loc_annot annot, + match prop' with + | Init { key; value; shorthand } -> + let key' = this#object_key key in + let value' = this#expression value in + Init { key = key'; value = value'; shorthand } + | Method { key; value = (fn_annot, fn) } -> + let key' = this#object_key key in + let fn' = this#function_expression fn in + Method { key = key'; value = (this#on_loc_annot fn_annot, fn') } + | Get { key; value = (fn_annot, fn) } -> + let key' = this#object_key key in + let fn' = this#function_expression fn in + Get { key = key'; value = (this#on_loc_annot fn_annot, fn') } + | Set { key; value = (fn_annot, fn) } -> + let key' = this#object_key key in + let fn' = this#function_expression fn in + Set { key = key'; value = (this#on_loc_annot fn_annot, fn') } )) + + method object_key (key : ('M, 'T) Ast.Expression.Object.Property.key) + : ('N, 'U) Ast.Expression.Object.Property.key = + Ast.Expression.Object.Property.( + match key with + | Literal (annot, lit) -> Literal (this#on_type_annot annot, this#literal lit) + | Identifier ident -> Identifier (this#object_key_identifier ident) + | PrivateName ident -> PrivateName (this#private_name ident) + | Computed expr -> Computed (this#expression expr)) + + method object_key_identifier (ident : ('M, 'T) Ast.Identifier.t) : ('N, 'U) Ast.Identifier.t = + this#t_identifier ident + + method opaque_type (otype : ('M, 'T) Ast.Statement.OpaqueType.t) + : ('N, 'U) Ast.Statement.OpaqueType.t = + Ast.Statement.OpaqueType.( + let { id; tparams; impltype; supertype } = otype in + let id' = this#t_identifier id in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let impltype' = Option.map ~f:this#type_ impltype in + let supertype' = Option.map ~f:this#type_ supertype in + { id = id'; tparams = tparams'; impltype = impltype'; supertype = supertype' })) + + method declare_opaque_type (otype : ('M, 'T) Ast.Statement.OpaqueType.t) + : ('N, 'U) Ast.Statement.OpaqueType.t = + this#opaque_type otype + 
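(* Editor's note (illustrative sketch, not part of this patch): the class being
   rewritten here is polymorphic in its annotations, and the only virtual
   methods are [on_loc_annot : 'M -> 'N] and [on_type_annot : 'T -> 'U]; every
   other method, like [opaque_type] above, defaults to a structure-preserving
   traversal. A caller therefore instantiates it roughly as below. The wrapper
   name [annot_mapper] and the use of [#program] as the entry point are
   assumptions made for the example; the module name follows the file being
   edited in this diff. *)
class ['m, 't] annot_mapper (f_loc : 'm -> 'm) (f_ty : 't -> 't) =
  object
    inherit ['m, 't, 'm, 't] Flow_polymorphic_ast_mapper.mapper
    method on_loc_annot loc = f_loc loc
    method on_type_annot ty = f_ty ty
  end
(* e.g., an identity pass that keeps both annotations unchanged:
   let ast' = (new annot_mapper (fun l -> l) (fun t -> t))#program ast *)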
+ method function_param_pattern (expr : ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = + this#binding_pattern expr + + method variable_declarator_pattern ~kind (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#binding_pattern ~kind expr + + method catch_clause_pattern (expr : ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = + this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr + + method for_in_assignment_pattern (expr : ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = + this#assignment_pattern expr + + method for_of_assignment_pattern (expr : ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = + this#assignment_pattern expr + + method binding_pattern + ?(kind = Ast.Statement.VariableDeclaration.Var) (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#pattern ~kind expr + + method assignment_pattern (expr : ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = + this#pattern expr + + (* NOTE: Patterns are highly overloaded. A pattern can be a binding pattern, which has a kind (Var/Let/Const, with Var being the default for all pre-ES5 bindings), or an assignment pattern, which has no kind. Subterms that are patterns inherit the kind (or lack thereof). *) - method pattern ?kind (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - let open Ast.Pattern in - let annot, patt = expr in - this#on_type_annot annot, - match patt with - | Object { Object.properties; annot } -> - let properties' = List.map (this#pattern_object_p ?kind) properties in - let annot' = Option.map ~f:this#type_annotation annot in - Object { Object.properties = properties'; annot = annot' } - | Array { Array.elements; annot } -> - let elements' = List.map (Option.map ~f:(this#pattern_array_e ?kind)) elements in - let annot' = Option.map ~f:this#type_annotation annot in - Array { Array.elements = elements'; annot = annot' } - | Assignment { Assignment.left; right } -> - let left' = this#pattern_assignment_pattern ?kind left in - let right' = this#expression right in - Assignment { Assignment.left = left'; right = right' } - | Identifier { Identifier.name; annot; optional } -> - let name' = this#t_pattern_identifier ?kind name in - let annot' = Option.map ~f:this#type_annotation annot in - Identifier { Identifier.name = name'; annot = annot'; optional } - | Expression e -> - Expression (this#pattern_expression e) - - method pattern_identifier ?kind (ident: 'M Ast.Identifier.t) : 'N Ast.Identifier.t = - ignore kind; - this#identifier ident - - method t_pattern_identifier ?kind (ident: 'T Ast.Identifier.t) : 'U Ast.Identifier.t = - ignore kind; - this#t_identifier ident - - method pattern_literal ?kind (expr: Ast.Literal.t) : Ast.Literal.t = - ignore kind; - this#literal expr - - method pattern_object_p ?kind (p: ('M, 'T) Ast.Pattern.Object.property) = - let open Ast.Pattern.Object in - match p with - | Property (annot, prop) -> - Property (this#on_loc_annot annot, this#pattern_object_property ?kind prop) - | RestProperty (annot, prop) -> - RestProperty (this#on_loc_annot annot, this#pattern_object_rest_property ?kind prop) - - method pattern_object_property ?kind (prop: ('M, 'T) Ast.Pattern.Object.Property.t') - : ('N, 'U) Ast.Pattern.Object.Property.t' = - let open Ast.Pattern.Object.Property in - let { key; pattern; shorthand = _ } = prop in - let key' = this#pattern_object_property_key ?kind key in - let pattern' = this#pattern_object_property_pattern ?kind pattern in - { key = key'; pattern = pattern'; shorthand = false } - - method pattern_object_property_key 
?kind (key: ('M, 'T) Ast.Pattern.Object.Property.key) = - let open Ast.Pattern.Object.Property in - match key with - | Literal (annot, lit) -> - Literal (this#on_loc_annot annot, this#pattern_object_property_literal_key ?kind lit) - | Identifier identifier -> - Identifier (this#pattern_object_property_identifier_key ?kind identifier) - | Computed expr -> - Computed (this#pattern_object_property_computed_key ?kind expr) - - method pattern_object_property_literal_key ?kind (key: Ast.Literal.t) : Ast.Literal.t = - this#pattern_literal ?kind key - - method pattern_object_property_identifier_key ?kind (key: 'M Ast.Identifier.t) - : 'N Ast.Identifier.t = - this#pattern_identifier ?kind key - - method pattern_object_property_computed_key ?kind (key: ('M, 'T) Ast.Expression.t) - : ('N, 'U) Ast.Expression.t = - ignore kind; - this#pattern_expression key - - method pattern_object_rest_property ?kind (prop: ('M, 'T) Ast.Pattern.Object.RestProperty.t') - : ('N, 'U) Ast.Pattern.Object.RestProperty.t' = - let open Ast.Pattern.Object.RestProperty in - let { argument } = prop in - let argument' = this#pattern_object_rest_property_pattern ?kind argument in - { argument = argument' } - - method pattern_object_property_pattern ?kind (expr: ('M, 'T) Ast.Pattern.t) - : ('N, 'U) Ast.Pattern.t = - this#pattern ?kind expr - - method pattern_object_rest_property_pattern ?kind (expr: ('M, 'T) Ast.Pattern.t) - : ('N, 'U) Ast.Pattern.t = - this#pattern ?kind expr - - method pattern_array_e ?kind (e: ('M, 'T) Ast.Pattern.Array.element) - : ('N, 'U) Ast.Pattern.Array.element = - let open Ast.Pattern.Array in - match e with - | Element elem -> - Element (this#pattern_array_element_pattern ?kind elem) - | RestElement (annot, elem) -> - RestElement (this#on_loc_annot annot, this#pattern_array_rest_element ?kind elem) - - method pattern_array_element_pattern ?kind (expr: ('M, 'T) Ast.Pattern.t) - : ('N, 'U) Ast.Pattern.t = - this#pattern ?kind expr - - method pattern_array_rest_element ?kind (elem: ('M, 'T) Ast.Pattern.Array.RestElement.t') = - let open Ast.Pattern.Array.RestElement in - let { argument } = elem in - let argument' = this#pattern_array_rest_element_pattern ?kind argument in - { argument = argument' } - - method pattern_array_rest_element_pattern ?kind (expr: ('M, 'T) Ast.Pattern.t) - : ('N, 'U) Ast.Pattern.t = - this#pattern ?kind expr - - method pattern_assignment_pattern ?kind (expr: ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = - this#pattern ?kind expr - - method pattern_expression (expr: ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = - this#expression expr - - method predicate_expression (expr: ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = - this#expression expr - - method function_rest_element (expr: ('M, 'T) Ast.Function.RestElement.t) - : ('N, 'U) Ast.Function.RestElement.t = - let open Ast.Function.RestElement in - let annot, { argument } = expr in - this#on_loc_annot annot, { argument = this#pattern argument } - - method return (stmt: ('M, 'T) Ast.Statement.Return.t) : ('N, 'U) Ast.Statement.Return.t = - let open Ast.Statement.Return in - let { argument } = stmt in - let argument' = Option.map ~f:this#expression argument in - { argument = argument' } - - method sequence (expr: ('M, 'T) Ast.Expression.Sequence.t) : ('N, 'U) Ast.Expression.Sequence.t = - let open Ast.Expression.Sequence in - let { expressions } = expr in - let expressions' = List.map this#expression expressions in - { expressions = expressions' } - - method toplevel_statement_list (stmts: ('M, 'T) 
Ast.Statement.t list) - : ('N, 'U) Ast.Statement.t list = - this#statement_list stmts - - method statement_list (stmts: ('M, 'T) Ast.Statement.t list) : ('N, 'U) Ast.Statement.t list = - List.map this#statement stmts - - method spread_element (expr: ('M, 'T) Ast.Expression.SpreadElement.t) - : ('N, 'U) Ast.Expression.SpreadElement.t = - let open Ast.Expression.SpreadElement in - let annot, { argument } = expr in - this#on_loc_annot annot, { argument = this#expression argument } - - method spread_property (expr: ('M, 'T) Ast.Expression.Object.SpreadProperty.t) - : ('N, 'U) Ast.Expression.Object.SpreadProperty.t = - let open Ast.Expression.Object.SpreadProperty in - let annot, { argument } = expr in - this#on_loc_annot annot, { argument = this#expression argument } - - method switch (switch: ('M, 'T) Ast.Statement.Switch.t) : ('N, 'U) Ast.Statement.Switch.t = - let open Ast.Statement.Switch in - let { discriminant; cases } = switch in - let discriminant' = this#expression discriminant in - let cases' = List.map (this#on_loc_annot * this#switch_case) cases in - { discriminant = discriminant'; cases = cases' } - - method switch_case (case: ('M, 'T) Ast.Statement.Switch.Case.t') = - let open Ast.Statement.Switch.Case in - let { test; consequent } = case in - let test' = Option.map ~f:this#expression test in - let consequent' = this#statement_list consequent in - { test = test'; consequent = consequent' } - - method tagged_template (expr: ('M, 'T) Ast.Expression.TaggedTemplate.t) - : ('N, 'U) Ast.Expression.TaggedTemplate.t = - let open Ast.Expression.TaggedTemplate in - let { tag; quasi } = expr in - let tag' = this#expression tag in - let quasi' = (this#on_loc_annot * this#template_literal) quasi in - { tag = tag'; quasi = quasi' } - - method template_literal (expr: ('M, 'T) Ast.Expression.TemplateLiteral.t) - : ('N, 'U) Ast.Expression.TemplateLiteral.t = - let open Ast.Expression.TemplateLiteral in - let { quasis; expressions } = expr in - let quasis' = List.map this#template_literal_element quasis in - let expressions' = List.map this#expression expressions in - { quasis = quasis'; expressions = expressions' } - - method template_literal_element ((annot, elem): 'M Ast.Expression.TemplateLiteral.Element.t) - : 'N Ast.Expression.TemplateLiteral.Element.t = - this#on_loc_annot annot, elem - - method throw (stmt: ('M, 'T) Ast.Statement.Throw.t) : ('N, 'U) Ast.Statement.Throw.t = - let open Ast.Statement.Throw in - let { argument } = stmt in - { argument = this#expression argument } - - method try_catch (stmt: ('M, 'T) Ast.Statement.Try.t) : ('N, 'U) Ast.Statement.Try.t = - let open Ast.Statement.Try in - let { block; handler; finalizer } = stmt in - let block' = (this#on_loc_annot * this#block) block in - let handler' = Option.map ~f:(this#on_loc_annot * this#catch_clause) handler in - let finalizer' = Option.map ~f:(this#on_loc_annot * this#block) finalizer in - { - block = block'; - handler = handler'; - finalizer = finalizer' - } - - method type_cast (expr: ('M, 'T) Ast.Expression.TypeCast.t) : ('N, 'U) Ast.Expression.TypeCast.t = - let open Ast.Expression.TypeCast in - let { expression; annot; } = expr in - let expression' = this#expression expression in - let annot' = this#type_annotation annot in - { expression = expression'; annot = annot' } - - method unary_expression (expr: ('M, 'T) Ast.Expression.Unary.t) - : ('N, 'U) Ast.Expression.Unary.t = - let open Ast.Expression.Unary in - let { argument; operator; prefix; } = expr in - let argument' = this#expression argument in - { argument = 
argument'; operator; prefix } - - method update_expression (expr: ('M, 'T) Ast.Expression.Update.t) - : ('N, 'U) Ast.Expression.Update.t = - let open Ast.Expression.Update in - let { argument; operator; prefix; } = expr in - let argument' = this#expression argument in - { argument = argument'; operator; prefix } - - method variable_declaration (decl: ('M, 'T) Ast.Statement.VariableDeclaration.t) - : ('N, 'U) Ast.Statement.VariableDeclaration.t = - let open Ast.Statement.VariableDeclaration in - let { declarations; kind } = decl in - let decls' = List.map (this#variable_declarator ~kind) declarations in - { declarations = decls'; kind } - - method variable_declarator ~kind (decl: ('M, 'T) Ast.Statement.VariableDeclaration.Declarator.t) - : ('N, 'U) Ast.Statement.VariableDeclaration.Declarator.t = - let open Ast.Statement.VariableDeclaration.Declarator in - let annot, { id; init } = decl in - let id' = this#variable_declarator_pattern ~kind id in - let init' = Option.map ~f:this#expression init in - this#on_loc_annot annot, { id = id'; init = init' } - - method while_ (stuff: ('M, 'T) Ast.Statement.While.t) : ('N, 'U) Ast.Statement.While.t = - let open Ast.Statement.While in - let { test; body } = stuff in - let test' = this#predicate_expression test in - let body' = this#statement body in - { test = test'; body = body' } - - method with_ (stuff: ('M, 'T) Ast.Statement.With.t) : ('N, 'U) Ast.Statement.With.t = - let open Ast.Statement.With in - let { _object; body } = stuff in - let _object' = this#expression _object in - let body' = this#statement body in - { _object = _object'; body = body' } - - method type_alias (stuff: ('M, 'T) Ast.Statement.TypeAlias.t) - : ('N, 'U) Ast.Statement.TypeAlias.t = - let open Ast.Statement.TypeAlias in - let { id; tparams; right } = stuff in - let id' = this#t_identifier id in - this#type_parameter_declaration_opt tparams (fun tparams' -> - let right' = this#type_ right in - { id = id'; tparams = tparams'; right = right' } - ) - - method yield (expr: ('M, 'T) Ast.Expression.Yield.t) : ('N, 'U) Ast.Expression.Yield.t = - let open Ast.Expression.Yield in - let { argument; delegate } = expr in - let argument' = Option.map ~f:this#expression argument in - { argument = argument'; delegate } - -end - -let fold_program mappers ast = - List.fold_left (fun ast m -> m#program ast) ast mappers + method pattern ?kind (expr : ('M, 'T) Ast.Pattern.t) : ('N, 'U) Ast.Pattern.t = + Ast.Pattern.( + let (annot, patt) = expr in + ( this#on_type_annot annot, + match patt with + | Object { Object.properties; annot } -> + let properties' = Core_list.map ~f:(this#pattern_object_p ?kind) properties in + let annot' = this#type_annotation_hint annot in + Object { Object.properties = properties'; annot = annot' } + | Array { Array.elements; annot; comments } -> + let elements' = + Core_list.map ~f:(Option.map ~f:(this#pattern_array_e ?kind)) elements + in + let annot' = this#type_annotation_hint annot in + let comments' = Option.map ~f:this#syntax comments in + Array { Array.elements = elements'; annot = annot'; comments = comments' } + | Identifier { Identifier.name; annot; optional } -> + let name' = this#t_pattern_identifier ?kind name in + let annot' = this#type_annotation_hint annot in + Identifier { Identifier.name = name'; annot = annot'; optional } + | Expression e -> Expression (this#pattern_expression e) )) + + method pattern_identifier ?kind (ident : ('M, 'M) Ast.Identifier.t) : ('N, 'N) Ast.Identifier.t + = + ignore kind; + this#identifier ident + + method 
t_pattern_identifier ?kind (ident : ('M, 'T) Ast.Identifier.t) + : ('N, 'U) Ast.Identifier.t = + ignore kind; + this#t_identifier ident + + method pattern_literal ?kind (expr : 'M Ast.Literal.t) : 'N Ast.Literal.t = + ignore kind; + this#literal expr + + method pattern_object_p ?kind (p : ('M, 'T) Ast.Pattern.Object.property) = + Ast.Pattern.Object.( + match p with + | Property (annot, prop) -> + Property (this#on_loc_annot annot, this#pattern_object_property ?kind prop) + | RestProperty (annot, prop) -> + RestProperty (this#on_loc_annot annot, this#pattern_object_rest_property ?kind prop)) + + method pattern_object_property ?kind (prop : ('M, 'T) Ast.Pattern.Object.Property.t') + : ('N, 'U) Ast.Pattern.Object.Property.t' = + Ast.Pattern.Object.Property.( + let { key; pattern; default; shorthand } = prop in + let key' = this#pattern_object_property_key ?kind key in + let pattern' = this#pattern_object_property_pattern ?kind pattern in + let default' = Option.map ~f:this#expression default in + { key = key'; pattern = pattern'; default = default'; shorthand }) + + method pattern_object_property_key ?kind (key : ('M, 'T) Ast.Pattern.Object.Property.key) = + Ast.Pattern.Object.Property.( + match key with + | Literal (annot, lit) -> + Literal (this#on_loc_annot annot, this#pattern_object_property_literal_key ?kind lit) + | Identifier identifier -> + Identifier (this#pattern_object_property_identifier_key ?kind identifier) + | Computed expr -> Computed (this#pattern_object_property_computed_key ?kind expr)) + + method pattern_object_property_literal_key ?kind (key : 'M Ast.Literal.t) : 'N Ast.Literal.t = + this#pattern_literal ?kind key + + method pattern_object_property_identifier_key ?kind (key : ('M, 'T) Ast.Identifier.t) + : ('N, 'U) Ast.Identifier.t = + this#t_pattern_identifier ?kind key + + method pattern_object_property_computed_key ?kind (key : ('M, 'T) Ast.Expression.t) + : ('N, 'U) Ast.Expression.t = + ignore kind; + this#pattern_expression key + + method pattern_object_rest_property ?kind (prop : ('M, 'T) Ast.Pattern.Object.RestProperty.t') + : ('N, 'U) Ast.Pattern.Object.RestProperty.t' = + Ast.Pattern.Object.RestProperty.( + let { argument } = prop in + let argument' = this#pattern_object_rest_property_pattern ?kind argument in + { argument = argument' }) + + method pattern_object_property_pattern ?kind (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#pattern ?kind expr + + method pattern_object_rest_property_pattern ?kind (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#pattern ?kind expr + + method pattern_array_e ?kind (e : ('M, 'T) Ast.Pattern.Array.element) + : ('N, 'U) Ast.Pattern.Array.element = + Ast.Pattern.Array.( + match e with + | Element (annot, elem) -> + Element (this#on_loc_annot annot, this#pattern_array_element ?kind elem) + | RestElement (annot, elem) -> + RestElement (this#on_loc_annot annot, this#pattern_array_rest_element ?kind elem)) + + method pattern_array_element ?kind (elem : ('M, 'T) Ast.Pattern.Array.Element.t') + : ('N, 'U) Ast.Pattern.Array.Element.t' = + Ast.Pattern.Array.Element.( + let { argument; default } = elem in + let argument' = this#pattern_array_element_pattern ?kind argument in + let default' = Option.map ~f:this#expression default in + { argument = argument'; default = default' }) + + method pattern_array_element_pattern ?kind (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#pattern ?kind expr + + method pattern_array_rest_element ?kind (elem : ('M, 'T) 
Ast.Pattern.Array.RestElement.t') = + Ast.Pattern.Array.RestElement.( + let { argument } = elem in + let argument' = this#pattern_array_rest_element_pattern ?kind argument in + { argument = argument' }) + + method pattern_array_rest_element_pattern ?kind (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#pattern ?kind expr + + method pattern_assignment_pattern ?kind (expr : ('M, 'T) Ast.Pattern.t) + : ('N, 'U) Ast.Pattern.t = + this#pattern ?kind expr + + method pattern_expression (expr : ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = + this#expression expr + + method predicate_expression (expr : ('M, 'T) Ast.Expression.t) : ('N, 'U) Ast.Expression.t = + this#expression expr + + method return (stmt : ('M, 'T) Ast.Statement.Return.t) : ('N, 'U) Ast.Statement.Return.t = + Ast.Statement.Return.( + let { argument; comments } = stmt in + let argument' = Option.map ~f:this#expression argument in + let comments' = Option.map ~f:this#syntax comments in + { argument = argument'; comments = comments' }) + + method sequence (expr : ('M, 'T) Ast.Expression.Sequence.t) + : ('N, 'U) Ast.Expression.Sequence.t = + Ast.Expression.Sequence.( + let { expressions } = expr in + let expressions' = Core_list.map ~f:this#expression expressions in + { expressions = expressions' }) + + method toplevel_statement_list (stmts : ('M, 'T) Ast.Statement.t list) + : ('N, 'U) Ast.Statement.t list = + this#statement_list stmts + + method statement_list (stmts : ('M, 'T) Ast.Statement.t list) : ('N, 'U) Ast.Statement.t list = + Core_list.map ~f:this#statement stmts + + method spread_element (expr : ('M, 'T) Ast.Expression.SpreadElement.t) + : ('N, 'U) Ast.Expression.SpreadElement.t = + Ast.Expression.SpreadElement.( + let (annot, { argument }) = expr in + (this#on_loc_annot annot, { argument = this#expression argument })) + + method spread_property (expr : ('M, 'T) Ast.Expression.Object.SpreadProperty.t) + : ('N, 'U) Ast.Expression.Object.SpreadProperty.t = + Ast.Expression.Object.SpreadProperty.( + let (annot, { argument }) = expr in + (this#on_loc_annot annot, { argument = this#expression argument })) + + method switch (switch : ('M, 'T) Ast.Statement.Switch.t) : ('N, 'U) Ast.Statement.Switch.t = + Ast.Statement.Switch.( + let { discriminant; cases } = switch in + let discriminant' = this#expression discriminant in + let cases' = Core_list.map ~f:(this#on_loc_annot * this#switch_case) cases in + { discriminant = discriminant'; cases = cases' }) + + method switch_case (case : ('M, 'T) Ast.Statement.Switch.Case.t') = + Ast.Statement.Switch.Case.( + let { test; consequent } = case in + let test' = Option.map ~f:this#expression test in + let consequent' = this#statement_list consequent in + { test = test'; consequent = consequent' }) + + method tagged_template (expr : ('M, 'T) Ast.Expression.TaggedTemplate.t) + : ('N, 'U) Ast.Expression.TaggedTemplate.t = + Ast.Expression.TaggedTemplate.( + let { tag; quasi } = expr in + let tag' = this#expression tag in + let quasi' = (this#on_loc_annot * this#template_literal) quasi in + { tag = tag'; quasi = quasi' }) + + method template_literal (expr : ('M, 'T) Ast.Expression.TemplateLiteral.t) + : ('N, 'U) Ast.Expression.TemplateLiteral.t = + Ast.Expression.TemplateLiteral.( + let { quasis; expressions } = expr in + let quasis' = Core_list.map ~f:this#template_literal_element quasis in + let expressions' = Core_list.map ~f:this#expression expressions in + { quasis = quasis'; expressions = expressions' }) + + method template_literal_element ((annot, elem) : 'M 
Ast.Expression.TemplateLiteral.Element.t) + : 'N Ast.Expression.TemplateLiteral.Element.t = + (this#on_loc_annot annot, elem) + + method throw (stmt : ('M, 'T) Ast.Statement.Throw.t) : ('N, 'U) Ast.Statement.Throw.t = + Ast.Statement.Throw.( + let { argument } = stmt in + { argument = this#expression argument }) + + method try_catch (stmt : ('M, 'T) Ast.Statement.Try.t) : ('N, 'U) Ast.Statement.Try.t = + Ast.Statement.Try.( + let { block; handler; finalizer; comments } = stmt in + let block' = (this#on_loc_annot * this#block) block in + let handler' = Option.map ~f:(this#on_loc_annot * this#catch_clause) handler in + let finalizer' = Option.map ~f:(this#on_loc_annot * this#block) finalizer in + let comments' = Option.map ~f:this#syntax comments in + { block = block'; handler = handler'; finalizer = finalizer'; comments = comments' }) + + method type_cast (expr : ('M, 'T) Ast.Expression.TypeCast.t) + : ('N, 'U) Ast.Expression.TypeCast.t = + Ast.Expression.TypeCast.( + let { expression; annot } = expr in + let expression' = this#expression expression in + let annot' = this#type_annotation annot in + { expression = expression'; annot = annot' }) + + method unary_expression (expr : ('M, 'T) Ast.Expression.Unary.t) + : ('N, 'U) Ast.Expression.Unary.t = + Ast.Expression.Unary.( + let { argument; operator; comments } = expr in + let argument' = this#expression argument in + let comments' = Option.map ~f:this#syntax comments in + { argument = argument'; operator; comments = comments' }) + + method update_expression (expr : ('M, 'T) Ast.Expression.Update.t) + : ('N, 'U) Ast.Expression.Update.t = + Ast.Expression.Update.( + let { argument; operator; prefix } = expr in + let argument' = this#expression argument in + { argument = argument'; operator; prefix }) + + method variable_declaration (decl : ('M, 'T) Ast.Statement.VariableDeclaration.t) + : ('N, 'U) Ast.Statement.VariableDeclaration.t = + Ast.Statement.VariableDeclaration.( + let { declarations; kind } = decl in + let decls' = Core_list.map ~f:(this#variable_declarator ~kind) declarations in + { declarations = decls'; kind }) + + method variable_declarator + ~kind (decl : ('M, 'T) Ast.Statement.VariableDeclaration.Declarator.t) + : ('N, 'U) Ast.Statement.VariableDeclaration.Declarator.t = + Ast.Statement.VariableDeclaration.Declarator.( + let (annot, { id; init }) = decl in + let id' = this#variable_declarator_pattern ~kind id in + let init' = Option.map ~f:this#expression init in + (this#on_loc_annot annot, { id = id'; init = init' })) + + method while_ (stuff : ('M, 'T) Ast.Statement.While.t) : ('N, 'U) Ast.Statement.While.t = + Ast.Statement.While.( + let { test; body } = stuff in + let test' = this#predicate_expression test in + let body' = this#statement body in + { test = test'; body = body' }) + + method with_ (stuff : ('M, 'T) Ast.Statement.With.t) : ('N, 'U) Ast.Statement.With.t = + Ast.Statement.With.( + let { _object; body } = stuff in + let _object' = this#expression _object in + let body' = this#statement body in + { _object = _object'; body = body' }) + + method type_alias (stuff : ('M, 'T) Ast.Statement.TypeAlias.t) + : ('N, 'U) Ast.Statement.TypeAlias.t = + Ast.Statement.TypeAlias.( + let { id; tparams; right } = stuff in + let id' = this#t_identifier id in + this#type_parameter_declaration_opt tparams (fun tparams' -> + let right' = this#type_ right in + { id = id'; tparams = tparams'; right = right' })) + + method yield (expr : ('M, 'T) Ast.Expression.Yield.t) : ('N, 'U) Ast.Expression.Yield.t = + Ast.Expression.Yield.( + let 
{ argument; delegate; comments } = expr in + let argument' = Option.map ~f:this#expression argument in + let comments' = Option.map ~f:this#syntax comments in + { argument = argument'; delegate; comments = comments' }) + end diff --git a/src/parser_utils/flow_polymorphic_ast_mapper.mli b/src/parser_utils/flow_polymorphic_ast_mapper.mli new file mode 100644 index 00000000000..c8fcc0f7e50 --- /dev/null +++ b/src/parser_utils/flow_polymorphic_ast_mapper.mli @@ -0,0 +1,567 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module Ast = Flow_ast + +class virtual ['M, 'T, 'N, 'U] mapper : + object + method array : ('M, 'T) Ast.Expression.Array.t -> ('N, 'U) Ast.Expression.Array.t + + method arrow_function : ('M, 'T) Ast.Function.t -> ('N, 'U) Ast.Function.t + + method assignment : + ('M, 'T) Ast.Expression.Assignment.t -> ('N, 'U) Ast.Expression.Assignment.t + + method assignment_pattern : ('M, 'T) Flow_ast.Pattern.t -> ('N, 'U) Ast.Pattern.t + + method binary : ('M, 'T) Ast.Expression.Binary.t -> ('N, 'U) Ast.Expression.Binary.t + + method binding_pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method block : ('M, 'T) Ast.Statement.Block.t -> ('N, 'U) Ast.Statement.Block.t + + method break : 'M Ast.Statement.Break.t -> 'N Ast.Statement.Break.t + + method call : 'T -> ('M, 'T) Ast.Expression.Call.t -> ('N, 'U) Ast.Expression.Call.t + + method catch_clause : + ('M, 'T) Ast.Statement.Try.CatchClause.t' -> ('N, 'U) Ast.Statement.Try.CatchClause.t' + + method catch_clause_pattern : ('M, 'T) Flow_ast.Pattern.t -> ('N, 'U) Ast.Pattern.t + + method class_ : ('M, 'T) Ast.Class.t -> ('N, 'U) Ast.Class.t + + method class_body : ('M, 'T) Flow_ast.Class.Body.t -> ('N, 'U) Ast.Class.Body.t + + method class_decorator : ('M, 'T) Ast.Class.Decorator.t -> ('N, 'U) Ast.Class.Decorator.t + + method class_element : ('M, 'T) Ast.Class.Body.element -> ('N, 'U) Ast.Class.Body.element + + method class_extends : ('M, 'T) Ast.Class.Extends.t -> ('N, 'U) Ast.Class.Extends.t + + method class_identifier : ('M, 'T) Ast.Identifier.t -> ('N, 'U) Ast.Identifier.t + + method class_implements : + ('M, 'T) Flow_ast.Class.Implements.t -> ('N, 'U) Ast.Class.Implements.t + + method class_method : ('M, 'T) Ast.Class.Method.t' -> ('N, 'U) Ast.Class.Method.t' + + method class_private_field : + ('M, 'T) Ast.Class.PrivateField.t' -> ('N, 'U) Ast.Class.PrivateField.t' + + method class_property : ('M, 'T) Ast.Class.Property.t' -> ('N, 'U) Ast.Class.Property.t' + + method comment : 'M Ast.Comment.t -> 'N Ast.Comment.t + + method t_comment : 'T Ast.Comment.t -> 'U Ast.Comment.t + + method syntax : 'internal. 
('M, 'internal) Ast.Syntax.t -> ('N, 'internal) Ast.Syntax.t + + method comprehension : + ('M, 'T) Ast.Expression.Comprehension.t -> ('N, 'U) Ast.Expression.Comprehension.t + + method comprehension_block : + ('M, 'T) Ast.Expression.Comprehension.Block.t -> + ('N, 'U) Ast.Expression.Comprehension.Block.t + + method conditional : + ('M, 'T) Ast.Expression.Conditional.t -> ('N, 'U) Ast.Expression.Conditional.t + + method continue : 'M Ast.Statement.Continue.t -> 'N Ast.Statement.Continue.t + + method debugger : unit -> unit + + method declare_class : + ('M, 'T) Ast.Statement.DeclareClass.t -> ('N, 'U) Ast.Statement.DeclareClass.t + + method declare_export_declaration : + 'M -> + ('M, 'T) Ast.Statement.DeclareExportDeclaration.t -> + ('N, 'U) Ast.Statement.DeclareExportDeclaration.t + + method declare_export_declaration_decl : + ('M, 'T) Ast.Statement.DeclareExportDeclaration.declaration -> + ('N, 'U) Ast.Statement.DeclareExportDeclaration.declaration + + method declare_function : + ('M, 'T) Ast.Statement.DeclareFunction.t -> ('N, 'U) Ast.Statement.DeclareFunction.t + + method declare_interface : + ('M, 'T) Ast.Statement.Interface.t -> ('N, 'U) Ast.Statement.Interface.t + + method declare_module : + 'M -> ('M, 'T) Ast.Statement.DeclareModule.t -> ('N, 'U) Ast.Statement.DeclareModule.t + + method declare_module_exports : + 'M -> ('M, 'T) Flow_ast.Type.annotation -> ('N, 'U) Ast.Type.annotation + + method declare_opaque_type : + ('M, 'T) Ast.Statement.OpaqueType.t -> ('N, 'U) Ast.Statement.OpaqueType.t + + method declare_type_alias : + ('M, 'T) Ast.Statement.TypeAlias.t -> ('N, 'U) Ast.Statement.TypeAlias.t + + method declare_variable : + ('M, 'T) Ast.Statement.DeclareVariable.t -> ('N, 'U) Ast.Statement.DeclareVariable.t + + method do_while : ('M, 'T) Ast.Statement.DoWhile.t -> ('N, 'U) Ast.Statement.DoWhile.t + + method empty : unit -> unit + + method enum_declaration : + ('M, 'T) Ast.Statement.EnumDeclaration.t -> ('N, 'U) Ast.Statement.EnumDeclaration.t + + method enum_boolean_body : + 'M Ast.Statement.EnumDeclaration.BooleanBody.t -> + 'N Ast.Statement.EnumDeclaration.BooleanBody.t + + method enum_number_body : + 'M Ast.Statement.EnumDeclaration.NumberBody.t -> + 'N Ast.Statement.EnumDeclaration.NumberBody.t + + method enum_string_body : + 'M Ast.Statement.EnumDeclaration.StringBody.t -> + 'N Ast.Statement.EnumDeclaration.StringBody.t + + method enum_symbol_body : + 'M Ast.Statement.EnumDeclaration.SymbolBody.t -> + 'N Ast.Statement.EnumDeclaration.SymbolBody.t + + method enum_defaulted_member : + 'M Ast.Statement.EnumDeclaration.DefaultedMember.t -> + 'N Ast.Statement.EnumDeclaration.DefaultedMember.t + + method enum_boolean_member : + (bool, 'M) Ast.Statement.EnumDeclaration.InitializedMember.t -> + (bool, 'N) Ast.Statement.EnumDeclaration.InitializedMember.t + + method enum_number_member : + (Ast.NumberLiteral.t, 'M) Ast.Statement.EnumDeclaration.InitializedMember.t -> + (Ast.NumberLiteral.t, 'N) Ast.Statement.EnumDeclaration.InitializedMember.t + + method enum_string_member : + (Ast.StringLiteral.t, 'M) Ast.Statement.EnumDeclaration.InitializedMember.t -> + (Ast.StringLiteral.t, 'N) Ast.Statement.EnumDeclaration.InitializedMember.t + + method explicit_or_implicit : + ('M, 'T) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation -> + ('N, 'U) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation + + method export_default_declaration : + 'M -> + ('M, 'T) Ast.Statement.ExportDefaultDeclaration.t -> + ('N, 'U) Ast.Statement.ExportDefaultDeclaration.t + + 
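(* Editor's note (illustrative sketch, not part of this interface): a user of
   this class normally fixes the annotation types, supplies the two virtual
   methods, and overrides only the visit methods it needs, delegating the rest
   to [super]. The [identifier] method and the { name; comments } record shape
   come from this file and from Flow_ast; [Loc.t] as the concrete annotation
   type and the [prefix_identifiers] pass itself are assumptions. *)
class prefix_identifiers prefix =
  object
    inherit [Loc.t, Loc.t, Loc.t, Loc.t] Flow_polymorphic_ast_mapper.mapper as super
    method on_loc_annot loc = loc
    method on_type_annot loc = loc
    method! identifier id =
      let (annot, { Flow_ast.Identifier.name; comments }) = super#identifier id in
      (annot, { Flow_ast.Identifier.name = prefix ^ name; comments })
  end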
method export_default_declaration_decl : + ('M, 'T) Ast.Statement.ExportDefaultDeclaration.declaration -> + ('N, 'U) Ast.Statement.ExportDefaultDeclaration.declaration + + method export_named_declaration : + 'M -> + ('M, 'T) Ast.Statement.ExportNamedDeclaration.t -> + ('N, 'U) Ast.Statement.ExportNamedDeclaration.t + + method export_named_specifier : + 'M Ast.Statement.ExportNamedDeclaration.specifier -> + 'N Ast.Statement.ExportNamedDeclaration.specifier + + method export_specifier : + 'M Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t -> + 'N Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t + + method expression : ('M, 'T) Ast.Expression.t -> ('N, 'U) Ast.Expression.t + + method expression_or_spread : + ('M, 'T) Ast.Expression.expression_or_spread -> ('N, 'U) Ast.Expression.expression_or_spread + + method expression_statement : + ('M, 'T) Ast.Statement.Expression.t -> ('N, 'U) Ast.Statement.Expression.t + + method for_in_assignment_pattern : ('M, 'T) Flow_ast.Pattern.t -> ('N, 'U) Ast.Pattern.t + + method for_in_statement : ('M, 'T) Ast.Statement.ForIn.t -> ('N, 'U) Ast.Statement.ForIn.t + + method for_in_statement_lhs : + ('M, 'T) Ast.Statement.ForIn.left -> ('N, 'U) Ast.Statement.ForIn.left + + method for_of_assignment_pattern : ('M, 'T) Flow_ast.Pattern.t -> ('N, 'U) Ast.Pattern.t + + method for_of_statement : ('M, 'T) Ast.Statement.ForOf.t -> ('N, 'U) Ast.Statement.ForOf.t + + method for_of_statement_lhs : + ('M, 'T) Ast.Statement.ForOf.left -> ('N, 'U) Ast.Statement.ForOf.left + + method for_statement : ('M, 'T) Ast.Statement.For.t -> ('N, 'U) Ast.Statement.For.t + + method for_statement_init : ('M, 'T) Ast.Statement.For.init -> ('N, 'U) Ast.Statement.For.init + + method function_ : ('M, 'T) Ast.Function.t -> ('N, 'U) Ast.Function.t + + method function_body : ('M, 'T) Ast.Function.body -> ('N, 'U) Ast.Function.body + + method function_declaration : ('M, 'T) Ast.Function.t -> ('N, 'U) Ast.Function.t + + method function_expression : ('M, 'T) Ast.Function.t -> ('N, 'U) Ast.Function.t + + method function_identifier : ('M, 'M) Flow_ast.Identifier.t -> ('N, 'N) Ast.Identifier.t + + method function_param : ('M, 'T) Flow_ast.Function.Param.t -> ('N, 'U) Ast.Function.Param.t + + method function_params : ('M, 'T) Flow_ast.Function.Params.t -> ('N, 'U) Ast.Function.Params.t + + method function_param_pattern : ('M, 'T) Flow_ast.Pattern.t -> ('N, 'U) Ast.Pattern.t + + method function_param_type : + ('M, 'T) Ast.Type.Function.Param.t -> ('N, 'U) Ast.Type.Function.Param.t + + method function_rest_param : + ('M, 'T) Flow_ast.Function.RestParam.t -> ('N, 'U) Ast.Function.RestParam.t + + method function_rest_param_type : + ('M, 'T) Ast.Type.Function.RestParam.t -> ('N, 'U) Ast.Type.Function.RestParam.t + + method function_type : ('M, 'T) Ast.Type.Function.t -> ('N, 'U) Ast.Type.Function.t + + method generator : ('M, 'T) Ast.Expression.Generator.t -> ('N, 'U) Ast.Expression.Generator.t + + method generic_identifier_type : + ('M, 'T) Ast.Type.Generic.Identifier.t -> ('N, 'U) Ast.Type.Generic.Identifier.t + + method generic_type : ('M, 'T) Ast.Type.Generic.t -> ('N, 'U) Ast.Type.Generic.t + + method identifier : ('M, 'M) Ast.Identifier.t -> ('N, 'N) Ast.Identifier.t + + method if_consequent_statement : + has_else:bool -> ('M, 'T) Ast.Statement.t -> ('N, 'U) Ast.Statement.t + + method if_statement : ('M, 'T) Ast.Statement.If.t -> ('N, 'U) Ast.Statement.If.t + + method implicit : 'T -> 'U + + method import : 'T -> ('M, 'T) Ast.Expression.t -> ('N, 'U) Ast.Expression.t + + method 
import_declaration : + 'M -> + ('M, 'T) Ast.Statement.ImportDeclaration.t -> + ('N, 'U) Ast.Statement.ImportDeclaration.t + + method import_default_specifier : ('M, 'T) Ast.Identifier.t -> ('N, 'U) Ast.Identifier.t + + method import_named_specifier : + ('M, 'T) Ast.Statement.ImportDeclaration.named_specifier -> + ('N, 'U) Ast.Statement.ImportDeclaration.named_specifier + + method import_namespace_specifier : ('M, 'M) Ast.Identifier.t -> ('N, 'N) Ast.Identifier.t + + method import_specifier : + ('M, 'T) Ast.Statement.ImportDeclaration.specifier -> + ('N, 'U) Ast.Statement.ImportDeclaration.specifier + + method interface : ('M, 'T) Ast.Statement.Interface.t -> ('N, 'U) Ast.Statement.Interface.t + + method interface_declaration : + ('M, 'T) Ast.Statement.Interface.t -> ('N, 'U) Ast.Statement.Interface.t + + method interface_type : ('M, 'T) Ast.Type.Interface.t -> ('N, 'U) Ast.Type.Interface.t + + method jsx_attribute : ('M, 'T) Flow_ast.JSX.Attribute.t -> ('N, 'U) Ast.JSX.Attribute.t + + method jsx_attribute_value : + ('M, 'T) Ast.JSX.Attribute.value -> ('N, 'U) Ast.JSX.Attribute.value + + method jsx_children : 'M * ('M, 'T) Ast.JSX.child list -> 'N * ('N, 'U) Ast.JSX.child list + + method jsx_child : ('M, 'T) Ast.JSX.child -> ('N, 'U) Ast.JSX.child + + method jsx_closing_element : ('M, 'T) Ast.JSX.Closing.t -> ('N, 'U) Ast.JSX.Closing.t + + method jsx_element : ('M, 'T) Ast.JSX.element -> ('N, 'U) Ast.JSX.element + + method jsx_expression : + ('M, 'T) Ast.JSX.ExpressionContainer.t -> ('N, 'U) Ast.JSX.ExpressionContainer.t + + method jsx_fragment : ('M, 'T) Ast.JSX.fragment -> ('N, 'U) Ast.JSX.fragment + + method jsx_identifier : 'T Flow_ast.JSX.Identifier.t -> 'U Ast.JSX.Identifier.t + + method jsx_member_expression : + ('M, 'T) Ast.JSX.MemberExpression.t -> ('N, 'U) Ast.JSX.MemberExpression.t + + method jsx_member_expression_object : + ('M, 'T) Ast.JSX.MemberExpression._object -> ('N, 'U) Ast.JSX.MemberExpression._object + + method jsx_name : ('M, 'T) Ast.JSX.name -> ('N, 'U) Ast.JSX.name + + method jsx_namespaced_name : + ('M, 'T) Flow_ast.JSX.NamespacedName.t -> ('N, 'U) Ast.JSX.NamespacedName.t + + method jsx_opening_attribute : + ('M, 'T) Ast.JSX.Opening.attribute -> ('N, 'U) Ast.JSX.Opening.attribute + + method jsx_opening_element : ('M, 'T) Ast.JSX.Opening.t -> ('N, 'U) Ast.JSX.Opening.t + + method jsx_spread_attribute : + ('M, 'T) Ast.JSX.SpreadAttribute.t' -> ('N, 'U) Ast.JSX.SpreadAttribute.t' + + method label_identifier : ('M, 'M) Ast.Identifier.t -> ('N, 'N) Ast.Identifier.t + + method labeled_statement : ('M, 'T) Ast.Statement.Labeled.t -> ('N, 'U) Ast.Statement.Labeled.t + + method literal : 'M Ast.Literal.t -> 'N Ast.Literal.t + + method logical : ('M, 'T) Ast.Expression.Logical.t -> ('N, 'U) Ast.Expression.Logical.t + + method member : ('M, 'T) Ast.Expression.Member.t -> ('N, 'U) Ast.Expression.Member.t + + method member_private_name : 'M Flow_ast.PrivateName.t -> 'N Ast.PrivateName.t + + method member_property : + ('M, 'T) Ast.Expression.Member.property -> ('N, 'U) Ast.Expression.Member.property + + method member_property_expression : ('M, 'T) Flow_ast.Expression.t -> ('N, 'U) Ast.Expression.t + + method member_property_identifier : ('M, 'T) Ast.Identifier.t -> ('N, 'U) Ast.Identifier.t + + method meta_property : 'M Ast.Expression.MetaProperty.t -> 'N Ast.Expression.MetaProperty.t + + method new_ : ('M, 'T) Ast.Expression.New.t -> ('N, 'U) Ast.Expression.New.t + + method object_ : ('M, 'T) Ast.Expression.Object.t -> ('N, 'U) Ast.Expression.Object.t + + method 
object_indexer_type : + ('M, 'T) Ast.Type.Object.Indexer.t -> ('N, 'U) Ast.Type.Object.Indexer.t + + method object_internal_slot_type : + ('M, 'T) Ast.Type.Object.InternalSlot.t -> ('N, 'U) Ast.Type.Object.InternalSlot.t + + method object_key : + ('M, 'T) Ast.Expression.Object.Property.key -> ('N, 'U) Ast.Expression.Object.Property.key + + method object_key_identifier : ('M, 'T) Ast.Identifier.t -> ('N, 'U) Ast.Identifier.t + + method object_property : + ('M, 'T) Ast.Expression.Object.Property.t -> ('N, 'U) Ast.Expression.Object.Property.t + + method object_property_or_spread_property : + ('M, 'T) Ast.Expression.Object.property -> ('N, 'U) Ast.Expression.Object.property + + method object_property_type : + ('M, 'T) Ast.Type.Object.Property.t -> ('N, 'U) Ast.Type.Object.Property.t + + method object_property_value_type : + ('M, 'T) Ast.Type.Object.Property.value -> ('N, 'U) Ast.Type.Object.Property.value + + method object_type : ('M, 'T) Ast.Type.Object.t -> ('N, 'U) Ast.Type.Object.t + + method object_type_property : + ('M, 'T) Ast.Type.Object.property -> ('N, 'U) Ast.Type.Object.property + + method virtual on_loc_annot : 'M -> 'N + + method virtual on_type_annot : 'T -> 'U + + method opaque_type : ('M, 'T) Ast.Statement.OpaqueType.t -> ('N, 'U) Ast.Statement.OpaqueType.t + + method optional_call : + 'T -> ('M, 'T) Ast.Expression.OptionalCall.t -> ('N, 'U) Ast.Expression.OptionalCall.t + + method optional_member : + ('M, 'T) Ast.Expression.OptionalMember.t -> ('N, 'U) Ast.Expression.OptionalMember.t + + method pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Flow_ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method pattern_array_e : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Array.element -> + ('N, 'U) Ast.Pattern.Array.element + + method pattern_array_element : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Array.Element.t' -> + ('N, 'U) Ast.Pattern.Array.Element.t' + + method pattern_array_element_pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Flow_ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method pattern_array_rest_element : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Array.RestElement.t' -> + ('N, 'U) Ast.Pattern.Array.RestElement.t' + + method pattern_array_rest_element_pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Flow_ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method pattern_assignment_pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method pattern_expression : ('M, 'T) Flow_ast.Expression.t -> ('N, 'U) Ast.Expression.t + + method pattern_identifier : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'M) Ast.Identifier.t -> + ('N, 'N) Ast.Identifier.t + + method pattern_literal : + ?kind:Ast.Statement.VariableDeclaration.kind -> 'M Ast.Literal.t -> 'N Ast.Literal.t + + method pattern_object_p : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Object.property -> + ('N, 'U) Ast.Pattern.Object.property + + method pattern_object_property : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Object.Property.t' -> + ('N, 'U) Ast.Pattern.Object.Property.t' + + method pattern_object_property_computed_key : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Flow_ast.Expression.t -> + ('N, 'U) Ast.Expression.t + + method pattern_object_property_identifier_key : + ?kind:Ast.Statement.VariableDeclaration.kind 
-> + ('M, 'T) Ast.Identifier.t -> + ('N, 'U) Ast.Identifier.t + + method pattern_object_property_key : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Object.Property.key -> + ('N, 'U) Ast.Pattern.Object.Property.key + + method pattern_object_property_literal_key : + ?kind:Ast.Statement.VariableDeclaration.kind -> 'M Ast.Literal.t -> 'N Ast.Literal.t + + method pattern_object_property_pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Flow_ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method pattern_object_rest_property : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.Object.RestProperty.t' -> + ('N, 'U) Ast.Pattern.Object.RestProperty.t' + + method pattern_object_rest_property_pattern : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Flow_ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method predicate_expression : ('M, 'T) Flow_ast.Expression.t -> ('N, 'U) Ast.Expression.t + + method private_name : 'M Flow_ast.PrivateName.t -> 'N Ast.PrivateName.t + + method program : ('M, 'T) Ast.program -> ('N, 'U) Ast.program + + method return : ('M, 'T) Ast.Statement.Return.t -> ('N, 'U) Ast.Statement.Return.t + + method sequence : ('M, 'T) Ast.Expression.Sequence.t -> ('N, 'U) Ast.Expression.Sequence.t + + method spread_element : + ('M, 'T) Ast.Expression.SpreadElement.t -> ('N, 'U) Ast.Expression.SpreadElement.t + + method spread_property : + ('M, 'T) Ast.Expression.Object.SpreadProperty.t -> + ('N, 'U) Ast.Expression.Object.SpreadProperty.t + + method statement : ('M, 'T) Ast.Statement.t -> ('N, 'U) Ast.Statement.t + + method statement_list : ('M, 'T) Flow_ast.Statement.t list -> ('N, 'U) Ast.Statement.t list + + method switch : ('M, 'T) Ast.Statement.Switch.t -> ('N, 'U) Ast.Statement.Switch.t + + method switch_case : + ('M, 'T) Ast.Statement.Switch.Case.t' -> ('N, 'U) Ast.Statement.Switch.Case.t' + + method t_function_identifier : ('M, 'T) Ast.Identifier.t -> ('N, 'U) Ast.Identifier.t + + method t_identifier : ('M, 'T) Ast.Identifier.t -> ('N, 'U) Ast.Identifier.t + + method t_pattern_identifier : + ?kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Identifier.t -> + ('N, 'U) Ast.Identifier.t + + method tagged_template : + ('M, 'T) Ast.Expression.TaggedTemplate.t -> ('N, 'U) Ast.Expression.TaggedTemplate.t + + method template_literal : + ('M, 'T) Ast.Expression.TemplateLiteral.t -> ('N, 'U) Ast.Expression.TemplateLiteral.t + + method template_literal_element : + 'M Ast.Expression.TemplateLiteral.Element.t -> 'N Ast.Expression.TemplateLiteral.Element.t + + method throw : ('M, 'T) Ast.Statement.Throw.t -> ('N, 'U) Ast.Statement.Throw.t + + method toplevel_statement_list : ('M, 'T) Ast.Statement.t list -> ('N, 'U) Ast.Statement.t list + + method try_catch : ('M, 'T) Ast.Statement.Try.t -> ('N, 'U) Ast.Statement.Try.t + + method type_ : ('M, 'T) Flow_ast.Type.t -> ('N, 'U) Ast.Type.t + + method type_alias : ('M, 'T) Ast.Statement.TypeAlias.t -> ('N, 'U) Ast.Statement.TypeAlias.t + + method type_annotation : ('M, 'T) Flow_ast.Type.annotation -> 'N * ('N, 'U) Flow_ast.Type.t + + method type_annotation_hint : + ('M, 'T) Ast.Type.annotation_or_hint -> ('N, 'U) Ast.Type.annotation_or_hint + + method type_cast : ('M, 'T) Ast.Expression.TypeCast.t -> ('N, 'U) Ast.Expression.TypeCast.t + + method type_parameter_declaration_opt : + ('M, 'T) Ast.Type.ParameterDeclaration.t option -> + (('N, 'U) Ast.Type.ParameterDeclaration.t option -> 'a) -> + 'a + + method type_parameter_declaration_type_param : + ('M, 'T) 
Ast.Type.ParameterDeclaration.TypeParam.t -> + ('N, 'U) Ast.Type.ParameterDeclaration.TypeParam.t + + method type_parameter_instantiation : + ('M, 'T) Flow_ast.Type.ParameterInstantiation.t -> ('N, 'U) Ast.Type.ParameterInstantiation.t + + method type_parameter_instantiation_with_implicit : + ('M, 'T) Flow_ast.Expression.TypeParameterInstantiation.t -> + ('N, 'U) Ast.Expression.TypeParameterInstantiation.t + + method type_predicate : ('M, 'T) Flow_ast.Type.Predicate.t -> ('N, 'U) Ast.Type.Predicate.t + + method unary_expression : ('M, 'T) Ast.Expression.Unary.t -> ('N, 'U) Ast.Expression.Unary.t + + method update_expression : ('M, 'T) Ast.Expression.Update.t -> ('N, 'U) Ast.Expression.Update.t + + method variable_declaration : + ('M, 'T) Ast.Statement.VariableDeclaration.t -> ('N, 'U) Ast.Statement.VariableDeclaration.t + + method variable_declarator : + kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Statement.VariableDeclaration.Declarator.t -> + ('N, 'U) Ast.Statement.VariableDeclaration.Declarator.t + + method variable_declarator_pattern : + kind:Ast.Statement.VariableDeclaration.kind -> + ('M, 'T) Ast.Pattern.t -> + ('N, 'U) Ast.Pattern.t + + method while_ : ('M, 'T) Ast.Statement.While.t -> ('N, 'U) Ast.Statement.While.t + + method with_ : ('M, 'T) Ast.Statement.With.t -> ('N, 'U) Ast.Statement.With.t + + method yield : ('M, 'T) Ast.Expression.Yield.t -> ('N, 'U) Ast.Expression.Yield.t + end diff --git a/src/parser_utils/hoister.ml b/src/parser_utils/hoister.ml index d0817295aed..04f15f958dc 100644 --- a/src/parser_utils/hoister.ml +++ b/src/parser_utils/hoister.ml @@ -1,12 +1,11 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Flow_ast_visitor (* Hoister class. Does a shallow visit of statements, looking for binding @@ -16,37 +15,61 @@ open Flow_ast_visitor TODO: Ideally implemented as a fold, not a map. 
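
Note on the mapper interface above: a concrete subclass only has to supply the two virtual methods, on_loc_annot ('M -> 'N) and on_type_annot ('T -> 'U); every other method defaults to a structural map. A minimal sketch, assuming the class is exposed as Flow_polymorphic_ast_mapper.mapper with type parameters in the order ['M, 'T, 'N, 'U] (neither the module path nor the parameter order is visible in this excerpt):

    class loc_eraser =
      object
        inherit [Loc.t, Loc.t, unit, unit] Flow_polymorphic_ast_mapper.mapper

        (* 'M -> 'N: drop statement/expression locations *)
        method on_loc_annot (_ : Loc.t) = ()

        (* 'T -> 'U: drop the annotations attached to typed nodes *)
        method on_type_annot (_ : Loc.t) = ()
      end

    (* (new loc_eraser)#program maps a (Loc.t, Loc.t) Ast.program to a
       (unit, unit) Ast.program. *)
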
*) -module Bindings: sig - type t - type entry = Loc.t Ast.Identifier.t - val empty: t - val singleton: entry -> t - val add: entry -> t -> t - val push: t -> t -> t - val exists: (entry -> bool) -> t -> bool - val to_assoc: t -> (string * Loc.t Nel.t) list - val to_map: t -> Loc.t list SMap.t +module Bindings : sig + type 'loc t + + type 'loc entry = ('loc, 'loc) Ast.Identifier.t + + val empty : 'loc t + + val singleton : 'loc entry -> 'loc t + + val add : 'loc entry -> 'loc t -> 'loc t + + val push : 'loc t -> 'loc t -> 'loc t + + val exists : ('loc entry -> bool) -> 'loc t -> bool + + val to_assoc : 'loc t -> (string * 'loc Nel.t) list + + val to_map : 'loc t -> 'loc list SMap.t end = struct - type entry = Loc.t Ast.Identifier.t - type t = entry list + type 'loc entry = ('loc, 'loc) Ast.Identifier.t + + type 'loc t = 'loc entry list + let empty = [] + let singleton x = [x] + let add = List.cons + let push = List.append + let exists = List.exists + let to_assoc t = - let xs, map = List.fold_left (fun (xs, map) (loc, x) -> - match SMap.get x map with - | Some locs -> xs, SMap.add x (Nel.cons loc locs) map - | None -> x::xs, SMap.add x (Nel.one loc) map - ) ([], SMap.empty) (List.rev t) in - List.rev_map (fun x -> x, Nel.rev @@ SMap.find x map) xs + let (xs, map) = + List.fold_left + (fun (xs, map) (loc, { Ast.Identifier.name = x; comments = _ }) -> + match SMap.get x map with + | Some locs -> (xs, SMap.add x (Nel.cons loc locs) map) + | None -> (x :: xs, SMap.add x (Nel.one loc) map)) + ([], SMap.empty) + (List.rev t) + in + List.rev_map (fun x -> (x, Nel.rev @@ SMap.find x map)) xs + let to_map t = - let map = List.fold_left (fun map (loc, x) -> - match SMap.get x map with - | Some locs -> SMap.add x (loc::locs) map - | None -> SMap.add x [loc] map - ) SMap.empty (List.rev t) in + let map = + List.fold_left + (fun map (loc, { Ast.Identifier.name = x; comments = _ }) -> + match SMap.get x map with + | Some locs -> SMap.add x (loc :: locs) map + | None -> SMap.add x [loc] map) + SMap.empty + (List.rev t) + in SMap.map List.rev map end @@ -59,161 +82,146 @@ end are known to introduce bindings. The logic here is sufficiently tricky that we probably should not change it without extensive testing. *) -class hoister = object(this) - inherit [Bindings.t] visitor ~init:Bindings.empty as super +class ['loc] hoister = + object (this) + inherit ['loc Bindings.t, 'loc] visitor ~init:Bindings.empty as super - method private add_binding entry = - (* `event` is a global in old IE and jsxmin lazily avoids renaming it. it - should be safe to shadow it, i.e. `function(event){event.target}` can be - renamed to `function(a){a.target}`, because code relying on the global - would have to have written `function(){event.target}` or - `function(event) {(event || window.event).target}`, both of which are - compatible with renaming. + method private add_binding entry = this#update_acc (Bindings.add entry) - TODO[jsxmin]: remove this. *) - let _loc, x = entry in - if x = "event" then () else - this#update_acc (Bindings.add entry) - - (* Ignore expressions. This includes, importantly, function expressions (whose + (* Ignore expressions. This includes, importantly, function expressions (whose ids should not be hoisted). *) - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - expr + method! expression (expr : ('loc, 'loc) Ast.Expression.t) = expr - (* Ignore assignment patterns, whose targets should not be hoisted. *) - method! 
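
A quick sketch of what the now-polymorphic Bindings API produces, using only the functions in its signature; the identifier record shape ({ name; comments }) is taken from the to_assoc match above, and comments = None is an assumed placeholder value:

    let bindings_demo () =
      let id name = (Loc.none, { Flow_ast.Identifier.name; comments = None }) in
      let b = Bindings.(empty |> add (id "x") |> add (id "y") |> add (id "x")) in
      (* to_assoc yields one entry per distinct name, each paired with a
         nonempty list of every location that name was bound at; to_map
         gives the same information keyed by name. *)
      (Bindings.to_assoc b, Bindings.to_map b)
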
assignment_pattern (patt: (Loc.t, Loc.t) Ast.Pattern.t) = - patt + (* Ignore assignment patterns, whose targets should not be hoisted. *) + method! assignment_pattern (patt : ('loc, 'loc) Ast.Pattern.t) = patt - (* Ignore class declarations, since they are lexical bindings (thus not + (* Ignore class declarations, since they are lexical bindings (thus not hoisted). *) - method! class_ _loc (cls: (Loc.t, Loc.t) Ast.Class.t) = - cls + method! class_ _loc (cls : ('loc, 'loc) Ast.Class.t) = cls - (* Ignore import declarations, since they are lexical bindings (thus not + (* Ignore import declarations, since they are lexical bindings (thus not hoisted). *) - method! import_declaration _loc (decl: (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.t) = - decl + method! import_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ImportDeclaration.t) = decl - (* This is visited by function parameters, variable declarations, and catch patterns (but not + (* This is visited by function parameters, variable declarations, and catch patterns (but not assignment expressions). *) - method! pattern ?kind (expr: (Loc.t, Loc.t) Ast.Pattern.t) = - match Utils.unsafe_opt kind with - | Ast.Statement.VariableDeclaration.Var -> - let open Ast.Pattern in - let _, patt = expr in - begin match patt with - | Identifier { Identifier.name; _ } -> - this#add_binding name - | Object _ - | Array _ - | Assignment _ -> run (super#pattern ?kind) expr - | Expression _ -> () - end; - expr - | Ast.Statement.VariableDeclaration.Let | Ast.Statement.VariableDeclaration.Const -> - expr (* don't hoist let/const bindings *) - - method! declare_variable loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareVariable.t) = - let open Ast.Statement.DeclareVariable in - this#add_binding decl.id; - super#declare_variable loc decl - - method! declare_class loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareClass.t) = - let open Ast.Statement.DeclareClass in - this#add_binding decl.id; - super#declare_class loc decl - - method! declare_function loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareFunction.t) = - let open Ast.Statement.DeclareFunction in - this#add_binding decl.id; - super#declare_function loc decl - - method! function_declaration _loc (expr: (Loc.t, Loc.t) Ast.Function.t) = - let open Ast.Function in - let { id; _ } = expr in - begin match id with - | Some name -> - this#add_binding name - | None -> () - end; - expr - -end - -class lexical_hoister = object(this) - inherit [Bindings.t] visitor ~init:Bindings.empty as super - - method private add_binding entry = - this#update_acc (Bindings.add entry) - - (* Ignore all statements except variable declarations, class declarations, and + method! pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + match Utils.unsafe_opt kind with + | Ast.Statement.VariableDeclaration.Var -> + Ast.Pattern.( + let (_, patt) = expr in + begin + match patt with + | Identifier { Identifier.name; _ } -> this#add_binding name + | Object _ + | Array _ -> + run (super#pattern ?kind) expr + | Expression _ -> () + end; + expr) + | Ast.Statement.VariableDeclaration.Let + | Ast.Statement.VariableDeclaration.Const -> + expr + + (* don't hoist let/const bindings *) + method! declare_variable loc (decl : ('loc, 'loc) Ast.Statement.DeclareVariable.t) = + Ast.Statement.DeclareVariable.( + this#add_binding decl.id; + super#declare_variable loc decl) + + method! declare_class loc (decl : ('loc, 'loc) Ast.Statement.DeclareClass.t) = + Ast.Statement.DeclareClass.( + this#add_binding decl.id; + super#declare_class loc decl) + + method! 
declare_function loc (decl : ('loc, 'loc) Ast.Statement.DeclareFunction.t) = + Ast.Statement.DeclareFunction.( + this#add_binding decl.id; + super#declare_function loc decl) + + method! function_declaration _loc (expr : ('loc, 'loc) Ast.Function.t) = + Ast.Function.( + let { id; _ } = expr in + begin + match id with + | Some name -> this#add_binding name + | None -> () + end; + expr) + end + +class ['loc] lexical_hoister = + object (this) + inherit ['loc Bindings.t, 'loc] visitor ~init:Bindings.empty as super + + method private add_binding entry = this#update_acc (Bindings.add entry) + + (* Ignore all statements except variable declarations, class declarations, and import declarations. The ignored statements cannot contain lexical bindings in the current scope. *) - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let open Ast.Statement in - match stmt with - | (_, VariableDeclaration _) - | (_, ClassDeclaration _) - | (_, ExportNamedDeclaration _) - | (_, ExportDefaultDeclaration _) - | (_, ImportDeclaration _) -> super#statement stmt - | _ -> stmt - - (* Ignore expressions. This includes, importantly, initializers of variable + method! statement (stmt : ('loc, 'loc) Ast.Statement.t) = + Ast.Statement.( + match stmt with + | (_, VariableDeclaration _) + | (_, ClassDeclaration _) + | (_, ExportNamedDeclaration _) + | (_, ExportDefaultDeclaration _) + | (_, ImportDeclaration _) -> + super#statement stmt + | _ -> stmt) + + (* Ignore expressions. This includes, importantly, initializers of variable declarations. *) - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - expr + method! expression (expr : ('loc, 'loc) Ast.Expression.t) = expr - (* This is visited by variable declarations, as well as other kinds of + (* This is visited by variable declarations, as well as other kinds of patterns that we ignore. *) - method! pattern ?kind (expr: (Loc.t, Loc.t) Ast.Pattern.t) = - match kind with - | None -> expr - | Some (Ast.Statement.VariableDeclaration.Let | Ast.Statement.VariableDeclaration.Const) -> - let open Ast.Pattern in - let _, patt = expr in - begin match patt with - | Identifier { Identifier.name; _ } -> - this#add_binding name - | Object _ - | Array _ - | Assignment _ -> run (super#pattern ?kind) expr - | _ -> () - end; - expr - | Some Ast.Statement.VariableDeclaration.Var -> expr - - method! class_ _loc (cls: (Loc.t, Loc.t) Ast.Class.t) = - let open Ast.Class in - let { - id; body = _; tparams = _; - extends = _; implements = _; - classDecorators = _; - } = cls in - begin match id with - | Some name -> - this#add_binding name - | None -> () - end; - cls - - method! import_named_specifier - (specifier: Loc.t Ast.Statement.ImportDeclaration.named_specifier) = - let open Ast.Statement.ImportDeclaration in - let binding = match specifier with - | { local = Some binding; remote = _; kind = _ } - | { local = None; remote = binding; kind = _ } -> - binding - in - this#add_binding binding; - specifier - - method! import_default_specifier (id: Loc.t Ast.Identifier.t) = - this#add_binding id; - id - - method! import_namespace_specifier _loc (id: Loc.t Ast.Identifier.t) = - this#add_binding id; - id - -end + method! 
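
For orientation, this is roughly how the hoister is driven; #eval is assumed to come from the Flow_ast_visitor.visitor base class (run a visit method, return the accumulated state) and is not shown in this excerpt:

    let var_bindings (program : (Loc.t, Loc.t) Flow_ast.program) =
      let hoist = new hoister in
      (* Picks up `var` and function-declaration bindings (plus declare
         variable/class/function); let/const, class declarations, imports
         and anything nested inside expressions are left alone. *)
      Bindings.to_map (hoist#eval hoist#program program)
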
pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) = + match kind with + | None -> expr + | Some (Ast.Statement.VariableDeclaration.Let | Ast.Statement.VariableDeclaration.Const) -> + Ast.Pattern.( + let (_, patt) = expr in + begin + match patt with + | Identifier { Identifier.name; _ } -> this#add_binding name + | Object _ + | Array _ -> + run (super#pattern ?kind) expr + | _ -> () + end; + expr) + | Some Ast.Statement.VariableDeclaration.Var -> expr + + method! class_ _loc (cls : ('loc, 'loc) Ast.Class.t) = + Ast.Class.( + let { id; body = _; tparams = _; extends = _; implements = _; classDecorators = _ } = + cls + in + begin + match id with + | Some name -> this#add_binding name + | None -> () + end; + cls) + + method! import_named_specifier + (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.named_specifier) = + Ast.Statement.ImportDeclaration.( + let binding = + match specifier with + | { local = Some binding; remote = _; kind = _ } + | { local = None; remote = binding; kind = _ } -> + binding + in + this#add_binding binding; + specifier) + + method! import_default_specifier (id : ('loc, 'loc) Ast.Identifier.t) = + this#add_binding id; + id + + method! import_namespace_specifier _loc (id : ('loc, 'loc) Ast.Identifier.t) = + this#add_binding id; + id + end diff --git a/src/parser_utils/json_of_estree.ml b/src/parser_utils/json_of_estree.ml index aa7fac337e2..60b4e00357c 100644 --- a/src/parser_utils/json_of_estree.ml +++ b/src/parser_utils/json_of_estree.ml @@ -1,23 +1,29 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module Hh_json_translator : ( - Translator_intf.S with type t = Hh_json.json -) = struct +module Hh_json_translator : Translator_intf.S with type t = Hh_json.json = struct type t = Hh_json.json open Hh_json let string x = JSON_String x + let bool x = JSON_Bool x + let obj props = JSON_Object props + let array arr = JSON_Array arr + let number x = JSON_Number (Dtoa.ecma_string_of_float x) + + let int x = JSON_Number (string_of_int x) + let null = JSON_Null + let regexp _loc _pattern _flags = JSON_Null end diff --git a/src/parser_utils/mapper_differ.ml b/src/parser_utils/mapper_differ.ml deleted file mode 100644 index ec92194a798..00000000000 --- a/src/parser_utils/mapper_differ.ml +++ /dev/null @@ -1,168 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -module Ast = Flow_ast - -exception Combine_inconsistency - -type node = - | Statement of (Loc.t, Loc.t) Ast.Statement.t * (Loc.t, Loc.t) Ast.Statement.t - | Expression of (Loc.t, Loc.t) Ast.Expression.t * (Loc.t, Loc.t) Ast.Expression.t - | ClassElement of (Loc.t, Loc.t) Ast.Class.Body.element * (Loc.t, Loc.t) Ast.Class.Body.element - | Type of (Loc.t, Loc.t) Ast.Type.t * (Loc.t, Loc.t) Ast.Type.t - | Return of (Loc.t, Loc.t) Flow_ast.Function.return - -type t = node Utils_js.LocMap.t - -module L = Utils_js.LocMap -module B = Flow_ast.Class.Body - -class wrapper (m: Flow_ast_mapper.mapper) (s: t ref) = - object (_this) - inherit Flow_ast_mapper.mapper as super - - val m = m - - method! 
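
The reformatted Hh_json_translator above is a constructor-for-constructor mapping into Hh_json.json. A sketch of what its primitives compose into, written as if evaluated inside json_of_estree.ml (the inner module may not be exported through the .mli):

    let identifier_json : Hh_json.json =
      Hh_json_translator.(
        obj [("type", string "Identifier"); ("name", string "x"); ("optional", bool false)])
    (* = JSON_Object [("type", JSON_String "Identifier");
                      ("name", JSON_String "x");
                      ("optional", JSON_Bool false)] *)
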
statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let stmt = super#statement stmt in - let (loc_pre, _) = stmt in - let size_pre = L.cardinal !s in - let mapped = m#statement stmt in - if size_pre == L.cardinal !s && mapped <> stmt then ( - s := L.add loc_pre (Statement (stmt, mapped)) !s ; - mapped ) - else stmt - - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let expr = super#expression expr in - let (loc_pre, _) = expr in - let size_pre = L.cardinal !s in - let mapped = m#expression expr in - if size_pre == L.cardinal !s && mapped <> expr then ( - s := L.add loc_pre (Expression (expr, mapped)) !s ; - mapped ) - else expr - - method! class_element (elem: (Loc.t, Loc.t) Ast.Class.Body.element) = - let elem = super#class_element elem in - let loc_pre = match elem with - | B.Method (loc, _) -> loc - | B.Property (loc, _) -> loc - | B.PrivateField (loc, _) -> loc - in - let size_pre = L.cardinal !s in - let mapped = m#class_element elem in - if size_pre == L.cardinal !s && mapped <> elem then ( - s := L.add loc_pre (ClassElement (elem, mapped)) !s ; - mapped ) - else elem - - method! type_ (t: (Loc.t, Loc.t) Ast.Type.t) = - let t = super#type_ t in - let (loc_pre, _) = t in - let size_pre = L.cardinal !s in - let mapped = m#type_ t in - if size_pre == L.cardinal !s && mapped <> t then ( - s := L.add loc_pre (Type (t, mapped)) !s ; - mapped ) - else t - - method! return_type_annotation (return: (Loc.t, Loc.t) Flow_ast.Function.return) = - let open Flow_ast.Function in - let return = super#return_type_annotation return in - let loc_pre = match return with - | Available _ -> Loc.none - | Missing loc -> loc - in - let size_pre = L.cardinal !s in - let mapped = m#return_type_annotation return in - if size_pre == L.cardinal !s && mapped <> return then ( - s := L.add loc_pre (Return mapped) !s ; - mapped ) - else return - - method! function_ loc (expr: (Loc.t, Loc.t) Flow_ast.Function.t) = - let open Flow_ast.Function in - let { return = return1; _ } = expr in - let loc_pre = match return1 with - | Available _ -> Loc.none - | Missing loc -> loc - in - let func' = super#function_ loc expr in - let { return = return2; _ } = func' in - let func'' = m#function_ loc func' in - let { return = return3; _ } = func'' in - if not (return1 == return2 && return2 == return3) then - s := L.add loc_pre (Return return3) !s ; - func'' - - method! 
class_method loc (meth: (Loc.t, Loc.t) Ast.Class.Method.t') = - let open Flow_ast.Function in - let ({Ast.Class.Method.value; _} as meth) = super#class_method loc meth in - let _, { return = return1; _ } = value in - let loc_pre = match return1 with - | Available _ -> Loc.none - | Missing loc -> loc - in - let ({Ast.Class.Method.value; _} as meth') = super#class_method loc meth in - let _, { return = return2; _ } = value in - let ({Ast.Class.Method.value; _} as meth'') = m#class_method loc meth' in - let _, { return = return3; _ } = value in - if not (return1 == return2 && return2 == return3) then - s := L.add loc_pre (Return return3) !s ; - meth'' - end - -let collapse_diffs map = - L.fold - (fun loc elem acc -> - match - L.find_first_opt - (fun loc_candidate -> - loc <> loc_candidate && Loc.span_compare loc_candidate loc = 0 ) - map - with - | Some _ -> acc - | None -> L.add loc elem acc ) - map L.empty - -let diff m ast = - let s = ref L.empty in - let w = new wrapper m s in - let ast = w#program ast in - let diffs = collapse_diffs !s in - (diffs, ast) - -let combine ~orig ~add = - if L.is_empty orig then add - else if L.is_empty add then orig - else - (* Adds to orig the elements that intersect between add and orig *) - let merged_map = - L.fold - (fun loc elem acc -> - match L.find_opt loc add with - | Some node -> ( - match (elem, node) with - | Statement (old, _), Statement (_, new_) -> - L.add loc (Statement (old, new_)) acc - | Expression (old, _), Expression (_, new_) -> - L.add loc (Expression (old, new_)) acc - | _ -> raise Combine_inconsistency ) - | None -> L.add loc elem acc ) - orig L.empty - in - (* Adds the elements from add that were not in orig *) - let merged_map_with_add = - L.fold - (fun loc elem acc -> - match L.find_opt loc acc with - | Some _ -> acc - | None -> L.add loc elem acc ) - add merged_map - in - collapse_diffs merged_map_with_add diff --git a/src/parser_utils/mapper_differ.mli b/src/parser_utils/mapper_differ.mli deleted file mode 100644 index 2d1aa26a089..00000000000 --- a/src/parser_utils/mapper_differ.mli +++ /dev/null @@ -1,28 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -type node = - | Statement of (Loc.t, Loc.t) Flow_ast.Statement.t * (Loc.t, Loc.t) Flow_ast.Statement.t - | Expression of (Loc.t, Loc.t) Flow_ast.Expression.t * (Loc.t, Loc.t) Flow_ast.Expression.t - | ClassElement of (Loc.t, Loc.t) Flow_ast.Class.Body.element * (Loc.t, Loc.t) Flow_ast.Class.Body.element - | Type of (Loc.t, Loc.t) Flow_ast.Type.t * (Loc.t, Loc.t) Flow_ast.Type.t - | Return of (Loc.t, Loc.t) Flow_ast.Function.return - -(* - * A map of (old node, new node) - *) - -type t = node Utils_js.LocMap.t - -val diff : Flow_ast_mapper.mapper -> (Loc.t, Loc.t) Flow_ast.program -> t * (Loc.t, Loc.t) Flow_ast.program - -(* - * Combines two diffs and merges the old value with the add value - * The combination keeps the old node of orig and the new node of add - *) - -val combine : orig:t -> add:t -> t diff --git a/src/parser_utils/output/.merlin b/src/parser_utils/output/.merlin deleted file mode 100644 index b4280945d53..00000000000 --- a/src/parser_utils/output/.merlin +++ /dev/null @@ -1,4 +0,0 @@ -PKG dtoa -PKG wtf8 - -REC diff --git a/src/parser_utils/output/__tests__/_tags b/src/parser_utils/output/__tests__/_tags new file mode 100644 index 00000000000..44b26f97d51 --- /dev/null +++ b/src/parser_utils/output/__tests__/_tags @@ -0,0 +1 @@ +: include diff --git a/src/parser_utils/output/__tests__/js_layout_generator/assignment_precedence_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/assignment_precedence_test.ml index d51b19141fb..88ae3757750 100644 --- a/src/parser_utils/output/__tests__/js_layout_generator/assignment_precedence_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator/assignment_precedence_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
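
The rewritten expected layouts in the tests below drop the old sequence ~break:.../wrap_in_parens_raw forms in favor of a small combinator vocabulary (group, indent, softline, pretty_line, wrap_in_parens). A rough sketch of how they read, using the builders exactly as these tests use them:

    (* A group breaks or stays flat as a unit; indent adds one level of
       indentation to its contents when the enclosing group breaks;
       softline prints nothing when flat and a newline when broken. *)
    let braced_a =
      L.(group [atom "{"; indent (fused [softline; id "a"]); softline; atom "}"])

    (* Flat output: "{a}".  If the group is forced to break, roughly:
       {
         a
       }
    *)
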
@@ -7,7 +7,6 @@ open Ast_builder open Layout_test_utils - module S = Ast_builder.Statements module E = Ast_builder.Expressions module L = Layout_builder @@ -15,52 +14,103 @@ module L = Layout_builder let test ctxt = let rhs = E.sequence [E.identifier "y"; E.identifier "z"] in let ast = E.assignment (Patterns.identifier "x") rhs in - assert_layout_of_expression ~ctxt - L.(loc (fused ( - [loc (id "x"); pretty_space; atom "="; pretty_space] @ - wrap_in_parens_raw (expression rhs) - ))) + assert_layout_of_expression + ~ctxt + L.( + loc + (fused + [loc (id "x"); pretty_space; atom "="; pretty_space; wrap_in_parens (expression rhs)])) ast; let rhs = E.assignment (Patterns.identifier "y") (E.identifier "z") in let ast = E.assignment (Patterns.identifier "x") rhs in - assert_layout_of_expression ~ctxt - L.(loc (fused [ - loc (id "x"); pretty_space; atom "="; pretty_space; expression rhs; - ])) + assert_layout_of_expression + ~ctxt + L.(loc (fused [loc (id "x"); pretty_space; atom "="; pretty_space; expression rhs])) ast; let rhs = E.function_ () in let ast = E.assignment (Patterns.identifier "x") rhs in let expected = - L.(loc (fused [ - loc (id "x"); pretty_space; atom "="; pretty_space; expression rhs; - ])) + L.(loc (fused [loc (id "x"); pretty_space; atom "="; pretty_space; expression rhs])) in assert_layout_of_expression ~ctxt expected ast; - assert_layout_of_statement ~ctxt - L.(loc (fused [expected; atom ";"])) - (S.expression ast); + assert_layout_of_statement ~ctxt L.(loc (fused [expected; atom ";"])) (S.expression ast); - assert_layout_of_statement_string ~ctxt - L.(loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=0; offset=0}; _end={Loc.line=1; column=8; offset=8}} (fused [ - atom "("; - sequence ~break:Layout.Break_if_needed [ - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=1; offset=1}; _end={Loc.line=1; column=6; offset=6}} (fused [ - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=1; offset=1}; _end={Loc.line=1; column=4; offset=4}} (sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - atom "{"; - sequence ~break:Layout.Break_if_needed [ - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=2; offset=2}; _end={Loc.line=1; column=3; offset=3}} (id ~loc:{Loc.none with Loc.start={Loc.line=1; column=2; offset=2}; _end={Loc.line=1; column=3; offset=3}} "a"); - ]; - atom "}"; - ]; - ]); - pretty_space; atom "="; pretty_space; - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=5; offset=5}; _end={Loc.line=1; column=6; offset=6}} (id ~loc:{Loc.none with Loc.start={Loc.line=1; column=5; offset=5}; _end={Loc.line=1; column=6; offset=6}} "b"); - ]); - ]; - atom ")"; - atom ";"; - ])) - "({a}=b);"; + assert_layout_of_statement_string + ~ctxt + L.( + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 0 }; + _end = { Loc.line = 1; column = 8 }; + } + (fused + [ + wrap_in_parens + (loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 1 }; + _end = { Loc.line = 1; column = 6 }; + } + (fused + [ + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 1 }; + _end = { Loc.line = 1; column = 4 }; + } + (group + [ + atom "{"; + indent + (fused + [ + softline; + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 2 }; + _end = { Loc.line = 1; column = 3 }; + } + (id + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 2 }; + _end = { Loc.line = 1; column = 3 }; + } + "a"); + ]); + softline; + atom "}"; + ]); + pretty_space; + atom "="; + pretty_space; + loc + 
~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 5 }; + _end = { Loc.line = 1; column = 6 }; + } + (id + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 5 }; + _end = { Loc.line = 1; column = 6 }; + } + "b"); + ])); + atom ";"; + ])) + "({a}=b);" diff --git a/src/parser_utils/output/__tests__/js_layout_generator/comment_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/comment_test.ml index 2e9b1332bf3..93ee6a7cffa 100644 --- a/src/parser_utils/output/__tests__/js_layout_generator/comment_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator/comment_test.ml @@ -1,47 +1,29 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open OUnit2 - open Layout_test_utils open Layout_generator_test_utils - module L = Layout_builder -let tests = [ - "block" >:: - begin fun ctxt -> - let comment = Ast_builder.Comments.block "test" in - let layout = Js_layout_generator.comment comment in - assert_layout ~ctxt - L.(loc (fused [ - atom "/*"; - pretty_newline; - atom "test"; - pretty_newline; - atom "*/"; - ])) - layout; - assert_output ~ctxt "/*test*/" layout; - assert_output ~ctxt ~pretty:true "/*\ntest\n*/" layout; - end; - - "line" >:: - begin fun ctxt -> - let comment = Ast_builder.Comments.line "test" in - let layout = Js_layout_generator.comment comment in - assert_layout ~ctxt - L.(loc (fused [ - atom "//"; - atom "test"; - Layout.Newline; - ])) - layout; - assert_output ~ctxt "//test\n" layout; - assert_output ~ctxt ~pretty:true "//test\n" layout; - end; -] +let tests = + [ + ( "block" + >:: fun ctxt -> + let comment = Ast_builder.Comments.block "test" in + let layout = Js_layout_generator.comment comment in + assert_layout ~ctxt L.(loc (fused [atom "/*"; atom "test"; atom "*/"])) layout; + assert_output ~ctxt "/*test*/" layout; + assert_output ~ctxt ~pretty:true "/*test*/" layout ); + ( "line" + >:: fun ctxt -> + let comment = Ast_builder.Comments.line "test" in + let layout = Js_layout_generator.comment comment in + assert_layout ~ctxt L.(loc (fused [atom "//"; atom "test"; Layout.Newline])) layout; + assert_output ~ctxt "//test\n" layout; + assert_output ~ctxt ~pretty:true "//test\n" layout ); + ] diff --git a/src/parser_utils/output/__tests__/js_layout_generator/jsx_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/jsx_test.ml new file mode 100644 index 00000000000..63f4315e6a8 --- /dev/null +++ b/src/parser_utils/output/__tests__/js_layout_generator/jsx_test.ml @@ -0,0 +1,276 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 +open Ast_builder +open Layout_test_utils +open Layout_generator_test_utils +module E = Ast_builder.Expressions +module J = Ast_builder.JSXs +module S = Ast_builder.Statements +module L = Layout_builder + +let make_loc start_line end_line = + Loc. 
+ { + source = None; + start = { line = start_line; column = 0 }; + _end = { line = end_line; column = 0 }; + } + +let tests = + [ + ("simple_self_closing" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("simple" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("namespaced" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("member" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("nested_member" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("simple_with_child" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ( "simple_with_attribute_and_child" + >:: (fun ctxt -> assert_expression_string ~ctxt ~pretty:true "") ); + ( "long_attribute_with_children" + >:: fun ctxt -> + let a_loc = make_loc 1 4 in + let b_loc = make_loc 2 2 in + let c_loc = make_loc 3 3 in + let ast = + E.jsx_element + ~loc:a_loc + (J.element + (J.identifier "A") + ~attrs: + [ + J.attr + (J.attr_identifier "a") + (Some (J.attr_literal (Literals.string (String.make 80 'a')))); + ] + ~children: + [ + J.child_element ~loc:b_loc (J.identifier "B") ~selfclosing:true; + J.child_element ~loc:c_loc (J.identifier "C") ~selfclosing:true; + ]) + in + let layout = + L.( + loc + ~loc:a_loc + (fused + [ + loc + (group + [ + atom "<"; + id "A"; + indent + (fused + [ + line; + loc + (fused + [ + id "a"; + atom "="; + loc + (fused + [ + atom "\""; + atom + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + atom "\""; + ]); + ]); + ]); + atom ">"; + ]); + indent + (fused + [ + pretty_hardline; + loc ~loc:b_loc (loc (group [atom "<"; id "B"; pretty_space; atom "/>"])); + hardline; + loc ~loc:c_loc (loc (group [atom "<"; id "C"; pretty_space; atom "/>"])); + ]); + pretty_hardline; + loc (fused [atom ""]); + ])) + in + assert_layout_of_expression ~ctxt layout ast; + assert_expression + ~ctxt + ~pretty:true + ("\n \n \n") + ast; + assert_expression ~ctxt ("\n") ast ); + ( "borderline_length_with_children" + >:: fun ctxt -> + (* opening tag is 80 columns. if it's indented, make sure it breaks. 
+ + + + + *) + let a_loc = make_loc 1 4 in + let f_loc = make_loc 2 2 in + let ast = + E.jsx_element + ~loc:a_loc + (J.element + (J.identifier "aaaaaaaaaaaaa") + ~attrs: + [ + J.attr + (J.attr_identifier "bbbb") + (Some (J.attr_literal (Literals.string "cccccccccccccccccccccccccccccccccccc"))); + J.attr + (J.attr_identifier "ddddd") + (Some (J.attr_literal (Literals.string "eeeeeeeeeeee"))); + ] + ~children:[J.child_element ~loc:f_loc (J.identifier "f") ~selfclosing:true]) + in + let layout = Js_layout_generator.expression ast in + assert_layout + ~ctxt + L.( + loc + ~loc:a_loc + (fused + [ + loc + (group + [ + atom "<"; + id "aaaaaaaaaaaaa"; + indent + (fused + [ + line; + loc + (fused + [ + id "bbbb"; + atom "="; + loc + (fused + [ + atom "\""; + atom "cccccccccccccccccccccccccccccccccccc"; + atom "\""; + ]); + ]); + pretty_line; + loc + (fused + [ + id "ddddd"; + atom "="; + loc (fused [atom "\""; atom "eeeeeeeeeeee"; atom "\""]); + ]); + ]); + atom ">"; + ]); + indent + (fused + [ + pretty_hardline; + loc ~loc:f_loc (loc (group [atom "<"; id "f"; pretty_space; atom "/>"])); + ]); + pretty_hardline; + loc (fused [atom ""]); + ])) + layout; + assert_output + ~ctxt + ( {||} + ^ {||} + ^ {||} ) + layout; + assert_output + ~ctxt + ~pretty:true + ( {||} + ^ "\n" + ^ {| |} + ^ "\n" + ^ {||} ) + layout; + + let block_layout = Js_layout_generator.statement (S.block [S.expression ast]) in + assert_output + ~ctxt + ( "{" + ^ "" + ^ "" + ^ "" + ^ "}" ) + block_layout; + assert_output + ~ctxt + ~pretty:true + ( "{\n" + ^ " \n" + ^ " \n" + ^ " ;\n" + ^ "}" ) + block_layout ); + ( "long_child_text" + >:: fun ctxt -> + assert_expression_string ~ctxt ~pretty:true ("\n " ^ String.make 80 'b' ^ "\n") + ); + ( "literal_whitespace" + >:: fun ctxt -> + assert_expression_string ~ctxt ~pretty:true "\n a{\" \"}\n b\n" ); + ( "children_fit_on_one_line" + >:: (fun ctxt -> assert_expression_string ~ctxt ~pretty:true "") ); + ( "long_child_element" + >:: fun ctxt -> + assert_expression_string + ~ctxt + ~pretty:true + ("\n <" ^ String.make 80 'B' ^ " />\n \n") ); + ( "user_supplied_newlines" + >:: fun ctxt -> + (* TODO: Utils_jsx.trim_jsx_text is overly aggressive for pretty + * printing, user supplied newlines between words should be + * maintained. 
The following test should pass: + * + * assert_expression_string ~ctxt ~pretty:true ( + * "\n " ^ String.make 80 'a' ^ "\n " ^ String.make 80 'b' ^ "\n" + * ); + *) + ignore ctxt ); + ( "valueless_attribute" + >:: fun ctxt -> + (* TODO: valueless attributes shouldnt print trailing spaces when last *) + assert_expression_string ~ctxt "" ); + ("namespaced_valueluess_attribute" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("attribute_braces" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("attribute_string" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ("attribute_spread" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ( "attribute_spread_between_attributes" + >:: (fun ctxt -> assert_expression_string ~ctxt "") ); + ("multiple_shorthand_attributes" >:: (fun ctxt -> assert_expression_string ~ctxt "")); + ( "shorthand_and_longhand_attributes" + >:: (fun ctxt -> assert_expression_string ~ctxt "") ); + ( "multiple_longhand_attributes" + >:: fun ctxt -> + assert_expression_string ~ctxt ""; + assert_expression_string ~ctxt ~pretty:true ""; + assert_expression_string + ~ctxt + ~pretty:true + ("") ); + ( "string_longhand_and_shorthand_attributes" + >:: fun ctxt -> + assert_expression_string ~ctxt ""; + assert_expression_string ~ctxt ~pretty:true ""; + assert_expression_string ~ctxt ~pretty:true ("") + ); + ( "expression_longhand_and_shorthand_attributes" + >:: (fun ctxt -> assert_expression_string ~ctxt "") ); + ] diff --git a/src/parser_utils/output/__tests__/js_layout_generator/object_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/object_test.ml index 998961ea981..78e09e34f25 100644 --- a/src/parser_utils/output/__tests__/js_layout_generator/object_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator/object_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -8,7 +8,6 @@ open OUnit2 open Layout_test_utils open Layout_generator_test_utils - module S = Ast_builder.Statements module E = Ast_builder.Expressions module L = Layout_builder @@ -16,161 +15,242 @@ module L = Layout_builder let expected_object2_layout prop1 prop2 = let prop1_layout = Js_layout_generator.object_property prop1 in let prop2_layout = Js_layout_generator.object_property prop2 in - L.(loc (sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - atom "{"; - flat_pretty_space; - sequence ~break:Layout.Break_if_needed [ - fused [ - prop1_layout; - Layout.IfBreak ((atom ","), (fused [atom ","; pretty_space])); - ]; - fused [ - prop2_layout; - Layout.IfBreak ((Layout.IfPretty ((atom ","), empty)), empty); - ]; - ]; - flat_pretty_space; - atom "}"; - ]; - ])) - -let tests = [ - (* `{ foo: x, bar: y }` rather than `{foo: x, bar: y}` *) - "flat_spaces_inside_braces" >:: begin fun ctxt -> - let prop1 = E.object_property (E.object_property_key "foo") (E.identifier "x") in - let prop2 = E.object_property (E.object_property_key "bar") (E.identifier "y") in - let ast = E.object_ [prop1; prop2] in - let layout = Js_layout_generator.expression ast in - assert_layout ~ctxt (expected_object2_layout prop1 prop2) layout; - assert_output ~ctxt "{foo:x,bar:y}" layout; - assert_output ~ctxt ~pretty:true "{ foo: x, bar: y }" layout; - end; + L.( + loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_line; + prop1_layout; + atom ","; + pretty_line; + prop2_layout; + Layout.IfBreak (atom ",", empty); + ]); + pretty_line; + atom "}"; + ])) - (* if it wraps, there's a trailing comma *) - "newlines_and_trailing_comma" >:: begin fun ctxt -> - let x40 = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" in - let prop1 = E.object_property (E.object_property_key "foo") (E.identifier x40) in - let prop2 = E.object_property (E.object_property_key "bar") (E.identifier x40) in - let ast = E.object_ [prop1; prop2] in +let tests = + [ + ( "empty_object" + >:: fun ctxt -> + let ast = E.object_ [] in let layout = Js_layout_generator.expression ast in - assert_layout ~ctxt (expected_object2_layout prop1 prop2) layout; - assert_output ~ctxt ("{foo:"^x40^",bar:"^x40^"}") layout; - assert_output ~ctxt ~pretty:true ("{\n foo: "^x40^",\n bar: "^x40^",\n}") layout; - end; - - (* a function value forces the whole object to break in pretty mode *) - "object_property_is_function" >:: begin fun ctxt -> - let prop1 = E.object_property (E.object_property_key "foo") (E.identifier "x") in - let prop2 = E.object_property (E.object_property_key "bar") (E.function_ ()) in - let prop3 = E.object_property (E.object_property_key "baz") (E.identifier "y") in - let ast = E.object_ [prop1; prop2; prop3] in - let layout = Js_layout_generator.expression ast in - let prop1_layout = Js_layout_generator.object_property prop1 in - let prop2_layout = Js_layout_generator.object_property prop2 in - let prop3_layout = Js_layout_generator.object_property prop3 in - assert_layout ~ctxt - L.(loc (sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - atom "{"; - flat_pretty_space; - sequence ~break:Layout.Break_if_needed [ - fused [ - prop1_layout; - Layout.IfBreak ((atom ","), (fused [ - atom ","; - pretty_space; - ])); - ]; - fused [ - pretty_newline; - prop2_layout; - Layout.IfBreak ((atom ","), (fused [ - atom ","; - pretty_space; - ])); - ]; - fused [ - pretty_newline; - prop3_layout; - Layout.IfBreak ((Layout.IfPretty ((atom ","), empty)), empty); - ]; - ]; - flat_pretty_space; - atom 
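
The rewritten expected_object2_layout above encodes the trailing-comma rule directly: Layout.IfBreak (a, b) prints a when the enclosing group breaks and b when it stays flat. A reduced sketch of the shape these object layouts now take (prop stands for any Js_layout_generator.object_property result):

    let object_layout prop =
      L.(
        group
          [
            atom "{";
            indent (fused [pretty_line; prop; Layout.IfBreak (atom ",", empty)]);
            pretty_line;
            atom "}";
          ])

    (* Compact:        "{foo:x}"
       Pretty, flat:   "{ foo: x }"
       Pretty, broken: "{\n  foo: x,\n}"  -- IfBreak supplies the trailing
       comma, matching the surrounding flat_spaces_inside_braces and
       newlines_and_trailing_comma assertions. *)
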
"}"; - ]; - ])) - layout; - assert_output ~ctxt "{foo:x,bar:function(){},baz:y}" layout; - assert_output ~ctxt ~pretty:true "{\n foo: x,\n \n bar: function() {},\n \n baz: y,\n}" layout; - end; - - "object_property_is_method" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression ( - E.object_ [ - E.object_method (E.object_property_key "foo"); - ] - ) in + assert_layout ~ctxt L.(loc (group [atom "{"; atom "}"])) layout; + assert_output ~ctxt "{}" layout; + assert_output ~ctxt ~pretty:true "{}" layout ); + (* `{ foo: x, bar: y }` rather than `{foo: x, bar: y}` *) + ( "flat_spaces_inside_braces" + >:: fun ctxt -> + let prop1 = E.object_property (E.object_property_key "foo") (E.identifier "x") in + let prop2 = E.object_property (E.object_property_key "bar") (E.identifier "y") in + let ast = E.object_ [prop1; prop2] in + let layout = Js_layout_generator.expression ast in + assert_layout ~ctxt (expected_object2_layout prop1 prop2) layout; + assert_output ~ctxt "{foo:x,bar:y}" layout; + assert_output ~ctxt ~pretty:true "{ foo: x, bar: y }" layout ); + (* if it wraps, there's a trailing comma *) + ( "newlines_and_trailing_comma" + >:: fun ctxt -> + let x40 = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" in + let prop1 = E.object_property (E.object_property_key "foo") (E.identifier x40) in + let prop2 = E.object_property (E.object_property_key "bar") (E.identifier x40) in + let ast = E.object_ [prop1; prop2] in + let layout = Js_layout_generator.expression ast in + assert_layout ~ctxt (expected_object2_layout prop1 prop2) layout; + assert_output ~ctxt ("{foo:" ^ x40 ^ ",bar:" ^ x40 ^ "}") layout; + assert_output ~ctxt ~pretty:true ("{\n foo: " ^ x40 ^ ",\n bar: " ^ x40 ^ ",\n}") layout ); + (* a function value forces the whole object to break in pretty mode *) + ( "object_property_is_function" + >:: fun ctxt -> + let prop1 = E.object_property (E.object_property_key "foo") (E.identifier "x") in + let prop2 = E.object_property (E.object_property_key "bar") (E.function_ ()) in + let prop3 = E.object_property (E.object_property_key "baz") (E.identifier "y") in + let ast = E.object_ [prop1; prop2; prop3] in + let layout = Js_layout_generator.expression ast in + let prop1_layout = Js_layout_generator.object_property prop1 in + let prop2_layout = Js_layout_generator.object_property prop2 in + let prop3_layout = Js_layout_generator.object_property prop3 in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_line; + prop1_layout; + atom ","; + pretty_line; + pretty_hardline; + prop2_layout; + atom ","; + pretty_line; + pretty_hardline; + prop3_layout; + Layout.IfBreak (atom ",", empty); + ]); + pretty_line; + atom "}"; + ])) + layout; + assert_output ~ctxt "{foo:x,bar:function(){},baz:y}" layout; + assert_output + ~ctxt + ~pretty:true + "{\n foo: x,\n \n bar: function() {},\n \n baz: y,\n}" + layout ); + ( "object_property_is_method" + >:: fun ctxt -> + let layout = + Js_layout_generator.expression (E.object_ [E.object_method (E.object_property_key "foo")]) + in assert_output ~ctxt "{foo(){}}" layout; - assert_output ~ctxt ~pretty:true "{ foo() {} }" layout; - end; - - "object_property_is_generator_method" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression ( - E.object_ [ - E.object_method ~generator:true (E.object_property_key "foo"); - ] - ) in + assert_output ~ctxt ~pretty:true "{ foo() {} }" layout ); + ( "object_property_is_generator_method" + >:: fun ctxt -> + let layout = + Js_layout_generator.expression + (E.object_ [E.object_method 
~generator:true (E.object_property_key "foo")]) + in assert_output ~ctxt "{*foo(){}}" layout; - assert_output ~ctxt ~pretty:true "{ *foo() {} }" layout; - end; - - "object_property_is_sequence" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression ( - E.object_ [ - E.object_property - (E.object_property_key "foo") - (E.sequence [E.identifier "x"; E.identifier "y"]); - ] - ) in + assert_output ~ctxt ~pretty:true "{ *foo() {} }" layout ); + ( "object_property_is_sequence" + >:: fun ctxt -> + let layout = + Js_layout_generator.expression + (E.object_ + [ + E.object_property + (E.object_property_key "foo") + (E.sequence [E.identifier "x"; E.identifier "y"]); + ]) + in assert_output ~ctxt "{foo:(x,y)}" layout; - assert_output ~ctxt ~pretty:true "{ foo: (x, y) }" layout; - end; - - "object_property_key_is_literal" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression ( - E.object_ [ - E.object_property_with_literal - (Ast_builder.Literals.string "foo") - (E.literal (Ast_builder.Literals.string "bar")); - ] - ) in - assert_output ~ctxt ~msg:"string literal keys should be quoted" - "{\"foo\":\"bar\"}" layout; - assert_output ~ctxt ~msg:"string literal keys should be quoted" ~pretty:true - "{ \"foo\": \"bar\" }" layout; - end; - - "object_property_key_is_computed" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "{ foo: (x, y) }" layout ); + ( "object_property_key_is_literal" + >:: fun ctxt -> + let layout = + Js_layout_generator.expression + (E.object_ + [ + E.object_property_with_literal + (Ast_builder.Literals.string "foo") + (E.literal (Ast_builder.Literals.string "bar")); + ]) + in + assert_output ~ctxt ~msg:"string literal keys should be quoted" "{\"foo\":\"bar\"}" layout; + assert_output + ~ctxt + ~msg:"string literal keys should be quoted" + ~pretty:true + "{ \"foo\": \"bar\" }" + layout ); + ( "object_property_key_is_computed" + >:: fun ctxt -> let b80 = String.make 80 'b' in - let ast = Ast_builder.expression_of_string ("{["^b80^"]: 123}") in + let ast = + E.object_ + [ + E.object_property + (E.object_property_computed_key (E.identifier b80)) + (E.literal (Ast_builder.Literals.number 123. "123")); + ] + in let layout = Js_layout_generator.expression ast in - assert_output ~ctxt - ("{["^b80^"]:123}") - layout; - assert_output ~ctxt ~pretty:true - ("{\n [\n "^b80^"\n ]: 123,\n}") + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_line; + loc + (group + [ + atom "["; + indent (fused [pretty_line; loc (id b80)]); + pretty_line; + atom "]"; + atom ":"; + pretty_space; + loc (atom "123"); + ]); + Layout.IfBreak (atom ",", empty); + ]); + pretty_line; + atom "}"; + ])) layout; + assert_output ~ctxt ("{[" ^ b80 ^ "]:123}") layout; + assert_output ~ctxt ~pretty:true ("{\n [\n " ^ b80 ^ "\n ]: 123,\n}") layout; let b40 = String.make 40 'b' in - let ast = Ast_builder.expression_of_string ("{["^b40^"+"^b40^"]: 123}") in + let ast = + E.object_ + [ + E.object_property + (E.object_property_computed_key + (E.binary ~op:Flow_ast.Expression.Binary.Plus (E.identifier b40) (E.identifier b40))) + (E.literal (Ast_builder.Literals.number 123. 
"123")); + ] + in let layout = Js_layout_generator.expression ast in - assert_output ~ctxt - ("{["^b40^"+"^b40^"]:123}") + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_line; + loc + (group + [ + atom "["; + indent + (fused + [ + pretty_line; + loc + (fused + [ + loc (id b40); + pretty_space; + atom "+"; + pretty_space; + loc (id b40); + ]); + ]); + pretty_line; + atom "]"; + atom ":"; + pretty_space; + loc (atom "123"); + ]); + Layout.IfBreak (atom ",", empty); + ]); + pretty_line; + atom "}"; + ])) layout; + assert_output ~ctxt ("{[" ^ b40 ^ "+" ^ b40 ^ "]:123}") layout; + (* TODO: the second b40 should wrap *) - assert_output ~ctxt ~pretty:true - ("{\n [\n "^b40^" + "^b40^"\n ]: 123,\n}") - layout; - end; -] + assert_output ~ctxt ~pretty:true ("{\n [\n " ^ b40 ^ " + " ^ b40 ^ "\n ]: 123,\n}") layout + ); + ] diff --git a/src/parser_utils/output/__tests__/js_layout_generator/operator_precedence_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/operator_precedence_test.ml index dc9f88331b6..1faf28d855b 100644 --- a/src/parser_utils/output/__tests__/js_layout_generator/operator_precedence_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator/operator_precedence_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,369 +9,447 @@ open OUnit2 open Ast_builder open Layout_test_utils open Layout_generator_test_utils - module S = Ast_builder.Statements module E = Ast_builder.Expressions module L = Layout_builder -let x, y, z = E.identifier "x", E.identifier "y", E.identifier "z" +let (x, y, z) = (E.identifier "x", E.identifier "y", E.identifier "z") + let x40 = E.identifier (String.make 40 'x') + let str = E.literal (Literals.string "a") -let (&&) a b = E.logical_and a b -let (||) a b = E.logical_or a b -let (+) a b = E.binary ~op:Flow_ast.Expression.Binary.Plus a b -let (-) a b = E.binary ~op:Flow_ast.Expression.Binary.Minus a b -let tests = [ - "and_with_and_lhs" >:: begin fun ctxt -> +let ( && ) a b = E.logical_and a b + +let ( || ) a b = E.logical_or a b + +let ( + ) a b = E.binary ~op:Flow_ast.Expression.Binary.Plus a b + +let ( - ) a b = E.binary ~op:Flow_ast.Expression.Binary.Minus a b + +let tests = + [ + ( "and_with_and_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression ((x && y) && z) in - assert_layout ~ctxt - L.(loc (group [ - expression (x && y); pretty_space; atom "&&"; - indent (fused [line; expression z]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression (x && y); + pretty_space; + atom "&&"; + indent (fused [pretty_line; expression z]); + ])) layout; assert_output ~ctxt "x&&y&&z" layout; assert_output ~ctxt ~pretty:true "x && y && z" layout; let layout = Js_layout_generator.expression ((x40 && x40) && x40) in - assert_output ~ctxt + assert_output + ~ctxt "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&&xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&&xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" layout; - assert_output ~ctxt ~pretty:true - ("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") - layout; - end; - - "and_with_and_rhs" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression (x && (y && z)) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom 
"&&"; - indent (fused ( - line::(wrap_in_parens_raw (expression (y && z))) - )); - ])) + assert_output + ~ctxt + ~pretty:true + ( "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" ) + layout ); + ( "and_with_and_rhs" + >:: fun ctxt -> + let layout = Js_layout_generator.expression (x && y && z) in + assert_layout + ~ctxt + L.( + loc + (group + [ + expression x; + pretty_space; + atom "&&"; + indent (fused [pretty_line; wrap_in_parens (expression (y && z))]); + ])) layout; assert_output ~ctxt "x&&(y&&z)" layout; assert_output ~ctxt ~pretty:true "x && (y && z)" layout; - let layout = Js_layout_generator.expression (x40 && (x40 && x40)) in - assert_output ~ctxt + let layout = Js_layout_generator.expression (x40 && x40 && x40) in + assert_output + ~ctxt "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&&(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&&xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)" layout; - assert_output ~ctxt ~pretty:true - ("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n"^ - " (\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n"^ - " )") - layout; - end; - - "or_with_and_lhs" >:: begin fun ctxt -> + assert_output + ~ctxt + ~pretty:true + ( "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n" + ^ " (xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)" ) + layout ); + ( "or_with_and_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression ((x && y) || z) in - assert_layout ~ctxt - L.(loc (group [ - expression (x && y); pretty_space; atom "||"; - indent (fused [line; expression z]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression (x && y); + pretty_space; + atom "||"; + indent (fused [pretty_line; expression z]); + ])) layout; assert_output ~ctxt "x&&y||z" layout; assert_output ~ctxt ~pretty:true "x && y || z" layout; let layout = Js_layout_generator.expression ((x40 && x40) || x40) in - assert_output ~ctxt + assert_output + ~ctxt "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&&xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" layout; - assert_output ~ctxt ~pretty:true - ("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") - layout; - end; - - "and_with_or_rhs" >:: begin fun ctxt -> + assert_output + ~ctxt + ~pretty:true + ( "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" ) + layout ); + ( "and_with_or_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x && (y || z)) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom "&&"; - indent (fused ( - line::(wrap_in_parens_raw (expression (y || z))) - )); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression x; + pretty_space; + atom "&&"; + indent (fused [pretty_line; wrap_in_parens (expression (y || z))]); + ])) layout; assert_output ~ctxt "x&&(y||z)" layout; assert_output ~ctxt ~pretty:true "x && (y || z)" layout; let layout = Js_layout_generator.expression (x40 && (x40 || x40)) in - assert_output ~ctxt + assert_output + ~ctxt "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&&(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)" layout; - assert_output ~ctxt ~pretty:true - ("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n"^ - " (\n"^ - 
" xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n"^ - " )") - layout; - end; - - "or_with_or_lhs" >:: begin fun ctxt -> + assert_output + ~ctxt + ~pretty:true + ( "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx &&\n" + ^ " (xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)" ) + layout ); + ( "or_with_or_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression ((x || y) || z) in - assert_layout ~ctxt - L.(loc (group [ - expression (x || y); pretty_space; atom "||"; - indent (fused [line; expression z]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression (x || y); + pretty_space; + atom "||"; + indent (fused [pretty_line; expression z]); + ])) layout; assert_output ~ctxt "x||y||z" layout; assert_output ~ctxt ~pretty:true "x || y || z" layout; let layout = Js_layout_generator.expression ((x40 || x40) || x40) in - assert_output ~ctxt + assert_output + ~ctxt "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" layout; - assert_output ~ctxt ~pretty:true - ("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n"^ - " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx") - layout; - end; - - "or_with_or_rhs" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression (x || (y || z)) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom "||"; - indent (fused ( - line::(wrap_in_parens_raw (expression (y || z))) - )); - ])) + assert_output + ~ctxt + ~pretty:true + ( "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ||\n" + ^ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" ) + layout ); + ( "or_with_or_rhs" + >:: fun ctxt -> + let layout = Js_layout_generator.expression (x || y || z) in + assert_layout + ~ctxt + L.( + loc + (group + [ + expression x; + pretty_space; + atom "||"; + indent (fused [pretty_line; wrap_in_parens (expression (y || z))]); + ])) layout; assert_output ~ctxt "x||(y||z)" layout; - assert_output ~ctxt ~pretty:true "x || (y || z)" layout; - end; - - "and_with_or_lhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x || (y || z)" layout ); + ( "and_with_or_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression ((x || y) && z) in - assert_layout ~ctxt - L.(loc (group [ - wrap_in_parens (expression (x || y)); - pretty_space; atom "&&"; - indent (fused [line; expression z]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + wrap_in_parens (expression (x || y)); + pretty_space; + atom "&&"; + indent (fused [pretty_line; expression z]); + ])) layout; assert_output ~ctxt "(x||y)&&z" layout; - assert_output ~ctxt ~pretty:true "(x || y) && z" layout; - end; - - "or_with_and_rhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "(x || y) && z" layout ); + ( "or_with_and_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x || (y && z)) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom "||"; - indent (fused [line; expression (y && z)]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression x; + pretty_space; + atom "||"; + indent (fused [pretty_line; expression (y && z)]); + ])) layout; assert_output ~ctxt "x||y&&z" layout; - assert_output ~ctxt ~pretty:true "x || y && z" layout; - end; - - "plus_with_plus_lhs" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression ((x + y) + z) in - assert_layout 
~ctxt + assert_output ~ctxt ~pretty:true "x || y && z" layout ); + ( "plus_with_plus_lhs" + >:: fun ctxt -> + let layout = Js_layout_generator.expression (x + y + z) in + assert_layout + ~ctxt L.(loc (fused [expression (x + y); pretty_space; atom "+"; pretty_space; expression z])) layout; assert_output ~ctxt "x+y+z" layout; - assert_output ~ctxt ~pretty:true "x + y + z" layout; - end; - - "plus_with_plus_rhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x + y + z" layout ); + ( "plus_with_plus_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x + (y + z)) in - assert_layout ~ctxt - L.(loc (fused ( - [expression x; pretty_space; atom "+"; pretty_space] @ - wrap_in_parens_raw (expression (y + z)) - ))) + assert_layout + ~ctxt + L.( + loc + (fused + [ + expression x; + pretty_space; + atom "+"; + pretty_space; + wrap_in_parens (expression (y + z)); + ])) layout; assert_output ~ctxt "x+(y+z)" layout; - assert_output ~ctxt ~pretty:true "x + (y + z)" layout; - end; - - "minus_with_plus_lhs" >:: begin fun ctxt -> - let layout = Js_layout_generator.expression ((x + y) - z) in - assert_layout ~ctxt + assert_output ~ctxt ~pretty:true "x + (y + z)" layout ); + ( "minus_with_plus_lhs" + >:: fun ctxt -> + let layout = Js_layout_generator.expression (x + y - z) in + assert_layout + ~ctxt L.(loc (fused [expression (x + y); pretty_space; atom "-"; pretty_space; expression z])) layout; assert_output ~ctxt "x+y-z" layout; - assert_output ~ctxt ~pretty:true "x + y - z" layout; - end; - - "plus_with_minus_rhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x + y - z" layout ); + ( "plus_with_minus_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x + (y - z)) in - assert_layout ~ctxt - L.(loc (fused ( - [expression x; pretty_space; atom "+"; pretty_space] @ - wrap_in_parens_raw (expression (y - z)) - ))) + assert_layout + ~ctxt + L.( + loc + (fused + [ + expression x; + pretty_space; + atom "+"; + pretty_space; + wrap_in_parens (expression (y - z)); + ])) layout; assert_output ~ctxt "x+(y-z)" layout; - assert_output ~ctxt ~pretty:true "x + (y - z)" layout; - end; - - "and_with_plus_lhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x + (y - z)" layout ); + ( "and_with_plus_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression ((x + y) && z) in - assert_layout ~ctxt - L.(loc (group [ - expression (x + y); pretty_space; atom "&&"; - indent (fused [line; expression z]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression (x + y); + pretty_space; + atom "&&"; + indent (fused [pretty_line; expression z]); + ])) layout; assert_output ~ctxt "x+y&&z" layout; - assert_output ~ctxt ~pretty:true "x + y && z" layout; - end; - - "plus_with_and_rhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x + y && z" layout ); + ( "plus_with_and_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x + (y && z)) in - assert_layout ~ctxt - L.(loc (fused ( - [expression x; pretty_space; atom "+"; pretty_space] @ - wrap_in_parens_raw (expression (y && z)) - ))) + assert_layout + ~ctxt + L.( + loc + (fused + [ + expression x; + pretty_space; + atom "+"; + pretty_space; + wrap_in_parens (expression (y && z)); + ])) layout; assert_output ~ctxt "x+(y&&z)" layout; - assert_output ~ctxt ~pretty:true "x + (y && z)" layout; - end; - - "plus_with_and_lhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x + (y && z)" layout ); + ( "plus_with_and_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression 
((x && y) + z) in - assert_layout ~ctxt - L.(loc (fused ( - wrap_in_parens_raw (expression (x && y)) @ - [pretty_space; atom "+"; pretty_space; expression z] - ))) + assert_layout + ~ctxt + L.( + loc + (fused + [ + wrap_in_parens (expression (x && y)); + pretty_space; + atom "+"; + pretty_space; + expression z; + ])) layout; assert_output ~ctxt "(x&&y)+z" layout; - assert_output ~ctxt ~pretty:true "(x && y) + z" layout; - end; - - "and_with_plus_rhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "(x && y) + z" layout ); + ( "and_with_plus_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x && (y + z)) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom "&&"; - indent (fused [line; expression (y + z)]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression x; + pretty_space; + atom "&&"; + indent (fused [pretty_line; expression (y + z)]); + ])) layout; assert_output ~ctxt "x&&y+z" layout; - assert_output ~ctxt ~pretty:true "x && y + z" layout; - end; - - "and_literal_lhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x && y + z" layout ); + ( "and_literal_lhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (str && x) in - assert_layout ~ctxt - L.(loc (group [ - expression str; pretty_space; atom "&&"; - indent (fused [line; expression x]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [expression str; pretty_space; atom "&&"; indent (fused [pretty_line; expression x])])) layout; assert_output ~ctxt "\"a\"&&x" layout; - assert_output ~ctxt ~pretty:true "\"a\" && x" layout; - end; - - "and_literal_rhs" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "\"a\" && x" layout ); + ( "and_literal_rhs" + >:: fun ctxt -> let layout = Js_layout_generator.expression (x && str) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom "&&"; - indent (fused [line; expression str]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [expression x; pretty_space; atom "&&"; indent (fused [pretty_line; expression str])])) layout; assert_output ~ctxt "x&&\"a\"" layout; - assert_output ~ctxt ~pretty:true "x && \"a\"" layout; - end; - - "function" >:: begin fun ctxt -> - let fn = (Loc.none, Flow_ast.Expression.Function ( - Functions.make ~id:None ~expression:true ~params:[] ()) - ) in + assert_output ~ctxt ~pretty:true "x && \"a\"" layout ); + ( "function" + >:: fun ctxt -> + let fn = (Loc.none, Flow_ast.Expression.Function (Functions.make ~id:None ())) in let layout = Js_layout_generator.expression (fn && x) in - assert_layout ~ctxt - L.(loc (group [ - expression fn; pretty_space; atom "&&"; - indent (fused [line; expression x]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [expression fn; pretty_space; atom "&&"; indent (fused [pretty_line; expression x])])) layout; assert_output ~ctxt "function(){}&&x" layout; assert_output ~ctxt ~pretty:true "function() {} && x" layout; let layout = Js_layout_generator.expression (x && fn) in - assert_layout ~ctxt - L.(loc (group [ - expression x; pretty_space; atom "&&"; - indent (fused [line; expression fn]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [expression x; pretty_space; atom "&&"; indent (fused [pretty_line; expression fn])])) layout; assert_output ~ctxt "x&&function(){}" layout; - assert_output ~ctxt ~pretty:true "x && function() {}" layout; - end; - - "sequence" >:: begin fun ctxt -> + assert_output ~ctxt ~pretty:true "x && function() {}" layout ); + ( "sequence" + >:: fun ctxt -> let seq = E.sequence [x; y] in 
let layout = Js_layout_generator.expression (seq && z) in - assert_layout ~ctxt - L.(loc (group [ - wrap_in_parens (expression seq); - pretty_space; atom "&&"; - indent (fused [line; expression z]); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + wrap_in_parens (expression seq); + pretty_space; + atom "&&"; + indent (fused [pretty_line; expression z]); + ])) layout; assert_output ~ctxt "(x,y)&&z" layout; assert_output ~ctxt ~pretty:true "(x, y) && z" layout; let layout = Js_layout_generator.expression (z && seq) in - assert_layout ~ctxt - L.(loc (group [ - expression z; pretty_space; atom "&&"; - indent (fused ( - line::(wrap_in_parens_raw (expression seq)) - )); - ])) + assert_layout + ~ctxt + L.( + loc + (group + [ + expression z; + pretty_space; + atom "&&"; + indent (fused [pretty_line; wrap_in_parens (expression seq)]); + ])) layout; assert_output ~ctxt "z&&(x,y)" layout; assert_output ~ctxt ~pretty:true "z && (x, y)" layout; let layout = Js_layout_generator.expression (E.sequence [z; seq]) in - assert_layout ~ctxt - L.(loc (sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - expression z; - Layout.IfBreak ((atom ","), (fused [atom ","; pretty_space])); - ]; - wrap_in_parens (expression seq); - ]; - ])) + assert_layout + ~ctxt + L.(loc (group [loc (id "z"); atom ","; pretty_line; wrap_in_parens (expression seq)])) layout; assert_output ~ctxt "z,(x,y)" layout; assert_output ~ctxt ~pretty:true "z, (x, y)" layout; let layout = Js_layout_generator.expression (E.sequence [seq; z]) in - assert_layout ~ctxt - L.(loc (sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused ( - wrap_in_parens_raw (expression seq) @ - [Layout.IfBreak ((atom ","), (fused [atom ","; pretty_space]))] - ); - expression z; - ]; - ])) + assert_layout + ~ctxt + L.(loc (group [wrap_in_parens (expression seq); atom ","; pretty_line; loc (id "z")])) layout; assert_output ~ctxt "(x,y),z" layout; - assert_output ~ctxt ~pretty:true "(x, y), z" layout; - end; -] + assert_output ~ctxt ~pretty:true "(x, y), z" layout ); + ] diff --git a/src/parser_utils/output/__tests__/js_layout_generator/pattern_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/pattern_test.ml new file mode 100644 index 00000000000..330d14329cc --- /dev/null +++ b/src/parser_utils/output/__tests__/js_layout_generator/pattern_test.ml @@ -0,0 +1,212 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 +open Layout_test_utils +open Layout_generator_test_utils +module S = Ast_builder.Statements +module E = Ast_builder.Expressions +module P = Ast_builder.Patterns +module L = Layout_builder + +let tests = + [ + ( "let_simple_assign" + >:: fun ctxt -> + let mk_layout a = + Js_layout_generator.statement + (S.let_declaration [S.variable_declarator a ~init:(E.identifier "a")]) + in + let layout = mk_layout "a" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + loc + (fused + [ + atom "let"; + space; + loc + (fused [loc (id "a"); pretty_space; atom "="; pretty_space; loc (id "a")]); + ]); + atom ";"; + ])) + layout; + assert_output ~ctxt "let a=a;" layout; + assert_output ~ctxt ~pretty:true "let a = a;" layout; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 in + assert_output ~ctxt ("let " ^ a80 ^ "=a;") layout; + assert_output ~ctxt ~pretty:true ("let " ^ a80 ^ " = a;") layout ); + ( "let_simple_object_assign" + >:: fun ctxt -> + let mk_layout a = + Js_layout_generator.statement + (S.let_declaration [S.variable_declarator_generic (P.object_ a) (Some (E.identifier "a"))]) + in + let layout = mk_layout "a" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + loc + (fused + [ + atom "let"; + pretty_space; + loc + (fused + [ + loc + (group + [ + atom "{"; + indent (fused [softline; loc (id "a")]); + softline; + atom "}"; + ]); + pretty_space; + atom "="; + pretty_space; + loc (id "a"); + ]); + ]); + atom ";"; + ])) + layout; + assert_output ~ctxt "let{a}=a;" layout; + assert_output ~ctxt ~pretty:true "let {a} = a;" layout; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 in + assert_output ~ctxt ("let{" ^ a80 ^ "}=a;") layout; + assert_output ~ctxt ~pretty:true ("let {\n " ^ a80 ^ "\n} = a;") layout ); + ("let_optional_assign" >:: (fun ctxt -> assert_statement_string ~ctxt "let a?=a;")); + ("let_assign_annotation" >:: (fun ctxt -> assert_statement_string ~ctxt "let a:b=a;")); + ("let_optional_assign_annotation" >:: (fun ctxt -> assert_statement_string ~ctxt "let a?:b=a;")); + ("let_empty_object" >:: (fun ctxt -> assert_statement_string ~ctxt "let{}=a;")); + ("let_empty_object_annotation" >:: (fun ctxt -> assert_statement_string ~ctxt "let{}:b=a;")); + ("let_object_single_var" >:: (fun ctxt -> assert_statement_string ~ctxt "let{a}=a;")); + ( "let_object_aliased_var" + >:: fun ctxt -> + assert_statement_string ~ctxt "let{a:b}=a;"; + assert_statement_string ~ctxt ~pretty:true "let {a: b} = a;" ); + ("let_object_multiple_vars" >:: (fun ctxt -> assert_statement_string ~ctxt "let{a,b}=a;")); + ( "let_object_multiple_vars_aliased" + >:: fun ctxt -> + assert_statement_string ~ctxt "let{a:b,c}=a;"; + assert_statement_string ~ctxt ~pretty:true "let {a: b, c} = a;" ); + ("let_object_nested" >:: (fun ctxt -> assert_statement_string ~ctxt "let{a,b:{c}}=a;")); + ("let_object_default" >:: (fun ctxt -> assert_statement_string ~ctxt "let{a=b}=a;")); + ("let_object_aliased_default" >:: (fun ctxt -> assert_statement_string ~ctxt "let{a:b=c}=a;")); + ( "let_object_alias_and_default" + >:: fun ctxt -> + assert_statement_string ~ctxt "let{a:b,c=d}=a;"; + assert_statement_string ~ctxt ~pretty:true "let {a: b, c = d} = a;" ); + ( "let_object_alias_and_default_long" + >:: fun ctxt -> + let d80 = String.make 80 'd' in + assert_statement_string ~ctxt ("let{a:b,c=" ^ d80 ^ "}=a;"); + assert_statement_string ~ctxt ~pretty:true ("let {\n a: b,\n c = " ^ d80 ^ "\n} = a;") ); + ( "let_object_default_expression" + >:: (fun ctxt -> assert_statement_string ~ctxt 
"let{a=++b}=a;") ); + ( "let_object_rest" + >:: fun ctxt -> + assert_statement_string ~ctxt "let{...a}=a;"; + assert_statement_string ~ctxt ~pretty:true "let {...a} = a;" ); + ("let_object_var_rest" >:: (fun ctxt -> assert_statement_string ~ctxt "let{a,...b}=a;")); + ( "let_object_var_rest_long" + >:: fun ctxt -> + let c80 = String.make 80 'c' in + assert_statement_string ~ctxt ("let{a,..." ^ c80 ^ "}=a;"); + assert_statement_string ~ctxt ~pretty:true ("let {\n a,\n ..." ^ c80 ^ "\n} = a;") ); + ("let_array_empty" >:: (fun ctxt -> assert_statement_string ~ctxt "let[]=a;")); + ("let_array_annotated" >:: (fun ctxt -> assert_statement_string ~ctxt "let[]:a=a;")); + ( "let_array_single_item" + >:: fun ctxt -> + let mk_layout a = + Js_layout_generator.statement + (S.let_declaration + [ + S.variable_declarator_generic + (P.array [Some (P.identifier a)]) + (Some (E.identifier "a")); + ]) + in + let layout = mk_layout "a" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + loc + (fused + [ + atom "let"; + pretty_space; + loc + (fused + [ + loc + (group + [ + atom "["; + indent (fused [softline; loc (loc (id "a"))]); + softline; + atom "]"; + ]); + pretty_space; + atom "="; + pretty_space; + loc (id "a"); + ]); + ]); + atom ";"; + ])) + layout; + assert_output ~ctxt "let[a]=a;" layout; + assert_output ~ctxt ~pretty:true "let [a] = a;" layout; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 in + assert_output ~ctxt ("let[" ^ a80 ^ "]=a;") layout; + assert_output ~ctxt ~pretty:true ("let [\n" ^ " " ^ a80 ^ "\n" ^ "] = a;") layout ); + ("let_array_optional_item" >:: (fun ctxt -> assert_statement_string ~ctxt "let[a?]=a;")); + ("let_array_annotated_item" >:: (fun ctxt -> assert_statement_string ~ctxt "let[a:b]=a;")); + ( "let_array_optional_annotated_item" + >:: (fun ctxt -> assert_statement_string ~ctxt "let[a?:b]=a;") ); + ( "let_array_multiple_items" + >:: fun ctxt -> + assert_statement_string ~ctxt "let[a,b]=a;"; + assert_statement_string ~ctxt ~pretty:true "let [a, b] = a;" ); + ( "let_array_holes" + >:: fun ctxt -> + assert_statement_string ~ctxt "let[,,a]=a;"; + assert_statement_string ~ctxt ~pretty:true "let [a, , b] = a;"; + assert_statement_string + ~ctxt + ~pretty:true + ("let [\n a,\n ,\n " ^ String.make 80 'b' ^ "\n] = a;") ); + ("let_nested_array" >:: (fun ctxt -> assert_statement_string ~ctxt "let[[]]=a;")); + ("let_array_holes_and_nested" >:: (fun ctxt -> assert_statement_string ~ctxt "let[,,[a]]=a;")); + ("let_array_spread" >:: (fun ctxt -> assert_statement_string ~ctxt "let[...a]=a;")); + ( "let_array_item_and_spread" + >:: fun ctxt -> + assert_statement_string ~ctxt "let[a,...b]=a;"; + assert_statement_string ~ctxt ~pretty:true "let [a, ...b] = a;"; + assert_statement_string + ~ctxt + ~pretty:true + ("let [\n a,\n ...b" ^ String.make 80 'c' ^ "\n] = a;") ); + ] diff --git a/src/parser_utils/output/__tests__/js_layout_generator/program_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/program_test.ml index 69e12dc395c..cc0187c005e 100644 --- a/src/parser_utils/output/__tests__/js_layout_generator/program_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator/program_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -8,49 +8,217 @@ open OUnit2 open Layout_test_utils open Layout_generator_test_utils - +module S = Ast_builder.Statements +module E = Ast_builder.Expressions module L = Layout_builder -let tests = [ - "blank_lines_if_in_original" >:: begin fun ctxt -> +let tests = + [ + ( "blank_lines_if_in_original" + >:: fun ctxt -> let ast = Ast_builder.program_of_string "var x = 1;\n\n\nvar y = 2;" in let layout = Js_layout_generator.program ~checksum:None ~preserve_docblock:false ast in - assert_layout ~ctxt - L.(program (sequence ~break:Layout.Break_if_pretty ~inline:(true, true) ~indent:0 [ - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=0; offset=0}; _end={Loc.line=1; column=10; offset=10}} (fused [ - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=0; offset=0}; _end={Loc.line=1; column=10; offset=10}} (fused [ - atom "var"; - atom " "; - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=4; offset=4}; _end={Loc.line=1; column=9; offset=9}} (fused [ - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=4; offset=4}; _end={Loc.line=1; column=5; offset=5}} (id ~loc:{Loc.none with Loc.start={Loc.line=1; column=4; offset=4}; _end={Loc.line=1; column=5; offset=5}} "x"); - pretty_space; - atom "="; - pretty_space; - loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=8; offset=8}; _end={Loc.line=1; column=9; offset=9}} (loc ~loc:{Loc.none with Loc.start={Loc.line=1; column=8; offset=8}; _end={Loc.line=1; column=9; offset=9}} (Layout.IfPretty ((atom "1"), (atom "1")))); - ]); - ]); - atom ";"; - ]); - fused [ - pretty_newline; - loc ~loc:{Loc.none with Loc.start={Loc.line=4; column=0; offset=13}; _end={Loc.line=4; column=10; offset=23}} (fused [ - loc ~loc:{Loc.none with Loc.start={Loc.line=4; column=0; offset=13}; _end={Loc.line=4; column=10; offset=23}} (fused [ - atom "var"; - atom " "; - loc ~loc:{Loc.none with Loc.start={Loc.line=4; column=4; offset=17}; _end={Loc.line=4; column=9; offset=22}} (fused [ - loc ~loc:{Loc.none with Loc.start={Loc.line=4; column=4; offset=17}; _end={Loc.line=4; column=5; offset=18}} (id ~loc:{Loc.none with Loc.start={Loc.line=4; column=4; offset=17}; _end={Loc.line=4; column=5; offset=18}} "y"); - pretty_space; - atom "="; - pretty_space; - loc ~loc:{Loc.none with Loc.start={Loc.line=4; column=8; offset=21}; _end={Loc.line=4; column=9; offset=22}} (loc ~loc:{Loc.none with Loc.start={Loc.line=4; column=8; offset=21}; _end={Loc.line=4; column=9; offset=22}} (Layout.IfPretty ((atom "2"), (atom "2")))); - ]); - ]); - atom ";"; - ]); - ]; - ])) + assert_layout + ~ctxt + L.( + program + (group + [ + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 0 }; + _end = { Loc.line = 1; column = 10 }; + } + (fused + [ + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 0 }; + _end = { Loc.line = 1; column = 10 }; + } + (fused + [ + atom "var"; + space; + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 4 }; + _end = { Loc.line = 1; column = 9 }; + } + (fused + [ + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 4 }; + _end = { Loc.line = 1; column = 5 }; + } + (id + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 4 }; + _end = { Loc.line = 1; column = 5 }; + } + "x"); + pretty_space; + atom "="; + pretty_space; + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 1; column = 8 }; + _end = { Loc.line = 1; column = 9 }; + } + (atom "1"); + ]); + ]); + atom ";"; + ]); + pretty_hardline; + pretty_hardline; + loc + ~loc: + { + Loc.none with + Loc.start = { 
Loc.line = 4; column = 0 }; + _end = { Loc.line = 4; column = 10 }; + } + (fused + [ + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 4; column = 0 }; + _end = { Loc.line = 4; column = 10 }; + } + (fused + [ + atom "var"; + space; + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 4; column = 4 }; + _end = { Loc.line = 4; column = 9 }; + } + (fused + [ + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 4; column = 4 }; + _end = { Loc.line = 4; column = 5 }; + } + (id + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 4; column = 4 }; + _end = { Loc.line = 4; column = 5 }; + } + "y"); + pretty_space; + atom "="; + pretty_space; + loc + ~loc: + { + Loc.none with + Loc.start = { Loc.line = 4; column = 8 }; + _end = { Loc.line = 4; column = 9 }; + } + (atom "2"); + ]); + ]); + atom ";"; + ]); + ])) layout; assert_output ~ctxt "var x=1;var y=2;" layout; - assert_output ~ctxt ~pretty:true "var x = 1;\n\nvar y = 2;" layout; - end; -] + assert_output ~ctxt ~pretty:true "var x = 1;\n\nvar y = 2;" layout ); + ( "program_artifact_newline" + >:: fun ctxt -> + let ast = Ast_builder.mk_program [S.expression (E.identifier "x")] in + let layout = + Js_layout_generator.program ~preserve_docblock:false ~checksum:(Some "@artifact abc123") ast + in + assert_layout + ~ctxt + L.( + program + (fused + [ + group [loc (fused [loc (id "x"); atom ";"])]; + hardline; + atom "/* @artifact abc123 */"; + ])) + layout; + assert_output ~ctxt "x;\n/* @artifact abc123 */" layout; + assert_output ~ctxt ~pretty:true "x;\n/* @artifact abc123 */" layout ); + ( "program_trailing_semicolon" + >:: fun ctxt -> + let ast = + Ast_builder.mk_program [S.expression (E.identifier "x"); S.expression (E.identifier "y")] + in + let layout = Js_layout_generator.program ~preserve_docblock:false ~checksum:None ast in + assert_layout + ~ctxt + L.( + program + (group + [ + loc (fused [loc (id "x"); atom ";"]); + pretty_hardline; + loc (fused [loc (id "y"); atom ";"]); + ])) + layout; + assert_output ~ctxt "x;y;" layout; + assert_output ~ctxt ~pretty:true "x;\ny;" layout ); + ( "preserve_docblock" + >:: fun ctxt -> + let c_loc = Loc.{ none with start = { line = 1; column = 1 } } in + let s_loc = Loc.{ none with start = { line = 2; column = 1 } } in + let ast = + let comments = [Ast_builder.Comments.line ~loc:c_loc " hello world"] in + let statements = [S.expression ~loc:s_loc (E.identifier "x")] in + Ast_builder.mk_program ~comments statements + in + begin + let layout = Js_layout_generator.program ~preserve_docblock:true ~checksum:None ast in + assert_layout + ~ctxt + L.( + program + (group + [ + loc ~loc:c_loc (fused [atom "//"; atom " hello world"; hardline]); + pretty_hardline; + loc ~loc:s_loc (fused [loc (id "x"); atom ";"]); + ])) + layout; + assert_output ~ctxt "// hello world\nx;" layout; + + (* TODO: inserts an extra line between line comments *) + assert_output ~ctxt ~pretty:true "// hello world\n\nx;" layout + end; + + let layout = Js_layout_generator.program ~preserve_docblock:false ~checksum:None ast in + assert_layout + ~ctxt + L.(program (group [loc ~loc:s_loc (fused [loc (id "x"); atom ";"])])) + layout; + assert_output ~ctxt "x;" layout; + assert_output ~ctxt ~pretty:true "x;" layout ); + ] diff --git a/src/parser_utils/output/__tests__/js_layout_generator/variable_declaration_precedence_test.ml b/src/parser_utils/output/__tests__/js_layout_generator/variable_declaration_precedence_test.ml index 80d75ba0967..ed594dc5d79 100644 --- 
a/src/parser_utils/output/__tests__/js_layout_generator/variable_declaration_precedence_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator/variable_declaration_precedence_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,7 +7,6 @@ open Ast_builder open Layout_test_utils - module S = Ast_builder.Statements module E = Ast_builder.Expressions module L = Layout_builder @@ -15,53 +14,79 @@ module L = Layout_builder let test ctxt = let seq = E.sequence [E.identifier "y"; E.identifier "z"] in let ast = S.variable_declaration [S.variable_declarator "x" ~init:seq] in - assert_layout_of_statement ~ctxt - L.(loc (fused [ - loc (fused [ - atom "var"; atom " "; - loc (fused ( - [loc (id "x"); pretty_space; atom "="; pretty_space] @ - wrap_in_parens_raw (expression seq) - )); - ]); - atom ";"; - ])) + assert_layout_of_statement + ~ctxt + L.( + loc + (fused + [ + loc + (fused + [ + atom "var"; + atom " "; + loc + (fused + [ + loc (id "x"); + pretty_space; + atom "="; + pretty_space; + wrap_in_parens (expression seq); + ]); + ]); + atom ";"; + ])) ast; let ast = - let init = E.assignment - (Patterns.identifier "y") - (E.identifier "z") in + let init = E.assignment (Patterns.identifier "y") (E.identifier "z") in S.variable_declaration [S.variable_declarator "x" ~init] in - assert_layout_of_statement ~ctxt - L.(loc (fused [ - loc (fused [ - atom "var"; atom " "; - loc (fused [ - loc (id "x"); - pretty_space; atom "="; pretty_space; - loc (fused [ - loc (id "y"); - pretty_space; atom "="; pretty_space; - loc (id "z"); - ]); - ]); - ]); - atom ";"; - ])) + assert_layout_of_statement + ~ctxt + L.( + loc + (fused + [ + loc + (fused + [ + atom "var"; + atom " "; + loc + (fused + [ + loc (id "x"); + pretty_space; + atom "="; + pretty_space; + loc + (fused + [loc (id "y"); pretty_space; atom "="; pretty_space; loc (id "z")]); + ]); + ]); + atom ";"; + ])) ast; let fn_ast = E.function_ () in let ast = S.variable_declaration [S.variable_declarator "x" ~init:fn_ast] in - assert_layout_of_statement ~ctxt - L.(loc (fused [ - loc (fused [ - atom "var"; atom " "; - loc (fused [ - loc (id "x"); pretty_space; atom "="; pretty_space; expression fn_ast; - ]); - ]); - atom ";"; - ])) - ast; + assert_layout_of_statement + ~ctxt + L.( + loc + (fused + [ + loc + (fused + [ + atom "var"; + atom " "; + loc + (fused + [loc (id "x"); pretty_space; atom "="; pretty_space; expression fn_ast]); + ]); + atom ";"; + ])) + ast diff --git a/src/parser_utils/output/__tests__/js_layout_generator_test.ml b/src/parser_utils/output/__tests__/js_layout_generator_test.ml index 076d106bf52..f3647fd2164 100644 --- a/src/parser_utils/output/__tests__/js_layout_generator_test.ml +++ b/src/parser_utils/output/__tests__/js_layout_generator_test.ml @@ -1,1742 +1,2286 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module Ast = Flow_ast - open OUnit2 open Ast_builder open Layout_test_utils open Layout_generator_test_utils - +module I = Ast_builder.Identifiers module S = Ast_builder.Statements module E = Ast_builder.Expressions +module F = Ast_builder.Functions module J = Ast_builder.JSXs module L = Layout_builder -let make_loc start_line end_line = Loc.{ - source = None; - start = { line = start_line; column = 0; offset = 0; }; - _end = { line = end_line; column = 0; offset = 0; }; - } - -let tests = "js_layout_generator" >::: [ - "operator_precedence" >::: Operator_precedence_test.tests; - "assignment_precedence" >:: Assignment_precedence_test.test; - "variable_declaration_precedence" >:: Variable_declaration_precedence_test.test; - "objects" >::: Object_test.tests; - "comment" >::: Comment_test.tests; - "program" >::: Program_test.tests; - - "unary_plus_binary" >:: - begin fun ctxt -> - let module U = Ast.Expression.Unary in - let module B = Ast.Expression.Binary in - - let x = E.identifier "x" in - let y = E.identifier "y" in - let plus_y = E.unary ~op:U.Plus y in - let minus_y = E.unary ~op:U.Minus y in - - let ast = E.binary ~op:B.Plus x plus_y in - assert_expression ~ctxt "x+ +y" ast; - - let ast = E.binary ~op:B.Plus plus_y x in - assert_expression ~ctxt "+y+x" ast; - - let ast = E.binary ~op:B.Minus x minus_y in - assert_expression ~ctxt "x- -y" ast; - - let ast = E.binary ~op:B.Plus x minus_y in - assert_expression ~ctxt "x+-y" ast; - - let ast = E.binary ~op:B.Minus x plus_y in - assert_expression ~ctxt "x-+y" ast; - - let ast = E.binary ~op:B.Plus x (E.conditional plus_y y y) in - assert_expression ~ctxt "x+(+y?y:y)" ast; - - let ast = E.binary ~op:B.Plus x (E.binary plus_y ~op:B.Plus y) in - assert_expression ~ctxt "x+(+y+y)" ast; - - (* `*` is higher precedence than `+`, so would not normally need parens if +let tests = + "js_layout_generator" + >::: [ + "operator_precedence" >::: Operator_precedence_test.tests; + "assignment_precedence" >:: Assignment_precedence_test.test; + "variable_declaration_precedence" >:: Variable_declaration_precedence_test.test; + "objects" >::: Object_test.tests; + "comment" >::: Comment_test.tests; + "pattern" >::: Pattern_test.tests; + "program" >::: Program_test.tests; + "jsx" >::: Jsx_test.tests; + ( "unary_plus_binary" + >:: fun ctxt -> + let x = E.identifier "x" in + let y = E.identifier "y" in + let plus_y = E.unary_plus y in + let minus_y = E.unary_minus y in + let ast = E.plus x plus_y in + assert_expression ~ctxt "x+ +y" ast; + + let ast = E.plus plus_y x in + assert_expression ~ctxt "+y+x" ast; + + let ast = E.minus x minus_y in + assert_expression ~ctxt "x- -y" ast; + + let ast = E.plus x minus_y in + assert_expression ~ctxt "x+-y" ast; + + let ast = E.minus x plus_y in + assert_expression ~ctxt "x-+y" ast; + + let ast = E.plus x (E.conditional plus_y y y) in + assert_expression ~ctxt "x+(+y?y:y)" ast; + + let ast = E.plus x (E.plus plus_y y) in + assert_expression ~ctxt "x+(+y+y)" ast; + + (* `*` is higher precedence than `+`, so would not normally need parens if not for the `+y` *) - let ast = E.binary ~op:B.Plus x (E.binary plus_y ~op:B.Mult y) in - assert_expression ~ctxt "x+(+y)*y" ast; + let ast = E.plus x (E.mult plus_y y) in + assert_expression ~ctxt "x+(+y)*y" ast; - (* parens are necessary around the inner `+y+y`, but would be reundant + (* parens are necessary around the inner `+y+y`, but would be reundant around the multiplication. that is, we don't need `x+((+y+y)*y)`. 
*) - let ast = E.binary - ~op:B.Plus x (E.binary ~op:B.Mult (E.binary plus_y ~op:B.Plus y) y) in - assert_expression ~ctxt "x+(+y+y)*y" ast; - end; - - "update_plus_binary" >:: - begin fun ctxt -> - let x = E.identifier "x" in - let y = E.identifier "y" in - let x_incr = (Loc.none, Ast.Expression.Update { Ast.Expression.Update. - operator = Ast.Expression.Update.Increment; - prefix = false; - argument = x; - }) in - let x_decr = (Loc.none, Ast.Expression.Update { Ast.Expression.Update. - operator = Ast.Expression.Update.Decrement; - prefix = false; - argument = x; - }) in - let incr_y = (Loc.none, Ast.Expression.Update { Ast.Expression.Update. - operator = Ast.Expression.Update.Increment; - prefix = true; - argument = y; - }) in - let decr_y = (Loc.none, Ast.Expression.Update { Ast.Expression.Update. - operator = Ast.Expression.Update.Decrement; - prefix = true; - argument = y; - }) in - - let ast = E.binary ~op:Ast.Expression.Binary.Plus x incr_y in - assert_expression ~ctxt "x+ ++y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Minus x incr_y in - assert_expression ~ctxt "x- ++y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Minus x decr_y in - assert_expression ~ctxt "x- --y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Plus x decr_y in - assert_expression ~ctxt "x+ --y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Plus x_incr y in - assert_expression ~ctxt "x+++y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Minus x_decr y in - assert_expression ~ctxt "x---y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Plus x_incr incr_y in - assert_expression ~ctxt "x+++ ++y" ast; - - let ast = E.binary ~op:Ast.Expression.Binary.Minus x_decr decr_y in - assert_expression ~ctxt "x--- --y" ast; - end; - - "do_while_semicolon" >:: - begin fun ctxt -> - let module S = Ast.Statement in - (* do { x } while (y) *) - let ast = (Loc.none, S.DoWhile { S.DoWhile. - body = (Loc.none, S.Block { S.Block. - body = [ - Loc.none, S.Expression { S.Expression. - expression = E.identifier "x"; - directive = None; - }; - ]; - }); - test = E.identifier "y"; - }) in - assert_statement ~ctxt "do{x}while(y);" ast; - end; - - "do_while_single_statement" >:: - begin fun ctxt -> - let module S = Ast.Statement in - (* do x; while (y) *) - let ast = (Loc.none, S.DoWhile { S.DoWhile. - body = (Loc.none, S.Expression { S.Expression. - expression = E.identifier "x"; - directive = None; - }); - test = E.identifier "y"; - }) in - assert_statement ~ctxt "do x;while(y);" ast; - end; - - "conditional_expression_parens" >:: - begin fun ctxt -> - let module Expr = Ast.Expression in - - let a, b, c, d, e = - E.identifier "a", E.identifier "b", E.identifier "c", - E.identifier "d", E.identifier "e" in - - (* a ? b++ : c-- *) - let update = E.conditional a - (E.update ~op:Expr.Update.Increment ~prefix:false b) - (E.update ~op:Expr.Update.Decrement ~prefix:false c) in - assert_expression ~ctxt "a?b++:c--" update; - - (* a ? +b : -c *) - let unary = E.conditional a - (E.unary ~op:Expr.Unary.Plus b) - (E.unary ~op:Expr.Unary.Minus c) in - assert_expression ~ctxt "a?+b:-c" unary; - - (* (a || b) ? c : d *) - let logical_test = E.conditional (E.logical_or a b) c d in - assert_expression ~ctxt "a||b?c:d" logical_test; - - (* (a ? b : c) ? d : e *) - let nested_in_test = E.conditional (E.conditional a b c) d e in - assert_expression ~ctxt "(a?b:c)?d:e" nested_in_test; - - (* a ? (b ? 
c : d) : e *) - let nested_in_consequent = E.conditional a (E.conditional b c d) e in - assert_expression ~ctxt "a?b?c:d:e" nested_in_consequent; - - (* a ? b : (c ? d : e) *) - let nested_in_alternate = E.conditional a b (E.conditional c d e) in - assert_expression ~ctxt "a?b:c?d:e" nested_in_alternate; - - let assignment = E.conditional - a - (E.assignment (Patterns.identifier "x") b) - (E.assignment (Patterns.identifier "y") c) - in - assert_expression ~ctxt "a?x=b:y=c" assignment; - - let sequence = E.conditional a (E.sequence [b; c]) (E.sequence [d; e]) in - assert_expression ~ctxt "a?(b,c):(d,e)" sequence; - end; - - "call_expression_parens" >:: - begin fun ctxt -> - let x = E.identifier "x" in - - (* `(x++)()` *) - let update = E.call (E.update ~op:Ast.Expression.Update.Increment ~prefix:false x) in - assert_expression ~ctxt "(x++)()" update; - - (* `x.y()` *) - let member = E.call (E.member_expression (E.member x ~property:"y")) in - assert_expression ~ctxt "x.y()" member; - - (* `x.y.z()` *) - let two_members = E.call - (E.member_expression (E.member - (E.member_expression (E.member x ~property:"y")) - ~property:"z")) in - assert_expression ~ctxt "x.y.z()" two_members; - - (* `x()()` *) - let call = E.call (E.call x) in - assert_expression ~ctxt "x()()" call; - - (* `new x()()` *) - let new_ = E.call (E.new_ x) in - assert_expression ~ctxt "new x()()" new_; - - (* `function() {}()` *) - let func = E.call (E.function_ ()) in - assert_expression ~ctxt "function(){}()" func; - - (* `(function() {}.foo)()` *) - let func = E.call (E.member_expression (E.member - (E.function_ ()) ~property:"foo" - )) in - assert_expression ~ctxt "function(){}.foo()" func; - - (* `(() => {})()` *) - let arrow = E.call (E.arrow_function ()) in - assert_expression ~ctxt "(()=>{})()" arrow; - - (* `(foo, bar)()` *) - let seq = E.call (E.sequence [x; E.identifier "y"]) in - assert_expression ~ctxt "(x,y)()" seq; - - (* `__d("a", [], (function() {}), 1)` *) - let underscore_d = E.call - ~args:[ - Ast.Expression.Expression (E.literal (Literals.string "a")); - Ast.Expression.Expression (E.literal (Literals.string "b")); - Ast.Expression.Expression (E.function_ ()); - Ast.Expression.Expression (E.literal (Literals.number 1. 
"1")); - ] - (E.identifier "__d") in - assert_expression ~ctxt "__d(\"a\",\"b\",(function(){}),1)" underscore_d; - end; - - "member_expression_parens" >:: - begin fun ctxt -> - let x = E.identifier "x" in - - (* `(x++).y` *) - let update = E.member_expression (E.member - (E.update ~op:Ast.Expression.Update.Increment ~prefix:false x) - ~property:"y") in - assert_expression ~ctxt "(x++).y" update; - - (* `x.y.z` *) - let member = E.member_expression (E.member - (E.member_expression (E.member x ~property:"y")) - ~property:"z") in - assert_expression ~ctxt "x.y.z" member; - - (* x().y *) - let call = E.member_expression (E.member (E.call x) ~property:"y") in - assert_expression ~ctxt "x().y" call; - - (* x()[y] *) - let computed = E.member_expression ( - E.member_computed (E.call x) ~property:"y" - ) in - assert_expression ~ctxt "x()[y]" computed; - - (* `(function() {}).x` *) - let func = E.member_expression (E.member - (E.function_ ()) - ~property:"x" - ) in - assert_expression ~ctxt "function(){}.x" func; - - (* `(() => {}).x` *) - let func = E.member_expression (E.member - (E.arrow_function ()) - ~property:"x" - ) in - assert_expression ~ctxt "(()=>{}).x" func; - - (* `(x, y).z` *) - let seq = E.member_expression (E.member - (E.sequence [x; E.identifier "y"]) - ~property:"z" - ) in - assert_expression ~ctxt "(x,y).z" seq; - - let num = E.member_expression (E.member - (E.literal (Literals.number 1.0 "1")) - ~property:"z" - ) in - assert_expression ~ctxt "1..z" num; - let num = E.member_expression (E.member - (E.literal (Literals.number 1.1 "1.1")) - ~property:"z" - ) in - assert_expression ~ctxt "1.1.z" num; - let num = E.member_expression (E.member - (E.literal (Literals.number 0.0000001 "0.0000001")) - ~property:"z" - ) in - assert_expression ~ctxt "1e-7.z" num; - - end; - - "new_expression_parens" >:: - begin fun ctxt -> - let x, y, z = E.identifier "x", E.identifier "y", E.identifier "z" in - - (* `new (x++)()` *) - let update = E.new_ ( - E.update ~op:Ast.Expression.Update.Increment ~prefix:false x - ) in - assert_expression ~ctxt "new(x++)()" update; - - (* `new (x())()` *) - let call = E.new_ (E.call x) in - assert_expression ~ctxt "new(x())()" call; - - (* `new x.y()` *) - let member = E.new_ (Loc.none, Ast.Expression.Member { Ast.Expression.Member. - _object = x; - property = Ast.Expression.Member.PropertyIdentifier (Loc.none, "y"); - computed = false; - }) in - assert_expression ~ctxt "new x.y()" member; - - (* `new (x.y())()` *) - let member_call = E.new_ (E.call ( - E.member_expression (E.member x ~property:"y") - )) in - assert_expression ~ctxt "new(x.y())()" member_call; - - (* `new (x().y)()` *) - let call_member = E.new_ (E.member_expression ( - E.member (E.call x) ~property:"y" - )) in - assert_expression ~ctxt "new(x().y)()" call_member; - - (* `new (x ? 
y : z)()` *) - let cond = E.new_ (E.conditional x y z) in - assert_expression ~ctxt "new(x?y:z)()" cond; - end; - - "unary_expression_parens" >:: - begin fun ctxt -> - let module Unary = Ast.Expression.Unary in - let module Update = Ast.Expression.Update in - - (* `+(+x)` *) - let plus = E.unary ~op:Unary.Plus ( - E.unary ~op:Unary.Plus (E.identifier "x") - ) in - assert_expression ~ctxt "+(+x)" plus; - - (* `+-x` *) - let minus = E.unary ~op:Unary.Plus ( - E.unary ~op:Unary.Minus (E.identifier "x") - ) in - assert_expression ~ctxt "+-x" minus; - - (* `+(++x)` *) - let prefix_incr = E.unary ~op:Unary.Plus ( - E.update ~op:Update.Increment ~prefix:true (E.identifier "x") - ) in - assert_expression ~ctxt "+(++x)" prefix_incr; - - (* `+--x` *) - let prefix_decr = E.unary ~op:Unary.Plus ( - E.update ~op:Update.Decrement ~prefix:true (E.identifier "x") - ) in - assert_expression ~ctxt "+--x" prefix_decr; - - (* `+x++` *) - let suffix_incr = E.unary ~op:Unary.Plus ( - E.update ~op:Update.Increment ~prefix:false (E.identifier "x") - ) in - assert_expression ~ctxt "+x++" suffix_incr; - - (* `+x--` *) - let suffix_decr = E.unary ~op:Unary.Plus ( - E.update ~op:Update.Decrement ~prefix:false (E.identifier "x") - ) in - assert_expression ~ctxt "+x--" suffix_decr; - - (* `+x()` *) - let call = E.unary ~op:Unary.Plus (E.call (E.identifier "x")) in - assert_expression ~ctxt "+x()" call; - - (* `+new x()` *) - let new_ = E.unary ~op:Unary.Plus (E.new_ (E.identifier "x")) in - assert_expression ~ctxt "+new x()" new_; - end; - - "expression_statement_parens" >:: - begin fun ctxt -> - let obj = S.expression (E.object_ []) in - assert_statement ~ctxt "({});" obj; - - let func = S.expression (E.function_ ()) in - assert_statement ~ctxt "(function(){});" func; - - let arrow = S.expression (E.arrow_function ()) in - assert_statement ~ctxt "()=>{};" arrow; - - let klass = S.expression (E.class_ []) in - assert_statement ~ctxt "(class{});" klass; - - let func_call = S.expression ( - E.call (E.function_ ()) - ) in - assert_statement ~ctxt "(function(){})();" func_call; - - let func_member = S.expression (E.member_expression ( - E.member (E.function_ ()) ~property:"foo" - )) in - assert_statement ~ctxt "(function(){}).foo;" func_member; - - let class_member = S.expression (E.member_expression ( - E.member (E.class_ []) ~property:"foo" - )) in - assert_statement ~ctxt "(class{}).foo;" class_member; - - let func_member_call = S.expression ( - E.call (E.member_expression (E.member - (E.function_ ()) ~property:"foo" - )) - ) in - assert_statement ~ctxt "(function(){}).foo();" func_member_call; - - let func_call_member = S.expression ( - E.member_expression (E.member - (E.call (E.function_ ())) ~property:"foo" - ) - ) in - assert_statement ~ctxt "(function(){})().foo;" func_call_member; - - let func_sequence = S.expression ( - E.sequence [E.function_ (); E.identifier "x"] - ) in - assert_statement ~ctxt "(function(){}),x;" func_sequence; - end; - - "arrow_body_parens" >:: - begin fun ctxt -> - let x, y, z = E.identifier "x", E.identifier "y", E.identifier "z" in - - let arrow = - let body = Functions.body_expression (E.sequence [x; y]) in - E.arrow_function ~body () in - assert_expression ~ctxt "()=>(x,y)" arrow; - - let arrow = - let body = Functions.body_expression (E.conditional x y z) in - E.arrow_function ~body () in - assert_expression ~ctxt "()=>x?y:z" arrow; - - let arrow = - let arrow = E.arrow_function () in - let body = Functions.body_expression arrow in - E.arrow_function ~body () in - assert_expression 
~ctxt "()=>()=>{}" arrow; - end; - - "argument_parens" >:: - begin fun ctxt -> - let f = E.identifier "f" in - let x, y, z = E.identifier "x", E.identifier "y", E.identifier "z" in - - let args = [] in - let call = E.call ~args f in - assert_expression ~ctxt "f()" call; - - let args = - let seq = E.sequence [x; y] in - [Ast.Expression.Expression seq] in - let call = E.call ~args f in - assert_expression ~ctxt ~msg:"sequence should be parenthesized" - "f((x,y))" call; - - let args = [E.spread (E.sequence [x; y])] in - let call = E.call ~args f in - assert_expression ~ctxt ~msg:"sequence should be parenthesized" - "f(...(x,y))" call; - - let args = [Ast.Expression.Expression (E.conditional x y z)] in - let call = E.call ~args f in - assert_expression ~ctxt ~msg:"higher-precedence ops don't need parens" - "f(x?y:z)" call; - - let call = - let arrow = E.arrow_function () in - let args = [Ast.Expression.Expression arrow] in - E.call ~args f in - assert_expression ~ctxt ~msg:"higher-precedence ops don't need parens" - "f(()=>{})" call; - - let args = - let seq = E.sequence [x; y] in - let logical = E.logical_or seq z in - [Ast.Expression.Expression logical] in - let call = E.call ~args f in - assert_expression ~ctxt ~msg:"nested sequence has parens" - "f((x,y)||z)" call; - end; - - "binary_in_space" >:: - begin fun ctxt -> - let ast = statement_of_string {|if("foo" in {"foo": bar}){}|} in - assert_statement ~ctxt {|if("foo"in{"foo":bar}){}|} ast; - - let ast = statement_of_string {|if("foo" in bar){}|} in - assert_statement ~ctxt {|if("foo"in bar){}|} ast; - - let ast = statement_of_string {|if(foo in {"foo":bar}){}|} in - assert_statement ~ctxt {|if(foo in{"foo":bar}){}|} ast; - end; - - "binary_instanceof_space" >:: - begin fun ctxt -> - let ast = statement_of_string {|if("foo" instanceof {"foo": bar}){}|} in - assert_statement ~ctxt {|if("foo"instanceof{"foo":bar}){}|} ast; - - let ast = statement_of_string {|if("foo" instanceof bar){}|} in - assert_statement ~ctxt {|if("foo"instanceof bar){}|} ast; - - let ast = statement_of_string {|if(foo instanceof {"foo":bar}){}|} in - assert_statement ~ctxt {|if(foo instanceof{"foo":bar}){}|} ast; - end; - - "logical_wrapping" >:: - begin fun ctxt -> - let x40 = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" in - let ast = E.logical_and (E.identifier x40) (E.identifier x40) in - let layout = Js_layout_generator.expression ast in - assert_layout ~ctxt - L.(loc (group [ - loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); - pretty_space; - atom "&&"; - indent ((fused [ - Layout.IfBreak (Layout.Newline, pretty_space); - loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); - ])); - ])) - layout; - assert_output ~ctxt (x40^"&&"^x40) layout; - assert_output ~ctxt ~pretty:true (x40^" &&\n "^x40) layout; - end; - - "return_statement_parens" >:: - begin fun ctxt -> - let ret = S.return None in - assert_statement ~ctxt "return;" ret; - - let x = E.identifier "x" in - let y = E.identifier "y" in - let seq = E.sequence [x; y] in - let ret = S.return (Some seq) in - assert_statement ~ctxt "return x,y;" ret; - assert_statement ~ctxt ~pretty:true "return x, y;" ret; - - (* sequences get split across lines and wrapped in parens *) - let x40 = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" in - let y40 = "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy" in - let func = S.function_declaration (Loc.none, "f") ~body:[ - S.return (Some (E.sequence [E.identifier x40; E.identifier y40])); - ] in - assert_layout_result ~ctxt - L.(loc (fused [ - atom "return"; - atom " "; - sequence 
~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - Layout.IfBreak ((atom "("), empty); - sequence ~break:Layout.Break_if_needed [ - loc (sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); - Layout.IfBreak ((atom ","), (fused [atom ","; pretty_space])); - ]; - loc (id "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"); - ]; - ]); - ]; - Layout.IfBreak ((atom ")"), empty); - ]; - ]; - Layout.IfPretty ((atom ";"), empty); - ])) - Layout_matcher.(body_of_function_declaration func >>= nth_sequence 0); - assert_statement ~ctxt ("function f(){return "^x40^","^y40^"}") func; - assert_statement ~ctxt ~pretty:true - ("function f() {\n return (\n "^x40^",\n "^y40^"\n );\n}") - func; - - (* logicals get split *) - let logical = E.logical_and (E.identifier x40) (E.identifier y40) in - let func = S.function_declaration (Loc.none, "f") ~body:[S.return (Some logical)] in - assert_layout_result ~ctxt - L.(loc (fused [ - atom "return"; - atom " "; - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - Layout.IfBreak ((atom "("), empty); - sequence ~break:Layout.Break_if_needed [expression logical]; - Layout.IfBreak ((atom ")"), empty); - ]; - ]; - Layout.IfPretty ((atom ";"), empty); - ])) - Layout_matcher.(body_of_function_declaration func >>= nth_sequence 0); - assert_statement ~ctxt ~pretty:true - ("function f() {\n return (\n "^x40^" &&\n " ^ y40 ^ "\n );\n}") - func; - - (* binary expressions get split *) - let func = S.function_declaration (Loc.none, "f") ~body:[ - let op = Ast.Expression.Binary.Plus in - S.return (Some (E.binary ~op (E.identifier x40) (E.identifier y40))) - ] in - assert_layout_result ~ctxt - L.(loc (fused [ - atom "return"; - atom " "; - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - Layout.IfBreak ((atom "("), empty); - sequence ~break:Layout.Break_if_needed [ - (* TODO: this is wrong, it should allow the + to break *) - loc (fused [ - loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); - pretty_space; - atom "+"; - pretty_space; - loc (id "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"); - ]); - ]; - Layout.IfBreak ((atom ")"), empty); - ]; - ]; - Layout.IfPretty ((atom ";"), empty); - ])) - Layout_matcher.(body_of_function_declaration func >>= nth_sequence 0); - assert_statement ~ctxt ~pretty:true - ("function f() {\n return (\n "^x40^" + " ^ y40 ^ "\n );\n}") - func; - - (* jsx gets split *) - let long_name = String.make 80 'A' in - let jsx = E.jsx_element (J.element (J.identifier long_name)) in - let func = S.function_declaration (Loc.none, "f") ~body:[S.return (Some jsx)] in - assert_layout_result ~ctxt - L.(loc (fused [ - atom "return"; - pretty_space; - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - Layout.IfBreak ((atom "("), empty); - sequence ~break:Layout.Break_if_needed [expression jsx]; - Layout.IfBreak ((atom ")"), empty); - ]; - ]; - Layout.IfPretty ((atom ";"), empty); - ])) - Layout_matcher.(body_of_function_declaration func >>= nth_sequence 0); - assert_statement ~ctxt ~pretty:true - ("function f() {\n return (\n <"^long_name^">\n );\n}") - func; - - (* a string doesn't get split *) - let x80 = x40 ^ x40 in - let func = S.function_declaration (Loc.none, "f") ~body:[ - S.return (Some (E.identifier x80)) - ] in - assert_layout_result ~ctxt - L.(loc (fused [ - atom "return"; - atom " "; - loc 
(id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); - Layout.IfPretty ((atom ";"), empty); - ])) - Layout_matcher.(body_of_function_declaration func >>= nth_sequence 0); - assert_statement ~ctxt ("function f(){return "^x80^"}") func; - assert_statement ~ctxt ~pretty:true ("function f() {\n return "^x80^";\n}") func; - end; - - "return_statement_space" >:: - begin fun ctxt -> - let assert_no_space ~ctxt expr = - let ret = statement_of_string ("return "^expr^";") in - assert_statement ~ctxt ("return"^expr^";") ret - in - - assert_no_space ~ctxt {|"foo"|}; - assert_no_space ~ctxt {|{foo:"bar"}|}; - assert_no_space ~ctxt {|[foo]|}; - assert_no_space ~ctxt {|!foo|}; - assert_no_space ~ctxt {|+foo|}; - assert_no_space ~ctxt {|-foo|}; - assert_no_space ~ctxt {|~foo|}; - - let ret = statement_of_string {|return (foo);|} in - assert_statement ~ctxt {|return foo;|} ret; - - let ret = statement_of_string {|return 123;|} in - assert_statement ~ctxt {|return 123;|} ret; - end; - - "for_loops" >:: - begin fun ctxt -> - let ast = - let x, y = E.identifier "x", E.identifier "y" in - let init = E.binary x ~op:Ast.Expression.Binary.In y in - let body = S.empty () in - S.for_ init None None body - in - assert_statement ~ctxt ~msg:"binary `in` expressions need parens" - "for((x in y);;);" ast; - - let ast = - let y, z = E.identifier "y", E.identifier "z" in - let true_ = Expressions.true_ () in - let in_expr = E.binary y ~op:Ast.Expression.Binary.In z in - let eq_expr = E.binary true_ ~op:Ast.Expression.Binary.Equal in_expr in - let init = E.assignment (Patterns.identifier "x") eq_expr in - let body = S.empty () in - S.for_ init None None body - in - assert_statement ~ctxt ~msg:"binary `in` expressions need parens" - "for(x=true==(y in z);;);" ast; - end; - - "for_in_space" >:: - begin fun ctxt -> - let ast = statement_of_string {|for(var x in {"foo": bar}){}|} in - assert_statement ~ctxt {|for(var x in{"foo":bar}){}|} ast; - - let ast = statement_of_string {|for(var x in bar){}|} in - assert_statement ~ctxt {|for(var x in bar){}|} ast; - end; - - "for_statement_without_block" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "for(;;)x;"; - assert_statement_string ~ctxt "{for(;;)x}"; - end; - - "if_statements" >:: - begin fun ctxt -> - let ast = S.if_ - (E.identifier "x") - (S.labeled (Loc.none, "y") (S.empty ())) - (Some (S.empty ())) - in - assert_statement ~ctxt "if(x)y:;else;" ast; - end; - - "if_statement_without_block" >:: - begin fun ctxt -> - let if_stmt = S.if_ - (E.identifier "x") - (S.expression (E.identifier "y")) - (None) - in - let if_else_stmt = S.if_ - (E.identifier "x") - (S.expression (E.identifier "y")) - (Some (S.expression (E.identifier "z"))) - in - - assert_statement ~ctxt "if(x)y;" if_stmt; - assert_statement ~ctxt "if(x)y;else z;" if_else_stmt; - - let ast = S.block [ - if_stmt; - S.expression (E.identifier "z"); - ] in - assert_statement ~ctxt "{if(x)y;z}" ast; - - let ast = S.block [ - if_else_stmt; - ] in - assert_statement ~ctxt "{if(x)y;else z}" ast; - - let ast = S.if_ - (E.identifier "x") - (S.expression (E.identifier "y")) - (Some (S.expression ( - E.update ~op:Ast.Expression.Update.Increment ~prefix:true (E.identifier "z") - ))) - in - assert_statement ~ctxt "if(x)y;else++z;" ast - end; - - "while_statement_without_block" >:: - begin fun ctxt -> - let while_stmt = S.while_ - (E.identifier "x") - (S.expression (E.identifier "y")) - in - assert_statement ~ctxt "while(x)y;" while_stmt; - - let ast = S.block [while_stmt] in - 
assert_statement ~ctxt "{while(x)y}" ast; - end; - - "do_while_statements" >:: - begin fun ctxt -> - let ast = S.do_while - (S.labeled (Loc.none, "x") (S.empty ())) - (E.identifier "y") - in - assert_statement ~ctxt "do x:;while(y);" ast; - - let ast = S.do_while - (S.expression ( - E.update ~op:Ast.Expression.Update.Increment ~prefix:true (E.identifier "x") - )) - (E.identifier "y") - in - assert_statement ~ctxt "do++x;while(y);" ast; - end; - - "array_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "[]"; - assert_expression_string ~ctxt "[a]"; - assert_expression_string ~ctxt "[a,b]"; - assert_expression_string ~ctxt "[a,,b]"; - assert_expression_string ~ctxt "[a,b,,]"; - assert_expression_string ~ctxt ~pretty:true "[a]"; - assert_expression_string ~ctxt ~pretty:true "[a, b]"; - assert_expression_string ~ctxt ~pretty:true "[a, b, ,]"; - assert_expression_string ~ctxt ~pretty:true ( - "[\n a,\n " ^ String.make 80 'b' ^ ",\n ,\n]" - ); - end; - - "function_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "function a(){}"; - assert_statement_string ~ctxt "async function a(){}"; - assert_statement_string ~ctxt "function* a(){}"; - assert_statement_string ~ctxt "function a(a){}"; - assert_statement_string ~ctxt "function a(a,b){}"; - assert_statement_string ~ctxt "function a(a:b){}"; - assert_statement_string ~ctxt "function a(a:b,c:d){}"; - assert_statement_string ~ctxt "function a(a:?b=b){}"; - assert_statement_string ~ctxt "function a():a{}"; - assert_statement_string ~ctxt "function a(){}"; - assert_statement_string ~ctxt "function a():%checks{}"; - assert_statement_string ~ctxt "function a():a%checks{}"; - assert_statement_string ~ctxt "function a():a%checks(a){}"; - assert_statement_string ~ctxt ~pretty:true ( - "function a(): a %checks(a) {}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "function a(a: a, b: b): a {}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "function a(\n a: a,\n b: " ^ String.make 80 'b' ^ ",\n): a {}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "function a() {\n a;\n}" - ); - end; - - "function_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "function(){}"; - assert_expression_string ~ctxt "function a(){}"; - assert_expression_string ~ctxt "async function(){}"; - assert_expression_string ~ctxt "function*(){}"; - assert_expression_string ~ctxt "function(a){}"; - assert_expression_string ~ctxt "function(a,b){}"; - assert_expression_string ~ctxt "function(a:a,b:b):c{}"; - assert_expression_string ~ctxt "function(){}"; - assert_expression_string ~ctxt ~pretty:true ( - "function(a: a, b: b): c {}" - ); - assert_expression_string ~ctxt "()=>a"; - assert_expression_string ~ctxt "()=>{}"; - assert_expression_string ~ctxt "():* =>{}"; - assert_expression_string ~ctxt "async ()=>{}"; - assert_expression_string ~ctxt "a=>{}"; - assert_expression_string ~ctxt "async a=>{}"; - assert_expression_string ~ctxt "(a)=>{}"; - assert_expression_string ~ctxt "(a,b)=>{}"; - assert_expression_string ~ctxt "(a):%checks=>{}"; - assert_expression_string ~ctxt "({a})=>a"; - assert_expression_string ~ctxt "({a})=>({a:b})"; - assert_expression_string ~ctxt "({a})=>[]"; - assert_expression_string ~ctxt "({a})=>i++"; - assert_expression_string ~ctxt "({a})=>a()"; - assert_expression_string ~ctxt "(a:b)=>{}"; - assert_expression_string ~ctxt "(a?:b)=>{}"; - assert_expression_string ~ctxt "(a):b=>{}"; - assert_expression_string ~ctxt "():c=>{}"; - assert_expression_string ~ctxt "(a):c=>{}"; - 
assert_expression_string ~ctxt "(a:a,b:b):c=>{}"; - assert_expression_string ~ctxt ~pretty:true ( - "(a: a, b: b): c => {}" - ); - end; - - "class_statements" >:: - begin fun ctxt -> - let long_a = String.make 80 'a' in - let long_b = String.make 80 'b' in - - assert_statement_string ~ctxt "class a{}"; - assert_statement_string ~ctxt "class a extends b{}"; - assert_statement_string ~ctxt "class a extends b{}"; - assert_statement_string ~ctxt "class a extends b{}"; - assert_statement_string ~ctxt ~pretty:true ( - "class " ^ long_a ^ " {}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a\n extends " ^ long_b ^ " {}" - ); - assert_statement_string ~ctxt "@a class a extends b{}"; - assert_statement_string ~ctxt "@a@b class a extends b{}"; - assert_statement_string ~ctxt "@a()@b class a extends b{}"; - assert_statement_string ~ctxt "@(++a)@b class a extends b{}"; - assert_statement_string ~ctxt "@(a&&b)@b class a extends b{}"; - assert_statement_string ~ctxt "@(()=>{})@b class a extends b{}"; - assert_statement_string ~ctxt ~pretty:true "@a\nclass a extends b {}"; - assert_statement_string ~ctxt ~pretty:true "@a\n@b\nclass a extends b {}"; - assert_statement_string ~ctxt "class a implements b{}"; - assert_statement_string ~ctxt "class a implements b{}"; - assert_statement_string ~ctxt "class a implements b,c{}"; - assert_statement_string ~ctxt "class a implements b,c{}"; - assert_statement_string ~ctxt "class a extends b implements c{}"; - assert_statement_string ~ctxt ~pretty:true ( - "class a extends b implements c {}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a\n extends " ^ long_b ^ "\n implements c {}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a\n extends " ^ long_b ^ "\n implements " ^ long_b ^ " {}" - ); - (* TODO: this seems wrong, `c {` should break onto a new line *) - assert_statement_string ~ctxt ~pretty:true ( - "class a\n extends " ^ long_b ^ "\n implements " ^ long_b ^ ", c {}" - ); - end; - - "class_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "class{}"; - assert_expression_string ~ctxt "class a{}"; - assert_expression_string ~ctxt "class a extends b{}"; - end; - - "class_methods" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "class a{b(){}}"; - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n b() {}\n static b() {}\n}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n async a() {}\n static async a() {}\n}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n get a() {}\n set a() {}\n static get a() {}\n}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n constructor() {}\n}" - ); - assert_statement_string ~ctxt "class a{@a a(){}}"; - assert_statement_string ~ctxt "class a{@(()=>{}) a(){}}"; - assert_statement_string ~ctxt "class a{@a@b a(){}}"; - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n @a\n a() {}\n}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n @a\n @b\n a() {}\n}" - ); - assert_statement_string ~ctxt "class a{*b(){}}"; - end; - - "class_properties" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "class a{a;}"; - assert_statement_string ~ctxt "class a{a:a;}"; - assert_statement_string ~ctxt "class a{a;b=c;}"; - assert_statement_string ~ctxt "class a{a;b:b=c;}"; - assert_statement_string ~ctxt "class a{+a;}"; - assert_statement_string ~ctxt "class a{+a:a=a;}"; - assert_statement_string ~ctxt "class a{static a;}"; - assert_statement_string ~ctxt "class a{static +a:a=a;}"; - 
assert_statement_string ~ctxt ~pretty:true ( - "class a {\n a;\n b = c;\n static b = c;\n}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n +a: a;\n b: b = c;\n}" - ); - end; - - "class_private_properties" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "class a{#a;}"; - assert_statement_string ~ctxt "class a{#a:a;}"; - assert_statement_string ~ctxt "class a{#a;#b=c;}"; - assert_statement_string ~ctxt "class a{#a;#b:b=c;}"; - assert_statement_string ~ctxt "class a{+#a;}"; - assert_statement_string ~ctxt "class a{+#a:a=a;}"; - assert_statement_string ~ctxt "class a{static #a;}"; - assert_statement_string ~ctxt "class a{static +#a:a=a;}"; - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n #a;\n #b = c;\n static #b = c;\n}" - ); - assert_statement_string ~ctxt ~pretty:true ( - "class a {\n +#a: a;\n #b: b = c;\n}" - ); - end; - - "forof_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "for(let a of b){}"; - assert_statement_string ~ctxt "for(a of b){}"; - assert_statement_string ~ctxt ~pretty:true ( - "for (let a of b) {\n a;\n}" - ); - assert_statement_string ~ctxt ( - "async function f(){for await(let x of y){}}" - ); - end; - - "forof_statement_without_block" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "for(a of b)x;"; - assert_statement_string ~ctxt "{for(a of b)x}"; - end; - - "forin_statement_without_block" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "for(a in b)x;"; - assert_statement_string ~ctxt "{for(a in b)x}"; - end; - - "yield_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "function* f(){yield}"; - assert_expression_string ~ctxt "function* f(){yield a}"; - assert_expression_string ~ctxt "function* f(){yield* a}"; - end; - - "meta_property_expressions" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "function F(){new.target}"; - assert_statement_string ~ctxt "function F(){new.target.name}"; - end; - - "tagged_template_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "a``"; - assert_expression_string ~ctxt "b.c``"; - assert_expression_string ~ctxt "(()=>{})``"; - assert_expression_string ~ctxt "(b=c)``"; - assert_expression_string ~ctxt "(b+c)``"; - assert_expression_string ~ctxt "b()``"; - assert_expression_string ~ctxt "(class{})``"; - assert_expression_string ~ctxt "(b?c:d)``"; - assert_expression_string ~ctxt "(function(){})``"; - assert_expression_string ~ctxt "(b||c)``"; - assert_expression_string ~ctxt "(new B())``"; - assert_expression_string ~ctxt "({})``"; - assert_expression_string ~ctxt "(b,c)``"; - assert_expression_string ~ctxt "````"; - assert_expression_string ~ctxt "(void b)``"; - assert_expression_string ~ctxt "(++b)``"; - end; - - "template_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "``"; - assert_expression_string ~ctxt "`${a}`"; - assert_expression_string ~ctxt "`a${b}c`"; - assert_expression_string ~ctxt "`a${b}c${d}e`"; - assert_expression_string ~ctxt "`\\``"; - end; - - "import_expressions" >:: - begin fun ctxt -> - assert_expression_string ~ctxt {|import("a")|}; - assert_expression_string ~ctxt "import(a)"; - end; - - "jsx_element" >:: - begin fun ctxt -> - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ~pretty:true ( - "" - ); - - begin - let a_loc = make_loc 1 4 in - let b_loc = make_loc 2 2 in 
- let c_loc = make_loc 3 3 in - let ast = E.jsx_element ~loc:a_loc ( - J.element - (J.identifier "A") - ~attrs:[ - J.attr - (J.attr_identifier "a") - (Some (J.attr_literal (Literals.string (String.make 80 'a')))) - ] - ~children:[ - J.child_element ~loc:b_loc (J.identifier "B") ~selfclosing:true; - J.child_element ~loc:c_loc (J.identifier "C") ~selfclosing:true; - ] - ) in - let layout = L.(loc ~loc:a_loc (fused [ - loc (fused [ - atom "<"; id "A"; - sequence ~break:Layout.Break_if_needed ~inline:(true, true) ~indent:0 [ - fused [ - Layout.IfBreak (empty, (atom " ")); - sequence ~break:Layout.Break_if_needed ~inline:(false, true) [ - loc (fused [ - id "a"; - atom "="; - loc (fused [ - atom "\""; - atom "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; - atom "\""; - ]); - ]); - ]; - ]; - ]; - atom ">"; - ]); - sequence ~break:Layout.Break_if_pretty [ - fused [ - loc ~loc:b_loc (loc (fused [atom "<"; id "B"; pretty_space; atom "/>"])); - Layout.Newline; - loc ~loc:c_loc (loc (fused [atom "<"; id "C"; pretty_space; atom "/>"])); - ] - ]; - loc (fused [atom ""]); - ])) in - assert_layout_of_expression ~ctxt layout ast; - assert_expression ~ctxt ~pretty:true ( - "\n \n \n" - ) ast; - assert_expression ~ctxt ( - "\n" - ) ast; - end; - - assert_expression_string ~ctxt ~pretty:true ( - "\n " ^ String.make 80 'b' ^ "\n" - ); - - assert_expression_string ~ctxt ~pretty:true ( - "\n a{\" \"}\n b\n" - ); - assert_expression_string ~ctxt ~pretty:true ( - "" - ); - assert_expression_string ~ctxt ~pretty:true ( - "\n <" ^ String.make 80 'B' ^ " />\n \n" - ); - (* TODO: Utils_jsx.trim_jsx_text is overly aggressive for pretty - * printing, user supplied newlines between words should be - * maintained. The following test should pass: - * - * assert_expression_string ~ctxt ~pretty:true ( - * "\n " ^ String.make 80 'a' ^ "\n " ^ String.make 80 'b' ^ "\n" - * ); - *) - end; - - "jsx_attribute" >:: - begin fun ctxt -> - (* TODO: valueless attributes shouldnt print trailing spaces when last *) - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ""; - assert_expression_string ~ctxt ~pretty:true ( - "" - ); - assert_expression_string ~ctxt ~pretty:true ( - "" - ); - assert_expression_string ~ctxt ~pretty:true ( - "" - ); - assert_expression_string ~ctxt ~pretty:true ( - "" - ); - end; - - "import_declaration_statement" >:: - begin fun ctxt -> - assert_statement_string ~ctxt {|import"a";|}; - assert_statement_string ~ctxt {|import a from"a";|}; - assert_statement_string ~ctxt {|import type a from"a";|}; - assert_statement_string ~ctxt {|import typeof a from"a";|}; - assert_statement_string ~ctxt {|import a,*as b from"a";|}; - assert_statement_string ~ctxt {|import a,{b}from"a";|}; - assert_statement_string ~ctxt {|import{a,type b}from"a";|}; - assert_statement_string ~ctxt {|import{a,typeof b}from"a";|}; - assert_statement_string ~ctxt {|import{a,type b as c}from"a";|}; - assert_statement_string ~ctxt {|import{a as b}from"a";|}; - assert_statement_string ~ctxt {|import type{a}from"a";|}; - assert_statement_string ~ctxt {|import{a,b}from"a";|}; - assert_statement_string ~ctxt {|import type{}from"a";|}; - assert_statement_string ~ctxt 
{|import typeof{}from"a";|}; - assert_statement_string ~ctxt ~pretty:true ( - {|import {a, b} from "a";|} - ); - assert_statement_string ~ctxt ~pretty:true ( - {|import type {a, b} from "a";|} - ); - assert_statement_string ~ctxt ~pretty:true ( - "import {\n a,\n " ^ String.make 80 'b' ^ ",\n} from \"a\";" - ); - assert_statement_string ~ctxt ~pretty:true ( - {|import a, * as b from "a";|} - ); - assert_statement_string ~ctxt ~pretty:true ( - "import a, * as " ^ String.make 80 'b' ^ " from \"a\";" - ); - assert_statement_string ~ctxt ~pretty:true ( - {|import a, {b} from "a";|} - ); - assert_statement_string ~ctxt ~pretty:true ( - "import a, {\n " ^ String.make 80 'b' ^ ",\n} from \"a\";" - ); - end; - - "export_declaration_statement" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "export{};"; - assert_statement_string ~ctxt "export{}from\"a\";"; - assert_statement_string ~ctxt "export{a}from\"a\";"; - assert_statement_string ~ctxt "export{a,b as c};"; - assert_statement_string ~ctxt "export*from\"a\";"; - assert_statement_string ~ctxt "export*as a from\"a\";"; - assert_statement_string ~ctxt "export type{};"; - assert_statement_string ~ctxt "export type{a};"; - assert_statement_string ~ctxt "export type a=b;"; - assert_statement_string ~ctxt "export let a;"; - assert_statement_string ~ctxt "export const a=b;"; - assert_statement_string ~ctxt "export interface a{a():b}"; - assert_statement_string ~ctxt ~pretty:true "export {};"; - assert_statement_string ~ctxt ~pretty:true "export {a} from \"a\";"; - assert_statement_string ~ctxt ~pretty:true "export * from \"a\";"; - assert_statement_string ~ctxt ~pretty:true "export * as a from \"a\";"; - assert_statement_string ~ctxt ~pretty:true "export type {a};"; - assert_statement_string ~ctxt ~pretty:true ( - "export {\n a,\n b as " ^ String.make 80 'c' ^ ",\n} from \"a\";" - ); - assert_statement_string ~ctxt ~pretty:true ( - "export * as " ^ String.make 80 'a' ^ " from \"a\";" - ); - assert_statement_string ~ctxt ~pretty:true "export opaque type a = b;"; - - (* TODO: Flow does not parse this but should + let ast = E.plus x (E.mult (E.plus plus_y y) y) in + assert_expression ~ctxt "x+(+y+y)*y" ast ); + ( "update_plus_binary" + >:: fun ctxt -> + let x = E.identifier "x" in + let y = E.identifier "y" in + let x_incr = E.increment ~prefix:false x in + let x_decr = E.decrement ~prefix:false x in + let incr_y = E.increment ~prefix:true y in + let decr_y = E.decrement ~prefix:true y in + begin + let ast = E.plus x incr_y in + let layout = Js_layout_generator.expression ast in + assert_layout + ~ctxt + L.( + loc + (fused + [ + loc (id "x"); + pretty_space; + atom "+"; + pretty_space; + ugly_space; + loc (fused [atom "++"; loc (id "y")]); + ])) + layout; + assert_output ~ctxt "x+ ++y" layout; + assert_output ~ctxt ~pretty:true "x + ++y" layout + end; + + let ast = E.minus x incr_y in + assert_expression ~ctxt "x-++y" ast; + + let ast = E.minus x decr_y in + assert_expression ~ctxt "x- --y" ast; + + let ast = E.plus x decr_y in + assert_expression ~ctxt "x+--y" ast; + + let ast = E.plus x_incr y in + assert_expression ~ctxt "x+++y" ast; + + let ast = E.minus x_decr y in + assert_expression ~ctxt "x---y" ast; + + let ast = E.plus x_incr incr_y in + assert_expression ~ctxt "x+++ ++y" ast; + + let ast = E.minus x_decr decr_y in + assert_expression ~ctxt "x--- --y" ast ); + ( "do_while_semicolon" + >:: fun ctxt -> + (* do { x } while (y) *) + let layout = + Js_layout_generator.statement + (let body = S.block [S.expression (E.identifier "x")] in + let 
test = E.identifier "y" in + S.do_while body test) + in + assert_output ~ctxt "do{x}while(y);" layout; + assert_output ~ctxt ~pretty:true ("do {\n" ^ " x;\n" ^ "} while (y);") layout ); + ( "do_while_long" + >:: fun ctxt -> + (* do { xxxx... } while (yyyy...) *) + let x80 = String.make 80 'x' in + let y80 = String.make 80 'y' in + let layout = + Js_layout_generator.statement + (let body = S.block [S.expression (E.identifier x80)] in + let test = E.identifier y80 in + S.do_while body test) + in + assert_output ~ctxt ("do{" ^ x80 ^ "}while(" ^ y80 ^ ");") layout; + assert_output + ~ctxt + ~pretty:true + ("do {\n" ^ " " ^ x80 ^ ";\n" ^ "} while (\n" ^ " " ^ y80 ^ "\n" ^ ");") + layout ); + ( "do_while_single_statement" + >:: fun ctxt -> + (* do x; while (y) *) + let layout = + Js_layout_generator.statement + (let body = S.expression (E.identifier "x") in + let test = E.identifier "y" in + S.do_while body test) + in + assert_output ~ctxt "do x;while(y);" layout; + assert_output ~ctxt ~pretty:true "do x; while (y);" layout ); + ( "do_while_single_statement_long" + >:: fun ctxt -> + (* do xxxx...; while (yyyy...) *) + let x80 = String.make 80 'x' in + let y80 = String.make 80 'y' in + let layout = + Js_layout_generator.statement + (let body = S.expression (E.identifier x80) in + let test = E.identifier y80 in + S.do_while body test) + in + assert_output ~ctxt ("do " ^ x80 ^ ";while(" ^ y80 ^ ");") layout; + assert_output + ~ctxt + ~pretty:true + ("do " ^ x80 ^ "; while (\n" ^ " " ^ y80 ^ "\n" ^ ");") + layout ); + ( "do_while_empty_statement" + >:: fun ctxt -> + (* do ; while (y) *) + let layout = + Js_layout_generator.statement + (let body = S.empty () in + let test = E.identifier "y" in + S.do_while body test) + in + assert_output ~ctxt "do;while(y);" layout; + assert_output ~ctxt ~pretty:true "do ; while (y);" layout ) + (* TODO: remove space after do *); + ( "conditionals" + >:: fun ctxt -> + let layout = + Js_layout_generator.expression + (E.conditional (E.identifier "a") (E.identifier "b") (E.identifier "c")) + in + assert_layout + ~ctxt + L.( + loc + (group + [ + loc (id "a"); + indent + (fused + [ + pretty_line; + atom "?"; + pretty_space; + loc (id "b"); + pretty_line; + atom ":"; + pretty_space; + loc (id "c"); + ]); + ])) + layout; + assert_output ~ctxt "a?b:c" layout; + assert_output ~ctxt ~pretty:true "a ? b : c" layout; + + let a80 = String.make 80 'a' in + let layout = + Js_layout_generator.expression + (E.conditional (E.identifier a80) (E.identifier "b") (E.identifier "c")) + in + assert_output ~ctxt (a80 ^ "?b:c") layout; + assert_output ~ctxt ~pretty:true (a80 ^ "\n" ^ " ? b\n" ^ " : c") layout; + + let b80 = String.make 80 'b' in + let layout = + Js_layout_generator.expression + (E.conditional (E.identifier "a") (E.identifier b80) (E.identifier "c")) + in + assert_output ~ctxt ("a?" ^ b80 ^ ":c") layout; + assert_output ~ctxt ~pretty:true ("a\n" ^ " ? " ^ b80 ^ "\n" ^ " : c") layout ); + ( "conditional_expression_parens" + >:: fun ctxt -> + let (a, b, c, d, e) = + ( E.identifier "a", + E.identifier "b", + E.identifier "c", + E.identifier "d", + E.identifier "e" ) + in + (* a ? b++ : c-- *) + let update = + E.conditional a (E.increment ~prefix:false b) (E.decrement ~prefix:false c) + in + assert_expression ~ctxt "a?b++:c--" update; + + (* a ? +b : -c *) + let unary = E.conditional a (E.unary_plus b) (E.unary_minus c) in + assert_expression ~ctxt "a?+b:-c" unary; + + (* (a || b) ? 
c : d *) + let logical_test = E.conditional (E.logical_or a b) c d in + assert_expression ~ctxt "a||b?c:d" logical_test; + + (* (a ? b : c) ? d : e *) + let nested_in_test = E.conditional (E.conditional a b c) d e in + assert_expression ~ctxt "(a?b:c)?d:e" nested_in_test; + + (* a ? (b ? c : d) : e *) + let nested_in_consequent = E.conditional a (E.conditional b c d) e in + assert_expression ~ctxt "a?b?c:d:e" nested_in_consequent; + + (* a ? b : (c ? d : e) *) + let nested_in_alternate = E.conditional a b (E.conditional c d e) in + assert_expression ~ctxt "a?b:c?d:e" nested_in_alternate; + + let assignment = + E.conditional + a + (E.assignment (Patterns.identifier "x") b) + (E.assignment (Patterns.identifier "y") c) + in + assert_expression ~ctxt "a?x=b:y=c" assignment; + + let sequence = E.conditional a (E.sequence [b; c]) (E.sequence [d; e]) in + assert_expression ~ctxt "a?(b,c):(d,e)" sequence ); + ( "call_expression_parens" + >:: fun ctxt -> + let x = E.identifier "x" in + (* `(x++)()` *) + let update = E.call (E.increment ~prefix:false x) in + assert_expression ~ctxt "(x++)()" update; + + (* `x.y()` *) + let member = E.call (E.member_expression (E.member x ~property:"y")) in + assert_expression ~ctxt "x.y()" member; + + (* `x.y.z()` *) + let two_members = + E.call + (E.member_expression + (E.member (E.member_expression (E.member x ~property:"y")) ~property:"z")) + in + assert_expression ~ctxt "x.y.z()" two_members; + + (* `x()()` *) + let call = E.call (E.call x) in + assert_expression ~ctxt "x()()" call; + + (* `new x()()` *) + let new_ = E.call (E.new_ x) in + assert_expression ~ctxt "new x()()" new_; + + (* `function() {}()` *) + let func = E.call (E.function_ ()) in + assert_expression ~ctxt "function(){}()" func; + + (* `(function() {}.foo)()` *) + let func = E.call (E.member_expression (E.member (E.function_ ()) ~property:"foo")) in + assert_expression ~ctxt "function(){}.foo()" func; + + (* `(() => {})()` *) + let arrow = E.call (E.arrow_function ()) in + assert_expression ~ctxt "(()=>{})()" arrow; + + (* `(foo, bar)()` *) + let seq = E.call (E.sequence [x; E.identifier "y"]) in + assert_expression ~ctxt "(x,y)()" seq; + + (* `__d("a", [], (function() {}), 1)` *) + let underscore_d = + E.call + ~args: + [ + E.expression (E.literal (Literals.string "a")); + E.expression (E.literal (Literals.string "b")); + E.expression (E.function_ ()); + E.expression (E.literal (Literals.number 1. 
"1")); + ] + (E.identifier "__d") + in + assert_expression ~ctxt "__d(\"a\",\"b\",(function(){}),1)" underscore_d ); + ( "member_expression_parens" + >:: fun ctxt -> + let x = E.identifier "x" in + (* `(x++).y` *) + let update = E.member_expression (E.member (E.increment ~prefix:false x) ~property:"y") in + assert_expression ~ctxt "(x++).y" update; + + (* `x.y.z` *) + let member = + E.member_expression + (E.member (E.member_expression (E.member x ~property:"y")) ~property:"z") + in + assert_expression ~ctxt "x.y.z" member; + + (* x().y *) + let call = E.member_expression (E.member (E.call x) ~property:"y") in + assert_expression ~ctxt "x().y" call; + + (* x()[y] *) + let computed = + E.member_expression (E.member_computed (E.call x) ~property:(E.identifier "y")) + in + assert_expression ~ctxt "x()[y]" computed; + + (* `(function() {}).x` *) + let func = E.member_expression (E.member (E.function_ ()) ~property:"x") in + assert_expression ~ctxt "function(){}.x" func; + + (* `(() => {}).x` *) + let func = E.member_expression (E.member (E.arrow_function ()) ~property:"x") in + assert_expression ~ctxt "(()=>{}).x" func; + + (* `(x, y).z` *) + let seq = + E.member_expression (E.member (E.sequence [x; E.identifier "y"]) ~property:"z") + in + assert_expression ~ctxt "(x,y).z" seq; + + let num = + E.member_expression (E.member (E.literal (Literals.number 1.0 "1")) ~property:"z") + in + assert_expression ~ctxt "1..z" num; + let num = + E.member_expression (E.member (E.literal (Literals.number 1.1 "1.1")) ~property:"z") + in + assert_expression ~ctxt "1.1.z" num; + let num = + E.member_expression + (E.member (E.literal (Literals.number 0.0000001 "0.0000001")) ~property:"z") + in + assert_expression ~ctxt "1e-7.z" num ); + ( "new_expression_empty_params" + >:: fun ctxt -> + (* `new xxxxxxx....()` *) + let x80 = String.make 80 'x' in + let layout = Js_layout_generator.expression (E.new_ (E.identifier x80)) in + assert_layout + ~ctxt + L.(loc (group [atom "new"; space; loc (id x80); atom "("; atom ")"])) + layout; + assert_output ~ctxt ("new " ^ x80 ^ "()") layout; + assert_output ~ctxt ~pretty:true ("new " ^ x80 ^ "()") layout ); + ( "new_expression_params" + >:: fun ctxt -> + (* `new Foo(x, y)` *) + let layout = + Js_layout_generator.expression + (E.new_ + (E.identifier "Foo") + ~args:[E.expression (E.identifier "x"); E.expression (E.identifier "y")]) + in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "new"; + space; + loc (id "Foo"); + atom "("; + indent + (fused + [ + softline; + loc (id "x"); + atom ","; + pretty_line; + loc (id "y"); + Layout.IfBreak (atom ",", empty); + ]); + softline; + atom ")"; + ])) + layout; + assert_output ~ctxt "new Foo(x,y)" layout; + assert_output ~ctxt ~pretty:true "new Foo(x, y)" layout ); + ( "new_expression_params_long" + >:: fun ctxt -> + (* `new Foo(xxxxxxx....)` *) + let x80 = String.make 80 'x' in + let layout = + Js_layout_generator.expression + (E.new_ (E.identifier "Foo") ~args:[E.expression (E.identifier x80)]) + in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "new"; + space; + loc (id "Foo"); + atom "("; + indent (fused [softline; loc (id x80); Layout.IfBreak (atom ",", empty)]); + softline; + atom ")"; + ])) + layout; + assert_output ~ctxt ("new Foo(" ^ x80 ^ ")") layout; + assert_output ~ctxt ~pretty:true ("new Foo(\n" ^ " " ^ x80 ^ ",\n" ^ ")") layout ); + ( "new_expression_parens" + >:: fun ctxt -> + let x80 = String.make 80 'x' in + let (x, y, z, id80) = + (E.identifier "x", E.identifier "y", E.identifier "z", E.identifier x80) + in 
+ (* `new (x++)()` *) + begin + let layout = Js_layout_generator.expression (E.new_ (E.increment ~prefix:false x)) in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "new"; + pretty_space; + wrap_in_parens (loc (fused [loc (id "x"); atom "++"])); + atom "("; + atom ")"; + ])) + layout; + assert_output ~ctxt "new(x++)()" layout; + assert_output ~ctxt ~pretty:true "new (x++)()" layout; + + let update = E.new_ (E.increment ~prefix:false id80) in + assert_expression ~ctxt ("new(" ^ x80 ^ "++)()") update; + assert_expression ~ctxt ~pretty:true ("new (" ^ x80 ^ "++)()") update + end; + + (* `new (x())()` *) + let call = E.new_ (E.call x) in + assert_expression ~ctxt "new(x())()" call; + + (* `new x.y()` *) + let member = E.new_ (E.member_expression (E.member x ~property:"y")) in + assert_expression ~ctxt "new x.y()" member; + + (* `new (x.y())()` *) + let member_call = E.new_ (E.call (E.member_expression (E.member x ~property:"y"))) in + assert_expression ~ctxt "new(x.y())()" member_call; + + (* `new (x().y)()` *) + let call_member = E.new_ (E.member_expression (E.member (E.call x) ~property:"y")) in + assert_expression ~ctxt "new(x().y)()" call_member; + + (* `new (x ? y : z)()` *) + let cond = E.new_ (E.conditional x y z) in + assert_expression ~ctxt "new(x?y:z)()" cond ); + ( "unary_expression_parens" + >:: fun ctxt -> + (* `+(+x)` *) + let plus = E.unary_plus (E.unary_plus (E.identifier "x")) in + assert_expression ~ctxt "+(+x)" plus; + + (* `+-x` *) + let minus = E.unary_plus (E.unary_minus (E.identifier "x")) in + assert_expression ~ctxt "+-x" minus; + + (* `+(++x)` *) + let prefix_incr = E.unary_plus (E.increment ~prefix:true (E.identifier "x")) in + assert_expression ~ctxt "+(++x)" prefix_incr; + + (* `+--x` *) + let prefix_decr = E.unary_plus (E.decrement ~prefix:true (E.identifier "x")) in + assert_expression ~ctxt "+--x" prefix_decr; + + (* `+x++` *) + let suffix_incr = E.unary_plus (E.increment ~prefix:false (E.identifier "x")) in + assert_expression ~ctxt "+x++" suffix_incr; + + (* `+x--` *) + let suffix_decr = E.unary_plus (E.decrement ~prefix:false (E.identifier "x")) in + assert_expression ~ctxt "+x--" suffix_decr; + + (* `+x()` *) + let call = E.unary_plus (E.call (E.identifier "x")) in + assert_expression ~ctxt "+x()" call; + + (* `+new x()` *) + let new_ = E.unary_plus (E.new_ (E.identifier "x")) in + assert_expression ~ctxt "+new x()" new_ ); + ( "expression_statement_parens" + >:: fun ctxt -> + let obj = S.expression (E.object_ []) in + assert_statement ~ctxt "({});" obj; + + let func = S.expression (E.function_ ()) in + assert_statement ~ctxt "(function(){});" func; + + let arrow = S.expression (E.arrow_function ()) in + assert_statement ~ctxt "()=>{};" arrow; + + let klass = S.expression (E.class_ []) in + assert_statement ~ctxt "(class{});" klass; + + let func_call = S.expression (E.call (E.function_ ())) in + assert_statement ~ctxt "(function(){})();" func_call; + + let func_member = + S.expression (E.member_expression (E.member (E.function_ ()) ~property:"foo")) + in + assert_statement ~ctxt "(function(){}).foo;" func_member; + + let class_member = + S.expression (E.member_expression (E.member (E.class_ []) ~property:"foo")) + in + assert_statement ~ctxt "(class{}).foo;" class_member; + + let func_member_call = + S.expression (E.call (E.member_expression (E.member (E.function_ ()) ~property:"foo"))) + in + assert_statement ~ctxt "(function(){}).foo();" func_member_call; + + let func_call_member = + S.expression (E.member_expression (E.member (E.call (E.function_ 
())) ~property:"foo")) + in + assert_statement ~ctxt "(function(){})().foo;" func_call_member; + + let func_sequence = S.expression (E.sequence [E.function_ (); E.identifier "x"]) in + assert_statement ~ctxt "(function(){}),x;" func_sequence ); + ( "arrow_body_parens" + >:: fun ctxt -> + let (x, y, z) = (E.identifier "x", E.identifier "y", E.identifier "z") in + let arrow = + let body = Functions.body_expression (E.sequence [x; y]) in + E.arrow_function ~body () + in + assert_expression ~ctxt "()=>(x,y)" arrow; + + let arrow = + let body = Functions.body_expression (E.conditional x y z) in + E.arrow_function ~body () + in + assert_expression ~ctxt "()=>x?y:z" arrow; + + let arrow = + let arrow = E.arrow_function () in + let body = Functions.body_expression arrow in + E.arrow_function ~body () + in + assert_expression ~ctxt "()=>()=>{}" arrow ); + ( "argument_parens" + >:: fun ctxt -> + let f = E.identifier "f" in + let (x, y, z) = (E.identifier "x", E.identifier "y", E.identifier "z") in + let args = [] in + let call = E.call ~args f in + assert_expression ~ctxt "f()" call; + + let args = + let seq = E.sequence [x; y] in + [E.expression seq] + in + let call = E.call ~args f in + assert_expression ~ctxt ~msg:"sequence should be parenthesized" "f((x,y))" call; + + let args = [E.spread (E.sequence [x; y])] in + let call = E.call ~args f in + assert_expression ~ctxt ~msg:"sequence should be parenthesized" "f(...(x,y))" call; + + let args = [E.expression (E.conditional x y z)] in + let call = E.call ~args f in + assert_expression ~ctxt ~msg:"higher-precedence ops don't need parens" "f(x?y:z)" call; + + let call = + let arrow = E.arrow_function () in + let args = [E.expression arrow] in + E.call ~args f + in + assert_expression ~ctxt ~msg:"higher-precedence ops don't need parens" "f(()=>{})" call; + + let args = + let seq = E.sequence [x; y] in + let logical = E.logical_or seq z in + [E.expression logical] + in + let call = E.call ~args f in + assert_expression ~ctxt ~msg:"nested sequence has parens" "f((x,y)||z)" call ); + ( "binary_in_space" + >:: fun ctxt -> + let ast = statement_of_string {|if("foo" in {"foo": bar}){}|} in + assert_statement ~ctxt {|if("foo"in{"foo":bar}){}|} ast; + + let ast = statement_of_string {|if("foo" in bar){}|} in + assert_statement ~ctxt {|if("foo"in bar){}|} ast; + + let ast = statement_of_string {|if(foo in {"foo":bar}){}|} in + assert_statement ~ctxt {|if(foo in{"foo":bar}){}|} ast ); + ( "binary_instanceof_space" + >:: fun ctxt -> + begin + let ast = E.instanceof (E.literal (Literals.string "foo")) (E.object_ []) in + let layout = Js_layout_generator.expression ast in + assert_layout + ~ctxt + L.( + loc + (fused + [ + loc (fused [atom "\""; atom "foo"; atom "\""]); + pretty_space; + atom "instanceof"; + pretty_space; + loc (group [atom "{"; atom "}"]); + ])) + layout; + assert_output ~ctxt {|"foo"instanceof{}|} layout; + assert_output ~ctxt ~pretty:true {|"foo" instanceof {}|} layout + end; + + begin + let ast = E.instanceof (E.literal (Literals.string "foo")) (E.identifier "bar") in + let layout = Js_layout_generator.expression ast in + assert_layout + ~ctxt + L.( + loc + (fused + [ + loc (fused [atom "\""; atom "foo"; atom "\""]); + pretty_space; + atom "instanceof"; + space; + loc (id "bar"); + ])) + layout; + assert_output ~ctxt {|"foo"instanceof bar|} layout; + assert_output ~ctxt ~pretty:true {|"foo" instanceof bar|} layout + end; + + let ast = E.instanceof (E.identifier "foo") (E.object_ []) in + let layout = Js_layout_generator.expression ast in + 
assert_layout + ~ctxt + L.( + loc + (fused + [ + loc (id "foo"); + space; + atom "instanceof"; + pretty_space; + loc (group [atom "{"; atom "}"]); + ])) + layout; + assert_output ~ctxt {|foo instanceof{}|} layout; + assert_output ~ctxt ~pretty:true {|foo instanceof {}|} layout ); + ( "logical_wrapping" + >:: fun ctxt -> + let x40 = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" in + let ast = E.logical_and (E.identifier x40) (E.identifier x40) in + let layout = Js_layout_generator.expression ast in + assert_layout + ~ctxt + L.( + loc + (group + [ + loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); + pretty_space; + atom "&&"; + indent + (fused + [ + Layout.IfBreak (hardline, pretty_space); + loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); + ]); + ])) + layout; + assert_output ~ctxt (x40 ^ "&&" ^ x40) layout; + assert_output ~ctxt ~pretty:true (x40 ^ " &&\n " ^ x40) layout ); + ( "return_statement_parens" + >:: fun ctxt -> + let ret = S.return None in + assert_statement ~ctxt "return;" ret; + + let x = E.identifier "x" in + let y = E.identifier "y" in + let seq = E.sequence [x; y] in + let ret = S.return (Some seq) in + assert_statement ~ctxt "return x,y;" ret; + assert_statement ~ctxt ~pretty:true "return x, y;" ret; + + (* sequences get split across lines and wrapped in parens *) + let x40 = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" in + let y40 = "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy" in + let func = + S.function_declaration + (I.identifier "f") + ~body:(F.body [S.return (Some (E.sequence [E.identifier x40; E.identifier y40]))]) + in + assert_layout_result + ~ctxt + L.( + loc + (fused + [ + atom "return"; + space; + group + [ + Layout.IfBreak (atom "(", empty); + indent + (fused + [ + softline; + loc + (group + [ + loc (id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); + atom ","; + pretty_line; + loc (id "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"); + ]); + ]); + softline; + Layout.IfBreak (atom ")", empty); + ]; + Layout.IfPretty (atom ";", empty); + ])) + Layout_matcher.(body_of_function_declaration func >>= nth_fused 0); + assert_statement ~ctxt ("function f(){return " ^ x40 ^ "," ^ y40 ^ "}") func; + assert_statement + ~ctxt + ~pretty:true + ("function f() {\n return (\n " ^ x40 ^ ",\n " ^ y40 ^ "\n );\n}") + func; + + (* logicals get split *) + let logical = E.logical_and (E.identifier x40) (E.identifier y40) in + let func = + S.function_declaration (I.identifier "f") ~body:(F.body [S.return (Some logical)]) + in + assert_layout_result + ~ctxt + L.( + loc + (fused + [ + atom "return"; + space; + group + [ + Layout.IfBreak (atom "(", empty); + indent (fused [softline; expression logical]); + softline; + Layout.IfBreak (atom ")", empty); + ]; + Layout.IfPretty (atom ";", empty); + ])) + Layout_matcher.(body_of_function_declaration func >>= nth_fused 0); + assert_statement + ~ctxt + ~pretty:true + ("function f() {\n return (\n " ^ x40 ^ " &&\n " ^ y40 ^ "\n );\n}") + func; + + (* binary expressions get split *) + let plus = E.plus (E.identifier x40) (E.identifier y40) in + let func = + S.function_declaration (I.identifier "f") ~body:(F.body [S.return (Some plus)]) + in + assert_layout_result + ~ctxt + L.( + loc + (fused + [ + atom "return"; + space; + group + [ + Layout.IfBreak (atom "(", empty); + indent (fused [softline; expression plus]); + softline; + Layout.IfBreak (atom ")", empty); + ]; + Layout.IfPretty (atom ";", empty); + ])) + Layout_matcher.(body_of_function_declaration func >>= nth_fused 0); + assert_statement + ~ctxt + ~pretty:true + ("function f() {\n return (\n " 
^ x40 ^ " + " ^ y40 ^ "\n );\n}") + func; + + (* jsx gets split *) + let long_name = String.make 80 'A' in + let jsx = E.jsx_element (J.element (J.identifier long_name)) in + let func = + S.function_declaration (I.identifier "f") ~body:(F.body [S.return (Some jsx)]) + in + assert_layout_result + ~ctxt + L.( + loc + (fused + [ + atom "return"; + pretty_space; + group + [ + Layout.IfBreak (atom "(", empty); + indent (fused [softline; expression jsx]); + softline; + Layout.IfBreak (atom ")", empty); + ]; + Layout.IfPretty (atom ";", empty); + ])) + Layout_matcher.(body_of_function_declaration func >>= nth_fused 0); + assert_statement + ~ctxt + ~pretty:true + ("function f() {\n return (\n <" ^ long_name ^ ">\n );\n}") + func; + + (* a string doesn't get split *) + let x80 = x40 ^ x40 in + let func = + S.function_declaration + (I.identifier "f") + ~body:(F.body [S.return (Some (E.identifier x80))]) + in + assert_layout_result + ~ctxt + L.( + loc + (fused + [ + atom "return"; + atom " "; + loc + (id + "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"); + Layout.IfPretty (atom ";", empty); + ])) + Layout_matcher.(body_of_function_declaration func >>= nth_fused 0); + assert_statement ~ctxt ("function f(){return " ^ x80 ^ "}") func; + assert_statement ~ctxt ~pretty:true ("function f() {\n return " ^ x80 ^ ";\n}") func ); + ( "return_statement_space" + >:: fun ctxt -> + let assert_no_space ~ctxt expr = + let ret = statement_of_string ("return " ^ expr ^ ";") in + assert_statement ~ctxt ("return" ^ expr ^ ";") ret + in + assert_no_space ~ctxt {|"foo"|}; + assert_no_space ~ctxt {|{foo:"bar"}|}; + assert_no_space ~ctxt {|[foo]|}; + assert_no_space ~ctxt {|!foo|}; + assert_no_space ~ctxt {|+foo|}; + assert_no_space ~ctxt {|-foo|}; + assert_no_space ~ctxt {|~foo|}; + + let ret = statement_of_string {|return (foo);|} in + assert_statement ~ctxt {|return foo;|} ret; + + let ret = statement_of_string {|return 123;|} in + assert_statement ~ctxt {|return 123;|} ret ); + ( "for_loop" + >:: fun ctxt -> + let x80 = String.make 80 'x' in + let layout = + Js_layout_generator.statement (S.for_ (E.identifier x80) None None (S.empty ())) + in + assert_layout + ~ctxt + L.( + loc + (fused + [ + atom "for"; + pretty_space; + group + [ + atom "("; + indent + (fused + [softline; loc (id x80); atom ";"; pretty_line; atom ";"; pretty_line]); + softline; + atom ")"; + ]; + loc (atom ";"); + ])) + layout; + assert_output ~ctxt ("for(" ^ x80 ^ ";;);") layout; + assert_output + ~ctxt + ~pretty:true + ( "for (\n" + ^ " " + ^ x80 + ^ ";\n" + ^ " ;\n" + ^ " \n" + (* TODO: remove trailing whitespace *) + ^ ");" ) + layout ); + ( "binary_in_in_for_loops" + >:: fun ctxt -> + let ast = + let (x, y) = (E.identifier "x", E.identifier "y") in + let init = E.in_ x y in + let body = S.empty () in + S.for_ init None None body + in + assert_statement ~ctxt ~msg:"binary `in` expressions need parens" "for((x in y);;);" ast; + + let ast = + let (y, z) = (E.identifier "y", E.identifier "z") in + let true_ = Expressions.true_ () in + let in_expr = E.in_ y z in + let eq_expr = E.equal true_ in_expr in + let init = E.assignment (Patterns.identifier "x") eq_expr in + let body = S.empty () in + S.for_ init None None body + in + assert_statement + ~ctxt + ~msg:"binary `in` expressions need parens" + "for(x=true==(y in z);;);" + ast ); + ( "for_in_space" + >:: fun ctxt -> + let ast = statement_of_string {|for(var x in {"foo": bar}){}|} in + assert_statement ~ctxt {|for(var x in{"foo":bar}){}|} ast; + + let ast = 
statement_of_string {|for(var x in bar){}|} in + assert_statement ~ctxt {|for(var x in bar){}|} ast ); + ( "for_statement_without_block" + >:: fun ctxt -> + assert_statement_string ~ctxt "for(;;)x;"; + assert_statement_string ~ctxt "{for(;;)x}" ); + ( "if_statement_with_labeled_consequent" + >:: fun ctxt -> + let ast = + S.if_ + (E.identifier "x") + (S.labeled (I.identifier "y") (S.expression (E.identifier "z"))) + (Some (S.expression (E.identifier "z"))) + in + assert_statement ~ctxt "if(x)y:z;else z;" ast; + assert_statement ~ctxt ~pretty:true "if (x) y: z; else z;" ast ); + ( "if_statement_without_block" + >:: fun ctxt -> + let if_stmt = S.if_ (E.identifier "x") (S.expression (E.identifier "y")) None in + assert_statement ~ctxt "if(x)y;" if_stmt; + assert_statement ~ctxt ~pretty:true "if (x) y;" if_stmt; + + let ast = S.block [if_stmt; S.expression (E.identifier "z")] in + assert_statement ~ctxt "{if(x)y;z}" ast; + assert_statement ~ctxt ~pretty:true ("{\n" ^ " if (x) y;\n" ^ " z;\n" ^ "}") ast ); + ( "if_statement_with_empty_consequent" + >:: fun ctxt -> + let layout = Js_layout_generator.statement (S.if_ (E.identifier "x") (S.empty ()) None) in + assert_output ~ctxt "if(x);" layout; + assert_output ~ctxt ~pretty:true "if (x);" layout ); + ( "if_else_statement_without_block" + >:: fun ctxt -> + let if_else_stmt = + S.if_ + (E.identifier "x") + (S.expression (E.identifier "y")) + (Some (S.expression (E.identifier "z"))) + in + assert_statement ~ctxt "if(x)y;else z;" if_else_stmt; + assert_statement ~ctxt ~pretty:true "if (x) y; else z;" if_else_stmt; + + let ast = S.block [if_else_stmt] in + assert_statement ~ctxt "{if(x)y;else z}" ast; + assert_statement ~ctxt ~pretty:true ("{\n" ^ " if (x) y; else z;\n" ^ "}") ast; + + let ast = + S.if_ + (E.identifier "x") + (S.expression (E.identifier "y")) + (Some (S.expression (E.increment ~prefix:true (E.identifier "z")))) + in + assert_statement ~ctxt "if(x)y;else++z;" ast; + assert_statement ~ctxt ~pretty:true "if (x) y; else ++z;" ast ); + ( "if_statement_without_block_long" + >:: fun ctxt -> + let a80 = String.make 80 'A' in + let if_stmt = S.if_ (E.identifier a80) (S.expression (E.identifier "y")) None in + assert_statement ~ctxt ("if(" ^ a80 ^ ")y;") if_stmt; + assert_statement + ~ctxt + ~pretty:true + ("if (\n" ^ " " ^ a80 ^ "\n" ^ ")\n" ^ " y;") + if_stmt; + + let ast = S.block [if_stmt; S.expression (E.identifier "z")] in + assert_statement ~ctxt ("{if(" ^ a80 ^ ")y;z}") ast; + assert_statement + ~ctxt + ~pretty:true + ("{\n" ^ " if (\n" ^ " " ^ a80 ^ "\n" ^ " )\n" ^ " y;\n" ^ " z;\n" ^ "}") + ast ); + ( "if_else_statement_with_empty_consequent" + >:: fun ctxt -> + let layout = + Js_layout_generator.statement + (S.if_ (E.identifier "x") (S.empty ()) (Some (S.expression (E.identifier "y")))) + in + assert_output ~ctxt "if(x);else y;" layout; + assert_output ~ctxt ~pretty:true "if (x); else y;" layout ); + ( "if_else_statement_with_empty_alternate" + >:: fun ctxt -> + let layout = + Js_layout_generator.statement + (S.if_ (E.identifier "x") (S.expression (E.identifier "y")) (Some (S.empty ()))) + in + assert_output ~ctxt "if(x)y;else;" layout; + assert_output ~ctxt ~pretty:true "if (x) y; else ;" layout ) + (* TODO: remove extra space *); + ( "if_else_statement_with_empty_consequent_and_alternate" + >:: fun ctxt -> + let layout = + Js_layout_generator.statement + (S.if_ (E.identifier "x") (S.empty ()) (Some (S.empty ()))) + in + assert_output ~ctxt "if(x);else;" layout; + assert_output ~ctxt ~pretty:true "if (x); else ;" layout ) + (* TODO: 
remove extra space *); + ( "while_statement_without_block" + >:: fun ctxt -> + let while_stmt = S.while_ (E.identifier "x") (S.expression (E.identifier "y")) in + assert_statement ~ctxt "while(x)y;" while_stmt; + + let ast = S.block [while_stmt] in + assert_statement ~ctxt "{while(x)y}" ast; + + let ast = S.while_ (E.identifier "x") (S.empty ()) in + assert_statement ~ctxt "while(x);" ast; + assert_statement ~ctxt ~pretty:true "while (x);" ast ); + ( "do_while_statements" + >:: fun ctxt -> + let ast = + S.do_while + (S.labeled (I.identifier "x") (S.expression (E.identifier "z"))) + (E.identifier "y") + in + assert_statement ~ctxt "do x:z;while(y);" ast; + assert_statement ~ctxt ~pretty:true "do x: z; while (y);" ast; + + let ast = + S.do_while + (S.expression (E.increment ~prefix:true (E.identifier "x"))) + (E.identifier "y") + in + assert_statement ~ctxt "do++x;while(y);" ast ); + ( "labeled_empty_statement" + >:: fun ctxt -> + let layout = Js_layout_generator.statement (S.labeled (I.identifier "x") (S.empty ())) in + assert_output ~ctxt "x:;" layout; + assert_output ~ctxt ~pretty:true "x: ;" layout ); + ( "array_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt "[]"; + assert_expression_string ~ctxt "[a]"; + assert_expression_string ~ctxt "[a,b]"; + assert_expression_string ~ctxt "[a,,b]"; + assert_expression_string ~ctxt "[a,b,,]"; + assert_expression_string ~ctxt ~pretty:true "[a]"; + assert_expression_string ~ctxt ~pretty:true "[a, b]"; + assert_expression_string ~ctxt ~pretty:true "[a, b, ,]"; + assert_expression_string + ~ctxt + ~pretty:true + ("[\n a,\n " ^ String.make 80 'b' ^ ",\n ,\n]") ); + ( "array_with_trailing_comma" + >:: fun ctxt -> + let a80 = String.make 80 'a' in + let layout = + Js_layout_generator.expression + (E.array + [Some (E.expression (E.identifier a80)); Some (E.expression (E.identifier a80))]) + in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "["; + indent + (fused + [ + softline; + loc (id a80); + atom ","; + pretty_line; + loc (id a80); + Layout.IfBreak (atom ",", empty); + ]); + softline; + atom "]"; + ])) + layout; + assert_output ~ctxt ("[" ^ a80 ^ "," ^ a80 ^ "]") layout; + assert_output + ~ctxt + ~pretty:true + ("[\n" ^ " " ^ a80 ^ ",\n" ^ " " ^ a80 ^ ",\n" ^ "]") + layout ); + ( "array_with_trailing_hole" + >:: fun ctxt -> + let layout = + Js_layout_generator.expression (E.array [Some (E.expression (E.identifier "a")); None]) + in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "["; + indent (fused [softline; loc (id "a"); atom ","; pretty_line; atom ","]); + softline; + atom "]"; + ])) + layout; + assert_output ~ctxt "[a,,]" layout; + assert_output ~ctxt ~pretty:true "[a, ,]" layout; + + let a80 = String.make 80 'a' in + let layout = + Js_layout_generator.expression (E.array [Some (E.expression (E.identifier a80)); None]) + in + assert_output ~ctxt ("[" ^ a80 ^ ",,]") layout; + assert_output ~ctxt ~pretty:true ("[\n" ^ " " ^ a80 ^ ",\n" ^ " ,\n" ^ "]") layout ); + ( "function_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "function a(){}"; + assert_statement_string ~ctxt "async function a(){}"; + assert_statement_string ~ctxt "function* a(){}"; + assert_statement_string ~ctxt "function a(a){}"; + assert_statement_string ~ctxt "function a(a,b){}"; + assert_statement_string ~ctxt "function a(a:b){}"; + assert_statement_string ~ctxt "function a(a:b,c:d){}"; + assert_statement_string ~ctxt "function a(a:?b=b){}"; + assert_statement_string ~ctxt "function a():a{}"; + assert_statement_string ~ctxt "function 
a(){}"; + assert_statement_string ~ctxt "function a():%checks{}"; + assert_statement_string ~ctxt "function a():a%checks{}"; + assert_statement_string ~ctxt "function a():a%checks(a){}"; + assert_statement_string ~ctxt ~pretty:true "function a(): a %checks(a) {}"; + assert_statement_string ~ctxt ~pretty:true "function a(a: a, b: b): a {}"; + assert_statement_string + ~ctxt + ~pretty:true + ("function a(\n a: a,\n b: " ^ String.make 80 'b' ^ ",\n): a {}"); + assert_statement_string ~ctxt ~pretty:true "function a() {\n a;\n}" ); + ( "function_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt "function(){}"; + assert_expression_string ~ctxt "function a(){}"; + assert_expression_string ~ctxt "async function(){}"; + assert_expression_string ~ctxt "function*(){}"; + assert_expression_string ~ctxt "function(a){}"; + assert_expression_string ~ctxt "function(a,b){}"; + assert_expression_string ~ctxt "function(a:a,b:b):c{}"; + assert_expression_string ~ctxt "function(){}"; + assert_expression_string ~ctxt ~pretty:true "function(a: a, b: b): c {}"; + assert_expression_string ~ctxt "()=>a"; + assert_expression_string ~ctxt "()=>{}"; + assert_expression_string ~ctxt "():* =>{}"; + assert_expression_string ~ctxt "async()=>{}"; + assert_expression_string ~ctxt "a=>{}"; + assert_expression_string ~ctxt "async a=>{}"; + assert_expression_string ~ctxt "(a)=>{}"; + assert_expression_string ~ctxt "(a,b)=>{}"; + assert_expression_string ~ctxt "(a):%checks=>{}"; + assert_expression_string ~ctxt "({a})=>a"; + assert_expression_string ~ctxt "({a})=>({a:b})"; + assert_expression_string ~ctxt "({a})=>[]"; + assert_expression_string ~ctxt "({a})=>i++"; + assert_expression_string ~ctxt "({a})=>a()"; + assert_expression_string ~ctxt "(a:b)=>{}"; + assert_expression_string ~ctxt "(a?:b)=>{}"; + assert_expression_string ~ctxt "(a):b=>{}"; + assert_expression_string ~ctxt "():c=>{}"; + assert_expression_string ~ctxt "(a):c=>{}"; + assert_expression_string ~ctxt "(a:a,b:b):c=>{}"; + assert_expression_string ~ctxt ~pretty:true "(a: a, b: b): c => {}" ); + ( "class_statements" + >:: fun ctxt -> + let long_a = String.make 80 'a' in + let long_b = String.make 80 'b' in + assert_statement_string ~ctxt "class a{}"; + assert_statement_string ~ctxt "class a extends b{}"; + assert_statement_string ~ctxt "class a extends b{}"; + assert_statement_string ~ctxt "class a extends b{}"; + assert_statement_string ~ctxt ~pretty:true ("class " ^ long_a ^ " {}"); + assert_statement_string ~ctxt ~pretty:true ("class a\n extends " ^ long_b ^ " {}"); + assert_statement_string ~ctxt "@a class a extends b{}"; + assert_statement_string ~ctxt "@a@b class a extends b{}"; + assert_statement_string ~ctxt "@a()@b class a extends b{}"; + assert_statement_string ~ctxt "@(++a)@b class a extends b{}"; + assert_statement_string ~ctxt "@(a&&b)@b class a extends b{}"; + assert_statement_string ~ctxt "@(()=>{})@b class a extends b{}"; + assert_statement_string ~ctxt ~pretty:true "@a\nclass a extends b {}"; + assert_statement_string ~ctxt ~pretty:true "@a\n@b\nclass a extends b {}"; + assert_statement_string ~ctxt "class a implements b{}"; + assert_statement_string ~ctxt "class a implements b{}"; + assert_statement_string ~ctxt "class a implements b,c{}"; + assert_statement_string ~ctxt "class a implements b,c{}"; + + begin + let ast = + S.class_declaration + ~id:(I.identifier "a") + ~super:(E.identifier "b") + ~implements:[Ast_builder.Classes.implements (I.identifier "c")] + [] + in + let layout = Js_layout_generator.statement ast in + 
assert_layout + ~ctxt + L.( + loc + (group + [ + atom "class"; + space; + id "a"; + indent + (fused + [ + line; + atom "extends"; + space; + loc (loc (id "b")); + line; + atom "implements"; + space; + loc (id "c"); + ]); + pretty_space; + atom "{}"; + ])) + layout; + assert_output ~ctxt "class a extends b implements c{}" layout; + assert_output ~ctxt ~pretty:true "class a extends b implements c {}" layout + end; + + begin + let x35 = String.make 35 'x' in + let y29 = String.make 29 'y' in + let c2 = S.class_declaration ~id:(I.identifier x35) ~super:(E.identifier y29) [] in + let ast = S.block [c2] in + let layout = Js_layout_generator.statement ast in + assert_layout + ~ctxt + L.( + loc + (loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_hardline; + loc + (group + [ + atom "class"; + space; + id "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"; + indent + (fused + [ + line; + atom "extends"; + space; + loc (loc (id "yyyyyyyyyyyyyyyyyyyyyyyyyyyyy")); + ]); + pretty_space; + atom "{}"; + ]); + ]); + pretty_hardline; + atom "}"; + ]))) + layout; + assert_output ~ctxt ("{class " ^ x35 ^ " extends " ^ y29 ^ "{}}") layout; + assert_output + ~ctxt + ~pretty:true + ("{\n class " ^ x35 ^ "\n extends " ^ y29 ^ " {}" ^ "\n}") + layout + end; + + assert_statement_string + ~ctxt + ~pretty:true + ("class a\n extends " ^ long_b ^ "\n implements c {}"); + assert_statement_string + ~ctxt + ~pretty:true + ("class a\n extends " ^ long_b ^ "\n implements " ^ long_b ^ " {}"); + + (* TODO: this seems wrong, `c {` should break onto a new line *) + assert_statement_string + ~ctxt + ~pretty:true + ("class a\n extends " ^ long_b ^ "\n implements " ^ long_b ^ ", c {}") ); + ( "class_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt "class{}"; + assert_expression_string ~ctxt "class a{}"; + assert_expression_string ~ctxt "class a extends b{}" ); + ( "class_methods" + >:: fun ctxt -> + assert_statement_string ~ctxt "class a{b(){}}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n b() {}\n static b() {}\n}"; + assert_statement_string + ~ctxt + ~pretty:true + "class a {\n async a() {}\n static async a() {}\n}"; + assert_statement_string + ~ctxt + ~pretty:true + "class a {\n get a() {}\n set a() {}\n static get a() {}\n}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n constructor() {}\n}"; + assert_statement_string ~ctxt "class a{@a a(){}}"; + assert_statement_string ~ctxt "class a{@(()=>{}) a(){}}"; + assert_statement_string ~ctxt "class a{@a@b a(){}}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n @a\n a() {}\n}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n @a\n @b\n a() {}\n}"; + assert_statement_string ~ctxt "class a{*b(){}}" ); + ( "class_properties" + >:: fun ctxt -> + assert_statement_string ~ctxt "class a{a;}"; + assert_statement_string ~ctxt "class a{a:a;}"; + assert_statement_string ~ctxt "class a{a;b=c;}"; + assert_statement_string ~ctxt "class a{a;b:b=c;}"; + assert_statement_string ~ctxt "class a{+a;}"; + assert_statement_string ~ctxt "class a{+a:a=a;}"; + assert_statement_string ~ctxt "class a{static a;}"; + assert_statement_string ~ctxt "class a{static +a:a=a;}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n a;\n b = c;\n static b = c;\n}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n +a: a;\n b: b = c;\n}" ); + ( "class_private_properties" + >:: fun ctxt -> + assert_statement_string ~ctxt "class a{#a;}"; + assert_statement_string ~ctxt "class a{#a:a;}"; + assert_statement_string ~ctxt "class a{#a;#b=c;}"; + 
assert_statement_string ~ctxt "class a{#a;#b:b=c;}"; + assert_statement_string ~ctxt "class a{+#a;}"; + assert_statement_string ~ctxt "class a{+#a:a=a;}"; + assert_statement_string ~ctxt "class a{static #a;}"; + assert_statement_string ~ctxt "class a{static +#a:a=a;}"; + assert_statement_string + ~ctxt + ~pretty:true + "class a {\n #a;\n #b = c;\n static #b = c;\n}"; + assert_statement_string ~ctxt ~pretty:true "class a {\n +#a: a;\n #b: b = c;\n}" ); + ( "forin_statement_declaration" + >:: fun ctxt -> + let mk_layout a b = + Js_layout_generator.statement + (S.for_in + (S.for_in_declarator [S.variable_declarator a]) + (E.identifier b) + (S.block [S.expression (E.identifier a)])) + in + begin + let layout = mk_layout "a" "b" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + atom "for"; + pretty_space; + group + [ + atom "("; + loc (fused [atom "var"; space; loc (loc (id "a"))]); + space; + atom "in"; + space; + loc (id "b"); + atom ")"; + ]; + pretty_space; + loc + (loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_hardline; + loc + (fused [loc (id "a"); Layout.IfPretty (atom ";", empty)]); + ]); + pretty_hardline; + atom "}"; + ])); + ])) + layout; + assert_output ~ctxt "for(var a in b){a}" layout; + assert_output ~ctxt ~pretty:true ("for (var a in b) {\n" ^ " a;\n" ^ "}") layout + end; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 "b" in + assert_output ~ctxt ("for(var " ^ a80 ^ " in b){" ^ a80 ^ "}") layout; + assert_output + ~ctxt + ~pretty:true + ("for (var " ^ a80 ^ " in b) {\n" ^ " " ^ a80 ^ ";\n" ^ "}") + layout ); + ( "forin_statement_pattern_identifier" + >:: fun ctxt -> + let mk_layout a b = + Js_layout_generator.statement + (S.for_in (S.for_in_pattern (Patterns.identifier a)) (E.identifier b) (S.block [])) + in + begin + let layout = mk_layout "a" "b" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + atom "for"; + pretty_space; + group + [atom "("; loc (id "a"); space; atom "in"; space; loc (id "b"); atom ")"]; + pretty_space; + loc (loc (atom "{}")); + ])) + layout; + assert_output ~ctxt "for(a in b){}" layout; + assert_output ~ctxt ~pretty:true "for (a in b) {}" layout + end; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 "b" in + assert_output ~ctxt ("for(" ^ a80 ^ " in b){}") layout; + assert_output ~ctxt ~pretty:true ("for (" ^ a80 ^ " in b) {}") layout ); + ( "forin_statement_without_block" + >:: fun ctxt -> + assert_statement_string ~ctxt "for(a in b)x;"; + assert_statement_string ~ctxt "{for(a in b)x}" ); + ( "forin_empty_body" + >:: fun ctxt -> + let layout = + Js_layout_generator.statement + (S.for_in (S.for_in_pattern (Patterns.identifier "a")) (E.identifier "b") (S.empty ())) + in + assert_output ~ctxt "for(a in b);" layout; + assert_output ~ctxt ~pretty:true "for (a in b);" layout ); + ( "forof_statement_declaration" + >:: fun ctxt -> + let mk_layout a b = + Js_layout_generator.statement + (S.for_of + (S.for_of_declarator [S.variable_declarator a]) + (E.identifier b) + (S.block [S.expression (E.identifier a)])) + in + begin + let layout = mk_layout "a" "b" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + atom "for"; + pretty_space; + group + [ + atom "("; + loc (fused [atom "var"; space; loc (loc (id "a"))]); + space; + atom "of"; + space; + loc (id "b"); + atom ")"; + ]; + pretty_space; + loc + (loc + (group + [ + atom "{"; + indent + (fused + [ + pretty_hardline; + loc + (fused [loc (id "a"); Layout.IfPretty (atom ";", empty)]); + ]); + pretty_hardline; + atom "}"; + ])); + ])) + layout; + assert_output 
~ctxt "for(var a of b){a}" layout; + assert_output ~ctxt ~pretty:true ("for (var a of b) {\n" ^ " a;\n" ^ "}") layout + end; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 "b" in + assert_output ~ctxt ("for(var " ^ a80 ^ " of b){" ^ a80 ^ "}") layout; + assert_output + ~ctxt + ~pretty:true + ("for (var " ^ a80 ^ " of b) {\n" ^ " " ^ a80 ^ ";\n" ^ "}") + layout ); + ( "forof_statement_pattern_identifier" + >:: fun ctxt -> + let mk_layout a b = + Js_layout_generator.statement + (S.for_of (S.for_of_pattern (Patterns.identifier a)) (E.identifier b) (S.block [])) + in + begin + let layout = mk_layout "a" "b" in + assert_layout + ~ctxt + L.( + loc + (fused + [ + atom "for"; + pretty_space; + group + [atom "("; loc (id "a"); space; atom "of"; space; loc (id "b"); atom ")"]; + pretty_space; + loc (loc (atom "{}")); + ])) + layout; + assert_output ~ctxt "for(a of b){}" layout; + assert_output ~ctxt ~pretty:true "for (a of b) {}" layout + end; + + let a80 = String.make 80 'a' in + let layout = mk_layout a80 "b" in + assert_output ~ctxt ("for(" ^ a80 ^ " of b){}") layout; + assert_output ~ctxt ~pretty:true ("for (" ^ a80 ^ " of b) {}") layout ); + ( "forof_statement_async" + >:: fun ctxt -> + assert_statement_string ~ctxt "async function f(){for await(let x of y){}}" ); + ( "forof_statement_without_block" + >:: fun ctxt -> + assert_statement_string ~ctxt "for(a of b)x;"; + assert_statement_string ~ctxt "{for(a of b)x}" ); + ( "forof_empty_body" + >:: fun ctxt -> + let layout = + Js_layout_generator.statement + (S.for_of (S.for_of_pattern (Patterns.identifier "a")) (E.identifier "b") (S.empty ())) + in + assert_output ~ctxt "for(a of b);" layout; + assert_output ~ctxt ~pretty:true "for (a of b);" layout ); + ( "yield_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt "function* f(){yield}"; + assert_expression_string ~ctxt "function* f(){yield a}"; + assert_expression_string ~ctxt "function* f(){yield* a}" ); + ( "meta_property_expressions" + >:: fun ctxt -> + assert_statement_string ~ctxt "function F(){new.target}"; + assert_statement_string ~ctxt "function F(){new.target.name}" ); + ( "tagged_template_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt "a``"; + assert_expression_string ~ctxt "b.c``"; + assert_expression_string ~ctxt "(()=>{})``"; + assert_expression_string ~ctxt "(b=c)``"; + assert_expression_string ~ctxt "(b+c)``"; + assert_expression_string ~ctxt "b()``"; + assert_expression_string ~ctxt "(class{})``"; + assert_expression_string ~ctxt "(b?c:d)``"; + assert_expression_string ~ctxt "(function(){})``"; + assert_expression_string ~ctxt "(b||c)``"; + assert_expression_string ~ctxt "(new B())``"; + assert_expression_string ~ctxt "({})``"; + assert_expression_string ~ctxt "(b,c)``"; + assert_expression_string ~ctxt "````"; + assert_expression_string ~ctxt "(void b)``"; + assert_expression_string ~ctxt "(++b)``" ); + ( "template_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt "``"; + assert_expression_string ~ctxt "`${a}`"; + assert_expression_string ~ctxt "`a${b}c`"; + assert_expression_string ~ctxt "`a${b}c${d}e`"; + assert_expression_string ~ctxt "`\\``" ); + ( "import_expressions" + >:: fun ctxt -> + assert_expression_string ~ctxt {|import("a")|}; + assert_expression_string ~ctxt "import(a)" ); + ( "import_declaration_statement" + >:: fun ctxt -> + assert_statement_string ~ctxt {|import"a";|}; + assert_statement_string ~ctxt {|import a from"a";|}; + assert_statement_string ~ctxt {|import type a from"a";|}; + 
assert_statement_string ~ctxt {|import typeof a from"a";|}; + assert_statement_string ~ctxt {|import a,*as b from"a";|}; + assert_statement_string ~ctxt {|import a,{b}from"a";|}; + assert_statement_string ~ctxt {|import{a,type b}from"a";|}; + assert_statement_string ~ctxt {|import{a,typeof b}from"a";|}; + assert_statement_string ~ctxt {|import{a,type b as c}from"a";|}; + assert_statement_string ~ctxt {|import{a as b}from"a";|}; + assert_statement_string ~ctxt {|import type{a}from"a";|}; + assert_statement_string ~ctxt {|import{a,b}from"a";|}; + assert_statement_string ~ctxt {|import type{}from"a";|}; + assert_statement_string ~ctxt {|import typeof{}from"a";|}; + assert_statement_string ~ctxt ~pretty:true {|import {a, b} from "a";|}; + assert_statement_string ~ctxt ~pretty:true {|import type {a, b} from "a";|}; + assert_statement_string + ~ctxt + ~pretty:true + ("import {\n a,\n " ^ String.make 80 'b' ^ ",\n} from \"a\";"); + assert_statement_string ~ctxt ~pretty:true {|import a, * as b from "a";|}; + assert_statement_string + ~ctxt + ~pretty:true + ("import a, * as " ^ String.make 80 'b' ^ " from \"a\";"); + assert_statement_string ~ctxt ~pretty:true {|import a, {b} from "a";|}; + assert_statement_string + ~ctxt + ~pretty:true + ("import a, {\n " ^ String.make 80 'b' ^ ",\n} from \"a\";") ); + ( "export_declaration_statement" + >:: fun ctxt -> + assert_statement_string ~ctxt "export{};"; + assert_statement_string ~ctxt "export{}from\"a\";"; + assert_statement_string ~ctxt "export{a}from\"a\";"; + assert_statement_string ~ctxt "export{a,b as c};"; + assert_statement_string ~ctxt "export*from\"a\";"; + assert_statement_string ~ctxt "export*as a from\"a\";"; + assert_statement_string ~ctxt "export type{};"; + assert_statement_string ~ctxt "export type{a};"; + assert_statement_string ~ctxt "export type a=b;"; + assert_statement_string ~ctxt "export let a;"; + assert_statement_string ~ctxt "export const a=b;"; + assert_statement_string ~ctxt "export interface a{a():b}"; + assert_statement_string ~ctxt ~pretty:true "export {};"; + assert_statement_string ~ctxt ~pretty:true "export {a} from \"a\";"; + assert_statement_string ~ctxt ~pretty:true "export * from \"a\";"; + assert_statement_string ~ctxt ~pretty:true "export * as a from \"a\";"; + assert_statement_string ~ctxt ~pretty:true "export type {a};"; + assert_statement_string + ~ctxt + ~pretty:true + ("export {\n a,\n b as " ^ String.make 80 'c' ^ ",\n} from \"a\";"); + assert_statement_string + ~ctxt + ~pretty:true + ("export * as " ^ String.make 80 'a' ^ " from \"a\";"); + assert_statement_string ~ctxt ~pretty:true "export opaque type a = b;" ) + (* TODO: Flow does not parse this but should assert_statement_string ~ctxt "export a,{b}from'a';"; - assert_statement_string ~ctxt "export*as foo,{bar}from'a';"; *) - end; - - "default_export_declaration_statement" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "export default a;"; - assert_statement_string ~ctxt "export default a=b;"; - assert_statement_string ~ctxt "export default function(){}"; - assert_statement_string ~ctxt "export default class{}"; - end; - - "type_alias_statement" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt ~pretty:true "type a = a;"; - end; - - "opaque_type_alias_statement" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "opaque type a=a;"; - assert_statement_string ~ctxt "opaque type a:b=a;"; - assert_statement_string ~ctxt "opaque type a=a;"; - 
assert_statement_string ~ctxt "opaque type a:b=a;"; - assert_statement_string ~ctxt "opaque type a:b=c;"; - assert_statement_string ~ctxt ~pretty:true "opaque type a = a;"; - assert_statement_string ~ctxt ~pretty:true "opaque type a: b = a;"; - end; - - "declare_opaque_type_alias_statement" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare opaque type a;"; - assert_statement_string ~ctxt "declare opaque type a:b;"; - assert_statement_string ~ctxt ~pretty:true "declare opaque type a: b;"; - assert_statement_string ~ctxt "declare export opaque type a;"; - assert_statement_string ~ctxt "declare export opaque type a:b;"; - assert_statement_string ~ctxt ~pretty:true "declare export opaque type a: b;"; - end; - - "type_cast_expression" >:: - begin fun ctxt -> - assert_expression_string ~ctxt "(a:b)"; - assert_expression_string ~ctxt ~pretty:true "(a: b)"; - end; - - "type_parameter" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a<+a>=a;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt ~pretty:true ( - "type a = a;" - ); - assert_statement_string ~ctxt ~pretty:true ( - "type a<\n a,\n +a: b = " ^ String.make 80 'c' ^ ",\n> = a;" - ); - assert_statement_string ~ctxt ~pretty:true ( - "type a = " ^ String.make 80 'a' ^ ";" - ); - end; - - "type" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "type a=any;"; - assert_statement_string ~ctxt "type a=mixed;"; - assert_statement_string ~ctxt "type a=empty;"; - assert_statement_string ~ctxt "type a=void;"; - assert_statement_string ~ctxt "type a=null;"; - assert_statement_string ~ctxt "type a=number;"; - assert_statement_string ~ctxt "type a=string;"; - assert_statement_string ~ctxt "type a=boolean;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=?a;"; - assert_statement_string ~ctxt ~pretty:true "type a = ?a;"; - assert_statement_string ~ctxt "type a=Array;"; - assert_statement_string ~ctxt "type a=a.b;"; - assert_statement_string ~ctxt "type a=a.b.c;"; - assert_statement_string ~ctxt "type a=a;"; - assert_statement_string ~ctxt "type a=a.b;"; - assert_statement_string ~ctxt ~pretty:true ( - "type a = a.b;" - ); - assert_statement_string ~ctxt ~pretty:true ( - "type a = a.b<\n c,\n " ^ String.make 80 'd' ^ ",\n>;" - ); - assert_statement_string ~ctxt "type a=typeof a;"; - assert_statement_string ~ctxt "type a=[a,b];"; - assert_statement_string ~ctxt ~pretty:true "type a = [a, b];"; - assert_statement_string ~ctxt ~pretty:true ( - "type a = [\n a,\n " ^ String.make 80 'b' ^ ",\n];" - ); - assert_statement_string ~ctxt "type a=*;"; - assert_statement_string ~ctxt "type a='';"; - assert_statement_string ~ctxt "type a=1;"; - assert_statement_string ~ctxt "type a=true;"; - end; - - "type_function" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "type a=()=>c;"; - assert_statement_string ~ctxt "type a=(a:b)=>c;"; - assert_statement_string ~ctxt "type a=(a:b,c:d)=>c;"; - assert_statement_string ~ctxt "type a=(a:b,c?:d)=>c;"; - assert_statement_string ~ctxt "type a=(a,b)=>c;"; - assert_statement_string ~ctxt "type a=()=>c;"; - assert_statement_string ~ctxt "type a=(...a)=>c;"; - assert_statement_string ~ctxt ~pretty:true "type a = () => c;"; - assert_statement_string ~ctxt ~pretty:true "type a = (a) => c;"; - 
assert_statement_string ~ctxt ~pretty:true "type a = (a: b) => c;"; - assert_statement_string ~ctxt ~pretty:true "type a = (a?: b) => c;"; - assert_statement_string ~ctxt ~pretty:true "type a = (a?: b, c) => c;"; - assert_statement_string ~ctxt ~pretty:true "type a = (a?: b, c) => c;"; - assert_statement_string ~ctxt ~pretty:true ( - "type a = (\n a?: b,\n " ^ String.make 80 'c' ^ ",\n) => c;" - ); - end; - - "type_object" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "type a={};"; - assert_statement_string ~ctxt "type a={||};"; - assert_statement_string ~ctxt "type a={a:b};"; - assert_statement_string ~ctxt "type a={|a:b|};"; - assert_statement_string ~ctxt "type a={+a:b};"; - assert_statement_string ~ctxt "type a={a?:b};"; - assert_statement_string ~ctxt "type a={a:?b};"; - assert_statement_string ~ctxt "type a={a?:?b};"; - assert_statement_string ~ctxt "type a={\"a\":b};"; - assert_statement_string ~ctxt "type a={a:b};"; - assert_statement_string ~ctxt "type a={a:b,c:d};"; - assert_statement_string ~ctxt "type a={...a};"; - assert_statement_string ~ctxt "type a={a:b,...a};"; - assert_statement_string ~ctxt ~pretty:true "type a = {a: b};"; - assert_statement_string ~ctxt ~pretty:true "type a = {a: b, c: d};"; - assert_statement_string ~ctxt ~pretty:true ( - "type a = {\n a: b,\n c: " ^ String.make 80 'd' ^ ",\n};" - ); - assert_statement_string ~ctxt "type a={a():b};"; - assert_statement_string ~ctxt "type a={get a():b};"; - assert_statement_string ~ctxt "type a={set a():b};"; - assert_statement_string ~ctxt ~pretty:true "type a = {set a(): b};"; - assert_statement_string ~ctxt "type a={a?:()=>a};"; - assert_statement_string ~ctxt "type a={+a:()=>a};"; - assert_statement_string ~ctxt "type a={():a};"; - assert_statement_string ~ctxt "type a={[b]:a};"; - assert_statement_string ~ctxt "type a={[a:b]:a};"; - assert_statement_string ~ctxt "type a={+[a:b]:a};"; - assert_statement_string ~ctxt ~pretty:true "type a = {+[a: b]: a};"; - assert_statement_string ~ctxt "type a={a:b,+[a:b]:a,():a,c():b};"; - end; - - "type_union_or_intersection" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "type a=a|b;"; - assert_statement_string ~ctxt "type a=a|b|c;"; - assert_statement_string ~ctxt "type a=?(a|b);"; - assert_statement_string ~ctxt "type a=a&b;"; - assert_statement_string ~ctxt "type a=a&b&c;"; - assert_statement_string ~ctxt "type a=?(a&b);"; - assert_statement_string ~ctxt "type a=a|(b&c)|d;"; - assert_statement_string ~ctxt "type a=(a|b)&c;"; - assert_statement_string ~ctxt "type a=(a&b)|c;"; - assert_statement_string ~ctxt "type a=a|(b|c);"; - assert_statement_string ~ctxt "type a=(a&b)|c;"; - assert_statement_string ~ctxt "type a=a|(()=>b)|c;"; - assert_statement_string ~ctxt ~pretty:true "type a = a | b;"; - assert_statement_string ~ctxt ~pretty:true "type a = a | b | c;"; - assert_statement_string ~ctxt ~pretty:true "type a = a & b & c;"; - assert_statement_string ~ctxt ~pretty:true ( - "type a = \n | a\n | b\n | " ^ String.make 80 'c' ^ ";" - ); - end; - - "interface_declaration_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "interface a{}"; - assert_statement_string ~ctxt "interface a extends b{}"; - assert_statement_string ~ctxt "interface a extends b{}"; - assert_statement_string ~ctxt "interface a extends b,c{}"; - assert_statement_string ~ctxt ~pretty:true "interface a {}"; - assert_statement_string ~ctxt ~pretty:true "interface a extends b, c {}"; - assert_statement_string ~ctxt ~pretty:true ( - "interface a {\n a: b,\n d(): " ^ - String.make 
80 'c' ^ ",\n}" - ); - end; - - "declare_class_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare class a{}"; - assert_statement_string ~ctxt "declare class a extends b{}"; - assert_statement_string ~ctxt ~pretty:true ( - "declare class a {\n static a: b,\n static d(): " ^ - String.make 80 'c' ^ ",\n}" - ); - end; - - "declare_function_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare function a():b;"; - assert_statement_string ~ctxt ~pretty:true ( - "declare function a(): b;" - ); - assert_statement_string ~ctxt "declare function f():a%checks;"; - assert_statement_string ~ctxt "declare function f(a:b):a%checks(!a);"; - assert_statement_string ~ctxt ~pretty:true ( - "declare function f(a: b): a %checks(!a);" - ); - end; - - "declare_var_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare var a;"; - assert_statement_string ~ctxt "declare var a:b;"; - end; - - "declare_module_exports_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare module.exports:a;"; - end; - - "declare_module_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare module a{}"; - assert_statement_string ~ctxt "declare module \"a\"{}"; - assert_statement_string ~ctxt ~pretty:true "declare module \"a\" {}"; - end; - - "declare_export_declaration_statements" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "declare export default a;"; - assert_statement_string ~ctxt "declare export var a;"; - assert_statement_string ~ctxt "declare export function a():a;"; - assert_statement_string ~ctxt "declare export default function a():a;"; - assert_statement_string ~ctxt "declare export class a{}"; - assert_statement_string ~ctxt "declare export default class a{}"; - assert_statement_string ~ctxt "declare export{}"; - assert_statement_string ~ctxt "declare export{a,b}"; - assert_statement_string ~ctxt "declare export{a,b}from\"a\""; - assert_statement_string ~ctxt "declare export*from\"a\""; - end; - - "pattern" >:: - begin fun ctxt -> - assert_statement_string ~ctxt "let a=a;"; - assert_statement_string ~ctxt "let a?=a;"; - assert_statement_string ~ctxt "let a:b=a;"; - assert_statement_string ~ctxt "let a?:b=a;"; - assert_statement_string ~ctxt "let {}=a;"; - assert_statement_string ~ctxt "let {}:b=a;"; - assert_statement_string ~ctxt "let {a}=a;"; - assert_statement_string ~ctxt "let {a:b}=a;"; - assert_statement_string ~ctxt "let {a:b}=a;"; - assert_statement_string ~ctxt "let {a,b}=a;"; - assert_statement_string ~ctxt "let {a,b:{c}}=a;"; - assert_statement_string ~ctxt "let {a=b}=a;"; - assert_statement_string ~ctxt "let {a:b=c}=a;"; - assert_statement_string ~ctxt "let {a=++b}=a;"; - assert_statement_string ~ctxt "let {...a}=a;"; - assert_statement_string ~ctxt "let {a,...b}=a;"; - assert_statement_string ~ctxt ~pretty:true "let {a} = a;"; - assert_statement_string ~ctxt ~pretty:true "let {a: b} = a;"; - assert_statement_string ~ctxt ~pretty:true "let {a: b, c} = a;"; - assert_statement_string ~ctxt ~pretty:true "let {a: b, c = d} = a;"; - assert_statement_string ~ctxt ~pretty:true "let {...a} = a;"; - assert_statement_string ~ctxt ~pretty:true ( - "let {\n a: b,\n c = " ^ String.make 80 'd' ^ "\n} = a;" - ); - assert_statement_string ~ctxt ~pretty:true ( - "let {\n a: b,\n ...c" ^ String.make 80 'c' ^ "\n} = a;" - ); - assert_statement_string ~ctxt "let []=a;"; - assert_statement_string ~ctxt "let []:a=a;"; - assert_statement_string ~ctxt "let [a]=a;"; - assert_statement_string ~ctxt "let [a?]=a;"; - 
assert_statement_string ~ctxt "let [a:b]=a;"; - assert_statement_string ~ctxt "let [a?:b]=a;"; - assert_statement_string ~ctxt "let [a,b]=a;"; - assert_statement_string ~ctxt "let [,,a]=a;"; - assert_statement_string ~ctxt "let [[]]=a;"; - assert_statement_string ~ctxt "let [,,[a]]=a;"; - assert_statement_string ~ctxt "let [...a]=a;"; - assert_statement_string ~ctxt "let [a,...b]=a;"; - assert_statement_string ~ctxt ~pretty:true "let [a, b] = a;"; - assert_statement_string ~ctxt ~pretty:true "let [a, ...b] = a;"; - assert_statement_string ~ctxt ~pretty:true ( - "let [\n a,\n ...b" ^ String.make 80 'c' ^ "\n] = a;" - ); - assert_statement_string ~ctxt ~pretty:true "let [a, , b] = a;"; - assert_statement_string ~ctxt ~pretty:true ( - "let [\n a,\n ,\n " ^ String.make 80 'b' ^ "\n] = a;" - ); - end; - - "program_artifact_newline" >:: - begin fun ctxt -> - let ast = mk_program [ - S.expression (E.identifier "x"); - ] in - let layout = Js_layout_generator.program - ~preserve_docblock:false - ~checksum:(Some "@artifact abc123") - ast - in - let expected = - L.(program (fused [ - sequence ~break:Layout.Break_if_pretty ~inline:(true, true) ~indent:0 [ - loc (fused [loc (id "x"); atom ";"]); - ]; - Layout.Newline; - atom "/* @artifact abc123 */"; - ])) - in - assert_layout ~ctxt expected layout; - assert_output ~ctxt "x;\n/* @artifact abc123 */" layout; - assert_output ~ctxt ~pretty:true "x;\n/* @artifact abc123 */" layout; - end; - - "program_trailing_semicolon" >:: - begin fun ctxt -> - let ast = mk_program [ - S.expression (E.identifier "x"); - S.expression (E.identifier "y"); - ] in - let layout = Js_layout_generator.program - ~preserve_docblock:false - ~checksum:None - ast - in - let expected = - L.(program ( - fused_vertically ~inline:(true, true) [ - loc (fused [loc (id "x"); atom ";"]); - loc (fused [loc (id "y"); atom ";"]); - ] - )) - in - assert_layout ~ctxt expected layout - end; - - "regexp" >:: - begin fun ctxt -> - (* flags should be sorted *) - let regexp = expression_of_string "/foo/ymg" in - assert_expression ~ctxt "/foo/gmy" regexp - end; - - "string_literal_quotes" >:: - begin fun ctxt -> - assert_expression ~ctxt {|"'''"|} (expression_of_string {|"'''"|}); - assert_expression ~ctxt {|'"'|} (expression_of_string {|"\""|}); - assert_expression ~ctxt {|"''"|} (expression_of_string {|'\'\''|}); - assert_expression ~ctxt {|"''\""|} (expression_of_string {|"''\""|}); - assert_expression ~ctxt {|'""\''|} (expression_of_string {|'""\''|}); - end; - - "switch_case_space" >:: - begin fun ctxt -> - let assert_no_space ~ctxt expr = - let ret = statement_of_string ("switch(x){case "^expr^":break}") in - assert_statement ~ctxt ("switch(x){case"^expr^":break}") ret - in - - assert_no_space ~ctxt {|"foo"|}; - assert_no_space ~ctxt {|{foo:"bar"}|}; - assert_no_space ~ctxt {|[foo]|}; - assert_no_space ~ctxt {|!foo|}; - assert_no_space ~ctxt {|+foo|}; - assert_no_space ~ctxt {|-foo|}; - assert_no_space ~ctxt {|~foo|}; - - let ret = statement_of_string "switch(x){case (foo):break}" in - assert_statement ~ctxt "switch(x){case foo:break}" ret; - - let ret = statement_of_string "switch(x){case 123:break}" in - assert_statement ~ctxt "switch(x){case 123:break}" ret; - end; - - "throw_space" >:: - begin fun ctxt -> - let assert_no_space ~ctxt expr = - let ret = statement_of_string ("throw "^expr^";") in - assert_statement ~ctxt ("throw"^expr^";") ret - in - - assert_no_space ~ctxt {|"foo"|}; - assert_no_space ~ctxt {|{foo:"bar"}|}; - assert_no_space ~ctxt {|[foo]|}; - assert_no_space ~ctxt {|!foo|}; - 
assert_no_space ~ctxt {|+foo|}; - assert_no_space ~ctxt {|-foo|}; - assert_no_space ~ctxt {|~foo|}; - - assert_statement_string ~ctxt "throw foo;"; - assert_statement ~ctxt "throw foo;" (statement_of_string "throw (foo);"); - assert_statement_string ~ctxt "throw new Error();"; - end; - - "unicode_string_literal" >:: - begin fun ctxt -> - (* escaped using Unicode codepoint *) - let ast = expression_of_string {|"\u{1F4A9}"|} in - assert_expression ~ctxt {|"\ud83d\udca9"|} ast; - - (* escaped using UTF-16 (hex get lowercased) *) - let ast = expression_of_string {|"\uD83D\uDCA9"|} in - assert_expression ~ctxt {|"\ud83d\udca9"|} ast; - - (* literal emoji *) - let ast = expression_of_string "\"\xF0\x9F\x92\xA9\"" in - assert_expression ~ctxt {|"\ud83d\udca9"|} ast; - - (* unprintable ascii, escaped *) - let ast = expression_of_string {|"\x07"|} in - assert_expression ~ctxt {|"\x07"|} ast; - let ast = expression_of_string {|"\x11"|} in - assert_expression ~ctxt {|"\x11"|} ast; - - (* unprintable ascii, literal *) - let ast = expression_of_string "\"\x11\"" in - assert_expression ~ctxt {|"\x11"|} ast; - - (* special escapes *) - let ast = expression_of_string {|"\x09"|} in - assert_expression ~ctxt {|"\t"|} ast; - let ast = expression_of_string {|"\\"|} in - assert_expression ~ctxt {|"\\"|} ast; - end; - - "numbers" >:: - begin fun ctxt -> - assert_expression ~ctxt "100" (expression_of_string "1e2"); - assert_expression ~ctxt "1e3" (expression_of_string "1000"); - assert_expression ~ctxt "2592e6" (expression_of_string "2.592e+09"); - end -] + assert_statement_string ~ctxt "export*as foo,{bar}from'a';"; *); + ( "default_export_declaration_statement" + >:: fun ctxt -> + assert_statement_string ~ctxt "export default a;"; + assert_statement_string ~ctxt "export default a=b;"; + assert_statement_string ~ctxt "export default function(){}"; + assert_statement_string ~ctxt "export default class{}" ); + ( "type_alias_statement" + >:: fun ctxt -> + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt ~pretty:true "type a = a;" ); + ( "opaque_type_alias_statement" + >:: fun ctxt -> + assert_statement_string ~ctxt "opaque type a=a;"; + assert_statement_string ~ctxt "opaque type a:b=a;"; + assert_statement_string ~ctxt "opaque type a=a;"; + assert_statement_string ~ctxt "opaque type a:b=a;"; + assert_statement_string ~ctxt "opaque type a:b=c;"; + assert_statement_string ~ctxt ~pretty:true "opaque type a = a;"; + assert_statement_string ~ctxt ~pretty:true "opaque type a: b = a;" ); + ( "declare_opaque_type_alias_statement" + >:: fun ctxt -> + assert_statement_string ~ctxt "declare opaque type a;"; + assert_statement_string ~ctxt "declare opaque type a:b;"; + assert_statement_string ~ctxt ~pretty:true "declare opaque type a: b;"; + assert_statement_string ~ctxt "declare export opaque type a;"; + assert_statement_string ~ctxt "declare export opaque type a:b;"; + assert_statement_string ~ctxt ~pretty:true "declare export opaque type a: b;" ); + ( "type_cast_expression" + >:: fun ctxt -> + let layout = Js_layout_generator.expression (E.typecast (E.identifier "a") Types.mixed) in + assert_layout + ~ctxt + L.( + loc + (group + [ + atom "("; + loc (id "a"); + loc (fused [atom ":"; pretty_space; loc (atom "mixed")]); + atom ")"; + ])) + layout; + assert_output ~ctxt "(a:mixed)" layout; + assert_output ~ctxt ~pretty:true "(a: mixed)" layout; + + let a80 = String.make 80 'a' in + let layout = Js_layout_generator.expression (E.typecast (E.identifier a80) 
Types.mixed) in + assert_output ~ctxt ("(" ^ a80 ^ ":mixed)") layout; + assert_output ~ctxt ~pretty:true ("(" ^ a80 ^ ": mixed)") layout ); + ( "type_parameter" + >:: fun ctxt -> + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a<+a>=a;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt ~pretty:true "type a = a;"; + assert_statement_string + ~ctxt + ~pretty:true + ("type a<\n a,\n +a: b = " ^ String.make 80 'c' ^ ",\n> = a;"); + assert_statement_string ~ctxt ~pretty:true ("type a = " ^ String.make 80 'a' ^ ";") + ); + ( "type" + >:: fun ctxt -> + assert_statement_string ~ctxt "type a=any;"; + assert_statement_string ~ctxt "type a=mixed;"; + assert_statement_string ~ctxt "type a=empty;"; + assert_statement_string ~ctxt "type a=void;"; + assert_statement_string ~ctxt "type a=null;"; + assert_statement_string ~ctxt "type a=number;"; + assert_statement_string ~ctxt "type a=string;"; + assert_statement_string ~ctxt "type a=boolean;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=?a;"; + assert_statement_string ~ctxt ~pretty:true "type a = ?a;"; + assert_statement_string ~ctxt "type a=Array;"; + assert_statement_string ~ctxt "type a=a.b;"; + assert_statement_string ~ctxt "type a=a.b.c;"; + assert_statement_string ~ctxt "type a=a;"; + assert_statement_string ~ctxt "type a=a.b;"; + assert_statement_string ~ctxt ~pretty:true "type a = a.b;"; + assert_statement_string + ~ctxt + ~pretty:true + ("type a = a.b<\n c,\n " ^ String.make 80 'd' ^ ",\n>;"); + assert_statement_string ~ctxt "type a=typeof a;"; + assert_statement_string ~ctxt "type a=[a,b];"; + assert_statement_string ~ctxt ~pretty:true "type a = [a, b];"; + assert_statement_string + ~ctxt + ~pretty:true + ("type a = [\n a,\n " ^ String.make 80 'b' ^ ",\n];"); + assert_statement_string ~ctxt "type a=*;"; + assert_statement_string ~ctxt "type a='';"; + assert_statement_string ~ctxt "type a=1;"; + assert_statement_string ~ctxt "type a=true;" ); + ( "type_function" + >:: fun ctxt -> + assert_statement_string ~ctxt "type a=()=>c;"; + assert_statement_string ~ctxt "type a=(a:b)=>c;"; + assert_statement_string ~ctxt "type a=(a:b,c:d)=>c;"; + assert_statement_string ~ctxt "type a=(a:b,c?:d)=>c;"; + assert_statement_string ~ctxt "type a=(a,b)=>c;"; + assert_statement_string ~ctxt "type a=()=>c;"; + assert_statement_string ~ctxt "type a=(...a)=>c;"; + assert_statement_string ~ctxt ~pretty:true "type a = () => c;"; + assert_statement_string ~ctxt ~pretty:true "type a = (a) => c;"; + assert_statement_string ~ctxt ~pretty:true "type a = (a: b) => c;"; + assert_statement_string ~ctxt ~pretty:true "type a = (a?: b) => c;"; + assert_statement_string ~ctxt ~pretty:true "type a = (a?: b, c) => c;"; + assert_statement_string ~ctxt ~pretty:true "type a = (a?: b, c) => c;"; + assert_statement_string + ~ctxt + ~pretty:true + ("type a = (\n a?: b,\n " ^ String.make 80 'c' ^ "\n) => c;"); + let a30 = String.make 30 'a' in + let b30 = String.make 30 'b' in + assert_expression_string + ~ctxt + ("(" ^ a30 ^ ":" ^ a30 ^ ",..." 
^ b30 ^ ":" ^ b30 ^ "):c=>{}") ); + ( "type_object" + >:: fun ctxt -> + assert_statement_string ~ctxt "type a={};"; + assert_statement_string ~ctxt "type a={||};"; + assert_statement_string ~ctxt "type a={a:b};"; + assert_statement_string ~ctxt "type a={|a:b|};"; + assert_statement_string ~ctxt "type a={+a:b};"; + assert_statement_string ~ctxt "type a={a?:b};"; + assert_statement_string ~ctxt "type a={a:?b};"; + assert_statement_string ~ctxt "type a={a?:?b};"; + assert_statement_string ~ctxt "type a={\"a\":b};"; + assert_statement_string ~ctxt "type a={a:b};"; + assert_statement_string ~ctxt "type a={a:b,c:d};"; + assert_statement_string ~ctxt "type a={...a};"; + assert_statement_string ~ctxt "type a={a:b,...a};"; + assert_statement_string ~ctxt ~pretty:true "type a = {a: b};"; + assert_statement_string ~ctxt ~pretty:true "type a = {a: b, c: d};"; + assert_statement_string + ~ctxt + ~pretty:true + ("type a = {\n a: b,\n c: " ^ String.make 80 'd' ^ ",\n};"); + assert_statement_string ~ctxt "type a={a():b};"; + assert_statement_string ~ctxt "type a={get a():b};"; + assert_statement_string ~ctxt "type a={set a():b};"; + assert_statement_string ~ctxt ~pretty:true "type a = {set a(): b};"; + assert_statement_string ~ctxt "type a={a?:()=>a};"; + assert_statement_string ~ctxt "type a={+a:()=>a};"; + assert_statement_string ~ctxt "type a={():a};"; + assert_statement_string ~ctxt "type a={[b]:a};"; + assert_statement_string ~ctxt "type a={[a:b]:a};"; + assert_statement_string ~ctxt "type a={+[a:b]:a};"; + assert_statement_string ~ctxt ~pretty:true "type a = {+[a: b]: a};"; + assert_statement_string ~ctxt "type a={a:b,+[a:b]:a,():a,c():b};" ); + ( "type_union_or_intersection" + >:: fun ctxt -> + assert_statement_string ~ctxt "type a=a|b;"; + assert_statement_string ~ctxt "type a=a|b|c;"; + assert_statement_string ~ctxt "type a=?(a|b);"; + assert_statement_string ~ctxt "type a=a&b;"; + assert_statement_string ~ctxt "type a=a&b&c;"; + assert_statement_string ~ctxt "type a=?(a&b);"; + assert_statement_string ~ctxt "type a=a|(b&c)|d;"; + assert_statement_string ~ctxt "type a=(a|b)&c;"; + assert_statement_string ~ctxt "type a=(a&b)|c;"; + assert_statement_string ~ctxt "type a=a|(b|c);"; + assert_statement_string ~ctxt "type a=(a&b)|c;"; + assert_statement_string ~ctxt "type a=a|(()=>b)|c;"; + assert_statement_string ~ctxt ~pretty:true "type a = a | b;"; + assert_statement_string ~ctxt ~pretty:true "type a = a | b | c;"; + assert_statement_string ~ctxt ~pretty:true "type a = a & b & c;"; + assert_statement_string + ~ctxt + ~pretty:true + ("type a = \n | a\n | b\n | " ^ String.make 80 'c' ^ ";") ); + ( "interface_declaration_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "interface a{}"; + assert_statement_string ~ctxt "interface a extends b{}"; + assert_statement_string ~ctxt "interface a extends b{}"; + assert_statement_string ~ctxt "interface a extends b,c{}"; + assert_statement_string ~ctxt ~pretty:true "interface a {}"; + assert_statement_string ~ctxt ~pretty:true "interface a extends b, c {}"; + assert_statement_string + ~ctxt + ~pretty:true + ("interface a {\n a: b,\n d(): " ^ String.make 80 'c' ^ ",\n}") ); + ( "declare_class_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "declare class a{}"; + assert_statement_string ~ctxt "declare class a extends b{}"; + assert_statement_string ~ctxt "declare class a implements b{}"; + assert_statement_string ~ctxt "declare class a extends b mixins c implements d{}"; + assert_statement_string ~ctxt "declare class a extends b 
implements c{}"; + assert_statement_string + ~ctxt + ~pretty:true + ("declare class a {\n static a: b,\n static d(): " ^ String.make 80 'c' ^ ",\n}") ); + ( "declare_function_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "declare function a():b;"; + assert_statement_string ~ctxt ~pretty:true "declare function a(): b;"; + assert_statement_string ~ctxt "declare function f():a%checks;"; + assert_statement_string ~ctxt "declare function f(a:b):a%checks(!a);"; + assert_statement_string ~ctxt ~pretty:true "declare function f(a: b): a %checks(!a);" ); + ( "declare_var_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "declare var a;"; + assert_statement_string ~ctxt "declare var a:b;" ); + ( "declare_module_exports_statements" + >:: (fun ctxt -> assert_statement_string ~ctxt "declare module.exports:a;") ); + ( "declare_module_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "declare module a{}"; + assert_statement_string ~ctxt "declare module \"a\"{}"; + assert_statement_string ~ctxt ~pretty:true "declare module \"a\" {}" ); + ( "declare_export_declaration_statements" + >:: fun ctxt -> + assert_statement_string ~ctxt "declare export default a;"; + assert_statement_string ~ctxt "declare export var a;"; + assert_statement_string ~ctxt "declare export function a():a;"; + assert_statement_string ~ctxt "declare export default function a():a;"; + assert_statement_string ~ctxt "declare export class a{}"; + assert_statement_string ~ctxt "declare export default class a{}"; + assert_statement_string ~ctxt "declare export{}"; + assert_statement_string ~ctxt "declare export{a,b}"; + assert_statement_string ~ctxt "declare export{a,b}from\"a\""; + assert_statement_string ~ctxt "declare export*from\"a\"" ); + ( "regexp" + >:: fun ctxt -> + (* flags should be sorted *) + let regexp = expression_of_string "/foo/ymg" in + assert_expression ~ctxt "/foo/gmy" regexp ); + ( "string_literal_quotes" + >:: fun ctxt -> + assert_expression ~ctxt {|"'''"|} (expression_of_string {|"'''"|}); + assert_expression ~ctxt {|'"'|} (expression_of_string {|"\""|}); + assert_expression ~ctxt {|"''"|} (expression_of_string {|'\'\''|}); + assert_expression ~ctxt {|"''\""|} (expression_of_string {|"''\""|}); + assert_expression ~ctxt {|'""\''|} (expression_of_string {|'""\''|}) ); + ( "switch" + >:: fun ctxt -> + let case1_loc = + Loc.{ none with start = { line = 1; column = 1 }; _end = { line = 2; column = 3 } } + in + let case2_loc = + Loc.{ none with start = { line = 4; column = 1 }; _end = { line = 5; column = 3 } } + in + let layout = + Js_layout_generator.statement + (S.switch + (E.identifier "x") + [ + S.switch_case + ~loc:case1_loc + ~test:(E.literal (Literals.string "a")) + [S.expression (E.increment ~prefix:false (E.identifier "x")); S.break ()]; + S.switch_case + ~loc:case2_loc + ~test:(E.literal (Literals.string "b")) + [S.expression (E.increment ~prefix:false (E.identifier "x")); S.break ()]; + ]) + in + assert_layout + ~ctxt + L.( + loc + (fused + [ + atom "switch"; + pretty_space; + group [atom "("; indent (fused [softline; loc (id "x")]); softline; atom ")"]; + pretty_space; + atom "{"; + indent + (fused + [ + pretty_hardline; + loc + ~loc:case1_loc + (fused + [ + atom "case"; + pretty_space; + loc (fused [atom "\""; atom "a"; atom "\""]); + atom ":"; + indent + (fused + [ + pretty_hardline; + loc + (fused [loc (fused [loc (id "x"); atom "++"]); atom ";"]); + pretty_hardline; + loc (fused [atom "break"; atom ";"]); + ]); + ]); + pretty_hardline; + pretty_hardline; + loc + 
~loc:case2_loc + (fused + [ + atom "case"; + pretty_space; + loc (fused [atom "\""; atom "b"; atom "\""]); + atom ":"; + indent + (fused + [ + pretty_hardline; + loc + (fused [loc (fused [loc (id "x"); atom "++"]); atom ";"]); + pretty_hardline; + loc + (fused [atom "break"; Layout.IfPretty (atom ";", empty)]); + ]); + ]); + ]); + pretty_hardline; + atom "}"; + ])) + layout; + assert_output ~ctxt "switch(x){case\"a\":x++;break;case\"b\":x++;break}" layout; + assert_output + ~ctxt + ~pretty:true + ( "switch (x) {\n" + ^ " case \"a\":\n" + ^ " x++;\n" + ^ " break;\n" + ^ " \n" + (* TODO: fix trailing whitespace *) + ^ " case \"b\":\n" + ^ " x++;\n" + ^ " break;\n" + ^ "}" ) + layout ); + ( "switch_case_space" + >:: fun ctxt -> + let assert_no_space ~ctxt expr = + let ret = statement_of_string ("switch(x){case " ^ expr ^ ":break}") in + assert_statement ~ctxt ("switch(x){case" ^ expr ^ ":break}") ret + in + assert_no_space ~ctxt {|"foo"|}; + assert_no_space ~ctxt {|{foo:"bar"}|}; + assert_no_space ~ctxt {|[foo]|}; + assert_no_space ~ctxt {|!foo|}; + assert_no_space ~ctxt {|+foo|}; + assert_no_space ~ctxt {|-foo|}; + assert_no_space ~ctxt {|~foo|}; + + let ret = statement_of_string "switch(x){case (foo):break}" in + assert_statement ~ctxt "switch(x){case foo:break}" ret; + + let ret = statement_of_string "switch(x){case 123:break}" in + assert_statement ~ctxt "switch(x){case 123:break}" ret ); + ( "switch_case_empty" + >:: fun ctxt -> + let layout = + Js_layout_generator.statement + (S.switch + (E.identifier "x") + [S.switch_case ~test:(E.literal (Literals.string "a")) [S.empty ()]]) + in + assert_output ~ctxt "switch(x){case\"a\":;}" layout; + assert_output + ~ctxt + ~pretty:true + ("switch (x) {\n" ^ " case \"a\":\n" ^ " ;\n" ^ "}") + layout ); + ( "throw_space" + >:: fun ctxt -> + let assert_no_space ~ctxt expr = + let ret = statement_of_string ("throw " ^ expr ^ ";") in + assert_statement ~ctxt ("throw" ^ expr ^ ";") ret + in + assert_no_space ~ctxt {|"foo"|}; + assert_no_space ~ctxt {|{foo:"bar"}|}; + assert_no_space ~ctxt {|[foo]|}; + assert_no_space ~ctxt {|!foo|}; + assert_no_space ~ctxt {|+foo|}; + assert_no_space ~ctxt {|-foo|}; + assert_no_space ~ctxt {|~foo|}; + + assert_statement_string ~ctxt "throw foo;"; + assert_statement ~ctxt "throw foo;" (statement_of_string "throw (foo);"); + assert_statement_string ~ctxt "throw new Error();" ); + ( "string_literal" + >:: fun ctxt -> + let ast = E.literal (Literals.string "a") in + let layout = Js_layout_generator.expression ast in + assert_layout ~ctxt L.(loc (fused [atom "\""; atom "a"; atom "\""])) layout; + assert_output ~ctxt {|"a"|} layout; + assert_output ~ctxt ~pretty:true {|"a"|} layout ); + ( "unicode_string_literal" + >:: fun ctxt -> + (* escaped using Unicode codepoint *) + let ast = expression_of_string {|"\u{1F4A9}"|} in + assert_expression ~ctxt {|"\ud83d\udca9"|} ast; + + (* escaped using UTF-16 (hex get lowercased) *) + let ast = expression_of_string {|"\uD83D\uDCA9"|} in + assert_expression ~ctxt {|"\ud83d\udca9"|} ast; + + (* literal emoji *) + let ast = expression_of_string "\"\xF0\x9F\x92\xA9\"" in + assert_expression ~ctxt {|"\ud83d\udca9"|} ast; + + (* zero followed by ASCII number *) + let ast = expression_of_string "\"\x00\x31\"" in + assert_expression ~ctxt {|"\x001"|} ast; + + (* not `\01`! *) + let ast = expression_of_string "\"\x00\x39\"" in + assert_expression ~ctxt {|"\x009"|} ast; + + (* not `\09`! 
*) + + (* unprintable ascii, escaped *) + let ast = expression_of_string {|"\x07"|} in + assert_expression ~ctxt {|"\x07"|} ast; + let ast = expression_of_string {|"\x11"|} in + assert_expression ~ctxt {|"\x11"|} ast; + + (* unprintable ascii, literal *) + let ast = expression_of_string "\"\x11\"" in + assert_expression ~ctxt {|"\x11"|} ast; + + (* special escapes *) + let ast = expression_of_string {|"\x09"|} in + assert_expression ~ctxt {|"\t"|} ast; + let ast = expression_of_string {|"\\"|} in + assert_expression ~ctxt {|"\\"|} ast ); + ( "numbers" + >:: fun ctxt -> + assert_expression ~ctxt "100" (expression_of_string "1e2"); + assert_expression ~ctxt "1e3" (expression_of_string "1000"); + assert_expression ~ctxt "2592e6" (expression_of_string "2.592e+09") ); + ( "sequence_long" + >:: fun ctxt -> + let x80 = String.make 80 'x' in + let layout = + Js_layout_generator.expression (E.sequence [E.identifier x80; E.identifier x80]) + in + assert_output ~ctxt (x80 ^ "," ^ x80) layout; + assert_output ~ctxt ~pretty:true (x80 ^ ",\n" ^ x80) layout ); + ( "with_statement_with_empty_body" + >:: fun ctxt -> + let layout = Js_layout_generator.statement (S.with_ (E.identifier "x") (S.empty ())) in + assert_output ~ctxt "with(x);" layout; + assert_output ~ctxt ~pretty:true "with (x);" layout ); + ( "enum_of_boolean" + >:: fun ctxt -> + S.EnumDeclarations.( + let layout ~explicit_type = + Js_layout_generator.statement + @@ S.enum_declaration + (I.identifier "E") + (boolean_body + ~explicit_type + [ + initialized_member (I.identifier "A") true; + initialized_member (I.identifier "B") false; + ]) + in + assert_output ~ctxt "enum E{A=true,B=false,}" (layout ~explicit_type:false); + let pretty_output = "enum E {\n" ^ " A = true,\n" ^ " B = false,\n" ^ "}" in + assert_output ~ctxt ~pretty:true pretty_output (layout ~explicit_type:false); + + assert_output ~ctxt "enum E of boolean{A=true,B=false,}" (layout ~explicit_type:true); + let explicit_type_pretty_output = + "enum E of boolean {\n" ^ " A = true,\n" ^ " B = false,\n" ^ "}" + in + assert_output + ~ctxt + ~pretty:true + explicit_type_pretty_output + (layout ~explicit_type:true)) ); + ( "enum_of_number" + >:: fun ctxt -> + S.EnumDeclarations.( + let layout ~explicit_type = + Js_layout_generator.statement + @@ S.enum_declaration + (I.identifier "E") + (number_body + ~explicit_type + [ + initialized_member (I.identifier "A") (number_literal 1.0 "1"); + initialized_member (I.identifier "B") (number_literal 2.0 "2"); + ]) + in + assert_output ~ctxt "enum E{A=1,B=2,}" (layout ~explicit_type:false); + let pretty_output = "enum E {\n" ^ " A = 1,\n" ^ " B = 2,\n" ^ "}" in + assert_output ~ctxt ~pretty:true pretty_output (layout ~explicit_type:false); + + assert_output ~ctxt "enum E of number{A=1,B=2,}" (layout ~explicit_type:true); + let explicit_type_pretty_output = + "enum E of number {\n" ^ " A = 1,\n" ^ " B = 2,\n" ^ "}" + in + assert_output + ~ctxt + ~pretty:true + explicit_type_pretty_output + (layout ~explicit_type:true)) ); + ( "enum_of_string_initialized" + >:: fun ctxt -> + S.EnumDeclarations.( + let layout ~explicit_type = + Js_layout_generator.statement + @@ S.enum_declaration + (I.identifier "E") + (string_initialized_body + ~explicit_type + [ + initialized_member (I.identifier "A") (string_literal "a"); + initialized_member (I.identifier "B") (string_literal "b"); + ]) + in + assert_output ~ctxt "enum E{A=\"a\",B=\"b\",}" (layout ~explicit_type:false); + let pretty_output = "enum E {\n" ^ " A = \"a\",\n" ^ " B = \"b\",\n" ^ "}" in + assert_output 
~ctxt ~pretty:true pretty_output (layout ~explicit_type:false); + + assert_output ~ctxt "enum E of string{A=\"a\",B=\"b\",}" (layout ~explicit_type:true); + let explicit_type_pretty_output = + "enum E of string {\n" ^ " A = \"a\",\n" ^ " B = \"b\",\n" ^ "}" + in + assert_output + ~ctxt + ~pretty:true + explicit_type_pretty_output + (layout ~explicit_type:true)) ); + ( "enum_of_string_defaulted" + >:: fun ctxt -> + S.EnumDeclarations.( + let layout ~explicit_type = + Js_layout_generator.statement + @@ S.enum_declaration + (I.identifier "E") + (string_defaulted_body + ~explicit_type + [defaulted_member (I.identifier "A"); defaulted_member (I.identifier "B")]) + in + assert_output ~ctxt "enum E{A,B,}" (layout ~explicit_type:false); + let pretty_output = "enum E {\n" ^ " A,\n" ^ " B,\n" ^ "}" in + assert_output ~ctxt ~pretty:true pretty_output (layout ~explicit_type:false); + + assert_output ~ctxt "enum E of string{A,B,}" (layout ~explicit_type:true); + let explicit_type_pretty_output = "enum E of string {\n" ^ " A,\n" ^ " B,\n" ^ "}" in + assert_output + ~ctxt + ~pretty:true + explicit_type_pretty_output + (layout ~explicit_type:true)) ); + ( "enum_of_symbol" + >:: fun ctxt -> + S.EnumDeclarations.( + let layout = + Js_layout_generator.statement + @@ S.enum_declaration + (I.identifier "E") + (symbol_body + [defaulted_member (I.identifier "A"); defaulted_member (I.identifier "B")]) + in + assert_output ~ctxt "enum E of symbol{A,B,}" layout; + let pretty_output = "enum E of symbol {\n" ^ " A,\n" ^ " B,\n" ^ "}" in + assert_output ~ctxt ~pretty:true pretty_output layout) ); + ] diff --git a/src/parser_utils/output/__tests__/layout_generator_test_utils.ml b/src/parser_utils/output/__tests__/layout_generator_test_utils.ml index 17f51ecadef..98a82918606 100644 --- a/src/parser_utils/output/__tests__/layout_generator_test_utils.ml +++ b/src/parser_utils/output/__tests__/layout_generator_test_utils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
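The next hunk reworks `assert_output` in `layout_generator_test_utils.ml`. As a quick orientation (not part of the diff), the round-trip check it performs can be sketched as follows; the module names and printer arguments are taken from the hunk itself, and everything else here is an illustrative assumption.

(* Sketch only: the shape of the comparison assert_output performs.
   Pretty_printer, Compact_printer and Source come from the hunk below;
   check_layout itself is illustrative, not part of the change. *)
let check_layout ~pretty expected layout =
  let print =
    if pretty then
      Pretty_printer.print ~source_maps:None ~skip_endline:false
    else
      Compact_printer.print ~source_maps:None
  in
  let out = Source.contents (print layout) in
  (* both printers append a trailing newline, which the helper strips *)
  String.sub out 0 (String.length out - 1) = expected

In the tests above, `assert_output ~ctxt "for(a in b){}" layout` and its `~pretty:true` counterpart are two instances of this same comparison, one per printer.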
@@ -9,25 +9,29 @@ open OUnit2 open Ast_builder let space_regex = Str.regexp_string " " + let newline_regex = Str.regexp_string "\n" -let assert_output ~ctxt ?msg ?(pretty=false) expected_str layout = +let assert_output ~ctxt ?msg ?(pretty = false) expected_str layout = let print = - if pretty then Pretty_printer.print ~source_maps:None ~skip_endline:false - else Compact_printer.print ~source_maps:None + if pretty then + Pretty_printer.print ~source_maps:None ~skip_endline:false + else + Compact_printer.print ~source_maps:None in let out = print layout |> Source.contents in - let out = String.sub out 0 (String.length out - 1) in (* remove trailing \n *) + let out = String.sub out 0 (String.length out - 1) in + (* remove trailing \n *) let printer x = x |> Str.global_replace space_regex "\xE2\x90\xA3" (* open box *) - |> Str.global_replace newline_regex "\xC2\xAC\n" (* not sign, what Atom uses *) + |> Str.global_replace newline_regex "\xC2\xAC\n" + (* not sign, what Atom uses *) in assert_equal ~ctxt ?msg ~printer expected_str out let assert_expression - ~ctxt ?msg ?pretty ?(expr_ctxt=Js_layout_generator.normal_context) - expected_str ast = + ~ctxt ?msg ?pretty ?(expr_ctxt = Js_layout_generator.normal_context) expected_str ast = let layout = Js_layout_generator.expression ~ctxt:expr_ctxt ast in assert_output ~ctxt ?msg ?pretty expected_str layout diff --git a/src/parser_utils/output/__tests__/layout_test.ml b/src/parser_utils/output/__tests__/layout_test.ml index 0ad364f80c6..b72c5b6595f 100644 --- a/src/parser_utils/output/__tests__/layout_test.ml +++ b/src/parser_utils/output/__tests__/layout_test.ml @@ -1,46 +1,45 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) - open OUnit2 open Layout open Layout_test_utils -let tests = "layout" >::: [ - "fuse_list" >:: - begin fun ctxt -> - let a = Atom "a" in - let b = Atom "b" in - let c = Atom "c" in - let d = Atom "d" in - let sep = Atom "," in - - let actual = fuse_list [a; b] in - let expected = Concat [a; pretty_space; b] in - assert_layout ~ctxt expected actual; - - let actual = fuse_list [a; b; c] in - let expected = Concat [a; pretty_space; b; pretty_space; c] in - assert_layout ~ctxt expected actual; - - let actual = fuse_list ~sep [a; b] in - let expected = Concat [a; sep; pretty_space; b] in - assert_layout ~ctxt expected actual; - - let actual = fuse_list ~sep [a; b; c] in - let expected = Concat [a; sep; pretty_space; b; sep; pretty_space; c] in - assert_layout ~ctxt expected actual; - - let actual = fuse_list ~wrap:(c, d) [a; b] in - let expected = Concat [c; a; pretty_space; b; d] in - assert_layout ~ctxt expected actual; - - let actual = fuse_list ~sep ~wrap:(c, d) [a; b] in - let expected = Concat [c; a; sep; pretty_space; b; d] in - assert_layout ~ctxt expected actual; - end; -] +let tests = + "layout" + >::: [ + ( "fuse_list" + >:: fun ctxt -> + let a = Atom "a" in + let b = Atom "b" in + let c = Atom "c" in + let d = Atom "d" in + let sep = Atom "," in + let actual = fuse_list [a; b] in + let expected = Concat [a; pretty_space; b] in + assert_layout ~ctxt expected actual; + + let actual = fuse_list [a; b; c] in + let expected = Concat [a; pretty_space; b; pretty_space; c] in + assert_layout ~ctxt expected actual; + + let actual = fuse_list ~sep [a; b] in + let expected = Concat [a; sep; pretty_space; b] in + assert_layout ~ctxt expected actual; + + let actual = fuse_list ~sep [a; b; c] in + let expected = Concat [a; sep; pretty_space; b; sep; pretty_space; c] in + assert_layout ~ctxt expected actual; + + let actual = fuse_list ~wrap:(c, d) [a; b] in + let expected = Concat [c; a; pretty_space; b; d] in + assert_layout ~ctxt expected actual; + + let actual = fuse_list ~sep ~wrap:(c, d) [a; b] in + let expected = Concat [c; a; sep; pretty_space; b; d] in + assert_layout ~ctxt expected actual ); + ] diff --git a/src/parser_utils/output/__tests__/layout_test_utils.ml b/src/parser_utils/output/__tests__/layout_test_utils.ml index c7a800359a1..528aedb9c0b 100644 --- a/src/parser_utils/output/__tests__/layout_test_utils.ml +++ b/src/parser_utils/output/__tests__/layout_test_utils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
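Before the large `layout_test_utils.ml` hunk below, a brief sketch of how its `Layout_builder` shorthands desugar to the underlying `Layout` constructors may help when reading the expected layouts in the tests above. The definitions are copied from the hunk; the output comments reflect what the names and the surrounding tests suggest rather than verified claims.

(* Illustrative only; the real definitions live in Layout_builder below. *)
open Layout

let pretty_space = IfPretty (Atom " ", Empty)  (* " " when pretty-printing, nothing when compact *)
let softline = IfBreak (Newline, Empty)        (* newline only when the enclosing group breaks *)
let line = IfBreak (Newline, Atom " ")         (* newline when breaking, a space otherwise *)

(* "a + b" as a layout node: expected to print compactly as "a+b"
   and prettily as "a + b". *)
let example = Concat [Atom "a"; pretty_space; Atom "+"; pretty_space; Atom "b"]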
@@ -11,226 +11,236 @@ open Ast_builder module Layout_builder = struct open Layout - let expression ?(expr_ctxt=Js_layout_generator.normal_context) ast = + let expression ?(expr_ctxt = Js_layout_generator.normal_context) ast = Js_layout_generator.expression ~ctxt:expr_ctxt ast let empty = Empty - let loc ?(loc=Loc.none) node = - SourceLocation (loc, node) + let loc ?(loc = Loc.none) node = SourceLocation (loc, node) - let program_loc loc = - { loc with Loc.start = { Loc.line = 1; column = 0; offset = 0; }} + let program_loc loc = { loc with Loc.start = { Loc.line = 1; column = 0 } } - let program ?loc:(prog_loc=Loc.none) node = - loc ~loc:(program_loc prog_loc) node + let program ?loc:(prog_loc = Loc.none) node = loc ~loc:(program_loc prog_loc) node - let sequence ~break ?(inline=(false, false)) ?(indent=2) items = - Sequence ({break; inline; indent}, items) + let sequence ~break ?(inline = (false, false)) ?(indent = 2) items = + Sequence ({ break; inline; indent }, items) - let group items = - Group items + let group items = Group items - let fused items = - Concat items + let fused items = Concat items - let fused_vertically - ?(indent=0) - ?(inline=(false, false)) - items = - Sequence ({ break=Break_if_pretty; indent; inline }, items) + let fused_vertically ?(indent = 0) ?(inline = (false, false)) items = + Sequence ({ break = Break_if_pretty; indent; inline }, items) - let id ?(loc=Loc.none) str = - Identifier (loc, str) + let id ?(loc = Loc.none) str = Identifier (loc, str) - let atom str = - Atom str + let atom str = Atom str - let pretty_space = - IfPretty (Atom " ", Empty) + let space = Atom " " - let flat_pretty_space = - IfBreak (Empty, IfPretty (Atom " ", Empty)) + let hardline = Newline - let pretty_newline = - IfPretty (Newline, Empty) + let pretty_space = IfPretty (space, Empty) - let line = - IfBreak (Newline, pretty_space) + let ugly_space = IfPretty (Empty, space) - let softline = - IfBreak (Newline, Empty) + let flat_pretty_space = IfBreak (Empty, IfPretty (space, Empty)) - let indent node = Indent node + let pretty_hardline = IfPretty (Newline, Empty) + + let line = IfBreak (Newline, space) + + let pretty_line = IfBreak (Newline, pretty_space) - let wrap_in_parens_raw x = - [atom "("; sequence ~break:Layout.Break_if_needed [x]; atom ")"] + let softline = IfBreak (Newline, Empty) - let wrap_in_parens x = fused (wrap_in_parens_raw x) + let indent node = Indent node + + let wrap_in_parens x = group [atom "("; x; atom ")"] - type printer_pos = Word of string | Phrase of string + type printer_pos = + | Word of string + | Phrase of string let printer = let spf = Printf.sprintf in let is_program_loc loc = - let open Loc in - match loc with - | { source = None; - start = { line = 1; column = 0; offset = 0 }; - _end = { line = 0; column = 0; offset = 0 } - } -> true - | _ -> false + Loc.( + match loc with + | { source = None; start = { line = 1; column = 0 }; _end = { line = 0; column = 0 } } -> + true + | _ -> false) in let string_of_loc loc = - let open Loc in - spf "{Loc.none with start={Loc.line=%d; column=%d; offset=%d}; _end={Loc.line=%d; column=%d; offset=%d}}" - loc.start.line loc.start.column loc.start.offset - loc._end.line loc._end.column loc._end.offset + Loc.( + spf + "{Loc.none with start={Loc.line=%d; column=%d}; _end={Loc.line=%d; column=%d}}" + loc.start.line + loc.start.column + loc._end.line + loc._end.column) in let string_of_when_to_break = function - | Break_if_needed -> "Layout.Break_if_needed" - | Break_if_pretty -> "Layout.Break_if_pretty" + | 
Break_if_needed -> "Layout.Break_if_needed" + | Break_if_pretty -> "Layout.Break_if_pretty" in let word x = Word x in let phrase fmt = Printf.ksprintf (fun str -> Phrase str) fmt in let rec top ~i = function - | SourceLocation (loc, child) -> - if is_program_loc loc then - phrase "program %s" (helper ~i child) - else - let loc = if loc = Loc.none then "" else spf " ~loc:%s" (string_of_loc loc) in - phrase "loc%s %s" loc (helper ~i child) - - | Group items -> - phrase "group %s" (list ~i items) - - | Concat items -> - phrase "fused %s" (list ~i items) - - | Sequence ({ break = Break_if_pretty; inline = (false, false); indent = 0 }, items) -> - phrase "fused_vertically %s" (list ~i items) - - | Sequence ( - { break = Break_if_needed; inline = (true, true); indent = 0 }, - [Concat [ - Atom "("; - Sequence ({ break = Break_if_needed; inline = (false, false); indent = 2}, [x]); - Atom ")"; - ]] - ) -> - phrase "wrap_in_parens %s" (helper ~i x) - - | Sequence ({ break; inline; indent; }, items) -> - let break = spf " ~break:%s" (string_of_when_to_break break) in - let inline = match inline with - | false, false -> "" - | before, after -> spf " ~inline:(%b, %b)" before after - in - let indent = if indent = 2 then "" else spf " ~indent:%d" indent in - phrase "sequence%s%s%s %s" break inline indent (list ~i items) - - | Atom str -> - phrase "atom %S" str - - | Identifier (loc, str) -> - let loc = if loc = Loc.none then "" else spf " ~loc:%s" (string_of_loc loc) in - phrase "id%s %S" loc str - - | IfPretty (Atom " ", Empty) -> word "pretty_space" - | IfPretty (Newline, Empty) -> word "pretty_newline" - - | IfPretty (left, right) -> - phrase "Layout.IfPretty (%s, %s)" (helper ~i left) (helper ~i right) - - | IfBreak (Empty, IfPretty (Atom " ", Empty)) -> word "flat_pretty_space" - - | IfBreak (Newline, IfPretty (Atom " ", Empty)) -> word "line" - | IfBreak (Newline, Empty) -> word "softline" - - | IfBreak (left, right) -> - phrase "Layout.IfBreak (%s, %s)" (helper ~i left) (helper ~i right) - - | Indent node -> - phrase "indent (%s)" (helper ~i node) - - | Newline -> - word "Newline" - - | Empty -> - word "empty" + | SourceLocation (loc, child) -> + if is_program_loc loc then + phrase "program %s" (helper ~i child) + else + let loc = + if loc = Loc.none then + "" + else + spf " ~loc:%s" (string_of_loc loc) + in + phrase "loc%s %s" loc (helper ~i child) + | Group [Atom "("; x; Atom ")"] -> phrase "wrap_in_parens %s" (helper ~i x) + | Group items -> phrase "group %s" (list ~i items) + | Concat items -> phrase "fused %s" (list ~i items) + | Sequence ({ break = Break_if_pretty; inline = (false, false); indent = 0 }, items) -> + phrase "fused_vertically %s" (list ~i items) + | Sequence ({ break; inline; indent }, items) -> + let break = spf " ~break:%s" (string_of_when_to_break break) in + let inline = + match inline with + | (false, false) -> "" + | (before, after) -> spf " ~inline:(%b, %b)" before after + in + let indent = + if indent = 2 then + "" + else + spf " ~indent:%d" indent + in + phrase "sequence%s%s%s %s" break inline indent (list ~i items) + | Atom " " -> word "space" + | Atom str -> phrase "atom %S" str + | Identifier (loc, str) -> + let loc = + if loc = Loc.none then + "" + else + spf " ~loc:%s" (string_of_loc loc) + in + phrase "id%s %S" loc str + | IfPretty (Atom " ", Empty) -> word "pretty_space" + | IfPretty (Newline, Empty) -> word "pretty_hardline" + | IfPretty (Empty, Atom " ") -> word "ugly_space" + | IfPretty (left, right) -> + phrase "Layout.IfPretty (%s, %s)" (helper ~i left) 
(helper ~i right) + | IfBreak (Empty, IfPretty (Atom " ", Empty)) -> word "flat_pretty_space" + | IfBreak (Newline, Atom " ") -> word "line" + | IfBreak (Newline, IfPretty (Atom " ", Empty)) -> word "pretty_line" + | IfBreak (Newline, Empty) -> word "softline" + | IfBreak (left, right) -> + phrase "Layout.IfBreak (%s, %s)" (helper ~i left) (helper ~i right) + | Indent node -> phrase "indent (%s)" (helper ~i node) + | Newline -> word "hardline" + | Empty -> word "empty" and list ~i nodes = let indent = String.make (i * 2) ' ' in let f node = match top ~i:(i + 1) node with | Word str - | Phrase str -> spf " %s%s;" indent str - in - let str = nodes - |> List.map f - |> String.concat "\n" + | Phrase str -> + spf " %s%s;" indent str in + let str = nodes |> Core_list.map ~f |> String.concat "\n" in spf "[\n%s\n%s]" str indent and helper ~i node = match top ~i node with | Word str -> str | Phrase str -> spf "(%s)" str in - fun node -> - spf "L.%s" (helper ~i:0 node) + (fun node -> spf "L.%s" (helper ~i:0 node)) end module Layout_matcher = struct open Layout let return x = Ok x - let (>>=) x f = match x with + + let ( >>= ) x f = + match x with | Error _ as x -> x | Ok x -> f x let empty = function | Empty -> Ok () - | x -> Error x + | x -> Error ("expected Empty", x) let loc = function | SourceLocation (_, x) -> Ok x - | x -> Error x + | x -> Error ("expected SourceLocation", x) + + let indent = function + | Indent x -> Ok x + | x -> Error ("expected Indent", x) + + let group = function + | Group x -> Ok x + | x -> Error ("expected Group", x) + + let nth_group n = function + | Group xs as layout -> + begin + try Ok (List.nth xs n) + with Failure _ -> Error ("couldn't get " ^ string_of_int n ^ "th Group element", layout) + end + | x -> Error ("expected Group", x) let fused = function | Concat x -> Ok x - | x -> Error x + | x -> Error ("expected Concat", x) let nth_fused n = function - | Concat xs as layout -> (try Ok (List.nth xs n) with Failure _ -> Error layout) - | layout -> Error layout + | Concat xs as layout -> + begin + try Ok (List.nth xs n) + with Failure _ -> Error ("couldn't get " ^ string_of_int n ^ "th Concat element", layout) + end + | layout -> Error ("expected Concat", layout) let atom = function | Atom x -> Ok x - | x -> Error x + | x -> Error ("expected Atom", x) (* TODO: support matching break, inline, indent *) let sequence = function | Sequence (_, x) -> Ok x - | x -> Error x + | x -> Error ("expected Sequence", x) let nth_sequence n = function - | Sequence (_, xs) as layout -> (try Ok (List.nth xs n) with Failure _ -> Error layout) - | layout -> Error layout + | Sequence (_, xs) as layout -> + begin + try Ok (List.nth xs n) + with Failure _ -> Error ("couldn't get " ^ string_of_int n ^ "th Layout element", layout) + end + | layout -> Error ("expected Sequence", layout) let pretty_space = function | IfPretty (Atom " ", Empty) -> Ok () - | x -> Error x - + | x -> Error ("expected pretty space", x) (* higher level helpers *) let body_of_function_declaration ast = return (Js_layout_generator.statement ast) >>= loc - >>= (nth_fused 5) (* skip `function`, space, name, space, params *) + >>= nth_fused 5 (* skip `function`, space, name, space, params *) >>= loc - >>= (nth_sequence 0) - >>= (nth_fused 1) (* skip { to get body *) - + >>= nth_group 1 (* skip opening { *) + >>= indent (* body is indented *) + >>= fused + >>= function + | [] -> Ok (Concat []) + | _newline :: rest -> Ok (Concat rest) + + (* skip newline after { *) end let assert_layout ~ctxt ?msg expected actual = @@ 
-239,14 +249,11 @@ let assert_layout ~ctxt ?msg expected actual = let assert_layout_result ~ctxt ?msg expected actual = match actual with | Ok layout -> assert_layout ~ctxt ?msg expected layout - | Error layout -> - assert_equal ~ctxt - ~msg:(Printf.sprintf "Unable to decode %s" (Layout_builder.printer layout)) - true false + | Error (msg, layout) -> + assert_failure (Printf.sprintf "Unable to decode %s:\n%s" (Layout_builder.printer layout) msg) let assert_layout_of_expression - ~ctxt ?msg ?(expr_ctxt=Js_layout_generator.normal_context) - expected ast = + ~ctxt ?msg ?(expr_ctxt = Js_layout_generator.normal_context) expected ast = let actual = Js_layout_generator.expression ~ctxt:expr_ctxt ast in assert_layout ~ctxt ?msg expected actual diff --git a/src/parser_utils/output/__tests__/parser_utils_output_tests.ml b/src/parser_utils/output/__tests__/parser_utils_output_tests.ml new file mode 100644 index 00000000000..24b5b9ca0ef --- /dev/null +++ b/src/parser_utils/output/__tests__/parser_utils_output_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "output" >::: [Js_layout_generator_test.tests; Layout_test.tests; Source_test.tests] + +let () = run_test_tt_main tests diff --git a/src/parser_utils/output/__tests__/source_test.ml b/src/parser_utils/output/__tests__/source_test.ml new file mode 100644 index 00000000000..93531710c2b --- /dev/null +++ b/src/parser_utils/output/__tests__/source_test.ml @@ -0,0 +1,81 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 + +let mk_source ?(source_maps = Some Source_map_config.default) () = Source.create ~source_maps () + +let mk_loc (start_line, start_col) (end_line, end_col) = + { + Loc.none with + Loc.start = { Loc.line = start_line; column = start_col }; + _end = { Loc.line = end_line; column = end_col }; + } + +let assert_contents_equal = + let printer x = x in + fun ~ctxt (expected : string) (source : Source.t) -> + assert_equal ~ctxt ~printer expected (Source.contents source) + +let assert_sourcemaps_equal = + let printer = function + | Some map -> map |> Json_sourcemap.json_of_sourcemap |> Hh_json.json_to_string ~pretty:true + | None -> "None" + in + fun ~ctxt (expected : string option) (source : Source.t) -> + let expected = + match expected with + | Some expected -> Some (Json_sourcemap.sourcemap_of_string expected) + | None -> None + in + assert_equal ~ctxt ~printer expected (Source.sourcemap source) + +let tests = + "source" + >::: [ + ( "simple_string" + >:: fun ctxt -> + let s = + mk_source () + |> Source.push_loc (mk_loc (1, 0) (1, 3)) + |> Source.add_string "foo;" + |> Source.pop_loc + in + assert_contents_equal ~ctxt "foo;" s; + assert_sourcemaps_equal + ~ctxt + (Some + {|{ + "version": 3, + "sources": [""], + "names": [], + "mappings": "AAAA" + }|}) + s ); + ( "two_strings" + >:: fun ctxt -> + let s = + mk_source () + |> Source.push_loc (mk_loc (1, 0) (1, 3)) + |> Source.add_string "foo;" + |> Source.pop_loc + |> Source.push_loc (mk_loc (1, 4) (1, 7)) + |> Source.add_string "bar;" + |> Source.pop_loc + in + assert_contents_equal ~ctxt "foo;bar;" s; + assert_sourcemaps_equal + ~ctxt + (Some + {|{ + "version": 3, + "sources": [""], + "names": [], + "mappings": "AAAA,IAAI" + }|}) + s ); + ] diff --git a/src/parser_utils/output/__tests__/test.ml b/src/parser_utils/output/__tests__/test.ml deleted file mode 100644 index 0b5eb2aed4c..00000000000 --- a/src/parser_utils/output/__tests__/test.ml +++ /dev/null @@ -1,15 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "output" >::: [ - Js_layout_generator_test.tests; - Layout_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/parser_utils/output/dune b/src/parser_utils/output/dune new file mode 100644 index 00000000000..89f814c71c2 --- /dev/null +++ b/src/parser_utils/output/dune @@ -0,0 +1,11 @@ +(library + (name flow_parser_utils_output) + (wrapped false) + (libraries + hh_json ; hack + imported_core ; hack/third-party/core + flow_common_utils + flow_common_utils_loc_utils + flow_parser + flow_parser_utils + flow_third_party_sourcemaps)) diff --git a/src/parser_utils/output/flow_prettier_comments.ml b/src/parser_utils/output/flow_prettier_comments.ml index e229cd1e28d..e12ba2d703e 100644 --- a/src/parser_utils/output/flow_prettier_comments.ml +++ b/src/parser_utils/output/flow_prettier_comments.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -14,63 +14,59 @@ module Ast = Flow_ast * It assumes that the comments are in the same file as the statements *) -module LocMap = Utils_js.LocMap +module LocMap = Loc_collections.LocMap module CommentAttachCandidate = struct - type 'M t = - { preceding: ('M, 'M) Ast.Statement.t option - ; enclosing: ('M, 'M) Ast.Statement.t option - ; following: ('M, 'M) Ast.Statement.t option } + type 'M t = { + preceding: ('M, 'M) Ast.Statement.t option; + enclosing: ('M, 'M) Ast.Statement.t option; + following: ('M, 'M) Ast.Statement.t option; + } end let node_list_of_option ~f = Option.value_map ~default:[] ~f (* comments.js#findExpressionIndexForComment *) -let find_expression_index_for_node quasis ({Loc.start= {Loc.offset; _}; _}, _) = - let start_offset = offset in +let find_expression_index_for_node quasis ({ Loc.start = orig_start; _ }, _) = try - let found, _ = + let (found, _) = List.tl quasis |> List.mapi (fun i q -> (i, q)) - |> List.find (fun (_, ({Loc.start= {Loc.offset; _}; _}, _)) -> - start_offset < offset ) + |> List.find (fun (_, ({ Loc.start; _ }, _)) -> Loc.pos_cmp orig_start start < 0) in found - 1 with Not_found -> 0 (* / comments.js#findExpressionIndexForComment *) (* comments.js#attach *) -let rec attach_comments ((_, ss, cs): (Loc.t, Loc.t) Ast.program) : +let rec attach_comments ((_, ss, cs) : (Loc.t, Loc.t) Ast.program) : Js_layout_generator.comment_map = - let comment_list, comment_ties = - List.fold_left (attach_comment ss) ([], []) cs - in + let (comment_list, comment_ties) = List.fold_left (attach_comment ss) ([], []) cs in (* The original algorithm in prettier may return some unresolved ties *) let comment_list = break_tie comment_list comment_ties in List.fold_left (fun map attach -> - let _, statement, _ = attach in - let loc, _ = statement in + let (_, statement, _) = attach in + let (loc, _) = statement in let comments_at_loc = match LocMap.find_opt loc map with | Some comments -> attach :: comments | None -> [attach] in - LocMap.add loc comments_at_loc map ) - LocMap.empty comment_list + LocMap.add loc comments_at_loc map) + LocMap.empty + comment_list -and attach_comment (statements: (Loc.t, Loc.t) Ast.Statement.t list) (comments, ties) - comment = +and attach_comment (statements : (Loc.t, Loc.t) Ast.Statement.t list) (comments, ties) comment = let attach_candidate = find_comment_attach statements comment in - let attach_candidate_fixed = - fix_template_literals comment attach_candidate - in + let attach_candidate_fixed = fix_template_literals comment attach_candidate in position_comment comments ties comment attach_candidate_fixed (* comments.js#decorateComment *) and find_comment_attach statements (comment_pos, _) = let rec find_comment statements l r candidate = - if l >= r then candidate + if l >= r then + candidate else (* In prettier it uses >> 1, which has different behavior for -1 * and shouldn't be a problem here *) @@ -78,289 +74,283 @@ and find_comment_attach statements (comment_pos, _) = let ((pivot_pos, _) as pivot) = statements.(m) in match Loc.span_compare pivot_pos comment_pos with | n when n < 0 -> - find_comment statements (m + 1) r - {candidate with CommentAttachCandidate.preceding= Some pivot} + find_comment + statements + (m + 1) + r + { candidate with CommentAttachCandidate.preceding = Some pivot } | n when n > 0 -> - find_comment statements l m - {candidate with CommentAttachCandidate.following= Some pivot} + find_comment + statements + l + m + { candidate with CommentAttachCandidate.following = Some pivot } | _ -> - let children_nodes = 
Array.of_list (get_children_nodes pivot) in - find_comment children_nodes 0 - (Array.length children_nodes) - { CommentAttachCandidate.preceding= None - ; enclosing= Some pivot - ; following= None } + let children_nodes = Array.of_list (get_children_nodes pivot) in + find_comment + children_nodes + 0 + (Array.length children_nodes) + { CommentAttachCandidate.preceding = None; enclosing = Some pivot; following = None } in let statements = Array.of_list statements in - find_comment statements 0 (Array.length statements) - {CommentAttachCandidate.preceding= None; enclosing= None; following= None} + find_comment + statements + 0 + (Array.length statements) + { CommentAttachCandidate.preceding = None; enclosing = None; following = None } (* comments.js#getSortedChildNodes *) -and get_children_nodes (statement: (Loc.t, Loc.t) Ast.Statement.t) = - let _loc, stmt = statement in - let open Ast.Statement in - match stmt with - | Block {Block.body} -> body - | If {If.test; consequent; alternate} -> - [statement_of_expression test; consequent] - @ statement_list_of_option alternate - | Labeled {Labeled.body; _} -> [body] - | With {With._object; body} -> [statement_of_expression _object; body] - | TypeAlias _ -> [] - | OpaqueType _ -> [] - | Switch {Switch.discriminant; cases} -> +and get_children_nodes (statement : (Loc.t, Loc.t) Ast.Statement.t) = + let (_loc, stmt) = statement in + Ast.Statement.( + match stmt with + | Block { Block.body } -> body + | If { If.test; consequent; alternate; comments = _ } -> + [statement_of_expression test; consequent] @ statement_list_of_option alternate + | Labeled { Labeled.body; _ } -> [body] + | With { With._object; body } -> [statement_of_expression _object; body] + | TypeAlias _ -> [] + | OpaqueType _ -> [] + | Switch { Switch.discriminant; cases } -> get_children_nodes_expr discriminant @ List.fold_left - (fun nodes (_, {Switch.Case.test; consequent}) -> - let test_nodes = - node_list_of_option ~f:get_children_nodes_expr test - in + (fun nodes (_, { Switch.Case.test; consequent }) -> + let test_nodes = node_list_of_option ~f:get_children_nodes_expr test in let consequent_nodes = - consequent |> List.map get_children_nodes |> List.flatten + consequent |> Core_list.map ~f:get_children_nodes |> List.flatten in - nodes @ test_nodes @ consequent_nodes ) - [] cases - | Return {Return.argument} -> + nodes @ test_nodes @ consequent_nodes) + [] + cases + | Return { Return.argument; comments = _ } -> node_list_of_option ~f:statement_list_of_expression argument - | Throw {Throw.argument} -> statement_list_of_expression argument - | Try {Try.block= _, {Block.body}; handler; finalizer} -> + | Throw { Throw.argument } -> statement_list_of_expression argument + | Try { Try.block = (_, { Block.body }); handler; finalizer; _ } -> let handler_nodes = - Option.value_map ~default:[] - ~f:(fun (_, {Try.CatchClause.param; body= _, {Block.body}}) -> - node_list_of_option ~f:get_children_nodes_pattern param @ body ) + Option.value_map + ~default:[] + ~f:(fun (_, { Try.CatchClause.param; body = (_, { Block.body }) }) -> + node_list_of_option ~f:get_children_nodes_pattern param @ body) handler in let finalizer_nodes = - Option.value_map ~default:[] - ~f:(fun (_, {Block.body}) -> body) - finalizer + Option.value_map ~default:[] ~f:(fun (_, { Block.body }) -> body) finalizer in body @ handler_nodes @ finalizer_nodes - | VariableDeclaration {VariableDeclaration.declarations; _} -> + | VariableDeclaration { VariableDeclaration.declarations; _ } -> List.fold_left - (fun nodes (_, 
{VariableDeclaration.Declarator.init; _}) -> - nodes @ node_list_of_option ~f:get_children_nodes_expr init ) - [] declarations - | While {While.test; body} -> [statement_of_expression test; body] - | DoWhile {DoWhile.test; body} -> [statement_of_expression test; body] - | For {For.init; test; update; body} -> + (fun nodes (_, { VariableDeclaration.Declarator.init; _ }) -> + nodes @ node_list_of_option ~f:get_children_nodes_expr init) + [] + declarations + | While { While.test; body } -> [statement_of_expression test; body] + | DoWhile { DoWhile.test; body; comments = _ } -> [statement_of_expression test; body] + | For { For.init; test; update; body } -> let init_nodes = node_list_of_option ~f:(fun init -> - let open For in - match init with - | InitDeclaration (loc, decl) -> - get_children_nodes (loc, VariableDeclaration decl) - | InitExpression expr -> get_children_nodes_expr expr ) + For.( + match init with + | InitDeclaration (loc, decl) -> get_children_nodes (loc, VariableDeclaration decl) + | InitExpression expr -> get_children_nodes_expr expr)) init in let test_nodes = node_list_of_option ~f:get_children_nodes_expr test in - let update_nodes = - node_list_of_option ~f:get_children_nodes_expr update - in + let update_nodes = node_list_of_option ~f:get_children_nodes_expr update in let body_nodes = get_children_nodes body in init_nodes @ test_nodes @ update_nodes @ body_nodes - | ForIn {ForIn.left; right; body; _} -> + | ForIn { ForIn.left; right; body; _ } -> let left_nodes = - let open ForIn in - match left with - | LeftDeclaration (loc, decl) -> - get_children_nodes (loc, VariableDeclaration decl) - | LeftPattern pattern -> get_children_nodes_pattern pattern + ForIn.( + match left with + | LeftDeclaration (loc, decl) -> get_children_nodes (loc, VariableDeclaration decl) + | LeftPattern pattern -> get_children_nodes_pattern pattern) in let right_nodes = get_children_nodes_expr right in let body_nodes = get_children_nodes body in left_nodes @ right_nodes @ body_nodes - | ForOf {ForOf.left; right; body; _} -> + | ForOf { ForOf.left; right; body; _ } -> let left_nodes = - let open ForOf in - match left with - | LeftDeclaration (loc, decl) -> - get_children_nodes (loc, VariableDeclaration decl) - | LeftPattern pattern -> get_children_nodes_pattern pattern + ForOf.( + match left with + | LeftDeclaration (loc, decl) -> get_children_nodes (loc, VariableDeclaration decl) + | LeftPattern pattern -> get_children_nodes_pattern pattern) in let right_nodes = get_children_nodes_expr right in let body_nodes = get_children_nodes body in left_nodes @ right_nodes @ body_nodes - | DeclareClass _ -> [] - | DeclareVariable _ -> [] - | DeclareFunction _ -> [] - | DeclareModule {DeclareModule.body= _, {Block.body}; _} -> body - | ExportNamedDeclaration {ExportNamedDeclaration.declaration; _} -> + | DeclareClass _ -> [] + | DeclareVariable _ -> [] + | DeclareFunction _ -> [] + | DeclareModule { DeclareModule.body = (_, { Block.body }); _ } -> body + | ExportNamedDeclaration { ExportNamedDeclaration.declaration; _ } -> statement_list_of_option declaration - | ExportDefaultDeclaration {ExportDefaultDeclaration.declaration; _} -> ( - match declaration with - | ExportDefaultDeclaration.Declaration d -> [d] - | ExportDefaultDeclaration.Expression e -> statement_list_of_expression e ) - | DeclareExportDeclaration _ -> [] - | ImportDeclaration _ -> [] - | Expression {Expression.expression; _} -> get_children_nodes_expr expression - | Debugger -> [] - | Empty -> [] - | Break _ -> [] - | ClassDeclaration clazz -> 
get_children_nodes_class clazz - | Continue _ -> [] - | DeclareInterface _ -> [] - | DeclareModuleExports _ -> [] - | DeclareTypeAlias _ -> [] - | DeclareOpaqueType _ -> [] - | FunctionDeclaration funct -> get_children_nodes_function funct - | InterfaceDeclaration _ -> [] + | ExportDefaultDeclaration { ExportDefaultDeclaration.declaration; _ } -> + (match declaration with + | ExportDefaultDeclaration.Declaration d -> [d] + | ExportDefaultDeclaration.Expression e -> statement_list_of_expression e) + | DeclareExportDeclaration _ -> [] + | ImportDeclaration _ -> [] + | Expression { Expression.expression; _ } -> get_children_nodes_expr expression + | Debugger -> [] + | Empty -> [] + | EnumDeclaration _ -> [] + | Break _ -> [] + | ClassDeclaration clazz -> get_children_nodes_class clazz + | Continue _ -> [] + | DeclareInterface _ -> [] + | DeclareModuleExports _ -> [] + | DeclareTypeAlias _ -> [] + | DeclareOpaqueType _ -> [] + | FunctionDeclaration funct -> get_children_nodes_function funct + | InterfaceDeclaration _ -> []) and get_children_nodes_expr expression = - let loc, expr = expression in - let open Ast.Expression in - match expr with - | Array {Array.elements} -> + let (loc, expr) = expression in + Ast.Expression.( + match expr with + | Array { Array.elements; comments = _ } -> List.fold_left (fun nodes element -> - nodes - @ Option.value_map ~default:[] - ~f:get_children_nodes_expression_or_spread element ) - [] elements - | ArrowFunction func -> get_children_nodes_function func - | Assignment {Assignment.left; right; _} -> + nodes @ Option.value_map ~default:[] ~f:get_children_nodes_expression_or_spread element) + [] + elements + | ArrowFunction func -> get_children_nodes_function func + | Assignment { Assignment.left; right; _ } -> get_children_nodes_pattern left @ get_children_nodes_expr right - | Binary {Binary.left; right; _} -> + | Binary { Binary.left; right; _ } -> get_children_nodes_expr left @ get_children_nodes_expr right - | Call {Call.callee; arguments; _} -> + | Call { Call.callee; arguments; _ } -> get_children_nodes_expr callee @ List.fold_left (fun nodes eos -> nodes @ get_children_nodes_expression_or_spread eos) - [] arguments - | Class clazz -> get_children_nodes_class clazz - | Comprehension {Comprehension.blocks; filter} -> + [] + arguments + | Class clazz -> get_children_nodes_class clazz + | Comprehension { Comprehension.blocks; filter } -> let block_nodes = get_children_nodes_comprehension_block_list blocks in - let filter_nodes = - node_list_of_option ~f:get_children_nodes_expr filter - in + let filter_nodes = node_list_of_option ~f:get_children_nodes_expr filter in block_nodes @ filter_nodes - | Conditional {Conditional.consequent; alternate; _} -> + | Conditional { Conditional.consequent; alternate; _ } -> get_children_nodes_expr consequent @ get_children_nodes_expr alternate - | Function func -> get_children_nodes_function func - | Generator {Generator.blocks; filter} -> - get_children_nodes_expr - (loc, Comprehension {Comprehension.blocks; filter}) - | Identifier _ -> [] - | Import i -> get_children_nodes_expr i - | JSXElement {Ast.JSX.openingElement; children; _} -> - get_children_nodes_jsx_opening openingElement - @ get_children_nodes_jsx_child_list children - | JSXFragment {Ast.JSX.frag_children; _} -> - get_children_nodes_jsx_child_list frag_children - | Literal _ -> [] - | Logical {Logical.left; right; _} -> + | Function func -> get_children_nodes_function func + | Generator { Generator.blocks; filter } -> + get_children_nodes_expr (loc, Comprehension 
{ Comprehension.blocks; filter }) + | Identifier _ -> [] + | Import i -> get_children_nodes_expr i + | JSXElement { Ast.JSX.openingElement; children; _ } -> + get_children_nodes_jsx_opening openingElement @ get_children_nodes_jsx_child_list children + | JSXFragment { Ast.JSX.frag_children; _ } -> get_children_nodes_jsx_child_list frag_children + | Literal _ -> [] + | Logical { Logical.left; right; _ } -> get_children_nodes_expr left @ get_children_nodes_expr right - | Member member -> get_children_nodes_member member - | MetaProperty _ -> [] - | New {New.callee; arguments; _} -> + | Member member -> get_children_nodes_member member + | MetaProperty _ -> [] + | New { New.callee; arguments; _ } -> get_children_nodes_expr callee @ List.fold_left (fun nodes eos -> nodes @ get_children_nodes_expression_or_spread eos) - [] arguments - | Object {Object.properties} -> + [] + arguments + | Object { Object.properties; comments = _ } -> List.fold_left (fun nodes property -> nodes @ match property with - | Object.SpreadProperty (_, {Object.SpreadProperty.argument}) -> - get_children_nodes_expr argument + | Object.SpreadProperty (_, { Object.SpreadProperty.argument }) -> + get_children_nodes_expr argument | Object.Property (_, property) -> - match property with - | Object.Property.Init {value; _} -> get_children_nodes_expr value - | Object.Property.Method {value= _, func; _} -> - get_children_nodes_function func - | Object.Property.Get {value= _, func; _} -> - get_children_nodes_function func - | Object.Property.Set {value= _, func; _} -> - get_children_nodes_function func ) - [] properties - | OptionalCall {OptionalCall.call= {Call.callee; arguments; _}; _} -> + (match property with + | Object.Property.Init { value; _ } -> get_children_nodes_expr value + | Object.Property.Method { value = (_, func); _ } -> get_children_nodes_function func + | Object.Property.Get { value = (_, func); _ } -> get_children_nodes_function func + | Object.Property.Set { value = (_, func); _ } -> get_children_nodes_function func)) + [] + properties + | OptionalCall { OptionalCall.call = { Call.callee; arguments; _ }; _ } -> get_children_nodes_expr callee @ List.fold_left (fun nodes eos -> nodes @ get_children_nodes_expression_or_spread eos) - [] arguments - | OptionalMember {OptionalMember.member; _} -> - get_children_nodes_member member - | Sequence {Sequence.expressions} -> - List.fold_left - (fun nodes eos -> nodes @ get_children_nodes_expr eos) - [] expressions - | Super -> [] - | TaggedTemplate {TaggedTemplate.tag; quasi= loc, quasi} -> - get_children_nodes_expr tag - @ get_children_nodes_expr (loc, TemplateLiteral quasi) - | TemplateLiteral {TemplateLiteral.expressions; _} -> - expressions |> List.map get_children_nodes_expr |> List.flatten - | This -> [] - | TypeCast {TypeCast.expression; _} -> get_children_nodes_expr expression - | Unary {Unary.argument; _} -> get_children_nodes_expr argument - | Update {Update.argument; _} -> get_children_nodes_expr argument - | Yield {Yield.argument; _} -> - node_list_of_option ~f:get_children_nodes_expr argument + [] + arguments + | OptionalMember { OptionalMember.member; _ } -> get_children_nodes_member member + | Sequence { Sequence.expressions } -> + List.fold_left (fun nodes eos -> nodes @ get_children_nodes_expr eos) [] expressions + | Super -> [] + | TaggedTemplate { TaggedTemplate.tag; quasi = (loc, quasi) } -> + get_children_nodes_expr tag @ get_children_nodes_expr (loc, TemplateLiteral quasi) + | TemplateLiteral { TemplateLiteral.expressions; _ } -> + expressions |> 
Core_list.map ~f:get_children_nodes_expr |> List.flatten + | This -> [] + | TypeCast { TypeCast.expression; _ } -> get_children_nodes_expr expression + | Unary { Unary.argument; _ } -> get_children_nodes_expr argument + | Update { Update.argument; _ } -> get_children_nodes_expr argument + | Yield { Yield.argument; _ } -> node_list_of_option ~f:get_children_nodes_expr argument) -and get_children_nodes_function {Ast.Function.body; _} = +and get_children_nodes_function { Ast.Function.body; _ } = match body with - | Ast.Function.BodyBlock (_, {Ast.Statement.Block.body}) -> body + | Ast.Function.BodyBlock (_, { Ast.Statement.Block.body }) -> body | Ast.Function.BodyExpression expr -> get_children_nodes_expr expr and get_children_nodes_expression_or_spread eos = match eos with | Ast.Expression.Expression expr -> get_children_nodes_expr expr - | Ast.Expression.Spread (_, {Ast.Expression.SpreadElement.argument}) -> - get_children_nodes_expr argument + | Ast.Expression.Spread (_, { Ast.Expression.SpreadElement.argument }) -> + get_children_nodes_expr argument and get_children_nodes_pattern (_loc, pattern) = - let open Ast.Pattern in - match pattern with - | Object {Object.properties; _} -> + Ast.Pattern.( + match pattern with + | Object { Object.properties; _ } -> properties - |> List.map (fun property -> + |> Core_list.map ~f:(fun property -> match property with - | Object.Property (_, {Object.Property.key; pattern; _}) -> - let key_nodes = - match key with - | Object.Property.Literal _ -> [] - | Object.Property.Identifier _ -> [] - | Object.Property.Computed expr -> - get_children_nodes_expr expr - in - let pattern_nodes = get_children_nodes_pattern pattern in - key_nodes @ pattern_nodes - | Object.RestProperty (_, {Object.RestProperty.argument}) -> - get_children_nodes_pattern argument ) + | Object.Property (_, { Object.Property.key; pattern; default; shorthand = _ }) -> + let key_nodes = + match key with + | Object.Property.Literal _ -> [] + | Object.Property.Identifier _ -> [] + | Object.Property.Computed expr -> get_children_nodes_expr expr + in + let pattern_nodes = get_children_nodes_pattern pattern in + let default_nodes = node_list_of_option ~f:get_children_nodes_expr default in + key_nodes @ pattern_nodes @ default_nodes + | Object.RestProperty (_, { Object.RestProperty.argument }) -> + get_children_nodes_pattern argument) |> List.flatten - | Array {Array.elements; _} -> + | Array { Array.elements; _ } -> elements - |> List.map (fun element_opt -> + |> Core_list.map ~f:(fun element_opt -> match element_opt with - | Some (Array.Element pattern) -> - get_children_nodes_pattern pattern - | Some (Array.RestElement (_, {Array.RestElement.argument})) -> - get_children_nodes_pattern argument - | None -> [] ) + | Some (Array.Element (_, { Array.Element.argument; default })) -> + let pattern_nodes = get_children_nodes_pattern argument in + let default_nodes = node_list_of_option ~f:get_children_nodes_expr default in + pattern_nodes @ default_nodes + | Some (Array.RestElement (_, { Array.RestElement.argument })) -> + get_children_nodes_pattern argument + | None -> []) |> List.flatten - | Assignment {Assignment.left; right} -> - get_children_nodes_pattern left @ get_children_nodes_expr right - | Identifier _ -> [] - | Expression expr -> get_children_nodes_expr expr + | Identifier _ -> [] + | Expression expr -> get_children_nodes_expr expr) -and get_children_nodes_class {Ast.Class.body= _, {Ast.Class.Body.body}; _} = +and get_children_nodes_class { Ast.Class.body = (_, { Ast.Class.Body.body }); _ } 
= List.fold_left (fun nodes member -> nodes @ match member with - | Ast.Class.Body.Method (_, {Ast.Class.Method.value= _, funct; _}) -> - get_children_nodes_function funct - | Ast.Class.Body.Property (_, {Ast.Class.Property.value; _}) -> - node_list_of_option ~f:statement_list_of_expression value - | Ast.Class.Body.PrivateField (_, {Ast.Class.PrivateField.value; _}) -> - node_list_of_option ~f:statement_list_of_expression value ) - [] body + | Ast.Class.Body.Method (_, { Ast.Class.Method.value = (_, funct); _ }) -> + get_children_nodes_function funct + | Ast.Class.Body.Property (_, { Ast.Class.Property.value; _ }) -> + node_list_of_option ~f:statement_list_of_expression value + | Ast.Class.Body.PrivateField (_, { Ast.Class.PrivateField.value; _ }) -> + node_list_of_option ~f:statement_list_of_expression value) + [] + body -and get_children_nodes_member {Ast.Expression.Member._object; property; _} = +and get_children_nodes_member { Ast.Expression.Member._object; property; _ } = Ast.Expression.Member.( match property with | PropertyIdentifier _ -> [] @@ -369,128 +359,126 @@ and get_children_nodes_member {Ast.Expression.Member._object; property; _} = @ get_children_nodes_expr _object and get_children_nodes_comprehension_block_list - (blocks: (Loc.t, Loc.t) Ast.Expression.Comprehension.Block.t list) = - let open Ast.Expression.Comprehension in - blocks - |> List.map (fun (_, {Block.left; right; _}) -> - get_children_nodes_pattern left @ get_children_nodes_expr right ) - |> List.flatten + (blocks : (Loc.t, Loc.t) Ast.Expression.Comprehension.Block.t list) = + Ast.Expression.Comprehension.( + blocks + |> Core_list.map ~f:(fun (_, { Block.left; right; _ }) -> + get_children_nodes_pattern left @ get_children_nodes_expr right) + |> List.flatten) -and get_children_nodes_jsx_opening (_loc, {Ast.JSX.Opening.attributes; _}) = - let open Ast.JSX in - attributes - |> List.map (fun attr -> - match attr with - | Opening.Attribute (_, {Attribute.value; _}) -> +and get_children_nodes_jsx_opening (_loc, { Ast.JSX.Opening.attributes; _ }) = + Ast.JSX.( + attributes + |> Core_list.map ~f:(fun attr -> + match attr with + | Opening.Attribute (_, { Attribute.value; _ }) -> node_list_of_option ~f:(fun value -> match value with | Attribute.ExpressionContainer - ( _ - , { ExpressionContainer.expression= - ExpressionContainer.Expression expression } ) -> - get_children_nodes_expr expression - | _ -> [] ) + ( _, + { + ExpressionContainer.expression = ExpressionContainer.Expression expression; + } ) -> + get_children_nodes_expr expression + | _ -> []) value - | Opening.SpreadAttribute _ -> [] ) - |> List.flatten + | Opening.SpreadAttribute _ -> []) + |> List.flatten) -and get_children_nodes_jsx_child_list children = - let open Ast.JSX in - children - |> List.map (fun (loc, child) -> - match child with - | Element e -> - get_children_nodes_expr (loc, Ast.Expression.JSXElement e) - | Fragment f -> - get_children_nodes_expr (loc, Ast.Expression.JSXFragment f) - | ExpressionContainer {ExpressionContainer.expression} -> ( - match expression with - | ExpressionContainer.Expression expression -> - get_children_nodes_expr expression - | _ -> [] ) - | SpreadChild expr -> get_children_nodes_expr expr - | Text _ -> [] ) - |> List.flatten +and get_children_nodes_jsx_child_list (_children_loc, children) = + Ast.JSX.( + children + |> Core_list.map ~f:(fun (loc, child) -> + match child with + | Element e -> get_children_nodes_expr (loc, Ast.Expression.JSXElement e) + | Fragment f -> get_children_nodes_expr (loc, 
Ast.Expression.JSXFragment f) + | ExpressionContainer { ExpressionContainer.expression } -> + (match expression with + | ExpressionContainer.Expression expression -> get_children_nodes_expr expression + | _ -> []) + | SpreadChild expr -> get_children_nodes_expr expr + | Text _ -> []) + |> List.flatten) -and statement_of_expression (expression: (Loc.t, Loc.t) Ast.Expression.t) : +and statement_of_expression (expression : (Loc.t, Loc.t) Ast.Expression.t) : (Loc.t, Loc.t) Ast.Statement.t = - let pos, _ = expression in - Ast.Statement.(pos, Expression {Expression.expression; directive= None}) + let (pos, _) = expression in + Ast.Statement.(pos, Expression { Expression.expression; directive = None }) -and statement_list_of_expression (expression: (Loc.t, Loc.t) Ast.Expression.t) : +and statement_list_of_expression (expression : (Loc.t, Loc.t) Ast.Expression.t) : (Loc.t, Loc.t) Ast.Statement.t list = [statement_of_expression expression] -and statement_list_of_option = function Some x -> [x] | None -> [] +and statement_list_of_option = function + | Some x -> [x] + | None -> [] (* / comments.js#getSortedChildNodes *) and fix_template_literals comment attach = match attach with - | { CommentAttachCandidate.enclosing= - Some - ( _ - , Ast.Statement.Expression - { Ast.Statement.Expression.expression= - _, Ast.Expression.TemplateLiteral lit; _ } ); _ } -> - retain_comments_inside_template_literal lit comment attach + | { + CommentAttachCandidate.enclosing = + Some + ( _, + Ast.Statement.Expression + { Ast.Statement.Expression.expression = (_, Ast.Expression.TemplateLiteral lit); _ } ); + _; + } -> + retain_comments_inside_template_literal lit comment attach | _ -> attach and retain_comments_inside_template_literal - {Ast.Expression.TemplateLiteral.quasis; _} comment - {CommentAttachCandidate.preceding; following; enclosing} = + { Ast.Expression.TemplateLiteral.quasis; _ } + comment + { CommentAttachCandidate.preceding; following; enclosing } = let comment_index = find_expression_index_for_node quasis comment in let check_node node = match node with - | Some n when find_expression_index_for_node quasis n <> comment_index -> - None + | Some n when find_expression_index_for_node quasis n <> comment_index -> None | any -> any in let preceding = check_node preceding in let following = check_node following in - {CommentAttachCandidate.preceding; following; enclosing} + { CommentAttachCandidate.preceding; following; enclosing } (* / comments.js#decorateComment *) and position_comment comments ties comment attach_candidate = - let {CommentAttachCandidate.preceding; following; enclosing} = - attach_candidate - in + let { CommentAttachCandidate.preceding; following; enclosing } = attach_candidate in match (preceding, following, enclosing) with (* Patapam. 
FIXME the original attaches to the root of the AST *) - | None, None, None -> - raise Not_found + | (None, None, None) -> raise Not_found (* Everything has precedence over enclosing *) - | None, Some following, Some _enclosing -> - (comments @ [(Js_layout_generator.Following, following, comment)], ties) - | Some preceding, None, Some _enclosing -> - (comments @ [(Js_layout_generator.Preceding, preceding, comment)], ties) + | (None, Some following, Some _enclosing) -> + (comments @ [(Js_layout_generator.Following, following, comment)], ties) + | (Some preceding, None, Some _enclosing) -> + (comments @ [(Js_layout_generator.Preceding, preceding, comment)], ties) (* No modifications required *) - | Some preceding, None, None -> - (comments @ [(Js_layout_generator.Preceding, preceding, comment)], ties) - | None, Some following, None -> - (comments @ [(Js_layout_generator.Following, following, comment)], ties) - | None, None, Some enclosing -> - (comments @ [(Js_layout_generator.Enclosing, enclosing, comment)], ties) + | (Some preceding, None, None) -> + (comments @ [(Js_layout_generator.Preceding, preceding, comment)], ties) + | (None, Some following, None) -> + (comments @ [(Js_layout_generator.Following, following, comment)], ties) + | (None, None, Some enclosing) -> + (comments @ [(Js_layout_generator.Enclosing, enclosing, comment)], ties) (* Where the magic happens *) - | Some preceding, Some following, _ -> - resolve_tie comments ties comment preceding following + | (Some preceding, Some following, _) -> resolve_tie comments ties comment preceding following and resolve_tie comments ties comment preceding following = - let following_loc, _ = following in + let (following_loc, _) = following in let count = List.length ties in match count with | 0 -> (comments, ties @ [(comment, preceding, following)]) | count -> - let _, _, (following_loc_last, _) = List.nth ties (count - 1) in - match Loc.compare following_loc following_loc_last with - | 0 -> (comments, ties @ [(comment, preceding, following)]) - | _ -> (break_tie comments ties, [(comment, preceding, following)]) + let (_, _, (following_loc_last, _)) = List.nth ties (count - 1) in + (match Loc.compare following_loc following_loc_last with + | 0 -> (comments, ties @ [(comment, preceding, following)]) + | _ -> (break_tie comments ties, [(comment, preceding, following)])) (* comments.js#breakTies *) and break_tie comments = function | [] -> comments | ties -> - (* tl;dr see if comments with the same preceding and following nodes are separated by newline, + (* tl;dr see if comments with the same preceding and following nodes are separated by newline, * Those comments after the newline are leading, the rest are trailing * * The original algorithm uses text lookup to break the ties. @@ -504,37 +492,41 @@ and break_tie comments = function * // gaps (or other comments). Gaps should only contain whitespace or open * // parentheses. 
*) - let reverse_ties = List.rev ties in - let leading, trailing = - List.fold_left - (fun (leading, trailing) comment -> - (* If we have not found a newline gap yet *) - if trailing = [] then - let leading_comment, _, _ = List.hd leading in - let {Loc._end= {Loc.line; _}; _}, _ = leading_comment in - let line_previous = line in - let comment_original, _, _ = comment in - let {Loc.start= {Loc.line; _}; _}, _ = comment_original in - if abs (line_previous - line) >= 2 then - (leading, [comment] @ trailing) - else ([comment] @ leading, trailing) - else (leading, [comment] @ trailing) ) - ([List.hd reverse_ties], []) - (List.tl reverse_ties) - in - let comments = - List.fold_left - (fun comments (comment, preceding, _) -> - comments @ [(Js_layout_generator.Preceding, preceding, comment)] ) - comments trailing - in - let comments = - List.fold_left - (fun comments (comment, _, following) -> - comments @ [(Js_layout_generator.Following, following, comment)] ) - comments leading - in - comments + let reverse_ties = List.rev ties in + let (leading, trailing) = + List.fold_left + (fun (leading, trailing) comment -> + (* If we have not found a newline gap yet *) + if trailing = [] then + let (leading_comment, _, _) = List.hd leading in + let ({ Loc._end = { Loc.line; _ }; _ }, _) = leading_comment in + let line_previous = line in + let (comment_original, _, _) = comment in + let ({ Loc.start = { Loc.line; _ }; _ }, _) = comment_original in + if abs (line_previous - line) >= 2 then + (leading, [comment] @ trailing) + else + ([comment] @ leading, trailing) + else + (leading, [comment] @ trailing)) + ([List.hd reverse_ties], []) + (List.tl reverse_ties) + in + let comments = + List.fold_left + (fun comments (comment, preceding, _) -> + comments @ [(Js_layout_generator.Preceding, preceding, comment)]) + comments + trailing + in + let comments = + List.fold_left + (fun comments (comment, _, following) -> + comments @ [(Js_layout_generator.Following, following, comment)]) + comments + leading + in + comments (* / comments.js#breakTies *) (* / comments.js#attach *) diff --git a/src/parser_utils/output/js_layout_generator.ml b/src/parser_utils/output/js_layout_generator.ml index f722105e53e..d907b643619 100644 --- a/src/parser_utils/output/js_layout_generator.ml +++ b/src/parser_utils/output/js_layout_generator.ml @@ -1,15 +1,13 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Layout - -module LocMap = Utils_js.LocMap +module LocMap = Loc_collections.LocMap (* There are some cases where expressions must be wrapped in parens to eliminate ambiguity. 
We pass whether we're in one of these special cases down through @@ -20,27 +18,31 @@ type expression_context = { left: expression_context_left; group: expression_context_group; } + and expression_context_left = | Normal_left | In_expression_statement (* `(function x(){});` would become a declaration *) | In_tagged_template (* `(new a)``` would become `new (a``)` *) | In_plus_op (* `x+(+y)` would become `(x++)y` *) - | In_minus_op (* `x-(-y)` would become `(x--)y` *) + | In_minus_op + +(* `x-(-y)` would become `(x--)y` *) and expression_context_group = | Normal_group | In_arrow_func (* `() => ({a: b})` would become `() => {a: b}` *) - | In_for_init (* `for ((x in y);;);` would become a for-in *) + | In_for_init + +(* `for ((x in y);;);` would become a for-in *) type comment_attach = - | Preceding - | Enclosing - | Following + | Preceding + | Enclosing + | Following type comment_map = - (comment_attach * (Loc.t, Loc.t) Ast.Statement.t * Loc.t Ast.Comment.t) - list LocMap.t + (comment_attach * (Loc.t, Loc.t) Ast.Statement.t * Loc.t Ast.Comment.t) list LocMap.t -let normal_context = { left = Normal_left; group = Normal_group; } +let normal_context = { left = Normal_left; group = Normal_group } (* Some contexts only matter to the left-most token. If we output some other token, like an `=`, then we can reset the context. Note that all contexts @@ -49,37 +51,43 @@ let normal_context = { left = Normal_left; group = Normal_group; } let context_after_token ctxt = { ctxt with left = Normal_left } (* JS layout helpers *) -let not_supported loc message = failwith (message ^ " at " ^ Loc.to_string loc) +let not_supported loc message = failwith (message ^ " at " ^ Loc.debug_to_string loc) + let with_semicolon node = fuse [node; Atom ";"] + let with_pretty_semicolon node = fuse [node; IfPretty (Atom ";", Empty)] -let wrap_in_parens item = - fuse [ - Atom "("; - Sequence ({ seq with break=Break_if_needed }, [item]); - Atom ")"; - ] -let wrap_in_parens_on_break item = list - ~wrap:(IfBreak (Atom "(", Empty), IfBreak (Atom ")", Empty)) - [item] -let statement_with_test name test body = fuse [ - Atom name; - pretty_space; - wrap_in_parens test; - pretty_space; - body; - ] + +let wrap_in_parens item = group [Atom "("; item; Atom ")"] + +let wrap_in_parens_on_break item = + wrap_and_indent (IfBreak (Atom "(", Empty), IfBreak (Atom ")", Empty)) [item] let option f = function | Some v -> f v | None -> Empty +let hint f = function + | Ast.Type.Available v -> f v + | Ast.Type.Missing _ -> Empty + let deoptionalize l = - List.rev (List.fold_left (fun acc -> function None -> acc | Some x -> x::acc) [] l) + List.rev + (List.fold_left + (fun acc -> function + | None -> acc + | Some x -> x :: acc) + [] + l) (* See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_Precedence *) let max_precedence = 20 -let min_precedence = 1 (* 0 means always parenthesize, which is not a precedence decision *) + +let min_precedence = 1 + +(* 0 means always parenthesize, which is not a precedence decision *) + let precedence_of_assignment = 3 + let precedence_of_expression expr = let module E = Ast.Expression in match expr with @@ -94,44 +102,47 @@ let precedence_of_expression expr = | (_, E.Object _) | (_, E.Super) | (_, E.TemplateLiteral _) - | (_, E.This) -> max_precedence - + | (_, E.This) -> + max_precedence (* Expressions involving operators *) | (_, E.Member _) | (_, E.OptionalMember _) | (_, E.MetaProperty _) - | (_, E.New _) -> 19 + | (_, E.New _) -> + 19 | (_, E.Call _) | (_, E.OptionalCall _) 
| (_, E.TaggedTemplate _) - | (_, E.Import _) -> 18 + | (_, E.Import _) -> + 18 | (_, E.Update { E.Update.prefix = false; _ }) -> 17 | (_, E.Update { E.Update.prefix = true; _ }) -> 16 | (_, E.Unary _) -> 16 | (_, E.Binary { E.Binary.operator; _ }) -> - begin match operator with - | E.Binary.Exp -> 15 - | E.Binary.Mult -> 14 - | E.Binary.Div -> 14 - | E.Binary.Mod -> 14 - | E.Binary.Plus -> 13 - | E.Binary.Minus -> 13 - | E.Binary.LShift -> 12 - | E.Binary.RShift -> 12 - | E.Binary.RShift3 -> 12 - | E.Binary.LessThan -> 11 - | E.Binary.LessThanEqual -> 11 - | E.Binary.GreaterThan -> 11 - | E.Binary.GreaterThanEqual -> 11 - | E.Binary.In -> 11 - | E.Binary.Instanceof -> 11 - | E.Binary.Equal -> 10 - | E.Binary.NotEqual -> 10 - | E.Binary.StrictEqual -> 10 - | E.Binary.StrictNotEqual -> 10 - | E.Binary.BitAnd -> 9 - | E.Binary.Xor -> 8 - | E.Binary.BitOr -> 7 + begin + match operator with + | E.Binary.Exp -> 15 + | E.Binary.Mult -> 14 + | E.Binary.Div -> 14 + | E.Binary.Mod -> 14 + | E.Binary.Plus -> 13 + | E.Binary.Minus -> 13 + | E.Binary.LShift -> 12 + | E.Binary.RShift -> 12 + | E.Binary.RShift3 -> 12 + | E.Binary.LessThan -> 11 + | E.Binary.LessThanEqual -> 11 + | E.Binary.GreaterThan -> 11 + | E.Binary.GreaterThanEqual -> 11 + | E.Binary.In -> 11 + | E.Binary.Instanceof -> 11 + | E.Binary.Equal -> 10 + | E.Binary.NotEqual -> 10 + | E.Binary.StrictEqual -> 10 + | E.Binary.StrictNotEqual -> 10 + | E.Binary.BitAnd -> 9 + | E.Binary.Xor -> 8 + | E.Binary.BitOr -> 7 end | (_, E.Logical { E.Logical.operator = E.Logical.And; _ }) -> 6 | (_, E.Logical { E.Logical.operator = E.Logical.Or; _ }) -> 5 @@ -139,40 +150,37 @@ let precedence_of_expression expr = | (_, E.Conditional _) -> 4 | (_, E.Assignment _) -> precedence_of_assignment | (_, E.Yield _) -> 2 - (* not sure how low this _needs_ to be, but it can at least be higher than 0 because it binds tighter than a sequence expression. it must be lower than a member expression, though, because `()=>{}.x` is invalid. *) | (_, E.ArrowFunction _) -> 1 - | (_, E.Sequence _) -> 0 - (* Expressions that always need parens (probably) *) | (_, E.Comprehension _) | (_, E.Generator _) - | (_, E.TypeCast _) -> 0 + | (_, E.TypeCast _) -> + 0 let definitely_needs_parens = let module E = Ast.Expression in - let context_needs_parens ctxt expr = match ctxt with | { group = In_arrow_func; _ } -> (* an object body expression in an arrow function needs parens to not make it become a block with label statement. *) - begin match expr with - | (_, E.Object _) -> true - | _ -> false + begin + match expr with + | (_, E.Object _) -> true + | _ -> false end - | { group = In_for_init; _ } -> (* an `in` binary expression in the init of a for loop needs parens to not make the for loop become a for-in loop. *) - begin match expr with - | (_, E.Binary { E.Binary.operator = E.Binary.In; _ }) -> true - | _ -> false + begin + match expr with + | (_, E.Binary { E.Binary.operator = E.Binary.In; _ }) -> true + | _ -> false end - | { left = In_expression_statement; _ } -> (* functions (including async functions, but not arrow functions) and classes must be wrapped in parens to avoid ambiguity with function and @@ -180,54 +188,57 @@ let definitely_needs_parens = blocks. https://tc39.github.io/ecma262/#prod-ExpressionStatement *) - begin match expr with - | _, E.Class _ - | _, E.Function _ - | _, E.Object _ - | _, E.Assignment { E.Assignment. 
- left=(_, Ast.Pattern.Object _); _ - } -> true - | _ -> false + begin + match expr with + | (_, E.Class _) + | (_, E.Function _) + | (_, E.Object _) + | (_, E.Assignment { E.Assignment.left = (_, Ast.Pattern.Object _); _ }) -> + true + | _ -> false end - | { left = In_tagged_template; _ } -> - begin match expr with - | _, E.Class _ - | _, E.Function _ - | _, E.New _ - | _, E.Import _ - | _, E.Object _ -> true - | _ -> false + begin + match expr with + | (_, E.Class _) + | (_, E.Function _) + | (_, E.New _) + | (_, E.Import _) + | (_, E.Object _) -> + true + | _ -> false end - | { left = In_minus_op; _ } -> - begin match expr with - | _, E.Unary { E.Unary.operator = E.Unary.Minus; _ } - | _, E.Update { E.Update.operator = E.Update.Decrement; prefix = true; _ } - -> true - | _ -> false + begin + match expr with + | (_, E.Unary { E.Unary.operator = E.Unary.Minus; _ }) + | (_, E.Update { E.Update.operator = E.Update.Decrement; prefix = true; _ }) -> + true + | _ -> false end - | { left = In_plus_op; _ } -> - begin match expr with - | _, E.Unary { E.Unary.operator = E.Unary.Plus; _ } - | _, E.Update { E.Update.operator = E.Update.Increment; prefix = true; _ } - -> true - | _ -> false + begin + match expr with + | (_, E.Unary { E.Unary.operator = E.Unary.Plus; _ }) + | (_, E.Update { E.Update.operator = E.Update.Increment; prefix = true; _ }) -> + true + | _ -> false end - | { left = Normal_left; group = Normal_group } -> false in - fun ~precedence ctxt expr -> precedence_of_expression expr < precedence || context_needs_parens ctxt expr (* TODO: this only needs to be shallow; we don't need to walk into function or class bodies, for example. *) -class contains_call_mapper result_ref = object - inherit Flow_ast_mapper.mapper - method! call _loc expr = result_ref := true; expr -end +class contains_call_mapper result_ref = + object + inherit [Loc.t] Flow_ast_mapper.mapper + + method! 
call _loc expr = + result_ref := true; + expr + end let contains_call_expression expr = (* TODO: use a fold *) @@ -238,160 +249,241 @@ let contains_call_expression expr = (* returns all of the comments that start before `loc`, and discards the rest *) let comments_before_loc loc comments = let rec helper loc acc = function - | ((c_loc, _) as comment)::rest when Loc.compare c_loc loc < 0 -> helper loc (comment::acc) rest - | _ -> List.rev acc + | ((c_loc, _) as comment) :: rest when Loc.compare c_loc loc < 0 -> + helper loc (comment :: acc) rest + | _ -> List.rev acc in helper loc [] comments type statement_or_comment = -| Statement of (Loc.t, Loc.t) Ast.Statement.t -| Comment of Loc.t Ast.Comment.t + | Statement of (Loc.t, Loc.t) Ast.Statement.t + | Comment of Loc.t Ast.Comment.t let better_quote = let rec count (double, single) str i = - if i < 0 then (double, single) else - let acc = match str.[i] with - | '"' -> succ double, single - | '\'' -> double, succ single - | _ -> double, single - in - count acc str (pred i) + if i < 0 then + (double, single) + else + let acc = + match str.[i] with + | '"' -> (succ double, single) + | '\'' -> (double, succ single) + | _ -> (double, single) + in + count acc str (pred i) in fun str -> - let double, single = count (0, 0) str (String.length str - 1) in - if double > single then "'" else "\"" + let (double, single) = count (0, 0) str (String.length str - 1) in + if double > single then + "'" + else + "\"" let utf8_escape = - let f ~quote buf _i = function - | Wtf8.Malformed -> buf - | Wtf8.Point cp -> - begin match cp with - (* SingleEscapeCharacter: http://www.ecma-international.org/ecma-262/6.0/#table-34 *) - | 0x0 -> Buffer.add_string buf "\\0"; buf - | 0x8 -> Buffer.add_string buf "\\b"; buf - | 0x9 -> Buffer.add_string buf "\\t"; buf - | 0xA -> Buffer.add_string buf "\\n"; buf - | 0xB -> Buffer.add_string buf "\\v"; buf - | 0xC -> Buffer.add_string buf "\\f"; buf - | 0xD -> Buffer.add_string buf "\\r"; buf - | 0x22 when quote = "\"" -> Buffer.add_string buf "\\\""; buf - | 0x27 when quote = "'" -> Buffer.add_string buf "\\'"; buf - | 0x5C -> Buffer.add_string buf "\\\\"; buf - - (* printable ascii *) - | n when 0x1F < n && n < 0x7F -> - Buffer.add_char buf (Char.unsafe_chr cp); buf - - (* basic multilingual plane, 2 digits *) - | n when n < 0x100 -> - Printf.bprintf buf "\\x%02x" n; buf - - (* basic multilingual plane, 4 digits *) - | n when n < 0x10000 -> - Printf.bprintf buf "\\u%04x" n; buf - - (* supplemental planes *) - | n -> - (* ES5 does not support the \u{} syntax, so print surrogate pairs + (* a null character can be printed as \x00 or \0. but if the next character is an ASCII digit, + then using \0 would create \01 (for example), which is a legacy octal 1. so, rather than simply + fold over the codepoints, we have to look ahead at the next character as well. 
*) + let lookahead_fold_wtf_8 : + ?pos:int -> + ?len:int -> + (next:(int * Wtf8.codepoint) option -> 'a -> int -> Wtf8.codepoint -> 'a) -> + 'a -> + string -> + 'a = + let lookahead ~f (prev, buf) i cp = + let next = Some (i, cp) in + let buf = + match prev with + | Some (prev_i, prev_cp) -> f ~next buf prev_i prev_cp + | None -> buf + in + (next, buf) + in + fun ?pos ?len f acc str -> + str + |> Wtf8.fold_wtf_8 ?pos ?len (lookahead ~f) (None, acc) + |> fun (last, acc) -> + match last with + | Some (i, cp) -> f ~next:None acc i cp + | None -> acc + in + let f ~quote ~next buf _i = function + | Wtf8.Malformed -> buf + | Wtf8.Point cp -> + begin + match cp with + (* SingleEscapeCharacter: http://www.ecma-international.org/ecma-262/6.0/#table-34 *) + | 0x0 -> + let zero = + match next with + | Some (_i, Wtf8.Point n) when 0x30 <= n && n <= 0x39 -> "\\x00" + | _ -> "\\0" + in + Buffer.add_string buf zero; + buf + | 0x8 -> + Buffer.add_string buf "\\b"; + buf + | 0x9 -> + Buffer.add_string buf "\\t"; + buf + | 0xA -> + Buffer.add_string buf "\\n"; + buf + | 0xB -> + Buffer.add_string buf "\\v"; + buf + | 0xC -> + Buffer.add_string buf "\\f"; + buf + | 0xD -> + Buffer.add_string buf "\\r"; + buf + | 0x22 when quote = "\"" -> + Buffer.add_string buf "\\\""; + buf + | 0x27 when quote = "'" -> + Buffer.add_string buf "\\'"; + buf + | 0x5C -> + Buffer.add_string buf "\\\\"; + buf + (* printable ascii *) + | n when 0x1F < n && n < 0x7F -> + Buffer.add_char buf (Char.unsafe_chr cp); + buf + (* basic multilingual plane, 2 digits *) + | n when n < 0x100 -> + Printf.bprintf buf "\\x%02x" n; + buf + (* basic multilingual plane, 4 digits *) + | n when n < 0x10000 -> + Printf.bprintf buf "\\u%04x" n; + buf + (* supplemental planes *) + | n -> + (* ES5 does not support the \u{} syntax, so print surrogate pairs "\ud83d\udca9" instead of "\u{1f4A9}". if we add a flag to target ES6, we should change this. 
*) - let n' = n - 0x10000 in - let hi = (0xD800 lor (n' lsr 10)) in - let lo = (0xDC00 lor (n' land 0x3FF)) in - Printf.bprintf buf "\\u%4x" hi; - Printf.bprintf buf "\\u%4x" lo; - buf - end + let n' = n - 0x10000 in + let hi = 0xD800 lor (n' lsr 10) in + let lo = 0xDC00 lor (n' land 0x3FF) in + Printf.bprintf buf "\\u%4x" hi; + Printf.bprintf buf "\\u%4x" lo; + buf + end in fun ~quote str -> - str - |> Wtf8.fold_wtf_8 (f ~quote) (Buffer.create (String.length str)) - |> Buffer.contents - -let with_attached_comments: comment_map option ref = ref None + str |> lookahead_fold_wtf_8 (f ~quote) (Buffer.create (String.length str)) |> Buffer.contents -let layout_node_with_comments current_loc layout_node = - let open Layout in - let open Ast.Comment in - let layout_from_comment anchor (loc_st, _) (loc_cm, comment) = - let comment_text = match comment with +let layout_from_comment anchor loc_node (loc_cm, comment) = + Ast.Comment.( + let comment_text = + match comment with | Line txt -> Printf.sprintf "//%s\n" txt | Block txt -> - match Loc.lines_intersect loc_st loc_cm, anchor with - | false, Preceding -> Printf.sprintf "\n/*%s*/" txt - | false, Following -> Printf.sprintf "/*%s*/\n" txt - | false, Enclosing -> Printf.sprintf "/*%s*/\n" txt - | _ -> Printf.sprintf "/*%s*/" txt + (match (Loc.lines_intersect loc_node loc_cm, anchor) with + | (false, Preceding) -> Printf.sprintf "\n/*%s*/" txt + | (false, Following) -> Printf.sprintf "/*%s*/\n" txt + | (false, Enclosing) -> Printf.sprintf "/*%s*/\n" txt + | _ -> Printf.sprintf "/*%s*/" txt) in - SourceLocation (loc_cm, Atom comment_text) - in - match !with_attached_comments with + SourceLocation (loc_cm, Atom comment_text)) + +let with_attached_comments : comment_map option ref = ref None + +let layout_node_with_comments current_loc layout_node = + Layout.( + match !with_attached_comments with | None -> layout_node | Some attached when LocMap.is_empty attached -> layout_node | Some attached -> - match LocMap.find_opt current_loc attached with - | None | Some [] -> layout_node - | Some comments -> - with_attached_comments := Some (LocMap.remove current_loc attached); - let matched = List.fold_left (fun nodes comment -> - let (anchor, statement_attached, comment) = comment in - match anchor with - | Preceding -> - nodes @ [layout_from_comment anchor statement_attached comment] - | Following -> - [layout_from_comment anchor statement_attached comment] @ nodes + (match LocMap.find_opt current_loc attached with + | None + | Some [] -> + layout_node + | Some comments -> + with_attached_comments := Some (LocMap.remove current_loc attached); + let matched = + List.fold_left + (fun nodes comment -> + let (anchor, (loc_st, _), comment) = comment in + match anchor with + | Preceding -> nodes @ [layout_from_comment anchor loc_st comment] + | Following -> [layout_from_comment anchor loc_st comment] @ nodes | Enclosing -> - (* TODO(festevezga)(T29896911) print enclosing comments using statement_attached *) - [layout_from_comment anchor statement_attached comment] @ nodes - ) [layout_node] comments in - Concat matched - -let source_location_with_comments (current_loc, layout_node) = - layout_node_with_comments current_loc (SourceLocation (current_loc, layout_node)) - -let identifier_with_comments (current_loc, name) = - layout_node_with_comments current_loc (Identifier (current_loc, name)) + (* TODO(festevezga)(T29896911) print enclosing comments using full statement *) + [layout_from_comment anchor loc_st comment] @ nodes) + [layout_node] + comments + in + Concat 
matched)) + +let layout_node_with_simple_comments current_loc comments layout_node = + let { Ast.Syntax.leading; trailing; _ } = comments in + let preceding = List.map (layout_from_comment Preceding current_loc) leading in + let following = List.map (layout_from_comment Following current_loc) trailing in + Concat (preceding @ [layout_node] @ following) + +let layout_node_with_simple_comments_opt current_loc comments layout_node = + match comments with + | Some c -> layout_node_with_simple_comments current_loc c layout_node + | None -> layout_node + +let source_location_with_comments ?comments (current_loc, layout_node) = + match comments with + | Some comments -> + layout_node_with_simple_comments + current_loc + comments + (SourceLocation (current_loc, layout_node)) + | None -> layout_node_with_comments current_loc (SourceLocation (current_loc, layout_node)) + +let identifier_with_comments (current_loc, { Ast.Identifier.name; comments }) = + let node = Identifier (current_loc, name) in + match comments with + | Some comments -> layout_node_with_simple_comments current_loc comments node + | None -> node (* Generate JS layouts *) let rec program ~preserve_docblock ~checksum (loc, statements, comments) = let nodes = if preserve_docblock && comments <> [] then - let directives, statements = Ast_utils.partition_directives statements in - let comments = match statements with - | [] -> comments - | (loc, _)::_ -> comments_before_loc loc comments + let (directives, statements) = Flow_ast_utils.partition_directives statements in + let comments = + match statements with + | [] -> comments + | (loc, _) :: _ -> comments_before_loc loc comments in - fuse_vertically ~inline:(true, true) ( - (combine_directives_and_comments directives comments):: - (statements_list_with_newlines statements) - ) + combine_directives_and_comments directives comments :: statement_list statements else - fuse_vertically ~inline:(true, true) ( - statements_list_with_newlines statements - ) + statement_list statements in + let nodes = group [join pretty_hardline nodes] in let nodes = maybe_embed_checksum nodes checksum in - let loc = { loc with Loc.start = { Loc.line = 1; column = 0; offset = 0; }} in + let loc = { loc with Loc.start = { Loc.line = 1; column = 0 } } in source_location_with_comments (loc, nodes) and program_simple (loc, statements, _) = - let nodes = fuse_vertically ~inline:(true, true) ( - statements_list_with_newlines statements - ) in - let loc = { loc with Loc.start = { Loc.line = 1; column = 0; offset = 0; }} in + let nodes = group [join pretty_hardline (statement_list statements)] in + let loc = { loc with Loc.start = { Loc.line = 1; column = 0 } } in source_location_with_comments (loc, nodes) and combine_directives_and_comments directives comments : Layout.layout_node = - let directives = List.map (fun ((loc, _) as x) -> loc, Statement x) directives in - let comments = List.map (fun ((loc, _) as x) -> loc, Comment x) comments in + let directives = Core_list.map ~f:(fun ((loc, _) as x) -> (loc, Statement x)) directives in + let comments = Core_list.map ~f:(fun ((loc, _) as x) -> (loc, Comment x)) comments in let merged = List.merge (fun (a, _) (b, _) -> Loc.compare a b) directives comments in - let nodes = List.map (function - | loc, Statement s -> loc, statement ~allow_empty:true s - | loc, Comment c -> loc, comment c - ) merged in - fuse_vertically ~inline:(true, true) (list_with_newlines nodes) + let nodes = + Core_list.map + ~f:(function + | (loc, Statement s) -> (loc, statement s) + | (loc, Comment 
c) -> (loc, comment c)) + merged + in + join pretty_hardline (list_with_newlines nodes) -and maybe_embed_checksum nodes checksum = match checksum with +and maybe_embed_checksum nodes checksum = + match checksum with | Some checksum -> let comment = Printf.sprintf "/* %s */" checksum in fuse [nodes; Newline; Atom comment] @@ -399,33 +491,11 @@ and maybe_embed_checksum nodes checksum = match checksum with and comment (loc, comment) = let module C = Ast.Comment in - source_location_with_comments (loc, match comment with - | C.Block txt -> fuse [ - Atom "/*"; pretty_newline; - Atom txt; pretty_newline; - Atom "*/"; - ] - | C.Line txt -> fuse [ - Atom "//"; - Atom txt; - Newline; - ] - ) - -and statement_list_with_locs ?allow_empty ?(pretty_semicolon=false) (stmts: (Loc.t, Loc.t) Ast.Statement.t list) = - let rec mapper acc = function - | [] -> List.rev acc - | ((loc, _) as stmt)::rest -> - let pretty_semicolon = pretty_semicolon && rest = [] in - let acc = (loc, statement ?allow_empty ~pretty_semicolon stmt)::acc in - (mapper [@tailcall]) acc rest - in - mapper [] stmts - -and statement_list ?allow_empty ?pretty_semicolon (stmts: (Loc.t, Loc.t) Ast.Statement.t list) = - stmts - |> statement_list_with_locs ?allow_empty ?pretty_semicolon - |> List.map (fun (_loc, layout) -> layout) + source_location_with_comments + ( loc, + match comment with + | C.Block txt -> fuse [Atom "/*"; Atom txt; Atom "*/"] + | C.Line txt -> fuse [Atom "//"; Atom txt; Newline] ) (** * Renders a statement @@ -434,1193 +504,1584 @@ and statement_list ?allow_empty ?pretty_semicolon (stmts: (Loc.t, Loc.t) Ast.Sta * a semicolon is never required on the last statement of a statement list, so we can set * `~pretty_semicolon:true` to only print the unnecessary semicolon in pretty mode. *) -and statement ?(allow_empty=false) ?(pretty_semicolon=false) (root_stmt: (Loc.t, Loc.t) Ast.Statement.t) = +and statement ?(pretty_semicolon = false) (root_stmt : (Loc.t, Loc.t) Ast.Statement.t) = let (loc, stmt) = root_stmt in let module E = Ast.Expression in let module S = Ast.Statement in - let with_semicolon = if pretty_semicolon then with_pretty_semicolon else with_semicolon in - source_location_with_comments ( - loc, - match stmt with - | S.Empty -> if allow_empty then Atom ";" else IfPretty(Atom "{}", Atom ";") - | S.Debugger -> with_semicolon (Atom "debugger") - | S.Block b -> block (loc, b) - | S.Expression { S.Expression.expression = expr; _ } -> - let ctxt = { normal_context with left = In_expression_statement } in - with_semicolon (expression_with_parens ~precedence:0 ~ctxt expr) - | S.If { S.If.test; consequent; alternate; } -> - begin match alternate with - | Some alt -> - fuse [ - statement_with_test "if" (expression test) (statement consequent); - pretty_space; - fuse_with_space [ - Atom "else"; - statement ~pretty_semicolon alt; - ] - ] - | None -> - statement_with_test "if" (expression test) (statement ~pretty_semicolon consequent) - end - | S.Labeled { S.Labeled.label; body } -> - fuse [ - identifier label; - Atom ":"; - pretty_space; - statement body - ] - | S.Break { S.Break.label } -> - let s_break = Atom "break" in - with_semicolon ( - match label with - | Some l -> fuse [s_break; space; identifier l] - | None -> s_break; - ) - | S.Continue { S.Continue.label } -> - let s_continue = Atom "continue" in - with_semicolon ( - match label with - | Some l -> fuse [s_continue; space; identifier l] - | None -> s_continue; - ) - | S.With { S.With._object; body } -> - statement_with_test "with" (expression _object) (statement 
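(* A sketch of the ~pretty_semicolon switch above, assuming with_pretty_semicolon emits
   its semicolon only in pretty mode (as the name suggests): the last statement of a
   block is printed with ~pretty_semicolon:true, so an expression statement there comes
   out as "foo();" in pretty output but "foo()" in compact output, where the terminator
   is redundant right before the closing brace. *)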
body) - | S.Switch { S.Switch.discriminant; cases } -> - let case_nodes = match cases with - | [] -> [] - | hd::[] -> [switch_case ~last:true hd] - | hd::rest -> - let rev_rest = List.rev rest in - let last = List.hd rev_rest |> switch_case ~last:true in - let middle = List.tl rev_rest |> List.map (switch_case ~last:false) in - (switch_case ~last:false hd)::(List.rev (last::middle)) - in - statement_with_test - "switch" - (expression discriminant) - (list ~wrap:(Atom "{", Atom "}") ~break:Break_if_pretty case_nodes) - | S.Return { S.Return.argument } -> - let s_return = Atom "return" in - with_semicolon ( - match argument with - | Some arg -> - let arg = match arg with - | _, E.Logical _ - | _, E.Binary _ - | _, E.Sequence _ - | _, E.JSXElement _ -> - wrap_in_parens_on_break (expression arg) - | _ -> - expression arg + let with_semicolon = + if pretty_semicolon then + with_pretty_semicolon + else + with_semicolon + in + source_location_with_comments + ( loc, + match stmt with + | S.Empty -> Atom ";" + | S.Debugger -> with_semicolon (Atom "debugger") + | S.Block b -> block (loc, b) + | S.Expression { S.Expression.expression = expr; _ } -> + let ctxt = { normal_context with left = In_expression_statement } in + with_semicolon (expression_with_parens ~precedence:0 ~ctxt expr) + | S.If { S.If.test; consequent; alternate; comments } -> + layout_node_with_simple_comments_opt + loc + comments + begin + match alternate with + | Some alt -> + fuse + [ + group + [statement_with_test "if" (expression test); statement_after_test consequent]; + pretty_space; + fuse_with_space [Atom "else"; statement ~pretty_semicolon alt]; + ] + | None -> + group + [ + statement_with_test "if" (expression test); + statement_after_test ~pretty_semicolon consequent; + ] + end + | S.Labeled { S.Labeled.label; body } -> + fuse [identifier label; Atom ":"; pretty_space; statement body] + | S.Break { S.Break.label; comments } -> + let s_break = Atom "break" in + with_semicolon + @@ layout_node_with_simple_comments_opt + loc + comments + (match label with + | Some l -> fuse [s_break; space; identifier l] + | None -> s_break) + | S.Continue { S.Continue.label; comments } -> + let s_continue = Atom "continue" in + with_semicolon + @@ layout_node_with_simple_comments_opt + loc + comments + (match label with + | Some l -> fuse [s_continue; space; identifier l] + | None -> s_continue) + | S.With { S.With._object; body } -> + fuse [statement_with_test "with" (expression _object); statement_after_test body] + | S.Switch { S.Switch.discriminant; cases } -> + let case_nodes = + let rec helper acc = function + | [] -> List.rev acc + | [case] -> List.rev (switch_case ~last:true case :: acc) + | case :: next :: rest -> + let case_node = switch_case ~last:false case in + let next_node = switch_case ~last:(rest = []) next in + let case_node = + let (Loc.{ _end = { line = case_end; _ }; _ }, _) = case in + let (Loc.{ start = { line = next_start; _ }; _ }, _) = next in + if case_end + 1 < next_start then + fuse [case_node; pretty_hardline] + else + case_node + in + helper (next_node :: case_node :: acc) rest in - fuse_with_space [s_return; arg] - | None -> s_return; - ) - | S.Throw { S.Throw.argument } -> - with_semicolon (fuse_with_space [ - Atom "throw"; - wrap_in_parens_on_break (expression argument); - ]) - | S.Try { S.Try.block=b; handler; finalizer } -> - fuse [ - Atom "try"; - pretty_space; - block b; - (match handler with - | Some (loc, { S.Try.CatchClause.param; body }) -> - source_location_with_comments (loc, match param with - | 
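(* The switch-case helper above preserves at most one blank line between cases by
   comparing source locations: with `case_end` the line a case ends on and `next_start`
   the line the next case starts on, an extra hard line is added only when
   case_end + 1 < next_start. For example (a sketch), a case ending on line 3 followed
   by a case starting on line 5 keeps one blank line in the output, while a case
   starting on line 4 is printed directly on the next line. *)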
Some p -> fuse [ - pretty_space; - statement_with_test "catch" - (pattern ~ctxt:normal_context p) - (block body) - ] - | None -> fuse [ - pretty_space; - Atom "catch"; - pretty_space; - block body; - ] - ) - | None -> Empty); - match finalizer with - | Some b -> - fuse [ - pretty_space; - Atom "finally"; + helper [] cases + in + let cases_node = + wrap_and_indent + ~break:pretty_hardline + (Atom "{", Atom "}") + [join pretty_hardline case_nodes] + in + fuse [statement_with_test "switch" (expression discriminant); pretty_space; cases_node] + | S.Return { S.Return.argument; comments } -> + let s_return = Atom "return" in + with_semicolon + @@ layout_node_with_simple_comments_opt + loc + comments + (match argument with + | Some arg -> + let arg = + match arg with + | (_, E.Logical _) + | (_, E.Binary _) + | (_, E.Sequence _) + | (_, E.JSXElement _) -> + group [wrap_in_parens_on_break (expression arg)] + | _ -> expression arg + in + fuse_with_space [s_return; arg] + | None -> s_return) + | S.Throw { S.Throw.argument } -> + with_semicolon + (fuse_with_space [Atom "throw"; group [wrap_in_parens_on_break (expression argument)]]) + | S.Try { S.Try.block = b; handler; finalizer; comments } -> + layout_node_with_simple_comments_opt + loc + comments + (fuse + [ + Atom "try"; + pretty_space; + block b; + (match handler with + | Some (loc, { S.Try.CatchClause.param; body }) -> + source_location_with_comments + ( loc, + match param with + | Some p -> + fuse + [ + pretty_space; + statement_with_test "catch" (pattern ~ctxt:normal_context p); + pretty_space; + block body; + ] + | None -> fuse [pretty_space; Atom "catch"; pretty_space; block body] ) + | None -> Empty); + (match finalizer with + | Some b -> fuse [pretty_space; Atom "finally"; pretty_space; block b] + | None -> Empty); + ]) + | S.While { S.While.test; body } -> + fuse + [ + statement_with_test "while" (expression test); + statement_after_test ~pretty_semicolon body; + ] + | S.DoWhile { S.DoWhile.body; test; comments } -> + with_semicolon + @@ layout_node_with_simple_comments_opt + loc + comments + (fuse + [ + fuse_with_space [Atom "do"; statement body]; + pretty_space; + Atom "while"; + pretty_space; + group [wrap_and_indent (Atom "(", Atom ")") [expression test]]; + ]) + | S.For { S.For.init; test; update; body } -> + fuse + [ + statement_with_test + "for" + (join + (fuse [Atom ";"; pretty_line]) + [ + begin + match init with + | Some (S.For.InitDeclaration decl) -> + let ctxt = { normal_context with group = In_for_init } in + variable_declaration ~ctxt decl + | Some (S.For.InitExpression expr) -> + let ctxt = { normal_context with group = In_for_init } in + expression_with_parens ~precedence:0 ~ctxt expr + | None -> Empty + end; + begin + match test with + | Some expr -> expression expr + | None -> Empty + end; + begin + match update with + | Some expr -> expression expr + | None -> Empty + end; + ]); + statement_after_test ~pretty_semicolon body; + ] + | S.ForIn { S.ForIn.left; right; body; each } -> + fuse + [ + Atom "for"; + ( if each then + fuse [space; Atom "each"] + else + Empty ); pretty_space; - block b + wrap_in_parens + (fuse_with_space + [ + begin + match left with + | S.ForIn.LeftDeclaration decl -> variable_declaration decl + | S.ForIn.LeftPattern patt -> pattern patt + end; + Atom "in"; + expression right; + ]); + statement_after_test ~pretty_semicolon body; ] - | None -> Empty - ] - | S.While { S.While.test; body } -> - statement_with_test "while" (expression test) (statement ~pretty_semicolon body); - | S.DoWhile { 
S.DoWhile.body; test } -> - with_semicolon (fuse [ - fuse_with_space [ - Atom "do"; - statement body; - ]; - pretty_space; - Atom "while"; - pretty_space; - wrap_in_parens (expression test) - ]) - | S.For { S.For.init; test; update; body } -> - fuse [ - Atom "for"; - pretty_space; - list - ~wrap:(Atom "(", Atom ")") - ~sep:(Atom ";") - ~trailing:false + | S.FunctionDeclaration func -> function_ func + | S.VariableDeclaration decl -> with_semicolon (variable_declaration (loc, decl)) + | S.ClassDeclaration class_ -> class_base class_ + | S.EnumDeclaration enum -> enum_declaration enum + | S.ForOf { S.ForOf.left; right; body; async } -> + fuse [ - begin match init with - | Some (S.For.InitDeclaration decl) -> - let ctxt = { normal_context with group = In_for_init } in - variable_declaration ~ctxt decl - | Some (S.For.InitExpression expr) -> - let ctxt = { normal_context with group = In_for_init } in - expression_with_parens ~precedence:0 ~ctxt expr - | None -> Empty - end; - begin match test with - | Some expr -> expression expr - | None -> Empty - end; - begin match update with - | Some expr -> expression expr - | None -> Empty - end; - ]; - pretty_space; - statement ~pretty_semicolon body; - ] - | S.ForIn { S.ForIn.left; right; body; each } -> - fuse [ - Atom "for"; - if each then fuse [space; Atom "each"] else Empty; - pretty_space; - wrap_in_parens (fuse_with_space [ - begin match left with - | S.ForIn.LeftDeclaration decl -> variable_declaration decl - | S.ForIn.LeftPattern patt -> pattern patt - end; - Atom "in"; - expression right; - ]); - pretty_space; - statement ~pretty_semicolon body; - ] - | S.FunctionDeclaration func -> function_ ~precedence:max_precedence func - | S.VariableDeclaration decl -> - with_semicolon (variable_declaration (loc, decl)) - | S.ClassDeclaration class_ -> class_base class_ - | S.ForOf { S.ForOf.left; right; body; async } -> - fuse [ - Atom "for"; - if async then fuse [space; Atom "await"] else Empty; - pretty_space; - wrap_in_parens (fuse [ - begin match left with - | S.ForOf.LeftDeclaration decl -> variable_declaration decl - | S.ForOf.LeftPattern patt -> pattern patt - end; - space; Atom "of"; space; - expression right; - ]); - pretty_space; - statement ~pretty_semicolon body; - ] - | S.ImportDeclaration import -> import_declaration import - | S.ExportNamedDeclaration export -> export_declaration export - | S.ExportDefaultDeclaration export -> export_default_declaration export - | S.TypeAlias typeAlias -> type_alias ~declare:false typeAlias - | S.OpaqueType opaqueType -> opaque_type ~declare:false opaqueType - | S.InterfaceDeclaration interface -> interface_declaration interface - | S.DeclareClass interface -> declare_class interface - | S.DeclareFunction func -> declare_function func - | S.DeclareInterface interface -> declare_interface interface - | S.DeclareVariable var -> declare_variable var - | S.DeclareModuleExports annot -> - declare_module_exports annot - | S.DeclareModule m -> declare_module m - | S.DeclareTypeAlias typeAlias -> type_alias ~declare:true typeAlias - | S.DeclareOpaqueType opaqueType -> opaque_type ~declare:true opaqueType - | S.DeclareExportDeclaration export -> declare_export_declaration export - ) - -and expression ?(ctxt=normal_context) (root_expr: (Loc.t, Loc.t) Ast.Expression.t) = + Atom "for"; + ( if async then + fuse [space; Atom "await"] + else + Empty ); + pretty_space; + wrap_in_parens + (fuse + [ + begin + match left with + | S.ForOf.LeftDeclaration decl -> variable_declaration decl + | S.ForOf.LeftPattern patt -> 
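(* A sketch of the loop headers produced around here: the S.For case joins the three
   header slots with a ";" plus a soft line and prints missing slots as Empty, e.g.
   "for (let i = 0; i < n; i++) ..." with the header allowed to break in pretty mode;
   the S.ForIn case adds the legacy "each" keyword when ~each is set; and the S.ForOf
   case just below prints "for await (x of xs) ..." when ~async is set. The body in all
   three cases goes through statement_after_test. *)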
pattern patt + end; + space; + Atom "of"; + space; + expression right; + ]); + statement_after_test ~pretty_semicolon body; + ] + | S.ImportDeclaration import -> import_declaration import + | S.ExportNamedDeclaration export -> export_declaration export + | S.ExportDefaultDeclaration export -> export_default_declaration export + | S.TypeAlias typeAlias -> type_alias ~declare:false typeAlias + | S.OpaqueType opaqueType -> opaque_type ~declare:false opaqueType + | S.InterfaceDeclaration interface -> interface_declaration interface + | S.DeclareClass interface -> declare_class interface + | S.DeclareFunction func -> declare_function func + | S.DeclareInterface interface -> declare_interface interface + | S.DeclareVariable var -> declare_variable var + | S.DeclareModuleExports annot -> declare_module_exports annot + | S.DeclareModule m -> declare_module m + | S.DeclareTypeAlias typeAlias -> type_alias ~declare:true typeAlias + | S.DeclareOpaqueType opaqueType -> opaque_type ~declare:true opaqueType + | S.DeclareExportDeclaration export -> declare_export_declaration export ) + +(* The beginning of a statement that does a "test", like `if (test)` or `while (test)` *) +and statement_with_test name test = + fuse [Atom name; pretty_space; group [wrap_and_indent (Atom "(", Atom ")") [test]]] + +(* A statement following a "test", like the `statement` in `if (expr) statement` or + `for (...) statement`. Better names for this are welcome! *) +and statement_after_test ?pretty_semicolon = function + | (_, Ast.Statement.Empty) as stmt -> statement ?pretty_semicolon stmt + | (_, Ast.Statement.Block _) as stmt -> fuse [pretty_space; statement ?pretty_semicolon stmt] + | stmt -> Indent (fuse [pretty_line; statement ?pretty_semicolon stmt]) + +and expression ?(ctxt = normal_context) (root_expr : (Loc.t, Loc.t) Ast.Expression.t) = let (loc, expr) = root_expr in let module E = Ast.Expression in let precedence = precedence_of_expression (loc, expr) in - source_location_with_comments ( - loc, - match expr with - | E.This -> Atom "this" - | E.Super -> Atom "super" - | E.Array { E.Array.elements } -> - let last_element = (List.length elements) - 1 in - list - ~wrap:(Atom "[", Atom "]") - ~sep:(Atom ",") - (List.mapi - (fun i e -> match e with - | Some expr -> expression_or_spread ~ctxt:normal_context expr - (* If the last item is empty it needs a trailing comma forced so to - retain the same AST output. *) - | None when i = last_element -> IfBreak (Empty, Atom ",") - | None -> Empty - ) - elements - ) - | E.Object { E.Object.properties } -> - list - ~wrap:(Concat [Atom "{"; flat_pretty_space], Concat [flat_pretty_space; Atom "}"]) - ~sep:(Atom ",") - (object_properties_with_newlines properties) - | E.Sequence { E.Sequence.expressions } -> - (* to get an AST like `x, (y, z)`, then there must've been parens + source_location_with_comments + ( loc, + match expr with + | E.This -> Atom "this" + | E.Super -> Atom "super" + | E.Array { E.Array.elements; comments } -> + let rev_elements = + List.rev_map + (function + | Some expr -> expression_or_spread ~ctxt:normal_context expr + | None -> Empty) + elements + in + (* if the last element is a hole, then we need to manually insert a trailing `,`, even in + ugly mode, and disable automatic trailing separators. 
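   For example (a sketch): `[1, ,]` parses with a trailing hole, so the reversed element
   list starts with Empty; swapping that Empty for an explicit "," and passing
   ~trailing_sep:false makes us print "[1,,]" rather than "[1,]", which would be a
   one-element array and change what the output parses back to.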
*) + let (trailing_sep, rev_elements) = + match rev_elements with + | Empty :: tl -> (false, Atom "," :: tl) + | _ -> (true, rev_elements) + in + layout_node_with_simple_comments_opt loc comments + @@ group + [ + new_list + ~wrap:(Atom "[", Atom "]") + ~sep:(Atom ",") + ~trailing_sep + (List.rev rev_elements); + ] + | E.Object { E.Object.properties; comments } -> + layout_node_with_simple_comments_opt loc comments + @@ group + [ + new_list + ~wrap:(Atom "{", Atom "}") + ~sep:(Atom ",") + ~wrap_spaces:true + (object_properties_with_newlines properties); + ] + | E.Sequence { E.Sequence.expressions } -> + (* to get an AST like `x, (y, z)`, then there must've been parens around the right side. we can force that by bumping the minimum precedence. *) - let precedence = precedence + 1 in - list - ~inline:(true, true) - ~sep:(Atom ",") - ~indent:0 - ~trailing:false - (List.map (expression_with_parens ~precedence ~ctxt) expressions) - | E.Identifier ident -> identifier ident - | E.Literal lit -> literal (loc, lit) - | E.Function func -> function_ ~precedence func - | E.ArrowFunction func -> function_base ~ctxt ~precedence ~arrow:true func - | E.Assignment { E.Assignment.operator; left; right } -> - fuse [ - pattern ~ctxt left; - pretty_space; - E.Assignment.(match operator with - | Assign -> Atom "=" - | PlusAssign -> Atom "+=" - | MinusAssign -> Atom "-=" - | MultAssign -> Atom "*=" - | ExpAssign -> Atom "**=" - | DivAssign -> Atom "/=" - | ModAssign -> Atom "%=" - | LShiftAssign -> Atom "<<=" - | RShiftAssign -> Atom ">>=" - | RShift3Assign -> Atom ">>>=" - | BitOrAssign -> Atom "|=" - | BitXorAssign -> Atom "^=" - | BitAndAssign -> Atom "&=" - ); - pretty_space; - begin - let ctxt = context_after_token ctxt in - expression_with_parens ~precedence ~ctxt right - end; - ] - | E.Binary { E.Binary.operator; left; right; } -> - let module B = E.Binary in - fuse_with_space [ - expression_with_parens ~precedence ~ctxt left; - Atom (Ast_utils.string_of_binary_operator operator); - begin match operator, right with - | E.Binary.Plus, - (_, E.Unary { E.Unary.operator=E.Unary.Plus; _ }) - | E.Binary.Minus, - (_, E.Unary { E.Unary.operator=E.Unary.Minus; _ }) - -> - let ctxt = context_after_token ctxt in - fuse [ugly_space; expression ~ctxt right] - | E.Binary.Plus, - (_, E.Unary { E.Unary.operator=E.Unary.Minus; _ }) - | E.Binary.Minus, - (_, E.Unary { E.Unary.operator=E.Unary.Plus; _ }) - -> - let ctxt = context_after_token ctxt in - fuse [expression ~ctxt right] - | (E.Binary.Plus | E.Binary.Minus), - (_, E.Update { E.Update.prefix = true; _ }) - -> - let ctxt = context_after_token ctxt in - fuse [ugly_space; expression ~ctxt right] - | _ -> - (* to get an AST like `x + (y - z)`, then there must've been parens + let precedence = precedence + 1 in + let layouts = Core_list.map ~f:(expression_with_parens ~precedence ~ctxt) expressions in + group [join (fuse [Atom ","; pretty_line]) layouts] + | E.Identifier ident -> identifier ident + | E.Literal lit -> literal lit + | E.Function func -> function_ func + | E.ArrowFunction func -> arrow_function ~ctxt ~precedence func + | E.Assignment { E.Assignment.operator; left; right } -> + fuse + [ + pattern ~ctxt left; + pretty_space; + begin + match operator with + | None -> Atom "=" + | Some op -> Atom (Flow_ast_utils.string_of_assignment_operator op) + end; + pretty_space; + begin + let ctxt = context_after_token ctxt in + expression_with_parens ~precedence ~ctxt right + end; + ] + | E.Binary { E.Binary.operator; left; right } -> + let module B = E.Binary in + 
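(* The precedence-bumping trick used for E.Sequence above (and again for the right-hand
   side of this E.Binary case): the right operand is printed with ~precedence one higher
   than the operator's own, so a nested node at the same precedence can only round-trip
   through explicit parentheses. Sketch: `x, (y, z)` parses as Sequence [x; Sequence [y; z]],
   and bumping the precedence makes expression_with_parens re-emit the parens, printing
   "x, (y, z)" instead of flattening it to "x, y, z", which is a different AST. *)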
fuse_with_space + [ + expression_with_parens ~precedence ~ctxt left; + Atom (Flow_ast_utils.string_of_binary_operator operator); + begin + match (operator, right) with + | (E.Binary.Plus, (_, E.Unary { E.Unary.operator = E.Unary.Plus; _ })) + | (E.Binary.Minus, (_, E.Unary { E.Unary.operator = E.Unary.Minus; _ })) + | ( E.Binary.Plus, + (_, E.Update { E.Update.prefix = true; operator = E.Update.Increment; _ }) ) + | ( E.Binary.Minus, + (_, E.Update { E.Update.prefix = true; operator = E.Update.Decrement; _ }) ) -> + let ctxt = context_after_token ctxt in + fuse [ugly_space; expression ~ctxt right] + | _ -> + (* to get an AST like `x + (y - z)`, then there must've been parens around the right side. we can force that by bumping the minimum precedence to not have parens. *) - let precedence = precedence + 1 in - let ctxt = { ctxt with left = - match operator with - | E.Binary.Minus -> In_minus_op - | E.Binary.Plus -> In_plus_op - | _ -> Normal_left - } in - expression_with_parens ~precedence ~ctxt right - end; - ] - | E.Call c -> call ~precedence ~ctxt c - | E.OptionalCall { E.OptionalCall.call = c; optional } -> call ~optional ~precedence ~ctxt c - | E.Conditional { E.Conditional.test; consequent; alternate } -> - let test_layout = - (* conditionals are right-associative *) - let precedence = precedence + 1 in - expression_with_parens ~precedence ~ctxt test in - list - ~wrap:(fuse [test_layout; pretty_space], Empty) - ~inline:(false, true) - [ - fuse [ - Atom "?"; pretty_space; - expression_with_parens ~precedence:min_precedence ~ctxt consequent - ]; - fuse [ - Atom ":"; pretty_space; - expression_with_parens ~precedence:min_precedence ~ctxt alternate - ]; - ] - | E.Logical { E.Logical.operator; left; right } -> - let left = expression_with_parens ~precedence ~ctxt left in - let operator = match operator with - | E.Logical.Or -> Atom "||" - | E.Logical.And -> Atom "&&" - | E.Logical.NullishCoalesce -> Atom "??" - in - let right = expression_with_parens ~precedence:(precedence + 1) ~ctxt right in - - (* if we need to wrap, the op stays on the first line, with the RHS on a + let precedence = precedence + 1 in + let ctxt = + { + ctxt with + left = + (match operator with + | E.Binary.Minus -> In_minus_op + | E.Binary.Plus -> In_plus_op + | _ -> Normal_left); + } + in + expression_with_parens ~precedence ~ctxt right + end; + ] + | E.Call c -> call ~precedence ~ctxt c + | E.OptionalCall { E.OptionalCall.call = c; optional } -> call ~optional ~precedence ~ctxt c + | E.Conditional { E.Conditional.test; consequent; alternate } -> + let test_layout = + (* increase precedence since conditionals are right-associative *) + expression_with_parens ~precedence:(precedence + 1) ~ctxt test + in + group + [ + test_layout; + Indent + (fuse + [ + pretty_line; + Atom "?"; + pretty_space; + expression_with_parens ~precedence:min_precedence ~ctxt consequent; + pretty_line; + Atom ":"; + pretty_space; + expression_with_parens ~precedence:min_precedence ~ctxt alternate; + ]); + ] + | E.Logical { E.Logical.operator; left; right } -> + let left = expression_with_parens ~precedence ~ctxt left in + let operator = + match operator with + | E.Logical.Or -> Atom "||" + | E.Logical.And -> Atom "&&" + | E.Logical.NullishCoalesce -> Atom "??" 
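(* The special adjacency arms in the E.Binary case above keep the compact output
   re-parseable: without the forced ugly_space, `x + +y` would print as "x++y" and
   `x - --y` as "x---y", both of which lex a `++` or `--` token and no longer
   round-trip. With the space the compact forms are "x+ +y" and "x- --y". Every other
   right-hand side falls through to the default arm, which bumps the precedence exactly
   like the sequence case. *)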
+ in + let right = expression_with_parens ~precedence:(precedence + 1) ~ctxt right in + (* if we need to wrap, the op stays on the first line, with the RHS on a new line and indented by 2 spaces *) - Group [ - left; - pretty_space; - operator; - Indent (fuse [line; right]) - ] - | E.Member m -> member ~precedence ~ctxt m - | E.OptionalMember { E.OptionalMember.member = m; optional } -> - member ~optional ~precedence ~ctxt m - | E.New { E.New.callee; targs; arguments } -> - let callee_layout = - if definitely_needs_parens ~precedence ctxt callee || - contains_call_expression callee - then wrap_in_parens (expression ~ctxt callee) - else expression ~ctxt callee - in - fuse [ - fuse_with_space [ - Atom "new"; - callee_layout; - ]; - option type_parameter_instantiation targs; - list - ~wrap:(Atom "(", Atom ")") - ~sep:(Atom ",") - (List.map expression_or_spread arguments); - ]; - | E.Unary { E.Unary.operator; prefix = _; argument } -> - let s_operator, needs_space = begin match operator with - | E.Unary.Minus -> Atom "-", false - | E.Unary.Plus -> Atom "+", false - | E.Unary.Not -> Atom "!", false - | E.Unary.BitNot -> Atom "~", false - | E.Unary.Typeof -> Atom "typeof", true - | E.Unary.Void -> Atom "void", true - | E.Unary.Delete -> Atom "delete", true - | E.Unary.Await -> Atom "await", true - end in - let expr = - let ctxt = { ctxt with left = + Group [left; pretty_space; operator; Indent (fuse [pretty_line; right])] + | E.Member m -> member ~precedence ~ctxt m + | E.OptionalMember { E.OptionalMember.member = m; optional } -> + member ~optional ~precedence ~ctxt m + | E.New { E.New.callee; targs; arguments; comments } -> + let callee_layout = + if definitely_needs_parens ~precedence ctxt callee || contains_call_expression callee + then + wrap_in_parens (expression ~ctxt callee) + else + expression ~ctxt callee + in + layout_node_with_simple_comments_opt loc comments + @@ group + [ + fuse_with_space [Atom "new"; callee_layout]; + option type_parameter_instantiation_with_implicit targs; + new_list + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + (Core_list.map ~f:expression_or_spread arguments); + ] + | E.Unary { E.Unary.operator; argument; comments } -> + let (s_operator, needs_space) = match operator with - | E.Unary.Minus -> In_minus_op - | E.Unary.Plus -> In_plus_op - | _ -> Normal_left - } in - expression_with_parens ~precedence ~ctxt argument - in - fuse [ - s_operator; - if needs_space then begin match argument with - | (_, E.Sequence _) -> Empty - | _ -> space - end else Empty; - expr; - ] - | E.Update { E.Update.operator; prefix; argument } -> - let s_operator = match operator with - | E.Update.Increment -> Atom "++" - | E.Update.Decrement -> Atom "--" - in - (* we never need to wrap `argument` in parens because it must be a valid + | E.Unary.Minus -> (Atom "-", false) + | E.Unary.Plus -> (Atom "+", false) + | E.Unary.Not -> (Atom "!", false) + | E.Unary.BitNot -> (Atom "~", false) + | E.Unary.Typeof -> (Atom "typeof", true) + | E.Unary.Void -> (Atom "void", true) + | E.Unary.Delete -> (Atom "delete", true) + | E.Unary.Await -> (Atom "await", true) + in + let expr = + let ctxt = + { + ctxt with + left = + (match operator with + | E.Unary.Minus -> In_minus_op + | E.Unary.Plus -> In_plus_op + | _ -> Normal_left); + } + in + expression_with_parens ~precedence ~ctxt argument + in + layout_node_with_simple_comments_opt loc comments + @@ fuse + [ + s_operator; + ( if needs_space then + match argument with + | (_, E.Sequence _) -> Empty + | _ -> space + else + Empty ); + expr; + ] + | 
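(* A sketch of the spacing rule just above: the word-like operators need a separator
   ("typeof x", "void 0", "delete obj.prop", "await p"), but when the argument is a
   sequence it is about to be wrapped in parentheses anyway, so the space is dropped and
   the compact output is "typeof(a, b)" rather than "typeof (a, b)". The symbolic
   operators ("-x", "!x", "~x") never take one. *)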
E.Update { E.Update.operator; prefix; argument } -> + let s_operator = + match operator with + | E.Update.Increment -> Atom "++" + | E.Update.Decrement -> Atom "--" + in + (* we never need to wrap `argument` in parens because it must be a valid left-hand side expression *) - if prefix then fuse [s_operator; expression ~ctxt argument] - else fuse [expression ~ctxt argument; s_operator] - | E.Class class_ -> class_base class_ - | E.Yield { E.Yield.argument; delegate } -> - fuse [ - Atom "yield"; - if delegate then Atom "*" else Empty; - match argument with - | Some arg -> fuse [space; expression ~ctxt arg] - | None -> Empty - ] - | E.MetaProperty { E.MetaProperty.meta; property } -> - fuse [ - identifier meta; - Atom "."; - identifier property; - ] - | E.TaggedTemplate { E.TaggedTemplate.tag; quasi=(loc, template) } -> - let ctxt = { normal_context with left = In_tagged_template } in - fuse [ - expression_with_parens ~precedence ~ctxt tag; - source_location_with_comments (loc, template_literal template) - ] - | E.TemplateLiteral template -> template_literal template - | E.JSXElement el -> jsx_element loc el - | E.JSXFragment fr -> jsx_fragment loc fr - | E.TypeCast { E.TypeCast.expression=expr; annot } -> - wrap_in_parens (fuse [ - expression expr; - type_annotation annot; - ]) - | E.Import expr -> fuse [ - Atom "import"; - wrap_in_parens (expression expr); - ] - - (* Not supported *) - | E.Comprehension _ - | E.Generator _ -> not_supported loc "Comprehension not supported" - ) - -and call ?(optional=false) ~precedence ~ctxt call_node = + if prefix then + fuse [s_operator; expression ~ctxt argument] + else + fuse [expression ~ctxt argument; s_operator] + | E.Class class_ -> class_base class_ + | E.Yield { E.Yield.argument; delegate; comments } -> + layout_node_with_simple_comments_opt loc comments + @@ fuse + [ + Atom "yield"; + ( if delegate then + Atom "*" + else + Empty ); + (match argument with + | Some arg -> fuse [space; expression ~ctxt arg] + | None -> Empty); + ] + | E.MetaProperty { E.MetaProperty.meta; property } -> + fuse [identifier meta; Atom "."; identifier property] + | E.TaggedTemplate { E.TaggedTemplate.tag; quasi = (loc, template) } -> + let ctxt = { normal_context with left = In_tagged_template } in + fuse + [ + expression_with_parens ~precedence ~ctxt tag; + source_location_with_comments (loc, template_literal template); + ] + | E.TemplateLiteral template -> template_literal template + | E.JSXElement el -> jsx_element loc el + | E.JSXFragment fr -> jsx_fragment loc fr + | E.TypeCast { E.TypeCast.expression = expr; annot } -> + wrap_in_parens (fuse [expression expr; type_annotation annot]) + | E.Import expr -> fuse [Atom "import"; wrap_in_parens (expression expr)] + (* Not supported *) + | E.Comprehension _ + | E.Generator _ -> + not_supported loc "Comprehension not supported" ) + +and call ?(optional = false) ~precedence ~ctxt call_node = let { Ast.Expression.Call.callee; targs; arguments } = call_node in - match callee, targs, arguments with + match (callee, targs, arguments) with (* __d hack, force parens around factory function. 
More details at: https://fburl.com/b1wv51vj TODO: This is FB only, find generic way to add logic *) - | (_, Ast.Expression.Identifier (_, "__d")), None, [a; b; c; d] -> - let lparen = if optional then ".?(" else "(" in - fuse [ - Atom "__d"; - list - ~wrap:(Atom lparen, Atom ")") - ~sep:(Atom ",") - [ - expression_or_spread a; - expression_or_spread b; - wrap_in_parens (expression_or_spread c); - expression_or_spread d; - ] - ] + | ( (_, Ast.Expression.Identifier (_, { Ast.Identifier.name = "__d"; comments = _ })), + None, + [a; b; c; d] ) -> + let lparen = + if optional then + ".?(" + else + "(" + in + group + [ + Atom "__d"; + new_list + ~wrap:(Atom lparen, Atom ")") + ~sep:(Atom ",") + [ + expression_or_spread a; + expression_or_spread b; + wrap_in_parens (expression_or_spread c); + expression_or_spread d; + ]; + ] (* Standard call expression printing *) | _ -> - let targs, lparen = match targs with - | None -> - let lparen = if optional then ".?(" else "(" in - Empty, lparen - | Some (loc, args) -> - let less_than = if optional then "?.<" else "<" in - source_location_with_comments ( - loc, - list - ~wrap:(Atom less_than, Atom ">") - ~sep:(Atom ",") - (List.map type_ args) - ), "(" + let (targs, lparen) = + match targs with + | None -> + let lparen = + if optional then + ".?(" + else + "(" + in + (Empty, lparen) + | Some (loc, args) -> + let less_than = + if optional then + "?.<" + else + "<" + in + ( source_location_with_comments + ( loc, + group + [ + new_list + ~wrap:(Atom less_than, Atom ">") + ~sep:(Atom ",") + (Core_list.map ~f:explicit_or_implicit args); + ] ), + "(" ) in - fuse [ - expression_with_parens ~precedence ~ctxt callee; - targs; - list - ~wrap:(Atom lparen, Atom ")") - ~sep:(Atom ",") - (List.map expression_or_spread arguments) - ] + fuse + [ + expression_with_parens ~precedence ~ctxt callee; + targs; + group + [ + new_list + ~wrap:(Atom lparen, Atom ")") + ~sep:(Atom ",") + (Core_list.map ~f:expression_or_spread arguments); + ]; + ] -and expression_with_parens ~precedence ~(ctxt:expression_context) expr = - if definitely_needs_parens ~precedence ctxt expr - then wrap_in_parens (expression ~ctxt:normal_context expr) - else expression ~ctxt expr +and expression_with_parens ~precedence ~(ctxt : expression_context) expr = + if definitely_needs_parens ~precedence ctxt expr then + wrap_in_parens (expression ~ctxt:normal_context expr) + else + expression ~ctxt expr -and expression_or_spread ?(ctxt=normal_context) expr_or_spread = +and expression_or_spread ?(ctxt = normal_context) expr_or_spread = (* min_precedence causes operators that should always be parenthesized (they have precedence = 0) to be parenthesized. one notable example is the comma operator, which would be confused with additional arguments if not parenthesized. 
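   For example (a sketch): "f((a, b), c)" is a two-argument call whose first argument is
   a sequence expression; printing that argument at min_precedence keeps its
   parentheses, so it does not come out as "f(a, b, c)", which would be a
   three-argument call with a different AST.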
*) let precedence = min_precedence in match expr_or_spread with - | Ast.Expression.Expression expr -> - expression_with_parens ~precedence ~ctxt expr + | Ast.Expression.Expression expr -> expression_with_parens ~precedence ~ctxt expr | Ast.Expression.Spread (loc, { Ast.Expression.SpreadElement.argument }) -> - source_location_with_comments (loc, fuse [ - Atom "..."; expression_with_parens ~precedence ~ctxt argument - ]) + source_location_with_comments + (loc, fuse [Atom "..."; expression_with_parens ~precedence ~ctxt argument]) and identifier (loc, name) = identifier_with_comments (loc, name) +and number_literal_type { Ast.NumberLiteral.raw; _ } = Atom raw + and number_literal ~in_member_object raw num = let str = Dtoa.shortest_string_of_float num in - let if_pretty, if_ugly = - if in_member_object then - (* `1.foo` is a syntax error, but `1.0.foo`, `1e0.foo` and even `1..foo` are all ok. *) - let is_int x = not (String.contains x '.' || String.contains x 'e') in - let if_pretty = if is_int raw then wrap_in_parens (Atom raw) else Atom raw in - let if_ugly = if is_int str then fuse [Atom str; Atom "."] else Atom str in - if_pretty, if_ugly + if in_member_object then + (* `1.foo` is a syntax error, but `1.0.foo`, `1e0.foo` and even `1..foo` are all ok. *) + let is_int x = not (String.contains x '.' || String.contains x 'e') in + let if_pretty = + if is_int raw then + wrap_in_parens (Atom raw) + else + Atom raw + in + let if_ugly = + if is_int str then + fuse [Atom str; Atom "."] + else + Atom str + in + if if_pretty = if_ugly then + if_pretty else - Atom raw, Atom str - in - IfPretty (if_pretty, if_ugly) - -and literal (loc, { Ast.Literal.raw; value; }) = - let open Ast.Literal in - source_location_with_comments ( - loc, + IfPretty (if_pretty, if_ugly) + else if String.equal raw str then + Atom raw + else + IfPretty (Atom raw, Atom str) + +and literal { Ast.Literal.raw; value; comments = _ (* handled by caller *) } = + Ast.Literal.( match value with - | Number num -> - number_literal ~in_member_object:false raw num + | Number num -> number_literal ~in_member_object:false raw num | String str -> let quote = better_quote str in fuse [Atom quote; Atom (utf8_escape ~quote str); Atom quote] - | RegExp { RegExp.pattern; flags; } -> - let flags = flags |> String_utils.to_list |> List.sort Char.compare |> String_utils.of_list in + | RegExp { RegExp.pattern; flags } -> + let flags = + flags |> String_utils.to_list |> List.sort Char.compare |> String_utils.of_list + in fuse [Atom "/"; Atom pattern; Atom "/"; Atom flags] - | _ -> Atom raw - ) - -and member ?(optional=false) ~precedence ~ctxt member_node = - let { Ast.Expression.Member._object; property; computed } = member_node in - let ldelim, rdelim = begin match computed, optional with - | false, false -> Atom ".", Empty - | false, true -> Atom "?.", Empty - | true, false -> Atom "[", Atom "]" - | true, true -> Atom "?.[", Atom "]" - end in - fuse [ - begin match _object with - | (_, Ast.Expression.Call _) -> expression ~ctxt _object - | (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.Number num; raw }) - when not computed -> - (* 1.foo would be confused with a decimal point, so it needs parens *) - number_literal ~in_member_object:true raw num - | _ -> expression_with_parens ~precedence ~ctxt _object - end; - ldelim; - begin match property with - | Ast.Expression.Member.PropertyIdentifier (loc, id) -> - source_location_with_comments (loc, Atom id) - | Ast.Expression.Member.PropertyPrivateName (loc, (_, id)) -> - 
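(* A worked example of the in_member_object case above (a sketch): for a member access
   on the literal 1, the raw text "1" contains neither '.' nor 'e', so pretty mode
   prints "(1).foo" and compact mode prints "1..foo"; both still parse as a member
   access on a number literal, whereas a bare "1.foo" would read the '.' as a decimal
   point and fail to parse. *)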
source_location_with_comments (loc, Atom ("#" ^ id)) - | Ast.Expression.Member.PropertyExpression expr -> - expression ~ctxt expr - end; - rdelim; - ] + | _ -> Atom raw) + +and string_literal_type { Ast.StringLiteral.raw; _ } = Atom raw + +and member ?(optional = false) ~precedence ~ctxt member_node = + let { Ast.Expression.Member._object; property } = member_node in + let computed = + match property with + | Ast.Expression.Member.PropertyExpression _ -> true + | Ast.Expression.Member.PropertyIdentifier _ + | Ast.Expression.Member.PropertyPrivateName _ -> + false + in + let (ldelim, rdelim) = + match (computed, optional) with + | (false, false) -> (Atom ".", Empty) + | (false, true) -> (Atom "?.", Empty) + | (true, false) -> (Atom "[", Atom "]") + | (true, true) -> (Atom "?.[", Atom "]") + in + fuse + [ + begin + match _object with + | (_, Ast.Expression.Call _) -> expression ~ctxt _object + | ( loc, + Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.Number num; raw; comments } ) + when not computed -> + (* 1.foo would be confused with a decimal point, so it needs parens *) + source_location_with_comments + ?comments + (loc, number_literal ~in_member_object:true raw num) + | _ -> expression_with_parens ~precedence ~ctxt _object + end; + ldelim; + begin + match property with + | Ast.Expression.Member.PropertyIdentifier (loc, { Ast.Identifier.name = id; comments = _ }) + -> + source_location_with_comments (loc, Atom id) + | Ast.Expression.Member.PropertyPrivateName + (loc, (_, { Ast.Identifier.name = id; comments = _ })) -> + source_location_with_comments (loc, Atom ("#" ^ id)) + | Ast.Expression.Member.PropertyExpression expr -> expression ~ctxt expr + end; + rdelim; + ] and string_literal (loc, { Ast.StringLiteral.value; _ }) = let quote = better_quote value in - source_location_with_comments ( - loc, - fuse [Atom quote; Atom (utf8_escape ~quote value); Atom quote] - ) - -and pattern_object_property_key = Ast.Pattern.Object.(function - | Property.Literal lit -> literal lit - | Property.Identifier ident -> identifier ident - | Property.Computed expr -> - fuse [ - Atom "["; - Sequence ({ seq with break=Break_if_needed }, [expression expr]); - Atom "]"; - ] - ) - -and pattern ?(ctxt=normal_context) ((loc, pat): (Loc.t, Loc.t) Ast.Pattern.t) = + source_location_with_comments + (loc, fuse [Atom quote; Atom (utf8_escape ~quote value); Atom quote]) + +and pattern_object_property_key = + Ast.Pattern.Object.( + function + | Property.Literal (loc, lit) -> source_location_with_comments (loc, literal lit) + | Property.Identifier ident -> identifier ident + | Property.Computed expr -> + fuse [Atom "["; Sequence ({ seq with break = Break_if_needed }, [expression expr]); Atom "]"]) + +and pattern ?(ctxt = normal_context) ((loc, pat) : (Loc.t, Loc.t) Ast.Pattern.t) = let module P = Ast.Pattern in - source_location_with_comments ( - loc, - match pat with - | P.Object { P.Object.properties; annot } -> - fuse [ - list - ~wrap:(Atom "{", Atom "}") - ~sep:(Atom ",") - (* Object rest can have comma but most tooling still apply old + source_location_with_comments + ( loc, + match pat with + | P.Object { P.Object.properties; annot } -> + group + [ + new_list + ~wrap:(Atom "{", Atom "}") + ~sep: + (Atom ",") + (* Object rest can have comma but most tooling still apply old pre-spec rules that disallow it so omit it to be safe *) - ~trailing:false - (List.map - (function - | P.Object.Property (loc, { P.Object.Property. 
- key; pattern=pat; shorthand - }) -> - source_location_with_comments (loc, - begin match pat, shorthand with - (* Special case shorthand assignments *) - | (_, P.Assignment _), true -> pattern pat - (* Shorthand property *) - | _, true -> pattern_object_property_key key - (* *) - | _, false -> fuse [ - pattern_object_property_key key; - Atom ":"; pretty_space; - pattern pat - ] - end; - ) - | P.Object.RestProperty (loc, { P.Object.RestProperty.argument }) -> - source_location_with_comments (loc, fuse [Atom "..."; pattern argument]) - ) - properties - ); - option type_annotation annot; - ] - | P.Array { P.Array.elements; annot } -> - fuse [ - list - ~wrap:(Atom "[", Atom "]") - ~sep:(Atom ",") - ~trailing:false (* Array rest cannot have trailing *) - (List.map - (function - | None -> Empty - | Some P.Array.Element pat -> pattern pat - | Some P.Array.RestElement (loc, { P.Array.RestElement. - argument - }) -> - source_location_with_comments (loc, fuse [Atom "..."; pattern argument]) - ) - elements); - option type_annotation annot; - ] - | P.Assignment { P.Assignment.left; right } -> - fuse [ - pattern left; - pretty_space; Atom "="; pretty_space; - begin - let ctxt = context_after_token ctxt in - expression_with_parens - ~precedence:precedence_of_assignment - ~ctxt right - end; - ] - | P.Identifier { P.Identifier.name; annot; optional } -> - fuse [ - identifier name; - if optional then Atom "?" else Empty; - option type_annotation annot; - ] - | P.Expression expr -> expression ~ctxt expr - ) + ~trailing_sep:false + (List.map + (function + | P.Object.Property + (loc, { P.Object.Property.key; pattern = pat; default; shorthand }) -> + let prop = pattern_object_property_key key in + let prop = + match shorthand with + | false -> fuse [prop; Atom ":"; pretty_space; pattern pat] + | true -> prop + in + let prop = + match default with + | Some expr -> fuse_with_default prop expr + | None -> prop + in + source_location_with_comments (loc, prop) + | P.Object.RestProperty (loc, { P.Object.RestProperty.argument }) -> + source_location_with_comments (loc, fuse [Atom "..."; pattern argument])) + properties); + hint type_annotation annot; + ] + | P.Array { P.Array.elements; annot; comments } -> + group + [ + new_list + ~wrap:(Atom "[", Atom "]") + ~sep:(Atom ",") + ~trailing_sep:false (* Array rest cannot have trailing *) + (List.map + (function + | None -> Empty + | Some (P.Array.Element (loc, { P.Array.Element.argument; default })) -> + let elem = pattern argument in + let elem = + match default with + | Some expr -> fuse_with_default elem expr + | None -> elem + in + source_location_with_comments (loc, elem) + | Some (P.Array.RestElement (loc, { P.Array.RestElement.argument })) -> + source_location_with_comments + ?comments + (loc, fuse [Atom "..."; pattern argument])) + elements); + hint type_annotation annot; + ] + | P.Identifier { P.Identifier.name; annot; optional } -> + fuse + [ + identifier name; + ( if optional then + Atom "?" 
+ else + Empty ); + hint type_annotation annot; + ] + | P.Expression expr -> expression ~ctxt expr ) + +and fuse_with_default ?(ctxt = normal_context) node expr = + fuse + [ + node; + pretty_space; + Atom "="; + pretty_space; + expression_with_parens + ~precedence:precedence_of_assignment + ~ctxt:(context_after_token ctxt) + expr; + ] and template_literal { Ast.Expression.TemplateLiteral.quasis; expressions } = let module T = Ast.Expression.TemplateLiteral in - let template_element i (loc, { T.Element.value={ T.Element.raw; _ }; tail }) = - fuse [ - source_location_with_comments (loc, fuse [ - if i > 0 then Atom "}" else Empty; - Atom raw; - if not tail then Atom "${" else Empty; - ]); - if not tail then expression (List.nth expressions i) else Empty; - ] in - fuse [ - Atom "`"; - fuse (List.mapi template_element quasis); - Atom "`"; - ] - -and variable_declaration ?(ctxt=normal_context) (loc, { - Ast.Statement.VariableDeclaration.declarations; - kind; -}) = - source_location_with_comments (loc, fuse [ - begin match kind with + let template_element i (loc, { T.Element.value = { T.Element.raw; _ }; tail }) = + fuse + [ + source_location_with_comments + ( loc, + fuse + [ + ( if i > 0 then + Atom "}" + else + Empty ); + Atom raw; + ( if not tail then + Atom "${" + else + Empty ); + ] ); + ( if not tail then + expression (List.nth expressions i) + else + Empty ); + ] + in + fuse [Atom "`"; fuse (List.mapi template_element quasis); Atom "`"] + +and variable_declaration + ?(ctxt = normal_context) (loc, { Ast.Statement.VariableDeclaration.declarations; kind }) = + let kind_layout = + match kind with | Ast.Statement.VariableDeclaration.Var -> Atom "var" | Ast.Statement.VariableDeclaration.Let -> Atom "let" | Ast.Statement.VariableDeclaration.Const -> Atom "const" - end; - space; - begin match declarations with - | [single_decl] -> variable_declarator ~ctxt single_decl - | _ -> - list - ~sep:(Atom ",") - ~inline:(false, true) - ~trailing:false - (List.map (variable_declarator ~ctxt) declarations) - end - ]); - -and variable_declarator ~ctxt (loc, { - Ast.Statement.VariableDeclaration.Declarator.id; - init; -}) = - source_location_with_comments ( - loc, - match init with - | Some expr -> - fuse [ - pattern ~ctxt id; pretty_space; Atom "="; pretty_space; - expression_with_parens ~precedence:precedence_of_assignment ~ctxt expr; - ]; - | None -> pattern ~ctxt id - ) - -and function_ ?(ctxt=normal_context) ~precedence func = - let { Ast.Function.id; generator; _ } = func in - let s_func = fuse [ - Atom "function"; - if generator then Atom "*" else Empty; - ] in - function_base - ~ctxt - ~precedence - ~id:(match id with - | Some id -> fuse [s_func; space; identifier id] - | None -> s_func - ) - func - -and function_base - ~ctxt - ~precedence - ?(arrow=false) - ?(id=Empty) - { Ast.Function. 
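(* A sketch of the declaration-list layout in variable_declaration above: the first
   declarator stays on the keyword's line and the rest are indented, one per separator.
   When any declarator has an initializer the separator is a hard line, so pretty output
   looks like

     const a = 1,
       b = 2;

   while bare declarations such as `let a, b, c;` use only a soft line and can stay on
   a single line when they fit. *)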
- params; body; async; predicate; return; tparams; - expression=_; generator=_; id=_ (* Handled via `function_` *) - } = - let open Ast.Function in - fuse [ - if async then fuse [Atom "async"; space; id] else id; - option type_parameter tparams; - begin match arrow, params, return, predicate, tparams with - | true, (_, { Ast.Function.Params.params = [( - _, - Ast.Pattern.Identifier { - Ast.Pattern.Identifier.optional=false; annot=None; _; - } - )]; rest = None}), Missing _, None, None -> List.hd (function_params ~ctxt params) - | _, _, _, _, _ -> - list - ~wrap:(Atom "(", Atom ")") - ~sep:(Atom ",") - (function_params ~ctxt:normal_context params) - end; - begin match return, predicate with - | Missing _, None -> Empty - | Missing _, Some pred -> fuse [Atom ":"; pretty_space; type_predicate pred] - | Available ret, Some pred -> fuse [ - type_annotation ret; - pretty_space; - type_predicate pred; - ] - | Available ret, None -> type_annotation ret; - end; - if arrow then fuse [ - (* Babylon does not parse ():*=>{}` because it thinks the `*=` is an - unexpected multiply-and-assign operator. Thus, we format this with a - space e.g. `():* =>{}`. *) - begin match return with - | Available (_, (_, Ast.Type.Exists)) -> space - | _ -> pretty_space - end; - Atom "=>"; - ] else Empty; - pretty_space; - begin match body with - | Ast.Function.BodyBlock b -> block b - | Ast.Function.BodyExpression expr -> - let ctxt = if arrow then { normal_context with group=In_arrow_func } - else normal_context in - expression_with_parens ~precedence ~ctxt expr - end; - ] - -and function_params ~ctxt (_, { Ast.Function.Params.params; rest }) = - let s_params = List.map (pattern ~ctxt) params in - match rest with - | Some (loc, {Ast.Function.RestElement.argument}) -> - let s_rest = source_location_with_comments (loc, fuse [ - Atom "..."; pattern ~ctxt argument - ]) in - List.append s_params [s_rest] - | None -> s_params + in + let has_init = + List.exists + (fun var -> + Ast.Statement.VariableDeclaration.Declarator.( + match var with + | (_, { id = _; init = Some _ }) -> true + | _ -> false)) + declarations + in + let sep = + if has_init then + pretty_hardline + else + pretty_line + in + let decls_layout = + match declarations with + | [] -> Empty (* impossible *) + | [single_decl] -> variable_declarator ~ctxt single_decl + | hd :: tl -> + let hd = variable_declarator ~ctxt hd in + let tl = Core_list.map ~f:(variable_declarator ~ctxt) tl in + group [hd; Atom ","; Indent (fuse [sep; join (fuse [Atom ","; sep]) tl])] + in + source_location_with_comments (loc, fuse_with_space [kind_layout; decls_layout]) + +and variable_declarator ~ctxt (loc, { Ast.Statement.VariableDeclaration.Declarator.id; init }) = + source_location_with_comments + ( loc, + match init with + | Some expr -> + fuse + [ + pattern ~ctxt id; + pretty_space; + Atom "="; + pretty_space; + expression_with_parens ~precedence:precedence_of_assignment ~ctxt expr; + ] + | None -> pattern ~ctxt id ) + +and arrow_function + ?(ctxt = normal_context) + ~precedence + { + Ast.Function.params; + body; + async; + predicate; + return; + tparams; + generator = _; + id = _; + (* arrows don't have ids and can't be generators *) sig_loc = _; + } = + let is_single_simple_param = + match params with + | ( _, + { + Ast.Function.Params.params = + [ + ( _, + { + Ast.Function.Param.argument = + ( _, + Ast.Pattern.Identifier + { Ast.Pattern.Identifier.optional = false; annot = Ast.Type.Missing _; _ } + ); + default = None; + } ); + ]; + rest = None; + } ) -> + true + | _ -> false + 
in + let params_and_stuff = + match (is_single_simple_param, return, predicate, tparams) with + | (true, Ast.Type.Missing _, None, None) -> List.hd (function_params ~ctxt params) + | (_, _, _, _) -> + fuse + [ + option type_parameter tparams; + arrow_function_params params; + function_return return predicate; + ] + in + fuse + [ + fuse_with_space + [ + ( if async then + Atom "async" + else + Empty ); + params_and_stuff; + ]; + (* Babylon does not parse ():*=>{}` because it thinks the `*=` is an + unexpected multiply-and-assign operator. Thus, we format this with a + space e.g. `():* =>{}`. *) + begin + match return with + | Ast.Type.Available (_, (_, Ast.Type.Exists)) -> space + | _ -> pretty_space + end; + Atom "=>"; + pretty_space; + begin + match body with + | Ast.Function.BodyBlock b -> block b + | Ast.Function.BodyExpression expr -> + let ctxt = { normal_context with group = In_arrow_func } in + expression_with_parens ~precedence ~ctxt expr + end; + ] + +and arrow_function_params params = + group + [ + new_list + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + (function_params ~ctxt:normal_context params); + ] + +and function_ func = + let { Ast.Function.id; params; body; async; generator; predicate; return; tparams; sig_loc = _ } + = + func + in + let prefix = + let s_func = + fuse + [ + Atom "function"; + ( if generator then + Atom "*" + else + Empty ); + ] + in + let id = + match id with + | Some id -> fuse [s_func; space; identifier id] + | None -> s_func + in + if async then + fuse [Atom "async"; space; id] + else + id + in + function_base ~prefix ~params ~body ~predicate ~return ~tparams + +and function_base ~prefix ~params ~body ~predicate ~return ~tparams = + fuse + [ + prefix; + option type_parameter tparams; + list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") (function_params ~ctxt:normal_context params); + function_return return predicate; + pretty_space; + begin + match body with + | Ast.Function.BodyBlock b -> block b + | Ast.Function.BodyExpression _ -> failwith "Only arrows should have BodyExpressions" + end; + ] + +and function_params ~ctxt (_, { Ast.Function.Params.params; rest }) = + let s_params = + Core_list.map + ~f:(fun (loc, { Ast.Function.Param.argument; default }) -> + let node = pattern ~ctxt argument in + let node = + match default with + | Some expr -> fuse_with_default node expr + | None -> node + in + source_location_with_comments (loc, node)) + params + in + match rest with + | Some (loc, { Ast.Function.RestParam.argument }) -> + let s_rest = source_location_with_comments (loc, fuse [Atom "..."; pattern ~ctxt argument]) in + List.append s_params [s_rest] + | None -> s_params + +and function_return return predicate = + match (return, predicate) with + | (Ast.Type.Missing _, None) -> Empty + | (Ast.Type.Missing _, Some pred) -> fuse [Atom ":"; pretty_space; type_predicate pred] + | (Ast.Type.Available ret, Some pred) -> + fuse [type_annotation ret; pretty_space; type_predicate pred] + | (Ast.Type.Available ret, None) -> type_annotation ret and block (loc, { Ast.Statement.Block.body }) = - source_location_with_comments ( - loc, - if List.length body > 0 then - body - |> statement_list_with_locs ~allow_empty:true ~pretty_semicolon:true - |> list_with_newlines - |> list ~wrap:(Atom "{", Atom "}") ~break:Break_if_pretty - else Atom "{}" - ) + let statements = statement_list ~pretty_semicolon:true body in + source_location_with_comments + ( loc, + if statements <> [] then + group + [ + wrap_and_indent + ~break:pretty_hardline + (Atom "{", Atom "}") + [join 
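(* The is_single_simple_param check in arrow_function above is what lets a one-argument
   arrow drop its parentheses: when the sole parameter is a plain identifier with no
   annotation, no default and no rest element (and the arrow has no return type,
   predicate or type parameters), the parameter is printed bare, as in "x => x + 1";
   anything else keeps the wrapped form, e.g. "(x: number) => x", "(x = 1) => x" or
   "({ a }) => a". *)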
pretty_hardline statements]; + ] + else + Atom "{}" ) and decorators_list decorators = if List.length decorators > 0 then - list - ~wrap:(Empty, flat_ugly_space) - ~inline:(true, false) - ~break:Break_if_pretty - ~indent:0 - (List.map - (fun (_, { Ast.Class.Decorator.expression = expr }) -> fuse [ - Atom "@"; - begin - (* Magic number, after `Call` but before `Update` *) - let precedence = 18 in - expression_with_parens ~precedence ~ctxt:normal_context expr; - end; - ]) + let decorators = + List.map + (fun (_, { Ast.Class.Decorator.expression = expr }) -> + fuse + [ + Atom "@"; + begin + (* Magic number, after `Call` but before `Update` *) + let precedence = 18 in + expression_with_parens ~precedence ~ctxt:normal_context expr + end; + ]) decorators - ) - else Empty - -and class_method ( - loc, - { Ast.Class.Method.kind; key; value=(func_loc, func); static; decorators } -) = - let module M = Ast.Class.Method in - source_location_with_comments (loc, begin - let s_key = object_property_key key in - let s_key = - let { Ast.Function.generator; _ } = func in - fuse [if generator then Atom "*" else Empty; s_key;] in - let s_key = match kind with - | M.Constructor - | M.Method -> s_key - | M.Get -> fuse [Atom "get"; space; s_key] - | M.Set -> fuse [Atom "set"; space; s_key] - in - fuse [ - decorators_list decorators; - if static then fuse [Atom "static"; space] else Empty; - source_location_with_comments ( - func_loc, - function_base - ~ctxt:normal_context - ~precedence:max_precedence - ~id:s_key - func - ) - ] - end - ) - -and class_property_helper loc key value static annot variance = - source_location_with_comments (loc, with_semicolon (fuse [ - if static then fuse [Atom "static"; space] else Empty; - option variance_ variance; - key; - option type_annotation annot; - begin match value with - | Some v -> fuse [ - pretty_space; Atom "="; pretty_space; - expression_with_parens ~precedence:min_precedence ~ctxt:normal_context v; - ] - | None -> Empty - end; - ])) + group [join pretty_line decorators; if_pretty hardline space] + else + Empty -and class_property (loc, { Ast.Class.Property. 
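(* A sketch of how class_method above assembles member headers: decorators come first
   (on their own line in pretty mode), then "static", then the async/get/set prefix and
   the key, with the generator "*" fused directly onto the key. Typical results:

     static async foo() { ... }
     static get bar() { ... }
     *gen() { ... }

   Constructors and plain methods get no kind keyword at all. *)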
- key; value; static; annot; variance -}) = +and class_method (loc, { Ast.Class.Method.kind; key; value = (func_loc, func); static; decorators }) + = + let module M = Ast.Class.Method in + let { + Ast.Function.params; + body; + async; + generator; + predicate; + return; + tparams; + id = _; + (* methods don't use id; see `key` *) sig_loc = _; + } = + func + in + source_location_with_comments + ( loc, + let s_key = object_property_key key in + let s_key = + if generator then + fuse [Atom "*"; s_key] + else + s_key + in + let s_kind = + match kind with + | M.Constructor + | M.Method -> + Empty + | M.Get -> Atom "get" + | M.Set -> Atom "set" + in + (* TODO: getters/setters/constructors will never be async *) + let s_async = + if async then + Atom "async" + else + Empty + in + let prefix = fuse_with_space [s_async; s_kind; s_key] in + fuse + [ + decorators_list decorators; + ( if static then + fuse [Atom "static"; space] + else + Empty ); + source_location_with_comments + (func_loc, function_base ~prefix ~params ~body ~predicate ~return ~tparams); + ] ) + +and class_property_helper loc key value static annot variance_ = + source_location_with_comments + ( loc, + with_semicolon + (fuse + [ + ( if static then + fuse [Atom "static"; space] + else + Empty ); + option variance variance_; + key; + hint type_annotation annot; + begin + match value with + | Some v -> + fuse + [ + pretty_space; + Atom "="; + pretty_space; + expression_with_parens ~precedence:min_precedence ~ctxt:normal_context v; + ] + | None -> Empty + end; + ]) ) + +and class_property (loc, { Ast.Class.Property.key; value; static; annot; variance }) = class_property_helper loc (object_property_key key) value static annot variance -and class_private_field (loc, { Ast.Class.PrivateField. - key = (ident_loc, ident); value; static; annot; variance -}) = - class_property_helper loc (identifier (ident_loc, "#" ^ (snd ident))) value static annot +and class_private_field + ( loc, + { + Ast.Class.PrivateField.key = (ident_loc, (_, { Ast.Identifier.name; comments = _ })); + value; + static; + annot; + variance; + } ) = + class_property_helper + loc + (identifier (Flow_ast_utils.ident_of_source (ident_loc, "#" ^ name))) + value + static + annot variance and class_body (loc, { Ast.Class.Body.body }) = - if List.length body > 0 then - source_location_with_comments ( - loc, - list - ~wrap:(Atom "{", Atom "}") - ~break:Break_if_pretty - (List.map - (function - | Ast.Class.Body.Method meth -> class_method meth - | Ast.Class.Body.Property prop -> class_property prop - | Ast.Class.Body.PrivateField field -> class_private_field field - ) - body - ) - ) - else Atom "{}" - -and class_base { Ast.Class. 
- id; body; tparams; extends; - implements; classDecorators -} = - fuse [ - decorators_list classDecorators; - Atom "class"; - begin match id with - | Some ident -> fuse [ - space; identifier ident; - option type_parameter tparams; - ] - | None -> Empty - end; - begin - let class_extends = [ - begin match extends with - | Some (loc, { Ast.Class.Extends.expr; targs }) -> Some (fuse [ - Atom "extends"; space; - source_location_with_comments (loc, fuse [ - expression expr; - option type_parameter_instantiation targs; - ]) - ]) - | None -> None - end; - begin match implements with - | [] -> None - | _ -> Some (fuse [ - Atom "implements"; space; - fuse_list - ~sep:(Atom ",") - (List.map + if body <> [] then + source_location_with_comments + ( loc, + group + [ + wrap_and_indent + ~break:pretty_hardline + (Atom "{", Atom "}") + [ + join + pretty_hardline + (Core_list.map + ~f:(function + | Ast.Class.Body.Method meth -> class_method meth + | Ast.Class.Body.Property prop -> class_property prop + | Ast.Class.Body.PrivateField field -> class_private_field field) + body); + ]; + ] ) + else + Atom "{}" + +and class_implements implements = + match implements with + | [] -> None + | _ -> + Some + (fuse + [ + Atom "implements"; + space; + fuse_list + ~sep:(Atom ",") + (List.map (fun (loc, { Ast.Class.Implements.id; targs }) -> - source_location_with_comments (loc, fuse [ - identifier id; - option type_parameter_instantiation targs; - ]) - ) - implements - ) - ]) + source_location_with_comments + (loc, fuse [identifier id; option type_parameter_instantiation targs])) + implements); + ]) + +and class_base { Ast.Class.id; body; tparams; extends; implements; classDecorators } = + let decorator_parts = decorators_list classDecorators in + let class_parts = + [ + Atom "class"; + begin + match id with + | Some ident -> fuse [space; identifier ident; option type_parameter tparams] + | None -> Empty + end; + ] + in + let extends_parts = + let class_extends = + [ + begin + match extends with + | Some (loc, { Ast.Class.Extends.expr; targs }) -> + Some + (fuse + [ + Atom "extends"; + space; + source_location_with_comments + (loc, fuse [expression expr; option type_parameter_instantiation targs]); + ]) + | None -> None end; - ] in - match deoptionalize class_extends with - | [] -> Empty - | items -> - list - ~wrap:(flat_space, Empty) - (* Ensure items are space separated when flat *) - ~sep:flat_ugly_space - ~trailing:false - ~inline:(false, true) - items - end; - pretty_space; - class_body body; - ] + class_implements implements; + ] + in + match deoptionalize class_extends with + | [] -> [] + | items -> [Layout.Indent (fuse [line; join line items])] + in + let parts = + [] + |> List.rev_append class_parts + |> List.rev_append extends_parts + |> List.cons pretty_space + |> List.cons (class_body body) + |> List.rev + in + group [decorator_parts; group parts] + +and enum_declaration { Ast.Statement.EnumDeclaration.id; body } = + Ast.Statement.EnumDeclaration.( + let representation_type name explicit = + if explicit then + fuse [space; Atom "of"; space; Atom name] + else + Empty + in + let wrap_body members = + wrap_and_indent ~break:pretty_hardline (Atom "{", Atom "}") [join pretty_hardline members] + in + let defaulted_member (_, { DefaultedMember.id }) = fuse [identifier id; Atom ","] in + let initialized_member id value_str = + fuse [identifier id; pretty_space; Atom "="; pretty_space; Atom value_str; Atom ","] + in + let boolean_member (_, { InitializedMember.id; init = (_, init_value) }) = + initialized_member + 
id + ( if init_value then + "true" + else + "false" ) + in + let number_member (_, { InitializedMember.id; init = (_, { Ast.NumberLiteral.raw; _ }) }) = + initialized_member id raw + in + let string_member (_, { InitializedMember.id; init = (_, { Ast.StringLiteral.raw; _ }) }) = + initialized_member id raw + in + let body = + match body with + | BooleanBody { BooleanBody.members; explicitType } -> + fuse + [ + representation_type "boolean" explicitType; + pretty_space; + wrap_body @@ Core_list.map ~f:boolean_member members; + ] + | NumberBody { NumberBody.members; explicitType } -> + fuse + [ + representation_type "number" explicitType; + pretty_space; + wrap_body @@ Core_list.map ~f:number_member members; + ] + | StringBody { StringBody.members; explicitType } -> + fuse + [ + representation_type "string" explicitType; + pretty_space; + ( wrap_body + @@ + match members with + | StringBody.Defaulted members -> Core_list.map ~f:defaulted_member members + | StringBody.Initialized members -> Core_list.map ~f:string_member members ); + ] + | SymbolBody { SymbolBody.members } -> + fuse + [ + representation_type "symbol" true; + pretty_space; + wrap_body @@ Core_list.map ~f:defaulted_member members; + ] + in + fuse [Atom "enum"; space; identifier id; body]) (* given a list of (loc * layout node) pairs, insert newlines between the nodes when necessary *) -and list_with_newlines (nodes: (Loc.t * Layout.layout_node) list) = - let (nodes, _) = List.fold_left (fun (acc, last_loc) (loc, node) -> - let open Loc in - let acc = match last_loc, node with - (* empty line, don't add anything *) - | _, Empty -> acc - - (* Lines are offset by more than one, let's add a line break *) - | Some { Loc._end; _ }, node when _end.line + 1 < loc.start.line -> - (fuse [pretty_newline; node])::acc - - (* Hasn't matched, just add the node *) - | _, node -> node::acc - in - acc, Some loc - ) ([], None) nodes in +and list_with_newlines (nodes : (Loc.t * Layout.layout_node) list) = + let (nodes, _) = + List.fold_left + (fun (acc, last_loc) (loc, node) -> + Loc.( + let acc = + match (last_loc, node) with + (* empty line, don't add anything *) + | (_, Empty) -> acc + (* Lines are offset by more than one, let's add a line break *) + | (Some { Loc._end; _ }, node) when _end.line + 1 < loc.start.line -> + fuse [pretty_hardline; node] :: acc + (* Hasn't matched, just add the node *) + | (_, node) -> node :: acc + in + (acc, Some loc))) + ([], None) + nodes + in List.rev nodes -and statements_list_with_newlines statements = - statements - |> List.map (fun (loc, s) -> loc, statement ~allow_empty:true (loc, s)) - |> list_with_newlines +and statement_list ?(pretty_semicolon = false) statements = + let rec mapper acc = function + | [] -> List.rev acc + | ((loc, _) as stmt) :: rest -> + let pretty_semicolon = pretty_semicolon && rest = [] in + let acc = (loc, statement ~pretty_semicolon stmt) :: acc in + (mapper [@tailcall]) acc rest + in + mapper [] statements |> list_with_newlines and object_properties_with_newlines properties = let module E = Ast.Expression in let module O = E.Object in let rec has_function_decl = function | O.Property (_, O.Property.Init { value = v; _ }) -> - begin match v with + begin + match v with | (_, E.Function _) - | (_, E.ArrowFunction _) -> true - | (_, E.Object { O.properties }) -> - List.exists has_function_decl properties + | (_, E.ArrowFunction _) -> + true + | (_, E.Object { O.properties; comments = _ }) -> List.exists has_function_decl properties | _ -> false - end + end | O.Property (_, 
O.Property.Get _) - | O.Property (_, O.Property.Set _) -> true + | O.Property (_, O.Property.Set _) -> + true | _ -> false in let (property_labels, _) = List.fold_left - ( - fun (acc, last_p) p -> - match (last_p, p) with - | (None, _) -> (* Never on first line *) - ((object_property p)::acc, Some (has_function_decl p)) - | (Some true, p) -> - ( - (fuse [pretty_newline; object_property p])::acc, - Some (has_function_decl p) - ) - | (_, p) when has_function_decl p -> - ( - (fuse [pretty_newline; object_property p])::acc, - Some true - ) - | _ -> ((object_property p)::acc, Some false) - ) + (fun (acc, last_p) p -> + match (last_p, p) with + | (None, _) -> + (* Never on first line *) + (object_property p :: acc, Some (has_function_decl p)) + | (Some true, p) -> + (fuse [pretty_hardline; object_property p] :: acc, Some (has_function_decl p)) + | (_, p) when has_function_decl p -> + (fuse [pretty_hardline; object_property p] :: acc, Some true) + | _ -> (object_property p :: acc, Some false)) ([], None) - properties in + properties + in List.rev property_labels and object_property_key key = let module O = Ast.Expression.Object in match key with - | O.Property.Literal lit -> literal lit + | O.Property.Literal (loc, lit) -> source_location_with_comments (loc, literal lit) | O.Property.Identifier ident -> identifier ident | O.Property.Computed expr -> - fuse [ - Atom "["; - Sequence ({ seq with break=Break_if_needed }, [expression expr]); - Atom "]"; - ] - | O.Property.PrivateName _ -> - failwith "Internal Error: Found object prop with private name" + fuse [Atom "["; Layout.Indent (fuse [pretty_line; expression expr]); pretty_line; Atom "]"] + | O.Property.PrivateName _ -> failwith "Internal Error: Found object prop with private name" and object_property property = let module O = Ast.Expression.Object in match property with | O.Property (loc, O.Property.Init { key; value; shorthand }) -> - source_location_with_comments (loc, - let s_key = object_property_key key in - if shorthand then s_key - else fuse [ - s_key; Atom ":"; pretty_space; - expression_with_parens ~precedence:min_precedence ~ctxt:normal_context value; - ] - ) + source_location_with_comments + ( loc, + let s_key = object_property_key key in + if shorthand then + s_key + else + group + [ + s_key; + Atom ":"; + pretty_space; + expression_with_parens ~precedence:min_precedence ~ctxt:normal_context value; + ] ) | O.Property (loc, O.Property.Method { key; value = (fn_loc, func) }) -> let s_key = object_property_key key in - let { Ast.Function.generator; _ } = func in - let precedence = max_precedence in - let ctxt = normal_context in - let g_key = fuse [ - if generator then Atom "*" else Empty; - s_key; - ] in - source_location_with_comments (loc, - source_location_with_comments (fn_loc, function_base ~ctxt ~precedence ~id:g_key func) - ) + let { + Ast.Function.id; + params; + body; + async; + generator; + predicate; + return; + tparams; + sig_loc = _; + } = + func + in + assert (id = None); + + (* methods don't have ids, see `key` *) + let prefix = + fuse + [ + ( if async then + fuse [Atom "async"; space] + else + Empty ); + ( if generator then + Atom "*" + else + Empty ); + s_key; + ] + in + source_location_with_comments + ( loc, + source_location_with_comments + (fn_loc, function_base ~prefix ~params ~body ~predicate ~return ~tparams) ) | O.Property (loc, O.Property.Get { key; value = (fn_loc, func) }) -> - let s_key = object_property_key key in - let precedence = max_precedence in - let ctxt = normal_context in - 
source_location_with_comments (loc, - source_location_with_comments (fn_loc, fuse [ - Atom "get"; space; - function_base ~ctxt ~precedence ~id:s_key func - ]) - ) + let { + Ast.Function.id; + params; + body; + async; + generator; + predicate; + return; + tparams; + sig_loc = _; + } = + func + in + assert (id = None); + + (* getters don't have ids, see `key` *) + assert (not async); + + (* getters can't be async *) + assert (not generator); + + (* getters can't be generators *) + let prefix = fuse [Atom "get"; space; object_property_key key] in + source_location_with_comments + ( loc, + source_location_with_comments + (fn_loc, function_base ~prefix ~params ~body ~predicate ~return ~tparams) ) | O.Property (loc, O.Property.Set { key; value = (fn_loc, func) }) -> - let s_key = object_property_key key in - let precedence = max_precedence in - let ctxt = normal_context in - source_location_with_comments (loc, - source_location_with_comments (fn_loc, fuse [ - Atom "set"; space; - function_base ~ctxt ~precedence ~id:s_key func - ]) - ) + let { + Ast.Function.id; + params; + body; + async; + generator; + predicate; + return; + tparams; + sig_loc = _; + } = + func + in + assert (id = None); + + (* setters don't have ids, see `key` *) + assert (not async); + + (* setters can't be async *) + assert (not generator); + + (* setters can't be generators *) + let prefix = fuse [Atom "set"; space; object_property_key key] in + source_location_with_comments + ( loc, + source_location_with_comments + (fn_loc, function_base ~prefix ~params ~body ~predicate ~return ~tparams) ) | O.SpreadProperty (loc, { O.SpreadProperty.argument }) -> source_location_with_comments (loc, fuse [Atom "..."; expression argument]) and jsx_element loc { Ast.JSX.openingElement; closingElement; children } = - fuse [ - begin match openingElement with - | (_, { Ast.JSX.Opening.selfClosing=false; _ }) -> - jsx_opening openingElement; - | (_, { Ast.JSX.Opening.selfClosing=true; _ }) -> - jsx_self_closing openingElement; - end; - jsx_children loc children; - begin match closingElement with - | Some closing -> jsx_closing closing - | _ -> Empty - end; - ] + fuse + [ + begin + match openingElement with + | (_, { Ast.JSX.Opening.selfClosing = false; _ }) -> jsx_opening openingElement + | (_, { Ast.JSX.Opening.selfClosing = true; _ }) -> jsx_self_closing openingElement + end; + jsx_children loc children; + begin + match closingElement with + | Some closing -> jsx_closing closing + | _ -> Empty + end; + ] and jsx_fragment loc { Ast.JSX.frag_openingElement; frag_closingElement; frag_children } = - fuse [ - jsx_fragment_opening frag_openingElement; - jsx_children loc frag_children; - begin match frag_closingElement with - | Some closing -> jsx_closing_fragment closing - | _ -> Empty - end; - ] + fuse + [ + jsx_fragment_opening frag_openingElement; + jsx_children loc frag_children; + jsx_closing_fragment frag_closingElement; + ] -and jsx_identifier (loc, { Ast.JSX.Identifier.name }) = identifier_with_comments (loc, name) +and jsx_identifier (loc, { Ast.JSX.Identifier.name }) = + identifier_with_comments @@ Flow_ast_utils.ident_of_source (loc, name) and jsx_namespaced_name (loc, { Ast.JSX.NamespacedName.namespace; name }) = - source_location_with_comments (loc, fuse [ - jsx_identifier namespace; - Atom ":"; - jsx_identifier name; - ]) + source_location_with_comments + (loc, fuse [jsx_identifier namespace; Atom ":"; jsx_identifier name]) and jsx_member_expression (loc, { Ast.JSX.MemberExpression._object; property }) = - source_location_with_comments 
(loc, fuse [ - begin match _object with - | Ast.JSX.MemberExpression.Identifier ident -> jsx_identifier ident - | Ast.JSX.MemberExpression.MemberExpression member -> - jsx_member_expression member - end; - Atom "."; - jsx_identifier property; - ]) - -and jsx_expression_container { Ast.JSX.ExpressionContainer.expression=expr } = - fuse [ - Atom "{"; - begin match expr with - | Ast.JSX.ExpressionContainer.Expression expr -> expression expr - | Ast.JSX.ExpressionContainer.EmptyExpression loc -> - (* Potentally we will need to inject comments here *) - source_location_with_comments (loc, Empty) - end; - Atom "}"; - ] + source_location_with_comments + ( loc, + fuse + [ + begin + match _object with + | Ast.JSX.MemberExpression.Identifier ident -> jsx_identifier ident + | Ast.JSX.MemberExpression.MemberExpression member -> jsx_member_expression member + end; + Atom "."; + jsx_identifier property; + ] ) + +and jsx_expression_container { Ast.JSX.ExpressionContainer.expression = expr } = + fuse + [ + Atom "{"; + begin + match expr with + | Ast.JSX.ExpressionContainer.Expression expr -> expression expr + | Ast.JSX.ExpressionContainer.EmptyExpression -> Empty + end; + Atom "}"; + ] and jsx_attribute (loc, { Ast.JSX.Attribute.name; value }) = let module A = Ast.JSX.Attribute in - source_location_with_comments (loc, fuse [ - begin match name with - | A.Identifier ident -> jsx_identifier ident - | A.NamespacedName name -> jsx_namespaced_name name - end; - begin match value with - | Some v -> fuse [ - Atom "="; - begin match v with - | A.Literal (loc, lit) -> literal (loc, lit) - | A.ExpressionContainer (loc, express) -> - source_location_with_comments (loc, jsx_expression_container express) - end; - ] - | None -> flat_ugly_space (* TODO we shouldn't do this for the last attr *) - end; - ]) + source_location_with_comments + ( loc, + fuse + [ + begin + match name with + | A.Identifier ident -> jsx_identifier ident + | A.NamespacedName name -> jsx_namespaced_name name + end; + begin + match value with + | Some v -> + fuse + [ + Atom "="; + begin + match v with + | A.Literal (loc, lit) -> source_location_with_comments (loc, literal lit) + | A.ExpressionContainer (loc, express) -> + source_location_with_comments (loc, jsx_expression_container express) + end; + ] + | None -> flat_ugly_space (* TODO we shouldn't do this for the last attr *) + end; + ] ) and jsx_spread_attribute (loc, { Ast.JSX.SpreadAttribute.argument }) = - source_location_with_comments (loc, fuse [ - Atom "{"; - Atom "..."; - expression argument; - Atom "}"; - ]) + source_location_with_comments (loc, fuse [Atom "{"; Atom "..."; expression argument; Atom "}"]) and jsx_element_name = function | Ast.JSX.Identifier ident -> jsx_identifier ident @@ -1631,732 +2092,835 @@ and jsx_opening_attr = function | Ast.JSX.Opening.Attribute attr -> jsx_attribute attr | Ast.JSX.Opening.SpreadAttribute attr -> jsx_spread_attribute attr -and jsx_opening (loc, { Ast.JSX.Opening.name; attributes; selfClosing=_ }) = +and jsx_opening (loc, { Ast.JSX.Opening.name; attributes; selfClosing = _ }) = jsx_opening_helper loc (Some name) attributes -and jsx_fragment_opening loc = - jsx_opening_helper loc None [] +and jsx_fragment_opening loc = jsx_opening_helper loc None [] and jsx_opening_helper loc nameOpt attributes = - source_location_with_comments (loc, fuse [ - Atom "<"; - (match nameOpt with - | Some name -> jsx_element_name name - | None -> Empty); - if List.length attributes > 0 then - list - ~wrap:(flat_space, Empty) - ~inline:(false, true) (* put `>` on end of 
last attr *)
-        (List.map jsx_opening_attr attributes)
-    else Empty;
-    Atom ">";
-  ])
-
-and jsx_self_closing (loc, { Ast.JSX.Opening.
-  name; attributes; selfClosing=_
-}) =
-  source_location_with_comments (loc, fuse [
-    Atom "<";
-    jsx_element_name name;
-    if List.length attributes > 0 then
-      list
-        ~wrap:(flat_space, flat_pretty_space)
-        (List.map jsx_opening_attr attributes)
-    else pretty_space;
-    Atom "/>";
-  ])
+  source_location_with_comments
+    ( loc,
+      group
+        [
+          Atom "<";
+          (match nameOpt with
+          | Some name -> jsx_element_name name
+          | None -> Empty);
+          ( if attributes <> [] then
+            Layout.Indent
+              (fuse [line; join pretty_line (Core_list.map ~f:jsx_opening_attr attributes)])
+          else
+            Empty );
+          Atom ">";
+        ] )
+
+and jsx_self_closing (loc, { Ast.JSX.Opening.name; attributes; selfClosing = _ }) =
+  let attributes = Core_list.map ~f:jsx_opening_attr attributes in
+  source_location_with_comments
+    ( loc,
+      group
+        [
+          Atom "<";
+          jsx_element_name name;
+          ( if attributes <> [] then
+            fuse [Layout.Indent (fuse [line; join pretty_line attributes]); pretty_line]
+          else
+            pretty_space );
+          Atom "/>";
+        ] )
 and jsx_closing (loc, { Ast.JSX.Closing.name }) =
-  source_location_with_comments (loc, fuse [
-    Atom "</";
-    jsx_element_name name;
-    Atom ">";
-  ])
-
-and jsx_closing_fragment loc =
-  source_location_with_comments (loc, fuse [
-    Atom "</>";
-  ])
-
-and jsx_children loc children =
-  let open Loc in
-  let processed_children = deoptionalize (List.map jsx_child children) in
-  (* Check for empty children *)
-  if List.length processed_children <= 0 then Empty
-  (* If start and end lines don't match check inner breaks *)
-  else if loc._end.line > loc.start.line then begin
-    let children_n, _ = List.fold_left
-      (fun (children_n, last_line) (loc, child) ->
-        let child_n = SourceLocation (loc, child) in
-        let formatted_child_n = match last_line with
-        (* First child, newlines will always be forced via the `fuse_vertically` below *)
-        | None -> child_n
-        (* If the current child and the previous child line positions are offset match this via forcing a newline *)
-        | Some last_line when loc.start.line > last_line ->
-          (* TODO: Remove the `Newline` hack, this forces newlines to exist when using the compact printer *)
-          fuse [Newline; child_n]
-        (* Must be on the same line as the previous child *)
-        | Some _ -> child_n
-        in
-        formatted_child_n::children_n, Some loc._end.line
-      )
-      ([], None)
-      processed_children
-    in
-    fuse_vertically ~indent:2 [fuse (List.rev children_n)]
-  (* Single line *)
-  end else fuse (List.map (fun (loc, child) -> SourceLocation (loc, child)) processed_children)
+  source_location_with_comments (loc, fuse [Atom "</"; jsx_element_name name; Atom ">"])
+
+and jsx_closing_fragment loc = source_location_with_comments (loc, fuse [Atom "</>"])
+
+and jsx_children loc (_children_loc, children) =
+  Loc.(
+    let processed_children = deoptionalize (Core_list.map ~f:jsx_child children) in
+    (* Check for empty children *)
+    if List.length processed_children <= 0 then
+      Empty
+    (* If start and end lines don't match check inner breaks *)
+    else if loc._end.line > loc.start.line then
+      let (children_n, _) =
+        List.fold_left
+          (fun (children_n, last_line) (loc, child) ->
+            let child_n = SourceLocation (loc, child) in
+            let formatted_child_n =
+              match last_line with
+              (* First child, newlines will always be forced via the `pretty_hardline` below *)
+              | None -> child_n
+              (* If the current child and the previous child line positions are offset match this via forcing a newline *)
+              | Some last_line when loc.start.line > last_line ->
+                (* TODO: Remove the `Newline` hack, this forces newlines to exist when using the compact printer *)
+
fuse [Newline; child_n] + (* Must be on the same line as the previous child *) + | Some _ -> child_n + in + (formatted_child_n :: children_n, Some loc._end.line)) + ([], None) + processed_children + in + fuse [Layout.Indent (fuse (pretty_hardline :: List.rev children_n)); pretty_hardline] + (* Single line *) + else + fuse (Core_list.map ~f:(fun (loc, child) -> SourceLocation (loc, child)) processed_children)) and jsx_child (loc, child) = match child with | Ast.JSX.Element elem -> Some (loc, jsx_element loc elem) | Ast.JSX.Fragment frag -> Some (loc, jsx_fragment loc frag) - | Ast.JSX.ExpressionContainer express -> - Some (loc, jsx_expression_container express) - | Ast.JSX.SpreadChild expr -> Some (loc, fuse [ - Atom "{..."; - expression expr; - Atom "}" - ]) + | Ast.JSX.ExpressionContainer express -> Some (loc, jsx_expression_container express) + | Ast.JSX.SpreadChild expr -> Some (loc, fuse [Atom "{..."; expression expr; Atom "}"]) | Ast.JSX.Text { Ast.JSX.Text.raw; _ } -> - begin match Utils_jsx.trim_jsx_text loc raw with - | Some (loc, txt) -> Some (loc, Atom txt) - | None -> None + begin + match Utils_jsx.trim_jsx_text loc raw with + | Some (loc, txt) -> Some (loc, Atom txt) + | None -> None end and partition_specifiers default specifiers = - let open Ast.Statement.ImportDeclaration in - let special, named = match specifiers with - | Some (ImportNamespaceSpecifier (loc, id)) -> - [import_namespace_specifier (loc, id)], None - | Some (ImportNamedSpecifiers named_specifiers) -> - [], Some (import_named_specifiers named_specifiers) - | None -> - [], None - in - match default with - | Some default -> (identifier default)::special, named - | None -> special, named + Ast.Statement.ImportDeclaration.( + let (special, named) = + match specifiers with + | Some (ImportNamespaceSpecifier (loc, id)) -> ([import_namespace_specifier (loc, id)], None) + | Some (ImportNamedSpecifiers named_specifiers) -> + ([], Some (import_named_specifiers named_specifiers)) + | None -> ([], None) + in + match default with + | Some default -> (identifier default :: special, named) + | None -> (special, named)) and import_namespace_specifier (loc, id) = - source_location_with_comments (loc, fuse [ - Atom "*"; pretty_space; Atom "as"; space; identifier id - ]) - -and import_named_specifier { Ast.Statement.ImportDeclaration. - kind; local; remote -} = - fuse [ - Ast.Statement.ImportDeclaration.(match kind with - | Some ImportType -> fuse [Atom "type"; space] - | Some ImportTypeof -> fuse [Atom "typeof"; space] - | Some ImportValue - | None -> Empty - ); - identifier remote; - match local with - | Some id -> fuse [ - space; - Atom "as"; - space; - identifier id; + source_location_with_comments + (loc, fuse [Atom "*"; pretty_space; Atom "as"; space; identifier id]) + +and import_named_specifier { Ast.Statement.ImportDeclaration.kind; local; remote } = + fuse + [ + Ast.Statement.ImportDeclaration.( + match kind with + | Some ImportType -> fuse [Atom "type"; space] + | Some ImportTypeof -> fuse [Atom "typeof"; space] + | Some ImportValue + | None -> + Empty); + identifier remote; + (match local with + | Some id -> fuse [space; Atom "as"; space; identifier id] + | None -> Empty); ] - | None -> Empty - ] and import_named_specifiers named_specifiers = - list - ~wrap:(Atom "{", Atom "}") - ~sep:(Atom ",") - (List.map import_named_specifier named_specifiers) - -and import_declaration { Ast.Statement.ImportDeclaration. 
- importKind; source; specifiers; default -} = - let s_from = fuse [Atom "from"; pretty_space;] in - let module I = Ast.Statement.ImportDeclaration in - with_semicolon (fuse [ - Atom "import"; - begin match importKind with - | I.ImportType -> fuse [space; Atom "type"] - | I.ImportTypeof -> fuse [space; Atom "typeof"] - | I.ImportValue -> Empty - end; - begin match partition_specifiers default specifiers, importKind with - (* No export specifiers *) - (* `import 'module-name';` *) - | ([], None), I.ImportValue -> pretty_space - (* `import type {} from 'module-name';` *) - | ([], None), (I.ImportType | I.ImportTypeof) -> - fuse [pretty_space; Atom "{}"; pretty_space; s_from] - (* Only has named specifiers *) - | ([], Some named), _ -> fuse [ - pretty_space; named; pretty_space; s_from; - ] - (* Only has default or namedspaced specifiers *) - | (special, None), _ -> fuse [ - space; - fuse_list ~sep:(Atom ",") special; - space; - s_from; - ] - (* Has both default or namedspaced specifiers and named specifiers *) - | (special, Some named), _ -> fuse [ - space; - fuse_list ~sep:(Atom ",") (special@[named]); - pretty_space; - s_from; + group + [ + new_list + ~wrap:(Atom "{", Atom "}") + ~sep:(Atom ",") + (Core_list.map ~f:import_named_specifier named_specifiers); ] - end; - string_literal source; - ]) + +and import_declaration { Ast.Statement.ImportDeclaration.importKind; source; specifiers; default } + = + let s_from = fuse [Atom "from"; pretty_space] in + let module I = Ast.Statement.ImportDeclaration in + with_semicolon + (fuse + [ + Atom "import"; + begin + match importKind with + | I.ImportType -> fuse [space; Atom "type"] + | I.ImportTypeof -> fuse [space; Atom "typeof"] + | I.ImportValue -> Empty + end; + begin + match (partition_specifiers default specifiers, importKind) with + (* No export specifiers *) + (* `import 'module-name';` *) + | (([], None), I.ImportValue) -> pretty_space + (* `import type {} from 'module-name';` *) + | (([], None), (I.ImportType | I.ImportTypeof)) -> + fuse [pretty_space; Atom "{}"; pretty_space; s_from] + (* Only has named specifiers *) + | (([], Some named), _) -> fuse [pretty_space; named; pretty_space; s_from] + (* Only has default or namedspaced specifiers *) + | ((special, None), _) -> fuse [space; fuse_list ~sep:(Atom ",") special; space; s_from] + (* Has both default or namedspaced specifiers and named specifiers *) + | ((special, Some named), _) -> + fuse [space; fuse_list ~sep:(Atom ",") (special @ [named]); pretty_space; s_from] + end; + string_literal source; + ]) and export_source ~prefix = function - | Some lit -> fuse [ - prefix; - Atom "from"; - pretty_space; - string_literal lit; - ] + | Some lit -> fuse [prefix; Atom "from"; pretty_space; string_literal lit] | None -> Empty -and export_specifier source = Ast.Statement.ExportNamedDeclaration.(function - | ExportSpecifiers specifiers -> fuse [ - list - ~wrap:(Atom "{", Atom "}") - ~sep:(Atom ",") - (List.map - (fun (loc, { ExportSpecifier.local; exported }) -> source_location_with_comments ( - loc, - fuse [ - identifier local; - begin match exported with - | Some export -> fuse [ - space; - Atom "as"; - space; - identifier export; - ] - | None -> Empty - end; - ] - )) - specifiers - ); - export_source ~prefix:pretty_space source; +and export_specifier source = + Ast.Statement.ExportNamedDeclaration.( + function + | ExportSpecifiers specifiers -> + fuse + [ + group + [ + new_list + ~wrap:(Atom "{", Atom "}") + ~sep:(Atom ",") + (List.map + (fun (loc, { ExportSpecifier.local; exported }) -> + 
source_location_with_comments + ( loc, + fuse + [ + identifier local; + begin + match exported with + | Some export -> fuse [space; Atom "as"; space; identifier export] + | None -> Empty + end; + ] )) + specifiers); + ]; + export_source ~prefix:pretty_space source; + ] + | ExportBatchSpecifier (loc, Some ident) -> + fuse + [ + source_location_with_comments + (loc, fuse [Atom "*"; pretty_space; Atom "as"; space; identifier ident]); + export_source ~prefix:space source; + ] + | ExportBatchSpecifier (loc, None) -> + fuse + [source_location_with_comments (loc, Atom "*"); export_source ~prefix:pretty_space source]) + +and export_declaration + { Ast.Statement.ExportNamedDeclaration.declaration; specifiers; source; exportKind } = + fuse + [ + Atom "export"; + begin + match (declaration, specifiers) with + | (Some decl, None) -> fuse [space; statement decl] + | (None, Some specifier) -> + with_semicolon + (fuse + [ + begin + match exportKind with + | Ast.Statement.ExportType -> fuse [space; Atom "type"] + | Ast.Statement.ExportValue -> Empty + end; + pretty_space; + export_specifier source specifier; + ]) + | (_, _) -> failwith "Invalid export declaration" + end; ] - | ExportBatchSpecifier (loc, Some ident) -> fuse [ - source_location_with_comments (loc, fuse [ - Atom "*"; - pretty_space; - Atom "as"; - space; - identifier ident; - ]); - export_source ~prefix:space source; - ] - | ExportBatchSpecifier (loc, None) -> fuse [ - source_location_with_comments (loc, Atom "*"); - export_source ~prefix:pretty_space source; + +and export_default_declaration { Ast.Statement.ExportDefaultDeclaration.default = _; declaration } + = + fuse + [ + Atom "export"; + space; + Atom "default"; + space; + Ast.Statement.ExportDefaultDeclaration.( + match declaration with + | Declaration stat -> statement stat + | Expression expr -> with_semicolon (expression expr)); ] - ) - -and export_declaration { Ast.Statement.ExportNamedDeclaration. - declaration; specifiers; source; exportKind -} = - fuse [ - Atom "export"; - begin match declaration, specifiers with - | Some decl, None -> fuse [space; statement decl] - | None, Some specifier -> with_semicolon (fuse [ - begin match exportKind with - | Ast.Statement.ExportType -> fuse [ - space; - Atom "type"; - ] - | Ast.Statement.ExportValue -> Empty - end; - pretty_space; - export_specifier source specifier; - ]) - | _, _ -> failwith "Invalid export declaration" - end; - ] - -and export_default_declaration { Ast.Statement.ExportDefaultDeclaration. 
- default=_; declaration -} = - fuse [ - Atom "export"; space; Atom "default"; space; - Ast.Statement.ExportDefaultDeclaration.(match declaration with - | Declaration stat -> statement stat - | Expression expr -> with_semicolon (expression expr) - ); - ] - -and variance_ (loc, var) = - source_location_with_comments ( - loc, - match var with - | Ast.Variance.Plus -> Atom "+" - | Ast.Variance.Minus -> Atom "-" - ) + +and variance (loc, var) = + source_location_with_comments + ( loc, + match var with + | Ast.Variance.Plus -> Atom "+" + | Ast.Variance.Minus -> Atom "-" ) and switch_case ~last (loc, { Ast.Statement.Switch.Case.test; consequent }) = - let case_left = match test with - | Some expr -> - fuse_with_space [ - Atom "case"; - fuse [expression expr; Atom ":"] + let case_left = + match test with + | Some expr -> fuse_with_space [Atom "case"; fuse [expression expr; Atom ":"]] + | None -> Atom "default:" + in + source_location_with_comments + ( loc, + match consequent with + | [] -> case_left + | _ -> + let statements = statement_list ~pretty_semicolon:last consequent in + fuse [case_left; Indent (fuse [pretty_hardline; join pretty_hardline statements])] ) + +and type_param + ( _, + { + Ast.Type.ParameterDeclaration.TypeParam.name = (loc, { Ast.Identifier.name; comments = _ }); + bound; + variance = variance_; + default; + } ) = + fuse + [ + option variance variance_; + source_location_with_comments (loc, Atom name); + hint type_annotation bound; + begin + match default with + | Some t -> fuse [pretty_space; Atom "="; pretty_space; type_ t] + | None -> Empty + end; ] - | None -> Atom "default:" in - source_location_with_comments ( - loc, - match consequent with - | [] -> case_left - | _ -> - list - ~wrap:(case_left, Empty) - ~break:Break_if_pretty - (statement_list ~pretty_semicolon:last consequent) - ) - -and type_param (_, { Ast.Type.ParameterDeclaration.TypeParam. 
- name = (loc, name); bound; variance; default -}) = - fuse [ - option variance_ variance; - source_location_with_comments (loc, Atom name); - option type_annotation bound; - begin match default with - | Some t -> fuse [ - pretty_space; - Atom "="; - pretty_space; - type_ t; - ] - | None -> Empty - end; - ] and type_parameter (loc, params) = - source_location_with_comments ( - loc, - list - ~wrap:(Atom "<", Atom ">") - ~sep:(Atom ",") - (List.map type_param params) - ) + source_location_with_comments + ( loc, + group + [new_list ~wrap:(Atom "<", Atom ">") ~sep:(Atom ",") (Core_list.map ~f:type_param params)] + ) + +and type_parameter_instantiation_with_implicit (loc, args) = + source_location_with_comments + ( loc, + group + [ + new_list + ~wrap:(Atom "<", Atom ">") + ~sep:(Atom ",") + (Core_list.map ~f:explicit_or_implicit args); + ] ) and type_parameter_instantiation (loc, args) = - source_location_with_comments ( - loc, - list - ~wrap:(Atom "<", Atom ">") - ~sep:(Atom ",") - (List.map type_ args) - ) + source_location_with_comments + (loc, group [new_list ~wrap:(Atom "<", Atom ">") ~sep:(Atom ",") (Core_list.map ~f:type_ args)]) and type_alias ~declare { Ast.Statement.TypeAlias.id; tparams; right } = - with_semicolon (fuse [ - if declare then fuse [Atom "declare"; space;] else Empty; - Atom "type"; space; - identifier id; - option type_parameter tparams; - pretty_space; Atom "="; pretty_space; - type_ right; - ]) - -and opaque_type ~declare { Ast.Statement.OpaqueType.id; tparams; impltype; supertype} = - with_semicolon (fuse ([ - if declare then fuse [Atom "declare"; space;] else Empty; - Atom "opaque type"; space; - identifier id; - option type_parameter tparams] - @ (match supertype with - | Some t -> [Atom ":"; pretty_space; type_ t] - | None -> []) - @ (match impltype with - | Some impltype -> [pretty_space; Atom "="; pretty_space; type_ impltype] - | None -> []))) - -and type_annotation (loc, t) = - source_location_with_comments (loc, fuse [ - Atom ":"; - pretty_space; - type_ t; - ]) + with_semicolon + (fuse + [ + ( if declare then + fuse [Atom "declare"; space] + else + Empty ); + Atom "type"; + space; + identifier id; + option type_parameter tparams; + pretty_space; + Atom "="; + pretty_space; + type_ right; + ]) + +and opaque_type ~declare { Ast.Statement.OpaqueType.id; tparams; impltype; supertype } = + with_semicolon + (fuse + ( [ + ( if declare then + fuse [Atom "declare"; space] + else + Empty ); + Atom "opaque type"; + space; + identifier id; + option type_parameter tparams; + ] + @ (match supertype with + | Some t -> [Atom ":"; pretty_space; type_ t] + | None -> []) + @ + match impltype with + | Some impltype -> [pretty_space; Atom "="; pretty_space; type_ impltype] + | None -> [] )) + +and type_annotation ?(parens = false) (loc, t) = + source_location_with_comments + ( loc, + fuse + [ + Atom ":"; + pretty_space; + ( if parens then + wrap_in_parens (type_ t) + else + type_ t ); + ] ) and type_predicate (loc, pred) = - source_location_with_comments (loc, fuse [ - Atom "%checks"; - Ast.Type.Predicate.(match pred with - | Declared expr -> wrap_in_parens (expression expr) - | Inferred -> Empty - ); - ]) + source_location_with_comments + ( loc, + fuse + [ + Atom "%checks"; + Ast.Type.Predicate.( + match pred with + | Declared expr -> wrap_in_parens (expression expr) + | Inferred -> Empty); + ] ) and type_union_or_intersection ~sep ts = let sep = fuse [sep; pretty_space] in list ~inline:(false, true) (List.mapi - (fun i t -> fuse [ - if i = 0 then IfBreak (sep, Empty) else sep; 
- type_with_parens t; - ]) - ts - ) + (fun i t -> + fuse + [ + ( if i = 0 then + IfBreak (sep, Empty) + else + sep ); + type_with_parens t; + ]) + ts) + +and type_function_param (loc, { Ast.Type.Function.Param.name; annot; optional }) = + source_location_with_comments + ( loc, + fuse + [ + begin + match name with + | Some id -> + fuse + [ + identifier id; + ( if optional then + Atom "?" + else + Empty ); + Atom ":"; + pretty_space; + ] + | None -> Empty + end; + type_ annot; + ] ) -and type_function_param (loc, { Ast.Type.Function.Param. - name; annot; optional -}) = - source_location_with_comments (loc, fuse [ - begin match name with - | Some id -> fuse [ - identifier id; - if optional then Atom "?" else Empty; - Atom ":"; - pretty_space; - ] - | None -> Empty - end; - type_ annot; - ]) - -and type_function ~sep { Ast.Type.Function. - params = (_, { Ast.Type.Function.Params.params; rest = restParams}); - return; - tparams; -} = - let params = List.map type_function_param params in - let params = match restParams with - | Some (loc, { Ast.Type.Function.RestParam.argument }) -> params@[ - source_location_with_comments (loc, fuse [Atom "..."; type_function_param argument]); +and type_function + ~sep + { + Ast.Type.Function.params = (_, { Ast.Type.Function.Params.params; rest = restParams }); + return; + tparams; + } = + let params = Core_list.map ~f:type_function_param params in + let params = + match restParams with + | Some (loc, { Ast.Type.Function.RestParam.argument }) -> + params + @ [source_location_with_comments (loc, fuse [Atom "..."; type_function_param argument])] + | None -> params + in + fuse + [ + option type_parameter tparams; + group + [ + new_list (* Calls should not allow a trailing comma *) + ~trailing_sep:false + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + params; + ]; + sep; + pretty_space; + type_ return; ] - | None -> params + +and type_object_property = + Ast.Type.Object.( + function + | Property + (loc, { Property.key; value; optional; static; proto; variance = variance_; _method }) -> + let s_static = + if static then + fuse [Atom "static"; space] + else + Empty + in + let s_proto = + if proto then + fuse [Atom "proto"; space] + else + Empty + in + source_location_with_comments + ( loc, + match (value, _method, proto, optional) with + (* Functions with no special properties can be rendered as methods *) + | (Property.Init (loc, Ast.Type.Function func), true, false, false) -> + source_location_with_comments + (loc, fuse [s_static; object_property_key key; type_function ~sep:(Atom ":") func]) + (* Normal properties *) + | (Property.Init t, _, _, _) -> + fuse + [ + s_static; + s_proto; + option variance variance_; + object_property_key key; + ( if optional then + Atom "?" 
+ else + Empty ); + Atom ":"; + pretty_space; + type_ t; + ] + (* Getters/Setters *) + | (Property.Get (loc, func), _, _, _) -> + source_location_with_comments + ( loc, + fuse + [Atom "get"; space; object_property_key key; type_function ~sep:(Atom ":") func] + ) + | (Property.Set (loc, func), _, _, _) -> + source_location_with_comments + ( loc, + fuse + [Atom "set"; space; object_property_key key; type_function ~sep:(Atom ":") func] + ) ) + | SpreadProperty (loc, { SpreadProperty.argument }) -> + source_location_with_comments (loc, fuse [Atom "..."; type_ argument]) + | Indexer (loc, { Indexer.id; key; value; static; variance = variance_ }) -> + source_location_with_comments + ( loc, + fuse + [ + ( if static then + fuse [Atom "static"; space] + else + Empty ); + option variance variance_; + Atom "["; + begin + match id with + | Some id -> fuse [identifier id; Atom ":"; pretty_space] + | None -> Empty + end; + type_ key; + Atom "]"; + Atom ":"; + pretty_space; + type_ value; + ] ) + | CallProperty (loc, { CallProperty.value = (call_loc, func); static }) -> + source_location_with_comments + ( loc, + fuse + [ + ( if static then + fuse [Atom "static"; space] + else + Empty ); + source_location_with_comments (call_loc, type_function ~sep:(Atom ":") func); + ] ) + | InternalSlot (loc, { InternalSlot.id; value; optional; static; _method = _ }) -> + source_location_with_comments + ( loc, + fuse + [ + ( if static then + fuse [Atom "static"; space] + else + Empty ); + Atom "[["; + identifier id; + Atom "]]"; + ( if optional then + Atom "?" + else + Empty ); + Atom ":"; + pretty_space; + type_ value; + ] )) + +and type_object ?(sep = Atom ",") { Ast.Type.Object.exact; properties; inexact } = + let s_exact = + if exact then + Atom "|" + else + Empty in - fuse [ - option type_parameter tparams; - list - ~wrap:(Atom "(", Atom ")") - ~sep:(Atom ",") - params; - sep; - pretty_space; - type_ return; - ] - -and type_object_property = Ast.Type.Object.(function - | Property (loc, { Property. - key; value; optional; static; proto; variance; _method; - }) -> - let s_static = if static then fuse [Atom "static"; space] else Empty in - let s_proto = if proto then fuse [Atom "proto"; space] else Empty in - source_location_with_comments ( - loc, - match value, _method, proto, optional with - (* Functions with no special properties can be rendered as methods *) - | Property.Init (loc, Ast.Type.Function func), true, false, false -> - source_location_with_comments (loc, fuse [ - s_static; - object_property_key key; - type_function ~sep:(Atom ":") func; - ]) - (* Normal properties *) - | Property.Init t, _, _, _ -> fuse [ - s_static; - s_proto; - option variance_ variance; - object_property_key key; - if optional then Atom "?" 
else Empty; - Atom ":"; - pretty_space; - type_ t - ] - (* Getters/Setters *) - | Property.Get (loc, func), _, _, _ -> source_location_with_comments (loc, fuse [ - Atom "get"; space; - object_property_key key; - type_function ~sep:(Atom ":") func; - ]) - | Property.Set (loc, func), _, _, _ -> source_location_with_comments (loc, fuse [ - Atom "set"; space; - object_property_key key; - type_function ~sep:(Atom ":") func; - ]) - ) - | SpreadProperty (loc, { SpreadProperty.argument }) -> - source_location_with_comments (loc, fuse [ - Atom "..."; - type_ argument; - ]) - | Indexer (loc, { Indexer.id; key; value; static; variance }) -> - source_location_with_comments (loc, fuse [ - if static then fuse [Atom "static"; space] else Empty; - option variance_ variance; - Atom "["; - begin match id with - | Some id -> fuse [ - identifier id; Atom ":"; pretty_space; - ] - | None -> Empty - end; - type_ key; - Atom "]"; Atom ":"; pretty_space; - type_ value; - ]) - | CallProperty (loc, { CallProperty.value=(call_loc, func); static }) -> - source_location_with_comments (loc, fuse [ - if static then fuse [Atom "static"; space] else Empty; - source_location_with_comments (call_loc, type_function ~sep:(Atom ":") func); - ]) - | InternalSlot (loc, { InternalSlot.id; value; optional; static; _method=_ }) -> - source_location_with_comments (loc, fuse [ - if static then fuse [Atom "static"; space] else Empty; - Atom "[["; - identifier id; - Atom "]]"; - if optional then Atom "?" else Empty; - Atom ":"; pretty_space; - type_ value; - ]) - ) - -and type_object ?(sep=(Atom ",")) { Ast.Type.Object.exact; properties } = - let s_exact = if exact then Atom "|" else Empty in - list - ~wrap:(fuse [Atom "{"; s_exact], fuse [s_exact; Atom "}"]) - ~sep - (List.map type_object_property properties) + let props = Core_list.map ~f:type_object_property properties in + let props = + if inexact then + props @ [Atom "..."] + else + props + in + group [new_list ~wrap:(fuse [Atom "{"; s_exact], fuse [s_exact; Atom "}"]) ~sep props] -and type_interface { Ast.Type.Interface.extends; body=(loc, obj) } = - fuse [ - Atom "interface"; - interface_extends extends; - pretty_space; - source_location_with_comments (loc, type_object ~sep:(Atom ",") obj) - ] +and type_interface { Ast.Type.Interface.extends; body = (loc, obj) } = + fuse + [ + Atom "interface"; + interface_extends extends; + pretty_space; + source_location_with_comments (loc, type_object ~sep:(Atom ",") obj); + ] and interface_extends = function | [] -> Empty - | xs -> fuse [ - space; Atom "extends"; space; - fuse_list - ~sep:(Atom ",") - (List.map - (fun (loc, generic) -> source_location_with_comments (loc, type_generic generic)) - xs - ) - ] + | xs -> + fuse + [ + space; + Atom "extends"; + space; + fuse_list + ~sep:(Atom ",") + (List.map + (fun (loc, generic) -> source_location_with_comments (loc, type_generic generic)) + xs); + ] and type_generic { Ast.Type.Generic.id; targs } = - let rec generic_identifier = Ast.Type.Generic.Identifier.(function - | Unqualified id -> identifier id - | Qualified (loc, { qualification; id }) -> - source_location_with_comments (loc, fuse [ - generic_identifier qualification; - Atom "."; - identifier id; - ]) - ) in - fuse [ - generic_identifier id; - option type_parameter_instantiation targs; - ] + let rec generic_identifier = + Ast.Type.Generic.Identifier.( + function + | Unqualified id -> identifier id + | Qualified (loc, { qualification; id }) -> + source_location_with_comments + (loc, fuse [generic_identifier qualification; Atom "."; 
identifier id])) + in + fuse [generic_identifier id; option type_parameter_instantiation targs] and type_with_parens t = let module T = Ast.Type in match t with | (_, T.Function _) | (_, T.Union _) - | (_, T.Intersection _) -> wrap_in_parens (type_ t) + | (_, T.Intersection _) -> + wrap_in_parens (type_ t) | _ -> type_ t -and type_ ((loc, t): (Loc.t, Loc.t) Ast.Type.t) = +and type_ ((loc, t) : (Loc.t, Loc.t) Ast.Type.t) = let module T = Ast.Type in - source_location_with_comments ( - loc, - match t with - | T.Any -> Atom "any" - | T.Mixed -> Atom "mixed" - | T.Empty -> Atom "empty" - | T.Void -> Atom "void" - | T.Null -> Atom "null" - | T.Number -> Atom "number" - | T.String -> Atom "string" - | T.Boolean -> Atom "boolean" - | T.Nullable t -> - fuse [ - Atom "?"; - type_with_parens t; - ] - | T.Function func -> - type_function - ~sep:(fuse [pretty_space; Atom "=>"]) - func - | T.Object obj -> type_object obj - | T.Interface i -> type_interface i - | T.Array t -> fuse [Atom "Array<"; type_ t; Atom ">"] - | T.Generic generic -> type_generic generic - | T.Union (t1, t2, ts) -> - type_union_or_intersection ~sep:(Atom "|") (t1::t2::ts) - | T.Intersection (t1, t2, ts) -> - type_union_or_intersection ~sep:(Atom "&") (t1::t2::ts) - | T.Typeof t -> fuse [Atom "typeof"; space; type_ t] - | T.Tuple ts -> - list - ~wrap:(Atom "[", Atom "]") - ~sep:(Atom ",") - (List.map type_ ts) - | T.StringLiteral { Ast.StringLiteral.raw; _ } - | T.NumberLiteral { Ast.NumberLiteral.raw; _ } -> Atom raw - | T.BooleanLiteral value -> Atom (if value then "true" else "false") - | T.Exists -> Atom "*" - ) - -and interface_declaration_base ~def { Ast.Statement.Interface. - id; tparams; body=(loc, obj); extends -} = - fuse [ - def; - identifier id; - option type_parameter tparams; - interface_extends extends; - pretty_space; - source_location_with_comments (loc, type_object ~sep:(Atom ",") obj) - ] + source_location_with_comments + ( loc, + match t with + | T.Any -> Atom "any" + | T.Mixed -> Atom "mixed" + | T.Empty -> Atom "empty" + | T.Void -> Atom "void" + | T.Null -> Atom "null" + | T.Number -> Atom "number" + | T.BigInt -> Atom "bigint" + | T.String -> Atom "string" + | T.Boolean -> Atom "boolean" + | T.Nullable t -> fuse [Atom "?"; type_with_parens t] + | T.Function func -> type_function ~sep:(fuse [pretty_space; Atom "=>"]) func + | T.Object obj -> type_object obj + | T.Interface i -> type_interface i + | T.Array t -> fuse [Atom "Array<"; type_ t; Atom ">"] + | T.Generic generic -> type_generic generic + | T.Union (t1, t2, ts) -> type_union_or_intersection ~sep:(Atom "|") (t1 :: t2 :: ts) + | T.Intersection (t1, t2, ts) -> type_union_or_intersection ~sep:(Atom "&") (t1 :: t2 :: ts) + | T.Typeof t -> fuse [Atom "typeof"; space; type_ t] + | T.Tuple ts -> + group [new_list ~wrap:(Atom "[", Atom "]") ~sep:(Atom ",") (Core_list.map ~f:type_ ts)] + | T.StringLiteral lit -> string_literal_type lit + | T.NumberLiteral t -> number_literal_type t + | T.BigIntLiteral { Ast.BigIntLiteral.bigint; _ } -> Atom bigint + | T.BooleanLiteral value -> + Atom + ( if value then + "true" + else + "false" ) + | T.Exists -> Atom "*" ) + +and explicit_or_implicit + (x : (Loc.t, Loc.t) Ast.Expression.TypeParameterInstantiation.type_parameter_instantiation) = + Ast.Expression.TypeParameterInstantiation.( + match x with + | Implicit _ -> Atom "_" + | Explicit t -> type_ t) + +and interface_declaration_base + ~def { Ast.Statement.Interface.id; tparams; body = (loc, obj); extends } = + fuse + [ + def; + identifier id; + option type_parameter 
tparams; + interface_extends extends; + pretty_space; + source_location_with_comments (loc, type_object ~sep:(Atom ",") obj); + ] and interface_declaration interface = interface_declaration_base ~def:(fuse [Atom "interface"; space]) interface and declare_interface interface = - interface_declaration_base ~def:(fuse [ - Atom "declare"; space; - Atom "interface"; space; - ]) interface - -and declare_class ?(s_type=Empty) { Ast.Statement.DeclareClass. - id; tparams; body=(loc, obj); extends; mixins=_; implements=_; -} = - (* TODO: What are mixins? *) - (* TODO: Print implements *) - fuse [ - Atom "declare"; space; - s_type; - Atom "class"; space; - identifier id; - option type_parameter tparams; - begin match extends with - | None -> Empty - | Some (loc, generic) -> fuse [ - space; Atom "extends"; space; - source_location_with_comments (loc, type_generic generic) + interface_declaration_base ~def:(fuse [Atom "declare"; space; Atom "interface"; space]) interface + +and declare_class + ?(s_type = Empty) + { Ast.Statement.DeclareClass.id; tparams; body = (loc, obj); extends; mixins; implements } = + let class_parts = + [ + Atom "declare"; + space; + s_type; + Atom "class"; + space; + identifier id; + option type_parameter tparams; + ] + in + let extends_parts = + let class_extends = + [ + begin + match extends with + | Some (loc, generic) -> + Some + (fuse + [Atom "extends"; space; source_location_with_comments (loc, type_generic generic)]) + | None -> None + end; + begin + match mixins with + | [] -> None + | xs -> + Some + (fuse + [ + Atom "mixins"; + space; + fuse_list + ~sep:(Atom ",") + (List.map + (fun (loc, generic) -> + source_location_with_comments (loc, type_generic generic)) + xs); + ]) + end; + class_implements implements; ] - end; - pretty_space; - source_location_with_comments (loc, type_object ~sep:(Atom ",") obj) - ] - -and declare_function ?(s_type=Empty) { Ast.Statement.DeclareFunction. - id; annot=(loc, t); predicate -} = - with_semicolon (fuse [ - Atom "declare"; space; - s_type; - Atom "function"; space; - identifier id; - source_location_with_comments (loc, match t with - | loc, Ast.Type.Function func -> - source_location_with_comments (loc, type_function ~sep:(Atom ":") func) - | _ -> failwith "Invalid DeclareFunction" - ); - begin match predicate with - | Some pred -> fuse [pretty_space; type_predicate pred] - | None -> Empty; - end; - ]) - -and declare_variable ?(s_type=Empty) { Ast.Statement.DeclareVariable. 
- id; annot -} = - with_semicolon (fuse [ - Atom "declare"; space; - s_type; - Atom "var"; space; - identifier id; - option type_annotation annot; - ]) + in + match deoptionalize class_extends with + | [] -> Empty + | items -> Layout.Indent (fuse [line; join line items]) + in + let body = source_location_with_comments (loc, type_object ~sep:(Atom ",") obj) in + let parts = + [] + |> List.rev_append class_parts + |> List.cons extends_parts + |> List.cons pretty_space + |> List.cons body + |> List.rev + in + group parts + +and declare_function + ?(s_type = Empty) { Ast.Statement.DeclareFunction.id; annot = (loc, t); predicate } = + with_semicolon + (fuse + [ + Atom "declare"; + space; + s_type; + Atom "function"; + space; + identifier id; + source_location_with_comments + ( loc, + match t with + | (loc, Ast.Type.Function func) -> + source_location_with_comments (loc, type_function ~sep:(Atom ":") func) + | _ -> failwith "Invalid DeclareFunction" ); + begin + match predicate with + | Some pred -> fuse [pretty_space; type_predicate pred] + | None -> Empty + end; + ]) + +and declare_variable ?(s_type = Empty) { Ast.Statement.DeclareVariable.id; annot } = + with_semicolon + (fuse + [ + Atom "declare"; + space; + s_type; + Atom "var"; + space; + identifier id; + hint type_annotation annot; + ]) and declare_module_exports annot = - with_semicolon (fuse [ - Atom "declare"; space; - Atom "module.exports"; - type_annotation annot; - ]) - -and declare_module { Ast.Statement.DeclareModule.id; body; kind=_ } = - fuse [ - Atom "declare"; space; - Atom "module"; space; - begin match id with - | Ast.Statement.DeclareModule.Identifier id -> identifier id - | Ast.Statement.DeclareModule.Literal lit -> string_literal lit - end; - pretty_space; - block body; - ] - -and declare_export_declaration { Ast.Statement.DeclareExportDeclaration. 
- default; declaration; specifiers; source -} = - let s_export = fuse [ - Atom "export"; space; - if Option.is_some default then fuse [Atom "default"; space] else Empty; - ] in - match declaration, specifiers with - | Some decl, None -> Ast.Statement.DeclareExportDeclaration.(match decl with - (* declare export var *) - | Variable (loc, var) -> - source_location_with_comments (loc, declare_variable ~s_type:s_export var) - (* declare export function *) - | Function (loc, func) -> - source_location_with_comments (loc, declare_function ~s_type:s_export func) - (* declare export class *) - | Class (loc, c) -> - source_location_with_comments (loc, declare_class ~s_type:s_export c) - (* declare export default [type] - * this corresponds to things like - * export default 1+1; *) - | DefaultType t -> - with_semicolon (fuse [ - Atom "declare"; space; s_export; - type_ t; - ]) - (* declare export type *) - | NamedType (loc, typeAlias) -> - source_location_with_comments (loc, fuse [ - Atom "declare"; space; s_export; - type_alias ~declare:false typeAlias; - ]) - (* declare export opaque type *) - | NamedOpaqueType (loc, opaqueType) -> - source_location_with_comments (loc, fuse [ - Atom "declare"; space; s_export; - opaque_type ~declare:false opaqueType; - ]) - (* declare export interface *) - | Interface (loc, interface) -> - source_location_with_comments (loc, fuse [ - Atom "declare"; space; s_export; - interface_declaration interface; - ]) - ); - | None, Some specifier -> fuse [ - Atom "declare"; space; - Atom "export"; pretty_space; - export_specifier source specifier; + with_semicolon (fuse [Atom "declare"; space; Atom "module.exports"; type_annotation annot]) + +and declare_module { Ast.Statement.DeclareModule.id; body; kind = _ } = + fuse + [ + Atom "declare"; + space; + Atom "module"; + space; + begin + match id with + | Ast.Statement.DeclareModule.Identifier id -> identifier id + | Ast.Statement.DeclareModule.Literal lit -> string_literal lit + end; + pretty_space; + block body; ] - | _, _ -> failwith "Invalid declare export declaration" + +and declare_export_declaration + { Ast.Statement.DeclareExportDeclaration.default; declaration; specifiers; source } = + let s_export = + fuse + [ + Atom "export"; + space; + ( if Option.is_some default then + fuse [Atom "default"; space] + else + Empty ); + ] + in + match (declaration, specifiers) with + | (Some decl, None) -> + Ast.Statement.DeclareExportDeclaration.( + (match decl with + (* declare export var *) + | Variable (loc, var) -> + source_location_with_comments (loc, declare_variable ~s_type:s_export var) + (* declare export function *) + | Function (loc, func) -> + source_location_with_comments (loc, declare_function ~s_type:s_export func) + (* declare export class *) + | Class (loc, c) -> source_location_with_comments (loc, declare_class ~s_type:s_export c) + (* declare export default [type] + * this corresponds to things like + * export default 1+1; *) + | DefaultType t -> with_semicolon (fuse [Atom "declare"; space; s_export; type_ t]) + (* declare export type *) + | NamedType (loc, typeAlias) -> + source_location_with_comments + (loc, fuse [Atom "declare"; space; s_export; type_alias ~declare:false typeAlias]) + (* declare export opaque type *) + | NamedOpaqueType (loc, opaqueType) -> + source_location_with_comments + (loc, fuse [Atom "declare"; space; s_export; opaque_type ~declare:false opaqueType]) + (* declare export interface *) + | Interface (loc, interface) -> + source_location_with_comments + (loc, fuse [Atom "declare"; space; 
s_export; interface_declaration interface]))) + | (None, Some specifier) -> + fuse [Atom "declare"; space; Atom "export"; pretty_space; export_specifier source specifier] + | (_, _) -> failwith "Invalid declare export declaration" diff --git a/src/parser_utils/output/js_layout_generator.mli b/src/parser_utils/output/js_layout_generator.mli index c27c7644a56..5779d48071e 100644 --- a/src/parser_utils/output/js_layout_generator.mli +++ b/src/parser_utils/output/js_layout_generator.mli @@ -1,62 +1,94 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module LocMap = Utils_js.LocMap - type expression_context = { left: expression_context_left; group: expression_context_group; } + and expression_context_left = | Normal_left | In_expression_statement | In_tagged_template | In_plus_op | In_minus_op + and expression_context_group = | Normal_group | In_arrow_func | In_for_init type comment_attach = - | Preceding - | Enclosing - | Following + | Preceding + | Enclosing + | Following type comment_map = - (comment_attach * (Loc.t, Loc.t) Flow_ast.Statement.t * Loc.t Flow_ast.Comment.t) - list LocMap.t + (comment_attach * (Loc.t, Loc.t) Flow_ast.Statement.t * Loc.t Flow_ast.Comment.t) list + Loc_collections.LocMap.t -val normal_context: expression_context +val normal_context : expression_context -val with_attached_comments: comment_map option ref +val with_attached_comments : comment_map option ref -val program: +val program : preserve_docblock:bool -> checksum:string option -> - (Loc.t, Loc.t) Flow_ast.program -> Layout.layout_node -val program_simple: - (Loc.t, Loc.t) Flow_ast.program -> Layout.layout_node -val expression: ?ctxt:expression_context -> (Loc.t, Loc.t) Flow_ast.Expression.t -> Layout.layout_node -val statement: - ?allow_empty:bool -> - ?pretty_semicolon:bool -> - (Loc.t, Loc.t) Flow_ast.Statement.t -> Layout.layout_node -val object_property: (Loc.t, Loc.t) Flow_ast.Expression.Object.property -> Layout.layout_node -val class_method: (Loc.t, Loc.t) Flow_ast.Class.Method.t -> Layout.layout_node -val class_property: (Loc.t, Loc.t) Flow_ast.Class.Property.t -> Layout.layout_node -val class_private_field: (Loc.t, Loc.t) Flow_ast.Class.PrivateField.t -> Layout.layout_node -val type_: (Loc.t, Loc.t) Flow_ast.Type.t -> Layout.layout_node -val type_annotation: (Loc.t, Loc.t) Flow_ast.Type.annotation -> Layout.layout_node -val identifier: Loc.t Flow_ast.Identifier.t -> Layout.layout_node -val pattern: ?ctxt:expression_context -> (Loc.t, Loc.t) Flow_ast.Pattern.t -> Layout.layout_node -val comment: Loc.t Flow_ast.Comment.t -> Layout.layout_node - -val better_quote: string -> string -val utf8_escape: quote:string -> string -> string -val wrap_in_parens: Layout.layout_node -> Layout.layout_node -val with_semicolon: Layout.layout_node -> Layout.layout_node + (Loc.t, Loc.t) Flow_ast.program -> + Layout.layout_node + +val program_simple : (Loc.t, Loc.t) Flow_ast.program -> Layout.layout_node + +val literal : Loc.t Flow_ast.Literal.t -> Layout.layout_node + +val number_literal_type : Flow_ast.NumberLiteral.t -> Layout.layout_node + +val string_literal_type : Flow_ast.StringLiteral.t -> Layout.layout_node + +val expression : + ?ctxt:expression_context -> (Loc.t, Loc.t) Flow_ast.Expression.t -> Layout.layout_node + +val statement : ?pretty_semicolon:bool -> (Loc.t, Loc.t) Flow_ast.Statement.t -> Layout.layout_node + +val 
object_property : (Loc.t, Loc.t) Flow_ast.Expression.Object.property -> Layout.layout_node + +val class_method : (Loc.t, Loc.t) Flow_ast.Class.Method.t -> Layout.layout_node + +val class_property : (Loc.t, Loc.t) Flow_ast.Class.Property.t -> Layout.layout_node + +val class_private_field : (Loc.t, Loc.t) Flow_ast.Class.PrivateField.t -> Layout.layout_node + +val type_ : (Loc.t, Loc.t) Flow_ast.Type.t -> Layout.layout_node + +val variance : Loc.t Flow_ast.Variance.t -> Layout.layout_node + +val type_param : + (Loc.t, Loc.t) Flow_ast.Type.ParameterDeclaration.TypeParam.t -> Layout.layout_node + +val type_annotation : ?parens:bool -> (Loc.t, Loc.t) Flow_ast.Type.annotation -> Layout.layout_node + +val identifier : (Loc.t, Loc.t) Flow_ast.Identifier.t -> Layout.layout_node + +val pattern : ?ctxt:expression_context -> (Loc.t, Loc.t) Flow_ast.Pattern.t -> Layout.layout_node + +val comment : Loc.t Flow_ast.Comment.t -> Layout.layout_node + +val template_literal : (Loc.t, Loc.t) Flow_ast.Expression.TemplateLiteral.t -> Layout.layout_node + +val jsx_identifier : Loc.t Flow_ast.JSX.Identifier.t -> Layout.layout_node + +val jsx_child : (Loc.t, Loc.t) Flow_ast.JSX.child -> (Loc.t * Layout.layout_node) option + +val arrow_function_params : (Loc.t, Loc.t) Flow_ast.Function.Params.t -> Layout.layout_node + +val better_quote : string -> string + +val utf8_escape : quote:string -> string -> string + +val wrap_in_parens : Layout.layout_node -> Layout.layout_node + +val with_semicolon : Layout.layout_node -> Layout.layout_node diff --git a/src/parser_utils/output/json_sourcemap.ml b/src/parser_utils/output/json_sourcemap.ml new file mode 100644 index 00000000000..8434d11ca9e --- /dev/null +++ b/src/parser_utils/output/json_sourcemap.ml @@ -0,0 +1,52 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Translator from Hh_json to Sourcemap's abstract JSON representation *) +module Json_translator = struct + type t = Hh_json.json + + let of_string x = Hh_json.JSON_String x + + let of_obj props = Hh_json.JSON_Object props + + let of_array arr = Hh_json.JSON_Array arr + + let of_number x = Hh_json.JSON_Number x + + let null = Hh_json.JSON_Null + + let to_string t = + match t with + | Hh_json.JSON_String x -> x + | _ -> raise (Hh_json.Syntax_error "expected string") + + let to_obj t = + match t with + | Hh_json.JSON_Object x -> x + | _ -> raise (Hh_json.Syntax_error "expected object") + + let to_array t = + match t with + | Hh_json.JSON_Array x -> x + | _ -> raise (Hh_json.Syntax_error "expected array") + + let to_number t = + match t with + | Hh_json.JSON_Number x -> x + | _ -> raise (Hh_json.Syntax_error "expected number") + + let is_null t = t = Hh_json.JSON_Null +end + +module Writer = Sourcemap.Make_json_writer (Json_translator) +module Reader = Sourcemap.Make_json_reader (Json_translator) + +let json_of_sourcemap map = Writer.json_of_sourcemap map + +let sourcemap_of_json json = Reader.sourcemap_of_json json + +let sourcemap_of_string str = sourcemap_of_json (Hh_json.json_of_string str) diff --git a/src/parser_utils/output/json_sourcemap.mli b/src/parser_utils/output/json_sourcemap.mli new file mode 100644 index 00000000000..5193499e680 --- /dev/null +++ b/src/parser_utils/output/json_sourcemap.mli @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +val json_of_sourcemap : Sourcemap.t -> Hh_json.json + +val sourcemap_of_json : Hh_json.json -> Sourcemap.t + +val sourcemap_of_string : string -> Sourcemap.t diff --git a/src/parser_utils/output/layout.ml b/src/parser_utils/output/layout.ml index d37fca23370..5c94f82b0a3 100644 --- a/src/parser_utils/output/layout.ml +++ b/src/parser_utils/output/layout.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -13,7 +13,7 @@ type layout_node = (* A list of nodes to try to fit on one line *) | Group of layout_node list (* Join elements, allow for breaking over over lines *) - | Sequence of list_config * (layout_node list) + | Sequence of list_config * layout_node list (* Increase the indentation *) | Indent of layout_node (* Force a line break *) @@ -41,40 +41,70 @@ and list_config = { indent: int; } -let seq = { - break = Break_if_needed; - inline = (false, false); - indent = 2; -} +let seq = { break = Break_if_needed; inline = (false, false); indent = 2 } (* Whitespace utils *) let space = Atom " " + let pretty_space = IfPretty (space, Empty) + let ugly_space = IfPretty (Empty, space) -let flat_space = IfBreak (Empty, space) -let flat_pretty_space = IfBreak (Empty, pretty_space) + let flat_ugly_space = IfBreak (Empty, ugly_space) +let hardline = Newline + (* Force a line break (`\n`) in pretty mode *) -let pretty_newline = IfPretty (Newline, Empty) +let pretty_hardline = IfPretty (Newline, Empty) + (* Inserts a line break (`\n`) if the code doesn't fit on one line, otherwise a space *) -let line = IfBreak (Newline, pretty_space) +let line = IfBreak (Newline, space) + +(* Inserts a line break (`\n`) if the code doesn't fit on one line, otherwise a pretty space *) +let pretty_line = IfBreak (Newline, pretty_space) + (* Inserts a line break (`\n`) if the code doesn't fit on one line, otherwise nothing *) let softline = IfBreak (Newline, Empty) let if_pretty if_ else_ = - if if_ = Empty && else_ = Empty then Empty else IfPretty (if_, else_) + if if_ = Empty && else_ = Empty then + Empty + else + IfPretty (if_, else_) let if_break if_ else_ = - if if_ = Empty && else_ = Empty then Empty else IfBreak (if_, else_) + if if_ = Empty && else_ = Empty then + Empty + else + IfBreak (if_, else_) + +let group items = + let items = + List.rev + (List.fold_left + (fun acc -> function + | Empty -> acc + | Concat more -> List.rev_append more acc + | item -> item :: acc) + [] + items) + in + match items with + | [(Group _ as hd)] -> hd + | _ -> Group items (* Fuse a list of items together, no spaces or breaks will be inserted *) let fuse items = - let items = List.rev (List.fold_left (fun acc -> function - | Empty -> acc - | Concat more -> List.rev_append more acc - | item -> item::acc - ) [] items) in + let items = + List.rev + (List.fold_left + (fun acc -> function + | Empty -> acc + | Concat more -> List.rev_append more acc + | item -> item :: acc) + [] + items) + in match items with | [] -> Empty | [item] -> item @@ -83,260 +113,315 @@ let fuse items = let join sep nodes = let rec helper acc = function | [] -> List.rev acc - | hd::tl -> - let acc = if acc = [] then [hd] else hd::sep::acc in + | hd :: tl -> + let acc = + if acc = [] then + [hd] + else + hd :: sep :: acc + in 
helper acc tl in fuse (helper [] nodes) -(* Fuse a list of items to align vertically *) -let fuse_vertically - ?(indent=0) - ?(inline=(false, false)) - items = - Sequence ({ break=Break_if_pretty; indent; inline }, items) - let fuse_list = let rec helper ~sep acc = function - | [] -> fuse (List.rev acc) - | item::[] -> helper ~sep (item::acc) [] - | item::items -> helper ~sep (pretty_space::sep::item::acc) items + | [] -> fuse (List.rev acc) + | [item] -> helper ~sep (item :: acc) [] + | item :: items -> helper ~sep (pretty_space :: sep :: item :: acc) items + in + fun ?(sep = Empty) ?(wrap = (Empty, Empty)) items -> + fuse [fst wrap; helper ~sep [] items; snd wrap] + +let wrap_and_indent ?break (before, after) items = + let break = + match break with + | Some break -> break + | None -> softline + in + let layout = + if items = [] then + Empty + else + fuse [Indent (fuse (break :: items)); break] in - fun ?(sep=Empty) ?(wrap=(Empty, Empty)) items -> - fuse [ - fst wrap; - helper ~sep [] items; - snd wrap; - ] + fuse [before; layout; after] + +let new_list + ?(wrap = (Empty, Empty)) ?(sep = Empty) ?(wrap_spaces = false) ?(trailing_sep = true) items = + let items_layout = + if items = [] then + items + else + [ + join (fuse [sep; pretty_line]) items; + ( if trailing_sep then + if_break (Atom ",") Empty + else + Empty ); + ] + in + let break = + if wrap_spaces then + Some pretty_line + else + None + in + wrap_and_indent ?break wrap items_layout (* All purpose list *) let list - ?(break=Break_if_needed) - ?(wrap=(Empty, Empty)) - ?(sep=Empty) - ?(trailing=true) - ?(inline=(false, false)) - ?(indent=2) - items = + ?(break = Break_if_needed) + ?(wrap = (Empty, Empty)) + ?(sep = Empty) + ?(trailing = true) + ?(inline = (false, false)) + ?(indent = 2) + items = let add_seperator is_last item = - fuse [ - item; - if_break - ( - if is_last && trailing then if_pretty sep Empty - else if not is_last then sep - else Empty - ) - (if is_last then Empty else fuse [sep; pretty_space]) - ] in + fuse + [ + item; + if_break + ( if is_last && trailing then + if_pretty sep Empty + else if not is_last then + sep + else + Empty ) + ( if is_last then + Empty + else + fuse [sep; pretty_space] ); + ] + in let items_count = List.length items - 1 in - let layout_items = fuse [ - fst wrap; - Sequence ( - { break; inline; indent }, - List.mapi (fun i item -> add_seperator (i = items_count) item) items - ); - snd wrap; - ] in + let layout_items = + fuse + [ + fst wrap; + Sequence + ( { break; inline; indent }, + List.mapi (fun i item -> add_seperator (i = items_count) item) items ); + snd wrap; + ] + in (* Wrap items in additional sequence so `IfBreak`s within wrap are not triggered by adjacent lists. *) - Sequence ( - { break=Break_if_needed; inline=(true, true); indent=0 }, - [layout_items] - ) + Sequence ({ break = Break_if_needed; inline = (true, true); indent = 0 }, [layout_items]) +(* Takes a list of layout nodes and intersperses spaces: a `space` if a space is necessary + to separate two tokens, or a `pretty_space` if it's only needed for aesthetics. Generally a + space is required, except if the last char of one node or the first char of the next node is + a punctuator, then spaces are only for aesthetics (e.g. `new Foo` vs `new(Foo)`) *) let fuse_with_space = let is_punctuator = function - | '{' | '(' | ')' | '[' | ']' | '.' - | ';' | ',' | '<' | '>' | '=' | '!' - | '+' | '-' | '*' | '%' | '^' | '&' - | '~' | '|' | '?' 
| ':' | '/' | '"' - | '\'' -> true - | _ -> false + | '{' + | '(' + | ')' + | '[' + | ']' + | '.' + | ';' + | ',' + | '<' + | '>' + | '=' + | '!' + | '+' + | '-' + | '*' + | '%' + | '^' + | '&' + | '~' + | '|' + | '?' + | ':' + | '/' + | '"' + | '\'' -> + true + | _ -> false in let rec ugly_char ~mode = function | Atom str | Identifier (_, str) -> - if str = "" then None else - Some (if mode = `First then str.[0] else str.[String.length str - 1]) + if str = "" then + None + else + Some + ( if mode = `First then + str.[0] + else + str.[String.length str - 1] ) | Empty -> None | Indent node -> ugly_char ~mode node | Newline -> None | SourceLocation (_, node) | IfPretty (_, node) - | IfBreak (_, node) -> ugly_char ~mode node + | IfBreak (_, node) -> + ugly_char ~mode node | Concat nodes | Group nodes | Sequence (_, nodes) -> - let nodes = if mode = `First then nodes else List.rev nodes in - List.fold_left (fun acc node -> - match acc with Some _ -> acc | None -> ugly_char ~mode node - ) None nodes + let nodes = + if mode = `First then + nodes + else + List.rev nodes + in + List.fold_left + (fun acc node -> + match acc with + | Some _ -> acc + | None -> ugly_char ~mode node) + None + nodes in let opt_punctuator = function - | Some x when is_punctuator x -> true - | _ -> false + | Some x when is_punctuator x -> true + | _ -> false in let rec helper acc = function - | a::b::rest -> - let prev = ugly_char ~mode:`Last a |> opt_punctuator in - let next = ugly_char ~mode:`First b |> opt_punctuator in - let sp = if prev || next then pretty_space else space in - helper (sp::a::acc) (b::rest) - | a::[] -> - helper (a::acc) [] - | [] -> - List.rev acc + | Empty :: rest -> helper acc rest + | a :: Empty :: rest -> helper acc (a :: rest) + | a :: b :: rest -> + let prev = ugly_char ~mode:`Last a |> opt_punctuator in + let next = ugly_char ~mode:`First b |> opt_punctuator in + let sp = + if prev || next then + pretty_space + else + space + in + helper (sp :: a :: acc) (b :: rest) + | [a] -> helper (a :: acc) [] + | [] -> List.rev acc in - fun nodes -> fuse (helper [] nodes) + (fun nodes -> fuse (helper [] nodes)) module Debug : sig - val string_of_layout: layout_node -> string - val layout_of_layout: layout_node -> layout_node + val string_of_layout : layout_node -> string + + val layout_of_layout : layout_node -> layout_node end = struct let spf = Printf.sprintf let debug_string_of_loc loc = - let open Loc in - spf "%d:%d-%d:%d" - loc.start.line loc.start.column - loc._end.line loc._end.column + Loc.(spf "%d:%d-%d:%d" loc.start.line loc.start.column loc._end.line loc._end.column) let debug_string_of_when_to_break = function - | Break_if_needed -> "Break_if_needed" - | Break_if_pretty -> "Break_if_pretty" + | Break_if_needed -> "Break_if_needed" + | Break_if_pretty -> "Break_if_pretty" let rec string_of_layout = function | SourceLocation (loc, child) -> - spf "SourceLocation (%s, %s)" - (debug_string_of_loc loc) - (string_of_layout child) - + spf "SourceLocation (%s, %s)" (debug_string_of_loc loc) (string_of_layout child) | Concat items -> - let items = - items - |> List.map string_of_layout - |> String.concat "; " - in + let items = items |> Core_list.map ~f:string_of_layout |> String.concat "; " in spf "Concat [%s]" items - | Group items -> - let items = - items - |> List.map string_of_layout - |> String.concat "; " - in + let items = items |> Core_list.map ~f:string_of_layout |> String.concat "; " in spf "Group [%s]" items - - | Sequence ({ break; inline=(inline_before, inline_after); indent; }, 
node_list) -> - let config = spf - "{break=%s; inline=(%b, %b); indent=%d}" - (debug_string_of_when_to_break break) - inline_before inline_after - indent - in - let nodes = - node_list - |> List.map string_of_layout - |> String.concat "; " + | Sequence ({ break; inline = (inline_before, inline_after); indent }, node_list) -> + let config = + spf + "{break=%s; inline=(%b, %b); indent=%d}" + (debug_string_of_when_to_break break) + inline_before + inline_after + indent in + let nodes = node_list |> Core_list.map ~f:string_of_layout |> String.concat "; " in spf "Sequence (%s, [%s])" config nodes - - | Atom str -> - spf "Atom %S" str - - | Identifier (loc, str) -> - spf "Identifier (%s, %S)" (debug_string_of_loc loc) str - + | Atom str -> spf "Atom %S" str + | Identifier (loc, str) -> spf "Identifier (%s, %S)" (debug_string_of_loc loc) str | IfPretty (left, right) -> - spf "IfPretty (%s, %s)" - (string_of_layout left) - (string_of_layout right) - + spf "IfPretty (%s, %s)" (string_of_layout left) (string_of_layout right) | IfBreak (left, right) -> - spf "IfBreak (%s, %s)" - (string_of_layout left) - (string_of_layout right) - + spf "IfBreak (%s, %s)" (string_of_layout left) (string_of_layout right) | Indent node -> spf "Indent (%s)" (string_of_layout node) | Newline -> "Newline" | Empty -> "Empty" let rec layout_of_layout = function | SourceLocation (loc, child) -> - Concat [ - Atom "SourceLocation"; - pretty_space; - list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") - [Atom (debug_string_of_loc loc); layout_of_layout child]; - ] - + Concat + [ + Atom "SourceLocation"; + pretty_space; + list + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + [Atom (debug_string_of_loc loc); layout_of_layout child]; + ] | Concat items -> - Concat [ - Atom "Concat"; - pretty_space; - list ~wrap:(Atom "[", Atom "]") ~sep:(Atom ";") - (List.map layout_of_layout items); - ] - + Concat + [ + Atom "Concat"; + pretty_space; + list ~wrap:(Atom "[", Atom "]") ~sep:(Atom ";") (Core_list.map ~f:layout_of_layout items); + ] | Group items -> - Concat [ - Atom "Group"; - pretty_space; - list ~wrap:(Atom "[", Atom "]") ~sep:(Atom ";") - (List.map layout_of_layout items); - ] - - | Sequence ({ break; inline=(inline_before, inline_after); indent; }, node_list) -> - let config = list ~wrap:(Atom "{", Atom "}") ~sep:(Atom ";") [ - Atom (spf "break=%s" (debug_string_of_when_to_break break)); - Atom (spf "inline=(%b, %b)" inline_before inline_after); - Atom (spf "indent=%d" indent); - ] in - let nodes = list ~wrap:(Atom "[", Atom "]") ~sep:(Atom ";") - (List.map layout_of_layout node_list) in - Concat [ - Atom "Sequence"; - pretty_space; - list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") - [config; nodes]; - ] - - | Atom str -> - Atom (spf "Atom %S" str) - - | Identifier (loc, str) -> - Atom (spf "Identifier (%s, %S)" (debug_string_of_loc loc) str) - + Concat + [ + Atom "Group"; + pretty_space; + list ~wrap:(Atom "[", Atom "]") ~sep:(Atom ";") (Core_list.map ~f:layout_of_layout items); + ] + | Sequence ({ break; inline = (inline_before, inline_after); indent }, node_list) -> + let config = + list + ~wrap:(Atom "{", Atom "}") + ~sep:(Atom ";") + [ + Atom (spf "break=%s" (debug_string_of_when_to_break break)); + Atom (spf "inline=(%b, %b)" inline_before inline_after); + Atom (spf "indent=%d" indent); + ] + in + let nodes = + list + ~wrap:(Atom "[", Atom "]") + ~sep:(Atom ";") + (Core_list.map ~f:layout_of_layout node_list) + in + Concat + [ + Atom "Sequence"; + pretty_space; + list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") [config; nodes]; 
+ ] + | Atom str -> Atom (spf "Atom %S" str) + | Identifier (loc, str) -> Atom (spf "Identifier (%s, %S)" (debug_string_of_loc loc) str) | IfPretty (left, right) -> - Concat [ - Atom "IfPretty"; - pretty_space; - list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") [ - (layout_of_layout left); - (layout_of_layout right); - ]; - ] - + Concat + [ + Atom "IfPretty"; + pretty_space; + list + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + [layout_of_layout left; layout_of_layout right]; + ] | IfBreak (left, right) -> - Concat [ - Atom "IfBreak"; - pretty_space; - list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") [ - (layout_of_layout left); - (layout_of_layout right); - ]; - ] - + Concat + [ + Atom "IfBreak"; + pretty_space; + list + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + [layout_of_layout left; layout_of_layout right]; + ] | Indent child -> - Concat [ - Atom "Indent"; - pretty_space; - list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") [ - (layout_of_layout child); - ]; - ] - + Concat + [ + Atom "Indent"; + pretty_space; + list ~wrap:(Atom "(", Atom ")") ~sep:(Atom ",") [layout_of_layout child]; + ] | Newline -> Atom "Newline" | Empty -> Atom "Empty" end diff --git a/src/parser_utils/output/printers/__tests__/parser_utils_output_printers_tests.ml b/src/parser_utils/output/printers/__tests__/parser_utils_output_printers_tests.ml new file mode 100644 index 00000000000..4e2397fcb4d --- /dev/null +++ b/src/parser_utils/output/printers/__tests__/parser_utils_output_printers_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "printers" >::: [Pretty_printer_test.tests] + +let () = run_test_tt_main tests diff --git a/src/parser_utils/output/printers/__tests__/pretty_printer_test.ml b/src/parser_utils/output/printers/__tests__/pretty_printer_test.ml index 9dc92874134..aa9b009d518 100644 --- a/src/parser_utils/output/printers/__tests__/pretty_printer_test.ml +++ b/src/parser_utils/output/printers/__tests__/pretty_printer_test.ml @@ -1,231 +1,243 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) open Layout - open OUnit2 let space_regex = Str.regexp_string " " +let flat_pretty_space = IfBreak (Empty, pretty_space) + let assert_pretty_print ~ctxt ?msg expected_str layout = - let out = layout - |> Pretty_printer.print ~source_maps:None - |> Source.contents - |> String.trim - in + let out = layout |> Pretty_printer.print ~source_maps:None |> Source.contents |> String.trim in let printer x = Str.global_replace space_regex "\xE2\x90\xA3" x (* open box *) in assert_equal ~ctxt ?msg ~printer expected_str out -let tests = "pretty_printer" >::: [ - "breaks_in_list" >:: - begin fun ctxt -> - let layout = fuse [ - Atom "return"; space; - list - ~wrap:(IfBreak (Atom "(", Empty), IfBreak (Atom ")", Empty)) - [Atom "null"]; - ] in - assert_pretty_print ~ctxt "return null" layout; - - let long_string = String.make 80 'x' in - let layout = fuse [ - Atom "return"; space; - list - ~wrap:(IfBreak (Atom "(", Empty), IfBreak (Atom ")", Empty)) - [Atom long_string]; - ] in - assert_pretty_print ~ctxt ("return (\n "^long_string^"\n)") layout; - end; - - "force_breaks_in_list" >:: - begin fun ctxt -> - let short_string = String.make 10 'x' in - let layout = fuse [ - Atom "myFunc"; - list - ~wrap:(Atom "(", Atom ")") - ~sep:(Atom ",") - [ - Atom "a"; - fuse [ - Atom "b"; space; Atom "=>"; - fuse_vertically ~indent:2 ~inline:(false, true) [Atom short_string]; - ]; - ]; - ] in - assert_pretty_print ~ctxt ("myFunc(\n a,\n b =>\n "^short_string^",\n)") layout; - end; - - "sequence_inline_after" >:: - begin fun ctxt -> - let short_string = String.make 10 'x' in - let long_string = String.make 80 'x' in - - begin - let layout = fuse [ - Atom short_string; - Sequence ({ break = Break_if_needed; inline = (false, true); indent = 2 }, [ - fuse [flat_pretty_space; Atom short_string]; - ]); - ] in - assert_pretty_print ~ctxt (short_string^" "^short_string) layout - end; - - begin - let layout = fuse [ - fuse [ - Atom short_string; - Sequence ({ break = Break_if_needed; inline = (false, true); indent = 2 }, [ - fuse [flat_pretty_space; Atom short_string]; - ]); - ]; - Sequence ({ break = Break_if_needed; inline = (false, true); indent = 2 }, [ - fuse [flat_pretty_space; Atom short_string]; - ]); - ] in - assert_pretty_print ~ctxt (short_string^" "^short_string^" "^short_string) layout - end; - - begin - let layout = fuse [ - Atom long_string; - Sequence ({ break = Break_if_needed; inline = (false, true); indent = 2 }, [ - fuse [flat_pretty_space; Atom long_string]; - ]); - ] in - assert_pretty_print ~ctxt (long_string^"\n "^long_string) layout; - end; - - begin - let layout = fuse [ - fuse [ - Atom long_string; - Sequence ({ break = Break_if_needed; inline = (false, true); indent = 2 }, [ - fuse [flat_pretty_space; Atom long_string]; - ]); - ]; - Sequence ({ break = Break_if_needed; inline = (false, true); indent = 2 }, [ - fuse [flat_pretty_space; Atom long_string]; - ]); - ] in - assert_pretty_print ~ctxt (long_string^"\n "^long_string^"\n "^long_string) layout; - end; - end; - - "if_break_inside_concat" >:: - begin fun ctxt -> - let a41 = String.make 41 'A' in - let layout = Concat [ - Atom a41; - IfBreak (Empty, Atom " "); (* this never breaks because it's fused *) - Atom a41; - ] in - assert_pretty_print ~ctxt (a41 ^ " " ^ a41) layout; - end; - - "if_break_inside_concat_inside_sequence" >:: - begin fun ctxt -> - let a40 = String.make 40 'A' in - let layout = - Sequence ({ break = Break_if_needed; inline = (true, true); indent = 0 }, [ - Concat [ - Atom a40; - IfBreak (Empty, Atom " "); - Atom a40; - ]; - ]) - 
in - (* the IfBreak generates Empty because a break is needed, but no newline because it's fused *) - assert_pretty_print ~ctxt (a40 ^ a40) layout; - - let layout = - Sequence ({ break = Break_if_needed; inline = (true, true); indent = 0 }, [ - Concat [ - Atom a40; - IfBreak (Empty, Atom " "); - Atom a40; - ]; - Atom a40; - ]) - in - (* same as above. the Concat would be 81 chars if it doesn't break, which causes the parent +let tests = + "pretty_printer" + >::: [ + ( "breaks_in_list" + >:: fun ctxt -> + let layout = + fuse + [ + Atom "return"; + space; + list ~wrap:(IfBreak (Atom "(", Empty), IfBreak (Atom ")", Empty)) [Atom "null"]; + ] + in + assert_pretty_print ~ctxt "return null" layout; + + let long_string = String.make 80 'x' in + let layout = + fuse + [ + Atom "return"; + space; + list ~wrap:(IfBreak (Atom "(", Empty), IfBreak (Atom ")", Empty)) [Atom long_string]; + ] + in + assert_pretty_print ~ctxt ("return (\n " ^ long_string ^ "\n)") layout ); + ( "force_breaks_in_list" + >:: fun ctxt -> + let short_string = String.make 10 'x' in + let layout = + fuse + [ + Atom "myFunc"; + list + ~wrap:(Atom "(", Atom ")") + ~sep:(Atom ",") + [ + Atom "a"; + fuse + [ + Atom "b"; + space; + Atom "=>"; + Indent (fuse [pretty_hardline; Atom short_string]); + ]; + ]; + ] + in + assert_pretty_print ~ctxt ("myFunc(\n a,\n b =>\n " ^ short_string ^ ",\n)") layout + ); + ( "sequence_inline_after" + >:: fun ctxt -> + let short_string = String.make 10 'x' in + let long_string = String.make 80 'x' in + begin + let layout = + fuse + [ + Atom short_string; + Sequence + ( { break = Break_if_needed; inline = (false, true); indent = 2 }, + [fuse [flat_pretty_space; Atom short_string]] ); + ] + in + assert_pretty_print ~ctxt (short_string ^ " " ^ short_string) layout + end; + + begin + let layout = + fuse + [ + fuse + [ + Atom short_string; + Sequence + ( { break = Break_if_needed; inline = (false, true); indent = 2 }, + [fuse [flat_pretty_space; Atom short_string]] ); + ]; + Sequence + ( { break = Break_if_needed; inline = (false, true); indent = 2 }, + [fuse [flat_pretty_space; Atom short_string]] ); + ] + in + assert_pretty_print + ~ctxt + (short_string ^ " " ^ short_string ^ " " ^ short_string) + layout + end; + + begin + let layout = + fuse + [ + Atom long_string; + Sequence + ( { break = Break_if_needed; inline = (false, true); indent = 2 }, + [fuse [flat_pretty_space; Atom long_string]] ); + ] + in + assert_pretty_print ~ctxt (long_string ^ "\n " ^ long_string) layout + end; + + let layout = + fuse + [ + fuse + [ + Atom long_string; + Sequence + ( { break = Break_if_needed; inline = (false, true); indent = 2 }, + [fuse [flat_pretty_space; Atom long_string]] ); + ]; + Sequence + ( { break = Break_if_needed; inline = (false, true); indent = 2 }, + [fuse [flat_pretty_space; Atom long_string]] ); + ] + in + assert_pretty_print + ~ctxt + (long_string ^ "\n " ^ long_string ^ "\n " ^ long_string) + layout ); + ( "if_break_inside_concat" + >:: fun ctxt -> + let a41 = String.make 41 'A' in + let layout = + Concat + [ + Atom a41; + IfBreak (Empty, Atom " "); + (* this never breaks because it's fused *) + Atom a41; + ] + in + assert_pretty_print ~ctxt (a41 ^ " " ^ a41) layout ); + ( "if_break_inside_concat_inside_sequence" + >:: fun ctxt -> + let a40 = String.make 40 'A' in + let layout = + Sequence + ( { break = Break_if_needed; inline = (true, true); indent = 0 }, + [Concat [Atom a40; IfBreak (Empty, Atom " "); Atom a40]] ) + in + (* the IfBreak generates Empty because a break is needed, but no newline because 
it's fused *) + assert_pretty_print ~ctxt (a40 ^ a40) layout; + + let layout = + Sequence + ( { break = Break_if_needed; inline = (true, true); indent = 0 }, + [Concat [Atom a40; IfBreak (Empty, Atom " "); Atom a40]; Atom a40] ) + in + (* same as above. the Concat would be 81 chars if it doesn't break, which causes the parent Sequence to break, so the IfBreak takes the "break" case instead, and there's a NL between the Concat and last Atom. *) - assert_pretty_print ~ctxt (a40 ^ a40 ^ "\n" ^ a40) layout; - end; - - "break_if_needed_sequence_inside_concat" >:: - begin fun ctxt -> - let a80 = String.make 80 'A' in - - (* fits in 80 cols *) - let layout = - Concat [ - Atom "("; - Sequence ({ break = Break_if_needed; inline = (false, false); indent = 2 }, [Atom "a"]); - Atom ")"; - ] - in - assert_pretty_print ~ctxt "(a)" layout; - - (* doesn't fit in 80 cols, so indents *) - let layout = - Concat [ - Atom "("; - Sequence ({ break = Break_if_needed; inline = (false, false); indent = 2 }, [Atom a80]); - Atom ")"; - ] - in - assert_pretty_print ~ctxt ("(\n "^a80^"\n)") layout; - - (* doesn't fit in 80 cols, but doesn't indent *) - let layout = - Concat [ - Atom "("; - Sequence ({ break = Break_if_needed; inline = (true, true); indent = 2 }, [Atom a80]); - Atom ")"; - ] - in - assert_pretty_print ~ctxt ("("^a80^")") layout; - end; - - "group_break" >:: - begin fun ctxt -> - let a40 = String.make 40 'A' in - let a80 = String.make 80 'A' in - - (* fits *) - assert_pretty_print ~ctxt - ("("^a40^")") - (Group [Atom "("; Atom a40; Atom ")"]); - - (* fits *) - assert_pretty_print ~ctxt - ("( "^a40^" )") - (Group [Atom "("; line; Atom a40; line; Atom ")"]); - - (* exceeds 80 cols since there are no breaks *) - assert_pretty_print ~ctxt - ("("^a80^")") - (Group [Atom "("; Atom a80; Atom ")"]); - - (* breaks *) - assert_pretty_print ~ctxt - ("(\n"^a80^"\n)") - (Group [Atom "("; line; Atom a80; line; Atom ")"]); - - (* doesn't exceed 80 cols *) - assert_pretty_print ~ctxt - ("( "^a40^" )") - (Group [Atom "("; Indent (Concat [line; Atom a40]); line; Atom ")"]); - - (* breaks, but indent makes it 82 cols *) - assert_pretty_print ~ctxt - ("(\n "^a80^"\n)") - (Group [Atom "("; Indent (Concat [line; Atom a80]); line; Atom ")"]); - end; -] + assert_pretty_print ~ctxt (a40 ^ a40 ^ "\n" ^ a40) layout ); + ( "break_if_needed_sequence_inside_concat" + >:: fun ctxt -> + let a80 = String.make 80 'A' in + (* fits in 80 cols *) + let layout = + Concat + [ + Atom "("; + Sequence + ({ break = Break_if_needed; inline = (false, false); indent = 2 }, [Atom "a"]); + Atom ")"; + ] + in + assert_pretty_print ~ctxt "(a)" layout; + + (* doesn't fit in 80 cols, so indents *) + let layout = + Concat + [ + Atom "("; + Sequence + ({ break = Break_if_needed; inline = (false, false); indent = 2 }, [Atom a80]); + Atom ")"; + ] + in + assert_pretty_print ~ctxt ("(\n " ^ a80 ^ "\n)") layout; + + (* doesn't fit in 80 cols, but doesn't indent *) + let layout = + Concat + [ + Atom "("; + Sequence ({ break = Break_if_needed; inline = (true, true); indent = 2 }, [Atom a80]); + Atom ")"; + ] + in + assert_pretty_print ~ctxt ("(" ^ a80 ^ ")") layout ); + ( "group_break" + >:: fun ctxt -> + let a40 = String.make 40 'A' in + let a80 = String.make 80 'A' in + (* fits *) + assert_pretty_print ~ctxt ("(" ^ a40 ^ ")") (Group [Atom "("; Atom a40; Atom ")"]); + + (* fits *) + assert_pretty_print + ~ctxt + ("( " ^ a40 ^ " )") + (Group [Atom "("; line; Atom a40; line; Atom ")"]); + + (* exceeds 80 cols since there are no breaks *) + assert_pretty_print 
~ctxt ("(" ^ a80 ^ ")") (Group [Atom "("; Atom a80; Atom ")"]); + + (* breaks *) + assert_pretty_print + ~ctxt + ("(\n" ^ a80 ^ "\n)") + (Group [Atom "("; line; Atom a80; line; Atom ")"]); + + (* doesn't exceed 80 cols *) + assert_pretty_print + ~ctxt + ("( " ^ a40 ^ " )") + (Group [Atom "("; Indent (Concat [line; Atom a40]); line; Atom ")"]); + + (* breaks, but indent makes it 82 cols *) + assert_pretty_print + ~ctxt + ("(\n " ^ a80 ^ "\n)") + (Group [Atom "("; Indent (Concat [line; Atom a80]); line; Atom ")"]) ); + ] diff --git a/src/parser_utils/output/printers/__tests__/test.ml b/src/parser_utils/output/printers/__tests__/test.ml deleted file mode 100644 index d7044f970ff..00000000000 --- a/src/parser_utils/output/printers/__tests__/test.ml +++ /dev/null @@ -1,14 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "printers" >::: [ - Pretty_printer_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/parser_utils/output/printers/ast_diff_printer.ml b/src/parser_utils/output/printers/ast_diff_printer.ml index 6abb54e1eb4..4cbaa7c400c 100644 --- a/src/parser_utils/output/printers/ast_diff_printer.ml +++ b/src/parser_utils/output/printers/ast_diff_printer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2014, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,24 +11,39 @@ open Utils_js let layout_of_node comments node = let old = !Js_layout_generator.with_attached_comments in Js_layout_generator.with_attached_comments := comments; - let layout = match node with - | Raw str -> Layout.Atom str - | Statement stmt -> Js_layout_generator.statement stmt - | Program ast -> Js_layout_generator.program ~preserve_docblock:true ~checksum:None ast - | Expression expr -> - (* Wrap the expression in parentheses because we don't know what context we are in. *) - (* TODO keep track of the expression context for printing, which will only insert parens when - * actually needed. *) - Layout.fuse [ - Layout.Atom "("; - Js_layout_generator.expression expr; - Layout.Atom ")"; - ] - | Identifier id -> Js_layout_generator.identifier id - | Pattern pat -> Js_layout_generator.pattern pat - | TypeAnnotation annot -> Js_layout_generator.type_annotation annot - | ClassProperty prop -> Js_layout_generator.class_property prop - | ObjectProperty prop -> Js_layout_generator.object_property prop in + let layout = + match node with + | Raw str -> Layout.Atom str + | Comment c -> Js_layout_generator.comment c + | NumberLiteralNode t -> Js_layout_generator.number_literal_type t + | Literal lit -> Js_layout_generator.literal lit + | StringLiteral lit -> Js_layout_generator.string_literal_type lit + | Statement stmt -> Js_layout_generator.statement stmt + | Program ast -> Js_layout_generator.program ~preserve_docblock:true ~checksum:None ast + | Expression expr -> + (* Wrap the expression in parentheses because we don't know what context we are in. *) + (* TODO keep track of the expression context for printing, which will only insert parens when + * actually needed. 
*) + Layout.fuse [Layout.Atom "("; Js_layout_generator.expression expr; Layout.Atom ")"] + | Pattern pat -> Js_layout_generator.pattern pat + | Params params -> Js_layout_generator.arrow_function_params params + | Variance var -> Js_layout_generator.variance var + | Type typ -> Js_layout_generator.type_ typ + | TypeParam t_param -> Js_layout_generator.type_param t_param + | TypeAnnotation annot -> Js_layout_generator.type_annotation ~parens:false annot + | FunctionTypeAnnotation annot -> Js_layout_generator.type_annotation ~parens:true annot + | ClassProperty prop -> Js_layout_generator.class_property prop + | ObjectProperty prop -> Js_layout_generator.object_property prop + | TemplateLiteral t_lit -> Js_layout_generator.template_literal t_lit + | JSXChild child -> + begin + match Js_layout_generator.jsx_child child with + | Some (_, layout_node) -> layout_node + (* This case shouldn't happen, so return Empty *) + | None -> Layout.Empty + end + | JSXIdentifier id -> Js_layout_generator.jsx_identifier id + in Js_layout_generator.with_attached_comments := old; layout @@ -40,13 +55,16 @@ let text_of_node comments = %> Source.contents let text_of_nodes break = - let sep = match break with | Some str -> str | None -> "\n" in + let sep = + match break with + | Some str -> str + | None -> "\n" + in text_of_node %> ListUtils.to_string sep let edit_of_change comments = function - | loc, Replace (_, new_node) -> (loc, text_of_node comments new_node) - | loc, Insert (break, new_nodes) -> (loc, text_of_nodes break comments new_nodes) - | loc, Delete _ -> (loc, "") + | (loc, Replace (_, new_node)) -> (loc, text_of_node comments new_node) + | (loc, Insert (break, new_nodes)) -> (loc, text_of_nodes break comments new_nodes) + | (loc, Delete _) -> (loc, "") -let edits_of_changes comments changes = - List.map (edit_of_change comments) changes +let edits_of_changes comments changes = List.map (edit_of_change comments) changes diff --git a/src/parser_utils/output/printers/compact_printer.ml b/src/parser_utils/output/printers/compact_printer.ml index 043d2bb9bfb..d30e7749553 100644 --- a/src/parser_utils/output/printers/compact_printer.ml +++ b/src/parser_utils/output/printers/compact_printer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -16,7 +16,8 @@ let print ~source_maps node = src | Concat nodes | Group nodes - | Sequence (_, nodes) -> List.fold_left print_node src nodes + | Sequence (_, nodes) -> + List.fold_left print_node src nodes | Indent node -> print_node src node | Newline -> Source.add_newline src | Atom s -> Source.add_string s src @@ -24,6 +25,6 @@ let print ~source_maps node = | IfPretty (_, node) -> print_node src node | IfBreak (_, otherwise) -> print_node src otherwise | Empty -> src - in + in let src = print_node (Source.create ~source_maps ()) node in Source.add_newline src diff --git a/src/parser_utils/output/printers/dune b/src/parser_utils/output/printers/dune new file mode 100644 index 00000000000..9d1205f38d2 --- /dev/null +++ b/src/parser_utils/output/printers/dune @@ -0,0 +1,18 @@ +(library + (name flow_parser_utils_output_printers) + (wrapped false) + (modules (:standard \ replacement_printer)) + (libraries + flow_parser_utils_output)) + +(library + (name flow_parser_utils_replacement_printer) + (wrapped false) + (modules replacement_printer) + (libraries + flow_parser_utils + flow_parser_utils_output + flow_parser_utils_output_printers + flow_server_utils + ) +) diff --git a/src/parser_utils/output/printers/pretty_printer.ml b/src/parser_utils/output/printers/pretty_printer.ml index 9ba346ec7b6..c636d1a2f39 100644 --- a/src/parser_utils/output/printers/pretty_printer.ml +++ b/src/parser_utils/output/printers/pretty_printer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,107 +9,129 @@ open Layout (* TODO: Make this configurable *) let max_width = 80 -type break_mode = | Break | Flat -type writer = { src: Source.t; pos: int } -type context = { mode: break_mode; ind: int } + +type break_mode = + | Break + | Flat + +type writer = { + src: Source.t; + pos: int; +} + +type context = { + mode: break_mode; + ind: int; +} let rec fits ~width ~context nodes = - if width < 0 then false else - match nodes with - | [] -> true - | next::rest -> begin - match next with - | Empty -> fits ~width ~context rest - | SourceLocation (_, next) -> fits ~width ~context (next::rest) - | IfPretty (next, _) -> fits ~width ~context (next::rest) - | IfBreak (if_, else_) -> - let nodes = match context.mode with - | Break -> if_::rest - | Flat -> else_::rest - in - fits ~width ~context nodes - | Group items - | Concat items -> fits ~width ~context (items @ rest) - | Indent node -> fits ~width ~context (node::rest) - (* Respect forced breaks *) - | Newline -> false - | Sequence ({ break = Break_if_pretty; _ }, _) -> false - | Sequence ({ break = _; inline = (before, _); indent = _ }, items) -> - (* TODO: need to consider `after`. and indent? 
*) - (not before && context.mode = Break) || (fits ~width ~context (items @ rest)) - | Identifier (_, x) - | Atom x -> fits ~width:(width - String.length x) ~context rest - end + if width < 0 then + false + else + match nodes with + | [] -> true + | next :: rest -> + begin + match next with + | Empty -> fits ~width ~context rest + | SourceLocation (_, next) -> fits ~width ~context (next :: rest) + | IfPretty (next, _) -> fits ~width ~context (next :: rest) + | IfBreak (if_, else_) -> + let nodes = + match context.mode with + | Break -> if_ :: rest + | Flat -> else_ :: rest + in + fits ~width ~context nodes + | Group items + | Concat items -> + fits ~width ~context (items @ rest) + | Indent node -> fits ~width ~context (node :: rest) + (* Respect forced breaks *) + | Newline -> false + | Sequence ({ break = Break_if_pretty; _ }, _) -> false + | Sequence ({ break = _; inline = (before, _); indent = _ }, items) -> + (* TODO: need to consider `after`. and indent? *) + ((not before) && context.mode = Break) || fits ~width ~context (items @ rest) + | Identifier (_, x) + | Atom x -> + fits ~width:(width - String.length x) ~context rest + end let print = - let break_and_indent (c: context) (w: writer) = - let src = w.src - |> Source.add_newline - |> Source.add_space c.ind - in - { src; pos=c.ind } (* Reset indentation to our inset *) + let break_and_indent (c : context) (w : writer) = + let src = w.src |> Source.add_newline |> Source.add_space c.ind in + { src; pos = c.ind } + (* Reset indentation to our inset *) in - - let rec print_node (context: context) (w: writer) : layout_node -> writer = - function + let rec print_node (context : context) (w : writer) : layout_node -> writer = function | SourceLocation (loc, node) -> let src = Source.push_loc loc w.src in - let w = print_node context {w with src} node in + let w = print_node context { w with src } node in let src = Source.pop_loc w.src in { w with src } | Concat nodes -> List.fold_left (print_node context) w nodes | Newline -> break_and_indent context w | Indent node -> print_node { context with ind = context.ind + 2 } w node - | Sequence ({ break=Break_if_pretty; inline=(left, right); indent }, nodes) -> + | Sequence ({ break = Break_if_pretty; inline = (left, right); indent }, nodes) -> let inner_context = { ind = context.ind + indent; mode = Break } in - let w = if not left then break_and_indent inner_context w else w in - let (w, _) = List.fold_left - (fun (w, i) node -> - let w = print_node inner_context w node in - if i > 0 then (break_and_indent inner_context w, i - 1) - else (w, 0) - ) - (w, List.length nodes - 1) - nodes in - if not right then break_and_indent context w else w + let w = + if not left then + break_and_indent inner_context w + else + w + in + let (w, _) = + List.fold_left + (fun (w, i) node -> + let w = print_node inner_context w node in + if i > 0 then + (break_and_indent inner_context w, i - 1) + else + (w, 0)) + (w, List.length nodes - 1) + nodes + in + if not right then + break_and_indent context w + else + w | Group nodes as layout -> let context = - let flat_context = {context with mode = Flat} in - if fits ~width:(max_width - w.pos) ~context:flat_context [layout] - then flat_context - else {context with mode=Break} + let flat_context = { context with mode = Flat } in + if fits ~width:(max_width - w.pos) ~context:flat_context [layout] then + flat_context + else + { context with mode = Break } in print_node context w (Concat nodes) - | Sequence ({ break=Break_if_needed; inline; indent }, nodes) as layout -> 
begin - let flat_context = {context with mode = Flat} in - if fits ~width:(max_width - w.pos) ~context:flat_context [layout] then ( + | Sequence ({ break = Break_if_needed; inline; indent }, nodes) as layout -> + let flat_context = { context with mode = Flat } in + if fits ~width:(max_width - w.pos) ~context:flat_context [layout] then print_node flat_context w (Concat nodes) - ) else ( - let break_context = {context with mode=Break} in - print_node break_context w - (Sequence ({ break=Break_if_pretty; inline; indent }, nodes)) - ) - end + else + let break_context = { context with mode = Break } in + print_node break_context w (Sequence ({ break = Break_if_pretty; inline; indent }, nodes)) | Atom s -> let src = Source.add_string s w.src in - { src; pos=w.pos + String.length s } + { src; pos = w.pos + String.length s } | Identifier (loc, s) -> let src = Source.add_identifier loc s w.src in - { src; pos=w.pos + String.length s } + { src; pos = w.pos + String.length s } | IfPretty (node, _) -> print_node context w node - | IfBreak (on_break, otherwise) -> begin - match context.mode with - | Break -> print_node context w on_break - | Flat -> print_node context w otherwise + | IfBreak (on_break, otherwise) -> + begin + match context.mode with + | Break -> print_node context w on_break + | Flat -> print_node context w otherwise end | Empty -> w in - - fun ~source_maps ?(skip_endline=false) node -> - let { src; _ } = print_node - { mode=Flat; ind = 0 } - { src=Source.create ~source_maps (); pos=0 } - node + fun ~source_maps ?(skip_endline = false) node -> + let { src; _ } = + print_node { mode = Flat; ind = 0 } { src = Source.create ~source_maps (); pos = 0 } node in - if skip_endline then src - else Source.add_newline src + if skip_endline then + src + else + Source.add_newline src diff --git a/src/parser_utils/output/printers/pretty_printer.mli b/src/parser_utils/output/printers/pretty_printer.mli index 5549987f0bf..129508ff4c0 100644 --- a/src/parser_utils/output/printers/pretty_printer.mli +++ b/src/parser_utils/output/printers/pretty_printer.mli @@ -1,12 +1,9 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val print: - source_maps:Source_map_config.t option - -> ?skip_endline:bool - -> Layout.layout_node - -> Source.t +val print : + source_maps:Source_map_config.t option -> ?skip_endline:bool -> Layout.layout_node -> Source.t diff --git a/src/parser_utils/output/printers/replacement_printer.ml b/src/parser_utils/output/printers/replacement_printer.ml index f9ba6b57857..091c8e9fe23 100644 --- a/src/parser_utils/output/printers/replacement_printer.ml +++ b/src/parser_utils/output/printers/replacement_printer.ml @@ -1,121 +1,77 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) module Ast = Flow_ast -module L = Utils_js.LocMap -module D = Mapper_differ -module J = Js_layout_generator -module F = Ast.Function type patch = (int * int * string) list -let show_patch p: string = - ListUtils.to_string "" - (fun (s, e, p) -> Printf.sprintf "Start: <%d> End: <%d> Patch: <%s>\n" s e p) p +(* Location patches retain all the information needed to send edits over the LSP *) +type loc_patch = (Loc.t * string) list -let file_info file_path : string list * (int * int * int) list = - let input_channel = open_in file_path in - let rec build_list l = - match input_line input_channel with - | line -> build_list (line :: l) - | exception End_of_file -> close_in input_channel ; List.rev l - in - let lines_read = build_list [] in - let lines, line_counts, _ = - List.fold_left - (fun (lines, acc, prev_offset) s -> - let size = String.length s in - let new_offset = prev_offset + size in - (s :: lines, (size, prev_offset, new_offset) :: acc, new_offset + 1) ) - ([], [(0, 0, 0)], 0) - lines_read - in - (lines, line_counts) +let show_patch p : string = + ListUtils.to_string + "" + (fun (s, e, p) -> Printf.sprintf "Start: <%d> End: <%d> Patch: <%s>\n" s e p) + p -let mk_patch (diff : Mapper_differ.t) (ast : (Loc.t, Loc.t) Ast.program) - (file_path : string) : patch = - let _, line_counts = file_info file_path in - let line_counts_arr = Array.of_list (List.rev line_counts) in - let offset {Loc.line; column; _} = - let _, line_start, _ = line_counts_arr.(line) in - line_start + column - in - let attached_comments = Flow_prettier_comments.attach_comments ast in - J.with_attached_comments := Some attached_comments ; - let spans = - L.fold - (fun loc value acc -> - let {Loc.start; _end; _} = loc in - let node_string = - ( match value with - | D.Statement (_, node) -> J.statement node - | D.Expression (_, node) -> J.expression node - | D.Type (_, node) -> J.type_ node - | D.Return (F.Available annot) -> J.type_annotation annot - | D.Return (F.Missing _) -> Layout.Empty - | D.ClassElement (_, node) -> ( - match node with - | Ast.Class.Body.Method meth -> J.class_method meth - | Ast.Class.Body.Property prop -> J.class_property prop - | Ast.Class.Body.PrivateField field -> J.class_private_field field - ) ) - |> Pretty_printer.print ~skip_endline:true ~source_maps:None - |> Source.contents - in - let node_string = - match value with - | D.Return (F.Available _) -> node_string ^ " " - | _ -> node_string - in - (offset start, offset _end, node_string) :: acc ) - diff [] - in - J.with_attached_comments := None ; - spans +let with_content_of_file_input file f = + match File_input.content_of_file_input file with + | Ok contents -> f contents + | Error _ -> + let file_name = File_input.filename_of_file_input file in + let error_msg = + Printf.sprintf "Replacement_printer: Input file, \"%s\", couldn't be read." 
file_name + in + Utils_js.assert_false error_msg -let mk_patch_ast_differ (diff : Flow_ast_differ.node Flow_ast_differ.change list) - (ast : (Loc.t, Loc.t) Ast.program) (file_path : string) : patch = +let mk_loc_patch_ast_differ + (diff : Flow_ast_differ.node Flow_ast_differ.change list) (ast : (Loc.t, Loc.t) Ast.program) : + loc_patch = + let attached_comments = Some (Flow_prettier_comments.attach_comments ast) in + Ast_diff_printer.edits_of_changes attached_comments diff - let _, line_counts = file_info file_path in - let line_counts_arr = Array.of_list (List.rev line_counts) in - let offset {Loc.line; column; _} = - let _, line_start, _ = line_counts_arr.(line) in - line_start + column in +let mk_patch_ast_differ + (diff : Flow_ast_differ.node Flow_ast_differ.change list) + (ast : (Loc.t, Loc.t) Ast.program) + (content : string) : patch = + let offset_table = Offset_utils.make content in + let offset loc = Offset_utils.offset offset_table loc in + mk_loc_patch_ast_differ diff ast + |> Core_list.map ~f:(fun (loc, text) -> Loc.(offset loc.start, offset loc._end, text)) - let attached_comments = Some (Flow_prettier_comments.attach_comments ast) in - Ast_diff_printer.edits_of_changes attached_comments diff - |> List.map (fun (loc, text) -> Loc.(offset loc.start, offset loc._end, text)) +let mk_patch_ast_differ_unsafe diff ast file = + with_content_of_file_input file @@ mk_patch_ast_differ diff ast -let print (patch : patch) (file_path : string) : string = - let patch_sorted = List.sort - (fun (start_one, _, _) (start_two, _, _) -> compare start_one start_two) - patch +let print (patch : patch) (content : string) : string = + let patch_sorted = + List.sort (fun (start_one, _, _) (start_two, _, _) -> compare start_one start_two) patch in - let lines, line_counts = file_info file_path in - let _, _, file_end = List.hd line_counts in - let file_string = String.concat "\n" (List.rev lines) in + let file_end = String.length content in (* Apply the spans to the original text *) - let result_string_minus_end, last_span = + let (result_string_minus_end, last_span) = List.fold_left (fun (file, last) (start, _end, text) -> let file_curr = - Printf.sprintf "%s%s%s" file - (String.sub file_string last (start - last)) - text + Printf.sprintf "%s%s%s" file (String.sub content last (start - last)) text in - (file_curr, _end) ) - ("", 0) patch_sorted + (file_curr, _end)) + ("", 0) + patch_sorted in let last_span_to_end_size = file_end - last_span in let result_string = if last_span_to_end_size = 0 then - Printf.sprintf "%s\n" result_string_minus_end + Printf.sprintf "%s" result_string_minus_end else - Printf.sprintf "%s%s\n" result_string_minus_end - (String.sub file_string last_span last_span_to_end_size) + Printf.sprintf + "%s%s" + result_string_minus_end + (String.sub content last_span last_span_to_end_size) in result_string + +let print_unsafe patch file = with_content_of_file_input file @@ print patch diff --git a/src/parser_utils/output/printers/replacement_printer.mli b/src/parser_utils/output/printers/replacement_printer.mli index 41d291642ca..7d526b16980 100644 --- a/src/parser_utils/output/printers/replacement_printer.mli +++ b/src/parser_utils/output/printers/replacement_printer.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -7,11 +7,25 @@ type patch = (int * int * string) list -val show_patch: patch -> string +type loc_patch = (Loc.t * string) list -val mk_patch : Mapper_differ.t -> (Loc.t, Loc.t) Flow_ast.program -> string -> patch +val show_patch : patch -> string -val mk_patch_ast_differ : Flow_ast_differ.node Flow_ast_differ.change list -> - (Loc.t, Loc.t) Flow_ast.program -> string -> patch +val mk_loc_patch_ast_differ : + Flow_ast_differ.node Flow_ast_differ.change list -> (Loc.t, Loc.t) Flow_ast.program -> loc_patch + +val mk_patch_ast_differ : + Flow_ast_differ.node Flow_ast_differ.change list -> + (Loc.t, Loc.t) Flow_ast.program -> + string -> + patch + +val mk_patch_ast_differ_unsafe : + Flow_ast_differ.node Flow_ast_differ.change list -> + (Loc.t, Loc.t) Flow_ast.program -> + File_input.t -> + patch val print : patch -> string -> string + +val print_unsafe : patch -> File_input.t -> string diff --git a/src/parser_utils/output/source.ml b/src/parser_utils/output/source.ml index c4b47a103a0..f5136e1e253 100644 --- a/src/parser_utils/output/source.ml +++ b/src/parser_utils/output/source.ml @@ -1,139 +1,106 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module LocMap = Utils_js.LocMap +module LocMap = Loc_collections.LocMap type t = { buffer: Buffer.t; sourcemap: Sourcemap.t option; pos: Sourcemap.line_col; loc_stack: Loc.t list; - names: Source_map_config.names option + names: Source_map_config.names option; } let create ~source_maps () = - let sourcemap, names = Option.value_map source_maps - ~default:(None, None) - ~f:(fun { Source_map_config.names; } -> - Some (Sourcemap.create ()), Some names - ) + let (sourcemap, names) = + Option.value_map source_maps ~default:(None, None) ~f:(fun { Source_map_config.names } -> + (Some (Sourcemap.create ()), Some names)) in { - buffer = Buffer.create 127; (* no idea the best value for this *) + buffer = Buffer.create 127; + (* no idea the best value for this *) sourcemap; names; pos = { Sourcemap.line = 1; col = 0 }; loc_stack = []; } -let push_loc loc source = - { source with loc_stack = loc::source.loc_stack } +let push_loc loc source = { source with loc_stack = loc :: source.loc_stack } let pop_loc source = (* raises if you call pop more than push *) let loc_stack = List.tl source.loc_stack in { source with loc_stack } -let pos_new_line pos = - Sourcemap.({ line = pos.line + 1; col = 0 }) +let pos_new_line pos = Sourcemap.{ line = pos.line + 1; col = 0 } let pos_add_string = let rec count n i str = try let i' = String.index_from str i '\n' in count (n + 1) (i' + 1) str - with Not_found -> n, String.length str - i + with Not_found -> (n, String.length str - i) in fun { Sourcemap.line; col } str -> - let newlines, remaining_cols = count 0 0 str in + let (newlines, remaining_cols) = count 0 0 str in let line = line + newlines in - let col = if newlines > 0 then remaining_cols else col + remaining_cols in + let col = + if newlines > 0 then + remaining_cols + else + col + remaining_cols + in { Sourcemap.line; col } let add_string ?name str src = Buffer.add_string src.buffer str; - let sourcemap = Option.map src.sourcemap ~f:(fun sourcemap -> - match src.loc_stack with - | [] -> sourcemap - | loc::_ -> - let source = match Loc.source loc with - | Some filename -> File_key.to_string filename - | None -> "" - in - let original = Sourcemap.({ - name; - source; - original_loc = { 
- line = loc.Loc.start.Loc.line; - col = loc.Loc.start.Loc.column; - } - }) in - Sourcemap.add_mapping ~original ~generated:src.pos sourcemap - ) in + let sourcemap = + Option.map src.sourcemap ~f:(fun sourcemap -> + match src.loc_stack with + | [] -> sourcemap + | loc :: _ -> + let source = + match Loc.source loc with + | Some filename -> File_key.to_string filename + | None -> "" + in + let original = + Sourcemap. + { + name; + source; + original_loc = { line = loc.Loc.start.Loc.line; col = loc.Loc.start.Loc.column }; + } + in + Sourcemap.add_mapping ~original ~generated:src.pos sourcemap) + in let pos = pos_add_string src.pos str in { src with sourcemap; pos } let add_identifier loc str src = (* If no name is found or its the same as the original name don't set it *) let default = None in - let name = Option.value_map src.names ~default ~f:(fun names -> - Option.value_map (LocMap.get loc names) ~default ~f:(fun name -> - if name = str then None else Some name - ) - ) in - src - |> push_loc loc - |> add_string ?name str - |> pop_loc + let name = + Option.value_map src.names ~default ~f:(fun names -> + Option.value_map (LocMap.get loc names) ~default ~f:(fun name -> + if name = str then + None + else + Some name)) + in + src |> push_loc loc |> add_string ?name str |> pop_loc (* TODO: Remove any right trailing whitespace *) let add_newline source = Buffer.add_string source.buffer "\n"; { source with pos = pos_new_line source.pos } -let add_space num b = - add_string (String.make num ' ') b - -(* Merge two source builders. The first is mutated *) -let add_source src1 src2 = - (* TODO: handle merging source mappings, currently you will get incorrect - source maps after merging (the compact printer doesn't use this API) *) - Buffer.add_buffer src1.buffer src2.buffer; - let pos = - let open Sourcemap in - let line = src1.pos.line + src2.pos.line - 1 in - let col = if src2.pos.line > 1 then src2.pos.col else src1.pos.col + src2.pos.col in - { line; col } - in - { src1 with pos } +let add_space num b = add_string (String.make num ' ') b let contents b = Buffer.contents b.buffer -module Json = Sourcemap.Make_json_writer (struct - type t = Hh_json.json - let of_string x = Hh_json.JSON_String x - let of_obj props = Hh_json.JSON_Object props - let of_array arr = Hh_json.JSON_Array arr - let of_number x = Hh_json.JSON_Number x - let null = Hh_json.JSON_Null -end) - -let json_of_source source = - let open Hh_json in - let rev_props = ["code", JSON_String (contents source)] in - let rev_props = match source.sourcemap with - | Some sourcemap -> - ("sourceMap", Json.json_of_sourcemap sourcemap)::rev_props - | None -> rev_props - in - JSON_Object (List.rev rev_props) - -let json_of_source_map source = - match source.sourcemap with - | Some sourcemap -> - Json.json_of_sourcemap sourcemap - | None -> Hh_json.JSON_Object ([]) +let sourcemap b = b.sourcemap diff --git a/src/parser_utils/output/source.mli b/src/parser_utils/output/source.mli index 32a15e039cb..914e78bc1dc 100644 --- a/src/parser_utils/output/source.mli +++ b/src/parser_utils/output/source.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -10,20 +10,23 @@ type t = { sourcemap: Sourcemap.t option; pos: Sourcemap.line_col; loc_stack: Loc.t list; - names: Source_map_config.names option + names: Source_map_config.names option; } -val create: source_maps:Source_map_config.t option -> unit -> t +val create : source_maps:Source_map_config.t option -> unit -> t -val push_loc: Loc.t -> t -> t -val pop_loc: t -> t +val push_loc : Loc.t -> t -> t -val add_string: ?name:string -> string -> t -> t -val add_identifier: Loc.t -> string -> t -> t -val add_newline: t -> t -val add_space: int -> t -> t -val add_source: t -> t -> t +val pop_loc : t -> t -val contents: t -> string -val json_of_source : t -> Hh_json.json -val json_of_source_map: t -> Hh_json.json +val add_string : ?name:string -> string -> t -> t + +val add_identifier : Loc.t -> string -> t -> t + +val add_newline : t -> t + +val add_space : int -> t -> t + +val contents : t -> string + +val sourcemap : t -> Sourcemap.t option diff --git a/src/parser_utils/output/source_map_config.ml b/src/parser_utils/output/source_map_config.ml index 73ba9c8048f..57a7cf3b27f 100644 --- a/src/parser_utils/output/source_map_config.ml +++ b/src/parser_utils/output/source_map_config.ml @@ -1,17 +1,14 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module LocMap = Utils_js.LocMap +module LocMap = Loc_collections.LocMap type names = string LocMap.t -type t = { - names: names -} -let default = { - names = LocMap.empty -} +type t = { names: names } + +let default = { names = LocMap.empty } diff --git a/src/parser_utils/package_json.ml b/src/parser_utils/package_json.ml index 2620741bbb9..b5163afa0c0 100644 --- a/src/parser_utils/package_json.ml +++ b/src/parser_utils/package_json.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,54 +12,61 @@ type t = { main: string option; } -let (>>=) = Core_result.(>>=) +type 'a t_or_error = (t, 'a * string) result + +let ( >>= ) = Core_result.( >>= ) + +let empty = { name = None; main = None } -let empty = { name = None; main = None; } let name package = package.name + let main package = package.main let statement_of_program = function | (_, [statement], _) -> Ok statement - | _ -> Error "Expected a single statement." + | (loc, _, _) -> Error (loc, "Expected a single statement.") let object_of_statement statement = - let open Ast in - match statement with - | _, Statement.Expression { Statement.Expression. - expression = _, Expression.Assignment { Expression.Assignment. 
- operator = Expression.Assignment.Assign; - left = _; - right = obj; - }; - directive = _; - } -> Ok obj - | _ -> Error "Expected an assignment" + Ast.( + match statement with + | ( _, + Statement.Expression + { + Statement.Expression.expression = + ( _, + Expression.Assignment + { Expression.Assignment.operator = None; left = _; right = obj } ); + directive = _; + } ) -> + Ok obj + | (loc, _) -> Error (loc, "Expected an assignment")) let properties_of_object = function - | (_, Ast.Expression.Object {Ast.Expression.Object.properties}) -> Ok properties - | _ -> Error "Expected an object literal" + | (_, Ast.Expression.Object { Ast.Expression.Object.properties; comments = _ }) -> Ok properties + | (loc, _) -> Error (loc, "Expected an object literal") -let parse ast = +let parse ast : 'a t_or_error = statement_of_program ast >>= object_of_statement >>= properties_of_object >>= fun properties -> - let open Ast in - let open Expression.Object in - let extract_property package = function - | Property (_, Property.Init { - key = Property.Literal(_, { Literal.value = Literal.String key; _ }); - value = (_, Expression.Literal { Literal. - value = Literal.String value; - _ - }); - _; - }) -> - begin match key with - | "name" -> { package with name = Some value } - | "main" -> { package with main = Some value } - | _ -> package + Ast.( + Expression.Object.( + let extract_property package = function + | Property + ( _, + Property.Init + { + key = Property.Literal (_, { Literal.value = Literal.String key; _ }); + value = (_, Expression.Literal { Literal.value = Literal.String value; _ }); + _; + } ) -> + begin + match key with + | "name" -> { package with name = Some value } + | "main" -> { package with main = Some value } + | _ -> package end - | _ -> package - in - Ok (List.fold_left extract_property empty properties) + | _ -> package + in + Ok (List.fold_left extract_property empty properties))) diff --git a/src/parser_utils/package_json.mli b/src/parser_utils/package_json.mli index 7c096efd32c..966a4814796 100644 --- a/src/parser_utils/package_json.mli +++ b/src/parser_utils/package_json.mli @@ -1,12 +1,18 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type t -val empty: t -val name: t -> string option -val main: t -> string option -val parse: (Loc.t, Loc.t) Flow_ast.program -> (t, string) result + +type 'a t_or_error = (t, 'a * string) result + +val empty : t + +val name : t -> string option + +val main : t -> string option + +val parse : (Loc.t, Loc.t) Flow_ast.program -> Loc.t t_or_error diff --git a/src/parser_utils/property_assignment.ml b/src/parser_utils/property_assignment.ml new file mode 100644 index 00000000000..15730b99a2b --- /dev/null +++ b/src/parser_utils/property_assignment.ml @@ -0,0 +1,369 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module Ast = Flow_ast + +type 'loc error = { + loc: 'loc; + desc: Lints.property_assignment_kind; +} + +type 'loc errors = { + public_property_errors: 'loc error list SMap.t; + private_property_errors: 'loc error list SMap.t; +} + +let public_property loc ident = + let (_, ({ Ast.Identifier.name; comments = _ } as r)) = ident in + (loc, { r with Ast.Identifier.name = "this." 
^ name }) + +let private_property loc ident = + let (_, (_, ({ Ast.Identifier.name; comments = _ } as r))) = ident in + (loc, { r with Ast.Identifier.name = "this.#" ^ name }) + +let not_definitively_initialized (write_locs : Ssa_api.With_ALoc.write_locs) : bool = + (* write_locs = [] corresponds to a binding whose "value" in the ssa_env is + * Val.empty. It typically will not be the final result unless (1) something + * is declared but never initialized (2) something needs to be recorded after + * an abnormal control flow exit (e.g., after return, the environment is + * "empty"d out). + *) + write_locs = [] || List.mem Ssa_api.With_ALoc.uninitialized write_locs + +(* NOTE: This function should only be called after the ssa walk that produces + * the `ssa_env` argument has finished. Simplifying the values in the ssa + * environment mid-walk will throw an exception if all values have not been + * resolved. The invariant we are relying on here is that by the time the walk + * completes, all values must be resolved so it is safe to call this function. + *) +let filter_uninitialized + (ssa_env : Ssa_builder.With_ALoc.Env.t) + (properties : (ALoc.t, ALoc.t) Flow_ast.Identifier.t list) : + (ALoc.t, ALoc.t) Flow_ast.Identifier.t list = + let ssa_env = SMap.map Ssa_builder.With_ALoc.Val.simplify ssa_env in + Core_list.filter + ~f:(fun id -> + match SMap.get (Flow_ast_utils.name_of_ident id) ssa_env with + | Some write_locs -> not_definitively_initialized write_locs + | None -> true) + properties + +class property_assignment (property_names : SSet.t) = + object (this) + inherit Ssa_builder.With_ALoc.ssa_builder as super + + (* ABRUPT COMPLETIONS *) + method expecting_return_or_throw (f : unit -> unit) : unit = + let completion_state = this#run_to_completion f in + this#commit_abrupt_completion_matching + Ssa_builder.With_ALoc.AbruptCompletion.(mem [return; throw]) + completion_state + + (* WRITES *) + + (* Keep track of the final_ssa_env so that we can check that all properties + * are initialized when the constructor exits. + *) + val mutable final_ssa_env : Ssa_builder.With_ALoc.Env.t = SMap.empty + + method final_ssa_env = final_ssa_env + + method! pop_ssa_env saved_state = + final_ssa_env <- this#ssa_env; + super#pop_ssa_env saved_state + + method initialize_property property_id value = + (match snd value with + | Ast.Expression.ArrowFunction _ + | Ast.Expression.Function _ -> + () + | _ -> ignore @@ this#expression value); + ignore @@ this#pattern_identifier property_id + + (* READS *) + val mutable read_loc_metadata : string Loc_collections.ALocMap.t = + Loc_collections.ALocMap.empty + + method metadata_of_read_loc (read_loc : ALoc.t) : string option = + Loc_collections.ALocMap.get read_loc read_loc_metadata + + method! identifier (ident : (ALoc.t, ALoc.t) Ast.Identifier.t) = ident + + method! jsx_identifier (ident : ALoc.t Ast.JSX.Identifier.t) = ident + + method! 
member loc (expr : (ALoc.t, ALoc.t) Ast.Expression.Member.t) = + match expr with + | { + Ast.Expression.Member._object = (_, Ast.Expression.This); + property = + (Ast.Expression.Member.PropertyIdentifier _ | Ast.Expression.Member.PropertyPrivateName _) + as property; + } -> + let property_name : string = + Flow_ast_utils.name_of_ident + Ast.Expression.Member.( + match property with + | PropertyIdentifier id -> public_property loc id + | PropertyPrivateName id -> private_property loc id + | PropertyExpression _ -> failwith "match on expr makes this impossible") + in + read_loc_metadata <- Loc_collections.ALocMap.add loc property_name read_loc_metadata; + ignore @@ this#any_identifier loc property_name; + expr + | _ -> super#member loc expr + + (* EVALUATION ORDER *) + method! assignment loc (expr : (ALoc.t, ALoc.t) Ast.Expression.Assignment.t) = + Ast.Expression.Assignment.( + let { operator; left; right } = expr in + match left with + | ( _, + Ast.Pattern.Expression + ( member_loc, + Ast.Expression.Member + ( { + Ast.Expression.Member._object = (_, Ast.Expression.This); + property = + ( Ast.Expression.Member.PropertyIdentifier _ + | Ast.Expression.Member.PropertyPrivateName _ ) as property; + } as left_member ) ) ) -> + (match operator with + | None -> + (* given `this.x = e`, read e then write x *) + this#initialize_property + Ast.Expression.Member.( + match property with + | PropertyIdentifier id -> public_property member_loc id + | PropertyPrivateName id -> private_property member_loc id + | PropertyExpression _ -> failwith "match on expr makes this impossible") + right + | Some _ -> + (* given `this.x += e`, read x then read e *) + ignore @@ this#member member_loc left_member; + ignore @@ this#expression right) + (* This expression technically also writes to x, but we don't model that + * here since in order for `this.x += e` to not cause an error, x must + * already be assigned anyway. Also, not writing to x here leads to + * more understandable error messages, as the write would mask the + * PropertyNotDefinitelyInitialized error. + *); + + expr + | _ -> super#assignment loc expr) + + (* PREVENT THIS FROM ESCAPING *) + val mutable this_escape_errors + : (ALoc.t * Lints.property_assignment_kind * Ssa_builder.With_ALoc.Env.t) list = + [] + + method this_escape_errors = this_escape_errors + + method private add_this_escape_error error = this_escape_errors <- error :: this_escape_errors + + method! expression expr = + (match expr with + | (loc, Ast.Expression.This) -> + this#add_this_escape_error (loc, Lints.ThisBeforeEverythingInitialized, this#ssa_env) + | _ -> ()); + super#expression expr + + method! 
call loc (expr : ('loc, 'loc) Ast.Expression.Call.t) = + (match expr.Ast.Expression.Call.callee with + (* match on method calls *) + | ( member_loc, + Ast.Expression.Member + { + Ast.Expression.Member._object = (_, Ast.Expression.This); + property = + ( Ast.Expression.Member.PropertyIdentifier _ + | Ast.Expression.Member.PropertyPrivateName _ ) as property; + } ) -> + let name = + Flow_ast_utils.name_of_ident + @@ Ast.Expression.Member.( + match property with + | PropertyIdentifier id -> public_property member_loc id + | PropertyPrivateName id -> private_property member_loc id + | PropertyExpression _ -> failwith "match on expr.callee makes this impossible") + in + let error = + if SSet.mem name property_names then + Lints.PropertyFunctionCallBeforeEverythingInitialized + else + Lints.MethodCallBeforeEverythingInitialized + in + this#add_this_escape_error (loc, error, this#ssa_env) + | _ -> ()); + super#call loc expr + end + +let eval_property_assignment class_body = + Ast.Class.( + let property_declarations = + Core_list.filter_map + ~f:(function + | Body.Property + ( _, + { + Property.key = Ast.Expression.Object.Property.Identifier ((loc, _) as id); + value; + static = false; + annot = _; + variance = _; + } ) -> + Some (public_property loc id, value) + | Body.PrivateField + ( _, + { + PrivateField.key = (loc, _) as id; + value; + static = false; + annot = _; + variance = _; + } ) -> + Some (private_property loc id, value) + | _ -> None) + class_body + in + let ctor_body : (ALoc.t, ALoc.t) Ast.Statement.Block.t = + Core_list.find_map + ~f:(function + | Body.Method + ( _, + { + Method.kind = Method.Constructor; + value = + ( _, + { + Ast.Function.body = Ast.Function.BodyBlock (_, block); + id = _; + params = _; + async = _; + generator = _; + predicate = _; + return = _; + tparams = _; + sig_loc = _; + } ); + key = _; + static = _; + decorators = _; + } ) -> + Some block + | _ -> None) + class_body + |> Option.value ~default:{ Ast.Statement.Block.body = [] } + in + let properties = Core_list.map ~f:fst property_declarations in + let bindings : ALoc.t Hoister.Bindings.t = + List.fold_left + (fun bindings property -> Hoister.Bindings.add property bindings) + Hoister.Bindings.empty + properties + in + let property_names = + List.fold_left + (fun acc property -> SSet.add (Flow_ast_utils.name_of_ident property) acc) + SSet.empty + properties + in + let ssa_walk = new property_assignment property_names in + ignore + @@ ssa_walk#with_bindings + ALoc.none + bindings + (fun body -> + ssa_walk#expecting_return_or_throw (fun () -> + List.iter + (function + | (property_id, Some default_initializer) -> + ssa_walk#initialize_property property_id default_initializer + | _ -> ()) + property_declarations; + ignore @@ ssa_walk#block ALoc.none body); + body) + ctor_body; + + (* We make heavy use of the Core_list.rev_* functions below because they are + * tail recursive. We can do this freely because the order in which we + * process the errors doesn't actually matter (Flow will sort the errors + * before printing them anyway). 
+ *) + let uninitialized_properties : (ALoc.t error * string) list = + properties + |> filter_uninitialized ssa_walk#final_ssa_env + |> Core_list.rev_map ~f:(fun id -> + ( { + loc = Flow_ast_utils.loc_of_ident id; + desc = Lints.PropertyNotDefinitelyInitialized; + }, + Flow_ast_utils.name_of_ident id )) + in + let read_before_initialized : (ALoc.t error * string) list = + ssa_walk#values + |> Loc_collections.ALocMap.bindings + |> Core_list.rev_filter_map ~f:(fun (read_loc, write_locs) -> + if not_definitively_initialized write_locs then + ssa_walk#metadata_of_read_loc read_loc + |> Option.map ~f:(fun name -> + ({ loc = read_loc; desc = Lints.ReadFromUninitializedProperty }, name)) + else + None) + in + let this_errors : (ALoc.t error * string Nel.t) list = + Core_list.rev_filter_map + ~f:(fun (loc, desc, ssa_env) -> + filter_uninitialized ssa_env properties + |> Core_list.map ~f:Flow_ast_utils.name_of_ident + |> Nel.of_list + |> Option.map ~f:(fun uninitialized_properties -> + ({ loc; desc }, uninitialized_properties))) + ssa_walk#this_escape_errors + in + (* It's better to append new to old b/c new is always a singleton *) + let combine_voidable_checks old new_ = Core_list.rev_append new_ old in + let add_to_errors error errors prefixed_name = + (* Check if it is private first since `this.` is a prefix of `this.#` *) + if String_utils.string_starts_with prefixed_name "this.#" then + { + errors with + private_property_errors = + SMap.add + ~combine:combine_voidable_checks + (String_utils.lstrip prefixed_name "this.#") + [error] + errors.private_property_errors; + } + else if String_utils.string_starts_with prefixed_name "this." then + { + errors with + public_property_errors = + SMap.add + ~combine:combine_voidable_checks + (String_utils.lstrip prefixed_name "this.") + [error] + errors.public_property_errors; + } + else + errors + in + let single_property_errors errors checks = + List.fold_left + (fun acc (error, prefixed_name) -> add_to_errors error acc prefixed_name) + checks + errors + in + let multi_property_errors errors checks = + List.fold_left + (fun acc (error, names) -> Nel.fold_left (add_to_errors error) acc names) + checks + errors + in + { public_property_errors = SMap.empty; private_property_errors = SMap.empty } + |> single_property_errors uninitialized_properties + |> single_property_errors read_before_initialized + |> multi_property_errors this_errors) diff --git a/src/parser_utils/property_assignment.mli b/src/parser_utils/property_assignment.mli new file mode 100644 index 00000000000..2d079b30422 --- /dev/null +++ b/src/parser_utils/property_assignment.mli @@ -0,0 +1,23 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type 'loc error = { + loc: 'loc; + desc: Lints.property_assignment_kind; +} + +type 'loc errors = { + public_property_errors: 'loc error list SMap.t; + private_property_errors: 'loc error list SMap.t; +} + +(* The bulk of the definite instance property assignment analysis is performed + * by this function. 
It takes the elements of a class body as input and returns + * a map from property names to a list of errors that we should emit if that + * property isn't voidable + *) +val eval_property_assignment : (ALoc.t, ALoc.t) Flow_ast.Class.Body.element list -> ALoc.t errors diff --git a/src/parser_utils/scope_api.ml b/src/parser_utils/scope_api.ml index d106ab069aa..a395e512de6 100644 --- a/src/parser_utils/scope_api.ml +++ b/src/parser_utils/scope_api.ml @@ -1,195 +1,236 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module LocSet = Utils_js.LocSet -module LocMap = Utils_js.LocMap - -type scope = int -type use = Loc.t -type uses = LocSet.t - -module Def = struct - type t = { - locs: Loc.t Nel.t; - name: int; - actual_name: string; +module Make (L : Loc_sig.S) = struct + module L = L + + type scope = int + + type use = L.t + + type uses = L.LSet.t + + module Def = struct + type t = { + locs: L.t Nel.t; + name: int; + actual_name: string; + } + + let compare = + let rec iter locs1 locs2 = + match (locs1, locs2) with + | ([], []) -> 0 + | ([], _) -> -1 + | (_, []) -> 1 + | (loc1 :: locs1, loc2 :: locs2) -> + let i = L.compare loc1 loc2 in + if i = 0 then + iter locs1 locs2 + else + i + in + (fun t1 t2 -> iter (Nel.to_list t1.locs) (Nel.to_list t2.locs)) + + let is x t = Nel.exists (L.equal x) t.locs + end + + module DefMap = MyMap.Make (Def) + + type use_def_map = Def.t L.LMap.t + + module Scope = struct + type t = { + lexical: bool; + parent: int option; + defs: Def.t SMap.t; + locals: use_def_map; + globals: SSet.t; + loc: L.t; + } + end + + type info = { + (* number of distinct name ids *) + max_distinct: int; + (* map of scope ids to local scopes *) + scopes: Scope.t IMap.t; } - let compare = - let rec iter locs1 locs2 = match locs1, locs2 with - | [], [] -> 0 - | [], _ -> -1 - | _, [] -> 1 - | loc1::locs1, loc2::locs2 -> - let i = Loc.compare loc1 loc2 in - if i = 0 then iter locs1 locs2 - else i - in fun t1 t2 -> iter (Nel.to_list t1.locs) (Nel.to_list t2.locs) - - let is x t = - Nel.exists (Loc.equal x) t.locs -end -module DefMap = MyMap.Make(Def) - -type use_def_map = Def.t LocMap.t -module Scope = struct - type t = { - lexical: bool; - parent: int option; - defs: Def.t SMap.t; - locals: use_def_map; - globals: SSet.t; - loc: Loc.t; - } -end -type info = { - (* number of distinct name ids *) - max_distinct: int; - (* map of scope ids to local scopes *) - scopes: Scope.t IMap.t -} - -let all_uses { scopes; _ } = - IMap.fold (fun _ scope acc -> - LocMap.fold (fun use _ uses -> - LocSet.add use uses - ) scope.Scope.locals acc - ) scopes LocSet.empty - -let defs_of_all_uses { scopes; _ } = - IMap.fold (fun _ scope acc -> - LocMap.union scope.Scope.locals acc - ) scopes LocMap.empty - -let uses_of_all_defs info = - let use_def_map = defs_of_all_uses info in - LocMap.fold (fun use def def_uses_map -> - match DefMap.get def def_uses_map with - | None -> DefMap.add def (LocSet.singleton use) def_uses_map - | Some uses -> DefMap.add def (LocSet.add use uses) def_uses_map - ) use_def_map DefMap.empty - -let def_of_use { scopes; _ } use = - let def_opt = IMap.fold (fun _ scope acc -> - match acc with - | Some _ -> acc - | None -> LocMap.get use scope.Scope.locals - ) scopes None in - match def_opt with - | Some def -> def - | None -> failwith "missing def" - -let use_is_def info use = - let def = def_of_use info 
use in - Def.is use def - -let uses_of_def { scopes; _ } ?(exclude_def=false) def = - IMap.fold (fun _ scope acc -> - LocMap.fold (fun use def' uses -> - if exclude_def && Def.is use def' then uses - else if Def.compare def def' = 0 then LocSet.add use uses else uses - ) scope.Scope.locals acc - ) scopes LocSet.empty - -let uses_of_use info ?exclude_def use = - let def = def_of_use info use in - uses_of_def info ?exclude_def def - -let def_is_unused info def = - LocSet.is_empty (uses_of_def info ~exclude_def:true def) - -let scope info scope_id = - try IMap.find_unsafe scope_id info.scopes with Not_found -> - failwith ("Scope " ^ (string_of_int scope_id) ^ " not found") - -let scope_of_loc info scope_loc = - let scopes = + let all_uses { scopes; _ } = IMap.fold - (fun scope_id scope acc -> - if scope.Scope.loc = scope_loc then scope_id::acc - else acc - ) - info.scopes - [] - in - List.rev scopes - -let is_local_use { scopes; _ } use = - IMap.exists (fun _ scope -> - LocMap.mem use scope.Scope.locals - ) scopes - -let rec fold_scope_chain info f scope_id acc = - let s = scope info scope_id in - let acc = f scope_id s acc in - match s.Scope.parent with - | Some parent_id -> fold_scope_chain info f parent_id acc - | None -> acc - -let rev_scope_pointers scopes = - IMap.fold (fun id scope acc -> - match scope.Scope.parent with - | Some scope_id -> - let children' = match IMap.get scope_id acc with - | Some children -> children - | None -> [] - in IMap.add scope_id (id::children') acc - | None -> acc - ) scopes IMap.empty - -let build_scope_tree info = - let scopes = info.scopes in - let children_map = rev_scope_pointers scopes in - let rec build_scope_tree scope_id = - let children = match IMap.get scope_id children_map with - | None -> [] - | Some children_scope_ids -> List.rev_map build_scope_tree children_scope_ids in - Tree.Node (IMap.find scope_id scopes, children) - in build_scope_tree 0 - -(* Let D be the declared names of some scope. 
- - The free variables F of the scope are the names in G + F' + L - D, where: - * G contains the global names used in that scope - * L contains the local names used in that scope - * F' contains the free variables of its children - - The bound variables B of the scope are the names in B' + D, where: - * B' contains the bound variables of its children -*) -let rec compute_free_and_bound_variables = function - | Tree.Node (scope, children) -> - let children' = List.map compute_free_and_bound_variables children in - let free_children, bound_children = List.fold_left (fun (facc, bacc) -> function - | Tree.Node ((_, free, bound), _) -> SSet.union free facc, SSet.union bound bacc - ) (SSet.empty, SSet.empty) children' in - - let def_locals = scope.Scope.defs in - let is_def_local use_name = SMap.exists (fun def_name _ -> def_name = use_name) def_locals in - let free = - scope.Scope.globals |> - LocMap.fold (fun _loc use_def acc -> - let use_name = use_def.Def.actual_name in - if is_def_local use_name then acc else SSet.add use_name acc - ) scope.Scope.locals |> - SSet.fold (fun use_name acc -> - if is_def_local use_name then acc else SSet.add use_name acc - ) free_children in - let bound = SMap.fold (fun name _def acc -> - SSet.add name acc - ) def_locals bound_children - in Tree.Node ((def_locals, free, bound), children') - -let toplevel_names info = - let scopes = info.scopes in - let open Scope in - let toplevel_scope = IMap.find 0 scopes in - assert (toplevel_scope.parent = None); - let toplevel_lexical_scope = IMap.find 1 scopes in - assert (toplevel_lexical_scope.parent = Some 0); - SMap.fold (fun x _def acc -> SSet.add x acc) - (SMap.union toplevel_scope.defs toplevel_lexical_scope.defs) SSet.empty + (fun _ scope acc -> + L.LMap.fold (fun use _ uses -> L.LSet.add use uses) scope.Scope.locals acc) + scopes + L.LSet.empty + + let defs_of_all_uses { scopes; _ } = + IMap.fold (fun _ scope acc -> L.LMap.union scope.Scope.locals acc) scopes L.LMap.empty + + let uses_of_all_defs info = + let use_def_map = defs_of_all_uses info in + L.LMap.fold + (fun use def def_uses_map -> + match DefMap.get def def_uses_map with + | None -> DefMap.add def (L.LSet.singleton use) def_uses_map + | Some uses -> DefMap.add def (L.LSet.add use uses) def_uses_map) + use_def_map + DefMap.empty + + let def_of_use { scopes; _ } use = + let def_opt = + IMap.fold + (fun _ scope acc -> + match acc with + | Some _ -> acc + | None -> L.LMap.get use scope.Scope.locals) + scopes + None + in + match def_opt with + | Some def -> def + | None -> failwith "missing def" + + let use_is_def info use = + let def = def_of_use info use in + Def.is use def + + let uses_of_def { scopes; _ } ?(exclude_def = false) def = + IMap.fold + (fun _ scope acc -> + L.LMap.fold + (fun use def' uses -> + if exclude_def && Def.is use def' then + uses + else if Def.compare def def' = 0 then + L.LSet.add use uses + else + uses) + scope.Scope.locals + acc) + scopes + L.LSet.empty + + let uses_of_use info ?exclude_def use = + let def = def_of_use info use in + uses_of_def info ?exclude_def def + + let def_is_unused info def = L.LSet.is_empty (uses_of_def info ~exclude_def:true def) + + let scope info scope_id = + try IMap.find_unsafe scope_id info.scopes + with Not_found -> failwith ("Scope " ^ string_of_int scope_id ^ " not found") + + let scope_of_loc info scope_loc = + let scopes = + IMap.fold + (fun scope_id scope acc -> + if scope.Scope.loc = scope_loc then + scope_id :: acc + else + acc) + info.scopes + [] + in + List.rev scopes + + let is_local_use 
{ scopes; _ } use = + IMap.exists (fun _ scope -> L.LMap.mem use scope.Scope.locals) scopes + + let rec fold_scope_chain info f scope_id acc = + let s = scope info scope_id in + let acc = f scope_id s acc in + match s.Scope.parent with + | Some parent_id -> fold_scope_chain info f parent_id acc + | None -> acc + + let rev_scope_pointers scopes = + IMap.fold + (fun id scope acc -> + match scope.Scope.parent with + | Some scope_id -> + let children' = + match IMap.get scope_id acc with + | Some children -> children + | None -> [] + in + IMap.add scope_id (id :: children') acc + | None -> acc) + scopes + IMap.empty + + let build_scope_tree info = + let scopes = info.scopes in + let children_map = rev_scope_pointers scopes in + let rec build_scope_tree scope_id = + let children = + match IMap.get scope_id children_map with + | None -> [] + | Some children_scope_ids -> List.rev_map build_scope_tree children_scope_ids + in + Tree.Node (IMap.find scope_id scopes, children) + in + build_scope_tree 0 + + (* Let D be the declared names of some scope. + + The free variables F of the scope are the names in G + F' + L - D, where: + * G contains the global names used in that scope + * L contains the local names used in that scope + * F' contains the free variables of its children + + The bound variables B of the scope are the names in B' + D, where: + * B' contains the bound variables of its children + *) + let rec compute_free_and_bound_variables = function + | Tree.Node (scope, children) -> + let children' = Core_list.map ~f:compute_free_and_bound_variables children in + let (free_children, bound_children) = + List.fold_left + (fun (facc, bacc) -> function + | Tree.Node ((_, free, bound), _) -> (SSet.union free facc, SSet.union bound bacc)) + (SSet.empty, SSet.empty) + children' + in + let def_locals = scope.Scope.defs in + let is_def_local use_name = SMap.exists (fun def_name _ -> def_name = use_name) def_locals in + let free = + scope.Scope.globals + |> L.LMap.fold + (fun _loc use_def acc -> + let use_name = use_def.Def.actual_name in + if is_def_local use_name then + acc + else + SSet.add use_name acc) + scope.Scope.locals + |> SSet.fold + (fun use_name acc -> + if is_def_local use_name then + acc + else + SSet.add use_name acc) + free_children + in + let bound = SMap.fold (fun name _def acc -> SSet.add name acc) def_locals bound_children in + Tree.Node ((def_locals, free, bound), children') + + let toplevel_names info = + let scopes = info.scopes in + Scope.( + let toplevel_scope = IMap.find 0 scopes in + assert (toplevel_scope.parent = None); + let toplevel_lexical_scope = IMap.find 1 scopes in + assert (toplevel_lexical_scope.parent = Some 0); + SMap.fold + (fun x _def acc -> SSet.add x acc) + (SMap.union toplevel_scope.defs toplevel_lexical_scope.defs) + SSet.empty) +end + +module With_Loc = Make (Loc_sig.LocS) +module With_ALoc = Make (Loc_sig.ALocS) diff --git a/src/parser_utils/scope_api.mli b/src/parser_utils/scope_api.mli index b88d8f6c75d..88fc6be4334 100644 --- a/src/parser_utils/scope_api.mli +++ b/src/parser_utils/scope_api.mli @@ -1,57 +1,10 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -type scope = int -type use = Loc.t -type uses = Utils_js.LocSet.t -module Def: sig - type t = { - locs: Loc.t Nel.t; - name: int; - actual_name: string; - } - val compare: t -> t -> int -end -module DefMap: MyMap_sig.S with type key = Def.t -type use_def_map = Def.t Utils_js.LocMap.t -module Scope: sig - type t = { - lexical: bool; - parent: int option; - defs: Def.t SMap.t; - locals: use_def_map; - globals: SSet.t; - loc: Loc.t; - } -end -type info = { - max_distinct: int; - scopes: Scope.t IMap.t; -} +module With_Loc : Scope_api_sig.S with module L = Loc_sig.LocS -val scope: info -> scope -> Scope.t - -(* List of scopes associated with a loc. The returned list order should be - based on the scope depth (some nodes such as functions have two scopes - associated, one for the name and one for the params/body). *) -val scope_of_loc: info -> Loc.t -> scope list - -val all_uses: info -> uses -val defs_of_all_uses: info -> use_def_map -val uses_of_all_defs: info -> uses DefMap.t -val def_of_use: info -> use -> Def.t -val use_is_def: info -> use -> bool -val uses_of_def: info -> ?exclude_def:bool -> Def.t -> uses -val uses_of_use: info -> ?exclude_def:bool -> use -> uses -val def_is_unused: info -> Def.t -> bool -val is_local_use: info -> use -> bool -val fold_scope_chain: info -> (scope -> Scope.t -> 'a -> 'a) -> scope -> 'a -> 'a - -val build_scope_tree: info -> Scope.t Tree.t -val compute_free_and_bound_variables: Scope.t Tree.t -> (Def.t SMap.t * SSet.t * SSet.t) Tree.t - -val toplevel_names: info -> SSet.t +module With_ALoc : Scope_api_sig.S with module L = Loc_sig.ALocS diff --git a/src/parser_utils/scope_api_sig.ml b/src/parser_utils/scope_api_sig.ml new file mode 100644 index 00000000000..ec887a3fc2e --- /dev/null +++ b/src/parser_utils/scope_api_sig.ml @@ -0,0 +1,79 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module type S = sig + module L : Loc_sig.S + + type scope = int + + type use = L.t + + type uses = L.LSet.t + + module Def : sig + type t = { + locs: L.t Nel.t; + name: int; + actual_name: string; + } + + val compare : t -> t -> int + end + + module DefMap : MyMap_sig.S with type key = Def.t + + type use_def_map = Def.t L.LMap.t + + module Scope : sig + type t = { + lexical: bool; + parent: int option; + defs: Def.t SMap.t; + locals: use_def_map; + globals: SSet.t; + loc: L.t; + } + end + + type info = { + max_distinct: int; + scopes: Scope.t IMap.t; + } + + val scope : info -> scope -> Scope.t + + (* List of scopes associated with a loc. The returned list order should be + based on the scope depth (some nodes such as functions have two scopes + associated, one for the name and one for the params/body). 
*) + val scope_of_loc : info -> L.t -> scope list + + val all_uses : info -> uses + + val defs_of_all_uses : info -> use_def_map + + val uses_of_all_defs : info -> uses DefMap.t + + val def_of_use : info -> use -> Def.t + + val use_is_def : info -> use -> bool + + val uses_of_def : info -> ?exclude_def:bool -> Def.t -> uses + + val uses_of_use : info -> ?exclude_def:bool -> use -> uses + + val def_is_unused : info -> Def.t -> bool + + val is_local_use : info -> use -> bool + + val fold_scope_chain : info -> (scope -> Scope.t -> 'a -> 'a) -> scope -> 'a -> 'a + + val build_scope_tree : info -> Scope.t Tree.t + + val compute_free_and_bound_variables : Scope.t Tree.t -> (Def.t SMap.t * SSet.t * SSet.t) Tree.t + + val toplevel_names : info -> SSet.t +end diff --git a/src/parser_utils/scope_builder.ml b/src/parser_utils/scope_builder.ml index d3b3fb5f218..26b1e4d9f3e 100644 --- a/src/parser_utils/scope_builder.ml +++ b/src/parser_utils/scope_builder.ml @@ -1,313 +1,381 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Flow_ast_visitor open Hoister -open Scope_api - -module LocMap = Utils_js.LocMap - -class with_or_eval_visitor = object(this) - inherit [bool] visitor ~init:false as super - - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - if this#acc = true then expr else match expr with - | (_, Call { Call.callee = (_, Identifier (_, "eval")); _}) -> - this#set_acc true; - expr - | _ -> super#expression expr - - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - if this#acc = true then stmt else super#statement stmt - - method! with_ _loc (stuff: (Loc.t, Loc.t) Ast.Statement.With.t) = - this#set_acc true; - stuff -end - -(* Visitor class that prepares use-def info, hoisting bindings one scope at a - time. This info can be used for various purposes, e.g. variable renaming. - - We do not generate the scope tree for the entire program, because it is not - clear where to hang scopes for function expressions, catch clauses, - etc. One possibility is to augment the AST with scope identifiers. - - As we move into a nested scope, we generate bindings for the new scope, map - the bindings to names generated by a factory, and augment the existing - environment with this map before visiting the nested scope. 
-*) -module Acc = struct - type t = info - let init = { - max_distinct = 0; - scopes = IMap.empty; - } -end - -module Env : sig - type t - val empty: t - val mk_env: (unit -> int) -> t -> Bindings.t -> t - val get: string -> t -> Def.t option - val defs: t -> Def.t SMap.t -end = struct - type t = Def.t SMap.t list - let empty = [] - - let rec get x t = - match t with - | [] -> None - | hd::rest -> - begin match SMap.get x hd with - | Some def -> Some def - | None -> get x rest - end - - let defs = function - | [] -> SMap.empty - | hd::_ -> hd - - let mk_env next parent_env bindings = - let bindings = Bindings.to_assoc bindings in - let env = List.fold_left (fun env (x, locs) -> - let name = match get x parent_env with - | Some def -> def.Def.name - | None -> next () in - SMap.add x { Def.locs; name; actual_name=x } env - ) SMap.empty bindings in - env::parent_env -end - -class scope_builder = object(this) - inherit [Acc.t] visitor ~init:Acc.init as super - - val mutable env = Env.empty - val mutable current_scope_opt = None - val mutable scope_counter = 0 - val mutable uses = [] - - method private new_scope = - let new_scope = scope_counter in - scope_counter <- scope_counter + 1; - new_scope - - val mutable counter = 0 - method private next = - let result = counter in - counter <- counter + 1; - this#update_acc (fun acc -> { acc with - max_distinct = max counter acc.max_distinct - }); - result - - method with_bindings: 'a. ?lexical:bool -> Loc.t -> Bindings.t -> ('a -> 'a) -> 'a -> 'a = - fun ?(lexical=false) loc bindings visit node -> - let save_counter = counter in - let save_uses = uses in - let old_env = env in - let parent = current_scope_opt in - let child = this#new_scope in - uses <- []; - current_scope_opt <- Some child; - env <- Env.mk_env (fun () -> this#next) old_env bindings; - let result = Core_result.try_with (fun () -> visit node) in - this#update_acc (fun acc -> - let defs = Env.defs env in - let locals = SMap.fold (fun _ def locals -> - Nel.fold_left (fun locals loc -> LocMap.add loc def locals) locals def.Def.locs - ) defs LocMap.empty in - let locals, globals = List.fold_left (fun (locals, globals) (loc, x) -> - match Env.get x env with - | Some def -> LocMap.add loc def locals, globals - | None -> locals, SSet.add x globals - ) (locals, SSet.empty) uses in - let scopes = IMap.add child { Scope.lexical; parent; defs; locals; globals; loc; } acc.scopes in - { acc with scopes } - ); - uses <- save_uses; - current_scope_opt <- parent; - env <- old_env; - counter <- save_counter; - Core_result.ok_exn result - - method! identifier (expr: Loc.t Ast.Identifier.t) = - uses <- expr::uses; - expr - - method! jsx_identifier (id: Loc.t Ast.JSX.Identifier.t) = - let open Ast.JSX.Identifier in - let loc, {name} = id in - uses <- (loc, name)::uses; - id - - (* don't rename the `foo` in `x.foo` *) - method! member_property_identifier (id: Loc.t Ast.Identifier.t) = id - - (* don't rename the `foo` in `const {foo: bar} = x` *) - method! pattern_object_property_identifier_key ?kind id = ignore kind; id - - (* don't rename the `foo` in `{ foo: ... }` *) - method! object_key_identifier (id: Loc.t Ast.Identifier.t) = id - - method! block loc (stmt: (Loc.t, Loc.t) Ast.Statement.Block.t) = - let lexical_hoist = new lexical_hoister in - let lexical_bindings = lexical_hoist#eval (lexical_hoist#block loc) stmt in - this#with_bindings ~lexical:true loc lexical_bindings (super#block loc) stmt - - (* like block *) - method! 
program (program: (Loc.t, Loc.t) Ast.program) = - let loc, _, _ = program in - let lexical_hoist = new lexical_hoister in - let lexical_bindings = lexical_hoist#eval lexical_hoist#program program in - this#with_bindings ~lexical:true loc lexical_bindings super#program program - - method private scoped_for_in_statement loc (stmt: (Loc.t, Loc.t) Ast.Statement.ForIn.t) = - super#for_in_statement loc stmt - - method! for_in_statement loc (stmt: (Loc.t, Loc.t) Ast.Statement.ForIn.t) = - let open Ast.Statement.ForIn in - let { left; right = _; body = _; each = _ } = stmt in - - let lexical_hoist = new lexical_hoister in - let lexical_bindings = match left with - | LeftDeclaration (loc, decl) -> - lexical_hoist#eval (lexical_hoist#variable_declaration loc) decl - | LeftPattern _ -> Bindings.empty - in - this#with_bindings ~lexical:true loc lexical_bindings (this#scoped_for_in_statement loc) stmt - - method private scoped_for_of_statement loc (stmt: (Loc.t, Loc.t) Ast.Statement.ForOf.t) = - super#for_of_statement loc stmt - - method! for_of_statement loc (stmt: (Loc.t, Loc.t) Ast.Statement.ForOf.t) = - let open Ast.Statement.ForOf in - let { left; right = _; body = _; async = _ } = stmt in - let lexical_hoist = new lexical_hoister in - let lexical_bindings = match left with - | LeftDeclaration (loc, decl) -> - lexical_hoist#eval (lexical_hoist#variable_declaration loc) decl - | LeftPattern _ -> Bindings.empty - in - this#with_bindings ~lexical:true loc lexical_bindings (this#scoped_for_of_statement loc) stmt - - method private scoped_for_statement loc (stmt: (Loc.t, Loc.t) Ast.Statement.For.t) = - super#for_statement loc stmt - - method! for_statement loc (stmt: (Loc.t, Loc.t) Ast.Statement.For.t) = - let open Ast.Statement.For in - let { init; test = _; update = _; body = _ } = stmt in - - let lexical_hoist = new lexical_hoister in - let lexical_bindings = match init with - | Some (InitDeclaration (loc, decl)) -> - lexical_hoist#eval (lexical_hoist#variable_declaration loc) decl - | _ -> Bindings.empty - in - this#with_bindings ~lexical:true loc lexical_bindings (this#scoped_for_statement loc) stmt - - method! catch_clause loc (clause: (Loc.t, Loc.t) Ast.Statement.Try.CatchClause.t') = - let open Ast.Statement.Try.CatchClause in - let { param; body = _ } = clause in +module Make (L : Loc_sig.S) (Api : Scope_api_sig.S with module L = L) : + Scope_builder_sig.S with module L = L and module Api = Api = struct + module L = L + module Api = Api + open Api + + class with_or_eval_visitor = + object (this) + inherit [bool, L.t] visitor ~init:false as super + + method! expression (expr : (L.t, L.t) Ast.Expression.t) = + Ast.Expression.( + if this#acc = true then + expr + else + match expr with + | ( _, + Call + { + Call.callee = + (_, Identifier (_, { Ast.Identifier.name = "eval"; comments = _ })); + _; + } ) -> + this#set_acc true; + expr + | _ -> super#expression expr) + + method! statement (stmt : (L.t, L.t) Ast.Statement.t) = + if this#acc = true then + stmt + else + super#statement stmt + + method! with_ _loc (stuff : (L.t, L.t) Ast.Statement.With.t) = + this#set_acc true; + stuff + end + + (* Visitor class that prepares use-def info, hoisting bindings one scope at a + time. This info can be used for various purposes, e.g. variable renaming. + + We do not generate the scope tree for the entire program, because it is not + clear where to hang scopes for function expressions, catch clauses, + etc. One possibility is to augment the AST with scope identifiers. 
+ + As we move into a nested scope, we generate bindings for the new scope, map + the bindings to names generated by a factory, and augment the existing + environment with this map before visiting the nested scope. + *) + module Acc = struct + type t = info + + let init = { max_distinct = 0; scopes = IMap.empty } + end + + module Env : sig + type t + + val empty : t + + val mk_env : (unit -> int) -> t -> L.t Bindings.t -> t + + val get : string -> t -> Def.t option + + val defs : t -> Def.t SMap.t + end = struct + type t = Def.t SMap.t list + + let empty = [] + + let rec get x t = + match t with + | [] -> None + | hd :: rest -> + begin + match SMap.get x hd with + | Some def -> Some def + | None -> get x rest + end + + let defs = function + | [] -> SMap.empty + | hd :: _ -> hd + + let mk_env next parent_env bindings = + let bindings = Bindings.to_assoc bindings in + let env = + List.fold_left + (fun env (x, locs) -> + let name = + match get x parent_env with + | Some def -> def.Def.name + | None -> next () + in + SMap.add x { Def.locs; name; actual_name = x } env) + SMap.empty + bindings + in + env :: parent_env + end + + class scope_builder = + object (this) + inherit [Acc.t, L.t] visitor ~init:Acc.init as super + + val mutable env = Env.empty + + val mutable current_scope_opt = None + + val mutable scope_counter = 0 + + val mutable uses = [] + + method private new_scope = + let new_scope = scope_counter in + scope_counter <- scope_counter + 1; + new_scope + + val mutable counter = 0 + + method private next = + let result = counter in + counter <- counter + 1; + this#update_acc (fun acc -> { acc with max_distinct = max counter acc.max_distinct }); + result + + method with_bindings : 'a. ?lexical:bool -> L.t -> L.t Bindings.t -> ('a -> 'a) -> 'a -> 'a = + fun ?(lexical = false) loc bindings visit node -> + let save_counter = counter in + let save_uses = uses in + let old_env = env in + let parent = current_scope_opt in + let child = this#new_scope in + uses <- []; + current_scope_opt <- Some child; + env <- Env.mk_env (fun () -> this#next) old_env bindings; + let result = Core_result.try_with (fun () -> visit node) in + this#update_acc (fun acc -> + let defs = Env.defs env in + let locals = + SMap.fold + (fun _ def locals -> + Nel.fold_left (fun locals loc -> L.LMap.add loc def locals) locals def.Def.locs) + defs + L.LMap.empty + in + let (locals, globals) = + List.fold_left + (fun (locals, globals) (loc, { Ast.Identifier.name = x; comments = _ }) -> + match Env.get x env with + | Some def -> (L.LMap.add loc def locals, globals) + | None -> (locals, SSet.add x globals)) + (locals, SSet.empty) + uses + in + let scopes = + IMap.add child { Scope.lexical; parent; defs; locals; globals; loc } acc.scopes + in + { acc with scopes }); + uses <- save_uses; + current_scope_opt <- parent; + env <- old_env; + counter <- save_counter; + Core_result.ok_exn result + + method! identifier (expr : (L.t, L.t) Ast.Identifier.t) = + uses <- expr :: uses; + expr + + method! jsx_identifier (id : L.t Ast.JSX.Identifier.t) = + Ast.JSX.Identifier.( + let (loc, { name }) = id in + uses <- Flow_ast_utils.ident_of_source (loc, name) :: uses; + id) + + (* don't rename the `foo` in `x.foo` *) + method! member_property_identifier (id : (L.t, L.t) Ast.Identifier.t) = id + + (* don't rename the `foo` in `const {foo: bar} = x` *) + method! pattern_object_property_identifier_key ?kind id = + ignore kind; + id + + (* don't rename the `foo` in `{ foo: ... }` *) + method! 
object_key_identifier (id : (L.t, L.t) Ast.Identifier.t) = id + + method! block loc (stmt : (L.t, L.t) Ast.Statement.Block.t) = + let lexical_hoist = new lexical_hoister in + let lexical_bindings = lexical_hoist#eval (lexical_hoist#block loc) stmt in + this#with_bindings ~lexical:true loc lexical_bindings (super#block loc) stmt - (* hoisting *) - let lexical_bindings = match param with - | Some p -> + (* like block *) + method! program (program : (L.t, L.t) Ast.program) = + let (loc, _, _) = program in let lexical_hoist = new lexical_hoister in - lexical_hoist#eval lexical_hoist#catch_clause_pattern p - | None -> Bindings.empty - in - this#with_bindings ~lexical:true loc lexical_bindings (super#catch_clause loc) clause - - (* helper for function params and body *) - method private lambda loc params body = - let open Ast.Function in - - (* hoisting *) - let hoist = new hoister in - begin - let (_loc, { Params.params = param_list; rest = _rest }) = params in - run_list hoist#function_param_pattern param_list; - match body with - | BodyBlock (block_loc, block) -> - run (hoist#block block_loc) block - | _ -> + let lexical_bindings = lexical_hoist#eval lexical_hoist#program program in + this#with_bindings ~lexical:true loc lexical_bindings super#program program + + method private scoped_for_in_statement loc (stmt : (L.t, L.t) Ast.Statement.ForIn.t) = + super#for_in_statement loc stmt + + method! for_in_statement loc (stmt : (L.t, L.t) Ast.Statement.ForIn.t) = + Ast.Statement.ForIn.( + let { left; right = _; body = _; each = _ } = stmt in + let lexical_hoist = new lexical_hoister in + let lexical_bindings = + match left with + | LeftDeclaration (loc, decl) -> + lexical_hoist#eval (lexical_hoist#variable_declaration loc) decl + | LeftPattern _ -> Bindings.empty + in + this#with_bindings + ~lexical:true + loc + lexical_bindings + (this#scoped_for_in_statement loc) + stmt) + + method private scoped_for_of_statement loc (stmt : (L.t, L.t) Ast.Statement.ForOf.t) = + super#for_of_statement loc stmt + + method! for_of_statement loc (stmt : (L.t, L.t) Ast.Statement.ForOf.t) = + Ast.Statement.ForOf.( + let { left; right = _; body = _; async = _ } = stmt in + let lexical_hoist = new lexical_hoister in + let lexical_bindings = + match left with + | LeftDeclaration (loc, decl) -> + lexical_hoist#eval (lexical_hoist#variable_declaration loc) decl + | LeftPattern _ -> Bindings.empty + in + this#with_bindings + ~lexical:true + loc + lexical_bindings + (this#scoped_for_of_statement loc) + stmt) + + method private scoped_for_statement loc (stmt : (L.t, L.t) Ast.Statement.For.t) = + super#for_statement loc stmt + + method! for_statement loc (stmt : (L.t, L.t) Ast.Statement.For.t) = + Ast.Statement.For.( + let { init; test = _; update = _; body = _ } = stmt in + let lexical_hoist = new lexical_hoister in + let lexical_bindings = + match init with + | Some (InitDeclaration (loc, decl)) -> + lexical_hoist#eval (lexical_hoist#variable_declaration loc) decl + | _ -> Bindings.empty + in + this#with_bindings + ~lexical:true + loc + lexical_bindings + (this#scoped_for_statement loc) + stmt) + + method! 
catch_clause loc (clause : (L.t, L.t) Ast.Statement.Try.CatchClause.t') = + Ast.Statement.Try.CatchClause.( + let { param; body = _ } = clause in + (* hoisting *) + let lexical_bindings = + match param with + | Some p -> + let lexical_hoist = new lexical_hoister in + lexical_hoist#eval lexical_hoist#catch_clause_pattern p + | None -> Bindings.empty + in + this#with_bindings ~lexical:true loc lexical_bindings (super#catch_clause loc) clause) + + (* helper for function params and body *) + method private lambda loc params body = + (* function params and bindings within the function body share the same scope *) + let bindings = + let hoist = new hoister in + run hoist#function_params params; + run hoist#function_body_any body; + hoist#acc + in + this#with_bindings + loc + bindings + (fun () -> + run this#function_params params; + run this#function_body_any body) () - end; - - this#with_bindings loc hoist#acc (fun () -> - let (_loc, { Params.params = param_list; rest }) = params in - run_list this#function_param_pattern param_list; - run_opt this#function_rest_element rest; - begin match body with - | BodyBlock (block_loc, block) -> - run (this#block block_loc) block - | BodyExpression expr -> - run this#expression expr - end; - ) () - - method! function_declaration loc (expr: (Loc.t, Loc.t) Ast.Function.t) = - let contains_with_or_eval = - let visit = new with_or_eval_visitor in - visit#eval (visit#function_declaration loc) expr - in - - if not contains_with_or_eval then begin - let open Ast.Function in - let { - id; params; body; async = _; generator = _; expression = _; - predicate = _; return = _; tparams = _; - } = expr in - - run_opt this#function_identifier id; - this#lambda loc params body; - end; - - expr - - (* Almost the same as function_declaration, except that the name of the - function expression is locally in scope. *) - method! function_ loc (expr: (Loc.t, Loc.t) Ast.Function.t) = - let contains_with_or_eval = - let visit = new with_or_eval_visitor in - visit#eval (visit#function_ loc) expr + method! function_declaration loc (expr : (L.t, L.t) Ast.Function.t) = + let contains_with_or_eval = + let visit = new with_or_eval_visitor in + visit#eval (visit#function_declaration loc) expr + in + if not contains_with_or_eval then ( + Ast.Function.( + let { + id; + params; + body; + async = _; + generator = _; + predicate = _; + return = _; + tparams = _; + sig_loc = _; + } = + expr + in + run_opt this#function_identifier id; + + this#lambda loc params body) + ); + + expr + + (* Almost the same as function_declaration, except that the name of the + function expression is locally in scope. *) + method! 
function_ loc (expr : (L.t, L.t) Ast.Function.t) = + let contains_with_or_eval = + let visit = new with_or_eval_visitor in + visit#eval (visit#function_ loc) expr + in + ( if not contains_with_or_eval then + Ast.Function.( + let { + id; + params; + body; + async = _; + generator = _; + predicate = _; + return = _; + tparams = _; + sig_loc = _; + } = + expr + in + let bindings = + match id with + | Some name -> Bindings.singleton name + | None -> Bindings.empty + in + this#with_bindings + loc + ~lexical:true + bindings + (fun () -> + run_opt this#function_identifier id; + this#lambda loc params body) + ()) ); + + expr + end + + let program ?(ignore_toplevel = false) program = + let (loc, _, _) = program in + let walk = new scope_builder in + let bindings = + if ignore_toplevel then + Bindings.empty + else + let hoist = new hoister in + hoist#eval hoist#program program in - - if not contains_with_or_eval then begin - let open Ast.Function in - let { - id; params; body; async = _; generator = _; expression = _; - predicate = _; return = _; tparams = _; - } = expr in - - let bindings = match id with - | Some name -> Bindings.singleton name - | None -> Bindings.empty in - this#with_bindings loc ~lexical:true bindings (fun () -> - run_opt this#function_identifier id; - this#lambda loc params body; - ) (); - end; - - expr + walk#eval (walk#with_bindings loc bindings walk#program) program end -let program ?(ignore_toplevel=false) program = - let loc, _, _ = program in - let walk = new scope_builder in - let bindings = - if ignore_toplevel then Bindings.empty - else - let hoist = new hoister in - hoist#eval hoist#program program - in - walk#eval (walk#with_bindings loc bindings walk#program) program +module With_Loc = Make (Loc_sig.LocS) (Scope_api.With_Loc) +module With_ALoc = Make (Loc_sig.ALocS) (Scope_api.With_ALoc) +include With_Loc diff --git a/src/parser_utils/scope_builder_sig.ml b/src/parser_utils/scope_builder_sig.ml new file mode 100644 index 00000000000..43a443e6be4 --- /dev/null +++ b/src/parser_utils/scope_builder_sig.ml @@ -0,0 +1,38 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module type S = sig + module L : Loc_sig.S + + module Api : Scope_api_sig.S with module L = L + + module Acc : sig + type t = Api.info + end + + val program : ?ignore_toplevel:bool -> (L.t, L.t) Flow_ast.program -> Acc.t + + class scope_builder : + object + inherit [Acc.t, L.t] Flow_ast_visitor.visitor + + method with_bindings : + 'a. ?lexical:bool -> L.t -> L.t Hoister.Bindings.t -> ('a -> 'a) -> 'a -> 'a + + method private scoped_for_statement : + L.t -> (L.t, L.t) Flow_ast.Statement.For.t -> (L.t, L.t) Flow_ast.Statement.For.t + + method private scoped_for_in_statement : + L.t -> (L.t, L.t) Flow_ast.Statement.ForIn.t -> (L.t, L.t) Flow_ast.Statement.ForIn.t + + method private scoped_for_of_statement : + L.t -> (L.t, L.t) Flow_ast.Statement.ForOf.t -> (L.t, L.t) Flow_ast.Statement.ForOf.t + + method private lambda : + L.t -> (L.t, L.t) Flow_ast.Function.Params.t -> (L.t, L.t) Flow_ast.Function.body -> unit + end +end diff --git a/src/parser_utils/signature_builder.ml b/src/parser_utils/signature_builder.ml index 7a5ddd574c4..f0ab442c58a 100644 --- a/src/parser_utils/signature_builder.ml +++ b/src/parser_utils/signature_builder.ml @@ -1,423 +1,592 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Flow_ast_visitor - module Entry = Signature_builder_entry module Env = Signature_builder_env module V = Signature_builder_verify.Verifier -module Verify = V(struct let prevent_munge = false end) -module VerifyPreventMunge = V(struct let prevent_munge = true end) +module G = Signature_builder_generate.Generator +module Signature_builder_deps = Signature_builder_deps.With_Loc +module File_sig = File_sig.With_Loc module Signature = struct type t = Env.t * File_sig.exports_info File_sig.t' - let add_env env entry = - Env.add entry env - - let add_env_opt env = function - | None, _ -> env - | Some id, kind -> add_env env (id, kind) - - let add_env_list env entries = - Env.push entries env - - let add_variable_declaration env variable_declaration = - add_env_list env (Entry.variable_declaration variable_declaration) - - let add_function_declaration env function_declaration = - add_env_opt env (Entry.function_declaration function_declaration) - - let add_class env class_ = - add_env_opt env (Entry.class_ class_) - - let add_declare_variable env declare_variable = - add_env env (Entry.declare_variable declare_variable) - - let add_declare_function env declare_function = - add_env env (Entry.declare_function declare_function) - - let add_declare_class env declare_class = - add_env env (Entry.declare_class declare_class) - - let add_type_alias env type_alias = - add_env env (Entry.type_alias type_alias) - - let add_opaque_type env opaque_type = - add_env env (Entry.opaque_type opaque_type) - - let add_interface env interface = - add_env env (Entry.interface interface) - - let add_declare_export_declaration env = Ast.Statement.DeclareExportDeclaration.(function - | Variable (_, declare_variable) -> add_declare_variable env declare_variable - | Function (_, declare_function) -> add_declare_function env declare_function - | Class (_, declare_class) -> add_declare_class env declare_class - | NamedType (_, type_alias) -> add_type_alias env type_alias - | NamedOpaqueType (_, opaque_type) -> add_opaque_type env opaque_type - | Interface (_, interface) -> add_interface env interface - | DefaultType _ -> assert false - ) - - let add_export_default_declaration env = Ast.Statement.ExportDefaultDeclaration.(function - | Declaration (_, Ast.Statement.FunctionDeclaration - ({ Ast.Function.id = Some _; _ } as function_declaration) - ) -> - add_function_declaration env function_declaration - | Declaration (_, Ast.Statement.ClassDeclaration ({ Ast.Class.id = Some _; _ } as class_)) -> - add_class env class_ - | Declaration _ -> assert false - | Expression (_, Ast.Expression.Function ({ Ast.Function.id = Some _; _ } as function_)) -> - add_function_declaration env function_ - | Expression _ -> assert false - ) - - let add_stmt env = Ast.Statement.(function - | _, VariableDeclaration variable_declaration -> add_variable_declaration env variable_declaration - | _, DeclareVariable declare_variable -> add_declare_variable env declare_variable - | _, FunctionDeclaration function_declaration -> add_function_declaration env function_declaration - | _, DeclareFunction declare_function -> add_declare_function env declare_function - | _, ClassDeclaration class_ -> add_class env class_ - | _, DeclareClass declare_class -> add_declare_class env declare_class - | _, TypeAlias type_alias -> add_type_alias env type_alias - | _, DeclareTypeAlias type_alias -> add_type_alias env 
type_alias - | _, OpaqueType opaque_type -> add_opaque_type env opaque_type - | _, DeclareOpaqueType opaque_type -> add_opaque_type env opaque_type - | _, InterfaceDeclaration interface -> add_interface env interface - | _, DeclareInterface interface -> add_interface env interface - - | _, Block _ - | _, DoWhile _ - | _, For _ - | _, ForIn _ - | _, ForOf _ - | _, If _ - | _, Labeled _ - | _, Switch _ - | _, Try _ - | _, While _ - | _, DeclareExportDeclaration _ - | _, ExportDefaultDeclaration _ - | _, ExportNamedDeclaration _ - | _, ImportDeclaration _ - | _, DeclareModule _ - | _, DeclareModuleExports _ - | _, Empty - | _, Expression _ - | _, Break _ - | _, Continue _ - | _, Throw _ - | _, Return _ - | _, Debugger - | _, With _ - -> assert false - ) + let add_env env entry = Env.add entry env + + let add_env_list env entries = Env.push entries env + + let add_variable_declaration env loc variable_declaration = + add_env_list env (Entry.variable_declaration loc variable_declaration) + + let add_function_declaration env loc function_declaration = + add_env env (Entry.function_declaration loc function_declaration) + + let add_function_expression env loc function_expression = + add_env env (Entry.function_expression loc function_expression) + + let add_class env loc class_ = add_env env (Entry.class_ loc class_) + + let add_declare_variable env loc declare_variable = + add_env env (Entry.declare_variable loc declare_variable) + + let add_declare_function env loc declare_function = + add_env env (Entry.declare_function loc declare_function) + + let add_declare_class env loc declare_class = add_env env (Entry.declare_class loc declare_class) + + let add_type_alias env loc type_alias = add_env env (Entry.type_alias loc type_alias) + + let add_opaque_type env loc opaque_type = add_env env (Entry.opaque_type loc opaque_type) + + let add_interface env loc interface = add_env env (Entry.interface loc interface) + + let add_declare_export_declaration env = + Ast.Statement.DeclareExportDeclaration.( + function + | Variable (loc, declare_variable) -> add_declare_variable env loc declare_variable + | Function (loc, declare_function) -> add_declare_function env loc declare_function + | Class (loc, declare_class) -> add_declare_class env loc declare_class + | NamedType (loc, type_alias) -> add_type_alias env loc type_alias + | NamedOpaqueType (loc, opaque_type) -> add_opaque_type env loc opaque_type + | Interface (loc, interface) -> add_interface env loc interface + | DefaultType _ -> assert false) + + let add_export_default_declaration env = + Ast.Statement.ExportDefaultDeclaration.( + function + | Declaration + ( loc, + Ast.Statement.FunctionDeclaration + ({ Ast.Function.id = Some _; _ } as function_declaration) ) -> + add_function_declaration env loc function_declaration + | Declaration (loc, Ast.Statement.ClassDeclaration ({ Ast.Class.id = Some _; _ } as class_)) + -> + add_class env loc class_ + | Declaration _ -> assert false + | Expression (loc, Ast.Expression.Function ({ Ast.Function.id = Some _; _ } as function_)) -> + add_function_expression env loc function_ + | Expression _ -> assert false + (* TODO: class? 
*)) + + let add_stmt env = + Ast.Statement.( + function + | (loc, VariableDeclaration variable_declaration) -> + add_variable_declaration env loc variable_declaration + | (loc, DeclareVariable declare_variable) -> add_declare_variable env loc declare_variable + | (loc, FunctionDeclaration function_declaration) -> + add_function_declaration env loc function_declaration + | (loc, DeclareFunction declare_function) -> add_declare_function env loc declare_function + | (loc, ClassDeclaration class_) -> add_class env loc class_ + | (loc, DeclareClass declare_class) -> add_declare_class env loc declare_class + | (loc, TypeAlias type_alias) -> add_type_alias env loc type_alias + | (loc, DeclareTypeAlias type_alias) -> add_type_alias env loc type_alias + | (loc, OpaqueType opaque_type) -> add_opaque_type env loc opaque_type + | (loc, DeclareOpaqueType opaque_type) -> add_opaque_type env loc opaque_type + | (loc, InterfaceDeclaration interface) -> add_interface env loc interface + | (loc, DeclareInterface interface) -> add_interface env loc interface + | (_, Block _) + | (_, DoWhile _) + | (_, For _) + | (_, ForIn _) + | (_, ForOf _) + | (_, If _) + | (_, Labeled _) + | (_, Switch _) + | (_, Try _) + | (_, While _) + | (_, DeclareExportDeclaration _) + | (_, ExportDefaultDeclaration _) + | (_, ExportNamedDeclaration _) + | (_, ImportDeclaration _) + | (_, DeclareModule _) + | (_, DeclareModuleExports _) + | (_, Empty) + | (_, EnumDeclaration _) + (* TODO(T44736715) Support enums in signature builder/verifier/etc. *) + + | (_, Expression _) + | (_, Break _) + | (_, Continue _) + | (_, Throw _) + | (_, Return _) + | (_, Debugger) + | (_, With _) -> + assert false) let add_export_value_bindings named named_infos env = - let open File_sig in - SMap.fold (fun n export_def env -> - let export = SMap.find n named in - match export, export_def with - | ExportDefault { local; _ }, DeclareExportDef declare_export_declaration -> - begin match local with - | Some _id -> add_declare_export_declaration env declare_export_declaration - | None -> env - end - | ExportNamed { kind; _ }, DeclareExportDef declare_export_declaration -> - begin match kind with - | NamedDeclaration -> add_declare_export_declaration env declare_export_declaration - | NamedSpecifier _ -> assert false - end - | ExportDefault { local; _ }, ExportDefaultDef export_default_declaration -> - begin match local with - | Some _id -> add_export_default_declaration env export_default_declaration - | None -> env - end - | ExportNamed { kind; _ }, ExportNamedDef stmt -> - begin match kind with - | NamedDeclaration -> add_stmt env stmt - | NamedSpecifier _ -> assert false - end - | _ -> assert false - ) named_infos env + File_sig.( + let named = + List.filter + (function + | (_, (_, ExportNamed { kind = NamedSpecifier _; _ })) + | (_, (_, ExportNs _)) -> + false + | (_, (_, _)) -> true) + named + in + List.fold_left2 + (fun env (_n, (_, export)) export_def -> + match (export, export_def) with + | (ExportDefault { local; _ }, DeclareExportDef declare_export_declaration) -> + begin + match local with + | Some _id -> add_declare_export_declaration env declare_export_declaration + | None -> env + end + | (ExportNamed { kind; _ }, DeclareExportDef declare_export_declaration) -> + begin + match kind with + | NamedDeclaration -> add_declare_export_declaration env declare_export_declaration + | NamedSpecifier _ -> assert false + end + | (ExportDefault { local; _ }, ExportDefaultDef export_default_declaration) -> + begin + match local with + | Some _id -> 
add_export_default_declaration env export_default_declaration + | None -> env + end + | (ExportNamed { kind; _ }, ExportNamedDef stmt) -> + begin + match kind with + | NamedDeclaration -> add_stmt env stmt + | NamedSpecifier _ -> assert false + end + | _ -> assert false) + env + named + named_infos) let add_export_type_bindings type_named type_named_infos env = - let open File_sig in - SMap.fold (fun n export_def env -> - let export = SMap.find n type_named in - match export, export_def with - | TypeExportNamed { kind; _ }, DeclareExportDef declare_export_declaration -> - begin match kind with - | NamedDeclaration -> add_declare_export_declaration env declare_export_declaration - | NamedSpecifier _ -> assert false - end - | TypeExportNamed { kind; _ }, ExportNamedDef stmt -> - begin match kind with - | NamedDeclaration -> add_stmt env stmt - | NamedSpecifier _ -> assert false - end - | _ -> assert false - ) type_named_infos env - - let add_named_imports ?(filter=(fun _ -> true)) source kind named_imports env = - SMap.fold (fun remote ids env -> - SMap.fold (fun local locs env -> - Nel.fold_left (fun env { File_sig.remote_loc; local_loc } -> - let id = local_loc, local in - let name = remote_loc, remote in - if filter id then add_env env (Entry.import_named id name kind source) else env - ) env locs - ) ids env - ) named_imports env - - let add_require_bindings toplevel_names source require_bindings env = + File_sig.( + let type_named = + List.filter + (function + | (_, (_, TypeExportNamed { kind = NamedSpecifier _; _ })) -> false + | (_, (_, _)) -> true) + type_named + in + List.fold_left2 + (fun env (_n, (_, export)) export_def -> + match (export, export_def) with + | (TypeExportNamed { kind; _ }, DeclareExportDef declare_export_declaration) -> + begin + match kind with + | NamedDeclaration -> add_declare_export_declaration env declare_export_declaration + | NamedSpecifier _ -> assert false + end + | (TypeExportNamed { kind; _ }, ExportNamedDef stmt) -> + begin + match kind with + | NamedDeclaration -> add_stmt env stmt + | NamedSpecifier _ -> assert false + end + | _ -> assert false) + env + type_named + type_named_infos) + + let add_named_imports import_loc source kind named_imports env = + SMap.fold + (fun remote ids env -> + SMap.fold + (fun local locs env -> + Nel.fold_left + (fun env { File_sig.remote_loc; local_loc } -> + let id = Flow_ast_utils.ident_of_source (local_loc, local) in + let name = (remote_loc, remote) in + add_env env (Entry.import_named import_loc id name kind source)) + env + locs) + ids + env) + named_imports + env + + let rec add_require_bindings toplevel_names require_loc source ?name require_bindings env = let filter (_, x) = SSet.mem x toplevel_names in - let open File_sig in - match require_bindings with - | BindIdent id -> if filter id then add_env env (Entry.require id source) else env - | BindNamed named_imports -> - let kind = Ast.Statement.ImportDeclaration.ImportValue in - add_named_imports ~filter source kind named_imports env - - let add_ns_imports source kind ns_imports env = + File_sig.( + match require_bindings with + | BindIdent id -> + if filter id then + add_env env (Entry.require require_loc (Flow_ast_utils.ident_of_source id) ?name source) + else + env + | BindNamed named_requires -> + List.fold_left + (fun env (remote, require_bindings) -> + let name = + match name with + | None -> Nel.one remote + | Some name -> Nel.cons remote name + in + add_require_bindings toplevel_names require_loc source ~name require_bindings env) + env + 
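(* Worked example of the two import helpers above (illustrative only; the
   snippets are JavaScript source, not part of this change):

     import { a as b } from 'm';
       add_named_imports records one entry for the local binding `b`,
       pairing its location with the remote name `a`, the import kind, and
       the source 'm'.

     const { x } = require('m');
       add_require_bindings recurses through BindNamed until it reaches a
       BindIdent leaf, so the entry for the local `x` is an Entry.require
       that also carries the destructured property name via the optional
       `name` argument. Leaves whose local name is not in `toplevel_names`
       are dropped by `filter`, since only toplevel bindings can contribute
       to the module's signature. *)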
named_requires) + + let add_ns_imports import_loc source kind ns_imports env = match ns_imports with - | None -> env - | Some id -> add_env env (Entry.import_star id kind source) + | None -> env + | Some id -> add_env env (Entry.import_star import_loc id kind source) let mk env toplevel_names file_sig = - let open File_sig in - let module_sig = file_sig.module_sig in - let { - requires = imports_info; - info = exports_info; - module_kind; - type_exports_named; - _ - } = module_sig in - let env = - let { module_kind_info; type_exports_named_info } = exports_info in - let env = match module_kind, module_kind_info with - | CommonJS _, CommonJSInfo _ -> env - | ES { named; _ }, ESInfo named_infos -> - add_export_value_bindings named named_infos env - | _ -> assert false + File_sig.( + let module_sig = file_sig.module_sig in + let { requires = imports_info; info = exports_info; module_kind; type_exports_named; _ } = + module_sig + in + let env = + let { module_kind_info; type_exports_named_info } = exports_info in + let env = + match (module_kind, module_kind_info) with + | (CommonJS _, CommonJSInfo _) -> env + | (ES { named; _ }, ESInfo named_infos) -> + add_export_value_bindings named named_infos env + | _ -> assert false + in + add_export_type_bindings type_exports_named type_exports_named_info env in - add_export_type_bindings type_exports_named type_exports_named_info env + let env = + List.fold_left + (fun env -> function + | Require { source; bindings = Some require_bindings; require_loc } -> + add_require_bindings toplevel_names require_loc source require_bindings env + | Import { import_loc; source; named; ns; types; typesof; typesof_ns } -> + Ast.Statement.ImportDeclaration.( + let env = add_named_imports import_loc source ImportValue named env in + let env = + add_ns_imports + import_loc + source + ImportValue + (Option.map ~f:Flow_ast_utils.ident_of_source ns) + env + in + let env = add_named_imports import_loc source ImportType types env in + let env = add_named_imports import_loc source ImportTypeof typesof env in + add_ns_imports + import_loc + source + ImportTypeof + (Option.map ~f:Flow_ast_utils.ident_of_source typesof_ns) + env) + | _ -> env) + env + imports_info + in + (env, file_sig)) + + let verify + ?(prevent_munge = false) + ?(facebook_fbt = None) + ?(ignore_static_propTypes = false) + ?(facebook_keyMirror = false) + (env, file_sig) = + let module Verify = V (struct + let prevent_munge = prevent_munge + + let facebook_fbt = facebook_fbt + + let ignore_static_propTypes = ignore_static_propTypes + + let facebook_keyMirror = facebook_keyMirror + end) in + Verify.check env file_sig @@ Verify.exports file_sig + + let generate + ?(prevent_munge = false) + ?(facebook_fbt = None) + ?(ignore_static_propTypes = false) + ?(facebook_keyMirror = false) + (env, file_sig) + program = + let module Generate = G (struct + let prevent_munge = prevent_munge + + let facebook_fbt = facebook_fbt + + let ignore_static_propTypes = ignore_static_propTypes + + let facebook_keyMirror = facebook_keyMirror + end) in + Generate.make env file_sig program + + let verify_and_generate + ?(prevent_munge = false) + ?(facebook_fbt = None) + ?(ignore_static_propTypes = false) + ?(facebook_keyMirror = false) + (env, file_sig) + program = + let (errors, _, pruned_env) = + verify + ~prevent_munge + ~facebook_fbt + ~ignore_static_propTypes + ~facebook_keyMirror + (env, file_sig) in - let env = List.fold_left (fun env -> function - | Require { source; bindings = Some require_bindings; _ } -> - add_require_bindings 
toplevel_names source require_bindings env - | Import { source; named; ns; types; typesof; typesof_ns } -> - let env = add_named_imports source Ast.Statement.ImportDeclaration.ImportValue named env in - let env = add_ns_imports source Ast.Statement.ImportDeclaration.ImportValue ns env in - let env = add_named_imports source Ast.Statement.ImportDeclaration.ImportType types env in - let env = add_named_imports source Ast.Statement.ImportDeclaration.ImportTypeof typesof env in - add_ns_imports source Ast.Statement.ImportDeclaration.ImportTypeof typesof_ns env - | _ -> env - ) env imports_info in - env, file_sig - - let verify ?(prevent_munge=false) (env, file_sig) = - match prevent_munge with - | false -> Verify.check env file_sig @@ Verify.exports file_sig - | true -> VerifyPreventMunge.check env file_sig @@ VerifyPreventMunge.exports file_sig + ( errors, + if Signature_builder_deps.PrintableErrorSet.is_empty errors then + generate + ~prevent_munge + ~facebook_fbt + ~ignore_static_propTypes + ~facebook_keyMirror + (pruned_env, file_sig) + program + else + generate + ~prevent_munge + ~facebook_fbt + ~ignore_static_propTypes + ~facebook_keyMirror + (env, file_sig) + program ) end -class type_hoister = object(this) - inherit [Env.t] visitor ~init:Env.empty as super +class type_hoister = + object (this) + inherit [Env.t, Loc.t] visitor ~init:Env.empty as super - (* tracks the current block scope level; for now, this can only take on values 0 and 1 *) - val mutable level = 0 - method private next f = - level <- level + 1; - Lazy.force f; - level <- level - 1 + (* tracks the current block scope level; for now, this can only take on values 0 and 1 *) + val mutable level = 0 - method private is_toplevel = - level = 0 + method private next f = + level <- level + 1; + Lazy.force f; + level <- level - 1 - method private add_binding entry = - let entry = - if this#is_toplevel then entry - else - let id, _ = entry in - Entry.sketchy_toplevel id - in - this#update_acc (Env.add entry) + method private is_toplevel = level = 0 - method private add_binding_opt = function - | None, _ -> () - | Some id, kind -> this#add_binding (id, kind) - - method private add_binding_list = - List.iter (fun entry -> this#add_binding entry) - - (* Process local declarations. Ignore import declarations and export declarations since they are + method private add_binding entry = + let entry = + if this#is_toplevel then + entry + else + let (id, (loc, _)) = entry in + Entry.sketchy_toplevel loc id + in + this#update_acc (Env.add entry) + + method private update_binding (x, id, expr) = + this#update_acc (fun env -> + match SMap.get x env with + | None -> env + | Some u -> + SMap.add + x + (Loc_collections.LocMap.map + (function + | (loc, Signature_builder_kind.WithPropertiesDef def) -> + ( loc, + Signature_builder_kind.WithPropertiesDef + { def with properties = (id, expr) :: def.properties } ) + | (loc, base) -> + ( loc, + Signature_builder_kind.WithPropertiesDef { base; properties = [(id, expr)] } + )) + u) + env) + + method private add_binding_opt = + function + | (None, _) -> () + | (Some id, kind) -> this#add_binding (id, kind) + + method private add_binding_list = List.iter (fun entry -> this#add_binding entry) + + (* Process local declarations. Ignore import declarations and export declarations since they are handled in File_sig, although it is likely there is still some overlap, in which case we arrange things so that whatever File_sig does wins. *) - method! 
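(* Concrete example of the level tracking above (illustrative only; the
   snippet is JavaScript source):

     if (cond) { var x = 0; }

   `var x` is function-scoped, so the hoister still records it, but the
   declaration is visited at level 1 (inside the `if`), so add_binding swaps
   the precise entry for Entry.sketchy_toplevel. Downstream, such entries can
   then surface as SketchyToplevelDef errors rather than being given an
   inferred type. *)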
toplevel_statement_list (stmts: (Loc.t, Loc.t) Ast.Statement.t list) = - stmts |> ListUtils.ident_map (fun stmt -> - let open Ast.Statement in - match stmt with - (* process bindings *) - | _, VariableDeclaration _ - | _, DeclareVariable _ - | _, FunctionDeclaration _ - | _, DeclareFunction _ - | _, ClassDeclaration _ - | _, DeclareClass _ - | _, TypeAlias _ - | _, DeclareTypeAlias _ - | _, OpaqueType _ - | _, DeclareOpaqueType _ - | _, InterfaceDeclaration _ - | _, DeclareInterface _ - -> super#statement stmt - - (* recurse through control-flow *) - | _, Block _ - | _, DoWhile _ - | _, For _ - | _, ForIn _ - | _, ForOf _ - | _, If _ - | _, Labeled _ - | _, Switch _ - | _, Try _ - | _, While _ - -> - this#next (lazy (ignore @@ super#statement stmt)); - stmt - - (* shortcut *) - | _, DeclareExportDeclaration _ - | _, ExportDefaultDeclaration _ - | _, ExportNamedDeclaration _ - | _, ImportDeclaration _ - | _, DeclareModule _ - | _, DeclareModuleExports _ - | _, Empty - | _, Expression _ - | _, Break _ - | _, Continue _ - | _, Throw _ - | _, Return _ - | _, Debugger - | _, With _ - -> stmt - ) - - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let open Ast.Statement in - match stmt with - (* ignore block-scoped bindings and type bindings *) - | _, ClassDeclaration _ - | _, DeclareClass _ - | _, TypeAlias _ - | _, DeclareTypeAlias _ - | _, OpaqueType _ - | _, DeclareOpaqueType _ - | _, InterfaceDeclaration _ - | _, DeclareInterface _ - -> stmt - - (* process function-scoped bindings *) - | _, VariableDeclaration decl -> - let open Ast.Statement.VariableDeclaration in - let { kind; _ } = decl in - begin match kind with - | Ast.Statement.VariableDeclaration.Var -> super#statement stmt - | Ast.Statement.VariableDeclaration.Let | Ast.Statement.VariableDeclaration.Const -> stmt - end - | _, DeclareVariable _ - | _, FunctionDeclaration _ - | _, DeclareFunction _ - -> super#statement stmt - - (* recurse through control flow *) - | _, Block _ - | _, DoWhile _ - | _, For _ - | _, ForIn _ - | _, ForOf _ - | _, If _ - | _, Labeled _ - | _, Switch _ - | _, Try _ - | _, While _ - -> super#statement stmt - - (* shortcut *) - | _, DeclareExportDeclaration _ - | _, ExportDefaultDeclaration _ - | _, ExportNamedDeclaration _ - | _, ImportDeclaration _ - | _, DeclareModule _ - | _, DeclareModuleExports _ - | _, Empty - | _, Expression _ - | _, Break _ - | _, Continue _ - | _, Throw _ - | _, Return _ - | _, Debugger - | _, With _ - -> stmt - - method! variable_declaration _loc (decl: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) = - this#add_binding_list (Entry.variable_declaration decl); - decl - - method! declare_variable _loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareVariable.t) = - this#add_binding (Entry.declare_variable decl); - decl - - method! function_declaration _loc (expr: (Loc.t, Loc.t) Ast.Function.t) = - this#add_binding_opt (Entry.function_declaration expr); - expr - - method! declare_function _loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareFunction.t) = - this#add_binding (Entry.declare_function decl); - decl - - method! class_ _loc (cls: (Loc.t, Loc.t) Ast.Class.t) = - this#add_binding_opt (Entry.class_ cls); - cls - - method! declare_class _loc (decl: (Loc.t, Loc.t) Ast.Statement.DeclareClass.t) = - this#add_binding (Entry.declare_class decl); - decl - - method! type_alias _loc (stuff: (Loc.t, Loc.t) Ast.Statement.TypeAlias.t) = - this#add_binding (Entry.type_alias stuff); - stuff - - method! 
opaque_type _loc (otype: (Loc.t, Loc.t) Ast.Statement.OpaqueType.t) = - this#add_binding (Entry.opaque_type otype); - otype - - method! interface (interface: (Loc.t, Loc.t) Ast.Statement.Interface.t) = - this#add_binding (Entry.interface interface); - interface - - (* Ignore expressions *) - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - expr - -end - -let program program = + method! toplevel_statement_list (stmts : (Loc.t, Loc.t) Ast.Statement.t list) = + stmts + |> ListUtils.ident_map (fun stmt -> + Ast.Statement.( + match stmt with + (* process bindings *) + | (_, VariableDeclaration _) + | (_, DeclareVariable _) + | (_, FunctionDeclaration _) + | (_, DeclareFunction _) + | (_, ClassDeclaration _) + | (_, DeclareClass _) + | (_, EnumDeclaration _) + | (_, TypeAlias _) + | (_, DeclareTypeAlias _) + | (_, OpaqueType _) + | (_, DeclareOpaqueType _) + | (_, InterfaceDeclaration _) + | (_, DeclareInterface _) -> + super#statement stmt + (* recurse through control-flow *) + | (_, Block _) + | (_, DoWhile _) + | (_, For _) + | (_, ForIn _) + | (_, ForOf _) + | (_, If _) + | (_, Labeled _) + | (_, Switch _) + | (_, Try _) + | (_, While _) -> + this#next (lazy (ignore @@ super#statement stmt)); + stmt + | ( _, + Expression + { + Expression.expression = + ( _, + Ast.Expression.Assignment + { + Ast.Expression.Assignment.operator = None; + left = + ( _, + Ast.Pattern.Expression + ( _, + Ast.Expression.Member + { + Ast.Expression.Member._object = + ( _, + Ast.Expression.Identifier + (_, { Ast.Identifier.name = x; _ }) ); + property = Ast.Expression.Member.PropertyIdentifier id; + } ) ); + right = expr; + } ); + _; + } ) -> + this#update_binding (x, id, expr); + stmt + (* shortcut *) + | (_, DeclareExportDeclaration _) + | (_, ExportDefaultDeclaration _) + | (_, ExportNamedDeclaration _) + | (_, ImportDeclaration _) + | (_, DeclareModule _) + | (_, DeclareModuleExports _) + | (_, Empty) + | (_, Expression _) + | (_, Break _) + | (_, Continue _) + | (_, Throw _) + | (_, Return _) + | (_, Debugger) + | (_, With _) -> + stmt)) + + method! statement (stmt : (Loc.t, Loc.t) Ast.Statement.t) = + Ast.Statement.( + match stmt with + (* ignore block-scoped bindings and type bindings *) + | (_, ClassDeclaration _) + | (_, DeclareClass _) + | (_, EnumDeclaration _) + | (_, TypeAlias _) + | (_, DeclareTypeAlias _) + | (_, OpaqueType _) + | (_, DeclareOpaqueType _) + | (_, InterfaceDeclaration _) + | (_, DeclareInterface _) -> + stmt + (* process function-scoped bindings *) + | (_, VariableDeclaration decl) -> + Ast.Statement.VariableDeclaration.( + let { kind; _ } = decl in + begin + match kind with + | Ast.Statement.VariableDeclaration.Var -> super#statement stmt + | Ast.Statement.VariableDeclaration.Let + | Ast.Statement.VariableDeclaration.Const -> + stmt + end) + | (_, DeclareVariable _) + | (_, FunctionDeclaration _) + | (_, DeclareFunction _) -> + super#statement stmt + (* recurse through control flow *) + | (_, Block _) + | (_, DoWhile _) + | (_, For _) + | (_, ForIn _) + | (_, ForOf _) + | (_, If _) + | (_, Labeled _) + | (_, Switch _) + | (_, Try _) + | (_, While _) -> + super#statement stmt + (* shortcut *) + | (_, DeclareExportDeclaration _) + | (_, ExportDefaultDeclaration _) + | (_, ExportNamedDeclaration _) + | (_, ImportDeclaration _) + | (_, DeclareModule _) + | (_, DeclareModuleExports _) + | (_, Empty) + | (_, Expression _) + | (_, Break _) + | (_, Continue _) + | (_, Throw _) + | (_, Return _) + | (_, Debugger) + | (_, With _) -> + stmt) + + method! 
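(* Example of the member-assignment case now handled in
   toplevel_statement_list (illustrative only; the snippet is JavaScript
   source):

     function f() {}
     f.answer = 42;

   The second statement matches the `<identifier> . <property> = <expr>`
   pattern, so update_binding wraps f's existing entry in
   Signature_builder_kind.WithPropertiesDef and adds (answer, 42) to its
   properties, presumably feeding the generator's FunctionWithStaticsDecl
   case so the static is preserved in the signature. *)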
variable_declaration loc (decl : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) = + this#add_binding_list (Entry.variable_declaration loc decl); + decl + + method! declare_variable loc (decl : (Loc.t, Loc.t) Ast.Statement.DeclareVariable.t) = + this#add_binding (Entry.declare_variable loc decl); + decl + + method! function_declaration loc (expr : (Loc.t, Loc.t) Ast.Function.t) = + this#add_binding (Entry.function_declaration loc expr); + expr + + method! declare_function loc (decl : (Loc.t, Loc.t) Ast.Statement.DeclareFunction.t) = + this#add_binding (Entry.declare_function loc decl); + decl + + method! class_ loc (cls : (Loc.t, Loc.t) Ast.Class.t) = + this#add_binding (Entry.class_ loc cls); + cls + + method! declare_class loc (decl : (Loc.t, Loc.t) Ast.Statement.DeclareClass.t) = + this#add_binding (Entry.declare_class loc decl); + decl + + method! type_alias loc (stuff : (Loc.t, Loc.t) Ast.Statement.TypeAlias.t) = + this#add_binding (Entry.type_alias loc stuff); + stuff + + method! opaque_type loc (otype : (Loc.t, Loc.t) Ast.Statement.OpaqueType.t) = + this#add_binding (Entry.opaque_type loc otype); + otype + + method! interface loc (interface : (Loc.t, Loc.t) Ast.Statement.Interface.t) = + this#add_binding (Entry.interface loc interface); + interface + + (* Ignore expressions *) + method! expression (expr : (Loc.t, Loc.t) Ast.Expression.t) = expr + end + +let program program ~module_ref_prefix = let env = let hoist = new type_hoister in - hoist#eval hoist#program program in + hoist#eval hoist#program program + in let { File_sig.toplevel_names; exports_info } = - File_sig.program_with_toplevel_names_and_exports_info program in + File_sig.program_with_toplevel_names_and_exports_info ~ast:program ~module_ref_prefix + in match exports_info with - | Ok exports_info -> Ok (Signature.mk env toplevel_names exports_info) - | Error e -> Error e + | Ok exports_info -> Ok (Signature.mk env toplevel_names exports_info) + | Error e -> Error e diff --git a/src/parser_utils/signature_builder_deps.ml b/src/parser_utils/signature_builder_deps.ml index ebf8c73720e..10733cf1c31 100644 --- a/src/parser_utils/signature_builder_deps.ml +++ b/src/parser_utils/signature_builder_deps.ml @@ -1,128 +1,262 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
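(* Minimal sketch of how the new entry point composes with the Signature
   module (illustrative only; `_signature_errors` and its `ast` /
   `module_ref_prefix` arguments are placeholders for whatever the caller
   already has in hand). *)
let _signature_errors ~module_ref_prefix ast =
  match program ast ~module_ref_prefix with
  | Error _ -> None
  | Ok sig_env ->
    (* verify_and_generate verifies first and falls back to the unpruned env
       when verification reported errors *)
    let (errors, _generated) = Signature.verify_and_generate sig_env ast in
    Some errors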
*) +module Ast_utils = Flow_ast_utils + let spf = Printf.sprintf module Sort = Signature_builder_kind.Sort -module Error = struct - type t = - | ExpectedSort of Sort.t * string * Loc.t - | ExpectedAnnotation of Loc.t - | InvalidTypeParamUse of Loc.t - | UnexpectedObjectKey of Loc.t - | UnexpectedExpression of Loc.t * Ast_utils.ExpressionSort.t - | SketchyToplevelDef of Loc.t - | TODO of string * Loc.t - - let compare = Pervasives.compare - - let to_string = function - | ExpectedSort (sort, x, loc) -> - spf "%s @ %s is not a %s" - x (Loc.to_string loc) (Sort.to_string sort) - | ExpectedAnnotation loc -> spf "Expected annotation @ %s" (Loc.to_string loc) - | InvalidTypeParamUse loc -> spf "Invalid use of type parameter @ %s" (Loc.to_string loc) - | UnexpectedObjectKey loc -> spf "Expected simple object key @ %s" (Loc.to_string loc) - | UnexpectedExpression (loc, esort) -> - spf "Expected literal expression instead of %s @ %s" - (Ast_utils.ExpressionSort.to_string esort) (Loc.to_string loc) - | SketchyToplevelDef loc -> - spf "Unexpected toplevel definition that needs hoisting @ %s" (Loc.to_string loc) - | TODO (msg, loc) -> spf "TODO: %s @ %s" msg (Loc.to_string loc) +module Make (L : Loc_sig.S) : Signature_builder_deps_sig.S with module L = L = struct + module L = L -end -module ErrorSet = Set.Make (Error) - -module Dep = struct - type t = - | Local of local - | Dynamic of dynamic - | Remote of remote - - and local = Sort.t * string - - and dynamic = - | DynamicImport of Loc.t - | DynamicRequire of Loc.t - - and remote = - | ImportNamed of { - sort: Sort.t; - source: Ast_utils.source; - name: Ast_utils.ident; - } - | ImportStar of { - sort: Sort.t; - source: Ast_utils.source; - } - | Require of { - source: Ast_utils.source; - } - | Global of local - - let compare = Pervasives.compare - - let expectation sort x loc = Error.ExpectedSort (sort, x, loc) - - let remote = function - | Remote _ -> true - | Local _ | Dynamic _ -> false - - let to_string = - let string_of_import_sort = function - | Sort.Value -> "import" - | Sort.Type -> "import type" in - let string_of_local (sort, x) = - spf "%s: %s" (Sort.to_string sort) x in - let string_of_dynamic = function - | DynamicImport loc -> spf "import @ %s" (Loc.to_string loc) - | DynamicRequire loc -> spf "require @ %s" (Loc.to_string loc) in - let string_of_remote = function - | ImportNamed { sort; name = (_, n); source = (_, m) } -> - spf "%s { %s } from '%s'" (string_of_import_sort sort) n m - | ImportStar { sort; source = (_, m) } -> - spf "%s * from '%s'" (string_of_import_sort sort) m - | Require { source = (_, m) } -> spf "require('%s')" m - | Global local -> spf "global %s" (string_of_local local) - in function + module ExpectedAnnotationSort = struct + type t = + | ArrayPattern + | FunctionReturn + | PrivateField of L.t Flow_ast.PrivateName.t + | Property of (L.t, L.t) Flow_ast.Expression.Object.Property.key + | VariableDefinition of (L.t, L.t) Flow_ast.Identifier.t + + let property_key_to_string = + Flow_ast.Expression.Object.Property.( + function + | Literal (_, lit) -> + let lit = Reason.code_desc_of_literal lit in + spf "literal property %s" lit + | Identifier (_, { Flow_ast.Identifier.name; _ }) -> spf "property `%s`" name + | PrivateName (_, (_, { Flow_ast.Identifier.name; _ })) -> spf "property `%s`" name + | Computed e -> + let e = Reason.code_desc_of_expression ~wrap:false e in + spf "computed property `[%s]`" e) + + let to_string = function + | ArrayPattern -> "array pattern" + | FunctionReturn -> "function return" + | Property key 
-> property_key_to_string key + | PrivateField (_, (_, { Flow_ast.Identifier.name; _ })) -> spf "private field `#%s`" name + | VariableDefinition (_, { Flow_ast.Identifier.name; _ }) -> + spf "declaration of variable `%s`" name + end + + module Error = struct + type t = + | ExpectedSort of Sort.t * string * L.t + | ExpectedAnnotation of L.t * ExpectedAnnotationSort.t + | InvalidTypeParamUse of L.t + | UnexpectedObjectKey of L.t * L.t + | UnexpectedObjectSpread of L.t * L.t + | UnexpectedArraySpread of L.t * L.t + | UnexpectedArrayHole of L.t + | EmptyArray of L.t + | EmptyObject of L.t + | UnexpectedExpression of L.t * Ast_utils.ExpressionSort.t + | SketchyToplevelDef of L.t + | UnsupportedPredicateExpression of L.t + | TODO of string * L.t + + let compare = Pervasives.compare + + let debug_to_string = function + | ExpectedSort (sort, x, loc) -> + spf "%s @ %s is not a %s" x (L.debug_to_string loc) (Sort.to_string sort) + | ExpectedAnnotation (loc, sort) -> + spf + "Expected annotation at %s @ %s" + (ExpectedAnnotationSort.to_string sort) + (L.debug_to_string loc) + | InvalidTypeParamUse loc -> spf "Invalid use of type parameter @ %s" (L.debug_to_string loc) + | UnexpectedObjectKey (_loc, key_loc) -> + spf "Expected simple object key @ %s" (L.debug_to_string key_loc) + | UnexpectedObjectSpread (_loc, spread_loc) -> + spf "Unexpected object spread @ %s" (L.debug_to_string spread_loc) + | UnexpectedArraySpread (_loc, spread_loc) -> + spf "Unexpected array spread @ %s" (L.debug_to_string spread_loc) + | UnexpectedArrayHole loc -> spf "Unexpected array hole @ %s" (L.debug_to_string loc) + | EmptyArray loc -> + spf "Cannot determine the element type of an empty array @ %s" (L.debug_to_string loc) + | EmptyObject loc -> + spf + "Cannot determine types of initialized properties of an empty object @ %s" + (L.debug_to_string loc) + | UnexpectedExpression (loc, esort) -> + spf + "Cannot determine the type of this %s @ %s" + (Ast_utils.ExpressionSort.to_string esort) + (L.debug_to_string loc) + | SketchyToplevelDef loc -> + spf "Unexpected toplevel definition that needs hoisting @ %s" (L.debug_to_string loc) + | UnsupportedPredicateExpression loc -> + spf "Unsupported predicate expression @ %s" (L.debug_to_string loc) + | TODO (msg, loc) -> spf "TODO: %s @ %s" msg (L.debug_to_string loc) + end + + module PrintableErrorSet = Set.Make (Error) + + module Dep = struct + type t = + | Local of local + | Dynamic of dynamic + | Remote of remote + + and local = Sort.t * string + + and dynamic = + | Class of L.t * string + | DynamicImport of L.t + | DynamicRequire of L.t + + and remote = + | ImportNamed of { + sort: Sort.t; + source: L.t Ast_utils.source; + name: L.t Ast_utils.ident; + } + | ImportStar of { + sort: Sort.t; + source: L.t Ast_utils.source; + } + | Require of { + source: L.t Ast_utils.source; + name: L.t Ast_utils.ident Nel.t option; + } + | Global of local + + let compare = Pervasives.compare + + let expectation sort x loc = Error.ExpectedSort (sort, x, loc) + + let remote = function + | Remote _ -> true + | Local _ + | Dynamic _ -> + false + + let local_uses dep acc = + match dep with + | Local (_, n) -> SSet.add n acc + | Remote _ + | Dynamic _ -> + acc + + let to_string = + let string_of_import_sort = function + | Sort.Value -> "import" + | Sort.Type -> "import type" + in + let string_of_local (sort, x) = spf "%s: %s" (Sort.to_string sort) x in + let string_of_dynamic = function + | Class (loc, x) -> spf "class %s @ %s" x (L.debug_to_string loc) + | DynamicImport loc -> spf "import @ %s" 
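(* Example of the richer error rendering above (illustrative only; `loc` is a
   placeholder): an ExpectedAnnotation error attributed to an array pattern
   now renders as

     Expected annotation at array pattern @ <loc>

   via ExpectedAnnotationSort.to_string, instead of the bare location that
   the old Error.to_string printed. *)
let _example_message loc =
  Error.debug_to_string (Error.ExpectedAnnotation (loc, ExpectedAnnotationSort.ArrayPattern))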
(L.debug_to_string loc) + | DynamicRequire loc -> spf "require @ %s" (L.debug_to_string loc) + in + let string_of_remote = function + | ImportNamed { sort; name = (_, n); source = (_, m) } -> + spf "%s { %s } from '%s'" (string_of_import_sort sort) n m + | ImportStar { sort; source = (_, m) } -> + spf "%s * from '%s'" (string_of_import_sort sort) m + | Require { source = (_, m); name } -> + begin + match name with + | None -> spf "require('%s')" m + | Some ns -> spf "require('%s').%s" m (ListUtils.to_string "." snd @@ Nel.to_list ns) + end + | Global local -> spf "global %s" (string_of_local local) + in + function | Local local -> string_of_local local | Dynamic dynamic -> string_of_dynamic dynamic | Remote remote -> string_of_remote remote -end + end + + module DepSet = Set.Make (Dep) + + type t = DepSet.t * PrintableErrorSet.t + + let join ((deps1, msgs1), (deps2, msgs2)) = + (DepSet.union deps1 deps2, PrintableErrorSet.union msgs1 msgs2) + + let bot = (DepSet.empty, PrintableErrorSet.empty) + + let top msg = (DepSet.empty, PrintableErrorSet.singleton msg) -module DepSet = Set.Make (Dep) + let unreachable = bot -type t = DepSet.t * ErrorSet.t + let todo loc msg = top (Error.TODO (msg, loc)) -let join ((deps1, msgs1), (deps2, msgs2)) = - DepSet.union deps1 deps2, ErrorSet.union msgs1 msgs2 + let unit dep = (DepSet.singleton dep, PrintableErrorSet.empty) -let bot = DepSet.empty, ErrorSet.empty -let top msg = DepSet.empty, ErrorSet.singleton msg + let type_ atom = unit Dep.(Local (Sort.Type, atom)) -let unreachable = bot -let todo loc msg = top (Error.TODO (msg, loc)) + let value atom = unit Dep.(Local (Sort.Value, atom)) -let unit dep = DepSet.singleton dep, ErrorSet.empty + let dynamic_import loc = unit Dep.(Dynamic (DynamicImport loc)) -let type_ atom = unit Dep.(Local (Sort.Type, atom)) -let value atom = unit Dep.(Local (Sort.Value, atom)) + let dynamic_require loc = unit Dep.(Dynamic (DynamicRequire loc)) -let dynamic_import loc = unit Dep.(Dynamic (DynamicImport loc)) -let dynamic_require loc = unit Dep.(Dynamic (DynamicRequire loc)) + let import_named sort source name = unit Dep.(Remote (ImportNamed { sort; source; name })) + + let import_star sort source = unit Dep.(Remote (ImportStar { sort; source })) + + let require ?name source = unit Dep.(Remote (Require { source; name })) + + let global local = unit Dep.(Remote (Global local)) + + let reduce_join f deps x = join (deps, f x) + + let recurse f (deps, msgs) = + DepSet.fold (fun dep msgs -> PrintableErrorSet.union (f dep) msgs) deps msgs + + let replace_local_with_dynamic_class (loc, x) (deps, msgs) = + let acc = + DepSet.fold + (fun dep acc -> + match dep with + | Dep.Local (_, y) when x = y -> acc + | _ -> join (acc, unit dep)) + deps + (DepSet.empty, msgs) + in + join (acc, unit (Dep.Dynamic (Dep.Class (loc, x)))) +end -let import_named sort source name = unit Dep.(Remote (ImportNamed { sort; source; name })) -let import_star sort source = unit Dep.(Remote (ImportStar { sort; source })) -let require source = unit Dep.(Remote (Require { source })) -let global local = unit Dep.(Remote (Global local)) +module With_Loc = Make (Loc_sig.LocS) +module With_ALoc = Make (Loc_sig.ALocS) +include With_Loc -let reduce_join f deps x = - join (deps, f x) +let abstractify_expected_annotation_sort = + let module WL = With_Loc.ExpectedAnnotationSort in + let module WA = With_ALoc.ExpectedAnnotationSort in + function + | WL.ArrayPattern -> WA.ArrayPattern + | WL.FunctionReturn -> WA.FunctionReturn + | WL.Property key -> WA.Property 
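(* Small sketch of the dependency lattice defined above (illustrative only;
   `loc` is a placeholder). A signature fragment that mentions the value
   `foo` and the type `T` contributes two Local deps, and any verification
   failure travels alongside them in the same pair, so results compose with
   `join`. *)
let _example_deps loc =
  join (join (value "foo", type_ "T"), top (Error.UnexpectedArrayHole loc))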
(Ast_loc_utils.loc_to_aloc_mapper#object_key key) + | WL.PrivateField key -> WA.PrivateField (Ast_loc_utils.loc_to_aloc_mapper#private_name key) + | WL.VariableDefinition id -> + WA.VariableDefinition (Ast_loc_utils.loc_to_aloc_mapper#identifier id) -let recurse f (deps, msgs) = - DepSet.fold (fun dep msgs -> ErrorSet.union (f dep) msgs) deps msgs +let abstractify_error = + let module WL = With_Loc.Error in + let module WA = With_ALoc.Error in + function + | WL.ExpectedSort (sort, str, loc) -> WA.ExpectedSort (sort, str, ALoc.of_loc loc) + | WL.ExpectedAnnotation (loc, sort) -> + WA.ExpectedAnnotation (ALoc.of_loc loc, abstractify_expected_annotation_sort sort) + | WL.InvalidTypeParamUse loc -> WA.InvalidTypeParamUse (ALoc.of_loc loc) + | WL.UnexpectedObjectKey (loc, key_loc) -> + WA.UnexpectedObjectKey (ALoc.of_loc loc, ALoc.of_loc key_loc) + | WL.UnexpectedObjectSpread (loc, spread_loc) -> + WA.UnexpectedObjectSpread (ALoc.of_loc loc, ALoc.of_loc spread_loc) + | WL.UnexpectedArraySpread (loc, spread_loc) -> + WA.UnexpectedArraySpread (ALoc.of_loc loc, ALoc.of_loc spread_loc) + | WL.UnexpectedArrayHole loc -> WA.UnexpectedArrayHole (ALoc.of_loc loc) + | WL.EmptyArray loc -> WA.EmptyArray (ALoc.of_loc loc) + | WL.EmptyObject loc -> WA.EmptyObject (ALoc.of_loc loc) + | WL.UnexpectedExpression (loc, sort) -> WA.UnexpectedExpression (ALoc.of_loc loc, sort) + | WL.SketchyToplevelDef loc -> WA.SketchyToplevelDef (ALoc.of_loc loc) + | WL.UnsupportedPredicateExpression loc -> WA.UnsupportedPredicateExpression (ALoc.of_loc loc) + | WL.TODO (str, loc) -> WA.TODO (str, ALoc.of_loc loc) diff --git a/src/parser_utils/signature_builder_deps_sig.ml b/src/parser_utils/signature_builder_deps_sig.ml new file mode 100644 index 00000000000..700f5a664f0 --- /dev/null +++ b/src/parser_utils/signature_builder_deps_sig.ml @@ -0,0 +1,126 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +module Sort = Signature_builder_kind.Sort + +module type S = sig + module L : Loc_sig.S + + module ExpectedAnnotationSort : sig + type t = + | ArrayPattern + | FunctionReturn + | PrivateField of L.t Flow_ast.PrivateName.t + | Property of (L.t, L.t) Flow_ast.Expression.Object.Property.key + | VariableDefinition of (L.t, L.t) Flow_ast.Identifier.t + + val property_key_to_string : (L.t, L.t) Flow_ast.Expression.Object.Property.key -> string + + val to_string : t -> string + end + + module Error : sig + type t = + | ExpectedSort of Sort.t * string * L.t + | ExpectedAnnotation of L.t * ExpectedAnnotationSort.t + | InvalidTypeParamUse of L.t + | UnexpectedObjectKey of L.t (* object loc *) * L.t (* key loc *) + | UnexpectedObjectSpread of L.t (* object loc *) * L.t (* spread loc *) + | UnexpectedArraySpread of L.t (* array loc *) * L.t (* spread loc *) + | UnexpectedArrayHole of L.t (* array loc *) + | EmptyArray of L.t (* array loc *) + | EmptyObject of L.t (* object loc *) + | UnexpectedExpression of L.t * Flow_ast_utils.ExpressionSort.t + | SketchyToplevelDef of L.t + | UnsupportedPredicateExpression of L.t + | TODO of string * L.t + + val compare : t -> t -> int + + val debug_to_string : t -> string + end + + module PrintableErrorSet : Set.S with type elt = Error.t + + module Dep : sig + type t = + | Local of local + | Dynamic of dynamic + | Remote of remote + + and local = Sort.t * string + + and dynamic = + | Class of L.t * string + | DynamicImport of L.t + | DynamicRequire of L.t + + and remote = + | ImportNamed of { + sort: Sort.t; + source: L.t Flow_ast_utils.source; + name: L.t Flow_ast_utils.ident; + } + | ImportStar of { + sort: Sort.t; + source: L.t Flow_ast_utils.source; + } + | Require of { + source: L.t Flow_ast_utils.source; + name: L.t Flow_ast_utils.ident Nel.t option; + } + | Global of local + + val compare : t -> t -> int + + val expectation : Sort.t -> string -> L.t -> Error.t + + val remote : t -> bool + + val local_uses : t -> SSet.t -> SSet.t + + val to_string : t -> string + end + + module DepSet : Set.S with type elt = Dep.t + + type t = DepSet.t * PrintableErrorSet.t + + val join : t * t -> t + + val bot : t + + val top : Error.t -> t + + val unreachable : t + + val todo : L.t -> string -> t + + val unit : Dep.t -> t + + val type_ : string -> t + + val value : string -> t + + val dynamic_import : L.t -> t + + val dynamic_require : L.t -> t + + val import_named : Sort.t -> L.t Flow_ast_utils.source -> L.t Flow_ast_utils.ident -> t + + val import_star : Sort.t -> L.t Flow_ast_utils.source -> t + + val require : ?name:L.t Flow_ast_utils.ident Nel.t -> L.t Flow_ast_utils.source -> t + + val global : Dep.local -> t + + val reduce_join : ('a -> t) -> t -> 'a -> t + + val recurse : (Dep.t -> PrintableErrorSet.t) -> t -> PrintableErrorSet.t + + val replace_local_with_dynamic_class : L.t Flow_ast_utils.ident -> t -> t +end diff --git a/src/parser_utils/signature_builder_entry.ml b/src/parser_utils/signature_builder_entry.ml index fd066b9eaaa..b57205dc2a7 100644 --- a/src/parser_utils/signature_builder_entry.ml +++ b/src/parser_utils/signature_builder_entry.ml @@ -1,122 +1,127 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
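(* Illustrative sketch of how this interface is meant to be consumed: code
   that is generic over the location representation can be written as a
   functor over S and then instantiated with either variant produced by
   Signature_builder_deps.Make. The functor body here is a placeholder. *)
module Example_client (Deps : S) = struct
  let value_dep name = Deps.value name
end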
*) module Ast = Flow_ast - module Kind = Signature_builder_kind -type t = Loc.t Ast.Identifier.t * Kind.t - -let rec pattern ?annot_path init (p: (Loc.t, Loc.t) Ast.Pattern.t) = - let open Ast.Pattern in - begin match p with - | _, Identifier { Identifier.name; annot; _ } -> - [name, Kind.VariableDef { annot = Kind.Annot_path.mk_annot ?annot_path annot; init }] - | _, Object { Object.properties; annot } -> - let open Object in - let annot_path = Kind.Annot_path.mk_annot ?annot_path annot in - List.fold_left (fun acc -> function - | Property (_, { Property.key; pattern = p; _ }) -> - begin match key with - | Property.Identifier (_, x) -> - let annot_path = Kind.Annot_path.mk_object ?annot_path x in - acc @ (pattern ?annot_path init p) - | Property.Literal (_, { Ast.Literal.raw; _ }) -> - let annot_path = Kind.Annot_path.mk_object ?annot_path raw in - acc @ (pattern ?annot_path init p) - | Property.Computed _ -> - acc @ (pattern init p) - end - | RestProperty (_, { RestProperty.argument = p }) -> - acc @ (pattern init p) - ) [] properties - | _, Array { Array.elements; annot } -> - let open Array in - let annot_path = Kind.Annot_path.mk_annot ?annot_path annot in - fst @@ List.fold_left (fun (acc, i) -> function - | None -> acc, i+1 - | Some (Element p) -> - let annot_path = Kind.Annot_path.mk_array ?annot_path i in - acc @ (pattern ?annot_path init p), i+1 - | Some (RestElement (_, { RestElement.argument = p })) -> - acc @ (pattern init p), i+1 - ) ([], 0) elements - | _, Assignment { Assignment.left; _ } -> pattern ?annot_path init left - | _, Expression _ -> [] (* TODO *) - end - -let variable_declaration (decl: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) = - let open Ast.Statement.VariableDeclaration in - let { declarations; kind } = decl in - List.fold_left (fun acc (_, { Declarator.id; init }) -> - let init = match kind, init with - | Const, Some _ -> init - | _ -> None in - acc @ (pattern init id) - ) [] declarations - -let function_declaration function_declaration = - let open Ast.Function in - let { id; generator; tparams; params; return; body; _ } = function_declaration in - id, Kind.FunctionDef { generator; tparams; params; return; body } - -let class_ class_ = - let open Ast.Class in - let { - id; tparams; body; extends; implements; - classDecorators = _; - } = class_ in - let super, super_targs = match extends with - | None -> None, None - | Some (_, { Extends.expr; targs; }) -> Some expr, targs in - id, Kind.ClassDef { tparams; body; super; super_targs; implements } - -let declare_variable declare_variable = - let open Ast.Statement.DeclareVariable in - let { id; annot } = declare_variable in - id, Kind.VariableDef { annot = Kind.Annot_path.mk_annot annot; init = None } - -let declare_function declare_function = - let open Ast.Statement.DeclareFunction in - let { id; annot; _ } = declare_function in - id, Kind.DeclareFunctionDef { annot } - -let declare_class declare_class = - let open Ast.Statement.DeclareClass in - let { - id; tparams; body = (_, body); extends; mixins; implements - } = declare_class in - id, Kind.DeclareClassDef { tparams; body; extends; mixins; implements } - -let type_alias type_alias = - let open Ast.Statement.TypeAlias in - let { id; right; tparams } = type_alias in - id, Kind.TypeDef { tparams; right } - -let opaque_type opaque_type = - let open Ast.Statement.OpaqueType in - let { id; tparams; impltype; supertype; _ } = opaque_type in - id, Kind.OpaqueTypeDef { tparams; impltype; supertype } - -let interface interface = - let open 
Ast.Statement.Interface in - let { - id; tparams; body = (_, body); extends - } = interface in - id, Kind.InterfaceDef { tparams; body; extends } - -let import_star id kind source = - id, Kind.ImportStarDef { kind; source } - -let import_named id name kind source = - id, Kind.ImportNamedDef { kind; source; name } - -let require id source = - id, Kind.RequireDef { source } - -let sketchy_toplevel id = - id, Kind.SketchyToplevelDef +type t = (Loc.t, Loc.t) Ast.Identifier.t * Kind.t + +let rec pattern loc ?annot_path ?init_path (p : (Loc.t, Loc.t) Ast.Pattern.t) = + Ast.Pattern.( + match p with + | (_, Identifier { Identifier.name; annot; _ }) -> + [ + ( name, + ( loc, + Kind.VariableDef + { id = name; annot = Kind.Annot_path.mk_annot ?annot_path annot; init = init_path } + ) ); + ] + | (_, Object { Object.properties; annot }) -> + Object.( + let annot_path = Kind.Annot_path.mk_annot ?annot_path annot in + List.fold_left + (fun acc -> function + | Property (prop_loc, { Property.key; pattern = p; _ }) -> + begin + match key with + | Property.Identifier (key_loc, { Ast.Identifier.name = x; comments = _ }) -> + let annot_path = Kind.Annot_path.mk_object prop_loc ?annot_path (key_loc, x) in + let init_path = Kind.Init_path.mk_object prop_loc ?init_path (key_loc, x) in + acc @ pattern loc ?annot_path ?init_path p + | Property.Literal (key_loc, { Ast.Literal.raw; _ }) -> + let annot_path = Kind.Annot_path.mk_object prop_loc ?annot_path (key_loc, raw) in + let init_path = Kind.Init_path.mk_object prop_loc ?init_path (key_loc, raw) in + acc @ pattern loc ?annot_path ?init_path p + | Property.Computed _ -> acc @ pattern loc p + end + | RestProperty (_, { RestProperty.argument = p }) -> acc @ pattern loc p) + [] + properties) + | (_, Array { Array.elements; annot = _; comments = _ }) -> + Array.( + List.fold_left + (fun acc -> function + | None -> acc + | Some (Element (_, { Element.argument = p; default = _ })) -> acc @ pattern loc p + | Some (RestElement (_, { RestElement.argument = p })) -> acc @ pattern loc p) + [] + elements) + | (_, Expression _) -> [] + (* TODO *)) + +let variable_declaration loc (decl : (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t) = + Ast.Statement.VariableDeclaration.( + let { declarations; kind } = decl in + List.fold_left + (fun acc (_, { Declarator.id; init }) -> + let init = + match (kind, init) with + | (Const, Some _) -> init + | _ -> None + in + acc @ pattern loc ?init_path:(Kind.Init_path.mk_init init) id) + [] + declarations) + +let function_declaration + loc { Ast.Function.id; generator; async; tparams; params; return; body; predicate; _ } = + ( Option.value_exn id, + (loc, Kind.FunctionDef { generator; async; tparams; params; return; body; predicate }) ) + +let function_expression + loc { Ast.Function.id; generator; async; tparams; params; return; body; predicate; _ } = + ( Option.value_exn id, + (loc, Kind.FunctionDef { generator; async; tparams; params; return; body; predicate }) ) + +let class_ loc class_ = + Ast.Class.( + let { id; tparams; body; extends; implements; classDecorators = _ } = class_ in + let (super, super_targs) = + match extends with + | None -> (None, None) + | Some (_, { Extends.expr; targs }) -> (Some expr, targs) + in + (Option.value_exn id, (loc, Kind.ClassDef { tparams; body; super; super_targs; implements }))) + +let declare_variable loc declare_variable = + Ast.Statement.DeclareVariable.( + let { id; annot } = declare_variable in + (id, (loc, Kind.VariableDef { id; annot = Kind.Annot_path.mk_annot annot; init = None }))) + +let 
declare_function loc declare_function = + Ast.Statement.DeclareFunction.( + let { id; annot; predicate; _ } = declare_function in + (id, (loc, Kind.DeclareFunctionDef { annot; predicate }))) + +let declare_class loc declare_class = + Ast.Statement.DeclareClass.( + let { id; tparams; body; extends; mixins; implements } = declare_class in + (id, (loc, Kind.DeclareClassDef { tparams; body; extends; mixins; implements }))) + +let type_alias loc type_alias = + Ast.Statement.TypeAlias.( + let { id; right; tparams } = type_alias in + (id, (loc, Kind.TypeDef { tparams; right }))) + +let opaque_type loc opaque_type = + Ast.Statement.OpaqueType.( + let { id; tparams; impltype; supertype } = opaque_type in + (id, (loc, Kind.OpaqueTypeDef { tparams; impltype; supertype }))) + +let interface loc interface = + Ast.Statement.Interface.( + let { id; tparams; body; extends } = interface in + (id, (loc, Kind.InterfaceDef { tparams; body; extends }))) + +let import_star loc id kind source = (id, (loc, Kind.ImportStarDef { kind; source })) + +let import_named loc id name kind source = (id, (loc, Kind.ImportNamedDef { kind; source; name })) + +let require loc id ?name source = (id, (loc, Kind.RequireDef { source; name })) + +let sketchy_toplevel loc id = (id, (loc, Kind.SketchyToplevelDef)) diff --git a/src/parser_utils/signature_builder_env.ml b/src/parser_utils/signature_builder_env.ml index d9bb2c4684f..71a515aafd0 100644 --- a/src/parser_utils/signature_builder_env.ml +++ b/src/parser_utils/signature_builder_env.ml @@ -1,23 +1,25 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module LocMap = Utils_js.LocMap -type t = Signature_builder_kind.t LocMap.t SMap.t +module LocMap = Loc_collections.LocMap + +type t = (Loc.t * Signature_builder_kind.t) LocMap.t SMap.t let empty = SMap.empty -let singleton ((loc, x), kind) = +let singleton ((loc, { Flow_ast.Identifier.name = x; comments = _ }), kind) = SMap.singleton x (LocMap.singleton loc kind) -let add ((loc, x), kind) t = - SMap.add x (match SMap.get x t with +let add ((loc, { Flow_ast.Identifier.name = x; comments = _ }), kind) t = + SMap.add + x + (match SMap.get x t with | Some u -> LocMap.add loc kind u - | None -> LocMap.singleton loc kind - ) t + | None -> LocMap.singleton loc kind) + t -let push entries t = - List.fold_left (fun t entry -> add entry t) t entries +let push entries t = List.fold_left (fun t entry -> add entry t) t entries diff --git a/src/parser_utils/signature_builder_generate.ml b/src/parser_utils/signature_builder_generate.ml new file mode 100644 index 00000000000..ee5630e23c7 --- /dev/null +++ b/src/parser_utils/signature_builder_generate.ml @@ -0,0 +1,1979 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module Ast_utils = Flow_ast_utils +module Ast = Flow_ast +module LocMap = Loc_collections.LocMap +module Kind = Signature_builder_kind +module Entry = Signature_builder_entry +module Deps = Signature_builder_deps.With_Loc +module File_sig = File_sig.With_Loc +module Error = Deps.Error +module Dep = Deps.Dep + +(* The generator creates new AST nodes, some of whose locations do not map back very accurately to + original locations. 
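(* Small sketch of the environment shape defined in signature_builder_env.ml
   above (illustrative only; `loc1`, `loc2` and `kind` are placeholders).
   Entries are keyed by name first and by declaration location second, so two
   declarations of the same name stay distinct instead of overwriting each
   other. *)
let _example_env loc1 loc2 kind =
  Signature_builder_env.(
    empty
    |> add (Flow_ast_utils.ident_of_source (loc1, "f"), (loc1, kind))
    |> add (Flow_ast_utils.ident_of_source (loc2, "f"), (loc2, kind)))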
While these are relatively unimportant, in that they should never make their + way into type errors, making them Loc.none is risky because they would make Flow crash in the + event of unforeseen bugs. Instead we reuse some nearby locations as approximations. *) +let approx_loc loc = loc + +module T = struct + type type_ = (Loc.t, Loc.t) Ast.Type.t + + and decl = + (* type definitions *) + | Type of { + tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + right: type_; + } + | OpaqueType of { + tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + impltype: type_ option; + supertype: type_ option; + } + | Interface of { + tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + extends: generic list; + body: Loc.t * object_type; + } + (* declarations and outlined expressions *) + | ClassDecl of class_t + | FunctionDecl of { + annot: little_annotation; + predicate: (Loc.t, Loc.t) Ast.Type.Predicate.t option; + } + | FunctionWithStaticsDecl of { + base: Loc.t * expr_type; + statics: ((Loc.t, Loc.t) Ast.Identifier.t * (Loc.t * expr_type)) list; + } + | VariableDecl of little_annotation + (* remote *) + | ImportNamed of { + kind: Ast.Statement.ImportDeclaration.importKind; + source: Loc.t Ast_utils.source; + name: Loc.t Ast_utils.ident; + } + | ImportStar of { + kind: Ast.Statement.ImportDeclaration.importKind; + source: Loc.t Ast_utils.source; + } + | Require of { + source: Loc.t Ast_utils.source; + name: Loc.t Ast_utils.ident Nel.t option; + } + + and generic = Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t + + and class_implement = (Loc.t, Loc.t) Ast.Class.Implements.t + + and little_annotation = + | TYPE of type_ + | EXPR of (Loc.t * expr_type) + + and expr_type = + (* types and expressions *) + | Function of function_t + | ObjectLiteral of { + frozen: bool; + properties: (Loc.t * object_property_t) Nel.t; + } + | ArrayLiteral of array_element_t Nel.t + | ValueRef of reference (* typeof `x` *) + | NumberLiteral of Ast.NumberLiteral.t + | StringLiteral of Ast.StringLiteral.t + | BooleanLiteral of bool + | Number + | String + | Boolean + | JSXLiteral of generic + | Void + | Null + | Promise of (Loc.t * expr_type) + | TypeCast of type_ + | Outline of outlinable_t + | ObjectDestruct of little_annotation * (Loc.t * string) + | FixMe + + and object_type = (Loc.t, Loc.t) Ast.Type.Object.t + + and object_key = (Loc.t, Loc.t) Ast.Expression.Object.Property.key + + and outlinable_t = + | Class of (Loc.t * string) option * class_t + | DynamicImport of Loc.t * Ast.StringLiteral.t + | DynamicRequire of (Loc.t, Loc.t) Ast.Expression.t + + and function_t = + | FUNCTION of { + tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + params: function_params; + return: little_annotation; + } + + and function_params = Loc.t * pattern list * (Loc.t * pattern) option + + and pattern = Loc.t * (Loc.t, Loc.t) Ast.Identifier.t option * bool (* optional *) * type_ + + and class_t = + | CLASS of { + tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + extends: generic option; + implements: class_implement list; + body: Loc.t * (Loc.t * class_element_t) list; + } + | DECLARE_CLASS of { + tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + extends: generic option; + mixins: generic list; + implements: class_implement list; + body: Loc.t * object_type; + } + + and class_element_t = + | CMethod of object_key * Ast.Class.Method.kind * bool (* static *) * (Loc.t * function_t) + | CProperty of object_key * bool (* static *) * Loc.t Ast.Variance.t option * 
type_ + | CPrivateField of string * bool (* static *) * Loc.t Ast.Variance.t option * type_ + + and object_property_t = + | OInit of object_key * (Loc.t * expr_type) + | OMethod of object_key * (Loc.t * function_t) + | OGet of object_key * (Loc.t * function_t) + | OSet of object_key * (Loc.t * function_t) + | OSpread of (Loc.t * expr_type) + + and array_element_t = AInit of (Loc.t * expr_type) + + and reference = + | RLexical of Loc.t * string + | RPath of Loc.t * reference * (Loc.t * string) + + module FixMe = struct + let mk_type loc = + ( loc, + Ast.Type.Generic + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Unqualified + (Flow_ast_utils.ident_of_source (loc, "$FlowFixMe")); + targs = None; + } ) + + let mk_little_annotation loc = TYPE (mk_type loc) + + let mk_pattern default loc = + if default then + (loc, Some (Flow_ast_utils.ident_of_source (loc, "_")), true, mk_type loc) + else + (loc, None, false, mk_type loc) + + let mk_expr_type loc = (loc, FixMe) + + let mk_extends loc = + Some + ( loc, + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Unqualified + (Flow_ast_utils.ident_of_source (loc, "$TEMPORARY$Super$FlowFixMe")); + targs = None; + } ) + + let mk_decl loc = VariableDecl (mk_little_annotation loc) + end + + let rec summarize_array loc = function + | (AInit (_, et), aes) -> + List.fold_left + (fun acc -> function + | AInit (_, et) -> data_optional_pair loc acc (Some et)) + (Some et) + aes + + and data_optional_pair loc data1 data2 = + match (data1, data2) with + | (Some et1, Some et2) -> summarize_expr_type_pair loc et1 et2 + | (None, _) + | (_, None) -> + None + + and summarize_expr_type_pair loc expr_type1 expr_type2 = + match (expr_type1, expr_type2) with + | (ArrayLiteral array1, ArrayLiteral array2) -> + let array' = summarize_array_pair loc array1 array2 in + begin + match array' with + | None -> None + | Some et -> Some (ArrayLiteral (AInit (loc, et), [])) + end + | ( ObjectLiteral { frozen = frozen1; properties = object1 }, + ObjectLiteral { frozen = frozen2; properties = object2 } ) -> + let frozen' = + match (frozen1, frozen2) with + | (true, true) -> Some true + | (false, false) -> Some false + | _ -> None + in + let object' = summarize_object_pair loc object1 object2 in + begin + match (frozen', object') with + | (Some frozen, Some xets) -> + Some + (ObjectLiteral + { + frozen; + properties = Nel.rev_map (fun (x, et) -> (loc, OInit (x, (loc, et)))) xets; + }) + | _ -> None + end + | ((NumberLiteral _ | Number), (NumberLiteral _ | Number)) -> Some Number + | ((StringLiteral _ | String), (StringLiteral _ | String)) -> Some String + | ((BooleanLiteral _ | Boolean), (BooleanLiteral _ | Boolean)) -> Some Boolean + | (Null, Null) -> Some Null + | _ -> None + + and summarize_array_pair loc array1 array2 = + data_optional_pair loc (summarize_array loc array1) (summarize_array loc array2) + + and summarize_object_pair = + let abs_object_key object_key = + Ast.Expression.Object.Property.( + match object_key with + | Literal (_, x) -> `Literal x + | Identifier (_, x) -> `Identifier x + | PrivateName (_, (_, x)) -> `PrivateName x + | _ -> assert false) + in + let object_key loc abs_object_key = + Ast.Expression.Object.Property.( + match abs_object_key with + | `Literal x -> Literal (loc, x) + | `Identifier x -> Identifier (loc, x) + | `PrivateName x -> PrivateName (loc, (loc, x))) + in + let compare_object_property = + let abs_object_key = function + | (_, OInit (object_key, _)) + | (_, OMethod (object_key, _)) + | (_, OGet (object_key, _)) + | (_, OSet 
(object_key, _)) -> + abs_object_key object_key + | (_, OSpread _) -> assert false + in + (fun op1 op2 -> Pervasives.compare (abs_object_key op1) (abs_object_key op2)) + in + let summarize_object_property_pair loc op1 op2 = + match (snd op1, snd op2) with + | (OInit (object_key1, (_, et1)), OInit (object_key2, (_, et2))) -> + let x = abs_object_key object_key1 in + if x = abs_object_key object_key2 then + match summarize_expr_type_pair loc et1 et2 with + | Some et -> Some (object_key loc x, et) + | None -> None + else + None + | _ -> None + in + let rec summarize_object_pair loc acc = function + | ([], []) -> acc + | ([], _) + | (_, []) -> + None + | (op1 :: ops1, op2 :: ops2) -> + let acc = + match (summarize_object_property_pair loc op1 op2, acc) with + | (None, _) + | (_, None) -> + None + | (Some xet, Some xets) -> Some (Nel.cons xet xets) + in + summarize_object_pair loc acc (ops1, ops2) + in + fun loc object1 object2 -> + let (op1, ops1) = + Nel.of_list_exn @@ List.sort compare_object_property @@ Nel.to_list object1 + in + let (op2, ops2) = + Nel.of_list_exn @@ List.sort compare_object_property @@ Nel.to_list object2 + in + let init = + match summarize_object_property_pair loc op1 op2 with + | None -> None + | Some xet -> Some (xet, []) + in + summarize_object_pair loc init (ops1, ops2) + + module Outlined : sig + type 'a t + + val create : unit -> 'a t + + val next : 'a t -> Loc.t -> (Loc.t * string -> (Loc.t * string) option * 'a) -> Loc.t * string + + val get : 'a t -> 'a list + end = struct + type 'a t = (int * 'a list) ref + + let create () = ref (0, []) + + let next outlined outlined_loc f = + let (n, l) = !outlined in + let n = n + 1 in + let id = (outlined_loc, Printf.sprintf "$%d" n) in + let (id_opt, x) = f id in + let (n, id) = + match id_opt with + | None -> (n, id) + | Some id -> (n - 1, id) + in + let l = x :: l in + outlined := (n, l); + id + + let get outlined = + let (_, l) = !outlined in + l + end + + let param_of_type (loc, name, optional, annot) = + (loc, { Ast.Type.Function.Param.name; annot; optional }) + + let type_of_generic (loc, gt) = (loc, Ast.Type.Generic gt) + + let source_of_source (loc, x) = (loc, { Ast.StringLiteral.value = x; raw = x }) + + let temporary_type name loc t = + ( loc, + Ast.Type.Generic + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Unqualified (Flow_ast_utils.ident_of_source (loc, name)); + targs = Some (loc, [(loc, t)]); + } ) + + let rec type_of_expr_type outlined = function + | (loc, Function function_t) -> type_of_function outlined (loc, function_t) + | (loc, ObjectLiteral { frozen = true; properties = (pt, pts) }) -> + temporary_type + "$TEMPORARY$Object$freeze" + loc + (Ast.Type.Object + { + Ast.Type.Object.exact = true; + inexact = false; + properties = List.map (type_of_object_property outlined) (pt :: pts); + }) + | (loc, ObjectLiteral { frozen = false; properties = (pt, pts) }) -> + temporary_type + "$TEMPORARY$object" + loc + (Ast.Type.Object + { + Ast.Type.Object.exact = true; + inexact = false; + properties = Core_list.map ~f:(type_of_object_property outlined) (pt :: pts); + }) + | (loc, ArrayLiteral ets) -> + temporary_type + "$TEMPORARY$array" + loc + (match ets with + | (et, []) -> snd (type_of_array_element outlined et) + | (et1, et2 :: ets) -> + Ast.Type.Union + ( type_of_array_element outlined et1, + type_of_array_element outlined et2, + Core_list.map ~f:(type_of_array_element outlined) ets )) + | (loc, ValueRef reference) -> + ( loc, + Ast.Type.Typeof + (type_of_generic + (loc, { Ast.Type.Generic.id = 
generic_id_of_reference reference; targs = None })) ) + | (loc, NumberLiteral nt) -> temporary_type "$TEMPORARY$number" loc (Ast.Type.NumberLiteral nt) + | (loc, StringLiteral st) -> temporary_type "$TEMPORARY$string" loc (Ast.Type.StringLiteral st) + | (loc, BooleanLiteral b) -> + temporary_type "$TEMPORARY$boolean" loc (Ast.Type.BooleanLiteral b) + | (loc, Number) -> (loc, Ast.Type.Number) + | (loc, String) -> (loc, Ast.Type.String) + | (loc, Boolean) -> (loc, Ast.Type.Boolean) + | (loc, Void) -> (loc, Ast.Type.Void) + | (loc, Promise t) -> + ( loc, + Ast.Type.Generic + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Unqualified + (Flow_ast_utils.ident_of_source (loc, "Promise")); + targs = Some (loc, [type_of_expr_type outlined t]); + } ) + | (loc, Null) -> (loc, Ast.Type.Null) + | (_loc, JSXLiteral g) -> type_of_generic g + | (_loc, TypeCast t) -> t + | (loc, Outline ht) -> + let f = outlining_fun outlined loc ht in + let id = Outlined.next outlined loc f in + ( loc, + Ast.Type.Typeof + (type_of_generic + ( loc, + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Unqualified (Flow_ast_utils.ident_of_source id); + targs = None; + } )) ) + | (loc, ObjectDestruct (annot_or_init, prop)) -> + let t = type_of_little_annotation outlined annot_or_init in + let f id = + ( None, + ( fst t, + Ast.Statement.DeclareVariable + { + Ast.Statement.DeclareVariable.id = Flow_ast_utils.ident_of_source id; + annot = Ast.Type.Available (fst t, t); + } ) ) + in + let id = Outlined.next outlined loc f in + ( loc, + Ast.Type.Typeof + (type_of_generic + ( loc, + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Qualified + ( loc, + { + Ast.Type.Generic.Identifier.qualification = + Ast.Type.Generic.Identifier.Unqualified + (Flow_ast_utils.ident_of_source id); + id = Flow_ast_utils.ident_of_source prop; + } ); + targs = None; + } )) ) + | (loc, FixMe) -> FixMe.mk_type loc + + and generic_id_of_reference = function + | RLexical (loc, x) -> + Ast.Type.Generic.Identifier.Unqualified (Flow_ast_utils.ident_of_source (loc, x)) + | RPath (path_loc, reference, (loc, x)) -> + Ast.Type.Generic.Identifier.Qualified + ( path_loc, + { + Ast.Type.Generic.Identifier.qualification = generic_id_of_reference reference; + id = Flow_ast_utils.ident_of_source (loc, x); + } ) + + and outlining_fun outlined decl_loc ht id = + match ht with + | Class (id_opt, class_t) -> + ( id_opt, + let id = + match id_opt with + | None -> id + | Some id -> id + in + stmt_of_decl outlined decl_loc id (ClassDecl class_t) ) + | DynamicImport (source_loc, source_lit) -> + ( None, + let importKind = Ast.Statement.ImportDeclaration.ImportValue in + let source = (source_loc, source_lit) in + let default = None in + let specifiers = + Some + (Ast.Statement.ImportDeclaration.ImportNamespaceSpecifier + (decl_loc, Flow_ast_utils.ident_of_source id)) + in + ( decl_loc, + Ast.Statement.ImportDeclaration + { Ast.Statement.ImportDeclaration.importKind; source; default; specifiers } ) ) + | DynamicRequire require -> + ( None, + let kind = Ast.Statement.VariableDeclaration.Const in + let pattern = + ( decl_loc, + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = Flow_ast_utils.ident_of_source id; + annot = Ast.Type.Missing (fst id); + optional = false; + } ) + in + let declaration = + { Ast.Statement.VariableDeclaration.Declarator.id = pattern; init = Some require } + in + ( decl_loc, + Ast.Statement.VariableDeclaration + { Ast.Statement.VariableDeclaration.kind; declarations = [(decl_loc, declaration)] } ) + ) + + and 
type_of_array_element outlined = function + | AInit expr_type -> type_of_expr_type outlined expr_type + + and type_of_object_property outlined = function + | (loc, OInit (key, expr_type)) -> + Ast.Type.Object.Property + ( loc, + { + Ast.Type.Object.Property.key; + value = Ast.Type.Object.Property.Init (type_of_expr_type outlined expr_type); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } ) + | (loc, OMethod (key, function_t)) -> + Ast.Type.Object.Property + ( loc, + { + Ast.Type.Object.Property.key; + value = Ast.Type.Object.Property.Init (type_of_function outlined function_t); + optional = false; + static = false; + proto = false; + _method = true; + variance = None; + } ) + | (loc, OGet (key, function_t)) -> + Ast.Type.Object.Property + ( loc, + { + Ast.Type.Object.Property.key; + value = Ast.Type.Object.Property.Get (type_of_function_t outlined function_t); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } ) + | (loc, OSet (key, function_t)) -> + Ast.Type.Object.Property + ( loc, + { + Ast.Type.Object.Property.key; + value = Ast.Type.Object.Property.Set (type_of_function_t outlined function_t); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } ) + | (loc, OSpread expr_type) -> + Ast.Type.Object.SpreadProperty + (loc, { Ast.Type.Object.SpreadProperty.argument = type_of_expr_type outlined expr_type }) + + and type_of_function_t outlined = function + | ( loc, + FUNCTION + { + tparams : (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; + params : function_params; + return : little_annotation; + } ) -> + let (params_loc, params, rest) = params in + ( loc, + { + Ast.Type.Function.tparams; + params = + ( params_loc, + { + Ast.Type.Function.Params.params = Core_list.map ~f:param_of_type params; + rest = + (match rest with + | None -> None + | Some (loc, rest) -> + Some (loc, { Ast.Type.Function.RestParam.argument = param_of_type rest })); + } ); + return = type_of_little_annotation outlined return; + } ) + + and type_of_function outlined function_t = + let (loc, function_t) = type_of_function_t outlined function_t in + (loc, Ast.Type.Function function_t) + + and type_of_little_annotation outlined = function + | TYPE t -> t + | EXPR expr_type -> type_of_expr_type outlined expr_type + + and annot_of_little_annotation outlined little_annotation = + let t = type_of_little_annotation outlined little_annotation in + (fst t, t) + + and name_opt_pattern id name_opt = + let id_pattern = + ( fst id, + Ast.Pattern.Identifier + { Ast.Pattern.Identifier.name = id; annot = Ast.Type.Missing (fst id); optional = false } + ) + in + match name_opt with + | None -> id_pattern + | Some (name, names) -> + let (_, { Ast.Identifier.name = id_name; comments = _ }) = id in + let pattern = + ( fst name, + Ast.Pattern.Object + { + Ast.Pattern.Object.properties = + [ + Ast.Pattern.Object.Property + ( fst name, + { + Ast.Pattern.Object.Property.key = + Ast.Pattern.Object.Property.Identifier + (Flow_ast_utils.ident_of_source name); + pattern = id_pattern; + shorthand = id_name = snd name; + default = None; + } ); + ]; + annot = Ast.Type.Missing (fst name); + } ) + in + wrap_name_pattern pattern names + + and wrap_name_pattern pattern = function + | [] -> pattern + | name :: names -> + let pattern = + ( fst name, + Ast.Pattern.Object + { + Ast.Pattern.Object.properties = + [ + Ast.Pattern.Object.Property + ( fst name, + { + Ast.Pattern.Object.Property.key = + 
Ast.Pattern.Object.Property.Identifier + (Flow_ast_utils.ident_of_source name); + pattern; + shorthand = false; + default = None; + } ); + ]; + annot = Ast.Type.Missing (fst name); + } ) + in + wrap_name_pattern pattern names + + and stmt_of_decl outlined decl_loc id decl = + let id = Flow_ast_utils.ident_of_source id in + match decl with + | Type { tparams; right } -> + (decl_loc, Ast.Statement.TypeAlias { Ast.Statement.TypeAlias.id; tparams; right }) + | OpaqueType { tparams; impltype; supertype } -> + ( decl_loc, + Ast.Statement.OpaqueType { Ast.Statement.OpaqueType.id; tparams; impltype; supertype } ) + | Interface { tparams; extends; body } -> + ( decl_loc, + Ast.Statement.InterfaceDeclaration { Ast.Statement.Interface.id; tparams; extends; body } + ) + | ClassDecl (CLASS { tparams; extends; implements; body = (body_loc, body) }) -> + (* FIXME(T39206072, festevezga) Private properties are filtered to prevent an exception surfaced in https://github.com/facebook/flow/issues/7355 *) + let filtered_body_FIXME = + Core_list.filter + ~f:(fun prop -> + match prop with + | (_loc, CPrivateField _) -> false + | _ -> true) + body + in + let body = + ( body_loc, + { + Ast.Type.Object.exact = false; + inexact = false; + properties = + Core_list.map ~f:(object_type_property_of_class_element outlined) filtered_body_FIXME; + } ) + in + let mixins = [] in + ( decl_loc, + Ast.Statement.DeclareClass + { Ast.Statement.DeclareClass.id; tparams; extends; implements; mixins; body } ) + | ClassDecl (DECLARE_CLASS { tparams; extends; mixins; implements; body }) -> + ( decl_loc, + Ast.Statement.DeclareClass + { Ast.Statement.DeclareClass.id; tparams; extends; implements; mixins; body } ) + | FunctionDecl { annot = little_annotation; predicate } -> + ( decl_loc, + Ast.Statement.DeclareFunction + { + Ast.Statement.DeclareFunction.id; + annot = annot_of_little_annotation outlined little_annotation; + predicate; + } ) + | FunctionWithStaticsDecl { base; statics } -> + let annot = type_of_expr_type outlined base in + let properties = + Core_list.rev_map + ~f:(fun (id, expr) -> + let annot = type_of_expr_type outlined expr in + Ast.Type.Object.( + Property + ( fst id, + { + Property.key = Ast.Expression.Object.Property.Identifier id; + value = Property.Init annot; + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } ))) + statics + in + let ot = { Ast.Type.Object.exact = false; inexact = true; properties } in + let assign = (decl_loc, Ast.Type.Object ot) in + let t = + let name = "$TEMPORARY$function" in + let id = + Ast.Type.Generic.Identifier.Unqualified (Flow_ast_utils.ident_of_source (decl_loc, name)) + in + ( decl_loc, + Ast.Type.Generic { Ast.Type.Generic.id; targs = Some (decl_loc, [annot; assign]) } ) + in + ( decl_loc, + Ast.Statement.DeclareVariable + { Ast.Statement.DeclareVariable.id; annot = Ast.Type.Available (fst annot, t) } ) + | VariableDecl little_annotation -> + ( decl_loc, + Ast.Statement.DeclareVariable + { + Ast.Statement.DeclareVariable.id; + annot = Ast.Type.Available (annot_of_little_annotation outlined little_annotation); + } ) + | ImportNamed { kind; source; name } -> + let importKind = kind in + let source = source_of_source source in + let default = + if snd name = "default" then + Some id + else + None + in + let specifiers = + let (_, { Ast.Identifier.name = id_name; comments = _ }) = id in + if snd name = "default" then + None + else + Some + (Ast.Statement.ImportDeclaration.ImportNamedSpecifiers + [ + { + 
Ast.Statement.ImportDeclaration.kind = None; + local = + ( if id_name = snd name then + None + else + Some id ); + remote = Flow_ast_utils.ident_of_source name; + }; + ]) + in + ( decl_loc, + Ast.Statement.ImportDeclaration + { Ast.Statement.ImportDeclaration.importKind; source; default; specifiers } ) + | ImportStar { kind; source } -> + let importKind = kind in + let source = source_of_source source in + let default = None in + let specifiers = + Some (Ast.Statement.ImportDeclaration.ImportNamespaceSpecifier (fst id, id)) + in + ( decl_loc, + Ast.Statement.ImportDeclaration + { Ast.Statement.ImportDeclaration.importKind; source; default; specifiers } ) + | Require { source; name } -> + let kind = Ast.Statement.VariableDeclaration.Const in + let pattern = name_opt_pattern id name in + let (loc, x) = source in + let require = + ( decl_loc, + Ast.Expression.Call + { + Ast.Expression.Call.callee = + ( approx_loc decl_loc, + Ast.Expression.Identifier + (Flow_ast_utils.ident_of_source (approx_loc decl_loc, "require")) ); + targs = None; + arguments = + [ + Ast.Expression.Expression + ( loc, + Ast.Expression.Literal + { + Ast.Literal.value = Ast.Literal.String x; + raw = x; + comments = Flow_ast_utils.mk_comments_opt (); + } ); + ]; + } ) + in + let declaration = + { Ast.Statement.VariableDeclaration.Declarator.id = pattern; init = Some require } + in + ( decl_loc, + Ast.Statement.VariableDeclaration + { Ast.Statement.VariableDeclaration.kind; declarations = [(decl_loc, declaration)] } ) + + and object_type_property_of_class_element outlined = function + | (loc, CMethod (object_key, _kind, static, f)) -> + Ast.Type.Object.( + Property + ( loc, + { + Property.key = object_key; + value = Property.Init (type_of_function outlined f); + optional = false; + static; + proto = false; + _method = true; + variance = None; + } )) + | (loc, CProperty (object_key, static, variance, t)) -> + Ast.Type.Object.( + Property + ( loc, + { + Property.key = object_key; + value = Property.Init t; + optional = false; + static; + proto = false; + _method = false; + variance; + } )) + | (_loc, CPrivateField (_x, _static, _variance, _t)) -> assert false +end + +(* A signature of a module is described by exported expressions / definitions, but what we're really + interested in is their types. In particular, we are interested in computing these types early, so + that we can check the code inside a module against the signature in a separate pass. So the + question is: what information is necessary to compute these types? + + Assuming we know how to map various kinds of type constructors (and destructors) to their + meanings, all that remains to verify is that the types are well-formed: any identifiers appearing + inside them should be defined in the top-level local scope, or imported, or global; and their + "sort" of use (as a type or as a value) must match up with their definition. + + We break up the verification of well-formedness by computing a set of "dependencies" found by + walking the structure of types, definitions, and expressions. The dependencies are simply the + identifiers that are reached in this walk, coupled with their sort of use. Elsewhere, we + recursively expand these dependencies by looking up the definitions of such identifiers, possibly + uncovering further dependencies, and so on. + + A couple of important things to note at this point. + + 1. 
The verification of well-formedness (and computation of types) is complete only up to the + top-level local scope: any identifiers that are imported or global need to be resolved in a + separate phase that builds things up in module-dependency order. To reflect this arrangement, + verification returns not only a set of immediate errors but a set of conditions on imported and + global identifiers that must be enforced by that separate phase. + + 2. There is a fine line between errors found during verification and errors found during the + computation of types (since both kinds of errors are static errors). Still, one might argue that + the verification step should ensure that the computation step never fails. In that regard, the + checks we have so far are not enough. In particular: + + (a) While classes are intended to be the only values that can be used as types, we also allow + variables to be used as types, to account for the fact that a variable could be bound to a + top-level local, imported, or global class. Ideally we would verify that these expectation is + met, but we don't yet. + + (b) While destructuring only makes sense on types of the corresponding kinds (e.g., object + destructuring would only work on object types), currently we allow destructuring on all + types. Again, ideally we would discharge verification conditions for these and ensure that they + are satisfied. + + (c) Parts of the module system are still under design. For example, can types be defined locally + in anything other than the top-level scope? Do (or under what circumstances do) `require` and + `import *` bring exported types in scope? These considerations will affect the computation step + and ideally would be verified as well, but we're punting on them right now. +*) +module Eval (Env : Signature_builder_verify.EvalEnv) = struct + let rec type_ t = t + + and type_params tparams = tparams + + and object_key key = key + + and object_type ot = ot + + and generic tr = tr + + and type_args = function + | None -> None + | Some (loc, ts) -> Some (loc, Core_list.map ~f:type_ ts) + + let rec annot_path = function + | Kind.Annot_path.Annot (_, t) -> T.TYPE (type_ t) + | Kind.Annot_path.Object (prop_loc, (path, (loc, x))) -> + let annot = annot_path path in + T.EXPR (prop_loc, T.ObjectDestruct (annot, (loc, x))) + + let rec init_path = function + | Kind.Init_path.Init expr -> literal_expr expr + | Kind.Init_path.Object (prop_loc, (path, (loc, x))) -> + let expr_type = init_path path in + ( prop_loc, + (match expr_type with + | (path_loc, T.ValueRef reference) -> T.ValueRef (T.RPath (path_loc, reference, (loc, x))) + | _ -> T.ObjectDestruct (T.EXPR expr_type, (loc, x))) ) + + and annotation loc ?init annot = + match annot with + | Some path -> annot_path path + | None -> + begin + match init with + | Some path -> T.EXPR (init_path path) + | None -> T.FixMe.mk_little_annotation loc + end + + and annotated_type = function + | Ast.Type.Missing loc -> T.FixMe.mk_type loc + | Ast.Type.Available (_, t) -> type_ t + + and pattern ?(default = false) patt = + Ast.Pattern.( + match patt with + | (loc, Identifier { Identifier.annot; name; optional }) -> + (loc, Some name, default || optional, annotated_type annot) + | (loc, Object { Object.annot; properties = _ }) -> + if default then + (loc, Some (Flow_ast_utils.ident_of_source (loc, "_")), true, annotated_type annot) + else + (loc, None, false, annotated_type annot) + | (loc, Array { Array.annot; elements = _; comments = _ }) -> + if default then + (loc, Some 
(Flow_ast_utils.ident_of_source (loc, "_")), true, annotated_type annot) + else + (loc, None, false, annotated_type annot) + | (loc, Expression _) -> T.FixMe.mk_pattern default loc) + + and literal_expr = + let string_value_of_object_key object_key = + Ast.Expression.Object.Property.( + match object_key with + | Literal (loc, { Ast.Literal.value = Ast.Literal.String value; raw; comments = _ }) -> + (loc, T.TypeCast (loc, Ast.Type.StringLiteral { Ast.StringLiteral.value; raw })) + | Identifier (loc, { Ast.Identifier.name; comments = _ }) -> + let value = name in + let raw = Printf.sprintf "'%s'" name in + (loc, T.TypeCast (loc, Ast.Type.StringLiteral { Ast.StringLiteral.value; raw })) + | _ -> assert false) + in + let keys_as_string_values_of_object_properties object_properties = + try + Some + (Nel.map + (function + | (loc, T.OInit (x, _)) -> (loc, T.OInit (x, string_value_of_object_key x)) + | _ -> assert false) + object_properties) + with _ -> None + in + Ast.Expression.( + function + | (loc, Literal { Ast.Literal.value; raw; comments = _ }) -> + begin + match value with + | Ast.Literal.String value -> (loc, T.StringLiteral { Ast.StringLiteral.value; raw }) + | Ast.Literal.Number value -> (loc, T.NumberLiteral { Ast.NumberLiteral.value; raw }) + | Ast.Literal.Boolean b -> (loc, T.BooleanLiteral b) + | Ast.Literal.Null -> (loc, T.Null) + | _ -> T.FixMe.mk_expr_type loc + end + | (loc, TemplateLiteral _) -> (loc, T.String) + | (loc, Identifier stuff) -> (loc, T.ValueRef (identifier stuff)) + | (loc, Class stuff) -> + Ast.Class.( + let { tparams; body; extends; implements; id; classDecorators = _ } = stuff in + let (super, super_targs) = + match extends with + | None -> (None, None) + | Some (_, { Extends.expr; targs }) -> (Some expr, targs) + in + ( loc, + T.Outline + (T.Class + ( Option.map ~f:Flow_ast_utils.source_of_ident id, + class_ tparams body super super_targs implements )) )) + | ( loc, + Function + { + Ast.Function.generator; + tparams; + params; + return; + body; + id = _; + async; + predicate = _; + sig_loc = _; + } ) -> + (loc, T.Function (function_ generator async tparams params return body)) + | ( loc, + ArrowFunction + { + Ast.Function.tparams; + params; + return; + body; + async; + predicate = _; + sig_loc = _; + (* TODO: arrow functions can't have ids or be generators: *) + id = _; + generator = _; + } ) -> + (loc, T.Function (function_ false async tparams params return body)) + | (loc, Object stuff) -> + Ast.Expression.Object.( + let { properties; comments = _ } = stuff in + begin + match object_ properties with + | Some o -> (loc, T.ObjectLiteral { frozen = false; properties = o }) + | None -> T.FixMe.mk_expr_type loc + end) + | (loc, Array stuff) -> + Ast.Expression.Array.( + let { elements; comments = _ } = stuff in + begin + match array_ elements with + | Some a -> (loc, T.ArrayLiteral a) + | None -> T.FixMe.mk_expr_type loc + end) + | (loc, TypeCast stuff) -> + Ast.Expression.TypeCast.( + let { annot; expression = _ } = stuff in + let (_, t) = annot in + (loc, T.TypeCast (type_ t))) + | (loc, Member stuff) -> + begin + match member stuff with + | Some ref_expr -> (loc, T.ValueRef ref_expr) + | None -> T.FixMe.mk_expr_type loc + end + | ( loc, + Import + ( source_loc, + ( Literal { Ast.Literal.value = Ast.Literal.String value; raw; comments = _ } + | TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = value; raw }; + _; + } ); + ]; + _; + } ) ) ) -> + (loc, T.Outline (T.DynamicImport 
(source_loc, { Ast.StringLiteral.value; raw }))) + | ( loc, + Call + { + Ast.Expression.Call.callee = + (_, Identifier (_, { Ast.Identifier.name = "require"; comments = _ })); + _; + } ) as expr -> + (loc, T.Outline (T.DynamicRequire expr)) + | ( _, + Call + { + Ast.Expression.Call.callee = + ( _, + Member + { + Ast.Expression.Member._object = + (_, Identifier (_, { Ast.Identifier.name = "Object"; comments = _ })); + property = + Ast.Expression.Member.PropertyIdentifier + (_, { Ast.Identifier.name = "freeze"; comments = _ }); + } ); + targs = None; + arguments = [Expression (loc, Object stuff)]; + } ) -> + Ast.Expression.Object.( + let { properties; comments = _ } = stuff in + begin + match object_ properties with + | Some o -> (loc, T.ObjectLiteral { frozen = true; properties = o }) + | None -> T.FixMe.mk_expr_type loc + end) + | ( _, + Call + { + Ast.Expression.Call.callee = + (_, Identifier (_, { Ast.Identifier.name = "keyMirror"; comments = _ })); + targs = None; + arguments = [Expression (loc, Object stuff)]; + } ) -> + Ast.Expression.Object.( + let { properties; comments = _ } = stuff in + begin + match object_ properties with + | Some o -> + begin + match keys_as_string_values_of_object_properties o with + | Some o' -> (loc, T.ObjectLiteral { frozen = false; properties = o' }) + | None -> T.FixMe.mk_expr_type loc + end + | None -> T.FixMe.mk_expr_type loc + end) + | (loc, Unary stuff) -> + Ast.Expression.Unary.( + let { operator; argument; comments = _ } = stuff in + arith_unary operator loc argument) + | (loc, Binary stuff) -> + Ast.Expression.Binary.( + let { operator; left; right } = stuff in + arith_binary operator loc left right) + | (loc, Sequence stuff) -> + Ast.Expression.Sequence.( + let { expressions } = stuff in + begin + match List.rev expressions with + | expr :: _ -> literal_expr expr + | [] -> T.FixMe.mk_expr_type loc + end) + | (loc, Assignment stuff) -> + Ast.Expression.Assignment.( + let { operator; left = _; right } = stuff in + begin + match operator with + | None -> literal_expr right + | Some _ -> T.FixMe.mk_expr_type loc + end) + | (loc, Update stuff) -> + Ast.Expression.Update.( + (* This operation has a simple result type. 
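Concretely (this just restates the single case below, which ignores the argument entirely): both prefix and postfix updates such as `x++` or `--y` are summarized as `T.Number`, no matter what the argument expression is.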
*) + let { operator = _; argument = _; prefix = _ } = stuff in + (loc, T.Number)) + | (loc, JSXElement e) -> + Ast.JSX.( + let { openingElement; closingElement = _; children = _ } = e in + let (_loc, { Opening.name; selfClosing = _; attributes = _ }) = openingElement in + begin + match (name, Env.facebook_fbt) with + | (Ast.JSX.Identifier (_loc_id, { Identifier.name = "fbt" }), Some custom_jsx_type) -> + ( loc, + T.JSXLiteral + ( loc, + { + Ast.Type.Generic.id = + Ast.Type.Generic.Identifier.Unqualified + (Flow_ast_utils.ident_of_source (loc, custom_jsx_type)); + targs = None; + } ) ) + | _ -> T.FixMe.mk_expr_type loc + end) + | (loc, Call _) + | (loc, Comprehension _) + | (loc, Conditional _) + | (loc, Generator _) + | (loc, Import _) + | (loc, JSXFragment _) + | (loc, Logical _) + | (loc, MetaProperty _) + | (loc, New _) + | (loc, OptionalCall _) + | (loc, OptionalMember _) + | (loc, Super) + | (loc, TaggedTemplate _) + | (loc, This) + | (loc, Yield _) -> + T.FixMe.mk_expr_type loc) + + and identifier stuff = + let (loc, { Ast.Identifier.name; comments = _ }) = stuff in + T.RLexical (loc, name) + + and member stuff = + Ast.Expression.Member.( + let { _object; property } = stuff in + let ref_expr_opt = ref_expr _object in + let name_opt = + match property with + | PropertyIdentifier (loc, x) -> Some (loc, x) + | PropertyPrivateName (_, (loc, x)) -> Some (loc, x) + | PropertyExpression _ -> None + in + match (ref_expr_opt, name_opt) with + | (Some (path_loc, t), Some name) -> + Some (T.RPath (path_loc, t, Flow_ast_utils.source_of_ident name)) + | (None, _) + | (_, None) -> + None) + + and ref_expr expr = + Ast.Expression.( + match expr with + | (loc, Identifier stuff) -> Some (loc, identifier stuff) + | (loc, Member stuff) -> + begin + match member stuff with + | Some ref_expr -> Some (loc, ref_expr) + | None -> None + end + | _ -> None) + + and arith_unary operator loc argument = + Ast.Expression.Unary.( + match operator with + (* These operations have simple result types. *) + | Plus -> (loc, T.Number) + | BitNot -> (loc, T.Number) + | Typeof -> (loc, T.String) + | Void -> (loc, T.Void) + | Delete -> (loc, T.Boolean) + (* These operations may or may not have simple result types. See associated TODO: comment in + Signature_builder_verify. *) + | Minus -> + begin + match literal_expr argument with + | (_, T.NumberLiteral { Ast.NumberLiteral.value; raw }) -> + (loc, T.NumberLiteral { Ast.NumberLiteral.value = -.value; raw = "-" ^ raw }) + | _ -> (loc, T.Number) + end + | Not -> + begin + match literal_expr argument with + | (_, T.BooleanLiteral b) -> (loc, T.BooleanLiteral (not b)) + | (_, T.Function _) + | (_, T.ObjectLiteral _) + | (_, T.ArrayLiteral _) + | (_, T.JSXLiteral _) -> + (loc, T.BooleanLiteral false) + | (_, T.Void) + | (_, T.Null) -> + (loc, T.BooleanLiteral true) + | (_, T.NumberLiteral { Ast.NumberLiteral.value; _ }) -> + (loc, T.BooleanLiteral (value = 0.)) + | (_, T.StringLiteral { Ast.StringLiteral.value; _ }) -> + (loc, T.BooleanLiteral (value = "")) + | _ -> (loc, T.Boolean) + end + | Await -> + (* The result type of this operation depends in a complicated way on the argument type. *) + T.FixMe.mk_expr_type loc) + + and arith_binary operator loc _left _right = + Ast.Expression.Binary.( + match operator with + | Plus -> + (* The result type of this operation depends in a complicated way on the argument type. *) + T.FixMe.mk_expr_type loc + (* These operations have simple result types. 
*) + | Equal -> (loc, T.Boolean) + | NotEqual -> (loc, T.Boolean) + | StrictEqual -> (loc, T.Boolean) + | StrictNotEqual -> (loc, T.Boolean) + | LessThan -> (loc, T.Boolean) + | LessThanEqual -> (loc, T.Boolean) + | GreaterThan -> (loc, T.Boolean) + | GreaterThanEqual -> (loc, T.Boolean) + | LShift -> (loc, T.Number) + | RShift -> (loc, T.Number) + | RShift3 -> (loc, T.Number) + | Minus -> (loc, T.Number) + | Mult -> (loc, T.Number) + | Exp -> (loc, T.Number) + | Div -> (loc, T.Number) + | Mod -> (loc, T.Number) + | BitOr -> (loc, T.Number) + | Xor -> (loc, T.Number) + | BitAnd -> (loc, T.Number) + | In -> (loc, T.Boolean) + | Instanceof -> (loc, T.Boolean)) + + and function_param (_, { Ast.Function.Param.argument; default }) = + pattern ~default:(default <> None) argument + + and function_rest_param (loc, { Ast.Function.RestParam.argument }) = (loc, pattern argument) + + and function_params params = + Ast.Function.( + let (params_loc, { Params.params; rest }) = params in + let params = Core_list.map ~f:function_param params in + let rest = + match rest with + | None -> None + | Some param -> Some (function_rest_param param) + in + (params_loc, params, rest)) + + and function_return ~is_missing_ok ~async return = + match return with + | Ast.Type.Missing loc -> + if is_missing_ok () then + let t = T.Void in + let t = + if async then + T.Promise (loc, t) + else + t + in + T.EXPR (loc, t) + else + T.FixMe.mk_little_annotation loc + | Ast.Type.Available (_, t) -> T.TYPE (type_ t) + + and function_predicate body predicate = + match (predicate, body) with + | (None, _) -> None + | ( Some (loc, Ast.Type.Predicate.Inferred), + ( Ast.Function.BodyBlock + ( _, + { + Ast.Statement.Block.body = + [(_, Ast.Statement.Return { Ast.Statement.Return.argument = Some e; _ })]; + } ) + | Ast.Function.BodyExpression e ) ) -> + Some (loc, Ast.Type.Predicate.Declared e) + | (Some (_, Ast.Type.Predicate.Inferred), _) -> None + | (Some (_, Ast.Type.Predicate.Declared _), _) -> predicate + + and function_ generator async tparams params return body = + let tparams = type_params tparams in + let params = function_params params in + let return = + let is_missing_ok () = (not generator) && Signature_utils.Procedure_decider.is body in + function_return ~is_missing_ok ~async return + in + (* TODO: It is unclear what happens for generator functions. In particular, + what do declarations of such functions look like, aside from the return type being + `Generator<...>`? 
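As a purely hypothetical illustration (an assumption, not something this code handles): a generator such as `function* gen() { yield 1; }` would presumably need its signature spelled out along the lines of `declare function gen(): Generator<number, void, void>;`, making the yield, return, and next types explicit rather than recovered from the body.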
*) + T.FUNCTION { tparams; params; return } + + and class_ = + let class_element acc element = + Ast.Class.( + match element with + | Body.Method + ( _, + { + Method.key = + Ast.Expression.Object.Property.Identifier + (_, { Ast.Identifier.name; comments = _ }); + _; + } ) + | Body.Property + ( _, + { + Property.key = + Ast.Expression.Object.Property.Identifier + (_, { Ast.Identifier.name; comments = _ }); + _; + } ) + when (not Env.prevent_munge) && Signature_utils.is_munged_property_name name -> + acc + | Body.Property + ( _, + { + Property.key = + Ast.Expression.Object.Property.Identifier + (_, { Ast.Identifier.name = "propTypes"; comments = _ }); + static = true; + _; + } ) + when Env.ignore_static_propTypes -> + acc + | Body.Method (elem_loc, { Method.key; value; kind; static; decorators = _ }) -> + let x = object_key key in + let ( loc, + { + Ast.Function.generator; + tparams; + params; + return; + body; + id = _; + async; + predicate = _; + sig_loc = _; + } ) = + value + in + ( elem_loc, + T.CMethod (x, kind, static, (loc, function_ generator async tparams params return body)) + ) + :: acc + | Body.Property (elem_loc, { Property.key; annot; static; variance; value = _ }) -> + let x = object_key key in + (elem_loc, T.CProperty (x, static, variance, annotated_type annot)) :: acc + | Body.PrivateField + ( elem_loc, + { + PrivateField.key = (_, (_, { Ast.Identifier.name = x; comments = _ })); + annot; + static; + variance; + value = _; + } ) -> + (elem_loc, T.CPrivateField (x, static, variance, annotated_type annot)) :: acc) + in + fun tparams body super super_targs implements -> + Ast.Class.( + let (body_loc, { Body.body }) = body in + let tparams = type_params tparams in + let body = List.rev @@ List.fold_left class_element [] body in + let extends = + match super with + | None -> None + | Some expr -> + let ref_expr_opt = ref_expr expr in + begin + match ref_expr_opt with + | Some (loc, reference) -> + Some + ( loc, + { + Ast.Type.Generic.id = T.generic_id_of_reference reference; + targs = type_args super_targs; + } ) + | None -> T.FixMe.mk_extends (fst expr) + end + in + let implements = Core_list.map ~f:class_implement implements in + T.CLASS { tparams; extends; implements; body = (body_loc, body) }) + + and array_ = + let array_element expr_or_spread_opt = + Ast.Expression.( + match expr_or_spread_opt with + | None -> assert false + | Some (Expression expr) -> T.AInit (literal_expr expr) + | Some (Spread _spread) -> assert false) + in + function + | [] -> None + | t :: ts -> (try Some (Nel.map array_element (t, ts)) with _ -> None) + + and class_implement implement = implement + + and object_ = + let object_property = + Ast.Expression.Object.Property.( + function + | (loc, Init { key; value; shorthand = _ }) -> + let x = object_key key in + (loc, T.OInit (x, literal_expr value)) + | (loc, Method { key; value = (fn_loc, fn) }) -> + let x = object_key key in + let { + Ast.Function.generator; + tparams; + params; + return; + body; + id = _; + async; + predicate = _; + sig_loc = _; + } = + fn + in + (loc, T.OMethod (x, (fn_loc, function_ generator async tparams params return body))) + | (loc, Get { key; value = (fn_loc, fn) }) -> + let x = object_key key in + let { + Ast.Function.generator; + tparams; + params; + return; + body; + id = _; + async; + predicate = _; + sig_loc = _; + } = + fn + in + (loc, T.OGet (x, (fn_loc, function_ generator async tparams params return body))) + | (loc, Set { key; value = (fn_loc, fn) }) -> + let x = object_key key in + let { + Ast.Function.generator; 
+ tparams; + params; + return; + body; + id = _; + async; + predicate = _; + sig_loc = _; + } = + fn + in + (loc, T.OSet (x, (fn_loc, function_ generator async tparams params return body)))) + in + let object_spread_property = + Ast.Expression.Object.SpreadProperty.( + (fun (loc, { argument }) -> (loc, T.OSpread (literal_expr argument)))) + in + function + | [] -> None + | property :: properties -> + Ast.Expression.Object.( + (try + Some + (Nel.map + (function + | Property p -> object_property p + | SpreadProperty p -> object_spread_property p) + (property, properties)) + with _ -> None)) +end + +module Generator (Env : Signature_builder_verify.EvalEnv) = struct + module Eval = Eval (Env) + + let rec eval (loc, kind) = + match kind with + | Kind.WithPropertiesDef { base; properties } -> + begin + match Kind.get_function_kind_info base with + | Some (generator, async, tparams, params, return, body) -> + T.FunctionWithStaticsDecl + { + base = (loc, T.Function (Eval.function_ generator async tparams params return body)); + statics = + Core_list.map properties ~f:(fun (id_prop, expr) -> + (id_prop, Eval.literal_expr expr)); + } + | None -> eval (loc, base) + end + | Kind.VariableDef { id = _; annot; init } -> T.VariableDecl (Eval.annotation loc ?init annot) + | Kind.FunctionDef { generator; async; tparams; params; return; body; predicate } -> + let annot = + T.EXPR (loc, T.Function (Eval.function_ generator async tparams params return body)) + in + let predicate = Eval.function_predicate body predicate in + T.FunctionDecl { annot; predicate } + | Kind.DeclareFunctionDef { annot = (_, t); predicate } -> + T.FunctionDecl { annot = T.TYPE (Eval.type_ t); predicate } + | Kind.ClassDef { tparams; body; super; super_targs; implements } -> + T.ClassDecl (Eval.class_ tparams body super super_targs implements) + | Kind.DeclareClassDef { tparams; body = (body_loc, body); extends; mixins; implements } -> + let tparams = Eval.type_params tparams in + let body = Eval.object_type body in + let extends = + match extends with + | None -> None + | Some r -> Some (Eval.generic r) + in + let mixins = Core_list.map ~f:Eval.generic mixins in + let implements = Core_list.map ~f:Eval.class_implement implements in + T.ClassDecl + (T.DECLARE_CLASS { tparams; extends; mixins; implements; body = (body_loc, body) }) + | Kind.TypeDef { tparams; right } -> + let tparams = Eval.type_params tparams in + let right = Eval.type_ right in + T.Type { tparams; right } + | Kind.OpaqueTypeDef { tparams; impltype; supertype } -> + let tparams = Eval.type_params tparams in + let impltype = + match impltype with + | None -> None + | Some t -> Some (Eval.type_ t) + in + let supertype = + match supertype with + | None -> None + | Some t -> Some (Eval.type_ t) + in + T.OpaqueType { tparams; impltype; supertype } + | Kind.InterfaceDef { tparams; extends; body = (body_loc, body) } -> + let tparams = Eval.type_params tparams in + let extends = Core_list.map ~f:Eval.generic extends in + let body = Eval.object_type body in + T.Interface { tparams; extends; body = (body_loc, body) } + | Kind.ImportNamedDef { kind; source; name } -> T.ImportNamed { kind; source; name } + | Kind.ImportStarDef { kind; source } -> T.ImportStar { kind; source } + | Kind.RequireDef { source; name } -> T.Require { source; name } + | Kind.SketchyToplevelDef -> T.FixMe.mk_decl loc + + let make_env outlined env = + SMap.fold + (fun n entries acc -> + Loc_collections.LocMap.fold + (fun loc kind acc -> + let id = (loc, n) in + let dt = eval kind in + let decl_loc = fst 
kind in + T.stmt_of_decl outlined decl_loc id dt :: acc) + entries + acc) + env + [] + + let cjs_exports = + let declare_module_exports mod_exp_loc loc t = + (mod_exp_loc, Ast.Statement.DeclareModuleExports (loc, t)) + in + let additional_properties_of_module_exports outlined add_module_exports_list = + Core_list.rev_map + ~f:(fun (id, expr) -> + let annot = T.type_of_expr_type outlined (Eval.literal_expr expr) in + Ast.Type.Object.( + Property + ( fst id, + { + Property.key = + Ast.Expression.Object.Property.Identifier (Flow_ast_utils.ident_of_source id); + value = Property.Init annot; + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } ))) + add_module_exports_list + in + let set_module_exports mod_exp_loc outlined expr add_module_exports_list = + let annot = T.type_of_expr_type outlined (Eval.literal_expr expr) in + if ListUtils.is_empty add_module_exports_list then + (mod_exp_loc, Ast.Statement.DeclareModuleExports (fst annot, annot)) + else + let properties = + additional_properties_of_module_exports outlined add_module_exports_list + in + let ot = { Ast.Type.Object.exact = false; inexact = true; properties } in + let assign = (mod_exp_loc, Ast.Type.Object ot) in + let t = + let name = "$TEMPORARY$module$exports$assign" in + let id = + Ast.Type.Generic.Identifier.Unqualified + (Flow_ast_utils.ident_of_source (mod_exp_loc, name)) + in + ( mod_exp_loc, + Ast.Type.Generic { Ast.Type.Generic.id; targs = Some (mod_exp_loc, [annot; assign]) } + ) + in + (mod_exp_loc, Ast.Statement.DeclareModuleExports (fst annot, t)) + in + let add_module_exports mod_exp_loc outlined add_module_exports_list = + let properties = additional_properties_of_module_exports outlined add_module_exports_list in + let ot = { Ast.Type.Object.exact = true; inexact = false; properties } in + let t = (mod_exp_loc, Ast.Type.Object ot) in + [(mod_exp_loc, Ast.Statement.DeclareModuleExports (mod_exp_loc, t))] + in + fun outlined -> function + | (None, _) -> [] + | (Some mod_exp_loc, list) -> + let (declare_module_exports_list, set_module_exports_list, add_module_exports_list) = + List.fold_left + (fun (declare_module_exports_list, set_module_exports_list, add_module_exports_list) -> + function + | File_sig.DeclareModuleExportsDef (loc, t) -> + ((loc, t) :: declare_module_exports_list, [], []) + | File_sig.SetModuleExportsDef expr -> + ( declare_module_exports_list, + (expr, add_module_exports_list) :: set_module_exports_list, + [] ) + | File_sig.AddModuleExportsDef (id, expr) -> + ( declare_module_exports_list, + set_module_exports_list, + (id, expr) :: add_module_exports_list )) + ([], [], []) + list + in + (match (declare_module_exports_list, set_module_exports_list, add_module_exports_list) with + | (_ :: _, _, _) -> + (* if there are any `declare module.exports: ...`, then the last such wins *) + let (loc, t) = List.hd (List.rev declare_module_exports_list) in + [declare_module_exports mod_exp_loc loc t] + | ([], _ :: _, _) -> + (* if there are any `module.exports = ...`, then the last such wins *) + let (expr, add_module_exports_list) = List.hd (List.rev set_module_exports_list) in + [set_module_exports mod_exp_loc outlined expr add_module_exports_list] + | ([], [], _) -> + (* otherwise, collect every `module.exports.X = ...` *) + add_module_exports mod_exp_loc outlined add_module_exports_list) + + let eval_export_default_declaration = + Ast.Statement.ExportDefaultDeclaration.( + function + | Declaration + ( loc, + Ast.Statement.FunctionDeclaration + ({ Ast.Function.id = 
Some _; _ } as function_declaration) ) -> + `Decl (Entry.function_declaration loc function_declaration) + | Declaration + ( loc, + Ast.Statement.FunctionDeclaration + { + Ast.Function.id = None; + generator; + tparams; + params; + return; + body; + async; + predicate = _; + sig_loc = _; + } ) -> + `Expr (loc, T.Function (Eval.function_ generator async tparams params return body)) + | Declaration (loc, Ast.Statement.ClassDeclaration ({ Ast.Class.id = Some _; _ } as class_)) + -> + `Decl (Entry.class_ loc class_) + | Declaration + ( loc, + Ast.Statement.ClassDeclaration + { Ast.Class.id = None; tparams; body; extends; implements; classDecorators = _ } ) -> + let (super, super_targs) = + match extends with + | None -> (None, None) + | Some (_, { Ast.Class.Extends.expr; targs }) -> (Some expr, targs) + in + `Expr + (loc, T.Outline (T.Class (None, Eval.class_ tparams body super super_targs implements))) + | Declaration _stmt -> assert false + | Expression (loc, Ast.Expression.Function ({ Ast.Function.id = Some _; _ } as function_)) -> + `Decl (Entry.function_declaration loc function_) + | Expression expr -> `Expr (Eval.literal_expr expr)) + + let export_name export_loc ?exported ?source local exportKind = + ( export_loc, + Ast.Statement.ExportNamedDeclaration + { + Ast.Statement.ExportNamedDeclaration.declaration = None; + specifiers = + Some + (Ast.Statement.ExportNamedDeclaration.ExportSpecifiers + [ + ( approx_loc export_loc, + { + Ast.Statement.ExportNamedDeclaration.ExportSpecifier.local = + Flow_ast_utils.ident_of_source local; + exported = Option.map ~f:Flow_ast_utils.ident_of_source exported; + } ); + ]); + source; + exportKind; + } ) + + let export_named_specifier export_loc local remote source exportKind = + let exported = + if snd remote = snd local then + None + else + Some remote + in + let source = + match source with + | None -> None + | Some source -> Some (T.source_of_source source) + in + export_name export_loc ?exported ?source local exportKind + + let export_star export_loc star_loc ?remote source exportKind = + ( export_loc, + Ast.Statement.ExportNamedDeclaration + { + Ast.Statement.ExportNamedDeclaration.declaration = None; + specifiers = + Some + (Ast.Statement.ExportNamedDeclaration.ExportBatchSpecifier + (star_loc, Option.map ~f:Flow_ast_utils.ident_of_source remote)); + source = Some (T.source_of_source source); + exportKind; + } ) + + let declare_export_default_declaration export_loc default_loc declaration = + ( export_loc, + Ast.Statement.DeclareExportDeclaration + { + default = Some default_loc; + Ast.Statement.DeclareExportDeclaration.declaration = Some declaration; + specifiers = None; + source = None; + } ) + + let export_value_named_declaration export_loc local = + export_name export_loc local Ast.Statement.ExportValue + + let export_value_default_named_declaration export_loc default local = + export_name export_loc local ~exported:default Ast.Statement.ExportValue + + let export_value_named_specifier export_loc local remote source = + export_named_specifier export_loc local remote source Ast.Statement.ExportValue + + let export_value_star export_loc star_loc source = + export_star export_loc star_loc source Ast.Statement.ExportValue + + let export_value_ns_star export_loc star_loc ns source = + export_star export_loc star_loc ~remote:ns source Ast.Statement.ExportValue + + let export_type_named_declaration export_loc local = + export_name export_loc local Ast.Statement.ExportType + + let export_type_named_specifier export_loc local remote source = + 
export_named_specifier export_loc local remote source Ast.Statement.ExportType + + let export_type_star export_loc star_loc source = + export_star export_loc star_loc source Ast.Statement.ExportType + + let eval_export_value_bindings outlined named named_infos star = + File_sig.( + let (named, ns) = + List.partition + (function + | (_, (_, ExportNamed { kind = NamedSpecifier _; _ })) + | (_, (_, ExportNs _)) -> + false + | (_, (_, _)) -> true) + named + in + let stmts = + List.fold_left + (fun acc -> function + | (export_loc, ExportStar { star_loc; source }) -> + export_value_star export_loc star_loc source :: acc) + [] + star + in + let seen = ref SSet.empty in + let stmts = + List.fold_left2 + (fun acc (n, (export_loc, export)) export_def -> + if SSet.mem n !seen then + acc + else ( + seen := SSet.add n !seen; + match (export, export_def) with + | (ExportDefault { default_loc; local }, DeclareExportDef decl) -> + begin + match local with + | Some id -> + export_value_default_named_declaration export_loc (default_loc, n) id :: acc + | None -> declare_export_default_declaration export_loc default_loc decl :: acc + end + | (ExportDefault { default_loc; _ }, ExportDefaultDef decl) -> + begin + match eval_export_default_declaration decl with + | `Decl (id, _kind) -> + export_value_default_named_declaration + export_loc + (default_loc, n) + (Flow_ast_utils.source_of_ident id) + :: acc + | `Expr expr_type -> + let declaration = + Ast.Statement.DeclareExportDeclaration.DefaultType + (T.type_of_expr_type outlined expr_type) + in + declare_export_default_declaration export_loc default_loc declaration :: acc + end + | (ExportNamed { loc; kind = NamedDeclaration }, DeclareExportDef _decl) -> + export_value_named_declaration export_loc (loc, n) :: acc + | (ExportNamed { loc; kind = NamedDeclaration }, ExportNamedDef _stmt) -> + export_value_named_declaration export_loc (loc, n) :: acc + | _ -> assert false + )) + stmts + named + named_infos + in + List.fold_left + (fun acc (n, (export_loc, export)) -> + match export with + | ExportNamed { loc; kind = NamedSpecifier { local = name; source } } -> + export_value_named_specifier export_loc name (loc, n) source :: acc + | ExportNs { loc; star_loc; source } -> + export_value_ns_star export_loc star_loc (loc, n) source :: acc + | _ -> assert false) + stmts + ns) + + let eval_export_type_bindings type_named type_named_infos type_star = + File_sig.( + let (type_named, type_ns) = + List.partition + (function + | (_, (_, TypeExportNamed { kind = NamedSpecifier _; _ })) -> false + | (_, (_, _)) -> true) + type_named + in + let stmts = + List.fold_left + (fun acc -> function + | (export_loc, ExportStar { star_loc; source }) -> + export_type_star export_loc star_loc source :: acc) + [] + type_star + in + let stmts = + List.fold_left2 + (fun acc (n, (export_loc, export)) export_def -> + (match (export, export_def) with + | (TypeExportNamed { loc; kind = NamedDeclaration }, DeclareExportDef _decl) -> + export_type_named_declaration export_loc (loc, n) + | (TypeExportNamed { loc; kind = NamedDeclaration }, ExportNamedDef _stmt) -> + export_type_named_declaration export_loc (loc, n) + | _ -> assert false) + :: acc) + stmts + type_named + type_named_infos + in + List.fold_left + (fun acc (n, (export_loc, export)) -> + (match export with + | TypeExportNamed { loc; kind = NamedSpecifier { local = name; source } } -> + export_type_named_specifier export_loc name (loc, n) source + | _ -> assert false) + :: acc) + stmts + type_ns) + + let exports outlined file_sig = + 
File_sig.( + let module_sig = file_sig.module_sig in + let { info = exports_info; module_kind; type_exports_named; type_exports_star; requires = _ } + = + module_sig + in + let { module_kind_info; type_exports_named_info } = exports_info in + let values = + match (module_kind, module_kind_info) with + | (CommonJS { mod_exp_loc }, CommonJSInfo cjs_exports_defs) -> + cjs_exports outlined (mod_exp_loc, cjs_exports_defs) + | (ES { named; star }, ESInfo named_infos) -> + eval_export_value_bindings outlined named named_infos star + | _ -> assert false + in + let types = + eval_export_type_bindings type_exports_named type_exports_named_info type_exports_star + in + (values, types)) + + let relativize loc program_loc = + Loc. + { + program_loc with + start = { line = program_loc._end.line + loc.start.line; column = loc.start.column }; + _end = { line = program_loc._end.line + loc._end.line; column = loc._end.column }; + } + + let make env file_sig program = + let (program_loc, _, _) = program in + let outlined = T.Outlined.create () in + let env = make_env outlined env in + let (values, types) = exports outlined file_sig in + let outlined_stmts = T.Outlined.get outlined in + ( program_loc, + List.sort Pervasives.compare (List.rev_append env @@ List.rev outlined_stmts) + @ List.sort Pervasives.compare (List.rev_append values @@ List.rev types), + [] ) + + (* no need to include the comments *) +end diff --git a/src/parser_utils/signature_builder_kind.ml b/src/parser_utils/signature_builder_kind.ml index cc4a56945ba..d7a776b0dd6 100644 --- a/src/parser_utils/signature_builder_kind.ml +++ b/src/parser_utils/signature_builder_kind.ml @@ -1,72 +1,101 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) +module Ast_utils = Flow_ast_utils module Ast = Flow_ast module Annot_path = struct type t = | Annot of (Loc.t, Loc.t) Ast.Type.annotation - | Object of t * string - | Array of t * int + | Object of Loc.t * (t * (Loc.t * string)) let mk_annot ?annot_path = function - | None -> annot_path - | Some annot -> Some (Annot (annot)) + | Ast.Type.Missing _ -> annot_path + | Ast.Type.Available annot -> Some (Annot annot) - let mk_object ?annot_path x = + let mk_object prop_loc ?annot_path (loc, x) = match annot_path with - | None -> None - | Some annot_path -> Some (Object (annot_path, x)) + | None -> None + | Some annot_path -> Some (Object (prop_loc, (annot_path, (loc, x)))) +end - let mk_array ?annot_path i = - match annot_path with - | None -> None - | Some annot_path -> Some (Array (annot_path, i)) +module Init_path = struct + type t = + | Init of (Loc.t, Loc.t) Ast.Expression.t + | Object of Loc.t * (t * (Loc.t * string)) + + let mk_init = function + | None -> None + | Some init -> Some (Init init) + + let mk_object prop_loc ?init_path (loc, x) = + match init_path with + | None -> None + | Some init_path -> Some (Object (prop_loc, (init_path, (loc, x)))) end module Sort = struct - type t = Type | Value + type t = + | Type + | Value + let to_string = function | Type -> "type" | Value -> "value" let is_import_type = - let open Ast.Statement.ImportDeclaration in - function - | ImportType | ImportTypeof -> true - | ImportValue -> true (* conditional *) + Ast.Statement.ImportDeclaration.( + function + | ImportType + | ImportTypeof -> + true + | ImportValue -> true) + + (* conditional *) let is_import_value = - let open Ast.Statement.ImportDeclaration in - function - | ImportType | ImportTypeof -> false - | ImportValue -> true + Ast.Statement.ImportDeclaration.( + function + | ImportType + | ImportTypeof -> + false + | ImportValue -> true) let of_import_kind = - let open Ast.Statement.ImportDeclaration in - function - | ImportValue | ImportTypeof -> Value - | ImportType -> Type + Ast.Statement.ImportDeclaration.( + function + | ImportValue + | ImportTypeof -> + Value + | ImportType -> Type) end type t = + | WithPropertiesDef of { + properties: ((Loc.t, Loc.t) Ast.Identifier.t * (Loc.t, Loc.t) Ast.Expression.t) list; + base: t; + } | VariableDef of { + id: (Loc.t, Loc.t) Ast.Identifier.t; annot: Annot_path.t option; - init: (Loc.t, Loc.t) Ast.Expression.t option; + init: Init_path.t option; } | FunctionDef of { generator: bool; + async: bool; tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; params: (Loc.t, Loc.t) Ast.Function.Params.t; - return: (Loc.t, Loc.t) Ast.Function.return; + return: (Loc.t, Loc.t) Ast.Type.annotation_or_hint; body: (Loc.t, Loc.t) Ast.Function.body; + predicate: (Loc.t, Loc.t) Ast.Type.Predicate.t option; } | DeclareFunctionDef of { annot: (Loc.t, Loc.t) Ast.Type.annotation; + predicate: (Loc.t, Loc.t) Ast.Type.Predicate.t option; } | ClassDef of { tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; @@ -77,7 +106,7 @@ type t = } | DeclareClassDef of { tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; - body: (Loc.t, Loc.t) Ast.Type.Object.t; + body: Loc.t * (Loc.t, Loc.t) Ast.Type.Object.t; extends: (Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t) option; mixins: (Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t) list; implements: (Loc.t, Loc.t) Ast.Class.Implements.t list; @@ -93,29 +122,31 @@ type t = } | InterfaceDef of { tparams: (Loc.t, Loc.t) Ast.Type.ParameterDeclaration.t option; - body: (Loc.t, Loc.t) Ast.Type.Object.t; + body: Loc.t * 
(Loc.t, Loc.t) Ast.Type.Object.t; extends: (Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t) list; } | ImportNamedDef of { kind: Ast.Statement.ImportDeclaration.importKind; - source: Ast_utils.source; - name: Ast_utils.ident; + source: Loc.t Ast_utils.source; + name: Loc.t Ast_utils.ident; } | ImportStarDef of { kind: Ast.Statement.ImportDeclaration.importKind; - source: Ast_utils.source; + source: Loc.t Ast_utils.source; } | RequireDef of { - source: Ast_utils.source; + source: Loc.t Ast_utils.source; + name: Loc.t Ast_utils.ident Nel.t option; } | SketchyToplevelDef -let to_string = function +let rec to_string = function + | WithPropertiesDef { base; _ } -> Printf.sprintf "WithPropertiesDef(%s)" (to_string base) | VariableDef _ -> "VariableDef" | FunctionDef _ -> "FunctionDef" - | DeclareFunctionDef _ -> "DeclareFunctionDef" - | ClassDef _ -> "ClassDef" - | DeclareClassDef _ -> "DeclareClassDef" + | DeclareFunctionDef _ -> "DeclareFunctionDef" + | ClassDef _ -> "ClassDef" + | DeclareClassDef _ -> "DeclareClassDef" | TypeDef _ -> "TypeDef" | OpaqueTypeDef _ -> "OpaqueTypeDef" | InterfaceDef _ -> "InterfaceDef" @@ -124,25 +155,29 @@ let to_string = function | RequireDef _ -> "RequireDef" | SketchyToplevelDef -> "SketchyToplevelDef" -let is_type = function +let rec is_type = function + | WithPropertiesDef { base; _ } -> is_type base | VariableDef _ -> true (* conditional *) | FunctionDef _ -> false - | DeclareFunctionDef _ -> true - | ClassDef _ -> true - | DeclareClassDef _ -> true + | DeclareFunctionDef _ -> true + | ClassDef _ -> true + | DeclareClassDef _ -> true | TypeDef _ -> true | OpaqueTypeDef _ -> true | InterfaceDef _ -> true | ImportNamedDef { kind; _ } -> Sort.is_import_type kind | ImportStarDef { kind; _ } -> Sort.is_import_type kind | RequireDef _ -> true (* conditional *) - | SketchyToplevelDef -> true (* don't care *) + | SketchyToplevelDef -> true -let is_value = function +(* don't care *) + +let rec is_value = function + | WithPropertiesDef { base; _ } -> is_value base | VariableDef _ -> true | FunctionDef _ -> true | DeclareFunctionDef _ -> true - | ClassDef _ -> true + | ClassDef _ -> true | DeclareClassDef _ -> true | TypeDef _ -> false | OpaqueTypeDef _ -> false @@ -150,8 +185,24 @@ let is_value = function | ImportNamedDef { kind; _ } -> Sort.is_import_value kind | ImportStarDef { kind; _ } -> Sort.is_import_value kind | RequireDef _ -> true - | SketchyToplevelDef -> true (* don't care *) + | SketchyToplevelDef -> true + +(* don't care *) let validator = function | Sort.Type -> is_type | Sort.Value -> is_value + +let get_function_kind_info = function + | FunctionDef { generator; async; tparams; params; return; body; predicate = _ } -> + Some (generator, async, tparams, params, return, body) + | VariableDef + { + id = _; + annot = None; + init = Some (Init_path.Init (_, Ast.Expression.(Function stuff | ArrowFunction stuff))); + } -> + Ast.Function.( + let { id = _; generator; async; tparams; params; return; body; _ } = stuff in + Some (generator, async, tparams, params, return, body)) + | _ -> None diff --git a/src/parser_utils/signature_builder_verify.ml b/src/parser_utils/signature_builder_verify.ml index e5a9760e605..dd837d94304 100644 --- a/src/parser_utils/signature_builder_verify.ml +++ b/src/parser_utils/signature_builder_verify.ml @@ -1,25 +1,30 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
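(* Editor's note: a minimal, self-contained sketch of the access-path idea
   behind Annot_path/Init_path above. Each Object step records the property
   that was taken, while the leaf carries the underlying annotation or
   initializer; the verifier later just drills down to the leaf. All names
   below are invented for illustration and are not Flow's. *)
type 'leaf path =
  | Leaf of 'leaf
  | Prop of string * 'leaf path  (* property name taken at this step *)

(* Drill to the leaf, remembering the properties traversed on the way. *)
let rec unwind = function
  | Leaf x -> (x, [])
  | Prop (name, rest) ->
    let (x, names) = unwind rest in
    (x, name :: names)

(* unwind (Prop ("b", Prop ("a", Leaf 42))) = (42, ["b"; "a"]) *)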
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module Ast_utils = Flow_ast_utils module Ast = Flow_ast - -module LocMap = Utils_js.LocMap - +module LocMap = Loc_collections.LocMap module Kind = Signature_builder_kind module Entry = Signature_builder_entry - -module Deps = Signature_builder_deps +module Deps = Signature_builder_deps.With_Loc +module File_sig = File_sig.With_Loc module Error = Deps.Error module Dep = Deps.Dep - -module ObjProp = Ast.Expression.Object.Property +module EASort = Signature_builder_deps.ExpectedAnnotationSort module type EvalEnv = sig - val prevent_munge: bool + val prevent_munge : bool + + val facebook_fbt : string option + + (* hacks *) + val ignore_static_propTypes : bool + + val facebook_keyMirror : bool end (* A signature of a module is described by exported expressions / definitions, but what we're really @@ -66,333 +71,475 @@ end `import *` bring exported types in scope? These considerations will affect the computation step and ideally would be verified as well, but we're punting on them right now. *) -module Eval(Env: EvalEnv) = struct +module Eval (Env : EvalEnv) = struct + class predicate_visitor = + object (this) + inherit [Deps.t, Loc.t] Flow_ast_visitor.visitor ~init:Deps.bot as super + + val mutable params_ = SSet.empty + + method toplevel_expression params expr = + this#set_acc Deps.bot; + params_ <- params; + this#expression expr + + method! expression expr = + match snd expr with + | Ast.Expression.Array _ + | Ast.Expression.ArrowFunction _ + | Ast.Expression.Assignment _ + | Ast.Expression.Class _ + | Ast.Expression.Comprehension _ + | Ast.Expression.Function _ + | Ast.Expression.Generator _ + | Ast.Expression.Import _ + | Ast.Expression.JSXElement _ + | Ast.Expression.JSXFragment _ + | Ast.Expression.MetaProperty _ + | Ast.Expression.New _ + | Ast.Expression.Object _ + | Ast.Expression.OptionalCall _ + | Ast.Expression.TaggedTemplate _ + | Ast.Expression.TemplateLiteral _ + | Ast.Expression.TypeCast _ + | Ast.Expression.Update _ + | Ast.Expression.Yield _ -> + this#update_acc (fun deps -> + Deps.join (deps, Deps.top (Error.UnsupportedPredicateExpression (fst expr)))); + expr + | Ast.Expression.Binary _ + | Ast.Expression.Call _ + | Ast.Expression.Conditional _ + | Ast.Expression.Logical _ + | Ast.Expression.Member _ + | Ast.Expression.OptionalMember _ + | Ast.Expression.Sequence _ + | Ast.Expression.Unary _ -> + super#expression expr + | Ast.Expression.Identifier (_, { Ast.Identifier.name; _ }) -> + if not (SSet.mem name params_) then + this#update_acc (fun deps -> Deps.join (deps, Deps.value name)); + expr + | Ast.Expression.Literal _ + | Ast.Expression.Super + | Ast.Expression.This -> + expr + end + + let predicate_expression = + let visitor = new predicate_visitor in + (fun params expr -> visitor#eval (visitor#toplevel_expression params) expr) let rec type_ tps t = - let open Ast.Type in - match t with - | _, Any - | _, Mixed - | _, Empty - | _, Void - | _, Null - | _, Number - | _, String - | _, Boolean - | _, StringLiteral _ - | _, NumberLiteral _ - | _, BooleanLiteral _ -> Deps.bot - | _, Nullable t -> type_ tps t - | _, Function ft -> function_type tps ft - | _, Object ot -> object_type tps ot - | loc, Generic tr -> type_ref tps (loc, tr) - | _, Typeof v -> - begin match v with - | loc, Ast.Type.Generic vr -> value_ref tps (loc, vr) + Ast.Type.( + match t with + | (_, Any) + | (_, Mixed) + | (_, Empty) + | (_, Void) + | (_, Null) + | (_, Number) + | (_, 
BigInt) + | (_, String) + | (_, Boolean) + | (_, StringLiteral _) + | (_, NumberLiteral _) + | (_, BigIntLiteral _) + | (_, BooleanLiteral _) -> + Deps.bot + | (_, Nullable t) -> type_ tps t + | (_, Function ft) -> function_type tps ft + | (_, Object ot) -> object_type tps ot + | (loc, Generic tr) -> type_ref tps (loc, tr) + | (_, Typeof v) -> + begin + match v with + | (loc, Ast.Type.Generic vr) -> value_ref tps (loc, vr) | _ -> Deps.unreachable end - | _, Interface it -> interface_type tps it - | _, Array at -> array_type tps at - | _, Union (t1, t2, ts) -> + | (_, Interface it) -> interface_type tps it + | (_, Array at) -> array_type tps at + | (_, Union (t1, t2, ts)) -> let deps = type_ tps t1 in let deps = Deps.join (deps, type_ tps t2) in List.fold_left (Deps.reduce_join (type_ tps)) deps ts - | _, Intersection (t1, t2, ts) -> + | (_, Intersection (t1, t2, ts)) -> let deps = type_ tps t1 in let deps = Deps.join (deps, type_ tps t2) in List.fold_left (Deps.reduce_join (type_ tps)) deps ts - | _, Tuple ts -> - List.fold_left (Deps.reduce_join (type_ tps)) Deps.bot ts - | _, Exists -> Deps.unreachable + | (_, Tuple ts) -> List.fold_left (Deps.reduce_join (type_ tps)) Deps.bot ts + | (_, Exists) -> Deps.unreachable) and function_type = let function_type_param tps param = - let open Ast.Type.Function.Param in - let _, { annot; _ } = param in - type_ tps annot - - in fun tps ft -> - let open Ast.Type.Function in - let { params; return; _ } = ft in - let _, { Params.params; rest; } = params in - let deps = List.fold_left (Deps.reduce_join (function_type_param tps)) Deps.bot params in - let deps = match rest with - | None -> deps - | Some (_, { RestParam.argument }) -> Deps.join (deps, function_type_param tps argument) - in - Deps.join (deps, type_ tps return) + Ast.Type.Function.Param.( + let (_, { annot; _ }) = param in + type_ tps annot) + in + fun tps ft -> + Ast.Type.Function.( + let { tparams; params; return } = ft in + let (tps, deps) = type_params tps tparams in + let (_, { Params.params; rest }) = params in + let deps = List.fold_left (Deps.reduce_join (function_type_param tps)) deps params in + let deps = + match rest with + | None -> deps + | Some (_, { RestParam.argument }) -> Deps.join (deps, function_type_param tps argument) + in + Deps.join (deps, type_ tps return)) and object_type = let object_type_prop tps prop = - let open Ast.Type.Object.Property in - let _, { value; _ } = prop in - match value with + Ast.Type.Object.Property.( + let (_, { value; _ }) = prop in + match value with | Init t -> type_ tps t | Get (_, ft) - | Set (_, ft) - -> function_type tps ft + | Set (_, ft) -> + function_type tps ft) in let object_type_spread_prop tps prop = - let open Ast.Type.Object.SpreadProperty in - let _, { argument } = prop in - type_ tps argument + Ast.Type.Object.SpreadProperty.( + let (_, { argument }) = prop in + type_ tps argument) in let object_type_indexer tps prop = - let open Ast.Type.Object.Indexer in - let _, { key; value; _ } = prop in - Deps.join (type_ tps key, type_ tps value) + Ast.Type.Object.Indexer.( + let (_, { key; value; _ }) = prop in + Deps.join (type_ tps key, type_ tps value)) in let object_type_call_prop tps prop = - let open Ast.Type.Object.CallProperty in - let _, { value = (_, ft); _ } = prop in - function_type tps ft + Ast.Type.Object.CallProperty.( + let (_, { value = (_, ft); _ }) = prop in + function_type tps ft) in - let object_type_internal_slot tps prop = - let open Ast.Type.Object.InternalSlot in - let _, { value; _ } = prop in - type_ tps value 
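(* Editor's note: the evaluation functions above fold sub-term results together
   with Deps.join, starting from Deps.bot and escalating to Deps.top on
   unverifiable input -- a join-semilattice discipline. Below is a tiny
   stand-alone sketch of that shape; the record fields and error strings are
   invented and are not Flow's Deps module. *)
module SSet = Set.Make (String)

type deps = {
  needs : SSet.t;        (* names this definition depends on *)
  errors : string list;  (* "top" elements: things that could not be verified *)
}

let bot = { needs = SSet.empty; errors = [] }

let value name = { bot with needs = SSet.singleton name }

let top err = { bot with errors = [err] }

let join a b = { needs = SSet.union a.needs b.needs; errors = a.errors @ b.errors }

(* Same shape as List.fold_left (Deps.reduce_join f) Deps.bot xs in the patch. *)
let reduce_join f acc x = join acc (f x)

let of_list f xs = List.fold_left (reduce_join f) bot xs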
- - in fun tps ot -> - let open Ast.Type.Object in - let { properties; _ } = ot in - List.fold_left (fun deps -> function - | Property prop -> Deps.join (deps, object_type_prop tps prop) - | SpreadProperty prop -> Deps.join (deps, object_type_spread_prop tps prop) - | Indexer prop -> Deps.join (deps, object_type_indexer tps prop) - | CallProperty prop -> Deps.join (deps, object_type_call_prop tps prop) - | InternalSlot prop -> Deps.join (deps, object_type_internal_slot tps prop) - ) Deps.bot properties + fun tps ot -> + Ast.Type.Object.( + let { properties; _ } = ot in + List.fold_left + (fun deps -> function + | Property prop -> Deps.join (deps, object_type_prop tps prop) + | Indexer prop -> Deps.join (deps, object_type_indexer tps prop) + | CallProperty prop -> Deps.join (deps, object_type_call_prop tps prop) + | SpreadProperty prop -> Deps.join (deps, object_type_spread_prop tps prop) + | InternalSlot _prop -> Deps.unreachable) + Deps.bot + properties) and interface_type tps it = - let open Ast.Type.Interface in + Ast.Type.Interface.( let { body = (_, ot); _ } = it in - object_type tps ot + object_type tps ot) - and array_type tps at = - type_ tps at + and array_type tps at = type_ tps at and type_ref = - let open Ast.Type.Generic in - let rec qualified_type_ref tps qualification = - let open Identifier in - match qualification with - | Unqualified (_, name) -> - if SSet.mem name tps then Deps.bot else Deps.type_ name - | Qualified (_, { qualification; _ }) -> qualified_type_ref tps qualification - in - fun tps (_, r) -> - let { id; targs } = r in - let deps = qualified_type_ref tps id in - Deps.join (deps, type_args tps targs) + Ast.Type.Generic.( + let rec qualified_type_ref tps qualification = + Identifier.( + match qualification with + | Unqualified (_, { Ast.Identifier.name; comments = _ }) -> + if SSet.mem name tps then + Deps.bot + else + Deps.type_ name + | Qualified (_, { qualification; _ }) -> qualified_type_ref tps qualification) + in + fun tps (_, r) -> + let { id; targs } = r in + let deps = qualified_type_ref tps id in + Deps.join (deps, type_args tps targs)) and value_ref = - let open Ast.Type.Generic in - let rec qualified_value_ref tps qualification = - let open Identifier in - match qualification with - | Unqualified (loc, name) -> - if SSet.mem name tps then Deps.top (Error.InvalidTypeParamUse loc) else Deps.value name - | Qualified (_, { qualification; _ }) -> qualified_value_ref tps qualification - in - fun tps (_, r) -> - let { id; targs } = r in - let deps = qualified_value_ref tps id in - Deps.join (deps, type_args tps targs) + Ast.Type.Generic.( + let rec qualified_value_ref tps qualification = + Identifier.( + match qualification with + | Unqualified (loc, { Ast.Identifier.name; comments = _ }) -> + if SSet.mem name tps then + Deps.top (Error.InvalidTypeParamUse loc) + else + Deps.value name + | Qualified (_, { qualification; _ }) -> qualified_value_ref tps qualification) + in + fun tps (_, r) -> + let { id; targs } = r in + let deps = qualified_value_ref tps id in + Deps.join (deps, type_args tps targs)) and type_args tps = function | None -> Deps.bot | Some (_, ts) -> List.fold_left (Deps.reduce_join (type_ tps)) Deps.bot ts - let opaque_type tps impltype supertype = - match impltype, supertype with - | None, None -> Deps.bot - | None, Some t | Some t, None -> type_ tps t - | Some t1, Some t2 -> Deps.join (type_ tps t1, type_ tps t2) - - let type_params = + and type_params = let type_param tps tparam = - let open Ast.Type.ParameterDeclaration.TypeParam in - 
let _, { name = (_, x); bound; default; _ } = tparam in - let deps = match bound with - | None -> Deps.bot - | Some (_, t) -> type_ tps t in - let deps = match default with - | None -> deps - | Some t -> Deps.join (deps, type_ tps t) in - x, deps - in fun tps -> - let init = tps, Deps.bot in + Ast.Type.ParameterDeclaration.TypeParam.( + let (_, { name = (_, { Ast.Identifier.name = x; comments = _ }); bound; default; _ }) = + tparam + in + let deps = + match bound with + | Ast.Type.Missing _ -> Deps.bot + | Ast.Type.Available (_, t) -> type_ tps t + in + let deps = + match default with + | None -> deps + | Some t -> Deps.join (deps, type_ tps t) + in + (x, deps)) + in + fun tps -> + let init = (tps, Deps.bot) in function | None -> init | Some (_, tparams) -> - List.fold_left (fun (tps, deps) tparam -> - let tp, deps' = type_param tps tparam in - SSet.add tp tps, Deps.join (deps, deps') - ) init tparams + List.fold_left + (fun (tps, deps) tparam -> + let (tp, deps') = type_param tps tparam in + (SSet.add tp tps, Deps.join (deps, deps'))) + init + tparams + + let type_opt tps = function + | None -> Deps.bot + | Some t -> type_ tps t let rec annot_path tps = function | Kind.Annot_path.Annot (_, t) -> type_ tps t - | Kind.Annot_path.Object (path, _) -> annot_path tps path - | Kind.Annot_path.Array (path, _) -> annot_path tps path + | Kind.Annot_path.Object (_, (path, _)) -> annot_path tps path + + let rec init_path tps = function + | Kind.Init_path.Init expr -> literal_expr tps expr + | Kind.Init_path.Object (_, (path, _)) -> init_path tps path - let rec annotation ?init tps (loc, annot) = + and annotation ~sort ?init tps (loc, annot) = match annot with - | Some path -> annot_path tps path - | None -> - begin match init with - | Some expr -> literal_expr tps expr - | None -> Deps.top (Error.ExpectedAnnotation loc) - end + | Some path -> annot_path tps path + | None -> + begin + match init with + | Some path -> init_path tps path + | None -> Deps.top (Error.ExpectedAnnotation (loc, sort)) + end + + and annotated_type ~sort tps loc = function + | Ast.Type.Missing _ -> Deps.top (Error.ExpectedAnnotation (loc, sort)) + | Ast.Type.Available (_, t) -> type_ tps t and pattern tps patt = - let open Ast.Pattern in - match patt with - | loc, Identifier { Identifier.annot; _ } -> annotation tps (loc, Kind.Annot_path.mk_annot annot) - | loc, Object { Object.annot; _ } -> annotation tps (loc, Kind.Annot_path.mk_annot annot) - | loc, Array { Array.annot; _ } -> annotation tps (loc, Kind.Annot_path.mk_annot annot) - | _, Assignment { Assignment.left; _ } -> pattern tps left - | loc, Expression _ -> Deps.todo loc "Expression" + Ast.Pattern.( + match patt with + | (loc, Identifier { Identifier.annot; _ }) + | (loc, Object { Object.annot; _ }) + | (loc, Array { Array.annot; _ }) -> + annotated_type ~sort:EASort.ArrayPattern tps loc annot + | (loc, Expression _) -> Deps.todo loc "Expression") and literal_expr tps = - let open Ast.Expression in - function - | _, Literal _ -> Deps.bot - | _, TemplateLiteral _ -> Deps.bot - | _, Identifier (_, name) -> Deps.value name - | _, Class stuff -> - let open Ast.Class in - let { id; body; tparams; extends; implements; _ } = stuff in - begin match id with - | None -> - begin - let super, super_targs = match extends with - | None -> None, None - | Some (_, { Extends.expr; targs; }) -> Some expr, targs in - class_ tparams body super super_targs implements - end - | Some (_, name) -> Deps.value name - end - | loc, Function stuff - | loc, ArrowFunction stuff - -> - let open 
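(* Editor's note: type_params above threads the set of in-scope type parameter
   names through a fold -- each parameter's bound/default is evaluated against
   the names seen so far, and the parameter itself is then added so later
   references to it are not reported as outside dependencies. A stand-alone
   sketch of that threading; eval_one and the (name, bound) pairs are
   placeholders, not Flow's types. *)
module SSet = Set.Make (String)

let eval_params eval_one params =
  List.fold_left
    (fun (tps, deps) (name, bound) ->
      let deps' = eval_one tps bound in
      (SSet.add name tps, deps @ deps'))
    (SSet.empty, [])
    params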
Ast.Function in - let { id; generator; tparams; params; return; body; _ } = stuff in - begin match id with - | None -> function_ tps generator tparams params (loc, return) body - | Some (_, name) -> Deps.value name - end - | _, Object stuff -> - let open Ast.Expression.Object in - let { properties } = stuff in - object_ tps properties - | _, Array stuff -> - let open Ast.Expression.Array in - let { elements } = stuff in - array_ tps elements - | _, TypeCast stuff -> - let open Ast.Expression.TypeCast in - let { annot; _ } = stuff in - let _, t = annot in - type_ tps t - | loc, Member stuff -> - let open Ast.Expression.Member in - let { _object; property; _ } = stuff in - let deps = literal_expr tps _object in - begin match property with - | PropertyIdentifier _ - | PropertyPrivateName _ -> deps - | PropertyExpression _ -> Deps.top (Error.UnexpectedObjectKey loc) - end - | loc, Import _ -> Deps.dynamic_import loc - | loc, Call stuff - when begin - let { Ast.Expression.Call.callee; _ } = stuff in - match callee with - | _, Identifier (_, "require") -> true - | _ -> false - end -> Deps.dynamic_require loc - | loc, Unary stuff -> - let open Ast.Expression.Unary in - let { operator; argument; _ } = stuff in - arith_unary tps operator loc argument - | loc, Binary stuff -> - let open Ast.Expression.Binary in - let { operator; left; right } = stuff in - arith_binary tps operator loc left right - | loc, Sequence stuff -> - let open Ast.Expression.Sequence in - let { expressions } = stuff in - begin match List.rev expressions with - | expr::_ -> literal_expr tps expr - | [] -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Sequence)) - end - | loc, Assignment stuff -> - let open Ast.Expression.Assignment in - let { operator; left = _; right } = stuff in - begin match operator with - | Assign -> literal_expr tps right - | _ -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Assignment)) + Ast.Expression.( + function + | (loc, Literal { Ast.Literal.value; raw = _; comments = _ }) -> + begin + match value with + | Ast.Literal.String _ + | Ast.Literal.Number _ + | Ast.Literal.BigInt _ + | Ast.Literal.Boolean _ + | Ast.Literal.Null -> + Deps.bot + | _ -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Literal)) end - | _, Update stuff -> - let open Ast.Expression.Update in - (* This operation has a simple result type. 
*) - let { argument = _; _ } = stuff in - Deps.bot - - | loc, Call _ -> - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Call)) - | loc, Comprehension _ -> + | (_, TemplateLiteral _) -> Deps.bot + | (_, Identifier stuff) -> identifier stuff + | (_, Class stuff) -> + Ast.Class.( + let { id; body; tparams; extends; implements; _ } = stuff in + let (super, super_targs) = + match extends with + | None -> (None, None) + | Some (_, { Extends.expr; targs }) -> (Some expr, targs) + in + let deps = class_ tparams body super super_targs implements in + begin + match id with + | None -> deps + | Some x -> + Deps.replace_local_with_dynamic_class (Flow_ast_utils.source_of_ident x) deps + end) + | (_, Function stuff) + | (_, ArrowFunction stuff) -> + Ast.Function.( + let { id = _; generator; tparams; params; return; body; predicate; _ } = stuff in + function_ tps generator tparams params return body predicate) + | (loc, Object stuff) -> + Ast.Expression.Object.( + let { properties; comments = _ } = stuff in + if properties = [] then + Deps.top (Error.EmptyObject loc) + else + object_ tps loc properties) + | (loc, Array stuff) -> + Ast.Expression.Array.( + let { elements; comments = _ } = stuff in + begin + match elements with + | [] -> Deps.top (Error.EmptyArray loc) + | e :: es -> array_ tps loc (e, es) + end) + | (_, TypeCast stuff) -> + Ast.Expression.TypeCast.( + let { annot; _ } = stuff in + let (_, t) = annot in + type_ tps t) + | (loc, Member stuff) -> member loc stuff + | (loc, Import _) -> Deps.dynamic_import loc + | ( loc, + Call + { + Ast.Expression.Call.callee = + (_, Identifier (_, { Ast.Identifier.name = "require"; comments = _ })); + _; + } ) -> + Deps.dynamic_require loc + | ( _, + Call + { + Ast.Expression.Call.callee = + ( _, + Member + { + Ast.Expression.Member._object = + (_, Identifier (_, { Ast.Identifier.name = "Object"; comments = _ })); + property = + Ast.Expression.Member.PropertyIdentifier + (_, { Ast.Identifier.name = "freeze"; comments = _ }); + } ); + targs = None; + arguments = [Expression ((_, Object _) as expr)]; + } ) -> + literal_expr tps expr + | ( _, + Call + { + Ast.Expression.Call.callee = + (_, Identifier (_, { Ast.Identifier.name = "keyMirror"; comments = _ })); + targs = None; + arguments = [Expression ((_, Object _) as expr)]; + } ) + when Env.facebook_keyMirror -> + literal_expr tps expr + | (loc, Unary stuff) -> + Ast.Expression.Unary.( + let { operator; argument; _ } = stuff in + arith_unary tps operator loc argument) + | (loc, Binary stuff) -> + Ast.Expression.Binary.( + let { operator; left; right } = stuff in + arith_binary tps operator loc left right) + | (loc, Sequence stuff) -> + Ast.Expression.Sequence.( + let { expressions } = stuff in + begin + match List.rev expressions with + | expr :: _ -> literal_expr tps expr + | [] -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Sequence)) + end) + | (loc, Assignment stuff) -> + Ast.Expression.Assignment.( + let { operator; left = _; right } = stuff in + begin + match operator with + | None -> literal_expr tps right + | Some _ -> + Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Assignment)) + end) + | (_, Update stuff) -> + Ast.Expression.Update.( + (* This operation has a simple result type. 
*) + let { argument = _; _ } = stuff in + Deps.bot) + | (loc, JSXElement e) -> + Ast.JSX.( + let { openingElement; closingElement = _; children = _ } = e in + let (_loc, { Opening.name; selfClosing = _; attributes = _ }) = openingElement in + begin + match (name, Env.facebook_fbt) with + | (Ast.JSX.Identifier (_loc_id, { Identifier.name = "fbt" }), Some _) -> Deps.bot + | _ -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.JSXElement)) + end) + | (loc, Call _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Call)) + | (loc, Comprehension _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Comprehension)) - | loc, Conditional _ -> + | (loc, Conditional _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Conditional)) - | loc, Generator _ -> + | (loc, Generator _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Generator)) - | loc, JSXElement _ -> - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.JSXElement)) - | loc, JSXFragment _ -> + | (loc, JSXFragment _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.JSXFragment)) - | loc, Logical _ -> + | (loc, Logical _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Logical)) - | loc, MetaProperty _ -> + | (loc, MetaProperty _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.MetaProperty)) - | loc, New _ -> - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.New)) - | loc, OptionalCall _ -> + | (loc, New _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.New)) + | (loc, OptionalCall _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.OptionalCall)) - | loc, OptionalMember _ -> + | (loc, OptionalMember _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.OptionalMember)) - | loc, Super -> - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Super)) - | loc, TaggedTemplate _ -> + | (loc, Super) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Super)) + | (loc, TaggedTemplate _) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.TaggedTemplate)) - | loc, This -> - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.This)) - | loc, Yield _ -> - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Yield)) + | (loc, This) -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.This)) + | (loc, Yield _) -> + Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Yield))) + + and identifier stuff = + let (_, { Ast.Identifier.name; comments = _ }) = stuff in + Deps.value name + + and member loc stuff = + Ast.Expression.Member.( + let { _object; property; _ } = stuff in + let deps = + match _object with + | (_, Ast.Expression.Identifier stuff) -> identifier stuff + | (_, Ast.Expression.Member stuff) -> member loc stuff + | _ -> Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Member)) + in + match property with + | PropertyIdentifier _ + | PropertyPrivateName _ -> + deps + | PropertyExpression (key_loc, _) -> Deps.top (Error.UnexpectedObjectKey (loc, key_loc))) and arith_unary tps operator loc argument = - let open Ast.Expression.Unary in - match operator with - | Minus + Ast.Expression.Unary.( + match operator with | Plus - | Not | BitNot | Typeof | Void - | Delete - -> + | Delete -> (* These operations have simple result types. 
*) - ignore tps; ignore argument; Deps.bot + ignore tps; + ignore argument; + Deps.bot + | Minus + | Not -> + (* TODO: These operations are evaluated by Flow; they may or may not have simple result + types. Ideally we'd be verifying the argument. Unfortunately, we don't (see below). The + generator does some basic constant evaluation to compensate, but it's not enough. *) + ignore tps; + ignore argument; + Deps.bot | Await -> (* The result type of this operation depends in a complicated way on the argument type. *) - Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Unary)) + Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Unary))) - and arith_binary tps operator _loc left right = - let open Ast.Expression.Binary in - match operator with - | Plus -> - let deps = literal_expr tps left in - Deps.join (deps, literal_expr tps right) + and arith_binary tps operator loc left right = + Ast.Expression.Binary.( + match operator with | Equal | NotEqual | StrictEqual @@ -413,157 +560,278 @@ module Eval(Env: EvalEnv) = struct | Xor | BitAnd | In - | Instanceof - -> + | Instanceof -> (* These operations have simple result types. *) - ignore left; ignore right; Deps.bot + ignore tps; + ignore left; + ignore right; + Deps.bot + | Plus -> + (* The result type of this operation depends in a complicated way on the left/right types. *) + Deps.top (Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Binary))) - and function_ = - let open Ast.Function in - let function_params tps params = - let _, { Params.params; rest; } = params in - let deps = List.fold_left (Deps.reduce_join (pattern tps)) Deps.bot params in + and function_param tps (_, { Ast.Function.Param.argument; default = _ }) = pattern tps argument + + and function_rest_param tps (_, { Ast.Function.RestParam.argument }) = pattern tps argument + + and function_params tps params = + Ast.Function.( + let (_, { Params.params; rest }) = params in + let deps = List.fold_left (Deps.reduce_join (function_param tps)) Deps.bot params in match rest with - | None -> deps - | Some (_, { RestElement.argument }) -> Deps.join (deps, pattern tps argument) + | None -> deps + | Some param -> Deps.join (deps, function_rest_param tps param)) - in fun tps generator tparams params (loc, return) body -> - let tps, deps = type_params tps tparams in - let deps = Deps.join (deps, function_params tps params) in - match return with - | Missing _loc -> - if not generator && Signature_utils.Procedure_decider.is body then deps - else Deps.top (Error.ExpectedAnnotation loc) - | Available annot -> Deps.join (deps, annotation tps (loc, Kind.Annot_path.mk_annot (Some annot))) + and function_return tps ~is_missing_ok return = + match return with + | Ast.Type.Missing loc -> + if is_missing_ok () then + Deps.bot + else + Deps.top (Error.ExpectedAnnotation (loc, EASort.FunctionReturn)) + | Ast.Type.Available (_, t) -> type_ tps t + + and function_static tps (_id_prop, right) = literal_expr tps right + + and function_predicate params body predicate = + match (predicate, body) with + | (Some (_, Ast.Type.Predicate.Declared e), _) + | ( Some (_, Ast.Type.Predicate.Inferred), + ( Ast.Function.BodyBlock + ( _, + { + Ast.Statement.Block.body = + [(_, Ast.Statement.Return { Ast.Statement.Return.argument = Some e; _ })]; + } ) + | Ast.Function.BodyExpression e ) ) -> + let (_, { Ast.Function.Params.params; _ }) = params in + let params = + List.fold_left + (fun acc param -> + let (_, { Ast.Function.Param.argument; _ }) = param in + match argument with + 
| ( _, + Ast.Pattern.Identifier + { Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name; _ }); _ } ) -> + name :: acc + | _ -> acc) + [] + params + in + let params = SSet.of_list params in + predicate_expression params e + | _ -> + (* We check for the form of the body of predicate functions in file_sig.ml *) + Deps.bot + + and declare_function_predicate t predicate = + match (t, predicate) with + | ( ( _, + Ast.Type.Function + { Ast.Type.Function.params = (_, { Ast.Type.Function.Params.params; _ }); _ } ), + Some (_, Ast.Type.Predicate.Declared e) ) -> + let params = + List.fold_left + (fun acc param -> + match param with + | (_, { Ast.Type.Function.Param.name = Some (_, { Ast.Identifier.name; _ }); _ }) -> + name :: acc + | _ -> acc) + [] + params + in + let params = SSet.of_list params in + predicate_expression params e + | _ -> + (* TODO better error messages when the predicate is ignored *) + Deps.bot + + and function_ tps generator tparams params return body predicate = + let (tps, deps) = type_params tps tparams in + let deps = Deps.join (deps, function_params tps params) in + let deps = + let is_missing_ok () = (not generator) && Signature_utils.Procedure_decider.is body in + Deps.join (deps, function_return tps ~is_missing_ok return) + in + let deps = Deps.join (deps, function_predicate params body predicate) in + deps and class_ = let class_element tps element = - let open Ast.Class in - match Env.prevent_munge, element with - | false, Body.Method (_, { Method.key = (ObjProp.Identifier (_, name)); _ }) - | false, Body.Property (_, { Property.key = (ObjProp.Identifier (_, name)); _ }) - when Signature_utils.is_munged_property_name name -> + Ast.Class.( + match element with + (* special cases *) + | Body.Method + ( _, + { + Method.key = + Ast.Expression.Object.Property.Identifier + (_, { Ast.Identifier.name; comments = _ }); + _; + } ) + | Body.Property + ( _, + { + Property.key = + Ast.Expression.Object.Property.Identifier + (_, { Ast.Identifier.name; comments = _ }); + _; + } ) + when (not Env.prevent_munge) && Signature_utils.is_munged_property_name name -> Deps.bot - | _, Body.Property (_, { Property.key = (ObjProp.Identifier (_, "propTypes")); static = true; _ }) -> + | Body.Property + ( _, + { + Property.key = + Ast.Expression.Object.Property.Identifier + (_, { Ast.Identifier.name = "propTypes"; comments = _ }); + static = true; + _; + } ) + when Env.ignore_static_propTypes -> Deps.bot - | _, Body.Method (_, { Method.value; _ }) -> - let loc, { Ast.Function.generator; tparams; params; return; body; _ } = value in - function_ tps generator tparams params (loc, return) body - | _, Body.Property (loc, { Property.annot; _ }) - | _, Body.PrivateField (loc, { PrivateField.annot; _ }) -> - annotation tps (loc, Kind.Annot_path.mk_annot annot) - - in fun tparams body super super_targs implements -> - let open Ast.Class in - let _, { Body.body } = body in - let tps, deps = type_params SSet.empty tparams in - let deps = List.fold_left (Deps.reduce_join (class_element tps)) deps body in - let deps = match super with - | None -> deps - | Some expr -> Deps.join (deps, literal_expr tps expr) in - let deps = Deps.join (deps, type_args tps super_targs) in - List.fold_left (Deps.reduce_join (implement tps)) deps implements + (* general cases *) + | Body.Method (_, { Method.value; _ }) -> + let (_, { Ast.Function.generator; tparams; params; return; body; predicate; _ }) = + value + in + function_ tps generator tparams params return body predicate + | Body.Property (loc, { Property.annot; 
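(* Editor's note: function_predicate above collects the parameter names into a
   string set and then scans the predicate expression; identifiers that are not
   parameters become value dependencies, while unsupported expression forms are
   turned into errors by the visitor. A stand-alone sketch of the identifier
   part over an invented expression type. *)
module SSet = Set.Make (String)

type expr =
  | Ident of string
  | Logical of expr * expr    (* e.g. `a && b` *)
  | Call of expr * expr list  (* callee and arguments *)
  | Literal

let rec free_idents params = function
  | Ident name -> if SSet.mem name params then SSet.empty else SSet.singleton name
  | Logical (l, r) -> SSet.union (free_idents params l) (free_idents params r)
  | Call (callee, args) ->
    List.fold_left
      (fun acc arg -> SSet.union acc (free_idents params arg))
      (free_idents params callee)
      args
  | Literal -> SSet.empty

(* With parameter set {"x"}, the predicate `check(x) && x` depends only on
   `check`:
   free_idents (SSet.singleton "x")
     (Logical (Call (Ident "check", [Ident "x"]), Ident "x"))
   = SSet.singleton "check" *)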
key; _ }) -> + annotated_type ~sort:(EASort.Property key) tps loc annot + | Body.PrivateField (loc, { PrivateField.key; annot; _ }) -> + annotated_type ~sort:(EASort.PrivateField key) tps loc annot) + in + fun tparams body super super_targs implements -> + Ast.Class.( + let (_, { Body.body }) = body in + let (tps, deps) = type_params SSet.empty tparams in + let deps = List.fold_left (Deps.reduce_join (class_element tps)) deps body in + let deps = + match super with + | None -> deps + | Some expr -> Deps.join (deps, literal_expr tps expr) + in + let deps = Deps.join (deps, type_args tps super_targs) in + List.fold_left (Deps.reduce_join (implement tps)) deps implements) and array_ = - let spread_element tps spread_element = - let open Ast.Expression.SpreadElement in - let _, { argument } = spread_element in - literal_expr tps argument - in - let array_element tps expr_or_spread_opt = - let open Ast.Expression in - match expr_or_spread_opt with - | None -> Deps.bot + let array_element tps loc expr_or_spread_opt = + Ast.Expression.( + match expr_or_spread_opt with + | None -> Deps.top (Error.UnexpectedArrayHole loc) | Some (Expression expr) -> literal_expr tps expr - | Some (Spread spread) -> spread_element tps spread + | Some (Spread (spread_loc, _spread)) -> + Deps.top (Error.UnexpectedArraySpread (loc, spread_loc))) in - fun tps elements -> - List.fold_left (Deps.reduce_join (array_element tps)) Deps.bot elements + fun tps loc elements -> + Nel.fold_left (Deps.reduce_join (array_element tps loc)) Deps.bot elements and implement tps implement = - let open Ast.Class.Implements in - let _, { id = (_, name); targs } = implement in - let deps = if SSet.mem name tps then Deps.bot else Deps.type_ name in - Deps.join (deps, type_args tps targs) + Ast.Class.Implements.( + let (_, { id = (_, { Ast.Identifier.name; comments = _ }); targs }) = implement in + let deps = + if SSet.mem name tps then + Deps.bot + else + Deps.type_ name + in + Deps.join (deps, type_args tps targs)) and object_ = - let object_property tps = - let open Ast.Expression.Object.Property in - let object_key (loc, key) = - let open Ast.Expression.Object.Property in - match key with - | Literal _ - | Identifier _ - | PrivateName _ -> Deps.bot - | Computed _ -> Deps.top (Error.UnexpectedObjectKey loc) - in function - | loc, Init { key; value; _ } -> + let object_property tps loc = + Ast.Expression.Object.Property.( + let object_key (key_loc, key) = + Ast.Expression.Object.Property.( + match key with + | Literal _ + | Identifier _ + | PrivateName _ -> + Deps.bot + | Computed _ -> Deps.top (Error.UnexpectedObjectKey (loc, key_loc))) + in + function + | (loc, Init { key; value; _ }) -> let deps = object_key (loc, key) in Deps.join (deps, literal_expr tps value) - | loc, Method { key; value = (fn_loc, fn) } - | loc, Get { key; value = (fn_loc, fn) } - | loc, Set { key; value = (fn_loc, fn) } - -> + | (loc, Method { key; value = (_, fn) }) + | (loc, Get { key; value = (_, fn) }) + | (loc, Set { key; value = (_, fn) }) -> let deps = object_key (loc, key) in - let open Ast.Function in - let { generator; tparams; params; return; body; _ } = fn in - Deps.join (deps, function_ tps generator tparams params (fn_loc, return) body) + let { Ast.Function.generator; tparams; params; return; body; predicate; _ } = fn in + Deps.join (deps, function_ tps generator tparams params return body predicate)) in - let object_spread_property tps p = - let open Ast.Expression.Object.SpreadProperty in - let _, { argument } = p in - literal_expr tps argument + 
let object_spread_property tps prop = + Ast.Expression.Object.SpreadProperty.( + let (_, { argument }) = prop in + literal_expr tps argument) in - fun tps properties -> - let open Ast.Expression.Object in - List.fold_left (fun deps prop -> - match prop with - | Property p -> Deps.join (deps, object_property tps p) - | SpreadProperty p -> Deps.join (deps, object_spread_property tps p) - ) Deps.bot properties - + fun tps loc properties -> + Ast.Expression.Object.( + List.fold_left + (fun deps prop -> + match prop with + | Property p -> Deps.join (deps, object_property tps loc p) + | SpreadProperty p -> Deps.join (deps, object_spread_property tps p)) + Deps.bot + properties) end -module Verifier(Env: EvalEnv) = struct +module Verifier (Env : EvalEnv) = struct + module Eval = Eval (Env) - module Eval = Eval(Env) - - let eval (loc, kind) = + let rec eval id_loc (loc, kind) = match kind with - | Kind.VariableDef { annot; init } -> - Eval.annotation ?init SSet.empty (loc, annot) - | Kind.FunctionDef { generator; tparams; params; return; body; } -> - Eval.function_ SSet.empty generator tparams params (loc, return) body - | Kind.DeclareFunctionDef { annot = (_, t) } -> - Eval.type_ SSet.empty t - | Kind.ClassDef { tparams; body; super; super_targs; implements } -> - Eval.class_ tparams body super super_targs implements - | Kind.DeclareClassDef { tparams; body; extends; mixins; implements } -> - let tps, deps = Eval.type_params SSet.empty tparams in - let deps = Deps.join (deps, Eval.object_type tps body) in - let deps = match extends with - | None -> deps - | Some r -> Deps.join (deps, Eval.value_ref tps r) in - let deps = List.fold_left (Deps.reduce_join (Eval.value_ref tps)) deps mixins in - List.fold_left (Deps.reduce_join (Eval.implement tps)) deps implements - | Kind.TypeDef { tparams; right } -> - let tps, deps = Eval.type_params SSet.empty tparams in - Deps.join (deps, Eval.type_ tps right) - | Kind.OpaqueTypeDef { tparams; impltype; supertype } -> - let tps, deps = Eval.type_params SSet.empty tparams in - Deps.join (deps, Eval.opaque_type tps impltype supertype) - | Kind.InterfaceDef { tparams; body; extends } -> - let tps, deps = Eval.type_params SSet.empty tparams in - let deps = Deps.join (deps, Eval.object_type tps body) in - List.fold_left (Deps.reduce_join (Eval.type_ref tps)) deps extends - | Kind.ImportNamedDef { kind; source; name } -> - Deps.import_named (Kind.Sort.of_import_kind kind) source name - | Kind.ImportStarDef { kind; source } -> - Deps.import_star (Kind.Sort.of_import_kind kind) source - | Kind.RequireDef { source } -> - Deps.require source - | Kind.SketchyToplevelDef -> - Deps.top (Deps.Error.SketchyToplevelDef loc) + | Kind.WithPropertiesDef { base; properties } -> + begin + match Kind.get_function_kind_info base with + | Some (generator, _async, tparams, params, return, body) -> + let deps = Eval.function_ SSet.empty generator tparams params return body None in + let deps = + List.fold_left + (Deps.reduce_join (fun (_id_prop, expr) -> Eval.literal_expr SSet.empty expr)) + deps + properties + in + deps + | None -> eval id_loc (loc, base) + end + | Kind.VariableDef { id; annot; init } -> + Eval.annotation ~sort:(EASort.VariableDefinition id) ?init SSet.empty (id_loc, annot) + | Kind.FunctionDef { generator; async = _; tparams; params; return; body; predicate } -> + Eval.function_ SSet.empty generator tparams params return body predicate + | Kind.DeclareFunctionDef { annot = (_, t); predicate } -> + let deps = Eval.type_ SSet.empty t in + let deps = Deps.join (deps, 
Eval.declare_function_predicate t predicate) in + deps + | Kind.ClassDef { tparams; body; super; super_targs; implements } -> + Eval.class_ tparams body super super_targs implements + | Kind.DeclareClassDef { tparams; body = (_, body); extends; mixins; implements } -> + let (tps, deps) = Eval.type_params SSet.empty tparams in + let deps = Deps.join (deps, Eval.object_type tps body) in + let deps = + match extends with + | None -> deps + | Some r -> Deps.join (deps, Eval.value_ref tps r) + in + let deps = List.fold_left (Deps.reduce_join (Eval.value_ref tps)) deps mixins in + List.fold_left (Deps.reduce_join (Eval.implement tps)) deps implements + | Kind.TypeDef { tparams; right } -> + let (tps, deps) = Eval.type_params SSet.empty tparams in + Deps.join (deps, Eval.type_ tps right) + | Kind.OpaqueTypeDef { tparams; impltype; supertype } -> + let (tps, deps) = Eval.type_params SSet.empty tparams in + let deps = Deps.join (deps, Eval.type_opt tps impltype) in + Deps.join (deps, Eval.type_opt tps supertype) + | Kind.InterfaceDef { tparams; body = (_, body); extends } -> + let (tps, deps) = Eval.type_params SSet.empty tparams in + let deps = Deps.join (deps, Eval.object_type tps body) in + List.fold_left (Deps.reduce_join (Eval.type_ref tps)) deps extends + | Kind.ImportNamedDef { kind; source; name } -> + Deps.import_named (Kind.Sort.of_import_kind kind) source name + | Kind.ImportStarDef { kind; source } -> + Deps.import_star (Kind.Sort.of_import_kind kind) source + | Kind.RequireDef { source; name } -> Deps.require ?name source + | Kind.SketchyToplevelDef -> Deps.top (Deps.Error.SketchyToplevelDef loc) let cjs_exports = let tps = SSet.empty in @@ -572,231 +840,257 @@ module Verifier(Env: EvalEnv) = struct | File_sig.AddModuleExportsDef (_id, expr) -> Eval.literal_expr tps expr | File_sig.DeclareModuleExportsDef (_loc, t) -> Eval.type_ tps t - let eval_entry ((loc, _), kind) = - eval (loc, kind) + let eval_entry (id, kind) = + let (loc, _) = id in + eval loc kind - let eval_declare_variable declare_variable = - eval_entry (Entry.declare_variable declare_variable) + let eval_declare_variable loc declare_variable = + eval_entry (Entry.declare_variable loc declare_variable) - let eval_declare_function declare_function = - eval_entry (Entry.declare_function declare_function) + let eval_declare_function loc declare_function = + eval_entry (Entry.declare_function loc declare_function) - let eval_declare_class declare_class = - eval_entry (Entry.declare_class declare_class) + let eval_declare_class loc declare_class = eval_entry (Entry.declare_class loc declare_class) - let eval_type_alias type_alias = - eval_entry (Entry.type_alias type_alias) + let eval_type_alias loc type_alias = eval_entry (Entry.type_alias loc type_alias) - let eval_opaque_type opaque_type = - eval_entry (Entry.opaque_type opaque_type) + let eval_opaque_type loc opaque_type = eval_entry (Entry.opaque_type loc opaque_type) - let eval_interface interface = - eval_entry (Entry.interface interface) + let eval_interface loc interface = eval_entry (Entry.interface loc interface) let eval_function_declaration loc function_declaration = - let _, kind = Entry.function_declaration function_declaration in - eval (loc, kind) - - let eval_class loc class_ = - let _, kind = Entry.class_ class_ in - eval (loc, kind) - - let eval_variable_declaration variable_declaration = - List.fold_left (Deps.reduce_join eval_entry) Deps.bot @@ - Entry.variable_declaration variable_declaration - - let eval_stmt = Ast.Statement.(function - | _, 
VariableDeclaration variable_declaration -> eval_variable_declaration variable_declaration - | _, DeclareVariable declare_variable -> eval_declare_variable declare_variable - | loc, FunctionDeclaration function_declaration -> eval_function_declaration loc function_declaration - | _, DeclareFunction declare_function -> eval_declare_function declare_function - | loc, ClassDeclaration class_ -> eval_class loc class_ - | _, DeclareClass declare_class -> eval_declare_class declare_class - | _, TypeAlias type_alias -> eval_type_alias type_alias - | _, DeclareTypeAlias type_alias -> eval_type_alias type_alias - | _, OpaqueType opaque_type -> eval_opaque_type opaque_type - | _, DeclareOpaqueType opaque_type -> eval_opaque_type opaque_type - | _, InterfaceDeclaration interface -> eval_interface interface - | _, DeclareInterface interface -> eval_interface interface - - | _, Expression _ - | _, DeclareExportDeclaration _ - | _, ExportDefaultDeclaration _ - | _, ExportNamedDeclaration _ - | _, ImportDeclaration _ - | _, Block _ - | _, Break _ - | _, Continue _ - | _, Debugger - | _, DeclareModule _ - | _, DeclareModuleExports _ - | _, DoWhile _ - | _, Empty - | _, For _ - | _, ForIn _ - | _, ForOf _ - | _, If _ - | _, Labeled _ - | _, Return _ - | _, Switch _ - | _, Throw _ - | _, Try _ - | _, While _ - | _, With _ - -> assert false - ) - - let eval_declare_export_declaration = Ast.Statement.DeclareExportDeclaration.(function - | Variable (_, declare_variable) -> eval_declare_variable declare_variable - | Function (_, declare_function) -> eval_declare_function declare_function - | Class (_, declare_class) -> eval_declare_class declare_class - | NamedType (_, type_alias) -> eval_type_alias type_alias - | NamedOpaqueType (_, opaque_type) -> eval_opaque_type opaque_type - | Interface (_, interface) -> eval_interface interface - | DefaultType t -> Eval.type_ SSet.empty t - ) - - let eval_export_default_declaration = Ast.Statement.ExportDefaultDeclaration.(function - | Declaration (loc, Ast.Statement.FunctionDeclaration - ({ Ast.Function.id = Some _; _ } as function_declaration) - ) -> - eval_function_declaration loc function_declaration - | Declaration (loc, Ast.Statement.ClassDeclaration ({ Ast.Class.id = Some _; _ } as class_)) -> - eval_class loc class_ - | Declaration stmt -> eval_stmt stmt - | Expression (loc, Ast.Expression.Function ({ Ast.Function.id = Some _; _ } as function_)) -> - eval_function_declaration loc function_ - | Expression expr -> Eval.literal_expr SSet.empty expr - ) + eval_entry (Entry.function_declaration loc function_declaration) + + let eval_function_expression loc function_expression = + eval_entry (Entry.function_expression loc function_expression) + + let eval_class loc class_ = eval_entry (Entry.class_ loc class_) + + let eval_declare_export_declaration = + Ast.Statement.DeclareExportDeclaration.( + function + | Variable (loc, declare_variable) -> eval_declare_variable loc declare_variable + | Function (loc, declare_function) -> eval_declare_function loc declare_function + | Class (loc, declare_class) -> eval_declare_class loc declare_class + | NamedType (loc, type_alias) -> eval_type_alias loc type_alias + | NamedOpaqueType (loc, opaque_type) -> eval_opaque_type loc opaque_type + | Interface (loc, interface) -> eval_interface loc interface + | DefaultType t -> Eval.type_ SSet.empty t) + + let eval_export_default_declaration = + Ast.Statement.ExportDefaultDeclaration.( + function + | Declaration + ( loc, + Ast.Statement.FunctionDeclaration + ({ Ast.Function.id = Some _; _ } 
as function_declaration) ) -> + eval_function_declaration loc function_declaration + | Declaration + ( _, + Ast.Statement.FunctionDeclaration + { Ast.Function.id = None; generator; tparams; params; return; body; predicate; _ } ) + -> + Eval.function_ SSet.empty generator tparams params return body predicate + | Declaration (loc, Ast.Statement.ClassDeclaration ({ Ast.Class.id = Some _; _ } as class_)) + -> + eval_class loc class_ + | Declaration + ( _, + Ast.Statement.ClassDeclaration + { Ast.Class.id = None; tparams; body; extends; implements; _ } ) -> + let (super, super_targs) = + match extends with + | None -> (None, None) + | Some (_, { Ast.Class.Extends.expr; targs }) -> (Some expr, targs) + in + Eval.class_ tparams body super super_targs implements + | Declaration _stmt -> Deps.unreachable + | Expression (loc, Ast.Expression.Function ({ Ast.Function.id = Some _; _ } as function_)) -> + eval_function_expression loc function_ + | Expression expr -> Eval.literal_expr SSet.empty expr) let eval_export_value_bindings named named_infos = - let open File_sig in - SMap.fold (fun n export deps -> - Deps.join ( - deps, - let export_def = SMap.get n named_infos in - match export, export_def with - | ExportDefault _, Some (DeclareExportDef decl) -> - eval_declare_export_declaration decl - | ExportNamed { kind = NamedDeclaration; _ }, Some (DeclareExportDef decl) -> - eval_declare_export_declaration decl - | ExportDefault _, Some (ExportDefaultDef decl) -> - eval_export_default_declaration decl - | ExportNamed { kind = NamedDeclaration; _ }, Some (ExportNamedDef stmt) -> - eval_stmt stmt - | ExportNamed { kind = NamedSpecifier { local; source }; _ }, None -> - begin match source with - | None -> Deps.value (snd local) - | Some source -> - Deps.import_named Kind.Sort.Value source local - end - | ExportNs { source; _ }, None -> - Deps.import_star Kind.Sort.Value source - | _ -> assert false - ) - ) named Deps.bot + File_sig.( + let (named, ns) = + List.partition + (function + | (_, (_, ExportNamed { kind = NamedSpecifier _; _ })) + | (_, (_, ExportNs _)) -> + false + | (_, (_, _)) -> true) + named + in + let deps = + List.fold_left2 + (fun deps (n, (_, export)) export_def -> + Deps.join + ( deps, + match (export, export_def) with + | (ExportDefault { local; _ }, DeclareExportDef decl) -> + begin + match local with + | Some id -> Deps.value (snd id) + | None -> eval_declare_export_declaration decl + end + | (ExportNamed { kind = NamedDeclaration; _ }, DeclareExportDef _decl) -> + Deps.value n + | (ExportDefault { local; _ }, ExportDefaultDef decl) -> + begin + match local with + | Some id -> Deps.value (snd id) + | None -> eval_export_default_declaration decl + end + | (ExportNamed { kind = NamedDeclaration; _ }, ExportNamedDef _stmt) -> + Deps.value n + | _ -> assert false )) + Deps.bot + named + named_infos + in + List.fold_left + (fun deps (_, (_, export)) -> + Deps.join + ( deps, + match export with + | ExportNamed { kind = NamedSpecifier { local; source }; _ } -> + begin + match source with + | None -> Deps.value (snd local) + | Some source -> Deps.import_named Kind.Sort.Value source local + end + | ExportNs { source; _ } -> Deps.import_star Kind.Sort.Value source + | _ -> assert false )) + deps + ns) let eval_export_type_bindings type_named type_named_infos = - let open File_sig in - SMap.fold (fun n export deps -> - Deps.join ( - deps, - let export_def = SMap.get n type_named_infos in - match export, export_def with - | TypeExportNamed { kind = NamedDeclaration; _ }, Some (DeclareExportDef 
decl) -> - eval_declare_export_declaration decl - | TypeExportNamed { kind = NamedDeclaration; _ }, Some (ExportNamedDef stmt) -> - eval_stmt stmt - | TypeExportNamed { kind = NamedSpecifier { local; source }; _ }, None -> - begin match source with - | None -> Deps.type_ (snd local) - | Some source -> - Deps.import_named Kind.Sort.Type source local - end - | _ -> assert false - ) - ) type_named Deps.bot + File_sig.( + let (type_named, type_ns) = + List.partition + (function + | (_, (_, TypeExportNamed { kind = NamedSpecifier _; _ })) -> false + | (_, (_, _)) -> true) + type_named + in + let deps = + List.fold_left2 + (fun deps (n, (_, export)) export_def -> + Deps.join + ( deps, + match (export, export_def) with + | (TypeExportNamed { kind = NamedDeclaration; _ }, DeclareExportDef _decl) -> + Deps.type_ n + | (TypeExportNamed { kind = NamedDeclaration; _ }, ExportNamedDef _stmt) -> + Deps.type_ n + | _ -> assert false )) + Deps.bot + type_named + type_named_infos + in + List.fold_left + (fun deps (_, (_, export)) -> + Deps.join + ( deps, + match export with + | TypeExportNamed { kind = NamedSpecifier { local; source }; _ } -> + begin + match source with + | None -> Deps.type_ (snd local) + | Some source -> Deps.import_named Kind.Sort.Type source local + end + | _ -> assert false )) + deps + type_ns) let exports file_sig = - let open File_sig in - let module_sig = file_sig.module_sig in - let { - info = exports_info; - module_kind; - type_exports_named; - _ - } = module_sig in - let { module_kind_info; type_exports_named_info } = exports_info in - let deps = match module_kind, module_kind_info with - | CommonJS _, CommonJSInfo cjs_exports_defs -> - List.fold_left (Deps.reduce_join cjs_exports) Deps.bot cjs_exports_defs - | ES { named; _ }, ESInfo named_infos -> - eval_export_value_bindings named named_infos - | _ -> assert false - in - Deps.join ( - deps, - eval_export_type_bindings type_exports_named type_exports_named_info - ) + File_sig.( + let module_sig = file_sig.module_sig in + let { info = exports_info; module_kind; type_exports_named; _ } = module_sig in + let { module_kind_info; type_exports_named_info } = exports_info in + let deps = + match (module_kind, module_kind_info) with + | (CommonJS _, CommonJSInfo cjs_exports_defs) -> + List.fold_left (Deps.reduce_join cjs_exports) Deps.bot cjs_exports_defs + | (ES { named; _ }, ESInfo named_infos) -> eval_export_value_bindings named named_infos + | _ -> assert false + in + Deps.join (deps, eval_export_type_bindings type_exports_named type_exports_named_info)) - let dynamic_validator (dynamic_imports, dynamic_requires) = function + let dynamic_validator env (dynamic_imports, dynamic_requires) = function + | Dep.Class (loc, x) -> + if SMap.mem x env then + Deps.top (Deps.Error.SketchyToplevelDef loc) + else + Deps.bot | Dep.DynamicImport loc -> - begin match LocMap.get loc dynamic_imports with + begin + match LocMap.get loc dynamic_imports with | None -> Deps.top (Deps.Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Import)) | Some source -> Deps.import_star Kind.Sort.Value source end | Dep.DynamicRequire loc -> - begin match LocMap.get loc dynamic_requires with + begin + match LocMap.get loc dynamic_requires with | None -> Deps.top (Deps.Error.UnexpectedExpression (loc, Ast_utils.ExpressionSort.Call)) | Some source -> Deps.require source end let validate_and_eval env dynamic_sources dep = match dep with - | Dep.Local local -> - let sort, x = local in - begin match SMap.get x env with - | Some entries -> - let validate = 
Kind.validator sort in - Utils_js.LocMap.fold (fun loc kind deps -> - Deps.join ( - deps, - if validate kind then eval (loc, kind) - else Deps.top (Dep.expectation sort x loc) - ) - ) entries Deps.bot - | None -> Deps.global local - end - | Dep.Remote _ -> Deps.unit dep - | Dep.Dynamic dynamic -> dynamic_validator dynamic_sources dynamic + | Dep.Local local -> + let (sort, x) = local in + begin + match SMap.get x env with + | Some entries -> + let validate = Kind.validator sort in + Loc_collections.LocMap.fold + (fun loc kind deps -> + Deps.join + ( deps, + if validate (snd kind) then + eval loc kind + else + Deps.top (Dep.expectation sort x loc) )) + entries + Deps.bot + | None -> Deps.global local + end + | Dep.Remote _ -> Deps.unit dep + | Dep.Dynamic dynamic -> dynamic_validator env dynamic_sources dynamic let rec check cache env dynamic_sources deps = Deps.recurse (check_dep cache env dynamic_sources) deps and check_dep cache env dynamic_sources dep = - if Deps.DepSet.mem dep !cache then Deps.ErrorSet.empty - else begin + if Deps.DepSet.mem dep !cache then + Deps.PrintableErrorSet.empty + else ( cache := Deps.DepSet.add dep !cache; check cache env dynamic_sources (validate_and_eval env dynamic_sources dep) - end + ) let check env file_sig deps = let cache = ref Deps.DepSet.empty in let dynamic_sources = - let open File_sig in - let requires = file_sig.module_sig.requires in - let dynamic_imports = ref LocMap.empty in - let dynamic_requires = ref LocMap.empty in - List.iter (function - | ImportDynamic { source; import_loc } -> - dynamic_imports := LocMap.add import_loc source !dynamic_imports - | Require { source; require_loc; bindings = None } -> - dynamic_requires := LocMap.add require_loc source !dynamic_requires - | _ -> () - ) requires; - !dynamic_imports, !dynamic_requires in + File_sig.( + let requires = file_sig.module_sig.requires in + let dynamic_imports = ref LocMap.empty in + let dynamic_requires = ref LocMap.empty in + List.iter + (function + | ImportDynamic { source; import_loc } -> + dynamic_imports := LocMap.add import_loc source !dynamic_imports + | Require { source; require_loc; bindings = None } -> + dynamic_requires := LocMap.add require_loc source !dynamic_requires + | _ -> ()) + requires; + (!dynamic_imports, !dynamic_requires)) + in let errors = check cache env dynamic_sources deps in let remote_dependencies = Deps.DepSet.filter Dep.remote !cache in - errors, remote_dependencies - + let env = + let local_uses = Deps.DepSet.fold Dep.local_uses !cache SSet.empty in + SMap.filter (fun n _ -> SSet.mem n local_uses) env + in + (errors, remote_dependencies, env) end diff --git a/src/parser_utils/signature_utils.ml b/src/parser_utils/signature_utils.ml index c10d9667b4d..6a6e055213f 100644 --- a/src/parser_utils/signature_utils.ml +++ b/src/parser_utils/signature_utils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,41 +8,36 @@ open Flow_ast_visitor module Procedure_decider = struct - class decider = object(this) - inherit [bool] visitor ~init:true - - method private no = - this#update_acc (fun _ -> false) - - method! function_ _loc (expr: (Loc.t, Loc.t) Flow_ast.Function.t) = - expr - - method! 
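(* Editor's note: check/check_dep above thread a mutable DepSet so each
   dependency is expanded at most once, which is what makes the traversal
   terminate on cyclic dependency graphs. The same pattern over a plain
   string-keyed graph; the representation below is invented for the sketch. *)
module SSet = Set.Make (String)
module SMap = Map.Make (String)

let reachable (graph : string list SMap.t) (roots : string list) : SSet.t =
  let visited = ref SSet.empty in
  let rec visit name =
    if not (SSet.mem name !visited) then begin
      visited := SSet.add name !visited;
      match SMap.find_opt name graph with
      | Some deps -> List.iter visit deps
      | None -> ()  (* unknown name: treated like a remote/global dependency *)
    end
  in
  List.iter visit roots;
  !visited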
return _loc (stmt: (Loc.t, Loc.t) Flow_ast.Statement.Return.t) = - let open Flow_ast.Statement.Return in - let { argument } = stmt in - begin match argument with - | None -> () - | Some _ -> this#no - end; - stmt - - method! function_body_any (body: (Loc.t, Loc.t) Flow_ast.Function.body) = - begin match body with - | Flow_ast.Function.BodyBlock (loc, block) -> - ignore @@ this#function_body loc block - | Flow_ast.Function.BodyExpression _ -> - this#no - end; - body - - end - - let is (body: (Loc.t, Loc.t) Flow_ast.Function.body) = + class decider = + object (this) + inherit [bool, Loc.t] visitor ~init:true + + method private no = this#update_acc (fun _ -> false) + + method! function_ _loc (expr : (Loc.t, Loc.t) Flow_ast.Function.t) = expr + + method! return _loc (stmt : (Loc.t, Loc.t) Flow_ast.Statement.Return.t) = + Flow_ast.Statement.Return.( + let { argument; comments = _ } = stmt in + begin + match argument with + | None -> () + | Some _ -> this#no + end; + stmt) + + method! function_body_any (body : (Loc.t, Loc.t) Flow_ast.Function.body) = + begin + match body with + | Flow_ast.Function.BodyBlock (loc, block) -> ignore @@ this#function_body loc block + | Flow_ast.Function.BodyExpression _ -> this#no + end; + body + end + + let is (body : (Loc.t, Loc.t) Flow_ast.Function.body) = let decider = new decider in decider#eval decider#function_body_any body - end -let is_munged_property_name name = (String.length name >= 2) - && name.[0] = '_' - && name.[1] <> '_' +let is_munged_property_name name = String.length name >= 2 && name.[0] = '_' && name.[1] <> '_' diff --git a/src/parser_utils/ssa_api.ml b/src/parser_utils/ssa_api.ml index b6303180854..50e30739c26 100644 --- a/src/parser_utils/ssa_api.ml +++ b/src/parser_utils/ssa_api.ml @@ -1,37 +1,70 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module LocMap = Utils_js.LocMap +module type S = sig + module L : Loc_sig.S -type read_loc = Loc.t -type write_loc = - | Write of Loc.t - | Uninitialized -type write_locs = write_loc list -type values = write_locs LocMap.t + type read_loc = L.t -let uninitialized = Uninitialized + type write_loc = + | Write of L.t + | Uninitialized -let write_locs_of_read_loc values read_loc = - LocMap.find read_loc values + type write_locs = write_loc list -let is_dead_write_loc values loc = - not (LocMap.exists (fun _read_loc write_locs -> List.mem (Write loc) write_locs) values) + type values = write_locs L.LMap.t -let print_write_loc write_loc = - match write_loc with + val uninitialized : write_loc + + val write_locs_of_read_loc : values -> read_loc -> write_locs + + val is_dead_write_loc : values -> L.t -> bool +end + +module Make (L : Loc_sig.S) : S with module L = L = struct + module L = L + + type read_loc = L.t + + type write_loc = + | Write of L.t + | Uninitialized + + type write_locs = write_loc list + + type values = write_locs L.LMap.t + + let uninitialized = Uninitialized + + let write_locs_of_read_loc values read_loc = L.LMap.find read_loc values + + let is_dead_write_loc values loc = + not (L.LMap.exists (fun _read_loc write_locs -> List.mem (Write loc) write_locs) values) +end + +module With_Loc = Make (Loc_sig.LocS) +module With_ALoc = Make (Loc_sig.ALocS) +include With_Loc + +let print_values = + let print_write_loc write_loc = + match write_loc with | Uninitialized -> "(uninitialized)" - | Write loc -> Loc.to_string loc - -let print_values values = - let kvlist = LocMap.bindings values in - let strlist = List.map (fun (read_loc, write_locs) -> - Printf.sprintf "%s => { %s }" - (Loc.to_string read_loc) - (String.concat ", " @@ List.map print_write_loc write_locs) - ) kvlist in - Printf.sprintf "[ %s ]" (String.concat "; " strlist) + | Write loc -> Loc.debug_to_string loc + in + fun values -> + let kvlist = Loc_collections.LocMap.bindings values in + let strlist = + Core_list.map + ~f:(fun (read_loc, write_locs) -> + Printf.sprintf + "%s => { %s }" + (Loc.debug_to_string read_loc) + (String.concat ", " @@ Core_list.map ~f:print_write_loc write_locs)) + kvlist + in + Printf.sprintf "[ %s ]" (String.concat "; " strlist) diff --git a/src/parser_utils/ssa_api.mli b/src/parser_utils/ssa_api.mli deleted file mode 100644 index 73437904493..00000000000 --- a/src/parser_utils/ssa_api.mli +++ /dev/null @@ -1,20 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Utils_js - -type read_loc = Loc.t -type write_loc = - | Write of Loc.t - | Uninitialized -type write_locs = write_loc list -type values = write_locs LocMap.t - -val uninitialized: write_loc -val write_locs_of_read_loc: values -> read_loc -> write_locs -val is_dead_write_loc: values -> Loc.t -> bool -val print_values: values -> string diff --git a/src/parser_utils/ssa_builder.ml b/src/parser_utils/ssa_builder.ml index 9745e54a5f7..093cf3c92c8 100644 --- a/src/parser_utils/ssa_builder.ml +++ b/src/parser_utils/ssa_builder.ml @@ -1,1014 +1,1126 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) module Ast = Flow_ast - open Hoister -open Scope_builder - -(* For every read of a variable x, we are interested in tracking writes to x - that can reach that read. Ultimately the writes are going to be represented - as a list of locations, where each location corresponds to a "single static - assignment" of the variable in the code. But for the purposes of analysis, it - is useful to represent these writes with a data type that contains either a - single write, or a "join" of writes (in compiler terminology, a PHI node), or - a reference to something that is unknown at a particular point in the AST - during traversal, but will be known by the time traversal is complete. *) -module Val : sig - type t - - val mk_unresolved: int -> t - val empty: t - val uninitialized: t - val merge: t -> t -> t - - val one: Loc.t -> t - val all: Loc.t list -> t - - val resolve: unresolved:t -> t -> unit - val simplify: t -> Ssa_api.write_loc list -end = struct - type ref_state = - (* different unresolved vars are distinguished by their ids, which enables using structural - equality for computing normal forms: see below *) - | Unresolved of int - | Resolved of t - - and t = - | Uninitialized - | Loc of Loc.t - | PHI of t list - | REF of ref_state ref - - let mk_unresolved id = - REF (ref (Unresolved id)) - - let empty = PHI [] - - let uninitialized = Uninitialized - - let join = function - | [] -> empty - | [t] -> t - | ts -> PHI ts - - module ValSet = Set.Make (struct - type nonrec t = t - let compare = Pervasives.compare - end) - - let rec normalize t = match t with - | Uninitialized - | Loc _ - | REF { contents = Unresolved _ } - -> ValSet.singleton t - | PHI ts -> - List.fold_left (fun vals' t -> + +module Make + (L : Loc_sig.S) + (Ssa_api : Ssa_api.S with module L = L) + (Scope_builder : Scope_builder_sig.S with module L = L) = +struct + open Scope_builder + + (* For every read of a variable x, we are interested in tracking writes to x + that can reach that read. Ultimately the writes are going to be represented + as a list of locations, where each location corresponds to a "single static + assignment" of the variable in the code. But for the purposes of analysis, it + is useful to represent these writes with a data type that contains either a + single write, or a "join" of writes (in compiler terminology, a PHI node), or + a reference to something that is unknown at a particular point in the AST + during traversal, but will be known by the time traversal is complete. 
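+
+     As a rough illustration (the locations l1 and l2 here are hypothetical),
+     consider a program such as
+
+       var x;            // declaration only, so x starts out Uninitialized
+       if (b) x = 1;     // write at l1
+       else x = 2;       // write at l2
+       x;                // read reached by both writes
+
+     The read of x is recorded against a term of the shape
+     PHI [Loc l1; Loc l2], and simplification at the end of the analysis
+     turns it into the write locations [Write l1; Write l2].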
*) + module Val : sig + type t + + val mk_unresolved : int -> t + + val empty : t + + val uninitialized : t + + val merge : t -> t -> t + + val one : L.t -> t + + val all : L.t list -> t + + val resolve : unresolved:t -> t -> unit + + val simplify : t -> Ssa_api.write_loc list + end = struct + type ref_state = + (* different unresolved vars are distinguished by their ids, which enables using structural + equality for computing normal forms: see below *) + | Unresolved of int + | Resolved of t + + and t = + | Uninitialized + | Loc of L.t + | PHI of t list + | REF of ref_state ref + + let mk_unresolved id = REF (ref (Unresolved id)) + + let empty = PHI [] + + let uninitialized = Uninitialized + + let join = function + | [] -> empty + | [t] -> t + | ts -> PHI ts + + module ValSet = Set.Make (struct + type nonrec t = t + + let compare = Pervasives.compare + end) + + let rec normalize t = + match t with + | Uninitialized + | Loc _ + | REF { contents = Unresolved _ } -> + ValSet.singleton t + | PHI ts -> + List.fold_left + (fun vals' t -> + let vals = normalize t in + ValSet.union vals' vals) + ValSet.empty + ts + | REF ({ contents = Resolved t } as r) -> let vals = normalize t in - ValSet.union vals' vals - ) ValSet.empty ts - | REF ({ contents = Resolved t } as r) -> - let vals = normalize t in - let t' = join (ValSet.elements vals) in - r := Resolved t'; - vals - - let merge t1 t2 = - (* Merging can easily lead to exponential blowup in size of terms if we're not careful. We - amortize costs by computing normal forms as sets of "atomic" terms, so that merging would - correspond to set union. (Atomic terms include Uninitialized, Loc _, and REF { contents = - Unresolved _ }.) Note that normal forms might change over time, as unresolved refs become - resolved; thus, we do not shortcut normalization of previously normalized terms. Still, we - expect (and have experimentally validated that) the cost of computing normal forms becomes - smaller over time as terms remain close to their final normal forms. *) - let vals = ValSet.union (normalize t1) (normalize t2) in - join (ValSet.elements vals) - - let one loc = - Loc loc - - let all locs = - join (List.map (fun loc -> Loc loc) locs) - - (* Resolving unresolved to t essentially models an equation of the form - unresolved = t, where unresolved is a reference to an unknown and t is the - known. Since the only non-trivial operation in t is joining, it is OK to - erase any occurrences of unresolved in t: if t = unresolved | t' then - unresolved = t is the same as unresolved = t'. *) - let rec resolve ~unresolved t = - match unresolved with - | REF ({ contents = Unresolved _ } as r) -> - r := Resolved (erase r t) + let t' = join (ValSet.elements vals) in + r := Resolved t'; + vals + + let merge t1 t2 = + (* Merging can easily lead to exponential blowup in size of terms if we're not careful. We + amortize costs by computing normal forms as sets of "atomic" terms, so that merging would + correspond to set union. (Atomic terms include Uninitialized, Loc _, and REF { contents = + Unresolved _ }.) Note that normal forms might change over time, as unresolved refs become + resolved; thus, we do not shortcut normalization of previously normalized terms. Still, we + expect (and have experimentally validated that) the cost of computing normal forms becomes + smaller over time as terms remain close to their final normal forms. 
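+
+     Roughly, with hypothetical locations l1 and l2, merging
+     PHI [Loc l1; PHI [Loc l1; Loc l2]] with Loc l2 normalizes both sides
+     to the same set of atoms, Loc l1 and Loc l2, so the result is a join
+     of just those two atoms rather than a term that keeps nesting and
+     duplicating work on every merge.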
*) + let vals = ValSet.union (normalize t1) (normalize t2) in + join (ValSet.elements vals) + + let one loc = Loc loc + + let all locs = join (Core_list.map ~f:(fun loc -> Loc loc) locs) + + (* Resolving unresolved to t essentially models an equation of the form + unresolved = t, where unresolved is a reference to an unknown and t is the + known. Since the only non-trivial operation in t is joining, it is OK to + erase any occurrences of unresolved in t: if t = unresolved | t' then + unresolved = t is the same as unresolved = t'. *) + let rec resolve ~unresolved t = + match unresolved with + | REF ({ contents = Unresolved _ } as r) -> r := Resolved (erase r t) | _ -> failwith "Only an unresolved REF can be resolved" - and erase r t = match t with - | Uninitialized -> t - | Loc _ -> t - | PHI ts -> - let ts' = ListUtils.ident_map (erase r) ts in - if ts' == ts then t else PHI ts' - | REF r' -> - if r == r' then empty - else begin - let t_opt = !r' in - let t_opt' = match t_opt with - | Unresolved _ -> t_opt - | Resolved t -> let t' = erase r t in if t == t' then t_opt else Resolved t' - in - if t_opt != t_opt' then r' := t_opt'; - t - end - - (* Simplification converts a Val.t to a list of locations. *) - let simplify t = - let vals = normalize t in - List.map (function - | Uninitialized -> Ssa_api.Uninitialized - | Loc loc -> Ssa_api.Write loc - | REF { contents = Unresolved _ } -> failwith "An unresolved REF cannot be simplified" - | PHI _ - | REF { contents = Resolved _ } - -> failwith "A normalized value cannot be a PHI or a resolved REF" - ) (ValSet.elements vals) -end - -(* An environment is a map from variables to values. *) -module Env = struct - type t = Val.t SMap.t -end - -(* Abrupt completions induce control flows, so modeling them accurately is - necessary for soundness. *) -module AbruptCompletion = struct - type label = string - type t = - | Break of label option - | Continue of label option - | Return - | Throw - - let label_opt = Option.map ~f:(fun (_loc, label) -> label) - - let break x = Break (label_opt x) - let continue x = Continue (label_opt x) - let return = Return - let throw = Throw - - (* match particular abrupt completions *) - let mem list: t -> bool = - fun t -> List.mem t list - (* match all abrupt completions *) - let all: t -> bool = - fun _t -> true - - (* Model an abrupt completion as an OCaml exception. *) - exception Exn of t - - (* An abrupt completion carries an environment, which is the current - environment at the point where the abrupt completion is "raised." This - environment is merged wherever the abrupt completion is "handled." *) - type env = t * Env.t - -end - -(* Collect all values assigned to a variable, as a conservative fallback when we - don't have precise information. *) -module Havoc = struct - type t = { - unresolved: Val.t; (* always REF *) - mutable locs: Loc.t list; - } -end -let rec list_iter3 f l1 l2 l3 = - match l1, l2, l3 with - | [], [], [] -> () - | x1::l1, x2::l2, x3::l3 -> + and erase r t = + match t with + | Uninitialized -> t + | Loc _ -> t + | PHI ts -> + let ts' = ListUtils.ident_map (erase r) ts in + if ts' == ts then + t + else + PHI ts' + | REF r' -> + if r == r' then + empty + else + let t_opt = !r' in + let t_opt' = + match t_opt with + | Unresolved _ -> t_opt + | Resolved t -> + let t' = erase r t in + if t == t' then + t_opt + else + Resolved t' + in + if t_opt != t_opt' then r' := t_opt'; + t + + (* Simplification converts a Val.t to a list of locations. 
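+     For instance, with a hypothetical location l, a value whose normal form
+     is the pair of atoms Uninitialized and Loc l simplifies to the entries
+     Ssa_api.Uninitialized and Ssa_api.Write l; hitting an unresolved REF at
+     this stage indicates a bug and triggers the failwith below.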
*) + let simplify t = + let vals = normalize t in + Core_list.map + ~f:(function + | Uninitialized -> Ssa_api.Uninitialized + | Loc loc -> Ssa_api.Write loc + | REF { contents = Unresolved _ } -> failwith "An unresolved REF cannot be simplified" + | PHI _ + | REF { contents = Resolved _ } -> + failwith "A normalized value cannot be a PHI or a resolved REF") + (ValSet.elements vals) + end + + (* An environment is a map from variables to values. *) + module Env = struct + type t = Val.t SMap.t + end + + (* Abrupt completions induce control flows, so modeling them accurately is + necessary for soundness. *) + module AbruptCompletion = struct + type label = string + + type t = + | Break of label option + | Continue of label option + | Return + | Throw + + let label_opt = Option.map ~f:Flow_ast_utils.name_of_ident + + let break x = Break (label_opt x) + + let continue x = Continue (label_opt x) + + let return = Return + + let throw = Throw + + (* match particular abrupt completions *) + let mem list : t -> bool = (fun t -> List.mem t list) + + (* match all abrupt completions *) + let all : t -> bool = (fun _t -> true) + + (* Model an abrupt completion as an OCaml exception. *) + exception Exn of t + + (* An abrupt completion carries an environment, which is the current + environment at the point where the abrupt completion is "raised." This + environment is merged wherever the abrupt completion is "handled." *) + type env = t * Env.t + end + + (* Collect all values assigned to a variable, as a conservative fallback when we + don't have precise information. *) + module Havoc = struct + type t = { + unresolved: Val.t; + (* always REF *) + mutable locs: L.t list; + } + end + + let rec list_iter3 f l1 l2 l3 = + match (l1, l2, l3) with + | ([], [], []) -> () + | (x1 :: l1, x2 :: l2, x3 :: l3) -> f x1 x2 x3; list_iter3 f l1 l2 l3 | _ -> assert false -type ssa = { - val_ref: Val.t ref; - havoc: Havoc.t; -} -class ssa_builder = object(this) - inherit scope_builder as super - - (* We maintain a map of read locations to raw Val.t terms, which are - simplified to lists of write locations once the analysis is done. *) - val mutable values: Val.t LocMap.t = LocMap.empty - method values: Ssa_api.values = - LocMap.map Val.simplify values - - val mutable id = 0 - method mk_unresolved = - id <- id + 1; - Val.mk_unresolved id - - (* Utils to manipulate single-static-assignment (SSA) environments. - - TODO: These low-level operations should probably be replaced by - higher-level "control-flow-graph" operations that can be implemented using - them, e.g., those that deal with branches and loops. 
*) - val mutable ssa_env: ssa SMap.t = SMap.empty - method ssa_env: Env.t = - SMap.map (fun { val_ref; _ } -> !val_ref) ssa_env - method merge_remote_ssa_env (env: Env.t): unit = - (* NOTE: env might have more keys than ssa_env, since the environment it - describes might be nested inside the current environment *) - SMap.iter (fun x { val_ref; _ } -> - val_ref := Val.merge !val_ref (SMap.find x env) - ) ssa_env - method merge_ssa_env (env1: Env.t) (env2: Env.t): unit = - let env1 = SMap.values env1 in - let env2 = SMap.values env2 in - let ssa_env = SMap.values ssa_env in - list_iter3 (fun { val_ref; _ } value1 value2 -> - val_ref := Val.merge value1 value2 - ) ssa_env env1 env2 - method merge_self_ssa_env (env: Env.t): unit = - let env = SMap.values env in - let ssa_env = SMap.values ssa_env in - List.iter2 (fun { val_ref; _ } value -> - val_ref := Val.merge !val_ref value - ) ssa_env env - method reset_ssa_env (env0: Env.t): unit = - let env0 = SMap.values env0 in - let ssa_env = SMap.values ssa_env in - List.iter2 (fun { val_ref; _ } value -> - val_ref := value - ) ssa_env env0 - method fresh_ssa_env: Env.t = - SMap.map (fun _ -> this#mk_unresolved) ssa_env - method assert_ssa_env (env0: Env.t): unit = - let env0 = SMap.values env0 in - let ssa_env = SMap.values ssa_env in - List.iter2 (fun { val_ref; _ } value -> - Val.resolve ~unresolved:value !val_ref - ) ssa_env env0 - method empty_ssa_env: Env.t = - SMap.map (fun _ -> Val.empty) ssa_env - method havoc_current_ssa_env: unit = - SMap.iter (fun _x { val_ref; havoc } -> - (* NOTE: havoc_env should already have all writes to x, so the only - additional thing that could come from ssa_env is "uninitialized." On - the other hand, we *dont* want to include "uninitialized" if it's no - longer in ssa_env, since that means that x has been initialized (and - there's no going back). *) - val_ref := Val.merge !val_ref havoc.Havoc.unresolved - ) ssa_env - method havoc_uninitialized_ssa_env: unit = - SMap.iter (fun _x { val_ref; havoc } -> - val_ref := Val.merge Val.uninitialized havoc.Havoc.unresolved - ) ssa_env - - method private mk_ssa_env = - SMap.map (fun _ -> { - val_ref = ref Val.uninitialized; - havoc = Havoc.{ unresolved = this#mk_unresolved; locs = [] } - }) - - method private push_ssa_env bindings = - let old_ssa_env = ssa_env in - let bindings = Bindings.to_map bindings in - ssa_env <- SMap.fold SMap.add (this#mk_ssa_env bindings) old_ssa_env; - bindings, old_ssa_env - - method private resolve_havocs = - SMap.iter (fun x _loc -> - let { havoc = { Havoc.unresolved; locs }; _ } = SMap.find x ssa_env in - Val.resolve ~unresolved (Val.all locs) - ) - - method private pop_ssa_env (bindings, old_ssa_env) = - this#resolve_havocs bindings; - ssa_env <- old_ssa_env - - method! with_bindings: 'a. ?lexical:bool -> Loc.t -> Bindings.t -> ('a -> 'a) -> 'a -> 'a = - fun ?lexical loc bindings visit node -> - let saved_state = this#push_ssa_env bindings in - this#run (fun () -> - ignore @@ super#with_bindings ?lexical loc bindings visit node - ) ~finally:(fun () -> - this#pop_ssa_env saved_state - ); - node - - (* Run some computation, catching any abrupt completions; do some final work, - and then re-raise any abrupt completions that were caught. 
*) - method run f ~finally = - let completion_state = this#run_to_completion f in - finally (); - this#from_completion completion_state - method run_to_completion f = - try f (); None with - | AbruptCompletion.Exn abrupt_completion -> Some abrupt_completion - method from_completion = function - | None -> () - | Some abrupt_completion -> raise (AbruptCompletion.Exn abrupt_completion) - - (* When an abrupt completion is raised, it falls through any subsequent - straight-line code, until it reaches a merge point in the control-flow - graph. At that point, it can be re-raised if and only if all other reaching - control-flow paths also raise the same abrupt completion. - - When re-raising is not possible, we have to save the abrupt completion and - the current environment in a list, so that we can merge such environments - later (when that abrupt completion and others like it are handled). - - Even when raising is possible, we still have to save the current - environment, since the current environment will have to be cleared to model - that the current values of all variables are unreachable. - - NOTE that raising is purely an optimization: we can have more precise - results with raising, but even if we never raised we'd still be sound. *) - - val mutable abrupt_completion_envs: AbruptCompletion.env list = [] - method raise_abrupt_completion: 'a. (AbruptCompletion.t -> 'a) = fun abrupt_completion -> - let env = this#ssa_env in - this#reset_ssa_env this#empty_ssa_env; - abrupt_completion_envs <- (abrupt_completion, env) :: abrupt_completion_envs; - raise (AbruptCompletion.Exn abrupt_completion) - - method expecting_abrupt_completions f = - let saved = abrupt_completion_envs in - abrupt_completion_envs <- []; - this#run f ~finally:(fun () -> - abrupt_completion_envs <- List.rev_append saved abrupt_completion_envs - ) - - (* Given multiple completion states, (re)raise if all of them are the same - abrupt completion. This function is called at merge points. *) - method merge_completion_states (hd_completion_state, tl_completion_states) = - match hd_completion_state with - | None -> () - | Some abrupt_completion -> - if List.for_all (function - | None -> false - | Some abrupt_completion' -> abrupt_completion = abrupt_completion' - ) tl_completion_states - then raise (AbruptCompletion.Exn abrupt_completion) - - (* Given a filter for particular abrupt completions to expect, find the saved - environments corresponding to them, and merge those environments with the - current environment. This function is called when exiting ASTs that - introduce (and therefore expect) particular abrupt completions. *) - method commit_abrupt_completion_matching filter completion_state = - let matching, non_matching = List.partition (fun (abrupt_completion, _env) -> - filter abrupt_completion - ) abrupt_completion_envs in - if matching <> [] - then begin - List.iter (fun (_abrupt_completion, env) -> - this#merge_remote_ssa_env env - ) matching; - abrupt_completion_envs <- non_matching - end else match completion_state with - | Some abrupt_completion when not (filter abrupt_completion) -> - raise (AbruptCompletion.Exn abrupt_completion) - | _ -> () - - (* Track the list of labels that might describe a loop. Used to detect which - labeled continues need to be handled by the loop. - - The idea is that a labeled statement adds its label to the list before - entering its child, and if the child is not a loop or another labeled - statement, the list will be cleared. 
A loop will consume the list, so we - also clear the list on our way out of any labeled statement. *) - val mutable possible_labeled_continues = [] - - (* write *) - method! pattern_identifier ?kind (ident: Loc.t Ast.Identifier.t) = - ignore kind; - let loc, x = ident in - begin match SMap.get x ssa_env with - | Some { val_ref; havoc } -> - val_ref := Val.one loc; - Havoc.(havoc.locs <- loc :: havoc.locs) - | _ -> () - end; - super#identifier ident - - (* read *) - method any_identifier (loc: Loc.t) (x: string) = - begin match SMap.get x ssa_env with - | Some { val_ref; _ } -> - values <- LocMap.add loc !val_ref values - | None -> () - end; - - method! identifier (ident: Loc.t Ast.Identifier.t) = - let loc, x = ident in - this#any_identifier loc x; - super#identifier ident - - method! jsx_identifier (ident: Loc.t Ast.JSX.Identifier.t) = - let loc, {Ast.JSX.Identifier.name} = ident in - this#any_identifier loc name; - super#jsx_identifier ident - - (* Order of evaluation matters *) - method! assignment _loc (expr: (Loc.t, Loc.t) Ast.Expression.Assignment.t) = - let open Ast.Expression.Assignment in - let { operator; left; right } = expr in - begin match operator with - | Assign -> - let open Ast.Pattern in - begin match left with - | _, (Identifier _ | Object _ | Array _) -> - (* given `x = e`, read e then write x *) + type ssa = { + val_ref: Val.t ref; + havoc: Havoc.t; + } + + class ssa_builder = + object (this) + inherit scope_builder as super + + (* We maintain a map of read locations to raw Val.t terms, which are + simplified to lists of write locations once the analysis is done. *) + val mutable values : Val.t L.LMap.t = L.LMap.empty + + method values : Ssa_api.values = L.LMap.map Val.simplify values + + val mutable id = 0 + + method mk_unresolved = + id <- id + 1; + Val.mk_unresolved id + + (* Utils to manipulate single-static-assignment (SSA) environments. + + TODO: These low-level operations should probably be replaced by + higher-level "control-flow-graph" operations that can be implemented using + them, e.g., those that deal with branches and loops. 
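+
+     As a sketch of how these primitives combine for a two-way branch
+     (this is essentially what if_statement further down does):
+
+       let env0 = this#ssa_env in
+       ... visit the consequent ...;
+       let env1 = this#ssa_env in
+       this#reset_ssa_env env0;
+       ... visit the alternate ...;
+       this#merge_self_ssa_env env1
+
+     Loops instead seed the loop head with fresh_ssa_env placeholders and
+     close the cycle with assert_ssa_env once the body has been visited.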
*) + val mutable ssa_env : ssa SMap.t = SMap.empty + + method ssa_env : Env.t = SMap.map (fun { val_ref; _ } -> !val_ref) ssa_env + + method merge_remote_ssa_env (env : Env.t) : unit = + (* NOTE: env might have more keys than ssa_env, since the environment it + describes might be nested inside the current environment *) + SMap.iter (fun x { val_ref; _ } -> val_ref := Val.merge !val_ref (SMap.find x env)) ssa_env + + method merge_ssa_env (env1 : Env.t) (env2 : Env.t) : unit = + let env1 = SMap.values env1 in + let env2 = SMap.values env2 in + let ssa_env = SMap.values ssa_env in + list_iter3 + (fun { val_ref; _ } value1 value2 -> val_ref := Val.merge value1 value2) + ssa_env + env1 + env2 + + method merge_self_ssa_env (env : Env.t) : unit = + let env = SMap.values env in + let ssa_env = SMap.values ssa_env in + List.iter2 (fun { val_ref; _ } value -> val_ref := Val.merge !val_ref value) ssa_env env + + method reset_ssa_env (env0 : Env.t) : unit = + let env0 = SMap.values env0 in + let ssa_env = SMap.values ssa_env in + List.iter2 (fun { val_ref; _ } value -> val_ref := value) ssa_env env0 + + method fresh_ssa_env : Env.t = SMap.map (fun _ -> this#mk_unresolved) ssa_env + + method assert_ssa_env (env0 : Env.t) : unit = + let env0 = SMap.values env0 in + let ssa_env = SMap.values ssa_env in + List.iter2 + (fun { val_ref; _ } value -> Val.resolve ~unresolved:value !val_ref) + ssa_env + env0 + + method empty_ssa_env : Env.t = SMap.map (fun _ -> Val.empty) ssa_env + + method havoc_current_ssa_env : unit = + SMap.iter + (fun _x { val_ref; havoc } -> + (* NOTE: havoc_env should already have all writes to x, so the only + additional thing that could come from ssa_env is "uninitialized." On + the other hand, we *dont* want to include "uninitialized" if it's no + longer in ssa_env, since that means that x has been initialized (and + there's no going back). *) + val_ref := Val.merge !val_ref havoc.Havoc.unresolved) + ssa_env + + method havoc_uninitialized_ssa_env : unit = + SMap.iter + (fun _x { val_ref; havoc } -> + val_ref := Val.merge Val.uninitialized havoc.Havoc.unresolved) + ssa_env + + method private mk_ssa_env = + SMap.map (fun _ -> + { + val_ref = ref Val.uninitialized; + havoc = Havoc.{ unresolved = this#mk_unresolved; locs = [] }; + }) + + method private push_ssa_env bindings = + let old_ssa_env = ssa_env in + let bindings = Bindings.to_map bindings in + ssa_env <- SMap.fold SMap.add (this#mk_ssa_env bindings) old_ssa_env; + (bindings, old_ssa_env) + + method private resolve_havocs = + SMap.iter (fun x _loc -> + let { havoc = { Havoc.unresolved; locs }; _ } = SMap.find x ssa_env in + Val.resolve ~unresolved (Val.all locs)) + + method private pop_ssa_env (bindings, old_ssa_env) = + this#resolve_havocs bindings; + ssa_env <- old_ssa_env + + method! with_bindings : 'a. ?lexical:bool -> L.t -> L.t Bindings.t -> ('a -> 'a) -> 'a -> 'a + = + fun ?lexical loc bindings visit node -> + let saved_state = this#push_ssa_env bindings in + this#run + (fun () -> ignore @@ super#with_bindings ?lexical loc bindings visit node) + ~finally:(fun () -> this#pop_ssa_env saved_state); + node + + (* Run some computation, catching any abrupt completions; do some final work, + and then re-raise any abrupt completions that were caught. 
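+
+     A typical use, sketched:
+
+       this#run
+         (fun () -> ignore @@ this#statement body)
+         ~finally:(fun () -> this#pop_ssa_env saved_state)
+
+     run_to_completion maps a raised AbruptCompletion.Exn to Some of the
+     completion and a normal return to None; from_completion re-raises it
+     only after the finally work has run.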
*) + method run f ~finally = + let completion_state = this#run_to_completion f in + finally (); + this#from_completion completion_state + + method run_to_completion f = + try + f (); + None + with AbruptCompletion.Exn abrupt_completion -> Some abrupt_completion + + method from_completion = + function + | None -> () + | Some abrupt_completion -> raise (AbruptCompletion.Exn abrupt_completion) + + (* When an abrupt completion is raised, it falls through any subsequent + straight-line code, until it reaches a merge point in the control-flow + graph. At that point, it can be re-raised if and only if all other reaching + control-flow paths also raise the same abrupt completion. + + When re-raising is not possible, we have to save the abrupt completion and + the current environment in a list, so that we can merge such environments + later (when that abrupt completion and others like it are handled). + + Even when raising is possible, we still have to save the current + environment, since the current environment will have to be cleared to model + that the current values of all variables are unreachable. + + NOTE that raising is purely an optimization: we can have more precise + results with raising, but even if we never raised we'd still be sound. *) + val mutable abrupt_completion_envs : AbruptCompletion.env list = [] + + method raise_abrupt_completion : 'a. AbruptCompletion.t -> 'a = + fun abrupt_completion -> + let env = this#ssa_env in + this#reset_ssa_env this#empty_ssa_env; + abrupt_completion_envs <- (abrupt_completion, env) :: abrupt_completion_envs; + raise (AbruptCompletion.Exn abrupt_completion) + + method expecting_abrupt_completions f = + let saved = abrupt_completion_envs in + abrupt_completion_envs <- []; + this#run f ~finally:(fun () -> + abrupt_completion_envs <- List.rev_append saved abrupt_completion_envs) + + (* Given multiple completion states, (re)raise if all of them are the same + abrupt completion. This function is called at merge points. *) + method merge_completion_states (hd_completion_state, tl_completion_states) = + match hd_completion_state with + | None -> () + | Some abrupt_completion -> + if + List.for_all + (function + | None -> false + | Some abrupt_completion' -> abrupt_completion = abrupt_completion') + tl_completion_states + then + raise (AbruptCompletion.Exn abrupt_completion) + + (* Given a filter for particular abrupt completions to expect, find the saved + environments corresponding to them, and merge those environments with the + current environment. This function is called when exiting ASTs that + introduce (and therefore expect) particular abrupt completions. *) + method commit_abrupt_completion_matching filter completion_state = + let (matching, non_matching) = + List.partition + (fun (abrupt_completion, _env) -> filter abrupt_completion) + abrupt_completion_envs + in + if matching <> [] then ( + List.iter (fun (_abrupt_completion, env) -> this#merge_remote_ssa_env env) matching; + abrupt_completion_envs <- non_matching + ) else + match completion_state with + | Some abrupt_completion when not (filter abrupt_completion) -> + raise (AbruptCompletion.Exn abrupt_completion) + | _ -> () + + (* Track the list of labels that might describe a loop. Used to detect which + labeled continues need to be handled by the loop. + + The idea is that a labeled statement adds its label to the list before + entering its child, and if the child is not a loop or another labeled + statement, the list will be cleared. 
A loop will consume the list, so we + also clear the list on our way out of any labeled statement. *) + val mutable possible_labeled_continues = [] + + (* write *) + method! pattern_identifier ?kind (ident : (L.t, L.t) Ast.Identifier.t) = + ignore kind; + let (loc, { Ast.Identifier.name = x; comments = _ }) = ident in + begin + match SMap.get x ssa_env with + | Some { val_ref; havoc } -> + val_ref := Val.one loc; + Havoc.(havoc.locs <- loc :: havoc.locs) + | _ -> () + end; + super#identifier ident + + (* read *) + method any_identifier (loc : L.t) (x : string) = + match SMap.get x ssa_env with + | Some { val_ref; _ } -> values <- L.LMap.add loc !val_ref values + | None -> () + + method! identifier (ident : (L.t, L.t) Ast.Identifier.t) = + let (loc, { Ast.Identifier.name = x; comments = _ }) = ident in + this#any_identifier loc x; + super#identifier ident + + method! jsx_identifier (ident : L.t Ast.JSX.Identifier.t) = + let (loc, { Ast.JSX.Identifier.name }) = ident in + this#any_identifier loc name; + super#jsx_identifier ident + + (* Order of evaluation matters *) + method! assignment _loc (expr : (L.t, L.t) Ast.Expression.Assignment.t) = + Ast.Expression.Assignment.( + let { operator; left; right } = expr in + begin + match operator with + | None -> + Ast.Pattern.( + begin + match left with + | (_, (Identifier _ | Object _ | Array _)) -> + (* given `x = e`, read e then write x *) + ignore @@ this#expression right; + ignore @@ this#assignment_pattern left + | (_, Expression _) -> + (* given `o.x = e`, read o then read e *) + ignore @@ this#assignment_pattern left; + ignore @@ this#expression right + end) + | Some _ -> + Ast.Pattern.( + begin + match left with + | (_, Identifier { Identifier.name; _ }) -> + (* given `x += e`, read x then read e then write x *) + ignore @@ this#identifier name; + ignore @@ this#expression right; + ignore @@ this#assignment_pattern left + | (_, Expression _) -> + (* given `o.x += e`, read o then read e *) + ignore @@ this#assignment_pattern left; + ignore @@ this#expression right + | (_, (Object _ | Array _)) -> failwith "unexpected AST node" + end) + end; + expr) + + (* Order of evaluation matters *) + method! variable_declarator + ~kind (decl : (L.t, L.t) Ast.Statement.VariableDeclaration.Declarator.t) = + Ast.Statement.VariableDeclaration.Declarator.( + let (_loc, { id; init }) = decl in + Ast.Pattern.( + begin + match id with + | (_, (Identifier _ | Object _ | Array _)) -> + begin + match init with + | Some init -> + (* given `var x = e`, read e then write x *) + ignore @@ this#expression init; + ignore @@ this#variable_declarator_pattern ~kind id + | None -> + (* `var x;` is not a write of `x` *) + () + end + | (_, Expression _) -> failwith "unexpected AST node" + end; + decl)) + + (* read and write (when the argument is an identifier) *) + method! update_expression _loc (expr : (L.t, L.t) Ast.Expression.Update.t) = + Ast.Expression.Update.( + let { argument; operator = _; prefix = _ } = expr in + begin + match argument with + | (_, Ast.Expression.Identifier x) -> + (* given `x++`, read x then write x *) + ignore @@ this#identifier x; + ignore @@ this#pattern_identifier x + | _ -> + (* given `o.x++`, read o *) + ignore @@ this#expression argument + end; + expr) + + (* things that cause abrupt completions *) + method! break _loc (stmt : L.t Ast.Statement.Break.t) = + Ast.Statement.Break.( + let { label; comments = _ } = stmt in + this#raise_abrupt_completion (AbruptCompletion.break label)) + + method! 
continue _loc (stmt : L.t Ast.Statement.Continue.t) = + Ast.Statement.Continue.( + let { label; comments = _ } = stmt in + this#raise_abrupt_completion (AbruptCompletion.continue label)) + + method! return _loc (stmt : (L.t, L.t) Ast.Statement.Return.t) = + Ast.Statement.Return.( + let { argument; comments = _ } = stmt in + ignore @@ Flow_ast_mapper.map_opt this#expression argument; + this#raise_abrupt_completion AbruptCompletion.return) + + method! throw _loc (stmt : (L.t, L.t) Ast.Statement.Throw.t) = + Ast.Statement.Throw.( + let { argument } = stmt in + ignore @@ this#expression argument; + this#raise_abrupt_completion AbruptCompletion.throw) + + (** Control flow **) + + (** We describe the effect on the environment of evaluating node n using Hoare + triples of the form [PRE] n [POST], where PRE is the environment before + and POST is the environment after the evaluation of node n. Environments + must be joined whenever a node is reachable from multiple nodes, as can + happen after a branch or before a loop. **) + + (******************************************) + (* [PRE] if (e) { s1 } else { s2 } [POST] *) + (******************************************) + (* | *) + (* e *) + (* / \ *) + (* s1 s2 *) + (* \./ *) + (* | *) + (******************************************) + (* [PRE] e [ENV0] *) + (* [ENV0] s1 [ENV1] *) + (* [ENV0] s2 [ENV2] *) + (* POST = ENV1 | ENV2 *) + (******************************************) + method! if_statement _loc (stmt : (L.t, L.t) Ast.Statement.If.t) = + Ast.Statement.If.( + let { test; consequent; alternate; _ } = stmt in + ignore @@ this#expression test; + let env0 = this#ssa_env in + (* collect completions and environments of every branch *) + let then_completion_state = + this#run_to_completion (fun () -> + ignore @@ this#if_consequent_statement ~has_else:(alternate <> None) consequent) + in + let env1 = this#ssa_env in + this#reset_ssa_env env0; + let else_completion_state = + this#run_to_completion (fun () -> + ignore @@ Flow_ast_mapper.map_opt this#statement alternate) + in + (* merge environments *) + this#merge_self_ssa_env env1; + + (* merge completions *) + let if_completion_states = (then_completion_state, [else_completion_state]) in + this#merge_completion_states if_completion_states; + stmt) + + (********************************) + (* [PRE] while (e) { s } [POST] *) + (********************************) + (* | *) + (* e <-. *) + (* / \ / *) + (* | s *) + (* \ *) + (* | *) + (********************************) + (* PRE = ENV0 *) + (* [ENV0 | ENV1] e [ENV2] *) + (* [ENV2] s [ENV1] *) + (* POST = ENV2 *) + (********************************) + method! while_ _loc (stmt : (L.t, L.t) Ast.Statement.While.t) = + this#expecting_abrupt_completions (fun () -> + let continues = AbruptCompletion.continue None :: possible_labeled_continues in + Ast.Statement.While.( + let { test; body } = stmt in + (* placeholder for environment at the end of the loop body *) + let env1 = this#fresh_ssa_env in + this#merge_self_ssa_env env1; + ignore @@ this#expression test; + let env2 = this#ssa_env in + let loop_completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement body) + in + (* continue exits *) + let loop_completion_state = + this#run_to_completion (fun () -> + this#commit_abrupt_completion_matching + (AbruptCompletion.mem continues) + loop_completion_state) + in + (* end of loop body *) + this#assert_ssa_env env1; + + (* out of the loop! 
this always happens right after evaluating the loop test *) + this#reset_ssa_env env2; + + (* we might also never enter the loop body *) + let while_completion_states = (None, [loop_completion_state]) in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states while_completion_states) + in + (* completion_state = None *) + (* break exits *) + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break None]) + completion_state)); + stmt + + (***********************************) + (* [PRE] do { s } while (e) [POST] *) + (***********************************) + (* | *) + (* s <-. *) + (* \ / *) + (* e *) + (* | *) + (***********************************) + (* PRE = ENV0 *) + (* [ENV0 | ENV1] s; e [ENV1] *) + (* POST = ENV1 *) + (***********************************) + method! do_while _loc (stmt : (L.t, L.t) Ast.Statement.DoWhile.t) = + this#expecting_abrupt_completions (fun () -> + let continues = AbruptCompletion.continue None :: possible_labeled_continues in + Ast.Statement.DoWhile.( + let { body; test; _ } = stmt in + let env1 = this#fresh_ssa_env in + this#merge_self_ssa_env env1; + let loop_completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement body) + in + let loop_completion_state = + this#run_to_completion (fun () -> + this#commit_abrupt_completion_matching + (AbruptCompletion.mem continues) + loop_completion_state) + in + begin + match loop_completion_state with + | None -> ignore @@ this#expression test + | _ -> () + end; + this#assert_ssa_env env1; + let do_while_completion_states = (loop_completion_state, []) in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states do_while_completion_states) + in + (* completion_state = loop_completion_state *) + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break None]) + completion_state)); + stmt + + (**************************************) + (* [PRE] for (e; e1; e2) { s } [POST] *) + (**************************************) + (* | *) + (* e *) + (* | *) + (* e1 <---. *) + (* / \ | *) + (* | s | *) + (* | \ / *) + (* | e2 *) + (* \ *) + (* | *) + (**************************************) + (* [PRE] e [ENV0] *) + (* [ENV0 | ENV1] e1 [ENV2] *) + (* [ENV2] s; e2 [ENV1] *) + (* POST = ENV2 *) + (**************************************) + method! 
scoped_for_statement _loc (stmt : (L.t, L.t) Ast.Statement.For.t) = + this#expecting_abrupt_completions (fun () -> + let continues = AbruptCompletion.continue None :: possible_labeled_continues in + Ast.Statement.For.( + let { init; test; update; body } = stmt in + ignore @@ Flow_ast_mapper.map_opt this#for_statement_init init; + let env1 = this#fresh_ssa_env in + this#merge_self_ssa_env env1; + ignore @@ Flow_ast_mapper.map_opt this#expression test; + let env2 = this#ssa_env in + let loop_completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement body) + in + (* continue *) + let loop_completion_state = + this#run_to_completion (fun () -> + this#commit_abrupt_completion_matching + (AbruptCompletion.mem continues) + loop_completion_state) + in + begin + match loop_completion_state with + | None -> ignore @@ Flow_ast_mapper.map_opt this#expression update + | _ -> () + end; + this#assert_ssa_env env1; + this#reset_ssa_env env2; + let for_completion_states = (None, [loop_completion_state]) in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states for_completion_states) + in + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break None]) + completion_state)); + stmt + + (*************************************) + (* [PRE] for (e1 in e2) { s } [POST] *) + (*************************************) + (* | *) + (* e2 *) + (* | *) + (* . <---. *) + (* / \ | *) + (* | e1 | *) + (* | \ / *) + (* | s *) + (* \ *) + (* | *) + (*************************************) + (* [PRE] e2 [ENV0] *) + (* ENV2 = ENV0 | ENV1 *) + (* [ENV2] e2 [ENV0] *) + (* [ENV0 | ENV1] e1; s [ENV1] *) + (* POST = ENV2 *) + (*************************************) + method! scoped_for_in_statement _loc (stmt : (L.t, L.t) Ast.Statement.ForIn.t) = + this#expecting_abrupt_completions (fun () -> + let continues = AbruptCompletion.continue None :: possible_labeled_continues in + Ast.Statement.ForIn.( + let { left; right; body; each = _ } = stmt in + ignore @@ this#expression right; + let env1 = this#fresh_ssa_env in + this#merge_self_ssa_env env1; + let env2 = this#ssa_env in + ignore @@ this#for_in_statement_lhs left; + let loop_completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement body) + in + (* continue *) + let loop_completion_state = + this#run_to_completion (fun () -> + this#commit_abrupt_completion_matching + (AbruptCompletion.mem continues) + loop_completion_state) + in + this#assert_ssa_env env1; + this#reset_ssa_env env2; + let for_in_completion_states = (None, [loop_completion_state]) in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states for_in_completion_states) + in + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break None]) + completion_state)); + stmt + + (*************************************) + (* [PRE] for (e1 of e2) { s } [POST] *) + (*************************************) + (* | *) + (* e2 *) + (* | *) + (* . <---. *) + (* / \ | *) + (* | e1 | *) + (* | \ / *) + (* | s *) + (* \ *) + (* | *) + (*************************************) + (* [PRE] e2 [ENV0] *) + (* ENV2 = ENV0 | ENV1 *) + (* [ENV2] e2 [ENV0] *) + (* [ENV0 | ENV1] e1; s [ENV1] *) + (* POST = ENV2 *) + (*************************************) + method! 
scoped_for_of_statement _loc (stmt : (L.t, L.t) Ast.Statement.ForOf.t) = + this#expecting_abrupt_completions (fun () -> + let continues = AbruptCompletion.continue None :: possible_labeled_continues in + Ast.Statement.ForOf.( + let { left; right; body; async = _ } = stmt in + ignore @@ this#expression right; + let env1 = this#fresh_ssa_env in + this#merge_self_ssa_env env1; + let env2 = this#ssa_env in + ignore @@ this#for_of_statement_lhs left; + let loop_completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement body) + in + (* continue *) + let loop_completion_state = + this#run_to_completion (fun () -> + this#commit_abrupt_completion_matching + (AbruptCompletion.mem continues) + loop_completion_state) + in + this#assert_ssa_env env1; + this#reset_ssa_env env2; + let for_of_completion_states = (None, [loop_completion_state]) in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states for_of_completion_states) + in + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break None]) + completion_state)); + stmt + + (***********************************************************) + (* [PRE] switch (e) { case e1: s1 ... case eN: sN } [POST] *) + (***********************************************************) + (* | *) + (* e *) + (* / *) + (* e1 *) + (* | \ *) + (* . s1 *) + (* | | *) + (* ei . *) + (* | \ | *) + (* . si *) + (* | | *) + (* eN . *) + (* | \ | *) + (* | sN *) + (* \ | *) + (* \| *) + (* | *) + (***********************************************************) + (* [PRE] e [ENV0] *) + (* ENV0' = empty *) + (* \forall i = 0..N-1: *) + (* [ENVi] ei+1 [ENVi+1] *) + (* [ENVi+1 | ENVi'] si+1 [ENVi+1'] *) + (* POST = ENVN | ENVN' *) + (***********************************************************) + method! switch _loc (switch : (L.t, L.t) Ast.Statement.Switch.t) = + this#expecting_abrupt_completions (fun () -> + Ast.Statement.Switch.( + let { discriminant; cases } = switch in + ignore @@ this#expression discriminant; + let (env, case_completion_states) = + List.fold_left + (fun acc stuff -> + let (_loc, case) = stuff in + this#ssa_switch_case acc case) + (this#empty_ssa_env, []) + cases + in + this#merge_self_ssa_env env; + + (* In general, cases are non-exhaustive. TODO: optimize with `default`. 
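+           For example, switch (x) with a single case 1 arm may match nothing,
+           so the post-state must also cover the fall-through environment in
+           which only the discriminant and the case tests have been evaluated;
+           this is why None is paired with the per-case completion states below.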
*) + let switch_completion_states = (None, case_completion_states) in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states switch_completion_states) + in + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break None]) + completion_state)); + switch + + method private ssa_switch_case + (env, case_completion_states) (case : (L.t, L.t) Ast.Statement.Switch.Case.t') = + Ast.Statement.Switch.Case.( + let { test; consequent } = case in + ignore @@ Flow_ast_mapper.map_opt this#expression test; + let env0 = this#ssa_env in + this#merge_ssa_env env0 env; + let case_completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement_list consequent) + in + let env' = this#ssa_env in + this#reset_ssa_env env0; + (env', case_completion_state :: case_completion_states)) + + (****************************************) + (* [PRE] try { s1 } catch { s2 } [POST] *) + (****************************************) + (* | *) + (* s1 ..~ *) + (* | | *) + (* | s2 *) + (* \./ *) + (* | *) + (****************************************) + (* [PRE] s1 [ENV1] *) + (* [HAVOC] s2 [ENV2 ] *) + (* POST = ENV1 | ENV2 *) + (****************************************) + (*******************************************************) + (* [PRE] try { s1 } catch { s2 } finally { s3 } [POST] *) + (*******************************************************) + (* | *) + (* s1 ..~ *) + (* | | *) + (* | s2 ..~ *) + (* \./ | *) + (* |______| *) + (* | *) + (* s3 *) + (* | *) + (*******************************************************) + (* [PRE] s1 [ENV1] *) + (* [HAVOC] s2 [ENV2 ] *) + (* [HAVOC] s3 [ENV3 ] *) + (* POST = ENV3 *) + (*******************************************************) + method! try_catch _loc (stmt : (L.t, L.t) Ast.Statement.Try.t) = + this#expecting_abrupt_completions (fun () -> + Ast.Statement.Try.( + let { block = (loc, block); handler; finalizer; comments = _ } = stmt in + let try_completion_state = + this#run_to_completion (fun () -> ignore @@ this#block loc block) + in + let env1 = this#ssa_env in + let (catch_completion_state_opt, env2) = + match handler with + | Some (loc, clause) -> + (* NOTE: Havoc-ing the state when entering the handler is probably + overkill. We can be more precise but still correct by collecting all + possible writes in the try-block and merging them with the state when + entering the try-block. *) + this#havoc_current_ssa_env; + let catch_completion_state = + this#run_to_completion (fun () -> ignore @@ this#catch_clause loc clause) + in + ([catch_completion_state], this#ssa_env) + | None -> ([], this#empty_ssa_env) + in + this#merge_ssa_env env1 env2; + let try_catch_completion_states = + (try_completion_state, catch_completion_state_opt) + in + let completion_state = + this#run_to_completion (fun () -> + this#merge_completion_states try_catch_completion_states) + in + this#commit_abrupt_completion_matching AbruptCompletion.all completion_state; + begin + match finalizer with + | Some (_loc, block) -> + (* NOTE: Havoc-ing the state when entering the finalizer is probably + overkill. We can be more precise but still correct by collecting + all possible writes in the handler and merging them with the state + when entering the handler (which in turn should already account for + any contributions by the try-block). *) + this#havoc_current_ssa_env; + ignore @@ this#block loc block + | None -> () + end; + this#from_completion completion_state)); + stmt + + (* branching expressions *) + method! 
logical _loc (expr : (L.t, L.t) Ast.Expression.Logical.t) = + Ast.Expression.Logical.( + let { operator = _; left; right } = expr in + ignore @@ this#expression left; + let env1 = this#ssa_env in ignore @@ this#expression right; - ignore @@ this#assignment_pattern left - | _, Expression _ -> - (* given `o.x = e`, read o then read e *) - ignore @@ this#assignment_pattern left; - ignore @@ this#expression right - | _, Assignment _ -> failwith "unexpected AST node" - end - | _ -> - let open Ast.Pattern in - begin match left with - | _, Identifier { Identifier.name; _ } -> - (* given `x += e`, read x then read e then write x *) - ignore @@ this#identifier name; - ignore @@ this#expression right; - ignore @@ this#assignment_pattern left - | _, Expression _ -> - (* given `o.x += e`, read o then read e *) - ignore @@ this#assignment_pattern left; - ignore @@ this#expression right - | _, (Object _ | Array _ | Assignment _) -> failwith "unexpected AST node" - end - end; - expr - - (* Order of evaluation matters *) - method! variable_declarator ~kind (decl: (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.Declarator.t) = - let open Ast.Statement.VariableDeclaration.Declarator in - let (_loc, { id; init }) = decl in - let open Ast.Pattern in - begin match id with - | _, (Identifier _ | Object _ | Array _) -> - begin match init with - | Some init -> - (* given `var x = e`, read e then write x *) - ignore @@ this#expression init; - ignore @@ this#variable_declarator_pattern ~kind id - | None -> - (* `var x;` is not a write of `x` *) - () - end - | _, (Expression _ | Assignment _) -> failwith "unexpected AST node" - end; - decl - - (* read and write (when the argument is an identifier) *) - method! update_expression _loc (expr: (Loc.t, Loc.t) Ast.Expression.Update.t) = - let open Ast.Expression.Update in - let { argument; operator = _; prefix = _ } = expr in - begin match argument with - | _, Ast.Expression.Identifier x -> - (* given `x++`, read x then write x *) - ignore @@ this#identifier x; - ignore @@ this#pattern_identifier x - | _ -> - (* given `o.x++`, read o *) - ignore @@ this#expression argument - end; - expr - - (* things that cause abrupt completions *) - method! break _loc (stmt: Loc.t Ast.Statement.Break.t) = - let open Ast.Statement.Break in - let { label } = stmt in - this#raise_abrupt_completion (AbruptCompletion.break label) - - method! continue _loc (stmt: Loc.t Ast.Statement.Continue.t) = - let open Ast.Statement.Continue in - let { label } = stmt in - this#raise_abrupt_completion (AbruptCompletion.continue label) - - method! return _loc (stmt: (Loc.t, Loc.t) Ast.Statement.Return.t) = - let open Ast.Statement.Return in - let { argument } = stmt in - ignore @@ Flow_ast_mapper.map_opt this#expression argument; - this#raise_abrupt_completion AbruptCompletion.return - - method! throw _loc (stmt: (Loc.t, Loc.t) Ast.Statement.Throw.t) = - let open Ast.Statement.Throw in - let { argument } = stmt in - ignore @@ this#expression argument; - this#raise_abrupt_completion AbruptCompletion.throw - - (** Control flow **) - - (** We describe the effect on the environment of evaluating node n using Hoare - triples of the form [PRE] n [POST], where PRE is the environment before - and POST is the environment after the evaluation of node n. Environments - must be joined whenever a node is reachable from multiple nodes, as can - happen after a branch or before a loop. 
**) - - (******************************************) - (* [PRE] if (e) { s1 } else { s2 } [POST] *) - (******************************************) - (* | *) - (* e *) - (* / \ *) - (* s1 s2 *) - (* \./ *) - (* | *) - (******************************************) - (* [PRE] e [ENV0] *) - (* [ENV0] s1 [ENV1] *) - (* [ENV0] s2 [ENV2] *) - (* POST = ENV1 | ENV2 *) - (******************************************) - method! if_statement _loc (stmt: (Loc.t, Loc.t) Ast.Statement.If.t) = - let open Ast.Statement.If in - let { test; consequent; alternate } = stmt in - ignore @@ this#expression test; - let env0 = this#ssa_env in - (* collect completions and environments of every branch *) - let then_completion_state = this#run_to_completion (fun () -> - ignore @@ this#if_consequent_statement ~has_else:(alternate <> None) consequent - ) in - let env1 = this#ssa_env in - this#reset_ssa_env env0; - let else_completion_state = this#run_to_completion (fun () -> - ignore @@ Flow_ast_mapper.map_opt this#statement alternate - ) in - (* merge environments *) - this#merge_self_ssa_env env1; - (* merge completions *) - let if_completion_states = then_completion_state, [else_completion_state] in - this#merge_completion_states if_completion_states; - stmt - - (********************************) - (* [PRE] while (e) { s } [POST] *) - (********************************) - (* | *) - (* e <-. *) - (* / \ / *) - (* | s *) - (* \ *) - (* | *) - (********************************) - (* PRE = ENV0 *) - (* [ENV0 | ENV1] e [ENV2] *) - (* [ENV2] s [ENV1] *) - (* POST = ENV2 *) - (********************************) - method! while_ _loc (stmt: (Loc.t, Loc.t) Ast.Statement.While.t) = - this#expecting_abrupt_completions (fun () -> - let continues = (AbruptCompletion.continue None)::possible_labeled_continues in - let open Ast.Statement.While in - let { test; body } = stmt in - (* placeholder for environment at the end of the loop body *) - let env1 = this#fresh_ssa_env in - this#merge_self_ssa_env env1; - ignore @@ this#expression test; - let env2 = this#ssa_env in - let loop_completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement body - ) in - (* continue exits *) - let loop_completion_state = this#run_to_completion (fun () -> - this#commit_abrupt_completion_matching (AbruptCompletion.mem continues) loop_completion_state - ) in - (* end of loop body *) - this#assert_ssa_env env1; - (* out of the loop! this always happens right after evaluating the loop test *) - this#reset_ssa_env env2; - (* we might also never enter the loop body *) - let while_completion_states = None, [loop_completion_state] in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states while_completion_states - ) in (* completion_state = None *) - (* break exits *) - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break None]) completion_state - ); - stmt - - (***********************************) - (* [PRE] do { s } while (e) [POST] *) - (***********************************) - (* | *) - (* s <-. *) - (* \ / *) - (* e *) - (* | *) - (***********************************) - (* PRE = ENV0 *) - (* [ENV0 | ENV1] s; e [ENV1] *) - (* POST = ENV1 *) - (***********************************) - method! 
do_while _loc (stmt: (Loc.t, Loc.t) Ast.Statement.DoWhile.t) = - this#expecting_abrupt_completions (fun () -> - let continues = (AbruptCompletion.continue None)::possible_labeled_continues in - let open Ast.Statement.DoWhile in - let { body; test } = stmt in - let env1 = this#fresh_ssa_env in - this#merge_self_ssa_env env1; - let loop_completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement body - ) in - let loop_completion_state = this#run_to_completion (fun () -> - this#commit_abrupt_completion_matching (AbruptCompletion.mem continues) loop_completion_state - ) in - begin match loop_completion_state with - | None -> ignore @@ this#expression test - | _ -> () - end; - this#assert_ssa_env env1; - let do_while_completion_states = loop_completion_state, [] in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states do_while_completion_states - ) in (* completion_state = loop_completion_state *) - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break None]) completion_state - ); - stmt - - (**************************************) - (* [PRE] for (e; e1; e2) { s } [POST] *) - (**************************************) - (* | *) - (* e *) - (* | *) - (* e1 <---. *) - (* / \ | *) - (* | s | *) - (* | \ / *) - (* | e2 *) - (* \ *) - (* | *) - (**************************************) - (* [PRE] e [ENV0] *) - (* [ENV0 | ENV1] e1 [ENV2] *) - (* [ENV2] s; e2 [ENV1] *) - (* POST = ENV2 *) - (**************************************) - method! scoped_for_statement _loc (stmt: (Loc.t, Loc.t) Ast.Statement.For.t) = - this#expecting_abrupt_completions (fun () -> - let continues = (AbruptCompletion.continue None)::possible_labeled_continues in - let open Ast.Statement.For in - let { init; test; update; body } = stmt in - ignore @@ Flow_ast_mapper.map_opt this#for_statement_init init; - let env1 = this#fresh_ssa_env in - this#merge_self_ssa_env env1; - ignore @@ Flow_ast_mapper.map_opt this#expression test; - let env2 = this#ssa_env in - let loop_completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement body; - ) in - (* continue *) - let loop_completion_state = this#run_to_completion (fun () -> - this#commit_abrupt_completion_matching (AbruptCompletion.mem continues) loop_completion_state - ) in - begin match loop_completion_state with - | None -> ignore @@ Flow_ast_mapper.map_opt this#expression update - | _ -> () - end; - this#assert_ssa_env env1; - this#reset_ssa_env env2; - let for_completion_states = None, [loop_completion_state] in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states for_completion_states - ) in - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break None]) completion_state - ); - stmt - - (*************************************) - (* [PRE] for (e1 in e2) { s } [POST] *) - (*************************************) - (* | *) - (* e2 *) - (* | *) - (* . <---. *) - (* / \ | *) - (* | e1 | *) - (* | \ / *) - (* | s *) - (* \ *) - (* | *) - (*************************************) - (* [PRE] e2 [ENV0] *) - (* ENV2 = ENV0 | ENV1 *) - (* [ENV2] e2 [ENV0] *) - (* [ENV0 | ENV1] e1; s [ENV1] *) - (* POST = ENV2 *) - (*************************************) - method! 
scoped_for_in_statement _loc (stmt: (Loc.t, Loc.t) Ast.Statement.ForIn.t) = - this#expecting_abrupt_completions (fun () -> - let continues = (AbruptCompletion.continue None)::possible_labeled_continues in - let open Ast.Statement.ForIn in - let { left; right; body; each = _ } = stmt in - ignore @@ this#expression right; - let env1 = this#fresh_ssa_env in - this#merge_self_ssa_env env1; - let env2 = this#ssa_env in - ignore @@ this#for_in_statement_lhs left; - let loop_completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement body; - ) in - (* continue *) - let loop_completion_state = this#run_to_completion (fun () -> - this#commit_abrupt_completion_matching (AbruptCompletion.mem continues) loop_completion_state - ) in - this#assert_ssa_env env1; - this#reset_ssa_env env2; - let for_in_completion_states = None, [loop_completion_state] in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states for_in_completion_states - ) in - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break None]) completion_state - ); - stmt - - (*************************************) - (* [PRE] for (e1 of e2) { s } [POST] *) - (*************************************) - (* | *) - (* e2 *) - (* | *) - (* . <---. *) - (* / \ | *) - (* | e1 | *) - (* | \ / *) - (* | s *) - (* \ *) - (* | *) - (*************************************) - (* [PRE] e2 [ENV0] *) - (* ENV2 = ENV0 | ENV1 *) - (* [ENV2] e2 [ENV0] *) - (* [ENV0 | ENV1] e1; s [ENV1] *) - (* POST = ENV2 *) - (*************************************) - method! scoped_for_of_statement _loc (stmt: (Loc.t, Loc.t) Ast.Statement.ForOf.t) = - this#expecting_abrupt_completions (fun () -> - let continues = (AbruptCompletion.continue None)::possible_labeled_continues in - let open Ast.Statement.ForOf in - let { left; right; body; async = _ } = stmt in - ignore @@ this#expression right; - let env1 = this#fresh_ssa_env in - this#merge_self_ssa_env env1; - let env2 = this#ssa_env in - ignore @@ this#for_of_statement_lhs left; - let loop_completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement body; - ) in - (* continue *) - let loop_completion_state = this#run_to_completion (fun () -> - this#commit_abrupt_completion_matching (AbruptCompletion.mem continues) loop_completion_state - ) in - this#assert_ssa_env env1; - this#reset_ssa_env env2; - let for_of_completion_states = None, [loop_completion_state] in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states for_of_completion_states - ) in - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break None]) completion_state - ); - stmt - - (***********************************************************) - (* [PRE] switch (e) { case e1: s1 ... case eN: sN } [POST] *) - (***********************************************************) - (* | *) - (* e *) - (* / *) - (* e1 *) - (* | \ *) - (* . s1 *) - (* | | *) - (* ei . *) - (* | \ | *) - (* . si *) - (* | | *) - (* eN . *) - (* | \ | *) - (* | sN *) - (* \ | *) - (* \| *) - (* | *) - (***********************************************************) - (* [PRE] e [ENV0] *) - (* ENV0' = empty *) - (* \forall i = 0..N-1: *) - (* [ENVi] ei+1 [ENVi+1] *) - (* [ENVi+1 | ENVi'] si+1 [ENVi+1'] *) - (* POST = ENVN | ENVN' *) - (***********************************************************) - method! 
switch _loc (switch: (Loc.t, Loc.t) Ast.Statement.Switch.t) = - this#expecting_abrupt_completions (fun () -> - let open Ast.Statement.Switch in - let { discriminant; cases } = switch in - ignore @@ this#expression discriminant; - let env, case_completion_states = List.fold_left (fun acc stuff -> - let _loc, case = stuff in - this#ssa_switch_case acc case - ) (this#empty_ssa_env, []) cases in - this#merge_self_ssa_env env; - (* In general, cases are non-exhaustive. TODO: optimize with `default`. *) - let switch_completion_states = None, case_completion_states in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states switch_completion_states - ) in - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break None]) completion_state - ); - switch - - method private ssa_switch_case (env, case_completion_states) (case: (Loc.t, Loc.t) Ast.Statement.Switch.Case.t') = - let open Ast.Statement.Switch.Case in - let { test; consequent } = case in - ignore @@ Flow_ast_mapper.map_opt this#expression test; - let env0 = this#ssa_env in - this#merge_ssa_env env0 env; - let case_completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement_list consequent - ) in - let env' = this#ssa_env in - this#reset_ssa_env env0; - (env', case_completion_state :: case_completion_states) - - (****************************************) - (* [PRE] try { s1 } catch { s2 } [POST] *) - (****************************************) - (* | *) - (* s1 ..~ *) - (* | | *) - (* | s2 *) - (* \./ *) - (* | *) - (****************************************) - (* [PRE] s1 [ENV1] *) - (* [HAVOC] s2 [ENV2 ] *) - (* POST = ENV1 | ENV2 *) - (****************************************) - (*******************************************************) - (* [PRE] try { s1 } catch { s2 } finally { s3 } [POST] *) - (*******************************************************) - (* | *) - (* s1 ..~ *) - (* | | *) - (* | s2 ..~ *) - (* \./ | *) - (* |______| *) - (* | *) - (* s3 *) - (* | *) - (*******************************************************) - (* [PRE] s1 [ENV1] *) - (* [HAVOC] s2 [ENV2 ] *) - (* [HAVOC] s3 [ENV3 ] *) - (* POST = ENV3 *) - (*******************************************************) - method! try_catch _loc (stmt: (Loc.t, Loc.t) Ast.Statement.Try.t) = - this#expecting_abrupt_completions (fun () -> - let open Ast.Statement.Try in - let { block = (loc, block); handler; finalizer } = stmt in - let try_completion_state = this#run_to_completion (fun () -> - ignore @@ this#block loc block - ) in - let env1 = this#ssa_env in - let catch_completion_state, env2 = match handler with - | Some (loc, clause) -> - (* NOTE: Havoc-ing the state when entering the handler is probably - overkill. We can be more precise but still correct by collecting all - possible writes in the try-block and merging them with the state when - entering the try-block. *) + this#merge_self_ssa_env env1; + expr) + + method! conditional _loc (expr : (L.t, L.t) Ast.Expression.Conditional.t) = + Ast.Expression.Conditional.( + let { test; consequent; alternate } = expr in + ignore @@ this#predicate_expression test; + let env0 = this#ssa_env in + ignore @@ this#expression consequent; + let env1 = this#ssa_env in + this#reset_ssa_env env0; + ignore @@ this#expression alternate; + this#merge_self_ssa_env env1; + expr) + + (* We also havoc state when entering functions and exiting calls. *) + method! 
lambda loc params body = + this#expecting_abrupt_completions (fun () -> + let env = this#ssa_env in + this#run + (fun () -> + this#havoc_uninitialized_ssa_env; + let completion_state = + this#run_to_completion (fun () -> super#lambda loc params body) + in + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [return; throw]) + completion_state) + ~finally:(fun () -> this#reset_ssa_env env)) + + method! call _loc (expr : (L.t, L.t) Ast.Expression.Call.t) = + Ast.Expression.Call.( + let { callee; targs = _; arguments } = expr in + ignore @@ this#expression callee; + ignore @@ ListUtils.ident_map this#expression_or_spread arguments; this#havoc_current_ssa_env; - let catch_completion_state = this#run_to_completion (fun () -> - ignore @@ this#catch_clause loc clause - ) in - catch_completion_state, this#ssa_env - | None -> None, this#empty_ssa_env - in - this#merge_ssa_env env1 env2; - let try_catch_completion_states = try_completion_state, [catch_completion_state] in - let completion_state = this#run_to_completion (fun () -> - this#merge_completion_states try_catch_completion_states - ) in - this#commit_abrupt_completion_matching AbruptCompletion.all completion_state; - begin match finalizer with - | Some (_loc, block) -> - (* NOTE: Havoc-ing the state when entering the finalizer is probably - overkill. We can be more precise but still correct by collecting - all possible writes in the handler and merging them with the state - when entering the handler (which in turn should already account for - any contributions by the try-block). *) - this#havoc_current_ssa_env; - ignore @@ this#block loc block - | None -> () - end; - this#from_completion completion_state - ); - stmt - - (* branching expressions *) - method! logical _loc (expr: (Loc.t, Loc.t) Ast.Expression.Logical.t) = - let open Ast.Expression.Logical in - let { operator = _; left; right } = expr in - ignore @@ this#expression left; - let env1 = this#ssa_env in - ignore @@ this#expression right; - this#merge_self_ssa_env env1; - expr - - method! conditional _loc (expr: (Loc.t, Loc.t) Ast.Expression.Conditional.t) = - let open Ast.Expression.Conditional in - let { test; consequent; alternate } = expr in - ignore @@ this#predicate_expression test; - let env0 = this#ssa_env in - ignore @@ this#expression consequent; - let env1 = this#ssa_env in - this#reset_ssa_env env0; - ignore @@ this#expression alternate; - this#merge_self_ssa_env env1; - expr - - (* We also havoc state when entering functions and exiting calls. *) - method! lambda loc params body = - this#expecting_abrupt_completions (fun () -> - let env = this#ssa_env in - this#run (fun () -> - this#havoc_uninitialized_ssa_env; - let completion_state = this#run_to_completion (fun () -> - super#lambda loc params body - ) in - this#commit_abrupt_completion_matching AbruptCompletion.(mem [return; throw]) completion_state - ) ~finally:(fun () -> - this#reset_ssa_env env - ) - ) - - method! call _loc (expr: (Loc.t, Loc.t) Ast.Expression.Call.t) = - let open Ast.Expression.Call in - let { callee; targs = _; arguments } = expr in - ignore @@ this#expression callee; - ignore @@ ListUtils.ident_map this#expression_or_spread arguments; - this#havoc_current_ssa_env; - expr - - (* Labeled statements handle labeled breaks, but also push labeled continues - that are expected to be handled by immediately nested loops. *) - method! 
labeled_statement _loc (stmt: (Loc.t, Loc.t) Ast.Statement.Labeled.t) = - this#expecting_abrupt_completions (fun () -> - let open Ast.Statement.Labeled in - let { label; body } = stmt in - possible_labeled_continues <- (AbruptCompletion.continue (Some label)) :: possible_labeled_continues; - let completion_state = this#run_to_completion (fun () -> - ignore @@ this#statement body; - ) in - possible_labeled_continues <- []; - this#commit_abrupt_completion_matching AbruptCompletion.(mem [break (Some label)]) completion_state - ); - stmt - - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let open Ast.Statement in - begin match stmt with - | _, While _ - | _, DoWhile _ - | _, For _ - | _, ForIn _ - | _, ForOf _ - | _, Labeled _ -> () - | _ -> possible_labeled_continues <- [] - end; - super#statement stmt - - (* Function declarations are hoisted to the top of a block, so that they may be considered - initialized before they are read. *) - method! statement_list (stmts: (Loc.t, Loc.t) Ast.Statement.t list) = - let open Ast.Statement in - let function_decls, other_stmts = List.partition (function - | (_, FunctionDeclaration _) -> true - | _ -> false - ) stmts in - ignore @@ super#statement_list (function_decls @ other_stmts); - stmts + expr) + method! new_ _loc (expr : (L.t, L.t) Ast.Expression.New.t) = + Ast.Expression.New.( + let { callee; targs = _; arguments; comments = _ } = expr in + ignore @@ this#expression callee; + ignore @@ ListUtils.ident_map this#expression_or_spread arguments; + this#havoc_current_ssa_env; + expr) + + (* Labeled statements handle labeled breaks, but also push labeled continues + that are expected to be handled by immediately nested loops. *) + method! labeled_statement _loc (stmt : (L.t, L.t) Ast.Statement.Labeled.t) = + this#expecting_abrupt_completions (fun () -> + Ast.Statement.Labeled.( + let { label; body } = stmt in + possible_labeled_continues <- + AbruptCompletion.continue (Some label) :: possible_labeled_continues; + let completion_state = + this#run_to_completion (fun () -> ignore @@ this#statement body) + in + possible_labeled_continues <- []; + this#commit_abrupt_completion_matching + AbruptCompletion.(mem [break (Some label)]) + completion_state)); + stmt + + method! statement (stmt : (L.t, L.t) Ast.Statement.t) = + Ast.Statement.( + begin + match stmt with + | (_, While _) + | (_, DoWhile _) + | (_, For _) + | (_, ForIn _) + | (_, ForOf _) + | (_, Labeled _) -> + () + | _ -> possible_labeled_continues <- [] + end; + super#statement stmt) + + (* Function declarations are hoisted to the top of a block, so that they may be considered + initialized before they are read. *) + method! 
statement_list (stmts : (L.t, L.t) Ast.Statement.t list) =
+      Ast.Statement.(
+        let (function_decls, other_stmts) =
+          List.partition
+            (function
+              | (_, FunctionDeclaration _) -> true
+              | _ -> false)
+            stmts
+        in
+        ignore @@ super#statement_list (function_decls @ other_stmts);
+        stmts)
+  end
+
+  let program_with_scope ?(ignore_toplevel = false) program =
+    let (loc, _, _) = program in
+    let ssa_walk = new ssa_builder in
+    let bindings =
+      if ignore_toplevel then
+        Bindings.empty
+      else
+        let hoist = new hoister in
+        hoist#eval hoist#program program
+    in
+    ignore @@ ssa_walk#with_bindings loc bindings ssa_walk#program program;
+    (ssa_walk#acc, ssa_walk#values)
+
+  let program program =
+    let (_, values) = program_with_scope ~ignore_toplevel:true program in
+    values
 end
 
-let program_with_scope ?(ignore_toplevel=false) program =
-  let loc, _, _ = program in
-  let ssa_walk = new ssa_builder in
-  let bindings =
-    if ignore_toplevel then Bindings.empty
-    else
-      let hoist = new hoister in
-      hoist#eval hoist#program program
-  in
-  ignore @@ ssa_walk#with_bindings loc bindings ssa_walk#program program;
-  ssa_walk#acc, ssa_walk#values
-
-let program program =
-  let _, values = program_with_scope ~ignore_toplevel:true program in
-  values
+module With_Loc = Make (Loc_sig.LocS) (Ssa_api.With_Loc) (Scope_builder.With_Loc)
+module With_ALoc = Make (Loc_sig.ALocS) (Ssa_api.With_ALoc) (Scope_builder.With_ALoc)
+include With_Loc
diff --git a/src/parsing/dune b/src/parsing/dune
new file mode 100644
index 00000000000..9213f0e8a5e
--- /dev/null
+++ b/src/parsing/dune
@@ -0,0 +1,15 @@
+(library
+ (name flow_parsing)
+ (wrapped false)
+ (libraries
+   flow_common
+   flow_monitor_rpc
+   flow_parser_utils
+   flow_procs
+   flow_shared_mem
+   flow_state_heaps_parsing
+   flow_state_readers
+   xx
+ )
+ (preprocess (pps lwt_ppx))
+)
diff --git a/src/parsing/parsing_service_js.ml b/src/parsing/parsing_service_js.ml
index 7f3e177fbc0..9cdc27674b0 100644
--- a/src/parsing/parsing_service_js.ml
+++ b/src/parsing/parsing_service_js.ml
@@ -1,17 +1,34 @@
 (**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
  *
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree.
*) module Ast = Flow_ast - open Utils_js open Sys_utils +type t = (Loc.t, Loc.t) Ast.program * File_sig.With_Loc.t + +type aloc_t = (ALoc.t, ALoc.t) Ast.program * File_sig.With_ALoc.t * ALoc.table option + +type parse_ok = + | Classic of t + | TypesFirst of t * aloc_t + +(* sig *) + +let basic = function + | Classic t -> t + | TypesFirst (t, _) -> t + +let sig_opt = function + | Classic _ -> None + | TypesFirst (_, t) -> Some t + type result = - | Parse_ok of (Loc.t, Loc.t) Ast.program * File_sig.t + | Parse_ok of parse_ok | Parse_fail of parse_failure | Parse_skip of parse_skip_reason @@ -22,9 +39,10 @@ and parse_skip_reason = and parse_failure = | Docblock_errors of docblock_error list | Parse_error of (Loc.t * Parse_error.t) - | File_sig_error of File_sig.error + | File_sig_error of File_sig.With_Loc.error and docblock_error = Loc.t * docblock_error_kind + and docblock_error_kind = | MultipleFlowAttributes | MultipleProvidesModuleAttributes @@ -34,28 +52,25 @@ and docblock_error_kind = (* results of parse job, returned by parse and reparse *) type results = { (* successfully parsed files *) - parse_ok: (File_sig.tolerable_error list) FilenameMap.t; - + parse_ok: File_sig.With_Loc.tolerable_error list FilenameMap.t; (* list of skipped files *) parse_skips: (File_key.t * Docblock.t) list; - (* list of files skipped due to an out of date hash *) parse_hash_mismatch_skips: FilenameSet.t; - (* list of failed files *) parse_fails: (File_key.t * Docblock.t * parse_failure) list; - (* set of unchanged files *) parse_unchanged: FilenameSet.t; } -let empty_result = { - parse_ok = FilenameMap.empty; - parse_skips = []; - parse_hash_mismatch_skips = FilenameSet.empty; - parse_fails = []; - parse_unchanged = FilenameSet.empty; -} +let empty_result = + { + parse_ok = FilenameMap.empty; + parse_skips = []; + parse_hash_mismatch_skips = FilenameSet.empty; + parse_fails = []; + parse_unchanged = FilenameSet.empty; + } (**************************** internal *********************************) (* TODO: add TypesForbidden (disables types even on files with @flow) and @@ -65,253 +80,296 @@ type types_mode = | TypesAllowed | TypesForbiddenByDefault +type parse_options = { + parse_fail: bool; + parse_types_mode: types_mode; + parse_use_strict: bool; + parse_prevent_munge: bool; + parse_module_ref_prefix: string option; + parse_facebook_fbt: string option; + parse_arch: Options.arch; + parse_abstract_locations: bool; +} + let parse_source_file ~fail ~types ~use_strict content file = - let parse_options = Some Parser_env.({ - (** - * Always parse ES proposal syntax. The user-facing config option to - * ignore/warn/enable them is handled during inference so that a clean error - * can be surfaced (rather than a more cryptic parse error). - *) - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = types; - use_strict; - }) in - let ast, parse_errors = - Parser_flow.program_file ~fail ~parse_options content (Some file) in + let parse_options = + Some + Parser_env. + { + (* + * Always parse ES proposal syntax. The user-facing config option to + * ignore/warn/enable them is handled during inference so that a clean error + * can be surfaced (rather than a more cryptic parse error). 
+ *) + enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types; + use_strict; + } + in + let (ast, parse_errors) = Parser_flow.program_file ~fail ~parse_options content (Some file) in if fail then assert (parse_errors = []); ast let parse_json_file ~fail content file = - let parse_options = Some Parser_env.({ - esproposal_class_instance_fields = false; - esproposal_class_static_fields = false; - esproposal_decorators = false; - esproposal_export_star_as = false; - esproposal_optional_chaining = false; - esproposal_nullish_coalescing = false; - types = true; - use_strict = false; - }) in - + let parse_options = + Some + Parser_env. + { + enums = false; + esproposal_class_instance_fields = false; + esproposal_class_static_fields = false; + esproposal_decorators = false; + esproposal_export_star_as = false; + esproposal_optional_chaining = false; + esproposal_nullish_coalescing = false; + types = true; + use_strict = false; + } + in (* parse the file as JSON, then munge the AST to convert from an object into a `module.exports = {...}` statement *) - let expr, parse_errors = - Parser_flow.json_file ~fail ~parse_options content (Some file) in + let (expr, parse_errors) = Parser_flow.json_file ~fail ~parse_options content (Some file) in if fail then assert (parse_errors = []); - let open Ast in - let loc_none = Loc.none in - let module_exports = loc_none, Expression.(Member { Member. - _object = loc_none, Identifier (loc_none, "module"); - property = Member.PropertyIdentifier (loc_none, "exports"); - computed = false; - }) in - let loc = fst expr in - let statement = - loc, Statement.Expression { Statement.Expression. - expression = loc, Expression.Assignment { Expression.Assignment. - operator = Expression.Assignment.Assign; - left = loc_none, Pattern.Expression module_exports; - right = expr; - }; - directive = None; - } - in - let comments = ([]: Loc.t Comment.t list) in - (loc, [statement], comments) + Ast.( + let loc_none = Loc.none in + let module_exports = + ( loc_none, + Expression.( + Member + { + Member._object = + (loc_none, Identifier (Flow_ast_utils.ident_of_source (loc_none, "module"))); + property = + Member.PropertyIdentifier (Flow_ast_utils.ident_of_source (loc_none, "exports")); + }) ) + in + let loc = fst expr in + let statement = + ( loc, + Statement.Expression + { + Statement.Expression.expression = + ( loc, + Expression.Assignment + { + Expression.Assignment.operator = None; + left = (loc_none, Pattern.Expression module_exports); + right = expr; + } ); + directive = None; + } ) + in + let comments = ([] : Loc.t Comment.t list) in + (loc, [statement], comments)) (* Avoid lexing unbounded in perverse cases *) let docblock_max_tokens = 10 let extract_docblock = - let open Docblock in - (* walks a list of words, returns a list of errors and the extracted info. + Docblock.( + (* walks a list of words, returns a list of errors and the extracted info. if @flow or @providesModule is found more than once, the first one is used and an error is returned. 
*) - let rec parse_attributes (errors, info) = function - | (loc, "@flow") :: (_, "strict") :: xs -> + let rec parse_attributes (errors, info) = function + | (loc, "@flow") :: (_, "strict") :: xs -> let acc = - if info.flow <> None then (loc, MultipleFlowAttributes)::errors, info - else errors, { info with flow = Some OptInStrict } in + if info.flow <> None then + ((loc, MultipleFlowAttributes) :: errors, info) + else + (errors, { info with flow = Some OptInStrict }) + in parse_attributes acc xs - | (loc, "@flow") :: (_, "strict-local") :: xs -> + | (loc, "@flow") :: (_, "strict-local") :: xs -> let acc = - if info.flow <> None then (loc, MultipleFlowAttributes)::errors, info - else errors, { info with flow = Some OptInStrictLocal } in + if info.flow <> None then + ((loc, MultipleFlowAttributes) :: errors, info) + else + (errors, { info with flow = Some OptInStrictLocal }) + in parse_attributes acc xs - | (loc, "@flow") :: (_, "weak") :: xs -> + | (loc, "@flow") :: (_, "weak") :: xs -> let acc = - if info.flow <> None then (loc, MultipleFlowAttributes)::errors, info - else errors, { info with flow = Some OptInWeak } in + if info.flow <> None then + ((loc, MultipleFlowAttributes) :: errors, info) + else + (errors, { info with flow = Some OptInWeak }) + in parse_attributes acc xs - | (loc, "@flow") :: xs -> + | (loc, "@flow") :: xs -> let acc = - if info.flow <> None then (loc, MultipleFlowAttributes)::errors, info - else errors, { info with flow = Some OptIn } in + if info.flow <> None then + ((loc, MultipleFlowAttributes) :: errors, info) + else + (errors, { info with flow = Some OptIn }) + in parse_attributes acc xs - | (loc, "@noflow") :: xs -> + | (loc, "@noflow") :: xs -> let acc = - if info.flow <> None then (loc, MultipleFlowAttributes)::errors, info - else errors, { info with flow = Some OptOut } in + if info.flow <> None then + ((loc, MultipleFlowAttributes) :: errors, info) + else + (errors, { info with flow = Some OptOut }) + in parse_attributes acc xs - | (loc, "@providesModule") :: (_, m) :: xs -> + | (loc, "@providesModule") :: (_, m) :: xs -> let acc = if info.providesModule <> None then - (loc, MultipleProvidesModuleAttributes)::errors, info + ((loc, MultipleProvidesModuleAttributes) :: errors, info) else - errors, { info with providesModule = Some m } + (errors, { info with providesModule = Some m }) in parse_attributes acc xs - | (_, "@preventMunge") :: xs -> + | (_, "@preventMunge") :: xs -> (* dupes are ok since they can only be truthy *) let preventMunge = Some true in parse_attributes (errors, { info with preventMunge }) xs - | (csx_loc, "@csx") :: xs -> + | (csx_loc, "@csx") :: xs -> let acc = if info.jsx <> None then - (csx_loc, MultipleJSXAttributes)::errors, info + ((csx_loc, MultipleJSXAttributes) :: errors, info) else - errors, { info with jsx = Some Csx_pragma } + (errors, { info with jsx = Some Csx_pragma }) in parse_attributes acc xs - | [jsx_loc, "@jsx"] -> (jsx_loc, InvalidJSXAttribute None)::errors, info - | (jsx_loc, "@jsx") :: (expr_loc, expr) :: xs -> + | [(jsx_loc, "@jsx")] -> ((jsx_loc, InvalidJSXAttribute None) :: errors, info) + | (jsx_loc, "@jsx") :: (expr_loc, expr) :: xs -> let acc = - if info.jsx <> None - then (jsx_loc, MultipleJSXAttributes)::errors, info - else begin + if info.jsx <> None then + ((jsx_loc, MultipleJSXAttributes) :: errors, info) + else (* The point of the padding is to make the parsed code line up * with the comment in the original source *) - let padding = (String.make Loc.(expr_loc.start.line - 1) '\n') ^ - (String.make 
Loc.(expr_loc.start.column) ' ') in + let padding = + String.make Loc.(expr_loc.start.line - 1) '\n' + ^ String.make Loc.(expr_loc.start.column) ' ' + in try - let (jsx_expr, _) = Parser_flow.jsx_pragma_expression - (padding ^ expr) - expr_loc.Loc.source in - errors, { info with jsx = Some (Jsx_pragma (expr, jsx_expr)) } + let (jsx_expr, _) = + Parser_flow.jsx_pragma_expression (padding ^ expr) expr_loc.Loc.source + in + (errors, { info with jsx = Some (Jsx_pragma (expr, jsx_expr)) }) with - | Parse_error.Error [] -> - (expr_loc, InvalidJSXAttribute None)::errors, info - | Parse_error.Error ((_, e)::_) -> - let first_error = Some (Parse_error.PP.error e) in - (expr_loc, InvalidJSXAttribute first_error)::errors, info - end in + | Parse_error.Error [] -> ((expr_loc, InvalidJSXAttribute None) :: errors, info) + | Parse_error.Error ((_, e) :: _) -> + let first_error = Some (Parse_error.PP.error e) in + ((expr_loc, InvalidJSXAttribute first_error) :: errors, info) + in parse_attributes acc xs - | (_, "@typeAssert") :: xs -> - parse_attributes (errors, { info with typeAssert = true }) xs - | _ :: xs -> - parse_attributes (errors, info) xs - | [] -> (errors, info) - in - - let attributes_rx = Str.regexp "[ \t\r\n\\*/]+" in - let lines_rx = Str.regexp "\\(\r\n\\|\n\\|\r\\)" in - let calc_end start s = - Str.full_split lines_rx s - |> List.fold_left Loc.(fun _end elem -> - match elem with - | Str.Delim delim -> - let line_incr = if delim = "\r" then 0 else 1 in - let column = 0 in - let line = _end.line + line_incr in - let offset = _end.offset + (String.length delim) in - { column; line; offset; } - | Str.Text text -> - let length = String.length text in - let column = _end.column + length in - let offset = _end.offset + length in - { _end with column; offset; } - ) start in - let split loc s = - (* Need to add 2 characters for the start of the comment *) - let start = Loc.({ loc.start with - column = loc.start.column + 2; - offset = loc.start.offset + 2; - }) in - Str.full_split attributes_rx s - |> List.fold_left (fun (start, attributes) elem -> - match elem with - | Str.Delim s -> - (calc_end start s, attributes) - | Str.Text s -> - let _end = calc_end start s in - (_end, Loc.({loc with start; _end; }, s)::attributes) - ) (start, []) - |> snd - |> List.rev - - in - - let string_of_comment = function - | (loc, Ast.Comment.Block s) - | (loc, Ast.Comment.Line s) - -> loc, s - in - - let map_n = - let rec helper f remaining acc = function - | [] -> List.rev acc - | hd::rest -> - if remaining <= 0 then List.rev acc - else helper f (remaining - 1) ((f hd)::acc) rest + | (_, "@typeAssert") :: xs -> parse_attributes (errors, { info with typeAssert = true }) xs + | _ :: xs -> parse_attributes (errors, info) xs + | [] -> (errors, info) in - fun f n lst -> helper f n [] lst - in - - fun ~max_tokens filename content -> - (* Consume tokens in the file until we get a comment. This is a hack to - * support Nuclide, which needs 'use babel' as the first token due to - * contstraints with Atom (see https://github.com/atom/atom/issues/8416 for - * more context). At some point this should change back to consuming only - * the first token. 
*) - let lb = - try Sedlexing.Utf8.from_string content - with Sedlexing.MalFormed -> - Hh_logger.warn "File %s is malformed" (File_key.to_string filename); - Sedlexing.Utf8.from_string "" in - let env = - Lex_env.new_lex_env (Some filename) lb ~enable_types_in_comments:false in - let rec get_first_comment_contents ?(i=0) env = - if i < max_tokens then - let env, lexer_result = Lexer.token env in - match Lex_result.comments lexer_result with - | [] -> Token.( - (** - * Stop looking for docblocks if we see any tokens other than a - * string or a semicolon (`"use babel";` or `"use strict";`). - *) - match Lex_result.token lexer_result with - | T_STRING _ - | T_SEMICOLON - -> get_first_comment_contents ~i:(i + 1) env - | _ -> None - ) - | comments -> - Some (map_n string_of_comment (max_tokens - i) comments) - else None in - let info = - let filename_str = File_key.to_string filename in - if Filename.check_suffix filename_str Files.flow_ext - then { default_info with isDeclarationFile = true; } - else default_info in - match get_first_comment_contents env with - | Some comments -> - List.fold_left (fun acc (loc, s) -> - parse_attributes acc (split loc s) - ) ([], info) comments - | None -> [], info - -let parse_docblock - ~max_tokens file content -: docblock_error list * Docblock.t = + let attributes_rx = Str.regexp "[ \t\r\n\\*/]+" in + let lines_rx = Str.regexp "\\(\r\n\\|\n\\|\r\\)" in + let calc_end start s = + Str.full_split lines_rx s + |> List.fold_left + Loc.( + fun _end elem -> + match elem with + | Str.Delim delim -> + let line_incr = + if delim = "\r" then + 0 + else + 1 + in + let column = 0 in + let line = _end.line + line_incr in + { column; line } + | Str.Text text -> + let length = String.length text in + let column = _end.column + length in + { _end with column }) + start + in + let split loc s = + (* Need to add 2 characters for the start of the comment *) + let start = Loc.{ loc.start with column = loc.start.column + 2 } in + Str.full_split attributes_rx s + |> List.fold_left + (fun (start, attributes) elem -> + match elem with + | Str.Delim s -> (calc_end start s, attributes) + | Str.Text s -> + let _end = calc_end start s in + (_end, Loc.({ loc with start; _end }, s) :: attributes)) + (start, []) + |> snd + |> List.rev + in + let string_of_comment = function + | (loc, Ast.Comment.Block s) + | (loc, Ast.Comment.Line s) -> + (loc, s) + in + let map_n = + let rec helper f remaining acc = function + | [] -> List.rev acc + | hd :: rest -> + if remaining <= 0 then + List.rev acc + else + helper f (remaining - 1) (f hd :: acc) rest + in + (fun f n lst -> helper f n [] lst) + in + fun ~max_tokens filename content -> + (* Consume tokens in the file until we get a comment. This is a hack to + * support Nuclide, which needs 'use babel' as the first token due to + * contstraints with Atom (see https://github.com/atom/atom/issues/8416 for + * more context). At some point this should change back to consuming only + * the first token. 
*) + let lb = + try Sedlexing.Utf8.from_string content + with Sedlexing.MalFormed -> + Hh_logger.warn "File %s is malformed" (File_key.to_string filename); + Sedlexing.Utf8.from_string "" + in + let env = Lex_env.new_lex_env (Some filename) lb ~enable_types_in_comments:false in + let rec get_first_comment_contents ?(i = 0) env = + if i < max_tokens then + let (env, lexer_result) = Flow_lexer.token env in + match Lex_result.comments lexer_result with + | [] -> + Token.( + (* + * Stop looking for docblocks if we see any tokens other than a + * string or a semicolon (`"use babel";` or `"use strict";`). + *) + (match Lex_result.token lexer_result with + | T_STRING _ + | T_SEMICOLON -> + get_first_comment_contents ~i:(i + 1) env + | _ -> None)) + | comments -> Some (map_n string_of_comment (max_tokens - i) comments) + else + None + in + let info = + let filename_str = File_key.to_string filename in + if Filename.check_suffix filename_str Files.flow_ext then + { default_info with isDeclarationFile = true } + else + default_info + in + match get_first_comment_contents env with + | Some comments -> + List.fold_left (fun acc (loc, s) -> parse_attributes acc (split loc s)) ([], info) comments + | None -> ([], info)) + +let parse_docblock ~max_tokens file content : docblock_error list * Docblock.t = match file with | File_key.ResourceFile _ - | File_key.JsonFile _ -> [], Docblock.default_info + | File_key.JsonFile _ -> + ([], Docblock.default_info) | _ -> extract_docblock ~max_tokens file content (* Allow types based on `types_mode`, using the @flow annotation in the @@ -321,55 +379,103 @@ let types_checked types_mode info = match types_mode with | TypesAllowed -> true | TypesForbiddenByDefault -> - match Docblock.flow info with + (match Docblock.flow info with | None - | Some Docblock.OptOut -> false + | Some Docblock.OptOut -> + false | Some Docblock.OptIn | Some Docblock.OptInStrict | Some Docblock.OptInStrictLocal - | Some Docblock.OptInWeak -> true - -let do_parse ?(fail=true) ~types_mode ~use_strict ~info ?(prevent_munge=false) content file = - try ( + | Some Docblock.OptInWeak -> + true) + +let do_parse ~parse_options ~info content file = + let { + parse_fail = fail; + parse_types_mode = types_mode; + parse_use_strict = use_strict; + parse_prevent_munge = prevent_munge; + parse_module_ref_prefix = module_ref_prefix; + parse_facebook_fbt = facebook_fbt; + parse_arch = arch; + parse_abstract_locations = abstract_locations; + } = + parse_options + in + try match file with | File_key.JsonFile _ -> let ast = parse_json_file ~fail content file in - Parse_ok (ast, File_sig.init) - | File_key.ResourceFile _ -> - Parse_skip Skip_resource_file + Parse_ok (Classic (ast, File_sig.With_Loc.init)) + | File_key.ResourceFile _ -> Parse_skip Skip_resource_file | _ -> (* either all=true or @flow pragma exists *) let types_checked = types_checked types_mode info in (* always parse types for .flow files -- NB: will _not_ be inferred *) let types = types_checked || Docblock.isDeclarationFile info in - if not types - then Parse_skip Skip_non_flow_file + if not types then + Parse_skip Skip_non_flow_file else let ast = parse_source_file ~fail ~types ~use_strict content file in (* Only calculate file sigs for files which will actually be inferred. * The only files which are parsed but not inferred are .flow files with * no @flow pragma. 
*) if types_checked then - let prevent_munge = Option.map2 - (Some prevent_munge) - (Docblock.preventMunge info) - (||) + let prevent_munge = + match Docblock.preventMunge info with + | Some db_prevent_munge -> db_prevent_munge + | None -> prevent_munge in - match Signature_builder.program ast with + (* NOTE: This is a temporary hack that makes the signature verifier ignore any static + property named `propTypes` in any class. It should be killed with fire or replaced with + something that only works for React classes, in which case we must make a corresponding + change in the type system that enforces that any such property is private. *) + let ignore_static_propTypes = true in + (* NOTE: This is a Facebook-specific hack that makes the signature verifier and generator + recognize and process a custom `keyMirror` function that makes an enum out of the keys + of an object. *) + let facebook_keyMirror = true in + match Signature_builder.program ast ~module_ref_prefix with | Ok signature -> - let errors, _ = Signature_builder.Signature.verify ?prevent_munge signature in - let verified_file_sig = File_sig.verified errors (snd signature) in - Parse_ok (ast, verified_file_sig) + let (errors, sig_ast) = + Signature_builder.Signature.verify_and_generate + ~prevent_munge + ~facebook_fbt + ~ignore_static_propTypes + ~facebook_keyMirror + signature + ast + in + let sig_ast = Ast_loc_utils.loc_to_aloc_mapper#program sig_ast in + let (aloc_table, sig_ast) = + if abstract_locations then + let (aloc_table, sig_ast) = Ast_loc_utils.abstractify_alocs file sig_ast in + (Some aloc_table, sig_ast) + else + (None, sig_ast) + in + let file_sig = File_sig.With_Loc.verified errors (snd signature) in + let sig_file_sig = + match File_sig.With_ALoc.program ~ast:sig_ast ~module_ref_prefix with + | Ok fs -> fs + | Error _ -> assert false + in + begin + match arch with + | Options.Classic -> Parse_ok (Classic (ast, file_sig)) + | Options.TypesFirst -> + Parse_ok (TypesFirst ((ast, file_sig), (sig_ast, sig_file_sig, aloc_table))) + end | Error e -> Parse_fail (File_sig_error e) else - Parse_ok (ast, File_sig.init)) + Parse_ok (Classic (ast, File_sig.With_Loc.init)) with - | Parse_error.Error (first_parse_error::_) -> - Parse_fail (Parse_error first_parse_error) + | Parse_error.Error (first_parse_error :: _) -> Parse_fail (Parse_error first_parse_error) | e -> - let s = Printexc.to_string e in - let loc = Loc.({ none with source = Some file }) in - let err = loc, Parse_error.Assertion s in + let e = Exception.wrap e in + let s = Exception.get_ctor_string e in + let loc = Loc.{ none with source = Some file } in + let err = (loc, Parse_error.Assertion s) in Parse_fail (Parse_error err) let hash_content content = @@ -377,20 +483,24 @@ let hash_content content = Xx.update state content; Xx.digest state -let does_content_match_file_hash file content = +let does_content_match_file_hash ~reader file content = let content_hash = hash_content content in - match Parsing_heaps.get_file_hash file with + match Parsing_heaps.Reader.get_file_hash ~reader file with | None -> false | Some hash -> hash = content_hash (* parse file, store AST to shared heap on success. * Add success/error info to passed accumulator. 
*) let reducer - ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch - ~max_header_tokens ~noflow ~parse_unchanged - parse_results - file -: results = + ~worker_mutator + ~reader + ~parse_options + ~skip_hash_mismatch + ~max_header_tokens + ~noflow + ~parse_unchanged + parse_results + file : results = (* It turns out that sometimes files appear and disappear very quickly. Just * because someone told us that this file exists and needs to be parsed, it * doesn't mean it actually still exists. If anything goes wrong reading this @@ -400,73 +510,82 @@ let reducer let filename_string = File_key.to_string file in try Some (cat filename_string) with e -> + let e = Exception.wrap e in prerr_endlinef "Parsing service failed to cat %s, so skipping it. Exception: %s" filename_string - (Printexc.to_string e); - None in + (Exception.to_string e); + None + in match content with | Some content -> - let new_hash = hash_content content in - (* If skip_hash_mismatch is true, then we're currently ensuring some files are parsed. That - * means we don't currently have the file's AST but we might have the file's hash in the - * non-oldified heap. What we want to avoid is parsing files which differ from the hash *) - if skip_hash_mismatch && Some new_hash <> Parsing_heaps.get_file_hash file - then - let parse_hash_mismatch_skips = - FilenameSet.add file parse_results.parse_hash_mismatch_skips - in - { parse_results with parse_hash_mismatch_skips } - else - let unchanged = - match Parsing_heaps.get_old_file_hash file with - | Some old_hash when old_hash = new_hash -> - (* If this optimization is turned off then still parse the file, even though it's - * unchanged *) - not parse_unchanged && - (* Let's disable this optimization for .flow files. Sometimes we still want to recheck - * foo.js.flow file because foo.js changed *) - not (File_key.check_suffix file Files.flow_ext) - | _ -> - (* The file has changed. Let's record the new hash *) - worker_mutator.Parsing_heaps.add_hash file new_hash; - false - in - if unchanged - then - let parse_unchanged = FilenameSet.add file parse_results.parse_unchanged in - { parse_results with parse_unchanged; } - else begin match parse_docblock ~max_tokens:max_header_tokens file content with - | [], info -> + let new_hash = hash_content content in + (* If skip_hash_mismatch is true, then we're currently ensuring some files are parsed. That + * means we don't currently have the file's AST but we might have the file's hash in the + * non-oldified heap. What we want to avoid is parsing files which differ from the hash *) + if + skip_hash_mismatch + && Some new_hash <> Parsing_heaps.Mutator_reader.get_file_hash ~reader file + then + let parse_hash_mismatch_skips = + FilenameSet.add file parse_results.parse_hash_mismatch_skips + in + { parse_results with parse_hash_mismatch_skips } + else + let unchanged = + match Parsing_heaps.Mutator_reader.get_old_file_hash ~reader file with + | Some old_hash when old_hash = new_hash -> + (* If this optimization is turned off then still parse the file, even though it's + * unchanged *) + not parse_unchanged + | _ -> + (* The file has changed. 
Let's record the new hash *) + worker_mutator.Parsing_heaps.add_hash file new_hash; + false + in + if unchanged then + let parse_unchanged = FilenameSet.add file parse_results.parse_unchanged in + { parse_results with parse_unchanged } + else ( + match parse_docblock ~max_tokens:max_header_tokens file content with + | ([], info) -> let info = - if noflow file then { info with Docblock.flow = Some Docblock.OptOut } - else info + if noflow file then + { info with Docblock.flow = Some Docblock.OptOut } + else + info in - begin match (do_parse ~types_mode ~use_strict ~info content file) with - | Parse_ok (ast, file_sig) -> - worker_mutator.Parsing_heaps.add_file file ast info file_sig; + begin + match do_parse ~parse_options ~info content file with + | Parse_ok parse_ok -> + let (ast, file_sig) = basic parse_ok in + let sig_opt = sig_opt parse_ok in + worker_mutator.Parsing_heaps.add_file file info (ast, file_sig) sig_opt; let parse_ok = - FilenameMap.add file file_sig.File_sig.tolerable_errors parse_results.parse_ok + FilenameMap.add + file + file_sig.File_sig.With_Loc.tolerable_errors + parse_results.parse_ok in - { parse_results with parse_ok; } - | Parse_fail converted -> + { parse_results with parse_ok } + | Parse_fail converted -> let fail = (file, info, converted) in let parse_fails = fail :: parse_results.parse_fails in - { parse_results with parse_fails; } - | Parse_skip Skip_non_flow_file - | Parse_skip Skip_resource_file -> + { parse_results with parse_fails } + | Parse_skip Skip_non_flow_file + | Parse_skip Skip_resource_file -> let parse_skips = (file, info) :: parse_results.parse_skips in - { parse_results with parse_skips; } + { parse_results with parse_skips } end - | docblock_errors, info -> + | (docblock_errors, info) -> let fail = (file, info, Docblock_errors docblock_errors) in let parse_fails = fail :: parse_results.parse_fails in - { parse_results with parse_fails; } - end + { parse_results with parse_fails } + ) | None -> - let info = Docblock.default_info in - let parse_skips = (file, info) :: parse_results.parse_skips in - { parse_results with parse_skips; } + let info = Docblock.default_info in + let parse_skips = (file, info) :: parse_results.parse_skips in + { parse_results with parse_skips } (* merge is just memberwise union/concat of results *) let merge r1 r2 = @@ -481,56 +600,64 @@ let merge r1 r2 = let opt_or_alternate opt alternate = match opt with - | Some x -> x - | None -> alternate + | Some x -> x + | None -> alternate (* types_mode and use_strict aren't special, they just happen to be the ones that needed to be overridden *) let get_defaults ~types_mode ~use_strict options = - let types_mode = opt_or_alternate - types_mode - (* force types when --all is set, but otherwise forbid them unless the file + let types_mode = + opt_or_alternate + types_mode + (* force types when --all is set, but otherwise forbid them unless the file has @flow in it. 
*) - (if Options.all options then TypesAllowed else TypesForbiddenByDefault) - in - let use_strict = opt_or_alternate - use_strict - (Options.modules_are_use_strict options) + ( if Options.all options then + TypesAllowed + else + TypesForbiddenByDefault ) in + let use_strict = opt_or_alternate use_strict (Options.modules_are_use_strict options) in let profile = Options.should_profile options in let max_header_tokens = Options.max_header_tokens options in - let noflow fn = - Files.is_untyped (Options.file_options options) (File_key.to_string fn) - in - types_mode, use_strict, profile, max_header_tokens, noflow + let noflow fn = Files.is_untyped (Options.file_options options) (File_key.to_string fn) in + (types_mode, use_strict, profile, max_header_tokens, noflow) (***************************** public ********************************) let progress_fn ~total ~start ~length:_ = let finished = start in - MonitorRPC.status_update - ServerStatus.(Parsing_progress { total = Some total; finished }) - -let next_of_filename_set ?(with_progress=false) workers filenames = - if with_progress - then MultiWorkerLwt.next ~progress_fn workers (FilenameSet.elements filenames) - else MultiWorkerLwt.next workers (FilenameSet.elements filenames) - -let parse ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch ~profile ~max_header_tokens - ~noflow ~parse_unchanged workers next -: results Lwt.t = + MonitorRPC.status_update ServerStatus.(Parsing_progress { total = Some total; finished }) + +let next_of_filename_set ?(with_progress = false) workers filenames = + if with_progress then + MultiWorkerLwt.next ~progress_fn workers (FilenameSet.elements filenames) + else + MultiWorkerLwt.next workers (FilenameSet.elements filenames) + +let parse + ~worker_mutator + ~reader + ~parse_options + ~skip_hash_mismatch + ~profile + ~max_header_tokens + ~noflow + ~parse_unchanged + workers + next : results Lwt.t = let t = Unix.gettimeofday () in let reducer = reducer - ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch - ~max_header_tokens ~noflow ~parse_unchanged + ~worker_mutator + ~reader + ~parse_options + ~skip_hash_mismatch + ~max_header_tokens + ~noflow + ~parse_unchanged in - let%lwt results = MultiWorkerLwt.call - workers - ~job: (List.fold_left reducer) - ~neutral: empty_result - ~merge - ~next + let%lwt results = + MultiWorkerLwt.call workers ~job:(List.fold_left reducer) ~neutral:empty_result ~merge ~next in if profile then let t2 = Unix.gettimeofday () in @@ -539,31 +666,57 @@ let parse ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch ~profile ~ let mismatch_count = FilenameSet.cardinal results.parse_hash_mismatch_skips in let fail_count = List.length results.parse_fails in let unchanged_count = FilenameSet.cardinal results.parse_unchanged in - Hh_logger.info "parsed %d files (%d ok, %d skipped, %d bad hashes, %d failed, %d unchanged) in %f" + Hh_logger.info + "parsed %d files (%d ok, %d skipped, %d bad hashes, %d failed, %d unchanged) in %f" (ok_count + skip_count + mismatch_count + fail_count) - ok_count skip_count mismatch_count fail_count unchanged_count + ok_count + skip_count + mismatch_count + fail_count + unchanged_count (t2 -. 
t) - else (); + else + (); Lwt.return results let reparse - ~transaction ~types_mode ~use_strict ~profile ~max_header_tokens ~noflow - ~parse_unchanged ~with_progress ~workers ~modified:files ~deleted = + ~transaction + ~reader + ~parse_options + ~profile + ~max_header_tokens + ~noflow + ~parse_unchanged + ~with_progress + ~workers + ~modified:files + ~deleted = (* save old parsing info for files *) let all_files = FilenameSet.union files deleted in - let master_mutator, worker_mutator = Parsing_heaps.Reparse_mutator.create transaction all_files in + let (master_mutator, worker_mutator) = + Parsing_heaps.Reparse_mutator.create transaction all_files + in let next = next_of_filename_set ?with_progress workers files in let%lwt results = - parse ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch:false ~profile - ~max_header_tokens ~noflow ~parse_unchanged workers next + parse + ~worker_mutator + ~reader + ~parse_options + ~skip_hash_mismatch:false + ~profile + ~max_header_tokens + ~noflow + ~parse_unchanged + workers + next in let modified = results.parse_ok |> FilenameMap.keys |> FilenameSet.of_list in - let modified = List.fold_left (fun acc (fail, _, _) -> - FilenameSet.add fail acc - ) modified results.parse_fails in - let modified = List.fold_left (fun acc (skip, _) -> - FilenameSet.add skip acc - ) modified results.parse_skips in + let modified = + List.fold_left (fun acc (fail, _, _) -> FilenameSet.add fail acc) modified results.parse_fails + in + let modified = + List.fold_left (fun acc (skip, _) -> FilenameSet.add skip acc) modified results.parse_skips + in let modified = FilenameSet.union modified results.parse_hash_mismatch_skips in SharedMem_js.collect `gentle; let unchanged = FilenameSet.diff files modified in @@ -571,33 +724,86 @@ let reparse Parsing_heaps.Reparse_mutator.revive_files master_mutator unchanged; Lwt.return (modified, results) -let parse_with_defaults ?types_mode ?use_strict options workers next = - let types_mode, use_strict, profile, max_header_tokens, noflow = +let make_parse_options_internal + ?(fail = true) ?(types_mode = TypesAllowed) ?use_strict ~docblock options = + let use_strict = + match use_strict with + | Some use_strict -> use_strict + | None -> Options.modules_are_use_strict options + in + let module_ref_prefix = Options.haste_module_ref_prefix options in + let facebook_fbt = Options.facebook_fbt options in + let arch = Options.arch options in + let abstract_locations = Options.abstract_locations options in + let prevent_munge = + let default = not (Options.should_munge_underscores options) in + match docblock with + | Some docblock -> Option.value (Docblock.preventMunge docblock) ~default + | None -> default + in + { + parse_fail = fail; + parse_types_mode = types_mode; + parse_use_strict = use_strict; + parse_prevent_munge = prevent_munge; + parse_module_ref_prefix = module_ref_prefix; + parse_facebook_fbt = facebook_fbt; + parse_arch = arch; + parse_abstract_locations = abstract_locations; + } + +let make_parse_options ?fail ?types_mode ?use_strict docblock options = + make_parse_options_internal ?fail ?types_mode ?use_strict ~docblock:(Some docblock) options + +let parse_with_defaults ?types_mode ?use_strict ~reader options workers next = + let (types_mode, use_strict, profile, max_header_tokens, noflow) = get_defaults ~types_mode ~use_strict options in - let parse_unchanged = true in (* This isn't a recheck, so there shouldn't be any unchanged *) + let parse_options = + make_parse_options_internal ~fail:true ~use_strict ~types_mode 
~docblock:None options + in + let parse_unchanged = true in + (* This isn't a recheck, so there shouldn't be any unchanged *) let worker_mutator = Parsing_heaps.Parse_mutator.create () in parse - ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch:false - ~profile ~max_header_tokens ~noflow ~parse_unchanged - workers next + ~worker_mutator + ~reader + ~parse_options + ~skip_hash_mismatch:false + ~profile + ~max_header_tokens + ~noflow + ~parse_unchanged + workers + next let reparse_with_defaults - ~transaction ?types_mode ?use_strict ?with_progress - ~workers ~modified ~deleted options = - let types_mode, use_strict, profile, max_header_tokens, noflow = + ~transaction ~reader ?types_mode ?use_strict ?with_progress ~workers ~modified ~deleted options + = + let (types_mode, use_strict, profile, max_header_tokens, noflow) = get_defaults ~types_mode ~use_strict options in - let parse_unchanged = false in (* We're rechecking, so let's skip files which haven't changed *) + let parse_unchanged = false in + (* We're rechecking, so let's skip files which haven't changed *) + let parse_options = make_parse_options_internal ~types_mode ~use_strict ~docblock:None options in reparse - ~transaction ~types_mode ~use_strict ~profile ~max_header_tokens ~noflow - ~parse_unchanged ~with_progress ~workers ~modified ~deleted + ~transaction + ~reader + ~parse_options + ~profile + ~max_header_tokens + ~noflow + ~parse_unchanged + ~with_progress + ~workers + ~modified + ~deleted (* ensure_parsed takes a set of files, finds the files which haven't been parsed, and parses them. * Any not-yet-parsed files who's on-disk contents don't match their already-known hash are skipped * and returned to the caller. *) -let ensure_parsed options workers files = - let types_mode, use_strict, profile, max_header_tokens, noflow = +let ensure_parsed ~reader options workers files = + let (types_mode, use_strict, profile, max_header_tokens, noflow) = get_defaults ~types_mode:None ~use_strict:None options in (* We want to parse unchanged files, since this is our first time parsing them *) @@ -605,31 +811,36 @@ let ensure_parsed options workers files = (* We're not replacing any info, so there's nothing to roll back. 
That means we can just use the * simle Parse_mutator rather than the rollback-able Reparse_mutator *) let worker_mutator = Parsing_heaps.Parse_mutator.create () in - let progress_fn ~total ~start ~length:_ = MonitorRPC.status_update ServerStatus.(Parsing_progress { total = Some total; finished = start }) in - - let%lwt files_missing_asts = MultiWorkerLwt.call workers - ~job:(List.fold_left (fun acc fn -> - if Parsing_heaps.has_ast fn - then acc - else FilenameSet.add fn acc - )) - ~merge:FilenameSet.union - ~neutral:FilenameSet.empty - ~next:(MultiWorkerLwt.next workers (FilenameSet.elements files)) - in - - let next = - MultiWorkerLwt.next ~progress_fn workers (FilenameSet.elements files_missing_asts) + let%lwt files_missing_asts = + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun acc fn -> + if Parsing_heaps.Mutator_reader.has_ast ~reader fn then + acc + else + FilenameSet.add fn acc)) + ~merge:FilenameSet.union + ~neutral:FilenameSet.empty + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements files)) in - - let%lwt results = parse - ~worker_mutator ~types_mode ~use_strict ~skip_hash_mismatch:true - ~profile ~max_header_tokens ~noflow ~parse_unchanged - workers next + let next = MultiWorkerLwt.next ~progress_fn workers (FilenameSet.elements files_missing_asts) in + let parse_options = make_parse_options_internal ~types_mode ~use_strict ~docblock:None options in + let%lwt results = + parse + ~worker_mutator + ~reader + ~parse_options + ~skip_hash_mismatch:true + ~profile + ~max_header_tokens + ~noflow + ~parse_unchanged + workers + next in - Lwt.return results.parse_hash_mismatch_skips diff --git a/src/parsing/parsing_service_js.mli b/src/parsing/parsing_service_js.mli index 4dcb063d96f..04b00253655 100644 --- a/src/parsing/parsing_service_js.mli +++ b/src/parsing/parsing_service_js.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -11,9 +11,21 @@ type types_mode = | TypesAllowed | TypesForbiddenByDefault +type t = (Loc.t, Loc.t) Flow_ast.program * File_sig.With_Loc.t + +type aloc_t = (ALoc.t, ALoc.t) Flow_ast.program * File_sig.With_ALoc.t * ALoc.table option + +type parse_ok = + | Classic of t + | TypesFirst of t * aloc_t + +(* sig *) + +val basic : parse_ok -> t + (* result of individual parse *) type result = - | Parse_ok of (Loc.t, Loc.t) Flow_ast.program * File_sig.t + | Parse_ok of parse_ok | Parse_fail of parse_failure | Parse_skip of parse_skip_reason @@ -24,9 +36,10 @@ and parse_skip_reason = and parse_failure = | Docblock_errors of docblock_error list | Parse_error of (Loc.t * Parse_error.t) - | File_sig_error of File_sig.error + | File_sig_error of File_sig.With_Loc.error and docblock_error = Loc.t * docblock_error_kind + and docblock_error_kind = | MultipleFlowAttributes | MultipleProvidesModuleAttributes @@ -36,52 +49,71 @@ and docblock_error_kind = (* results of parse job, returned by parse and reparse *) type results = { (* successfully parsed files *) - parse_ok: (File_sig.tolerable_error list) FilenameMap.t; - + parse_ok: File_sig.With_Loc.tolerable_error list FilenameMap.t; (* list of skipped files *) parse_skips: (File_key.t * Docblock.t) list; - (* list of files skipped due to an out of date hash *) parse_hash_mismatch_skips: FilenameSet.t; - (* list of failed files *) parse_fails: (File_key.t * Docblock.t * parse_failure) list; - (* set of unchanged files *) parse_unchanged: FilenameSet.t; } -val docblock_max_tokens: int +type parse_options = { + parse_fail: bool; + parse_types_mode: types_mode; + parse_use_strict: bool; + parse_prevent_munge: bool; + parse_module_ref_prefix: string option; + parse_facebook_fbt: string option; + parse_arch: Options.arch; + parse_abstract_locations: bool; +} + +val make_parse_options : + ?fail:bool -> + ?types_mode:types_mode -> + ?use_strict:bool -> + Docblock.t -> + Options.t -> + parse_options + +val docblock_max_tokens : int (* Use default values for the various settings that parse takes. 
Each one can be overridden individually *) -val parse_with_defaults: - ?types_mode: types_mode -> - ?use_strict: bool -> +val parse_with_defaults : + ?types_mode:types_mode -> + ?use_strict:bool -> + reader:Mutator_state_reader.t -> Options.t -> MultiWorkerLwt.worker list option -> File_key.t list Bucket.next -> results Lwt.t -val reparse_with_defaults: - transaction: Transaction.t -> - ?types_mode: types_mode -> - ?use_strict: bool -> - ?with_progress: bool -> - workers: MultiWorkerLwt.worker list option -> - modified: FilenameSet.t -> - deleted: FilenameSet.t -> +val reparse_with_defaults : + transaction:Transaction.t -> + reader:Mutator_state_reader.t -> + ?types_mode:types_mode -> + ?use_strict:bool -> + ?with_progress:bool -> + workers:MultiWorkerLwt.worker list option -> + modified:FilenameSet.t -> + deleted:FilenameSet.t -> Options.t -> (FilenameSet.t * results) Lwt.t -val ensure_parsed: +val ensure_parsed : + reader:Mutator_state_reader.t -> Options.t -> MultiWorkerLwt.worker list option -> FilenameSet.t -> FilenameSet.t Lwt.t -val parse_docblock: - max_tokens:int -> (* how many tokens to check in the beginning of the file *) +val parse_docblock : + max_tokens:int -> + (* how many tokens to check in the beginning of the file *) File_key.t -> string -> docblock_error list * Docblock.t @@ -93,21 +125,20 @@ val parse_json_file : Loc.t * (Loc.t * (Loc.t, Loc.t) Flow_ast.Statement.t') list * Loc.t Flow_ast.Comment.t list (* parse contents of a file *) -val do_parse: - ?fail:bool -> - types_mode: types_mode -> - use_strict: bool -> - info: Docblock.t -> - ?prevent_munge: bool -> - string -> (* contents of the file *) - File_key.t -> (* filename *) +val do_parse : + parse_options:parse_options -> + info:Docblock.t -> + string -> + (* contents of the file *) + File_key.t -> + (* filename *) result (* Utility to create the `next` parameter that `parse` requires *) -val next_of_filename_set: +val next_of_filename_set : ?with_progress:bool -> MultiWorkerLwt.worker list option -> FilenameSet.t -> File_key.t list Bucket.next -val does_content_match_file_hash: File_key.t -> string -> bool +val does_content_match_file_hash : reader:State_reader.t -> File_key.t -> string -> bool diff --git a/src/procs/dune b/src/procs/dune new file mode 100644 index 00000000000..833858f50b0 --- /dev/null +++ b/src/procs/dune @@ -0,0 +1,12 @@ +(library + (name flow_procs) + (wrapped false) + (libraries + core_kernel + flow_common_lwt + procs_procs + procs_bucket + lwt + ) + (preprocess (pps lwt_ppx)) +) \ No newline at end of file diff --git a/src/procs/multiWorkerLwt.ml b/src/procs/multiWorkerLwt.ml new file mode 100644 index 00000000000..e7611475055 --- /dev/null +++ b/src/procs/multiWorkerLwt.ml @@ -0,0 +1,141 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +module Hh_bucket = Bucket + +let report_canceled_callback = ref (fun ~total:_ ~finished:_ -> ()) + +let set_report_canceled_callback callback = report_canceled_callback := callback + +let report_canceled ~total ~finished = !report_canceled_callback ~total ~finished + +include MultiWorker.CallFunctor (struct + type 'a result = 'a Lwt.t + + let return = Lwt.return + + let multi_threaded_call + (type a b c) + workers + (job : WorkerController.worker_id * c -> a -> b) + (merge : WorkerController.worker_id * b -> c -> c) + (neutral : c) + (next : a Hh_bucket.next) = + let acc = ref neutral in + let merge_with_acc = + (* Why do we need a lock? Well, we don't really know what is inside the merge function, and if + * something makes Lwt yield then we could end up with a race condition. At the moment, the + * merge function doesn't use Lwt, but it might in the future. Locking and unlocking is cheap, + * so I'm pre-emptively adding this lock *) + let merge_mutex = Lwt_mutex.create () in + fun result -> + Lwt_mutex.with_lock merge_mutex (fun () -> + acc := merge result !acc; + Lwt.return_unit) + in + (* Our next() function may give us a job, say there are no more jobs left, or tell us to + * try again later. This signal is to wake up any workers who were told "try again later" + *) + let wait_signal = Lwt_condition.create () in + (* Returns None if there will never be any more jobs *) + let rec get_job () = + match next () with + | Hh_bucket.Job bucket -> Lwt.return (Some bucket) + | Hh_bucket.Done -> Lwt.return None + | Hh_bucket.Wait -> + let%lwt () = Lwt_condition.wait wait_signal in + get_job () + in + let rec run_worker worker = + let idle_start_wall_time = Unix.gettimeofday () in + let%lwt bucket = get_job () in + match bucket with + | None -> Lwt.return idle_start_wall_time + | Some bucket -> + Measure.sample "worker_idle" (Unix.gettimeofday () -. idle_start_wall_time); + let worker_id = WorkerController.worker_id worker in + let%lwt result = + WorkerControllerLwt.call worker (fun xl -> job (worker_id, neutral) xl) bucket + in + let%lwt () = merge_with_acc (WorkerController.worker_id worker, result) in + (* Wait means "ask again after a worker has finished and has merged its result". So now that + * we've merged our response, let's wake any other workers which are waiting for work *) + Lwt_condition.broadcast wait_signal (); + run_worker worker + in + let%lwt () = + let worker_threads = List.map run_worker workers in + try%lwt + let%lwt idle_start_times = LwtUtils.all worker_threads in + let idle_end_wall_time = Unix.gettimeofday () in + List.iter + (fun idle_start_wall_time -> + Measure.sample "worker_done" (idle_end_wall_time -. idle_start_wall_time)) + idle_start_times; + Lwt.return_unit + with Lwt.Canceled -> + let total = List.length worker_threads in + let finished = ref 0 in + let worker_threads = + List.map + (fun thread -> + (let%lwt _ = thread in + Lwt.return_unit) + [%lwt.finally + incr finished; + report_canceled ~total ~finished:!finished; + Lwt.return_unit]) + worker_threads + in + (* For most exceptions, we want to propagate the exception as soon as one worker throws. + * However, for Canceled we want to wait for all the workers to process the Canceled. + * Lwt.join will wait for every thread to finish or fail *) + (Lwt.join worker_threads) + [%lwt.finally + WorkerCancel.resume_workers (); + Lwt.return_unit] + in + Lwt.return !acc +end) + +exception MultiWorkersBusy + +(* Currently, MultiWorker calls may not be interleaved, which can happen with + * Lwt. 
Keep track of whether we have a call in flight and raise an exception if + * we do when another comes in. *) +let is_busy = ref false + +let call_with_worker_id workers ~job ~merge ~neutral ~next = + if !is_busy then + raise MultiWorkersBusy + else ( + is_busy := true; + (call workers ~job ~merge ~neutral ~next) + [%lwt.finally + is_busy := false; + Lwt.return_unit] + ) + +let call workers ~job ~merge ~neutral ~next = + let job (_worker_id, a) b = job a b in + let merge (_worker_id, a) b = merge a b in + call_with_worker_id workers ~job ~merge ~neutral ~next + +(* A separate abstract type from MultiWorker.worker forces users to always use MultiWorkerLwt *) +type worker = WorkerController.worker + +let next ?progress_fn ?max_size workers = + Hh_bucket.make + ~num_workers: + (match workers with + | Some w -> List.length w + | None -> 1) + ?progress_fn + ?max_size + +(* Wrap WorkerController.make to abstract out the worker type *) +let make = WorkerController.make diff --git a/src/procs/multiWorkerLwt.mli b/src/procs/multiWorkerLwt.mli new file mode 100644 index 00000000000..1ae59d884d6 --- /dev/null +++ b/src/procs/multiWorkerLwt.mli @@ -0,0 +1,40 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module Hh_bucket = Bucket +open Core_kernel + +type worker + +val call : + worker list option -> + job:('c -> 'a -> 'b) -> + merge:('b -> 'c -> 'c) -> + neutral:'c -> + next:'a Hh_bucket.next -> + 'c Lwt.t + +val next : + ?progress_fn:(total:int -> start:int -> length:int -> unit) -> + ?max_size:int -> + worker list option -> + 'a list -> + 'a list Hh_bucket.next + +(* Creates a pool of workers. *) +val make : + ?call_wrapper: + (* See docs in WorkerController.worker for call_wrapper. *) + WorkerController.call_wrapper -> + saved_state:'a -> + entry:'a WorkerController.entry -> + nbr_procs:int -> + gc_control:Gc.control -> + heap_handle:SharedMem.handle -> + worker list + +val set_report_canceled_callback : (total:int -> finished:int -> unit) -> unit diff --git a/src/procs/workerControllerLwt.ml b/src/procs/workerControllerLwt.ml new file mode 100644 index 00000000000..fe6009d4cf2 --- /dev/null +++ b/src/procs/workerControllerLwt.ml @@ -0,0 +1,111 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open WorkerController + +(* This is basically an lwt thread that writes a job to the worker, waits for the response, and + * then returns the result. + * + * The main complication is that I, glevi, found a perf regression when I used Marshal_tools_lwt + * to send the job to the worker. Here's my hypothesis: + * + * 1. On a machine with many CPUs (like 56) we create 56 threads to send a job to each worker. + * 2. Lwt attempts to write the jobs to the workers in parallel. + * 3. Each worker spends more time between getting the first byte and last byte + * 4. Something something this leads to more context switches for the worker + * 5. The worker spends more time on a job + * + * This is reinforced by the observation that the regression only happens as the number of workers + * grows. + * + * By switching from Marshal_tools_lwt.to_fd_with_preamble to Marshal_tools.to_fd_with_preamble, + * the issue seems to have disappeared. 
Reading from the worker didn't seem to trigger a perf issue + * in my testing, but there's really nothing more urgent than reading a response from a finished + * worker, so reading in a blocking manner is fine. + *) +let call w (type a b) (f : a -> b) (x : a) : b Lwt.t = + if is_killed w then Printf.ksprintf failwith "killed worker (%d)" (worker_id w); + mark_busy w; + + (* Spawn the slave, if not prespawned. *) + let ({ Daemon.pid = slave_pid; channels = (inc, outc) } as h) = spawn w in + let infd = Daemon.descr_of_in_channel inc in + let outfd = Daemon.descr_of_out_channel outc in + let infd_lwt = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true infd in + let outfd_lwt = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true outfd in + let request = wrap_request w f x in + (* Send the job *) + (let%lwt () = + try%lwt + (* Wait in an lwt-friendly manner for the worker to be writable (should be instant) *) + let%lwt () = Lwt_unix.wait_write outfd_lwt in + (* Write in a lwt-unfriendly, blocking manner to the worker *) + let _ = Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] outfd request in + Lwt.return_unit + with exn -> + let stack = Printexc.get_backtrace () in + Hh_logger.error "Failed to read response from work #%d\n%s" (worker_id w) stack; + + (* Failed to send the job to the worker. Is it because the worker is dead or is it + * something else? *) + let%lwt (pid, status) = Lwt_unix.waitpid [Unix.WNOHANG] slave_pid in + (match pid with + | 0 -> raise (Worker_failed_to_send_job (Other_send_job_failure exn)) + | _ -> raise (Worker_failed_to_send_job (Worker_already_exited status))) + in + (* Get the job's result *) + let%lwt res = + try%lwt + (* Wait in an lwt-friendly manner for the worker to finish the job *) + let%lwt () = Lwt_unix.wait_read infd_lwt in + (* Read in a lwt-unfriendly, blocking manner from the worker *) + (* Due to https://github.com/ocsigen/lwt/issues/564, annotation cannot go on let%let node *) + let data : b = Marshal_tools.from_fd_with_preamble infd in + let stats : Measure.record_data = Marshal_tools.from_fd_with_preamble infd in + Lwt.return (data, stats) + with + | Lwt.Canceled as exn -> + (* Worker is handling a job but we're cancelling *) + + (* Each worker might call this but that's ok *) + WorkerCancel.stop_workers (); + + (* Wait for the worker to finish cancelling *) + let%lwt () = Lwt_unix.wait_read infd_lwt in + (* Read the junk from the pipe *) + let _ = Marshal_tools.from_fd_with_preamble infd in + let _ = Marshal_tools.from_fd_with_preamble infd in + raise exn + | exn -> + let%lwt (pid, status) = Lwt_unix.waitpid [Unix.WNOHANG] slave_pid in + begin + match (pid, status) with + | (0, _) + | (_, Unix.WEXITED 0) -> + (* The slave is still running or exited normally. 
It's odd that we failed to read + * the response, so just raise that exception *) + raise exn + | (_, Unix.WEXITED i) when i = Exit_status.(exit_code Out_of_shared_memory) -> + raise SharedMem.Out_of_shared_memory + | (_, Unix.WEXITED i) -> + let () = Printf.eprintf "Subprocess(%d): fail %d" slave_pid i in + raise (Worker_failed (slave_pid, Worker_quit (Unix.WEXITED i))) + | (_, Unix.WSTOPPED i) -> + let () = Printf.eprintf "Subprocess(%d): stopped %d" slave_pid i in + raise (Worker_failed (slave_pid, Worker_quit (Unix.WSTOPPED i))) + | (_, Unix.WSIGNALED i) -> + let () = Printf.eprintf "Subprocess(%d): signaled %d" slave_pid i in + raise (Worker_failed (slave_pid, Worker_quit (Unix.WSIGNALED i))) + end + in + close w h; + Measure.merge (Measure.deserialize (snd res)); + Lwt.return (fst res)) + [%lwt.finally + (* No matter what, always mark worker as free when we're done *) + mark_free w; + Lwt.return_unit] diff --git a/src/procs/workerControllerLwt.mli b/src/procs/workerControllerLwt.mli new file mode 100644 index 00000000000..f8f2293e3ed --- /dev/null +++ b/src/procs/workerControllerLwt.mli @@ -0,0 +1,9 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Call in a sub-process *) +val call : WorkerController.worker -> ('a -> 'b) -> 'a -> 'b Lwt.t diff --git a/src/server/command_handler/commandHandler.ml b/src/server/command_handler/commandHandler.ml index 45f0f7c12c7..16c24f9f8c9 100644 --- a/src/server/command_handler/commandHandler.ml +++ b/src/server/command_handler/commandHandler.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
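(Editor's aside, not part of the patch: the pattern workerControllerLwt.ml relies on above, yield to Lwt until the worker pipe is ready and then marshal the payload in a single blocking call, can be condensed into the sketch below. fd and payload are assumed bindings; Lwt_unix.of_unix_file_descr, Lwt_unix.wait_write, and Marshal_tools.to_fd_with_preamble are the same helpers the patch itself uses.)

let write_job_when_ready fd payload =
  let fd_lwt = Lwt_unix.of_unix_file_descr ~blocking:false ~set_flags:true fd in
  (* Cooperate with other Lwt threads until the descriptor is writable... *)
  let%lwt () = Lwt_unix.wait_write fd_lwt in
  (* ...then write the whole request in one blocking call, per the performance
   * note in workerControllerLwt.ml, so the worker is not left waiting between
   * interleaved partial writes. *)
  let _ = Marshal_tools.to_fd_with_preamble ~flags:[Marshal.Closures] fd payload in
  Lwt.return_unit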
@@ -11,286 +11,430 @@ open Utils_js open Lsp let status_log errors = - if Errors.ErrorSet.is_empty errors - then Hh_logger.info "Status: OK" - else Hh_logger.info "Status: Error"; + if Errors.ConcreteLocPrintableErrorSet.is_empty errors then + Hh_logger.info "Status: OK" + else + Hh_logger.info "Status: Error"; flush stdout -let convert_errors ~errors ~warnings = - if Errors.ErrorSet.is_empty errors && Errors.ErrorSet.is_empty warnings then +let convert_errors ~errors ~warnings ~suppressed_errors = + if + Errors.ConcreteLocPrintableErrorSet.is_empty errors + && Errors.ConcreteLocPrintableErrorSet.is_empty warnings + && suppressed_errors = [] + then ServerProt.Response.NO_ERRORS else - ServerProt.Response.ERRORS {errors; warnings} + ServerProt.Response.ERRORS { errors; warnings; suppressed_errors } -let get_status genv env client_root = - let server_root = Options.root genv.options in - let lazy_stats = Rechecker.get_lazy_stats genv env in +let get_status ~reader genv env client_root = + let options = genv.ServerEnv.options in + let server_root = Options.root options in + let lazy_stats = Rechecker.get_lazy_stats ~options env in let status_response = - if server_root <> client_root then begin - ServerProt.Response.DIRECTORY_MISMATCH { - ServerProt.Response.server=server_root; - ServerProt.Response.client=client_root - } - end else begin + if server_root <> client_root then + ServerProt.Response.DIRECTORY_MISMATCH + { ServerProt.Response.server = server_root; ServerProt.Response.client = client_root } + else (* collate errors by origin *) - let errors, warnings, _ = ErrorCollator.get env in - let warnings = if Options.should_include_warnings genv.options - then warnings - else Errors.ErrorSet.empty + let (errors, warnings, suppressed_errors) = ErrorCollator.get ~reader ~options env in + let warnings = + if Options.should_include_warnings options then + warnings + else + Errors.ConcreteLocPrintableErrorSet.empty + in + let suppressed_errors = + if Options.include_suppressions options then + suppressed_errors + else + [] in - (* TODO: check status.directory *) status_log errors; FlowEventLogger.status_response - ~num_errors:(Errors.ErrorSet.cardinal errors); - convert_errors errors warnings - end + ~num_errors:(Errors.ConcreteLocPrintableErrorSet.cardinal errors); + convert_errors ~errors ~warnings ~suppressed_errors in - status_response, lazy_stats + (status_response, lazy_stats) -let autocomplete ~options ~workers ~env ~profiling file_input = - let path, content = match file_input with +let autocomplete ~trigger_character ~reader ~options ~env ~profiling file_input = + let (path, content) = + match file_input with | File_input.FileName _ -> failwith "Not implemented" - | File_input.FileContent (_, content) -> - File_input.filename_of_file_input file_input, content + | File_input.FileContent (_, content) -> (File_input.filename_of_file_input file_input, content) in - let state = Autocomplete_js.autocomplete_set_hooks () in + let state = Autocomplete_js.autocomplete_set_hooks trigger_character in let path = File_key.SourceFile path in let%lwt check_contents_result = - Types_js.basic_check_contents ~options ~workers ~env ~profiling content path + Types_js.basic_check_contents ~options ~env ~profiling content path in let%lwt autocomplete_result = - map_error ~f:(fun str -> str, None) check_contents_result - %>>= (fun (cx, info, file_sig, _) -> - Profiling_js.with_timer_lwt profiling ~timer:"GetResults" ~f:(fun () -> + map_error ~f:(fun str -> (str, None)) check_contents_result + %>>= fun (cx, 
info, file_sig, tast) -> + Profiling_js.with_timer_lwt profiling ~timer:"GetResults" ~f:(fun () -> try_with_json (fun () -> - Lwt.return (AutocompleteService_js.autocomplete_get_results cx file_sig state info) - ) - ) - ) + Lwt.return + (AutocompleteService_js.autocomplete_get_results + ~reader + cx + file_sig + tast + state + trigger_character + info))) in - let results, json_data_to_log = split_result autocomplete_result in + let (results, json_data_to_log) = split_result autocomplete_result in Autocomplete_js.autocomplete_unset_hooks (); Lwt.return (results, json_data_to_log) -let check_file ~options ~workers ~env ~profiling ~force file_input = +let check_file ~options ~env ~profiling ~force file_input = let file = File_input.filename_of_file_input file_input in match file_input with | File_input.FileName _ -> failwith "Not implemented" | File_input.FileContent (_, content) -> - let should_check = - if force then - true - else - let (_, docblock) = Parsing_service_js.( + let should_check = + if force then + true + else + let (_, docblock) = + Parsing_service_js.( parse_docblock docblock_max_tokens (File_key.SourceFile file) content) - in - Docblock.is_flow docblock - in - if should_check then - let file = File_key.SourceFile file in - let%lwt _, errors, warnings = - Types_js.typecheck_contents ~options ~workers ~env ~profiling content file in - Lwt.return (convert_errors ~errors ~warnings) - else - Lwt.return (ServerProt.Response.NOT_COVERED) + Docblock.is_flow docblock + in + if should_check then + let file = File_key.SourceFile file in + let%lwt (_, errors, warnings) = + Types_js.typecheck_contents ~options ~env ~profiling content file + in + Lwt.return (convert_errors ~errors ~warnings ~suppressed_errors:[]) + else + Lwt.return ServerProt.Response.NOT_COVERED let infer_type - ~(options: Options.t) - ~(workers: MultiWorkerLwt.worker list option) - ~(env: ServerEnv.env ref) - ~(profiling: Profiling_js.running) - ((file_input, line, col, verbose, expand_aliases): - (File_input.t * int * int * Verbose.t option * bool)) - : ((Loc.t * Ty.t option, string) Core_result.t * Hh_json.json option) Lwt.t = + ~(options : Options.t) + ~(env : ServerEnv.env) + ~(profiling : Profiling_js.running) + ((file_input, line, col, verbose, expand_aliases, omit_targ_defaults) : + File_input.t * int * int * Verbose.t option * bool * bool) : + ((Loc.t * Ty.t option, string) Core_result.t * Hh_json.json option) Lwt.t = let file = File_input.filename_of_file_input file_input in let file = File_key.SourceFile file in let options = { options with Options.opt_verbose = verbose } in match File_input.content_of_file_input file_input with | Error e -> Lwt.return (Error e, None) | Ok content -> - let%lwt result = try_with_json (fun () -> - Type_info_service.type_at_pos ~options ~workers ~env ~profiling ~expand_aliases - file content line col - ) in + let%lwt result = + try_with_json (fun () -> + Type_info_service.type_at_pos + ~options + ~env + ~profiling + ~expand_aliases + ~omit_targ_defaults + file + content + line + col) + in Lwt.return (split_result result) -let dump_types ~options ~workers ~env ~profiling file_input = - let file = File_input.filename_of_file_input file_input in - let file = File_key.SourceFile file in +let insert_type + ~options + ~env + ~profiling + ~file_input + ~target + ~verbose + ~expand_aliases + ~omit_targ_defaults + ~location_is_strict + ~ambiguity_strategy = + let filename = File_input.filename_of_file_input file_input in + let file_key = File_key.SourceFile filename in + let options = { 
options with Options.opt_verbose = verbose } in File_input.content_of_file_input file_input - %>>= fun content -> - try_with begin fun () -> - Type_info_service.dump_types ~options ~workers ~env ~profiling file content - end + %>>= fun file_content -> + try_with (fun _ -> + let%lwt result = + Type_info_service.insert_type + ~options + ~env + ~profiling + ~file_key + ~file_content + ~target + ~expand_aliases + ~omit_targ_defaults + ~location_is_strict + ~ambiguity_strategy + in + Lwt.return result) + +let autofix_exports ~options ~env ~profiling ~input = + let filename = File_input.filename_of_file_input input in + let file_key = File_key.SourceFile filename in + File_input.content_of_file_input input + %>>= fun file_content -> + try_with (fun _ -> + let%lwt result = + Type_info_service.autofix_exports ~options ~env ~profiling ~file_key ~file_content + in + Lwt.return result) + +let collect_rage ~options ~reader ~env ~files = + let items = [] in + (* options *) + let data = Printf.sprintf "lazy_mode=%s\n" Options.(lazy_mode options |> lazy_mode_to_string) in + let items = ("options", data) :: items in + (* env: checked files *) + let data = + Printf.sprintf + "%s\n\n%s\n" + (CheckedSet.debug_counts_to_string env.checked_files) + (CheckedSet.debug_to_string ~limit:200 env.checked_files) + in + let items = ("env.checked_files", data) :: items in + (* env: dependency graph *) + let dependency_to_string (file, deps) = + let file = File_key.to_string file in + let deps = + Utils_js.FilenameSet.elements deps + |> Core_list.map ~f:File_key.to_string + |> ListUtils.first_upto_n 20 (fun t -> Some (Printf.sprintf " ...%d more" t)) + |> String.concat "," + in + file ^ ":" ^ deps ^ "\n" + in + let dependencies = + Dependency_info.all_dependency_graph env.ServerEnv.dependency_info + |> Utils_js.FilenameMap.bindings + |> Core_list.map ~f:dependency_to_string + |> ListUtils.first_upto_n 200 (fun t -> Some (Printf.sprintf "[shown 200/%d]\n" t)) + |> String.concat "" + in + let data = "DEPENDENCIES:\n" ^ dependencies in + let items = ("env.dependencies", data) :: items in + (* env: errors *) + let (errors, warnings, _) = ErrorCollator.get ~reader ~options env in + let json = + Errors.Json_output.json_of_errors_with_context + ~strip_root:None + ~stdin_file:None + ~suppressed_errors:[] + ~errors + ~warnings + () + in + let data = "ERRORS:\n" ^ Hh_json.json_to_multiline json in + let items = ("env.errors", data) :: items in + (* Checking if file hashes are up to date *) + let items = + Option.value_map files ~default:items ~f:(fun files -> + let buf = Buffer.create 1024 in + Printf.bprintf + buf + "Does the content on the disk match the most recent version of the file?\n\n"; + List.iter + (fun file -> + (* TODO - this isn't exactly right. It could be something else, right? *) + let file_key = File_key.SourceFile file in + let file_state = + if not (FilenameSet.mem file_key env.ServerEnv.files) then + "FILE NOT PARSED BY FLOW (likely ignored implicitly or explicitly)" + else + match Sys_utils.cat_or_failed file with + | None -> "ERROR! 
FAILED TO READ" + | Some content -> + if Parsing_service_js.does_content_match_file_hash ~reader file_key content then + "OK" + else + "HASH OUT OF DATE" + in + Printf.bprintf buf "%s: %s\n" file file_state) + files; + ("file hash check", Buffer.contents buf) :: items) + in + let items = + let buf = Buffer.create 127 in + let log str = + Buffer.add_string buf str; + Buffer.add_char buf '\n' + in + LoggingUtils.dump_server_options ~server_options:options ~log; + ("server_options", Buffer.contents buf) :: items + in + items -let coverage ~options ~workers ~env ~profiling ~force file_input = +let dump_types ~options ~env ~profiling file_input = let file = File_input.filename_of_file_input file_input in let file = File_key.SourceFile file in File_input.content_of_file_input file_input %>>= fun content -> - try_with begin fun () -> - Type_info_service.coverage ~options ~workers ~env ~profiling ~force file content - end - -let get_cycle ~env fn = - (* Re-calculate SCC *) - let parsed = !env.ServerEnv.files in - let dependency_graph = !env.ServerEnv.dependency_graph in - Lwt.return ( - let components = Sort_js.topsort ~roots:parsed dependency_graph in - - (* Get component for target file *) - let component = List.find (Nel.mem fn) components in - - (* Restrict dep graph to only in-cycle files *) - let subgraph = Nel.fold_left (fun acc f -> - Option.fold (FilenameMap.get f dependency_graph) ~init:acc ~f:(fun acc deps -> - let subdeps = FilenameSet.filter (fun f -> Nel.mem f component) deps in - if FilenameSet.is_empty subdeps - then acc - else FilenameMap.add f subdeps acc - ) - ) FilenameMap.empty component in - - (* Convert from map/set to lists for serialization to client. *) - let subgraph = FilenameMap.fold (fun f dep_fs acc -> - let f = File_key.to_string f in - let dep_fs = FilenameSet.fold (fun dep_f acc -> - (File_key.to_string dep_f)::acc - ) dep_fs [] in - (f, dep_fs)::acc - ) subgraph [] in + try_with (fun () -> Type_info_service.dump_types ~options ~env ~profiling file content) - Ok subgraph - ) - -let suggest ~options ~workers ~env ~profiling file_input = - let file = File_input.filename_of_file_input file_input in - let file = File_key.SourceFile file in - File_input.content_of_file_input file_input - %>>= fun content -> try_with (fun _ -> - let%lwt result = - Type_info_service.suggest ~options ~workers ~env ~profiling file content +let coverage ~options ~env ~profiling ~force ~trust file_input = + if Options.trust_mode options = Options.NoTrust && trust then + Error + "Coverage cannot be run in trust mode if the server is not in trust mode. \n\nRestart the Flow server with --trust-mode=check' to enable this command." + |> Lwt.return + else + let file = File_input.filename_of_file_input file_input in + let file = File_key.SourceFile file in + File_input.content_of_file_input file_input + %>>= fun content -> + try_with (fun () -> + Type_info_service.coverage ~options ~env ~profiling ~force ~trust file content) + +let batch_coverage ~options ~env ~trust ~batch = + if Options.trust_mode options = Options.NoTrust && trust then + Error + "Batch Coverage cannot be run in trust mode if the server is not in trust mode. \n\nRestart the Flow server with --trust-mode=check' to enable this command." + |> Lwt.return + else if Options.lazy_mode options <> Options.NON_LAZY_MODE then + Error + "Batch coverage cannot be run in lazy mode.\n\nRestart the Flow server with '--lazy-mode none' to enable this command." 
+ |> Lwt.return + else + let is_checked key = CheckedSet.mem key env.checked_files in + let filter key = Core_list.exists ~f:(fun elt -> Files.is_prefix elt key) batch in + let coverage_map = + FilenameMap.filter + (fun key _ -> is_checked key && File_key.to_string key |> filter) + env.coverage in - match result with - | Ok (tc_errors, tc_warnings, suggest_warnings, annotated_program) -> - Lwt.return (Ok (ServerProt.Response.Suggest_Ok { - tc_errors; tc_warnings; suggest_warnings; annotated_program - })) - | Error errors -> - Lwt.return (Ok (ServerProt.Response.Suggest_Error errors)) - ) - -(* NOTE: currently, not only returns list of annotations, but also writes a - timestamped file with annotations *) -let port = Port_service_js.port_files + let response = + FilenameMap.fold (fun key coverage -> List.cons (key, coverage)) coverage_map [] + in + Ok response |> Lwt.return -let find_module ~options (moduleref, filename) = - let file = File_key.SourceFile filename in - let loc = {Loc.none with Loc.source = Some file} in - let module_name = Module_js.imported_module - ~options ~node_modules_containers:!Files.node_modules_containers - file (Nel.one loc) moduleref in - Module_heaps.get_file ~audit:Expensive.warn module_name - -let gen_flow_files ~options env files = - let errors, warnings, _ = ErrorCollator.get env in - let warnings = if Options.should_include_warnings options - then warnings - else Errors.ErrorSet.empty +let serialize_graph graph = + (* Convert from map/set to lists for serialization to client. *) + FilenameMap.fold + (fun f dep_fs acc -> + let f = File_key.to_string f in + let dep_fs = FilenameSet.fold (fun dep_f acc -> File_key.to_string dep_f :: acc) dep_fs [] in + (f, dep_fs) :: acc) + graph + [] + +let output_dependencies ~env root strip_root types_only outfile = + let strip_root = + if strip_root then + Files.relative_path root + else + fun x -> + x in - let result = if Errors.ErrorSet.is_empty errors - then begin - let (flow_files, non_flow_files, error) = - List.fold_left (fun (flow_files, non_flow_files, error) file -> - if error <> None then (flow_files, non_flow_files, error) else - match file with - | File_input.FileContent _ -> - let error_msg = "This command only works with file paths." 
in - let error = - Some (ServerProt.Response.GenFlowFiles_UnexpectedError error_msg) - in - (flow_files, non_flow_files, error) - | File_input.FileName fn -> - let file = File_key.SourceFile fn in - let checked = - let open Module_heaps in - match get_info file ~audit:Expensive.warn with - | Some info -> info.checked - | None -> false - in - if checked - then file::flow_files, non_flow_files, error - else flow_files, file::non_flow_files, error - ) ([], [], None) files - in - begin match error with - | Some e -> Error e - | None -> - try - let flow_file_cxs = List.map (fun file -> - let component = Nel.one file in - let { Merge_service.cx; _ } = Merge_service.merge_strict_context ~options component in - cx - ) flow_files in - - (* Non-@flow files *) - let result_contents = non_flow_files |> List.map (fun file -> - (File_key.to_string file, ServerProt.Response.GenFlowFiles_NonFlowFile) - ) in - - (* Codegen @flow files *) - let result_contents = List.fold_left2 (fun results file cx -> - let file_path = File_key.to_string file in - try - let code = FlowFileGen.flow_file cx in - (file_path, ServerProt.Response.GenFlowFiles_FlowFile code)::results - with exn -> - failwith (spf "%s: %s" file_path (Printexc.to_string exn)) - ) result_contents flow_files flow_file_cxs in - - Ok result_contents - with exn -> Error ( - ServerProt.Response.GenFlowFiles_UnexpectedError (Printexc.to_string exn) - ) - end - end else - Error (ServerProt.Response.GenFlowFiles_TypecheckError {errors; warnings}) + let dep_graph = + if types_only then + Dependency_info.dependency_graph + else + Dependency_info.all_dependency_graph in - result - -let convert_find_refs_result - (result: FindRefsTypes.find_refs_ok) - : ServerProt.Response.find_refs_success = - Option.map result ~f:begin fun (name, refs) -> - (name, List.map snd refs) - end - -let find_refs ~genv ~env ~profiling (file_input, line, col, global, multi_hop) = - let%lwt result, json = - FindRefs_js.find_refs ~genv ~env ~profiling ~file_input ~line ~col ~global ~multi_hop + let graph = serialize_graph (dep_graph env.ServerEnv.dependency_info) in + Hh_logger.info "printing dependency graph to %s\n" outfile; + let%lwt out = Lwt_io.open_file ~mode:Lwt_io.Output outfile in + let%lwt () = LwtUtils.output_graph out strip_root graph in + let%lwt () = Lwt_io.close out in + ok_unit |> Lwt.return + +let get_cycle ~env fn types_only = + (* Re-calculate SCC *) + let parsed = env.ServerEnv.files in + let dependency_info = env.ServerEnv.dependency_info in + let dependency_graph = + if types_only then + Dependency_info.dependency_graph dependency_info + else + Dependency_info.all_dependency_graph dependency_info in + Lwt.return + (Ok + (let components = Sort_js.topsort ~roots:parsed dependency_graph in + (* Get component for target file *) + let component = List.find (Nel.mem fn) components in + (* Restrict dep graph to only in-cycle files *) + Nel.fold_left + (fun acc f -> + Option.fold (FilenameMap.get f dependency_graph) ~init:acc ~f:(fun acc deps -> + let subdeps = FilenameSet.filter (fun f -> Nel.mem f component) deps in + if FilenameSet.is_empty subdeps then + acc + else + FilenameMap.add f subdeps acc)) + FilenameMap.empty + component + |> serialize_graph)) + +let suggest ~options ~env ~profiling file = + let file_name = File_input.filename_of_file_input file in + File_input.content_of_file_input file + %>>= fun file_content -> + try_with (fun _ -> + let%lwt result = Type_info_service.suggest ~options ~env ~profiling file_name file_content in + match result with + | Ok 
(tc_errors, tc_warnings, suggest_warnings, file_patch) -> + Lwt.return + (Ok + (ServerProt.Response.Suggest_Ok + { tc_errors; tc_warnings; suggest_warnings; file_patch })) + | Error errors -> Lwt.return (Ok (ServerProt.Response.Suggest_Error errors))) + +let find_module ~options ~reader (moduleref, filename) = + let file = File_key.SourceFile filename in + let loc = { Loc.none with Loc.source = Some file } in + let module_name = + Module_js.imported_module + ~options + ~reader:(Abstract_state_reader.State_reader reader) + ~node_modules_containers:!Files.node_modules_containers + file + (Nel.one (ALoc.of_loc loc)) + moduleref + in + Module_heaps.Reader.get_file ~reader ~audit:Expensive.warn module_name + +let convert_find_refs_result (result : FindRefsTypes.find_refs_ok) : + ServerProt.Response.find_refs_success = + Option.map result ~f:(fun (name, refs) -> (name, Core_list.map ~f:snd refs)) + +(* Find refs is a really weird command. Whereas other commands will cancel themselves if they find + * unchecked code, find refs will focus that code and keep chugging along. It may therefore change + * the env. Furthermore, it is written using a lot of `result`'s, which make it really hard to + * properly pass through the env. Therefore, it uses an `ServerEnv.env ref` instead of an + * `ServerEnv.env`. *) +let find_global_refs ~reader ~genv ~env ~profiling (file_input, line, col, multi_hop) = + let env = ref env in + let%lwt (result, dep_count) = + FindRefs_js.find_global_refs ~reader ~genv ~env ~profiling ~file_input ~line ~col ~multi_hop + in + let env = !env in let result = Core_result.map result ~f:convert_find_refs_result in - Lwt.return (result, json) + Lwt.return (env, result, dep_count) + +let find_local_refs ~reader ~options ~env ~profiling (file_input, line, col) = + FindRefs_js.find_local_refs ~reader ~options ~env ~profiling ~file_input ~line ~col + |> Lwt_result.map convert_find_refs_result (* This returns result, json_data_to_log, where json_data_to_log is the json data from * getdef_get_result which we end up using *) -let get_def ~options ~workers ~env ~profiling position = - GetDef_js.get_def ~options ~workers ~env ~profiling ~depth:0 position +let get_def ~options ~env ~profiling position = + GetDef_js.get_def ~options ~env ~profiling ~depth:0 position let module_name_of_string ~options module_name_str = let file_options = Options.file_options options in let path = Path.to_string (Path.make module_name_str) in - if Files.is_flow_file ~options:file_options path - then Modulename.Filename (File_key.SourceFile path) - else Modulename.String module_name_str + if Files.is_flow_file ~options:file_options path then + Modulename.Filename (File_key.SourceFile path) + else + Modulename.String module_name_str -let get_imports ~options module_names = +let get_imports ~options ~reader module_names = let add_to_results (map, non_flow) module_name_str = let module_name = module_name_of_string ~options module_name_str in - match Module_heaps.get_file ~audit:Expensive.warn module_name with + match Module_heaps.Reader.get_file ~reader ~audit:Expensive.warn module_name with | Some file -> (* We do not process all modules which are stored in our module * database. In case we do not process a module its requirements @@ -298,16 +442,22 @@ let get_imports ~options module_names = * client that these modules have not been processed. 
*) let { Module_heaps.checked; _ } = - Module_heaps.get_info_unsafe ~audit:Expensive.warn file in + Module_heaps.Reader.get_info_unsafe ~reader ~audit:Expensive.warn file + in if checked then let { Module_heaps.resolved_modules; _ } = - Module_heaps.get_resolved_requires_unsafe ~audit:Expensive.warn file in - let fsig = Parsing_heaps.get_file_sig_unsafe file in - let requires = File_sig.(require_loc_map fsig.module_sig) in - let mlocs = SMap.fold (fun mref locs acc -> - let m = SMap.find_unsafe mref resolved_modules in - Modulename.Map.add m locs acc - ) requires Modulename.Map.empty in + Module_heaps.Reader.get_resolved_requires_unsafe ~reader ~audit:Expensive.warn file + in + let fsig = Parsing_heaps.Reader.get_file_sig_unsafe ~reader file in + let requires = File_sig.With_Loc.(require_loc_map fsig.module_sig) in + let mlocs = + SMap.fold + (fun mref locs acc -> + let m = SMap.find_unsafe mref resolved_modules in + Modulename.Map.add m locs acc) + requires + Modulename.Map.empty + in (SMap.add module_name_str mlocs map, non_flow) else (map, SSet.add module_name_str non_flow) @@ -321,849 +471,1412 @@ let get_imports ~options module_names = * flow. *) List.fold_left add_to_results (SMap.empty, SSet.empty) module_names -let save_state ~saved_state_filename ~genv ~env = +let save_state ~saved_state_filename ~genv ~env ~profiling = try_with (fun () -> - let%lwt () = Saved_state.save ~saved_state_filename ~genv ~env:!env in - Lwt.return (Ok ()) - ) + let%lwt () = Saved_state.save ~saved_state_filename ~genv ~env ~profiling in + Lwt.return (Ok ())) -let handle_ephemeral_deferred_unsafe - genv env (request_id, { ServerProt.Request.client_logging_context=_; command; }) = - let env = ref env in - let respond msg = - MonitorRPC.respond_to_request ~request_id ~response:msg +let handle_autocomplete ~trigger_character ~reader ~options ~input ~profiling ~env = + let%lwt (result, json_data) = + autocomplete ~trigger_character ~reader ~options ~env ~profiling input + in + Lwt.return (ServerProt.Response.AUTOCOMPLETE result, json_data) + +let handle_autofix_exports ~options ~input ~profiling ~env = + let%lwt result = autofix_exports ~options ~env ~profiling ~input in + Lwt.return (ServerProt.Response.AUTOFIX_EXPORTS result, None) + +let handle_check_file ~options ~force ~input ~profiling ~env = + let%lwt response = check_file ~options ~env ~force ~profiling input in + Lwt.return (ServerProt.Response.CHECK_FILE response, None) + +let handle_coverage ~options ~force ~input ~trust ~profiling ~env = + let%lwt response = coverage ~options ~env ~profiling ~force ~trust input in + Lwt.return (ServerProt.Response.COVERAGE response, None) + +let handle_batch_coverage ~options ~profiling:_ ~env ~batch ~trust = + let%lwt response = batch_coverage ~options ~env ~batch ~trust in + Lwt.return (ServerProt.Response.BATCH_COVERAGE response, None) + +let handle_cycle ~fn ~types_only ~profiling:_ ~env = + let%lwt response = get_cycle ~env fn types_only in + Lwt.return (env, ServerProt.Response.CYCLE response, None) +let handle_dump_types ~options ~input ~profiling ~env = + let%lwt response = dump_types ~options ~env ~profiling input in + Lwt.return (ServerProt.Response.DUMP_TYPES response, None) + +let handle_find_module ~options ~reader ~moduleref ~filename ~profiling:_ ~env:_ = + let response = find_module ~options ~reader (moduleref, filename) in + Lwt.return (ServerProt.Response.FIND_MODULE response, None) + +let handle_find_refs ~reader ~genv ~filename ~line ~char ~global ~multi_hop ~profiling ~env = + let%lwt (env, 
result, dep_count) = + if global || multi_hop then + find_global_refs ~reader ~genv ~env ~profiling (filename, line, char, multi_hop) + else + let options = genv.ServerEnv.options in + let%lwt result = find_local_refs ~reader ~options ~env ~profiling (filename, line, char) in + Lwt.return (env, result, None) in - let options = genv.ServerEnv.options in - let workers = genv.ServerEnv.workers in - Hh_logger.debug "Request: %s" (ServerProt.Request.to_string command); - MonitorRPC.status_update ~event:ServerStatus.Handling_request_start; - let should_print_summary = Options.should_profile genv.options in - let%lwt profiling, json_data = - Profiling_js.with_profiling_lwt ~label:"Command" ~should_print_summary begin fun profiling -> - match command with - | ServerProt.Request.AUTOCOMPLETE fn -> - let%lwt result, json_data = autocomplete ~options ~workers ~env ~profiling fn in - ServerProt.Response.AUTOCOMPLETE result - |> respond; - Lwt.return json_data - | ServerProt.Request.CHECK_FILE (fn, verbose, force, include_warnings) -> - let options = { options with Options. - opt_verbose = verbose; - opt_include_warnings = options.Options.opt_include_warnings || include_warnings; - } in - let%lwt response = check_file ~options ~workers ~env ~force ~profiling fn in - ServerProt.Response.CHECK_FILE response - |> respond; - Lwt.return None - | ServerProt.Request.COVERAGE (fn, force) -> - let%lwt response = coverage ~options ~workers ~env ~profiling ~force fn in - ServerProt.Response.COVERAGE response - |> respond; - Lwt.return None - | ServerProt.Request.CYCLE fn -> - let file_options = Options.file_options options in - let fn = Files.filename_from_string ~options:file_options fn in - let%lwt response = get_cycle ~env fn in - ServerProt.Response.CYCLE response - |> respond; - Lwt.return None - | ServerProt.Request.DUMP_TYPES (fn) -> - let%lwt response = dump_types ~options ~workers ~env ~profiling fn in - ServerProt.Response.DUMP_TYPES response - |> respond; - Lwt.return None - | ServerProt.Request.FIND_MODULE (moduleref, filename) -> - ServerProt.Response.FIND_MODULE ( - find_module ~options (moduleref, filename): File_key.t option - ) |> respond; - Lwt.return None - | ServerProt.Request.FIND_REFS (fn, line, char, global, multi_hop) -> - let%lwt result, json_data = - find_refs ~genv ~env ~profiling (fn, line, char, global, multi_hop) in - ServerProt.Response.FIND_REFS result |> respond; - Lwt.return json_data - | ServerProt.Request.FORCE_RECHECK _ -> - failwith "force-recheck cannot be deferred" - | ServerProt.Request.GEN_FLOW_FILES (files, include_warnings) -> - let options = { options with Options. 
- opt_include_warnings = options.Options.opt_include_warnings || include_warnings; - } in - ServerProt.Response.GEN_FLOW_FILES ( - gen_flow_files ~options !env files: ServerProt.Response.gen_flow_files_response - ) |> respond; - Lwt.return None - | ServerProt.Request.GET_DEF (fn, line, char) -> - let%lwt result, json_data = get_def ~options ~workers ~env ~profiling (fn, line, char) in - ServerProt.Response.GET_DEF result - |> respond; - Lwt.return json_data - | ServerProt.Request.GET_IMPORTS module_names -> - ServerProt.Response.GET_IMPORTS ( - get_imports ~options module_names: ServerProt.Response.get_imports_response - ) |> respond; - Lwt.return None - | ServerProt.Request.INFER_TYPE (fn, line, char, verbose, expand_aliases) -> - let%lwt result, json_data = - infer_type ~options ~workers ~env ~profiling - (fn, line, char, verbose, expand_aliases) - in - ServerProt.Response.INFER_TYPE result - |> respond; - Lwt.return json_data - | ServerProt.Request.PORT (files) -> - ServerProt.Response.PORT (port files: ServerProt.Response.port_response) - |> respond; - Lwt.return None - | ServerProt.Request.REFACTOR (file_input, line, col, refactor_variant) -> - let open ServerProt.Response in - let%lwt result = - Refactor_js.refactor ~genv ~env ~profiling ~file_input ~line ~col ~refactor_variant - in - let result = - result - |> Core_result.map ~f:(Option.map ~f:(fun refactor_edits -> {refactor_edits})) - in - REFACTOR (result) - |> respond; - Lwt.return None - | ServerProt.Request.STATUS (client_root, include_warnings) -> - let genv = {genv with - options = let open Options in {genv.options with - opt_include_warnings = genv.options.opt_include_warnings || include_warnings - } - } in - let status_response, lazy_stats = get_status genv !env client_root in - respond (ServerProt.Response.STATUS {status_response; lazy_stats}); - begin match status_response with - | ServerProt.Response.DIRECTORY_MISMATCH {ServerProt.Response.server; client} -> - Hh_logger.fatal "Status: Error"; - Hh_logger.fatal "server_dir=%s, client_dir=%s" - (Path.to_string server) - (Path.to_string client); - Hh_logger.fatal "flow server is not listening to the same directory. Exiting."; - FlowExitStatus.(exit Server_client_directory_mismatch) - | _ -> () - end; - Lwt.return None - | ServerProt.Request.SUGGEST fn -> - let%lwt result = suggest ~options ~workers ~env ~profiling fn in - ServerProt.Response.SUGGEST result - |> respond; - Lwt.return None - | ServerProt.Request.SAVE_STATE out -> - let%lwt result = save_state ~saved_state_filename:out ~genv ~env in - ServerProt.Response.SAVE_STATE result - |> respond; - Lwt.return None - end + let json_data = + Some + (Hh_json.JSON_Object + ( ( "result", + Hh_json.JSON_String + (match result with + | Ok _ -> "SUCCESS" + | _ -> "FAILURE") ) + :: ("global", Hh_json.JSON_Bool global) + :: + (match dep_count with + | Some count -> [("deps", Hh_json.JSON_Number (string_of_int count))] + | None -> []) )) + in + Lwt.return (env, ServerProt.Response.FIND_REFS result, json_data) + +let handle_force_recheck ~files ~focus ~profile ~profiling = + let fileset = SSet.of_list files in + let reason = + LspProt.( + match files with + | [filename] -> Single_file_changed { filename } + | _ -> Many_files_changed { file_count = List.length files }) + in + (* `flow force-recheck --focus a.js` not only marks a.js as a focused file, but it also + * tells Flow that `a.js` has changed. 
In that case we push a.js to be rechecked and to be + * focused *) + let push ?callback files = + ServerMonitorListenerState.( + if focus then + push_files_to_force_focused_and_recheck ?callback ~reason files + else + push_files_to_recheck ?metadata:None ?callback ~reason files) + in + if profile then ( + let (wait_for_recheck_thread, wakener) = Lwt.task () in + push ~callback:(fun profiling -> Lwt.wakeup wakener profiling) fileset; + let%lwt recheck_profiling = wait_for_recheck_thread in + Option.iter recheck_profiling ~f:(fun recheck_profiling -> + Profiling_js.merge ~from:recheck_profiling ~into:profiling); + Lwt.return (ServerProt.Response.FORCE_RECHECK recheck_profiling, None) + ) else ( + (* If we're not profiling the recheck, then respond immediately *) + push fileset; + Lwt.return (ServerProt.Response.FORCE_RECHECK None, None) + ) + +let handle_get_def ~reader ~options ~filename ~line ~char ~profiling ~env = + let%lwt (result, json_data) = get_def ~reader ~options ~env ~profiling (filename, line, char) in + Lwt.return (ServerProt.Response.GET_DEF result, json_data) + +let handle_get_imports ~options ~reader ~module_names ~profiling:_ ~env:_ = + let response = get_imports ~options ~reader module_names in + Lwt.return (ServerProt.Response.GET_IMPORTS response, None) + +let handle_graph_dep_graph ~root ~strip_root ~outfile ~types_only ~profiling:_ ~env = + let%lwt response = output_dependencies ~env root strip_root types_only outfile in + Lwt.return (env, ServerProt.Response.GRAPH_DEP_GRAPH response, None) + +let handle_infer_type + ~options ~input ~line ~char ~verbose ~expand_aliases ~omit_targ_defaults ~profiling ~env = + let%lwt (result, json_data) = + infer_type + ~options + ~env + ~profiling + (input, line, char, verbose, expand_aliases, omit_targ_defaults) + in + Lwt.return (ServerProt.Response.INFER_TYPE result, json_data) + +let handle_insert_type + ~options + ~file_input + ~target + ~verbose + ~expand_aliases + ~omit_targ_defaults + ~location_is_strict + ~ambiguity_strategy + ~profiling + ~env = + let%lwt result = + insert_type + ~options + ~env + ~profiling + ~file_input + ~target + ~verbose + ~expand_aliases + ~omit_targ_defaults + ~location_is_strict + ~ambiguity_strategy in - let event = ServerStatus.(Finishing_up { - duration = Profiling_js.get_profiling_duration profiling; - info = CommandSummary (ServerProt.Request.to_string command)}) in - MonitorRPC.status_update ~event; - Lwt.return (!env, profiling, json_data) + Lwt.return (ServerProt.Response.INSERT_TYPE result, None) -let wrap_ephemeral_handler handler genv arg (request_id, command) = +let handle_rage ~reader ~options ~files ~profiling:_ ~env = + let items = collect_rage ~options ~reader ~env ~files:(Some files) in + Lwt.return (ServerProt.Response.RAGE items, None) + +let handle_refactor + ~reader ~genv ~input:file_input ~line ~char:col ~refactor_variant ~profiling ~env = + (* Refactor is another weird command that may mutate the env by doing a bunch of rechecking, + * since that's what find-refs does and refactor delegates to find-refs *) + ServerProt.Response.( + let env = ref env in + let%lwt result = + Refactor_js.refactor ~reader ~genv ~env ~profiling ~file_input ~line ~col ~refactor_variant + in + let env = !env in + let result = + result |> Core_result.map ~f:(Option.map ~f:(fun refactor_edits -> { refactor_edits })) + in + Lwt.return (env, REFACTOR result, None)) + +let handle_status ~reader ~genv ~client_root ~profiling:_ ~env = + let (status_response, lazy_stats) = get_status ~reader genv env 
client_root in + Lwt.return (env, ServerProt.Response.STATUS { status_response; lazy_stats }, None) + +let handle_suggest ~options ~input ~profiling ~env = + let%lwt result = suggest ~options ~env ~profiling input in + Lwt.return (ServerProt.Response.SUGGEST result, None) + +let handle_save_state ~saved_state_filename ~genv ~profiling ~env = + let%lwt result = save_state ~saved_state_filename ~genv ~env ~profiling in + Lwt.return (env, ServerProt.Response.SAVE_STATE result, None) + +let find_code_actions ~options ~env ~profiling ~params ~client = + CodeActionRequest.( + Flow_lsp_conversions.( + let { textDocument; range; _ } = params in + (* The current ide-lsp-server/flow-lsp-client doesn't necisarrily get restart for every project. + * Checking the option here ensures the the flow server doesn't do too much work for code + * action requests on projects where code actions are not enabled in the `.flowconfig`. + *) + if not options.Options.opt_lsp_code_actions then + Lwt.return (Ok []) + else + let (file_key, file, loc) = lsp_textDocument_and_range_to_flow textDocument range client in + match File_input.content_of_file_input file with + | Error msg -> Lwt.return (Error msg) + | Ok file_contents -> + Type_info_service.code_actions_at_loc + ~options + ~env + ~profiling + ~params + ~file_key + ~file_contents + ~loc)) + +type command_handler = + (* A command can be handled immediately if it is super duper fast and doesn't require the env. + * These commands will be handled as soon as we read them off the pipe. Almost nothing should ever + * be handled immediately *) + | Handle_immediately of + (profiling:Profiling_js.running -> + (ServerProt.Response.response * Hh_json.json option) Lwt.t) + (* A command is parallelizable if it passes four conditions + * + * 1. It is fast. Running it in parallel will block the current recheck, so it needs to be really + * fast. + * 2. It doesn't use the workers. Currently we can't deal with the recheck using the workers at the + * same time as a command using the workers + * 3. It doesn't return a new env. It really should be just a read-only job + * 4. It doesn't mind using slightly out of date data. During a recheck, it will be reading the + * oldified data + *) + | Handle_parallelizable of + (profiling:Profiling_js.running -> + env:ServerEnv.env -> + (ServerProt.Response.response * Hh_json.json option) Lwt.t) + (* A command is nonparallelizable if it can't be handled immediately or parallelized. *) + | Handle_nonparallelizable of + (profiling:Profiling_js.running -> + env:ServerEnv.env -> + (ServerEnv.env * ServerProt.Response.response * Hh_json.json option) Lwt.t) + +(* This command is parallelizable, but we will treat it as nonparallelizable if we've been told + * to wait_for_recheck by the .flowconfig or CLI *) +let mk_parallelizable ~wait_for_recheck ~options f = + let wait_for_recheck = + Option.value wait_for_recheck ~default:(Options.wait_for_recheck options) + in + if wait_for_recheck then + Handle_nonparallelizable + (fun ~profiling ~env -> + let%lwt (response, json_data) = f ~profiling ~env in + Lwt.return (env, response, json_data)) + else + Handle_parallelizable f + +(* This function is called as soon as we read an ephemeral command from the pipe. It decides whether + * the command should be handled immediately or deferred as parallelizable or nonparallelizable. + * This function does NOT run any handling code itself. 
*) +let get_ephemeral_handler genv command = + let options = genv.options in + let reader = State_reader.create () in + match command with + | ServerProt.Request.AUTOCOMPLETE { trigger_character; input; wait_for_recheck } -> + mk_parallelizable + ~wait_for_recheck + ~options + (handle_autocomplete ~trigger_character ~reader ~options ~input) + | ServerProt.Request.AUTOFIX_EXPORTS { input; verbose; wait_for_recheck } -> + let options = { options with Options.opt_verbose = verbose } in + mk_parallelizable ~wait_for_recheck ~options (handle_autofix_exports ~input ~options) + | ServerProt.Request.CHECK_FILE { input; verbose; force; include_warnings; wait_for_recheck } -> + let options = + { + options with + Options.opt_verbose = verbose; + opt_include_warnings = options.Options.opt_include_warnings || include_warnings; + } + in + mk_parallelizable ~wait_for_recheck ~options (handle_check_file ~options ~force ~input) + | ServerProt.Request.COVERAGE { input; force; wait_for_recheck; trust } -> + mk_parallelizable ~wait_for_recheck ~options (handle_coverage ~options ~force ~trust ~input) + | ServerProt.Request.BATCH_COVERAGE { batch; wait_for_recheck; trust } -> + mk_parallelizable ~wait_for_recheck ~options (handle_batch_coverage ~options ~trust ~batch) + | ServerProt.Request.CYCLE { filename; types_only } -> + (* The user preference is to make this wait for up-to-date data *) + let file_options = Options.file_options options in + let fn = Files.filename_from_string ~options:file_options filename in + Handle_nonparallelizable (handle_cycle ~fn ~types_only) + | ServerProt.Request.DUMP_TYPES { input; wait_for_recheck } -> + mk_parallelizable ~wait_for_recheck ~options (handle_dump_types ~options ~input) + | ServerProt.Request.FIND_MODULE { moduleref; filename; wait_for_recheck } -> + mk_parallelizable + ~wait_for_recheck + ~options + (handle_find_module ~options ~reader ~moduleref ~filename) + | ServerProt.Request.FIND_REFS { filename; line; char; global; multi_hop } -> + (* find-refs can take a while and may use MultiWorker. Furthermore, it may do a recheck and + * change env. 
Each of these 3 facts disqualifies find-refs from being parallelizable *) + Handle_nonparallelizable + (handle_find_refs ~reader ~genv ~filename ~line ~char ~global ~multi_hop) + | ServerProt.Request.FORCE_RECHECK { files; focus; profile } -> + Handle_immediately (handle_force_recheck ~files ~focus ~profile) + | ServerProt.Request.GET_DEF { filename; line; char; wait_for_recheck } -> + mk_parallelizable + ~wait_for_recheck + ~options + (handle_get_def ~reader ~options ~filename ~line ~char) + | ServerProt.Request.GET_IMPORTS { module_names; wait_for_recheck } -> + mk_parallelizable + ~wait_for_recheck + ~options + (handle_get_imports ~options ~reader ~module_names) + | ServerProt.Request.GRAPH_DEP_GRAPH { root; strip_root; outfile; types_only } -> + (* The user preference is to make this wait for up-to-date data *) + Handle_nonparallelizable (handle_graph_dep_graph ~root ~strip_root ~types_only ~outfile) + | ServerProt.Request.INFER_TYPE + { input; line; char; verbose; expand_aliases; omit_targ_defaults; wait_for_recheck } -> + mk_parallelizable + ~wait_for_recheck + ~options + (handle_infer_type ~options ~input ~line ~char ~verbose ~expand_aliases ~omit_targ_defaults) + | ServerProt.Request.RAGE { files } -> + mk_parallelizable ~wait_for_recheck:None ~options (handle_rage ~reader ~options ~files) + | ServerProt.Request.INSERT_TYPE + { + input; + target; + wait_for_recheck; + verbose; + expand_aliases; + omit_targ_defaults; + location_is_strict; + ambiguity_strategy; + } -> + mk_parallelizable + ~wait_for_recheck + ~options + (handle_insert_type + ~file_input:input + ~options + ~target + ~verbose + ~expand_aliases + ~omit_targ_defaults + ~location_is_strict + ~ambiguity_strategy) + | ServerProt.Request.REFACTOR { input; line; char; refactor_variant } -> + (* refactor delegates to find-refs, which is not parallelizable. Therefore refactor is also not + * parallelizable *) + Handle_nonparallelizable (handle_refactor ~reader ~genv ~input ~line ~char ~refactor_variant) + | ServerProt.Request.STATUS { client_root; include_warnings } -> + let genv = + { + genv with + options = + Options. + { + options with + opt_include_warnings = options.opt_include_warnings || include_warnings; + }; + } + in + (* `flow status` is often used by users to get all the current errors. After talking to some + * coworkers and users, glevi decided that users would rather that `flow status` always waits + * for the current recheck to finish. So even though we could technically make `flow status` + * parallelizable, we choose to make it nonparallelizable *) + Handle_nonparallelizable (handle_status ~reader ~genv ~client_root) + | ServerProt.Request.SUGGEST { input; wait_for_recheck } -> + mk_parallelizable ~wait_for_recheck ~options (handle_suggest ~options ~input) + | ServerProt.Request.SAVE_STATE { outfile } -> + (* save-state can take awhile to run. Furthermore, you probably don't want to run this with out + * of date data. So save-state is not parallelizable *) + Handle_nonparallelizable (handle_save_state ~saved_state_filename:outfile ~genv) + +(* This is the common code which wraps each command handler. 
It deals with stuff like logging and + * catching exceptions *) +let wrap_ephemeral_handler handler ~genv ~request_id ~client_context ~workload ~cmd_str arg = try%lwt - let%lwt ret, profiling, json_data = handler genv arg (request_id, command) in - FlowEventLogger.ephemeral_command_success - ?json_data - ~client_context:command.ServerProt.Request.client_logging_context - ~profiling; - Lwt.return ret - with exn -> - let backtrace = String.trim (Printexc.get_backtrace ()) in - let exn_str = Printf.sprintf - "%s%s%s" - (Printexc.to_string exn) - (if backtrace = "" then "" else "\n") - backtrace in - Hh_logger.error - "Uncaught exception while handling a request (%s): %s" - (ServerProt.Request.to_string command.ServerProt.Request.command) - exn_str; + Hh_logger.info "Request: %s" cmd_str; + MonitorRPC.status_update ~event:ServerStatus.Handling_request_start; + + let%lwt (ret, profiling, json_data) = handler ~genv ~request_id ~workload arg in + let event = + ServerStatus.( + Finishing_up + { + duration = Profiling_js.get_profiling_duration profiling; + info = CommandSummary cmd_str; + }) + in + MonitorRPC.status_update ~event; + FlowEventLogger.ephemeral_command_success ?json_data ~client_context ~profiling; + Lwt.return (Ok ret) + with + | Lwt.Canceled as exn -> + let exn = Exception.wrap exn in + Exception.reraise exn + | exn -> + let exn = Exception.wrap exn in + let exn_str = Exception.to_string exn in + Hh_logger.error ~exn "Uncaught exception while handling a request (%s)" cmd_str; FlowEventLogger.ephemeral_command_failure - ~client_context:command.ServerProt.Request.client_logging_context - ~json_data:(Hh_json.JSON_Object [ "exn", Hh_json.JSON_String exn_str ]); + ~client_context + ~json_data:(Hh_json.JSON_Object [("exn", Hh_json.JSON_String exn_str)]); MonitorRPC.request_failed ~request_id ~exn_str; - Lwt.return arg -let handle_ephemeral_deferred = wrap_ephemeral_handler handle_ephemeral_deferred_unsafe - -let should_handle_immediately { ServerProt.Request.client_logging_context=_; command; } = - match command with - | ServerProt.Request.FORCE_RECHECK _ -> - true - - | ServerProt.Request.AUTOCOMPLETE _ - | ServerProt.Request.CHECK_FILE _ - | ServerProt.Request.COVERAGE _ - | ServerProt.Request.CYCLE _ - | ServerProt.Request.DUMP_TYPES _ - | ServerProt.Request.FIND_MODULE _ - | ServerProt.Request.FIND_REFS _ - | ServerProt.Request.GEN_FLOW_FILES _ - | ServerProt.Request.GET_DEF _ - | ServerProt.Request.GET_IMPORTS _ - | ServerProt.Request.INFER_TYPE _ - | ServerProt.Request.PORT _ - | ServerProt.Request.REFACTOR _ - | ServerProt.Request.STATUS _ - | ServerProt.Request.SUGGEST _ - | ServerProt.Request.SAVE_STATE _ -> - false + Lwt.return (Error ()) (* A few commands need to be handled immediately, as soon as they arrive from the monitor. 
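(* A minimal standalone sketch of the wrapping pattern above: every handler runs
 * through one function that logs the command, turns a normal result into Ok, and
 * turns an uncaught exception into Error instead of letting it escape. Plain
 * printing stands in for Hh_logger/MonitorRPC, and the Lwt and cancellation
 * details are omitted; this is not part of the patch itself. *)
let wrap_handler ~cmd_str handler arg =
  Printf.printf "Request: %s\n" cmd_str;
  match handler arg with
  | result -> Ok result
  | exception exn ->
    Printf.printf
      "Uncaught exception while handling a request (%s): %s\n"
      cmd_str
      (Printexc.to_string exn);
    Error ()

let () =
  match wrap_handler ~cmd_str:"check-contents" (fun x -> x * 2) 21 with
  | Ok n -> Printf.printf "response: %d\n" n
  | Error () -> print_endline "request failed"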
An * `env` is NOT available, since we don't have the server's full attention *) -let handle_ephemeral_immediately_unsafe - genv () (request_id, { ServerProt.Request.client_logging_context=_; command; }) = - let respond msg = - MonitorRPC.respond_to_request ~request_id ~response:msg - - in - Hh_logger.debug "Request: %s" (ServerProt.Request.to_string command); - MonitorRPC.status_update ~event:ServerStatus.Handling_request_start; +let handle_ephemeral_immediately_unsafe ~genv ~request_id ~workload () = let should_print_summary = Options.should_profile genv.options in - let%lwt profiling, json_data = - Profiling_js.with_profiling_lwt ~label:"Command" ~should_print_summary begin fun profiling -> - match command with - | ServerProt.Request.FORCE_RECHECK { files; focus; profile; } -> - let fileset = SSet.of_list files in - let push = ServerMonitorListenerState.( - if focus then push_files_to_focus else push_files_to_recheck - ) in - - if profile - then begin - let wait_for_recheck_thread, wakener = Lwt.task () in - push ~callback:(fun profiling -> Lwt.wakeup wakener profiling) fileset; - let%lwt recheck_profiling = wait_for_recheck_thread in - respond (ServerProt.Response.FORCE_RECHECK recheck_profiling); - Option.iter recheck_profiling ~f:(fun recheck_profiling -> - Profiling_js.merge ~from:recheck_profiling ~into:profiling - ); - Lwt.return None - end else begin - (* If we're not profiling the recheck, then respond immediately *) - respond (ServerProt.Response.FORCE_RECHECK None); - push fileset; - Lwt.return None - end - | ServerProt.Request.AUTOCOMPLETE _ - | ServerProt.Request.CHECK_FILE _ - | ServerProt.Request.COVERAGE _ - | ServerProt.Request.CYCLE _ - | ServerProt.Request.DUMP_TYPES _ - | ServerProt.Request.FIND_MODULE _ - | ServerProt.Request.FIND_REFS _ - | ServerProt.Request.GEN_FLOW_FILES _ - | ServerProt.Request.GET_DEF _ - | ServerProt.Request.GET_IMPORTS _ - | ServerProt.Request.INFER_TYPE _ - | ServerProt.Request.PORT _ - | ServerProt.Request.REFACTOR _ - | ServerProt.Request.STATUS _ - | ServerProt.Request.SUGGEST _ - | ServerProt.Request.SAVE_STATE _ -> - failwith (spf "Command %s must be deferred" (ServerProt.Request.to_string command)) - end + let%lwt (profiling, (response, json_data)) = + Profiling_js.with_profiling_lwt ~label:"Command" ~should_print_summary (fun profiling -> + workload ~profiling) in - let event = ServerStatus.(Finishing_up { - duration = Profiling_js.get_profiling_duration profiling; - info = CommandSummary (ServerProt.Request.to_string command)}) in - MonitorRPC.status_update ~event; + MonitorRPC.respond_to_request ~request_id ~response; + Lwt.return ((), profiling, json_data) let handle_ephemeral_immediately = wrap_ephemeral_handler handle_ephemeral_immediately_unsafe -let enqueue_or_handle_ephemeral genv (request_id, command) = - if should_handle_immediately command - then handle_ephemeral_immediately genv () (request_id, command) - else begin - ServerMonitorListenerState.push_new_workload - (fun env -> handle_ephemeral_deferred genv env (request_id, command)); +(* If command running in serial (i.e. not in parallel with a recheck) is canceled, it kicks off a + * recheck itself and then reruns itself + * + * While parallelizable commands can be run out of order (some might get deferred), + * nonparallelizable commands always run in order. So that's why we don't defer commands here. + * + * Since this might run a recheck, `workload ~profiling ~env` MUST return the new env. 
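(* A minimal standalone sketch of the retry loop described above: a plain exception
 * stands in for Lwt.Canceled, an int for ServerEnv.env, and a stub recheck for
 * Rechecker.recheck_loop. The real code also merges the recheck's profiling into
 * the command's profiling; this is not part of the patch itself. *)
exception Canceled

let recheck env = env + 1

let rec run_in_serial ~env ~workload =
  try workload ~env with
  | Canceled ->
    print_endline "Command canceled; rechecking before restarting the command";
    run_in_serial ~env:(recheck env) ~workload

let () =
  let attempts = ref 0 in
  let workload ~env =
    incr attempts;
    if !attempts < 2 then raise Canceled;
    Printf.sprintf "finished against env %d" env
  in
  print_endline (run_in_serial ~env:0 ~workload)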
+ *) +let rec run_command_in_serial ~genv ~env ~profiling ~workload = + try%lwt workload ~profiling ~env + with Lwt.Canceled -> + Hh_logger.info "Command successfully canceled. Running a recheck before restarting the command"; + let%lwt (recheck_profiling, env) = Rechecker.recheck_loop genv env in + List.iter (fun from -> Profiling_js.merge ~into:profiling ~from) recheck_profiling; + Hh_logger.info "Now restarting the command"; + run_command_in_serial ~genv ~env ~profiling ~workload + +(* A command that is running in parallel with a recheck, if canceled, can't just run a recheck + * itself. It needs to defer itself until later. *) +let run_command_in_parallel ~env ~profiling ~workload ~mk_workload = + try%lwt workload ~profiling ~env + with Lwt.Canceled as exn -> + let exn = Exception.wrap exn in + Hh_logger.info + "Command successfully canceled. Requeuing the command for after the next recheck."; + ServerMonitorListenerState.defer_parallelizable_workload (mk_workload ()); + Exception.reraise exn + +let rec handle_parallelizable_ephemeral_unsafe + ~client_context ~cmd_str ~genv ~request_id ~workload env = + let should_print_summary = Options.should_profile genv.options in + let%lwt (profiling, json_data) = + Profiling_js.with_profiling_lwt ~label:"Command" ~should_print_summary (fun profiling -> + let%lwt (response, json_data) = + let mk_workload () = + handle_parallelizable_ephemeral ~genv ~request_id ~client_context ~workload ~cmd_str + in + run_command_in_parallel ~env ~profiling ~workload ~mk_workload + in + MonitorRPC.respond_to_request ~request_id ~response; + + (* It sucks this has to live here. We need a better way to handle post-send logic + * TODO - Do we actually need this error? Why do we even send the path? *) + ServerProt.Response.( + match response with + | STATUS { lazy_stats = _; status_response = DIRECTORY_MISMATCH { server; client } } -> + Hh_logger.fatal "Status: Error"; + Hh_logger.fatal + "server_dir=%s, client_dir=%s" + (Path.to_string server) + (Path.to_string client); + Hh_logger.fatal "flow server is not listening to the same directory. 
Exiting."; + FlowExitStatus.(exit Server_client_directory_mismatch) + | _ -> ()); + Lwt.return json_data) + in + Lwt.return ((), profiling, json_data) + +and handle_parallelizable_ephemeral ~genv ~request_id ~client_context ~workload ~cmd_str env = + try%lwt + let handler = handle_parallelizable_ephemeral_unsafe ~client_context ~cmd_str in + let%lwt result = + wrap_ephemeral_handler handler ~genv ~request_id ~client_context ~workload ~cmd_str env + in + match result with + | Ok () + | Error () -> + Lwt.return_unit + with Lwt.Canceled -> + (* It's fine for parallelizable commands to be canceled - they'll be run again later *) + Lwt.return_unit + +let handle_nonparallelizable_ephemeral_unsafe ~genv ~request_id ~workload env = + let should_print_summary = Options.should_profile genv.options in + let%lwt (profiling, (env, json_data)) = + Profiling_js.with_profiling_lwt ~label:"Command" ~should_print_summary (fun profiling -> + let%lwt (env, response, json_data) = + run_command_in_serial ~genv ~env ~profiling ~workload + in + MonitorRPC.respond_to_request ~request_id ~response; + + Lwt.return (env, json_data)) + in + Lwt.return (env, profiling, json_data) + +let handle_nonparallelizable_ephemeral ~genv ~request_id ~client_context ~workload ~cmd_str env = + let%lwt result = + wrap_ephemeral_handler + handle_nonparallelizable_ephemeral_unsafe + ~genv + ~request_id + ~client_context + ~workload + ~cmd_str + env + in + match result with + | Ok env -> Lwt.return env + | Error () -> Lwt.return env + +let enqueue_or_handle_ephemeral genv (request_id, command_with_context) = + let { ServerProt.Request.client_logging_context = client_context; command } = + command_with_context + in + let cmd_str = ServerProt.Request.to_string command in + match get_ephemeral_handler genv command with + | Handle_immediately workload -> + let%lwt result = + handle_ephemeral_immediately ~genv ~request_id ~client_context ~workload ~cmd_str () + in + (match result with + | Ok () + | Error () -> + Lwt.return_unit) + | Handle_parallelizable workload -> + let workload = + handle_parallelizable_ephemeral ~genv ~request_id ~client_context ~workload ~cmd_str + in + ServerMonitorListenerState.push_new_parallelizable_workload workload; + Lwt.return_unit + | Handle_nonparallelizable workload -> + let workload = + handle_nonparallelizable_ephemeral ~genv ~request_id ~client_context ~workload ~cmd_str + in + ServerMonitorListenerState.push_new_workload workload; Lwt.return_unit - end -let did_open genv env client (files: (string*string) Nel.t) : ServerEnv.env Lwt.t = +let did_open ~reader genv env client (files : (string * string) Nel.t) : ServerEnv.env Lwt.t = let options = genv.ServerEnv.options in - begin match Persistent_connection.client_did_open env.connections client ~files with - | None -> Lwt.return env (* No new files were opened, so do nothing *) - | Some (connections, client) -> - let env = {env with connections} in - - match Options.lazy_mode options with - | Some Options.LAZY_MODE_IDE -> - (* LAZY_MODE_IDE is a lazy mode which infers the focused files based on what the IDE - * opens. So when an IDE opens a new file, that file is now focused. - * - * If the newly opened file was previously unchecked or checked as a dependency, then - * we will do a new recheck. 
- * - * If the newly opened file was already checked, then we'll just send the errors to - * the client - *) - let filenames = Nel.map (fun (fn, _content) -> fn) files in - let%lwt env, triggered_recheck = Lazy_mode_utils.focus_and_check genv env filenames in - if not triggered_recheck then begin - (* This open doesn't trigger a recheck, but we'll still send down the errors *) - let errors, warnings, _ = ErrorCollator.get_with_separate_warnings env in - Persistent_connection.send_errors_if_subscribed ~client ~errors ~warnings - end; - Lwt.return env - | Some Options.LAZY_MODE_FILESYSTEM - | None -> - (* In filesystem lazy mode or in non-lazy mode, the only thing we need to do when - * a new file is opened is to send the errors to the client *) - let errors, warnings, _ = ErrorCollator.get_with_separate_warnings env in - Persistent_connection.send_errors_if_subscribed ~client ~errors ~warnings; - Lwt.return env - end + match Options.lazy_mode options with + | Options.LAZY_MODE_IDE -> + (* LAZY_MODE_IDE is a lazy mode which infers the focused files based on what the IDE + * opens. So when an IDE opens a new file, that file is now focused. + * + * If the newly opened file was previously unchecked or checked as a dependency, then + * we will do a new recheck. + * + * If the newly opened file was already checked, then we'll just send the errors to + * the client + *) + let filenames = Nel.map (fun (fn, _content) -> fn) files in + let%lwt (env, triggered_recheck) = Lazy_mode_utils.focus_and_check genv env filenames in + ( if not triggered_recheck then + (* This open doesn't trigger a recheck, but we'll still send down the errors *) + let (errors, warnings, _) = ErrorCollator.get_with_separate_warnings ~reader ~options env in + Persistent_connection.send_errors_if_subscribed + ~client + ~errors_reason:LspProt.Env_change + ~errors + ~warnings ); + Lwt.return env + | Options.LAZY_MODE_FILESYSTEM + | Options.LAZY_MODE_WATCHMAN + | Options.NON_LAZY_MODE -> + (* In filesystem lazy mode or in non-lazy mode, the only thing we need to do when + * a new file is opened is to send the errors to the client *) + let (errors, warnings, _) = ErrorCollator.get_with_separate_warnings ~reader ~options env in + Persistent_connection.send_errors_if_subscribed + ~client + ~errors_reason:LspProt.Env_change + ~errors + ~warnings; + Lwt.return env -let did_close _genv env client (filenames: string Nel.t) : ServerEnv.env Lwt.t = - begin match Persistent_connection.client_did_close env.connections client ~filenames with - | None -> Lwt.return env (* No new files were closed, so do nothing *) - | Some (connections, client) -> - let errors, warnings, _ = ErrorCollator.get_with_separate_warnings env in - Persistent_connection.send_errors_if_subscribed ~client ~errors ~warnings; - Lwt.return {env with connections} - end - - -let with_error - ?(stack: Utils.callstack option) - ~(reason: string) - (metadata: Persistent_connection_prot.metadata) - : Persistent_connection_prot.metadata = - let open Persistent_connection_prot in - let local_stack = Printexc.get_callstack 100 |> Printexc.raw_backtrace_to_string in - let stack = Option.value stack ~default:(Utils.Callstack local_stack) in - let error_info = Some (ExpectedError, reason, stack) in - { metadata with error_info } - -let keyvals_of_json (json: Hh_json.json option) : (string * Hh_json.json) list = +let did_close ~reader genv env client : ServerEnv.env Lwt.t = + let options = genv.options in + let (errors, warnings, _) = ErrorCollator.get_with_separate_warnings ~reader 
~options env in + Persistent_connection.send_errors_if_subscribed + ~client + ~errors_reason:LspProt.Env_change + ~errors + ~warnings; + Lwt.return env + +let with_error ?(stack : Utils.callstack option) ~(reason : string) (metadata : LspProt.metadata) : + LspProt.metadata = + LspProt.( + let local_stack = Exception.get_current_callstack_string 100 in + let stack = Option.value stack ~default:(Utils.Callstack local_stack) in + let error_info = Some (ExpectedError, reason, stack) in + { metadata with error_info }) + +let keyvals_of_json (json : Hh_json.json option) : (string * Hh_json.json) list = match json with | None -> [] | Some (Hh_json.JSON_Object keyvals) -> keyvals - | Some json -> ["json_data", json] - -let with_data - ~(extra_data: Hh_json.json option) - (metadata: Persistent_connection_prot.metadata) - : Persistent_connection_prot.metadata = - let open Persistent_connection_prot in - let extra_data = metadata.extra_data @ (keyvals_of_json extra_data) + | Some json -> [("json_data", json)] + +let with_data ~(extra_data : Hh_json.json option) (metadata : LspProt.metadata) : LspProt.metadata + = + LspProt.( + let extra_data = metadata.extra_data @ keyvals_of_json extra_data in + { metadata with extra_data }) + +type 'a persistent_handling_result = + ('a * LspProt.response * LspProt.metadata, 'a * LspProt.metadata) result + +let handle_persistent_canceled ~ret ~id ~metadata ~client:_ ~profiling:_ = + let e = Lsp_fmt.error_of_exn (Error.RequestCancelled "cancelled") in + let response = ResponseMessage (id, ErrorResult (e, "")) in + let metadata = with_error metadata ~reason:"cancelled" in + Lwt.return (Ok (ret, LspProt.LspFromServer (Some response), metadata)) + +let handle_persistent_subscribe ~reader ~options ~metadata ~client ~profiling:_ ~env = + let (current_errors, current_warnings, _) = + ErrorCollator.get_with_separate_warnings ~reader ~options env in - { metadata with extra_data } - -type persistent_handling_result = - (** IdeResponse means that handle_persistent_unsafe is responsible for sending - the message to the client, and handle_persistent is responsible for logging. *) - | IdeResponse of ( - ServerEnv.env * Hh_json.json option, - ServerEnv.env * Persistent_connection_prot.error_info - ) result - (** LspResponse means that handle_persistent is responsible for sending the - message (if needed) to the client, and lspCommand is responsible for logging. *) - | LspResponse of ( - ServerEnv.env * Lsp.lsp_message option * Persistent_connection_prot.metadata, - ServerEnv.env * Persistent_connection_prot.metadata - ) result - - -(** handle_persistent_unsafe: - either this method returns Ok (and optionally returns some logging data), - or it returns Error for some well-understood reason string, - or it might raise/Lwt.fail, indicating a misunderstood coding bug. 
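(* A minimal standalone sketch of the metadata-enrichment helpers nearby
 * (with_error / with_data): a request's metadata accumulates an optional error and
 * extra key/value pairs. Plain strings stand in for Hh_json.json and
 * Utils.callstack, and the record below is a stand-in for LspProt.metadata; this
 * is not part of the patch itself. *)
type metadata = {
  command : string;
  error_info : (string * string) option; (* reason, stack *)
  extra_data : (string * string) list;
}

let with_error ?(stack = "<no stack>") ~reason metadata =
  { metadata with error_info = Some (reason, stack) }

let with_data ~extra_data metadata =
  { metadata with extra_data = metadata.extra_data @ extra_data }

let () =
  let m = { command = "textDocument/hover"; error_info = None; extra_data = [] } in
  let m = with_data ~extra_data:[("duration_ms", "12")] m in
  let m = with_error ~reason:"cancelled" m in
  match m.error_info with
  | Some (reason, _stack) -> Printf.printf "%s failed: %s\n" m.command reason
  | None -> print_endline "ok"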
*) -let handle_persistent_unsafe genv env client profiling msg : persistent_handling_result Lwt.t = - let open Persistent_connection_prot in - let options = genv.ServerEnv.options in - let workers = genv.ServerEnv.workers in - - match msg with - | LspToServer (RequestMessage (id, _), metadata) - when IdSet.mem id !(ServerMonitorListenerState.cancellation_requests) -> - let e = Lsp_fmt.error_of_exn (Error.RequestCancelled "cancelled") in - let response = ResponseMessage (id, ErrorResult (e, "")) in - Lwt.return (LspResponse (Ok (env, Some response, metadata))) - - | Subscribe -> - let current_errors, current_warnings, _ = ErrorCollator.get_with_separate_warnings env in - let new_connections = Persistent_connection.subscribe_client - ~clients:env.connections ~client ~current_errors ~current_warnings - in - Lwt.return (IdeResponse (Ok ({ env with connections = new_connections }, None))) - - | Autocomplete (file_input, id) -> - let env = ref env in - let%lwt results, json_data = autocomplete ~options ~workers ~env ~profiling file_input in - let wrapped = AutocompleteResult (results, id) in - Persistent_connection.send_message wrapped client; - Lwt.return (IdeResponse (Ok (!env, json_data))) - - | DidOpen filenames -> - Persistent_connection.send_message Persistent_connection_prot.DidOpenAck client; - let files = Nel.map (fun fn -> (fn, "%%Legacy IDE has no content")) filenames in - let%lwt env = did_open genv env client files in - Lwt.return (IdeResponse (Ok (env, None))) - - | LspToServer (NotificationMessage (DidOpenNotification params), metadata) -> - let open Lsp.DidOpen in - let open TextDocumentItem in - let content = params.textDocument.text in - let fn = params.textDocument.uri |> Lsp_helpers.lsp_uri_to_path in - let%lwt env = did_open genv env client (Nel.one (fn, content)) in - Lwt.return (LspResponse (Ok (env, None, metadata))) - - | LspToServer (NotificationMessage (DidChangeNotification params), metadata) -> - let open Lsp.DidChange in - let open VersionedTextDocumentIdentifier in - let open Persistent_connection in - let fn = params.textDocument.uri |> Lsp_helpers.lsp_uri_to_path in - begin match client_did_change env.connections client fn params.contentChanges with - | Ok (connections, _client) -> - Lwt.return (LspResponse (Ok ({ env with connections; }, None, metadata))) - | Error (reason, stack) -> - Lwt.return (LspResponse (Error (env, with_error metadata ~reason ~stack))) - end + Persistent_connection.subscribe_client ~client ~current_errors ~current_warnings; + Lwt.return (Ok (env, LspProt.LspFromServer None, metadata)) - | LspToServer (NotificationMessage (DidSaveNotification _params), metadata) -> - Lwt.return (LspResponse (Ok (env, None, metadata))) - - | Persistent_connection_prot.DidClose filenames -> - Persistent_connection.send_message Persistent_connection_prot.DidCloseAck client; - let%lwt env = did_close genv env client filenames in - Lwt.return (IdeResponse (Ok (env, None))) - - | LspToServer (NotificationMessage (DidCloseNotification params), metadata) -> - let open Lsp.DidClose in - let open TextDocumentIdentifier in - let fn = params.textDocument.uri |> Lsp_helpers.lsp_uri_to_path in - let%lwt env = did_close genv env client (Nel.one fn) in - Lwt.return (LspResponse (Ok (env, None, metadata))) - - | LspToServer (NotificationMessage (CancelRequestNotification params), metadata) -> - let id = params.CancelRequest.id in - (* by the time this cancel request shows up in the queue, then it must already *) - (* have had its effect if any on messages earlier in the 
queue, and so can be *) - (* removed. *) - ServerMonitorListenerState.(cancellation_requests := IdSet.remove id !cancellation_requests); - Lwt.return (LspResponse (Ok (env, None, metadata))) - - | LspToServer (RequestMessage (id, DefinitionRequest params), metadata) -> - let env = ref env in - let open TextDocumentPositionParams in - let (file, line, char) = Flow_lsp_conversions.lsp_DocumentPosition_to_flow params ~client in - let%lwt (result, extra_data) = - get_def ~options ~workers ~env ~profiling (file, line, char) in +(* A did_open notification can come in about N files, which is great. But sometimes we'll get + * N did_open notifications in quick succession. Let's batch them up and run them all at once! + *) +let (enqueue_did_open_files, handle_persistent_did_open_notification) = + let pending = ref SMap.empty in + let enqueue_did_open_files (files : (string * string) Nel.t) = + (* Overwrite the older content with the newer content *) + pending := Nel.fold_left (fun acc (fn, content) -> SMap.add fn content acc) !pending files + in + let get_and_clear_did_open_files () : (string * string) list = + let ret = SMap.elements !pending in + pending := SMap.empty; + ret + in + let handle_persistent_did_open_notification ~reader ~genv ~metadata ~client ~profiling:_ ~env = + let%lwt env = + match get_and_clear_did_open_files () with + | [] -> Lwt.return env + | first :: rest -> did_open ~reader genv env client (first, rest) + in + Lwt.return (Ok (env, LspProt.LspFromServer None, metadata)) + in + (enqueue_did_open_files, handle_persistent_did_open_notification) + +let handle_persistent_did_open_notification_no_op ~metadata ~client:_ ~profiling:_ = + Lwt.return (Ok ((), LspProt.LspFromServer None, metadata)) + +let handle_persistent_did_change_notification ~params ~metadata ~client ~profiling:_ = + Lsp.DidChange.( + VersionedTextDocumentIdentifier.( + Persistent_connection.( + let fn = params.textDocument.uri |> Lsp_helpers.lsp_uri_to_path in + match client_did_change client fn params.contentChanges with + | Ok () -> Lwt.return (Ok ((), LspProt.LspFromServer None, metadata)) + | Error (reason, stack) -> Lwt.return (Error ((), with_error metadata ~reason ~stack))))) + +let handle_persistent_did_save_notification ~metadata ~client:_ ~profiling:_ = + Lwt.return (Ok ((), LspProt.LspFromServer None, metadata)) + +let handle_persistent_did_close_notification ~reader ~genv ~metadata ~client ~profiling:_ ~env = + let%lwt env = did_close ~reader genv env client in + Lwt.return (Ok (env, LspProt.LspFromServer None, metadata)) + +let handle_persistent_did_close_notification_no_op ~metadata ~client:_ ~profiling:_ = + Lwt.return (Ok ((), LspProt.LspFromServer None, metadata)) + +let handle_persistent_cancel_notification ~params ~metadata ~client:_ ~profiling:_ ~env = + let id = params.CancelRequest.id in + (* by the time this cancel request shows up in the queue, then it must already *) + (* have had its effect if any on messages earlier in the queue, and so can be *) + (* removed. *) + ServerMonitorListenerState.(cancellation_requests := IdSet.remove id !cancellation_requests); + Lwt.return (Ok (env, LspProt.LspFromServer None, metadata)) + +let handle_persistent_get_def ~reader ~options ~id ~params ~loc ~metadata ~client ~profiling ~env = + TextDocumentPositionParams.( + let (file, line, char) = + match loc with + | Some loc -> loc + | None -> + (* We must have failed to get the client when we first tried. We could throw here, but this is + * a little more defensive. 
The only danger here is that the file contents may have changed *) + Flow_lsp_conversions.lsp_DocumentPosition_to_flow params client + in + let%lwt (result, extra_data) = get_def ~options ~reader ~env ~profiling (file, line, char) in let metadata = with_data ~extra_data metadata in - begin match result with - | Ok loc when loc = Loc.none -> - let response = ResponseMessage (id, DefinitionResult []) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Ok loc -> - let default_uri = params.textDocument.TextDocumentIdentifier.uri in - let location = Flow_lsp_conversions.loc_to_lsp_with_default ~default_uri loc in - let definition_location = { Lsp.DefinitionLocation.location; title = None } in - let response = ResponseMessage (id, DefinitionResult [definition_location]) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) - end - - | LspToServer (RequestMessage (id, HoverRequest params), metadata) -> - let open TextDocumentPositionParams in - let env = ref env in - let (file, line, char) = Flow_lsp_conversions.lsp_DocumentPosition_to_flow params ~client in - let verbose = None in (* if Some, would write to server logs *) - let%lwt result, extra_data = - infer_type ~options ~workers ~env ~profiling (file, line, char, verbose, false) + match result with + | Ok loc when loc = Loc.none -> + let response = ResponseMessage (id, DefinitionResult []) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | Ok loc -> + let default_uri = params.textDocument.TextDocumentIdentifier.uri in + let location = Flow_lsp_conversions.loc_to_lsp_with_default ~default_uri loc in + let definition_location = { Lsp.DefinitionLocation.location; title = None } in + let response = ResponseMessage (id, DefinitionResult [definition_location]) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | Error reason -> Lwt.return (Error ((), with_error metadata ~reason))) + +let handle_persistent_infer_type ~options ~id ~params ~loc ~metadata ~client ~profiling ~env = + TextDocumentPositionParams.( + let (file, line, char) = + match loc with + | Some loc -> loc + | None -> + (* We must have failed to get the client when we first tried. We could throw here, but this is + * a little more defensive. The only danger here is that the file contents may have changed *) + Flow_lsp_conversions.lsp_DocumentPosition_to_flow params client + in + let verbose = None in + (* if Some, would write to server logs *) + let%lwt (result, extra_data) = + infer_type ~options ~env ~profiling (file, line, char, verbose, false, false) in let metadata = with_data ~extra_data metadata in - begin match result with - | Ok (loc, content) -> - (* loc may be the 'none' location; content may be None. 
*) - (* If both are none then we'll return null; otherwise we'll return a hover *) - let default_uri = params.textDocument.TextDocumentIdentifier.uri in - let location = Flow_lsp_conversions.loc_to_lsp_with_default ~default_uri loc in - let range = if loc = Loc.none then None else Some location.Lsp.Location.range in - let contents = match content with - | None -> [MarkedString "?"] - | Some content -> [MarkedCode ("flow", Ty_printer.string_of_t content)] in - let r = match range, content with - | None, None -> None - | _, _ -> Some {Lsp.Hover.contents; range;} in - let response = ResponseMessage (id, HoverResult r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) - end + match result with + | Ok (loc, content) -> + (* loc may be the 'none' location; content may be None. *) + (* If both are none then we'll return null; otherwise we'll return a hover *) + let default_uri = params.textDocument.TextDocumentIdentifier.uri in + let location = Flow_lsp_conversions.loc_to_lsp_with_default ~default_uri loc in + let range = + if loc = Loc.none then + None + else + Some location.Lsp.Location.range + in + let contents = + match content with + | None -> [MarkedString "?"] + | Some content -> [MarkedCode ("flow", Ty_printer.string_of_t content)] + in + let r = + match (range, content) with + | (None, None) -> None + | (_, _) -> Some { Lsp.Hover.contents; range } + in + let response = ResponseMessage (id, HoverResult r) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | Error reason -> Lwt.return (Error ((), with_error metadata ~reason))) - | LspToServer (RequestMessage (id, CompletionRequest params), metadata) -> - let env = ref env in - let open Completion in - let (file, line, char) = Flow_lsp_conversions.lsp_DocumentPosition_to_flow params.loc ~client in - let fn_content = match file with - | File_input.FileContent (fn, content) -> - Ok (fn, content) +let handle_persistent_code_action_request ~options ~id ~params ~metadata ~client ~profiling ~env = + let%lwt result = find_code_actions ~options ~profiling ~env ~client ~params in + let response = + match result with + | Ok code_actions -> + Ok + ( (), + LspProt.LspFromServer (Some (ResponseMessage (id, CodeActionResult code_actions))), + metadata ) + | Error reason -> Error ((), with_error metadata ~reason) + in + Lwt.return response + +let handle_persistent_autocomplete_lsp + ~reader ~options ~id ~params ~loc ~metadata ~client ~profiling ~env = + let is_snippet_supported = Persistent_connection.client_snippet_support client in + Completion.( + let (file, line, char) = + match loc with + | Some loc -> loc + | None -> + (* We must have failed to get the client when we first tried. We could throw here, but this is + * a little more defensive. 
The only danger here is that the file contents may have changed *) + Flow_lsp_conversions.lsp_DocumentPosition_to_flow params.loc client + in + let trigger_character = + Option.value_map + ~f:(fun completionContext -> completionContext.triggerCharacter) + ~default:None + params.context + in + let fn_content = + match file with + | File_input.FileContent (fn, content) -> Ok (fn, content) | File_input.FileName fn -> - try - Ok (Some fn, Sys_utils.cat fn) - with e -> - let stack = Printexc.get_backtrace () in - Error (Printexc.to_string e, Utils.Callstack stack) + (try Ok (Some fn, Sys_utils.cat fn) + with e -> + let e = Exception.wrap e in + Error (Exception.get_ctor_string e, Utils.Callstack (Exception.get_backtrace_string e))) in - begin match fn_content with - | Error (reason, stack) -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason ~stack))) - | Ok (fn, content) -> - let content_with_token = AutocompleteService_js.add_autocomplete_token content line char in - let file_with_token = File_input.FileContent (fn, content_with_token) in - let%lwt result, extra_data = - autocomplete ~options ~workers ~env ~profiling file_with_token - in - let metadata = with_data ~extra_data metadata in - begin match result with - | Ok items -> - let items = List.map Flow_lsp_conversions.flow_completion_to_lsp items in - let r = CompletionResult { Lsp.Completion.isIncomplete = false; items; } in - let response = ResponseMessage (id, r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) - end - end - - | LspToServer (RequestMessage (id, DocumentHighlightRequest params), metadata) -> - let env = ref env in - let (file, line, char) = Flow_lsp_conversions.lsp_DocumentPosition_to_flow params ~client in - let global, multi_hop = false, false in (* multi_hop implies global *) - let%lwt result, extra_data = - find_refs ~genv ~env ~profiling (file, line, char, global, multi_hop) + match fn_content with + | Error (reason, stack) -> Lwt.return (Error ((), with_error metadata ~reason ~stack)) + | Ok (fn, content) -> + let content_with_token = AutocompleteService_js.add_autocomplete_token content line char in + let file_with_token = File_input.FileContent (fn, content_with_token) in + let%lwt (result, extra_data) = + autocomplete ~trigger_character ~reader ~options ~env ~profiling file_with_token + in + let metadata = with_data ~extra_data metadata in + begin + match result with + | Ok items -> + let items = + Core_list.map + ~f:(Flow_lsp_conversions.flow_completion_to_lsp is_snippet_supported) + items + in + let r = CompletionResult { Lsp.Completion.isIncomplete = false; items } in + let response = ResponseMessage (id, r) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | Error reason -> Lwt.return (Error ((), with_error metadata ~reason)) + end) + +let handle_persistent_document_highlight + ~reader ~options ~id ~params ~metadata ~client ~profiling ~env = + let (file, line, char) = Flow_lsp_conversions.lsp_DocumentPosition_to_flow params ~client in + let%lwt result = find_local_refs ~reader ~options ~env ~profiling (file, line, char) in + let extra_data = + Some + (Hh_json.JSON_Object + [ + ( "result", + Hh_json.JSON_String + (match result with + | Ok _ -> "SUCCESS" + | _ -> "FAILURE") ); + ]) + in + let metadata = with_data ~extra_data metadata in + match result with + | Ok (Some (_name, locs)) -> + (* All the locs are implicitly in the same file *) + let 
loc_to_highlight loc = + { + DocumentHighlight.range = Flow_lsp_conversions.loc_to_lsp_range loc; + kind = Some DocumentHighlight.Text; + } in - let metadata = with_data ~extra_data metadata in - begin match result with - | Ok (Some (_name, locs)) -> - (* All the locs are implicitly in the same file, because global=false. *) - let loc_to_highlight loc = { DocumentHighlight. - range = Flow_lsp_conversions.loc_to_lsp_range loc; - kind = Some DocumentHighlight.Text; - } in - let r = DocumentHighlightResult (List.map loc_to_highlight locs) in - let response = ResponseMessage (id, r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Ok (None) -> - (* e.g. if it was requested on a place that's not even an identifier *) - let r = DocumentHighlightResult [] in - let response = ResponseMessage (id, r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) - end - - | LspToServer (RequestMessage (id, TypeCoverageRequest params), metadata) -> - let env = ref env in - let textDocument = params.TypeCoverage.textDocument in - let file = Flow_lsp_conversions.lsp_DocumentIdentifier_to_flow ~client textDocument in - (* if it isn't a flow file (i.e. lacks a @flow directive) then we won't do anything *) - let fkey = File_key.SourceFile (File_input.filename_of_file_input file) in - let content = File_input.content_of_file_input file in - let is_flow = match content with - | Ok content -> - let (_, docblock) = Parsing_service_js.(parse_docblock docblock_max_tokens fkey content) in - Docblock.is_flow docblock - | Error _ -> false in - let%lwt result = if is_flow then - let force = false in (* 'true' makes it report "unknown" for all exprs in non-flow files *) - coverage ~options ~workers ~env ~profiling ~force file + let r = DocumentHighlightResult (Core_list.map ~f:loc_to_highlight locs) in + let response = ResponseMessage (id, r) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | Ok None -> + (* e.g. if it was requested on a place that's not even an identifier *) + let r = DocumentHighlightResult [] in + let response = ResponseMessage (id, r) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | Error reason -> Lwt.return (Error ((), with_error metadata ~reason)) + +let handle_persistent_coverage ~options ~id ~params ~file ~metadata ~client ~profiling ~env = + let textDocument = params.TypeCoverage.textDocument in + let file = + match file with + | Some file -> file + | None -> + (* We must have failed to get the client when we first tried. We could throw here, but this is + * a little more defensive. The only danger here is that the file contents may have changed *) + Flow_lsp_conversions.lsp_DocumentIdentifier_to_flow textDocument ~client + in + (* if it isn't a flow file (i.e. 
lacks a @flow directive) then we won't do anything *) + let fkey = File_key.SourceFile (File_input.filename_of_file_input file) in + let content = File_input.content_of_file_input file in + let is_flow = + match content with + | Ok content -> + let (_, docblock) = Parsing_service_js.(parse_docblock docblock_max_tokens fkey content) in + Docblock.is_flow docblock + | Error _ -> false + in + let%lwt result = + if is_flow then + let force = false in + (* 'true' makes it report "unknown" for all exprs in non-flow files *) + coverage ~options ~env ~profiling ~force ~trust:false file else Lwt.return (Ok []) - in - begin match is_flow, result with - | false, _ -> - let range = {start={line=0; character=0;}; end_={line=1; character=0;};} in - let r = TypeCoverageResult { TypeCoverage. - coveredPercent = 0; - uncoveredRanges = [{TypeCoverage.range; message=None;}]; + in + match (is_flow, result) with + | (false, _) -> + let range = { start = { line = 0; character = 0 }; end_ = { line = 1; character = 0 } } in + let r = + TypeCoverageResult + { + TypeCoverage.coveredPercent = 0; + uncoveredRanges = [{ TypeCoverage.range; message = None }]; defaultMessage = "Use @flow to get type coverage for this file"; - } in - let response = ResponseMessage (id, r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | true, Ok (all_locs) -> - (* Figure out the percentages *) - let accum_coverage (covered, total) (_loc, is_covered) = - (covered + if is_covered then 1 else 0), total + 1 in - let covered, total = Core_list.fold all_locs ~init:(0,0) ~f:accum_coverage in - let coveredPercent = if total = 0 then 100 else 100 * covered / total in - (* Figure out each individual uncovered span *) - let uncovereds = Core_list.filter_map all_locs ~f:(fun (loc, is_covered) -> - if is_covered then None else Some loc) in - (* Imagine a tree of uncovered spans based on range inclusion. *) - (* This sorted list is a pre-order flattening of that tree. *) - let sorted = Core_list.sort uncovereds ~cmp:(fun a b -> Pervasives.compare - (a.Loc.start.Loc.offset, a.Loc._end.Loc.offset) - (b.Loc.start.Loc.offset, b.Loc._end.Loc.offset)) in - (* We can use that sorted list to remove any span which contains another, so *) - (* the user only sees actionable reports of the smallest causes of untypedness. *) - (* The algorithm: accept a range if its immediate successor isn't contained by it. *) - let f (candidate, acc) loc = - if Loc.contains candidate loc then (loc, acc) else (loc, candidate :: acc) in - let singles = match sorted with - | [] -> [] - | (first::_) -> - let (final_candidate, singles) = Core_list.fold sorted ~init:(first,[]) ~f in - final_candidate :: singles in - (* Convert to LSP *) - let loc_to_lsp loc = - { TypeCoverage.range=Flow_lsp_conversions.loc_to_lsp_range loc; message=None; } in - let uncoveredRanges = Core_list.map singles ~f:loc_to_lsp in - (* Send the results! *) - let r = TypeCoverageResult { TypeCoverage. 
- coveredPercent; + } + in + let response = ResponseMessage (id, r) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | (true, Ok all_locs) -> + (* Figure out the percentages *) + let accum_coverage (covered, total) (_loc, cov) = + let covered = + match cov with + | Coverage_response.Tainted + | Coverage_response.Untainted -> + covered + 1 + | Coverage_response.Uncovered + | Coverage_response.Empty -> + covered + in + (covered, total + 1) + in + let (covered, total) = Core_list.fold all_locs ~init:(0, 0) ~f:accum_coverage in + let coveredPercent = + if total = 0 then + 100 + else + 100 * covered / total + in + (* Figure out each individual uncovered span *) + let uncovereds = + Core_list.filter_map all_locs ~f:(fun (loc, cov) -> + match cov with + | Coverage_response.Tainted + | Coverage_response.Untainted -> + None + | Coverage_response.Uncovered + | Coverage_response.Empty -> + Some loc) + in + (* Imagine a tree of uncovered spans based on range inclusion. *) + (* This sorted list is a pre-order flattening of that tree. *) + let sorted = Core_list.sort uncovereds ~cmp:Loc.compare in + (* We can use that sorted list to remove any span which contains another, so *) + (* the user only sees actionable reports of the smallest causes of untypedness. *) + (* The algorithm: accept a range if its immediate successor isn't contained by it. *) + let f (candidate, acc) loc = + if Loc.contains candidate loc then + (loc, acc) + else + (loc, candidate :: acc) + in + let singles = + match sorted with + | [] -> [] + | first :: _ -> + let (final_candidate, singles) = Core_list.fold sorted ~init:(first, []) ~f in + final_candidate :: singles + in + (* Convert to LSP *) + let loc_to_lsp loc = + { TypeCoverage.range = Flow_lsp_conversions.loc_to_lsp_range loc; message = None } + in + let uncoveredRanges = Core_list.map singles ~f:loc_to_lsp in + (* Send the results! *) + let r = + TypeCoverageResult + { + TypeCoverage.coveredPercent; uncoveredRanges; defaultMessage = "Un-type checked code. 
Consider adding type annotations."; - } in - let response = ResponseMessage (id, r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | true, Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) - end - - | LspToServer (RequestMessage (id, FindReferencesRequest params), metadata) -> - let open FindReferences in - let env = ref env in - let { loc; context = { includeDeclaration=_; includeIndirectReferences=multi_hop } } = params in + } + in + let response = ResponseMessage (id, r) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + | (true, Error reason) -> Lwt.return (Error ((), with_error metadata ~reason)) + +let handle_persistent_find_refs ~reader ~genv ~id ~params ~metadata ~client ~profiling ~env = + FindReferences.( + let { loc; context = { includeDeclaration = _; includeIndirectReferences = multi_hop } } = + params + in (* TODO: respect includeDeclaration *) let (file, line, char) = Flow_lsp_conversions.lsp_DocumentPosition_to_flow loc ~client in - let global = true in - let%lwt result, extra_data = - find_refs ~genv ~env ~profiling (file, line, char, global, multi_hop) + let%lwt (env, result, dep_count) = + find_global_refs ~reader ~genv ~env ~profiling (file, line, char, multi_hop) + in + let extra_data = + Some + (Hh_json.JSON_Object + ( ( "result", + Hh_json.JSON_String + (match result with + | Ok _ -> "SUCCESS" + | _ -> "FAILURE") ) + :: ("global", Hh_json.JSON_Bool true) + :: + (match dep_count with + | Some count -> [("deps", Hh_json.JSON_Number (string_of_int count))] + | None -> []) )) in let metadata = with_data ~extra_data metadata in - begin match result with - | Ok (Some (_name, locs)) -> - let lsp_locs = Core_list.fold locs ~init:(Ok []) ~f:(fun acc loc -> - let location = Flow_lsp_conversions.loc_to_lsp loc in - Core_result.combine location acc ~ok:List.cons ~err:(fun e _ -> e)) in - begin match lsp_locs with + match result with + | Ok (Some (_name, locs)) -> + let lsp_locs = + Core_list.fold locs ~init:(Ok []) ~f:(fun acc loc -> + let location = Flow_lsp_conversions.loc_to_lsp loc in + Core_result.combine location acc ~ok:List.cons ~err:(fun e _ -> e)) + in + begin + match lsp_locs with | Ok lsp_locs -> let response = ResponseMessage (id, FindReferencesResult lsp_locs) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) + Lwt.return (Ok (env, LspProt.LspFromServer (Some response), metadata)) + | Error reason -> Lwt.return (Error (env, with_error metadata ~reason)) + end + | Ok None -> + (* e.g. 
if it was requested on a place that's not even an identifier *) + let r = FindReferencesResult [] in + let response = ResponseMessage (id, r) in + Lwt.return (Ok (env, LspProt.LspFromServer (Some response), metadata)) + | Error reason -> Lwt.return (Error (env, with_error metadata ~reason))) + +let handle_persistent_rename ~reader ~genv ~id ~params ~metadata ~client ~profiling ~env = + let { Rename.textDocument; position; newName } = params in + let file_input = Flow_lsp_conversions.lsp_DocumentIdentifier_to_flow textDocument ~client in + let (line, col) = Flow_lsp_conversions.lsp_position_to_flow position in + let refactor_variant = ServerProt.Request.RENAME newName in + let env = ref env in + let%lwt result = + Refactor_js.refactor ~reader ~genv ~env ~profiling ~file_input ~line ~col ~refactor_variant + in + let env = !env in + let edits_to_response (edits : (Loc.t * string) list) = + (* Extract the path from each edit and convert into a map from file to edits for that file *) + let file_to_edits : ((Loc.t * string) list SMap.t, string) result = + List.fold_left + begin + fun map edit -> + map + >>= fun map -> + let (loc, _) = edit in + let uri = Flow_lsp_conversions.file_key_to_uri Loc.(loc.source) in + uri + >>| fun uri -> + let lst = Option.value ~default:[] (SMap.get uri map) in + (* This reverses the list *) + SMap.add uri (edit :: lst) map end - | Ok (None) -> - (* e.g. if it was requested on a place that's not even an identifier *) - let r = FindReferencesResult [] in - let response = ResponseMessage (id, r) in - Lwt.return (LspResponse (Ok (!env, Some response, metadata))) - | Error reason -> - Lwt.return (LspResponse (Error (!env, with_error metadata ~reason))) - end - - | LspToServer (RequestMessage (id, RenameRequest params), metadata) -> - let env = ref env in - let { Rename.textDocument; position; newName } = params in - let file_input = Flow_lsp_conversions.lsp_DocumentIdentifier_to_flow textDocument ~client in - let (line, col) = Flow_lsp_conversions.lsp_position_to_flow position in - let refactor_variant = ServerProt.Request.RENAME newName in - let%lwt result = - Refactor_js.refactor ~genv ~env ~profiling ~file_input ~line ~col ~refactor_variant + (Ok SMap.empty) + edits + (* Reverse the lists to restore the original order *) + >>| SMap.map List.rev in - let edits_to_response (edits: (Loc.t * string) list) = - (* Extract the path from each edit and convert into a map from file to edits for that file *) - let file_to_edits: ((Loc.t * string) list SMap.t, string) result = - List.fold_left begin fun map edit -> - map >>= begin fun map -> - let (loc, _) = edit in - let uri = Flow_lsp_conversions.file_key_to_uri Loc.(loc.source) in - uri >>| begin fun uri -> - let lst = Option.value ~default:[] (SMap.get uri map) in - (* This reverses the list *) - SMap.add uri (edit::lst) map - end - end - end (Ok SMap.empty) edits - (* Reverse the lists to restore the original order *) - >>| SMap.map (List.rev) - in - (* Convert all of the edits to LSP edits *) - let file_to_textedits: (TextEdit.t list SMap.t, string) result = - file_to_edits >>| SMap.map (List.map Flow_lsp_conversions.flow_edit_to_textedit) - in - let workspace_edit: (WorkspaceEdit.t, string) result = - file_to_textedits >>| fun file_to_textedits -> - { WorkspaceEdit.changes = file_to_textedits } - in - match workspace_edit with - | Ok x -> - let response = ResponseMessage (id, RenameResult x) in - LspResponse (Ok (!env, Some response, metadata)) - | Error reason -> - LspResponse (Error (!env, with_error metadata ~reason)) 
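(* A minimal standalone sketch of the grouping step above: rename edits arrive as a
 * flat list and are bucketed into a per-file map by consing (which reverses each
 * bucket) and then reversing the buckets to restore source order. The stdlib's
 * Map.Make(String) stands in for Flow's SMap, and plain strings stand in for
 * Loc.t-based edits; this is not part of the patch itself. *)
module StrMap = Map.Make (String)

let group_edits_by_file (edits : (string * string) list) : string list StrMap.t =
  List.fold_left
    (fun map (file, edit) ->
      let existing = Option.value ~default:[] (StrMap.find_opt file map) in
      StrMap.add file (edit :: existing) map)
    StrMap.empty
    edits
  |> StrMap.map List.rev

let () =
  [("a.js", "1:1 x -> y"); ("b.js", "2:4 x -> y"); ("a.js", "9:2 x -> y")]
  |> group_edits_by_file
  |> StrMap.iter (fun file edits -> Printf.printf "%s: %d edit(s)\n" file (List.length edits))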
+ (* Convert all of the edits to LSP edits *) + let file_to_textedits : (TextEdit.t list SMap.t, string) result = + file_to_edits >>| SMap.map (Core_list.map ~f:Flow_lsp_conversions.flow_edit_to_textedit) in - Lwt.return begin match result with + let workspace_edit : (WorkspaceEdit.t, string) result = + file_to_textedits >>| (fun file_to_textedits -> { WorkspaceEdit.changes = file_to_textedits }) + in + match workspace_edit with + | Ok x -> + let response = ResponseMessage (id, RenameResult x) in + Ok (env, LspProt.LspFromServer (Some response), metadata) + | Error reason -> Error (env, with_error metadata ~reason) + in + Lwt.return + begin + match result with | Ok (Some edits) -> edits_to_response edits | Ok None -> edits_to_response [] - | Error reason -> - LspResponse (Error (!env, with_error metadata ~reason)) + | Error reason -> Error (env, with_error metadata ~reason) end - | LspToServer (RequestMessage (id, RageRequest), metadata) -> - let root = Path.to_string genv.ServerEnv.options.Options.opt_root in - let items = [] in - (* genv: lazy-mode options *) - let lazy_mode = genv.options.Options.opt_lazy_mode in - let data = Printf.sprintf "lazy_mode=%s\n" - (Option.value_map lazy_mode ~default:"None" ~f:Options.lazy_mode_to_string) in - let items = { Lsp.Rage.title = None; data; } :: items in - (* env: checked files *) - let data = Printf.sprintf "%s\n\n%s\n" - (CheckedSet.debug_counts_to_string env.checked_files) - (CheckedSet.debug_to_string ~limit:200 env.checked_files) in - let items = { Lsp.Rage.title = Some (root ^ ":env.checked_files"); data; } :: items in - (* env: dependency graph *) - let dependency_to_string (file, deps) = - let file = File_key.to_string file in - let deps = Utils_js.FilenameSet.elements deps - |> List.map File_key.to_string - |> ListUtils.first_upto_n 20 (fun t -> Some (Printf.sprintf " ...%d more" t)) - |> String.concat "," in - file ^ ":" ^ deps ^ "\n" in - let dependencies = Utils_js.FilenameMap.bindings env.ServerEnv.dependency_graph - |> List.map dependency_to_string - |> ListUtils.first_upto_n 200 (fun t -> Some (Printf.sprintf "[shown 200/%d]\n" t)) - |> String.concat "" in - let data = "DEPENDENCIES:\n" ^ dependencies in - let items = { Lsp.Rage.title = Some (root ^ ":env.dependencies"); data; } :: items in - (* env: errors *) - let errors, warnings, _ = ErrorCollator.get env in - let json = Errors.Json_output.json_of_errors_with_context ~strip_root:None ~stdin_file:None - ~suppressed_errors:[] ~errors ~warnings () in - let data = "ERRORS:\n" ^ (Hh_json.json_to_multiline json) in - let items = { Lsp.Rage.title = Some (root ^ ":env.errors"); data; } :: items in - (* done! *) - let response = ResponseMessage (id, RageResult items) in - Lwt.return (LspResponse (Ok (env, Some response, metadata))) - - | LspToServer (unhandled, metadata) -> - let reason = Printf.sprintf "not implemented: %s" (Lsp_fmt.message_name_to_string unhandled) in - Lwt.return (LspResponse (Error (env, with_error metadata ~reason))) - - -let handle_persistent - (genv: ServerEnv.genv) - (env: ServerEnv.env) - (client_id: Persistent_connection.Prot.client_id) - (request: Persistent_connection_prot.request) - : ServerEnv.env Lwt.t = - let open Persistent_connection_prot in - Hh_logger.debug "Persistent request: %s" (string_of_request request); - MonitorRPC.status_update ~event:ServerStatus.Handling_request_start; - - match Persistent_connection.get_client env.connections client_id with - | None -> - Hh_logger.error "Unknown persistent client %d. Maybe connection went away?" 
client_id; - Lwt.return env - | Some client -> begin - let client_context = Persistent_connection.get_logging_context client in - let should_print_summary = Options.should_profile genv.options in - let wall_start = Unix.gettimeofday () in - - let%lwt profiling, result = Profiling_js.with_profiling_lwt - ~label:"Command" ~should_print_summary - (fun profiling -> - try%lwt - handle_persistent_unsafe genv env client profiling request - with e -> - let stack = Utils.Callstack (Printexc.get_backtrace ()) in - let reason = Printexc.to_string e in - let error_info = (UnexpectedError, reason, stack) in - begin match request with - | LspToServer (_, metadata) -> - Lwt.return (LspResponse (Error (env, {metadata with error_info=Some error_info}))) +let handle_persistent_rage ~reader ~genv ~id ~metadata ~client:_ ~profiling:_ ~env = + let root = Path.to_string genv.ServerEnv.options.Options.opt_root in + let items = + collect_rage ~options:genv.ServerEnv.options ~reader ~env ~files:None + |> List.map (fun (title, data) -> { Lsp.Rage.title = Some (root ^ ":" ^ title); data }) + in + let response = ResponseMessage (id, RageResult items) in + Lwt.return (Ok ((), LspProt.LspFromServer (Some response), metadata)) + +let handle_persistent_unsupported ~unhandled ~metadata ~client:_ ~profiling:_ = + let reason = Printf.sprintf "not implemented: %s" (Lsp_fmt.message_name_to_string unhandled) in + Lwt.return (Error ((), with_error metadata ~reason)) + +type persistent_command_handler = + (* A command can be handled immediately if it is super duper fast and doesn't require the env. + * These commands will be handled as soon as we read them off the pipe. Almost nothing should ever + * be handled immediately *) + | Handle_persistent_immediately of + (client:Persistent_connection.single_client -> + profiling:Profiling_js.running -> + unit persistent_handling_result Lwt.t) + (* A command is parallelizable if it passes four conditions + * + * 1. It is fast. Running it in parallel will block the current recheck, so it needs to be really + * fast. + * 2. It doesn't use the workers. Currently we can't deal with the recheck using the workers at the + * same time as a command using the workers + * 3. It doesn't return a new env. It really should be just a read-only job + * 4. It doesn't mind using slightly out of date data. During a recheck, it will be reading the + * oldified data + *) + | Handle_parallelizable_persistent of + (client:Persistent_connection.single_client -> + profiling:Profiling_js.running -> + env:ServerEnv.env -> + unit persistent_handling_result Lwt.t) + (* A command is nonparallelizable if it can't be handled immediately or parallelized. 
*) + | Handle_nonparallelizable_persistent of + (client:Persistent_connection.single_client -> + profiling:Profiling_js.running -> + env:ServerEnv.env -> + ServerEnv.env persistent_handling_result Lwt.t) + +(* This command is parallelizable, but we will treat it as nonparallelizable if we've been told + * to wait_for_recheck by the .flowconfig *) +let mk_parallelizable_persistent ~options f = + let wait_for_recheck = Options.wait_for_recheck options in + if wait_for_recheck then + Handle_nonparallelizable_persistent + (fun ~client ~profiling ~env -> + let%lwt result = f ~client ~profiling ~env in + let result = + match result with + | Ok ((), msg, metadata) -> Ok (env, msg, metadata) + | Error ((), metadata) -> Error (env, metadata) + in + Lwt.return result) + else + Handle_parallelizable_persistent f + +(* get_persistent_handler can do a tiny little bit of work, but it's main job is just returning the + * persistent command's handler. + *) +let get_persistent_handler ~genv ~client_id ~request : persistent_command_handler = + LspProt.( + let options = genv.ServerEnv.options in + let reader = State_reader.create () in + match request with + | (LspToServer (RequestMessage (id, _)), metadata) + when IdSet.mem id !ServerMonitorListenerState.cancellation_requests -> + (* We don't do any work, we just immediately tell the monitor that this request was already + * canceled *) + Handle_persistent_immediately (handle_persistent_canceled ~ret:() ~id ~metadata) + | (Subscribe, metadata) -> + (* This mutates env, so it can't run in parallel *) + Handle_nonparallelizable_persistent (handle_persistent_subscribe ~reader ~options ~metadata) + | (LspToServer (NotificationMessage (DidOpenNotification params)), metadata) -> + Lsp.DidOpen.( + TextDocumentItem.( + let content = params.textDocument.text in + let fn = params.textDocument.uri |> Lsp_helpers.lsp_uri_to_path in + let files = Nel.one (fn, content) in + let did_anything_change = + match Persistent_connection.get_client client_id with + | None -> false + | Some client -> + (* We want to create a local copy of this file immediately, so we can respond to requests + * about this file *) + Persistent_connection.client_did_open client ~files + in + if did_anything_change then ( + enqueue_did_open_files files; + + (* This mutates env, so it can't run in parallel *) + Handle_nonparallelizable_persistent + (handle_persistent_did_open_notification ~reader ~genv ~metadata) + ) else + (* It's a no-op, so we can respond immediately *) + Handle_persistent_immediately (handle_persistent_did_open_notification_no_op ~metadata))) + | (LspToServer (NotificationMessage (DidChangeNotification params)), metadata) -> + (* This just updates our copy of the file in question. 
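
(* Editor's sketch, not part of the patch: a standalone, simplified model of the
 * three-way workload classification above. The type and function names here are
 * hypothetical stand-ins; Flow's real handlers take a client and a profiler and
 * return Lwt promises. The point is only the shape of the dispatch and the
 * wait_for_recheck downgrade performed by mk_parallelizable_persistent. *)
type env = { num_checked_files : int }

type handler =
  | Immediate of (unit -> unit)        (* needs no env; runs as soon as it is read *)
  | Parallelizable of (env -> unit)    (* read-only; may overlap a running recheck *)
  | Nonparallelizable of (env -> env)  (* may mutate env; must run serially *)

(* When the .flowconfig asks to wait_for_recheck, a read-only job is downgraded
 * so that it queues behind the recheck instead of running alongside it. *)
let mk_parallelizable ~wait_for_recheck (f : env -> unit) : handler =
  if wait_for_recheck then
    Nonparallelizable
      (fun env ->
        f env;
        env)
  else
    Parallelizable f

let run env = function
  | Immediate f ->
    f ();
    env
  | Parallelizable f ->
    f env;
    env
  | Nonparallelizable f -> f env

let () =
  let env = { num_checked_files = 42 } in
  let job =
    mk_parallelizable ~wait_for_recheck:false (fun e ->
        Printf.printf "%d files checked\n" e.num_checked_files)
  in
  ignore (run env job)
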
We want to do this immediately *) + Handle_persistent_immediately (handle_persistent_did_change_notification ~params ~metadata) + | (LspToServer (NotificationMessage (DidSaveNotification _params)), metadata) -> + (* No-op can be handled immediately *) + Handle_persistent_immediately (handle_persistent_did_save_notification ~metadata) + | (LspToServer (NotificationMessage (DidCloseNotification params)), metadata) -> + Lsp.DidClose.( + TextDocumentIdentifier.( + let fn = params.textDocument.uri |> Lsp_helpers.lsp_uri_to_path in + let filenames = Nel.one fn in + let did_anything_change = + match Persistent_connection.get_client client_id with + | None -> false + | Some client -> + (* Close this file immediately in case another didOpen comes soon *) + Persistent_connection.client_did_close client ~filenames + in + if did_anything_change then + (* This mutates env, so it can't run in parallel *) + Handle_nonparallelizable_persistent + (handle_persistent_did_close_notification ~reader ~genv ~metadata) + else + (* It's a no-op, so we can respond immediately *) + Handle_persistent_immediately + (handle_persistent_did_close_notification_no_op ~metadata))) + | (LspToServer (NotificationMessage (CancelRequestNotification params)), metadata) -> + (* The general idea here is this: + * + * 1. As soon as we get a cancel notification, add the ID to the canceled requests set. + * 2. When a request comes in or runs with the canceled ID, cancel that request and immediately + * respond that the request has been canceled. + * 3. When we go to run a request that has been canceled, skip its normal handler and instead + * respond that the request has been canceled. + * 4. When the nonparallelizable cancel notification workload finally runs, remove the ID from + * the set.
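
(* Editor's sketch, not part of the patch: the cancellation protocol described
 * in the comment above, reduced to a standalone model. Request ids are plain
 * ints and handlers are thunks; in Flow the ids are LSP request ids and the
 * set lives in ServerMonitorListenerState.cancellation_requests. *)
module IdSet = Set.Make (struct
  type t = int

  let compare = compare
end)

let cancellation_requests = ref IdSet.empty

(* Step 1: a cancel notification only records the id. *)
let on_cancel_notification id = cancellation_requests := IdSet.add id !cancellation_requests

(* Steps 2 and 3: a request whose id is in the set gets a canceled response
 * instead of running its normal handler. *)
let run_request ~id ~handler =
  if IdSet.mem id !cancellation_requests then
    Printf.printf "request %d: responding RequestCancelled\n" id
  else
    handler ()

(* Step 4: the cancel workload itself eventually removes the id. *)
let on_cancel_workload id = cancellation_requests := IdSet.remove id !cancellation_requests

let () =
  on_cancel_notification 7;
  run_request ~id:7 ~handler:(fun () -> print_endline "this handler is skipped");
  on_cancel_workload 7
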
We're guaranteed that the canceled request will not show up later *) + let id = params.CancelRequest.id in + ServerMonitorListenerState.(cancellation_requests := IdSet.add id !cancellation_requests); + Handle_nonparallelizable_persistent (handle_persistent_cancel_notification ~params ~metadata) + | (LspToServer (RequestMessage (id, DefinitionRequest params)), metadata) -> + (* Grab the file contents immediately in case of any future didChanges *) + let loc = + Option.map (Persistent_connection.get_client client_id) ~f:(fun client -> + Flow_lsp_conversions.lsp_DocumentPosition_to_flow params ~client) + in + mk_parallelizable_persistent + ~options + (handle_persistent_get_def ~reader ~options ~id ~params ~loc ~metadata) + | (LspToServer (RequestMessage (id, HoverRequest params)), metadata) -> + (* Grab the file contents immediately in case of any future didChanges *) + let loc = + Option.map (Persistent_connection.get_client client_id) ~f:(fun client -> + Flow_lsp_conversions.lsp_DocumentPosition_to_flow params ~client) + in + mk_parallelizable_persistent + ~options + (handle_persistent_infer_type ~options ~id ~params ~loc ~metadata) + | (LspToServer (RequestMessage (id, CodeActionRequest params)), metadata) -> + mk_parallelizable_persistent + ~options + (handle_persistent_code_action_request ~options ~id ~params ~metadata) + | (LspToServer (RequestMessage (id, CompletionRequest params)), metadata) -> + (* Grab the file contents immediately in case of any future didChanges *) + let loc = params.Completion.loc in + let loc = + Option.map (Persistent_connection.get_client client_id) ~f:(fun client -> + Flow_lsp_conversions.lsp_DocumentPosition_to_flow loc ~client) + in + mk_parallelizable_persistent + ~options + (handle_persistent_autocomplete_lsp ~reader ~options ~id ~params ~loc ~metadata) + | (LspToServer (RequestMessage (id, DocumentHighlightRequest params)), metadata) -> + mk_parallelizable_persistent + ~options + (handle_persistent_document_highlight ~reader ~options ~id ~params ~metadata) + | (LspToServer (RequestMessage (id, TypeCoverageRequest params)), metadata) -> + (* Grab the file contents immediately in case of any future didChanges *) + let textDocument = params.TypeCoverage.textDocument in + let file = + Option.map (Persistent_connection.get_client client_id) ~f:(fun client -> + Flow_lsp_conversions.lsp_DocumentIdentifier_to_flow textDocument ~client) + in + mk_parallelizable_persistent + ~options + (handle_persistent_coverage ~options ~id ~params ~file ~metadata) + | (LspToServer (RequestMessage (id, FindReferencesRequest params)), metadata) -> + (* Like `flow find-refs`, this is kind of slow and mutates env, so it can't run in parallel *) + Handle_nonparallelizable_persistent + (handle_persistent_find_refs ~reader ~genv ~id ~params ~metadata) + | (LspToServer (RequestMessage (id, RenameRequest params)), metadata) -> + (* rename delegates to find-refs, which can be kind of slow and might mutate the env, so it + * can't run in parallel *) + Handle_nonparallelizable_persistent + (handle_persistent_rename ~reader ~genv ~id ~params ~metadata) + | (LspToServer (RequestMessage (id, RageRequest)), metadata) -> + (* Whoever is waiting for the rage results probably doesn't want to wait for a recheck *) + mk_parallelizable_persistent ~options (handle_persistent_rage ~reader ~genv ~id ~metadata) + | (LspToServer unhandled, metadata) -> + (* We can reject unsupported stuff immediately *) + Handle_persistent_immediately (handle_persistent_unsupported ~unhandled ~metadata)) + +let 
wrap_persistent_handler + (type a b c) + (handler : + genv:ServerEnv.genv -> + workload:a -> + client:Persistent_connection.single_client -> + profiling:Profiling_js.running -> + b -> + c persistent_handling_result Lwt.t) + ~(genv : ServerEnv.genv) + ~(client_id : LspProt.client_id) + ~(request : LspProt.request_with_metadata) + ~(workload : a) + ~(default_ret : c) + (arg : b) : c Lwt.t = + LspProt.( + match Persistent_connection.get_client client_id with + | None -> + Hh_logger.error "Unknown persistent client %d. Maybe connection went away?" client_id; + Lwt.return default_ret + | Some client -> + Hh_logger.info "Persistent request: %s" (string_of_request request); + MonitorRPC.status_update ~event:ServerStatus.Handling_request_start; + + let should_print_summary = Options.should_profile genv.options in + let%lwt (profiling, result) = + Profiling_js.with_profiling_lwt ~label:"Command" ~should_print_summary (fun profiling -> + match request with + | (LspToServer (RequestMessage (id, _)), metadata) + when IdSet.mem id !ServerMonitorListenerState.cancellation_requests -> + Hh_logger.info "Skipping canceled persistent request: %s" (string_of_request request); + + (* We can't actually skip a canceled request...we need to send a response. But we can + * skip the normal handler *) + handle_persistent_canceled ~ret:default_ret ~id ~metadata ~client ~profiling | _ -> - Lwt.return (IdeResponse (Error (env, error_info))) - end) - in + (try%lwt handler ~genv ~workload ~client ~profiling arg with + | Lwt.Canceled as e -> + (* Don't swallow Lwt.Canceled. Parallelizable commands may be canceled and run again + * later. *) + let e = Exception.wrap e in + Exception.reraise e + | e -> + let e = Exception.wrap e in + let stack = Utils.Callstack (Exception.get_backtrace_string e) in + let reason = Exception.get_ctor_string e in + let error_info = (UnexpectedError, reason, stack) in + begin + match request with + | (_, metadata) -> + Lwt.return + (Error (default_ret, { metadata with error_info = Some error_info })) + end)) + in + (* we'll send this "Finishing_up" event only after sending the LSP response *) + let event = + ServerStatus.( + Finishing_up + { + duration = Profiling_js.get_profiling_duration profiling; + info = CommandSummary (string_of_request request); + }) + in + let server_profiling = Some profiling in + let server_logging_context = Some (FlowEventLogger.get_context ()) in + (match result with + | Ok (ret, lsp_response, metadata) -> + let metadata = { metadata with server_profiling; server_logging_context } in + let response = (lsp_response, metadata) in + Persistent_connection.send_response response client; + Hh_logger.info "Persistent response: Ok %s" (LspProt.string_of_response lsp_response); + MonitorRPC.status_update ~event; + Lwt.return ret + | Error (ret, metadata) -> + let metadata = { metadata with server_profiling; server_logging_context } in + let (_, reason, Utils.Callstack stack) = Option.value_exn metadata.error_info in + let e = Lsp_fmt.error_of_exn (Failure reason) in + let lsp_response = + match request with + | (LspToServer (RequestMessage (id, _)), _) -> + let friendly_message = + "Flow encountered an unexpected error while handling this request. " + ^ "See the Flow logs for more details." 
+ in + let e = { e with Lsp.Error.message = friendly_message } in + Some (ResponseMessage (id, ErrorResult (e, stack))) + | (LspToServer _, _) -> + LogMessage.( + let text = Printf.sprintf "%s [%i]\n%s" e.Error.message e.Error.code stack in + Some + (NotificationMessage + (TelemetryNotification { type_ = MessageType.ErrorMessage; message = text }))) + | _ -> None + in + let response = (LspProt.LspFromServer lsp_response, metadata) in + Persistent_connection.send_response response client; + Hh_logger.info + "Persistent response: Error lspFromServer %s" + (Option.value_map lsp_response ~default:"None" ~f:Lsp_fmt.message_name_to_string); + MonitorRPC.status_update ~event; + Lwt.return ret)) + +let handle_persistent_immediately_unsafe ~genv:_ ~workload ~client ~profiling () = + workload ~client ~profiling + +let handle_persistent_immediately ~genv ~client_id ~request ~workload = + wrap_persistent_handler + handle_persistent_immediately_unsafe + ~genv + ~client_id + ~request + ~workload + ~default_ret:() + () + +let rec handle_parallelizable_persistent_unsafe ~request ~genv ~workload ~client ~profiling env : + unit persistent_handling_result Lwt.t = + let mk_workload () = + let client_id = Persistent_connection.get_id client in + handle_parallelizable_persistent ~genv ~client_id ~request ~workload + in + let workload = workload ~client in + run_command_in_parallel ~env ~profiling ~workload ~mk_workload - (* we'll send this "Finishing_up" event only after sending the LSP response *) - let event = ServerStatus.(Finishing_up { - duration = Profiling_js.get_profiling_duration profiling; - info = CommandSummary (string_of_request request)}) in +and handle_parallelizable_persistent ~genv ~client_id ~request ~workload env : unit Lwt.t = + try%lwt + wrap_persistent_handler + (handle_parallelizable_persistent_unsafe ~request) + ~genv + ~client_id + ~request + ~workload + ~default_ret:() + env + with Lwt.Canceled -> + (* It's fine for parallelizable commands to be canceled - they'll be run again later *) + Lwt.return_unit - let server_profiling = Some profiling in - let server_logging_context = Some (FlowEventLogger.get_context ()) in +let handle_nonparallelizable_persistent_unsafe ~genv ~workload ~client ~profiling env = + let workload = workload ~client in + run_command_in_serial ~genv ~env ~profiling ~workload - match result with - | LspResponse (Ok (env, lsp_response, metadata)) -> - let metadata = {metadata with server_profiling; server_logging_context; } in - let response = LspFromServer (lsp_response, metadata) in - Persistent_connection.send_message response client; - MonitorRPC.status_update ~event; - Lwt.return env - - | LspResponse (Error (env, metadata)) -> - let metadata = {metadata with server_profiling; server_logging_context; } in - let (_, reason, Utils.Callstack stack) = Option.value_exn metadata.error_info in - let e = Lsp_fmt.error_of_exn (Failure reason) in - let lsp_response = match request with - | LspToServer (RequestMessage (id, _), _) -> - Some (ResponseMessage (id, ErrorResult (e, stack))) - | LspToServer _ -> - let open LogMessage in - let text = (Printf.sprintf "%s [%i]\n%s" e.Error.message e.Error.code stack) in - Some (NotificationMessage (TelemetryNotification - {type_=MessageType.ErrorMessage; message=text;})) - | _ -> None in - let response = LspFromServer (lsp_response, metadata) in - Persistent_connection.send_message response client; - MonitorRPC.status_update ~event; - Lwt.return env - - | IdeResponse (Ok (env, extra_data)) -> - let request = json_of_request request 
|> Hh_json.json_to_string in - let extra_data = keyvals_of_json extra_data in - FlowEventLogger.persistent_command_success - ~server_logging_context:None ~extra_data - ~persistent_context:None ~persistent_delay:None ~request ~client_context - ~server_profiling ~client_duration:None ~wall_start ~error:None; - MonitorRPC.status_update ~event; - Lwt.return env - - | IdeResponse (Error (env, (ExpectedError, reason, stack))) -> - let request = json_of_request request |> Hh_json.json_to_string in - FlowEventLogger.persistent_command_success - ~server_logging_context:None ~extra_data:[] - ~persistent_context:None ~persistent_delay:None ~request ~client_context - ~server_profiling ~client_duration:None ~wall_start ~error:(Some (reason, stack)); - MonitorRPC.status_update ~event; - Lwt.return env - - | IdeResponse (Error (env, (UnexpectedError, reason, stack))) -> - let request = json_of_request request |> Hh_json.json_to_string in - FlowEventLogger.persistent_command_failure - ~server_logging_context:None ~extra_data:[] - ~persistent_context:None ~persistent_delay:None ~request ~client_context - ~server_profiling ~client_duration:None ~wall_start ~error:(reason, stack); - Hh_logger.error "Uncaught exception handling persistent request (%s): %s" request reason; - MonitorRPC.status_update ~event; - Lwt.return env - end +let handle_nonparallelizable_persistent ~genv ~client_id ~request ~workload env = + wrap_persistent_handler + handle_nonparallelizable_persistent_unsafe + ~genv + ~client_id + ~request + ~workload + ~default_ret:env + env let enqueue_persistent - (genv: ServerEnv.genv) - (client_id: Persistent_connection.Prot.client_id) - (request: Persistent_connection_prot.request) - : unit = - let open MonitorProt.PersistentProt in - begin - match request with - | LspToServer (NotificationMessage (CancelRequestNotification params), _) -> - let id = params.CancelRequest.id in - ServerMonitorListenerState.(cancellation_requests := IdSet.add id !cancellation_requests) - | _ -> () - end; - ServerMonitorListenerState.push_new_workload - (fun env -> handle_persistent genv env client_id request); - () + (genv : ServerEnv.genv) + (client_id : LspProt.client_id) + (request : LspProt.request_with_metadata) : unit Lwt.t = + match get_persistent_handler ~genv ~client_id ~request with + | Handle_persistent_immediately workload -> + handle_persistent_immediately ~genv ~client_id ~request ~workload + | Handle_parallelizable_persistent workload -> + let workload = handle_parallelizable_persistent ~genv ~client_id ~request ~workload in + ServerMonitorListenerState.push_new_parallelizable_workload workload; + Lwt.return_unit + | Handle_nonparallelizable_persistent workload -> + let workload = handle_nonparallelizable_persistent ~genv ~client_id ~request ~workload in + ServerMonitorListenerState.push_new_workload workload; + Lwt.return_unit diff --git a/src/server/command_handler/commandHandler.mli b/src/server/command_handler/commandHandler.mli index 23ca0a2b378..d61c201b5de 100644 --- a/src/server/command_handler/commandHandler.mli +++ b/src/server/command_handler/commandHandler.mli @@ -1,17 +1,12 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val enqueue_or_handle_ephemeral: - ServerEnv.genv -> - MonitorProt.request_id * ServerProt.Request.command_with_context -> - unit Lwt.t +val enqueue_or_handle_ephemeral : + ServerEnv.genv -> MonitorProt.request_id * ServerProt.Request.command_with_context -> unit Lwt.t -val enqueue_persistent: - ServerEnv.genv -> - Persistent_connection_prot.client_id -> - Persistent_connection_prot.request -> - unit +val enqueue_persistent : + ServerEnv.genv -> LspProt.client_id -> LspProt.request_with_metadata -> unit Lwt.t diff --git a/src/server/command_handler/dune b/src/server/command_handler/dune new file mode 100644 index 00000000000..676b61c4553 --- /dev/null +++ b/src/server/command_handler/dune @@ -0,0 +1,21 @@ +(library + (name flow_server_command_handler) + (wrapped false) + (libraries + flow_common + flow_common_lsp_conversions + flow_exit_status + flow_logging_utils + flow_parser_utils + flow_server_env + flow_server_find_refs + flow_server_rechecker + flow_service_autocomplete + flow_service_inference + flow_service_inference_module + flow_service_type_info + flow_state_heaps_module + flow_typing + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/server/command_handler/getDef_js.ml b/src/server/command_handler/getDef_js.ml index ddc09636b47..c5117058ec2 100644 --- a/src/server/command_handler/getDef_js.ml +++ b/src/server/command_handler/getDef_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,6 +7,7 @@ open Core_result open Utils_js +open Parsing_heaps_utils type getdef_type = | Gdloc of Loc.t @@ -23,202 +24,226 @@ type state = { location that is fed into a subsequent get-def. 
*) type result = | Done of Loc.t - | Chain of int * int (* line, column *) + | Chain of int * int -let id state name = +(* line, column *) + +let id ~reader state name = let env = Env.all_entries () in - Scope.Entry.(match SMap.get name env with - | Some (Value { kind = Const ConstImportBinding; general = v; _ }) -> - (* for references to import bindings, point directly to the exports they + Scope.Entry.( + match SMap.get name env with + | Some (Value { kind = Const ConstImportBinding; general = v; _ }) -> + (* for references to import bindings, point directly to the exports they resolve to (rather than to the import bindings, which would themselves in turn point to the exports they resolve to) *) - state.getdef_type <- Some (Gdval v) - | Some (Type { type_binding_kind = ImportTypeBinding; _type = v; _ }) -> - (* similarly for import type bindings *) - state.getdef_type <- Some (Gdval v) - | Some entry -> - state.getdef_type <- Some (Gdloc (entry_loc entry)) - | None -> - () - ) - -let getdef_id (state, loc1) _cx name loc2 = - if Reason.in_range loc1 loc2 - then id state name; + state.getdef_type <- Some (Gdval v) + | Some (Type { type_binding_kind = ImportTypeBinding; type_ = v; _ }) -> + (* similarly for import type bindings *) + state.getdef_type <- Some (Gdval v) + | Some entry -> state.getdef_type <- Some (Gdloc (entry_loc entry |> loc_of_aloc ~reader)) + | None -> ()) + +let getdef_id ~reader (state, loc1) _cx name loc2 = + let loc2 = loc_of_aloc ~reader loc2 in + if Reason.in_range loc1 loc2 then id ~reader state name; false -let getdef_lval (state, loc1) _cx name loc2 rhs = - if Reason.in_range loc1 loc2 - then match rhs with - | Type_inference_hooks_js.Val v -> - state.getdef_type <- Some (Gdval v) - | Type_inference_hooks_js.Parent t -> - state.getdef_type <- Some (Gdmem (name, t)) - | Type_inference_hooks_js.Id -> - id state name - -let getdef_import (state, user_requested_loc) _cx source import_loc = - if (Reason.in_range user_requested_loc import_loc) - then ( +let getdef_lval ~reader (state, loc1) _cx name loc2 rhs = + let loc2 = loc_of_aloc ~reader loc2 in + if Reason.in_range loc1 loc2 then + match rhs with + | Type_inference_hooks_js.Val v -> state.getdef_type <- Some (Gdval v) + | Type_inference_hooks_js.Parent t -> state.getdef_type <- Some (Gdmem (name, t)) + | Type_inference_hooks_js.Id -> id ~reader state name + +let getdef_import ~reader (state, user_requested_loc) _cx (loc, name) import_loc = + let source = (loc_of_aloc ~reader loc, name) in + let import_loc = loc_of_aloc ~reader import_loc in + if Reason.in_range user_requested_loc import_loc then state.getdef_type <- Some (Gdrequire (source, import_loc)) - ) - -let getdef_require_pattern state loc = - state.getdef_require_patterns <- loc::state.getdef_require_patterns - -let extract_member_def cx this name = - let this_t = Flow_js.resolve_type cx this in - let member_result = Flow_js.Members.extract cx this_t in - - let result_str, t = Flow_js.Members.(match member_result with - | Success _ -> "SUCCESS", this - | SuccessModule _ -> "SUCCESS", this - | FailureNullishType -> "FAILURE_NULLABLE", this - | FailureAnyType -> "FAILURE_NO_COVERAGE", this - | FailureUnhandledType t -> "FAILURE_UNHANDLED_TYPE", t) in - - let json_data_to_log = Hh_json.(JSON_Object [ - "type", Debug_js.json_of_t ~depth:3 cx t; - "gd_name", JSON_String name; - "result", JSON_String result_str; - ]) in - - let command_result = Flow_js.Members.to_command_result member_result in - Done begin match command_result with - | Error _ -> Loc.none - | 
Ok result_map -> - begin match SMap.get name result_map with - | Some (loc, t) -> - begin match loc with - | None -> Type.loc_of_t t - | Some x -> x - end - | None -> Loc.none - end - end, Some json_data_to_log -let getdef_from_type_table cx loc = - let typetable = Context.type_table cx in - let type_info = - Type_table.find_type_info_with_pred typetable (fun l -> Loc.contains l loc) +let getdef_require_pattern ~reader state loc = + let loc = loc_of_aloc ~reader loc in + state.getdef_require_patterns <- loc :: state.getdef_require_patterns + +let extract_member_def ~reader cx this name = + let member_result = Members.extract cx this in + let (result_str, t) = + Members.( + match member_result with + | Success _ -> ("SUCCESS", this) + | SuccessModule _ -> ("SUCCESS", this) + | FailureNullishType -> ("FAILURE_NULLABLE", this) + | FailureAnyType -> ("FAILURE_NO_COVERAGE", this) + | FailureUnhandledType t -> ("FAILURE_UNHANDLED_TYPE", t) + | FailureUnhandledMembers t -> ("FAILURE_UNHANDLED_MEMBERS", t)) + in + let json_data_to_log = + Hh_json.( + JSON_Object + [ + ("type", Debug_js.json_of_t ~depth:3 cx t); + ("gd_name", JSON_String name); + ("result", JSON_String result_str); + ]) in - Option.bind type_info begin function - | _, (_, _, Type_table.Import (name, obj_t)) - | _, (name, _, Type_table.PropertyAccess obj_t) -> - Some (extract_member_def cx obj_t name) - | _ -> None - end + let command_result = Members.to_command_result member_result in + ( Done + begin + match command_result with + | Error _ -> Loc.none + | Ok result_map -> + begin + match SMap.get name result_map with + | Some (loc, t) -> + begin + match loc with + | None -> Type.loc_of_t t |> loc_of_aloc ~reader + | Some x -> loc_of_aloc ~reader x + end + | None -> Loc.none + end + end, + Some json_data_to_log ) + +let getdef_from_typed_ast ~reader cx typed_ast loc = + match Typed_ast_utils.find_get_def_info typed_ast loc with + | Some { Typed_ast_utils.get_def_prop_name = name; get_def_object_source } -> + let obj_t = + match get_def_object_source with + | Typed_ast_utils.GetDefType t -> t + | Typed_ast_utils.GetDefRequireLoc loc -> Context.find_require cx loc + in + Some (extract_member_def ~reader cx obj_t name) + | _ -> None (* TODO: the uses of `resolve_type` in the implementation below are pretty delicate, since in many cases the resulting type lacks location information. Edit with caution. *) -let getdef_get_result_from_hooks ~options cx state = - Ok begin match state.getdef_type with - | Some Gdloc loc -> - if List.exists (fun range -> Loc.contains range loc) state.getdef_require_patterns - then - let { Loc.line; column; _ } = loc.Loc.start in - Chain (line, column), None - else Done loc, None - | Some Gdval v -> - (* Use `possible_types_of_type` instead of `resolve_type` because we're +let getdef_get_result_from_hooks ~options ~reader cx state = + Ok + begin + match state.getdef_type with + | Some (Gdloc loc) -> + if List.exists (fun range -> Loc.contains range loc) state.getdef_require_patterns then + let { Loc.line; column; _ } = loc.Loc.start in + (Chain (line, column), None) + else + (Done loc, None) + | Some (Gdval v) -> + (* Use `possible_types_of_type` instead of `resolve_type` because we're actually interested in the location of the resolved types. 
*) - let ts = Flow_js.possible_types_of_type cx v in - Done begin match ts with - | [t] -> Type.def_loc_of_t t - | _ -> Loc.none - end, None - | Some Gdmem (name, this) -> - extract_member_def cx this name - | Some Gdrequire ((source_loc, name), require_loc) -> - let module_t = Flow_js.resolve_type cx (Context.find_require cx source_loc) in - (* function just so we don't do the work unless it's actually needed. *) - let get_imported_file () = - let filename = Module_heaps.get_file Expensive.warn ( - Module_js.imported_module ~options - ~node_modules_containers:!Files.node_modules_containers - (Context.file cx) (Nel.one require_loc) name - ) in - (match filename with - | Some file -> Loc.({none with source = Some file;}) - | None -> Loc.none) - in - Done Type.(match module_t with - (** - * TODO: Specialized `import` hooks so that get-defs on named - * imports point to their actual remote def location. - *) - | ModuleT(_, {cjs_export; _; }, _) -> - (* If we have a location for the cjs export, go there. Otherwise - * fall back to just the top of the file *) - let loc = match cjs_export with - | Some t -> loc_of_t t (* This can return Loc.none *) - | None -> Loc.none + let ts = Flow_js.possible_types_of_type cx v in + ( Done + begin + match ts with + | [t] -> Type.def_loc_of_t t |> loc_of_aloc ~reader + | _ -> Loc.none + end, + None ) + | Some (Gdmem (name, this)) -> extract_member_def ~reader cx this name + | Some (Gdrequire ((source_loc, name), require_loc)) -> + let module_t = + ALoc.of_loc source_loc |> Context.find_require cx |> Members.resolve_type cx + in + (* function just so we don't do the work unless it's actually needed. *) + let get_imported_file () = + let filename = + Module_heaps.Reader.get_file + ~reader + ~audit:Expensive.warn + (Module_js.imported_module + ~options + ~reader:(Abstract_state_reader.State_reader reader) + ~node_modules_containers:!Files.node_modules_containers + (Context.file cx) + (Nel.one (ALoc.of_loc require_loc)) + name) in - if loc = Loc.none then - get_imported_file () - else - loc - | DefT (_, AnyT) -> - get_imported_file () - | _ -> failwith ( - spf "Internal Flow Error: Expected ModuleT for %S, but got %S!" - name - (string_of_ctor module_t) - ) - ), None - | None -> Done Loc.none, None - end - -let getdef_get_result ~options cx state loc = - match getdef_from_type_table cx loc with + match filename with + | Some file -> Loc.{ none with source = Some file } + | None -> Loc.none + in + ( Done + Type.( + match module_t with + (* + * TODO: Specialized `import` hooks so that get-defs on named + * imports point to their actual remote def location. + *) + | ModuleT (_, { cjs_export; _ }, _) -> + (* If we have a location for the cjs export, go there. Otherwise + * fall back to just the top of the file *) + let loc = + match cjs_export with + | Some t -> loc_of_t t |> loc_of_aloc ~reader + (* This can return Loc.none *) + | None -> Loc.none + in + if loc = Loc.none then + get_imported_file () + else + loc + | AnyT _ -> get_imported_file () + | _ -> + failwith + (spf + "Internal Flow Error: Expected ModuleT for %S, but got %S!" 
+ name + (string_of_ctor module_t))), + None ) + | None -> (Done Loc.none, None) + end + +let getdef_get_result ~options ~reader cx typed_ast state loc = + match getdef_from_typed_ast ~reader cx typed_ast loc with | Some x -> Ok x - | None -> getdef_get_result_from_hooks ~options cx state + | None -> getdef_get_result_from_hooks ~options ~reader cx state -let getdef_set_hooks pos = +let getdef_set_hooks ~reader pos = let state = { getdef_type = None; getdef_require_patterns = [] } in - Type_inference_hooks_js.set_id_hook (getdef_id (state, pos)); - Type_inference_hooks_js.set_lval_hook (getdef_lval (state, pos)); - Type_inference_hooks_js.set_import_hook (getdef_import (state, pos)); - Type_inference_hooks_js.set_require_pattern_hook (getdef_require_pattern state); + Type_inference_hooks_js.set_id_hook (getdef_id ~reader (state, pos)); + Type_inference_hooks_js.set_lval_hook (getdef_lval ~reader (state, pos)); + Type_inference_hooks_js.set_import_hook (getdef_import ~reader (state, pos)); + Type_inference_hooks_js.set_require_pattern_hook (getdef_require_pattern ~reader state); state -let getdef_unset_hooks () = - Type_inference_hooks_js.reset_hooks () +let getdef_unset_hooks () = Type_inference_hooks_js.reset_hooks () -let rec get_def ~options ~workers ~env ~profiling ~depth (file_input, line, col) = +let rec get_def ~options ~reader ~env ~profiling ~depth (file_input, line, col) = let filename = File_input.filename_of_file_input file_input in let file = File_key.SourceFile filename in let loc = Loc.make file line col in - let state = getdef_set_hooks loc in + let state = getdef_set_hooks ~reader loc in let%lwt check_result = File_input.content_of_file_input file_input - %>>= (fun content -> - Types_js.basic_check_contents ~options ~workers ~env ~profiling content file - ) + %>>= (fun content -> Types_js.basic_check_contents ~options ~env ~profiling content file) in let%lwt getdef_result = - map_error ~f:(fun str -> str, None) check_result - %>>= (fun (cx, _, _, _) -> Profiling_js.with_timer_lwt profiling ~timer:"GetResult" ~f:(fun () -> - try_with_json (fun () -> Lwt.return (getdef_get_result ~options cx state loc)) - )) + map_error ~f:(fun str -> (str, None)) check_result + %>>= fun (cx, _, _, typed_ast) -> + Profiling_js.with_timer_lwt profiling ~timer:"GetResult" ~f:(fun () -> + try_with_json (fun () -> + Lwt.return (getdef_get_result ~reader ~options cx typed_ast state loc))) in - let result, json_object = split_result getdef_result in + let (result, json_object) = split_result getdef_result in getdef_unset_hooks (); match result with | Error e -> Lwt.return (Error e, json_object) - | Ok ok -> (match ok with + | Ok ok -> + (match ok with | Done loc -> Lwt.return (Ok loc, json_object) | Chain (line, col) -> - let%lwt result, chain_json_object = - get_def ~options ~workers ~env ~profiling ~depth:(depth+1) (file_input, line, col) in - Lwt.return (match result with - | Error e -> Error e, json_object - | Ok loc' -> - (* Chaining can sometimes lead to a dead end, due to lack of type + let%lwt (result, chain_json_object) = + get_def ~options ~reader ~env ~profiling ~depth:(depth + 1) (file_input, line, col) + in + Lwt.return + (match result with + | Error e -> (Error e, json_object) + | Ok loc' -> + (* Chaining can sometimes lead to a dead end, due to lack of type information. In that case, fall back to the previous location. 
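
(* Editor's sketch, not part of the patch: the Done/Chain protocol used by
 * get_def above, modeled standalone. The position type and lookup table are
 * hypothetical toys; the real get_def re-checks the file contents at the
 * chained position. A Chain result names another position to query, and if
 * that query dead-ends we keep the position we already had. *)
type loc = { line : int; col : int }

let none = { line = 0; col = 0 }

type result =
  | Done of loc
  | Chain of int * int (* line, column *)

(* Toy stand-in for the type information consulted by get-def. *)
let lookup table pos =
  match List.assoc_opt pos table with
  | Some r -> r
  | None -> Done none

let rec get_def ~table ~depth ((line, col) as pos) =
  match lookup table pos with
  | Done loc -> loc
  | Chain _ when depth > 10 -> { line; col }
  | Chain (line', col') ->
    let loc' = get_def ~table ~depth:(depth + 1) (line', col') in
    (* Chaining can dead-end; fall back to the position we chained from. *)
    if loc' = none then
      { line; col }
    else
      loc'

let () =
  let table = [ ((1, 5), Chain (3, 2)); ((3, 2), Done { line = 10; col = 0 }) ] in
  let d = get_def ~table ~depth:0 (1, 5) in
  Printf.printf "definition at %d:%d\n" d.line d.col
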
*) - if loc' = Loc.none - then (Ok loc, json_object) - else (Ok loc', chain_json_object) - ) - ) + if loc' = Loc.none then + (Ok loc, json_object) + else + (Ok loc', chain_json_object))) diff --git a/src/server/command_handler/refactor_js.ml b/src/server/command_handler/refactor_js.ml index 977f98c738c..fbd868105a6 100644 --- a/src/server/command_handler/refactor_js.ml +++ b/src/server/command_handler/refactor_js.ml @@ -1,170 +1,194 @@ (** - * Copyright (c) 2014, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - open Utils_js -let (>>=) = Core_result.(>>=) -let (>>|) = Core_result.(>>|) +let ( >>= ) = Core_result.( >>= ) + +let ( >>| ) = Core_result.( >>| ) let get_ref_kinds refs loc = - refs - |> List.filter (fun (_, ref_loc) -> ref_loc = loc) - |> List.map fst - -class rename_mapper refs new_name = object(this) - inherit Flow_ast_mapper.mapper as super - - method! identifier (expr: Loc.t Ast.Identifier.t) = - let loc, _ = expr in - if List.exists (fun (_, ref_loc) -> ref_loc = loc) refs then - loc, new_name - else - expr - - method! object_property_type (opt: (Loc.t, Loc.t) Ast.Type.Object.Property.t) = - let open Ast.Type.Object.Property in - let opt = super#object_property_type opt in - let loc, ({key; _} as property) = opt in - let key' = - let open Ast.Expression.Object.Property in - match key with - | Identifier id -> - let id' = this#identifier id in - if id == id' then key else Identifier id' - | _ -> key - in - if key == key' then opt else (loc, { property with key=key' }) - - method! pattern_object_property ?kind (prop: (Loc.t, Loc.t) Ast.Pattern.Object.Property.t') = - let open Ast.Pattern.Object.Property in - let { key; pattern; shorthand } = prop in - if not shorthand then - super#pattern_object_property prop - else begin - let key_loc = match key with - Literal (x, _) | Identifier (x, _) | Computed (x, _) -> x - in - let ref_kinds = get_ref_kinds refs key_loc in - let key' = - if List.mem FindRefsTypes.PropertyAccess ref_kinds then - this#pattern_object_property_key ?kind key - else - key - in - let pattern' = - if List.mem FindRefsTypes.Local ref_kinds then - this#pattern_object_property_pattern ?kind pattern - else - pattern - in - if key == key' && pattern == pattern' then - prop + refs |> List.filter (fun (_, ref_loc) -> ref_loc = loc) |> Core_list.map ~f:fst + +class rename_mapper refs new_name = + object (this) + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method! identifier (expr : (Loc.t, Loc.t) Ast.Identifier.t) = + let (loc, _) = expr in + if List.exists (fun (_, ref_loc) -> ref_loc = loc) refs then + Flow_ast_utils.ident_of_source (loc, new_name) else - (* TODO if both changed (e.g. destructuring requires) then retain shorthand *) - { key = key'; pattern = pattern'; shorthand = false } - end + expr - method! object_property (prop: (Loc.t, Loc.t) Ast.Expression.Object.Property.t) = - let open Ast.Expression.Object.Property in - match prop with - | loc, Init { key; value; shorthand; } -> - if not shorthand then - super#object_property prop - else begin - let key_loc = match key with - Literal (x, _) | Identifier (x, _) | PrivateName (x, _) | Computed (x, _) -> x - in - let ref_kinds = get_ref_kinds refs key_loc in + method! 
object_property_type (opt : (Loc.t, Loc.t) Ast.Type.Object.Property.t) = + Ast.Type.Object.Property.( + let opt = super#object_property_type opt in + let (loc, ({ key; _ } as property)) = opt in let key' = - if List.mem FindRefsTypes.PropertyDefinition ref_kinds then - this#object_key key - else - key - in - let value' = - if List.mem FindRefsTypes.Local ref_kinds then - this#expression value - else - value + Ast.Expression.Object.Property.( + match key with + | Identifier id -> + let id' = this#identifier id in + if id == id' then + key + else + Identifier id' + | _ -> key) in - if key == key' && value == value' then - prop + if key == key' then + opt + else + (loc, { property with key = key' })) + + method! pattern_object_property ?kind (prop : (Loc.t, Loc.t) Ast.Pattern.Object.Property.t') = + Ast.Pattern.Object.Property.( + let { key; pattern; default; shorthand } = prop in + if not shorthand then + super#pattern_object_property prop else - (loc, Init { key = key'; value = value'; shorthand = false }) - end - (* TODO *) - | _ -> super#object_property prop -end + let key_loc = + match key with + | Literal (x, _) + | Identifier (x, _) + | Computed (x, _) -> + x + in + let ref_kinds = get_ref_kinds refs key_loc in + let key' = + if List.mem FindRefsTypes.PropertyAccess ref_kinds then + this#pattern_object_property_key ?kind key + else + key + in + let pattern' = + if List.mem FindRefsTypes.Local ref_kinds then + this#pattern_object_property_pattern ?kind pattern + else + pattern + in + (* TODO *) + let default' = default in + if key == key' && pattern == pattern' && default == default' then + prop + else + (* TODO if both changed (e.g. destructuring requires) then retain shorthand *) + { key = key'; pattern = pattern'; default = default'; shorthand = false }) + + method! object_property (prop : (Loc.t, Loc.t) Ast.Expression.Object.Property.t) = + Ast.Expression.Object.Property.( + match prop with + | (loc, Init { key; value; shorthand }) -> + if not shorthand then + super#object_property prop + else + let key_loc = + match key with + | Literal (x, _) + | Identifier (x, _) + | PrivateName (x, _) + | Computed (x, _) -> + x + in + let ref_kinds = get_ref_kinds refs key_loc in + (* What about computed properties? 
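
(* Editor's sketch, not part of the patch: why renaming through a shorthand
 * property drops the shorthand, modeled on a toy record instead of the real
 * AST. This mirrors the destructuring-pattern case above: if only the property
 * side or only the local binding is among the refs being renamed, the two
 * names diverge, so the mapper rebuilds the property with shorthand = false. *)
type ref_kind =
  | PropertyAccess
  | Local

type prop = { key : string; value : string; shorthand : bool }

let rename_shorthand ~new_name ~ref_kinds ({ key; value; _ } as prop) =
  let key' = if List.mem PropertyAccess ref_kinds then new_name else key in
  let value' = if List.mem Local ref_kinds then new_name else value in
  if key' == key && value' == value then
    prop
  else
    { key = key'; value = value'; shorthand = false }

let () =
  (* Renaming only the binding of `let { foo } = x` to `bar` yields `{ foo: bar }`. *)
  let p =
    rename_shorthand
      ~new_name:"bar"
      ~ref_kinds:[Local]
      { key = "foo"; value = "foo"; shorthand = true }
  in
  Printf.printf "{ %s: %s } shorthand=%b\n" p.key p.value p.shorthand
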
*) + let key' = + if List.mem FindRefsTypes.PropertyDefinition ref_kinds then + this#object_key key + else + key + in + let value' = + if List.mem FindRefsTypes.Local ref_kinds then + this#expression value + else + value + in + if key == key' && value == value' then + prop + else + (loc, Init { key = key'; value = value'; shorthand = false }) + (* TODO *) + | _ -> super#object_property prop) + end -let mapper_to_edits (ast_mapper: Flow_ast_mapper.mapper) (ast: (Loc.t, Loc.t) Ast.program) = +let mapper_to_edits (ast_mapper : Loc.t Flow_ast_mapper.mapper) (ast : (Loc.t, Loc.t) Ast.program) + = let new_ast = ast_mapper#program ast in let changes = Flow_ast_differ.program Flow_ast_differ.Standard ast new_ast in Ast_diff_printer.edits_of_changes None changes -let get_with_default default key map = - FilenameMap.find_opt key map - |> Option.value ~default +let get_with_default default key map = FilenameMap.find_opt key map |> Option.value ~default let split_by_source refs = - List.fold_left begin fun acc ref -> - let (_, loc) = ref in - acc >>= fun map -> - Core_result.of_option ~error:"No source found" Loc.(loc.source) - >>= fun source -> - let lst = ref :: (get_with_default [] source map) in - Ok (FilenameMap.add source lst map) - end (Ok FilenameMap.empty) refs + List.fold_left + begin + fun acc ref -> + let (_, loc) = ref in + acc + >>= fun map -> + Core_result.of_option ~error:"No source found" Loc.(loc.source) + >>= fun source -> + let lst = ref :: get_with_default [] source map in + Ok (FilenameMap.add source lst map) + end + (Ok FilenameMap.empty) + refs let apply_rename_to_file _file ast refs new_name = let mapper = new rename_mapper refs new_name in mapper_to_edits mapper ast -let apply_rename_to_files refs_by_file new_name = - FilenameMap.fold begin fun file refs acc -> - acc >>= fun edits -> - FindRefsUtils.get_ast_result file - >>| fun (ast, _, _) -> - let file_edits = apply_rename_to_file file ast refs new_name in - List.rev_append file_edits edits - end refs_by_file (Ok []) +let apply_rename_to_files ~reader refs_by_file new_name = + FilenameMap.fold + begin + fun file refs acc -> + acc + >>= fun edits -> + FindRefsUtils.get_ast_result ~reader file + >>| fun (ast, _, _) -> + let file_edits = apply_rename_to_file file ast refs new_name in + List.rev_append file_edits edits + end + refs_by_file + (Ok []) >>| List.rev type refactor_result = ((Loc.t * string) list option, string) result Lwt.t -let rename ~genv ~env ~profiling ~file_input ~line ~col ~new_name = +let rename ~reader ~genv ~env ~profiling ~file_input ~line ~col ~new_name = (* TODO verify that new name is a valid identifier *) (* TODO maybe do something with the json? *) (* TODO support rename based on multi-hop find-refs *) - let%lwt find_refs_response, _ = FindRefs_js.find_refs - ~genv - ~env - ~profiling - ~file_input - ~line - ~col - ~global:true - ~multi_hop:false + let%lwt (find_refs_response, _) = + FindRefs_js.find_global_refs + ~reader + ~genv + ~env + ~profiling + ~file_input + ~line + ~col + ~multi_hop:false in - find_refs_response %>>= begin function + find_refs_response + %>>= function | None -> Lwt.return (Ok None) | Some (_old_name, refs) -> (* TODO prevent naming conflicts *) (* TODO only rename renameable locations (e.g. 
not `default` in `export default`) *) split_by_source refs - %>>= begin fun refs_by_file -> - apply_rename_to_files refs_by_file new_name - %>>= fun (edits: (Loc.t * string) list) -> - Lwt.return @@ Ok (Some edits) - end - end + %>>= fun refs_by_file -> + apply_rename_to_files ~reader refs_by_file new_name + %>>= (fun (edits : (Loc.t * string) list) -> Lwt.return @@ Ok (Some edits)) -let refactor ~genv ~env ~profiling ~file_input ~line ~col ~refactor_variant : refactor_result = +let refactor ~reader ~genv ~env ~profiling ~file_input ~line ~col ~refactor_variant : + refactor_result = match refactor_variant with | ServerProt.Request.RENAME new_name -> - rename ~genv ~env ~profiling ~file_input ~line ~col ~new_name + rename ~reader ~genv ~env ~profiling ~file_input ~line ~col ~new_name diff --git a/src/server/dune b/src/server/dune new file mode 100644 index 00000000000..42a21163056 --- /dev/null +++ b/src/server/dune @@ -0,0 +1,18 @@ +(library + (name flow_server) + (wrapped false) + (libraries + flow_exit_status + flow_logging_lwt + flow_logging_utils + flow_monitor_rpc + flow_procs + flow_server_env + flow_server_files + flow_server_monitor_listener + flow_server_rechecker + flow_service_inference + flow_shared_mem + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/server/env/checkedSet.ml b/src/server/env/checkedSet.ml index 3f97f0928ff..413fcf28438 100644 --- a/src/server/env/checkedSet.ml +++ b/src/server/env/checkedSet.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -27,25 +27,39 @@ type kind = | Focused | Dependent | Dependency + type t = kind FilenameMap.t -let combine a b = match (a, b) with -| (Focused, _) | (_, Focused) -> Focused -| (Dependent, _) | (_, Dependent) -> Dependent -| _ -> Dependency +(* This uses polymorphic compare under the hood. Use caution if `kind` becomes a more complex + * type. 
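
(* Editor's sketch, not part of the patch: the CheckedSet kind lattice in
 * isolation, with string filenames standing in for File_key.t. combine picks
 * the strongest reason a file is being checked (Focused > Dependent >
 * Dependency), which is how a file that ends up in the set for more than one
 * reason gets a single kind. *)
module SMap = Map.Make (String)

type kind =
  | Focused
  | Dependent
  | Dependency

let combine a b =
  match (a, b) with
  | (Focused, _)
  | (_, Focused) ->
    Focused
  | (Dependent, _)
  | (_, Dependent) ->
    Dependent
  | _ -> Dependency

let union = SMap.union (fun _ a b -> Some (combine a b))

let string_of_kind = function
  | Focused -> "Focused"
  | Dependent -> "Dependent"
  | Dependency -> "Dependency"

let () =
  let a = SMap.(empty |> add "a.js" Dependency |> add "b.js" Focused) in
  let b = SMap.(empty |> add "a.js" Dependent |> add "c.js" Dependency) in
  union a b |> SMap.iter (fun file k -> Printf.printf "%s -> %s\n" file (string_of_kind k))
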
*) +let debug_equal = FilenameMap.equal + +let combine a b = + match (a, b) with + | (Focused, _) + | (_, Focused) -> + Focused + | (Dependent, _) + | (_, Dependent) -> + Dependent + | _ -> Dependency let empty = FilenameMap.empty + let is_empty = FilenameMap.is_empty + let of_focused_list = List.fold_left (fun acc f -> FilenameMap.add f Focused acc) empty +let cardinal = FilenameMap.cardinal + let mem = FilenameMap.mem let add = let add_all files kind checked = - Option.value_map files + Option.value_map + files ~f:(fun files -> - FilenameSet.fold (fun f checked -> FilenameMap.add ~combine f kind checked) files checked - ) + FilenameSet.fold (fun f checked -> FilenameMap.add ~combine f kind checked) files checked) ~default:checked in fun ?focused ?dependents ?dependencies checked -> @@ -54,54 +68,89 @@ let add = |> add_all dependents Dependent |> add_all dependencies Dependency -let remove set_to_remove = - FilenameMap.filter (fun k _ -> not (FilenameSet.mem k set_to_remove)) +let remove set_to_remove = FilenameMap.filter (fun k _ -> not (FilenameSet.mem k set_to_remove)) -let fold f acc checked = - FilenameMap.fold (fun k _ acc -> f acc k) checked acc +let fold f acc checked = FilenameMap.fold (fun k _ acc -> f acc k) checked acc let union = FilenameMap.union ~combine:(fun _ a b -> Some (combine a b)) -let diff a b = FilenameMap.filter (fun k _ -> not (FilenameMap.mem k b)) a +(* Remove from `a` every key which exists in `b` and which has an equal or higher kind in `b` than + * it does in `a`, where Focused > Dependent > Dependency. So + * + * diff + * { A: Focused, B: Focused, C: Dependency, D: Dependent } + * { A: Focused, B: Dependent, C: Dependent} + * + * = { B: Focused, D: Dependent } + *) +let diff a b = + FilenameMap.filter + (fun k kind1 -> + let kind2 = FilenameMap.get k b in + match (kind1, kind2) with + | (_, None) -> true (* Key doesn't exist in b, so keep k around *) + | (_, Some Focused) -> false (* Focused removes anything *) + | (Focused, _) -> true (* Focused survives anything except Focused *) + | (_, Some Dependent) -> false (* Dependent removes anything except Focused *) + | (Dependent, Some Dependency) -> true (* Dependent survives Dependency *) + | (Dependency, Some Dependency) -> false) + (* Dependency removes Dependency *) + a let filter ~f checked = FilenameMap.filter (fun k _ -> f k) checked -let filter_into_set ~f checked = FilenameMap.fold - (fun key kind acc -> if f kind then FilenameSet.add key acc else acc) - checked - FilenameSet.empty +let filter_into_set ~f checked = + FilenameMap.fold + (fun key kind acc -> + if f kind then + FilenameSet.add key acc + else + acc) + checked + FilenameSet.empty (* Gives you a FilenameSet of all the checked files *) let all = filter_into_set ~f:(fun _ -> true) + (* Gives you a FilenameSet of all the focused files *) let focused = filter_into_set ~f:(fun kind -> kind = Focused) + (* Gives you a FilenameSet of all the dependent files *) let dependents = filter_into_set ~f:(fun kind -> kind = Dependent) + (* Gives you a FilenameSet of all the dependency files *) let dependencies = filter_into_set ~f:(fun kind -> kind = Dependency) (* Helper function for debugging *) -let debug_to_string ?(limit) = +let debug_to_string ?limit = let string_of_set set = - let files = Utils_js.FilenameSet.elements set - |> List.map (fun f -> spf "\"%s\"" (File_key.to_string f)) in - let files = match limit with + let files = + Utils_js.FilenameSet.elements set + |> Core_list.map ~f:(fun f -> spf "\"%s\"" (File_key.to_string f)) + in + let files 
= + match limit with | None -> files | Some n -> ListUtils.first_upto_n n (fun t -> Some (spf "[shown %d/%d]" n t)) files in String.concat "\n" files in fun checked -> - Printf.sprintf "Focused:\n%s\nDependents:\n%s\nDependencies:\n%s" + Printf.sprintf + "Focused:\n%s\nDependents:\n%s\nDependencies:\n%s" (checked |> focused |> string_of_set) (checked |> dependents |> string_of_set) (checked |> dependencies |> string_of_set) let debug_counts_to_string checked = - let focused, dependents, dependencies = FilenameMap.fold ( - fun _ kind (focused, dependents, dependencies) -> match kind with - | Focused -> (focused + 1, dependents, dependencies) - | Dependent -> (focused, dependents + 1, dependencies) - | Dependency -> (focused, dependents, dependencies + 1) - ) checked (0, 0, 0) in + let (focused, dependents, dependencies) = + FilenameMap.fold + (fun _ kind (focused, dependents, dependencies) -> + match kind with + | Focused -> (focused + 1, dependents, dependencies) + | Dependent -> (focused, dependents + 1, dependencies) + | Dependency -> (focused, dependents, dependencies + 1)) + checked + (0, 0, 0) + in Printf.sprintf "Focused: %d, Dependents: %d, Dependencies: %d" focused dependents dependencies diff --git a/src/server/env/checkedSet.mli b/src/server/env/checkedSet.mli index 83e25551d2c..bd78dc0ce68 100644 --- a/src/server/env/checkedSet.mli +++ b/src/server/env/checkedSet.mli @@ -1,35 +1,51 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type t -val empty: t -val is_empty: t -> bool -val of_focused_list: File_key.t list -> t -val mem: File_key.t -> t -> bool -val add: +val empty : t + +val is_empty : t -> bool + +val of_focused_list : File_key.t list -> t + +val cardinal : t -> int + +val mem : File_key.t -> t -> bool + +val add : ?focused:Utils_js.FilenameSet.t -> ?dependents:Utils_js.FilenameSet.t -> ?dependencies:Utils_js.FilenameSet.t -> t -> t -val remove: Utils_js.FilenameSet.t -> t -> t -val fold: ('a -> File_key.t -> 'a) -> 'a -> t -> 'a +val remove : Utils_js.FilenameSet.t -> t -> t + +val fold : ('a -> File_key.t -> 'a) -> 'a -> t -> 'a + +val union : t -> t -> t + +val diff : t -> t -> t + +val filter : f:(File_key.t -> bool) -> t -> t + +val all : t -> Utils_js.FilenameSet.t + +val focused : t -> Utils_js.FilenameSet.t + +val dependents : t -> Utils_js.FilenameSet.t -val union: t -> t -> t -val diff: t -> t -> t +val dependencies : t -> Utils_js.FilenameSet.t -val filter: f:(File_key.t -> bool) -> t -> t +(* This is O(n) in the size of the checked set. Because checked sets are typically very large, this +* operation should be avoided in production code. *) +val debug_equal : t -> t -> bool -val all: t -> Utils_js.FilenameSet.t -val focused: t -> Utils_js.FilenameSet.t -val dependents: t -> Utils_js.FilenameSet.t -val dependencies: t -> Utils_js.FilenameSet.t +val debug_to_string : ?limit:int -> t -> string -val debug_to_string: ?limit:int -> t -> string -val debug_counts_to_string: t -> string +val debug_counts_to_string : t -> string diff --git a/src/server/env/dependency_info.ml b/src/server/env/dependency_info.ml new file mode 100644 index 00000000000..105c285e8b4 --- /dev/null +++ b/src/server/env/dependency_info.ml @@ -0,0 +1,20 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Utils_js + +type t = + | Classic of FilenameSet.t FilenameMap.t + | TypesFirst of (FilenameSet.t * FilenameSet.t) FilenameMap.t + +let all_dependency_graph = function + | Classic map -> map + | TypesFirst map -> FilenameMap.map (fun (_sig_files, all_files) -> all_files) map + +let dependency_graph = function + | Classic map -> map + | TypesFirst map -> FilenameMap.map (fun (sig_files, _all_files) -> sig_files) map diff --git a/src/server/env/dune b/src/server/env/dune new file mode 100644 index 00000000000..e06f704488c --- /dev/null +++ b/src/server/env/dune @@ -0,0 +1,11 @@ +(library + (name flow_server_env) + (wrapped false) + (libraries + flow_common + flow_parser + flow_procs + flow_server_persistent_connection + flow_typing + ) +) diff --git a/src/server/env/serverEnv.ml b/src/server/env/serverEnv.ml index ed17851467a..f7f1ad22dd1 100644 --- a/src/server/env/serverEnv.ml +++ b/src/server/env/serverEnv.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,9 +10,9 @@ (*****************************************************************************) type genv = { - options : Options.t; - workers : MultiWorkerLwt.worker list option; - } + options: Options.t; + workers: MultiWorkerLwt.worker list option; +} (*****************************************************************************) (* The environment constantly maintained by the server *) @@ -21,34 +21,38 @@ type genv = { type errors = { (* errors are stored in a map from file path to error set, so that the errors from checking particular files can be cleared during recheck. *) - local_errors: Errors.ErrorSet.t Utils_js.FilenameMap.t; + local_errors: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; (* errors encountered during merge have to be stored separately so dependencies can be cleared during merge. *) - merge_errors: Errors.ErrorSet.t Utils_js.FilenameMap.t; + merge_errors: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; + (* warnings are stored in a map from file path to error set, so that the warnings + from checking particular files can be cleared during recheck. *) + warnings: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; (* error suppressions in the code *) suppressions: Error_suppressions.t; - (* lint severity settings in the code *) - severity_cover_set: ExactCover.lint_severity_cover Utils_js.FilenameMap.t; } type collated_errors = { - collated_errorset: Errors.ErrorSet.t; - collated_warning_map: Errors.ErrorSet.t Utils_js.FilenameMap.t; - collated_suppressed_errors: (Errors.error * Utils_js.LocSet.t) list; + collated_errorset: Errors.ConcreteLocPrintableErrorSet.t; + collated_warning_map: Errors.ConcreteLocPrintableErrorSet.t Utils_js.FilenameMap.t; + collated_suppressed_errors: (Loc.t Errors.printable_error * Loc_collections.LocSet.t) list; } type env = { - (* All the files that we at least parse. *) - files: Utils_js.FilenameSet.t; - (* All the files that we at least parse. *) - dependency_graph: Utils_js.FilenameSet.t Utils_js.FilenameMap.t; - (* All the current files we typecheck. 
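
(* Editor's sketch, not part of the patch: what the two Dependency_info
 * projections above return, using string filenames instead of File_key.t. In
 * types-first mode each file records both its signature dependencies and its
 * full dependencies; classic mode has only one graph, so both accessors return
 * the same map. *)
module SMap = Map.Make (String)
module SSet = Set.Make (String)

type t =
  | Classic of SSet.t SMap.t
  | TypesFirst of (SSet.t * SSet.t) SMap.t

let dependency_graph = function
  | Classic map -> map
  | TypesFirst map -> SMap.map (fun (sig_deps, _all_deps) -> sig_deps) map

let all_dependency_graph = function
  | Classic map -> map
  | TypesFirst map -> SMap.map (fun (_sig_deps, all_deps) -> all_deps) map

let () =
  let deps =
    TypesFirst (SMap.singleton "a.js" (SSet.singleton "b.js", SSet.of_list ["b.js"; "c.js"]))
  in
  let count graph = SMap.fold (fun _ s acc -> acc + SSet.cardinal s) graph 0 in
  Printf.printf
    "sig deps: %d, all deps: %d\n"
    (count (dependency_graph deps))
    (count (all_dependency_graph deps))
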
*) - checked_files: CheckedSet.t; - ordered_libs: string list; (* The lib files, in their merge order *) - libs: SSet.t; (* a subset of `files` *) - (* The files which didn't parse (skipped or errored) *) - unparsed: Utils_js.FilenameSet.t; - errors: errors; - collated_errors: collated_errors option ref; - connections: Persistent_connection.t; + (* All the files that we at least parse. *) + files: Utils_js.FilenameSet.t; + (* All the files that we at least parse. *) + dependency_info: Dependency_info.t; + (* All the current files we typecheck. *) + checked_files: CheckedSet.t; + ordered_libs: string list; + (* The lib files, in their merge order *) + libs: SSet.t; + (* a subset of `files` *) + (* The files which didn't parse (skipped or errored) *) + unparsed: Utils_js.FilenameSet.t; + errors: errors; + coverage: Coverage_response.file_coverage Utils_js.FilenameMap.t; + collated_errors: collated_errors option ref; + connections: Persistent_connection.t; } diff --git a/src/server/error_collator/dune b/src/server/error_collator/dune new file mode 100644 index 00000000000..a40ebf6b44c --- /dev/null +++ b/src/server/error_collator/dune @@ -0,0 +1,10 @@ +(library + (name flow_server_error_collator) + (wrapped false) + (libraries + flow_common + flow_server_env + flow_state_heaps_parsing + flow_state_readers + ) +) diff --git a/src/server/error_collator/errorCollator.ml b/src/server/error_collator/errorCollator.ml index 02a309946af..1031cb4faac 100644 --- a/src/server/error_collator/errorCollator.ml +++ b/src/server/error_collator/errorCollator.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -20,79 +20,134 @@ open Utils_js * then we can probably relax this. * 3. Throw away the collated errors when lazy mode's typecheck_contents adds more dependents or * dependencies to the checked set - **) -let regenerate = - let open Errors in - let open Error_suppressions in - let add_unused_suppression_warnings checked unused warnings = - (* For each unused suppression, create an warning *) - Error_suppressions.all_locs unused - |> List.fold_left - (fun warnings loc -> - let source_file = match Loc.source loc with Some x -> x | None -> File_key.SourceFile "-" in - (* In lazy mode, dependencies are modules which we typecheck not because we care about - * them, but because something important (a focused file or a focused file's dependent) - * needs these dependencies. Therefore, we might not typecheck a dependencies' dependents. - * - * This means there might be an unused suppression comment warning in a dependency which - * only shows up in lazy mode. 
- * warning in any dependency.*)
- if not (CheckedSet.dependencies checked |> FilenameSet.mem source_file)
- then begin
- let err =
- let msg = Flow_error.EUnusedSuppression loc in
- Flow_error.error_of_msg ~trace_reasons:[] ~source_file msg in
- let file_warnings = FilenameMap.get source_file warnings
- |> Option.value ~default:ErrorSet.empty
- |> ErrorSet.add err in
- FilenameMap.add source_file file_warnings warnings
- end else
+ * *)
+let regenerate ~reader =
+ Errors.(
+ Error_suppressions.(
+ let lazy_table_of_aloc = Parsing_heaps.Reader.get_sig_ast_aloc_table_unsafe_lazy ~reader in
+ let add_unused_suppression_warnings checked unused warnings =
+ (* For each unused suppression, create a warning *)
+ let deps = CheckedSet.dependencies checked in
+ let all_locs = Error_suppressions.all_locs unused in
+ Loc_collections.LocSet.fold
+ (fun loc warnings ->
+ let source_file =
+ match Loc.source loc with
+ | Some x -> x
+ | None -> File_key.SourceFile "-"
+ in
+ (* In lazy mode, dependencies are modules which we typecheck not because we care about
+ * them, but because something important (a focused file or a focused file's dependent)
+ * needs these dependencies. Therefore, we might not typecheck a dependency's dependents.
+ *
+ * This means there might be an unused suppression comment warning in a dependency which
+ * only shows up in lazy mode. To avoid this, we'll just avoid raising this kind of
+ * warning in any dependency. *)
+ if not (FilenameSet.mem source_file deps) then
+ let err =
+ let msg = Error_message.EUnusedSuppression (ALoc.of_loc loc) in
+ Flow_error.error_of_msg ~trace_reasons:[] ~source_file msg
+ |> Flow_error.concretize_error lazy_table_of_aloc
+ |> Flow_error.make_error_printable lazy_table_of_aloc
+ in
+ let file_warnings =
+ FilenameMap.get source_file warnings
+ |> Option.value ~default:ConcreteLocPrintableErrorSet.empty
+ |> ConcreteLocPrintableErrorSet.add err
+ in
+ FilenameMap.add source_file file_warnings warnings
+ else
+ warnings)
+ all_locs
 warnings
- )
- warnings
- in
- let acc_fun suppressions severity_cover filename file_errs
- (errors, warnings, suppressed, unused) =
- let file_errs, file_warns, file_suppressed, unused =
- filter_suppressed_errors suppressions severity_cover file_errs ~unused in
- let errors = ErrorSet.union file_errs errors in
- let warnings = FilenameMap.add filename file_warns warnings in
- let suppressed = List.rev_append file_suppressed suppressed in
- (errors, warnings, suppressed, unused)
- in
- fun env ->
- MonitorRPC.status_update ~event:ServerStatus.Collating_errors_start;
- let {
- ServerEnv.local_errors; merge_errors; suppressions; severity_cover_set;
- } = env.ServerEnv.errors in
+ in
+ let acc_fun
+ (type a)
+ ~options
+ suppressions
+ (f : File_key.t -> ConcreteLocPrintableErrorSet.t -> a -> a)
+ filename
+ file_errs
+ (errors, suppressed, unused) =
+ let root = Options.root options in
+ let file_options = Some (Options.file_options options) in
+ let (file_errs, file_suppressed, unused) =
+ Flow_error.make_errors_printable lazy_table_of_aloc file_errs
+ |> filter_suppressed_errors ~root ~file_options suppressions ~unused
+ in
+ let errors = f filename file_errs errors in
+ let suppressed = List.rev_append file_suppressed suppressed in
+ (errors, suppressed, unused)
+ in
+ fun ~options env ->
+ MonitorRPC.status_update ~event:ServerStatus.Collating_errors_start;
+ let { ServerEnv.local_errors; merge_errors; warnings; suppressions } =
+ env.ServerEnv.errors
+ in
+ (* 
NOTE Here we ensure that signature-verification errors correspond to the + * currently checked files. We need to do this filtering, since errors outside + * the checked set are not suppressed correctly and so some of these errors + * might linger post-error-suppression. *) + let checked_files = env.ServerEnv.checked_files in + let local_errors = + FilenameMap.mapi + (fun file errorset -> + if CheckedSet.mem file checked_files then + errorset + else + Flow_error.ErrorSet.filter + (fun error -> + match Flow_error.kind_of_error error with + | Errors.LintError Lints.SignatureVerificationFailure + | Errors.InferWarning Errors.ExportKind -> + false + | _ -> true) + errorset) + local_errors + in + let acc_err_fun = + acc_fun ~options suppressions (fun _ -> ConcreteLocPrintableErrorSet.union) + in + let (collated_errorset, collated_suppressed_errors, unused) = + (ConcreteLocPrintableErrorSet.empty, [], suppressions) + |> FilenameMap.fold acc_err_fun local_errors + |> FilenameMap.fold acc_err_fun merge_errors + in + let acc_warn_fun = acc_fun ~options suppressions FilenameMap.add in + let (warnings, collated_suppressed_errors, unused) = + (FilenameMap.empty, collated_suppressed_errors, unused) + |> FilenameMap.fold acc_warn_fun warnings + in + let collated_warning_map = + add_unused_suppression_warnings env.ServerEnv.checked_files unused warnings + in + { collated_errorset; collated_warning_map; collated_suppressed_errors })) - let acc_fun = acc_fun suppressions severity_cover_set in - let collated_errorset, warnings, collated_suppressed_errors, unused = - (ErrorSet.empty, FilenameMap.empty, [], suppressions) - |> FilenameMap.fold acc_fun local_errors - |> FilenameMap.fold acc_fun merge_errors +let get_with_separate_warnings ~reader ~options env = + ServerEnv.( + let collated_errors = + match !(env.collated_errors) with + | None -> + let collated_errors = regenerate ~reader ~options env in + env.collated_errors := Some collated_errors; + collated_errors + | Some collated_errors -> collated_errors in - - let collated_warning_map = - add_unused_suppression_warnings env.ServerEnv.checked_files unused warnings in - { collated_errorset; collated_warning_map; collated_suppressed_errors } - -let get_with_separate_warnings env = - let open ServerEnv in - let collated_errors = match !(env.collated_errors) with - | None -> - let collated_errors = regenerate env in - env.collated_errors := Some collated_errors; - collated_errors - | Some collated_errors -> - collated_errors - in - let { collated_errorset; collated_warning_map; collated_suppressed_errors } = collated_errors in - (collated_errorset, collated_warning_map, collated_suppressed_errors) + let { collated_errorset; collated_warning_map; collated_suppressed_errors } = + collated_errors + in + (collated_errorset, collated_warning_map, collated_suppressed_errors)) (* combine error maps into a single error set and a single warning set *) -let get env = - let open Errors in - let errors, warning_map, suppressed_errors = get_with_separate_warnings env in - let warnings = FilenameMap.fold (fun _key -> ErrorSet.union) warning_map ErrorSet.empty in - (errors, warnings, suppressed_errors) +let get ~reader ~options env = + Errors.( + let (errors, warning_map, suppressed_errors) = + get_with_separate_warnings ~reader ~options env + in + let warnings = + FilenameMap.fold + (fun _key -> ConcreteLocPrintableErrorSet.union) + warning_map + ConcreteLocPrintableErrorSet.empty + in + (errors, warnings, suppressed_errors)) diff --git 
a/src/server/error_collator/errorCollator.mli b/src/server/error_collator/errorCollator.mli index 6e639b3b949..a8121f1da42 100644 --- a/src/server/error_collator/errorCollator.mli +++ b/src/server/error_collator/errorCollator.mli @@ -1,16 +1,22 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val regenerate: ServerEnv.env -> ServerEnv.collated_errors - -val get_with_separate_warnings: +val get_with_separate_warnings : + reader:State_reader.t -> + options:Options.t -> ServerEnv.env -> - Errors.ErrorSet.t * Errors.ErrorSet.t Utils_js.FilenameMap.t * (Errors.error * Utils_js.LocSet.t) list + Errors.ConcreteLocPrintableErrorSet.t + * Errors.ConcreteLocPrintableErrorSet.t Utils_js.FilenameMap.t + * (Loc.t Errors.printable_error * Loc_collections.LocSet.t) list -val get: +val get : + reader:State_reader.t -> + options:Options.t -> ServerEnv.env -> - Errors.ErrorSet.t * Errors.ErrorSet.t * (Errors.error * Utils_js.LocSet.t) list + Errors.ConcreteLocPrintableErrorSet.t + * Errors.ConcreteLocPrintableErrorSet.t + * (Loc.t Errors.printable_error * Loc_collections.LocSet.t) list diff --git a/src/server/find_refs/__tests__/find_refs_tests.ml b/src/server/find_refs/__tests__/find_refs_tests.ml new file mode 100644 index 00000000000..019d4b85f50 --- /dev/null +++ b/src/server/find_refs/__tests__/find_refs_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "utils" >::: [PropertyAccessSearcher_test.tests] + +let () = run_test_tt_main tests diff --git a/src/server/find_refs/__tests__/propertyAccessSearcher_test.ml b/src/server/find_refs/__tests__/propertyAccessSearcher_test.ml index 747492c9965..c60be6e2a48 100644 --- a/src/server/find_refs/__tests__/propertyAccessSearcher_test.ml +++ b/src/server/find_refs/__tests__/propertyAccessSearcher_test.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -7,58 +7,53 @@ open OUnit2 -let run ctxt expected name text = - let (ast, _, _) = - FindRefsUtils.compute_ast_result (File_key.SourceFile "/dummy.js") text - |> Core_result.ok_or_failwith - in - let result = PropertyAccessSearcher.search name ast in - assert_equal ~ctxt expected result +let run ctxt expected name content = + let file = File_key.SourceFile "/dummy.js" in + let info = FindRefsUtils.compute_docblock file content in + Parsing_service_js.( + let parse_options = + { + parse_fail = false; + parse_types_mode = TypesAllowed; + parse_use_strict = true; + parse_prevent_munge = false; + parse_module_ref_prefix = None; + parse_facebook_fbt = None; + parse_arch = Options.Classic; + parse_abstract_locations = false; + } + in + let result = Parsing_service_js.do_parse ~parse_options ~info content file in + let ast = + match result with + | Parsing_service_js.Parse_ok parse_ok -> + let (ast, _) = Parsing_service_js.basic parse_ok in + ast + | Parsing_service_js.Parse_fail _ -> failwith "Parse unexpectedly failed" + | Parsing_service_js.Parse_skip _ -> failwith "Parse unexpectedly skipped" + in + let result = PropertyAccessSearcher.search name ast in + assert_equal ~ctxt expected result) -let tests = "SymbolKind" >::: [ - "property_access_positive" >:: begin fun ctxt -> - run ctxt true "bar" "foo.bar" - end; - "property_access_negative" >:: begin fun ctxt -> - run ctxt false "bar" "foo.baz" - end; - "destructuring_shorthand_positive" >:: begin fun ctxt -> - run ctxt true "bar" "const {bar} = baz" - end; - "destructuring_shorthand_negative" >:: begin fun ctxt -> - run ctxt false "baz" "const {bar} = baz" - end; - "destructuring_positive" >:: begin fun ctxt -> - run ctxt true "foo" "const {foo: bar} = baz" - end; - "destructuring_negative" >:: begin fun ctxt -> - run ctxt false "bar" "const {foo: bar} = baz" - end; - "destructuring_negative" >:: begin fun ctxt -> - run ctxt false "bar" "const {foo: bar} = baz" - end; - "export_default_positive" >:: begin fun ctxt -> - run ctxt true "default" "export default 5" - end; - "export_default_negative" >:: begin fun ctxt -> - run ctxt false "bar" "export default bar" - end; - "import_default_positive" >:: begin fun ctxt -> - run ctxt true "default" "import bar from 'baz'" - end; - "import_default_negative" >:: begin fun ctxt -> - run ctxt false "bar" "import bar from 'baz'" - end; - "class_method" >:: begin fun ctxt -> - run ctxt true "bar" "class Foo { bar(): void {} }" - end; - "class_property" >:: begin fun ctxt -> - run ctxt true "bar" "class Foo { bar: number }" - end; - "optional_chain_new" >:: begin fun ctxt -> - run ctxt true "bar" "foo?.bar" - end; - "optional_chain_continued" >:: begin fun ctxt -> - run ctxt true "baz" "foo?.bar.baz" - end; -]; +let tests = + "SymbolKind" + >::: [ + ("property_access_positive" >:: (fun ctxt -> run ctxt true "bar" "foo.bar")); + ("property_access_negative" >:: (fun ctxt -> run ctxt false "bar" "foo.baz")); + ( "destructuring_shorthand_positive" + >:: (fun ctxt -> run ctxt true "bar" "const {bar} = baz") ); + ( "destructuring_shorthand_negative" + >:: (fun ctxt -> run ctxt false "baz" "const {bar} = baz") ); + ("destructuring_positive" >:: (fun ctxt -> run ctxt true "foo" "const {foo: bar} = baz")); + ("destructuring_negative" >:: (fun ctxt -> run ctxt false "bar" "const {foo: bar} = baz")); + ("destructuring_negative" >:: (fun ctxt -> run ctxt false "bar" "const {foo: bar} = baz")); + ("export_default_positive" >:: (fun ctxt -> run ctxt true "default" "export default 5")); + ("export_default_negative" >:: 
(fun ctxt -> run ctxt false "bar" "export default bar")); + ( "import_default_positive" + >:: (fun ctxt -> run ctxt true "default" "import bar from 'baz'") ); + ("import_default_negative" >:: (fun ctxt -> run ctxt false "bar" "import bar from 'baz'")); + ("class_method" >:: (fun ctxt -> run ctxt true "bar" "class Foo { bar(): void {} }")); + ("class_property" >:: (fun ctxt -> run ctxt true "bar" "class Foo { bar: number }")); + ("optional_chain_new" >:: (fun ctxt -> run ctxt true "bar" "foo?.bar")); + ("optional_chain_continued" >:: (fun ctxt -> run ctxt true "baz" "foo?.bar.baz")); + ] diff --git a/src/server/find_refs/__tests__/test.ml b/src/server/find_refs/__tests__/test.ml deleted file mode 100644 index f92887f76af..00000000000 --- a/src/server/find_refs/__tests__/test.ml +++ /dev/null @@ -1,14 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "utils" >::: [ - PropertyAccessSearcher_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/server/find_refs/dune b/src/server/find_refs/dune new file mode 100644 index 00000000000..017e1122338 --- /dev/null +++ b/src/server/find_refs/dune @@ -0,0 +1,12 @@ +(library + (name flow_server_find_refs) + (wrapped false) + (libraries + flow_parser + flow_server_env + flow_server_lazy_mode_utils + flow_service_get_def + flow_state_readers + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/server/find_refs/findRefsTypes.ml b/src/server/find_refs/findRefsTypes.ml index 96e10c3ceed..7138f9d626a 100644 --- a/src/server/find_refs/findRefsTypes.ml +++ b/src/server/find_refs/findRefsTypes.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -18,6 +18,9 @@ let string_of_ref_kind = function | Other -> "Other" type single_ref = ref_kind * Loc.t -type find_refs_found = (string * single_ref list) + +type find_refs_found = string * single_ref list + type find_refs_ok = find_refs_found option + type find_refs_result = (find_refs_ok, string) result diff --git a/src/server/find_refs/findRefsUtils.ml b/src/server/find_refs/findRefsUtils.ml deleted file mode 100644 index 97ef354f271..00000000000 --- a/src/server/find_refs/findRefsUtils.ml +++ /dev/null @@ -1,67 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Utils_js - -module Result = Core_result -let (>>=) = Result.(>>=) - -let compute_docblock file content = - let open Parsing_service_js in - let max_tokens = docblock_max_tokens in - let _errors, docblock = parse_docblock ~max_tokens file content in - docblock - -(* We use compute_ast_result (as opposed to get_ast_result) when the file contents we have might be - * different from what's on disk (and what is therefore stored in shared memory). This can be the - * case for local find-refs requests, where the client may pipe in file contents rather than just - * specifying a filename. For global find-refs, we assume that all dependent files are the same as - * what's on disk, so we can grab the AST from the heap instead. 
*) -let compute_ast_result file content = - let docblock = compute_docblock file content in - let open Parsing_service_js in - let types_mode = TypesAllowed in - let use_strict = true in - let result = do_parse ~fail:false ~types_mode ~use_strict ~info:docblock content file in - match result with - | Parse_ok (ast, file_sig) -> Ok (ast, file_sig, docblock) - (* The parse should not fail; we have passed ~fail:false *) - | Parse_fail _ -> Error "Parse unexpectedly failed" - | Parse_skip _ -> Error "Parse unexpectedly skipped" - -let get_ast_result file : ((Loc.t, Loc.t) Flow_ast.program * File_sig.t * Docblock.t, string) result = - let open Parsing_heaps in - let get_result f kind = - let error = - Printf.sprintf "Expected %s to be available for %s" - kind - (File_key.to_string file) - in - Result.of_option ~error (f file) - in - let ast_result = get_result get_ast "AST" in - let file_sig_result = get_result get_file_sig "file sig" in - let docblock_result = get_result get_docblock "docblock" in - ast_result >>= fun ast -> - file_sig_result >>= fun file_sig -> - docblock_result >>= fun docblock -> - Ok (ast, file_sig, docblock) - -let get_dependents options workers env file_key content = - let docblock = compute_docblock file_key content in - let modulename = Module_js.exported_module options file_key docblock in - Dep_service.dependent_files - workers - (* Surprisingly, creating this set doesn't seem to cause horrible performance but it's - probably worth looking at if you are searching for optimizations *) - ~unchanged:ServerEnv.(CheckedSet.all !env.checked_files) - ~new_or_changed:(FilenameSet.singleton file_key) - ~changed_modules:(Modulename.Set.singleton modulename) - -let lazy_mode_focus genv env path = - let%lwt env, _ = Lazy_mode_utils.focus_and_check genv env (Nel.one path) in - Lwt.return env diff --git a/src/server/find_refs/findRefsUtils.mli b/src/server/find_refs/findRefsUtils.mli deleted file mode 100644 index 7370a976cf5..00000000000 --- a/src/server/find_refs/findRefsUtils.mli +++ /dev/null @@ -1,32 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -val compute_docblock: File_key.t -> string (* content *) -> Docblock.t - -val compute_ast_result: - File_key.t -> - string (* content *) -> - ((Loc.t, Loc.t) Flow_ast.program * File_sig.t * Docblock.t, string) result - -val get_ast_result: - File_key.t -> - ((Loc.t, Loc.t) Flow_ast.program * File_sig.t * Docblock.t, string) result - -val get_dependents: - Options.t -> - MultiWorkerLwt.worker list option -> - ServerEnv.env ref -> - File_key.t -> - string (* content *) -> - (* transitive dependents, direct dependents *) - (Utils_js.FilenameSet.t * Utils_js.FilenameSet.t) Lwt.t - -val lazy_mode_focus: - ServerEnv.genv -> - ServerEnv.env -> - string (* path *) -> - ServerEnv.env Lwt.t diff --git a/src/server/find_refs/findRefs_js.ml b/src/server/find_refs/findRefs_js.ml index c0e727d6461..2af478e6113 100644 --- a/src/server/find_refs/findRefs_js.ml +++ b/src/server/find_refs/findRefs_js.ml @@ -1,76 +1,112 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -let (>>|) = Core_result.(>>|) +let ( >>= ) = Lwt_result.Infix.( >>= ) + +let ( >>| ) = Core_result.( >>| ) open Utils_js +open Loc_collections let locmap_of_bindings = - List.fold_left begin fun map (loc, x) -> - LocMap.add loc x map - end LocMap.empty + List.fold_left begin + fun map (loc, x) -> LocMap.add loc x map + end LocMap.empty + +(* Extract the loc from each ref, then sort and dedup by loc. This will have to be revisited + if we ever need to report multiple ref kinds for a single location. *) +let sort_and_dedup refs = + refs + |> Core_list.map ~f:(fun ((_, loc) as reference) -> (loc, reference)) + |> locmap_of_bindings + |> LocMap.bindings + |> Core_list.map ~f:snd + +let local_variable_refs ast_info loc = + let (ast, _, _) = ast_info in + match VariableFindRefs.local_find_refs ast loc with + | None -> (None, loc) + | Some (var_refs, local_def_loc) -> (Some var_refs, local_def_loc) + +let global_property_refs ~reader ~genv ~env ~def_info ~multi_hop = + match def_info with + | None -> Lwt.return (Ok None) + | Some def_info -> + let%lwt refs = PropertyFindRefs.find_global_refs ~reader genv env ~multi_hop def_info in + Lwt.return (refs >>| Option.some) -let sort_and_dedup = - Core_result.map ~f:begin - Option.map ~f:begin fun (name, refs) -> - let refs = - (* Extract the loc from each ref, then sort and dedup by loc. This will have to be revisited - * if we ever need to report multiple ref kinds for a single location. *) - refs - |> List.map (fun ((_, loc) as reference) -> (loc, reference)) - |> locmap_of_bindings - |> LocMap.bindings - |> List.map snd - in - name, refs - end - end +let local_property_refs ~reader ~options ~file_key ~ast_info ~def_info = + match def_info with + | None -> Lwt.return (Ok None) + | Some def_info -> + let refs = PropertyFindRefs.find_local_refs ~reader ~options file_key ast_info def_info in + Lwt.return (refs >>| Option.some) + +let find_global_refs ~reader ~genv ~env ~profiling ~file_input ~line ~col ~multi_hop = + let options = genv.ServerEnv.options in + let filename = File_input.filename_of_file_input file_input in + let file_key = File_key.SourceFile filename in + let loc = Loc.make file_key line col in + let%lwt result = + File_input.content_of_file_input file_input + %>>= fun content -> + FindRefsUtils.compute_ast_result options file_key content + %>>= fun ast_info -> + (* Start by running local variable find references *) + let (var_refs, loc) = local_variable_refs ast_info loc in + (* Run get-def on the local loc *) + GetDefUtils.get_def_info ~reader ~options !env profiling file_key ast_info loc + >>= fun def_info -> + (* Then run property find-refs *) + global_property_refs ~reader ~genv ~env ~def_info ~multi_hop + >>= fun prop_refs -> + (* If property find-refs returned nothing (for example if we are importing from an untyped + * module), then fall back on the local refs we computed earlier. 
*) + Lwt.return + (Ok + (match prop_refs with + | Some _ -> prop_refs + | None -> + begin + match var_refs with + | Some var_refs -> Some (var_refs, None) + | None -> None + end)) + in + let (result, dep_count) = + match result with + | Ok (Some ((name, refs), dep_count)) -> (Ok (Some (name, sort_and_dedup refs)), dep_count) + | Ok None -> (Ok None, None) + | Error err -> (Error err, None) + in + Lwt.return (result, dep_count) -let find_refs ~genv ~env ~profiling ~file_input ~line ~col ~global ~multi_hop = +let find_local_refs ~reader ~options ~env ~profiling ~file_input ~line ~col = let filename = File_input.filename_of_file_input file_input in let file_key = File_key.SourceFile filename in let loc = Loc.make file_key line col in - match File_input.content_of_file_input file_input with - | Error err -> Lwt.return (Error err, None) - | Ok content -> - let%lwt result = - FindRefsUtils.compute_ast_result file_key content %>>= fun (ast, file_sig, _) -> - let property_find_refs start_loc = - PropertyFindRefs.find_refs genv env ~profiling ~content file_key start_loc ~global ~multi_hop - in - (* Start by running local variable find references *) - match VariableFindRefs.local_find_refs ast loc with - (* Got nothing from local variable find-refs, try object property find-refs *) - | None -> property_find_refs loc - | Some ((name, local_refs), local_def_loc) -> - (* We got something from local variable find-refs -- now let's check if it's an exported - * symbol *) - let start_loc = match ImportExportSymbols.find_related_symbol file_sig local_def_loc with - (* It's a local variable but it's not related to an export/import. However, let's try - * property find-refs anyway in case the local is used as an object property shorthand. *) - | None -> loc - | Some related_loc -> related_loc - in - let%lwt refs = property_find_refs start_loc in - refs %>>| fun refs -> - (* If property find-refs returned nothing (for example if we are importing from an untyped - * module), then fall back on the local refs we computed earlier. *) - Lwt.return @@ Some (Option.value ~default:((name, local_refs), None) refs) - in - let json_data = match result with - | Ok (Some (_, Some count)) -> ["deps", Hh_json.JSON_Number (string_of_int count)] - | _ -> [] - in - (* Drop the dependent file count from the result *) - let result = result >>| Option.map ~f:(fun (result, _) -> result) in - let result = sort_and_dedup result in - let json_data = - ("result", Hh_json.JSON_String (match result with Ok _ -> "SUCCESS" | _ -> "FAILURE")) - :: ("global", Hh_json.JSON_Bool global) - :: json_data - in - Lwt.return (result, Some (Hh_json.JSON_Object json_data)) + File_input.content_of_file_input file_input + %>>= fun content -> + FindRefsUtils.compute_ast_result options file_key content + %>>= fun ast_info -> + (* Start by running local variable find references *) + let (var_refs, loc) = local_variable_refs ast_info loc in + (* Run get-def on the local loc *) + GetDefUtils.get_def_info ~reader ~options env profiling file_key ast_info loc + >>= fun def_info -> + (* Then run property find-refs *) + local_property_refs ~reader ~options ~file_key ~ast_info ~def_info + >>= fun prop_refs -> + (* If property find-refs returned nothing (for example if we are importing from an untyped + * module), then fall back on the local refs we computed earlier. 
*) + let refs = Option.first_some prop_refs var_refs in + let refs = + match refs with + | Some (name, refs) -> Some (name, sort_and_dedup refs) + | None -> None + in + Lwt.return (Ok refs) diff --git a/src/server/find_refs/findRefs_js.mli b/src/server/find_refs/findRefs_js.mli index 6a94be86d1b..1483d09906c 100644 --- a/src/server/find_refs/findRefs_js.mli +++ b/src/server/find_refs/findRefs_js.mli @@ -1,17 +1,27 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val find_refs: - genv: ServerEnv.genv -> - env: ServerEnv.env ref -> - profiling: Profiling_js.running -> - file_input: File_input.t -> - line: int -> - col: int -> - global: bool -> - multi_hop: bool -> - (FindRefsTypes.find_refs_result * Hh_json.json option) Lwt.t +val find_global_refs : + reader:State_reader.t -> + genv:ServerEnv.genv -> + env:ServerEnv.env ref -> + profiling:Profiling_js.running -> + file_input:File_input.t -> + line:int -> + col:int -> + multi_hop:bool -> + (FindRefsTypes.find_refs_result * int option) Lwt.t + +val find_local_refs : + reader:State_reader.t -> + options:Options.t -> + env:ServerEnv.env -> + profiling:Profiling_js.running -> + file_input:File_input.t -> + line:int -> + col:int -> + FindRefsTypes.find_refs_result Lwt.t diff --git a/src/server/find_refs/importExportSymbols.ml b/src/server/find_refs/importExportSymbols.ml deleted file mode 100644 index 68fc11939d2..00000000000 --- a/src/server/find_refs/importExportSymbols.ml +++ /dev/null @@ -1,72 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -open File_sig - -let if_one_return_other x a b = - if x = a then Some b - else if x = b then Some a - else None - -let find_related_symbol_from_export loc = function - | ExportDefault {default_loc; local=Some(local, _); _} -> - if_one_return_other loc default_loc local - | ExportNamed {loc=remote_name_loc; kind } -> - begin match kind with - | NamedSpecifier { local = (local_loc, _); _ } -> - if_one_return_other loc remote_name_loc local_loc - | NamedDeclaration -> - if loc = remote_name_loc then Some remote_name_loc else None - end - | _ -> None - -let find_related_symbol_from_module_kind loc = function - | CommonJS _ -> None - | ES {named; _} -> - let exports = SMap.values named in - ListUtils.first_some_map (find_related_symbol_from_export loc) exports - -let find_related_symbol_from_require loc = function - | Import {named; _} -> - let loc_records (* list of {remote_loc, local_loc} *) = - SMap.fold begin fun _ local_name_to_locs acc -> - SMap.fold begin fun _ locs acc -> - List.rev_append (Nel.to_list locs) acc - end local_name_to_locs acc - end named [] - in - loc_records |> ListUtils.first_some_map begin fun {remote_loc; local_loc} -> - if_one_return_other loc remote_loc local_loc - end - | Require {bindings=Some bindings; require_loc; _} -> - begin match bindings with - | BindIdent (id_loc, _) -> if_one_return_other loc require_loc id_loc - | BindNamed named -> - let loc_records (* list of {remote_loc, local_loc} *) = - SMap.fold begin fun _ local_name_to_locs acc -> - SMap.fold begin fun _ locs acc -> - List.rev_append (Nel.to_list locs) acc - end local_name_to_locs acc - end named [] - in - loc_records |> ListUtils.first_some_map begin fun {remote_loc; local_loc} -> - if_one_return_other loc remote_loc local_loc - end - end - | _ -> None - -let find_related_symbol_from_requires loc requires = - ListUtils.first_some_map (find_related_symbol_from_require loc) requires - -let find_related_symbol file_sig loc = - match find_related_symbol_from_module_kind loc file_sig.module_sig.module_kind with - | Some _ as result -> result - | None -> find_related_symbol_from_requires loc file_sig.module_sig.requires - -let find_related_symbols file_sig starting_locs = - List.map (find_related_symbol file_sig) starting_locs - |> ListUtils.cat_maybes diff --git a/src/server/find_refs/propertyAccessSearcher.ml b/src/server/find_refs/propertyAccessSearcher.ml index 15bb62fcdb0..b390bb601ef 100644 --- a/src/server/find_refs/propertyAccessSearcher.ml +++ b/src/server/find_refs/propertyAccessSearcher.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,75 +7,99 @@ module Ast = Flow_ast -class property_access_searcher name = object(this) - inherit [bool] Flow_ast_visitor.visitor ~init:false as super - method! member loc expr = - let open Ast.Expression.Member in - begin match expr.property with - | PropertyIdentifier (_, x) when x = name -> - this#set_acc true - | _ -> () - end; - super#member loc expr - method! object_key (key: (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = - let open Ast.Expression.Object.Property in - begin match key with - | Identifier (_, x) when x = name -> - this#set_acc true - | _ -> () - end; - super#object_key key - method! 
pattern_object_property ?kind (prop: (Loc.t, Loc.t) Ast.Pattern.Object.Property.t') = - let open Ast.Pattern.Object.Property in - let { key; _ } = prop in - begin match key with - | Identifier (_, x) when x = name -> - this#set_acc true - | _ -> () - end; - super#pattern_object_property ?kind prop - method! export_default_declaration loc (decl: (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) = - if name = "default" then begin - this#set_acc true - end; - super#export_default_declaration loc decl - method! export_named_declaration loc (decl: (Loc.t, Loc.t) Ast.Statement.ExportNamedDeclaration.t) = - let open Ast.Statement.ExportNamedDeclaration in - let { declaration; _ } = decl in - let open Ast.Statement in - begin match declaration with - | Some (_, FunctionDeclaration { Ast.Function.id = Some (_, exported_name); _ }) - | Some (_, ClassDeclaration { Ast.Class.id = Some (_, exported_name); _ }) -> - if exported_name = name then - this#set_acc true - | Some (_, VariableDeclaration { VariableDeclaration.declarations = decls; _ }) -> - Ast_utils.bindings_of_variable_declarations decls - |> List.iter (fun (_, exported_name) -> if exported_name = name then this#set_acc true) - | _ -> () - (* TODO add type exports when find-refs supports them *) - end; - (* TODO specifiers *) - super#export_named_declaration loc decl - method! import_declaration loc (decl: (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.t) = - let open Ast.Statement.ImportDeclaration in - let { default; specifiers; _ } = decl in - if Option.is_some default && name = "default" then begin - this#set_acc true - end; - let handle_specifier = function - (* `import * as ...` - * No action needed since any references to actual exports will appear later as normal - * property references. *) - | ImportNamespaceSpecifier _ -> () - | ImportNamedSpecifiers named_specifiers -> - named_specifiers |> List.iter begin fun {remote=(_, remote_name); _} -> - if remote_name = name then +class property_access_searcher name = + object (this) + inherit [bool, Loc.t] Flow_ast_visitor.visitor ~init:false as super + + method! member loc expr = + Ast.Expression.Member.( + begin + match expr.property with + | PropertyIdentifier (_, { Ast.Identifier.name = id; comments = _ }) when id = name -> + this#set_acc true + | _ -> () + end; + super#member loc expr) + + method! object_key (key : (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = + Ast.Expression.Object.Property.( + begin + match key with + | Identifier (_, { Ast.Identifier.name = id; comments = _ }) when id = name -> + this#set_acc true + | _ -> () + end; + super#object_key key) + + method! pattern_object_property ?kind (prop : (Loc.t, Loc.t) Ast.Pattern.Object.Property.t') = + Ast.Pattern.Object.Property.( + let { key; _ } = prop in + begin + match key with + | Identifier (_, { Ast.Identifier.name = id; comments = _ }) when id = name -> this#set_acc true - end - in - Option.iter specifiers ~f:handle_specifier; - super#import_declaration loc decl -end + | _ -> () + end; + super#pattern_object_property ?kind prop) + + method! export_default_declaration + loc (decl : (Loc.t, Loc.t) Ast.Statement.ExportDefaultDeclaration.t) = + if name = "default" then this#set_acc true; + super#export_default_declaration loc decl + + method! 
export_named_declaration + loc (decl : (Loc.t, Loc.t) Ast.Statement.ExportNamedDeclaration.t) = + Ast.Statement.ExportNamedDeclaration.( + let { declaration; _ } = decl in + Ast.Statement.( + begin + match declaration with + | Some + ( _, + FunctionDeclaration + { + Ast.Function.id = + Some (_, { Ast.Identifier.name = exported_name; comments = _ }); + _; + } ) + | Some + ( _, + ClassDeclaration + { + Ast.Class.id = Some (_, { Ast.Identifier.name = exported_name; comments = _ }); + _; + } ) -> + if exported_name = name then this#set_acc true + | Some (_, VariableDeclaration { VariableDeclaration.declarations = decls; _ }) -> + Flow_ast_utils.fold_bindings_of_variable_declarations + (fun () (_, { Ast.Identifier.name = exported_name; comments = _ }) -> + if exported_name = name then this#set_acc true) + () + decls + | _ -> () (* TODO add type exports when find-refs supports them *) + end; + + (* TODO specifiers *) + super#export_named_declaration loc decl)) + + method! import_declaration loc (decl : (Loc.t, Loc.t) Ast.Statement.ImportDeclaration.t) = + Ast.Statement.ImportDeclaration.( + let { default; specifiers; _ } = decl in + if Option.is_some default && name = "default" then this#set_acc true; + let handle_specifier = function + (* `import * as ...` + * No action needed since any references to actual exports will appear later as normal + * property references. *) + | ImportNamespaceSpecifier _ -> () + | ImportNamedSpecifiers named_specifiers -> + named_specifiers + |> List.iter + (fun { remote = (_, { Ast.Identifier.name = remote_name; comments = _ }); _ } -> + if remote_name = name then this#set_acc true) + in + Option.iter specifiers ~f:handle_specifier; + super#import_declaration loc decl) + end (* Returns true iff the given AST contains an access to a property with the given name *) let search name ast = diff --git a/src/server/find_refs/propertyAccessSearcher.mli b/src/server/find_refs/propertyAccessSearcher.mli index 14a4ef586ca..366b777e545 100644 --- a/src/server/find_refs/propertyAccessSearcher.mli +++ b/src/server/find_refs/propertyAccessSearcher.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/server/find_refs/propertyFindRefs.ml b/src/server/find_refs/propertyFindRefs.ml index 1f122afb01b..7a1fd9e2326 100644 --- a/src/server/find_refs/propertyFindRefs.ml +++ b/src/server/find_refs/propertyFindRefs.ml @@ -1,202 +1,54 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - +module File_sig = File_sig.With_Loc open Utils_js +open Parsing_heaps_utils +open Loc_collections open ServerEnv - -module Result = Core_result -let (>>=) = Result.(>>=) -let (>>|) = Result.(>>|) - open FindRefsUtils +open GetDefUtils -let add_ref_kind kind = List.map (fun loc -> (kind, loc)) - -(* The default visitor does not provide all of the context we need when visiting an object key. In - * particular, we need the location of the enclosing object literal. *) -class ['acc] object_key_visitor ~init = object(this) - inherit ['acc] Flow_ast_visitor.visitor ~init as super - - method! 
expression (exp: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - begin match exp with - | loc, Object x -> - this#visit_object_literal loc x - | _ -> () - end; - super#expression exp - - method private visit_object_literal (loc: Loc.t) (obj: (Loc.t, Loc.t) Ast.Expression.Object.t) = - let open Ast.Expression.Object in - let get_prop_key = - let open Property in - function Init { key; _ } | Method { key; _ } | Get { key; _ } | Set { key; _ } -> key - in - let { properties } = obj in - properties - |> List.iter begin function - | SpreadProperty _ -> () - | Property (_, prop) -> prop |> get_prop_key |> this#visit_object_key loc - end - - method private visit_object_key - (_literal_loc: Loc.t) - (_key: (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = - () -end - -module ObjectKeyAtLoc : sig - (* Given a location, returns Some (enclosing_literal_loc, prop_loc, name) if the given location - * points to an object literal key. The first location returned is the location for the entire - * enclosing object literal. This is because later, we need to figure out which types are related - * to this object literal which is easier to do when we have the location of the actual object - * literal than if we only had the location of a single key. *) - val get: (Loc.t, Loc.t) Ast.program -> Loc.t -> (Loc.t * Loc.t * string) option -end = struct - class object_key_finder target_loc = object(this) - inherit [(Loc.t * Loc.t * string) option] object_key_visitor ~init:None - method! private visit_object_key - (literal_loc: Loc.t) - (key: (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = - let open Ast.Expression.Object in - match key with - | Property.Identifier (prop_loc, name) when Loc.contains prop_loc target_loc -> - this#set_acc (Some (literal_loc, prop_loc, name)) - | _ -> () - end - - let get ast target_loc = - let finder = new object_key_finder target_loc in - finder#eval finder#program ast -end +let add_ref_kind kind = Core_list.map ~f:(fun loc -> (kind, loc)) module LiteralToPropLoc : sig (* Returns a map from object_literal_loc to prop_loc, for all object literals which contain the * given property name. *) - val make: (Loc.t, Loc.t) Ast.program -> prop_name: string -> Loc.t LocMap.t + val make : (Loc.t, Loc.t) Ast.program -> prop_name:string -> Loc.t LocMap.t end = struct - class locmap_builder prop_name = object(this) - inherit [Loc.t LocMap.t] object_key_visitor ~init:LocMap.empty - method! private visit_object_key - (literal_loc: Loc.t) - (key: (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = - let open Ast.Expression.Object in - match key with - | Property.Identifier (prop_loc, name) when name = prop_name -> - this#update_acc (fun map -> LocMap.add literal_loc prop_loc map) - (* TODO consider supporting other property keys (e.g. literals). Also update the - * optimization in property_access_searcher below when this happens. *) - | _ -> () - end + class locmap_builder prop_name = + object (this) + inherit [Loc.t LocMap.t] object_key_visitor ~init:LocMap.empty + + method! private visit_object_key + (literal_loc : Loc.t) (key : (Loc.t, Loc.t) Ast.Expression.Object.Property.key) = + Ast.Expression.Object.( + match key with + | Property.Identifier (prop_loc, { Ast.Identifier.name; comments = _ }) + when name = prop_name -> + this#update_acc (fun map -> LocMap.add literal_loc prop_loc map) + (* TODO consider supporting other property keys (e.g. literals). Also update the + * optimization in property_access_searcher below when this happens. 
*) + | _ -> ()) + end let make ast ~prop_name = let builder = new locmap_builder prop_name in builder#eval builder#program ast end -(* If the given type refers to an object literal, return the location of the object literal. - * Otherwise return None *) -let get_object_literal_loc ty : Loc.t option = - let open Type in - let open Reason in - let reason_desc = - reason_of_t ty - (* TODO look into unwrap *) - |> desc_of_reason ~unwrap:false - in - match reason_desc with - | RObjectLit -> Some (Type.def_loc_of_t ty) - | _ -> None - -type def_kind = - (* Use of a property, e.g. `foo.bar`. Includes type of receiver (`foo`) and name of the property - * `bar` *) - | Use of Type.t * string - (* In a class, where a property/method is defined. Includes the type of the class and the name - of the property. *) - | Class_def of Type.t * string (* name *) * bool (* static *) - (* In an object type. Includes the location of the property definition and its name. *) - | Obj_def of Loc.t * string (* name *) - (* List of types that the object literal flows into directly, as well as the name of the - * property. *) - | Use_in_literal of Type.t Nel.t * string (* name *) - -let set_def_loc_hook prop_access_info literal_key_info target_loc = - let set_prop_access_info new_info = - let set_ok info = prop_access_info := Ok (Some info) in - let set_err err = prop_access_info := Error err in - match !prop_access_info with - | Error _ -> () - | Ok None -> prop_access_info := Ok (Some new_info) - | Ok (Some info) -> begin match info, new_info with - | Use _, Use _ - | Class_def _, Class_def _ - | Obj_def _, Obj_def _ -> - (* Due to generate_tests, we sometimes see hooks firing multiple times for the same - * location. This is innocuous and we should take the last result. *) - set_ok new_info - (* Literals can flow into multiple types. Include them all. *) - | Use_in_literal (types, name), Use_in_literal (new_types, new_name) -> - if name = new_name then - set_ok (Use_in_literal (Nel.rev_append new_types types, name)) - else - set_err "Names did not match" - (* We should not see mismatches. 
*) - | Use _, _ | Class_def _, _ | Obj_def _, _ | Use_in_literal _, _ -> - set_err "Unexpected mismatch between definition kind" - end - in - let use_hook ret _ctxt name loc ty = - begin if Loc.contains loc target_loc then - set_prop_access_info (Use (ty, name)) - end; - ret - in - let class_def_hook _ctxt ty static name loc = - if Loc.contains loc target_loc then - set_prop_access_info (Class_def (ty, name, static)) - in - let obj_def_hook _ctxt name loc = - if Loc.contains loc target_loc then - set_prop_access_info (Obj_def (loc, name)) - in - let export_named_hook name loc = - if Loc.contains loc target_loc then - set_prop_access_info (Obj_def (loc, name)) - in - let obj_to_obj_hook _ctxt obj1 obj2 = - match get_object_literal_loc obj1, literal_key_info with - | Some loc, Some (target_loc, _, name) when loc = target_loc -> - let open Type in - begin match obj2 with - | DefT (_, ObjT _) -> - set_prop_access_info (Use_in_literal (Nel.one obj2, name)) - | _ -> () - end - | _ -> () - in - - Type_inference_hooks_js.set_member_hook (use_hook false); - Type_inference_hooks_js.set_call_hook (use_hook ()); - Type_inference_hooks_js.set_class_member_decl_hook class_def_hook; - Type_inference_hooks_js.set_obj_prop_decl_hook obj_def_hook; - Type_inference_hooks_js.set_export_named_hook export_named_hook; - Type_inference_hooks_js.set_obj_to_obj_hook obj_to_obj_hook - -let set_get_refs_hook potential_refs potential_matching_literals target_name = +let set_get_refs_hook ~reader potential_refs potential_matching_literals target_name = let hook ret _ctxt name loc ty = - begin if name = target_name then + if name = target_name then (* Replace previous bindings of `loc`. We should always use the result of the last call to * the hook for a given location. For details see the comment on the generate_tests function * in flow_js.ml *) - potential_refs := LocMap.add loc ty !potential_refs - end; + potential_refs := ALocMap.add loc ty !potential_refs; ret in let lval_hook cx name loc = function @@ -205,253 +57,64 @@ let set_get_refs_hook potential_refs potential_matching_literals target_name = | _ -> () in let obj_to_obj_hook _ctxt obj1 obj2 = - let open Type in - match get_object_literal_loc obj1, obj2 with - | Some loc, DefT (_, ObjT _) -> - let entry = (loc, obj2) in - potential_matching_literals := entry:: !potential_matching_literals - | _ -> () + Type.( + match (get_object_literal_loc ~reader obj1, obj2) with + | (Some loc, DefT (_, _, ObjT _)) -> + let entry = (loc, obj2) in + potential_matching_literals := entry :: !potential_matching_literals + | _ -> ()) in - Type_inference_hooks_js.set_member_hook (hook false); Type_inference_hooks_js.set_call_hook (hook ()); - Type_inference_hooks_js.set_lval_hook (lval_hook); + Type_inference_hooks_js.set_lval_hook lval_hook; Type_inference_hooks_js.set_obj_to_obj_hook obj_to_obj_hook -let unset_hooks () = - Type_inference_hooks_js.reset_hooks () - -type single_def_info = - | Class of Loc.t - (* An object was found. *) - | Object of Loc.t - -(* If there are multiple relevant definition locations (e.g. the request was issued on an object - * literal which is associated with multiple types) then there will be multiple locations in no - * particular order. 
*) -type property_def_info = single_def_info Nel.t - -type def_info = - | Property of property_def_info * string (* name *) - | CJSExport of Loc.t - -let display_name_of_def_info = function - | Property (_, name) -> name - | CJSExport _ -> "module.exports" - -let loc_of_single_def_info = function - | Class loc -> loc - | Object loc -> loc - -let all_locs_of_property_def_info def_info = - def_info - |> Nel.map loc_of_single_def_info - -let all_locs_of_def_info = function - | Property (def_info, _) -> all_locs_of_property_def_info def_info - | CJSExport loc -> Nel.one loc - -type def_loc = - (* We found a class property. Include all overridden implementations. Superclass implementations - * are listed last. *) - | FoundClass of Loc.t Nel.t - (* We found an object property. *) - | FoundObject of Loc.t - | FoundUnion of def_loc Nel.t - (* This means we resolved the receiver type but did not find the definition. If this happens - * there must be a type error (which may be suppresssed) *) - | NoDefFound - (* This means it's a known type that we deliberately do not currently support. *) - | UnsupportedType - (* This means it's not well-typed, and could be anything *) - | AnyType - -let debug_string_of_locs locs = - locs |> Nel.to_list |> List.map Loc.to_string |> String.concat ", " - -(* Disable the unused value warning -- we want to keep this around for debugging *) -[@@@warning "-32"] -let debug_string_of_single_def_info = function - | Class loc -> spf "Class (%s)" (Loc.to_string loc) - | Object loc -> spf "Object (%s)" (Loc.to_string loc) - -let debug_string_of_property_def_info def_info = - def_info - |> Nel.map debug_string_of_single_def_info - |> Nel.to_list - |> String.concat ", " - |> spf "[%s]" - -let debug_string_of_def_info = function - | Property (def_info, name) -> - spf "Property (%s, %s)" (debug_string_of_property_def_info def_info) name - | CJSExport loc -> - spf "CJSExport (%s)" (Loc.to_string loc) - -let rec debug_string_of_def_loc = function - | FoundClass locs -> spf "FoundClass (%s)" (debug_string_of_locs locs) - | FoundObject loc -> spf "FoundObject (%s)" (Loc.to_string loc) - | FoundUnion def_locs -> - Nel.to_list def_locs |> List.map debug_string_of_def_loc |> String.concat ", " - |> spf "FoundUnion (%s)" - | NoDefFound -> "NoDefFound" - | UnsupportedType -> "UnsupportedType" - | AnyType -> "AnyType" -(* Re-enable the unused value warning *) -[@@@warning "+32"] - -let extract_instancet cx ty : (Type.t, string) result = - let open Type in - let resolved = Flow_js.resolve_type cx ty in - match resolved with - | ThisClassT (_, t) - | DefT (_, PolyT (_, ThisClassT (_, t), _)) -> Ok t - | _ -> - let type_string = string_of_ctor resolved in - Error ("Expected a class type to extract an instance type from, got " ^ type_string) - -(* Must be called with the result from Flow_js.Members.extract_type *) -let get_def_loc_from_extracted_type cx extracted_type name = - extracted_type - |> Flow_js.Members.extract_members cx - |> Flow_js.Members.to_command_result - >>| fun map -> match SMap.get name map with - | None -> None - (* Currently some types (e.g. spreads) do not contain locations for their properties. 
For now - * we'll just treat them as if the properties do not exist, but once this is fixed this case - * should be promoted to an error *) - | Some (None, _) -> None - | Some (Some loc, _) -> Some loc - -let rec extract_def_loc cx ty name : (def_loc, string) result = - let resolved = Flow_js.resolve_type cx ty in - extract_def_loc_resolved cx resolved name - -(* The same as get_def_loc_from_extracted_type except it recursively checks for overridden - * definitions of the member in superclasses and returns those as well *) -and extract_def_loc_from_instancet cx extracted_type super name : (def_loc, string) result = - let current_class_def_loc = get_def_loc_from_extracted_type cx extracted_type name in - current_class_def_loc - >>= begin function - | None -> Ok NoDefFound - | Some loc -> - extract_def_loc cx super name - >>= begin function - | FoundClass lst -> - (* Avoid duplicate entries. This can happen if a class does not override a method, - * so the definition points to the method definition in the parent class. Then we - * look at the parent class and find the same definition. *) - let lst = - if Nel.hd lst = loc then - lst - else - Nel.cons loc lst - in - Ok (FoundClass lst) - | FoundObject _ -> Error "A superclass should be a class, not an object" - | FoundUnion _ -> Error "A superclass should be a class, not a union" - (* If the superclass does not have a definition for this method, or it is for some reason - * not a class type, or we don't know its type, just return the location we already know - * about. *) - | NoDefFound | UnsupportedType | AnyType -> Ok (FoundClass (Nel.one loc)) - end - end - -and extract_def_loc_resolved cx ty name : (def_loc, string) result = - let open Flow_js.Members in - let open Type in - match Flow_js.Members.extract_type cx ty with - | Success (DefT (_, InstanceT (_, super, _, _))) as extracted_type -> - extract_def_loc_from_instancet cx extracted_type super name - | Success (DefT (_, ObjT _)) | SuccessModule _ as extracted_type -> - get_def_loc_from_extracted_type cx extracted_type name - >>| begin function - | None -> NoDefFound - | Some loc -> FoundObject loc - end - | Success (DefT (_, UnionT rep)) -> - let union_members = - UnionRep.members rep - |> List.map (fun member -> extract_def_loc cx member name) - |> Result.all - in - union_members - >>= begin fun members -> - Nel.of_list members - |> Result.of_option ~error:"Union should have at least one member" - end - >>| begin fun members_nel -> - FoundUnion members_nel - end - | Success _ - | FailureNullishType - | FailureUnhandledType _ -> - Ok UnsupportedType - | FailureAnyType -> - Ok AnyType - (* Returns `true` iff the given type is a reference to the symbol we are interested in *) -let type_matches_locs cx ty prop_def_info name = +let type_matches_locs ~reader cx ty prop_def_info name = let rec def_loc_matches_locs = function - | FoundClass ty_def_locs -> - prop_def_info |> Nel.exists begin function - | Object _ -> false - | Class loc -> - (* Only take the first extracted def loc -- that is, the one for the actual definition - * and not overridden implementations, and compare it to the list of def locs we are - * interested in *) - loc = Nel.hd ty_def_locs - end - | FoundObject loc -> - prop_def_info |> Nel.exists begin function - | Class _ -> false - | Object def_loc -> loc = def_loc - end - | FoundUnion def_locs -> - def_locs - |> Nel.map def_loc_matches_locs - |> Nel.fold_left ( || ) false - (* TODO we may want to surface AnyType results somehow since we can't be sure whether they - * are 
references or not. For now we'll leave them out. *) - | NoDefFound | UnsupportedType | AnyType -> false - in - extract_def_loc cx ty name >>| def_loc_matches_locs - -(* Takes the file key where the module reference appeared, as well as the module reference, and - * returns the file name for the module that the module reference refers to. *) -let file_key_of_module_ref file_key module_ref = - let resolved = Module_js.find_resolved_module - ~audit:Expensive.warn - file_key - module_ref - in - Module_heaps.get_file ~audit:Expensive.warn resolved - -let process_prop_refs cx potential_refs file_key prop_def_info name = - potential_refs |> - LocMap.bindings |> - List.map begin fun (ref_loc, ty) -> - type_matches_locs cx ty prop_def_info name - >>| function - | true -> Some ref_loc - | false -> None - end - |> Result.all - |> Result.map_error ~f:(fun err -> - Printf.sprintf - "Encountered while finding refs in `%s`: %s" - (File_key.to_string file_key) - err - ) - >>| begin fun refs -> - refs - |> ListUtils.cat_maybes - |> add_ref_kind FindRefsTypes.PropertyAccess - end - -let property_find_refs_in_file options ast_info file_key def_info name = - let potential_refs: Type.t LocMap.t ref = ref LocMap.empty in - let potential_matching_literals: (Loc.t * Type.t) list ref = ref [] in + | FoundClass ty_def_locs -> + prop_def_info + |> Nel.exists (function + | Object _ -> false + | Class loc -> + (* Only take the first extracted def loc -- that is, the one for the actual definition + * and not overridden implementations, and compare it to the list of def locs we are + * interested in *) + loc = Nel.hd ty_def_locs) + | FoundObject loc -> + prop_def_info + |> Nel.exists (function + | Class _ -> false + | Object def_loc -> loc = def_loc) + | FoundUnion def_locs -> def_locs |> Nel.map def_loc_matches_locs |> Nel.fold_left ( || ) false + (* TODO we may want to surface AnyType results somehow since we can't be sure whether they + * are references or not. For now we'll leave them out. 
*) + | NoDefFound + | UnsupportedType + | AnyType -> + false + in + extract_def_loc ~reader cx ty name >>| def_loc_matches_locs + +let process_prop_refs ~reader cx potential_refs file_key prop_def_info name = + potential_refs + |> ALocMap.bindings + |> Core_list.map ~f:(fun (ref_loc, ty) -> + type_matches_locs ~reader cx ty prop_def_info name + >>| function + | true -> Some (loc_of_aloc ~reader ref_loc) + | false -> None) + |> Result.all + |> Result.map_error ~f:(fun err -> + Printf.sprintf + "Encountered while finding refs in `%s`: %s" + (File_key.to_string file_key) + err) + >>| (fun refs -> refs |> ListUtils.cat_maybes |> add_ref_kind FindRefsTypes.PropertyAccess) + +let property_find_refs_in_file ~reader options ast_info file_key def_info name = + let potential_refs : Type.t ALocMap.t ref = ref ALocMap.empty in + let potential_matching_literals : (Loc.t * Type.t) list ref = ref [] in let (ast, file_sig, info) = ast_info in let info = Docblock.set_flow_mode_for_ide_command info in let local_defs = @@ -462,10 +125,10 @@ let property_find_refs_in_file options ast_info file_key def_info name = let has_symbol = PropertyAccessSearcher.search name ast in if not has_symbol then Ok local_defs - else begin - set_get_refs_hook potential_refs potential_matching_literals name; - let (cx, _) = Merge_service.merge_contents_context - options file_key ast info file_sig + else ( + set_get_refs_hook ~reader potential_refs potential_matching_literals name; + let (cx, _) = + Merge_service.merge_contents_context ~reader options file_key ast info file_sig in unset_hooks (); let literal_prop_refs_result = @@ -473,109 +136,119 @@ let property_find_refs_in_file options ast_info file_key def_info name = * examine *) let prop_loc_map = lazy (LiteralToPropLoc.make ast name) in let get_prop_loc_if_relevant (obj_loc, into_type) = - type_matches_locs cx into_type def_info name + type_matches_locs ~reader cx into_type def_info name >>| function | false -> None | true -> LocMap.get obj_loc (Lazy.force prop_loc_map) in !potential_matching_literals - |> List.map get_prop_loc_if_relevant + |> Core_list.map ~f:get_prop_loc_if_relevant |> Result.all - >>| begin fun refs -> - refs - |> ListUtils.cat_maybes - |> add_ref_kind FindRefsTypes.PropertyDefinition - end + >>| fun refs -> + refs |> ListUtils.cat_maybes |> add_ref_kind FindRefsTypes.PropertyDefinition in literal_prop_refs_result - >>= begin fun literal_prop_refs_result -> - process_prop_refs cx !potential_refs file_key def_info name - >>| (@) local_defs - >>| (@) literal_prop_refs_result - end - end - -let export_find_refs_in_file ast_info file_key def_loc = - let open File_sig in - let (_, file_sig, _) = ast_info in - let is_relevant module_ref = - Loc.source def_loc = file_key_of_module_ref file_key module_ref - in - let locs = List.fold_left begin fun acc require -> - match require with - | Require { source = (_, module_ref); require_loc; _ } -> - if is_relevant module_ref then - require_loc::acc + >>= fun literal_prop_refs_result -> + process_prop_refs ~reader cx !potential_refs file_key def_info name + >>| ( @ ) local_defs + >>| ( @ ) literal_prop_refs_result + ) + +let export_find_refs_in_file ~reader ast_info file_key def_loc = + File_sig.( + let (_, file_sig, _) = ast_info in + let is_relevant module_ref = + Loc.source def_loc = file_key_of_module_ref ~reader file_key module_ref + in + let locs = + List.fold_left + begin + fun acc require -> + match require with + | Require { source = (_, module_ref); require_loc; _ } -> + if is_relevant module_ref then + 
require_loc :: acc + else + acc + | _ -> acc + end + [] + file_sig.module_sig.requires + in + let locs = + if Loc.source def_loc = Some file_key then + def_loc :: locs else - acc - | _ -> acc - end [] file_sig.module_sig.requires in - let locs = - if Loc.source def_loc = Some file_key then - def_loc::locs - else - locs - in - Ok locs + locs + in + Ok locs) let add_related_bindings ast_info refs = let (ast, file_sig, _) = ast_info in - let locs = List.map snd refs in + let locs = Core_list.map ~f:snd refs in let related_bindings = ImportExportSymbols.find_related_symbols file_sig locs in - List.fold_left begin fun acc loc -> - let new_refs = - VariableFindRefs.local_find_refs ast loc - |> Option.value_map ~default:[] ~f:(fun ((_, refs), _) -> refs) - in - List.rev_append new_refs acc - end refs related_bindings - -let find_refs_in_file options ast_info file_key def_info = - let refs = match def_info with - | Property (def_info, name) -> - property_find_refs_in_file options ast_info file_key def_info name - | CJSExport loc -> - export_find_refs_in_file ast_info file_key loc >>| fun refs -> - add_ref_kind FindRefsTypes.Other refs + List.fold_left + begin + fun acc loc -> + let new_refs = + VariableFindRefs.local_find_refs ast loc + |> Option.value_map ~default:[] ~f:(fun ((_, refs), _) -> refs) + in + List.rev_append new_refs acc + end + refs + related_bindings + +let find_refs_in_file ~reader options ast_info file_key def_info = + let refs = + match def_info with + | Property (def_info, name) -> + property_find_refs_in_file ~reader options ast_info file_key def_info name + | CJSExport loc -> + export_find_refs_in_file ~reader ast_info file_key loc + >>| (fun refs -> add_ref_kind FindRefsTypes.Other refs) in refs >>| add_related_bindings ast_info -let find_refs_in_multiple_files genv all_deps def_info = - let {options; workers} = genv in - let dep_list: File_key.t list = FilenameSet.elements all_deps in +let find_refs_in_multiple_files ~reader genv all_deps def_info = + let { options; workers } = genv in + let dep_list : File_key.t list = FilenameSet.elements all_deps in let node_modules_containers = !Files.node_modules_containers in - let%lwt result = MultiWorkerLwt.call workers - ~job: begin fun _acc deps -> - (* Yay for global mutable state *) - Files.node_modules_containers := node_modules_containers; - deps |> List.map begin fun dep -> - get_ast_result dep >>= fun ast_info -> - find_refs_in_file options ast_info dep def_info - end - end - ~merge: (fun refs acc -> List.rev_append refs acc) - ~neutral: [] - ~next: (MultiWorkerLwt.next workers dep_list) + let%lwt result = + MultiWorkerLwt.call + workers + ~job: + begin + fun _acc deps -> + (* Yay for global mutable state *) + Files.node_modules_containers := node_modules_containers; + deps + |> Core_list.map ~f:(fun dep -> + get_ast_result ~reader dep + >>= (fun ast_info -> find_refs_in_file ~reader options ast_info dep def_info)) + end + ~merge:(fun refs acc -> List.rev_append refs acc) + ~neutral:[] + ~next:(MultiWorkerLwt.next workers dep_list) in (* The types got a little too complicated here. Writing out the intermediate types makes it a * bit clearer. 
*) - let result: (FindRefsTypes.single_ref list list, string) Result.t = Result.all result in - let result: (FindRefsTypes.single_ref list, string) Result.t = result >>| List.concat in + let result : (FindRefsTypes.single_ref list list, string) Result.t = Result.all result in + let result : (FindRefsTypes.single_ref list, string) Result.t = result >>| List.concat in Lwt.return result (* Get the source for each loc. Error if any loc is missing a source. *) -let files_of_locs (locs: Loc.t Nel.t) : (FilenameSet.t, string) result = +let files_of_locs (locs : Loc.t Nel.t) : (FilenameSet.t, string) result = let files_result = locs |> Nel.map (fun loc -> loc.Loc.source) |> Nel.map (Result.of_option ~error:"Expected a location with a source file") |> Nel.result_all in - files_result >>| fun files -> - Nel.to_list files |> FilenameSet.of_list + files_result >>| (fun files -> Nel.to_list files |> FilenameSet.of_list) (* Error if the set is empty *) -let nel_of_filename_set (set: FilenameSet.t) : (File_key.t Nel.t, string) result = +let nel_of_filename_set (set : FilenameSet.t) : (File_key.t Nel.t, string) result = set |> FilenameSet.elements |> Nel.of_list @@ -586,38 +259,36 @@ let roots_of_def_info def_info : (File_key.t Nel.t, string) result = let root_locs = all_locs_of_def_info def_info in files_of_locs root_locs >>= nel_of_filename_set -let deps_of_file_key genv env (file_key: File_key.t) : (FilenameSet.t, string) result Lwt.t = - let {options; workers} = genv in - File_key.to_path file_key %>>= fun path -> +let deps_of_file_key ~reader genv env (file_key : File_key.t) : + (FilenameSet.t, string) result Lwt.t = + let { options; workers } = genv in + File_key.to_path file_key + %>>= fun path -> let fileinput = File_input.FileName path in - File_input.content_of_file_input fileinput %>>| fun content -> - let%lwt all_deps, _ = get_dependents options workers env file_key content in + File_input.content_of_file_input fileinput + %>>| fun content -> + let%lwt all_deps = get_all_dependents ~reader options workers env file_key content in Lwt.return all_deps -let deps_of_file_keys genv env (file_keys: File_key.t list) : (FilenameSet.t, string) result Lwt.t = +let deps_of_file_keys ~reader genv env (file_keys : File_key.t list) : + (FilenameSet.t, string) result Lwt.t = (* We need to use map_s (rather than map_p) because we cannot interleave calls into * MultiWorkers. 
*) - let%lwt deps_result = Lwt_list.map_s (deps_of_file_key genv env) file_keys in - Result.all deps_result %>>| fun (deps: FilenameSet.t list) -> + let%lwt deps_result = Lwt_list.map_s (deps_of_file_key ~reader genv env) file_keys in + Result.all deps_result + %>>| fun (deps : FilenameSet.t list) -> Lwt.return @@ List.fold_left FilenameSet.union FilenameSet.empty deps let focus_and_check genv env paths = - let%lwt new_env, _ = - Lazy_mode_utils.focus_and_check genv !env paths - in + let%lwt (new_env, _) = Lazy_mode_utils.focus_and_check genv !env paths in env := new_env; Lwt.return_unit let focus_and_check_filename_set genv env files = - let paths = - files - |> FilenameSet.elements - |> List.map File_key.to_path - |> Result.all - in - paths %>>| fun paths -> - Nel.of_list paths - |> Option.value_map ~default:Lwt.return_unit ~f:(focus_and_check genv env) + let paths = files |> FilenameSet.elements |> Core_list.map ~f:File_key.to_path |> Result.all in + paths + %>>| fun paths -> + Nel.of_list paths |> Option.value_map ~default:Lwt.return_unit ~f:(focus_and_check genv env) (* Returns location pairs such that: * - Each location is the definition location for a property with the given @@ -629,48 +300,41 @@ let focus_and_check_filename_set genv env files = * the given file, we evaluate an `ObjT ~> ObjT` constraint relating the two * object types. * - Note that this can return locations outside of the given file. -*) -let find_related_defs_in_file options name file = + *) +let find_related_defs_in_file ~reader options name file = let get_single_def_info_pairs_if_relevant cx (t1, t2) = - map2 (extract_def_loc cx t1 name) (extract_def_loc cx t2 name) ~f:begin fun x y -> match x, y with - | FoundObject loc1, FoundObject loc2 -> [Object loc1, Object loc2] - | FoundClass class_locs, FoundObject obj_loc -> - class_locs - |> Nel.to_list - |> List.map (fun class_loc -> (Class class_loc, Object obj_loc)) - | _ -> [] + map2 (extract_def_loc ~reader cx t1 name) (extract_def_loc ~reader cx t2 name) ~f:(fun x y -> + match (x, y) with + | (FoundObject loc1, FoundObject loc2) -> [(Object loc1, Object loc2)] + | (FoundClass class_locs, FoundObject obj_loc) -> + class_locs + |> Nel.to_list + |> Core_list.map ~f:(fun class_loc -> (Class class_loc, Object obj_loc)) + | _ -> []) (* TODO union types *) - end - in - let related_types: (Type.t * Type.t) list ref = ref [] in - let hook _cx t1 t2 = - related_types := (t1, t2)::!related_types in + let related_types : (Type.t * Type.t) list ref = ref [] in + let hook _cx t1 t2 = related_types := (t1, t2) :: !related_types in Type_inference_hooks_js.set_obj_to_obj_hook hook; Type_inference_hooks_js.set_instance_to_obj_hook hook; let cx_result = - get_ast_result file >>| fun (ast, file_sig, docblock) -> - Merge_service.merge_contents_context - options file ast docblock file_sig + get_ast_result ~reader file + >>| fun (ast, file_sig, docblock) -> + Merge_service.merge_contents_context ~reader options file ast docblock file_sig in unset_hooks (); - cx_result >>= fun (cx, _) -> - let results: (((single_def_info * single_def_info) list) list, string) result = - !related_types - |> List.map (get_single_def_info_pairs_if_relevant cx) - |> Result.all + cx_result + >>= fun (cx, _) -> + let results : ((single_def_info * single_def_info) list list, string) result = + !related_types |> Core_list.map ~f:(get_single_def_info_pairs_if_relevant cx) |> Result.all in results >>| List.concat (* Returns all locations which are considered related to the given definition locations. 
Definition * locations are considered related if they refer to a property with the same name, and their * enclosing object types appear in a subtype relationship with each other. *) -let find_related_defs - genv - env - (def_info: property_def_info) - (name: string) - : (property_def_info, string) result Lwt.t = +let find_related_defs ~reader genv env (def_info : property_def_info) (name : string) : + (property_def_info, string) result Lwt.t = (* Outline: * - Create a disjoint set for definition locations * - Seed it with every given def_loc @@ -684,248 +348,101 @@ let find_related_defs * described above. * - Iterate until we reach a fixed point *) - let {options; workers} = genv in + let { options; workers } = genv in let related_defs = let uf = UnionFind.of_list (Nel.to_list def_info) in - let hd, tl = def_info in + let (hd, tl) = def_info in List.iter (UnionFind.union uf hd) tl; uf in let process_files file_set = let node_modules_containers = !Files.node_modules_containers in - let%lwt (result: ((single_def_info * single_def_info) list, string) result list) = - MultiWorkerLwt.call workers - ~job: begin fun _acc files -> - Files.node_modules_containers := node_modules_containers; - List.map (find_related_defs_in_file options name) files - end - ~merge: List.rev_append - ~neutral: [] - ~next: (MultiWorkerLwt.next workers (FilenameSet.elements file_set)) + let%lwt (result : ((single_def_info * single_def_info) list, string) result list) = + MultiWorkerLwt.call + workers + ~job: + begin + fun _acc files -> + Files.node_modules_containers := node_modules_containers; + Core_list.map ~f:(find_related_defs_in_file ~reader options name) files + end + ~merge:List.rev_append + ~neutral:[] + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements file_set)) in - Result.all result %>>| fun (pairs: (single_def_info * single_def_info) list list) -> + Result.all result + %>>| fun (pairs : (single_def_info * single_def_info) list list) -> List.iter (List.iter (fun (x, y) -> UnionFind.union related_defs x y)) pairs; Lwt.return_unit in let get_unchecked_roots current_def_info checked_files = - current_def_info |> all_locs_of_property_def_info |> files_of_locs >>| fun roots -> - FilenameSet.diff roots checked_files + current_def_info + |> all_locs_of_property_def_info + |> files_of_locs + >>| (fun roots -> FilenameSet.diff roots checked_files) in let get_files_to_check unchecked_roots checked_files = - let%lwt deps = deps_of_file_keys genv env (FilenameSet.elements unchecked_roots) in - deps %>>| fun deps -> - Lwt.return ( - FilenameSet.union - (FilenameSet.diff deps checked_files) - unchecked_roots - ) + let%lwt deps = deps_of_file_keys ~reader genv env (FilenameSet.elements unchecked_roots) in + deps + %>>| fun deps -> + Lwt.return (FilenameSet.union (FilenameSet.diff deps checked_files) unchecked_roots) in let rec loop current_def_info checked_files = - get_unchecked_roots current_def_info checked_files %>>= fun unchecked_roots -> + get_unchecked_roots current_def_info checked_files + %>>= fun unchecked_roots -> if FilenameSet.is_empty unchecked_roots then Lwt.return (Ok current_def_info) - else begin + else let%lwt result = focus_and_check_filename_set genv env unchecked_roots in - result %>>= fun () -> + result + %>>= fun () -> let%lwt files_to_check = get_files_to_check unchecked_roots checked_files in - files_to_check %>>= fun files_to_check -> + files_to_check + %>>= fun files_to_check -> let%lwt check_result = process_files files_to_check in - check_result %>>= fun () -> + check_result + %>>= fun 
() -> let checked_files = FilenameSet.union checked_files files_to_check in let current_def_info = let updated_def_info = UnionFind.members related_defs (Nel.hd current_def_info) in - Nel.of_list updated_def_info - |> Result.of_option ~error:"Unexpected empty list" + Nel.of_list updated_def_info |> Result.of_option ~error:"Unexpected empty list" in - current_def_info %>>= fun current_def_info -> - loop current_def_info checked_files - end + current_def_info %>>= (fun current_def_info -> loop current_def_info checked_files) in loop def_info FilenameSet.empty -let def_info_of_typecheck_results cx props_access_info = - let def_info_of_class_member_locs locs = - (* We want to include the immediate implementation as well as all superclass implementations. - * If we wanted a mode where superclass implementations were not included, for example, we - * could choose to take only the first extracted location. *) - Nel.map (fun loc -> Class loc) locs - in - let def_info_of_type name ty = - let rec def_info_of_def_loc = function - | FoundClass locs -> Some (def_info_of_class_member_locs locs) - | FoundObject loc -> Some (Nel.one (Object loc)) - | FoundUnion def_locs -> - def_locs - |> Nel.map def_info_of_def_loc - |> Nel.cat_maybes - |> Option.map ~f:Nel.concat - | NoDefFound - | UnsupportedType - | AnyType -> None - in - extract_def_loc cx ty name >>| def_info_of_def_loc - in - match props_access_info with - | None -> Ok None - | Some (Obj_def (loc, name)) -> - Ok (Some (Nel.one (Object loc), name)) - | Some (Class_def (ty, name, static)) -> - if static then - (* Here, `ty` ends up resolving to `ObjT` so we lose the knowledge that this is a static - * property. This means that we don't get the fancy look-up-the-inheritance-chain behavior - * that we get with class instances. That would be nice to add at some point. *) - def_info_of_type name ty - >>| Option.map ~f:(fun def_info -> (def_info, name)) - else - (* We get the type of the class back here, so we need to extract the type of an instance *) - extract_instancet cx ty >>= fun ty -> - begin extract_def_loc_resolved cx ty name >>= function - | FoundClass locs -> Ok (Some (def_info_of_class_member_locs locs, name)) - | FoundUnion _ - | FoundObject _ -> Error "Expected to extract class def info from a class" - | _ -> Error "Unexpectedly failed to extract definition from known type" - end - | Some (Use (ty, name)) -> - def_info_of_type name ty - >>| Option.map ~f:(fun def_info -> (def_info, name)) - | Some (Use_in_literal (types, name)) -> - let def_infos_result = - Nel.map (def_info_of_type name) types - |> Nel.result_all - in - def_infos_result >>| fun def_infos -> - Nel.cat_maybes def_infos - |> Option.map ~f:(Nel.concat) - |> Option.map ~f:(fun def_info -> (def_info, name)) - -let add_literal_properties literal_key_info def_info = - (* If we happen to be on an object property, include the location of that - * property as a def loc. We don't want to do that above because: - * (a) We could also encounter a `Use_in_literal` if this object literal flows - * into another object type. This would force us to make props_access_info a - * list and add additional complexity just for the sake of this one case. - * (b) We would have to add a type inference hook, which we are trying to - * avoid. 
*) - let def_info = match def_info, literal_key_info with - | None, None -> Ok None - | Some _, None -> Ok def_info - | None, Some (_, loc, name) -> Ok (Some (Nel.one (Object loc), name)) - | Some (defs, name1), Some (_, loc, name2) -> - if name1 <> name2 then - Error "Unexpected name mismatch" - else - Ok (Some (Nel.cons (Object loc) defs, name1)) - in - Result.map def_info ~f:(Option.map ~f:(fun (prop_def_info, name) -> Property (prop_def_info, name))) - -let get_def_info genv env profiling file_key content loc: (def_info option, string) result Lwt.t = - let {options; workers} = genv in - let props_access_info = ref (Ok None) in - compute_ast_result file_key content - %>>= fun (ast, file_sig, info) -> - let info = Docblock.set_flow_mode_for_ide_command info in - let literal_key_info: (Loc.t * Loc.t * string) option = ObjectKeyAtLoc.get ast loc in - let%lwt cx = - set_def_loc_hook props_access_info literal_key_info loc; - let%lwt cx, _ = Profiling_js.with_timer_lwt profiling ~timer:"MergeContents" ~f:(fun () -> - let%lwt () = - Types_js.ensure_checked_dependencies ~options ~profiling ~workers ~env file_key file_sig - in - Lwt.return @@ - Merge_service.merge_contents_context options file_key ast info file_sig - ) in - Lwt.return cx - in - unset_hooks (); - !props_access_info %>>= fun props_access_info -> - let def_info = def_info_of_typecheck_results cx props_access_info in - let def_info = def_info >>= add_literal_properties literal_key_info in - let def_info = def_info >>= function - | Some _ as def_info -> Ok def_info - | None -> - (* Check if we are on a CJS import/export. These cases are not covered above since the type - * system hooks don't quite get us what we want. *) - let export_loc = - let open File_sig in - List.fold_left begin fun acc -> function - | Require { source = (_, module_ref); require_loc; _ } -> - if Loc.contains require_loc loc then begin match acc with - | Error _ -> acc - | Ok (Some _) -> Error "Did not expect multiple requires to match one location" - | Ok None -> - let external_file_sig = - let filename = file_key_of_module_ref file_key module_ref in - Option.bind filename Parsing_heaps.get_file_sig - in - Result.return @@ Option.bind external_file_sig begin fun external_file_sig -> - match external_file_sig.module_sig.module_kind with - | CommonJS { mod_exp_loc=Some loc; _ } -> Some loc - | _ -> None - end - end else acc - | _ -> acc - end (Ok None) file_sig.module_sig.requires - in - let export_loc = export_loc >>| function - | Some _ as x -> x - | None -> - let open File_sig in - match file_sig.module_sig.module_kind with - | CommonJS { mod_exp_loc=Some mod_exp_loc; _ } -> - if Loc.contains mod_exp_loc loc then Some mod_exp_loc - else None - | _ -> None - in - Result.map export_loc ~f:(Option.map ~f:(fun x -> CJSExport x)) - in - Lwt.return @@ def_info - -let find_refs_global genv env multi_hop def_info = +let find_global_refs ~reader genv env ~multi_hop def_info = let%lwt def_info = if multi_hop then match def_info with | Property (property_def_info, name) -> - let%lwt result = find_related_defs genv env property_def_info name in - result %>>| fun x -> Lwt.return @@ Property (x, name) + let%lwt result = find_related_defs ~reader genv env property_def_info name in + result %>>| (fun x -> Lwt.return @@ Property (x, name)) | CJSExport _ -> Lwt.return (Ok def_info) else Lwt.return (Ok def_info) in - def_info %>>= fun def_info -> - roots_of_def_info def_info %>>= fun root_file_keys -> - let root_file_paths_result = - Nel.map File_key.to_path root_file_keys - |> 
Nel.result_all - in - root_file_paths_result %>>= fun root_file_paths -> + def_info + %>>= fun def_info -> + roots_of_def_info def_info + %>>= fun root_file_keys -> + let root_file_paths_result = Nel.map File_key.to_path root_file_keys |> Nel.result_all in + root_file_paths_result + %>>= fun root_file_paths -> let%lwt () = focus_and_check genv env root_file_paths in - let%lwt deps_result = deps_of_file_keys genv env (Nel.to_list root_file_keys) in - deps_result %>>= fun deps -> + let%lwt deps_result = deps_of_file_keys ~reader genv env (Nel.to_list root_file_keys) in + deps_result + %>>= fun deps -> let dependent_file_count = FilenameSet.cardinal deps in let relevant_files = - Nel.to_list root_file_keys - |> FilenameSet.of_list - |> FilenameSet.union deps + Nel.to_list root_file_keys |> FilenameSet.of_list |> FilenameSet.union deps in - Hh_logger.info - "find-refs: searching %d dependent modules for references" - dependent_file_count; - let%lwt refs = find_refs_in_multiple_files genv relevant_files def_info in - refs %>>| fun refs -> - Lwt.return @@ Some ((display_name_of_def_info def_info, refs), Some dependent_file_count) - -let find_refs_local genv file_key content def_info = - compute_ast_result file_key content >>= fun ast_info -> - find_refs_in_file genv.options ast_info file_key def_info >>= fun refs -> - Ok (Some ((display_name_of_def_info def_info, refs), None)) + Hh_logger.info "find-refs: searching %d dependent modules for references" dependent_file_count; + let%lwt refs = find_refs_in_multiple_files ~reader genv relevant_files def_info in + refs + %>>| fun refs -> + Lwt.return ((display_name_of_def_info def_info, refs), Some dependent_file_count) -let find_refs genv env ~profiling ~content file_key loc ~global ~multi_hop = - let%lwt def_info = get_def_info genv env profiling file_key content loc in - def_info %>>= fun def_info_opt -> - match def_info_opt with - | None -> Lwt.return (Ok None) - | Some def_info -> - if global || multi_hop then - find_refs_global genv env multi_hop def_info - else - Lwt.return @@ find_refs_local genv file_key content def_info +let find_local_refs ~reader ~options file_key ast_info def_info = + find_refs_in_file ~reader options ast_info file_key def_info + >>= (fun refs -> Ok (display_name_of_def_info def_info, refs)) diff --git a/src/server/find_refs/propertyFindRefs.mli b/src/server/find_refs/propertyFindRefs.mli index 551f33b81e3..4a17625920a 100644 --- a/src/server/find_refs/propertyFindRefs.mli +++ b/src/server/find_refs/propertyFindRefs.mli @@ -1,17 +1,22 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val find_refs: +val find_local_refs : + reader:State_reader.t -> + options:Options.t -> + File_key.t -> + FindRefsUtils.ast_info -> + GetDefUtils.def_info -> + (FindRefsTypes.find_refs_found, string) result + +val find_global_refs : + reader:State_reader.t -> ServerEnv.genv -> ServerEnv.env ref -> - profiling: Profiling_js.running -> - content: string -> - File_key.t -> - Loc.t -> - global: bool -> - multi_hop: bool -> - ((FindRefsTypes.find_refs_found * int option) option, string) result Lwt.t + multi_hop:bool -> + GetDefUtils.def_info -> + (FindRefsTypes.find_refs_found * int option, string) result Lwt.t diff --git a/src/server/find_refs/variableFindRefs.ml b/src/server/find_refs/variableFindRefs.ml index 6eda2cb361b..e7bfab69582 100644 --- a/src/server/find_refs/variableFindRefs.ml +++ b/src/server/find_refs/variableFindRefs.ml @@ -1,26 +1,28 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -open Utils_js + +open Loc_collections +module Scope_api = Scope_api.With_Loc let local_find_refs ast loc = - let open Scope_api in - let scope_info = Scope_builder.program ast in - let all_uses = all_uses scope_info in - let matching_uses = LocSet.filter (fun use -> Loc.contains use loc) all_uses in - let num_matching_uses = LocSet.cardinal matching_uses in - if num_matching_uses = 0 then - None - else if num_matching_uses > 1 then - (* This is unlikely enough that we can just throw *) - failwith "Multiple identifiers were unexpectedly matched" - else - let use = LocSet.choose matching_uses in - let def = def_of_use scope_info use in - let sorted_locs = LocSet.elements @@ uses_of_def scope_info ~exclude_def:false def in - let name = Def.(def.actual_name) in - let sorted_locs = List.map (fun loc -> (FindRefsTypes.Local, loc)) sorted_locs in - Some ((name, sorted_locs), Nel.hd def.Def.locs) + Scope_api.( + let scope_info = Scope_builder.program ast in + let all_uses = all_uses scope_info in + let matching_uses = LocSet.filter (fun use -> Loc.contains use loc) all_uses in + let num_matching_uses = LocSet.cardinal matching_uses in + if num_matching_uses = 0 then + None + else if num_matching_uses > 1 then + (* This is unlikely enough that we can just throw *) + failwith "Multiple identifiers were unexpectedly matched" + else + let use = LocSet.choose matching_uses in + let def = def_of_use scope_info use in + let sorted_locs = LocSet.elements @@ uses_of_def scope_info ~exclude_def:false def in + let name = Def.(def.actual_name) in + let sorted_locs = Core_list.map ~f:(fun loc -> (FindRefsTypes.Local, loc)) sorted_locs in + Some ((name, sorted_locs), Nel.hd def.Def.locs)) diff --git a/src/server/find_refs/variableFindRefs.mli b/src/server/find_refs/variableFindRefs.mli index efe222bc583..cf764a14ded 100644 --- a/src/server/find_refs/variableFindRefs.mli +++ b/src/server/find_refs/variableFindRefs.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,7 +8,7 @@ (* This variant is limited strictly to local variables, and does not attempt to find anything to do * with exports (the above will find some additional locations related to imports/exports even with * global:false). 
*) -val local_find_refs: +val local_find_refs : (Loc.t, Loc.t) Flow_ast.program -> Loc.t -> - (FindRefsTypes.find_refs_found * Loc.t (* definition location *)) option + (FindRefsTypes.find_refs_found * Loc.t) (* definition location *) option diff --git a/src/server/lazy_mode_utils/dune b/src/server/lazy_mode_utils/dune new file mode 100644 index 00000000000..e4440465afb --- /dev/null +++ b/src/server/lazy_mode_utils/dune @@ -0,0 +1,9 @@ +(library + (name flow_server_lazy_mode_utils) + (wrapped false) + (libraries + flow_server_env + flow_server_rechecker + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/server/lazy_mode_utils/lazy_mode_utils.ml b/src/server/lazy_mode_utils/lazy_mode_utils.ml index 0c68685b665..69de508ea0e 100644 --- a/src/server/lazy_mode_utils/lazy_mode_utils.ml +++ b/src/server/lazy_mode_utils/lazy_mode_utils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,13 +10,13 @@ open Utils_js let focus_and_check genv env filenames = let filenames = SSet.of_list (Nel.to_list filenames) in - let focused = Rechecker.process_updates genv env filenames in - - let files_to_focus = focused + let focused = Rechecker.process_updates ~options:genv.ServerEnv.options env filenames in + let files_to_focus = + focused |> Fn.flip FilenameSet.diff (CheckedSet.focused env.checked_files) |> Fn.flip FilenameSet.diff (CheckedSet.dependents env.checked_files) in - - match%lwt Rechecker.recheck_single ~files_to_focus genv env with + let files_to_force = CheckedSet.add ~focused:files_to_focus CheckedSet.empty in + match%lwt Rechecker.recheck_single ~files_to_force genv env with | Error env -> Lwt.return (env, false) | Ok (_summary, env) -> Lwt.return (env, true) diff --git a/src/server/monitor_listener/dune b/src/server/monitor_listener/dune new file mode 100644 index 00000000000..7492866cf56 --- /dev/null +++ b/src/server/monitor_listener/dune @@ -0,0 +1,21 @@ +(library + (name flow_server_monitor_listener) + (wrapped false) + (modules :standard \ serverMonitorListenerState workloadStream) + (libraries + flow_exit_status + flow_server_command_handler + flow_server_env + ) + (preprocess (pps lwt_ppx)) +) + +(library + (name flow_server_monitor_listener_state) + (wrapped false) + (modules serverMonitorListenerState workloadStream) + (libraries + flow_server_env + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/server/monitor_listener/serverMonitorListener.ml b/src/server/monitor_listener/serverMonitorListener.ml index e5494a75f8f..126e4423adb 100644 --- a/src/server/monitor_listener/serverMonitorListener.ml +++ b/src/server/monitor_listener/serverMonitorListener.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -13,39 +13,58 @@ module ListenLoop = LwtLoop.Make (struct type acc = genv let handle_message genv = function - | MonitorProt.Request (request_id, command) -> - CommandHandler.enqueue_or_handle_ephemeral genv (request_id, command) - | MonitorProt.PersistentConnectionRequest (client_id, request) -> - CommandHandler.enqueue_persistent genv client_id request; - Lwt.return_unit - | MonitorProt.NewPersistentConnection (client_id, logging_context, lsp) -> - ServerMonitorListenerState.push_new_env_update (fun env -> { env with - connections = Persistent_connection.add_client env.connections client_id logging_context lsp - }); - Lwt.return_unit - | MonitorProt.DeadPersistentConnection client_id -> - ServerMonitorListenerState.push_new_env_update (fun env -> { env with - connections = Persistent_connection.remove_client env.connections client_id - }); - Lwt.return_unit - | MonitorProt.FileWatcherNotification changed_files -> - ServerMonitorListenerState.push_files_to_recheck changed_files; - Lwt.return_unit - | MonitorProt.PleaseDie please_die_reason -> - (* TODO - find a way to gracefully kill the workers. At the moment, if the workers are in the - * middle of a job this will lead to some log spew. We probably should send SIGTERM to each - * worker and set up a signal handler to kill the fork and exit gracefully. Might also want - * to use the SharedMem.cancel thingy *) - Hh_logger.info "Killing the worker processes"; - WorkerController.killall (); - let msg = match please_die_reason with - | MonitorProt.MonitorExiting (monitor_exit_status, monitor_msg) -> - Utils.spf - "Monitor is exiting with status %s (%s)" - (FlowExitStatus.to_string monitor_exit_status) - monitor_msg - in - FlowExitStatus.(exit ~msg Killed_by_monitor) + | MonitorProt.Request (request_id, command) -> + CommandHandler.enqueue_or_handle_ephemeral genv (request_id, command) + | MonitorProt.PersistentConnectionRequest (client_id, request) -> + CommandHandler.enqueue_persistent genv client_id request + | MonitorProt.NewPersistentConnection (client_id, lsp_init_params) -> + (* Immediately register the new client *) + Persistent_connection.add_client client_id lsp_init_params; + ServerMonitorListenerState.push_new_env_update (fun env -> + { + env with + connections = Persistent_connection.add_client_to_clients env.connections client_id; + }); + Lwt.return_unit + | MonitorProt.DeadPersistentConnection client_id -> + (* Immediately remove the dead client *) + Persistent_connection.remove_client client_id; + ServerMonitorListenerState.push_new_env_update (fun env -> + { + env with + connections = + Persistent_connection.remove_client_from_clients env.connections client_id; + }); + Lwt.return_unit + | MonitorProt.FileWatcherNotification (changed_files, metadata) -> + let file_count = SSet.cardinal changed_files in + let reason = + LspProt.( + match metadata with + | Some { MonitorProt.changed_mergebase = true; total_update_distance } -> + Rebased { distance = total_update_distance; file_count } + | _ when file_count = 1 -> + Single_file_changed { filename = SSet.elements changed_files |> List.hd } + | _ -> Many_files_changed { file_count }) + in + ServerMonitorListenerState.push_files_to_recheck ?metadata ~reason changed_files; + Lwt.return_unit + | MonitorProt.PleaseDie please_die_reason -> + (* TODO - find a way to gracefully kill the workers. At the moment, if the workers are in the + * middle of a job this will lead to some log spew. 
We probably should send SIGTERM to each + * worker and set up a signal handler to kill the fork and exit gracefully. Might also want + * to use the SharedMem_js.cancel thingy *) + Hh_logger.info "Killing the worker processes"; + WorkerController.killall (); + let msg = + match please_die_reason with + | MonitorProt.MonitorExiting (monitor_exit_status, monitor_msg) -> + Utils.spf + "Monitor is exiting with status %s (%s)" + (FlowExitStatus.to_string monitor_exit_status) + monitor_msg + in + FlowExitStatus.(exit ~msg Killed_by_monitor) let main genv = (* read a message from the monitor *) diff --git a/src/server/monitor_listener/serverMonitorListener.mli b/src/server/monitor_listener/serverMonitorListener.mli index 384df41f43e..aba4294efb8 100644 --- a/src/server/monitor_listener/serverMonitorListener.mli +++ b/src/server/monitor_listener/serverMonitorListener.mli @@ -1,8 +1,8 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val listen_for_messages: ServerEnv.genv -> unit Lwt.t +val listen_for_messages : ServerEnv.genv -> unit Lwt.t diff --git a/src/server/monitor_listener/serverMonitorListenerState.ml b/src/server/monitor_listener/serverMonitorListenerState.ml index bfad748b6a5..da1e6518dbb 100644 --- a/src/server/monitor_listener/serverMonitorListenerState.ml +++ b/src/server/monitor_listener/serverMonitorListenerState.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,16 +7,33 @@ module FilenameSet = Utils_js.FilenameSet -type workload = ServerEnv.env -> ServerEnv.env Lwt.t type env_update = ServerEnv.env -> ServerEnv.env (* Workloads are client requests which we processes FIFO *) -let workload_stream, push_new_workload = Lwt_stream.create () -let push_new_workload workload = push_new_workload (Some workload) +let workload_stream = WorkloadStream.create () + +let push_new_workload workload = WorkloadStream.push workload workload_stream + +let push_new_parallelizable_workload workload = + WorkloadStream.push_parallelizable workload workload_stream + +let deferred_parallelizable_workloads_rev = ref [] + +let defer_parallelizable_workload workload = + deferred_parallelizable_workloads_rev := workload :: !deferred_parallelizable_workloads_rev + +let requeue_deferred_parallelizable_workloads () = + let workloads = !deferred_parallelizable_workloads_rev in + deferred_parallelizable_workloads_rev := []; + Core_list.iter workloads ~f:(fun workload -> + WorkloadStream.requeue_parallelizable workload workload_stream) + (* Env updates are...well...updates to our env. They must be handled in the main thread. Also FIFO * but are quick to handle *) -let env_update_stream, push_new_env_update = Lwt_stream.create () +let (env_update_stream, push_new_env_update) = Lwt_stream.create () + let push_new_env_update env_update = push_new_env_update (Some env_update) + (* Outstanding cancellation requests are lodged here as soon as they arrive * from the monitor (NOT FIFO) as well as being lodged in the normal FIFO * queue. 
(1) if there was a workload sent prior to the cancellation request @@ -31,78 +48,166 @@ let push_new_env_update env_update = push_new_env_update (Some env_update) let cancellation_requests = ref Lsp.IdSet.empty type recheck_msg = { - files: SSet.t; callback: (Profiling_js.finished option -> unit) option; - focus: bool; + file_watcher_metadata: MonitorProt.file_watcher_metadata option; + files: recheck_files; + recheck_reason: LspProt.recheck_reason; } + +and recheck_files = + | ChangedFiles of SSet.t + | FilesToForceFocusedAndRecheck of SSet.t + | CheckedSetToForce of CheckedSet.t + (* Files which have changed *) -let recheck_stream, push_recheck_msg = Lwt_stream.create () -let push_recheck_msg ~focus ?callback files = push_recheck_msg (Some { files; callback; focus; }) -let push_files_to_recheck = push_recheck_msg ~focus:false -let push_files_to_focus = push_recheck_msg ~focus:true +let (recheck_stream, push_recheck_msg) = Lwt_stream.create () + +let push_recheck_msg ?metadata ?callback ~reason:recheck_reason files = + push_recheck_msg (Some { files; callback; file_watcher_metadata = metadata; recheck_reason }) + +let push_files_to_recheck ?metadata ?callback ~reason changed_files = + push_recheck_msg ?metadata ?callback ~reason (ChangedFiles changed_files) -let pop_next_workload () = - match Lwt_stream.get_available_up_to 1 workload_stream with - | [ workload ] -> Some workload - | [] -> None - | _ -> failwith "Unreachable" +let push_files_to_force_focused_and_recheck ?callback ~reason forced_focused_files = + push_recheck_msg ?callback ~reason (FilesToForceFocusedAndRecheck forced_focused_files) + +let push_checked_set_to_force ?callback ~reason checked_set = + push_recheck_msg ?callback ~reason (CheckedSetToForce checked_set) + +let pop_next_workload () = WorkloadStream.pop workload_stream + +let rec wait_and_pop_parallelizable_workload () = + let%lwt () = WorkloadStream.wait_for_parallelizable_workload workload_stream in + match WorkloadStream.pop_parallelizable workload_stream with + | Some workload -> Lwt.return workload + | None -> wait_and_pop_parallelizable_workload () let update_env env = - Lwt_stream.get_available env_update_stream - |> List.fold_left (fun env f -> f env) env + Lwt_stream.get_available env_update_stream |> List.fold_left (fun env f -> f env) env type recheck_workload = { files_to_recheck: FilenameSet.t; - files_to_focus: FilenameSet.t; + files_to_force: CheckedSet.t; profiling_callbacks: (Profiling_js.finished option -> unit) list; + metadata: MonitorProt.file_watcher_metadata; + recheck_reasons_rev: LspProt.recheck_reason list; } -let empty_recheck_workload = { - files_to_recheck = FilenameSet.empty; - files_to_focus = FilenameSet.empty; - profiling_callbacks = []; -} -let recheck_workload_is_empty { files_to_recheck; files_to_focus; profiling_callbacks=_; } = - FilenameSet.is_empty files_to_recheck && FilenameSet.is_empty files_to_focus +let empty_recheck_workload = + { + files_to_recheck = FilenameSet.empty; + files_to_force = CheckedSet.empty; + profiling_callbacks = []; + metadata = MonitorProt.empty_file_watcher_metadata; + recheck_reasons_rev = []; + } + +let recheck_workload_is_empty workload = + let { + files_to_recheck; + files_to_force; + profiling_callbacks = _; + metadata = _; + recheck_reasons_rev = _; + } = + workload + in + FilenameSet.is_empty files_to_recheck && CheckedSet.is_empty files_to_force + let recheck_acc = ref empty_recheck_workload -let recheck_fetch ~process_updates = + +(* Process the messages which are currently in the recheck 
stream and return the resulting workload + * + * The recheck stream gives us files as a set of strings. `process_updates` takes that set of + * strings and returns a `FilenameSet.t`. It filters out stuff we don't care about and causes us to + * exit on incompatible changes. + * + * `get_forced` is a function which gives us the `CheckedSet.t` of currently forced files. So if + * the recheck stream is asking us to focus `foo.js` but it's already focused, then we can ignore + * it. + *) +let recheck_fetch ~process_updates ~get_forced = recheck_acc := - Lwt_stream.get_available recheck_stream (* Get all the files which have changed *) - |> Core_list.fold_left ~init:(!recheck_acc) ~f:(fun workload { files; callback; focus; } -> - let files = process_updates files in - let workload = match callback with - | None -> workload - | Some callback -> - if FilenameSet.is_empty files - then begin - (* Call the callback immediately if there's nothing to recheck *) - callback None; - workload - end else - { workload with profiling_callbacks = callback :: workload.profiling_callbacks; } - in - if focus - then { workload with files_to_focus = FilenameSet.union files workload.files_to_focus; } - else { workload with files_to_recheck = FilenameSet.union files workload.files_to_recheck; } - ) -let get_and_clear_recheck_workload ~process_updates = - recheck_fetch ~process_updates; + Lwt_stream.get_available recheck_stream + (* Get all the files which have changed *) + |> Core_list.fold_left + ~init:!recheck_acc + ~f:(fun workload { files; callback; file_watcher_metadata; recheck_reason } -> + let (is_empty_msg, workload) = + match files with + | ChangedFiles changed_files -> + let updates = process_updates changed_files in + ( FilenameSet.is_empty updates, + { + workload with + files_to_recheck = FilenameSet.union updates workload.files_to_recheck; + } ) + | FilesToForceFocusedAndRecheck forced_focused_files -> + let updates = process_updates forced_focused_files in + let focused = FilenameSet.diff updates (get_forced () |> CheckedSet.focused) in + ( FilenameSet.is_empty updates, + { + workload with + files_to_force = CheckedSet.add ~focused workload.files_to_force; + files_to_recheck = FilenameSet.union updates workload.files_to_recheck; + } ) + | CheckedSetToForce checked_set -> + let checked_set = CheckedSet.diff checked_set (get_forced ()) in + ( CheckedSet.is_empty checked_set, + { + workload with + files_to_force = CheckedSet.union checked_set workload.files_to_force; + } ) + in + let workload = + match callback with + | None -> workload + | Some callback -> + if is_empty_msg then ( + (* Call the callback immediately if there's nothing to recheck *) + callback None; + workload + ) else + { workload with profiling_callbacks = callback :: workload.profiling_callbacks } + in + let workload = + { workload with recheck_reasons_rev = recheck_reason :: workload.recheck_reasons_rev } + in + MonitorProt.( + match file_watcher_metadata with + | None -> workload + | Some { total_update_distance; changed_mergebase } -> + let total_update_distance = + total_update_distance + workload.metadata.total_update_distance + in + let changed_mergebase = changed_mergebase || workload.metadata.changed_mergebase in + { workload with metadata = { total_update_distance; changed_mergebase } })) + +let get_and_clear_recheck_workload ~process_updates ~get_forced = + recheck_fetch ~process_updates ~get_forced; let recheck_workload = !recheck_acc in recheck_acc := empty_recheck_workload; recheck_workload -let rec 
wait_for_updates_for_recheck ~process_updates = + +let rec wait_for_updates_for_recheck ~process_updates ~get_forced = let%lwt _ = Lwt_stream.is_empty recheck_stream in - recheck_fetch ~process_updates; - if recheck_workload_is_empty !recheck_acc - then wait_for_updates_for_recheck ~process_updates - else Lwt.return_unit + recheck_fetch ~process_updates ~get_forced; + if recheck_workload_is_empty !recheck_acc then + wait_for_updates_for_recheck ~process_updates ~get_forced + else + Lwt.return_unit (* Block until any stream receives something *) -let wait_for_anything ~process_updates = - let%lwt () = Lwt.pick [ - (let%lwt _ = Lwt_stream.is_empty workload_stream in Lwt.return_unit); - (let%lwt _ = Lwt_stream.is_empty env_update_stream in Lwt.return_unit); - (let%lwt _ = Lwt_stream.is_empty recheck_stream in Lwt.return_unit); - wait_for_updates_for_recheck ~process_updates; - ] in +let wait_for_anything ~process_updates ~get_forced = + let%lwt () = + Lwt.pick + [ + WorkloadStream.wait_for_workload workload_stream; + (let%lwt _ = Lwt_stream.is_empty env_update_stream in + Lwt.return_unit); + (let%lwt _ = Lwt_stream.is_empty recheck_stream in + Lwt.return_unit); + wait_for_updates_for_recheck ~process_updates ~get_forced; + ] + in Lwt.return_unit diff --git a/src/server/monitor_listener/serverMonitorListenerState.mli b/src/server/monitor_listener/serverMonitorListenerState.mli index f07487a339d..7acd8bc6111 100644 --- a/src/server/monitor_listener/serverMonitorListenerState.mli +++ b/src/server/monitor_listener/serverMonitorListenerState.mli @@ -1,36 +1,71 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -type workload = ServerEnv.env -> ServerEnv.env Lwt.t type env_update = ServerEnv.env -> ServerEnv.env (* APIs to add to the state *) -val push_new_workload: workload -> unit -val push_new_env_update: env_update -> unit -val push_files_to_recheck: ?callback:(Profiling_js.finished option -> unit) -> SSet.t -> unit -val push_files_to_focus: ?callback:(Profiling_js.finished option -> unit) -> SSet.t -> unit -val cancellation_requests: Lsp.IdSet.t ref +val push_new_workload : WorkloadStream.workload -> unit + +val push_new_parallelizable_workload : WorkloadStream.parallelizable_workload -> unit + +val defer_parallelizable_workload : WorkloadStream.parallelizable_workload -> unit + +val requeue_deferred_parallelizable_workloads : unit -> unit + +val push_new_env_update : env_update -> unit + +val push_files_to_recheck : + ?metadata:MonitorProt.file_watcher_metadata -> + ?callback:(Profiling_js.finished option -> unit) -> + reason:LspProt.recheck_reason -> + SSet.t -> + unit + +val push_files_to_force_focused_and_recheck : + ?callback:(Profiling_js.finished option -> unit) -> + reason:LspProt.recheck_reason -> + SSet.t -> + unit + +val push_checked_set_to_force : + ?callback:(Profiling_js.finished option -> unit) -> + reason:LspProt.recheck_reason -> + CheckedSet.t -> + unit + +val cancellation_requests : Lsp.IdSet.t ref (* APIs to wait *) -val wait_for_anything: +val wait_for_anything : process_updates:(SSet.t -> Utils_js.FilenameSet.t) -> + get_forced:(unit -> CheckedSet.t) -> unit Lwt.t -val wait_for_updates_for_recheck: + +val wait_for_updates_for_recheck : process_updates:(SSet.t -> Utils_js.FilenameSet.t) -> + get_forced:(unit -> CheckedSet.t) -> unit Lwt.t (* APIs to consume *) type recheck_workload = { files_to_recheck: Utils_js.FilenameSet.t; - files_to_focus: Utils_js.FilenameSet.t; + files_to_force: CheckedSet.t; profiling_callbacks: (Profiling_js.finished option -> unit) list; + metadata: MonitorProt.file_watcher_metadata; + recheck_reasons_rev: LspProt.recheck_reason list; } -val pop_next_workload: unit -> workload option -val update_env: ServerEnv.env -> ServerEnv.env -val get_and_clear_recheck_workload: + +val pop_next_workload : unit -> WorkloadStream.workload option + +val wait_and_pop_parallelizable_workload : unit -> WorkloadStream.parallelizable_workload Lwt.t + +val update_env : ServerEnv.env -> ServerEnv.env + +val get_and_clear_recheck_workload : process_updates:(SSet.t -> Utils_js.FilenameSet.t) -> + get_forced:(unit -> CheckedSet.t) -> recheck_workload diff --git a/src/server/monitor_listener/workloadStream.ml b/src/server/monitor_listener/workloadStream.ml new file mode 100644 index 00000000000..00d9acf155a --- /dev/null +++ b/src/server/monitor_listener/workloadStream.ml @@ -0,0 +1,124 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* A WorkloadStream.t is a datastructure which keeps track of the workloads (aka commands) that the + * server has queued up to run. The basic operations are pushing new workloads and popping the + * oldest workloads. + * + * We keep parallelizable workloads and nonparallelizable workloads in separate queues. This allows + * the caller to ask for the next parallelizable workload or any workload. + * + * Parallelizable workloads can also be requeued. Requeueing basically sticks the workload at the + * front of the queue. 
We do this when parallelizable workloads are canceled due to a recheck. + *) + +type workload = ServerEnv.env -> ServerEnv.env Lwt.t + +type parallelizable_workload = ServerEnv.env -> unit Lwt.t + +type t = { + mutable parallelizable: (float * parallelizable_workload) ImmQueue.t; + mutable requeued_parallelizable: parallelizable_workload list; + mutable nonparallelizable: (float * workload) ImmQueue.t; + signal: unit Lwt_condition.t; +} + +let create () = + { + parallelizable = ImmQueue.empty; + requeued_parallelizable = []; + nonparallelizable = ImmQueue.empty; + signal = Lwt_condition.create (); + } + +(* Add a non-parallelizable workload to the stream and wake up anyone waiting *) +let push workload stream = + let now = Unix.gettimeofday () in + stream.nonparallelizable <- ImmQueue.push stream.nonparallelizable (now, workload); + Lwt_condition.broadcast stream.signal () + +(* Add a parallelizable workload to the stream and wake up anyone waiting *) +let push_parallelizable workload stream = + let now = Unix.gettimeofday () in + stream.parallelizable <- ImmQueue.push stream.parallelizable (now, workload); + Lwt_condition.broadcast stream.signal () + +(* Add a parallelizable workload to the front of the stream and wake up anyone waiting *) +let requeue_parallelizable workload stream = + stream.requeued_parallelizable <- workload :: stream.requeued_parallelizable; + Lwt_condition.broadcast stream.signal () + +(* Cast a parallelizable workload to a nonparallelizable workload. *) +let workload_of_parallelizable_workload parallelizable_workload env = + let%lwt () = parallelizable_workload env in + Lwt.return env + +(* Pop the oldest workload *) +let pop stream = + match stream.requeued_parallelizable with + | workload :: rest -> + (* Always prefer requeued parallelizable jobs *) + stream.requeued_parallelizable <- rest; + Some (workload_of_parallelizable_workload workload) + | [] -> + let (entry_p, parallelizable) = ImmQueue.peek stream.parallelizable in + let (entry_n, nonparallelizable) = ImmQueue.peek stream.nonparallelizable in + (* Pop from the parallelizable queue unless the nonparallelizable queue has an older entry *) + let use_parallelizable = + match (entry_p, entry_n) with + | (None, None) + | (Some _, None) -> + true + | (Some (timestamp_p, _), Some (timestamp_n, _)) -> timestamp_p <= timestamp_n + | (None, Some _) -> false + in + let (workload_opt, parallelizable, nonparallelizable) = + if use_parallelizable then + let (_, parallelizable) = ImmQueue.pop parallelizable in + let workload = + Option.map entry_p ~f:(fun (_, workload) -> workload_of_parallelizable_workload workload) + in + (workload, parallelizable, nonparallelizable) + else + let (_, nonparallelizable) = ImmQueue.pop nonparallelizable in + (Option.map entry_n ~f:snd, parallelizable, nonparallelizable) + in + stream.parallelizable <- parallelizable; + stream.nonparallelizable <- nonparallelizable; + workload_opt + +(* Pop the oldest parallelizable workload *) +let pop_parallelizable stream = + match stream.requeued_parallelizable with + | workload :: rest -> + (* Always prefer requeued parallelizable jobs *) + stream.requeued_parallelizable <- rest; + Some workload + | [] -> + let (entry_opt, parallelizable) = ImmQueue.pop stream.parallelizable in + stream.parallelizable <- parallelizable; + Option.map entry_opt ~f:snd + +(* Wait until there's a workload in the stream *) +let rec wait_for_workload stream = + if + stream.requeued_parallelizable = [] + && ImmQueue.is_empty stream.parallelizable + && ImmQueue.is_empty 
stream.nonparallelizable + then + let%lwt () = Lwt_condition.wait stream.signal in + wait_for_workload stream + else + Lwt.return_unit + +(* Wait until there's a parallelizable workload in the stream *) +let rec wait_for_parallelizable_workload stream = + if stream.requeued_parallelizable = [] && ImmQueue.is_empty stream.parallelizable then + let%lwt () = Lwt_condition.wait stream.signal in + wait_for_parallelizable_workload stream + else + Lwt.return_unit diff --git a/src/server/monitor_listener/workloadStream.mli b/src/server/monitor_listener/workloadStream.mli new file mode 100644 index 00000000000..a161e3989c4 --- /dev/null +++ b/src/server/monitor_listener/workloadStream.mli @@ -0,0 +1,28 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type workload = ServerEnv.env -> ServerEnv.env Lwt.t + +type parallelizable_workload = ServerEnv.env -> unit Lwt.t + +type t + +val create : unit -> t + +val push : workload -> t -> unit + +val push_parallelizable : parallelizable_workload -> t -> unit + +val requeue_parallelizable : parallelizable_workload -> t -> unit + +val pop : t -> workload option + +val pop_parallelizable : t -> parallelizable_workload option + +val wait_for_workload : t -> unit Lwt.t + +val wait_for_parallelizable_workload : t -> unit Lwt.t diff --git a/src/server/persistent_connection/dune b/src/server/persistent_connection/dune new file mode 100644 index 00000000000..5f966879552 --- /dev/null +++ b/src/server/persistent_connection/dune @@ -0,0 +1,8 @@ +(library + (name flow_server_persistent_connection) + (wrapped false) + (libraries + flow_monitor_rpc + flow_server_protocol + ) +) diff --git a/src/server/persistent_connection/persistent_connection.ml b/src/server/persistent_connection/persistent_connection.ml index da23cecbd4f..5a4cec139ee 100644 --- a/src/server/persistent_connection/persistent_connection.ml +++ b/src/server/persistent_connection/persistent_connection.ml @@ -1,224 +1,215 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module Prot = Persistent_connection_prot +module Prot = LspProt type single_client = { - is_lsp: bool; - logging_context: FlowEventLogger.logging_context; - subscribed: bool; - opened_files: string SMap.t; (* map from filename to content *) - client_id: Persistent_connection_prot.client_id; - lsp_initialize_params: Lsp.Initialize.params option; + client_id: Prot.client_id; + lsp_initialize_params: Lsp.Initialize.params; + mutable subscribed: bool; + mutable opened_files: string SMap.t; (* map from filename to content *) } -type t = single_client list +type t = Prot.client_id list -let to_string (clients: t) : string = - let client_to_string (client: single_client) : string = - Printf.sprintf "{id:%d opened:%d subscribed:%B context:%f}" - client.client_id (SMap.cardinal client.opened_files) - client.subscribed client.logging_context.FlowEventLogger.start_time - in - let clients_str = List.map client_to_string clients in - Printf.sprintf "[%s]" (String.concat ", " clients_str) +let active_clients : single_client IMap.t ref = ref IMap.empty + +let get_client client_id = IMap.get client_id !active_clients let empty = [] -let send_message_to_client response client = +let send_message_to_client (response : Prot.message_from_server) client = MonitorRPC.respond_to_persistent_connection ~client_id:client.client_id ~response -let send_message message client = send_message_to_client (message : Prot.response) client +let send_response (response : Prot.response_with_metadata) client = + send_message_to_client (Prot.RequestResponse response) client + +let send_notification (response : Prot.notification_from_server) client = + send_message_to_client (Prot.NotificationFromServer response) client let send_errors = (* We don't know what kind of file the filename represents, * so we have to try (almost) all of them. 
*) let get_warnings_for_file = let rec get_first_contained warn_map = function - | [] -> Errors.ErrorSet.empty - | filename::filenames -> - match Utils_js.FilenameMap.get filename warn_map with + | [] -> Errors.ConcreteLocPrintableErrorSet.empty + | filename :: filenames -> + (match Utils_js.FilenameMap.get filename warn_map with | Some errs -> errs - | None -> get_first_contained warn_map filenames + | None -> get_first_contained warn_map filenames) in fun filename warn_map -> - get_first_contained warn_map [ - File_key.SourceFile filename; - File_key.LibFile filename; - File_key.JsonFile filename; - File_key.ResourceFile filename; - ] + get_first_contained + warn_map + [ + File_key.SourceFile filename; + File_key.LibFile filename; + File_key.JsonFile filename; + File_key.ResourceFile filename; + ] in - - fun ~errors ~warnings client -> - let opened_filenames = SMap.bindings client.opened_files |> List.map fst in - let warnings = List.fold_right - (fun filename warn_acc -> - let file_warns = get_warnings_for_file filename warnings in - Errors.ErrorSet.union file_warns warn_acc) - opened_filenames Errors.ErrorSet.empty + fun ~errors_reason ~errors ~warnings client -> + let opened_filenames = SMap.bindings client.opened_files |> Core_list.map ~f:fst in + let warnings = + List.fold_right + (fun filename warn_acc -> + let file_warns = get_warnings_for_file filename warnings in + Errors.ConcreteLocPrintableErrorSet.union file_warns warn_acc) + opened_filenames + Errors.ConcreteLocPrintableErrorSet.empty in - send_message (Prot.Errors {errors; warnings}) client + send_notification (Prot.Errors { errors; warnings; errors_reason }) client -let send_errors_if_subscribed ~client ~errors ~warnings = - if client.subscribed - then send_errors ~errors ~warnings client +let send_errors_if_subscribed ~client ~errors_reason ~errors ~warnings = + if client.subscribed then send_errors ~errors_reason ~errors ~warnings client let send_single_lsp (message, metadata) client = - send_message (Prot.LspFromServer (message, metadata)) client + send_response (Prot.LspFromServer message, metadata) client -let send_single_start_recheck client = - send_message (Prot.StartRecheck) client +let send_single_start_recheck client = send_notification Prot.StartRecheck client -let send_single_end_recheck ~lazy_stats client = - send_message (Prot.EndRecheck lazy_stats) client +let send_single_end_recheck ~lazy_stats client = + send_notification (Prot.EndRecheck lazy_stats) client -let add_client clients client_id logging_context lsp = +let add_client client_id lsp_initialize_params = let new_client = - { - is_lsp = (lsp <> None); - logging_context; - subscribed = false; - opened_files = SMap.empty; - client_id; - lsp_initialize_params = lsp; - } + { subscribed = false; opened_files = SMap.empty; client_id; lsp_initialize_params } in - Hh_logger.info "Adding new persistent connection #%d" new_client.client_id; - (new_client :: clients) + active_clients := IMap.add client_id new_client !active_clients; + Hh_logger.info "Adding new persistent connection #%d" new_client.client_id -let remove_client clients client_id = +let remove_client client_id = Hh_logger.info "Removing persistent connection client #%d" client_id; - List.filter (fun client -> client.client_id != client_id) clients + active_clients := IMap.remove client_id !active_clients -let get_subscribed_clients = List.filter (fun c -> c.subscribed) +let add_client_to_clients clients client_id = client_id :: clients -let get_subscribed_lsp_clients = List.filter (fun c -> 
c.subscribed && c.is_lsp) +let remove_client_from_clients clients client_id = List.filter (fun id -> id != client_id) clients -let update_clients ~clients ~calc_errors_and_warnings = +let get_subscribed_clients = + List.fold_left + (fun acc client_id -> + match get_client client_id with + | Some client when client.subscribed -> client :: acc + | _ -> acc) + [] + +let get_subscribed_lsp_clients = + List.fold_left + (fun acc client_id -> + match get_client client_id with + | Some client when client.subscribed -> client :: acc + | _ -> acc) + [] + +let update_clients ~clients ~errors_reason ~calc_errors_and_warnings = let subscribed_clients = get_subscribed_clients clients in let subscribed_client_count = List.length subscribed_clients in let all_client_count = List.length clients in - if subscribed_clients <> [] - then begin - let errors, warnings = calc_errors_and_warnings () in - let error_count = Errors.ErrorSet.cardinal errors in + if subscribed_clients <> [] then ( + let (errors, warnings) = calc_errors_and_warnings () in + let error_count = Errors.ConcreteLocPrintableErrorSet.cardinal errors in let warning_file_count = Utils_js.FilenameMap.cardinal warnings in Hh_logger.info "sending (%d errors) and (warnings from %d files) to %d subscribed clients (of %d total)" - error_count warning_file_count subscribed_client_count all_client_count; - List.iter (send_errors ~errors ~warnings) subscribed_clients - end + error_count + warning_file_count + subscribed_client_count + all_client_count; + List.iter (send_errors ~errors_reason ~errors ~warnings) subscribed_clients + ) let send_lsp clients json = - clients - |> get_subscribed_lsp_clients - |> List.iter (send_single_lsp json) + clients |> get_subscribed_lsp_clients |> List.iter (send_single_lsp json) let send_start_recheck clients = - clients - |> get_subscribed_clients - |> List.iter send_single_start_recheck + clients |> get_subscribed_clients |> List.iter send_single_start_recheck let send_end_recheck ~lazy_stats clients = - clients - |> get_subscribed_clients - |> List.iter (send_single_end_recheck ~lazy_stats) - -let rec modify_item lst item f = match lst with - | [] -> raise Not_found - | hd::tl -> - (* Use identity, not structural equality *) - if hd == item then - (f hd)::tl - else - hd::(modify_item tl item f) - -let subscribe_client ~clients ~client ~current_errors ~current_warnings = + clients |> get_subscribed_clients |> List.iter (send_single_end_recheck ~lazy_stats) + +let subscribe_client ~client ~current_errors ~current_warnings = Hh_logger.info "Subscribing client #%d to push diagnostics" client.client_id; if client.subscribed then (* noop *) - clients - else begin - send_errors ~errors:current_errors ~warnings:current_warnings client; - modify_item clients client (fun c -> { c with subscribed = true }) - end - -let client_did_open - (clients: single_client list) - (client: single_client) - ~(files: (string * string) Nel.t) - : (single_client list * single_client) option = - Hh_logger.info "Client #%d opened %d file(s)" client.client_id (Nel.length files); + () + else + let errors_reason = Prot.New_subscription in + send_errors ~errors_reason ~errors:current_errors ~warnings:current_warnings client; + client.subscribed <- true + +let client_did_open (client : single_client) ~(files : (string * string) Nel.t) : bool = + (match Nel.length files with + | 1 -> Hh_logger.info "Client #%d opened %s" client.client_id (files |> Nel.hd |> fst) + | len -> Hh_logger.info "Client #%d opened %d files" client.client_id len); let add_file 
acc (filename, content) = SMap.add filename content acc in let new_opened_files = Nel.fold_left add_file client.opened_files files in (* SMap.add ensures physical equality if the map is unchanged, since 4.0.3, * so == is appropriate. *) if new_opened_files == client.opened_files then (* noop *) - None - else - let update_opened_files c = {c with opened_files = new_opened_files} in - let new_client = update_opened_files client in - let new_connections = modify_item clients client update_opened_files in - Some (new_connections, new_client) + false + else ( + client.opened_files <- new_opened_files; + true + ) let client_did_change - (clients: single_client list) - (client: single_client) - (fn: string) - (changes: Lsp.DidChange.textDocumentContentChangeEvent list) - : (single_client list * single_client, string * Utils.callstack) result = - try begin + (client : single_client) + (fn : string) + (changes : Lsp.DidChange.textDocumentContentChangeEvent list) : + (unit, string * Utils.callstack) result = + try let content = SMap.find fn client.opened_files in match Lsp_helpers.apply_changes content changes with | Error (reason, stack) -> Error (reason, stack) | Ok new_content -> let new_opened_files = SMap.add fn new_content client.opened_files in - let update_opened_files c = {c with opened_files = new_opened_files} in - let new_client = update_opened_files client in - let new_connections = modify_item clients client update_opened_files in - Ok (new_connections, new_client) - end with Not_found -> - let stack = Printexc.get_backtrace () in + client.opened_files <- new_opened_files; + Ok () + with Not_found as e -> + let e = Exception.wrap e in + let stack = Exception.get_backtrace_string e in Error (Printf.sprintf "File %s wasn't open to change" fn, Utils.Callstack stack) - -let client_did_close - (clients: single_client list) - (client: single_client) - ~(filenames: string Nel.t) - : (single_client list * single_client) option = - Hh_logger.info "Client #%d closed %d file(s)" client.client_id (Nel.length filenames); +let client_did_close (client : single_client) ~(filenames : string Nel.t) : bool = + (match Nel.length filenames with + | 1 -> Hh_logger.info "Client #%d closed %s" client.client_id (filenames |> Nel.hd) + | len -> Hh_logger.info "Client #%d closed %d files" client.client_id len); let remove_file acc filename = SMap.remove filename acc in let new_opened_files = Nel.fold_left remove_file client.opened_files filenames in (* SMap.remove ensures physical equality if the set is unchanged, * so == is appropriate. 
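   A minimal sketch of the stdlib guarantee being relied on here and in
   client_did_open above (OCaml >= 4.03; filenames and contents hypothetical):

       let content = "let x = 1" in
       let m = SMap.add "foo.js" content SMap.empty in
       assert (SMap.add "foo.js" content m == m);  (* same binding: the same map comes back *)
       assert (SMap.remove "bar.js" m == m)        (* absent key: likewise *)

   so comparing the old and new maps with == is enough to detect a no-op
   open/close without walking the map.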
*) if new_opened_files == client.opened_files then (* noop *) - None - else - let update_opened_files c = {c with opened_files = new_opened_files} in - let new_client = update_opened_files client in - let new_connections = modify_item clients client update_opened_files in - Some (new_connections, new_client) + false + else ( + client.opened_files <- new_opened_files; + true + ) -let get_file (client: single_client) (fn: string) : File_input.t = +let get_file (client : single_client) (fn : string) : File_input.t = let content_opt = SMap.get fn client.opened_files in match content_opt with | None -> File_input.FileName fn | Some content -> File_input.FileContent (Some fn, content) -let get_logging_context client = client.logging_context - -let get_opened_files (clients: single_client list) : SSet.t = +let get_opened_files (clients : t) : SSet.t = let per_file filename _content acc = SSet.add filename acc in - let per_client acc client = SMap.fold per_file client.opened_files acc + let per_client acc client_id = + match get_client client_id with + | None -> acc + | Some client -> SMap.fold per_file client.opened_files acc in List.fold_left per_client SSet.empty clients -let get_client clients client_id = List.find_opt (fun c -> c.client_id = client_id) clients +let get_id client = client.client_id + +let client_snippet_support (client : single_client) = + Lsp.Initialize.( + client.lsp_initialize_params.client_capabilities.textDocument.completion.completionItem + .snippetSupport) diff --git a/src/server/persistent_connection/persistent_connection.mli b/src/server/persistent_connection/persistent_connection.mli index 483d61a2f22..af70a1cc784 100644 --- a/src/server/persistent_connection/persistent_connection.mli +++ b/src/server/persistent_connection/persistent_connection.mli @@ -1,80 +1,79 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module Prot = Persistent_connection_prot - (* Stores all the necessary information about current persistent connections *) type t type single_client -val to_string: t -> string +val empty : t + +val add_client : LspProt.client_id -> Lsp.Initialize.params -> unit -val empty: t +val remove_client : LspProt.client_id -> unit -val add_client: - t -> Prot.client_id -> FlowEventLogger.logging_context -> Lsp.Initialize.params option -> t -val remove_client: - t -> Prot.client_id -> t +val add_client_to_clients : t -> LspProt.client_id -> t + +val remove_client_from_clients : t -> LspProt.client_id -> t (* Send updates to all clients that are subscribed *) -val update_clients: +val update_clients : clients:t -> - calc_errors_and_warnings:(unit -> Errors.ErrorSet.t * Errors.ErrorSet.t Utils_js.FilenameMap.t) -> + errors_reason:LspProt.errors_reason -> + calc_errors_and_warnings: + (unit -> + Errors.ConcreteLocPrintableErrorSet.t + * Errors.ConcreteLocPrintableErrorSet.t Utils_js.FilenameMap.t) -> unit -val send_lsp: - t -> Lsp.lsp_message option * Prot.metadata -> unit -val send_start_recheck: - t -> unit -val send_end_recheck: - lazy_stats:ServerProt.Response.lazy_stats -> t -> unit + +val send_lsp : t -> Lsp.lsp_message option * LspProt.metadata -> unit + +val send_start_recheck : t -> unit + +val send_end_recheck : lazy_stats:ServerProt.Response.lazy_stats -> t -> unit (* Send a message to just one client *) -val send_message: Prot.response -> single_client -> unit -val send_errors_if_subscribed: +val send_response : LspProt.response_with_metadata -> single_client -> unit + +val send_errors_if_subscribed : client:single_client -> - errors:Errors.ErrorSet.t -> - warnings:Errors.ErrorSet.t Utils_js.FilenameMap.t -> unit + errors_reason:LspProt.errors_reason -> + errors:Errors.ConcreteLocPrintableErrorSet.t -> + warnings:Errors.ConcreteLocPrintableErrorSet.t Utils_js.FilenameMap.t -> + unit (* getters/setters on single_client *) -val subscribe_client: - clients:t -> +val subscribe_client : client:single_client -> - current_errors:Errors.ErrorSet.t -> - current_warnings:Errors.ErrorSet.t Utils_js.FilenameMap.t -> t + current_errors:Errors.ConcreteLocPrintableErrorSet.t -> + current_warnings:Errors.ConcreteLocPrintableErrorSet.t Utils_js.FilenameMap.t -> + unit -val client_did_open: - t -> - single_client -> - files:(string * string) Nel.t -> - (t * single_client) option +val client_did_open : single_client -> files:(string * string) Nel.t -> bool -val client_did_change: - t -> +val client_did_change : single_client -> string -> Lsp.DidChange.textDocumentContentChangeEvent list -> - (t * single_client, string * Utils.callstack) result + (unit, string * Utils.callstack) result -val client_did_close: - t -> - single_client -> - filenames:string Nel.t - -> (t * single_client) option - -val get_logging_context: single_client -> FlowEventLogger.logging_context +val client_did_close : single_client -> filenames:string Nel.t -> bool +val get_opened_files : t -> SSet.t (** Returns the set of all opened files across all clients. It's not meaningful to talk about the *content* of those opened files in cases where clients differ. *) -val get_opened_files: t -> SSet.t +val get_file : single_client -> string -> File_input.t (** Returns either FileContent for this file if it was opened by the persistent client, or FileName if it wasn't. 
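   For example, for a client that currently has "foo.js" open with contents
   "1 + 1" (hypothetical values):

       get_file client "foo.js" = File_input.FileContent (Some "foo.js", "1 + 1")
       get_file client "bar.js" = File_input.FileName "bar.js"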
*) -val get_file: single_client -> string -> File_input.t -val get_client: t -> Prot.client_id -> single_client option +val get_client : LspProt.client_id -> single_client option + +val get_id : single_client -> LspProt.client_id + +val client_snippet_support : single_client -> bool diff --git a/src/server/protocol/dune b/src/server/protocol/dune new file mode 100644 index 00000000000..74118dfe014 --- /dev/null +++ b/src/server/protocol/dune @@ -0,0 +1,15 @@ +(library + (name flow_server_protocol) + (wrapped false) + (libraries + flow_autofix_options + flow_common + flow_common_profiling + flow_exit_status + flow_parser_utils_replacement_printer + flow_server_status + flow_server_utils + flow_typing + lsp ; hack + ) +) diff --git a/src/server/protocol/lspProt.ml b/src/server/protocol/lspProt.ml new file mode 100644 index 00000000000..5834643e288 --- /dev/null +++ b/src/server/protocol/lspProt.ml @@ -0,0 +1,181 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type client_id = int + +type error_kind = + | ExpectedError + | UnexpectedError + +type error_info = error_kind * string * Utils.callstack + +type metadata = { + (* when did this work-item get triggered? *) + start_wall_time: float; + (* What was the thing that triggered this work-item *) + start_json_truncated: Hh_json.json; + (* What was the state of the server at the time the work-item was triggered? *) + (* Might be None e.g. if the server was down at the time or if we don't know *) + start_server_status: ServerStatus.status option; + start_watcher_status: FileWatcherStatus.status option; + (* And what was the state of the lspCommand client? Is optional only to save *) + (* space in the obvious cases that don't need explanation. *) + start_lsp_state: string option; + start_lsp_state_reason: string option; + (* If handling the workitem resulted in error, what was that error? *) + error_info: error_info option; + (* If the workitem was handled on the server, how long did it take there? *) + server_profiling: Profiling_js.finished option; + (* and if it had work done on the client, how long there? *) + client_duration: float option; + (* Did the handler for this workitem provide any extra data? *) + extra_data: (string * Hh_json.json) list; + (* The logging context for the server *) + server_logging_context: FlowEventLogger.logging_context option; + (* LSP method (e.g. 'textDocument/completion') *) + lsp_method_name: string; + (* If we're tracking an interaction in the lsp process, this is the id of the interaction *) + interaction_tracking_id: int option; +} + +(** For LSP work-items, we keep metadata about requests, to help us log better telemetry. + After the work has been handled, we fill out the second part of the metadata. +*) +let empty_metadata = + { + start_wall_time = 0.0; + start_server_status = None; + start_watcher_status = None; + start_json_truncated = Hh_json.JSON_Object []; + start_lsp_state = None; + start_lsp_state_reason = None; + error_info = None; + server_profiling = None; + client_duration = None; + extra_data = []; + server_logging_context = None; + lsp_method_name = ""; + interaction_tracking_id = None; + } + +(* This is the reason why we start to do a recheck. Since rechecks can be combined together, there + * may be multiple reasons for a single recheck *) +type recheck_reason = + (* One file changed on disk. 
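   (As a hypothetical illustration of the combining mentioned above: a recheck
   that merges a rebase with a later save could carry the list
   [Rebased { distance = 2; file_count = 40 }; Single_file_changed { filename = "/proj/foo.js" }],
   which the helpers below render verbosely as "Rebased 2 commits & 40 files changed"
   and "1 file changed (/proj/foo.js)", and normalized for logging as "rebased" and
   "singleFileChanged".)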
*) + | Single_file_changed of { filename: string } + (* More than one file changed on disk *) + | Many_files_changed of { file_count: int } + (* If we're using Watchman as the filewatcher, we can tell when the mergebase changed. + * We can differentiate that from Many_files_changed *) + | Rebased of { + distance: int; + file_count: int; + } + (* If try to autocomplete in foo.js and it's dependencies are unchecked, then we start a recheck + * with a reason of Unchecked_dependencies { filename = "/path/to/foo.js"; } *) + | Unchecked_dependencies of { filename: string } + (* A lazy server started from saved state has an old dependency graph and has to update it *) + | Lazy_init_update_deps + (* A lazy server may decided to typecheck some files during init (like Watchman lazy mode will + * typecheck files which have changed since the mergebase) *) + | Lazy_init_typecheck + (* At init when we do a full check *) + | Full_init + +let verbose_string_of_recheck_reason = function + | Single_file_changed { filename } -> Printf.sprintf "1 file changed (%s)" filename + | Many_files_changed { file_count } -> Printf.sprintf "%d files changed" file_count + | Rebased { distance; file_count } -> + Printf.sprintf "Rebased %d commits & %d files changed" distance file_count + | Unchecked_dependencies { filename } -> Printf.sprintf "Unchecked dependencies of %s" filename + | Lazy_init_update_deps -> "Lazy init update deps" + | Lazy_init_typecheck -> "Lazy init typecheck" + | Full_init -> "Full init" + +let normalized_string_of_recheck_reason = function + | Single_file_changed { filename = _ } -> "singleFileChanged" + | Many_files_changed { file_count = _ } -> "manyFilesChanged" + | Rebased { distance = _; file_count = _ } -> "rebased" + | Unchecked_dependencies { filename = _ } -> "uncheckedDependencies" + | Lazy_init_update_deps -> "lazyInitUpdateDeps" + | Lazy_init_typecheck -> "lazyInitTypecheck" + | Full_init -> "fullInit" + +type request = + | Subscribe + | LspToServer of Lsp.lsp_message + +type request_with_metadata = request * metadata + +(* requests, notifications, responses from client *) + +let string_of_request = function + | (Subscribe, _) -> "subscribe" + | (LspToServer msg, _) -> Printf.sprintf "lspToServer %s" (Lsp_fmt.message_name_to_string msg) + +let json_of_request = + Hh_json.( + function + | (Subscribe, _) -> JSON_Object [("method", JSON_String "subscribe")] + | (LspToServer _, metadata) -> metadata.start_json_truncated) + +(* Why is the server sending us a list of errors *) +type errors_reason = + (* Sending all the errors at the end of the recheck *) + | End_of_recheck of { recheck_reasons: recheck_reason list } + (* Streaming errors during recheck *) + | Recheck_streaming of { recheck_reasons: recheck_reason list } + (* Sometimes the env changes, which influences which errors we send to the lsp. For example, we + * only send warnings for open files. When a file is opened or closed, we have to recalculate + * which warnings to send and send the updated set. 
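   As hypothetical wiring, reusing send_notification from
   persistent_connection.ml above, such a push would look like

       send_notification
         (LspProt.Errors { errors; warnings; errors_reason = LspProt.Env_change })
         client

   whereas the batch sent at the end of a recheck is tagged End_of_recheck with
   the recheck_reasons that triggered it.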
*) + | Env_change + (* The persistent client just subscribed to errors, so was sent the initial error list *) + | New_subscription + +type response = LspFromServer of Lsp.lsp_message option + +type response_with_metadata = response * metadata + +type notification_from_server = + | Errors of { + errors: Errors.ConcreteLocPrintableErrorSet.t; + warnings: Errors.ConcreteLocPrintableErrorSet.t; + errors_reason: errors_reason; + } + | StartRecheck + | EndRecheck of ServerProt.Response.lazy_stats + (* only used for the subset of exists which client handles *) + | ServerExit of FlowExitStatus.t + | Please_hold of (ServerStatus.status * FileWatcherStatus.status) + (* monitor is about to close the connection *) + | EOF + +type message_from_server = + | RequestResponse of response_with_metadata + | NotificationFromServer of notification_from_server + +let string_of_response = function + | LspFromServer None -> "lspFromServer None" + | LspFromServer (Some msg) -> + Printf.sprintf "lspFromServer %s" (Lsp_fmt.message_name_to_string msg) + +let string_of_message_from_server = function + | RequestResponse (response, _) -> string_of_response response + | NotificationFromServer notification -> + begin + match notification with + | Errors _ -> "errors" + | StartRecheck -> "startRecheck" + | EndRecheck _ -> "endRecheck" + | ServerExit code -> "serverExit_" ^ FlowExitStatus.to_string code + | Please_hold (server_status, watcher_status) -> + Printf.sprintf + "pleaseHold_server=%s_watcher=%s" + (ServerStatus.string_of_status server_status) + (FileWatcherStatus.string_of_status watcher_status) + | EOF -> "EOF" + end diff --git a/src/server/protocol/monitorProt.ml b/src/server/protocol/monitorProt.ml index 53a9e9a6793..7268b56cef2 100644 --- a/src/server/protocol/monitorProt.ml +++ b/src/server/protocol/monitorProt.ml @@ -1,53 +1,60 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -(* The Flow server monitor uses this module to communicate with the server and with clients *) - -module PersistentProt = Persistent_connection_prot - (* Ephemeral socket connections expect a response to their requests. We use request_id to indicate * to which request a given response is replying *) type request_id = string -type please_die_reason = -| MonitorExiting of (FlowExitStatus.t * string) +type file_watcher_metadata = { + total_update_distance: int; + changed_mergebase: bool; +} + +let empty_file_watcher_metadata = { total_update_distance = 0; changed_mergebase = false } + +let merge_file_watcher_metadata a b = + { + total_update_distance = a.total_update_distance + b.total_update_distance; + changed_mergebase = a.changed_mergebase || b.changed_mergebase; + } + +type please_die_reason = MonitorExiting of (FlowExitStatus.t * string) (* These are the messages that the monitor sends to the server *) type monitor_to_server_message = -(* A request from an ephemeral socket connection. It expects a response *) -| Request of request_id * ServerProt.Request.command_with_context -(* A notification that there is a new persistent socket connection *) -| NewPersistentConnection of - PersistentProt.client_id * FlowEventLogger.logging_context * Lsp.Initialize.params option -(* A request from a persistent socket connection. 
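   A minimal sketch of the payloads the monitor now relays on behalf of the
   server (LspProt.message_from_server, carried by PersistentConnectionResponse
   below); wrap_reply and recheck_started are illustrative names only:

       let wrap_reply (msg : Lsp.lsp_message) (metadata : LspProt.metadata) :
           LspProt.message_from_server =
         LspProt.RequestResponse (LspProt.LspFromServer (Some msg), metadata)

       let recheck_started : LspProt.message_from_server =
         LspProt.NotificationFromServer LspProt.StartRecheck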
It does not expect a response *) -| PersistentConnectionRequest of PersistentProt.client_id * PersistentProt.request -(* A notification that a persistent socket connection is dead *) -| DeadPersistentConnection of PersistentProt.client_id -(* The file watcher has noticed changes *) -| FileWatcherNotification of SSet.t -(* Monitor wants to kill the server but first asks nicely for the server to honorably kill itself *) -| PleaseDie of please_die_reason + (* A request from an ephemeral socket connection. It expects a response *) + | Request of request_id * ServerProt.Request.command_with_context + (* A notification that there is a new persistent socket connection *) + | NewPersistentConnection of LspProt.client_id * Lsp.Initialize.params + (* A request from a persistent socket connection. It does not expect a response *) + | PersistentConnectionRequest of LspProt.client_id * LspProt.request_with_metadata + (* A notification that a persistent socket connection is dead *) + | DeadPersistentConnection of LspProt.client_id + (* The file watcher has noticed changes *) + | FileWatcherNotification of SSet.t * file_watcher_metadata option + (* Monitor wants to kill the server but first asks nicely for the server to honorably kill itself *) + | PleaseDie of please_die_reason (* These are the messages that the server sends to the monitor *) type server_to_monitor_message = -(* A response to an ephemeral socket's request *) -| Response of request_id * ServerProt.Response.response -(* An exception was thrown while processing the request *) -| RequestFailed of request_id * string -(* A response to a persistent socket connection *) -| PersistentConnectionResponse of PersistentProt.client_id * PersistentProt.response -(* A notification of the server's current status *) -| StatusUpdate of ServerStatus.status + (* A response to an ephemeral socket's request *) + | Response of request_id * ServerProt.Response.response + (* An exception was thrown while processing the request *) + | RequestFailed of request_id * string + (* A response to a persistent socket connection *) + | PersistentConnectionResponse of LspProt.client_id * LspProt.message_from_server + (* A notification of the server's current status *) + | StatusUpdate of ServerStatus.status (* These are the messages that the server sends to an ephemeral socket connection *) type monitor_to_client_message = -(* The response from the server *) -| Data of ServerProt.Response.response -(* The server threw an exception while processing the request *) -| ServerException of string -(* The server is currently busy. Please wait for a response *) -| Please_hold of (ServerStatus.status * FileWatcherStatus.status) + (* The response from the server *) + | Data of ServerProt.Response.response + (* The server threw an exception while processing the request *) + | ServerException of string + (* The server is currently busy. Please wait for a response *) + | Please_hold of (ServerStatus.status * FileWatcherStatus.status) diff --git a/src/server/protocol/persistent_connection_prot.ml b/src/server/protocol/persistent_connection_prot.ml deleted file mode 100644 index 469dc8dabd2..00000000000 --- a/src/server/protocol/persistent_connection_prot.ml +++ /dev/null @@ -1,93 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -type client_id = int - -type error_kind = ExpectedError | UnexpectedError -type error_info = error_kind * string * Utils.callstack - -(** For LSP work-items, we keep metadata about requests, to help us log better telemetry. - After the work has been handled, we fill out the second part of the metadata. -*) -type metadata = { - (* when did this work-item get triggered? *) - start_wall_time: float; - (* What was the thing that triggered this work-item *) - start_json_truncated: Hh_json.json; - (* What was the state of the server at the time the work-item was triggered? *) - (* Might be None e.g. if the server was down at the time or if we don't know *) - start_server_status: ServerStatus.status option; - start_watcher_status: FileWatcherStatus.status option; - (* And what was the state of the lspCommand client? Is optional only to save *) - (* space in the obvious cases that don't need explanation. *) - start_lsp_state: string option; - start_lsp_state_reason: string option; - - (* If handling the workitem resulted in error, what was that error? *) - error_info: error_info option; - (* If the workitem was handled on the server, how long did it take there? *) - server_profiling: Profiling_js.finished option; - (* and if it had work done on the client, how long there? *) - client_duration: float option; - (* Did the handler for this workitem provide any extra data? *) - extra_data: (string * Hh_json.json) list; - (* The logging context for the server *) - server_logging_context: FlowEventLogger.logging_context option; -} - - -type request = - | Subscribe - | Autocomplete of (File_input.t * (* request id *) int) - | DidOpen of (* filename *) string Nel.t - | DidClose of (* filename *) string Nel.t - | LspToServer of Lsp.lsp_message * metadata (* requests, notifications, responses from client *) - -let string_of_request = function -| Subscribe -> "subscribe" -| Autocomplete _ -> "autocomplete" -| DidOpen _ -> "didOpen" -| DidClose _ -> "didClose" -| LspToServer _ -> "lspToServer" - -let json_of_request = let open Hh_json in function -| Subscribe -> JSON_Object ["method", JSON_String "subscribe"] -| Autocomplete (f, _) -> JSON_Object ["method", JSON_String "autocomplete"; - "file", JSON_String (File_input.filename_of_file_input f)] -| DidOpen files -> JSON_Object ["method", JSON_String "didOpen"; - "files", JSON_Array (files |> Nel.to_list |> List.map Hh_json.string_)] -| DidClose files -> JSON_Object ["method", JSON_String "didClose"; - "files", JSON_Array (files |> Nel.to_list |> List.map Hh_json.string_)] -| LspToServer (_, metadata) -> metadata.start_json_truncated - -type response = - | Errors of {errors: Errors.ErrorSet.t; warnings: Errors.ErrorSet.t} - | StartRecheck - | EndRecheck of ServerProt.Response.lazy_stats - | AutocompleteResult of (ServerProt.Response.autocomplete_response * (* request id *) int) - | DidOpenAck - | DidCloseAck - | ServerExit of FlowExitStatus.t (* only used for the subset of exists which client handles *) - | LspFromServer of Lsp.lsp_message option * metadata - | Please_hold of (ServerStatus.status * FileWatcherStatus.status) - | EOF (* monitor is about to close the connection *) - -let string_of_response = function -| Errors _ -> "errors" -| StartRecheck -> "startRecheck" -| EndRecheck _ -> "endRecheck" -| AutocompleteResult _ -> "autocompleteResult" -| DidOpenAck -> "didOpenAck" -| DidCloseAck -> "didCloseAck" -| ServerExit code -> "serverExit_" ^ (FlowExitStatus.to_string code) -| LspFromServer (None,_) -> "lspFromServer None" -| LspFromServer (Some 
msg,_) -> Printf.sprintf "lspFromServer %s" - (Lsp_fmt.message_name_to_string msg) -| Please_hold (server_status, watcher_status) -> Printf.sprintf "pleaseHold_server=%s_watcher=%s" - (ServerStatus.string_of_status server_status) - (FileWatcherStatus.string_of_status watcher_status) -| EOF -> "EOF" diff --git a/src/server/protocol/serverProt.ml b/src/server/protocol/serverProt.ml index 4682b6fb614..c12b533105f 100644 --- a/src/server/protocol/serverProt.ml +++ b/src/server/protocol/serverProt.ml @@ -1,87 +1,187 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Request = struct - type refactor_variant = - | RENAME of string (* new name *) + type refactor_variant = RENAME of string + + (* new name *) type command = - | AUTOCOMPLETE of File_input.t - | CHECK_FILE of - File_input.t * - Verbose.t option * - bool * (* force *) - bool (* include_warnings *) - | COVERAGE of File_input.t * bool (* force *) - | CYCLE of string - | DUMP_TYPES of File_input.t - | FIND_MODULE of string * string - | FIND_REFS of File_input.t * int * int * bool * bool (* filename, line, char, global, multi_hop *) - | GEN_FLOW_FILES of File_input.t list * bool (* include_warnings *) - | GET_DEF of File_input.t * int * int (* filename, line, char *) - | GET_IMPORTS of string list - | INFER_TYPE of - File_input.t * (* filename|content *) - int * (* line *) - int * (* char *) - Verbose.t option * - bool (* expand type aliases *) - | PORT of string list - | REFACTOR of File_input.t * int * int * refactor_variant (* filename, line, char, refactor variant *) - | STATUS of Path.t * bool (* include_warnings *) - | FORCE_RECHECK of { files: string list; focus:bool; profile:bool } - | SUGGEST of File_input.t - | SAVE_STATE of Path.t + | AUTOCOMPLETE of { + input: File_input.t; + trigger_character: string option; + wait_for_recheck: bool option; + } + | AUTOFIX_EXPORTS of { + input: File_input.t; + verbose: Verbose.t option; + wait_for_recheck: bool option; + } + | CHECK_FILE of { + input: File_input.t; + verbose: Verbose.t option; + force: bool; + include_warnings: bool; + wait_for_recheck: bool option; + } + | COVERAGE of { + input: File_input.t; + force: bool; + wait_for_recheck: bool option; + trust: bool; + } + | BATCH_COVERAGE of { + batch: string list; + wait_for_recheck: bool option; + trust: bool; + } + | CYCLE of { + filename: string; + types_only: bool; + } + | DUMP_TYPES of { + input: File_input.t; + wait_for_recheck: bool option; + } + | FIND_MODULE of { + moduleref: string; + filename: string; + wait_for_recheck: bool option; + } + | FIND_REFS of { + filename: File_input.t; + line: int; + char: int; + global: bool; + multi_hop: bool; + } + | FORCE_RECHECK of { + files: string list; + focus: bool; + profile: bool; + } + | GET_DEF of { + filename: File_input.t; + line: int; + char: int; + wait_for_recheck: bool option; + } + | GET_IMPORTS of { + module_names: string list; + wait_for_recheck: bool option; + } + | GRAPH_DEP_GRAPH of { + root: string; + strip_root: bool; + outfile: string; + types_only: bool; + } + | INFER_TYPE of { + input: File_input.t; + line: int; + char: int; + verbose: Verbose.t option; + expand_aliases: bool; + omit_targ_defaults: bool; + wait_for_recheck: bool option; + } + | INSERT_TYPE of { + input: File_input.t; + target: Loc.t; + verbose: Verbose.t option; + location_is_strict: bool; + ambiguity_strategy: 
Autofix_options.ambiguity_strategy; + wait_for_recheck: bool option; + expand_aliases: bool; + omit_targ_defaults: bool; + } + | RAGE of { files: string list } + | REFACTOR of { + input: File_input.t; + line: int; + char: int; + refactor_variant: refactor_variant; + } + | SAVE_STATE of { outfile: Path.t } + | STATUS of { + client_root: Path.t; + include_warnings: bool; + } + | SUGGEST of { + input: File_input.t; + wait_for_recheck: bool option; + } let string_of_refactor_variant = function | RENAME new_name -> Printf.sprintf "rename(%s)" new_name let to_string = function - | AUTOCOMPLETE fn -> - Printf.sprintf "autocomplete %s" (File_input.filename_of_file_input fn) - | CHECK_FILE (fn, _, _, _) -> - Printf.sprintf "check %s" (File_input.filename_of_file_input fn) - | COVERAGE (fn, _) -> - Printf.sprintf "coverage %s" (File_input.filename_of_file_input fn) - | CYCLE fn -> - Printf.sprintf "cycle %s" fn - | DUMP_TYPES (fn) -> - Printf.sprintf "dump-types %s" (File_input.filename_of_file_input fn) - | FIND_MODULE (moduleref, filename) -> + | AUTOCOMPLETE { input; wait_for_recheck = _; trigger_character = _ } -> + Printf.sprintf "autocomplete %s" (File_input.filename_of_file_input input) + | AUTOFIX_EXPORTS { input; _ } -> + Printf.sprintf "autofix exports %s" (File_input.filename_of_file_input input) + | CHECK_FILE { input; verbose = _; force = _; include_warnings = _; wait_for_recheck = _ } -> + Printf.sprintf "check %s" (File_input.filename_of_file_input input) + | BATCH_COVERAGE { batch = _; wait_for_recheck = _; trust = _ } -> + Printf.sprintf "%s" "batch-coverage" + | COVERAGE { input; force = _; wait_for_recheck = _; trust = _ } -> + Printf.sprintf "coverage %s" (File_input.filename_of_file_input input) + | CYCLE { filename; types_only } -> + Printf.sprintf "cycle (types_only: %b) %s" types_only filename + | GRAPH_DEP_GRAPH _ -> Printf.sprintf "dep-graph" + | DUMP_TYPES { input; wait_for_recheck = _ } -> + Printf.sprintf "dump-types %s" (File_input.filename_of_file_input input) + | FIND_MODULE { moduleref; filename; wait_for_recheck = _ } -> Printf.sprintf "find-module %s %s" moduleref filename - | FIND_REFS (fn, line, char, global, multi_hop) -> - Printf.sprintf "find-refs %s:%d:%d:%B:%B" (File_input.filename_of_file_input fn) line char global multi_hop - | FORCE_RECHECK {files; focus; profile=_} -> + | FIND_REFS { filename; line; char; global; multi_hop } -> Printf.sprintf - "force-recheck %s (focus = %b)" (String.concat " " files) focus - | GEN_FLOW_FILES (files, _) -> - Printf.sprintf "gen-flow-files %s" - (files |> List.map File_input.filename_of_file_input |> String.concat " ") - | GET_DEF (fn, line, char) -> - Printf.sprintf "get-def %s:%d:%d" - (File_input.filename_of_file_input fn) line char - | GET_IMPORTS module_names -> + "find-refs %s:%d:%d:%B:%B" + (File_input.filename_of_file_input filename) + line + char + global + multi_hop + | FORCE_RECHECK { files; focus; profile = _ } -> + Printf.sprintf "force-recheck %s (focus = %b)" (String.concat " " files) focus + | GET_DEF { filename; line; char; wait_for_recheck = _ } -> + Printf.sprintf "get-def %s:%d:%d" (File_input.filename_of_file_input filename) line char + | GET_IMPORTS { module_names; wait_for_recheck = _ } -> Printf.sprintf "get-imports %s" (String.concat " " module_names) - | INFER_TYPE (fn, line, char, _, _) -> - Printf.sprintf "type-at-pos %s:%d:%d" - (File_input.filename_of_file_input fn) line char - | PORT (files) -> - Printf.sprintf "port %s" (String.concat " " files) - | REFACTOR (fn, line, char, kind) -> - 
Printf.sprintf "refactor %s:%d:%d:%s" - (File_input.filename_of_file_input fn) + | INFER_TYPE + { + input; + line; + char; + verbose = _; + expand_aliases = _; + omit_targ_defaults = _; + wait_for_recheck = _; + } -> + Printf.sprintf "type-at-pos %s:%d:%d" (File_input.filename_of_file_input input) line char + | INSERT_TYPE { input; target; _ } -> + Loc.( + Printf.sprintf + "autofix insert-type %s:%d:%d-%d:%d" + (File_input.filename_of_file_input input) + target.start.line + target.start.column + target._end.line + target._end.column) + | RAGE { files } -> Printf.sprintf "rage %s" (String.concat " " files) + | REFACTOR { input; line; char; refactor_variant } -> + Printf.sprintf + "refactor %s:%d:%d:%s" + (File_input.filename_of_file_input input) line char - (string_of_refactor_variant kind) - | STATUS (_, _) -> - "status" - | SUGGEST (_) -> - "suggest" - | SAVE_STATE out -> Printf.sprintf "save-state %s" (Path.to_string out) + (string_of_refactor_variant refactor_variant) + | STATUS { client_root = _; include_warnings = _ } -> "status" + | SUGGEST _ -> "suggest" + | SAVE_STATE { outfile } -> Printf.sprintf "save-state %s" (Path.to_string outfile) type command_with_context = { client_logging_context: FlowEventLogger.logging_context; @@ -90,86 +190,78 @@ module Request = struct end module Response = struct + type lazy_stats = { + lazy_mode: Options.lazy_mode; + checked_files: int; + total_files: int; + } (* Details about functions to be added in json output *) type func_param_result = { - param_name : string; - param_ty : string; - } + param_name: string; + param_ty: string; + } type func_details_result = { - param_tys : func_param_result list; - return_ty : string; - } + param_tys: func_param_result list; + return_ty: string; + } (* Results ready to be displayed to the user *) type complete_autocomplete_result = { - res_loc : Loc.t; - res_ty : string; - res_name : string; - func_details : func_details_result option; - } - - type autocomplete_response = ( - complete_autocomplete_result list, - string - ) result - - type coverage_response = ( - (Loc.t * bool) list, - string - ) result - - type dump_types_response = ( - (Loc.t * string) list, - string - ) result + res_loc: Loc.t; + res_ty: Loc.t * string; + res_kind: Lsp.Completion.completionItemKind option; + res_name: string; + func_details: func_details_result option; + } + + type autocomplete_response = (complete_autocomplete_result list, string) result + + type autofix_exports_response = (Replacement_printer.patch * string list, string) result + + type coverage_response = ((Loc.t * Coverage_response.expression_coverage) list, string) result + + type batch_coverage_response = + ((File_key.t * Coverage_response.file_coverage) list, string) result + + type dump_types_response = ((Loc.t * string) list, string) result (* name of the symbol, locations where it appears, or None if no symbols were found *) type find_refs_success = (string * Loc.t list) option + type find_refs_response = (find_refs_success, string) result type get_def_response = (Loc.t, string) result + type get_imports_response = Loc.t Nel.t Modulename.Map.t SMap.t * SSet.t - type infer_type_response = ( - Loc.t * Ty.t option, - string - ) result + + type infer_type_response = (Loc.t * Ty.t option, string) result + + type insert_type_response = (Replacement_printer.patch, string) result type textedit = Loc.t * string - type refactor_ok = { - refactor_edits: textedit list; - } + + type refactor_ok = { refactor_edits: textedit list } + + type rage_response = (string * string) list 
type refactor_response = (refactor_ok option, string) result type suggest_result = - | Suggest_Ok of { - tc_errors: Errors.ErrorSet.t; - tc_warnings: Errors.ErrorSet.t; - suggest_warnings: Errors.ErrorSet.t; - annotated_program: (Loc.t, Loc.t) Flow_ast.program; - } - | Suggest_Error of Errors.ErrorSet.t - - type suggest_response = ( - suggest_result, - string - ) result - - type cycle_response = (cycle_response_subgraph, string) result - and cycle_response_subgraph = (string * string list) list - - type gen_flow_files_error = - | GenFlowFiles_TypecheckError of {errors: Errors.ErrorSet.t; warnings: Errors.ErrorSet.t} - | GenFlowFiles_UnexpectedError of string - type gen_flow_files_result = - | GenFlowFiles_FlowFile of string - | GenFlowFiles_NonFlowFile - type gen_flow_files_response = - ((string * gen_flow_files_result) list, gen_flow_files_error) result - - type port_response = (string, exn) result SMap.t + | Suggest_Ok of { + tc_errors: Errors.ConcreteLocPrintableErrorSet.t; + tc_warnings: Errors.ConcreteLocPrintableErrorSet.t; + suggest_warnings: Errors.ConcreteLocPrintableErrorSet.t; + file_patch: Replacement_printer.patch; + } + | Suggest_Error of Errors.ConcreteLocPrintableErrorSet.t + + type suggest_response = (suggest_result, string) result + + type graph_response = (graph_response_subgraph, string) result + + and graph_response_subgraph = (string * string list) list type directory_mismatch = { server: Path.t; @@ -177,56 +269,63 @@ module Response = struct } type status_response = - | DIRECTORY_MISMATCH of directory_mismatch - | ERRORS of {errors: Errors.ErrorSet.t; warnings: Errors.ErrorSet.t} - | NO_ERRORS - | NOT_COVERED - - type lazy_stats = { - lazy_mode: Options.lazy_mode option; - checked_files: int; - total_files: int; - } + | DIRECTORY_MISMATCH of directory_mismatch + | ERRORS of { + errors: Errors.ConcreteLocPrintableErrorSet.t; + warnings: Errors.ConcreteLocPrintableErrorSet.t; + suppressed_errors: (Loc.t Errors.printable_error * Loc_collections.LocSet.t) list; + } + | NO_ERRORS + | NOT_COVERED type check_file_response = status_response type find_module_response = File_key.t option type response = - | AUTOCOMPLETE of autocomplete_response - | CHECK_FILE of check_file_response - | COVERAGE of coverage_response - | CYCLE of cycle_response - | DUMP_TYPES of dump_types_response - | FIND_MODULE of find_module_response - | FIND_REFS of find_refs_response - | GEN_FLOW_FILES of gen_flow_files_response - | GET_DEF of get_def_response - | GET_IMPORTS of get_imports_response - | INFER_TYPE of infer_type_response - | PORT of port_response - | REFACTOR of refactor_response - | STATUS of { status_response: status_response; lazy_stats: lazy_stats } - | FORCE_RECHECK of Profiling_js.finished option - | SUGGEST of suggest_response - | SAVE_STATE of (unit, string) result + | AUTOCOMPLETE of autocomplete_response + | AUTOFIX_EXPORTS of autofix_exports_response + | CHECK_FILE of check_file_response + | COVERAGE of coverage_response + | BATCH_COVERAGE of batch_coverage_response + | CYCLE of graph_response + | GRAPH_DEP_GRAPH of (unit, string) result + | DUMP_TYPES of dump_types_response + | FIND_MODULE of find_module_response + | FIND_REFS of find_refs_response + | FORCE_RECHECK of Profiling_js.finished option + | GET_DEF of get_def_response + | GET_IMPORTS of get_imports_response + | INFER_TYPE of infer_type_response + | INSERT_TYPE of insert_type_response + | RAGE of rage_response + | REFACTOR of refactor_response + | STATUS of { + status_response: status_response; + lazy_stats: 
lazy_stats; + } + | SUGGEST of suggest_response + | SAVE_STATE of (unit, string) result let to_string = function - | AUTOCOMPLETE _ -> "autocomplete response" - | CHECK_FILE _ -> "check_file response" - | COVERAGE _ -> "coverage response" - | CYCLE _ -> "cycle reponse" - | DUMP_TYPES _ -> "dump_types response" - | FIND_MODULE _ -> "find_module response" - | FIND_REFS _ -> "find_refs response" - | GEN_FLOW_FILES _ -> "gen_flow_files response" - | GET_DEF _ -> "get_def response" - | GET_IMPORTS _ -> "get_imports response" - | INFER_TYPE _ -> "infer_type response" - | PORT _ -> "port response" - | REFACTOR _ -> "refactor response" - | STATUS _ -> "status response" - | FORCE_RECHECK _ -> "force_recheck response" - | SUGGEST _ -> "suggest response" - | SAVE_STATE _ -> "save_state response" + | AUTOCOMPLETE _ -> "autocomplete response" + | AUTOFIX_EXPORTS _ -> "autofix exports response" + | CHECK_FILE _ -> "check_file response" + | COVERAGE _ -> "coverage response" + | BATCH_COVERAGE _ -> "batch-coverage response" + | CYCLE _ -> "cycle response" + | GRAPH_DEP_GRAPH _ -> "dep-graph response" + | DUMP_TYPES _ -> "dump_types response" + | FIND_MODULE _ -> "find_module response" + | FIND_REFS _ -> "find_refs response" + | FORCE_RECHECK _ -> "force_recheck response" + | GET_DEF _ -> "get_def response" + | GET_IMPORTS _ -> "get_imports response" + | INFER_TYPE _ -> "infer_type response" + | INSERT_TYPE _ -> "insert_type response" + | RAGE _ -> "rage response" + | REFACTOR _ -> "refactor response" + | STATUS _ -> "status response" + | SUGGEST _ -> "suggest response" + | SAVE_STATE _ -> "save_state response" end diff --git a/src/server/protocol/socketHandshake.ml b/src/server/protocol/socketHandshake.ml index 534c93e86e0..61a36f6c2a4 100644 --- a/src/server/protocol/socketHandshake.ml +++ b/src/server/protocol/socketHandshake.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,9 +10,10 @@ type build_id = string -let build_revision = match Build_id.build_revision with - | "" -> Flow_version.version - | x -> x +let build_revision = + match Build_id.build_revision with + | "" -> Flow_version.version + | x -> x (* * Handshake @@ -30,111 +31,163 @@ let build_revision = match Build_id.build_revision with * - might persist and leave the connection open (e.g. ok) *) -type client_handshake_wire = (string * string) -type server_handshake_wire = (string * string option) +type client_handshake_wire = string * string + +type server_handshake_wire = string * string option + +(* What to do on a version mismatch *) +type version_mismatch_strategy = + | Always_stop_server (* Always stop the server *) + | Stop_server_if_older (* Stop the server if it is older than the client. Otherwise send an error *) + | Error_client + +(* Don't stop the server, just send an error to the client *) type client_to_monitor_1 = { client_build_id: build_id; - is_stop_request: bool; (* are we requesting the server to stop? *) + (* Build_id.build_revision for the client *) + client_version: string; + (* Flow_version.version for the client *) + is_stop_request: bool; + (* are we requesting the server to stop? 
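   (On the version fields above, a worked example of the compatibility scheme,
   with hypothetical versions: a newer client sends both the
   version_mismatch_strategy string and the deprecated boolean
   server_should_exit_if_version_mismatch, which client_to_monitor_1__to_json
   below derives as true for Always_stop_server and Stop_server_if_older and
   false for Error_client, so an older server that predates the string still
   behaves sensibly; conversely, a strategy string the reader does not recognize
   parses conservatively:

       string_to_version_mismatch_strategy "Some_future_strategy" = Error_client )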
*) server_should_hangup_if_still_initializing: bool; - server_should_exit_if_version_mismatch: bool; + version_mismatch_strategy: version_mismatch_strategy; (* What to do on a version mismatch *) } type server_intent = - | Server_will_exit (* e.g. after receiving a stop request *) - | Server_will_hangup (* e.g. upon binary mismatch *) - | Server_will_continue (* e.g. upon success *) + | Server_will_exit (* after receiving a stop request or as a result of a version mismatch *) + | Server_will_hangup (* version mismatch but neither client nor server should restart *) + | Server_will_continue + +(* upon success *) type monitor_to_client_1 = { server_build_id: build_id; - server_bin: string; (* filepath to the server binary *) + (* Build_id.build_revision for the server *) + server_bin: string; + (* filepath to the server binary *) server_intent: server_intent; + (* The result of the handshake *) + server_version: string; (* Flow_version.version for the server *) } type client_type = | Ephemeral (* a client that sends a request, gets a response, and disconnects *) - | Persistent of { - logging_context: FlowEventLogger.logging_context; - lsp: Lsp.Initialize.params option; - } + | Persistent of { lsp_init_params: Lsp.Initialize.params } -type client_to_monitor_2 = { - client_type: client_type; -} +type client_to_monitor_2 = { client_type: client_type } type monitor_to_client_2 = | Server_has_too_many_clients | Server_still_initializing of (ServerStatus.status * FileWatcherStatus.status) - | Server_ready - -type client_handshake = (client_to_monitor_1 * client_to_monitor_2) -type server_handshake = (monitor_to_client_1 * monitor_to_client_2 option) - -let client_to_monitor_1__to_json (c: client_to_monitor_1) : Hh_json.json = - let open Hh_json in - JSON_Object [ - "client_build_id", JSON_String c.client_build_id; - "is_stop_request", JSON_Bool c.is_stop_request; - "server_should_hangup_if_still_initializing", - JSON_Bool c.server_should_hangup_if_still_initializing; - "server_should_exit_if_version_mismatch", - JSON_Bool c.server_should_exit_if_version_mismatch; - ] - -let default_client_to_monitor_1 = { - client_build_id = "INCOMPATIBLE"; - is_stop_request = false; - server_should_hangup_if_still_initializing = false; - server_should_exit_if_version_mismatch = true; -} - -let json_to__client_to_monitor_1 (json: Hh_json.json) : client_to_monitor_1 = - let open Hh_json_helpers in - let json = Some json in - let d = default_client_to_monitor_1 in - let client_build_id = - Jget.string_d json "client_build_id" ~default:d.client_build_id in - let is_stop_request = - Jget.bool_d json "is_stop_request" ~default:d.is_stop_request in - let server_should_hangup_if_still_initializing = - Jget.bool_d json "server_should_hangup_if_still_initializing" - ~default:d.server_should_hangup_if_still_initializing in - let server_should_exit_if_version_mismatch = - Jget.bool_d json "server_should_exit_if_version_mismatch" - ~default:d.server_should_exit_if_version_mismatch - in - { client_build_id; - is_stop_request; - server_should_hangup_if_still_initializing; - server_should_exit_if_version_mismatch; +type client_handshake = client_to_monitor_1 * client_to_monitor_2 + +type server_handshake = monitor_to_client_1 * monitor_to_client_2 option + +let version_mismatch_strategy_to_string = function + | Always_stop_server -> "Always_stop_server" + | Stop_server_if_older -> "Stop_server_if_older" + | Error_client -> "Error_client" + +let string_to_version_mismatch_strategy = function + | "Always_stop_server" -> 
Always_stop_server + | "Stop_server_if_older" -> Stop_server_if_older + | "Error_client" -> Error_client + | _ -> + (* If someone adds a new strategy in the future old servers shouldn't explode. *) + Error_client + +let client_to_monitor_1__to_json (c : client_to_monitor_1) : Hh_json.json = + Hh_json.( + JSON_Object + [ + ("client_build_id", JSON_String c.client_build_id); + ("is_stop_request", JSON_Bool c.is_stop_request); + ( "server_should_hangup_if_still_initializing", + JSON_Bool c.server_should_hangup_if_still_initializing ); + ("client_version", JSON_String c.client_version); + ( "version_mismatch_strategy", + JSON_String (version_mismatch_strategy_to_string c.version_mismatch_strategy) ); + (* Deprecated - sent to talk to old servers *) + ( "server_should_exit_if_version_mismatch", + JSON_Bool + (match c.version_mismatch_strategy with + | Always_stop_server -> true + | Stop_server_if_older -> true + (* Any server reading this field is older than this client *) + | Error_client -> false) ); + ]) + +let default_client_to_monitor_1 = + { + client_build_id = "INCOMPATIBLE"; + is_stop_request = false; + server_should_hangup_if_still_initializing = false; + client_version = "0.0.0"; + version_mismatch_strategy = Error_client; } -let monitor_to_client_1__to_json (m: monitor_to_client_1) : Hh_json.json = - let open Hh_json in - let intent_to_string intent = match intent with - | Server_will_exit -> "Server_will_exit" - | Server_will_hangup -> "Server_will_hangup" - | Server_will_continue -> "Server_will_continue" - in - JSON_Object [ - "server_build_id", JSON_String m.server_build_id; - "server_bin", JSON_String m.server_bin; - "server_intent", JSON_String (m.server_intent |> intent_to_string); - ] - -let json_to__monitor_to_client_1 (json: Hh_json.json) : monitor_to_client_1 = - let open Hh_json_helpers in - let json = Some json in - let string_to_intent s = match s with - | "Server_will_exit" -> Server_will_exit - | "Server_will_hangup" -> Server_will_hangup - | "Server_will_continue" -> Server_will_continue - | _ -> raise (Jget.Parse ("unknown intent " ^ s)) - in - let server_build_id = Jget.string_exn json "server_build_id" in - let server_bin = Jget.string_exn json "server_bin" in - let server_intent = Jget.string_exn json "server_intent" |> string_to_intent - in - { server_build_id; server_bin; server_intent; } +let json_to__client_to_monitor_1 (json : Hh_json.json) : client_to_monitor_1 = + Hh_json_helpers.( + let json = Some json in + let d = default_client_to_monitor_1 in + let client_build_id = Jget.string_d json "client_build_id" ~default:d.client_build_id in + let is_stop_request = Jget.bool_d json "is_stop_request" ~default:d.is_stop_request in + let server_should_hangup_if_still_initializing = + Jget.bool_d + json + "server_should_hangup_if_still_initializing" + ~default:d.server_should_hangup_if_still_initializing + in + let client_version = Jget.string_d json "client_version" ~default:d.client_version in + let version_mismatch_strategy = + match Jget.string_opt json "version_mismatch_strategy" with + | Some strategy -> string_to_version_mismatch_strategy strategy + | None -> + (match Jget.bool_opt json "server_should_exit_if_version_mismatch" with + | Some true -> Always_stop_server + | Some false + | None -> + Error_client) + in + { + client_build_id; + is_stop_request; + server_should_hangup_if_still_initializing; + client_version; + version_mismatch_strategy; + }) + +let monitor_to_client_1__to_json (m : monitor_to_client_1) : Hh_json.json = + Hh_json.( + let 
intent_to_string intent = + match intent with + | Server_will_exit -> "Server_will_exit" + | Server_will_hangup -> "Server_will_hangup" + | Server_will_continue -> "Server_will_continue" + in + JSON_Object + [ + ("server_build_id", JSON_String m.server_build_id); + ("server_bin", JSON_String m.server_bin); + ("server_intent", JSON_String (m.server_intent |> intent_to_string)); + ("server_version", JSON_String m.server_version); + ]) + +let json_to__monitor_to_client_1 (json : Hh_json.json) : monitor_to_client_1 = + Hh_json_helpers.( + let json = Some json in + let string_to_intent s = + match s with + | "Server_will_exit" -> Server_will_exit + | "Server_will_hangup" -> Server_will_hangup + | "Server_will_continue" -> Server_will_continue + | _ -> raise (Jget.Parse ("unknown intent " ^ s)) + in + let server_build_id = Jget.string_exn json "server_build_id" in + let server_bin = Jget.string_exn json "server_bin" in + let server_intent = Jget.string_exn json "server_intent" |> string_to_intent in + let server_version = Jget.string_d json ~default:"0.0.0" "server_version" in + { server_build_id; server_bin; server_intent; server_version }) diff --git a/src/server/rechecker/dune b/src/server/rechecker/dune new file mode 100644 index 00000000000..d8ce0869e37 --- /dev/null +++ b/src/server/rechecker/dune @@ -0,0 +1,31 @@ +(library + (name flow_server_rechecker) + (wrapped false) + (modules :standard \ recheck_updates) + (libraries + flow_common + flow_exit_status + flow_server_env + flow_server_error_collator + flow_server_files + flow_server_monitor_listener_state + flow_server_rechecker_updates + flow_service_inference + flow_service_inference_module + ) + (preprocess (pps lwt_ppx)) +) + +(library + (name flow_server_rechecker_updates) + (wrapped false) + (modules recheck_updates) + (libraries + flow_common + flow_exit_status + flow_parsing + flow_server_files + flow_service_inference_module + ) + (preprocess (pps lwt_ppx)) ; See T41851208 +) diff --git a/src/server/rechecker/recheck_updates.ml b/src/server/rechecker/recheck_updates.ml index 7ac6783fe8d..01d650fc591 100644 --- a/src/server/rechecker/recheck_updates.ml +++ b/src/server/rechecker/recheck_updates.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,9 +9,12 @@ module FilenameSet = Utils_js.FilenameSet let spf = Printf.sprintf -type error = { msg: string; exit_status: FlowExitStatus.t; } +type error = { + msg: string; + exit_status: FlowExitStatus.t; +} -let is_incompatible_package_json = +let is_incompatible_package_json ~reader = (* WARNING! Be careful when adding new incompatibilities to this function. While dfind will * return any file which changes within the watched directories, watchman only watches for * specific extensions and files. 
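(* The rewritten process_updates below keeps its original shape: a sequence of
   compatibility checks chained with Core_result's (>>=), where the first Error
   short-circuits with a message and an exit status. A standalone sketch of that
   style, with hypothetical flags (flowconfig_changed, libs_changed) standing in
   for the real checks: *)
let sketch_process_updates ~flowconfig_changed ~libs_changed updates =
  Core_result.(
    Ok ()
    >>= (fun () ->
          if flowconfig_changed then
            Error { msg = "flowconfig changed"; exit_status = FlowExitStatus.Flowconfig_changed }
          else
            Ok ())
    >>= (fun () ->
          if libs_changed then
            Error { msg = "lib files changed"; exit_status = FlowExitStatus.Server_out_of_date }
          else
            Ok ())
    >>= fun () -> Ok updates)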
Make sure to update the watchman_expression_terms in our @@ -21,18 +24,15 @@ let is_incompatible_package_json = match Sys_utils.cat_or_failed filename_str with | None -> true (* Failed to read package.json *) | Some content -> - try - let ast = Parsing_service_js.parse_json_file ~fail:true content filename in - Module_js.package_incompatible filename_str ast - with _ -> true (* Failed to parse package.json *) + (try + let ast = Parsing_service_js.parse_json_file ~fail:true content filename in + Module_js.package_incompatible ~reader filename_str ast + with _ -> true) + (* Failed to parse package.json *) in - fun ~want ~sroot ~file_options f -> - ( - String_utils.string_starts_with f sroot || - Files.is_included file_options f - ) - && (Filename.basename f) = "package.json" + (String_utils.string_starts_with f sroot || Files.is_included file_options f) + && Filename.basename f = "package.json" && want f && is_incompatible f @@ -43,93 +43,107 @@ let is_incompatible_package_json = * 2. If we do care, are we unable to incrementally check this change. For example, maybe a libdef * changed or the .flowconfig changed. Maybe one day we'll learn to incrementally check those * changes, but for now we just need to exit and restart from scratch *) -let process_updates ~options ~libs updates = - let open Core_result in - let file_options = Options.file_options options in - let all_libs = - let known_libs = libs in - let _, maybe_new_libs = Files.init file_options in - SSet.union known_libs maybe_new_libs - in - let root = Options.root options in - let config_path = Server_files_js.config_file (Options.flowconfig_name options) root in - let sroot = Path.to_string root in - let want = Files.wanted ~options:file_options all_libs in - - Ok () - >>= fun () -> +let process_updates ?(skip_incompatible = false) ~options ~libs updates = + Core_result.( + let reader = State_reader.create () in + let file_options = Options.file_options options in + let all_libs = + let known_libs = libs in + let (_, maybe_new_libs) = Files.init file_options in + SSet.union known_libs maybe_new_libs + in + let root = Options.root options in + let config_path = Server_files_js.config_file (Options.flowconfig_name options) root in + let sroot = Path.to_string root in + let want = Files.wanted ~options:file_options all_libs in + Ok () + >>= fun () -> (* Die if the .flowconfig changed *) - if SSet.mem config_path updates - then - Error { - msg = spf "%s changed in an incompatible way. Exiting." config_path; - exit_status = FlowExitStatus.Flowconfig_changed; - } - else Ok () - >>= fun () -> - let is_incompatible_package_json = is_incompatible_package_json ~want ~sroot ~file_options in - - (* Die if a package.json changed in an incompatible way *) - let incompatible_packages = SSet.filter is_incompatible_package_json updates in - if not (SSet.is_empty incompatible_packages) - then - let messages = SSet.elements incompatible_packages - |> List.rev_map (spf "Modified package: %s") - |> String.concat "\n" in - Error { - msg = spf "%s\nPackages changed in an incompatible way. Exiting." messages; - exit_status = FlowExitStatus.Server_out_of_date; - } - else Ok () - >>= fun () -> - Option.value_map (Options.module_resolver options) ~default:(Ok ()) ~f:(fun module_resolver -> - let str_module_resolver = Path.to_string module_resolver in - if SSet.mem str_module_resolver updates - then - Error { - msg = Printf.sprintf "Module resolver %s changed in an incompatible way. Exiting.\n%!" 
- str_module_resolver; - exit_status = FlowExitStatus.Server_out_of_date; + if (not skip_incompatible) && SSet.mem config_path updates then + Error + { + msg = spf "%s changed in an incompatible way. Exiting." config_path; + exit_status = FlowExitStatus.Flowconfig_changed; } - else Ok () - ) - >>= fun () -> - - let flow_typed_path = Path.to_string (Files.get_flowtyped_path root) in - let is_changed_lib filename = - let is_lib = SSet.mem filename all_libs || filename = flow_typed_path in - is_lib && - let file = File_key.LibFile filename in - match Sys_utils.cat_or_failed filename with - | None -> true (* Failed to read lib file *) - | Some content -> - (* Check if the lib file's hash has changed *) - not (Parsing_service_js.does_content_match_file_hash file content) - in - - (* Die if a lib file changed *) - let libs = updates |> SSet.filter is_changed_lib in - if not (SSet.is_empty libs) - then - let messages = SSet.elements libs - |> List.rev_map (spf "Modified lib file: %s") - |> String.concat "\n" in - Error { - msg = spf "%s\nLib files changed in an incompatible way. Exiting" messages; - exit_status = FlowExitStatus.Server_out_of_date; - } - else Ok () - >>= fun () -> - let is_flow_file = Files.is_flow_file ~options:file_options in - Ok (SSet.fold (fun f acc -> - if is_flow_file f && - (* note: is_included may be expensive. check in-root match first. *) - (String_utils.string_starts_with f sroot || - Files.is_included file_options f) && - (* removes excluded and lib files. the latter are already filtered *) - want f - then - let filename = Files.filename_from_string ~options:file_options f in - FilenameSet.add filename acc - else acc - ) updates FilenameSet.empty) + else + Ok () + >>= fun () -> + let is_incompatible_package_json = + is_incompatible_package_json ~reader ~want ~sroot ~file_options + in + (* Die if a package.json changed in an incompatible way *) + let incompatible_packages = SSet.filter is_incompatible_package_json updates in + if (not skip_incompatible) && not (SSet.is_empty incompatible_packages) then + let messages = + SSet.elements incompatible_packages + |> List.rev_map (spf "Modified package: %s") + |> String.concat "\n" + in + Error + { + msg = spf "%s\nPackages changed in an incompatible way. Exiting." messages; + exit_status = FlowExitStatus.Server_out_of_date; + } + else + Ok () + >>= fun () -> + Option.value_map + (Options.module_resolver options) + ~default:(Ok ()) + ~f:(fun module_resolver -> + let str_module_resolver = Path.to_string module_resolver in + if (not skip_incompatible) && SSet.mem str_module_resolver updates then + Error + { + msg = + Printf.sprintf + "Module resolver %s changed in an incompatible way. Exiting.\n%!" 
+ str_module_resolver; + exit_status = FlowExitStatus.Server_out_of_date; + } + else + Ok ()) + >>= fun () -> + let flow_typed_path = Path.to_string (Files.get_flowtyped_path root) in + let is_changed_lib filename = + let is_lib = SSet.mem filename all_libs || filename = flow_typed_path in + is_lib + && + let file = File_key.LibFile filename in + match Sys_utils.cat_or_failed filename with + | None -> true (* Failed to read lib file *) + | Some content -> + (* Check if the lib file's hash has changed *) + not (Parsing_service_js.does_content_match_file_hash ~reader file content) + in + (* Die if a lib file changed *) + let libs = updates |> SSet.filter is_changed_lib in + if (not skip_incompatible) && not (SSet.is_empty libs) then + let messages = + SSet.elements libs |> List.rev_map (spf "Modified lib file: %s") |> String.concat "\n" + in + Error + { + msg = spf "%s\nLib files changed in an incompatible way. Exiting" messages; + exit_status = FlowExitStatus.Server_out_of_date; + } + else + Ok () + >>= fun () -> + let is_flow_file = Files.is_flow_file ~options:file_options in + Ok + (SSet.fold + (fun f acc -> + if + is_flow_file f + (* note: is_included may be expensive. check in-root match first. *) + && (String_utils.string_starts_with f sroot || Files.is_included file_options f) + && (* removes excluded and lib files. the latter are already filtered *) + want f + then + let filename = Files.filename_from_string ~options:file_options f in + FilenameSet.add filename acc + else + acc) + updates + FilenameSet.empty)) diff --git a/src/server/rechecker/recheck_updates.mli b/src/server/rechecker/recheck_updates.mli index 98b93841ca7..c39e5ddbede 100644 --- a/src/server/rechecker/recheck_updates.mli +++ b/src/server/rechecker/recheck_updates.mli @@ -1,13 +1,17 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -type error = { msg: string; exit_status: FlowExitStatus.t; } +type error = { + msg: string; + exit_status: FlowExitStatus.t; +} -val process_updates: +val process_updates : + ?skip_incompatible:bool -> options:Options.t -> libs:SSet.t -> SSet.t -> diff --git a/src/server/rechecker/rechecker.ml b/src/server/rechecker/rechecker.ml index 99c4f4bc747..4103ac3bb10 100644 --- a/src/server/rechecker/rechecker.ml +++ b/src/server/rechecker/rechecker.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,9 +8,64 @@ open ServerEnv open Utils_js -let get_lazy_stats genv env = - { ServerProt.Response. - lazy_mode = Options.lazy_mode genv.options; +module Parallelizable_workload_loop = LwtLoop.Make (struct + type acc = unit Lwt.t * ServerEnv.env + + let main (wait_for_cancel, env) = + (* The Lwt.pick will arbitrarily choose one or the other thread if they are both ready. So let's + * explicitly check if the wait_for_cancel thread has resolved to give it priority *) + let () = + match Lwt.state wait_for_cancel with + | Lwt.Return () -> raise Lwt.Canceled + | Lwt.Fail exn -> + let exn = Exception.wrap exn in + Exception.reraise exn + | Lwt.Sleep -> () + in + (* Lwt.pick waits until one of these two promises is resolved (returns or fails). Then it + * cancels the other one and returns/fails. 
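The comment above leans on Lwt.pick's cancellation semantics; a standalone sketch of just that behavior (illustrative, plain Lwt without the ppx):

    let () =
      (* Lwt.wait gives a promise that is NOT cancelable; Lwt.task gives one that is *)
      let (waiter, wakener) = Lwt.wait () in
      let (cancelable, _) = Lwt.task () in
      let picked =
        Lwt.pick
          [
            Lwt.bind waiter (fun () -> Lwt.return "non-cancelable branch won");
            Lwt.bind cancelable (fun () -> Lwt.return "cancelable branch won");
          ]
      in
      (* resolving one branch makes Lwt.pick cancel the still-pending one *)
      Lwt.wakeup wakener ();
      print_endline (Lwt_main.run picked)

This prints "non-cancelable branch won"; the losing branch is canceled, which is how the loop below hands Lwt.Canceled to the workload waiter once wait_for_cancel resolves.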
+ * + * Normally, the wait_and_pop_parallelizable_workload thread will finish first. Then Lwt.pick + * will cancel the `let%lwt () = ... in raise Lwt.Canceled` thread. The `wait_for_cancel` thread + * is NOT cancelable so that will stay unresolved. + * + * Eventually, wait_for_cancel will resolve. Then we'll cancel the + * wait_and_pop_parallelizable_workload thread throw Lwt.Canceled *) + let%lwt workload = + Lwt.pick + [ + ServerMonitorListenerState.wait_and_pop_parallelizable_workload (); + (let%lwt () = wait_for_cancel in + raise Lwt.Canceled); + ] + in + (* We have a workload! Let's run it! *) + Hh_logger.info "Running a parallel workload"; + let%lwt () = workload env in + Lwt.return (wait_for_cancel, env) + + let catch _ exn = + let exn = Exception.wrap exn in + Exception.reraise exn +end) + +let start_parallelizable_workloads env = + (* The wait_for_cancel thread itself is NOT cancelable *) + let (wait_for_cancel, wakener) = Lwt.wait () in + let loop_thread = Parallelizable_workload_loop.run (wait_for_cancel, env) in + (* Allow this stop function to be called multiple times for the same loop *) + let already_woken = ref false in + fun () -> + (* Tell the loop to cancel at its earliest convinience *) + if not !already_woken then Lwt.wakeup wakener (); + already_woken := true; + + (* Wait for the loop to finish *) + loop_thread + +let get_lazy_stats ~options env = + { + ServerProt.Response.lazy_mode = Options.lazy_mode options; checked_files = CheckedSet.all env.checked_files |> FilenameSet.cardinal; total_files = FilenameSet.cardinal env.files; } @@ -19,107 +74,195 @@ let get_lazy_stats genv env = * FilenameSet. Updates may be coming in from the root, or an include path. * * If any update can't be processed incrementally, the Flow server will exit *) -let process_updates genv env updates = - let open Recheck_updates in - match process_updates ~options:genv.ServerEnv.options ~libs:env.ServerEnv.libs updates with - | Core_result.Ok updates -> updates - | Core_result.Error { msg; exit_status } -> begin - Hh_logger.fatal "Status: Error"; - Hh_logger.fatal "%s" msg; - FlowExitStatus.exit ~msg exit_status - end +let process_updates ~options env updates = + Recheck_updates.( + match process_updates ~options ~libs:env.ServerEnv.libs updates with + | Core_result.Ok updates -> updates + | Core_result.Error { msg; exit_status } -> + Hh_logger.fatal "Status: Error"; + Hh_logger.fatal "%s" msg; + FlowExitStatus.exit ~msg exit_status) (* on notification, execute client commands or recheck files *) -let recheck genv env ?(files_to_focus=FilenameSet.empty) updates = +let recheck + genv + env + ?(files_to_force = CheckedSet.empty) + ~file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files + updates = (* Caller should have already checked this *) - assert (not (FilenameSet.is_empty updates)); + assert (not (FilenameSet.is_empty updates && CheckedSet.is_empty files_to_force)); MonitorRPC.status_update ~event:ServerStatus.Recheck_start; Persistent_connection.send_start_recheck env.connections; let options = genv.ServerEnv.options in let workers = genv.ServerEnv.workers in - - let%lwt profiling, summary, env = - Types_js.recheck ~options ~workers ~updates env ~files_to_focus in - - let lazy_stats = get_lazy_stats genv env in + let%lwt (profiling, summary, env) = + Types_js.recheck + ~options + ~workers + ~updates + env + ~files_to_force + ~file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files + in + let lazy_stats = get_lazy_stats ~options env in 
Persistent_connection.send_end_recheck ~lazy_stats env.connections; + (* We must send "end_recheck" prior to sending errors+warnings so the client *) (* knows that this set of errors+warnings are final ones, not incremental. *) let calc_errors_and_warnings () = - let errors, warnings, _ = ErrorCollator.get_with_separate_warnings env in - errors, warnings + let reader = State_reader.create () in + let (errors, warnings, _) = ErrorCollator.get_with_separate_warnings ~reader ~options env in + (errors, warnings) in - Persistent_connection.update_clients ~clients:env.connections ~calc_errors_and_warnings; + let errors_reason = LspProt.End_of_recheck { recheck_reasons } in + Persistent_connection.update_clients + ~clients:env.connections + ~errors_reason + ~calc_errors_and_warnings; MonitorRPC.status_update ~event:(ServerStatus.Finishing_up summary); Lwt.return (profiling, env) (* Runs a function which should be canceled if we are notified about any file changes. After the - * thread is canceled, on_cancel is called and its result returned *) -let run_but_cancel_on_file_changes genv env ~f ~on_cancel = - let process_updates = process_updates genv env in - let run_thread = f () in + * thread is canceled, post_cancel is called and its result returned *) +let run_but_cancel_on_file_changes ~options env ~get_forced ~f ~pre_cancel ~post_cancel = + let process_updates = process_updates ~options env in + (* We don't want to start running f until we're in the try block *) + let (waiter, wakener) = Lwt.task () in + let run_thread = + let%lwt () = waiter in + f () + in let cancel_thread = let%lwt () = - if Options.enable_cancelable_rechecks genv.ServerEnv.options - then begin - let%lwt () = ServerMonitorListenerState.wait_for_updates_for_recheck ~process_updates in - Hh_logger.info "Canceling due to new file changes"; - Lwt.cancel run_thread; - Lwt.return_unit - end else - Lwt.return_unit + ServerMonitorListenerState.wait_for_updates_for_recheck ~process_updates ~get_forced in + Hh_logger.info "Canceling since a recheck is needed"; + let%lwt () = pre_cancel () in + Lwt.cancel run_thread; Lwt.return_unit in try%lwt + Lwt.wakeup wakener (); let%lwt ret = run_thread in Lwt.cancel cancel_thread; Lwt.return ret with Lwt.Canceled -> - on_cancel () + Lwt.cancel cancel_thread; + post_cancel () (* Perform a single recheck. This will incorporate any pending changes from the file watcher. * If any file watcher notifications come in during the recheck, it will be canceled and restarted * to include the new changes *) let rec recheck_single - ?(files_to_recheck=FilenameSet.empty) - ?(files_to_focus=FilenameSet.empty) - genv env = - let open ServerMonitorListenerState in - let env = update_env env in - let process_updates = process_updates genv env in - let workload = get_and_clear_recheck_workload ~process_updates in - let files_to_recheck = FilenameSet.union files_to_recheck workload.files_to_recheck in - let files_to_focus = FilenameSet.union files_to_focus workload.files_to_focus in - let all_files = FilenameSet.union files_to_recheck files_to_focus in - if FilenameSet.is_empty all_files - then begin - List.iter (fun callback -> callback None) workload.profiling_callbacks; - Lwt.return (Error env) (* Nothing to do *) - end else - let on_cancel () = - Hh_logger.info - "Recheck successfully canceled. 
Restarting the recheck to include new file changes"; - recheck_single ~files_to_recheck:all_files ~files_to_focus genv env + ?(files_to_recheck = FilenameSet.empty) + ?(files_to_force = CheckedSet.empty) + ?(file_watcher_metadata = MonitorProt.empty_file_watcher_metadata) + ?(recheck_reasons_list_rev = []) + genv + env = + ServerMonitorListenerState.( + let env = update_env env in + let options = genv.ServerEnv.options in + let process_updates = process_updates ~options env in + (* This ref is an estimate of the files which will be checked by the time the recheck is done. + * As the recheck progresses, the estimate will get better. We use this estimate to prevent + * canceling the recheck to force a file which we were already going to check + * + * This early estimate is not a very good estimate, since it's missing new dependents and + * dependencies. However it should be good enough to prevent rechecks continuously restarting as + * the server gets spammed with autocomplete requests *) + let will_be_checked_files = + ref (CheckedSet.union env.ServerEnv.checked_files files_to_force) in - let f () = - let%lwt profiling, env = recheck genv env ~files_to_focus all_files in - List.iter (fun callback -> callback (Some profiling)) workload.profiling_callbacks; - Lwt.return (Ok (profiling, env)) + let get_forced () = !will_be_checked_files in + let workload = get_and_clear_recheck_workload ~process_updates ~get_forced in + let files_to_recheck = FilenameSet.union files_to_recheck workload.files_to_recheck in + let files_to_force = CheckedSet.union files_to_force workload.files_to_force in + let file_watcher_metadata = + MonitorProt.merge_file_watcher_metadata file_watcher_metadata workload.metadata in + let recheck_reasons_list_rev = workload.recheck_reasons_rev :: recheck_reasons_list_rev in + if FilenameSet.is_empty files_to_recheck && CheckedSet.is_empty files_to_force then ( + List.iter (fun callback -> callback None) workload.profiling_callbacks; + Lwt.return (Error env) (* Nothing to do *) + ) else + (* Start the parallelizable workloads loop and return a function which will stop the loop *) + let stop_parallelizable_workloads = start_parallelizable_workloads env in + let post_cancel () = + Hh_logger.info + "Recheck successfully canceled. 
Restarting the recheck to include new file changes"; + recheck_single + ~files_to_recheck + ~files_to_force + ~file_watcher_metadata + ~recheck_reasons_list_rev + genv + env + in + let f () = + (* Take something like [[10, 9], [8], [7], [6,5,4,3], [2,1]] and output [1,2,3,4,5,6,7,8,9,10] + *) + let recheck_reasons = + List.fold_left + (fun recheck_reasons recheck_reasons_rev -> + List.rev_append recheck_reasons_rev recheck_reasons) + [] + recheck_reasons_list_rev + in + let%lwt (profiling, env) = + try%lwt + recheck + genv + env + ~files_to_force + ~file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files + files_to_recheck + with exn -> + let exn = Exception.wrap exn in + let%lwt () = stop_parallelizable_workloads () in + Exception.reraise exn + in + let%lwt () = stop_parallelizable_workloads () in + List.iter (fun callback -> callback (Some profiling)) workload.profiling_callbacks; + + (* Now that the recheck is done, it's safe to retry deferred parallelizable workloads *) + ServerMonitorListenerState.requeue_deferred_parallelizable_workloads (); + Lwt.return (Ok (profiling, env)) + in + run_but_cancel_on_file_changes + ~options + env + ~get_forced + ~f + ~pre_cancel:stop_parallelizable_workloads + ~post_cancel) + +let recheck_loop = + (* It's not obvious to Mr Gabe how we should merge together the profiling info from multiple + * rechecks. But something is better than nothing... *) + let rec loop + ?(files_to_recheck = FilenameSet.empty) + ?(files_to_force = CheckedSet.empty) + ?(profiling = []) + genv + env = + match%lwt recheck_single ~files_to_recheck ~files_to_force genv env with + | Error env -> + (* No more work to do for now *) + Lwt.return (List.rev profiling, env) + | Ok (recheck_profiling, env) -> + (* We just finished a recheck. Let's see if there's any more stuff to recheck *) + loop ~profiling:(recheck_profiling :: profiling) genv env + in + (fun genv env -> loop genv env) - run_but_cancel_on_file_changes genv env ~f ~on_cancel - -let rec recheck_loop - ?(files_to_recheck=FilenameSet.empty) - ?(files_to_focus=FilenameSet.empty) - genv env = - match%lwt recheck_single ~files_to_recheck ~files_to_focus genv env with - | Error env -> - (* No more work to do for now *) - Lwt.return env - | Ok (_profiling, env) -> - (* We just finished a recheck. Let's see if there's any more stuff to recheck *) - recheck_loop genv env +let recheck_single ?files_to_force genv env = recheck_single ?files_to_force genv env diff --git a/src/server/rechecker/rechecker.mli b/src/server/rechecker/rechecker.mli index cdabf09d31d..bddcd9f7e1f 100644 --- a/src/server/rechecker/rechecker.mli +++ b/src/server/rechecker/rechecker.mli @@ -1,36 +1,20 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) (* filter and relativize updated file paths *) -val process_updates : - ServerEnv.genv -> - ServerEnv.env -> - SSet.t -> - Utils_js.FilenameSet.t +val process_updates : options:Options.t -> ServerEnv.env -> SSet.t -> Utils_js.FilenameSet.t -val recheck_single: - ?files_to_recheck:Utils_js.FilenameSet.t -> - ?files_to_focus:Utils_js.FilenameSet.t -> +val recheck_single : + ?files_to_force:CheckedSet.t -> ServerEnv.genv -> ServerEnv.env -> (Profiling_js.finished * ServerEnv.env, ServerEnv.env) result Lwt.t -val recheck_loop: - ?files_to_recheck:Utils_js.FilenameSet.t -> - ?files_to_focus:Utils_js.FilenameSet.t -> - ServerEnv.genv -> - ServerEnv.env -> - ServerEnv.env Lwt.t - -val run_but_cancel_on_file_changes: - ServerEnv.genv -> - ServerEnv.env -> - f:(unit -> 'a Lwt.t) -> - on_cancel:(unit -> 'a Lwt.t) -> - 'a Lwt.t +val recheck_loop : + ServerEnv.genv -> ServerEnv.env -> (Profiling_js.finished list * ServerEnv.env) Lwt.t -val get_lazy_stats: ServerEnv.genv -> ServerEnv.env -> ServerProt.Response.lazy_stats +val get_lazy_stats : options:Options.t -> ServerEnv.env -> ServerProt.Response.lazy_stats diff --git a/src/server/server.ml b/src/server/server.ml index e5365de88c7..165b27086b2 100644 --- a/src/server/server.ml +++ b/src/server/server.ml @@ -1,87 +1,106 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) let sample_init_memory profiling = - let open SharedMem_js in - let dep_stats = dep_stats () in - let hash_stats = hash_stats () in - let heap_size = heap_size () in - let memory_metrics = [ - "heap.size", heap_size; - "dep_table.nonempty_slots", dep_stats.nonempty_slots; - "dep_table.used_slots", dep_stats.used_slots; - "dep_table.slots", dep_stats.slots; - "hash_table.nonempty_slots", hash_stats.nonempty_slots; - "hash_table.used_slots", hash_stats.used_slots; - "hash_table.slots", hash_stats.slots; - ] in - List.iter (fun (metric, value) -> - Profiling_js.legacy_sample_memory - ~metric:("init_done." ^ metric) - ~value:(float_of_int value) - profiling - ) memory_metrics - -let init ~focus_targets genv = + SharedMem_js.( + let hash_stats = hash_stats () in + let heap_size = heap_size () in + let memory_metrics = + [ + ("heap.size", heap_size); + ("hash_table.nonempty_slots", hash_stats.nonempty_slots); + ("hash_table.used_slots", hash_stats.used_slots); + ("hash_table.slots", hash_stats.slots); + ] + in + List.iter + (fun (metric, value) -> + Profiling_js.legacy_sample_memory + ~metric:("init_done." 
^ metric) + ~value:(float_of_int value) + profiling) + memory_metrics) + +let init ~profiling ?focus_targets genv = (* write binary path and version to server log *) Hh_logger.info "executable=%s" (Sys_utils.executable_path ()); Hh_logger.info "version=%s" Flow_version.version; let workers = genv.ServerEnv.workers in let options = genv.ServerEnv.options in - MultiWorkerLwt.set_report_canceled_callback (fun ~total ~finished -> - Hh_logger.info "Canceling progress %d/%d" finished total; - MonitorRPC.status_update - ~event:ServerStatus.(Canceling_progress { total = Some total; finished; }); - ); + Hh_logger.info "Canceling progress %d/%d" finished total; + MonitorRPC.status_update + ~event:ServerStatus.(Canceling_progress { total = Some total; finished })); MonitorRPC.status_update ~event:ServerStatus.Init_start; - let should_print_summary = Options.should_profile options in - let%lwt (profiling, env) = Profiling_js.with_profiling_lwt ~label:"Init" ~should_print_summary - begin fun profiling -> - let%lwt libs_ok, env = Types_js.init ~profiling ~workers options in - - (* If any libs errored, skip typechecking and just show lib errors. Note - * that `init` above has done all parsing, not just lib parsing, resolved - * and committed modules, etc. - * - * Furthermore, if we're in lazy mode, we forego typechecking until later, - * when it proceeds on an as-needed basis. *) - let%lwt env = - if not libs_ok || Options.is_lazy_mode options - then Lwt.return env - else Types_js.full_check ~profiling ~workers ~focus_targets ~options env - in + let%lwt (libs_ok, env, last_estimates) = Types_js.init ~profiling ~workers options in + (* If any libs errored, skip typechecking and just show lib errors. Note + * that `init` above has done all parsing, not just lib parsing, resolved + * and committed modules, etc. + * + * Furthermore, if we're in lazy mode, we forego typechecking until later, + * when it proceeds on an as-needed basis. *) + let%lwt (env, first_internal_error) = + if (not libs_ok) || Options.is_lazy_mode options then + Lwt.return (env, None) + else + Types_js.full_check ~profiling ~workers ?focus_targets ~options env + in + sample_init_memory profiling; - sample_init_memory profiling; + SharedMem_js.init_done (); - SharedMem_js.init_done(); + (* Return an env that initializes invariants required and maintained by + recheck, namely that `files` contains files that parsed successfully, and + `errors` contains the current set of errors. *) + Lwt.return (env, last_estimates, first_internal_error) - (* Return an env that initializes invariants required and maintained by - recheck, namely that `files` contains files that parsed successfully, and - `errors` contains the current set of errors. *) - Lwt.return env - end in - let event = ServerStatus.(Finishing_up { - duration = Profiling_js.get_profiling_duration profiling; - info = InitSummary}) in - MonitorRPC.status_update ~event; - Lwt.return (profiling, env) - -let rec run_workload genv env workload = - let on_cancel () = - Hh_logger.info "Workload successfully canceled. Running a recheck to pick up new file changes"; - let%lwt env = Rechecker.recheck_loop genv env in - Hh_logger.info "Now restarting the workload"; - run_workload genv env workload +(* A thread that samples memory stats every second and then logs an idle heartbeat event even + * `idle_period_in_seconds` seconds. *) +let rec log_on_idle = + (* The time in seconds to gather data before logging. Shouldn't be too small or we'll flood the + * logs. 
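Several call sites in this patch (Init, Idle, check_once) share the same Profiling_js calling convention; a compressed sketch of it, where expensive_step is a hypothetical unit -> unit Lwt.t workload:

    let profiled ~should_print_summary expensive_step =
      let%lwt (profiling, result) =
        Profiling_js.with_profiling_lwt ~label:"Init" ~should_print_summary (fun profiling ->
            let%lwt () =
              Profiling_js.with_timer_lwt ~timer:"ExpensiveStep" profiling ~f:expensive_step
            in
            Lwt.return "done")
      in
      (* the finished profile is what gets attached to logging and status events *)
      ignore (profiling : Profiling_js.finished);
      Lwt.return result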
*) + let idle_period_in_seconds = 300 in + (* Grab memory stats. Since we're idle, we don't really care much about sharedmemory stats. But + * our cgroup stats may change depending on the memory pressure *) + let sample profiling = + let%lwt cgroup_stats = CGroup.get_stats () in + begin + match cgroup_stats with + | Error _ -> () + | Ok { CGroup.total; total_swap; anon; file; shmem } -> + Profiling_js.sample_memory profiling ~metric:"cgroup_total" ~value:(float total); + Profiling_js.sample_memory profiling ~metric:"cgroup_swap" ~value:(float total_swap); + Profiling_js.sample_memory profiling ~metric:"cgroup_anon" ~value:(float anon); + Profiling_js.sample_memory profiling ~metric:"cgroup_shmem" ~value:(float shmem); + Profiling_js.sample_memory profiling ~metric:"cgroup_file" ~value:(float file) + end; + Lwt.return_unit in - Rechecker.run_but_cancel_on_file_changes genv env ~f:(fun () -> workload env) ~on_cancel + (* Sample every second for `seconds_remaining` seconds *) + let rec sample_and_sleep profiling seconds_remaining = + if seconds_remaining > 0 then + let%lwt () = sample profiling in + let%lwt () = Lwt_unix.sleep 1.0 in + sample_and_sleep profiling (seconds_remaining - 1) + else + Lwt.return_unit + in + fun ~options start_time -> + let should_print_summary = Options.should_profile options in + let%lwt (profiling, ()) = + Profiling_js.with_profiling_lwt ~label:"Idle" ~should_print_summary (fun profiling -> + let%lwt () = sample_and_sleep profiling idle_period_in_seconds in + sample profiling) + in + FlowEventLogger.idle_heartbeat ~idle_time:(Unix.gettimeofday () -. start_time) ~profiling; + log_on_idle ~options start_time let rec serve ~genv ~env = Hh_logger.debug "Starting aggressive shared mem GC"; @@ -90,19 +109,28 @@ let rec serve ~genv ~env = MonitorRPC.status_update ~event:ServerStatus.Ready; + let options = genv.ServerEnv.options in + let idle_logging_thread = log_on_idle ~options (Unix.gettimeofday ()) in (* Ok, server is settled. Let's go to sleep until we get a message from the monitor *) - let%lwt () = ServerMonitorListenerState.wait_for_anything - ~process_updates:(Rechecker.process_updates genv env) + let%lwt () = + ServerMonitorListenerState.wait_for_anything + ~process_updates:(Rechecker.process_updates ~options env) + ~get_forced:(fun () -> env.ServerEnv.checked_files) + (* We're not in the middle of a recheck *) in + Lwt.cancel idle_logging_thread; (* If there's anything to recheck or updates to the env from the monitor, let's consume them *) - let%lwt env = Rechecker.recheck_loop genv env in - + let%lwt (_profiling, env) = Rechecker.recheck_loop genv env in (* Run a workload (if there is one) *) - let%lwt env = Option.value_map (ServerMonitorListenerState.pop_next_workload ()) - ~default:(Lwt.return env) - ~f:(run_workload genv env) in - + let%lwt env = + Option.value_map + (ServerMonitorListenerState.pop_next_workload ()) + ~default:(Lwt.return env) + ~f:(fun workload -> + Hh_logger.info "Running a serial workload"; + workload env) + in (* Flush the logs asynchronously *) Lwt.async EventLoggerLwt.flush; @@ -114,83 +142,133 @@ let rec serve ~genv ~env = * type-checker succeeded. So to know if there is some work to be done, * we look if env.modified changed. 
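The serve loop above starts the idle logger as a free-running Lwt thread and cancels it as soon as the monitor delivers work; the same pattern in isolation (illustrative; wait_for_work is hypothetical):

    let with_idle_logger ~options wait_for_work =
      let idle_thread = log_on_idle ~options (Unix.gettimeofday ()) in
      let%lwt work = wait_for_work () in
      (* cancellation interrupts the Lwt_unix.sleep inside log_on_idle's loop *)
      Lwt.cancel idle_thread;
      Lwt.return work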
*) -let create_program_init ~shared_mem_config ~focus_targets options = - let handle = SharedMem_js.init shared_mem_config in +let create_program_init ~shared_mem_config ?focus_targets options = + let num_workers = Options.max_workers options in + let handle = SharedMem_js.init ~num_workers shared_mem_config in let genv = ServerEnvBuild.make_genv options handle in - - let program_init = fun () -> - let%lwt profiling, env = init ~focus_targets genv in - FlowEventLogger.init_done ~profiling; + let program_init profiling = + let%lwt ret = init ~profiling ?focus_targets genv in if shared_mem_config.SharedMem_js.log_level > 0 then Measure.print_stats (); - Lwt.return (profiling, env) + Lwt.return ret in - genv, program_init + (genv, program_init) let run ~monitor_channels ~shared_mem_config options = MonitorRPC.init ~channels:monitor_channels; - let genv, program_init = - create_program_init ~shared_mem_config ~focus_targets:None options in - + let (genv, program_init) = create_program_init ~shared_mem_config options in let initial_lwt_thread () = (* Read messages from the server monitor and add them to a stream as they come in *) let listening_thread = ServerMonitorListener.listen_for_messages genv in - (* Initialize *) let%lwt env = let t = Unix.gettimeofday () in Hh_logger.info "Initializing Server (This might take some time)"; - let%lwt _profiling, env = program_init () in + + let should_print_summary = Options.should_profile options in + let%lwt (profiling, (env, last_estimates, first_internal_error)) = + Profiling_js.with_profiling_lwt program_init ~label:"Init" ~should_print_summary + in + let event = + ServerStatus.( + Finishing_up + { duration = Profiling_js.get_profiling_duration profiling; info = InitSummary }) + in + MonitorRPC.status_update ~event; + + begin + match last_estimates with + | None -> FlowEventLogger.init_done ?first_internal_error profiling + | Some + { + Recheck_stats.estimated_time_to_recheck; + estimated_time_to_restart; + estimated_time_to_init; + estimated_time_per_file; + estimated_files_to_recheck; + estimated_files_to_init; + } -> + FlowEventLogger.init_done + ~estimated_time_to_recheck + ~estimated_time_to_restart + ~estimated_time_to_init + ~estimated_time_per_file + ~estimated_files_to_recheck + ~estimated_files_to_init + ?first_internal_error + profiling + end; + Hh_logger.info "Server is READY"; + let t' = Unix.gettimeofday () in Hh_logger.info "Took %f seconds to initialize." (t' -. t); + Lwt.return env in - (* Run both these threads. 
If either of them fail, return immediately *) - LwtUtils.iter_all [ - listening_thread; - serve ~genv ~env - ] + LwtUtils.iter_all [listening_thread; serve ~genv ~env] in LwtInit.run_lwt initial_lwt_thread let run_from_daemonize ~monitor_channels ~shared_mem_config options = - try run ~monitor_channels ~shared_mem_config options - with - | SharedMem_js.Out_of_shared_memory -> - let bt = Printexc.get_backtrace () in - let msg = Utils.spf "Out of shared memory%s" (if bt = "" then bt else ":\n"^bt) in - FlowExitStatus.(exit ~msg Out_of_shared_memory) + try run ~monitor_channels ~shared_mem_config options with + | SharedMem_js.Out_of_shared_memory as exn -> + let exn = Exception.wrap exn in + let bt = Exception.get_backtrace_string exn in + let msg = + Utils.spf + "Out of shared memory%s" + ( if bt = "" then + bt + else + ":\n" ^ bt ) + in + FlowExitStatus.(exit ~msg Out_of_shared_memory) | e -> - let bt = Printexc.get_backtrace () in - let msg = Utils.spf "Unhandled exception: %s%s" - (Printexc.to_string e) - (if bt = "" then bt else "\n"^bt) - in - FlowExitStatus.(exit ~msg Unknown_error) + let e = Exception.wrap e in + let msg = Utils.spf "Unhandled exception: %s" (Exception.to_string e) in + FlowExitStatus.(exit ~msg Unknown_error) -let check_once ~shared_mem_config ~client_include_warnings ?focus_targets options = +let check_once ~shared_mem_config ~format_errors ?focus_targets options = PidLog.disable (); MonitorRPC.disable (); LoggingUtils.set_server_options ~server_options:options; let initial_lwt_thread () = - let _, program_init = - create_program_init ~shared_mem_config ~focus_targets options in - let%lwt profiling, env = program_init () in - - let errors, warnings, suppressed_errors = ErrorCollator.get env in - let warnings = if client_include_warnings || Options.should_include_warnings options - then warnings - else Errors.ErrorSet.empty + let (_, program_init) = create_program_init ~shared_mem_config ?focus_targets options in + let should_print_summary = Options.should_profile options in + let%lwt (profiling, (print_errors, errors, warnings, first_internal_error)) = + Profiling_js.with_profiling_lwt ~label:"Init" ~should_print_summary (fun profiling -> + let%lwt (env, _, first_internal_error) = program_init profiling in + let reader = State_reader.create () in + let%lwt (errors, warnings, suppressed_errors) = + Profiling_js.with_timer_lwt ~timer:"CollateErrors" profiling ~f:(fun () -> + Lwt.return (ErrorCollator.get ~reader ~options env)) + in + let collated_errors = (errors, warnings, suppressed_errors) in + let%lwt print_errors = + Profiling_js.with_timer_lwt ~timer:"FormatErrors" profiling ~f:(fun () -> + Lwt.return (format_errors collated_errors)) + in + Lwt.return (print_errors, errors, warnings, first_internal_error)) in - Lwt.return (profiling, errors, warnings, suppressed_errors) + print_errors profiling; + + let event = + ServerStatus.( + Finishing_up + { duration = Profiling_js.get_profiling_duration profiling; info = InitSummary }) + in + MonitorRPC.status_update ~event; + + FlowEventLogger.init_done ?first_internal_error profiling; + + Lwt.return (errors, warnings) in LwtInit.run_lwt initial_lwt_thread let daemonize = let entry = Server_daemon.register_entry_point run_from_daemonize in fun ~log_file ~shared_mem_config ~argv ~file_watcher_pid options -> - Server_daemon.daemonize ~log_file ~shared_mem_config ~argv ~options - ~file_watcher_pid entry + Server_daemon.daemonize ~log_file ~shared_mem_config ~argv ~options ~file_watcher_pid entry diff --git 
a/src/server/server.mli b/src/server/server.mli index e626c543ca5..322d935b8d1 100644 --- a/src/server/server.mli +++ b/src/server/server.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2017-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,18 +7,24 @@ val check_once : shared_mem_config:SharedMem_js.config -> - client_include_warnings:bool -> + format_errors: + (Errors.ConcreteLocPrintableErrorSet.t + * (* errors *) + Errors.ConcreteLocPrintableErrorSet.t + * (* warnings *) + (Loc.t Errors.printable_error * Loc_collections.LocSet.t) list -> + (* suppressed errors *) Profiling_js.finished -> + unit (* print errors *)) -> ?focus_targets:Utils_js.FilenameSet.t -> Options.t -> - Profiling_js.finished * - Errors.ErrorSet.t * (* errors *) - Errors.ErrorSet.t * (* warnings *) - (Errors.error * Utils_js.LocSet.t) list (* suppressed errors *) + Errors.ConcreteLocPrintableErrorSet.t * (* errors *) Errors.ConcreteLocPrintableErrorSet.t + +(* warnings *) val daemonize : log_file:string -> shared_mem_config:SharedMem_js.config -> argv:string array -> - file_watcher_pid: int option -> + file_watcher_pid:int option -> Options.t -> (MonitorProt.server_to_monitor_message, MonitorProt.monitor_to_server_message) Daemon.handle diff --git a/src/server/serverEnvBuild.ml b/src/server/serverEnvBuild.ml index 45a823e6a7d..366d1cb33c0 100644 --- a/src/server/serverEnvBuild.ml +++ b/src/server/serverEnvBuild.ml @@ -1,11 +1,10 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) - (*****************************************************************************) (* Building the environment *) (*****************************************************************************) @@ -17,4 +16,4 @@ let make_genv options handle = else None in - { ServerEnv.options; workers; } + { ServerEnv.options; workers } diff --git a/src/server/serverWorker.ml b/src/server/serverWorker.ml index 494f2bd7ba8..b757337055d 100644 --- a/src/server/serverWorker.ml +++ b/src/server/serverWorker.ml @@ -1,39 +1,39 @@ (** - * Copyright (c) 2015-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +open Core_kernel + (* As for [Daemon.register_entry_point], this should stay at toplevel, in order to be executed before [Daemon.check_entry_point]. 
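The reworked check_once above hands error printing to the caller through format_errors; a sketch of a minimal caller (illustrative; it assumes the Server module path and that ConcreteLocPrintableErrorSet exposes the usual Set cardinal):

    let format_errors (errors, warnings, _suppressed) _profiling =
      Printf.printf
        "%d errors, %d warnings\n"
        (Errors.ConcreteLocPrintableErrorSet.cardinal errors)
        (Errors.ConcreteLocPrintableErrorSet.cardinal warnings)

    let check ~shared_mem_config options =
      Server.check_once ~shared_mem_config ~format_errors options

The collated errors are formatted inside the profiled region (the "FormatErrors" timer), while the returned closure is only applied to the finished profile afterwards, when the output is actually printed.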
*) let entry = - WorkerController.register_entry_point ~restore:(fun (logger_level, log_filename, profile_id) -> - Hh_logger.Level.set_min_level logger_level; - Flow_server_profile.init_from_id profile_id; + WorkerController.register_entry_point + ~restore:(fun (logger_level, log_filename, profile_id) ~(worker_id : int) -> + Hh_logger.set_id (Printf.sprintf "flow serverWorker %d" worker_id); + Hh_logger.Level.set_min_level logger_level; + Flow_server_profile.init_from_id profile_id; - match log_filename with - | None -> () - | Some file -> - let log_fd = Unix.openfile file [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_APPEND] 0o666 in - Hh_logger.set_log file (Unix.out_channel_of_descr log_fd) - ) + match log_filename with + | None -> () + | Some file -> + let log_fd = Unix.openfile file [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_APPEND] 0o666 in + Hh_logger.set_log file (Unix.out_channel_of_descr log_fd)) (* Saves the default GC settings, which are restored by the workers. Workers can * have more relaxed GC configs as they are short-lived processes, and this * prevents the workers from inheriting GC settings the master needs. *) -let gc_control = Gc.{(get ()) with minor_heap_size = 1024 * 1024 * 2; } +let gc_control = Gc.{ (get ()) with Control.minor_heap_size = 1024 * 1024 * 2 } let make ~n heap_handle = MultiWorkerLwt.make ?call_wrapper:None - ~saved_state: ( - Hh_logger.Level.min_level (), - Hh_logger.get_log_name (), - Flow_server_profile.get_id () - ) + ~saved_state: + (Hh_logger.Level.min_level (), Hh_logger.get_log_name (), Flow_server_profile.get_id ()) ~entry - ~nbr_procs: n + ~nbr_procs:n ~gc_control ~heap_handle diff --git a/src/server/serverWorker.mli b/src/server/serverWorker.mli index 7d3c2b3bb2e..df0571facec 100644 --- a/src/server/serverWorker.mli +++ b/src/server/serverWorker.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2015-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/server/server_daemon.ml b/src/server/server_daemon.ml index 429d307ea29..90332f7d802 100644 --- a/src/server/server_daemon.ml +++ b/src/server/server_daemon.ml @@ -1,12 +1,11 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open Utils_js - module Server_files = Server_files_js type args = { @@ -19,33 +18,26 @@ type args = { file_watcher_pid: int option; } -type entry_point = ( - args, - MonitorProt.monitor_to_server_message, - MonitorProt.server_to_monitor_message -) Daemon.entry +type entry_point = + (args, MonitorProt.monitor_to_server_message, MonitorProt.server_to_monitor_message) Daemon.entry let open_log_file file = (* When opening a new foo.log file, if foo.log already exists, we move it to * foo.log.old. On Linux/OSX this is easy, we just call rename. On Windows, * the rename can fail if foo.log is open or if foo.log.old already exists. 
* Not a huge problem, we just need to be more intentional *) - if Sys.file_exists file - then begin + ( if Sys.file_exists file then let old_file = file ^ ".old" in - - (try - if Sys.file_exists old_file - then Sys.remove old_file; + try + if Sys.file_exists old_file then Sys.remove old_file; Sys.rename file old_file with e -> + let e = Exception.wrap e in Utils.prerr_endlinef "Log rotate: failed to move '%s' to '%s'\n%s" file old_file - (Printexc.to_string e) - ) - end; + (Exception.to_string e) ); Unix.openfile file [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_APPEND] 0o666 let new_entry_point = @@ -55,15 +47,12 @@ let new_entry_point = Printf.sprintf "main_%d" !cpt let register_entry_point - (main: - monitor_channels:MonitorRPC.channels -> - shared_mem_config:SharedMem_js.config -> - Options.t -> - unit) -: entry_point = - Daemon.register_entry_point - (new_entry_point ()) - (fun args monitor_channels -> + (main : + monitor_channels:MonitorRPC.channels -> + shared_mem_config:SharedMem_js.config -> + Options.t -> + unit) : entry_point = + Daemon.register_entry_point (new_entry_point ()) (fun args monitor_channels -> let { shared_mem_config; options; @@ -72,13 +61,15 @@ let register_entry_point parent_pid; parent_logger_pid; file_watcher_pid; - } = args in + } = + args + in LoggingUtils.set_hh_logger_min_level options; Hh_logger.info "argv=%s" (argv |> Array.to_list |> String.concat " "); + LoggingUtils.dump_server_options ~server_options:options ~log:(Hh_logger.info "%s"); + + FlowEventLogger.restore_context logging_context; - (* This server might have been started by a monitor process which is already pretty old, so - * the start_time might be way out of date. *) - FlowEventLogger.(restore_context {logging_context with start_time = Unix.gettimeofday (); }); (* It makes the logs easier if all server logs have the "command" column set to "server", * regardless of whether they were started with `flow start` or `flow server` *) FlowEventLogger.set_command (Some "server"); @@ -86,7 +77,6 @@ let register_entry_point let root = Options.root options in let tmp_dir = Options.temp_dir options in - (* Create the pid log and record all the processes that already exist *) let flowconfig_name = Options.flowconfig_name options in PidLog.init (Server_files_js.pids_file ~flowconfig_name ~tmp_dir root); @@ -98,20 +88,15 @@ let register_entry_point main ~monitor_channels ~shared_mem_config options) -let daemonize ~log_file ~shared_mem_config ~argv ~options ~file_watcher_pid - main_entry = +let daemonize ~log_file ~shared_mem_config ~argv ~options ~file_watcher_pid main_entry = (* Let's make sure this isn't all for naught before we fork *) let root = Options.root options in let tmp_dir = Options.temp_dir options in let flowconfig_name = Options.flowconfig_name options in let lock = Server_files.lock_file ~flowconfig_name ~tmp_dir root in - if not (Lock.check lock) - then begin - let msg = spf - "Error: There is already a server running for %s" - (Path.to_string root) in - FlowExitStatus.(exit ~msg Lock_stolen) - end; + ( if not (Lock.check lock) then + let msg = spf "Error: There is already a server running for %s" (Path.to_string root) in + FlowExitStatus.(exit ~msg Lock_stolen) ); let null_fd = Daemon.null_fd () in let log_fd = open_log_file log_file in @@ -130,17 +115,23 @@ let daemonize ~log_file ~shared_mem_config ~argv ~options ~file_watcher_pid * So for now let's make Windows 7 not crash. 
It seems like `flow start` on * Windows 7 doesn't actually leak stdio, so a no op is acceptable *) - if Sys.win32 - then Unix.(try - set_close_on_exec stdout; - set_close_on_exec stderr - with Unix_error (EINVAL, _, _) -> ()); - Daemon.spawn (null_fd, log_fd, log_fd) (main_entry) { - shared_mem_config; - options; - logging_context = FlowEventLogger.get_context (); - argv; - parent_pid = Unix.getpid (); - parent_logger_pid = EventLogger.logger_pid (); - file_watcher_pid; - } + ( if Sys.win32 then + Unix.( + try + set_close_on_exec stdout; + set_close_on_exec stderr + with Unix_error (EINVAL, _, _) -> ()) ); + let name = spf "server master process watching %s" (Path.to_string root) in + Daemon.spawn + ~name + (null_fd, log_fd, log_fd) + main_entry + { + shared_mem_config; + options; + logging_context = FlowEventLogger.get_context (); + argv; + parent_pid = Unix.getpid (); + parent_logger_pid = EventLogger.logger_pid (); + file_watcher_pid; + } diff --git a/src/server/server_daemon.mli b/src/server/server_daemon.mli index 0d1a4fa3dd2..4f6352b0daa 100644 --- a/src/server/server_daemon.mli +++ b/src/server/server_daemon.mli @@ -1,25 +1,26 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) + type entry_point val register_entry_point : (monitor_channels:MonitorRPC.channels -> - shared_mem_config:SharedMem_js.config -> - Options.t -> - unit) -> + shared_mem_config:SharedMem_js.config -> + Options.t -> + unit) -> entry_point -val open_log_file: string -> Unix.file_descr +val open_log_file : string -> Unix.file_descr val daemonize : log_file:string -> shared_mem_config:SharedMem_js.config -> - argv: string array -> + argv:string array -> options:Options.t -> - file_watcher_pid: int option -> + file_watcher_pid:int option -> entry_point -> (MonitorProt.server_to_monitor_message, MonitorProt.monitor_to_server_message) Daemon.handle diff --git a/src/server/server_files/dune b/src/server/server_files/dune new file mode 100644 index 00000000000..9e6cf0f9633 --- /dev/null +++ b/src/server/server_files/dune @@ -0,0 +1,8 @@ +(library + (name flow_server_files) + (wrapped false) + (libraries + opaque_digest ; hack + sys_utils ; hack + ) +) diff --git a/src/server/server_files/server_files_js.ml b/src/server/server_files/server_files_js.ml index a70a7b72da7..b5cd71451c0 100644 --- a/src/server/server_files/server_files_js.ml +++ b/src/server/server_files/server_files_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -8,28 +8,71 @@ let default_flowconfig_name = ".flowconfig" let add_dir_sep dir = - let open Filename in - if check_suffix dir dir_sep - then dir - else dir ^ dir_sep + Filename.( + if check_suffix dir dir_sep then + dir + else + dir ^ dir_sep) let mk_root flowconfig_name root = if flowconfig_name = default_flowconfig_name then root - else Path.concat root flowconfig_name + else + Path.concat root flowconfig_name -let file_of_root extension ~flowconfig_name ~tmp_dir root = +let digest_root_part root_part max_len = + let len = String.length root_part in + if len <= max_len then + root_part + else + let prefix = String.sub root_part 0 5 in + let suffix = String.sub root_part (len - 5) 5 in + let digest = OpaqueDigest.to_hex (OpaqueDigest.string root_part) in + (* 5 char prefix + 5 char suffix + 2 underscores *) + let max_digest_length = max_len - 12 in + let digest_part = + if String.length digest > max_digest_length then + String.sub digest 0 max_digest_length + else + digest + in + Printf.sprintf "%s_%s_%s" prefix digest_part suffix + +let file_of_root ?max_root_part_len extension ~flowconfig_name ~tmp_dir root = let tmp_dir = tmp_dir |> Path.make |> Path.to_string |> add_dir_sep in let root = mk_root flowconfig_name root in let root_part = Path.slash_escaped_string_of_path root in + let root_part = + match max_root_part_len with + | None -> root_part + | Some max_root_part_len -> digest_root_part root_part max_root_part_len + in Printf.sprintf "%s%s.%s" tmp_dir root_part extension -let config_file flowconfig_name root = - Path.to_string (Path.concat root flowconfig_name) +let config_file flowconfig_name root = Path.to_string (Path.concat root flowconfig_name) + +(* Generating really long filenames can hit some limits. For example + * + * /* /usr/include/linux/limits.h */ + * #define NAME_MAX 255 /* # chars in a file name */ + * + * Which can cause ENAMETOOLONG or stuff like that. So let's cap our filenames (without extensions) + * at 200 characters *) +let max_root_part_len = 200 + +let log_file = file_of_root ~max_root_part_len "log" + +let monitor_log_file = file_of_root ~max_root_part_len "monitor_log" -let lock_file = file_of_root "lock" -let pids_file = file_of_root "pids" +let lock_file = file_of_root ~max_root_part_len "lock" + +let pids_file = file_of_root ~max_root_part_len "pids" + +let recheck_stats_file = file_of_root ~max_root_part_len "recheck_stats" + +(* Socket files don't care about length. socket.ml will worry about abridging those *) let socket_file = file_of_root "sockv3" + let legacy2_socket_file = file_of_root "sockv2" + let legacy1_socket_file = file_of_root "sock" -let dfind_log_file = file_of_root "dfind" diff --git a/src/server/server_files/server_files_js.mli b/src/server/server_files/server_files_js.mli index 55c37e46f40..2361f1ac41b 100644 --- a/src/server/server_files/server_files_js.mli +++ b/src/server/server_files/server_files_js.mli @@ -1,18 +1,26 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
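To see what digest_root_part above does to an over-long escaped root, a small self-checking sketch (illustrative; it relies only on the prefix/suffix/length behavior visible in the code):

    let () =
      let root_part = String.make 300 'a' in
      (* longer than max_root_part_len, so it becomes prefix ^ "_" ^ digest ^ "_" ^ suffix *)
      let shortened = digest_root_part root_part 200 in
      assert (String.length shortened <= 200);
      assert (String.sub shortened 0 5 = "aaaaa");
      assert (String.sub shortened (String.length shortened - 5) 5 = "aaaaa");
      (* roots that already fit pass through untouched *)
      assert (digest_root_part "myroot" 200 = "myroot")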
*) -val default_flowconfig_name: string +val default_flowconfig_name : string -val file_of_root: string -> flowconfig_name:string -> tmp_dir:string -> Path.t -> string +val config_file : string -> Path.t -> string -val config_file: string -> Path.t -> string -val dfind_log_file: flowconfig_name:string -> tmp_dir:string -> Path.t -> string -val lock_file: flowconfig_name:string -> tmp_dir:string -> Path.t -> string -val pids_file: flowconfig_name:string -> tmp_dir:string -> Path.t -> string -val socket_file: flowconfig_name:string -> tmp_dir:string -> Path.t -> string -val legacy2_socket_file: flowconfig_name:string -> tmp_dir:string -> Path.t -> string -val legacy1_socket_file: flowconfig_name:string -> tmp_dir:string -> Path.t -> string +val log_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val monitor_log_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val lock_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val pids_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val socket_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val legacy2_socket_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val legacy1_socket_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string + +val recheck_stats_file : flowconfig_name:string -> tmp_dir:string -> Path.t -> string diff --git a/src/server/server_utils/dune b/src/server/server_utils/dune new file mode 100644 index 00000000000..9d0c077e717 --- /dev/null +++ b/src/server/server_utils/dune @@ -0,0 +1,7 @@ +(library + (name flow_server_utils) + (wrapped false) + (libraries + sys_utils ; hack + ) +) diff --git a/src/server/server_utils/file_input.ml b/src/server/server_utils/file_input.ml index 40d847f1c64..3c7d11fa2a7 100644 --- a/src/server/server_utils/file_input.ml +++ b/src/server/server_utils/file_input.ml @@ -1,13 +1,15 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type t = -| FileName of string -| FileContent of string option * string (* filename, content *) + | FileName of string + | FileContent of string option * string + +(* filename, content *) let path_of_file_input = function | FileName f -> Some f @@ -25,4 +27,6 @@ let content_of_file_input_unsafe = function let content_of_file_input file = try Ok (content_of_file_input_unsafe file) - with exn -> Error (Printexc.to_string exn) + with exn -> + let exn = Exception.wrap exn in + Error (Exception.to_string exn) diff --git a/src/server/shmem/dune b/src/server/shmem/dune new file mode 100644 index 00000000000..818aaf42766 --- /dev/null +++ b/src/server/shmem/dune @@ -0,0 +1,9 @@ +(library + (name flow_shared_mem) + (wrapped false) + (libraries + flow_monitor_rpc + heap_shared_mem ; hack + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/server/shmem/sharedMem_js.ml b/src/server/shmem/sharedMem_js.ml index e3adaa98082..872f8f4454d 100644 --- a/src/server/shmem/sharedMem_js.ml +++ b/src/server/shmem/sharedMem_js.ml @@ -1,49 +1,49 @@ -include SharedMem - -module Prefix = struct - include Prefix -end +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) -module Ident = struct - include Ident -end +include SharedMem +module Prefix = Prefix +module Ident = Ident module Collect : sig - val collect: [ `aggressive | `gentle ] -> unit - val with_memory_profiling_lwt: - profiling:Profiling_js.running -> - collect_at_end:bool -> - (unit -> 'a Lwt.t) -> - 'a Lwt.t + val collect : [ `gentle | `aggressive | `always_TEST ] -> unit + + val with_memory_profiling_lwt : + profiling:Profiling_js.running -> collect_at_end:bool -> (unit -> 'a Lwt.t) -> 'a Lwt.t end = struct let profile_before_collect_callback = ref (fun () -> ()) let collect effort = - if SharedMem.should_collect effort - then begin - (!profile_before_collect_callback) (); + if SharedMem.should_collect effort then ( + !profile_before_collect_callback (); MonitorRPC.status_update ~event:ServerStatus.GC_start; SharedMem.collect effort - end + ) let sample_memory profiling = let heap = heap_size () in let { nonempty_slots; used_slots; slots } = hash_stats () in - Profiling_js.sample_memory ~metric:"heap" ~value:(float_of_int heap) profiling; - Profiling_js.sample_memory ~metric:"hash_nonempty_slots" ~value:(float_of_int nonempty_slots) profiling; - Profiling_js.sample_memory ~metric:"hash_slots" ~value:(float_of_int slots) profiling; - Profiling_js.sample_memory ~metric:"hash_used_slots" ~value:(float_of_int used_slots) profiling + Profiling_js.sample_memory profiling ~metric:"heap" ~value:(float_of_int heap); + Profiling_js.sample_memory + profiling + ~metric:"hash_nonempty_slots" + ~value:(float_of_int nonempty_slots); + Profiling_js.sample_memory profiling ~metric:"hash_slots" ~value:(float_of_int slots); + Profiling_js.sample_memory profiling ~metric:"hash_used_slots" ~value:(float_of_int used_slots) let with_memory_profiling_lwt ~profiling ~collect_at_end f = sample_memory profiling; - profile_before_collect_callback := (fun () -> sample_memory profiling); + (profile_before_collect_callback := (fun () -> sample_memory profiling)); let%lwt ret = f () in - if collect_at_end then collect `aggressive; sample_memory profiling; - profile_before_collect_callback := (fun () -> ()); + (profile_before_collect_callback := (fun () -> ())); Lwt.return ret end diff --git a/src/server/watchman_expression_terms/dune b/src/server/watchman_expression_terms/dune new file mode 100644 index 00000000000..c720b8e106b --- /dev/null +++ b/src/server/watchman_expression_terms/dune @@ -0,0 +1,9 @@ +(library + (name flow_server_watchman_expression_terms) + (wrapped false) + (libraries + flow_common + flow_server_files + hh_json + ) +) diff --git a/src/server/watchman_expression_terms/watchman_expression_terms.ml b/src/server/watchman_expression_terms/watchman_expression_terms.ml new file mode 100644 index 00000000000..a2875725c0d --- /dev/null +++ b/src/server/watchman_expression_terms/watchman_expression_terms.ml @@ -0,0 +1,54 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +let make ~options = + let module J = Hh_json_helpers.AdhocJsonHelpers in + let file_options = Options.file_options options in + let suffixes = + let exts = SSet.elements @@ Files.get_all_watched_extensions file_options in + let exts = Files.flow_ext :: exts in + exts + (* Turn .foo.bar into .bar, since suffix can't deal with multi-part extensions *) + |> List.map (fun ext -> Filename.extension ("foo" ^ ext)) + (* Strip off the leading '.' 
*) + |> List.map (fun ext -> + if ext <> "" && ext.[0] = '.' then + String.sub ext 1 (String.length ext - 1) + else + ext) + in + (* Unfortunately watchman can't deal with absolute paths. Its "wholename" feature only + * works for relative paths to the watch root, and we don't know the watch root until we + * init. + * + * Luckily, all we really need is to specify a superset of the files we care about. So + * watching all .flowconfigs instead of just our .flowconfig is fine *) + let absolute_paths = + (* Config file *) + let flowconfig_name = Options.flowconfig_name options in + let paths = [Server_files_js.config_file flowconfig_name @@ Options.root options] in + (* Module resolver *) + Option.value_map (Options.module_resolver options) ~default:paths ~f:(fun module_resolver -> + Path.to_string module_resolver :: paths) + in + (* Include any file with this basename *) + let basenames = "package.json" :: List.map Filename.basename absolute_paths in + [ + J.strlist ["type"; "f"]; + (* Watch for files *) + J.pred "anyof" @@ [J.assoc_strlist "suffix" suffixes; J.assoc_strlist "name" basenames]; + J.pred "not" + @@ [ + (* Ignore changes in source control dirs *) + J.pred "anyof" + @@ [ + J.strlist ["dirname"; ".hg"]; + J.strlist ["dirname"; ".git"]; + J.strlist ["dirname"; ".svn"]; + ]; + ]; + ] diff --git a/src/services/autocomplete/autocompleteService_js.ml b/src/services/autocomplete/autocompleteService_js.ml index 9f4e3781745..df581537a24 100644 --- a/src/services/autocomplete/autocompleteService_js.ml +++ b/src/services/autocomplete/autocompleteService_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,215 +8,376 @@ open Autocomplete_js open Core_result open ServerProt.Response +open Parsing_heaps_utils let add_autocomplete_token contents line column = let line = line - 1 in Line.transform_nth contents line (fun line_str -> - let length = String.length line_str in - if length >= column - then ( - let start = String.sub line_str 0 column in - let end_ = String.sub line_str column (length - column) in - start ^ Autocomplete_js.autocomplete_suffix ^ end_ - ) else line_str - ) + let length = String.length line_str in + if length >= column then + let start = String.sub line_str 0 column in + let end_ = String.sub line_str column (length - column) in + start ^ Autocomplete_js.autocomplete_suffix ^ end_ + else + line_str) + +(* the autocomplete token inserts `suffix_len` characters, which are included + * in `ac_loc` returned by `Autocomplete_js`. They need to be removed before + * showing `ac_loc` to the client. 
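A quick round-trip sketch for the token handling above (illustrative; it assumes Autocomplete_js.suffix_len is the length of autocomplete_suffix, as the comment states, and that Line.transform_nth rewrites the single line in place):

    let _smoke_test () =
      let contents = "foo.ba" in
      (* cursor at line 1, column 6, i.e. right after "foo.ba" *)
      let with_token = add_autocomplete_token contents 1 6 in
      assert (with_token = contents ^ Autocomplete_js.autocomplete_suffix)

remove_autocomplete_token_from_loc then undoes the corresponding column shift on any loc computed from the instrumented source before it is shown to the client.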
*) +let remove_autocomplete_token_from_loc loc = + Loc.{ loc with _end = { loc._end with column = loc._end.column - Autocomplete_js.suffix_len } } let autocomplete_result_to_json ~strip_root result = let func_param_to_json param = - Hh_json.JSON_Object [ - "name", Hh_json.JSON_String param.param_name; - "type", Hh_json.JSON_String param.param_ty; - ] + Hh_json.JSON_Object + [ + ("name", Hh_json.JSON_String param.param_name); + ("type", Hh_json.JSON_String param.param_ty); + ] in let func_details_to_json details = match details with - | Some fd -> Hh_json.JSON_Object [ - "return_type", Hh_json.JSON_String fd.return_ty; - "params", Hh_json.JSON_Array (List.map func_param_to_json fd.param_tys); - ] - | None -> Hh_json.JSON_Null + | Some fd -> + Hh_json.JSON_Object + [ + ("return_type", Hh_json.JSON_String fd.return_ty); + ("params", Hh_json.JSON_Array (Core_list.map ~f:func_param_to_json fd.param_tys)); + ] + | None -> Hh_json.JSON_Null in let name = result.res_name in - let ty = result.res_ty in - Hh_json.JSON_Object ( - ("name", Hh_json.JSON_String name) :: - ("type", Hh_json.JSON_String ty) :: - ("func_details", func_details_to_json result.func_details) :: - (Errors.deprecated_json_props_of_loc ~strip_root result.res_loc) - ) + let (ty_loc, ty) = result.res_ty in + (* This is deprecated for two reasons: + * 1) The props are still our legacy, flat format rather than grouped into + * "loc" and "range" properties. + * 2) It's the location of the definition of the type (the "type loc"), + * which may be interesting but should be its own field. The loc should + * instead be the range to replace (usually but not always the token + * being completed; perhaps we also want to replace the whole member + * expression, for example). That's `result.res_loc`, but we're not + * exposing it in the legacy `flow autocomplete` API; use + * LSP instead. + *) + let deprecated_loc = Errors.deprecated_json_props_of_loc ~strip_root ty_loc in + Hh_json.JSON_Object + ( ("name", Hh_json.JSON_String name) + :: ("type", Hh_json.JSON_String ty) + :: ("func_details", func_details_to_json result.func_details) + :: deprecated_loc ) let autocomplete_response_to_json ~strip_root response = - let open Hh_json in - match response with + Hh_json.( + match response with | Error error -> - JSON_Object [ - "error", JSON_String error; - "result", JSON_Array []; (* TODO: remove this? kept for BC *) - ] + JSON_Object + [ + ("error", JSON_String error); + ("result", JSON_Array []); + (* TODO: remove this? kept for BC *) + + ] | Ok completions -> - let results = List.map - (autocomplete_result_to_json ~strip_root) - completions - in - JSON_Object ["result", JSON_Array results] + let results = List.map (autocomplete_result_to_json ~strip_root) completions in + JSON_Object [("result", JSON_Array results)]) let parameter_name is_opt name = - let opt = if is_opt then "?" else "" in - (Option.value name ~default:"_") ^ opt + let opt = + if is_opt then + "?" + else + "" + in + Option.value name ~default:"_" ^ opt -let autocomplete_create_result ((name, loc), ty) = - Ty.(match ty with - | Fun {fun_params; fun_rest_param; fun_return; _} -> - let param_tys = List.map (fun (n, t, fp) -> - let param_name = parameter_name fp.prm_optional n in - let param_ty = Ty_printer.string_of_t t in - { param_name; param_ty } - ) fun_params in - let param_tys = match fun_rest_param with - | None -> param_tys - | Some (name, t) -> - let param_name = "..." 
^ parameter_name false name in - let param_ty = Ty_printer.string_of_t t in - param_tys @ [{ param_name; param_ty; }] +let lsp_completion_of_type = + Ty.( + function + | InterfaceDecl _ + | InlineInterface _ -> + Some Lsp.Completion.Interface + | ClassDecl _ -> Some Lsp.Completion.Class + | StrLit _ + | NumLit _ + | BoolLit _ -> + Some Lsp.Completion.Value + | Fun _ -> Some Lsp.Completion.Function + | TypeAlias _ + | Union _ -> + Some Lsp.Completion.Enum + | Module _ -> Some Lsp.Completion.Module + | Tup _ + | Bot _ + | Null + | Obj _ + | Inter _ + | TVar _ + | Bound _ + | Generic _ + | Any _ + | Top + | Void + | Num _ + | Str _ + | Bool _ + | Arr _ + | TypeOf _ + | Utility _ + | Mu _ -> + Some Lsp.Completion.Variable) + +let autocomplete_create_result (name, loc) (ty, ty_loc) = + let res_ty = (ty_loc, Ty_printer.string_of_t ~with_comments:false ty) in + let res_kind = lsp_completion_of_type ty in + Ty.( + match ty with + | Fun { fun_params; fun_rest_param; fun_return; _ } -> + let param_tys = + Core_list.map + ~f:(fun (n, t, fp) -> + let param_name = parameter_name fp.prm_optional n in + let param_ty = Ty_printer.string_of_t ~with_comments:false t in + { param_name; param_ty }) + fun_params in - let return = Ty_printer.string_of_t fun_return in - { res_loc = loc; - res_name = name; - res_ty = Ty_printer.string_of_t ty; - func_details = Some { param_tys; return_ty = return } } - | _ -> - { res_loc = loc; + let param_tys = + match fun_rest_param with + | None -> param_tys + | Some (name, t) -> + let param_name = "..." ^ parameter_name false name in + let param_ty = Ty_printer.string_of_t ~with_comments:false t in + param_tys @ [{ param_name; param_ty }] + in + let return = Ty_printer.string_of_t ~with_comments:false fun_return in + { + res_loc = loc; + res_kind; res_name = name; - res_ty = Ty_printer.string_of_t ty; - func_details = None } - ) - -let autocomplete_filter_members members = - SMap.filter (fun key _ -> - (* This is really for being better safe than sorry. It shouldn't happen. *) - not (is_autocomplete key) - && - (* filter out constructor, it shouldn't be called manually *) - not (key = "constructor") - && - (* strip out members from prototypes which are implicitly created for - internal reasons *) - not (Reason.is_internal_name key) - ) members - -let autocomplete_member ~ac_type cx file_sig this ac_name ac_loc docblock = Flow_js.( + res_ty; + func_details = Some { param_tys; return_ty = return }; + } + | _ -> { res_loc = loc; res_kind; res_name = name; res_ty; func_details = None }) - let this_t = resolve_type cx this in - (* Resolve primitive types to their internal class type. We do this to allow - autocompletion on these too. *) - let this_t = resolve_builtin_class cx this_t in - let result = Members.extract cx this_t in +let autocomplete_is_valid_member key = + (* This is really for being better safe than sorry. It shouldn't happen. 
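+ * (a member key would only contain the injected AUTO332 suffix if our own
+ * autocomplete token leaked into the extracted members)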
*) + (not (is_autocomplete key)) + (* filter out constructor, it shouldn't be called manually *) + && (not (key = "constructor")) + && (* strip out members from prototypes which are implicitly created for + internal reasons *) + not (Reason.is_internal_name key) - let open Hh_json in - - let result_str, t = Members.(match result with - | Success _ -> "SUCCESS", this - | SuccessModule _ -> "SUCCESS", this - | FailureNullishType -> "FAILURE_NULLABLE", this - | FailureAnyType -> "FAILURE_NO_COVERAGE", this - | FailureUnhandledType t -> "FAILURE_UNHANDLED_TYPE", t) in - - let json_data_to_log = JSON_Object [ - "ac_type", JSON_String ac_type; - "ac_name", JSON_String ac_name; - (* don't need to strip root for logging *) - "ac_loc", JSON_Object (Errors.deprecated_json_props_of_loc ~strip_root:None ac_loc); - "loc", Reason.json_of_loc ac_loc; - "docblock", Docblock.json_of_docblock docblock; - "result", JSON_String result_str; - "type", Debug_js.json_of_t ~depth:3 cx t; - ] in - - match Members.to_command_result result with - | Error error -> Error (error, Some json_data_to_log) - | Ok result_map -> - let options = { - Ty_normalizer_env. - fall_through_merged = true; - expand_internal_types = true; - expand_type_aliases = false; - flag_shadowed_type_params = true; - } in - let file = Context.file cx in - let type_table = Context.type_table cx in - let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~type_table ~file_sig in - let result = result_map - |> autocomplete_filter_members - |> SMap.mapi (fun name (_id_loc, t) -> ((name, Type.loc_of_t t), t)) - |> SMap.values - |> Ty_normalizer.from_types ~options ~genv - |> Core_list.filter_map ~f:(function - | l, Ok s -> Some (l, s) - | _ -> None - ) - |> List.map autocomplete_create_result - |> List.rev in - Ok (result, Some json_data_to_log) -) +let autocomplete_member + ~reader + ~exclude_proto_members + ~ac_type + cx + file_sig + typed_ast + this + ac_name + ac_loc + ac_trigger + docblock = + let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in + let result = Members.extract ~exclude_proto_members cx this in + Hh_json.( + let (result_str, t) = + Members.( + match result with + | Success _ -> ("SUCCESS", this) + | SuccessModule _ -> ("SUCCESS", this) + | FailureNullishType -> ("FAILURE_NULLABLE", this) + | FailureAnyType -> ("FAILURE_NO_COVERAGE", this) + | FailureUnhandledType t -> ("FAILURE_UNHANDLED_TYPE", t) + | FailureUnhandledMembers t -> ("FAILURE_UNHANDLED_MEMBERS", t)) + in + let json_data_to_log = + JSON_Object + [ + ("ac_type", JSON_String ac_type); + ("ac_name", JSON_String ac_name); + (* don't need to strip root for logging *) + ("ac_loc", JSON_Object (Errors.deprecated_json_props_of_loc ~strip_root:None ac_loc)); + ("ac_trigger", JSON_String (Option.value ac_trigger ~default:"None")); + ("loc", Reason.json_of_loc ~offset_table:None ac_loc); + ("docblock", Docblock.json_of_docblock docblock); + ("result", JSON_String result_str); + ("type", Debug_js.json_of_t ~depth:3 cx t); + ] + in + match Members.to_command_result result with + | Error error -> Error (error, Some json_data_to_log) + | Ok result_map -> + let options = + { + Ty_normalizer_env.fall_through_merged = true; + expand_internal_types = true; + expand_type_aliases = false; + flag_shadowed_type_params = true; + preserve_inferred_literal_types = false; + evaluate_type_destructors = true; + optimize_types = true; + omit_targ_defaults = false; + merge_bot_and_any_kinds = true; + } + in + let file = Context.file cx in + let genv = 
Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~typed_ast ~file_sig in + let rev_result = + SMap.fold + (fun name (_id_loc, t) acc -> + if not (autocomplete_is_valid_member name) then + acc + else + let loc = Type.loc_of_t t |> loc_of_aloc ~reader in + ((name, loc), t) :: acc) + result_map + [] + in + let result = + rev_result + |> Ty_normalizer.from_types ~options ~genv + |> Core_list.rev_filter_map ~f:(function + | ((name, ty_loc), Ok ty) -> + Some (autocomplete_create_result (name, ac_loc) (ty, ty_loc)) + | _ -> None) + in + Ok (result, Some json_data_to_log)) (* env is all visible bound names at cursor *) -let autocomplete_id cx file_sig env = - let result = SMap.fold (fun name entry acc -> - (* Filter out internal environment variables except for this and +let autocomplete_id ~reader cx ac_loc ac_trigger file_sig env typed_ast = + let ac_loc = loc_of_aloc ~reader ac_loc |> remove_autocomplete_token_from_loc in + let (result, errors) = + SMap.fold + (fun name entry (acc, errors) -> + (* Filter out internal environment variables except for this and super. *) - let is_this = name = (Reason.internal_name "this") in - let is_super = name = (Reason.internal_name "super") in - if not (is_this || is_super) && Reason.is_internal_name name - then acc - else ( - let (loc, name) = - (* renaming of this/super *) - if is_this - then (Loc.none, "this") - else if is_super - then (Loc.none, "super") - else (Scope.Entry.entry_loc entry, name) + let is_this = name = Reason.internal_name "this" in + let is_super = name = Reason.internal_name "super" in + if (not (is_this || is_super)) && Reason.is_internal_name name then + (acc, errors) + else + let (ty_loc, name) = + (* renaming of this/super *) + if is_this then + (Loc.none, "this") + else if is_super then + (Loc.none, "super") + else + (Scope.Entry.entry_loc entry |> loc_of_aloc ~reader, name) + in + let options = + { + Ty_normalizer_env.fall_through_merged = true; + expand_internal_types = true; + expand_type_aliases = false; + flag_shadowed_type_params = true; + preserve_inferred_literal_types = false; + evaluate_type_destructors = true; + optimize_types = true; + omit_targ_defaults = false; + merge_bot_and_any_kinds = true; + } + in + let file = Context.file cx in + let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~typed_ast ~file_sig in + let type_ = Scope.Entry.actual_type entry in + match Ty_normalizer.from_type ~options ~genv type_ with + | Ok ty -> + let result = autocomplete_create_result (name, ac_loc) (ty, ty_loc) in + (result :: acc, errors) + | Error err -> (acc, err :: errors)) + env + ([], []) + in + let json_data_to_log = + Hh_json.( + let result_str = + match (result, errors) with + | (_, []) -> "SUCCESS" + | ([], _) -> "FAILURE_NORMALIZER" + | (_, _) -> "PARTIAL" in - let options = { - Ty_normalizer_env. 
- fall_through_merged = true; - expand_internal_types = true; - expand_type_aliases = false; - flag_shadowed_type_params = true; - } in - let file = Context.file cx in - let type_table = Context.type_table cx in - let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~type_table ~file_sig in - let type_ = Scope.Entry.actual_type entry in - match Ty_normalizer.from_type ~options ~genv type_ with - | Ok t -> autocomplete_create_result ((name, loc), t) :: acc - | Error _ -> acc - ) - ) env [] in - Ok (result, None) + JSON_Object + [ + ("ac_type", JSON_String "Acid"); + ("ac_trigger", JSON_String (Option.value ac_trigger ~default:"None")); + ("result", JSON_String result_str); + ("count", JSON_Number (result |> List.length |> string_of_int)); + ( "errors", + JSON_Array + (Core_list.rev_map errors ~f:(fun err -> + JSON_String (Ty_normalizer.error_to_string err))) ); + ]) + in + Ok (result, Some json_data_to_log) (* Similar to autocomplete_member, except that we're not directly given an object type whose members we want to enumerate: instead, we are given a component class and we want to enumerate the members of its declared props type, so we need to extract that and then route to autocomplete_member. *) -let autocomplete_jsx cx file_sig cls ac_name ac_loc docblock = Flow_js.( +let autocomplete_jsx ~reader cx file_sig typed_ast cls ac_name ac_loc ac_trigger docblock = + Flow_js.( let reason = Reason.mk_reason (Reason.RCustom ac_name) ac_loc in let component_instance = mk_instance cx reason cls in - let props_object = Tvar.mk_where cx reason (fun tvar -> - let use_op = Type.Op Type.UnknownUse in - flow cx ( - component_instance, - Type.GetPropT (use_op, reason, Type.Named (reason, "props"), tvar)) - ) in - autocomplete_member ~ac_type:"Acjsx" cx file_sig props_object ac_name ac_loc docblock - ) + let props_object = + Tvar.mk_where cx reason (fun tvar -> + let use_op = Type.Op Type.UnknownUse in + flow + cx + (component_instance, Type.GetPropT (use_op, reason, Type.Named (reason, "props"), tvar))) + in + (* Only include own properties, so we don't suggest things like `hasOwnProperty` as potential JSX properties *) + autocomplete_member + ~reader + ~exclude_proto_members:true + ~ac_type:"Acjsx" + cx + file_sig + typed_ast + props_object + ac_name + ac_loc + ac_trigger + docblock) -let autocomplete_get_results cx file_sig state docblock = +let autocomplete_get_results ~reader cx file_sig typed_ast state trigger_character docblock = + let file_sig = File_sig.abstractify_locs file_sig in match !state with - | Some { ac_type = Acid (env); _; } -> - autocomplete_id cx file_sig env - | Some { ac_name; ac_loc; ac_type = Acmem (this); } -> - autocomplete_member ~ac_type:"Acmem" cx file_sig this ac_name ac_loc docblock - | Some { ac_name; ac_loc; ac_type = Acjsx (cls); } -> - autocomplete_jsx cx file_sig cls ac_name ac_loc docblock - | None -> Ok ([], None) + | Some { ac_loc; ac_type = Acid env; _ } -> + autocomplete_id ~reader cx ac_loc trigger_character file_sig env typed_ast + | Some { ac_name; ac_loc; ac_type = Acmem this } -> + autocomplete_member + ~reader + ~exclude_proto_members:false + ~ac_type:"Acmem" + cx + file_sig + typed_ast + this + ac_name + ac_loc + trigger_character + docblock + | Some { ac_name; ac_loc; ac_type = Acjsx cls } -> + autocomplete_jsx ~reader cx file_sig typed_ast cls ac_name ac_loc trigger_character docblock + | Some { ac_name = _; ac_loc = _; ac_type = Ackey } -> + let json_data_to_log = + Hh_json.( + JSON_Object + [ + ("ac_type", JSON_String "Ackey"); + ("ac_trigger", 
JSON_String (Option.value trigger_character ~default:"None")); + ]) + in + Ok ([], Some json_data_to_log) + | None -> + let json_data_to_log = + Hh_json.( + JSON_Object + [ + ("ac_type", JSON_String "None"); + ("ac_trigger", JSON_String (Option.value trigger_character ~default:"None")); + ]) + in + Ok ([], Some json_data_to_log) diff --git a/src/services/autocomplete/autocompleteService_js.mli b/src/services/autocomplete/autocompleteService_js.mli index 45907731ade..9b6a68b265d 100644 --- a/src/services/autocomplete/autocompleteService_js.mli +++ b/src/services/autocomplete/autocompleteService_js.mli @@ -1,21 +1,25 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val autocomplete_get_results: +val autocomplete_get_results : + reader:Parsing_heaps.Reader.reader -> Context.t -> - File_sig.t -> + File_sig.With_Loc.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.program -> Autocomplete_js.autocomplete_state option ref -> + string option -> Docblock.t -> - (ServerProt.Response.complete_autocomplete_result list * Hh_json.json option, - string * Hh_json.json option) Core_result.t + ( ServerProt.Response.complete_autocomplete_result list * Hh_json.json option, + string * Hh_json.json option ) + Core_result.t -val add_autocomplete_token: string -> int -> int -> string +val add_autocomplete_token : string -> int -> int -> string -val autocomplete_response_to_json: +val autocomplete_response_to_json : strip_root:Path.t option -> (ServerProt.Response.complete_autocomplete_result list, string) result -> Hh_json.json diff --git a/src/services/autocomplete/autocomplete_js.ml b/src/services/autocomplete/autocomplete_js.ml index 88f1500d9dc..841aaddf7ce 100644 --- a/src/services/autocomplete/autocomplete_js.ml +++ b/src/services/autocomplete/autocomplete_js.ml @@ -1,70 +1,68 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type autocomplete_type = -| Acid of Scope.Entry.t SMap.t -| Acmem of Type.t -| Acjsx of Type.t + | Acid of Scope.Entry.t SMap.t + | Ackey (* TODO: track which object, so we can complete the keys. 
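+ (for example, suggesting the known property names of the expected object type);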
+ for now, just classifying the kind of autocomplete *) + | Acmem of Type.t + | Acjsx of Type.t type autocomplete_state = { ac_name: string; - ac_loc: Loc.t; + ac_loc: ALoc.t; ac_type: autocomplete_type; } let autocomplete_suffix = "AUTO332" + let suffix_len = String.length autocomplete_suffix + let is_autocomplete x = - String.length x >= suffix_len && + String.length x >= suffix_len + && let suffix = String.sub x (String.length x - suffix_len) suffix_len in suffix = autocomplete_suffix -let autocomplete_id state _cx ac_name ac_loc = - if is_autocomplete ac_name - then ( - state := Some ({ - ac_name; - ac_loc; - ac_type = Acid (Env.all_entries ()); - }); +let autocomplete_id from_trigger_character state _cx ac_name ac_loc = + if is_autocomplete ac_name && not from_trigger_character then ( + state := Some { ac_name; ac_loc; ac_type = Acid (Env.all_entries ()) }; + true + ) else + false + +let autocomplete_object_key from_trigger_character state _cx ac_name ac_loc = + if is_autocomplete ac_name && not from_trigger_character then ( + state := Some { ac_name; ac_loc; ac_type = Ackey }; true ) else false let autocomplete_member state _cx ac_name ac_loc this_t = - if is_autocomplete ac_name - then ( - state := Some ({ - ac_name; - ac_loc; - ac_type = Acmem (this_t); - }); + if is_autocomplete ac_name then ( + state := Some { ac_name; ac_loc; ac_type = Acmem this_t }; true ) else false let autocomplete_jsx state _cx ac_name ac_loc class_t = - if is_autocomplete ac_name - then ( - state := Some ({ - ac_name; - ac_loc; - ac_type = Acjsx (class_t); - }); + if is_autocomplete ac_name then ( + state := Some { ac_name; ac_loc; ac_type = Acjsx class_t }; true ) else false -let autocomplete_set_hooks () = +let autocomplete_set_hooks ~trigger_character = let state = ref None in - Type_inference_hooks_js.set_id_hook (autocomplete_id state); + Type_inference_hooks_js.set_id_hook (autocomplete_id (trigger_character <> None) state); + Type_inference_hooks_js.set_obj_prop_decl_hook + (autocomplete_object_key (trigger_character <> None) state); Type_inference_hooks_js.set_member_hook (autocomplete_member state); Type_inference_hooks_js.set_jsx_hook (autocomplete_jsx state); state -let autocomplete_unset_hooks () = - Type_inference_hooks_js.reset_hooks () +let autocomplete_unset_hooks () = Type_inference_hooks_js.reset_hooks () diff --git a/src/services/autocomplete/dune b/src/services/autocomplete/dune new file mode 100644 index 00000000000..870af76c680 --- /dev/null +++ b/src/services/autocomplete/dune @@ -0,0 +1,10 @@ +(library + (name flow_service_autocomplete) + (wrapped false) + (libraries + flow_server_protocol + flow_state_heaps_parsing + flow_typing + collections ; hack + ) +) diff --git a/src/services/flowFileGen/flowFileGen.ml b/src/services/flowFileGen/flowFileGen.ml deleted file mode 100644 index d9efe7caf99..00000000000 --- a/src/services/flowFileGen/flowFileGen.ml +++ /dev/null @@ -1,397 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - - -let spf = Printf.sprintf - -let exports_map cx module_name = - let module_map = Context.module_map cx in - match SMap.get module_name module_map with - | Some module_t -> ( - let module_t = Flow_js.resolve_type cx module_t in - match Flow_js.Members.extract cx module_t with - | Flow_js.Members.SuccessModule (named, cjs) -> (named, cjs) - | _ -> failwith ( - spf "Failed to extract the exports of %s" (Type.string_of_ctor module_t) - ) - ) - | None -> - failwith (spf "Unable to extract %s from the module_map!" module_name) - -let rec mark_declared_classes name t env = Codegen.(Type.( - match resolve_type t env with - | ThisClassT (_, DefT (_, InstanceT (_, _, _, {class_id; _;}))) -> - set_class_name class_id name env - | DefT (_, PolyT (_, t, _)) -> - mark_declared_classes name t env - | _ -> - env -)) - -let gen_imports env = - (** - * Print import statements. - * - * TODO: For now we just print all import statements, but it would be nice to - * only print the ones that are actually used by the declares. - *) - let import_stmts = Context.import_stmts env.Codegen.flow_cx in - let env = List.fold_left (fun env stmt -> - let open Flow_ast in - let open Statement in - let open ImportDeclaration in - let {importKind; source; specifiers; default;} = stmt in - let default = Option.map ~f:(fun (_, name) -> name) default in - let named, ns = match specifiers with - | Some (ImportNamespaceSpecifier (_, (_, name))) -> - ([], Some name) - | Some (ImportNamedSpecifiers xs) -> - (xs, None) - | None -> - ([], None) - in - let _, { Flow_ast.StringLiteral.value = source; _ } = source in - - let env = Codegen.add_str "import " env in - let env = - match importKind with - | ImportType -> Codegen.add_str "type " env - | ImportTypeof -> Codegen.add_str "typeof " env - | ImportValue -> env - in - let env = - match default with - | Some default -> - let env = Codegen.add_str default env in - if ns <> None || List.length named > 0 - then Codegen.add_str ", " env - else env - | None -> env - in - let env = - match ns with - | Some ns -> - Codegen.add_str "* as " env |> Codegen.add_str ns - | None -> env - in - let env = if List.length named = 0 then env else ( - let env = Codegen.add_str "{" env in - let env = - Codegen.gen_separated_list named ", " (fun {local; remote; kind;} env -> - let (_, remote) = remote in - match local with - | Some (_, local) when local <> remote -> - let env = - match kind with - | Some ImportType -> Codegen.add_str "type " env - | Some ImportTypeof -> Codegen.add_str "typeof " env - | Some ImportValue | None -> env - in - Codegen.add_str remote env - |> Codegen.add_str " as " - |> Codegen.add_str local - | Some _ | None -> Codegen.add_str remote env - ) env - in - Codegen.add_str "}" env - ) in - Codegen.add_str " from \"" env - |> Codegen.add_str source - |> Codegen.add_str "\";\n" - ) env import_stmts in - - (** - * For each imported type, mark any imported class types so that they are not - * re-declared. - *) - let imported_ts = Context.imported_ts env.Codegen.flow_cx in - (imported_ts, SMap.fold mark_declared_classes imported_ts env) - -let gen_class_body = - let gen_field ~static field_name p env = Codegen.(Type.( - (** - * All classes have an implicit `static name: string` field on them. - * No need to re-print this. 
- *) - let is_static_name_field = static && field_name = "name" && ( - match p with - | Field (_, t, _) -> - (match resolve_type t env with - | DefT (_, StrT AnyLiteral) -> true - | _ -> false) - | _ -> false - ) in - - let is_empty_constructor = not static && field_name = "constructor" && ( - match p with - | Method (_, t) -> - (match resolve_type t env with - | DefT (_, FunT (_, _, { params; return_t; _ })) -> - (params = []) && ( - match resolve_type return_t env with - | DefT (_, VoidT) -> true - | _ -> false - ) - | _ -> false) - | _ -> false - ) in - - if is_static_name_field || is_empty_constructor - then env - else ( - add_str " " env - |> gen_if static (add_str "static ") - |> gen_prop field_name p - |> add_str ";\n" - ) - )) in - - fun static fields methods env -> Codegen.( - let static_fields = Type.( - match static with - | DefT (_, ObjT {props_tmap; _}) -> - find_props props_tmap env - | t -> failwith ( - spf - "Internal Error: Unexpected class static type: %s" - (string_of_ctor t) - ) - ) in - - let static_fields_count = SMap.cardinal static_fields in - let fields_count = SMap.cardinal fields in - let methods_count = SMap.cardinal methods in - let total_members_count = - static_fields_count + fields_count + methods_count - in - - let env = add_str " {" env in - if total_members_count = 0 then add_str "}" env else ( - add_str "\n" env - |> SMap.fold (gen_field ~static:true) static_fields - |> add_str "\n" - |> SMap.fold (gen_field ~static:false) fields - |> SMap.fold (gen_field ~static:false) methods - |> add_str "}" - ) -) - -class unexported_class_visitor = object(self) - inherit [Codegen.codegen_env * Type.TypeSet.t * ISet.t] Type_visitor.t as super - - method! tvar cx pole (env, seen, imported_classids) r id = - let t = Codegen.resolve_type (Type.OpenT (r, id)) env in - self#type_ cx pole (env, seen, imported_classids) t - - method! type_ cx pole (env, seen, imported_classids) t = Codegen.(Type.( - if TypeSet.mem t seen then (env, seen, imported_classids) else ( - let seen = TypeSet.add t seen in - match t with - (* class_id = 0 is top of the inheritance chain *) - | DefT (_, InstanceT (_, _, _, {class_id; _;})) - when class_id = 0 || ISet.mem class_id imported_classids -> - (env, seen, imported_classids) - - | DefT (r, InstanceT (static, extends, implements, { - class_id; - own_props; - proto_props; - structural; - _ - })) when not (has_class_name class_id env || Reason.is_lib_reason r) -> - let class_name = next_class_name env in - - (** - * Add to the list of declared classes *FIRST* to prevent inifite loops - * on recursive references to this class from within itself. 
- *) - let env = set_class_name class_id class_name env in - let (env, seen, imported_classids) = super#type_ cx pole (env, seen, imported_classids) t in - - let env = env - |> add_str "declare " - |> add_str (if structural then "interface " else "class ") - |> add_str class_name - in - - let env = - match resolve_type extends env with - | ObjProtoT _ -> env - | DefT (_, ClassT t) when ( - match resolve_type t env with | ObjProtoT _ -> true | _ -> false - ) -> env - | ThisTypeAppT (_, extends, _, None) -> - add_str " extends " env |> gen_type extends - | ThisTypeAppT (_, extends, _, Some ts) -> - add_str " extends " env - |> gen_type extends - |> add_str "<" - |> gen_separated_list ts ", " gen_type - |> add_str ">" - | extends -> add_str " extends " env |> gen_type extends - in - - let env = match implements with - | [] -> env - | ts -> env - |> add_str " implements " - |> gen_separated_list ts ", " gen_type - in - - let fields = find_props own_props env in - let methods = find_props proto_props env in - let env = gen_class_body static fields methods env |> add_str "\n" in - (env, seen, imported_classids) - - | t -> super#type_ cx pole (env, seen, imported_classids) t - ) - )) -end - -let gen_local_classes = - let visitor = new unexported_class_visitor in - let gen_unexported_classes imported_classids _name t env = - let (env, _, _) = - visitor#type_ - env.Codegen.flow_cx - Type.Neutral - (env, Type.TypeSet.empty, imported_classids) - t - in - env - in - - fun named_exports cjs_export env -> - let (imported_ts, env) = gen_imports env in - - (* Find and mark all the declared *exported* classes first *) - let env = SMap.fold mark_declared_classes named_exports env in - - (** - * Codegen any classes that are referenced but not exported. We're careful - * to not codegen classes that are referenced but *imported* as well. 
- *) - let all_exports = - match cjs_export with - | None -> named_exports - | Some cjs_t -> SMap.add "*CJS*" cjs_t named_exports - in - let rec fold_imported_classid _name t set = Type.( - match Codegen.resolve_type t env with - | ThisClassT (_, DefT (_, InstanceT (_, _, _, {class_id; _;}))) -> - ISet.add class_id set - | DefT (_, PolyT (_, t, _)) -> fold_imported_classid _name t set - | _ -> set - ) in - let imported_classids = - SMap.fold fold_imported_classid imported_ts ISet.empty - in - SMap.fold (gen_unexported_classes imported_classids) all_exports env - -let gen_named_exports = - let rec fold_named_export name t env = Codegen.(Type.( - let env = ( - match resolve_type t env with - | DefT (_, FunT (_static, _prototype, { - params; - rest_param; - return_t; - _; - })) -> - let env = - if name = "default" - then add_str "declare export default function" env - else add_str "declare export function " env |> add_str name - in - gen_tparams_list env - |> add_str "(" - |> gen_func_params params rest_param - |> add_str "): " - |> gen_type return_t - |> add_str ";" - - | DefT (_, PolyT (tparams, t, _)) -> - add_tparams tparams env |> fold_named_export name t - - | ThisClassT (_, DefT (_, InstanceT (static, super, implements, { - own_props; - proto_props; - has_unknown_react_mixins = _; - structural; - _; - }))) -> - let fields = Codegen.find_props own_props env in - let methods = Codegen.find_props proto_props env in - let env = add_str "declare export " env in - let env = add_str ( - if structural then "interface" - else if name = "default" then "default class" - else spf "class %s" name - ) env in - let env = gen_tparams_list env in - let env = - match Codegen.resolve_type super env with - | ObjProtoT _ -> env - | (ThisTypeAppT _) as t -> add_str " extends " env |> gen_type t - | _ -> failwith ( - spf "Unexpected super type for class: %s" (string_of_ctor super) - ) - in - let env = match implements with - | [] -> env - | ts -> env - |> add_str " implements " - |> gen_separated_list ts ", " gen_type - in - gen_class_body static fields methods env - - | DefT (_, TypeT (_, t)) -> - add_str "export type " env - |> add_str name - |> gen_tparams_list - |> add_str " = " - |> gen_type t - |> add_str ";" - - | t -> - let env = - if name = "default" - then add_str "declare export default " env - else add_str "declare export var " env |> add_str name |> add_str ": " - in - gen_type t env |> add_str ";" - ) in - add_str "\n" env - )) in - SMap.fold fold_named_export - -let gen_exports named_exports cjs_export env = - match cjs_export with - | None -> gen_named_exports named_exports env - | Some cjs_t -> - let type_exports = SMap.filter Type.(fun _name t -> - let t = match t with OpenT _ -> Codegen.resolve_type t env | _ -> t in - match t with - | DefT (_, TypeT _) | DefT (_, PolyT (_, DefT (_, TypeT _), _)) -> true - | _ -> false - ) named_exports in - gen_named_exports type_exports env - |> Codegen.add_str "\ndeclare module.exports: " - |> Codegen.gen_type cjs_t - |> Codegen.add_str ";" - -let flow_file cx = - let module_ref = Context.module_ref cx in - let (named_exports, cjs_export) = exports_map cx module_ref in - (* Drop the loc *) - let named_exports = SMap.map snd named_exports in - - Codegen.mk_env cx - |> Codegen.add_str "// @flow\n\n" - |> gen_local_classes named_exports cjs_export - |> gen_exports named_exports cjs_export - |> Codegen.to_string diff --git a/src/services/flowFileGen/flowFileGen.mli b/src/services/flowFileGen/flowFileGen.mli deleted file mode 100644 index 
cab7f8c09c0..00000000000 --- a/src/services/flowFileGen/flowFileGen.mli +++ /dev/null @@ -1,8 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -val flow_file: Context.t -> string diff --git a/src/services/get_def/dune b/src/services/get_def/dune new file mode 100644 index 00000000000..c0bc554fc0d --- /dev/null +++ b/src/services/get_def/dune @@ -0,0 +1,13 @@ +(library + (name flow_service_get_def) + (wrapped false) + (libraries + flow_parser_utils + flow_procs + flow_server_env + flow_service_inference + flow_service_inference_module + flow_state_readers + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/services/get_def/findRefsUtils.ml b/src/services/get_def/findRefsUtils.ml new file mode 100644 index 00000000000..f7870eeefaa --- /dev/null +++ b/src/services/get_def/findRefsUtils.ml @@ -0,0 +1,79 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Utils_js +module Result = Core_result + +let ( >>= ) = Result.( >>= ) + +type ast_info = (Loc.t, Loc.t) Flow_ast.program * File_sig.With_Loc.t * Docblock.t + +let compute_docblock file content = + Parsing_service_js.( + let max_tokens = docblock_max_tokens in + let (_errors, docblock) = parse_docblock ~max_tokens file content in + docblock) + +(* We use compute_ast_result (as opposed to get_ast_result) when the file contents we have might be + * different from what's on disk (and what is therefore stored in shared memory). This can be the + * case for local find-refs requests, where the client may pipe in file contents rather than just + * specifying a filename. For global find-refs, we assume that all dependent files are the same as + * what's on disk, so we can grab the AST from the heap instead. 
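+ * (compute_ast_result below re-parses the given contents, while get_ast_result reads
+ * the previously parsed AST, file sig, and docblock out of Parsing_heaps.)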
*) +let compute_ast_result options file content = + let docblock = compute_docblock file content in + Parsing_service_js.( + let types_mode = TypesAllowed in + let use_strict = true in + let parse_options = make_parse_options ~fail:false ~types_mode ~use_strict docblock options in + let result = do_parse ~parse_options ~info:docblock content file in + match result with + | Parse_ok parse_ok -> + let (ast, file_sig) = basic parse_ok in + Ok (ast, file_sig, docblock) + (* The parse should not fail; we have passed ~fail:false *) + | Parse_fail _ -> Error "Parse unexpectedly failed" + | Parse_skip _ -> Error "Parse unexpectedly skipped") + +let get_ast_result ~reader file : + ((Loc.t, Loc.t) Flow_ast.program * File_sig.With_Loc.t * Docblock.t, string) result = + Parsing_heaps.( + let get_result f kind = + let error = + Printf.sprintf "Expected %s to be available for %s" kind (File_key.to_string file) + in + Result.of_option ~error (f file) + in + let ast_result = get_result (Reader.get_ast ~reader) "AST" in + let file_sig_result = get_result (Reader.get_file_sig ~reader) "file sig" in + let docblock_result = get_result (Reader.get_docblock ~reader) "docblock" in + ast_result + >>= fun ast -> + file_sig_result + >>= (fun file_sig -> docblock_result >>= (fun docblock -> Ok (ast, file_sig, docblock)))) + +let get_all_dependents ~reader options workers env file_key content = + let docblock = compute_docblock file_key content in + let reader = Abstract_state_reader.State_reader reader in + let modulename = Module_js.exported_module ~options file_key docblock in + let%lwt direct_deps = + Dep_service.calc_direct_dependents + ~reader + workers + (* Surprisingly, creating this set doesn't seem to cause horrible performance but it's + probably worth looking at if you are searching for optimizations *) + ~candidates:ServerEnv.(CheckedSet.all !env.checked_files) + ~root_files:(FilenameSet.singleton file_key) + ~root_modules:(Modulename.Set.singleton modulename) + in + let dependency_info = !env.ServerEnv.dependency_info in + let all_dependency_graph = Dependency_info.all_dependency_graph dependency_info in + let dependency_graph = Dependency_info.dependency_graph dependency_info in + Lwt.return + (Pure_dep_graph_operations.calc_all_dependents + ~dependency_graph + ~all_dependency_graph + direct_deps) diff --git a/src/services/get_def/findRefsUtils.mli b/src/services/get_def/findRefsUtils.mli new file mode 100644 index 00000000000..9f8c0885d0b --- /dev/null +++ b/src/services/get_def/findRefsUtils.mli @@ -0,0 +1,25 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type ast_info = (Loc.t, Loc.t) Flow_ast.program * File_sig.With_Loc.t * Docblock.t + +val compute_docblock : File_key.t -> string (* content *) -> Docblock.t + +val compute_ast_result : + Options.t -> File_key.t -> string (* content *) -> (ast_info, string) result + +val get_ast_result : reader:State_reader.t -> File_key.t -> (ast_info, string) result + +val get_all_dependents : + reader:State_reader.t -> + Options.t -> + MultiWorkerLwt.worker list option -> + ServerEnv.env ref -> + File_key.t -> + string (* content *) -> + (* transitive dependents *) + Utils_js.FilenameSet.t Lwt.t diff --git a/src/services/get_def/getDefUtils.ml b/src/services/get_def/getDefUtils.ml new file mode 100644 index 00000000000..e114d09da8d --- /dev/null +++ b/src/services/get_def/getDefUtils.ml @@ -0,0 +1,512 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Utils_js +open Parsing_heaps_utils +module Result = Core_result + +let ( >>= ) = Result.( >>= ) + +let ( >>| ) = Result.( >>| ) + +(* The default visitor does not provide all of the context we need when visiting an object key. In + * particular, we need the location of the enclosing object literal. *) +class ['acc] object_key_visitor ~init = + object (this) + inherit ['acc, Loc.t] Flow_ast_visitor.visitor ~init as super + + method! expression (exp : (Loc.t, Loc.t) Flow_ast.Expression.t) = + Flow_ast.Expression.( + begin + match exp with + | (loc, Object x) -> this#visit_object_literal loc x + | _ -> () + end; + super#expression exp) + + method private visit_object_literal + (loc : Loc.t) (obj : (Loc.t, Loc.t) Flow_ast.Expression.Object.t) = + Flow_ast.Expression.Object.( + let get_prop_key = + Property.( + function + | Init { key; _ } + | Method { key; _ } + | Get { key; _ } + | Set { key; _ } -> + key) + in + let { properties; comments = _ } = obj in + properties + |> List.iter (function + | SpreadProperty _ -> () + | Property (_, prop) -> prop |> get_prop_key |> this#visit_object_key loc)) + + method private visit_object_key + (_literal_loc : Loc.t) (_key : (Loc.t, Loc.t) Flow_ast.Expression.Object.Property.key) = + () + end + +module ObjectKeyAtLoc : sig + (* Given a location, returns Some (enclosing_literal_loc, prop_loc, name) if the given location + * points to an object literal key. The first location returned is the location for the entire + * enclosing object literal. This is because later, we need to figure out which types are related + * to this object literal which is easier to do when we have the location of the actual object + * literal than if we only had the location of a single key. *) + val get : (Loc.t, Loc.t) Flow_ast.program -> Loc.t -> (Loc.t * Loc.t * string) option +end = struct + class object_key_finder target_loc = + object (this) + inherit [(Loc.t * Loc.t * string) option] object_key_visitor ~init:None + + method! 
private visit_object_key + (literal_loc : Loc.t) (key : (Loc.t, Loc.t) Flow_ast.Expression.Object.Property.key) = + Flow_ast.Expression.Object.( + match key with + | Property.Identifier (prop_loc, { Flow_ast.Identifier.name; comments = _ }) + when Loc.contains prop_loc target_loc -> + this#set_acc (Some (literal_loc, prop_loc, name)) + | _ -> ()) + end + + let get ast target_loc = + let finder = new object_key_finder target_loc in + finder#eval finder#program ast +end + +(* If the given type refers to an object literal, return the location of the object literal. + * Otherwise return None *) +let get_object_literal_loc ~reader ty : Loc.t option = + Type.( + Reason.( + let reason_desc = + reason_of_t ty (* TODO look into unwrap *) |> desc_of_reason ~unwrap:false + in + match reason_desc with + | RObjectLit -> Some (Type.def_loc_of_t ty |> loc_of_aloc ~reader) + | _ -> None)) + +type def_kind = + (* Use of a property, e.g. `foo.bar`. Includes type of receiver (`foo`) and name of the property + * `bar` *) + | Use of Type.t * string + (* In a class, where a property/method is defined. Includes the type of the class and the name + of the property. *) + | Class_def of Type.t * string (* name *) * bool (* static *) + (* In an object type. Includes the location of the property definition and its name. *) + | Obj_def of Loc.t * string (* name *) + (* List of types that the object literal flows into directly, as well as the name of the + * property. *) + | Use_in_literal of Type.t Nel.t * string + +(* name *) + +let set_def_loc_hook ~reader prop_access_info literal_key_info target_loc = + let set_prop_access_info new_info = + let set_ok info = prop_access_info := Ok (Some info) in + let set_err err = prop_access_info := Error err in + match !prop_access_info with + | Error _ -> () + | Ok None -> prop_access_info := Ok (Some new_info) + | Ok (Some info) -> + begin + match (info, new_info) with + | (Use _, Use _) + | (Class_def _, Class_def _) + | (Obj_def _, Obj_def _) -> + (* Due to generate_tests, we sometimes see hooks firing multiple times for the same + * location. This is innocuous and we should take the last result. *) + set_ok new_info + (* Literals can flow into multiple types. Include them all. *) + | (Use_in_literal (types, name), Use_in_literal (new_types, new_name)) -> + if name = new_name then + set_ok (Use_in_literal (Nel.rev_append new_types types, name)) + else + set_err "Names did not match" + (* We should not see mismatches. 
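+ * A mismatch would mean hooks of two different kinds (say, Use and Class_def) fired
+ * for the same request; the catch-all cases below turn that into an error.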
*) + | (Use _, _) + | (Class_def _, _) + | (Obj_def _, _) + | (Use_in_literal _, _) -> + set_err "Unexpected mismatch between definition kind" + end + in + let use_hook ret _ctxt name loc ty = + let loc = loc_of_aloc ~reader loc in + if Loc.contains loc target_loc then set_prop_access_info (Use (ty, name)); + ret + in + let class_def_hook _ctxt ty static name loc = + let loc = loc_of_aloc ~reader loc in + if Loc.contains loc target_loc then set_prop_access_info (Class_def (ty, name, static)) + in + let obj_def_hook _ctxt name loc = + let loc = loc_of_aloc ~reader loc in + if Loc.contains loc target_loc then set_prop_access_info (Obj_def (loc, name)) + in + let export_named_hook name loc = + let loc = loc_of_aloc ~reader loc in + if Loc.contains loc target_loc then set_prop_access_info (Obj_def (loc, name)) + in + let obj_to_obj_hook _ctxt obj1 obj2 = + match (get_object_literal_loc ~reader obj1, literal_key_info) with + | (Some loc, Some (target_loc, _, name)) when loc = target_loc -> + Type.( + begin + match obj2 with + | DefT (_, _, ObjT _) -> set_prop_access_info (Use_in_literal (Nel.one obj2, name)) + | _ -> () + end) + | _ -> () + in + Type_inference_hooks_js.set_member_hook (use_hook false); + Type_inference_hooks_js.set_call_hook (use_hook ()); + Type_inference_hooks_js.set_class_member_decl_hook class_def_hook; + Type_inference_hooks_js.set_obj_type_prop_decl_hook obj_def_hook; + Type_inference_hooks_js.set_export_named_hook export_named_hook; + Type_inference_hooks_js.set_obj_to_obj_hook obj_to_obj_hook + +let unset_hooks () = Type_inference_hooks_js.reset_hooks () + +type single_def_info = + | Class of Loc.t + (* An object was found. *) + | Object of Loc.t + +(* If there are multiple relevant definition locations (e.g. the request was issued on an object + * literal which is associated with multiple types) then there will be multiple locations in no + * particular order. *) +type property_def_info = single_def_info Nel.t + +type def_info = + | Property of property_def_info * string (* name *) + | CJSExport of Loc.t + +let display_name_of_def_info = function + | Property (_, name) -> name + | CJSExport _ -> "module.exports" + +let loc_of_single_def_info = function + | Class loc -> loc + | Object loc -> loc + +let all_locs_of_property_def_info def_info = def_info |> Nel.map loc_of_single_def_info + +let all_locs_of_def_info = function + | Property (def_info, _) -> all_locs_of_property_def_info def_info + | CJSExport loc -> Nel.one loc + +type def_loc = + (* We found a class property. Include all overridden implementations. Superclass implementations + * are listed last. *) + | FoundClass of Loc.t Nel.t + (* We found an object property. *) + | FoundObject of Loc.t + | FoundUnion of def_loc Nel.t + (* This means we resolved the receiver type but did not find the definition. If this happens + * there must be a type error (which may be suppresssed) *) + | NoDefFound + (* This means it's a known type that we deliberately do not currently support. 
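+ * (per extract_def_loc_resolved below, this covers successfully extracted types other
+ * than class instances, objects, modules, and unions, plus unhandled Members failures)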
*) + | UnsupportedType + (* This means it's not well-typed, and could be anything *) + | AnyType + +let debug_string_of_locs locs = + locs |> Nel.to_list |> Core_list.map ~f:Loc.debug_to_string |> String.concat ", " + +(* Disable the unused value warning -- we want to keep this around for debugging *) +[@@@warning "-32"] + +let debug_string_of_single_def_info = function + | Class loc -> spf "Class (%s)" (Loc.debug_to_string loc) + | Object loc -> spf "Object (%s)" (Loc.debug_to_string loc) + +let debug_string_of_property_def_info def_info = + def_info + |> Nel.map debug_string_of_single_def_info + |> Nel.to_list + |> String.concat ", " + |> spf "[%s]" + +let debug_string_of_def_info = function + | Property (def_info, name) -> + spf "Property (%s, %s)" (debug_string_of_property_def_info def_info) name + | CJSExport loc -> spf "CJSExport (%s)" (Loc.debug_to_string loc) + +let rec debug_string_of_def_loc = function + | FoundClass locs -> spf "FoundClass (%s)" (debug_string_of_locs locs) + | FoundObject loc -> spf "FoundObject (%s)" (Loc.debug_to_string loc) + | FoundUnion def_locs -> + Nel.to_list def_locs + |> Core_list.map ~f:debug_string_of_def_loc + |> String.concat ", " + |> spf "FoundUnion (%s)" + | NoDefFound -> "NoDefFound" + | UnsupportedType -> "UnsupportedType" + | AnyType -> "AnyType" + +(* Re-enable the unused value warning *) +[@@@warning "+32"] + +let extract_instancet cx ty : (Type.t, string) result = + Type.( + let resolved = Members.resolve_type cx ty in + match resolved with + | ThisClassT (_, t) + | DefT (_, _, PolyT (_, _, ThisClassT (_, t), _)) -> + Ok t + | _ -> + let type_string = string_of_ctor resolved in + Error ("Expected a class type to extract an instance type from, got " ^ type_string)) + +(* Must be called with the result from Members.extract_type *) +let get_def_loc_from_extracted_type cx extracted_type name = + extracted_type + |> Members.extract_members cx + |> Members.to_command_result + >>| fun map -> + match SMap.get name map with + | None -> None + (* Currently some types (e.g. spreads) do not contain locations for their properties. For now + * we'll just treat them as if the properties do not exist, but once this is fixed this case + * should be promoted to an error *) + | Some (None, _) -> None + | Some (Some loc, _) -> Some loc + +let rec extract_def_loc ~reader cx ty name : (def_loc, string) result = + let resolved = Members.resolve_type cx ty in + extract_def_loc_resolved ~reader cx resolved name + +(* The same as get_def_loc_from_extracted_type except it recursively checks for overridden + * definitions of the member in superclasses and returns those as well *) +and extract_def_loc_from_instancet ~reader cx extracted_type super name : (def_loc, string) result + = + let current_class_def_loc = get_def_loc_from_extracted_type cx extracted_type name in + current_class_def_loc + >>= function + | None -> Ok NoDefFound + | Some loc -> + let loc = loc_of_aloc ~reader loc in + extract_def_loc ~reader cx super name + >>= begin + function + | FoundClass lst -> + (* Avoid duplicate entries. This can happen if a class does not override a method, + * so the definition points to the method definition in the parent class. Then we + * look at the parent class and find the same definition. 
*) + let lst = + if Nel.hd lst = loc then + lst + else + Nel.cons loc lst + in + Ok (FoundClass lst) + | FoundObject _ -> Error "A superclass should be a class, not an object" + | FoundUnion _ -> Error "A superclass should be a class, not a union" + (* If the superclass does not have a definition for this method, or it is for some reason + * not a class type, or we don't know its type, just return the location we already know + * about. *) + | NoDefFound + | UnsupportedType + | AnyType -> + Ok (FoundClass (Nel.one loc)) + end + +and extract_def_loc_resolved ~reader cx ty name : (def_loc, string) result = + Members.( + Type.( + match extract_type cx ty with + | Success (DefT (_, _, InstanceT (_, super, _, _))) as extracted_type -> + extract_def_loc_from_instancet ~reader cx extracted_type super name + | (Success (DefT (_, _, ObjT _)) | SuccessModule _) as extracted_type -> + get_def_loc_from_extracted_type cx extracted_type name + >>| begin + function + | None -> NoDefFound + | Some loc -> FoundObject (loc_of_aloc ~reader loc) + end + | Success (UnionT (_, rep)) -> + let union_members = + UnionRep.members rep + |> Core_list.map ~f:(fun member -> extract_def_loc ~reader cx member name) + |> Result.all + in + union_members + >>= begin + fun members -> + Nel.of_list members + |> Result.of_option ~error:"Union should have at least one member" + end + >>| (fun members_nel -> FoundUnion members_nel) + | Success _ + | FailureNullishType + | FailureUnhandledType _ + | FailureUnhandledMembers _ -> + Ok UnsupportedType + | FailureAnyType -> Ok AnyType)) + +(* Takes the file key where the module reference appeared, as well as the module reference, and + * returns the file name for the module that the module reference refers to. *) +let file_key_of_module_ref ~reader file_key module_ref = + let resolved = + Module_js.find_resolved_module + ~reader:(Abstract_state_reader.State_reader reader) + ~audit:Expensive.warn + file_key + module_ref + in + Module_heaps.Reader.get_file ~reader ~audit:Expensive.warn resolved + +let def_info_of_typecheck_results ~reader cx props_access_info = + let def_info_of_class_member_locs locs = + (* We want to include the immediate implementation as well as all superclass implementations. + * If we wanted a mode where superclass implementations were not included, for example, we + * could choose to take only the first extracted location. *) + Nel.map (fun loc -> Class loc) locs + in + let def_info_of_type name ty = + let rec def_info_of_def_loc = function + | FoundClass locs -> Some (def_info_of_class_member_locs locs) + | FoundObject loc -> Some (Nel.one (Object loc)) + | FoundUnion def_locs -> + def_locs |> Nel.map def_info_of_def_loc |> Nel.cat_maybes |> Option.map ~f:Nel.concat + | NoDefFound + | UnsupportedType + | AnyType -> + None + in + extract_def_loc ~reader cx ty name >>| def_info_of_def_loc + in + match props_access_info with + | None -> Ok None + | Some (Obj_def (loc, name)) -> Ok (Some (Nel.one (Object loc), name)) + | Some (Class_def (ty, name, static)) -> + if static then + (* Here, `ty` ends up resolving to `ObjT` so we lose the knowledge that this is a static + * property. This means that we don't get the fancy look-up-the-inheritance-chain behavior + * that we get with class instances. That would be nice to add at some point. 
*) + def_info_of_type name ty >>| Option.map ~f:(fun def_info -> (def_info, name)) + else + (* We get the type of the class back here, so we need to extract the type of an instance *) + extract_instancet cx ty + >>= fun ty -> + extract_def_loc_resolved ~reader cx ty name + >>= (function + | FoundClass locs -> Ok (Some (def_info_of_class_member_locs locs, name)) + | FoundUnion _ + | FoundObject _ -> + Error "Expected to extract class def info from a class" + | _ -> Error "Unexpectedly failed to extract definition from known type") + | Some (Use (ty, name)) -> + def_info_of_type name ty >>| Option.map ~f:(fun def_info -> (def_info, name)) + | Some (Use_in_literal (types, name)) -> + let def_infos_result = Nel.map (def_info_of_type name) types |> Nel.result_all in + def_infos_result + >>| fun def_infos -> + Nel.cat_maybes def_infos + |> Option.map ~f:Nel.concat + |> Option.map ~f:(fun def_info -> (def_info, name)) + +let add_literal_properties literal_key_info def_info = + (* If we happen to be on an object property, include the location of that + * property as a def loc. We don't want to do that above because: + * (a) We could also encounter a `Use_in_literal` if this object literal flows + * into another object type. This would force us to make props_access_info a + * list and add additional complexity just for the sake of this one case. + * (b) We would have to add a type inference hook, which we are trying to + * avoid. *) + let def_info = + match (def_info, literal_key_info) with + | (None, None) -> Ok None + | (Some _, None) -> Ok def_info + | (None, Some (_, loc, name)) -> Ok (Some (Nel.one (Object loc), name)) + | (Some (defs, name1), Some (_, loc, name2)) -> + if name1 <> name2 then + Error "Unexpected name mismatch" + else + Ok (Some (Nel.cons (Object loc) defs, name1)) + in + Result.map + def_info + ~f:(Option.map ~f:(fun (prop_def_info, name) -> Property (prop_def_info, name))) + +let get_def_info ~reader ~options env profiling file_key ast_info loc : + (def_info option, string) result Lwt.t = + let props_access_info = ref (Ok None) in + let (ast, file_sig, info) = ast_info in + (* Check if it's an exported symbol *) + let loc = Option.value (ImportExportSymbols.find_related_symbol file_sig loc) ~default:loc in + let info = Docblock.set_flow_mode_for_ide_command info in + let literal_key_info : (Loc.t * Loc.t * string) option = ObjectKeyAtLoc.get ast loc in + let%lwt cx = + set_def_loc_hook ~reader props_access_info literal_key_info loc; + let%lwt (cx, _) = + Profiling_js.with_timer_lwt profiling ~timer:"MergeContents" ~f:(fun () -> + let%lwt () = + Types_js.ensure_checked_dependencies ~options ~reader ~env file_key file_sig + in + Lwt.return + @@ Merge_service.merge_contents_context ~reader options file_key ast info file_sig) + in + Lwt.return cx + in + unset_hooks (); + !props_access_info + %>>= fun props_access_info -> + let def_info = def_info_of_typecheck_results ~reader cx props_access_info in + let def_info = def_info >>= add_literal_properties literal_key_info in + let def_info = + def_info + >>= function + | Some _ as def_info -> Ok def_info + | None -> + (* Check if we are on a CJS import/export. These cases are not covered above since the type + * system hooks don't quite get us what we want. 
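+ * Concretely: a `require(...)` whose location contains the target maps to the required
+ * file's `module.exports` location, and a target inside this file's own
+ * `module.exports` expression maps to that expression's location.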
*) + let export_loc = + File_sig.With_Loc.( + List.fold_left + begin + fun acc -> function + | Require { source = (_, module_ref); require_loc; _ } -> + if Loc.contains require_loc loc then + match acc with + | Error _ -> acc + | Ok (Some _) -> Error "Did not expect multiple requires to match one location" + | Ok None -> + let external_file_sig = + let filename = file_key_of_module_ref ~reader file_key module_ref in + Option.bind filename (Parsing_heaps.Reader.get_file_sig ~reader) + in + Result.return + @@ Option.bind external_file_sig (fun external_file_sig -> + match external_file_sig.module_sig.module_kind with + | CommonJS { mod_exp_loc = Some loc; _ } -> Some loc + | _ -> None) + else + acc + | _ -> acc + end + (Ok None) + file_sig.module_sig.requires) + in + let export_loc = + export_loc + >>| function + | Some _ as x -> x + | None -> + File_sig.With_Loc.( + (match file_sig.module_sig.module_kind with + | CommonJS { mod_exp_loc = Some mod_exp_loc; _ } -> + if Loc.contains mod_exp_loc loc then + Some mod_exp_loc + else + None + | _ -> None)) + in + Result.map export_loc ~f:(Option.map ~f:(fun x -> CJSExport x)) + in + Lwt.return @@ def_info diff --git a/src/services/get_def/importExportSymbols.ml b/src/services/get_def/importExportSymbols.ml new file mode 100644 index 00000000000..0ee5060055c --- /dev/null +++ b/src/services/get_def/importExportSymbols.ml @@ -0,0 +1,85 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open File_sig.With_Loc + +let if_one_return_other x a b = + if x = a then + Some b + else if x = b then + Some a + else + None + +let find_related_symbol_from_export loc = function + | (_, ExportDefault { default_loc; local = Some (local, _); _ }) -> + if_one_return_other loc default_loc local + | (_, ExportNamed { loc = remote_name_loc; kind }) -> + begin + match kind with + | NamedSpecifier { local = (local_loc, _); _ } -> + if_one_return_other loc remote_name_loc local_loc + | NamedDeclaration -> + if loc = remote_name_loc then + Some remote_name_loc + else + None + end + | _ -> None + +let find_related_symbol_from_module_kind loc = function + | CommonJS _ -> None + | ES { named; _ } -> + let exports = Core_list.map ~f:snd named in + ListUtils.first_some_map (find_related_symbol_from_export loc) exports + +let rec find_related_symbol_from_bindings loc remote_loc bindings = + match bindings with + | BindIdent (local_loc, _) -> if_one_return_other loc remote_loc local_loc + | BindNamed named -> + let loc_records (* list of related loc *) = + List.fold_left + (fun acc ((remote_loc, _), bindings) -> + find_related_symbol_from_bindings loc remote_loc bindings :: acc) + [] + named + in + loc_records |> ListUtils.first_some_map (fun x -> x) + +let find_related_symbol_from_require loc = function + | Import { named; _ } -> + let loc_records (* list of {remote_loc, local_loc} *) = + SMap.fold + begin + fun _ local_name_to_locs acc -> + SMap.fold + begin + fun _ locs acc -> List.rev_append (Nel.to_list locs) acc + end + local_name_to_locs + acc + end + named + [] + in + loc_records + |> ListUtils.first_some_map (fun { remote_loc; local_loc } -> + if_one_return_other loc remote_loc local_loc) + | Require { bindings = Some bindings; require_loc; _ } -> + find_related_symbol_from_bindings loc require_loc bindings + | _ -> None + +let find_related_symbol_from_requires loc requires = + ListUtils.first_some_map 
(find_related_symbol_from_require loc) requires + +let find_related_symbol file_sig loc = + match find_related_symbol_from_module_kind loc file_sig.module_sig.module_kind with + | Some _ as result -> result + | None -> find_related_symbol_from_requires loc file_sig.module_sig.requires + +let find_related_symbols file_sig starting_locs = + Core_list.map ~f:(find_related_symbol file_sig) starting_locs |> ListUtils.cat_maybes diff --git a/src/server/find_refs/importExportSymbols.mli b/src/services/get_def/importExportSymbols.mli similarity index 80% rename from src/server/find_refs/importExportSymbols.mli rename to src/services/get_def/importExportSymbols.mli index 1ce36f015d6..80855f2614e 100644 --- a/src/server/find_refs/importExportSymbols.mli +++ b/src/services/get_def/importExportSymbols.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -17,7 +17,7 @@ * Given the code: `import {foo} from 'bar'` * - If given the location for `foo`, will return that same location. *) -val find_related_symbols: File_sig.t -> Loc.t list -> Loc.t list +val find_related_symbols : File_sig.With_Loc.t -> Loc.t list -> Loc.t list (* As above but operates only on a single location *) -val find_related_symbol: File_sig.t -> Loc.t -> Loc.t option +val find_related_symbol : File_sig.With_Loc.t -> Loc.t -> Loc.t option diff --git a/src/services/inference/__tests__/inference_tests.ml b/src/services/inference/__tests__/inference_tests.ml new file mode 100644 index 00000000000..e8a366c7f89 --- /dev/null +++ b/src/services/inference/__tests__/inference_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = "inference" >::: [Types_js_test.tests] + +let () = run_test_tt_main tests diff --git a/src/services/inference/__tests__/types_js_test.ml b/src/services/inference/__tests__/types_js_test.ml new file mode 100644 index 00000000000..40e2148811d --- /dev/null +++ b/src/services/inference/__tests__/types_js_test.ml @@ -0,0 +1,185 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 +open Utils_js + +(* Like `>::` except it expects the function to return `unit Lwt.t` rather than `unit` *) +let ( %>:: ) name f = name >:: (fun ctxt -> LwtInit.run_lwt (fun () -> f ctxt)) + +let assert_checked_sets_equal ~ctxt expected actual = + assert_equal + ~ctxt + ~cmp:CheckedSet.debug_equal + ~printer:CheckedSet.debug_to_string + expected + actual + +let dummy_flowconfig_params = + { + CommandUtils.ignores = []; + untyped = []; + declarations = []; + includes = []; + libs = []; + raw_lint_severities = []; + } + +let dummy_options_flags = + { + CommandUtils.Options_flags.all = false; + debug = false; + flowconfig_flags = dummy_flowconfig_params; + include_warnings = false; + max_warnings = None; + max_workers = None; + merge_timeout = None; + munge_underscore_members = false; + no_flowlib = false; + no_saved_state = true; + profile = false; + quiet = false; + saved_state_fetcher = None; + saved_state_force_recheck = false; + saved_state_no_fallback = false; + strip_root = false; + temp_dir = None; + traces = None; + trust_mode = None; + types_first = true; + abstract_locations = true; + verbose = None; + wait_for_recheck = None; + weak = false; + include_suppressions = false; + } + +let test_with_profiling test_fun ctxt = + let%lwt (_finished, result) = + Profiling_js.with_profiling_lwt ~label:"Test" ~should_print_summary:false (test_fun ctxt) + in + Lwt.return result + +let make_fake_file_key filename = File_key.SourceFile ("/tmp/fake/path/" ^ filename ^ ".js") + +let make_filename_set filenames = filenames |> List.map make_fake_file_key |> FilenameSet.of_list + +let make_checked_set ~focused ~dependents ~dependencies = + let focused = make_filename_set focused in + let dependents = make_filename_set dependents in + let dependencies = make_filename_set dependencies in + CheckedSet.add ~focused ~dependents ~dependencies CheckedSet.empty + +let make_dependency_graph lst = + List.fold_left + (fun map (file, dependencies) -> + let file = make_fake_file_key file in + if FilenameMap.mem file map then failwith "Duplicate key when constructing map"; + let dependency_set = make_filename_set dependencies in + FilenameMap.add file dependency_set map) + FilenameMap.empty + lst + +let determine_what_to_recheck ~profiling ~dependency_graph ~all_dependency_graph ~freshparsed = + (* Get all the files from the all_dependency_graph and consider them focused *) + let checked_files = + let focused_set = + List.fold_left + (fun set (file, dependencies) -> + let file = make_fake_file_key file in + let dependencies = make_filename_set dependencies in + set |> FilenameSet.add file |> FilenameSet.union dependencies) + FilenameSet.empty + all_dependency_graph + in + CheckedSet.add ~focused:focused_set CheckedSet.empty + in + let dependency_graph = make_dependency_graph dependency_graph in + let all_dependency_graph = make_dependency_graph all_dependency_graph in + let freshparsed = freshparsed |> List.map make_fake_file_key |> FilenameSet.of_list in + let flowconfig = FlowConfig.empty_config in + let root = Path.dummy_path in + let options = + CommandUtils.make_options + ~flowconfig_name:".flowconfig" + ~flowconfig + ~lazy_mode:None + ~root + dummy_options_flags + in + let is_file_checked _ = true in + let unchanged_checked = + CheckedSet.add + ~focused:(FilenameSet.diff (CheckedSet.all checked_files) freshparsed) + CheckedSet.empty + in + let direct_dependent_files = + FilenameMap.fold + (fun file deps acc -> + if FilenameSet.exists (fun x -> FilenameSet.mem x freshparsed) deps then + 
FilenameSet.add file acc + else + acc) + all_dependency_graph + FilenameSet.empty + in + Types_js.debug_determine_what_to_recheck + ~profiling + ~options + ~is_file_checked + ~ide_open_files:(lazy SSet.empty) + ~dependency_graph + ~all_dependency_graph + ~checked_files + ~freshparsed + ~unparsed_set:FilenameSet.empty + ~deleted:FilenameSet.empty + ~unchanged_checked + ~files_to_force:CheckedSet.empty + ~unchanged_files_to_force:CheckedSet.empty + ~direct_dependent_files + +(* There is memory sampling embedded throughout the code under test. It polls the shared memory + * system to get information about its usage. If the shared memory system is not initialized, we get + * crashes, so we have to initialize it before running tests. *) +let sharedmem_config = + { + SharedMem_js.global_size = 0; + heap_size = 1024 * 1024; + dep_table_pow = 17; + hash_table_pow = 19; + shm_dirs = ["/dev/shm"]; + shm_min_avail = 1024 * 256; + log_level = 0; + sample_rate = 0.0; + } + +let _ = SharedMem_js.hh_shared_init ~config:sharedmem_config ~shm_dir:None ~num_workers:1 + +let tests = + "determine_what_to_recheck" + >::: [ + "simple_test" + %>:: test_with_profiling (fun ctxt profiling -> + let dependency_graph = [("a", ["b"]); ("b", ["c"; "d"]); ("c", []); ("d", [])] in + let all_dependency_graph = + [("a", ["b"]); ("b", ["c"; "d"]); ("c", []); ("d", [])] + in + let freshparsed = ["b"] in + let%lwt (to_merge, _components, _recheck_set, _all_dependent_files) = + determine_what_to_recheck + ~profiling + ~dependency_graph + ~all_dependency_graph + ~freshparsed + in + let expected = + make_checked_set ~focused:["b"] ~dependents:["a"] ~dependencies:[] + in + assert_checked_sets_equal ~ctxt expected to_merge; + Lwt.return_unit); + ] diff --git a/src/services/inference/dep_service.ml b/src/services/inference/dep_service.ml index 7c3d1c1d36e..0102e9b18de 100644 --- a/src/services/inference/dep_service.ml +++ b/src/services/inference/dep_service.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -66,226 +66,201 @@ open Utils_js **) (* produce, given files in fileset: - (1) a map from those files to modules - (2) a dependent (reverse dependency) map for those files: + (1) a dependent (reverse dependency) map for those files: the key is the module provided by a file; the value is the subset of files which require that module directly - (3) a subset of those files that phantom depend on root_fileset + (2) a subset of those files that phantom depend on root_fileset + + IMPORTANT!!! The only state this function can read is the resolved requires! If you need this + function to read any other state, make sure to update the DirectDependentFilesCache! *) -let dependent_calc_utils workers fileset root_fileset = Module_heaps.( - let root_fileset = FilenameSet.fold (fun f root_fileset -> - match f with - | File_key.SourceFile s - | File_key.JsonFile s - | File_key.ResourceFile s -> SSet.add s root_fileset - | File_key.LibFile _ - | File_key.Builtins -> root_fileset - ) root_fileset SSet.empty in - (* Distribute work, looking up InfoHeap and ResolvedRequiresHeap once per file. 
*) - let job = List.fold_left (fun utils f -> - let resolved_requires = get_resolved_requires_unsafe ~audit:Expensive.ok f in - let required = Modulename.Set.of_list - (SMap.values resolved_requires.resolved_modules) +let calc_direct_dependents_utils ~reader workers fileset root_fileset = + Module_heaps.( + let root_fileset = + FilenameSet.fold + (fun f root_fileset -> + match f with + | File_key.SourceFile s + | File_key.JsonFile s + | File_key.ResourceFile s -> + SSet.add s root_fileset + | File_key.LibFile _ + | File_key.Builtins -> + root_fileset) + root_fileset + SSet.empty in - let info = get_info_unsafe ~audit:Expensive.ok f in - (* Add f |-> info._module to the `modules` map. This will be used downstream - in calc_all_dependents. - - TODO: Why do we this here rather than there? This used to be an - optimization, since InfoHeap and ResolvedRequiresHeap were - together. Will clean up later. - - TODO: explore whether we can avoid creating this map on every recheck, - instead maintaining the map incrementally and hopefully reusing large - parts of it. - *) - let entry = - info.module_name, - (* For every required module m, add f to the reverse dependency list for m, + (* Distribute work, looking up InfoHeap and ResolvedRequiresHeap once per file. *) + let job = + List.fold_left (fun utils f -> + let resolved_requires = + Reader_dispatcher.get_resolved_requires_unsafe ~reader ~audit:Expensive.ok f + in + let required = Modulename.Set.of_list (SMap.values resolved_requires.resolved_modules) in + let entry = + (* For every required module m, add f to the reverse dependency list for m, stored in `module_dependents_tbl`. This will be used downstream when computing direct_dependents, and also in calc_all_dependents. TODO: should generate this map once on startup, keep required_by in module records and update incrementally on recheck. *) - required, - (* If f's phantom dependents are in root_fileset, then add f to + ( required, + (* If f's phantom dependents are in root_fileset, then add f to `resolution_path_files`. These are considered direct dependencies (in addition to others computed by direct_dependents downstream). 
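+ (Phantom dependents are paths that f's module resolution consulted without resolving to them;
+ if one of those paths is in root_fileset, f's requires may now resolve differently, so f
+ needs to be re-resolved.)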
*) - resolved_requires.phantom_dependents |> SSet.exists (fun f -> - SSet.mem f root_fileset - ) - + resolved_requires.phantom_dependents + |> SSet.exists (fun f -> SSet.mem f root_fileset) ) + in + (f, entry) :: utils) in - (f, entry) :: utils - ) in - (* merge results *) - let merge = List.rev_append in - - let%lwt result = - MultiWorkerLwt.call workers ~job ~merge - ~neutral: [] - ~next: (MultiWorkerLwt.next workers (FilenameSet.elements fileset)) - in - let module_dependents_tbl = Hashtbl.create 0 in - let modules, resolution_path_files = - List.fold_left - (fun (modules, resolution_path_files) (f, (m, rs, b)) -> - Modulename.Set.iter (fun r -> - Hashtbl.add module_dependents_tbl r f - ) rs; - FilenameMap.add f m modules, - if b then FilenameSet.add f resolution_path_files else resolution_path_files - ) - (FilenameMap.empty, FilenameSet.empty) - result in - - Lwt.return (modules, module_dependents_tbl, resolution_path_files) -) - -(* given a reverse dependency map (from modules to the files which - require them), generate the closure of the dependencies of a - given fileset, using get_info_unsafe to map files to modules - *) -let calc_all_dependents modules module_dependents_tbl fileset = - let module_dependents m = Hashtbl.find_all module_dependents_tbl m in - let file_dependents f = - let m = FilenameMap.find_unsafe f modules in - let f_module = Module_js.eponymous_module f in - (* In general, a file exports its module via two names. See Modulename for - details. It suffices to note here that dependents of the file can use - either of those names to import the module. *) - List.rev_append (module_dependents m) (module_dependents f_module) |> FilenameSet.of_list - in - let rec expand fileset seen = - FilenameSet.fold (fun f acc -> - if FilenameSet.mem f !seen then acc else ( - seen := FilenameSet.add f !seen; - let dependents = file_dependents f in - FilenameSet.add f (FilenameSet.union acc (expand dependents seen)) - ) - ) fileset FilenameSet.empty - in expand fileset (ref FilenameSet.empty) + (* merge results *) + let merge = List.rev_append in + let%lwt result = + MultiWorkerLwt.call + workers + ~job + ~merge + ~neutral:[] + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements fileset)) + in + let module_dependents_tbl = Hashtbl.create 0 in + let resolution_path_files = + List.fold_left + (fun resolution_path_files (f, (rs, b)) -> + Modulename.Set.iter (fun r -> Hashtbl.add module_dependents_tbl r f) rs; + if b then + FilenameSet.add f resolution_path_files + else + resolution_path_files) + FilenameSet.empty + result + in + Lwt.return (module_dependents_tbl, resolution_path_files)) -(* Identify the direct and transitive dependents of new, changed, and deleted - files. +(* Identify the direct dependents of new, changed, and deleted files. Files that must be rechecked include those that immediately or recursively depended on modules whose providers were affected by new, changed, or deleted files. The latter modules, marked "changed," are calculated earlier when picking providers. - - unchanged is all unchanged files in the current state - - new_or_changed is all files that have just been through local inference and - all skipped files that were also new or unchanged - - changed_modules is a conservative approximation of modules that no longer have - the same providers, or whose providers are changed files - - Return the subset of unchanged transitively dependent on updates, and - the subset directly dependent on them. 
-*) -let dependent_files workers ~unchanged ~new_or_changed ~changed_modules = - (* Get the modules provided by unchanged files, the reverse dependency map - for unchanged files, and the subset of unchanged files whose resolution - paths may encounter new or changed modules. *) - let%lwt modules, module_dependents_tbl, resolution_path_files = - dependent_calc_utils workers unchanged new_or_changed - in + - candidates is the set of files which could be dependents. The returned sets will be subsets of + the candidates set. For example, if we're calculating the dependents of all the changed files + then this would be the set of unchanged files + - root_files is the set of files for which we'd like to calculate dependents. This should be + disjoint from candidates. If we wanted to calculate the dependents of all the changed files then + this would be the set of changed files + - root_modules is the set of modules for which we'd like to calculate dependents. If we wanted to + calculate the dependents of all the changed files then this would be the set of module names + which have new providers. - (* resolution_path_files, plus files that require changed_modules *) - let direct_dependents = Modulename.Set.fold (fun m acc -> - let files = Hashtbl.find_all module_dependents_tbl m in - List.fold_left (fun acc f -> FilenameSet.add f acc) acc files - ) changed_modules resolution_path_files in + Return the subset of candidates directly dependent on root_modules / root_files. - (* (transitive dependents are re-merged, directs are also re-resolved) *) - Lwt.return ( - calc_all_dependents modules module_dependents_tbl direct_dependents, - direct_dependents - ) + IMPORTANT!!! The only state this function can read is the resolved requires! If you need this + function to read any other state, make sure to update the DirectDependentFilesCache! +*) +let calc_direct_dependents ~reader workers ~candidates ~root_files ~root_modules = + if FilenameSet.is_empty root_files && Modulename.Set.is_empty root_modules then + (* dependent_calc_utils is O(candidates), but if root_files and root_modules are empty then we + * can immediately return. We know that the empty set has no direct or transitive dependencies. + * This can save us a lot of time on very large repositories *) + Lwt.return FilenameSet.empty + else + (* Get the modules provided by candidate files, the reverse dependency map + for candidate files, and the subset of candidate files whose resolution + paths may encounter new or changed modules. *) + let%lwt (module_dependents_tbl, resolution_path_files) = + calc_direct_dependents_utils ~reader workers candidates root_files + in + (* resolution_path_files, plus files that require root_modules *) + let direct_dependents = + Modulename.Set.fold + (fun m acc -> + let files = Hashtbl.find_all module_dependents_tbl m in + List.fold_left (fun acc f -> FilenameSet.add f acc) acc files) + root_modules + resolution_path_files + in + Lwt.return direct_dependents (* Calculate module dependencies. Since this involves a lot of reading from shared memory, it is useful to parallelize this process (leading to big savings in init and recheck times). 
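+ Each worker reads the per-file file sigs and resolved requires straight from shared memory,
+ so the fold over files below distributes cleanly across MultiWorkerLwt workers.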
*) - -let checked_module ~audit m = - m |> Module_heaps.get_file_unsafe ~audit |> Module_js.checked_file ~audit +let checked_module ~reader ~audit m = + m + |> Module_heaps.Mutator_reader.get_file_unsafe ~reader ~audit + |> Module_js.checked_file ~reader:(Abstract_state_reader.Mutator_state_reader reader) ~audit (* A file is considered to implement a required module r only if the file is registered to provide r and the file is checked. Such a file must be merged before any file that requires module r, so this notion naturally gives rise to a dependency ordering among files for merging. *) -let implementation_file ~audit r = - if Module_heaps.module_exists r && checked_module ~audit r - then Some (Module_heaps.get_file_unsafe ~audit r) - else None - -let file_dependencies ~audit file = - let file_sig = Parsing_heaps.get_file_sig_unsafe file in - let require_loc = File_sig.(require_loc_map file_sig.module_sig) in +let implementation_file ~reader ~audit r = + if Module_heaps.Mutator_reader.module_exists ~reader r && checked_module ~reader ~audit r then + Some (Module_heaps.Mutator_reader.get_file_unsafe ~reader ~audit r) + else + None + +let file_dependencies ~audit ~reader file = + let file_sig = Parsing_heaps.Mutator_reader.get_file_sig_unsafe reader file in + let sig_file_sig_opt = Parsing_heaps.Mutator_reader.get_sig_file_sig reader file in + let require_set = File_sig.With_Loc.(require_set file_sig.module_sig) in + let sig_require_set = + match sig_file_sig_opt with + | None -> require_set + | Some sig_file_sig -> File_sig.With_ALoc.(require_set sig_file_sig.module_sig) + in let { Module_heaps.resolved_modules; _ } = - Module_heaps.get_resolved_requires_unsafe ~audit file + Module_heaps.Mutator_reader.get_resolved_requires_unsafe ~reader ~audit file in - SMap.fold (fun mref _ files -> - let m = SMap.find_unsafe mref resolved_modules in - match implementation_file m ~audit:Expensive.ok with - | Some f -> FilenameSet.add f files - | None -> files - ) require_loc FilenameSet.empty + SSet.fold + (fun mref (sig_files, all_files) -> + let m = SMap.find_unsafe mref resolved_modules in + match implementation_file ~reader m ~audit:Expensive.ok with + | Some f -> + if SSet.mem mref sig_require_set then + (FilenameSet.add f sig_files, FilenameSet.add f all_files) + else + (sig_files, FilenameSet.add f all_files) + | None -> (sig_files, all_files)) + require_set + (FilenameSet.empty, FilenameSet.empty) + +type dependency_graph = FilenameSet.t FilenameMap.t (* Calculates the dependency graph as a map from files to their dependencies. * Dependencies not in parsed are ignored. 
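+ * In types-first mode each file maps to a pair of dependency sets: the ones needed to check its
+ * signature (sig_files) and all of its dependencies (all_files); classic mode keeps only the
+ * latter. The result is wrapped in a Dependency_info.t accordingly.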
*) -let calc_partial_dependency_graph workers files ~parsed = - let%lwt dependency_graph = MultiWorkerLwt.call - workers - ~job: (List.fold_left (fun dependency_graph file -> - FilenameMap.add file (file_dependencies ~audit:Expensive.ok file) dependency_graph - )) - ~neutral: FilenameMap.empty - ~merge: FilenameMap.union - ~next: (MultiWorkerLwt.next workers (FilenameSet.elements files)) in - FilenameMap.map (FilenameSet.inter parsed) dependency_graph - |> Lwt.return - -let calc_dependency_graph workers ~parsed = - calc_partial_dependency_graph workers parsed ~parsed - -(* Returns a copy of the dependency graph with only those file -> dependency edges where file and - dependency are in files *) -let filter_dependency_graph dependency_graph files = - FilenameSet.fold (fun f -> - let fs = FilenameMap.find_unsafe f dependency_graph |> FilenameSet.inter files in - FilenameMap.add f fs - ) files FilenameMap.empty - -let rec closure graph = - FilenameSet.fold (fun file acc -> - match FilenameMap.get file graph with - | Some files -> - let files = FilenameSet.diff files acc in - let acc = FilenameSet.union files acc in - closure graph files acc - | None -> acc - ) - -(* `calc_all_dependencies graph files` will return the set of direct and transitive dependencies - * of `files`. This set does include `files`. - *) -let calc_all_dependencies dependency_graph files = - closure dependency_graph files files - -let reverse graph = - let acc = Hashtbl.create 0 in - FilenameMap.iter (fun f -> FilenameSet.iter (fun f' -> - Hashtbl.add acc f' f - )) graph; - FilenameMap.mapi (fun f _ -> - FilenameSet.of_list @@ Hashtbl.find_all acc f - ) graph +let calc_partial_dependency_info ~options ~reader workers files ~parsed = + let%lwt dependency_info = + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun dependency_info file -> + FilenameMap.add + file + (file_dependencies ~audit:Expensive.ok ~reader file) + dependency_info)) + ~neutral:FilenameMap.empty + ~merge:FilenameMap.union + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements files)) + in + let dependency_info = + match Options.arch options with + | Options.Classic -> + Dependency_info.Classic + (FilenameMap.map + (fun (_sig_files, all_files) -> FilenameSet.inter parsed all_files) + dependency_info) + | Options.TypesFirst -> + Dependency_info.TypesFirst + (FilenameMap.map + (fun (sig_files, all_files) -> + (FilenameSet.inter parsed sig_files, FilenameSet.inter parsed all_files)) + dependency_info) + in + Lwt.return dependency_info -(* `calc_all_reverse_dependencies graph files` will return the set of direct and transitive - * dependents of `files`. This set does include `files`. *) -let calc_all_reverse_dependencies dependency_graph files = - let rev_dependency_graph = reverse dependency_graph in - closure rev_dependency_graph files files +let calc_dependency_info ~options ~reader workers ~parsed = + calc_partial_dependency_info ~options ~reader workers parsed ~parsed diff --git a/src/services/inference/dep_service.mli b/src/services/inference/dep_service.mli index 7dc95eba031..932c5098640 100644 --- a/src/services/inference/dep_service.mli +++ b/src/services/inference/dep_service.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -7,36 +7,32 @@ open Utils_js -val dependent_files: - MultiWorkerLwt.worker list option -> (* workers *) - unchanged:FilenameSet.t -> - new_or_changed:FilenameSet.t -> - changed_modules:Modulename.Set.t -> - (* (transitive_dependents, direct_dependents) of changed_modules *) - (FilenameSet.t * FilenameSet.t) Lwt.t +val calc_direct_dependents : + reader:Abstract_state_reader.t -> + MultiWorkerLwt.worker list option -> + candidates:(* workers *) + FilenameSet.t -> + root_files:FilenameSet.t -> + root_modules:Modulename.Set.t -> + (* direct_dependents of changed_modules *) + FilenameSet.t Lwt.t -val calc_dependency_graph: - MultiWorkerLwt.worker list option -> (* workers *) - parsed:FilenameSet.t -> - FilenameSet.t FilenameMap.t Lwt.t +type dependency_graph = FilenameSet.t FilenameMap.t -val calc_partial_dependency_graph: - MultiWorkerLwt.worker list option -> (* workers *) - FilenameSet.t -> (* files *) - parsed:FilenameSet.t -> - FilenameSet.t FilenameMap.t Lwt.t +val calc_dependency_info : + options:Options.t -> + reader:Mutator_state_reader.t -> + MultiWorkerLwt.worker list option -> + parsed:(* workers *) + FilenameSet.t -> + Dependency_info.t Lwt.t -val filter_dependency_graph: - FilenameSet.t FilenameMap.t -> (* dependency graph *) - FilenameSet.t -> (* files *) - FilenameSet.t FilenameMap.t - -val calc_all_dependencies: - FilenameSet.t FilenameMap.t -> (* dependency graph *) - FilenameSet.t -> (* files *) - FilenameSet.t - -val calc_all_reverse_dependencies: - FilenameSet.t FilenameMap.t -> +val calc_partial_dependency_info : + options:Options.t -> + reader:Mutator_state_reader.t -> + MultiWorkerLwt.worker list option -> + (* workers *) FilenameSet.t -> - FilenameSet.t + parsed:(* files *) + FilenameSet.t -> + Dependency_info.t Lwt.t diff --git a/src/services/inference/dune b/src/services/inference/dune new file mode 100644 index 00000000000..a473c0dde87 --- /dev/null +++ b/src/services/inference/dune @@ -0,0 +1,28 @@ +(library + (name flow_service_inference) + (wrapped false) + (libraries + flow_common + flow_exit_status + flow_monitor_rpc + flow_parser + flow_parsing + flow_procs + flow_server_env + flow_server_files + flow_server_monitor_listener_state + flow_server_rechecker_updates + flow_server_status + flow_server_watchman_expression_terms + flow_service_inference_module + flow_service_saved_state + flow_state_heaps_context + flow_state_heaps_module + flow_typing + build_mode ; hack + cgroup ; hack + procs_bucket ; hack + watchman_lwt ; hack + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/services/inference/inference_utils.ml b/src/services/inference/inference_utils.ml index 6eb69e98dc4..e67d4b66794 100644 --- a/src/services/inference/inference_utils.ml +++ b/src/services/inference/inference_utils.ml @@ -1,49 +1,101 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) +open Utils_js + let error_of_docblock_error ~source_file (loc, err) = - let flow_err = Flow_error.EDocblockError (loc, match err with - | Parsing_service_js.MultipleFlowAttributes -> Flow_error.MultipleFlowAttributes - | Parsing_service_js.MultipleProvidesModuleAttributes -> Flow_error.MultipleProvidesModuleAttributes - | Parsing_service_js.MultipleJSXAttributes -> Flow_error.MultipleJSXAttributes - | Parsing_service_js.InvalidJSXAttribute first_error -> Flow_error.InvalidJSXAttribute first_error - ) in + let flow_err = + Error_message.EDocblockError + ( ALoc.of_loc loc, + match err with + | Parsing_service_js.MultipleFlowAttributes -> Error_message.MultipleFlowAttributes + | Parsing_service_js.MultipleProvidesModuleAttributes -> + Error_message.MultipleProvidesModuleAttributes + | Parsing_service_js.MultipleJSXAttributes -> Error_message.MultipleJSXAttributes + | Parsing_service_js.InvalidJSXAttribute first_error -> + Error_message.InvalidJSXAttribute first_error ) + in Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err -let set_of_docblock_errors ~source_file errors = - List.fold_left (fun acc err -> - Errors.ErrorSet.add (error_of_docblock_error ~source_file err) acc - ) Errors.ErrorSet.empty errors +let set_of_docblock_errors ~source_file = + Core_list.fold_left + ~f:(fun acc err -> Flow_error.ErrorSet.add (error_of_docblock_error ~source_file err) acc) + ~init:Flow_error.ErrorSet.empty let error_of_parse_error ~source_file (loc, err) = - let flow_err = Flow_error.EParseError (loc, err) in - Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err + Error_message.EParseError (ALoc.of_loc loc, err) + |> Flow_error.error_of_msg ~trace_reasons:[] ~source_file -let set_of_parse_error ~source_file error = - Errors.ErrorSet.singleton (error_of_parse_error ~source_file error) +let set_of_parse_error ~source_file = + error_of_parse_error ~source_file %> Flow_error.ErrorSet.singleton + +let error_of_package_json_error ~source_file (loc, err) = + Error_message.EMalformedPackageJson (ALoc.of_loc loc, err) + |> Flow_error.error_of_msg ~trace_reasons:[] ~source_file + +let set_of_package_json_error ~source_file = + error_of_package_json_error ~source_file %> Flow_error.ErrorSet.singleton let error_of_file_sig_error ~source_file err = - let flow_err = match err with - | File_sig.IndeterminateModuleType loc -> Flow_error.EIndeterminateModuleType loc - in - Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err + File_sig.With_Loc.( + let flow_err = + match err with + | IndeterminateModuleType loc -> Error_message.EIndeterminateModuleType (ALoc.of_loc loc) + in + Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err) -let set_of_file_sig_error ~source_file error = - Errors.ErrorSet.singleton (error_of_file_sig_error ~source_file error) +let set_of_file_sig_error ~source_file = + error_of_file_sig_error ~source_file %> Flow_error.ErrorSet.singleton let error_of_file_sig_tolerable_error ~source_file err = - let flow_err = match err with - | File_sig.BadExportPosition loc -> Flow_error.EBadExportPosition loc - | File_sig.BadExportContext (name, loc) -> Flow_error.EBadExportContext (name, loc) - | File_sig.SignatureVerificationError sve -> Flow_error.ESignatureVerification sve - in - Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err + File_sig.With_ALoc.( + let flow_err = + match err with + | BadExportPosition loc -> Error_message.EBadExportPosition loc + | BadExportContext (name, loc) -> Error_message.EBadExportContext (name, loc) + | 
SignatureVerificationError sve -> Error_message.ESignatureVerification sve
+ in
+ Flow_error.error_of_msg ~trace_reasons:[] ~source_file flow_err)
+
+let set_of_file_sig_tolerable_errors ~source_file =
+ Core_list.map ~f:(error_of_file_sig_tolerable_error ~source_file) %> Flow_error.ErrorSet.of_list
+
+(* This is an options-aware fold over the files in `m`. Function `f` will be applied
+ * to a file FILE in `m` iff:
+ * - flag 'opt_enforce_well_formed_exports' is set to true, and
+ * - if at least one 'opt_enforce_well_formed_exports_whitelist=PATH' is set,
+ * FILE lies under at least one such PATH.
+ *)
+let fold_whitelisted_well_formed_exports ~f options m acc =
+ if not options.Options.opt_enforce_well_formed_exports then
+ acc
+ else
+ match options.Options.opt_enforce_well_formed_exports_whitelist with
+ | [] -> Utils_js.FilenameMap.fold f m acc
+ | paths ->
+ let root = Options.root options in
+ let paths = Core_list.map ~f:(Files.expand_project_root_token_to_string ~root) paths in
+ Utils_js.FilenameMap.fold
+ (fun file v b ->
+ let file_str = File_key.to_string file in
+ if List.exists (fun r -> String_utils.is_substring r file_str) paths then
+ f file v b
+ else
+ b)
+ m
+ acc
-let set_of_file_sig_tolerable_errors ~source_file errors =
- errors
- |> List.map (error_of_file_sig_tolerable_error ~source_file)
- |> Errors.ErrorSet.of_list
+let well_formed_exports_enabled options file =
+ options.Options.opt_enforce_well_formed_exports
+ &&
+ match options.Options.opt_enforce_well_formed_exports_whitelist with
+ | [] -> true
+ | paths ->
+ let root = Options.root options in
+ let paths = Core_list.map ~f:(Files.expand_project_root_token_to_string ~root) paths in
+ let file_str = File_key.to_string file in
+ List.exists (fun r -> String_utils.is_substring r file_str) paths
diff --git a/src/services/inference/inference_utils.mli b/src/services/inference/inference_utils.mli
index e00aa1d79f4..ce0180cd3a8 100644
--- a/src/services/inference/inference_utils.mli
+++ b/src/services/inference/inference_utils.mli
@@ -1,14 +1,34 @@
(**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
*) -val error_of_docblock_error: source_file: File_key.t -> Parsing_service_js.docblock_error -> Errors.error -val set_of_docblock_errors: source_file: File_key.t -> Parsing_service_js.docblock_error list -> Errors.ErrorSet.t -val error_of_parse_error : source_file: File_key.t -> Loc.t * Parse_error.t -> Errors.error -val set_of_parse_error: source_file: File_key.t -> Loc.t * Parse_error.t -> Errors.ErrorSet.t -val error_of_file_sig_error : source_file: File_key.t -> File_sig.error -> Errors.error -val set_of_file_sig_error: source_file: File_key.t -> File_sig.error -> Errors.ErrorSet.t -val set_of_file_sig_tolerable_errors: source_file: File_key.t -> File_sig.tolerable_error list -> Errors.ErrorSet.t +val error_of_docblock_error : + source_file:File_key.t -> Parsing_service_js.docblock_error -> ALoc.t Flow_error.t + +val set_of_docblock_errors : + source_file:File_key.t -> Parsing_service_js.docblock_error list -> Flow_error.ErrorSet.t + +val error_of_parse_error : source_file:File_key.t -> Loc.t * Parse_error.t -> ALoc.t Flow_error.t + +val set_of_parse_error : source_file:File_key.t -> Loc.t * Parse_error.t -> Flow_error.ErrorSet.t + +val error_of_package_json_error : source_file:File_key.t -> Loc.t * string -> ALoc.t Flow_error.t + +val set_of_package_json_error : source_file:File_key.t -> Loc.t * string -> Flow_error.ErrorSet.t + +val error_of_file_sig_error : + source_file:File_key.t -> File_sig.With_Loc.error -> ALoc.t Flow_error.t + +val set_of_file_sig_error : + source_file:File_key.t -> File_sig.With_Loc.error -> Flow_error.ErrorSet.t + +val set_of_file_sig_tolerable_errors : + source_file:File_key.t -> File_sig.With_ALoc.tolerable_error list -> Flow_error.ErrorSet.t + +val fold_whitelisted_well_formed_exports : + f:(File_key.t -> 'a -> 'b -> 'b) -> Options.t -> 'a Utils_js.FilenameMap.t -> 'b -> 'b + +val well_formed_exports_enabled : Options.t -> File_key.t -> bool diff --git a/src/services/inference/init_js.ml b/src/services/inference/init_js.ml index 68e3f7aac40..3cbe50ca88b 100644 --- a/src/services/inference/init_js.ml +++ b/src/services/inference/init_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -14,13 +14,12 @@ future. 
*) open Utils_js - module Files = Files module Flow = Flow_js module Parsing = Parsing_service_js module Infer = Type_inference_js -let parse_lib_file options file = +let parse_lib_file ~reader options file = (* types are always allowed in lib files *) let types_mode = Parsing.TypesAllowed in (* lib files are always "use strict" *) @@ -29,28 +28,22 @@ let parse_lib_file options file = let lib_file = File_key.LibFile file in let filename_set = FilenameSet.singleton lib_file in let next = Parsing.next_of_filename_set (* workers *) None filename_set in - let%lwt results = Parsing.parse_with_defaults - ~types_mode - ~use_strict - options - (* workers *) None - next + let%lwt results = + Parsing.parse_with_defaults ~types_mode ~use_strict ~reader options (* workers *) None next in - Lwt.return ( - if not (FilenameMap.is_empty results.Parsing.parse_ok) then - let ast = Parsing_heaps.get_ast_unsafe lib_file in - let file_sig = Parsing_heaps.get_file_sig_unsafe lib_file in - Parsing.Parse_ok (ast, file_sig) + Lwt.return + ( if not (FilenameMap.is_empty results.Parsing.parse_ok) then + let ast = Parsing_heaps.Mutator_reader.get_ast_unsafe reader lib_file in + let file_sig = Parsing_heaps.Mutator_reader.get_file_sig_unsafe reader lib_file in + Parsing.Parse_ok (Parsing.Classic (ast, file_sig)) else if List.length results.Parsing.parse_fails > 0 then - let _, _, parse_fails = List.hd results.Parsing.parse_fails in + let (_, _, parse_fails) = List.hd results.Parsing.parse_fails in Parsing.Parse_fail parse_fails else if List.length results.Parsing.parse_skips > 0 then Parsing.Parse_skip Parsing.Skip_non_flow_file else - failwith "Internal error: no parse results found" - ) - with _ -> - failwith (spf "Can't read library definitions file %s, exiting." file) + failwith "Internal error: no parse results found" ) + with _ -> failwith (spf "Can't read library definitions file %s, exiting." file) (* process all lib files: parse, infer, and add the symbols they define to the builtins object. 
@@ -62,103 +55,102 @@ let parse_lib_file options file = returns list of (filename, success, errors, suppressions) tuples *) -let load_lib_files ~master_cx ~options files = - +let load_lib_files ~sig_cx ~options ~reader files = let verbose = Options.verbose options in - (* iterate in reverse override order *) let%lwt (_, result) = List.rev files - |> Lwt_list.fold_left_s ( - fun (exclude_syms, results) file -> - - let lib_file = File_key.LibFile file in - let lint_severities = options.Options.opt_lint_severities in - let file_options = Options.file_options options in - let%lwt result = parse_lib_file options file in - Lwt.return (match result with - | Parsing.Parse_ok (ast, file_sig) -> - - let metadata = - let open Context in - let metadata = metadata_of_options options in - { metadata with checked = false; weak = false } - in - - let sig_cx = Context.make_sig () in - let cx = Context.make sig_cx metadata lib_file Files.lib_module_ref in - Flow.mk_builtins cx; - - let syms = Infer.infer_lib_file cx ast - ~exclude_syms ~lint_severities ~file_options:(Some file_options) ~file_sig - in - - Context.merge_into (Context.sig_cx master_cx) sig_cx; - - let () = - let from_t = Context.find_module master_cx Files.lib_module_ref in - let to_t = Context.find_module cx Files.lib_module_ref in - Flow.flow_t master_cx (from_t, to_t) - in - - let errors = Context.errors cx in - let errors = - if options.Options.opt_enforce_well_formed_exports then - Inference_utils.set_of_file_sig_tolerable_errors - ~source_file:lib_file - file_sig.File_sig.tolerable_errors - |> Errors.ErrorSet.union errors - else - errors - in - let suppressions = Context.error_suppressions cx in - let severity_cover = Context.severity_cover cx in - - Context.remove_all_errors cx; - Context.remove_all_error_suppressions cx; - Context.remove_all_lint_severities cx; - - (if verbose != None then - prerr_endlinef "load_lib %s: added symbols { %s }" - file (String.concat ", " syms)); - - (* symbols loaded from this file are suppressed + |> Lwt_list.fold_left_s + (fun (exclude_syms, results) file -> + let lib_file = File_key.LibFile file in + let lint_severities = options.Options.opt_lint_severities in + let file_options = Options.file_options options in + let%lwt result = parse_lib_file ~reader options file in + Lwt.return + (match result with + | Parsing.Parse_ok parse_ok -> + let (ast, file_sig) = Parsing.basic parse_ok in + let file_sig = File_sig.abstractify_locs file_sig in + let metadata = + Context.( + let metadata = metadata_of_options options in + { metadata with checked = false; weak = false }) + in + (* Lib files use only concrete locations, so this is not needed. 
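+ We still pass an empty table map and an empty lazy reverse table because Context.make
+ takes them as arguments.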
*) + let aloc_tables = FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in + let cx = + Context.make + sig_cx + metadata + lib_file + aloc_tables + rev_table + Files.lib_module_ref + Context.Checking + in + let syms = + Infer.infer_lib_file + cx + ast + ~exclude_syms + ~lint_severities + ~file_options:(Some file_options) + ~file_sig + in + let errors = Context.errors cx in + let errors = + if Inference_utils.well_formed_exports_enabled options lib_file then + file_sig.File_sig.With_ALoc.tolerable_errors + |> Inference_utils.set_of_file_sig_tolerable_errors ~source_file:lib_file + |> Flow_error.ErrorSet.union errors + else + errors + in + let suppressions = Context.error_suppressions cx in + let severity_cover = Context.severity_cover cx in + let include_suppressions = Context.include_suppressions cx in + let (errors, warnings, suppressions) = + Error_suppressions.filter_lints + ~include_suppressions + suppressions + errors + (Context.aloc_tables cx) + severity_cover + in + Context.remove_all_errors cx; + Context.remove_all_error_suppressions cx; + + if verbose != None then + prerr_endlinef "load_lib %s: added symbols { %s }" file (String.concat ", " syms); + + (* symbols loaded from this file are suppressed if found in later ones *) - let exclude_syms = SSet.union exclude_syms (SSet.of_list syms) in - let result = (lib_file, true, errors, suppressions, severity_cover) in - exclude_syms, (result :: results) - - | Parsing.Parse_fail fail -> - let errors = match fail with - | Parsing.Parse_error error -> - Inference_utils.set_of_parse_error ~source_file:lib_file error - | Parsing.Docblock_errors errs -> - Inference_utils.set_of_docblock_errors ~source_file:lib_file errs - | Parsing.File_sig_error error -> - Inference_utils.set_of_file_sig_error ~source_file:lib_file error - in - let severity_cover = - Utils_js.FilenameMap.singleton - lib_file - (ExactCover.file_cover lib_file lint_severities) - in - let result = lib_file, false, errors, Error_suppressions.empty, severity_cover in - exclude_syms, (result :: results) - - | Parsing.Parse_skip - (Parsing.Skip_non_flow_file | Parsing.Skip_resource_file) -> - (* should never happen *) - let errs = Errors.ErrorSet.empty in - let suppressions = Error_suppressions.empty in - let severity_cover = - Utils_js.FilenameMap.singleton - lib_file - (ExactCover.file_cover lib_file lint_severities) - in - let result = lib_file, false, errs, suppressions, severity_cover in - exclude_syms, (result :: results) - ) - ) (SSet.empty, []) + let exclude_syms = SSet.union exclude_syms (SSet.of_list syms) in + let result = (lib_file, true, errors, warnings, suppressions) in + (exclude_syms, result :: results) + | Parsing.Parse_fail fail -> + let errors = + match fail with + | Parsing.Parse_error error -> + Inference_utils.set_of_parse_error ~source_file:lib_file error + | Parsing.Docblock_errors errs -> + Inference_utils.set_of_docblock_errors ~source_file:lib_file errs + | Parsing.File_sig_error error -> + Inference_utils.set_of_file_sig_error ~source_file:lib_file error + in + let result = + (lib_file, false, errors, Flow_error.ErrorSet.empty, Error_suppressions.empty) + in + (exclude_syms, result :: results) + | Parsing.Parse_skip (Parsing.Skip_non_flow_file | Parsing.Skip_resource_file) -> + (* should never happen *) + let errs = Flow_error.ErrorSet.empty in + let warnings = Flow_error.ErrorSet.empty in + let suppressions = Error_suppressions.empty in + let result = (lib_file, false, errs, warnings, suppressions) in + (exclude_syms, 
result :: results))) + (SSet.empty, []) in Lwt.return result @@ -166,27 +158,36 @@ let load_lib_files ~master_cx ~options files = parse and do local inference on library files, and set up master context. returns list of (lib file, success) pairs. *) -let init ~options lib_files = +let init ~options ~reader lib_files = + let sig_cx = Context.make_sig () in let master_cx = - let sig_cx = Context.make_sig () in let metadata = - let open Context in - let metadata = metadata_of_options options in - { metadata with checked = false; weak = false } + Context.( + let metadata = metadata_of_options options in + { metadata with checked = false; weak = false }) in - Context.make sig_cx metadata File_key.Builtins Files.lib_module_ref + (* Builtins use only concrete locations, so this is not needed. *) + let aloc_tables = FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in + Context.make + sig_cx + metadata + File_key.Builtins + aloc_tables + rev_table + Files.lib_module_ref + Context.Checking in - Flow_js.mk_builtins master_cx; - let%lwt result = load_lib_files ~master_cx ~options lib_files in - - Flow.Cache.clear(); + let%lwt result = load_lib_files ~sig_cx ~options ~reader lib_files in + Flow.Cache.clear (); let reason = Reason.builtin_reason (Reason.RCustom "module") in let builtin_module = Obj_type.mk master_cx reason in Flow.flow_t master_cx (builtin_module, Flow.builtins master_cx); Merge_js.ContextOptimizer.sig_context master_cx [Files.lib_module_ref] |> ignore; + Context.remove_all_lint_severities master_cx; Context.clear_intermediates master_cx; (* store master signature context to heap *) diff --git a/src/services/inference/init_js.mli b/src/services/inference/init_js.mli index 2f193a2c8f5..9ca170c9808 100644 --- a/src/services/inference/init_js.mli +++ b/src/services/inference/init_js.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,10 +11,8 @@ to Types_js, where error management stuff lives. *) val init : - options: Options.t -> + options:Options.t -> + reader:Mutator_state_reader.t -> string list -> - (File_key.t * - bool * - Errors.ErrorSet.t * - Error_suppressions.t * - ExactCover.lint_severity_cover Utils_js.FilenameMap.t) list Lwt.t + (File_key.t * bool * Flow_error.ErrorSet.t * Flow_error.ErrorSet.t * Error_suppressions.t) list + Lwt.t diff --git a/src/services/inference/merge_service.ml b/src/services/inference/merge_service.ml index e03664465d3..68ffb244972 100644 --- a/src/services/inference/merge_service.ml +++ b/src/services/inference/merge_service.ml @@ -1,33 +1,52 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) open Utils_js +open Loc_collections module Reqs = Merge_js.Reqs -type 'a merge_job_results = (File_key.t * ('a, Flow_error.error_message) result) list +type 'a unit_result = ('a, ALoc.t * Error_message.internal_error) result + +type 'a file_keyed_result = File_key.t * 'a unit_result + +type acc = + Flow_error.ErrorSet.t + * Flow_error.ErrorSet.t + * Error_suppressions.t + * Coverage_response.file_coverage FilenameMap.t + * float + +type 'a merge_job_results = 'a file_keyed_result list + type 'a merge_job = - worker_mutator: Context_heaps.Merge_context_mutator.worker_mutator -> + worker_mutator:Context_heaps.Merge_context_mutator.worker_mutator -> options:Options.t -> - 'a merge_job_results -> + reader:Mutator_state_reader.t -> File_key.t Nel.t -> - 'a merge_job_results + 'a unit_result -type 'a merge_results = 'a merge_job_results * int (* skipped count *) +type sig_opts_data = { + skipped_count: int; + sig_new_or_changed: FilenameSet.t; +} + +type 'a merge_results = 'a merge_job_results * sig_opts_data -type merge_strict_context_result = { +type merge_context_result = { cx: Context.t; other_cxs: Context.t list; master_cx: Context.sig_t; - file_sigs: File_sig.t FilenameMap.t; - typed_asts: (Loc.t, Loc.t * Type.t) Flow_ast.program FilenameMap.t; + file_sigs: File_sig.With_ALoc.t FilenameMap.t; + typed_asts: (ALoc.t, ALoc.t * Type.t) Flow_ast.program FilenameMap.t; + coverage_map: Coverage_response.file_coverage FilenameMap.t; } (* To merge the contexts of a component with their dependencies, we call the - functions `merge_component_strict` and `restore` defined in merge_js.ml + functions `merge_component` and `restore` defined in merge_js.ml with appropriate reqs prepared below. (a) orig_sig_cxs: the original signature contexts of dependencies outside the @@ -50,121 +69,185 @@ type merge_strict_context_result = { (g) decls: edges between files in the component and libraries, classified by requires (when implementations of such requires are not found). 
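+ For example, a require that resolves to a checked, parsed file outside the component becomes a
+ dep_impl edge and contributes that dependency's signature context to dep_cxs, while a require
+ with no implementation at all becomes a decl edge.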
*) -let reqs_of_component component required = - let dep_cxs, reqs = - List.fold_left (fun (dep_cxs, reqs) req -> - let r, locs, resolved_r, file = req in - let locs = locs |> Nel.to_list |> LocSet.of_list in - Module_heaps.(match get_file Expensive.ok resolved_r with - | Some (File_key.ResourceFile f) -> - dep_cxs, Reqs.add_res f file locs reqs - | Some dep -> - let info = get_info_unsafe ~audit:Expensive.ok dep in - if info.checked && info.parsed then - (* checked implementation exists *) - let m = Files.module_ref dep in - if Nel.mem dep component then - (* impl is part of component *) - dep_cxs, Reqs.add_impl m file locs reqs - else - (* look up impl sig_context *) - let leader = Context_heaps.find_leader dep in - let dep_cx = Context_heaps.find_sig leader in - dep_cx::dep_cxs, Reqs.add_dep_impl m file (dep_cx, locs) reqs - else - (* unchecked implementation exists *) - dep_cxs, Reqs.add_unchecked r file locs reqs - | None -> - (* implementation doesn't exist *) - dep_cxs, Reqs.add_decl r file (locs, resolved_r) reqs - ) - ) ([], Reqs.empty) required +let reqs_of_component ~reader component required = + let (dep_cxs, reqs) = + List.fold_left + (fun (dep_cxs, reqs) req -> + let (r, locs, resolved_r, file) = req in + let locs = locs |> Nel.to_list |> ALocSet.of_list in + Module_heaps.( + match Reader_dispatcher.get_file ~reader ~audit:Expensive.ok resolved_r with + | Some (File_key.ResourceFile f) -> (dep_cxs, Reqs.add_res f file locs reqs) + | Some dep -> + let info = Reader_dispatcher.get_info_unsafe ~reader ~audit:Expensive.ok dep in + if info.checked && info.parsed then + (* checked implementation exists *) + let m = Files.module_ref dep in + if Nel.mem dep component then + (* impl is part of component *) + (dep_cxs, Reqs.add_impl m file locs reqs) + else + (* look up impl sig_context *) + let leader = Context_heaps.Reader_dispatcher.find_leader ~reader dep in + let dep_cx = Context_heaps.Reader_dispatcher.find_sig ~reader leader in + (dep_cx :: dep_cxs, Reqs.add_dep_impl m file (dep_cx, locs) reqs) + else + (* unchecked implementation exists *) + (dep_cxs, Reqs.add_unchecked r file locs reqs) + | None -> + (* implementation doesn't exist *) + (dep_cxs, Reqs.add_decl r file (locs, resolved_r) reqs))) + ([], Reqs.empty) + required in - - let master_cx = Context_heaps.find_sig File_key.Builtins in - - master_cx, dep_cxs, reqs - -let merge_strict_context ~options component = - let required, file_sigs = - Nel.fold_left (fun (required, file_sigs) file -> - let file_sig = Parsing_heaps.get_file_sig_unsafe file in - let require_loc_map = File_sig.(require_loc_map file_sig.module_sig) in - let required = SMap.fold (fun r locs acc -> - let resolved_r = Module_js.find_resolved_module ~audit:Expensive.ok - file r in - (r, locs, resolved_r, file) :: acc - ) require_loc_map required in - required, FilenameMap.add file file_sig file_sigs - ) ([], FilenameMap.empty) component in - - let master_cx, dep_cxs, file_reqs = - reqs_of_component component required + let master_cx = Context_heaps.Reader_dispatcher.find_sig ~reader File_key.Builtins in + (master_cx, dep_cxs, reqs) + +let merge_context_generic ~options ~reader ~get_ast_unsafe ~get_file_sig_unsafe ~phase component = + let (required, file_sigs) = + Nel.fold_left + (fun (required, file_sigs) file -> + let file_sig = get_file_sig_unsafe ~reader file in + let file_sigs = FilenameMap.add file file_sig file_sigs in + let require_loc_map = File_sig.With_ALoc.(require_loc_map file_sig.module_sig) in + let required = + SMap.fold + (fun r locs acc -> + 
let resolved_r = Module_js.find_resolved_module ~reader ~audit:Expensive.ok file r in + (r, locs, resolved_r, file) :: acc) + require_loc_map + required + in + (required, file_sigs)) + ([], FilenameMap.empty) + component in - + let (master_cx, dep_cxs, file_reqs) = reqs_of_component ~reader component required in let metadata = Context.metadata_of_options options in let lint_severities = Options.lint_severities options in let file_options = Some (Options.file_options options) in let strict_mode = Options.strict_mode options in - let ((cx, _), other_cxs) as cx_nel = Merge_js.merge_component_strict - ~metadata ~lint_severities ~file_options ~strict_mode ~file_sigs - ~get_ast_unsafe:Parsing_heaps.get_ast_unsafe - ~get_docblock_unsafe:Parsing_heaps.get_docblock_unsafe - ~do_gc:(Options.is_debug_mode options) - component file_reqs dep_cxs master_cx + let get_aloc_table_unsafe = + Parsing_heaps.Reader_dispatcher.get_sig_ast_aloc_table_unsafe ~reader in - - let typed_asts = Nel.fold_left (fun typed_asts (ctx, typed_ast) -> - let file = Context.file ctx in - FilenameMap.add file typed_ast typed_asts - ) FilenameMap.empty cx_nel in - - let other_cxs = List.map fst other_cxs in - - { cx; other_cxs; master_cx; file_sigs; typed_asts } - -(* Variation of merge_strict_context where requires may not have already been + let (((cx, _, _), other_cxs) as cx_nel) = + Merge_js.merge_component + ~metadata + ~lint_severities + ~file_options + ~strict_mode + ~file_sigs + ~phase + ~get_ast_unsafe:(get_ast_unsafe ~reader) + ~get_aloc_table_unsafe + ~get_docblock_unsafe:(Parsing_heaps.Reader_dispatcher.get_docblock_unsafe ~reader) + component + file_reqs + dep_cxs + master_cx + in + let (typed_asts, coverage_map) = + Nel.fold_left + (fun (typed_asts, cov_map) (ctx, typed_ast, cov) -> + let file = Context.file ctx in + (FilenameMap.add file typed_ast typed_asts, FilenameMap.add file cov cov_map)) + (FilenameMap.empty, FilenameMap.empty) + cx_nel + in + let other_cxs = Core_list.map ~f:(fun (cx, _, _) -> cx) other_cxs in + { cx; other_cxs; master_cx; file_sigs; typed_asts; coverage_map } + +let merge_context ~options ~reader component = + merge_context_generic + ~options + ~reader + ~phase: + (match Options.arch options with + | Options.Classic -> Context.Checking + | Options.TypesFirst -> Context.Merging) + ~get_ast_unsafe: + (match Options.arch options with + | Options.Classic -> + fun ~reader file -> + let ((_, _, comments) as ast) = + Parsing_heaps.Reader_dispatcher.get_ast_unsafe ~reader file + in + let aloc_ast = Ast_loc_utils.loc_to_aloc_mapper#program ast in + (comments, aloc_ast) + | Options.TypesFirst -> + fun ~reader file -> + let (_, _, comments) = Parsing_heaps.Reader_dispatcher.get_ast_unsafe ~reader file in + let aloc_ast = Parsing_heaps.Reader_dispatcher.get_sig_ast_unsafe ~reader file in + (comments, aloc_ast)) + ~get_file_sig_unsafe: + (match Options.arch options with + | Options.Classic -> + fun ~reader file -> + let loc_file_sig = Parsing_heaps.Reader_dispatcher.get_file_sig_unsafe ~reader file in + File_sig.abstractify_locs loc_file_sig + | Options.TypesFirst -> Parsing_heaps.Reader_dispatcher.get_sig_file_sig_unsafe) + component + +(* Variation of merge_context where requires may not have already been resolved. This is used by commands that make up a context on the fly. 
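+ (for example, checking the contents of a file that may not match what is on disk), so each
+ require is resolved here with Module_js.imported_module instead of being read from the
+ resolved-requires heap.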
*) -let merge_contents_context options file ast info file_sig = +let merge_contents_context ~reader options file ast info file_sig = + let (_, _, comments) = ast in + let aloc_ast = Ast_loc_utils.loc_to_aloc_mapper#program ast in + let reader = Abstract_state_reader.State_reader reader in + let file_sig = File_sig.abstractify_locs file_sig in let required = - let require_loc_map = File_sig.(require_loc_map file_sig.module_sig) in - SMap.fold (fun r locs required -> - let resolved_r = Module_js.imported_module - ~options - ~node_modules_containers:!Files.node_modules_containers - file locs r in - (r, locs, resolved_r, file) :: required - ) require_loc_map [] + let require_loc_map = File_sig.With_ALoc.(require_loc_map file_sig.module_sig) in + SMap.fold + (fun r (locs : ALoc.t Nel.t) required -> + let resolved_r = + Module_js.imported_module + ~options + ~reader + ~node_modules_containers:!Files.node_modules_containers + file + locs + r + in + (r, locs, resolved_r, file) :: required) + require_loc_map + [] in let file_sigs = FilenameMap.singleton file file_sig in - let component = Nel.one file in - - let master_cx, dep_cxs, file_reqs = - begin try reqs_of_component component required with - | Key_not_found _ -> - failwith "not all dependencies are ready yet, aborting..." - | e -> raise e - end + let (master_cx, dep_cxs, file_reqs) = + try reqs_of_component ~reader component required with + | Key_not_found _ -> failwith "not all dependencies are ready yet, aborting..." + | e -> raise e in - let metadata = Context.metadata_of_options options in let lint_severities = Options.lint_severities options in let file_options = Some (Options.file_options options) in let strict_mode = Options.strict_mode options in - let cx, _ = Merge_js.merge_component_strict - ~metadata ~lint_severities ~file_options ~strict_mode ~file_sigs - ~get_ast_unsafe:(fun _ -> ast) - ~get_docblock_unsafe:(fun _ -> info) - component file_reqs dep_cxs master_cx + let get_aloc_table_unsafe = + Parsing_heaps.Reader_dispatcher.get_sig_ast_aloc_table_unsafe ~reader in - - cx + let ((cx, tast, _), _) = + Merge_js.merge_component + ~metadata + ~lint_severities + ~file_options + ~strict_mode + ~file_sigs + ~get_ast_unsafe:(fun _ -> (comments, aloc_ast)) + ~get_aloc_table_unsafe + ~get_docblock_unsafe:(fun _ -> info) + ~phase:Context.Checking + component + file_reqs + dep_cxs + master_cx + in + (cx, tast) (* Entry point for merging a component *) -let merge_strict_component ~worker_mutator ~options merged_acc component = +let merge_component ~worker_mutator ~options ~reader component = + let start_merge_time = Unix.gettimeofday () in let file = Nel.hd component in - (* We choose file as the leader, and other_files are followers. It is always OK to choose file as leader, as explained below. @@ -177,19 +260,29 @@ let merge_strict_component ~worker_mutator ~options merged_acc component = It also follows when file is checked, other_files must be checked too! 
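+ If the leader is not checked we skip merging entirely and return empty error, warning, and
+ suppression sets below.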
*) - let info = Module_heaps.get_info_unsafe ~audit:Expensive.ok file in + let info = Module_heaps.Mutator_reader.get_info_unsafe ~reader ~audit:Expensive.ok file in if info.Module_heaps.checked then ( - let { cx; other_cxs = _; master_cx; _ } = merge_strict_context ~options component in - + let reader = Abstract_state_reader.Mutator_state_reader reader in + let { cx; other_cxs = _; master_cx; coverage_map; _ } = + merge_context ~options ~reader component + in let module_refs = List.rev_map Files.module_ref (Nel.to_list component) in let md5 = Merge_js.ContextOptimizer.sig_context cx module_refs in - Context.clear_master_shared cx master_cx; let errors = Context.errors cx in let suppressions = Context.error_suppressions cx in let severity_cover = Context.severity_cover cx in - + let include_suppressions = Context.include_suppressions cx in + let aloc_tables = Context.aloc_tables cx in + let (errors, warnings, suppressions) = + Error_suppressions.filter_lints + ~include_suppressions + suppressions + errors + aloc_tables + severity_cover + in Context.remove_all_errors cx; Context.remove_all_error_suppressions cx; Context.remove_all_lint_severities cx; @@ -197,19 +290,62 @@ let merge_strict_component ~worker_mutator ~options merged_acc component = Context.clear_intermediates cx; Context_heaps.Merge_context_mutator.add_merge_on_diff - ~audit:Expensive.ok worker_mutator cx component md5; - - (file, Ok (errors, suppressions, severity_cover)) :: merged_acc - ) - else - let errors = Errors.ErrorSet.empty in + ~audit:Expensive.ok + worker_mutator + cx + component + md5; + let merge_time = Unix.gettimeofday () -. start_merge_time in + Ok (errors, warnings, suppressions, coverage_map, merge_time) + ) else + let errors = Flow_error.ErrorSet.empty in let suppressions = Error_suppressions.empty in - let severity_cover = - Utils_js.FilenameMap.singleton - file - (ExactCover.file_cover file (Options.lint_severities options)) + let warnings = Flow_error.ErrorSet.empty in + let coverage = FilenameMap.empty in + Ok (errors, warnings, suppressions, coverage, 0.0) + +let check_file options ~reader file = + let start_check_time = Unix.gettimeofday () in + let info = Module_heaps.Mutator_reader.get_info_unsafe ~reader ~audit:Expensive.ok file in + if info.Module_heaps.checked then + let reader = Abstract_state_reader.Mutator_state_reader reader in + let { cx; coverage_map; _ } = + merge_context_generic + ~options + ~reader + ~phase:Context.Checking + ~get_ast_unsafe:(fun ~reader file -> + let ((_, _, comments) as ast) = + Parsing_heaps.Reader_dispatcher.get_ast_unsafe ~reader file + in + let aloc_ast = Ast_loc_utils.loc_to_aloc_mapper#program ast in + (comments, aloc_ast)) + ~get_file_sig_unsafe:(fun ~reader file -> + Parsing_heaps.Reader_dispatcher.get_file_sig_unsafe ~reader file + |> File_sig.abstractify_locs) + (Nel.one file) in - (file, Ok (errors, suppressions, severity_cover)) :: merged_acc + let errors = Context.errors cx in + let suppressions = Context.error_suppressions cx in + let severity_cover = Context.severity_cover cx in + let include_suppressions = Context.include_suppressions cx in + let aloc_tables = Context.aloc_tables cx in + let (errors, warnings, suppressions) = + Error_suppressions.filter_lints + ~include_suppressions + suppressions + errors + aloc_tables + severity_cover + in + let check_time = Unix.gettimeofday () -. 
start_check_time in + (errors, warnings, suppressions, coverage_map, check_time) + else + let errors = Flow_error.ErrorSet.empty in + let suppressions = Error_suppressions.empty in + let warnings = Flow_error.ErrorSet.empty in + let coverage = FilenameMap.empty in + (errors, warnings, suppressions, coverage, 0.0) (* Wrap a potentially slow operation with a timer that fires every interval seconds. When it fires, * it calls ~on_timer. When the operation finishes, the timer is cancelled *) @@ -217,122 +353,208 @@ let with_async_logging_timer ~interval ~on_timer ~f = let start_time = Unix.gettimeofday () in let timer = ref None in let cancel_timer () = Option.iter ~f:Timer.cancel_timer !timer in - let rec run_timer ?(first_run=false) () = - if not first_run - then begin + let rec run_timer ?(first_run = false) () = + ( if not first_run then let run_time = Unix.gettimeofday () -. start_time in - on_timer run_time - end; + on_timer run_time ); timer := Some (Timer.set_timer ~interval ~callback:run_timer) in (* Timer is unimplemented in Windows. *) if not Sys.win32 then run_timer ~first_run:true (); - let ret = begin try f () - with e -> - cancel_timer (); - raise e - end in + let ret = + try f () + with e -> + cancel_timer (); + raise e + in cancel_timer (); ret -let merge_strict_job ~worker_mutator ~job ~options merged elements = - List.fold_left (fun merged -> function - | Merge_stream.Component component -> - (* A component may have several files: there's always at least one, and +let merge_job ~worker_mutator ~reader ~job ~options merged elements = + List.fold_left + (fun merged -> function + | Merge_stream.Component component -> + (* A component may have several files: there's always at least one, and multiple files indicate a cycle. *) - let files = component - |> Nel.to_list - |> List.map File_key.to_string - |> String.concat "\n\t" - in - - let merge_timeout = Options.merge_timeout options in - let interval = Option.value_map ~f:(min 15.0) ~default:15.0 merge_timeout in - - try with_async_logging_timer - ~interval - ~on_timer:(fun run_time -> - Hh_logger.info "[%d] Slow MERGE (%f seconds so far): %s" (Unix.getpid()) run_time files; - Option.iter merge_timeout ~f:(fun merge_timeout -> - if run_time >= merge_timeout then raise (Flow_error.EMergeTimeout run_time) - ) - ) - ~f:(fun () -> - let start_time = Unix.gettimeofday () in - (* prerr_endlinef "[%d] MERGE: %s" (Unix.getpid()) files; *) - let ret = job ~worker_mutator ~options merged component in - let merge_time = Unix.gettimeofday () -. start_time in - if Options.should_profile options then begin - let length = Nel.length component in - let leader = Nel.hd component |> File_key.to_string in - Flow_server_profile.merge ~length ~merge_time ~leader; - if merge_time > 1.0 - then Hh_logger.info "[%d] perf: merged %s in %f" (Unix.getpid()) files merge_time - end; - ret - ) - with - | SharedMem_js.Out_of_shared_memory - | SharedMem_js.Heap_full - | SharedMem_js.Hash_table_full - | SharedMem_js.Dep_table_full as exc -> raise exc - (* A catch all suppression is probably a bad idea... *) - | exc -> - (* Ensure heaps are in a good state before continuing. *) - Context_heaps.Merge_context_mutator.add_merge_on_exn - ~audit:Expensive.ok worker_mutator ~options component; - (* In dev mode, fail hard, but log and continue in prod. *) - if Build_mode.dev then raise exc else - prerr_endlinef "(%d) merge_strict_job THROWS: [%d] %s\n" - (Unix.getpid()) (Nel.length component) (fmt_file_exc files exc); - (* An errored component is always changed. 
*) - let file = Nel.hd component in - let file_loc = Loc.({ none with source = Some file }) in - (* We can't pattern match on the exception type once it's marshalled + let files = + component |> Nel.to_list |> Core_list.map ~f:File_key.to_string |> String.concat "\n\t" + in + let merge_timeout = Options.merge_timeout options in + let interval = Option.value_map ~f:(min 15.0) ~default:15.0 merge_timeout in + (try + with_async_logging_timer + ~interval + ~on_timer:(fun run_time -> + Hh_logger.info + "[%d] Slow MERGE (%f seconds so far): %s" + (Unix.getpid ()) + run_time + files; + Option.iter merge_timeout ~f:(fun merge_timeout -> + if run_time >= merge_timeout then raise (Error_message.EMergeTimeout run_time))) + ~f:(fun () -> + let start_time = Unix.gettimeofday () in + (* prerr_endlinef "[%d] MERGE: %s" (Unix.getpid()) files; *) + let ret = job ~worker_mutator ~options ~reader component in + let merge_time = Unix.gettimeofday () -. start_time in + if Options.should_profile options then ( + let length = Nel.length component in + let leader = Nel.hd component |> File_key.to_string in + Flow_server_profile.merge ~length ~merge_time ~leader; + if merge_time > 1.0 then + Hh_logger.info "[%d] perf: merged %s in %f" (Unix.getpid ()) files merge_time + ); + (Nel.hd component, ret) :: merged) + with + | ( SharedMem_js.Out_of_shared_memory | SharedMem_js.Heap_full + | SharedMem_js.Hash_table_full | SharedMem_js.Dep_table_full ) as exc -> + raise exc + (* A catch all suppression is probably a bad idea... *) + | unwrapped_exc -> + let exc = Exception.wrap unwrapped_exc in + let exn_str = Printf.sprintf "%s: %s" files (Exception.to_string exc) in + (* Ensure heaps are in a good state before continuing. *) + Context_heaps.Merge_context_mutator.add_merge_on_exn + ~audit:Expensive.ok + worker_mutator + ~options + component; + + (* In dev mode, fail hard, but log and continue in prod. *) + if Build_mode.dev then + Exception.reraise exc + else + prerr_endlinef + "(%d) merge_job THROWS: [%d] %s\n" + (Unix.getpid ()) + (Nel.length component) + exn_str; + + (* An errored component is always changed. *) + let file = Nel.hd component in + let file_loc = Loc.{ none with source = Some file } |> ALoc.of_loc in + (* We can't pattern match on the exception type once it's marshalled back to the master process, so we pattern match on it here to create an error result. 
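A hedged sketch of the pattern explained above (the wrapper name is hypothetical): the worker converts exceptions into a serializable Error before the result is marshalled back, because the marshalled exception can no longer be pattern matched in the master process.

let run_serializable ~file f =
  try Ok (f ()) with
  | unwrapped_exc ->
    let exc = Exception.wrap unwrapped_exc in
    let file_loc = Loc.{ none with source = Some file } |> ALoc.of_loc in
    (* translate to a constructor the master process can match on *)
    Error
      Error_message.(
        match unwrapped_exc with
        | EDebugThrow loc -> (loc, DebugThrow)
        | EMergeTimeout s -> (file_loc, MergeTimeout s)
        | _ -> (file_loc, MergeJobException exc))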
*) - let result = Error Flow_error.(match exc with - | EDebugThrow loc -> EInternal (loc, DebugThrow) - | EMergeTimeout s -> EInternal (file_loc, MergeTimeout s) - | _ -> EInternal (file_loc, MergeJobException exc) - ) in - (file, result) :: merged - ) merged elements - -(* make a map from component leaders to components *) + let result = + Error + Error_message.( + match unwrapped_exc with + | EDebugThrow loc -> (loc, DebugThrow) + | EMergeTimeout s -> (file_loc, MergeTimeout s) + | _ -> (file_loc, MergeJobException exc)) + in + (file, result) :: merged)) + merged + elements + let merge_runner - ~job ~master_mutator ~worker_mutator ~intermediate_result_callback ~options ~workers - dependency_graph component_map recheck_map = - (* make a map from files to their component leaders *) - let leader_map = - FilenameMap.fold (fun file component acc -> - Nel.fold_left (fun acc file_ -> - FilenameMap.add file_ file acc - ) acc component - ) component_map FilenameMap.empty + ~job + ~master_mutator + ~worker_mutator + ~reader + ~intermediate_result_callback + ~options + ~workers + ~dependency_graph + ~component_map + ~recheck_set = + let num_workers = Options.max_workers options in + (* (1) make a map from files to their component leaders + (2) lift recheck set from files to their component leaders *) + let (leader_map, recheck_leader_set) = + FilenameMap.fold + (fun leader component (leader_map, recheck_leader_set) -> + let (leader_map, recheck_leader) = + Nel.fold_left + (fun (leader_map, recheck_leader) file -> + ( FilenameMap.add file leader leader_map, + recheck_leader || FilenameSet.mem file recheck_set )) + (leader_map, false) + component + in + let recheck_leader_set = + if recheck_leader then + FilenameSet.add leader recheck_leader_set + else + recheck_leader_set + in + (leader_map, recheck_leader_set)) + component_map + (FilenameMap.empty, FilenameSet.empty) in - (* lift recheck map from files to leaders *) - let recheck_leader_map = FilenameMap.map ( - Nel.exists (fun f -> FilenameMap.find_unsafe f recheck_map) - ) component_map in - let start_time = Unix.gettimeofday () in - let {Merge_stream.next; merge; stats} = Merge_stream.make - ~dependency_graph ~leader_map ~component_map ~recheck_leader_map ~intermediate_result_callback + let stream = + Merge_stream.create + ~num_workers + ~arch:(Options.arch options) + ~dependency_graph + ~leader_map + ~component_map + ~recheck_leader_set + ~intermediate_result_callback in + Merge_stream.update_server_status stream; + (* returns parallel lists of filenames, error sets, and suppression sets *) - let%lwt ret = MultiWorkerLwt.call - workers - ~job: (merge_strict_job ~worker_mutator ~options ~job) - ~neutral: [] - ~merge:(merge ~master_mutator) - ~next + let%lwt ret = + MultiWorkerLwt.call + workers + ~job:(merge_job ~worker_mutator ~reader ~options ~job) + ~neutral:[] + ~merge:(Merge_stream.merge ~master_mutator ~reader stream) + ~next:(Merge_stream.next stream) in - let total_number_of_files = Merge_stream.get_total_files stats in - let files_skipped = Merge_stream.get_skipped_files stats in - Hh_logger.info "Merge skipped %d of %d modules" files_skipped total_number_of_files; + let total_files = Merge_stream.total_files stream in + let skipped_count = Merge_stream.skipped_count stream in + let sig_new_or_changed = Merge_stream.sig_new_or_changed master_mutator in + Hh_logger.info "Merge skipped %d of %d modules" skipped_count total_files; let elapsed = Unix.gettimeofday () -. 
start_time in - if Options.should_profile options then Hh_logger.info "merged (strict) in %f" elapsed; - Lwt.return (ret, files_skipped) - -let merge_strict = merge_runner ~job:merge_strict_component + if Options.should_profile options then Hh_logger.info "merged in %f" elapsed; + Lwt.return (ret, { skipped_count; sig_new_or_changed }) + +let merge = merge_runner ~job:merge_component + +let check options ~reader file = + let result = + let check_timeout = Options.merge_timeout options in + (* TODO: add new option *) + let interval = Option.value_map ~f:(min 5.0) ~default:5.0 check_timeout in + let file_str = File_key.to_string file in + try + with_async_logging_timer + ~interval + ~on_timer:(fun run_time -> + Hh_logger.info + "[%d] Slow CHECK (%f seconds so far): %s" + (Unix.getpid ()) + run_time + file_str; + Option.iter check_timeout ~f:(fun check_timeout -> + if run_time >= check_timeout then raise (Error_message.ECheckTimeout run_time))) + ~f:(fun () -> Ok (check_file options ~reader file)) + with + | ( SharedMem_js.Out_of_shared_memory | SharedMem_js.Heap_full | SharedMem_js.Hash_table_full + | SharedMem_js.Dep_table_full ) as exc -> + raise exc + (* A catch all suppression is probably a bad idea... *) + | unwrapped_exc -> + let exc = Exception.wrap unwrapped_exc in + let exn_str = Printf.sprintf "%s: %s" (File_key.to_string file) (Exception.to_string exc) in + (* In dev mode, fail hard, but log and continue in prod. *) + if Build_mode.dev then + Exception.reraise exc + else + prerr_endlinef "(%d) check_job THROWS: %s\n" (Unix.getpid ()) exn_str; + let file_loc = Loc.{ none with source = Some file } |> ALoc.of_loc in + (* We can't pattern match on the exception type once it's marshalled + back to the master process, so we pattern match on it here to create + an error result. *) + Error + Error_message.( + match unwrapped_exc with + | EDebugThrow loc -> (loc, DebugThrow) + | ECheckTimeout s -> (file_loc, CheckTimeout s) + | _ -> (file_loc, CheckJobException exc)) + in + (file, result) diff --git a/src/services/inference/merge_service.mli b/src/services/inference/merge_service.mli index ed7aa8a7f1e..78555276fcc 100644 --- a/src/services/inference/merge_service.mli +++ b/src/services/inference/merge_service.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
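A hedged usage sketch for with_async_logging_timer as defined above, with hypothetical numbers: given a 30-second timeout, the interval computed in `check` is min 5.0 30.0 = 5.0, so progress is logged every 5 seconds and the operation is aborted once it has been running for 30 seconds or more.

let slow_op_with_logging () =
  with_async_logging_timer
    ~interval:5.0
    ~on_timer:(fun run_time ->
      (* fires every 5 seconds while the operation is still running *)
      Hh_logger.info "[%d] still running after %f seconds" (Unix.getpid ()) run_time;
      if run_time >= 30.0 then failwith "timed out")
    ~f:(fun () -> Unix.sleep 1 (* stand-in for the potentially slow operation *))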
@@ -7,59 +7,80 @@ open Utils_js -type 'a merge_job_results = (File_key.t * ('a, Flow_error.error_message) result) list +type 'a unit_result = ('a, ALoc.t * Error_message.internal_error) result + +type 'a file_keyed_result = File_key.t * 'a unit_result + +type acc = + Flow_error.ErrorSet.t + * Flow_error.ErrorSet.t + * Error_suppressions.t + * Coverage_response.file_coverage FilenameMap.t + * float + +(* Time to check *) + +type 'a merge_job_results = 'a file_keyed_result list + type 'a merge_job = - worker_mutator: Context_heaps.Merge_context_mutator.worker_mutator -> + worker_mutator:Context_heaps.Merge_context_mutator.worker_mutator -> options:Options.t -> - 'a merge_job_results -> + reader:Mutator_state_reader.t -> File_key.t Nel.t -> - 'a merge_job_results + 'a unit_result -type 'a merge_results = 'a merge_job_results * int (* skipped count *) +type sig_opts_data = { + skipped_count: int; + sig_new_or_changed: FilenameSet.t; +} -type merge_strict_context_result = { +type 'a merge_results = 'a merge_job_results * sig_opts_data + +type merge_context_result = { cx: Context.t; other_cxs: Context.t list; master_cx: Context.sig_t; - file_sigs: File_sig.t FilenameMap.t; - typed_asts: (Loc.t, Loc.t * Type.t) Flow_ast.program FilenameMap.t; + file_sigs: File_sig.With_ALoc.t FilenameMap.t; + typed_asts: (ALoc.t, ALoc.t * Type.t) Flow_ast.program FilenameMap.t; + coverage_map: Coverage_response.file_coverage FilenameMap.t; } -val merge_strict_context: - options: Options.t -> - File_key.t Nel.t -> - merge_strict_context_result +val merge_context : + options:Options.t -> reader:Abstract_state_reader.t -> File_key.t Nel.t -> merge_context_result -val merge_contents_context: +val merge_contents_context : + reader:State_reader.t -> Options.t -> File_key.t -> (Loc.t, Loc.t) Flow_ast.program -> Docblock.t -> - File_sig.t -> - Context.t * (Loc.t, Loc.t * Type.t) Flow_ast.program + File_sig.With_Loc.t -> + Context.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.program -val merge_runner: - job: 'a merge_job -> - master_mutator: Context_heaps.Merge_context_mutator.master_mutator -> - worker_mutator: Context_heaps.Merge_context_mutator.worker_mutator -> - intermediate_result_callback: ('a merge_job_results Lazy.t -> unit) -> - options: Options.t -> - workers: MultiWorkerLwt.worker list option -> - FilenameSet.t FilenameMap.t -> - (File_key.t Nel.t) FilenameMap.t -> - bool FilenameMap.t -> +val merge_runner : + job:'a merge_job -> + master_mutator:Context_heaps.Merge_context_mutator.master_mutator -> + worker_mutator:Context_heaps.Merge_context_mutator.worker_mutator -> + reader:Mutator_state_reader.t -> + intermediate_result_callback:('a merge_job_results Lazy.t -> unit) -> + options:Options.t -> + workers:MultiWorkerLwt.worker list option -> + dependency_graph:FilenameSet.t FilenameMap.t -> + component_map:File_key.t Nel.t FilenameMap.t -> + recheck_set:FilenameSet.t -> 'a merge_results Lwt.t -val merge_strict: - master_mutator: Context_heaps.Merge_context_mutator.master_mutator -> - worker_mutator: Context_heaps.Merge_context_mutator.worker_mutator -> - intermediate_result_callback: - ((Errors.ErrorSet.t * - Error_suppressions.t * - ExactCover.lint_severity_cover Utils_js.FilenameMap.t) merge_job_results Lazy.t -> unit) -> - options: Options.t -> - workers: MultiWorkerLwt.worker list option -> - FilenameSet.t FilenameMap.t -> - (File_key.t Nel.t) FilenameMap.t -> - bool FilenameMap.t -> - (Errors.ErrorSet.t * Error_suppressions.t * ExactCover.lint_severity_cover Utils_js.FilenameMap.t) merge_results Lwt.t +val 
merge : + master_mutator:Context_heaps.Merge_context_mutator.master_mutator -> + worker_mutator:Context_heaps.Merge_context_mutator.worker_mutator -> + reader:Mutator_state_reader.t -> + intermediate_result_callback:(acc merge_job_results Lazy.t -> unit) -> + options:Options.t -> + workers:MultiWorkerLwt.worker list option -> + dependency_graph:FilenameSet.t FilenameMap.t -> + component_map:File_key.t Nel.t FilenameMap.t -> + recheck_set:FilenameSet.t -> + acc merge_results Lwt.t + +val check : + Options.t -> reader:Module_heaps.Mutator_reader.reader -> File_key.t -> acc file_keyed_result diff --git a/src/services/inference/merge_stream.ml b/src/services/inference/merge_stream.ml index 65813eb6371..98d3a889f4c 100644 --- a/src/services/inference/merge_stream.ml +++ b/src/services/inference/merge_stream.ml @@ -1,261 +1,263 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -open Utils_js +(* Custom bucketing scheme for dynamically growing and shrinking workloads when + merging files. + + We start out with files that have no dependencies: these files are available + for scheduling merge jobs. All other files are "blocked", i.e., they are *not + ready* for scheduling. + + NOTE: Scheduling merge jobs too early will cause crashes, since they will + need stuff that has not been computed yet! A more sophisticated scheme may be + designed to be tolerant to such failures, but the merge process is + complicated enough as is. Also, performance-wise blocking does not seem to be + an issue because files get unblocked pretty regularly (see below). -module Stream: sig - type 'a t - val empty: 'a t - val push: 'a -> 'a t -> 'a t - val pop_unsafe: 'a t -> ('a * 'a t) - val length: 'a t -> int -end = struct - type 'a t = int * 'a list - let empty = (0, []) - let push x (n, xs) = (n+1, x::xs) - let pop_unsafe xs = - match xs with - | (_, []) -> assert_false "pop_unsafe" - | (n, x::xs) -> x, (n-1, xs) - let length (n, _) = n -end + Each blocked file maintains a counter on the number of files blocking + them. As files are done, they decrement the counters of other files blocked + on them. As soon as some of the counters go to zero, the corresponding files + are made available for scheduling. -module MergeStats : sig - type t - val make: unit -> t - val get_total_files: t -> int - val increment_total_files: t -> int -> unit - val get_skipped_files: t -> int - val increment_skipped_files: t -> int -> unit -end = struct - type t = int ref (* total files *) * int ref (* skipped files *) - let make () = (ref 0, ref 0) - let get_total_files (total, _) = !total - let increment_total_files (total, _) x = - total := !total + x - let get_skipped_files (_, skipped) = !skipped - let increment_skipped_files (_, skipped) x = - skipped := !skipped + x -end + Finally, we maintain a counter on the total number of blocked files. When + that goes to zero, we prepare to exit! -type merge_stats = MergeStats.t -let get_total_files = MergeStats.get_total_files -let get_skipped_files = MergeStats.get_skipped_files + The underlying worker management scheme needs to know when to wait for more + work vs. when it can safely exit. We signal the former by returning a `Wait` + bucket, and the latter by returning a `Done` bucket. 
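A hedged, generic illustration of the scheme described above, using standalone types rather than the ones defined below: each blocked item counts its unmerged dependencies; finishing an item decrements each dependent's counter, and any counter that reaches zero moves that dependent onto the ready queue.

type demo_node = {
  mutable blocking : int;               (* unmerged dependencies remaining *)
  mutable dependents : demo_node list;  (* nodes waiting on this one *)
}

let finish ready node =
  List.iter
    (fun dep ->
      dep.blocking <- dep.blocking - 1;
      if dep.blocking = 0 then Queue.add dep ready)
    node.dependents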
+*) + +open Utils_js type element = Component of File_key.t Nel.t type 'a merge_result = (File_key.t * 'a) list -type 'a merge_stream = { - next: unit -> element list Bucket.bucket; - merge: - master_mutator: Context_heaps.Merge_context_mutator.master_mutator -> - (* merged *) - 'a merge_result -> - (* accumulator *) - 'a merge_result -> - (* accumulated results *) - 'a merge_result; - stats: merge_stats +type node = { + component: File_key.t Nel.t; + mutable dependents: node FilenameMap.t; + (* the number of leaders this node is currently blocking on *) + mutable blocking: int; + mutable recheck: bool; + size: int; } -let make - ~dependency_graph - ~leader_map - ~component_map - ~recheck_leader_map - ~intermediate_result_callback = - - (* Custom bucketing scheme for dynamically growing and shrinking workloads when - merging files. - - We start out with files that have no dependencies: these files are available - for scheduling merge jobs. All other files are "blocked", i.e., they are *not - ready* for scheduling. +type 'a t = { + graph: node FilenameMap.t; + ready: node Queue.t; + num_workers: int; + total_components: int; + total_files: int; + arch: Options.arch; + mutable ready_components: int; + mutable ready_files: int; + mutable blocked_components: int; + mutable blocked_files: int; + mutable merged_components: int; + mutable merged_files: int; + mutable skipped_components: int; + mutable skipped_files: int; + intermediate_result_callback: 'a merge_result Lazy.t -> unit; +} - NOTE: Scheduling merge jobs too early will cause crashes, since they will - need stuff that has not been computed yet! A more sophisticated scheme may be - designed to be tolerant to such failures, but the merge process is - complicated enough as is. Also, performance-wise blocking does not seem to be - an issue because files get unblocked pretty regularly (see below). +let add_ready node stream = + assert (node.blocking = 0); + Queue.add node stream.ready; + stream.ready_components <- stream.ready_components + 1; + stream.ready_files <- stream.ready_files + node.size; + () - Each blocked file maintains a counter on the number of files blocking - them. As files are done, they decrement the counters of other files blocked - on them. As soon as some of the counters go to zero, the corresponding files - are made available for scheduling. +let pop_ready stream = + let node = Queue.pop stream.ready in + stream.ready_components <- stream.ready_components - 1; + stream.ready_files <- stream.ready_files - node.size; + node - Finally, we maintain a counter on the total number of blocked files. When - that goes to zero, we prepare to exit! +(* hard-coded, as in Bucket *) +let max_bucket_size = 500 - The underlying worker management scheme needs to know when to wait for more - work vs. when it can safely exit. We signal the former by returning a `Wait` - bucket, and the latter by returning a `Job []` bucket. - *) - let max_bucket_size = 500 in (* hard-coded, as in Bucket *) +let bucket_size stream = + (* NB: num_workers can be zero *) + let max_bucket_size = + if stream.ready_files < stream.num_workers * max_bucket_size then + 1 + (stream.ready_files / stream.num_workers) + else + max_bucket_size + in + min max_bucket_size stream.ready_files - (* For each leader, maps the number of leaders it is currently blocking on. *) - let blocking = Hashtbl.create 0 in - (* Counts the number of blocked leaders. 
*) - let blocked = ref 0 in +let is_done stream = stream.blocked_components = 0 - let total_number_of_files = ref (FilenameMap.fold (fun _ files acc -> - Nel.length files + acc - ) component_map 0) in - let stats = MergeStats.make () in - let record_merged x = - MergeStats.increment_total_files stats x +let create + ~num_workers + ~arch + ~dependency_graph + ~leader_map + ~component_map + ~recheck_leader_set + ~intermediate_result_callback = + (* create node for each component *) + let graph = + FilenameMap.mapi + (fun leader component -> + { + component; + (* computed later *) + dependents = FilenameMap.empty; + (* computed later *) + blocking = 0; + recheck = FilenameSet.mem leader recheck_leader_set; + size = Nel.length component; + }) + component_map in - let record_skipped x = - record_merged x; - MergeStats.increment_skipped_files stats x + let (total_components, total_files) = + FilenameMap.fold (fun _ node (c, f) -> (c + 1, f + node.size)) graph (0, 0) in - - (* stream of files available to schedule *) - let stream = ref Stream.empty in - - (* For each leader, maps other leaders that are dependent on it. *) - let dependents = + (* calculate dependents, blocking for each node *) + let () = let leader f = FilenameMap.find_unsafe f leader_map in - let dependency_dag = FilenameMap.fold (fun f fs dependency_dag -> - let leader_f = leader f in - let dep_leader_fs = match FilenameMap.get leader_f dependency_dag with - | Some dep_leader_fs -> dep_leader_fs - | _ -> FilenameSet.empty - in - let dep_leader_fs = FilenameSet.fold (fun f dep_leader_fs -> - let f = leader f in - if f = leader_f then dep_leader_fs - else FilenameSet.add f dep_leader_fs - ) fs dep_leader_fs in - FilenameMap.add leader_f dep_leader_fs dependency_dag - ) dependency_graph FilenameMap.empty in + FilenameMap.iter + (fun f dep_fs -> + let leader_f = leader f in + let node = FilenameMap.find_unsafe leader_f graph in + FilenameSet.iter + (fun dep_f -> + let dep_leader_f = leader dep_f in + if dep_leader_f = leader_f then + () + else + let dep_node = FilenameMap.find_unsafe dep_leader_f graph in + let dependents = FilenameMap.add leader_f node dep_node.dependents in + if dependents != dep_node.dependents then ( + dep_node.dependents <- dependents; + node.blocking <- node.blocking + 1 + )) + dep_fs) + dependency_graph + in + let stream = + { + graph; + ready = Queue.create (); + num_workers; + total_components; + total_files; + arch; + ready_components = 0; + ready_files = 0; + blocked_components = 0; + blocked_files = 0; + merged_components = 0; + merged_files = 0; + skipped_components = 0; + skipped_files = 0; + intermediate_result_callback; + } + in + (* calculate the components ready to schedule and blocked counts *) + FilenameMap.iter + (fun _ node -> + if node.blocking = 0 then + add_ready node stream + else ( + stream.blocked_components <- stream.blocked_components + 1; + stream.blocked_files <- stream.blocked_files + node.size + )) + graph; - FilenameMap.iter (fun leader_f dep_leader_fs -> - let n = FilenameSet.cardinal dep_leader_fs in - (* n files block leader_f *) - Hashtbl.add blocking leader_f n; - if n = 0 - then (* leader_f isn't blocked, add to stream *) - stream := Stream.push leader_f !stream - else (* one more blocked *) - incr blocked - ) dependency_dag; + stream - (* TODO: remember reverse dependencies to quickly calculate remerge sets *) - ref (Sort_js.reverse dependency_dag) +let update_server_status stream = + let status = + match stream.arch with + | Options.Classic -> + ServerStatus.( + 
Merging_progress { finished = stream.merged_files; total = Some stream.total_files }) + | Options.TypesFirst -> + ServerStatus.( + Merging_types_progress { finished = stream.merged_files; total = Some stream.total_files }) in + MonitorRPC.status_update status - (* For each leader, maps the files in its component *) - let components = ref component_map in - - (* For each leader, specifies whether to recheck its component *) - let to_recheck: bool FilenameMap.t ref = ref recheck_leader_map in +let next stream = + let rec take acc n = + if n <= 0 then + acc + else + let node = pop_ready stream in + take (Component node.component :: acc) (n - node.size) + in + fun () -> + let n = bucket_size stream in + match take [] n with + | [] -> + if is_done stream then + Bucket.Done + else + Bucket.Wait + | components -> Bucket.Job components - (* Take n files from stream. We take an entire component at once, which might - cause us to take more than n files. *) - let take = - let rec loop acc len n = - if n <= 0 then (acc, len) - else begin - let (f, stream') = Stream.pop_unsafe !stream in - stream := stream'; - let fs = FilenameMap.find_unsafe f !components in - let fs_len = Nel.length fs in - loop ((Component fs)::acc) (fs_len+len) (n-fs_len) - end - in - loop [] 0 +let merge ~master_mutator ~reader stream = + (* If a component is unchanged, either because we merged it and the sig hash + * was unchanged or because the component was skipped entirely, we need to + * revive the shared heap entries corresponding to the component. These heap + * entries were oldified before merge began. *) + let revive node = + node.component + |> Nel.to_list + |> FilenameSet.of_list + |> Context_heaps.Merge_context_mutator.revive_files master_mutator + in + (* Record that a component was merged (or skipped) and recursively unblock its + * dependents. If a dependent has no more unmerged dependencies, make it + * available for scheduling. *) + let rec push diff node = + stream.merged_components <- stream.merged_components + 1; + stream.merged_files <- stream.merged_files + node.size; + if not diff then revive node; + FilenameMap.iter (fun _ node -> unblock diff node) node.dependents + and unblock diff node = + (* dependent blocked on one less *) + node.blocking <- node.blocking - 1; + + (* dependent should be rechecked if diff *) + node.recheck <- diff || node.recheck; - (* no more waiting, yay!
*) + if node.blocking = 0 then ( + stream.blocked_components <- stream.blocked_components - 1; + stream.blocked_files <- stream.blocked_files - node.size; + if node.recheck then + add_ready node stream else - let bucket_size = - if jobs < procs * max_bucket_size - then 1 + (jobs / procs) - else max_bucket_size - in - let n = min bucket_size jobs in - let components, num_files = take n in - if components <> [] then begin - MonitorRPC.status_update ServerStatus.(Merging_progress { - finished = MergeStats.get_total_files stats; - total = Some !total_number_of_files; - }); - record_merged num_files; - Bucket.Job components - end else - Bucket.Done + skip node + ) + and skip node = + stream.skipped_components <- stream.skipped_components + 1; + stream.skipped_files <- stream.skipped_files + node.size; + push false node in + fun merged acc -> + stream.intermediate_result_callback (lazy merged); + List.iter + (fun (leader_f, _) -> + let node = FilenameMap.find_unsafe leader_f stream.graph in + let diff = Context_heaps.Mutator_reader.sig_hash_changed ~reader leader_f in + push diff node) + merged; + update_server_status stream; + List.rev_append merged acc - (* We know when files are done by having jobs return the files they processed, - and trapping the function that joins results. ;), yeah. *) - let merge = - (* Once a component is merged, unblock dependent components to make them - * available to workers. Accumulate list of skipped components. *) - let rec push skipped leader_f diff = - FilenameSet.fold (fun dep_leader_f skipped -> - let n = (Hashtbl.find blocking dep_leader_f) - 1 in - (* dep_leader blocked on one less *) - Hashtbl.replace blocking dep_leader_f n; - (* dep_leader should be rechecked if diff *) - let recheck = diff || FilenameMap.find_unsafe dep_leader_f !to_recheck in - to_recheck := FilenameMap.add dep_leader_f recheck !to_recheck; - (* no more waiting, yay! *) - if n = 0 then ( - (* one less blocked; add dep_leader_f to stream if we need to recheck, - otherwise recursively unblock dependents *) - decr blocked; - if recheck - then ( - stream := Stream.push dep_leader_f !stream; - skipped - ) else push (dep_leader_f::skipped) dep_leader_f false - ) else skipped - ) (FilenameMap.find_unsafe leader_f !dependents) skipped - in - fun ~master_mutator merged merged_acc -> - let () = intermediate_result_callback (lazy merged) in - let skipped = List.fold_left (fun skipped (leader_f, _) -> - let diff = Context_heaps.sig_hash_changed leader_f in - let () = - let fs = - FilenameMap.find_unsafe leader_f !components - |> Nel.to_list - |> FilenameSet.of_list - in - if not diff - then Context_heaps.Merge_context_mutator.revive_files master_mutator fs - in - push skipped leader_f diff - ) [] merged in - let skipped_length = List.fold_left (fun acc leader_f -> - let fs = - FilenameMap.find_unsafe leader_f !components - |> Nel.to_list - |> FilenameSet.of_list - in - Context_heaps.Merge_context_mutator.revive_files master_mutator fs; - FilenameSet.cardinal fs + acc - ) 0 skipped in - if skipped_length > 0 then begin - record_skipped skipped_length; - MonitorRPC.status_update ServerStatus.(Merging_progress { - finished = MergeStats.get_total_files stats; - total = Some !total_number_of_files; - }) - end; - List.rev_append merged merged_acc - in +(* NOTE: call these functions only at the end of merge, not during. 
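A hedged sketch of the intended call site for the end-of-merge accessors noted here (defined just below), mirroring what merge_runner earlier in this diff does once MultiWorkerLwt.call has returned:

let finalize ~master_mutator stream =
  let skipped_count = Merge_stream.skipped_count stream in
  let sig_new_or_changed = Merge_stream.sig_new_or_changed master_mutator in
  Hh_logger.info "Merge skipped %d of %d modules" skipped_count (Merge_stream.total_files stream);
  (* sig_opts_data, as declared in merge_service.mli *)
  { skipped_count; sig_new_or_changed }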
*) +let total_files stream = stream.total_files + +let skipped_count stream = stream.skipped_files - { next; merge; stats } +(* See explanation in Context_heaps for why calling this function at the end of merge returns files + whose signatures are new or have changed. *) +let sig_new_or_changed = Context_heaps.Merge_context_mutator.unrevived_files diff --git a/src/services/inference/merge_stream.mli b/src/services/inference/merge_stream.mli index 9065dd25474..e274d423d3b 100644 --- a/src/services/inference/merge_stream.mli +++ b/src/services/inference/merge_stream.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,29 +9,34 @@ open Utils_js type element = Component of File_key.t Nel.t -type merge_stats -val get_total_files: merge_stats -> int -val get_skipped_files: merge_stats -> int - type 'a merge_result = (File_key.t * 'a) list -type 'a merge_stream = { - next: unit -> element list Bucket.bucket; - merge: - master_mutator: Context_heaps.Merge_context_mutator.master_mutator -> - (* merged *) - 'a merge_result -> - (* accumulator *) - 'a merge_result -> - (* accumulated results *) - 'a merge_result; - stats: merge_stats; -} - -val make : - dependency_graph: FilenameSet.t FilenameMap.t -> - leader_map: File_key.t FilenameMap.t -> - component_map: File_key.t Nel.t FilenameMap.t -> - recheck_leader_map: bool FilenameMap.t -> - intermediate_result_callback: ('a merge_result Lazy.t -> unit) -> - 'a merge_stream +type 'a t + +val create : + num_workers:int -> + arch:Options.arch -> + dependency_graph:FilenameSet.t FilenameMap.t -> + leader_map:File_key.t FilenameMap.t -> + component_map:File_key.t Nel.t FilenameMap.t -> + recheck_leader_set:FilenameSet.t -> + intermediate_result_callback:('a merge_result Lazy.t -> unit) -> + 'a t + +val update_server_status : 'a t -> unit + +val next : 'a t -> unit -> element list Bucket.bucket + +val merge : + master_mutator:Context_heaps.Merge_context_mutator.master_mutator -> + reader:Mutator_state_reader.t -> + 'a t -> + 'a merge_result -> + 'a merge_result -> + 'a merge_result + +val total_files : 'a t -> int + +val skipped_count : 'a t -> int + +val sig_new_or_changed : Context_heaps.Merge_context_mutator.master_mutator -> FilenameSet.t diff --git a/src/services/inference/module/dune b/src/services/inference/module/dune new file mode 100644 index 00000000000..8e7318f5bbd --- /dev/null +++ b/src/services/inference/module/dune @@ -0,0 +1,15 @@ +(library + (name flow_service_inference_module) + (wrapped false) + (libraries + flow_common + flow_common_audit + flow_common_modulename + flow_state_heaps_module + flow_state_heaps_parsing + flow_state_locals_module + flow_state_readers + flow_typing + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/services/inference/module/module_js.ml b/src/services/inference/module/module_js.ml index 67f6c0eeb52..538461d59f4 100644 --- a/src/services/inference/module/module_js.ml +++ b/src/services/inference/module/module_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -16,48 +16,51 @@ open Hh_json open Utils_js - -type mode = ModuleMode_Checked | ModuleMode_Weak | ModuleMode_Unchecked +type mode = + | ModuleMode_Checked + | ModuleMode_Weak + | ModuleMode_Unchecked type error = | ModuleDuplicateProviderError of { - module_name: string; - provider: File_key.t; - conflict: File_key.t; - } + module_name: string; + provider: File_key.t; + conflict: File_key.t; + } let choose_provider_and_warn_about_duplicates = let warn_duplicate_providers m current modules errmap = - List.fold_left (fun acc f -> - let w = ModuleDuplicateProviderError { - module_name = m; - provider = current; - conflict = f; - } in - FilenameMap.add f (match FilenameMap.get f acc with - | Some errset -> w::errset - | None -> [w] - ) acc - ) errmap modules in - + List.fold_left + (fun acc f -> + let w = + ModuleDuplicateProviderError { module_name = m; provider = current; conflict = f } + in + FilenameMap.add + f + (match FilenameMap.get f acc with + | Some errset -> w :: errset + | None -> [w]) + acc) + errmap + modules + in fun m errmap providers fallback -> - let definitions, implementations = - List.partition Files.has_flow_ext providers in - match implementations, definitions with + let (definitions, implementations) = List.partition Files.has_flow_ext providers in + match (implementations, definitions) with (* If there are no definitions or implementations, use the fallback *) - | [], [] -> fallback (), errmap + | ([], []) -> (fallback (), errmap) (* Else if there are no definitions, use the first implementation *) - | impl::dup_impls, [] -> - impl, warn_duplicate_providers m impl dup_impls errmap + | (impl :: dup_impls, []) -> (impl, warn_duplicate_providers m impl dup_impls errmap) (* Else use the first definition *) - | [], defn::dup_defns -> - defn, warn_duplicate_providers m defn dup_defns errmap + | ([], defn :: dup_defns) -> (defn, warn_duplicate_providers m defn dup_defns errmap) (* Don't complain about the first implementation being a duplicate *) - | impl::dup_impls, defn::dup_defns -> - let errmap = errmap - |> warn_duplicate_providers m impl dup_impls - |> warn_duplicate_providers m defn dup_defns in - defn, errmap + | (impl :: dup_impls, defn :: dup_defns) -> + let errmap = + errmap + |> warn_duplicate_providers m impl dup_impls + |> warn_duplicate_providers m defn dup_defns + in + (defn, errmap) (** * A set of module.name_mapper config entry allows users to specify regexp @@ -71,73 +74,77 @@ let choose_provider_and_warn_about_duplicates = *) let module_name_candidates ~options = Module_hashtables.memoize_with_module_name_candidates_cache ~f:(fun name -> - let mappers = Options.module_name_mappers options in - let root = Options.root options - |> Path.to_string - |> Sys_utils.normalize_filename_dir_sep in - let map_name mapped_names (regexp, template) = - let new_name = name - (* First we apply the mapper *) - |> Str.global_replace regexp template - (* Then we replace the PROJECT_ROOT placeholder. 
This works like - * Str.global_replace except it ignores things that look like - * backreferences, like \1 *) - |> Str.split_delim Files.project_root_token - |> String.concat root in - if new_name = name then mapped_names else new_name::mapped_names - in - List.rev (name::(List.fold_left map_name [] mappers)) - ) - -let add_package filename ast = - match Package_json.parse ast with - | Ok package -> - Module_heaps.Package_heap_mutator.add_package_json filename package - | Error parse_err -> - assert_false (spf "%s: %s" filename parse_err) - -let package_incompatible filename ast = - match Package_json.parse ast with - | Ok new_package -> - begin match Module_heaps.get_package filename with - | None -> true - | Some old_package -> old_package <> new_package - end - | Error parse_err -> - assert_false (spf "%s: %s" filename parse_err) + let mappers = Options.module_name_mappers options in + let root = Options.root options |> Path.to_string |> Sys_utils.normalize_filename_dir_sep in + let map_name mapped_names (regexp, template) = + let new_name = + name + (* First we apply the mapper *) + |> Str.global_replace regexp template + (* Then we replace the PROJECT_ROOT placeholder. This works like + * Str.global_replace except it ignores things that look like + * backreferences, like \1 *) + |> Str.split_delim Files.project_root_token + |> String.concat root + in + if new_name = name then + mapped_names + else + new_name :: mapped_names + in + List.rev (name :: List.fold_left map_name [] mappers)) + +let add_package filename = function + | Ok package -> Module_heaps.Package_heap_mutator.add_package_json filename package + | Error _ -> Module_heaps.Package_heap_mutator.add_error filename + +let package_incompatible ~reader filename ast = + let new_package = Package_json.parse ast in + let old_package = Module_heaps.Reader.get_package ~reader filename in + match (old_package, new_package) with + | (None, Ok _) -> true (* didn't exist before, found a new one *) + | (None, Error _) -> false (* didn't exist before, new one is invalid *) + | (Some (Error ()), Error _) -> false (* was invalid before, still invalid *) + | (Some (Error ()), Ok _) -> true (* was invalid before, new one is valid *) + | (Some (Ok _), Error _) -> true (* existed before, new one is invalid *) + | (Some (Ok old_package), Ok new_package) -> old_package <> new_package type resolution_acc = { mutable paths: SSet.t; - mutable errors: Flow_error.error_message list; + mutable errors: Error_message.t list; } (* Specification of a module system. Currently this signature is sufficient to model both Haste and Node, but should be further generalized. *) module type MODULE_SYSTEM = sig (* Given a file and docblock info, make the name of the module it exports. *) - val exported_module: Options.t -> File_key.t -> Docblock.t -> Modulename.t + val exported_module : Options.t -> File_key.t -> Docblock.t -> Modulename.t (* Given a file and a reference in it to an imported module, make the name of the module it refers to. If given an optional reference to an accumulator, record paths that were looked up but not found during resolution. *) - val imported_module: - options: Options.t -> + val imported_module : + options:Options.t -> + reader:Abstract_state_reader.t -> SSet.t -> - File_key.t -> Loc.t Nel.t -> + File_key.t -> + ALoc.t Nel.t -> ?resolution_acc:resolution_acc -> - string -> Modulename.t + string -> + Modulename.t (* for a given module name, choose a provider from among a set of files with that exported name. 
also check for duplicates and generate warnings, as dictated by module system rules. *) - val choose_provider: - string -> (* module name *) - FilenameSet.t -> (* set of candidate provider files *) + val choose_provider : + string -> + (* module name *) + FilenameSet.t -> + (* set of candidate provider files *) (* map from files to error sets (accumulator) *) error list FilenameMap.t -> (* file, error map (accumulator) *) - (File_key.t * error list FilenameMap.t) - + File_key.t * error list FilenameMap.t end (****************** Node module system *********************) @@ -152,72 +159,63 @@ end (* only purpose here is to guarantee a case-sensitive file exists and try to keep it from being too horrendously expensive *) -let case_sensitive = - not (Sys.file_exists (String.uppercase_ascii (Sys.getcwd ()))) +let case_sensitive = not (Sys.file_exists (String.uppercase_ascii (Sys.getcwd ()))) (* map of dirs to file lists *) + (** TODO [perf]: investigate whether this takes too much memory **) let files_in_dir = ref SMap.empty (* called from Types_js.typecheck, so we rebuild every time *) -let clear_filename_cache () = - files_in_dir := SMap.empty +let clear_filename_cache () = files_in_dir := SMap.empty (* case-sensitive dir_exists *) let rec dir_exists dir = - try Sys.is_directory dir && (case_sensitive || file_exists dir) - with _ -> false + (try Sys.is_directory dir && (case_sensitive || file_exists dir) with _ -> false) (* when system is case-insensitive, do our own file exists check *) and file_exists path = (* case doesn't matter for "/", ".", "..." and these serve as a base-case for * case-insensitive filesystems *) let dir = Filename.dirname path in - if ( + if case_sensitive || path = Filename.current_dir_name || path = Filename.parent_dir_name || path = dir - ) then Sys.file_exists path - else ( - let files = match SMap.get dir !files_in_dir with - | Some files -> files - | None -> + then + Sys.file_exists path + else + let files = + match SMap.get dir !files_in_dir with + | Some files -> files + | None -> let files = - if dir_exists dir - then SSet.of_list (Array.to_list (Sys.readdir dir)) - else SSet.empty in + if dir_exists dir then + SSet.of_list (Array.to_list (Sys.readdir dir)) + else + SSet.empty + in files_in_dir := SMap.add dir files !files_in_dir; files - in SSet.mem (Filename.basename path) files - ) - -let resolve_symlinks path = - Path.to_string (Path.make path) + in + SSet.mem (Filename.basename path) files -(** - * Given a list of lazy "option" expressions, evaluate each in the list - * sequentially until one produces a `Some` (and do not evaluate any remaining). - *) -let lazy_seq: 'a option Lazy.t list -> 'a option = - List.fold_left (fun acc lazy_expr -> - match acc with - | None -> Lazy.force lazy_expr - | Some _ -> acc - ) None +let resolve_symlinks path = Path.to_string (Path.make path) (* Every .js can be imported by its path, so it effectively exports a module by the name .js. Every .js.flow shadows the corresponding .js, so it effectively exports a module by the name .js. 
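A hedged illustration of the shadowing rule above, using eponymous_module as defined just below and hypothetical paths: an implementation file and its .js.flow declaration are expected to map to the same module name, assuming Files.chop_flow_ext strips the ".flow" suffix from the declaration's file key.

let _eponymous_example () =
  let impl = File_key.SourceFile "/proj/src/a.js" in
  let decl = File_key.SourceFile "/proj/src/a.js.flow" in
  (* both expected to be Modulename.Filename (File_key.SourceFile "/proj/src/a.js") *)
  (eponymous_module impl, eponymous_module decl)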
*) let eponymous_module file = - Modulename.Filename (match Files.chop_flow_ext file with - | Some file -> file - | None -> file - ) + Modulename.Filename + (match Files.chop_flow_ext file with + | Some file -> file + | None -> file) (*******************************) exception Module_resolver_fatal of string + exception Invalid_resolution module External = struct @@ -227,94 +225,86 @@ module External = struct let get_external_channels resolver = (* Create the channels if they don't exists *) - if !external_status && !external_channels = None - then begin + if !external_status && !external_channels = None then ( let program = Path.to_string resolver in - if not (Sys.file_exists program) then external_status := false - else begin + else let (child_r, parent_w) = Unix.pipe () in let (parent_r, child_w) = Unix.pipe () in - (* Don't leak these fds *) - List.iter (Unix.set_close_on_exec) [parent_w; parent_r]; - - let channels = ( - (Unix.out_channel_of_descr parent_w), - (Unix.in_channel_of_descr parent_r) - ) in + List.iter Unix.set_close_on_exec [parent_w; parent_r]; + let channels = (Unix.out_channel_of_descr parent_w, Unix.in_channel_of_descr parent_r) in try - ignore (Unix.create_process program [| program |] child_r child_w Unix.stderr); + ignore (Unix.create_process program [|program|] child_r child_w Unix.stderr); List.iter Unix.close [child_r; child_w]; external_channels := Some channels - with - | Unix.Unix_error (_, _, _) -> + with Unix.Unix_error (_, _, _) -> Hh_logger.info "Failed to create module resolver"; List.iter Unix.close [child_r; child_w; parent_r; parent_w] - end - end; + ); !external_channels let resolve_import opts f r = match Options.module_resolver opts with + | None -> None + | Some resolver -> + let issuer = File_key.to_string f in + let payload = json_to_string (JSON_Array [JSON_String r; JSON_String issuer]) in + (match get_external_channels resolver with | None -> None - | Some resolver -> - let issuer = File_key.to_string f in - let payload = json_to_string (JSON_Array [ JSON_String r; JSON_String issuer; ]) in - - match get_external_channels resolver with - | None -> None - | Some (out_channel, in_channel) -> - let response_data = - try - output_string out_channel (payload ^ "\n"); - Pervasives.flush out_channel; - - let response_text = input_line in_channel in - json_of_string response_text - with exn -> - let () = Hh_logger.fatal ~exn "Failed to talk to the module resolver" in - let exn_str = Printf.sprintf "Exception %s" (Printexc.to_string exn) in - raise (Module_resolver_fatal exn_str) - in - - let resolution = match response_data with - | JSON_Null -> None - | JSON_Array items -> + | Some (out_channel, in_channel) -> + let response_data = + try + output_string out_channel (payload ^ "\n"); + Pervasives.flush out_channel; + + let response_text = input_line in_channel in + json_of_string response_text + with exn -> + let exn = Exception.wrap exn in + let () = Hh_logger.fatal ~exn "Failed to talk to the module resolver" in + let exn_str = Printf.sprintf "Exception %s" (Exception.get_ctor_string exn) in + raise (Module_resolver_fatal exn_str) + in + let resolution = + match response_data with + | JSON_Null -> None + | JSON_Array items -> + begin + match items with + | [error; resolution] -> begin - match items with - | [ error; resolution ] -> - begin - match error with - | JSON_Null -> - begin - match resolution with - | JSON_Null -> None - | JSON_String r -> Some (resolve_symlinks r) - | _ -> raise (Invalid_resolution) - end - | _ -> None - end - | _ -> 
raise (Invalid_resolution) + match error with + | JSON_Null -> + begin + match resolution with + | JSON_Null -> None + | JSON_String r -> Some (resolve_symlinks r) + | _ -> raise Invalid_resolution + end + | _ -> None end - | _ -> raise (Invalid_resolution) in - - match resolution with - | None -> None - | Some r -> - let file_options = Options.file_options opts in - if not (Files.is_ignored file_options r) then Some r else None - + | _ -> raise Invalid_resolution + end + | _ -> raise Invalid_resolution + in + (match resolution with + | None -> None + | Some r -> + let file_options = Options.file_options opts in + if not (Files.is_ignored file_options r) then + Some r + else + None)) end (*******************************) module Node = struct - let exported_module _ file _ = - eponymous_module file + let exported_module _ file _ = eponymous_module file let record_path path = function | None -> () @@ -322,53 +312,58 @@ module Node = struct let path_if_exists = let path_exists ~file_options path = - (file_exists path) && - not (Files.is_ignored file_options path) && - not (dir_exists path) - in fun ~file_options resolution_acc path -> + file_exists path && (not (Files.is_ignored file_options path)) && not (dir_exists path) + in + fun ~file_options resolution_acc path -> let path = resolve_symlinks path in let declaration_path = path ^ Files.flow_ext in - if path_exists ~file_options declaration_path || - path_exists ~file_options path - then Some path - else (record_path path resolution_acc; None) + if path_exists ~file_options declaration_path || path_exists ~file_options path then + Some path + else ( + record_path path resolution_acc; + None + ) let path_if_exists_with_file_exts ~file_options resolution_acc path file_exts = - lazy_seq (file_exts |> List.map (fun ext -> - lazy (path_if_exists ~file_options resolution_acc (path ^ ext)) - )) + lazy_seq + ( file_exts + |> Core_list.map ~f:(fun ext -> + lazy (path_if_exists ~file_options resolution_acc (path ^ ext))) ) - let parse_main ~root ~file_options loc resolution_acc package_filename file_exts = + let parse_main + ~reader ~root ~file_options (loc : ALoc.t) resolution_acc package_filename file_exts = let package_filename = resolve_symlinks package_filename in - if not (file_exists package_filename) || (Files.is_ignored file_options package_filename) - then None + if (not (file_exists package_filename)) || Files.is_ignored file_options package_filename then + None else - let package = match Module_heaps.get_package package_filename with - | Some package -> package - | None -> - let msg = - let is_included = Files.is_included file_options package_filename in - let project_root_str = Path.to_string root in - let is_contained_in_root = - Files.is_prefix project_root_str package_filename - in - let package_relative_to_root = - spf "<>%s%s" - (Filename.dir_sep) - (Files.relative_path project_root_str package_filename) + let package = + match Module_heaps.Reader_dispatcher.get_package ~reader package_filename with + | Some (Ok package) -> package + | Some (Error ()) -> + (* invalid, but we already raised an error when building PackageHeap *) + Package_json.empty + | None -> + let msg = + let is_included = Files.is_included file_options package_filename in + let project_root_str = Path.to_string root in + let is_contained_in_root = Files.is_prefix project_root_str package_filename in + let package_relative_to_root = + spf + "<>%s%s" + Filename.dir_sep + (Files.relative_path project_root_str package_filename) + in + if is_included || 
is_contained_in_root then + Error_message.(EInternal (loc, PackageHeapNotFound package_relative_to_root)) + else + Error_message.EModuleOutsideRoot (loc, package_relative_to_root) in - if is_included || is_contained_in_root then ( - Flow_error.(EInternal (loc, PackageHeapNotFound package_relative_to_root)) - ) else ( - Flow_error.EModuleOutsideRoot (loc, package_relative_to_root) - ) - in - begin match resolution_acc with - | Some resolution_acc -> - resolution_acc.errors <- msg :: resolution_acc.errors - | None -> () - end; - Package_json.empty + begin + match resolution_acc with + | Some resolution_acc -> resolution_acc.errors <- msg :: resolution_acc.errors + | None -> () + end; + Package_json.empty in let dir = Filename.dirname package_filename in match Package_json.main package with @@ -376,76 +371,108 @@ module Node = struct | Some file -> let path = Files.normalize_path dir file in let path_w_index = Filename.concat path "index" in - - lazy_seq [ - lazy (path_if_exists ~file_options resolution_acc path); - lazy (path_if_exists_with_file_exts ~file_options resolution_acc path file_exts); - lazy (path_if_exists_with_file_exts ~file_options resolution_acc path_w_index file_exts); - ] - - let resolve_relative ~options (loc, _) ?resolution_acc root_path rel_path = + lazy_seq + [ + lazy (path_if_exists ~file_options resolution_acc path); + lazy (path_if_exists_with_file_exts ~file_options resolution_acc path file_exts); + lazy + (path_if_exists_with_file_exts ~file_options resolution_acc path_w_index file_exts); + ] + + let resolve_relative ~options ~reader ((loc : ALoc.t), _) ?resolution_acc root_path rel_path = let file_options = Options.file_options options in let path = Files.normalize_path root_path rel_path in - if Files.is_flow_file ~options:file_options path - then path_if_exists ~file_options resolution_acc path - else ( + if Files.is_flow_file ~options:file_options path then + path_if_exists ~file_options resolution_acc path + else let path_w_index = Filename.concat path "index" in (* We do not try resource file extensions here. 
So while you can write * require('foo') to require foo.js, it should never resolve to foo.css *) let file_exts = SSet.elements (Files.module_file_exts file_options) in let root = Options.root options in - lazy_seq ([ - lazy (path_if_exists_with_file_exts ~file_options resolution_acc path file_exts); - lazy (parse_main ~root ~file_options loc resolution_acc (Filename.concat path "package.json") file_exts); - lazy (path_if_exists_with_file_exts ~file_options resolution_acc path_w_index file_exts); - ]) - ) - - let rec node_module ~options node_modules_containers file loc resolution_acc dir r = - let file_options = Options.file_options options in - lazy_seq [ - lazy ( - if SSet.mem dir node_modules_containers then - lazy_seq (Files.node_resolver_dirnames file_options |> List.map (fun dirname -> - lazy (resolve_relative - ~options - loc ?resolution_acc dir (spf "%s%s%s" dirname Filename.dir_sep r) - ) - )) - else None - ); - - lazy ( - let parent_dir = Filename.dirname dir in - if dir = parent_dir then None - else node_module ~options node_modules_containers file loc resolution_acc (Filename.dirname dir) r - ); - ] + lazy_seq + [ + lazy (path_if_exists_with_file_exts ~file_options resolution_acc path file_exts); + lazy + (parse_main + ~reader + ~root + ~file_options + loc + resolution_acc + (Filename.concat path "package.json") + file_exts); + lazy (path_if_exists_with_file_exts ~file_options resolution_acc path_w_index file_exts); + ] - let absolute r = - Str.string_match Files.absolute_path_regexp r 0 + let rec node_module ~options ~reader node_modules_containers file loc resolution_acc dir r = + let file_options = Options.file_options options in + lazy_seq + [ + lazy + ( if SSet.mem dir node_modules_containers then + lazy_seq + ( Files.node_resolver_dirnames file_options + |> Core_list.map ~f:(fun dirname -> + lazy + (resolve_relative + ~options + ~reader + loc + ?resolution_acc + dir + (spf "%s%s%s" dirname Filename.dir_sep r))) ) + else + None ); + lazy + (let parent_dir = Filename.dirname dir in + if dir = parent_dir then + None + else + node_module + ~options + ~reader + node_modules_containers + file + loc + resolution_acc + (Filename.dirname dir) + r); + ] + + let absolute r = Str.string_match Files.absolute_path_regexp r 0 let explicitly_relative r = - Str.string_match Files.current_dir_name r 0 - || Str.string_match Files.parent_dir_name r 0 + Str.string_match Files.current_dir_name r 0 || Str.string_match Files.parent_dir_name r 0 - let resolve_import ~options node_modules_containers f loc ?resolution_acc import_str = + let resolve_import ~options ~reader node_modules_containers f loc ?resolution_acc import_str = let file = File_key.to_string f in let dir = Filename.dirname file in - if explicitly_relative import_str || absolute import_str - then resolve_relative ~options loc ?resolution_acc dir import_str - else node_module ~options node_modules_containers f loc resolution_acc dir import_str + if explicitly_relative import_str || absolute import_str then + resolve_relative ~options ~reader loc ?resolution_acc dir import_str + else + node_module ~options ~reader node_modules_containers f loc resolution_acc dir import_str - let imported_module ~options node_modules_containers file loc ?resolution_acc import_str = + let imported_module ~options ~reader node_modules_containers file loc ?resolution_acc import_str + = let candidates = module_name_candidates ~options import_str in - let rec choose_candidate = function | [] -> None | candidate :: candidates -> - match resolve_import 
~options node_modules_containers file loc ?resolution_acc candidate with + let resolved = + resolve_import + ~options + ~reader + node_modules_containers + file + loc + ?resolution_acc + candidate + in + (match resolved with | None -> choose_candidate candidates - | Some _ as result -> result + | Some _ as result -> result) in match choose_candidate candidates with | Some str -> @@ -458,22 +485,20 @@ module Node = struct singleton provider set is craziness. *) let choose_provider m files errmap = let files = FilenameSet.elements files in - let fallback () = - failwith (spf "internal error: empty provider set for module %S" m) in + let fallback () = failwith (spf "internal error: empty provider set for module %S" m) in choose_provider_and_warn_about_duplicates m errmap files fallback - end (****************** Haste module system *********************) -module Haste: MODULE_SYSTEM = struct +module Haste : MODULE_SYSTEM = struct let short_module_name_of = function | File_key.Builtins -> assert false | File_key.LibFile file | File_key.SourceFile file | File_key.JsonFile file | File_key.ResourceFile file -> - Filename.basename file |> Filename.chop_extension + Filename.basename file |> Filename.chop_extension let is_mock = let mock_path = Str.regexp ".*/__mocks__/.*" in @@ -483,92 +508,80 @@ module Haste: MODULE_SYSTEM = struct | File_key.SourceFile file | File_key.JsonFile file | File_key.ResourceFile file -> - Str.string_match mock_path file 0 + (* Standardize \ to / in path for Windows *) + let file = Sys_utils.normalize_filename_dir_sep file in + Str.string_match mock_path file 0 let expand_project_root_token options str = - let root = Path.to_string (Options.root options) - |> Sys_utils.normalize_filename_dir_sep in - str - |> Str.split_delim Files.project_root_token - |> String.concat root - |> Str.regexp - - let is_haste_file options file = - let matched_haste_paths_whitelist file = List.exists - (fun r -> Str.string_match (expand_project_root_token options r) (File_key.to_string file) 0) - (Options.haste_paths_whitelist options) in - let matched_haste_paths_blacklist file = List.exists - (fun r -> Str.string_match (expand_project_root_token options r) (File_key.to_string file) 0) - (Options.haste_paths_blacklist options) in - (matched_haste_paths_whitelist file) && not (matched_haste_paths_blacklist file) - - let haste_name options file = - let reduce_name name (regexp, template) = - Str.global_replace regexp template name + Files.expand_project_root_token_to_regexp ~root:(Options.root options) str + + let is_haste_file = + let matched_haste_paths_whitelist options name = + List.exists + (fun r -> Str.string_match (expand_project_root_token options r) name 0) + (Options.haste_paths_whitelist options) in - List.fold_left - reduce_name - (File_key.to_string file) - (Options.haste_name_reducers options) + let matched_haste_paths_blacklist options name = + List.exists + (fun r -> Str.string_match (expand_project_root_token options r) name 0) + (Options.haste_paths_blacklist options) + in + fun options name -> + matched_haste_paths_whitelist options name + && not (matched_haste_paths_blacklist options name) - let rec exported_module options file info = + let haste_name = + let reduce_name name (regexp, template) = Str.global_replace regexp template name in + (fun options name -> List.fold_left reduce_name name (Options.haste_name_reducers options)) + + let exported_module options file info = match file with | File_key.SourceFile _ -> - if is_mock file - then Modulename.String 
(short_module_name_of file) - else if Options.haste_use_name_reducers options - then - if is_haste_file options file - then Modulename.String (haste_name options file) - else exported_non_haste_module options file - else begin match Docblock.providesModule info with + if is_mock file then + Modulename.String (short_module_name_of file) + else if Options.haste_use_name_reducers options then + (* Standardize \ to / in path for Windows *) + let normalized_file_name = + Sys_utils.normalize_filename_dir_sep (File_key.to_string file) + in + if is_haste_file options normalized_file_name then + Modulename.String (haste_name options normalized_file_name) + else + Modulename.Filename file + else ( + match Docblock.providesModule info with | Some m -> Modulename.String m - | None -> - (* If foo.js.flow doesn't have a @providesModule, then look at foo.js - * and use its @providesModule instead *) - exported_non_haste_module options file - end + | None -> Modulename.Filename file + ) | _ -> (* Lib files, resource files, etc don't have any fancy haste name *) Modulename.Filename file - and exported_non_haste_module options file = - match Files.chop_flow_ext file with - | Some file_without_flow_ext -> - if Parsing_heaps.has_ast file_without_flow_ext - then - let info = Parsing_heaps.get_docblock_unsafe file_without_flow_ext in - exported_module options file_without_flow_ext info - else - Modulename.Filename (file_without_flow_ext) - | None -> - Modulename.Filename file - - let expanded_name r = + let expanded_name ~reader r = match Str.split_delim (Str.regexp_string "/") r with | [] -> None - | package_name::rest -> - Module_heaps.get_package_directory package_name |> Option.map ~f:(fun package -> - Files.construct_path package rest - ) + | package_name :: rest -> + Module_heaps.Reader_dispatcher.get_package_directory ~reader package_name + |> Option.map ~f:(fun package -> Files.construct_path package rest) (* similar to Node resolution, with possible special cases *) - let resolve_import ~options node_modules_containers f loc ?resolution_acc r = + let resolve_import ~options ~reader node_modules_containers f loc ?resolution_acc r = let file = File_key.to_string f in - lazy_seq [ - lazy (External.resolve_import options f r); - lazy (Node.resolve_import ~options node_modules_containers f loc ?resolution_acc r); - lazy (match expanded_name r with - | Some r -> - Node.resolve_relative ~options loc ?resolution_acc (Filename.dirname file) r - | None -> None - ); - ] - - let imported_module ~options node_modules_containers file loc ?resolution_acc imported_name = + lazy_seq + [ + lazy (External.resolve_import options f r); + lazy (Node.resolve_import ~options ~reader node_modules_containers f loc ?resolution_acc r); + lazy + (match expanded_name ~reader r with + | Some r -> + Node.resolve_relative ~options ~reader loc ?resolution_acc (Filename.dirname file) r + | None -> None); + ] + + let imported_module + ~options ~reader node_modules_containers file loc ?resolution_acc imported_name = let candidates = module_name_candidates ~options imported_name in - - (** + (* * In Haste, we don't have an autoritative list of all valid module names * until after all modules have been sweeped (because the module name is * specified in the contents of the file). So, unlike the node module @@ -579,11 +592,20 @@ module Haste: MODULE_SYSTEM = struct * matching candidate (rather than the first *valid* matching candidate). 
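(* Editor's note: illustrative sketch only, not part of this patch. The comment
   above says Haste always picks the first name candidate, while the Node
   resolver keeps trying candidates until one actually resolves. The contrast,
   using a hypothetical [resolve : string -> string option] stand-in for the
   real resolvers: *)
let node_style_choice ~resolve candidates =
  (* once a candidate resolves, the remaining ones are never resolved *)
  List.fold_left
    (fun acc candidate ->
      match acc with
      | Some _ -> acc
      | None -> resolve candidate)
    None
    candidates

let haste_style_choice candidates =
  (* first candidate wins unconditionally; a failed resolution surfaces later *)
  match candidates with
  | [] -> None
  | candidate :: _ -> Some candidate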
*) let chosen_candidate = List.hd candidates in - - match resolve_import ~options node_modules_containers file loc ?resolution_acc chosen_candidate with + let resolved = + resolve_import + ~options + ~reader + node_modules_containers + file + loc + ?resolution_acc + chosen_candidate + in + match resolved with | Some name -> - let options = Options.file_options options in - eponymous_module (Files.filename_from_string ~options name) + let options = Options.file_options options in + eponymous_module (Files.filename_from_string ~options name) | None -> Modulename.String chosen_candidate (* in haste, many files may provide the same module. here we're also @@ -594,15 +616,12 @@ module Haste: MODULE_SYSTEM = struct rest. *) let choose_provider m files errmap = match FilenameSet.elements files with - | [] -> - failwith (spf "internal error: empty provider set for module %S" m) - | [f] -> - f, errmap + | [] -> failwith (spf "internal error: empty provider set for module %S" m) + | [f] -> (f, errmap) | files -> - let mocks, non_mocks = List.partition is_mock files in - let fallback () = List.hd mocks in - choose_provider_and_warn_about_duplicates m errmap non_mocks fallback - + let (mocks, non_mocks) = List.partition is_mock files in + let fallback () = List.hd mocks in + choose_provider_and_warn_about_duplicates m errmap non_mocks fallback end (****************** module system switch *********************) @@ -618,36 +637,42 @@ let get_module_system opts = match !module_system with | Some system -> system | None -> - let module M = (val (match Options.module_system opts with - | Options.Node -> (module Node: MODULE_SYSTEM) - | Options.Haste -> (module Haste: MODULE_SYSTEM) - )) in + let module M = + ( val match Options.module_system opts with + | Options.Node -> (module Node : MODULE_SYSTEM) + | Options.Haste -> (module Haste : MODULE_SYSTEM) ) + in let system = (module M : MODULE_SYSTEM) in module_system := Some system; system let exported_module ~options file info = - let module M = (val (get_module_system options)) in + let module M = (val get_module_system options) in M.exported_module options file info -let imported_module ~options ~node_modules_containers file loc ?resolution_acc r = - let module M = (val (get_module_system options)) in - M.imported_module ~options node_modules_containers file loc ?resolution_acc r +let imported_module ~options ~reader ~node_modules_containers file loc ?resolution_acc r = + let module M = (val get_module_system options) in + M.imported_module ~options ~reader node_modules_containers file loc ?resolution_acc r -let imported_modules ~options node_modules_containers file require_loc = +let imported_modules ~options ~reader node_modules_containers file require_loc = (* Resolve all reqs relative to the given cx. Accumulate dependent paths in resolution_acc. Return the map of reqs to their resolved names, and the set containing the resolved names. 
*) let resolution_acc = { paths = SSet.empty; errors = [] } in - let resolved_modules = SMap.fold (fun mref loc acc -> - let m = imported_module file loc mref - ~options ~node_modules_containers ~resolution_acc in - SMap.add mref m acc - ) require_loc SMap.empty in - resolved_modules, resolution_acc + let resolved_modules = + SMap.fold + (fun mref loc acc -> + let m = + imported_module file loc mref ~options ~reader ~node_modules_containers ~resolution_acc + in + SMap.add mref m acc) + require_loc + SMap.empty + in + (resolved_modules, resolution_acc) let choose_provider ~options m files errmap = - let module M = (val (get_module_system options)) in + let module M = (val get_module_system options) in M.choose_provider m files errmap (******************) @@ -655,36 +680,47 @@ let choose_provider ~options m files errmap = (******************) (* Look up cached resolved module. *) -let find_resolved_module ~audit file r = +let find_resolved_module ~reader ~audit file r = let { Module_heaps.resolved_modules; _ } = - Module_heaps.get_resolved_requires_unsafe ~audit file + Module_heaps.Reader_dispatcher.get_resolved_requires_unsafe ~reader ~audit file in SMap.find_unsafe r resolved_modules -let checked_file ~audit f = - let info = f |> Module_heaps.get_info_unsafe ~audit in +let checked_file ~reader ~audit f = + let info = f |> Module_heaps.Reader_dispatcher.get_info_unsafe ~reader ~audit in info.Module_heaps.checked (* TODO [perf]: measure size and possibly optimize *) (* Extract and process information from context. In particular, resolve references to required modules in a file, and record the results. *) -let resolved_requires_of ~options node_modules_containers f require_loc = - let resolved_modules, { paths; errors } = - imported_modules ~options node_modules_containers f require_loc in - errors, { Module_heaps. 
- resolved_modules; - phantom_dependents = paths; - } - -let add_parsed_resolved_requires ~mutator ~options ~node_modules_containers file = - let file_sig = Parsing_heaps.get_file_sig_unsafe file in - let require_loc = File_sig.(require_loc_map file_sig.module_sig) in - let errors, resolved_requires = - resolved_requires_of ~options node_modules_containers file require_loc in - Module_heaps.Resolved_requires_mutator.add_resolved_requires mutator file resolved_requires; - List.fold_left (fun acc msg -> - Errors.ErrorSet.add (Flow_error.error_of_msg - ~trace_reasons:[] ~source_file:file msg) acc) Errors.ErrorSet.empty errors +let resolved_requires_of ~options ~reader node_modules_containers f require_loc = + let (resolved_modules, { paths; errors }) = + imported_modules ~options ~reader node_modules_containers f require_loc + in + (errors, Module_heaps.mk_resolved_requires ~resolved_modules ~phantom_dependents:paths) + +let add_parsed_resolved_requires ~mutator ~reader ~options ~node_modules_containers file = + let file_sig = + Parsing_heaps.Mutator_reader.get_file_sig_unsafe ~reader file |> File_sig.abstractify_locs + in + let require_loc = File_sig.With_ALoc.(require_loc_map file_sig.module_sig) in + let (errors, resolved_requires) = + let reader = Abstract_state_reader.Mutator_state_reader reader in + resolved_requires_of ~options ~reader node_modules_containers file require_loc + in + let resolved_requires_changed = + Module_heaps.Resolved_requires_mutator.add_resolved_requires mutator file resolved_requires + in + let errorset = + List.fold_left + (fun acc msg -> + Flow_error.ErrorSet.add + (Flow_error.error_of_msg ~trace_reasons:[] ~source_file:file msg) + acc) + Flow_error.ErrorSet.empty + errors + in + (resolved_requires_changed, errorset) (* Repick providers for modules that are exported by new and changed files, or were provided by changed and deleted files. @@ -745,92 +781,138 @@ let add_parsed_resolved_requires ~mutator ~options ~node_modules_containers file (b) remove the unregistered modules from NameHeap (c) register the new providers in NameHeap *) -let commit_modules ~transaction ~workers ~options ~is_init new_or_changed dirty_modules = +let commit_modules ~transaction ~workers ~options ~reader ~is_init new_or_changed dirty_modules = let debug = Options.is_debug_mode options in - let mutator = Module_heaps.Commit_modules_mutator.create transaction is_init in (* prep for registering new mappings in NameHeap *) - let to_remove, providers, to_replace, errmap, changed_modules = List.fold_left - (fun (rem, prov, rep, errmap, diff) (m, f_opt) -> - match Module_hashtables.find_in_all_providers_unsafe m with - | ps when FilenameSet.is_empty ps -> - if debug then prerr_endlinef - "no remaining providers: %S" - (Modulename.to_string m); - (Modulename.Set.add m rem), prov, rep, errmap, (Modulename.Set.add m diff) - | ps -> - (* incremental: install empty error sets here for provider candidates. + let (to_remove, providers, to_replace, errmap, changed_modules) = + List.fold_left + (fun (rem, prov, rep, errmap, diff) (m, f_opt) -> + match Module_hashtables.Mutator_reader.find_in_all_providers_unsafe ~reader m with + | ps when FilenameSet.is_empty ps -> + if debug then prerr_endlinef "no remaining providers: %S" (Modulename.to_string m); + (Modulename.Set.add m rem, prov, rep, errmap, Modulename.Set.add m diff) + | ps -> + (* incremental: install empty error sets here for provider candidates. 
this will have the effect of resetting downstream errors for these files, when the returned error map is used by our caller. IMPORTANT: since each file may (does) provide more than one module, files may already have acquired errors earlier in this fold, so we must only add an empty entry if no entry is already present *) - let errmap = FilenameSet.fold (fun f acc -> - match FilenameMap.get f acc with - | Some _ -> acc - | None -> FilenameMap.add f [] acc - ) ps errmap in - (* now choose provider for m *) - let p, errmap = choose_provider - ~options (Modulename.to_string m) ps errmap in - (* register chosen provider in NameHeap *) - match f_opt with - | Some f -> - if f = p then begin - (* When can this happen? Say m pointed to f before, a different file + let errmap = + FilenameSet.fold + (fun f acc -> + match FilenameMap.get f acc with + | Some _ -> acc + | None -> FilenameMap.add f [] acc) + ps + errmap + in + (* now choose provider for m *) + let (p, errmap) = choose_provider ~options (Modulename.to_string m) ps errmap in + (* register chosen provider in NameHeap *) + (match f_opt with + | Some f -> + if f = p then ( + (* When can this happen? Say m pointed to f before, a different file f' that provides m changed (so m is not in old_modules), but f continues to be the chosen provider = p (winning over f'). *) - if debug then prerr_endlinef - "unchanged provider: %S -> %s" - (Modulename.to_string m) - (File_key.to_string p); - let diff = if FilenameSet.mem p new_or_changed - then Modulename.Set.add m diff - else diff in - rem, prov, rep, errmap, diff - end else begin - (* When can this happen? Say m pointed to f before, a different file + if debug then + prerr_endlinef + "unchanged provider: %S -> %s" + (Modulename.to_string m) + (File_key.to_string p); + let diff = + if FilenameSet.mem p new_or_changed then + Modulename.Set.add m diff + else + diff + in + (rem, prov, rep, errmap, diff) + ) else ( + (* When can this happen? Say m pointed to f before, a different file f' that provides m changed (so m is not in old_modules), and now f' becomes the chosen provider = p (winning over f). *) - if debug then prerr_endlinef - "new provider: %S -> %s replaces %s" - (Modulename.to_string m) - (File_key.to_string p) - (File_key.to_string f); - let diff = Modulename.Set.add m diff in - rem, p::prov, (m, p)::rep, errmap, diff - end - | None -> - (* When can this happen? Either m pointed to a file that used to + if debug then + prerr_endlinef + "new provider: %S -> %s replaces %s" + (Modulename.to_string m) + (File_key.to_string p) + (File_key.to_string f); + let diff = Modulename.Set.add m diff in + (rem, p :: prov, (m, p) :: rep, errmap, diff) + ) + | None -> + (* When can this happen? Either m pointed to a file that used to provide m and changed or got deleted (causing m to be in old_modules), or m didn't have a provider before. 
*) - if debug then prerr_endlinef - "initial provider %S -> %s" - (Modulename.to_string m) - (File_key.to_string p); - let diff = Modulename.Set.add m diff in - rem, p::prov, (m,p)::rep, errmap, diff - ) (Modulename.Set.empty, [], [], FilenameMap.empty, Modulename.Set.empty) dirty_modules in - - let%lwt () = Module_heaps.Commit_modules_mutator.remove_and_replace - mutator ~workers ~to_remove ~to_replace + if debug then + prerr_endlinef + "initial provider %S -> %s" + (Modulename.to_string m) + (File_key.to_string p); + let diff = Modulename.Set.add m diff in + (rem, p :: prov, (m, p) :: rep, errmap, diff))) + (Modulename.Set.empty, [], [], FilenameMap.empty, Modulename.Set.empty) + dirty_modules + in + let%lwt () = + Module_heaps.Commit_modules_mutator.remove_and_replace mutator ~workers ~to_remove ~to_replace in - if debug then prerr_endlinef "*** done committing modules ***"; Lwt.return (providers, changed_modules, errmap) -let get_files ~audit filename module_name = - (module_name, Module_heaps.get_file ~audit module_name):: - let f_module = eponymous_module filename in - if f_module = module_name then [] - else [f_module, Module_heaps.get_file ~audit f_module] - -let get_files_unsafe ~audit filename module_name = - (module_name, Module_heaps.get_file_unsafe ~audit module_name):: - let f_module = eponymous_module filename in - if f_module = module_name then [] - else [f_module, Module_heaps.get_file_unsafe ~audit f_module] +let get_files ~reader ~audit filename module_name = + (module_name, Module_heaps.Reader_dispatcher.get_file ~reader ~audit module_name) + :: + (let f_module = eponymous_module filename in + if f_module = module_name then + [] + else + [(f_module, Module_heaps.Reader_dispatcher.get_file ~reader ~audit f_module)]) + +let get_files_unsafe ~reader ~audit filename module_name = + (module_name, Module_heaps.Mutator_reader.get_file_unsafe ~reader ~audit module_name) + :: + (let f_module = eponymous_module filename in + if f_module = module_name then + [] + else + [(f_module, Module_heaps.Mutator_reader.get_file_unsafe ~reader ~audit f_module)]) + +let calc_modules_helper ~reader workers files = + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun acc file -> + match Module_heaps.Mutator_reader.get_info ~reader ~audit:Expensive.ok file with + | Some info -> + let { Module_heaps.module_name; _ } = info in + (file, get_files_unsafe ~reader ~audit:Expensive.ok file module_name) :: acc + | None -> acc)) + ~neutral:[] + ~merge:List.rev_append + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements files)) + +(* Given a set of files which are unchanged, return the set of modules which those files provide *) +let calc_unchanged_modules ~reader workers unchanged = + let%lwt old_file_module_assoc = calc_modules_helper ~reader workers unchanged in + let unchanged_modules = + List.fold_left + (fun unchanged_modules (file, module_provider_assoc) -> + List.fold_left + (fun unchanged_modules (module_name, provider) -> + if provider = file then + Modulename.Set.add module_name unchanged_modules + else + unchanged_modules) + unchanged_modules + module_provider_assoc) + Modulename.Set.empty + old_file_module_assoc + in + Lwt.return unchanged_modules (* Calculate the set of modules whose current providers are changed or deleted files. @@ -852,80 +934,68 @@ let calc_old_modules = let calc_from_module_assocs ~all_providers_mutator ~options old_file_module_assoc = (* files may or may not be registered as module providers. 
when they are, we need to clear their registrations *) - let old_modules = List.fold_left (fun old_modules (file, module_provider_assoc) -> - List.fold_left (fun old_modules (module_name, provider) -> - Module_hashtables.All_providers_mutator.remove_provider - all_providers_mutator file module_name; - if provider = file - then (module_name, Some provider)::old_modules - else old_modules - ) old_modules module_provider_assoc - ) [] old_file_module_assoc in - + let old_modules = + List.fold_left + (fun old_modules (file, module_provider_assoc) -> + List.fold_left + (fun old_modules (module_name, provider) -> + Module_hashtables.All_providers_mutator.remove_provider + all_providers_mutator + file + module_name; + if provider = file then + (module_name, Some provider) :: old_modules + else + old_modules) + old_modules + module_provider_assoc) + [] + old_file_module_assoc + in let debug = Options.is_debug_mode options in - if debug then prerr_endlinef - "*** old modules (changed and deleted files) %d ***" - (List.length old_modules); + if debug then + prerr_endlinef "*** old modules (changed and deleted files) %d ***" (List.length old_modules); (* return *) old_modules in - - fun workers ~all_providers_mutator ~options new_or_changed_or_deleted -> - let%lwt old_file_module_assoc = MultiWorkerLwt.call workers - ~job: (List.fold_left (fun acc file -> - match Module_heaps.get_info ~audit:Expensive.ok file with - | Some info -> - let { Module_heaps.module_name; _ } = info in - (file, - get_files_unsafe ~audit:Expensive.ok file module_name) :: acc - | None -> acc - )) - ~neutral: [] - ~merge: List.rev_append - ~next: (MultiWorkerLwt.next workers (FilenameSet.elements new_or_changed_or_deleted)) + fun workers ~all_providers_mutator ~options ~reader new_or_changed_or_deleted -> + let%lwt old_file_module_assoc = + calc_modules_helper ~reader workers new_or_changed_or_deleted in - Lwt.return (calc_from_module_assocs ~all_providers_mutator ~options old_file_module_assoc) - module IntroduceFiles : sig - val introduce_files: + val introduce_files : mutator:Module_heaps.Introduce_files_mutator.t -> + reader:Mutator_state_reader.t -> all_providers_mutator:Module_hashtables.All_providers_mutator.t -> workers:MultiWorkerLwt.worker list option -> - options: Options.t -> + options:Options.t -> parsed:File_key.t list -> unparsed:(File_key.t * Docblock.t) list -> - (Modulename.t * File_key.t option) list Lwt.t + (Modulename.t * File_key.t option) list Lwt.t - val introduce_files_from_saved_state: + val introduce_files_from_saved_state : mutator:Module_heaps.Introduce_files_mutator.t -> all_providers_mutator:Module_hashtables.All_providers_mutator.t -> workers:MultiWorkerLwt.worker list option -> - options: Options.t -> + options:Options.t -> parsed:(File_key.t * Module_heaps.info) list -> unparsed:(File_key.t * Module_heaps.info) list -> - (Modulename.t * File_key.t option) list Lwt.t + (Modulename.t * File_key.t option) list Lwt.t end = struct (* Before and after inference, we add per-file module info to the shared heap from worker processes. Note that we wait to choose providers until inference is complete. 
*) - let add_parsed_info ~mutator ~options file = + let add_parsed_info ~mutator ~reader ~options file = let force_check = Options.all options in - let docblock = Parsing_heaps.get_docblock_unsafe file in + let docblock = Parsing_heaps.Mutator_reader.get_docblock_unsafe ~reader file in let module_name = exported_module ~options file docblock in - let checked = - force_check || - Docblock.is_flow docblock - in - let info = { Module_heaps. - module_name; - checked; - parsed = true; - } in + let checked = force_check || Docblock.is_flow docblock in + let info = { Module_heaps.module_name; checked; parsed = true } in Module_heaps.Introduce_files_mutator.add_info mutator file info; - file, module_name + (file, module_name) (* We need to track files that have failed to parse. This begins with adding tracking records for unparsed files to InfoHeap. They never @@ -938,79 +1008,94 @@ end = struct let force_check = Options.all options in let module_name = exported_module ~options file docblock in let checked = - force_check || - File_key.is_lib_file file || - Docblock.is_flow docblock || - Docblock.isDeclarationFile docblock + force_check + || File_key.is_lib_file file + || Docblock.is_flow docblock + || Docblock.isDeclarationFile docblock in - let info = { Module_heaps. - module_name; - checked; - parsed = false; - } in + let info = { Module_heaps.module_name; checked; parsed = false } in Module_heaps.Introduce_files_mutator.add_info mutator file info; - file, module_name + (file, module_name) let calc_new_modules ~all_providers_mutator ~options file_module_assoc = (* all modules provided by newly parsed / unparsed files must be repicked *) - let new_modules = List.fold_left (fun new_modules (file, module_opt_provider_assoc) -> - List.fold_left (fun new_modules (module_, opt_provider) -> - Module_hashtables.All_providers_mutator.add_provider all_providers_mutator file module_; - (module_, opt_provider)::new_modules - ) new_modules module_opt_provider_assoc - ) [] file_module_assoc in - + let new_modules = + List.fold_left + (fun new_modules (file, module_opt_provider_assoc) -> + List.fold_left + (fun new_modules (module_, opt_provider) -> + Module_hashtables.All_providers_mutator.add_provider + all_providers_mutator + file + module_; + (module_, opt_provider) :: new_modules) + new_modules + module_opt_provider_assoc) + [] + file_module_assoc + in let debug = Options.is_debug_mode options in - if debug then prerr_endlinef - "*** new modules (new and changed files) %d ***" - (List.length new_modules); + if debug then + prerr_endlinef "*** new modules (new and changed files) %d ***" (List.length new_modules); new_modules let introduce_files_generic - ~add_parsed_info ~add_unparsed_info - ~all_providers_mutator ~workers ~options ~parsed ~unparsed = - + ~add_parsed_info + ~add_unparsed_info + ~reader + ~all_providers_mutator + ~workers + ~options + ~parsed + ~unparsed = (* add tracking modules for unparsed files *) - let%lwt unparsed_file_module_assoc = MultiWorkerLwt.call workers - ~job: (List.fold_left (fun file_module_assoc unparsed_file -> - let filename, m = add_unparsed_info ~options unparsed_file in - (filename, - get_files ~audit:Expensive.ok filename m) :: file_module_assoc - )) - ~neutral: [] - ~merge: List.rev_append - ~next: (MultiWorkerLwt.next workers unparsed) + let%lwt unparsed_file_module_assoc = + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun file_module_assoc unparsed_file -> + let (filename, m) = add_unparsed_info ~options unparsed_file in + (filename, get_files 
~reader ~audit:Expensive.ok filename m) :: file_module_assoc)) + ~neutral:[] + ~merge:List.rev_append + ~next:(MultiWorkerLwt.next workers unparsed) in (* create info for parsed files *) - let%lwt parsed_file_module_assoc = MultiWorkerLwt.call workers - ~job: (List.fold_left (fun file_module_assoc parsed_file -> - let filename, m = add_parsed_info ~options parsed_file in - (filename, - get_files ~audit:Expensive.ok filename m) :: file_module_assoc - )) - ~neutral: [] - ~merge: List.rev_append - ~next: (MultiWorkerLwt.next workers parsed) + let%lwt parsed_file_module_assoc = + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun file_module_assoc parsed_file -> + let (filename, m) = add_parsed_info ~options parsed_file in + (filename, get_files ~reader ~audit:Expensive.ok filename m) :: file_module_assoc)) + ~neutral:[] + ~merge:List.rev_append + ~next:(MultiWorkerLwt.next workers parsed) in let new_file_module_assoc = - List.rev_append parsed_file_module_assoc unparsed_file_module_assoc in - + List.rev_append parsed_file_module_assoc unparsed_file_module_assoc + in Lwt.return (calc_new_modules ~all_providers_mutator ~options new_file_module_assoc) - let introduce_files ~mutator = - let add_parsed_info = add_parsed_info ~mutator in + let introduce_files ~mutator ~reader = + let add_parsed_info = add_parsed_info ~mutator ~reader in + let reader = Abstract_state_reader.Mutator_state_reader reader in let add_unparsed_info = add_unparsed_info ~mutator in - introduce_files_generic ~add_parsed_info ~add_unparsed_info + introduce_files_generic ~add_parsed_info ~add_unparsed_info ~reader let introduce_files_from_saved_state ~mutator = let add_info_from_saved_state ~options:_ (filename, info) = Module_heaps.Introduce_files_mutator.add_info mutator filename info; - filename, info.Module_heaps.module_name + (filename, info.Module_heaps.module_name) in + let reader = Abstract_state_reader.State_reader (State_reader.create ()) in introduce_files_generic - ~add_parsed_info:add_info_from_saved_state ~add_unparsed_info:add_info_from_saved_state + ~add_parsed_info:add_info_from_saved_state + ~add_unparsed_info:add_info_from_saved_state + ~reader end let introduce_files = IntroduceFiles.introduce_files + let introduce_files_from_saved_state = IntroduceFiles.introduce_files_from_saved_state diff --git a/src/services/inference/module/module_js.mli b/src/services/inference/module/module_js.mli index 36347d93407..547655ac230 100644 --- a/src/services/inference/module/module_js.mli +++ b/src/services/inference/module/module_js.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
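(* Editor's note: illustrative sketch only, not part of this patch.
   [get_module_system] earlier in this file memoizes the chosen resolver as a
   first-class module and unpacks it at each call site; the same pattern in
   miniature, with toy stand-ins for the Node and Haste systems: *)
module type NAMER = sig
  val module_name : string -> string
end

module Node_like : NAMER = struct
  (* Node-style: a file is identified by its path *)
  let module_name file = file
end

module Haste_like : NAMER = struct
  (* Haste-style: a short global name derived from the basename *)
  let module_name file = Filename.remove_extension (Filename.basename file)
end

let chosen_namer : (module NAMER) option ref = ref None

let get_namer use_haste =
  match !chosen_namer with
  | Some m -> m
  | None ->
    let m =
      if use_haste then
        (module Haste_like : NAMER)
      else
        (module Node_like : NAMER)
    in
    chosen_namer := Some m;
    m

let name_of use_haste file =
  let module M = (val get_namer use_haste) in
  M.module_name file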
@@ -7,95 +7,120 @@ open Utils_js - - -type mode = ModuleMode_Checked | ModuleMode_Weak | ModuleMode_Unchecked +type mode = + | ModuleMode_Checked + | ModuleMode_Weak + | ModuleMode_Unchecked type error = | ModuleDuplicateProviderError of { - module_name: string; - provider: File_key.t; - conflict: File_key.t; - } + module_name: string; + provider: File_key.t; + conflict: File_key.t; + } - -val eponymous_module: File_key.t -> Modulename.t +val eponymous_module : File_key.t -> Modulename.t (* export and import functions for the module system *) -val exported_module: - options: Options.t -> - File_key.t -> Docblock.t -> Modulename.t +val exported_module : options:Options.t -> File_key.t -> Docblock.t -> Modulename.t type resolution_acc = { mutable paths: SSet.t; - mutable errors: Flow_error.error_message list; + mutable errors: Error_message.t list; } -val imported_module: - options: Options.t -> - node_modules_containers: SSet.t -> - File_key.t -> Loc.t Nel.t -> ?resolution_acc:resolution_acc -> string -> Modulename.t -val find_resolved_module: - (File_key.t -> string -> Modulename.t) Expensive.t +val imported_module : + options:Options.t -> + reader:Abstract_state_reader.t -> + node_modules_containers:SSet.t -> + File_key.t -> + ALoc.t Nel.t -> + ?resolution_acc:resolution_acc -> + string -> + Modulename.t + +val find_resolved_module : + reader:Abstract_state_reader.t -> (File_key.t -> string -> Modulename.t) Expensive.t -val checked_file: (File_key.t -> bool) Expensive.t +val checked_file : reader:Abstract_state_reader.t -> (File_key.t -> bool) Expensive.t (* add module records for given files; returns the set of modules added *) -val introduce_files: +val introduce_files : mutator:Module_heaps.Introduce_files_mutator.t -> + reader:Mutator_state_reader.t -> all_providers_mutator:Module_hashtables.All_providers_mutator.t -> workers:MultiWorkerLwt.worker list option -> - options: Options.t -> + options:Options.t -> parsed:File_key.t list -> unparsed:(File_key.t * Docblock.t) list -> - (Modulename.t * File_key.t option) list Lwt.t + (Modulename.t * File_key.t option) list Lwt.t (* remove module records being tracked for given files; returns the set of modules removed *) -val calc_old_modules: +val calc_old_modules : MultiWorkerLwt.worker list option -> all_providers_mutator:Module_hashtables.All_providers_mutator.t -> options:Options.t -> + reader:Mutator_state_reader.t -> FilenameSet.t -> - (Modulename.t * File_key.t option) list Lwt.t + (Modulename.t * File_key.t option) list Lwt.t + +(* Given a set of files which haven't changed, return the modules currently being provided by these + * modules. 
+ *) +val calc_unchanged_modules : + reader:Mutator_state_reader.t -> + MultiWorkerLwt.worker list option -> + FilenameSet.t -> + Modulename.Set.t Lwt.t (* repick providers for old and new modules *) -val commit_modules: - transaction: Transaction.t -> - workers: MultiWorkerLwt.worker list option -> - options: Options.t -> - is_init: bool -> - FilenameSet.t -> (* parsed / unparsed files *) - (Modulename.t * File_key.t option) list -> (* dirty modules *) - (File_key.t list * (* providers *) - Modulename.Set.t * (* changed modules *) - error list FilenameMap.t) Lwt.t (* filenames to error sets *) +val commit_modules : + transaction:Transaction.t -> + workers:MultiWorkerLwt.worker list option -> + options:Options.t -> + reader:Mutator_state_reader.t -> + is_init:bool -> + FilenameSet.t -> + (* parsed / unparsed files *) + (Modulename.t * File_key.t option) list -> + (* dirty modules *) + ( File_key.t list + * (* providers *) + Modulename.Set.t + * (* changed modules *) + error list FilenameMap.t ) + Lwt.t + +(* filenames to error sets *) (* resolve and add requires from context to store *) -val add_parsed_resolved_requires: +val add_parsed_resolved_requires : mutator:Module_heaps.Resolved_requires_mutator.t -> + reader:Mutator_state_reader.t -> options:Options.t -> - node_modules_containers: SSet.t -> + node_modules_containers:SSet.t -> File_key.t -> - Errors.ErrorSet.t + bool * Flow_error.ErrorSet.t -val add_package: string -> (Loc.t, Loc.t) Flow_ast.program -> unit +val add_package : string -> Loc.t Package_json.t_or_error -> unit -val package_incompatible: string -> (Loc.t, Loc.t) Flow_ast.program -> bool +val package_incompatible : + reader:State_reader.t -> string -> (Loc.t, Loc.t) Flow_ast.program -> bool (***************************************************) -val clear_filename_cache: unit -> unit +val clear_filename_cache : unit -> unit (* APIs mainly intended for saving and loading saved state *) -val introduce_files_from_saved_state: +val introduce_files_from_saved_state : mutator:Module_heaps.Introduce_files_mutator.t -> all_providers_mutator:Module_hashtables.All_providers_mutator.t -> workers:MultiWorkerLwt.worker list option -> - options: Options.t -> + options:Options.t -> parsed:(File_key.t * Module_heaps.info) list -> unparsed:(File_key.t * Module_heaps.info) list -> - (Modulename.t * File_key.t option) list Lwt.t + (Modulename.t * File_key.t option) list Lwt.t diff --git a/src/services/inference/pure_dep_graph_operations.ml b/src/services/inference/pure_dep_graph_operations.ml new file mode 100644 index 00000000000..fd2e8c73102 --- /dev/null +++ b/src/services/inference/pure_dep_graph_operations.ml @@ -0,0 +1,75 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Utils_js + +type dependency_graph = FilenameSet.t FilenameMap.t + +(* `closure graph files` returns all files in `graph` which are reachable from `files`, directly or + * indirectly. 
*) +let closure = + let rec helper graph = + FilenameSet.fold (fun file acc -> + match FilenameMap.get file graph with + | Some files -> + let files = FilenameSet.diff files acc in + let acc = FilenameSet.union files acc in + helper graph files acc + | None -> acc) + in + (fun graph files -> helper graph files files) + +let reverse graph = + let acc = Hashtbl.create 0 in + FilenameMap.iter (fun f -> FilenameSet.iter (fun f' -> Hashtbl.add acc f' f)) graph; + FilenameMap.mapi (fun f _ -> FilenameSet.of_list @@ Hashtbl.find_all acc f) graph + +(* `calc_direct_dependencies graph files` will return the set of direct dependencies of + `files`. This set includes `files`. *) +let calc_direct_dependencies dependency_graph files = + FilenameSet.fold + (fun file acc -> + match FilenameMap.get file dependency_graph with + | Some files -> FilenameSet.union files acc + | None -> acc) + files + files + +(* `calc_all_dependencies graph files` will return the set of direct and transitive dependencies + * of `files`. This set does include `files`. + *) +let calc_all_dependencies dependency_graph files = closure dependency_graph files + +(* `calc_all_dependents graph files` will return the set of direct and transitive dependents of + `files`. This set include `files`. + + A file is a dependent of `files` whenever its code depends on any file whose *signature*, in + turn, directly or transitively depends on `files`. *) +let calc_all_dependents ~dependency_graph ~all_dependency_graph files = + let rev_dependency_graph = reverse dependency_graph in + let all_type_dependents = closure rev_dependency_graph files in + FilenameMap.fold + (fun f code_dependencies acc -> + if + (not (FilenameSet.mem f all_type_dependents)) + && FilenameSet.exists (fun f' -> FilenameSet.mem f' all_type_dependents) code_dependencies + then + FilenameSet.add f acc + else + acc) + all_dependency_graph + all_type_dependents + +(* Returns a copy of the dependency graph with only those file -> dependency edges where file and + dependency are in files *) +let filter_dependency_graph dependency_graph files = + FilenameSet.fold + (fun f -> + let fs = FilenameMap.find_unsafe f dependency_graph |> FilenameSet.inter files in + FilenameMap.add f fs) + files + FilenameMap.empty diff --git a/src/services/inference/pure_dep_graph_operations.mli b/src/services/inference/pure_dep_graph_operations.mli new file mode 100644 index 00000000000..fac60804e3d --- /dev/null +++ b/src/services/inference/pure_dep_graph_operations.mli @@ -0,0 +1,24 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
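(* Editor's note: illustrative sketch only, not part of this patch. The
   [closure] function added above walks the dependency graph until no new
   files are reachable; the same algorithm on a toy graph over strings,
   using only the standard library: *)
module StrSet = Set.Make (String)
module StrMap = Map.Make (String)

let rec closure graph frontier acc =
  StrSet.fold
    (fun file acc ->
      match StrMap.find_opt file graph with
      | Some deps ->
        let fresh = StrSet.diff deps acc in
        closure graph fresh (StrSet.union fresh acc)
      | None -> acc)
    frontier
    acc

let () =
  (* a depends on b, b depends on c, d has no dependencies *)
  let graph =
    StrMap.empty
    |> StrMap.add "a" (StrSet.singleton "b")
    |> StrMap.add "b" (StrSet.singleton "c")
    |> StrMap.add "d" StrSet.empty
  in
  let seed = StrSet.singleton "a" in
  (* everything reachable from "a", including "a" itself *)
  assert (StrSet.equal (closure graph seed seed) (StrSet.of_list ["a"; "b"; "c"]))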
+ *) + +(* Contains pure functions which perform calculations on the dependency graph *) + +type dependency_graph = Utils_js.FilenameSet.t Utils_js.FilenameMap.t + +val calc_direct_dependencies : dependency_graph -> Utils_js.FilenameSet.t -> Utils_js.FilenameSet.t + +val calc_all_dependencies : dependency_graph -> Utils_js.FilenameSet.t -> Utils_js.FilenameSet.t + +val calc_all_dependents : + dependency_graph:dependency_graph -> + all_dependency_graph:dependency_graph -> + Utils_js.FilenameSet.t -> + Utils_js.FilenameSet.t + +val filter_dependency_graph : + dependency_graph -> Utils_js.FilenameSet.t -> (* files *) + dependency_graph diff --git a/src/services/inference/recheck_stats.ml b/src/services/inference/recheck_stats.ml new file mode 100644 index 00000000000..af731e4b7f0 --- /dev/null +++ b/src/services/inference/recheck_stats.ml @@ -0,0 +1,219 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +let per_file_time_guess = 0.003 + +let per_file_time_key = "per_file_time" + +let estimates_key = "estimates" + +let estimated_time_to_recheck_key = "estimated_time_to_recheck" + +let estimated_time_to_restart_key = "estimated_time_to_restart" + +let estimated_time_to_init_key = "estimated_time_to_init" + +let estimated_time_per_file_key = "estimated_time_per_file" + +let estimated_files_to_recheck_key = "estimated_files_to_recheck" + +let estimated_files_to_init_key = "estimated_files_to_init" + +type estimates = { + estimated_time_to_recheck: float; + estimated_time_to_restart: float; + estimated_time_to_init: float; + estimated_time_per_file: float; + estimated_files_to_recheck: int; + estimated_files_to_init: int; +} + +type averages = { + init_time: float; + per_file_time: float; + parsed_count: int; +} + +let averages = ref None + +(* window should be a positive integer *) +let moving_average ~window ~avg ~sample ~sample_count = + let window = float_of_int window in + let sample_count = float_of_int sample_count in + if sample_count >= window then + sample + else + ((avg *. (window -. sample_count)) +. (sample *. sample_count)) /. 
window + +let get_file ~options = + let root = Options.root options in + let tmp_dir = Options.temp_dir options in + let flowconfig_name = Options.flowconfig_name options in + Server_files_js.recheck_stats_file ~flowconfig_name ~tmp_dir root + +let load_per_file_time ~options = + Lwt_result.( + let file = get_file ~options in + let%lwt result = + (try%lwt + Lwt_result.ok + (Lwt_io.open_file + ~flags:[Unix.O_RDONLY; Unix.O_NONBLOCK] + ~mode:Lwt_io.Input + ~perm:0o666 + file) + with Unix.Unix_error (Unix.ENOENT, _, _) -> Lwt_result.fail "File doesn't exist") + >>= fun ic -> + let%lwt contents = Lwt_io.read ic in + let%lwt () = Lwt_io.close ic in + Lwt_result.lift + (try + let json = Some (Hh_json.json_of_string contents) in + match Hh_json_helpers.Jget.float_opt json per_file_time_key with + | None -> + Result.Error + (Printf.sprintf "Failed to find key %S in JSON %S" per_file_time_key contents) + | Some v -> + Hh_json_helpers.Jget.( + let last_estimates = + match obj_opt json estimates_key with + | None -> None + | Some json -> + let json = Some json in + Some + { + estimated_time_to_recheck = float_exn json estimated_time_to_recheck_key; + estimated_time_to_restart = float_exn json estimated_time_to_restart_key; + estimated_time_to_init = float_exn json estimated_time_to_init_key; + estimated_time_per_file = float_exn json estimated_time_per_file_key; + estimated_files_to_recheck = int_exn json estimated_files_to_recheck_key; + estimated_files_to_init = int_exn json estimated_files_to_init_key; + } + in + Result.Ok (v, last_estimates)) + with + | Hh_json.Syntax_error str -> + Result.Error (Printf.sprintf "Failed to parse as JSON contents. %S: %S" str contents) + | Hh_json_helpers.Jget.Parse key -> + Result.Error + (Printf.sprintf "Failed to find key %S in estimates object. %S" key contents)) + in + match result with + | Result.Ok (per_file_time, last_estimates) -> Lwt.return (per_file_time, last_estimates) + | Result.Error reason -> + Hh_logger.info "Failed to load recheck stats from %S. Reason: %S" file reason; + Lwt.return (per_file_time_guess, None)) + +let save_averages ~options ?estimates new_averages = + averages := Some new_averages; + + let estimates = + Option.value_map + estimates + ~default:[] + ~f:(fun { + estimated_time_to_recheck; + estimated_time_to_restart; + estimated_time_to_init; + estimated_time_per_file; + estimated_files_to_recheck; + estimated_files_to_init; + } + -> + Hh_json. 
+ [ + ( estimates_key, + JSON_Object + [ + ( estimated_time_to_recheck_key, + JSON_Number (Dtoa.ecma_string_of_float estimated_time_to_recheck) ); + ( estimated_time_to_restart_key, + JSON_Number (Dtoa.ecma_string_of_float estimated_time_to_restart) ); + ( estimated_time_to_init_key, + JSON_Number (Dtoa.ecma_string_of_float estimated_time_to_init) ); + ( estimated_time_per_file_key, + JSON_Number (Dtoa.ecma_string_of_float estimated_time_per_file) ); + ( estimated_files_to_recheck_key, + JSON_Number (string_of_int estimated_files_to_recheck) ); + (estimated_files_to_init_key, JSON_Number (string_of_int estimated_files_to_init)); + ] ); + ]) + in + let json_str = + Hh_json.( + json_to_string + @@ JSON_Object + ( (per_file_time_key, JSON_Number (Dtoa.ecma_string_of_float new_averages.per_file_time)) + :: estimates )) + in + let file = get_file ~options in + Lwt_result.( + let%lwt result = + (try%lwt + Lwt_result.ok + @@ Lwt_io.open_file + ~flags:[Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC] + ~mode:Lwt_io.Output + ~perm:0o666 + file + with + | Unix.Unix_error (Unix.ENOENT, _, _) -> Lwt_result.fail "File doesn't exist" + | exn -> + let exn = Exception.wrap exn in + Lwt_result.fail (Printf.sprintf "Failed to open file\n%s" (Exception.to_string exn))) + >>= fun oc -> + try%lwt + let%lwt () = Lwt_io.write oc json_str in + Lwt_result.ok @@ Lwt_io.close oc + with exn -> + let exn = Exception.wrap exn in + Lwt_result.fail (Printf.sprintf "Failed to write file\n%s" (Exception.to_string exn)) + in + begin + match result with + | Result.Ok () -> () + | Result.Error msg -> Hh_logger.error "Failed to save per_file_time to %S. %s" file msg + end; + + Lwt.return_unit) + +let init ~options ~init_time ~parsed_count = + let%lwt (per_file_time, last_estimates) = load_per_file_time ~options in + averages := Some { init_time; per_file_time; parsed_count }; + Lwt.return last_estimates + +let with_averages f = + match !averages with + | None -> failwith "Recheck_stats needs to be initialized before it can be used" + | Some averages -> f averages + +let record_recheck_time ~options ~total_time ~rechecked_files = + (* rechecked_files should be non-negative. If it's 0, then we have no new information to add *) + if rechecked_files > 0 then + with_averages + @@ fun { init_time; per_file_time; parsed_count } -> + (* What should we do for tiny repositories? Let's make the window at least 15 samples big *) + let window = max parsed_count 15 in + let per_file_time = + moving_average + ~window + ~avg:per_file_time + ~sample:(total_time /. float_of_int rechecked_files) + ~sample_count:rechecked_files + in + save_averages ~options { init_time; per_file_time; parsed_count } + else + Lwt.return_unit + +let record_last_estimates ~options ~estimates = + with_averages @@ (fun averages -> save_averages ~options ~estimates averages) + +let get_init_time () = + with_averages @@ (fun { init_time; per_file_time = _; parsed_count = _ } -> init_time) + +let get_per_file_time () = + with_averages @@ (fun { init_time = _; per_file_time; parsed_count = _ } -> per_file_time) diff --git a/src/services/inference/recheck_stats.mli b/src/services/inference/recheck_stats.mli new file mode 100644 index 00000000000..0018841a084 --- /dev/null +++ b/src/services/inference/recheck_stats.mli @@ -0,0 +1,26 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
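(* Editor's note: illustrative sketch only, not part of this patch. The
   [moving_average] helper added above blends a new per-file-time sample into
   the running average, weighting the sample by how many files it covered and
   capping that weight at the window size. A worked example: *)
let moving_average ~window ~avg ~sample ~sample_count =
  let window = float_of_int window in
  let sample_count = float_of_int sample_count in
  if sample_count >= window then
    sample
  else
    ((avg *. (window -. sample_count)) +. (sample *. sample_count)) /. window

let () =
  (* window of 15 samples, current average 3ms/file, and a recheck that
     averaged 10ms/file over 5 files:
     (0.003 *. 10. +. 0.010 *. 5.) /. 15. = 0.00533... *)
  Printf.printf "%f\n" (moving_average ~window:15 ~avg:0.003 ~sample:0.010 ~sample_count:5)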
+ *) + +type estimates = { + estimated_time_to_recheck: float; + estimated_time_to_restart: float; + estimated_time_to_init: float; + estimated_time_per_file: float; + estimated_files_to_recheck: int; + estimated_files_to_init: int; +} + +val init : options:Options.t -> init_time:float -> parsed_count:int -> estimates option Lwt.t + +val record_recheck_time : + options:Options.t -> total_time:float -> rechecked_files:int -> unit Lwt.t + +val record_last_estimates : options:Options.t -> estimates:estimates -> unit Lwt.t + +val get_init_time : unit -> float + +val get_per_file_time : unit -> float diff --git a/src/services/inference/types_js.ml b/src/services/inference/types_js.ml index 51eea317d50..cd360a30929 100644 --- a/src/services/inference/types_js.ml +++ b/src/services/inference/types_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,135 +11,280 @@ open Utils_js (****************** typecheck job helpers *********************) -let clear_errors (files: FilenameSet.t) errors = +let clear_errors (files : FilenameSet.t) errors = FilenameSet.fold - (fun file { ServerEnv.local_errors; merge_errors; suppressions; severity_cover_set; } -> + (fun file { ServerEnv.local_errors; merge_errors; warnings; suppressions } -> Hh_logger.debug "clear errors %s" (File_key.to_string file); - { ServerEnv. - local_errors = FilenameMap.remove file local_errors; + { + ServerEnv.local_errors = FilenameMap.remove file local_errors; merge_errors = FilenameMap.remove file merge_errors; + warnings = FilenameMap.remove file warnings; suppressions = Error_suppressions.remove file suppressions; - severity_cover_set = FilenameMap.remove file severity_cover_set; - } - ) files errors + }) + files + errors let update_errset map file errset = - if Errors.ErrorSet.is_empty errset then map + if Flow_error.ErrorSet.is_empty errset then + map else - let errset = match FilenameMap.get file map with - | Some prev_errset -> - Errors.ErrorSet.union prev_errset errset - | None -> errset + let errset = + match FilenameMap.get file map with + | Some prev_errset -> Flow_error.ErrorSet.union prev_errset errset + | None -> errset in FilenameMap.add file errset map -let merge_error_maps = FilenameMap.union ~combine:(fun _ x y -> Some (Errors.ErrorSet.union x y)) +let merge_error_maps = + FilenameMap.union ~combine:(fun _ x y -> Some (Flow_error.ErrorSet.union x y)) + +(* We just want to replace the old coverage with the new one *) +let update_coverage = FilenameMap.union ~combine:(fun _ _ -> Option.return) (* Filter out duplicate provider error, if any, for the given file. 
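(* Editor's note: illustrative sketch only, not part of this patch.
   [merge_error_maps] and [update_coverage] above differ only in the combine
   function handed to the map union: one unions the two payloads on a key
   collision, the other simply keeps the newer one. With the standard
   library's Map: *)
module StrMap = Map.Make (String)

(* on a key collision, keep the value from the newer (right-hand) map *)
let keep_newer older newer = StrMap.union (fun _key _old fresh -> Some fresh) older newer

(* on a key collision, merge both payloads *)
let merge_lists older newer = StrMap.union (fun _key xs ys -> Some (xs @ ys)) older newer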
*) let filter_duplicate_provider map file = match FilenameMap.get file map with | Some prev_errset -> - let new_errset = Errors.ErrorSet.filter (fun err -> - not (Errors.is_duplicate_provider_error err) - ) prev_errset in + let new_errset = + Flow_error.ErrorSet.filter + (fun err -> not (Flow_error.kind_of_error err = Errors.DuplicateProviderError)) + prev_errset + in FilenameMap.add file new_errset map | None -> map -let with_timer_lwt ?options timer profiling f = - let should_print = Option.value_map options ~default:false ~f:(Options.should_profile) in - Profiling_js.with_timer_lwt ~should_print ~timer ~f profiling +let with_memory_info callback = + let%lwt cgroup_stats = CGroup.get_stats () in + (* Reading hash_stats while workers are writing can cause assertion errors *) + let hash_stats = (try Some (SharedMem_js.hash_stats ()) with _ -> None) in + let heap_size = SharedMem_js.heap_size () in + callback ~cgroup_stats ~hash_stats ~heap_size; + Lwt.return_unit + +module MemorySamplingLoop = LwtLoop.Make (struct + type acc = + cgroup_stats:(CGroup.stats, string) result -> + hash_stats:SharedMem_js.table_stats option -> + heap_size:int -> + unit + + let main callback = + let%lwt () = with_memory_info callback in + let%lwt () = Lwt_unix.sleep 1.0 in + Lwt.return callback + + let catch _ exn = + let exn = Exception.wrap exn in + Hh_logger.error "Exception in MemorySamplingLoop: %s" (Exception.to_string exn); + Lwt.return_unit +end) + +let with_timer_lwt = + let clear_worker_memory () = + ["worker_rss_start"; "worker_rss_delta"; "worker_rss_hwm_delta"] |> List.iter Measure.delete + in + let profile_add_memory profiling getter group metric = + getter "worker_rss_start" + |> Option.iter ~f:(fun start -> + getter "worker_rss_delta" + |> Option.iter ~f:(fun delta -> + getter "worker_rss_hwm_delta" + |> Option.iter ~f:(fun hwm_delta -> + Profiling_js.add_memory ~group ~metric ~start ~delta ~hwm_delta profiling))) + in + let sample_memory timer profiling ~cgroup_stats ~hash_stats ~heap_size = + Profiling_js.sample_memory profiling ~group:timer ~metric:"heap" ~value:(float heap_size); + + Option.iter hash_stats ~f:(fun { SharedMem_js.nonempty_slots; used_slots; slots } -> + Profiling_js.sample_memory + profiling + ~group:timer + ~metric:"hash_nonempty_slots" + ~value:(float nonempty_slots); + + Profiling_js.sample_memory + profiling + ~group:timer + ~metric:"hash_used_slots" + ~value:(float used_slots); + + Profiling_js.sample_memory profiling ~group:timer ~metric:"hash_slots" ~value:(float slots)); + + match cgroup_stats with + | Error _ -> () + | Ok { CGroup.total; total_swap; anon; file; shmem } -> + Profiling_js.sample_memory profiling ~group:timer ~metric:"cgroup_total" ~value:(float total); + + Profiling_js.sample_memory + profiling + ~group:timer + ~metric:"cgroup_swap" + ~value:(float total_swap); + + Profiling_js.sample_memory profiling ~group:timer ~metric:"cgroup_anon" ~value:(float anon); + + Profiling_js.sample_memory profiling ~group:timer ~metric:"cgroup_shmem" ~value:(float shmem); + + Profiling_js.sample_memory profiling ~group:timer ~metric:"cgroup_file" ~value:(float file) + in + fun ?options timer profiling f -> + let should_print = Option.value_map options ~default:false ~f:Options.should_profile in + let sample_memory = sample_memory timer profiling in + clear_worker_memory (); + + (* Record the cgroup info at the start *) + let%lwt () = with_memory_info sample_memory in + (* Asynchronously run a thread that periodically grabs the cgroup stats *) + let sampling_loop = 
MemorySamplingLoop.run sample_memory in + let%lwt ret = + try%lwt + let%lwt ret = Profiling_js.with_timer_lwt ~should_print ~timer ~f profiling in + Lwt.cancel sampling_loop; + Lwt.return ret + with exn -> + let exn = Exception.wrap exn in + Lwt.cancel sampling_loop; + Exception.reraise exn + in + (* Record the cgroup info at the end *) + let%lwt () = with_memory_info sample_memory in + profile_add_memory profiling Measure.get_mean timer "worker_rss_avg"; + profile_add_memory profiling Measure.get_max timer "worker_rss_max"; + clear_worker_memory (); + Lwt.return ret let collate_parse_results ~options parse_results = - let { Parsing_service_js. - parse_ok; parse_skips; parse_hash_mismatch_skips; parse_fails; parse_unchanged - } = parse_results in + let { + Parsing_service_js.parse_ok; + parse_skips; + parse_hash_mismatch_skips; + parse_fails; + parse_unchanged; + } = + parse_results + in (* No one who is calling collate_parse_results is skipping files with hash mismatches *) assert (FilenameSet.is_empty parse_hash_mismatch_skips); - let local_errors = List.fold_left (fun errors (file, _, fail) -> - let errset = match fail with - | Parsing_service_js.Parse_error err -> - Inference_utils.set_of_parse_error ~source_file:file err - | Parsing_service_js.Docblock_errors errs -> - Inference_utils.set_of_docblock_errors ~source_file:file errs - | Parsing_service_js.File_sig_error err -> - Inference_utils.set_of_file_sig_error ~source_file:file err - in - update_errset errors file errset - ) FilenameMap.empty parse_fails in - + let local_errors = + List.fold_left + (fun errors (file, _, fail) -> + let errset = + match fail with + | Parsing_service_js.Parse_error err -> + Inference_utils.set_of_parse_error ~source_file:file err + | Parsing_service_js.Docblock_errors errs -> + Inference_utils.set_of_docblock_errors ~source_file:file errs + | Parsing_service_js.File_sig_error err -> + Inference_utils.set_of_file_sig_error ~source_file:file err + in + update_errset errors file errset) + FilenameMap.empty + parse_fails + in let local_errors = (* In practice, the only `tolerable_errors` are related to well formed exports. If this flag * were not temporary in nature, it would be worth adding some complexity to avoid conflating * them. 
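
(* Illustrative sketch, not part of this diff: the with_timer_lwt wrapper above
   runs a periodic memory-sampling loop alongside the profiled task and cancels
   the loop once the task settles. A minimal version of that pattern, assuming
   only the lwt and lwt.unix packages (the real code goes through LwtLoop and
   try%lwt), could look like this. *)
let with_sampling ~sample main =
  let rec loop () : unit Lwt.t =
    Lwt.bind (Lwt_unix.sleep 1.0) (fun () ->
        sample ();
        loop ())
  in
  (* Start the sampler, run the main task, then stop the sampler whether the
     task succeeded, failed, or was itself canceled. *)
  let sampler = loop () in
  Lwt.finalize main (fun () ->
      Lwt.cancel sampler;
      Lwt.return_unit)
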
*) - if options.Options.opt_enforce_well_formed_exports then - FilenameMap.fold (fun file file_sig_errors errors -> - let errset = Inference_utils.set_of_file_sig_tolerable_errors - ~source_file:file file_sig_errors in - update_errset errors file errset - ) parse_ok local_errors - else + Inference_utils.fold_whitelisted_well_formed_exports + ~f:(fun file file_sig_errors errors -> + let file_sig_errors = File_sig.abstractify_tolerable_errors file_sig_errors in + let errset = + Inference_utils.set_of_file_sig_tolerable_errors ~source_file:file file_sig_errors + in + update_errset errors file errset) + options + parse_ok local_errors in + let unparsed = + List.fold_left + (fun unparsed (file, info, _) -> (file, info) :: unparsed) + parse_skips + parse_fails + in + let parse_ok = + FilenameMap.fold (fun k _ acc -> FilenameSet.add k acc) parse_ok FilenameSet.empty + in + (parse_ok, unparsed, parse_unchanged, local_errors) - let unparsed = List.fold_left (fun unparsed (file, info, _) -> - (file, info) :: unparsed - ) parse_skips parse_fails in - - let parse_ok = parse_ok |> FilenameMap.keys |> FilenameSet.of_list in - - parse_ok, unparsed, parse_unchanged, local_errors - -let parse ~options ~profiling ~workers parse_next = +let parse ~options ~profiling ~workers ~reader parse_next = with_timer_lwt ~options "Parsing" profiling (fun () -> - let%lwt results = Parsing_service_js.parse_with_defaults options workers parse_next in - Lwt.return (collate_parse_results ~options results) - ) + let%lwt results = + Parsing_service_js.parse_with_defaults ~reader options workers parse_next + in + Lwt.return (collate_parse_results ~options results)) -let reparse ~options ~profiling ~transaction ~workers ~modified ~deleted = +let reparse ~options ~profiling ~transaction ~reader ~workers ~modified ~deleted = with_timer_lwt ~options "Parsing" profiling (fun () -> - let%lwt new_or_changed, results = - Parsing_service_js.reparse_with_defaults - ~transaction ~with_progress:true ~workers ~modified ~deleted options - in - let parse_ok, unparsed, unchanged, local_errors = collate_parse_results ~options results in - Lwt.return (new_or_changed, parse_ok, unparsed, unchanged, local_errors) - ) + let%lwt (new_or_changed, results) = + Parsing_service_js.reparse_with_defaults + ~transaction + ~reader + ~with_progress:true + ~workers + ~modified + ~deleted + options + in + let (parse_ok, unparsed, unchanged, local_errors) = collate_parse_results ~options results in + Lwt.return (new_or_changed, parse_ok, unparsed, unchanged, local_errors)) let parse_contents ~options ~profiling ~check_syntax filename contents = with_timer_lwt ~options "Parsing" profiling (fun () -> - (* always enable types when checking an individual file *) - let types_mode = Parsing_service_js.TypesAllowed in - let use_strict = Options.modules_are_use_strict options in - let max_tokens = Options.max_header_tokens options in - - let docblock_errors, info = - Parsing_service_js.parse_docblock ~max_tokens filename contents in - let errors = Inference_utils.set_of_docblock_errors ~source_file:filename docblock_errors in - let parse_result = Parsing_service_js.do_parse - ~fail:check_syntax ~types_mode ~use_strict ~info - contents filename - in - Lwt.return (errors, parse_result, info) - ) + (* always enable types when checking an individual file *) + let types_mode = Parsing_service_js.TypesAllowed in + let max_tokens = Options.max_header_tokens options in + let (docblock_errors, info) = + Parsing_service_js.parse_docblock ~max_tokens filename contents + in + let 
errors = Inference_utils.set_of_docblock_errors ~source_file:filename docblock_errors in + let parse_options = + Parsing_service_js.make_parse_options ~fail:check_syntax ~types_mode info options + in + let parse_result = Parsing_service_js.do_parse ~info ~parse_options contents filename in + Lwt.return (errors, parse_result, info)) (* commit providers for old and new modules, collect errors. *) -let commit_modules, commit_modules_from_saved_state = - let commit_modules_generic ~introduce_files ~transaction ~all_providers_mutator ~options ~is_init - ~profiling ~workers ~parsed ~parsed_set ~unparsed ~unparsed_set ~old_modules ~deleted - ~local_errors ~new_or_changed = +let (commit_modules, commit_modules_from_saved_state) = + let commit_modules_generic + ~introduce_files + ~transaction + ~reader + ~all_providers_mutator + ~options + ~is_init + ~profiling + ~workers + ~parsed + ~parsed_set + ~unparsed + ~unparsed_set + ~old_modules + ~deleted + ~local_errors + ~new_or_changed = (* conservatively approximate set of modules whose providers will change *) (* register providers for modules, warn on dupes etc. *) - with_timer_lwt ~options "CommitModules" profiling (fun () -> - let all_files_set = FilenameSet.union (FilenameSet.union parsed_set unparsed_set) deleted in + with_timer_lwt ~options "CommitModules" profiling (fun () -> + let all_files_set = + FilenameSet.union (FilenameSet.union parsed_set unparsed_set) deleted + in let mutator = Module_heaps.Introduce_files_mutator.create transaction all_files_set in let%lwt new_modules = - introduce_files - ~mutator ~all_providers_mutator ~workers ~options ~parsed ~unparsed + introduce_files ~mutator ~all_providers_mutator ~workers ~options ~parsed ~unparsed in let dirty_modules = List.rev_append old_modules new_modules in - let%lwt providers, changed_modules, errmap = - Module_js.commit_modules ~transaction ~workers ~options ~is_init new_or_changed dirty_modules in - (* Providers might be new but not changed. This typically happens when old + let%lwt (providers, changed_modules, errmap) = + Module_js.commit_modules + ~transaction + ~workers + ~options + ~reader + ~is_init + new_or_changed + dirty_modules + in + (* Providers might be new but not changed. This typically happens when old providers are deleted, and previously duplicate providers become new providers. In such cases, we must clear the old duplicate provider errors for the new providers. @@ -148,103 +293,229 @@ let commit_modules, commit_modules_from_saved_state = that case they are rechecked and *all* their errors are cleared. But we don't care about optimizing that case for now.) 
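
(* Illustrative sketch (plain string maps, not part of this diff) of the
   clearing step described in the comment above: for every module that gained a
   provider, drop any previously recorded duplicate-provider errors for that
   file, which is what the fold over providers with filter_duplicate_provider
   does. The error predicate is left abstract here. *)
module StrMap = Map.Make (String)

let clear_duplicate_provider_errors ~is_duplicate_provider_error providers errors =
  List.fold_left
    (fun errors provider ->
      match StrMap.find_opt provider errors with
      | None -> errors
      | Some errs ->
        let errs = List.filter (fun e -> not (is_duplicate_provider_error e)) errs in
        StrMap.add provider errs errors)
    errors
    providers
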
*) let errors = List.fold_left filter_duplicate_provider local_errors providers in - Lwt.return ( - changed_modules, FilenameMap.fold (fun file errors acc -> - let errset = List.fold_left (fun acc err -> - match err with - | Module_js.ModuleDuplicateProviderError { module_name; provider; conflict; } -> - let msg = Flow_error.(EDuplicateModuleProvider { module_name; provider; conflict }) in - let error = Flow_error.error_of_msg ~trace_reasons:[] ~source_file:file msg in - Errors.ErrorSet.add error acc - ) Errors.ErrorSet.empty errors in - update_errset acc file errset - ) errmap errors - ) - ) + Lwt.return + ( changed_modules, + FilenameMap.fold + (fun file errors acc -> + let errset = + List.fold_left + (fun acc err -> + match err with + | Module_js.ModuleDuplicateProviderError { module_name; provider; conflict } + -> + let error = + Error_message.( + EDuplicateModuleProvider { module_name; provider; conflict }) + |> Flow_error.error_of_msg ~trace_reasons:[] ~source_file:file + in + Flow_error.ErrorSet.add error acc) + Flow_error.ErrorSet.empty + errors + in + update_errset acc file errset) + errmap + errors )) in - let commit_modules = - commit_modules_generic ~introduce_files:Module_js.introduce_files + let commit_modules ~transaction ~reader = + commit_modules_generic + ~introduce_files:(Module_js.introduce_files ~reader) + ~transaction + ~reader in - let commit_modules_from_saved_state = - commit_modules_generic ~introduce_files:Module_js.introduce_files_from_saved_state + let commit_modules_from_saved_state ~transaction ~reader = + commit_modules_generic + ~introduce_files:Module_js.introduce_files_from_saved_state + ~transaction + ~reader in - commit_modules, commit_modules_from_saved_state - -let resolve_requires ~transaction ~options ~profiling ~workers ~parsed ~parsed_set = + (commit_modules, commit_modules_from_saved_state) + +module DirectDependentFilesCache : sig + val clear : unit -> unit + + val with_cache : + options:Options.t -> + root_files:FilenameSet.t -> + on_miss:(unit -> FilenameSet.t Lwt.t) -> + FilenameSet.t Lwt.t +end = struct + type entry = { + direct_dependents: FilenameSet.t; + last_hit: float; + } + + type cache = { + entries: entry FilenameMap.t; + size: int; + } + + let empty_cache = { entries = FilenameMap.empty; size = 0 } + + let max_size = 100 + + let cache = ref empty_cache + + let clear () = cache := empty_cache + + let remove_oldest () = + let { entries; size } = !cache in + let oldest = + FilenameMap.fold + (fun key { last_hit; _ } acc -> + match acc with + | Some (_, oldest_hit) when oldest_hit <= last_hit -> acc + | _ -> Some (key, last_hit)) + entries + None + in + Option.iter oldest ~f:(fun (oldest_key, _) -> + cache := { entries = FilenameMap.remove oldest_key entries; size = size - 1 }) + + let add_after_miss ~root_file ~direct_dependents = + let entry = { direct_dependents; last_hit = Unix.gettimeofday () } in + let { entries; size } = !cache in + cache := { entries = FilenameMap.add root_file entry entries; size = size + 1 }; + if size > max_size then remove_oldest () + + let get_from_cache ~root_file = + let { entries; size } = !cache in + match FilenameMap.get root_file entries with + | None -> None + | Some entry -> + let entry = { entry with last_hit = Unix.gettimeofday () } in + cache := { entries = FilenameMap.add root_file entry entries; size }; + Some entry + + let with_cache ~options ~root_files ~on_miss = + match FilenameSet.elements root_files with + | [root_file] when Options.cache_direct_dependents options -> + begin + match 
get_from_cache ~root_file with + | None -> + let%lwt direct_dependents = on_miss () in + add_after_miss ~root_file ~direct_dependents; + Lwt.return direct_dependents + | Some { direct_dependents; last_hit = _ } -> Lwt.return direct_dependents + end + | _ -> + (* Cache is only for when there is a single root file *) + on_miss () +end + +let clear_cache_if_resolved_requires_changed resolved_requires_changed = + if resolved_requires_changed then ( + Hh_logger.info "Resolved requires changed"; + DirectDependentFilesCache.clear () + ) else + Hh_logger.info "Resolved requires are unchanged" + +let resolve_requires ~transaction ~reader ~options ~profiling ~workers ~parsed ~parsed_set = let node_modules_containers = !Files.node_modules_containers in let mutator = Module_heaps.Resolved_requires_mutator.create transaction parsed_set in - with_timer_lwt ~options "ResolveRequires" profiling (fun () -> - MultiWorkerLwt.call workers - ~job: (List.fold_left (fun errors_acc filename -> - let errors = Module_js.add_parsed_resolved_requires filename - ~mutator ~options ~node_modules_containers in - if Errors.ErrorSet.is_empty errors - then errors_acc - else FilenameMap.add filename errors errors_acc - ) - ) - ~neutral: FilenameMap.empty - ~merge: FilenameMap.union - ~next:(MultiWorkerLwt.next workers parsed) - ) + let merge (changed1, errors1) (changed2, errors2) = + (changed1 || changed2, FilenameMap.union errors1 errors2) + in + let%lwt (resolved_requires_changed, errors) = + with_timer_lwt ~options "ResolveRequires" profiling (fun () -> + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun (changed, errors_acc) filename -> + let (resolved_requires_changed, errors) = + Module_js.add_parsed_resolved_requires + filename + ~mutator + ~reader + ~options + ~node_modules_containers + in + let changed = changed || resolved_requires_changed in + if Flow_error.ErrorSet.is_empty errors then + (changed, errors_acc) + else + (changed, FilenameMap.add filename errors errors_acc))) + ~neutral:(false, FilenameMap.empty) + ~merge + ~next:(MultiWorkerLwt.next workers parsed)) + in + clear_cache_if_resolved_requires_changed resolved_requires_changed; + Lwt.return (errors, resolved_requires_changed) let commit_modules_and_resolve_requires - ~transaction - ~all_providers_mutator - ~options - ~profiling - ~workers - ~old_modules - ~parsed_set - ~unparsed - ~unparsed_set - ~new_or_changed - ~deleted - ~errors - ~is_init = + ~transaction + ~reader + ~all_providers_mutator + ~options + ~profiling + ~workers + ~old_modules + ~parsed_set + ~unparsed + ~unparsed_set + ~new_or_changed + ~deleted + ~errors + ~is_init = (* TODO remove after lookup overhaul *) Module_js.clear_filename_cache (); - let { ServerEnv.local_errors; merge_errors; suppressions; severity_cover_set } = errors in - + let { ServerEnv.local_errors; merge_errors; warnings; suppressions } = errors in let parsed = FilenameSet.elements parsed_set in - - let%lwt changed_modules, local_errors = commit_modules - ~transaction ~all_providers_mutator ~options ~is_init ~profiling ~workers ~parsed ~parsed_set - ~unparsed ~unparsed_set ~old_modules ~deleted ~local_errors ~new_or_changed + let%lwt (changed_modules, local_errors) = + commit_modules + ~transaction + ~reader + ~all_providers_mutator + ~options + ~is_init + ~profiling + ~workers + ~parsed + ~parsed_set + ~unparsed + ~unparsed_set + ~old_modules + ~deleted + ~local_errors + ~new_or_changed in - - let%lwt resolve_errors = - resolve_requires ~transaction ~options ~profiling ~workers ~parsed ~parsed_set + 
let%lwt (resolve_errors, resolved_requires_changed) = + resolve_requires ~transaction ~reader ~options ~profiling ~workers ~parsed ~parsed_set in let local_errors = FilenameMap.union resolve_errors local_errors in + Lwt.return + ( changed_modules, + resolved_requires_changed, + { ServerEnv.local_errors; merge_errors; warnings; suppressions } ) - Lwt.return ( - changed_modules, { ServerEnv.local_errors; merge_errors; suppressions; severity_cover_set } - ) - -let error_set_of_merge_error file msg = - let error = Flow_error.error_of_msg ~trace_reasons:[] ~source_file:file msg in - Errors.ErrorSet.singleton error +let error_set_of_internal_error file (loc, internal_error) = + Error_message.EInternal (loc, internal_error) + |> Flow_error.error_of_msg ~trace_reasons:[] ~source_file:file + |> Flow_error.ErrorSet.singleton let calc_deps ~options ~profiling ~dependency_graph ~components to_merge = with_timer_lwt ~options "CalcDeps" profiling (fun () -> - let dependency_graph = Dep_service.filter_dependency_graph dependency_graph to_merge in - let components = List.filter (Nel.exists (fun f -> FilenameSet.mem f to_merge)) components in - if Options.should_profile options then Sort_js.log components; - let component_map = List.fold_left (fun component_map component -> - let file = Nel.hd component in - FilenameMap.add file component component_map - ) FilenameMap.empty components in - Lwt.return (dependency_graph, component_map) - ) + let dependency_graph = + Pure_dep_graph_operations.filter_dependency_graph dependency_graph to_merge + in + let components = List.filter (Nel.exists (fun f -> FilenameSet.mem f to_merge)) components in + if Options.should_profile options then Sort_js.log components; + let component_map = + List.fold_left + (fun component_map component -> + let file = Nel.hd component in + FilenameMap.add file component component_map) + FilenameMap.empty + components + in + Lwt.return (dependency_graph, component_map)) -(* The infer_input passed in basically tells us what the caller wants to typecheck. +(* The input passed in basically tells us what the caller wants to typecheck. * However, due to laziness, it's possible that certain dependents or dependencies have not been * checked yet. So we need to calculate all the transitive dependents and transitive dependencies - * and add them to infer_input, unless they're already checked and in unchanged_checked + * and add them to input, unless they're already checked and in unchanged_checked * - * Note that we do not want to add all_dependent_files to infer_input directly! We only want to + * Note that we do not want to add all_dependent_files to input directly! We only want to * pass the dependencies, and later add dependent files as needed. This is important for recheck * optimizations. We create the recheck map which indicates whether a given file needs to be * rechecked. Dependent files only need to be rechecked if their dependencies change. @@ -253,272 +524,520 @@ let include_dependencies_and_dependents ~options ~profiling ~unchanged_checked - ~infer_input + ~input + ~all_dependency_graph ~dependency_graph - ~all_dependent_files - ~direct_dependent_files = - let%lwt infer_input, components = with_timer_lwt ~options "PruneDeps" profiling (fun () -> - (* Don't just look up the dependencies of the focused or dependent modules. 
Also look up - * the dependencies of dependencies, since we need to check transitive dependencies *) - let preliminary_to_merge = CheckedSet.all - (CheckedSet.add ~dependents:all_dependent_files infer_input) in - (* So we want to prune our dependencies to only the dependencies which changed. However, - * two dependencies A and B might be in a cycle. If A changed and B did not, we still need to - * check both of them. So we need to calculate components before we can prune *) - (* Grab the subgraph containing all our dependencies and sort it into the strongly connected - * cycles *) - let components = Sort_js.topsort ~roots:preliminary_to_merge dependency_graph in - let dependencies = List.fold_left (fun dependencies component -> - if Nel.exists (fun fn -> not (CheckedSet.mem fn unchanged_checked)) component - (* If at least one member of the component is not unchanged, then keep the component *) - then Nel.fold_left (fun acc fn -> FilenameSet.add fn acc) dependencies component - (* If every element is unchanged, drop the component *) - else dependencies - ) FilenameSet.empty components in - Lwt.return (CheckedSet.add ~dependencies infer_input, components) - ) in - - (* NOTE: An important invariant here is that if we recompute Sort_js.topsort with infer_input + - all_dependent_files (which is = to_merge later) on dependency_graph, we would get exactly the - same components. Later, we will filter dependency_graph to just to_merge, and correspondingly - filter components as well. This will work out because every component is either entirely inside - to_merge or entirely outside. *) - - let to_merge = CheckedSet.add ~dependents:all_dependent_files infer_input in - - let recheck_map = - let roots = CheckedSet.add ~dependents:direct_dependent_files infer_input in - (* Definitely recheck inferred and direct_dependent_files. As merging proceeds, other - files in to_merge may or may not be rechecked. *) - CheckedSet.fold (fun recheck_map file -> - FilenameMap.add file (CheckedSet.mem file roots) recheck_map - ) FilenameMap.empty to_merge - in - - Lwt.return (to_merge, components, recheck_map) + ~all_dependent_files = + with_timer_lwt ~options "PruneDeps" profiling (fun () -> + (* Don't just look up the dependencies of the focused or dependent modules. Also look up + * the dependencies of dependencies, since we need to check transitive dependencies *) + let preliminary_to_merge = + Pure_dep_graph_operations.calc_direct_dependencies + all_dependency_graph + (CheckedSet.all (CheckedSet.add ~dependents:all_dependent_files input)) + in + (* So we want to prune our dependencies to only the dependencies which changed. However, two + dependencies A and B might be in a cycle. If A changed and B did not, we still need to check + B. Likewise, a dependent A and a dependency B might be in a cycle. If B is not a dependent + and A and B are unchanged, we still need to check B. So we need to calculate components + before we can prune. 
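
(* Illustrative sketch (standard library only, not part of this diff) of the
   first pruning rule that follows: once the strongly connected components are
   known, keep every member of a component as soon as one member is not known
   to be unchanged. The real code below applies a second, similar rule that
   keeps the non-dependents of mixed components; that part is omitted here. *)
module StrSet = Set.Make (String)

let files_to_keep ~needs_recheck components =
  List.fold_left
    (fun acc component ->
      if List.exists (fun f -> StrSet.mem f needs_recheck) component then
        (* One member changed, so the whole cycle must be merged together. *)
        List.fold_left (fun acc f -> StrSet.add f acc) acc component
      else
        acc)
    StrSet.empty
    components
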
*) + (* Grab the subgraph containing all our dependencies and sort it into the strongly connected + cycles *) + let components = Sort_js.topsort ~roots:preliminary_to_merge dependency_graph in + let dependencies = + List.fold_left + (fun dependencies component -> + let dependencies = + if + Nel.exists (fun fn -> not (CheckedSet.mem fn unchanged_checked)) component + (* If some member of the component is not unchanged, then keep the component *) + then + Nel.fold_left (fun acc fn -> FilenameSet.add fn acc) dependencies component + (* If every element is unchanged, drop the component *) + else + dependencies + in + let dependencies = + let (dependents, non_dependents) = + List.partition (fun fn -> FilenameSet.mem fn all_dependent_files) + @@ Nel.to_list component + in + if + dependents <> [] && non_dependents <> [] + (* If some member of the component is a dependent and others are not, then keep the + others *) + then + List.fold_left (fun acc fn -> FilenameSet.add fn acc) dependencies non_dependents + (* If every element is a dependent or if every element is not, drop the component *) + else + dependencies + in + dependencies) + FilenameSet.empty + components + in + (* Definitely recheck input and dependencies. As merging proceeds, dependents may or may not be + rechecked. *) + let definitely_to_merge = CheckedSet.add ~dependencies input in + let to_merge = CheckedSet.add ~dependents:all_dependent_files definitely_to_merge in + (* NOTE: An important invariant here is that if we recompute Sort_js.topsort with to_merge on + dependency_graph, we would get exactly the same components. Later, we will filter + dependency_graph to just to_merge, and correspondingly filter components as well. This will + work out because every component is either entirely inside to_merge or entirely outside. *) + Lwt.return (to_merge, components, CheckedSet.all definitely_to_merge)) + +let remove_old_results (errors, warnings, suppressions, coverage, first_internal_error) file = + ( FilenameMap.remove file errors, + FilenameMap.remove file warnings, + Error_suppressions.remove file suppressions, + FilenameMap.remove file coverage, + first_internal_error ) + +let add_new_results + ~record_slow_file (errors, warnings, suppressions, coverage, first_internal_error) file result + = + match result with + | Ok (new_errors, new_warnings, new_suppressions, new_coverage, check_time) -> + if check_time > 1. 
then record_slow_file file check_time; + ( update_errset errors file new_errors, + update_errset warnings file new_warnings, + Error_suppressions.update_suppressions suppressions new_suppressions, + update_coverage coverage new_coverage, + first_internal_error ) + | Error (loc, internal_error) -> + let new_errors = error_set_of_internal_error file (loc, internal_error) in + let first_internal_error = + match first_internal_error with + | Some _ -> first_internal_error + | None -> + Some + (spf + "%s\n%s" + (ALoc.debug_to_string ~include_source:true loc) + (Error_message.string_of_internal_error internal_error)) + in + (update_errset errors file new_errors, warnings, suppressions, coverage, first_internal_error) let run_merge_service ~master_mutator ~worker_mutator + ~reader ~intermediate_result_callback ~options ~profiling ~workers - dependency_graph - component_map - recheck_map - acc - = + ~dependency_graph + ~component_map + ~recheck_set + acc = with_timer_lwt ~options "Merge" profiling (fun () -> - let%lwt merged, skipped_count = Merge_service.merge_strict - ~master_mutator ~worker_mutator ~intermediate_result_callback ~options ~workers - dependency_graph component_map recheck_map - in - let errs, suppressions, severity_cover_set = List.fold_left (fun acc (file, result) -> - let component = FilenameMap.find_unsafe file component_map in - (* remove all errors, suppressions for rechecked component *) - let errors, suppressions, severity_cover_set = - Nel.fold_left (fun (errors, suppressions, severity_cover_set) file -> - FilenameMap.remove file errors, - Error_suppressions.remove file suppressions, - FilenameMap.remove file severity_cover_set - ) acc component + let%lwt (merged, { Merge_service.skipped_count; sig_new_or_changed }) = + Merge_service.merge + ~master_mutator + ~worker_mutator + ~reader + ~intermediate_result_callback + ~options + ~workers + ~dependency_graph + ~component_map + ~recheck_set in - match result with - | Ok (new_errors, new_suppressions, new_severity_cover) -> - update_errset errors file new_errors, - Error_suppressions.update_suppressions suppressions new_suppressions, - FilenameMap.union new_severity_cover severity_cover_set - | Error msg -> - let new_errors = error_set_of_merge_error file msg in - update_errset errors file new_errors, suppressions, severity_cover_set - ) acc merged - in - Lwt.return (errs, suppressions, severity_cover_set, skipped_count) - ) - -(* This function does some last minute preparation and then calls into the merge service, which - * typechecks the code. 
By the time this function is called, we know exactly what we want to merge - * (though we may later decline to typecheck some files due to recheck optimizations) *) -let merge - ~transaction - ~options - ~profiling - ~workers - ~errors - ~unchanged_checked - ~to_merge - ~components - ~recheck_map - ~dependency_graph - ~deleted - ~persistent_connections - ~prep_merge = - let { ServerEnv.local_errors; merge_errors; suppressions; severity_cover_set } = errors in - - + let (errs, warnings, suppressions, coverage, first_internal_error) = + List.fold_left + (fun acc (file, result) -> + let component = FilenameMap.find_unsafe file component_map in + let acc = Nel.fold_left remove_old_results acc component in + add_new_results ~record_slow_file:(fun _ _ -> ()) acc file result) + acc + merged + in + Lwt.return + ( errs, + warnings, + suppressions, + coverage, + skipped_count, + sig_new_or_changed, + Option.map first_internal_error ~f:(spf "First merge internal error:\n%s") )) + +let mk_intermediate_result_callback + ~reader ~options ~profiling ~persistent_connections ~recheck_reasons suppressions = + let lazy_table_of_aloc = + Parsing_heaps.Mutator_reader.get_sig_ast_aloc_table_unsafe_lazy ~reader + in let%lwt send_errors_over_connection = match persistent_connections with | None -> Lwt.return (fun _ -> ()) - | Some clients -> with_timer_lwt ~options "MakeSendErrors" profiling (fun () -> - (* Each merge step uncovers new errors, warnings, suppressions and lint severity covers. - - While more suppressions and severity covers may come in later steps, the suppressions and - severity covers we've seen so far are sufficient to filter the errors and warnings we've - seen so far. - + | Some clients -> + with_timer_lwt ~options "MakeSendErrors" profiling (fun () -> + (* In classic, each merge step uncovers new errors, warnings, suppressions. + While more suppressions may come in later steps, the suppressions we've seen so far are + sufficient to filter the errors and warnings we've seen so far. Intuitively, we will not see an error (or warning) before we've seen all the files involved - in that error, and thus all the suppressions which could possibly suppress the error. *) - let open Errors in - let curr_errors = ref ErrorSet.empty in - let curr_warnings = ref ErrorSet.empty in - let curr_suppressions = ref suppressions in - let curr_severity_cover = ref severity_cover_set in - let filter = Error_suppressions.filter_suppressed_errors in - Lwt.return (function lazy results -> - let new_errors, new_warnings, suppressions, severity_cover = - List.fold_left (fun (errs_acc, warns_acc, supps_acc, lints_acc) result -> - let file, errs_and_warns, supps, lints = result in - let supps_acc = Error_suppressions.union supps_acc supps in - let lints_acc = FilenameMap.union lints_acc lints in - (* Filter errors and warnings based on suppressions we've seen so far. *) - let errs, warns, _, _ = filter supps_acc lints_acc errs_and_warns - ~unused:Error_suppressions.empty (* TODO: track unused suppressions *) - in - (* Only add errors we haven't seen before. *) - let errs_acc = ErrorSet.fold (fun err acc -> - if ErrorSet.mem err !curr_errors - then acc - else ErrorSet.add err acc - ) errs errs_acc in - (* Only add warnings we haven't seen before. Note that new warnings are stored by + in that error, and thus all the suppressions which could possibly suppress the error. 
+ + In types-first, we have already accumulated suppressions in the overall merge step, and + each check step uses those suppressions to filter the errors and warnings uncovered. + *) + Errors.( + let curr_errors = ref ConcreteLocPrintableErrorSet.empty in + let curr_warnings = ref ConcreteLocPrintableErrorSet.empty in + let curr_suppressions = ref suppressions in + let root = Options.root options in + let file_options = Some (Options.file_options options) in + let filter = Error_suppressions.filter_suppressed_errors ~root ~file_options in + Lwt.return (function (lazy results) -> + let (new_errors, new_warnings, suppressions) = + List.fold_left + (fun (errs_acc, warns_acc, supps_acc) result -> + let (file, old_errs, old_warns, supps) = result in + let supps_acc = Error_suppressions.union supps_acc supps in + (* Filter errors and warnings based on suppressions we've seen so far. *) + let (errs, _, _) = + filter supps_acc old_errs ~unused:Error_suppressions.empty + (* TODO: track unused suppressions *) + in + (* Filter errors and warnings based on suppressions we've seen so far. *) + let (warns, _, _) = + filter supps_acc old_warns ~unused:Error_suppressions.empty + (* TODO: track unused suppressions *) + in + (* Only add errors we haven't seen before. *) + let errs_acc = + ConcreteLocPrintableErrorSet.fold + (fun err acc -> + if ConcreteLocPrintableErrorSet.mem err !curr_errors then + acc + else + ConcreteLocPrintableErrorSet.add err acc) + errs + errs_acc + in + (* Only add warnings we haven't seen before. Note that new warnings are stored by filename, because the clients only receive warnings for files they have open. *) - let warns_acc = - let acc = Option.value (FilenameMap.get file warns_acc) ~default:ErrorSet.empty in - let acc = ErrorSet.fold (fun warn acc -> - if ErrorSet.mem warn !curr_warnings - then acc - else ErrorSet.add warn acc - ) warns acc in - if ErrorSet.is_empty acc then warns_acc else FilenameMap.add file acc warns_acc - in - errs_acc, warns_acc, supps_acc, lints_acc - ) (ErrorSet.empty, FilenameMap.empty, !curr_suppressions, !curr_severity_cover) results - in - - curr_errors := ErrorSet.union new_errors !curr_errors; - curr_warnings := FilenameMap.fold (fun _ -> ErrorSet.union) new_warnings !curr_warnings; - curr_suppressions := suppressions; - curr_severity_cover := severity_cover; - - if not (ErrorSet.is_empty new_errors && FilenameMap.is_empty new_warnings) - then Persistent_connection.update_clients - ~clients - ~calc_errors_and_warnings:(fun () -> new_errors, new_warnings) - )) + let warns_acc = + let acc = + Option.value + (FilenameMap.get file warns_acc) + ~default:ConcreteLocPrintableErrorSet.empty + in + let acc = + ConcreteLocPrintableErrorSet.fold + (fun warn acc -> + if ConcreteLocPrintableErrorSet.mem warn !curr_warnings then + acc + else + ConcreteLocPrintableErrorSet.add warn acc) + warns + acc + in + if ConcreteLocPrintableErrorSet.is_empty acc then + warns_acc + else + FilenameMap.add file acc warns_acc + in + (errs_acc, warns_acc, supps_acc)) + (ConcreteLocPrintableErrorSet.empty, FilenameMap.empty, !curr_suppressions) + results + in + curr_errors := ConcreteLocPrintableErrorSet.union new_errors !curr_errors; + curr_warnings := + FilenameMap.fold + (fun _ -> ConcreteLocPrintableErrorSet.union) + new_warnings + !curr_warnings; + curr_suppressions := suppressions; + + if + not + ( ConcreteLocPrintableErrorSet.is_empty new_errors + && FilenameMap.is_empty new_warnings ) + then + let errors_reason = LspProt.Recheck_streaming { recheck_reasons } in + 
Persistent_connection.update_clients + ~clients + ~errors_reason + ~calc_errors_and_warnings:(fun () -> (new_errors, new_warnings))))) in + let intermediate_result_callback results = + let errors = + lazy + (Core_list.map + ~f:(fun (file, result) -> + match result with + | Ok (errors, warnings, suppressions, _, _) -> + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + let warnings = Flow_error.make_errors_printable lazy_table_of_aloc warnings in + (file, errors, warnings, suppressions) + | Error msg -> + let errors = error_set_of_internal_error file msg in + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + let suppressions = Error_suppressions.empty in + let warnings = Errors.ConcreteLocPrintableErrorSet.empty in + (file, errors, warnings, suppressions)) + (Lazy.force results)) + in + send_errors_over_connection errors + in + Lwt.return intermediate_result_callback - let%lwt () = match prep_merge with +(* This function does some last minute preparation and then calls into the merge service, which + * typechecks the code. By the time this function is called, we know exactly what we want to merge + * (though we may later decline to typecheck some files due to recheck optimizations) *) +let merge + ~transaction + ~reader + ~options + ~profiling + ~workers + ~errors + ~coverage + ~to_merge + ~components + ~recheck_set + ~dependency_graph + ~deleted + ~unparsed_set + ~persistent_connections + ~recheck_reasons + ~prep_merge = + let { ServerEnv.local_errors; merge_errors; warnings; suppressions } = errors in + let%lwt intermediate_result_callback = + let persistent_connections = + match Options.arch options with + | Options.Classic -> persistent_connections + | Options.TypesFirst -> None + in + mk_intermediate_result_callback + ~reader + ~options + ~profiling + ~persistent_connections + ~recheck_reasons + suppressions + in + let%lwt () = + match prep_merge with | None -> Lwt.return_unit | Some callback -> (* call supplied function to calculate closure of modules to merge *) - with_timer_lwt ~options "MakeMergeInput" profiling (fun () -> - Lwt.return (callback to_merge) - ) + with_timer_lwt ~options "MakeMergeInput" profiling (fun () -> Lwt.return (callback ())) in - (* to_merge is the union of inferred (newly inferred files) and the transitive closure of all dependents. - recheck_map maps each file in to_merge to whether it should be rechecked + recheck_set maps each file in to_merge to whether it should be rechecked initially. 
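
(* Illustrative sketch, not part of this diff: the streaming callback built
   above only forwards errors and warnings that earlier merge batches have not
   already reported. The same bookkeeping with plain string sets, where push
   stands in for Persistent_connection.update_clients: *)
module StrSet = Set.Make (String)

let make_streaming_reporter push =
  let seen = ref StrSet.empty in
  fun batch ->
    let fresh = List.filter (fun e -> not (StrSet.mem e !seen)) batch in
    seen := List.fold_left (fun acc e -> StrSet.add e acc) !seen fresh;
    if fresh <> [] then push fresh
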
*) Hh_logger.info "to_merge: %s" (CheckedSet.debug_counts_to_string to_merge); Hh_logger.info "Calculating dependencies"; MonitorRPC.status_update ~event:ServerStatus.Calculating_dependencies_progress; let files_to_merge = CheckedSet.all to_merge in - let%lwt dependency_graph, component_map = - calc_deps ~options ~profiling ~dependency_graph ~components files_to_merge in - + let%lwt (dependency_graph, component_map) = + calc_deps ~options ~profiling ~dependency_graph ~components files_to_merge + in Hh_logger.info "Merging"; - let%lwt merge_errors, suppressions, severity_cover_set, skipped_count = - let intermediate_result_callback results = - let errors = lazy ( - List.map (fun (file, result) -> - match result with - | Ok (errors, suppressions, severity_cover) -> - file, errors, suppressions, severity_cover - | Error msg -> - let errors = error_set_of_merge_error file msg in - let suppressions = Error_suppressions.empty in - let severity_cover = - Utils_js.FilenameMap.singleton - file - (ExactCover.file_cover file (Options.lint_severities options)) - in - file, errors, suppressions, severity_cover - ) (Lazy.force results) - ) in - send_errors_over_connection errors - in - - let master_mutator, worker_mutator = + let%lwt ( ( merge_errors, + warnings, + suppressions, + coverage, + skipped_count, + sig_new_or_changed, + first_internal_error ), + time_to_merge ) = + let (master_mutator, worker_mutator) = Context_heaps.Merge_context_mutator.create - transaction (FilenameSet.union files_to_merge deleted) + transaction + (FilenameSet.union files_to_merge deleted |> FilenameSet.union unparsed_set) in - - let%lwt merge_errors, suppressions, severity_cover_set, skipped_count = + let merge_start_time = Unix.gettimeofday () in + let%lwt result = run_merge_service ~master_mutator ~worker_mutator + ~reader ~intermediate_result_callback ~options ~profiling ~workers - dependency_graph - component_map - recheck_map - (merge_errors, suppressions, severity_cover_set) + ~dependency_graph + ~component_map + ~recheck_set + (merge_errors, warnings, suppressions, coverage, None) in let%lwt () = - if Options.should_profile options - then with_timer_lwt ~options "PrintGCStats" profiling (fun () -> - Lwt.return (Gc.print_stat stderr) - ) - else Lwt.return_unit + if Options.should_profile options then + with_timer_lwt ~options "PrintGCStats" profiling (fun () -> + Lwt.return (Gc.print_stat stderr)) + else + Lwt.return_unit in + let time_to_merge = Unix.gettimeofday () -. 
merge_start_time in Hh_logger.info "Done"; - Lwt.return (merge_errors, suppressions, severity_cover_set, skipped_count) + Lwt.return (result, time_to_merge) in + let errors = { ServerEnv.local_errors; merge_errors; warnings; suppressions } in + (* compute the largest cycle, for logging *) + let top_cycle = + Utils_js.FilenameMap.fold + (fun leader members top -> + let count = Nel.length members in + if count = 1 then + top + else + match top with + | Some (_, top_count) -> + if count > top_count then + Some (leader, count) + else + top + | None -> Some (leader, count)) + component_map + None + in + Lwt.return + ( errors, + coverage, + skipped_count, + sig_new_or_changed, + top_cycle, + time_to_merge, + first_internal_error ) + +let check_files + ~reader + ~options + ~profiling + ~workers + ~errors + ~updated_errors + ~coverage + ~merged_files + ~direct_dependent_files + ~sig_new_or_changed + ~dependency_info + ~persistent_connections + ~recheck_reasons + ~cannot_skip_direct_dependents = + match Options.arch options with + | Options.Classic -> Lwt.return (updated_errors, coverage, 0., 0, None, None, None) + | Options.TypesFirst -> + with_timer_lwt ~options "Check" profiling (fun () -> + Hh_logger.info "Check prep"; + Hh_logger.info "new or changed signatures: %d" (FilenameSet.cardinal sig_new_or_changed); + let focused_to_check = CheckedSet.focused merged_files in + let merged_dependents = CheckedSet.dependents merged_files in + let skipped_count = ref 0 in + let (slowest_file, slowest_time, num_slow_files) = (ref None, ref 0., ref None) in + let record_slow_file file time = + (num_slow_files := + match !num_slow_files with + | None -> Some 1 + | Some n -> Some (n + 1)); + if time > !slowest_time then ( + slowest_time := time; + slowest_file := Some file + ) + in + let all_dependency_graph = Dependency_info.all_dependency_graph dependency_info in + (* skip dependents whenever none of their dependencies have new or changed signatures *) + let dependents_to_check = + FilenameSet.filter (fun f -> + (cannot_skip_direct_dependents && FilenameSet.mem f direct_dependent_files) + || FilenameSet.exists (fun f' -> FilenameSet.mem f' sig_new_or_changed) + @@ FilenameMap.find_unsafe f all_dependency_graph + || + ( incr skipped_count; + false )) + @@ merged_dependents + in + Hh_logger.info + "Check will skip %d of %d files" + !skipped_count + (* We can just add these counts without worrying about files which are in both sets. We + * got these both from a CheckedSet. CheckedSet's representation ensures that a single + * file cannot have more than one kind. 
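
(* Illustrative sketch (standard library only, not part of this diff) of the
   dependent-skipping rule described just above: a dependent file is rechecked
   only if at least one of its dependencies has a new or changed signature. The
   real code additionally honors cannot_skip_direct_dependents and counts the
   files it skips. *)
module StrSet = Set.Make (String)
module StrMap = Map.Make (String)

let dependents_to_check ~deps_of ~sig_new_or_changed dependents =
  StrSet.filter
    (fun f ->
      match StrMap.find_opt f deps_of with
      | Some deps -> not (StrSet.is_empty (StrSet.inter deps sig_new_or_changed))
      | None -> false)
    dependents
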
*) + (FilenameSet.cardinal focused_to_check + FilenameSet.cardinal merged_dependents); + let files = FilenameSet.union focused_to_check dependents_to_check in + let%lwt intermediate_result_callback = + mk_intermediate_result_callback + ~reader + ~options + ~profiling + ~persistent_connections + ~recheck_reasons + updated_errors.ServerEnv.suppressions + in + Hh_logger.info "Checking files"; - let checked = CheckedSet.union unchanged_checked to_merge in - Hh_logger.info "Checked set: %s" (CheckedSet.debug_counts_to_string checked); - - let errors = { ServerEnv.local_errors; merge_errors; suppressions; severity_cover_set } in - let cycle_leaders = component_map - |> Utils_js.FilenameMap.elements - |> List.map (fun (leader, members) -> (leader, Nel.length members)) - |> List.filter (fun (_, member_count) -> member_count > 1) in - Lwt.return (checked, cycle_leaders, errors, skipped_count) - -let ensure_parsed ~options ~profiling ~workers files = + let check_start_time = Unix.gettimeofday () in + let job = + List.fold_left (fun acc file -> Merge_service.check options ~reader file :: acc) + in + let merge new_acc acc = + intermediate_result_callback (lazy new_acc); + List.rev_append new_acc acc + in + let progress_fn ~total ~start ~length:_ = + MonitorRPC.status_update + ServerStatus.(Checking_progress { total = Some total; finished = start }) + in + let max_size = Options.max_files_checked_per_worker options in + let%lwt ret = + MultiWorkerLwt.call + workers + ~job + ~neutral:[] + ~merge + ~next:(MultiWorkerLwt.next ~progress_fn ~max_size workers (FilenameSet.elements files)) + in + let { ServerEnv.merge_errors; warnings; _ } = errors in + let suppressions = updated_errors.ServerEnv.suppressions in + let (merge_errors, warnings, suppressions, coverage, first_internal_error) = + List.fold_left + (fun acc (file, result) -> + let acc = remove_old_results acc file in + add_new_results ~record_slow_file acc file result) + (merge_errors, warnings, suppressions, coverage, None) + ret + in + let time_to_check_merged = Unix.gettimeofday () -. 
check_start_time in + Hh_logger.info "Done"; + let errors = { errors with ServerEnv.merge_errors; warnings; suppressions } in + Lwt.return + ( errors, + coverage, + time_to_check_merged, + !skipped_count, + Option.map ~f:File_key.to_string !slowest_file, + !num_slow_files, + Option.map first_internal_error ~f:(spf "First check internal error:\n%s") )) + +let ensure_parsed ~options ~profiling ~workers ~reader files = with_timer_lwt ~options "EnsureParsed" profiling (fun () -> - let%lwt parse_hash_mismatch_skips = - Parsing_service_js.ensure_parsed options workers (CheckedSet.all files) - in - - if FilenameSet.is_empty parse_hash_mismatch_skips - then Lwt.return_unit - else begin - let files_to_recheck = FilenameSet.fold - (fun f acc -> SSet.add (File_key.to_string f) acc) - parse_hash_mismatch_skips - SSet.empty + let%lwt parse_hash_mismatch_skips = + Parsing_service_js.ensure_parsed ~reader options workers (CheckedSet.all files) in - ServerMonitorListenerState.push_files_to_recheck files_to_recheck; - raise Lwt.Canceled - end - ) + if FilenameSet.is_empty parse_hash_mismatch_skips then + Lwt.return_unit + else + let files_to_recheck = + FilenameSet.fold + (fun f acc -> SSet.add (File_key.to_string f) acc) + parse_hash_mismatch_skips + SSet.empty + in + let file_count = SSet.cardinal files_to_recheck in + let reason = + LspProt.( + if file_count = 1 then + Single_file_changed { filename = SSet.elements files_to_recheck |> List.hd } + else + Many_files_changed { file_count }) + in + ServerMonitorListenerState.push_files_to_recheck ~reason files_to_recheck; + raise Lwt.Canceled) (* When checking contents, ensure that dependencies are checked. Might have more general utility. @@ -527,212 +1046,251 @@ let ensure_parsed ~options ~profiling ~workers files = should be able to emit errors, even places like propertyFindRefs.get_def_info that invoke this function. But it looks like this codepath fails to emit StartRecheck and EndRecheck messages. *) -let ensure_checked_dependencies ~options ~profiling ~workers ~env file file_sig = +let ensure_checked_dependencies ~options ~reader ~env file file_sig = let resolved_requires = - let require_loc_map = File_sig.(require_loc_map file_sig.module_sig) in - SMap.fold (fun r locs resolved_rs -> - let resolved_r = Module_js.imported_module - ~options - ~node_modules_containers:!Files.node_modules_containers - file locs r in - Modulename.Set.add resolved_r resolved_rs - ) require_loc_map Modulename.Set.empty + let require_loc_map = File_sig.With_Loc.(require_loc_map file_sig.module_sig) in + SMap.fold + (fun r locs resolved_rs -> + let locs = Nel.map ALoc.of_loc locs in + let resolved_r = + Module_js.imported_module + ~options + ~reader:(Abstract_state_reader.State_reader reader) + ~node_modules_containers:!Files.node_modules_containers + file + locs + r + in + Modulename.Set.add resolved_r resolved_rs) + require_loc_map + Modulename.Set.empty in - - let infer_input = Modulename.Set.fold (fun m acc -> - match Module_heaps.get_file m ~audit:Expensive.warn with - | Some f -> - if FilenameSet.mem f !env.ServerEnv.files && Module_js.checked_file f ~audit:Expensive.warn - then CheckedSet.add ~dependencies:(FilenameSet.singleton f) acc - else acc - | None -> acc (* complain elsewhere about required module not found *) - ) resolved_requires CheckedSet.empty in - let unchanged_checked = !env.ServerEnv.checked_files in - - (* Often, all dependencies have already been checked, so infer_input contains no unchecked files. 
+ let input = + Modulename.Set.fold + (fun m acc -> + match Module_heaps.Reader.get_file ~reader m ~audit:Expensive.warn with + | Some f -> + let reader = Abstract_state_reader.State_reader reader in + if + FilenameSet.mem f env.ServerEnv.files + && Module_js.checked_file ~reader f ~audit:Expensive.warn + then + CheckedSet.add ~dependencies:(FilenameSet.singleton f) acc + else + acc + | None -> acc) (* complain elsewhere about required module not found *) + resolved_requires + CheckedSet.empty + in + let checked = env.ServerEnv.checked_files in + (* Often, all dependencies have already been checked, so input contains no unchecked files. * In that case, let's short-circuit typecheck, since a no-op typecheck still takes time on * large repos *) - if CheckedSet.is_empty (CheckedSet.diff infer_input unchanged_checked) - then Lwt.return_unit - else begin - let errors = !env.ServerEnv.errors in - let all_dependent_files = FilenameSet.empty in - let direct_dependent_files = FilenameSet.empty in - let persistent_connections = Some (!env.ServerEnv.connections) in - let dependency_graph = !env.ServerEnv.dependency_graph in - let deleted = FilenameSet.empty in - - let%lwt to_merge, components, recheck_map = - include_dependencies_and_dependents - ~options ~profiling ~unchanged_checked ~infer_input ~dependency_graph ~all_dependent_files - ~direct_dependent_files - in - - let%lwt () = ensure_parsed ~options ~profiling ~workers to_merge in - - let%lwt checked, _cycle_leaders, errors, _skipped_count = Transaction.with_transaction (fun transaction -> - merge - ~transaction ~options ~profiling ~workers ~errors - ~unchanged_checked ~to_merge ~components ~recheck_map - ~dependency_graph ~deleted - ~persistent_connections - ~prep_merge:None - ) in - - (* During a normal initialization or recheck, we update the env with the errors and - * calculate the collated errors. However, this code is for when the server is in lazy mode, - * is trying to using typecheck_contents, and is making sure the dependencies are checked. Since - * we're messing with env.errors, we also need to set collated_errors to None. This will force - * us to recompute them the next time someone needs them *) - !env.ServerEnv.collated_errors := None; - env := { !env with ServerEnv. - checked_files = checked; - errors; - }; + let unchecked_dependencies = CheckedSet.diff input checked in + if CheckedSet.is_empty unchecked_dependencies then Lwt.return_unit - end + else ( + Hh_logger.info + "Canceling command due to %d unchecked dependencies" + (CheckedSet.cardinal unchecked_dependencies); + let reason = LspProt.Unchecked_dependencies { filename = File_key.to_string file } in + ServerMonitorListenerState.push_checked_set_to_force ~reason unchecked_dependencies; + raise Lwt.Canceled + ) (* Another special case, similar assumptions as above. 
*) -(** TODO: handle case when file+contents don't agree with file system state **) -let typecheck_contents_ ~options ~workers ~env ~check_syntax ~profiling contents filename = - let%lwt errors, parse_result, info = - parse_contents ~options ~profiling ~check_syntax filename contents in +(** TODO: handle case when file+contents don't agree with file system state **) +let typecheck_contents_ ~options ~env ~check_syntax ~profiling contents filename = + let%lwt (errors, parse_result, info) = + parse_contents ~options ~profiling ~check_syntax filename contents + in + let reader = State_reader.create () in + let lazy_table_of_aloc = Parsing_heaps.Reader.get_sig_ast_aloc_table_unsafe_lazy ~reader in match parse_result with - | Parsing_service_js.Parse_ok (ast, file_sig) -> - (* override docblock info *) - let info = Docblock.set_flow_mode_for_ide_command info in - - (* merge *) - let%lwt cx, typed_ast = with_timer_lwt ~options "MergeContents" profiling (fun () -> - let%lwt () = - ensure_checked_dependencies ~options ~profiling ~workers ~env filename file_sig - in - Lwt.return @@ Merge_service.merge_contents_context options filename ast info file_sig - ) in - - let errors = Context.errors cx in - let errors = - if options.Options.opt_enforce_well_formed_exports then - Inference_utils.set_of_file_sig_tolerable_errors - ~source_file:filename - file_sig.File_sig.tolerable_errors - |> Errors.ErrorSet.union errors - else - errors - in - - (* Suppressions for errors in this file can come from dependencies *) - let suppressions = - let open ServerEnv in + | Parsing_service_js.Parse_ok parse_ok -> + (* override docblock info *) + let (ast, file_sig) = Parsing_service_js.basic parse_ok in + let info = Docblock.set_flow_mode_for_ide_command info in + (* merge *) + let%lwt (cx, typed_ast) = + with_timer_lwt ~options "MergeContents" profiling (fun () -> + let%lwt () = ensure_checked_dependencies ~options ~reader ~env filename file_sig in + Lwt.return + (Merge_service.merge_contents_context ~reader options filename ast info file_sig)) + in + let errors = Context.errors cx in + let errors = + if Inference_utils.well_formed_exports_enabled options filename then + File_sig.With_Loc.(file_sig.tolerable_errors) + |> File_sig.abstractify_tolerable_errors + |> Inference_utils.set_of_file_sig_tolerable_errors ~source_file:filename + |> Flow_error.ErrorSet.union errors + else + errors + in + (* Suppressions for errors in this file can come from dependencies *) + let suppressions = + ServerEnv.( let new_suppressions = Context.error_suppressions cx in - let { suppressions; _ } = !env.errors in - Error_suppressions.update_suppressions suppressions new_suppressions - in - - (* Severity cover info can come from dependencies *) - let severity_cover = - let open ServerEnv in - let file_severity_cover = Context.severity_cover cx in - let { severity_cover_set; _ } = !env.errors in - FilenameMap.union file_severity_cover severity_cover_set - in - - (* Filter out suppressed errors *) - let errors, warnings, _, _ = - Error_suppressions.filter_suppressed_errors suppressions severity_cover errors - ~unused:Error_suppressions.empty (* TODO: track unused suppressions *) - in - - let warnings = if Options.should_include_warnings options - then warnings - else Errors.ErrorSet.empty - in - - Lwt.return (Some (cx, ast, file_sig, typed_ast), errors, warnings, info) - + let { suppressions; _ } = env.errors in + Error_suppressions.update_suppressions suppressions new_suppressions) + in + let severity_cover = Context.severity_cover cx in + 
let include_suppressions = Context.include_suppressions cx in + let aloc_tables = Context.aloc_tables cx in + let (errors, warnings, suppressions) = + Error_suppressions.filter_lints + ~include_suppressions + suppressions + errors + aloc_tables + severity_cover + in + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + let warnings = Flow_error.make_errors_printable lazy_table_of_aloc warnings in + let root = Options.root options in + let file_options = Some (Options.file_options options) in + (* Filter out suppressed errors *) + let (errors, _, _) = + Error_suppressions.filter_suppressed_errors + ~root + ~file_options + suppressions + errors + ~unused:Error_suppressions.empty + (* TODO: track unused suppressions *) + in + (* Filter out suppressed warnings *) + let (warnings, _, _) = + Error_suppressions.filter_suppressed_errors + ~root + ~file_options + suppressions + warnings + ~unused:Error_suppressions.empty + (* TODO: track unused suppressions *) + in + let warnings = + if Options.should_include_warnings options then + warnings + else + Errors.ConcreteLocPrintableErrorSet.empty + in + Lwt.return (Some (cx, ast, file_sig, typed_ast), errors, warnings, info) | Parsing_service_js.Parse_fail fails -> - let errors = match fails with + let errors = + match fails with | Parsing_service_js.Parse_error err -> - let err = Inference_utils.error_of_parse_error ~source_file:filename err in - Errors.ErrorSet.add err errors + let err = Inference_utils.error_of_parse_error ~source_file:filename err in + Flow_error.ErrorSet.add err errors | Parsing_service_js.Docblock_errors errs -> - List.fold_left (fun errors err -> + List.fold_left + (fun errors err -> let err = Inference_utils.error_of_docblock_error ~source_file:filename err in - Errors.ErrorSet.add err errors - ) errors errs + Flow_error.ErrorSet.add err errors) + errors + errs | Parsing_service_js.File_sig_error err -> - let err = Inference_utils.error_of_file_sig_error ~source_file:filename err in - Errors.ErrorSet.add err errors - in - Lwt.return (None, errors, Errors.ErrorSet.empty, info) - + let err = Inference_utils.error_of_file_sig_error ~source_file:filename err in + Flow_error.ErrorSet.add err errors + in + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + Lwt.return (None, errors, Errors.ConcreteLocPrintableErrorSet.empty, info) | Parsing_service_js.Parse_skip - (Parsing_service_js.Skip_non_flow_file - | Parsing_service_js.Skip_resource_file) -> - (* should never happen *) - Lwt.return (None, errors, Errors.ErrorSet.empty, info) - -let typecheck_contents ~options ~workers ~env ~profiling contents filename = - let%lwt cx_opt, errors, warnings, _info = - typecheck_contents_ ~options ~workers ~env ~check_syntax:true ~profiling contents filename in + (Parsing_service_js.Skip_non_flow_file | Parsing_service_js.Skip_resource_file) -> + (* should never happen *) + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + Lwt.return (None, errors, Errors.ConcreteLocPrintableErrorSet.empty, info) + +let typecheck_contents ~options ~env ~profiling contents filename = + let%lwt (cx_opt, errors, warnings, _info) = + typecheck_contents_ ~options ~env ~check_syntax:true ~profiling contents filename + in Lwt.return (cx_opt, errors, warnings) -let basic_check_contents ~options ~workers ~env ~profiling contents filename = +let basic_check_contents ~options ~env ~profiling contents filename = try%lwt - let%lwt cx_opt, _errors, _warnings, info = - typecheck_contents_ - ~options 
~workers ~env ~check_syntax:false ~profiling contents filename in - let cx, file_sig, typed_ast = match cx_opt with - | Some (cx, _, file_sig, typed_ast) -> cx, file_sig, typed_ast - | None -> failwith "Couldn't parse file" in + let%lwt (cx_opt, _errors, _warnings, info) = + typecheck_contents_ ~options ~env ~check_syntax:false ~profiling contents filename + in + let (cx, file_sig, typed_ast) = + match cx_opt with + | Some (cx, _, file_sig, typed_ast) -> (cx, file_sig, typed_ast) + | None -> failwith "Couldn't parse file" + in Lwt.return (Ok (cx, info, file_sig, typed_ast)) with | Lwt.Canceled as exn -> raise exn | exn -> - Hh_logger.error ~exn "Uncaught exception in basic_check_contents"; - let e = spf "%s\n%s" - (Printexc.to_string exn) - (Printexc.get_backtrace ()) in + let exn = Exception.wrap exn in + let e = Exception.to_string exn in + Hh_logger.error "Uncaught exception in basic_check_contents\n%s" e; Lwt.return (Error e) -let init_package_heap ~options ~profiling parsed = +let init_package_heap ~options ~profiling ~reader parsed = with_timer_lwt ~options "PackageHeap" profiling (fun () -> - FilenameSet.iter (fun filename -> - match filename with - | File_key.JsonFile str when Filename.basename str = "package.json" -> - let ast = Parsing_heaps.get_ast_unsafe filename in - Module_js.add_package str ast - | _ -> () - ) parsed; - Lwt.return_unit - ) + let errors = + FilenameSet.fold + (fun filename errors -> + match filename with + | File_key.JsonFile str when Filename.basename str = "package.json" -> + let ast = Parsing_heaps.Mutator_reader.get_ast_unsafe ~reader filename in + let package = Package_json.parse ast in + Module_js.add_package str package; + begin + match package with + | Ok _ -> errors + | Error parse_err -> + let errset = + Inference_utils.set_of_package_json_error ~source_file:filename parse_err + in + update_errset errors filename errset + end + | _ -> errors) + parsed + FilenameMap.empty + in + Lwt.return errors) -let init_libs ~options ~profiling ~local_errors ~suppressions ~severity_cover_set ordered_libs = +let init_libs ~options ~profiling ~local_errors ~warnings ~suppressions ~reader ordered_libs = with_timer_lwt ~options "InitLibs" profiling (fun () -> - let%lwt lib_files = - let options = match Options.verbose options with - | Some { Verbose.enabled_during_flowlib = false; _; } -> - (* Normally we disable verbosity while loading the libs. But if we're running with - * --verbose-flowlib then we want to leave verbosity on *) - { options with Options.opt_verbose = None; } - | _ -> options + let%lwt lib_files = + let options = + match Options.verbose options with + | Some { Verbose.enabled_during_flowlib = false; _ } -> + (* Normally we disable verbosity while loading the libs. 
But if we're running with + * --verbose-flowlib then we want to leave verbosity on *) + { options with Options.opt_verbose = None } + | _ -> options + in + Init_js.init ~options ~reader ordered_libs in - Init_js.init ~options ordered_libs - in - - Lwt.return @@ List.fold_left (fun acc (lib_file, ok, errs, suppressions, severity_cover) -> - let all_ok, errors_acc, suppressions_acc, severity_cover_set_acc = acc in - let all_ok = if ok then all_ok else false in - let errors_acc = update_errset errors_acc lib_file errs in - let suppressions_acc = - Error_suppressions.update_suppressions suppressions_acc suppressions in - let severity_cover_set_acc = FilenameMap.union severity_cover severity_cover_set_acc in - all_ok, errors_acc, suppressions_acc, severity_cover_set_acc - ) (true, local_errors, suppressions, severity_cover_set) lib_files - ) - -(* Given a set of focused files and a set of parsed files, calculate all the dependents and + Lwt.return + @@ List.fold_left + (fun acc (lib_file, ok, errs, warnings, suppressions) -> + let (all_ok, errors_acc, warnings_acc, suppressions_acc) = acc in + let all_ok = + if ok then + all_ok + else + false + in + let errors_acc = update_errset errors_acc lib_file errs in + let warnings_acc = update_errset warnings_acc lib_file warnings in + let suppressions_acc = + Error_suppressions.update_suppressions suppressions_acc suppressions + in + (all_ok, errors_acc, warnings_acc, suppressions_acc)) + (true, local_errors, warnings, suppressions) + lib_files) + +let is_file_tracked_and_checked ~reader filename = + Module_heaps.Reader_dispatcher.is_tracked_file ~reader filename + (* otherwise, f is probably a directory *) + && Module_js.checked_file ~reader ~audit:Expensive.warn filename + +(* Given a CheckedSet of focused files and a dependency graph, calculate all the dependents and * dependencies and return them as a CheckedSet * * This is pretty darn expensive for large repos (on the order of a few seconds). What is taking @@ -740,590 +1298,1178 @@ let init_libs ~options ~profiling ~local_errors ~suppressions ~severity_cover_se * * - Around 75% of the time is dependent_files looking up the dependents * - Around 20% of the time is calc_dependency_graph building the dependency graph - **) -let focused_files_to_infer ~focused ~dependency_graph = - let focused = focused |> FilenameSet.filter (fun f -> - Module_heaps.is_tracked_file f (* otherwise, f is probably a directory *) - && Module_js.checked_file ~audit:Expensive.warn f) + * + * There are no expected invariants for the input sets. The returned set has the following invariants + * 1. Every recursive dependent of a focused file will be in the focused set or the dependent set + * + * `is_file_checked` should return a boolean indicating whether the file has @flow or is otherwise + * considered to be a file that Flow should check. Unfortunately the term "checked" is overloaded in + * this codebase. In some contexts it means the set of files that we are *currently* checking due to + * lazy mode. In other contexts, it means the set of files which are eligible to be checked. In this + * case, it has the latter meaning. 
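(* ----------------------------------------------------------------------------- *)
(* Editor's note: the sketch below is illustrative only and is not part of this   *)
(* change. It uses a toy string-keyed graph and hypothetical names                *)
(* (dependents_of, calc_all_dependents_sketch) to show the closure property the   *)
(* comment above describes: every recursive dependent of a focused file ends up   *)
(* in either the focused set or the dependent set. In the actual code this        *)
(* computation is done by Pure_dep_graph_operations.calc_all_dependents over      *)
(* FilenameSet-based dependency graphs rather than over strings.                  *)

module StrSet = Set.Make (String)

(* Toy reverse edges: which files directly depend on a given file. *)
let dependents_of = function
  | "a.js" -> [ "b.js" ]
  | "b.js" -> [ "c.js" ]
  | _ -> []

(* Fixpoint over the reverse edges: the input files plus all of their recursive
   dependents. *)
let rec calc_all_dependents_sketch acc frontier =
  if StrSet.is_empty frontier then
    acc
  else
    let next =
      StrSet.fold
        (fun f acc -> List.fold_left (fun acc d -> StrSet.add d acc) acc (dependents_of f))
        frontier
        StrSet.empty
    in
    let next = StrSet.diff next acc in
    calc_all_dependents_sketch (StrSet.union acc next) next

let () =
  let focused = StrSet.singleton "a.js" in
  let roots = calc_all_dependents_sketch focused focused in
  (* Mirrors the code that follows: dependents are the roots minus the focused files. *)
  let dependents = StrSet.diff roots focused in
  assert (StrSet.equal dependents (StrSet.of_list [ "b.js"; "c.js" ]))
(* ----------------------------------------------------------------------------- *)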
+ * *) +let focused_files_and_dependents_to_infer + ~is_file_checked + ~all_dependency_graph + ~dependency_graph + ~input_focused + ~input_dependencies + ~all_dependent_files = + let input = + CheckedSet.add + ~focused:input_focused + ~dependencies:(Option.value ~default:FilenameSet.empty input_dependencies) + CheckedSet.empty + in + (* Filter unchecked files out of the input *) + let input = CheckedSet.filter input ~f:is_file_checked in + let focused = CheckedSet.focused input in + (* Roots is the set of all focused files and all dependent files. *) + let roots = + Pure_dep_graph_operations.calc_all_dependents ~dependency_graph ~all_dependency_graph focused in - - let roots = Dep_service.calc_all_reverse_dependencies dependency_graph focused in - - let dependencies = Dep_service.calc_all_dependencies dependency_graph roots in let dependents = FilenameSet.diff roots focused in - - Lwt.return (CheckedSet.add ~focused ~dependents ~dependencies CheckedSet.empty) + let dependencies = CheckedSet.dependencies input in + let checked_files = CheckedSet.add ~focused ~dependents ~dependencies CheckedSet.empty in + (* It's possible that all_dependent_files contains foo.js, which is a dependent of a + * dependency. That's fine if foo.js is in the checked set. But if it's just some random + * other dependent then we need to filter it out. + *) + let all_dependent_files = FilenameSet.inter all_dependent_files (CheckedSet.all checked_files) in + Lwt.return (checked_files, all_dependent_files) let filter_out_node_modules ~options = let root = Options.root options in let file_options = Options.file_options options in FilenameSet.filter (fun fn -> - let filename_str = File_key.to_string fn in - not (Files.is_within_node_modules ~root ~options:file_options filename_str) - ) + let filename_str = File_key.to_string fn in + not (Files.is_within_node_modules ~root ~options:file_options filename_str)) -(* Without a set of focused files, we just focus on every parsed file. We won't focus on - * node_modules. If a node module is a dependency, then we'll just treat it as a dependency. - * Otherwise, we'll ignore it. *) -let unfocused_files_to_infer ~options ~parsed ~dependency_graph = - (* All the non-node_modules files *) - let focused = filter_out_node_modules ~options parsed in - - (* Calculate dependencies to figure out which node_modules stuff we depend on *) - let dependencies = Dep_service.calc_all_dependencies dependency_graph focused in - Lwt.return (CheckedSet.add ~focused ~dependencies CheckedSet.empty) +(* Filesystem lazy mode focuses on any file which changes. Non-lazy mode focuses on every file in + * the repo. In both cases, we never want node_modules to appear in the focused sets. + * + * There are no expected invariants for the input sets. The returned set has the following invariants + * 1. Node modules will only appear in the dependency set. + * 2. Dependent files are empty. 
+ *) +let unfocused_files_and_dependents_to_infer + ~options ~input_focused ~input_dependencies ~all_dependent_files = + let focused = filter_out_node_modules ~options input_focused in + let dependencies = Option.value ~default:FilenameSet.empty input_dependencies in + Lwt.return (CheckedSet.add ~focused ~dependencies CheckedSet.empty, all_dependent_files) -let files_to_infer ~options ~focused ~profiling ~parsed ~dependency_graph = - with_timer_lwt ~options "FilesToInfer" profiling (fun () -> - match focused with - | None -> - unfocused_files_to_infer ~options ~parsed ~dependency_graph - | Some focused -> - focused_files_to_infer ~focused ~dependency_graph - ) +(* Called on initialization in non-lazy mode, with optional focus targets. -(* We maintain the following invariant across rechecks: The set of - `files` contains files that parsed successfully in the previous - phase (which could be the init phase or a previous recheck phase) -*) -let recheck_with_profiling - ~profiling ~transaction ~options ~workers ~updates env ~files_to_focus = - let errors = env.ServerEnv.errors in - - (* If foo.js is updated and foo.js.flow exists, then mark foo.js.flow as - * updated too. This is because sometimes we decide what foo.js.flow - * provides based on the existence of foo.js *) - let updates = FilenameSet.fold (fun file updates -> - if not (File_key.check_suffix file Files.flow_ext) && - Parsing_heaps.has_ast (File_key.with_suffix file Files.flow_ext) - then FilenameSet.add (File_key.with_suffix file Files.flow_ext) updates - else updates - ) updates updates in - - (* split updates into deleted files and modified files *) - (** NOTE: We use the term "modified" in the same sense as the underlying file - system: a modified file exists, and in relation to an old file system - state, a modified file could be any of "new," "changed," or "unchanged." - **) - let modified, deleted = FilenameSet.partition (fun f -> - Sys.file_exists (File_key.to_string f) - ) updates in - let deleted_count = FilenameSet.cardinal deleted in - let modified_count = FilenameSet.cardinal modified in - - (* log modified and deleted files *) - if deleted_count + modified_count > 0 then ( - Hh_logger.info "recheck %d modified, %d deleted files" - modified_count deleted_count; - let log_files files msg n = - Hh_logger.info "%s files:" msg; - let _ = FilenameSet.fold (fun f i -> - Hh_logger.info "%d/%d: %s" i n (File_key.to_string f); - i + 1 - ) files 1 - in () - in - if modified_count > 0 then log_files modified "modified" modified_count; - if deleted_count > 0 then log_files deleted "deleted" deleted_count - ); + When focus targets are not provided, the result is a checked set focusing on parsed files minus + node modules, plus no dependents (because effectively any dependent is already focused), plus all + their dependencies (minus those that are already focused). The set of dependencies might contain + node modules. - (* We don't need to delete things from the parsing heaps - they will be automatically oldified. - * Oldifying something removes it from the heap (but keeps it around in case we need it back) *) + When focus targets are provided, the result is a checked set focusing on those files, plus their + dependents, plus all their combined dependencies. All these sets might contain node modules. 
- Hh_logger.info "Parsing"; - (* reparse modified files, updating modified to new_or_changed to reflect - removal of unchanged files *) - let%lwt new_or_changed, freshparsed, unparsed, unchanged_parse, new_local_errors = - reparse ~options ~profiling ~transaction ~workers ~modified ~deleted in - - let%lwt new_or_changed, freshparsed = - if not (FilenameSet.is_empty files_to_focus) then begin - (* Normally we can ignore files which are unmodified. However, if someone passed force_focus, - * then we may need to ressurect some unmodified files into the new_or_changed and freshparsed - * sets. For example, if someone ran `flow force-recheck --focus a.js` and a.js is not - * focused, then we need to recheck it even if the file is unchanged + In either case, we can consider the result to be "closed" in terms of expected invariants. +*) +let files_to_infer ~options ~profiling ~reader ~dependency_info ?focus_targets ~parsed = + with_timer_lwt ~options "FilesToInfer" profiling (fun () -> + match focus_targets with + | None -> + unfocused_files_and_dependents_to_infer + ~options + ~input_focused:parsed + ~input_dependencies:None + ~all_dependent_files:FilenameSet.empty + | Some input_focused -> + let all_dependency_graph = Dependency_info.all_dependency_graph dependency_info in + let dependency_graph = Dependency_info.dependency_graph dependency_info in + let is_file_checked = + is_file_tracked_and_checked ~reader:(Abstract_state_reader.Mutator_state_reader reader) + in + focused_files_and_dependents_to_infer + ~is_file_checked + ~all_dependency_graph + ~dependency_graph + ~input_focused + ~input_dependencies:None + ~all_dependent_files:FilenameSet.empty) + +let restart_if_faster_than_recheck ~options ~env ~to_merge ~file_watcher_metadata = + match Options.lazy_mode options with + | Options.NON_LAZY_MODE + | Options.LAZY_MODE_FILESYSTEM + | Options.LAZY_MODE_IDE -> + (* Only watchman mode might restart *) + Lwt.return_none + | Options.LAZY_MODE_WATCHMAN -> + let { MonitorProt.total_update_distance; changed_mergebase } = file_watcher_metadata in + Hh_logger.info + "File watcher moved %d revisions and %s mergebase" + total_update_distance + ( if changed_mergebase then + "changed" + else + "did not change" ); + + if changed_mergebase then ( + (* TODO (glevi) - One of the numbers we need to estimate is "If we restart how many files + * would we merge". Currently we're looking at the number of already checked files. But a + * better way would be to * - * We avoid rechecking already-focused unmodified files since they're already focused and - * haven't changed :P *) - let unchanged_files_to_focus = - FilenameSet.diff - (FilenameSet.inter files_to_focus unchanged_parse) (* unchanged files to focus... *) - (CheckedSet.focused env.ServerEnv.checked_files) (* ...which aren't already focused *) + * 1. When watchman notices the mergebase changing, also record the files which have changed + * since the mergebase + * 2. Send these files to the server + * 3. Calculate the fanout of these files (we should have an updated dependency graph by now) + * 4. That should actually be the right number, instead of just an estimate. But it costs + * a little to compute the fanout + *) + let files_already_checked = CheckedSet.cardinal env.ServerEnv.checked_files in + let files_about_to_recheck = CheckedSet.cardinal to_merge in + Hh_logger.info + "We've already checked %d files. 
We're about to recheck %d files" + files_already_checked + files_about_to_recheck; + + let init_time = Recheck_stats.get_init_time () in + let per_file_time = Recheck_stats.get_per_file_time () in + let time_to_restart = init_time +. (per_file_time *. float_of_int files_already_checked) in + let time_to_recheck = per_file_time *. float_of_int files_about_to_recheck in + let estimates = + { + Recheck_stats.estimated_time_to_recheck = time_to_recheck; + estimated_time_to_restart = time_to_restart; + estimated_time_to_init = init_time; + estimated_time_per_file = per_file_time; + estimated_files_to_recheck = files_about_to_recheck; + estimated_files_to_init = files_already_checked; + } + in + Hh_logger.debug + "Estimated restart time: %fs to init + (%fs * %d files) = %fs" + init_time + per_file_time + files_already_checked + time_to_restart; + Hh_logger.debug + "Estimated recheck time: %fs * %d files = %fs" + per_file_time + files_about_to_recheck + time_to_recheck; + + Hh_logger.info + "Estimating a recheck would take %.2fs and a restart would take %.2fs" + time_to_recheck + time_to_restart; + let%lwt () = + if time_to_restart < time_to_recheck then + let%lwt () = Recheck_stats.record_last_estimates ~options ~estimates in + FlowExitStatus.(exit ~msg:"Restarting after a rebase to save time" Restart) + else + Lwt.return_unit in - let%lwt () = ensure_parsed ~options ~profiling ~workers - (CheckedSet.add ~focused:unchanged_files_to_focus CheckedSet.empty) + Lwt.return (Some estimates) + ) else + Lwt.return_none + +module Recheck : sig + type recheck_result = { + new_or_changed: Utils_js.FilenameSet.t; + deleted: Utils_js.FilenameSet.t; + all_dependent_files: Utils_js.FilenameSet.t; + top_cycle: (File_key.t * int) option; + merge_skip_count: int; + check_skip_count: int; + slowest_file: string option; + num_slow_files: int option; + estimates: Recheck_stats.estimates option; + } + + val full : + profiling:Profiling_js.running -> + transaction:Transaction.t -> + reader:Parsing_heaps.Mutator_reader.reader -> + options:Options.t -> + workers:MultiWorkerLwt.worker list option -> + updates:Utils_js.FilenameSet.t -> + files_to_force:CheckedSet.t -> + file_watcher_metadata:MonitorProt.file_watcher_metadata -> + recheck_reasons:LspProt.recheck_reason list -> + will_be_checked_files:CheckedSet.t ref -> + env:ServerEnv.env -> + (ServerEnv.env * recheck_result * string option) Lwt.t + + val parse_and_update_dependency_info : + profiling:Profiling_js.running -> + transaction:Transaction.t -> + reader:Parsing_heaps.Mutator_reader.reader -> + options:Options.t -> + workers:MultiWorkerLwt.worker list option -> + updates:Utils_js.FilenameSet.t -> + files_to_force:CheckedSet.t -> + recheck_reasons:LspProt.recheck_reason list -> + env:ServerEnv.env -> + ServerEnv.env Lwt.t + + (* Exposed only for testing purposes. Not meant for general consumption. 
*) + val determine_what_to_recheck : + profiling:Profiling_js.running -> + options:Options.t -> + is_file_checked:(File_key.t -> bool) -> + ide_open_files:SSet.t Lazy.t -> + dependency_graph:FilenameSet.t FilenameMap.t -> + all_dependency_graph:FilenameSet.t FilenameMap.t -> + checked_files:CheckedSet.t -> + freshparsed:FilenameSet.t -> + unparsed_set:FilenameSet.t -> + deleted:FilenameSet.t -> + unchanged_checked:CheckedSet.t -> + files_to_force:CheckedSet.t -> + unchanged_files_to_force:CheckedSet.t -> + direct_dependent_files:FilenameSet.t -> + (* to_merge, components, recheck_set, all_dependent_files *) + (CheckedSet.t * File_key.t Nel.t list * FilenameSet.t * FilenameSet.t) Lwt.t +end = struct + type recheck_result = { + new_or_changed: Utils_js.FilenameSet.t; + deleted: Utils_js.FilenameSet.t; + all_dependent_files: Utils_js.FilenameSet.t; + top_cycle: (File_key.t * int) option; + merge_skip_count: int; + check_skip_count: int; + slowest_file: string option; + num_slow_files: int option; + estimates: Recheck_stats.estimates option; + } + + (* This is the first part of the recheck. It parses the files and updates the dependency graph. It + * does NOT figure out which files to merge or merge them. + * + * It returns an updated env and a bunch of intermediate values which `recheck_merge` can use to + * calculate the to_merge and perform the merge *) + let recheck_parse_and_update_dependency_info + ~profiling + ~transaction + ~reader + ~options + ~workers + ~updates + ~files_to_force + ~recheck_reasons + ~env = + let lazy_table_of_aloc = + Parsing_heaps.Mutator_reader.get_sig_ast_aloc_table_unsafe_lazy ~reader + in + let errors = env.ServerEnv.errors in + (* files_to_force is a request to promote certain files to be checked as a dependency, dependent, + * or focused file. We can ignore a request if the file is already checked at the desired level + * or at a more important level *) + let files_to_force = CheckedSet.diff files_to_force env.ServerEnv.checked_files in + (* split updates into deleted files and modified files *) + (* NOTE: We use the term "modified" in the same sense as the underlying file + system: a modified file exists, and in relation to an old file system + state, a modified file could be any of "new," "changed," or "unchanged." 
+ **) + let (modified, deleted) = + FilenameSet.partition (fun f -> Sys.file_exists (File_key.to_string f)) updates + in + let deleted_count = FilenameSet.cardinal deleted in + let modified_count = FilenameSet.cardinal modified in + (* log modified and deleted files *) + if deleted_count + modified_count > 0 then ( + Hh_logger.info "recheck %d modified, %d deleted files" modified_count deleted_count; + let log_files files msg n = + Hh_logger.info "%s files:" msg; + let _ = + FilenameSet.fold + (fun f i -> + Hh_logger.info "%d/%d: %s" i n (File_key.to_string f); + i + 1) + files + 1 + in + () in - Lwt.return ( - FilenameSet.union new_or_changed unchanged_files_to_focus, - FilenameSet.union freshparsed unchanged_files_to_focus - ) - end else Lwt.return (new_or_changed, freshparsed) - in - - let unparsed_set = - List.fold_left (fun set (fn, _) -> FilenameSet.add fn set) FilenameSet.empty unparsed - in - - (* clear errors for new, changed and deleted files *) - let errors = - errors - |> clear_errors new_or_changed - |> clear_errors deleted - in - - (* record reparse errors *) - let errors = - let () = - let error_set: Errors.ErrorSet.t = - FilenameMap.fold (fun _ -> Errors.ErrorSet.union) new_local_errors Errors.ErrorSet.empty + if modified_count > 0 then log_files modified "modified" modified_count; + if deleted_count > 0 then log_files deleted "deleted" deleted_count + ); + + (* We don't need to delete things from the parsing heaps - they will be automatically oldified. + * Oldifying something removes it from the heap (but keeps it around in case we need it back) *) + Hh_logger.info "Parsing"; + + (* reparse modified files, updating modified to new_or_changed to reflect + * removal of unchanged files + * + * new_or_changed - Set of files which are not unchanged. This includes freshparsed, fails & skips + * freshparsed - Set of files which parsed successfully + * unparsed - Set of files which were skipped (e.g. 
no @flow) or which we failed to parse + * unchanged_parse - Set of files who's file hash didn't changes + * new_local_errors - Parse errors, docblock errors, etc + *) + let%lwt (new_or_changed, freshparsed, unparsed, _unchanged_parse, new_local_errors) = + reparse ~options ~profiling ~transaction ~reader ~workers ~modified ~deleted + in + let unparsed_set = + List.fold_left (fun set (fn, _) -> FilenameSet.add fn set) FilenameSet.empty unparsed + in + (* clear errors for new, changed and deleted files *) + let errors = errors |> clear_errors new_or_changed |> clear_errors deleted in + (* record reparse errors *) + let errors = + let () = + let error_set : Flow_error.ErrorSet.t = + FilenameMap.fold + (fun _ -> Flow_error.ErrorSet.union) + new_local_errors + Flow_error.ErrorSet.empty + in + let error_set = Flow_error.make_errors_printable lazy_table_of_aloc error_set in + if Errors.ConcreteLocPrintableErrorSet.cardinal error_set > 0 then + Persistent_connection.update_clients + ~clients:env.ServerEnv.connections + ~errors_reason:(LspProt.Recheck_streaming { recheck_reasons }) + ~calc_errors_and_warnings:(fun () -> (error_set, FilenameMap.empty)) in - if Errors.ErrorSet.cardinal error_set > 0 - then Persistent_connection.update_clients - ~clients:env.ServerEnv.connections - ~calc_errors_and_warnings:(fun () -> error_set, FilenameMap.empty) + let local_errors = merge_error_maps new_local_errors errors.ServerEnv.local_errors in + { errors with ServerEnv.local_errors } in - let local_errors = merge_error_maps new_local_errors errors.ServerEnv.local_errors in - { errors with ServerEnv.local_errors } - in + (* get old (unchanged, undeleted) files that were parsed successfully *) + let old_parsed = env.ServerEnv.files in + let new_or_changed_or_deleted = FilenameSet.union new_or_changed deleted in + let unchanged = FilenameSet.diff old_parsed new_or_changed_or_deleted in + Hh_logger.debug + "recheck: old = %d, del = %d, fresh = %d, unmod = %d" + (FilenameSet.cardinal old_parsed) + (FilenameSet.cardinal deleted) + (FilenameSet.cardinal freshparsed) + (FilenameSet.cardinal unchanged); - (* get old (unchanged, undeleted) files that were parsed successfully *) - let old_parsed = env.ServerEnv.files in - let new_or_changed_or_deleted = FilenameSet.union new_or_changed deleted in - let unchanged = FilenameSet.diff old_parsed new_or_changed_or_deleted in + (* Here's where the interesting part of rechecking begins. Before diving into + code, let's think through the problem independently. - Hh_logger.debug - "recheck: old = %d, del = %d, fresh = %d, unmod = %d" - (FilenameSet.cardinal old_parsed) - (FilenameSet.cardinal deleted) - (FilenameSet.cardinal freshparsed) - (FilenameSet.cardinal unchanged); + Note that changing a file can be conceptually thought of as deleting the + file and then adding it back as a new file. While such a reduction might + miss optimization opportunities (so we don't actually implement it), it + simplifies thinking about correctness. - (** Here's where the interesting part of rechecking begins. Before diving into - code, let's think through the problem independently. + We focus on dependency management. Specifically, we discuss how to + correctly update InfoHeap and NameHeap, and calculate the set of unchanged + files whose imports might resolve to different files. (With these results, + the remaining part of rechecking is relatively simple.) - Note that changing a file can be conceptually thought of as deleting the - file and then adding it back as a new file. 
While such a reduction might - miss optimization opportunities (so we don't actually implement it), it - simplifies thinking about correctness. + Recall that InfoHeap maps file names in FS to module names in MS, where + each file name in FS must exist, different file names may map to the same + module name, and every module name in MS is mapped to by at least one file + name; and NameHeap maps module names in MS to file names in FS, where the + file name mapped to by a module name must map back to the same module name + in InfoHeap. A file's imports might resolve to different files if the + corresponding modules map to different files in NameHeap. - We focus on dependency management. Specifically, we discuss how to - correctly update InfoHeap and NameHeap, and calculate the set of unchanged - files whose imports might resolve to different files. (With these results, - the remaining part of rechecking is relatively simple.) + Deleting a file + =============== - Recall that InfoHeap maps file names in FS to module names in MS, where - each file name in FS must exist, different file names may map to the same - module name, and every module name in MS is mapped to by at least one file - name; and NameHeap maps module names in MS to file names in FS, where the - file name mapped to by a module name must map back to the same module name - in InfoHeap. A file's imports might resolve to different files if the - corresponding modules map to different files in NameHeap. + Suppose that a file D is deleted. Let D |-> m in InfoHeap, and m |-> F in + NameHeap. - Deleting a file - =============== + Remove D |-> m from InfoHeap. - Suppose that a file D is deleted. Let D |-> m in InfoHeap, and m |-> F in - NameHeap. + If F = D, then remove m |-> F from NameHeap and mark m "dirty": any file + importing m will be affected. If other files map to m in InfoHeap, map m + to one of those files in NameHeap. - Remove D |-> m from InfoHeap. + Adding a file + ============= - If F = D, then remove m |-> F from NameHeap and mark m "dirty": any file - importing m will be affected. If other files map to m in InfoHeap, map m - to one of those files in NameHeap. + Suppose that a new file N is added. - Adding a file - ============= + Map N to some module name, say m, in InfoHeap. If m is not mapped to any + file in NameHeap, add m |-> N to NameHeap and mark m "dirty." Otherwise, + decide whether to replace the existing mapping to m |-> N in NameHeap, and + pessimistically assuming it might be, mark m "dirty." - Suppose that a new file N is added. + Changing a file + ============= - Map N to some module name, say m, in InfoHeap. If m is not mapped to any - file in NameHeap, add m |-> N to NameHeap and mark m "dirty." Otherwise, - decide whether to replace the existing mapping to m |-> N in NameHeap, and - pessimistically assuming it might be, mark m "dirty." + What happens when a file C is changed? Suppose that C |-> m in InfoHeap, + and m |-> F in NameHeap. - Changing a file - ============= + Optimistically, C continues to map to m in InfoHeap and we do nothing. - What happens when a file C is changed? Suppose that C |-> m in InfoHeap, - and m |-> F in NameHeap. + However, let's pessimistically assume that C maps to a different m' in + InfoHeap. Considering C deleted and added back as new, we must remove C + |-> m from InfoHeap and add C |-> m' to InfoHeap. If F = C, then remove m + |-> F from NameHeap and mark m "dirty." If other files map to m in + InfoHeap, map m to one of those files in NameHeap. 
If m' is not mapped to + any file in NameHeap, add m' |-> C to NameHeap and mark m' "dirty." + Otherwise, decide whether to replace the existing mapping to m' |-> C in + NameHeap, and mark m' "dirty." - Optimistically, C continues to map to m in InfoHeap and we do nothing. + Summary + ======= - However, let's pessimistically assume that C maps to a different m' in - InfoHeap. Considering C deleted and added back as new, we must remove C - |-> m from InfoHeap and add C |-> m' to InfoHeap. If F = C, then remove m - |-> F from NameHeap and mark m "dirty." If other files map to m in - InfoHeap, map m to one of those files in NameHeap. If m' is not mapped to - any file in NameHeap, add m' |-> C to NameHeap and mark m' "dirty." - Otherwise, decide whether to replace the existing mapping to m' |-> C in - NameHeap, and mark m' "dirty." + Summarizing, if an existing file F1 is changed or deleted, and F1 |-> m in + InfoHeap and m |-> F in NameHeap, and F1 = F, then mark m "dirty." And if + a new file or a changed file F2 now maps to m' in InfoHeap, mark m' "dirty." - Summary - ======= + Ideally, any module name that does not map to a different file in NameHeap + should not be considered "dirty." - Summarizing, if an existing file F1 is changed or deleted, and F1 |-> m in - InfoHeap and m |-> F in NameHeap, and F1 = F, then mark m "dirty." And if - a new file or a changed file F2 now maps to m' in InfoHeap, mark m' "dirty." + In terms of implementation: - Ideally, any module name that does not map to a different file in NameHeap - should not be considered "dirty." + Deleted file + ============ - In terms of implementation: + Say it pointed to module OLD_M - Deleted file - ============ + 1. need to repick a provider for OLD_M *if OLD_M's current provider is this + file* + 2. files that depend on OLD_M need to be rechecked if: + a. the provider for OLD_M is **replaced** or **removed**; or + b. the provider for OLD_M is **unchanged**, but is a _changed file_ - Say it pointed to module OLD_M + New file + ======== - 1. need to repick a provider for OLD_M *if OLD_M's current provider is this - file* - 2. files that depend on OLD_M need to be rechecked if: - a. the provider for OLD_M is **replaced** or **removed**; or - b. the provider for OLD_M is **unchanged**, but is a _changed file_ + Say it points to module NEW_M - New file - ======== + 1. need to repick a provider for NEW_M + 2. files that depend on NEW_M need to be rechecked if: + a. the provider for NEW_M is **added** or **replaced**; or + b. the provider for NEW_M is **unchanged**, but is a _changed file_ - Say it points to module NEW_M + Changed file + ============ - 1. need to repick a provider for NEW_M - 2. files that depend on NEW_M need to be rechecked if: - a. the provider for NEW_M is **added** or **replaced**; or - b. the provider for NEW_M is **unchanged**, but is a _changed file_ + Say it pointed to module OLD_M, now points to module NEW_M - Changed file - ============ + * Is OLD_M different from NEW_M? *(= delete the file, then add it back)* - Say it pointed to module OLD_M, now points to module NEW_M + 1. need to repick providers for OLD_M *if OLD_M's current provider is this + file*. + 2. files that depend on OLD_M need to be rechecked if: + a. the provider for OLD_M is **replaced** or **removed**; or + b. the provider for OLD_M is **unchanged**, but is a _changed file_ + 3. need to repick a provider for NEW_M + 4. files that depend on NEW_M need to be rechecked if: + a. the provider for NEW_M is **added** or **replaced**; or + b. 
the provider for NEW_M is **unchanged**, but is a _changed file_ - * Is OLD_M different from NEW_M? *(= delete the file, then add it back)* + * TODO: Is OLD_M the same as NEW_M? - 1. need to repick providers for OLD_M *if OLD_M's current provider is this - file*. - 2. files that depend on OLD_M need to be rechecked if: - a. the provider for OLD_M is **replaced** or **removed**; or - b. the provider for OLD_M is **unchanged**, but is a _changed file_ - 3. need to repick a provider for NEW_M - 4. files that depend on NEW_M need to be rechecked if: - a. the provider for NEW_M is **added** or **replaced**; or - b. the provider for NEW_M is **unchanged**, but is a _changed file_ + 1. *don't repick a provider!* + 2. files that depend on OLD_M need to be rechecked if: OLD_M's current provider + is a _changed file_ - * TODO: Is OLD_M the same as NEW_M? + **) - 1. *don't repick a provider!* - 2. files that depend on OLD_M need to be rechecked if: OLD_M's current provider - is a _changed file_ + (* remember old modules *) + let unchanged_checked = + CheckedSet.remove new_or_changed_or_deleted env.ServerEnv.checked_files + in + let all_providers_mutator = Module_hashtables.All_providers_mutator.create transaction in + (* clear out records of files, and names of modules provided by those files *) + let%lwt old_modules = + with_timer_lwt ~options "ModuleClearFiles" profiling (fun () -> + Module_js.calc_old_modules + ~reader + workers + ~all_providers_mutator + ~options + new_or_changed_or_deleted) + in + (* We may be forcing a recheck on some unchanged files *) + let unchanged_files_to_force = + CheckedSet.filter files_to_force ~f:(fun fn -> + (not (FilenameSet.mem fn new_or_changed)) && FilenameSet.mem fn old_parsed) + in + MonitorRPC.status_update ServerStatus.Resolving_dependencies_progress; + let%lwt (changed_modules, resolved_requires_changed_in_commit_modules, errors) = + commit_modules_and_resolve_requires + ~transaction + ~reader + ~all_providers_mutator + ~options + ~profiling + ~workers + ~old_modules + ~parsed_set:freshparsed + ~unparsed + ~unparsed_set + ~new_or_changed + ~deleted + ~errors + ~is_init:false + in + (* We can ignore unchanged files which were forced as dependencies. We don't care about their + * dependents *) + let unchanged_files_with_dependents = + FilenameSet.union + (CheckedSet.focused unchanged_files_to_force) + (CheckedSet.dependents unchanged_files_to_force) + in + (* Figure out which modules the unchanged forced files provide. We need these to figure out + * which dependents need to be added to the checked set *) + let%lwt unchanged_modules = + with_timer_lwt ~options "CalcUnchangedModules" profiling (fun () -> + Module_js.calc_unchanged_modules ~reader workers unchanged_files_with_dependents) + in + let parsed = FilenameSet.union freshparsed unchanged in + (* direct_dependent_files are unchanged files which directly depend on changed modules, + or are new / changed files that are phantom dependents. 
all_dependent_files are + direct_dependent_files plus their dependents (transitive closure) *) + let%lwt direct_dependent_files = + with_timer_lwt ~options "DirectDependentFiles" profiling (fun () -> + let root_files = FilenameSet.union new_or_changed unchanged_files_with_dependents in + DirectDependentFilesCache.with_cache ~options ~root_files ~on_miss:(fun () -> + Dep_service.calc_direct_dependents + ~reader:(Abstract_state_reader.Mutator_state_reader reader) + workers + ~candidates:(FilenameSet.diff unchanged unchanged_files_with_dependents) + ~root_files + ~root_modules:(Modulename.Set.union unchanged_modules changed_modules))) + in + Hh_logger.info "Re-resolving directly dependent files"; - **) + let node_modules_containers = !Files.node_modules_containers in + (* requires in direct_dependent_files must be re-resolved before merging. *) + let mutator = + Module_heaps.Resolved_requires_mutator.create transaction direct_dependent_files + in + let%lwt resolved_requires_changed_in_reresolve_direct_dependents = + with_timer_lwt ~options "ReresolveDirectDependents" profiling (fun () -> + let%lwt resolved_requires_changed = + MultiWorkerLwt.call + workers + ~job:(fun anything_changed files -> + List.fold_left + (fun anything_changed filename -> + let (changed, errors) = + Module_js.add_parsed_resolved_requires + filename + ~mutator + ~reader + ~options + ~node_modules_containers + in + ignore errors; + + (* TODO: why, FFS, why? *) + anything_changed || changed) + anything_changed + files) + ~neutral:false + ~merge:(fun changed1 changed2 -> changed1 || changed2) + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements direct_dependent_files)) + in + clear_cache_if_resolved_requires_changed resolved_requires_changed; + Lwt.return resolved_requires_changed) + in + Hh_logger.info "Recalculating dependency graph"; + let%lwt dependency_info = + with_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> + let files_to_update_dependency_info = + FilenameSet.union freshparsed direct_dependent_files + in + let%lwt updated_dependency_info = + Dep_service.calc_partial_dependency_info + ~options + ~reader + workers + files_to_update_dependency_info + ~parsed + in + let old_dependency_info = env.ServerEnv.dependency_info in + let to_remove = FilenameSet.union unparsed_set deleted in + match (old_dependency_info, updated_dependency_info) with + | (Dependency_info.Classic old_map, Dependency_info.Classic updated_map) -> + Lwt.return + (Dependency_info.Classic + ( old_map + |> FilenameSet.fold FilenameMap.remove to_remove + |> FilenameMap.union updated_map )) + | (Dependency_info.TypesFirst old_map, Dependency_info.TypesFirst updated_map) -> + Lwt.return + (Dependency_info.TypesFirst + ( old_map + |> FilenameSet.fold FilenameMap.remove to_remove + |> FilenameMap.union updated_map )) + | _ -> assert false) + in + (* Here's how to update unparsed: + * 1. Remove the parsed files. This removes any file which used to be unparsed but is now parsed + * 2. Remove the deleted files. This removes any previously unparsed file which was deleted + * 3. Add the newly unparsed files. 
This adds new unparsed files or files which became unparsed *) + let unparsed = + let to_remove = FilenameSet.union parsed deleted in + FilenameSet.diff env.ServerEnv.unparsed to_remove |> FilenameSet.union unparsed_set + in + let cannot_skip_direct_dependents = + (not (Options.allow_skip_direct_dependents options)) + || resolved_requires_changed_in_commit_modules + || resolved_requires_changed_in_reresolve_direct_dependents + || deleted_count > 0 + || FilenameSet.cardinal unparsed_set > 0 + in + let env = { env with ServerEnv.files = parsed; unparsed; dependency_info } in + let intermediate_values = + ( deleted, + direct_dependent_files, + errors, + files_to_force, + freshparsed, + new_or_changed, + unchanged_checked, + unchanged_files_to_force, + unparsed_set, + cannot_skip_direct_dependents ) + in + Lwt.return (env, intermediate_values) - (* remember old modules *) - let unchanged_checked = CheckedSet.remove new_or_changed_or_deleted env.ServerEnv.checked_files in + let determine_what_to_recheck + ~profiling + ~options + ~is_file_checked + ~ide_open_files + ~dependency_graph + ~all_dependency_graph + ~checked_files + ~freshparsed + ~unparsed_set + ~deleted + ~unchanged_checked + ~files_to_force + ~unchanged_files_to_force + ~direct_dependent_files = + let%lwt all_dependent_files = + with_timer_lwt ~options "AllDependentFiles" profiling (fun () -> + if + FilenameSet.is_empty direct_dependent_files + (* as is the case for anything doing `check_contents` *) + then + Lwt.return FilenameSet.empty + (* avoid O(dependency graph) calculations *) + else + Lwt.return + (Pure_dep_graph_operations.calc_all_dependents + ~dependency_graph + ~all_dependency_graph + direct_dependent_files)) + in + let acceptable_files_to_focus = + FilenameSet.union freshparsed (CheckedSet.all unchanged_files_to_force) + in + let%lwt (updated_checked_files, all_dependent_files) = + with_timer_lwt ~options "RecalcDepGraph" profiling (fun () -> + match Options.lazy_mode options with + | Options.NON_LAZY_MODE + (* Non lazy mode treats every file as focused. *) + + | Options.LAZY_MODE_WATCHMAN + (* Watchman mode treats every modified file as focused *) + + | Options.LAZY_MODE_FILESYSTEM -> + (* FS mode treats every modified file as focused *) + let old_focus_targets = CheckedSet.focused checked_files in + let old_focus_targets = FilenameSet.diff old_focus_targets deleted in + let old_focus_targets = FilenameSet.diff old_focus_targets unparsed_set in + let focused = FilenameSet.union old_focus_targets freshparsed in + unfocused_files_and_dependents_to_infer + ~options + ~input_focused:(FilenameSet.union focused (CheckedSet.focused files_to_force)) + ~input_dependencies:(Some (CheckedSet.dependencies files_to_force)) + ~all_dependent_files + | Options.LAZY_MODE_IDE -> + (* IDE mode only treats opened files as focused *) + (* Unfortunately, our checked_files set might be out of date. This update could have added + * some new dependents or dependencies. So we need to recalculate those. + * + * To calculate dependents and dependencies, we need to know what are the focused files. We + * define the focused files to be the union of + * + * 1. The files that were previously focused + * 2. Modified files that are currently open in the IDE + * 3. If this is a `flow force-recheck --focus A.js B.js C.js`, then A.js, B.js and C.js + * + * Remember that the IDE might open a new file or keep open a deleted file, so the focused + * set might be missing that file. If that file reappears, we must remember to refocus on + * it. 
+ * *) + let open_in_ide = + let (lazy opened_files) = ide_open_files in + FilenameSet.filter + (function + | File_key.SourceFile fn + | File_key.LibFile fn + | File_key.JsonFile fn + | File_key.ResourceFile fn -> + SSet.mem fn opened_files + | File_key.Builtins -> false) + freshparsed + in + let input_focused = + CheckedSet.focused files_to_force + (* Files to force to be focused *) + |> filter_out_node_modules ~options + (* Never focus node modules *) + |> FilenameSet.union (CheckedSet.focused checked_files) + (* old focused *) + |> FilenameSet.union open_in_ide + (* Files which are open in the IDE *) + in + let input_dependencies = Some (CheckedSet.dependencies files_to_force) in + focused_files_and_dependents_to_infer + ~is_file_checked + ~all_dependency_graph + ~dependency_graph + ~input_focused + ~input_dependencies + ~all_dependent_files) + in + (* Filter updated_checked_files down to the files which we just parsed or unchanged files which + * will be focused *) + let input = + CheckedSet.filter updated_checked_files ~f:(fun fn -> + FilenameSet.mem fn acceptable_files_to_focus) + in + let%lwt (to_merge, components, recheck_set) = + include_dependencies_and_dependents + ~options + ~profiling + ~unchanged_checked + ~input + ~all_dependency_graph + ~dependency_graph + ~all_dependent_files + in + Lwt.return (to_merge, components, recheck_set, all_dependent_files) - let all_providers_mutator = Module_hashtables.All_providers_mutator.create transaction in + (* This function assumes it is called after recheck_parse_and_update_dependency_info. It uses some + * of the info computed by recheck_parse_and_update_dependency_info to figure out which files to + * merge. Then it merges them. *) + let recheck_merge + ~profiling + ~transaction + ~reader + ~options + ~workers + ~will_be_checked_files + ~file_watcher_metadata + ~intermediate_values + ~recheck_reasons + ~env = + let ( deleted, + direct_dependent_files, + errors, + files_to_force, + freshparsed, + new_or_changed, + unchanged_checked, + unchanged_files_to_force, + unparsed_set, + cannot_skip_direct_dependents ) = + intermediate_values + in + let dependency_info = env.ServerEnv.dependency_info in + let all_dependency_graph = Dependency_info.all_dependency_graph dependency_info in + let dependency_graph = Dependency_info.dependency_graph dependency_info in + let is_file_checked = + is_file_tracked_and_checked ~reader:(Abstract_state_reader.Mutator_state_reader reader) + in + let%lwt (to_merge, components, recheck_set, all_dependent_files) = + determine_what_to_recheck + ~profiling + ~options + ~is_file_checked + ~ide_open_files:(lazy (Persistent_connection.get_opened_files env.ServerEnv.connections)) + ~dependency_graph + ~all_dependency_graph + ~checked_files:env.ServerEnv.checked_files + ~freshparsed + ~unparsed_set + ~deleted + ~unchanged_checked + ~files_to_force + ~unchanged_files_to_force + ~direct_dependent_files + in + (* This is a much better estimate of what checked_files will be after the merge finishes. We now + * include the dependencies and dependents that are being implicitly included in the recheck. 
*) + will_be_checked_files := CheckedSet.union env.ServerEnv.checked_files to_merge; - (* clear out records of files, and names of modules provided by those files *) - let%lwt old_modules = with_timer_lwt ~options "ModuleClearFiles" profiling (fun () -> - Module_js.calc_old_modules workers ~all_providers_mutator ~options new_or_changed_or_deleted - ) in + let%lwt estimates = + restart_if_faster_than_recheck ~options ~env ~to_merge ~file_watcher_metadata + in + let%lwt () = ensure_parsed ~options ~profiling ~workers ~reader to_merge in + (* recheck *) + let%lwt ( updated_errors, + coverage, + merge_skip_count, + sig_new_or_changed, + top_cycle, + time_to_merge, + merge_internal_error ) = + merge + ~transaction + ~reader + ~options + ~profiling + ~workers + ~errors + ~coverage:env.ServerEnv.coverage + ~to_merge + ~components + ~recheck_set + ~dependency_graph + ~deleted + ~unparsed_set + ~persistent_connections:(Some env.ServerEnv.connections) + ~recheck_reasons + ~prep_merge: + (Some + (fun () -> + let n = FilenameSet.cardinal all_dependent_files in + if n > 0 then Hh_logger.info "remerge %d dependent files:" n; + + let _ = + FilenameSet.fold + (fun f i -> + Hh_logger.info "%d/%d: %s" i n (File_key.to_string f); + i + 1) + all_dependent_files + 1 + in + Hh_logger.info "Merge prep")) + in + Option.iter merge_internal_error ~f:(Hh_logger.error "%s"); + + let merged_files = to_merge in + let%lwt ( errors, + coverage, + time_to_check_merged, + check_skip_count, + slowest_file, + num_slow_files, + check_internal_error ) = + check_files + ~reader + ~options + ~profiling + ~workers + ~errors + ~updated_errors + ~coverage + ~merged_files + ~direct_dependent_files + ~sig_new_or_changed + ~dependency_info + ~persistent_connections:(Some env.ServerEnv.connections) + ~recheck_reasons + ~cannot_skip_direct_dependents + in + Option.iter check_internal_error ~f:(Hh_logger.error "%s"); - MonitorRPC.status_update ServerStatus.Resolving_dependencies_progress; - let%lwt changed_modules, errors = - commit_modules_and_resolve_requires + let%lwt () = + Recheck_stats.record_recheck_time + ~options + ~total_time:(time_to_merge +. time_to_check_merged) + ~rechecked_files:(CheckedSet.cardinal merged_files) + in + let checked_files = CheckedSet.union unchanged_checked merged_files in + Hh_logger.info "Checked set: %s" (CheckedSet.debug_counts_to_string checked_files); + + (* NOTE: unused fields are left in their initial empty state *) + env.ServerEnv.collated_errors := None; + Lwt.return + ( { env with ServerEnv.checked_files; errors; coverage }, + { + new_or_changed; + deleted; + all_dependent_files; + top_cycle; + merge_skip_count; + check_skip_count; + slowest_file; + num_slow_files; + estimates; + }, + Option.first_some merge_internal_error check_internal_error ) + + (* We maintain the following invariant across rechecks: The set of `files` contains files that + * parsed successfully in the previous phase (which could be the init phase or a previous recheck + * phase). + * + * This function has been split into two parts. This is because lazy saved state init needs to + * update the dependency graph for files which have changed since the saved state was generated, but + * doesn't want to merge those files yet. 
+ *) + let full + ~profiling ~transaction - ~all_providers_mutator + ~reader ~options + ~workers + ~updates + ~files_to_force + ~file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files + ~env = + let%lwt (env, intermediate_values) = + recheck_parse_and_update_dependency_info + ~profiling + ~transaction + ~reader + ~options + ~workers + ~updates + ~files_to_force + ~recheck_reasons + ~env + in + recheck_merge ~profiling + ~transaction + ~reader + ~options ~workers - ~old_modules - ~parsed_set:freshparsed - ~unparsed - ~unparsed_set - ~new_or_changed - ~deleted - ~errors - ~is_init:false in - - let parsed = FilenameSet.union freshparsed unchanged in - - (* direct_dependent_files are unchanged files which directly depend on changed modules, - or are new / changed files that are phantom dependents. dependent_files are - direct_dependent_files plus their dependents (transitive closure) *) - let%lwt all_dependent_files, direct_dependent_files = - with_timer_lwt ~options "DependentFiles" profiling (fun () -> - Dep_service.dependent_files - workers - ~unchanged - ~new_or_changed - ~changed_modules - ) in - - Hh_logger.info "Re-resolving directly dependent files"; + ~will_be_checked_files + ~file_watcher_metadata + ~intermediate_values + ~recheck_reasons + ~env - let node_modules_containers = !Files.node_modules_containers in - (* requires in direct_dependent_files must be re-resolved before merging. *) - let mutator = Module_heaps.Resolved_requires_mutator.create transaction direct_dependent_files in - let%lwt () = with_timer_lwt ~options "ReresolveDirectDependents" profiling (fun () -> - MultiWorkerLwt.call workers - ~job: (fun () files -> - List.iter (fun filename -> - let errors = Module_js.add_parsed_resolved_requires filename - ~mutator ~options ~node_modules_containers in - ignore errors (* TODO: why, FFS, why? *) - ) files - ) - ~neutral: () - ~merge: (fun () () -> ()) - ~next:(MultiWorkerLwt.next workers (FilenameSet.elements direct_dependent_files)) - ) in - - Hh_logger.info "Recalculating dependency graph"; - let%lwt dependency_graph = with_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> - let%lwt updated_dependency_graph = Dep_service.calc_partial_dependency_graph workers - (FilenameSet.union freshparsed direct_dependent_files) ~parsed in - let old_dependency_graph = env.ServerEnv.dependency_graph in - old_dependency_graph - |> FilenameSet.fold FilenameMap.remove deleted - |> FilenameMap.union updated_dependency_graph - |> Lwt.return - ) in - let%lwt updated_checked_files, all_dependent_files = - with_timer_lwt ~options "RecalcDepGraph" profiling (fun () -> - match Options.lazy_mode options with - | None (* Non lazy mode treats every file as focused. *) - | Some Options.LAZY_MODE_FILESYSTEM -> (* FS mode treats every modified file as focused *) - let old_focus_targets = CheckedSet.focused env.ServerEnv.checked_files in - let old_focus_targets = FilenameSet.diff old_focus_targets deleted in - let old_focus_targets = FilenameSet.diff old_focus_targets unparsed_set in - let parsed = FilenameSet.union old_focus_targets freshparsed in - let%lwt updated_checked_files = unfocused_files_to_infer - ~options ~parsed ~dependency_graph in - Lwt.return (updated_checked_files, all_dependent_files) - | Some Options.LAZY_MODE_IDE -> (* IDE mode only treats opened files as focused *) - (* Unfortunately, our checked_files set might be out of date. This update could have added - * some new dependents or dependencies. So we need to recalculate those. 
- * - * To calculate dependents and dependencies, we need to know what are the focused files. We - * define the focused files to be the union of - * - * 1. The files that were previously focused - * 2. Modified files that are currently open in the IDE - * 3. If this is a `flow force-recheck --focus A.js B.js C.js`, then A.js, B.js and C.js - * - * Remember that the IDE might open a new file or keep open a deleted file, so the focused - * set might be missing that file. If that file reappears, we must remember to refocus on - * it. - **) - let old_focused = CheckedSet.focused env.ServerEnv.checked_files in - let new_focused = - filter_out_node_modules ~options (FilenameSet.inter files_to_focus freshparsed) - in - - let open_in_ide = - let opened_files = Persistent_connection.get_opened_files env.ServerEnv.connections in - FilenameSet.filter (function - | File_key.SourceFile fn - | File_key.LibFile fn - | File_key.JsonFile fn - | File_key.ResourceFile fn -> SSet.mem fn opened_files - | File_key.Builtins -> false - ) freshparsed - in - let focused = old_focused - |> FilenameSet.union open_in_ide - |> FilenameSet.union new_focused in - let%lwt updated_checked_files = focused_files_to_infer ~focused ~dependency_graph in - - (* It's possible that all_dependent_files contains foo.js, which is a dependent of a - * dependency. That's fine if foo.js is in the checked set. But if it's just some random - * other dependent then we need to filter it out. - *) - let all_dependent_files = - FilenameSet.inter all_dependent_files (CheckedSet.all updated_checked_files) in - Lwt.return (updated_checked_files, all_dependent_files) - ) - in - - let infer_input = - CheckedSet.filter ~f:(fun fn -> FilenameSet.mem fn freshparsed) updated_checked_files in - - let%lwt to_merge, components, recheck_map = - include_dependencies_and_dependents - ~options ~profiling ~unchanged_checked ~infer_input ~dependency_graph ~all_dependent_files - ~direct_dependent_files - in + let parse_and_update_dependency_info + ~profiling + ~transaction + ~reader + ~options + ~workers + ~updates + ~files_to_force + ~recheck_reasons + ~env = + let%lwt (env, _intermediate_values) = + recheck_parse_and_update_dependency_info + ~profiling + ~transaction + ~reader + ~options + ~workers + ~updates + ~files_to_force + ~recheck_reasons + ~env + in + Lwt.return env +end - let%lwt () = ensure_parsed ~options ~profiling ~workers to_merge in +let with_transaction f = + Transaction.with_transaction + @@ fun transaction -> + let reader = Mutator_state_reader.create transaction in + f transaction reader - (* recheck *) - let%lwt checked, cycle_leaders, errors, skipped_count = merge - ~transaction +let recheck ~options - ~profiling ~workers - ~errors - ~unchanged_checked - ~to_merge - ~components - ~recheck_map - ~dependency_graph - ~deleted - ~persistent_connections:(Some env.ServerEnv.connections) - ~prep_merge:(Some (fun _to_merge -> - (* need to merge the closure of inferred files and their deps *) - - let n = FilenameSet.cardinal all_dependent_files in - if n > 0 - then Hh_logger.info "remerge %d dependent files:" n; - - let _ = FilenameSet.fold (fun f i -> - Hh_logger.info "%d/%d: %s" i n (File_key.to_string f); - i + 1 - ) all_dependent_files 1 in - Hh_logger.info "Merge prep"; - - (* merge errors for unchanged dependents will be cleared lazily *) - - (* to_merge is inferred files plus all dependents. prep for re-merge *) - (* NOTE: Non-@flow files don't have entries in ResolvedRequiresHeap, so - don't add then to the set of files to merge! 
Only inferred files (along - with dependents) should be merged: see below. *) - (* let _to_merge = CheckedSet.add ~dependents:all_dependent_files inferred in *) - () - )) - in - - (* Here's how to update unparsed: - * 1. Remove the parsed files. This removes any file which used to be unparsed but is now parsed - * 2. Remove the deleted files. This removes any previously unparsed file which was deleted - * 3. Add the newly unparsed files. This adds new unparsed files or files which became unparsed *) - let unparsed = - let to_remove = FilenameSet.union parsed deleted in - FilenameSet.diff env.ServerEnv.unparsed to_remove - |> FilenameSet.union unparsed_set - in - - (* NOTE: unused fields are left in their initial empty state *) - env.ServerEnv.collated_errors := None; - Lwt.return ( - ({ env with ServerEnv. - files = parsed; - unparsed; - dependency_graph; - checked_files = checked; - errors; - }, - (new_or_changed, deleted, all_dependent_files, cycle_leaders, skipped_count)) - ) - -let recheck ~options ~workers ~updates env ~files_to_focus = + ~updates + env + ~files_to_force + ~file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files = let should_print_summary = Options.should_profile options in - let%lwt profiling, (env, (modified, deleted, dependent_files, cycle_leaders, skipped_count)) = + let%lwt (profiling, (env, stats, first_internal_error)) = Profiling_js.with_profiling_lwt ~label:"Recheck" ~should_print_summary (fun profiling -> - SharedMem_js.with_memory_profiling_lwt ~profiling ~collect_at_end:true (fun () -> - Transaction.with_transaction (fun transaction -> - recheck_with_profiling - ~profiling ~transaction ~options ~workers ~updates env ~files_to_focus - ) - ) - ) + SharedMem_js.with_memory_profiling_lwt ~profiling ~collect_at_end:true (fun () -> + with_transaction (fun transaction reader -> + Recheck.full + ~profiling + ~transaction + ~reader + ~options + ~workers + ~updates + ~env + ~files_to_force + ~file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files))) + in + let { + Recheck.new_or_changed = modified; + deleted; + all_dependent_files = dependent_files; + top_cycle; + merge_skip_count; + check_skip_count; + slowest_file; + num_slow_files; + estimates; + } = + stats in - (** TODO: update log to reflect current terminology **) - FlowEventLogger.recheck ~modified ~deleted ~dependent_files ~profiling ~skipped_count; + let ( estimated_time_to_recheck, + estimated_time_to_restart, + estimated_time_to_init, + estimated_time_per_file, + estimated_files_to_recheck, + estimated_files_to_init ) = + Option.value_map + estimates + ~default:(None, None, None, None, None, None) + ~f:(fun { + Recheck_stats.estimated_time_to_recheck; + estimated_time_to_restart; + estimated_time_to_init; + estimated_time_per_file; + estimated_files_to_recheck; + estimated_files_to_init; + } + -> + ( Some estimated_time_to_recheck, + Some estimated_time_to_restart, + Some estimated_time_to_init, + Some estimated_time_per_file, + Some estimated_files_to_recheck, + Some estimated_files_to_init )) + in + (* TODO: update log to reflect current terminology **) + FlowEventLogger.recheck + ~recheck_reasons:(List.map LspProt.verbose_string_of_recheck_reason recheck_reasons) + ~modified + ~deleted + ~dependent_files + ~profiling + ~merge_skip_count + ~check_skip_count + ~estimated_time_to_recheck + ~estimated_time_to_restart + ~estimated_time_to_init + ~estimated_time_per_file + ~estimated_files_to_recheck + ~estimated_files_to_init + ~slowest_file + ~num_slow_files + ~first_internal_error + 
~scm_update_distance:file_watcher_metadata.MonitorProt.total_update_distance + ~scm_changed_mergebase:file_watcher_metadata.MonitorProt.changed_mergebase; let duration = Profiling_js.get_profiling_duration profiling in let dependent_file_count = Utils_js.FilenameSet.cardinal dependent_files in - let changed_file_count = (Utils_js.FilenameSet.cardinal modified) - + (Utils_js.FilenameSet.cardinal deleted) in - let top_cycle = Core_list.fold cycle_leaders ~init:None ~f:(fun top (f2, count2) -> - match top with - | Some (f1, count1) -> if f2 > f1 then Some (f2, count2) else Some (f1, count1) - | None -> Some (f2, count2)) in - let summary = ServerStatus.({ - duration; - info = RecheckSummary {dependent_file_count; changed_file_count; top_cycle}; }) in + let changed_file_count = + Utils_js.FilenameSet.cardinal modified + Utils_js.FilenameSet.cardinal deleted + in + let summary = + ServerStatus. + { duration; info = RecheckSummary { dependent_file_count; changed_file_count; top_cycle } } + in Lwt.return (profiling, summary, env) (* creates a closure that lists all files in the given root, returned in chunks *) let make_next_files ~libs ~file_options root = let make_next_raw = - Files.make_next_files ~root ~all:false ~subdir:None ~options:file_options ~libs in + Files.make_next_files ~root ~all:false ~subdir:None ~options:file_options ~libs + in let total = ref 0 in fun () -> let files = make_next_raw () in - let finished = !total in let length = List.length files in - MonitorRPC.status_update ServerStatus.(Parsing_progress { - finished; - total = None; - }); + MonitorRPC.status_update ServerStatus.(Parsing_progress { finished; total = None }); total := finished + length; - files - |> List.map (Files.filename_from_string ~options:file_options) - |> Bucket.of_list + files |> Core_list.map ~f:(Files.filename_from_string ~options:file_options) |> Bucket.of_list -let init_from_saved_state ~profiling ~workers ~saved_state options = - Transaction.with_transaction @@ fun transaction -> +let mk_init_env ~files ~unparsed ~dependency_info ~ordered_libs ~libs ~errors ~coverage = + { + ServerEnv.files; + unparsed; + dependency_info; + checked_files = CheckedSet.empty; + ordered_libs; + libs; + errors; + coverage; + collated_errors = ref None; + connections = Persistent_connection.empty; + } +let init_from_saved_state ~profiling ~workers ~saved_state options = + with_transaction + @@ fun transaction reader -> let file_options = Options.file_options options in (* We don't want to walk the file system for the checked in files. But we still need to find the * flowlibs *) - let ordered_flowlib_libs, _ = Files.init ~flowlibs_only:true file_options in - - let { Saved_state. - flowconfig_hash=_; + let (ordered_flowlib_libs, _) = Files.init ~flowlibs_only:true file_options in + let { + Saved_state.flowconfig_hash = _; parsed_heaps; unparsed_heaps; ordered_non_flowlib_libs; local_errors; + warnings; + coverage; node_modules_containers; - } = saved_state in - + } = + saved_state + in Files.node_modules_containers := node_modules_containers; Hh_logger.info "Restoring heaps"; - let%lwt () = with_timer_lwt ~options "RestoreHeaps" profiling (fun () -> - let%lwt () = MultiWorkerLwt.call workers - ~job:(List.fold_left (fun () (fn, parsed_file_data) -> - (* Every package.json file should have a Package_json.t. 
Use those to restore the - * PackageHeap and the ReversePackageHeap *) - begin match fn with - | File_key.JsonFile str when Filename.basename str = "package.json" -> - begin match parsed_file_data.Saved_state.package with - | None -> failwith (Printf.sprintf "Saved state for `%s` missing Package_json.t data" str) - | Some package -> Module_heaps.Package_heap_mutator.add_package_json str package - end - | _ -> () - end; - - (* Restore the FileSigHeap *) - Parsing_heaps.From_saved_state.add_file_sig fn parsed_file_data.Saved_state.file_sig; - - (* Restore the FileHashHeap *) - Parsing_heaps.From_saved_state.add_file_hash fn parsed_file_data.Saved_state.hash; - - (* Restore the ResolvedRequiresHeap *) - Module_heaps.From_saved_state.add_resolved_requires - fn parsed_file_data.Saved_state.resolved_requires - )) - ~merge:(fun () () -> ()) - ~neutral:() - ~next:(MultiWorkerLwt.next workers (FilenameMap.bindings parsed_heaps)) - in - - MultiWorkerLwt.call workers - ~job:(List.fold_left (fun () (fn, unparsed_file_data) -> - (* Restore the FileHashHeap *) - let hash = unparsed_file_data.Saved_state.unparsed_hash in - Parsing_heaps.From_saved_state.add_file_hash fn hash; - )) - ~merge:(fun () () -> ()) - ~neutral:() - ~next:(MultiWorkerLwt.next workers (FilenameMap.bindings unparsed_heaps)) - ) in - + let%lwt () = + with_timer_lwt ~options "RestoreHeaps" profiling (fun () -> + let root = Options.root options |> Path.to_string in + let%lwt () = + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun () (fn, parsed_file_data) -> + let { Saved_state.package; file_sig; hash; resolved_requires } = + Saved_state.denormalize_parsed_data + ~root + parsed_file_data.Saved_state.normalized_file_data + in + (* Every package.json file should have a Package_json.t. Use those to restore the + * PackageHeap and the ReversePackageHeap *) + begin + match fn with + | File_key.JsonFile str when Filename.basename str = "package.json" -> + begin + match package with + | None -> + failwith + (Printf.sprintf "Saved state for `%s` missing Package_json.t data" str) + | Some package -> + Module_heaps.Package_heap_mutator.add_package_json str package + end + | _ -> () + end; + + (* Restore the FileSigHeap *) + Parsing_heaps.From_saved_state.add_file_sig fn file_sig; + + (* Restore the FileHashHeap *) + Parsing_heaps.From_saved_state.add_file_hash fn hash; + + (* Restore the ResolvedRequiresHeap *) + Module_heaps.From_saved_state.add_resolved_requires fn resolved_requires)) + ~merge:(fun () () -> ()) + ~neutral:() + ~next:(MultiWorkerLwt.next workers (FilenameMap.bindings parsed_heaps)) + in + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun () (fn, unparsed_file_data) -> + (* Restore the FileHashHeap *) + let hash = unparsed_file_data.Saved_state.unparsed_hash in + Parsing_heaps.From_saved_state.add_file_hash fn hash)) + ~merge:(fun () () -> ()) + ~neutral:() + ~next:(MultiWorkerLwt.next workers (FilenameMap.bindings unparsed_heaps))) + in Hh_logger.info "Loading libraries"; + (* We actually parse and typecheck the libraries, even though we're loading from saved state. * We'd need to check them anyway, as soon as any file is checked, since we don't track * dependents for libraries. 
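Note on the RestoreHeaps step above: it calls Saved_state.denormalize_parsed_data ~root on each record before writing it back into the heaps. A minimal sketch of the underlying relative/absolute path round-trip, using stand-in helpers rather than Flow's actual Files/File_key API (Unix-style paths assumed), is:

(* Stand-in helpers, not Flow's API: paths are stored relative to the project root when
   the saved state is written, and made absolute again on load, so a saved state built in
   one checkout can be reused from another path. Assumes Unix-style separators. *)
let normalize_path ~root path =
  let prefix = root ^ Filename.dir_sep in
  let plen = String.length prefix in
  if String.length path >= plen && String.sub path 0 plen = prefix then
    String.sub path plen (String.length path - plen)
  else
    path

let denormalize_path ~root rel = Filename.concat root rel

let () =
  let root = "/data/www" in
  let abs = "/data/www/src/foo.js" in
  let rel = normalize_path ~root abs in
  assert (rel = "src/foo.js");
  assert (denormalize_path ~root rel = abs)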
And we don't really support incrementally checking libraries @@ -1337,37 +2483,42 @@ let init_from_saved_state ~profiling ~workers ~saved_state options = *) let ordered_libs = List.rev_append (List.rev ordered_flowlib_libs) ordered_non_flowlib_libs in let libs = SSet.of_list ordered_libs in - - let%lwt libs_ok, local_errors, suppressions, severity_cover_set = + let%lwt (libs_ok, local_errors, warnings, suppressions) = let suppressions = Error_suppressions.empty in - let severity_cover_set = FilenameMap.empty in - init_libs ~options ~profiling ~local_errors ~suppressions ~severity_cover_set ordered_libs + init_libs ~options ~profiling ~local_errors ~warnings ~suppressions ~reader ordered_libs in - Hh_logger.info "Resolving dependencies"; MonitorRPC.status_update ServerStatus.Resolving_dependencies_progress; - let%lwt parsed_set, unparsed_set, all_files, parsed, unparsed = + let%lwt (parsed_set, unparsed_set, all_files, parsed, unparsed) = with_timer_lwt ~options "PrepareCommitModules" profiling (fun () -> - let parsed_set = parsed_heaps |> FilenameMap.keys |> FilenameSet.of_list in - let unparsed_set = unparsed_heaps |> FilenameMap.keys |> FilenameSet.of_list in - let all_files = FilenameSet.union parsed_set unparsed_set in - let parsed = FilenameMap.fold - (fun fn data acc -> (fn, data.Saved_state.info)::acc) parsed_heaps [] - in - let unparsed = FilenameMap.fold - (fun fn data acc -> (fn, data.Saved_state.unparsed_info)::acc) unparsed_heaps [] - in - Lwt.return (parsed_set, unparsed_set, all_files, parsed, unparsed) - ) + let (parsed, parsed_set) = + FilenameMap.fold + (fun fn data (parsed, parsed_set) -> + let parsed = (fn, data.Saved_state.info) :: parsed in + let parsed_set = FilenameSet.add fn parsed_set in + (parsed, parsed_set)) + parsed_heaps + ([], FilenameSet.empty) + in + let (unparsed, unparsed_set) = + FilenameMap.fold + (fun fn data (unparsed, unparsed_set) -> + let unparsed = (fn, data.Saved_state.unparsed_info) :: unparsed in + let unparsed_set = FilenameSet.add fn unparsed_set in + (unparsed, unparsed_set)) + unparsed_heaps + ([], FilenameSet.empty) + in + let all_files = FilenameSet.union parsed_set unparsed_set in + Lwt.return (parsed_set, unparsed_set, all_files, parsed, unparsed)) in - let all_providers_mutator = Module_hashtables.All_providers_mutator.create transaction in - (* This will restore InfoHeap, NameHeap, & all_providers hashtable *) let%lwt _ = commit_modules_from_saved_state ~transaction + ~reader ~all_providers_mutator ~options ~is_init:true @@ -1382,24 +2533,29 @@ let init_from_saved_state ~profiling ~workers ~saved_state options = ~local_errors ~new_or_changed:all_files in - - let errors = { ServerEnv. 
- local_errors; - merge_errors = FilenameMap.empty; - suppressions; - severity_cover_set; - } in - - let%lwt dependency_graph = with_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> - Dep_service.calc_dependency_graph workers ~parsed:parsed_set - ) in - - Lwt.return (parsed_set, unparsed_set, dependency_graph, ordered_libs, libs, libs_ok, errors) + let errors = + { ServerEnv.local_errors; merge_errors = FilenameMap.empty; warnings; suppressions } + in + let%lwt dependency_info = + with_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> + Dep_service.calc_dependency_info ~options ~reader workers ~parsed:parsed_set) + in + let env = + mk_init_env + ~files:parsed_set + ~unparsed:unparsed_set + ~dependency_info + ~ordered_libs + ~libs + ~errors + ~coverage + in + Lwt.return (env, libs_ok) let init ~profiling ~workers options = let file_options = Options.file_options options in - - Transaction.with_transaction @@ fun transaction -> + with_transaction + @@ fun transaction reader -> (* TODO - explicitly order the libs. * * Should we let the filesystem dictate the order that we merge libs? Are we sheep? No! We are @@ -1412,42 +2568,44 @@ let init ~profiling ~workers options = * However making this change is likely going to be a breaking change for people with conflicting * libraries *) - let ordered_libs, libs = Files.init file_options in + let (ordered_libs, libs) = Files.init file_options in let next_files = make_next_files ~libs ~file_options (Options.root options) in - Hh_logger.info "Parsing"; - let%lwt parsed, unparsed, unchanged, local_errors = - parse ~options ~profiling ~workers next_files in - + let%lwt (parsed, unparsed, unchanged, local_errors) = + parse ~options ~profiling ~workers ~reader next_files + in + (* Parsing won't raise warnings *) + let warnings = FilenameMap.empty in + (* Libdefs have no coverage *) + let coverage = FilenameMap.empty in assert (FilenameSet.is_empty unchanged); Hh_logger.info "Building package heap"; - let%lwt () = init_package_heap ~options ~profiling parsed in - + let%lwt package_errors = init_package_heap ~options ~profiling ~reader parsed in + let local_errors = merge_error_maps package_errors local_errors in Hh_logger.info "Loading libraries"; - let%lwt libs_ok, local_errors, suppressions, (severity_cover_set: ExactCover.lint_severity_cover Utils_js.FilenameMap.t) = + let%lwt (libs_ok, local_errors, warnings, suppressions) = let suppressions = Error_suppressions.empty in - let severity_cover_set = FilenameMap.empty in - init_libs ~options ~profiling ~local_errors ~suppressions ~severity_cover_set ordered_libs in - + init_libs ~options ~profiling ~local_errors ~warnings ~suppressions ~reader ordered_libs + in Hh_logger.info "Resolving dependencies"; MonitorRPC.status_update ServerStatus.Resolving_dependencies_progress; - let all_files, unparsed_set = List.fold_left (fun (all_files, unparsed_set) (filename, _) -> - FilenameSet.add filename all_files, (FilenameSet.add filename unparsed_set) - ) (parsed, FilenameSet.empty) unparsed in - + let (all_files, unparsed_set) = + List.fold_left + (fun (all_files, unparsed_set) (filename, _) -> + (FilenameSet.add filename all_files, FilenameSet.add filename unparsed_set)) + (parsed, FilenameSet.empty) + unparsed + in let all_providers_mutator = Module_hashtables.All_providers_mutator.create transaction in - - let%lwt _, errors = - let errors = { ServerEnv. 
- local_errors; - merge_errors = FilenameMap.empty; - suppressions; - severity_cover_set; - } in + let%lwt (_, _, errors) = + let errors = + { ServerEnv.local_errors; merge_errors = FilenameMap.empty; warnings; suppressions } + in commit_modules_and_resolve_requires ~transaction + ~reader ~all_providers_mutator ~options ~profiling @@ -1461,14 +2619,26 @@ let init ~profiling ~workers options = ~errors ~is_init:true in - let%lwt dependency_graph = with_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> - Dep_service.calc_dependency_graph workers ~parsed - ) in - Lwt.return (parsed, unparsed_set, dependency_graph, ordered_libs, libs, libs_ok, errors) + let%lwt dependency_info = + with_timer_lwt ~options "CalcDepsTypecheck" profiling (fun () -> + Dep_service.calc_dependency_info ~options ~reader workers ~parsed) + in + let env = + mk_init_env + ~files:parsed + ~unparsed:unparsed_set + ~dependency_info + ~ordered_libs + ~libs + ~errors + ~coverage + in + Lwt.return (FilenameSet.empty, env, libs_ok) (* Does a best-effort job to load a saved state. If it fails, returns None *) let load_saved_state ~profiling ~workers options = - let%lwt fetch_profiling, fetch_result = match Options.saved_state_fetcher options with + let%lwt (fetch_profiling, fetch_result) = + match Options.saved_state_fetcher options with | Options.Dummy_fetcher -> Saved_state_dummy_fetcher.fetch ~options | Options.Local_fetcher -> Saved_state_local_fetcher.fetch ~options | Options.Fb_fetcher -> Saved_state_fb_fetcher.fetch ~options @@ -1478,110 +2648,259 @@ let load_saved_state ~profiling ~workers options = | Saved_state_fetcher.No_saved_state -> Hh_logger.info "No saved state available"; Lwt.return_none - | Saved_state_fetcher.Saved_state { saved_state_filename; changed_files; } -> - with_timer_lwt ~options "LoadSavedState" profiling (fun () -> - let changed_files_count = SSet.cardinal changed_files in - try%lwt - let%lwt saved_state = Saved_state.load ~workers ~saved_state_filename ~options in - let updates = Recheck_updates.process_updates - ~options - ~libs:(SSet.of_list saved_state.Saved_state.ordered_non_flowlib_libs) - changed_files + | Saved_state_fetcher.Saved_state { saved_state_filename; changed_files } -> + let changed_files_count = SSet.cardinal changed_files in + (try%lwt + let%lwt (load_profiling, saved_state) = + Saved_state.load ~workers ~saved_state_filename ~options + in + Profiling_js.merge load_profiling profiling; + + let updates = + Recheck_updates.process_updates + ~options + ~libs:(SSet.of_list saved_state.Saved_state.ordered_non_flowlib_libs) + changed_files + in + let updates = + match updates with + | Core_result.Error { Recheck_updates.msg; _ } -> + Hh_logger.error "The saved state is no longer valid due to file changes: %s" msg; + raise Saved_state.(Invalid_saved_state Changed_files) + | Core_result.Ok updates -> updates + in + Hh_logger.info + "Saved state script reports %d files changed & we care about %d of them" + (SSet.cardinal changed_files) + (FilenameSet.cardinal updates); + FlowEventLogger.set_saved_state_filename (Path.to_string saved_state_filename); + FlowEventLogger.load_saved_state_success ~changed_files_count; + Lwt.return_some (saved_state, updates) + with Saved_state.Invalid_saved_state invalid_reason -> + let invalid_reason = Saved_state.invalid_reason_to_string invalid_reason in + FlowEventLogger.load_saved_state_error + ~saved_state_filename:(Path.to_string saved_state_filename) + ~changed_files_count + ~invalid_reason; + if Options.saved_state_no_fallback options then + 
let msg = spf "Failed to load saved state: %s" invalid_reason in + FlowExitStatus.exit ~msg FlowExitStatus.Invalid_saved_state + else + Lwt.return_none) + +let query_watchman_for_changed_files ~options = + match Options.lazy_mode options with + | Options.NON_LAZY_MODE + | Options.LAZY_MODE_FILESYSTEM + | Options.LAZY_MODE_IDE -> + Lwt.return (fun ~libs:_ -> Lwt.return FilenameSet.(empty, empty)) + | Options.LAZY_MODE_WATCHMAN -> + let init_settings = + { + (* We're not setting up a subscription, we're just sending a single query *) + Watchman_lwt.subscribe_mode = None; + (* Hack makes this configurable in their local config. Apparently buck & hgwatchman + * use 10 seconds. But I've seen 10s timeout, so let's not set a timeout. Instead we'll + * manually timeout later *) + init_timeout = Watchman_lwt.No_timeout; + expression_terms = Watchman_expression_terms.make ~options; + subscription_prefix = "flow_server_watcher"; + roots = Files.watched_paths (Options.file_options options); + debug_logging = Options.is_debug_mode options; + } + in + let%lwt watchman_env = Watchman_lwt.init init_settings () in + let%lwt changed_files = + match watchman_env with + | None -> + failwith "Failed to set up Watchman in order to get the changes since the mergebase" + | Some watchman_env -> + (* No timeout. We'll time this out ourselves after init if we need *) + let%lwt changed_files = + Watchman_lwt.(get_changes_since_mergebase ~timeout:No_timeout watchman_env) in - let updates = match updates with - | Core_result.Error ({ Recheck_updates.msg; _; }) -> - Hh_logger.error "The saved state is no longer valid due to file changes: %s" msg; - raise Saved_state.(Invalid_saved_state Changed_files) - | Core_result.Ok updates -> updates in - Hh_logger.info "Saved state script reports %d files changed & we care about %d of them" - (SSet.cardinal changed_files) - (FilenameSet.cardinal updates); - FlowEventLogger.set_saved_state_filename (Path.to_string saved_state_filename); - FlowEventLogger.load_saved_state_success ~changed_files_count; - Lwt.return_some (saved_state, updates) - with Saved_state.Invalid_saved_state invalid_reason -> - let invalid_reason = Saved_state.invalid_reason_to_string invalid_reason in - FlowEventLogger.load_saved_state_error - ~saved_state_filename:(Path.to_string saved_state_filename) - ~changed_files_count - ~invalid_reason; - if Options.saved_state_no_fallback options - then - let msg = - spf "Failed to load saved state: %s" invalid_reason - in - FlowExitStatus.exit ~msg FlowExitStatus.Invalid_saved_state - else Lwt.return_none - ) + let%lwt () = Watchman_lwt.close watchman_env in + Lwt.return (SSet.of_list changed_files) + in + Lwt.return (fun ~libs -> + let updates = + Recheck_updates.process_updates ~skip_incompatible:true ~options ~libs changed_files + in + match updates with + | Core_result.Error { Recheck_updates.msg; _ } -> + failwith + (Printf.sprintf "skip_incompatible was set to true, how did we manage to error? 
%S" msg) + | Core_result.Ok updates -> + Hh_logger.info + "Watchman reports %d files changed since mergebase & we care about %d of them" + (SSet.cardinal changed_files) + (FilenameSet.cardinal updates); + + (* We have to explicitly focus on these files, since we just parsed them and it will appear + * to the rechecker that they're unchanged *) + let files_to_focus = updates in + Lwt.return (updates, files_to_focus)) let init ~profiling ~workers options = - let%lwt updates, (parsed, unparsed, dependency_graph, ordered_libs, libs, libs_ok, errors) = + let start_time = Unix.gettimeofday () in + (* Don't wait for this thread yet. It will run in the background. Then, after init is done, + * we'll wait on it. We do this because we want to send the status update that we're waiting for + * Watchman if init is done but Watchman is not *) + let get_watchman_updates_thread = query_watchman_for_changed_files ~options in + let%lwt (updates, env, libs_ok) = match%lwt load_saved_state ~profiling ~workers options with | None -> (* Either there is no saved state or we failed to load it for some reason *) - let%lwt init_ret = init ~profiling ~workers options in - Lwt.return (FilenameSet.empty, init_ret) + init ~profiling ~workers options | Some (saved_state, updates) -> (* We loaded a saved state successfully! We are awesome! *) - let%lwt init_ret = init_from_saved_state ~profiling ~workers ~saved_state options in - Lwt.return (updates, init_ret) + let%lwt (env, libs_ok) = init_from_saved_state ~profiling ~workers ~saved_state options in + let should_force_recheck = Options.saved_state_force_recheck options in + (* We know that all the files in updates have changed since the saved state was generated. We + * have two ways to deal with them: *) + if Options.lazy_mode options = Options.NON_LAZY_MODE || should_force_recheck then + (* In non-lazy mode, we return updates here. They will immediately be rechecked. Due to + * fanout, this can be a huge recheck, but it's sound. + * + * We'll also hit this code path in lazy modes if the user has passed + * --saved-state-force-recheck. These users want to force Flow to recheck all the files that + * have changed since the saved state was generated*) + Lwt.return (updates, env, libs_ok) + else + (* In lazy mode, we try to avoid the fanout problem. All we really want to do in lazy mode + * is to update the dependency graph and stuff like that. We don't actually want to merge + * anything yet. *) + with_transaction + @@ fun transaction reader -> + let recheck_reasons = [LspProt.Lazy_init_update_deps] in + let%lwt env = + Recheck.parse_and_update_dependency_info + ~profiling + ~transaction + ~reader + ~options + ~workers + ~updates + ~files_to_force:CheckedSet.empty + ~recheck_reasons + ~env + in + Lwt.return (FilenameSet.empty, env, libs_ok) + in + let%lwt (updates, files_to_focus) = + let now = Unix.gettimeofday () in + (* Let's give Watchman another 15 seconds to finish. *) + let timeout = 15.0 in + let deadline = now +. timeout in + MonitorRPC.status_update ~event:(ServerStatus.Watchman_wait_start deadline); + let%lwt (watchman_updates, files_to_focus) = + try%lwt + Lwt_unix.with_timeout timeout + @@ fun () -> + let%lwt get_watchman_updates = get_watchman_updates_thread in + get_watchman_updates ~libs:env.ServerEnv.libs + with Lwt_unix.Timeout -> + let msg = + Printf.sprintf + "Timed out after %ds waiting for Watchman." + (Unix.gettimeofday () -. 
start_time |> int_of_float) + in + FlowExitStatus.(exit ~msg Watchman_error) + in + Lwt.return (FilenameSet.union updates watchman_updates, files_to_focus) + in + let init_time = Unix.gettimeofday () -. start_time in + let%lwt last_estimates = + Recheck_stats.init ~options ~init_time ~parsed_count:(FilenameSet.cardinal env.ServerEnv.files) in - - let env = { ServerEnv. - files = parsed; - unparsed; - dependency_graph; - checked_files = CheckedSet.empty; - ordered_libs; - libs; - errors; - collated_errors = ref None; - connections = Persistent_connection.empty; - } in - (* Don't recheck if the libs are not ok *) - if FilenameSet.is_empty updates || not libs_ok - then Lwt.return (libs_ok, env) - else begin - let%lwt recheck_profiling, _summary, env = - recheck ~options ~workers ~updates env ~files_to_focus:FilenameSet.empty + if (FilenameSet.is_empty updates && FilenameSet.is_empty files_to_focus) || not libs_ok then + Lwt.return (libs_ok, env, last_estimates) + else + let files_to_force = CheckedSet.(add ~focused:files_to_focus empty) in + let recheck_reasons = [LspProt.Lazy_init_typecheck] in + let%lwt (recheck_profiling, _summary, env) = + recheck + ~options + ~workers + ~updates + env + ~files_to_force + ~file_watcher_metadata:MonitorProt.empty_file_watcher_metadata + ~recheck_reasons + ~will_be_checked_files:(ref files_to_force) in Profiling_js.merge ~from:recheck_profiling ~into:profiling; - Lwt.return (true, env) - end + Lwt.return (true, env, last_estimates) -let full_check ~profiling ~options ~workers ~focus_targets env = - let { ServerEnv.files = parsed; dependency_graph; errors; _; } = env in - let%lwt (checked_files, _, errors, _skipped_count) = Transaction.with_transaction (fun transaction -> - let%lwt infer_input = files_to_infer - ~options ~focused:focus_targets ~profiling ~parsed ~dependency_graph in +let full_check ~profiling ~options ~workers ?focus_targets env = + let { ServerEnv.files = parsed; dependency_info; errors; _ } = env in + with_transaction (fun transaction reader -> + let%lwt (input, all_dependent_files) = + files_to_infer ~options ~reader ?focus_targets ~profiling ~parsed ~dependency_info + in + let all_dependency_graph = Dependency_info.all_dependency_graph dependency_info in + let dependency_graph = Dependency_info.dependency_graph dependency_info in + let%lwt (to_merge, components, recheck_set) = + include_dependencies_and_dependents + ~options + ~profiling + ~unchanged_checked:CheckedSet.empty + ~input + ~all_dependency_graph + ~dependency_graph + ~all_dependent_files + in + (* The values to_merge and recheck_set are essentially the same as input, aggregated. This + is not surprising because files_to_infer returns a closed checked set. Thus, the only purpose + of calling include_dependencies_and_dependents is to compute components. 
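A simplified, self-contained sketch of the closed-set idea behind files_to_infer and include_dependencies_and_dependents: starting from an input set, keep adding everything it depends on until a fixpoint. The real code also folds in dependents, splits the result into components, and works over CheckedSet rather than a plain string set; the module names below are local stand-ins, not Flow's utilities.

module StrSet = Set.Make (String)
module StrMap = Map.Make (String)

(* [deps] maps a file to the set of files it depends on directly. *)
let close_over_dependencies (deps : StrSet.t StrMap.t) (input : StrSet.t) : StrSet.t =
  let deps_of f =
    match StrMap.find_opt f deps with
    | Some s -> s
    | None -> StrSet.empty
  in
  let rec fixpoint acc =
    (* add the direct dependencies of everything currently in the set *)
    let next = StrSet.fold (fun f acc -> StrSet.union acc (deps_of f)) acc acc in
    if StrSet.equal next acc then acc else fixpoint next
  in
  fixpoint input

let () =
  let deps =
    StrMap.empty
    |> StrMap.add "a.js" (StrSet.of_list ["b.js"])
    |> StrMap.add "b.js" (StrSet.of_list ["c.js"])
  in
  let closed = close_over_dependencies deps (StrSet.singleton "a.js") in
  assert (StrSet.equal closed (StrSet.of_list ["a.js"; "b.js"; "c.js"]))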
*) + let%lwt () = ensure_parsed ~options ~profiling ~workers ~reader to_merge in + let dependency_graph = Dependency_info.dependency_graph dependency_info in + let recheck_reasons = [LspProt.Full_init] in + let%lwt (updated_errors, coverage, _, sig_new_or_changed, _, _, merge_internal_error) = + merge + ~transaction + ~reader + ~options + ~profiling + ~workers + ~errors + ~coverage:env.ServerEnv.coverage + ~to_merge + ~components + ~recheck_set + ~dependency_graph + ~deleted:FilenameSet.empty + ~unparsed_set:FilenameSet.empty + ~persistent_connections:None + ~recheck_reasons + ~prep_merge:None + in + Option.iter merge_internal_error ~f:(Hh_logger.error "%s"); - let unchanged_checked = CheckedSet.empty in - let%lwt to_merge, components, recheck_map = - include_dependencies_and_dependents - ~options ~profiling - ~unchanged_checked - ~infer_input - ~dependency_graph - ~all_dependent_files:FilenameSet.empty - ~direct_dependent_files:FilenameSet.empty - in + let merged_files = to_merge in + let%lwt (errors, coverage, _, _, _, _, check_internal_error) = + check_files + ~reader + ~options + ~profiling + ~workers + ~errors + ~updated_errors + ~coverage + ~merged_files + ~direct_dependent_files:FilenameSet.empty + ~sig_new_or_changed + ~dependency_info + ~persistent_connections:None + ~recheck_reasons + ~cannot_skip_direct_dependents:true + in + Option.iter check_internal_error ~f:(Hh_logger.error "%s"); - let%lwt () = ensure_parsed ~options ~profiling ~workers to_merge in + let first_internal_error = Option.first_some merge_internal_error check_internal_error in + let checked_files = merged_files in + Hh_logger.info "Checked set: %s" (CheckedSet.debug_counts_to_string checked_files); + Lwt.return ({ env with ServerEnv.checked_files; errors; coverage }, first_internal_error)) - merge - ~transaction - ~options - ~profiling - ~workers - ~errors - ~unchanged_checked - ~to_merge - ~components - ~recheck_map - ~dependency_graph - ~deleted:FilenameSet.empty - ~persistent_connections:None - ~prep_merge:None - ) in - Lwt.return { env with ServerEnv.checked_files; errors } +let debug_determine_what_to_recheck = Recheck.determine_what_to_recheck diff --git a/src/services/inference/types_js.mli b/src/services/inference/types_js.mli index 43c51cc7d12..3719fbdb662 100644 --- a/src/services/inference/types_js.mli +++ b/src/services/inference/types_js.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
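The lazy-mode init earlier in this hunk hands the background Watchman thread a 15-second deadline via Lwt_unix.with_timeout and treats Lwt_unix.Timeout as fatal. A minimal sketch of that pattern follows; the names are illustrative, it returns a variant instead of exiting the process, and it needs lwt.unix plus the lwt_ppx preprocessor.

(* [get_updates] stands in for the real Watchman thread. The real code calls
   FlowExitStatus.exit on timeout; here we just report it. *)
let wait_with_deadline ~timeout get_updates =
  try%lwt
    let%lwt updates = Lwt_unix.with_timeout timeout get_updates in
    Lwt.return (`Ok updates)
  with Lwt_unix.Timeout -> Lwt.return `Timed_out

let () =
  let slow () =
    let%lwt () = Lwt_unix.sleep 1.0 in
    Lwt.return ["a.js"]
  in
  assert (Lwt_main.run (wait_with_deadline ~timeout:0.1 slow) = `Timed_out)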
@@ -7,13 +7,13 @@ open Utils_js -val init: +val init : profiling:Profiling_js.running -> workers:MultiWorkerLwt.worker list option -> Options.t -> - (bool (* libs_ok *) * ServerEnv.env) Lwt.t + (bool (* libs_ok *) * ServerEnv.env * Recheck_stats.estimates option) Lwt.t -val calc_deps: +val calc_deps : options:Options.t -> profiling:Profiling_js.running -> dependency_graph:FilenameSet.t FilenameMap.t -> @@ -22,55 +22,81 @@ val calc_deps: (FilenameSet.t FilenameMap.t * File_key.t Nel.t FilenameMap.t) Lwt.t (* incremental typecheck entry point *) -val recheck: +val recheck : options:Options.t -> workers:MultiWorkerLwt.worker list option -> updates:FilenameSet.t -> ServerEnv.env -> - files_to_focus:FilenameSet.t -> + files_to_force:CheckedSet.t -> + file_watcher_metadata:MonitorProt.file_watcher_metadata -> + recheck_reasons:LspProt.recheck_reason list -> + will_be_checked_files:CheckedSet.t ref -> (Profiling_js.finished * ServerStatus.summary * ServerEnv.env) Lwt.t (* initial (full) check *) -val full_check: +val full_check : profiling:Profiling_js.running -> options:Options.t -> workers:MultiWorkerLwt.worker list option -> - focus_targets:FilenameSet.t option -> + ?focus_targets:FilenameSet.t -> ServerEnv.env -> - ServerEnv.env Lwt.t + (ServerEnv.env * string option) Lwt.t -val basic_check_contents: - options: Options.t -> - workers: MultiWorkerLwt.worker list option -> - env: ServerEnv.env ref -> - profiling: Profiling_js.running -> - string -> (* contents *) - File_key.t -> (* fake file-/module name *) - (Context.t * - Docblock.t * - File_sig.t * - (Loc.t, Loc.t * Type.t) Flow_ast.program, - string) result Lwt.t +val basic_check_contents : + options:Options.t -> + env:ServerEnv.env -> + profiling:Profiling_js.running -> + string -> + (* contents *) + File_key.t -> + (* fake file-/module name *) + ( Context.t * Docblock.t * File_sig.With_Loc.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.program, + string ) + result + Lwt.t + +val typecheck_contents : + options:Options.t -> + env:ServerEnv.env -> + profiling:Profiling_js.running -> + string -> + (* contents *) + File_key.t -> + (* fake file-/module name *) + ( ( Context.t + * (Loc.t, Loc.t) Flow_ast.program + * File_sig.With_Loc.t + * (ALoc.t, ALoc.t * Type.t) Flow_ast.program ) + option + * Errors.ConcreteLocPrintableErrorSet.t + * (* errors *) + Errors.ConcreteLocPrintableErrorSet.t ) + Lwt.t -val typecheck_contents: - options: Options.t -> - workers: MultiWorkerLwt.worker list option -> - env: ServerEnv.env ref -> - profiling: Profiling_js.running -> - string -> (* contents *) - File_key.t -> (* fake file-/module name *) - ((Context.t * - (Loc.t, Loc.t) Flow_ast.program * - File_sig.t * - (Loc.t, Loc.t * Type.t) Flow_ast.program) option * - Errors.ErrorSet.t * (* errors *) - Errors.ErrorSet.t) Lwt.t (* warnings *) +(* warnings *) -val ensure_checked_dependencies: - options: Options.t -> - profiling: Profiling_js.running -> - workers: MultiWorkerLwt.worker list option -> - env: ServerEnv.env ref -> +val ensure_checked_dependencies : + options:Options.t -> + reader:State_reader.t -> + env:ServerEnv.env -> File_key.t -> - File_sig.t -> + File_sig.With_Loc.t -> unit Lwt.t + +(* Exposed only for testing purposes. Not meant for general consumption. 
*) +val debug_determine_what_to_recheck : + profiling:Profiling_js.running -> + options:Options.t -> + is_file_checked:(File_key.t -> bool) -> + ide_open_files:SSet.t Lazy.t -> + dependency_graph:FilenameSet.t FilenameMap.t -> + all_dependency_graph:FilenameSet.t FilenameMap.t -> + checked_files:CheckedSet.t -> + freshparsed:FilenameSet.t -> + unparsed_set:FilenameSet.t -> + deleted:FilenameSet.t -> + unchanged_checked:CheckedSet.t -> + files_to_force:CheckedSet.t -> + unchanged_files_to_force:CheckedSet.t -> + direct_dependent_files:FilenameSet.t -> + (CheckedSet.t * File_key.t Nel.t list * FilenameSet.t * FilenameSet.t) Lwt.t diff --git a/src/services/port/comments_js.ml b/src/services/port/comments_js.ml deleted file mode 100644 index f24897f3099..00000000000 --- a/src/services/port/comments_js.ml +++ /dev/null @@ -1,321 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -module Ast = Flow_ast - -open Utils_js - -(**************************) - -let unwrap_comment = Ast.Comment.(function - loc, Block str | loc, Line str -> (loc, str) -) - -let add_comment start _end cloc cstr cmap = - let span = { Loc.none with Loc.start; _end } in - SpanMap.add span (cloc, cstr) cmap - -let make_comment_map progspan = function - | [] -> SpanMap.empty - | comment :: comments -> Loc.( - let (lastloc, laststr), map = List.fold_left ( - fun ((prevloc, prevstr), map) c -> - let nextloc, nextstr = unwrap_comment c in - let map = add_comment prevloc.start nextloc.start - prevloc prevstr map in - (nextloc, nextstr), map - ) (unwrap_comment comment, SpanMap.empty) comments - in - (* last comment *) - add_comment lastloc.start progspan._end lastloc laststr map - ) - -(*****************) -(* dynamic types *) -(*****************) - -type dynamic = - | ParamD of string * string - | RetD of string - | TypeD of Loc.t * string - -let rec parse_docblock loc = function - | [t] -> - (try - let t = parse_dynamic_type t in - [TypeD (loc, t)] - with _ -> []) - - | "@param" :: t :: x :: xs -> - (try - let t = parse_dynamic_type t in - [ParamD (x, t)] - with _ -> []) @ - (parse_docblock loc xs) - - | "@return" :: t :: xs -> - (try - let t = parse_dynamic_type t in - [RetD t] - with _ -> []) @ - (parse_docblock loc xs) - - | _ :: xs -> parse_docblock loc xs - - | [] -> [] - -and parse_dynamic_type_ i s = match s with - - | _ when Str.string_match (Str.regexp "array<") s i -> - let t,i = parse_dynamic_type_ (Str.match_end()) s in - assert (Str.string_match (Str.regexp ">") s i); - spf "Array<%s>" t, Str.match_end() - - | _ when Str.string_match (Str.regexp "function():") s i -> - let t,i = parse_dynamic_type_ i s in - spf "() => %s" t, i - - | _ when Str.string_match (Str.regexp "function(") s i -> - let ts,i = parse_dynamic_types [] (Str.match_end()) "):" "," s in - let ts = ts |> List.rev_map ((^) "_: ") |> String.concat ", " in - let t,i = parse_dynamic_type_ i s in - spf "(%s) => %s" ts t, i - - | _ when Str.string_match (Str.regexp "?") s i -> - let t,i = parse_dynamic_type_ (Str.match_end()) s in - spf "?%s" t, i - - | _ when Str.string_match (Str.regexp "{") s i -> - let t,i = parse_dynamic_type_ (Str.match_end()) s in - assert (Str.string_match (Str.regexp "}") s i); - t, Str.match_end() - - | _ when Str.string_match (Str.regexp "array") s i -> - "Array", Str.match_end() - | _ when Str.string_match (Str.regexp "boolean") s i -> - "boolean", Str.match_end() - | _ when 
Str.string_match (Str.regexp "date") s i -> - "Date", Str.match_end() - | _ when Str.string_match (Str.regexp "function") s i -> - "(_: ...Array) => any", Str.match_end() - | _ when Str.string_match (Str.regexp "number") s i -> - "number", Str.match_end() - | _ when Str.string_match (Str.regexp "object") s i -> - "{}", Str.match_end() - | _ when Str.string_match (Str.regexp "RegExp") s i -> - "RegExp", Str.match_end() - | _ when Str.string_match (Str.regexp "string") s i -> - "string", Str.match_end() - | _ when Str.string_match (Str.regexp "*") s i -> - "any", Str.match_end() - | _ when Str.string_match (Str.regexp "[A-Za-z$_][A-Za-z$_0-9]*") s i -> - Str.matched_string s, Str.match_end() - - | _ -> assert false - -and parse_dynamic_types ts i close sep s = - let t,i = parse_dynamic_type_ i s in - match s with - | _ when Str.string_match (Str.regexp close) s i -> - t::ts, Str.match_end() - - | _ when Str.string_match (Str.regexp sep) s i -> - parse_dynamic_types (t::ts) (Str.match_end()) close sep s - - | _ -> assert false - -and parse_dynamic_type s = - let ts,i = parse_dynamic_types [] 0 "$" "|" s in - assert (i = String.length s); - match ts with - | [t] -> t - | _ -> ts |> List.rev |> String.concat " | " - -(* if there is a comment whose scope spans the given location, - return the map with that comment removed, and parsed type annos - *) -let mk_comment cmap loc = - match SpanMap.get loc cmap with - | Some (loc, cstr) -> - let words = Str.split (Str.regexp "[ \t\n\\*/]+") cstr in - SpanMap.remove loc cmap, parse_docblock loc words - | _ -> - cmap, [] - -let meta_fun cmap loc = - let cmap, annos = mk_comment cmap loc in - let tmap = List.fold_left (fun map -> function - | ParamD (x,t) -> map |> SMap.add x t - | RetD t -> map |> SMap.add (Reason.internal_name "return") t - | _ -> map - ) SMap.empty annos in - cmap, tmap - -let insert_before_with_suffix loc t suffix = Loc.( - loc.start.line, loc.start.column, spf ": %s%s" t suffix -) - -let insert_after loc t = Loc.( - loc._end.line, loc._end.column, spf ": %s" t -) - -let skip loc = Loc.( - (* NOTE: leave multi-line comments alone for now *) - let n = if loc.start.line <> loc._end.line then 0 - else loc._end.column - loc.start.column in - loc.start.line, loc.start.column, string_of_int n -) - -let meta_params params map cmap = - ListUtils.concat_fold Ast.Pattern.(fun cmap -> function - | nloc, Identifier { Ast.Pattern.Identifier.name = (_, name); _ } -> ( - match SMap.get name map with - | Some t -> cmap, [insert_after nloc t] - | None -> - let cmap, annos = mk_comment cmap nloc in - cmap, match annos with - | [TypeD (cloc, ctype)] -> - [skip cloc; insert_after nloc ctype] - | _ -> - [] - ) - | _ -> cmap, [] - ) cmap params - -let meta_return body map cmap = - let bloc = Ast.Function.(match body with - | BodyBlock (loc, _) -> loc - | BodyExpression (loc, _) -> loc (* probably wrong, it's after the => *) - ) in - match SMap.get (Reason.internal_name "return") map with - | Some t -> - cmap, [insert_before_with_suffix bloc t " "] - | None -> - let cmap, annos = mk_comment cmap bloc in - cmap, match annos with - | [TypeD (cloc, ctype)] -> - [skip cloc; insert_after cloc ctype] - | _ -> - [] - -let rec meta_array_element cmap = Ast.Expression.(function - | Some (Expression e) -> meta_expression cmap e - | Some (Spread (_, { SpreadElement.argument = e })) -> - meta_expression cmap e - | None -> - cmap, [] -) - -and meta_expression_or_spread cmap = Ast.Expression.(function - | Expression e -> - meta_expression cmap e - | Spread (_, { 
SpreadElement.argument }) -> - meta_expression cmap argument -) - -and meta_fbody cmap loc params body = - let cmap, tmap = meta_fun cmap loc in - ListUtils.concat_fold (fun cmap f -> f cmap) cmap [ - meta_params params tmap; - meta_return body tmap; - meta_body body - ] - -and meta_expression cmap = Ast.Expression.(function - | _, Object { Object.properties } -> - ListUtils.concat_fold (fun cmap -> function - | Object.Property (loc, Object.Property.Init { - value = (_, Function { - Ast.Function.params = (_, { Ast.Function.Params.params; _ }); body; _ - }); - key = Ast.Expression.Object.Property.Identifier _; - _ - }) -> - meta_fbody cmap loc params body - - | Object.Property (_, Object.Property.Init { value; _ }) -> - meta_expression cmap value - - | _ -> cmap, [] (* TODO? *) - ) cmap properties - - | _, Array { Array.elements } -> - ListUtils.concat_fold meta_array_element cmap elements - - | _, Call { Call.arguments; _ } -> - ListUtils.concat_fold meta_expression_or_spread cmap arguments - - | _, Assignment { Assignment.right; _ } -> - meta_expression cmap right - - | loc, Function { Ast.Function. - params = (_, { Ast.Function.Params.params; _ }); - body; _ - } - | loc, ArrowFunction { Ast.Function. - params = (_, { Ast.Function.Params.params; _ }); - body; _ - } -> - meta_fbody cmap loc params body - - | _ -> cmap, [] -) - -and meta_variable cmap (_, vdecl) = Ast.Statement.VariableDeclaration.( - let { Declarator.init; _ } = vdecl in - match init with - | Some expr -> meta_expression cmap expr - | None -> cmap, [] -) - -and meta_statement cmap = Ast.Statement.(function - | _, VariableDeclaration { VariableDeclaration.declarations; _ } -> - ListUtils.concat_fold meta_variable cmap declarations - - | _, Expression { Expression.expression = e; _ } -> - meta_expression cmap e - - | _, ClassDeclaration { Ast.Class.body; _ } -> - let _, { Ast.Class.Body.body = elements; _ } = body in - ListUtils.concat_fold Ast.Class.(fun cmap -> function - | Body.Method (loc, { - Method.key = Ast.Expression.Object.Property.Identifier _; - value = _, { Ast.Function. - params = (_, { Ast.Function.Params.params; _ }); - body; _ - }; - kind = Method.Method | Method.Constructor; - static = false; - decorators = _; - }) -> - meta_fbody cmap loc params body - | _ -> cmap, [] - ) cmap elements - - | loc, FunctionDeclaration { Ast.Function. - params = (_, { Ast.Function.Params.params; _ }); - body; _ - } -> - meta_fbody cmap loc params body - - | _ -> cmap, [] (* TODO *) -) - -and meta_body body cmap = Ast.Statement.( - match body with - | Ast.Function.BodyBlock (_, { Block.body }) -> - meta_statements cmap body - | Ast.Function.BodyExpression expr -> - meta_expression cmap expr -) - -and meta_statements cmap = ListUtils.concat_fold meta_statement cmap - -let meta_program (loc, statements, comments) = - let cmap = make_comment_map loc comments in - let _, edits = meta_statements cmap statements in - edits diff --git a/src/services/port/port_service_js.ml b/src/services/port/port_service_js.ml deleted file mode 100644 index fa0985166d1..00000000000 --- a/src/services/port/port_service_js.ml +++ /dev/null @@ -1,25 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
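The deleted parse_dynamic_type_ above threads an index i through the docblock type string: Str.string_match anchored at i, then Str.match_end () as the next cursor position. A tiny illustration of that idiom with a made-up input (needs the str library):

let () =
  let s = "array<number>" in
  assert (Str.string_match (Str.regexp "array<") s 0);
  let i = Str.match_end () in
  (* the cursor is now just past "array<" *)
  assert (i = 6);
  assert (Str.string_match (Str.regexp "number") s i);
  assert (Str.match_end () = 12)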
- *) - -let port_file (file: string) : (string, exn) result = - try - let file = Path.to_string (Path.make file) in - let ast = Parsing_heaps.get_ast_unsafe (File_key.SourceFile file) in - let content = Sys_utils.cat file in - let lines = Str.split_delim (Str.regexp "\n") content in - let insertions = Comments_js.meta_program ast in - let insertions = List.sort Pervasives.compare insertions in - let new_content = Reason.do_patch lines insertions in - let patch_content = Diff.diff_of_file_and_string file new_content in - Ok patch_content - with exn -> - Error exn - -let port_files (files: string list) = - List.fold_left (fun result_map file -> - SMap.add file (port_file file) result_map - ) SMap.empty files diff --git a/src/services/saved_state/dune b/src/services/saved_state/dune new file mode 100644 index 00000000000..ce8942277b5 --- /dev/null +++ b/src/services/saved_state/dune @@ -0,0 +1,42 @@ +(library + (name flow_service_saved_state) + (wrapped false) + (c_names saved_state_compression_stubs) + (modules :standard \ + saved_state_fetcher + saved_state_dummy_fetcher + saved_state_local_fetcher + ) + (libraries + flow_common_build_id + flow_common_profiling + flow_config + flow_parser_utils + flow_saved_state_stubs + flow_server_env + flow_server_files + flow_state_heaps_module + flow_state_heaps_parsing + flow_typing + sys_utils ; hack + xx + ) + (preprocess (pps lwt_ppx)) +) + +(library + (name flow_service_saved_state_fetcher) + (wrapped false) + (modules + saved_state_fetcher + saved_state_dummy_fetcher + saved_state_local_fetcher + ) + (libraries + flow_common + flow_common_profiling + lwt.unix + sys_utils + ) + (preprocess (pps lwt_ppx)) +) diff --git a/src/services/saved_state/saved_state.ml b/src/services/saved_state/saved_state.ml index 2f5c515a09a..e040455a543 100644 --- a/src/services/saved_state/saved_state.ml +++ b/src/services/saved_state/saved_state.ml @@ -1,23 +1,29 @@ (** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * + * LICENSE file in the root directory of this source tree. *) open Utils_js +module File_sig = File_sig.With_Loc -(* For each parsed file, this is what we will save *) -type parsed_file_data = { - package: Package_json.t option; (* Only package.json files have this *) - info: Module_heaps.info; +type denormalized_file_data = { + package: Package_json.t option; + (* Only package.json files have this *) file_sig: File_sig.t; resolved_requires: Module_heaps.resolved_requires; hash: Xx.hash; } +type normalized_file_data = denormalized_file_data + +(* For each parsed file, this is what we will save *) +type parsed_file_data = { + info: Module_heaps.info; + normalized_file_data: normalized_file_data; +} + (* We also need to store the info for unparsed files *) type unparsed_file_data = { unparsed_info: Module_heaps.info; @@ -31,11 +37,9 @@ type saved_state_data = { * we probably could whitelist some config options, whitespace, etc. But for now, let's * invalidate the saved state if the config has changed at all *) flowconfig_hash: Xx.hash; - parsed_heaps: parsed_file_data FilenameMap.t; unparsed_heaps: unparsed_file_data FilenameMap.t; ordered_non_flowlib_libs: string list; - (* Why store local errors and not merge_errors/suppressions/etc? Well, I have a few reasons: * * 1. Much smaller data structure. 
The whole env.errors data structure can be hundreds of MBs @@ -44,16 +48,15 @@ type saved_state_data = { * 3. Local errors should be the same after a lazy init and after a full init. This isn't true * for the other members of env.errors which are filled in during typechecking *) - local_errors: Errors.ErrorSet.t Utils_js.FilenameMap.t; - - node_modules_containers: SSet.t; - - (* TODO - Figure out what to do aboute module.resolver *) + local_errors: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; + warnings: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; + coverage: Coverage_response.file_coverage Utils_js.FilenameMap.t; + node_modules_containers: SSet.t; (* TODO - Figure out what to do about module.resolver *) } let modulename_map_fn ~f = function -| Modulename.Filename fn -> Modulename.Filename (f fn) -| Modulename.String _ as module_name -> module_name + | Modulename.Filename fn -> Modulename.Filename (f fn) + | Modulename.String _ as module_name -> module_name (* Saving the saved state generally consists of 3 things: * @@ -66,54 +69,48 @@ let modulename_map_fn ~f = function * generates saved states has an easier time keeping up. But the perf of saving isn't as important * as the perf of loading *) -module Save: sig - val save: +module Save : sig + val save : saved_state_filename:Path.t -> genv:ServerEnv.genv -> env:ServerEnv.env -> + profiling:Profiling_js.running -> unit Lwt.t end = struct let normalize_file_key ~root = File_key.map (Files.relative_path root) (* A File_sig.t is a complicated data structure with Loc.t's hidden everywhere. The easiest way to * make sure we get them all is with a mapper *) - class file_sig_normalizer (root) = object - inherit File_sig.mapper + class file_sig_normalizer root = + object + inherit File_sig.mapper - method! loc (loc: Loc.t) = - { loc with - Loc.source = Option.map ~f:(normalize_file_key ~root) loc.Loc.source; - } - end - - (* A Error.error is a complicated data structure with Loc.t's hidden everywhere. The easiest way - * to make sure we get them all is with a mapper *) - class error_normalizer (root) = object - inherit Errors.mapper + method! loc (loc : Loc.t) = + { loc with Loc.source = Option.map ~f:(normalize_file_key ~root) loc.Loc.source } + end - method! loc (loc: Loc.t) = - { loc with - Loc.source = Option.map ~f:(normalize_file_key ~root) loc.Loc.source; - } - end + (* A Flow_error.t is a complicated data structure with Loc.t's hidden everywhere. *) + let normalize_error ~root = + Flow_error.map_loc_of_error (ALoc.update_source (Option.map ~f:(normalize_file_key ~root))) (* We write the Flow version at the beginning of each saved state file. 
It's an easy way to assert * upon reading the file that the writer and reader are the same version of Flow *) - let write_version = + let write_version fd = let version = Flow_build_id.get_build_id () in let version_length = String.length version in - assert (version_length = 16); (* Build ID should always be 16 bytes *) - let rec write_version fd offset len = - if len > 0 - then + (* Build ID should always be 16 bytes *) + assert (version_length = 16); + + let rec loop offset len = + if len > 0 then let%lwt bytes_written = Lwt_unix.write_string fd version offset len in let offset = offset + bytes_written in let len = len - bytes_written in - write_version fd offset len + loop offset len else Lwt.return version_length in - fun fd -> write_version fd 0 version_length + loop 0 version_length let normalize_info ~root info = let module_name = @@ -124,159 +121,200 @@ end = struct let normalize_parsed_data ~root parsed_file_data = (* info *) let info = normalize_info ~root parsed_file_data.info in - (* file_sig *) - let file_sig = (new file_sig_normalizer root)#file_sig parsed_file_data.file_sig in - + let file_sig = + (new file_sig_normalizer root)#file_sig parsed_file_data.normalized_file_data.file_sig + in (* resolved_requires *) - let { Module_heaps.resolved_modules; phantom_dependents } = - parsed_file_data.resolved_requires + let { Module_heaps.resolved_modules; phantom_dependents; hash } = + parsed_file_data.normalized_file_data.resolved_requires in let phantom_dependents = SSet.map (Files.relative_path root) phantom_dependents in - let resolved_modules = SMap.map - (modulename_map_fn ~f:(normalize_file_key ~root)) resolved_modules in - let resolved_requires = { Module_heaps.resolved_modules; phantom_dependents } in - + let resolved_modules = + SMap.map (modulename_map_fn ~f:(normalize_file_key ~root)) resolved_modules + in + let resolved_requires = { Module_heaps.resolved_modules; phantom_dependents; hash } in { - package = parsed_file_data.package; info; - file_sig; - resolved_requires; - hash = parsed_file_data.hash; + normalized_file_data = + { + package = parsed_file_data.normalized_file_data.package; + file_sig; + resolved_requires; + hash = parsed_file_data.normalized_file_data.hash; + }; } (* Collect all the data for a single parsed file *) - let collect_normalized_data_for_parsed_file ~root parsed_heaps fn = + let collect_normalized_data_for_parsed_file ~root ~reader parsed_heaps fn = let package = match fn with | File_key.JsonFile str when Filename.basename str = "package.json" -> - Some (Module_heaps.For_saved_state.get_package_json_unsafe str) + Some (Module_heaps.For_saved_state.get_package_json_unsafe str) | _ -> None in - - let file_data = { - package; - info = Module_heaps.get_info_unsafe ~audit:Expensive.ok fn; - file_sig = Parsing_heaps.get_file_sig_unsafe fn; - resolved_requires = Module_heaps.get_resolved_requires_unsafe ~audit:Expensive.ok fn; - hash = Parsing_heaps.get_file_hash_unsafe fn; - } in - + let file_data = + { + info = Module_heaps.Reader.get_info_unsafe ~reader ~audit:Expensive.ok fn; + normalized_file_data = + { + package; + file_sig = Parsing_heaps.Reader.get_file_sig_unsafe ~reader fn; + resolved_requires = + Module_heaps.Reader.get_resolved_requires_unsafe ~reader ~audit:Expensive.ok fn; + hash = Parsing_heaps.Reader.get_file_hash_unsafe ~reader fn; + }; + } + in let relative_fn = normalize_file_key ~root fn in - let relative_file_data = normalize_parsed_data ~root file_data in - FilenameMap.add relative_fn relative_file_data parsed_heaps (* Collect 
all the data for a single unparsed file *) - let collect_normalized_data_for_unparsed_file ~root unparsed_heaps fn = - let relative_file_data = { - unparsed_info = normalize_info ~root @@ Module_heaps.get_info_unsafe ~audit:Expensive.ok fn; - unparsed_hash = Parsing_heaps.get_file_hash_unsafe fn; - } in - + let collect_normalized_data_for_unparsed_file ~root ~reader unparsed_heaps fn = + let relative_file_data = + { + unparsed_info = + normalize_info ~root + @@ Module_heaps.Reader.get_info_unsafe ~reader ~audit:Expensive.ok fn; + unparsed_hash = Parsing_heaps.Reader.get_file_hash_unsafe ~reader fn; + } + in let relative_fn = normalize_file_key ~root fn in - FilenameMap.add relative_fn relative_file_data unparsed_heaps (* The builtin flowlibs are excluded from the saved state. The server which loads the saved state * will extract and typecheck its own builtin flowlibs *) let is_not_in_flowlib ~options = match (Options.file_options options).Files.default_lib_dir with - | None -> fun _ -> true (* There are no flowlibs *) + | None -> (fun _ -> true) (* There are no flowlibs *) | Some root -> let root_str = Path.to_string root in - fun f -> not (Files.is_prefix root_str f) + (fun f -> not (Files.is_prefix root_str f)) - let normalize_error_set ~root error_set = - let normalizer = new error_normalizer root in - Errors.ErrorSet.map normalizer#error error_set + let normalize_error_set ~root = Flow_error.ErrorSet.map (normalize_error ~root) (* Collect all the data for all the files *) - let collect_data ~workers ~genv ~env = + let collect_data ~workers ~genv ~env ~profiling = let options = genv.ServerEnv.options in let root = Options.root options |> Path.to_string in - let%lwt parsed_heaps = MultiWorkerLwt.call workers - ~job:(List.fold_left (collect_normalized_data_for_parsed_file ~root) ) - ~neutral:FilenameMap.empty - ~merge:FilenameMap.union - ~next:(MultiWorkerLwt.next workers (FilenameSet.elements env.ServerEnv.files)) + let reader = State_reader.create () in + let%lwt parsed_heaps = + Profiling_js.with_timer_lwt profiling ~timer:"CollectParsed" ~f:(fun () -> + MultiWorkerLwt.call + workers + ~job:(List.fold_left (collect_normalized_data_for_parsed_file ~root ~reader)) + ~neutral:FilenameMap.empty + ~merge:FilenameMap.union + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements env.ServerEnv.files))) in - let%lwt unparsed_heaps = MultiWorkerLwt.call workers - ~job:(List.fold_left (collect_normalized_data_for_unparsed_file ~root) ) - ~neutral:FilenameMap.empty - ~merge:FilenameMap.union - ~next:(MultiWorkerLwt.next workers (FilenameSet.elements env.ServerEnv.unparsed)) + let%lwt unparsed_heaps = + Profiling_js.with_timer_lwt profiling ~timer:"CollectUnparsed" ~f:(fun () -> + MultiWorkerLwt.call + workers + ~job:(List.fold_left (collect_normalized_data_for_unparsed_file ~root ~reader)) + ~neutral:FilenameMap.empty + ~merge:FilenameMap.union + ~next:(MultiWorkerLwt.next workers (FilenameSet.elements env.ServerEnv.unparsed))) in let ordered_non_flowlib_libs = env.ServerEnv.ordered_libs |> List.filter (is_not_in_flowlib ~options) - |> List.map (Files.relative_path root) + |> Core_list.map ~f:(Files.relative_path root) + in + let local_errors = + FilenameMap.fold + (fun fn error_set acc -> + let normalized_fn = normalize_file_key ~root fn in + let normalized_error_set = normalize_error_set ~root error_set in + FilenameMap.add normalized_fn normalized_error_set acc) + env.ServerEnv.errors.ServerEnv.local_errors + FilenameMap.empty + in + let warnings = + FilenameMap.fold + (fun fn warning_set 
acc -> + let normalized_fn = normalize_file_key ~root fn in + let normalized_error_set = normalize_error_set ~root warning_set in + FilenameMap.add normalized_fn normalized_error_set acc) + env.ServerEnv.errors.ServerEnv.warnings + FilenameMap.empty in - let local_errors = FilenameMap.fold (fun fn error_set acc -> - let normalized_fn = normalize_file_key ~root fn in - let normalized_error_set = normalize_error_set ~root error_set in - FilenameMap.add normalized_fn normalized_error_set acc - ) env.ServerEnv.errors.ServerEnv.local_errors FilenameMap.empty in let node_modules_containers = SSet.map (Files.relative_path root) !Files.node_modules_containers in let flowconfig_hash = - FlowConfig.get_hash @@ Server_files_js.config_file (Options.flowconfig_name options) + FlowConfig.get_hash + @@ Server_files_js.config_file (Options.flowconfig_name options) @@ Options.root options in - Lwt.return { - flowconfig_hash; - parsed_heaps; - unparsed_heaps; - ordered_non_flowlib_libs; - local_errors; - node_modules_containers; - } + Lwt.return + { + flowconfig_hash; + parsed_heaps; + unparsed_heaps; + ordered_non_flowlib_libs; + local_errors; + warnings; + coverage = env.ServerEnv.coverage; + node_modules_containers; + } - let save ~saved_state_filename ~genv ~env = + let save ~saved_state_filename ~genv ~env ~profiling = Hh_logger.info "Collecting data for saved state"; let workers = genv.ServerEnv.workers in - - let%lwt data = collect_data ~workers ~genv ~env in - + let%lwt data = collect_data ~workers ~genv ~env ~profiling in let filename = Path.to_string saved_state_filename in - - Hh_logger.info "Writing saved-state file at %S" filename; - let%lwt fd = Lwt_unix.openfile filename [Unix.O_WRONLY; Unix.O_CREAT; Unix.O_TRUNC] 0o666 in let%lwt header_bytes_written = write_version fd in - - let%lwt data_bytes_written = - Marshal_tools_lwt.to_fd_with_preamble fd (data: saved_state_data) - in - let%lwt () = Lwt_unix.close fd in - - let bytes_written = - header_bytes_written + Marshal_tools_lwt.expected_preamble_size + data_bytes_written + Hh_logger.info "Compressing saved state with lz4"; + let%lwt saved_state_contents = + Profiling_js.with_timer_lwt profiling ~timer:"Compress" ~f:(fun () -> + Saved_state_compression.( + let compressed = marshal_and_compress data in + let orig_size = uncompressed_size compressed in + let new_size = compressed_size compressed in + Hh_logger.info + "Compressed from %d bytes to %d bytes (%3.2f%%)" + orig_size + new_size + (100. *. float_of_int new_size /. 
float_of_int orig_size); + Lwt.return compressed)) in - - Hh_logger.info "Finished writing %d bytes to saved-state file at %S" bytes_written filename; - - Lwt.return_unit + Profiling_js.with_timer_lwt profiling ~timer:"Write" ~f:(fun () -> + Hh_logger.info "Writing saved-state file at %S" filename; + let%lwt data_bytes_written = + Marshal_tools_lwt.to_fd_with_preamble + fd + (saved_state_contents : Saved_state_compression.compressed) + in + let%lwt () = Lwt_unix.close fd in + let bytes_written = + header_bytes_written + Marshal_tools_lwt.expected_preamble_size + data_bytes_written + in + Hh_logger.info "Finished writing %d bytes to saved-state file at %S" bytes_written filename; + + Lwt.return_unit) end type invalid_reason = -| Bad_header -| Build_mismatch -| Changed_files -| Failed_to_marshal -| File_does_not_exist -| Flowconfig_mismatch + | Bad_header + | Build_mismatch + | Changed_files + | Failed_to_marshal + | Failed_to_decompress + | File_does_not_exist + | Flowconfig_mismatch let invalid_reason_to_string = function -| Bad_header -> "Invalid saved state header" -| Build_mismatch -> "Build ID of saved state does not match this binary" -| Changed_files -> "A file change invalidated the saved state" -| Failed_to_marshal -> "Failed to unmarshal data from saved state" -| File_does_not_exist -> "Saved state file does not exist" -| Flowconfig_mismatch -> ".flowconfig has changed since saved state was generated" + | Bad_header -> "Invalid saved state header" + | Build_mismatch -> "Build ID of saved state does not match this binary" + | Changed_files -> "A file change invalidated the saved state" + | Failed_to_marshal -> "Failed to unmarshal data from saved state" + | Failed_to_decompress -> "Failed to decompress saved state data" + | File_does_not_exist -> "Saved state file does not exist" + | Flowconfig_mismatch -> ".flowconfig has changed since saved state was generated" exception Invalid_saved_state of invalid_reason @@ -288,64 +326,58 @@ exception Invalid_saved_state of invalid_reason * This is on the critical path for starting up a server with saved state. We really care about * the perf *) -module Load: sig - val load: +module Load : sig + val load : workers:MultiWorkerLwt.worker list option -> saved_state_filename:Path.t -> options:Options.t -> + profiling:Profiling_js.running -> saved_state_data Lwt.t -end = struct + val denormalize_parsed_data : root:string -> normalized_file_data -> denormalized_file_data +end = struct let denormalize_file_key ~root fn = File_key.map (Files.absolute_path root) fn - class file_sig_denormalizer (root) = object - inherit File_sig.mapper - - method! loc (loc: Loc.t) = - { loc with - Loc.source = Option.map ~f:(denormalize_file_key ~root) loc.Loc.source; - } - end + class file_sig_denormalizer root = + object + inherit File_sig.mapper - class error_denormalizer (root) = object - inherit Errors.mapper + method! loc (loc : Loc.t) = + { loc with Loc.source = Option.map ~f:(denormalize_file_key ~root) loc.Loc.source } + end - method! 
loc (loc: Loc.t) = - { loc with - Loc.source = Option.map ~f:(denormalize_file_key ~root) loc.Loc.source; - } - end + let denormalize_error ~root = + Flow_error.map_loc_of_error (ALoc.update_source (Option.map ~f:(denormalize_file_key ~root))) let verify_version = - let version_length = 16 in (* Flow_build_id should always be 16 bytes *) + let version_length = 16 in + (* Flow_build_id should always be 16 bytes *) let rec read_version fd buf offset len = - if len > 0 - then + if len > 0 then ( let%lwt bytes_read = Lwt_unix.read fd buf offset len in - if bytes_read = 0 - then begin + if bytes_read = 0 then ( Hh_logger.error "Invalid saved state version header. It should be %d bytes but only read %d bytes" version_length (version_length - len); raise (Invalid_saved_state Bad_header) - end; + ); let offset = offset + bytes_read in let len = len - bytes_read in read_version fd buf offset len - else + ) else let result = Bytes.to_string buf in let flow_build_id = Flow_build_id.get_build_id () in - if result <> flow_build_id - then begin + if result <> flow_build_id then ( Hh_logger.error "Saved-state file failed version check. Expected version %S but got %S" flow_build_id result; raise (Invalid_saved_state Build_mismatch) - end else Lwt.return_unit + ) else + Lwt.return_unit in - fun fd -> read_version fd (Bytes.create version_length) 0 version_length + (fun fd -> read_version fd (Bytes.create version_length) 0 version_length) let denormalize_info ~root info = let module_name = @@ -357,158 +389,173 @@ end = struct * * We do our best to avoid reading the file system (which Path.make will do) *) let denormalize_parsed_data ~root file_data = - (* info *) - let info = denormalize_info ~root file_data.info in - (* file_sig *) let file_sig = (new file_sig_denormalizer root)#file_sig file_data.file_sig in - (* resolved_requires *) - let { Module_heaps.resolved_modules; phantom_dependents } = file_data.resolved_requires in + let { Module_heaps.resolved_modules; phantom_dependents; hash } = + file_data.resolved_requires + in let phantom_dependents = SSet.map (Files.absolute_path root) phantom_dependents in - let resolved_modules = SMap.map - (modulename_map_fn ~f:(denormalize_file_key ~root)) resolved_modules in - let resolved_requires = { Module_heaps.resolved_modules; phantom_dependents } in + let resolved_modules = + SMap.map (modulename_map_fn ~f:(denormalize_file_key ~root)) resolved_modules + in + let resolved_requires = { Module_heaps.resolved_modules; phantom_dependents; hash } in + { package = file_data.package; file_sig; resolved_requires; hash = file_data.hash } - { - package = file_data.package; - info; - file_sig; - resolved_requires; - hash = file_data.hash; - } + let partially_denormalize_parsed_data ~root { info; normalized_file_data } = + let info = denormalize_info ~root info in + { info; normalized_file_data } - let progress_fn real_total offset ~total:_ ~start ~length:_ = - let finished = start + offset in + let progress_fn real_total ~total:_ ~start ~length:_ = MonitorRPC.status_update - ServerStatus.(Load_saved_state_progress { total = Some real_total; finished }) + ServerStatus.(Load_saved_state_progress { total = Some real_total; finished = start }) (* Denormalize the data for all the parsed files. 
This is kind of slow :( *) - let denormalize_parsed_heaps ~workers ~root ~progress_fn parsed_heaps = - let next = - MultiWorkerLwt.next ~progress_fn ~max_size:4000 workers (FilenameMap.bindings parsed_heaps) - in - MultiWorkerLwt.call workers - ~job:(List.fold_left (fun acc (relative_fn, parsed_file_data) -> - let parsed_file_data = denormalize_parsed_data ~root parsed_file_data in + let denormalize_parsed_heaps ~root parsed_heaps = + FilenameMap.fold + (fun relative_fn parsed_file_data acc -> + let parsed_file_data = partially_denormalize_parsed_data ~root parsed_file_data in let fn = denormalize_file_key ~root relative_fn in - FilenameMap.add fn parsed_file_data acc - )) - ~neutral:FilenameMap.empty - ~merge:FilenameMap.union - ~next + FilenameMap.add fn parsed_file_data acc) + parsed_heaps + FilenameMap.empty (* Denormalize the data for all the unparsed files *) let denormalize_unparsed_heaps ~workers ~root ~progress_fn unparsed_heaps = let next = MultiWorkerLwt.next ~progress_fn ~max_size:4000 workers (FilenameMap.bindings unparsed_heaps) in - MultiWorkerLwt.call workers - ~job:(List.fold_left (fun acc (relative_fn, unparsed_file_data) -> - let unparsed_info = denormalize_info ~root unparsed_file_data.unparsed_info in - let fn = denormalize_file_key ~root relative_fn in - FilenameMap.add fn { unparsed_info; unparsed_hash = unparsed_file_data.unparsed_hash; } acc - )) + MultiWorkerLwt.call + workers + ~job: + (List.fold_left (fun acc (relative_fn, unparsed_file_data) -> + let unparsed_info = denormalize_info ~root unparsed_file_data.unparsed_info in + let fn = denormalize_file_key ~root relative_fn in + FilenameMap.add + fn + { unparsed_info; unparsed_hash = unparsed_file_data.unparsed_hash } + acc)) ~neutral:FilenameMap.empty ~merge:FilenameMap.union ~next - let denormalize_error_set ~root normalized_error_set = - let denormalizer = new error_denormalizer root in - Errors.ErrorSet.map denormalizer#error normalized_error_set + let denormalize_error_set ~root = Flow_error.ErrorSet.map (denormalize_error ~root) (* Denormalize all the data *) let denormalize_data ~workers ~options ~data = let root = Options.root options |> Path.to_string in - let { flowconfig_hash; parsed_heaps; unparsed_heaps; ordered_non_flowlib_libs; local_errors; + warnings; + coverage; node_modules_containers; - } = data in - + } = + data + in let current_flowconfig_hash = let flowconfig_name = Options.flowconfig_name options in FlowConfig.get_hash @@ Server_files_js.config_file flowconfig_name @@ Options.root options in - - if flowconfig_hash <> current_flowconfig_hash - then begin + if flowconfig_hash <> current_flowconfig_hash then ( Hh_logger.error "Invalid saved state: .flowconfig has changed since this saved state was generated."; raise (Invalid_saved_state Flowconfig_mismatch) - end; - - let parsed_count = FilenameMap.cardinal parsed_heaps in - let progress_fn = progress_fn (parsed_count + (FilenameMap.cardinal unparsed_heaps)) in + ); Hh_logger.info "Denormalizing the data for the parsed files"; - let%lwt parsed_heaps = - let progress_fn = progress_fn 0 in - denormalize_parsed_heaps ~workers ~root ~progress_fn parsed_heaps - in - + let%lwt parsed_heaps = Lwt.return (denormalize_parsed_heaps ~root parsed_heaps) in Hh_logger.info "Denormalizing the data for the unparsed files"; let%lwt unparsed_heaps = - let progress_fn = progress_fn parsed_count in + let progress_fn = progress_fn (FilenameMap.cardinal unparsed_heaps) in denormalize_unparsed_heaps ~workers ~root ~progress_fn unparsed_heaps in - - let 
ordered_non_flowlib_libs = List.map (Files.absolute_path root) ordered_non_flowlib_libs in - - let local_errors = FilenameMap.fold (fun normalized_fn normalized_error_set acc -> - let fn = denormalize_file_key ~root normalized_fn in - let error_set = denormalize_error_set ~root normalized_error_set in - FilenameMap.add fn error_set acc - ) local_errors FilenameMap.empty in - + let ordered_non_flowlib_libs = + Core_list.map ~f:(Files.absolute_path root) ordered_non_flowlib_libs + in + let local_errors = + FilenameMap.fold + (fun normalized_fn normalized_error_set acc -> + let fn = denormalize_file_key ~root normalized_fn in + let error_set = denormalize_error_set ~root normalized_error_set in + FilenameMap.add fn error_set acc) + local_errors + FilenameMap.empty + in + let warnings = + FilenameMap.fold + (fun normalized_fn normalized_warning_set acc -> + let fn = denormalize_file_key ~root normalized_fn in + let warning_set = denormalize_error_set ~root normalized_warning_set in + FilenameMap.add fn warning_set acc) + warnings + FilenameMap.empty + in let node_modules_containers = SSet.map (Files.absolute_path root) node_modules_containers in + Lwt.return + { + flowconfig_hash; + parsed_heaps; + unparsed_heaps; + ordered_non_flowlib_libs; + local_errors; + warnings; + coverage; + node_modules_containers; + } - Lwt.return { - flowconfig_hash; - parsed_heaps; - unparsed_heaps; - ordered_non_flowlib_libs; - local_errors; - node_modules_containers; - } - - let load ~workers ~saved_state_filename ~options = + let load ~workers ~saved_state_filename ~options ~profiling = let filename = Path.to_string saved_state_filename in - Hh_logger.info "Reading saved-state file at %S" filename; MonitorRPC.status_update ServerStatus.Read_saved_state; - let%lwt fd = try%lwt - Lwt_unix.openfile filename [Unix.O_RDONLY; Unix.O_NONBLOCK] 0o666 - with - | Unix.Unix_error(Unix.ENOENT, _, _) as exn -> - Hh_logger.error ~exn "Failed to open %S" filename; - raise (Invalid_saved_state File_does_not_exist) + let%lwt fd = + try%lwt Lwt_unix.openfile filename [Unix.O_RDONLY; Unix.O_NONBLOCK] 0o666 + with Unix.Unix_error (Unix.ENOENT, _, _) as exn -> + let exn = Exception.wrap exn in + Hh_logger.error "Failed to open %S\n%s" filename (Exception.to_string exn); + raise (Invalid_saved_state File_does_not_exist) in - let%lwt () = verify_version fd in - let%lwt (data: saved_state_data) = - try%lwt Marshal_tools_lwt.from_fd_with_preamble fd - with exn -> - Hh_logger.error ~exn "Failed to parsed saved state data"; - raise (Invalid_saved_state Failed_to_marshal) + let%lwt (compressed_data : Saved_state_compression.compressed) = + Profiling_js.with_timer_lwt profiling ~timer:"Read" ~f:(fun () -> + try%lwt Marshal_tools_lwt.from_fd_with_preamble fd + with exn -> + let exn = Exception.wrap exn in + Hh_logger.error ~exn "Failed to parse saved state data"; + raise (Invalid_saved_state Failed_to_marshal)) in - let%lwt () = Lwt_unix.close fd in - + Hh_logger.info "Decompressing saved-state data"; + + let%lwt (data : saved_state_data) = + Profiling_js.with_timer_lwt profiling ~timer:"Decompress" ~f:(fun () -> + try Lwt.return (Saved_state_compression.decompress_and_unmarshal compressed_data) + with exn -> + let exn = Exception.wrap exn in + Hh_logger.error ~exn "Failed to decompress saved state"; + raise (Invalid_saved_state Failed_to_decompress)) + in Hh_logger.info "Denormalizing saved-state data"; - let%lwt data = denormalize_data ~workers ~options ~data in - + let%lwt data = + Profiling_js.with_timer_lwt profiling 
~timer:"Denormalize" ~f:(fun () -> + denormalize_data ~workers ~options ~data) + in Hh_logger.info "Finished loading saved-state"; Lwt.return data end let save = Save.save -let load = Load.load + +let load ~workers ~saved_state_filename ~options = + let should_print_summary = Options.should_profile options in + Profiling_js.with_profiling_lwt ~label:"LoadSavedState" ~should_print_summary (fun profiling -> + Load.load ~workers ~saved_state_filename ~options ~profiling) + +let denormalize_parsed_data = Load.denormalize_parsed_data diff --git a/src/services/saved_state/saved_state.mli b/src/services/saved_state/saved_state.mli index 6a506a52f01..9840769613a 100644 --- a/src/services/saved_state/saved_state.mli +++ b/src/services/saved_state/saved_state.mli @@ -1,20 +1,25 @@ (** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * + * LICENSE file in the root directory of this source tree. *) -type parsed_file_data = { - package: Package_json.t option; (* Only package.json files have this *) - info: Module_heaps.info; - file_sig: File_sig.t; +type denormalized_file_data = { + package: Package_json.t option; + (* Only package.json files have this *) + file_sig: File_sig.With_Loc.t; resolved_requires: Module_heaps.resolved_requires; hash: Xx.hash; } +type normalized_file_data + +type parsed_file_data = { + info: Module_heaps.info; + normalized_file_data: normalized_file_data; +} + type unparsed_file_data = { unparsed_info: Module_heaps.info; unparsed_hash: Xx.hash; @@ -25,29 +30,36 @@ type saved_state_data = { parsed_heaps: parsed_file_data Utils_js.FilenameMap.t; unparsed_heaps: unparsed_file_data Utils_js.FilenameMap.t; ordered_non_flowlib_libs: string list; - local_errors: Errors.ErrorSet.t Utils_js.FilenameMap.t; + local_errors: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; + warnings: Flow_error.ErrorSet.t Utils_js.FilenameMap.t; + coverage: Coverage_response.file_coverage Utils_js.FilenameMap.t; node_modules_containers: SSet.t; } type invalid_reason = -| Bad_header -| Build_mismatch -| Changed_files -| Failed_to_marshal -| File_does_not_exist -| Flowconfig_mismatch + | Bad_header + | Build_mismatch + | Changed_files + | Failed_to_marshal + | Failed_to_decompress + | File_does_not_exist + | Flowconfig_mismatch -val invalid_reason_to_string: invalid_reason -> string +val invalid_reason_to_string : invalid_reason -> string exception Invalid_saved_state of invalid_reason -val save: +val save : saved_state_filename:Path.t -> genv:ServerEnv.genv -> env:ServerEnv.env -> + profiling:Profiling_js.running -> unit Lwt.t -val load: + +val load : workers:MultiWorkerLwt.worker list option -> saved_state_filename:Path.t -> options:Options.t -> - saved_state_data Lwt.t + (Profiling_js.finished * saved_state_data) Lwt.t + +val denormalize_parsed_data : root:string -> normalized_file_data -> denormalized_file_data diff --git a/src/services/saved_state/saved_state_compression.ml b/src/services/saved_state/saved_state_compression.ml new file mode 100644 index 00000000000..2b22fd6ba48 --- /dev/null +++ b/src/services/saved_state/saved_state_compression.ml @@ -0,0 +1,20 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type compressed = { + compressed_data: string; + compressed_size: int; + uncompressed_size: int; +} + +external marshal_and_compress : 'a -> compressed = "marshal_and_compress_stub" + +external decompress_and_unmarshal : compressed -> 'a = "decompress_and_unmarshal_stub" + +let compressed_size { compressed_size; _ } = compressed_size + +let uncompressed_size { uncompressed_size; _ } = uncompressed_size diff --git a/src/services/saved_state/saved_state_compression.mli b/src/services/saved_state/saved_state_compression.mli new file mode 100644 index 00000000000..988278488eb --- /dev/null +++ b/src/services/saved_state/saved_state_compression.mli @@ -0,0 +1,21 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type compressed + +(* Pass in any OCaml value. We'll marshal it to a string & compress that *) +val marshal_and_compress : 'a -> compressed + +(* Given the output of `marshal_and_compress`, decompress it and unmarshal it back to the original + * OCaml value *) +val decompress_and_unmarshal : compressed -> 'a + +(* How many bytes is the compressed data *) +val compressed_size : compressed -> int + +(* How many bytes was the uncompressed (but marshaled) data *) +val uncompressed_size : compressed -> int diff --git a/src/services/saved_state/saved_state_compression_stubs.c b/src/services/saved_state/saved_state_compression_stubs.c new file mode 100644 index 00000000000..e085b3ee1f2 --- /dev/null +++ b/src/services/saved_state/saved_state_compression_stubs.c @@ -0,0 +1,95 @@ +/** + * Copyright (c) 2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the "hack" directory of this source tree. + * + */ + +#include + +#include +#include +#include +#include +#include +#include + +#include + +CAMLprim value marshal_and_compress_stub(value data) { + CAMLparam1(data); + CAMLlocal2(result, compressed_data); + + // TODO - well todoish. If data is a string, we don't need to marshal it. + // We can just compress it directly. That's what hh_shared.c does. But at + // the moment saved state is not a string, so we don't really need this + // optimization + + intnat serialized_size; + char *marshaled_value; + + // caml_output_value_to_malloc will allocate marshaled_value. We must free it + caml_output_value_to_malloc( + data, Val_int(0)/*flags*/, &marshaled_value, &serialized_size); + + if (serialized_size < 0) { + caml_raise_with_string( + *caml_named_value("c_assertion_failure"), + "Failed to marshal"); + } + + size_t uncompressed_size = (size_t) serialized_size; + + size_t max_compression_size = LZ4_compressBound(uncompressed_size); + char *compressed_value = caml_stat_alloc(max_compression_size); + size_t compressed_size = LZ4_compress_default( + marshaled_value, + compressed_value, + uncompressed_size, + max_compression_size); + // It's unfortunate we need to copy. But we don't know how large the OCaml + // string will be until after we compress. 
+ // TODO: When we're >= OCaml 4.06, switch to caml_alloc_initialized_string + compressed_data = caml_alloc_string(compressed_size); + memcpy(String_val(compressed_data), compressed_value, compressed_size); + caml_stat_free(compressed_value); + + caml_stat_free(marshaled_value); + + result = caml_alloc_tuple(3); + Store_field(result, 0, compressed_data); + Store_field(result, 1, Val_int(compressed_size)); + Store_field(result, 2, Val_int(uncompressed_size)); + + CAMLreturn(result); +} + +CAMLprim value decompress_and_unmarshal_stub(value compressed) { + CAMLparam1(compressed); + CAMLlocal1(result); + + char *compressed_data = String_val(Field(compressed, 0)); + size_t compressed_size = Long_val(Field(compressed, 1)); + size_t uncompressed_size = Long_val(Field(compressed, 2)); + + char *marshaled_value = caml_stat_alloc(uncompressed_size); + size_t actual_uncompressed_size = LZ4_decompress_safe( + compressed_data, + marshaled_value, + compressed_size, + uncompressed_size); + + if (actual_uncompressed_size != uncompressed_size) { + caml_raise_with_string( + *caml_named_value("c_assertion_failure"), + "Failed to decompress"); + } + + result = caml_input_value_from_block(marshaled_value, uncompressed_size); + + caml_stat_free(marshaled_value); + + CAMLreturn(result); +} diff --git a/src/services/saved_state/saved_state_dummy_fetcher.ml b/src/services/saved_state/saved_state_dummy_fetcher.ml index a53dbb05284..e0275449347 100644 --- a/src/services/saved_state/saved_state_dummy_fetcher.ml +++ b/src/services/saved_state/saved_state_dummy_fetcher.ml @@ -1,17 +1,17 @@ (** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * + * LICENSE file in the root directory of this source tree. *) (* This saved state fetcher is used by servers which don't intend to use saved state *) -include (struct - let fetch ~options:_ = - Profiling_js.with_profiling_lwt - ~label:"FetchSavedState" - ~should_print_summary:false - (fun _ -> Lwt.return Saved_state_fetcher.No_saved_state) -end: Saved_state_fetcher.FETCHER) +include ( + struct + let fetch ~options:_ = + Profiling_js.with_profiling_lwt + ~label:"FetchSavedState" + ~should_print_summary:false + (fun _ -> Lwt.return Saved_state_fetcher.No_saved_state) + end : + Saved_state_fetcher.FETCHER ) diff --git a/src/services/saved_state/saved_state_fetcher.ml b/src/services/saved_state/saved_state_fetcher.ml index 1d9b0a27dac..8268c91c3c1 100644 --- a/src/services/saved_state/saved_state_fetcher.ml +++ b/src/services/saved_state/saved_state_fetcher.ml @@ -1,19 +1,17 @@ (** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * + * LICENSE file in the root directory of this source tree. 
*) type result = -| Saved_state of { - saved_state_filename: Path.t; - changed_files: SSet.t; -} -| No_saved_state + | Saved_state of { + saved_state_filename: Path.t; + changed_files: SSet.t; + } + | No_saved_state module type FETCHER = sig - val fetch: options:Options.t -> (Profiling_js.finished * result) Lwt.t + val fetch : options:Options.t -> (Profiling_js.finished * result) Lwt.t end diff --git a/src/services/saved_state/saved_state_local_fetcher.ml b/src/services/saved_state/saved_state_local_fetcher.ml index 48643a9f881..df8d33b9b01 100644 --- a/src/services/saved_state/saved_state_local_fetcher.ml +++ b/src/services/saved_state/saved_state_local_fetcher.ml @@ -1,10 +1,8 @@ (** - * Copyright (c) 2018, Facebook, Inc. - * All rights reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the - * LICENSE file in the "hack" directory of this source tree. - * + * LICENSE file in the root directory of this source tree. *) (* This saved state fetcher is intended to be used mainly by tests. It assumes that there are 2 @@ -15,33 +13,38 @@ * * if either file doesn't exist then we assume there's no saved state *) -include (struct - let fetch ~options = - Profiling_js.with_profiling_lwt ~label:"FetchSavedState" ~should_print_summary:false (fun _ -> - let root_str = Options.root options |> Path.to_string in - let saved_state_file = Filename.concat root_str ".flow.saved_state" in - let changed_files_input_file = Filename.concat root_str ".flow.saved_state_file_changes" in - - let%lwt saved_state_exists = Lwt_unix.file_exists saved_state_file - and input_file_exists = Lwt_unix.file_exists changed_files_input_file in - - if saved_state_exists && input_file_exists - then - let changed_files = Sys_utils.lines_of_file changed_files_input_file - |> Files.canonicalize_filenames ~handle_imaginary:Files.imaginary_realpath ~cwd:(root_str) - |> SSet.of_list - in - Lwt.return (Saved_state_fetcher.Saved_state { - saved_state_filename = Path.make saved_state_file; - changed_files; - }) - else begin - if not saved_state_exists - then Hh_logger.error "File %S does not exist" saved_state_file; - if not input_file_exists - then Hh_logger.error "File %S does not exist" changed_files_input_file; +include ( + struct + let fetch ~options = + Profiling_js.with_profiling_lwt + ~label:"FetchSavedState" + ~should_print_summary:false + (fun _ -> + let root_str = Options.root options |> Path.to_string in + let saved_state_file = Filename.concat root_str ".flow.saved_state" in + let changed_files_input_file = + Filename.concat root_str ".flow.saved_state_file_changes" + in + let%lwt saved_state_exists = Lwt_unix.file_exists saved_state_file + and input_file_exists = Lwt_unix.file_exists changed_files_input_file in + if saved_state_exists && input_file_exists then + let changed_files = + Sys_utils.lines_of_file changed_files_input_file + |> Files.canonicalize_filenames + ~handle_imaginary:Files.imaginary_realpath + ~cwd:root_str + |> SSet.of_list + in + Lwt.return + (Saved_state_fetcher.Saved_state + { saved_state_filename = Path.make saved_state_file; changed_files }) + else ( + if not saved_state_exists then + Hh_logger.error "File %S does not exist" saved_state_file; + if not input_file_exists then + Hh_logger.error "File %S does not exist" changed_files_input_file; - Lwt.return Saved_state_fetcher.No_saved_state - end - ) -end: Saved_state_fetcher.FETCHER) + Lwt.return Saved_state_fetcher.No_saved_state + )) + end : + Saved_state_fetcher.FETCHER ) 
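For readers following the new compression path above, here is a minimal OCaml sketch of how the Saved_state_compression interface is meant to be exercised end to end. It is not part of the diff: only the four values declared in saved_state_compression.mli (marshal_and_compress, decompress_and_unmarshal, compressed_size, uncompressed_size) are taken from the change; the sample payload and the log format are illustrative assumptions.

(* Sketch only: round-trip an arbitrary marshalable value through the
 * lz4-backed compression interface declared in saved_state_compression.mli. *)
let () =
  Saved_state_compression.(
    (* Any marshalable OCaml value works; a string list stands in for the
     * real saved_state_data record. *)
    let payload = ["bin/flow"; "src/parser"; "lib/core.js"] in
    let compressed = marshal_and_compress payload in
    (* The two size accessors report the marshaled size before and after
     * compression, mirroring the logging done in Save.save. *)
    Printf.printf
      "compressed %d bytes down to %d bytes\n"
      (uncompressed_size compressed)
      (compressed_size compressed);
    (* Decompressing must give back a structurally equal value. *)
    let restored : string list = decompress_and_unmarshal compressed in
    assert (restored = payload))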
diff --git a/src/services/type_info/__tests__/insert_type_utils_tests.ml b/src/services/type_info/__tests__/insert_type_utils_tests.ml new file mode 100644 index 00000000000..605e141dcad --- /dev/null +++ b/src/services/type_info/__tests__/insert_type_utils_tests.ml @@ -0,0 +1,62 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 +open Ty + +let stylize = (new Insert_type_utils.stylize_ty_mapper ())#on_t Loc.none + +let tests = + "insert_type_utils" + >::: [ + ( "stylize_union_number_with_number_literal" + >:: fun ctxt -> + let t_in = Union (Num None, NumLit "1", []) in + let t_exp = Num None in + assert_equal ~ctxt ~printer:Ty.show t_exp (stylize t_in) ); + ( "stylize_union_string_with_string_literal" + >:: fun ctxt -> + let t_in = Union (StrLit "foo", Str None, []) in + let t_exp = Str None in + assert_equal ~ctxt ~printer:Ty.show t_exp (stylize t_in) ); + ( "stylize_union_true_and_false" + >:: fun ctxt -> + let t_in = Union (BoolLit true, BoolLit false, []) in + let t_exp = Bool None in + assert_equal ~ctxt ~printer:Ty.show t_exp (stylize t_in) ); + ( "stylize_union_true_and_bool" + >:: fun ctxt -> + let t_in = Union (BoolLit true, Bool None, []) in + let t_exp = Bool None in + assert_equal ~ctxt ~printer:Ty.show t_exp (stylize t_in) ); + ( "stylize_union_string_number_literals" + >:: fun ctxt -> + let t_in = Union (Str None, NumLit "1", [NumLit "2"]) in + let t_exp = Union (NumLit "1", NumLit "2", [Str None]) in + assert_equal ~ctxt ~printer:Ty.show t_exp (stylize t_in) ); + (* These tests just document that sorting is working in a sane order *) + ( "sort_types_numeric_literals" + >:: fun ctxt -> + let t_in = Union (NumLit "5", NumLit "11", [NumLit "1"; NumLit "2"]) in + let t_exp = Union (NumLit "1", NumLit "2", [NumLit "5"; NumLit "11"]) in + assert_equal ~ctxt ~printer:Ty.show t_exp (Insert_type.simplify t_in) ); + ( "sort_types_top_any" + >:: fun ctxt -> + let t_in = Union (Top, Any Annotated, []) in + let t_exp = Top in + assert_equal ~ctxt ~printer:Ty.show t_exp (Insert_type.simplify t_in) ); + ( "sort_types_bot_any" + >:: fun ctxt -> + let t_in = Union (Bot EmptyType, Any Annotated, []) in + let t_exp = Any Annotated in + assert_equal ~ctxt ~printer:Ty.show t_exp (Insert_type.simplify t_in) ); + ( "sort_types_any_first" + >:: fun ctxt -> + let t_in = Union (Void, Any Annotated, [Null; Str None; NumLit "5"; Bool None]) in + let t_exp = Union (Any Annotated, Void, [Null; Bool None; NumLit "5"; Str None]) in + assert_equal ~ctxt ~printer:Ty.show t_exp (Insert_type.simplify t_in) ); + ] diff --git a/src/services/type_info/__tests__/type_info_tests.ml b/src/services/type_info/__tests__/type_info_tests.ml new file mode 100644 index 00000000000..00b57070f21 --- /dev/null +++ b/src/services/type_info/__tests__/type_info_tests.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 + +let tests = "ty" >::: [Insert_type_utils_tests.tests; Validation_tests.tests] + +let () = run_test_tt_main tests diff --git a/src/services/type_info/__tests__/validation_tests.ml b/src/services/type_info/__tests__/validation_tests.ml new file mode 100644 index 00000000000..693de81087c --- /dev/null +++ b/src/services/type_info/__tests__/validation_tests.ml @@ -0,0 +1,43 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 +open Ty + +let tests = + "validation_tests" + >::: [ + (* Valid types *) + ( "Any_annotated" + >:: fun ctxt -> + let t = Any Annotated in + let (_, errs) = Insert_type_utils.validate_type ~size_limit:1000 t in + assert_equal ~ctxt ~printer:(fun _ -> "unit") (List.length errs) 0 ); + ( "Any_bound_function_this" + >:: fun ctxt -> + let t = Any (Unsound BoundFunctionThis) in + let (_, errs) = Insert_type_utils.validate_type ~size_limit:1000 t in + assert_equal ~ctxt ~printer:(fun _ -> "unit") (List.length errs) 0 ); + (* Invalid type (number | any(unsound)) - raises exception *) + ( "Any_unsound_unresolved_type" + >:: fun ctxt -> + Insert_type_utils.( + let t = Union (Num None, Any (Unsound UnresolvedType), []) in + let (_, errs) = validate_type ~size_limit:1000 t in + assert_equal ~ctxt ~printer:(fun _ -> "unit") errs [Any_Unsound UnresolvedType]) ); + (* Type too big - raises exception *) + ( "Type_too_big" + >:: fun ctxt -> + Insert_type_utils.( + let t = Union (Num None, Num None, []) in + let (_, errs) = validate_type ~size_limit:2 t in + assert_equal + ~ctxt + ~printer:(fun _ -> "unit") + errs + [TooBig { size_limit = 2; size = Some 3 }]) ); + ] diff --git a/src/services/type_info/autofix_exports.ml b/src/services/type_info/autofix_exports.ml new file mode 100644 index 00000000000..91ad8ca7df2 --- /dev/null +++ b/src/services/type_info/autofix_exports.ml @@ -0,0 +1,46 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +module LocSet = Loc_collections.LocSet + +let set_of_fixable_signature_verification_locations file_sig = + File_sig.With_Loc.( + Signature_builder_deps.Error.( + let tolerable_errors = file_sig.File_sig.With_Loc.tolerable_errors in + let add_fixable_sig_ver_error acc = function + | SignatureVerificationError + ( ExpectedAnnotation (loc, _) + | UnexpectedExpression (loc, _) + | UnexpectedObjectKey (loc, _) + | UnexpectedObjectSpread (loc, _) + | EmptyArray loc + | EmptyObject loc + | UnexpectedArraySpread (loc, _) ) -> + LocSet.add loc acc + | _ -> acc + in + List.fold_left add_fixable_sig_ver_error LocSet.empty tolerable_errors)) + +let fix_signature_verification_error_at_loc ~full_cx ~file_sig ~typed_ast = + Insert_type.( + insert_type + ~full_cx + ~file_sig + ~typed_ast + ~expand_aliases:false + ~omit_targ_defaults:false + ~strict:false + ~ambiguity_strategy:Autofix_options.Generalize) + +let fix_signature_verification_errors ~full_cx ~file_sig ~typed_ast = + Insert_type.( + let do_it = fix_signature_verification_error_at_loc ~full_cx ~file_sig ~typed_ast in + let try_it loc (ast, it_errs) = + try (do_it ast loc, it_errs) + with FailedToInsertType err -> (ast, error_to_string err :: it_errs) + in + (fun ast locs -> LocSet.fold try_it locs (ast, []))) diff --git a/src/services/type_info/dune b/src/services/type_info/dune new file mode 100644 index 00000000000..4ca0fa11f76 --- /dev/null +++ b/src/services/type_info/dune @@ -0,0 +1,12 @@ +(library + (name flow_service_type_info) + (wrapped false) + (libraries + flow_common + flow_common_lsp_conversions + flow_parser + flow_parser_utils + flow_server_env + flow_service_inference + ) +) diff --git a/src/services/type_info/insert_type.ml b/src/services/type_info/insert_type.ml new file mode 100644 index 00000000000..78179f639ae --- /dev/null +++ b/src/services/type_info/insert_type.ml @@ -0,0 +1,506 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module Utils = Insert_type_utils + +type unexpected = + | UnknownTypeAtPoint of Loc.t + | FailedToSerialize of { + ty: Ty.t; + error_message: string; + } + | FailedToNormalizeNoMatch + +type expected = + | TypeAnnotationAtPoint of { + location: Loc.t; + type_ast: (Loc.t, Loc.t) Flow_ast.Type.t; + } + | InvalidAnnotationTarget of Loc.t + | UnsupportedAnnotation of { + location: Loc.t; + error_message: string; + } + | MulipleTypesPossibleAtPoint of { + generalized: (Loc.t, Loc.t) Flow_ast.Type.t; + specialized: (Loc.t, Loc.t) Flow_ast.Type.t; + } + | FailedToValidateType of { + error: Utils.validation_error; + error_message: string; + } + | FailedToTypeCheck of Errors.ConcreteLocPrintableErrorSet.t + | FailedToNormalize of (Loc.t * string) + +type errors = + | Unexpected of unexpected + | Expected of expected + +exception FailedToInsertType of errors + +let expected err = FailedToInsertType (Expected err) + +let unexpected err = FailedToInsertType (Unexpected err) + +exception FoundAmbiguousType + +class use_upper_bound_mapper = + object (this) + inherit [_] Ty.endo_ty as super + + method! on_Bot () t = + Ty.( + function + | NoLowerWithUpper (SomeKnownUpper ub) -> this#on_t () ub + | b -> super#on_Bot () t b) + end + +class allow_temporary_types_mapper = + object (this) + inherit use_upper_bound_mapper as super + + method! 
on_Arr () t = + function + | Ty.{ arr_literal = true; arr_elt_t; _ } -> + Ty.Generic + (Utils.temporary_arraylit_symbol, Ty.TypeAliasKind, Some [this#on_t () arr_elt_t]) + | arr -> super#on_Arr () t arr + + method! on_Obj () t = + function + | Ty.{ obj_literal = true; _ } as obj -> + let obj = { obj with Ty.obj_literal = false } in + Ty.Generic + ( Utils.temporary_objectlit_symbol, + Ty.TypeAliasKind, + Some [super#on_Obj () (Ty.Obj obj) obj] ) + | o -> super#on_Obj () t o + end + +let allow_temporary_types = (new allow_temporary_types_mapper)#on_t () + +class fail_on_ambiguity_mapper = + object + inherit use_upper_bound_mapper as super + + method! on_Num () t = + function + | Some _lit -> raise FoundAmbiguousType + | n -> super#on_Num () t n + + method! on_Bool () t = + function + | Some _lit -> raise FoundAmbiguousType + | n -> super#on_Bool () t n + + method! on_Str () t = + function + | Some _lit -> raise FoundAmbiguousType + | n -> super#on_Str () t n + + method! on_Arr () t = + function + | Ty.{ arr_literal = true; _ } -> raise FoundAmbiguousType + | arr -> super#on_Arr () t arr + + method! on_Obj () t = + function + | Ty.{ obj_literal = true; _ } -> raise FoundAmbiguousType + | obj -> super#on_Obj () t obj + end + +let fail_on_ambiguity = (new fail_on_ambiguity_mapper)#on_t () + +class generalize_temporary_types_mapper = + object + inherit use_upper_bound_mapper as super + + method! on_Num () t = + function + | Some _lit -> Ty.Num None + | n -> super#on_Num () t n + + method! on_Bool () t = + function + | Some _lit -> Ty.Bool None + | n -> super#on_Bool () t n + + method! on_Str () t = + function + | Some _lit -> Ty.Str None + | n -> super#on_Str () t n + + method! on_Arr () t = + function + | Ty.{ arr_literal = true; _ } as arr -> + let arr = Ty.{ arr with arr_literal = false } in + super#on_Arr () (Ty.Arr arr) arr + | arr -> super#on_Arr () t arr + + method! on_Obj () t = + function + | Ty.{ obj_exact = true; _ } as obj -> + let obj = Ty.{ obj with obj_exact = false } in + super#on_Obj () (Ty.Obj obj) obj + | obj -> super#on_Obj () t obj + end + +let generalize_temporary_types = (new generalize_temporary_types_mapper)#on_t () + +class allow_temporary_arr_and_obj_types_mapper = + object (this) + inherit generalize_temporary_types_mapper as super + + method! on_Arr () t = + function + | Ty.{ arr_literal = true; arr_elt_t; _ } -> + Ty.Generic + (Utils.temporary_arraylit_symbol, Ty.TypeAliasKind, Some [this#on_t () arr_elt_t]) + | arr -> super#on_Arr () t arr + + method! on_Obj () t = + function + | Ty.{ obj_literal = true; _ } as obj -> + let obj = { obj with Ty.obj_literal = false } in + Ty.Generic + ( Utils.temporary_objectlit_symbol, + Ty.TypeAliasKind, + Some [super#on_Obj () (Ty.Obj obj) obj] ) + | o -> super#on_Obj () t o + end + +let allow_temporary_arr_and_obj_types = (new allow_temporary_arr_and_obj_types_mapper)#on_t () + +class specialize_temporary_types_mapper = + object + inherit use_upper_bound_mapper as super + + method! on_Num () t = + function + | Some lit -> Ty.NumLit lit + | n -> super#on_Num () t n + + method! on_Bool () t = + function + | Some lit -> Ty.BoolLit lit + | n -> super#on_Bool () t n + + method! on_Str () t = + function + | Some lit -> Ty.StrLit lit + | n -> super#on_Str () t n + end + +let specialize_temporary_types = (new specialize_temporary_types_mapper)#on_t () + +class fixme_ambiguous_types_mapper = + object + inherit fail_on_ambiguity_mapper as super + + method! 
on_Num () t = + function + | Some _ -> Utils.Builtins.flowfixme + | n -> super#on_Num () t n + + method! on_Bool () t = + function + | Some _ -> Utils.Builtins.flowfixme + | n -> super#on_Bool () t n + + method! on_Str () t = + function + | Some _ -> Utils.Builtins.flowfixme + | n -> super#on_Str () t n + + method! on_Arr () t = + function + | Ty.{ arr_literal = true; _ } -> Utils.Builtins.flowfixme + | arr -> super#on_Arr () t arr + + method! on_Obj () t = + function + | Ty.{ obj_literal = true; _ } -> Utils.Builtins.flowfixme + | obj -> super#on_Obj () t obj + end + +let fixme_ambiguous_types = (new fixme_ambiguous_types_mapper)#on_t () + +let simplify = Ty_utils.simplify_type ~merge_kinds:true ~sort:true + +(* Generate an equivalent Flow_ast.Type *) +let serialize ?(imports_react = false) loc ty = + (new Utils.stylize_ty_mapper ~imports_react ())#on_t loc ty + |> simplify + |> Ty_serializer.type_ + |> function + | Ok ast -> Utils.patch_up_type_ast ast + | Error msg -> raise (unexpected (FailedToSerialize { ty; error_message = msg })) + +let remove_ambiguous_types ~ambiguity_strategy ty loc = + Autofix_options.( + match ambiguity_strategy with + | Fail -> + begin + try fail_on_ambiguity ty + with FoundAmbiguousType -> + raise + @@ expected + @@ MulipleTypesPossibleAtPoint + { + specialized = specialize_temporary_types ty |> serialize loc; + generalized = generalize_temporary_types ty |> serialize loc; + } + end + | Temporary -> allow_temporary_arr_and_obj_types ty + | Generalize -> generalize_temporary_types ty + | Specialize -> specialize_temporary_types ty + | Fixme -> fixme_ambiguous_types ty + | Suppress -> Utils.Builtins.flowfixme) + +(* This class maps each node that contains the target until a node is contained + by the target *) +class mapper ?(size_limit = 30) ~ambiguity_strategy ~strict ~normalize ~ty_lookup target = + let target_is_point = Utils.is_point target in + object (this) + inherit [Loc.t] Flow_ast_contains_mapper.mapper as super + + method private target_contains loc = Loc.contains target loc + + method private target_contained_by loc = Loc.contains loc target + + method private is_target loc = Loc.equal target loc + + method loc_annot_contains_target = this#target_contained_by + + method private synth_type location = + let scheme = ty_lookup location in + let ty = normalize location scheme in + begin + match Utils.validate_type ~size_limit ty with + | (_, error :: _) -> + (* TODO surface all errors *) + raise + @@ expected + @@ FailedToValidateType { error; error_message = Utils.serialize_validation_error error } + | (_, []) -> () + end; + let ty = remove_ambiguous_types ~ambiguity_strategy ty location in + (location, serialize ~imports_react:true location ty) + + method private synth_type_annotation_hint loc = Flow_ast.Type.Available (this#synth_type loc) + + (* If a type is missing and in the range of target then add a type annotation hint *) + method private update_type_annotation_hint ?type_loc ?(check_loc = false) annot = + Flow_ast.Type.( + match annot with + | Missing location when (not check_loc) || this#target_contained_by location -> + let type_loc = + match type_loc with + | Some type_loc -> type_loc + | None -> location + in + this#synth_type_annotation_hint type_loc + | Available (location, type_ast) when (not check_loc) || this#target_contained_by location + -> + raise @@ expected @@ TypeAnnotationAtPoint { location; type_ast } + | _ -> annot) + + method! type_annotation_hint = this#update_type_annotation_hint ?type_loc:None ~check_loc:true + + method! 
class_extends location (extends : ('loc, 'loc) Flow_ast.Class.Extends.t') = + match extends with + | { Flow_ast.Class.Extends.targs = None; _ } -> super#class_extends location extends + | _ -> + raise + @@ expected + @@ UnsupportedAnnotation { location; error_message = "Classes with type arguments" } + + method! function_param_pattern node = + Flow_ast.Pattern.( + Flow_ast.Pattern.Identifier.( + match node with + | (loc, Identifier ({ annot; _ } as id)) + when this#is_target loc || (target_is_point && this#target_contained_by loc) -> + if strict then + raise + @@ expected + @@ UnsupportedAnnotation + { location = loc; error_message = "Function parameter in strict mode." } + else + let annot = this#update_type_annotation_hint annot in + (loc, Identifier { id with annot }) + | _ -> super#function_param_pattern node)) + + method! class_element elem = + Flow_ast.Class.Body.( + Flow_ast.Class.Property.( + Flow_ast.Expression.Object.Property.( + let update_property loc prop annot = + let annot = this#update_type_annotation_hint annot in + Property (loc, { prop with annot }) + in + match elem with + | PrivateField (location, _) when this#is_target location -> + raise + @@ expected + @@ UnsupportedAnnotation { location; error_message = "Private field" } + | Property (loc, ({ annot; _ } as prop)) when this#is_target loc -> + update_property loc prop annot + | Property + ( loc, + ( { + key = Literal (kloc, _) | Identifier (kloc, _) | PrivateName (kloc, _); + annot; + _; + } as prop ) ) + when this#is_target kloc || (target_is_point && this#target_contained_by kloc) -> + if strict then + raise + @@ expected + @@ UnsupportedAnnotation + { location = kloc; error_message = "property key in strict mode" } + else + update_property loc prop annot + | _ -> super#class_element elem))) + + method! variable_declarator ~kind decl = + Flow_ast.Statement.VariableDeclaration.( + Flow_ast.Statement.VariableDeclaration.Declarator.( + Flow_ast.Pattern.( + Flow_ast.Pattern.Identifier.( + match (kind, decl) with + (* In `const x = exp;` the error appears on exp *) + | (Const, (dloc, ({ id = (iloc, Identifier id); init = Some (type_loc, _) } as decl))) + (* Use is_target of initialization expression location + because const signature verification errors point to expression *) + when this#is_target type_loc -> + let { annot; _ } = id in + let annot = this#update_type_annotation_hint ~type_loc annot in + (dloc, { decl with id = (iloc, Identifier { id with annot }) }) + | _ -> super#variable_declarator ~kind decl)))) + + method! variable_declarator_pattern ~kind node = + Flow_ast.Pattern.( + Flow_ast.Pattern.Identifier.( + Flow_ast.Statement.VariableDeclaration.( + let (loc, patt) = node in + if not (this#target_contained_by loc) then + node + else + match (patt, kind) with + (* In `const x = exp;` for signature varification errors the error appears on the exp portion. + When strict we only look for that error. *) + | (Identifier _, Const) when strict -> super#variable_declarator_pattern ~kind node + | (Identifier ({ name; annot; _ } as id), (Var | Let | Const)) + when target_is_point || this#is_target loc -> + let (type_loc, _) = name in + let annot = this#update_type_annotation_hint ~type_loc annot in + (loc, Identifier { id with annot }) + | _ -> super#variable_declarator_pattern ~kind node))) + + method! 
expression ((l, _) as e) = + Flow_ast.Expression.( + if this#target_contained_by l then + if this#is_target l then + (l, TypeCast TypeCast.{ expression = e; annot = this#synth_type l }) + else + super#expression e + else + e) + + method! program p = + let p' = super#program p in + if p == p' then raise @@ expected @@ InvalidAnnotationTarget target; + p' + end + +let type_lookup_at_location typed_ast loc = + match Typed_ast_utils.find_exact_match_annotation typed_ast (ALoc.of_loc loc) with + | Some p -> p + | None -> raise @@ unexpected @@ UnknownTypeAtPoint loc + +let normalize ~full_cx ~file_sig ~typed_ast ~expand_aliases ~omit_targ_defaults loc scheme = + Query_types.( + match + insert_type_normalize + ~full_cx + ~file_sig + ~typed_ast + ~expand_aliases + ~omit_targ_defaults + loc + scheme + with + | FailureNoMatch -> raise @@ unexpected @@ FailedToNormalizeNoMatch + | FailureUnparseable (loc, _, msg) -> raise @@ expected @@ FailedToNormalize (loc, msg) + | Success (_, ty) -> ty) + +let type_to_string t = + Js_layout_generator.type_ t + |> Pretty_printer.print ~source_maps:None ~skip_endline:true + |> Source.contents + +let unexpected_error_to_string = function + | UnknownTypeAtPoint _ -> "Couldn't locate a type for this annotation" + | FailedToSerialize { error_message = msg; _ } -> "couldn't print type: " ^ msg + | FailedToNormalizeNoMatch -> "couldn't print type: couldn't locate a type for this annotation" + +let expected_error_to_string = function + | TypeAnnotationAtPoint { location; type_ast } -> + "Preexisiting type annotation at " + ^ Loc.to_string_no_source location + ^ ": " + ^ type_to_string type_ast + | InvalidAnnotationTarget location -> + "Did not find an annotation at " ^ Loc.to_string_no_source location + | UnsupportedAnnotation { location; error_message } -> + error_message ^ " found at " ^ Loc.to_string_no_source location ^ " is not currently supported" + | FailedToTypeCheck _ -> "Failed to typecheck file" + | MulipleTypesPossibleAtPoint { generalized; specialized } -> + "Multiple types possible at point:\n" + ^ " generalized type: " + ^ type_to_string generalized + ^ "\n" + ^ " specialized type: " + ^ type_to_string specialized + ^ "\n" + | FailedToValidateType { error = Utils.TooBig { size_limit; size }; _ } -> + "The type that would be generated (size: " + ^ begin + match size with + | Some size -> string_of_int size + | None -> ">" ^ string_of_int Utils.validate_type_too_big_max + end + ^ ") exceeds the size limit (" + ^ string_of_int size_limit + ^ ")" + | FailedToValidateType { error_message = msg; _ } -> "Failed to validate type: " ^ msg + | FailedToNormalize (_, msg) -> "couldn't print type: " ^ msg + +let error_to_string = function + | Unexpected err -> "flow autofix insert-type: " ^ unexpected_error_to_string err + | Expected err -> "flow autofix insert-type: " ^ expected_error_to_string err + +let insert_type + ~full_cx + ~file_sig + ~typed_ast + ~expand_aliases + ~omit_targ_defaults + ~strict + ~ambiguity_strategy + ast + target = + let file_sig = File_sig.abstractify_locs file_sig in + let ty_lookup = type_lookup_at_location typed_ast in + let normalize = normalize ~full_cx ~file_sig ~typed_ast ~expand_aliases ~omit_targ_defaults in + (new mapper ~normalize ~ty_lookup ~strict ~ambiguity_strategy target)#program ast + +let mk_diff ast new_ast = Flow_ast_differ.(program Standard ast new_ast) + +let mk_patch ast new_ast file_content = + Replacement_printer.mk_patch_ast_differ (mk_diff ast new_ast) ast file_content diff --git 
a/src/services/type_info/insert_type_utils.ml b/src/services/type_info/insert_type_utils.ml new file mode 100644 index 00000000000..fe7b8401db5 --- /dev/null +++ b/src/services/type_info/insert_type_utils.ml @@ -0,0 +1,286 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type validation_error = + | TooBig of { + size_limit: int; + size: int option; + } + | Anonymous of Loc.t + | Any_Unsound of Ty.unsoundness_kind + | Recursive + | ReactElementConfigFunArg + | Empty_MatchingPropT + | Empty_TypeDestructorTriggerT of Loc.t + | Empty_SomeUnknownUpper of string + +let serialize_validation_error = function + | TooBig _ -> "TooBig" + | Anonymous loc -> Utils_js.spf "Anonymous (def: %s)" (Loc.to_string_no_source loc) + | Any_Unsound kind -> Utils_js.spf "Any_Unsound %s" (Ty_debug.dump_any_unsoundness_kind kind) + | Recursive -> "Recursive" + | ReactElementConfigFunArg -> "ReactElementConfigFunArg" + | Empty_MatchingPropT -> "Empty_MatchingPropT" + | Empty_TypeDestructorTriggerT loc -> + Utils_js.spf "Empty_TypeDestructorTriggerT (def: %s)" (Loc.to_string_no_source loc) + | Empty_SomeUnknownUpper u -> Utils_js.spf "Empty_SomeUnknownUpper (use: %s)" u + +let warn_shadow_prop ?(strip_root = None) name loc = + Hh_logger.warn "ShadowProp %s at %s" name (Reason.string_of_loc ?strip_root loc) + +exception Fatal of validation_error + +(* Raise an validation_error if there isn't a user facing type that is equivalent to the Ty *) +class type_validator_visitor = + object + inherit [_] Ty.endo_ty as super + + method! on_t env t = + match t with + (* Recursive types unsupported *) + | Ty.Mu _ + | Ty.TVar _ -> + env := Recursive :: !env; + Ty.explicit_any + | Ty.Bot (Ty.NoLowerWithUpper (Ty.SomeUnknownUpper u)) -> + env := Empty_SomeUnknownUpper u :: !env; + Ty.explicit_any + | Ty.Bot Ty.EmptyMatchingPropT -> + env := Empty_MatchingPropT :: !env; + Ty.explicit_any + | Ty.Bot (Ty.EmptyTypeDestructorTriggerT loc) -> + env := Empty_TypeDestructorTriggerT (ALoc.to_loc_exn loc) :: !env; + Ty.explicit_any + | Ty.Any + (Ty.Unsound + ( ( Ty.Constructor | Ty.DummyStatic | Ty.Existential | Ty.Exports + | Ty.FunctionPrototype | Ty.InferenceHooks | Ty.InstanceOfRefinement | Ty.Merged + | Ty.ResolveSpread | Ty.Unchecked | Ty.Unimplemented | Ty.UnresolvedType + | Ty.WeakContext ) as kind )) -> + env := Any_Unsound kind :: !env; + Ty.explicit_any + | Ty.Utility (Ty.ReactElementConfigType (Ty.Fun _)) -> + env := ReactElementConfigFunArg :: !env; + Ty.explicit_any + | Ty.Generic (symbol, _, _) + | Ty.ClassDecl (symbol, _) + | Ty.InterfaceDecl (symbol, _) + | Ty.Module (Some symbol, _) -> + let { Ty.anonymous; def_loc; _ } = symbol in + if anonymous then ( + env := Anonymous (ALoc.to_loc_exn def_loc) :: !env; + Ty.explicit_any + ) else + super#on_t env t + | _ -> super#on_t env t + end + +let validate_type_too_big_max = 1000 + +let validate_type ~size_limit t = + match Ty_utils.size_of_type ~max:size_limit t with + | None -> + let max = validate_type_too_big_max in + let error = TooBig { size_limit; size = Ty_utils.size_of_type ~max t } in + (t, [error]) + | Some _ -> + let env = ref [] in + let t = (new type_validator_visitor)#on_t env t in + (t, !env) + +(** Add named type parameter to ensure a Flow_ast.Type can be parsed after being + * pretty printed. 
+ * + * This was originally in annotate exports and may not be necissary now if this + * issue with the pretty-printer/parser has been fixed. + *) + +(** WARNING! Hard-coded fixes ahead! + * + * The level of Flow_ast.Type.t nodes: These involve fixes without which the + * generated types might be unparseable. + * + * This fix may be possible to avoid if we fix either the pretty printer + * or the parser. Not sure which would actually need to be changed. + *) +class mapper_type_printing_hardcoded_fixes = + object (this) + inherit [Loc.t] Flow_ast_mapper.mapper as super + + method private normalize_function ff = + Flow_ast.Type.Function.( + let { params = (loc, { Params.params; rest }); _ } = ff in + let (normalized_params_rev, _) = + List.fold_left + (fun (p, c) param -> + match param with + | (loc, { Param.name = None; annot; optional }) -> + let normalized_param = + ( loc, + { + Param.name = + Some (Flow_ast_utils.ident_of_source (loc, Printf.sprintf "_%d" c)); + annot; + optional; + } ) + in + (normalized_param :: p, c + 1) + | _ -> (param :: p, c + 1)) + ([], 0) + params + in + let normalized_params = List.rev normalized_params_rev in + { ff with params = (loc, { Params.params = normalized_params; rest }) }) + + method private type_generic_normalize (t : ('loc, 'loc) Flow_ast.Type.t) = + super#type_ + (match t with + | (loc_f, Flow_ast.Type.Function ff) -> + let nf = this#normalize_function ff in + (loc_f, Flow_ast.Type.Function nf) + | _ -> t) + + method! type_parameter_instantiation (pi : ('loc, 'loc) Flow_ast.Type.ParameterInstantiation.t) + = + let (loc, targs) = pi in + let targs' = Core_list.map ~f:this#type_generic_normalize targs in + if targs' == targs then + pi + else + (loc, targs') + end + +let patch_up_type_ast = (new mapper_type_printing_hardcoded_fixes)#type_ + +(* returns true if a file_key is a libfile of name react.js *) +let is_react_file_key = function + | File_key.LibFile x -> Filename.basename x = "react.js" + | _ -> false + +(* returns true if a location has a source of react.js *) +let is_react_loc loc = + match ALoc.source loc with + | Some f -> is_react_file_key f + | _ -> false + +(* Apply stylistic changes to react types *) +class patch_up_react_mapper ?(imports_react = false) () = + object (this) + inherit [_] Ty.endo_ty as super + + method! on_t loc t = + match t with + (* If 'react' is not imported, then we treat the symbol as Remote, so that + * it is imported with the same mechanism we import other Remote symbols. + * Otherwise, we refer to these names as 'React.NAME'. *) + | Ty.Generic + ( ( { + Ty.name = + ( "AbstractComponent" | "ChildrenArray" | "ComponentType" | "Config" | "Context" + | "Element" | "ElementConfig" | "ElementProps" | "ElementRef" | "ElementType" + | "Key" | "Node" | "Portal" | "Ref" | "StatelessFunctionalComponent" ) as name; + provenance = Ty_symbol.Library; + def_loc; + _; + } as symbol ), + kind, + args_opt ) + when is_react_loc def_loc -> + let args_opt = Flow_ast_mapper.map_opt (ListUtils.ident_map (this#on_t loc)) args_opt in + let symbol = + if imports_react then + { symbol with Ty.name = "React." ^ name } + else + { symbol with Ty.provenance = Ty.Remote { Ty.imported_as = None } } + in + Ty.Generic (symbol, kind, args_opt) + | _ -> super#on_t loc t + + method! 
on_prop loc prop = + let prop = + match prop with + | Ty.NamedProp (name, named_prop) when Reason.is_internal_name name -> + warn_shadow_prop name loc; + + (* Shadow props appear as regular props *) + let name = String.sub name 1 (String.length name - 1) in + Ty.NamedProp (name, named_prop) + | prop -> prop + in + super#on_prop loc prop + end + +let reverse_append_all : 'a list list -> 'a list = List.fold_left List.rev_append [] + +type partition_acc = { + bools: Ty.t list; + nums: Ty.t list; + strings: Ty.t list; + others: Ty.t list; +} + +class stylize_ty_mapper ?(imports_react = false) () = + object + inherit patch_up_react_mapper ~imports_react () as super + + (* remove literals when the base type is in the union, and simplify true | false to bool *) + (* These simplifications should always be sound *) + method! on_Union loc t _ _ _ = + Ty.( + let filter_union (a : partition_acc) t = + match (t, a) with + (* If element of a base type and the base type is already present in the union + * ignore the element *) + | ((Bool None | BoolLit _), { bools = [Bool None]; _ }) + | ((Num None | NumLit _), { nums = [Num None]; _ }) + | ((Str None | StrLit _), { strings = [Str None]; _ }) -> + a + (* Otherwise, if we see the base element automatically discard all other elements *) + | (Bool None, _) -> { a with bools = [t] } + | (Num None, _) -> { a with nums = [t] } + | (Str None, _) -> { a with strings = [t] } + (* Otherwise, if it is bool check to see if we have enumerated both element *) + | (BoolLit true, { bools = [BoolLit false]; _ }) + | (BoolLit false, { bools = [BoolLit true]; _ }) -> + { a with bools = [Bool None] } + (* Otherwise, add literal types to the union *) + | (BoolLit _, { bools; _ }) -> { a with bools = t :: bools } + | (NumLit _, { nums; _ }) -> { a with nums = t :: nums } + | (StrLit _, { strings; _ }) -> { a with strings = t :: strings } + (* Note, any temporary base types get passed through with others *) + | (t, { others; _ }) -> { a with others = t :: others } + in + let empty = { bools = []; nums = []; strings = []; others = [] } in + let { bools; nums; strings; others } = Nel.fold_left filter_union empty (bk_union t) in + match reverse_append_all [others; strings; nums; bools] with + | [] -> failwith "Impossible! this only removes elements when others are added/present" + | [t] -> super#on_t loc t + | t1 :: t2 :: ts -> super#on_Union loc (Union (t1, t2, ts)) t1 t2 ts) + end + +(* Returns true if the location given a zero width location. *) +let is_point loc = Loc.(loc.start = loc._end) + +let temporary_objectlit_symbol = + { + Ty.provenance = Ty.Builtin; + def_loc = ALoc.none; + name = "$TEMPORARY$object"; + anonymous = false; + } + +let temporary_arraylit_symbol = + { Ty.provenance = Ty.Builtin; def_loc = ALoc.none; name = "$TEMPORARY$array"; anonymous = false } + +module Builtins = struct + let flowfixme = Ty.Generic (Ty_symbol.builtin_symbol "$FlowFixMe", Ty.TypeAliasKind, None) + + let flowfixme_empty = + Ty.Generic (Ty_symbol.builtin_symbol "$FlowFixMeEmpty", Ty.TypeAliasKind, None) + + let empty = Ty.Bot Ty.EmptyType +end diff --git a/src/services/type_info/suggest.ml b/src/services/type_info/suggest.ml index e3e91726af8..fced6e3b89f 100644 --- a/src/services/type_info/suggest.ml +++ b/src/services/type_info/suggest.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -9,146 +9,126 @@ module Ast = Flow_ast type warning = | MissingFromTypeTables - | NormalizerError of Ty_normalizer.error + | NormalizerError of string | NonFunctionType of string | Serializer of string | SkipEmpty let warning_desc_to_string = function - | MissingFromTypeTables -> - Utils_js.spf "Location was not found in type tables." - | NormalizerError err -> - Utils_js.spf "Normalizer error:\n%s" (Ty_normalizer.error_to_string err) - | NonFunctionType ty_str -> - Utils_js.spf "Expected function type but got: %s" ty_str - | Serializer err_msg -> - Utils_js.spf "Type serializer failed with:\n%s" err_msg - | SkipEmpty -> - Utils_js.spf "Inferred type is empty." - -class visitor ~cxs = object(this) - inherit [unit] Flow_ast_visitor.visitor ~init:() as super - - val mutable _warnings = Errors.ErrorSet.empty - - method private warn loc (w: warning) = - let open Errors in - let desc = warning_desc_to_string w in - _warnings <- ErrorSet.add (mk_error (loc |> ALoc.of_loc) (Friendly.message_of_string desc)) _warnings; - None - - method warnings () = _warnings - - method private inferred_type ~search ~index loc = - let search_loc = search loc in - match Utils_js.LocMap.get search_loc cxs with - | Some (Ok ty) -> ( - match index ty with - | Ok Ty.Bot -> - this#warn loc SkipEmpty - | Ok ty -> ( - match Ty_serializer.type_ ty with - | Ok type_ast -> - Some (Loc.none, type_ast) - | Error desc -> this#warn loc (Serializer desc) - ) - | Error err -> this#warn loc err - ) - | Some (Error err) -> this#warn loc (NormalizerError err) - | None -> this#warn loc MissingFromTypeTables - - method! expression (expr: (Loc.t, Loc.t) Ast.Expression.t) = - let open Ast.Expression in - match super#expression expr with - | loc, Function x -> - Flow_ast_mapper.id (this#function_return loc) x expr (fun x -> loc, Function x) - | loc, ArrowFunction x -> - Flow_ast_mapper.id (this#arrow_return loc) x expr (fun x -> loc, ArrowFunction x) - | expr -> expr - - method! statement (stmt: (Loc.t, Loc.t) Ast.Statement.t) = - let open Ast.Statement in - match super#statement stmt with - | (loc, FunctionDeclaration x) -> - Flow_ast_mapper.id (this#function_return loc) x stmt (fun x -> loc, FunctionDeclaration x) - | stmt -> stmt - - method! object_property (prop: (Loc.t, Loc.t) Ast.Expression.Object.Property.t) = - let open Ast.Expression.Object.Property in - let prop = super#object_property prop in - match prop with - | loc, Method ({ value = (fn_loc, fn); _ } as meth) -> - (* NOTE here we are indexing the type tables through the location of + | MissingFromTypeTables -> Utils_js.spf "Location was not found in type tables." + | NormalizerError err -> Utils_js.spf "Normalizer error:\n%s" err + | NonFunctionType ty_str -> Utils_js.spf "Expected function type but got: %s" ty_str + | Serializer err_msg -> Utils_js.spf "Type serializer failed with:\n%s" err_msg + | SkipEmpty -> Utils_js.spf "Inferred type is empty." 
+ +class visitor ~ty_query = + object (this) + inherit [unit, Loc.t] Flow_ast_visitor.visitor ~init:() as super + + val mutable _warnings = Errors.ConcreteLocPrintableErrorSet.empty + + method private warn loc (w : warning) = + Errors.( + let desc = warning_desc_to_string w in + let err = mk_error loc (Friendly.message_of_string desc) in + _warnings <- ConcreteLocPrintableErrorSet.add err _warnings; + None) + + method warnings () = _warnings + + method private inferred_type ?blame_loc ?annotate_bottom:(ann_bot = false) loc = + let blame_loc = + match blame_loc with + | Some bloc -> bloc + | None -> loc + in + match ty_query loc with + | Query_types.Success (_, ty) -> + begin + match ty with + | Ty.Bot _ when not ann_bot -> this#warn blame_loc SkipEmpty + | _ -> + begin + match Ty_serializer.type_ ty with + | Ok type_ast -> Some (Loc.none, type_ast) + | Error desc -> this#warn blame_loc (Serializer desc) + end + end + | Query_types.FailureUnparseable (_, _, msg) -> this#warn blame_loc (NormalizerError msg) + | Query_types.FailureNoMatch -> this#warn blame_loc MissingFromTypeTables + + method! expression (expr : (Loc.t, Loc.t) Ast.Expression.t) = + Ast.Expression.( + let expr' = super#expression expr in + match expr' with + | (loc, Function x) -> + Flow_ast_mapper.id (this#callable_return loc) x expr' (fun x -> (loc, Function x)) + | (loc, ArrowFunction x) -> + Flow_ast_mapper.id (this#callable_return loc) x expr' (fun x -> (loc, ArrowFunction x)) + | _ -> expr') + + method! statement (stmt : (Loc.t, Loc.t) Ast.Statement.t) = + Ast.Statement.( + let stmt' = super#statement stmt in + match stmt' with + | (loc, FunctionDeclaration x) -> + Flow_ast_mapper.id (this#callable_return loc) x stmt' (fun x -> + (loc, FunctionDeclaration x)) + | _ -> stmt') + + method! object_property (prop : (Loc.t, Loc.t) Ast.Expression.Object.Property.t) = + Ast.Expression.Object.Property.( + let prop' = super#object_property prop in + match prop' with + | (loc, Method { value = (fn_loc, fn); key }) -> + (* NOTE here we are indexing the type tables through the location of the entire method. The coverage tables should account for that. Alternatively, we could have used the location of the identifier, that gets logged in the type_info tables. (This would require some deeper unfolding.) For the moment we need both tables, but revisit this if this changes. *) - let fn' = this#method_return fn_loc fn in - if fn == fn' then prop - else (loc, Method { meth with value = (fn_loc, fn') }) - | _ -> prop - - method! class_method loc (meth: (Loc.t, Loc.t) Ast.Class.Method.t') = - let open Ast.Class.Method in - let open Ast.Expression.Object.Property in - let meth = super#class_method loc meth in - let { key; value = (loc, func); _ } = meth in - match key with - | Identifier (id_loc, _) -> - let func' = this#method_return id_loc func in - { meth with value = (loc, func') } - | _ -> meth - - method! 
function_param_pattern (expr: (Loc.t, Loc.t) Ast.Pattern.t) = - let open Ast.Pattern in - let (loc, patt) = expr in - let patt' = match patt with - | Identifier { Identifier.name; annot; optional } -> ( - match annot with - | None -> - let annot = this#inferred_type ~search:(fun x -> x) - ~index:(fun x -> Ok x) loc in - Identifier { Identifier.name; annot; optional } - | Some _ -> patt - ) - | _ -> - let _, patt' = super#function_param_pattern expr in - patt' - in - if patt == patt' then expr else (loc, patt') - - method arrow_return loc func = - this#callable_return ~search:(fun x -> x) loc func - - method method_return loc func = - this#callable_return ~search:(fun x -> x) loc func - - (* Constructs that have keyword 'function', but may be missing a name. *) - method function_return loc func = - let open Ast.Function in - let { id; _ } = func in - let search = Type_table.function_decl_loc id in - this#callable_return ~search loc func - - method callable_return ~search loc func = - let open Ast.Function in - let { return; _ } = func in - let return' = - match return with - | Available _ -> return - | Missing _ -> - let index = Ty.(function - | Fun { fun_return; _ } -> Ok fun_return - | ty -> Error (NonFunctionType (Ty_printer.string_of_t ty)) - ) in - match this#inferred_type ~search ~index loc with - | Some annot -> Available annot - | None -> Missing loc - in - if return' == return - then func - else { func with return = return' } - -end + let key' = this#object_key key in + let fn' = this#callable_return fn_loc fn in + if key == key' && fn == fn' then + prop' + else + (loc, Method { key = key'; value = (fn_loc, fn') }) + | _ -> prop') + + method! class_method loc (meth : (Loc.t, Loc.t) Ast.Class.Method.t') = + Ast.Class.Method.( + Ast.Expression.Object.Property.( + let meth' = super#class_method loc meth in + let { key; value = (loc, func); _ } = meth' in + match key with + | Identifier (id_loc, _) -> + let func' = this#callable_return id_loc func in + { meth' with value = (loc, func') } + | _ -> meth')) + + method! function_param_pattern (patt : (Loc.t, Loc.t) Ast.Pattern.t) = + Ast.Pattern.( + Identifier.( + let patt' = super#function_param_pattern patt in + match patt' with + | (loc, Identifier ({ annot = Ast.Type.Missing _; _ } as id)) -> + begin + match this#inferred_type loc with + | Some annot -> (loc, Identifier { id with annot = Ast.Type.Available annot }) + | None -> patt' + end + | _ -> patt')) + + method callable_return loc func = + Ast.Function.( + let { return; _ } = func in + match return with + | Ast.Type.Available _ -> func + | Ast.Type.Missing missing_loc -> + begin + match this#inferred_type ~blame_loc:loc ~annotate_bottom:true missing_loc with + | Some annot -> { func with return = Ast.Type.Available annot } + | None -> func + end) + end diff --git a/src/services/type_info/type_info_service.ml b/src/services/type_info/type_info_service.ml index 33beb8369fc..bb190049932 100644 --- a/src/services/type_info/type_info_service.ml +++ b/src/services/type_info/type_info_service.ml @@ -1,73 +1,175 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) open Core_result -let (>|=) = Lwt.(>|=) -let type_at_pos ~options ~workers ~env ~profiling ~expand_aliases file content line col = - Types_js.basic_check_contents ~options ~workers ~env ~profiling content file >|= - function +let ( >|= ) = Lwt.( >|= ) + +let type_at_pos ~options ~env ~profiling ~expand_aliases ~omit_targ_defaults file content line col + = + Types_js.basic_check_contents ~options ~env ~profiling content file + >|= function | Error str -> Error (str, None) | Ok (cx, _info, file_sig, typed_ast) -> let loc = Loc.make file line col in - let json_data, loc, ty = - let mk_data result_str loc ty_json = Hh_json.JSON_Object [ - "result", Hh_json.JSON_String result_str; - "loc", Reason.json_of_loc loc; - "type", ty_json; - ] in + let (json_data, loc, ty) = + let mk_data result_str loc ty_json = + Hh_json.JSON_Object + [ + ("result", Hh_json.JSON_String result_str); + ("loc", Reason.json_of_loc ~offset_table:None loc); + ("type", ty_json); + ] + in Query_types.( - let type_table = Context.type_table cx in let file = Context.file cx in - (* passing in type_table only because it seems necessary for constructing genv *) - let result = type_at_pos_type ~full_cx:cx ~file ~file_sig ~expand_aliases - ~type_table ~typed_ast loc in + let result = + type_at_pos_type + ~full_cx:cx + ~file + ~file_sig:(File_sig.abstractify_locs file_sig) + ~expand_aliases + ~omit_targ_defaults + ~typed_ast + loc + in match result with | FailureNoMatch -> - Hh_json.JSON_Object [ - "result", Hh_json.JSON_String "FAILURE_NO_MATCH" - ], Loc.none, None + (Hh_json.JSON_Object [("result", Hh_json.JSON_String "FAILURE_NO_MATCH")], Loc.none, None) | FailureUnparseable (loc, gt, _) -> let json = Hh_json.JSON_String (Type.string_of_ctor gt) in - mk_data "FAILURE_UNPARSEABLE" loc json, loc, None + (mk_data "FAILURE_UNPARSEABLE" loc json, loc, None) | Success (loc, ty) -> (* TODO use Ty_debug.json_of_t after making it faster using count_calls *) let json = Hh_json.JSON_String (Ty_printer.string_of_t ty) in - mk_data "SUCCESS" loc json, loc, Some ty - ) in - + (mk_data "SUCCESS" loc json, loc, Some ty)) + in Ok ((loc, ty), Some json_data) -let dump_types ~options ~workers ~env ~profiling file content = +let insert_type + ~options + ~env + ~profiling + ~file_key + ~file_content + ~target + ~expand_aliases + ~omit_targ_defaults + ~location_is_strict:strict + ~ambiguity_strategy = + Insert_type.( + Types_js.typecheck_contents ~options ~env ~profiling file_content file_key + >|= function + | (Some (full_cx, ast, file_sig, typed_ast), _, _) -> + begin + try + let new_ast = + Insert_type.insert_type + ~full_cx + ~file_sig + ~typed_ast + ~expand_aliases + ~omit_targ_defaults + ~strict + ~ambiguity_strategy + ast + target + in + Ok (mk_patch ast new_ast file_content) + with FailedToInsertType err -> Error (error_to_string err) + end + | (None, errs, _) -> Error (error_to_string (Expected (FailedToTypeCheck errs)))) + +let autofix_exports ~options ~env ~profiling ~file_key ~file_content = + Autofix_exports.( + Types_js.typecheck_contents ~options ~env ~profiling file_content file_key + >|= function + | (Some (full_cx, ast, file_sig, typed_ast), _, _) -> + let sv_errors = set_of_fixable_signature_verification_locations file_sig in + let fix_sv_errors = fix_signature_verification_errors ~full_cx ~file_sig ~typed_ast in + let (new_ast, it_errs) = fix_sv_errors ast sv_errors in + Ok (Insert_type.mk_patch ast new_ast file_content, it_errs) + | (None, _errs, _) -> Error ":o") + +let dump_types ~options ~env ~profiling file content = (* 
Print type using Flow type syntax *) let printer = Ty_printer.string_of_t in - Types_js.basic_check_contents ~options ~workers ~env ~profiling content file >|= - map ~f:(fun (cx, _info, file_sig, _) -> Query_types.dump_types cx file_sig ~printer) - + Types_js.basic_check_contents ~options ~env ~profiling content file + >|= map ~f:(fun (cx, _info, file_sig, tast) -> + let abs_file_sig = File_sig.abstractify_locs file_sig in + Query_types.dump_types ~printer cx abs_file_sig tast) -let coverage ~options ~workers ~env ~profiling ~force file content = +let coverage ~options ~env ~profiling ~force ~trust file content = let should_check = - if force then true else - let (_, docblock) = - Parsing_service_js.(parse_docblock docblock_max_tokens file content) in + if force then + true + else + let (_, docblock) = Parsing_service_js.(parse_docblock docblock_max_tokens file content) in Docblock.is_flow docblock in - Types_js.basic_check_contents ~options ~workers ~env ~profiling content file >|= - map ~f:(fun (cx, _, file_sig, _) -> Query_types.covered_types cx file_sig ~should_check) + Types_js.basic_check_contents ~options ~env ~profiling content file + >|= map ~f:(fun (cx, _, _, tast) -> + Query_types.covered_types cx ~should_check ~check_trust:trust tast) -let suggest ~options ~workers ~env ~profiling file content = - Types_js.typecheck_contents ~options ~workers ~env ~profiling content file >|= - function - | (Some (cx, ast, file_sig, _), tc_errors, tc_warnings) -> - let cxs = Query_types.suggest_types cx file_sig in - let visitor = new Suggest.visitor ~cxs in - let typed_ast = visitor#program ast in +let suggest ~options ~env ~profiling file_name file_content = + let file_key = File_key.SourceFile file_name in + Types_js.typecheck_contents ~options ~env ~profiling file_content file_key + >|= function + | (Some (cx, ast, file_sig, tast), tc_errors, tc_warnings) -> + let file_sig = File_sig.abstractify_locs file_sig in + let ty_query = Query_types.suggest_types cx file_sig tast in + let visitor = new Suggest.visitor ~ty_query in + let ast_with_suggestions = visitor#program ast in let suggest_warnings = visitor#warnings () in - Ok (tc_errors, tc_warnings, suggest_warnings, typed_ast) - | (None, errors, _) -> - Error errors + let ast_diff = Flow_ast_differ.(program Standard ast ast_with_suggestions) in + let file_patch = + Replacement_printer.mk_patch_ast_differ ast_diff ast_with_suggestions file_content + in + Ok (tc_errors, tc_warnings, suggest_warnings, file_patch) + | (None, errors, _) -> Error errors + +let code_actions_at_loc ~options ~env ~profiling ~params ~file_key ~file_contents ~loc = + Lsp.( + CodeAction.( + CodeActionRequest.( + CodeActionKind.( + let { textDocument; range = _; context } = params in + let uri = TextDocumentIdentifier.(textDocument.uri) in + Types_js.typecheck_contents ~options ~env ~profiling file_contents file_key + >|= function + | (Some (full_cx, ast, file_sig, typed_ast), _, _) -> + Autofix_exports.( + let fixable_locs = set_of_fixable_signature_verification_locations file_sig in + if + contains_kind_opt ~default:true quickfix context.only + && LocSet.mem loc fixable_locs + then + match + fix_signature_verification_error_at_loc ~full_cx ~file_sig ~typed_ast ast loc + with + | new_ast -> + let diff = Insert_type.mk_diff ast new_ast in + let edits = + Replacement_printer.mk_loc_patch_ast_differ diff ast + |> Flow_lsp_conversions.flow_loc_patch_to_lsp_edits + in + Ok + [ + Action + { + CodeAction.title = "insert type annotation"; + kind = quickfix; + (* Handing back the 
diagnostics we were given is a placeholder for + eventually generating the diagnostics for the errors we are fixing *) + diagnostics = CodeActionRequest.(context.diagnostics); + action = EditOnly WorkspaceEdit.{ changes = SMap.of_list [(uri, edits)] }; + }; + ] + else + Ok []) + | _ -> Ok [])))) diff --git a/src/services/type_info/type_info_service.mli b/src/services/type_info/type_info_service.mli index ea7b10bb3d9..57ef831d516 100644 --- a/src/services/type_info/type_info_service.mli +++ b/src/services/type_info/type_info_service.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,21 +7,19 @@ val type_at_pos : options:Options.t -> - workers:MultiWorkerLwt.worker list option -> - env:ServerEnv.env ref -> + env:ServerEnv.env -> profiling:Profiling_js.running -> expand_aliases:bool -> + omit_targ_defaults:bool -> File_key.t -> string -> int -> int -> - ((Loc.t * Ty.t option) * Hh_json.json option, - string * Hh_json.json option) Core_result.t Lwt.t + ((Loc.t * Ty.t option) * Hh_json.json option, string * Hh_json.json option) Core_result.t Lwt.t val dump_types : options:Options.t -> - workers:MultiWorkerLwt.worker list option -> - env:ServerEnv.env ref -> + env:ServerEnv.env -> profiling:Profiling_js.running -> File_key.t -> string -> @@ -29,23 +27,59 @@ val dump_types : val coverage : options:Options.t -> - workers:MultiWorkerLwt.worker list option -> - env:ServerEnv.env ref -> + env:ServerEnv.env -> profiling:Profiling_js.running -> force:bool -> + trust:bool -> File_key.t -> - string -> ((Loc.t * bool) list, string) Core_result.t Lwt.t + string -> + ((Loc.t * Coverage_response.expression_coverage) list, string) Core_result.t Lwt.t val suggest : options:Options.t -> - workers:MultiWorkerLwt.worker list option -> - env:ServerEnv.env ref -> + env:ServerEnv.env -> profiling:Profiling_js.running -> - File_key.t -> string -> - ((Errors.ErrorSet.t * (* Typechecking errors *) - Errors.ErrorSet.t * (* Typechecking warnings *) - Errors.ErrorSet.t * (* Suggest-related warnings (normalization etc.) *) - (Loc.t, Loc.t) Flow_ast.program), (* Annotated program *) - Errors.ErrorSet.t (* Parsing errors *) - ) Core_result.t Lwt.t + string -> + ( Errors.ConcreteLocPrintableErrorSet.t + * (* Typechecking errors *) + Errors.ConcreteLocPrintableErrorSet.t + * (* Typechecking warnings *) + Errors.ConcreteLocPrintableErrorSet.t + * (* Suggest-related warnings (normalization etc.) 
*) + Replacement_printer.patch, + (* Annotated program *) + Errors.ConcreteLocPrintableErrorSet.t (* Parsing errors *) ) + Core_result.t + Lwt.t + +val insert_type : + options:Options.t -> + env:ServerEnv.env -> + profiling:Profiling_js.running -> + file_key:File_key.t -> + file_content:string -> + target:Loc.t -> + expand_aliases:bool -> + omit_targ_defaults:bool -> + location_is_strict:bool -> + ambiguity_strategy:Autofix_options.ambiguity_strategy -> + (Replacement_printer.patch, string) Core_result.t Lwt.t + +val autofix_exports : + options:Options.t -> + env:ServerEnv.env -> + profiling:Profiling_js.running -> + file_key:File_key.t -> + file_content:string -> + (Replacement_printer.patch * string list, string) Core_result.t Lwt.t + +val code_actions_at_loc : + options:Options.t -> + env:ServerEnv.env -> + profiling:Profiling_js.running -> + params:Lsp.CodeActionRequest.params -> + file_key:File_key.t -> + file_contents:string -> + loc:Loc.t -> + (Lsp.CodeAction.command_or_action list, string) Core_result.t Lwt.t diff --git a/src/state/heaps/context/context_heaps.ml b/src/state/heaps/context/context_heaps.ml index 3f15e53c67d..11b248008b5 100644 --- a/src/state/heaps/context/context_heaps.ml +++ b/src/state/heaps/context/context_heaps.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,14 +9,17 @@ open Utils_js (****************** shared context heap *********************) -module SigContextHeap = SharedMem_js.WithCache (File_key) (struct - type t = Context.sig_t - let prefix = Prefix.make() - let description = "SigContext" - let use_sqlite_fallback () = false -end) +module SigContextHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = Context.sig_t -let master_sig: Context.sig_t option option ref = ref None + let prefix = Prefix.make () + + let description = "SigContext" + end) + +let master_sig : Context.sig_t option option ref = ref None let add_sig_context = Expensive.wrap SigContextHeap.add @@ -25,48 +28,25 @@ let add_sig ~audit cx = if cx_file = File_key.Builtins then master_sig := None; add_sig_context ~audit cx_file (Context.sig_cx cx) -let find_sig file = - let cx_opt = - if file = File_key.Builtins then - match !master_sig with - | Some cx_opt -> cx_opt - | None -> - let cx_opt = SigContextHeap.get file in - master_sig := Some cx_opt; - cx_opt - else SigContextHeap.get file - in - match cx_opt with - | Some cx -> cx - | None -> raise (Key_not_found ("SigContextHeap", File_key.to_string file)) +module SigHashHeap = + SharedMem_js.NoCache (SharedMem_js.Immediate) (File_key) + (struct + type t = Xx.hash + + let prefix = Prefix.make () + + let description = "SigHash" + end) + +module LeaderHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = File_key.t + + let prefix = Prefix.make () -module SigHashHeap = SharedMem_js.NoCache (File_key) (struct - type t = Xx.hash - let prefix = Prefix.make() - let description = "SigHash" - let use_sqlite_fallback () = false -end) - -module LeaderHeap = SharedMem_js.WithCache (File_key) (struct - type t = File_key.t - let prefix = Prefix.make() - let description = "Leader" - let use_sqlite_fallback () = false -end) - -let find_leader file = - match LeaderHeap.get file with - | Some leader -> leader - | None -> raise (Key_not_found ("LeaderHeap", (File_key.to_string file))) - 
-let sig_hash_changed f = - match SigHashHeap.get f with - | None -> false - | Some xx -> - match SigHashHeap.get_old f with - | None -> true - | Some xx_old -> - File_key.check_suffix f Files.flow_ext || xx <> xx_old + let description = "Leader" + end) let oldify_merge_batch files = LeaderHeap.oldify_batch files; @@ -84,48 +64,58 @@ let revive_merge_batch files = SigContextHeap.revive_batch files; SigHashHeap.revive_batch files -module Init_master_context_mutator: sig - val add_master_sig: (Context.t -> unit) Expensive.t +module Init_master_context_mutator : sig + val add_master_sig : (Context.t -> unit) Expensive.t end = struct - let add_master_sig ~audit cx = - add_sig ~audit cx + let add_master_sig ~audit cx = add_sig ~audit cx end -module Merge_context_mutator: sig +let currently_oldified_files : FilenameSet.t ref option ref = ref None + +module Merge_context_mutator : sig type master_mutator + type worker_mutator - val create: Transaction.t -> Utils_js.FilenameSet.t -> master_mutator * worker_mutator - val add_merge_on_diff: + + val create : Transaction.t -> Utils_js.FilenameSet.t -> master_mutator * worker_mutator + + val add_merge_on_diff : (worker_mutator -> Context.t -> File_key.t Nel.t -> Xx.hash -> unit) Expensive.t - val add_merge_on_exn: + + val add_merge_on_exn : (worker_mutator -> options:Options.t -> File_key.t Nel.t -> unit) Expensive.t - val revive_files: master_mutator -> Utils_js.FilenameSet.t -> unit + + val revive_files : master_mutator -> Utils_js.FilenameSet.t -> unit + + val unrevived_files : master_mutator -> Utils_js.FilenameSet.t end = struct type master_mutator = Utils_js.FilenameSet.t ref + type worker_mutator = unit let commit oldified_files = Hh_logger.debug "Committing context heaps"; remove_old_merge_batch oldified_files; + currently_oldified_files := None; Lwt.return_unit let rollback oldified_files = Hh_logger.debug "Rolling back context heaps"; revive_merge_batch oldified_files; + currently_oldified_files := None; Lwt.return_unit let create transaction files = let master_mutator = ref files in let worker_mutator = () in + currently_oldified_files := Some master_mutator; - let commit () = commit (!master_mutator) in - let rollback () = rollback (!master_mutator) in - + let commit () = commit !master_mutator in + let rollback () = rollback !master_mutator in oldify_merge_batch files; Transaction.add ~singleton:"Merge_context" ~commit ~rollback transaction; - master_mutator, worker_mutator - + (master_mutator, worker_mutator) (* While merging, we must keep LeaderHeap, SigContextHeap, and SigHashHeap in sync, sometimes creating new entries and sometimes reusing old entries. 
*) @@ -136,16 +126,17 @@ end = struct let leader_f = Context.file leader_cx in (* Ideally we'd assert that leader_f is a member of the oldified files, but it's a little too * expensive to send the set of oldified files to the worker *) - let diff = match SigHashHeap.get_old leader_f with - | None -> true - | Some xx_old -> - File_key.check_suffix leader_f Files.flow_ext || xx <> xx_old + let diff = + match SigHashHeap.get_old leader_f with + | None -> true + | Some xx_old -> xx <> xx_old in if diff then ( - Nel.iter (fun f -> - (* Ideally we'd assert that f is a member of the oldified files too *) - LeaderHeap.add f leader_f - ) component_files; + Nel.iter + (fun f -> + (* Ideally we'd assert that f is a member of the oldified files too *) + LeaderHeap.add f leader_f) + component_files; add_sig_context ~audit leader_f (Context.sig_cx leader_cx); SigHashHeap.add leader_f xx ) @@ -157,24 +148,132 @@ end = struct let sig_cx = Context.make_sig () in let cx = let metadata = Context.metadata_of_options options in + (* This context is only used to add *something* to the sighash when we encounter an unexpected + * exception during typechecking. It doesn't really matter what we choose, so we might as well + * make it the empty map. *) + let aloc_tables = FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in let module_ref = Files.module_ref leader_f in - Context.make sig_cx metadata leader_f module_ref + Context.make sig_cx metadata leader_f aloc_tables rev_table module_ref Context.Merging + in + let module_refs = + Core_list.map + ~f:(fun f -> + let module_ref = Files.module_ref f in + let module_t = Type.AnyT.locationless Type.AnyError in + Context.add_module cx module_ref module_t; + + (* Ideally we'd assert that f is a member of the oldified files too *) + LeaderHeap.add f leader_f; + module_ref) + (Nel.to_list component) in - let module_refs = List.map (fun f -> - let module_ref = Files.module_ref f in - let module_t = Type.Locationless.AnyT.t in - Context.add_module cx module_ref module_t; - (* Ideally we'd assert that f is a member of the oldified files too *) - LeaderHeap.add f leader_f; - module_ref - ) (Nel.to_list component) in let xx = Merge_js.ContextOptimizer.sig_context cx module_refs in add_sig_context ~audit leader_f sig_cx; SigHashHeap.add leader_f xx let revive_files oldified_files files = (* Every file in files should be in the oldified set *) - assert (FilenameSet.is_empty (FilenameSet.diff files (!oldified_files))); - oldified_files := FilenameSet.diff (!oldified_files) files; + assert (FilenameSet.is_empty (FilenameSet.diff files !oldified_files)); + oldified_files := FilenameSet.diff !oldified_files files; revive_merge_batch files + + (* WARNING: Only call this function at the end of merge!!! Calling it during merge will return + meaningless results. + + Initially, `oldified_files` contains the set of files to be merged (see Merge_stream). During + merge, we call `revive_files` for files whose signatures have not changed. So the remaining + `oldified_files` at the end of merge must contain the set of files whose signatures are new or + have changed. In principle, we could maintain this state separately in Merge_stream, but it + seems wasteful to do so. 
*) + let unrevived_files oldified_files = !oldified_files +end + +module type READER = sig + type reader + + val find_sig : reader:reader -> File_key.t -> Context.sig_t + + val find_leader : reader:reader -> File_key.t -> File_key.t +end + +let find_sig ~get_sig ~reader:_ file = + let cx_opt = + if file = File_key.Builtins then ( + match !master_sig with + | Some cx_opt -> cx_opt + | None -> + let cx_opt = get_sig file in + master_sig := Some cx_opt; + cx_opt + ) else + get_sig file + in + match cx_opt with + | Some cx -> cx + | None -> raise (Key_not_found ("SigContextHeap", File_key.to_string file)) + +module Mutator_reader : sig + include READER with type reader = Mutator_state_reader.t + + val sig_hash_changed : reader:reader -> File_key.t -> bool +end = struct + type reader = Mutator_state_reader.t + + let find_sig = find_sig ~get_sig:SigContextHeap.get + + let find_leader ~reader:_ file = + match LeaderHeap.get file with + | Some leader -> leader + | None -> raise (Key_not_found ("LeaderHeap", File_key.to_string file)) + + let sig_hash_changed ~reader:_ f = + match SigHashHeap.get f with + | None -> false + | Some xx -> + (match SigHashHeap.get_old f with + | None -> true + | Some xx_old -> xx <> xx_old) +end + +module Reader : READER with type reader = State_reader.t = struct + type reader = State_reader.t + + let should_use_oldified file = + match !currently_oldified_files with + | None -> false + | Some oldified_files -> FilenameSet.mem file !oldified_files + + let find_sig ~reader file = + if should_use_oldified file then + find_sig ~get_sig:SigContextHeap.get_old ~reader file + else + find_sig ~get_sig:SigContextHeap.get ~reader file + + let find_leader ~reader:_ file = + let leader = + if should_use_oldified file then + LeaderHeap.get_old file + else + LeaderHeap.get file + in + match leader with + | Some leader -> leader + | None -> raise (Key_not_found ("LeaderHeap", File_key.to_string file)) +end + +module Reader_dispatcher : READER with type reader = Abstract_state_reader.t = struct + type reader = Abstract_state_reader.t + + open Abstract_state_reader + + let find_sig ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.find_sig ~reader + | State_reader reader -> Reader.find_sig ~reader + + let find_leader ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.find_leader ~reader + | State_reader reader -> Reader.find_leader ~reader end diff --git a/src/state/heaps/context/context_heaps.mli b/src/state/heaps/context/context_heaps.mli index 07f762cef80..8eca49f220d 100644 --- a/src/state/heaps/context/context_heaps.mli +++ b/src/state/heaps/context/context_heaps.mli @@ -1,27 +1,46 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val find_sig: File_key.t -> Context.sig_t +module type READER = sig + type reader -val find_leader: File_key.t -> File_key.t + val find_sig : reader:reader -> File_key.t -> Context.sig_t -val sig_hash_changed: File_key.t -> bool + val find_leader : reader:reader -> File_key.t -> File_key.t +end + +module Mutator_reader : sig + include READER with type reader = Mutator_state_reader.t + + val sig_hash_changed : reader:reader -> File_key.t -> bool +end + +module Reader : READER with type reader = State_reader.t + +module Reader_dispatcher : READER with type reader = Abstract_state_reader.t -module Init_master_context_mutator: sig - val add_master_sig: (Context.t -> unit) Expensive.t +module Init_master_context_mutator : sig + val add_master_sig : (Context.t -> unit) Expensive.t end -module Merge_context_mutator: sig +module Merge_context_mutator : sig type master_mutator + type worker_mutator - val create: Transaction.t -> Utils_js.FilenameSet.t -> master_mutator * worker_mutator - val add_merge_on_diff: + + val create : Transaction.t -> Utils_js.FilenameSet.t -> master_mutator * worker_mutator + + val add_merge_on_diff : (worker_mutator -> Context.t -> File_key.t Nel.t -> Xx.hash -> unit) Expensive.t - val add_merge_on_exn: + + val add_merge_on_exn : (worker_mutator -> options:Options.t -> File_key.t Nel.t -> unit) Expensive.t - val revive_files: master_mutator -> Utils_js.FilenameSet.t -> unit + + val revive_files : master_mutator -> Utils_js.FilenameSet.t -> unit + + val unrevived_files : master_mutator -> Utils_js.FilenameSet.t end diff --git a/src/state/heaps/context/dune b/src/state/heaps/context/dune new file mode 100644 index 00000000000..215d0fb4628 --- /dev/null +++ b/src/state/heaps/context/dune @@ -0,0 +1,12 @@ +(library + (name flow_state_heaps_context) + (wrapped false) + (libraries + flow_common_audit + flow_parser + flow_shared_mem + flow_state_readers + flow_typing + heap_shared_mem ; hack + ) +) diff --git a/src/state/heaps/diffing/diff_heaps.ml b/src/state/heaps/diffing/diff_heaps.ml index d29bce41583..64958d761f1 100644 --- a/src/state/heaps/diffing/diff_heaps.ml +++ b/src/state/heaps/diffing/diff_heaps.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -14,16 +14,13 @@ type patch = (int * int * string) list type key = File_key.t module DiffPatchHeap = - SharedMem_js.NoCache - (File_key) + SharedMem_js.NoCache (SharedMem_js.Immediate) (File_key) (struct type t = patch let prefix = Prefix.make () let description = "DiffPatch" - - let use_sqlite_fallback () = false end) let set_diff = Expensive.wrap DiffPatchHeap.add diff --git a/src/state/heaps/diffing/diff_heaps.mli b/src/state/heaps/diffing/diff_heaps.mli index 2d405dfc798..107ad6e6e7e 100644 --- a/src/state/heaps/diffing/diff_heaps.mli +++ b/src/state/heaps/diffing/diff_heaps.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
diff --git a/src/state/heaps/module/dune b/src/state/heaps/module/dune new file mode 100644 index 00000000000..95d56d2ba0e --- /dev/null +++ b/src/state/heaps/module/dune @@ -0,0 +1,13 @@ +(library + (name flow_state_heaps_module) + (wrapped false) + (libraries + flow_common_audit + flow_common_modulename + flow_parser_utils + flow_procs + flow_shared_mem + flow_state_readers + collections ; hack + ) +) diff --git a/src/state/heaps/module/module_heaps.ml b/src/state/heaps/module/module_heaps.ml index 42be0150d26..4e9fdd19584 100644 --- a/src/state/heaps/module/module_heaps.ml +++ b/src/state/heaps/module/module_heaps.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,164 +8,167 @@ (********************************** Name Heap *********************************) (* Maps module names to the filenames which provide those modules *) -module NameHeap = SharedMem_js.WithCache (Modulename.Key) (struct - type t = File_key.t - let prefix = Prefix.make() - let description = "Name" - let use_sqlite_fallback () = false -end) +module NameHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (Modulename.Key) + (struct + type t = File_key.t -let get_file = Expensive.wrap NameHeap.get -let module_exists = NameHeap.mem + let prefix = Prefix.make () -let get_file_unsafe ~audit m = - match get_file ~audit m with - | Some file -> file - | None -> failwith - (Printf.sprintf "file name not found for module %s" (Modulename.to_string m)) + let description = "Name" + end) (*************************** Resolved Requires Heap ***************************) (* Maps filenames to which other modules they require *) (* Subset of a file's context, with the important distinction that module references in the file have been resolved to module names. *) -(** TODO [perf] Make resolved_requires tighter. For info: - (1) checked? We know that requires and phantom dependents for unchecked - files are empty. - (2) parsed? We only care about the module provided by an unparsed file, but - that's probably guessable. -**) type resolved_requires = { - resolved_modules: Modulename.t SMap.t; (* map from module references in file + resolved_modules: Modulename.t SMap.t; + (* map from module references in file to module names they resolve to *) - phantom_dependents: SSet.t; (* set of paths that were looked up but not found + phantom_dependents: SSet.t; + (* set of paths that were looked up but not found when resolving module references in the file: when the paths come into existence, the module references need to be re-resolved. *) + hash: Xx.hash; (* An easy way to compare two resolved_requires to see if they've changed *) } +(** TODO [perf] Make resolved_requires tighter. For info: + (1) checked? We know that requires and phantom dependents for unchecked + files are empty. + + (2) parsed? We only care about the module provided by an unparsed file, but + that's probably guessable. 
+**) + +let mk_resolved_requires ~resolved_modules ~phantom_dependents = + let state = Xx.init () in + SMap.iter + (fun reference modulename -> + Xx.update state reference; + Xx.update state (Modulename.to_string modulename)) + resolved_modules; + SSet.iter (Xx.update state) phantom_dependents; + { resolved_modules; phantom_dependents; hash = Xx.digest state } -module ResolvedRequiresHeap = SharedMem_js.WithCache (File_key) (struct - type t = resolved_requires - let prefix = Prefix.make() - let description = "ResolvedRequires" - let use_sqlite_fallback () = false -end) +module ResolvedRequiresHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = resolved_requires -let get_resolved_requires_unsafe = Expensive.wrap (fun f -> - match ResolvedRequiresHeap.get f with - | Some resolved_requires -> resolved_requires - | None -> failwith - (Printf.sprintf "resolved requires not found for file %s" (File_key.to_string f)) -) + let prefix = Prefix.make () + + let description = "ResolvedRequires" + end) (********************************** Info Heap *********************************) (* Maps filenames to info about a module, including the module's name. *) (* note: currently we may have many files for one module name. *) (* this is an issue. *) - type info = { module_name: Modulename.t; - checked: bool; (* in flow? *) + checked: bool; + (* in flow? *) parsed: bool; (* if false, it's a tracking record only *) } -module InfoHeap = SharedMem_js.WithCache (File_key) (struct - type t = info - let prefix = Prefix.make() - let description = "Info" - let use_sqlite_fallback () = false -end) +module InfoHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = info -let get_info = Expensive.wrap InfoHeap.get + let prefix = Prefix.make () -let get_info_unsafe ~audit f = - match get_info ~audit f with - | Some info -> info - | None -> failwith (Printf.sprintf "module info not found for file %s" (File_key.to_string f)) - -let is_tracked_file = InfoHeap.mem + let description = "Info" + end) (******************************** Package Heaps *******************************) (* Maps filenames to info about a module, including the module's name. *) (* note: currently we may have many files for one module name. *) (* this is an issue. 
*) - (* shared heap for package.json tokens by filename *) -module PackageHeap = SharedMem_js.WithCache (StringKey) (struct - type t = Package_json.t - let prefix = Prefix.make() - let description = "Package" - let use_sqlite_fallback () = false - end) +module PackageHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (StringKey) + (struct + type t = (Package_json.t, unit) result + + let prefix = Prefix.make () + + let description = "Package" + end) (* shared heap for package.json directories by package name *) -module ReversePackageHeap = SharedMem_js.WithCache (StringKey) (struct - type t = string - let prefix = Prefix.make() - let description = "ReversePackage" - let use_sqlite_fallback () = false - end) +module ReversePackageHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (StringKey) + (struct + type t = string -let get_package = PackageHeap.get -let get_package_directory = ReversePackageHeap.get + let prefix = Prefix.make () + + let description = "ReversePackage" + end) (*********************************** Mutators *********************************) -module Commit_modules_mutator: sig +let currently_oldified_nameheap_modulenames : Modulename.Set.t ref option ref = ref None + +module Commit_modules_mutator : sig type t - val create: Transaction.t -> is_init:bool -> t - val remove_and_replace: + + val create : Transaction.t -> is_init:bool -> t + + val remove_and_replace : t -> workers:MultiWorkerLwt.worker list option -> to_remove:Modulename.Set.t -> - to_replace:(Modulename.t * File_key.t) list -> + to_replace:(Modulename.t * File_key.t) list -> unit Lwt.t end = struct - type t' = { + type t = { is_init: bool; - changed_files: Modulename.Set.t; + changed_files: Modulename.Set.t ref; } - type t = t' ref let commit mutator = Hh_logger.debug "Committing NameHeap"; - if not mutator.is_init - then NameHeap.remove_old_batch mutator.changed_files; + if not mutator.is_init then NameHeap.remove_old_batch !(mutator.changed_files); + currently_oldified_nameheap_modulenames := None; Lwt.return_unit let rollback mutator = Hh_logger.debug "Rolling back NameHeap"; - if not mutator.is_init - then NameHeap.revive_batch mutator.changed_files; + if not mutator.is_init then NameHeap.revive_batch !(mutator.changed_files); + currently_oldified_nameheap_modulenames := None; Lwt.return_unit let create transaction ~is_init = - let mutator = ref { changed_files = Modulename.Set.empty; is_init; } in - let commit () = commit (!mutator) in - let rollback () = rollback (!mutator) in + let changed_files = ref Modulename.Set.empty in + currently_oldified_nameheap_modulenames := Some changed_files; + let mutator = { changed_files; is_init } in + let commit () = commit mutator in + let rollback () = rollback mutator in Transaction.add ~singleton:"Commit_modules" ~commit ~rollback transaction; mutator let remove_and_replace mutator ~workers ~to_remove ~to_replace = (* During init we don't need to worry about oldifying, reviving, or removing old entries *) - if not !mutator.is_init - then begin + if not mutator.is_init then ( (* Verify there are no files we're both trying to remove and replace * - Note, to_replace may be a VERY LARGE list so avoid non-tail-recursive calls *) let to_replace_set = List.fold_left (fun set (f, _) -> Modulename.Set.add f set) Modulename.Set.empty to_replace in - (* to_remove_set and to_replace_set should be disjoint sets *) let changed_files = Modulename.Set.union to_remove to_replace_set in - mutator := { !mutator with changed_files; }; + mutator.changed_files := 
changed_files; (* Save the old data *) - NameHeap.oldify_batch changed_files; - end; + NameHeap.oldify_batch changed_files + ); (* Remove *) NameHeap.remove_batch to_remove; @@ -173,22 +176,27 @@ end = struct (* Replace *) MultiWorkerLwt.call workers - ~job: (fun () to_replace -> List.iter (fun (m, f) -> NameHeap.add m f) to_replace) - ~neutral: () - ~merge: (fun () () -> ()) - ~next: (MultiWorkerLwt.next workers to_replace) + ~job:(fun () to_replace -> List.iter (fun (m, f) -> NameHeap.add m f) to_replace) + ~neutral:() + ~merge:(fun () () -> ()) + ~next:(MultiWorkerLwt.next workers to_replace) end -module Resolved_requires_mutator: sig +let currently_oldified_resolved_requires : Utils_js.FilenameSet.t ref = + ref Utils_js.FilenameSet.empty + +module Resolved_requires_mutator : sig type t - val create: Transaction.t -> Utils_js.FilenameSet.t -> t - val add_resolved_requires: t -> File_key.t -> resolved_requires -> unit + + val create : Transaction.t -> Utils_js.FilenameSet.t -> t + + val add_resolved_requires : t -> File_key.t -> resolved_requires -> bool end = struct type t = unit (* We actually may have multiple Resolved_requires_mutator's in a single transaction. So we need to * assert that they never interfere with each other *) - let active_files = ref Utils_js.FilenameSet.empty + let active_files = currently_oldified_resolved_requires let commit files = Hh_logger.debug "Committing ResolvedRequiresHeap"; @@ -201,8 +209,10 @@ end = struct ResolvedRequiresHeap.revive_batch files let create transaction oldified_files = - if not (Utils_js.FilenameSet.is_empty (Utils_js.FilenameSet.inter oldified_files !active_files)) - then failwith "Multiple Resolved_requires_mutator's operating on the same files"; + if + not (Utils_js.FilenameSet.is_empty (Utils_js.FilenameSet.inter oldified_files !active_files)) + then + failwith "Multiple Resolved_requires_mutator's operating on the same files"; active_files := Utils_js.FilenameSet.union oldified_files !active_files; ResolvedRequiresHeap.oldify_batch oldified_files; @@ -213,29 +223,43 @@ end = struct (* This function runs on a worker process. Ideally, we'd assert that file is a member of * oldified_files, but for init and large rechecks this would involve sending a very large - * set to the workers, which is really slow. *) + * set to the workers, which is really slow. 
+ * + * It returns true if the resolved requires changed and false otherwise *) let add_resolved_requires () file resolved_requires = - ResolvedRequiresHeap.add file resolved_requires + ResolvedRequiresHeap.add file resolved_requires; + + (* Check to see if the resolved requires changed at all with this addition *) + match ResolvedRequiresHeap.get_old file with + | None -> true + | Some old_resolve_requires -> old_resolve_requires.hash <> resolved_requires.hash end +let currently_oldified_infoheap_files : Utils_js.FilenameSet.t option ref = ref None + module Introduce_files_mutator : sig type t - val create: Transaction.t -> Utils_js.FilenameSet.t -> t - val add_info: t -> File_key.t -> info -> unit + + val create : Transaction.t -> Utils_js.FilenameSet.t -> t + + val add_info : t -> File_key.t -> info -> unit end = struct type t = unit let commit oldified_files = Hh_logger.debug "Committing InfoHeap"; InfoHeap.remove_old_batch oldified_files; + currently_oldified_infoheap_files := None; Lwt.return_unit let rollback oldified_files = Hh_logger.debug "Rolling back InfoHeap"; InfoHeap.revive_batch oldified_files; + currently_oldified_infoheap_files := None; Lwt.return_unit let create transaction oldified_files = + currently_oldified_infoheap_files := Some oldified_files; InfoHeap.oldify_batch oldified_files; let commit () = commit oldified_files in let rollback () = rollback oldified_files in @@ -243,23 +267,206 @@ end = struct (* Ideally we'd assert that file is in oldified_files, but passing through the oldified_files set * to the worker process which calls add_info is kind of expensive *) - let add_info () file info = - InfoHeap.add file info + let add_info () file info = InfoHeap.add file info end - (* Flow doesn't support incrementally changing the package heaps, so we don't need to add this to * a transaction *) -module Package_heap_mutator: sig - val add_package_json: string -> Package_json.t -> unit +module Package_heap_mutator : sig + val add_package_json : string -> Package_json.t -> unit + + val add_error : string -> unit end = struct let add_package_json filename package_json = - PackageHeap.add filename package_json; - begin match Package_json.name package_json with - | Some name -> - ReversePackageHeap.add name (Filename.dirname filename) + PackageHeap.add filename (Ok package_json); + match Package_json.name package_json with + | Some name -> ReversePackageHeap.add name (Filename.dirname filename) | None -> () - end + + let add_error filename = PackageHeap.add filename (Error ()) +end + +(*********************************** Readers **********************************) + +module type READER = sig + type reader + + val get_file : reader:reader -> (Modulename.t -> File_key.t option) Expensive.t + + val get_file_unsafe : reader:reader -> (Modulename.t -> File_key.t) Expensive.t + + val module_exists : reader:reader -> Modulename.t -> bool + + val get_resolved_requires_unsafe : reader:reader -> (File_key.t -> resolved_requires) Expensive.t + + (* given a filename, returns module info *) + val get_info_unsafe : reader:reader -> (File_key.t -> info) Expensive.t + + val get_info : reader:reader -> (File_key.t -> info option) Expensive.t + + val is_tracked_file : reader:reader -> File_key.t -> bool + + val get_package : reader:reader -> string -> (Package_json.t, unit) result option + + val get_package_directory : reader:reader -> string -> string option +end + +module Mutator_reader : READER with type reader = Mutator_state_reader.t = struct + type reader = Mutator_state_reader.t + + 
let get_file ~reader:_ = Expensive.wrap NameHeap.get + + let module_exists ~reader:_ = NameHeap.mem + + let get_file_unsafe ~reader ~audit m = + match get_file ~reader ~audit m with + | Some file -> file + | None -> + failwith (Printf.sprintf "file name not found for module %s" (Modulename.to_string m)) + + let get_resolved_requires_unsafe ~reader:_ = + Expensive.wrap (fun f -> + match ResolvedRequiresHeap.get f with + | Some resolved_requires -> resolved_requires + | None -> + failwith + (Printf.sprintf "resolved requires not found for file %s" (File_key.to_string f))) + + let get_info ~reader:_ = Expensive.wrap InfoHeap.get + + let get_info_unsafe ~reader ~audit f = + match get_info ~reader ~audit f with + | Some info -> info + | None -> failwith (Printf.sprintf "module info not found for file %s" (File_key.to_string f)) + + let is_tracked_file ~reader:_ = InfoHeap.mem + + let get_package ~reader:_ = PackageHeap.get + + let get_package_directory ~reader:_ = ReversePackageHeap.get +end + +module Reader : READER with type reader = State_reader.t = struct + type reader = State_reader.t + + let should_use_old_nameheap key = + match !currently_oldified_nameheap_modulenames with + | None -> false + | Some oldified_modulenames -> Modulename.Set.mem key !oldified_modulenames + + let should_use_old_resolved_requires f = + Utils_js.FilenameSet.mem f !currently_oldified_resolved_requires + + let should_use_old_infoheap f = + match !currently_oldified_infoheap_files with + | None -> false + | Some oldified_files -> Utils_js.FilenameSet.mem f oldified_files + + let get_file ~reader:_ ~audit key = + if should_use_old_nameheap key then + Expensive.wrap NameHeap.get_old ~audit key + else + Expensive.wrap NameHeap.get ~audit key + + let module_exists ~reader:_ key = + if should_use_old_nameheap key then + NameHeap.mem_old key + else + NameHeap.mem key + + let get_file_unsafe ~reader ~audit m = + match get_file ~reader ~audit m with + | Some file -> file + | None -> + failwith (Printf.sprintf "file name not found for module %s" (Modulename.to_string m)) + + let get_resolved_requires_unsafe ~reader:_ = + Expensive.wrap (fun f -> + let resolved_requires = + if should_use_old_resolved_requires f then + ResolvedRequiresHeap.get_old f + else + ResolvedRequiresHeap.get f + in + match resolved_requires with + | Some resolved_requires -> resolved_requires + | None -> + failwith + (Printf.sprintf "resolved requires not found for file %s" (File_key.to_string f))) + + let get_info ~reader:_ ~audit f = + if should_use_old_infoheap f then + Expensive.wrap InfoHeap.get_old ~audit f + else + Expensive.wrap InfoHeap.get ~audit f + + let get_info_unsafe ~reader ~audit f = + match get_info ~reader ~audit f with + | Some info -> info + | None -> failwith (Printf.sprintf "module info not found for file %s" (File_key.to_string f)) + + let is_tracked_file ~reader:_ f = + if should_use_old_infoheap f then + InfoHeap.mem_old f + else + InfoHeap.mem f + + (* We don't support incrementally updating the package heaps, so we never actually oldify + * anything. 
Therefore we always can read from the package heap directly *) + let get_package ~reader:_ = PackageHeap.get + + let get_package_directory ~reader:_ = ReversePackageHeap.get +end + +module Reader_dispatcher : READER with type reader = Abstract_state_reader.t = struct + type reader = Abstract_state_reader.t + + open Abstract_state_reader + + let get_file ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_file ~reader + | State_reader reader -> Reader.get_file ~reader + + let module_exists ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.module_exists ~reader + | State_reader reader -> Reader.module_exists ~reader + + let get_file_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_file_unsafe ~reader + | State_reader reader -> Reader.get_file_unsafe ~reader + + let get_resolved_requires_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_resolved_requires_unsafe ~reader + | State_reader reader -> Reader.get_resolved_requires_unsafe ~reader + + let get_info ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_info ~reader + | State_reader reader -> Reader.get_info ~reader + + let get_info_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_info_unsafe ~reader + | State_reader reader -> Reader.get_info_unsafe ~reader + + let is_tracked_file ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.is_tracked_file ~reader + | State_reader reader -> Reader.is_tracked_file ~reader + + let get_package ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_package ~reader + | State_reader reader -> Reader.get_package ~reader + + let get_package_directory ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_package_directory ~reader + | State_reader reader -> Reader.get_package_directory ~reader end (******************** APIs for saving/loading saved state *********************) @@ -270,7 +477,12 @@ end module For_saved_state = struct exception Package_not_found of string + + exception Package_not_valid of string + let get_package_json_unsafe file = - try PackageHeap.find_unsafe file - with Not_found -> raise (Package_not_found file) + match PackageHeap.find_unsafe file with + | Ok package -> package + | Error () -> raise (Package_not_valid file) + | exception Not_found -> raise (Package_not_found file) end diff --git a/src/state/heaps/module/module_heaps.mli b/src/state/heaps/module/module_heaps.mli index 51ea7471a0d..143d2539145 100644 --- a/src/state/heaps/module/module_heaps.mli +++ b/src/state/heaps/module/module_heaps.mli @@ -1,66 +1,94 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val get_file: (Modulename.t -> File_key.t option) Expensive.t -val get_file_unsafe: (Modulename.t -> File_key.t) Expensive.t -val module_exists: Modulename.t -> bool - type resolved_requires = { resolved_modules: Modulename.t SMap.t; phantom_dependents: SSet.t; + hash: Xx.hash; } -val get_resolved_requires_unsafe: (File_key.t -> resolved_requires) Expensive.t +val mk_resolved_requires : + resolved_modules:Modulename.t SMap.t -> phantom_dependents:SSet.t -> resolved_requires type info = { module_name: Modulename.t; - checked: bool; (* in flow? *) - parsed: bool; (* if false, it's a tracking record only *) + checked: bool; + (* in flow? *) + parsed: bool; (* if false, it's a tracking record only *) } -(* given a filename, returns module info *) -val get_info_unsafe: (File_key.t -> info) Expensive.t -val get_info: (File_key.t -> info option) Expensive.t -val is_tracked_file: File_key.t -> bool +module type READER = sig + type reader + + val get_file : reader:reader -> (Modulename.t -> File_key.t option) Expensive.t + + val get_file_unsafe : reader:reader -> (Modulename.t -> File_key.t) Expensive.t + + val module_exists : reader:reader -> Modulename.t -> bool + + val get_resolved_requires_unsafe : reader:reader -> (File_key.t -> resolved_requires) Expensive.t + + (* given a filename, returns module info *) + val get_info_unsafe : reader:reader -> (File_key.t -> info) Expensive.t + + val get_info : reader:reader -> (File_key.t -> info option) Expensive.t + + val is_tracked_file : reader:reader -> File_key.t -> bool + + val get_package : reader:reader -> string -> (Package_json.t, unit) result option + + val get_package_directory : reader:reader -> string -> string option +end + +module Mutator_reader : READER with type reader = Mutator_state_reader.t + +module Reader : READER with type reader = State_reader.t -val get_package: string -> Package_json.t option -val get_package_directory: string -> string option +module Reader_dispatcher : READER with type reader = Abstract_state_reader.t module Commit_modules_mutator : sig type t - val create: Transaction.t -> is_init:bool -> t - val remove_and_replace: + + val create : Transaction.t -> is_init:bool -> t + + val remove_and_replace : t -> workers:MultiWorkerLwt.worker list option -> to_remove:Modulename.Set.t -> - to_replace:(Modulename.t * File_key.t) list -> + to_replace:(Modulename.t * File_key.t) list -> unit Lwt.t end module Resolved_requires_mutator : sig type t - val create: Transaction.t -> Utils_js.FilenameSet.t -> t - val add_resolved_requires: t -> File_key.t -> resolved_requires -> unit + + val create : Transaction.t -> Utils_js.FilenameSet.t -> t + + val add_resolved_requires : t -> File_key.t -> resolved_requires -> bool end module Introduce_files_mutator : sig type t - val create: Transaction.t -> Utils_js.FilenameSet.t -> t - val add_info: t -> File_key.t -> info -> unit + + val create : Transaction.t -> Utils_js.FilenameSet.t -> t + + val add_info : t -> File_key.t -> info -> unit end module Package_heap_mutator : sig - val add_package_json: string -> Package_json.t -> unit + val add_package_json : string -> Package_json.t -> unit + + val add_error : string -> unit end module From_saved_state : sig - val add_resolved_requires: File_key.t -> resolved_requires -> unit + val add_resolved_requires : File_key.t -> resolved_requires -> unit end module For_saved_state : sig - val get_package_json_unsafe: string -> Package_json.t + val get_package_json_unsafe : string -> Package_json.t end diff --git a/src/state/heaps/parsing/dune 
b/src/state/heaps/parsing/dune new file mode 100644 index 00000000000..955a5e5b2d9 --- /dev/null +++ b/src/state/heaps/parsing/dune @@ -0,0 +1,14 @@ +(library + (name flow_state_heaps_parsing) + (wrapped false) + (libraries + flow_common + flow_parser + flow_parser_utils + flow_shared_mem + flow_state_heaps_parsing_exceptions + flow_state_readers + xx + heap_shared_mem ; hack + ) +) diff --git a/src/state/heaps/parsing/exceptions/dune b/src/state/heaps/parsing/exceptions/dune new file mode 100644 index 00000000000..b7ed0d873c3 --- /dev/null +++ b/src/state/heaps/parsing/exceptions/dune @@ -0,0 +1,4 @@ +(library + (name flow_state_heaps_parsing_exceptions) + (wrapped false) +) diff --git a/src/state/heaps/parsing/exceptions/parsing_heaps_exceptions.ml b/src/state/heaps/parsing/exceptions/parsing_heaps_exceptions.ml new file mode 100644 index 00000000000..601133bbed7 --- /dev/null +++ b/src/state/heaps/parsing/exceptions/parsing_heaps_exceptions.ml @@ -0,0 +1,20 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +exception Ast_not_found of string + +exception Sig_ast_not_found of string + +exception Sig_ast_ALoc_table_not_found of string + +exception Docblock_not_found of string + +exception Requires_not_found of string + +exception Sig_requires_not_found of string + +exception Hash_not_found of string diff --git a/src/state/heaps/parsing/parsing_heaps.ml b/src/state/heaps/parsing/parsing_heaps.ml index 315e5c973f6..317cace82ce 100644 --- a/src/state/heaps/parsing/parsing_heaps.ml +++ b/src/state/heaps/parsing/parsing_heaps.ml @@ -1,96 +1,310 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) open Utils_js +open Parsing_heaps_exceptions (* shared heap for parsed ASTs by filename *) -module ASTHeap = SharedMem_js.WithCache (File_key) (struct - type t = (Loc.t, Loc.t) Flow_ast.program - let prefix = Prefix.make() - let description = "AST" - let use_sqlite_fallback () = false -end) - -module DocblockHeap = SharedMem_js.WithCache (File_key) (struct - type t = Docblock.t - let prefix = Prefix.make() - let description = "Docblock" - let use_sqlite_fallback () = false -end) - -module FileSigHeap = SharedMem_js.WithCache (File_key) (struct - type t = File_sig.t - let prefix = Prefix.make() - let description = "Requires" - let use_sqlite_fallback () = false -end) +module ASTHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = (RelativeLoc.t, RelativeLoc.t) Flow_ast.program + + let prefix = Prefix.make () + + let description = "AST" + end) + +module SigASTHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = (ALoc.t, ALoc.t) Flow_ast.program + + let prefix = Prefix.make () + + let description = "SigAST" + end) + +module SigASTALocTableHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = ALoc.table + + let prefix = Prefix.make () + + let description = "ALocTable" + end) + +(* There's some redundancy in the visitors here, but an attempt to avoid repeated code led, + * inexplicably, to a shared heap size regression under types-first: D15481813 *) +let loc_compactifier = + object (this) + inherit [Loc.t, Loc.t, RelativeLoc.t, RelativeLoc.t] Flow_polymorphic_ast_mapper.mapper + + method private compactify_loc loc = RelativeLoc.of_loc loc + + method on_loc_annot = this#compactify_loc + + method on_type_annot = this#compactify_loc + end + +let source_remover_aloc = + object (this) + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t] Flow_polymorphic_ast_mapper.mapper + + method private remove_source = ALoc.update_source (fun _ -> None) + + method on_loc_annot = this#remove_source + + method on_type_annot = this#remove_source + end + +let compactify_loc ast = loc_compactifier#program ast + +let remove_source_aloc ast = source_remover_aloc#program ast + +let loc_decompactifier source = + object (this) + inherit [RelativeLoc.t, RelativeLoc.t, Loc.t, Loc.t] Flow_polymorphic_ast_mapper.mapper + + method private decompactify_loc loc = RelativeLoc.to_loc loc source + + method on_loc_annot = this#decompactify_loc + + method on_type_annot = this#decompactify_loc + end + +let source_adder_aloc source = + object (this) + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t] Flow_polymorphic_ast_mapper.mapper + + method private add_source = ALoc.update_source (fun _ -> source) + + method on_loc_annot = this#add_source + + method on_type_annot = this#add_source + end + +let decompactify_loc file ast = (loc_decompactifier (Some file))#program ast + +let add_source_aloc file ast = (source_adder_aloc (Some file))#program ast + +module DocblockHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = Docblock.t + + let prefix = Prefix.make () + + let description = "Docblock" + end) + +module FileSigHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = File_sig.With_Loc.t + + let prefix = Prefix.make () + + let description = "Requires" + end) + +module SigFileSigHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + type t = File_sig.With_ALoc.t + + let prefix = Prefix.make () + + let description = "SigRequires" + end) (* Contains the 
hash for every file we even consider parsing *) -module FileHashHeap = SharedMem_js.WithCache (File_key) (struct - (* In the future I imagine a system like this: - * - * type t = { - * since_version: Recheck.version; - * hash: Sha1.hash; - * } - * - * Every time we notice files changing (via file_watcher or some other way) we bump the version - * so a recheck is known to be from version N to version N+1. If the recheck gets cancelled - * due to a file watcher event, then we're checking from version N to version N+2 (etc etc). - * - * Ideally we'd be able to ignore file watcher events that are either old & outdated or where - * hash hasn't changed (watchman can provide the sha1). - * - * And ideally a cancelled recheck leading to a recheck from version N to N+2 will still - * merge file foo.js, even if we parsed it at version N+1 & it's unchanged since version N+1. - *) - type t = Xx.hash - let prefix = Prefix.make() - let description = "FileHash" - let use_sqlite_fallback () = false -end) +module FileHashHeap = + SharedMem_js.WithCache (SharedMem_js.Immediate) (File_key) + (struct + (* In the future I imagine a system like this: + * + * type t = { + * since_version: Recheck.version; + * hash: Sha1.hash; + * } + * + * Every time we notice files changing (via file_watcher or some other way) we bump the version + * so a recheck is known to be from version N to version N+1. If the recheck gets cancelled + * due to a file watcher event, then we're checking from version N to version N+2 (etc etc). + * + * Ideally we'd be able to ignore file watcher events that are either old & outdated or where + * hash hasn't changed (watchman can provide the sha1). + * + * And ideally a cancelled recheck leading to a recheck from version N to N+2 will still + * merge file foo.js, even if we parsed it at version N+1 & it's unchanged since version N+1. 
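The loc_compactifier / loc_decompactifier pair above suggests a simple round trip: locations written to the heap drop their source file, presumably because the heap entry is already keyed by file, and the reader re-attaches the file on the way out. A toy model of that round trip, using invented record types rather than Flow's Loc.t / RelativeLoc.t:

    (* Toy round trip: the stored form omits the file; decompactify restores it
       from the key the entry was fetched under. *)
    type loc = { file : string option; line : int; col : int }

    type relative_loc = { rel_line : int; rel_col : int }

    let compactify (l : loc) : relative_loc = { rel_line = l.line; rel_col = l.col }

    let decompactify (file : string) (r : relative_loc) : loc =
      { file = Some file; line = r.rel_line; col = r.rel_col }

    let () =
      let original = { file = Some "foo.js"; line = 3; col = 7 } in
      let stored = compactify original in              (* what the heap would hold *)
      let restored = decompactify "foo.js" stored in   (* what get_ast hands back *)
      assert (restored = original)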
+ *) + type t = Xx.hash + + let prefix = Prefix.make () + + let description = "FileHash" + end) (* Groups operations on the multiple heaps that need to stay in sync *) module ParsingHeaps = struct - let add file ast info file_sig = - ASTHeap.add file ast; + let add file info (ast, file_sig) sig_opt = + ASTHeap.add file (compactify_loc ast); DocblockHeap.add file info; - FileSigHeap.add file file_sig + FileSigHeap.add file file_sig; + Option.iter sig_opt ~f:(fun (sig_ast, sig_file_sig, aloc_table) -> + SigASTHeap.add file (remove_source_aloc sig_ast); + Option.iter aloc_table ~f:(SigASTALocTableHeap.add file); + SigFileSigHeap.add file sig_file_sig) let oldify_batch files = ASTHeap.oldify_batch files; + SigASTHeap.oldify_batch files; + SigASTALocTableHeap.oldify_batch files; DocblockHeap.oldify_batch files; FileSigHeap.oldify_batch files; + SigFileSigHeap.oldify_batch files; FileHashHeap.oldify_batch files let remove_old_batch files = ASTHeap.remove_old_batch files; + SigASTHeap.remove_old_batch files; + SigASTALocTableHeap.remove_old_batch files; DocblockHeap.remove_old_batch files; FileSigHeap.remove_old_batch files; + SigFileSigHeap.remove_old_batch files; FileHashHeap.remove_old_batch files; SharedMem_js.collect `gentle let revive_batch files = ASTHeap.revive_batch files; + SigASTHeap.revive_batch files; + SigASTALocTableHeap.revive_batch files; DocblockHeap.revive_batch files; FileSigHeap.revive_batch files; + SigFileSigHeap.revive_batch files; FileHashHeap.revive_batch files end +module type READER = sig + type reader + + val has_ast : reader:reader -> File_key.t -> bool + + val get_ast : reader:reader -> File_key.t -> (Loc.t, Loc.t) Flow_ast.program option + + val get_docblock : reader:reader -> File_key.t -> Docblock.t option + + val get_file_sig : reader:reader -> File_key.t -> File_sig.With_Loc.t option + + val get_sig_file_sig : reader:reader -> File_key.t -> File_sig.With_ALoc.t option + + val get_file_hash : reader:reader -> File_key.t -> Xx.hash option + + val get_ast_unsafe : reader:reader -> File_key.t -> (Loc.t, Loc.t) Flow_ast.program + + val get_sig_ast_unsafe : reader:reader -> File_key.t -> (ALoc.t, ALoc.t) Flow_ast.program + + val get_sig_ast_aloc_table_unsafe : reader:reader -> File_key.t -> ALoc.table + + val get_sig_ast_aloc_table_unsafe_lazy : reader:reader -> ALoc.t -> ALoc.table Lazy.t + + val get_docblock_unsafe : reader:reader -> File_key.t -> Docblock.t + + val get_file_sig_unsafe : reader:reader -> File_key.t -> File_sig.With_Loc.t + + val get_sig_file_sig_unsafe : reader:reader -> File_key.t -> File_sig.With_ALoc.t + + val get_file_hash_unsafe : reader:reader -> File_key.t -> Xx.hash +end + +let make_lazy_aloc_table_fetcher ~get_sig_ast_aloc_table_unsafe ~reader aloc = + lazy + begin + let source = + match ALoc.source aloc with + | None -> failwith "Expected `aloc` to have a `source`" + | Some x -> x + in + get_sig_ast_aloc_table_unsafe ~reader source + end + +(* Init/recheck will use Mutator_reader to read the shared memory *) +module Mutator_reader : sig + include READER with type reader = Mutator_state_reader.t + + val get_old_file_hash : reader:Mutator_state_reader.t -> File_key.t -> Xx.hash option +end = struct + type reader = Mutator_state_reader.t + + let has_ast ~reader:_ = ASTHeap.mem + + let get_ast ~reader:_ key = + let ast = ASTHeap.get key in + Option.map ~f:(decompactify_loc key) ast + + let get_docblock ~reader:_ = DocblockHeap.get + + let get_file_sig ~reader:_ = FileSigHeap.get + + let get_sig_file_sig ~reader:_ = SigFileSigHeap.get + + let 
get_file_hash ~reader:_ = FileHashHeap.get + + let get_old_file_hash ~reader:_ = FileHashHeap.get_old + + let get_ast_unsafe ~reader:_ file = + try ASTHeap.find_unsafe file |> decompactify_loc file + with Not_found -> raise (Ast_not_found (File_key.to_string file)) + + let get_sig_ast_unsafe ~reader:_ file = + try SigASTHeap.find_unsafe file |> add_source_aloc file + with Not_found -> raise (Sig_ast_not_found (File_key.to_string file)) + + let get_sig_ast_aloc_table_unsafe ~reader:_ file = + try SigASTALocTableHeap.find_unsafe file + with Not_found -> raise (Sig_ast_ALoc_table_not_found (File_key.to_string file)) + + let get_sig_ast_aloc_table_unsafe_lazy = + make_lazy_aloc_table_fetcher ~get_sig_ast_aloc_table_unsafe + + let get_docblock_unsafe ~reader:_ file = + try DocblockHeap.find_unsafe file + with Not_found -> raise (Docblock_not_found (File_key.to_string file)) + + let get_file_sig_unsafe ~reader:_ file = + try FileSigHeap.find_unsafe file + with Not_found -> raise (Requires_not_found (File_key.to_string file)) + + let get_sig_file_sig_unsafe ~reader:_ file = + try SigFileSigHeap.find_unsafe file + with Not_found -> raise (Sig_requires_not_found (File_key.to_string file)) + + let get_file_hash_unsafe ~reader:_ file = + try FileHashHeap.find_unsafe file + with Not_found -> raise (Hash_not_found (File_key.to_string file)) +end + (* For use by a worker process *) type worker_mutator = { - add_file: File_key.t -> (Loc.t, Loc.t) Flow_ast.program -> Docblock.t -> File_sig.t -> unit; - add_hash: File_key.t -> Xx.hash -> unit + add_file: + File_key.t -> + Docblock.t -> + (Loc.t, Loc.t) Flow_ast.program * File_sig.With_Loc.t -> + ((ALoc.t, ALoc.t) Flow_ast.program * File_sig.With_ALoc.t * ALoc.table option) option -> + unit; + add_hash: File_key.t -> Xx.hash -> unit; } (* Parsing is pretty easy - there is no before state and no chance of rollbacks, so we don't * need to worry about a transaction *) -module Parse_mutator: sig - val create: unit -> worker_mutator +module Parse_mutator : sig + val create : unit -> worker_mutator end = struct let create () = { add_file = ParsingHeaps.add; add_hash = FileHashHeap.add } end @@ -103,90 +317,241 @@ end * If you revive some files before the transaction ends, then those won't be affected by * commit/rollback *) -module Reparse_mutator: sig +let currently_oldified_files : FilenameSet.t ref option ref = ref None + +module Reparse_mutator : sig type master_mutator (* Used by the master process *) - val create: Transaction.t -> FilenameSet.t -> master_mutator * worker_mutator - val revive_files: master_mutator -> FilenameSet.t -> unit + + val create : Transaction.t -> FilenameSet.t -> master_mutator * worker_mutator + + val revive_files : master_mutator -> FilenameSet.t -> unit end = struct type master_mutator = FilenameSet.t ref let commit oldified_files = Hh_logger.debug "Committing parsing heaps"; ParsingHeaps.remove_old_batch oldified_files; + currently_oldified_files := None; Lwt.return_unit let rollback oldified_files = Hh_logger.debug "Rolling back parsing heaps"; ParsingHeaps.revive_batch oldified_files; + currently_oldified_files := None; Lwt.return_unit (* Ideally we'd assert that file was oldified and not revived, but it's too expensive to pass the * set of oldified files to the worker *) - let add_file file ast info file_sig = - ParsingHeaps.add file ast info file_sig + let add_file file info (ast, file_sig) sig_opt = + ParsingHeaps.add file info (ast, file_sig) sig_opt (* Ideally we'd assert that file was oldified and not revived, but it's 
too expensive to pass the * set of oldified files to the worker *) - let add_hash file hash = - FileHashHeap.add file hash + let add_hash file hash = FileHashHeap.add file hash let create transaction files = let master_mutator = ref files in + currently_oldified_files := Some master_mutator; let worker_mutator = { add_file; add_hash } in - ParsingHeaps.oldify_batch files; - let commit () = commit (!master_mutator) in - let rollback () = rollback (!master_mutator) in + let commit () = commit !master_mutator in + let rollback () = rollback !master_mutator in Transaction.add ~singleton:"Reparse" ~commit ~rollback transaction; - master_mutator, worker_mutator + (master_mutator, worker_mutator) let revive_files oldified_files files = (* Every file in files should be in the oldified set *) - assert (FilenameSet.is_empty (FilenameSet.diff files (!oldified_files))); - oldified_files := FilenameSet.diff (!oldified_files) files; + assert (FilenameSet.is_empty (FilenameSet.diff files !oldified_files)); + oldified_files := FilenameSet.diff !oldified_files files; ParsingHeaps.revive_batch files end -let has_ast = ASTHeap.mem - -let has_old_ast = ASTHeap.mem_old - -let get_ast = ASTHeap.get - -let get_docblock = DocblockHeap.get - -let get_file_sig = FileSigHeap.get - -let get_file_hash = FileHashHeap.get - -let get_old_file_hash = FileHashHeap.get_old - -exception Ast_not_found of string -let get_ast_unsafe file = - try ASTHeap.find_unsafe file - with Not_found -> raise (Ast_not_found (File_key.to_string file)) - -exception Docblock_not_found of string -let get_docblock_unsafe file = - try DocblockHeap.find_unsafe file - with Not_found -> raise (Docblock_not_found (File_key.to_string file)) +(* This peaks at the Reparse_mutator's state and uses that to determine whether to read from the + * old or new heap. 
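Reparse_mutator's commit and rollback above follow a fixed discipline: affected entries are set aside as "old", commit discards the old copies, and rollback revives them. A self-contained sketch of that discipline over plain Hashtbls; the real heaps live in shared memory and the real commit/rollback closures are registered with Transaction.add.

    (* Toy model: "oldify" moves the current value aside, commit discards the
       old copy, rollback restores it. *)
    let current : (string, string) Hashtbl.t = Hashtbl.create 8
    let oldified : (string, string) Hashtbl.t = Hashtbl.create 8

    let oldify key =
      match Hashtbl.find_opt current key with
      | Some v ->
        Hashtbl.replace oldified key v;
        Hashtbl.remove current key
      | None -> ()

    (* ... the reparse then writes a fresh value into [current] for [key] ... *)

    let commit key = Hashtbl.remove oldified key

    let rollback key =
      match Hashtbl.find_opt oldified key with
      | Some v ->
        Hashtbl.replace current key v;
        Hashtbl.remove oldified key
      | None -> ()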
This is used by code outside of a init/recheck, like commands *) +module Reader : READER with type reader = State_reader.t = struct + type reader = State_reader.t + + let should_use_oldified key = + match !currently_oldified_files with + | None -> false + | Some oldified_files -> FilenameSet.mem key !oldified_files + + let has_ast ~reader:_ key = + if should_use_oldified key then + ASTHeap.mem_old key + else + ASTHeap.mem key + + let get_ast ~reader:_ key = + let ast = + if should_use_oldified key then + ASTHeap.get_old key + else + ASTHeap.get key + in + Option.map ~f:(decompactify_loc key) ast + + let get_sig_ast ~reader:_ key = + let ast = + if should_use_oldified key then + SigASTHeap.get_old key + else + SigASTHeap.get key + in + Option.map ~f:(add_source_aloc key) ast + + let get_sig_ast_aloc_table ~reader:_ key = + if should_use_oldified key then + SigASTALocTableHeap.get_old key + else + SigASTALocTableHeap.get key + + let get_docblock ~reader:_ key = + if should_use_oldified key then + DocblockHeap.get_old key + else + DocblockHeap.get key + + let get_file_sig ~reader:_ key = + if should_use_oldified key then + FileSigHeap.get_old key + else + FileSigHeap.get key + + let get_sig_file_sig ~reader:_ key = + if should_use_oldified key then + SigFileSigHeap.get_old key + else + SigFileSigHeap.get key + + let get_file_hash ~reader:_ key = + if should_use_oldified key then + FileHashHeap.get_old key + else + FileHashHeap.get key + + let get_ast_unsafe ~reader file = + match get_ast ~reader file with + | Some ast -> ast + | None -> raise (Ast_not_found (File_key.to_string file)) + + let get_sig_ast_unsafe ~reader file = + match get_sig_ast ~reader file with + | Some ast -> ast + | None -> raise (Sig_ast_not_found (File_key.to_string file)) + + let get_sig_ast_aloc_table_unsafe ~reader file = + match get_sig_ast_aloc_table ~reader file with + | Some table -> table + | None -> raise (Sig_ast_ALoc_table_not_found (File_key.to_string file)) + + let get_sig_ast_aloc_table_unsafe_lazy = + make_lazy_aloc_table_fetcher ~get_sig_ast_aloc_table_unsafe + + let get_docblock_unsafe ~reader file = + match get_docblock ~reader file with + | Some docblock -> docblock + | None -> raise (Docblock_not_found (File_key.to_string file)) + + let get_file_sig_unsafe ~reader file = + match get_file_sig ~reader file with + | Some file_sig -> file_sig + | None -> raise (Requires_not_found (File_key.to_string file)) + + let get_sig_file_sig_unsafe ~reader file = + match get_sig_file_sig ~reader file with + | Some file_sig -> file_sig + | None -> raise (Sig_requires_not_found (File_key.to_string file)) + + let get_file_hash_unsafe ~reader file = + match get_file_hash ~reader file with + | Some file_hash -> file_hash + | None -> raise (Hash_not_found (File_key.to_string file)) +end -exception Requires_not_found of string -let get_file_sig_unsafe file = - try FileSigHeap.find_unsafe file - with Not_found -> raise (Requires_not_found (File_key.to_string file)) +(* Reader_dispatcher is used by code which may or may not be running inside an init/recheck *) +module Reader_dispatcher : READER with type reader = Abstract_state_reader.t = struct + type reader = Abstract_state_reader.t + + open Abstract_state_reader + + let has_ast ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.has_ast ~reader + | State_reader reader -> Reader.has_ast ~reader + + let get_ast ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_ast ~reader + | State_reader reader -> Reader.get_ast 
~reader + + let get_docblock ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_docblock ~reader + | State_reader reader -> Reader.get_docblock ~reader + + let get_file_sig ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_file_sig ~reader + | State_reader reader -> Reader.get_file_sig ~reader + + let get_sig_file_sig ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_sig_file_sig ~reader + | State_reader reader -> Reader.get_sig_file_sig ~reader + + let get_file_hash ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_file_hash ~reader + | State_reader reader -> Reader.get_file_hash ~reader + + let get_ast_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_ast_unsafe ~reader + | State_reader reader -> Reader.get_ast_unsafe ~reader + + let get_sig_ast_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_sig_ast_unsafe ~reader + | State_reader reader -> Reader.get_sig_ast_unsafe ~reader + + let get_sig_ast_aloc_table_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_sig_ast_aloc_table_unsafe ~reader + | State_reader reader -> Reader.get_sig_ast_aloc_table_unsafe ~reader + + let get_sig_ast_aloc_table_unsafe_lazy = + make_lazy_aloc_table_fetcher ~get_sig_ast_aloc_table_unsafe + + let get_docblock_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_docblock_unsafe ~reader + | State_reader reader -> Reader.get_docblock_unsafe ~reader + + let get_file_sig_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_file_sig_unsafe ~reader + | State_reader reader -> Reader.get_file_sig_unsafe ~reader + + let get_sig_file_sig_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_sig_file_sig_unsafe ~reader + | State_reader reader -> Reader.get_sig_file_sig_unsafe ~reader + + let get_file_hash_unsafe ~reader = + match reader with + | Mutator_state_reader reader -> Mutator_reader.get_file_hash_unsafe ~reader + | State_reader reader -> Reader.get_file_hash_unsafe ~reader +end -exception Hash_not_found of string -let get_file_hash_unsafe file = - try FileHashHeap.find_unsafe file - with Not_found -> raise (Hash_not_found (File_key.to_string file)) +module From_saved_state : sig + val add_file_sig : File_key.t -> File_sig.With_Loc.t -> unit -module From_saved_state: sig - val add_file_sig: File_key.t -> File_sig.t -> unit - val add_file_hash: File_key.t -> Xx.hash -> unit + val add_file_hash : File_key.t -> Xx.hash -> unit end = struct let add_file_sig = FileSigHeap.add + let add_file_hash = FileHashHeap.add end diff --git a/src/state/heaps/parsing/parsing_heaps.mli b/src/state/heaps/parsing/parsing_heaps.mli index a4c9f34451b..9b797fb5c7f 100644 --- a/src/state/heaps/parsing/parsing_heaps.mli +++ b/src/state/heaps/parsing/parsing_heaps.mli @@ -1,43 +1,77 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
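make_lazy_aloc_table_unsafe_lazy above wraps the table lookup in a Lazy.t so the heap is only consulted, and can only fail, if someone actually forces the table. A small stand-alone sketch of that idea; the fetch function and array payload are illustrative, not Flow's ALoc.table.

    (* Build the thunk now, perform the lookup at most once, and only on force. *)
    let make_lazy_table (fetch : string -> int array) (source : string option) =
      lazy
        (match source with
         | None -> failwith "expected the location to carry a source file"
         | Some file -> fetch file)

    let () =
      let table = make_lazy_table (fun _file -> [| 0; 1; 2 |]) (Some "foo.js") in
      (* nothing has been fetched yet; forcing performs the lookup once *)
      assert (Array.length (Lazy.force table) = 3)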
*) +module type READER = sig + type reader + + val has_ast : reader:reader -> File_key.t -> bool + + val get_ast : reader:reader -> File_key.t -> (Loc.t, Loc.t) Flow_ast.program option + + val get_docblock : reader:reader -> File_key.t -> Docblock.t option + + val get_file_sig : reader:reader -> File_key.t -> File_sig.With_Loc.t option + + val get_sig_file_sig : reader:reader -> File_key.t -> File_sig.With_ALoc.t option + + val get_file_hash : reader:reader -> File_key.t -> Xx.hash option + + val get_ast_unsafe : reader:reader -> File_key.t -> (Loc.t, Loc.t) Flow_ast.program + + val get_sig_ast_unsafe : reader:reader -> File_key.t -> (ALoc.t, ALoc.t) Flow_ast.program + + val get_sig_ast_aloc_table_unsafe : reader:reader -> File_key.t -> ALoc.table + + val get_sig_ast_aloc_table_unsafe_lazy : reader:reader -> ALoc.t -> ALoc.table Lazy.t + + val get_docblock_unsafe : reader:reader -> File_key.t -> Docblock.t + + val get_file_sig_unsafe : reader:reader -> File_key.t -> File_sig.With_Loc.t + + val get_sig_file_sig_unsafe : reader:reader -> File_key.t -> File_sig.With_ALoc.t + + val get_file_hash_unsafe : reader:reader -> File_key.t -> Xx.hash +end + +module Mutator_reader : sig + include READER with type reader = Mutator_state_reader.t + + val get_old_file_hash : reader:Mutator_state_reader.t -> File_key.t -> Xx.hash option +end + +module Reader : READER with type reader = State_reader.t + +module Reader_dispatcher : READER with type reader = Abstract_state_reader.t + (* For use by a worker process *) type worker_mutator = { - add_file: File_key.t -> (Loc.t, Loc.t) Flow_ast.program -> Docblock.t -> File_sig.t -> unit; - add_hash: File_key.t -> Xx.hash -> unit + add_file: + File_key.t -> + Docblock.t -> + (Loc.t, Loc.t) Flow_ast.program * File_sig.With_Loc.t -> + ((ALoc.t, ALoc.t) Flow_ast.program * File_sig.With_ALoc.t * ALoc.table option) option -> + unit; + add_hash: File_key.t -> Xx.hash -> unit; } -module Parse_mutator: sig - val create: unit -> worker_mutator +module Parse_mutator : sig + val create : unit -> worker_mutator end -module Reparse_mutator: sig +module Reparse_mutator : sig type master_mutator (* Used by the master process *) - val create: Transaction.t -> Utils_js.FilenameSet.t -> master_mutator * worker_mutator - val revive_files: master_mutator -> Utils_js.FilenameSet.t -> unit -end - -val has_ast: File_key.t -> bool -val has_old_ast: File_key.t -> bool + val create : Transaction.t -> Utils_js.FilenameSet.t -> master_mutator * worker_mutator -val get_ast: File_key.t -> (Loc.t, Loc.t) Flow_ast.program option -val get_docblock: File_key.t -> Docblock.t option -val get_file_sig: File_key.t -> File_sig.t option -val get_file_hash: File_key.t -> Xx.hash option -val get_old_file_hash: File_key.t -> Xx.hash option + val revive_files : master_mutator -> Utils_js.FilenameSet.t -> unit +end -(* after parsing, retrieves ast and docblock by filename (unsafe) *) -val get_ast_unsafe: File_key.t -> (Loc.t, Loc.t) Flow_ast.program -val get_docblock_unsafe: File_key.t -> Docblock.t -val get_file_sig_unsafe: File_key.t -> File_sig.t -val get_file_hash_unsafe: File_key.t -> Xx.hash +module From_saved_state : sig + val add_file_sig : File_key.t -> File_sig.With_Loc.t -> unit -module From_saved_state: sig - val add_file_sig: File_key.t -> File_sig.t -> unit - val add_file_hash: File_key.t -> Xx.hash -> unit + val add_file_hash : File_key.t -> Xx.hash -> unit end diff --git a/src/state/heaps/parsing/parsing_heaps_utils.ml b/src/state/heaps/parsing/parsing_heaps_utils.ml new file mode 100644 
index 00000000000..a50cb5ff816 --- /dev/null +++ b/src/state/heaps/parsing/parsing_heaps_utils.ml @@ -0,0 +1,10 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +let loc_of_aloc ~reader aloc = + let table = Parsing_heaps.Reader.get_sig_ast_aloc_table_unsafe_lazy ~reader aloc in + ALoc.to_loc table aloc diff --git a/src/state/locals/module/dune b/src/state/locals/module/dune new file mode 100644 index 00000000000..532ee8ce1b2 --- /dev/null +++ b/src/state/locals/module/dune @@ -0,0 +1,9 @@ +(library + (name flow_state_locals_module) + (wrapped false) + (libraries + flow_common + flow_common_modulename + flow_state_readers + ) +) diff --git a/src/state/locals/module/module_hashtables.ml b/src/state/locals/module/module_hashtables.ml index 10bfa71a5e0..437dde288c2 100644 --- a/src/state/locals/module/module_hashtables.ml +++ b/src/state/locals/module/module_hashtables.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,34 +7,55 @@ (* hash table from module names to all known provider files. maintained and used by commit_modules and remove_files *) + (** TODO [perf]: investigate whether this takes too much memory **) let all_providers = ref (Hashtbl.create 0) -let find_in_all_providers_unsafe modulename = - Hashtbl.find (!all_providers) modulename +let currently_oldified_all_providers : (Modulename.t, Utils_js.FilenameSet.t) Hashtbl.t option ref + = + ref None + +let find_in_all_providers_unsafe modulename = Hashtbl.find !all_providers modulename + +module type READER = sig + type reader + + val find_in_all_providers_unsafe : reader:reader -> Modulename.t -> Utils_js.FilenameSet.t +end + +module Mutator_reader : READER with type reader = Mutator_state_reader.t = struct + type reader = Mutator_state_reader.t + + let find_in_all_providers_unsafe ~reader:_ = find_in_all_providers_unsafe +end -module All_providers_mutator: sig +module All_providers_mutator : sig type t - val create: Transaction.t -> t - val add_provider: t -> File_key.t -> Modulename.t -> unit - val remove_provider: t -> File_key.t -> Modulename.t -> unit + + val create : Transaction.t -> t + + val add_provider : t -> File_key.t -> Modulename.t -> unit + + val remove_provider : t -> File_key.t -> Modulename.t -> unit end = struct type t = unit let create transaction = let old_table = Hashtbl.copy !all_providers in + currently_oldified_all_providers := Some old_table; let commit () = Hh_logger.debug "Committing all_providers hashtable"; + currently_oldified_all_providers := None; Lwt.return_unit in let rollback () = Hh_logger.debug "Rolling back all_providers hashtable"; all_providers := old_table; + currently_oldified_all_providers := None; Lwt.return_unit in - - Transaction.add ~commit ~rollback transaction + Transaction.add ~singleton:"All providers" ~commit ~rollback transaction (* Note that the module provided by a file is always accessible via its full path, so that it may be imported by specifying (a part of) that path in any @@ -60,24 +81,33 @@ end = struct that name when the /foo directory is moved to, say, /qux/foo. 
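All_providers_mutator.create above snapshots the whole table with Hashtbl.copy and restores the snapshot on rollback, rather than trying to undo individual add_provider / remove_provider calls. A minimal sketch of that snapshot-and-restore idea; the real code registers the commit and rollback closures with Transaction.add instead of catching exceptions.

    (* Copy-on-begin: any number of in-place mutations can be undone by swapping
       the saved copy back in. *)
    let providers : (string, string list) Hashtbl.t ref = ref (Hashtbl.create 8)

    let with_snapshot_rollback f =
      let snapshot = Hashtbl.copy !providers in
      try f ()
      with e ->
        providers := snapshot;  (* discard everything mutated since the copy *)
        raise e

    let () =
      Hashtbl.replace !providers "m" ["a.js"];
      (try
         with_snapshot_rollback (fun () ->
             Hashtbl.replace !providers "m" ["a.js"; "b.js"];
             failwith "simulated rollback")
       with Failure _ -> ());
      assert (Hashtbl.find !providers "m" = ["a.js"])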
*) let add_provider () f m = - let provs = try Utils_js.FilenameSet.add f (find_in_all_providers_unsafe m) - with Not_found -> Utils_js.FilenameSet.singleton f in + let provs = + try Utils_js.FilenameSet.add f (find_in_all_providers_unsafe m) + with Not_found -> Utils_js.FilenameSet.singleton f + in Hashtbl.replace !all_providers m provs let remove_provider () f m = - let provs = try Utils_js.FilenameSet.remove f (find_in_all_providers_unsafe m) - with Not_found -> failwith (Printf.sprintf - "can't remove provider %s of %S, not found in all_providers" - (File_key.to_string f) (Modulename.to_string m)) + let provs = + try Utils_js.FilenameSet.remove f (find_in_all_providers_unsafe m) + with Not_found -> + failwith + (Printf.sprintf + "can't remove provider %s of %S, not found in all_providers" + (File_key.to_string f) + (Modulename.to_string m)) in Hashtbl.replace !all_providers m provs end -(* We actually don't need a mutator for module_name_candidates_cache. There are a few reasons why +(* We actually don't need a mutator or reader for module_name_candidates_cache. There are a few + * reasons why: + * * 1. It's really only used for memoization. We never remove or replace anything * 2. The code which populates it never changes during the lifetime of a server. So we never * really need to roll anything back ever *) let module_name_candidates_cache = Hashtbl.create 50 + let memoize_with_module_name_candidates_cache ~f name = try Hashtbl.find module_name_candidates_cache name with Not_found -> diff --git a/src/state/locals/module/module_hashtables.mli b/src/state/locals/module/module_hashtables.mli index 8feb04c457d..56da6763d20 100644 --- a/src/state/locals/module/module_hashtables.mli +++ b/src/state/locals/module/module_hashtables.mli @@ -1,17 +1,26 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val find_in_all_providers_unsafe: Modulename.t -> Utils_js.FilenameSet.t +module type READER = sig + type reader -module All_providers_mutator: sig + val find_in_all_providers_unsafe : reader:reader -> Modulename.t -> Utils_js.FilenameSet.t +end + +module Mutator_reader : READER with type reader = Mutator_state_reader.t + +module All_providers_mutator : sig type t - val create: Transaction.t -> t - val add_provider: t -> File_key.t -> Modulename.t -> unit - val remove_provider: t -> File_key.t -> Modulename.t -> unit + + val create : Transaction.t -> t + + val add_provider : t -> File_key.t -> Modulename.t -> unit + + val remove_provider : t -> File_key.t -> Modulename.t -> unit end -val memoize_with_module_name_candidates_cache: f:(string -> string list) -> string -> string list +val memoize_with_module_name_candidates_cache : f:(string -> string list) -> string -> string list diff --git a/src/state/readers/abstract_state_reader.ml b/src/state/readers/abstract_state_reader.ml new file mode 100644 index 00000000000..c4e9d440fd2 --- /dev/null +++ b/src/state/readers/abstract_state_reader.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
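The hunk context above cuts off in the middle of memoize_with_module_name_candidates_cache, but the accompanying comment spells out why a plain, never-invalidated cache is safe here. A generic sketch of that memoize-into-a-Hashtbl pattern, not the actual body of the function in this patch:

    (* Compute once per key, cache forever; valid only because the memoized
       function never changes for the lifetime of the server. *)
    let cache : (string, string list) Hashtbl.t = Hashtbl.create 50

    let memoize ~f name =
      match Hashtbl.find_opt cache name with
      | Some result -> result
      | None ->
        let result = f name in
        Hashtbl.add cache name result;
        result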
+ *) + +(* This module is documented in state_reader.ml *) + +type t = + | State_reader of State_reader.t + | Mutator_state_reader of Mutator_state_reader.t diff --git a/src/state/readers/dune b/src/state/readers/dune new file mode 100644 index 00000000000..314d52c633e --- /dev/null +++ b/src/state/readers/dune @@ -0,0 +1,7 @@ +(library + (name flow_state_readers) + (wrapped false) + (libraries + flow_common_transaction + ) +) diff --git a/src/state/readers/mutator_state_reader.ml b/src/state/readers/mutator_state_reader.ml new file mode 100644 index 00000000000..ab98d854bcf --- /dev/null +++ b/src/state/readers/mutator_state_reader.ml @@ -0,0 +1,12 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* This module is documented in state_reader.ml *) + +type t = unit + +let create _transaction = () diff --git a/src/state/readers/mutator_state_reader.mli b/src/state/readers/mutator_state_reader.mli new file mode 100644 index 00000000000..8fc2e4f1aa6 --- /dev/null +++ b/src/state/readers/mutator_state_reader.mli @@ -0,0 +1,10 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t + +val create : Transaction.t -> t diff --git a/src/state/readers/state_reader.ml b/src/state/readers/state_reader.ml new file mode 100644 index 00000000000..42fc983a97d --- /dev/null +++ b/src/state/readers/state_reader.ml @@ -0,0 +1,26 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* State_reader, Mutator_state_reader, and Abstract_state_reader are modules which basically do + * nothing. All they do is leverage the OCaml type system in an attempt to force the Flow team to + * read from the correct part of the shared memory. + * + * Init and rechecks should use the Mutator_state_reader that is created for them when the + * transaction starts. This ensures that they are always reading from the new shared memory, + * unless they explicitly try to read oldified data. + * + * Everything else should use State_reader. This reads from the new shared memory too. However, + * if we're in the middle of a recheck and there is oldified data available for a file, it instead + * uses that data. This allows code using a State_reader to run in parallel with a recheck. + * + * Some library functions can run inside a init/recheck or outside one. This code will take an + * Abstract_state_reader, which allows us to share that code. + *) + +type t = unit + +let create () = () diff --git a/src/state/readers/state_reader.mli b/src/state/readers/state_reader.mli new file mode 100644 index 00000000000..83d0e121145 --- /dev/null +++ b/src/state/readers/state_reader.mli @@ -0,0 +1,10 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t + +val create : unit -> t diff --git a/src/stubs/dumper.ml b/src/stubs/dumper.ml index 7f373fc2e1b..94c8f1fadb6 100644 --- a/src/stubs/dumper.ml +++ b/src/stubs/dumper.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
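The state_reader.ml comment above explains the intent: these modules carry no data (both types are unit underneath) and exist purely so the type checker forces callers to hold the right kind of reader. A small sketch of how an abstract, unit-backed token still yields a static guarantee; the names and the string parameter are illustrative.

    (* Outside this module the type is abstract, so the only way to obtain a
       token is via [create], which demands a transaction. Functions that take
       the token can therefore only be reached from init/recheck code. *)
    module Mutator_token : sig
      type t
      val create : transaction:string -> t  (* string stands in for Transaction.t *)
    end = struct
      type t = unit
      let create ~transaction:_ = ()
    end

    let read_from_new_heap ~(reader : Mutator_token.t) key =
      ignore reader;
      "value of " ^ key  (* stand-in for a real shared-memory lookup *)

    let () =
      let reader = Mutator_token.create ~transaction:"recheck" in
      ignore (read_from_new_heap ~reader "foo.js")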
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/stubs/dune b/src/stubs/dune new file mode 100644 index 00000000000..9d962dfb576 --- /dev/null +++ b/src/stubs/dune @@ -0,0 +1,32 @@ +(library + (name flow_logging_stubs) + (wrapped false) + (modules flowEventLogger flow_server_profile flowInteractionLogger) + (libraries + lwt + ) +) + +(library + (name flow_logging_lwt_stubs) + (wrapped false) + (modules eventLoggerLwt) + (libraries + lwt + ) +) + +(library + (name flow_saved_state_stubs) + (wrapped false) + (modules saved_state_fb_fetcher) + (libraries + flow_service_saved_state_fetcher + ) +) + +(library + (name flow_extra_commands) + (wrapped false) + (modules extra_commands) +) diff --git a/src/stubs/eventLoggerLwt.ml b/src/stubs/eventLoggerLwt.ml index 4d73ea81772..eba4a05f7d5 100644 --- a/src/stubs/eventLoggerLwt.ml +++ b/src/stubs/eventLoggerLwt.ml @@ -1,11 +1,8 @@ (** - * Copyright (c) 2015, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the BSD-style license found in the - * LICENSE file in the "hack" directory of this source tree. An additional grant - * of patent rights can be found in the PATENTS file in the same directory. + * Copyright (c) Facebook, Inc. and its affiliates. * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. *) let flush () = Lwt.return_unit diff --git a/src/stubs/extra_commands.ml b/src/stubs/extra_commands.ml index 50039ca7397..9e97b0e848e 100644 --- a/src/stubs/extra_commands.ml +++ b/src/stubs/extra_commands.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/stubs/flowEventLogger.ml b/src/stubs/flowEventLogger.ml index ec27af2ec44..07f1715cc3d 100644 --- a/src/stubs/flowEventLogger.ml +++ b/src/stubs/flowEventLogger.ml @@ -1,18 +1,11 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -type logging_context = { - argv: string; - command: string option; - from: string option; - root: string option; - root_name: string option; - start_time: float; -} +type logging_context = { from: string option } type persistent_context = { start_lsp_state: string option; @@ -38,54 +31,130 @@ type persistent_delay = { recheck_worst_cycle_size: int option; } -let get_context _ = { - argv = ""; - command = None; - from = None; - root = None; - root_name = None; - start_time = 0.0; -} +let context = ref { from = None } + +let get_context () = !context + +let get_from_I_AM_A_CLOWN () = !context.from + let restore_context _ = () + let set_command _ = () -let set_from _ = () + +let set_from from = context := { from } + let set_root _ = () + let set_root_name _ = () + let set_saved_state_filename _ = () -let set_monitor_options ~file_watcher:_ = () -let set_server_options ~lazy_mode:_ ~cancelable_rechecks:_ = () + +let set_monitor_options ~file_watcher:_ = () + +let set_server_options + ~lazy_mode:_ ~arch:_ ~abstract_locations:_ ~max_workers:_ ~enabled_rollouts:_ = + () let status_response ~num_errors:_ = () -let init_done ~profiling:_ = () + +let init_done + ?estimated_time_to_recheck:_ + ?estimated_time_to_restart:_ + ?estimated_time_to_init:_ + ?estimated_time_per_file:_ + ?estimated_files_to_recheck:_ + ?estimated_files_to_init:_ + ?first_internal_error:_ + _profiling = + () + let init_flow_command ~version:_ = () + let killed _ = () + let lock_lost _ = () + let lock_stolen _ = () + let out_of_date _ = () + let exit _ _ = () + let report_from_monitor_server_exit_due_to_signal _ = () + let recheck + ~recheck_reasons:_ ~modified:_ ~deleted:_ ~dependent_files:_ - ~skipped_count:_ - ~profiling:_ = () + ~merge_skip_count:_ + ~check_skip_count:_ + ~profiling:_ + ~estimated_time_to_recheck:_ + ~estimated_time_to_restart:_ + ~estimated_time_to_init:_ + ~estimated_time_per_file:_ + ~estimated_files_to_recheck:_ + ~estimated_files_to_init:_ + ~first_internal_error:_ + ~slowest_file:_ + ~num_slow_files:_ + ~scm_update_distance:_ + ~scm_changed_mergebase:_ = + () + let murdered_by_oom_killer _ = () + let ephemeral_command_success ?json_data:_ ~client_context:_ ~profiling:_ = () + let ephemeral_command_failure ?json_data:_ ~client_context:_ = () -let persistent_command_success ~server_logging_context:_ ~request:_ ~extra_data:_ - ~client_context:_ ~persistent_context:_ ~persistent_delay:_ - ~server_profiling:_ ~client_duration:_ ~wall_start:_ ~error:_ = () -let persistent_command_failure ~server_logging_context:_ ~request:_ ~extra_data:_ - ~client_context:_ ~persistent_context:_ ~persistent_delay:_ - ~server_profiling:_ ~client_duration:_ ~wall_start:_ ~error:_ = () -let persistent_expected_error ~client_context:_ ~error:_ = () -let persistent_unexpected_error ~client_context:_ ~error:_ = () + +let persistent_command_success + ~server_logging_context:_ + ~request:_ + ~extra_data:_ + ~client_context:_ + ~persistent_context:_ + ~persistent_delay:_ + ~server_profiling:_ + ~client_duration:_ + ~wall_start:_ + ~error:_ = + () + +let persistent_command_failure + ~server_logging_context:_ + ~request:_ + ~extra_data:_ + ~client_context:_ + ~persistent_context:_ + ~persistent_delay:_ + ~server_profiling:_ + ~client_duration:_ + ~wall_start:_ + ~error:_ = + () + +let persistent_expected_error ~request:_ ~client_context:_ ~error:_ = () + +let persistent_unexpected_error ~request:_ ~client_context:_ ~error:_ = () + let saved_state_fb_fetcher_success - ~repo_root:_ ~merge_base_hash:_ ~merge_base_timestamp:_ 
~saved_state_hash:_ - ~changed_files_count:_ ~saved_state_filename:_ ~profiling:_ = () + ~repo_root:_ + ~merge_base_hash:_ + ~merge_base_timestamp:_ + ~saved_state_hash:_ + ~changed_files_count:_ + ~saved_state_filename:_ + ~profiling:_ = + () let saved_state_fb_fetcher_error ~step:_ ~trace:_ ~profiling:_ = () let load_saved_state_success ~changed_files_count:_ = () + let load_saved_state_error ~saved_state_filename:_ ~changed_files_count:_ ~invalid_reason:_ = () + +let idle_heartbeat ~idle_time:_ ~profiling:_ = () + +let live_parse_errors ~request:_ ~data:_ ~wall_start:_ = () diff --git a/src/stubs/flowInteractionLogger.ml b/src/stubs/flowInteractionLogger.ml new file mode 100644 index 00000000000..9bf70ddba57 --- /dev/null +++ b/src/stubs/flowInteractionLogger.ml @@ -0,0 +1,26 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +let init () = () + +let set_server_config ~flowconfig_name:_ ~root:_ ~root_name:_ = () + +let interaction + ~source:_ + ~trigger:_ + ~ux:_ + ~start_time_ms:_ + ~end_time_ms:_ + ~start_server_status:_ + ~end_server_status:_ + ~start_buffer_status:_ + ~end_buffer_status:_ = + () + +let flush () = Lwt.return_unit + +let disable_logging () = () diff --git a/src/stubs/flow_server_profile.ml b/src/stubs/flow_server_profile.ml index b1e1183312a..bb212e4ec8c 100644 --- a/src/stubs/flow_server_profile.ml +++ b/src/stubs/flow_server_profile.ml @@ -1,14 +1,22 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) let init () = () + let init_from_id _ = () + let infer ~filename:_ ~infer_time:_ = () + let merge ~length:_ ~merge_time:_ ~leader:_ = () + let processor_sample () = () + let get_id () = "hello" + let print_url () = () + +let disable_logging () = () diff --git a/src/stubs/haste_module_preprocessor.ml b/src/stubs/haste_module_preprocessor.ml index 3d0e7bc0e1c..e9aad0dcc95 100644 --- a/src/stubs/haste_module_preprocessor.ml +++ b/src/stubs/haste_module_preprocessor.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/stubs/haste_module_preprocessor.mli b/src/stubs/haste_module_preprocessor.mli index 7dfda7f5f7a..7439c1b8216 100644 --- a/src/stubs/haste_module_preprocessor.mli +++ b/src/stubs/haste_module_preprocessor.mli @@ -1 +1,8 @@ -val preprocess_name: string -> string +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +val preprocess_name : string -> string diff --git a/src/stubs/saved_state_fb_fetcher.ml b/src/stubs/saved_state_fb_fetcher.ml index 8fd3dce5995..674af163cdd 100644 --- a/src/stubs/saved_state_fb_fetcher.ml +++ b/src/stubs/saved_state_fb_fetcher.ml @@ -1 +1,8 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + include Saved_state_dummy_fetcher diff --git a/src/third-party/lz4/Makefile b/src/third-party/lz4/Makefile deleted file mode 100644 index dd33f50351a..00000000000 --- a/src/third-party/lz4/Makefile +++ /dev/null @@ -1,182 +0,0 @@ -# ################################################################ -# LZ4 library - Makefile -# Copyright (C) Yann Collet 2011-2016 -# All rights reserved. -# -# This Makefile is validated for Linux, macOS, *BSD, Hurd, Solaris, MSYS2 targets -# -# BSD license -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, this -# list of conditions and the following disclaimer in the documentation and/or -# other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-# -# You can contact the author at : -# - LZ4 source repository : https://github.com/Cyan4973/lz4 -# - LZ4 forum froup : https://groups.google.com/forum/#!forum/lz4c -# ################################################################ - -# Version numbers -LIBVER_MAJOR_SCRIPT:=`sed -n '/define LZ4_VERSION_MAJOR/s/.*[[:blank:]]\([0-9][0-9]*\).*/\1/p' < ./lz4.h` -LIBVER_MINOR_SCRIPT:=`sed -n '/define LZ4_VERSION_MINOR/s/.*[[:blank:]]\([0-9][0-9]*\).*/\1/p' < ./lz4.h` -LIBVER_PATCH_SCRIPT:=`sed -n '/define LZ4_VERSION_RELEASE/s/.*[[:blank:]]\([0-9][0-9]*\).*/\1/p' < ./lz4.h` -LIBVER_SCRIPT:= $(LIBVER_MAJOR_SCRIPT).$(LIBVER_MINOR_SCRIPT).$(LIBVER_PATCH_SCRIPT) -LIBVER_MAJOR := $(shell echo $(LIBVER_MAJOR_SCRIPT)) -LIBVER_MINOR := $(shell echo $(LIBVER_MINOR_SCRIPT)) -LIBVER_PATCH := $(shell echo $(LIBVER_PATCH_SCRIPT)) -LIBVER := $(shell echo $(LIBVER_SCRIPT)) - -BUILD_STATIC:=yes - -CPPFLAGS+= -DXXH_NAMESPACE=LZ4_ -CFLAGS ?= -O3 -DEBUGFLAGS:= -Wall -Wextra -Wcast-qual -Wcast-align -Wshadow \ - -Wswitch-enum -Wdeclaration-after-statement -Wstrict-prototypes \ - -Wundef -Wpointer-arith -Wstrict-aliasing=1 -CFLAGS += $(DEBUGFLAGS) $(MOREFLAGS) -FLAGS = $(CPPFLAGS) $(CFLAGS) $(LDFLAGS) - -SRCFILES := $(sort $(wildcard *.c)) - - -# OS X linker doesn't support -soname, and use different extension -# see : https://developer.apple.com/library/mac/documentation/DeveloperTools/Conceptual/DynamicLibraries/100-Articles/DynamicLibraryDesignGuidelines.html -ifeq ($(shell uname), Darwin) - SHARED_EXT = dylib - SHARED_EXT_MAJOR = $(LIBVER_MAJOR).$(SHARED_EXT) - SHARED_EXT_VER = $(LIBVER).$(SHARED_EXT) - SONAME_FLAGS = -install_name $(LIBDIR)/liblz4.$(SHARED_EXT_MAJOR) -compatibility_version $(LIBVER_MAJOR) -current_version $(LIBVER) -else - SONAME_FLAGS = -Wl,-soname=liblz4.$(SHARED_EXT).$(LIBVER_MAJOR) - SHARED_EXT = so - SHARED_EXT_MAJOR = $(SHARED_EXT).$(LIBVER_MAJOR) - SHARED_EXT_VER = $(SHARED_EXT).$(LIBVER) -endif - -LIBLZ4 = liblz4.$(SHARED_EXT_VER) - -.PHONY: default -default: lib-release - -lib-release: DEBUGFLAGS := -lib-release: lib - -lib: liblz4.a liblz4 - -all: lib - -all32: CFLAGS+=-m32 -all32: all - -liblz4.a: $(SRCFILES) -ifeq ($(BUILD_STATIC),yes) # can be disabled on command line - @echo compiling static library - @$(CC) $(CPPFLAGS) $(CFLAGS) -c $^ - @$(AR) rcs $@ *.o -endif - -$(LIBLZ4): $(SRCFILES) - @echo compiling dynamic library $(LIBVER) -ifneq (,$(filter Windows%,$(OS))) - @$(CC) $(FLAGS) -DLZ4_DLL_EXPORT=1 -shared $^ -o dll\$@.dll - dlltool -D dll\liblz4.dll -d dll\liblz4.def -l dll\liblz4.lib -else - @$(CC) $(FLAGS) -shared $^ -fPIC -fvisibility=hidden $(SONAME_FLAGS) -o $@ - @echo creating versioned links - @ln -sf $@ liblz4.$(SHARED_EXT_MAJOR) - @ln -sf $@ liblz4.$(SHARED_EXT) -endif - -liblz4: $(LIBLZ4) - -clean: - @$(RM) core *.o liblz4.pc dll/liblz4.dll dll/liblz4.lib - @$(RM) *.a *.$(SHARED_EXT) *.$(SHARED_EXT_MAJOR) *.$(SHARED_EXT_VER) - @echo Cleaning library completed - - -#----------------------------------------------------------------------------- -# make install is validated only for Linux, OSX, BSD, Hurd and Solaris targets -#----------------------------------------------------------------------------- -ifneq (,$(filter $(shell uname),Linux Darwin GNU/kFreeBSD GNU OpenBSD FreeBSD NetBSD DragonFly SunOS)) - -DESTDIR ?= -# directory variables : GNU conventions prefer lowercase -# see https://www.gnu.org/prep/standards/html_node/Makefile-Conventions.html -# support both lower and uppercase (BSD), use uppercase in script -prefix ?= /usr/local -PREFIX ?= $(prefix) 
-exec_prefix ?= $(PREFIX) -libdir ?= $(exec_prefix)/lib -LIBDIR ?= $(libdir) -includedir ?= $(PREFIX)/include -INCLUDEDIR ?= $(includedir) - -ifneq (,$(filter $(shell uname),OpenBSD FreeBSD NetBSD DragonFly)) -PKGCONFIGDIR ?= $(PREFIX)/libdata/pkgconfig -else -PKGCONFIGDIR ?= $(LIBDIR)/pkgconfig -endif - -ifneq (,$(filter $(shell uname),SunOS)) -INSTALL ?= ginstall -else -INSTALL ?= install -endif - -INSTALL_PROGRAM ?= $(INSTALL) -INSTALL_DATA ?= $(INSTALL) -m 644 - -liblz4.pc: liblz4.pc.in Makefile - @echo creating pkgconfig - @sed -e 's|@PREFIX@|$(PREFIX)|' \ - -e 's|@LIBDIR@|$(LIBDIR)|' \ - -e 's|@INCLUDEDIR@|$(INCLUDEDIR)|' \ - -e 's|@VERSION@|$(LIBVER)|' \ - $< >$@ - -install: lib liblz4.pc - @$(INSTALL) -d -m 755 $(DESTDIR)$(PKGCONFIGDIR)/ $(DESTDIR)$(INCLUDEDIR)/ $(DESTDIR)$(LIBDIR)/ - @$(INSTALL_DATA) liblz4.pc $(DESTDIR)$(PKGCONFIGDIR)/ - @echo Installing libraries -ifeq ($(BUILD_STATIC),yes) - @$(INSTALL_DATA) liblz4.a $(DESTDIR)$(LIBDIR)/liblz4.a - @$(INSTALL_DATA) lz4frame_static.h $(DESTDIR)$(INCLUDEDIR)/lz4frame_static.h -endif - @$(INSTALL_PROGRAM) liblz4.$(SHARED_EXT_VER) $(DESTDIR)$(LIBDIR) - @ln -sf liblz4.$(SHARED_EXT_VER) $(DESTDIR)$(LIBDIR)/liblz4.$(SHARED_EXT_MAJOR) - @ln -sf liblz4.$(SHARED_EXT_VER) $(DESTDIR)$(LIBDIR)/liblz4.$(SHARED_EXT) - @echo Installing headers in $(INCLUDEDIR) - @$(INSTALL_DATA) lz4.h $(DESTDIR)$(INCLUDEDIR)/lz4.h - @$(INSTALL_DATA) lz4hc.h $(DESTDIR)$(INCLUDEDIR)/lz4hc.h - @$(INSTALL_DATA) lz4frame.h $(DESTDIR)$(INCLUDEDIR)/lz4frame.h - @echo lz4 libraries installed - -uninstall: - @$(RM) $(DESTDIR)$(LIBDIR)/pkgconfig/liblz4.pc - @$(RM) $(DESTDIR)$(LIBDIR)/liblz4.$(SHARED_EXT) - @$(RM) $(DESTDIR)$(LIBDIR)/liblz4.$(SHARED_EXT_MAJOR) - @$(RM) $(DESTDIR)$(LIBDIR)/liblz4.$(SHARED_EXT_VER) - @$(RM) $(DESTDIR)$(LIBDIR)/liblz4.a - @$(RM) $(DESTDIR)$(INCLUDEDIR)/lz4.h - @$(RM) $(DESTDIR)$(INCLUDEDIR)/lz4hc.h - @$(RM) $(DESTDIR)$(INCLUDEDIR)/lz4frame.h - @echo lz4 libraries successfully uninstalled - -endif diff --git a/src/third-party/lz4/VERSION b/src/third-party/lz4/VERSION index 8f382ecc8da..bf09dda64b0 100644 --- a/src/third-party/lz4/VERSION +++ b/src/third-party/lz4/VERSION @@ -1 +1 @@ -https://github.com/lz4/lz4 @ dccf8826f1d76efcbdc655e63cc04cdbd1123619 +https://github.com/lz4/lz4 @ 01d2a721d393646384291af2f6f2f940493cd78f diff --git a/src/third-party/lz4/dune b/src/third-party/lz4/dune new file mode 100644 index 00000000000..39a3af5397a --- /dev/null +++ b/src/third-party/lz4/dune @@ -0,0 +1,5 @@ +(library + (name lz4) + (preprocess no_preprocessing) + (c_names lz4 lz4frame lz4hc xxhash) +) diff --git a/src/third-party/lz4/liblz4.pc.in b/src/third-party/lz4/liblz4.pc.in deleted file mode 100644 index cb31cd78a03..00000000000 --- a/src/third-party/lz4/liblz4.pc.in +++ /dev/null @@ -1,14 +0,0 @@ -# LZ4 - Fast LZ compression algorithm -# Copyright (C) 2011-2014, Yann Collet. -# BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) - -prefix=@PREFIX@ -libdir=@LIBDIR@ -includedir=@INCLUDEDIR@ - -Name: lz4 -Description: extremely fast lossless compression algorithm library -URL: http://www.lz4.org/ -Version: @VERSION@ -Libs: -L@LIBDIR@ -llz4 -Cflags: -I@INCLUDEDIR@ diff --git a/src/third-party/lz4/lz4.c b/src/third-party/lz4/lz4.c index 179408da457..4046102e6de 100644 --- a/src/third-party/lz4/lz4.c +++ b/src/third-party/lz4/lz4.c @@ -1,6 +1,6 @@ /* LZ4 - Fast LZ compression algorithm - Copyright (C) 2011-2017, Yann Collet. + Copyright (C) 2011-present, Yann Collet. 
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) @@ -69,9 +69,11 @@ * Prefer these methods in priority order (0 > 1 > 2) */ #ifndef LZ4_FORCE_MEMORY_ACCESS /* can be defined externally */ -# if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) ) +# if defined(__GNUC__) && \ + ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) \ + || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) ) # define LZ4_FORCE_MEMORY_ACCESS 2 -# elif defined(__INTEL_COMPILER) || defined(__GNUC__) +# elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__) # define LZ4_FORCE_MEMORY_ACCESS 1 # endif #endif @@ -80,14 +82,17 @@ * LZ4_FORCE_SW_BITCOUNT * Define this parameter if your target system or compiler does not support hardware bit count */ -#if defined(_MSC_VER) && defined(_WIN32_WCE) /* Visual Studio for Windows CE does not support Hardware bit count */ +#if defined(_MSC_VER) && defined(_WIN32_WCE) /* Visual Studio for WinCE doesn't support Hardware bit count */ # define LZ4_FORCE_SW_BITCOUNT #endif + /*-************************************ * Dependency **************************************/ +#define LZ4_STATIC_LINKING_ONLY +#define LZ4_DISABLE_DEPRECATE_WARNINGS /* due to LZ4_decompress_safe_withPrefix64k */ #include "lz4.h" /* see also "memory routines" below */ @@ -145,18 +150,23 @@ # define expect(expr,value) (expr) #endif +#ifndef likely #define likely(expr) expect((expr) != 0, 1) +#endif +#ifndef unlikely #define unlikely(expr) expect((expr) != 0, 0) +#endif /*-************************************ * Memory routines **************************************/ #include /* malloc, calloc, free */ -#define ALLOCATOR(n,s) calloc(n,s) -#define FREEMEM free +#define ALLOC(s) malloc(s) +#define ALLOC_AND_ZERO(s) calloc(1,s) +#define FREEMEM(p) free(p) #include /* memset, memcpy */ -#define MEM_INIT memset +#define MEM_INIT(p,v,s) memset((p),(v),(s)) /*-************************************ @@ -269,11 +279,6 @@ static void LZ4_writeLE16(void* memPtr, U16 value) } } -static void LZ4_copy8(void* dst, const void* src) -{ - memcpy(dst,src,8); -} - /* customized variant of memcpy, which can overwrite up to 8 bytes beyond dstEnd */ LZ4_FORCE_O2_INLINE_GCC_PPC64LE void LZ4_wildCopy(void* dstPtr, const void* srcPtr, void* dstEnd) @@ -282,7 +287,7 @@ void LZ4_wildCopy(void* dstPtr, const void* srcPtr, void* dstEnd) const BYTE* s = (const BYTE*)srcPtr; BYTE* const e = (BYTE*)dstEnd; - do { LZ4_copy8(d,s); d+=8; s+=8; } while (d=1) +# include +#else +# ifndef assert +# define assert(condition) ((void)0) +# endif +#endif + +#define LZ4_STATIC_ASSERT(c) { enum { LZ4_static_assert = 1/(int)(!!(c)) }; } /* use after variable declarations */ #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2) # include -# define DEBUGLOG(l, ...) { \ - if (l<=LZ4_DEBUG) { \ - fprintf(stderr, __FILE__ ": "); \ - fprintf(stderr, __VA_ARGS__); \ - fprintf(stderr, " \n"); \ +static int g_debuglog_enable = 1; +# define DEBUGLOG(l, ...) { \ + if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) { \ + fprintf(stderr, __FILE__ ": "); \ + fprintf(stderr, __VA_ARGS__); \ + fprintf(stderr, " \n"); \ } } #else # define DEBUGLOG(l, ...) 
{} /* disabled */ @@ -330,7 +345,7 @@ static const int LZ4_minLength = (MFLIMIT+1); /*-************************************ * Common functions **************************************/ -static unsigned LZ4_NbCommonBytes (register reg_t val) +static unsigned LZ4_NbCommonBytes (reg_t val) { if (LZ4_isLittleEndian()) { if (sizeof(val)==8) { @@ -341,7 +356,14 @@ static unsigned LZ4_NbCommonBytes (register reg_t val) # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) return (__builtin_ctzll((U64)val) >> 3); # else - static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7, 0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7, 7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6, 7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7 }; + static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2, + 0, 3, 1, 3, 1, 4, 2, 7, + 0, 2, 3, 6, 1, 5, 3, 5, + 1, 3, 4, 4, 2, 5, 6, 7, + 7, 0, 1, 2, 3, 3, 4, 6, + 2, 6, 5, 5, 3, 4, 5, 6, + 7, 1, 2, 4, 6, 4, 4, 5, + 7, 2, 6, 5, 7, 6, 7, 7 }; return DeBruijnBytePos[((U64)((val & -(long long)val) * 0x0218A392CDABBD3FULL)) >> 58]; # endif } else /* 32 bits */ { @@ -352,7 +374,10 @@ static unsigned LZ4_NbCommonBytes (register reg_t val) # elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT) return (__builtin_ctz((U32)val) >> 3); # else - static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0, 3, 2, 2, 1, 3, 2, 0, 1, 3, 3, 1, 2, 2, 2, 2, 0, 3, 1, 2, 0, 1, 0, 1, 1 }; + static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0, + 3, 2, 2, 1, 3, 2, 0, 1, + 3, 3, 1, 2, 2, 2, 2, 0, + 3, 1, 2, 0, 1, 0, 1, 1 }; return DeBruijnBytePos[((U32)((val & -(S32)val) * 0x077CB531U)) >> 27]; # endif } @@ -392,11 +417,20 @@ static unsigned LZ4_NbCommonBytes (register reg_t val) } #define STEPSIZE sizeof(reg_t) -static unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLimit) +LZ4_FORCE_INLINE +unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLimit) { const BYTE* const pStart = pIn; - while (likely(pIn compression ru /*-************************************ * Local Structures and types **************************************/ -typedef enum { notLimited = 0, limitedOutput = 1 } limitedOutput_directive; -typedef enum { byPtr, byU32, byU16 } tableType_t; - -typedef enum { noDict = 0, withPrefix64k, usingExtDict } dict_directive; +typedef enum { notLimited = 0, limitedOutput = 1, fillOutput = 2 } limitedOutput_directive; +typedef enum { clearedTable = 0, byPtr, byU32, byU16 } tableType_t; + +/** + * This enum distinguishes several different modes of accessing previous + * content in the stream. + * + * - noDict : There is no preceding content. + * - withPrefix64k : Table entries up to ctx->dictSize before the current blob + * blob being compressed are valid and refer to the preceding + * content (of length ctx->dictSize), which is available + * contiguously preceding in memory the content currently + * being compressed. + * - usingExtDict : Like withPrefix64k, but the preceding content is somewhere + * else in memory, starting at ctx->dictionary with length + * ctx->dictSize. + * - usingDictCtx : Like usingExtDict, but everything concerning the preceding + * content is in a separate context, pointed to by + * ctx->dictCtx. ctx->dictionary, ctx->dictSize, and table + * entries in the current context that refer to positions + * preceding the beginning of the current compression are + * ignored. 
Instead, ctx->dictCtx->dictionary and ctx->dictCtx + * ->dictSize describe the location and size of the preceding + * content, and matches are found by looking in the ctx + * ->dictCtx->hashTable. + */ +typedef enum { noDict = 0, withPrefix64k, usingExtDict, usingDictCtx } dict_directive; typedef enum { noDictIssue = 0, dictSmall } dictIssue_directive; -typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive; -typedef enum { full = 0, partial = 1 } earlyEnd_directive; - /*-************************************ * Local Utils @@ -440,6 +494,21 @@ int LZ4_compressBound(int isize) { return LZ4_COMPRESSBOUND(isize); } int LZ4_sizeofState() { return LZ4_STREAMSIZE; } +/*-************************************ +* Internal Definitions used in Tests +**************************************/ +#if defined (__cplusplus) +extern "C" { +#endif + +int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize); + +int LZ4_decompress_safe_forceExtDict(const char* in, char* out, int inSize, int outSize, const void* dict, size_t dictSize); + +#if defined (__cplusplus) +} +#endif + /*-****************************** * Compression functions ********************************/ @@ -468,10 +537,25 @@ LZ4_FORCE_INLINE U32 LZ4_hashPosition(const void* const p, tableType_t const tab return LZ4_hash4(LZ4_read32(p), tableType); } -static void LZ4_putPositionOnHash(const BYTE* p, U32 h, void* tableBase, tableType_t const tableType, const BYTE* srcBase) +static void LZ4_putIndexOnHash(U32 idx, U32 h, void* tableBase, tableType_t const tableType) { switch (tableType) { + default: /* fallthrough */ + case clearedTable: /* fallthrough */ + case byPtr: { /* illegal! */ assert(0); return; } + case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = idx; return; } + case byU16: { U16* hashTable = (U16*) tableBase; assert(idx < 65536); hashTable[h] = (U16)idx; return; } + } +} + +static void LZ4_putPositionOnHash(const BYTE* p, U32 h, + void* tableBase, tableType_t const tableType, + const BYTE* srcBase) +{ + switch (tableType) + { + case clearedTable: { /* illegal! */ assert(0); return; } case byPtr: { const BYTE** hashTable = (const BYTE**)tableBase; hashTable[h] = p; return; } case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = (U32)(p-srcBase); return; } case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = (U16)(p-srcBase); return; } @@ -484,19 +568,81 @@ LZ4_FORCE_INLINE void LZ4_putPosition(const BYTE* p, void* tableBase, tableType_ LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase); } -static const BYTE* LZ4_getPositionOnHash(U32 h, void* tableBase, tableType_t tableType, const BYTE* srcBase) +/* LZ4_getIndexOnHash() : + * Index of match position registered in hash table. + * hash position must be calculated by using base+index, or dictBase+index. + * Assumption 1 : only valid if tableType == byU32 or byU16. 
+ * Assumption 2 : h is presumed valid (within limits of hash table) + */ +static U32 LZ4_getIndexOnHash(U32 h, const void* tableBase, tableType_t tableType) +{ + LZ4_STATIC_ASSERT(LZ4_MEMORY_USAGE > 2); + if (tableType == byU32) { + const U32* const hashTable = (const U32*) tableBase; + assert(h < (1U << (LZ4_MEMORY_USAGE-2))); + return hashTable[h]; + } + if (tableType == byU16) { + const U16* const hashTable = (const U16*) tableBase; + assert(h < (1U << (LZ4_MEMORY_USAGE-1))); + return hashTable[h]; + } + assert(0); return 0; /* forbidden case */ +} + +static const BYTE* LZ4_getPositionOnHash(U32 h, const void* tableBase, tableType_t tableType, const BYTE* srcBase) { - if (tableType == byPtr) { const BYTE** hashTable = (const BYTE**) tableBase; return hashTable[h]; } - if (tableType == byU32) { const U32* const hashTable = (U32*) tableBase; return hashTable[h] + srcBase; } - { const U16* const hashTable = (U16*) tableBase; return hashTable[h] + srcBase; } /* default, to ensure a return */ + if (tableType == byPtr) { const BYTE* const* hashTable = (const BYTE* const*) tableBase; return hashTable[h]; } + if (tableType == byU32) { const U32* const hashTable = (const U32*) tableBase; return hashTable[h] + srcBase; } + { const U16* const hashTable = (const U16*) tableBase; return hashTable[h] + srcBase; } /* default, to ensure a return */ } -LZ4_FORCE_INLINE const BYTE* LZ4_getPosition(const BYTE* p, void* tableBase, tableType_t tableType, const BYTE* srcBase) +LZ4_FORCE_INLINE const BYTE* LZ4_getPosition(const BYTE* p, + const void* tableBase, tableType_t tableType, + const BYTE* srcBase) { U32 const h = LZ4_hashPosition(p, tableType); return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase); } +LZ4_FORCE_INLINE void LZ4_prepareTable( + LZ4_stream_t_internal* const cctx, + const int inputSize, + const tableType_t tableType) { + /* If the table hasn't been used, it's guaranteed to be zeroed out, and is + * therefore safe to use no matter what mode we're in. Otherwise, we figure + * out if it's safe to leave as is or whether it needs to be reset. + */ + if (cctx->tableType != clearedTable) { + if (cctx->tableType != tableType + || (tableType == byU16 && cctx->currentOffset + inputSize >= 0xFFFFU) + || (tableType == byU32 && cctx->currentOffset > 1 GB) + || tableType == byPtr + || inputSize >= 4 KB) + { + DEBUGLOG(4, "LZ4_prepareTable: Resetting table in %p", cctx); + MEM_INIT(cctx->hashTable, 0, LZ4_HASHTABLESIZE); + cctx->currentOffset = 0; + cctx->tableType = clearedTable; + } else { + DEBUGLOG(4, "LZ4_prepareTable: Re-use hash table (no reset)"); + } + } + + /* Adding a gap, so all previous entries are > MAX_DISTANCE back, is faster + * than compressing without a gap. However, compressing with + * currentOffset == 0 is faster still, so we preserve that case. 
+ */ + if (cctx->currentOffset != 0 && tableType == byU32) { + DEBUGLOG(5, "LZ4_prepareTable: adding 64KB to currentOffset"); + cctx->currentOffset += 64 KB; + } + + /* Finally, clear history */ + cctx->dictCtx = NULL; + cctx->dictionary = NULL; + cctx->dictSize = 0; +} /** LZ4_compress_generic() : inlined, to ensure branches are decided at compilation time */ @@ -505,50 +651,70 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( const char* const source, char* const dest, const int inputSize, + int *inputConsumed, /* only written when outputLimited == fillOutput */ const int maxOutputSize, const limitedOutput_directive outputLimited, const tableType_t tableType, - const dict_directive dict, + const dict_directive dictDirective, const dictIssue_directive dictIssue, const U32 acceleration) { const BYTE* ip = (const BYTE*) source; - const BYTE* base; + + U32 const startIndex = cctx->currentOffset; + const BYTE* base = (const BYTE*) source - startIndex; const BYTE* lowLimit; - const BYTE* const lowRefLimit = ip - cctx->dictSize; - const BYTE* const dictionary = cctx->dictionary; - const BYTE* const dictEnd = dictionary + cctx->dictSize; - const ptrdiff_t dictDelta = dictEnd - (const BYTE*)source; + + const LZ4_stream_t_internal* dictCtx = (const LZ4_stream_t_internal*) cctx->dictCtx; + const BYTE* const dictionary = + dictDirective == usingDictCtx ? dictCtx->dictionary : cctx->dictionary; + const U32 dictSize = + dictDirective == usingDictCtx ? dictCtx->dictSize : cctx->dictSize; + const U32 dictDelta = (dictDirective == usingDictCtx) ? startIndex - dictCtx->currentOffset : 0; /* make indexes in dictCtx comparable with index in current context */ + + int const maybe_extMem = (dictDirective == usingExtDict) || (dictDirective == usingDictCtx); + U32 const prefixIdxLimit = startIndex - dictSize; /* used when dictDirective == dictSmall */ + const BYTE* const dictEnd = dictionary + dictSize; const BYTE* anchor = (const BYTE*) source; const BYTE* const iend = ip + inputSize; - const BYTE* const mflimit = iend - MFLIMIT; + const BYTE* const mflimitPlusOne = iend - MFLIMIT + 1; const BYTE* const matchlimit = iend - LASTLITERALS; + /* the dictCtx currentOffset is indexed on the start of the dictionary, + * while a dictionary in the current context precedes the currentOffset */ + const BYTE* dictBase = (dictDirective == usingDictCtx) ? + dictionary + dictSize - dictCtx->currentOffset : + dictionary + dictSize - startIndex; + BYTE* op = (BYTE*) dest; BYTE* const olimit = op + maxOutputSize; + U32 offset = 0; U32 forwardH; + DEBUGLOG(5, "LZ4_compress_generic: srcSize=%i, tableType=%u", inputSize, tableType); /* Init conditions */ + if (outputLimited == fillOutput && maxOutputSize < 1) return 0; /* Impossible to store anything */ if ((U32)inputSize > (U32)LZ4_MAX_INPUT_SIZE) return 0; /* Unsupported inputSize, too large (or negative) */ - switch(dict) - { - case noDict: - default: - base = (const BYTE*)source; - lowLimit = (const BYTE*)source; - break; - case withPrefix64k: - base = (const BYTE*)source - cctx->currentOffset; - lowLimit = (const BYTE*)source - cctx->dictSize; - break; - case usingExtDict: - base = (const BYTE*)source - cctx->currentOffset; - lowLimit = (const BYTE*)source; - break; + if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) return 0; /* Size too large (not within 64K limit) */ + if (tableType==byPtr) assert(dictDirective==noDict); /* only supported use case with byPtr */ + assert(acceleration >= 1); + + lowLimit = (const BYTE*)source - (dictDirective == withPrefix64k ? 
dictSize : 0); + + /* Update context state */ + if (dictDirective == usingDictCtx) { + /* Subsequent linked blocks can't use the dictionary. */ + /* Instead, they use the block we just compressed. */ + cctx->dictCtx = NULL; + cctx->dictSize = (U32)inputSize; + } else { + cctx->dictSize += (U32)inputSize; } - if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) return 0; /* Size too large (not within 64K limit) */ - if (inputSizecurrentOffset += (U32)inputSize; + cctx->tableType = (U16)tableType; + + if (inputSizehashTable, tableType, base); @@ -556,12 +722,12 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( /* Main Loop */ for ( ; ; ) { - ptrdiff_t refDelta = 0; const BYTE* match; BYTE* token; /* Find a match */ - { const BYTE* forwardIp = ip; + if (tableType == byPtr) { + const BYTE* forwardIp = ip; unsigned step = 1; unsigned searchMatchNb = acceleration << LZ4_skipTrigger; do { @@ -570,34 +736,89 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( forwardIp += step; step = (searchMatchNb++ >> LZ4_skipTrigger); - if (unlikely(forwardIp > mflimit)) goto _last_literals; + if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals; + assert(ip < mflimitPlusOne); match = LZ4_getPositionOnHash(h, cctx->hashTable, tableType, base); - if (dict==usingExtDict) { - if (match < (const BYTE*)source) { - refDelta = dictDelta; + forwardH = LZ4_hashPosition(forwardIp, tableType); + LZ4_putPositionOnHash(ip, h, cctx->hashTable, tableType, base); + + } while ( (match+MAX_DISTANCE < ip) + || (LZ4_read32(match) != LZ4_read32(ip)) ); + + } else { /* byU32, byU16 */ + + const BYTE* forwardIp = ip; + unsigned step = 1; + unsigned searchMatchNb = acceleration << LZ4_skipTrigger; + do { + U32 const h = forwardH; + U32 const current = (U32)(forwardIp - base); + U32 matchIndex = LZ4_getIndexOnHash(h, cctx->hashTable, tableType); + assert(matchIndex <= current); + assert(forwardIp - base < (ptrdiff_t)(2 GB - 1)); + ip = forwardIp; + forwardIp += step; + step = (searchMatchNb++ >> LZ4_skipTrigger); + + if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals; + assert(ip < mflimitPlusOne); + + if (dictDirective == usingDictCtx) { + if (matchIndex < startIndex) { + /* there was no match, try the dictionary */ + assert(tableType == byU32); + matchIndex = LZ4_getIndexOnHash(h, dictCtx->hashTable, byU32); + match = dictBase + matchIndex; + matchIndex += dictDelta; /* make dictCtx index comparable with current context */ + lowLimit = dictionary; + } else { + match = base + matchIndex; + lowLimit = (const BYTE*)source; + } + } else if (dictDirective==usingExtDict) { + if (matchIndex < startIndex) { + DEBUGLOG(7, "extDict candidate: matchIndex=%5u < startIndex=%5u", matchIndex, startIndex); + assert(startIndex - matchIndex >= MINMATCH); + match = dictBase + matchIndex; lowLimit = dictionary; } else { - refDelta = 0; + match = base + matchIndex; lowLimit = (const BYTE*)source; - } } + } + } else { /* single continuous memory segment */ + match = base + matchIndex; + } forwardH = LZ4_hashPosition(forwardIp, tableType); - LZ4_putPositionOnHash(ip, h, cctx->hashTable, tableType, base); + LZ4_putIndexOnHash(current, h, cctx->hashTable, tableType); + + if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) continue; /* match outside of valid area */ + assert(matchIndex < current); + if ((tableType != byU16) && (matchIndex+MAX_DISTANCE < current)) continue; /* too far */ + if (tableType == byU16) assert((current - matchIndex) <= MAX_DISTANCE); /* too_far presumed impossible with byU16 */ - } while ( 
((dictIssue==dictSmall) ? (match < lowRefLimit) : 0) - || ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip)) - || (LZ4_read32(match+refDelta) != LZ4_read32(ip)) ); + if (LZ4_read32(match) == LZ4_read32(ip)) { + if (maybe_extMem) offset = current - matchIndex; + break; /* match found */ + } + + } while(1); } /* Catch up */ - while (((ip>anchor) & (match+refDelta > lowLimit)) && (unlikely(ip[-1]==match[refDelta-1]))) { ip--; match--; } + while (((ip>anchor) & (match > lowLimit)) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; } /* Encode Literals */ { unsigned const litLength = (unsigned)(ip - anchor); token = op++; - if ((outputLimited) && /* Check output buffer overflow */ + if ((outputLimited == limitedOutput) && /* Check output buffer overflow */ (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit))) return 0; + if ((outputLimited == fillOutput) && + (unlikely(op + (litLength+240)/255 /* litlen */ + litLength /* literals */ + 2 /* offset */ + 1 /* token */ + MFLIMIT - MINMATCH /* min last literals so last match is <= end - MFLIMIT */ > olimit))) { + op--; + goto _last_literals; + } if (litLength >= RUN_MASK) { int len = (int)litLength-RUN_MASK; *token = (RUN_MASK< olimit)) { + /* the match was too close to the end, rewind and go to last literals */ + op = token; + goto _last_literals; + } + /* Encode Offset */ - LZ4_writeLE16(op, (U16)(ip-match)); op+=2; + if (maybe_extMem) { /* static test */ + DEBUGLOG(6, " with offset=%u (ext if > %i)", offset, (int)(ip - (const BYTE*)source)); + assert(offset <= MAX_DISTANCE && offset > 0); + LZ4_writeLE16(op, (U16)offset); op+=2; + } else { + DEBUGLOG(6, " with offset=%u (same segment)", (U32)(ip - match)); + assert(ip-match <= MAX_DISTANCE); + LZ4_writeLE16(op, (U16)(ip - match)); op+=2; + } /* Encode MatchLength */ { unsigned matchCode; - if ((dict==usingExtDict) && (lowLimit==dictionary)) { - const BYTE* limit; - match += refDelta; - limit = ip + (dictEnd-match); + if ( (dictDirective==usingExtDict || dictDirective==usingDictCtx) + && (lowLimit==dictionary) /* match within extDict */ ) { + const BYTE* limit = ip + (dictEnd-match); + assert(dictEnd > match); if (limit > matchlimit) limit = matchlimit; matchCode = LZ4_count(ip+MINMATCH, match+MINMATCH, limit); ip += MINMATCH + matchCode; if (ip==limit) { - unsigned const more = LZ4_count(ip, (const BYTE*)source, matchlimit); + unsigned const more = LZ4_count(limit, (const BYTE*)source, matchlimit); matchCode += more; ip += more; } + DEBUGLOG(6, " with matchLength=%u starting in extDict", matchCode+MINMATCH); } else { matchCode = LZ4_count(ip+MINMATCH, match+MINMATCH, matchlimit); ip += MINMATCH + matchCode; + DEBUGLOG(6, " with matchLength=%u", matchCode+MINMATCH); } - if ( outputLimited && /* Check output buffer overflow */ - (unlikely(op + (1 + LASTLITERALS) + (matchCode>>8) > olimit)) ) - return 0; + if ((outputLimited) && /* Check output buffer overflow */ + (unlikely(op + (1 + LASTLITERALS) + (matchCode>>8) > olimit)) ) { + if (outputLimited == limitedOutput) + return 0; + if (outputLimited == fillOutput) { + /* Match description too long : reduce it */ + U32 newMatchCode = 15 /* in token */ - 1 /* to avoid needing a zero byte */ + ((U32)(olimit - op) - 2 - 1 - LASTLITERALS) * 255; + ip -= matchCode - newMatchCode; + matchCode = newMatchCode; + } + } if (matchCode >= ML_MASK) { *token += ML_MASK; matchCode -= ML_MASK; @@ -656,37 +912,80 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( anchor = ip; /* Test end of chunk */ - if (ip > mflimit) break; + if (ip >= 
mflimitPlusOne) break; /* Fill table */ LZ4_putPosition(ip-2, cctx->hashTable, tableType, base); /* Test next position */ - match = LZ4_getPosition(ip, cctx->hashTable, tableType, base); - if (dict==usingExtDict) { - if (match < (const BYTE*)source) { - refDelta = dictDelta; - lowLimit = dictionary; - } else { - refDelta = 0; - lowLimit = (const BYTE*)source; - } } - LZ4_putPosition(ip, cctx->hashTable, tableType, base); - if ( ((dictIssue==dictSmall) ? (match>=lowRefLimit) : 1) - && (match+MAX_DISTANCE>=ip) - && (LZ4_read32(match+refDelta)==LZ4_read32(ip)) ) - { token=op++; *token=0; goto _next_match; } + if (tableType == byPtr) { + + match = LZ4_getPosition(ip, cctx->hashTable, tableType, base); + LZ4_putPosition(ip, cctx->hashTable, tableType, base); + if ( (match+MAX_DISTANCE >= ip) + && (LZ4_read32(match) == LZ4_read32(ip)) ) + { token=op++; *token=0; goto _next_match; } + + } else { /* byU32, byU16 */ + + U32 const h = LZ4_hashPosition(ip, tableType); + U32 const current = (U32)(ip-base); + U32 matchIndex = LZ4_getIndexOnHash(h, cctx->hashTable, tableType); + assert(matchIndex < current); + if (dictDirective == usingDictCtx) { + if (matchIndex < startIndex) { + /* there was no match, try the dictionary */ + matchIndex = LZ4_getIndexOnHash(h, dictCtx->hashTable, byU32); + match = dictBase + matchIndex; + lowLimit = dictionary; /* required for match length counter */ + matchIndex += dictDelta; + } else { + match = base + matchIndex; + lowLimit = (const BYTE*)source; /* required for match length counter */ + } + } else if (dictDirective==usingExtDict) { + if (matchIndex < startIndex) { + match = dictBase + matchIndex; + lowLimit = dictionary; /* required for match length counter */ + } else { + match = base + matchIndex; + lowLimit = (const BYTE*)source; /* required for match length counter */ + } + } else { /* single memory segment */ + match = base + matchIndex; + } + LZ4_putIndexOnHash(current, h, cctx->hashTable, tableType); + assert(matchIndex < current); + if ( ((dictIssue==dictSmall) ? (matchIndex >= prefixIdxLimit) : 1) + && ((tableType==byU16) ? 
1 : (matchIndex+MAX_DISTANCE >= current)) + && (LZ4_read32(match) == LZ4_read32(ip)) ) { + token=op++; + *token=0; + if (maybe_extMem) offset = current - matchIndex; + DEBUGLOG(6, "seq.start:%i, literals=%u, match.start:%i", + (int)(anchor-(const BYTE*)source), 0, (int)(ip-(const BYTE*)source)); + goto _next_match; + } + } /* Prepare next loop */ forwardH = LZ4_hashPosition(++ip, tableType); + } _last_literals: /* Encode Last Literals */ - { size_t const lastRun = (size_t)(iend - anchor); + { size_t lastRun = (size_t)(iend - anchor); if ( (outputLimited) && /* Check output buffer overflow */ - ((op - (BYTE*)dest) + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > (U32)maxOutputSize) ) - return 0; + (op + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > olimit)) { + if (outputLimited == fillOutput) { + /* adapt lastRun to fill 'dst' */ + lastRun = (olimit-op) - 1; + lastRun -= (lastRun+240)/255; + } + if (outputLimited == limitedOutput) + return 0; + } if (lastRun >= RUN_MASK) { size_t accumulator = lastRun - RUN_MASK; *op++ = RUN_MASK << ML_BITS; @@ -696,44 +995,97 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( *op++ = (BYTE)(lastRun<internal_donotuse; + if (acceleration < 1) acceleration = ACCELERATION_DEFAULT; LZ4_resetStream((LZ4_stream_t*)state); + if (maxOutputSize >= LZ4_compressBound(inputSize)) { + if (inputSize < LZ4_64Klimit) { + return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, byU16, noDict, noDictIssue, acceleration); + } else { + const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)source > MAX_DISTANCE)) ? byPtr : byU32; + return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration); + } + } else { + if (inputSize < LZ4_64Klimit) {; + return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration); + } else { + const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)source > MAX_DISTANCE)) ? byPtr : byU32; + return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, noDict, noDictIssue, acceleration); + } + } +} + +/** + * LZ4_compress_fast_extState_fastReset() : + * A variant of LZ4_compress_fast_extState(). + * + * Using this variant avoids an expensive initialization step. It is only safe + * to call if the state buffer is known to be correctly initialized already + * (see comment in lz4.h on LZ4_resetStream_fast() for a definition of + * "correctly initialized"). + */ +int LZ4_compress_fast_extState_fastReset(void* state, const char* src, char* dst, int srcSize, int dstCapacity, int acceleration) +{ + LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)state)->internal_donotuse; if (acceleration < 1) acceleration = ACCELERATION_DEFAULT; - if (maxOutputSize >= LZ4_compressBound(inputSize)) { - if (inputSize < LZ4_64Klimit) - return LZ4_compress_generic(ctx, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue, acceleration); - else - return LZ4_compress_generic(ctx, source, dest, inputSize, 0, notLimited, (sizeof(void*)==8) ? 
byU32 : byPtr, noDict, noDictIssue, acceleration); + if (dstCapacity >= LZ4_compressBound(srcSize)) { + if (srcSize < LZ4_64Klimit) { + const tableType_t tableType = byU16; + LZ4_prepareTable(ctx, srcSize, tableType); + if (ctx->currentOffset) { + return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, dictSmall, acceleration); + } else { + return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration); + } + } else { + const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)src > MAX_DISTANCE)) ? byPtr : byU32; + LZ4_prepareTable(ctx, srcSize, tableType); + return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration); + } } else { - if (inputSize < LZ4_64Klimit) - return LZ4_compress_generic(ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration); - else - return LZ4_compress_generic(ctx, source, dest, inputSize, maxOutputSize, limitedOutput, (sizeof(void*)==8) ? byU32 : byPtr, noDict, noDictIssue, acceleration); + if (srcSize < LZ4_64Klimit) { + const tableType_t tableType = byU16; + LZ4_prepareTable(ctx, srcSize, tableType); + if (ctx->currentOffset) { + return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, dictSmall, acceleration); + } else { + return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration); + } + } else { + const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)src > MAX_DISTANCE)) ? byPtr : byU32; + LZ4_prepareTable(ctx, srcSize, tableType); + return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration); + } } } int LZ4_compress_fast(const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration) { + int result; #if (LZ4_HEAPMODE) - void* ctxPtr = ALLOCATOR(1, sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ + LZ4_stream_t* ctxPtr = ALLOC(sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ + if (ctxPtr == NULL) return 0; #else LZ4_stream_t ctx; - void* const ctxPtr = &ctx; + LZ4_stream_t* const ctxPtr = &ctx; #endif - - int const result = LZ4_compress_fast_extState(ctxPtr, source, dest, inputSize, maxOutputSize, acceleration); + result = LZ4_compress_fast_extState(ctxPtr, source, dest, inputSize, maxOutputSize, acceleration); #if (LZ4_HEAPMODE) FREEMEM(ctxPtr); @@ -756,172 +1108,15 @@ int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int m LZ4_resetStream(&ctx); if (inputSize < LZ4_64Klimit) - return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration); + return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration); else - return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, maxOutputSize, limitedOutput, sizeof(void*)==8 ? 
byU32 : byPtr, noDict, noDictIssue, acceleration); -} - - -/*-****************************** -* *_destSize() variant -********************************/ - -static int LZ4_compress_destSize_generic( - LZ4_stream_t_internal* const ctx, - const char* const src, - char* const dst, - int* const srcSizePtr, - const int targetDstSize, - const tableType_t tableType) -{ - const BYTE* ip = (const BYTE*) src; - const BYTE* base = (const BYTE*) src; - const BYTE* lowLimit = (const BYTE*) src; - const BYTE* anchor = ip; - const BYTE* const iend = ip + *srcSizePtr; - const BYTE* const mflimit = iend - MFLIMIT; - const BYTE* const matchlimit = iend - LASTLITERALS; - - BYTE* op = (BYTE*) dst; - BYTE* const oend = op + targetDstSize; - BYTE* const oMaxLit = op + targetDstSize - 2 /* offset */ - 8 /* because 8+MINMATCH==MFLIMIT */ - 1 /* token */; - BYTE* const oMaxMatch = op + targetDstSize - (LASTLITERALS + 1 /* token */); - BYTE* const oMaxSeq = oMaxLit - 1 /* token */; - - U32 forwardH; - - - /* Init conditions */ - if (targetDstSize < 1) return 0; /* Impossible to store anything */ - if ((U32)*srcSizePtr > (U32)LZ4_MAX_INPUT_SIZE) return 0; /* Unsupported input size, too large (or negative) */ - if ((tableType == byU16) && (*srcSizePtr>=LZ4_64Klimit)) return 0; /* Size too large (not within 64K limit) */ - if (*srcSizePtrhashTable, tableType, base); - ip++; forwardH = LZ4_hashPosition(ip, tableType); - - /* Main Loop */ - for ( ; ; ) { - const BYTE* match; - BYTE* token; - - /* Find a match */ - { const BYTE* forwardIp = ip; - unsigned step = 1; - unsigned searchMatchNb = 1 << LZ4_skipTrigger; - - do { - U32 h = forwardH; - ip = forwardIp; - forwardIp += step; - step = (searchMatchNb++ >> LZ4_skipTrigger); - - if (unlikely(forwardIp > mflimit)) goto _last_literals; - - match = LZ4_getPositionOnHash(h, ctx->hashTable, tableType, base); - forwardH = LZ4_hashPosition(forwardIp, tableType); - LZ4_putPositionOnHash(ip, h, ctx->hashTable, tableType, base); - - } while ( ((tableType==byU16) ? 
0 : (match + MAX_DISTANCE < ip)) - || (LZ4_read32(match) != LZ4_read32(ip)) ); - } - - /* Catch up */ - while ((ip>anchor) && (match > lowLimit) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; } - - /* Encode Literal length */ - { unsigned litLength = (unsigned)(ip - anchor); - token = op++; - if (op + ((litLength+240)/255) + litLength > oMaxLit) { - /* Not enough space for a last match */ - op--; - goto _last_literals; - } - if (litLength>=RUN_MASK) { - unsigned len = litLength - RUN_MASK; - *token=(RUN_MASK<= 255 ; len-=255) *op++ = 255; - *op++ = (BYTE)len; - } - else *token = (BYTE)(litLength< oMaxMatch) { - /* Match description too long : reduce it */ - matchLength = (15-1) + (oMaxMatch-op) * 255; - } - ip += MINMATCH + matchLength; - - if (matchLength>=ML_MASK) { - *token += ML_MASK; - matchLength -= ML_MASK; - while (matchLength >= 255) { matchLength-=255; *op++ = 255; } - *op++ = (BYTE)matchLength; - } - else *token += (BYTE)(matchLength); - } - - anchor = ip; - - /* Test end of block */ - if (ip > mflimit) break; - if (op > oMaxSeq) break; - - /* Fill table */ - LZ4_putPosition(ip-2, ctx->hashTable, tableType, base); - - /* Test next position */ - match = LZ4_getPosition(ip, ctx->hashTable, tableType, base); - LZ4_putPosition(ip, ctx->hashTable, tableType, base); - if ( (match+MAX_DISTANCE>=ip) - && (LZ4_read32(match)==LZ4_read32(ip)) ) - { token=op++; *token=0; goto _next_match; } - - /* Prepare next loop */ - forwardH = LZ4_hashPosition(++ip, tableType); - } - -_last_literals: - /* Encode Last Literals */ - { size_t lastRunSize = (size_t)(iend - anchor); - if (op + 1 /* token */ + ((lastRunSize+240)/255) /* litLength */ + lastRunSize /* literals */ > oend) { - /* adapt lastRunSize to fill 'dst' */ - lastRunSize = (oend-op) - 1; - lastRunSize -= (lastRunSize+240)/255; - } - ip = anchor + lastRunSize; - - if (lastRunSize >= RUN_MASK) { - size_t accumulator = lastRunSize - RUN_MASK; - *op++ = RUN_MASK << ML_BITS; - for(; accumulator >= 255 ; accumulator-=255) *op++ = 255; - *op++ = (BYTE) accumulator; - } else { - *op++ = (BYTE)(lastRunSize<= LZ4_compressBound(*srcSizePtr)) { /* compression success is guaranteed */ return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, targetDstSize, 1); } else { - if (*srcSizePtr < LZ4_64Klimit) - return LZ4_compress_destSize_generic(&state->internal_donotuse, src, dst, srcSizePtr, targetDstSize, byU16); - else - return LZ4_compress_destSize_generic(&state->internal_donotuse, src, dst, srcSizePtr, targetDstSize, sizeof(void*)==8 ? byU32 : byPtr); - } + if (*srcSizePtr < LZ4_64Klimit) { + return LZ4_compress_generic(&state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, byU16, noDict, noDictIssue, 1); + } else { + tableType_t const tableType = ((sizeof(void*)==4) && ((uptrval)src > MAX_DISTANCE)) ? 
byPtr : byU32; + return LZ4_compress_generic(&state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, tableType, noDict, noDictIssue, 1); + } } } int LZ4_compress_destSize(const char* src, char* dst, int* srcSizePtr, int targetDstSize) { #if (LZ4_HEAPMODE) - LZ4_stream_t* ctx = (LZ4_stream_t*)ALLOCATOR(1, sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ + LZ4_stream_t* ctx = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ + if (ctx == NULL) return 0; #else LZ4_stream_t ctxBody; LZ4_stream_t* ctx = &ctxBody; @@ -962,20 +1159,28 @@ int LZ4_compress_destSize(const char* src, char* dst, int* srcSizePtr, int targe LZ4_stream_t* LZ4_createStream(void) { - LZ4_stream_t* lz4s = (LZ4_stream_t*)ALLOCATOR(8, LZ4_STREAMSIZE_U64); + LZ4_stream_t* lz4s = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t)); LZ4_STATIC_ASSERT(LZ4_STREAMSIZE >= sizeof(LZ4_stream_t_internal)); /* A compilation error here means LZ4_STREAMSIZE is not large enough */ + DEBUGLOG(4, "LZ4_createStream %p", lz4s); + if (lz4s == NULL) return NULL; LZ4_resetStream(lz4s); return lz4s; } void LZ4_resetStream (LZ4_stream_t* LZ4_stream) { + DEBUGLOG(5, "LZ4_resetStream (ctx:%p)", LZ4_stream); MEM_INIT(LZ4_stream, 0, sizeof(LZ4_stream_t)); } +void LZ4_resetStream_fast(LZ4_stream_t* ctx) { + LZ4_prepareTable(&(ctx->internal_donotuse), 0, byU32); +} + int LZ4_freeStream (LZ4_stream_t* LZ4_stream) { if (!LZ4_stream) return 0; /* support free on NULL */ + DEBUGLOG(5, "LZ4_freeStream %p", LZ4_stream); FREEMEM(LZ4_stream); return (0); } @@ -985,43 +1190,70 @@ int LZ4_freeStream (LZ4_stream_t* LZ4_stream) int LZ4_loadDict (LZ4_stream_t* LZ4_dict, const char* dictionary, int dictSize) { LZ4_stream_t_internal* dict = &LZ4_dict->internal_donotuse; + const tableType_t tableType = byU32; const BYTE* p = (const BYTE*)dictionary; const BYTE* const dictEnd = p + dictSize; const BYTE* base; - if ((dict->initCheck) || (dict->currentOffset > 1 GB)) /* Uninitialized structure, or reuse overflow */ - LZ4_resetStream(LZ4_dict); + DEBUGLOG(4, "LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict); - if (dictSize < (int)HASH_UNIT) { - dict->dictionary = NULL; - dict->dictSize = 0; - return 0; - } + /* It's necessary to reset the context, + * and not just continue it with prepareTable() + * to avoid any risk of generating overflowing matchIndex + * when compressing using this dictionary */ + LZ4_resetStream(LZ4_dict); + + /* We always increment the offset by 64 KB, since, if the dict is longer, + * we truncate it to the last 64k, and if it's shorter, we still want to + * advance by a whole window length so we can provide the guarantee that + * there are only valid offsets in the window, which allows an optimization + * in LZ4_compress_fast_continue() where it uses noDictIssue even when the + * dictionary isn't a full 64k. 
*/ if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB; - dict->currentOffset += 64 KB; - base = p - dict->currentOffset; + base = dictEnd - 64 KB - dict->currentOffset; dict->dictionary = p; dict->dictSize = (U32)(dictEnd - p); - dict->currentOffset += dict->dictSize; + dict->currentOffset += 64 KB; + dict->tableType = tableType; + + if (dictSize < (int)HASH_UNIT) { + return 0; + } while (p <= dictEnd-HASH_UNIT) { - LZ4_putPosition(p, dict->hashTable, byU32, base); + LZ4_putPosition(p, dict->hashTable, tableType, base); p+=3; } return dict->dictSize; } +void LZ4_attach_dictionary(LZ4_stream_t *working_stream, const LZ4_stream_t *dictionary_stream) { + if (dictionary_stream != NULL) { + /* If the current offset is zero, we will never look in the + * external dictionary context, since there is no value a table + * entry can take that indicate a miss. In that case, we need + * to bump the offset to something non-zero. + */ + if (working_stream->internal_donotuse.currentOffset == 0) { + working_stream->internal_donotuse.currentOffset = 64 KB; + } + working_stream->internal_donotuse.dictCtx = &(dictionary_stream->internal_donotuse); + } else { + working_stream->internal_donotuse.dictCtx = NULL; + } +} + -static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src) +static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, int nextSize) { - if ((LZ4_dict->currentOffset > 0x80000000) || - ((uptrval)LZ4_dict->currentOffset > (uptrval)src)) { /* address space overflow */ + if (LZ4_dict->currentOffset + nextSize > 0x80000000) { /* potential ptrdiff_t overflow (32-bits mode) */ /* rescale hash table */ U32 const delta = LZ4_dict->currentOffset - 64 KB; const BYTE* dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize; int i; + DEBUGLOG(4, "LZ4_renormDictT"); for (i=0; ihashTable[i] < delta) LZ4_dict->hashTable[i]=0; else LZ4_dict->hashTable[i] -= delta; @@ -1035,15 +1267,25 @@ static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src) int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration) { + const tableType_t tableType = byU32; LZ4_stream_t_internal* streamPtr = &LZ4_stream->internal_donotuse; - const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize; + const BYTE* dictEnd = streamPtr->dictionary + streamPtr->dictSize; + + DEBUGLOG(5, "LZ4_compress_fast_continue (inputSize=%i)", inputSize); - const BYTE* smallest = (const BYTE*) source; if (streamPtr->initCheck) return 0; /* Uninitialized structure detected */ - if ((streamPtr->dictSize>0) && (smallest>dictEnd)) smallest = dictEnd; - LZ4_renormDictT(streamPtr, smallest); + LZ4_renormDictT(streamPtr, inputSize); /* avoid index overflow */ if (acceleration < 1) acceleration = ACCELERATION_DEFAULT; + /* invalidate tiny dictionaries */ + if ( (streamPtr->dictSize-1 < 4) /* intentional underflow */ + && (dictEnd != (const BYTE*)source) ) { + DEBUGLOG(5, "LZ4_compress_fast_continue: dictSize(%u) at addr:%p is too small", streamPtr->dictSize, streamPtr->dictionary); + streamPtr->dictSize = 0; + streamPtr->dictionary = (const BYTE*)source; + dictEnd = (const BYTE*)source; + } + /* Check overlapping input/dictionary space */ { const BYTE* sourceEnd = (const BYTE*) source + inputSize; if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd)) { @@ -1056,46 +1298,61 @@ int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, ch /* prefix mode : source data follows dictionary */ if (dictEnd == (const 
BYTE*)source) { - int result; if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) - result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, dictSmall, acceleration); + return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, withPrefix64k, dictSmall, acceleration); else - result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, noDictIssue, acceleration); - streamPtr->dictSize += (U32)inputSize; - streamPtr->currentOffset += (U32)inputSize; - return result; + return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, withPrefix64k, noDictIssue, acceleration); } /* external dictionary mode */ { int result; - if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) - result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, dictSmall, acceleration); - else - result = LZ4_compress_generic(streamPtr, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, noDictIssue, acceleration); + if (streamPtr->dictCtx) { + /* We depend here on the fact that dictCtx'es (produced by + * LZ4_loadDict) guarantee that their tables contain no references + * to offsets between dictCtx->currentOffset - 64 KB and + * dictCtx->currentOffset - dictCtx->dictSize. This makes it safe + * to use noDictIssue even when the dict isn't a full 64 KB. + */ + if (inputSize > 4 KB) { + /* For compressing large blobs, it is faster to pay the setup + * cost to copy the dictionary's tables into the active context, + * so that the compression loop is only looking into one table. 
+ */ + memcpy(streamPtr, streamPtr->dictCtx, sizeof(LZ4_stream_t)); + result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, acceleration); + } else { + result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingDictCtx, noDictIssue, acceleration); + } + } else { + if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) { + result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, dictSmall, acceleration); + } else { + result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, acceleration); + } + } streamPtr->dictionary = (const BYTE*)source; streamPtr->dictSize = (U32)inputSize; - streamPtr->currentOffset += (U32)inputSize; return result; } } -/* Hidden debug function, to force external dictionary mode */ -int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int inputSize) +/* Hidden debug function, to force-test external dictionary mode */ +int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int srcSize) { LZ4_stream_t_internal* streamPtr = &LZ4_dict->internal_donotuse; int result; - const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize; - const BYTE* smallest = dictEnd; - if (smallest > (const BYTE*) source) smallest = (const BYTE*) source; - LZ4_renormDictT(streamPtr, smallest); + LZ4_renormDictT(streamPtr, srcSize); - result = LZ4_compress_generic(streamPtr, source, dest, inputSize, 0, notLimited, byU32, usingExtDict, noDictIssue, 1); + if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) { + result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, dictSmall, 1); + } else { + result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, noDictIssue, 1); + } streamPtr->dictionary = (const BYTE*)source; - streamPtr->dictSize = (U32)inputSize; - streamPtr->currentOffset += (U32)inputSize; + streamPtr->dictSize = (U32)srcSize; return result; } @@ -1126,27 +1383,33 @@ int LZ4_saveDict (LZ4_stream_t* LZ4_dict, char* safeBuffer, int dictSize) -/*-***************************** -* Decompression functions -*******************************/ +/*-******************************* + * Decompression functions + ********************************/ + +typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive; +typedef enum { decode_full_block = 0, partial_decode = 1 } earlyEnd_directive; + +#undef MIN +#define MIN(a,b) ( (a) < (b) ? (a) : (b) ) + /*! LZ4_decompress_generic() : * This generic decompression function covers all use cases. * It shall be instantiated several times, using different sets of directives. * Note that it is important for performance that this function really get inlined, * in order to remove useless branches during compilation optimization. 
*/ -LZ4_FORCE_O2_GCC_PPC64LE -LZ4_FORCE_INLINE int LZ4_decompress_generic( +LZ4_FORCE_INLINE int +LZ4_decompress_generic( const char* const src, char* const dst, int srcSize, int outputSize, /* If endOnInput==endOnInputSize, this value is `dstCapacity` */ - int endOnInput, /* endOnOutputSize, endOnInputSize */ - int partialDecoding, /* full, partial */ - int targetOutputSize, /* only used if partialDecoding==partial */ - int dict, /* noDict, withPrefix64k, usingExtDict */ - const BYTE* const lowPrefix, /* == dst when no prefix */ + endCondition_directive endOnInput, /* endOnOutputSize, endOnInputSize */ + earlyEnd_directive partialDecoding, /* full, partial */ + dict_directive dict, /* noDict, withPrefix64k, usingExtDict */ + const BYTE* const lowPrefix, /* always <= dst, == dst when no prefix */ const BYTE* const dictStart, /* only if dict==usingExtDict */ const size_t dictSize /* note : = 0 if noDict */ ) @@ -1157,31 +1420,82 @@ LZ4_FORCE_INLINE int LZ4_decompress_generic( BYTE* op = (BYTE*) dst; BYTE* const oend = op + outputSize; BYTE* cpy; - BYTE* oexit = op + targetOutputSize; const BYTE* const dictEnd = (const BYTE*)dictStart + dictSize; - const unsigned dec32table[] = {0, 1, 2, 1, 4, 4, 4, 4}; - const int dec64table[] = {0, 0, 0, -1, 0, 1, 2, 3}; + const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4}; + const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3}; const int safeDecode = (endOnInput==endOnInputSize); const int checkOffset = ((safeDecode) && (dictSize < (int)(64 KB))); + /* Set up the "end" pointers for the shortcut. */ + const BYTE* const shortiend = iend - (endOnInput ? 14 : 8) /*maxLL*/ - 2 /*offset*/; + const BYTE* const shortoend = oend - (endOnInput ? 14 : 8) /*maxLL*/ - 18 /*maxML*/; + + DEBUGLOG(5, "LZ4_decompress_generic (srcSize:%i, dstSize:%i)", srcSize, outputSize); /* Special cases */ - if ((partialDecoding) && (oexit > oend-MFLIMIT)) oexit = oend-MFLIMIT; /* targetOutputSize too high => decode everything */ + assert(lowPrefix <= op); + assert(src != NULL); if ((endOnInput) && (unlikely(outputSize==0))) return ((srcSize==1) && (*ip==0)) ? 0 : -1; /* Empty output buffer */ - if ((!endOnInput) && (unlikely(outputSize==0))) return (*ip==0?1:-1); + if ((!endOnInput) && (unlikely(outputSize==0))) return (*ip==0 ? 1 : -1); + if ((endOnInput) && unlikely(srcSize==0)) return -1; /* Main Loop : decode sequences */ while (1) { - size_t length; const BYTE* match; size_t offset; - /* get literal length */ unsigned const token = *ip++; - if ((length=(token>>ML_BITS)) == RUN_MASK) { + size_t length = token >> ML_BITS; /* literal length */ + + assert(!endOnInput || ip <= iend); /* ip < iend before the increment */ + + /* A two-stage shortcut for the most common case: + * 1) If the literal length is 0..14, and there is enough space, + * enter the shortcut and copy 16 bytes on behalf of the literals + * (in the fast mode, only 8 bytes can be safely copied this way). + * 2) Further if the match length is 4..18, copy 18 bytes in a similar + * manner; but we ensure that there's enough space in the output for + * those 18 bytes earlier, upon entering the shortcut (in other words, + * there is a combined check for both stages). + */ + if ( (endOnInput ? length != RUN_MASK : length <= 8) + /* strictly "less than" on input, to re-enter the loop with at least one byte */ + && likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend)) ) { + /* Copy the literals */ + memcpy(op, ip, endOnInput ? 
16 : 8); + op += length; ip += length; + + /* The second stage: prepare for match copying, decode full info. + * If it doesn't work out, the info won't be wasted. */ + length = token & ML_MASK; /* match length */ + offset = LZ4_readLE16(ip); ip += 2; + match = op - offset; + assert(match <= op); /* check overflow */ + + /* Do not deal with overlapping matches. */ + if ( (length != ML_MASK) + && (offset >= 8) + && (dict==withPrefix64k || match >= lowPrefix) ) { + /* Copy the match. */ + memcpy(op + 0, match + 0, 8); + memcpy(op + 8, match + 8, 8); + memcpy(op +16, match +16, 2); + op += length + MINMATCH; + /* Both stages worked, load the next token. */ + continue; + } + + /* The second stage didn't work out, but the info is ready. + * Propel it right to the point of match copying. */ + goto _copy_match; + } + + /* decode literal length */ + if (length == RUN_MASK) { unsigned s; + if (unlikely(endOnInput ? ip >= iend-RUN_MASK : 0)) goto _output_error; /* overflow detection */ do { s = *ip++; length += s; @@ -1192,11 +1506,12 @@ LZ4_FORCE_INLINE int LZ4_decompress_generic( /* copy literals */ cpy = op+length; - if ( ((endOnInput) && ((cpy>(partialDecoding?oexit:oend-MFLIMIT)) || (ip+length>iend-(2+1+LASTLITERALS))) ) - || ((!endOnInput) && (cpy>oend-WILDCOPYLENGTH)) ) + LZ4_STATIC_ASSERT(MFLIMIT >= WILDCOPYLENGTH); + if ( ((endOnInput) && ((cpy>oend-MFLIMIT) || (ip+length>iend-(2+1+LASTLITERALS))) ) + || ((!endOnInput) && (cpy>oend-WILDCOPYLENGTH)) ) { if (partialDecoding) { - if (cpy > oend) goto _output_error; /* Error : write attempt beyond end of output buffer */ + if (cpy > oend) { cpy = oend; length = oend-op; } /* Partial decoding : stop in the middle of literal segment */ if ((endOnInput) && (ip+length > iend)) goto _output_error; /* Error : read attempt beyond end of input buffer */ } else { if ((!endOnInput) && (cpy != oend)) goto _output_error; /* Error : block decoding must stop exactly there */ @@ -1205,19 +1520,31 @@ LZ4_FORCE_INLINE int LZ4_decompress_generic( memcpy(op, ip, length); ip += length; op += length; - break; /* Necessarily EOF, due to parsing restrictions */ + if (!partialDecoding || (cpy == oend)) { + /* Necessarily EOF, due to parsing restrictions */ + break; + } + + } else { + LZ4_wildCopy(op, ip, cpy); /* may overwrite up to WILDCOPYLENGTH beyond cpy */ + ip += length; op = cpy; } - LZ4_wildCopy(op, ip, cpy); - ip += length; op = cpy; /* get offset */ offset = LZ4_readLE16(ip); ip+=2; match = op - offset; - if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) goto _output_error; /* Error : offset outside buffers */ - LZ4_write32(op, (U32)offset); /* costs ~1%; silence an msan warning when offset==0 */ /* get matchlength */ length = token & ML_MASK; + +_copy_match: + if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) goto _output_error; /* Error : offset outside buffers */ + if (!partialDecoding) { + assert(oend > op); + assert(oend - op >= 4); + LZ4_write32(op, 0); /* silence an msan warning when offset==0; costs <1%; */ + } /* note : when partialDecoding, there is no guarantee that at least 4 bytes remain available in output buffer */ + if (length == ML_MASK) { unsigned s; do { @@ -1229,21 +1556,24 @@ LZ4_FORCE_INLINE int LZ4_decompress_generic( } length += MINMATCH; - /* check external dictionary */ + /* match starting within external dictionary */ if ((dict==usingExtDict) && (match < lowPrefix)) { - if (unlikely(op+length > oend-LASTLITERALS)) goto _output_error; /* doesn't respect parsing restriction */ + if (unlikely(op+length > 
oend-LASTLITERALS)) { + if (partialDecoding) length = MIN(length, (size_t)(oend-op)); + else goto _output_error; /* doesn't respect parsing restriction */ + } if (length <= (size_t)(lowPrefix-match)) { - /* match can be copied as a single segment from external dictionary */ + /* match fits entirely within external dictionary : just copy */ memmove(op, dictEnd - (lowPrefix-match), length); op += length; } else { - /* match encompass external dictionary and current block */ - size_t const copySize = (size_t)(lowPrefix-match); + /* match stretches into both external dictionary and current block */ + size_t const copySize = (size_t)(lowPrefix - match); size_t const restSize = length - copySize; memcpy(op, dictEnd - copySize, copySize); op += copySize; - if (restSize > (size_t)(op-lowPrefix)) { /* overlap copy */ + if (restSize > (size_t)(op - lowPrefix)) { /* overlap copy */ BYTE* const endOfMatch = op + restSize; const BYTE* copyFrom = lowPrefix; while (op < endOfMatch) *op++ = *copyFrom++; @@ -1256,32 +1586,51 @@ LZ4_FORCE_INLINE int LZ4_decompress_generic( /* copy match within block */ cpy = op + length; + + /* partialDecoding : may not respect endBlock parsing restrictions */ + assert(op<=oend); + if (partialDecoding && (cpy > oend-MATCH_SAFEGUARD_DISTANCE)) { + size_t const mlen = MIN(length, (size_t)(oend-op)); + const BYTE* const matchEnd = match + mlen; + BYTE* const copyEnd = op + mlen; + if (matchEnd > op) { /* overlap copy */ + while (op < copyEnd) *op++ = *match++; + } else { + memcpy(op, match, mlen); + } + op = copyEnd; + if (op==oend) break; + continue; + } + if (unlikely(offset<8)) { - const int dec64 = dec64table[offset]; op[0] = match[0]; op[1] = match[1]; op[2] = match[2]; op[3] = match[3]; - match += dec32table[offset]; + match += inc32table[offset]; memcpy(op+4, match, 4); - match -= dec64; - } else { LZ4_copy8(op, match); match+=8; } + match -= dec64table[offset]; + } else { + memcpy(op, match, 8); + match += 8; + } op += 8; - if (unlikely(cpy>oend-12)) { - BYTE* const oCopyLimit = oend-(WILDCOPYLENGTH-1); + if (unlikely(cpy > oend-MATCH_SAFEGUARD_DISTANCE)) { + BYTE* const oCopyLimit = oend - (WILDCOPYLENGTH-1); if (cpy > oend-LASTLITERALS) goto _output_error; /* Error : last LASTLITERALS bytes must be literals (uncompressed) */ if (op < oCopyLimit) { LZ4_wildCopy(op, match, oCopyLimit); match += oCopyLimit - op; op = oCopyLimit; } - while (op16) LZ4_wildCopy(op+8, match+8, cpy); + memcpy(op, match, 8); + if (length > 16) LZ4_wildCopy(op+8, match+8, cpy); } - op=cpy; /* correction */ + op = cpy; /* wildcopy correction */ } /* end of decoding */ @@ -1296,30 +1645,106 @@ LZ4_FORCE_INLINE int LZ4_decompress_generic( } +/*===== Instantiate the API decoding functions. 
=====*/ + LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_safe(const char* source, char* dest, int compressedSize, int maxDecompressedSize) { - return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, endOnInputSize, full, 0, noDict, (BYTE*)dest, NULL, 0); + return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, + endOnInputSize, decode_full_block, noDict, + (BYTE*)dest, NULL, 0); } LZ4_FORCE_O2_GCC_PPC64LE -int LZ4_decompress_safe_partial(const char* source, char* dest, int compressedSize, int targetOutputSize, int maxDecompressedSize) +int LZ4_decompress_safe_partial(const char* src, char* dst, int compressedSize, int targetOutputSize, int dstCapacity) { - return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, endOnInputSize, partial, targetOutputSize, noDict, (BYTE*)dest, NULL, 0); + dstCapacity = MIN(targetOutputSize, dstCapacity); + return LZ4_decompress_generic(src, dst, compressedSize, dstCapacity, + endOnInputSize, partial_decode, + noDict, (BYTE*)dst, NULL, 0); } LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_fast(const char* source, char* dest, int originalSize) { - return LZ4_decompress_generic(source, dest, 0, originalSize, endOnOutputSize, full, 0, withPrefix64k, (BYTE*)(dest - 64 KB), NULL, 64 KB); + return LZ4_decompress_generic(source, dest, 0, originalSize, + endOnOutputSize, decode_full_block, withPrefix64k, + (BYTE*)dest - 64 KB, NULL, 0); +} + +/*===== Instantiate a few more decoding cases, used more than once. =====*/ + +LZ4_FORCE_O2_GCC_PPC64LE /* Exported, an obsolete API function. */ +int LZ4_decompress_safe_withPrefix64k(const char* source, char* dest, int compressedSize, int maxOutputSize) +{ + return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, + endOnInputSize, decode_full_block, withPrefix64k, + (BYTE*)dest - 64 KB, NULL, 0); +} + +/* Another obsolete API function, paired with the previous one. */ +int LZ4_decompress_fast_withPrefix64k(const char* source, char* dest, int originalSize) +{ + /* LZ4_decompress_fast doesn't validate match offsets, + * and thus serves well with any prefixed dictionary. */ + return LZ4_decompress_fast(source, dest, originalSize); +} + +LZ4_FORCE_O2_GCC_PPC64LE +static int LZ4_decompress_safe_withSmallPrefix(const char* source, char* dest, int compressedSize, int maxOutputSize, + size_t prefixSize) +{ + return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, + endOnInputSize, decode_full_block, noDict, + (BYTE*)dest-prefixSize, NULL, 0); } +LZ4_FORCE_O2_GCC_PPC64LE +int LZ4_decompress_safe_forceExtDict(const char* source, char* dest, + int compressedSize, int maxOutputSize, + const void* dictStart, size_t dictSize) +{ + return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, + endOnInputSize, decode_full_block, usingExtDict, + (BYTE*)dest, (const BYTE*)dictStart, dictSize); +} + +LZ4_FORCE_O2_GCC_PPC64LE +static int LZ4_decompress_fast_extDict(const char* source, char* dest, int originalSize, + const void* dictStart, size_t dictSize) +{ + return LZ4_decompress_generic(source, dest, 0, originalSize, + endOnOutputSize, decode_full_block, usingExtDict, + (BYTE*)dest, (const BYTE*)dictStart, dictSize); +} + +/* The "double dictionary" mode, for use with e.g. ring buffers: the first part + * of the dictionary is passed as prefix, and the second via dictStart + dictSize. + * These routines are used only once, in LZ4_decompress_*_continue(). 
+ */ +LZ4_FORCE_INLINE +int LZ4_decompress_safe_doubleDict(const char* source, char* dest, int compressedSize, int maxOutputSize, + size_t prefixSize, const void* dictStart, size_t dictSize) +{ + return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, + endOnInputSize, decode_full_block, usingExtDict, + (BYTE*)dest-prefixSize, (const BYTE*)dictStart, dictSize); +} + +LZ4_FORCE_INLINE +int LZ4_decompress_fast_doubleDict(const char* source, char* dest, int originalSize, + size_t prefixSize, const void* dictStart, size_t dictSize) +{ + return LZ4_decompress_generic(source, dest, 0, originalSize, + endOnOutputSize, decode_full_block, usingExtDict, + (BYTE*)dest-prefixSize, (const BYTE*)dictStart, dictSize); +} /*===== streaming decompression functions =====*/ LZ4_streamDecode_t* LZ4_createStreamDecode(void) { - LZ4_streamDecode_t* lz4s = (LZ4_streamDecode_t*) ALLOCATOR(1, sizeof(LZ4_streamDecode_t)); + LZ4_streamDecode_t* lz4s = (LZ4_streamDecode_t*) ALLOC_AND_ZERO(sizeof(LZ4_streamDecode_t)); return lz4s; } @@ -1330,12 +1755,11 @@ int LZ4_freeStreamDecode (LZ4_streamDecode_t* LZ4_stream) return 0; } -/*! - * LZ4_setStreamDecode() : - * Use this function to instruct where to find the dictionary. - * This function is not necessary if previous data is still available where it was decoded. - * Loading a size of 0 is allowed (same effect as no dictionary). - * Return : 1 if OK, 0 if error +/*! LZ4_setStreamDecode() : + * Use this function to instruct where to find the dictionary. + * This function is not necessary if previous data is still available where it was decoded. + * Loading a size of 0 is allowed (same effect as no dictionary). + * @return : 1 if OK, 0 if error */ int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dictionary, int dictSize) { @@ -1347,6 +1771,25 @@ int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dicti return 1; } +/*! LZ4_decoderRingBufferSize() : + * when setting a ring buffer for streaming decompression (optional scenario), + * provides the minimum size of this ring buffer + * to be compatible with any source respecting maxBlockSize condition. + * Note : in a ring buffer scenario, + * blocks are presumed decompressed next to each other. + * When not enough space remains for next block (remainingSize < maxBlockSize), + * decoding resumes from beginning of ring buffer. + * @return : minimum ring buffer size, + * or 0 if there is an error (invalid maxBlockSize). + */ +int LZ4_decoderRingBufferSize(int maxBlockSize) +{ + if (maxBlockSize < 0) return 0; + if (maxBlockSize > LZ4_MAX_INPUT_SIZE) return 0; + if (maxBlockSize < 16) maxBlockSize = 16; + return LZ4_DECODER_RING_BUFFER_SIZE(maxBlockSize); +} + /* *_continue() : These decoding functions allow decompression of multiple blocks in "streaming" mode. @@ -1360,19 +1803,32 @@ int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const ch LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse; int result; - if (lz4sd->prefixEnd == (BYTE*)dest) { - result = LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, - endOnInputSize, full, 0, - usingExtDict, lz4sd->prefixEnd - lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize); + if (lz4sd->prefixSize == 0) { + /* The first call, no dictionary yet. 
*/ + assert(lz4sd->extDictSize == 0); + result = LZ4_decompress_safe(source, dest, compressedSize, maxOutputSize); + if (result <= 0) return result; + lz4sd->prefixSize = result; + lz4sd->prefixEnd = (BYTE*)dest + result; + } else if (lz4sd->prefixEnd == (BYTE*)dest) { + /* They're rolling the current segment. */ + if (lz4sd->prefixSize >= 64 KB - 1) + result = LZ4_decompress_safe_withPrefix64k(source, dest, compressedSize, maxOutputSize); + else if (lz4sd->extDictSize == 0) + result = LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize, + lz4sd->prefixSize); + else + result = LZ4_decompress_safe_doubleDict(source, dest, compressedSize, maxOutputSize, + lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize); if (result <= 0) return result; lz4sd->prefixSize += result; lz4sd->prefixEnd += result; } else { + /* The buffer wraps around, or they're switching to another buffer. */ lz4sd->extDictSize = lz4sd->prefixSize; lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize; - result = LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, - endOnInputSize, full, 0, - usingExtDict, (BYTE*)dest, lz4sd->externalDict, lz4sd->extDictSize); + result = LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, maxOutputSize, + lz4sd->externalDict, lz4sd->extDictSize); if (result <= 0) return result; lz4sd->prefixSize = result; lz4sd->prefixEnd = (BYTE*)dest + result; @@ -1387,19 +1843,26 @@ int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const ch LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse; int result; - if (lz4sd->prefixEnd == (BYTE*)dest) { - result = LZ4_decompress_generic(source, dest, 0, originalSize, - endOnOutputSize, full, 0, - usingExtDict, lz4sd->prefixEnd - lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize); + if (lz4sd->prefixSize == 0) { + assert(lz4sd->extDictSize == 0); + result = LZ4_decompress_fast(source, dest, originalSize); + if (result <= 0) return result; + lz4sd->prefixSize = originalSize; + lz4sd->prefixEnd = (BYTE*)dest + originalSize; + } else if (lz4sd->prefixEnd == (BYTE*)dest) { + if (lz4sd->prefixSize >= 64 KB - 1 || lz4sd->extDictSize == 0) + result = LZ4_decompress_fast(source, dest, originalSize); + else + result = LZ4_decompress_fast_doubleDict(source, dest, originalSize, + lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize); if (result <= 0) return result; lz4sd->prefixSize += originalSize; lz4sd->prefixEnd += originalSize; } else { lz4sd->extDictSize = lz4sd->prefixSize; lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize; - result = LZ4_decompress_generic(source, dest, 0, originalSize, - endOnOutputSize, full, 0, - usingExtDict, (BYTE*)dest, lz4sd->externalDict, lz4sd->extDictSize); + result = LZ4_decompress_fast_extDict(source, dest, originalSize, + lz4sd->externalDict, lz4sd->extDictSize); if (result <= 0) return result; lz4sd->prefixSize = originalSize; lz4sd->prefixEnd = (BYTE*)dest + originalSize; @@ -1416,36 +1879,23 @@ Advanced decoding functions : the dictionary must be explicitly provided within parameters */ -LZ4_FORCE_O2_GCC_PPC64LE -LZ4_FORCE_INLINE int LZ4_decompress_usingDict_generic(const char* source, char* dest, int compressedSize, int maxOutputSize, int safe, const char* dictStart, int dictSize) +int LZ4_decompress_safe_usingDict(const char* source, char* dest, int compressedSize, int maxOutputSize, const char* dictStart, int dictSize) { if (dictSize==0) - return LZ4_decompress_generic(source, dest, compressedSize, 
maxOutputSize, safe, full, 0, noDict, (BYTE*)dest, NULL, 0); + return LZ4_decompress_safe(source, dest, compressedSize, maxOutputSize); if (dictStart+dictSize == dest) { - if (dictSize >= (int)(64 KB - 1)) - return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, withPrefix64k, (BYTE*)dest-64 KB, NULL, 0); - return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, noDict, (BYTE*)dest-dictSize, NULL, 0); + if (dictSize >= 64 KB - 1) + return LZ4_decompress_safe_withPrefix64k(source, dest, compressedSize, maxOutputSize); + return LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize, dictSize); } - return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, usingExtDict, (BYTE*)dest, (const BYTE*)dictStart, dictSize); + return LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, maxOutputSize, dictStart, dictSize); } -LZ4_FORCE_O2_GCC_PPC64LE -int LZ4_decompress_safe_usingDict(const char* source, char* dest, int compressedSize, int maxOutputSize, const char* dictStart, int dictSize) -{ - return LZ4_decompress_usingDict_generic(source, dest, compressedSize, maxOutputSize, 1, dictStart, dictSize); -} - -LZ4_FORCE_O2_GCC_PPC64LE int LZ4_decompress_fast_usingDict(const char* source, char* dest, int originalSize, const char* dictStart, int dictSize) { - return LZ4_decompress_usingDict_generic(source, dest, 0, originalSize, 0, dictStart, dictSize); -} - -/* debug function */ -LZ4_FORCE_O2_GCC_PPC64LE -int LZ4_decompress_safe_forceExtDict(const char* source, char* dest, int compressedSize, int maxOutputSize, const char* dictStart, int dictSize) -{ - return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, usingExtDict, (BYTE*)dest, (const BYTE*)dictStart, dictSize); + if (dictSize==0 || dictStart+dictSize == dest) + return LZ4_decompress_fast(source, dest, originalSize); + return LZ4_decompress_fast_extDict(source, dest, originalSize, dictStart, dictSize); } @@ -1453,64 +1903,67 @@ int LZ4_decompress_safe_forceExtDict(const char* source, char* dest, int compres * Obsolete Functions ***************************************************/ /* obsolete compression functions */ -int LZ4_compress_limitedOutput(const char* source, char* dest, int inputSize, int maxOutputSize) { return LZ4_compress_default(source, dest, inputSize, maxOutputSize); } -int LZ4_compress(const char* source, char* dest, int inputSize) { return LZ4_compress_default(source, dest, inputSize, LZ4_compressBound(inputSize)); } -int LZ4_compress_limitedOutput_withState (void* state, const char* src, char* dst, int srcSize, int dstSize) { return LZ4_compress_fast_extState(state, src, dst, srcSize, dstSize, 1); } -int LZ4_compress_withState (void* state, const char* src, char* dst, int srcSize) { return LZ4_compress_fast_extState(state, src, dst, srcSize, LZ4_compressBound(srcSize), 1); } -int LZ4_compress_limitedOutput_continue (LZ4_stream_t* LZ4_stream, const char* src, char* dst, int srcSize, int maxDstSize) { return LZ4_compress_fast_continue(LZ4_stream, src, dst, srcSize, maxDstSize, 1); } -int LZ4_compress_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize) { return LZ4_compress_fast_continue(LZ4_stream, source, dest, inputSize, LZ4_compressBound(inputSize), 1); } +int LZ4_compress_limitedOutput(const char* source, char* dest, int inputSize, int maxOutputSize) +{ + return LZ4_compress_default(source, dest, inputSize, 
maxOutputSize); +} +int LZ4_compress(const char* source, char* dest, int inputSize) +{ + return LZ4_compress_default(source, dest, inputSize, LZ4_compressBound(inputSize)); +} +int LZ4_compress_limitedOutput_withState (void* state, const char* src, char* dst, int srcSize, int dstSize) +{ + return LZ4_compress_fast_extState(state, src, dst, srcSize, dstSize, 1); +} +int LZ4_compress_withState (void* state, const char* src, char* dst, int srcSize) +{ + return LZ4_compress_fast_extState(state, src, dst, srcSize, LZ4_compressBound(srcSize), 1); +} +int LZ4_compress_limitedOutput_continue (LZ4_stream_t* LZ4_stream, const char* src, char* dst, int srcSize, int dstCapacity) +{ + return LZ4_compress_fast_continue(LZ4_stream, src, dst, srcSize, dstCapacity, 1); +} +int LZ4_compress_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize) +{ + return LZ4_compress_fast_continue(LZ4_stream, source, dest, inputSize, LZ4_compressBound(inputSize), 1); +} /* -These function names are deprecated and should no longer be used. +These decompression functions are deprecated and should no longer be used. They are only provided here for compatibility with older user programs. - LZ4_uncompress is totally equivalent to LZ4_decompress_fast - LZ4_uncompress_unknownOutputSize is totally equivalent to LZ4_decompress_safe */ -int LZ4_uncompress (const char* source, char* dest, int outputSize) { return LZ4_decompress_fast(source, dest, outputSize); } -int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize, int maxOutputSize) { return LZ4_decompress_safe(source, dest, isize, maxOutputSize); } - +int LZ4_uncompress (const char* source, char* dest, int outputSize) +{ + return LZ4_decompress_fast(source, dest, outputSize); +} +int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize, int maxOutputSize) +{ + return LZ4_decompress_safe(source, dest, isize, maxOutputSize); +} /* Obsolete Streaming functions */ int LZ4_sizeofStreamState() { return LZ4_STREAMSIZE; } -static void LZ4_init(LZ4_stream_t* lz4ds, BYTE* base) -{ - MEM_INIT(lz4ds, 0, sizeof(LZ4_stream_t)); - lz4ds->internal_donotuse.bufferStart = base; -} - int LZ4_resetStreamState(void* state, char* inputBuffer) { - if ((((uptrval)state) & 3) != 0) return 1; /* Error : pointer is not aligned on 4-bytes boundary */ - LZ4_init((LZ4_stream_t*)state, (BYTE*)inputBuffer); + (void)inputBuffer; + LZ4_resetStream((LZ4_stream_t*)state); return 0; } void* LZ4_create (char* inputBuffer) { - LZ4_stream_t* lz4ds = (LZ4_stream_t*)ALLOCATOR(8, sizeof(LZ4_stream_t)); - LZ4_init (lz4ds, (BYTE*)inputBuffer); - return lz4ds; -} - -char* LZ4_slideInputBuffer (void* LZ4_Data) -{ - LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)LZ4_Data)->internal_donotuse; - int dictSize = LZ4_saveDict((LZ4_stream_t*)LZ4_Data, (char*)ctx->bufferStart, 64 KB); - return (char*)(ctx->bufferStart + dictSize); + (void)inputBuffer; + return LZ4_createStream(); } -/* Obsolete streaming decompression functions */ - -int LZ4_decompress_safe_withPrefix64k(const char* source, char* dest, int compressedSize, int maxOutputSize) -{ - return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, withPrefix64k, (BYTE*)dest - 64 KB, NULL, 64 KB); -} - -int LZ4_decompress_fast_withPrefix64k(const char* source, char* dest, int originalSize) +char* LZ4_slideInputBuffer (void* state) { - return LZ4_decompress_generic(source, dest, 0, originalSize, endOnOutputSize, full, 0, withPrefix64k, (BYTE*)dest - 64 KB, NULL, 64 
KB); + /* avoid const char * -> char * conversion warning */ + return (char *)(uptrval)((LZ4_stream_t*)state)->internal_donotuse.dictionary; } #endif /* LZ4_COMMONDEFS_ONLY */ diff --git a/src/third-party/lz4/lz4.h b/src/third-party/lz4/lz4.h index d284d630043..059ef7c1b7d 100644 --- a/src/third-party/lz4/lz4.h +++ b/src/third-party/lz4/lz4.h @@ -1,7 +1,7 @@ /* * LZ4 - Fast LZ compression algorithm * Header File - * Copyright (C) 2011-2017, Yann Collet. + * Copyright (C) 2011-present, Yann Collet. BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) @@ -46,7 +46,7 @@ extern "C" { /** Introduction - LZ4 is lossless compression algorithm, providing compression speed at 400 MB/s per core, + LZ4 is lossless compression algorithm, providing compression speed at 500 MB/s per core, scalable with multi-cores CPU. It features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on multi-core systems. @@ -62,8 +62,8 @@ extern "C" { An additional format, called LZ4 frame specification (doc/lz4_Frame_format.md), take care of encoding standard metadata alongside LZ4-compressed blocks. - If your application requires interoperability, it's recommended to use it. - A library is provided to take care of it, see lz4frame.h. + Frame format is required for interoperability. + It is delivered through a companion API, declared in lz4frame.h. */ /*^*************************************************************** @@ -72,24 +72,28 @@ extern "C" { /* * LZ4_DLL_EXPORT : * Enable exporting of functions when building a Windows DLL -* LZ4LIB_API : +* LZ4LIB_VISIBILITY : * Control library symbols visibility. */ +#ifndef LZ4LIB_VISIBILITY +# if defined(__GNUC__) && (__GNUC__ >= 4) +# define LZ4LIB_VISIBILITY __attribute__ ((visibility ("default"))) +# else +# define LZ4LIB_VISIBILITY +# endif +#endif #if defined(LZ4_DLL_EXPORT) && (LZ4_DLL_EXPORT==1) -# define LZ4LIB_API __declspec(dllexport) +# define LZ4LIB_API __declspec(dllexport) LZ4LIB_VISIBILITY #elif defined(LZ4_DLL_IMPORT) && (LZ4_DLL_IMPORT==1) -# define LZ4LIB_API __declspec(dllimport) /* It isn't required but allows to generate better code, saving a function pointer load from the IAT and an indirect jump.*/ -#elif defined(__GNUC__) && (__GNUC__ >= 4) -# define LZ4LIB_API __attribute__ ((__visibility__ ("default"))) +# define LZ4LIB_API __declspec(dllimport) LZ4LIB_VISIBILITY /* It isn't required but allows to generate better code, saving a function pointer load from the IAT and an indirect jump.*/ #else -# define LZ4LIB_API +# define LZ4LIB_API LZ4LIB_VISIBILITY #endif - /*------ Version ------*/ #define LZ4_VERSION_MAJOR 1 /* for breaking interface changes */ #define LZ4_VERSION_MINOR 8 /* for new (non-breaking) interface capabilities */ -#define LZ4_VERSION_RELEASE 0 /* for tweaks, bug-fixes, or development */ +#define LZ4_VERSION_RELEASE 3 /* for tweaks, bug-fixes, or development */ #define LZ4_VERSION_NUMBER (LZ4_VERSION_MAJOR *100*100 + LZ4_VERSION_MINOR *100 + LZ4_VERSION_RELEASE) @@ -98,8 +102,8 @@ extern "C" { #define LZ4_EXPAND_AND_QUOTE(str) LZ4_QUOTE(str) #define LZ4_VERSION_STRING LZ4_EXPAND_AND_QUOTE(LZ4_LIB_VERSION) -LZ4LIB_API int LZ4_versionNumber (void); /**< library version number; to be used when checking dll version */ -LZ4LIB_API const char* LZ4_versionString (void); /**< library version string; to be used when checking dll version */ +LZ4LIB_API int LZ4_versionNumber (void); /**< library version number; useful to check dll version */ +LZ4LIB_API const char* 
LZ4_versionString (void); /**< library version string; unseful to check dll version */ /*-************************************ @@ -109,7 +113,7 @@ LZ4LIB_API const char* LZ4_versionString (void); /**< library version string; * LZ4_MEMORY_USAGE : * Memory usage formula : N->2^N Bytes (examples : 10 -> 1KB; 12 -> 4KB ; 16 -> 64KB; 20 -> 1MB; etc.) * Increasing memory usage improves compression ratio - * Reduced memory usage can improve speed, due to cache effect + * Reduced memory usage may improve speed, thanks to cache effect * Default value is 14, for 16KB, which nicely fits into Intel x86 L1 cache */ #ifndef LZ4_MEMORY_USAGE @@ -120,30 +124,29 @@ LZ4LIB_API const char* LZ4_versionString (void); /**< library version string; * Simple Functions **************************************/ /*! LZ4_compress_default() : - Compresses 'sourceSize' bytes from buffer 'source' - into already allocated 'dest' buffer of size 'maxDestSize'. - Compression is guaranteed to succeed if 'maxDestSize' >= LZ4_compressBound(sourceSize). + Compresses 'srcSize' bytes from buffer 'src' + into already allocated 'dst' buffer of size 'dstCapacity'. + Compression is guaranteed to succeed if 'dstCapacity' >= LZ4_compressBound(srcSize). It also runs faster, so it's a recommended setting. - If the function cannot compress 'source' into a more limited 'dest' budget, + If the function cannot compress 'src' into a more limited 'dst' budget, compression stops *immediately*, and the function result is zero. - As a consequence, 'dest' content is not valid. - This function never writes outside 'dest' buffer, nor read outside 'source' buffer. - sourceSize : Max supported value is LZ4_MAX_INPUT_VALUE - maxDestSize : full or partial size of buffer 'dest' (which must be already allocated) - return : the number of bytes written into buffer 'dest' (necessarily <= maxOutputSize) - or 0 if compression fails */ -LZ4LIB_API int LZ4_compress_default(const char* source, char* dest, int sourceSize, int maxDestSize); + Note : as a consequence, 'dst' content is not valid. + Note 2 : This function is protected against buffer overflow scenarios (never writes outside 'dst' buffer, nor read outside 'source' buffer). + srcSize : max supported value is LZ4_MAX_INPUT_SIZE. + dstCapacity : size of buffer 'dst' (which must be already allocated) + return : the number of bytes written into buffer 'dst' (necessarily <= dstCapacity) + or 0 if compression fails */ +LZ4LIB_API int LZ4_compress_default(const char* src, char* dst, int srcSize, int dstCapacity); /*! LZ4_decompress_safe() : - compressedSize : is the precise full size of the compressed block. - maxDecompressedSize : is the size of destination buffer, which must be already allocated. - return : the number of bytes decompressed into destination buffer (necessarily <= maxDecompressedSize) - If destination buffer is not large enough, decoding will stop and output an error code (<0). + compressedSize : is the exact complete size of the compressed block. + dstCapacity : is the size of destination buffer, which must be already allocated. + return : the number of bytes decompressed into destination buffer (necessarily <= dstCapacity) + If destination buffer is not large enough, decoding will stop and output an error code (negative value). If the source stream is detected malformed, the function will stop decoding and return a negative result. - This function is protected against buffer overflow exploits, including malicious data packets. 
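/* --- Editor's illustrative sketch; not part of the upstream lz4 diff. ---
 * A minimal round trip through the simple API documented above: compress
 * with LZ4_compress_default() into an LZ4_compressBound()-sized buffer,
 * then restore with LZ4_decompress_safe(). The function and variable names
 * below (roundtrip_demo, text, ...) are invented for the example. */
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

static int roundtrip_demo(void)
{
    const char text[] = "LZ4 is a fast lossless compression algorithm.";
    int const srcSize = (int)sizeof(text);
    int const bound = LZ4_compressBound(srcSize);            /* worst-case output size */
    char* const compressed = (char*)malloc((size_t)bound);
    char* const restored = (char*)malloc((size_t)srcSize);
    int ok = 0;
    if (compressed != NULL && restored != NULL) {
        int const cSize = LZ4_compress_default(text, compressed, srcSize, bound);
        if (cSize > 0) {   /* 0 means compression failed (dst budget too small) */
            /* LZ4_decompress_safe() needs the exact compressed size and the dst capacity */
            int const dSize = LZ4_decompress_safe(compressed, restored, cSize, srcSize);
            ok = (dSize == srcSize) && (memcmp(text, restored, (size_t)srcSize) == 0);
        }
    }
    free(compressed);
    free(restored);
    return ok ? 0 : 1;
}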
- It never writes outside output buffer, nor reads outside input buffer. + This function is protected against malicious data packets. */ -LZ4LIB_API int LZ4_decompress_safe (const char* source, char* dest, int compressedSize, int maxDecompressedSize); +LZ4LIB_API int LZ4_decompress_safe (const char* src, char* dst, int compressedSize, int dstCapacity); /*-************************************ @@ -157,22 +160,22 @@ LZ4_compressBound() : Provides the maximum size that LZ4 compression may output in a "worst case" scenario (input data not compressible) This function is primarily useful for memory allocation purposes (destination buffer size). Macro LZ4_COMPRESSBOUND() is also provided for compilation-time evaluation (stack memory allocation for example). - Note that LZ4_compress_default() compress faster when dest buffer size is >= LZ4_compressBound(srcSize) + Note that LZ4_compress_default() compresses faster when dstCapacity is >= LZ4_compressBound(srcSize) inputSize : max supported value is LZ4_MAX_INPUT_SIZE return : maximum output size in a "worst case" scenario - or 0, if input size is too large ( > LZ4_MAX_INPUT_SIZE) + or 0, if input size is incorrect (too large or negative) */ LZ4LIB_API int LZ4_compressBound(int inputSize); /*! LZ4_compress_fast() : - Same as LZ4_compress_default(), but allows to select an "acceleration" factor. + Same as LZ4_compress_default(), but allows selection of "acceleration" factor. The larger the acceleration value, the faster the algorithm, but also the lesser the compression. It's a trade-off. It can be fine tuned, with each successive value providing roughly +~3% to speed. An acceleration value of "1" is the same as regular LZ4_compress_default() - Values <= 0 will be replaced by ACCELERATION_DEFAULT (see lz4.c), which is 1. + Values <= 0 will be replaced by ACCELERATION_DEFAULT (currently == 1, see lz4.c). */ -LZ4LIB_API int LZ4_compress_fast (const char* source, char* dest, int sourceSize, int maxDestSize, int acceleration); +LZ4LIB_API int LZ4_compress_fast (const char* src, char* dst, int srcSize, int dstCapacity, int acceleration); /*! @@ -180,58 +183,79 @@ LZ4_compress_fast_extState() : Same compression function, just using an externally allocated memory space to store compression state. Use LZ4_sizeofState() to know how much memory must be allocated, and allocate it on 8-bytes boundaries (using malloc() typically). - Then, provide it as 'void* state' to compression function. + Then, provide this buffer as 'void* state' to compression function. */ LZ4LIB_API int LZ4_sizeofState(void); -LZ4LIB_API int LZ4_compress_fast_extState (void* state, const char* source, char* dest, int inputSize, int maxDestSize, int acceleration); +LZ4LIB_API int LZ4_compress_fast_extState (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int acceleration); -/*! -LZ4_compress_destSize() : - Reverse the logic, by compressing as much data as possible from 'source' buffer - into already allocated buffer 'dest' of size 'targetDestSize'. - This function either compresses the entire 'source' content into 'dest' if it's large enough, - or fill 'dest' buffer completely with as much data as possible from 'source'. - *sourceSizePtr : will be modified to indicate how many bytes where read from 'source' to fill 'dest'. - New value is necessarily <= old value. - return : Nb bytes written into 'dest' (necessarily <= targetDestSize) - or 0 if compression fails +/*! 
LZ4_compress_destSize() : + * Reverse the logic : compresses as much data as possible from 'src' buffer + * into already allocated buffer 'dst', of size >= 'targetDestSize'. + * This function either compresses the entire 'src' content into 'dst' if it's large enough, + * or fill 'dst' buffer completely with as much data as possible from 'src'. + * note: acceleration parameter is fixed to "default". + * + * *srcSizePtr : will be modified to indicate how many bytes where read from 'src' to fill 'dst'. + * New value is necessarily <= input value. + * @return : Nb bytes written into 'dst' (necessarily <= targetDestSize) + * or 0 if compression fails. */ -LZ4LIB_API int LZ4_compress_destSize (const char* source, char* dest, int* sourceSizePtr, int targetDestSize); +LZ4LIB_API int LZ4_compress_destSize (const char* src, char* dst, int* srcSizePtr, int targetDstSize); -/*! -LZ4_decompress_fast() : - originalSize : is the original and therefore uncompressed size - return : the number of bytes read from the source buffer (in other words, the compressed size) - If the source stream is detected malformed, the function will stop decoding and return a negative result. - Destination buffer must be already allocated. Its size must be a minimum of 'originalSize' bytes. - note : This function fully respect memory boundaries for properly formed compressed data. - It is a bit faster than LZ4_decompress_safe(). - However, it does not provide any protection against intentionally modified data stream (malicious input). - Use this function in trusted environment only (data to decode comes from a trusted source). -*/ -LZ4LIB_API int LZ4_decompress_fast (const char* source, char* dest, int originalSize); - -/*! -LZ4_decompress_safe_partial() : - This function decompress a compressed block of size 'compressedSize' at position 'source' - into destination buffer 'dest' of size 'maxDecompressedSize'. - The function tries to stop decompressing operation as soon as 'targetOutputSize' has been reached, - reducing decompression time. - return : the number of bytes decoded in the destination buffer (necessarily <= maxDecompressedSize) - Note : this number can be < 'targetOutputSize' should the compressed block to decode be smaller. - Always control how many bytes were decoded. - If the source stream is detected malformed, the function will stop decoding and return a negative result. - This function never writes outside of output buffer, and never reads outside of input buffer. It is therefore protected against malicious data packets -*/ -LZ4LIB_API int LZ4_decompress_safe_partial (const char* source, char* dest, int compressedSize, int targetOutputSize, int maxDecompressedSize); +/*! LZ4_decompress_fast() : **unsafe!** + * This function used to be a bit faster than LZ4_decompress_safe(), + * though situation has changed in recent versions, + * and now `LZ4_decompress_safe()` can be as fast and sometimes faster than `LZ4_decompress_fast()`. + * Moreover, LZ4_decompress_fast() is not protected vs malformed input, as it doesn't perform full validation of compressed data. + * As a consequence, this function is no longer recommended, and may be deprecated in future versions. + * It's only remaining specificity is that it can decompress data without knowing its compressed size. + * + * originalSize : is the uncompressed size to regenerate. + * `dst` must be already allocated, its size must be >= 'originalSize' bytes. + * @return : number of bytes read from source buffer (== compressed size). 
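/* --- Editor's illustrative sketch; not part of the upstream lz4 diff. ---
 * LZ4_compress_destSize(), per the note above: fill a fixed-size page with
 * as much compressed data as fits, and learn how much input was consumed
 * through the in/out parameter srcSizePtr. PAGE_SIZE and the names below
 * are invented for the example. */
#include "lz4.h"

#define PAGE_SIZE 4096

/* Returns bytes written into page (<= PAGE_SIZE); *consumed reports how many
 * source bytes were actually packed (necessarily <= the value passed in). */
static int pack_one_page(const char* src, int srcSize, char page[PAGE_SIZE], int* consumed)
{
    *consumed = srcSize;                                   /* in: available input */
    {   int const written = LZ4_compress_destSize(src, page, consumed, PAGE_SIZE);
        /* out: *consumed now holds the number of source bytes that fit */
        return written;                                    /* 0 means compression failed */
    }
}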
+ * If the source stream is detected malformed, the function stops decoding and returns a negative result. + * note : This function requires uncompressed originalSize to be known in advance. + * The function never writes past the output buffer. + * However, since it doesn't know its 'src' size, it may read past the intended input. + * Also, because match offsets are not validated during decoding, + * reads from 'src' may underflow. + * Use this function in trusted environment **only**. + */ +LZ4LIB_API int LZ4_decompress_fast (const char* src, char* dst, int originalSize); + +/*! LZ4_decompress_safe_partial() : + * Decompress an LZ4 compressed block, of size 'srcSize' at position 'src', + * into destination buffer 'dst' of size 'dstCapacity'. + * Up to 'targetOutputSize' bytes will be decoded. + * The function stops decoding on reaching this objective, + * which can boost performance when only the beginning of a block is required. + * + * @return : the number of bytes decoded in `dst` (necessarily <= dstCapacity) + * If source stream is detected malformed, function returns a negative result. + * + * Note : @return can be < targetOutputSize, if compressed block contains less data. + * + * Note 2 : this function features 2 parameters, targetOutputSize and dstCapacity, + * and expects targetOutputSize <= dstCapacity. + * It effectively stops decoding on reaching targetOutputSize, + * so dstCapacity is kind of redundant. + * This is because in a previous version of this function, + * decoding operation would not "break" a sequence in the middle. + * As a consequence, there was no guarantee that decoding would stop at exactly targetOutputSize, + * it could write more bytes, though only up to dstCapacity. + * Some "margin" used to be required for this operation to work properly. + * This is no longer necessary. + * The function nonetheless keeps its signature, in an effort to not break API. + */ +LZ4LIB_API int LZ4_decompress_safe_partial (const char* src, char* dst, int srcSize, int targetOutputSize, int dstCapacity); /*-********************************************* * Streaming Compression Functions ***********************************************/ -typedef union LZ4_stream_u LZ4_stream_t; /* incomplete type (defined later) */ +typedef union LZ4_stream_u LZ4_stream_t; /* incomplete type (defined later) */ /*! LZ4_createStream() and LZ4_freeStream() : * LZ4_createStream() will allocate and initialize an `LZ4_stream_t` structure. @@ -255,84 +279,206 @@ LZ4LIB_API void LZ4_resetStream (LZ4_stream_t* streamPtr); LZ4LIB_API int LZ4_loadDict (LZ4_stream_t* streamPtr, const char* dictionary, int dictSize); /*! LZ4_compress_fast_continue() : - * Compress content into 'src' using data from previously compressed blocks, improving compression ratio. + * Compress 'src' content using data from previously compressed blocks, for better compression ratio. * 'dst' buffer must be already allocated. * If dstCapacity >= LZ4_compressBound(srcSize), compression is guaranteed to succeed, and runs faster. * - * Important : Up to 64KB of previously compressed data is assumed to remain present and unmodified in memory ! - * Special 1 : If input buffer is a double-buffer, it can have any size, including < 64 KB. - * Special 2 : If input buffer is a ring-buffer, it can have any size, including < 64 KB. - * * @return : size of compressed block - * or 0 if there is an error (typically, compressed data cannot fit into 'dst') - * After an error, the stream status is invalid, it can only be reset or freed. 
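/* --- Editor's illustrative sketch; not part of the upstream lz4 diff. ---
 * Using LZ4_decompress_safe_partial(), as documented above, to decode only
 * the first bytes of a block (e.g., to peek at an embedded header).
 * `compressed` and `cSize` are assumed to come from an earlier
 * LZ4_compress_default() call; the names are invented for the example. */
#include "lz4.h"

#define PREVIEW_BYTES 16

static int peek_block(const char* compressed, int cSize, char preview[PREVIEW_BYTES])
{
    /* targetOutputSize <= dstCapacity, as required; decoding stops early */
    int const got = LZ4_decompress_safe_partial(compressed, preview,
                                                cSize,           /* exact compressed size */
                                                PREVIEW_BYTES,   /* targetOutputSize */
                                                PREVIEW_BYTES);  /* dstCapacity */
    /* got < 0 : malformed input; got may be < PREVIEW_BYTES for small blocks */
    return got;
}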
+ * or 0 if there is an error (typically, cannot fit into 'dst'). + * + * Note 1 : Each invocation to LZ4_compress_fast_continue() generates a new block. + * Each block has precise boundaries. + * It's not possible to append blocks together and expect a single invocation of LZ4_decompress_*() to decompress them together. + * Each block must be decompressed separately, calling LZ4_decompress_*() with associated metadata. + * + * Note 2 : The previous 64KB of source data is __assumed__ to remain present, unmodified, at same address in memory! + * + * Note 3 : When input is structured as a double-buffer, each buffer can have any size, including < 64 KB. + * Make sure that buffers are separated, by at least one byte. + * This construction ensures that each block only depends on previous block. + * + * Note 4 : If input buffer is a ring-buffer, it can have any size, including < 64 KB. + * + * Note 5 : After an error, the stream status is invalid, it can only be reset or freed. */ LZ4LIB_API int LZ4_compress_fast_continue (LZ4_stream_t* streamPtr, const char* src, char* dst, int srcSize, int dstCapacity, int acceleration); /*! LZ4_saveDict() : - * If previously compressed data block is not guaranteed to remain available at its current memory location, + * If last 64KB data cannot be guaranteed to remain available at its current memory location, * save it into a safer place (char* safeBuffer). - * Note : it's not necessary to call LZ4_loadDict() after LZ4_saveDict(), dictionary is immediately usable. - * @return : saved dictionary size in bytes (necessarily <= dictSize), or 0 if error. + * This is schematically equivalent to a memcpy() followed by LZ4_loadDict(), + * but is much faster, because LZ4_saveDict() doesn't need to rebuild tables. + * @return : saved dictionary size in bytes (necessarily <= maxDictSize), or 0 if error. */ -LZ4LIB_API int LZ4_saveDict (LZ4_stream_t* streamPtr, char* safeBuffer, int dictSize); +LZ4LIB_API int LZ4_saveDict (LZ4_stream_t* streamPtr, char* safeBuffer, int maxDictSize); /*-********************************************** * Streaming Decompression Functions * Bufferless synchronous API ************************************************/ -typedef union LZ4_streamDecode_u LZ4_streamDecode_t; /* incomplete type (defined later) */ +typedef union LZ4_streamDecode_u LZ4_streamDecode_t; /* tracking context */ /*! LZ4_createStreamDecode() and LZ4_freeStreamDecode() : - * creation / destruction of streaming decompression tracking structure. - * A tracking structure can be re-used multiple times sequentially. */ + * creation / destruction of streaming decompression tracking context. + * A tracking context can be re-used multiple times. + */ LZ4LIB_API LZ4_streamDecode_t* LZ4_createStreamDecode(void); LZ4LIB_API int LZ4_freeStreamDecode (LZ4_streamDecode_t* LZ4_stream); /*! LZ4_setStreamDecode() : - * An LZ4_streamDecode_t structure can be allocated once and re-used multiple times. + * An LZ4_streamDecode_t context can be allocated once and re-used multiple times. * Use this function to start decompression of a new stream of blocks. - * A dictionary can optionnally be set. Use NULL or size 0 for a simple reset order. + * A dictionary can optionally be set. Use NULL or size 0 for a reset order. + * Dictionary is presumed stable : it must remain accessible and unmodified during next decompression. * @return : 1 if OK, 0 if error */ LZ4LIB_API int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dictionary, int dictSize); +/*! 
LZ4_decoderRingBufferSize() : v1.8.2 + * Note : in a ring buffer scenario (optional), + * blocks are presumed decompressed next to each other + * up to the moment there is not enough remaining space for next block (remainingSize < maxBlockSize), + * at which stage it resumes from beginning of ring buffer. + * When setting such a ring buffer for streaming decompression, + * provides the minimum size of this ring buffer + * to be compatible with any source respecting maxBlockSize condition. + * @return : minimum ring buffer size, + * or 0 if there is an error (invalid maxBlockSize). + */ +LZ4LIB_API int LZ4_decoderRingBufferSize(int maxBlockSize); +#define LZ4_DECODER_RING_BUFFER_SIZE(mbs) (65536 + 14 + (mbs)) /* for static allocation; mbs presumed valid */ + /*! LZ4_decompress_*_continue() : * These decoding functions allow decompression of consecutive blocks in "streaming" mode. * A block is an unsplittable entity, it must be presented entirely to a decompression function. - * Decompression functions only accept one block at a time. - * Previously decoded blocks *must* remain available at the memory position where they were decoded (up to 64 KB). + * Decompression functions only accepts one block at a time. + * The last 64KB of previously decoded data *must* remain available and unmodified at the memory position where they were decoded. + * If less than 64KB of data has been decoded, all the data must be present. * - * Special : if application sets a ring buffer for decompression, it must respect one of the following conditions : - * - Exactly same size as encoding buffer, with same update rule (block boundaries at same positions) - * In which case, the decoding & encoding ring buffer can have any size, including very small ones ( < 64 KB). - * - Larger than encoding buffer, by a minimum of maxBlockSize more bytes. - * maxBlockSize is implementation dependent. It's the maximum size of any single block. + * Special : if decompression side sets a ring buffer, it must respect one of the following conditions : + * - Decompression buffer size is _at least_ LZ4_decoderRingBufferSize(maxBlockSize). + * maxBlockSize is the maximum size of any single block. It can have any value > 16 bytes. + * In which case, encoding and decoding buffers do not need to be synchronized. + * Actually, data can be produced by any source compliant with LZ4 format specification, and respecting maxBlockSize. + * - Synchronized mode : + * Decompression buffer size is _exactly_ the same as compression buffer size, + * and follows exactly same update rule (block boundaries at same positions), + * and decoding function is provided with exact decompressed size of each block (exception for last block of the stream), + * _then_ decoding & encoding ring buffer can have any size, including small ones ( < 64 KB). + * - Decompression buffer is larger than encoding buffer, by a minimum of maxBlockSize more bytes. * In which case, encoding and decoding buffers do not need to be synchronized, * and encoding ring buffer can have any size, including small ones ( < 64 KB). - * - _At least_ 64 KB + 8 bytes + maxBlockSize. - * In which case, encoding and decoding buffers do not need to be synchronized, - * and encoding ring buffer can have any size, including larger than decoding buffer. - * Whenever these conditions are not possible, save the last 64KB of decoded data into a safe buffer, - * and indicate where it is saved using LZ4_setStreamDecode() before decompressing next block. 
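/* --- Editor's illustrative sketch; not part of the upstream lz4 diff. ---
 * Streaming decompression into a ring buffer sized with
 * LZ4_decoderRingBufferSize(), following the conditions described above.
 * read_block() and consume() are hypothetical callbacks: the first returns
 * the next compressed block (<= 0 at end of stream), the second uses the
 * decoded bytes before the ring wraps over them. */
#include <stdlib.h>
#include "lz4.h"

#define MAX_BLOCK_SIZE (64 * 1024)

static int decode_ring(int (*read_block)(char* dst, int capacity),
                       void (*consume)(const char* data, int size))
{
    LZ4_streamDecode_t* const sd = LZ4_createStreamDecode();
    int const ringSize = LZ4_decoderRingBufferSize(MAX_BLOCK_SIZE);
    char* const ring = (char*)malloc((size_t)ringSize);
    char* const cmp = (char*)malloc((size_t)LZ4_COMPRESSBOUND(MAX_BLOCK_SIZE));
    int offset = 0;
    int rc = 0;
    if (sd == NULL || ring == NULL || cmp == NULL) rc = -1;
    while (rc == 0) {
        int const cSize = read_block(cmp, LZ4_COMPRESSBOUND(MAX_BLOCK_SIZE));
        int dSize;
        if (cSize <= 0) break;                               /* end of stream */
        if (offset + MAX_BLOCK_SIZE > ringSize) offset = 0;  /* not enough room left: wrap */
        dSize = LZ4_decompress_safe_continue(sd, cmp, ring + offset, cSize, MAX_BLOCK_SIZE);
        if (dSize < 0) { rc = -1; break; }                   /* malformed block */
        consume(ring + offset, dSize);
        offset += dSize;
    }
    LZ4_freeStreamDecode(sd);
    free(ring);
    free(cmp);
    return rc;
}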
+ * + * Whenever these conditions are not possible, + * save the last 64KB of decoded data into a safe buffer where it can't be modified during decompression, + * then indicate where this data is saved using LZ4_setStreamDecode(), before decompressing next block. */ -LZ4LIB_API int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int compressedSize, int maxDecompressedSize); -LZ4LIB_API int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int originalSize); +LZ4LIB_API int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* src, char* dst, int srcSize, int dstCapacity); +LZ4LIB_API int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* src, char* dst, int originalSize); /*! LZ4_decompress_*_usingDict() : * These decoding functions work the same as * a combination of LZ4_setStreamDecode() followed by LZ4_decompress_*_continue() * They are stand-alone, and don't need an LZ4_streamDecode_t structure. + * Dictionary is presumed stable : it must remain accessible and unmodified during next decompression. */ -LZ4LIB_API int LZ4_decompress_safe_usingDict (const char* source, char* dest, int compressedSize, int maxDecompressedSize, const char* dictStart, int dictSize); -LZ4LIB_API int LZ4_decompress_fast_usingDict (const char* source, char* dest, int originalSize, const char* dictStart, int dictSize); +LZ4LIB_API int LZ4_decompress_safe_usingDict (const char* src, char* dst, int srcSize, int dstCapcity, const char* dictStart, int dictSize); +LZ4LIB_API int LZ4_decompress_fast_usingDict (const char* src, char* dst, int originalSize, const char* dictStart, int dictSize); /*^********************************************** * !!!!!! STATIC LINKING ONLY !!!!!! ***********************************************/ + +/*-************************************ + * Unstable declarations + ************************************** + * Declarations in this section should be considered unstable. + * Use at your own peril, etc., etc. + * They may be removed in the future. + * Their signatures may change. + **************************************/ + +#ifdef LZ4_STATIC_LINKING_ONLY + +/*! LZ4_resetStream_fast() : + * Use this, like LZ4_resetStream(), to prepare a context for a new chain of + * calls to a streaming API (e.g., LZ4_compress_fast_continue()). + * + * Note: + * Using this in advance of a non- streaming-compression function is redundant, + * and potentially bad for performance, since they all perform their own custom + * reset internally. + * + * Differences from LZ4_resetStream(): + * When an LZ4_stream_t is known to be in a internally coherent state, + * it can often be prepared for a new compression with almost no work, only + * sometimes falling back to the full, expensive reset that is always required + * when the stream is in an indeterminate state (i.e., the reset performed by + * LZ4_resetStream()). 
+ * + * LZ4_streams are guaranteed to be in a valid state when: + * - returned from LZ4_createStream() + * - reset by LZ4_resetStream() + * - memset(stream, 0, sizeof(LZ4_stream_t)), though this is discouraged + * - the stream was in a valid state and was reset by LZ4_resetStream_fast() + * - the stream was in a valid state and was then used in any compression call + * that returned success + * - the stream was in an indeterminate state and was used in a compression + * call that fully reset the state (e.g., LZ4_compress_fast_extState()) and + * that returned success + * + * When a stream isn't known to be in a valid state, it is not safe to pass to + * any fastReset or streaming function. It must first be cleansed by the full + * LZ4_resetStream(). + */ +LZ4LIB_API void LZ4_resetStream_fast (LZ4_stream_t* streamPtr); + +/*! LZ4_compress_fast_extState_fastReset() : + * A variant of LZ4_compress_fast_extState(). + * + * Using this variant avoids an expensive initialization step. It is only safe + * to call if the state buffer is known to be correctly initialized already + * (see above comment on LZ4_resetStream_fast() for a definition of "correctly + * initialized"). From a high level, the difference is that this function + * initializes the provided state with a call to something like + * LZ4_resetStream_fast() while LZ4_compress_fast_extState() starts with a + * call to LZ4_resetStream(). + */ +LZ4LIB_API int LZ4_compress_fast_extState_fastReset (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int acceleration); + +/*! LZ4_attach_dictionary() : + * This is an experimental API that allows for the efficient use of a + * static dictionary many times. + * + * Rather than re-loading the dictionary buffer into a working context before + * each compression, or copying a pre-loaded dictionary's LZ4_stream_t into a + * working LZ4_stream_t, this function introduces a no-copy setup mechanism, + * in which the working stream references the dictionary stream in-place. + * + * Several assumptions are made about the state of the dictionary stream. + * Currently, only streams which have been prepared by LZ4_loadDict() should + * be expected to work. + * + * Alternatively, the provided dictionary stream pointer may be NULL, in which + * case any existing dictionary stream is unset. + * + * If a dictionary is provided, it replaces any pre-existing stream history. + * The dictionary contents are the only history that can be referenced and + * logically immediately precede the data compressed in the first subsequent + * compression call. + * + * The dictionary will only remain attached to the working stream through the + * first compression call, at the end of which it is cleared. The dictionary + * stream (and source buffer) must remain in-place / accessible / unchanged + * through the completion of the first compression call on the stream. 
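/* --- Editor's illustrative sketch; not part of the upstream lz4 diff. ---
 * Reusing a static dictionary without copying it, per the
 * LZ4_attach_dictionary() notes above. These are LZ4_STATIC_LINKING_ONLY
 * declarations, so the macro must be defined before including lz4.h.
 * dictStream is assumed to have been prepared once with LZ4_loadDict();
 * the function and variable names are invented for the example. */
#define LZ4_STATIC_LINKING_ONLY
#include "lz4.h"

static int compress_with_attached_dict(const LZ4_stream_t* dictStream,
                                       const char* src, int srcSize,
                                       char* dst, int dstCapacity)
{
    LZ4_stream_t* const work = LZ4_createStream();
    int cSize = 0;
    if (work != NULL) {
        LZ4_resetStream_fast(work);               /* cheap reset: freshly created stream is valid */
        LZ4_attach_dictionary(work, dictStream);  /* no-copy reference; cleared after first call */
        cSize = LZ4_compress_fast_continue(work, src, dst, srcSize, dstCapacity, 1);
        LZ4_freeStream(work);
    }
    return cSize;                                 /* 0 means error (e.g., dst too small) */
}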
+ */ +LZ4LIB_API void LZ4_attach_dictionary(LZ4_stream_t *working_stream, const LZ4_stream_t *dictionary_stream); + +#endif + /*-************************************ * Private definitions ************************************** @@ -347,14 +493,16 @@ LZ4LIB_API int LZ4_decompress_fast_usingDict (const char* source, char* dest, in #if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) #include -typedef struct { +typedef struct LZ4_stream_t_internal LZ4_stream_t_internal; +struct LZ4_stream_t_internal { uint32_t hashTable[LZ4_HASH_SIZE_U32]; uint32_t currentOffset; - uint32_t initCheck; + uint16_t initCheck; + uint16_t tableType; const uint8_t* dictionary; - uint8_t* bufferStart; /* obsolete, used for slideInputBuffer */ + const LZ4_stream_t_internal* dictCtx; uint32_t dictSize; -} LZ4_stream_t_internal; +}; typedef struct { const uint8_t* externalDict; @@ -365,14 +513,16 @@ typedef struct { #else -typedef struct { +typedef struct LZ4_stream_t_internal LZ4_stream_t_internal; +struct LZ4_stream_t_internal { unsigned int hashTable[LZ4_HASH_SIZE_U32]; unsigned int currentOffset; - unsigned int initCheck; + unsigned short initCheck; + unsigned short tableType; const unsigned char* dictionary; - unsigned char* bufferStart; /* obsolete, used for slideInputBuffer */ + const LZ4_stream_t_internal* dictCtx; unsigned int dictSize; -} LZ4_stream_t_internal; +}; typedef struct { const unsigned char* externalDict; @@ -429,11 +579,9 @@ union LZ4_streamDecode_u { # define LZ4_DEPRECATED(message) /* disable deprecation warnings */ #else # define LZ4_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__) -# if defined(__clang__) /* clang doesn't handle mixed C++11 and CNU attributes */ -# define LZ4_DEPRECATED(message) __attribute__((deprecated(message))) -# elif defined (__cplusplus) && (__cplusplus >= 201402) /* C++14 or greater */ +# if defined (__cplusplus) && (__cplusplus >= 201402) /* C++14 or greater */ # define LZ4_DEPRECATED(message) [[deprecated(message)]] -# elif (LZ4_GCC_VERSION >= 405) +# elif (LZ4_GCC_VERSION >= 405) || defined(__clang__) # define LZ4_DEPRECATED(message) __attribute__((deprecated(message))) # elif (LZ4_GCC_VERSION >= 301) # define LZ4_DEPRECATED(message) __attribute__((deprecated)) @@ -446,26 +594,34 @@ union LZ4_streamDecode_u { #endif /* LZ4_DISABLE_DEPRECATE_WARNINGS */ /* Obsolete compression functions */ -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_default() instead") int LZ4_compress (const char* source, char* dest, int sourceSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_default() instead") int LZ4_compress_limitedOutput (const char* source, char* dest, int sourceSize, int maxOutputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_fast_extState() instead") int LZ4_compress_withState (void* state, const char* source, char* dest, int inputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_fast_extState() instead") int LZ4_compress_limitedOutput_withState (void* state, const char* source, char* dest, int inputSize, int maxOutputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_fast_continue() instead") int LZ4_compress_continue (LZ4_stream_t* LZ4_streamPtr, const char* source, char* dest, int inputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_fast_continue() instead") int LZ4_compress_limitedOutput_continue (LZ4_stream_t* LZ4_streamPtr, const char* source, char* dest, int inputSize, int maxOutputSize); +LZ4_DEPRECATED("use LZ4_compress_default() instead") LZ4LIB_API int LZ4_compress (const char* source, char* dest, int 
sourceSize); +LZ4_DEPRECATED("use LZ4_compress_default() instead") LZ4LIB_API int LZ4_compress_limitedOutput (const char* source, char* dest, int sourceSize, int maxOutputSize); +LZ4_DEPRECATED("use LZ4_compress_fast_extState() instead") LZ4LIB_API int LZ4_compress_withState (void* state, const char* source, char* dest, int inputSize); +LZ4_DEPRECATED("use LZ4_compress_fast_extState() instead") LZ4LIB_API int LZ4_compress_limitedOutput_withState (void* state, const char* source, char* dest, int inputSize, int maxOutputSize); +LZ4_DEPRECATED("use LZ4_compress_fast_continue() instead") LZ4LIB_API int LZ4_compress_continue (LZ4_stream_t* LZ4_streamPtr, const char* source, char* dest, int inputSize); +LZ4_DEPRECATED("use LZ4_compress_fast_continue() instead") LZ4LIB_API int LZ4_compress_limitedOutput_continue (LZ4_stream_t* LZ4_streamPtr, const char* source, char* dest, int inputSize, int maxOutputSize); /* Obsolete decompression functions */ -LZ4LIB_API LZ4_DEPRECATED("use LZ4_decompress_fast() instead") int LZ4_uncompress (const char* source, char* dest, int outputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_decompress_safe() instead") int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize, int maxOutputSize); +LZ4_DEPRECATED("use LZ4_decompress_fast() instead") LZ4LIB_API int LZ4_uncompress (const char* source, char* dest, int outputSize); +LZ4_DEPRECATED("use LZ4_decompress_safe() instead") LZ4LIB_API int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize, int maxOutputSize); -/* Obsolete streaming functions; use new streaming interface whenever possible */ -LZ4LIB_API LZ4_DEPRECATED("use LZ4_createStream() instead") void* LZ4_create (char* inputBuffer); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_createStream() instead") int LZ4_sizeofStreamState(void); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_resetStream() instead") int LZ4_resetStreamState(void* state, char* inputBuffer); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_saveDict() instead") char* LZ4_slideInputBuffer (void* state); +/* Obsolete streaming functions; degraded functionality; do not use! + * + * In order to perform streaming compression, these functions depended on data + * that is no longer tracked in the state. They have been preserved as well as + * possible: using them will still produce a correct output. However, they don't + * actually retain any history between compression calls. The compression ratio + * achieved will therefore be no better than compressing each chunk + * independently. 
+ */ +LZ4_DEPRECATED("Use LZ4_createStream() instead") LZ4LIB_API void* LZ4_create (char* inputBuffer); +LZ4_DEPRECATED("Use LZ4_createStream() instead") LZ4LIB_API int LZ4_sizeofStreamState(void); +LZ4_DEPRECATED("Use LZ4_resetStream() instead") LZ4LIB_API int LZ4_resetStreamState(void* state, char* inputBuffer); +LZ4_DEPRECATED("Use LZ4_saveDict() instead") LZ4LIB_API char* LZ4_slideInputBuffer (void* state); /* Obsolete streaming decoding functions */ -LZ4LIB_API LZ4_DEPRECATED("use LZ4_decompress_safe_usingDict() instead") int LZ4_decompress_safe_withPrefix64k (const char* src, char* dst, int compressedSize, int maxDstSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_decompress_fast_usingDict() instead") int LZ4_decompress_fast_withPrefix64k (const char* src, char* dst, int originalSize); +LZ4_DEPRECATED("use LZ4_decompress_safe_usingDict() instead") LZ4LIB_API int LZ4_decompress_safe_withPrefix64k (const char* src, char* dst, int compressedSize, int maxDstSize); +LZ4_DEPRECATED("use LZ4_decompress_fast_usingDict() instead") LZ4LIB_API int LZ4_decompress_fast_withPrefix64k (const char* src, char* dst, int originalSize); #endif /* LZ4_H_2983827168210 */ diff --git a/src/third-party/lz4/lz4frame.c b/src/third-party/lz4/lz4frame.c index 3adbdd9f0c5..08bf0faee1d 100644 --- a/src/third-party/lz4/lz4frame.c +++ b/src/third-party/lz4/lz4frame.c @@ -46,11 +46,25 @@ You can contact the author at : #endif +/*-************************************ +* Tuning parameters +**************************************/ +/* + * LZ4F_HEAPMODE : + * Select how default compression functions will allocate memory for their hash table, + * in memory stack (0:default, fastest), or in memory heap (1:requires malloc()). + */ +#ifndef LZ4F_HEAPMODE +# define LZ4F_HEAPMODE 0 +#endif + + /*-************************************ * Memory routines **************************************/ #include /* malloc, calloc, free */ -#define ALLOCATOR(s) calloc(1,s) +#define ALLOC(s) malloc(s) +#define ALLOC_AND_ZERO(s) calloc(1,s) #define FREEMEM free #include /* memset, memcpy, memmove */ #define MEM_INIT memset @@ -59,7 +73,9 @@ You can contact the author at : /*-************************************ * Includes **************************************/ -#include "lz4frame_static.h" +#define LZ4F_STATIC_LINKING_ONLY +#include "lz4frame.h" +#define LZ4_STATIC_LINKING_ONLY #include "lz4.h" #define LZ4_HC_STATIC_LINKING_ONLY #include "lz4hc.h" @@ -70,8 +86,29 @@ You can contact the author at : /*-************************************ * Debug **************************************/ +#if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1) +# include +#else +# ifndef assert +# define assert(condition) ((void)0) +# endif +#endif + #define LZ4F_STATIC_ASSERT(c) { enum { LZ4F_static_assert = 1/(int)(!!(c)) }; } /* use only *after* variable declarations */ +#if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2) && !defined(DEBUGLOG) +# include +static int g_debuglog_enable = 1; +# define DEBUGLOG(l, ...) { \ + if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) { \ + fprintf(stderr, __FILE__ ": "); \ + fprintf(stderr, __VA_ARGS__); \ + fprintf(stderr, " \n"); \ + } } +#else +# define DEBUGLOG(l, ...) 
{} /* disabled */ +#endif + /*-************************************ * Basic Types @@ -180,7 +217,8 @@ typedef struct LZ4F_cctx_s U64 totalInSize; XXH32_state_t xxh; void* lz4CtxPtr; - U32 lz4CtxLevel; /* 0: unallocated; 1: LZ4_stream_t; 3: LZ4_streamHC_t */ + U16 lz4CtxAlloc; /* sized for: 0 = none, 1 = lz4 ctx, 2 = lz4hc ctx */ + U16 lz4CtxState; /* in use as: 0 = none, 1 = lz4 ctx, 2 = lz4hc ctx */ } LZ4F_cctx_t; @@ -271,7 +309,7 @@ static size_t LZ4F_compressBound_internal(size_t srcSize, size_t alreadyBuffered) { LZ4F_preferences_t prefsNull; - memset(&prefsNull, 0, sizeof(prefsNull)); + MEM_INIT(&prefsNull, 0, sizeof(prefsNull)); prefsNull.frameInfo.contentChecksumFlag = LZ4F_contentChecksumEnabled; /* worst case */ { const LZ4F_preferences_t* const prefsPtr = (preferencesPtr==NULL) ? &prefsNull : preferencesPtr; U32 const flush = prefsPtr->autoFlush | (srcSize==0); @@ -281,7 +319,7 @@ static size_t LZ4F_compressBound_internal(size_t srcSize, size_t const bufferedSize = MIN(alreadyBuffered, maxBuffered); size_t const maxSrcSize = srcSize + bufferedSize; unsigned const nbFullBlocks = (unsigned)(maxSrcSize / blockSize); - size_t const partialBlockSize = (srcSize - (srcSize==0)) & (blockSize-1); /* 0 => -1 == MAX => blockSize-1 */ + size_t const partialBlockSize = maxSrcSize & (blockSize-1); size_t const lastBlockSize = flush ? partialBlockSize : 0; unsigned const nbBlocks = nbFullBlocks + (lastBlockSize>0); @@ -300,7 +338,7 @@ size_t LZ4F_compressFrameBound(size_t srcSize, const LZ4F_preferences_t* prefere size_t const headerSize = maxFHSize; /* max header size, including optional fields */ if (preferencesPtr!=NULL) prefs = *preferencesPtr; - else memset(&prefs, 0, sizeof(prefs)); + else MEM_INIT(&prefs, 0, sizeof(prefs)); prefs.autoFlush = 1; return headerSize + LZ4F_compressBound_internal(srcSize, &prefs, 0);; @@ -316,27 +354,22 @@ size_t LZ4F_compressFrameBound(size_t srcSize, const LZ4F_preferences_t* prefere * @return : number of bytes written into dstBuffer, * or an error code if it fails (can be tested using LZ4F_isError()) */ -size_t LZ4F_compressFrame_usingCDict(void* dstBuffer, size_t dstCapacity, +size_t LZ4F_compressFrame_usingCDict(LZ4F_cctx* cctx, + void* dstBuffer, size_t dstCapacity, const void* srcBuffer, size_t srcSize, const LZ4F_CDict* cdict, const LZ4F_preferences_t* preferencesPtr) { - LZ4F_cctx_t cctxI; - LZ4_stream_t lz4ctx; LZ4F_preferences_t prefs; LZ4F_compressOptions_t options; BYTE* const dstStart = (BYTE*) dstBuffer; BYTE* dstPtr = dstStart; BYTE* const dstEnd = dstStart + dstCapacity; - memset(&cctxI, 0, sizeof(cctxI)); - cctxI.version = LZ4F_VERSION; - cctxI.maxBufferSize = 5 MB; /* mess with real buffer size to prevent dynamic allocation; works only because autoflush==1 & stableSrc==1 */ - if (preferencesPtr!=NULL) prefs = *preferencesPtr; else - memset(&prefs, 0, sizeof(prefs)); + MEM_INIT(&prefs, 0, sizeof(prefs)); if (prefs.frameInfo.contentSize != 0) prefs.frameInfo.contentSize = (U64)srcSize; /* auto-correct content size if selected (!=0) */ @@ -345,32 +378,24 @@ size_t LZ4F_compressFrame_usingCDict(void* dstBuffer, size_t dstCapacity, if (srcSize <= LZ4F_getBlockSize(prefs.frameInfo.blockSizeID)) prefs.frameInfo.blockMode = LZ4F_blockIndependent; /* only one block => no need for inter-block link */ - if (prefs.compressionLevel < LZ4HC_CLEVEL_MIN) { - cctxI.lz4CtxPtr = &lz4ctx; - cctxI.lz4CtxLevel = 1; - } /* fast compression context pre-created on stack */ - - memset(&options, 0, sizeof(options)); + MEM_INIT(&options, 0, sizeof(options)); 
options.stableSrc = 1; if (dstCapacity < LZ4F_compressFrameBound(srcSize, &prefs)) /* condition to guarantee success */ return err0r(LZ4F_ERROR_dstMaxSize_tooSmall); - { size_t const headerSize = LZ4F_compressBegin_usingCDict(&cctxI, dstBuffer, dstCapacity, cdict, &prefs); /* write header */ + { size_t const headerSize = LZ4F_compressBegin_usingCDict(cctx, dstBuffer, dstCapacity, cdict, &prefs); /* write header */ if (LZ4F_isError(headerSize)) return headerSize; dstPtr += headerSize; /* header size */ } - { size_t const cSize = LZ4F_compressUpdate(&cctxI, dstPtr, dstEnd-dstPtr, srcBuffer, srcSize, &options); + { size_t const cSize = LZ4F_compressUpdate(cctx, dstPtr, dstEnd-dstPtr, srcBuffer, srcSize, &options); if (LZ4F_isError(cSize)) return cSize; dstPtr += cSize; } - { size_t const tailSize = LZ4F_compressEnd(&cctxI, dstPtr, dstEnd-dstPtr, &options); /* flush last block, and generate suffix */ + { size_t const tailSize = LZ4F_compressEnd(cctx, dstPtr, dstEnd-dstPtr, &options); /* flush last block, and generate suffix */ if (LZ4F_isError(tailSize)) return tailSize; dstPtr += tailSize; } - if (prefs.compressionLevel >= LZ4HC_CLEVEL_MIN) /* Ctx allocation only for lz4hc */ - FREEMEM(cctxI.lz4CtxPtr); - return (dstPtr - dstStart); } @@ -386,9 +411,44 @@ size_t LZ4F_compressFrame(void* dstBuffer, size_t dstCapacity, const void* srcBuffer, size_t srcSize, const LZ4F_preferences_t* preferencesPtr) { - return LZ4F_compressFrame_usingCDict(dstBuffer, dstCapacity, - srcBuffer, srcSize, - NULL, preferencesPtr); + size_t result; +#if (LZ4F_HEAPMODE) + LZ4F_cctx_t *cctxPtr; + result = LZ4F_createCompressionContext(&cctxPtr, LZ4F_VERSION); + if (LZ4F_isError(result)) return result; +#else + LZ4F_cctx_t cctx; + LZ4_stream_t lz4ctx; + LZ4F_cctx_t *cctxPtr = &cctx; + + DEBUGLOG(4, "LZ4F_compressFrame"); + MEM_INIT(&cctx, 0, sizeof(cctx)); + cctx.version = LZ4F_VERSION; + cctx.maxBufferSize = 5 MB; /* mess with real buffer size to prevent dynamic allocation; works only because autoflush==1 & stableSrc==1 */ + if (preferencesPtr == NULL || + preferencesPtr->compressionLevel < LZ4HC_CLEVEL_MIN) + { + LZ4_resetStream(&lz4ctx); + cctxPtr->lz4CtxPtr = &lz4ctx; + cctxPtr->lz4CtxAlloc = 1; + cctxPtr->lz4CtxState = 1; + } +#endif + + result = LZ4F_compressFrame_usingCDict(cctxPtr, dstBuffer, dstCapacity, + srcBuffer, srcSize, + NULL, preferencesPtr); + +#if (LZ4F_HEAPMODE) + LZ4F_freeCompressionContext(cctxPtr); +#else + if (preferencesPtr != NULL && + preferencesPtr->compressionLevel >= LZ4HC_CLEVEL_MIN) + { + FREEMEM(cctxPtr->lz4CtxPtr); + } +#endif + return result; } @@ -411,13 +471,14 @@ struct LZ4F_CDict_s { LZ4F_CDict* LZ4F_createCDict(const void* dictBuffer, size_t dictSize) { const char* dictStart = (const char*)dictBuffer; - LZ4F_CDict* cdict = (LZ4F_CDict*) malloc(sizeof(*cdict)); + LZ4F_CDict* cdict = (LZ4F_CDict*) ALLOC(sizeof(*cdict)); + DEBUGLOG(4, "LZ4F_createCDict"); if (!cdict) return NULL; if (dictSize > 64 KB) { dictStart += dictSize - 64 KB; dictSize = 64 KB; } - cdict->dictContent = ALLOCATOR(dictSize); + cdict->dictContent = ALLOC(dictSize); cdict->fastCtx = LZ4_createStream(); cdict->HCCtx = LZ4_createStreamHC(); if (!cdict->dictContent || !cdict->fastCtx || !cdict->HCCtx) { @@ -425,9 +486,8 @@ LZ4F_CDict* LZ4F_createCDict(const void* dictBuffer, size_t dictSize) return NULL; } memcpy(cdict->dictContent, dictStart, dictSize); - LZ4_resetStream(cdict->fastCtx); LZ4_loadDict (cdict->fastCtx, (const char*)cdict->dictContent, (int)dictSize); - LZ4_resetStreamHC(cdict->HCCtx, 
LZ4HC_CLEVEL_DEFAULT); + LZ4_setCompressionLevel(cdict->HCCtx, LZ4HC_CLEVEL_DEFAULT); LZ4_loadDictHC(cdict->HCCtx, (const char*)cdict->dictContent, (int)dictSize); return cdict; } @@ -456,7 +516,7 @@ void LZ4F_freeCDict(LZ4F_CDict* cdict) */ LZ4F_errorCode_t LZ4F_createCompressionContext(LZ4F_compressionContext_t* LZ4F_compressionContextPtr, unsigned version) { - LZ4F_cctx_t* const cctxPtr = (LZ4F_cctx_t*)ALLOCATOR(sizeof(LZ4F_cctx_t)); + LZ4F_cctx_t* const cctxPtr = (LZ4F_cctx_t*)ALLOC_AND_ZERO(sizeof(LZ4F_cctx_t)); if (cctxPtr==NULL) return err0r(LZ4F_ERROR_allocation_failed); cctxPtr->version = version; @@ -482,6 +542,36 @@ LZ4F_errorCode_t LZ4F_freeCompressionContext(LZ4F_compressionContext_t LZ4F_comp } +/** + * This function prepares the internal LZ4(HC) stream for a new compression, + * resetting the context and attaching the dictionary, if there is one. + * + * It needs to be called at the beginning of each independent compression + * stream (i.e., at the beginning of a frame in blockLinked mode, or at the + * beginning of each block in blockIndependent mode). + */ +static void LZ4F_initStream(void* ctx, + const LZ4F_CDict* cdict, + int level, + LZ4F_blockMode_t blockMode) { + if (level < LZ4HC_CLEVEL_MIN) { + if (cdict != NULL || blockMode == LZ4F_blockLinked) { + /* In these cases, we will call LZ4_compress_fast_continue(), + * which needs an already reset context. Otherwise, we'll call a + * one-shot API. The non-continued APIs internally perform their own + * resets at the beginning of their calls, where they know what + * tableType they need the context to be in. So in that case this + * would be misguided / wasted work. */ + LZ4_resetStream_fast((LZ4_stream_t*)ctx); + } + LZ4_attach_dictionary((LZ4_stream_t *)ctx, cdict ? cdict->fastCtx : NULL); + } else { + LZ4_resetStreamHC_fast((LZ4_streamHC_t*)ctx, level); + LZ4_attach_HC_dictionary((LZ4_streamHC_t *)ctx, cdict ? cdict->HCCtx : NULL); + } +} + + /*! LZ4F_compressBegin_usingCDict() : * init streaming compression and writes frame header into dstBuffer. * dstBuffer must be >= LZ4F_HEADER_SIZE_MAX bytes. @@ -499,21 +589,33 @@ size_t LZ4F_compressBegin_usingCDict(LZ4F_cctx* cctxPtr, BYTE* headerStart; if (dstCapacity < maxFHSize) return err0r(LZ4F_ERROR_dstMaxSize_tooSmall); - memset(&prefNull, 0, sizeof(prefNull)); + MEM_INIT(&prefNull, 0, sizeof(prefNull)); if (preferencesPtr == NULL) preferencesPtr = &prefNull; cctxPtr->prefs = *preferencesPtr; /* Ctx Management */ - { U32 const tableID = (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) ? 1 : 2; /* 0:nothing ; 1:LZ4 table ; 2:HC tables */ - if (cctxPtr->lz4CtxLevel < tableID) { + { U16 const ctxTypeID = (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) ? 
1 : 2; + if (cctxPtr->lz4CtxAlloc < ctxTypeID) { FREEMEM(cctxPtr->lz4CtxPtr); - if (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) + if (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) { cctxPtr->lz4CtxPtr = (void*)LZ4_createStream(); - else + } else { cctxPtr->lz4CtxPtr = (void*)LZ4_createStreamHC(); + } if (cctxPtr->lz4CtxPtr == NULL) return err0r(LZ4F_ERROR_allocation_failed); - cctxPtr->lz4CtxLevel = tableID; - } } + cctxPtr->lz4CtxAlloc = ctxTypeID; + cctxPtr->lz4CtxState = ctxTypeID; + } else if (cctxPtr->lz4CtxState != ctxTypeID) { + /* otherwise, a sufficient buffer is allocated, but we need to + * reset it to the correct context type */ + if (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) { + LZ4_resetStream((LZ4_stream_t *) cctxPtr->lz4CtxPtr); + } else { + LZ4_resetStreamHC((LZ4_streamHC_t *) cctxPtr->lz4CtxPtr, cctxPtr->prefs.compressionLevel); + } + cctxPtr->lz4CtxState = ctxTypeID; + } + } /* Buffer Management */ if (cctxPtr->prefs.frameInfo.blockSizeID == 0) @@ -527,7 +629,7 @@ size_t LZ4F_compressBegin_usingCDict(LZ4F_cctx* cctxPtr, if (cctxPtr->maxBufferSize < requiredBuffSize) { cctxPtr->maxBufferSize = 0; FREEMEM(cctxPtr->tmpBuff); - cctxPtr->tmpBuff = (BYTE*)ALLOCATOR(requiredBuffSize); + cctxPtr->tmpBuff = (BYTE*)ALLOC_AND_ZERO(requiredBuffSize); if (cctxPtr->tmpBuff == NULL) return err0r(LZ4F_ERROR_allocation_failed); cctxPtr->maxBufferSize = requiredBuffSize; } } @@ -539,19 +641,10 @@ size_t LZ4F_compressBegin_usingCDict(LZ4F_cctx* cctxPtr, cctxPtr->cdict = cdict; if (cctxPtr->prefs.frameInfo.blockMode == LZ4F_blockLinked) { /* frame init only for blockLinked : blockIndependent will be init at each block */ - if (cdict) { - if (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) { - memcpy(cctxPtr->lz4CtxPtr, cdict->fastCtx, sizeof(*cdict->fastCtx)); - } else { - memcpy(cctxPtr->lz4CtxPtr, cdict->HCCtx, sizeof(*cdict->HCCtx)); - LZ4_setCompressionLevel((LZ4_streamHC_t*)cctxPtr->lz4CtxPtr, cctxPtr->prefs.compressionLevel); - } - } else { - if (cctxPtr->prefs.compressionLevel < LZ4HC_CLEVEL_MIN) - LZ4_resetStream((LZ4_stream_t*)(cctxPtr->lz4CtxPtr)); - else - LZ4_resetStreamHC((LZ4_streamHC_t*)(cctxPtr->lz4CtxPtr), cctxPtr->prefs.compressionLevel); - } + LZ4F_initStream(cctxPtr->lz4CtxPtr, cdict, cctxPtr->prefs.compressionLevel, LZ4F_blockLinked); + } + if (preferencesPtr->compressionLevel >= LZ4HC_CLEVEL_MIN) { + LZ4_favorDecompressionSpeed((LZ4_streamHC_t*)cctxPtr->lz4CtxPtr, (int)preferencesPtr->favorDecSpeed); } /* Magic Number */ @@ -604,10 +697,10 @@ size_t LZ4F_compressBegin(LZ4F_cctx* cctxPtr, } -/* LZ4F_compressBound() : - * @ return size of Dst buffer given a srcSize to handle worst case situations. - * The LZ4F_frameInfo_t structure is optional : if NULL, preferences will be set to cover worst case situations. - * This function cannot fail. +/* LZ4F_compressBound() : + * @return minimum capacity of dstBuffer for a given srcSize to handle worst case scenario. + * LZ4F_preferences_t structure is optional : if NULL, preferences will be set to cover worst case scenario. + * This function cannot fail. */ size_t LZ4F_compressBound(size_t srcSize, const LZ4F_preferences_t* preferencesPtr) { @@ -645,29 +738,29 @@ static size_t LZ4F_makeBlock(void* dst, const void* src, size_t srcSize, static int LZ4F_compressBlock(void* ctx, const char* src, char* dst, int srcSize, int dstCapacity, int level, const LZ4F_CDict* cdict) { - int const acceleration = (level < -1) ? -level : 1; + int const acceleration = (level < 0) ? 
-level + 1 : 1; + LZ4F_initStream(ctx, cdict, level, LZ4F_blockIndependent); if (cdict) { - memcpy(ctx, cdict->fastCtx, sizeof(*cdict->fastCtx)); return LZ4_compress_fast_continue((LZ4_stream_t*)ctx, src, dst, srcSize, dstCapacity, acceleration); + } else { + return LZ4_compress_fast_extState_fastReset(ctx, src, dst, srcSize, dstCapacity, acceleration); } - return LZ4_compress_fast_extState(ctx, src, dst, srcSize, dstCapacity, acceleration); } static int LZ4F_compressBlock_continue(void* ctx, const char* src, char* dst, int srcSize, int dstCapacity, int level, const LZ4F_CDict* cdict) { - int const acceleration = (level < -1) ? -level : 1; + int const acceleration = (level < 0) ? -level + 1 : 1; (void)cdict; /* init once at beginning of frame */ return LZ4_compress_fast_continue((LZ4_stream_t*)ctx, src, dst, srcSize, dstCapacity, acceleration); } static int LZ4F_compressBlockHC(void* ctx, const char* src, char* dst, int srcSize, int dstCapacity, int level, const LZ4F_CDict* cdict) { + LZ4F_initStream(ctx, cdict, level, LZ4F_blockIndependent); if (cdict) { - memcpy(ctx, cdict->HCCtx, sizeof(*cdict->HCCtx)); - LZ4_setCompressionLevel((LZ4_streamHC_t*)ctx, level); return LZ4_compress_HC_continue((LZ4_streamHC_t*)ctx, src, dst, srcSize, dstCapacity); } - return LZ4_compress_HC_extStateHC(ctx, src, dst, srcSize, dstCapacity, level); + return LZ4_compress_HC_extStateHC_fastReset(ctx, src, dst, srcSize, dstCapacity, level); } static int LZ4F_compressBlockHC_continue(void* ctx, const char* src, char* dst, int srcSize, int dstCapacity, int level, const LZ4F_CDict* cdict) @@ -716,10 +809,12 @@ size_t LZ4F_compressUpdate(LZ4F_cctx* cctxPtr, LZ4F_lastBlockStatus lastBlockCompressed = notDone; compressFunc_t const compress = LZ4F_selectCompression(cctxPtr->prefs.frameInfo.blockMode, cctxPtr->prefs.compressionLevel); + DEBUGLOG(4, "LZ4F_compressUpdate (srcSize=%zu)", srcSize); if (cctxPtr->cStage != 1) return err0r(LZ4F_ERROR_GENERIC); - if (dstCapacity < LZ4F_compressBound_internal(srcSize, &(cctxPtr->prefs), cctxPtr->tmpInSize)) return err0r(LZ4F_ERROR_dstMaxSize_tooSmall); - memset(&cOptionsNull, 0, sizeof(cOptionsNull)); + if (dstCapacity < LZ4F_compressBound_internal(srcSize, &(cctxPtr->prefs), cctxPtr->tmpInSize)) + return err0r(LZ4F_ERROR_dstMaxSize_tooSmall); + MEM_INIT(&cOptionsNull, 0, sizeof(cOptionsNull)); if (compressOptionsPtr == NULL) compressOptionsPtr = &cOptionsNull; /* complete tmp buffer */ @@ -887,8 +982,7 @@ typedef enum { dstage_getBlockHeader, dstage_storeBlockHeader, dstage_copyDirect, dstage_getBlockChecksum, dstage_getCBlock, dstage_storeCBlock, - dstage_decodeCBlock, dstage_decodeCBlock_intoDst, - dstage_decodeCBlock_intoTmp, dstage_flushOut, + dstage_flushOut, dstage_getSuffix, dstage_storeSuffix, dstage_getSFrameSize, dstage_storeSFrameSize, dstage_skipSkippable @@ -924,7 +1018,7 @@ struct LZ4F_dctx_s { */ LZ4F_errorCode_t LZ4F_createDecompressionContext(LZ4F_dctx** LZ4F_decompressionContextPtr, unsigned versionNumber) { - LZ4F_dctx* const dctx = (LZ4F_dctx*)ALLOCATOR(sizeof(LZ4F_dctx)); + LZ4F_dctx* const dctx = (LZ4F_dctx*)ALLOC_AND_ZERO(sizeof(LZ4F_dctx)); if (dctx==NULL) return err0r(LZ4F_ERROR_GENERIC); dctx->version = versionNumber; @@ -996,7 +1090,7 @@ static size_t LZ4F_decodeHeader(LZ4F_dctx* dctx, const void* src, size_t srcSize /* need to decode header to get frameInfo */ if (srcSize < minFHSize) return err0r(LZ4F_ERROR_frameHeader_incomplete); /* minimal frame header size */ - memset(&(dctx->frameInfo), 0, sizeof(dctx->frameInfo)); + MEM_INIT(&(dctx->frameInfo), 
0, sizeof(dctx->frameInfo)); /* special case : skippable frames */ if ((LZ4F_readLE32(srcPtr) & 0xFFFFFFF0U) == LZ4F_MAGIC_SKIPPABLE_START) { @@ -1129,24 +1223,31 @@ LZ4F_errorCode_t LZ4F_getFrameInfo(LZ4F_dctx* dctx, LZ4F_frameInfo_t* frameInfoP /* LZ4F_updateDict() : * only used for LZ4F_blockLinked mode */ -static void LZ4F_updateDict(LZ4F_dctx* dctx, const BYTE* dstPtr, size_t dstSize, const BYTE* dstPtr0, unsigned withinTmp) +static void LZ4F_updateDict(LZ4F_dctx* dctx, + const BYTE* dstPtr, size_t dstSize, const BYTE* dstBufferStart, + unsigned withinTmp) { if (dctx->dictSize==0) dctx->dict = (const BYTE*)dstPtr; /* priority to dictionary continuity */ - if (dctx->dict + dctx->dictSize == dstPtr) { /* dictionary continuity */ + if (dctx->dict + dctx->dictSize == dstPtr) { /* dictionary continuity, directly within dstBuffer */ dctx->dictSize += dstSize; return; } - if (dstPtr - dstPtr0 + dstSize >= 64 KB) { /* dstBuffer large enough to become dictionary */ - dctx->dict = (const BYTE*)dstPtr0; - dctx->dictSize = dstPtr - dstPtr0 + dstSize; + if (dstPtr - dstBufferStart + dstSize >= 64 KB) { /* history in dstBuffer becomes large enough to become dictionary */ + dctx->dict = (const BYTE*)dstBufferStart; + dctx->dictSize = dstPtr - dstBufferStart + dstSize; return; } - if ((withinTmp) && (dctx->dict == dctx->tmpOutBuffer)) { - /* assumption : dctx->dict + dctx->dictSize == dctx->tmpOut + dctx->tmpOutStart */ + assert(dstSize < 64 KB); /* if dstSize >= 64 KB, dictionary would be set into dstBuffer directly */ + + /* dstBuffer does not contain whole useful history (64 KB), so it must be saved within tmpOut */ + + if ((withinTmp) && (dctx->dict == dctx->tmpOutBuffer)) { /* continue history within tmpOutBuffer */ + /* withinTmp expectation : content of [dstPtr,dstSize] is same as [dict+dictSize,dstSize], so we just extend it */ + assert(dctx->dict + dctx->dictSize == dctx->tmpOut + dctx->tmpOutStart); dctx->dictSize += dstSize; return; } @@ -1167,7 +1268,7 @@ static void LZ4F_updateDict(LZ4F_dctx* dctx, const BYTE* dstPtr, size_t dstSize, if (dctx->dict == dctx->tmpOutBuffer) { /* copy dst into tmp to complete dict */ if (dctx->dictSize + dstSize > dctx->maxBufferSize) { /* tmp buffer not large enough */ - size_t const preserveSize = 64 KB - dstSize; /* note : dstSize < 64 KB */ + size_t const preserveSize = 64 KB - dstSize; memcpy(dctx->tmpOutBuffer, dctx->dict + dctx->dictSize - preserveSize, preserveSize); dctx->dictSize = preserveSize; } @@ -1177,7 +1278,7 @@ static void LZ4F_updateDict(LZ4F_dctx* dctx, const BYTE* dstPtr, size_t dstSize, } /* join dict & dest into tmp */ - { size_t preserveSize = 64 KB - dstSize; /* note : dstSize < 64 KB */ + { size_t preserveSize = 64 KB - dstSize; if (preserveSize > dctx->dictSize) preserveSize = dctx->dictSize; memcpy(dctx->tmpOutBuffer, dctx->dict + dctx->dictSize - preserveSize, preserveSize); memcpy(dctx->tmpOutBuffer + preserveSize, dstPtr, dstSize); @@ -1223,7 +1324,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, size_t nextSrcSizeHint = 1; - memset(&optionsNull, 0, sizeof(optionsNull)); + MEM_INIT(&optionsNull, 0, sizeof(optionsNull)); if (decompressOptionsPtr==NULL) decompressOptionsPtr = &optionsNull; *srcSizePtr = 0; *dstSizePtr = 0; @@ -1244,7 +1345,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, } dctx->tmpInSize = 0; if (srcEnd-srcPtr == 0) return minFHSize; /* 0-size input */ - dctx->tmpInTarget = minFHSize; /* minimum to attempt decode */ + dctx->tmpInTarget = minFHSize; /* minimum size to decode header */ dctx->dStage = 
dstage_storeFrameHeader; /* fall-through */ @@ -1272,11 +1373,11 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, if (bufferNeeded > dctx->maxBufferSize) { /* tmp buffers too small */ dctx->maxBufferSize = 0; /* ensure allocation will be re-attempted on next entry*/ FREEMEM(dctx->tmpIn); - dctx->tmpIn = (BYTE*)ALLOCATOR(dctx->maxBlockSize + 4 /* block checksum */); + dctx->tmpIn = (BYTE*)ALLOC(dctx->maxBlockSize + 4 /* block checksum */); if (dctx->tmpIn == NULL) return err0r(LZ4F_ERROR_allocation_failed); FREEMEM(dctx->tmpOutBuffer); - dctx->tmpOutBuffer= (BYTE*)ALLOCATOR(bufferNeeded); + dctx->tmpOutBuffer= (BYTE*)ALLOC(bufferNeeded); if (dctx->tmpOutBuffer== NULL) return err0r(LZ4F_ERROR_allocation_failed); dctx->maxBufferSize = bufferNeeded; @@ -1401,8 +1502,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, U32 const calcCRC = XXH32_digest(&dctx->blockChecksum); if (readCRC != calcCRC) return err0r(LZ4F_ERROR_blockChecksum_invalid); - } - } + } } dctx->dStage = dstage_getBlockHeader; /* new block */ break; @@ -1415,9 +1515,8 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, /* input large enough to read full block directly */ selectedIn = srcPtr; srcPtr += dctx->tmpInTarget; - dctx->dStage = dstage_decodeCBlock; - break; + if (0) /* jump over next block */ case dstage_storeCBlock: { size_t const wantedData = dctx->tmpInTarget - dctx->tmpInSize; size_t const inputLeft = (size_t)(srcEnd-srcPtr); @@ -1431,30 +1530,32 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, break; } selectedIn = dctx->tmpIn; - dctx->dStage = dstage_decodeCBlock; } - /* fall-through */ - /* At this stage, input is large enough to decode a block */ - case dstage_decodeCBlock: + /* At this stage, input is large enough to decode a block */ if (dctx->frameInfo.blockChecksumFlag) { dctx->tmpInTarget -= 4; + assert(selectedIn != NULL); /* selectedIn is defined at this stage (either srcPtr, or dctx->tmpIn) */ { U32 const readBlockCrc = LZ4F_readLE32(selectedIn + dctx->tmpInTarget); U32 const calcBlockCrc = XXH32(selectedIn, dctx->tmpInTarget, 0); if (readBlockCrc != calcBlockCrc) return err0r(LZ4F_ERROR_blockChecksum_invalid); } } - if ((size_t)(dstEnd-dstPtr) < dctx->maxBlockSize) /* not enough place into dst : decode into tmpOut */ - dctx->dStage = dstage_decodeCBlock_intoTmp; - else - dctx->dStage = dstage_decodeCBlock_intoDst; - break; - case dstage_decodeCBlock_intoDst: - { int const decodedSize = LZ4_decompress_safe_usingDict( + if ((size_t)(dstEnd-dstPtr) >= dctx->maxBlockSize) { + const char* dict = (const char*)dctx->dict; + size_t dictSize = dctx->dictSize; + int decodedSize; + if (dict && dictSize > 1 GB) { + /* the dictSize param is an int, avoid truncation / sign issues */ + dict += dictSize - 64 KB; + dictSize = 64 KB; + } + /* enough capacity in `dst` to decompress directly there */ + decodedSize = LZ4_decompress_safe_usingDict( (const char*)selectedIn, (char*)dstPtr, (int)dctx->tmpInTarget, (int)dctx->maxBlockSize, - (const char*)dctx->dict, (int)dctx->dictSize); + dict, (int)dictSize); if (decodedSize < 0) return err0r(LZ4F_ERROR_GENERIC); /* decompression failed */ if (dctx->frameInfo.contentChecksumFlag) XXH32_update(&(dctx->xxh), dstPtr, decodedSize); @@ -1470,7 +1571,6 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, break; } - case dstage_decodeCBlock_intoTmp: /* not enough place into dst : decode into tmpOut */ /* ensure enough place for tmpOut */ if (dctx->frameInfo.blockMode == LZ4F_blockLinked) { @@ -1483,14 +1583,21 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, } else { /* dict not within tmp */ size_t const reservedDictSpace = 
MIN(dctx->dictSize, 64 KB); dctx->tmpOut = dctx->tmpOutBuffer + reservedDictSpace; - } - } + } } /* Decode block */ - { int const decodedSize = LZ4_decompress_safe_usingDict( + { const char* dict = (const char*)dctx->dict; + size_t dictSize = dctx->dictSize; + int decodedSize; + if (dict && dictSize > 1 GB) { + /* the dictSize param is an int, avoid truncation / sign issues */ + dict += dictSize - 64 KB; + dictSize = 64 KB; + } + decodedSize = LZ4_decompress_safe_usingDict( (const char*)selectedIn, (char*)dctx->tmpOut, (int)dctx->tmpInTarget, (int)dctx->maxBlockSize, - (const char*)dctx->dict, (int)dctx->dictSize); + dict, (int)dictSize); if (decodedSize < 0) /* decompression failed */ return err0r(LZ4F_ERROR_decompressionFailed); if (dctx->frameInfo.contentChecksumFlag) @@ -1508,8 +1615,8 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, memcpy(dstPtr, dctx->tmpOut + dctx->tmpOutStart, sizeToCopy); /* dictionary management */ - if (dctx->frameInfo.blockMode==LZ4F_blockLinked) - LZ4F_updateDict(dctx, dstPtr, sizeToCopy, dstStart, 1); + if (dctx->frameInfo.blockMode == LZ4F_blockLinked) + LZ4F_updateDict(dctx, dstPtr, sizeToCopy, dstStart, 1 /*withinTmp*/); dctx->tmpOutStart += sizeToCopy; dstPtr += sizeToCopy; @@ -1518,8 +1625,9 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, dctx->dStage = dstage_getBlockHeader; /* get next block */ break; } + /* could not flush everything : stop there, just request a block header */ + doAnotherStage = 0; nextSrcSizeHint = BHSize; - doAnotherStage = 0; /* still some data to flush */ break; } @@ -1556,7 +1664,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, selectedIn = dctx->tmpIn; } /* if (dctx->dStage == dstage_storeSuffix) */ - /* case dstage_checkSuffix: */ /* no direct call, avoid scan-build warning */ + /* case dstage_checkSuffix: */ /* no direct entry, avoid initialization risks */ { U32 const readCRC = LZ4F_readLE32(selectedIn); U32 const resultCRC = XXH32_digest(&(dctx->xxh)); if (readCRC != resultCRC) @@ -1580,8 +1688,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, if (dctx->dStage == dstage_storeSFrameSize) case dstage_storeSFrameSize: - { - size_t const sizeToCopy = MIN(dctx->tmpInTarget - dctx->tmpInSize, + { size_t const sizeToCopy = MIN(dctx->tmpInTarget - dctx->tmpInSize, (size_t)(srcEnd - srcPtr) ); memcpy(dctx->header + dctx->tmpInSize, srcPtr, sizeToCopy); srcPtr += sizeToCopy; @@ -1595,7 +1702,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, selectedIn = dctx->header + 4; } /* if (dctx->dStage == dstage_storeSFrameSize) */ - /* case dstage_decodeSFrameSize: */ /* no direct access */ + /* case dstage_decodeSFrameSize: */ /* no direct entry */ { size_t const SFrameSize = LZ4F_readLE32(selectedIn); dctx->frameInfo.contentSize = SFrameSize; dctx->tmpInTarget = SFrameSize; @@ -1614,7 +1721,7 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, LZ4F_resetDecompressionContext(dctx); break; } - } + } /* switch (dctx->dStage) */ } /* while (doAnotherStage) */ /* preserve history within tmp whenever necessary */ @@ -1631,7 +1738,8 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, if (dctx->tmpOutSize > 64 KB) copySize = 0; if (copySize > preserveSize) copySize = preserveSize; - memcpy(dctx->tmpOutBuffer + preserveSize - copySize, oldDictEnd - copySize, copySize); + if (copySize > 0) + memcpy(dctx->tmpOutBuffer + preserveSize - copySize, oldDictEnd - copySize, copySize); dctx->dict = dctx->tmpOutBuffer; dctx->dictSize = preserveSize + dctx->tmpOutStart; @@ -1639,7 +1747,8 @@ size_t LZ4F_decompress(LZ4F_dctx* dctx, const BYTE* const oldDictEnd = dctx->dict + dctx->dictSize; size_t const 
newDictSize = MIN(dctx->dictSize, 64 KB); - memcpy(dctx->tmpOutBuffer, oldDictEnd - newDictSize, newDictSize); + if (newDictSize > 0) + memcpy(dctx->tmpOutBuffer, oldDictEnd - newDictSize, newDictSize); dctx->dict = dctx->tmpOutBuffer; dctx->dictSize = newDictSize; diff --git a/src/third-party/lz4/lz4frame.h b/src/third-party/lz4/lz4frame.h index 88a6513c420..75f1fd91b28 100644 --- a/src/third-party/lz4/lz4frame.h +++ b/src/third-party/lz4/lz4frame.h @@ -33,9 +33,10 @@ */ /* LZ4F is a stand-alone API to create LZ4-compressed frames - * conformant with specification v1.5.1. + * conformant with specification v1.6.1. * It also offers streaming capabilities. - * lz4.h is not required when using lz4frame.h. + * lz4.h is not required when using lz4frame.h, + * except to get constant such as LZ4_VERSION_NUMBER. * */ #ifndef LZ4F_H_09782039843 @@ -93,8 +94,8 @@ extern "C" { **************************************/ typedef size_t LZ4F_errorCode_t; -LZ4FLIB_API unsigned LZ4F_isError(LZ4F_errorCode_t code); /**< tells if a `LZ4F_errorCode_t` function result is an error code */ -LZ4FLIB_API const char* LZ4F_getErrorName(LZ4F_errorCode_t code); /**< return error code string; useful for debugging */ +LZ4FLIB_API unsigned LZ4F_isError(LZ4F_errorCode_t code); /**< tells when a function result is an error code */ +LZ4FLIB_API const char* LZ4F_getErrorName(LZ4F_errorCode_t code); /**< return error code string; for debugging */ /*-************************************ @@ -159,38 +160,48 @@ typedef LZ4F_contentChecksum_t contentChecksum_t; /*! LZ4F_frameInfo_t : * makes it possible to set or read frame parameters. - * It's not required to set all fields, as long as the structure was initially memset() to zero. - * For all fields, 0 sets it to default value */ + * Structure must be first init to 0, using memset() or LZ4F_INIT_FRAMEINFO, + * setting all parameters to default. 
+ * It's then possible to update selectively some parameters */ typedef struct { - LZ4F_blockSizeID_t blockSizeID; /* max64KB, max256KB, max1MB, max4MB ; 0 == default */ - LZ4F_blockMode_t blockMode; /* LZ4F_blockLinked, LZ4F_blockIndependent ; 0 == default */ - LZ4F_contentChecksum_t contentChecksumFlag; /* if enabled, frame is terminated with a 32-bits checksum of decompressed data ; 0 == disabled (default) */ - LZ4F_frameType_t frameType; /* read-only field : LZ4F_frame or LZ4F_skippableFrame */ - unsigned long long contentSize; /* Size of uncompressed content ; 0 == unknown */ - unsigned dictID; /* Dictionary ID, sent by the compressor to help decoder select the correct dictionary; 0 == no dictID provided */ - LZ4F_blockChecksum_t blockChecksumFlag; /* if enabled, each block is followed by a checksum of block's compressed data ; 0 == disabled (default) */ + LZ4F_blockSizeID_t blockSizeID; /* max64KB, max256KB, max1MB, max4MB; 0 == default */ + LZ4F_blockMode_t blockMode; /* LZ4F_blockLinked, LZ4F_blockIndependent; 0 == default */ + LZ4F_contentChecksum_t contentChecksumFlag; /* 1: frame terminated with 32-bit checksum of decompressed data; 0: disabled (default) */ + LZ4F_frameType_t frameType; /* read-only field : LZ4F_frame or LZ4F_skippableFrame */ + unsigned long long contentSize; /* Size of uncompressed content ; 0 == unknown */ + unsigned dictID; /* Dictionary ID, sent by compressor to help decoder select correct dictionary; 0 == no dictID provided */ + LZ4F_blockChecksum_t blockChecksumFlag; /* 1: each block followed by a checksum of block's compressed data; 0: disabled (default) */ } LZ4F_frameInfo_t; +#define LZ4F_INIT_FRAMEINFO { 0, 0, 0, 0, 0, 0, 0 } /* v1.8.3+ */ + /*! LZ4F_preferences_t : - * makes it possible to supply detailed compression parameters to the stream interface. - * It's not required to set all fields, as long as the structure was initially memset() to zero. + * makes it possible to supply advanced compression instructions to streaming interface. + * Structure must be first init to 0, using memset() or LZ4F_INIT_PREFERENCES, + * setting all parameters to default. * All reserved fields must be set to zero. */ typedef struct { LZ4F_frameInfo_t frameInfo; - int compressionLevel; /* 0 == default (fast mode); values above LZ4HC_CLEVEL_MAX count as LZ4HC_CLEVEL_MAX; values below 0 trigger "fast acceleration", proportional to value */ - unsigned autoFlush; /* 1 == always flush, to reduce usage of internal buffers */ - unsigned reserved[4]; /* must be zero for forward compatibility */ + int compressionLevel; /* 0: default (fast mode); values > LZ4HC_CLEVEL_MAX count as LZ4HC_CLEVEL_MAX; values < 0 trigger "fast acceleration" */ + unsigned autoFlush; /* 1: always flush; reduces usage of internal buffers */ + unsigned favorDecSpeed; /* 1: parser favors decompression speed vs compression ratio. Only works for high compression modes (>= LZ4HC_CLEVEL_OPT_MIN) */ /* v1.8.2+ */ + unsigned reserved[3]; /* must be zero for forward compatibility */ } LZ4F_preferences_t; -LZ4FLIB_API int LZ4F_compressionLevel_max(void); +#define LZ4F_INIT_PREFERENCES { LZ4F_INIT_FRAMEINFO, 0, 0, 0, { 0, 0, 0 } } /* v1.8.3+ */ /*-********************************* * Simple compression function ***********************************/ + +LZ4FLIB_API int LZ4F_compressionLevel_max(void); + /*! LZ4F_compressFrameBound() : - * Returns the maximum possible size of a frame compressed with LZ4F_compressFrame() given srcSize content and preferences. 
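/* Initialization sketch for the structures above (illustrative only) : with the init
 * macros there is no need to memset() the structure to zero first :
 *
 *   LZ4F_preferences_t prefs = LZ4F_INIT_PREFERENCES;
 *   prefs.compressionLevel = LZ4HC_CLEVEL_MAX;                          // high compression
 *   prefs.favorDecSpeed = 1;                                            // honored for levels >= LZ4HC_CLEVEL_OPT_MIN
 *   prefs.frameInfo.contentChecksumFlag = LZ4F_contentChecksumEnabled;  // append a frame checksum
 *
 * Every field left untouched keeps its default (zero) value. */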
- * Note : this result is only usable with LZ4F_compressFrame(), not with multi-segments compression. + * Returns the maximum possible compressed size with LZ4F_compressFrame() given srcSize and preferences. + * `preferencesPtr` is optional. It can be replaced by NULL, in which case, the function will assume default preferences. + * Note : this result is only usable with LZ4F_compressFrame(). + * It may also be used with LZ4F_compressUpdate() _if no flush() operation_ is performed. */ LZ4FLIB_API size_t LZ4F_compressFrameBound(size_t srcSize, const LZ4F_preferences_t* preferencesPtr); @@ -219,8 +230,9 @@ typedef struct { /*--- Resource Management ---*/ -#define LZ4F_VERSION 100 +#define LZ4F_VERSION 100 /* This number can be used to check for an incompatible API breaking change */ LZ4FLIB_API unsigned LZ4F_getVersion(void); + /*! LZ4F_createCompressionContext() : * The first thing to do is to create a compressionContext object, which will be used in all compression operations. * This is achieved using LZ4F_createCompressionContext(), which takes as argument a version. @@ -235,7 +247,7 @@ LZ4FLIB_API LZ4F_errorCode_t LZ4F_freeCompressionContext(LZ4F_cctx* cctx); /*---- Compression ----*/ -#define LZ4F_HEADER_SIZE_MAX 19 +#define LZ4F_HEADER_SIZE_MAX 19 /* LZ4 Frame header size can vary from 7 to 19 bytes */ /*! LZ4F_compressBegin() : * will write the frame header into dstBuffer. * dstCapacity must be >= LZ4F_HEADER_SIZE_MAX bytes. @@ -248,45 +260,58 @@ LZ4FLIB_API size_t LZ4F_compressBegin(LZ4F_cctx* cctx, const LZ4F_preferences_t* prefsPtr); /*! LZ4F_compressBound() : - * Provides dstCapacity given a srcSize to guarantee operation success in worst case situations. - * prefsPtr is optional : you can provide NULL as argument, preferences will be set to cover worst case scenario. - * Result is always the same for a srcSize and prefsPtr, so it can be trusted to size reusable buffers. - * When srcSize==0, LZ4F_compressBound() provides an upper bound for LZ4F_flush() and LZ4F_compressEnd() operations. + * Provides minimum dstCapacity required to guarantee compression success + * given a srcSize and preferences, covering worst case scenario. + * prefsPtr is optional : when NULL is provided, preferences will be set to cover worst case scenario. + * Estimation is valid for either LZ4F_compressUpdate(), LZ4F_flush() or LZ4F_compressEnd(), + * Estimation includes the possibility that internal buffer might already be filled by up to (blockSize-1) bytes. + * It also includes frame footer (ending + checksum), which would have to be generated by LZ4F_compressEnd(). + * Estimation doesn't include frame header, as it was already generated by LZ4F_compressBegin(). + * Result is always the same for a srcSize and prefsPtr, so it can be trusted to size reusable buffers. + * When srcSize==0, LZ4F_compressBound() provides an upper bound for LZ4F_flush() and LZ4F_compressEnd() operations. */ LZ4FLIB_API size_t LZ4F_compressBound(size_t srcSize, const LZ4F_preferences_t* prefsPtr); /*! LZ4F_compressUpdate() : - * LZ4F_compressUpdate() can be called repetitively to compress as much data as necessary. - * An important rule is that dstCapacity MUST be large enough to ensure operation success even in worst case situations. - * This value is provided by LZ4F_compressBound(). - * If this condition is not respected, LZ4F_compress() will fail (result is an errorCode). - * LZ4F_compressUpdate() doesn't guarantee error recovery. When an error occurs, compression context must be freed or resized. 
+ * LZ4F_compressUpdate() can be called repetitively to compress as much data as necessary. + * Important rule: dstCapacity MUST be large enough to ensure operation success even in worst case situations. + * This value is provided by LZ4F_compressBound(). + * If this condition is not respected, LZ4F_compress() will fail (result is an errorCode). + * LZ4F_compressUpdate() doesn't guarantee error recovery. + * When an error occurs, compression context must be freed or resized. * `cOptPtr` is optional : NULL can be provided, in which case all options are set to default. * @return : number of bytes written into `dstBuffer` (it can be zero, meaning input data was just buffered). * or an error code if it fails (which can be tested using LZ4F_isError()) */ -LZ4FLIB_API size_t LZ4F_compressUpdate(LZ4F_cctx* cctx, void* dstBuffer, size_t dstCapacity, const void* srcBuffer, size_t srcSize, const LZ4F_compressOptions_t* cOptPtr); +LZ4FLIB_API size_t LZ4F_compressUpdate(LZ4F_cctx* cctx, + void* dstBuffer, size_t dstCapacity, + const void* srcBuffer, size_t srcSize, + const LZ4F_compressOptions_t* cOptPtr); /*! LZ4F_flush() : - * When data must be generated and sent immediately, without waiting for a block to be completely filled, - * it's possible to call LZ4_flush(). It will immediately compress any data buffered within cctx. + * When data must be generated and sent immediately, without waiting for a block to be completely filled, + * it's possible to call LZ4_flush(). It will immediately compress any data buffered within cctx. * `dstCapacity` must be large enough to ensure the operation will be successful. * `cOptPtr` is optional : it's possible to provide NULL, all options will be set to default. - * @return : number of bytes written into dstBuffer (it can be zero, which means there was no data stored within cctx) + * @return : nb of bytes written into dstBuffer (can be zero, when there is no data stored within cctx) * or an error code if it fails (which can be tested using LZ4F_isError()) */ -LZ4FLIB_API size_t LZ4F_flush(LZ4F_cctx* cctx, void* dstBuffer, size_t dstCapacity, const LZ4F_compressOptions_t* cOptPtr); +LZ4FLIB_API size_t LZ4F_flush(LZ4F_cctx* cctx, + void* dstBuffer, size_t dstCapacity, + const LZ4F_compressOptions_t* cOptPtr); /*! LZ4F_compressEnd() : * To properly finish an LZ4 frame, invoke LZ4F_compressEnd(). * It will flush whatever data remained within `cctx` (like LZ4_flush()) * and properly finalize the frame, with an endMark and a checksum. * `cOptPtr` is optional : NULL can be provided, in which case all options will be set to default. - * @return : number of bytes written into dstBuffer (necessarily >= 4 (endMark), or 8 if optional frame checksum is enabled) + * @return : nb of bytes written into dstBuffer, necessarily >= 4 (endMark), * or an error code if it fails (which can be tested using LZ4F_isError()) * A successful call to LZ4F_compressEnd() makes `cctx` available again for another compression task. 
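 *
 * Illustrative streaming sequence (buffer names hypothetical : char* dst with dstCapacity
 * bytes; every returned size_t must be checked with LZ4F_isError(), and each call must be
 * given at least LZ4F_compressBound() bytes of remaining capacity) :
 *
 *   LZ4F_cctx* cctx;
 *   LZ4F_createCompressionContext(&cctx, LZ4F_VERSION);
 *   size_t pos = LZ4F_compressBegin(cctx, dst, dstCapacity, &prefs);               // frame header
 *   pos += LZ4F_compressUpdate(cctx, dst+pos, dstCapacity-pos, src, srcSize, NULL);
 *   pos += LZ4F_compressEnd(cctx, dst+pos, dstCapacity-pos, NULL);                 // endMark (+ optional checksum)
 *   LZ4F_freeCompressionContext(cctx);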
*/ -LZ4FLIB_API size_t LZ4F_compressEnd(LZ4F_cctx* cctx, void* dstBuffer, size_t dstCapacity, const LZ4F_compressOptions_t* cOptPtr); +LZ4FLIB_API size_t LZ4F_compressEnd(LZ4F_cctx* cctx, + void* dstBuffer, size_t dstCapacity, + const LZ4F_compressOptions_t* cOptPtr); /*-********************************* @@ -296,21 +321,21 @@ typedef struct LZ4F_dctx_s LZ4F_dctx; /* incomplete type */ typedef LZ4F_dctx* LZ4F_decompressionContext_t; /* compatibility with previous API versions */ typedef struct { - unsigned stableDst; /* pledge that at least 64KB+64Bytes of previously decompressed data remain unmodifed where it was decoded. This optimization skips storage operations in tmp buffers */ + unsigned stableDst; /* pledges that last 64KB decompressed data will remain available unmodified. This optimization skips storage operations in tmp buffers. */ unsigned reserved[3]; /* must be set to zero for forward compatibility */ } LZ4F_decompressOptions_t; /* Resource management */ -/*!LZ4F_createDecompressionContext() : - * Create an LZ4F_dctx object, to track all decompression operations. - * The version provided MUST be LZ4F_VERSION. - * The function provides a pointer to an allocated and initialized LZ4F_dctx object. - * The result is an errorCode, which can be tested using LZ4F_isError(). - * dctx memory can be released using LZ4F_freeDecompressionContext(); - * The result of LZ4F_freeDecompressionContext() is indicative of the current state of decompressionContext when being released. - * That is, it should be == 0 if decompression has been completed fully and correctly. +/*! LZ4F_createDecompressionContext() : + * Create an LZ4F_dctx object, to track all decompression operations. + * The version provided MUST be LZ4F_VERSION. + * The function provides a pointer to an allocated and initialized LZ4F_dctx object. + * The result is an errorCode, which can be tested using LZ4F_isError(). + * dctx memory can be released using LZ4F_freeDecompressionContext(); + * Result of LZ4F_freeDecompressionContext() indicates current state of decompressionContext when being released. + * That is, it should be == 0 if decompression has been completed fully and correctly. */ LZ4FLIB_API LZ4F_errorCode_t LZ4F_createDecompressionContext(LZ4F_dctx** dctxPtr, unsigned version); LZ4FLIB_API LZ4F_errorCode_t LZ4F_freeDecompressionContext(LZ4F_dctx* dctx); @@ -347,27 +372,32 @@ LZ4FLIB_API size_t LZ4F_getFrameInfo(LZ4F_dctx* dctx, /*! LZ4F_decompress() : * Call this function repetitively to regenerate compressed data from `srcBuffer`. - * The function will attempt to decode up to *srcSizePtr bytes from srcBuffer, into dstBuffer of capacity *dstSizePtr. + * The function will read up to *srcSizePtr bytes from srcBuffer, + * and decompress data into dstBuffer, of capacity *dstSizePtr. * - * The number of bytes regenerated into dstBuffer is provided within *dstSizePtr (necessarily <= original value). + * The nb of bytes consumed from srcBuffer will be written into *srcSizePtr (necessarily <= original value). + * The nb of bytes decompressed into dstBuffer will be written into *dstSizePtr (necessarily <= original value). * - * The number of bytes consumed from srcBuffer is provided within *srcSizePtr (necessarily <= original value). - * Number of bytes consumed can be < number of bytes provided. - * It typically happens when dstBuffer is not large enough to contain all decoded data. + * The function does not necessarily read all input bytes, so always check value in *srcSizePtr. 
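 *
 * (Illustrative decode loop, names hypothetical : inPtr/inRemaining track the compressed
 *  input, outBuf/outCapacity receive decoded data, consumeOutput() is a caller-defined sink.)
 *
 *   LZ4F_dctx* dctx;
 *   LZ4F_createDecompressionContext(&dctx, LZ4F_VERSION);
 *   size_t hint = 1;
 *   while (hint != 0 && !LZ4F_isError(hint) && inRemaining > 0) {
 *       size_t dstSize = outCapacity, srcSize = inRemaining;
 *       hint = LZ4F_decompress(dctx, outBuf, &dstSize, inPtr, &srcSize, NULL);
 *       inPtr += srcSize; inRemaining -= srcSize;    // re-present unconsumed input next pass
 *       consumeOutput(outBuf, dstSize);              // flush whatever was produced
 *   }
 *   LZ4F_freeDecompressionContext(dctx);
 *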
* Unconsumed source data must be presented again in subsequent invocations. * - * `dstBuffer` content is expected to be flushed between each invocation, as its content will be overwritten. - * `dstBuffer` itself can be changed at will between each consecutive function invocation. + * `dstBuffer` can freely change between each consecutive function invocation. + * `dstBuffer` content will be overwritten. * * @return : an hint of how many `srcSize` bytes LZ4F_decompress() expects for next call. * Schematically, it's the size of the current (or remaining) compressed block + header of next block. * Respecting the hint provides some small speed benefit, because it skips intermediate buffers. * This is just a hint though, it's always possible to provide any srcSize. + * * When a frame is fully decoded, @return will be 0 (no more data expected). + * When provided with more bytes than necessary to decode a frame, + * LZ4F_decompress() will stop reading exactly at end of current frame, and @return 0. + * * If decompression failed, @return is an error code, which can be tested using LZ4F_isError(). + * After a decompression error, the `dctx` context is not resumable. + * Use LZ4F_resetDecompressionContext() to return to clean state. * * After a frame is fully decoded, dctx can be used again to decompress another frame. - * After a decompression error, use LZ4F_resetDecompressionContext() before re-using dctx, to return to clean state. */ LZ4FLIB_API size_t LZ4F_decompress(LZ4F_dctx* dctx, void* dstBuffer, size_t* dstSizePtr, @@ -375,11 +405,11 @@ LZ4FLIB_API size_t LZ4F_decompress(LZ4F_dctx* dctx, const LZ4F_decompressOptions_t* dOptPtr); -/*! LZ4F_resetDecompressionContext() : v1.8.0 +/*! LZ4F_resetDecompressionContext() : added in v1.8.0 * In case of an error, the context is left in "undefined" state. * In which case, it's necessary to reset it, before re-using it. - * This method can also be used to abruptly stop an unfinished decompression, - * and start a new one using the same context. */ + * This method can also be used to abruptly stop any unfinished decompression, + * and start a new one using same context resources. */ LZ4FLIB_API void LZ4F_resetDecompressionContext(LZ4F_dctx* dctx); /* always successful */ @@ -389,3 +419,123 @@ LZ4FLIB_API void LZ4F_resetDecompressionContext(LZ4F_dctx* dctx); /* always su #endif #endif /* LZ4F_H_09782039843 */ + +#if defined(LZ4F_STATIC_LINKING_ONLY) && !defined(LZ4F_H_STATIC_09782039843) +#define LZ4F_H_STATIC_09782039843 + +#if defined (__cplusplus) +extern "C" { +#endif + +/* These declarations are not stable and may change in the future. They are + * therefore only safe to depend on when the caller is statically linked + * against the library. To access their declarations, define + * LZ4F_STATIC_LINKING_ONLY. + * + * There is a further protection mechanism where these symbols aren't published + * into shared/dynamic libraries. You can override this behavior and force + * them to be published by defining LZ4F_PUBLISH_STATIC_FUNCTIONS. Use at + * your own risk. 
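 *
 * Typical opt-in from a statically linked consumer (sketch) :
 *
 *   #define LZ4F_STATIC_LINKING_ONLY
 *   #include "lz4frame.h"
 *
 *   LZ4F_errorCodes const code = LZ4F_getErrorCode(result);  // stable enum for error inspection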
+ */ +#ifdef LZ4F_PUBLISH_STATIC_FUNCTIONS +#define LZ4FLIB_STATIC_API LZ4FLIB_API +#else +#define LZ4FLIB_STATIC_API +#endif + + +/* --- Error List --- */ +#define LZ4F_LIST_ERRORS(ITEM) \ + ITEM(OK_NoError) \ + ITEM(ERROR_GENERIC) \ + ITEM(ERROR_maxBlockSize_invalid) \ + ITEM(ERROR_blockMode_invalid) \ + ITEM(ERROR_contentChecksumFlag_invalid) \ + ITEM(ERROR_compressionLevel_invalid) \ + ITEM(ERROR_headerVersion_wrong) \ + ITEM(ERROR_blockChecksum_invalid) \ + ITEM(ERROR_reservedFlag_set) \ + ITEM(ERROR_allocation_failed) \ + ITEM(ERROR_srcSize_tooLarge) \ + ITEM(ERROR_dstMaxSize_tooSmall) \ + ITEM(ERROR_frameHeader_incomplete) \ + ITEM(ERROR_frameType_unknown) \ + ITEM(ERROR_frameSize_wrong) \ + ITEM(ERROR_srcPtr_wrong) \ + ITEM(ERROR_decompressionFailed) \ + ITEM(ERROR_headerChecksum_invalid) \ + ITEM(ERROR_contentChecksum_invalid) \ + ITEM(ERROR_frameDecoding_alreadyStarted) \ + ITEM(ERROR_maxCode) + +#define LZ4F_GENERATE_ENUM(ENUM) LZ4F_##ENUM, + +/* enum list is exposed, to handle specific errors */ +typedef enum { LZ4F_LIST_ERRORS(LZ4F_GENERATE_ENUM) } LZ4F_errorCodes; + +LZ4FLIB_STATIC_API LZ4F_errorCodes LZ4F_getErrorCode(size_t functionResult); + + + +/********************************** + * Bulk processing dictionary API + *********************************/ +typedef struct LZ4F_CDict_s LZ4F_CDict; + +/*! LZ4_createCDict() : + * When compressing multiple messages / blocks with the same dictionary, it's recommended to load it just once. + * LZ4_createCDict() will create a digested dictionary, ready to start future compression operations without startup delay. + * LZ4_CDict can be created once and shared by multiple threads concurrently, since its usage is read-only. + * `dictBuffer` can be released after LZ4_CDict creation, since its content is copied within CDict */ +LZ4FLIB_STATIC_API LZ4F_CDict* LZ4F_createCDict(const void* dictBuffer, size_t dictSize); +LZ4FLIB_STATIC_API void LZ4F_freeCDict(LZ4F_CDict* CDict); + + +/*! LZ4_compressFrame_usingCDict() : + * Compress an entire srcBuffer into a valid LZ4 frame using a digested Dictionary. + * cctx must point to a context created by LZ4F_createCompressionContext(). + * If cdict==NULL, compress without a dictionary. + * dstBuffer MUST be >= LZ4F_compressFrameBound(srcSize, preferencesPtr). + * If this condition is not respected, function will fail (@return an errorCode). + * The LZ4F_preferences_t structure is optional : you may provide NULL as argument, + * but it's not recommended, as it's the only way to provide dictID in the frame header. + * @return : number of bytes written into dstBuffer. + * or an error code if it fails (can be tested using LZ4F_isError()) */ +LZ4FLIB_STATIC_API size_t LZ4F_compressFrame_usingCDict( + LZ4F_cctx* cctx, + void* dst, size_t dstCapacity, + const void* src, size_t srcSize, + const LZ4F_CDict* cdict, + const LZ4F_preferences_t* preferencesPtr); + + +/*! LZ4F_compressBegin_usingCDict() : + * Inits streaming dictionary compression, and writes the frame header into dstBuffer. + * dstCapacity must be >= LZ4F_HEADER_SIZE_MAX bytes. + * `prefsPtr` is optional : you may provide NULL as argument, + * however, it's the only way to provide dictID in the frame header. + * @return : number of bytes written into dstBuffer for the header, + * or an error code (which can be tested using LZ4F_isError()) */ +LZ4FLIB_STATIC_API size_t LZ4F_compressBegin_usingCDict( + LZ4F_cctx* cctx, + void* dstBuffer, size_t dstCapacity, + const LZ4F_CDict* cdict, + const LZ4F_preferences_t* prefsPtr); + + +/*! 
LZ4F_decompress_usingDict() : + * Same as LZ4F_decompress(), using a predefined dictionary. + * Dictionary is used "in place", without any preprocessing. + * It must remain accessible throughout the entire frame decoding. */ +LZ4FLIB_STATIC_API size_t LZ4F_decompress_usingDict( + LZ4F_dctx* dctxPtr, + void* dstBuffer, size_t* dstSizePtr, + const void* srcBuffer, size_t* srcSizePtr, + const void* dict, size_t dictSize, + const LZ4F_decompressOptions_t* decompressOptionsPtr); + +#if defined (__cplusplus) +} +#endif + +#endif /* defined(LZ4F_STATIC_LINKING_ONLY) && !defined(LZ4F_H_STATIC_09782039843) */ diff --git a/src/third-party/lz4/lz4frame_static.h b/src/third-party/lz4/lz4frame_static.h index 1899f8e4662..925a2c5c330 100644 --- a/src/third-party/lz4/lz4frame_static.h +++ b/src/third-party/lz4/lz4frame_static.h @@ -36,108 +36,12 @@ #ifndef LZ4FRAME_STATIC_H_0398209384 #define LZ4FRAME_STATIC_H_0398209384 -#if defined (__cplusplus) -extern "C" { -#endif - -/* lz4frame_static.h should be used solely in the context of static linking. - * It contains definitions which are not stable and may change in the future. - * Never use it in the context of DLL linking. +/* The declarations that formerly were made here have been merged into + * lz4frame.h, protected by the LZ4F_STATIC_LINKING_ONLY macro. Going forward, + * it is recommended to simply include that header directly. */ - -/* --- Dependency --- */ +#define LZ4F_STATIC_LINKING_ONLY #include "lz4frame.h" - -/* --- Error List --- */ -#define LZ4F_LIST_ERRORS(ITEM) \ - ITEM(OK_NoError) \ - ITEM(ERROR_GENERIC) \ - ITEM(ERROR_maxBlockSize_invalid) \ - ITEM(ERROR_blockMode_invalid) \ - ITEM(ERROR_contentChecksumFlag_invalid) \ - ITEM(ERROR_compressionLevel_invalid) \ - ITEM(ERROR_headerVersion_wrong) \ - ITEM(ERROR_blockChecksum_invalid) \ - ITEM(ERROR_reservedFlag_set) \ - ITEM(ERROR_allocation_failed) \ - ITEM(ERROR_srcSize_tooLarge) \ - ITEM(ERROR_dstMaxSize_tooSmall) \ - ITEM(ERROR_frameHeader_incomplete) \ - ITEM(ERROR_frameType_unknown) \ - ITEM(ERROR_frameSize_wrong) \ - ITEM(ERROR_srcPtr_wrong) \ - ITEM(ERROR_decompressionFailed) \ - ITEM(ERROR_headerChecksum_invalid) \ - ITEM(ERROR_contentChecksum_invalid) \ - ITEM(ERROR_frameDecoding_alreadyStarted) \ - ITEM(ERROR_maxCode) - -#define LZ4F_GENERATE_ENUM(ENUM) LZ4F_##ENUM, - -/* enum list is exposed, to handle specific errors */ -typedef enum { LZ4F_LIST_ERRORS(LZ4F_GENERATE_ENUM) } LZ4F_errorCodes; - -LZ4F_errorCodes LZ4F_getErrorCode(size_t functionResult); - - - -/********************************** - * Bulk processing dictionary API - *********************************/ -typedef struct LZ4F_CDict_s LZ4F_CDict; - -/*! LZ4_createCDict() : - * When compressing multiple messages / blocks with the same dictionary, it's recommended to load it just once. - * LZ4_createCDict() will create a digested dictionary, ready to start future compression operations without startup delay. - * LZ4_CDict can be created once and shared by multiple threads concurrently, since its usage is read-only. - * `dictBuffer` can be released after LZ4_CDict creation, since its content is copied within CDict */ -LZ4F_CDict* LZ4F_createCDict(const void* dictBuffer, size_t dictSize); -void LZ4F_freeCDict(LZ4F_CDict* CDict); - - -/*! LZ4_compressFrame_usingCDict() : - * Compress an entire srcBuffer into a valid LZ4 frame using a digested Dictionary. - * If cdict==NULL, compress without a dictionary. - * dstBuffer MUST be >= LZ4F_compressFrameBound(srcSize, preferencesPtr). 
- * If this condition is not respected, function will fail (@return an errorCode). - * The LZ4F_preferences_t structure is optional : you may provide NULL as argument, - * but it's not recommended, as it's the only way to provide dictID in the frame header. - * @return : number of bytes written into dstBuffer. - * or an error code if it fails (can be tested using LZ4F_isError()) */ -size_t LZ4F_compressFrame_usingCDict(void* dst, size_t dstCapacity, - const void* src, size_t srcSize, - const LZ4F_CDict* cdict, - const LZ4F_preferences_t* preferencesPtr); - - -/*! LZ4F_compressBegin_usingCDict() : - * Inits streaming dictionary compression, and writes the frame header into dstBuffer. - * dstCapacity must be >= LZ4F_HEADER_SIZE_MAX bytes. - * `prefsPtr` is optional : you may provide NULL as argument, - * however, it's the only way to provide dictID in the frame header. - * @return : number of bytes written into dstBuffer for the header, - * or an error code (which can be tested using LZ4F_isError()) */ -size_t LZ4F_compressBegin_usingCDict(LZ4F_cctx* cctx, - void* dstBuffer, size_t dstCapacity, - const LZ4F_CDict* cdict, - const LZ4F_preferences_t* prefsPtr); - - -/*! LZ4F_decompress_usingDict() : - * Same as LZ4F_decompress(), using a predefined dictionary. - * Dictionary is used "in place", without any preprocessing. - * It must remain accessible throughout the entire frame decoding. */ -size_t LZ4F_decompress_usingDict(LZ4F_dctx* dctxPtr, - void* dstBuffer, size_t* dstSizePtr, - const void* srcBuffer, size_t* srcSizePtr, - const void* dict, size_t dictSize, - const LZ4F_decompressOptions_t* decompressOptionsPtr); - - -#if defined (__cplusplus) -} -#endif - #endif /* LZ4FRAME_STATIC_H_0398209384 */ diff --git a/src/third-party/lz4/lz4hc.c b/src/third-party/lz4/lz4hc.c index d7f8d23b597..e913ee7b323 100644 --- a/src/third-party/lz4/lz4hc.c +++ b/src/third-party/lz4/lz4hc.c @@ -49,6 +49,7 @@ /*=== Dependency ===*/ +#define LZ4_HC_STATIC_LINKING_ONLY #include "lz4hc.h" @@ -66,6 +67,7 @@ /*=== Constants ===*/ #define OPTIMAL_ML (int)((ML_MASK-1)+MINMATCH) +#define LZ4_OPT_NUM (1<<12) /*=== Macros ===*/ @@ -77,21 +79,33 @@ static U32 LZ4HC_hashPtr(const void* ptr) { return HASH_FUNCTION(LZ4_read32(ptr)); } +/*=== Enums ===*/ +typedef enum { noDictCtx, usingDictCtx } dictCtx_directive; /************************************** * HC Compression **************************************/ -static void LZ4HC_init (LZ4HC_CCtx_internal* hc4, const BYTE* start) +static void LZ4HC_clearTables (LZ4HC_CCtx_internal* hc4) { MEM_INIT((void*)hc4->hashTable, 0, sizeof(hc4->hashTable)); MEM_INIT(hc4->chainTable, 0xFF, sizeof(hc4->chainTable)); - hc4->nextToUpdate = 64 KB; - hc4->base = start - 64 KB; +} + +static void LZ4HC_init (LZ4HC_CCtx_internal* hc4, const BYTE* start) +{ + uptrval startingOffset = hc4->end - hc4->base; + if (startingOffset > 1 GB) { + LZ4HC_clearTables(hc4); + startingOffset = 0; + } + startingOffset += 64 KB; + hc4->nextToUpdate = (U32) startingOffset; + hc4->base = start - startingOffset; hc4->end = start; - hc4->dictBase = start - 64 KB; - hc4->dictLimit = 64 KB; - hc4->lowLimit = 64 KB; + hc4->dictBase = start - startingOffset; + hc4->dictLimit = (U32) startingOffset; + hc4->lowLimit = (U32) startingOffset; } @@ -116,56 +130,80 @@ LZ4_FORCE_INLINE void LZ4HC_Insert (LZ4HC_CCtx_internal* hc4, const BYTE* ip) hc4->nextToUpdate = target; } - -LZ4_FORCE_INLINE int LZ4HC_InsertAndFindBestMatch (LZ4HC_CCtx_internal* const hc4, /* Index table will be updated */ - const BYTE* const ip, const 
BYTE* const iLimit, - const BYTE** matchpos, - const int maxNbAttempts) +/** LZ4HC_countBack() : + * @return : negative value, nb of common bytes before ip/match */ +LZ4_FORCE_INLINE +int LZ4HC_countBack(const BYTE* const ip, const BYTE* const match, + const BYTE* const iMin, const BYTE* const mMin) { - U16* const chainTable = hc4->chainTable; - U32* const HashTable = hc4->hashTable; - const BYTE* const base = hc4->base; - const BYTE* const dictBase = hc4->dictBase; - const U32 dictLimit = hc4->dictLimit; - const U32 lowLimit = (hc4->lowLimit + 64 KB > (U32)(ip-base)) ? hc4->lowLimit : (U32)(ip - base) - (64 KB - 1); - U32 matchIndex; - int nbAttempts = maxNbAttempts; - size_t ml = 0; + int back = 0; + int const min = (int)MAX(iMin - ip, mMin - match); + assert(min <= 0); + assert(ip >= iMin); assert((size_t)(ip-iMin) < (1U<<31)); + assert(match >= mMin); assert((size_t)(match - mMin) < (1U<<31)); + while ( (back > min) + && (ip[back-1] == match[back-1]) ) + back--; + return back; +} - /* HC4 match finder */ - LZ4HC_Insert(hc4, ip); - matchIndex = HashTable[LZ4HC_hashPtr(ip)]; +/* LZ4HC_countPattern() : + * pattern32 must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!) */ +static unsigned +LZ4HC_countPattern(const BYTE* ip, const BYTE* const iEnd, U32 const pattern32) +{ + const BYTE* const iStart = ip; + reg_t const pattern = (sizeof(pattern)==8) ? (reg_t)pattern32 + (((reg_t)pattern32) << 32) : pattern32; + + while (likely(ip < iEnd-(sizeof(pattern)-1))) { + reg_t const diff = LZ4_read_ARCH(ip) ^ pattern; + if (!diff) { ip+=sizeof(pattern); continue; } + ip += LZ4_NbCommonBytes(diff); + return (unsigned)(ip - iStart); + } - while ((matchIndex>=lowLimit) && (nbAttempts)) { - nbAttempts--; - if (matchIndex >= dictLimit) { - const BYTE* const match = base + matchIndex; - if ( (*(match+ml) == *(ip+ml)) /* can be longer */ - && (LZ4_read32(match) == LZ4_read32(ip)) ) - { - size_t const mlt = LZ4_count(ip+MINMATCH, match+MINMATCH, iLimit) + MINMATCH; - if (mlt > ml) { ml = mlt; *matchpos = match; } - } - } else { - const BYTE* const match = dictBase + matchIndex; - if (LZ4_read32(match) == LZ4_read32(ip)) { - size_t mlt; - const BYTE* vLimit = ip + (dictLimit - matchIndex); - if (vLimit > iLimit) vLimit = iLimit; - mlt = LZ4_count(ip+MINMATCH, match+MINMATCH, vLimit) + MINMATCH; - if ((ip+mlt == vLimit) && (vLimit < iLimit)) - mlt += LZ4_count(ip+mlt, base+dictLimit, iLimit); - if (mlt > ml) { ml = mlt; *matchpos = base + matchIndex; } /* virtual matchpos */ - } + if (LZ4_isLittleEndian()) { + reg_t patternByte = pattern; + while ((ip>= 8; + } + } else { /* big endian */ + U32 bitOffset = (sizeof(pattern)*8) - 8; + while (ip < iEnd) { + BYTE const byte = (BYTE)(pattern >> bitOffset); + if (*ip != byte) break; + ip ++; bitOffset -= 8; } - matchIndex -= DELTANEXTU16(chainTable, matchIndex); } - return (int)ml; + return (unsigned)(ip - iStart); +} + +/* LZ4HC_reverseCountPattern() : + * pattern must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!) 
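 * Illustrative note : for input such as "abababab...", the 4-byte sample "abab" is a
 * 2-byte pattern repeated twice; LZ4HC_countPattern() measures how far that repetition
 * extends forward from a position and LZ4HC_reverseCountPattern() how far it extends
 * backward, which lets the pattern analysis in LZ4HC_InsertAndGetWiderMatch() jump
 * across long repetitive segments instead of walking every chain entry.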
+ * read using natural platform endianess */ +static unsigned +LZ4HC_reverseCountPattern(const BYTE* ip, const BYTE* const iLow, U32 pattern) +{ + const BYTE* const iStart = ip; + + while (likely(ip >= iLow+4)) { + if (LZ4_read32(ip-4) != pattern) break; + ip -= 4; + } + { const BYTE* bytePtr = (const BYTE*)(&pattern) + 3; /* works for any endianess */ + while (likely(ip>iLow)) { + if (ip[-1] != *bytePtr) break; + ip--; bytePtr--; + } } + return (unsigned)(iStart - ip); } +typedef enum { rep_untested, rep_not, rep_confirmed } repeat_state_e; +typedef enum { favorCompressionRatio=0, favorDecompressionSpeed } HCfavor_e; -LZ4_FORCE_INLINE int LZ4HC_InsertAndGetWiderMatch ( +LZ4_FORCE_INLINE int +LZ4HC_InsertAndGetWiderMatch ( LZ4HC_CCtx_internal* hc4, const BYTE* const ip, const BYTE* const iLowLimit, @@ -173,67 +211,191 @@ LZ4_FORCE_INLINE int LZ4HC_InsertAndGetWiderMatch ( int longest, const BYTE** matchpos, const BYTE** startpos, - const int maxNbAttempts) + const int maxNbAttempts, + const int patternAnalysis, + const int chainSwap, + const dictCtx_directive dict, + const HCfavor_e favorDecSpeed) { U16* const chainTable = hc4->chainTable; U32* const HashTable = hc4->hashTable; + const LZ4HC_CCtx_internal * const dictCtx = hc4->dictCtx; const BYTE* const base = hc4->base; const U32 dictLimit = hc4->dictLimit; const BYTE* const lowPrefixPtr = base + dictLimit; - const U32 lowLimit = (hc4->lowLimit + 64 KB > (U32)(ip-base)) ? hc4->lowLimit : (U32)(ip - base) - (64 KB - 1); + const U32 ipIndex = (U32)(ip - base); + const U32 lowestMatchIndex = (hc4->lowLimit + 64 KB > ipIndex) ? hc4->lowLimit : ipIndex - MAX_DISTANCE; const BYTE* const dictBase = hc4->dictBase; - int const delta = (int)(ip-iLowLimit); + int const lookBackLength = (int)(ip-iLowLimit); int nbAttempts = maxNbAttempts; + int matchChainPos = 0; + U32 const pattern = LZ4_read32(ip); U32 matchIndex; + U32 dictMatchIndex; + repeat_state_e repeat = rep_untested; + size_t srcPatternLength = 0; - + DEBUGLOG(7, "LZ4HC_InsertAndGetWiderMatch"); /* First Match */ LZ4HC_Insert(hc4, ip); matchIndex = HashTable[LZ4HC_hashPtr(ip)]; + DEBUGLOG(7, "First match at index %u / %u (lowestMatchIndex)", + matchIndex, lowestMatchIndex); - while ((matchIndex>=lowLimit) && (nbAttempts)) { + while ((matchIndex>=lowestMatchIndex) && (nbAttempts)) { + int matchLength=0; nbAttempts--; - if (matchIndex >= dictLimit) { + assert(matchIndex < ipIndex); + if (favorDecSpeed && (ipIndex - matchIndex < 8)) { + /* do nothing */ + } else if (matchIndex >= dictLimit) { /* within current Prefix */ const BYTE* const matchPtr = base + matchIndex; - if (*(iLowLimit + longest) == *(matchPtr - delta + longest)) { - if (LZ4_read32(matchPtr) == LZ4_read32(ip)) { - int mlt = MINMATCH + LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, iHighLimit); - int back = 0; - - while ( (ip+back > iLowLimit) - && (matchPtr+back > lowPrefixPtr) - && (ip[back-1] == matchPtr[back-1])) { - back--; - } - - mlt -= back; - - if (mlt > longest) { - longest = mlt; - *matchpos = matchPtr+back; - *startpos = ip+back; + assert(matchPtr >= lowPrefixPtr); + assert(matchPtr < ip); + assert(longest >= 1); + if (LZ4_read16(iLowLimit + longest - 1) == LZ4_read16(matchPtr - lookBackLength + longest - 1)) { + if (LZ4_read32(matchPtr) == pattern) { + int const back = lookBackLength ? 
LZ4HC_countBack(ip, matchPtr, iLowLimit, lowPrefixPtr) : 0; + matchLength = MINMATCH + LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, iHighLimit); + matchLength -= back; + if (matchLength > longest) { + longest = matchLength; + *matchpos = matchPtr + back; + *startpos = ip + back; } } } - } else { + } else { /* lowestMatchIndex <= matchIndex < dictLimit */ const BYTE* const matchPtr = dictBase + matchIndex; - if (LZ4_read32(matchPtr) == LZ4_read32(ip)) { - int mlt; - int back=0; + if (LZ4_read32(matchPtr) == pattern) { + const BYTE* const dictStart = dictBase + hc4->lowLimit; + int back = 0; const BYTE* vLimit = ip + (dictLimit - matchIndex); if (vLimit > iHighLimit) vLimit = iHighLimit; + matchLength = LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH; + if ((ip+matchLength == vLimit) && (vLimit < iHighLimit)) + matchLength += LZ4_count(ip+matchLength, lowPrefixPtr, iHighLimit); + back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictStart) : 0; + matchLength -= back; + if (matchLength > longest) { + longest = matchLength; + *matchpos = base + matchIndex + back; /* virtual pos, relative to ip, to retrieve offset */ + *startpos = ip + back; + } } } + + if (chainSwap && matchLength==longest) { /* better match => select a better chain */ + assert(lookBackLength==0); /* search forward only */ + if (matchIndex + longest <= ipIndex) { + U32 distanceToNextMatch = 1; + int pos; + for (pos = 0; pos <= longest - MINMATCH; pos++) { + U32 const candidateDist = DELTANEXTU16(chainTable, matchIndex + pos); + if (candidateDist > distanceToNextMatch) { + distanceToNextMatch = candidateDist; + matchChainPos = pos; + } } + if (distanceToNextMatch > 1) { + if (distanceToNextMatch > matchIndex) break; /* avoid overflow */ + matchIndex -= distanceToNextMatch; + continue; + } } } + + { U32 const distNextMatch = DELTANEXTU16(chainTable, matchIndex); + if (patternAnalysis && distNextMatch==1 && matchChainPos==0) { + U32 const matchCandidateIdx = matchIndex-1; + /* may be a repeated pattern */ + if (repeat == rep_untested) { + if ( ((pattern & 0xFFFF) == (pattern >> 16)) + & ((pattern & 0xFF) == (pattern >> 24)) ) { + repeat = rep_confirmed; + srcPatternLength = LZ4HC_countPattern(ip+sizeof(pattern), iHighLimit, pattern) + sizeof(pattern); + } else { + repeat = rep_not; + } } + if ( (repeat == rep_confirmed) + && (matchCandidateIdx >= dictLimit) ) { /* same segment only */ + const BYTE* const matchPtr = base + matchCandidateIdx; + if (LZ4_read32(matchPtr) == pattern) { /* good candidate */ + size_t const forwardPatternLength = LZ4HC_countPattern(matchPtr+sizeof(pattern), iHighLimit, pattern) + sizeof(pattern); + const BYTE* const lowestMatchPtr = (lowPrefixPtr + MAX_DISTANCE >= ip) ? 
lowPrefixPtr : ip - MAX_DISTANCE; + size_t const backLength = LZ4HC_reverseCountPattern(matchPtr, lowestMatchPtr, pattern); + size_t const currentSegmentLength = backLength + forwardPatternLength; + + if ( (currentSegmentLength >= srcPatternLength) /* current pattern segment large enough to contain full srcPatternLength */ + && (forwardPatternLength <= srcPatternLength) ) { /* haven't reached this position yet */ + matchIndex = matchCandidateIdx + (U32)forwardPatternLength - (U32)srcPatternLength; /* best position, full pattern, might be followed by more match */ + } else { + matchIndex = matchCandidateIdx - (U32)backLength; /* farthest position in current segment, will find a match of length currentSegmentLength + maybe some back */ + if (lookBackLength==0) { /* no back possible */ + size_t const maxML = MIN(currentSegmentLength, srcPatternLength); + if ((size_t)longest < maxML) { + assert(base + matchIndex < ip); + if (ip - (base+matchIndex) > MAX_DISTANCE) break; + assert(maxML < 2 GB); + longest = (int)maxML; + *matchpos = base + matchIndex; /* virtual pos, relative to ip, to retrieve offset */ + *startpos = ip; + } + { U32 const distToNextPattern = DELTANEXTU16(chainTable, matchIndex); + if (distToNextPattern > matchIndex) break; /* avoid overflow */ + matchIndex -= distToNextPattern; + } } } + continue; + } } + } } /* PA optimization */ + + /* follow current chain */ + matchIndex -= DELTANEXTU16(chainTable, matchIndex+matchChainPos); + + } /* while ((matchIndex>=lowestMatchIndex) && (nbAttempts)) */ + + if (dict == usingDictCtx && nbAttempts && ipIndex - lowestMatchIndex < MAX_DISTANCE) { + size_t const dictEndOffset = dictCtx->end - dictCtx->base; + assert(dictEndOffset <= 1 GB); + dictMatchIndex = dictCtx->hashTable[LZ4HC_hashPtr(ip)]; + matchIndex = dictMatchIndex + lowestMatchIndex - (U32)dictEndOffset; + while (ipIndex - matchIndex <= MAX_DISTANCE && nbAttempts--) { + const BYTE* const matchPtr = dictCtx->base + dictMatchIndex; + + if (LZ4_read32(matchPtr) == pattern) { + int mlt; + int back = 0; + const BYTE* vLimit = ip + (dictEndOffset - dictMatchIndex); + if (vLimit > iHighLimit) vLimit = iHighLimit; mlt = LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH; - if ((ip+mlt == vLimit) && (vLimit < iHighLimit)) - mlt += LZ4_count(ip+mlt, base+dictLimit, iHighLimit); - while ((ip+back > iLowLimit) && (matchIndex+back > lowLimit) && (ip[back-1] == matchPtr[back-1])) back--; + back = lookBackLength ? 
LZ4HC_countBack(ip, matchPtr, iLowLimit, dictCtx->base + dictCtx->dictLimit) : 0; mlt -= back; - if (mlt > longest) { longest = mlt; *matchpos = base + matchIndex + back; *startpos = ip+back; } + if (mlt > longest) { + longest = mlt; + *matchpos = base + matchIndex + back; + *startpos = ip + back; + } + } + + { U32 const nextOffset = DELTANEXTU16(dictCtx->chainTable, dictMatchIndex); + dictMatchIndex -= nextOffset; + matchIndex -= nextOffset; } } - matchIndex -= DELTANEXTU16(chainTable, matchIndex); } return longest; } +LZ4_FORCE_INLINE +int LZ4HC_InsertAndFindBestMatch(LZ4HC_CCtx_internal* const hc4, /* Index table will be updated */ + const BYTE* const ip, const BYTE* const iLimit, + const BYTE** matchpos, + const int maxNbAttempts, + const int patternAnalysis, + const dictCtx_directive dict) +{ + const BYTE* uselessPtr = ip; + /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos), + * but this won't be the case here, as we define iLowLimit==ip, + * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */ + return LZ4HC_InsertAndGetWiderMatch(hc4, ip, ip, iLimit, MINMATCH-1, matchpos, &uselessPtr, maxNbAttempts, patternAnalysis, 0 /*chainSwap*/, dict, favorCompressionRatio); +} + + typedef enum { noLimit = 0, @@ -241,10 +403,6 @@ typedef enum { limitedDestSize = 2, } limitedOutput_directive; -#ifndef LZ4HC_DEBUG -# define LZ4HC_DEBUG 0 -#endif - /* LZ4HC_encodeSequence() : * @return : 0 if ok, * 1 if buffer issue detected */ @@ -260,9 +418,21 @@ LZ4_FORCE_INLINE int LZ4HC_encodeSequence ( size_t length; BYTE* const token = (*op)++; -#if LZ4HC_DEBUG - printf("literal : %u -- match : %u -- offset : %u\n", - (U32)(*ip - *anchor), (U32)matchLength, (U32)(*ip-match)); +#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 6) + static const BYTE* start = NULL; + static U32 totalCost = 0; + U32 const pos = (start==NULL) ? 0 : (U32)(*anchor - start); + U32 const ll = (U32)(*ip - *anchor); + U32 const llAdd = (ll>=15) ? ((ll-15) / 255) + 1 : 0; + U32 const mlAdd = (matchLength>=19) ? 
((matchLength-19) / 255) + 1 : 0; + U32 const cost = 1 + llAdd + ll + 2 + mlAdd; + if (start==NULL) start = *anchor; /* only works for single segment */ + /* g_debuglog_enable = (pos >= 2228) & (pos <= 2262); */ + DEBUGLOG(6, "pos:%7u -- literals:%3u, match:%4i, offset:%5u, cost:%3u + %u", + pos, + (U32)(*ip - *anchor), matchLength, (U32)(*ip-match), + cost, totalCost); + totalCost += cost; #endif /* Encode Literal length */ @@ -282,9 +452,11 @@ LZ4_FORCE_INLINE int LZ4HC_encodeSequence ( *op += length; /* Encode Offset */ + assert( (*ip - match) <= MAX_DISTANCE ); /* note : consider providing offset as a value, rather than as a pointer difference */ LZ4_writeLE16(*op, (U16)(*ip-match)); *op += 2; /* Encode MatchLength */ + assert(matchLength >= MINMATCH); length = (size_t)(matchLength - MINMATCH); if ((limit) && (*op + (length >> 8) + (1 + LASTLITERALS) > oend)) return 1; /* Check output limit */ if (length >= ML_MASK) { @@ -304,21 +476,19 @@ LZ4_FORCE_INLINE int LZ4HC_encodeSequence ( return 0; } -/* btopt */ -#include "lz4opt.h" - - -static int LZ4HC_compress_hashChain ( +LZ4_FORCE_INLINE int LZ4HC_compress_hashChain ( LZ4HC_CCtx_internal* const ctx, const char* const source, char* const dest, int* srcSizePtr, int const maxOutputSize, unsigned maxNbAttempts, - limitedOutput_directive limit + const limitedOutput_directive limit, + const dictCtx_directive dict ) { const int inputSize = *srcSizePtr; + const int patternAnalysis = (maxNbAttempts > 128); /* levels 9+ */ const BYTE* ip = (const BYTE*) source; const BYTE* anchor = ip; @@ -330,55 +500,47 @@ static int LZ4HC_compress_hashChain ( BYTE* op = (BYTE*) dest; BYTE* oend = op + maxOutputSize; - int ml, ml2, ml3, ml0; + int ml0, ml, ml2, ml3; + const BYTE* start0; + const BYTE* ref0; const BYTE* ref = NULL; const BYTE* start2 = NULL; const BYTE* ref2 = NULL; const BYTE* start3 = NULL; const BYTE* ref3 = NULL; - const BYTE* start0; - const BYTE* ref0; /* init */ *srcSizePtr = 0; - if (limit == limitedDestSize && maxOutputSize < 1) return 0; /* Impossible to store anything */ - if ((U32)inputSize > (U32)LZ4_MAX_INPUT_SIZE) return 0; /* Unsupported input size, too large (or negative) */ - - ctx->end += inputSize; - if (limit == limitedDestSize) oend -= LASTLITERALS; /* Hack for support limitations LZ4 decompressor */ + if (limit == limitedDestSize) oend -= LASTLITERALS; /* Hack for support LZ4 format restriction */ if (inputSize < LZ4_minLength) goto _last_literals; /* Input too small, no compression (all literals) */ - ip++; - /* Main Loop */ - while (ip < mflimit) { - ml = LZ4HC_InsertAndFindBestMatch (ctx, ip, matchlimit, (&ref), maxNbAttempts); - if (!ml) { ip++; continue; } + while (ip <= mflimit) { + ml = LZ4HC_InsertAndFindBestMatch (ctx, ip, matchlimit, &ref, maxNbAttempts, patternAnalysis, dict); + if (ml encode ML1 */ optr = op; if (LZ4HC_encodeSequence(&ip, &op, &anchor, ml, ref, limit, oend)) goto _dest_overflow; continue; } - if (start0 < ip) { - if (start2 < ip + ml0) { /* empirical */ - ip = start0; - ref = ref0; - ml = ml0; - } - } + if (start0 < ip) { /* first match was skipped at least once */ + if (start2 < ip + ml0) { /* squeezing ML1 between ML0(original ML1) and ML2 */ + ip = start0; ref = ref0; ml = ml0; /* restore initial ML1 */ + } } /* Here, start0==ip */ if ((start2 - ip) < 3) { /* First Match too small : removed */ @@ -406,12 +568,15 @@ static int LZ4HC_compress_hashChain ( } /* Now, we have start2 = ip+new_ml, with new_ml = min(ml, OPTIMAL_ML=18) */ - if (start2 + ml2 < mflimit) - ml3 = 
LZ4HC_InsertAndGetWiderMatch(ctx, start2 + ml2 - 3, start2, matchlimit, ml2, &ref3, &start3, maxNbAttempts); - else + if (start2 + ml2 <= mflimit) { + ml3 = LZ4HC_InsertAndGetWiderMatch(ctx, + start2 + ml2 - 3, start2, matchlimit, ml2, &ref3, &start3, + maxNbAttempts, patternAnalysis, 0, dict, favorCompressionRatio); + } else { ml3 = ml2; + } - if (ml3 == ml2) { /* No better match : 2 sequences to encode */ + if (ml3 == ml2) { /* No better match => encode ML1 and ML2 */ /* ip & ref are known; Now for ml */ if (start2 < ip+ml) ml = (int)(start2 - ip); /* Now, encode 2 sequences */ @@ -456,11 +621,12 @@ static int LZ4HC_compress_hashChain ( } /* - * OK, now we have 3 ascending matches; let's write at least the first one - * ip & ref are known; Now for ml + * OK, now we have 3 ascending matches; + * let's write the first one ML1. + * ip & ref are known; Now decide ml. */ if (start2 < ip+ml) { - if ((start2 - ip) < (int)ML_MASK) { + if ((start2 - ip) < OPTIMAL_ML) { int correction; if (ml > OPTIMAL_ML) ml = OPTIMAL_ML; if (ip + ml > start2 + ml2 - MINMATCH) ml = (int)(start2 - ip) + ml2 - MINMATCH; @@ -477,14 +643,13 @@ static int LZ4HC_compress_hashChain ( optr = op; if (LZ4HC_encodeSequence(&ip, &op, &anchor, ml, ref, limit, oend)) goto _dest_overflow; - ip = start2; - ref = ref2; - ml = ml2; + /* ML2 becomes ML1 */ + ip = start2; ref = ref2; ml = ml2; - start2 = start3; - ref2 = ref3; - ml2 = ml3; + /* ML3 becomes ML2 */ + start2 = start3; ref2 = ref3; ml2 = ml3; + /* let's find a new ML3 */ goto _Search3; } @@ -527,12 +692,110 @@ static int LZ4HC_compress_hashChain ( return 0; } -static int LZ4HC_getSearchNum(int compressionLevel) + +static int LZ4HC_compress_optimal( LZ4HC_CCtx_internal* ctx, + const char* const source, char* dst, + int* srcSizePtr, int dstCapacity, + int const nbSearches, size_t sufficient_len, + const limitedOutput_directive limit, int const fullUpdate, + const dictCtx_directive dict, + HCfavor_e favorDecSpeed); + + +LZ4_FORCE_INLINE int LZ4HC_compress_generic_internal ( + LZ4HC_CCtx_internal* const ctx, + const char* const src, + char* const dst, + int* const srcSizePtr, + int const dstCapacity, + int cLevel, + const limitedOutput_directive limit, + const dictCtx_directive dict + ) +{ + typedef enum { lz4hc, lz4opt } lz4hc_strat_e; + typedef struct { + lz4hc_strat_e strat; + U32 nbSearches; + U32 targetLength; + } cParams_t; + static const cParams_t clTable[LZ4HC_CLEVEL_MAX+1] = { + { lz4hc, 2, 16 }, /* 0, unused */ + { lz4hc, 2, 16 }, /* 1, unused */ + { lz4hc, 2, 16 }, /* 2, unused */ + { lz4hc, 4, 16 }, /* 3 */ + { lz4hc, 8, 16 }, /* 4 */ + { lz4hc, 16, 16 }, /* 5 */ + { lz4hc, 32, 16 }, /* 6 */ + { lz4hc, 64, 16 }, /* 7 */ + { lz4hc, 128, 16 }, /* 8 */ + { lz4hc, 256, 16 }, /* 9 */ + { lz4opt, 96, 64 }, /*10==LZ4HC_CLEVEL_OPT_MIN*/ + { lz4opt, 512,128 }, /*11 */ + { lz4opt,16384,LZ4_OPT_NUM }, /* 12==LZ4HC_CLEVEL_MAX */ + }; + + DEBUGLOG(4, "LZ4HC_compress_generic(%p, %p, %d)", ctx, src, *srcSizePtr); + + if (limit == limitedDestSize && dstCapacity < 1) return 0; /* Impossible to store anything */ + if ((U32)*srcSizePtr > (U32)LZ4_MAX_INPUT_SIZE) return 0; /* Unsupported input size (too large or negative) */ + + ctx->end += *srcSizePtr; + if (cLevel < 1) cLevel = LZ4HC_CLEVEL_DEFAULT; /* note : convention is different from lz4frame, maybe something to review */ + cLevel = MIN(LZ4HC_CLEVEL_MAX, cLevel); + { cParams_t const cParam = clTable[cLevel]; + HCfavor_e const favor = ctx->favorDecSpeed ? 
favorDecompressionSpeed : favorCompressionRatio; + if (cParam.strat == lz4hc) + return LZ4HC_compress_hashChain(ctx, + src, dst, srcSizePtr, dstCapacity, + cParam.nbSearches, limit, dict); + assert(cParam.strat == lz4opt); + return LZ4HC_compress_optimal(ctx, + src, dst, srcSizePtr, dstCapacity, + cParam.nbSearches, cParam.targetLength, limit, + cLevel == LZ4HC_CLEVEL_MAX, /* ultra mode */ + dict, favor); + } +} + +static void LZ4HC_setExternalDict(LZ4HC_CCtx_internal* ctxPtr, const BYTE* newBlock); + +static int LZ4HC_compress_generic_noDictCtx ( + LZ4HC_CCtx_internal* const ctx, + const char* const src, + char* const dst, + int* const srcSizePtr, + int const dstCapacity, + int cLevel, + limitedOutput_directive limit + ) { - switch (compressionLevel) { - default: return 0; /* unused */ - case 11: return 128; - case 12: return 1<<10; + assert(ctx->dictCtx == NULL); + return LZ4HC_compress_generic_internal(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit, noDictCtx); +} + +static int LZ4HC_compress_generic_dictCtx ( + LZ4HC_CCtx_internal* const ctx, + const char* const src, + char* const dst, + int* const srcSizePtr, + int const dstCapacity, + int cLevel, + limitedOutput_directive limit + ) +{ + const size_t position = ctx->end - ctx->base - ctx->lowLimit; + assert(ctx->dictCtx != NULL); + if (position >= 64 KB) { + ctx->dictCtx = NULL; + return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit); + } else if (position == 0 && *srcSizePtr > 4 KB) { + memcpy(ctx, ctx->dictCtx, sizeof(LZ4HC_CCtx_internal)); + LZ4HC_setExternalDict(ctx, (const BYTE *)src); + ctx->compressionLevel = (short)cLevel; + return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit); + } else { + return LZ4HC_compress_generic_internal(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit, usingDictCtx); } } @@ -546,33 +809,21 @@ static int LZ4HC_compress_generic ( limitedOutput_directive limit ) { - if (cLevel < 1) cLevel = LZ4HC_CLEVEL_DEFAULT; /* note : convention is different from lz4frame, maybe to reconsider */ - if (cLevel > 9) { - if (limit == limitedDestSize) cLevel = 10; - switch (cLevel) { - case 10: - return LZ4HC_compress_hashChain(ctx, src, dst, srcSizePtr, dstCapacity, 1 << 12, limit); - case 11: - ctx->searchNum = LZ4HC_getSearchNum(cLevel); - return LZ4HC_compress_optimal(ctx, src, dst, *srcSizePtr, dstCapacity, limit, 128, 0); - default: - cLevel = 12; - /* fall-through */ - case 12: - ctx->searchNum = LZ4HC_getSearchNum(cLevel); - return LZ4HC_compress_optimal(ctx, src, dst, *srcSizePtr, dstCapacity, limit, LZ4_OPT_NUM, 1); - } + if (ctx->dictCtx == NULL) { + return LZ4HC_compress_generic_noDictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit); + } else { + return LZ4HC_compress_generic_dictCtx(ctx, src, dst, srcSizePtr, dstCapacity, cLevel, limit); } - return LZ4HC_compress_hashChain(ctx, src, dst, srcSizePtr, dstCapacity, 1 << (cLevel-1), limit); /* levels 1-9 */ } int LZ4_sizeofStateHC(void) { return sizeof(LZ4_streamHC_t); } -int LZ4_compress_HC_extStateHC (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel) +int LZ4_compress_HC_extStateHC_fastReset (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel) { LZ4HC_CCtx_internal* const ctx = &((LZ4_streamHC_t*)state)->internal_donotuse; if (((size_t)(state)&(sizeof(void*)-1)) != 0) return 0; /* Error : state is not aligned for pointers (32 or 64 bits) */ + 
LZ4_resetStreamHC_fast((LZ4_streamHC_t*)state, compressionLevel); LZ4HC_init (ctx, (const BYTE*)src); if (dstCapacity < LZ4_compressBound(srcSize)) return LZ4HC_compress_generic (ctx, src, dst, &srcSize, dstCapacity, compressionLevel, limitedOutput); @@ -580,10 +831,17 @@ int LZ4_compress_HC_extStateHC (void* state, const char* src, char* dst, int src return LZ4HC_compress_generic (ctx, src, dst, &srcSize, dstCapacity, compressionLevel, noLimit); } +int LZ4_compress_HC_extStateHC (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel) +{ + if (((size_t)(state)&(sizeof(void*)-1)) != 0) return 0; /* Error : state is not aligned for pointers (32 or 64 bits) */ + LZ4_resetStreamHC ((LZ4_streamHC_t*)state, compressionLevel); + return LZ4_compress_HC_extStateHC_fastReset(state, src, dst, srcSize, dstCapacity, compressionLevel); +} + int LZ4_compress_HC(const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel) { #if defined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1 - LZ4_streamHC_t* const statePtr = (LZ4_streamHC_t*)malloc(sizeof(LZ4_streamHC_t)); + LZ4_streamHC_t* const statePtr = (LZ4_streamHC_t*)ALLOC(sizeof(LZ4_streamHC_t)); #else LZ4_streamHC_t state; LZ4_streamHC_t* const statePtr = &state; @@ -596,11 +854,11 @@ int LZ4_compress_HC(const char* src, char* dst, int srcSize, int dstCapacity, in } /* LZ4_compress_HC_destSize() : - * currently, only compatible with Hash Chain implementation, - * hence limit compression level to LZ4HC_CLEVEL_OPT_MIN-1*/ + * only compatible with regular HC parser */ int LZ4_compress_HC_destSize(void* LZ4HC_Data, const char* source, char* dest, int* sourceSizePtr, int targetDestSize, int cLevel) { LZ4HC_CCtx_internal* const ctx = &((LZ4_streamHC_t*)LZ4HC_Data)->internal_donotuse; + LZ4_resetStreamHC((LZ4_streamHC_t*)LZ4HC_Data, cLevel); LZ4HC_init(ctx, (const BYTE*) source); return LZ4HC_compress_generic(ctx, source, dest, sourceSizePtr, targetDestSize, cLevel, limitedDestSize); } @@ -611,8 +869,15 @@ int LZ4_compress_HC_destSize(void* LZ4HC_Data, const char* source, char* dest, i * Streaming Functions **************************************/ /* allocation */ -LZ4_streamHC_t* LZ4_createStreamHC(void) { return (LZ4_streamHC_t*)malloc(sizeof(LZ4_streamHC_t)); } -int LZ4_freeStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr) { +LZ4_streamHC_t* LZ4_createStreamHC(void) { + LZ4_streamHC_t* const LZ4_streamHCPtr = (LZ4_streamHC_t*)ALLOC(sizeof(LZ4_streamHC_t)); + if (LZ4_streamHCPtr==NULL) return NULL; + LZ4_resetStreamHC(LZ4_streamHCPtr, LZ4HC_CLEVEL_DEFAULT); + return LZ4_streamHCPtr; +} + +int LZ4_freeStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr) { + DEBUGLOG(4, "LZ4_freeStreamHC(%p)", LZ4_streamHCPtr); if (!LZ4_streamHCPtr) return 0; /* support free on NULL */ free(LZ4_streamHCPtr); return 0; @@ -623,47 +888,61 @@ int LZ4_freeStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr) { void LZ4_resetStreamHC (LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel) { LZ4_STATIC_ASSERT(sizeof(LZ4HC_CCtx_internal) <= sizeof(size_t) * LZ4_STREAMHCSIZE_SIZET); /* if compilation fails here, LZ4_STREAMHCSIZE must be increased */ + DEBUGLOG(4, "LZ4_resetStreamHC(%p, %d)", LZ4_streamHCPtr, compressionLevel); + LZ4_streamHCPtr->internal_donotuse.end = (const BYTE *)(ptrdiff_t)-1; + LZ4_streamHCPtr->internal_donotuse.base = NULL; + LZ4_streamHCPtr->internal_donotuse.dictCtx = NULL; + LZ4_streamHCPtr->internal_donotuse.favorDecSpeed = 0; + LZ4_setCompressionLevel(LZ4_streamHCPtr, compressionLevel); +} + +void LZ4_resetStreamHC_fast (LZ4_streamHC_t* 
LZ4_streamHCPtr, int compressionLevel) +{ + DEBUGLOG(4, "LZ4_resetStreamHC_fast(%p, %d)", LZ4_streamHCPtr, compressionLevel); + LZ4_streamHCPtr->internal_donotuse.end -= (uptrval)LZ4_streamHCPtr->internal_donotuse.base; LZ4_streamHCPtr->internal_donotuse.base = NULL; - if (compressionLevel > LZ4HC_CLEVEL_MAX) compressionLevel = LZ4HC_CLEVEL_MAX; /* cap compression level */ - LZ4_streamHCPtr->internal_donotuse.compressionLevel = compressionLevel; - LZ4_streamHCPtr->internal_donotuse.searchNum = LZ4HC_getSearchNum(compressionLevel); + LZ4_streamHCPtr->internal_donotuse.dictCtx = NULL; + LZ4_setCompressionLevel(LZ4_streamHCPtr, compressionLevel); } void LZ4_setCompressionLevel(LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel) { - int const currentCLevel = LZ4_streamHCPtr->internal_donotuse.compressionLevel; - int const minCLevel = currentCLevel < LZ4HC_CLEVEL_OPT_MIN ? 1 : LZ4HC_CLEVEL_OPT_MIN; - int const maxCLevel = currentCLevel < LZ4HC_CLEVEL_OPT_MIN ? LZ4HC_CLEVEL_OPT_MIN-1 : LZ4HC_CLEVEL_MAX; - compressionLevel = MIN(compressionLevel, minCLevel); - compressionLevel = MAX(compressionLevel, maxCLevel); - LZ4_streamHCPtr->internal_donotuse.compressionLevel = compressionLevel; + if (compressionLevel < 1) compressionLevel = LZ4HC_CLEVEL_DEFAULT; + if (compressionLevel > LZ4HC_CLEVEL_MAX) compressionLevel = LZ4HC_CLEVEL_MAX; + LZ4_streamHCPtr->internal_donotuse.compressionLevel = (short)compressionLevel; +} + +void LZ4_favorDecompressionSpeed(LZ4_streamHC_t* LZ4_streamHCPtr, int favor) +{ + LZ4_streamHCPtr->internal_donotuse.favorDecSpeed = (favor!=0); } int LZ4_loadDictHC (LZ4_streamHC_t* LZ4_streamHCPtr, const char* dictionary, int dictSize) { LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse; + DEBUGLOG(4, "LZ4_loadDictHC(%p, %p, %d)", LZ4_streamHCPtr, dictionary, dictSize); if (dictSize > 64 KB) { dictionary += dictSize - 64 KB; dictSize = 64 KB; } + LZ4_resetStreamHC(LZ4_streamHCPtr, ctxPtr->compressionLevel); LZ4HC_init (ctxPtr, (const BYTE*)dictionary); ctxPtr->end = (const BYTE*)dictionary + dictSize; - if (ctxPtr->compressionLevel >= LZ4HC_CLEVEL_OPT_MIN) - LZ4HC_updateBinTree(ctxPtr, ctxPtr->end - MFLIMIT, ctxPtr->end - LASTLITERALS); - else - if (dictSize >= 4) LZ4HC_Insert (ctxPtr, ctxPtr->end-3); + if (dictSize >= 4) LZ4HC_Insert (ctxPtr, ctxPtr->end-3); return dictSize; } +void LZ4_attach_HC_dictionary(LZ4_streamHC_t *working_stream, const LZ4_streamHC_t *dictionary_stream) { + working_stream->internal_donotuse.dictCtx = dictionary_stream != NULL ? 
&(dictionary_stream->internal_donotuse) : NULL; +} /* compression */ static void LZ4HC_setExternalDict(LZ4HC_CCtx_internal* ctxPtr, const BYTE* newBlock) { - if (ctxPtr->compressionLevel >= LZ4HC_CLEVEL_OPT_MIN) - LZ4HC_updateBinTree(ctxPtr, ctxPtr->end - MFLIMIT, ctxPtr->end - LASTLITERALS); - else - if (ctxPtr->end >= ctxPtr->base + 4) LZ4HC_Insert (ctxPtr, ctxPtr->end-3); /* Referencing remaining dictionary content */ + DEBUGLOG(4, "LZ4HC_setExternalDict(%p, %p)", ctxPtr, newBlock); + if (ctxPtr->end >= ctxPtr->base + ctxPtr->dictLimit + 4) + LZ4HC_Insert (ctxPtr, ctxPtr->end-3); /* Referencing remaining dictionary content */ /* Only one memory segment for extDict, so any previous extDict is lost at this stage */ ctxPtr->lowLimit = ctxPtr->dictLimit; @@ -680,6 +959,7 @@ static int LZ4_compressHC_continue_generic (LZ4_streamHC_t* LZ4_streamHCPtr, limitedOutput_directive limit) { LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse; + DEBUGLOG(4, "LZ4_compressHC_continue_generic(%p, %p, %d)", LZ4_streamHCPtr, src, *srcSizePtr); /* auto-init if forgotten */ if (ctxPtr->base == NULL) LZ4HC_init (ctxPtr, (const BYTE*) src); @@ -717,8 +997,6 @@ int LZ4_compress_HC_continue (LZ4_streamHC_t* LZ4_streamHCPtr, const char* src, int LZ4_compress_HC_continue_destSize (LZ4_streamHC_t* LZ4_streamHCPtr, const char* src, char* dst, int* srcSizePtr, int targetDestSize) { - LZ4HC_CCtx_internal* const ctxPtr = &LZ4_streamHCPtr->internal_donotuse; - if (ctxPtr->compressionLevel >= LZ4HC_CLEVEL_OPT_MIN) LZ4HC_init(ctxPtr, (const BYTE*)src); /* not compatible with btopt implementation */ return LZ4_compressHC_continue_generic(LZ4_streamHCPtr, src, dst, srcSizePtr, targetDestSize, limitedDestSize); } @@ -730,6 +1008,7 @@ int LZ4_saveDictHC (LZ4_streamHC_t* LZ4_streamHCPtr, char* safeBuffer, int dictS { LZ4HC_CCtx_internal* const streamPtr = &LZ4_streamHCPtr->internal_donotuse; int const prefixSize = (int)(streamPtr->end - (streamPtr->base + streamPtr->dictLimit)); + DEBUGLOG(4, "LZ4_saveDictHC(%p, %p, %d)", LZ4_streamHCPtr, safeBuffer, dictSize); if (dictSize > 64 KB) dictSize = 64 KB; if (dictSize < 4) dictSize = 0; if (dictSize > prefixSize) dictSize = prefixSize; @@ -769,17 +1048,17 @@ int LZ4_resetStreamStateHC(void* state, char* inputBuffer) { LZ4HC_CCtx_internal *ctx = &((LZ4_streamHC_t*)state)->internal_donotuse; if ((((size_t)state) & (sizeof(void*)-1)) != 0) return 1; /* Error : pointer is not aligned for pointer (32 or 64 bits) */ + LZ4_resetStreamHC((LZ4_streamHC_t*)state, ((LZ4_streamHC_t*)state)->internal_donotuse.compressionLevel); LZ4HC_init(ctx, (const BYTE*)inputBuffer); - ctx->inputBuffer = (BYTE*)inputBuffer; return 0; } -void* LZ4_createHC (char* inputBuffer) +void* LZ4_createHC (const char* inputBuffer) { - LZ4_streamHC_t* hc4 = (LZ4_streamHC_t*)ALLOCATOR(1, sizeof(LZ4_streamHC_t)); + LZ4_streamHC_t* hc4 = (LZ4_streamHC_t*)ALLOC(sizeof(LZ4_streamHC_t)); if (hc4 == NULL) return NULL; /* not enough memory */ + LZ4_resetStreamHC(hc4, 0 /* compressionLevel */); LZ4HC_init (&hc4->internal_donotuse, (const BYTE*)inputBuffer); - hc4->internal_donotuse.inputBuffer = (BYTE*)inputBuffer; return hc4; } @@ -801,7 +1080,333 @@ int LZ4_compressHC2_limitedOutput_continue (void* LZ4HC_Data, const char* src, c char* LZ4_slideInputBufferHC(void* LZ4HC_Data) { - LZ4HC_CCtx_internal* const hc4 = &((LZ4_streamHC_t*)LZ4HC_Data)->internal_donotuse; - int const dictSize = LZ4_saveDictHC((LZ4_streamHC_t*)LZ4HC_Data, (char*)(hc4->inputBuffer), 64 KB); - return (char*)(hc4->inputBuffer + 
dictSize); + LZ4_streamHC_t *ctx = (LZ4_streamHC_t*)LZ4HC_Data; + const BYTE *bufferStart = ctx->internal_donotuse.base + ctx->internal_donotuse.lowLimit; + LZ4_resetStreamHC_fast(ctx, ctx->internal_donotuse.compressionLevel); + /* avoid const char * -> char * conversion warning :( */ + return (char *)(uptrval)bufferStart; +} + + +/* ================================================ + * LZ4 Optimal parser (levels 10-12) + * ===============================================*/ +typedef struct { + int price; + int off; + int mlen; + int litlen; +} LZ4HC_optimal_t; + +/* price in bytes */ +LZ4_FORCE_INLINE int LZ4HC_literalsPrice(int const litlen) +{ + int price = litlen; + if (litlen >= (int)RUN_MASK) + price += 1 + (litlen-RUN_MASK)/255; + return price; +} + + +/* requires mlen >= MINMATCH */ +LZ4_FORCE_INLINE int LZ4HC_sequencePrice(int litlen, int mlen) +{ + int price = 1 + 2 ; /* token + 16-bit offset */ + + price += LZ4HC_literalsPrice(litlen); + + if (mlen >= (int)(ML_MASK+MINMATCH)) + price += 1 + (mlen-(ML_MASK+MINMATCH))/255; + + return price; } + + +typedef struct { + int off; + int len; +} LZ4HC_match_t; + +LZ4_FORCE_INLINE LZ4HC_match_t +LZ4HC_FindLongerMatch(LZ4HC_CCtx_internal* const ctx, + const BYTE* ip, const BYTE* const iHighLimit, + int minLen, int nbSearches, + const dictCtx_directive dict, + const HCfavor_e favorDecSpeed) +{ + LZ4HC_match_t match = { 0 , 0 }; + const BYTE* matchPtr = NULL; + /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos), + * but this won't be the case here, as we define iLowLimit==ip, + * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */ + int matchLength = LZ4HC_InsertAndGetWiderMatch(ctx, ip, ip, iHighLimit, minLen, &matchPtr, &ip, nbSearches, 1 /*patternAnalysis*/, 1 /*chainSwap*/, dict, favorDecSpeed); + if (matchLength <= minLen) return match; + if (favorDecSpeed) { + if ((matchLength>18) & (matchLength<=36)) matchLength=18; /* favor shortcut */ + } + match.len = matchLength; + match.off = (int)(ip-matchPtr); + return match; +} + + +static int LZ4HC_compress_optimal ( LZ4HC_CCtx_internal* ctx, + const char* const source, + char* dst, + int* srcSizePtr, + int dstCapacity, + int const nbSearches, + size_t sufficient_len, + const limitedOutput_directive limit, + int const fullUpdate, + const dictCtx_directive dict, + const HCfavor_e favorDecSpeed) +{ +#define TRAILING_LITERALS 3 + LZ4HC_optimal_t opt[LZ4_OPT_NUM + TRAILING_LITERALS]; /* ~64 KB, which is a bit large for stack... 
*/ + + const BYTE* ip = (const BYTE*) source; + const BYTE* anchor = ip; + const BYTE* const iend = ip + *srcSizePtr; + const BYTE* const mflimit = iend - MFLIMIT; + const BYTE* const matchlimit = iend - LASTLITERALS; + BYTE* op = (BYTE*) dst; + BYTE* opSaved = (BYTE*) dst; + BYTE* oend = op + dstCapacity; + + /* init */ + DEBUGLOG(5, "LZ4HC_compress_optimal"); + *srcSizePtr = 0; + if (limit == limitedDestSize) oend -= LASTLITERALS; /* Hack for support LZ4 format restriction */ + if (sufficient_len >= LZ4_OPT_NUM) sufficient_len = LZ4_OPT_NUM-1; + + /* Main Loop */ + assert(ip - anchor < LZ4_MAX_INPUT_SIZE); + while (ip <= mflimit) { + int const llen = (int)(ip - anchor); + int best_mlen, best_off; + int cur, last_match_pos = 0; + + LZ4HC_match_t const firstMatch = LZ4HC_FindLongerMatch(ctx, ip, matchlimit, MINMATCH-1, nbSearches, dict, favorDecSpeed); + if (firstMatch.len==0) { ip++; continue; } + + if ((size_t)firstMatch.len > sufficient_len) { + /* good enough solution : immediate encoding */ + int const firstML = firstMatch.len; + const BYTE* const matchPos = ip - firstMatch.off; + opSaved = op; + if ( LZ4HC_encodeSequence(&ip, &op, &anchor, firstML, matchPos, limit, oend) ) /* updates ip, op and anchor */ + goto _dest_overflow; + continue; + } + + /* set prices for first positions (literals) */ + { int rPos; + for (rPos = 0 ; rPos < MINMATCH ; rPos++) { + int const cost = LZ4HC_literalsPrice(llen + rPos); + opt[rPos].mlen = 1; + opt[rPos].off = 0; + opt[rPos].litlen = llen + rPos; + opt[rPos].price = cost; + DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i) -- initial setup", + rPos, cost, opt[rPos].litlen); + } } + /* set prices using initial match */ + { int mlen = MINMATCH; + int const matchML = firstMatch.len; /* necessarily < sufficient_len < LZ4_OPT_NUM */ + int const offset = firstMatch.off; + assert(matchML < LZ4_OPT_NUM); + for ( ; mlen <= matchML ; mlen++) { + int const cost = LZ4HC_sequencePrice(llen, mlen); + opt[mlen].mlen = mlen; + opt[mlen].off = offset; + opt[mlen].litlen = llen; + opt[mlen].price = cost; + DEBUGLOG(7, "rPos:%3i => price:%3i (matchlen=%i) -- initial setup", + mlen, cost, mlen); + } } + last_match_pos = firstMatch.len; + { int addLit; + for (addLit = 1; addLit <= TRAILING_LITERALS; addLit ++) { + opt[last_match_pos+addLit].mlen = 1; /* literal */ + opt[last_match_pos+addLit].off = 0; + opt[last_match_pos+addLit].litlen = addLit; + opt[last_match_pos+addLit].price = opt[last_match_pos].price + LZ4HC_literalsPrice(addLit); + DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i) -- initial setup", + last_match_pos+addLit, opt[last_match_pos+addLit].price, addLit); + } } + + /* check further positions */ + for (cur = 1; cur < last_match_pos; cur++) { + const BYTE* const curPtr = ip + cur; + LZ4HC_match_t newMatch; + + if (curPtr > mflimit) break; + DEBUGLOG(7, "rPos:%u[%u] vs [%u]%u", + cur, opt[cur].price, opt[cur+1].price, cur+1); + if (fullUpdate) { + /* not useful to search here if next position has same (or lower) cost */ + if ( (opt[cur+1].price <= opt[cur].price) + /* in some cases, next position has same cost, but cost rises sharply after, so a small match would still be beneficial */ + && (opt[cur+MINMATCH].price < opt[cur].price + 3/*min seq price*/) ) + continue; + } else { + /* not useful to search here if next position has same (or lower) cost */ + if (opt[cur+1].price <= opt[cur].price) continue; + } + + DEBUGLOG(7, "search at rPos:%u", cur); + if (fullUpdate) + newMatch = LZ4HC_FindLongerMatch(ctx, curPtr, matchlimit, MINMATCH-1, nbSearches, dict, 
favorDecSpeed); + else + /* only test matches of minimum length; slightly faster, but misses a few bytes */ + newMatch = LZ4HC_FindLongerMatch(ctx, curPtr, matchlimit, last_match_pos - cur, nbSearches, dict, favorDecSpeed); + if (!newMatch.len) continue; + + if ( ((size_t)newMatch.len > sufficient_len) + || (newMatch.len + cur >= LZ4_OPT_NUM) ) { + /* immediate encoding */ + best_mlen = newMatch.len; + best_off = newMatch.off; + last_match_pos = cur + 1; + goto encode; + } + + /* before match : set price with literals at beginning */ + { int const baseLitlen = opt[cur].litlen; + int litlen; + for (litlen = 1; litlen < MINMATCH; litlen++) { + int const price = opt[cur].price - LZ4HC_literalsPrice(baseLitlen) + LZ4HC_literalsPrice(baseLitlen+litlen); + int const pos = cur + litlen; + if (price < opt[pos].price) { + opt[pos].mlen = 1; /* literal */ + opt[pos].off = 0; + opt[pos].litlen = baseLitlen+litlen; + opt[pos].price = price; + DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i)", + pos, price, opt[pos].litlen); + } } } + + /* set prices using match at position = cur */ + { int const matchML = newMatch.len; + int ml = MINMATCH; + + assert(cur + newMatch.len < LZ4_OPT_NUM); + for ( ; ml <= matchML ; ml++) { + int const pos = cur + ml; + int const offset = newMatch.off; + int price; + int ll; + DEBUGLOG(7, "testing price rPos %i (last_match_pos=%i)", + pos, last_match_pos); + if (opt[cur].mlen == 1) { + ll = opt[cur].litlen; + price = ((cur > ll) ? opt[cur - ll].price : 0) + + LZ4HC_sequencePrice(ll, ml); + } else { + ll = 0; + price = opt[cur].price + LZ4HC_sequencePrice(0, ml); + } + + assert((U32)favorDecSpeed <= 1); + if (pos > last_match_pos+TRAILING_LITERALS + || price <= opt[pos].price - (int)favorDecSpeed) { + DEBUGLOG(7, "rPos:%3i => price:%3i (matchlen=%i)", + pos, price, ml); + assert(pos < LZ4_OPT_NUM); + if ( (ml == matchML) /* last pos of last match */ + && (last_match_pos < pos) ) + last_match_pos = pos; + opt[pos].mlen = ml; + opt[pos].off = offset; + opt[pos].litlen = ll; + opt[pos].price = price; + } } } + /* complete following positions with literals */ + { int addLit; + for (addLit = 1; addLit <= TRAILING_LITERALS; addLit ++) { + opt[last_match_pos+addLit].mlen = 1; /* literal */ + opt[last_match_pos+addLit].off = 0; + opt[last_match_pos+addLit].litlen = addLit; + opt[last_match_pos+addLit].price = opt[last_match_pos].price + LZ4HC_literalsPrice(addLit); + DEBUGLOG(7, "rPos:%3i => price:%3i (litlen=%i)", last_match_pos+addLit, opt[last_match_pos+addLit].price, addLit); + } } + } /* for (cur = 1; cur <= last_match_pos; cur++) */ + + best_mlen = opt[last_match_pos].mlen; + best_off = opt[last_match_pos].off; + cur = last_match_pos - best_mlen; + + encode: /* cur, last_match_pos, best_mlen, best_off must be set */ + assert(cur < LZ4_OPT_NUM); + assert(last_match_pos >= 1); /* == 1 when only one candidate */ + DEBUGLOG(6, "reverse traversal, looking for shortest path (last_match_pos=%i)", last_match_pos); + { int candidate_pos = cur; + int selected_matchLength = best_mlen; + int selected_offset = best_off; + while (1) { /* from end to beginning */ + int const next_matchLength = opt[candidate_pos].mlen; /* can be 1, means literal */ + int const next_offset = opt[candidate_pos].off; + DEBUGLOG(7, "pos %i: sequence length %i", candidate_pos, selected_matchLength); + opt[candidate_pos].mlen = selected_matchLength; + opt[candidate_pos].off = selected_offset; + selected_matchLength = next_matchLength; + selected_offset = next_offset; + if (next_matchLength > candidate_pos) break; /* 
last match elected, first match to encode */ + assert(next_matchLength > 0); /* can be 1, means literal */ + candidate_pos -= next_matchLength; + } } + + /* encode all recorded sequences in order */ + { int rPos = 0; /* relative position (to ip) */ + while (rPos < last_match_pos) { + int const ml = opt[rPos].mlen; + int const offset = opt[rPos].off; + if (ml == 1) { ip++; rPos++; continue; } /* literal; note: can end up with several literals, in which case, skip them */ + rPos += ml; + assert(ml >= MINMATCH); + assert((offset >= 1) && (offset <= MAX_DISTANCE)); + opSaved = op; + if ( LZ4HC_encodeSequence(&ip, &op, &anchor, ml, ip - offset, limit, oend) ) /* updates ip, op and anchor */ + goto _dest_overflow; + } } + } /* while (ip <= mflimit) */ + + _last_literals: + /* Encode Last Literals */ + { size_t lastRunSize = (size_t)(iend - anchor); /* literals */ + size_t litLength = (lastRunSize + 255 - RUN_MASK) / 255; + size_t const totalSize = 1 + litLength + lastRunSize; + if (limit == limitedDestSize) oend += LASTLITERALS; /* restore correct value */ + if (limit && (op + totalSize > oend)) { + if (limit == limitedOutput) return 0; /* Check output limit */ + /* adapt lastRunSize to fill 'dst' */ + lastRunSize = (size_t)(oend - op) - 1; + litLength = (lastRunSize + 255 - RUN_MASK) / 255; + lastRunSize -= litLength; + } + ip = anchor + lastRunSize; + + if (lastRunSize >= RUN_MASK) { + size_t accumulator = lastRunSize - RUN_MASK; + *op++ = (RUN_MASK << ML_BITS); + for(; accumulator >= 255 ; accumulator -= 255) *op++ = 255; + *op++ = (BYTE) accumulator; + } else { + *op++ = (BYTE)(lastRunSize << ML_BITS); + } + memcpy(op, anchor, lastRunSize); + op += lastRunSize; + } + + /* End */ + *srcSizePtr = (int) (((const char*)ip) - source); + return (int) ((char*)op-dst); + + _dest_overflow: + if (limit == limitedDestSize) { + op = opSaved; /* restore correct out pointer */ + goto _last_literals; + } + return 0; + } diff --git a/src/third-party/lz4/lz4hc.h b/src/third-party/lz4/lz4hc.h index 9618459f5dd..970fa396621 100644 --- a/src/third-party/lz4/lz4hc.h +++ b/src/third-party/lz4/lz4hc.h @@ -39,14 +39,14 @@ extern "C" { #endif /* --- Dependency --- */ -/* note : lz4hc is not an independent module, it requires lz4.h/lz4.c for proper compilation */ +/* note : lz4hc requires lz4.h/lz4.c for compilation */ #include "lz4.h" /* stddef, LZ4LIB_API, LZ4_DEPRECATED */ /* --- Useful constants --- */ #define LZ4HC_CLEVEL_MIN 3 #define LZ4HC_CLEVEL_DEFAULT 9 -#define LZ4HC_CLEVEL_OPT_MIN 11 +#define LZ4HC_CLEVEL_OPT_MIN 10 #define LZ4HC_CLEVEL_MAX 12 @@ -54,12 +54,12 @@ extern "C" { * Block Compression **************************************/ /*! LZ4_compress_HC() : - * Compress data from `src` into `dst`, using the more powerful but slower "HC" algorithm. + * Compress data from `src` into `dst`, using the more powerful but slower "HC" algorithm. * `dst` must be already allocated. - * Compression is guaranteed to succeed if `dstCapacity >= LZ4_compressBound(srcSize)` (see "lz4.h") - * Max supported `srcSize` value is LZ4_MAX_INPUT_SIZE (see "lz4.h") - * `compressionLevel` : Recommended values are between 4 and 9, although any value between 1 and LZ4HC_CLEVEL_MAX will work. - * Values >LZ4HC_CLEVEL_MAX behave the same as LZ4HC_CLEVEL_MAX. + * Compression is guaranteed to succeed if `dstCapacity >= LZ4_compressBound(srcSize)` (see "lz4.h") + * Max supported `srcSize` value is LZ4_MAX_INPUT_SIZE (see "lz4.h") + * `compressionLevel` : any value between 1 and LZ4HC_CLEVEL_MAX will work. 
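
A minimal one-shot usage sketch (illustrative only, not from the LZ4 sources), sizing the destination with LZ4_compressBound() as the comment above recommends; the sample buffer and the choice of LZ4HC_CLEVEL_DEFAULT are assumptions:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include "lz4.h"     /* LZ4_compressBound() */
    #include "lz4hc.h"   /* LZ4_compress_HC(), LZ4HC_CLEVEL_DEFAULT */

    int main(void)
    {
        const char src[] = "sample data, sample data, sample data, sample data";
        int const srcSize = (int)strlen(src);
        int const dstCapacity = LZ4_compressBound(srcSize);   /* guarantees success */
        char* const dst = (char*)malloc((size_t)dstCapacity);
        if (dst == NULL) return 1;
        {   /* any level in [1, LZ4HC_CLEVEL_MAX]; LZ4HC_CLEVEL_DEFAULT == 9 */
            int const cSize = LZ4_compress_HC(src, dst, srcSize, dstCapacity, LZ4HC_CLEVEL_DEFAULT);
            if (cSize == 0) { free(dst); return 1; }   /* 0 means compression failed */
            printf("compressed %d -> %d bytes\n", srcSize, cSize);
        }
        free(dst);
        return 0;
    }
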
+ * Values > LZ4HC_CLEVEL_MAX behave the same as LZ4HC_CLEVEL_MAX. * @return : the number of bytes written into 'dst' * or 0 if compression fails. */ @@ -72,12 +72,12 @@ LZ4LIB_API int LZ4_compress_HC (const char* src, char* dst, int srcSize, int dst /*! LZ4_compress_HC_extStateHC() : - * Same as LZ4_compress_HC(), but using an externally allocated memory segment for `state`. + * Same as LZ4_compress_HC(), but using an externally allocated memory segment for `state`. * `state` size is provided by LZ4_sizeofStateHC(). - * Memory segment must be aligned on 8-bytes boundaries (which a normal malloc() will do properly). + * Memory segment must be aligned on 8-bytes boundaries (which a normal malloc() should do properly). */ -LZ4LIB_API int LZ4_compress_HC_extStateHC(void* state, const char* src, char* dst, int srcSize, int maxDstSize, int compressionLevel); LZ4LIB_API int LZ4_sizeofStateHC(void); +LZ4LIB_API int LZ4_compress_HC_extStateHC(void* state, const char* src, char* dst, int srcSize, int maxDstSize, int compressionLevel); /*-************************************ @@ -87,10 +87,10 @@ LZ4LIB_API int LZ4_sizeofStateHC(void); typedef union LZ4_streamHC_u LZ4_streamHC_t; /* incomplete type (defined later) */ /*! LZ4_createStreamHC() and LZ4_freeStreamHC() : - * These functions create and release memory for LZ4 HC streaming state. - * Newly created states are automatically initialized. - * Existing states can be re-used several times, using LZ4_resetStreamHC(). - * These methods are API and ABI stable, they can be used in combination with a DLL. + * These functions create and release memory for LZ4 HC streaming state. + * Newly created states are automatically initialized. + * Existing states can be re-used several times, using LZ4_resetStreamHC(). + * These methods are API and ABI stable, they can be used in combination with a DLL. */ LZ4LIB_API LZ4_streamHC_t* LZ4_createStreamHC(void); LZ4LIB_API int LZ4_freeStreamHC (LZ4_streamHC_t* streamHCPtr); @@ -123,13 +123,13 @@ LZ4LIB_API int LZ4_saveDictHC (LZ4_streamHC_t* streamHCPtr, char* safeBuffer, in */ - /*-************************************* +/*-************************************************************** * PRIVATE DEFINITIONS : * Do not use these definitions. * They are exposed to allow static allocation of `LZ4_streamHC_t`. 
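
A sketch of the external-state variant documented above (the helper name compress_with_ext_state() is hypothetical; only LZ4_sizeofStateHC() and LZ4_compress_HC_extStateHC() are LZ4 API):

    #include <stdlib.h>
    #include "lz4hc.h"

    /* Caller-owned state: allocate LZ4_sizeofStateHC() bytes once and reuse it.
     * malloc() returns pointer-aligned memory, which should satisfy the
     * 8-byte alignment requirement mentioned above. */
    static int compress_with_ext_state(const char* src, int srcSize,
                                       char* dst, int dstCapacity, int level)
    {
        void* const state = malloc((size_t)LZ4_sizeofStateHC());
        int cSize = 0;
        if (state != NULL) {
            cSize = LZ4_compress_HC_extStateHC(state, src, dst, srcSize, dstCapacity, level);
            free(state);
        }
        return cSize;   /* 0 on failure */
    }
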
* Using these definitions makes the code vulnerable to potential API break when upgrading LZ4 - **************************************/ -#define LZ4HC_DICTIONARY_LOGSIZE 17 /* because of btopt, hc would only need 16 */ + ****************************************************************/ +#define LZ4HC_DICTIONARY_LOGSIZE 16 #define LZ4HC_MAXD (1<= 199901L) /* C99 */) #include -typedef struct +typedef struct LZ4HC_CCtx_internal LZ4HC_CCtx_internal; +struct LZ4HC_CCtx_internal { uint32_t hashTable[LZ4HC_HASHTABLESIZE]; uint16_t chainTable[LZ4HC_MAXD]; const uint8_t* end; /* next block here to continue on current prefix */ const uint8_t* base; /* All index relative to this position */ const uint8_t* dictBase; /* alternate base for extDict */ - uint8_t* inputBuffer; /* deprecated */ uint32_t dictLimit; /* below that point, need extDict */ uint32_t lowLimit; /* below that point, no more dict */ uint32_t nextToUpdate; /* index from which to continue dictionary update */ - uint32_t searchNum; /* only for optimal parser */ - uint32_t compressionLevel; -} LZ4HC_CCtx_internal; + short compressionLevel; + short favorDecSpeed; + const LZ4HC_CCtx_internal* dictCtx; +}; #else -typedef struct +typedef struct LZ4HC_CCtx_internal LZ4HC_CCtx_internal; +struct LZ4HC_CCtx_internal { unsigned int hashTable[LZ4HC_HASHTABLESIZE]; unsigned short chainTable[LZ4HC_MAXD]; const unsigned char* end; /* next block here to continue on current prefix */ const unsigned char* base; /* All index relative to this position */ const unsigned char* dictBase; /* alternate base for extDict */ - unsigned char* inputBuffer; /* deprecated */ unsigned int dictLimit; /* below that point, need extDict */ unsigned int lowLimit; /* below that point, no more dict */ unsigned int nextToUpdate; /* index from which to continue dictionary update */ - unsigned int searchNum; /* only for optimal parser */ - int compressionLevel; -} LZ4HC_CCtx_internal; + short compressionLevel; + short favorDecSpeed; + const LZ4HC_CCtx_internal* dictCtx; +}; #endif -#define LZ4_STREAMHCSIZE (4*LZ4HC_HASHTABLESIZE + 2*LZ4HC_MAXD + 56) /* 393268 */ +#define LZ4_STREAMHCSIZE (4*LZ4HC_HASHTABLESIZE + 2*LZ4HC_MAXD + 56) /* 262200 */ #define LZ4_STREAMHCSIZE_SIZET (LZ4_STREAMHCSIZE / sizeof(size_t)) union LZ4_streamHC_u { size_t table[LZ4_STREAMHCSIZE_SIZET]; @@ -197,26 +199,32 @@ union LZ4_streamHC_u { /* see lz4.h LZ4_DISABLE_DEPRECATE_WARNINGS to turn off deprecation warnings */ /* deprecated compression functions */ -/* these functions will trigger warning messages in future releases */ -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC() instead") int LZ4_compressHC (const char* source, char* dest, int inputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC() instead") int LZ4_compressHC_limitedOutput (const char* source, char* dest, int inputSize, int maxOutputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC() instead") int LZ4_compressHC2 (const char* source, char* dest, int inputSize, int compressionLevel); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC() instead") int LZ4_compressHC2_limitedOutput (const char* source, char* dest, int inputSize, int maxOutputSize, int compressionLevel); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") int LZ4_compressHC_withStateHC (void* state, const char* source, char* dest, int inputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") int LZ4_compressHC_limitedOutput_withStateHC (void* state, const char* source, char* dest, int inputSize, int maxOutputSize); -LZ4LIB_API 
LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") int LZ4_compressHC2_withStateHC (void* state, const char* source, char* dest, int inputSize, int compressionLevel); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") int LZ4_compressHC2_limitedOutput_withStateHC(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int compressionLevel); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") int LZ4_compressHC_continue (LZ4_streamHC_t* LZ4_streamHCPtr, const char* source, char* dest, int inputSize); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") int LZ4_compressHC_limitedOutput_continue (LZ4_streamHC_t* LZ4_streamHCPtr, const char* source, char* dest, int inputSize, int maxOutputSize); - -/* Deprecated Streaming functions using older model; should no longer be used */ -LZ4LIB_API LZ4_DEPRECATED("use LZ4_createStreamHC() instead") void* LZ4_createHC (char* inputBuffer); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_saveDictHC() instead") char* LZ4_slideInputBufferHC (void* LZ4HC_Data); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_freeStreamHC() instead") int LZ4_freeHC (void* LZ4HC_Data); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") int LZ4_compressHC2_continue (void* LZ4HC_Data, const char* source, char* dest, int inputSize, int compressionLevel); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") int LZ4_compressHC2_limitedOutput_continue (void* LZ4HC_Data, const char* source, char* dest, int inputSize, int maxOutputSize, int compressionLevel); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_createStreamHC() instead") int LZ4_sizeofStreamStateHC(void); -LZ4LIB_API LZ4_DEPRECATED("use LZ4_resetStreamHC() instead") int LZ4_resetStreamStateHC(void* state, char* inputBuffer); +LZ4_DEPRECATED("use LZ4_compress_HC() instead") LZ4LIB_API int LZ4_compressHC (const char* source, char* dest, int inputSize); +LZ4_DEPRECATED("use LZ4_compress_HC() instead") LZ4LIB_API int LZ4_compressHC_limitedOutput (const char* source, char* dest, int inputSize, int maxOutputSize); +LZ4_DEPRECATED("use LZ4_compress_HC() instead") LZ4LIB_API int LZ4_compressHC2 (const char* source, char* dest, int inputSize, int compressionLevel); +LZ4_DEPRECATED("use LZ4_compress_HC() instead") LZ4LIB_API int LZ4_compressHC2_limitedOutput (const char* source, char* dest, int inputSize, int maxOutputSize, int compressionLevel); +LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") LZ4LIB_API int LZ4_compressHC_withStateHC (void* state, const char* source, char* dest, int inputSize); +LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") LZ4LIB_API int LZ4_compressHC_limitedOutput_withStateHC (void* state, const char* source, char* dest, int inputSize, int maxOutputSize); +LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") LZ4LIB_API int LZ4_compressHC2_withStateHC (void* state, const char* source, char* dest, int inputSize, int compressionLevel); +LZ4_DEPRECATED("use LZ4_compress_HC_extStateHC() instead") LZ4LIB_API int LZ4_compressHC2_limitedOutput_withStateHC(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int compressionLevel); +LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") LZ4LIB_API int LZ4_compressHC_continue (LZ4_streamHC_t* LZ4_streamHCPtr, const char* source, char* dest, int inputSize); +LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") LZ4LIB_API int LZ4_compressHC_limitedOutput_continue (LZ4_streamHC_t* LZ4_streamHCPtr, const char* source, char* dest, int 
inputSize, int maxOutputSize); + +/* Obsolete streaming functions; degraded functionality; do not use! + * + * In order to perform streaming compression, these functions depended on data + * that is no longer tracked in the state. They have been preserved as well as + * possible: using them will still produce a correct output. However, use of + * LZ4_slideInputBufferHC() will truncate the history of the stream, rather + * than preserve a window-sized chunk of history. + */ +LZ4_DEPRECATED("use LZ4_createStreamHC() instead") LZ4LIB_API void* LZ4_createHC (const char* inputBuffer); +LZ4_DEPRECATED("use LZ4_saveDictHC() instead") LZ4LIB_API char* LZ4_slideInputBufferHC (void* LZ4HC_Data); +LZ4_DEPRECATED("use LZ4_freeStreamHC() instead") LZ4LIB_API int LZ4_freeHC (void* LZ4HC_Data); +LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") LZ4LIB_API int LZ4_compressHC2_continue (void* LZ4HC_Data, const char* source, char* dest, int inputSize, int compressionLevel); +LZ4_DEPRECATED("use LZ4_compress_HC_continue() instead") LZ4LIB_API int LZ4_compressHC2_limitedOutput_continue (void* LZ4HC_Data, const char* source, char* dest, int inputSize, int maxOutputSize, int compressionLevel); +LZ4_DEPRECATED("use LZ4_createStreamHC() instead") LZ4LIB_API int LZ4_sizeofStreamStateHC(void); +LZ4_DEPRECATED("use LZ4_resetStreamHC() instead") LZ4LIB_API int LZ4_resetStreamStateHC(void* state, char* inputBuffer); #if defined (__cplusplus) @@ -225,18 +233,23 @@ LZ4LIB_API LZ4_DEPRECATED("use LZ4_resetStreamHC() instead") int LZ4_resetStr #endif /* LZ4_HC_H_19834876238432 */ -/*-************************************************ + +/*-************************************************** * !!!!! STATIC LINKING ONLY !!!!! * Following definitions are considered experimental. * They should not be linked from DLL, * as there is no guarantee of API stability yet. * Prototypes will be promoted to "stable" status - * after successful usage in real-life scenarios. - *************************************************/ + * after successfull usage in real-life scenarios. + ***************************************************/ #ifdef LZ4_HC_STATIC_LINKING_ONLY /* protection macro */ #ifndef LZ4_HC_SLO_098092834 #define LZ4_HC_SLO_098092834 +#if defined (__cplusplus) +extern "C" { +#endif + /*! LZ4_compress_HC_destSize() : v1.8.0 (experimental) * Will try to compress as much data from `src` as possible * that can fit into `targetDstSize` budget. @@ -246,9 +259,9 @@ LZ4LIB_API LZ4_DEPRECATED("use LZ4_resetStreamHC() instead") int LZ4_resetStr * `srcSizePtr` : value will be updated to indicate how much bytes were read from `src` */ int LZ4_compress_HC_destSize(void* LZ4HC_Data, - const char* src, char* dst, - int* srcSizePtr, int targetDstSize, - int compressionLevel); + const char* src, char* dst, + int* srcSizePtr, int targetDstSize, + int compressionLevel); /*! LZ4_compress_HC_continue_destSize() : v1.8.0 (experimental) * Similar as LZ4_compress_HC_continue(), @@ -258,21 +271,85 @@ int LZ4_compress_HC_destSize(void* LZ4HC_Data, * @return : the number of bytes written into 'dst' * or 0 if compression fails. * `srcSizePtr` : value will be updated to indicate how much bytes were read from `src`. 
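
A sketch of budget-limited compression with the destSize API described above (buffer sizes and the demo function name are illustrative; since these prototypes sit in the experimental static-linking-only section, LZ4_HC_STATIC_LINKING_ONLY is defined before the include):

    #include <stdio.h>
    #define LZ4_HC_STATIC_LINKING_ONLY
    #include "lz4hc.h"

    static void destSize_demo(const char* src, int srcSize)
    {
        char dst[256];                /* hard output budget */
        int consumed = srcSize;       /* in: bytes available; out: bytes actually read */
        LZ4_streamHC_t* const state = LZ4_createStreamHC();
        if (state == NULL) return;
        {   int const written = LZ4_compress_HC_destSize(state, src, dst,
                                      &consumed, (int)sizeof(dst), LZ4HC_CLEVEL_DEFAULT);
            printf("wrote %d of %d budget bytes, consumed %d of %d source bytes\n",
                   written, (int)sizeof(dst), consumed, srcSize);
        }
        LZ4_freeStreamHC(state);
    }
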
- * Important : due to limitations, this prototype only works well up to cLevel < LZ4HC_CLEVEL_OPT_MIN - * beyond that level, compression performance will be much reduced due to internal incompatibilities */ int LZ4_compress_HC_continue_destSize(LZ4_streamHC_t* LZ4_streamHCPtr, const char* src, char* dst, int* srcSizePtr, int targetDstSize); /*! LZ4_setCompressionLevel() : v1.8.0 (experimental) - * It's possible to change compression level after LZ4_resetStreamHC(), between 2 invocations of LZ4_compress_HC_continue*(), - * but that requires to stay in the same mode (aka 1-10 or 11-12). - * This function ensures this condition. + * It's possible to change compression level between 2 invocations of LZ4_compress_HC_continue*() */ void LZ4_setCompressionLevel(LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel); +/*! LZ4_favorDecompressionSpeed() : v1.8.2 (experimental) + * Parser will select decisions favoring decompression over compression ratio. + * Only work at highest compression settings (level >= LZ4HC_CLEVEL_OPT_MIN) + */ +void LZ4_favorDecompressionSpeed(LZ4_streamHC_t* LZ4_streamHCPtr, int favor); + +/*! LZ4_resetStreamHC_fast() : + * When an LZ4_streamHC_t is known to be in a internally coherent state, + * it can often be prepared for a new compression with almost no work, only + * sometimes falling back to the full, expensive reset that is always required + * when the stream is in an indeterminate state (i.e., the reset performed by + * LZ4_resetStreamHC()). + * + * LZ4_streamHCs are guaranteed to be in a valid state when: + * - returned from LZ4_createStreamHC() + * - reset by LZ4_resetStreamHC() + * - memset(stream, 0, sizeof(LZ4_streamHC_t)) + * - the stream was in a valid state and was reset by LZ4_resetStreamHC_fast() + * - the stream was in a valid state and was then used in any compression call + * that returned success + * - the stream was in an indeterminate state and was used in a compression + * call that fully reset the state (LZ4_compress_HC_extStateHC()) and that + * returned success + */ +void LZ4_resetStreamHC_fast(LZ4_streamHC_t* LZ4_streamHCPtr, int compressionLevel); + +/*! LZ4_compress_HC_extStateHC_fastReset() : + * A variant of LZ4_compress_HC_extStateHC(). + * + * Using this variant avoids an expensive initialization step. It is only safe + * to call if the state buffer is known to be correctly initialized already + * (see above comment on LZ4_resetStreamHC_fast() for a definition of + * "correctly initialized"). From a high level, the difference is that this + * function initializes the provided state with a call to + * LZ4_resetStreamHC_fast() while LZ4_compress_HC_extStateHC() starts with a + * call to LZ4_resetStreamHC(). + */ +int LZ4_compress_HC_extStateHC_fastReset (void* state, const char* src, char* dst, int srcSize, int dstCapacity, int compressionLevel); + +/*! LZ4_attach_HC_dictionary() : + * This is an experimental API that allows for the efficient use of a + * static dictionary many times. + * + * Rather than re-loading the dictionary buffer into a working context before + * each compression, or copying a pre-loaded dictionary's LZ4_streamHC_t into a + * working LZ4_streamHC_t, this function introduces a no-copy setup mechanism, + * in which the working stream references the dictionary stream in-place. + * + * Several assumptions are made about the state of the dictionary stream. + * Currently, only streams which have been prepared by LZ4_loadDictHC() should + * be expected to work. 
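
A sketch of the no-copy dictionary setup described above (the helper name and buffers are illustrative; it assumes the experimental prototypes are visible via LZ4_HC_STATIC_LINKING_ONLY, since LZ4_attach_HC_dictionary() and LZ4_resetStreamHC_fast() are declared in this section):

    #define LZ4_HC_STATIC_LINKING_ONLY
    #include "lz4hc.h"

    static int compress_with_attached_dict(const char* dictBuf, int dictSize,
                                           const char* src, int srcSize,
                                           char* dst, int dstCapacity, int level)
    {
        int cSize = 0;
        LZ4_streamHC_t* const dictStream = LZ4_createStreamHC();
        LZ4_streamHC_t* const workStream = LZ4_createStreamHC();
        if (dictStream != NULL && workStream != NULL) {
            /* Prepare the dictionary once; it can then serve many sessions. */
            LZ4_loadDictHC(dictStream, dictBuf, dictSize);

            /* Start from a history-free stream, then reference the dictionary in place. */
            LZ4_resetStreamHC_fast(workStream, level);
            LZ4_attach_HC_dictionary(workStream, dictStream);

            cSize = LZ4_compress_HC_continue(workStream, src, dst, srcSize, dstCapacity);
        }
        LZ4_freeStreamHC(workStream);   /* free on NULL is supported */
        LZ4_freeStreamHC(dictStream);
        return cSize;   /* 0 on failure */
    }
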
+ * + * Alternatively, the provided dictionary stream pointer may be NULL, in which + * case any existing dictionary stream is unset. + * + * A dictionary should only be attached to a stream without any history (i.e., + * a stream that has just been reset). + * + * The dictionary will remain attached to the working stream only for the + * current stream session. Calls to LZ4_resetStreamHC(_fast) will remove the + * dictionary context association from the working stream. The dictionary + * stream (and source buffer) must remain in-place / accessible / unchanged + * through the lifetime of the stream session. + */ +LZ4LIB_API void LZ4_attach_HC_dictionary(LZ4_streamHC_t *working_stream, const LZ4_streamHC_t *dictionary_stream); +#if defined (__cplusplus) +} +#endif #endif /* LZ4_HC_SLO_098092834 */ #endif /* LZ4_HC_STATIC_LINKING_ONLY */ diff --git a/src/third-party/lz4/lz4opt.h b/src/third-party/lz4/lz4opt.h deleted file mode 100644 index 584dc97f934..00000000000 --- a/src/third-party/lz4/lz4opt.h +++ /dev/null @@ -1,366 +0,0 @@ -/* - lz4opt.h - Optimal Mode of LZ4 - Copyright (C) 2015-2017, Przemyslaw Skibinski - Note : this file is intended to be included within lz4hc.c - - BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- - You can contact the author at : - - LZ4 source repository : https://github.com/lz4/lz4 - - LZ4 public forum : https://groups.google.com/forum/#!forum/lz4c -*/ - -#define LZ4_OPT_NUM (1<<12) - - -typedef struct { - int off; - int len; -} LZ4HC_match_t; - -typedef struct { - int price; - int off; - int mlen; - int litlen; -} LZ4HC_optimal_t; - - -/* price in bytes */ -LZ4_FORCE_INLINE size_t LZ4HC_literalsPrice(size_t litlen) -{ - size_t price = litlen; - if (litlen >= (size_t)RUN_MASK) - price += 1 + (litlen-RUN_MASK)/255; - return price; -} - - -/* requires mlen >= MINMATCH */ -LZ4_FORCE_INLINE size_t LZ4HC_sequencePrice(size_t litlen, size_t mlen) -{ - size_t price = 2 + 1; /* 16-bit offset + token */ - - price += LZ4HC_literalsPrice(litlen); - - if (mlen >= (size_t)(ML_MASK+MINMATCH)) - price+= 1 + (mlen-(ML_MASK+MINMATCH))/255; - - return price; -} - - -/*-************************************* -* Binary Tree search -***************************************/ -LZ4_FORCE_INLINE int LZ4HC_BinTree_InsertAndGetAllMatches ( - LZ4HC_CCtx_internal* ctx, - const BYTE* const ip, - const BYTE* const iHighLimit, - size_t best_mlen, - LZ4HC_match_t* matches, - int* matchNum) -{ - U16* const chainTable = ctx->chainTable; - U32* const HashTable = ctx->hashTable; - const BYTE* const base = ctx->base; - const U32 dictLimit = ctx->dictLimit; - const U32 current = (U32)(ip - base); - const U32 lowLimit = (ctx->lowLimit + MAX_DISTANCE > current) ? ctx->lowLimit : current - (MAX_DISTANCE - 1); - const BYTE* const dictBase = ctx->dictBase; - const BYTE* match; - int nbAttempts = ctx->searchNum; - int mnum = 0; - U16 *ptr0, *ptr1, delta0, delta1; - U32 matchIndex; - size_t matchLength = 0; - U32* HashPos; - - if (ip + MINMATCH > iHighLimit) return 1; - - /* HC4 match finder */ - HashPos = &HashTable[LZ4HC_hashPtr(ip)]; - matchIndex = *HashPos; - *HashPos = current; - - ptr0 = &DELTANEXTMAXD(current*2+1); - ptr1 = &DELTANEXTMAXD(current*2); - delta0 = delta1 = (U16)(current - matchIndex); - - while ((matchIndex < current) && (matchIndex>=lowLimit) && (nbAttempts)) { - nbAttempts--; - if (matchIndex >= dictLimit) { - match = base + matchIndex; - matchLength = LZ4_count(ip, match, iHighLimit); - } else { - const BYTE* vLimit = ip + (dictLimit - matchIndex); - match = dictBase + matchIndex; - if (vLimit > iHighLimit) vLimit = iHighLimit; - matchLength = LZ4_count(ip, match, vLimit); - if ((ip+matchLength == vLimit) && (vLimit < iHighLimit)) - matchLength += LZ4_count(ip+matchLength, base+dictLimit, iHighLimit); - if (matchIndex+matchLength >= dictLimit) - match = base + matchIndex; /* to prepare for next usage of match[matchLength] */ - } - - if (matchLength > best_mlen) { - best_mlen = matchLength; - if (matches) { - if (matchIndex >= dictLimit) - matches[mnum].off = (int)(ip - match); - else - matches[mnum].off = (int)(ip - (base + matchIndex)); /* virtual matchpos */ - matches[mnum].len = (int)matchLength; - mnum++; - } - if (best_mlen > LZ4_OPT_NUM) break; - } - - if (ip+matchLength >= iHighLimit) /* equal : no way to know if inf or sup */ - break; /* drop , to guarantee consistency ; miss a bit of compression, but other solutions can corrupt the tree */ - - DEBUGLOG(6, "ip :%016llX", (U64)ip); - DEBUGLOG(6, "match:%016llX", (U64)match); - if (*(ip+matchLength) < *(match+matchLength)) { - *ptr0 = delta0; - ptr0 = &DELTANEXTMAXD(matchIndex*2); - if (*ptr0 == (U16)-1) break; - delta0 = *ptr0; - delta1 += delta0; - matchIndex -= delta0; - } else { - *ptr1 = delta1; - ptr1 = 
&DELTANEXTMAXD(matchIndex*2+1); - if (*ptr1 == (U16)-1) break; - delta1 = *ptr1; - delta0 += delta1; - matchIndex -= delta1; - } - } - - *ptr0 = (U16)-1; - *ptr1 = (U16)-1; - if (matchNum) *matchNum = mnum; - /* if (best_mlen > 8) return best_mlen-8; */ - if (!matchNum) return 1; - return 1; -} - - -LZ4_FORCE_INLINE void LZ4HC_updateBinTree(LZ4HC_CCtx_internal* ctx, const BYTE* const ip, const BYTE* const iHighLimit) -{ - const BYTE* const base = ctx->base; - const U32 target = (U32)(ip - base); - U32 idx = ctx->nextToUpdate; - while(idx < target) - idx += LZ4HC_BinTree_InsertAndGetAllMatches(ctx, base+idx, iHighLimit, 8, NULL, NULL); -} - - -/** Tree updater, providing best match */ -LZ4_FORCE_INLINE int LZ4HC_BinTree_GetAllMatches ( - LZ4HC_CCtx_internal* ctx, - const BYTE* const ip, const BYTE* const iHighLimit, - size_t best_mlen, LZ4HC_match_t* matches, const int fullUpdate) -{ - int mnum = 0; - if (ip < ctx->base + ctx->nextToUpdate) return 0; /* skipped area */ - if (fullUpdate) LZ4HC_updateBinTree(ctx, ip, iHighLimit); - best_mlen = LZ4HC_BinTree_InsertAndGetAllMatches(ctx, ip, iHighLimit, best_mlen, matches, &mnum); - ctx->nextToUpdate = (U32)(ip - ctx->base + best_mlen); - return mnum; -} - - -#define SET_PRICE(pos, ml, offset, ll, cost) \ -{ \ - while (last_pos < pos) { opt[last_pos+1].price = 1<<30; last_pos++; } \ - opt[pos].mlen = (int)ml; \ - opt[pos].off = (int)offset; \ - opt[pos].litlen = (int)ll; \ - opt[pos].price = (int)cost; \ -} - - -static int LZ4HC_compress_optimal ( - LZ4HC_CCtx_internal* ctx, - const char* const source, - char* dest, - int inputSize, - int maxOutputSize, - limitedOutput_directive limit, - size_t sufficient_len, - const int fullUpdate - ) -{ - LZ4HC_optimal_t opt[LZ4_OPT_NUM + 1]; /* this uses a bit too much stack memory to my taste ... */ - LZ4HC_match_t matches[LZ4_OPT_NUM + 1]; - - const BYTE* ip = (const BYTE*) source; - const BYTE* anchor = ip; - const BYTE* const iend = ip + inputSize; - const BYTE* const mflimit = iend - MFLIMIT; - const BYTE* const matchlimit = (iend - LASTLITERALS); - BYTE* op = (BYTE*) dest; - BYTE* const oend = op + maxOutputSize; - - /* init */ - DEBUGLOG(5, "LZ4HC_compress_optimal"); - if (sufficient_len >= LZ4_OPT_NUM) sufficient_len = LZ4_OPT_NUM-1; - ctx->end += inputSize; - ip++; - - /* Main Loop */ - while (ip < mflimit) { - size_t const llen = ip - anchor; - size_t last_pos = 0; - size_t match_num, cur, best_mlen, best_off; - memset(opt, 0, sizeof(LZ4HC_optimal_t)); /* memset only the first one */ - - match_num = LZ4HC_BinTree_GetAllMatches(ctx, ip, matchlimit, MINMATCH-1, matches, fullUpdate); - if (!match_num) { ip++; continue; } - - if ((size_t)matches[match_num-1].len > sufficient_len) { - /* good enough solution : immediate encoding */ - best_mlen = matches[match_num-1].len; - best_off = matches[match_num-1].off; - cur = 0; - last_pos = 1; - goto encode; - } - - /* set prices using matches at position = 0 */ - { size_t matchNb; - for (matchNb = 0; matchNb < match_num; matchNb++) { - size_t mlen = (matchNb>0) ? 
(size_t)matches[matchNb-1].len+1 : MINMATCH; - best_mlen = matches[matchNb].len; /* necessarily < sufficient_len < LZ4_OPT_NUM */ - for ( ; mlen <= best_mlen ; mlen++) { - size_t const cost = LZ4HC_sequencePrice(llen, mlen) - LZ4HC_literalsPrice(llen); - SET_PRICE(mlen, mlen, matches[matchNb].off, 0, cost); /* updates last_pos and opt[pos] */ - } } } - - if (last_pos < MINMATCH) { ip++; continue; } /* note : on clang at least, this test improves performance */ - - /* check further positions */ - opt[0].mlen = opt[1].mlen = 1; - for (cur = 1; cur <= last_pos; cur++) { - const BYTE* const curPtr = ip + cur; - - /* establish baseline price if cur is literal */ - { size_t price, litlen; - if (opt[cur-1].mlen == 1) { - /* no match at previous position */ - litlen = opt[cur-1].litlen + 1; - if (cur > litlen) { - price = opt[cur - litlen].price + LZ4HC_literalsPrice(litlen); - } else { - price = LZ4HC_literalsPrice(llen + litlen) - LZ4HC_literalsPrice(llen); - } - } else { - litlen = 1; - price = opt[cur - 1].price + LZ4HC_literalsPrice(1); - } - - if (price < (size_t)opt[cur].price) - SET_PRICE(cur, 1 /*mlen*/, 0 /*off*/, litlen, price); /* note : increases last_pos */ - } - - if (cur == last_pos || curPtr >= mflimit) break; - - match_num = LZ4HC_BinTree_GetAllMatches(ctx, curPtr, matchlimit, MINMATCH-1, matches, fullUpdate); - if ((match_num > 0) && (size_t)matches[match_num-1].len > sufficient_len) { - /* immediate encoding */ - best_mlen = matches[match_num-1].len; - best_off = matches[match_num-1].off; - last_pos = cur + 1; - goto encode; - } - - /* set prices using matches at position = cur */ - { size_t matchNb; - for (matchNb = 0; matchNb < match_num; matchNb++) { - size_t ml = (matchNb>0) ? (size_t)matches[matchNb-1].len+1 : MINMATCH; - best_mlen = (cur + matches[matchNb].len < LZ4_OPT_NUM) ? - (size_t)matches[matchNb].len : LZ4_OPT_NUM - cur; - - for ( ; ml <= best_mlen ; ml++) { - size_t ll, price; - if (opt[cur].mlen == 1) { - ll = opt[cur].litlen; - if (cur > ll) - price = opt[cur - ll].price + LZ4HC_sequencePrice(ll, ml); - else - price = LZ4HC_sequencePrice(llen + ll, ml) - LZ4HC_literalsPrice(llen); - } else { - ll = 0; - price = opt[cur].price + LZ4HC_sequencePrice(0, ml); - } - - if (cur + ml > last_pos || price < (size_t)opt[cur + ml].price) { - SET_PRICE(cur + ml, ml, matches[matchNb].off, ll, price); - } } } } - } /* for (cur = 1; cur <= last_pos; cur++) */ - - best_mlen = opt[last_pos].mlen; - best_off = opt[last_pos].off; - cur = last_pos - best_mlen; - -encode: /* cur, last_pos, best_mlen, best_off must be set */ - opt[0].mlen = 1; - while (1) { /* from end to beginning */ - size_t const ml = opt[cur].mlen; - int const offset = opt[cur].off; - opt[cur].mlen = (int)best_mlen; - opt[cur].off = (int)best_off; - best_mlen = ml; - best_off = offset; - if (ml > cur) break; /* can this happen ? 
*/ - cur -= ml; - } - - /* encode all recorded sequences */ - cur = 0; - while (cur < last_pos) { - int const ml = opt[cur].mlen; - int const offset = opt[cur].off; - if (ml == 1) { ip++; cur++; continue; } - cur += ml; - if ( LZ4HC_encodeSequence(&ip, &op, &anchor, ml, ip - offset, limit, oend) ) return 0; - } - } /* while (ip < mflimit) */ - - /* Encode Last Literals */ - { int lastRun = (int)(iend - anchor); - if ((limit) && (((char*)op - dest) + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > (U32)maxOutputSize)) return 0; /* Check output limit */ - if (lastRun>=(int)RUN_MASK) { *op++=(RUN_MASK< 254 ; lastRun-=255) *op++ = 255; *op++ = (BYTE) lastRun; } - else *op++ = (BYTE)(lastRun< List.fold_left func init l + | Lookup {mapping_arr; _} -> Array.fold_left func init mapping_arr + + let make_lookup mapping_arr = Lookup { + mapping_arr; + last_ind = -1; + } + + let addition_list holder = + match holder with + | Addition l -> l + | _ -> raise (MappingHolderException "Cannot get list from frozen holder") +end + +let freeze_for_lookup map = + match map.mappings with + | Addition l -> { map with mappings = MappingHolder.make_lookup (Array.of_list l) } + | _ -> map + let sources_set map = - List.fold_left (fun acc mapping -> + MappingHolder.fold_left (fun acc mapping -> match mapping with | { original = Some { source; _ }; _ } -> SSet.add source acc | { original = None; _ } -> acc @@ -56,49 +87,112 @@ let sources_set map = let sources map = SSet.elements (sources_set map) -(* Searches for `needle` in `arr`. If `needle` doesn't exist, returns the closest lower bound. *) +(* Searches for `needle` and the index it's found at in `arr`. If `needle` doesn't exist, + returns the closest lower bound. *) let rec binary_search ~cmp needle arr l u = let len = Array.length arr in if len = 0 then None - else if l >= len then Some arr.(len - 1) - else if u < l then Some arr.(l) + else if l >= len then Some (arr.(len - 1), len - 1) + else if u < l then Some (arr.(l), l) else let i = (l + u) / 2 in let k = cmp needle arr.(i) in - if k = 0 then Some arr.(i) + if k = 0 then Some (arr.(i), i) else if k < 0 then binary_search ~cmp needle arr l (i - 1) else binary_search ~cmp needle arr (i + 1) u -let find_original map generated = - let mappings = Array.of_list map.mappings in - let len = Array.length mappings in +let original_from_found = function + | Some ({ original; _ },_) -> original + | None -> None + +let find_original_unknown_arr mappings_arr generated = + let len = Array.length mappings_arr in let cmp = fun { line = a_line; col = a_col } { generated_loc = { line = b_line; col = b_col }; _ } -> let k = b_line - a_line in if k = 0 then b_col - a_col else k in - match binary_search ~cmp generated mappings 0 (len - 1) with - | Some { original; _ } -> original - | None -> None + binary_search ~cmp generated mappings_arr 0 (len - 1) + +(* Try to inform local search based on last found index, else binary search *) +let find_original_from_context c generated = + let len = Array.length c.mapping_arr in + if len = 0 then None else + let line_for_index ind = c.mapping_arr.(ind).generated_loc.line in + let column_for_index ind = c.mapping_arr.(ind).generated_loc.col in + let try_local_search = + if c.last_ind < 0 then None else + let recent_line = line_for_index c.last_ind in + if generated.line <> recent_line then None else + let look_distance = 5 in + (* Left and right can be deceiving. The array is sorted descending. 
*) + let rec look_right cur_ind looks_remaining = + if cur_ind = (len - 1) || + (line_for_index (cur_ind + 1)) <> recent_line + then + Some (c.mapping_arr.(cur_ind), cur_ind) + else if (column_for_index (cur_ind + 1)) <= generated.col + then + Some (c.mapping_arr.(cur_ind + 1), cur_ind + 1) + else if looks_remaining = 0 then None + else look_right (cur_ind + 1) (looks_remaining - 1) in + let rec look_left cur_ind looks_remaining = + if cur_ind = 0 || + (line_for_index (cur_ind - 1)) <> recent_line || + (column_for_index (cur_ind - 1)) > generated.col + then + Some (c.mapping_arr.(cur_ind), cur_ind) + else if looks_remaining = 0 then None + else look_left (cur_ind - 1) (looks_remaining - 1) in + let cur_col = column_for_index c.last_ind in + if generated.col < cur_col then + look_right c.last_ind look_distance + else + look_left c.last_ind look_distance + in + let findings = + match try_local_search with + | Some _ -> try_local_search + | _ -> find_original_unknown_arr c.mapping_arr generated + in + let () = match findings with + | Some (_, ind) -> c.last_ind <- ind + | None -> () + in + original_from_found findings + + +let find_original map generated = + match map.mappings with + | Addition l -> + let found = find_original_unknown_arr (Array.of_list l) generated in + original_from_found found + | Lookup c -> find_original_from_context c generated (* for each mapping in `map`, update to the `original` info corresponding to that loc in map2 *) let compose map map2 = - let mappings, names = List.fold_left (fun (mappings, names) mapping -> - let mapping, names = match mapping.original with - | Some { original_loc; _ } -> - begin match find_original map2 original_loc with - | Some ({ name; _ } as original) -> - let mapping = { mapping with original = Some original } in - let names = match name with Some name -> SSet.add name names | None -> names in - mapping, names - | None -> mapping, names - end - | None -> mapping, names - in - mapping::mappings, names - ) ([], map.names) map.mappings in - { map with mappings = List.rev mappings; names } + let mappings, names, sources_contents = + List.fold_left (fun (mappings, names, sources_contents) mapping -> + match mapping.original with + | Some { original_loc; _ } -> + begin match find_original map2 original_loc with + | Some ({ name; source; _ } as original) -> + let mapping = { mapping with original = Some original } in + let names = match name with Some name -> SSet.add name names | None -> names in + let sources_contents = + match SMap.find_opt source map2.sources_contents with + | Some content -> SMap.add source content sources_contents + | _ -> sources_contents + in + mapping::mappings, names, sources_contents + | None -> mappings, names, sources_contents + end + | None -> mappings, names, sources_contents + ) ([], SSet.empty, SMap.empty) (MappingHolder.addition_list map.mappings) in + { map with mappings = Addition (List.rev mappings); names; sources_contents; + source_root = map2.source_root } + let add_mapping ~original ~generated map = let names = match original.name with @@ -109,8 +203,8 @@ let add_mapping ~original ~generated map = original = Some original; generated_loc = generated; } in - let mappings = mapping::map.mappings in - { map with mappings; names } + let mappings = mapping::(MappingHolder.addition_list map.mappings) in + { map with mappings = Addition mappings; names } let add_source_content ~source ~content map = let sources_contents = SMap.add source content map.sources_contents in @@ -204,7 +298,7 @@ let string_of_mappings 
map = end in (buf, { state with prev_mapping = Some mapping }) - ) (buf, state) (List.rev map.mappings) in + ) (buf, state) (List.rev (MappingHolder.addition_list map.mappings)) in Buffer.contents buf let mappings_of_stream = @@ -291,6 +385,7 @@ let sources_contents map = let version _map = "3" let names map = SSet.elements map.names let source_root map = map.source_root +let file map = map.file module type Json_writer_intf = sig type t @@ -390,7 +485,7 @@ module Make_json_reader (Json : Json_reader_intf) : (Json_reader with type json file; source_root; names = SSet.of_list names; - mappings; + mappings = Addition mappings; sources_contents = match sources_contents with Some x -> x | None -> SMap.empty; } end diff --git a/src/third-party/ocaml-sourcemaps/src/sourcemap.mli b/src/third-party/ocaml-sourcemaps/src/sourcemap.mli index e8f6e3b600b..a2376ce6b25 100644 --- a/src/third-party/ocaml-sourcemaps/src/sourcemap.mli +++ b/src/third-party/ocaml-sourcemaps/src/sourcemap.mli @@ -18,6 +18,7 @@ and line_col = { val create: ?file:string -> ?source_root:string -> unit -> t +val freeze_for_lookup: t -> t val find_original: t -> line_col -> original option val compose: t -> t -> t @@ -27,6 +28,7 @@ val add_mapping: original:original -> generated:line_col -> t -> t val add_source_content: source:string -> content:string -> t -> t val version: t -> string +val file: t -> string option val string_of_mappings: t -> string val names: t -> string list val source_root: t -> string option diff --git a/src/third-party/ocaml-sourcemaps/test/compose_test.ml b/src/third-party/ocaml-sourcemaps/test/compose_test.ml index 60318b91db3..bdcedc2fe6c 100644 --- a/src/third-party/ocaml-sourcemaps/test/compose_test.ml +++ b/src/third-party/ocaml-sourcemaps/test/compose_test.ml @@ -9,30 +9,90 @@ open OUnit2 (* open Sourcemaps *) open Test_utils -let bar = Sourcemap.({ - source = "bar.js"; - original_loc = { line = 10; col = 5 }; - name = None; +(** + * Imagine we have three versions of the same file from a transformation pipeline. + * /v2_files/v2.js was generated from /v1_files/v1.js and produced a map: map_1_2 + * /v3_files/v3.js was generated from /v2_files/v2.js and produced a map: map_2_3 + * We want to reconstruct map_1_3 by composing the two maps. 
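Putting the pieces above together, the intended flow is: build each per-stage map with add_mapping, derive the end-to-end map with compose, and only then call freeze_for_lookup before issuing queries. add_mapping (and compose, for its first argument) go through MappingHolder.addition_list, which raises on a frozen map, while freeze_for_lookup switches to the array-backed representation that find_original's binary/local search is written against. What follows is a minimal sketch against the Sourcemap interface declared in sourcemap.mli above; the a_v1/a_v2 values, file names, and locations are illustrative only and are not taken from the tests.

(* Sketch only: builds two stage maps, composes them, freezes the result,
   and performs a lookup. All calls come from sourcemap.mli above. *)
let a_v1 = Sourcemap.({
  source = "v1.js";
  original_loc = { line = 1; col = 0 };
  name = Some "a";
})

let a_v2 = Sourcemap.({
  source = "v2.js";
  original_loc = { line = 2; col = 0 };
  name = Some "a";
})

(* map_1_2 maps positions in v2.js back to v1.js;
   map_2_3 maps positions in v3.js back to v2.js. *)
let map_1_2 =
  Sourcemap.create ~file:"v2.js" ()
  |> Sourcemap.add_mapping ~original:a_v1 ~generated:a_v2.Sourcemap.original_loc

let map_2_3 =
  Sourcemap.create ~file:"v3.js" ()
  |> Sourcemap.add_mapping ~original:a_v2 ~generated:{ Sourcemap.line = 1; col = 0 }

(* Compose while both maps are still list-backed (Addition), then freeze once
   for repeated lookups: freeze_for_lookup converts the mappings to the
   array-backed (Lookup) form used by find_original. *)
let map_1_3 = Sourcemap.freeze_for_lookup (Sourcemap.compose map_2_3 map_1_2)

let () =
  match Sourcemap.find_original map_1_3 { Sourcemap.line = 1; col = 0 } with
  | Some { Sourcemap.source; original_loc = { Sourcemap.line; col }; _ } ->
    Printf.printf "v3.js:1:0 originated at %s:%d:%d\n" source line col
  | None -> print_endline "no mapping found"

Note that freezing is effectively one-way in this API: there is no thaw, so any further add_mapping on the frozen map, or compose with it as the first argument, would raise; freezing belongs after all map construction is finished. The compose test below exercises the same pipeline end to end.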
+ *) + +let v1_source = "a\nb" +let v2_source = "// Preamble\nA B" +let v3_source = "a\n\nb" + +let v1_a = Sourcemap.({ + source = "v1.js"; + original_loc = { line = 1; col = 0 }; + name = Some "a"; +}) + +let v1_b = Sourcemap.({ + source = "v1.js"; + original_loc = { line = 2; col = 0 }; + name = Some "b"; }) -let foo = Sourcemap.({ - source = "foo.js"; - original_loc = { line = 3; col = 1 }; - name = None; + +let v2_a = Sourcemap.({ + source = "v2.js"; + original_loc = { line = 2; col = 0 }; + name = Some "A"; +}) + +let v2_b = Sourcemap.({ + source = "v2.js"; + original_loc = { line = 1; col = 2 }; + name = Some "B"; +}) + +let v3_a = Sourcemap.({ + source = "v3.js"; + original_loc = { line = 1; col = 0 }; + name = Some "a"; }) -let map = - Sourcemap.create () - |> Sourcemap.add_mapping ~original:foo ~generated:{ Sourcemap.line = 1; col = 1 } -let map2 = - Sourcemap.create () - |> Sourcemap.add_mapping ~original:bar ~generated:{ Sourcemap.line = 3; col = 1 } + +let v3_b = Sourcemap.({ + source = "v3.js"; + original_loc = { line = 3; col = 0 }; + name = Some "b"; +}) + +let map_1_2 = + Sourcemap.create () ~file:"v2.js" ~source_root:"/v1_files/" + |> Sourcemap.add_mapping ~original:v1_b ~generated:v2_b.Sourcemap.original_loc + |> Sourcemap.add_mapping ~original:v1_a ~generated:v2_a.Sourcemap.original_loc + |> Sourcemap.add_source_content ~source:"v1.js" ~content:v1_source + +let map_2_3 = + Sourcemap.create () ~file:"v3.js" ~source_root:"/v2_files/" + |> Sourcemap.add_mapping ~original:v2_a ~generated:v3_a.Sourcemap.original_loc + |> Sourcemap.add_mapping ~original:v2_b ~generated:v3_b.Sourcemap.original_loc + |> Sourcemap.add_source_content ~source:"v2.js" ~content:v2_source let tests = "compose" >::: [ "basic" >:: begin fun ctxt -> let expected = - Sourcemap.create () - |> Sourcemap.add_mapping ~original:bar ~generated:{ Sourcemap.line = 1; col = 1 } + Sourcemap.create () ~file:"v3.js" ~source_root:"/v1_files/" + |> Sourcemap.add_mapping ~original:v1_a ~generated:v3_a.Sourcemap.original_loc + |> Sourcemap.add_mapping ~original:v1_b ~generated:v3_b.Sourcemap.original_loc + |> Sourcemap.add_source_content ~source:"v1.js" ~content:v1_source + in + let map_1_3 = Sourcemap.compose map_2_3 map_1_2 in + assert_equal_sourcemaps ~ctxt expected map_1_3 + end; + "empty_map_2_3" >:: begin fun ctxt -> + let expected = + Sourcemap.create () ~file:"v3.js" ~source_root:"/v1_files/" + in + let map_2_3 = Sourcemap.create () ~file:"v3.js" ~source_root:"/v2_files/" in + let map_1_3 = Sourcemap.compose map_2_3 map_1_2 in + assert_equal_sourcemaps ~ctxt expected map_1_3 + end; + "empty_map_1_2" >:: begin fun ctxt -> + let expected = + Sourcemap.create () ~file:"v3.js" ~source_root:"/v1_files/" in - let map3 = Sourcemap.compose map map2 in - assert_equal_sourcemaps ~ctxt expected map3 + let map_1_2 = Sourcemap.create () ~file:"v2.js" ~source_root:"/v1_files/" in + let map_1_3 = Sourcemap.compose map_2_3 map_1_2 in + assert_equal_sourcemaps ~ctxt expected map_1_3 end; ] diff --git a/src/third-party/ocaml-sourcemaps/test/original_loc_frozen_test.ml b/src/third-party/ocaml-sourcemaps/test/original_loc_frozen_test.ml new file mode 100644 index 00000000000..25f0fc49652 --- /dev/null +++ b/src/third-party/ocaml-sourcemaps/test/original_loc_frozen_test.ml @@ -0,0 +1,73 @@ +(** + * Copyright (c) 2018-present, Facebook, Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 +(* open Sourcemaps *) + +let bar = Sourcemap.({ + source = "bar.js"; + original_loc = { line = 1; col = 1 }; + name = None; +}) +let foo = Sourcemap.({ + source = "foo.js"; + original_loc = { line = 1; col = 1 }; + name = None; +}) +let map = + Sourcemap.create () + |> Sourcemap.add_mapping ~original:bar ~generated:{ Sourcemap.line = 3; col = 1 } + |> Sourcemap.add_mapping ~original:foo ~generated:{ Sourcemap.line = 3; col = 5 } + |> Sourcemap.freeze_for_lookup + +let print_option f = function + | Some x -> "Some " ^ (f x) + | None -> "None" + +let print_original { Sourcemap.source; original_loc = { Sourcemap.line; col }; name } = + let name = print_option (fun x -> x) name in + Printf.sprintf + "{ source = %S; original_loc = { line = %d; col = %d }; name = %s }" + source line col name + +let tests = "original_loc_frozen" >::: [ + "matches_start" >:: begin fun ctxt -> + let expected = Some bar in + let actual = Sourcemap.find_original map { Sourcemap.line = 3; col = 1 } in + assert_equal ~ctxt ~printer:(print_option print_original) expected actual; + + let expected = Some foo in + let actual = Sourcemap.find_original map { Sourcemap.line = 3; col = 5 } in + assert_equal ~ctxt ~printer:(print_option print_original) expected actual; + end; + + "midpoint" >:: begin fun ctxt -> + let expected = Some bar in + let actual = Sourcemap.find_original map { Sourcemap.line = 3; col = 3 } in + assert_equal ~ctxt ~printer:(print_option print_original) expected actual; + end; + + "before_start" >:: begin fun ctxt -> + let expected = Some bar in + let actual = Sourcemap.find_original map { Sourcemap.line = 1; col = 2 } in + assert_equal ~ctxt ~printer:(print_option print_original) expected actual; + end; + + "past_end" >:: begin fun ctxt -> + let expected = Some foo in + let actual = Sourcemap.find_original map { Sourcemap.line = 3; col = 8 } in + assert_equal ~ctxt ~printer:(print_option print_original) expected actual; + end; + + "empty" >:: begin fun ctxt -> + let expected = None in + let map = Sourcemap.freeze_for_lookup (Sourcemap.create ()) in + let actual = Sourcemap.find_original map { Sourcemap.line = 3; col = 3 } in + + assert_equal ~ctxt ~printer:(print_option print_original) expected actual; + end; +] diff --git a/src/third-party/ocaml-sourcemaps/test/test.ml b/src/third-party/ocaml-sourcemaps/test/test.ml index 5a31b40fd14..eba68e4096c 100644 --- a/src/third-party/ocaml-sourcemaps/test/test.ml +++ b/src/third-party/ocaml-sourcemaps/test/test.ml @@ -11,6 +11,7 @@ let tests = "sourcemaps" >::: [ Compose_test.tests; Json_test.tests; Original_loc_test.tests; + Original_loc_frozen_test.tests; ] let () = run_test_tt_main tests diff --git a/src/third-party/ocaml-sourcemaps/test/test_utils.ml b/src/third-party/ocaml-sourcemaps/test/test_utils.ml index 53540e5d804..9bcc9a5a8f0 100644 --- a/src/third-party/ocaml-sourcemaps/test/test_utils.ml +++ b/src/third-party/ocaml-sourcemaps/test/test_utils.ml @@ -8,6 +8,10 @@ open OUnit2 (* open Sourcemaps *) +let opt_printer x = + match x with + | Some x -> x + | None -> "" let assert_equal_sourcemaps ~ctxt expected actual = assert_equal ~ctxt ~msg:"Versions not equal" (Sourcemap.version expected) (Sourcemap.version actual); @@ -16,9 +20,12 @@ let assert_equal_sourcemaps ~ctxt expected actual = assert_equal ~ctxt ~msg:"Source root not equal" (Sourcemap.source_root expected) (Sourcemap.source_root actual); assert_equal ~ctxt ~msg:"Names not equal" - (Sourcemap.names expected) (Sourcemap.names actual); + (Sourcemap.names expected) 
(Sourcemap.names actual) ~printer:(String.concat "; "); assert_equal ~ctxt ~msg:"Mappings not equal" ~printer:(fun x -> x) (Sourcemap.string_of_mappings expected) (Sourcemap.string_of_mappings actual); assert_equal ~ctxt ~msg:"Source content not equal" (Sourcemap.sources_contents expected) (Sourcemap.sources_contents actual); - assert_equal ~ctxt expected actual + assert_equal ~ctxt ~msg:"File not equal" + (Sourcemap.file expected) (Sourcemap.file actual) ~printer:opt_printer; + assert_equal ~ctxt ~msg:"Source root not equal" + (Sourcemap.source_root expected) (Sourcemap.source_root actual) ~printer:opt_printer diff --git a/src/third-party/ocaml-vlq/src/dune b/src/third-party/ocaml-vlq/src/dune new file mode 100644 index 00000000000..7b2282ee90e --- /dev/null +++ b/src/third-party/ocaml-vlq/src/dune @@ -0,0 +1,3 @@ +(library + (name flow_third_party_vlq) + (wrapped false)) diff --git a/src/typing/__tests__/signature_generator_test.ml b/src/typing/__tests__/signature_generator_test.ml new file mode 100644 index 00000000000..c40a2641cc9 --- /dev/null +++ b/src/typing/__tests__/signature_generator_test.ml @@ -0,0 +1,785 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +module Translate = + Estree_translator.Translate + (Json_of_estree) + (struct + (* TODO: make these configurable via CLI flags *) + let include_interned_comments = false + + let include_comments = true + + let include_locs = true + end) + +let pretty_print program = + Source.contents + @@ Pretty_printer.print ~source_maps:None ~skip_endline:true + @@ Js_layout_generator.program_simple program + +let print_ast program = Hh_json.json_to_string ~pretty:true @@ Translate.program None program + +let verify_and_generate + ?prevent_munge ?facebook_fbt ?ignore_static_propTypes ?facebook_keyMirror contents = + let contents = String.concat "\n" contents in + let program = Signature_verifier_test.parse contents in + let signature = + match Signature_builder.program ~module_ref_prefix:None program with + | Ok signature -> signature + | Error _ -> failwith "Signature builder failure!" + in + Signature_builder.Signature.verify_and_generate + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + signature + program + +let mk_signature_generator_test + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + expected_msgs + ctxt = + let msgs = + let (_errors, program) = + verify_and_generate + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + in + String.split_on_char '\n' @@ pretty_print program + in + let printer v = "\n" ^ String.concat "\n" v in + assert_equal + ~ctxt + ~cmp:(Signature_verifier_test.eq printer) + ~printer + ~msg:"Results don't match!" 
+ expected_msgs + msgs + +let mk_generated_signature_file_sig_test + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + expected_msgs + ctxt = + let msgs = + let (_errors, program) = + verify_and_generate + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + in + match File_sig.With_Loc.program ~ast:program ~module_ref_prefix:None with + | Ok fs -> File_sig.With_Loc.to_string fs |> String.split_on_char '\n' + | Error _ -> [] + in + let printer v = "\n" ^ String.concat "\n" v in + assert_equal + ~ctxt + ~cmp:(Signature_verifier_test.eq printer) + ~printer + ~msg:"Results don't match!" + expected_msgs + msgs + +let mk_verified_signature_generator_test + ?prevent_munge ?facebook_fbt ?ignore_static_propTypes ?facebook_keyMirror contents ctxt = + let msgs = + let (_errors, _program) = + verify_and_generate + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + in + [] + in + let printer v = String.concat "\n" v in + assert_equal + ~ctxt + ~cmp:(Signature_verifier_test.eq printer) + ~printer + ~msg:"Results don't match!" + [] + msgs + +let verified_signature_generator_tests = + List.fold_left + (fun acc + ( (prevent_munge, facebook_fbt, ignore_static_propTypes, facebook_keyMirror, name), + contents, + error_msgs, + _other_msgs ) -> + if error_msgs = [] then + let name = "verified_" ^ name in + ( name + >:: mk_verified_signature_generator_test + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents ) + :: acc + else + acc) + [] + Signature_verifier_test.tests_data + +let generated_signature_file_sig_tests = + [ + "multiple_bindings_destructured_require" + >:: mk_generated_signature_file_sig_test + [ + "const {"; + " foo: foo2,"; + " bar: { barX },"; + "} = require('./something');"; + "module.exports = { foo2, barX }"; + ] + [ + "{"; + " module_sig: {"; + " requires: ["; + " Require (./something, Some (BindNamed: bar));"; + " Require (./something, Some (BindNamed: foo));"; + " ];"; + " module_kind: CommonJS;"; + " type_exports_named: {"; + " };"; + " type_exports_star: ["; + " ];"; + " };"; + "}"; + ]; + ] + +let tests = + "signature_generator" + >::: [ + "dead_type" >:: mk_signature_generator_test ["type U = number"] []; + "dead_declare_type" >:: mk_signature_generator_test ["declare type U = number"] []; + "dead_types_transitive" + >:: mk_signature_generator_test ["type U = number"; "declare type T = U"] []; + "export_type_alias" + >:: mk_signature_generator_test + ["type U = number"; "export type T = U"] + ["type U = number;"; "type T = U;"; "export type {T};"]; + (* TODO: change of spaces *) + "export_type_specifier" + >:: mk_signature_generator_test + ["type U = number"; "export type { U }"] + ["type U = number;"; "export type {U};"]; + "export_type_specifier_local" + >:: mk_signature_generator_test + ["type U = number"; "export type { U as U2 }"] + ["type U = number;"; "export type {U as U2};"]; + "export_type_specifier_remote" + >:: mk_signature_generator_test + ["export type { K } from './foo'"] + ["export type {K} from \"./foo\";"]; + (* TODO: change of quotes *) + "export_type_specifier_remote_local1" + >:: mk_signature_generator_test + ["export type { K as K2 } from './foo'"] + ["export type {K as K2} from \"./foo\";"]; + "export_type_specifier_remote_local2" + >:: mk_signature_generator_test + ["type K = number"; "export type { K as K2 } from './foo'"] + ["export type {K as K2} from \"./foo\";"]; + 
"export_type_specifier_remote_local3" + >:: mk_signature_generator_test + ["export type K = number"; "export type { K as K2 } from './foo'"] + ["type K = number;"; "export type {K};"; "export type {K as K2} from \"./foo\";"]; + "export_type_batch" + >:: mk_signature_generator_test + ["export type * from './foo'"] + ["export type * from \"./foo\";"]; + "dead_var" >:: mk_signature_generator_test ["var x: number = 0"] []; + "dead_declare_var" >:: mk_signature_generator_test ["declare var x: number"] []; + "dead_transitive" >:: mk_signature_generator_test ["class C { }"; "var x: C = new C"] []; + "module_exports_function_expression" + >:: mk_signature_generator_test + ["module.exports = function() { }"] + ["declare module.exports: () => void;"]; + "module_exports_literal" + >:: mk_signature_generator_test + ["module.exports = 'hello'"] + ["declare module.exports: $TEMPORARY$string<'hello'>;"]; + "module_exports_object" + >:: mk_signature_generator_test + ["module.exports = { x: 'hello' }"] + ["declare module.exports: $TEMPORARY$object<{|x: $TEMPORARY$string<'hello'>|}>;"]; + "module_exports_object_with_spread" + >:: mk_signature_generator_test + ["const y = { y: 'world'};"; "module.exports = { x: 'hello', ...y }"] + [ + "declare var y: $TEMPORARY$object<{|y: $TEMPORARY$string<'world'>|}>;"; + "declare module.exports: $TEMPORARY$object<"; + " {|x: $TEMPORARY$string<'hello'>, ...typeof y|},"; + ">;"; + ]; + "module_exports_array_one" + >:: mk_signature_generator_test + ["module.exports = ['hello']"] + ["declare module.exports: $TEMPORARY$array<$TEMPORARY$string<'hello'>>;"]; + "module_exports_array_many" + >:: mk_signature_generator_test + ["module.exports = ['hello', 42]"] + [ + "declare module.exports: $TEMPORARY$array<"; + " $TEMPORARY$string<'hello'> | $TEMPORARY$number<42>,"; + ">;"; + ]; + "module_exports_class_expression" + >:: mk_signature_generator_test + ["module.exports = class { m(x: number): number { return x; } }"] + ["declare class $1 {m(x: number): number}"; "declare module.exports: typeof $1;"]; + (* outlining *) + "module_exports_named_class_expression" + >:: mk_signature_generator_test + ["module.exports = class C { m(x: C): C { return x; } }"] + ["declare class C {m(x: C): C}"; "declare module.exports: typeof C;"]; + (* outlining *) + "module_exports_require" + >:: mk_signature_generator_test + ["module.exports = require('./foo')"] + ["const $1 = require(\"./foo\");"; "declare module.exports: typeof $1;"]; + (* outlining *) + "module_exports_import" + >:: mk_signature_generator_test + ["module.exports = import('./foo')"] + ["import * as $1 from \"./foo\";"; "declare module.exports: typeof $1;"]; + (* outlining *) + "module_exports_bindings" + >:: mk_signature_generator_test + [ + "function foo() { }"; + "class C { }"; + "const x: number = 0"; + "const o = { p: x };"; + "module.exports = { foo, C, x, p: o.p }"; + ] + [ + "declare function foo(): void;"; + "declare class C {}"; + "declare var x: number;"; + "declare var o: $TEMPORARY$object<{|p: typeof x|}>;"; + "declare module.exports: $TEMPORARY$object<"; + " {|foo: typeof foo, C: typeof C, x: typeof x, p: typeof o.p|},"; + ">;"; + ]; + "declare_module_exports" + >:: mk_signature_generator_test + ["declare module.exports: () => void"] + ["declare module.exports: () => void;"]; + "export_default_expression" + >:: mk_signature_generator_test + ["export default function(x: number): number { return x; }"] + ["declare export default (x: number) => number;"]; + "declare_export_default_type" + >:: mk_signature_generator_test + 
["declare export default (number) => number"] + ["declare export default (number) => number;"]; + "export_default_function_declaration" + >:: mk_signature_generator_test + ["export default function foo(): void { }"] + ["declare function foo(): void;"; "export {foo as default};"]; + "export_default_class_declaration" + >:: mk_signature_generator_test + ["export default class C { x: number = 0; }"] + ["declare class C {x: number}"; "export {C as default};"]; + "export_default_class_declaration_with_private_fields" + >:: mk_signature_generator_test + ["export default class C { #x: number = 0; }"] + ["declare class C {}"; "export {C as default};"]; + "declare_export_default_function_declaration" + >:: mk_signature_generator_test + ["declare export default function foo(): void;"] + ["declare function foo(): void;"; "export {foo as default};"]; + "declare_export_default_class_declaration" + >:: mk_signature_generator_test + ["declare export default class C { x: number; }"] + ["declare class C {x: number}"; "export {C as default};"]; + "export_function_declaration" + >:: mk_signature_generator_test + ["export function foo(): void { }"] + ["declare function foo(): void;"; "export {foo};"]; + "export_class_declaration" + >:: mk_signature_generator_test + ["export class C { x: number = 0; }"] + ["declare class C {x: number}"; "export {C};"]; + "declare_export_function_declaration" + >:: mk_signature_generator_test + ["declare export function foo(): void;"] + ["declare function foo(): void;"; "export {foo};"]; + "declare_export_class_declaration" + >:: mk_signature_generator_test + ["declare export class C { x: number; }"] + ["declare class C {x: number}"; "export {C};"]; + "export_specifier" + >:: mk_signature_generator_test + ["var x: number = 0"; "export { x }"] + ["declare var x: number;"; "export {x};"]; + "export_specifier_local" + >:: mk_signature_generator_test + ["var x: number = 0"; "export { x as x2 }"] + ["declare var x: number;"; "export {x as x2};"]; + "export_specifier_remote" + >:: mk_signature_generator_test + ["export { k } from './foo'"] + ["export {k} from \"./foo\";"]; + "export_specifier_remote_local1" + >:: mk_signature_generator_test + ["export { k as k2 } from './foo'"] + ["export {k as k2} from \"./foo\";"]; + "export_specifier_remote_local2" + >:: mk_signature_generator_test + ["function k() { }"; "export { k as k2 } from './foo'"] + ["export {k as k2} from \"./foo\";"]; + "export_specifier_remote_local3" + >:: mk_signature_generator_test + ["export function k() { }"; "export { k as k2 } from './foo'"] + ["declare function k(): void;"; "export {k};"; "export {k as k2} from \"./foo\";"]; + "export_batch" + >:: mk_signature_generator_test ["export * from './foo'"] ["export * from \"./foo\";"]; + "export_batch_local" + >:: mk_signature_generator_test + ["export * as Foo from './foo'"] + ["export * as Foo from \"./foo\";"]; + "import_default" + >:: mk_signature_generator_test + ["import C from './foo'"; "declare module.exports: C"] + ["import C from \"./foo\";"; "declare module.exports: C;"]; + "import_specifier" + >:: mk_signature_generator_test + ["import { C } from './foo'"; "declare module.exports: C"] + ["import {C} from \"./foo\";"; "declare module.exports: C;"]; + "import_specifier_local" + >:: mk_signature_generator_test + ["import { C as C2 } from './foo'"; "declare module.exports: C2"] + ["import {C as C2} from \"./foo\";"; "declare module.exports: C2;"]; + "import_specifier_local_dead" + >:: mk_signature_generator_test + ["import { C as C2 } from './foo'"; "declare 
module.exports: C"] + ["declare module.exports: C;"]; + "import_batch" + >:: mk_signature_generator_test + ["import * as Foo from './foo'"; "declare module.exports: Foo.C"] + ["import * as Foo from \"./foo\";"; "declare module.exports: Foo.C;"]; + "import_type_default" + >:: mk_signature_generator_test + ["import type C from './foo'"; "declare module.exports: C"] + ["import type C from \"./foo\";"; "declare module.exports: C;"]; + "import_type_specifier" + >:: mk_signature_generator_test + ["import type { T } from './foo'"; "declare module.exports: T"] + ["import type {T} from \"./foo\";"; "declare module.exports: T;"]; + "import_type_specifier2" + >:: mk_signature_generator_test + ["import { type T } from './foo'"; "declare module.exports: T"] + [ + "import type {T} from \"./foo\";"; + (* TODO: change of specifier kind *) + "declare module.exports: T;"; + ]; + "import_type_specifier_local" + >:: mk_signature_generator_test + ["import type { T as T2 } from './foo'"; "declare module.exports: T2"] + ["import type {T as T2} from \"./foo\";"; "declare module.exports: T2;"]; + "import_type_specifier_local2" + >:: mk_signature_generator_test + ["import { type T as T2 } from './foo'"; "declare module.exports: T2"] + ["import type {T as T2} from \"./foo\";"; "declare module.exports: T2;"]; + "import_type_specifier_local_dead" + >:: mk_signature_generator_test + ["import type { T as T2 } from './foo'"; "declare module.exports: T"] + ["declare module.exports: T;"]; + "import_typeof_specifier" + >:: mk_signature_generator_test + ["import { typeof x as T2 } from './foo'"; "declare module.exports: T2"] + ["import typeof {x as T2} from \"./foo\";"; "declare module.exports: T2;"]; + "import_dynamic" >:: mk_signature_generator_test ["import './foo'"] []; + "require" + >:: mk_signature_generator_test + ["const Foo = require('./foo')"; "declare module.exports: Foo.C"] + ["const Foo = require(\"./foo\");"; "declare module.exports: Foo.C;"]; + "require_destructured" + >:: mk_signature_generator_test + ["const { C } = require('./foo')"; "declare module.exports: C"] + ["const {C} = require(\"./foo\");"; "declare module.exports: C;"]; + "require_destructured_local" + >:: mk_signature_generator_test + ["const { C: C2 } = require('./foo')"; "declare module.exports: C2"] + ["const {C: C2} = require(\"./foo\");"; "declare module.exports: C2;"]; + "require_destructured_deep" + >:: mk_signature_generator_test + [ + "const { C: C2, D: { E: E2 } } = require('./foo')"; + "declare module.exports: [ C2, E2 ]"; + ] + [ + "const {C: C2} = require(\"./foo\");"; + "const {D: {E: E2}} = require(\"./foo\");"; + "declare module.exports: [C2, E2];"; + ]; + "require_destructured_local_dead" + >:: mk_signature_generator_test + ["const { C: C2 } = require('./foo')"; "declare module.exports: C"] + ["declare module.exports: C;"]; + "composite" + >:: mk_signature_generator_test + [ + "export type T = number"; + "type U = T"; + (* reachable *) + "import { type V } from './foo'"; + (* dead *) + "type W = [U, V]"; + (* dead *) + "function foo() { return [0, 0]; }"; + (* dead *) + "class B { +x: T = 0; m() { (foo(): W); } }"; + (* reachable, but as declaration *) + "export interface A { +x: U; }"; + "module.exports = function(x: B): A { return x; }"; + ] + [ + "type T = number;"; + "type U = T;"; + ""; + (* TODO: pretty printing adds newlines for dead stuff *) + "declare class B {+x: T, m(): void}"; + "interface A {+x: U}"; + "export type {T};"; + ""; + "export type {A};"; + "declare module.exports: (x: B) => A;"; + ]; + "class_statics" + 
>:: mk_signature_generator_test + ["export class C {"; " static x: number = 0;"; " static foo(): void { }"; "}"] + ["declare class C {static x: number, static foo(): void}"; "export {C};"]; + "class_statics2" + >:: mk_signature_generator_test + ["export class C {"; " foo: () => void;"; " static foo(): void { }"; "}"] + ["declare class C {foo: () => void, static foo(): void}"; "export {C};"]; + "class_implements" + >:: mk_signature_generator_test + [ + "interface I {"; + " foo(x?: string): void;"; + "}"; + "export class C implements I {"; + " foo(x?: string): void { }"; + "}"; + ] + [ + "interface I {foo(x?: string): void}"; + "declare class C implements I {foo(x?: string): void}"; + "export {C};"; + ]; + "class_extends_error" + >:: mk_signature_generator_test + ["export class C extends (undefined: any) { }"] + ["declare class C extends $TEMPORARY$Super$FlowFixMe {}"; "export {C};"]; + "function_overloading" + >:: mk_signature_generator_test + [ + "declare function foo(x: T): void;"; + "declare function foo(x: T): void;"; + "export function foo(x: T): void { }"; + ] + [ + "declare function foo(x: T): void;"; + "declare function foo(x: T): void;"; + "declare function foo(x: T): void;"; + "export {foo};"; + ]; + "function_overloading2" + >:: mk_signature_generator_test + [ + "declare export function foo(x?: null, y?: null): void;"; + "declare export function foo(x: null, y?: null): void;"; + ] + [ + "declare function foo(x?: null, y?: null): void;"; + "declare function foo(x: null, y?: null): void;"; + "export {foo};"; + ]; + "opaque_type" + >:: mk_signature_generator_test + [ + "declare export opaque type T1"; + "declare export opaque type T2: number"; + "opaque type T3 = number"; + (* dead *) + "export opaque type T4: number = T3"; + "opaque type T5 = number"; + "export opaque type T6: T5 = number"; + "export opaque type T7 = number;"; + ] + [ + "opaque type T1;"; + "opaque type T2: number;"; + "opaque type T3 = number;"; + "opaque type T4: number = T3;"; + "opaque type T5 = number;"; + "opaque type T6: T5 = number;"; + "opaque type T7 = number;"; + "export type {T1};"; + "export type {T2};"; + ""; + "export type {T4};"; + ""; + "export type {T6};"; + "export type {T7};"; + ]; + "import_then_destructure" + >:: mk_signature_generator_test + ["import Foo from 'foo';"; "const { Bar } = Foo;"; "module.exports = Bar;"] + [ + "import Foo from \"foo\";"; + "declare var Bar: typeof Foo.Bar;"; + "declare module.exports: typeof Bar;"; + ]; + "import_then_destructure2" + >:: mk_signature_generator_test + ["import Foo from 'foo';"; "const { Foo: Bar } = { Foo };"; "module.exports = Bar;"] + [ + "import Foo from \"foo\";"; + "declare var Bar: typeof $1.Foo;"; + "declare var $1: $TEMPORARY$object<{|Foo: typeof Foo|}>;"; + "declare module.exports: typeof Bar;"; + ]; + "optional_param" + >:: mk_signature_generator_test + ["module.exports = function(x?: number) { }"] + ["declare module.exports: (x?: number) => void;"]; + "optional_param_default" + >:: mk_signature_generator_test + ["module.exports = function(x: number = 0) { }"] + ["declare module.exports: (x?: number) => void;"]; + "optional_destructured_param_default" + >:: mk_signature_generator_test + ["module.exports = function({ x }: { x: number } = { x: 0 }) { }"] + ["declare module.exports: (_?: {x: number}) => void;"]; + "array_summary_number" + >:: mk_signature_generator_test + ["module.exports = [1, 2, 3]"] + [ + "declare module.exports: $TEMPORARY$array<"; + " $TEMPORARY$number<1> | $TEMPORARY$number<2> | $TEMPORARY$number<3>,"; + ">;"; + ]; + 
"array_summary_array" + >:: mk_signature_generator_test + ["module.exports = [[1, 2], [3]]"] + [ + "declare module.exports: $TEMPORARY$array<"; + " "; + " | $TEMPORARY$array<$TEMPORARY$number<1> | $TEMPORARY$number<2>>"; + " | $TEMPORARY$array<$TEMPORARY$number<3>>,"; + ">;"; + ]; + "array_summary_object" + >:: mk_signature_generator_test + ["module.exports = [{ x: 1 }, { x: 2 }]"] + [ + "declare module.exports: $TEMPORARY$array<"; + " "; + " | $TEMPORARY$object<{|x: $TEMPORARY$number<1>|}>"; + " | $TEMPORARY$object<{|x: $TEMPORARY$number<2>|}>,"; + ">;"; + ]; + "array_summary_object_array" + >:: mk_signature_generator_test + ["module.exports = [{ x: [1, 2] }, { x: [3] }]"] + [ + "declare module.exports: $TEMPORARY$array<"; + " "; + " | $TEMPORARY$object<"; + " {|x: $TEMPORARY$array<$TEMPORARY$number<1> | $TEMPORARY$number<2>>|},"; + " >"; + " | $TEMPORARY$object<{|x: $TEMPORARY$array<$TEMPORARY$number<3>>|}>,"; + ">;"; + ]; + "frozen_object" + >:: mk_signature_generator_test + ["module.exports = Object.freeze({ foo: 42, bar: 'hello' })"] + [ + "declare module.exports: $TEMPORARY$Object$freeze<"; + " {|foo: $TEMPORARY$number<42>, bar: $TEMPORARY$string<'hello'>|},"; + ">;"; + ]; + "fbt_empty_open_close" + >:: mk_signature_generator_test + ~facebook_fbt:(Some "FbtElement") + ["module.exports = "] + ["declare module.exports: FbtElement;"]; + "fbt_empty_open" + >:: mk_signature_generator_test + ~facebook_fbt:(Some "FbtElement") + ["module.exports = "] + ["declare module.exports: FbtElement;"]; + "fbt_with_child" + >:: mk_signature_generator_test + ~facebook_fbt:(Some "FbtElement") + ["function foo(){}"; "module.exports = "] + ["declare module.exports: FbtElement;"]; + "keyMirror" + >:: mk_signature_generator_test + ~facebook_keyMirror:true + ["module.exports = keyMirror({"; " a: null,"; " b: null,"; "})"] + ["declare module.exports: $TEMPORARY$object<{|a: 'a', b: 'b'|}>;"]; + "unusual_cjs_exports1" + >:: mk_signature_generator_test + ["exports.wut = 'dead';"; "module.exports = { x: 42 };"] + ["declare module.exports: $TEMPORARY$object<{|x: $TEMPORARY$number<42>|}>;"]; + "unusual_cjs_exports2" + >:: mk_signature_generator_test + ["module.exports = { x: 42 };"; "module.exports.wut = 'wut';"] + [ + "declare module.exports: $TEMPORARY$module$exports$assign<"; + " $TEMPORARY$object<{|x: $TEMPORARY$number<42>|}>,"; + " {wut: $TEMPORARY$string<'wut'>, ...},"; + ">;"; + ]; + "unusual_cjs_exports3" + >:: mk_signature_generator_test + [ + "module.exports = { x: 0xdead };"; + "module.exports.wut = 'dead';"; + "module.exports = { x: 42 };"; + "module.exports.wut = 'wut';"; + ] + [ + "declare module.exports: $TEMPORARY$module$exports$assign<"; + " $TEMPORARY$object<{|x: $TEMPORARY$number<42>|}>,"; + " {wut: $TEMPORARY$string<'wut'>, ...},"; + ">;"; + ]; + "function_statics" + >:: mk_signature_generator_test + [ + "function bar(): void { };"; + "const x = 42;"; + "bar.x = x;"; + "module.exports = bar;"; + ] + [ + "declare var bar: $TEMPORARY$function<() => void, {x: typeof x, ...}>;"; + "declare var x: $TEMPORARY$number<42>;"; + ""; + "declare module.exports: typeof bar;"; + ]; + "function_predicates1" + >:: mk_signature_generator_test + [ + "function foo(str: ?string): boolean %checks {"; + "return str == null || str === '';"; + "}"; + "module.exports = foo;"; + ] + [ + "declare function foo(str: ?string): boolean %checks(str == null || str === \"\");"; + "declare module.exports: typeof foo;"; + ]; + "function_predicates2" + >:: mk_signature_generator_test + [ + "declare function foo(str: ?string): 
boolean %checks(str == null || str === '');"; + "module.exports = foo;"; + ] + [ + "declare function foo(str: ?string): boolean %checks(str == null || str === \"\");"; + "declare module.exports: typeof foo;"; + ]; + "function_predicates2" + >:: mk_signature_generator_test + [ + "function foo1(x: ?string): boolean %checks { return x == null || x === ''; };"; + "function foo2(x: ?string): boolean %checks { return foo1(x); }"; + "module.exports = foo2;"; + ] + [ + "declare function foo1(x: ?string): boolean %checks(x == null || x === \"\");"; + "declare function foo2(x: ?string): boolean %checks(foo1(x));"; + "declare module.exports: typeof foo2;"; + ]; + "function_predicates3" + >:: mk_signature_generator_test + [ + "class A {};"; + "function foo(x: mixed): boolean %checks { return x instanceof A; };"; + "module.exports = foo;"; + ] + [ + "declare class A {}"; + "declare function foo(x: mixed): boolean %checks(x instanceof A);"; + "declare module.exports: typeof foo;"; + ]; + "function_predicates4" + >:: mk_signature_generator_test + [ + "function foo(x: mixed): boolean %checks { return typeof x === \"number\"; };"; + "const obj = { foo };"; + "function bar(x: mixed): boolean %checks { return obj.foo(x); };"; + "module.exports = bar;"; + ] + [ + "declare function foo(x: mixed): boolean %checks(typeof x === \"number\");"; + "declare var obj: $TEMPORARY$object<{|foo: typeof foo|}>;"; + "declare function bar(x: mixed): boolean %checks(obj.foo(x));"; + "declare module.exports: typeof bar;"; + ]; + "destructure_annot" + >:: mk_signature_generator_test + ["var { a }: { a: number } = { a: 0 };"; "module.exports = a"] + [ + "declare var a: typeof $1.a;"; + "declare var $1: {a: number};"; + "declare module.exports: typeof a;"; + ]; + "destructure_annot2" + >:: mk_signature_generator_test + ["var { a: x }: { a: number } = { a: 0 };"; "module.exports = x"] + [ + "declare var x: typeof $1.a;"; + "declare var $1: {a: number};"; + "declare module.exports: typeof x;"; + ]; + "async_function_1" + >:: mk_signature_generator_test + ["async function foo() {};"; "module.exports = foo"] + ["declare function foo(): Promise;"; "declare module.exports: typeof foo;"]; + "async_function_2" + >:: mk_signature_generator_test + ["module.exports = async () => {}"] + ["declare module.exports: () => Promise;"]; + "async_method" + >:: mk_signature_generator_test + ["class C { async m() {} };"; "module.exports = C"] + ["declare class C {m(): Promise}"; "declare module.exports: typeof C;"]; + ] + @ verified_signature_generator_tests + @ generated_signature_file_sig_tests diff --git a/src/typing/__tests__/signature_verifier_test.ml b/src/typing/__tests__/signature_verifier_test.ml new file mode 100644 index 00000000000..6d5f4d222f6 --- /dev/null +++ b/src/typing/__tests__/signature_verifier_test.ml @@ -0,0 +1,513 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open OUnit2 + +let parse contents = + let parse_options = + Some + { + Parser_env.default_parse_options with + Parser_env.esproposal_class_instance_fields = true; + Parser_env.esproposal_class_static_fields = true; + Parser_env.esproposal_export_star_as = true; + } + in + let (ast, _errors) = Parser_flow.program ~parse_options contents in + ast + +let eq printer v1 v2 = printer v1 = printer v2 + +let name ?prevent_munge ?facebook_fbt ?ignore_static_propTypes ?facebook_keyMirror x = + (prevent_munge, facebook_fbt, ignore_static_propTypes, facebook_keyMirror, x) + +let tests_data = + [ + (name "export_number_literal", ["export default 0;"], [], []); + ( name "export_function_literal", + ["export default function(x: number): number { return x };"], + [], + [] ); + ( name "export_function_literal_check1", + ["export default function(x): number { return x };"], + ["Expected annotation at array pattern @ (1, 24) to (1, 25)"], + [] ); + ( name "export_function_literal_check2", + ["export default function(x: number) { return x };"], + ["Expected annotation at function return @ (1, 34) to (1, 34)"], + [] ); + ( name "export_function_reference", + ["function foo(x: number): number { return x }"; "export default foo;"], + [], + ["Reachable: foo"] ); + ( name "export_function_reference_check1", + ["function foo(x): number { return x }"; "export default foo;"], + ["Expected annotation at array pattern @ (1, 13) to (1, 14)"], + ["Reachable: foo"] ); + ( name "export_function_reference_check2", + ["function foo(x: number) { return x }"; "export default foo;"], + ["Expected annotation at function return @ (1, 23) to (1, 23)"], + ["Reachable: foo"] ); + (name "export_object_literal_property_literal", ["export default { p: 0 };"], [], []); + ( name "export_object_literal_property_reference", + ["var x: number = 0;"; "export default { p: x };"], + [], + ["Reachable: x"] ); + ( name "export_object_literal_property_reference_check", + ["var x = 0;"; "export default { p: x };"], + ["Expected annotation at declaration of variable `x` @ (1, 4) to (1, 5)"], + ["Reachable: x"] ); + ( name "empty_object_literal", + ["export default { };"], + ["Cannot determine types of initialized properties of an empty object @ (1, 15) to (1, 18)"], + [] ); + ( name "export_class_reference", + [ + "class C {"; + " f: number = 0;"; + " m(x: number): number { return x; }"; + "}"; + "export default C;"; + ], + [], + ["Reachable: C"] ); + ( name "export_class_reference_check1", + ["class C {"; " f = 0;"; " m(x: number): number { return x; }"; "}"; "export default C;"], + ["Expected annotation at property `f` @ (2, 2) to (2, 8)"], + ["Reachable: C"] ); + ( name "export_class_reference_check2", + ["class C {"; " f: number = 0;"; " m(x): number { return x; }"; "}"; "export default C;"], + ["Expected annotation at array pattern @ (3, 4) to (3, 5)"], + ["Reachable: C"] ); + ( name "export_class_reference_check3", + ["class C {"; " f: number = 0;"; " m(x: number) { return x; }"; "}"; "export default C;"], + ["Expected annotation at function return @ (3, 14) to (3, 14)"], + ["Reachable: C"] ); + ( name "type_alias_dependencies", + [ + "type T1 = number;"; + "type T2 = number;"; + "type T3 = number;"; + "class C {"; + " f: T1 = 0;"; + " m(x: T2): T3 { return x; }"; + "}"; + "export default C;"; + ], + [], + ["Reachable: C, T1, T2, T3"] ); + ( name "class_dependencies", + [ + "class D { f: number = 0; }"; + "class C {"; + " f: D = new D;"; + " m(x: D): D { return x; }"; + "}"; + "export default C;"; + ], + [], + ["Reachable: C, D"] ); + 
( name "class_dependencies_check", + [ + "class D { f = 0; }"; + "class C {"; + " f: D = new D;"; + " m(x: D): D { return x; }"; + "}"; + "export default C;"; + ], + ["Expected annotation at property `f` @ (1, 10) to (1, 16)"], + ["Reachable: C, D"] ); + ( name "export_new_typecast", + [ + "class D { f: number = 0; }"; + "class C {"; + " f: D = new D;"; + " m(x: D): D { return x; }"; + "}"; + "export default (new C: C);"; + ], + [], + ["Reachable: C, D"] ); + ( name "export_new_typecast_check", + [ + "class D { f = 0; }"; + "class C {"; + " f: D = new D;"; + " m(x: D): D { return x; }"; + "}"; + "export default (new C: C);"; + ], + ["Expected annotation at property `f` @ (1, 10) to (1, 16)"], + ["Reachable: C, D"] ); + ( name "recursive_dependencies", + ["class C {"; " f: C = new C;"; " m(x: C): C { return x; }"; "}"; "export default C;"], + [], + ["Reachable: C"] ); + ( name "recursive_dependencies_check", + ["class C {"; " f = new C;"; " m(x: C): C { return x; }"; "}"; "export default C;"], + ["Expected annotation at property `f` @ (2, 2) to (2, 12)"], + ["Reachable: C"] ); + ( name "typeof_dependencies", + ["var x: number = 0"; "class C {"; " p: typeof x = 0"; "}"; "export default (new C: C);"], + [], + ["Reachable: C, x"] ); + ( name "typeof_dependencies_check", + ["var x = 0"; "class C {"; " p: typeof x = 0"; "}"; "export default (new C: C);"], + ["Expected annotation at declaration of variable `x` @ (1, 4) to (1, 5)"], + ["Reachable: C, x"] ); + (name "const_initializer", ["const x = 0"; "export default { x };"], [], ["Reachable: x"]); + ( name "empty_array_literal", + ["export default [ ];"], + ["Cannot determine the element type of an empty array @ (1, 15) to (1, 18)"], + [] ); + ( name "non_empty_array_literal", + ["const x = 0"; "var y = false"; "export default [ x, y ];"], + ["Expected annotation at declaration of variable `y` @ (2, 4) to (2, 5)"], + ["Reachable: x, y"] ); + (name "void_function", ["function foo() {}"; "export default foo;"], [], ["Reachable: foo"]); + ( name "void_generator", + ["function* foo() { yield 0; }"; "export default foo;"], + ["Expected annotation at function return @ (1, 15) to (1, 15)"], + ["Reachable: foo"] ); + ( name "import_default_dependencies", + [ + "import x from './import_default_dependencies_helper';"; + "class C {"; + " p: typeof x = 0"; + "}"; + "export default (new C: C);"; + ], + [], + ["import { default } from './import_default_dependencies_helper'"; "Reachable: C, x"] ); + ( name "import_type_dependencies", + [ + "import type { T1, T2, T3 } from './import_type_dependencies_helper';"; + "class C {"; + " f: T1 = 0;"; + " m(x: T2): T3 { return x; }"; + "}"; + "export default C;"; + ], + [], + [ + "import type { T1 } from './import_type_dependencies_helper'"; + "import type { T2 } from './import_type_dependencies_helper'"; + "import type { T3 } from './import_type_dependencies_helper'"; + "Reachable: C, T1, T2, T3"; + ] ); + ( name "qualified_references", + [ + "import M1 from './qualified_references_helper';"; + "import type M2 from './qualified_references_helper';"; + "class C {"; + " m(x: M1.T): M2.T { return x; }"; + "}"; + "export default C;"; + ], + [], + [ + "import type { default } from './qualified_references_helper'"; + "import { default } from './qualified_references_helper'"; + "Reachable: C, M1, M2"; + ] ); + ( name "hoisted_requires", + [ + "const M = require('./hoisted_requires_helper');"; + "if (Math.random() < 0.5) {"; + " var { D } = require('./hoisted_requires_helper');"; + "} else {"; + " var { D } = 
require('./hoisted_requires_helper');"; + "}"; + "var D = 0;"; + "class C extends M.D {"; + " f: D = 0;"; + "}"; + "module.exports = C;"; + ], + ["Expected annotation at declaration of variable `D` @ (7, 4) to (7, 5)"], + [ + "require('./hoisted_requires_helper')"; + "require('./hoisted_requires_helper').D"; + "require('./hoisted_requires_helper').D"; + "Reachable: C, D, M"; + ] ); + ( name "hoisted_locals", + [ + "const M = require('./hoisted_locals_helper');"; + "if (Math.random() < 0.5) {"; + " var D = 0;"; + "} else {"; + " var D = false;"; + "}"; + "class C extends M.D {"; + " f: D = 0;"; + "}"; + "module.exports = C;"; + ], + [ + "Unexpected toplevel definition that needs hoisting @ (3, 2) to (3, 12)"; + "Unexpected toplevel definition that needs hoisting @ (5, 2) to (5, 16)"; + ], + ["require('./hoisted_locals_helper')"; "Reachable: C, D, M"] ); + ( name "dynamic_requires", + ["module.exports = require('./dynamic_requires_helper');"], + [], + ["require('./dynamic_requires_helper')"] ); + ( name "scope_extrusion", + [ + "{"; + " class C {}"; + " var x: C = new C;"; + "}"; + "class C {"; + " f = 0;"; + "}"; + "module.exports = x;"; + ], + ["Unexpected toplevel definition that needs hoisting @ (3, 2) to (3, 19)"], + ["Reachable: x"] ); + ( name "scope_extrusion_nested", + [ + "{"; + " class C {}"; + " let y = 0;"; + " if (b) {"; + " var x: C = new C;"; + " }"; + "}"; + "class C {"; + " f = 0;"; + "}"; + "module.exports = { x, y };"; + ], + ["Unexpected toplevel definition that needs hoisting @ (5, 4) to (5, 21)"], + ["global value: y"; "Reachable: x"] ); + ( name "report_all_errors", + [ + "class A {"; + " f = (x: number) => x; // C"; + "}"; + "module.exports = {"; + " a: A, // A"; + " b: (x: string) => x, // B"; + "};"; + ], + [ + "Expected annotation at property `f` @ (2, 2) to (2, 23)"; + "Expected annotation at function return @ (6, 16) to (6, 16)"; + ], + ["Reachable: A"] ); + ( name "munged_methods_ignored", + ["class C {"; " _method() { return 1; }"; "}"; "export default C;"], + [], + ["Reachable: C"] ); + ( name "munged_methods_not_ignored_if_directive" ~prevent_munge:true, + ["class C {"; " _method() { return 1; }"; "}"; "export default C;"], + ["Expected annotation at function return @ (2, 11) to (2, 11)"], + ["Reachable: C"] ); + ( name "munged_fields_ignored", + ["class C {"; " _method = () => { return 1; }"; "}"; "export default C;"], + [], + ["Reachable: C"] ); + ( name "munged_fields_not_ignored_if_directive" ~prevent_munge:true, + ["class C {"; " _method = () => { return 1; }"; "}"; "export default C;"], + ["Expected annotation at property `_method` @ (2, 2) to (2, 31)"], + ["Reachable: C"] ); + ( name "propTypes_static_ignored" ~ignore_static_propTypes:true, + ["class C {"; " static propTypes = {}"; "}"; "export default C;"], + [], + ["Reachable: C"] ); + ( name "propTypes_member_failure", + ["class C {"; " propTypes = {}"; "}"; "export default C;"], + ["Expected annotation at property `propTypes` @ (2, 2) to (2, 16)"], + ["Reachable: C"] ); + ( name "array_spread", + ["module.exports = [1, ...[2, 3], 4]"], + ["Unexpected array spread @ (1, 21) to (1, 30)"], + [] ); + ( name "array_hole", + ["module.exports = [,]"], + ["Unexpected array hole @ (1, 17) to (1, 20)"], + [] ); + (name "object_spread", ["module.exports = { x: 'x', ...{ y: 'y' }, z: 'z' }"], [], []); + (name "reference_expression1", ["module.exports = Number.NaN"], [], ["global value: Number"]); + ( name "reference_expression2", + ["module.exports = 'x'.length"], + ["Cannot determine the type of this member 
expression @ (1, 17) to (1, 27)"], + [] ); + (name "arith_expression1", ["module.exports = 6*7"], [], []); + ( name "arith_expression2", + ["module.exports = 6+7"], + ["Cannot determine the type of this binary expression @ (1, 17) to (1, 20)"], + [] ); + (name "named_class_expression", ["module.exports = class C { }"], [], []); + (name "named_function_expression", ["module.exports = function foo() { }"], [], []); + ( name "interface_coverage", + ["declare interface Foo { }"; "declare export class C {"; " foo: Foo;"; "}"], + [], + ["Reachable: C, Foo"] ); + ( name "bound_coverage", + ["type Foo = number"; "export type T = <X: Foo>(X) => void"], + [], + ["Reachable: Foo, T"] ); + (name "recursive_class_coverage", ["module.exports = class C { x: C; }"], [], []); + ( name "shadowed_class_expression", + ["class C { }"; "module.exports = class C { }"], + ["Unexpected toplevel definition that needs hoisting @ (2, 23) to (2, 24)"], + [] ); + (name "frozen_object", ["module.exports = Object.freeze({ foo: 42, bar: 'hello' })"], [], []); + ( name "fbt_empty_open_close" ~facebook_fbt:(Some "FbtElement"), + ["module.exports = <fbt></fbt>"], + [], + [] ); + (name "fbt_empty_open" ~facebook_fbt:(Some "FbtElement"), ["module.exports = <fbt>"], [], []); + ( name "fbt_with_child" ~facebook_fbt:(Some "FbtElement"), + ["function foo(){}"; "module.exports = <fbt desc={foo()}></fbt>"], + [], + [] ); + ( name "keymirror" ~facebook_keyMirror:true, + ["module.exports = keyMirror({"; " a: null,"; " b: null,"; "})"], + [], + [] ); + ( name "jsx_div", + ["module.exports = <div></div>
"], + ["Cannot determine the type of this JSX element @ (1, 17) to (1, 28)"], + [] ); + ( name "function_return", + ["var n = false;"; "export function foo(x: X) { return 1; };"], + [ + "Expected annotation at declaration of variable `n` @ (1, 4) to (1, 5)"; + "Expected annotation at function return @ (2, 38) to (2, 38)"; + ], + ["Reachable: foo, n"] ); + ( name "function_return_2", + ["var n = false;"; "export function bar(x: (typeof n) => void) { return 1; };"], + [ + "Expected annotation at declaration of variable `n` @ (1, 4) to (1, 5)"; + "Expected annotation at function return @ (2, 42) to (2, 42)"; + ], + ["Reachable: bar, n"] ); + ( name "function_statics", + ["function bar(): void { };"; "const x = 42;"; "bar.x = x;"; "module.exports = bar;"], + [], + ["Reachable: bar, x"] ); + ( name "function_predicates_1", + [ + "class A {}"; + "export function foo(x: mixed): boolean %checks {"; + " return x === new A;"; + "}"; + ], + ["Unsupported predicate expression @ (3, 15) to (3, 20)"], + ["Reachable: foo"] ); + ( name "function_predicates_2", + [ + "declare function bar(x: mixed): boolean %checks(x === null);"; + "export function foo(x: mixed): boolean %checks {"; + " return bar(x);"; + "}"; + ], + [], + ["Reachable: bar, foo"] ); + ( name "function_predicates_3", + [ + "function bar(x: mixed): %checks { return x === null; }"; + "declare export function foo(x: mixed): boolean %checks(bar(x));"; + ], + ["Expected annotation at function return @ (1, 31) to (1, 31)"], + ["Reachable: bar, foo"] ); + ( name "function_predicates_4", + [ + "function one() { return 1; }"; + "const n = one()"; + "export function isOne(x: mixed): boolean %checks {"; + " return x === n;"; + "}"; + ], + ["Cannot determine the type of this call expression @ (2, 10) to (2, 15)"], + ["Reachable: isOne, n"] ); + ( name "function_predicates_5", + [ + "const one = 1;"; + "export function isOne(x: mixed): boolean %checks {"; + " return x === one;"; + "}"; + ], + [], + ["Reachable: isOne, one"] ); + ( name "async_function_1", + ["async function foo() {};"; "module.exports = foo;"], + [], + ["Reachable: foo"] ); + ( name "async_function_2", + ["async function foo() { return 1; };"; "module.exports = foo;"], + ["Expected annotation at function return @ (1, 20) to (1, 20)"], + ["Reachable: foo"] ); + ( name "async_function_3", + ["module.exports = async () => await 1;"], + ["Expected annotation at function return @ (1, 25) to (1, 25)"], + [] ); + ] + +let mk_signature_verifier_test + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + expected_msgs + ctxt = + let contents = String.concat "\n" contents in + let signature = + match Signature_builder.program ~module_ref_prefix:None (parse contents) with + | Ok signature -> signature + | Error _ -> failwith "Signature builder failure!" 
+ in + let (errors, remote_dependencies, env) = + Signature_builder.Signature.verify + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + signature + in + let error_msgs = + Core_list.map ~f:Signature_builder_deps.Error.debug_to_string + @@ Signature_builder_deps.PrintableErrorSet.elements errors + in + let remote_dependency_msgs = + Core_list.map ~f:Signature_builder_deps.Dep.to_string + @@ Signature_builder_deps.DepSet.elements remote_dependencies + in + let reachable_msg_opt = + if SMap.is_empty env then + [] + else + [Printf.sprintf "Reachable: %s" @@ String.concat ", " @@ SMap.ordered_keys env] + in + let msgs = error_msgs @ remote_dependency_msgs @ reachable_msg_opt in + let printer = String.concat "; " in + assert_equal ~ctxt ~cmp:(eq printer) ~printer ~msg:"Results don't match!" expected_msgs msgs + +let tests = + "signature_verifier" + >::: Core_list.map + ~f: + (fun ( (prevent_munge, facebook_fbt, ignore_static_propTypes, facebook_keyMirror, name), + contents, + error_msgs, + other_msgs ) -> + name + >:: mk_signature_verifier_test + ?prevent_munge + ?facebook_fbt + ?ignore_static_propTypes + ?facebook_keyMirror + contents + (error_msgs @ other_msgs)) + tests_data diff --git a/src/typing/__tests__/test.ml b/src/typing/__tests__/test.ml deleted file mode 100644 index ff6f64a656e..00000000000 --- a/src/typing/__tests__/test.ml +++ /dev/null @@ -1,14 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open OUnit2 - -let tests = "typing" >::: [ - Typed_ast_test.tests; -] - -let () = run_test_tt_main tests diff --git a/src/typing/__tests__/typed_ast_test.ml b/src/typing/__tests__/typed_ast_test.ml index de7ec62334c..33ed6e09bad 100644 --- a/src/typing/__tests__/typed_ast_test.ml +++ b/src/typing/__tests__/typed_ast_test.ml @@ -1,67 +1,81 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open OUnit2 +module File_sig = File_sig.With_Loc (* pretty much copied from Flow_dot_js *) -let metadata = { Context. 
- (* local *) - checked = true; - munge_underscores = false; - verbose = None; - weak = false; - jsx = Options.Jsx_react; - strict = false; - strict_local = false; - - (* global *) - max_literal_length = 100; - enable_const_params = false; - enforce_strict_call_arity = true; - esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; - esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; - esproposal_decorators = Options.ESPROPOSAL_ENABLE; - esproposal_export_star_as = Options.ESPROPOSAL_ENABLE; - esproposal_optional_chaining = Options.ESPROPOSAL_ENABLE; - esproposal_nullish_coalescing = Options.ESPROPOSAL_ENABLE; - facebook_fbt = None; - ignore_non_literal_requires = false; - max_trace_depth = 0; - max_workers = 0; - root = Path.dummy_path; - strip_root = true; - suppress_comments = []; - suppress_types = SSet.empty; -} +let metadata = + { + Context.checked (* local *) = true; + munge_underscores = false; + verbose = None; + weak = false; + jsx = Options.Jsx_react; + strict = false; + strict_local = false; + include_suppressions = false; + (* global *) + max_literal_length = 100; + enable_const_params = false; + enable_enums = true; + enforce_strict_call_arity = true; + esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; + esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; + esproposal_decorators = Options.ESPROPOSAL_WARN; + esproposal_export_star_as = Options.ESPROPOSAL_ENABLE; + esproposal_optional_chaining = Options.ESPROPOSAL_ENABLE; + esproposal_nullish_coalescing = Options.ESPROPOSAL_ENABLE; + exact_by_default = false; + facebook_fbs = None; + facebook_fbt = None; + haste_module_ref_prefix = None; + ignore_non_literal_requires = false; + max_trace_depth = 0; + max_workers = 0; + recursion_limit = 10000; + root = Path.dummy_path; + strip_root = true; + suppress_comments = []; + suppress_types = SSet.empty; + default_lib_dir = None; + trust_mode = Options.NoTrust; + type_asserts = false; + } (* somewhat copied from Flow_dot_js *) let parse_content file content = - let parse_options = Some Parser_env.({ - esproposal_class_instance_fields = true; - esproposal_class_static_fields = true; - esproposal_decorators = true; - esproposal_export_star_as = true; - esproposal_optional_chaining = true; - esproposal_nullish_coalescing = true; - types = true; - use_strict = false; - }) in - let ast, parse_errors = + let parse_options = + Some + Parser_env. + { + enums = true; + esproposal_class_instance_fields = true; + esproposal_class_static_fields = true; + esproposal_decorators = true; + esproposal_export_star_as = true; + esproposal_optional_chaining = true; + esproposal_nullish_coalescing = true; + types = true; + use_strict = false; + } + in + let (ast, _parse_errors) = Parser_flow.program_file ~fail:false ~parse_options content (Some file) in - assert_equal parse_errors []; - match File_sig.program ~ast with - | Ok fsig -> ast, fsig - | Error _ -> assert_failure "File_sig.program failed" + match File_sig.program ~ast ~module_ref_prefix:None with + | Ok fsig -> Ok (ast, fsig) + | Error e -> Error e (* copied from Type_inference_js *) (* TODO: consider whether require tvars are necessary, and if not, take this out *) let add_require_tvars = let add cx desc loc = + let loc = ALoc.of_loc loc in let reason = Reason.mk_reason desc loc in let t = Tvar.mk cx reason in Context.add_require cx loc t @@ -71,96 +85,204 @@ let add_require_tvars = module`s (for now). 
This won't fly forever so at some point we'll need to move `declare module` storage into the modulemap just like normal modules and merge them as such. *) + let loc = ALoc.of_loc loc in let reason = Reason.mk_reason desc loc in let t = Flow_js.get_builtin cx m_name reason in Context.add_require cx loc t in fun cx file_sig -> - let open File_sig in - SMap.iter (fun mref locs -> - let desc = Reason.RCustom mref in - Nel.iter (add cx desc) locs - ) (require_loc_map file_sig.module_sig); - SMap.iter (fun _ (_, module_sig) -> - SMap.iter (fun mref locs -> - let m_name = Reason.internal_module_name mref in - let desc = Reason.RCustom mref in - Nel.iter (add_decl cx m_name desc) locs - ) (require_loc_map module_sig) - ) file_sig.declare_modules - -let lib_before_and_after_stmts file_name = + File_sig.( + SMap.iter + (fun mref locs -> + let desc = Reason.RCustom mref in + Nel.iter (add cx desc) locs) + (require_loc_map file_sig.module_sig); + SMap.iter + (fun _ (_, module_sig) -> + SMap.iter + (fun mref locs -> + let m_name = Reason.internal_module_name mref in + let desc = Reason.RCustom mref in + Nel.iter (add_decl cx m_name desc) locs) + (require_loc_map module_sig)) + file_sig.declare_modules) + +let before_and_after_stmts file_name = let content = Sys_utils.cat file_name in let file_key = File_key.LibFile file_name in - let (_, stmts, _), file_sig = parse_content file_key content in - let cx = - let sig_cx = Context.make_sig () in - Context.make sig_cx metadata file_key Files.lib_module_ref - in - Flow_js.mk_builtins cx; - Flow_js.Cache.clear (); - add_require_tvars cx file_sig; - let module_scope = Scope.fresh () in - Env.init_env cx module_scope; - let t_stmts = - try - Statement.toplevel_decls cx stmts; - Statement.toplevels cx stmts - with - | Abnormal.Exn (Abnormal.Stmts t_stmts, Abnormal.Throw) -> t_stmts - | _ -> assert_failure "constraint generation raised unexpected exception" - in - stmts, t_stmts + match parse_content file_key content with + | Error e -> Error e + | Ok ((_, stmts, _), file_sig) -> + let cx = + let sig_cx = Context.make_sig () in + let aloc_table = Utils_js.FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in + Context.make + sig_cx + metadata + file_key + aloc_table + rev_table + Files.lib_module_ref + Context.Checking + in + Flow_js.mk_builtins cx; + Flow_js.Cache.clear (); + add_require_tvars cx file_sig; + let module_scope = Scope.fresh () in + Env.init_env cx module_scope; + let stmts = Core_list.map ~f:Ast_loc_utils.loc_to_aloc_mapper#statement stmts in + let t_stmts = + try + Statement.toplevel_decls cx stmts; + Statement.toplevels cx stmts + with + | Abnormal.Exn (Abnormal.Stmts t_stmts, _) -> t_stmts + | Abnormal.Exn (Abnormal.Stmt t_stmt, _) -> [t_stmt] + | Abnormal.Exn (Abnormal.Expr (annot, t_expr), _) -> + [ + ( annot, + Flow_ast.Statement.Expression + { Flow_ast.Statement.Expression.expression = t_expr; directive = None } ); + ] + | e -> + let e = Exception.wrap e in + let message = Exception.get_ctor_string e in + let stack = Exception.get_backtrace_string e in + assert_failure (Utils_js.spf "Exception: %s\nStack:\n%s\n" message stack) + in + Ok (stmts, t_stmts) + +class ['a, 'b] loc_none_mapper = + object + inherit ['a, 'b, Loc.t, Loc.t] Flow_polymorphic_ast_mapper.mapper + method on_loc_annot (_x : 'a) = Loc.none -class ['a] loc_none_mapper = object(_) - inherit [Loc.t, 'a, Loc.t, Loc.t] Flow_polymorphic_ast_mapper.mapper - method on_loc_annot (_x: Loc.t) = Loc.none - method on_type_annot (_x: 'a) = Loc.none -end + 
method on_type_annot (_x : 'b) = Loc.none + end + +class aloc_mapper = + object + inherit [ALoc.t, ALoc.t * Type.t, ALoc.t, ALoc.t] Flow_polymorphic_ast_mapper.mapper -let generate_stmts_layout stmts = - let none_mapper = new loc_none_mapper in - let prog = Loc.none, List.map none_mapper#statement stmts, [] in - let layout = Js_layout_generator.program ~preserve_docblock:false ~checksum:None prog in - layout |> Pretty_printer.print ~source_maps:None |> Source.contents + method on_loc_annot x = x + + method on_type_annot (x, _) = x + end let diff_dir = - let flowconfig_name = Server_files_js.default_flowconfig_name in let tmp_dir = FlowConfig.temp_dir FlowConfig.empty_config in - let root = CommandUtils.guess_root flowconfig_name (Some "flow/tests") in Random.self_init (); let extension = Printf.sprintf "typed_ast_test_%d" (Random.int 0x3FFFFFFF) in - Server_files_js.file_of_root extension ~flowconfig_name ~tmp_dir root - -let check_structural_equality (stmts1, stmts2) = - let diff_output : int option ref = ref None in - let err : exn option ref = ref None in - begin try - Disk.mkdir_p diff_dir; - let stmts1_file = Path.to_string (Path.concat (Path.make diff_dir) "A.js") in - let oc1 = open_out stmts1_file in - output_string oc1 (generate_stmts_layout stmts1); - close_out oc1; - let stmts2_file = Path.to_string (Path.concat (Path.make diff_dir) "B.js") in - let oc2 = open_out stmts2_file in - output_string oc2 (generate_stmts_layout stmts2); - close_out oc2; - diff_output := Some (Sys.command (Printf.sprintf "diff %s %s" stmts1_file stmts2_file)) - with e -> - err := Some e; - end; - Disk.rm_dir_tree diff_dir; - Option.iter ~f:raise (!err); - begin match !diff_output with - | None -> assert_failure "diff wasn't able to run for some reason" - | Some 0 -> () - | Some _ -> assert_failure "ASTs are different." 
- end + Filename.concat tmp_dir extension -let test_case file_name _ = - file_name |> lib_before_and_after_stmts |> check_structural_equality +let system_diff ~f prefix = + let dump_stmts filename stmts = + let stmts = f stmts in + let stmts_file = Path.to_string (Path.concat (Path.make diff_dir) filename) in + let oc = open_out stmts_file in + output_string oc stmts; + close_out oc; + stmts_file + in + fun stmts1 stmts2 -> + let result = + try + Disk.mkdir_p diff_dir; + let stmts1_file = dump_stmts (prefix ^ "_A.js") stmts1 in + let stmts2_file = dump_stmts (prefix ^ "_B.js") stmts2 in + let out_file = + prefix ^ "_diff.txt" |> Path.concat (Path.make diff_dir) |> Path.to_string + in + let cmd = Utils_js.spf "diff -U7 %s %s > %s" stmts1_file stmts2_file out_file in + match Sys.command cmd with + | 0 + | 1 -> + let chan = open_in out_file in + let s = Sys_utils.read_all chan in + Utils_js.print_endlinef "READ: %s" s; + close_in chan; + Ok s + | code -> + Utils_js.print_endlinef "diff read error code %d" code; + Error "diff wasn't able to run for some reason" + with e -> + let e = Exception.wrap e in + let msg = Exception.get_ctor_string e in + Error msg + in + Disk.rm_dir_tree diff_dir; + match result with + | Ok diff -> diff + | Error msg -> failwith msg + +let pp_diff = + let aloc_pp fmt x = Loc.pp fmt (ALoc.to_loc_exn x) in + let string_of_ast stmts = + List.map (Flow_ast.Statement.show aloc_pp aloc_pp) stmts |> String.concat "\n" + in + let string_of_src stmts = + let none_mapper = new loc_none_mapper in + let prog = (Loc.none, Core_list.map ~f:none_mapper#statement stmts, []) in + let layout = Js_layout_generator.program ~preserve_docblock:false ~checksum:None prog in + layout |> Pretty_printer.print ~source_maps:None |> Source.contents + in + fun fmt (stmts1, stmts2) -> + let ast_diff = system_diff ~f:string_of_ast "ast" stmts1 stmts2 in + let src_diff = system_diff ~f:string_of_src "src" stmts1 stmts2 in + Format.pp_print_string + fmt + ("\n" ^ "AST tree diff:\n" ^ ast_diff ^ "\n\n" ^ "Source diff:\n" ^ src_diff) + +let check_structural_equality relative_path file_name stmts1 stmts2 = + let aloc_mapper = new aloc_mapper in + let stmts2 = aloc_mapper#toplevel_statement_list stmts2 in + let path = + match Sys_utils.realpath file_name with + | Some path -> path + | None -> relative_path + in + let msg = + path + ^ ":\n" + ^ "The structure of the produced Typed AST differs from that of the parsed AST.\n\n" + ^ "To fix this do one of the following:\n" + ^ " * restore the produced Typed AST, or\n" + ^ " * include \"" + ^ relative_path + ^ "\" in the blacklist section\n" + ^ " in src/typing/__tests__/typed_ast_test.ml and file a task with the\n" + ^ " 'flow-typed-ast' tag.\n" + in + assert_equal ~pp_diff ~msg stmts1 stmts2 + +let test_case relative_path file_name _ = + match before_and_after_stmts file_name with + | Ok (s, s') -> check_structural_equality relative_path file_name s s' + | Error (File_sig.IndeterminateModuleType _) -> () -let tests = "TypedAST" >::: [ - "lib_serviceworkers" >:: test_case "flow/lib/serviceworkers.js" -] +(* This list includes files for which the produced Typed AST differs in structure + * from the parsed AST. 
*) +let blacklist = SSet.of_list ["invariant_reachability/index.js"] + +let tests = + let relative_test_dir = "flow/tests" in + let root = Option.value_exn (Sys_utils.realpath relative_test_dir) in + let files = CommandUtils.expand_file_list [relative_test_dir] in + let tests = + let slash_regex = Str.regexp_string "/" in + SSet.fold + (fun file acc -> + let relative_path = Files.relative_path root file in + if SSet.mem relative_path blacklist then + acc + else + let test_name = + relative_path |> Str.global_replace slash_regex "_" |> Filename.chop_extension + in + (test_name >:: test_case relative_path (relative_test_dir ^ "/" ^ relative_path)) :: acc) + files + [] + in + "TypedAST" >::: tests diff --git a/src/typing/__tests__/typing_tests.ml b/src/typing/__tests__/typing_tests.ml new file mode 100644 index 00000000000..7f9ec449b70 --- /dev/null +++ b/src/typing/__tests__/typing_tests.ml @@ -0,0 +1,30 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open OUnit2 + +let tests = + "typing" + >::: [Typed_ast_test.tests; Signature_verifier_test.tests; Signature_generator_test.tests] + +let _handle = + let one_gig = 1024 * 1024 * 1024 in + SharedMem_js.( + init + ~num_workers:0 + { + global_size = 0; + heap_size = 5 * one_gig; + dep_table_pow = 0; + hash_table_pow = 19; + shm_dirs = []; + shm_min_avail = one_gig / 2; + log_level = 0; + sample_rate = 0.0; + }) + +let () = run_test_tt_main tests diff --git a/src/typing/abnormal.ml b/src/typing/abnormal.ml index f300e6b7916..3ba533e78d3 100644 --- a/src/typing/abnormal.ml +++ b/src/typing/abnormal.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -25,82 +25,101 @@ let to_string = function | Continue label -> opt_label "Continue" label type payload = - | Stmt of (Loc.t, Loc.t * Type.t) Flow_ast.Statement.t - | Stmts of (Loc.t, Loc.t * Type.t) Flow_ast.Statement.t list + | Expr of ALoc.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t + | Stmt of (ALoc.t, ALoc.t * Type.t) Flow_ast.Statement.t + | Stmts of (ALoc.t, ALoc.t * Type.t) Flow_ast.Statement.t list exception Exn of payload * t open Utils_js (* called from traversal. 
abnormal indicates control flow directive encountered *) -let throw_stmt_control_flow_exception stmt abnormal = - raise (Exn (Stmt stmt, abnormal)) -let throw_stmts_control_flow_exception stmts abnormal = - raise (Exn (Stmts stmts, abnormal)) +let throw_stmt_control_flow_exception stmt abnormal = raise (Exn (Stmt stmt, abnormal)) + +let throw_stmts_control_flow_exception stmts abnormal = raise (Exn (Stmts stmts, abnormal)) + +let throw_expr_control_flow_exception loc expr abnormal = raise (Exn (Expr (loc, expr), abnormal)) (* if argument is Some abnormal, throw it *) let check_stmt_control_flow_exception = function - | stmt, None -> stmt - | stmt, Some abnormal -> throw_stmt_control_flow_exception stmt abnormal + | (stmt, None) -> stmt + | (stmt, Some abnormal) -> throw_stmt_control_flow_exception stmt abnormal + let check_stmts_control_flow_exception = function - | stmts, None -> stmts - | stmts, Some abnormal -> throw_stmts_control_flow_exception stmts abnormal + | (stmts, None) -> stmts + | (stmts, Some abnormal) -> throw_stmts_control_flow_exception stmts abnormal (* helper *) let check_env_depth depth = let new_depth = Env.env_depth () in - if new_depth = depth then () - else assert_false (spf - "env depth %d != %d after no control flow catch" - new_depth depth) + if new_depth = depth then + () + else + assert_false (spf "env depth %d != %d after no control flow catch" new_depth depth) (* catch_stmt_control_flow_exception runs a function which is expected to either - return a statement or raise Exn (Stmt _, _). The function should never raise - Exn (Stmts _, _). + return a statement or raise Exn (Stmt _, _) or Exn (Expr _, _). The function + should never raise Exn (Stmts _, _). If the function raises an + Exn (Expr _, _), an Expression statement will be constructed around the + expression. Similarly, the function passed into catch_stmt_control_flow_exception should return a statement list or raise Exn (Stmts _, _), and never raise - Exn (Stmt _, _). - For both: + Exn (Stmt _, _) or Exn (Expr _, _); and likewise + catch_expr_control_flow_exception should raise Expr and not Stmts or Stmt + For all: If the passed-in function returns an AST, then we return that AST and None. Otherwise, if it raises with some AST payload and an abnormal flow, then we return the payload AST and Some . 
*) -let - catch_stmt_control_flow_exception, - catch_stmts_control_flow_exception = +let ( catch_stmt_control_flow_exception, + catch_stmts_control_flow_exception, + catch_expr_control_flow_exception ) = let catch_control_flow_exception p f = let depth = Env.env_depth () in - try ( + try let res = f () in check_env_depth depth; - res, None - ) with + (res, None) + with | Exn (payload, abnormal) -> Env.trunc_env depth; - p payload, Some abnormal - | exn -> - raise exn + (p payload, Some abnormal) + | exn -> raise exn in - catch_control_flow_exception (function - | Stmt stmt -> stmt - | Stmts _ -> assert_false "Statement expected"), - catch_control_flow_exception (function - | Stmts stmts -> stmts - | Stmt _ -> assert_false "Statement list expected") + ( catch_control_flow_exception (function + | Stmt stmt -> stmt + (* If we catch an Expr payload, then it was generated by seeing an invariant() call + and we can reconstruct a statement around it (rather than constructing a statement + around it when we create the payload) *) + | Expr (loc, exp) -> + ( loc, + Flow_ast.Statement.Expression + { Flow_ast.Statement.Expression.expression = exp; directive = None } ) + | Stmts _ -> assert_false "Statement expected"), + catch_control_flow_exception (function + | Stmts stmts -> stmts + | Stmt _ + | Expr _ -> + assert_false "Statement list expected"), + catch_control_flow_exception (function + | Expr (_, exp) -> exp + | Stmt _ + | Stmts _ -> + assert_false "Expression expected") ) (* like check_control_flow_exception, except break statements specifying the given label (or None) are ignored *) let ignore_break_to_label label = function - | ast, Some (Break break_label) when break_label = label -> ast, None + | (ast, Some (Break break_label)) when break_label = label -> (ast, None) | result -> result (* like ignore_break_to_label, except continue statements on the same label (or None) are also ignored *) let ignore_break_or_continue_to_label label res = match ignore_break_to_label label res with - | ast, Some (Continue cont_label) when cont_label = label -> ast, None - | result -> result + | (ast, Some (Continue cont_label)) when cont_label = label -> (ast, None) + | result -> result (********************************************************************) @@ -109,32 +128,32 @@ let ignore_break_or_continue_to_label label res = module AbnormalMap : MyMap.S with type key = t = MyMap.Make (struct type abnormal = t + type t = abnormal + let compare = Pervasives.compare end) -let abnormals: Env.t AbnormalMap.t ref = ref AbnormalMap.empty +let abnormals : Env.t AbnormalMap.t ref = ref AbnormalMap.empty (** record the appearance of a control flow directive. 
associate the given env if passed *) -let save ?(env=[]) abnormal = - abnormals := AbnormalMap.add abnormal env !abnormals +let save ?(env = []) abnormal = abnormals := AbnormalMap.add abnormal env !abnormals (** set or remove a given control flow directive's value, and return the current one *) let swap_saved abnormal value = let old = AbnormalMap.get abnormal !abnormals in - if old <> value then begin - abnormals := match value with + ( if old <> value then + abnormals := + match value with | None -> AbnormalMap.remove abnormal !abnormals - | Some env -> AbnormalMap.add abnormal env !abnormals - end; + | Some env -> AbnormalMap.add abnormal env !abnormals ); old (** remove a given control flow directive's value, and return the current one *) -let clear_saved abnormal = - swap_saved abnormal None +let clear_saved abnormal = swap_saved abnormal None let string = function | Return -> "return" diff --git a/src/typing/assert_ground.ml b/src/typing/assert_ground.ml new file mode 100644 index 00000000000..a9b80d9b2a4 --- /dev/null +++ b/src/typing/assert_ground.ml @@ -0,0 +1,369 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Constraint +open Reason +open Utils_js +open Type +module Marked = Marked.IdMarked +module FlowError = Flow_error + +class type_finder t = + object (_self) + inherit [bool] Type_visitor.t as super + + method! type_ cx pole found = + function + | t' -> t = t' || super#type_ cx pole found t + end + +(* Given a type, report missing annotation errors if + + - the given type is a tvar whose id isn't explicitly specified in the given + skip set, or isn't explicitly marked as derivable, or if + + - the tvar appears in a negative position + + Type variables that are in the skip set are marked in assume_ground as + depending on `require`d modules. Thus, e.g., when the superclass of an + exported class is `require`d, we should not insist on an annotation for the + superclass. +*) +(* need to consider only "def" types *) + +module Kit (Flow : Flow_common.S) : Flow_common.ASSERT_GROUND = struct + include Flow + + class assert_ground_visitor r ~max_reasons ~should_munge_underscores = + object (self) + inherit [Marked.t] Type_visitor.t as super + + (* Track prop maps which correspond to object literals. We don't ask for + annotations for object literals which reach exports. Instead, we walk the + properties covariantly. *) + val mutable objlits : int Properties.Map.t = Properties.Map.empty + + (* Track prop maps which correspond to instance fields and methods, indicating + any fields which are initialized. We don't ask for annotations for (a) + munged property names, which are private and thus not inputs, and (b) + initialized field names. 
*) + val mutable insts : (int * SSet.t) Properties.Map.t = Properties.Map.empty + + val depth = ref 0 + + val reason_stack = ref (Nel.one r) + + method private push_frame r = + incr depth; + if max_reasons > 0 && Nel.length !reason_stack < max_reasons then ( + reason_stack := Nel.cons r !reason_stack; + true + ) else + false + + method private pop_frame did_add = + decr depth; + if max_reasons > 0 && did_add then + (* We start with a Nel and always add in push_frame, so the tail should always + * be non-empty *) + reason_stack := Nel.of_list_exn (Nel.tl !reason_stack) + + method private with_frame r f = + let did_add = self#push_frame r in + let result = f () in + self#pop_frame did_add; + result + + (* Tvars with reasons that match should not be missing annotation errors. *) + method private skip_reason r = + match desc_of_reason r with + (* No possible annotation for `this` type. *) + | RThis -> true + (* Treat * as an annotation, even though it is inferred, because the + resulting errors are confusing and have unpredictable locations. *-types + are already deprecated, and this wart will go away entirely when we + finally remove support. *) + | RExistential -> true + | _ -> false + + method private derivable_reason r = + match desc_of_reason r with + | RShadowProperty _ -> true + | _ -> is_derivable_reason r + + method! tvar cx pole seen r id = + let (root_id, constraints) = Context.find_constraints cx id in + if id != root_id then + self#tvar cx pole seen r root_id + else if self#skip_reason r then + seen + else + let pole = + if self#derivable_reason r then + Polarity.Positive + else + pole + in + (* TODO: clean up the match pole below. Visiting a tvar with a negative + polarity will add an error and resolve the tvar to any. We don't need + to also walk the positive edge of the tvar. This behavior is a bit + different from what the Marked module provides, but treating negative + as neutral gives the correct behavior. *) + let marked_pole = + match pole with + | Polarity.Negative -> Polarity.Neutral + | _ -> pole + in + match Marked.add id marked_pole seen with + | None -> seen + | Some (pole, seen) -> + (match pole with + | Polarity.Neutral + | Polarity.Negative -> + AnyT.locationless AnyError |> unify_opt cx ~unify_any:true (OpenT (r, id)); + let trace_reasons = + if max_reasons = 0 then + [] + else + Core_list.map + ~f:(fun reason -> repos_reason (def_aloc_of_reason reason) reason) + (Nel.to_list !reason_stack) + in + add_output cx (Error_message.EMissingAnnotation (r, trace_reasons)); + seen + | Polarity.Positive -> + (match constraints with + | FullyResolved _ -> + (* A fully resolved node corresponds to either (a) a tvar imported + from a dependency, which has already gone through assert_ground + or (b) a tvar corresponding to an annotation in this file which + certainly does not contain unresolved tvars. + + In either case, it is not necessary to visit the structure of the + resolved type, as we will not find anything to complain about. *) + seen + | Resolved (_, t) -> self#type_ cx Polarity.Positive seen t + | Unresolved { lower; _ } -> + TypeMap.fold (fun t _ seen -> self#type_ cx Polarity.Positive seen t) lower seen)) + + method! 
type_ cx pole seen t = + Option.iter + ~f:(fun { Verbose.depth = verbose_depth; indent; enabled_during_flowlib = _ } -> + let pid = Context.pid_prefix cx in + let indent = String.make (!depth * indent) ' ' in + prerr_endlinef + "\n%s%sassert_ground (%s): %s" + indent + pid + (Polarity.string pole) + (Debug_js.dump_t cx ~depth:verbose_depth t)) + (Context.verbose cx); + self#with_frame (reason_of_t t) (fun () -> + let seen = + match t with + | BoundT _ -> seen + | MergedT _ -> + (* The base class implementation will walk uses here, but there's no + reasonable way to complain about missing annotations for MergedT, + which was added to avoid missing annotations. *) + seen + | ReposT (r, _) -> + (* It's possible that we might encounter a substituted this type in a + * negative position. This is normally an error, but might be + * suppresesed or otherwise still present in the exports. If we + * encounter this, we should just ignore it. *) + if desc_of_reason r = RThisType then + seen + else + super#type_ cx pole seen t + | EvalT (_, TypeDestructorT _, _) -> + (* Type destructors are annotations, so we should never complain about + missing annotations due them. The default visitor _should_ never + visit a tvar in an input position, but do to some wacky stuff in + eval, it's possible today. *) + seen + | KeysT _ -> + (* Same idea as type destructors. *) + seen + | TypeAppT (_, _, c, ts) -> self#typeapp ts cx pole seen c + | DefT (r, _, ArrT (ArrayAT (t, ts))) when is_literal_array_reason r -> + self#arrlit cx pole seen t ts + | DefT (r, _, ObjT o) when is_literal_object_reason r -> + let refcnt = + (try Properties.Map.find_unsafe o.props_tmap objlits with Not_found -> 0) + in + objlits <- Properties.Map.add o.props_tmap (refcnt + 1) objlits; + let seen = super#type_ cx pole seen t in + objlits <- + ( if refcnt = 0 then + Properties.Map.remove o.props_tmap objlits + else + Properties.Map.add o.props_tmap refcnt objlits ); + seen + | DefT (_, _, InstanceT (static, _, _, i)) -> + let static_props_id = + match static with + | DefT (_, _, ObjT o) -> Some o.props_tmap + | _ -> None + in + let own_refcnt = + (try fst (Properties.Map.find_unsafe i.own_props insts) with Not_found -> 0) + in + let proto_refcnt = + (try fst (Properties.Map.find_unsafe i.proto_props insts) with Not_found -> 0) + in + let static_refcnt = + Option.value_map static_props_id ~default:0 ~f:(fun id -> + (try fst (Properties.Map.find_unsafe id insts) with Not_found -> 0)) + in + insts <- + Properties.Map.add i.own_props (own_refcnt + 1, i.initialized_fields) insts; + insts <- Properties.Map.add i.proto_props (proto_refcnt + 1, SSet.empty) insts; + Option.iter static_props_id (fun id -> + insts <- + Properties.Map.add id (static_refcnt + 1, i.initialized_static_fields) insts); + let seen = super#type_ cx pole seen t in + insts <- + ( if own_refcnt = 0 then + Properties.Map.remove i.own_props insts + else + Properties.Map.add i.own_props (own_refcnt, i.initialized_fields) insts ); + insts <- + ( if proto_refcnt = 0 then + Properties.Map.remove i.proto_props insts + else + Properties.Map.add i.proto_props (own_refcnt, SSet.empty) insts ); + Option.iter static_props_id (fun id -> + insts <- + ( if static_refcnt = 0 then + Properties.Map.remove id insts + else + Properties.Map.add id (static_refcnt, i.initialized_static_fields) insts )); + seen + | DefT (r, _, FunT (static, prototype, ft)) -> + (* This won't propagate to any other types because this happens post-merge *) + let any kind = AnyT.locationless (Unsound kind) in + any 
DummyStatic |> unify_opt cx ~unify_any:true static; + any FunctionPrototype |> unify_opt cx ~unify_any:true prototype; + any BoundFunctionThis |> unify_opt cx ~unify_any:true ft.this_t; + super#type_ + cx + pole + seen + (DefT + ( r, + bogus_trust (), + FunT + ( any DummyStatic, + any FunctionPrototype, + { ft with this_t = any BoundFunctionThis } ) )) + | _ -> super#type_ cx pole seen t + in + seen) + + method! props cx pole seen id = + if Properties.Map.mem id objlits then + self#objlit_props cx pole seen id + else + match Properties.Map.get id insts with + | Some (_, init) -> self#inst_props cx pole seen id init + | _ -> super#props cx pole seen id + + method private arrlit cx pole seen t ts = + let seen = self#type_ cx pole seen t in + let seen = Option.fold ts ~init:seen ~f:(List.fold_left (self#type_ cx pole)) in + seen + + method private objlit_props cx pole seen id = + let props = Context.find_props cx id in + SMap.fold + (fun _ p acc -> Property.read_t p |> Option.fold ~f:(self#type_ cx pole) ~init:acc) + props + seen + + method private inst_props cx pole seen id init = + let props = Context.find_props cx id in + SMap.fold + (fun x p acc -> + if is_munged_prop_name_with_munge x ~should_munge_underscores then + acc + else if SSet.mem x init then + Property.read_t p |> Option.fold ~f:(self#type_ cx pole) ~init:acc + else + self#prop cx pole acc p) + props + seen + + method private typeapp = + let rec loop ?constant_polarity_param cx pole seen = function + | (_, []) -> seen + | ([], _) -> seen + | (tparam :: tparams, targ :: targs) -> + let param_polarity = + match constant_polarity_param with + | Some (s, p) when tparam.name = s -> p + | _ -> Polarity.mult (pole, tparam.polarity) + in + let seen = self#type_ cx param_polarity seen targ in + loop cx pole seen (tparams, targs) + in + fun targs cx pole seen -> function + | OpenT (r, id) -> + let seen = self#tvar cx Polarity.Positive seen r id in + (match Context.find_graph cx id with + | Resolved (_, t) + | FullyResolved (_, t) -> + self#typeapp targs cx pole seen t + | Unresolved { lower; _ } -> + TypeMap.fold (fun t _ acc -> self#typeapp targs cx pole acc t) lower seen) + | AnnotT (_, t, _) -> self#typeapp targs cx pole seen t + (* Shallowly check to see if it is an EvalT. If the EvalT's first + * value is a BoundT, we can visit that parameter with a constant + * positive polarity if it does not appear in the defer_use_t. 
+ *) + | DefT + ( _, + _, + PolyT + ( _, + tparams, + DefT + ( _, + _, + TypeT + ( _, + EvalT ((BoundT (_, s, _) as t), TypeDestructorT (_, _, destructor), _) + ) ), + _ ) ) -> + if (new type_finder t)#destructor cx false destructor then + loop cx pole seen (Nel.to_list tparams, targs) + else + loop + cx + pole + seen + (Nel.to_list tparams, targs) + ~constant_polarity_param:(s, Polarity.Positive) + | DefT (_, _, PolyT (_, tparams, _, _)) -> loop cx pole seen (Nel.to_list tparams, targs) + | DefT (_, _, EmptyT _) -> seen + | AnyT _ -> seen + | _ -> + (* We don't error here on an unexpected typeapp because we would have already + * caught that this type is not polymorphic earlier *) + seen + end + + let enforce_strict cx t ~should_munge_underscores = + let visitor = + new assert_ground_visitor + (reason_of_t t) + ~max_reasons:(Context.max_trace_depth cx) + ~should_munge_underscores + in + let seen = visitor#type_ cx Polarity.Positive Marked.empty t in + ignore (seen : Marked.t) +end diff --git a/src/typing/changeset.ml b/src/typing/changeset.ml index 133e77e3c8b..a0bd57af691 100644 --- a/src/typing/changeset.ml +++ b/src/typing/changeset.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -15,131 +15,132 @@ module Utils = Utils_js (* operations on vars and refis. refine is a read that results in a type update *) -type op = Read | Write | Refine +type op = + | Read + | Write + | Refine let string_of_op = function -| Read -> "Read" -| Write -> "Write" -| Refine -> "Refine" + | Read -> "Read" + | Write -> "Write" + | Refine -> "Refine" (* ref to scope entry *) module EntryRef = struct type t = int * string * op + let compare = Pervasives.compare end -module EntryRefSet : Set.S with type elt = EntryRef.t -= Set.Make(EntryRef) +module EntryRefSet : Set.S with type elt = EntryRef.t = Set.Make (EntryRef) (* ref to scope refi *) module RefiRef = struct type t = int * Key.t * op + let compare = Pervasives.compare end -module RefiRefSet : Set.S with type elt = RefiRef.t -= Set.Make(RefiRef) +module RefiRefSet : Set.S with type elt = RefiRef.t = Set.Make (RefiRef) (* changeset is a set of changed variables by name and a set of changed refinements by key *) type t = EntryRefSet.t * RefiRefSet.t -let empty = EntryRefSet.empty, RefiRefSet.empty +let empty = (EntryRefSet.empty, RefiRefSet.empty) -let add_var var_ref (vars, refis) = - EntryRefSet.add var_ref vars, refis +let add_var var_ref (vars, refis) = (EntryRefSet.add var_ref vars, refis) -let add_refi refi_ref (vars, refis) = - vars, RefiRefSet.add refi_ref refis +let add_refi refi_ref (vars, refis) = (vars, RefiRefSet.add refi_ref refis) (* ugh ocaml *) -let iset_of_list list = - List.fold_left (fun acc elem -> ISet.add elem acc) ISet.empty list +let iset_of_list list = List.fold_left (fun acc elem -> ISet.add elem acc) ISet.empty list (* filter changeset to contain only changes for given scopes *) let include_scopes ids (vars, refis) = let mem = let ids = iset_of_list ids in - fun id -> ISet.mem id ids + (fun id -> ISet.mem id ids) in - EntryRefSet.filter (fun (scope_id, _, _) -> mem scope_id) vars, - RefiRefSet.filter (fun (scope_id, _, _) -> mem scope_id) refis + ( EntryRefSet.filter (fun (scope_id, _, _) -> mem scope_id) vars, + RefiRefSet.filter (fun (scope_id, _, _) -> mem scope_id) refis ) let include_ops ops (vars, refis) = - EntryRefSet.filter 
(fun (_, _, op) -> List.mem op ops) vars, - RefiRefSet.filter (fun (_, _, op) -> List.mem op ops) refis + ( EntryRefSet.filter (fun (_, _, op) -> List.mem op ops) vars, + RefiRefSet.filter (fun (_, _, op) -> List.mem op ops) refis ) let include_reads = include_ops [Read] + let include_writes = include_ops [Write] + let exclude_refines = include_ops [Read; Write] let iter ?ops f_vars f_refis changeset = - let vars, refis = match ops with - | None -> changeset - | Some ops -> include_ops ops changeset + let (vars, refis) = + match ops with + | None -> changeset + | Some ops -> include_ops ops changeset in vars |> EntryRefSet.iter f_vars; refis |> RefiRefSet.iter f_refis let iter_reads = iter ~ops:[Read] + let iter_writes = iter ~ops:[Write] + let iter_refines = iter ~ops:[Refine] + let iter_type_updates = iter ~ops:[Write; Refine] -let is_empty (vars, refis) = - vars = EntryRefSet.empty && - refis = RefiRefSet.empty +let is_empty (vars, refis) = vars = EntryRefSet.empty && refis = RefiRefSet.empty let union (vars1, refis1) (vars2, refis2) = - EntryRefSet.union vars1 vars2, - RefiRefSet.union refis1 refis2 + (EntryRefSet.union vars1 vars2, RefiRefSet.union refis1 refis2) let inter (vars1, refis1) (vars2, refis2) = - EntryRefSet.inter vars1 vars2, - RefiRefSet.inter refis1 refis2 + (EntryRefSet.inter vars1 vars2, RefiRefSet.inter refis1 refis2) let diff (vars1, refis1) (vars2, refis2) = - EntryRefSet.diff vars1 vars2, - RefiRefSet.diff refis1 refis2 + (EntryRefSet.diff vars1 vars2, RefiRefSet.diff refis1 refis2) -let comp x y = - union (diff x y) (diff y x) +let comp x y = union (diff x y) (diff y x) let string_of_entry_ref (scope_id, name, op) = - Utils.spf "(%d, %s, %s)" - scope_id - name - (string_of_op op) + Utils.spf "(%d, %s, %s)" scope_id name (string_of_op op) let string_of_refi_ref (scope_id, key, op) = - Utils.spf "(%d, %s, %s)" - scope_id - (Key.string_of_key key) - (string_of_op op) + Utils.spf "(%d, %s, %s)" scope_id (Key.string_of_key key) (string_of_op op) let to_string = let string_of_changed_vars changed_vars = - Utils.spf "{ %s }" - (let entry_refs = EntryRefSet.fold (fun entry_ref acc -> - string_of_entry_ref entry_ref :: acc - ) changed_vars [] in - String.concat "; " (List.rev entry_refs)) + Utils.spf + "{ %s }" + (let entry_refs = + EntryRefSet.fold + (fun entry_ref acc -> string_of_entry_ref entry_ref :: acc) + changed_vars + [] + in + String.concat "; " (List.rev entry_refs)) in let string_of_changed_refis changed_refis = - Utils.spf "{ %s }" - (let refi_refs = RefiRefSet.fold (fun refi_ref acc -> - string_of_refi_ref refi_ref :: acc - ) changed_refis [] in - String.concat "; " (List.rev refi_refs)) + Utils.spf + "{ %s }" + (let refi_refs = + RefiRefSet.fold (fun refi_ref acc -> string_of_refi_ref refi_ref :: acc) changed_refis [] + in + String.concat "; " (List.rev refi_refs)) in fun (changed_vars, changed_refis) -> - Utils.spf "%s, %s" + Utils.spf + "%s, %s" (string_of_changed_vars changed_vars) (string_of_changed_refis changed_refis) (*************************************************************) (** change tracking **) + (** provides an API over a global stack of changesets to track read/write ops and refinements as AST traversal duing infer drives calls into Env module. @@ -149,59 +150,54 @@ let to_string = (* due to the current dependency situation, we locate the global changeset stack here for now, so it can be accessed from both Env and Flow_js. 
*) +module Global = struct + type 'a stack = 'a list ref + + let changesets : t stack = ref [] + + let is_active () = List.length !changesets > 0 + + let init () = changesets := [] -type 'a stack = 'a list ref -let changesets: t stack = ref [] - -let is_active () = - List.length !changesets > 0 - -let init () = - changesets := [] - -let push () = - changesets := (EntryRefSet.empty, RefiRefSet.empty) :: !changesets - -let pop () = - changesets := List.tl !changesets - -(* return the current changeset *) -let peek () = - List.hd !changesets - -(* helper: transform current changeset, given - transform functions for vars and/or refis. - swap, return prev *) -let swap f_vars f_refis = - let prev_vars, prev_refis = peek () in - let apply_opt arg = function None -> arg | Some f -> f arg in - let new_vars = apply_opt prev_vars f_vars in - let new_refis = apply_opt prev_refis f_refis in - changesets := (new_vars, new_refis) :: List.tl !changesets; - prev_vars, prev_refis - -(* clear changeset, return previous *) -let clear () = - swap (Some (fun _ -> EntryRefSet.empty)) (Some (fun _ -> RefiRefSet.empty)) - -(* restore passed changeset, return previous *) -let restore (vars, refis) = - swap (Some (fun _ -> vars)) (Some (fun _ -> refis)) - -(* merge changeset with passed one, return previous *) -let merge (vars, refis) = - swap (Some (EntryRefSet.union vars)) (Some (RefiRefSet.union refis)) - -(* filter changes targeting the given scope from the current changeset *) -let filter_scope_changes id = - swap - (Some (EntryRefSet.filter (fun (scope_id, _, _) -> scope_id != id))) - (Some (RefiRefSet.filter (fun (scope_id, _, _) -> scope_id != id))) - -(* record a changed var in current changeset *) -let change_var entry_ref = - ignore (swap (Some (EntryRefSet.add entry_ref)) None) - -(* record a refinement in current changeset *) -let change_refi refi_ref = - ignore (swap None (Some (RefiRefSet.add refi_ref))) + let push () = changesets := (EntryRefSet.empty, RefiRefSet.empty) :: !changesets + + let pop () = changesets := List.tl !changesets + + (* return the current changeset *) + let peek () = List.hd !changesets + + (* helper: transform current changeset, given + transform functions for vars and/or refis. 
+ swap, return prev *) + let swap f_vars f_refis = + let (prev_vars, prev_refis) = peek () in + let apply_opt arg = function + | None -> arg + | Some f -> f arg + in + let new_vars = apply_opt prev_vars f_vars in + let new_refis = apply_opt prev_refis f_refis in + changesets := (new_vars, new_refis) :: List.tl !changesets; + (prev_vars, prev_refis) + + (* clear changeset, return previous *) + let clear () = swap (Some (fun _ -> EntryRefSet.empty)) (Some (fun _ -> RefiRefSet.empty)) + + (* restore passed changeset, return previous *) + let restore (vars, refis) = swap (Some (fun _ -> vars)) (Some (fun _ -> refis)) + + (* merge changeset with passed one, return previous *) + let merge (vars, refis) = swap (Some (EntryRefSet.union vars)) (Some (RefiRefSet.union refis)) + + (* filter changes targeting the given scope from the current changeset *) + let filter_scope_changes id = + swap + (Some (EntryRefSet.filter (fun (scope_id, _, _) -> scope_id != id))) + (Some (RefiRefSet.filter (fun (scope_id, _, _) -> scope_id != id))) + + (* record a changed var in current changeset *) + let change_var entry_ref = ignore (swap (Some (EntryRefSet.add entry_ref)) None) + + (* record a refinement in current changeset *) + let change_refi refi_ref = ignore (swap None (Some (RefiRefSet.add refi_ref))) +end diff --git a/src/typing/class_sig.ml b/src/typing/class_sig.ml index 873ebd488ed..634dc4980c6 100644 --- a/src/typing/class_sig.ml +++ b/src/typing/class_sig.ml @@ -1,781 +1,871 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - module Flow = Flow_js - open Reason +open Utils_js +include Class_sig_intf + +module Make (F : Func_sig.S) = struct + type func_sig = F.t -type set_asts = - (Loc.t, Loc.t * Type.t) Ast.Function.body option * - (Loc.t, Loc.t * Type.t) Ast.Expression.t option - -> unit + type func_params_tast = F.func_params_tast -type set_type = Type.t -> unit + type set_asts = + func_params_tast option + * (ALoc.t, ALoc.t * Type.t) Ast.Function.body option + * (ALoc.t, ALoc.t * Type.t) Ast.Expression.t option -> + unit -and field = - | Annot of Type.t - | Infer of Func_sig.t * set_asts + type set_type = Type.t -> unit -type field' = Loc.t option * Type.polarity * field + and field = + | Annot of Type.t + | Infer of func_sig * set_asts -type func_info = Loc.t option * Func_sig.t * set_asts * set_type + type field' = ALoc.t option * Polarity.t * field -type signature = { - reason: reason; - fields: field' SMap.t; - private_fields: field' SMap.t; - proto_fields: field' SMap.t; - (* Multiple function signatures indicates an overloaded method. Note that + type func_info = ALoc.t option * func_sig * set_asts * set_type + + type signature = { + reason: reason; + fields: field' SMap.t; + private_fields: field' SMap.t; + proto_fields: field' SMap.t; + (* Multiple function signatures indicates an overloaded method. Note that function signatures are stored in reverse definition order. *) - methods: func_info Nel.t SMap.t; - getters: func_info SMap.t; - setters: func_info SMap.t; - calls: Type.t list; - call_deprecated: Type.t option; -} - -type t = { - id: int; - tparams: Type.typeparam list; - tparams_map: Type.t SMap.t; - super: super; - (* Multiple function signatures indicates an overloaded constructor. 
Note that + methods: func_info Nel.t SMap.t; + getters: func_info SMap.t; + setters: func_info SMap.t; + calls: Type.t list; + } + + type t = { + id: ALoc.t; + tparams: Type.typeparams; + tparams_map: Type.t SMap.t; + super: super; + (* Multiple function signatures indicates an overloaded constructor. Note that function signatures are stored in reverse definition order. *) - constructor: func_info list; - static: signature; - instance: signature; -} - -and super = - | Interface of { - extends: typeapp list; - callable: bool; - } - | Class of { - extends: extends; - mixins: typeapp list; (* declare class only *) - implements: typeapp list - } + constructor: func_info list; + static: signature; + instance: signature; + } -and extends = - | Explicit of typeapp - | Implicit of { null: bool } - -and typeapp = Loc.t * Type.t * Type.t list option - -let empty id reason tparams tparams_map super = - let empty_sig reason = { - reason; - fields = SMap.empty; - private_fields = SMap.empty; - proto_fields = SMap.empty; - methods = SMap.empty; - getters = SMap.empty; - setters = SMap.empty; - calls = []; - call_deprecated = None; - } in - let constructor = [] in - let static = - let reason = replace_reason (fun desc -> RStatics desc) reason in - empty_sig reason - in - let instance = empty_sig reason in - { id; tparams; tparams_map; super; constructor; static; instance } - -let structural x = - match x.super with - | Interface _ -> true - | Class _ -> false - -let map_sig ~static f s = - if static - then {s with static = f s.static} - else {s with instance = f s.instance} - -let with_sig ~static f s = - if static then f s.static else f s.instance - -let add_private_field name loc polarity field = map_sig (fun s -> { - s with - private_fields = SMap.add name (Some loc, polarity, field) s.private_fields; -}) - -let add_constructor loc fsig ?(set_asts=ignore) ?(set_type=ignore) s = - {s with constructor = [loc, Func_sig.to_ctor_sig fsig, set_asts, set_type]} - -let add_default_constructor reason s = - let fsig = Func_sig.default_constructor reason in - add_constructor None fsig s - -let append_constructor loc fsig ?(set_asts=ignore) ?(set_type=ignore) s = - {s with constructor = (loc, Func_sig.to_ctor_sig fsig, set_asts, set_type)::s.constructor} - -let add_field' ~static name fld x = - let flat = static || structural x in - map_sig ~static (fun s -> { - s with - fields = SMap.add name fld s.fields; - proto_fields = if flat then SMap.remove name s.proto_fields else s.proto_fields; - methods = if flat then SMap.remove name s.methods else s.methods; - getters = if flat then SMap.remove name s.getters else s.getters; - setters = if flat then SMap.remove name s.setters else s.setters; - }) x - -let add_field ~static name loc polarity field x = - add_field' ~static name (Some loc, polarity, field) x - -let add_indexer ~static polarity ~key ~value x = - let kloc, k = key in - let vloc, v = value in - x |> add_field ~static "$key" kloc polarity (Annot k) - |> add_field ~static "$value" vloc polarity (Annot v) - -let add_name_field x = - let r = replace_reason (fun desc -> RNameProperty desc) x.instance.reason in - let t = Type.StrT.why r in - add_field' ~static:true "name" (None, Type.Neutral, Annot t) x - -let add_proto_field name loc polarity field x = - map_sig ~static:false (fun s -> { - s with - proto_fields = SMap.add name (Some loc, polarity, field) s.proto_fields; - methods = SMap.remove name s.methods; - getters = SMap.remove name s.getters; - setters = SMap.remove name s.setters; - }) x - -let 
add_method ~static name loc fsig ?(set_asts=ignore) ?(set_type=ignore) x = - let flat = static || structural x in - let func_info = Some loc, fsig, set_asts, set_type in - map_sig ~static (fun s -> { - s with - fields = if flat then SMap.remove name s.fields else s.fields; - proto_fields = SMap.remove name s.proto_fields; - methods = SMap.add name (Nel.one func_info) s.methods; - getters = SMap.remove name s.getters; - setters = SMap.remove name s.setters; - }) x - -(* Appending a method builds a list of function signatures. This implements the + and super = + | Interface of { + inline: bool; + (* Anonymous interface, can appear anywhere inside a type *) + extends: typeapp list; + callable: bool; + } + | Class of { + extends: extends; + mixins: typeapp list; + (* declare class only *) + implements: typeapp list; + } + + and extends = + | Explicit of typeapp + | Implicit of { null: bool } + + and typeapp = ALoc.t * Type.t * Type.t list option + + let empty id reason tparams tparams_map super = + let empty_sig reason = + { + reason; + fields = SMap.empty; + private_fields = SMap.empty; + proto_fields = SMap.empty; + methods = SMap.empty; + getters = SMap.empty; + setters = SMap.empty; + calls = []; + } + in + let constructor = [] in + let static = + let reason = update_desc_reason (fun desc -> RStatics desc) reason in + empty_sig reason + in + let instance = empty_sig reason in + { id; tparams; tparams_map; super; constructor; static; instance } + + let structural x = + match x.super with + | Interface _ -> true + | Class _ -> false + + let inst_kind x = + match x.super with + | Interface { inline; _ } -> Type.InterfaceKind { inline } + | Class _ -> Type.ClassKind + + let map_sig ~static f s = + if static then + { s with static = f s.static } + else + { s with instance = f s.instance } + + let with_sig ~static f s = + if static then + f s.static + else + f s.instance + + let add_private_field name loc polarity field = + map_sig (fun s -> + { s with private_fields = SMap.add name (Some loc, polarity, field) s.private_fields }) + + let public_fields_of_signature ~static s = + ( if static then + s.static + else + s.instance ) + .fields + + let private_fields_of_signature ~static s = + ( if static then + s.static + else + s.instance ) + .private_fields + + let add_constructor loc fsig ?(set_asts = ignore) ?(set_type = ignore) s = + { s with constructor = [(loc, F.to_ctor_sig fsig, set_asts, set_type)] } + + let add_default_constructor reason s = + let fsig = F.default_constructor reason in + add_constructor None fsig s + + let append_constructor loc fsig ?(set_asts = ignore) ?(set_type = ignore) s = + { s with constructor = (loc, F.to_ctor_sig fsig, set_asts, set_type) :: s.constructor } + + let add_field' ~static name fld x = + let flat = static || structural x in + map_sig + ~static + (fun s -> + { + s with + fields = SMap.add name fld s.fields; + proto_fields = + ( if flat then + SMap.remove name s.proto_fields + else + s.proto_fields ); + methods = + ( if flat then + SMap.remove name s.methods + else + s.methods ); + getters = + ( if flat then + SMap.remove name s.getters + else + s.getters ); + setters = + ( if flat then + SMap.remove name s.setters + else + s.setters ); + }) + x + + let add_field ~static name loc polarity field x = + add_field' ~static name (Some loc, polarity, field) x + + let add_indexer ~static polarity ~key ~value x = + x + |> add_field' ~static "$key" (None, Polarity.Neutral, Annot key) + |> add_field' ~static "$value" (None, polarity, Annot value) + + let 
add_name_field x = + let r = update_desc_reason (fun desc -> RNameProperty desc) x.instance.reason in + let t = Type.StrT.why r |> Type.with_trust Trust.bogus_trust in + add_field' ~static:true "name" (None, Polarity.Neutral, Annot t) x + + let add_proto_field name loc polarity field x = + map_sig + ~static:false + (fun s -> + { + s with + proto_fields = SMap.add name (Some loc, polarity, field) s.proto_fields; + methods = SMap.remove name s.methods; + getters = SMap.remove name s.getters; + setters = SMap.remove name s.setters; + }) + x + + let add_method ~static name loc fsig ?(set_asts = ignore) ?(set_type = ignore) x = + let flat = static || structural x in + let func_info = (Some loc, fsig, set_asts, set_type) in + map_sig + ~static + (fun s -> + { + s with + fields = + ( if flat then + SMap.remove name s.fields + else + s.fields ); + proto_fields = SMap.remove name s.proto_fields; + methods = SMap.add name (Nel.one func_info) s.methods; + getters = SMap.remove name s.getters; + setters = SMap.remove name s.setters; + }) + x + + (* Appending a method builds a list of function signatures. This implements the bahvior of interfaces and declared classes, which interpret duplicate definitions as branches of a single overloaded method. *) -let append_method ~static name loc fsig ?(set_asts=ignore) ?(set_type=ignore) x = - let flat = static || structural x in - let func_info = Some loc, fsig, set_asts, set_type in - map_sig ~static (fun s -> { - s with - fields = if flat then SMap.remove name s.fields else s.fields; - proto_fields = SMap.remove name s.proto_fields; - methods = ( - match SMap.get name s.methods with - | Some fsigs -> SMap.add name (Nel.cons func_info fsigs) s.methods - | None -> SMap.add name (Nel.one func_info) s.methods - ); - getters = SMap.remove name s.getters; - setters = SMap.remove name s.setters; - }) x - -let append_call ~static t = map_sig ~static (fun s -> - (* Note that $call properties always override the call property syntax. - As before, if both are present, the $call property is used and the call - property is ignored. *) - match s.call_deprecated with - | None -> { s with calls = t :: s.calls } - | Some _ -> s -) - -let add_call_deprecated ~static t = map_sig ~static (fun s -> - (* Note that $call properties always override the call property syntax. - As before, if both are present, the $call property is used and the call - property is ignored. 
*) - { s with call_deprecated = Some t; calls = [] } -) - -let add_getter ~static name loc fsig ?(set_asts=ignore) ?(set_type=ignore) x = - let flat = static || structural x in - let func_info = Some loc, fsig, set_asts, set_type in - map_sig ~static (fun s -> { - s with - fields = if flat then SMap.remove name s.fields else s.fields; - proto_fields = SMap.remove name s.proto_fields; - methods = SMap.remove name s.methods; - getters = SMap.add name func_info s.getters; - }) x - -let add_setter ~static name loc fsig ?(set_asts=ignore) ?(set_type=ignore) x = - let flat = static || structural x in - let func_info = Some loc, fsig, set_asts, set_type in - map_sig ~static (fun s -> { - s with - fields = if flat then SMap.remove name s.fields else s.fields; - proto_fields = SMap.remove name s.proto_fields; - methods = SMap.remove name s.methods; - setters = SMap.add name func_info s.setters; - }) x - -let mem_constructor {constructor; _} = constructor <> [] - -let mem_field x = with_sig (fun s -> SMap.mem x s.fields) - -let iter_methods f s = - SMap.iter (fun _ -> Nel.iter f) s.methods; - SMap.iter (fun _ -> f) s.getters; - SMap.iter (fun _ -> f) s.setters - -(* TODO? *) -let subst_field cx map (loc, polarity, field) = - loc, polarity, match field with - | Annot t -> Annot (Flow.subst cx map t) - | Infer (fsig, set_asts) -> Infer (Func_sig.subst cx map fsig, set_asts) - -let subst_sig cx map s = - let subst_func_sig (loc, sig_, f, g) = (loc, Func_sig.subst cx map sig_, f, g) in - { - reason = s.reason; - fields = SMap.map (subst_field cx map) s.fields; - private_fields = SMap.map (subst_field cx map) s.private_fields; - proto_fields = SMap.map (subst_field cx map) s.proto_fields; - methods = SMap.map (Nel.map subst_func_sig) s.methods; - getters = SMap.map (subst_func_sig) s.getters; - setters = SMap.map (subst_func_sig) s.setters; - calls = List.map (Flow.subst cx map) s.calls; - call_deprecated = Option.map ~f:(Flow.subst cx map) s.call_deprecated; - } + let append_method ~static name loc fsig ?(set_asts = ignore) ?(set_type = ignore) x = + let flat = static || structural x in + let func_info = (Some loc, fsig, set_asts, set_type) in + map_sig + ~static + (fun s -> + { + s with + fields = + ( if flat then + SMap.remove name s.fields + else + s.fields ); + proto_fields = SMap.remove name s.proto_fields; + methods = + (match SMap.get name s.methods with + | Some fsigs -> SMap.add name (Nel.cons func_info fsigs) s.methods + | None -> SMap.add name (Nel.one func_info) s.methods); + getters = SMap.remove name s.getters; + setters = SMap.remove name s.setters; + }) + x + + let append_call ~static t = map_sig ~static (fun s -> { s with calls = t :: s.calls }) + + let add_getter ~static name loc fsig ?(set_asts = ignore) ?(set_type = ignore) x = + let flat = static || structural x in + let func_info = (Some loc, fsig, set_asts, set_type) in + map_sig + ~static + (fun s -> + { + s with + fields = + ( if flat then + SMap.remove name s.fields + else + s.fields ); + proto_fields = SMap.remove name s.proto_fields; + methods = SMap.remove name s.methods; + getters = SMap.add name func_info s.getters; + }) + x + + let add_setter ~static name loc fsig ?(set_asts = ignore) ?(set_type = ignore) x = + let flat = static || structural x in + let func_info = (Some loc, fsig, set_asts, set_type) in + map_sig + ~static + (fun s -> + { + s with + fields = + ( if flat then + SMap.remove name s.fields + else + s.fields ); + proto_fields = SMap.remove name s.proto_fields; + methods = SMap.remove name s.methods; + setters 
= SMap.add name func_info s.setters; + }) + x + + let mem_constructor { constructor; _ } = constructor <> [] + + let mem_field x = with_sig (fun s -> SMap.mem x s.fields) + + let iter_methods f s = + SMap.iter (fun _ -> Nel.iter f) s.methods; + SMap.iter (fun _ -> f) s.getters; + SMap.iter (fun _ -> f) s.setters + + (* TODO? *) + let subst_field cx map (loc, polarity, field) = + ( loc, + polarity, + match field with + | Annot t -> Annot (Flow.subst cx map t) + | Infer (fsig, set_asts) -> Infer (F.subst cx map fsig, set_asts) ) + + let subst_sig cx map s = + let subst_func_sig (loc, sig_, f, g) = (loc, F.subst cx map sig_, f, g) in + { + reason = s.reason; + fields = SMap.map (subst_field cx map) s.fields; + private_fields = SMap.map (subst_field cx map) s.private_fields; + proto_fields = SMap.map (subst_field cx map) s.proto_fields; + methods = SMap.map (Nel.map subst_func_sig) s.methods; + getters = SMap.map subst_func_sig s.getters; + setters = SMap.map subst_func_sig s.setters; + calls = Core_list.map ~f:(Flow.subst cx map) s.calls; + } -let subst_typeapp cx map (loc, c, targs) = - let c = Flow.subst cx map c in - let targs = Option.map ~f:(List.map (Flow.subst cx map)) targs in - (loc, c, targs) + let subst_typeapp cx map (loc, c, targs) = + let c = Flow.subst cx map c in + let targs = Option.map ~f:(Core_list.map ~f:(Flow.subst cx map)) targs in + (loc, c, targs) + + let subst_extends cx map = function + | Explicit tapp -> Explicit (subst_typeapp cx map tapp) + | Implicit { null = _ } as extends -> extends + + let subst_super cx map = function + | Interface { inline; extends; callable } -> + Interface { inline; extends = Core_list.map ~f:(subst_typeapp cx map) extends; callable } + | Class { extends; mixins; implements } -> + Class + { + extends = subst_extends cx map extends; + mixins = Core_list.map ~f:(subst_typeapp cx map) mixins; + implements = Core_list.map ~f:(subst_typeapp cx map) implements; + } + + let generate_tests cx f x = + Flow.generate_tests cx (Type.TypeParams.to_list x.tparams) (fun map -> + f + { + id = x.id; + tparams = x.tparams; + tparams_map = SMap.map (Flow.subst cx map) x.tparams_map; + super = subst_super cx map x.super; + constructor = + List.map (fun (loc, sig_, g, h) -> (loc, F.subst cx map sig_, g, h)) x.constructor; + static = subst_sig cx map x.static; + instance = subst_sig cx map x.instance; + }) + + let to_field (loc, polarity, field) = + let t = + match field with + | Annot t -> t + | Infer (fsig, _) -> F.gettertype fsig + in + Type.Field (loc, t, polarity) -let subst_extends cx map = function - | Explicit tapp -> Explicit (subst_typeapp cx map tapp) - | Implicit {null=_} as extends -> extends + let to_prop_map cx = SMap.map to_field %> Context.generate_property_map cx -let subst_super cx map = function - | Interface { extends; callable } -> - Interface { - extends = List.map (subst_typeapp cx map) extends; - callable; - } - | Class { extends; mixins; implements } -> - Class { - extends = subst_extends cx map extends; - mixins = List.map (subst_typeapp cx map) mixins; - implements = List.map (subst_typeapp cx map) implements; - } - -let generate_tests cx f x = - Flow.generate_tests cx x.tparams (fun map -> f { - id = x.id; - tparams = x.tparams; - tparams_map = SMap.map (Flow.subst cx map) x.tparams_map; - super = subst_super cx map x.super; - constructor = - List.map - (fun (loc, sig_, g, h) -> loc, Func_sig.subst cx map sig_, g, h) - x.constructor; - static = subst_sig cx map x.static; - instance = subst_sig cx map x.instance; - }) - -let to_field 
(loc, polarity, field) = - let t = match field with - | Annot t -> t - | Infer (fsig, _) -> Func_sig.gettertype fsig - in - Type.Field (loc, t, polarity) - -let elements cx ?constructor s = - let methods = - (* If this is an overloaded method, create an intersection, attributed + let elements cx ?constructor s = + let methods = + (* If this is an overloaded method, create an intersection, attributed to the first declared function signature. If there is a single function signature for this method, simply return the method type. *) - SMap.mapi Type.(fun name xs -> - let ms = - Nel.rev_map (fun (loc, x, _, set_type) -> loc, Func_sig.methodtype cx x, set_type) xs in - (* Keep track of these before intersections are merged, to enable - * type information on every member of the intersection. *) - ms |> Nel.iter (fun (loc, t, set_type) -> - Option.iter loc ~f:(fun loc -> - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - set_type t - ) - ); - match ms with - | (loc, t, _), [] -> loc, t - | (loc0, t0, _), (_, t1, _)::ts -> - let ts = List.map (fun (_loc, t, _) -> t) ts in - loc0, DefT (reason_of_t t0, IntersectionT (InterRep.make t0 t1 ts)) - ) s.methods - in - - (* Re-add the constructor as a method. *) - let methods = match constructor with - | Some t -> SMap.add "constructor" t methods - | None -> methods - in - - (* If there is a both a getter and a setter, then flow the setter type to + SMap.mapi + Type.( + fun _name xs -> + let ms = + Nel.rev_map (fun (loc, x, _, set_type) -> (loc, F.methodtype cx x, set_type)) xs + in + (* Keep track of these before intersections are merged, to enable + * type information on every member of the intersection. *) + ms |> Nel.iter (fun (loc, t, set_type) -> Option.iter loc ~f:(fun _loc -> set_type t)); + match ms with + | ((loc, t, _), []) -> (loc, t) + | ((loc0, t0, _), (_, t1, _) :: ts) -> + let ts = Core_list.map ~f:(fun (_loc, t, _) -> t) ts in + (loc0, IntersectionT (reason_of_t t0, InterRep.make t0 t1 ts))) + s.methods + in + (* Re-add the constructor as a method. *) + let methods = + match constructor with + | Some t -> SMap.add "constructor" t methods + | None -> methods + in + (* If there is a both a getter and a setter, then flow the setter type to the getter. 
Otherwise just use the getter type or the setter type *) - let getters = - SMap.map - (fun (loc, t, _, set_type) -> loc, Func_sig.gettertype t, set_type) - s.getters in - let setters = - SMap.map - (fun (loc, t, _, set_type) -> loc, Func_sig.settertype t, set_type) - s.setters in - - (* Register getters and setters with the type table *) - let register_accessors = SMap.iter (fun name (loc, t, set_type) -> - Option.iter ~f:(fun loc -> - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - set_type t - ) loc - ) in - register_accessors getters; - register_accessors setters; - - let getters_and_setters = SMap.merge (fun _ getter setter -> - match getter, setter with - | Some (loc1, t1, _), Some (loc2, t2, _) -> Some (Type.GetSet (loc1, t1, loc2, t2)) - | Some (loc, t, _), None -> Some (Type.Get (loc, t)) - | None, Some (loc, t, _) -> Some (Type.Set (loc, t)) - | _ -> None - ) getters setters in - - let fields = SMap.map to_field s.fields in - - (* Register fields with the type table *) - SMap.iter (fun name fld -> - let loc_type_opt = match fld with - | Some loc, _, Annot t -> Some (loc, t) - | Some loc, _, Infer (func_sig, _) -> Some (loc, Func_sig.gettertype func_sig) - | _ -> None + let getters = + SMap.map (fun (loc, t, _, set_type) -> (loc, F.gettertype t, set_type)) s.getters in - Option.iter ~f:(fun (loc, t) -> - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx) - ) loc_type_opt - ) s.fields; - - let methods = SMap.map (fun (loc, t) -> Type.Method (loc, t)) methods in - - (* Treat proto fields as methods, as they are on the proto object *) - let methods = SMap.fold (fun name fld acc -> - SMap.add name (to_field fld) acc - ) s.proto_fields methods in - - (* Treat getters and setters as methods *) - let methods = SMap.union getters_and_setters methods in - - (* Previously, call properties were stored in the props map under the key - $call. Unfortunately, this made it possible to specify call properties - using this syntax in interfaces, declared classes, and even normal classes. - - Note that $call properties always override the call property syntax - As before, if both are present, the $call property is used and the call - property is ignored. 
*) - let call = match s.call_deprecated with - | Some t -> Some t - | None -> - match List.rev s.calls with - | [] -> None - | [t] -> Some t - | t0::t1::ts -> - let open Type in - let t = DefT (reason_of_t t0, IntersectionT (InterRep.make t0 t1 ts)) in - Some t - in - - (* Only un-initialized fields require annotations, so determine now - * (syntactically) which fields have initializers *) - let initialized_fields = SMap.fold (fun x (_, _, field) acc -> - match field with - | Annot _ -> acc - | Infer _ -> SSet.add x acc - ) s.fields SSet.empty in - - initialized_fields, fields, methods, call - -let specialize cx targs c = - let open Type in - let reason = reason_of_t c in - Tvar.mk_derivable_where cx reason (fun tvar -> - Flow.flow cx (c, SpecializeT (unknown_use, reason, reason, None, targs, tvar)) - ) - -let statictype cx tparams_with_this x = - let s = x.static in - let inited_fields, fields, methods, call = elements cx s in - let props = SMap.union fields methods - ~combine:(fun _ _ -> - Utils_js.assert_false (Utils_js.spf - "static fields and methods must be disjoint: %s" - (Debug_js.dump_reason cx s.reason))) - in - let static_proto = match x.super with - | Interface _ -> Type.NullProtoT s.reason (* interfaces don't have statics *) - | Class { extends; _ } -> - match extends with - (* class B extends A {}; B.__proto__ === A *) - | Explicit (annot_loc, c, targs) -> - let this = SMap.find_unsafe "this" tparams_with_this in - (* Eagerly specialize when there are no targs *) - let c = if targs = None then specialize cx targs c else c in - Type.(class_type (this_typeapp ~annot_loc c this targs)) - (* class A {}; A.__proto__ === Function.prototype *) - | Implicit _ -> Type.FunProtoT s.reason - in - (* Statics are not exact, because we allow width subtyping between them. 
+ let setters = + SMap.map (fun (loc, t, _, set_type) -> (loc, F.settertype t, set_type)) s.setters + in + (* Register getters and setters with the typed AST *) + let register_accessors = + SMap.iter (fun _ (loc, t, set_type) -> Option.iter ~f:(fun _ -> set_type t) loc) + in + register_accessors getters; + register_accessors setters; + + let getters_and_setters = + SMap.merge + (fun _ getter setter -> + match (getter, setter) with + | (Some (loc1, t1, _), Some (loc2, t2, _)) -> Some (Type.GetSet (loc1, t1, loc2, t2)) + | (Some (loc, t, _), None) -> Some (Type.Get (loc, t)) + | (None, Some (loc, t, _)) -> Some (Type.Set (loc, t)) + | _ -> None) + getters + setters + in + let fields = SMap.map to_field s.fields in + let methods = SMap.map (fun (loc, t) -> Type.Method (loc, t)) methods in + (* Treat proto fields as methods, as they are on the proto object *) + let methods = + SMap.fold (fun name fld acc -> SMap.add name (to_field fld) acc) s.proto_fields methods + in + (* Treat getters and setters as methods *) + let methods = SMap.union getters_and_setters methods in + let call = + match List.rev s.calls with + | [] -> None + | [t] -> Some t + | t0 :: t1 :: ts -> + Type.( + let t = IntersectionT (reason_of_t t0, InterRep.make t0 t1 ts) in + Some t) + in + (* Only un-initialized fields require annotations, so determine now + * (syntactically) which fields have initializers *) + let initialized_fields = + SMap.fold + (fun x (_, _, field) acc -> + match field with + | Annot _ -> acc + | Infer _ -> SSet.add x acc) + s.fields + SSet.empty + in + (initialized_fields, fields, methods, call) + + let specialize cx targs c = + Type.( + let reason = reason_of_t c in + Tvar.mk_derivable_where cx reason (fun tvar -> + Flow.flow cx (c, SpecializeT (unknown_use, reason, reason, None, targs, tvar)))) + + let statictype cx static_proto x = + let s = x.static in + let (inited_fields, fields, methods, call) = elements cx s in + let props = + SMap.union fields methods ~combine:(fun _ _ -> + Utils_js.assert_false + (Utils_js.spf + "static fields and methods must be disjoint: %s" + (Debug_js.dump_reason cx s.reason))) + in + (* Statics are not exact, because we allow width subtyping between them. Specifically, given class A and class B extends A, Class <: Class
. *) - let static = - Obj_type.mk_with_proto cx s.reason ~props ?call static_proto - ~sealed:true ~exact:false - in - let open Type in - match static with - | DefT (_, ObjT o) -> inited_fields, o - | _ -> failwith "statics must be an ObjT" - -let insttype cx ~initialized_static_fields s = - let constructor = - let ts = List.rev_map (fun (loc, t, _, _) -> loc, Func_sig.methodtype cx t) s.constructor in - match ts with - | [] -> None - | [x] -> Some x - | (loc0, t0)::(_loc1, t1)::ts -> - let ts = List.map snd ts in - let open Type in - let t = DefT (reason_of_t t0, IntersectionT (InterRep.make t0 t1 ts)) in - Some (loc0, t) - in - let type_args = List.map (fun {Type.name; reason; polarity; _} -> - let t = SMap.find_unsafe name s.tparams_map in - name, reason, t, polarity - ) s.tparams in - let initialized_fields, fields, methods, call = elements cx ?constructor s.instance in - { Type. - class_id = s.id; - type_args; - own_props = Context.make_property_map cx fields; - proto_props = Context.make_property_map cx methods; - inst_call_t = Option.map call ~f:(Context.make_call_prop cx); - initialized_fields; - initialized_static_fields; - has_unknown_react_mixins = false; - structural = structural s; - } + let static = + Obj_type.mk_with_proto cx s.reason ~props ?call static_proto ~sealed:true ~exact:false + in + Type.( + match static with + | DefT (_, _, ObjT o) -> (inited_fields, o) + | _ -> failwith "statics must be an ObjT") + + let insttype cx ~initialized_static_fields s = + let constructor = + let ts = List.rev_map (fun (loc, t, _, _) -> (loc, F.methodtype cx t)) s.constructor in + match ts with + | [] -> None + | [x] -> Some x + | (loc0, t0) :: (_loc1, t1) :: ts -> + let ts = Core_list.map ~f:snd ts in + Type.( + let t = IntersectionT (reason_of_t t0, InterRep.make t0 t1 ts) in + Some (loc0, t)) + in + let type_args = + Core_list.map + ~f:(fun { Type.name; reason; polarity; _ } -> + let t = SMap.find_unsafe name s.tparams_map in + (name, reason, t, polarity)) + (Type.TypeParams.to_list s.tparams) + in + let (initialized_fields, fields, methods, call) = elements cx ?constructor s.instance in + { + Type.class_id = s.id; + type_args; + own_props = Context.generate_property_map cx fields; + proto_props = Context.generate_property_map cx methods; + inst_call_t = Option.map call ~f:(Context.make_call_prop cx); + initialized_fields; + initialized_static_fields; + has_unknown_react_mixins = false; + inst_kind = inst_kind s; + } -let add_this self cx reason tparams tparams_map = - (* We haven't computed the instance type yet, but we can still capture a + let add_this self cx reason tparams tparams_map = + (* We haven't computed the instance type yet, but we can still capture a reference to it using the class name (as long as the class has a name). We need this reference to constrain the `this` in the class. *) - let rec_instance_type = - match tparams with - | [] -> - Flow.mk_instance cx reason self - | _ -> - let targs = List.map (fun tp -> - let {Type.reason; name; polarity; _} = tp in - Type.BoundT (reason, name, polarity) - ) tparams in - Type.typeapp self targs - in - let this_reason = replace_reason_const RThisType reason in - let this_tp = { Type. - name = "this"; - reason = this_reason; - bound = rec_instance_type; - polarity = Type.Positive; - default = None; - } in - rec_instance_type, - (* Add the type of `this` to the end of the list of type - parameters. 
Remember, order is important, since we don't have recursive - bounds (aka F-bounds): the bound of This refers to all the other type - parameters! *) - tparams@[this_tp], - SMap.add "this" (Type.BoundT (this_reason, "this", Type.Positive)) tparams_map - -let remove_this x = - if structural x then x else { - x with - tparams = List.rev (List.tl (List.rev x.tparams)); - tparams_map = SMap.remove "this" x.tparams_map; - } + let rec_instance_type = + match tparams with + | None -> Flow.mk_instance cx reason self + | _ -> + let targs = + Core_list.map + ~f:(fun tp -> + let { Type.reason; name; polarity; _ } = tp in + Type.BoundT (reason, name, polarity)) + (Type.TypeParams.to_list tparams) + in + Type.typeapp self targs + in + let this_reason = replace_desc_reason RThisType reason in + let this_tp = + { + Type.name = "this"; + reason = this_reason; + bound = rec_instance_type; + polarity = Polarity.Positive; + default = None; + } + in + let tparams = + (* Use the loc for the original tparams, or just the loc for the this type if there are no + * tparams *) + let loc = Option.value_map ~default:(aloc_of_reason this_reason) ~f:fst tparams in + (* Add the type of `this` to the end of the list of type + parameters. Remember, order is important, since we don't have recursive + bounds (aka F-bounds): the bound of This refers to all the other type + parameters! *) + let tparams_lst = Type.TypeParams.to_list tparams @ [this_tp] in + (* Obviously there is at least one element since we just added `this_tp` *) + let tparams_nel = Option.value_exn (Nel.of_list tparams_lst) in + Some (loc, tparams_nel) + in + ( rec_instance_type, + tparams, + SMap.add "this" (Type.BoundT (this_reason, "this", Polarity.Positive)) tparams_map ) + + let remove_this x = + if structural x then + x + else + let tparams = + (* Remove the last type param. Assert that we have at least one type param. 
*) + let (loc, tparams_nel) = Option.value_exn x.tparams in + tparams_nel + |> Nel.to_list + |> List.rev + |> List.tl + |> List.rev + |> Nel.of_list + |> Option.map ~f:(fun nel -> (loc, nel)) + in + { x with tparams; tparams_map = SMap.remove "this" x.tparams_map } -let supertype cx tparams_with_this x = - let super_reason = replace_reason (fun d -> RSuperOf d) x.instance.reason in - let open Type in - match x.super with - | Interface {extends; callable} -> - let extends = List.map (function - | loc, c, None -> - let reason = annot_reason (repos_reason loc (reason_of_t c)) in - Flow.mk_instance cx reason c - | annot_loc, c, Some targs -> typeapp ~annot_loc c targs - ) extends in - (* If the interface definition includes a callable property, add the + let supertype cx tparams_with_this x = + let super_reason = update_desc_reason (fun d -> RSuperOf d) x.instance.reason in + let static_reason = x.static.reason in + Type.( + match x.super with + | Interface { inline = _; extends; callable } -> + let extends = + Core_list.map + ~f:(function + | (loc, c, None) -> + let reason = repos_reason loc ~annot_loc:loc (reason_of_t c) in + Flow.mk_instance cx reason c + | (annot_loc, c, Some targs) -> typeapp ~annot_loc c targs) + extends + in + (* If the interface definition includes a callable property, add the function prototype to the super type *) - let extends = - if callable - then (FunProtoT super_reason)::extends - else extends - in - (* Interfaces support multiple inheritance, which is modelled as an + let extends = + if callable then + FunProtoT super_reason :: extends + else + extends + in + (* Interfaces support multiple inheritance, which is modelled as an intersection of super types. TODO: Instead of using an intersection for this, we should resolve the extends and build a flattened type, and just use FunProtoT/ObjProtoT as the prototype *) - (match extends with - | [] -> ObjProtoT super_reason - | [t] -> t - | t0::t1::ts -> DefT (super_reason, IntersectionT (InterRep.make t0 t1 ts))) - | Class {extends; mixins; _} -> - let this = SMap.find_unsafe "this" tparams_with_this in - let t = match extends with - | Explicit (annot_loc, c, targs) -> - (* Eagerly specialize when there are no targs *) - let c = if targs = None then specialize cx targs c else c in - this_typeapp ~annot_loc c this targs - | Implicit {null} -> - if null then NullProtoT super_reason else ObjProtoT super_reason + let super = + match extends with + | [] -> ObjProtoT super_reason + | [t] -> t + | t0 :: t1 :: ts -> IntersectionT (super_reason, InterRep.make t0 t1 ts) + in + (* interfaces don't have statics *) + let static_proto = Type.NullProtoT static_reason in + (super, static_proto) + | Class { extends; mixins; _ } -> + let this = SMap.find_unsafe "this" tparams_with_this in + let (extends_t, static_proto) = + match extends with + | Explicit (annot_loc, c, targs) -> + (* Eagerly specialize when there are no targs *) + let c = + if targs = None then + specialize cx targs c + else + c + in + let t = this_typeapp ~annot_loc c this targs in + (* class B extends A {}; B.__proto__ === A *) + let static_proto = class_type ~annot_loc t in + (t, static_proto) + | Implicit { null } -> + let t = + if null then + NullProtoT super_reason + else + ObjProtoT super_reason + in + (* class A {}; A.__proto__ === Function.prototype *) + let static_proto = FunProtoT static_reason in + (t, static_proto) + in + let mixins_rev = + List.rev_map + (fun (annot_loc, c, targs) -> + (* Eagerly specialize when there are no targs *) + let c = + if targs 
= None then + specialize cx targs c + else + c + in + this_typeapp ~annot_loc c this targs) + mixins + in + let super = + match List.rev_append mixins_rev [extends_t] with + | [] -> failwith "impossible" + | [t] -> t + | t0 :: t1 :: ts -> IntersectionT (super_reason, InterRep.make t0 t1 ts) + in + (super, static_proto)) + + let thistype cx x = + let tparams_with_this = x.tparams_map in + let x = remove_this x in + let { static = { reason = sreason; _ }; instance = { reason; _ }; _ } = x in + let (super, static_proto) = supertype cx tparams_with_this x in + let implements = + match x.super with + | Interface _ -> [] + | Class { implements; _ } -> + Core_list.map + ~f:(function + | (loc, c, None) -> + let reason = repos_reason loc ~annot_loc:loc (Type.reason_of_t c) in + Flow.mk_instance cx reason c + | (annot_loc, c, Some targs) -> Type.typeapp ~annot_loc c targs) + implements in - let mixins_rev = List.rev_map (fun (annot_loc, c, targs) -> - (* Eagerly specialize when there are no targs *) - let c = if targs = None then specialize cx targs c else c in - this_typeapp ~annot_loc c this targs - ) mixins in - match List.rev_append mixins_rev [t] with - | [] -> failwith "impossible" - | [t] -> t - | t0::t1::ts -> - DefT (super_reason, IntersectionT (InterRep.make t0 t1 ts)) - -let thistype cx x = - let tparams_with_this = x.tparams_map in - let x = remove_this x in - let { - static = {reason = sreason; _}; - instance = {reason; _}; - _; - } = x in - let super = supertype cx tparams_with_this x in - let implements = match x.super with - | Interface _ -> [] - | Class {implements; _} -> - List.map (function - | loc, c, None -> - let reason = annot_reason (repos_reason loc (Type.reason_of_t c)) in - Flow.mk_instance cx reason c - | annot_loc, c, Some targs -> Type.typeapp ~annot_loc c targs - ) implements - in - let initialized_static_fields, static_objtype = statictype cx tparams_with_this x in - let insttype = insttype cx ~initialized_static_fields x in - let open Type in - let static = DefT (sreason, ObjT static_objtype) in - DefT (reason, InstanceT (static, super, implements, insttype)) - -let check_implements cx def_reason x = - match x.super with - | Interface _ -> () - | Class {implements; _} -> - let this = thistype cx x in + let (initialized_static_fields, static_objtype) = statictype cx static_proto x in + let insttype = insttype cx ~initialized_static_fields x in + Type.( + let static = DefT (sreason, bogus_trust (), ObjT static_objtype) in + DefT (reason, bogus_trust (), InstanceT (static, super, implements, insttype))) + + let check_implements cx def_reason x = + match x.super with + | Interface _ -> () + | Class { implements; _ } -> + let this = thistype cx x in + let reason = x.instance.reason in + Type.( + List.iter + (fun (loc, c, targs_opt) -> + let i = + match targs_opt with + | None -> + let reason = repos_reason loc ~annot_loc:loc (reason_of_t c) in + Flow.mk_instance cx reason c + | Some targs -> typeapp ~annot_loc:loc c targs + in + let use_op = + Op + (ClassImplementsCheck + { def = def_reason; name = reason; implements = reason_of_t i }) + in + Flow.flow cx (i, ImplementsT (use_op, this))) + implements) + + let check_super cx def_reason x = + let tparams_with_this = x.tparams_map in + let x = remove_this x in let reason = x.instance.reason in - let open Type in - List.iter (fun (loc, c, targs_opt) -> - let i = match targs_opt with - | None -> - let reason = annot_reason (repos_reason loc (reason_of_t c)) in - Flow.mk_instance cx reason c - | Some targs -> typeapp 
~annot_loc:loc c targs - in - let use_op = Op (ClassImplementsCheck { - def = def_reason; - name = reason; - implements = reason_of_t i; - }) in - Flow.flow cx (i, ImplementsT (use_op, this)) - ) implements - -let check_super cx def_reason x = - let tparams_with_this = x.tparams_map in - let x = remove_this x in - let reason = x.instance.reason in - let open Type in - - (* NOTE: SuperT ignores the constructor anyway, so we don't pass it here. + Type.( + (* NOTE: SuperT ignores the constructor anyway, so we don't pass it here. Call properties are also ignored, so we ignore that result. *) - let _, own, proto, _call = elements cx ?constructor:None x.instance in - let static = - (* NOTE: The own, proto maps are disjoint by construction. *) - let _, own, proto, _call = elements cx x.static in - SMap.union own proto - in - - SMap.iter (fun x p1 -> - match SMap.get x proto with - | None -> () - | Some p2 -> - let use_op = Op (ClassOwnProtoCheck { - prop = x; - own_loc = Property.first_loc p1; - proto_loc = Property.first_loc p2; - }) in - let propref = Named (reason, x) in - Flow.flow_p cx ~use_op reason reason propref (p1, p2) - ) own; - - let super = supertype cx tparams_with_this x in - let use_op = Op (ClassExtendsCheck { - def = def_reason; - name = reason; - extends = reason_of_t super; - }) in - Flow.flow cx (super, SuperT (use_op, reason, Derived {own; proto; static})) - -(* TODO: Ideally we should check polarity for all class types, but this flag is - flipped off for interface/declare class currently. *) -let classtype cx ?(check_polarity=true) x = - let this = thistype cx x in - let { tparams; _ } = remove_this x in - let open Type in - (if check_polarity then Flow.check_polarity cx Positive this); - let t = - if structural x - then class_type ~structural:true this - else this_class_type this - in - poly_type (Context.make_nominal cx) tparams t - -(* Processes the bodies of instance and static class members. *) -let toplevels cx ~decls ~stmts ~expr x = - Env.in_lex_scope cx (fun () -> - let new_entry ?(state=Scope.State.Initialized) t = - Scope.Entry.new_let ~loc:(Type.loc_of_t t) ~state t - in - - let method_ this super ~set_asts f = - let save_return = Abnormal.clear_saved Abnormal.Return in - let save_throw = Abnormal.clear_saved Abnormal.Throw in - let asts = f |> Func_sig.generate_tests cx ( - Func_sig.toplevels None cx this super ~decls ~stmts ~expr - ) in - set_asts asts; - ignore (Abnormal.swap_saved Abnormal.Return save_return); - ignore (Abnormal.swap_saved Abnormal.Throw save_throw) - in - - let field config this super _name (_, _, value) = - match config, value with - | Options.ESPROPOSAL_IGNORE, _ -> () - | _, Annot _ -> () - | _, Infer (fsig, set_asts) -> method_ this super ~set_asts fsig - in + let (_, own, proto, _call) = elements cx ?constructor:None x.instance in + let static = + (* NOTE: The own, proto maps are disjoint by construction. 
*) + let (_, own, proto, _call) = elements cx x.static in + SMap.union own proto + in + SMap.iter + (fun x p1 -> + match SMap.get x proto with + | None -> () + | Some p2 -> + let use_op = + Op + (ClassOwnProtoCheck + { prop = x; own_loc = Property.first_loc p1; proto_loc = Property.first_loc p2 }) + in + let propref = Named (reason, x) in + Flow.flow_p cx ~use_op reason reason propref (p1, p2)) + own; + + let (super, _) = supertype cx tparams_with_this x in + let use_op = + Op (ClassExtendsCheck { def = def_reason; name = reason; extends = reason_of_t super }) + in + Flow.flow cx (super, SuperT (use_op, reason, Derived { own; proto; static }))) - let this = SMap.find_unsafe "this" x.tparams_map in - let static = Type.class_type this in + (* TODO: Ideally we should check polarity for all class types, but this flag is + flipped off for interface/declare class currently. *) + let classtype cx ?(check_polarity = true) x = + let this = thistype cx x in + let { tparams; _ } = remove_this x in + Type.( + if check_polarity then Flow.check_polarity cx Polarity.Positive this; + let t = + if structural x then + class_type ~structural:true this + else + this_class_type this + in + poly_type_of_tparams (Context.make_nominal cx) tparams t) - let super, static_super = - let super_reason = replace_reason (fun d -> RSuperOf d) x.instance.reason in - match x.super with - | Interface _ -> failwith "tried to evaluate toplevel of interface" - | Class {extends; _} -> - match extends with - | Explicit (annot_loc, c, targs) -> - (* Eagerly specialize when there are no targs *) - (* TODO: We can also specialize when there are targs, because this + (* Processes the bodies of instance and static class members. *) + let toplevels cx ~decls ~stmts ~expr ~private_property_map x = + Env.in_lex_scope cx (fun () -> + let new_entry ?(state = Scope.State.Initialized) t = + Scope.Entry.new_let ~loc:(Type.loc_of_t t) ~state t + in + let method_ this super ~set_asts f = + let save_return = Abnormal.clear_saved Abnormal.Return in + let save_throw = Abnormal.clear_saved Abnormal.Throw in + let asts = + f |> F.generate_tests cx (F.toplevels None cx this super ~decls ~stmts ~expr) + in + set_asts asts; + ignore (Abnormal.swap_saved Abnormal.Return save_return); + ignore (Abnormal.swap_saved Abnormal.Throw save_throw) + in + let field config this super _name (_, _, value) = + match (config, value) with + | (Options.ESPROPOSAL_IGNORE, _) -> () + | (_, Annot _) -> () + | (_, Infer (fsig, set_asts)) -> method_ this super ~set_asts fsig + in + let this = SMap.find_unsafe "this" x.tparams_map in + let static = Type.class_type this in + let (super, static_super) = + let super_reason = update_desc_reason (fun d -> RSuperOf d) x.instance.reason in + match x.super with + | Interface _ -> failwith "tried to evaluate toplevel of interface" + | Class { extends; _ } -> + (match extends with + | Explicit (annot_loc, c, targs) -> + (* Eagerly specialize when there are no targs *) + (* TODO: We can also specialize when there are targs, because this code executes within generate_tests. However, the type normalizer expects a PolyT here. 
*) - let c = if targs = None then specialize cx targs c else c in - let t = Type.this_typeapp ~annot_loc c this targs in - t, Type.class_type t - | Implicit {null} -> - let open Type in - (if null then NullProtoT super_reason else ObjProtoT super_reason), - FunProtoT super_reason - in - - (* Bind private fields to the environment *) - let to_prop_map = fun x -> Context.make_property_map cx (SMap.map to_field x) in - Env.bind_class cx x.id (to_prop_map x.instance.private_fields) - (to_prop_map x.static.private_fields); - - x |> with_sig ~static:true (fun s -> - (* process static methods and fields *) - let this, super = new_entry static, new_entry static_super in - iter_methods (fun (_loc, f, set_asts, _) -> method_ this super ~set_asts f) s; - let config = Context.esproposal_class_static_fields cx in - SMap.iter (field config this super) s.fields; - SMap.iter (field config this super) s.private_fields - ); - - x |> with_sig ~static:false (fun s -> - (* process constructor *) - begin - (* When in a derived constructor, leave this and super undeclared, the + let c = + if targs = None then + specialize cx targs c + else + c + in + let t = Type.this_typeapp ~annot_loc c this targs in + (t, Type.class_type ~annot_loc t) + | Implicit { null } -> + Type. + ( ( if null then + NullProtoT super_reason + else + ObjProtoT super_reason ), + FunProtoT super_reason )) + in + (* Bind private fields to the environment *) + Env.bind_class cx x.id private_property_map (to_prop_map cx x.static.private_fields); + + x + |> with_sig ~static:true (fun s -> + (* process static methods and fields *) + let (this, super) = (new_entry static, new_entry static_super) in + iter_methods (fun (_loc, f, set_asts, _) -> method_ this super ~set_asts f) s; + let config = Context.esproposal_class_static_fields cx in + SMap.iter (field config this super) s.fields; + SMap.iter (field config this super) s.private_fields); + + x + |> with_sig ~static:false (fun s -> + (* process constructor *) + begin + (* When in a derived constructor, leave this and super undeclared, the same way let-scoped variables are stored in the environment before their declaration. Once we see a super() call, the bindings are treated as declared and initialized. This protects against using `this` before it is allocated by the superclass. 
*) - let derived_ctor = match x.super with - | Class {extends = Explicit _; _} -> true - | _ -> false - in - let new_entry t = - if derived_ctor then - new_entry t ~state:Scope.State.Undeclared - else - new_entry t - in - let this, super = new_entry this, new_entry super in - x.constructor |> List.iter (fun (_, fsig, set_asts, _) -> - method_ this super ~set_asts fsig - ) - end; - - (* process instance methods and fields *) - begin - let this, super = new_entry this, new_entry super in - iter_methods (fun (_, msig, set_asts, _) -> method_ this super ~set_asts msig) s; - let config = Context.esproposal_class_instance_fields cx in - SMap.iter (field config this super) s.fields; - SMap.iter (field config this super) s.private_fields; - SMap.iter (field config this super) s.proto_fields; + let derived_ctor = + match x.super with + | Class { extends = Explicit _; _ } -> true + | _ -> false + in + let new_entry t = + if derived_ctor then + new_entry t ~state:Scope.State.Undeclared + else + new_entry t + in + let (this, super) = (new_entry this, new_entry super) in + x.constructor + |> List.iter (fun (_, fsig, set_asts, _) -> method_ this super ~set_asts fsig) + end; + + (* process instance methods and fields *) + let (this, super) = (new_entry this, new_entry super) in + iter_methods (fun (_, msig, set_asts, _) -> method_ this super ~set_asts msig) s; + let config = Context.esproposal_class_instance_fields cx in + SMap.iter (field config this super) s.fields; + SMap.iter (field config this super) s.private_fields; + SMap.iter (field config this super) s.proto_fields)) + + module This = struct + let is_bound_to_empty x = + Type.( + Flow.match_this_binding x.tparams_map (function + | DefT (_, _, EmptyT _) -> true + | _ -> false)) + + exception FoundInClass + + class detector = + object + inherit [ALoc.t] Flow_ast_mapper.mapper as super + + method! generic_identifier_type (git : (ALoc.t, ALoc.t) Ast.Type.Generic.Identifier.t) = + Ast.Type.Generic.Identifier.( + match git with + | Unqualified (_, { Ast.Identifier.name = "this"; comments = _ }) -> raise FoundInClass + | _ -> super#generic_identifier_type git) end - )) - -module This = struct - let is_bound_to_empty x = - let open Type in - Flow.match_this_binding x.tparams_map - (function DefT (_, EmptyT) -> true | _ -> false) - - exception FoundInClass - class detector = object - inherit Flow_ast_mapper.mapper as super - - method! generic_identifier_type (git: (Loc.t, Loc.t) Ast.Type.Generic.Identifier.t) = - let open Ast.Type.Generic.Identifier in - match git with - | Unqualified (_, "this") -> raise FoundInClass - | _ -> super#generic_identifier_type git - end - let in_class c = - try (new detector)#class_ Loc.none c |> ignore; false - with FoundInClass -> true + let in_class c = + try + (new detector)#class_ ALoc.none c |> ignore; + false + with FoundInClass -> true + end end - -let with_typeparams cx f x = - Type_table.with_typeparams x.tparams (Context.type_table cx) f diff --git a/src/typing/class_sig.mli b/src/typing/class_sig.mli index 4d67f8874d2..eb0fb2da3ec 100644 --- a/src/typing/class_sig.mli +++ b/src/typing/class_sig.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
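The hunk that follows replaces the old concrete class_sig.mli interface with a functorized one: the shared signature now lives in Class_sig_intf, and Class_sig exposes Make (F : Func_sig.S), so the class machinery can be instantiated over different Func_sig implementations. As a minimal, self-contained sketch of that pattern (illustrative module names only; Flow's real Func_sig.S and Class_sig_intf.S carry far more operations), a functor whose result is constrained by "S with type ... = F.t" looks roughly like this:

(* Hypothetical stand-ins for Func_sig.S and Class_sig_intf.S. *)
module type FUNC_SIG = sig
  type t

  val default : t
end

module type CLASS_SIG = sig
  (* Abstract in the signature; pinned to the functor argument below. *)
  type func_sig

  type t

  val empty : t

  val add_method : string -> func_sig -> t -> t
end

(* The "with type func_sig = F.t" constraint is what lets callers feed values
   built by their own FUNC_SIG implementation into the resulting module. *)
module Make (F : FUNC_SIG) : CLASS_SIG with type func_sig = F.t = struct
  type func_sig = F.t

  (* Methods keyed by name; a later definition overwrites an earlier one,
     mirroring how classes treat duplicate members. *)
  type t = (string * func_sig) list

  let empty = []

  let add_method name fsig x = (name, fsig) :: List.remove_assoc name x
end

(* Usage: instantiate the functor with a concrete FUNC_SIG-like module. *)
module Dummy_func_sig = struct
  type t = unit

  let default = ()
end

module Dummy_class_sig = Make (Dummy_func_sig)

let (_ : Dummy_class_sig.t) =
  Dummy_class_sig.(empty |> add_method "constructor" Dummy_func_sig.default)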
@@ -7,194 +7,7 @@ (** Intermediate representation for classes and interfaces *) -type t +include module type of Class_sig_intf -type set_asts = - (Loc.t, Loc.t * Type.t) Flow_ast.Function.body option * - (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t option - -> unit - -type set_type = Type.t -> unit - -and field = - | Annot of Type.t - | Infer of Func_sig.t * set_asts - -type super = - | Interface of { - extends: typeapp list; - callable: bool; - } - | Class of { - extends: extends; - mixins: typeapp list; (* declare class only *) - implements: typeapp list - } - -and extends = - | Explicit of typeapp - | Implicit of { null: bool } - -and typeapp = Loc.t * Type.t * Type.t list option - -(** 1. Constructors **) - -(** Create signature with no elements. *) -val empty: - int -> (* id *) - Reason.t -> - Type.typeparam list -> - Type.t SMap.t -> (* tparams_map *) - super -> - t - -(** Add constructor to signature. - - Overwrites any existing constructor. This implements the behavior of - classes, which permit duplicate definitions where latter definitions - overwrite former ones. *) -val add_constructor: - Loc.t option -> - Func_sig.t -> - ?set_asts:set_asts -> - ?set_type:set_type -> - t -> t - -val add_default_constructor: Reason.t -> t -> t - -(** Add constructor override to signature. - - Does not overwrite existing constructors. This implements the behavior of - interfaces, which interpret duplicate definitions as branches of a single - overloaded constructor. *) -val append_constructor: - Loc.t option -> - Func_sig.t -> - ?set_asts:set_asts -> - ?set_type:set_type -> - t -> t - -(** Add field to signature. *) -val add_field: static:bool -> string -> Loc.t -> Type.polarity -> field -> t -> t - -(** Add indexer to signature. *) -val add_indexer: - static:bool -> - Type.polarity -> - key:(Loc.t * Type.t) -> - value:(Loc.t * Type.t) -> - t -> t - -(** Add static `name` field. *) -val add_name_field: t -> t - -(** Add proto field to signature. *) -val add_proto_field: string -> Loc.t -> Type.polarity -> field -> t -> t - -(** Add private field to signature. *) -val add_private_field: string -> Loc.t -> Type.polarity -> field -> static:bool -> t -> t - -(** Add method to signature. - - Overwrites any existing synonymous method. This implements the behavior of - classes, which permit duplicate definitions where latter definitions - overwrite former ones. *) -val add_method: - static:bool -> - string -> - Loc.t -> - Func_sig.t -> - ?set_asts:set_asts -> - ?set_type:set_type -> - t -> t - -(** Add method override to signature. - - Does not overwrite existing synonymous methods. This implements the - behavior of interfaces, which interpret duplicate definitions as branches - of a single overloaded method. *) -val append_method: - static:bool -> - string -> - Loc.t -> - Func_sig.t -> - ?set_asts:set_asts -> - ?set_type:set_type -> - t -> t - -val append_call: static:bool -> Type.t -> t -> t - -val add_call_deprecated: static:bool -> Type.t -> t -> t - -(** Add getter to signature. *) -val add_getter: - static:bool -> - string -> - Loc.t -> - Func_sig.t -> - ?set_asts:set_asts -> - ?set_type:set_type -> - t -> t - -(** Add setter to signature. 
*) -val add_setter: - static:bool -> - string -> - Loc.t -> - Func_sig.t -> - ?set_asts:set_asts -> - ?set_type:set_type -> - t -> t - -(** Check if this signature defines a given field *) -val mem_field: string -> static:bool -> t -> bool - -(** Check if this signature defines a constructor *) -val mem_constructor: t -> bool - -val add_this: - Type.t -> (* self *) - Context.t -> - Reason.t -> - Type.typeparam list -> - Type.t SMap.t -> (* tparams_map *) - Type.t * Type.typeparam list * Type.t SMap.t - -(** 1. Manipulation *) - -(** Emits constraints to ensure the signature is compatible with its declared - interface implementations (classes) *) -val check_implements: Context.t -> Reason.reason -> t -> unit - -(** Emits constraints to ensure the signature is compatible with its declared - superclass (classes) or extends/mixins (interfaces) *) -val check_super: Context.t -> Reason.reason -> t -> unit - -(** Invoke callback with type parameters substituted by upper/lower bounds. *) -val generate_tests: Context.t -> - (t -> 'a) -> t -> 'a - -(** Evaluate the class body. *) -val toplevels: Context.t -> - decls:(Context.t -> (Loc.t, Loc.t) Flow_ast.Statement.t list -> unit) -> - stmts:(Context.t -> (Loc.t, Loc.t) Flow_ast.Statement.t list -> - (Loc.t, Loc.t * Type.t) Flow_ast.Statement.t list) -> - expr:(Context.t -> (Loc.t, Loc.t) Flow_ast.Expression.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t) -> - t -> unit - -(** 1. Type Conversion *) - -val thistype: Context.t -> t -> Type.t - -(* Create a (polymorphic) class type. *) -val classtype: Context.t -> - ?check_polarity:bool -> - t -> Type.t - -module This: sig - val is_bound_to_empty: t -> bool - val in_class: (Loc.t, Loc.t) Flow_ast.Class.t -> bool -end - -val with_typeparams: Context.t -> (unit -> 'a) -> t -> 'a +module Make (F : Func_sig.S) : + S with type func_sig = F.t and type func_params_tast = F.func_params_tast diff --git a/src/typing/class_sig_intf.ml b/src/typing/class_sig_intf.ml new file mode 100644 index 00000000000..19967b3211a --- /dev/null +++ b/src/typing/class_sig_intf.ml @@ -0,0 +1,208 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module type S = sig + type func_sig + + type func_params_tast + + type t + + type set_asts = + func_params_tast option + * (ALoc.t, ALoc.t * Type.t) Flow_ast.Function.body option + * (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t option -> + unit + + type set_type = Type.t -> unit + + and field = + | Annot of Type.t + | Infer of func_sig * set_asts + + type field' = ALoc.t option * Polarity.t * field + + type super = + | Interface of { + inline: bool; + extends: typeapp list; + callable: bool; + } + | Class of { + extends: extends; + mixins: typeapp list; + (* declare class only *) + implements: typeapp list; + } + + and extends = + | Explicit of typeapp + | Implicit of { null: bool } + + and typeapp = ALoc.t * Type.t * Type.t list option + + (** 1. Constructors **) + + val empty : + ALoc.t -> (* id *) + Reason.t -> Type.typeparams -> Type.t SMap.t -> (* tparams_map *) + super -> t + (** Create signature with no elements. *) + + val add_constructor : + ALoc.t option -> func_sig -> ?set_asts:set_asts -> ?set_type:set_type -> t -> t + (** Add constructor to signature. + + Overwrites any existing constructor. This implements the behavior of + classes, which permit duplicate definitions where latter definitions + overwrite former ones. 
*) + + val add_default_constructor : Reason.t -> t -> t + + val append_constructor : + ALoc.t option -> func_sig -> ?set_asts:set_asts -> ?set_type:set_type -> t -> t + (** Add constructor override to signature. + + Does not overwrite existing constructors. This implements the behavior of + interfaces, which interpret duplicate definitions as branches of a single + overloaded constructor. *) + + val add_field : static:bool -> string -> ALoc.t -> Polarity.t -> field -> t -> t + (** Add field to signature. *) + + val add_indexer : static:bool -> Polarity.t -> key:Type.t -> value:Type.t -> t -> t + (** Add indexer to signature. *) + + val add_name_field : t -> t + (** Add static `name` field. *) + + val add_proto_field : string -> ALoc.t -> Polarity.t -> field -> t -> t + (** Add proto field to signature. *) + + val add_private_field : string -> ALoc.t -> Polarity.t -> field -> static:bool -> t -> t + (** Add private field to signature. *) + + (* Access public fields of signature *) + val public_fields_of_signature : static:bool -> t -> field' SMap.t + + (* Access private fields of signature *) + val private_fields_of_signature : static:bool -> t -> field' SMap.t + + val add_method : + static:bool -> + string -> + ALoc.t -> + func_sig -> + ?set_asts:set_asts -> + ?set_type:set_type -> + t -> + t + (** Add method to signature. + + Overwrites any existing synonymous method. This implements the behavior of + classes, which permit duplicate definitions where latter definitions + overwrite former ones. *) + + val append_method : + static:bool -> + string -> + ALoc.t -> + func_sig -> + ?set_asts:set_asts -> + ?set_type:set_type -> + t -> + t + (** Add method override to signature. + + Does not overwrite existing synonymous methods. This implements the + behavior of interfaces, which interpret duplicate definitions as branches + of a single overloaded method. *) + + val append_call : static:bool -> Type.t -> t -> t + + val add_getter : + static:bool -> + string -> + ALoc.t -> + func_sig -> + ?set_asts:set_asts -> + ?set_type:set_type -> + t -> + t + (** Add getter to signature. *) + + val add_setter : + static:bool -> + string -> + ALoc.t -> + func_sig -> + ?set_asts:set_asts -> + ?set_type:set_type -> + t -> + t + (** Add setter to signature. *) + + val mem_field : string -> static:bool -> t -> bool + (** Check if this signature defines a given field *) + + val mem_constructor : t -> bool + (** Check if this signature defines a constructor *) + + val add_this : + Type.t -> + (* self *) + Context.t -> + Reason.t -> + Type.typeparams -> + Type.t SMap.t -> + (* tparams_map *) + Type.t * Type.typeparams * Type.t SMap.t + + val to_prop_map : Context.t -> field' SMap.t -> Type.Properties.id + + (** 1. Manipulation *) + + val check_implements : Context.t -> Reason.reason -> t -> unit + (** Emits constraints to ensure the signature is compatible with its declared + interface implementations (classes) *) + + val check_super : Context.t -> Reason.reason -> t -> unit + (** Emits constraints to ensure the signature is compatible with its declared + superclass (classes) or extends/mixins (interfaces) *) + + val generate_tests : Context.t -> (t -> 'a) -> t -> 'a + (** Invoke callback with type parameters substituted by upper/lower bounds. 
*) + + val toplevels : + Context.t -> + decls:(Context.t -> (ALoc.t, ALoc.t) Flow_ast.Statement.t list -> unit) -> + stmts: + (Context.t -> + (ALoc.t, ALoc.t) Flow_ast.Statement.t list -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Statement.t list) -> + expr: + (Context.t -> + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t) -> + private_property_map:Type.Properties.id -> + t -> + unit + (** Evaluate the class body. *) + + (** 1. Type Conversion *) + + val thistype : Context.t -> t -> Type.t + + (* Create a (polymorphic) class type. *) + val classtype : Context.t -> ?check_polarity:bool -> t -> Type.t + + module This : sig + val is_bound_to_empty : t -> bool + + val in_class : (ALoc.t, ALoc.t) Flow_ast.Class.t -> bool + end +end diff --git a/src/typing/codegen.ml b/src/typing/codegen.ml deleted file mode 100644 index cf215d2b844..00000000000 --- a/src/typing/codegen.ml +++ /dev/null @@ -1,468 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -(** - * This file is a general-purpose utility for generating code. It is - * Context.t-aware, which allows it to resolve and codegen type syntax from - * types themselves. - * - * Example usage: - * - * let code_str = - * Codegen.mk_env cx - * |> Codegen.add_str "/* Before printed type */\n" - * |> Codegen.gen_type NullT.t - * |> Codegen.add_str "\n/* After printed type */\n" - * |> Codegen.to_string - * in - * print_endline code_str - *) - -let spf = Printf.sprintf - -type codegen_env = { - buf: Buffer.t; - class_names: string IMap.t; - mutable next_class_name: int; - flow_cx: Context.t; - tparams: Type.typeparam list; - applied_tparams: Type.t list; -} - -let add_applied_tparams applied_tparams env = {env with applied_tparams;} -let add_str str env = Buffer.add_string env.buf str; env -let add_tparams tparams env = {env with tparams;} -let find_props tmap_id env = Context.find_props env.flow_cx tmap_id -let has_class_name class_id env = IMap.mem class_id env.class_names -let next_class_name env = - let id = env.next_class_name in - env.next_class_name <- id + 1; - spf "Class%d" id -let resolve_type t env = Flow_js.resolve_type env.flow_cx t -let resolve_tvar tvar env = Flow_js.resolve_tvar env.flow_cx tvar -let set_class_name class_id name env = - {env with class_names = IMap.add class_id name env.class_names;} -let to_string env = Buffer.contents env.buf - -let mk_env merged_flow_cx = { - applied_tparams = []; - buf = Buffer.create 320; - class_names = IMap.empty; - flow_cx = merged_flow_cx; - next_class_name = 0; - tparams = []; -} - -(** - * Just a helper function to simplify this: - * - * let env = - * add_str "first" env - * |> add_str "second" - * |> add_str "third" - * in - * let env = - * if conditional - * then add_str "maybe fourth" env - * else env - * in - * add_str "fifth" env - * |> add_str "sixth" - * |> add_str "seventh" - * - * into this: - * - * add_str "first" env - * |> add_str "second" - * |> add_str "third" - * |> gen_if conditional (add_str "maybe fourth") - * |> add_str "fifth" - * |> add_str "sixth" - * |> add_str "seventh" - *) -let gen_if conditional gen_fn env = - if conditional then gen_fn env else env - -(** - * Given a type which must be a built-in class instance type, trace out the - * built-in's name and codegen it. 
- * - * NOTE: It would be good to come back to this and find a more general - * (less fragile) way of preserving class names alongside instance types. - *) -let gen_builtin_class_type t env = Type.( - (* AVERT YOUR EYES *) - let reason = reason_of_t t in - let builtin_name = DescFormat.name_of_instance_reason reason in - - (** - * Assert that the builtin name we found does match with the class_id we're - * backtracking. This is super defensive just because our method of getting - * a builtin's name is so hacky. Once we make that better, we can be less - * defensive here. - *) - let classid = - match t with - | DefT (_, InstanceT (_, _, _, {class_id; _;})) -> class_id - | t -> failwith ( - spf - ("Internal error: Expected an InstanceT while looking up a builtin " ^^ - "class name, but got a %s!") - (string_of_ctor t) - ) - in - - let builtin_t = Flow_js.get_builtin env.flow_cx builtin_name reason in - let builtin_classid = - match resolve_type builtin_t env with - | ThisClassT(_, DefT (_, InstanceT (_, _, _, {class_id; _;}))) -> - class_id - | DefT (_, PolyT(_, ThisClassT(_, DefT (_, InstanceT(_, _, _, {class_id; _;}))), _)) -> - class_id - | builtin_t -> failwith (spf "Unexpected global type: %s" (string_of_ctor builtin_t)) - in - - if builtin_classid = classid - then add_str builtin_name env - else failwith ( - "Internal error: Encountered an instance type for a class that " ^ - "has not been defined!" - ) -) - -(* Helper to generate a list of items with some separator between. *) -let gen_separated_list list sep gen_fn env = - let count = List.length list in - let (env, _) = List.fold_left (fun (env, idx) item -> - let idx = idx + 1 in - let env = gen_fn item env in - ((if idx < count then add_str sep env else env), idx) - ) (env, 0) list in - env - -(* Generate type syntax for a given type *) -let rec gen_type t env = Type.( - match t with - | AnnotT (_, source_t, _) -> gen_type (resolve_type source_t env) env - | OpaqueT (_, {underlying_t = Some t; _}) -> gen_type t env - | OpaqueT (_, {super_t = Some t; _}) -> gen_type t env - | DefT (_, AnyFunT) -> add_str "Function" env - | DefT (_, AnyObjT) -> add_str "Object" env - | DefT (_, AnyT) - | AnyWithLowerBoundT _ - | AnyWithUpperBoundT _ - | MergedT _ - -> add_str "any" env - | DefT (_, ArrT arrtype) -> - (match arrtype with - | ArrayAT (elemt, None) -> - add_str "Array<" env - |> gen_type elemt - |> add_str ">" - | ROArrayAT (elemt) -> - add_str "$ReadOnlyArray<" env - |> gen_type elemt - |> add_str ">" - | ArrayAT (_, Some tuple_types) - | TupleAT (_, tuple_types) -> - env - |> add_str "[" - |> gen_separated_list tuple_types ", " gen_type - |> add_str "]" - | EmptyAT -> - (* There isn't any real way to write this type at the moment *) - add_str "Array" env - ) - - | DefT (_, BoolT (Some _)) -> - (* TODO: Consider polarity and print the literal type when appropriate *) - add_str "boolean" env - | DefT (_, BoolT None) -> - add_str "boolean" env - | BoundT (_, name, _) -> add_str name env - | DefT (_, ClassT t) -> - add_str "Class<" env - |> gen_type t - |> add_str ">" - | DefT (_, CharSetT chars) -> - add_str "$CharSet<\"" env - |> add_str (String_utils.CharSet.to_string chars) - |> add_str "\">" - | CustomFunT (_, ObjectAssign) -> add_str "Object$Assign" env - | CustomFunT (_, ObjectGetPrototypeOf) -> add_str "Object$GetPrototypeOf" env - | CustomFunT (_, ObjectSetPrototypeOf) -> add_str "Object$SetPrototypeOf" env - | CustomFunT (_, Compose false) -> add_str "$Compose" env - | CustomFunT (_, Compose true) -> add_str "$ComposeReverse" 
env - | CustomFunT (_, ReactPropType (React.PropType.Primitive (_, t))) -> - add_str "React$PropType$Primitive<" env - |> gen_type t - |> add_str ">" - | CustomFunT (_, ReactPropType (React.PropType.Complex kind)) -> - add_str React.PropType.(match kind with - | ArrayOf -> "React$PropType$ArrayOf" - | InstanceOf -> "React$PropType$InstanceOf" - | ObjectOf -> "React$PropType$ObjectOf" - | OneOf -> "React$PropType$OneOf" - | OneOfType -> "React$PropType$OneOfType" - | Shape -> "React$PropType$Shape" - ) env - | CustomFunT (_, ReactCreateClass) -> add_str "React$CreateClass" env - | CustomFunT (_, ReactCreateElement) -> add_str "React$CreateElement" env - | CustomFunT (_, ReactCloneElement) -> add_str "React$CloneElement" env - | CustomFunT (_, ReactElementFactory _) -> add_str "React$ElementFactory" env - | CustomFunT (_, Idx) -> add_str "$Facebookism$Idx" env - | CustomFunT (_, TypeAssertIs) -> add_str "$Facebookism$TypeAssertIs" env - | CustomFunT (_, TypeAssertThrows) -> add_str "$Facebookism$TypeAssertThrows" env - | CustomFunT (_, TypeAssertWraps) -> add_str "$Facebookism$TypeAssertWraps" env - | CustomFunT (_, DebugPrint) -> add_str "$Flow$DebugPrint" env - | CustomFunT (_, DebugThrow) -> add_str "$Flow$DebugThrow" env - | CustomFunT (_, DebugSleep) -> add_str "$Flow$DebugSleep" env - (* TODO: Once predicate types are a little more fleshed out, fill out this - * codegen. - *) - | OpenPredT (_, _, _, _) -> add_str "mixed /* TODO: OpenPredT */" env - | ExactT (_, t) -> add_str "$Exact<" env |> gen_type t |> add_str ">" - | ObjProtoT _ -> add_str "typeof Object.prototype" env - | FunProtoT _ -> add_str "typeof Function.prototype" env - | FunProtoApplyT _ -> add_str "typeof Function.prototype.apply" env - | FunProtoBindT _ -> add_str "typeof Function.prototype.bind" env - | FunProtoCallT _ -> add_str "typeof Function.prototype.call" env - | DefT (_, FunT (_static, _prototype, ft)) -> - let {params; rest_param; return_t; _;} = ft in - gen_tparams_list env - |> add_str "(" - |> gen_func_params params rest_param - |> add_str ") => " - |> gen_type return_t - | DefT (_, InstanceT (_static, _super, _, {class_id; _;})) -> ( - (* TODO: See if we can preserve class names *) - let env = - match IMap.get class_id env.class_names with - | Some name -> add_str name env - | None -> gen_builtin_class_type t env - in - gen_tparams_list env - ) - | DefT (_, IntersectionT intersection) -> gen_intersection_list intersection env - | KeysT (_, t) -> add_str "$Keys<" env |> gen_type t |> add_str ">" - | DefT (_, MaybeT t) -> add_str "?" 
env |> gen_type t - | DefT (_, MixedT _) -> add_str "mixed" env - | DefT (_, NumT (Literal _)) -> - (* TODO: Consider polarity and print the literal type when appropriate *) - add_str "number" env - | DefT (_, NumT (Truthy|AnyLiteral)) -> add_str "number" env - | DefT (_, NullT) | NullProtoT _ -> add_str "null" env - | DefT (_, ObjT {flags = _; dict_t; call_t = _; props_tmap; proto_t = _;}) -> ( - let env = add_str "{" env in - - (* Generate prop entries *) - let props = find_props props_tmap env in - let props = SMap.elements props |> List.sort (fun (k1, _) (k2, _) -> - Pervasives.compare k1 k2 - ) in - let env = gen_separated_list props ", " (fun (k, p) env -> - gen_prop k p env - ) env in - - (* Generate potential dict entry *) - let env = - match dict_t with - | Some {dict_name; key; value; dict_polarity} -> - let key_name = ( - match dict_name with - | Some n -> n - | None -> "_" - ) in - let sigil = Type.Polarity.sigil dict_polarity in - let key = resolve_type key env in - let value = resolve_type value env in - gen_if (List.length props > 0) (add_str ", ") env - |> add_str sigil - |> add_str "[" - |> add_str key_name - |> add_str ": " - |> gen_type key - |> add_str "]: " - |> gen_type value - | None -> env - in - - add_str "}" env - ) - | DefT (_, OptionalT t) -> add_str "void | " env |> gen_type t - | OpenT tvar -> gen_type (resolve_tvar tvar env) env - | DefT (_, PolyT (tparams, t, _)) -> gen_type t (add_tparams tparams env) - | ReposT (_, t) -> gen_type t env - | InternalT (ReposUpperT (_, t)) -> gen_type t env - | ShapeT t -> add_str "$Shape<" env |> gen_type t |> add_str ">" - | DefT (_, SingletonBoolT v) -> add_str (spf "%b" v) env - | DefT (_, SingletonNumT (_, v)) -> add_str (spf "%s" v) env - | DefT (_, SingletonStrT v) -> add_str (spf "%S" v) env - | DefT (_, StrT (Literal _)) -> - (* TODO: Consider polarity and print the literal type when appropriate *) - add_str "string" env - | DefT (_, StrT (Truthy|AnyLiteral)) -> add_str "string" env - | ThisClassT (_, t) -> gen_type t env - | ThisTypeAppT (_, t, _, Some ts) -> add_applied_tparams ts env |> gen_type t - | ThisTypeAppT (_, t, _, None) -> gen_type t env - | DefT (_, TypeAppT (_, t, ts)) -> add_applied_tparams ts env |> gen_type t - | DefT (_, TypeT (_, t)) -> gen_type t env - | DefT (_, UnionT union) -> gen_union_list union env - | DefT (_, VoidT) -> add_str "void" env - | InternalT (OptionalChainVoidT _) -> add_str "void" env - - (** - * These types can't be expressed in code well so we fail back to `mixed`. - * - * TODO: This handling is a little low-fidelity which may not work for all - * cases. It works for current needs (best-effort codegen of shadow - * files), but at some point it might make sense to offer other kinds of - * handling for these types depening on the needs of the API user - * (i.e. raise, etc). 
- *) - | InternalT (ChoiceKitT _) - | TypeDestructorTriggerT _ - | DefT (_, EmptyT) - | EvalT _ - | ExistsT _ - | InternalT (ExtendsT _) - | DefT (_, IdxWrapper _) - | ModuleT _ - | OpaqueT _ - | MatchingPropT _ - -> add_str (spf "mixed /* UNEXPECTED TYPE: %s */" (string_of_ctor t)) env -) - -and gen_prop k p env = - let open Type in - - let gen_getter k t env = - add_str "get " env - |> add_str k - |> add_str "(): " - |> gen_type t - in - - let gen_setter k t env = - add_str "set " env - |> add_str k - |> add_str "(" - |> gen_func_param (Some "value") t - |> add_str "): void" - in - - let rec gen_method k t env = - match t with - | DefT (_, FunT (_static, _prototype, ft)) -> - let {params; rest_param; return_t; _;} = ft in - add_str k env - |> gen_tparams_list - |> add_str "(" - |> gen_func_params params rest_param - |> add_str "): " - |> gen_type return_t - | DefT (_, PolyT (tparams, t, _)) -> gen_method k t (add_tparams tparams env) - | _ -> add_str (spf "mixed /* UNEXPECTED TYPE: %s */" (string_of_ctor t)) env - in - - match p with - | Field (_, t, polarity) -> - let sigil = Polarity.sigil polarity in - let (sep, t) = - match resolve_type t env with - | DefT (_, OptionalT t) -> ("?: ", resolve_type t env) - | t -> (": ", t) - in - add_str sigil env - |> add_str k - |> add_str sep - |> gen_type t - | Get (_, t) -> gen_getter k t env - | Set (_, t) -> gen_setter k t env - | GetSet (_, t1, _, t2) -> - gen_getter k t1 env |> gen_setter k t2 - | Method (_, t) -> gen_method k t env - -and gen_func_params params rest_param env = - let params_rev = List.fold_left (fun acc (name, t) -> - (name, t, false) :: acc - ) [] params in - let params_rev = match rest_param with - | None -> params_rev - | Some (name, _, t) -> (name, t, true) :: params_rev - in - let params = List.rev params_rev in - gen_separated_list params ", " (fun (name, t, is_rest) env -> - if is_rest - then gen_func_rest_param name t env - else gen_func_param name t env - ) env - -and gen_func_rest_param name t env = - let name = Option.value name ~default:"_" in - add_str "..." 
env - |> add_str name - |> add_str ": " - |> gen_type t - -and gen_func_param name t env = - let open Type in - let name = Option.value name ~default:"_" in - match t with - | DefT (_, OptionalT t) -> - add_str name env - |> add_str "?: " - |> gen_type t - | t -> - add_str name env - |> add_str ": " - |> gen_type t - -and gen_intersection_list intersection env = - let members = Type.InterRep.members intersection in - gen_separated_list members " & " gen_type env - -and gen_tparams_list = Type.( - let gen_tparam {reason = _; name; bound; polarity; default;} env = - let bound = resolve_type bound env in - let env = add_str (Polarity.sigil polarity) env in - let env = add_str name env in - let env = ( - match bound with - | DefT (_, MixedT _) -> env - | bound -> add_str ": " env |> gen_type bound - ) in - let env = ( - match default with - | Some default -> add_str " = " env |> gen_type default - | None -> env - ) in - env - in - - fun env -> - let tparams = env.tparams in - let params_count = List.length tparams in - let applied_tparams = env.applied_tparams in - let applied_tparams_count = List.length applied_tparams in - match (params_count, applied_tparams_count) with - | (0, 0) -> env - | (_, 0) -> - {env with tparams = []; } - |> add_str "<" - |> gen_separated_list tparams ", " gen_tparam - |> add_str ">" - | _ -> - {env with tparams = []; applied_tparams = []; } - |> add_str "<" - |> gen_separated_list applied_tparams ", " gen_type - |> add_str ">" -) - -and gen_union_list union env = - let members = Type.UnionRep.members union in - gen_separated_list members " | " gen_type env diff --git a/src/typing/constraint.ml b/src/typing/constraint.ml index 1155f528289..d48c122d932 100644 --- a/src/typing/constraint.ml +++ b/src/typing/constraint.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -23,8 +23,8 @@ type ident = int root structure (see below). **) type node = -| Goto of ident -| Root of root + | Goto of ident + | Root of root (** A root structure carries the actual non-trivial state of a tvar, and consists of: @@ -57,9 +57,16 @@ and root = { **) and constraints = -| Resolved of Type.t -| Unresolved of bounds + | Resolved of Type.use_op * Type.t + | FullyResolved of Type.use_op * Type.t + | Unresolved of bounds +and bounds = { + mutable lower: (Trace.t * Type.use_op) TypeMap.t; + mutable upper: Trace.t UseTypeMap.t; + mutable lowertvars: Trace.t IMap.t; + mutable uppertvars: Trace.t IMap.t; +} (** The bounds structure carries the evolving constraints on the solution of an unresolved tvar. @@ -77,28 +84,13 @@ and constraints = The use_op in the lower TypeMap represents the use_op when a lower bound was added. 
**) -and bounds = { - mutable lower: (Trace.t * Type.use_op) TypeMap.t; - mutable upper: Trace.t UseTypeMap.t; - mutable lowertvars: Trace.t IMap.t; - mutable uppertvars: Trace.t IMap.t; -} - -let new_bounds () = { - lower = TypeMap.empty; - upper = UseTypeMap.empty; - lowertvars = IMap.empty; - uppertvars = IMap.empty; -} - -let new_unresolved_root () = - Root { rank = 0; constraints = Unresolved (new_bounds ()) } -let copy_bounds = function - | { lower; upper; lowertvars; uppertvars; } -> - { lower; upper; lowertvars; uppertvars; } +let new_bounds () = + { + lower = TypeMap.empty; + upper = UseTypeMap.empty; + lowertvars = IMap.empty; + uppertvars = IMap.empty; + } -let copy_node node = match node with - | Root { rank; constraints = Unresolved bounds } -> - Root { rank; constraints = Unresolved (copy_bounds bounds) } - | _ -> node +let new_unresolved_root () = Root { rank = 0; constraints = Unresolved (new_bounds ()) } diff --git a/src/typing/constraint.mli b/src/typing/constraint.mli index 037579062c7..0aa8ee18e89 100644 --- a/src/typing/constraint.mli +++ b/src/typing/constraint.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -10,8 +10,8 @@ type ident = int (***************************************) type node = -| Goto of ident -| Root of root + | Goto of ident + | Root of root and root = { rank: int; @@ -19,8 +19,9 @@ and root = { } and constraints = -| Resolved of Type.t -| Unresolved of bounds + | Resolved of Type.use_op * Type.t + | FullyResolved of Type.use_op * Type.t + | Unresolved of bounds and bounds = { mutable lower: (Trace.t * Type.use_op) Type.TypeMap.t; @@ -29,6 +30,4 @@ and bounds = { mutable uppertvars: Trace.t IMap.t; } -val new_unresolved_root: unit -> node - -val copy_node: node -> node +val new_unresolved_root : unit -> node diff --git a/src/typing/context.ml b/src/typing/context.ml index e16545fca09..d6473a3e888 100644 --- a/src/typing/context.ml +++ b/src/typing/context.ml @@ -1,17 +1,23 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module LocMap = Utils_js.LocMap +module ALocMap = Loc_collections.ALocMap +module Scope_api = Scope_api.With_ALoc exception Props_not_found of Type.Properties.id + exception Call_not_found of int + exception Exports_not_found of Type.Exports.id + exception Require_not_found of string + exception Module_not_found of string + exception Tvar_not_found of Constraint.ident type env = Scope.t list @@ -22,13 +28,14 @@ type metadata = { munge_underscores: bool; verbose: Verbose.t option; weak: bool; + include_suppressions: bool; jsx: Options.jsx_mode; strict: bool; strict_local: bool; - (* global *) max_literal_length: int; enable_const_params: bool; + enable_enums: bool; enforce_strict_call_arity: bool; esproposal_class_static_fields: Options.esproposal_feature_mode; esproposal_class_instance_fields: Options.esproposal_feature_mode; @@ -36,64 +43,69 @@ type metadata = { esproposal_export_star_as: Options.esproposal_feature_mode; esproposal_optional_chaining: Options.esproposal_feature_mode; esproposal_nullish_coalescing: Options.esproposal_feature_mode; + exact_by_default: bool; + facebook_fbs: string option; facebook_fbt: string option; + haste_module_ref_prefix: string option; ignore_non_literal_requires: bool; max_trace_depth: int; + recursion_limit: int; root: Path.t; strip_root: bool; suppress_comments: Str.regexp list; suppress_types: SSet.t; max_workers: int; + default_lib_dir: Path.t option; + trust_mode: Options.trust_mode; + type_asserts: bool; } -type module_kind = - | CommonJSModule of Loc.t option - | ESModule - type test_prop_hit_or_miss = | Hit | Miss of string option * (Reason.t * Reason.t) * Type.use_op -type type_assert_kind = Is | Throws | Wraps +type type_assert_kind = + | Is + | Throws + | Wraps + +type voidable_check = { + public_property_map: Type.Properties.id; + private_property_map: Type.Properties.id; + errors: ALoc.t Property_assignment.errors; +} type sig_t = { (* map from tvar ids to nodes (type info structures) *) mutable graph: Constraint.node IMap.t; - + (* map from tvar ids to trust nodes *) + mutable trust_graph: Trust_constraint.node IMap.t; (* obj types point to mutable property maps *) mutable property_maps: Type.Properties.map; - (* indirection to support context opt *) mutable call_props: Type.t IMap.t; - (* modules point to mutable export maps *) mutable export_maps: Type.Exports.map; - (* map from evaluation ids to types *) mutable evaluated: Type.t IMap.t; - (* graph tracking full resolution of types *) mutable type_graph: Graph_explorer.graph; - (* map of speculation ids to sets of unresolved tvars *) mutable all_unresolved: ISet.t IMap.t; - (* map from frame ids to env snapshots *) mutable envs: env IMap.t; - (* map from module names to their types *) mutable module_map: Type.t SMap.t; - + (* We track nominal ids in the context to help decide when the types exported by a module have + meaningfully changed: see Merge_js.ContextOptimizer. 
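+     Nominal ids are used to identify classes and to check nominal subtyping
+     between classes; they differ from the "structural" ids used for type
+     variables and property maps, where subtyping cares about the underlying
+     types rather than the ids themselves.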
**) + mutable nominal_ids: ISet.t; (* map from TypeAssert assertion locations to the type being asserted *) - mutable type_asserts: (type_assert_kind * Loc.t) LocMap.t; - - mutable errors: Errors.ErrorSet.t; - + mutable type_asserts_map: (type_assert_kind * ALoc.t) ALocMap.t; + mutable errors: Flow_error.ErrorSet.t; mutable error_suppressions: Error_suppressions.t; mutable severity_cover: ExactCover.lint_severity_cover Utils_js.FilenameMap.t; - (* map from exists proposition locations to the types of values running through them *) - mutable exists_checks: ExistsCheck.t LocMap.t; + mutable exists_checks: ExistsCheck.t ALocMap.t; (* map from exists proposition locations to the types of excuses for them *) (* If a variable appears in something like `x || ''`, the existence check * is excused and not considered sketchy. (The program behaves identically to how it would @@ -101,309 +113,436 @@ type sig_t = { * common pattern. Excusing it eliminates a lot of noise from the lint rule. *) (* The above example assumes that x is a string. If it were a different type * it wouldn't be excused. *) - mutable exists_excuses: ExistsCheck.t LocMap.t; - + mutable exists_excuses: ExistsCheck.t ALocMap.t; + (* For the definite instance property assignment analysis, we should only + * emit errors for a given property if VoidT flows to the type of that + * property. Ideally, we would create a VoidT ~> property type flow when we + * perform the analysis. The problem is that doing that causes the type + * inference behavior to depend on lint settings which can lead to some weird + * behavior, such as extra errors even when the lint is off. The solution is + * to collect all of potential errors that we would have created a flow for + * in the context and deal with them post-merge. At this point, the tvars of + * nearly all properties will have a concrete type that we can safely pattern + * match on without affecting other constraints. For the unresolved tvars, we + * conservatively emit errors. + *) + mutable voidable_checks: voidable_check list; mutable test_prop_hits_and_misses: test_prop_hit_or_miss IMap.t; - - mutable optional_chains_useful: (Reason.t * bool) LocMap.t; - - mutable invariants_useful: (Reason.t * bool) LocMap.t; + mutable optional_chains_useful: (Reason.t * bool) ALocMap.t; + mutable invariants_useful: (Reason.t * bool) ALocMap.t; } +type phase = + | Checking + | Merging + type t = { sig_cx: sig_t; - file: File_key.t; - module_ref: string; + phase: phase; + (* Tables for the current component (cycle) *) + aloc_tables: ALoc.table Lazy.t Utils_js.FilenameMap.t; + (* Reverse lookup table for the current file. Unlike the aloc_tables, we only + store the rev_table for the leader file, rather than the whole component. + We only need this table during the check phase, when we are checking single + files, so storing the rev table for the whole component would be a waste + of space/ *) + rev_aloc_table: ALoc.reverse_table Lazy.t; metadata: metadata; - - mutable module_kind: module_kind; - - mutable import_stmts: (Loc.t, Loc.t) Flow_ast.Statement.ImportDeclaration.t list; + module_info: Module_info.t; + mutable import_stmts: (ALoc.t, ALoc.t) Flow_ast.Statement.ImportDeclaration.t list; mutable imported_ts: Type.t SMap.t; - - (* set of "nominal" ids (created by Flow_js.mk_nominal_id) *) - (** Nominal ids are used to identify classes and to check nominal subtyping - between classes. 
They are different from other "structural" ids, used to - identify type variables and property maps, where subtyping cares about the - underlying types rather than the ids themselves. We track nominal ids in - the context to help decide when the types exported by a module have - meaningfully changed: see Merge_js.ContextOptimizer. **) - mutable nominal_ids: ISet.t; - - mutable require_map: Type.t LocMap.t; - - type_table: Type_table.t; - annot_table: (Loc.t, Type.t) Hashtbl.t; - refs_table: (Loc.t, Loc.t) Hashtbl.t; - - mutable declare_module_ref: string option; - - mutable use_def : Scope_api.info * Ssa_api.values; + mutable require_map: Type.t ALocMap.t; + trust_constructor: unit -> Trust.trust_rep; + mutable declare_module_ref: Module_info.t option; + mutable use_def: Scope_api.info * Ssa_api.With_ALoc.values; } -let metadata_of_options options = { - (* local *) - checked = Options.all options; - munge_underscores = Options.should_munge_underscores options; - verbose = Options.verbose options; - weak = Options.weak_by_default options; - jsx = Options.Jsx_react; - strict = false; - strict_local = false; - - (* global *) - max_literal_length = Options.max_literal_length options; - enable_const_params = Options.enable_const_params options; - enforce_strict_call_arity = Options.enforce_strict_call_arity options; - esproposal_class_instance_fields = Options.esproposal_class_instance_fields options; - esproposal_class_static_fields = Options.esproposal_class_static_fields options; - esproposal_decorators = Options.esproposal_decorators options; - esproposal_export_star_as = Options.esproposal_export_star_as options; - esproposal_optional_chaining = Options.esproposal_optional_chaining options; - esproposal_nullish_coalescing = Options.esproposal_nullish_coalescing options; - facebook_fbt = Options.facebook_fbt options; - ignore_non_literal_requires = Options.should_ignore_non_literal_requires options; - max_trace_depth = Options.max_trace_depth options; - max_workers = Options.max_workers options; - root = Options.root options; - strip_root = Options.should_strip_root options; - suppress_comments = Options.suppress_comments options; - suppress_types = Options.suppress_types options; -} - -let empty_use_def = Scope_api.{ max_distinct = 0; scopes = IMap.empty }, LocMap.empty - -let make_sig () = { - graph = IMap.empty; - property_maps = Type.Properties.Map.empty; - call_props = IMap.empty; - export_maps = Type.Exports.Map.empty; - evaluated = IMap.empty; - type_graph = Graph_explorer.new_graph ISet.empty; - all_unresolved = IMap.empty; - envs = IMap.empty; - module_map = SMap.empty; - type_asserts = LocMap.empty; - errors = Errors.ErrorSet.empty; - error_suppressions = Error_suppressions.empty; - severity_cover = Utils_js.FilenameMap.empty; - exists_checks = LocMap.empty; - exists_excuses = LocMap.empty; - test_prop_hits_and_misses = IMap.empty; - optional_chains_useful = LocMap.empty; - invariants_useful = LocMap.empty; -} +let metadata_of_options options = + { + (* local *) + checked = Options.all options; + munge_underscores = Options.should_munge_underscores options; + verbose = Options.verbose options; + weak = Options.weak_by_default options; + include_suppressions = Options.include_suppressions options; + jsx = Options.Jsx_react; + strict = false; + strict_local = false; + (* global *) + max_literal_length = Options.max_literal_length options; + enable_const_params = Options.enable_const_params options; + enable_enums = Options.enums options; + enforce_strict_call_arity = 
Options.enforce_strict_call_arity options; + esproposal_class_instance_fields = Options.esproposal_class_instance_fields options; + esproposal_class_static_fields = Options.esproposal_class_static_fields options; + esproposal_decorators = Options.esproposal_decorators options; + esproposal_export_star_as = Options.esproposal_export_star_as options; + esproposal_optional_chaining = Options.esproposal_optional_chaining options; + esproposal_nullish_coalescing = Options.esproposal_nullish_coalescing options; + exact_by_default = Options.exact_by_default options; + facebook_fbs = Options.facebook_fbs options; + facebook_fbt = Options.facebook_fbt options; + haste_module_ref_prefix = Options.haste_module_ref_prefix options; + ignore_non_literal_requires = Options.should_ignore_non_literal_requires options; + max_trace_depth = Options.max_trace_depth options; + max_workers = Options.max_workers options; + recursion_limit = Options.recursion_limit options; + root = Options.root options; + strip_root = Options.should_strip_root options; + suppress_comments = Options.suppress_comments options; + suppress_types = Options.suppress_types options; + default_lib_dir = (Options.file_options options).Files.default_lib_dir; + trust_mode = Options.trust_mode options; + type_asserts = Options.type_asserts options; + } + +let empty_use_def = (Scope_api.{ max_distinct = 0; scopes = IMap.empty }, ALocMap.empty) + +let make_sig () = + { + graph = IMap.empty; + trust_graph = IMap.empty; + property_maps = Type.Properties.Map.empty; + call_props = IMap.empty; + export_maps = Type.Exports.Map.empty; + evaluated = IMap.empty; + type_graph = Graph_explorer.new_graph (); + all_unresolved = IMap.empty; + envs = IMap.empty; + module_map = SMap.empty; + nominal_ids = ISet.empty; + type_asserts_map = ALocMap.empty; + errors = Flow_error.ErrorSet.empty; + error_suppressions = Error_suppressions.empty; + severity_cover = Utils_js.FilenameMap.empty; + exists_checks = ALocMap.empty; + exists_excuses = ALocMap.empty; + voidable_checks = []; + test_prop_hits_and_misses = IMap.empty; + optional_chains_useful = ALocMap.empty; + invariants_useful = ALocMap.empty; + } (* create a new context structure. Flow_js.fresh_context prepares for actual use. 
*) -let make sig_cx metadata file module_ref = { - sig_cx; - - file; - module_ref; - metadata; - - module_kind = CommonJSModule(None); - - import_stmts = []; - imported_ts = SMap.empty; +let make sig_cx metadata file aloc_tables rev_aloc_table module_ref phase = + { + sig_cx; + file; + phase; + aloc_tables; + rev_aloc_table; + metadata; + module_info = Module_info.empty_cjs_module module_ref; + import_stmts = []; + imported_ts = SMap.empty; + require_map = ALocMap.empty; + trust_constructor = Trust.literal_trust; + declare_module_ref = None; + use_def = empty_use_def; + } - nominal_ids = ISet.empty; - - require_map = LocMap.empty; - - type_table = Type_table.create (); - annot_table = Hashtbl.create 0; - refs_table = Hashtbl.create 0; +let sig_cx cx = cx.sig_cx - declare_module_ref = None; +let graph_sig sig_cx = sig_cx.graph - use_def = empty_use_def; -} +let trust_graph_sig sig_cx = sig_cx.trust_graph -let sig_cx cx = cx.sig_cx -let graph_sig sig_cx = sig_cx.graph let find_module_sig sig_cx m = - try SMap.find_unsafe m sig_cx.module_map - with Not_found -> raise (Module_not_found m) + (try SMap.find_unsafe m sig_cx.module_map with Not_found -> raise (Module_not_found m)) + +(* modules *) -let push_declare_module cx module_ref = +let push_declare_module cx info = match cx.declare_module_ref with | Some _ -> failwith "declare module must be one level deep" - | None -> cx.declare_module_ref <- Some module_ref + | None -> cx.declare_module_ref <- Some info let pop_declare_module cx = match cx.declare_module_ref with | None -> failwith "pop empty declare module" | Some _ -> cx.declare_module_ref <- None +let module_info cx = + match cx.declare_module_ref with + | Some info -> info + | None -> cx.module_info + +let module_kind cx = + let info = module_info cx in + info.Module_info.kind + +let module_ref cx = + let info = module_info cx in + info.Module_info.ref + (* accessors *) +let current_phase cx = cx.phase + let all_unresolved cx = cx.sig_cx.all_unresolved -let annot_table cx = cx.annot_table + let envs cx = cx.sig_cx.envs + +let trust_constructor cx = cx.trust_constructor + +let cx_with_trust cx trust = { cx with trust_constructor = trust } + +let metadata cx = cx.metadata + let max_literal_length cx = cx.metadata.max_literal_length + let enable_const_params cx = cx.metadata.enable_const_params || cx.metadata.strict || cx.metadata.strict_local + +let enable_enums cx = cx.metadata.enable_enums + let enforce_strict_call_arity cx = cx.metadata.enforce_strict_call_arity + let errors cx = cx.sig_cx.errors + let error_suppressions cx = cx.sig_cx.error_suppressions + let esproposal_class_static_fields cx = cx.metadata.esproposal_class_static_fields + let esproposal_class_instance_fields cx = cx.metadata.esproposal_class_instance_fields + let esproposal_decorators cx = cx.metadata.esproposal_decorators + let esproposal_export_star_as cx = cx.metadata.esproposal_export_star_as + let esproposal_optional_chaining cx = cx.metadata.esproposal_optional_chaining + let esproposal_nullish_coalescing cx = cx.metadata.esproposal_nullish_coalescing + let evaluated cx = cx.sig_cx.evaluated + +let exact_by_default cx = cx.metadata.exact_by_default + let file cx = cx.file + +let aloc_tables cx = cx.aloc_tables + let find_props cx id = try Type.Properties.Map.find_unsafe id cx.sig_cx.property_maps with Not_found -> raise (Props_not_found id) + let find_call cx id = - try IMap.find_unsafe id cx.sig_cx.call_props - with Not_found -> raise (Call_not_found id) + (try IMap.find_unsafe id cx.sig_cx.call_props with 
Not_found -> raise (Call_not_found id)) + let find_exports cx id = try Type.Exports.Map.find_unsafe id cx.sig_cx.export_maps with Not_found -> raise (Exports_not_found id) + let find_require cx loc = - try LocMap.find_unsafe loc cx.require_map - with Not_found -> raise (Require_not_found (Loc.to_string ~include_source:true loc)) + try ALocMap.find_unsafe loc cx.require_map + with Not_found -> raise (Require_not_found (ALoc.debug_to_string ~include_source:true loc)) + let find_module cx m = find_module_sig (sig_cx cx) m + let find_tvar cx id = - try IMap.find_unsafe id cx.sig_cx.graph - with Not_found -> raise (Tvar_not_found id) -let mem_nominal_id cx id = ISet.mem id cx.nominal_ids + (try IMap.find_unsafe id cx.sig_cx.graph with Not_found -> raise (Tvar_not_found id)) + +let mem_nominal_id cx id = ISet.mem id cx.sig_cx.nominal_ids + let graph cx = graph_sig cx.sig_cx + +let trust_graph cx = trust_graph_sig cx.sig_cx + let import_stmts cx = cx.import_stmts + let imported_ts cx = cx.imported_ts + let is_checked cx = cx.metadata.checked + let is_verbose cx = cx.metadata.verbose <> None + let is_weak cx = cx.metadata.weak -let is_strict cx = (Option.is_some cx.declare_module_ref) || cx.metadata.strict + +let is_strict cx = Option.is_some cx.declare_module_ref || cx.metadata.strict + let is_strict_local cx = cx.metadata.strict_local + +let include_suppressions cx = cx.metadata.include_suppressions + let severity_cover cx = cx.sig_cx.severity_cover + let max_trace_depth cx = cx.metadata.max_trace_depth -let module_kind cx = cx.module_kind + let require_map cx = cx.require_map + let module_map cx = cx.sig_cx.module_map -let module_ref cx = - match cx.declare_module_ref with - | Some module_ref -> module_ref - | None -> cx.module_ref + let property_maps cx = cx.sig_cx.property_maps + let call_props cx = cx.sig_cx.call_props -let refs_table cx = cx.refs_table + let export_maps cx = cx.sig_cx.export_maps + +let recursion_limit cx = cx.metadata.recursion_limit + let root cx = cx.metadata.root + +let facebook_fbs cx = cx.metadata.facebook_fbs + let facebook_fbt cx = cx.metadata.facebook_fbt + +let haste_module_ref_prefix cx = cx.metadata.haste_module_ref_prefix + let should_ignore_non_literal_requires cx = cx.metadata.ignore_non_literal_requires -let should_munge_underscores cx = cx.metadata.munge_underscores + +let should_munge_underscores cx = cx.metadata.munge_underscores + let should_strip_root cx = cx.metadata.strip_root + let suppress_comments cx = cx.metadata.suppress_comments + let suppress_types cx = cx.metadata.suppress_types -let type_asserts cx = cx.sig_cx.type_asserts +let default_lib_dir cx = cx.metadata.default_lib_dir + +let type_asserts_map cx = cx.sig_cx.type_asserts_map + let type_graph cx = cx.sig_cx.type_graph -let type_table cx = cx.type_table + +let trust_mode cx = cx.metadata.trust_mode + +let type_asserts cx = cx.metadata.type_asserts + let verbose cx = cx.metadata.verbose + let max_workers cx = cx.metadata.max_workers + let jsx cx = cx.metadata.jsx + let exists_checks cx = cx.sig_cx.exists_checks + let exists_excuses cx = cx.sig_cx.exists_excuses + +let voidable_checks cx = cx.sig_cx.voidable_checks + let use_def cx = cx.use_def -let pid_prefix (cx: t) = - if max_workers cx > 0 - then Printf.sprintf "[%d] " (Unix.getpid ()) - else "" - -let copy_of_context cx = { - cx with - sig_cx = { - cx.sig_cx with - graph = IMap.map Constraint.copy_node cx.sig_cx.graph; - property_maps = cx.sig_cx.property_maps; - call_props = cx.sig_cx.call_props; - }; - type_table = Type_table.copy 
cx.type_table; -} +let trust_tracking cx = + match cx.metadata.trust_mode with + | Options.CheckTrust + | Options.SilentTrust -> + true + | Options.NoTrust -> false + +let trust_errors cx = + match cx.metadata.trust_mode with + | Options.CheckTrust -> true + | Options.SilentTrust + | Options.NoTrust -> + false + +let pid_prefix (cx : t) = + if max_workers cx > 0 then + Printf.sprintf "[%d] " (Unix.getpid ()) + else + "" + +(* Create a shallow copy of this context, so that mutations to the sig_cx's + * fields will not affect the copy. *) +let copy_of_context cx = + { + cx with + sig_cx = { cx.sig_cx with graph = cx.sig_cx.graph; trust_graph = cx.sig_cx.trust_graph }; + } (* mutators *) -let add_env cx frame env = - cx.sig_cx.envs <- IMap.add frame env cx.sig_cx.envs -let add_error cx error = - cx.sig_cx.errors <- Errors.ErrorSet.add error cx.sig_cx.errors +let add_env cx frame env = cx.sig_cx.envs <- IMap.add frame env cx.sig_cx.envs + +let add_error cx error = cx.sig_cx.errors <- Flow_error.ErrorSet.add error cx.sig_cx.errors + let add_error_suppression cx loc = - cx.sig_cx.error_suppressions <- - Error_suppressions.add loc cx.sig_cx.error_suppressions + cx.sig_cx.error_suppressions <- Error_suppressions.add loc cx.sig_cx.error_suppressions + let add_severity_cover cx filekey severity_cover = - cx.sig_cx.severity_cover <- Utils_js.FilenameMap.add filekey severity_cover cx.sig_cx.severity_cover -let add_lint_suppressions cx suppressions = cx.sig_cx.error_suppressions <- - Error_suppressions.add_lint_suppressions suppressions cx.sig_cx.error_suppressions -let add_import_stmt cx stmt = - cx.import_stmts <- stmt::cx.import_stmts -let add_imported_t cx name t = - cx.imported_ts <- SMap.add name t cx.imported_ts -let add_require cx loc tvar = - cx.require_map <- LocMap.add loc tvar cx.require_map -let add_module cx name tvar = - cx.sig_cx.module_map <- SMap.add name tvar cx.sig_cx.module_map + cx.sig_cx.severity_cover <- + Utils_js.FilenameMap.add filekey severity_cover cx.sig_cx.severity_cover + +let add_lint_suppressions cx suppressions = + cx.sig_cx.error_suppressions <- + Error_suppressions.add_lint_suppressions suppressions cx.sig_cx.error_suppressions + +let add_import_stmt cx stmt = cx.import_stmts <- stmt :: cx.import_stmts + +let add_imported_t cx name t = cx.imported_ts <- SMap.add name t cx.imported_ts + +let add_require cx loc tvar = cx.require_map <- ALocMap.add loc tvar cx.require_map + +let add_module cx name tvar = cx.sig_cx.module_map <- SMap.add name tvar cx.sig_cx.module_map + let add_property_map cx id pmap = cx.sig_cx.property_maps <- Type.Properties.Map.add id pmap cx.sig_cx.property_maps -let add_call_prop cx id t = - cx.sig_cx.call_props <- IMap.add id t cx.sig_cx.call_props + +let add_call_prop cx id t = cx.sig_cx.call_props <- IMap.add id t cx.sig_cx.call_props + let add_export_map cx id tmap = cx.sig_cx.export_maps <- Type.Exports.Map.add id tmap cx.sig_cx.export_maps -let add_tvar cx id bounds = - cx.sig_cx.graph <- IMap.add id bounds cx.sig_cx.graph -let add_nominal_id cx id = - cx.nominal_ids <- ISet.add id cx.nominal_ids + +let add_tvar cx id bounds = cx.sig_cx.graph <- IMap.add id bounds cx.sig_cx.graph + +let add_trust_var cx id bounds = cx.sig_cx.trust_graph <- IMap.add id bounds cx.sig_cx.trust_graph + +let add_nominal_id cx id = cx.sig_cx.nominal_ids <- ISet.add id cx.sig_cx.nominal_ids + let add_type_assert cx k v = - cx.sig_cx.type_asserts <- LocMap.add k v cx.sig_cx.type_asserts -let remove_all_errors cx = - cx.sig_cx.errors <- Errors.ErrorSet.empty -let 
remove_all_error_suppressions cx = - cx.sig_cx.error_suppressions <- Error_suppressions.empty -let remove_all_lint_severities cx = - cx.sig_cx.severity_cover <- Utils_js.FilenameMap.empty -let remove_tvar cx id = - cx.sig_cx.graph <- IMap.remove id cx.sig_cx.graph -let set_all_unresolved cx all_unresolved = - cx.sig_cx.all_unresolved <- all_unresolved -let set_envs cx envs = - cx.sig_cx.envs <- envs -let set_evaluated cx evaluated = - cx.sig_cx.evaluated <- evaluated -let set_graph cx graph = - cx.sig_cx.graph <- graph -let set_module_kind cx module_kind = - cx.module_kind <- module_kind -let set_property_maps cx property_maps = - cx.sig_cx.property_maps <- property_maps -let set_call_props cx call_props = - cx.sig_cx.call_props <- call_props -let set_export_maps cx export_maps = - cx.sig_cx.export_maps <- export_maps -let set_type_graph cx type_graph = - cx.sig_cx.type_graph <- type_graph -let set_exists_checks cx exists_checks = - cx.sig_cx.exists_checks <- exists_checks -let set_exists_excuses cx exists_excuses = - cx.sig_cx.exists_excuses <- exists_excuses -let set_use_def cx use_def = - cx.use_def <- use_def -let set_module_map cx module_map = - cx.sig_cx.module_map <- module_map + cx.sig_cx.type_asserts_map <- ALocMap.add k v cx.sig_cx.type_asserts_map + +let add_voidable_check cx voidable_check = + cx.sig_cx.voidable_checks <- voidable_check :: cx.sig_cx.voidable_checks + +let remove_all_errors cx = cx.sig_cx.errors <- Flow_error.ErrorSet.empty + +let remove_all_error_suppressions cx = cx.sig_cx.error_suppressions <- Error_suppressions.empty + +let remove_all_lint_severities cx = cx.sig_cx.severity_cover <- Utils_js.FilenameMap.empty + +let remove_tvar cx id = cx.sig_cx.graph <- IMap.remove id cx.sig_cx.graph + +let set_all_unresolved cx all_unresolved = cx.sig_cx.all_unresolved <- all_unresolved + +let set_envs cx envs = cx.sig_cx.envs <- envs + +let set_evaluated cx evaluated = cx.sig_cx.evaluated <- evaluated + +let set_graph cx graph = cx.sig_cx.graph <- graph + +let set_trust_graph cx trust_graph = cx.sig_cx.trust_graph <- trust_graph + +let set_property_maps cx property_maps = cx.sig_cx.property_maps <- property_maps + +let set_call_props cx call_props = cx.sig_cx.call_props <- call_props + +let set_export_maps cx export_maps = cx.sig_cx.export_maps <- export_maps + +let set_type_graph cx type_graph = cx.sig_cx.type_graph <- type_graph + +let set_exists_checks cx exists_checks = cx.sig_cx.exists_checks <- exists_checks + +let set_exists_excuses cx exists_excuses = cx.sig_cx.exists_excuses <- exists_excuses + +let set_use_def cx use_def = cx.use_def <- use_def + +let set_module_map cx module_map = cx.sig_cx.module_map <- module_map let clear_intermediates cx = cx.sig_cx.envs <- IMap.empty; cx.sig_cx.all_unresolved <- IMap.empty; - cx.sig_cx.exists_checks <- LocMap.empty; - cx.sig_cx.exists_excuses <- LocMap.empty; + cx.sig_cx.nominal_ids <- ISet.empty; + cx.sig_cx.type_graph <- Graph_explorer.Tbl.create 0; + + (* still 176 bytes :/ *) + cx.sig_cx.exists_checks <- ALocMap.empty; + cx.sig_cx.exists_excuses <- ALocMap.empty; + cx.sig_cx.voidable_checks <- []; cx.sig_cx.test_prop_hits_and_misses <- IMap.empty; - cx.sig_cx.optional_chains_useful <- LocMap.empty; - cx.sig_cx.invariants_useful <- LocMap.empty; + cx.sig_cx.optional_chains_useful <- ALocMap.empty; + cx.sig_cx.invariants_useful <- ALocMap.empty; () (* Given a sig context, it makes sense to clear the parts that are shared with @@ -413,80 +552,106 @@ let clear_intermediates cx = in other sig contexts. 
This saves a lot of shared memory as well as deserialization time. *) let clear_master_shared cx master_cx = - set_graph cx (graph cx |> IMap.filter (fun id _ -> not - (IMap.mem id master_cx.graph))); - set_property_maps cx (property_maps cx |> Type.Properties.Map.filter (fun id _ -> not - (Type.Properties.Map.mem id master_cx.property_maps))); - set_call_props cx (call_props cx |> IMap.filter (fun id _ -> not - (IMap.mem id master_cx.call_props))); - set_evaluated cx (evaluated cx |> IMap.filter (fun id _ -> not - (IMap.mem id master_cx.evaluated))) + set_graph cx (graph cx |> IMap.filter (fun id _ -> not (IMap.mem id master_cx.graph))); + set_trust_graph + cx + (trust_graph cx |> IMap.filter (fun id _ -> not (IMap.mem id master_cx.trust_graph))); + set_property_maps + cx + ( property_maps cx + |> Type.Properties.Map.filter (fun id _ -> + not (Type.Properties.Map.mem id master_cx.property_maps)) ); + set_call_props + cx + (call_props cx |> IMap.filter (fun id _ -> not (IMap.mem id master_cx.call_props))); + set_evaluated cx (evaluated cx |> IMap.filter (fun id _ -> not (IMap.mem id master_cx.evaluated))) let test_prop_hit cx id = - cx.sig_cx.test_prop_hits_and_misses <- - IMap.add id Hit cx.sig_cx.test_prop_hits_and_misses + cx.sig_cx.test_prop_hits_and_misses <- IMap.add id Hit cx.sig_cx.test_prop_hits_and_misses let test_prop_miss cx id name reasons use = if not (IMap.mem id cx.sig_cx.test_prop_hits_and_misses) then - cx.sig_cx.test_prop_hits_and_misses <- - IMap.add id (Miss (name, reasons, use)) cx.sig_cx.test_prop_hits_and_misses + cx.sig_cx.test_prop_hits_and_misses <- + IMap.add id (Miss (name, reasons, use)) cx.sig_cx.test_prop_hits_and_misses let test_prop_get_never_hit cx = - List.fold_left (fun acc (_, hit_or_miss) -> - match hit_or_miss with - | Hit -> acc - | Miss (name, reasons, use_op) -> (name, reasons, use_op)::acc - ) [] (IMap.bindings cx.sig_cx.test_prop_hits_and_misses) + List.fold_left + (fun acc (_, hit_or_miss) -> + match hit_or_miss with + | Hit -> acc + | Miss (name, reasons, use_op) -> (name, reasons, use_op) :: acc) + [] + (IMap.bindings cx.sig_cx.test_prop_hits_and_misses) let mark_optional_chain cx loc lhs_reason ~useful = - cx.sig_cx.optional_chains_useful <- LocMap.add loc (lhs_reason, useful) ~combine:( - fun (r, u) (_, u') -> (r, u || u') - ) cx.sig_cx.optional_chains_useful + cx.sig_cx.optional_chains_useful <- + ALocMap.add + loc + (lhs_reason, useful) + ~combine:(fun (r, u) (_, u') -> (r, u || u')) + cx.sig_cx.optional_chains_useful let unnecessary_optional_chains cx = - LocMap.fold (fun loc (r, useful) acc -> - if useful then acc else (loc, r) :: acc - ) cx.sig_cx.optional_chains_useful [] + ALocMap.fold + (fun loc (r, useful) acc -> + if useful then + acc + else + (loc, r) :: acc) + cx.sig_cx.optional_chains_useful + [] let mark_invariant cx loc reason ~useful = - cx.sig_cx.invariants_useful <- LocMap.add loc (reason, useful) ~combine:( - fun (r, u) (_, u') -> (r, u || u') - ) cx.sig_cx.invariants_useful + cx.sig_cx.invariants_useful <- + ALocMap.add + loc + (reason, useful) + ~combine:(fun (r, u) (_, u') -> (r, u || u')) + cx.sig_cx.invariants_useful let unnecessary_invariants cx = - LocMap.fold (fun loc (r, useful) acc -> - if useful then acc else (loc, r) :: acc - ) cx.sig_cx.invariants_useful [] + ALocMap.fold + (fun loc (r, useful) acc -> + if useful then + acc + else + (loc, r) :: acc) + cx.sig_cx.invariants_useful + [] (* utils *) -let iter_props cx id f = - find_props cx id - |> SMap.iter f +let find_real_props cx id = + find_props cx id |> 
SMap.filter (fun x _ -> not (Reason.is_internal_name x)) + +let iter_props cx id f = find_props cx id |> SMap.iter f -let has_prop cx id x = - find_props cx id - |> SMap.mem x +let iter_real_props cx id f = find_real_props cx id |> SMap.iter f -let get_prop cx id x = - find_props cx id - |> SMap.get x +let has_prop cx id x = find_props cx id |> SMap.mem x -let set_prop cx id x p = - find_props cx id - |> SMap.add x p - |> add_property_map cx id +let get_prop cx id x = find_props cx id |> SMap.get x -let has_export cx id name = - find_exports cx id |> SMap.mem name +let set_prop cx id x p = find_props cx id |> SMap.add x p |> add_property_map cx id -let set_export cx id name t = - find_exports cx id - |> SMap.add name t - |> add_export_map cx id +let has_export cx id name = find_exports cx id |> SMap.mem name + +let set_export cx id name t = find_exports cx id |> SMap.add name t |> add_export_map cx id (* constructors *) -let make_property_map cx pmap = - let id = Type.Properties.mk_id () in +let generate_property_map cx pmap = + let id = Reason.mk_id () in + add_nominal_id cx (id :> int); + let id = Type.Properties.id_of_int id in + add_property_map cx id pmap; + id + +let make_source_property_map cx pmap loc = + (* To prevent cases where we might compare a concrete and an abstract + aloc (like in a cycle) we abstractify all incoming alocs before adding + them to the map. The only exception is for library files, which have only + concrete definitions and by definition cannot appear in cycles. *) + let loc = ALoc.lookup_key_if_possible cx.rev_aloc_table loc in + let id = Type.Properties.id_of_aloc loc in add_property_map cx id pmap; id @@ -511,21 +676,9 @@ let merge_into sig_cx sig_cx_other = sig_cx.call_props <- IMap.union sig_cx_other.call_props sig_cx.call_props; sig_cx.export_maps <- Type.Exports.Map.union sig_cx_other.export_maps sig_cx.export_maps; sig_cx.evaluated <- IMap.union sig_cx_other.evaluated sig_cx.evaluated; - sig_cx.type_graph <- Graph_explorer.union sig_cx_other.type_graph sig_cx.type_graph; sig_cx.graph <- IMap.union sig_cx_other.graph sig_cx.graph; - sig_cx.type_asserts <- LocMap.union sig_cx.type_asserts sig_cx_other.type_asserts; - - (* These entries are intermediates, and will be cleared from dep_cxs before - merge. However, initializing builtins is a bit different, and actually copy - these things from the lib cxs into the master sig_cx before we clear the - indeterminates and calculate the sig sig_cx. *) - sig_cx.envs <- IMap.union sig_cx_other.envs sig_cx.envs; - sig_cx.errors <- Errors.ErrorSet.union sig_cx_other.errors sig_cx.errors; - sig_cx.error_suppressions <- Error_suppressions.union sig_cx_other.error_suppressions sig_cx.error_suppressions; - sig_cx.severity_cover <- Utils_js.FilenameMap.union sig_cx_other.severity_cover sig_cx.severity_cover; - sig_cx.exists_checks <- LocMap.union sig_cx_other.exists_checks sig_cx.exists_checks; - sig_cx.exists_excuses <- LocMap.union sig_cx_other.exists_excuses sig_cx.exists_excuses; - sig_cx.all_unresolved <- IMap.union sig_cx_other.all_unresolved sig_cx.all_unresolved; + sig_cx.trust_graph <- IMap.union sig_cx_other.trust_graph sig_cx.trust_graph; + sig_cx.type_asserts_map <- ALocMap.union sig_cx.type_asserts_map sig_cx_other.type_asserts_map; () (* Find the constraints of a type variable in the graph. @@ -536,38 +689,67 @@ let merge_into sig_cx sig_cx_other = is a goto node, and it points to another type variable: a linked list of such type variables must be traversed until a root is reached. 
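   (Illustrative chain: if the graph maps id1 to Goto id2, id2 to Goto id3, and
   id3 to a Root, then looking up id1 walks the chain to id3; find_root below
   also rewrites id1 to Goto id3, so later lookups reach the root in one step.)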
*) let rec find_graph cx id = - let _, constraints = find_constraints cx id in + let (_, constraints) = find_constraints cx id in constraints and find_constraints cx id = - let root_id, root = find_root cx id in - root_id, root.Constraint.constraints + let (root_id, root) = find_root cx id in + (root_id, root.Constraint.constraints) (* Find the root of a type variable, potentially traversing a chain of type variables, while short-circuiting all the type variables in the chain to the root during traversal to speed up future traversals. *) and find_root cx id = - let open Constraint in - match IMap.get id (graph cx) with - | Some (Goto next_id) -> - let root_id, root = find_root cx next_id in - if root_id != next_id then add_tvar cx id (Goto root_id) else (); - root_id, root - - | Some (Root root) -> - id, root - - | None -> - let msg = Utils_js.spf "find_root: tvar %d not found in file %s" id - (File_key.to_string @@ file cx) + Constraint.( + match IMap.get id (graph cx) with + | Some (Goto next_id) -> + let (root_id, root) = find_root cx next_id in + if root_id != next_id then + add_tvar cx id (Goto root_id) + else + (); + (root_id, root) + | Some (Root root) -> (id, root) + | None -> + let msg = + Utils_js.spf "find_root: tvar %d not found in file %s" id (File_key.to_string @@ file cx) in - Utils_js.assert_false msg + Utils_js.assert_false msg) let rec find_resolved cx = function | Type.OpenT (_, id) -> - begin match find_graph cx id with - | Constraint.Resolved t -> Some t - | Constraint.Unresolved _ -> None - end + Constraint.( + begin + match find_graph cx id with + | Resolved (_, t) + | FullyResolved (_, t) -> + Some t + | Unresolved _ -> None + end) | Type.AnnotT (_, t, _) -> find_resolved cx t | t -> Some t + +let rec find_trust_root cx (id : Trust_constraint.ident) = + Trust_constraint.( + match IMap.get id (trust_graph cx) with + | Some (TrustGoto next_id) -> + let (root_id, root) = find_trust_root cx next_id in + if root_id != next_id then Trust_constraint.new_goto root_id |> add_trust_var cx id; + (root_id, root) + | Some (TrustRoot root) -> (id, root) + | None -> + let msg = + Utils_js.spf + "find_trust_root: trust var %d not found in file %s" + id + (File_key.to_string @@ file cx) + in + Utils_js.assert_false msg) + +let find_trust_constraints cx id = + let (root_id, root) = find_trust_root cx id in + (root_id, Trust_constraint.get_constraints root) + +let find_trust_graph cx id = + let (_, constraints) = find_trust_constraints cx id in + constraints diff --git a/src/typing/context.mli b/src/typing/context.mli index 6e4bd8634f3..6c46949e9ed 100644 --- a/src/typing/context.mli +++ b/src/typing/context.mli @@ -1,21 +1,26 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -open Utils_js +open Loc_collections exception Props_not_found of Type.Properties.id + exception Exports_not_found of Type.Exports.id + exception Require_not_found of string + exception Module_not_found of string + exception Tvar_not_found of Constraint.ident type env = Scope.t list type t + type sig_t type metadata = { @@ -24,12 +29,14 @@ type metadata = { munge_underscores: bool; verbose: Verbose.t option; weak: bool; + include_suppressions: bool; jsx: Options.jsx_mode; strict: bool; strict_local: bool; (* global *) max_literal_length: int; enable_const_params: bool; + enable_enums: bool; enforce_strict_call_arity: bool; esproposal_class_static_fields: Options.esproposal_feature_mode; esproposal_class_instance_fields: Options.esproposal_feature_mode; @@ -37,132 +44,290 @@ type metadata = { esproposal_export_star_as: Options.esproposal_feature_mode; esproposal_optional_chaining: Options.esproposal_feature_mode; esproposal_nullish_coalescing: Options.esproposal_feature_mode; + exact_by_default: bool; + facebook_fbs: string option; facebook_fbt: string option; + haste_module_ref_prefix: string option; ignore_non_literal_requires: bool; max_trace_depth: int; + recursion_limit: int; root: Path.t; strip_root: bool; suppress_comments: Str.regexp list; suppress_types: SSet.t; max_workers: int; + default_lib_dir: Path.t option; + trust_mode: Options.trust_mode; + type_asserts: bool; } -type module_kind = - | CommonJSModule of Loc.t option - | ESModule +type phase = + | Checking + | Merging + +type type_assert_kind = + | Is + | Throws + | Wraps + +type voidable_check = { + public_property_map: Type.Properties.id; + private_property_map: Type.Properties.id; + errors: ALoc.t Property_assignment.errors; +} + +val make_sig : unit -> sig_t + +val make : + sig_t -> + metadata -> + File_key.t -> + ALoc.table Lazy.t Utils_js.FilenameMap.t -> + ALoc.reverse_table Lazy.t -> + string -> + phase -> + t -type type_assert_kind = Is | Throws | Wraps +val metadata_of_options : Options.t -> metadata -val make_sig: unit -> sig_t -val make: sig_t -> metadata -> File_key.t -> string -> t -val metadata_of_options: Options.t -> metadata +val trust_constructor : t -> unit -> Trust.trust_rep -val sig_cx: t -> sig_t -val graph_sig: sig_t -> Constraint.node IMap.t -val find_module_sig: sig_t -> string -> Type.t +val cx_with_trust : t -> (unit -> Trust.trust_rep) -> t + +val sig_cx : t -> sig_t + +val graph_sig : sig_t -> Constraint.node IMap.t + +val find_module_sig : sig_t -> string -> Type.t (* accessors *) -val all_unresolved: t -> ISet.t IMap.t -val annot_table: t -> (Loc.t, Type.t) Hashtbl.t -val max_literal_length: t -> int -val enable_const_params: t -> bool -val enforce_strict_call_arity: t -> bool -val envs: t -> env IMap.t -val errors: t -> Errors.ErrorSet.t -val error_suppressions: t -> Error_suppressions.t -val esproposal_class_static_fields: t -> Options.esproposal_feature_mode -val esproposal_class_instance_fields: t -> Options.esproposal_feature_mode -val esproposal_decorators: t -> Options.esproposal_feature_mode -val esproposal_export_star_as: t -> Options.esproposal_feature_mode -val esproposal_optional_chaining: t -> Options.esproposal_feature_mode -val esproposal_nullish_coalescing: t -> Options.esproposal_feature_mode -val evaluated: t -> Type.t IMap.t -val file: t -> File_key.t -val find_props: t -> Type.Properties.id -> Type.Properties.t -val find_call: t -> int -> Type.t -val find_exports: t -> Type.Exports.id -> Type.Exports.t -val find_require: t -> Loc.t -> Type.t -val find_module: t -> string 
-> Type.t -val find_tvar: t -> Constraint.ident -> Constraint.node -val mem_nominal_id: t -> Constraint.ident -> bool -val graph: t -> Constraint.node IMap.t -val import_stmts: t -> (Loc.t, Loc.t) Flow_ast.Statement.ImportDeclaration.t list -val imported_ts: t -> Type.t SMap.t -val is_checked: t -> bool -val is_verbose: t -> bool -val is_weak: t -> bool -val is_strict: t -> bool -val is_strict_local: t -> bool -val severity_cover: t -> ExactCover.lint_severity_cover Utils_js.FilenameMap.t -val max_trace_depth: t -> int -val module_kind: t -> module_kind -val require_map: t -> Type.t LocMap.t -val module_map: t -> Type.t SMap.t -val module_ref: t -> string -val property_maps: t -> Type.Properties.map -val call_props: t -> Type.t IMap.t -val refs_table: t -> (Loc.t, Loc.t) Hashtbl.t -val export_maps: t -> Type.Exports.map -val root: t -> Path.t -val facebook_fbt: t -> string option -val should_ignore_non_literal_requires: t -> bool -val should_munge_underscores: t -> bool -val should_strip_root: t -> bool -val suppress_comments: t -> Str.regexp list -val suppress_types: t -> SSet.t -val type_graph: t -> Graph_explorer.graph -val type_table: t -> Type_table.t -val type_asserts: t -> (type_assert_kind * Loc.t) LocMap.t -val verbose: t -> Verbose.t option -val max_workers: t -> int -val jsx: t -> Options.jsx_mode -val exists_checks: t -> ExistsCheck.t LocMap.t -val exists_excuses: t -> ExistsCheck.t LocMap.t -val use_def: t -> Scope_api.info * Ssa_api.values -val pid_prefix: t -> string - -val copy_of_context: t -> t -val merge_into: sig_t -> sig_t -> unit - -val push_declare_module: t -> string -> unit -val pop_declare_module: t -> unit +val current_phase : t -> phase + +val all_unresolved : t -> ISet.t IMap.t + +val metadata : t -> metadata + +val max_literal_length : t -> int + +val enable_const_params : t -> bool + +val enable_enums : t -> bool + +val enforce_strict_call_arity : t -> bool + +val envs : t -> env IMap.t + +val errors : t -> Flow_error.ErrorSet.t + +val error_suppressions : t -> Error_suppressions.t + +val esproposal_class_static_fields : t -> Options.esproposal_feature_mode + +val esproposal_class_instance_fields : t -> Options.esproposal_feature_mode + +val esproposal_decorators : t -> Options.esproposal_feature_mode + +val esproposal_export_star_as : t -> Options.esproposal_feature_mode + +val esproposal_optional_chaining : t -> Options.esproposal_feature_mode + +val esproposal_nullish_coalescing : t -> Options.esproposal_feature_mode + +val evaluated : t -> Type.t IMap.t + +val exact_by_default : t -> bool + +val file : t -> File_key.t + +val aloc_tables : t -> ALoc.table Lazy.t Utils_js.FilenameMap.t + +val find_props : t -> Type.Properties.id -> Type.Properties.t + +val find_real_props : t -> Type.Properties.id -> Type.Properties.t + +val find_call : t -> int -> Type.t + +val find_exports : t -> Type.Exports.id -> Type.Exports.t + +val find_require : t -> ALoc.t -> Type.t + +val find_module : t -> string -> Type.t + +val find_tvar : t -> Constraint.ident -> Constraint.node + +val mem_nominal_id : t -> Constraint.ident -> bool + +val graph : t -> Constraint.node IMap.t + +val trust_graph : t -> Trust_constraint.node IMap.t + +val import_stmts : t -> (ALoc.t, ALoc.t) Flow_ast.Statement.ImportDeclaration.t list + +val imported_ts : t -> Type.t SMap.t + +val is_checked : t -> bool + +val is_verbose : t -> bool + +val is_weak : t -> bool + +val is_strict : t -> bool + +val is_strict_local : t -> bool + +val include_suppressions : t -> bool + +val severity_cover : t -> 
ExactCover.lint_severity_cover Utils_js.FilenameMap.t + +val max_trace_depth : t -> int + +val module_kind : t -> Module_info.kind + +val require_map : t -> Type.t ALocMap.t + +val module_map : t -> Type.t SMap.t + +val module_ref : t -> string + +val property_maps : t -> Type.Properties.map + +val call_props : t -> Type.t IMap.t + +val export_maps : t -> Type.Exports.map + +val recursion_limit : t -> int + +val root : t -> Path.t + +val facebook_fbs : t -> string option + +val facebook_fbt : t -> string option + +val haste_module_ref_prefix : t -> string option + +val should_ignore_non_literal_requires : t -> bool + +val should_munge_underscores : t -> bool + +val should_strip_root : t -> bool + +val suppress_comments : t -> Str.regexp list + +val suppress_types : t -> SSet.t + +val default_lib_dir : t -> Path.t option + +val trust_mode : t -> Options.trust_mode + +val trust_tracking : t -> bool + +val trust_errors : t -> bool + +val type_asserts : t -> bool + +val type_graph : t -> Graph_explorer.graph + +val type_asserts_map : t -> (type_assert_kind * ALoc.t) ALocMap.t + +val verbose : t -> Verbose.t option + +val max_workers : t -> int + +val jsx : t -> Options.jsx_mode + +val exists_checks : t -> ExistsCheck.t ALocMap.t + +val exists_excuses : t -> ExistsCheck.t ALocMap.t + +val voidable_checks : t -> voidable_check list + +val use_def : t -> Scope_api.With_ALoc.info * Ssa_api.With_ALoc.values + +val pid_prefix : t -> string + +val copy_of_context : t -> t + +val merge_into : sig_t -> sig_t -> unit + +(* modules *) +val push_declare_module : t -> Module_info.t -> unit + +val pop_declare_module : t -> unit + +val module_info : t -> Module_info.t (* mutators *) -val add_env: t -> int -> env -> unit -val add_error: t -> Errors.error -> unit -val add_error_suppression: t -> Loc.t -> unit -val add_severity_cover: t -> File_key.t -> ExactCover.lint_severity_cover -> unit -val add_lint_suppressions: t -> LocSet.t -> unit -val add_import_stmt: t -> (Loc.t, Loc.t) Flow_ast.Statement.ImportDeclaration.t -> unit -val add_imported_t: t -> string -> Type.t -> unit -val add_require: t -> Loc.t -> Type.t -> unit -val add_module: t -> string -> Type.t -> unit -val add_property_map: t -> Type.Properties.id -> Type.Properties.t -> unit -val add_call_prop: t -> int -> Type.t -> unit -val add_export_map: t -> Type.Exports.id -> Type.Exports.t -> unit -val add_tvar: t -> Constraint.ident -> Constraint.node -> unit -val add_nominal_id: t -> Constraint.ident -> unit -val add_type_assert: t -> Loc.t -> (type_assert_kind * Loc.t) -> unit -val remove_all_errors: t -> unit -val remove_all_error_suppressions: t -> unit -val remove_all_lint_severities: t -> unit -val remove_tvar: t -> Constraint.ident -> unit -val set_envs: t -> env IMap.t -> unit -val set_evaluated: t -> Type.t IMap.t -> unit -val set_type_graph: t -> Graph_explorer.graph -> unit -val set_all_unresolved: t -> ISet.t IMap.t -> unit -val set_graph: t -> Constraint.node IMap.t -> unit -val set_module_kind: t -> module_kind -> unit -val set_property_maps: t -> Type.Properties.map -> unit -val set_call_props: t -> Type.t IMap.t -> unit -val set_export_maps: t -> Type.Exports.map -> unit -val set_exists_checks: t -> ExistsCheck.t LocMap.t -> unit -val set_exists_excuses: t -> ExistsCheck.t LocMap.t -> unit -val set_use_def: t -> Scope_api.info * Ssa_api.values -> unit -val set_module_map: t -> Type.t SMap.t -> unit - -val clear_intermediates: t -> unit -val clear_master_shared: t -> sig_t -> unit +val add_env : t -> int -> env -> unit + +val add_error 
: t -> ALoc.t Flow_error.t -> unit + +val add_error_suppression : t -> Loc.t -> unit + +val add_severity_cover : t -> File_key.t -> ExactCover.lint_severity_cover -> unit + +val add_lint_suppressions : t -> LocSet.t -> unit + +val add_import_stmt : t -> (ALoc.t, ALoc.t) Flow_ast.Statement.ImportDeclaration.t -> unit + +val add_imported_t : t -> string -> Type.t -> unit + +val add_require : t -> ALoc.t -> Type.t -> unit + +val add_module : t -> string -> Type.t -> unit + +val add_property_map : t -> Type.Properties.id -> Type.Properties.t -> unit + +val add_call_prop : t -> int -> Type.t -> unit + +val add_export_map : t -> Type.Exports.id -> Type.Exports.t -> unit + +val add_tvar : t -> Constraint.ident -> Constraint.node -> unit + +val add_trust_var : t -> Trust_constraint.ident -> Trust_constraint.node -> unit + +val add_nominal_id : t -> Constraint.ident -> unit + +val add_type_assert : t -> ALoc.t -> type_assert_kind * ALoc.t -> unit + +val add_voidable_check : t -> voidable_check -> unit + +val remove_all_errors : t -> unit + +val remove_all_error_suppressions : t -> unit + +val remove_all_lint_severities : t -> unit + +val remove_tvar : t -> Constraint.ident -> unit + +val set_envs : t -> env IMap.t -> unit + +val set_evaluated : t -> Type.t IMap.t -> unit + +val set_type_graph : t -> Graph_explorer.graph -> unit + +val set_all_unresolved : t -> ISet.t IMap.t -> unit + +val set_graph : t -> Constraint.node IMap.t -> unit + +val set_trust_graph : t -> Trust_constraint.node IMap.t -> unit + +val set_property_maps : t -> Type.Properties.map -> unit + +val set_call_props : t -> Type.t IMap.t -> unit + +val set_export_maps : t -> Type.Exports.map -> unit + +val set_exists_checks : t -> ExistsCheck.t ALocMap.t -> unit + +val set_exists_excuses : t -> ExistsCheck.t ALocMap.t -> unit + +val set_use_def : t -> Scope_api.With_ALoc.info * Ssa_api.With_ALoc.values -> unit + +val set_module_map : t -> Type.t SMap.t -> unit + +val clear_intermediates : t -> unit + +val clear_master_shared : t -> sig_t -> unit (* Flow allows you test test if a property exists inside a conditional. However, we only wan to * allow this test if there's a chance that the property might exist. So `if (foo.bar)` should be @@ -176,33 +341,58 @@ val clear_master_shared: t -> sig_t -> unit * we record if testing a property ever succeeds. If if never succeeds after typechecking is done, * we emit an error. 
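 *
 * A rough sketch of the intended calling pattern, matching the signatures declared
 * below (hypothetical caller code, not part of this interface):
 *
 *   (* a property test on the tvar [id] succeeded at least once *)
 *   Context.test_prop_hit cx id;
 *   (* a test failed: remember the property name, reasons and use_op *)
 *   Context.test_prop_miss cx id (Some "bar") (lreason, ureason) use_op;
 *   (* once typechecking is done, report every test that never hit *)
 *   Context.test_prop_get_never_hit cx
 *   |> List.iter (fun (name, (lreason, ureason), use_op) -> (* emit the error *) ...)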
*) -val test_prop_hit: t -> Constraint.ident -> unit -val test_prop_miss: t -> Constraint.ident -> string option -> (Reason.t * Reason.t) -> Type.use_op -> unit -val test_prop_get_never_hit: t -> (string option * (Reason.t * Reason.t) * Type.use_op) list +val test_prop_hit : t -> Constraint.ident -> unit + +val test_prop_miss : + t -> Constraint.ident -> string option -> Reason.t * Reason.t -> Type.use_op -> unit + +val test_prop_get_never_hit : t -> (string option * (Reason.t * Reason.t) * Type.use_op) list + +val mark_optional_chain : t -> ALoc.t -> Reason.t -> useful:bool -> unit + +val unnecessary_optional_chains : t -> (ALoc.t * Reason.t) list -val mark_optional_chain: t -> Loc.t -> Reason.t -> useful:bool -> unit -val unnecessary_optional_chains: t -> (Loc.t * Reason.t) list -val mark_invariant: t -> Loc.t -> Reason.t -> useful:bool -> unit -val unnecessary_invariants: t -> (Loc.t * Reason.t) list +val mark_invariant : t -> ALoc.t -> Reason.t -> useful:bool -> unit + +val unnecessary_invariants : t -> (ALoc.t * Reason.t) list (* utils *) -val iter_props: t -> Type.Properties.id -> (string -> Type.Property.t -> unit) -> unit -val has_prop: t -> Type.Properties.id -> string -> bool -val get_prop: t -> Type.Properties.id -> string -> Type.Property.t option -val set_prop: t -> Type.Properties.id -> string -> Type.Property.t -> unit -val has_export: t -> Type.Exports.id -> string -> bool -val set_export: t -> Type.Exports.id -> string -> (Loc.t option * Type.t) -> unit +val iter_props : t -> Type.Properties.id -> (string -> Type.Property.t -> unit) -> unit + +val iter_real_props : t -> Type.Properties.id -> (string -> Type.Property.t -> unit) -> unit + +val has_prop : t -> Type.Properties.id -> string -> bool + +val get_prop : t -> Type.Properties.id -> string -> Type.Property.t option + +val set_prop : t -> Type.Properties.id -> string -> Type.Property.t -> unit + +val has_export : t -> Type.Exports.id -> string -> bool + +val set_export : t -> Type.Exports.id -> string -> ALoc.t option * Type.t -> unit (* constructors *) -val make_property_map: t -> Type.Properties.t -> Type.Properties.id -val make_call_prop: t -> Type.t -> int -val make_export_map: t -> Type.Exports.t -> Type.Exports.id -val make_nominal: t -> int - -val find_constraints: - t -> - Constraint.ident -> - Constraint.ident * Constraint.constraints -val find_graph: t -> Constraint.ident -> Constraint.constraints -val find_root: t -> Constraint.ident -> Constraint.ident * Constraint.root -val find_resolved: t -> Type.t -> Type.t option +val generate_property_map : t -> Type.Properties.t -> Type.Properties.id + +val make_source_property_map : t -> Type.Properties.t -> ALoc.t -> Type.Properties.id + +val make_call_prop : t -> Type.t -> int + +val make_export_map : t -> Type.Exports.t -> Type.Exports.id + +val make_nominal : t -> int + +val find_constraints : t -> Constraint.ident -> Constraint.ident * Constraint.constraints + +val find_graph : t -> Constraint.ident -> Constraint.constraints + +val find_root : t -> Constraint.ident -> Constraint.ident * Constraint.root + +val find_resolved : t -> Type.t -> Type.t option + +val find_trust_constraints : + t -> Trust_constraint.ident -> Trust_constraint.ident * Trust_constraint.constraints + +val find_trust_graph : t -> Trust_constraint.ident -> Trust_constraint.constraints + +val find_trust_root : t -> Trust_constraint.ident -> Trust_constraint.ident * Trust_constraint.root diff --git a/src/typing/coverage.ml b/src/typing/coverage.ml new file mode 100644 index 
00000000000..701e8bb8231 --- /dev/null +++ b/src/typing/coverage.ml @@ -0,0 +1,312 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(** + * This module computes whether a type is considered covered. It does so by + * visiting the type until a "concrete" constructor is found. We consider a + * constructor concrete if it has some runtime significance. So for example the + * arrow constructor and the object constructor are concrete, whereas the union + * or the intersection constructors are not. This means that whenever we encounter + * a union we need to visit its parts to determine if this is a covered type. + * + * Some constructors that we, perhaps controversially, consider concrete are: + * - AnyWithLowerBoundT + * - AnyWithUpperBoundT + * - BoundT + * - KeysT + * - ExistsT + * + * In addition to being considered concrete the above constructors are also + * considered covered. + *) + +open Type +open Utils_js + +type op_mode = + | OpAnd + | OpOr + +let unit_of_op = function + | OpAnd -> true (* mixed *) + | OpOr -> false + +(* empty *) + +module Taint = struct + type t = + | Untainted + | Tainted + + let of_trust cx tr = + if Trust_helpers.actual_trust cx tr |> Trust.is_tainted then + Tainted + else + Untainted + + let to_string = function + | Untainted -> "Untainted" + | Tainted -> "Tainted" + + let m_and = function + | (Tainted, t) + | (t, Tainted) -> + t + | (Untainted, Untainted) -> Untainted + + let m_or = function + | (Tainted, _) + | (_, Tainted) -> + Tainted + | (Untainted, Untainted) -> Untainted + + let to_bool = function + | Untainted -> true + | Tainted -> false + + let merge = function + | OpAnd -> m_and + | OpOr -> m_or +end + +module Kind = struct + type t = + | Checked + | Any + | Empty + + let to_string = function + | Checked -> "Checked" + | Any -> "Any" + | Empty -> "Empty" + + let m_and = function + | (Any, _) -> Any + | (_, Any) -> Any + | (Empty, _) -> Empty + | (_, Empty) -> Empty + | (Checked, Checked) -> Checked + + let m_or = function + | (Any, _) -> Any + | (_, Any) -> Any + | (Empty, m2) -> m2 + | (m1, Empty) -> m1 + | (Checked, Checked) -> Checked + + let merge kind x = + match kind with + | OpAnd -> m_and x + | OpOr -> m_or x + + let to_bool = function + | Any + | Empty -> + false + | Checked -> true +end + +let merge op ((k1, t1), (k2, t2)) = (Kind.merge op (k1, k2), Taint.merge op (t1, t2)) + +type tvar_status = + | Started + | Done of (Kind.t * Taint.t) + +class visitor = + object (self) + val mutable tvar_cache : tvar_status IMap.t = IMap.empty + (** + * Type variables may appear in a cycle in the dependency graph, which requires + * us to track the ones we've visited to avoid infinite recursion. There are three + * stages of tvar resolution w.r.t coverage: + * + * - The tvar has not been seen before (there is no entry in tvar_cache). In this + * case we descend into the lower bounds of the tvar, marking its binding as + * Started in the tvar_cache. + * + * - The tvar has been seen and has been resolved (status = Done _). In this case + * we reuse the cached result. + * + * - The tvar is in the process of resolution (status = Started). + * These are types of the form: + * + * type X = X | number + * ^ + * we consider the recursive occurence as uncovered (Any). 
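+       *      (Under both merge operators defined above, the Any kind absorbs whatever the
+       *      rest of the cycle produces, so the whole tvar ends up reported as uncovered.)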
This case should + * be rare and it's arguable if we should be allowing it in the first place, + * so we assign the value that corresponds to the fewest guarantees. + *) + + method private tvar cx id = + let (root_id, constraints) = Context.find_constraints cx id in + if id != root_id then + self#tvar cx root_id + else + match IMap.get root_id tvar_cache with + | Some Started -> (Kind.Any, Taint.Tainted) + | Some (Done cov) -> cov + | None -> + tvar_cache <- IMap.add root_id Started tvar_cache; + Constraint.( + let cov = + match constraints with + | Resolved (_, t) + | FullyResolved (_, t) -> + self#type_ cx t + | Unresolved bounds -> + let bounds = TypeMap.keys bounds.lower in + self#types_list cx OpOr bounds + in + tvar_cache <- IMap.add root_id (Done cov) tvar_cache; + cov) + + method type_ cx = + function + | OpenT (_, id) -> self#tvar cx id + | MergedT (_, uses) -> self#merged_t cx uses + | EvalT (t, _, id) -> self#eval_t cx t id + (* Non-concrete (fallthrough) constructors *) + | AnnotT (_, t, _) + | ExactT (_, t) + | DefT (_, _, PolyT (_, _, t, _)) + | TypeAppT (_, _, t, _) + | DefT (_, _, TypeT (_, t)) + | OpenPredT (_, t, _, _) + | ReposT (_, t) + | ShapeT t + | ThisClassT (_, t) + | ThisTypeAppT (_, t, _, _) -> + self#type_ cx t + | UnionT (_, rep) -> + let (t0, (t1, ts)) = UnionRep.members_nel rep in + self#types_nel cx OpOr (t0, t1 :: ts) + | IntersectionT (_, rep) -> + let (t0, (t1, ts)) = InterRep.members_nel rep in + self#types_nel cx OpAnd (t0, t1 :: ts) + (* Concrete covered constructors *) + | BoundT _ + | CustomFunT _ + | ExistsT _ + | FunProtoT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ + | InternalT _ + | KeysT _ + | MaybeT _ + | ModuleT _ + | NullProtoT _ + | OpaqueT _ + | ObjProtoT _ + | OptionalT _ -> + (Kind.Checked, Taint.Untainted) + | DefT (_, t, ArrT _) + | DefT (_, t, BoolT _) + | DefT (_, t, CharSetT _) + | DefT (_, t, ClassT _) + | DefT (_, t, FunT _) + | DefT (_, t, InstanceT _) + | DefT (_, t, IdxWrapper _) + | DefT (_, t, MixedT _) + | DefT (_, t, NumT _) + | DefT (_, t, NullT) + | DefT (_, t, ObjT _) + | DefT (_, t, ReactAbstractComponentT _) + | DefT (_, t, SingletonNumT _) + | DefT (_, t, SingletonStrT _) + | DefT (_, t, SingletonBoolT _) + | DefT (_, t, StrT _) + | DefT (_, t, VoidT) -> + (Kind.Checked, Taint.of_trust cx t) + (* Concrete uncovered constructors *) + (* TODO: Rethink coverage and trust for these types *) + | MatchingPropT _ + | TypeDestructorTriggerT _ -> + (Kind.Empty, Taint.Untainted) + | DefT (_, t, EmptyT _) -> (Kind.Empty, Taint.of_trust cx t) + | AnyT _ -> (Kind.Any, Taint.Tainted) + + method private types_of_use acc = + function + | UseT (_, t) -> t :: acc + | ReposLowerT (_, _, u) -> self#types_of_use acc u + | _ -> acc + + method private merged_t cx uses = + let ts = List.fold_left self#types_of_use [] uses in + self#types_list cx OpAnd ts + + method private eval_t cx t id = + let evaluated = Context.evaluated cx in + let t = + match IMap.get id evaluated with + | Some cached -> cached + | None -> t + in + self#type_ cx t + + method private types_ cx op acc = + function + | [] -> acc + | t :: ts -> + let cov = self#type_ cx t in + let ((merged_kind, _) as merged) = merge op (cov, acc) in + begin + match merged_kind with + | Kind.Any -> + (* Cannot recover from Any, so exit early *) + (Kind.Any, Taint.Tainted) + | Kind.Checked + | Kind.Empty -> + self#types_ cx op merged ts + end + + method private types_list cx op ts = + match ts with + | [] -> (Kind.Empty, Taint.Tainted) + | t :: ts -> self#types_nel cx op 
(t, ts) + + method private types_nel cx op (t, ts) = + let (init_kind, init_trust) = self#type_ cx t in + match init_kind with + | Kind.Any -> (Kind.Any, Taint.Tainted) + | Kind.Checked + | Kind.Empty -> + self#types_ cx op (init_kind, init_trust) ts + end + +open Coverage_response + +let result_of_coverage = function + | (Kind.Any, Taint.Untainted) -> + assert_false "Any coverage kind cannot be associated with untainted" + | (Kind.Any, _) -> Uncovered + | (Kind.Empty, _) -> Empty + | (Kind.Checked, Taint.Tainted) -> Tainted + | (Kind.Checked, Taint.Untainted) -> Untainted + +let to_bool = function + | Empty + | Uncovered -> + false + | Tainted + | Untainted -> + true + +let m_or = function + | (Uncovered, _) + | (_, Uncovered) -> + Uncovered + | (Empty, m2) + | (Untainted, m2) -> + m2 + | (m1, Empty) + | (m1, Untainted) -> + m1 + | (Tainted, Tainted) -> Tainted + +let initial_coverage = { untainted = 0; tainted = 0; uncovered = 0; empty = 0 } diff --git a/src/typing/coverage_response/coverage_response.ml b/src/typing/coverage_response/coverage_response.ml new file mode 100644 index 00000000000..7f607e1ce90 --- /dev/null +++ b/src/typing/coverage_response/coverage_response.ml @@ -0,0 +1,27 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* Expressions can be in one of four states: + - Uncovered: This is an any or any-like type and is not covered by Flow. + - Empty: This is an empty type, representing unreachable code, and is not covered by Flow. + - Tainted: This is covered in that it has a static type, but trust analysis has indicated + that this type may not accurately represent its type at runtime. + - Untainted: This is covered, and its static type can be shown to accurately reflect + its runtime type. + *) +type expression_coverage = + | Uncovered + | Empty + | Tainted + | Untainted + +type file_coverage = { + untainted: int; + tainted: int; + uncovered: int; + empty: int; +} diff --git a/src/typing/coverage_response/dune b/src/typing/coverage_response/dune new file mode 100644 index 00000000000..33caef69777 --- /dev/null +++ b/src/typing/coverage_response/dune @@ -0,0 +1,4 @@ +(library + (name flow_typing_coverage_response) + (wrapped false) +) diff --git a/src/typing/custom_fun_kit.ml b/src/typing/custom_fun_kit.ml new file mode 100644 index 00000000000..74d50dfaa94 --- /dev/null +++ b/src/typing/custom_fun_kit.ml @@ -0,0 +1,241 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Reason +open Type +module FlowError = Flow_error + +module type CUSTOM_FUN = sig + val run : + Context.t -> + Trace.t -> + use_op:Type.use_op -> + Reason.t -> + Type.custom_fun_kind -> + Type.t list -> + Type.t option -> + Type.t -> + unit +end + +module Kit (Flow : Flow_common.S) = struct + include Flow + + (* Creates the appropriate constraints for the compose() function and its + * reversed variant. *) + let rec run_compose cx trace ~use_op reason_op reverse fns spread_fn tin tout = + match (reverse, fns, spread_fn) with + (* Call the tail functions in our array first and call our head function + * last after that. 
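+     * As a purely illustrative sketch (not code from this change): composing the
+     * functions [f; g; h] over an input x builds constraints equivalent to f (g (h x)),
+     * while the reversed variant below builds h (g (f x)). In plain OCaml the two
+     * orderings would look like:
+     *
+     *   let compose fns x = List.fold_right (fun f acc -> f acc) fns x
+     *   let compose_reverse fns x = List.fold_left (fun acc f -> f acc) x fns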
*) + | (false, fn :: fns, _) -> + let reason = replace_desc_reason (RCustom "compose intermediate value") (reason_of_t fn) in + let tvar = + Tvar.mk_where cx reason (fun tvar -> + run_compose cx trace ~use_op reason_op reverse fns spread_fn tin tvar) + in + rec_flow + cx + trace + (fn, CallT (use_op, reason, mk_functioncalltype reason_op None [Arg tvar] tout)) + (* If the compose function is reversed then we want to call the tail + * functions in our array after we call the head function. *) + | (true, fn :: fns, _) -> + let reason = replace_desc_reason (RCustom "compose intermediate value") (reason_of_t fn) in + let tvar = + Tvar.mk_where cx reason (fun tvar -> + rec_flow + cx + trace + (fn, CallT (use_op, reason, mk_functioncalltype reason_op None [Arg tin] tvar))) + in + run_compose cx trace ~use_op reason_op reverse fns spread_fn tvar tout + (* If there are no functions and no spread function then we are an identity + * function. *) + | (_, [], None) -> rec_flow_t cx trace (tin, tout) + (* Correctly implementing spreads of unknown arity for the compose function + * is a little tricky. Let's look at a couple of cases. + * + * const fn = (x: number): string => x.toString(); + * declare var fns: Array; + * const x = 42; + * compose(...fns)(x); + * + * This would be invalid. We could have 0 or 1 fn in our fns array, but 2 fn + * would be wrong because string is incompatible with number. It breaks down + * as such: + * + * 1. x = 42 + * 2. fn(x) = '42' + * 3. fn(fn(x)) is an error because '42' is not a number. + * + * To get an error in this case we would only need to call the spread + * argument twice. Now let's look at a case where things get recursive: + * + * type Fn = (O) => $PropertyType; + * declare var fns: Array; + * const x = { p: { p: 42 } }; + * compose(...fns)(x); + * + * 1. x = { p: { p: 42 } } + * 2. fn(x) = { p: 42 } + * 3. fn(fn(x)) = 42 + * 4. fn(fn(fn(x))) throws an error because the p property is not in 42. + * + * Here we would need to call fn 3 times before getting an error. Now + * consider: + * + * type Fn = (O) => $PropertyType; + * declare var fns: Array; + * type X = { p: X }; + * declare var x: X; + * compose(...fns)(x); + * + * This is valid. + * + * To implement spreads in compose functions we first add a constraint based + * on tin and tout assuming that the spread is empty. Then we emit recursive + * constraints: + * + * spread_fn(tin) ~> tout + * spread_fn(tout) ~> tin + * + * The implementation of Flow should be able to terminate these recursive + * constraints. If it doesn't then we have a bug. *) + | (_, [], Some spread_fn) -> + run_compose cx trace ~use_op reason_op reverse [] None tin tout; + run_compose cx trace ~use_op reason_op reverse [spread_fn] None tin tout; + run_compose cx trace ~use_op reason_op reverse [spread_fn] None tout tin + + let run cx trace ~use_op reason_op kind args spread_arg tout = + match kind with + | Compose reverse -> + (* Drop the specific argument reasons since run_compose will emit CallTs + * with completely unrelated argument reasons. 
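+       * (Presumably this keeps any resulting errors from blaming the original call-site
+       * arguments, whose reasons would not line up with those CallTs.)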
*) + let use_op = + match use_op with + | Op (FunCall { op; fn; args = _; local }) -> Op (FunCall { op; fn; args = []; local }) + | Op (FunCallMethod { op; fn; prop; args = _; local }) -> + Op (FunCallMethod { op; fn; prop; args = []; local }) + | _ -> use_op + in + let tin = Tvar.mk cx reason_op in + let tvar = Tvar.mk cx reason_op in + run_compose cx trace ~use_op reason_op reverse args spread_arg tin tvar; + let funt = + FunT + ( dummy_static reason_op, + dummy_prototype, + mk_functiontype reason_op [tin] ~rest_param:None ~def_reason:reason_op tvar ) + in + rec_flow_t cx trace (DefT (reason_op, bogus_trust (), funt), tout) + | ReactCreateElement -> + (match args with + (* React.createElement(component) *) + | [component] -> + let config = + let r = replace_desc_reason RReactProps reason_op in + Obj_type.mk_with_proto cx r ~sealed:true ~exact:true ~frozen:true (ObjProtoT r) + in + rec_flow + cx + trace + ( component, + ReactKitT (use_op, reason_op, React.CreateElement0 (false, config, ([], None), tout)) + ) + (* React.createElement(component, config, ...children) *) + | component :: config :: children -> + rec_flow + cx + trace + ( component, + ReactKitT + ( use_op, + reason_op, + React.CreateElement0 (false, config, (children, spread_arg), tout) ) ) + (* React.createElement() *) + | _ -> + (* If we don't have the arguments we need, add an arity error. *) + add_output cx ~trace (Error_message.EReactElementFunArity (reason_op, "createElement", 1))) + | ReactCloneElement -> + (match args with + (* React.cloneElement(element) *) + | [element] -> + (* Create the expected type for our element with a fresh tvar in the + * component position. *) + let expected_element = + get_builtin_typeapp + cx + ~trace + (reason_of_t element) + "React$Element" + [Tvar.mk cx reason_op] + in + (* Flow the element arg to our expected element. *) + rec_flow_t cx trace (element, expected_element); + + (* Flow our expected element to the return type. *) + rec_flow_t cx trace (expected_element, tout) + (* React.cloneElement(element, config, ...children) *) + | element :: config :: children -> + (* Create a tvar for our component. *) + let component = Tvar.mk cx reason_op in + (* Flow the element arg to the element type we expect. *) + rec_flow_t + cx + trace + (element, get_builtin_typeapp cx ~trace reason_op "React$Element" [component]); + + (* Create a React element using the config and children. *) + rec_flow + cx + trace + ( component, + ReactKitT + (use_op, reason_op, React.CreateElement0 (true, config, (children, spread_arg), tout)) + ) + (* React.cloneElement() *) + | _ -> + (* If we don't have the arguments we need, add an arity error. 
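+         * (cloneElement needs at least the element argument, hence the arity of 1 in the
+         * error below)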
*) + add_output cx ~trace (Error_message.EReactElementFunArity (reason_op, "cloneElement", 1))) + | ReactElementFactory component -> + (match args with + (* React.createFactory(component)() *) + | [] -> + let config = + let r = replace_desc_reason RReactProps reason_op in + Obj_type.mk_with_proto cx r ~sealed:true ~exact:true ~frozen:true (ObjProtoT r) + in + rec_flow + cx + trace + ( component, + ReactKitT (use_op, reason_op, React.CreateElement0 (false, config, ([], None), tout)) + ) + (* React.createFactory(component)(config, ...children) *) + | config :: children -> + rec_flow + cx + trace + ( component, + ReactKitT + ( use_op, + reason_op, + React.CreateElement0 (false, config, (children, spread_arg), tout) ) )) + | ObjectAssign + | ObjectGetPrototypeOf + | ObjectSetPrototypeOf + | ReactPropType _ + | ReactCreateClass + | Idx + | TypeAssertIs + | TypeAssertThrows + | TypeAssertWraps + | DebugPrint + | DebugThrow + | DebugSleep -> + failwith "implemented elsewhere" +end diff --git a/src/typing/debug_js.ml b/src/typing/debug_js.ml index 7acd25800bd..fb77e9d4fe1 100644 --- a/src/typing/debug_js.ml +++ b/src/typing/debug_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -25,6 +25,7 @@ let string_of_pred_ctor = function | FunP -> "FunP" | ObjP -> "ObjP" | ArrP -> "ArrP" + | SymbolP -> "SymbolP" | SingletonBoolP _ -> "SingletonBoolP" | SingletonStrP _ -> "SingletonStrP" | SingletonNumP _ -> "SingletonNumP" @@ -41,24 +42,50 @@ let string_of_type_map = function | ObjectMapi _ -> "ObjectMapi" let string_of_polarity = function - | Negative -> "Negative" - | Neutral -> "Neutral" - | Positive -> "Positive" + | Polarity.Negative -> "Negative" + | Polarity.Neutral -> "Neutral" + | Polarity.Positive -> "Positive" -let string_of_enum = function - | Enum.Str x -> spf "string %s" x - | Enum.Num (_,x) -> spf "number %s" x - | Enum.Bool x -> spf "boolean %b" x - | Enum.Null -> "null" - | Enum.Void -> "void" +let string_of_union_enum = function + | UnionEnum.Str x -> spf "string %s" x + | UnionEnum.Num (_, x) -> spf "number %s" x + | UnionEnum.Bool x -> spf "boolean %b" x + | UnionEnum.Null -> "null" + | UnionEnum.Void -> "void" let string_of_sentinel = function - | Enum.One enum -> string_of_enum enum - | Enum.Many enums -> ListUtils.to_string " | " string_of_enum @@ EnumSet.elements enums + | UnionEnum.One enum -> string_of_union_enum enum + | UnionEnum.Many enums -> + ListUtils.to_string " | " string_of_union_enum @@ UnionEnumSet.elements enums -let string_of_rw = function - | Read -> "Read" - | Write _ -> "Write" +let string_of_selector = function + | Elem _ -> "Elem _" (* TODO print info about the key *) + | Prop (x, _) -> spf "Prop %s" x + | ArrRest i -> spf "ArrRest %i" i + | ObjRest xs -> spf "ObjRest [%s]" (String.concat "; " xs) + | Default -> "Default" + +let string_of_destructor = function + | NonMaybeType -> "NonMaybeType" + | PropertyType x -> spf "PropertyType %s" x + | ElementType _ -> "ElementType" + | Bind _ -> "Bind" + | ReadOnlyType -> "ReadOnly" + | SpreadType _ -> "Spread" + | RestType _ -> "Rest" + | ValuesType -> "Values" + | CallType _ -> "CallType" + | TypeMap (TupleMap _) -> "TupleMap" + | TypeMap (ObjectMap _) -> "ObjectMap" + | TypeMap (ObjectMapi _) -> "ObjectMapi" + | ReactElementPropsType -> "ReactElementProps" + | ReactElementConfigType -> "ReactElementConfig" + | 
ReactElementRefType -> "ReactElementRef" + | ReactConfigType _ -> "ReactConfig" + +let string_of_destruct_kind = function + | DestructAnnot -> "Annot" + | DestructInfer -> "Infer" type json_cx = { stack: ISet.t; @@ -68,1617 +95,1467 @@ type json_cx = { strip_root: Path.t option; } +let json_of_aloc ?strip_root ?catch_offset_errors ~offset_table aloc = + (* Okay because this is only for debugging output *) + if ALoc.ALocRepresentationDoNotUse.is_abstract aloc then + Hh_json.( + let key = ALoc.ALocRepresentationDoNotUse.get_key_exn aloc in + let source = ALoc.source aloc in + JSON_Object + [ + ("source", json_of_source ?strip_root source); + ("type", json_source_type_of_source source); + ("key", JSON_Number (ALoc.ALocRepresentationDoNotUse.string_of_key key)); + ]) + else + json_of_loc ?strip_root ?catch_offset_errors ~offset_table (ALoc.to_loc_exn aloc) + +let json_of_reason ?(strip_root = None) ~offset_table r = + Hh_json.( + JSON_Object + [ + ("pos", json_of_aloc ~strip_root ~offset_table (aloc_of_reason r)); + ("desc", JSON_String (string_of_desc (desc_of_reason ~unwrap:false r))); + ]) + let check_depth continuation json_cx = let depth = json_cx.depth - 1 in - if depth < 0 - then fun _ -> Hh_json.JSON_Null - else continuation { json_cx with depth; } + if depth < 0 then + fun _ -> + Hh_json.JSON_Null + else + continuation { json_cx with depth } let rec _json_of_t json_cx t = count_calls ~counter:json_cx.size ~default:Hh_json.JSON_Null (fun () -> - check_depth _json_of_t_impl json_cx t - ) - -and _json_of_tvar json_cx id = Hh_json.( - [ - "id", int_ id - ] @ - if ISet.mem id json_cx.stack then [] - else [ - "node", json_of_node json_cx id - ] -) - -and _json_of_t_impl json_cx t = Hh_json.( - JSON_Object ([ - "reason", json_of_reason ~strip_root:json_cx.strip_root (reason_of_t t); - "kind", JSON_String (string_of_ctor t) - ] @ - match t with - | OpenT (_, id) -> _json_of_tvar json_cx id - - | DefT (_, NumT lit) -> - begin match lit with - | Literal (_, (_, raw)) -> ["literal", JSON_String raw] - | Truthy -> ["refinement", JSON_String "Truthy"] - | AnyLiteral -> [] - end - - | DefT (_, StrT lit) -> _json_of_string_literal lit - - | DefT (_, BoolT b) -> - (match b with - | Some b -> ["literal", JSON_Bool b] - | None -> []) - - | DefT (_, EmptyT) - | DefT (_, MixedT _) - | DefT (_, AnyT) - | DefT (_, NullT) - | DefT (_, VoidT) - -> [] - - | NullProtoT _ - | ObjProtoT _ - | FunProtoT _ - | FunProtoApplyT _ - | FunProtoBindT _ - | FunProtoCallT _ - -> [] - - | DefT (_, FunT (static, proto, funtype)) -> [ - "static", _json_of_t json_cx static; - "prototype", _json_of_t json_cx proto; - "funType", json_of_funtype json_cx funtype - ] - - | DefT (_, ObjT objtype) -> [ - "type", json_of_objtype json_cx objtype - ] - - | DefT (_, ArrT (ArrayAT (elemt, tuple_types))) -> [ - "kind", JSON_String "Array"; - "elemType", _json_of_t json_cx elemt; - "tupleType", match tuple_types with - | Some tuplet -> JSON_Array (List.map (_json_of_t json_cx) tuplet) - | None -> JSON_Null - ] - - | DefT (_, ArrT (TupleAT (elemt, tuple_types))) -> [ - "kind", JSON_String "Tuple"; - "elemType", _json_of_t json_cx elemt; - "tupleType", JSON_Array (List.map (_json_of_t json_cx) tuple_types); - ] - - | DefT (_, ArrT (ROArrayAT (elemt))) -> [ - "kind", JSON_String "ReadOnlyArray"; - "elemType", _json_of_t json_cx elemt; - ] - - | DefT (_, ArrT EmptyAT) -> [ - "kind", JSON_String "EmptyArray"; - ] - - | DefT (_, CharSetT chars) -> [ - "chars", JSON_String (String_utils.CharSet.to_string chars); - ] - - | DefT (_, ClassT t) -> [ 
- "type", _json_of_t json_cx t - ] - - | DefT (_, InstanceT (static, super, implements, instance)) -> [ - "static", _json_of_t json_cx static; - "super", _json_of_t json_cx super; - "implements", JSON_Array (List.map (_json_of_t json_cx) implements); - "instance", json_of_insttype json_cx instance - ] - - | DefT (_, OptionalT t) -> [ - "type", _json_of_t json_cx t - ] - - | EvalT (t, defer_use_t, id) -> [ - "type", _json_of_t json_cx t; - "defer_use_type", json_of_defer_use_t json_cx defer_use_t - ] @ - let evaluated = Context.evaluated json_cx.cx in - begin match IMap.get id evaluated with - | None -> [] - | Some t -> [ "result", _json_of_t json_cx t ] - end - - | DefT (_, PolyT (tparams, t, id)) -> [ - "id", JSON_Number (string_of_int id); - "typeParams", JSON_Array (List.map (json_of_typeparam json_cx) tparams); - "type", _json_of_t json_cx t - ] - - | DefT (_, TypeAppT (_, t, targs)) -> [ - "typeArgs", JSON_Array (List.map (_json_of_t json_cx) targs); - "type", _json_of_t json_cx t - ] - - | ThisClassT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | ThisTypeAppT (_, t, this, targs_opt) -> ( - match targs_opt with - | Some targs -> [ "typeArgs", JSON_Array (List.map (_json_of_t json_cx) targs) ] - | None -> [] - ) @ [ - "thisArg", _json_of_t json_cx this; - "type", _json_of_t json_cx t - ] - - | BoundT (_, name, polarity) -> [ - "name", JSON_String name; - "polarity", json_of_polarity json_cx polarity - ] - - | ExistsT _ -> - [] - - | ExactT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | DefT (_, MaybeT t) -> [ - "type", _json_of_t json_cx t - ] - - | DefT (_, IntersectionT rep) -> [ - let ts = InterRep.members rep in - "types", JSON_Array (List.map (_json_of_t json_cx) ts) - ] - - | DefT (_, UnionT rep) -> [ - let ts = UnionRep.members rep in - "types", JSON_Array (List.map (_json_of_t json_cx) ts) - ] - - | AnyWithLowerBoundT t - | AnyWithUpperBoundT t -> [ - "type", _json_of_t json_cx t - ] - - | MergedT (_, uses) -> [ - "uses", JSON_Array (List.map (_json_of_use_t json_cx) uses); - ] - - | DefT (_, AnyObjT) - | DefT (_, AnyFunT) -> - [] - - | DefT (_, IdxWrapper t) -> [ - "wrappedObj", _json_of_t json_cx t - ] - - | ShapeT t -> [ - "type", _json_of_t json_cx t - ] - - | MatchingPropT (_, x, t) -> [ - "name", JSON_String x; - "type", _json_of_t json_cx t - ] - - | KeysT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | DefT (_, SingletonStrT s) -> [ - "literal", JSON_String s - ] - - | DefT (_, SingletonNumT (_, raw)) -> [ - "literal", JSON_String raw - ] - - | DefT (_, SingletonBoolT b) -> [ - "literal", JSON_Bool b - ] - - | DefT (_, TypeT (_, t)) -> [ - "result", _json_of_t json_cx t - ] - - | AnnotT (_, t, use_desc) -> [ - "type", _json_of_t json_cx t; - "useDesc", JSON_Bool use_desc; - ] - - | OpaqueT (_, opaquetype) -> - let t = match opaquetype.underlying_t with - | Some t -> _json_of_t json_cx t - | None -> JSON_Null in - let st = match opaquetype.super_t with - | Some st -> _json_of_t json_cx st - | None -> JSON_Null in - [ - "type", t; - "id", JSON_String (string_of_int opaquetype.opaque_id); - "supertype", st - ] - - | ModuleT (_, {exports_tmap; cjs_export; has_every_named_export;}, is_strict) -> - let tmap = Context.find_exports json_cx.cx exports_tmap in - let cjs_export = match cjs_export with - | Some(t) -> _json_of_t json_cx t - | None -> JSON_Null - in - [ - "namedExports", json_of_loc_tmap json_cx tmap; - "cjsExport", cjs_export; - "hasEveryNamedExport", JSON_Bool has_every_named_export; - "isStrict", JSON_Bool is_strict; - ] - - | InternalT (ExtendsT 
(_, t1, t2)) -> [ - "type1", _json_of_t json_cx t1; - "type2", _json_of_t json_cx t2 - ] - - | InternalT (ChoiceKitT (_, tool)) -> [ - "tool", JSON_String (match tool with - | Trigger -> "trigger" - ); - ] - - | TypeDestructorTriggerT (_, _, _, s, t) -> [ - "destructor", json_of_destructor json_cx s; - "type", _json_of_t json_cx t; - ] - - | CustomFunT (_, kind) -> [ - "kind", _json_of_custom_fun_kind kind; - ] @ (match kind with - | ReactElementFactory t -> ["componentType", _json_of_t json_cx t] - | _ -> [] - ) - - | OpenPredT (_,t, pos_preds, neg_preds) -> [ - let json_key_map f map = JSON_Object ( - Key_map.elements map |> - List.map (Utils_js.map_pair Key.string_of_key f) - ) in - let json_pred_key_map = json_key_map (json_of_pred json_cx) in - "OpenPred", JSON_Object [ - ("base_type", _json_of_t_impl json_cx t); - ("pos_preds", json_pred_key_map pos_preds); - ("neg_preds", json_pred_key_map neg_preds) - ] - ] - - | ReposT (_, t) - | InternalT (ReposUpperT (_, t)) -> [ - "type", _json_of_t json_cx t - ] - - | InternalT (OptionalChainVoidT _) -> [] - ) -) - -and _json_of_import_kind = Hh_json.(function - | ImportType -> JSON_String "ImportType" - | ImportTypeof -> JSON_String "ImportTypeof" - | ImportValue -> JSON_String "ImportValue" -) - -and _json_of_string_literal = Hh_json.(function - | Literal (_, s) -> ["literal", JSON_String s] - | Truthy -> ["refinement", JSON_String "Truthy"] - | AnyLiteral -> [] -) - -and _json_of_cont json_cx = Hh_json.(function - | Upper u -> [ - "cont", JSON_String "upper"; - "type", _json_of_use_t json_cx u - ] - | Lower (op, l) -> [ - "cont", JSON_String "lower"; - "use", JSON_String (string_of_use_op op); - "type", _json_of_t json_cx l - ] -) - -and _json_of_custom_fun_kind kind = Hh_json.JSON_String (match kind with - | ObjectAssign -> "Object.assign" - | ObjectGetPrototypeOf -> "Object.getPrototypeOf" - | ObjectSetPrototypeOf -> "Object.setPrototypeOf" - | Compose false -> "Compose" - | Compose true -> "ComposeReverse" - | ReactPropType _ -> "ReactPropsCheckType" - | ReactCreateClass -> "React.createClass" - | ReactCreateElement -> "React.createElement" - | ReactCloneElement -> "React.cloneElement" - | ReactElementFactory _ -> "React.createFactory" - | Idx -> "idx" - | TypeAssertIs -> "TypeAssert.is" - | TypeAssertThrows -> "TypeAssert.throws" - | TypeAssertWraps -> "TypeAssert.wraps" - | DebugPrint -> "$Flow$DebugPrint" - | DebugThrow -> "$Flow$DebugThrow" - | DebugSleep -> "$Flow$DebugSleep" -) + check_depth _json_of_t_impl json_cx t) + +and _json_of_tvar json_cx id = + Hh_json.( + [("id", int_ id)] + @ + if ISet.mem id json_cx.stack then + [] + else + [("node", json_of_node json_cx id)]) + +and _json_of_targ json_cx t = + Hh_json.( + JSON_Object + (match t with + | ImplicitArg _ -> [("kind", JSON_String "implicit")] + | ExplicitArg t -> [("kind", JSON_String "explicit"); ("type", _json_of_t json_cx t)])) + +and _json_of_t_impl json_cx t = + Hh_json.( + JSON_Object + ( [ + ( "reason", + json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None (reason_of_t t) ); + ("kind", JSON_String (string_of_ctor t)); + ] + @ + match t with + | OpenT (_, id) -> _json_of_tvar json_cx id + | DefT (_, _, NumT lit) -> + begin + match lit with + | Literal (_, (_, raw)) -> [("literal", JSON_String raw)] + | Truthy -> [("refinement", JSON_String "Truthy")] + | AnyLiteral -> [] + end + | DefT (_, _, StrT lit) -> _json_of_string_literal lit + | DefT (_, _, BoolT b) -> + (match b with + | Some b -> [("literal", JSON_Bool b)] + | None -> []) + | DefT (_, _, EmptyT 
_) + | DefT (_, _, MixedT _) + | AnyT _ + | DefT (_, _, NullT) + | DefT (_, _, VoidT) -> + [] + | NullProtoT _ + | ObjProtoT _ + | FunProtoT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ -> + [] + | DefT (_, _, FunT (static, proto, funtype)) -> + [ + ("static", _json_of_t json_cx static); + ("prototype", _json_of_t json_cx proto); + ("funType", json_of_funtype json_cx funtype); + ] + | DefT (_, _, ObjT objtype) -> [("type", json_of_objtype json_cx objtype)] + | DefT (_, _, ArrT (ArrayAT (elemt, tuple_types))) -> + [ + ("kind", JSON_String "Array"); + ("elemType", _json_of_t json_cx elemt); + ( "tupleType", + match tuple_types with + | Some tuplet -> JSON_Array (Core_list.map ~f:(_json_of_t json_cx) tuplet) + | None -> JSON_Null ); + ] + | DefT (_, _, ArrT (TupleAT (elemt, tuple_types))) -> + [ + ("kind", JSON_String "Tuple"); + ("elemType", _json_of_t json_cx elemt); + ("tupleType", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) tuple_types)); + ] + | DefT (_, _, ArrT (ROArrayAT elemt)) -> + [("kind", JSON_String "ReadOnlyArray"); ("elemType", _json_of_t json_cx elemt)] + | DefT (_, _, CharSetT chars) -> + [("chars", JSON_String (String_utils.CharSet.to_string chars))] + | DefT (_, _, ClassT t) -> [("type", _json_of_t json_cx t)] + | DefT (_, _, InstanceT (static, super, implements, instance)) -> + [ + ("static", _json_of_t json_cx static); + ("super", _json_of_t json_cx super); + ("implements", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) implements)); + ("instance", json_of_insttype json_cx instance); + ] + | OptionalT (_, t) -> [("type", _json_of_t json_cx t)] + | EvalT (t, defer_use_t, id) -> + [ + ("type", _json_of_t json_cx t); + ("defer_use_type", json_of_defer_use_t json_cx defer_use_t); + ] + @ + let evaluated = Context.evaluated json_cx.cx in + begin + match IMap.get id evaluated with + | None -> [] + | Some t -> [("result", _json_of_t json_cx t)] + end + | DefT (_, _, PolyT (_, tparams, t, id)) -> + [ + ("id", JSON_Number (string_of_int id)); + ( "typeParams", + JSON_Array (Core_list.map ~f:(json_of_typeparam json_cx) (Nel.to_list tparams)) ); + ("type", _json_of_t json_cx t); + ] + | TypeAppT (_, _, t, targs) -> + [ + ("typeArgs", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) targs)); + ("type", _json_of_t json_cx t); + ] + | ThisClassT (_, t) -> [("type", _json_of_t json_cx t)] + | ThisTypeAppT (_, t, this, targs_opt) -> + (match targs_opt with + | Some targs -> [("typeArgs", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) targs))] + | None -> []) + @ [("thisArg", _json_of_t json_cx this); ("type", _json_of_t json_cx t)] + | BoundT (_, name, polarity) -> + [("name", JSON_String name); ("polarity", json_of_polarity json_cx polarity)] + | ExistsT _ -> [] + | ExactT (_, t) -> [("type", _json_of_t json_cx t)] + | MaybeT (_, t) -> [("type", _json_of_t json_cx t)] + | IntersectionT (_, rep) -> + [ + (let ts = InterRep.members rep in + ("types", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) ts))); + ] + | UnionT (_, rep) -> + [ + (let ts = UnionRep.members rep in + ("types", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) ts))); + ] + | MergedT (_, uses) -> + [("uses", JSON_Array (Core_list.map ~f:(_json_of_use_t json_cx) uses))] + | DefT (_, _, IdxWrapper t) -> [("wrappedObj", _json_of_t json_cx t)] + | DefT (_, _, ReactAbstractComponentT { config; instance }) -> + [("config", _json_of_t json_cx config); ("instance", _json_of_t json_cx instance)] + | ShapeT t -> [("type", _json_of_t json_cx t)] + | MatchingPropT (_, x, t) -> [("name", JSON_String 
x); ("type", _json_of_t json_cx t)] + | KeysT (_, t) -> [("type", _json_of_t json_cx t)] + | DefT (_, _, SingletonStrT s) -> [("literal", JSON_String s)] + | DefT (_, _, SingletonNumT (_, raw)) -> [("literal", JSON_String raw)] + | DefT (_, _, SingletonBoolT b) -> [("literal", JSON_Bool b)] + | DefT (_, _, TypeT (_, t)) -> [("result", _json_of_t json_cx t)] + | AnnotT (_, t, use_desc) -> + [("type", _json_of_t json_cx t); ("useDesc", JSON_Bool use_desc)] + | OpaqueT (_, opaquetype) -> + let t = + match opaquetype.underlying_t with + | Some t -> _json_of_t json_cx t + | None -> JSON_Null + in + let st = + match opaquetype.super_t with + | Some st -> _json_of_t json_cx st + | None -> JSON_Null + in + [ + ("type", t); + ("id", JSON_String (ALoc.debug_to_string opaquetype.opaque_id)); + ("supertype", st); + ] + | ModuleT (_, { exports_tmap; cjs_export; has_every_named_export }, is_strict) -> + let tmap = Context.find_exports json_cx.cx exports_tmap in + let cjs_export = + match cjs_export with + | Some t -> _json_of_t json_cx t + | None -> JSON_Null + in + [ + ("namedExports", json_of_loc_tmap json_cx tmap); + ("cjsExport", cjs_export); + ("hasEveryNamedExport", JSON_Bool has_every_named_export); + ("isStrict", JSON_Bool is_strict); + ] + | InternalT (ExtendsT (_, t1, t2)) -> + [("type1", _json_of_t json_cx t1); ("type2", _json_of_t json_cx t2)] + | InternalT (ChoiceKitT (_, tool)) -> + [ + ( "tool", + JSON_String + (match tool with + | Trigger -> "trigger") ); + ] + | TypeDestructorTriggerT (_, _, _, s, t) -> + [("destructor", json_of_destructor json_cx s); ("type", _json_of_t json_cx t)] + | CustomFunT (_, kind) -> + [("kind", _json_of_custom_fun_kind kind)] + @ + (match kind with + | ReactElementFactory t -> [("componentType", _json_of_t json_cx t)] + | _ -> []) + | OpenPredT (_, t, pos_preds, neg_preds) -> + [ + (let json_key_map f map = + JSON_Object + (Key_map.elements map |> Core_list.map ~f:(Utils_js.map_pair Key.string_of_key f)) + in + let json_pred_key_map = json_key_map (json_of_pred json_cx) in + ( "OpenPred", + JSON_Object + [ + ("base_type", _json_of_t_impl json_cx t); + ("pos_preds", json_pred_key_map pos_preds); + ("neg_preds", json_pred_key_map neg_preds); + ] )); + ] + | ReposT (_, t) + | InternalT (ReposUpperT (_, t)) -> + [("type", _json_of_t json_cx t)] + | InternalT (OptionalChainVoidT _) -> [] )) + +and _json_of_import_kind = + Hh_json.( + function + | ImportType -> JSON_String "ImportType" + | ImportTypeof -> JSON_String "ImportTypeof" + | ImportValue -> JSON_String "ImportValue") + +and _json_of_string_literal = + Hh_json.( + function + | Literal (_, s) -> [("literal", JSON_String s)] + | Truthy -> [("refinement", JSON_String "Truthy")] + | AnyLiteral -> []) + +and _json_of_cont json_cx = + Hh_json.( + function + | Upper u -> [("cont", JSON_String "upper"); ("type", _json_of_use_t json_cx u)] + | Lower (op, l) -> + [ + ("cont", JSON_String "lower"); + ("use", JSON_String (string_of_use_op op)); + ("type", _json_of_t json_cx l); + ]) + +and _json_of_custom_fun_kind kind = + Hh_json.JSON_String + (match kind with + | ObjectAssign -> "Object.assign" + | ObjectGetPrototypeOf -> "Object.getPrototypeOf" + | ObjectSetPrototypeOf -> "Object.setPrototypeOf" + | Compose false -> "Compose" + | Compose true -> "ComposeReverse" + | ReactPropType _ -> "ReactPropsCheckType" + | ReactCreateClass -> "React.createClass" + | ReactCreateElement -> "React.createElement" + | ReactCloneElement -> "React.cloneElement" + | ReactElementFactory _ -> "React.createFactory" + | Idx -> "idx" + | 
TypeAssertIs -> "TypeAssert.is" + | TypeAssertThrows -> "TypeAssert.throws" + | TypeAssertWraps -> "TypeAssert.wraps" + | DebugPrint -> "$Flow$DebugPrint" + | DebugThrow -> "$Flow$DebugThrow" + | DebugSleep -> "$Flow$DebugSleep") and _json_of_use_t json_cx = check_depth _json_of_use_t_impl json_cx -and _json_of_use_t_impl json_cx t = Hh_json.( - JSON_Object ([ - "reason", json_of_reason ~strip_root:json_cx.strip_root (reason_of_use_t t); - "kind", JSON_String (string_of_use_ctor t) - ] @ - match t with - | UseT (op, t) -> [ - "use", JSON_String (string_of_use_op op); - "type", _json_of_t json_cx t - ] - - | AssertArithmeticOperandT _ -> [] - | AssertBinaryInLHST _ -> [] - | AssertBinaryInRHST _ -> [] - | AssertForInRHST _ -> [] - - | BecomeT (_, t) -> [ - "result", _json_of_t json_cx t - ] - - | BindT (_, _, funtype, pass) -> [ - "funType", json_of_funcalltype json_cx funtype; - "passThrough", JSON_Bool pass - ] - - | CallT (_, _, funtype) -> [ - "funType", json_of_funcalltype json_cx funtype - ] - - | MethodT (_, _, _, propref, funtype, _) -> [ - "propRef", json_of_propref json_cx propref; - "funType", json_of_funcalltype json_cx funtype - ] - - | ReposLowerT (_, use_desc, use_t) -> [ - "type", _json_of_use_t json_cx use_t; - "useDesc", JSON_Bool use_desc; - ] - - | ReposUseT (_, use_desc, op, t) -> [ - "use", JSON_String (string_of_use_op op); - "type", _json_of_t json_cx t; - "useDesc", JSON_Bool use_desc; - ] - - | SetPropT (_, _, name, _, t, _) - | GetPropT (_, _, name, t) - | MatchPropT (_, _, name, t) - | TestPropT (_, _, name, t) -> [ - "propRef", json_of_propref json_cx name; - "propType", _json_of_t json_cx t - ] - | SetPrivatePropT (_, _, name, _, _, t, _) - | GetPrivatePropT (_, _, name, _, _, t) -> [ - "propRef", JSON_String name; - "propType", _json_of_t json_cx t - ] - - | SetElemT (_, _, indext, elemt, _) - | GetElemT (_, _, indext, elemt) -> [ - "indexType", _json_of_t json_cx indext; - "elemType", _json_of_t json_cx elemt - ] - - | CallElemT (_, _, indext, funtype) -> [ - "indexType", _json_of_t json_cx indext; - "funType", json_of_funcalltype json_cx funtype - ] - - | GetStaticsT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | GetProtoT (_, t) - | SetProtoT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | ConstructorT (_, _, targs, args, t) -> [ - "typeArgs", (match targs with - | None -> JSON_Null - | Some ts -> JSON_Array (List.map (_json_of_t json_cx) ts)); - "argTypes", JSON_Array (List.map (json_of_funcallarg json_cx) args); - "type", _json_of_t json_cx t - ] - - | SuperT (_, _, Derived {own; proto; static}) -> [ - "own", json_of_pmap json_cx own; - "proto", json_of_pmap json_cx proto; - "static", json_of_pmap json_cx static; - ] - - | ImplementsT (op, t) -> [ - "use", JSON_String (string_of_use_op op); - "instance", _json_of_t json_cx t; - ] - - | MixinT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | ToStringT (_, t) -> [ - "type", _json_of_use_t json_cx t - ] - - | AdderT (_, _, _, l, r) -> [ - "leftType", _json_of_t json_cx l; - "rightType", _json_of_t json_cx r - ] - - | ComparatorT (_, _, t) -> [ - "type", _json_of_t json_cx t - ] - - | UnaryMinusT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | PredicateT (p, t) -> [ - "pred", json_of_pred json_cx p; - "type", _json_of_t json_cx t - ] - - | GuardT (p, r, t) -> [ - "pred", json_of_pred json_cx p; - "result", _json_of_t json_cx r; - "sink", _json_of_t json_cx t - ] - - | EqT (_, _, t) -> [ - "type", _json_of_t json_cx t - ] - - | AndT (_, right, res) - | OrT (_, right, res) - | 
NullishCoalesceT (_, right, res) -> [ - "rightType", _json_of_t json_cx right; - "resultType", _json_of_t json_cx res - ] - - | NotT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | SpecializeT (_, _, _, cache, targs_opt, tvar) -> [ - "cache", json_of_specialize_cache json_cx cache - ] @ ( - match targs_opt with - | Some targs -> [ "types", JSON_Array (List.map (_json_of_t json_cx) targs) ] - | None -> [] - ) @ [ - "tvar", _json_of_t json_cx tvar - ] - - | ThisSpecializeT (_, this, k) -> - ("this", _json_of_t json_cx this) :: _json_of_cont json_cx k - - | VarianceCheckT (_, targs, polarity) -> [ - "types", JSON_Array (List.map (_json_of_t json_cx) targs); - "polarity", json_of_polarity json_cx polarity - ] - - | TypeAppVarianceCheckT (_, _, _, targs) -> [ - "typeArgs", JSON_Array (List.map (fun (t1, t2) -> - JSON_Object [ - "t1", _json_of_t json_cx t1; - "t2", _json_of_t json_cx t2; + +and _json_of_use_t_impl json_cx t = + Hh_json.( + JSON_Object + ( [ + ( "reason", + json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None (reason_of_use_t t) ); + ("kind", JSON_String (string_of_use_ctor t)); + ] + @ + match t with + | UseT (op, t) -> + [("use", JSON_String (string_of_use_op op)); ("type", _json_of_t json_cx t)] + | AssertArithmeticOperandT _ -> [] + | AssertBinaryInLHST _ -> [] + | AssertBinaryInRHST _ -> [] + | AssertForInRHST _ -> [] + | BecomeT (_, t) -> [("result", _json_of_t json_cx t)] + | BindT (_, _, funtype, pass) -> + [("funType", json_of_funcalltype json_cx funtype); ("passThrough", JSON_Bool pass)] + | CallT (_, _, funtype) -> [("funType", json_of_funcalltype json_cx funtype)] + | MethodT (_, _, _, propref, funtype, _) -> + [ + ("propRef", json_of_propref json_cx propref); + ("funType", json_of_funcalltype json_cx funtype); ] - ) targs) - ] - - | ConcretizeTypeAppsT (_, (ts1, _, _), (t2, ts2, _, _), will_flip) -> [ - "willFlip", JSON_Bool will_flip; - "currentTypeArgs", JSON_Array (List.map (_json_of_t json_cx) ts1); - "currentUpper", _json_of_t json_cx t2; - "currentUpperTypeArgs", JSON_Array (List.map (_json_of_t json_cx) ts2); - ] - - | LookupT (_, rstrict, _, propref, action) -> - (match rstrict with - | NonstrictReturning (default_opt, test_opt) -> - let ret = match default_opt with - | Some (default, result) -> + | ReposLowerT (_, use_desc, use_t) -> + [("type", _json_of_use_t json_cx use_t); ("useDesc", JSON_Bool use_desc)] + | ReposUseT (_, use_desc, op, t) -> + [ + ("use", JSON_String (string_of_use_op op)); + ("type", _json_of_t json_cx t); + ("useDesc", JSON_Bool use_desc); + ] + | SetPropT (_, _, name, _, _, t, _) + | GetPropT (_, _, name, t) + | MatchPropT (_, _, name, t) + | TestPropT (_, _, name, t) -> + [("propRef", json_of_propref json_cx name); ("propType", _json_of_t json_cx t)] + | SetPrivatePropT (_, _, name, _, _, _, t, _) + | GetPrivatePropT (_, _, name, _, _, t) -> + [("propRef", JSON_String name); ("propType", _json_of_t json_cx t)] + | SetElemT (_, _, indext, _, elemt, _) + | GetElemT (_, _, indext, elemt) -> + [("indexType", _json_of_t json_cx indext); ("elemType", _json_of_t json_cx elemt)] + | CallElemT (_, _, indext, funtype) -> + [ + ("indexType", _json_of_t json_cx indext); + ("funType", json_of_funcalltype json_cx funtype); + ] + | GetStaticsT (_, t) -> [("type", _json_of_t json_cx t)] + | GetProtoT (_, t) + | SetProtoT (_, t) -> + [("type", _json_of_t json_cx t)] + | ConstructorT (_, _, targs, args, t) -> + [ + ( "typeArgs", + match targs with + | None -> JSON_Null + | Some ts -> JSON_Array (Core_list.map ~f:(_json_of_targ 
json_cx) ts) ); + ("argTypes", JSON_Array (Core_list.map ~f:(json_of_funcallarg json_cx) args)); + ("type", _json_of_t json_cx t); + ] + | SuperT (_, _, Derived { own; proto; static }) -> + [ + ("own", json_of_pmap json_cx own); + ("proto", json_of_pmap json_cx proto); + ("static", json_of_pmap json_cx static); + ] + | ImplementsT (op, t) -> + [("use", JSON_String (string_of_use_op op)); ("instance", _json_of_t json_cx t)] + | MixinT (_, t) -> [("type", _json_of_t json_cx t)] + | ToStringT (_, t) -> [("type", _json_of_use_t json_cx t)] + | AdderT (_, _, _, l, r) -> + [("leftType", _json_of_t json_cx l); ("rightType", _json_of_t json_cx r)] + | ComparatorT (_, _, t) -> [("type", _json_of_t json_cx t)] + | UnaryMinusT (_, t) -> [("type", _json_of_t json_cx t)] + | PredicateT (p, t) -> [("pred", json_of_pred json_cx p); ("type", _json_of_t json_cx t)] + | GuardT (p, r, t) -> + [ + ("pred", json_of_pred json_cx p); + ("result", _json_of_t json_cx r); + ("sink", _json_of_t json_cx t); + ] + | EqT (_, _, t) -> [("type", _json_of_t json_cx t)] + | AndT (_, right, res) + | OrT (_, right, res) + | NullishCoalesceT (_, right, res) -> + [("rightType", _json_of_t json_cx right); ("resultType", _json_of_t json_cx res)] + | NotT (_, t) -> [("type", _json_of_t json_cx t)] + | SpecializeT (_, _, _, cache, targs_opt, tvar) -> + [("cache", json_of_specialize_cache json_cx cache)] + @ (match targs_opt with + | Some targs -> [("types", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) targs))] + | None -> []) + @ [("tvar", _json_of_t json_cx tvar)] + | ThisSpecializeT (_, this, k) -> + ("this", _json_of_t json_cx this) :: _json_of_cont json_cx k + | VarianceCheckT (_, targs, polarity) -> + [ + ("types", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) targs)); + ("polarity", json_of_polarity json_cx polarity); + ] + | TypeAppVarianceCheckT (_, _, _, targs) -> + [ + ( "typeArgs", + JSON_Array + (Core_list.map + ~f:(fun (t1, t2) -> + JSON_Object [("t1", _json_of_t json_cx t1); ("t2", _json_of_t json_cx t2)]) + targs) ); + ] + | ConcretizeTypeAppsT (_, (ts1, _, _), (t2, ts2, _, _), will_flip) -> + [ + ("willFlip", JSON_Bool will_flip); + ("currentTypeArgs", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) ts1)); + ("currentUpper", _json_of_t json_cx t2); + ("currentUpperTypeArgs", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) ts2)); + ] + | LookupT (_, rstrict, _, propref, action) -> + (match rstrict with + | NonstrictReturning (default_opt, test_opt) -> + let ret = + match default_opt with + | Some (default, result) -> + [ + ("defaultType", _json_of_t json_cx default); + ("resultType", _json_of_t json_cx result); + ] + | None -> [] + in + Option.value_map test_opt ~default:ret ~f:(fun (id, _) -> ("testID", int_ id) :: ret) + | Strict r -> + [("strictReason", json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None r)] + | ShadowRead (_, ids) -> [ - "defaultType", _json_of_t json_cx default; - "resultType", _json_of_t json_cx result; + ( "shadowRead", + JSON_Array + ( Nel.to_list ids + |> Core_list.map ~f:(fun id -> JSON_Number (Properties.string_of_id id)) ) ); ] - | None -> [] - in - Option.value_map test_opt ~default:ret ~f:(fun (id, _) -> ("testID", int_ id) :: ret) - | Strict r -> [ - "strictReason", json_of_reason ~strip_root:json_cx.strip_root r + | ShadowWrite ids -> + [ + ( "shadowWrite", + JSON_Array + ( Nel.to_list ids + |> Core_list.map ~f:(fun id -> JSON_Number (Properties.string_of_id id)) ) ); + ]) + @ [ + ("propref", json_of_propref json_cx propref); + ("action", 
json_of_lookup_action json_cx action); + ] + | ObjAssignFromT (_, _, proto, tvar, kind) -> + [ + ("target", _json_of_t json_cx proto); + ("resultType", _json_of_t json_cx tvar); + ("kind", json_of_obj_assign_kind json_cx kind); ] - | ShadowRead (_, ids) -> [ - "shadowRead", JSON_Array (Nel.to_list ids |> List.map (fun id -> - JSON_Number (Properties.string_of_id id) - ))] - | ShadowWrite ids -> [ - "shadowWrite", JSON_Array (Nel.to_list ids |> List.map (fun id -> - JSON_Number (Properties.string_of_id id) - ))] - ) @ [ - "propref", json_of_propref json_cx propref; - "action", json_of_lookup_action json_cx action - ] - - | ObjAssignFromT (_, proto, tvar, kind) -> [ - "target", _json_of_t json_cx proto; - "resultType", _json_of_t json_cx tvar; - "kind", json_of_obj_assign_kind json_cx kind; - ] - - | ObjAssignToT (_, from, tvar, kind) -> [ - "source", _json_of_t json_cx from; - "resultType", _json_of_t json_cx tvar; - "kind", json_of_obj_assign_kind json_cx kind; - ] - - | ObjFreezeT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | ObjRestT (_, excludes, tvar) -> [ - "excludedProps", JSON_Array (List.map (fun s -> JSON_String s) excludes); - "resultType", _json_of_t json_cx tvar; - ] - - | ObjSealT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | ObjTestProtoT (_, res) -> [ - "returnType", _json_of_t json_cx res - ] - - | ObjTestT (_, default, res) -> [ - "defaultType", _json_of_t json_cx default; - "resultType", _json_of_t json_cx res - ] - - | ArrRestT (_, _, i, t) -> [ - "index", JSON_Number (string_of_int i); - "resultType", _json_of_t json_cx t - ] - - | UnifyT (t1, t2) -> [ - "type1", _json_of_t json_cx t1; - "type2", _json_of_t json_cx t2 - ] - - | GetKeysT (_, t) -> [ - "type", _json_of_use_t json_cx t - ] - - | HasOwnPropT (_, _, key) -> [ - "key", JSON_Object (_json_of_string_literal key) - ] - - | GetValuesT (_, t) -> [ - "type", _json_of_t json_cx t - ] - - | ElemT (_, _, base, action) -> [ - "baseType", _json_of_t json_cx base; - match action with - | ReadElem t -> "readElem", _json_of_t json_cx t - | WriteElem (t, _) -> "writeElem", _json_of_t json_cx t - | CallElem (_, funtype) -> "callElem", json_of_funcalltype json_cx funtype - ] - - | MakeExactT (_, cont) -> _json_of_cont json_cx cont - - | CJSRequireT (_, export, _) -> [ - "export", - _json_of_t json_cx export - ] - | ImportModuleNsT (_, t, _) -> [ - "t_out", _json_of_t json_cx t - ] - | ImportDefaultT (_, import_kind, (local_name, module_name), t, _) -> [ - "import_kind", _json_of_import_kind import_kind; - "local_name", JSON_String local_name; - "module_name", JSON_String module_name; - "t_out", _json_of_t json_cx t; - ] - | ImportNamedT (_, import_kind, export_name, module_name, t, _) -> [ - "import_kind", _json_of_import_kind import_kind; - "export_name", JSON_String export_name; - "module_name", JSON_String module_name; - "t_out", _json_of_t json_cx t; - ] - | ImportTypeT (_, export_name, t) - | ImportTypeofT (_, export_name, t) -> [ - "export_name", JSON_String export_name; - "t_out", _json_of_t json_cx t; - ] - | AssertImportIsValueT (_, name) -> [ - "name", JSON_String name; - ] - - | AssertRestParamT _ -> [] - - | CJSExtractNamedExportsT (_, (module_t_reason, exporttypes, is_strict), t_out) -> [ - "module", _json_of_t json_cx (ModuleT (module_t_reason, exporttypes, is_strict)); - "t_out", _json_of_t json_cx t_out; - ] - | CopyNamedExportsT (_, target_module_t, t_out) -> [ - "target_module_t", _json_of_t json_cx target_module_t; - "t_out", _json_of_t json_cx t_out; - ] - | CopyTypeExportsT (_, 
target_module_t, t_out) -> [ - "target_module_t", _json_of_t json_cx target_module_t; - "t_out", _json_of_t json_cx t_out; - ] - | ExportNamedT (_, skip_dupes, tmap, t_out) -> [ - "skip_duplicates", JSON_Bool skip_dupes; - "tmap", json_of_loc_tmap json_cx tmap; - "t_out", _json_of_t json_cx t_out; - ] - | ExportTypeT (_, skip_dupes, name, t, t_out) -> [ - "skip_duplicates", JSON_Bool skip_dupes; - "name", JSON_String name; - "tmap", _json_of_t json_cx t; - "t_out", _json_of_t json_cx t_out; - ] - - | DebugPrintT _ -> [] - | DebugSleepT _ -> [] - - | MapTypeT (_, kind, t) -> [ - "kind", JSON_String (string_of_type_map kind); - "t", _json_of_t json_cx t; - ] - - | ObjKitT (_, _, _, _, tout) -> [ - "t_out", _json_of_t json_cx tout; - ] - - | ReactKitT (_, _, React.CreateElement0 (shape, config, (children, children_spread), t_out)) -> [ - "shape", JSON_Bool shape; - "config", _json_of_t json_cx config; - "children", JSON_Array (List.map (_json_of_t json_cx) children); - "childrenSpread", (match children_spread with - | Some children_spread -> _json_of_t json_cx children_spread - | None -> JSON_Null); - "returnType", _json_of_t json_cx t_out; - ] - - | ReactKitT (_, _, React.CreateElement (shape, component, config, (children, children_spread), t_out)) -> [ - "shape", JSON_Bool shape; - "component", _json_of_t json_cx component; - "config", _json_of_t json_cx config; - "children", JSON_Array (List.map (_json_of_t json_cx) children); - "childrenSpread", (match children_spread with - | Some children_spread -> _json_of_t json_cx children_spread - | None -> JSON_Null); - "returnType", _json_of_t json_cx t_out; - ] - - | ReactKitT _ -> [] (* TODO *) - - | ChoiceKitUseT (_, tool) -> [ - "tool", JSON_String (match tool with - | FullyResolveType _ -> "fullyResolveType" - | TryFlow _ -> "tryFlow" - ); - ] - - | IntersectionPreprocessKitT (_, tool) -> [ - "tool", JSON_String (match tool with - | ConcretizeTypes _ -> "concretizeTypes" - | SentinelPropTest _ -> "sentinelPropTest" - | PropExistsTest _ -> "propExistsTest" - ); - ] - - | SentinelPropTestT (_, l, key, sense, sentinel, result) -> [ - "l", _json_of_t json_cx l; - "key", JSON_String key; - "sense", JSON_Bool sense; - "sentinel", json_of_sentinel json_cx sentinel; - "result", _json_of_t json_cx result; - ] - | IdxUnwrap (_, t_out) -> [ - "t_out", _json_of_t json_cx t_out - ] - | IdxUnMaybeifyT (_, t_out) -> [ - "t_out", _json_of_t json_cx t_out - ] - - | OptionalChainT (_, _, uses) -> [ - "chain", JSON_Array (Nel.to_list @@ Nel.map (fun (use, tout) -> - _json_of_use_t json_cx (apply_opt_use use tout) - ) uses); - ] - - | InvariantT _ -> [] - - | CallLatentPredT (_, sense, offset, l, t) -> [ - "sense", JSON_Bool sense; - "offset", JSON_Number (spf "%d" offset); - "t_in", _json_of_t json_cx l; - "t_out", _json_of_t json_cx t - ] - - | CallOpenPredT (_, sense, key, l, t) -> [ - "sense", JSON_Bool sense; - "key", JSON_String (Key.string_of_key key); - "t_in", _json_of_t json_cx l; - "t_out", _json_of_t json_cx t - ] - - | SubstOnPredT (_, subst, t) -> [ - "PredWithSubst", JSON_Object [ - ("subst", JSON_Array (subst |> SMap.elements |> - List.map (fun (x,k) -> - JSON_Array [JSON_String x; JSON_String (Key.string_of_key k)]))); - ("pred_t", _json_of_t_impl json_cx t) - ] - ] - - | RefineT (_, p, t) -> [ - "Refined", JSON_Object [ - ("pred_t", json_of_pred json_cx p); - ("refined_t", _json_of_t_impl json_cx t) - ] - ] - | ResolveSpreadT (_, _, { - rrt_resolved; - rrt_unresolved; - rrt_resolve_to; - }) -> [ - "resolved", JSON_Array (List.map (fun param -> 
- let kind, t = match param with - | ResolvedArg t -> "ResolvedArg", t - | ResolvedSpreadArg (r, at) -> - "ResolvedSpreadArg", DefT (r, ArrT at) - | ResolvedAnySpreadArg r -> - "ResolvedAnySpreadArg", DefT (r, AnyT) - in - JSON_Object [ - "kind", JSON_String kind; - "type", _json_of_t_impl json_cx t; + | ObjAssignToT (_, _, from, tvar, kind) -> + [ + ("source", _json_of_t json_cx from); + ("resultType", _json_of_t json_cx tvar); + ("kind", json_of_obj_assign_kind json_cx kind); + ] + | ObjFreezeT (_, t) -> [("type", _json_of_t json_cx t)] + | ObjRestT (_, excludes, tvar) -> + [ + ("excludedProps", JSON_Array (Core_list.map ~f:(fun s -> JSON_String s) excludes)); + ("resultType", _json_of_t json_cx tvar); + ] + | ObjSealT (_, t) -> [("type", _json_of_t json_cx t)] + | ObjTestProtoT (_, res) -> [("returnType", _json_of_t json_cx res)] + | ObjTestT (_, default, res) -> + [("defaultType", _json_of_t json_cx default); ("resultType", _json_of_t json_cx res)] + | ArrRestT (_, _, i, t) -> + [("index", JSON_Number (string_of_int i)); ("resultType", _json_of_t json_cx t)] + | UnifyT (t1, t2) -> [("type1", _json_of_t json_cx t1); ("type2", _json_of_t json_cx t2)] + | GetKeysT (_, t) -> [("type", _json_of_use_t json_cx t)] + | HasOwnPropT (_, _, key) -> [("key", JSON_Object (_json_of_string_literal key))] + | GetValuesT (_, t) -> [("type", _json_of_t json_cx t)] + | ElemT (_, _, base, action) -> + [ + ("baseType", _json_of_t json_cx base); + (match action with + | ReadElem t -> ("readElem", _json_of_t json_cx t) + | WriteElem (t, _, _) -> ("writeElem", _json_of_t json_cx t) + | CallElem (_, funtype) -> ("callElem", json_of_funcalltype json_cx funtype)); + ] + | MakeExactT (_, cont) -> _json_of_cont json_cx cont + | CJSRequireT (_, export, _) -> [("export", _json_of_t json_cx export)] + | ImportModuleNsT (_, t, _) -> [("t_out", _json_of_t json_cx t)] + | ImportDefaultT (_, import_kind, (local_name, module_name), t, _) -> + [ + ("import_kind", _json_of_import_kind import_kind); + ("local_name", JSON_String local_name); + ("module_name", JSON_String module_name); + ("t_out", _json_of_t json_cx t); + ] + | ImportNamedT (_, import_kind, export_name, module_name, t, _) -> + [ + ("import_kind", _json_of_import_kind import_kind); + ("export_name", JSON_String export_name); + ("module_name", JSON_String module_name); + ("t_out", _json_of_t json_cx t); + ] + | ImportTypeT (_, export_name, t) + | ImportTypeofT (_, export_name, t) -> + [("export_name", JSON_String export_name); ("t_out", _json_of_t json_cx t)] + | AssertImportIsValueT (_, name) -> [("name", JSON_String name)] + | CJSExtractNamedExportsT (_, (module_t_reason, exporttypes, is_strict), t_out) -> + [ + ("module", _json_of_t json_cx (ModuleT (module_t_reason, exporttypes, is_strict))); + ("t_out", _json_of_t json_cx t_out); + ] + | CopyNamedExportsT (_, target_module_t, t_out) -> + [ + ("target_module_t", _json_of_t json_cx target_module_t); + ("t_out", _json_of_t json_cx t_out); + ] + | CopyTypeExportsT (_, target_module_t, t_out) -> + [ + ("target_module_t", _json_of_t json_cx target_module_t); + ("t_out", _json_of_t json_cx t_out); + ] + | ExportNamedT (_, skip_dupes, tmap, _export_kind, t_out) -> + [ + ("skip_duplicates", JSON_Bool skip_dupes); + ("tmap", json_of_loc_tmap json_cx tmap); + ("t_out", _json_of_t json_cx t_out); + ] + | ExportTypeT (_, skip_dupes, name, t, t_out) -> + [ + ("skip_duplicates", JSON_Bool skip_dupes); + ("name", JSON_String name); + ("tmap", _json_of_t json_cx t); + ("t_out", _json_of_t json_cx t_out); + ] + | 
AssertExportIsTypeT (_, name, t_out) -> + [("name", JSON_String name); ("t_out", _json_of_t json_cx t_out)] + | DebugPrintT _ -> [] + | DebugSleepT _ -> [] + | MapTypeT (_, _, kind, t) -> + [("kind", JSON_String (string_of_type_map kind)); ("t", _json_of_t json_cx t)] + | ObjKitT (_, _, _, _, tout) -> [("t_out", _json_of_t json_cx tout)] + | ReactKitT (_, _, React.CreateElement0 (shape, config, (children, children_spread), t_out)) + -> + [ + ("shape", JSON_Bool shape); + ("config", _json_of_t json_cx config); + ("children", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) children)); + ( "childrenSpread", + match children_spread with + | Some children_spread -> _json_of_t json_cx children_spread + | None -> JSON_Null ); + ("returnType", _json_of_t json_cx t_out); + ] + | ReactKitT + (_, _, React.CreateElement (shape, component, config, (children, children_spread), t_out)) + -> + [ + ("shape", JSON_Bool shape); + ("component", _json_of_t json_cx component); + ("config", _json_of_t json_cx config); + ("children", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) children)); + ( "childrenSpread", + match children_spread with + | Some children_spread -> _json_of_t json_cx children_spread + | None -> JSON_Null ); + ("returnType", _json_of_t json_cx t_out); ] - ) rrt_resolved); - "unresolved", JSON_Array (List.map (fun param -> - let kind, t = match param with - | UnresolvedArg t -> "UnresolvedArg", t - | UnresolvedSpreadArg t -> "UnresolvedSpreadArg", t in - JSON_Object [ - "kind", JSON_String kind; - "type", _json_of_t_impl json_cx t; + | ReactKitT _ -> [] (* TODO *) + | ChoiceKitUseT (_, tool) -> + [ + ( "tool", + JSON_String + (match tool with + | FullyResolveType _ -> "fullyResolveType" + | TryFlow _ -> "tryFlow") ); ] - ) rrt_unresolved); - "resolve_to", json_of_resolve_to json_cx rrt_resolve_to; - ] - | CondT (_, consequent, alternate, t_out) -> [ - "consequent", (match consequent with - | Some t -> _json_of_t json_cx t - | None -> JSON_Null); - "alternate", _json_of_t json_cx alternate; - "t_out", _json_of_t json_cx t_out; - ] - | ExtendsUseT (_, _, _, t1, t2) -> [ - "type1", _json_of_t json_cx t1; - "type2", _json_of_t json_cx t2 - ] - ) -) + | IntersectionPreprocessKitT (_, tool) -> + [ + ( "tool", + JSON_String + (match tool with + | ConcretizeTypes _ -> "concretizeTypes" + | SentinelPropTest _ -> "sentinelPropTest" + | PropExistsTest _ -> "propExistsTest") ); + ] + | SentinelPropTestT (_, l, key, sense, sentinel, result) -> + [ + ("l", _json_of_t json_cx l); + ("key", JSON_String key); + ("sense", JSON_Bool sense); + ("sentinel", json_of_sentinel json_cx sentinel); + ("result", _json_of_t json_cx result); + ] + | IdxUnwrap (_, t_out) -> [("t_out", _json_of_t json_cx t_out)] + | IdxUnMaybeifyT (_, t_out) -> [("t_out", _json_of_t json_cx t_out)] + | OptionalChainT (_, _, uses) -> + [ + ( "chain", + JSON_Array + ( Nel.to_list + @@ Nel.map (fun (use, tout) -> _json_of_use_t json_cx (apply_opt_use use tout)) uses + ) ); + ] + | InvariantT _ -> [] + | CallLatentPredT (_, sense, offset, l, t) -> + [ + ("sense", JSON_Bool sense); + ("offset", JSON_Number (spf "%d" offset)); + ("t_in", _json_of_t json_cx l); + ("t_out", _json_of_t json_cx t); + ] + | CallOpenPredT (_, sense, key, l, t) -> + [ + ("sense", JSON_Bool sense); + ("key", JSON_String (Key.string_of_key key)); + ("t_in", _json_of_t json_cx l); + ("t_out", _json_of_t json_cx t); + ] + | SubstOnPredT (_, subst, t) -> + [ + ( "PredWithSubst", + JSON_Object + [ + ( "subst", + JSON_Array + ( subst + |> SMap.elements + |> Core_list.map 
~f:(fun (x, k) -> + JSON_Array [JSON_String x; JSON_String (Key.string_of_key k)]) ) ); + ("pred_t", _json_of_t_impl json_cx t); + ] ); + ] + | ReactPropsToOut (_, props) -> [("props", _json_of_t json_cx props)] + | ReactInToProps (_, props) -> [("props", _json_of_t json_cx props)] + | RefineT (_, p, t) -> + [ + ( "Refined", + JSON_Object + [("pred_t", json_of_pred json_cx p); ("refined_t", _json_of_t_impl json_cx t)] ); + ] + | ResolveSpreadT (_, _, { rrt_resolved; rrt_unresolved; rrt_resolve_to }) -> + [ + ( "resolved", + JSON_Array + (Core_list.map + ~f:(fun param -> + let (kind, t) = + match param with + | ResolvedArg t -> ("ResolvedArg", t) + | ResolvedSpreadArg (r, at) -> + ("ResolvedSpreadArg", DefT (r, bogus_trust (), ArrT at)) + | ResolvedAnySpreadArg r -> ("ResolvedAnySpreadArg", AnyT.make Untyped r) + in + JSON_Object [("kind", JSON_String kind); ("type", _json_of_t_impl json_cx t)]) + rrt_resolved) ); + ( "unresolved", + JSON_Array + (Core_list.map + ~f:(fun param -> + let (kind, t) = + match param with + | UnresolvedArg t -> ("UnresolvedArg", t) + | UnresolvedSpreadArg t -> ("UnresolvedSpreadArg", t) + in + JSON_Object [("kind", JSON_String kind); ("type", _json_of_t_impl json_cx t)]) + rrt_unresolved) ); + ("resolve_to", json_of_resolve_to json_cx rrt_resolve_to); + ] + | CondT (_, consequent, alternate, t_out) -> + [ + ( "consequent", + match consequent with + | Some t -> _json_of_t json_cx t + | None -> JSON_Null ); + ("alternate", _json_of_t json_cx alternate); + ("t_out", _json_of_t json_cx t_out); + ] + | ExtendsUseT (_, _, _, t1, t2) -> + [("type1", _json_of_t json_cx t1); ("type2", _json_of_t json_cx t2)] + | DestructuringT (_, k, s, t_out) -> + [ + ( "kind", + JSON_String + (match k with + | DestructAnnot -> "annot" + | DestructInfer -> "infer") ); + ("selector", json_of_selector json_cx s); + ("t_out", _json_of_t json_cx t_out); + ] + | ModuleExportsAssignT (_, assign, t_out) -> + [("assign", _json_of_t json_cx assign); ("t_out", _json_of_t json_cx t_out)] )) and json_of_resolve_to json_cx = check_depth json_of_resolve_to_impl json_cx -and json_of_resolve_to_impl json_cx resolve_to = Hh_json.(JSON_Object ( - match resolve_to with - | ResolveSpreadsToTuple (id, elem_t, tout) - | ResolveSpreadsToArrayLiteral (id, elem_t, tout) -> [ - "id", JSON_Number (string_of_int id); - "elem_t", _json_of_t json_cx elem_t; - "t_out", _json_of_t json_cx tout; - ] - | ResolveSpreadsToArray (elem_t, tout) -> [ - "elem_t", _json_of_t json_cx elem_t; - "t_out", _json_of_t json_cx tout; - ] - | ResolveSpreadsToMultiflowCallFull (id, ft) - | ResolveSpreadsToMultiflowSubtypeFull (id, ft) -> [ - "id", JSON_Number (string_of_int id); - "funtype", json_of_funtype json_cx ft; - ] - | ResolveSpreadsToCustomFunCall (id, kind, tout) -> [ - "id", JSON_Number (string_of_int id); - "kind", _json_of_custom_fun_kind kind; - "t_out", _json_of_t json_cx tout; - ] - | ResolveSpreadsToMultiflowPartial (id, ft, call_reason, tout) -> [ - "id", JSON_Number (string_of_int id); - "funtype", json_of_funtype json_cx ft; - "callReason", json_of_reason ~strip_root:json_cx.strip_root call_reason; - "t_out", _json_of_t json_cx tout; - ] - | ResolveSpreadsToCallT (fct, tin) -> [ - "funcalltype", json_of_funcalltype json_cx fct; - "t_in", _json_of_t json_cx tin; - ] -)) + +and json_of_resolve_to_impl json_cx resolve_to = + Hh_json.( + JSON_Object + (match resolve_to with + | ResolveSpreadsToTuple (id, elem_t, tout) + | ResolveSpreadsToArrayLiteral (id, elem_t, tout) -> + [ + ("id", JSON_Number (string_of_int id)); + 
("elem_t", _json_of_t json_cx elem_t); + ("t_out", _json_of_t json_cx tout); + ] + | ResolveSpreadsToArray (elem_t, tout) -> + [("elem_t", _json_of_t json_cx elem_t); ("t_out", _json_of_t json_cx tout)] + | ResolveSpreadsToMultiflowCallFull (id, ft) + | ResolveSpreadsToMultiflowSubtypeFull (id, ft) -> + [("id", JSON_Number (string_of_int id)); ("funtype", json_of_funtype json_cx ft)] + | ResolveSpreadsToCustomFunCall (id, kind, tout) -> + [ + ("id", JSON_Number (string_of_int id)); + ("kind", _json_of_custom_fun_kind kind); + ("t_out", _json_of_t json_cx tout); + ] + | ResolveSpreadsToMultiflowPartial (id, ft, call_reason, tout) -> + [ + ("id", JSON_Number (string_of_int id)); + ("funtype", json_of_funtype json_cx ft); + ( "callReason", + json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None call_reason ); + ("t_out", _json_of_t json_cx tout); + ] + | ResolveSpreadsToCallT (fct, tin) -> + [("funcalltype", json_of_funcalltype json_cx fct); ("t_in", _json_of_t json_cx tin)])) and _json_of_enum _json_cx = function - | Enum.Str s -> Hh_json.JSON_String s - | Enum.Num (_, raw) -> Hh_json.JSON_String raw - | Enum.Bool b -> Hh_json.JSON_Bool b - | Enum.Null -> Hh_json.JSON_Null - | Enum.Void -> Hh_json.JSON_Null (* hmm, undefined doesn't exist in JSON *) + | UnionEnum.Str s -> Hh_json.JSON_String s + | UnionEnum.Num (_, raw) -> Hh_json.JSON_String raw + | UnionEnum.Bool b -> Hh_json.JSON_Bool b + | UnionEnum.Null -> Hh_json.JSON_Null + | UnionEnum.Void -> Hh_json.JSON_Null +(* hmm, undefined doesn't exist in JSON *) and json_of_sentinel json_cx = check_depth json_of_sentinel_impl json_cx + and json_of_sentinel_impl json_cx = function - | Enum.One enum -> _json_of_enum json_cx enum - | Enum.Many enums -> - Hh_json.JSON_Array ( - List.map (_json_of_enum json_cx) @@ EnumSet.elements enums - ) + | UnionEnum.One enum -> _json_of_enum json_cx enum + | UnionEnum.Many enums -> + Hh_json.JSON_Array (Core_list.map ~f:(_json_of_enum json_cx) @@ UnionEnumSet.elements enums) and json_of_polarity json_cx = check_depth json_of_polarity_impl json_cx -and json_of_polarity_impl _json_cx polarity = - Hh_json.JSON_String (string_of_polarity polarity) + +and json_of_polarity_impl _json_cx polarity = Hh_json.JSON_String (string_of_polarity polarity) and json_of_typeparam json_cx = check_depth json_of_typeparam_impl json_cx -and json_of_typeparam_impl json_cx tparam = Hh_json.( - JSON_Object ([ - "reason", json_of_reason ~strip_root:json_cx.strip_root tparam.reason; - "name", JSON_String tparam.name; - "bound", _json_of_t json_cx tparam.bound; - "polarity", json_of_polarity json_cx tparam.polarity; - ] @ match tparam.default with - | None -> [] - | Some t -> ["default", _json_of_t json_cx t]) -) + +and json_of_typeparam_impl json_cx tparam = + Hh_json.( + JSON_Object + ( [ + ("reason", json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None tparam.reason); + ("name", JSON_String tparam.name); + ("bound", _json_of_t json_cx tparam.bound); + ("polarity", json_of_polarity json_cx tparam.polarity); + ] + @ + match tparam.default with + | None -> [] + | Some t -> [("default", _json_of_t json_cx t)] )) and json_of_objtype json_cx = check_depth json_of_objtype_impl json_cx -and json_of_objtype_impl json_cx objtype = Hh_json.( - let pmap = Context.find_props json_cx.cx objtype.props_tmap in - JSON_Object ([ - "flags", json_of_flags json_cx objtype.flags; - ] @ (match objtype.dict_t with - | None -> [] - | Some d -> ["dictType", json_of_dicttype json_cx d] - ) @ [ - "propTypes", json_of_pmap json_cx pmap; - 
"prototype", _json_of_t json_cx objtype.proto_t - ]) -) -and json_of_dicttype json_cx = check_depth json_of_dicttype_impl json_cx -and json_of_dicttype_impl json_cx dicttype = Hh_json.( - JSON_Object ( - (match dicttype.dict_name with - | None -> [] - | Some name -> ["name", JSON_String name] - ) @ [ - "keyType", _json_of_t json_cx dicttype.key; - "valueType", _json_of_t json_cx dicttype.value - ]) -) +and json_of_objtype_impl json_cx objtype = + Hh_json.( + let pmap = Context.find_props json_cx.cx objtype.props_tmap in + JSON_Object + ( [("flags", json_of_flags json_cx objtype.flags)] + @ (match objtype.dict_t with + | None -> [] + | Some d -> [("dictType", json_of_dicttype json_cx d)]) + @ [ + ("propTypes", json_of_pmap json_cx pmap); + ("prototype", _json_of_t json_cx objtype.proto_t); + ] )) -and json_of_flags json_cx = check_depth json_of_flags_impl json_cx -and json_of_flags_impl _json_cx flags = Hh_json.( - JSON_Object [ - "frozen", JSON_Bool flags.frozen; - "sealed", JSON_Bool (match flags.sealed with - | Sealed -> true - | UnsealedInFile _ -> false); - "exact", JSON_Bool flags.exact; - ] -) +and json_of_dicttype json_cx = check_depth json_of_dicttype_impl json_cx -and json_of_changeset json_cx = check_depth json_of_changeset_impl json_cx -and json_of_changeset_impl _json_cx = Hh_json.( - - let json_of_entry_ref (scope_id, name, op) = - JSON_Object [ - "scope_id", int_ scope_id; - "name", JSON_String name; - "op", JSON_String (Changeset.string_of_op op) - ] - in +and json_of_dicttype_impl json_cx dicttype = + Hh_json.( + JSON_Object + ( (match dicttype.dict_name with + | None -> [] + | Some name -> [("name", JSON_String name)]) + @ [ + ("keyType", _json_of_t json_cx dicttype.key); + ("valueType", _json_of_t json_cx dicttype.value); + ] )) - let json_of_changed_vars changed_vars = - JSON_Array (List.rev (Changeset.EntryRefSet.fold - (fun entry_ref acc -> json_of_entry_ref entry_ref :: acc) - changed_vars [] - )) - in +and json_of_flags json_cx = check_depth json_of_flags_impl json_cx - let json_of_refi_ref (scope_id, key, op) = - JSON_Object [ - "scope_id", int_ scope_id; - "key", JSON_String (Key.string_of_key key); - "op", JSON_String (Changeset.string_of_op op) - ] - in +and json_of_flags_impl _json_cx flags = + Hh_json.( + JSON_Object + [ + ("frozen", JSON_Bool flags.frozen); + ( "sealed", + JSON_Bool + (match flags.sealed with + | Sealed -> true + | UnsealedInFile _ -> false) ); + ("exact", JSON_Bool flags.exact); + ]) - let json_of_changed_refis changed_refis = - JSON_Array (List.rev (Changeset.RefiRefSet.fold - (fun refi_ref acc -> json_of_refi_ref refi_ref :: acc) - changed_refis [] - )) - in +and json_of_changeset json_cx = check_depth json_of_changeset_impl json_cx - fun (changed_vars, changed_refis) -> - JSON_Object [ - "vars", json_of_changed_vars changed_vars; - "refis", json_of_changed_refis changed_refis - ] -) +and json_of_changeset_impl _json_cx = + Hh_json.( + let json_of_entry_ref (scope_id, name, op) = + JSON_Object + [ + ("scope_id", int_ scope_id); + ("name", JSON_String name); + ("op", JSON_String (Changeset.string_of_op op)); + ] + in + let json_of_changed_vars changed_vars = + JSON_Array + (List.rev + (Changeset.EntryRefSet.fold + (fun entry_ref acc -> json_of_entry_ref entry_ref :: acc) + changed_vars + [])) + in + let json_of_refi_ref (scope_id, key, op) = + JSON_Object + [ + ("scope_id", int_ scope_id); + ("key", JSON_String (Key.string_of_key key)); + ("op", JSON_String (Changeset.string_of_op op)); + ] + in + let json_of_changed_refis changed_refis = + 
JSON_Array + (List.rev + (Changeset.RefiRefSet.fold + (fun refi_ref acc -> json_of_refi_ref refi_ref :: acc) + changed_refis + [])) + in + fun (changed_vars, changed_refis) -> + JSON_Object + [ + ("vars", json_of_changed_vars changed_vars); + ("refis", json_of_changed_refis changed_refis); + ]) and json_of_funtype json_cx = check_depth json_of_funtype_impl json_cx -and json_of_funtype_impl json_cx { - this_t; - params; - rest_param; - return_t; - is_predicate; - closure_t; - changeset; - def_reason; -} = Hh_json.( - let rec params_names (any, names_rev) = function - | [] -> if any then Some names_rev else None - | (None, _)::xs -> params_names (any, "_"::names_rev) xs - | (Some name, _)::xs -> params_names (true, name::names_rev) xs - in - JSON_Object ([ - "thisType", _json_of_t json_cx this_t; - "paramTypes", JSON_Array (List.map (fun (_, t) -> _json_of_t json_cx t) params) - ] @ (match params_names (false, []) params with - | None -> [] - | Some names_rev -> [ - "paramNames", - JSON_Array (List.rev_map (fun s -> JSON_String s) names_rev) - ] - ) @ [ - "restParam", (match rest_param with - | None -> JSON_Null - | Some (name, _, t) -> JSON_Object ( - [ - "restParamType", _json_of_t json_cx t; - ] @ (match name with + +and json_of_funtype_impl + json_cx + { this_t; params; rest_param; return_t; is_predicate; closure_t; changeset; def_reason } = + Hh_json.( + let rec params_names (any, names_rev) = function + | [] -> + if any then + Some names_rev + else + None + | (None, _) :: xs -> params_names (any, "_" :: names_rev) xs + | (Some name, _) :: xs -> params_names (true, name :: names_rev) xs + in + JSON_Object + ( [ + ("thisType", _json_of_t json_cx this_t); + ("paramTypes", JSON_Array (Core_list.map ~f:(fun (_, t) -> _json_of_t json_cx t) params)); + ] + @ (match params_names (false, []) params with | None -> [] - | Some name -> ["restParamName", JSON_String name]))); - "returnType", _json_of_t json_cx return_t; - "isPredicate", JSON_Bool is_predicate; - "closureIndex", int_ closure_t; - "changeset", json_of_changeset json_cx changeset; - "defLoc", json_of_reason ~strip_root:json_cx.strip_root def_reason; - ]) -) + | Some names_rev -> + [("paramNames", JSON_Array (List.rev_map (fun s -> JSON_String s) names_rev))]) + @ [ + ( "restParam", + match rest_param with + | None -> JSON_Null + | Some (name, _, t) -> + JSON_Object + ( [("restParamType", _json_of_t json_cx t)] + @ + match name with + | None -> [] + | Some name -> [("restParamName", JSON_String name)] ) ); + ("returnType", _json_of_t json_cx return_t); + ("isPredicate", JSON_Bool is_predicate); + ("closureIndex", int_ closure_t); + ("changeset", json_of_changeset json_cx changeset); + ("defLoc", json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None def_reason); + ] )) and json_of_funcalltype json_cx = check_depth json_of_funcalltype_impl json_cx -and json_of_funcalltype_impl json_cx { - call_this_t; - call_targs; - call_args_tlist; - call_tout; - call_closure_t; - call_strict_arity; -} = Hh_json.( - let arg_types = List.map (json_of_funcallarg json_cx) call_args_tlist in - JSON_Object ([ - "thisType", _json_of_t json_cx call_this_t; - "typeArgs", (match call_targs with - | None -> JSON_Null - | Some ts -> JSON_Array (List.map (_json_of_t json_cx) ts)); - "argTypes", JSON_Array arg_types; - "tout", _json_of_t json_cx call_tout; - "closureIndex", int_ call_closure_t; - "strictArity", JSON_Bool call_strict_arity; - ]) -) + +and json_of_funcalltype_impl + json_cx + { call_this_t; call_targs; call_args_tlist; call_tout; 
call_closure_t; call_strict_arity } = + Hh_json.( + let arg_types = Core_list.map ~f:(json_of_funcallarg json_cx) call_args_tlist in + JSON_Object + [ + ("thisType", _json_of_t json_cx call_this_t); + ( "typeArgs", + match call_targs with + | None -> JSON_Null + | Some ts -> JSON_Array (Core_list.map ~f:(_json_of_targ json_cx) ts) ); + ("argTypes", JSON_Array arg_types); + ("tout", _json_of_t json_cx call_tout); + ("closureIndex", int_ call_closure_t); + ("strictArity", JSON_Bool call_strict_arity); + ]) and json_of_funcallarg json_cx = check_depth json_of_funcallarg_impl json_cx + and json_of_funcallarg_impl json_cx arg = - let kind, t = match arg with - | Arg t -> "argument", t - | SpreadArg t -> "spread", t + let (kind, t) = + match arg with + | Arg t -> ("argument", t) + | SpreadArg t -> ("spread", t) in - - Hh_json.(JSON_Object ([ - "argKind", JSON_String kind; - "argType", _json_of_t json_cx t; - ])) + Hh_json.(JSON_Object [("argKind", JSON_String kind); ("argType", _json_of_t json_cx t)]) and json_of_insttype json_cx = check_depth json_of_insttype_impl json_cx -and json_of_insttype_impl json_cx insttype = Hh_json.( - let own_props = Context.find_props json_cx.cx insttype.own_props in - let proto_props = Context.find_props json_cx.cx insttype.proto_props in - JSON_Object [ - "classId", int_ insttype.class_id; - "typeArgs", JSON_Array (List.map (fun (x, _, t, p) -> - JSON_Object [ - "name", JSON_String x; - "type", _json_of_t json_cx t; - "polarity", json_of_polarity json_cx p; - ] - ) insttype.type_args); - "fieldTypes", json_of_pmap json_cx own_props; - "methodTypes", json_of_pmap json_cx proto_props; - "mixins", JSON_Bool insttype.has_unknown_react_mixins; - "structural", JSON_Bool insttype.structural; - ] -) + +and json_of_insttype_impl json_cx insttype = + Hh_json.( + let own_props = Context.find_props json_cx.cx insttype.own_props in + let proto_props = Context.find_props json_cx.cx insttype.proto_props in + let inst_kind = + match insttype.inst_kind with + | ClassKind -> JSON_String "class" + | InterfaceKind { inline } -> JSON_Object [("inline", JSON_Bool inline)] + in + JSON_Object + [ + ("classId", json_of_aloc ~offset_table:None insttype.class_id); + ( "typeArgs", + JSON_Array + (Core_list.map + ~f:(fun (x, _, t, p) -> + JSON_Object + [ + ("name", JSON_String x); + ("type", _json_of_t json_cx t); + ("polarity", json_of_polarity json_cx p); + ]) + insttype.type_args) ); + ("fieldTypes", json_of_pmap json_cx own_props); + ("methodTypes", json_of_pmap json_cx proto_props); + ("mixins", JSON_Bool insttype.has_unknown_react_mixins); + ("inst_kind", inst_kind); + ]) and json_of_selector json_cx = check_depth json_of_selector_impl json_cx -and json_of_selector_impl json_cx = Hh_json.(function - | Prop x -> JSON_Object [ - "propName", JSON_String x; - ] - | Elem key -> JSON_Object [ - "keyType", _json_of_t json_cx key; - ] - | ObjRest excludes -> JSON_Object [ - "excludedProps", JSON_Array (List.map (fun s -> JSON_String s) excludes); - ] - | ArrRest i -> JSON_Object [ - "index", JSON_Number (string_of_int i); - ] - | Default -> JSON_Object [ - "default", JSON_Bool true; - ] - | Become -> JSON_Object [ - "become", JSON_Bool true; - ] - | Refine p -> JSON_Object [ - "predicate", json_of_pred json_cx p - ] -) + +and json_of_selector_impl json_cx = + Hh_json.( + function + | Prop (x, _) -> JSON_Object [("propName", JSON_String x)] + | Elem key -> JSON_Object [("keyType", _json_of_t json_cx key)] + | ObjRest excludes -> + JSON_Object + [("excludedProps", JSON_Array (Core_list.map 
~f:(fun s -> JSON_String s) excludes))] + | ArrRest i -> JSON_Object [("index", JSON_Number (string_of_int i))] + | Default -> JSON_Object [("default", JSON_Bool true)]) and json_of_destructor json_cx = check_depth json_of_destructor_impl json_cx -and json_of_destructor_impl json_cx = Hh_json.(function - | NonMaybeType -> JSON_Object [ - "non null/void", JSON_Bool true; - ] - | PropertyType x -> JSON_Object [ - "propName", JSON_String x; - ] - | ElementType t -> JSON_Object [ - "elementType", _json_of_t json_cx t - ] - | Bind t -> JSON_Object [ - "thisType", _json_of_t json_cx t - ] - | ReadOnlyType -> JSON_Object [ - "readOnly", JSON_Bool true - ] - | SpreadType (target, ts) -> - let open Object.Spread in - JSON_Object ( - (match target with - | Value -> [ - "target", JSON_String "Value"; - ] - | Annot { make_exact } -> [ - "target", JSON_String "Annot"; - "makeExact", JSON_Bool make_exact; - ] - ) @ [ - "spread", JSON_Array (List.map (_json_of_t json_cx) ts); - ] - ) - | RestType (merge_mode, t) -> - let open Object.Rest in - JSON_Object [ - "mergeMode", JSON_String (match merge_mode with - | Sound -> "Sound" - | IgnoreExactAndOwn -> "IgnoreExactAndOwn" - | ReactConfigMerge -> "ReactConfigMerge"); - "restType", _json_of_t json_cx t; - ] - | ValuesType -> JSON_Object [ - "values", JSON_Bool true; - ] - | CallType args -> JSON_Object [ - "args", JSON_Array (List.map (_json_of_t json_cx) args); - ] - | TypeMap tmap -> json_of_type_map json_cx tmap - | ReactElementPropsType -> JSON_Object [ - "reactElementProps", JSON_Bool true - ] - | ReactElementConfigType -> JSON_Object [ - "reactElementConfig", JSON_Bool true - ] - | ReactElementRefType -> JSON_Object [ - "reactElementRef", JSON_Bool true - ] -) + +and json_of_destructor_impl json_cx = + Hh_json.( + function + | NonMaybeType -> JSON_Object [("non null/void", JSON_Bool true)] + | PropertyType x -> JSON_Object [("propName", JSON_String x)] + | ElementType t -> JSON_Object [("elementType", _json_of_t json_cx t)] + | Bind t -> JSON_Object [("thisType", _json_of_t json_cx t)] + | ReadOnlyType -> JSON_Object [("readOnly", JSON_Bool true)] + | SpreadType (target, ts, head_slice) -> + Object.Spread.( + JSON_Object + ( (match target with + | Value -> [("target", JSON_String "Value")] + | Annot { make_exact } -> + [("target", JSON_String "Annot"); ("makeExact", JSON_Bool make_exact)]) + @ [ + ("spread", JSON_Array (Core_list.map ~f:(json_of_spread_operand json_cx) ts)); + ( "head_slice", + match head_slice with + | None -> JSON_Null + | Some head_slice -> json_of_spread_operand_slice json_cx head_slice ); + ] )) + | RestType (merge_mode, t) -> + Object.Rest.( + JSON_Object + [ + ( "mergeMode", + JSON_String + (match merge_mode with + | Sound -> "Sound" + | IgnoreExactAndOwn -> "IgnoreExactAndOwn" + | ReactConfigMerge _ -> "ReactConfigMerge") ); + ("restType", _json_of_t json_cx t); + ]) + | ValuesType -> JSON_Object [("values", JSON_Bool true)] + | CallType args -> + JSON_Object [("args", JSON_Array (Core_list.map ~f:(_json_of_t json_cx) args))] + | TypeMap tmap -> json_of_type_map json_cx tmap + | ReactElementPropsType -> JSON_Object [("reactElementProps", JSON_Bool true)] + | ReactElementConfigType -> JSON_Object [("reactElementConfig", JSON_Bool true)] + | ReactElementRefType -> JSON_Object [("reactElementRef", JSON_Bool true)] + | ReactConfigType t -> + JSON_Object [("reactConfig", JSON_Bool true); ("default_props", _json_of_t json_cx t)]) + +and json_of_spread_operand_slice json_cx { Object.Spread.reason; prop_map; dict } = + Hh_json.( + 
JSON_Object + [ + ("reason", json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None reason); + ( "props", + JSON_Object (SMap.fold (fun k p acc -> (k, json_of_prop json_cx p) :: acc) prop_map []) + ); + ( "dict", + match dict with + | Some dict -> json_of_dicttype json_cx dict + | None -> JSON_Null ); + ]) + +and json_of_spread_operand json_cx = + Hh_json.( + function + | Object.Spread.Slice operand_slice -> + JSON_Object + [ + ("kind", JSON_String "slice"); + ("slice", json_of_spread_operand_slice json_cx operand_slice); + ] + | Object.Spread.Type t -> + JSON_Object [("kind", JSON_String "type"); ("type", _json_of_t json_cx t)]) and json_of_type_map json_cx = check_depth json_of_type_map_impl json_cx -and json_of_type_map_impl json_cx = Hh_json.(function - | TupleMap t -> JSON_Object [ - "tupleMap", _json_of_t json_cx t; - ] - | ObjectMap t -> JSON_Object [ - "objectMap", _json_of_t json_cx t; - ] - | ObjectMapi t -> JSON_Object [ - "objectMapi", _json_of_t json_cx t; - ] -) + +and json_of_type_map_impl json_cx = + Hh_json.( + function + | TupleMap t -> JSON_Object [("tupleMap", _json_of_t json_cx t)] + | ObjectMap t -> JSON_Object [("objectMap", _json_of_t json_cx t)] + | ObjectMapi t -> JSON_Object [("objectMapi", _json_of_t json_cx t)]) and json_of_propref json_cx = check_depth json_of_propref_impl json_cx -and json_of_propref_impl json_cx = Hh_json.(function - | Named (r, x) -> JSON_Object [ - "reason", json_of_reason ~strip_root:json_cx.strip_root r; - "name", JSON_String x; - ] - | Computed t -> JSON_Object [ - "elem", _json_of_t json_cx t - ] -) + +and json_of_propref_impl json_cx = + Hh_json.( + function + | Named (r, x) -> + JSON_Object + [ + ("reason", json_of_reason ~strip_root:json_cx.strip_root ~offset_table:None r); + ("name", JSON_String x); + ] + | Computed t -> JSON_Object [("elem", _json_of_t json_cx t)]) and json_of_loc_tmap json_cx = check_depth json_of_loc_tmap_impl json_cx -and json_of_loc_tmap_impl json_cx bindings = Hh_json.( - let lst = SMap.fold (fun name (loc, t) acc -> - json_of_type_binding json_cx (name, (loc, t)) :: acc - ) bindings [] in - JSON_Array (List.rev lst) -) + +and json_of_loc_tmap_impl json_cx bindings = + Hh_json.( + let lst = + SMap.fold + (fun name (loc, t) acc -> json_of_type_binding json_cx (name, (loc, t)) :: acc) + bindings + [] + in + JSON_Array (List.rev lst)) and json_of_pmap json_cx = check_depth json_of_pmap_impl json_cx -and json_of_pmap_impl json_cx bindings = Hh_json.( - let lst = SMap.fold (fun name p acc -> - json_of_prop_binding json_cx (name, p) :: acc - ) bindings [] in - JSON_Array (List.rev lst) -) + +and json_of_pmap_impl json_cx bindings = + Hh_json.( + let lst = + SMap.fold (fun name p acc -> json_of_prop_binding json_cx (name, p) :: acc) bindings [] + in + JSON_Array (List.rev lst)) and json_of_defer_use_t json_cx = check_depth json_of_defer_use_t_impl json_cx -and json_of_defer_use_t_impl json_cx = Hh_json.(function - | DestructuringT (_, s) -> JSON_Object [ - "selector", json_of_selector json_cx s - ] - | TypeDestructorT (_, _, s) -> JSON_Object [ - "destructor", json_of_destructor json_cx s - ] -) + +and json_of_defer_use_t_impl json_cx = + Hh_json.( + function + | LatentPredT (_, p) -> JSON_Object [("predicate", json_of_pred json_cx p)] + | TypeDestructorT (_, _, s) -> JSON_Object [("destructor", json_of_destructor json_cx s)]) and json_of_prop_binding json_cx = check_depth json_of_prop_binding_impl json_cx -and json_of_prop_binding_impl json_cx (name, p) = Hh_json.( - JSON_Object [ - "name", JSON_String 
name; - "prop", json_of_prop json_cx p; - ] -) + +and json_of_prop_binding_impl json_cx (name, p) = + Hh_json.(JSON_Object [("name", JSON_String name); ("prop", json_of_prop json_cx p)]) and json_of_prop json_cx = check_depth json_of_prop_impl json_cx -and json_of_prop_impl json_cx p = Hh_json.( - JSON_Object (match p with - | Field (_loc, t, polarity) -> [ - "field", _json_of_t json_cx t; - "polarity", json_of_polarity json_cx polarity - ] - | Get (_loc, t) -> [ - "getter", _json_of_t json_cx t; - ] - | Set (_loc, t) -> [ - "setter", _json_of_t json_cx t; - ] - | GetSet (_loc1, t1, _loc2, t2) -> [ - "getter", _json_of_t json_cx t1; - "setter", _json_of_t json_cx t2; - ] - | Method (_loc, t) -> [ - "method", _json_of_t json_cx t; - ] -)) + +and json_of_prop_impl json_cx p = + Hh_json.( + JSON_Object + (match p with + | Field (_loc, t, polarity) -> + [("field", _json_of_t json_cx t); ("polarity", json_of_polarity json_cx polarity)] + | Get (_loc, t) -> [("getter", _json_of_t json_cx t)] + | Set (_loc, t) -> [("setter", _json_of_t json_cx t)] + | GetSet (_loc1, t1, _loc2, t2) -> + [("getter", _json_of_t json_cx t1); ("setter", _json_of_t json_cx t2)] + | Method (_loc, t) -> [("method", _json_of_t json_cx t)])) and json_of_type_binding json_cx = check_depth json_of_type_binding_impl json_cx -and json_of_type_binding_impl json_cx (name, (loc, t)) = Hh_json.( - let loc_json = match loc with - | None -> Hh_json.JSON_Null - | Some loc -> json_of_loc ~strip_root:json_cx.strip_root loc - in - JSON_Object [ - "name", JSON_String name; - "type", _json_of_t json_cx t; - "loc", loc_json - ] -) + +and json_of_type_binding_impl json_cx (name, (loc, t)) = + Hh_json.( + let loc_json = + match loc with + | None -> Hh_json.JSON_Null + | Some loc -> json_of_aloc ~strip_root:json_cx.strip_root ~offset_table:None loc + in + JSON_Object [("name", JSON_String name); ("type", _json_of_t json_cx t); ("loc", loc_json)]) and json_of_pred json_cx = check_depth json_of_pred_impl json_cx -and json_of_pred_impl json_cx p = Hh_json.( - JSON_Object ([ - "kind", JSON_String (string_of_pred_ctor p) - ] @ - match p with - | AndP (l, r) - | OrP (l, r) -> [ - "left", json_of_pred json_cx l; - "right", json_of_pred json_cx r - ] - | NotP p -> ["pred", json_of_pred json_cx p] - - | LeftP (b, t) - | RightP (b, t) -> [ - "binaryTest", json_of_binary_test json_cx b; - "type", _json_of_t json_cx t - ] - - | SingletonBoolP value -> ["value", JSON_Bool value] - | SingletonStrP (_, _, str) -> ["value", JSON_String str] - | SingletonNumP (_, _, (_,raw)) -> ["value", JSON_String raw] - - | PropExistsP (_, key, _) -> ["propName", JSON_String key] - - | ExistsP _ - | VoidP - | NullP - | MaybeP - | BoolP - | StrP - | NumP - | FunP - | ObjP - | ArrP - -> [] - - | LatentP (t,i) -> [ - "latent", JSON_Object [ - ("type", _json_of_t_impl json_cx t); - ("position", JSON_Number (spf "%d" i)) - ] - ] -)) + +and json_of_pred_impl json_cx p = + Hh_json.( + JSON_Object + ( [("kind", JSON_String (string_of_pred_ctor p))] + @ + match p with + | AndP (l, r) + | OrP (l, r) -> + [("left", json_of_pred json_cx l); ("right", json_of_pred json_cx r)] + | NotP p -> [("pred", json_of_pred json_cx p)] + | LeftP (b, t) + | RightP (b, t) -> + [("binaryTest", json_of_binary_test json_cx b); ("type", _json_of_t json_cx t)] + | SingletonBoolP (_, value) -> [("value", JSON_Bool value)] + | SingletonStrP (_, _, str) -> [("value", JSON_String str)] + | SingletonNumP (_, _, (_, raw)) -> [("value", JSON_String raw)] + | PropExistsP (key, _) -> [("propName", JSON_String 
key)] + | ExistsP _ + | VoidP + | NullP + | MaybeP + | BoolP + | StrP + | SymbolP + | NumP + | FunP + | ObjP + | ArrP -> + [] + | LatentP (t, i) -> + [ + ( "latent", + JSON_Object + [("type", _json_of_t_impl json_cx t); ("position", JSON_Number (spf "%d" i))] ); + ] )) and json_of_binary_test json_cx = check_depth json_of_binary_test_impl json_cx -and json_of_binary_test_impl _json_cx b = Hh_json.( - JSON_Object ([ - "kind", JSON_String (string_of_binary_test_ctor b) - ] @ - match b with - | InstanceofTest -> [] - | SentinelProp s -> ["key", JSON_String s] -)) + +and json_of_binary_test_impl _json_cx b = + Hh_json.( + JSON_Object + ( [("kind", JSON_String (string_of_binary_test_ctor b))] + @ + match b with + | InstanceofTest -> [] + | SentinelProp s -> [("key", JSON_String s)] )) and json_of_node json_cx = check_depth json_of_node_impl json_cx -and json_of_node_impl json_cx id = Hh_json.( - JSON_Object ( - let json_cx = { json_cx with stack = ISet.add id json_cx.stack } in - match IMap.find_unsafe id (Context.graph json_cx.cx) with - | Constraint.Goto id -> - ["kind", JSON_String "Goto"] - @ ["id", int_ id] - | Constraint.Root root -> - ["kind", JSON_String "Root"] - @ ["root", json_of_root json_cx root] - ) -) + +and json_of_node_impl json_cx id = + Hh_json.( + JSON_Object + (let json_cx = { json_cx with stack = ISet.add id json_cx.stack } in + match IMap.find_unsafe id (Context.graph json_cx.cx) with + | Constraint.Goto id -> [("kind", JSON_String "Goto")] @ [("id", int_ id)] + | Constraint.Root root -> + [("kind", JSON_String "Root")] @ [("root", json_of_root json_cx root)])) and json_of_root json_cx = check_depth json_of_root_impl json_cx -and json_of_root_impl json_cx root = Hh_json.(Constraint.( - JSON_Object ([ - "rank", int_ root.rank; - "constraints", json_of_constraints json_cx root.constraints - ]) -)) + +and json_of_root_impl json_cx root = + Hh_json.( + Constraint.( + JSON_Object + [("rank", int_ root.rank); ("constraints", json_of_constraints json_cx root.constraints)])) and json_of_constraints json_cx = check_depth json_of_constraints_impl json_cx -and json_of_constraints_impl json_cx constraints = Hh_json.( - JSON_Object ( - match constraints with - | Constraint.Resolved t -> - ["kind", JSON_String "Resolved"] - @ ["type", _json_of_t json_cx t] - | Constraint.Unresolved bounds -> - ["kind", JSON_String "Unresolved"] - @ ["bounds", json_of_bounds json_cx bounds] - ) -) + +and json_of_constraints_impl json_cx constraints = + Hh_json.( + JSON_Object + Constraint.( + match constraints with + | Resolved (_, t) + | FullyResolved (_, t) -> + [("kind", JSON_String "Resolved")] @ [("type", _json_of_t json_cx t)] + | Unresolved bounds -> + [("kind", JSON_String "Unresolved")] @ [("bounds", json_of_bounds json_cx bounds)])) and json_of_bounds json_cx = check_depth json_of_bounds_impl json_cx -and json_of_bounds_impl json_cx bounds = Hh_json.( - match bounds with - | { Constraint.lower; upper; lowertvars; uppertvars; } -> JSON_Object ([ - "lower", json_of_tkeys json_cx lower; - "upper", json_of_use_tkeys json_cx upper; - "lowertvars", json_of_tvarkeys json_cx lowertvars; - "uppertvars", json_of_tvarkeys json_cx uppertvars; - ]) -) + +and json_of_bounds_impl json_cx bounds = + Hh_json.( + match bounds with + | { Constraint.lower; upper; lowertvars; uppertvars } -> + JSON_Object + [ + ("lower", json_of_tkeys json_cx lower); + ("upper", json_of_use_tkeys json_cx upper); + ("lowertvars", json_of_tvarkeys json_cx lowertvars); + ("uppertvars", json_of_tvarkeys json_cx uppertvars); + ]) and 
json_of_tkeys json_cx = check_depth json_of_tkeys_impl json_cx -and json_of_tkeys_impl json_cx tmap = Hh_json.( - JSON_Array (TypeMap.fold (fun t _ acc -> _json_of_t json_cx t :: acc) tmap []) -) + +and json_of_tkeys_impl json_cx tmap = + Hh_json.(JSON_Array (TypeMap.fold (fun t _ acc -> _json_of_t json_cx t :: acc) tmap [])) and json_of_use_tkeys json_cx = check_depth json_of_use_tkeys_impl json_cx -and json_of_use_tkeys_impl json_cx tmap = Hh_json.( - let f = fun t _ acc -> _json_of_use_t json_cx t :: acc in - JSON_Array (UseTypeMap.fold f tmap []) -) + +and json_of_use_tkeys_impl json_cx tmap = + Hh_json.( + let f t _ acc = _json_of_use_t json_cx t :: acc in + JSON_Array (UseTypeMap.fold f tmap [])) and json_of_tvarkeys json_cx = check_depth json_of_tvarkeys_impl json_cx -and json_of_tvarkeys_impl _json_cx imap = Hh_json.( - JSON_Array (IMap.fold (fun i _ acc -> ((int_ i) :: acc)) imap []) -) - -and json_of_lookup_action json_cx = - check_depth json_of_lookup_action_impl json_cx -and json_of_lookup_action_impl json_cx action = Hh_json.( - JSON_Object ( - match action with - | RWProp (_, _, t, rw) -> [ - "kind", JSON_String "RWProp"; - "rw", JSON_String (string_of_rw rw); - "t", _json_of_t json_cx t - ] - | LookupProp (op, p) -> [ - "kind", JSON_String "LookupProp"; - "use", JSON_String (string_of_use_op op); - "prop", json_of_prop json_cx p; - ] - | SuperProp (_, p) -> [ - "kind", JSON_String "SuperProp"; - "prop", json_of_prop json_cx p; - ] - | MatchProp (_, t) -> [ - "kind", JSON_String "MatchProp"; - "t", _json_of_t json_cx t - ] - ) -) - -and json_of_specialize_cache json_cx = - check_depth json_of_specialize_cache_impl json_cx -and json_of_specialize_cache_impl json_cx cache = Hh_json.( - JSON_Object ( - match cache with - | None -> [] - | Some rs -> [ - "reasons", JSON_Array - (List.map (json_of_reason ~strip_root:json_cx.strip_root) rs); - ] - ) -) - -and json_of_obj_assign_kind json_cx = - check_depth json_of_obj_assign_kind_impl json_cx - -and json_of_obj_assign_kind_impl _json_cx kind = Hh_json.JSON_String ( - match kind with - | ObjAssign _ -> "normal" - | ObjSpreadAssign -> "spread" -) - -let json_of_t ?(size=5000) ?(depth=1000) ?(strip_root=None) cx t = - let json_cx = { - cx; - size = ref size; - depth; - stack = ISet.empty; - strip_root; - } in + +and json_of_tvarkeys_impl _json_cx imap = + Hh_json.(JSON_Array (IMap.fold (fun i _ acc -> int_ i :: acc) imap [])) + +and json_of_lookup_action json_cx = check_depth json_of_lookup_action_impl json_cx + +and json_of_lookup_action_impl json_cx action = + Hh_json.( + JSON_Object + (match action with + | ReadProp { use_op = _; obj_t = _; tout } -> + [("kind", JSON_String "ReadProp"); ("t", _json_of_t json_cx tout)] + | WriteProp { use_op = _; obj_t = _; prop_tout = _; tin; write_ctx = _; mode = _ } -> + [("kind", JSON_String "WriteProp"); ("t", _json_of_t json_cx tin)] + | LookupProp (op, p) -> + [ + ("kind", JSON_String "LookupProp"); + ("use", JSON_String (string_of_use_op op)); + ("prop", json_of_prop json_cx p); + ] + | SuperProp (_, p) -> [("kind", JSON_String "SuperProp"); ("prop", json_of_prop json_cx p)] + | MatchProp (_, t) -> [("kind", JSON_String "MatchProp"); ("t", _json_of_t json_cx t)])) + +and json_of_specialize_cache json_cx = check_depth json_of_specialize_cache_impl json_cx + +and json_of_specialize_cache_impl json_cx cache = + Hh_json.( + JSON_Object + (match cache with + | None -> [] + | Some rs -> + [ + ( "reasons", + JSON_Array + (Core_list.map + ~f:(json_of_reason ~strip_root:json_cx.strip_root 
~offset_table:None) + rs) ); + ])) + +and json_of_obj_assign_kind json_cx = check_depth json_of_obj_assign_kind_impl json_cx + +and json_of_obj_assign_kind_impl _json_cx kind = + Hh_json.JSON_String + (match kind with + | ObjAssign _ -> "normal" + | ObjSpreadAssign -> "spread") + +let json_of_t ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx t = + let json_cx = { cx; size = ref size; depth; stack = ISet.empty; strip_root } in _json_of_t json_cx t -let json_of_use_t ?(size=5000) ?(depth=1000) ?(strip_root=None) cx use_t = - let json_cx = { - cx; - size = ref size; - depth; - stack = ISet.empty; - strip_root; - } in +let json_of_use_t ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx use_t = + let json_cx = { cx; size = ref size; depth; stack = ISet.empty; strip_root } in _json_of_use_t json_cx use_t -let jstr_of_t ?(size=5000) ?(depth=1000) ?(strip_root=None) cx t = +let jstr_of_t ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx t = Hh_json.json_to_multiline (json_of_t ~size ~depth ~strip_root cx t) -let jstr_of_use_t ?(size=5000) ?(depth=1000) ?(strip_root=None) cx use_t = +let jstr_of_use_t ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx use_t = Hh_json.json_to_multiline (json_of_use_t ~size ~depth ~strip_root cx use_t) -let json_of_graph ?(size=5000) ?(depth=1000) ?(strip_root=None) cx = Hh_json.( - let entries = IMap.fold (fun id _ entries -> - let json_cx = { - cx; - size = ref size; - depth; - stack = ISet.empty; - strip_root; - } in - (spf "%d" id, json_of_node json_cx id) :: entries - ) (Context.graph cx) [] in - JSON_Object (List.rev entries) -) - -let jstr_of_graph ?(size=5000) ?(depth=1000) ?(strip_root=None) cx = - Hh_json.json_to_multiline (json_of_graph ~size ~depth ~strip_root cx) +let json_of_graph ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx = + Hh_json.( + let entries = + IMap.fold + (fun id _ entries -> + let json_cx = { cx; size = ref size; depth; stack = ISet.empty; strip_root } in + (spf "%d" id, json_of_node json_cx id) :: entries) + (Context.graph cx) + [] + in + JSON_Object (List.rev entries)) +let jstr_of_graph ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx = + Hh_json.json_to_multiline (json_of_graph ~size ~depth ~strip_root cx) (* scopes *) -let json_of_scope = Scope.( - let open Hh_json in - - let json_of_value_impl json_cx { Entry. 
- kind; value_state; value_declare_loc; value_assign_loc; specific; general; - } = - JSON_Object [ - "entry_type", JSON_String "Value"; - "kind", JSON_String (Entry.string_of_value_kind kind); - "value_state", JSON_String (State.to_string value_state); - "value_declare_loc", - json_of_loc ~strip_root:json_cx.strip_root value_declare_loc; - "value_assign_loc", - json_of_loc ~strip_root:json_cx.strip_root value_assign_loc; - "specific", _json_of_t json_cx specific; - "general", _json_of_t json_cx general; - ] - in - let json_of_value json_cx = check_depth json_of_value_impl json_cx in - - let json_of_type_impl json_cx { Entry.type_state; type_loc; _type; - type_binding_kind = _ } = - JSON_Object [ - "entry_type", JSON_String "Type"; - "type_state", JSON_String (State.to_string type_state); - "type_loc", json_of_loc ~strip_root:json_cx.strip_root type_loc; - "_type", _json_of_t json_cx _type; - ] - in - let json_of_type json_cx = check_depth json_of_type_impl json_cx in - - let json_of_class json_cx c = - let pmap = Context.find_props json_cx.cx c.class_private_fields in - JSON_Object [ - "class_id", JSON_String (string_of_int c.class_binding_id); - "class_private_fields", json_of_pmap json_cx pmap; - ] in - - let json_of_entry_impl json_cx = Entry.(function - | Value r -> json_of_value json_cx r - | Type r -> json_of_type json_cx r - | Class r -> json_of_class json_cx r - ) in - let json_of_entry json_cx = check_depth json_of_entry_impl json_cx in - - let json_of_entries_impl json_cx entries = - let props = SMap.fold (fun name entry acc -> - (name, json_of_entry json_cx entry) :: acc - ) entries [] - |> List.rev - in - JSON_Object props - in - let json_of_entries json_cx = check_depth json_of_entries_impl json_cx in - - let json_of_refi_impl json_cx { refi_loc; refined; original } = - JSON_Object [ - "refi_loc", json_of_loc ~strip_root:json_cx.strip_root refi_loc; - "refined", _json_of_t json_cx refined; - "original", _json_of_t json_cx original; - ] - in - let json_of_refi json_cx = check_depth json_of_refi_impl json_cx in +let json_of_scope = + Scope.( + Hh_json.( + let json_of_value_impl + json_cx + { Entry.kind; value_state; value_declare_loc; value_assign_loc; specific; general } = + JSON_Object + [ + ("entry_type", JSON_String "Value"); + ("kind", JSON_String (Entry.string_of_value_kind kind)); + ("value_state", JSON_String (State.to_string value_state)); + ( "value_declare_loc", + json_of_aloc ~strip_root:json_cx.strip_root ~offset_table:None value_declare_loc ); + ( "value_assign_loc", + json_of_aloc ~strip_root:json_cx.strip_root ~offset_table:None value_assign_loc ); + ("specific", _json_of_t json_cx specific); + ("general", _json_of_t json_cx general); + ] + in + let json_of_value json_cx = check_depth json_of_value_impl json_cx in + let json_of_type_impl json_cx { Entry.type_state; type_loc; type_; type_binding_kind = _ } = + JSON_Object + [ + ("entry_type", JSON_String "Type"); + ("type_state", JSON_String (State.to_string type_state)); + ("type_loc", json_of_aloc ~strip_root:json_cx.strip_root ~offset_table:None type_loc); + ("type_", _json_of_t json_cx type_); + ] + in + let json_of_type json_cx = check_depth json_of_type_impl json_cx in + let json_of_class json_cx c = + let pmap = Context.find_props json_cx.cx c.class_private_fields in + JSON_Object + [ + ("class_id", JSON_String (ALoc.debug_to_string c.class_binding_id)); + ("class_private_fields", json_of_pmap json_cx pmap); + ] + in + let json_of_entry_impl json_cx = + Entry.( + function + | Value r -> json_of_value 
json_cx r + | Type r -> json_of_type json_cx r + | Class r -> json_of_class json_cx r) + in + let json_of_entry json_cx = check_depth json_of_entry_impl json_cx in + let json_of_entries_impl json_cx entries = + let props = + SMap.fold (fun name entry acc -> (name, json_of_entry json_cx entry) :: acc) entries [] + |> List.rev + in + JSON_Object props + in + let json_of_entries json_cx = check_depth json_of_entries_impl json_cx in + let json_of_refi_impl json_cx { refi_loc; refined; original } = + JSON_Object + [ + ("refi_loc", json_of_aloc ~strip_root:json_cx.strip_root ~offset_table:None refi_loc); + ("refined", _json_of_t json_cx refined); + ("original", _json_of_t json_cx original); + ] + in + let json_of_refi json_cx = check_depth json_of_refi_impl json_cx in + let json_of_refis_impl json_cx refis = + let props = + Key_map.fold + (fun key refi acc -> (Key.string_of_key key, json_of_refi json_cx refi) :: acc) + refis + [] + |> List.rev + in + JSON_Object props + in + let json_of_refis json_cx = check_depth json_of_refis_impl json_cx in + fun ?(size = 5000) ?(depth = 1000) ?(strip_root = None) cx scope -> + let json_cx = { cx; size = ref size; depth; stack = ISet.empty; strip_root } in + JSON_Object + [ + ("kind", JSON_String (string_of_kind scope.kind)); + ("entries", json_of_entries json_cx scope.entries); + ("refis", json_of_refis json_cx scope.refis); + ])) - let json_of_refis_impl json_cx refis = - let props = Key_map.fold (fun key refi acc -> - (Key.string_of_key key, json_of_refi json_cx refi) :: acc - ) refis [] - |> List.rev - in - JSON_Object props - in - let json_of_refis json_cx = check_depth json_of_refis_impl json_cx in - - fun ?(size=5000) ?(depth=1000) ?(strip_root=None) cx scope -> - let json_cx = { - cx; - size = ref size; - depth; - stack = ISet.empty; - strip_root; - } in - JSON_Object [ - "kind", JSON_String (string_of_kind scope.kind); - "entries", json_of_entries json_cx scope.entries; - "refis", json_of_refis json_cx scope.refis; - ] -) - -let json_of_env ?(size=5000) ?(depth=1000) cx env = - Hh_json.JSON_Array (List.map (json_of_scope ~size ~depth cx) env) +let json_of_env ?(size = 5000) ?(depth = 1000) cx env = + Hh_json.JSON_Array (Core_list.map ~f:(json_of_scope ~size ~depth cx) env) (*****************************************************************) (* debug printer *) +let lookup_trust cx id = + Trust_constraint.( + match Context.find_trust_graph cx id with + | TrustResolved trust -> trust + | TrustUnresolved b -> get_trust b) + let dump_reason cx reason = - let strip_root = if Context.should_strip_root cx - then Some (Context.root cx) - else None in + let strip_root = + if Context.should_strip_root cx then + Some (Context.root cx) + else + None + in Reason.dump_reason ~strip_root reason let rec dump_t_ (depth, tvars) cx t = - - let p ?(reason=true) ?(extra="") t = - spf "%s (%s%s%s)" + let p ?(reason = true) ?(extra = "") ?(trust = None) t = + spf + "%s %s(%s%s%s)" (string_of_ctor t) - (if reason then spf "%S" (dump_reason cx (reason_of_t t)) else "") - (if reason && extra <> "" then ", " else "") + ( if not (Context.trust_tracking cx) then + "" + else + Option.value_map ~default:"" ~f:(lookup_trust cx |> string_of_trust_rep) trust ) + ( if reason then + spf "%S" (dump_reason cx (reason_of_t t)) + else + "" ) + ( if reason && extra <> "" then + ", " + else + "" ) extra in - - let kid = dump_t_ (depth-1, tvars) cx in - let tvar id = dump_tvar_ (depth-1, tvars) cx id in - - let string_of_destructor = function - | NonMaybeType -> "non-maybe type" - | 
PropertyType x -> spf "property type `%s`" x - | ElementType _ -> "element type" - | Bind _ -> "bind" - | ReadOnlyType -> "read only" - | SpreadType _ -> "spread" - | RestType _ -> "rest" - | ValuesType -> "values" - | CallType _ -> "function call" - | TypeMap (TupleMap _) -> "tuple map" - | TypeMap (ObjectMap _) -> "object map" - | TypeMap (ObjectMapi _) -> "object mapi" - | ReactElementPropsType -> "React element props" - | ReactElementConfigType -> "React element config" - | ReactElementRefType -> "React element instance" - in - - let defer_use = - let string_of_selector = function - | Prop name -> spf "prop `%s`" name - | Elem _ -> "elem" - | ObjRest _ -> "obj rest" - | ArrRest i -> spf "arr rest at index %d" i - | Default -> "default" - | Become -> "become" - | Refine _ -> "refine" - in - fun expr t -> match expr with - | DestructuringT (_, selector) -> - spf "Destructure %s on %s" (string_of_selector selector) t + let kid = dump_t_ (depth - 1, tvars) cx in + let tvar id = dump_tvar_ (depth - 1, tvars) cx id in + let defer_use expr t = + match expr with + | LatentPredT (_, p) -> spf "LatentPred %s on %s" (string_of_predicate p) t | TypeDestructorT (use_op, _, destructor) -> - spf "%s, TypeDestruct %s on %s" - (string_of_use_op use_op) - (string_of_destructor destructor) - t + spf "%s, TypeDestruct %s on %s" (string_of_use_op use_op) (string_of_destructor destructor) t in - let string_of_mixed_flavor = function | Mixed_everything -> "Mixed_everything" + | Mixed_function -> "Mixed_function" | Mixed_truthy -> "Mixed_truthy" | Mixed_non_maybe -> "Mixed_non_maybe" | Mixed_non_null -> "Mixed_non_null" | Mixed_non_void -> "Mixed_non_void" - | Empty_intersection -> "Empty_intersection" + | Mixed_symbol -> "Mixed_symbol" + in + let string_of_any_source = function + | Annotated -> "Annotated" + | AnyError -> "Error" + | Unsound _ -> "Unsound" + | Untyped -> "Untyped" in - let custom_fun = let react_prop_type = - let open React.PropType in - let complex = function - | ArrayOf -> "ArrayOf" - | InstanceOf -> "InstanceOf" - | ObjectOf -> "ObjectOf" - | OneOf -> "OneOf" - | OneOfType -> "OneOfType" - | Shape -> "Shape" - in - function - | Primitive (is_required, t) -> - spf "Primitive (%b, %s)" is_required (kid t) - | Complex kind -> complex kind + React.PropType.( + let complex = function + | ArrayOf -> "ArrayOf" + | InstanceOf -> "InstanceOf" + | ObjectOf -> "ObjectOf" + | OneOf -> "OneOf" + | OneOfType -> "OneOfType" + | Shape -> "Shape" + in + function + | Primitive (is_required, t) -> spf "Primitive (%b, %s)" is_required (kid t) + | Complex kind -> complex kind) in function | ObjectAssign -> "ObjectAssign" @@ -1699,434 +1576,555 @@ let rec dump_t_ (depth, tvars) cx t = | DebugThrow -> "DebugThrow" | DebugSleep -> "DebugSleep" in - - if depth = 0 then string_of_ctor t - else match t with - | OpenT (_, id) -> p ~extra:(tvar id) t - | DefT (_, NumT lit) -> p ~extra:(match lit with - | Literal (_, (_, raw)) -> raw - | Truthy -> "truthy" - | AnyLiteral -> "") t - | DefT (_, StrT c) -> p ~extra:(match c with - | Literal (_, s) -> spf "%S" s - | Truthy -> "truthy" - | AnyLiteral -> "") t - | DefT (_, BoolT c) -> p ~extra:(match c with - | Some b -> spf "%B" b - | None -> "") t - | DefT (_, FunT (_, _, {params; return_t; this_t; _})) -> p - ~extra:(spf "(%s) => %s" - (kid this_t) - (String.concat "; " (List.map (fun (_, t) -> kid t) params)) - (kid return_t)) t - | DefT (_, MixedT flavor) -> p ~extra:(string_of_mixed_flavor flavor) t - | DefT (_, EmptyT) - | DefT (_, AnyT) - | DefT (_, NullT) - | DefT 
(_, VoidT) - -> p t - | NullProtoT _ - | ObjProtoT _ - | FunProtoT _ - | FunProtoApplyT _ - | FunProtoBindT _ - | FunProtoCallT _ -> p t - | DefT (_, PolyT (tps, c, id)) -> p ~extra:(spf "%s [%s] #%d" - (kid c) - (String.concat "; " (List.map (fun tp -> tp.name) tps)) - id) t - | ThisClassT (_, inst) -> p ~extra:(kid inst) t - | BoundT (_, name, _) -> p ~extra:name t - | ExistsT _ -> p t - | DefT (_, ObjT { props_tmap; _ }) -> p t - ~extra:(Properties.string_of_id props_tmap) - | DefT (_, ArrT (ArrayAT (elemt, None))) -> p ~extra:(spf "Array %s" (kid elemt)) t - | DefT (_, ArrT (ArrayAT (elemt, Some tup))) -> p - ~extra:(spf "Array %s, %s" (kid elemt) - (spf "[%s]" (String.concat "; " (List.map kid tup)))) t - | DefT (_, ArrT (TupleAT (_, tup))) -> p - ~extra:(spf "Tuple [%s]" (String.concat ", " (List.map kid tup))) t - | DefT (_, ArrT (ROArrayAT (elemt))) -> p - ~extra:(spf "ReadOnlyArray %s" (kid elemt)) t - | DefT (_, ArrT EmptyAT) -> p ~extra:("EmptyArray") t - | DefT (_, CharSetT chars) -> p ~extra:(spf "<%S>" (String_utils.CharSet.to_string chars)) t - | DefT (_, ClassT inst) -> p ~extra:(kid inst) t - | DefT (_, InstanceT (_, _, _, { class_id; _ })) -> p ~extra:(spf "#%d" class_id) t - | DefT (_, TypeT (_, arg)) -> p ~extra:(kid arg) t - | AnnotT (_, arg, use_desc) -> - p ~extra:(spf "use_desc=%b, %s" use_desc (kid arg)) t - | OpaqueT (_, {underlying_t = Some arg; _}) -> p ~extra:(spf "%s" (kid arg)) t - | OpaqueT _ -> p t - | DefT (_, OptionalT arg) -> p ~extra:(kid arg) t - | EvalT (arg, expr, id) -> p - ~extra:(spf "%s, %d" (defer_use expr (kid arg)) id) t - | DefT (_, TypeAppT (_, base, args)) -> p ~extra:(spf "%s, [%s]" - (kid base) (String.concat "; " (List.map kid args))) t - | ThisTypeAppT (_, base, this, args_opt) -> p ~reason:false - ~extra:begin match args_opt with - | Some args -> spf "%s, %s, [%s]" (kid base) (kid this) - (String.concat "; " (List.map kid args)) - | None -> spf "%s, %s" (kid base) (kid this) - end t - | ExactT (_, arg) -> p ~extra:(kid arg) t - | DefT (_, MaybeT arg) -> p ~extra:(kid arg) t - | DefT (_, IntersectionT rep) -> p ~extra:(spf "[%s]" - (String.concat "; " (List.map kid (InterRep.members rep)))) t - | DefT (_, UnionT rep) -> p ~extra:(spf "[%s]" - (String.concat "; " (List.map kid (UnionRep.members rep)))) t - | AnyWithLowerBoundT arg - | AnyWithUpperBoundT arg -> p ~reason:false ~extra:(kid arg) t - | MergedT (_, uses) -> p ~extra:("[" ^ - (String.concat ", " (List.map (dump_use_t_ (depth - 1, tvars) cx) uses)) - ^ "]") t - | DefT (_, AnyObjT) - | DefT (_, AnyFunT) -> p t - | DefT (_, IdxWrapper inner_obj) -> p ~extra:(kid inner_obj) t - | ShapeT arg -> p ~reason:false ~extra:(kid arg) t - | MatchingPropT (_, _, arg) -> p ~extra:(kid arg) t - | KeysT (_, arg) -> p ~extra:(kid arg) t - | DefT (_, SingletonStrT s) -> p ~extra:(spf "%S" s) t - | DefT (_, SingletonNumT (_, s)) -> p ~extra:s t - | DefT (_, SingletonBoolT b) -> p ~extra:(spf "%B" b) t - | ModuleT _ -> p t - | InternalT (ExtendsT (_, l, u)) -> p ~extra:(spf "%s, %s" (kid l) (kid u)) t - | CustomFunT (_, kind) -> p ~extra:(custom_fun kind) t - | InternalT (ChoiceKitT _) -> p t - | TypeDestructorTriggerT (_, _, _, s, x) -> p ~extra:(spf "%s on upper, %s" - (string_of_destructor s) (kid x)) t - | OpenPredT (_, arg, p_pos, p_neg) -> p t - ~extra:(spf "%s, {%s}, {%s}" (kid arg) - (String.concat "; " (List.map (fun (k,p) -> - spf "%s: %s" (Key.string_of_key k) (string_of_predicate p) - ) (Key_map.elements p_pos))) - (String.concat "; " (List.map (fun (k,p) -> - spf "%s: %s" 
(Key.string_of_key k) (string_of_predicate p) - ) (Key_map.elements p_neg)))) - | ReposT (_, arg) - | InternalT (ReposUpperT (_, arg)) -> p ~extra:(kid arg) t - | InternalT (OptionalChainVoidT _) -> p t + if depth = 0 then + string_of_ctor t + else + match t with + | OpenT (_, id) -> p ~extra:(tvar id) t + | DefT (_, trust, NumT lit) -> + p + ~trust:(Some trust) + ~extra: + (match lit with + | Literal (_, (_, raw)) -> raw + | Truthy -> "truthy" + | AnyLiteral -> "") + t + | DefT (_, trust, StrT c) -> + p + ~trust:(Some trust) + ~extra: + (match c with + | Literal (_, s) -> spf "%S" s + | Truthy -> "truthy" + | AnyLiteral -> "") + t + | DefT (_, trust, BoolT c) -> + p + ~trust:(Some trust) + ~extra: + (match c with + | Some b -> spf "%B" b + | None -> "") + t + | DefT (_, trust, FunT (_, _, { params; return_t; this_t; _ })) -> + p + ~trust:(Some trust) + ~extra: + (spf + "(%s) => %s" + (kid this_t) + (String.concat "; " (Core_list.map ~f:(fun (_, t) -> kid t) params)) + (kid return_t)) + t + | AnyT (_, src) -> p ~extra:(string_of_any_source src) t + | DefT (_, trust, MixedT flavor) -> + p ~trust:(Some trust) ~extra:(string_of_mixed_flavor flavor) t + | DefT (_, trust, EmptyT _) + | DefT (_, trust, NullT) + | DefT (_, trust, VoidT) -> + p ~trust:(Some trust) t + | NullProtoT _ + | ObjProtoT _ + | FunProtoT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ -> + p t + | DefT (_, trust, PolyT (_, tps, c, id)) -> + p + ~trust:(Some trust) + ~extra: + (spf + "%s [%s] #%d" + (kid c) + (String.concat "; " (Core_list.map ~f:(fun tp -> tp.name) (Nel.to_list tps))) + id) + t + | ThisClassT (_, inst) -> p ~extra:(kid inst) t + | BoundT (_, name, _) -> p ~extra:name t + | ExistsT _ -> p t + | DefT (_, trust, ObjT { props_tmap; _ }) -> + p ~trust:(Some trust) t ~extra:(Properties.string_of_id props_tmap) + | DefT (_, trust, ArrT (ArrayAT (elemt, None))) -> + p ~trust:(Some trust) ~extra:(spf "Array %s" (kid elemt)) t + | DefT (_, trust, ArrT (ArrayAT (elemt, Some tup))) -> + p + ~trust:(Some trust) + ~extra: + (spf + "Array %s, %s" + (kid elemt) + (spf "[%s]" (String.concat "; " (Core_list.map ~f:kid tup)))) + t + | DefT (_, trust, ArrT (TupleAT (_, tup))) -> + p + ~trust:(Some trust) + ~extra:(spf "Tuple [%s]" (String.concat ", " (Core_list.map ~f:kid tup))) + t + | DefT (_, trust, ArrT (ROArrayAT elemt)) -> + p ~trust:(Some trust) ~extra:(spf "ReadOnlyArray %s" (kid elemt)) t + | DefT (_, trust, CharSetT chars) -> + p ~trust:(Some trust) ~extra:(spf "<%S>" (String_utils.CharSet.to_string chars)) t + | DefT (_, trust, ClassT inst) -> p ~trust:(Some trust) ~extra:(kid inst) t + | DefT (_, trust, InstanceT (_, _, _, { class_id; _ })) -> + p ~trust:(Some trust) ~extra:(spf "#%s" (ALoc.debug_to_string class_id)) t + | DefT (_, trust, TypeT (_, arg)) -> p ~trust:(Some trust) ~extra:(kid arg) t + | AnnotT (_, arg, use_desc) -> p ~extra:(spf "use_desc=%b, %s" use_desc (kid arg)) t + | OpaqueT (_, { underlying_t = Some arg; _ }) -> p ~extra:(spf "%s" (kid arg)) t + | OpaqueT _ -> p t + | OptionalT (_, arg) -> p ~extra:(kid arg) t + | EvalT (arg, expr, id) -> p ~extra:(spf "%s, %d" (defer_use expr (kid arg)) id) t + | TypeAppT (_, _, base, args) -> + p ~extra:(spf "%s, [%s]" (kid base) (String.concat "; " (Core_list.map ~f:kid args))) t + | ThisTypeAppT (_, base, this, args_opt) -> + p + ~reason:false + ~extra: + begin + match args_opt with + | Some args -> + spf + "%s, %s, [%s]" + (kid base) + (kid this) + (String.concat "; " (Core_list.map ~f:kid args)) + | None -> spf "%s, %s" (kid base) (kid 
this) + end + t + | ExactT (_, arg) -> p ~extra:(kid arg) t + | MaybeT (_, arg) -> p ~extra:(kid arg) t + | IntersectionT (_, rep) -> + p ~extra:(spf "[%s]" (String.concat "; " (Core_list.map ~f:kid (InterRep.members rep)))) t + | UnionT (_, rep) -> + p ~extra:(spf "[%s]" (String.concat "; " (Core_list.map ~f:kid (UnionRep.members rep)))) t + | MergedT (_, uses) -> + p + ~extra: + ( "[" + ^ String.concat ", " (Core_list.map ~f:(dump_use_t_ (depth - 1, tvars) cx) uses) + ^ "]" ) + t + | DefT (_, trust, IdxWrapper inner_obj) -> p ~trust:(Some trust) ~extra:(kid inner_obj) t + | DefT (_, trust, ReactAbstractComponentT _) -> p ~trust:(Some trust) t + | ShapeT arg -> p ~reason:false ~extra:(kid arg) t + | MatchingPropT (_, _, arg) -> p ~extra:(kid arg) t + | KeysT (_, arg) -> p ~extra:(kid arg) t + | DefT (_, trust, SingletonStrT s) -> p ~trust:(Some trust) ~extra:(spf "%S" s) t + | DefT (_, trust, SingletonNumT (_, s)) -> p ~trust:(Some trust) ~extra:s t + | DefT (_, trust, SingletonBoolT b) -> p ~trust:(Some trust) ~extra:(spf "%B" b) t + | ModuleT (_, { exports_tmap; _ }, _) -> + p + t + ~extra: + ( Context.find_exports cx exports_tmap + |> SMap.bindings + |> Core_list.map ~f:(fun (name, (_, t)) -> kid t |> spf "%s: %s" name) + |> String.concat ", " + |> spf "[%s]" ) + | InternalT (ExtendsT (_, l, u)) -> p ~extra:(spf "%s, %s" (kid l) (kid u)) t + | CustomFunT (_, kind) -> p ~extra:(custom_fun kind) t + | InternalT (ChoiceKitT _) -> p t + | TypeDestructorTriggerT (_, _, _, s, x) -> + p ~extra:(spf "%s on upper, %s" (string_of_destructor s) (kid x)) t + | OpenPredT (_, arg, p_pos, p_neg) -> + p + t + ~extra: + (spf + "%s, {%s}, {%s}" + (kid arg) + (String.concat + "; " + (Core_list.map + ~f:(fun (k, p) -> spf "%s: %s" (Key.string_of_key k) (string_of_predicate p)) + (Key_map.elements p_pos))) + (String.concat + "; " + (Core_list.map + ~f:(fun (k, p) -> spf "%s: %s" (Key.string_of_key k) (string_of_predicate p)) + (Key_map.elements p_neg)))) + | ReposT (_, arg) + | InternalT (ReposUpperT (_, arg)) -> + p ~extra:(kid arg) t + | InternalT (OptionalChainVoidT _) -> p t and dump_use_t_ (depth, tvars) cx t = - - let p ?(reason=true) ?(extra="") use_t = - spf "%s (%s%s%s)" + let p ?(reason = true) ?(extra = "") use_t = + spf + "%s (%s%s%s)" (string_of_use_ctor use_t) - (if reason then spf "%S" (dump_reason cx (reason_of_use_t use_t)) else "") - (if reason && extra <> "" then ", " else "") + ( if reason then + spf "%S" (dump_reason cx (reason_of_use_t use_t)) + else + "" ) + ( if reason && extra <> "" then + ", " + else + "" ) extra in - - let kid t = dump_t_ (depth-1, tvars) cx t in - let use_kid use_t = dump_use_t_ (depth-1, tvars) cx use_t in - let tvar id = dump_tvar_ (depth-1, tvars) cx id in - let prop p = dump_prop_ (depth-1, tvars) cx p in - + let kid t = dump_t_ (depth - 1, tvars) cx t in + let use_kid use_t = dump_use_t_ (depth - 1, tvars) cx use_t in + let tvar id = dump_tvar_ (depth - 1, tvars) cx id in + let prop p = dump_prop_ (depth - 1, tvars) cx p in let string_of_use_op = string_of_use_op_rec in - let call_arg_kid = function - | Arg t -> kid t - | SpreadArg t -> spf "...%s" (kid t) + | Arg t -> kid t + | SpreadArg t -> spf "...%s" (kid t) + in + let tlist ts = spf "[%s]" (String.concat "; " (Core_list.map ~f:kid ts)) in + let props map = + spf + "{%s}" + (String.concat "; " (SMap.fold (fun k p acc -> spf "%s = %s" k (prop p) :: acc) map [])) in - - let tlist ts = spf "[%s]" (String.concat "; " (List.map kid ts)) in - let props map = spf "{%s}" (String.concat "; " ( - SMap.fold (fun 
k p acc -> - spf "%s = %s" k (prop p) :: acc - ) map [] - )) in - let propref = function | Named (r, x) -> spf "%S %s" (dump_reason cx r) x | Computed t -> kid t in - let lookup_kind = function - | NonstrictReturning (default_opt, testid_opt) -> - spf "Nonstrict%s%s" - (Option.value_map default_opt ~default:"" ~f:(fun (t, _) -> spf " returning %s" (kid t))) - (Option.value_map testid_opt ~default:"" ~f:(fun (id, _) -> spf " for test id %d" id)) - | Strict r -> spf "Strict %S" (dump_reason cx r) - | ShadowRead (_, ids) -> spf "ShadowRead [%s]" - (String.concat "; " (Nel.to_list ids |> List.map Properties.string_of_id)) - | ShadowWrite ids -> spf "ShadowWrite [%s]" - (String.concat "; " (Nel.to_list ids |> List.map Properties.string_of_id)) + | NonstrictReturning (default_opt, testid_opt) -> + spf + "Nonstrict%s%s" + (Option.value_map default_opt ~default:"" ~f:(fun (t, _) -> spf " returning %s" (kid t))) + (Option.value_map testid_opt ~default:"" ~f:(fun (id, _) -> spf " for test id %d" id)) + | Strict r -> spf "Strict %S" (dump_reason cx r) + | ShadowRead (_, ids) -> + spf + "ShadowRead [%s]" + (String.concat "; " (Nel.to_list ids |> Core_list.map ~f:Properties.string_of_id)) + | ShadowWrite ids -> + spf + "ShadowWrite [%s]" + (String.concat "; " (Nel.to_list ids |> Core_list.map ~f:Properties.string_of_id)) in - let lookup_action = function - | RWProp (_, _, t, Read) -> spf "Read %s" (kid t) - | RWProp (_, _, t, Write _) -> spf "Write %s" (kid t) - | LookupProp (op, p) -> spf "Lookup (%s, %s)" (string_of_use_op op) (prop p) - | SuperProp (_, p) -> spf "Super %s" (prop p) - | MatchProp (_, t) -> spf "Match %s" (kid t) + | ReadProp { tout; _ } -> spf "Read %s" (kid tout) + | WriteProp { tin; _ } -> spf "Write %s" (kid tin) + | LookupProp (op, p) -> spf "Lookup (%s, %s)" (string_of_use_op op) (prop p) + | SuperProp (_, p) -> spf "Super %s" (prop p) + | MatchProp (_, t) -> spf "Match %s" (kid t) in - let specialize_cache = function | None -> "None" - | Some rs -> spf "Some [%s]" - (String.concat "; " @@ List.map (dump_reason cx) rs) + | Some rs -> spf "Some [%s]" (String.concat "; " @@ Core_list.map ~f:(dump_reason cx) rs) in - let try_flow = function | UnionCases (use_op, t, _rep, ts) -> - spf "(%s, %s, [%s])" - (string_of_use_op use_op) - (kid t) - (String.concat "; " (List.map kid ts)) + spf + "(%s, %s, [%s])" + (string_of_use_op use_op) + (kid t) + (String.concat "; " (Core_list.map ~f:kid ts)) | IntersectionCases (ts, use_t) -> - spf "([%s], %s)" (String.concat "; " (List.map kid ts)) (use_kid use_t) + spf "([%s], %s)" (String.concat "; " (Core_list.map ~f:kid ts)) (use_kid use_t) in - let react_kit = - let open React in - let resolved_object (_, pmap, _, _) = props pmap in - let resolve_array = function - | ResolveArray -> "ResolveArray" - | ResolveElem (todo, done_rev) -> - spf "ResolveElem (%s, %s)" (tlist todo) (tlist done_rev) - in - let resolve_object = function - | ResolveObject -> "ResolveObject" - | ResolveDict (_, todo, acc) -> - spf "ResolveDict (%s, %s)" (props todo) (resolved_object acc) - | ResolveProp (k, todo, acc) -> - spf "ResolveProp (%s, %s, %s)" k (props todo) (resolved_object acc) - in - let simplify_prop_type = SimplifyPropType.(function - | ArrayOf -> "ArrayOf" - | InstanceOf -> "InstanceOf" - | ObjectOf -> "ObjectOf" - | OneOf tool -> spf "OneOf (%s)" (resolve_array tool) - | OneOfType tool -> spf "OneOfType (%s)" (resolve_array tool) - | Shape tool -> spf "Shape (%s)" (resolve_object tool) - ) in - let create_class = CreateClass.( - let tool = function - | Spec 
_ -> "Spec" - | Mixins _ -> "Mixins" - | Statics _ -> "Statics" - | PropTypes (_, tool) -> - spf "PropTypes (%s)" (resolve_object tool) - | DefaultProps _ -> "DefaultProps" - | InitialState _ -> "InitialState" + React.( + let resolved_object (_, pmap, _, _) = props pmap in + let resolve_array = function + | ResolveArray -> "ResolveArray" + | ResolveElem (todo, done_rev) -> spf "ResolveElem (%s, %s)" (tlist todo) (tlist done_rev) in - let knot {this; static; state_t; default_t} = - spf "{this = %s; static = %s; state = %s; default = %s}" - (kid this) - (kid static) - (kid state_t) - (kid default_t) + let resolve_object = function + | ResolveObject -> "ResolveObject" + | ResolveDict (_, todo, acc) -> + spf "ResolveDict (%s, %s)" (props todo) (resolved_object acc) + | ResolveProp (k, todo, acc) -> + spf "ResolveProp (%s, %s, %s)" k (props todo) (resolved_object acc) in - fun t k -> spf "%s, %s" (tool t) (knot k) - ) in - function - | CreateElement0 (_, config, (children, children_spread), tout) - | CreateElement (_, _, config, (children, children_spread), tout) -> p - ~extra:(spf "CreateElement (%s; %s%s) => %s" - (kid config) - (String.concat "; " (List.map kid children)) - (match children_spread with - | Some children_spread -> spf "; ...%s" (kid children_spread) - | None -> "") - (kid tout)) t - | GetProps tout -> spf "GetProps (%s)" (kid tout) - | GetConfig tout -> spf "GetConfig (%s)" (kid tout) - | GetRef tout -> spf "GetRef (%s)" (kid tout) - | SimplifyPropType (tool, tout) -> - spf "SimplifyPropType (%s, %s)" (simplify_prop_type tool) (kid tout) - | CreateClass (tool, knot, tout) -> - spf "CreateClass (%s, %s)" (create_class tool knot) (kid tout) + let simplify_prop_type = + SimplifyPropType.( + function + | ArrayOf -> "ArrayOf" + | InstanceOf -> "InstanceOf" + | ObjectOf -> "ObjectOf" + | OneOf tool -> spf "OneOf (%s)" (resolve_array tool) + | OneOfType tool -> spf "OneOfType (%s)" (resolve_array tool) + | Shape tool -> spf "Shape (%s)" (resolve_object tool)) + in + let create_class = + CreateClass.( + let tool = function + | Spec _ -> "Spec" + | Mixins _ -> "Mixins" + | Statics _ -> "Statics" + | PropTypes (_, tool) -> spf "PropTypes (%s)" (resolve_object tool) + | DefaultProps _ -> "DefaultProps" + | InitialState _ -> "InitialState" + in + let knot { this; static; state_t; default_t } = + spf + "{this = %s; static = %s; state = %s; default = %s}" + (kid this) + (kid static) + (kid state_t) + (kid default_t) + in + (fun t k -> spf "%s, %s" (tool t) (knot k))) + in + function + | CreateElement0 (_, config, (children, children_spread), tout) + | CreateElement (_, _, config, (children, children_spread), tout) -> + p + ~extra: + (spf + "CreateElement (%s; %s%s) => %s" + (kid config) + (String.concat "; " (Core_list.map ~f:kid children)) + (match children_spread with + | Some children_spread -> spf "; ...%s" (kid children_spread) + | None -> "") + (kid tout)) + t + | ConfigCheck config -> spf "ConfigCheck (%s)" (kid config) + | GetProps tout -> spf "GetProps (%s)" (kid tout) + | GetConfig tout -> spf "GetConfig (%s)" (kid tout) + | GetConfigType (default_props, tout) -> + spf "GetConfigType (%s, %s)" (kid default_props) (kid tout) + | GetRef tout -> spf "GetRef (%s)" (kid tout) + | SimplifyPropType (tool, tout) -> + spf "SimplifyPropType (%s, %s)" (simplify_prop_type tool) (kid tout) + | CreateClass (tool, knot, tout) -> + spf "CreateClass (%s, %s)" (create_class tool knot) (kid tout)) in - - let slice (_, props, dict, {exact; _}) = - let xs = match dict with - | Some {dict_polarity=p; 
_} -> [(Polarity.sigil p)^"[]"] - | None -> [] + let slice { Object.reason = _; props; dict; flags = { exact; _ } } = + let xs = + match dict with + | Some { dict_polarity = p; _ } -> [Polarity.sigil p ^ "[]"] + | None -> [] + in + let xs = + SMap.fold + (fun k (t, _) xs -> + let opt = + match t with + | OptionalT _ -> "?" + | _ -> "" + in + (k ^ opt) :: xs) + props + xs in - let xs = SMap.fold (fun k (t,_) xs -> - let opt = match t with DefT (_, OptionalT _) -> "?" | _ -> "" in - (k^opt)::xs - ) props xs in let xs = String.concat "; " xs in - if exact - then spf "{|%s|}" xs - else spf "{%s}" xs + if exact then + spf "{|%s|}" xs + else + spf "{%s}" xs in - - let object_kit = - let open Object in - let join = function And -> "And" | Or -> "Or" in - let resolved xs = - spf "[%s]" (String.concat "; " (List.map slice (Nel.to_list xs))) - in - let resolve = function - | Next -> "Next" - | List0 (todo, j) -> - spf "List0 ([%s], %s)" - (String.concat "; " (List.map kid (Nel.to_list todo))) - (join j) - | List (todo, done_rev, j) -> - spf "List ([%s], [%s], %s)" - (String.concat "; " (List.map kid todo)) - (String.concat "; " (List.map resolved (Nel.to_list done_rev))) - (join j) - in - let resolve_tool = function - | Resolve tool -> spf "Resolve %s" (resolve tool) - | Super (s, tool) -> spf "Super (%s, %s)" (slice s) (resolve tool) + let operand_slice reason prop_map dict = + let props = + SMap.fold + (fun k p acc -> + match (Type.Property.read_t p, Type.Property.write_t p) with + | (Some t, _) + | (_, Some t) -> + SMap.add k (t, true) acc + | _ -> acc) + prop_map + SMap.empty in - let spread target state = - let open Object.Spread in - let target = - (match target with - | Annot { make_exact } -> spf "Annot { make_exact=%b }" make_exact - | Value -> "Value") + let flags = { exact = true; sealed = Sealed; frozen = false } in + slice { Object.reason; props; dict; flags } + in + let object_kit = + Object.( + let join (_loc, op) = + match op with + | And -> "And" + | Or -> "Or" in - let state = - let {todo_rev; acc} = state in - spf "{todo_rev=[%s]; acc=[%s]}" - (String.concat "; " (List.map kid todo_rev)) - (String.concat "; " (List.map resolved acc)) + let resolved xs = + spf "[%s]" (String.concat "; " (Core_list.map ~f:slice (Nel.to_list xs))) in - spf "Spread (%s, %s)" target state - in - let rest merge_mode state = - let open Object.Rest in - spf "Rest ({merge_mode=%s}, %s)" - (match merge_mode with - | Sound -> "Sound" - | IgnoreExactAndOwn -> "IgnoreExactAndOwn" - | ReactConfigMerge -> "ReactConfigMerge") - (match state with - | One t -> spf "One (%s)" (kid t) - | Done o -> spf "Done (%s)" (resolved o)) - in - let react_props state = - let open Object.ReactConfig in - spf "(%s)" - (match state with - | Config _ -> "Config" - | Defaults _ -> "Defaults") - in - let tool = function - | ReadOnly -> "ReadOnly" - | Spread (options, state) -> spread options state - | Rest (options, state) -> rest options state - | ReactConfig state -> react_props state - in - fun a b -> - spf "(%s, %s)" (resolve_tool a) (tool b) + let resolve = function + | Next -> "Next" + | List0 (todo, j) -> + spf + "List0 ([%s], %s)" + (String.concat "; " (Core_list.map ~f:kid (Nel.to_list todo))) + (join j) + | List (todo, done_rev, j) -> + spf + "List ([%s], [%s], %s)" + (String.concat "; " (Core_list.map ~f:kid todo)) + (String.concat "; " (Core_list.map ~f:resolved (Nel.to_list done_rev))) + (join j) + in + let resolve_tool = function + | Resolve tool -> spf "Resolve %s" (resolve tool) + | Super (s, tool) -> spf "Super (%s, 
%s)" (slice s) (resolve tool) + in + let acc_element = function + | Spread.InlineSlice { Spread.reason; prop_map; dict } -> + operand_slice reason prop_map dict + | Spread.ResolvedSlice xs -> resolved xs + in + let spread target state = + Object.Spread.( + let target = + match target with + | Annot { make_exact } -> spf "Annot { make_exact=%b }" make_exact + | Value -> "Value" + in + let spread_operand = function + | Slice { Spread.reason; prop_map; dict } -> operand_slice reason prop_map dict + | Type t -> kid t + in + let state = + let { todo_rev; acc } = state in + spf + "{todo_rev=[%s]; acc=[%s]}" + (String.concat "; " (Core_list.map ~f:spread_operand todo_rev)) + (String.concat "; " (Core_list.map ~f:acc_element acc)) + in + spf "Spread (%s, %s)" target state) + in + let rest merge_mode state = + Object.Rest.( + spf + "Rest ({merge_mode=%s}, %s)" + (match merge_mode with + | Sound -> "Sound" + | IgnoreExactAndOwn -> "IgnoreExactAndOwn" + | ReactConfigMerge _ -> "ReactConfigMerge") + (match state with + | One t -> spf "One (%s)" (kid t) + | Done o -> spf "Done (%s)" (resolved o))) + in + let react_props state = + Object.ReactConfig.( + spf + "(%s)" + (match state with + | Config _ -> "Config" + | Defaults _ -> "Defaults")) + in + let tool = function + | ReadOnly -> "ReadOnly" + | ObjectRep -> "ObjectRep" + | Spread (options, state) -> spread options state + | Rest (options, state) -> rest options state + | ReactConfig state -> react_props state + in + (fun a b -> spf "(%s, %s)" (resolve_tool a) (tool b))) in - - if depth = 0 then string_of_use_ctor t - else match t with - | UseT (use_op, OpenT (r, id)) -> - spf "UseT (%s, OpenT (%S, %d))" - (string_of_use_op use_op) - (dump_reason cx r) - id - | UseT (use_op, t) -> spf "UseT (%s, %s)" (string_of_use_op use_op) (kid t) - | AdderT (use_op, _, _, x, y) -> p ~extra:(spf "%s, %s, %s" - (string_of_use_op use_op) - (kid x) - (kid y)) t - | AndT (_, x, y) -> p ~extra:(spf "%s, %s" (kid x) (kid y)) t - | ArrRestT (use_op, _, _, _) -> p ~extra:(string_of_use_op use_op) t - | AssertArithmeticOperandT _ -> p t - | AssertBinaryInLHST _ -> p t - | AssertBinaryInRHST _ -> p t - | AssertForInRHST _ -> p t - | AssertImportIsValueT _ -> p t - | AssertRestParamT _ -> p t - | BecomeT (_, arg) -> p ~extra:(kid arg) t - | BindT _ -> p t - | CallElemT (_, _, ix, _) -> p ~extra:(kid ix) t - | CallT (use_op,_,{call_args_tlist;call_tout;call_this_t;_}) -> p - ~extra:(spf "%s, (%s) => %s" + if depth = 0 then + string_of_use_ctor t + else + match t with + | UseT (use_op, OpenT (r, id)) -> + spf "UseT (%s, OpenT (%S, %d))" (string_of_use_op use_op) (dump_reason cx r) id + | UseT (use_op, (DefT (_, trust, _) as t)) -> + spf + "UseT (%s, %s%s)" (string_of_use_op use_op) - (kid call_this_t) - (String.concat "; " (List.map call_arg_kid call_args_tlist)) - (kid call_tout)) t - | CallLatentPredT _ -> p t - | CallOpenPredT _ -> p t - | ChoiceKitUseT (_, TryFlow (_, spec)) -> - p ~extra:(try_flow spec) t - | ChoiceKitUseT (_, FullyResolveType id) -> p ~extra:(tvar id) t - | CJSExtractNamedExportsT _ -> p t - | CJSRequireT _ -> p t - | ComparatorT (_, _, arg) -> p ~extra:(kid arg) t - | ConstructorT _ -> p t - | CopyNamedExportsT _ -> p t - | CopyTypeExportsT _ -> p t - | DebugPrintT _ -> p t - | DebugSleepT _ -> p t - | ElemT _ -> p t - | EqT (_, _, arg) -> p ~extra:(kid arg) t - | ExportNamedT (_, _, tmap, arg) -> p t - ~extra:(spf "%s, {%s}" - (kid arg) - (String.concat "; " - (List.map (fun (x,_) -> x) - (SMap.bindings tmap)))) - | ExportTypeT _ -> p t - | GetElemT (_, 
_, ix, etype) -> p ~extra:(spf "%s, %s" (kid ix) (kid etype)) t - | GetKeysT _ -> p t - | GetValuesT _ -> p t - | MatchPropT (use_op, _, prop, ptype) - | GetPropT (use_op, _, prop, ptype) -> p ~extra:(spf "%s, (%s), %s" - (string_of_use_op use_op) - (propref prop) - (kid ptype)) t - | GetPrivatePropT (_, _, prop, _, _, ptype) -> p ~extra:(spf "(%s), %s" - (prop) - (kid ptype)) t - | GetProtoT (_, arg) -> p ~extra:(kid arg) t - | GetStaticsT (_, arg) -> p ~extra:(kid arg) t - | GuardT (pred, result, sink) -> p ~reason:false - ~extra:(spf "%s, %s, %s" - (string_of_predicate pred) (kid result) (kid sink)) - t - | HasOwnPropT _ -> p t - | IdxUnMaybeifyT _ -> p t - | IdxUnwrap _ -> p t - | ImportDefaultT _ -> p t - | ImportModuleNsT _ -> p t - | ImportNamedT _ -> p t - | ImportTypeofT _ -> p t - | ImportTypeT _ -> p t - | IntersectionPreprocessKitT _ -> p t - | InvariantT _ -> p t - | LookupT (_, kind, _, prop, action) -> p ~extra:(spf "%S, %s, %s" - (propref prop) - (lookup_kind kind) - (lookup_action action)) t - | MakeExactT _ -> p t - | MapTypeT _ -> p t - | MethodT (_, _, _, prop, _, _) -> p ~extra:(spf "(%s)" (propref prop)) t - | MixinT (_, arg) -> p ~extra:(kid arg) t - | NotT (_, arg) -> p ~extra:(kid arg) t - | NullishCoalesceT (_, x, y) -> p ~extra:(spf "%s, %s" (kid x) (kid y)) t - | ObjAssignToT (_, arg1, arg2, _) -> p t - ~extra:(spf "%s, %s" (kid arg1) (kid arg2)) - | ObjAssignFromT (_, arg1, arg2, _) -> p t - ~extra:(spf "%s, %s" (kid arg1) (kid arg2)) - | ObjFreezeT _ -> p t - | ObjRestT (_, xs, arg) -> p t - ~extra:(spf "[%s], %s" (String.concat "; " xs) (kid arg)) - | ObjSealT _ -> p t - | ObjTestProtoT _ -> p t - | ObjTestT _ -> p t - | OptionalChainT _ -> p t - | OrT (_, x, y) -> p ~extra:(spf "%s, %s" (kid x) (kid y)) t - | PredicateT (pred, arg) -> p ~reason:false - ~extra:(spf "%s, %s" (string_of_predicate pred) (kid arg)) t - | ReactKitT (use_op, _, tool) -> p t - ~extra:(spf "%s, %s" (string_of_use_op use_op) (react_kit tool)) - | RefineT _ -> p t - | ReposLowerT (_, use_desc, arg) -> p t - ~extra:(spf "use_desc=%b, %s" use_desc (use_kid arg)) - | ReposUseT (_, use_desc, use_op, arg) -> p t - ~extra:(spf "use_desc=%b, %s" use_desc (use_kid (UseT (use_op, arg)))) - | ResolveSpreadT (use_op, _, {rrt_resolve_to; _;}) -> + ( if Context.trust_tracking cx then + string_of_trust_rep (lookup_trust cx) trust + else + "" ) + (kid t) + | UseT (use_op, t) -> spf "UseT (%s, %s)" (string_of_use_op use_op) (kid t) + | AdderT (use_op, _, _, x, y) -> + p ~extra:(spf "%s, %s, %s" (string_of_use_op use_op) (kid x) (kid y)) t + | AndT (_, x, y) -> p ~extra:(spf "%s, %s" (kid x) (kid y)) t + | ArrRestT (use_op, _, _, _) -> p ~extra:(string_of_use_op use_op) t + | AssertArithmeticOperandT _ -> p t + | AssertBinaryInLHST _ -> p t + | AssertBinaryInRHST _ -> p t + | AssertForInRHST _ -> p t + | AssertImportIsValueT _ -> p t + | BecomeT (_, arg) -> p ~extra:(kid arg) t + | BindT _ -> p t + | CallElemT (_, _, ix, _) -> p ~extra:(kid ix) t + | CallT (use_op, _, { call_args_tlist; call_tout; call_this_t; _ }) -> + p + ~extra: + (spf + "%s, (%s) => %s" + (string_of_use_op use_op) + (kid call_this_t) + (String.concat "; " (Core_list.map ~f:call_arg_kid call_args_tlist)) + (kid call_tout)) + t + | CallLatentPredT _ -> p t + | CallOpenPredT _ -> p t + | ChoiceKitUseT (_, TryFlow (_, spec)) -> p ~extra:(try_flow spec) t + | ChoiceKitUseT (_, FullyResolveType id) -> p ~extra:(tvar id) t + | CJSExtractNamedExportsT _ -> p t + | CJSRequireT _ -> p t + | ComparatorT (_, _, arg) -> p ~extra:(kid arg) t + | 
ConstructorT _ -> p t + | CopyNamedExportsT _ -> p t + | CopyTypeExportsT _ -> p t + | DebugPrintT _ -> p t + | DebugSleepT _ -> p t + | ElemT _ -> p t + | EqT (_, _, arg) -> p ~extra:(kid arg) t + | ExportNamedT (_, _, tmap, _export_kind, arg) -> + p + t + ~extra: + (spf + "%s, {%s}" + (kid arg) + (String.concat "; " (Core_list.map ~f:(fun (x, _) -> x) (SMap.bindings tmap)))) + | ExportTypeT _ -> p t + | AssertExportIsTypeT _ -> p t + | GetElemT (_, _, ix, etype) -> p ~extra:(spf "%s, %s" (kid ix) (kid etype)) t + | GetKeysT _ -> p t + | GetValuesT _ -> p t + | MatchPropT (use_op, _, prop, ptype) + | GetPropT (use_op, _, prop, ptype) -> + p ~extra:(spf "%s, (%s), %s" (string_of_use_op use_op) (propref prop) (kid ptype)) t + | GetPrivatePropT (_, _, prop, _, _, ptype) -> p ~extra:(spf "(%s), %s" prop (kid ptype)) t + | GetProtoT (_, arg) -> p ~extra:(kid arg) t + | GetStaticsT (_, arg) -> p ~extra:(kid arg) t + | GuardT (pred, result, sink) -> + p + ~reason:false + ~extra:(spf "%s, %s, %s" (string_of_predicate pred) (kid result) (kid sink)) + t + | HasOwnPropT _ -> p t + | IdxUnMaybeifyT _ -> p t + | IdxUnwrap _ -> p t + | ImportDefaultT _ -> p t + | ImportModuleNsT _ -> p t + | ImportNamedT _ -> p t + | ImportTypeofT _ -> p t + | ImportTypeT _ -> p t + | IntersectionPreprocessKitT _ -> p t + | InvariantT _ -> p t + | LookupT (_, kind, _, prop, action) -> + p ~extra:(spf "%S, %s, %s" (propref prop) (lookup_kind kind) (lookup_action action)) t + | MakeExactT _ -> p t + | MapTypeT _ -> p t + | MethodT (_, _, _, prop, _, _) -> p ~extra:(spf "(%s)" (propref prop)) t + | MixinT (_, arg) -> p ~extra:(kid arg) t + | NotT (_, arg) -> p ~extra:(kid arg) t + | NullishCoalesceT (_, x, y) -> p ~extra:(spf "%s, %s" (kid x) (kid y)) t + | ObjAssignToT (_, _, arg1, arg2, _) -> p t ~extra:(spf "%s, %s" (kid arg1) (kid arg2)) + | ObjAssignFromT (_, _, arg1, arg2, _) -> p t ~extra:(spf "%s, %s" (kid arg1) (kid arg2)) + | ObjFreezeT _ -> p t + | ObjRestT (_, xs, arg) -> p t ~extra:(spf "[%s], %s" (String.concat "; " xs) (kid arg)) + | ObjSealT _ -> p t + | ObjTestProtoT _ -> p t + | ObjTestT _ -> p t + | OptionalChainT _ -> p t + | OrT (_, x, y) -> p ~extra:(spf "%s, %s" (kid x) (kid y)) t + | PredicateT (pred, arg) -> + p ~reason:false ~extra:(spf "%s, %s" (string_of_predicate pred) (kid arg)) t + | ReactKitT (use_op, _, tool) -> + p t ~extra:(spf "%s, %s" (string_of_use_op use_op) (react_kit tool)) + | RefineT _ -> p t + | ReactPropsToOut (_, props) + | ReactInToProps (_, props) -> + p ~extra:(kid props |> spf "%s") t + | ReposLowerT (_, use_desc, arg) -> p t ~extra:(spf "use_desc=%b, %s" use_desc (use_kid arg)) + | ReposUseT (_, use_desc, use_op, arg) -> + p t ~extra:(spf "use_desc=%b, %s" use_desc (use_kid (UseT (use_op, arg)))) + | ResolveSpreadT (use_op, _, { rrt_resolve_to; _ }) -> (match rrt_resolve_to with | ResolveSpreadsToTuple (_, elem_t, tout) | ResolveSpreadsToArrayLiteral (_, elem_t, tout) @@ -2138,180 +2136,202 @@ and dump_use_t_ (depth, tvars) cx t = p ~extra:(spf "%s, %s" (string_of_use_op use_op) (kid tin)) t | ResolveSpreadsToMultiflowCallFull _ | ResolveSpreadsToMultiflowSubtypeFull _ - | ResolveSpreadsToCustomFunCall _ - -> p ~extra:(string_of_use_op use_op) t) - | SentinelPropTestT (_, l, _key, sense, sentinel, result) -> p ~reason:false - ~extra:(spf "%s, %b, %s, %s" - (kid l) - sense - (string_of_sentinel sentinel) - (kid result)) - t - | SubstOnPredT _ -> p t - | SuperT _ -> p t - | ImplementsT (_, arg) -> p ~reason:false ~extra:(kid arg) t - | SetElemT (_, _, ix, etype, _) -> p 
~extra:(spf "%s, %s" (kid ix) (kid etype)) t - | SetPropT (use_op, _, prop, _, ptype, _) -> p ~extra:(spf "%s, (%s), %s" - (string_of_use_op use_op) - (propref prop) - (kid ptype)) t - | SetPrivatePropT (_, _, prop, _, _, ptype, _) -> p ~extra:(spf "(%s), %s" - (prop) - (kid ptype)) t - | SetProtoT (_, arg) -> p ~extra:(kid arg) t - | SpecializeT (_, _, _, cache, args_opt, ret) -> p ~extra:begin match args_opt with - | Some args -> spf "%s, [%s], %s" - (specialize_cache cache) (String.concat "; " (List.map kid args)) (kid ret) - | None -> spf "%s, %s" - (specialize_cache cache) (kid ret) - end t - | ObjKitT (use_op, _, resolve_tool, tool, tout) -> p ~extra:(spf "%s, %s, %s" - (string_of_use_op use_op) - (object_kit resolve_tool tool) - (kid tout)) t - | TestPropT (_, _, prop, ptype) -> p ~extra:(spf "(%s), %s" - (propref prop) - (kid ptype)) t - | ThisSpecializeT (_, this, _) -> p ~extra:(spf "%s" (kid this)) t - | ToStringT (_, arg) -> p ~extra:(use_kid arg) t - | UnaryMinusT _ -> p t - | UnifyT (x, y) -> p ~reason:false ~extra:(spf "%s, %s" (kid x) (kid y)) t - | VarianceCheckT (_, args, pol) -> p ~extra:(spf "[%s], %s" - (String.concat "; " (List.map kid args)) (Polarity.string pol)) t - | ConcretizeTypeAppsT _ -> p t - | TypeAppVarianceCheckT _ -> p t - | CondT (_, then_t, else_t, tout) -> p t - ~extra:(spf "%s, %s, %s" - (match then_t with None -> "None" | Some t -> spf "Some (%s)" (kid t)) - (kid else_t) - (kid tout)) - | ExtendsUseT (_, _, nexts, l, u) -> p ~extra:(spf "[%s], %s, %s" - (String.concat "; " (List.map kid nexts)) (kid l) (kid u)) t + | ResolveSpreadsToCustomFunCall _ -> + p ~extra:(string_of_use_op use_op) t) + | SentinelPropTestT (_, l, _key, sense, sentinel, result) -> + p + ~reason:false + ~extra:(spf "%s, %b, %s, %s" (kid l) sense (string_of_sentinel sentinel) (kid result)) + t + | SubstOnPredT _ -> p t + | SuperT _ -> p t + | ImplementsT (_, arg) -> p ~reason:false ~extra:(kid arg) t + | SetElemT (_, _, ix, _, etype, _) -> p ~extra:(spf "%s, %s" (kid ix) (kid etype)) t + | SetPropT (use_op, _, prop, _, _, ptype, _) -> + p ~extra:(spf "%s, (%s), %s" (string_of_use_op use_op) (propref prop) (kid ptype)) t + | SetPrivatePropT (_, _, prop, _, _, _, ptype, _) -> + p ~extra:(spf "(%s), %s" prop (kid ptype)) t + | SetProtoT (_, arg) -> p ~extra:(kid arg) t + | SpecializeT (_, _, _, cache, args_opt, ret) -> + p + ~extra: + begin + match args_opt with + | Some args -> + spf + "%s, [%s], %s" + (specialize_cache cache) + (String.concat "; " (Core_list.map ~f:kid args)) + (kid ret) + | None -> spf "%s, %s" (specialize_cache cache) (kid ret) + end + t + | ObjKitT (use_op, _, resolve_tool, tool, tout) -> + p + ~extra: + (spf "%s, %s, %s" (string_of_use_op use_op) (object_kit resolve_tool tool) (kid tout)) + t + | TestPropT (_, _, prop, ptype) -> p ~extra:(spf "(%s), %s" (propref prop) (kid ptype)) t + | ThisSpecializeT (_, this, _) -> p ~extra:(spf "%s" (kid this)) t + | ToStringT (_, arg) -> p ~extra:(use_kid arg) t + | UnaryMinusT _ -> p t + | UnifyT (x, y) -> p ~reason:false ~extra:(spf "%s, %s" (kid x) (kid y)) t + | VarianceCheckT (_, args, pol) -> + p + ~extra: + (spf "[%s], %s" (String.concat "; " (Core_list.map ~f:kid args)) (Polarity.string pol)) + t + | ConcretizeTypeAppsT _ -> p t + | TypeAppVarianceCheckT _ -> p t + | CondT (_, then_t, else_t, tout) -> + p + t + ~extra: + (spf + "%s, %s, %s" + (match then_t with + | None -> "None" + | Some t -> spf "Some (%s)" (kid t)) + (kid else_t) + (kid tout)) + | ExtendsUseT (_, _, nexts, l, u) -> + p + ~extra: + (spf "[%s], 
%s, %s" (String.concat "; " (Core_list.map ~f:kid nexts)) (kid l) (kid u)) + t + | DestructuringT (_, k, s, tout) -> + p t ~extra:(spf "%s, %s, %s" (string_of_destruct_kind k) (string_of_selector s) (kid tout)) + | ModuleExportsAssignT (_, _, _) -> p t and dump_tvar_ (depth, tvars) cx id = - if ISet.mem id tvars then spf "%d, ^" id else - let stack = ISet.add id tvars in - let open Constraint in - try - match Context.find_tvar cx id with - | Goto g -> spf "%d, Goto %d" id g - | Root { constraints = Resolved t; _ } -> - spf "%d, Resolved %s" id (dump_t_ (depth-1, stack) cx t) - | Root { constraints = Unresolved { lower; upper; _ }; _ } -> - if lower = TypeMap.empty && upper = UseTypeMap.empty - then spf "%d" id - else spf "%d, [%s], [%s]" id - (String.concat "; " (List.rev (TypeMap.fold - (fun t _ acc -> - dump_t_ (depth-1, stack) cx t :: acc - ) lower []))) - (String.concat "; " (List.rev (UseTypeMap.fold - (fun use_t _ acc -> - dump_use_t_ (depth-1, stack) cx use_t :: acc - ) upper []))) - with Context.Tvar_not_found _ -> - string_of_int id + if ISet.mem id tvars then + spf "%d, ^" id + else + let stack = ISet.add id tvars in + Constraint.( + try + match Context.find_tvar cx id with + | Goto g -> spf "%d, Goto %d" id g + | Root { constraints = Resolved (_, t) | FullyResolved (_, t); _ } -> + spf "%d, Resolved %s" id (dump_t_ (depth - 1, stack) cx t) + | Root { constraints = Unresolved { lower; upper; _ }; _ } -> + if lower = TypeMap.empty && upper = UseTypeMap.empty then + spf "%d" id + else + spf + "%d, [%s], [%s]" + id + (String.concat + "; " + (List.rev + (TypeMap.fold (fun t _ acc -> dump_t_ (depth - 1, stack) cx t :: acc) lower []))) + (String.concat + "; " + (List.rev + (UseTypeMap.fold + (fun use_t _ acc -> dump_use_t_ (depth - 1, stack) cx use_t :: acc) + upper + []))) + with Context.Tvar_not_found _ -> spf "Not Found: %d" id) and dump_prop_ (depth, tvars) cx p = - let kid t = dump_t_ (depth-1, tvars) cx t in + let kid t = dump_t_ (depth, tvars) cx t in match p with - | Field (_loc, t, polarity) -> - spf "Field (%s) %s" (string_of_polarity polarity) (kid t) - | Get (_loc, t) -> - spf "Get %s" (kid t) - | Set (_loc, t) -> - spf "Set %s" (kid t) - | GetSet (_loc1, t1, _loc2, t2) -> - spf "Get %s Set %s" (kid t1) (kid t2) - | Method (_loc, t) -> - spf "Method %s" (kid t) + | Field (_loc, t, polarity) -> spf "Field (%s) %s" (string_of_polarity polarity) (kid t) + | Get (_loc, t) -> spf "Get %s" (kid t) + | Set (_loc, t) -> spf "Set %s" (kid t) + | GetSet (_loc1, t1, _loc2, t2) -> spf "Get %s Set %s" (kid t1) (kid t2) + | Method (_loc, t) -> spf "Method %s" (kid t) (* This is the type-dump debugging API. We should make sure these are not called recursively to avoid circumventing one of the termination mechanisms: depth or tvar-set. 
*) -let dump_t ?(depth=3) cx t = - dump_t_ (depth, ISet.empty) cx t +let dump_t ?(depth = 3) cx t = dump_t_ (depth, ISet.empty) cx t -let dump_use_t ?(depth=3) cx t = - dump_use_t_ (depth, ISet.empty) cx t +let dump_use_t ?(depth = 3) cx t = dump_use_t_ (depth, ISet.empty) cx t -let dump_prop ?(depth=3) cx p = - dump_prop_ (depth, ISet.empty) cx p +let dump_prop ?(depth = 3) cx p = dump_prop_ (depth, ISet.empty) cx p -let dump_tvar ?(depth=3) cx id = - dump_tvar_ (depth, ISet.empty) cx id +let dump_tvar ?(depth = 3) cx id = dump_tvar_ (depth, ISet.empty) cx id + +let dump_flow ?(depth = 3) cx (l, u) = + spf "Lower: %s ~>\n Upper: %s" (dump_t ~depth cx l) (dump_use_t ~depth cx u) (*****************************************************) (* scopes and types *) -let string_of_scope_entry = Scope.( - - let string_of_value_binding cx { Entry. - kind; value_state; value_declare_loc; value_assign_loc; specific; general; - } = - spf "{ kind: %s; value_state: %s; value_declare_loc: %S; \ - value_assign_loc: %s; specific: %s; general: %s }" - (Entry.string_of_value_kind kind) - (State.to_string value_state) - (string_of_loc value_declare_loc) - (string_of_loc value_assign_loc) - (dump_t cx specific) - (dump_t cx general) - in - - let string_of_type_binding cx { Entry.type_state; type_loc; _type; - type_binding_kind = _ } = - spf "{ type_state: %s; type_loc: %S; _type: %s }" - (State.to_string type_state) - (string_of_loc type_loc) - (dump_t cx _type) - in - - fun cx -> Entry.(function - | Value r -> spf "Value %s" (string_of_value_binding cx r) - | Type r -> spf "Type %s" (string_of_type_binding cx r) - | Class r -> spf "Class %s" (string_of_int r.class_binding_id) - ) -) +let string_of_scope_entry = + Scope.( + let string_of_value_binding + cx { Entry.kind; value_state; value_declare_loc; value_assign_loc; specific; general } = + spf + "{ kind: %s; value_state: %s; value_declare_loc: %S; value_assign_loc: %s; specific: %s; general: %s }" + (Entry.string_of_value_kind kind) + (State.to_string value_state) + (string_of_aloc value_declare_loc) + (string_of_aloc value_assign_loc) + (dump_t cx specific) + (dump_t cx general) + in + let string_of_type_binding cx { Entry.type_state; type_loc; type_; type_binding_kind = _ } = + spf + "{ type_state: %s; type_loc: %S; type_: %s }" + (State.to_string type_state) + (string_of_aloc type_loc) + (dump_t cx type_) + in + fun cx -> + Entry.( + function + | Value r -> spf "Value %s" (string_of_value_binding cx r) + | Type r -> spf "Type %s" (string_of_type_binding cx r) + | Class r -> spf "Class %s" (ALoc.debug_to_string r.class_binding_id))) let string_of_scope_entries cx entries = - let strings = SMap.fold (fun name entry acc -> - (spf "%s: %s" name (string_of_scope_entry cx entry)) - :: acc - ) entries [] + let strings = + SMap.fold + (fun name entry acc -> spf "%s: %s" name (string_of_scope_entry cx entry) :: acc) + entries + [] |> String.concat "; \n" - in spf "[ %s ]" strings + in + spf "[ %s ]" strings let string_of_scope_refi cx { Scope.refi_loc; refined; original } = - spf "{ refi_loc: %S; refined: %s; original: %s }" - (string_of_loc refi_loc) + spf + "{ refi_loc: %S; refined: %s; original: %s }" + (string_of_aloc refi_loc) (dump_t cx refined) (dump_t cx original) - let string_of_scope_refis cx refis = - let strings = Key_map.fold (fun key refi acc -> - (spf "%s: %s" - (Key.string_of_key key) - (string_of_scope_refi cx refi)) - :: acc - ) refis [] + let strings = + Key_map.fold + (fun key refi acc -> + spf "%s: %s" (Key.string_of_key key) 
(string_of_scope_refi cx refi) :: acc) + refis + [] |> String.concat ";\n" - in spf "[ %s ]" strings + in + spf "[ %s ]" strings -let string_of_scope cx scope = Scope.( - spf "{ kind: %s;\nentries:\n%s\nrefis:\n%s\n}" - (string_of_kind scope.kind) - (string_of_scope_entries cx scope.entries) - (string_of_scope_refis cx scope.refis) -) +let string_of_scope cx scope = + Scope.( + spf + "{ kind: %s;\nentries:\n%s\nrefis:\n%s\n}" + (string_of_kind scope.kind) + (string_of_scope_entries cx scope.entries) + (string_of_scope_refis cx scope.refis)) let string_of_reason cx reason = - let strip_root = if Context.should_strip_root cx - then Some (Context.root cx) - else None in + let strip_root = + if Context.should_strip_root cx then + Some (Context.root cx) + else + None + in Reason.string_of_reason ~strip_root reason let string_of_file cx = @@ -2320,490 +2340,563 @@ let string_of_file cx = | false -> filename | true -> let root_str = Path.to_string (Context.root cx) ^ Filename.dir_sep in - if String_utils.string_starts_with filename root_str - then Files.relative_path root_str filename - else filename - -let string_of_selector = function - | Elem _ -> "Elem _" (* TODO print info about the key *) - | Prop x -> spf "Prop %s" x - | ArrRest i -> spf "ArrRest %i" i - | ObjRest xs -> spf "ObjRest [%s]" (String.concat "; " xs) - | Default -> "Default" - | Become -> "Become" - | Refine p -> spf "Refine with %s" (string_of_predicate p) - -let string_of_destructor = function - | NonMaybeType -> "NonMaybeType" - | PropertyType x -> spf "PropertyType %s" x - | ElementType _ -> "ElementType" - | Bind _ -> "Bind" - | ReadOnlyType -> "ReadOnly" - | SpreadType _ -> "Spread" - | RestType _ -> "Rest" - | ValuesType -> "Values" - | CallType _ -> "CallType" - | TypeMap (TupleMap _) -> "TupleMap" - | TypeMap (ObjectMap _) -> "ObjectMap" - | TypeMap (ObjectMapi _) -> "ObjectMapi" - | ReactElementPropsType -> "ReactElementProps" - | ReactElementConfigType -> "ReactElementConfig" - | ReactElementRefType -> "ReactElementRef" - -let string_of_default = Default.fold - ~expr:(fun (loc, _) -> - spf "Expr %s" (string_of_loc loc)) - ~selector:(fun _ str sel -> - spf "Selector (%s) (%s)" str (string_of_selector sel)) - ~cons:(fun str default -> - spf "Cons (%s) (%s)" str default) - -let dump_flow_error = - let open Flow_error in - let string_of_use_op = string_of_use_op_rec in - let dump_internal_error = function - | PackageHeapNotFound _ -> "PackageHeapNotFound" - | AbnormalControlFlow -> "AbnormalControlFlow" - | MethodNotAFunction -> "MethodNotAFunction" - | OptionalMethod -> "OptionalMethod" - | OpenPredWithoutSubst -> "OpenPredWithoutSubst" - | PredFunWithoutParamNames -> "PredFunWithoutParamNames" - | UnsupportedGuardPredicate _ -> "UnsupportedGuardPredicate" - | BreakEnvMissingForCase -> "BreakEnvMissingForCase" - | PropertyDescriptorPropertyCannotBeRead -> - "PropertyDescriptorPropertyCannotBeRead" - | ForInLHS -> "ForInLHS" - | ForOfLHS -> "ForOfLHS" - | InstanceLookupComputed -> "InstanceLookupComputed" - | PropRefComputedOpen -> "PropRefComputedOpen" - | PropRefComputedLiteral -> "PropRefComputedLiteral" - | ShadowReadComputed -> "ShadowReadComputed" - | ShadowWriteComputed -> "ShadowWriteComputed" - | RestParameterNotIdentifierPattern -> "RestParameterNotIdentifierPattern" - | InterfaceTypeSpread -> "InterfaceTypeSpread" - | Flow_error.DebugThrow -> "DebugThrow" - | MergeTimeout _ -> "MergeTimeout" - | MergeJobException _ -> "MergeJobException" - | UnexpectedTypeapp _ -> "UnexpectedTypeapp" - in - let dump_upper_kind 
= function - | IncompatibleGetPropT _ -> "IncompatibleGetPropT" - | IncompatibleSetPropT _ -> "IncompatibleSetPropT" - | IncompatibleMatchPropT _ -> "IncompatibleSetPropT" - | IncompatibleGetPrivatePropT -> "IncompatibleGetPrivatePropT" - | IncompatibleSetPrivatePropT -> "IncompatibleSetPrivatePropT" - | IncompatibleMethodT _ -> "IncompatibleMethodT" - | IncompatibleCallT -> "IncompatibleCallT" - | IncompatibleConstructorT -> "IncompatibleConstructorT" - | IncompatibleGetElemT _ -> "IncompatibleGetElemT" - | IncompatibleSetElemT _ -> "IncompatibleSetElemT" - | IncompatibleCallElemT _ -> "IncompatibleCallElemT" - | IncompatibleElemTOfArrT -> "IncompatibleElemTOfArrT" - | IncompatibleObjAssignFromTSpread -> "IncompatibleObjAssignFromTSpread" - | IncompatibleObjAssignFromT -> "IncompatibleObjAssignFromT" - | IncompatibleObjRestT -> "IncompatibleObjRestT" - | IncompatibleObjSealT -> "IncompatibleObjSealT" - | IncompatibleArrRestT -> "IncompatibleArrRestT" - | IncompatibleSuperT -> "IncompatibleSuperT" - | IncompatibleMixinT -> "IncompatibleMixinT" - | IncompatibleSpecializeT -> "IncompatibleSpecializeT" - | IncompatibleThisSpecializeT -> "IncompatibleThisSpecializeT" - | IncompatibleVarianceCheckT -> "IncompatibleVarianceCheckT" - | IncompatibleGetKeysT -> "IncompatibleGetKeysT" - | IncompatibleHasOwnPropT _ -> "IncompatibleHasOwnPropT" - | IncompatibleGetValuesT -> "IncompatibleGetValuesT" - | IncompatibleUnaryMinusT -> "IncompatibleUnaryMinusT" - | IncompatibleMapTypeTObject -> "IncompatibleMapTypeTObject" - | IncompatibleTypeAppVarianceCheckT -> "IncompatibleTypeAppVarianceCheckT" - | IncompatibleGetStaticsT -> "IncompatibleGetStaticsT" - | IncompatibleUnclassified ctor -> spf "IncompatibleUnclassified %S" ctor - in - fun cx err -> - match err with - | EIncompatible { - lower = (reason_lower, _lower_kind); - upper = (reason_upper, upper_kind); - use_op; - branches = _; - } -> - spf "EIncompatible { lower = (%s, _); upper = (%s, %s); use_op = %s; branches = _ }" + if String_utils.string_starts_with filename root_str then + Files.relative_path root_str filename + else + filename + +let string_of_default = + Default.fold + ~expr:(fun (loc, _) -> spf "Expr %s" (string_of_loc loc)) + ~selector:(fun _ str sel -> spf "Selector (%s) (%s)" str (string_of_selector sel)) + ~cons:(fun str default -> spf "Cons (%s) (%s)" str default) + +let dump_error_message = + Error_message.( + let string_of_use_op = string_of_use_op_rec in + let dump_internal_error = function + | PackageHeapNotFound _ -> "PackageHeapNotFound" + | AbnormalControlFlow -> "AbnormalControlFlow" + | MethodNotAFunction -> "MethodNotAFunction" + | OptionalMethod -> "OptionalMethod" + | OpenPredWithoutSubst -> "OpenPredWithoutSubst" + | PredFunWithoutParamNames -> "PredFunWithoutParamNames" + | UnsupportedGuardPredicate _ -> "UnsupportedGuardPredicate" + | BreakEnvMissingForCase -> "BreakEnvMissingForCase" + | PropertyDescriptorPropertyCannotBeRead -> "PropertyDescriptorPropertyCannotBeRead" + | ForInLHS -> "ForInLHS" + | ForOfLHS -> "ForOfLHS" + | InstanceLookupComputed -> "InstanceLookupComputed" + | PropRefComputedOpen -> "PropRefComputedOpen" + | PropRefComputedLiteral -> "PropRefComputedLiteral" + | ShadowReadComputed -> "ShadowReadComputed" + | ShadowWriteComputed -> "ShadowWriteComputed" + | RestParameterNotIdentifierPattern -> "RestParameterNotIdentifierPattern" + | InterfaceTypeSpread -> "InterfaceTypeSpread" + | Error_message.DebugThrow -> "DebugThrow" + | MergeTimeout _ -> "MergeTimeout" + | MergeJobException _ -> 
"MergeJobException" + | CheckTimeout _ -> "CheckTimeout" + | CheckJobException _ -> "CheckJobException" + | UnexpectedTypeapp _ -> "UnexpectedTypeapp" + in + let dump_upper_kind = function + | IncompatibleGetPropT _ -> "IncompatibleGetPropT" + | IncompatibleSetPropT _ -> "IncompatibleSetPropT" + | IncompatibleMatchPropT _ -> "IncompatibleSetPropT" + | IncompatibleGetPrivatePropT -> "IncompatibleGetPrivatePropT" + | IncompatibleSetPrivatePropT -> "IncompatibleSetPrivatePropT" + | IncompatibleMethodT _ -> "IncompatibleMethodT" + | IncompatibleCallT -> "IncompatibleCallT" + | IncompatibleMixedCallT -> "IncompatibleMixedCallT" + | IncompatibleConstructorT -> "IncompatibleConstructorT" + | IncompatibleGetElemT _ -> "IncompatibleGetElemT" + | IncompatibleSetElemT _ -> "IncompatibleSetElemT" + | IncompatibleCallElemT _ -> "IncompatibleCallElemT" + | IncompatibleElemTOfArrT -> "IncompatibleElemTOfArrT" + | IncompatibleObjAssignFromTSpread -> "IncompatibleObjAssignFromTSpread" + | IncompatibleObjAssignFromT -> "IncompatibleObjAssignFromT" + | IncompatibleObjRestT -> "IncompatibleObjRestT" + | IncompatibleObjSealT -> "IncompatibleObjSealT" + | IncompatibleArrRestT -> "IncompatibleArrRestT" + | IncompatibleSuperT -> "IncompatibleSuperT" + | IncompatibleMixinT -> "IncompatibleMixinT" + | IncompatibleSpecializeT -> "IncompatibleSpecializeT" + | IncompatibleThisSpecializeT -> "IncompatibleThisSpecializeT" + | IncompatibleVarianceCheckT -> "IncompatibleVarianceCheckT" + | IncompatibleGetKeysT -> "IncompatibleGetKeysT" + | IncompatibleHasOwnPropT _ -> "IncompatibleHasOwnPropT" + | IncompatibleGetValuesT -> "IncompatibleGetValuesT" + | IncompatibleUnaryMinusT -> "IncompatibleUnaryMinusT" + | IncompatibleMapTypeTObject -> "IncompatibleMapTypeTObject" + | IncompatibleTypeAppVarianceCheckT -> "IncompatibleTypeAppVarianceCheckT" + | IncompatibleGetStaticsT -> "IncompatibleGetStaticsT" + | IncompatibleUnclassified ctor -> spf "IncompatibleUnclassified %S" ctor + in + fun cx err -> + match err with + | EIncompatible + { + lower = (reason_lower, _lower_kind); + upper = (reason_upper, upper_kind); + use_op; + branches = _; + } -> + spf + "EIncompatible { lower = (%s, _); upper = (%s, %s); use_op = %s; branches = _ }" (dump_reason cx reason_lower) (dump_reason cx reason_upper) (dump_upper_kind upper_kind) (match use_op with | None -> "None" | Some use_op -> spf "Some(%s)" (string_of_use_op use_op)) - | EIncompatibleDefs { use_op; reason_lower; reason_upper; branches = _ } -> - spf "EIncompatibleDefs { reason_lower = %s; reason_upper = %s; use_op = %s; branches = _ }" + | EIncompatibleDefs { use_op; reason_lower; reason_upper; branches = _ } -> + spf + "EIncompatibleDefs { reason_lower = %s; reason_upper = %s; use_op = %s; branches = _ }" (dump_reason cx reason_lower) (dump_reason cx reason_upper) (string_of_use_op use_op) - | EIncompatibleProp { reason_prop; reason_obj; special=_; prop=_; use_op=_ } -> - spf "EIncompatibleProp { reason_prop = %s; reason_obj = %s; special = _; prop = _; use_op = _ }" + | EIncompatibleProp { reason_prop; reason_obj; special = _; prop = _; use_op = _ } -> + spf + "EIncompatibleProp { reason_prop = %s; reason_obj = %s; special = _; prop = _; use_op = _ }" (dump_reason cx reason_prop) (dump_reason cx reason_obj) - | EDebugPrint (reason, _) -> - spf "EDebugPrint (%s, _)" (dump_reason cx reason) - | EImportValueAsType (reason, str) -> + | EDebugPrint (reason, _) -> spf "EDebugPrint (%s, _)" (dump_reason cx reason) + | EExportValueAsType (reason, str) -> + spf "EExportValueAsType (%s, %s)" 
(dump_reason cx reason) str + | EImportValueAsType (reason, str) -> spf "EImportValueAsType (%s, %s)" (dump_reason cx reason) str - | EImportTypeAsTypeof (reason, str) -> + | EImportTypeAsTypeof (reason, str) -> spf "EImportTypeAsTypeof (%s, %s)" (dump_reason cx reason) str - | EImportTypeAsValue (reason, str) -> + | EImportTypeAsValue (reason, str) -> spf "EImportTypeAsValue (%s, %s)" (dump_reason cx reason) str - | ERefineAsValue (reason, str) -> - spf "ERefineAsValue (%s, %s)" (dump_reason cx reason) str - | ENoDefaultExport (reason, module_name, _) -> + | ERefineAsValue (reason, str) -> spf "ERefineAsValue (%s, %s)" (dump_reason cx reason) str + | ENoDefaultExport (reason, module_name, _) -> spf "ENoDefaultExport (%s, %s)" (dump_reason cx reason) module_name - | EOnlyDefaultExport (reason, module_name, export_name) -> + | EOnlyDefaultExport (reason, module_name, export_name) -> spf "EOnlyDefaultExport (%s, %s, %s)" (dump_reason cx reason) module_name export_name - | ENoNamedExport (reason, module_name, export_name, _) -> + | ENoNamedExport (reason, module_name, export_name, _) -> spf "ENoNamedExport (%s, %s, %s)" (dump_reason cx reason) module_name export_name - | EMissingTypeArgs { reason_tapp; reason_arity; min_arity; max_arity } -> - spf "EMissingTypeArgs { reason_tapp=%s; reason_arity=%s; min_arity=%d; max_arity=%d }" + | EMissingTypeArgs { reason_tapp; reason_arity; min_arity; max_arity } -> + spf + "EMissingTypeArgs { reason_tapp=%s; reason_arity=%s; min_arity=%d; max_arity=%d }" (dump_reason cx reason_tapp) (dump_reason cx reason_arity) min_arity max_arity - | EValueUsedAsType (reason1, reason2) -> - spf "EValueUsedAsType (%s, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - | EExpectedStringLit ((reason1, reason2), expected, literal, use_op) -> - let literal = match literal with - | Literal (_, str) -> spf "%S" str - | Truthy -> "truthy" - | AnyLiteral -> "any" - in - spf "EExpectedStringLit ((%s, %s), %S, %S, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - expected - literal + | EValueUsedAsType { reason_use } -> + spf "EValueUsedAsType { use = %s }" (dump_reason cx reason_use) + | EExpectedStringLit { reason_lower; reason_upper; use_op } -> + spf + "EExpectedStringLit { reason_lower = %s; reason_upper = %s; use_op = %s }" + (dump_reason cx reason_lower) + (dump_reason cx reason_upper) (string_of_use_op use_op) - | EExpectedNumberLit ((reason1, reason2), (_, expected), literal, use_op) -> - let literal = match literal with - | Literal (_, (_, raw)) -> spf "%S" raw - | Truthy -> "truthy" - | AnyLiteral -> "any" - in - spf "EExpectedNumberLit ((%s, %s), %s, %s, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - expected - literal + | EExpectedNumberLit { reason_lower; reason_upper; use_op } -> + spf + "EExpectedNumberLit { reason_lower = %s; reason_upper = %s; use_op = %s }" + (dump_reason cx reason_lower) + (dump_reason cx reason_upper) (string_of_use_op use_op) - | EExpectedBooleanLit ((reason1, reason2), expected, literal, use_op) -> - let literal = match literal with - | Some b -> spf "%b" b - | None -> "any" - in - spf "EExpectedBooleanLit ((%s, %s), %b, %s, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - expected - literal + | EExpectedBooleanLit { reason_lower; reason_upper; use_op } -> + spf + "EExpectedBooleanLit { reason_lower = %s; reason_upper = %s; use_op = %s }" + (dump_reason cx reason_lower) + (dump_reason cx reason_upper) (string_of_use_op use_op) - | EPropNotFound (prop, (prop_reason, obj_reason), use_op) -> - spf 
"EPropNotFound (%s, %s, %s, %s)" - (match prop with Some prop -> spf "Some %s" prop | None -> "None") + | EPropNotFound (prop, (prop_reason, obj_reason), use_op) -> + spf + "EPropNotFound (%s, %s, %s, %s)" + (match prop with + | Some prop -> spf "Some %s" prop + | None -> "None") (dump_reason cx prop_reason) (dump_reason cx obj_reason) (string_of_use_op use_op) - | EPropAccess ((reason1, reason2), x, _, _, _) -> - spf "EPropAccess ((%s, %s), %s, _, _, _)" - (dump_reason cx reason1) - (dump_reason cx reason2) - (match x with Some x -> spf "%S" x | None -> "(computed)") - | EPropPolarityMismatch ((reason1, reason2), x, _, _) -> - spf "EPropPolarityMismatch ((%s, %s), %s, _, _)" + | EPropNotReadable { reason_prop; prop_name; use_op } -> + spf + "EPropNotReadable { reason_prop = %s; prop_name = %s; use_op = %s }" + (dump_reason cx reason_prop) + (match prop_name with + | Some x -> spf "%S" x + | None -> "(computed)") + (string_of_use_op use_op) + | EPropNotWritable { reason_prop; prop_name; use_op } -> + spf + "EPropNotWritable { reason_prop = %s; prop_name = %s; use_op = %s }" + (dump_reason cx reason_prop) + (match prop_name with + | Some x -> spf "%S" x + | None -> "(computed)") + (string_of_use_op use_op) + | EPropPolarityMismatch ((reason1, reason2), x, _, _) -> + spf + "EPropPolarityMismatch ((%s, %s), %s, _, _)" (dump_reason cx reason1) (dump_reason cx reason2) - (match x with Some x -> spf "%S" x | None -> "(computed)") - | EPolarityMismatch { reason; name; expected_polarity; actual_polarity } -> - spf "EPolarityMismatch { reason=%s; name=%S; expected_polarity=%s; actual_polarity=%s }" + (match x with + | Some x -> spf "%S" x + | None -> "(computed)") + | EPolarityMismatch { reason; name; expected_polarity; actual_polarity } -> + spf + "EPolarityMismatch { reason=%s; name=%S; expected_polarity=%s; actual_polarity=%s }" (dump_reason cx reason) name (Polarity.string expected_polarity) (Polarity.string actual_polarity) - | EStrictLookupFailed ((reason1, reason2), reason, x, use_op) -> - spf "EStrictLookupFailed ((%s, %s), %s, %s, %s)" + | EStrictLookupFailed ((reason1, reason2), reason, x, use_op) -> + spf + "EStrictLookupFailed ((%s, %s), %s, %s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) (dump_reason cx reason) - (match x with Some x -> spf "Some(%S)" x | None -> "None") + (match x with + | Some x -> spf "Some(%S)" x + | None -> "None") (match use_op with | Some use_op -> spf "Some(%s)" (string_of_use_op use_op) | None -> "None") - | EPrivateLookupFailed ((reason1, reason2), x, use_op) -> - spf "EPrivateLookupFailed ((%s, %s), %s, %s)" + | EPrivateLookupFailed ((reason1, reason2), x, use_op) -> + spf + "EPrivateLookupFailed ((%s, %s), %s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) x (string_of_use_op use_op) - | EAdditionMixed (reason, use_op) -> - spf "EAdditionMixed (%s, %s)" - (dump_reason cx reason) - (string_of_use_op use_op) - | EComparison (reason1, reason2) -> - spf "EComparison (%s, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - | ETupleArityMismatch ((reason1, reason2), arity1, arity2, use_op) -> - spf "ETupleArityMismatch (%s, %s, %d, %d, %s)" + | EAdditionMixed (reason, use_op) -> + spf "EAdditionMixed (%s, %s)" (dump_reason cx reason) (string_of_use_op use_op) + | EComparison (reason1, reason2) -> + spf "EComparison (%s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) + | ETupleArityMismatch ((reason1, reason2), arity1, arity2, use_op) -> + spf + "ETupleArityMismatch (%s, %s, %d, %d, %s)" (dump_reason cx reason1) (dump_reason cx 
reason2) - arity1 arity2 + arity1 + arity2 (string_of_use_op use_op) - | ENonLitArrayToTuple ((reason1, reason2), use_op) -> - spf "ENonLitArrayToTuple ((%s, %s), %s)" + | ENonLitArrayToTuple ((reason1, reason2), use_op) -> + spf + "ENonLitArrayToTuple ((%s, %s), %s)" (dump_reason cx reason1) (dump_reason cx reason2) (string_of_use_op use_op) - | ETupleOutOfBounds ((reason1, reason2), arity1, arity2, use_op) -> - spf "ETupleOutOfBounds (%s, %s, %d, %d, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - arity1 arity2 + | ETupleOutOfBounds { use_op; reason; reason_op; length; index } -> + spf + "ETupleOutOfBounds { use_op = %s; reason = %s; reason_op = %s; length = %d; index = %s }" + (string_of_use_op use_op) + (dump_reason cx reason) + (dump_reason cx reason_op) + length + index + | ETupleNonIntegerIndex { use_op; reason; index } -> + spf + "ETupleNonIntegerIndex { use_op = %s; reason = %s; index = %s }" + (string_of_use_op use_op) + (dump_reason cx reason) + index + | ETupleUnsafeWrite { reason; use_op } -> + spf + "ETupleUnsafeWrite { reason = %s; use_op = %s }" + (dump_reason cx reason) (string_of_use_op use_op) - | ETupleUnsafeWrite ((reason1, reason2), use_op) -> - spf "ETupleUnsafeWrite (%s, %s, %s)" + | EROArrayWrite ((reason1, reason2), use_op) -> + spf + "EROArrayWrite (%s, %s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) (string_of_use_op use_op) - | EUnionSpeculationFailed { use_op; reason; reason_op; branches = _ } -> - spf "EUnionSpeculationFailed { use_op = %s; reason = %s; reason_op = %s; branches = _ }" + | EUnionSpeculationFailed { use_op; reason; reason_op; branches = _ } -> + spf + "EUnionSpeculationFailed { use_op = %s; reason = %s; reason_op = %s; branches = _ }" (string_of_use_op use_op) (dump_reason cx reason) (dump_reason cx reason_op) - | ESpeculationAmbiguous ((reason1, reason2), _, _, _) -> - spf "ESpeculationAmbiguous ((%s, %s), _, _, _)" - (dump_reason cx reason1) - (dump_reason cx reason2) - | EIncompatibleWithExact ((reason1, reason2), use_op) -> - spf "EIncompatibleWithExact ((%s, %s), %s)" + | ESpeculationAmbiguous { reason; _ } -> + spf "ESpeculationAmbiguous { reason = %s; _ }" (dump_reason cx reason) + | EIncompatibleWithExact ((reason1, reason2), use_op) -> + spf + "EIncompatibleWithExact ((%s, %s), %s)" (dump_reason cx reason1) (dump_reason cx reason2) (string_of_use_op use_op) - | EUnsupportedExact (reason1, reason2) -> - spf "EUnsupportedExact (%s, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - | EIdxArity reason -> - spf "EIdxArity (%s)" (dump_reason cx reason) - | EIdxUse1 reason -> - spf "EIdxUse1 (%s)" (dump_reason cx reason) - | EIdxUse2 reason -> - spf "EIdxUse2 (%s)" (dump_reason cx reason) - | EUnexpectedThisType loc -> - spf "EUnexpectedThisType (%s)" (string_of_loc loc) - | ETypeParamArity (loc, expected) -> - spf "ETypeParamArity (%s, %d)" (string_of_loc loc) expected - | ETypeParamMinArity (loc, expected) -> - spf "ETypeParamMinArity (%s, %d)" (string_of_loc loc) expected - | ECallTypeArity { call_loc; is_new; reason_arity; expected_arity } -> - spf "ECallTypeArity { call_loc=%s; is_new=%b; reason_arity=%s; expected_arity=%d; }" - (string_of_loc call_loc) is_new (dump_reason cx reason_arity) expected_arity - | ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity) -> - spf "ETooManyTypeArgs (%s, %s, %d)" + | EUnsupportedExact (reason1, reason2) -> + spf "EUnsupportedExact (%s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) + | EIdxArity reason -> spf "EIdxArity (%s)" (dump_reason cx reason) + 
| EIdxUse1 reason -> spf "EIdxUse1 (%s)" (dump_reason cx reason) + | EIdxUse2 reason -> spf "EIdxUse2 (%s)" (dump_reason cx reason) + | EUnexpectedThisType loc -> spf "EUnexpectedThisType (%s)" (string_of_aloc loc) + | ETypeParamArity (loc, expected) -> + spf "ETypeParamArity (%s, %d)" (string_of_aloc loc) expected + | ETypeParamMinArity (loc, expected) -> + spf "ETypeParamMinArity (%s, %d)" (string_of_aloc loc) expected + | ECallTypeArity { call_loc; is_new; reason_arity; expected_arity } -> + spf + "ECallTypeArity { call_loc=%s; is_new=%b; reason_arity=%s; expected_arity=%d; }" + (string_of_aloc call_loc) + is_new + (dump_reason cx reason_arity) + expected_arity + | ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity) -> + spf + "ETooManyTypeArgs (%s, %s, %d)" (dump_reason cx reason_tapp) (dump_reason cx reason_arity) maximum_arity - | ETooFewTypeArgs (reason_tapp, reason_arity, minimum_arity) -> - spf "ETooFewTypeArgs (%s, %s, %d)" + | ETooFewTypeArgs (reason_tapp, reason_arity, minimum_arity) -> + spf + "ETooFewTypeArgs (%s, %s, %d)" (dump_reason cx reason_tapp) (dump_reason cx reason_arity) minimum_arity - | EInvalidTypeArgs (reason_tapp, reason_arity) -> - spf "EInvalidTypeArgs (%s, %s)" - (dump_reason cx reason_tapp) - (dump_reason cx reason_arity) - | EPropertyTypeAnnot loc -> - spf "EPropertyTypeAnnot (%s)" (string_of_loc loc) - | EExportsAnnot loc -> - spf "EExportsAnnot (%s)" (string_of_loc loc) - | ECharSetAnnot loc -> - spf "ECharSetAnnot (%s)" (string_of_loc loc) - | EInvalidCharSet { invalid = (reason, _); valid; use_op } -> - spf "EInvalidCharSet { invalid = (%s, _); valid = %s; use_op = %s }" + | EInvalidTypeArgs (reason_tapp, reason_arity) -> + spf "EInvalidTypeArgs (%s, %s)" (dump_reason cx reason_tapp) (dump_reason cx reason_arity) + | EPropertyTypeAnnot loc -> spf "EPropertyTypeAnnot (%s)" (string_of_aloc loc) + | EExportsAnnot loc -> spf "EExportsAnnot (%s)" (string_of_aloc loc) + | ECharSetAnnot loc -> spf "ECharSetAnnot (%s)" (string_of_aloc loc) + | EInvalidCharSet { invalid = (reason, _); valid; use_op } -> + spf + "EInvalidCharSet { invalid = (%s, _); valid = %s; use_op = %s }" (dump_reason cx reason) (dump_reason cx valid) (string_of_use_op use_op) - | EUnsupportedKeyInObjectType loc -> - spf "EUnsupportedKeyInObjectType (%s)" (string_of_loc loc) - | EPredAnnot loc -> - spf "EPredAnnot (%s)" (string_of_loc loc) - | ERefineAnnot loc -> - spf "ERefineAnnot (%s)" (string_of_loc loc) - | EUnexpectedTypeof loc -> - spf "EUnexpectedTypeof (%s)" (string_of_loc loc) - | EFunPredCustom ((reason1, reason2), msg) -> - spf "EFunPredCustom (%s, %s, %S)" - (dump_reason cx reason1) - (dump_reason cx reason2) - msg - | EFunctionIncompatibleWithShape (lower, upper, use_op) -> - spf "EFunctionIncompatibleWithShape (%s, %s, %s)" + | EUnsupportedKeyInObjectType loc -> + spf "EUnsupportedKeyInObjectType (%s)" (string_of_aloc loc) + | EPredAnnot loc -> spf "EPredAnnot (%s)" (string_of_aloc loc) + | ERefineAnnot loc -> spf "ERefineAnnot (%s)" (string_of_aloc loc) + | ETrustedAnnot loc -> spf "ETrustedAnnot (%s)" (string_of_aloc loc) + | EPrivateAnnot loc -> spf "EPrivateAnnot (%s)" (string_of_aloc loc) + | EUnexpectedTypeof loc -> spf "EUnexpectedTypeof (%s)" (string_of_aloc loc) + | EFunPredCustom ((reason1, reason2), msg) -> + spf "EFunPredCustom (%s, %s, %S)" (dump_reason cx reason1) (dump_reason cx reason2) msg + | EIncompatibleWithShape (lower, upper, use_op) -> + spf + "EIncompatibleWithShape (%s, %s, %s)" (dump_reason cx lower) (dump_reason cx upper) (string_of_use_op 
use_op) - | EInternal (loc, err) -> - spf "EInternal (%s, %s)" (string_of_loc loc) (dump_internal_error err) - | EUnsupportedSyntax (loc, _) -> - spf "EUnsupportedSyntax (%s, _)" (string_of_loc loc) - | EUseArrayLiteral loc -> - spf "EUseArrayLiteral (%s)" (string_of_loc loc) - | EMissingAnnotation (reason, _) -> - spf "EMissingAnnotation (%s)" (dump_reason cx reason) - | EBindingError (_binding_error, loc, x, entry) -> - spf "EBindingError (_, %s, %s, %s)" - (string_of_loc loc) + | EInternal (loc, err) -> + spf "EInternal (%s, %s)" (string_of_aloc loc) (dump_internal_error err) + | EUnsupportedSyntax (loc, _) -> spf "EUnsupportedSyntax (%s, _)" (string_of_aloc loc) + | EUseArrayLiteral loc -> spf "EUseArrayLiteral (%s)" (string_of_aloc loc) + | EMissingAnnotation (reason, _) -> spf "EMissingAnnotation (%s)" (dump_reason cx reason) + | EBindingError (_binding_error, loc, x, entry) -> + spf + "EBindingError (_, %s, %s, %s)" + (string_of_aloc loc) x (Scope.Entry.string_of_kind entry) - | ERecursionLimit (reason1, reason2) -> - spf "ERecursionLimit (%s, %s)" - (dump_reason cx reason1) - (dump_reason cx reason2) - | EModuleOutsideRoot (loc, name) -> - spf "EModuleOutsideRoot (%s, %S)" (string_of_loc loc) name - | EExperimentalDecorators loc -> - spf "EExperimentalDecorators (%s)" (string_of_loc loc) - | EExperimentalClassProperties (loc, static) -> - spf "EExperimentalClassProperties (%s, %b)" (string_of_loc loc) static - | EUnsafeGetSet loc -> - spf "EUnsafeGetSet (%s)" (string_of_loc loc) - | EExperimentalExportStarAs loc -> - spf "EExperimentalExportStarAs (%s)" (string_of_loc loc) - | EIndeterminateModuleType loc -> - spf "EIndeterminateModuleType (%s)" (string_of_loc loc) - | EBadExportPosition loc -> - spf "EBadExportPosition (%s)" (string_of_loc loc) - | EBadExportContext (name, loc) -> - spf "EBadExportContext (%s, %s)" name (string_of_loc loc) - | EUnreachable loc -> - spf "EUnreachable (%s)" (string_of_loc loc) - | EInvalidObjectKit { reason; reason_op; use_op; _ } -> - spf "EInvalidObjectKit { reason = %s; reason_op = %s; use_op = %s }" + | ERecursionLimit (reason1, reason2) -> + spf "ERecursionLimit (%s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) + | EModuleOutsideRoot (loc, name) -> + spf "EModuleOutsideRoot (%s, %S)" (string_of_aloc loc) name + | EMalformedPackageJson (loc, error) -> + spf "EMalformedPackageJson (%s, %S)" (string_of_aloc loc) error + | EExperimentalDecorators loc -> spf "EExperimentalDecorators (%s)" (string_of_aloc loc) + | EExperimentalClassProperties (loc, static) -> + spf "EExperimentalClassProperties (%s, %b)" (string_of_aloc loc) static + | EUnsafeGetSet loc -> spf "EUnsafeGetSet (%s)" (string_of_aloc loc) + | EUninitializedInstanceProperty (loc, err) -> + spf + "EUninitializedInstanceProperty (%s, %s)" + (string_of_aloc loc) + Lints.( + match err with + | PropertyNotDefinitelyInitialized -> "PropertyNotDefinitelyInitialized" + | ReadFromUninitializedProperty -> "ReadFromUninitializedProperty" + | MethodCallBeforeEverythingInitialized -> "MethodCallBeforeEverythingInitialized" + | PropertyFunctionCallBeforeEverythingInitialized -> + "PropertyFunctionCallBeforeEverythingInitialized" + | ThisBeforeEverythingInitialized -> "ThisBeforeEverythingInitialized") + | EExperimentalExportStarAs loc -> spf "EExperimentalExportStarAs (%s)" (string_of_aloc loc) + | EExperimentalEnums loc -> spf "EExperimentalEnums (%s)" (string_of_aloc loc) + | EIndeterminateModuleType loc -> spf "EIndeterminateModuleType (%s)" (string_of_aloc loc) + | EBadExportPosition loc 
-> spf "EBadExportPosition (%s)" (string_of_aloc loc) + | EBadExportContext (name, loc) -> spf "EBadExportContext (%s, %s)" name (string_of_aloc loc) + | EUnreachable loc -> spf "EUnreachable (%s)" (string_of_aloc loc) + | EInvalidObjectKit { reason; reason_op; use_op } -> + spf + "EInvalidObjectKit { reason = %s; reason_op = %s; use_op = %s }" (dump_reason cx reason) (dump_reason cx reason_op) (string_of_use_op use_op) - | EInvalidTypeof (loc, name) -> - spf "EInvalidTypeof (%s, %S)" (string_of_loc loc) name - | EBinaryInLHS reason -> - spf "EBinaryInLHS (%s)" (dump_reason cx reason) - | EBinaryInRHS reason -> - spf "EBinaryInRHS (%s)" (dump_reason cx reason) - | EArithmeticOperand reason -> - spf "EArithmeticOperand (%s)" (dump_reason cx reason) - | EForInRHS reason -> - spf "EForInRHS (%s)" (dump_reason cx reason) - | EObjectComputedPropertyAccess (reason1, reason2) -> - spf "EObjectComputedPropertyAccess (%s, %s)" + | EInvalidTypeof (loc, name) -> spf "EInvalidTypeof (%s, %S)" (string_of_aloc loc) name + | EBinaryInLHS reason -> spf "EBinaryInLHS (%s)" (dump_reason cx reason) + | EBinaryInRHS reason -> spf "EBinaryInRHS (%s)" (dump_reason cx reason) + | EArithmeticOperand reason -> spf "EArithmeticOperand (%s)" (dump_reason cx reason) + | EForInRHS reason -> spf "EForInRHS (%s)" (dump_reason cx reason) + | EObjectComputedPropertyAccess (reason1, reason2) -> + spf + "EObjectComputedPropertyAccess (%s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) - | EObjectComputedPropertyAssign (reason1, reason2) -> - spf "EObjectComputedPropertyAssign (%s, %s)" + | EObjectComputedPropertyAssign (reason1, reason2) -> + spf + "EObjectComputedPropertyAssign (%s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) - | EInvalidLHSInAssignment loc -> - spf "EInvalidLHSInAssignment (%s)" (string_of_loc loc) - | EIncompatibleWithUseOp (reason1, reason2, use_op) -> - spf "EIncompatibleWithUseOp (%s, %s, %s)" + | EInvalidLHSInAssignment loc -> spf "EInvalidLHSInAssignment (%s)" (string_of_aloc loc) + | EIncompatibleWithUseOp (reason1, reason2, use_op) -> + spf + "EIncompatibleWithUseOp (%s, %s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) (string_of_use_op use_op) - | EUnsupportedImplements reason -> - spf "EUnsupportedImplements (%s)" (dump_reason cx reason) - | EReactKit ((reason1, reason2), _, use_op) -> - spf "EReactKit (%s, %s, _, %s)" + | ETrustIncompatibleWithUseOp (reason1, reason2, use_op) -> + spf + "ETrustIncompatibleWithUseOp (%s, %s, %s)" (dump_reason cx reason1) (dump_reason cx reason2) (string_of_use_op use_op) - | EReactElementFunArity (reason, _, _) -> + | EUnsupportedImplements reason -> spf "EUnsupportedImplements (%s)" (dump_reason cx reason) + | ENotAReactComponent { reason; use_op } -> + spf + "ENotAReactComponent { reason = %s; use_op = %s }" + (dump_reason cx reason) + (string_of_use_op use_op) + | EInvalidReactConfigType { reason; use_op } -> + spf + "EInvalidReactConfigType { reason = %s; use_op = %s }" + (dump_reason cx reason) + (string_of_use_op use_op) + | EInvalidReactPropType { reason; use_op; tool = _ } -> + spf + "EInvalidReactPropType { reason = %s; use_op = %s; _ }" + (dump_reason cx reason) + (string_of_use_op use_op) + | EInvalidReactCreateClass { reason; use_op; tool = _ } -> + spf + "EInvalidReactCreateClass { reason = %s; use_op = %s; _ }" + (dump_reason cx reason) + (string_of_use_op use_op) + | EReactElementFunArity (reason, _, _) -> spf "EReactElementFunArity (%s)" (dump_reason cx reason) - | EFunctionCallExtraArg (unused_reason, def_reason, 
param_count, use_op) -> - spf "EFunctionCallExtraArg (%s, %s, %d, %s)" - (dump_reason cx unused_reason) + | EFunctionCallExtraArg (unused_reason, def_reason, param_count, use_op) -> + spf + "EFunctionCallExtraArg (%s, %s, %d, %s)" + (dump_reason cx unused_reason) (dump_reason cx def_reason) param_count (string_of_use_op use_op) - | EUnsupportedSetProto reason -> - spf "EUnsupportedSetProto (%s)" (dump_reason cx reason) - | EDuplicateModuleProvider { module_name; provider; conflict } -> - spf "EDuplicateModuleProvider (%S, %s, %s)" + | EUnsupportedSetProto reason -> spf "EUnsupportedSetProto (%s)" (dump_reason cx reason) + | EDuplicateModuleProvider { module_name; provider; conflict } -> + spf + "EDuplicateModuleProvider (%S, %s, %s)" module_name (File_key.to_string provider) (File_key.to_string conflict) - | EParseError (loc, _parse_error) -> - spf "EParseError (%s, _)" - (string_of_loc loc) - (* TODO: string of parse error constructor *) - | EDocblockError (loc, err) -> - spf "EDocblockError (%s, %s)" - (string_of_loc loc) - (match err with - | MultipleFlowAttributes -> "MultipleFlowAttributes" - | MultipleProvidesModuleAttributes -> "MultipleProvidesModuleAttributes" - | MultipleJSXAttributes -> "MultipleJSXAttributes" - | InvalidJSXAttribute _ -> "InvalidJSXAttribute") - | EUntypedTypeImport (loc, module_name) -> - spf "EUntypedTypeImport (%s, %s)" (string_of_loc loc) module_name - | EUntypedImport (loc, module_name) -> - spf "EUntypedImport (%s, %s)" (string_of_loc loc) module_name - | ENonstrictImport loc -> - spf "ENonstrictImport (%s)" (string_of_loc loc) - | EUnclearType loc -> - spf "EUnclearType (%s)" (string_of_loc loc) - | EDeprecatedType loc -> - spf "EDeprecatedType (%s)" (string_of_loc loc) - | EUnsafeGettersSetters loc -> - spf "EUnclearGettersSetters (%s)" (string_of_loc loc) - | EDeprecatedCallSyntax loc -> - spf "EDeprecatedCallSyntax (%s)" (string_of_loc loc) - | EUnusedSuppression loc -> - spf "EUnusedSuppression (%s)" (string_of_loc loc) - | ELintSetting (loc, kind) -> - let open LintSettings in - let kind_str = match kind with - | Invalid_setting -> "Invalid_setting" - | Malformed_argument -> "Malformed_argument" - | Naked_comment -> "Naked_comment" - | Nonexistent_rule -> "Nonexistent_rule" - | Overwritten_argument -> "Overwritten_argument" - | Redundant_argument -> "Redundant_argument" - in - spf "ELintSetting (%s, %s)" (string_of_loc loc) kind_str - | ESketchyNullLint { kind; loc; null_loc; falsy_loc } -> - let open Lints in - let kind_str = match kind with - | SketchyNullBool -> "SketchyNullBool" - | SketchyNullString -> "SketchyNullString" - | SketchyNullNumber -> "SketchyNullNumber" - | SketchyNullMixed -> "SketchyNullMixed" - in - spf "ESketchyNullLint {kind=%s; loc=%s; null_loc=%s; falsy_loc=%s}" - kind_str - (string_of_loc loc) - (string_of_loc null_loc) - (string_of_loc falsy_loc) - | ESketchyNumberLint (kind, reason) -> - let open Lints in - let kind_str = match kind with - | SketchyNumberAnd -> "SketchyNumberAnd" + | EParseError (loc, _parse_error) -> spf "EParseError (%s, _)" (string_of_aloc loc) + (* TODO: string of parse error constructor *) + | EDocblockError (loc, err) -> + spf + "EDocblockError (%s, %s)" + (string_of_aloc loc) + (match err with + | MultipleFlowAttributes -> "MultipleFlowAttributes" + | MultipleProvidesModuleAttributes -> "MultipleProvidesModuleAttributes" + | MultipleJSXAttributes -> "MultipleJSXAttributes" + | InvalidJSXAttribute _ -> "InvalidJSXAttribute") + | EImplicitInexactObject loc -> spf "EImplicitInexactObject (%s)" 
(string_of_aloc loc) + | EUntypedTypeImport (loc, module_name) -> + spf "EUntypedTypeImport (%s, %s)" (string_of_aloc loc) module_name + | EUntypedImport (loc, module_name) -> + spf "EUntypedImport (%s, %s)" (string_of_aloc loc) module_name + | ENonstrictImport loc -> spf "ENonstrictImport (%s)" (string_of_aloc loc) + | EUnclearType loc -> spf "EUnclearType (%s)" (string_of_aloc loc) + | EDeprecatedUtility (loc, name) -> + spf "EDeprecatedUtility (%s, %s)" (string_of_aloc loc) name + | EDynamicExport (reason, reason') -> + spf "EDynamicExport (%s, %s)" (dump_reason cx reason) (dump_reason cx reason') + | EDeprecatedType loc -> spf "EDeprecatedType (%s)" (string_of_aloc loc) + | EUnsafeGettersSetters loc -> spf "EUnsafeGettersSetters (%s)" (string_of_aloc loc) + | EUnusedSuppression loc -> spf "EUnusedSuppression (%s)" (string_of_aloc loc) + | ELintSetting (loc, kind) -> + LintSettings.( + let kind_str = + match kind with + | Invalid_setting -> "Invalid_setting" + | Malformed_argument -> "Malformed_argument" + | Naked_comment -> "Naked_comment" + | Nonexistent_rule -> "Nonexistent_rule" + | Overwritten_argument -> "Overwritten_argument" + | Redundant_argument -> "Redundant_argument" + in + spf "ELintSetting (%s, %s)" (string_of_loc loc) kind_str) + | ESketchyNullLint { kind; loc; null_loc; falsy_loc } -> + Lints.( + let kind_str = + match kind with + | SketchyNullBool -> "SketchyNullBool" + | SketchyNullString -> "SketchyNullString" + | SketchyNullNumber -> "SketchyNullNumber" + | SketchyNullMixed -> "SketchyNullMixed" + in + spf + "ESketchyNullLint {kind=%s; loc=%s; null_loc=%s; falsy_loc=%s}" + kind_str + (string_of_aloc loc) + (string_of_aloc null_loc) + (string_of_aloc falsy_loc)) + | ESketchyNumberLint (kind, reason) -> + Lints.( + let kind_str = + match kind with + | SketchyNumberAnd -> "SketchyNumberAnd" + in + spf "ESketchyNumberLint (%s) (%s)" kind_str (dump_reason cx reason)) + | EInvalidPrototype reason -> spf "EInvalidPrototype (%s)" (dump_reason cx reason) + | EExperimentalOptionalChaining loc -> + spf "EExperimentalOptionalChaining (%s)" (string_of_aloc loc) + | EOptionalChainingMethods loc -> spf "EOptionalChainingMethods (%s)" (string_of_aloc loc) + | EUnnecessaryOptionalChain (loc, _) -> + spf "EUnnecessaryOptionalChain (%s)" (string_of_aloc loc) + | EUnnecessaryInvariant (loc, _) -> spf "EUnnecessaryInvariant (%s)" (string_of_aloc loc) + | EInexactSpread (reason, reason_op) -> + spf "EInexactSpread (%s, %s)" (dump_reason cx reason) (dump_reason cx reason_op) + | EUnexpectedTemporaryBaseType loc -> + spf "EUnexpectedTemporaryBaseType (%s)" (string_of_aloc loc) + | ECannotDelete (l1, r1) -> + spf "ECannotDelete (%s, %s)" (string_of_aloc l1) (dump_reason cx r1) + | ESignatureVerification sve -> + spf + "ESignatureVerification (%s)" + (Signature_builder_deps.With_ALoc.Error.debug_to_string sve) + | EBigIntNotYetSupported reason -> spf "EBigIntNotYetSupported (%s)" (dump_reason cx reason) + | ENonArraySpread reason -> spf "ENonArraySpread (%s)" (dump_reason cx reason) + | ECannotSpreadInterface { spread_reason; interface_reason } -> + spf + "ECannotSpreadInterface (%s) (%s)" + (dump_reason cx spread_reason) + (dump_reason cx interface_reason) + | ECannotSpreadIndexerOnRight { spread_reason; object_reason; key_reason } -> + spf + "ECannotSpreadIndexerOnRight (%s) (%s) (%s)" + (dump_reason cx spread_reason) + (dump_reason cx object_reason) + (dump_reason cx key_reason) + | EUnableToSpread { spread_reason; object1_reason; object2_reason; propname; error_kind = _ } + -> + spf + 
"EUnableToSpread (%s) (%s) (%s) (%s)" + (dump_reason cx spread_reason) + (dump_reason cx object1_reason) + (dump_reason cx object2_reason) + propname + | EInexactMayOverwriteIndexer { spread_reason; key_reason; value_reason; object2_reason } -> + spf + "EInexactMayOverwriteIndexer (%s) (%s) (%s) (%s)" + (dump_reason cx spread_reason) + (dump_reason cx key_reason) + (dump_reason cx value_reason) + (dump_reason cx object2_reason)) + +module Verbose = struct + let print_if_verbose_lazy cx trace ?(delim = "") ?(indent = 0) (lines : string Lazy.t list) = + match Context.verbose cx with + | Some { Verbose.indent = num_spaces; _ } -> + let indent = indent + Trace.trace_depth trace - 1 in + let prefix = String.make (indent * num_spaces) ' ' in + let pid = Context.pid_prefix cx in + let add_prefix line = spf "\n%s%s%s" prefix pid (Lazy.force line) in + let lines = Core_list.map ~f:add_prefix lines in + prerr_endline (String.concat delim lines) + | None -> () + + let print_if_verbose cx trace ?(delim = "") ?(indent = 0) (lines : string list) = + match Context.verbose cx with + | Some _ -> + let lines = Core_list.map ~f:(fun line -> lazy line) lines in + print_if_verbose_lazy cx trace ~delim ~indent lines + | None -> () + + let print_types_if_verbose cx trace ?(note : string option) ((l : Type.t), (u : Type.use_t)) = + match Context.verbose cx with + | Some { Verbose.depth; _ } -> + let delim = + match note with + | Some x -> spf " ~> %s" x + | None -> " ~>" in - spf "ESketchyNumberLint (%s) (%s)" kind_str (dump_reason cx reason) - | EInvalidPrototype reason -> - spf "EInvalidPrototype (%s)" (dump_reason cx reason) - | EExperimentalOptionalChaining loc -> - spf "EExperimentalOptionalChaining (%s)" (string_of_loc loc) - | EOptionalChainingMethods loc -> - spf "EOptionalChainingMethods (%s)" (string_of_loc loc) - | EUnnecessaryOptionalChain (loc, _) -> - spf "EUnnecessaryOptionalChain (%s)" (string_of_loc loc) - | EUnnecessaryInvariant (loc, _) -> - spf "EUnnecessaryInvariant (%s)" (string_of_loc loc) - | EInexactSpread (reason, reason_op) -> - spf "EInexactSpread (%s, %s)" - (dump_reason cx reason) - (dump_reason cx reason_op) - | ESignatureVerification sve -> - spf "ESignatureVerification (%s)" (Signature_builder_deps.Error.to_string sve) + print_if_verbose cx trace ~delim [dump_t ~depth cx l; dump_use_t ~depth cx u] + | None -> () +end diff --git a/src/typing/debug_js.mli b/src/typing/debug_js.mli index 1c4815ce77e..19774fb881a 100644 --- a/src/typing/debug_js.mli +++ b/src/typing/debug_js.mli @@ -1,40 +1,72 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val json_of_t: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.t -> Hh_json.json -val jstr_of_t: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.t -> string -val json_of_use_t: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.use_t -> Hh_json.json -val jstr_of_use_t: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.use_t -> string -val json_of_graph: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Hh_json.json -val jstr_of_graph: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> string -val json_of_scope: ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Scope.t -> Hh_json.json -val json_of_env: ?size:int -> ?depth:int -> Context.t -> Scope.t list -> Hh_json.json - -val string_of_scope_entry: Context.t -> Scope.Entry.t -> string -val string_of_scope_entries: - Context.t -> - Scope.Entry.t SMap.t -> - string -val string_of_scope_refi: Context.t -> Scope.refi_binding -> string -val string_of_scope_refis: - Context.t -> - Scope.refi_binding Key_map.t -> - string -val string_of_scope: Context.t -> Scope.t -> string - -val string_of_reason: Context.t -> Reason.t -> string -val string_of_file: Context.t -> string -val string_of_selector: Type.TypeTerm.selector -> string -val string_of_destructor: Type.TypeTerm.destructor -> string -val string_of_default: (Loc.t, Loc.t) Flow_ast.Expression.t Default.t -> string - -val dump_t: ?depth:int -> Context.t -> Type.t -> string -val dump_use_t: ?depth:int -> Context.t -> Type.use_t -> string -val dump_tvar: ?depth:int -> Context.t -> Constraint.ident -> string -val dump_prop: ?depth:int -> Context.t -> Type.Property.t -> string -val dump_reason: Context.t -> Reason.t -> string -val dump_flow_error: Context.t -> Flow_error.error_message -> string +val json_of_t : + ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.t -> Hh_json.json + +val jstr_of_t : + ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.t -> string + +val json_of_use_t : + ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.use_t -> Hh_json.json + +val jstr_of_use_t : + ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Type.use_t -> string + +val json_of_graph : + ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Hh_json.json + +val jstr_of_graph : ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> string + +val json_of_scope : + ?size:int -> ?depth:int -> ?strip_root:Path.t option -> Context.t -> Scope.t -> Hh_json.json + +val json_of_env : ?size:int -> ?depth:int -> Context.t -> Scope.t list -> Hh_json.json + +val string_of_scope_entry : Context.t -> Scope.Entry.t -> string + +val string_of_scope_entries : Context.t -> Scope.Entry.t SMap.t -> string + +val string_of_scope_refi : Context.t -> Scope.refi_binding -> string + +val string_of_scope_refis : Context.t -> Scope.refi_binding Key_map.t -> string + +val string_of_scope : Context.t -> Scope.t -> string + +val string_of_reason : Context.t -> Reason.t -> string + +val string_of_file : Context.t -> string + +val string_of_selector : Type.TypeTerm.selector -> string + +val string_of_destructor : Type.TypeTerm.destructor -> string + +val string_of_default : (Loc.t, Loc.t) Flow_ast.Expression.t Default.t -> string + +val dump_t : ?depth:int -> Context.t -> Type.t -> string + +val dump_use_t : ?depth:int -> Context.t -> Type.use_t -> string + +val dump_tvar 
: ?depth:int -> Context.t -> Constraint.ident -> string + +val dump_prop : ?depth:int -> Context.t -> Type.Property.t -> string + +val dump_reason : Context.t -> Reason.t -> string + +val dump_error_message : Context.t -> Error_message.t -> string + +val dump_flow : ?depth:int -> Context.t -> Type.t * Type.use_t -> string + +module Verbose : sig + val print_if_verbose_lazy : + Context.t -> Trace.t -> ?delim:string -> ?indent:int -> string Lazy.t list -> unit + + val print_if_verbose : + Context.t -> Trace.t -> ?delim:string -> ?indent:int -> string list -> unit + + val print_types_if_verbose : Context.t -> Trace.t -> ?note:string -> Type.t * Type.use_t -> unit +end diff --git a/src/typing/default.ml b/src/typing/default.ml index 1b40004b323..32eb81caee8 100644 --- a/src/typing/default.ml +++ b/src/typing/default.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -38,15 +38,21 @@ type 'a t = | Cons of 'a * 'a t | Selector of reason * 'a t * TypeTerm.selector -let expr ?default e = match default with -| Some default -> Cons (e, default) -| None -> Expr e +let expr ?default e = + match default with + | Some default -> Cons (e, default) + | None -> Expr e let elem key reason default = Selector (reason, default, Elem key) -let prop x reason default = Selector (reason, default, Prop x) + +let prop x reason has_default default = Selector (reason, default, Prop (x, has_default)) + let arr_rest i reason default = Selector (reason, default, ArrRest i) + let obj_rest xs reason default = Selector (reason, default, ObjRest xs) +let default reason d = Selector (reason, d, Default) + let rec fold ~expr ~cons ~selector = function | Expr e -> expr e | Cons (e, d) -> cons (expr e) (fold ~expr ~selector ~cons d) diff --git a/src/typing/destructuring.ml b/src/typing/destructuring.ml index 842449af09a..4c95745d23b 100644 --- a/src/typing/destructuring.ml +++ b/src/typing/destructuring.ml @@ -1,252 +1,323 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module Ast = Flow_ast - -(* AST handling for destructuring exprs *) - - -open Utils_js -open Reason -open Type - (* Destructuring visitor for tree-shaped patterns, parameteric over an action f to perform at the leaves. A type for the pattern is passed, which is taken apart as the visitor goes deeper. *) -(** NOTE: Since the type of the pattern may contain (unsubstituted) type - parameters, it is important that this visitor does not emit constraints: - otherwise, we may end up with (unsubstituted) type parameters appearing as - lower or upper bounds of constraints, which would violate a core - invariant. So, instead we model the operation of destructuring with a - wrapper constructor, `DestructuringT` (with lazy evaluation rules that - trigger whenever a result is needed, e.g., to interact in flows with other - lower and upper bounds). **) - (** TODO currently type annotations internal to patterns get parsed but not * checked. We should either update this to give users a warning that internal * annotations aren't checked, or update this to check internal annotations. 
*) -let destructuring cx ~expr ~f = Ast.Pattern.( - let rec recurse ?parent_pattern_t curr_t init default = function - | top_loc, Array { Array.elements; _; } -> Array.( - let elements = elements |> List.mapi (fun i -> function - | Some (Element ((loc, _) as p)) -> - let key = DefT (mk_reason RNumber loc, NumT ( - Literal (None, (float i, string_of_int i)) - )) in - let reason = mk_reason (RCustom (spf "element %d" i)) loc in - let init = Option.map init (fun init -> - loc, Ast.Expression.(Member Member.({ - _object = init; - property = PropertyExpression ( - loc, - Ast.Expression.Literal { Ast.Literal. - value = Ast.Literal.Number (float i); - raw = string_of_int i; - } - ); - computed = true; - })) - ) in - let refinement = Option.bind init (fun init -> - Refinement.get cx init loc - ) in - let parent_pattern_t, tvar = (match refinement with - | Some refined_t -> refined_t, refined_t - | None -> - curr_t, - EvalT (curr_t, DestructuringT (reason, Elem key), mk_id()) - ) in - let default = Option.map default (Default.elem key reason) in - Some (Element (recurse ~parent_pattern_t tvar init default p)) - | Some (RestElement (loc, { RestElement.argument = p })) -> - let reason = mk_reason RArrayPatternRestProp loc in - let tvar = - EvalT (curr_t, DestructuringT (reason, ArrRest i), mk_id()) - in - let default = Option.map default (Default.arr_rest i reason) in - Some (RestElement (loc, { - RestElement.argument = recurse ~parent_pattern_t:curr_t tvar init default p - })) - | None -> None - ) - in - (* Type annotations in patterns are currently ignored *) - let annot = None in - (top_loc, curr_t), Array { elements; annot; } - ) - - | top_loc, Object { Object.properties; _; } -> Object.( - let _, rev_props = List.fold_left (fun (xs, rev_props) -> function - | Property (loc, prop) -> - begin match prop with - | { Property. - key = Property.Identifier (loc, name); - pattern = p; _; - } - | { Property.key = - Property.Literal (loc, { Ast.Literal. - value = Ast.Literal.String name; _ }); - pattern = p; _; } - -> - let reason = mk_reason (RProperty (Some name)) loc in - let init = Option.map init (fun init -> - loc, Ast.Expression.(Member Member.({ - _object = init; - property = PropertyIdentifier (loc, name); - computed = false; - })) - ) in - let refinement = Option.bind init (fun init -> - Refinement.get cx init loc - ) in - let parent_pattern_t, tvar = (match refinement with - | Some refined_t -> refined_t, refined_t - | None -> - (* use the same reason for the prop name and the lookup. - given `var {foo} = ...`, `foo` is both. compare to `a.foo` - where `foo` is the name and `a.foo` is the lookup. *) - curr_t, - EvalT (curr_t, DestructuringT (reason, Prop name), mk_id()) - ) in - let default = Option.map default (Default.prop name reason) in - (** - * We are within a destructuring pattern and a `get-def` on this identifier should - * point at the "def" of the original property. To accompish this, we emit the type - * of the parent pattern so that get-def can dive in to that type and extract the - * location of the "def" of this property. 
- *) - Type_inference_hooks_js.dispatch_lval_hook - cx - name - loc - (Type_inference_hooks_js.Parent parent_pattern_t); - let pattern = recurse ~parent_pattern_t tvar init default p in - let key = match prop.Property.key with - | (Property.Literal _ | Property.Identifier _) as key -> key - | Property.Computed _ -> assert_false "precondition not met" - in - name :: xs, - Property (loc, { prop with Property.key; pattern; }) :: rev_props - | { Property.key = Property.Computed key; pattern = p; _; } -> - let (_, key_t), _ as key_ast = expr cx key in - let loc = fst key in - let reason = mk_reason (RProperty None) loc in - let init = Option.map init (fun init -> - loc, Ast.Expression.(Member Member.({ - _object = init; - property = PropertyExpression key; - computed = true; - })) - ) in - let refinement = Option.bind init (fun init -> - Refinement.get cx init loc - ) in - let parent_pattern_t, tvar = (match refinement with - | Some refined_t -> refined_t, refined_t - | None -> - curr_t, - EvalT (curr_t, DestructuringT (reason, Elem key_t), mk_id ()) - ) in - let default = Option.map default (Default.elem key_t reason) in - let pattern = recurse ~parent_pattern_t tvar init default p in - xs, Property (loc, { - prop with Property.key = Property.Computed key_ast; pattern; - }) :: rev_props - | { Property.key = Property.Literal _; _ } -> - Flow_js.add_output cx Flow_error.(EUnsupportedSyntax - (loc, DestructuringObjectPropertyLiteralNonString)); - xs, rev_props - end - - | RestProperty (loc, { RestProperty.argument = p }) -> - let reason = mk_reason RObjectPatternRestProp loc in - let tvar = - EvalT (curr_t, DestructuringT (reason, ObjRest xs), mk_id()) - in - let default = Option.map default (Default.obj_rest xs reason) in - let argument = recurse ~parent_pattern_t:curr_t tvar init default p in - xs, RestProperty (loc, { RestProperty.argument }) :: rev_props - ) ([], []) properties - in - let properties = List.rev rev_props in - (* Type annotations in patterns are currently ignored *) - let annot = None in - (top_loc, curr_t), Object { Object.properties; annot } - ) - - | loc, Identifier { Identifier.name = (id_loc, name); optional; _ } -> - begin match parent_pattern_t with - (* If there was a parent pattern, we already dispatched the hook if relevant. *) - | Some _ -> () - (** - * If there was no parent_pattern, we must not be within a destructuring - * pattern and a `get-def` on this identifier should point at the - * location where the binding is introduced. 
+module Ast = Flow_ast +module Tast_utils = Typed_ast_utils +open Reason +open Type + +type state = { + parent: Type.t option; + current: Type.t; + init: (ALoc.t, ALoc.t) Flow_ast.Expression.t option; + default: Type.t Default.t option; + annot: bool; +} + +type expr = + Context.t -> + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t + +type callback = use_op:Type.use_op -> ALoc.t -> string -> Type.t Default.t option -> Type.t -> unit + +let empty ?init ?default ~annot current = { parent = None; current; init; default; annot } + +let destruct cx reason ~annot selector t = + let kind = + if annot then + DestructAnnot + else + DestructInfer + in + Tvar.mk_where cx reason (fun tout -> + Flow_js.flow cx (t, DestructuringT (reason, kind, selector, tout))) + +let pattern_default cx ~expr acc = function + | None -> (acc, None) + | Some e -> + let { current; default; annot; _ } = acc in + let (((loc, t), _) as e) = expr cx e in + let default = Some (Default.expr ?default t) in + let reason = mk_reason RDefaultValue loc in + let current = destruct cx reason ~annot Default current in + let acc = { acc with current; default } in + (acc, Some e) + +let array_element cx acc i loc = + let { current; init; default; annot; _ } = acc in + let key = + DefT (mk_reason RNumber loc, bogus_trust (), NumT (Literal (None, (float i, string_of_int i)))) + in + let reason = mk_reason (RCustom (Utils_js.spf "element %d" i)) loc in + let init = + Option.map init (fun init -> + ( loc, + Ast.Expression.( + Member + Member. + { + _object = init; + property = + PropertyExpression + ( loc, + Ast.Expression.Literal + { + Ast.Literal.value = Ast.Literal.Number (float i); + raw = string_of_int i; + comments = Flow_ast_utils.mk_comments_opt (); + } ); + }) )) + in + let refinement = Option.bind init (fun init -> Refinement.get cx init loc) in + let (parent, current) = + match refinement with + | Some t -> (None, t) + | None -> (Some current, destruct cx reason ~annot (Elem key) current) + in + let default = Option.map default (Default.elem key reason) in + { acc with parent; current; init; default } + +let array_rest_element cx acc i loc = + let { current; default; annot; _ } = acc in + let reason = mk_reason RArrayPatternRestProp loc in + let (parent, current) = (Some current, destruct cx reason ~annot (ArrRest i) current) in + let default = Option.map default (Default.arr_rest i reason) in + { acc with parent; current; default } + +let object_named_property ~has_default cx acc loc x comments = + let { current; init; default; annot; _ } = acc in + let reason = mk_reason (RProperty (Some x)) loc in + let init = + Option.map init (fun init -> + ( loc, + Ast.Expression.( + Member + Member. + { + _object = init; + property = PropertyIdentifier (loc, { Ast.Identifier.name = x; comments }); + }) )) + in + let refinement = Option.bind init (fun init -> Refinement.get cx init loc) in + let default = + Option.map default (fun default -> + let d = Default.prop x reason has_default default in + if has_default then + Default.default reason d + else + d) + in + let (parent, current) = + match refinement with + | Some t -> (None, t) + | None -> + (* use the same reason for the prop name and the lookup. + given `var {foo} = ...`, `foo` is both. compare to `a.foo` + where `foo` is the name and `a.foo` is the lookup. 
*) + (Some current, destruct cx reason ~annot (Prop (x, has_default)) current) + in + let () = + match parent with + | None -> () (* TODO: get-def when object property is refined *) + | Some t -> + (* + * We are within a destructuring pattern and a `get-def` on this identifier should + * point at the "def" of the original property. To accomplish this, we emit the type + * of the parent pattern so that get-def can dive in to that type and extract the + * location of the "def" of this property. *) + Type_inference_hooks_js.dispatch_lval_hook cx x loc (Type_inference_hooks_js.Parent t) + in + { acc with parent; current; init; default } + +let object_computed_property cx ~expr acc e = + let { current; init; default; annot; _ } = acc in + let (((loc, t), _) as e') = expr cx e in + let reason = mk_reason (RProperty None) loc in + let init = + Option.map init (fun init -> + (loc, Ast.Expression.(Member Member.{ _object = init; property = PropertyExpression e }))) + in + let refinement = Option.bind init (fun init -> Refinement.get cx init loc) in + let (parent, current) = + match refinement with + | Some t -> (None, t) + | None -> (Some current, destruct cx reason ~annot (Elem t) current) + in + let default = Option.map default (Default.elem t reason) in + ({ acc with parent; current; init; default }, e') + +let object_rest_property cx acc xs loc = + let { current; default; annot; _ } = acc in + let reason = mk_reason RObjectPatternRestProp loc in + let (parent, current) = (Some current, destruct cx reason ~annot (ObjRest xs) current) in + let default = Option.map default (Default.obj_rest xs reason) in + { acc with parent; current; default } + +let object_property + cx ~expr ~has_default (acc : state) xs (key : (ALoc.t, ALoc.t) Ast.Pattern.Object.Property.key) + : state * string list * (ALoc.t, ALoc.t * Type.t) 
Ast.Pattern.Object.Property.key = + Ast.Pattern.Object.( + match key with + | Property.Identifier (loc, { Ast.Identifier.name = x; comments }) -> + let acc = object_named_property ~has_default cx acc loc x comments in + ( acc, + x :: xs, + Property.Identifier ((loc, acc.current), { Ast.Identifier.name = x; comments }) ) + | Property.Literal (loc, ({ Ast.Literal.value = Ast.Literal.String x; _ } as lit)) -> + let acc = object_named_property ~has_default cx acc loc x None in + (acc, x :: xs, Property.Literal (loc, lit)) + | Property.Computed e -> + let (acc, e) = object_computed_property cx ~expr acc e in + (acc, xs, Property.Computed e) + | Property.Literal (loc, _) -> + Flow_js.add_output + cx + Error_message.(EUnsupportedSyntax (loc, DestructuringObjectPropertyLiteralNonString)); + (acc, xs, Tast_utils.error_mapper#pattern_object_property_key key)) + +let identifier cx ~f acc loc name = + let { parent; current; init; default; annot } = acc in + let () = + match parent with + (* If there was a parent pattern, we already dispatched the hook if relevant. *) + | Some _ -> () + (* + * If there was no parent_pattern, we must not be within a destructuring + * pattern and a `get-def` on this identifier should point at the + * location where the binding is introduced. + *) + | None -> Type_inference_hooks_js.dispatch_lval_hook cx name loc Type_inference_hooks_js.Id + in + let current = + mod_reason_of_t + (update_desc_reason (function + | RDefaultValue + | RArrayPatternRestProp + | RObjectPatternRestProp -> + RIdentifier name + | desc -> desc)) + current + in + let reason = mk_reason (RIdentifier name) loc in + let current = + (* If we are destructuring an annotation, the chain of constraints leading + * to here will preserve the 0->1 constraint. The mk_typeof_annotation + * helper will wrap the destructured type in an AnnotT, to ensure it is + * resolved before it is used as an upper bound. The helper also enforces + * the destructured type is 0->1 via BecomeT. + * + * The BecomeT part should not be necessary, but for now it is. Ideally an + * annotation would recursively be 0->1, but it's possible for them to + * contain inferred parts. For example, a class's instance type where one of + * the fields is unannotated. *) + if annot then + Flow_js.mk_typeof_annotation cx reason current + else + current + in + let use_op = + Op + (AssignVar + { + var = Some reason; + init = + (match init with + | Some init -> mk_expression_reason init + | None -> reason_of_t current); + }) + in + f ~use_op loc name default current + +let rec pattern cx ~expr ~f acc (loc, p) = + Ast.Pattern. 
+ ( (loc, acc.current), + match p with + | Array { Array.elements; annot; comments } -> + let elements = array_elements cx ~expr ~f acc elements in + let annot = Tast_utils.unimplemented_mapper#type_annotation_hint annot in + Array { Array.elements; annot; comments } + | Object { Object.properties; annot } -> + let properties = object_properties cx ~expr ~f acc properties in + let annot = Tast_utils.unimplemented_mapper#type_annotation_hint annot in + Object { Object.properties; annot } + | Identifier { Identifier.name = id; optional; annot } -> + let (id_loc, { Ast.Identifier.name; comments }) = id in + let id = ((id_loc, acc.current), { Ast.Identifier.name; comments }) in + let annot = Tast_utils.unimplemented_mapper#type_annotation_hint annot in + identifier cx ~f acc id_loc name; + Identifier { Identifier.name = id; optional; annot } + | Expression e -> + Flow_js.add_output + cx + Error_message.(EUnsupportedSyntax (loc, DestructuringExpressionPattern)); + Expression (Tast_utils.error_mapper#expression e) ) + +and array_elements cx ~expr ~f acc = + Ast.Pattern.Array.( + List.mapi (fun i -> + Option.map ~f:(function + | Element (loc, { Element.argument = p; default = d }) -> + let acc = array_element cx acc i loc in + let (acc, d) = pattern_default cx ~expr acc d in + let p = pattern cx ~expr ~f acc p in + Element (loc, { Element.argument = p; default = d }) + | RestElement (loc, { RestElement.argument = p }) -> + let acc = array_rest_element cx acc i loc in + let p = pattern cx ~expr ~f acc p in + RestElement (loc, { RestElement.argument = p })))) + +and object_properties = + Ast.Pattern.Object.( + let prop cx ~expr ~f acc xs p = + match p with + | Property (loc, { Property.key; pattern = p; default = d; shorthand }) -> + let has_default = d <> None in + let (acc, xs, key) = object_property cx ~expr ~has_default acc xs key in + let (acc, d) = pattern_default cx ~expr acc d in + let p = pattern cx ~expr ~f acc p in + (xs, Property (loc, { Property.key; pattern = p; default = d; shorthand })) + | RestProperty (loc, { RestProperty.argument = p }) -> + let acc = object_rest_property cx acc xs loc in + let p = pattern cx ~expr ~f acc p in + (xs, RestProperty (loc, { RestProperty.argument = p })) + in + let rec loop cx ~expr ~f acc xs rev_ps = function + | [] -> List.rev rev_ps + | p :: ps -> + let (xs, p) = prop cx ~expr ~f acc xs p in + loop cx ~expr ~f acc xs (p :: rev_ps) ps + in + (fun cx ~expr ~f acc ps -> loop cx ~expr ~f acc [] [] ps)) + +let type_of_pattern (_, p) = + Ast.Pattern.( + match p with + | Array { Array.annot; _ } + | Object { Object.annot; _ } + | Identifier { Identifier.annot; _ } -> + annot + | _ -> Ast.Type.Missing ALoc.none) + (* instantiate pattern visitor for assignments *) -let destructuring_assignment cx ~expr rhs_t init = +let assignment cx ~expr rhs_t init = + let acc = empty ~init ~annot:false rhs_t in let f ~use_op loc name _default t = (* TODO destructuring+defaults unsupported in assignment expressions *) ignore Env.(set_var cx ~use_op name t loc) in - destructuring cx ~expr rhs_t (Some init) None ~f + pattern cx ~expr ~f acc diff --git a/src/typing/destructuring.mli b/src/typing/destructuring.mli index 9bb1c58d7ba..d897451f130 100644 --- a/src/typing/destructuring.mli +++ b/src/typing/destructuring.mli @@ -1,33 +1,57 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -val destructuring : +type state + +type expr = Context.t -> - expr:(Context.t -> (Loc.t, Loc.t) Flow_ast.Expression.t -> (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t) -> - f:(use_op:Type.use_op -> - Loc.t -> - string -> - (Loc.t, Loc.t) Flow_ast.Expression.t Default.t option -> - Type.t -> unit) -> + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t + +type callback = use_op:Type.use_op -> ALoc.t -> string -> Type.t Default.t option -> Type.t -> unit + +val empty : + ?init:(ALoc.t, ALoc.t) Flow_ast.Expression.t -> + ?default:Type.t Default.t -> + annot:bool -> Type.t -> - (Loc.t, Loc.t) Flow_ast.Expression.t option -> - (Loc.t, Loc.t) Flow_ast.Expression.t Default.t option -> - (Loc.t, Loc.t) Flow_ast.Pattern.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Pattern.t + state + +val pattern : + Context.t -> + expr:expr -> + f:callback -> + state -> + (ALoc.t, ALoc.t) Flow_ast.Pattern.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Pattern.t + +val array_elements : + Context.t -> + expr:expr -> + f:callback -> + state -> + (ALoc.t, ALoc.t) Flow_ast.Pattern.Array.element option list -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Pattern.Array.element option list + +val object_properties : + Context.t -> + expr:expr -> + f:callback -> + state -> + (ALoc.t, ALoc.t) Flow_ast.Pattern.Object.property list -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Pattern.Object.property list + val type_of_pattern : - 'a * (Loc.t, Loc.t) Flow_ast.Pattern.t' -> - (Loc.t, Loc.t) Flow_ast.Type.annotation option -val destructuring_assignment : + 'a * (ALoc.t, ALoc.t) Flow_ast.Pattern.t' -> (ALoc.t, ALoc.t) Flow_ast.Type.annotation_or_hint + +val assignment : Context.t -> - expr:( - Context.t -> (Loc.t, Loc.t) Flow_ast.Expression.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t - ) -> + expr:expr -> Type.t -> - (Loc.t, Loc.t) Flow_ast.Expression.t -> - (Loc.t, Loc.t) Flow_ast.Pattern.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Pattern.t + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (ALoc.t, ALoc.t) Flow_ast.Pattern.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Pattern.t diff --git a/src/typing/dune b/src/typing/dune new file mode 100644 index 00000000000..ab1ecf40ffb --- /dev/null +++ b/src/typing/dune @@ -0,0 +1,74 @@ +(library + (name flow_typing_changeset) + (wrapped false) + (modules changeset) + (libraries + flow_typing_key + ) +) + +(library + (name flow_typing_key) + (wrapped false) + (modules key key_map) + (libraries + flow_common + flow_common_utils + ) +) + +(library + (name flow_typing_scope) + (wrapped false) + (modules scope) + (libraries + flow_typing_key + flow_typing_trust + flow_typing_type + ) +) + +(library + (name flow_typing_trust) + (wrapped false) + (modules trust) + (libraries + flow_common_utils + ) +) + +(library + (name flow_typing_type) + (wrapped false) + (modules type) + (libraries + flow_common + flow_typing_changeset + flow_typing_key + flow_typing_polarity + flow_typing_trust + ) +) + +(library + (name flow_typing) + (wrapped false) + (modules (:standard \ changeset key key_map trust type scope)) + (libraries + flow_common + flow_common_errors + flow_common_modulename + flow_common_monad + flow_common_tarjan + flow_parser_utils + flow_state_heaps_parsing_exceptions + flow_typing_coverage_response + flow_typing_errors + flow_typing_polarity + flow_typing_ty + flow_typing_type + worker_cancel ; hack + xx + ) + (modules_without_implementation graph partition resolvable_type_job) +) diff --git a/src/typing/env.ml b/src/typing/env.ml index a49886a400e..0545a8966f4 100644 
--- a/src/typing/env.ml +++ b/src/typing/env.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,12 +11,12 @@ associated type information. *) open Utils_js +open Loc_collections open Type open Reason open Scope - -module FlowError = Flow_error module Flow = Flow_js +module Scope_api = Scope_api.With_ALoc (* lookup modes: @@ -50,7 +50,10 @@ module Flow = Flow_js rule #2, hence the need for a special mode. *) module LookupMode = struct - type t = ForValue | ForType | ForTypeof + type t = + | ForValue + | ForType + | ForTypeof end open LookupMode @@ -65,7 +68,7 @@ type t = Scope.t list traversed. changesets are also managed here, but live in a separate Changeset module for dependency reasons. *) -let scopes: t ref = ref [] +let scopes : t ref = ref [] (* symbols whose bindings are forcibly prevented from being created, initialized, etc. This set is initialized in init_env, and is normally @@ -74,47 +77,39 @@ let scopes: t ref = ref [] register it as a deferred global lookup, which will then be linked to the override. See Init_js.load_lib_files. *) -let exclude_symbols: SSet.t ref = ref SSet.empty +let exclude_symbols : SSet.t ref = ref SSet.empty -let set_exclude_symbols syms = - exclude_symbols := syms +let set_exclude_symbols syms = exclude_symbols := syms -let is_excluded name = - SSet.mem name !exclude_symbols +let is_excluded name = SSet.mem name !exclude_symbols (* scopes *) (* return the current scope *) -let peek_scope () = - List.hd !scopes +let peek_scope () = List.hd !scopes (* return current scope stack *) -let peek_env () = - !scopes +let peek_env () = !scopes let string_of_env cx env = - spf "[ %s ]" (String.concat ";\n" - (List.map (Debug_js.string_of_scope cx) env)) + spf "[ %s ]" (String.concat ";\n" (Core_list.map ~f:(Debug_js.string_of_scope cx) env)) (* return the value of f applied to topmost var scope in a scope list *) let rec top_var_scope = function -| [] -> assert_false "empty scope list" -| scope :: scopes -> - match scope.kind with - | VarScope _ -> scope - | _ -> top_var_scope scopes + | [] -> assert_false "empty scope list" + | scope :: scopes -> + (match scope.kind with + | VarScope _ -> scope + | _ -> top_var_scope scopes) (* get top var scope of current env *) -let peek_var_scope () = - top_var_scope (peek_env ()) +let peek_var_scope () = top_var_scope (peek_env ()) (* each varscope carries a frame id *) -let peek_frame () = - (peek_scope ()).id +let peek_frame () = (peek_scope ()).id (* use the passed f to iterate over all scopes *) -let iter_scopes f = - List.iter f !scopes +let iter_scopes f = List.iter f !scopes (* apply function f to local scopes: the 0 or more lex scopes between us and the closest var scope, @@ -122,17 +117,16 @@ let iter_scopes f = let iter_local_scopes f = let rec loop = function | [] -> assert_false "empty scope list" - | scope::scopes -> - f scope; - (match scope.kind with - | LexScope -> loop scopes - | _ -> ()) + | scope :: scopes -> + f scope; + (match scope.kind with + | LexScope -> loop scopes + | _ -> ()) in loop !scopes (* clone the given scope stack (snapshots entry maps) *) -let clone_env scopes = - List.map Scope.clone scopes +let clone_env scopes = Core_list.map ~f:Scope.clone scopes let var_scope_kind () = let scope = peek_var_scope () in @@ -148,32 +142,36 @@ let is_func_kind k scope = let in_async_scope () = match 
var_scope_kind () with - | Async | AsyncGenerator -> true + | Async + | AsyncGenerator -> + true | _ -> false let in_generator_scope () = match var_scope_kind () with - | Generator | AsyncGenerator -> true + | Generator + | AsyncGenerator -> + true | _ -> false -let in_predicate_scope () = - is_func_kind Predicate (peek_var_scope ()) +let in_predicate_scope () = is_func_kind Predicate (peek_var_scope ()) (* build a map of all var entries - no refis - in the current scope stack. Note that we accumulate entries bottom-up, so that shadowing is properly maintained *) let all_entries () = - List.fold_left (fun entries scope -> - SMap.union scope.entries entries - ) SMap.empty (List.rev !scopes) + List.fold_left + (fun entries scope -> SMap.union scope.entries entries) + SMap.empty + (List.rev !scopes) (* whole env *) (* clear environment *) let havoc_current_activation () = scopes := []; - Changeset.init () + Changeset.Global.init () (* save environment to context *) let snapshot_env cx = @@ -186,10 +184,10 @@ let snapshot_env cx = (* TODO maintain changelist here too *) let push_var_scope cx scope = (match scope.kind with - | VarScope _ -> () - | _ -> assert_false "push_var_scope on non-var scope"); + | VarScope _ -> () + | _ -> assert_false "push_var_scope on non-var scope"); scopes := scope :: !scopes; - Changeset.push (); + Changeset.Global.push (); snapshot_env cx (*** @@ -207,8 +205,8 @@ let push_var_scope cx scope = let saved_closure_changeset = ref (Some Changeset.empty) let save_closure_changeset scopes = - let ids = List.map (fun { id; _ } -> id) scopes in - let changeset = Changeset.(include_scopes ids (peek ())) in + let ids = Core_list.map ~f:(fun { id; _ } -> id) scopes in + let changeset = Changeset.(include_scopes ids (Global.peek ())) in saved_closure_changeset := Some changeset let retrieve_closure_changeset () = @@ -227,7 +225,7 @@ let pop_var_scope () = | { kind = VarScope _; _ } :: tail_scopes -> save_closure_changeset tail_scopes; scopes := tail_scopes; - Changeset.pop () + Changeset.Global.pop () | [] -> assert_false "empty scope list" | _ -> assert_false "top scope is non-var" @@ -242,7 +240,8 @@ let pop_lex_scope () = match !scopes with | { kind = LexScope; id; _ } :: tail_scopes -> (* cull any changelist entries for this scope *) - ignore (Changeset.filter_scope_changes id); + ignore (Changeset.Global.filter_scope_changes id); + (* pop *) scopes := tail_scopes | [] -> assert_false "empty scope list" @@ -255,24 +254,23 @@ let in_lex_scope cx f = result (* depth of current env *) -let env_depth () = - List.length !scopes +let env_depth () = List.length !scopes (* strip the given number of scopes from top of env *) let trunc_env = let rec trunc = function - | 0, scopes -> scopes - | _, [] -> assert_false "trunc_env: scopes underflow" - | n, scope :: scopes -> - ignore (Changeset.filter_scope_changes scope.id); - trunc (n - 1, scopes) + | (0, scopes) -> scopes + | (_, []) -> assert_false "trunc_env: scopes underflow" + | (n, scope :: scopes) -> + ignore (Changeset.Global.filter_scope_changes scope.id); + trunc (n - 1, scopes) in fun depth -> let cur = !scopes in scopes := trunc (List.length cur - depth, cur) (* initialize a new environment (once per module) *) -let init_env ?(exclude_syms=SSet.empty) cx module_scope = +let init_env ?(exclude_syms = SSet.empty) cx module_scope = set_exclude_symbols exclude_syms; havoc_current_activation (); let global_scope = Scope.fresh ~var_scope_kind:Global () in @@ -283,13 +281,13 @@ let init_env ?(exclude_syms=SSet.empty) cx 
module_scope = envs must be congruent - we measure length as a quick check, with a more thorough check on env merge/copy *) let update_env cx loc new_scopes = - - (if List.length new_scopes != List.length (peek_env ()) - then assert_false (spf - "update_env %s: unequal length scope lists, old %d new %d " - (string_of_loc loc) - (List.length new_scopes) - (List.length (peek_env ())))); + if List.length new_scopes != List.length (peek_env ()) then + assert_false + (spf + "update_env %s: unequal length scope lists, old %d new %d " + (string_of_aloc loc) + (List.length new_scopes) + (List.length (peek_env ()))); scopes := new_scopes; snapshot_env cx @@ -298,10 +296,7 @@ let update_env cx loc new_scopes = let global_any = ["eval"; "arguments"] -let global_lexicals = [ - (internal_name "super"); - (internal_name "this") -] +let global_lexicals = [internal_name "super"; internal_name "this"] (* any names that haven't been resolved in upper scopes wind up here. after handling special names, we add a Var @@ -332,32 +327,33 @@ let global_lexicals = [ *) let cache_global cx name ?desc loc global_scope = let t = - if List.mem name global_any - then AnyT.at loc - else if List.mem name global_lexicals - then ObjProtoT (mk_reason (RCustom "global object") loc) + if List.mem name global_any then + AnyT.at Annotated loc + else if List.mem name global_lexicals then + ObjProtoT (mk_reason (RCustom "global object") loc) else - let desc = match desc with - | Some desc -> desc - | None -> RIdentifier name + let desc = + match desc with + | Some desc -> desc + | None -> RIdentifier name in let reason = mk_reason desc loc in Flow.get_builtin cx name reason in let entry = Entry.new_var t ~loc ~state:State.Initialized in Scope.add_entry name entry global_scope; - global_scope, entry + (global_scope, entry) let local_scope_entry_exists name = let rec loop = function | [] -> assert_false "empty scope list" - | scope::scopes -> - match Scope.get_entry name scope with - | Some _ -> true - | None -> - match scopes with - | [] -> false - | _ -> loop scopes + | scope :: scopes -> + (match Scope.get_entry name scope with + | Some _ -> true + | None -> + (match scopes with + | [] -> false + | _ -> loop scopes)) in loop !scopes @@ -368,34 +364,35 @@ let local_scope_entry_exists name = let find_entry cx name ?desc loc = let rec loop = function | [] -> assert_false "empty scope list" - | scope::scopes -> - match Scope.get_entry name scope with - | Some entry -> scope, entry + | scope :: scopes -> + (match Scope.get_entry name scope with + | Some entry -> (scope, entry) | None -> (* keep looking until we're at the global scope *) - match scopes with + (match scopes with | [] -> cache_global cx name ?desc loc scope - | _ -> loop scopes + | _ -> loop scopes)) in loop !scopes let get_class_entries () = let rec loop class_bindings = function | [] -> assert_false "empty scope list" - | scope::scopes -> - match Scope.get_entry (internal_name "class") scope with + | scope :: scopes -> + (match Scope.get_entry (internal_name "class") scope with | Some entry -> loop (entry :: class_bindings) scopes | None -> (* keep looking until we're at the global scope *) - match scopes with + (match scopes with | [] -> class_bindings - | _ -> loop class_bindings scopes + | _ -> loop class_bindings scopes)) in let class_bindings = loop [] !scopes in let to_class_record = function - | Entry.Class c -> c - | _ -> assert_false "Internal Error: Non-class binding stored with .class" in - List.map to_class_record class_bindings + | Entry.Class c -> c + | _ 
-> assert_false "Internal Error: Non-class binding stored with .class" + in + Core_list.map ~f:to_class_record class_bindings (* Search for the scope which binds the given name, through the topmost LexScopes and up to the first VarScope. If the entry @@ -403,11 +400,11 @@ let get_class_entries () = let find_entry_in_var_scope name = let rec loop = function | [] -> assert_false "empty scope list" - | scope::scopes -> - match Scope.get_entry name scope, scope.kind with - | Some entry, _ -> scope, Some entry - | None, VarScope _ -> scope, None - | None, LexScope -> loop scopes + | scope :: scopes -> + (match (Scope.get_entry name scope, scope.kind) with + | (Some entry, _) -> (scope, Some entry) + | (None, VarScope _) -> (scope, None) + | (None, LexScope) -> loop scopes) in loop !scopes @@ -417,21 +414,20 @@ let find_entry_in_var_scope name = let find_refi_in_var_scope key = let rec loop = function | [] -> assert_false "empty scope list" - | scope::scopes -> - match Scope.get_refi key scope, scope.kind with - | Some refi, _ -> Some (scope, refi) - | None, VarScope _ -> None - | None, LexScope -> loop scopes + | scope :: scopes -> + (match (Scope.get_refi key scope, scope.kind) with + | (Some refi, _) -> Some (scope, refi) + | (None, VarScope _) -> None + | (None, LexScope) -> loop scopes) in loop !scopes (* helpers *) let binding_error msg cx name entry loc = - Flow.add_output cx (FlowError.EBindingError (msg, loc, name, entry)) + Flow.add_output cx (Error_message.EBindingError (msg, loc, name, entry)) -let already_bound_error = - binding_error FlowError.ENameAlreadyBound +let already_bound_error = binding_error Error_message.ENameAlreadyBound (* initialization of entries happens during a preliminary pass through a scoped region of the AST (dynamic for hoisted things, lexical for @@ -449,133 +445,125 @@ let bind_entry cx name entry loc = binding is found, or realize a binding error *) let rec loop = function | [] -> assert_false "empty scope list" - | scope::scopes -> - match get_entry name scope with - + | scope :: scopes -> + (match get_entry name scope with (* if no entry already exists, this might be our scope *) - | None -> Entry.( - match scope.Scope.kind, entry with - (* lex scopes can only hold let/const/class bindings *) - (* var scope can hold all binding types *) - | LexScope, Value { Entry.kind = Let _; _ } - | LexScope, Value { Entry.kind = Const _; _ } - | LexScope, Class _ - | VarScope _, _ -> - let loc = entry_loc entry in - Type_inference_hooks_js.dispatch_ref_hook cx loc loc; - add_entry name entry scope - (* otherwise, keep looking for our scope *) - | _ -> loop scopes) - + | None -> + Entry.( + (match (scope.Scope.kind, entry) with + (* lex scopes can only hold let/const/class bindings *) + (* var scope can hold all binding types *) + | (LexScope, Value { Entry.kind = Let _; _ }) + | (LexScope, Value { Entry.kind = Const _; _ }) + | (LexScope, Class _) + | (VarScope _, _) -> + add_entry name entry scope + (* otherwise, keep looking for our scope *) + | _ -> loop scopes)) (* some rebindings are allowed, but usually an error *) | Some prev -> - match scope.kind with - + (match scope.kind with (* specifically a var scope allows some shadowing *) - | VarScope _ -> Entry.( - let can_shadow = function - (* funcs/vars can shadow other funcs/vars -- only in var scope *) - | (Var _ | Let FunctionBinding), - (Var _ | Let FunctionBinding) -> true - (* vars can shadow function params *) - | Var _, Let ParamBinding -> true - | Var _, Let ConstlikeParamBinding -> true - | Var _, Const 
ConstParamBinding -> true - | _ -> false - in - match entry, prev with - (* good shadowing leaves existing entry, unifies with new *) - | Value e, Value p - when can_shadow (Entry.kind_of_value e, Entry.kind_of_value p) -> - (* TODO currently we don't step on specific. shouldn't we? *) - Flow.unify cx - (Entry.general_of_value p) (Entry.general_of_value e) - (* bad shadowing is a binding error *) - | _ -> already_bound_error cx name prev loc) - + | VarScope _ -> + Entry.( + let can_shadow = function + (* funcs/vars can shadow other funcs/vars -- only in var scope *) + | ((Var _ | Let FunctionBinding), (Var _ | Let FunctionBinding)) -> true + (* vars can shadow function params *) + | (Var _, Let ParamBinding) -> true + | (Var _, Let ConstlikeParamBinding) -> true + | (Var _, Const ConstParamBinding) -> true + | _ -> false + in + (match (entry, prev) with + (* good shadowing leaves existing entry, unifies with new *) + | (Value e, Value p) when can_shadow (Entry.kind_of_value e, Entry.kind_of_value p) -> + (* TODO currently we don't step on specific. shouldn't we? *) + Flow.unify cx (Entry.general_of_value p) (Entry.general_of_value e) + (* bad shadowing is a binding error *) + | _ -> already_bound_error cx name prev loc)) (* shadowing in a lex scope is always an error *) - | LexScope -> already_bound_error cx name prev loc + | LexScope -> already_bound_error cx name prev loc)) in if not (is_excluded name) then loop !scopes (* bind class entry *) let bind_class cx class_id class_private_fields class_private_static_fields = - bind_entry cx (internal_name "class") - (Entry.new_class class_id class_private_fields class_private_static_fields) Loc.none + bind_entry + cx + (internal_name "class") + (Entry.new_class class_id class_private_fields class_private_static_fields) + ALoc.none (* bind var entry *) -let bind_var ?(state=State.Declared) cx name t loc = +let bind_var ?(state = State.Declared) cx name t loc = bind_entry cx name (Entry.new_var t ~loc ~state) loc (* bind let entry *) -let bind_let ?(state=State.Undeclared) cx name t loc = +let bind_let ?(state = State.Undeclared) cx name t loc = bind_entry cx name (Entry.new_let t ~loc ~state) loc (* bind implicit let entry *) -let bind_implicit_let ?(state=State.Undeclared) kind cx name t loc = +let bind_implicit_let ?(state = State.Undeclared) kind cx name t loc = bind_entry cx name (Entry.new_let t ~kind ~loc ~state) loc -let bind_fun ?(state=State.Declared) = - bind_implicit_let ~state Entry.FunctionBinding +let bind_fun ?(state = State.Declared) = bind_implicit_let ~state Entry.FunctionBinding (* bind const entry *) -let bind_const ?(state=State.Undeclared) cx name t loc = +let bind_const ?(state = State.Undeclared) cx name t loc = bind_entry cx name (Entry.new_const t ~loc ~state) loc -let bind_import cx name t loc = - bind_entry cx name (Entry.new_import t ~loc) loc +let bind_import cx name t loc = bind_entry cx name (Entry.new_import t ~loc) loc (* bind implicit const entry *) -let bind_implicit_const ?(state=State.Undeclared) kind cx name t loc = +let bind_implicit_const ?(state = State.Undeclared) kind cx name t loc = bind_entry cx name (Entry.new_const t ~kind ~loc ~state) loc (* bind type entry *) -let bind_type ?(state=State.Declared) cx name t loc = +let bind_type ?(state = State.Declared) cx name t loc = bind_entry cx name (Entry.new_type t ~loc ~state) loc -let bind_import_type cx name t loc = - bind_entry cx name (Entry.new_import_type t ~loc) loc +let bind_import_type cx name t loc = bind_entry cx name (Entry.new_import_type t 
~loc) loc (* vars coming from 'declare' statements are preinitialized *) let bind_declare_var = bind_var ~state:State.Initialized (* bind entry for declare function *) let bind_declare_fun = - - let update_type seen_t new_t = match seen_t with - | DefT (reason, IntersectionT rep) -> - DefT (reason, IntersectionT (InterRep.append [new_t] rep)) - | _ -> - let reason = replace_reason_const RIntersectionType (reason_of_t seen_t) in - DefT (reason, IntersectionT (InterRep.make seen_t new_t [])) + let update_type seen_t new_t = + match seen_t with + | IntersectionT (reason, rep) -> IntersectionT (reason, InterRep.append [new_t] rep) + | _ -> + let reason = replace_desc_reason RIntersectionType (reason_of_t seen_t) in + IntersectionT (reason, InterRep.make seen_t new_t []) in - fun cx name t loc -> - if not (is_excluded name) - then ( + if not (is_excluded name) then let scope = peek_scope () in match Scope.get_entry name scope with | None -> let entry = Entry.new_var t ~loc ~state:State.Initialized in Scope.add_entry name entry scope - | Some prev -> - Entry.(match prev with - - | Value v - when (match Entry.kind_of_value v with Var _ -> true | _ -> false) -> - let entry = Value { v with - value_state = State.Initialized; - specific = update_type v.specific t; - general = update_type v.general t; - } in - Scope.add_entry name entry scope - - | _ -> - (* declare function shadows some other kind of binding *) - already_bound_error cx name prev loc - ) - ) + Entry.( + (match prev with + | Value v + when match Entry.kind_of_value v with + | Var _ -> true + | _ -> false -> + let entry = + Value + { + v with + value_state = State.Initialized; + specific = update_type v.specific t; + general = update_type v.general t; + } + in + Scope.add_entry name entry scope + | _ -> + (* declare function shadows some other kind of binding *) + already_bound_error cx name prev loc)) (* helper: move a Let/Const's entry's state from Undeclared to Declared. Only needed for let and const to push things into scope for potentially @@ -583,125 +571,147 @@ let bind_declare_fun = immediately on binding. *) let declare_value_entry kind cx name loc = - if not (is_excluded name) - then Entry.( - let scope, entry = find_entry cx name loc in - match entry with - | Value v when - Entry.kind_of_value v = kind && - Entry.state_of_value v = State.Undeclared -> - let new_entry = Value { v with value_state = State.Declared } in - Scope.add_entry name new_entry scope - | _ -> - already_bound_error cx name entry loc - ) + if not (is_excluded name) then + Entry.( + let (scope, entry) = find_entry cx name loc in + match entry with + | Value v when Entry.kind_of_value v = kind && Entry.state_of_value v = State.Undeclared -> + let new_entry = Value { v with value_state = State.Declared } in + Scope.add_entry name new_entry scope + | _ -> already_bound_error cx name entry loc) let declare_let = declare_value_entry Entry.(Let LetVarBinding) -let declare_implicit_let kind = - declare_value_entry (Entry.Let kind) + +let declare_implicit_let kind = declare_value_entry (Entry.Let kind) + let declare_const = declare_value_entry Entry.(Const ConstVarBinding) +let declare_implicit_const kind = declare_value_entry (Entry.Const kind) + let promote_to_const_like cx loc = try - let info, values = Context.use_def cx in + let (info, values) = Context.use_def cx in let uses = Scope_api.uses_of_use info loc in (* We consider a binding to be const-like if all reads point to the same write, modulo initialization. 
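(* Editorial aside: a minimal, self-contained sketch of the const-like check
   described above. Locations are plain ints and [values] is a simple assoc
   list; these stand in for the real Scope_api/Ssa_api data and are
   illustrative only, not Flow's API. *)
module Locs = Set.Make (struct
  type t = int
  let compare = compare
end)

type write_loc =
  | Uninitialized        (* the binding's initializer *)
  | Write of int         (* location of an assignment *)

(* A binding is const-like when, across all of its reads, at most one
   distinct assignment is ever observed, ignoring initialization. *)
let is_const_like (values : (int * write_loc list) list) : bool =
  let writes =
    List.fold_left
      (fun acc (_read_loc, write_locs) ->
        List.fold_left
          (fun acc -> function
            | Uninitialized -> acc
            | Write loc -> Locs.add loc acc)
          acc
          write_locs)
      Locs.empty
      values
  in
  Locs.cardinal writes <= 1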
*) - let writes = LocSet.fold (fun use acc -> - match LocMap.get use values with - | None -> (* use is a write *) acc - | Some write_locs -> (* use is a read *) - (* collect writes pointed to by the read, modulo initialization *) - List.fold_left (fun acc -> function - | Ssa_api.Uninitialized -> acc - | Ssa_api.Write loc -> LocSet.add loc acc - ) acc write_locs - ) uses LocSet.empty in - LocSet.cardinal writes <= 1 + let writes = + ALocSet.fold + (fun use acc -> + match ALocMap.get use values with + | None -> (* use is a write *) acc + | Some write_locs -> + (* use is a read *) + (* collect writes pointed to by the read, modulo initialization *) + List.fold_left + (fun acc -> function + | Ssa_api.With_ALoc.Uninitialized -> acc + | Ssa_api.With_ALoc.Write loc -> ALocSet.add loc acc) + acc + write_locs) + uses + ALocSet.empty + in + ALocSet.cardinal writes <= 1 with _ -> false +let initialized_value_entry cx kind specific loc v = + Entry.( + (* Maybe promote to const-like *) + let new_kind = + match kind with + | Var VarBinding -> + if promote_to_const_like cx loc then + Var ConstlikeVarBinding + else + kind + | Let LetVarBinding -> + if promote_to_const_like cx loc then + Let ConstlikeLetVarBinding + else + kind + | _ -> kind + in + Value { v with Entry.kind = new_kind; value_state = State.Initialized; specific }) + (* helper - update var entry to reflect assignment/initialization *) (* note: here is where we understand that a name can be multiply var-bound *) let init_value_entry kind cx ~use_op name ~has_anno specific loc = - if not (is_excluded name) - then Entry.( - let scope, entry = find_entry cx name loc in - match kind, entry with - | Var _, Value ({ Entry.kind = Var _; _ } as v) - | Let _, Value ({ Entry.kind = Let _; - value_state = State.Undeclared | State.Declared; _ } as v) - | Const _, Value ({ Entry.kind = Const _; - value_state = State.Undeclared | State.Declared; _ } as v) -> - Changeset.change_var (scope.id, name, Changeset.Write); - if specific != v.general then ( - Flow_js.flow cx (specific, UseT (use_op, v.general)); - ); - (* note that annotation supercedes specific initializer type *) - let new_kind = - match kind with - | Var VarBinding -> - if (promote_to_const_like cx loc) - then Entry.(Var ConstlikeVarBinding) - else kind - | Let LetVarBinding -> - if (promote_to_const_like cx loc) - then Entry.(Let ConstlikeLetVarBinding) - else kind - | _ -> kind in - let value_binding = { v with - Entry.kind = new_kind; - value_state = State.Initialized; - specific = if has_anno then v.general else specific - } in - let new_entry = Value value_binding in - Scope.add_entry name new_entry scope; - | _ -> - (* Incompatible or non-redeclarable new and previous entries. 
+ if not (is_excluded name) then + Entry.( + let (scope, entry) = find_entry cx name loc in + match (kind, entry) with + | (Var _, Value ({ Entry.kind = Var _; _ } as v)) + | ( Let _, + Value ({ Entry.kind = Let _; value_state = State.Undeclared | State.Declared; _ } as v) + ) + | ( Const _, + Value ({ Entry.kind = Const _; value_state = State.Undeclared | State.Declared; _ } as v) + ) -> + Changeset.Global.change_var (scope.id, name, Changeset.Write); + if specific != v.general then Flow_js.flow cx (specific, UseT (use_op, v.general)); + + (* note that annotation supercedes specific initializer type *) + let specific = + if has_anno then + v.general + else + specific + in + let new_entry = initialized_value_entry cx kind specific loc v in + Scope.add_entry name new_entry scope + | _ -> + (* Incompatible or non-redeclarable new and previous entries. We will have already issued an error in `bind_value_entry`, so we can prune this case here. *) - () - ) + ()) let init_var = init_value_entry Entry.(Var VarBinding) + let init_let = init_value_entry Entry.(Let LetVarBinding) + let init_implicit_let kind = init_value_entry (Entry.Let kind) + let init_fun = init_implicit_let ~has_anno:false Entry.FunctionBinding + let init_const = init_value_entry Entry.(Const ConstVarBinding) +let init_implicit_const kind = init_value_entry Entry.(Const kind) + (* update type alias to reflect initialization in code *) -let init_type cx name _type loc = - if not (is_excluded name) - then Entry.( - let scope, entry = find_entry cx name loc in - match entry with - | Type ({ type_state = State.Declared; _ } as t)-> - Flow.flow_t cx (_type, t._type); - let new_entry = Type { t with type_state = State.Initialized; _type } in - Scope.add_entry name new_entry scope - | _ -> - (* Incompatible or non-redeclarable new and previous entries. +let init_type cx name type_ loc = + if not (is_excluded name) then + Entry.( + let (scope, entry) = find_entry cx name loc in + match entry with + | Type ({ type_state = State.Declared; _ } as t) -> + Flow.flow_t cx (type_, t.type_); + let new_entry = Type { t with type_state = State.Initialized; type_ } in + Scope.add_entry name new_entry scope + | _ -> + (* Incompatible or non-redeclarable new and previous entries. We will have already issued an error in `bind_value_entry`, so we can prune this case here. *) - () - ) + ()) (* treat a var's declared (annotated) type as an initializer *) let pseudo_init_declared_type cx name loc = - if not (is_excluded name) - then Entry.( - let scope, entry = find_entry cx name loc in - match entry with - | Value value_binding -> - let entry = Value { value_binding with - value_state = State.Declared; - specific = value_binding.general - } in - Scope.add_entry name entry scope - | Type _ -> - assert_false (spf "pseudo_init_declared_type %s: Type entry" name) - | Class _ -> - assert_false (spf "pseudo_init_declared_type %s: Class entry" name) - ) + if not (is_excluded name) then + Entry.( + let (scope, entry) = find_entry cx name loc in + match entry with + | Value ({ Entry.kind = Var _; _ } as v) + | Value + ({ Entry.kind = Let _ | Const _; value_state = State.(Undeclared | Declared); _ } as v) + -> + Changeset.Global.change_var (scope.id, name, Changeset.Write); + let kind = v.Entry.kind in + let entry = initialized_value_entry cx kind v.general loc v in + Scope.add_entry name entry scope + | _ -> + (* Incompatible or non-redeclarable new and previous entries. 
+ We will have already issued an error in `bind_value_entry`, + so we can prune this case here. *) + ()) (* helper for read/write tdz checks *) (* for now, we only enforce TDZ within the same activation. @@ -710,502 +720,506 @@ let pseudo_init_declared_type cx name loc = *) let same_activation target = let rec loop target = function - | [] -> assert_false "target scope not found" - | scope :: _ when scope.id = target.id -> - (* target is nearer than (or actually is) nearest VarScope *) - true - | scope :: scopes -> - match scope.kind with - | VarScope _ -> - (* found var scope before target *) - false - | LexScope -> - (* still in inner lex scopes, keep looking *) - loop target scopes + | [] -> assert_false "target scope not found" + | scope :: _ when scope.id = target.id -> + (* target is nearer than (or actually is) nearest VarScope *) + true + | scope :: scopes -> + (match scope.kind with + | VarScope _ -> + (* found var scope before target *) + false + | LexScope -> + (* still in inner lex scopes, keep looking *) + loop target scopes) in (* search outward for target scope *) loop target (peek_env ()) (* get types from value entry, does uninitialized -> undefined behavior *) -let value_entry_types ?(lookup_mode=ForValue) scope = Entry.(function - (* from value positions, a same-activation ref to var or an explicit let +let value_entry_types ?(lookup_mode = ForValue) scope = + Entry.( + function + (* from value positions, a same-activation ref to var or an explicit let before initialization yields undefined. *) -| { Entry.kind = Var _ | Let LetVarBinding; - value_state = State.Declared | State.MaybeInitialized as state; - value_declare_loc; specific; general; _ } - when lookup_mode = ForValue && same_activation scope - -> - let uninit desc = VoidT.make (mk_reason desc value_declare_loc) in - let specific = if state = State.Declared - then uninit (RCustom "uninitialized variable") - else (* State.MaybeInitialized *) - let desc = (RCustom "possibly uninitialized variable") in - let rep = UnionRep.make (uninit desc) specific [] in - DefT (mk_reason desc value_declare_loc, UnionT rep) - in - specific, general - -| { specific; general; _ } -> - specific, general -) + | { + Entry.kind = Var _ | Let LetVarBinding; + value_state = (State.Declared | State.MaybeInitialized) as state; + value_declare_loc; + specific; + general; + _; + } + when lookup_mode = ForValue && same_activation scope -> + let uninit desc = VoidT.make (mk_reason desc value_declare_loc) |> with_trust bogus_trust in + let specific = + if state = State.Declared then + uninit (RCustom "uninitialized variable") + else + (* State.MaybeInitialized *) + let desc = RCustom "possibly uninitialized variable" in + let rep = UnionRep.make (uninit desc) specific [] in + UnionT (mk_reason desc value_declare_loc, rep) + in + (specific, general) + | { specific; general; _ } -> (specific, general)) (* emit tdz error for value entry *) -let tdz_error cx name loc v = Entry.( - (* second clause of error message is due to switch scopes *) - let msg = FlowError.EReferencedBeforeDeclaration in - binding_error msg cx name (Value v) loc -) +let tdz_error cx name loc v = + Entry.( + (* second clause of error message is due to switch scopes *) + let msg = Error_message.EReferencedBeforeDeclaration in + binding_error msg cx name (Value v) loc) (* helper for read/write tdz checks *) (* functions are block-scoped, but also hoisted. 
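(* Editorial aside: a simplified model of the TDZ rule enforced by
   [allow_forward_ref] and [read_entry] below. The variant and function
   names are illustrative, not Flow's real Entry API. *)
type binding_kind =
  | Var_binding
  | Let_binding
  | Const_binding
  | Function_binding  (* the implicit let used for hoisted functions *)

type init_state =
  | Undeclared
  | Declared
  | Initialized

(* Vars and hoisted functions may be forward-referenced; reading a let or
   const that is still Undeclared within the same activation is a
   referenced-before-declaration (TDZ) error. *)
let read_is_tdz_error ~same_activation kind state =
  match (kind, state) with
  | ((Var_binding | Function_binding), _) -> false
  | ((Let_binding | Const_binding), Undeclared) -> same_activation
  | ((Let_binding | Const_binding), (Declared | Initialized)) -> false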
forward ref ok *) -let allow_forward_ref = Scope.Entry.(function - | Var _ | Let FunctionBinding -> true - | _ -> false -) +let allow_forward_ref = + Scope.Entry.( + function + | Var _ + | Let FunctionBinding -> + true + | _ -> false) (* helper - does semantic checking and returns entry type *) -let read_entry ~track_ref ~lookup_mode ~specific cx name ?desc loc = - let scope, entry = find_entry cx name ?desc loc in - if track_ref then Type_inference_hooks_js.dispatch_ref_hook cx - (Entry.entry_loc entry) loc; - Entry.(match entry with - - | Type _ when lookup_mode != ForType -> - let msg = FlowError.ETypeInValuePosition in - binding_error msg cx name entry loc; - AnyT.at (entry_loc entry) - - | Type t -> - t._type - - | Class _ -> assert_false "Internal Error: Classes should only be read using get_class_entries" - - | Value v -> - match v with - | { Entry.kind; value_state = State.Undeclared; value_declare_loc; _ } - when lookup_mode = ForValue && not (allow_forward_ref kind) - && same_activation scope -> - tdz_error cx name loc v; - AnyT.at value_declare_loc - | _ -> - Changeset.change_var (scope.id, name, Changeset.Read); - let s, g = value_entry_types ~lookup_mode scope v in - if specific then s else g - ) +let read_entry ~lookup_mode ~specific cx name ?desc loc = + let (scope, entry) = find_entry cx name ?desc loc in + Entry.( + match entry with + | Type _ when lookup_mode != ForType -> + let msg = Error_message.ETypeInValuePosition in + binding_error msg cx name entry loc; + AnyT.at AnyError (entry_loc entry) + | Type t -> t.type_ + | Class _ -> assert_false "Internal Error: Classes should only be read using get_class_entries" + | Value v -> + (match v with + | { Entry.kind; value_state = State.Undeclared; value_declare_loc; _ } + when lookup_mode = ForValue && (not (allow_forward_ref kind)) && same_activation scope -> + tdz_error cx name loc v; + AnyT.at AnyError value_declare_loc + | _ -> + Changeset.Global.change_var (scope.id, name, Changeset.Read); + let (s, g) = value_entry_types ~lookup_mode scope v in + if specific then + s + else + g)) let rec seek_env f = function -| [] -> None -| scope :: scopes -> - match f scope with - | Some x -> Some x - | None -> seek_env f scopes + | [] -> None + | scope :: scopes -> + (match f scope with + | Some x -> Some x + | None -> seek_env f scopes) (* get env entry for name, if it exists *) -let get_env_entry name = - seek_env (Scope.get_entry name) +let get_env_entry name = seek_env (Scope.get_entry name) (* get current env entry for name, if it exists *) -let get_current_env_entry name = - get_env_entry name !scopes +let get_current_env_entry name = get_env_entry name !scopes (* get env refi for key, if it exists *) -let get_env_refi key = - seek_env (Scope.get_refi key) +let get_env_refi key = seek_env (Scope.get_refi key) (* get current env refi for name, if it exists *) -let get_current_env_refi key = - get_env_refi key !scopes +let get_current_env_refi key = get_env_refi key !scopes (* get var's specific type (and track the reference) *) -let get_var ?(lookup_mode=ForValue) = - read_entry ~track_ref:true ~lookup_mode ~specific:true ?desc:None +let get_var ?(lookup_mode = ForValue) = read_entry ~lookup_mode ~specific:true ?desc:None (* query var's specific type *) -let query_var ~track_ref ?(lookup_mode=ForValue) = - read_entry ~track_ref ~lookup_mode ~specific:true +let query_var ?(lookup_mode = ForValue) = read_entry ~lookup_mode ~specific:true -let get_internal_var cx name loc = - query_var ~track_ref:false cx (internal_name name) loc 
+let get_internal_var cx name loc = query_var cx (internal_name name) loc (* get var's general type - for annotated vars, this is the annotated type, and for others it's the union of all types assigned to the var throughout its lifetime. *) -let get_var_declared_type ?(lookup_mode=ForValue) = - read_entry ~track_ref:false ~lookup_mode ~specific:false ?desc:None +let get_var_declared_type ?(lookup_mode = ForValue) = + read_entry ~lookup_mode ~specific:false ?desc:None (* Unify declared type with another type. This is useful for allowing forward references in declared types to other types declared later in scope. *) -let unify_declared_type ?(lookup_mode=ForValue) cx name t = - Entry.(match get_current_env_entry name with - | Some (Value v) when lookup_mode = ForValue -> - Flow.unify cx t (general_of_value v) - | Some entry when lookup_mode <> ForValue -> - Flow.unify cx t (Entry.declared_type entry) - | _ -> () - ) +let unify_declared_type ?(lookup_mode = ForValue) cx name t = + Entry.( + match get_current_env_entry name with + | Some (Value v) when lookup_mode = ForValue -> Flow.unify cx t (general_of_value v) + | Some entry when lookup_mode <> ForValue -> Flow.unify cx t (Entry.declared_type entry) + | _ -> ()) + +(* Unify declared function type with another type. This is similarly motivated as above, except that + we also need to take overloading into account. See `bind_declare_fun` for similar logic. *) +let unify_declared_fun_type = + let find_type aloc = function + | IntersectionT (_, rep) -> + let match_type t = aloc_of_reason (reason_of_t t) = aloc in + begin + match List.find_opt match_type (InterRep.members rep) with + | Some t -> t + | None -> assert_false "Internal Error: Improper overloaded declare function entries." + end + | v -> v + in + fun cx name aloc t -> + Entry.( + match get_current_env_entry name with + | Some (Value v) -> Flow.unify cx t (find_type aloc (general_of_value v)) + | _ -> ()) let is_global_var _cx name = let rec loop = function | [] -> true - | scope::scopes -> - match Scope.get_entry name scope with + | scope :: scopes -> + (match Scope.get_entry name scope with | Some _ -> Scope.is_global scope - | None -> loop scopes + | None -> loop scopes) in loop !scopes (* get var type, with given location used in type's reason *) -let var_ref ?(lookup_mode=ForValue) cx name ?desc loc = - let t = query_var ~track_ref:true ~lookup_mode cx name ?desc loc in +let var_ref ?(lookup_mode = ForValue) cx name ?desc loc = + let t = query_var ~lookup_mode cx name ?desc loc in Flow.reposition cx loc t (* get refinement entry *) let get_refinement cx key loc = match find_refi_in_var_scope key with - | Some (_, { refined; _ }) -> - Some (Flow.reposition cx loc refined) + | Some (_, { refined; _ }) -> Some (Flow.reposition cx loc refined) | _ -> None (* helper: update let or var entry *) -let update_var ?(track_ref=false) op cx ~use_op name specific loc = - let scope, entry = find_entry cx name loc in - if track_ref then Type_inference_hooks_js.dispatch_ref_hook cx - (Entry.entry_loc entry) loc; - Entry.(match entry with - | Value ({ - Entry.kind = (Let _ as kind); value_state = State.Undeclared; _ - } as v) when not (allow_forward_ref kind) && same_activation scope -> - tdz_error cx name loc v; - None - | Value ({ Entry.kind = Let _ | Var _; _ } as v) -> - let change = scope.id, name, op in - Changeset.change_var change; - let use_op = match op with - | Changeset.Write -> use_op - | Changeset.Refine -> use_op - | Changeset.Read -> unknown_use (* this is impossible *) - in - 
Flow.flow cx (specific, UseT (use_op, Entry.general_of_value v)); - (* add updated entry *) - let update = Entry.Value { - v with Entry. - value_state = State.Initialized; - specific; - value_assign_loc = loc; - } in - Scope.add_entry name update scope; - Some change - | Value { Entry.kind = Const ConstVarBinding; _ } -> - let msg = FlowError.EConstReassigned in - binding_error msg cx name entry loc; - None - | Value { Entry.kind = Const ConstImportBinding; _; } -> - let msg = FlowError.EImportReassigned in - binding_error msg cx name entry loc; - None - | Value { Entry.kind = Const ConstParamBinding; _ } -> - (* TODO: remove extra info when surface syntax is added *) - let msg = FlowError.EConstParamReassigned in - binding_error msg cx name entry loc; - None - | Type _ -> - let msg = FlowError.ETypeAliasInValuePosition in - binding_error msg cx name entry loc; - None - | Class _ -> assert_false "Internal error: update_var called on Class" - ) +let update_var op cx ~use_op name specific loc = + let (scope, entry) = find_entry cx name loc in + Entry.( + match entry with + | Value ({ Entry.kind = Let _ as kind; value_state = State.Undeclared; _ } as v) + when (not (allow_forward_ref kind)) && same_activation scope -> + tdz_error cx name loc v; + None + | Value ({ Entry.kind = Let _ | Var _; _ } as v) -> + let change = (scope.id, name, op) in + Changeset.Global.change_var change; + let use_op = + match op with + | Changeset.Write -> use_op + | Changeset.Refine -> use_op + | Changeset.Read -> unknown_use + (* this is impossible *) + in + Flow.flow cx (specific, UseT (use_op, Entry.general_of_value v)); + + (* add updated entry *) + let update = + Entry.Value + { v with Entry.value_state = State.Initialized; specific; value_assign_loc = loc } + in + Scope.add_entry name update scope; + Some change + | Value { Entry.kind = Const ConstVarBinding; _ } -> + let msg = Error_message.EConstReassigned in + binding_error msg cx name entry loc; + None + | Value { Entry.kind = Const EnumNameBinding; _ } -> + let msg = Error_message.EEnumReassigned in + binding_error msg cx name entry loc; + None + | Value { Entry.kind = Const ConstImportBinding; _ } -> + let msg = Error_message.EImportReassigned in + binding_error msg cx name entry loc; + None + | Value { Entry.kind = Const ConstParamBinding; _ } -> + (* TODO: remove extra info when surface syntax is added *) + let msg = Error_message.EConstParamReassigned in + binding_error msg cx name entry loc; + None + | Type _ -> + let msg = Error_message.ETypeAliasInValuePosition in + binding_error msg cx name entry loc; + None + | Class _ -> assert_false "Internal error: update_var called on Class") (* update var by direct assignment *) -let set_var = update_var ~track_ref:true Changeset.Write +let set_var = update_var Changeset.Write let set_internal_var cx name t loc = - update_var ~track_ref:false Changeset.Write cx ~use_op:unknown_use (internal_name name) t loc + update_var Changeset.Write cx ~use_op:unknown_use (internal_name name) t loc (* update var by refinement test *) let refine_var = update_var Changeset.Refine ~use_op:(Op (Internal Refinement)) (* set const's specific type to reflect a refinement test (internal) *) let refine_const cx name specific loc = - let scope, entry = find_entry cx name loc in - Entry.(match entry with - | Value ({ Entry.kind = Const _; _ } as v) -> - let change = scope.id, name, Changeset.Refine in - Changeset.change_var change; - let general = Entry.general_of_value v in - Flow.flow cx (specific, UseT (Op (Internal Refinement), 
general)); - let update = Value { - v with value_state = State.Initialized; specific - } in - Scope.add_entry name update scope; - Some change - | _ -> - assert_false (spf "refine_const called on %s %s" - (Entry.string_of_kind entry) name) - ) + let (scope, entry) = find_entry cx name loc in + Entry.( + match entry with + | Value ({ Entry.kind = Const _; value_state = State.Undeclared; _ } as v) + when same_activation scope -> + tdz_error cx name loc v; + None + | Value ({ Entry.kind = Const _; _ } as v) -> + let change = (scope.id, name, Changeset.Refine) in + Changeset.Global.change_var change; + let general = Entry.general_of_value v in + Flow.flow cx (specific, UseT (Op (Internal Refinement), general)); + let update = Value { v with value_state = State.Initialized; specific } in + Scope.add_entry name update scope; + Some change + | _ -> assert_false (spf "refine_const called on %s %s" (Entry.string_of_kind entry) name)) (* given a list of envs (scope lists), return true iff all envs are the same length and all scope ids and kinds match *) let envs_congruent envs = let rec check_scopes envs = let env0 = List.hd envs in - env0 = [] || - let scope0 = List.hd env0 in - let check_scope env = - let scope = List.hd env in - scope.id = scope0.id && scope.kind = scope0.kind - in - List.for_all check_scope (List.tl envs) && - check_scopes (List.map List.tl envs) + env0 = [] + || + let scope0 = List.hd env0 in + let check_scope env = + let scope = List.hd env in + scope.id = scope0.id && scope.kind = scope0.kind + in + List.for_all check_scope (List.tl envs) && check_scopes (Core_list.map ~f:List.tl envs) in let envs = ListUtils.phys_uniq envs in - List.length envs <= 1 || - let len = List.length (List.hd envs) in - List.for_all (fun env -> List.length env = len) (List.tl envs) && - check_scopes envs + List.length envs <= 1 + || + let len = List.length (List.hd envs) in + List.for_all (fun env -> List.length env = len) (List.tl envs) && check_scopes envs (* find scopes with a given id in a list of envs. envs are assumed congruent amd assumed to contain scope id *) let rec find_scope cx loc envs scope_id = match envs with | (scope0 :: _) :: _ -> - if scope0.id = scope_id - then List.(map hd envs) - else find_scope cx loc List.(map tl envs) scope_id + if scope0.id = scope_id then + List.(map hd envs) + else + find_scope cx loc List.(map tl envs) scope_id | _ -> - assert_false (spf "find_scopes %s: scope %d not found. head env %s" - (string_of_loc loc) scope_id - (string_of_env cx (List.hd envs))) + assert_false + (spf + "find_scopes %s: scope %d not found. head env %s" + (string_of_aloc loc) + scope_id + (string_of_env cx (List.hd envs))) (* The following function takes a changset and a triple of environments - original and two derivations - and merges the bindings indicated by changeset keys from the derivations into the original. 
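(* Editorial aside: a toy model of the merge_specific step inside merge_env
   below. Types are symbolic; [Union] stands in for the tvar that the real
   code creates and then constrains against the general type. Illustrative
   only. *)
type ty =
  | Ty of string
  | Bottom
  | Union of ty * ty

let is_bot = function
  | Bottom -> true
  | _ -> false

(* Given the original (specific, general) pair and the specific types from
   the two branches, choose the merged specific type. *)
let merge_specific (specific0, general0) specific1 specific2 =
  if
    (specific0 = specific1 && (specific0 = specific2 || is_bot specific2))
    || (specific0 = specific2 && is_bot specific1)
  then
    (* both branches unchanged (or one is bottom): keep the original *)
    specific0
  else if specific1 = general0 || specific2 = general0 then
    (* a branch reverted to the general type: widen to it *)
    general0
  else
    (* general case: merge the two branch types *)
    Union (specific1, specific2)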
*) let merge_env = - (* find scope triple in env triple *) let find_scope_triple cx loc (env0, env1, env2) id = let lst = find_scope cx loc [env0; env1; env2] id in List.(nth lst 0, nth lst 1, nth lst 2) in - let create_union cx loc name l1 l2 = let reason = mk_reason name loc in Tvar.mk_where cx reason (fun tvar -> - Flow.flow cx (l1, UseT (Op (Internal MergeEnv), tvar)); - Flow.flow cx (l2, UseT (Op (Internal MergeEnv), tvar)); - ) + Flow.flow cx (l1, UseT (Op (Internal MergeEnv), tvar)); + Flow.flow cx (l2, UseT (Op (Internal MergeEnv), tvar))) in - (* merge_entry helper - calculate new specific type *) let merge_specific cx loc name (specific0, general0) specific1 specific2 = - (** if both children are unchanged, or 1 child is unchanged and the other + (* if both children are unchanged, or 1 child is unchanged and the other is bottom (EmptyT), then we can avoid creating a merged specific *) - if (specific0 = specific1 && (specific0 = specific2 || is_bot specific2)) - || (specific0 = specific2 && is_bot specific1) - then specific0 + if + (specific0 = specific1 && (specific0 = specific2 || is_bot specific2)) + || (specific0 = specific2 && is_bot specific1) + then + specific0 (* child has reverted to original - shortcut *) - else if specific1 = general0 || specific2 = general0 - then general0 + else if specific1 = general0 || specific2 = general0 then + general0 (* general case *) else let tvar = create_union cx loc name specific1 specific2 in Flow.flow cx (tvar, UseT (Op (Internal MergeEnv), general0)); tvar in - (* propagate var state updates from child entries *) - let merge_states orig child1 child2 = Entry.( - match orig.Entry.kind with - | Var _ | Let _ - when - child1.value_state = State.Initialized && - child2.value_state = State.Initialized -> - (* if both branches have initialized, we can set parent state *) - State.Initialized - | Var _ | Let _ - when - child1.value_state >= State.Declared && - child2.value_state >= State.Declared && - (child1.value_state >= State.MaybeInitialized || - child2.value_state >= State.MaybeInitialized) -> - (* if either branch has initialized, we can set parent state *) - State.MaybeInitialized - | _ -> orig.value_state - ) in - + let merge_states orig child1 child2 = + Entry.( + match orig.Entry.kind with + | Var _ + | Let _ + when child1.value_state = State.Initialized && child2.value_state = State.Initialized -> + (* if both branches have initialized, we can set parent state *) + State.Initialized + | Var _ + | Let _ + when child1.value_state >= State.Declared + && child2.value_state >= State.Declared + && ( child1.value_state >= State.MaybeInitialized + || child2.value_state >= State.MaybeInitialized ) -> + (* if either branch has initialized, we can set parent state *) + State.MaybeInitialized + | _ -> orig.value_state) + in let merge_entry cx loc envs ((scope_id, name, _) as entry_ref) = - let scope0, scope1, scope2 = find_scope_triple cx loc envs scope_id in + let (scope0, scope1, scope2) = find_scope_triple cx loc envs scope_id in let get = get_entry name in - Entry.(match get scope0, get scope1, get scope2 with - (* merge child var and let types back to original *) - | Some Value orig, Some Value child1, Some Value child2 -> - let { specific = s0; general = g0; _ } = orig in - let { specific = s1; _ } = child1 in - let { specific = s2; _ } = child2 in - let specific = merge_specific cx loc (RIdentifier name) (s0, g0) s1 s2 in - let value_state = merge_states orig child1 child2 in - (* replace entry if anything changed *) - if specific == s0 
&& value_state = orig.value_state - then () - else let e = Entry.Value { orig with Entry.specific; value_state } in - add_entry name e scope0 - (* type aliases can't be refined or reassigned, shouldn't be here *) - | Some Type _, Some Type _, Some Type _ -> - assert_false (spf "merge_env %s: type alias %s found in changelist" - (string_of_loc loc) name) - (* global lookups may leave uneven new entries, which we can forget *) - | _, _, _ when is_global scope0 -> - () - (* missing completely from non-global scope *) - | None, None, None -> - assert_false (spf - "%smerge_entry %s %s: missing from scopes:\n%s\n%s\n%s" - (Context.pid_prefix cx) - (string_of_loc loc) - (Changeset.string_of_entry_ref entry_ref) - (Debug_js.string_of_scope cx scope0) - (Debug_js.string_of_scope cx scope1) - (Debug_js.string_of_scope cx scope2)) - (* a newly created entry may exist in one lex child - + Entry.( + match (get scope0, get scope1, get scope2) with + (* merge child var and let types back to original *) + | (Some (Value orig), Some (Value child1), Some (Value child2)) -> + let { specific = s0; general = g0; _ } = orig in + let { specific = s1; _ } = child1 in + let { specific = s2; _ } = child2 in + let specific = merge_specific cx loc (RIdentifier name) (s0, g0) s1 s2 in + let value_state = merge_states orig child1 child2 in + (* replace entry if anything changed *) + if specific == s0 && value_state = orig.value_state then + () + else + let e = Entry.Value { orig with Entry.specific; value_state } in + add_entry name e scope0 + (* type aliases can't be refined or reassigned, shouldn't be here *) + | (Some (Type _), Some (Type _), Some (Type _)) -> + assert_false + (spf "merge_env %s: type alias %s found in changelist" (string_of_aloc loc) name) + (* global lookups may leave uneven new entries, which we can forget *) + | (_, _, _) when is_global scope0 -> () + (* missing completely from non-global scope *) + | (None, None, None) -> + assert_false + (spf + "%smerge_entry %s %s: missing from scopes:\n%s\n%s\n%s" + (Context.pid_prefix cx) + (string_of_aloc loc) + (Changeset.string_of_entry_ref entry_ref) + (Debug_js.string_of_scope cx scope0) + (Debug_js.string_of_scope cx scope1) + (Debug_js.string_of_scope cx scope2)) + (* a newly created entry may exist in one lex child - this pattern is due to our current switch handling *) - | None, Some (Value _ as entry), None when Scope.is_lex scope1 -> - add_entry name entry scope0 - | None, None, Some (Value _ as entry) when Scope.is_lex scope2 -> - add_entry name entry scope0 - (* otherwise, non-refinement uneven distributions are asserts. *) - | orig, child1, child2 -> - let print_entry_kind_opt = function - | None -> "None" - | Some e -> spf "Some %s" Entry.(string_of_kind e) - in assert_false (spf - "merge_env %s: non-uniform distribution of entry %s: %s, %s, %s" - (string_of_loc loc) - name - (print_entry_kind_opt orig) - (print_entry_kind_opt child1) - (print_entry_kind_opt child2)) - ) in - + | (None, Some (Value _ as entry), None) when Scope.is_lex scope1 -> + add_entry name entry scope0 + | (None, None, Some (Value _ as entry)) when Scope.is_lex scope2 -> + add_entry name entry scope0 + (* otherwise, non-refinement uneven distributions are asserts. 
*) + | (orig, child1, child2) -> + let print_entry_kind_opt = function + | None -> "None" + | Some e -> spf "Some %s" Entry.(string_of_kind e) + in + assert_false + (spf + "merge_env %s: non-uniform distribution of entry %s: %s, %s, %s" + (string_of_aloc loc) + name + (print_entry_kind_opt orig) + (print_entry_kind_opt child1) + (print_entry_kind_opt child2))) + in let merge_refi cx loc envs (scope_id, key, _) = - let scope0, scope1, scope2 = find_scope_triple cx loc envs scope_id in + let (scope0, scope1, scope2) = find_scope_triple cx loc envs scope_id in let get = get_refi key in - match get scope0, get scope1, get scope2 with + match (get scope0, get scope1, get scope2) with (* evenly distributed refinements are merged *) - | Some base, Some child1, Some child2 -> + | (Some base, Some child1, Some child2) -> let name = Key.reason_desc key in - let refined = merge_specific cx loc name (base.refined, base.original) - child1.refined child2.refined in - if refined == base.refined - then () - else add_refi key { base with refined } scope0 - + let refined = + merge_specific cx loc name (base.refined, base.original) child1.refined child2.refined + in + if refined == base.refined then + () + else + add_refi key { base with refined } scope0 (* refi was introduced in both children *) - | None, Some child1, Some child2 -> + | (None, Some child1, Some child2) -> let name = Key.reason_desc key in let refined = create_union cx loc name child1.refined child2.refined in let original = create_union cx loc name child1.original child2.original in let refi = { refi_loc = loc; refined; original } in add_refi key refi scope0 - (* refi was cleared in a child env. clear from original *) - | Some _, _, _ -> - remove_refi key scope0 + | (Some _, _, _) -> remove_refi key scope0 (* refi was introduced - and possibly also removed by havoc - after envs diverged *) - | None, _, _ -> - () + | (None, _, _) -> () in - (* merge entries and refis found in changeset *) fun cx loc (env0, env1, env2) changeset -> - begin if not (envs_congruent [env0; env1; env2]) then assert_false - (spf "merge_env %s: envs not congruent: %d %d %d" - (string_of_loc loc) - (List.length env0) (List.length env1) (List.length env2)) - end; - changeset |> Changeset.iter_type_updates - (merge_entry cx loc (env0, env1, env2)) - (merge_refi cx loc (env0, env1, env2)) + if not (envs_congruent [env0; env1; env2]) then + assert_false + (spf + "merge_env %s: envs not congruent: %d %d %d" + (string_of_aloc loc) + (List.length env0) + (List.length env1) + (List.length env2)); + changeset + |> Changeset.iter_type_updates + (merge_entry cx loc (env0, env1, env2)) + (merge_refi cx loc (env0, env1, env2)) (* copy changes from env2 into env1 *) let copy_env = - (* find sscope pair in env pair *) let find_scope_pair cx loc (env0, env1) id = let lst = find_scope cx loc [env0; env1] id in List.(nth lst 0, nth lst 1) in - (* look for and copy entry, starting in topmost scope *) let copy_entry cx loc envs (scope_id, name, _) = - - let scope1, scope2 = find_scope_pair cx loc envs scope_id in + let (scope1, scope2) = find_scope_pair cx loc envs scope_id in let get = get_entry name in - Entry.(match get scope1, get scope2 with - (* for values, flow env2's specific type into env1's specific type *) - | Some Value v1, Some Value v2 -> - (* flow child2's specific type to child1 in place *) - Flow.flow cx (v2.specific, UseT (Op (Internal CopyEnv), v1.specific)); - (* update state *) - if v1.value_state < State.Initialized - && v2.value_state >= State.MaybeInitialized - 
then ( - let new_entry = Value { - v1 with value_state = State.MaybeInitialized - } in - add_entry name new_entry scope1 - ) - - (* type aliases shouldn't be here *) - | Some Type _, Some Type _ -> - assert_false (spf "copy_env %s: type alias %s found in changelist" - (string_of_loc loc) name) - - (* global lookups may leave new entries in env2, or orphan changes *) - (* ...which we can forget *) - | None, _ when is_global scope1 -> - () - - (* changeset entry exists only in lex scope *) - | Some Value _, None when Scope.is_lex scope1 -> - () - | None, Some (Value _ as entry) when Scope.is_lex scope2 -> - add_entry name entry scope1 - - (* uneven distributions *) - | entry1, entry2 -> - let print_entry_kind_opt = function - | None -> "None" - | Some e -> spf "Some %s" (Entry.string_of_kind e) - in assert_false (spf - "copy_env %s: non-uniform distribution of entry %s: %s, %s" - (string_of_loc loc) - name - (print_entry_kind_opt entry1) - (print_entry_kind_opt entry2)) - ) in - + Entry.( + match (get scope1, get scope2) with + (* for values, flow env2's specific type into env1's specific type *) + | (Some (Value v1), Some (Value v2)) -> + (* flow child2's specific type to child1 in place *) + Flow.flow cx (v2.specific, UseT (Op (Internal CopyEnv), v1.specific)); + + (* update state *) + if v1.value_state < State.Initialized && v2.value_state >= State.MaybeInitialized then + let new_entry = Value { v1 with value_state = State.MaybeInitialized } in + add_entry name new_entry scope1 + (* type aliases shouldn't be here *) + | (Some (Type _), Some (Type _)) -> + assert_false + (spf "copy_env %s: type alias %s found in changelist" (string_of_aloc loc) name) + (* global lookups may leave new entries in env2, or orphan changes *) + (* ...which we can forget *) + | (None, _) when is_global scope1 -> () + (* changeset entry exists only in lex scope *) + | (Some (Value _), None) when Scope.is_lex scope1 -> () + | (None, Some (Value _ as entry)) when Scope.is_lex scope2 -> add_entry name entry scope1 + (* uneven distributions *) + | (entry1, entry2) -> + let print_entry_kind_opt = function + | None -> "None" + | Some e -> spf "Some %s" (Entry.string_of_kind e) + in + assert_false + (spf + "copy_env %s: non-uniform distribution of entry %s: %s, %s" + (string_of_aloc loc) + name + (print_entry_kind_opt entry1) + (print_entry_kind_opt entry2))) + in (* look for and copy refinement in top scope only *) let copy_refi cx loc envs (scope_id, key, _) = - let scope0, scope1 = find_scope_pair cx loc envs scope_id in + let (scope0, scope1) = find_scope_pair cx loc envs scope_id in let get = get_refi key in - match get scope0, get scope1 with + match (get scope0, get scope1) with (* flow child refi's type back to parent *) - | Some { refined = t1; _ }, Some { refined = t2; _ } -> + | (Some { refined = t1; _ }, Some { refined = t2; _ }) -> Flow.flow cx (t2, UseT (Op (Internal CopyEnv), t1)) (* uneven cases imply refi was added after splitting: remove *) - | _ -> - () + | _ -> () in - (* copy entries and refis bound to names and keys, respectively *) fun cx loc (env1, env2) changeset -> - (if envs_congruent [env1; env2] then () - else assert_false (spf "copy_env %s: envs not congruent" - (string_of_loc loc))); - changeset |> Changeset.iter_type_updates - (copy_entry cx loc (env1, env2)) - (copy_refi cx loc (env1, env2)) + if envs_congruent [env1; env2] then + () + else + assert_false (spf "copy_env %s: envs not congruent" (string_of_aloc loc)); + changeset + |> Changeset.iter_type_updates (copy_entry cx loc (env1, 
env2)) (copy_refi cx loc (env1, env2)) (* in the top scope, convert specific types to tvars with former specific type as incoming lower bound, and general type as @@ -1213,10 +1227,9 @@ let copy_env = during path-dependent analysis. *) let widen_env = - let widened cx loc name specific general = - if specific = general - then None + if specific = general then + None else let reason = mk_reason name loc in let tvar = Tvar.mk cx reason in @@ -1224,28 +1237,25 @@ let widen_env = Flow.flow cx (tvar, UseT (Op (Internal WidenEnv), general)); Some tvar in - let widen_var cx loc name ({ Entry.specific; general; _ } as var) = match widened cx loc name specific general with | None -> var | Some specific -> { var with Entry.specific } in - let widen_refi cx loc name ({ refined; original; _ } as refi) = match widened cx loc name refined original with | None -> refi | Some refined -> { refi with refined } in - fun cx loc -> iter_local_scopes (fun scope -> - scope |> Scope.update_entries Entry.(fun name -> function - | Value var -> Value (widen_var cx loc (RIdentifier name) var) - | entry -> entry - ); - scope |> Scope.update_refis (fun key refi -> - widen_refi cx loc (Key.reason_desc key) refi) - ) + scope + |> Scope.update_entries + Entry.( + fun name -> function + | Value var -> Value (widen_var cx loc (RIdentifier name) var) + | entry -> entry); + scope |> Scope.update_refis (fun key refi -> widen_refi cx loc (Key.reason_desc key) refi)) (* The protocol around havoc has changed a few times. The following function used to do most of the work, but is now subsumed by @@ -1253,8 +1263,7 @@ let widen_env = a function body. Also see below. *) (* clear refinement informnation for all binding entries in env *) -let havoc_all () = - iter_scopes Scope.havoc +let havoc_all () = iter_scopes Scope.havoc (* set specific type of every non-internal var *and const* in top activation to undefined, and clear heap refinements. @@ -1264,44 +1273,42 @@ let havoc_all () = TODO rework the early-exit stuff to not break invariants. Until then it'll remain a source of bugs. 
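(* Editorial aside: a minimal sketch of what the havoc_* helpers below do to
   a single binding: forget any refinement by widening the specific type back
   to the general one. The record is illustrative, not Flow's Entry.t. *)
type ty = Ty of string

type binding = {
  specific : ty;  (* current, possibly refined type *)
  general : ty;   (* declared / widened type *)
}

(* After a havoc point (e.g. a call that may write to anything in scope),
   refinements can no longer be trusted. *)
let havoc_binding (b : binding) : binding = { b with specific = b.general }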
*) -let reset_current_activation loc = - iter_local_scopes (Scope.reset loc) +let reset_current_activation loc = iter_local_scopes (Scope.reset loc) (* clear refinement info for (topmost bindings of) given names in env *) -let havoc_vars = Scope.( - (* clear specific info for (topmost binding of) given var in env *) - let havoc_entry (_, name, _) = - let rec loop = function - | [] -> () - | scope :: scopes -> - match get_entry name scope with - | Some entry -> - let entry = Entry.havoc name entry in - add_entry name entry scope - | None -> - loop scopes - in loop !scopes - in - (* clear refinement for (topmost binding of) given key in env *) - let havoc_refi (_, key, _) = - let rec loop = function - | [] -> () - | scope :: scopes -> - match get_refi key scope with - | Some _ -> - remove_refi key scope - | None -> - loop scopes - in loop !scopes - in - Changeset.iter_type_updates havoc_entry havoc_refi -) +let havoc_vars = + Scope.( + (* clear specific info for (topmost binding of) given var in env *) + let havoc_entry (_, name, _) = + let rec loop = function + | [] -> () + | scope :: scopes -> + (match get_entry name scope with + | Some entry -> + let entry = Entry.havoc name entry in + add_entry name entry scope + | None -> loop scopes) + in + loop !scopes + in + (* clear refinement for (topmost binding of) given key in env *) + let havoc_refi (_, key, _) = + let rec loop = function + | [] -> () + | scope :: scopes -> + (match get_refi key scope with + | Some _ -> remove_refi key scope + | None -> loop scopes) + in + loop !scopes + in + Changeset.iter_type_updates havoc_entry havoc_refi) (* Clear entries for heap refinement pseudovars in env. If name is passed, clear only those refis that depend on it. Real variables are left untouched. *) -let havoc_heap_refinements () = iter_scopes (Scope.havoc_all_refis) +let havoc_heap_refinements () = iter_scopes Scope.havoc_all_refis let havoc_heap_refinements_with_propname ~private_ name = iter_scopes (Scope.havoc_refis ~private_ ~name) @@ -1323,14 +1330,15 @@ let havoc_heap_refinements_with_propname ~private_ name = *) let add_heap_refinement op key refi_loc refined original = let refi = { refi_loc; refined; original } in - let base, _ = key in - let scope, _ = find_entry_in_var_scope base in - let change = scope.id, key, op in - Changeset.change_refi change; + let (base, _) = key in + let (scope, _) = find_entry_in_var_scope base in + let change = (scope.id, key, op) in + Changeset.Global.change_refi change; Scope.add_refi key refi scope; change let set_expr = add_heap_refinement Changeset.Write + let refine_expr = add_heap_refinement Changeset.Refine (* add predicate refinements from given preds map to environment. @@ -1340,33 +1348,67 @@ let refine_expr = add_heap_refinement Changeset.Refine others can be obtained via query_var. 
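(* Editorial aside: an illustrative model of the refinement step below.
   Keys, predicates, and the narrowing function are toy stand-ins for Key.t,
   Type.predicate and the PredicateT flow; none of these names are Flow's
   real API. *)
module NameMap = Map.Make (String)

type ty = Ty of string

type pred =
  | Truthy
  | NonMaybe

(* The real code creates a tvar and flows (orig_type, PredicateT (pred, tvar));
   here we just tag the type to show the narrowing. *)
let narrow (Ty t) = function
  | Truthy -> Ty (t ^ " (truthy)")
  | NonMaybe -> Ty (t ^ " (not null/undefined)")

(* Replace each refined binding's specific type with its narrowed version,
   leaving unrefined bindings untouched. *)
let refine_with_preds (preds : pred NameMap.t) (env : ty NameMap.t) : ty NameMap.t =
  NameMap.fold
    (fun name pred env ->
      match NameMap.find_opt name env with
      | Some orig -> NameMap.add name (narrow orig pred) env
      | None -> env)
    preds
    env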
*) let refine_with_preds cx loc preds orig_types = + let refine_type orig_type pred refined_type = + let rec check_literal_subtypes pred = + (* When refining a type against a literal, we want to be sure that that literal can actually + inhabit that type *) + match pred with + | SingletonBoolP (loc, b) -> + let reason = loc |> mk_reason (RBooleanLit b) in + Flow.flow + cx + ( DefT (reason, bogus_trust (), BoolT (Some b)), + UseT (Op (Internal Refinement), orig_type) ) + | SingletonStrP (loc, b, str) -> + let reason = loc |> mk_reason (RStringLit str) in + Flow.flow + cx + ( DefT (reason, bogus_trust (), StrT (Literal (Some b, str))), + UseT (Op (Internal Refinement), orig_type) ) + | SingletonNumP (loc, b, ((_, str) as num)) -> + let reason = loc |> mk_reason (RNumberLit str) in + Flow.flow + cx + ( DefT (reason, bogus_trust (), NumT (Literal (Some b, num))), + UseT (Op (Internal Refinement), orig_type) ) + | LeftP (SentinelProp name, (DefT (reason, _, (BoolT _ | StrT _ | NumT _)) as t)) -> + Flow.flow cx (MatchingPropT (reason, name, t), UseT (Op (Internal Refinement), orig_type)) + | NotP p -> check_literal_subtypes p + | OrP (p1, p2) + | AndP (p1, p2) -> + check_literal_subtypes p1; + check_literal_subtypes p2 + | _ -> () + in + check_literal_subtypes pred; + Flow.flow cx (orig_type, PredicateT (pred, refined_type)) + in let mk_refi_type orig_type pred refi_reason = - Tvar.mk_where cx refi_reason (fun refined_type -> - Flow.flow cx (orig_type, PredicateT (pred, refined_type))) + refine_type orig_type pred |> Tvar.mk_where cx refi_reason in let refine_with_pred key pred acc = let refi_reason = mk_reason (RRefined (Key.reason_desc key)) loc in match key with (* for real consts/lets/vars, we model assignment/initialization *) - | name, [] when not (is_internal_name name) -> - Entry.(match find_entry cx name loc with - | _, Value v -> - let orig_type = - query_var ~track_ref:false cx name loc - in - let refi_type = mk_refi_type orig_type pred refi_reason in - let refine = match Entry.kind_of_value v with - | Const _ -> refine_const - | _ -> refine_var - in - begin match refine cx name refi_type loc with - | Some change -> Changeset.add_var change acc - | None -> acc - end - | _, _ -> - Flow.add_output cx (FlowError.ERefineAsValue (refi_reason, name)); - acc - ) + | (name, []) when not (is_internal_name name) -> + Entry.( + (match find_entry cx name loc with + | (_, Value v) -> + let orig_type = query_var cx name loc in + let refi_type = mk_refi_type orig_type pred refi_reason in + let refine = + match Entry.kind_of_value v with + | Const _ -> refine_const + | _ -> refine_var + in + begin + match refine cx name refi_type loc with + | Some change -> Changeset.add_var change acc + | None -> acc + end + | (_, _) -> + Flow.add_output cx (Error_message.ERefineAsValue (refi_reason, name)); + acc)) (* for heap refinements, we just add new entries *) | _ -> let orig_type = Key_map.find_unsafe key orig_types in @@ -1381,15 +1423,13 @@ let refine_with_preds cx loc preds orig_types = state of the cloned environment back into the reinstated original *) let in_refined_env cx loc preds orig_types f = - let oldset = Changeset.clear () in + let oldset = Changeset.Global.clear () in let orig_env = peek_env () in let new_env = clone_env orig_env in update_env cx loc new_env; let _ = refine_with_preds cx loc preds orig_types in - let result = f () in - - let newset = Changeset.merge oldset in + let newset = Changeset.Global.merge oldset in merge_env cx loc (orig_env, orig_env, new_env) newset; update_env cx loc 
orig_env; diff --git a/src/typing/env.mli b/src/typing/env.mli index 02ad88c5ed8..e2bfb1fd5b2 100644 --- a/src/typing/env.mli +++ b/src/typing/env.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,202 +9,178 @@ open Scope type t = Scope.t list -val peek_scope: unit -> Scope.t +val peek_scope : unit -> Scope.t -val peek_env: unit -> t +val peek_env : unit -> t -val clone_env: t -> t +val clone_env : t -> t -val string_of_env: Context.t -> t -> string +val string_of_env : Context.t -> t -> string -val var_scope_kind: unit -> Scope.var_scope_kind +val var_scope_kind : unit -> Scope.var_scope_kind -val in_async_scope: unit -> bool -val in_generator_scope: unit -> bool -val in_predicate_scope: unit -> bool +val in_async_scope : unit -> bool -val all_entries: unit -> Entry.t SMap.t +val in_generator_scope : unit -> bool -val find_entry: - Context.t -> - string -> - ?desc:Reason.reason_desc -> - Loc.t -> - Scope.t * Entry.t +val in_predicate_scope : unit -> bool -val peek_frame: unit -> int +val all_entries : unit -> Entry.t SMap.t -val push_var_scope: Context.t -> Scope.t -> unit -val pop_var_scope: unit -> unit +val find_entry : Context.t -> string -> ?desc:Reason.reason_desc -> ALoc.t -> Scope.t * Entry.t -val retrieve_closure_changeset: unit -> Changeset.t +val peek_frame : unit -> int -val in_lex_scope: Context.t -> (unit -> 'a) -> 'a +val push_var_scope : Context.t -> Scope.t -> unit -val env_depth: unit -> int -val trunc_env: int -> unit +val pop_var_scope : unit -> unit -val init_env: - ?exclude_syms:SSet.t -> - Context.t -> - Scope.t -> - unit +val retrieve_closure_changeset : unit -> Changeset.t + +val in_lex_scope : Context.t -> (unit -> 'a) -> 'a -val update_env: Context.t -> Loc.t -> t -> unit +val env_depth : unit -> int + +val trunc_env : int -> unit + +val init_env : ?exclude_syms:SSet.t -> Context.t -> Scope.t -> unit + +val update_env : Context.t -> ALoc.t -> t -> unit (***) -val promote_to_const_like: Context.t -> Loc.t -> bool +val promote_to_const_like : Context.t -> ALoc.t -> bool -val bind_class: Context.t -> int -> Type.Properties.id -> Type.Properties.id -> unit +val bind_class : Context.t -> ALoc.t -> Type.Properties.id -> Type.Properties.id -> unit -val bind_var: ?state:State.t -> Context.t -> string -> Type.t -> - Loc.t -> unit +val bind_var : ?state:State.t -> Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_let: ?state:State.t -> Context.t -> string -> Type.t -> - Loc.t -> unit +val bind_let : ?state:State.t -> Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_implicit_let: ?state:State.t -> Entry.let_binding_kind -> - Context.t -> string -> Type.t -> Loc.t -> unit +val bind_implicit_let : + ?state:State.t -> Entry.let_binding_kind -> Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_fun: ?state:State.t -> Context.t -> string -> Type.t -> - Loc.t -> unit +val bind_fun : ?state:State.t -> Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_implicit_const: ?state:State.t -> Entry.const_binding_kind -> - Context.t -> string -> Type.t -> Loc.t -> unit +val bind_implicit_const : + ?state:State.t -> Entry.const_binding_kind -> Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_const: ?state:State.t -> Context.t -> string -> Type.t -> - Loc.t -> unit +val bind_const : ?state:State.t -> Context.t -> string -> Type.t -> ALoc.t -> unit 
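(* Illustrative pairing of the bind/init declarations in this interface (a
   sketch only; `cx`, `t`, `loc` and the `use_op` value are placeholders,
   not part of this change):

     Env.bind_let cx "x" t loc;
     (* ... later, once the initializer has been checked ... *)
     Env.init_let cx ~use_op:Type.unknown_use "x" ~has_anno:false t loc
*)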
-val bind_import: Context.t -> string -> Type.t -> Loc.t -> unit +val bind_import : Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_type: ?state:State.t -> Context.t -> string -> Type.t -> - Loc.t -> unit +val bind_type : ?state:State.t -> Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_import_type: Context.t -> string -> Type.t -> Loc.t -> unit +val bind_import_type : Context.t -> string -> Type.t -> ALoc.t -> unit -val bind_declare_var: Context.t -> string -> Type.t -> Loc.t -> unit -val bind_declare_fun: Context.t -> string -> Type.t -> Loc.t -> unit +val bind_declare_var : Context.t -> string -> Type.t -> ALoc.t -> unit -val declare_const: Context.t -> string -> Loc.t -> unit -val declare_let: Context.t -> string -> Loc.t -> unit +val bind_declare_fun : Context.t -> string -> Type.t -> ALoc.t -> unit -val declare_implicit_let: Entry.let_binding_kind -> Context.t -> string -> - Loc.t -> unit +val declare_let : Context.t -> string -> ALoc.t -> unit -val init_var: Context.t -> use_op:Type.use_op -> string -> has_anno:bool -> Type.t -> Loc.t -> unit -val init_let: Context.t -> use_op:Type.use_op -> string -> has_anno:bool -> Type.t -> Loc.t -> unit -val init_implicit_let: - Entry.let_binding_kind - -> Context.t - -> use_op:Type.use_op - -> string - -> has_anno:bool - -> Type.t - -> Loc.t - -> unit -val init_fun: Context.t -> use_op:Type.use_op -> string -> Type.t -> Loc.t -> unit -val init_const: Context.t -> use_op:Type.use_op -> string -> has_anno:bool -> Type.t -> Loc.t -> unit -val init_type: Context.t -> string -> Type.t -> Loc.t -> unit +val declare_implicit_let : Entry.let_binding_kind -> Context.t -> string -> ALoc.t -> unit -val pseudo_init_declared_type: Context.t -> string -> Loc.t -> unit +val declare_const : Context.t -> string -> ALoc.t -> unit -module LookupMode: sig - type t = ForValue | ForType | ForTypeof -end +val declare_implicit_const : Entry.const_binding_kind -> Context.t -> string -> ALoc.t -> unit -val local_scope_entry_exists: string -> bool +val init_var : + Context.t -> use_op:Type.use_op -> string -> has_anno:bool -> Type.t -> ALoc.t -> unit -val get_env_entry: string -> t -> Scope.Entry.t option -val get_current_env_entry: string -> Scope.Entry.t option -val get_env_refi: Key.t -> t -> Scope.refi_binding option -val get_current_env_refi: Key.t -> Scope.refi_binding option -val get_class_entries: unit -> Type.class_binding list +val init_let : + Context.t -> use_op:Type.use_op -> string -> has_anno:bool -> Type.t -> ALoc.t -> unit -val get_var: - ?lookup_mode:LookupMode.t -> +val init_implicit_let : + Entry.let_binding_kind -> Context.t -> + use_op:Type.use_op -> string -> - Loc.t -> - Type.t + has_anno:bool -> + Type.t -> + ALoc.t -> + unit -val get_internal_var: - Context.t -> - string -> - Loc.t -> - Type.t +val init_fun : Context.t -> use_op:Type.use_op -> string -> Type.t -> ALoc.t -> unit -val get_var_declared_type: - ?lookup_mode:LookupMode.t -> - Context.t -> - string -> - Loc.t -> - Type.t +val init_const : + Context.t -> use_op:Type.use_op -> string -> has_anno:bool -> Type.t -> ALoc.t -> unit -val unify_declared_type: - ?lookup_mode:LookupMode.t -> +val init_implicit_const : + Entry.const_binding_kind -> Context.t -> + use_op:Type.use_op -> string -> + has_anno:bool -> Type.t -> + ALoc.t -> unit -val var_ref: - ?lookup_mode:LookupMode.t -> - Context.t -> - string -> - ?desc:Reason.reason_desc -> - Loc.t -> - Type.t +val init_type : Context.t -> string -> Type.t -> ALoc.t -> unit -val set_var: Context.t -> use_op:Type.use_op -> 
string -> Type.t -> Loc.t -> - Changeset.EntryRef.t option +val pseudo_init_declared_type : Context.t -> string -> ALoc.t -> unit -val set_internal_var: Context.t -> string -> Type.t -> Loc.t -> - Changeset.EntryRef.t option +module LookupMode : sig + type t = + | ForValue + | ForType + | ForTypeof +end -val set_expr: Key.t -> Loc.t -> Type.t -> Type.t -> - Changeset.RefiRef.t +val local_scope_entry_exists : string -> bool -val refine_with_preds: - Context.t -> - Loc.t -> - Type.predicate Key_map.t -> - Type.t Key_map.t -> - Changeset.t +val get_env_entry : string -> t -> Scope.Entry.t option -val in_refined_env: - Context.t -> - Loc.t -> - Type.predicate Key_map.t -> - Type.t Key_map.t -> - (unit -> 'a) -> - 'a +val get_current_env_entry : string -> Scope.Entry.t option -val merge_env: - Context.t -> - Loc.t -> - t * t * t -> - Changeset.t -> - unit +val get_env_refi : Key.t -> t -> Scope.refi_binding option -val widen_env: Context.t -> Loc.t -> unit +val get_current_env_refi : Key.t -> Scope.refi_binding option -val copy_env: - Context.t -> - Loc.t -> - t * t -> - Changeset.t -> - unit +val get_class_entries : unit -> Type.class_binding list + +val get_var : ?lookup_mode:LookupMode.t -> Context.t -> string -> ALoc.t -> Type.t + +val get_internal_var : Context.t -> string -> ALoc.t -> Type.t + +val get_var_declared_type : ?lookup_mode:LookupMode.t -> Context.t -> string -> ALoc.t -> Type.t + +val unify_declared_type : ?lookup_mode:LookupMode.t -> Context.t -> string -> Type.t -> unit + +val unify_declared_fun_type : Context.t -> string -> ALoc.t -> Type.t -> unit + +val var_ref : + ?lookup_mode:LookupMode.t -> Context.t -> string -> ?desc:Reason.reason_desc -> ALoc.t -> Type.t + +val set_var : + Context.t -> use_op:Type.use_op -> string -> Type.t -> ALoc.t -> Changeset.EntryRef.t option + +val set_internal_var : Context.t -> string -> Type.t -> ALoc.t -> Changeset.EntryRef.t option + +val set_expr : Key.t -> ALoc.t -> Type.t -> Type.t -> Changeset.RefiRef.t + +val refine_with_preds : + Context.t -> ALoc.t -> Type.predicate Key_map.t -> Type.t Key_map.t -> Changeset.t + +val in_refined_env : + Context.t -> ALoc.t -> Type.predicate Key_map.t -> Type.t Key_map.t -> (unit -> 'a) -> 'a + +val merge_env : Context.t -> ALoc.t -> t * t * t -> Changeset.t -> unit + +val widen_env : Context.t -> ALoc.t -> unit + +val copy_env : Context.t -> ALoc.t -> t * t -> Changeset.t -> unit + +val havoc_all : unit -> unit -val havoc_all: unit -> unit +val reset_current_activation : ALoc.t -> unit -val reset_current_activation: Loc.t -> unit +val havoc_vars : Changeset.t -> unit -val havoc_vars: Changeset.t -> unit +val havoc_heap_refinements : unit -> unit -val havoc_heap_refinements: unit -> unit -val havoc_heap_refinements_with_propname: private_:bool -> string -> unit +val havoc_heap_refinements_with_propname : private_:bool -> string -> unit -val get_refinement: Context.t -> Key.t -> Loc.t -> Type.t option +val get_refinement : Context.t -> Key.t -> ALoc.t -> Type.t option -val is_global_var: Context.t -> string -> bool +val is_global_var : Context.t -> string -> bool diff --git a/src/typing/errors/dune b/src/typing/errors/dune new file mode 100644 index 00000000000..22dd2330297 --- /dev/null +++ b/src/typing/errors/dune @@ -0,0 +1,10 @@ +(library + (name flow_typing_errors) + (wrapped false) + (libraries + flow_common_errors + flow_parser_utils + flow_typing_scope + flow_typing_type + ) +) diff --git a/src/typing/errors/error_message.ml b/src/typing/errors/error_message.ml new file mode 100644 index 
00000000000..71055a0705c --- /dev/null +++ b/src/typing/errors/error_message.ml @@ -0,0 +1,2442 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Type +open Reason +open Utils_js + +exception EDebugThrow of ALoc.t + +exception EMergeTimeout of float + +exception ECheckTimeout of float + +type invalid_char_set = + | DuplicateChar of Char.t + | InvalidChar of Char.t + +module InvalidCharSetSet = Set.Make (struct + type t = invalid_char_set + + let compare = Pervasives.compare +end) + +type t = ALoc.t t' + +and 'loc t' = + | EIncompatible of { + lower: 'loc virtual_reason * lower_kind option; + upper: 'loc virtual_reason * 'loc upper_kind; + use_op: 'loc virtual_use_op option; + branches: ('loc Reason.virtual_reason * t) list; + } + | EIncompatibleDefs of { + use_op: 'loc virtual_use_op; + reason_lower: 'loc virtual_reason; + reason_upper: 'loc virtual_reason; + branches: ('loc Reason.virtual_reason * t) list; + } + | EIncompatibleProp of { + prop: string option; + reason_prop: 'loc virtual_reason; + reason_obj: 'loc virtual_reason; + special: lower_kind option; + use_op: 'loc virtual_use_op option; + } + | EDebugPrint of 'loc virtual_reason * string + | EExportValueAsType of 'loc virtual_reason * string + | EImportValueAsType of 'loc virtual_reason * string + | EImportTypeAsTypeof of 'loc virtual_reason * string + | EImportTypeAsValue of 'loc virtual_reason * string + | ERefineAsValue of 'loc virtual_reason * string + | ENoDefaultExport of 'loc virtual_reason * string * string option + | EOnlyDefaultExport of 'loc virtual_reason * string * string + | ENoNamedExport of 'loc virtual_reason * string * string * string option + | EMissingTypeArgs of { + reason_tapp: 'loc virtual_reason; + reason_arity: 'loc virtual_reason; + min_arity: int; + max_arity: int; + } + | EValueUsedAsType of { reason_use: 'loc virtual_reason } + | EExpectedStringLit of { + reason_lower: 'loc virtual_reason; + reason_upper: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EExpectedNumberLit of { + reason_lower: 'loc virtual_reason; + reason_upper: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EExpectedBooleanLit of { + reason_lower: 'loc virtual_reason; + reason_upper: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EPropNotFound of + string option * ('loc virtual_reason * 'loc virtual_reason) * 'loc virtual_use_op + | EPropNotReadable of { + reason_prop: 'loc virtual_reason; + prop_name: string option; + use_op: 'loc virtual_use_op; + } + | EPropNotWritable of { + reason_prop: 'loc virtual_reason; + prop_name: string option; + use_op: 'loc virtual_use_op; + } + | EPropPolarityMismatch of + ('loc virtual_reason * 'loc virtual_reason) + * string option + * (Polarity.t * Polarity.t) + * 'loc virtual_use_op + | EPolarityMismatch of { + reason: 'loc virtual_reason; + name: string; + expected_polarity: Polarity.t; + actual_polarity: Polarity.t; + } + | EStrictLookupFailed of + ('loc virtual_reason * 'loc virtual_reason) + * 'loc virtual_reason + * string option + * 'loc virtual_use_op option + | EPrivateLookupFailed of + ('loc virtual_reason * 'loc virtual_reason) * string * 'loc virtual_use_op + | EAdditionMixed of 'loc virtual_reason * 'loc virtual_use_op + | EComparison of ('loc virtual_reason * 'loc virtual_reason) + | ETupleArityMismatch of + ('loc virtual_reason * 'loc virtual_reason) * int * int * 'loc virtual_use_op + | 
ENonLitArrayToTuple of ('loc virtual_reason * 'loc virtual_reason) * 'loc virtual_use_op + | ETupleOutOfBounds of { + use_op: 'loc virtual_use_op; + reason: 'loc virtual_reason; + reason_op: 'loc virtual_reason; + length: int; + index: string; + } + | ETupleNonIntegerIndex of { + use_op: 'loc virtual_use_op; + reason: 'loc virtual_reason; + index: string; + } + | ETupleUnsafeWrite of { + reason: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EROArrayWrite of ('loc virtual_reason * 'loc virtual_reason) * 'loc virtual_use_op + | EUnionSpeculationFailed of { + use_op: 'loc virtual_use_op; + reason: 'loc virtual_reason; + reason_op: 'loc virtual_reason; + branches: ('loc virtual_reason * t) list; + } + | ESpeculationAmbiguous of { + reason: 'loc virtual_reason; + prev_case: int * 'loc virtual_reason; + case: int * 'loc virtual_reason; + cases: 'loc virtual_reason list; + } + | EIncompatibleWithExact of ('loc virtual_reason * 'loc virtual_reason) * 'loc virtual_use_op + | EUnsupportedExact of ('loc virtual_reason * 'loc virtual_reason) + | EIdxArity of 'loc virtual_reason + | EIdxUse1 of 'loc virtual_reason + | EIdxUse2 of 'loc virtual_reason + | EUnexpectedThisType of 'loc + | ETypeParamArity of 'loc * int + | ECallTypeArity of { + call_loc: 'loc; + is_new: bool; + reason_arity: 'loc virtual_reason; + expected_arity: int; + } + | ETypeParamMinArity of 'loc * int + | ETooManyTypeArgs of 'loc virtual_reason * 'loc virtual_reason * int + | ETooFewTypeArgs of 'loc virtual_reason * 'loc virtual_reason * int + | EInvalidTypeArgs of 'loc virtual_reason * 'loc virtual_reason + | EPropertyTypeAnnot of 'loc + | EExportsAnnot of 'loc + | ECharSetAnnot of 'loc + | EInvalidCharSet of { + invalid: 'loc virtual_reason * InvalidCharSetSet.t; + valid: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EUnsupportedKeyInObjectType of 'loc + | EPredAnnot of 'loc + | ERefineAnnot of 'loc + | ETrustedAnnot of 'loc + | EPrivateAnnot of 'loc + | EUnexpectedTypeof of 'loc + | EFunPredCustom of ('loc virtual_reason * 'loc virtual_reason) * string + | EIncompatibleWithShape of 'loc virtual_reason * 'loc virtual_reason * 'loc virtual_use_op + | EInternal of 'loc * internal_error + | EUnsupportedSyntax of 'loc * unsupported_syntax + | EUseArrayLiteral of 'loc + | EMissingAnnotation of 'loc virtual_reason * 'loc virtual_reason list + | EBindingError of binding_error * 'loc * string * Scope.Entry.t + | ERecursionLimit of ('loc virtual_reason * 'loc virtual_reason) + | EModuleOutsideRoot of 'loc * string + | EMalformedPackageJson of 'loc * string + | EExperimentalClassProperties of 'loc * bool + | EUninitializedInstanceProperty of 'loc * Lints.property_assignment_kind + | EExperimentalDecorators of 'loc + | EExperimentalExportStarAs of 'loc + | EExperimentalEnums of 'loc + | EUnsafeGetSet of 'loc + | EIndeterminateModuleType of 'loc + | EBadExportPosition of 'loc + | EBadExportContext of string * 'loc + | EUnreachable of 'loc + | EInvalidObjectKit of { + reason: 'loc virtual_reason; + reason_op: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EInvalidTypeof of 'loc * string + | EBinaryInLHS of 'loc virtual_reason + | EBinaryInRHS of 'loc virtual_reason + | EArithmeticOperand of 'loc virtual_reason + | EForInRHS of 'loc virtual_reason + | EObjectComputedPropertyAccess of ('loc virtual_reason * 'loc virtual_reason) + | EObjectComputedPropertyAssign of ('loc virtual_reason * 'loc virtual_reason) + | EInvalidLHSInAssignment of 'loc + | EIncompatibleWithUseOp of 'loc virtual_reason * 'loc 
virtual_reason * 'loc virtual_use_op + | ETrustIncompatibleWithUseOp of 'loc virtual_reason * 'loc virtual_reason * 'loc virtual_use_op + | EUnsupportedImplements of 'loc virtual_reason + | ENotAReactComponent of { + reason: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EInvalidReactConfigType of { + reason: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + } + | EInvalidReactPropType of { + reason: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + tool: React.SimplifyPropType.tool; + } + | EInvalidReactCreateClass of { + reason: 'loc virtual_reason; + use_op: 'loc virtual_use_op; + tool: React.CreateClass.tool; + } + | EReactElementFunArity of 'loc virtual_reason * string * int + | EFunctionCallExtraArg of 'loc virtual_reason * 'loc virtual_reason * int * 'loc virtual_use_op + | EUnsupportedSetProto of 'loc virtual_reason + | EDuplicateModuleProvider of { + module_name: string; + provider: File_key.t; + conflict: File_key.t; + } + | EParseError of 'loc * Parse_error.t + | EDocblockError of 'loc * docblock_error + | EImplicitInexactObject of 'loc + (* The string is either the name of a module or "the module that exports `_`". *) + | EUntypedTypeImport of 'loc * string + | EUntypedImport of 'loc * string + | ENonstrictImport of 'loc + | EUnclearType of 'loc + | EDeprecatedType of 'loc + | EDeprecatedUtility of 'loc * string + | EDynamicExport of 'loc virtual_reason * 'loc virtual_reason + | EUnsafeGettersSetters of 'loc + | EUnusedSuppression of 'loc + | ELintSetting of LintSettings.lint_parse_error + | ESketchyNullLint of { + kind: Lints.sketchy_null_kind; + loc: 'loc; + null_loc: 'loc; + falsy_loc: 'loc; + } + | ESketchyNumberLint of Lints.sketchy_number_kind * 'loc virtual_reason + | EInvalidPrototype of 'loc virtual_reason + | EExperimentalOptionalChaining of 'loc + | EOptionalChainingMethods of 'loc + | EUnnecessaryOptionalChain of 'loc * 'loc virtual_reason + | EUnnecessaryInvariant of 'loc * 'loc virtual_reason + | EInexactSpread of 'loc virtual_reason * 'loc virtual_reason + | EUnexpectedTemporaryBaseType of 'loc + | ECannotDelete of 'loc * 'loc virtual_reason + | EBigIntNotYetSupported of 'loc virtual_reason + (* These are unused when calculating locations so we can leave this as Aloc *) + | ESignatureVerification of Signature_builder_deps.With_ALoc.Error.t + | ENonArraySpread of 'loc virtual_reason + | ECannotSpreadInterface of { + spread_reason: 'loc virtual_reason; + interface_reason: 'loc virtual_reason; + } + | ECannotSpreadIndexerOnRight of { + spread_reason: 'loc virtual_reason; + object_reason: 'loc virtual_reason; + key_reason: 'loc virtual_reason; + } + | EUnableToSpread of { + spread_reason: 'loc virtual_reason; + object1_reason: 'loc virtual_reason; + object2_reason: 'loc virtual_reason; + propname: string; + error_kind: spread_error_kind; + } + | EInexactMayOverwriteIndexer of { + spread_reason: 'loc virtual_reason; + key_reason: 'loc virtual_reason; + value_reason: 'loc virtual_reason; + object2_reason: 'loc virtual_reason; + } + +and spread_error_kind = + | Indexer + | Inexact + +and binding_error = + | ENameAlreadyBound + | EReferencedBeforeDeclaration + | ETypeInValuePosition + | ETypeAliasInValuePosition + | EConstReassigned + | EConstParamReassigned + | EImportReassigned + | EEnumReassigned + +and docblock_error = + | MultipleFlowAttributes + | MultipleProvidesModuleAttributes + | MultipleJSXAttributes + | InvalidJSXAttribute of string option + +and internal_error = + | PackageHeapNotFound of string + | AbnormalControlFlow + | 
MethodNotAFunction + | OptionalMethod + | OpenPredWithoutSubst + | PredFunWithoutParamNames + | UnsupportedGuardPredicate of string + | BreakEnvMissingForCase + | PropertyDescriptorPropertyCannotBeRead + | ForInLHS + | ForOfLHS + | InstanceLookupComputed + | PropRefComputedOpen + | PropRefComputedLiteral + | ShadowReadComputed + | ShadowWriteComputed + | RestParameterNotIdentifierPattern + | InterfaceTypeSpread + | DebugThrow + | MergeTimeout of float + | MergeJobException of Exception.t + | CheckTimeout of float + | CheckJobException of Exception.t + | UnexpectedTypeapp of string + +and unsupported_syntax = + | ComprehensionExpression + | GeneratorExpression + | MetaPropertyExpression + | ObjectPropertyLiteralNonString + | ObjectPropertyGetSet + | ObjectPropertyComputedGetSet + | InvariantSpreadArgument + | ClassPropertyLiteral + | ClassPropertyComputed + | ReactCreateClassPropertyNonInit + | RequireDynamicArgument + | RequireLazyDynamicArgument + | CatchParameterAnnotation + | CatchParameterDeclaration + | DestructuringObjectPropertyLiteralNonString + | DestructuringExpressionPattern + | PredicateDeclarationForImplementation + | PredicateDeclarationWithoutExpression + | PredicateDeclarationAnonymousParameters + | PredicateInvalidBody + | PredicateFunctionAbstractReturnType + | PredicateVoidReturn + | MultipleIndexers + | MultipleProtos + | ExplicitCallAfterProto + | ExplicitProtoAfterCall + | SpreadArgument + | ImportDynamicArgument + | IllegalName + | UnsupportedInternalSlot of { + name: string; + static: bool; + } + +and lower_kind = + | Possibly_null + | Possibly_void + | Possibly_null_or_void + | Incompatible_intersection + +and 'loc upper_kind = + | IncompatibleGetPropT of 'loc * string option + | IncompatibleSetPropT of 'loc * string option + | IncompatibleMatchPropT of 'loc * string option + | IncompatibleGetPrivatePropT + | IncompatibleSetPrivatePropT + | IncompatibleMethodT of 'loc * string option + | IncompatibleCallT + | IncompatibleMixedCallT + | IncompatibleConstructorT + | IncompatibleGetElemT of 'loc + | IncompatibleSetElemT of 'loc + | IncompatibleCallElemT of 'loc + | IncompatibleElemTOfArrT + | IncompatibleObjAssignFromTSpread + | IncompatibleObjAssignFromT + | IncompatibleObjRestT + | IncompatibleObjSealT + | IncompatibleArrRestT + | IncompatibleSuperT + | IncompatibleMixinT + | IncompatibleSpecializeT + | IncompatibleThisSpecializeT + | IncompatibleVarianceCheckT + | IncompatibleGetKeysT + | IncompatibleHasOwnPropT of 'loc * string option + | IncompatibleGetValuesT + | IncompatibleUnaryMinusT + | IncompatibleMapTypeTObject + | IncompatibleTypeAppVarianceCheckT + | IncompatibleGetStaticsT + | IncompatibleUnclassified of string + +let map_loc_of_error_message (f : 'a -> 'b) : 'a t' -> 'b t' = + let map_use_op = TypeUtil.mod_loc_of_virtual_use_op f in + let map_reason = Reason.map_reason_locs f in + let map_branch (r, e) = (map_reason r, e) in + let map_upper_kind = function + | IncompatibleGetPropT (loc, s) -> IncompatibleGetPropT (f loc, s) + | IncompatibleSetPropT (loc, s) -> IncompatibleSetPropT (f loc, s) + | IncompatibleMatchPropT (loc, s) -> IncompatibleMatchPropT (f loc, s) + | IncompatibleMethodT (loc, s) -> IncompatibleMethodT (f loc, s) + | IncompatibleHasOwnPropT (loc, s) -> IncompatibleHasOwnPropT (f loc, s) + | IncompatibleGetElemT loc -> IncompatibleGetElemT (f loc) + | IncompatibleSetElemT loc -> IncompatibleSetElemT (f loc) + | IncompatibleCallElemT loc -> IncompatibleCallElemT (f loc) + | ( IncompatibleGetPrivatePropT | IncompatibleSetPrivatePropT | 
IncompatibleCallT + | IncompatibleMixedCallT | IncompatibleConstructorT | IncompatibleElemTOfArrT + | IncompatibleObjAssignFromTSpread | IncompatibleObjAssignFromT | IncompatibleObjRestT + | IncompatibleObjSealT | IncompatibleArrRestT | IncompatibleSuperT | IncompatibleMixinT + | IncompatibleSpecializeT | IncompatibleThisSpecializeT | IncompatibleVarianceCheckT + | IncompatibleGetKeysT | IncompatibleGetValuesT | IncompatibleUnaryMinusT + | IncompatibleMapTypeTObject | IncompatibleTypeAppVarianceCheckT | IncompatibleGetStaticsT + | IncompatibleUnclassified _ ) as u -> + u + in + function + | EIncompatible { use_op; lower = (lreason, lkind); upper = (ureason, ukind); branches } -> + EIncompatible + { + use_op = Option.map ~f:map_use_op use_op; + lower = (map_reason lreason, lkind); + upper = (map_reason ureason, map_upper_kind ukind); + branches = Core_list.map ~f:map_branch branches; + } + | EIncompatibleDefs { use_op; reason_lower; reason_upper; branches } -> + EIncompatibleDefs + { + use_op = map_use_op use_op; + reason_lower = map_reason reason_lower; + reason_upper = map_reason reason_upper; + branches = Core_list.map ~f:map_branch branches; + } + | EIncompatibleProp { use_op; prop; reason_prop; reason_obj; special } -> + EIncompatibleProp + { + use_op = Option.map ~f:map_use_op use_op; + prop; + reason_prop = map_reason reason_prop; + reason_obj = map_reason reason_obj; + special; + } + | EExpectedStringLit { reason_lower; reason_upper; use_op } -> + EExpectedStringLit + { + reason_lower = map_reason reason_lower; + reason_upper = map_reason reason_upper; + use_op = map_use_op use_op; + } + | EExpectedNumberLit { reason_lower; reason_upper; use_op } -> + EExpectedNumberLit + { + reason_lower = map_reason reason_lower; + reason_upper = map_reason reason_upper; + use_op = map_use_op use_op; + } + | EExpectedBooleanLit { reason_lower; reason_upper; use_op } -> + EExpectedBooleanLit + { + reason_lower = map_reason reason_lower; + reason_upper = map_reason reason_upper; + use_op = map_use_op use_op; + } + | EPropNotFound (prop, (r1, r2), op) -> + EPropNotFound (prop, (map_reason r1, map_reason r2), map_use_op op) + | EPropNotReadable { reason_prop; prop_name; use_op } -> + EPropNotReadable + { reason_prop = map_reason reason_prop; prop_name; use_op = map_use_op use_op } + | EPropNotWritable { reason_prop; prop_name; use_op } -> + EPropNotWritable + { reason_prop = map_reason reason_prop; prop_name; use_op = map_use_op use_op } + | EPropPolarityMismatch ((r1, r2), p, ps, op) -> + EPropPolarityMismatch ((map_reason r1, map_reason r2), p, ps, map_use_op op) + | EStrictLookupFailed ((r1, r2), r, p, op) -> + EStrictLookupFailed + ((map_reason r1, map_reason r2), map_reason r, p, Option.map ~f:map_use_op op) + | EPrivateLookupFailed ((r1, r2), x, op) -> + EPrivateLookupFailed ((map_reason r1, map_reason r2), x, map_use_op op) + | EAdditionMixed (r, op) -> EAdditionMixed (map_reason r, map_use_op op) + | ETupleArityMismatch ((r1, r2), l, i, op) -> + ETupleArityMismatch ((map_reason r1, map_reason r2), l, i, map_use_op op) + | ENonLitArrayToTuple ((r1, r2), op) -> + ENonLitArrayToTuple ((map_reason r1, map_reason r2), map_use_op op) + | ETupleOutOfBounds { use_op; reason; reason_op; length; index } -> + ETupleOutOfBounds + { + use_op = map_use_op use_op; + reason = map_reason reason; + reason_op = map_reason reason_op; + length; + index; + } + | ETupleNonIntegerIndex { use_op; reason; index } -> + ETupleNonIntegerIndex { use_op = map_use_op use_op; reason = map_reason reason; index } + | 
ETupleUnsafeWrite { reason; use_op } -> + ETupleUnsafeWrite { reason = map_reason reason; use_op = map_use_op use_op } + | EROArrayWrite ((r1, r2), op) -> EROArrayWrite ((map_reason r1, map_reason r2), map_use_op op) + | EUnionSpeculationFailed { use_op; reason; reason_op; branches } -> + EUnionSpeculationFailed + { + use_op = map_use_op use_op; + reason = map_reason reason; + reason_op = map_reason reason_op; + branches = Core_list.map ~f:map_branch branches; + } + | EIncompatibleWithExact ((r1, r2), op) -> + EIncompatibleWithExact ((map_reason r1, map_reason r2), map_use_op op) + | EInvalidCharSet { invalid = (ir, set); valid; use_op } -> + EInvalidCharSet + { invalid = (map_reason ir, set); valid = map_reason valid; use_op = map_use_op use_op } + | EIncompatibleWithShape (l, u, use_op) -> + EIncompatibleWithShape (map_reason l, map_reason u, map_use_op use_op) + | EInvalidObjectKit { reason; reason_op; use_op } -> + EInvalidObjectKit + { reason = map_reason reason; reason_op = map_reason reason_op; use_op = map_use_op use_op } + | EIncompatibleWithUseOp (rl, ru, op) -> + EIncompatibleWithUseOp (map_reason rl, map_reason ru, map_use_op op) + | ETrustIncompatibleWithUseOp (rl, ru, op) -> + ETrustIncompatibleWithUseOp (map_reason rl, map_reason ru, map_use_op op) + | ENotAReactComponent { reason; use_op } -> + ENotAReactComponent { reason = map_reason reason; use_op = map_use_op use_op } + | EInvalidReactConfigType { reason; use_op } -> + EInvalidReactConfigType { reason = map_reason reason; use_op = map_use_op use_op } + | EInvalidReactPropType { reason; use_op; tool } -> + EInvalidReactPropType { reason = map_reason reason; use_op = map_use_op use_op; tool } + | EInvalidReactCreateClass { reason; use_op; tool } -> + EInvalidReactCreateClass { reason = map_reason reason; use_op = map_use_op use_op; tool } + | EFunctionCallExtraArg (rl, ru, n, op) -> + EFunctionCallExtraArg (map_reason rl, map_reason ru, n, map_use_op op) + | EDebugPrint (r, s) -> EDebugPrint (map_reason r, s) + | EExportValueAsType (r, s) -> EExportValueAsType (map_reason r, s) + | EImportValueAsType (r, s) -> EImportValueAsType (map_reason r, s) + | EImportTypeAsTypeof (r, s) -> EImportTypeAsTypeof (map_reason r, s) + | EImportTypeAsValue (r, s) -> EImportTypeAsValue (map_reason r, s) + | ERefineAsValue (r, s) -> ERefineAsValue (map_reason r, s) + | ENoDefaultExport (r, s1, s2) -> ENoDefaultExport (map_reason r, s1, s2) + | EOnlyDefaultExport (r, s1, s2) -> EOnlyDefaultExport (map_reason r, s1, s2) + | ENoNamedExport (r, s1, s2, s3) -> ENoNamedExport (map_reason r, s1, s2, s3) + | EMissingTypeArgs { reason_tapp; reason_arity; min_arity; max_arity } -> + EMissingTypeArgs + { + reason_tapp = map_reason reason_tapp; + reason_arity = map_reason reason_arity; + min_arity; + max_arity; + } + | EValueUsedAsType { reason_use } -> EValueUsedAsType { reason_use = map_reason reason_use } + | EPolarityMismatch { reason; name; expected_polarity; actual_polarity } -> + EPolarityMismatch { reason = map_reason reason; name; expected_polarity; actual_polarity } + | EComparison (r1, r2) -> EComparison (map_reason r1, map_reason r2) + | ESpeculationAmbiguous + { reason; prev_case = (prev_i, prev_case_reason); case = (i, case_reason); cases } -> + ESpeculationAmbiguous + { + reason = map_reason reason; + prev_case = (prev_i, map_reason prev_case_reason); + case = (i, map_reason case_reason); + cases = Core_list.map ~f:map_reason cases; + } + | EUnsupportedExact (r1, r2) -> EUnsupportedExact (map_reason r1, map_reason r2) + | EIdxArity r -> 
EIdxArity (map_reason r) + | EIdxUse1 r -> EIdxUse1 (map_reason r) + | EIdxUse2 r -> EIdxUse2 (map_reason r) + | EUnexpectedThisType loc -> EUnexpectedThisType (f loc) + | ETypeParamArity (loc, i) -> ETypeParamArity (f loc, i) + | ECallTypeArity { call_loc; is_new; reason_arity; expected_arity } -> + ECallTypeArity + { call_loc = f call_loc; is_new; expected_arity; reason_arity = map_reason reason_arity } + | ETypeParamMinArity (loc, i) -> ETypeParamMinArity (f loc, i) + | ETooManyTypeArgs (r1, r2, i) -> ETooManyTypeArgs (map_reason r1, map_reason r2, i) + | ETooFewTypeArgs (r1, r2, i) -> ETooFewTypeArgs (map_reason r1, map_reason r2, i) + | EInvalidTypeArgs (r1, r2) -> EInvalidTypeArgs (map_reason r1, map_reason r2) + | EPropertyTypeAnnot loc -> EPropertyTypeAnnot (f loc) + | EExportsAnnot loc -> EExportsAnnot (f loc) + | ECharSetAnnot loc -> ECharSetAnnot (f loc) + | EUnsupportedKeyInObjectType loc -> EUnsupportedKeyInObjectType (f loc) + | EPredAnnot loc -> EPredAnnot (f loc) + | ERefineAnnot loc -> ERefineAnnot (f loc) + | ETrustedAnnot loc -> ETrustedAnnot (f loc) + | EPrivateAnnot loc -> EPrivateAnnot (f loc) + | EUnexpectedTypeof loc -> EUnexpectedTypeof (f loc) + | EFunPredCustom ((r1, r2), s) -> EFunPredCustom ((map_reason r1, map_reason r2), s) + | EInternal (loc, i) -> EInternal (f loc, i) + | EUnsupportedSyntax (loc, u) -> EUnsupportedSyntax (f loc, u) + | EUseArrayLiteral loc -> EUseArrayLiteral (f loc) + | EMissingAnnotation (r, rs) -> EMissingAnnotation (map_reason r, Core_list.map ~f:map_reason rs) + | EBindingError (b, loc, s, scope) -> EBindingError (b, f loc, s, scope) + | ERecursionLimit (r1, r2) -> ERecursionLimit (map_reason r1, map_reason r2) + | EModuleOutsideRoot (loc, s) -> EModuleOutsideRoot (f loc, s) + | EMalformedPackageJson (loc, s) -> EMalformedPackageJson (f loc, s) + | EExperimentalDecorators loc -> EExperimentalDecorators (f loc) + | EExperimentalClassProperties (loc, b) -> EExperimentalClassProperties (f loc, b) + | EUnsafeGetSet loc -> EUnsafeGetSet (f loc) + | EUninitializedInstanceProperty (loc, e) -> EUninitializedInstanceProperty (f loc, e) + | EExperimentalExportStarAs loc -> EExperimentalExportStarAs (f loc) + | EExperimentalEnums loc -> EExperimentalEnums (f loc) + | EIndeterminateModuleType loc -> EIndeterminateModuleType (f loc) + | EBadExportPosition loc -> EBadExportPosition (f loc) + | EBadExportContext (s, loc) -> EBadExportContext (s, f loc) + | EUnreachable loc -> EUnreachable (f loc) + | EInvalidTypeof (loc, s) -> EInvalidTypeof (f loc, s) + | EBinaryInLHS r -> EBinaryInLHS (map_reason r) + | EBinaryInRHS r -> EBinaryInRHS (map_reason r) + | EArithmeticOperand r -> EArithmeticOperand (map_reason r) + | EForInRHS r -> EForInRHS (map_reason r) + | EObjectComputedPropertyAccess (r1, r2) -> + EObjectComputedPropertyAccess (map_reason r1, map_reason r2) + | EObjectComputedPropertyAssign (r1, r2) -> + EObjectComputedPropertyAssign (map_reason r1, map_reason r2) + | EInvalidLHSInAssignment l -> EInvalidLHSInAssignment (f l) + | EUnsupportedImplements r -> EUnsupportedImplements (map_reason r) + | EReactElementFunArity (r, s, i) -> EReactElementFunArity (map_reason r, s, i) + | EUnsupportedSetProto r -> EUnsupportedSetProto (map_reason r) + | EDuplicateModuleProvider { module_name = _; provider = _; conflict = _ } as e -> e + | EParseError (loc, p) -> EParseError (f loc, p) + | EDocblockError (loc, e) -> EDocblockError (f loc, e) + | EImplicitInexactObject loc -> EImplicitInexactObject (f loc) + | EUntypedTypeImport (loc, s) -> 
EUntypedTypeImport (f loc, s) + | EUntypedImport (loc, s) -> EUntypedImport (f loc, s) + | ENonstrictImport loc -> ENonstrictImport (f loc) + | EUnclearType loc -> EUnclearType (f loc) + | EDeprecatedType loc -> EDeprecatedType (f loc) + | EDeprecatedUtility (loc, s) -> EDeprecatedUtility (f loc, s) + | EDynamicExport (r1, r2) -> EDynamicExport (map_reason r1, map_reason r2) + | EUnsafeGettersSetters loc -> EUnsafeGettersSetters (f loc) + | EUnusedSuppression loc -> EUnusedSuppression (f loc) + | ELintSetting _ as e -> e + | ESketchyNullLint { kind; loc; null_loc; falsy_loc } -> + ESketchyNullLint { kind; loc = f loc; null_loc = f null_loc; falsy_loc = f falsy_loc } + | ESketchyNumberLint (kind, r) -> ESketchyNumberLint (kind, map_reason r) + | EInvalidPrototype r -> EInvalidPrototype (map_reason r) + | EExperimentalOptionalChaining loc -> EExperimentalOptionalChaining (f loc) + | EOptionalChainingMethods loc -> EOptionalChainingMethods (f loc) + | EUnnecessaryOptionalChain (loc, r) -> EUnnecessaryOptionalChain (f loc, map_reason r) + | EUnnecessaryInvariant (loc, r) -> EUnnecessaryInvariant (f loc, map_reason r) + | EInexactSpread (r1, r2) -> EInexactSpread (map_reason r1, map_reason r2) + | EUnexpectedTemporaryBaseType loc -> EUnexpectedTemporaryBaseType (f loc) + | ECannotDelete (l1, r1) -> ECannotDelete (f l1, map_reason r1) + | EBigIntNotYetSupported r -> EBigIntNotYetSupported (map_reason r) + | ESignatureVerification _ as e -> e + | ENonArraySpread r -> ENonArraySpread (map_reason r) + | ECannotSpreadInterface { spread_reason; interface_reason } -> + ECannotSpreadInterface + { spread_reason = map_reason spread_reason; interface_reason = map_reason interface_reason } + | ECannotSpreadIndexerOnRight { spread_reason; object_reason; key_reason } -> + ECannotSpreadIndexerOnRight + { + spread_reason = map_reason spread_reason; + object_reason = map_reason object_reason; + key_reason = map_reason key_reason; + } + | EUnableToSpread { spread_reason; object1_reason; object2_reason; propname; error_kind } -> + EUnableToSpread + { + spread_reason = map_reason spread_reason; + object1_reason = map_reason object1_reason; + object2_reason = map_reason object2_reason; + propname; + error_kind; + } + | EInexactMayOverwriteIndexer { spread_reason; key_reason; value_reason; object2_reason } -> + EInexactMayOverwriteIndexer + { + spread_reason = map_reason spread_reason; + key_reason = map_reason key_reason; + value_reason = map_reason value_reason; + object2_reason = map_reason object2_reason; + } + +let desc_of_reason r = Reason.desc_of_reason ~unwrap:(is_scalar_reason r) r + +(* A utility function for getting and updating the use_op in error messages. 
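   For example, reading the use_op out of a message can be written as the
   following sketch (`use_op_of_msg` is a hypothetical helper, not defined
   in this change):

     let use_op_of_msg msg =
       (* [nope] is None; [util] ignores the rebuild callback *)
       util_use_op_of_msg None (fun op _rebuild -> Some op) msg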
*) +let util_use_op_of_msg nope util = function + | EIncompatible { use_op; lower; upper; branches } -> + Option.value_map use_op ~default:nope ~f:(fun use_op -> + util use_op (fun use_op -> EIncompatible { use_op = Some use_op; lower; upper; branches })) + | EIncompatibleDefs { use_op; reason_lower; reason_upper; branches } -> + util use_op (fun use_op -> EIncompatibleDefs { use_op; reason_lower; reason_upper; branches }) + | EIncompatibleProp { use_op; prop; reason_prop; reason_obj; special } -> + Option.value_map use_op ~default:nope ~f:(fun use_op -> + util use_op (fun use_op -> + EIncompatibleProp { use_op = Some use_op; prop; reason_prop; reason_obj; special })) + | ETrustIncompatibleWithUseOp (rl, ru, op) -> + util op (fun op -> ETrustIncompatibleWithUseOp (rl, ru, op)) + | EExpectedStringLit { reason_lower; reason_upper; use_op } -> + util use_op (fun use_op -> EExpectedStringLit { reason_lower; reason_upper; use_op }) + | EExpectedNumberLit { reason_lower; reason_upper; use_op } -> + util use_op (fun use_op -> EExpectedNumberLit { reason_lower; reason_upper; use_op }) + | EExpectedBooleanLit { reason_lower; reason_upper; use_op } -> + util use_op (fun use_op -> EExpectedBooleanLit { reason_lower; reason_upper; use_op }) + | EPropNotFound (prop, rs, op) -> util op (fun op -> EPropNotFound (prop, rs, op)) + | EPropNotReadable { reason_prop; prop_name; use_op } -> + util use_op (fun use_op -> EPropNotReadable { reason_prop; prop_name; use_op }) + | EPropNotWritable { reason_prop; prop_name; use_op } -> + util use_op (fun use_op -> EPropNotWritable { reason_prop; prop_name; use_op }) + | EPropPolarityMismatch (rs, p, ps, op) -> + util op (fun op -> EPropPolarityMismatch (rs, p, ps, op)) + | EStrictLookupFailed (rs, r, p, Some op) -> + util op (fun op -> EStrictLookupFailed (rs, r, p, Some op)) + | EPrivateLookupFailed (rs, x, op) -> util op (fun op -> EPrivateLookupFailed (rs, x, op)) + | EAdditionMixed (r, op) -> util op (fun op -> EAdditionMixed (r, op)) + | ETupleArityMismatch (rs, x, y, op) -> util op (fun op -> ETupleArityMismatch (rs, x, y, op)) + | ENonLitArrayToTuple (rs, op) -> util op (fun op -> ENonLitArrayToTuple (rs, op)) + | ETupleOutOfBounds { use_op; reason; reason_op; length; index } -> + util use_op (fun use_op -> ETupleOutOfBounds { use_op; reason; reason_op; length; index }) + | ETupleNonIntegerIndex { use_op; reason; index } -> + util use_op (fun use_op -> ETupleNonIntegerIndex { use_op; reason; index }) + | ETupleUnsafeWrite { reason; use_op } -> + util use_op (fun use_op -> ETupleUnsafeWrite { reason; use_op }) + | EROArrayWrite (rs, op) -> util op (fun op -> EROArrayWrite (rs, op)) + | EUnionSpeculationFailed { use_op; reason; reason_op; branches } -> + util use_op (fun use_op -> EUnionSpeculationFailed { use_op; reason; reason_op; branches }) + | EIncompatibleWithExact (rs, op) -> util op (fun op -> EIncompatibleWithExact (rs, op)) + | EInvalidCharSet { invalid; valid; use_op } -> + util use_op (fun use_op -> EInvalidCharSet { invalid; valid; use_op }) + | EIncompatibleWithShape (l, u, use_op) -> + util use_op (fun use_op -> EIncompatibleWithShape (l, u, use_op)) + | EInvalidObjectKit { reason; reason_op; use_op } -> + util use_op (fun use_op -> EInvalidObjectKit { reason; reason_op; use_op }) + | EIncompatibleWithUseOp (rl, ru, op) -> util op (fun op -> EIncompatibleWithUseOp (rl, ru, op)) + | ENotAReactComponent { reason; use_op } -> + util use_op (fun use_op -> ENotAReactComponent { reason; use_op }) + | EInvalidReactConfigType { reason; use_op } -> + util 
use_op (fun use_op -> EInvalidReactConfigType { reason; use_op }) + | EInvalidReactPropType { reason; use_op; tool } -> + util use_op (fun use_op -> EInvalidReactPropType { reason; use_op; tool }) + | EInvalidReactCreateClass { reason; use_op; tool } -> + util use_op (fun use_op -> EInvalidReactCreateClass { reason; use_op; tool }) + | EFunctionCallExtraArg (rl, ru, n, op) -> + util op (fun op -> EFunctionCallExtraArg (rl, ru, n, op)) + | EDebugPrint (_, _) + | EExportValueAsType (_, _) + | EImportValueAsType (_, _) + | EImportTypeAsTypeof (_, _) + | EImportTypeAsValue (_, _) + | ERefineAsValue (_, _) + | ENoDefaultExport (_, _, _) + | EOnlyDefaultExport (_, _, _) + | ENoNamedExport (_, _, _, _) + | EMissingTypeArgs { reason_tapp = _; reason_arity = _; min_arity = _; max_arity = _ } + | EValueUsedAsType _ + | EPolarityMismatch { reason = _; name = _; expected_polarity = _; actual_polarity = _ } + | EStrictLookupFailed (_, _, _, None) + | EComparison (_, _) + | ESpeculationAmbiguous _ + | EUnsupportedExact (_, _) + | EIdxArity _ + | EIdxUse1 _ + | EIdxUse2 _ + | EUnexpectedThisType _ + | ETypeParamArity (_, _) + | ECallTypeArity _ + | ETypeParamMinArity (_, _) + | ETooManyTypeArgs (_, _, _) + | ETooFewTypeArgs (_, _, _) + | EInvalidTypeArgs (_, _) + | EPropertyTypeAnnot _ + | EExportsAnnot _ + | ECharSetAnnot _ + | EUnsupportedKeyInObjectType _ + | EPredAnnot _ + | ERefineAnnot _ + | ETrustedAnnot _ + | EPrivateAnnot _ + | EUnexpectedTypeof _ + | EFunPredCustom (_, _) + | EInternal (_, _) + | EUnsupportedSyntax (_, _) + | EUseArrayLiteral _ + | EMissingAnnotation _ + | EBindingError (_, _, _, _) + | ERecursionLimit (_, _) + | EModuleOutsideRoot (_, _) + | EMalformedPackageJson (_, _) + | EExperimentalDecorators _ + | EExperimentalClassProperties (_, _) + | EUnsafeGetSet _ + | EUninitializedInstanceProperty _ + | EExperimentalExportStarAs _ + | EExperimentalEnums _ + | EIndeterminateModuleType _ + | EBadExportPosition _ + | EBadExportContext _ + | EUnreachable _ + | EInvalidTypeof (_, _) + | EBinaryInLHS _ + | EBinaryInRHS _ + | EArithmeticOperand _ + | EForInRHS _ + | EObjectComputedPropertyAccess (_, _) + | EObjectComputedPropertyAssign (_, _) + | EInvalidLHSInAssignment _ + | EUnsupportedImplements _ + | EReactElementFunArity (_, _, _) + | EUnsupportedSetProto _ + | EDuplicateModuleProvider { module_name = _; provider = _; conflict = _ } + | EParseError (_, _) + | EDocblockError (_, _) + | EImplicitInexactObject _ + | EUntypedTypeImport (_, _) + | EUntypedImport (_, _) + | ENonstrictImport _ + | EUnclearType _ + | EDeprecatedType _ + | EDeprecatedUtility _ + | EDynamicExport _ + | EUnsafeGettersSetters _ + | EUnusedSuppression _ + | ELintSetting _ + | ESketchyNullLint { kind = _; loc = _; null_loc = _; falsy_loc = _ } + | ESketchyNumberLint _ + | EInvalidPrototype _ + | EExperimentalOptionalChaining _ + | EOptionalChainingMethods _ + | EUnnecessaryOptionalChain _ + | EUnnecessaryInvariant _ + | EInexactSpread _ + | EUnexpectedTemporaryBaseType _ + | ECannotDelete _ + | EBigIntNotYetSupported _ + | ESignatureVerification _ + | ENonArraySpread _ + | ECannotSpreadInterface _ + | ECannotSpreadIndexerOnRight _ + | EUnableToSpread _ + | EInexactMayOverwriteIndexer _ -> + nope + +(* Not all messages (i.e. those whose locations are based on use_ops) have locations that can be + determined while locations are abstract. We just return None in this case. 
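   Callers that need a definite location therefore supply their own fallback,
   roughly as in this sketch (`loc_of_msg_or` and `fallback_loc` are
   placeholders, not part of this change):

     let loc_of_msg_or ~fallback_loc msg =
       match aloc_of_msg msg with
       | Some loc -> loc
       | None -> fallback_loc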
*) +let aloc_of_msg : t -> ALoc.t option = function + | EValueUsedAsType { reason_use = primary } + | EComparison (primary, _) + | EFunPredCustom ((primary, _), _) + | EDynamicExport (_, primary) + | EInexactSpread (_, primary) + | EInvalidTypeArgs (_, primary) + | ETooFewTypeArgs (primary, _, _) + | ETooManyTypeArgs (primary, _, _) -> + Some (aloc_of_reason primary) + | ESketchyNumberLint (_, reason) + | EInvalidPrototype reason + | EBigIntNotYetSupported reason + | EUnsupportedSetProto reason + | EReactElementFunArity (reason, _, _) + | EUnsupportedImplements reason + | EObjectComputedPropertyAssign (_, reason) + | EObjectComputedPropertyAccess (_, reason) + | EForInRHS reason + | EBinaryInRHS reason + | EBinaryInLHS reason + | EArithmeticOperand reason + | ERecursionLimit (reason, _) + | EMissingAnnotation (reason, _) + | EIdxArity reason + | EIdxUse1 reason + | EIdxUse2 reason + | EUnsupportedExact (_, reason) + | EPolarityMismatch { reason; _ } + | ENoNamedExport (reason, _, _, _) + | EOnlyDefaultExport (reason, _, _) + | ENoDefaultExport (reason, _, _) + | ERefineAsValue (reason, _) + | EImportTypeAsValue (reason, _) + | EImportTypeAsTypeof (reason, _) + | EExportValueAsType (reason, _) + | EImportValueAsType (reason, _) + | EDebugPrint (reason, _) + | ENonArraySpread reason -> + Some (aloc_of_reason reason) + (* We position around the use of the object instead of the spread because the + * spread may be part of a polymorphic type signature. If we add a suppression there, + * the reduction in coverage is far more drastic. *) + | ECannotSpreadInterface { spread_reason = _; interface_reason = reason } + | ECannotSpreadIndexerOnRight { spread_reason = _; object_reason = reason; key_reason = _ } + | EUnableToSpread + { + spread_reason = _; + object1_reason = _; + object2_reason = reason; + propname = _; + error_kind = _; + } + | EInexactMayOverwriteIndexer + { spread_reason = _; key_reason = _; value_reason = _; object2_reason = reason } -> + Some (aloc_of_reason reason) + | EUntypedTypeImport (loc, _) + | EUntypedImport (loc, _) + | ENonstrictImport loc + | EUnclearType loc + | EDeprecatedType loc + | EDeprecatedUtility (loc, _) + | EUnsafeGettersSetters loc + | EUnnecessaryOptionalChain (loc, _) + | EUnnecessaryInvariant (loc, _) + | EOptionalChainingMethods loc + | EExperimentalOptionalChaining loc + | EUnusedSuppression loc + | EDocblockError (loc, _) + | EImplicitInexactObject loc + | EParseError (loc, _) + | EInvalidLHSInAssignment loc + | EInvalidTypeof (loc, _) + | EUnreachable loc + | EUnexpectedTemporaryBaseType loc + | ECannotDelete (loc, _) + | EBadExportContext (_, loc) + | EBadExportPosition loc + | EIndeterminateModuleType loc + | EExperimentalExportStarAs loc + | EExperimentalEnums loc + | EUnsafeGetSet loc + | EUninitializedInstanceProperty (loc, _) + | EExperimentalClassProperties (loc, _) + | EExperimentalDecorators loc + | EModuleOutsideRoot (loc, _) + | EMalformedPackageJson (loc, _) + | EUseArrayLiteral loc + | EUnsupportedSyntax (loc, _) + | EInternal (loc, _) + | EUnexpectedTypeof loc + | EPrivateAnnot loc + | ETrustedAnnot loc + | ERefineAnnot loc + | EPredAnnot loc + | EUnsupportedKeyInObjectType loc + | ECharSetAnnot loc + | EExportsAnnot loc + | EPropertyTypeAnnot loc + | EUnexpectedThisType loc + | ETypeParamMinArity (loc, _) -> + Some loc + | ELintSetting (loc, _) -> Some (ALoc.of_loc loc) + | ETypeParamArity (loc, _) -> Some loc + | ESketchyNullLint { loc; _ } -> Some loc + | ECallTypeArity { call_loc; _ } -> Some call_loc + | EMissingTypeArgs { 
reason_tapp; _ } -> Some (aloc_of_reason reason_tapp) + | ESignatureVerification sve -> + Signature_builder_deps.With_ALoc.Error.( + (match sve with + | ExpectedSort (_, _, loc) + | ExpectedAnnotation (loc, _) + | InvalidTypeParamUse loc + | UnexpectedObjectKey (loc, _) + | UnexpectedObjectSpread (loc, _) + | UnexpectedArraySpread (loc, _) + | UnexpectedArrayHole loc + | EmptyArray loc + | EmptyObject loc + | UnexpectedExpression (loc, _) + | SketchyToplevelDef loc + | UnsupportedPredicateExpression loc + | TODO (_, loc) -> + Some loc)) + | EDuplicateModuleProvider { conflict; _ } -> + let loc1 = + Loc.( + let pos = { line = 1; column = 0 } in + { source = Some conflict; start = pos; _end = pos }) + in + Some (ALoc.of_loc loc1) + | EBindingError (_, loc, _, _) -> Some loc + | ESpeculationAmbiguous { reason; _ } -> Some (aloc_of_reason reason) + | EStrictLookupFailed ((reason, _), lreason, _, _) when is_builtin_reason ALoc.source lreason -> + Some (aloc_of_reason reason) + | EFunctionCallExtraArg _ + | ENotAReactComponent _ + | EInvalidReactConfigType _ + | EInvalidReactPropType _ + | EInvalidReactCreateClass _ + | EIncompatibleWithUseOp _ + | ETrustIncompatibleWithUseOp _ + | EIncompatibleDefs _ + | EInvalidObjectKit _ + | EIncompatibleWithShape _ + | EInvalidCharSet _ + | EIncompatibleWithExact _ + | EUnionSpeculationFailed _ + | ETupleUnsafeWrite _ + | EROArrayWrite _ + | ETupleOutOfBounds _ + | ETupleNonIntegerIndex _ + | ENonLitArrayToTuple _ + | ETupleArityMismatch _ + | EAdditionMixed _ + | EPrivateLookupFailed _ + | EStrictLookupFailed _ + | EPropPolarityMismatch _ + | EPropNotReadable _ + | EPropNotWritable _ + | EPropNotFound _ + | EExpectedBooleanLit _ + | EExpectedNumberLit _ + | EExpectedStringLit _ + | EIncompatibleProp _ + | EIncompatible _ -> + None + +let kind_of_msg = + Errors.( + function + | EUntypedTypeImport _ -> LintError Lints.UntypedTypeImport + | EUntypedImport _ -> LintError Lints.UntypedImport + | ENonstrictImport _ -> LintError Lints.NonstrictImport + | EUnclearType _ -> LintError Lints.UnclearType + | EDeprecatedType _ -> LintError Lints.DeprecatedType + | EDeprecatedUtility _ -> LintError Lints.DeprecatedUtility + | EDynamicExport _ -> LintError Lints.DynamicExport + | EUnsafeGettersSetters _ -> LintError Lints.UnsafeGettersSetters + | ESketchyNullLint { kind; _ } -> LintError (Lints.SketchyNull kind) + | ESketchyNumberLint (kind, _) -> LintError (Lints.SketchyNumber kind) + | EUnnecessaryOptionalChain _ -> LintError Lints.UnnecessaryOptionalChain + | EUnnecessaryInvariant _ -> LintError Lints.UnnecessaryInvariant + | EInexactSpread _ -> LintError Lints.InexactSpread + | ESignatureVerification _ -> LintError Lints.SignatureVerificationFailure + | EImplicitInexactObject _ -> LintError Lints.ImplicitInexactObject + | EUninitializedInstanceProperty _ -> LintError Lints.UninitializedInstanceProperty + | ENonArraySpread _ -> LintError Lints.NonArraySpread + | EBadExportPosition _ + | EBadExportContext _ -> + InferWarning ExportKind + | EUnexpectedTypeof _ + | EExperimentalDecorators _ + | EExperimentalClassProperties _ + | EUnsafeGetSet _ + | EExperimentalExportStarAs _ + | EExperimentalEnums _ + | EIndeterminateModuleType _ + | EUnreachable _ + | EInvalidTypeof _ -> + InferWarning OtherKind + | EInternal _ -> InternalError + | ERecursionLimit _ -> RecursionLimitError + | EDuplicateModuleProvider _ -> DuplicateProviderError + | EParseError _ -> ParseError + | EDocblockError _ + | ELintSetting _ + | EExperimentalOptionalChaining _ + | EOptionalChainingMethods _ -> + 
PseudoParseError + | _ -> InferError) + +let mk_prop_message = + Errors.Friendly.( + function + | None + | Some "$key" + | Some "$value" -> + [text "an index signature declaring the expected key / value type"] + | Some "$call" -> [text "a call signature declaring the expected parameter / return type"] + | Some prop -> [text "property "; code prop]) + +let string_of_internal_error = function + | PackageHeapNotFound pkg -> spf "package %S was not found in the PackageHeap!" pkg + | AbnormalControlFlow -> "abnormal control flow" + | MethodNotAFunction -> "expected function type" + | OptionalMethod -> "optional methods are not supported" + | OpenPredWithoutSubst -> "OpenPredT ~> OpenPredT without substitution" + | PredFunWithoutParamNames -> "FunT -> FunT no params" + | UnsupportedGuardPredicate pred -> spf "unsupported guard predicate (%s)" pred + | BreakEnvMissingForCase -> "break env missing for case" + | PropertyDescriptorPropertyCannotBeRead -> "unexpected property in properties object" + | ForInLHS -> "unexpected LHS in for...in" + | ForOfLHS -> "unexpected LHS in for...of" + | InstanceLookupComputed -> "unexpected computed property lookup on InstanceT" + | PropRefComputedOpen -> "unexpected open computed property element type" + | PropRefComputedLiteral -> "unexpected literal computed property element type" + | ShadowReadComputed -> "unexpected shadow read on computed property" + | ShadowWriteComputed -> "unexpected shadow write on computed property" + | RestParameterNotIdentifierPattern -> + "unexpected rest parameter, expected an identifier pattern" + | InterfaceTypeSpread -> "unexpected spread property in interface" + | DebugThrow -> "debug throw" + | MergeTimeout s -> spf "merge job timed out after %0.2f seconds" s + | MergeJobException exc -> "uncaught exception: " ^ Exception.to_string exc + | CheckTimeout s -> spf "check job timed out after %0.2f seconds" s + | CheckJobException exc -> "uncaught exception: " ^ Exception.to_string exc + | UnexpectedTypeapp s -> "unexpected typeapp: " ^ s + +(* Friendly messages are created differently based on the specific error they come from, so + we collect the ingredients here and pass them to make_error_printable *) +type 'loc friendly_message_recipe = + | IncompatibleUse of + 'loc + * 'loc upper_kind + * 'loc Reason.virtual_reason + * 'loc Reason.virtual_reason + * 'loc Type.virtual_use_op + | Speculation of 'loc * 'loc Type.virtual_use_op * ('loc Reason.virtual_reason * t) list + | Incompatible of + 'loc Reason.virtual_reason * 'loc Reason.virtual_reason * 'loc Type.virtual_use_op + | IncompatibleTrust of + 'loc Reason.virtual_reason * 'loc Reason.virtual_reason * 'loc Type.virtual_use_op + | PropMissing of 'loc * string option * 'loc Reason.virtual_reason * 'loc Type.virtual_use_op + | Normal of 'loc Errors.Friendly.message_feature list + | UseOp of 'loc * 'loc Errors.Friendly.message_feature list * 'loc Type.virtual_use_op + | PropPolarityMismatch of + string option + * ('loc Reason.virtual_reason * Polarity.t) + * ('loc Reason.virtual_reason * Polarity.t) + * 'loc Type.virtual_use_op + +let friendly_message_of_msg : Loc.t t' -> Loc.t friendly_message_recipe = + let text = Errors.Friendly.text in + let code = Errors.Friendly.code in + let ref = Errors.Friendly.ref in + let desc = Errors.Friendly.ref ~loc:false in + let msg_export prefix export_name = + if export_name = "default" then + (text "", text "the default export") + else + (text prefix, code export_name) + in + Errors.( + function + | EIncompatible + { lower = (reason_lower, _); 
upper = (reason_upper, upper_kind); use_op; branches } -> + if branches = [] then + IncompatibleUse + ( loc_of_reason reason_upper, + upper_kind, + reason_lower, + reason_upper, + Option.value ~default:unknown_use use_op ) + else + Speculation (loc_of_reason reason_upper, Option.value ~default:unknown_use use_op, branches) + | EIncompatibleDefs { use_op; reason_lower; reason_upper; branches } -> + if branches = [] then + Incompatible (reason_lower, reason_upper, use_op) + else + Speculation (loc_of_reason reason_upper, use_op, branches) + | EIncompatibleProp { prop; reason_prop; reason_obj; special = _; use_op } -> + PropMissing + (loc_of_reason reason_prop, prop, reason_obj, Option.value ~default:unknown_use use_op) + | EDebugPrint (_, str) -> Normal [text str] + | EExportValueAsType (_, export_name) -> + Normal [text "Cannot export the value "; code export_name; text " as a type."] + | EImportValueAsType (_, export_name) -> + let (prefix, export) = msg_export "the value " export_name in + Normal + [ + text "Cannot import "; + prefix; + export; + text " as a type. "; + code "import type"; + text " only works on type exports like type aliases, "; + text "interfaces, and classes. If you intended to import the type of a "; + text "value use "; + code "import typeof"; + text " instead."; + ] + | EImportTypeAsTypeof (_, export_name) -> + let (prefix, export) = msg_export "the type " export_name in + Normal + [ + text "Cannot import "; + prefix; + export; + text " as a type. "; + code "import typeof"; + text " only works on value exports like variables, "; + text "functions, and classes. If you intended to import a type use "; + code "import type"; + text " instead."; + ] + | EImportTypeAsValue (_, export_name) -> + let (prefix, export) = msg_export "the type " export_name in + Normal + [ + text "Cannot import "; + prefix; + export; + text " as a value. "; + text "Use "; + code "import type"; + text " instead."; + ] + | ERefineAsValue (_, name) -> + let (_, export) = msg_export "" name in + Normal + [ + text "Cannot refine "; + export; + text " as a value. "; + (* text "Use "; code "import type"; text " instead."; *) + + ] + | ENoDefaultExport (_, module_name, suggestion) -> + Normal + ( [ + text "Cannot import a default export because there is no default export "; + text "in "; + code module_name; + text "."; + ] + @ + match suggestion with + | None -> [] + | Some suggestion -> + [ + text " "; + text "Did you mean "; + code (spf "import {%s} from \"%s\"" suggestion module_name); + text "?"; + ] ) + | EOnlyDefaultExport (_, module_name, export_name) -> + Normal + [ + text "Cannot import "; + code export_name; + text " because "; + text "there is no "; + code export_name; + text " export in "; + code module_name; + text ". 
Did you mean "; + code (spf "import %s from \"...\"" export_name); + text "?"; + ] + | ENoNamedExport (_, module_name, export_name, suggestion) -> + Normal + ( [ + text "Cannot import "; + code export_name; + text " because "; + text "there is no "; + code export_name; + text " export in "; + code module_name; + text "."; + ] + @ + match suggestion with + | None -> [] + | Some suggestion -> [text " Did you mean "; code suggestion; text "?"] ) + | EMissingTypeArgs { reason_tapp; reason_arity; min_arity; max_arity } -> + let (arity, args) = + if min_arity = max_arity then + ( spf "%d" max_arity, + if max_arity = 1 then + "argument" + else + "arguments" ) + else + (spf "%d-%d" min_arity max_arity, "arguments") + in + let reason_arity = replace_desc_reason (desc_of_reason reason_tapp) reason_arity in + Normal [text "Cannot use "; ref reason_arity; text (spf " without %s type %s." arity args)] + | ETooManyTypeArgs (reason_tapp, reason_arity, n) -> + let reason_arity = replace_desc_reason (desc_of_reason reason_tapp) reason_arity in + Normal + [ + text "Cannot use "; + ref reason_arity; + text " with more than "; + text + (spf + "%n type %s." + n + ( if n == 1 then + "argument" + else + "arguments" )); + ] + | ETooFewTypeArgs (reason_tapp, reason_arity, n) -> + let reason_arity = replace_desc_reason (desc_of_reason reason_tapp) reason_arity in + Normal + [ + text "Cannot use "; + ref reason_arity; + text " with fewer than "; + text + (spf + "%n type %s." + n + ( if n == 1 then + "argument" + else + "arguments" )); + ] + | EInvalidTypeArgs (reason_main, reason_tapp) -> + Normal + [text "Cannot use "; ref reason_main; text " with "; ref reason_tapp; text " argument"] + | ETypeParamArity (_, n) -> + if n = 0 then + Normal [text "Cannot apply type because it is not a polymorphic type."] + else + Normal + [ + text "Cannot use type without exactly "; + text + (spf + "%n type %s." + n + ( if n == 1 then + "argument" + else + "arguments" )); + ] + | ETypeParamMinArity (_, n) -> + Normal + [ + text "Cannot use type without at least "; + text + (spf + "%n type %s." + n + ( if n == 1 then + "argument" + else + "arguments" )); + ] + | ECallTypeArity { call_loc = _; is_new; reason_arity; expected_arity = n } -> + let use = + if is_new then + "construct " + else + "call " + in + if n = 0 then + Normal + [ + text "Cannot "; + text use; + text "non-polymorphic "; + ref reason_arity; + text " with type arguments."; + ] + else + Normal + [ + text "Cannot "; + text use; + ref reason_arity; + text " without exactly "; + text + (spf + "%n type argument%s." + n + ( if n == 1 then + "" + else + "s" )); + ] + | EValueUsedAsType { reason_use } -> + Normal + [ + text "Cannot use "; + desc reason_use; + text " as a type. "; + text "A name can be used as a type only if it refers to "; + text "a type definition, an interface definition, or a class definition. 
"; + text "To get the type of a non-class value, use "; + code "typeof"; + text "."; + ] + | EExpectedStringLit { reason_lower; reason_upper; use_op } -> + Incompatible (reason_lower, reason_upper, use_op) + | EExpectedNumberLit { reason_lower; reason_upper; use_op } -> + Incompatible (reason_lower, reason_upper, use_op) + | EExpectedBooleanLit { reason_lower; reason_upper; use_op } -> + Incompatible (reason_lower, reason_upper, use_op) + | EPropNotFound (prop, reasons, use_op) -> + let (reason_prop, reason_obj) = reasons in + PropMissing (loc_of_reason reason_prop, prop, reason_obj, use_op) + | EPropNotReadable { reason_prop; prop_name = x; use_op } -> + UseOp (loc_of_reason reason_prop, mk_prop_message x @ [text " is not readable"], use_op) + | EPropNotWritable { reason_prop; prop_name = x; use_op } -> + UseOp (loc_of_reason reason_prop, mk_prop_message x @ [text " is not writable"], use_op) + | EPropPolarityMismatch (reasons, x, (p1, p2), use_op) -> + let (lreason, ureason) = reasons in + PropPolarityMismatch (x, (lreason, p1), (ureason, p2), use_op) + | EPolarityMismatch { reason; name; expected_polarity; actual_polarity } -> + let polarity_string = function + | Polarity.Positive -> "output" + | Polarity.Negative -> "input" + | Polarity.Neutral -> "input/output" + in + let expected_polarity = polarity_string expected_polarity in + let actual_polarity = polarity_string actual_polarity in + let reason_targ = mk_reason (RIdentifier name) (def_loc_of_reason reason) in + Normal + [ + text "Cannot use "; + ref reason_targ; + text (" in an " ^ actual_polarity ^ " "); + text "position because "; + ref reason_targ; + text " is expected to occur only in "; + text (expected_polarity ^ " positions."); + ] + | EStrictLookupFailed (reasons, lreason, x, use_op) -> + (* if we're looking something up on the global/builtin object, then tweak + the error to say that `x` doesn't exist. We can tell this is the + global object because that should be the only object created with + `builtin_reason` instead of an actual location (isee `Init_js.init`). 
*) + if is_builtin_reason Loc.source lreason then + let (reason, _) = reasons in + let msg = + match x with + | Some x when is_internal_module_name x -> + [text "Cannot resolve module "; code (uninternal_module_name x); text "."] + | None -> [text "Cannot resolve name "; desc reason; text "."] + | Some x when is_internal_name x -> [text "Cannot resolve name "; desc reason; text "."] + | Some x -> [text "Cannot resolve name "; code x; text "."] + in + Normal msg + else + let (reason_prop, reason_obj) = reasons in + PropMissing + (loc_of_reason reason_prop, x, reason_obj, Option.value ~default:unknown_use use_op) + | EPrivateLookupFailed (reasons, x, use_op) -> + PropMissing (loc_of_reason (fst reasons), Some ("#" ^ x), snd reasons, use_op) + | EAdditionMixed (reason, use_op) -> + UseOp + ( loc_of_reason reason, + [ref reason; text " could either behave like a string or like a number"], + use_op ) + | EComparison (lower, upper) -> + Normal [text "Cannot compare "; ref lower; text " to "; ref upper; text "."] + | ETupleArityMismatch (reasons, l1, l2, use_op) -> + let (lower, upper) = reasons in + UseOp + ( loc_of_reason lower, + [ + ref lower; + text (spf " has an arity of %d but " l1); + ref upper; + text (spf " has an arity of %d" l2); + ], + use_op ) + | ENonLitArrayToTuple (reasons, use_op) -> + let (lower, upper) = reasons in + UseOp + ( loc_of_reason lower, + [ + ref lower; + text " has an unknown number of elements, so is "; + text "incompatible with "; + ref upper; + ], + use_op ) + | ETupleOutOfBounds { reason; reason_op; length; index; use_op } -> + UseOp + ( loc_of_reason reason, + [ + ref reason_op; + text + (spf + " only has %d element%s, so index %s is out of bounds" + length + ( if length == 1 then + "" + else + "s" ) + index); + ], + use_op ) + | ETupleNonIntegerIndex { reason; index; use_op } -> + let index_ref = Errors.Friendly.(Reference ([Code index], def_loc_of_reason reason)) in + UseOp + ( loc_of_reason reason, + [ + text "the index into a tuple must be an integer, but "; + index_ref; + text " is not an integer"; + ], + use_op ) + | ETupleUnsafeWrite { reason; use_op } -> + UseOp + ( loc_of_reason reason, + [text "the index must be statically known to write a tuple element"], + use_op ) + | EROArrayWrite (reasons, use_op) -> + let (lower, _) = reasons in + UseOp (loc_of_reason lower, [text "read-only arrays cannot be written to"], use_op) + | EUnionSpeculationFailed { use_op; reason; reason_op = _; branches } -> + Speculation (loc_of_reason reason, use_op, branches) + | ESpeculationAmbiguous + { reason = _; prev_case = (prev_i, prev_case); case = (i, case); cases = case_rs } -> + Friendly.( + let prev_case_r = + mk_reason (RCustom ("case " ^ string_of_int (prev_i + 1))) (loc_of_reason prev_case) + in + let case_r = mk_reason (RCustom ("case " ^ string_of_int (i + 1))) (loc_of_reason case) in + Normal + ( [ + text "Could not decide which case to select, since "; + ref prev_case_r; + text " "; + text "may work but if it doesn't "; + ref case_r; + text " looks promising "; + text "too. 
To fix add a type annotation "; + ] + @ conjunction_concat + ~conjunction:"or" + (Core_list.map + ~f:(fun case_r -> + let text = "to " ^ string_of_desc (desc_of_reason case_r) in + [ref (mk_reason (RCustom text) (loc_of_reason case_r))]) + case_rs) + @ [text "."] )) + | EIncompatibleWithExact (reasons, use_op) -> + let (lower, upper) = reasons in + UseOp + ( loc_of_reason lower, + [text "inexact "; ref lower; text " is incompatible with exact "; ref upper], + use_op ) + | EUnsupportedExact (_, lower) -> + Normal [text "Cannot create exact type from "; ref lower; text "."] + | EIdxArity _ -> + Normal + [ + text "Cannot call "; + code "idx(...)"; + text " because only exactly two "; + text "arguments are allowed."; + ] + | EIdxUse1 _ -> + Normal + [ + text "Cannot call "; + code "idx(...)"; + text " because the callback "; + text "argument must not be annotated."; + ] + | EIdxUse2 _ -> + Normal + [ + text "Cannot call "; + code "idx(...)"; + text " because the callback must "; + text "only access properties on the callback parameter."; + ] + | EUnexpectedThisType _ -> Normal [text "Unexpected use of "; code "this"; text " type."] + | EPropertyTypeAnnot _ -> + Normal + [ + text "Cannot use "; + code "$PropertyType"; + text " because the second "; + text "type argument must be a string literal."; + ] + | EExportsAnnot _ -> + Normal + [ + text "Cannot use "; + code "$Exports"; + text " because the first type "; + text "argument must be a string literal."; + ] + | ECharSetAnnot _ -> + Normal + [ + text "Cannot use "; + code "$CharSet"; + text " because the first type "; + text "argument must be a string literal."; + ] + | EInvalidCharSet { invalid = (invalid_reason, invalid_chars); valid = valid_reason; use_op } + -> + let valid_reason = + mk_reason (desc_of_reason valid_reason) (def_loc_of_reason valid_reason) + in + let invalids = + InvalidCharSetSet.fold + (fun c acc -> + match c with + | InvalidChar c -> [code (String.make 1 c); text " is not a member of the set"] :: acc + | DuplicateChar c -> [code (String.make 1 c); text " is duplicated"] :: acc) + invalid_chars + [] + |> List.rev + in + UseOp + ( loc_of_reason invalid_reason, + [ref invalid_reason; text " is incompatible with "; ref valid_reason; text " since "] + @ Friendly.conjunction_concat ~conjunction:"and" invalids, + use_op ) + | EUnsupportedKeyInObjectType _ -> Normal [text "Unsupported key in object type."] + | EPredAnnot _ -> + Normal + [ + text "Cannot use "; + code "$Pred"; + text " because the first "; + text "type argument must be a number literal."; + ] + | ERefineAnnot _ -> + Normal + [ + text "Cannot use "; + code "$Refine"; + text " because the third "; + text "type argument must be a number literal."; + ] + | ETrustedAnnot _ -> Normal [text "Not a valid type to mark as "; code "$Trusted"; text "."] + | EPrivateAnnot _ -> Normal [text "Not a valid type to mark as "; code "$Private"; text "."] + | EUnexpectedTypeof _ -> + Normal [code "typeof"; text " can only be used to get the type of variables."] + | EFunPredCustom ((a, b), msg) -> + Normal [ref a; text ". 
"; text msg; text " "; ref b; text "."] + | EIncompatibleWithShape (lower, upper, use_op) -> + UseOp + ( loc_of_reason lower, + [ref lower; text " is incompatible with "; code "$Shape"; text " of "; ref upper], + use_op ) + | EInternal (_, internal_error) -> + let msg = string_of_internal_error internal_error in + Normal [text (spf "Internal error: %s" msg)] + | EUnsupportedSyntax (_, unsupported_syntax) -> + let msg = + match unsupported_syntax with + | ComprehensionExpression + | GeneratorExpression + | MetaPropertyExpression -> + [text "Not supported."] + | ObjectPropertyLiteralNonString -> + [text "Non-string literal property keys not supported."] + | ObjectPropertyGetSet -> [text "Get/set properties not yet supported."] + | ObjectPropertyComputedGetSet -> + [text "Computed getters and setters are not yet supported."] + | InvariantSpreadArgument -> + [text "Unsupported arguments in call to "; code "invariant"; text "."] + | ClassPropertyLiteral -> [text "Literal properties not yet supported."] + | ClassPropertyComputed -> [text "Computed property keys not supported."] + | ReactCreateClassPropertyNonInit -> + [text "Unsupported property specification in "; code "createClass"; text "."] + | RequireDynamicArgument -> + [text "The parameter passed to "; code "require"; text " must be a string literal."] + | ImportDynamicArgument -> + [text "The parameter passed to "; code "import"; text " must be a string literal."] + | RequireLazyDynamicArgument -> + [ + text "The first argument to "; + code "requireLazy"; + text " must be an "; + text "array literal of string literals and the second argument must "; + text "be a callback."; + ] + | CatchParameterAnnotation -> + [text "Type annotations for catch parameters are not yet supported."] + | CatchParameterDeclaration -> [text "Unsupported catch parameter declaration."] + | DestructuringObjectPropertyLiteralNonString -> + [text "Unsupported non-string literal object property in destructuring."] + | DestructuringExpressionPattern -> + [text "Unsupported expression pattern in destructuring."] + | PredicateDeclarationForImplementation -> + [text "Cannot declare predicate when a function body is present."] + | PredicateDeclarationWithoutExpression -> + [text "Predicate function declarations need to declare a "; text "predicate expression."] + | PredicateDeclarationAnonymousParameters -> + [ + text "Predicate function declarations cannot use anonymous "; + text "function parameters."; + ] + | PredicateInvalidBody -> + [ + text "Invalid body for predicate function. Expected a simple return "; + text "statement as body."; + ] + | PredicateFunctionAbstractReturnType -> + [ + text "The return type of a predicate function cannot contain a generic type. 
"; + text "The function predicate will be ignored here."; + ] + | PredicateVoidReturn -> [text "Predicate functions need to return non-void."] + | MultipleIndexers -> [text "Multiple indexers are not supported."] + | MultipleProtos -> [text "Multiple prototypes specified."] + | ExplicitCallAfterProto -> [text "Unexpected call property after explicit prototype."] + | ExplicitProtoAfterCall -> [text "Unexpected prototype after call property."] + | SpreadArgument -> [text "A spread argument is unsupported here."] + | IllegalName -> [text "Illegal name."] + | UnsupportedInternalSlot { name; static = false } -> + [text "Unsupported internal slot "; code name; text "."] + | UnsupportedInternalSlot { name; static = true } -> + [text "Unsupported static internal slot "; code name; text "."] + in + Normal msg + | EUseArrayLiteral _ -> + Normal [text "Use an array literal instead of "; code "new Array(...)"; text "."] + | EMissingAnnotation (reason, _) -> + let default = [text "Missing type annotation for "; desc reason; text "."] in + let msg = + match desc_of_reason reason with + | RTypeParam (_, (RImplicitInstantiation, _), _) -> + [ + text "Please use a concrete type annotation instead of "; + code "_"; + text " in this position."; + ] + | RTypeParam (_, (reason_op_desc, reason_op_loc), (reason_tapp_desc, reason_tapp_loc)) -> + let reason_op = mk_reason reason_op_desc reason_op_loc in + let reason_tapp = mk_reason reason_tapp_desc reason_tapp_loc in + default + @ [ + text " "; + desc reason; + text " is a type parameter declared in "; + ref reason_tapp; + text " and was implicitly instantiated at "; + ref reason_op; + text "."; + ] + | _ -> default + in + (* We don't collect trace info in the assert_ground_visitor because traces + * represent tests of lower bounds to upper bounds, and the assert_ground + * visitor is just visiting types. 
Instead, we collect a list of types we + * visited to get to the missing annotation error and report that as the + * trace *) + Normal msg + | EBindingError (binding_error, _, x, entry) -> + let desc = + if x = internal_name "this" then + RThis + else if x = internal_name "super" then + RSuper + else + RIdentifier x + in + (* We can call to_loc here because reaching this point requires that everything else + in the error message is concretized already; making Scopes polymorphic is not a good idea *) + let x = mk_reason desc (Scope.Entry.entry_loc entry |> ALoc.to_loc_exn) in + let msg = + match binding_error with + | ENameAlreadyBound -> + [text "Cannot declare "; ref x; text " because the name is already bound."] + | EReferencedBeforeDeclaration -> + if desc = RThis || desc = RSuper then + [ + text "Must call "; + code "super"; + text " before accessing "; + ref x; + text " in a derived constructor."; + ] + else + [ + text "Cannot use variable "; + ref x; + text " because the declaration "; + text "either comes later or was skipped."; + ] + | ETypeInValuePosition + | ETypeAliasInValuePosition -> + [text "Cannot reference type "; ref x; text " from a value position."] + | EConstReassigned + | EConstParamReassigned -> + [text "Cannot reassign constant "; ref x; text "."] + | EImportReassigned -> [text "Cannot reassign import "; ref x; text "."] + | EEnumReassigned -> [text "Cannot reassign enum "; ref x; text "."] + in + Normal msg + | ERecursionLimit _ -> Normal [text "*** Recursion limit exceeded ***"] + | EModuleOutsideRoot (_, package_relative_to_root) -> + Normal + [ + text "This module resolves to "; + code package_relative_to_root; + text " which "; + text "is outside both your root directory and all of the entries in the "; + code "[include]"; + text " section of your "; + code ".flowconfig"; + text ". "; + text "You should either add this directory to the "; + code "[include]"; + text " "; + text "section of your "; + code ".flowconfig"; + text ", move your "; + code ".flowconfig"; + text " file higher in the project directory tree, or "; + text "move this package under your Flow root directory."; + ] + | EMalformedPackageJson (_, error) -> Normal [text error] + | EExperimentalDecorators _ -> + Normal + [ + text "Experimental decorator usage. Decorators are an early stage "; + text "proposal that may change. Additionally, Flow does not account for "; + text "the type implications of decorators at this time."; + ] + | EExperimentalClassProperties (_, static) -> + let (config_name, config_key) = + if static then + ("class static field", "class_static_fields") + else + ("class instance field", "class_instance_fields") + in + Normal + [ + text ("Experimental " ^ config_name ^ " usage. "); + text (String.capitalize_ascii config_name ^ "s are an active early stage "); + text "feature proposal that may change. You may opt-in to using them "; + text "anyway in Flow by putting "; + code ("esproposal." ^ config_key ^ "=enable"); + text " "; + text "into the "; + code "[options]"; + text " section of your "; + code ".flowconfig"; + text "."; + ] + | EUnsafeGetSet _ -> + Normal + [ + text "Potentially unsafe get/set usage. Getters and setters with side "; + text "effects are potentially unsafe and so disabled by default. 
You may "; + text "opt-in to using them anyway by putting "; + code "unsafe.enable_getters_and_setters"; + text " into the "; + code "[options]"; + text " section of your "; + code ".flowconfig"; + text "."; + ] + | EUninitializedInstanceProperty (_loc, err) -> + Lints.( + (match err with + | PropertyNotDefinitelyInitialized -> + Normal + [ + text "Class property not definitely initialized in the constructor. "; + text "Can you add an assignment to the property declaration?"; + ] + | ReadFromUninitializedProperty -> + Normal + [ + text "It is unsafe to read from a class property before it is "; + text "definitely initialized."; + ] + | MethodCallBeforeEverythingInitialized -> + Normal + [ + text "It is unsafe to call a method in the constructor before all "; + text "class properties are definitely initialized."; + ] + | PropertyFunctionCallBeforeEverythingInitialized -> + Normal + [ + text "It is unsafe to call a property function in the constructor "; + text "before all class properties are definitely initialized."; + ] + | ThisBeforeEverythingInitialized -> + Normal + [ + text "It is unsafe to use "; + code "this"; + text " in the constructor "; + text "before all class properties are definitely initialized."; + ])) + | EExperimentalExportStarAs _ -> + Normal + [ + text "Experimental "; + code "export * as"; + text " usage. "; + code "export * as"; + text " is an active early stage feature propsal that "; + text "may change. You may opt-in to using it anyway by putting "; + code "esproposal.export_star_as=enable"; + text " into the "; + code "[options]"; + text " section of your "; + code ".flowconfig"; + text "."; + ] + | EExperimentalEnums _ -> + Normal + [ + text "Experimental "; + code "enum"; + text " usage. "; + text "You may opt-in to using enums by putting "; + code "experimental.enums=true"; + text " into the "; + code "[options]"; + text " section of your "; + code ".flowconfig"; + text "."; + ] + | EIndeterminateModuleType _ -> + Normal + [ + text "Unable to determine module type (CommonJS vs ES) if both an export "; + text "statement and "; + code "module.exports"; + text " are used in the "; + text "same module!"; + ] + | EBadExportPosition _ -> Normal [text "Exports can only appear at the top level"] + | EBadExportContext (name, _) -> + Normal [code name; text " may only be used as part of a legal top level export statement"] + | EUnexpectedTemporaryBaseType _ -> + Normal [text "The type argument of a temporary base type must be a compatible literal type"] + | ECannotDelete (_, expr) -> + Normal + [ + text "Cannot delete "; + ref expr; + text " because only member expressions and variables can be deleted."; + ] + | ESignatureVerification sve -> + Signature_builder_deps.With_ALoc.Error.( + let msg = + match sve with + | ExpectedSort (sort, x, _) -> + [code x; text (spf " is not a %s." (Signature_builder_kind.Sort.to_string sort))] + | ExpectedAnnotation (_, sort) -> + [ + text + (spf + "Missing type annotation at %s:" + (Signature_builder_deps.With_ALoc.ExpectedAnnotationSort.to_string sort)); + ] + | InvalidTypeParamUse _ -> [text "Invalid use of type parameter:"] + | UnexpectedObjectKey _ -> [text "Expected simple key in object:"] + | UnexpectedObjectSpread _ -> [text "Unexpected spread in object:"] + | UnexpectedArraySpread _ -> [text "Unexpected spread in array:"] + | UnexpectedArrayHole _ -> [text "Unexpected array hole:"] + | EmptyArray _ -> + [ + text "Cannot determine the element type of an empty array. 
"; + text + "Please provide an annotation, e.g., by adding a type cast around this expression."; + ] + | EmptyObject _ -> + [ + text "Cannot determine types of initialized properties of an empty object. "; + text + "Please provide an annotation, e.g., by adding a type cast around this expression."; + ] + | UnexpectedExpression (_, esort) -> + [ + text + (spf + "Cannot determine the type of this %s. " + (Flow_ast_utils.ExpressionSort.to_string esort)); + text + "Please provide an annotation, e.g., by adding a type cast around this expression."; + ] + | SketchyToplevelDef _ -> [text "Unexpected toplevel definition that needs hoisting:"] + | UnsupportedPredicateExpression _ -> + [text "Unsupported kind of expression in predicate function:"] + | TODO (msg, _) -> + [text (spf "TODO: %s is not supported yet, try using a type cast." msg)] + in + Normal + ( text "Failed to build a typed interface for this module. " + :: text "The exports of this module must be annotated with types. " + :: msg )) + | EUnreachable _ -> Normal [text "Unreachable code."] + | EInvalidObjectKit { reason; reason_op = _; use_op } -> + UseOp (loc_of_reason reason, [ref reason; text " is not an object"], use_op) + | EInvalidTypeof (_, typename) -> + Normal + [ + text "Cannot compare the result of "; + code "typeof"; + text " to string "; + text "literal "; + code typename; + text " because it is not a valid "; + code "typeof"; + text " return value."; + ] + | EArithmeticOperand reason -> + Normal + [ + text "Cannot perform arithmetic operation because "; + ref reason; + text " "; + text "is not a number."; + ] + | EBinaryInLHS reason -> + (* TODO: or symbol *) + Normal + [ + text "Cannot use "; + code "in"; + text " because on the left-hand side, "; + ref reason; + text " must be a string or number."; + ] + | EBinaryInRHS reason -> + Normal + [ + text "Cannot use "; + code "in"; + text " because on the right-hand side, "; + ref reason; + text " must be an object or array."; + ] + | EForInRHS reason -> + Normal + [ + text "Cannot iterate using a "; + code "for...in"; + text " statement "; + text "because "; + ref reason; + text " is not an object, null, or undefined."; + ] + | EObjectComputedPropertyAccess (_, reason_prop) -> + Normal [text "Cannot access computed property using "; ref reason_prop; text "."] + | EObjectComputedPropertyAssign (_, reason_prop) -> + Normal [text "Cannot assign computed property using "; ref reason_prop; text "."] + | EInvalidLHSInAssignment _ -> Normal [text "Invalid left-hand side in assignment expression."] + | EIncompatibleWithUseOp (l_reason, u_reason, use_op) -> + Incompatible (l_reason, u_reason, use_op) + | ETrustIncompatibleWithUseOp (l_reason, u_reason, use_op) -> + IncompatibleTrust (l_reason, u_reason, use_op) + | EUnsupportedImplements reason -> + Normal [text "Cannot implement "; desc reason; text " because it is not an interface."] + | ENotAReactComponent { reason; use_op } -> + UseOp (loc_of_reason reason, [ref reason; text " is not a React component"], use_op) + | EInvalidReactConfigType { reason; use_op } -> + UseOp (loc_of_reason reason, [ref reason; text " cannot calculate config"], use_op) + | EInvalidReactPropType { reason; use_op; tool } -> + React.( + React.SimplifyPropType.( + let is_not_prop_type = "is not a React propType" in + let msg = + match tool with + | ArrayOf -> is_not_prop_type + | InstanceOf -> "is not a class" + | ObjectOf -> is_not_prop_type + | OneOf ResolveArray -> "is not an array" + | OneOf (ResolveElem _) -> "is not a literal" + | OneOfType 
ResolveArray -> "is not an array" + | OneOfType (ResolveElem _) -> is_not_prop_type + | Shape ResolveObject -> "is not an object" + | Shape (ResolveDict _) -> is_not_prop_type + | Shape (ResolveProp _) -> is_not_prop_type + in + UseOp (loc_of_reason reason, [ref reason; text (" " ^ msg)], use_op))) + | EInvalidReactCreateClass { reason; use_op; tool } -> + React.( + React.CreateClass.( + let is_not_prop_type = "is not a React propType" in + let msg = + match tool with + | Spec _ -> "is not an exact object" + | Mixins _ -> "is not a tuple" + | Statics _ -> "is not an object" + | PropTypes (_, ResolveObject) -> "is not an object" + | PropTypes (_, ResolveDict _) -> is_not_prop_type + | PropTypes (_, ResolveProp _) -> is_not_prop_type + | DefaultProps _ -> "is not an object" + | InitialState _ -> "is not an object or null" + in + UseOp (loc_of_reason reason, [ref reason; text (" " ^ msg)], use_op))) + | EReactElementFunArity (_, fn, n) -> + Normal + [ + text "Cannot call "; + code ("React." ^ fn); + text " "; + text + (spf + "without at least %d argument%s." + n + ( if n == 1 then + "" + else + "s" )); + ] + | EFunctionCallExtraArg (unused_reason, def_reason, param_count, use_op) -> + let msg = + match param_count with + | 0 -> "no arguments are expected by" + | 1 -> "no more than 1 argument is expected by" + | n -> spf "no more than %d arguments are expected by" n + in + UseOp (loc_of_reason unused_reason, [text msg; text " "; ref def_reason], use_op) + | EUnsupportedSetProto _ -> Normal [text "Mutating this prototype is unsupported."] + | EDuplicateModuleProvider { module_name; provider; _ } -> + let loc = + Loc.( + let pos = { line = 1; column = 0 } in + { source = Some provider; start = pos; _end = pos }) + in + Normal + [ + text "Duplicate module provider for "; + code module_name; + text ". Change "; + text "either this module provider or the "; + ref (mk_reason (RCustom "current module provider") loc); + text "."; + ] + | EParseError (_, parse_error) -> + Normal (Friendly.message_of_string (Parse_error.PP.error parse_error)) + | EDocblockError (_, err) -> + let msg = + match err with + | MultipleFlowAttributes -> + [ + text "Unexpected "; + code "@flow"; + text " declaration. Only one per "; + text "file is allowed."; + ] + | MultipleProvidesModuleAttributes -> + [ + text "Unexpected "; + code "@providesModule"; + text " declaration. "; + text "Only one per file is allowed."; + ] + | MultipleJSXAttributes -> + [ + text "Unexpected "; + code "@jsx"; + text " declaration. Only one per "; + text "file is allowed."; + ] + | InvalidJSXAttribute first_error -> + [ + text "Invalid "; + code "@jsx"; + text " declaration. Should have the form "; + code "@jsx LeftHandSideExpression"; + text " with no spaces."; + ] + @ + (match first_error with + | None -> [] + | Some first_error -> [text (spf " Parse error: %s." first_error)]) + in + Normal msg + | EImplicitInexactObject _ -> + Normal + [ + text "Please add "; + code "..."; + text " to the end of the list of "; + text "properties to express an inexact object type."; + ] + | EUntypedTypeImport (_, module_name) -> + Normal + [ + text "Importing a type from an untyped module makes it "; + code "any"; + text " "; + text "and is not safe! Did you mean to add "; + code "// @flow"; + text " to "; + text "the top of "; + code module_name; + text "?"; + ] + | EUntypedImport (_, module_name) -> + Normal + [ + text "Importing from an untyped module makes it "; + code "any"; + text " "; + text "and is not safe! 
Did you mean to add "; + code "// @flow"; + text " "; + text "to the top of "; + code module_name; + text "?"; + ] + | ENonstrictImport _ -> + Normal + [ + text "Dependencies of a "; + code "@flow strict"; + text " module must "; + text "also be "; + code "@flow strict"; + text "!"; + ] + | EUnclearType _ -> + Normal + [ + text "Unclear type. Using "; + code "any"; + text ", "; + code "Object"; + text ", or "; + code "Function"; + text " types is not safe!"; + ] + | EDeprecatedType _ -> + Normal [text "Deprecated type. Using "; code "*"; text " types is not recommended!"] + | EDeprecatedUtility (_, name) -> + Normal [text "Deprecated utility. Using "; code name; text " types is not recommended!"] + | EDynamicExport (reason, reason_exp) -> + Normal + [ + text "Dynamic "; + ref reason; + text " unsafely appears in exported "; + ref reason_exp; + text ". This can cause importing modules to lose type coverage!"; + ] + | EUnsafeGettersSetters _ -> + Normal [text "Getters and setters can have side effects and are unsafe."] + | EUnusedSuppression _ -> Normal [text "Unused suppression comment."] + | ELintSetting (_, kind) -> + let msg = + match kind with + | LintSettings.Redundant_argument -> + [text "Redundant argument. This argument doesn't change any lint settings."] + | LintSettings.Overwritten_argument -> + [ + text "Redundant argument. The values set by this argument are "; + text "overwritten later in this comment."; + ] + | LintSettings.Naked_comment -> + [text "Malformed lint rule. At least one argument is required."] + | LintSettings.Nonexistent_rule -> + [ + text "Nonexistent/misspelled lint rule. Perhaps you have a "; + text "missing/extra "; + code ","; + text "?"; + ] + | LintSettings.Invalid_setting -> + [text "Invalid setting. Valid settings are error, warn, and off."] + | LintSettings.Malformed_argument -> + [ + text "Malformed lint rule. Properly formed rules contain a single "; + code ":"; + text " character. Perhaps you have a missing/extra "; + code ","; + text "?"; + ] + in + Normal msg + | ESketchyNullLint { kind = sketchy_kind; loc = _; falsy_loc; null_loc } -> + let (type_str, value_str) = + match sketchy_kind with + | Lints.SketchyNullBool -> ("boolean", "false") + | Lints.SketchyNullNumber -> ("number", "0") + | Lints.SketchyNullString -> ("string", "an empty string") + | Lints.SketchyNullMixed -> ("mixed", "false") + in + Normal + [ + text "Sketchy null check on "; + ref (mk_reason (RCustom type_str) falsy_loc); + text " "; + text "which is potentially "; + text value_str; + text ". Perhaps you meant to "; + text "check for "; + ref (mk_reason RNullOrVoid null_loc); + text "?"; + ] + | ESketchyNumberLint (_, reason) -> + Normal + [ + text "Avoid using "; + code "&&"; + text " to check the value of "; + ref reason; + text ". "; + text "Consider handling falsy values (0 and NaN) by using a conditional to choose an "; + text "explicit default instead."; + ] + | EInvalidPrototype reason -> + Normal [text "Cannot use "; ref reason; text " as a prototype. Expected an object or null."] + | EExperimentalOptionalChaining _ -> + Normal + [ + text "Experimental optional chaining ("; + code "?."; + text ") usage. "; + text "Optional chaining is an active early-stage feature proposal that "; + text "may change. 
You may opt in to using it anyway by putting "; + code "esproposal.optional_chaining=enable"; + text " into the "; + code "[options]"; + text " section of your "; + code ".flowconfig"; + text "."; + ] + | EOptionalChainingMethods _ -> + Normal [text "Flow does not yet support method or property calls in optional chains."] + | EUnnecessaryOptionalChain (_, lhs_reason) -> + Normal + [ + text "This use of optional chaining ("; + code "?."; + text ") is unnecessary because "; + ref lhs_reason; + text " cannot be nullish or because an earlier "; + code "?."; + text " will short-circuit the nullish case."; + ] + | EUnnecessaryInvariant (_, reason) -> + Normal + [ + text "This use of `invariant` is unnecessary because "; + ref reason; + text " is always truthy."; + ] + | EInexactSpread (reason, reason_op) -> + Normal + [ + text "Cannot determine the type of "; + ref reason_op; + text " because "; + text "it contains a spread of inexact "; + ref reason; + text ". "; + text "Being inexact, "; + ref reason; + text " might be missing the types of some properties that are being copied. "; + text "Perhaps you could make it exact?"; + ] + | EBigIntNotYetSupported reason -> + Normal [text "BigInt "; ref reason; text " is not yet supported."] + | ENonArraySpread reason -> + Normal + [ + text "Cannot spread non-array iterable "; + ref reason; + text ". Use "; + code "...Array.from()"; + text " instead."; + ] + | ECannotSpreadInterface { spread_reason; interface_reason } -> + Normal + [ + text "Cannot determine a type for "; + ref spread_reason; + text ". "; + ref interface_reason; + text " cannot be spread because interfaces do not "; + text "track the own-ness of their properties. Can you use an object type instead?"; + ] + | ECannotSpreadIndexerOnRight { spread_reason; object_reason; key_reason } -> + Normal + [ + text "Cannot determine a type for "; + ref spread_reason; + text ". "; + ref object_reason; + text " cannot be spread because the indexer "; + ref key_reason; + text " may overwrite properties with explicit keys in a way that Flow cannot track. "; + text "Can you spread "; + ref object_reason; + text " first or remove the indexer?"; + ] + | EUnableToSpread { spread_reason; object1_reason; object2_reason; propname; error_kind } -> + let (error_reason, fix_suggestion) = + match error_kind with + | Inexact -> ("is inexact", [text " Can you make "; ref object2_reason; text " exact?"]) + | Indexer -> + ( "has an indexer", + [ + text " Can you remove the indexer in "; + ref object2_reason; + text " or make "; + code propname; + text " a required property?"; + ] ) + in + Normal + ( [ + text "Cannot determine a type for "; + ref spread_reason; + text ". "; + ref object2_reason; + text " "; + text error_reason; + text ", so it may contain "; + code propname; + text " with a type that conflicts with "; + code propname; + text "'s definition in "; + ref object1_reason; + text "."; + ] + @ fix_suggestion ) + | EInexactMayOverwriteIndexer { spread_reason; key_reason; value_reason; object2_reason } -> + Normal + [ + text "Cannot determine a type for "; + ref spread_reason; + text ". "; + ref object2_reason; + text " is inexact and may "; + text "have a property key that conflicts with "; + ref key_reason; + text " or a property value that conflicts with "; + ref value_reason; + text ". 
Can you make "; + ref object2_reason; + text " exact?"; + ]) + +let is_lint_error = function + | EUntypedTypeImport _ + | EUntypedImport _ + | ENonstrictImport _ + | EUnclearType _ + | EDeprecatedType _ + | EDeprecatedUtility _ + | EDynamicExport _ + | EUnsafeGettersSetters _ + | ESketchyNullLint _ + | ESketchyNumberLint _ + | EInexactSpread _ + | EBigIntNotYetSupported _ + | EUnnecessaryOptionalChain _ + | EUnnecessaryInvariant _ + | EImplicitInexactObject _ + | EUninitializedInstanceProperty _ + | ENonArraySpread _ -> + true + | _ -> false diff --git a/src/typing/errors/error_suppressions.ml b/src/typing/errors/error_suppressions.ml new file mode 100644 index 00000000000..41f3c4c646b --- /dev/null +++ b/src/typing/errors/error_suppressions.ml @@ -0,0 +1,290 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* This is a data structure used to track what locations are being suppressed + * and which suppressions have yet to be used. + *) + +open Severity +open Utils_js +open Loc_collections + +exception No_source of string + +module FileSuppressions : sig + type t + + val empty : t + + val is_empty : t -> bool + + val add : Loc.t -> t -> t + + val remove : Loc.t -> t -> t + + val union : t -> t -> t + + val add_lint_suppression : Loc.t -> t -> t + + val remove_lint_suppression : Loc.t -> t -> t + + val suppression_at_loc : Loc.t -> t -> LocSet.t option + + val all_locs : t -> LocSet.t +end = struct + type error_suppressions = LocSet.t SpanMap.t + + type t = { + suppressions: error_suppressions; + lint_suppressions: LocSet.t; + } + + let empty = { suppressions = SpanMap.empty; lint_suppressions = LocSet.empty } + + let is_empty { suppressions; lint_suppressions } = + SpanMap.is_empty suppressions && LocSet.is_empty lint_suppressions + + let add loc { suppressions; lint_suppressions } = + let suppression_loc = + Loc.( + let start = { line = loc._end.line + 1; column = 0 } in + let _end = { line = loc._end.line + 2; column = 0 } in + { loc with start; _end }) + in + let suppressions = + SpanMap.add suppression_loc (LocSet.singleton loc) suppressions ~combine:LocSet.union + in + { suppressions; lint_suppressions } + + let remove loc ({ suppressions; _ } as orig) = + { orig with suppressions = SpanMap.remove loc suppressions } + + let union a b = + { + suppressions = SpanMap.union a.suppressions b.suppressions; + lint_suppressions = LocSet.union a.lint_suppressions b.lint_suppressions; + } + + let add_lint_suppression lint_suppression t = + { t with lint_suppressions = LocSet.add lint_suppression t.lint_suppressions } + + let remove_lint_suppression lint_suppression ({ lint_suppressions; _ } as orig) = + { orig with lint_suppressions = LocSet.remove lint_suppression lint_suppressions } + + let suppression_at_loc loc { suppressions; _ } = SpanMap.get loc suppressions + + let all_locs { suppressions; lint_suppressions } = + suppressions |> SpanMap.values |> List.fold_left LocSet.union lint_suppressions +end + +type t = FileSuppressions.t FilenameMap.t + +let empty = FilenameMap.empty + +let file_of_loc_unsafe loc = + match loc.Loc.source with + | Some x -> x + | None -> raise (No_source (Loc.debug_to_string ~include_source:true loc)) + +let add loc map = + let file = file_of_loc_unsafe loc in + let suppressions = FileSuppressions.empty |> FileSuppressions.add loc in + FilenameMap.add ~combine:FileSuppressions.union file suppressions map + +let union 
= + let combine _key x y = Some (FileSuppressions.union x y) in + (fun a b -> Utils_js.FilenameMap.union ~combine a b) + +let add_lint_suppressions lint_suppressions map = + LocSet.fold + begin + fun loc acc -> + let file = file_of_loc_unsafe loc in + let file_suppressions = + FilenameMap.get file acc |> Option.value ~default:FileSuppressions.empty + in + let file_suppressions = FileSuppressions.add_lint_suppression loc file_suppressions in + FilenameMap.add file file_suppressions acc + end + lint_suppressions + map + +let remove = FilenameMap.remove + +(* raises if `loc` has no filename *) +let file_suppressions_of_loc loc suppressions_map = + let file = file_of_loc_unsafe loc in + match FilenameMap.get file suppressions_map with + | Some x -> x + | None -> FileSuppressions.empty + +(* raises if `loc` has no filename *) +let suppression_at_loc loc suppressions_map = + let file_suppressions = file_suppressions_of_loc loc suppressions_map in + FileSuppressions.suppression_at_loc loc file_suppressions + +(* raises if `loc` has no filename. + * no-op if suppressions_map does not contain an entry for that file. *) +let update_file_suppressions f loc suppressions_map = + let file = file_of_loc_unsafe loc in + match FilenameMap.get file suppressions_map with + | None -> suppressions_map + | Some file_suppressions -> + let file_suppressions = f file_suppressions in + FilenameMap.add file file_suppressions suppressions_map + +let remove_suppression_from_map loc (suppressions_map : t) = + update_file_suppressions (FileSuppressions.remove loc) loc suppressions_map + +let remove_lint_suppression_from_map loc (suppressions_map : t) = + update_file_suppressions (FileSuppressions.remove_lint_suppression loc) loc suppressions_map + +let check_loc suppressions (result, used, (unused : t)) loc = + (* We only want to check the starting position of the reason *) + let loc = Loc.first_char loc in + match suppression_at_loc loc suppressions with + | Some locs -> + let used = LocSet.union locs used in + let unused = remove_suppression_from_map loc unused in + (Off, used, unused) + | None -> (result, used, unused) + +(* Checks if any of the given locations should be suppressed. *) +let check_locs locs (suppressions : t) (unused : t) = + (* We need to check every location in order to figure out which suppressions + are really unused...that's why we don't shortcircuit as soon as we find a + matching error suppression. + If the "primary" location has severity = Off, the error should be + suppressed even if it is not explicit. *) + List.fold_left (check_loc suppressions) (Err, LocSet.empty, unused) locs + +let in_node_modules ~root ~file_options loc = + match Option.both (Loc.source loc) file_options with + | None -> false + | Some (file, options) -> Files.is_within_node_modules ~root ~options (File_key.to_string file) + +let check ~root ~file_options (err : Loc.t Errors.printable_error) (suppressions : t) (unused : t) + = + let locs = + Errors.locs_of_printable_error err + (* It is possible for errors to contain locations without a source, but suppressions always + * exist in an actual file so there is no point checking if suppressions exist at locations + * without a source. *) + |> List.filter (fun loc -> Option.is_some (Loc.source loc)) + in + (* Ignore lint errors from node modules. 
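+ For example, an `unclear-type` lint whose primary location falls under a
+ dependency path like node_modules/some_dep/index.js (an illustrative path,
+ not one from this change) is dropped here outright, while non-lint errors
+ from the same file still go through the regular suppression check below.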
*) + let ignore = + match Errors.kind_of_printable_error err with + | Errors.LintError _ -> in_node_modules ~root ~file_options (Errors.loc_of_printable_error err) + | _ -> false + in + if ignore then + None + else + let (result, used, unused) = check_locs locs suppressions unused in + let result = + match Errors.kind_of_printable_error err with + | Errors.RecursionLimitError -> + (* TODO: any related suppressions should not be considered used *) + Err + | _ -> result + in + Some (result, used, unused) + +(* Gets the locations of the suppression comments that are yet unused *) + +let all_locs map = + FilenameMap.fold + (fun _k v acc -> LocSet.union acc (FileSuppressions.all_locs v)) + map + LocSet.empty + +let filter_suppressed_errors ~root ~file_options suppressions errors ~unused = + (* Filter out suppressed errors. also track which suppressions are used. *) + Errors.ConcreteLocPrintableErrorSet.fold + (fun error ((errors, suppressed, unused) as acc) -> + match check ~root ~file_options error suppressions unused with + | None -> acc + | Some (severity, used, unused) -> + (match severity with + | Off -> (errors, (error, used) :: suppressed, unused) + | _ -> (Errors.ConcreteLocPrintableErrorSet.add error errors, suppressed, unused))) + errors + (Errors.ConcreteLocPrintableErrorSet.empty, [], unused) + +let update_suppressions current_suppressions new_suppressions = + FilenameMap.fold + begin + fun file file_suppressions acc -> + if FileSuppressions.is_empty file_suppressions then + FilenameMap.remove file acc + else + FilenameMap.add file file_suppressions acc + end + new_suppressions + current_suppressions + +let get_lint_settings severity_cover loc = + Option.Monad_infix.( + Loc.source loc + >>= (fun source -> Utils_js.FilenameMap.get source severity_cover >>= ExactCover.find_opt loc)) + +(* Filter out lint errors which are definitely suppressed or were never + * enabled in the first place. *) +let filter_lints suppressions errors aloc_tables ~include_suppressions severity_cover = + Flow_error.( + ErrorSet.fold + (fun error (errors, warnings, suppressions) -> + Severity.( + match (msg_of_error error |> Error_message.kind_of_msg, loc_of_error error) with + | (Errors.LintError lint_kind, Some loc) -> + let loc = ALoc.to_loc_with_tables aloc_tables loc in + begin + match get_lint_settings severity_cover loc with + | None -> + (* This shouldn't happen -- the primary location of a lint error + * should always be in the file where the error was found. Until we + * are more confident that this invariant holds, pass the lint warning + * back to the master process, where it will be filtered in the + * context of the full severity cover set. *) + (errors, ErrorSet.add error warnings, suppressions) + | Some lint_settings -> + (* Lint settings can only affect lint errors when located at the + * error's "primary" location. This is a nice property, since it means + * we can filter out some lint errors here instead of passing them + * back and filtering them later. + * + * Note that a lint error might still be filtered out later, since a + * lint error can be suppressed by a "regular" suppression comment. *) + (match LintSettings.get_value lint_kind lint_settings with + | Off when include_suppressions -> + (* When --include-suppressions is active we only want to remove lints that were + never enabled in the first place, as opposed to those that are enabled but + suppressed. We also add them as an error regardless of what they were in the + first place. 
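+ Roughly speaking, the `is_explicit` check below is what tells these cases
+ apart: a lint that was explicitly configured somewhere (and is off at this
+ location) is re-surfaced as an error, while a lint that was never switched
+ on at all stays filtered out.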
*) + if LintSettings.is_explicit lint_kind lint_settings then + (ErrorSet.add error errors, warnings, suppressions) + else + (errors, warnings, suppressions) + | Off -> + let suppressions = + match LintSettings.get_loc lint_kind lint_settings with + | Some used_suppression -> + remove_lint_suppression_from_map used_suppression suppressions + | _ -> suppressions + in + (errors, warnings, suppressions) + | Warn -> (errors, ErrorSet.add error warnings, suppressions) + | Err -> (ErrorSet.add error errors, warnings, suppressions)) + end + (* Non-lint errors can be suppressed by any location present in the error. + * A dependency location might be part of the error, and the corresponding + * suppression is not available from this worker. We need to pass back all + * errors to be filtered in the master process. *) + | _ -> (ErrorSet.add error errors, warnings, suppressions))) + errors + (ErrorSet.empty, ErrorSet.empty, suppressions)) diff --git a/src/typing/errors/error_suppressions.mli b/src/typing/errors/error_suppressions.mli new file mode 100644 index 00000000000..14a41abe2e0 --- /dev/null +++ b/src/typing/errors/error_suppressions.mli @@ -0,0 +1,51 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t + +val empty : t + +(* Raises if the given loc has `source` set to `None` *) +val add : Loc.t -> t -> t + +val add_lint_suppressions : Loc_collections.LocSet.t -> t -> t + +val remove : File_key.t -> t -> t + +(* Union the two collections of suppressions. If they both contain suppressions for a given file, + * include both sets of suppressions. *) +val union : t -> t -> t + +(* Union the two collections of suppressions. If they both contain suppressions for a given file, + * discard those included in the first argument. *) +val update_suppressions : t -> t -> t + +val all_locs : t -> Loc_collections.LocSet.t + +val filter_suppressed_errors : + root:Path.t -> + file_options:Files.options option -> + t -> + Errors.ConcreteLocPrintableErrorSet.t -> + unused:t -> + Errors.ConcreteLocPrintableErrorSet.t + * (Loc.t Errors.printable_error * Loc_collections.LocSet.t) list + * t + +(* We use an PrintableErrorSet here (as opposed to a ConcretePrintableErrorSet) because this operation happens + during merge rather than during collation as filter_suppressed_errors does *) +val filter_lints : + t -> + Flow_error.ErrorSet.t -> + (* If needed, we will resolve abstract locations using these tables. Context.aloc_tables is most + * likely the right thing to pass to this. *) + ALoc.table Lazy.t Utils_js.FilenameMap.t -> + include_suppressions:bool -> + ExactCover.lint_severity_cover Utils_js.FilenameMap.t -> + Flow_error.ErrorSet.t * Flow_error.ErrorSet.t * t + +val get_lint_settings : 'a ExactCover.t Utils_js.FilenameMap.t -> Loc.t -> 'a option diff --git a/src/typing/errors/flow_error.ml b/src/typing/errors/flow_error.ml new file mode 100644 index 00000000000..d9795eb1241 --- /dev/null +++ b/src/typing/errors/flow_error.ml @@ -0,0 +1,1009 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open Type +open Utils_js +open Reason +open Error_message + +exception ImproperlyFormattedError of Loc.t Error_message.t' + +type 'loc t = { + loc: 'loc option; + msg: 'loc Error_message.t'; + source_file: File_key.t; + trace_reasons: 'loc Reason.virtual_reason list; +} + +let loc_of_error { loc; _ } = loc + +let msg_of_error { msg; _ } = msg + +let source_file { source_file; _ } = source_file + +let trace_reasons { trace_reasons; _ } = trace_reasons + +let map_loc_of_error f { loc; msg; source_file; trace_reasons } = + { + loc = Option.map ~f loc; + msg = map_loc_of_error_message f msg; + source_file; + trace_reasons = Core_list.map ~f:(Reason.map_reason_locs f) trace_reasons; + } + +let concretize_error lazy_table_of_aloc = + map_loc_of_error (fun aloc -> + let table = lazy_table_of_aloc aloc in + ALoc.to_loc table aloc) + +let kind_of_error err = msg_of_error err |> kind_of_msg + +(* I wish OCaml's scoping for types was better *) +type 'loc err = 'loc t + +module Error (M : Set.OrderedType) : Set.OrderedType with type t = M.t err = struct + type t = M.t err + + let compare = compare +end + +module ErrorSet = Set.Make (Error (ALoc)) +module ConcreteErrorSet = Set.Make (Error (Loc)) + +(* Rank scores for signals of different strength on an x^2 scale so that greater + * signals dominate lesser signals. *) +let reason_score = 100 + +let frame_score = reason_score * 2 + +let type_arg_frame_score = frame_score * 2 + +let tuple_element_frame_score = type_arg_frame_score * 2 + +(* Gets the score of a use_op. Used in score_of_msg. See the comment on + * score_of_msg to learn more about scores. + * + * Calculated by taking the count of all the frames. *) +let score_of_use_op use_op = + let score = + fold_use_op + (* Comparing the scores of use_ops only works when they all have the same + * root_use_op! If two use_ops have different roots, we can't realistically + * compare the number of frames since the basis is completely different. + * + * So we require a Speculation root use_op to be passed into score_of_use_op + * and we perform a structural equality check using that. + * + * Otherwise, the total score from score_of_use_op is -1. This way, errors + * which match Speculation will be promoted. It is more likely the user was + * trying to target these branches. *) + (function + | Type.Speculation _ -> Ok 0 + | _ -> Error (-1)) + (fun acc frame -> + match acc with + | Error _ -> acc + | Ok acc -> + Ok + ( acc + + + match frame with + (* Later params that error get a higher score. This roughly represents how + * much type-checking work Flow successfully completed before erroring. + * Useful for basically only overloaded function error messages. + * + * The signal that this gives us is that we successfully type checked n + * params in the call before erroring. If there was no error, Flow may + * have gone to successfully check another m params. However, we will + * never know that. n is our best approximation. It rewards errors near + * the end of a call and punishes (slightly) errors near the beginning of + * a call. + * + * This, however, turns out to be consistent with code style in modern + * JavaScript. As an unspoken convention, more complex arguments usually + * go last. For overloaded functions, the switching generally happens on + * the first argument. The "tag". This gives us confidence that n on + * FunParam is a good heuristic for the score. + * + * FunRestParam is FunParam, but at the end. So give it a larger score + * then FunParam after adding n. 
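+ *
+ * As a rough worked example using the constants above: FunParam { n; _ }
+ * contributes frame_score + n (e.g. 200 + 3 = 203 when n = 3), while
+ * FunRestParam contributes frame_score + frame_score - 1 = 399, which is why
+ * rest-parameter mismatches rank above positional ones in practice.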
+ * + * We do _not_ add n to the score if this use_op was added to an implicit type parameter. *) + | FunParam { n; _ } -> frame_score + n + | FunRestParam _ -> frame_score + frame_score - 1 + (* FunCompatibility is generally followed by another use_op. So let's not + * count FunCompatibility. *) + | FunCompatibility _ -> 0 + (* FunMissingArg means the error is *less* likely to be correct. *) + | FunMissingArg _ -> 0 + (* Higher signal than PropertyCompatibility, for example. *) + | TypeArgCompatibility _ -> type_arg_frame_score + | ArrayElementCompatibility _ -> type_arg_frame_score + (* Higher signal than TypeArgCompatibility. *) + | TupleElementCompatibility _ -> tuple_element_frame_score + (* ImplicitTypeParam is an internal marker use_op that doesn't get + * rendered in error messages. So it doesn't necessarily signal anything + * about the user's intent. *) + | ImplicitTypeParam -> 0 + | _ -> frame_score )) + use_op + in + match score with + | Ok n -> n + | Error n -> n + +(* Gets the score of an error message. The score is an approximation of how + * close the user was to getting their code right. A higher score means the user + * was closer than a lower score. A score of 0 means we have no signal about + * how close the user was. For example, consider the following two flows: + * + * number ~> {p: string} + * + * {p: number} ~> {p: string} + * + * Clearly, the user was closer to being correct with the second flow. So this + * function should assign the number ~> string error a higher score than the + * number ~> object error. + * + * Now consider: + * + * number ~> string + * + * number ~> {p: string} + * + * This time we kept the lower bound the same and changed the upper bound. The + * first flow this time is closer to the user's intent than the second flow. + * So we give the number ~> string message a higher score than the + * number ~> object message. + * + * This scoring mechanism is useful for union and intersection error messages + * where we want to approximate which branch the user meant to target with + * their code. Branches with higher scores have a higher likelihood of being + * the branch the user was targeting. *) +let score_of_msg msg = + (* Start by getting the score based off the use_op of our error message. If + * the message does not have a use_op then we return 0. This score + * contribution declares that greater complexity in the use is more likely to + * cause a match. *) + let score = util_use_op_of_msg 0 (fun op _ -> score_of_use_op op) msg in + (* Special cases for messages which increment the score. *) + let score = + score + + + match msg with + (* If a property doesn't exist, we still use a PropertyCompatibility use_op. + * This PropertyCompatibility when counted in our score is dishonest since + * a missing prop does not increase the likelihood that the user was close to + * the right types. *) + | EIncompatibleProp { use_op = Some (Frame (PropertyCompatibility _, _)); _ } + | EPropNotFound (_, _, Frame (PropertyCompatibility _, _)) + | EStrictLookupFailed (_, _, _, Some (Frame (PropertyCompatibility _, _))) -> + -frame_score + | _ -> 0 + in + (* If we have two incompatible types and both incompatible types are scalar or + * both types are arrays then increment our score. This is based on the belief + * that the solutions with the lowest possible complexity are closest to each + * other. e.g. number ~> string. If one type is a scalar or array and the + * other type is not then we decrement our score. 
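+ * For instance, given the branches below: number ~> string scores
+ * reason_score (100) because both sides are scalar, number ~> {p: string}
+ * scores 1 because only one side is scalar, and null ~> string scores 0
+ * because exactly one side is nullish.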
*) + let score = + score + + + let reasons = + match msg with + | EIncompatibleDefs { reason_lower = rl; reason_upper = ru; branches = []; use_op = _ } + | EIncompatibleWithUseOp (rl, ru, _) + | EIncompatibleWithExact ((rl, ru), _) -> + Some (rl, ru) + | _ -> None + in + match reasons with + | Some (rl, ru) -> + if is_nullish_reason rl && is_nullish_reason ru then + reason_score + else if + (* T ~> null should have a lower score then T ~> scalar *) + is_nullish_reason rl || is_nullish_reason ru + then + 0 + else if is_scalar_reason rl && is_scalar_reason ru then + reason_score + else if is_scalar_reason rl || is_scalar_reason ru then + 1 + else if is_array_reason rl && is_array_reason ru then + reason_score + else if is_array_reason rl || is_array_reason ru then + 1 + else + reason_score + | None -> reason_score + in + score + +(* Decide reason order based on UB's flavor and blamability. + If the order is unchanged, maintain reference equality. *) +let ordered_reasons ((rl, ru) as reasons) = + if is_blamable_reason ru && not (is_blamable_reason rl) then + (ru, rl) + else + reasons + +let error_of_msg ~trace_reasons ~source_file (msg : ALoc.t Error_message.t') : ALoc.t t = + { loc = aloc_of_msg msg; msg; source_file; trace_reasons } + +let rec make_error_printable lazy_table_of_aloc (error : Loc.t t) : Loc.t Errors.printable_error = + Errors.( + let { + loc : Loc.t option; + msg : Loc.t Error_message.t'; + source_file; + trace_reasons : Loc.t virtual_reason list; + } = + error + in + let kind = kind_of_msg msg in + let mk_info (reason : concrete_reason) extras = + let desc = string_of_desc (desc_of_reason reason) in + (* For descriptions that are an identifier wrapped in primes, e.g. `A`, then + * we want to unwrap the primes and just show A. This looks better in infos. + * However, when an identifier wrapped with primes is inside some other text + * then we want to keep the primes since they help with readability. *) + let desc = + if + String.length desc > 2 + && desc.[0] = '`' + && desc.[String.length desc - 1] = '`' + && not (String.contains desc ' ') + then + String.sub desc 1 (String.length desc - 2) + else + desc + in + (loc_of_reason reason, desc :: extras) + in + let info_of_reason (r : concrete_reason) = mk_info r [] in + let trace_infos = Core_list.map ~f:info_of_reason trace_reasons in + (* Flip the lower/upper reasons of a frame_use_op. *) + let flip_frame = function + | ArrayElementCompatibility c -> + ArrayElementCompatibility { lower = c.upper; upper = c.lower } + | FunCompatibility c -> FunCompatibility { lower = c.upper; upper = c.lower } + | FunParam c -> FunParam { c with lower = c.upper; upper = c.lower } + | FunRestParam c -> FunRestParam { lower = c.upper; upper = c.lower } + | FunReturn c -> FunReturn { lower = c.upper; upper = c.lower } + | IndexerKeyCompatibility c -> IndexerKeyCompatibility { lower = c.upper; upper = c.lower } + | PropertyCompatibility c -> + PropertyCompatibility { c with lower = c.upper; upper = c.lower } + | ReactConfigCheck -> ReactConfigCheck + | TupleElementCompatibility c -> + TupleElementCompatibility { c with lower = c.upper; upper = c.lower } + | TypeArgCompatibility c -> TypeArgCompatibility { c with lower = c.upper; upper = c.lower } + | ( CallFunCompatibility _ | TupleMapFunCompatibility _ | ObjMapFunCompatibility _ + | ObjMapiFunCompatibility _ | TypeParamBound _ | FunMissingArg _ | ImplicitTypeParam + | ReactGetConfig _ | UnifyFlip ) as use_op -> + use_op + in + (* Unification produces two errors. One for both sides. 
For example, + * {p: number} ~> {p: string} errors on both number ~> string and + * string ~> number. Showing both errors to our user is often redundant. + * So we use this utility to flip the string ~> number case and produce an + * error identical to one we've produced before. These two errors will be + * deduped in our PrintableErrorSet. *) + let dedupe_by_flip = + (* Loop over through the use_op chain. *) + let rec loop = function + (* Roots don't flip. *) + | Op _ as use_op -> (false, use_op) + (* Start flipping if we are on the reverse side of unification. *) + | Frame (UnifyFlip, use_op) -> + let (flip, use_op) = loop use_op in + (not flip, use_op) + (* If we are in flip mode then flip our frame. *) + | Frame (frame, use_op) -> + let (flip, use_op) = loop use_op in + if flip then + (true, Frame (flip_frame frame, use_op)) + else + (false, Frame (frame, use_op)) + in + fun (lower, upper) use_op -> + let (flip, use_op) = loop use_op in + if flip then + ((upper, lower), use_op) + else + ((lower, upper), use_op) + in + (* In friendly error messages, we always want to point to a value as the + * primary location. Or an annotation on a value. Normally, values are found + * in the lower bound. However, in contravariant positions this flips. In this + * function we normalize the lower/upper variables in use_ops so that lower + * always points to the value. Example: + * + * ((x: number) => {}: (x: string) => void); + * + * We want to point to number. However, number is in the upper position since + * number => void ~> string => void flips arguments to string ~> number. This + * function flips contravariant positions like function arguments back. *) + let flip_contravariant = + (* Is this frame part of a contravariant position? *) + let is_contravariant = function + | (FunParam _, Frame (FunCompatibility _, _)) -> (true, true) + | (FunRestParam _, Frame (FunCompatibility _, _)) -> (true, true) + | (ReactGetConfig { polarity = Polarity.Negative }, _) -> (true, false) + | (TypeArgCompatibility { polarity = Polarity.Negative; _ }, _) -> (true, false) + | _ -> (false, false) + in + let is_contravariant_root = function + | FunImplicitReturn _ -> true + | _ -> false + in + (* Loop through the use_op and flip the contravariants. *) + let rec loop = function + | Op root_use_op as use_op -> (is_contravariant_root root_use_op, use_op) + (* If the frame is contravariant then flip. *) + | Frame (frame, use_op) -> + let (flip, use_op) = loop use_op in + let (contravariant, flip_self) = is_contravariant (frame, use_op) in + let flip = + if contravariant then + not flip + else + flip + in + let flip_self = flip && ((not contravariant) || flip_self) in + let frame = + if flip_self then + flip_frame frame + else + frame + in + (flip, Frame (frame, use_op)) + in + fun (lower, upper) use_op -> + let (flip, use_op) = loop use_op in + if flip then + ((upper, lower), use_op) + else + ((lower, upper), use_op) + in + let text = Friendly.text in + let code = Friendly.code in + let ref = Friendly.ref in + let desc = Friendly.ref ~loc:false in + (* Unwrap a use_op for the friendly error format. Takes the smallest location + * where we found the error and a use_op which we will unwrap. 
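+ *
+ * Editor's note, an illustrative sketch rather than original documentation:
+ * the loop below threads an accumulator of frames and ultimately yields a
+ * triple of an optional root, the repositioned primary location, and the
+ * collected frame messages, roughly
+ * (Some (root_loc, [text "Cannot call "; desc fn]), loc,
+ * [[text "the "; text (Utils_js.ordinal 1); text " argument"]])
+ * for a failing first argument of a call.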
*) + let unwrap_use_ops = + Friendly.( + let rec loop (loc : Loc.t) frames (use_op : Loc.t virtual_use_op) = + let action = + match use_op with + | Op UnknownUse + | Op (Internal _) -> + `UnknownRoot false + | Op (Type.Speculation _) -> `UnknownRoot true + | Op (ObjectSpread { op }) -> `Root (op, None, [text "Cannot spread "; desc op]) + | Op (ObjectChain { op }) -> + `Root (op, None, [text "Incorrect arguments passed to "; desc op]) + | Op (Addition { op; left; right }) -> + `Root (op, None, [text "Cannot add "; desc left; text " and "; desc right]) + | Op (AssignVar { var; init }) -> + `Root + ( init, + None, + match var with + | Some var -> [text "Cannot assign "; desc init; text " to "; desc var] + | None -> [text "Cannot assign "; desc init; text " to variable"] ) + | Op (DeleteVar { var }) -> `Root (var, None, [text "Cannot delete "; desc var]) + | Op (InitField { op; body }) -> + `Root (op, None, [text "Cannot initialize "; desc op; text " with "; desc body]) + | Op (Cast { lower; upper }) -> + `Root (lower, None, [text "Cannot cast "; desc lower; text " to "; desc upper]) + | Op (ClassExtendsCheck { extends; def; _ }) -> + `Root (def, None, [text "Cannot extend "; ref extends; text " with "; desc def]) + | Op (ClassImplementsCheck { implements; def; _ }) -> + `Root (def, None, [text "Cannot implement "; ref implements; text " with "; desc def]) + | Op (ClassOwnProtoCheck { prop; own_loc; proto_loc }) -> + (match (own_loc, proto_loc) with + | (None, None) -> `UnknownRoot true + | (Some loc, None) -> + let def = mk_reason (RProperty (Some prop)) loc in + `Root (def, None, [text "Cannot shadow proto property"]) + | (None, Some loc) -> + let def = mk_reason (RProperty (Some prop)) loc in + `Root (def, None, [text "Cannot define shadowed proto property"]) + | (Some own_loc, Some proto_loc) -> + let def = mk_reason (RProperty (Some prop)) own_loc in + let proto = mk_reason (RIdentifier prop) proto_loc in + `Root (def, None, [text "Cannot shadow proto property "; ref proto])) + | Op (Coercion { from; target }) -> + `Root (from, None, [text "Cannot coerce "; desc from; text " to "; desc target]) + | Op (FunCall { op; fn; _ }) -> `Root (op, Some fn, [text "Cannot call "; desc fn]) + | Op (FunCallMethod { op; fn; prop; _ }) -> + `Root (op, Some prop, [text "Cannot call "; desc fn]) + | Frame + ( FunParam _, + ( Op (Type.Speculation (Op (FunCall _ | FunCallMethod _ | JSXCreateElement _))) + as use_op ) ) -> + `Next use_op + | Frame + ( FunParam { n; name; lower = lower'; _ }, + Op (FunCall { args; fn; _ } | FunCallMethod { args; fn; _ }) ) -> + let lower = + if List.length args > n - 1 then + List.nth args (n - 1) + else + lower' + in + let param = + match name with + | Some name -> code name + | None -> text (spf "the %s parameter" (Utils_js.ordinal n)) + in + `Root + ( lower, + None, + [ + text "Cannot call "; + desc fn; + text " with "; + desc lower; + text " bound to "; + param; + ] ) + | Op (FunReturnStatement { value }) -> + `Root (value, None, [text "Cannot return "; desc value]) + | Op (FunImplicitReturn { upper; fn }) -> + `Root + ( upper, + None, + [text "Cannot expect "; desc upper; text " as the return type of "; desc fn] ) + | Op (GeneratorYield { value }) -> + `Root (value, None, [text "Cannot yield "; desc value]) + | Op (GetProperty prop) -> `Root (prop, None, [text "Cannot get "; desc prop]) + | Frame (FunParam _, Op (JSXCreateElement { op; component; _ })) + | Op (JSXCreateElement { op; component; _ }) -> + `Root (op, Some component, [text "Cannot create "; desc component; 
text " element"]) + | Op (ReactCreateElementCall { op; component; _ }) -> + `Root (op, Some component, [text "Cannot create "; desc component; text " element"]) + | Op (ReactGetIntrinsic { literal }) -> + `Root (literal, None, [text "Cannot create "; desc literal; text " element"]) + | Op (TypeApplication { type' }) -> + `Root (type', None, [text "Cannot instantiate "; desc type']) + | Op (SetProperty { prop; value; lhs; _ }) -> + let loc_reason = + if Loc.contains (loc_of_reason lhs) loc then + lhs + else + value + in + `Root (loc_reason, None, [text "Cannot assign "; desc value; text " to "; desc prop]) + | Op (DeleteProperty { prop; lhs }) -> + `Root (lhs, None, [text "Cannot delete "; desc prop]) + | Frame (ArrayElementCompatibility { lower; _ }, use_op) -> + `Frame (lower, use_op, [text "array element"]) + | Frame (FunParam { n; lower; _ }, use_op) -> + `Frame (lower, use_op, [text "the "; text (Utils_js.ordinal n); text " argument"]) + | Frame (FunRestParam _, use_op) -> `Next use_op + | Frame (FunReturn { lower; _ }, use_op) -> + `Frame (repos_reason loc lower, use_op, [text "the return value"]) + | Frame (IndexerKeyCompatibility { lower; _ }, use_op) -> + `Frame (lower, use_op, [text "the indexer property's key"]) + | Frame + ( PropertyCompatibility { prop = None | Some "$key" | Some "$value"; lower; _ }, + use_op ) -> + `Frame (lower, use_op, [text "the indexer property"]) + | Frame (PropertyCompatibility { prop = Some "$call"; lower; _ }, use_op) -> + `Frame (lower, use_op, [text "the callable signature"]) + | Frame (PropertyCompatibility { prop = Some prop; lower; _ }, use_op) -> + let repos_small_reason loc reason = function + (* If we are checking class extensions or implementations then the + * object reason will point to the class name. So don't reposition with + * this reason. *) + | Op (ClassExtendsCheck _) -> repos_reason loc reason + | Op (ClassImplementsCheck _) -> repos_reason loc reason + | _ -> reason + in + let lower = repos_small_reason loc lower use_op in + let rec loop lower = function + (* Don't match $key/$value/$call properties since they have special + * meaning. As defined above. *) + | Frame (PropertyCompatibility { prop = Some prop; lower = lower'; _ }, use_op) + when prop <> "$key" && prop <> "$value" && prop <> "$call" -> + let lower' = repos_small_reason (loc_of_reason lower) lower' use_op in + (* Perform the same frame location unwrapping as we do in our + * general code. *) + let lower = + if Loc.contains (loc_of_reason lower') (loc_of_reason lower) then + lower + else + lower' + in + let (lower, props, use_op) = loop lower use_op in + (lower, prop :: props, use_op) + (* Perform standard iteration through these use_ops. *) + | use_op -> (lower, [], use_op) + in + (* Loop through our parent use_op to get our property path. *) + let (lower, props, use_op) = loop lower use_op in + (* Create our final action. *) + `Frame + ( lower, + use_op, + [ + text "property "; + code (List.fold_left (fun acc prop -> prop ^ "." 
^ acc) prop props); + ] ) + | Frame (TupleElementCompatibility { n; lower; _ }, use_op) -> + `Frame (lower, use_op, [text "index "; text (string_of_int (n - 1))]) + | Frame (TypeArgCompatibility { targ; lower; _ }, use_op) -> + `Frame (lower, use_op, [text "type argument "; ref targ]) + | Frame (TypeParamBound { name }, use_op) -> + `FrameWithoutLoc (use_op, [text "type argument "; code name]) + | Frame (FunCompatibility { lower; _ }, use_op) -> `NextWithLoc (lower, use_op) + | Frame (FunMissingArg _, use_op) + | Frame (ImplicitTypeParam, use_op) + | Frame (ReactConfigCheck, use_op) + | Frame (ReactGetConfig _, use_op) + | Frame (UnifyFlip, use_op) + | Frame (CallFunCompatibility _, use_op) + | Frame (TupleMapFunCompatibility _, use_op) + | Frame (ObjMapFunCompatibility _, use_op) + | Frame (ObjMapiFunCompatibility _, use_op) -> + `Next use_op + in + match action with + (* Skip this use_op and go to the next one. *) + | `Next use_op -> loop loc frames use_op + (* Skip this use_op, don't add a frame, but do use the loc to reposition + * our primary location. *) + | `NextWithLoc (frame_reason, use_op) -> + (* If our current loc is inside our frame_loc then use our current loc + * since it is the smallest possible loc in our frame_loc. *) + let frame_loc = loc_of_reason frame_reason in + let loc = + if Loc.contains frame_loc loc then + loc + else + frame_loc + in + loop loc frames use_op + (* Add our frame message and reposition the location if appropriate. *) + | `Frame (frame_reason, use_op, frame) -> + (* If our current loc is inside our frame_loc then use our current loc + * since it is the smallest possible loc in our frame_loc. *) + let frame_loc = loc_of_reason frame_reason in + let frame_contains_loc = Loc.contains frame_loc loc in + let loc = + if frame_contains_loc then + loc + else + frame_loc + in + (* Add our frame and recurse with the next use_op. *) + let (all_frames, local_frames) = frames in + let frames = + ( frame :: all_frames, + if frame_contains_loc then + local_frames + else + frame :: local_frames ) + in + loop loc frames use_op + (* Same logic as `Frame except we don't have a frame location. *) + | `FrameWithoutLoc (use_op, frame) -> + let (all_frames, local_frames) = frames in + let frames = (frame :: all_frames, frame :: local_frames) in + loop loc frames use_op + (* We don't know what our root is! Return what we do know. *) + | `UnknownRoot show_all_frames -> + let (all_frames, local_frames) = frames in + ( None, + loc, + if show_all_frames then + all_frames + else + local_frames ) + (* Finish up be returning our root location, root message, primary loc, + * and frames. *) + | `Root (root_reason, root_specific_reason, root_message) -> + (* If our current loc is inside our root_loc then use our current loc + * since it is the smallest possible loc in our root_loc. *) + let root_loc = loc_of_reason root_reason in + let root_specific_loc = Option.map root_specific_reason loc_of_reason in + let loc = + if Loc.contains root_loc loc && Loc.compare root_loc loc <> 0 then + loc + else + Option.value root_specific_loc ~default:root_loc + in + (* Return our root loc and message in addition to the true primary loc + * and frames. 
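+ *
+ * Editor's note, not part of the original comment: the wrapper a few lines
+ * below appends text " because" to whatever root message was built here, so
+ * the frames and the main error message read as a continuation of the root
+ * sentence.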
*) + let (all_frames, _) = frames in + (Some (root_loc, root_message), loc, all_frames) + in + fun (loc : Loc.t) (use_op : Loc.t virtual_use_op) -> + let (root, loc, frames) = loop loc ([], []) use_op in + let root = + Option.map root (fun (root_loc, root_message) -> + (root_loc, root_message @ [text " because"])) + in + (root, loc, frames)) + in + (* Make a friendly error based on a use_op. The message we are provided should + * not have any punctuation. Punctuation will be provided after the frames of + * an error message. *) + let mk_use_op_error (loc : Loc.t) (use_op : Loc.t virtual_use_op) message = + let (root, loc, frames) = unwrap_use_ops loc use_op in + mk_error ~trace_infos ?root ~frames loc message + in + (* Make a friendly error based on failed speculation. *) + let mk_use_op_speculation_error (loc : Loc.t) (use_op : Loc.t virtual_use_op) branches = + let (root, loc, frames) = unwrap_use_ops loc use_op in + let speculation_errors = + Core_list.map + ~f:(fun (_, (msg : Error_message.t)) -> + let score = score_of_msg msg in + let error = + error_of_msg ~trace_reasons:[] ~source_file msg + |> concretize_error lazy_table_of_aloc + |> make_error_printable lazy_table_of_aloc + in + (score, error)) + branches + in + mk_speculation_error ~kind:InferError ~trace_infos ~loc ~root ~frames ~speculation_errors + in + (* An error between two incompatible types. A "lower" type and an "upper" + * type. The use_op describes the path which we followed to find + * this incompatibility. + * + * This is a specialization of mk_incompatible_use_error. *) + let mk_incompatible_error lower upper use_op = + let ((lower, upper), use_op) = dedupe_by_flip (lower, upper) use_op in + let ((lower, upper), use_op) = flip_contravariant (lower, upper) use_op in + match use_op with + (* Add a custom message for Coercion root_use_ops that does not include the + * upper bound. *) + | Op (Coercion { from; _ }) -> + mk_use_op_error (loc_of_reason from) use_op [ref lower; text " should not be coerced"] + (* Ending with FunMissingArg gives us a different error message. Even though + * this error was generated by an incompatibility, we want to show a more + * descriptive error message. *) + | Frame (FunMissingArg { def; op; _ }, use_op) -> + let message = + match use_op with + | Op (FunCall _ | FunCallMethod _) -> + let def = + update_desc_reason + (function + | RFunctionType -> RFunction RNormal + | desc -> desc) + def + in + [ref def; text " requires another argument"] + | Frame (CallFunCompatibility { n }, _) -> + let exp = + if n = 1 then + "one argument" + else + string_of_int n ^ " arguments" + in + [ + ref op; + text (spf " passes only %s to the provided function type, but " exp); + ref def; + text (spf " expects more than %s. See " exp); + text Friendly.(docs.call); + text " for documentation"; + ] + | Frame (TupleMapFunCompatibility { value }, _) -> + [ + ref op; + text " expects the provided function type to take only one argument, the value type "; + ref value; + text ", but "; + ref def; + text " takes more than one argument. See "; + text Friendly.(docs.tuplemap); + text " for documentation"; + ] + | Frame (ObjMapFunCompatibility { value }, _) -> + [ + ref op; + text " expects the provided function type to take only one argument, the value type "; + ref value; + text ", but "; + ref def; + text " takes more than one argument. 
See "; + text Friendly.(docs.objmap); + text " for documentation"; + ] + | Frame (ObjMapiFunCompatibility { key; value }, _) -> + [ + ref op; + text " expects the provided function type to take only two arguments, the key type "; + ref key; + text " and the value type "; + ref value; + text ", but "; + ref def; + text " takes more than two arguments. See "; + text Friendly.(docs.objmapi); + text " for documentation"; + ] + | _ -> [ref def; text " requires another argument from "; ref op] + in + mk_use_op_error (loc_of_reason op) use_op message + | _ -> + let root_use_op = root_of_use_op use_op in + (match root_use_op with + (* Further customize functions with an implicit return. Functions with an + * implicit return have a lower position which is not valuable. Also + * clarify that the type was implicitly-returned. + * + * In flip_contravariant we flip upper/lower for all FunImplicitReturn. So + * reverse those back as well. *) + | FunImplicitReturn { upper = return; _ } -> + mk_use_op_error + (loc_of_reason lower) + use_op + ( [ref lower; text " is incompatible with "] + @ + if Loc.compare (loc_of_reason return) (loc_of_reason upper) = 0 then + [text "implicitly-returned "; desc upper] + else + [ref upper] ) + (* Default incompatibility. *) + | _ -> + begin + match (desc_of_reason lower, desc_of_reason upper) with + | (RPolyTest _, RPolyTest _) when loc_of_reason lower = loc_of_reason upper -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ + text "the expected type is not parametric in "; + ref upper; + text ", perhaps due to the use of "; + code "*"; + text " or the lack of a type annotation"; + ] + | (RLongStringLit n, RStringLit _) -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ + ref lower; + text " is incompatible with "; + ref upper; + text " because strings longer than "; + code (string_of_int n); + text " characters are not treated as literals"; + ] + | _ -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ref lower; text " is incompatible with "; ref upper] + end) + in + let mk_trust_incompatible_error lower upper use_op = + match (desc_of_reason lower, desc_of_reason upper) with + | ((RAnyExplicit | RAnyImplicit), (RTrusted _ | RPrivate (RTrusted _))) + | ((RPrivate _ | RTrusted (RPrivate _)), (RAnyExplicit | RAnyImplicit)) -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ref lower; text " is incompatible with "; ref upper] + | ((RAnyExplicit | RAnyImplicit), _) -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ref lower; text " is incompatible with trusted "; ref upper] + | (_, (RAnyExplicit | RAnyImplicit)) -> + mk_use_op_error + (loc_of_reason lower) + use_op + [text "private "; ref lower; text " is incompatible with "; ref upper] + | (RPrivate _, RTrusted _) -> + mk_use_op_error + (loc_of_reason lower) + use_op + ( [ + text "`any` may have been passed into "; + ref lower; + text " and `any` is incompatible with "; + ref upper; + text ", and "; + ] + @ [ + ref upper; + text " may be passed into `any` and "; + ref lower; + text " is incompatible with `any`"; + ] ) + | (_, (RTrusted _ | RPrivate (RTrusted _))) -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ + text "`any` may have been passed into "; + ref lower; + text " and `any` is incompatible with "; + ref upper; + ] + | ((RPrivate _ | RTrusted (RPrivate _)), _) -> + mk_use_op_error + (loc_of_reason lower) + use_op + [ + ref upper; + text " may be passed into `any` and "; + ref lower; + text " is incompatible with `any`"; + ] + | _ -> + mk_use_op_error + (loc_of_reason lower) 
+ use_op
+ [ref lower; text " is incompatible with "; ref upper]
+ in
+ (* When we fail to find a property on an object we use this function to create
+ * an error. prop_loc should be the position of the use which caused this
+ * error. The use_op represents how we got to this error.
+ *
+ * If the use_op is a PropertyCompatibility frame then we encountered this
+ * error while subtyping two objects. In this case we add a bit more
+ * information to the error message. *)
+ let mk_prop_missing_error prop_loc prop lower use_op =
+ let (loc, lower, upper, use_op) =
+ match use_op with
+ (* If we are missing a property while performing property compatibility
+ * then we are subtyping. Record the upper reason. *)
+ | Frame (PropertyCompatibility { prop = compat_prop; lower; upper; _ }, use_op)
+ when prop = compat_prop ->
+ (loc_of_reason lower, lower, Some upper, use_op)
+ (* Otherwise this is a general property missing error. *)
+ | _ -> (prop_loc, lower, None, use_op)
+ in
+ (* If we were subtyping, add to the error message so our user knows which
+ * object required the missing property. *)
+ let prop_message = mk_prop_message prop in
+ let message =
+ match upper with
+ | Some upper ->
+ prop_message @ [text " is missing in "; ref lower; text " but exists in "] @ [ref upper]
+ | None -> prop_message @ [text " is missing in "; ref lower]
+ in
+ (* Finally, create our error message. *)
+ mk_use_op_error loc use_op message
+ in
+ (* An error that occurs when some arbitrary "use" is incompatible with the
+ * "lower" type. The use_op describes the path which we followed to find this
+ * incompatibility.
+ *
+ * Similar to mk_incompatible_error except with any arbitrary *use*
+ * instead of specifically an upper type. This error handles all use
+ * incompatibilities in general.
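+ *
+ * Editor's illustrative note, not part of the original comment: for example,
+ * an IncompatibleCallT use against a number-typed lower bound goes through
+ * the nope helper defined below and produces a message shaped like
+ * [ref lower; text " is not a function"], while the property-flavored use
+ * kinds are forwarded to mk_prop_missing_error above.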
+ *)
+ let mk_incompatible_use_error use_loc use_kind lower upper use_op =
+ let nope msg = mk_use_op_error use_loc use_op [ref lower; text (" " ^ msg)] in
+ match use_kind with
+ | IncompatibleElemTOfArrT -> nope "is not an array index"
+ | IncompatibleGetPrivatePropT
+ | IncompatibleSetPrivatePropT ->
+ nope "is not a class with private properties"
+ | IncompatibleMixedCallT ->
+ mk_use_op_error
+ use_loc
+ use_op
+ [text "the parameter types of an "; ref lower; text " are unknown"]
+ | IncompatibleCallT
+ | IncompatibleConstructorT ->
+ nope "is not a function"
+ | IncompatibleObjAssignFromTSpread
+ | IncompatibleArrRestT ->
+ nope "is not an array"
+ | IncompatibleObjAssignFromT
+ | IncompatibleObjRestT
+ | IncompatibleObjSealT
+ | IncompatibleGetKeysT
+ | IncompatibleGetValuesT ->
+ nope "is not an object"
+ | IncompatibleMapTypeTObject ->
+ mk_use_op_error use_loc use_op [ref lower; text " is not a valid argument of "; ref upper]
+ | IncompatibleMixinT
+ | IncompatibleThisSpecializeT ->
+ nope "is not a class"
+ | IncompatibleSpecializeT
+ | IncompatibleVarianceCheckT
+ | IncompatibleTypeAppVarianceCheckT ->
+ nope "is not a polymorphic type"
+ | IncompatibleSuperT -> nope "is not inheritable"
+ | IncompatibleUnaryMinusT -> nope "is not a number"
+ | IncompatibleGetPropT (prop_loc, prop)
+ | IncompatibleSetPropT (prop_loc, prop)
+ | IncompatibleMatchPropT (prop_loc, prop)
+ | IncompatibleHasOwnPropT (prop_loc, prop)
+ | IncompatibleMethodT (prop_loc, prop) ->
+ mk_prop_missing_error prop_loc prop lower use_op
+ | IncompatibleGetElemT prop_loc
+ | IncompatibleSetElemT prop_loc
+ | IncompatibleCallElemT prop_loc ->
+ mk_prop_missing_error prop_loc None lower use_op
+ | IncompatibleGetStaticsT -> nope "is not an instance type"
+ (* unreachable or unclassified use-types. until we have a mechanical way
+ to verify that all legit use types are listed above, we can't afford
+ to throw on a use type, so mark the error instead *)
+ | IncompatibleUnclassified ctor -> nope (spf "is not supported by unclassified use %s" ctor)
+ in
+ (* When an object property has a polarity that is incompatible with another
+ * property's polarity, we create one of these errors. We use terms like "read-only" and
+ * "write-only" to better reflect how the user thinks about these properties.
+ * Other terminology could include "contravariant", "covariant", and
+ * "invariant". Generally these terms are impenetrable to the average
+ * JavaScript developer. If we had more documentation explaining these terms
+ * it may be fair to use them in error messages. *)
+ let mk_prop_polarity_mismatch_error prop (lower, lpole) (upper, upole) use_op =
+ (* Remove redundant PropertyCompatibility if one exists.
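+ * Editor's note, an interpretation rather than original documentation: the
+ * message built below already names the property via mk_prop_message, so a
+ * matching PropertyCompatibility frame would only repeat the same property
+ * in the rendered frame list.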
*) + let use_op = + match use_op with + | Frame (PropertyCompatibility c, use_op) when c.prop = prop -> use_op + | _ -> use_op + in + let expected = + match lpole with + | Polarity.Positive -> "read-only" + | Polarity.Negative -> "write-only" + | Polarity.Neutral -> + (match upole with + | Polarity.Negative -> "readable" + | Polarity.Positive -> "writable" + | Polarity.Neutral -> failwith "unreachable") + in + let actual = + match upole with + | Polarity.Positive -> "read-only" + | Polarity.Negative -> "write-only" + | Polarity.Neutral -> + (match lpole with + | Polarity.Negative -> "readable" + | Polarity.Positive -> "writable" + | Polarity.Neutral -> failwith "unreachable") + in + mk_use_op_error + (loc_of_reason lower) + use_op + ( mk_prop_message prop + @ [text (" is " ^ expected ^ " in "); ref lower; text " but "] + @ [text (actual ^ " in "); ref upper] ) + in + match (loc, friendly_message_of_msg msg) with + | (Some loc, Error_message.Normal msg) -> mk_error ~trace_infos ~kind loc msg + | (None, UseOp (loc, text, use_op)) -> mk_use_op_error loc use_op text + | (None, PropMissing (prop_loc, prop, lower, use_op)) -> + mk_prop_missing_error prop_loc prop lower use_op + | (None, PropPolarityMismatch (x, p1, p2, use_op)) -> + mk_prop_polarity_mismatch_error x p1 p2 use_op + | (None, IncompatibleUse (loc, use_kind, lower, upper, use_op)) -> + mk_incompatible_use_error loc use_kind lower upper use_op + | (None, Incompatible (lower, upper, use_op)) -> mk_incompatible_error lower upper use_op + | (None, IncompatibleTrust (lower, upper, use_op)) -> + mk_trust_incompatible_error lower upper use_op + | (None, Error_message.Speculation (loc, use_op, branches)) -> + mk_use_op_speculation_error loc use_op branches + | (None, Error_message.Normal _) + | (Some _, _) -> + raise (ImproperlyFormattedError msg)) + +let make_errors_printable lazy_table_of_aloc set = + Errors.( + ErrorSet.fold + ( concretize_error lazy_table_of_aloc + %> make_error_printable lazy_table_of_aloc + %> ConcreteLocPrintableErrorSet.add ) + set + ConcreteLocPrintableErrorSet.empty) diff --git a/src/typing/errors/flow_error.mli b/src/typing/errors/flow_error.mli new file mode 100644 index 00000000000..1711b251cf1 --- /dev/null +++ b/src/typing/errors/flow_error.mli @@ -0,0 +1,36 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type 'loc t + +val loc_of_error : 'loc t -> 'loc option + +val msg_of_error : 'loc t -> 'loc Error_message.t' + +val source_file : 'loc t -> File_key.t + +val trace_reasons : 'loc t -> 'loc Reason.virtual_reason list + +val kind_of_error : 'loc t -> Errors.error_kind + +val error_of_msg : + trace_reasons:Reason.t list -> source_file:File_key.t -> Error_message.t -> ALoc.t t + +val make_error_printable : (ALoc.t -> ALoc.table Lazy.t) -> Loc.t t -> Loc.t Errors.printable_error + +val ordered_reasons : Reason.t * Reason.t -> Reason.t * Reason.t + +module ErrorSet : Set.S with type elt = ALoc.t t + +module ConcreteErrorSet : Set.S with type elt = Loc.t t + +val make_errors_printable : + (ALoc.t -> ALoc.table Lazy.t) -> ErrorSet.t -> Errors.ConcreteLocPrintableErrorSet.t + +val map_loc_of_error : ('a -> 'b) -> 'a t -> 'b t + +val concretize_error : (ALoc.t -> ALoc.table Lazy.t) -> ALoc.t t -> Loc.t t diff --git a/src/typing/existsCheck.ml b/src/typing/existsCheck.ml index ffbf0bfd960..7d6847ff5fe 100644 --- a/src/typing/existsCheck.ml +++ b/src/typing/existsCheck.ml @@ -1,30 +1,25 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type t = { - null_loc: Loc.t option; - bool_loc: Loc.t option; - string_loc: Loc.t option; - number_loc: Loc.t option; - mixed_loc: Loc.t option; + null_loc: ALoc.t option; + bool_loc: ALoc.t option; + string_loc: ALoc.t option; + number_loc: ALoc.t option; + mixed_loc: ALoc.t option; } -let empty = { - null_loc = None; - bool_loc = None; - string_loc = None; - number_loc = None; - mixed_loc = None; -} +let empty = + { null_loc = None; bool_loc = None; string_loc = None; number_loc = None; mixed_loc = None } -let to_string t = +let debug_to_string t = let string_of_loc_option = function | None -> "None" - | Some loc -> Loc.to_string ~include_source:true loc + | Some loc -> ALoc.debug_to_string ~include_source:true loc in [ ("null_loc", t.null_loc); @@ -33,7 +28,7 @@ let to_string t = ("number_loc", t.number_loc); ("mixed_loc", t.mixed_loc); ] - |> List.map (fun (name, loc_opt) -> (name, string_of_loc_option loc_opt)) - |> List.map (fun (name, loc) -> Printf.sprintf " %s: %s;\n" name loc) + |> Core_list.map ~f:(fun (name, loc_opt) -> (name, string_of_loc_option loc_opt)) + |> Core_list.map ~f:(fun (name, loc) -> Printf.sprintf " %s: %s;\n" name loc) |> String.concat "" |> Printf.sprintf "{\n%s}" diff --git a/src/typing/flow_common.ml b/src/typing/flow_common.ml new file mode 100644 index 00000000000..3f0051ab691 --- /dev/null +++ b/src/typing/flow_common.ml @@ -0,0 +1,135 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open Reason +open Type + +(* This is here instead of assert_ground.ml to avoid the duplication of the enforce strict + * signature *) +module type ASSERT_GROUND = sig + val enforce_strict : Context.t -> Type.t -> should_munge_underscores:bool -> unit +end + +module type TRUST_CHECKING = sig + val trust_flow_to_use_t : Context.t -> Trace.t -> Type.t -> Type.use_t -> unit + + val trust_flow : Context.t -> Trace.t -> Type.use_op -> Type.t -> Type.t -> unit + + val mk_trust_var : Context.t -> ?initial:Trust.trust_qualifier -> unit -> Type.ident + + val strengthen_trust : + Context.t -> Type.ident -> Trust.trust_qualifier -> Error_message.t -> unit +end + +module type S = sig + val add_output : Context.t -> ?trace:Trace.t -> Error_message.t -> unit + + val check_polarity : Context.t -> ?trace:Trace.t -> Polarity.t -> Type.t -> unit + + val eval_evalt : Context.t -> ?trace:Trace.t -> Type.t -> Type.defer_use_t -> int -> Type.t + + val eval_selector : + Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.selector -> Type.t -> unit + + val filter_maybe : Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.t + + val filter_optional : Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.t + + val flow : Context.t -> Type.t * Type.use_t -> unit + + val flow_p : + Context.t -> + ?use_op:use_op -> + reason -> + reason -> + Type.propref -> + Type.property * Type.property -> + unit + + val flow_t : Context.t -> Type.t * Type.t -> unit + + val generate_tests : Context.t -> Type.typeparam list -> (Type.t SMap.t -> 'a) -> 'a + + val get_builtin : Context.t -> ?trace:Trace.t -> string -> reason -> Type.t + + val get_builtin_type : + Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> string -> Type.t + + val get_builtin_typeapp : + Context.t -> ?trace:Trace.t -> reason -> string -> Type.t list -> Type.t + + val is_munged_prop_name : Context.t -> name -> bool + + val is_munged_prop_name_with_munge : name -> should_munge_underscores:bool -> bool + + val lookup_builtin : + Context.t -> ?trace:Trace.t -> string -> reason -> Type.lookup_kind -> Type.t -> unit + + val match_this_binding : Type.t SMap.t -> (Type.t -> bool) -> bool + + val mk_instance : Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t + + val mk_typeof_annotation : + Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t + + val mk_type_destructor : + Context.t -> + trace:Trace.t -> + use_op -> + reason -> + Type.t -> + Type.destructor -> + int -> + bool * Type.t + + val reposition : + Context.t -> + ?trace:Trace.t -> + ALoc.t -> + ?desc:reason_desc -> + ?annot_loc:ALoc.t -> + Type.t -> + Type.t + + val rec_flow : Context.t -> Trace.t -> Type.t * Type.use_t -> unit + + val rec_flow_t : Context.t -> Trace.t -> ?use_op:Type.use_op -> Type.t * Type.t -> unit + + val rec_unify : + Context.t -> Trace.t -> use_op:Type.use_op -> ?unify_any:bool -> Type.t -> Type.t -> unit + + val resolve_spread_list : + Context.t -> + use_op:use_op -> + reason_op:reason -> + unresolved_param list -> + spread_resolve -> + unit + + val set_builtin : Context.t -> ?trace:Trace.t -> string -> Type.t -> unit + + val string_key : string -> reason -> Type.t + + val tvar_with_constraint : Context.t -> ?trace:Trace.t -> ?derivable:bool -> Type.use_t -> Type.t + + val unify : Context.t -> Type.t -> Type.t -> unit + + val unify_opt : + Context.t -> + ?trace:Trace.t -> + ?use_op:Type.use_op -> + ?unify_any:bool -> + Type.t -> + Type.t -> + unit + + val union_of_ts : reason -> Type.t list -> Type.t + + include 
ASSERT_GROUND + + include TRUST_CHECKING +end diff --git a/src/typing/flow_error.ml b/src/typing/flow_error.ml deleted file mode 100644 index 11f6ca9a090..00000000000 --- a/src/typing/flow_error.ml +++ /dev/null @@ -1,2158 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Type -open Utils_js -open Reason - -exception EDebugThrow of Loc.t -exception EMergeTimeout of float - -type invalid_char_set = - | DuplicateChar of Char.t - | InvalidChar of Char.t - -module InvalidCharSetSet = Set.Make(struct - type t = invalid_char_set - let compare = Pervasives.compare -end) - -type error_message = - | EIncompatible of { - lower: reason * lower_kind option; - upper: reason * upper_kind; - use_op: use_op option; - branches: (Reason.t * error_message) list; - } - | EIncompatibleDefs of { - use_op: use_op; - reason_lower: reason; - reason_upper: reason; - branches: (Reason.t * error_message) list; - } - | EIncompatibleProp of { - prop: string option; - reason_prop: reason; - reason_obj: reason; - special: lower_kind option; - use_op: use_op option; - } - | EDebugPrint of reason * string - | EImportValueAsType of reason * string - | EImportTypeAsTypeof of reason * string - | EImportTypeAsValue of reason * string - | ERefineAsValue of reason * string - | ENoDefaultExport of reason * string * string option - | EOnlyDefaultExport of reason * string * string - | ENoNamedExport of reason * string * string * string option - | EMissingTypeArgs of { reason_tapp: reason; reason_arity: reason; min_arity: int; max_arity: int } - | EValueUsedAsType of (reason * reason) - | EExpectedStringLit of (reason * reason) * string * string Type.literal * use_op - | EExpectedNumberLit of - (reason * reason) * - Type.number_literal * - Type.number_literal Type.literal * - use_op - | EExpectedBooleanLit of (reason * reason) * bool * bool option * use_op - | EPropNotFound of string option * (reason * reason) * use_op - | EPropAccess of (reason * reason) * string option * Type.polarity * Type.rw * use_op - | EPropPolarityMismatch of (reason * reason) * string option * (Type.polarity * Type.polarity) * use_op - | EPolarityMismatch of { - reason: reason; - name: string; - expected_polarity: Type.polarity; - actual_polarity: Type.polarity; - } - | EStrictLookupFailed of (reason * reason) * reason * string option * use_op option - | EPrivateLookupFailed of (reason * reason) * string * use_op - | EAdditionMixed of reason * use_op - | EComparison of (reason * reason) - | ETupleArityMismatch of (reason * reason) * int * int * use_op - | ENonLitArrayToTuple of (reason * reason) * use_op - | ETupleOutOfBounds of (reason * reason) * int * int * use_op - | ETupleUnsafeWrite of (reason * reason) * use_op - | EUnionSpeculationFailed of { - use_op: use_op; - reason: reason; - reason_op: reason; - branches: (reason * error_message) list; - } - | ESpeculationAmbiguous of (reason * reason) * (int * reason) * (int * reason) * reason list - | EIncompatibleWithExact of (reason * reason) * use_op - | EUnsupportedExact of (reason * reason) - | EIdxArity of reason - | EIdxUse1 of reason - | EIdxUse2 of reason - | EUnexpectedThisType of Loc.t - | ETypeParamArity of Loc.t * int - | ECallTypeArity of { - call_loc: Loc.t; - is_new: bool; - reason_arity: reason; - expected_arity: int; - } - | ETypeParamMinArity of Loc.t * int - | ETooManyTypeArgs of reason * reason * int - | ETooFewTypeArgs of reason * 
reason * int - | EInvalidTypeArgs of reason * reason - | EPropertyTypeAnnot of Loc.t - | EExportsAnnot of Loc.t - | ECharSetAnnot of Loc.t - | EInvalidCharSet of { invalid: reason * InvalidCharSetSet.t; valid: reason; use_op: use_op } - | EUnsupportedKeyInObjectType of Loc.t - | EPredAnnot of Loc.t - | ERefineAnnot of Loc.t - | EUnexpectedTypeof of Loc.t - | EFunPredCustom of (reason * reason) * string - | EFunctionIncompatibleWithShape of reason * reason * use_op - | EInternal of Loc.t * internal_error - | EUnsupportedSyntax of Loc.t * unsupported_syntax - | EUseArrayLiteral of Loc.t - | EMissingAnnotation of reason * reason list - | EBindingError of binding_error * Loc.t * string * Scope.Entry.t - | ERecursionLimit of (reason * reason) - | EModuleOutsideRoot of Loc.t * string - | EExperimentalDecorators of Loc.t - | EExperimentalClassProperties of Loc.t * bool - | EUnsafeGetSet of Loc.t - | EExperimentalExportStarAs of Loc.t - | EIndeterminateModuleType of Loc.t - | EBadExportPosition of Loc.t - | EBadExportContext of string * Loc.t - | EUnreachable of Loc.t - | EInvalidObjectKit of { tool: Object.tool; reason: reason; reason_op: reason; use_op: use_op } - | EInvalidTypeof of Loc.t * string - | EBinaryInLHS of reason - | EBinaryInRHS of reason - | EArithmeticOperand of reason - | EForInRHS of reason - | EObjectComputedPropertyAccess of (reason * reason) - | EObjectComputedPropertyAssign of (reason * reason) - | EInvalidLHSInAssignment of Loc.t - | EIncompatibleWithUseOp of reason * reason * use_op - | EUnsupportedImplements of reason - | EReactKit of (reason * reason) * React.tool * use_op - | EReactElementFunArity of reason * string * int - | EFunctionCallExtraArg of reason * reason * int * use_op - | EUnsupportedSetProto of reason - | EDuplicateModuleProvider of { - module_name: string; - provider: File_key.t; - conflict: File_key.t - } - | EParseError of Loc.t * Parse_error.t - | EDocblockError of Loc.t * docblock_error - (* The string is either the name of a module or "the module that exports `_`". 
*) - | EUntypedTypeImport of Loc.t * string - | EUntypedImport of Loc.t * string - | ENonstrictImport of Loc.t - | EUnclearType of Loc.t - | EDeprecatedType of Loc.t - | EUnsafeGettersSetters of Loc.t - | EUnusedSuppression of Loc.t - | ELintSetting of LintSettings.lint_parse_error - | ESketchyNullLint of { - kind: Lints.sketchy_null_kind; - loc: Loc.t; - null_loc: Loc.t; - falsy_loc: Loc.t; - } - | ESketchyNumberLint of Lints.sketchy_number_kind * reason - | EInvalidPrototype of reason - | EExperimentalOptionalChaining of Loc.t - | EOptionalChainingMethods of Loc.t - | EUnnecessaryOptionalChain of Loc.t * reason - | EUnnecessaryInvariant of Loc.t * reason - | EInexactSpread of reason * reason - | EDeprecatedCallSyntax of Loc.t - | ESignatureVerification of Signature_builder_deps.Error.t - -and binding_error = - | ENameAlreadyBound - | EReferencedBeforeDeclaration - | ETypeInValuePosition - | ETypeAliasInValuePosition - | EConstReassigned - | EConstParamReassigned - | EImportReassigned - -and docblock_error = - | MultipleFlowAttributes - | MultipleProvidesModuleAttributes - | MultipleJSXAttributes - | InvalidJSXAttribute of string option - -and internal_error = - | PackageHeapNotFound of string - | AbnormalControlFlow - | MethodNotAFunction - | OptionalMethod - | OpenPredWithoutSubst - | PredFunWithoutParamNames - | UnsupportedGuardPredicate of string - | BreakEnvMissingForCase - | PropertyDescriptorPropertyCannotBeRead - | ForInLHS - | ForOfLHS - | InstanceLookupComputed - | PropRefComputedOpen - | PropRefComputedLiteral - | ShadowReadComputed - | ShadowWriteComputed - | RestParameterNotIdentifierPattern - | InterfaceTypeSpread - | DebugThrow - | MergeTimeout of float - | MergeJobException of exn - | UnexpectedTypeapp of string - -and unsupported_syntax = - | ComprehensionExpression - | GeneratorExpression - | MetaPropertyExpression - | ObjectPropertyLiteralNonString - | ObjectPropertyGetSet - | ObjectPropertyComputedGetSet - | InvariantSpreadArgument - | ClassPropertyLiteral - | ClassPropertyComputed - | ReactCreateClassPropertyNonInit - | RequireDynamicArgument - | RequireLazyDynamicArgument - | CatchParameterAnnotation - | CatchParameterDeclaration - | DestructuringObjectPropertyLiteralNonString - | DestructuringExpressionPattern - | PredicateDeclarationForImplementation - | PredicateDeclarationWithoutExpression - | PredicateDeclarationAnonymousParameters - | PredicateInvalidBody - | PredicateVoidReturn - | MultipleIndexers - | SpreadArgument - | ImportDynamicArgument - | IllegalName - | UnsupportedInternalSlot of { name: string; static: bool } - -and lower_kind = - | Possibly_null - | Possibly_void - | Possibly_null_or_void - | Incompatible_intersection - -and upper_kind = - | IncompatibleGetPropT of Loc.t * string option - | IncompatibleSetPropT of Loc.t * string option - | IncompatibleMatchPropT of Loc.t * string option - | IncompatibleGetPrivatePropT - | IncompatibleSetPrivatePropT - | IncompatibleMethodT of Loc.t * string option - | IncompatibleCallT - | IncompatibleConstructorT - | IncompatibleGetElemT of Loc.t - | IncompatibleSetElemT of Loc.t - | IncompatibleCallElemT of Loc.t - | IncompatibleElemTOfArrT - | IncompatibleObjAssignFromTSpread - | IncompatibleObjAssignFromT - | IncompatibleObjRestT - | IncompatibleObjSealT - | IncompatibleArrRestT - | IncompatibleSuperT - | IncompatibleMixinT - | IncompatibleSpecializeT - | IncompatibleThisSpecializeT - | IncompatibleVarianceCheckT - | IncompatibleGetKeysT - | IncompatibleHasOwnPropT of Loc.t * string option - | 
IncompatibleGetValuesT - | IncompatibleUnaryMinusT - | IncompatibleMapTypeTObject - | IncompatibleTypeAppVarianceCheckT - | IncompatibleGetStaticsT - | IncompatibleUnclassified of string - -let desc_of_reason r = Reason.desc_of_reason ~unwrap:(is_scalar_reason r) r - -(* A utility function for getting and updating the use_op in error messages. *) -let util_use_op_of_msg nope util = function -| EIncompatible {use_op; lower; upper; branches} -> - Option.value_map use_op ~default:nope ~f:(fun use_op -> - util use_op (fun use_op -> - EIncompatible {use_op=Some use_op; lower; upper; branches})) -| EIncompatibleDefs {use_op; reason_lower; reason_upper; branches} -> - util use_op (fun use_op -> - EIncompatibleDefs {use_op; reason_lower; reason_upper; branches}) -| EIncompatibleProp {use_op; prop; reason_prop; reason_obj; special} -> - Option.value_map use_op ~default:nope ~f:(fun use_op -> - util use_op (fun use_op -> - EIncompatibleProp {use_op=Some use_op; prop; reason_prop; reason_obj; special})) -| EExpectedStringLit (rs, u, l, op) -> util op (fun op -> EExpectedStringLit (rs, u, l, op)) -| EExpectedNumberLit (rs, u, l, op) -> util op (fun op -> EExpectedNumberLit (rs, u, l, op)) -| EExpectedBooleanLit (rs, u, l, op) -> util op (fun op -> EExpectedBooleanLit (rs, u, l, op)) -| EPropNotFound (prop, rs, op) -> util op (fun op -> EPropNotFound (prop, rs, op)) -| EPropAccess (rs, prop, p, rw, op) -> util op (fun op -> EPropAccess (rs, prop, p, rw, op)) -| EPropPolarityMismatch (rs, p, ps, op) -> util op (fun op -> EPropPolarityMismatch (rs, p, ps, op)) -| EStrictLookupFailed (rs, r, p, Some op) -> - util op (fun op -> EStrictLookupFailed (rs, r, p, Some op)) -| EPrivateLookupFailed (rs, x, op) -> util op (fun op -> EPrivateLookupFailed (rs, x, op)) -| EAdditionMixed (r, op) -> util op (fun op -> EAdditionMixed (r, op)) -| ETupleArityMismatch (rs, x, y, op) -> util op (fun op -> ETupleArityMismatch (rs, x, y, op)) -| ENonLitArrayToTuple (rs, op) -> util op (fun op -> ENonLitArrayToTuple (rs, op)) -| ETupleOutOfBounds (rs, l, i, op) -> util op (fun op -> ETupleOutOfBounds (rs, l, i, op)) -| ETupleUnsafeWrite (rs, op) -> util op (fun op -> ETupleUnsafeWrite (rs, op)) -| EUnionSpeculationFailed {use_op; reason; reason_op; branches} -> - util use_op (fun use_op -> EUnionSpeculationFailed {use_op; reason; reason_op; branches}) -| EIncompatibleWithExact (rs, op) -> util op (fun op -> EIncompatibleWithExact (rs, op)) -| EInvalidCharSet {invalid; valid; use_op} -> - util use_op (fun use_op -> EInvalidCharSet {invalid; valid; use_op}) -| EFunctionIncompatibleWithShape (l, u, use_op) -> - util use_op (fun use_op -> EFunctionIncompatibleWithShape (l, u, use_op)) -| EInvalidObjectKit {tool; reason; reason_op; use_op} -> - util use_op (fun use_op -> EInvalidObjectKit {tool; reason; reason_op; use_op}) -| EIncompatibleWithUseOp (rl, ru, op) -> util op (fun op -> EIncompatibleWithUseOp (rl, ru, op)) -| EReactKit (rs, t, op) -> util op (fun op -> EReactKit (rs, t, op)) -| EFunctionCallExtraArg (rl, ru, n, op) -> util op (fun op -> EFunctionCallExtraArg (rl, ru, n, op)) -| EDebugPrint (_, _) -| EImportValueAsType (_, _) -| EImportTypeAsTypeof (_, _) -| EImportTypeAsValue (_, _) -| ERefineAsValue (_, _) -| ENoDefaultExport (_, _, _) -| EOnlyDefaultExport (_, _, _) -| ENoNamedExport (_, _, _, _) -| EMissingTypeArgs {reason_tapp=_; reason_arity=_; min_arity=_; max_arity=_} -| EValueUsedAsType (_, _) -| EPolarityMismatch {reason=_; name=_; expected_polarity=_; actual_polarity=_} -| EStrictLookupFailed (_, _, _, None) 
-| EComparison (_, _) -| ESpeculationAmbiguous (_, _, _, _) -| EUnsupportedExact (_, _) -| EIdxArity (_) -| EIdxUse1 (_) -| EIdxUse2 (_) -| EUnexpectedThisType (_) -| ETypeParamArity (_, _) -| ECallTypeArity _ -| ETypeParamMinArity (_, _) -| ETooManyTypeArgs (_, _, _) -| ETooFewTypeArgs (_, _, _) -| EInvalidTypeArgs (_, _) -| EPropertyTypeAnnot (_) -| EExportsAnnot (_) -| ECharSetAnnot (_) -| EUnsupportedKeyInObjectType (_) -| EPredAnnot (_) -| ERefineAnnot (_) -| EUnexpectedTypeof (_) -| EFunPredCustom (_, _) -| EInternal (_, _) -| EUnsupportedSyntax (_, _) -| EUseArrayLiteral (_) -| EMissingAnnotation (_) -| EBindingError (_, _, _, _) -| ERecursionLimit (_, _) -| EModuleOutsideRoot (_, _) -| EExperimentalDecorators (_) -| EExperimentalClassProperties (_, _) -| EUnsafeGetSet (_) -| EExperimentalExportStarAs (_) -| EIndeterminateModuleType (_) -| EBadExportPosition (_) -| EBadExportContext (_) -| EUnreachable (_) -| EInvalidTypeof (_, _) -| EBinaryInLHS (_) -| EBinaryInRHS (_) -| EArithmeticOperand (_) -| EForInRHS (_) -| EObjectComputedPropertyAccess (_, _) -| EObjectComputedPropertyAssign (_, _) -| EInvalidLHSInAssignment (_) -| EUnsupportedImplements (_) -| EReactElementFunArity (_, _, _) -| EUnsupportedSetProto (_) -| EDuplicateModuleProvider {module_name=_; provider=_; conflict=_} -| EParseError (_, _) -| EDocblockError (_, _) -| EUntypedTypeImport (_, _) -| EUntypedImport (_, _) -| ENonstrictImport (_) -| EUnclearType (_) -| EDeprecatedType (_) -| EUnsafeGettersSetters (_) -| EUnusedSuppression (_) -| ELintSetting (_) -| ESketchyNullLint {kind=_; loc=_; null_loc=_; falsy_loc=_} -| ESketchyNumberLint _ -| EInvalidPrototype (_) -| EExperimentalOptionalChaining _ -| EOptionalChainingMethods _ -| EUnnecessaryOptionalChain _ -| EUnnecessaryInvariant _ -| EInexactSpread _ -| EDeprecatedCallSyntax _ -| ESignatureVerification _ - -> nope - -(* Rank scores for signals of different strength on an x^2 scale so that greater - * signals dominate lesser signals. *) -let reason_score = 100 -let frame_score = reason_score * 2 -let type_arg_frame_score = frame_score * 2 -let tuple_element_frame_score = type_arg_frame_score * 2 -let property_sentinel_score = tuple_element_frame_score * 2 - -(* Gets the score of a use_op. Used in score_of_msg. See the comment on - * score_of_msg to learn more about scores. - * - * Calculated by taking the count of all the frames. *) -let score_of_use_op use_op = - let score = fold_use_op - (* Comparing the scores of use_ops only works when they all have the same - * root_use_op! If two use_ops have different roots, we can't realistically - * compare the number of frames since the basis is completely different. - * - * So we require a Speculation root use_op to be passed into score_of_use_op - * and we perform a structural equality check using that. - * - * Otherwise, the total score from score_of_use_op is -1. This way, errors - * which match Speculation will be promoted. It is more likely the user was - * trying to target these branches. *) - (function - | Speculation _ -> Ok 0 - | _ -> Error (-1)) - - (fun acc frame -> match acc with Error _ -> acc | Ok acc -> - Ok (acc + (match frame with - (* Later params that error get a higher score. This roughly represents how - * much type-checking work Flow successfully completed before erroring. - * Useful for basically only overloaded function error messages. - * - * The signal that this gives us is that we successfully type checked n - * params in the call before erroring. 
If there was no error, Flow may - * have gone to successfully check another m params. However, we will - * never know that. n is our best approximation. It rewards errors near - * the end of a call and punishes (slightly) errors near the beginning of - * a call. - * - * This, however, turns out to be consistent with code style in modern - * JavaScript. As an unspoken convention, more complex arguments usually - * go last. For overloaded functions, the switching generally happens on - * the first argument. The "tag". This gives us confidence that n on - * FunParam is a good heuristic for the score. - * - * FunRestParam is FunParam, but at the end. So give it a larger score - * then FunParam after adding n. - * - * We do _not_ add n to the score if this use_op was added to an implicit type parameter. *) - | FunParam {n; _} -> frame_score + n - | FunRestParam _ -> frame_score + frame_score - 1 - (* FunCompatibility is generally followed by another use_op. So let's not - * count FunCompatibility. *) - | FunCompatibility _ -> 0 - (* FunMissingArg means the error is *less* likely to be correct. *) - | FunMissingArg _ -> 0 - (* Higher signal then PropertyCompatibility, for example. *) - | TypeArgCompatibility _ -> type_arg_frame_score - | ArrayElementCompatibility _ -> type_arg_frame_score - (* Higher signal then TypeArgCompatibility. *) - | TupleElementCompatibility _ -> tuple_element_frame_score - (* If we error-ed on a sentinel prop compatibility then tank the score of - * this use_op. This is so that the score of errors which passed sentinel - * compatibility are always picked relative to the score of errors which - * failed their sentinel prop checks. *) - | PropertyCompatibility {is_sentinel=true; _} -> -property_sentinel_score - (* ImplicitTypeParam is an internal marker use_op that doesn't get - * rendered in error messages. So it doesn't necessarily signal anything - * about the user's intent. *) - | ImplicitTypeParam _ -> 0 - | _ -> frame_score))) - use_op - in - match score with - | Ok n -> n - | Error n -> n - -(* Gets the score of an error message. The score is an approximation of how - * close the user was to getting their code right. A higher score means the user - * was closer then a lower score. A score of 0 means we have no signal about - * how close the user was. For example, consider the following two flows: - * - * number ~> {p: string} - * - * {p: number} ~> {p: string} - * - * Clearly, the user was closer to being correct with the second flow. So this - * function should assign the number ~> string error a higher score then the - * number ~> object error. - * - * Now consider: - * - * number ~> string - * - * number ~> {p: string} - * - * This time we kept the lower bound the same and changed the upper bound. The - * first flow is this time is closer to the user's intent then the second flow. - * So we give the number ~> string message a higher score then the - * number ~> object message. - * - * This scoring mechanism is useful for union and intersection error messages - * where we want to approximate which branch the user meant to target with - * their code. Branches with higher scores have a higher liklihood of being - * the branch the user was targeting. *) -let score_of_msg msg = - (* Start by getting the score based off the use_op of our error message. If - * the message does not have a use_op then we return 0. This score - * contribution declares that greater complexity in the use is more likely to - * cause a match. 
*) - let score = util_use_op_of_msg 0 (fun op _ -> score_of_use_op op) msg in - (* Special cases for messages which increment the score. *) - let score = score + match msg with - (* If a property doesn't exist, we still use a PropertyCompatibility use_op. - * This PropertyCompatibility when counted in our score is dishonest since - * a missing prop does not increase the likelihood that the user was close to - * the right types. *) - | EIncompatibleProp {use_op=Some (Frame (PropertyCompatibility _, _)); _} - | EPropNotFound (_, _, Frame (PropertyCompatibility _, _)) - | EStrictLookupFailed (_, _, _, Some (Frame (PropertyCompatibility _, _))) - -> -frame_score - | _ - -> 0 - in - (* If we have two incompatible types and both incompatible types are scalar or - * both types are arrays then increment our score. This is based on the belief - * that the solutions with the lowest possible complexity are closest to each - * other. e.g. number ~> string. If one type is a scalar or array and the - * other type is not then we decrement our score. *) - let score = score + ( - let reasons = match msg with - | EIncompatibleDefs {reason_lower=rl; reason_upper=ru; branches=[]; use_op=_} - | EIncompatibleWithUseOp (rl, ru, _) - | EIncompatibleWithExact ((rl, ru), _) - -> Some (rl, ru) - | _ - -> None - in - match reasons with - | Some ((rl, ru)) -> - if is_nullish_reason rl && is_nullish_reason ru then reason_score else - (* T ~> null should have a lower score then T ~> scalar *) - if is_nullish_reason rl || is_nullish_reason ru then 0 else - - if is_scalar_reason rl && is_scalar_reason ru then reason_score else - if is_scalar_reason rl || is_scalar_reason ru then 1 else - - if is_array_reason rl && is_array_reason ru then reason_score else - if is_array_reason rl || is_array_reason ru then 1 else - reason_score - | None -> - reason_score - ) in - score - -(* Decide reason order based on UB's flavor and blamability. - If the order is unchanged, maintain reference equality. *) -let ordered_reasons ((rl, ru) as reasons) = - if (is_blamable_reason ru && not (is_blamable_reason rl)) - then ru, rl - else reasons - -let rec error_of_msg ~trace_reasons ~source_file = - let open Errors in - - let mk_info reason extras = - let desc = string_of_desc (desc_of_reason reason) in - (* For descriptions that are an identifier wrapped in primes, e.g. `A`, then - * we want to unwrap the primes and just show A. This looks better in infos. - * However, when an identifier wrapped with primes is inside some other text - * then we want to keep the primes since they help with readability. *) - let desc = if ( - (String.length desc > 2) && - ((String.get desc 0) = '`') && - ((String.get desc ((String.length desc) - 1)) = '`') && - not (String.contains desc ' ') - ) then ( - String.sub desc 1 ((String.length desc) - 2) - ) else desc in - aloc_of_reason reason |> ALoc.to_loc, desc :: extras - in - - let info_of_reason r = mk_info r [] in - - let trace_infos = List.map info_of_reason trace_reasons in - - (* Flip the lower/upper reasons of a frame_use_op. 
- (* Flip the lower/upper reasons of a frame_use_op. *)
- let flip_frame = function
- | ArrayElementCompatibility c -> ArrayElementCompatibility {lower = c.upper; upper = c.lower}
- | FunCompatibility c -> FunCompatibility {lower = c.upper; upper = c.lower}
- | FunParam c -> FunParam {c with lower = c.upper; upper = c.lower}
- | FunRestParam c -> FunRestParam {lower = c.upper; upper = c.lower}
- | FunReturn c -> FunReturn {lower = c.upper; upper = c.lower}
- | IndexerKeyCompatibility c -> IndexerKeyCompatibility {lower = c.upper; upper = c.lower}
- | PropertyCompatibility c -> PropertyCompatibility {c with lower = c.upper; upper = c.lower}
- | ReactConfigCheck -> ReactConfigCheck
- | TupleElementCompatibility c ->
- TupleElementCompatibility {c with lower = c.upper; upper = c.lower}
- | TypeArgCompatibility c -> TypeArgCompatibility {c with lower = c.upper; upper = c.lower}
- | TypeParamBound _
- | FunMissingArg _
- | ImplicitTypeParam _
- | UnifyFlip
- as use_op -> use_op
- in
-
- (* Unification produces two errors, one for each side. For example,
- * {p: number} ~> {p: string} errors on both number ~> string and
- * string ~> number. Showing both errors to our user is often redundant.
- * So we use this utility to flip the string ~> number case and produce an
- * error identical to one we've produced before. These two errors will be
- * deduped in our ErrorSet. *)
- let dedupe_by_flip =
- (* Loop through the use_op chain. *)
- let rec loop = function
- (* Roots don't flip. *)
- | Op _ as use_op -> (false, use_op)
- (* Start flipping if we are on the reverse side of unification. *)
- | Frame (UnifyFlip, use_op) ->
- let (flip, use_op) = loop use_op in
- (not flip, use_op)
- (* If we are in flip mode then flip our frame. *)
- | Frame (frame, use_op) ->
- let (flip, use_op) = loop use_op in
- if flip
- then (true, Frame (flip_frame frame, use_op))
- else (false, Frame (frame, use_op))
- in
- fun (lower, upper) use_op ->
- let (flip, use_op) = loop use_op in
- if flip
- then ((upper, lower), use_op)
- else ((lower, upper), use_op)
- in
-
- (* In friendly error messages, we always want to point to a value as the
- * primary location. Or an annotation on a value. Normally, values are found
- * in the lower bound. However, in contravariant positions this flips. In this
- * function we normalize the lower/upper variables in use_ops so that lower
- * always points to the value. Example:
- *
- * ((x: number) => {}: (x: string) => void);
- *
- * We want to point to number. However, number is in the upper position since
- * number => void ~> string => void flips arguments to string ~> number. This
- * function flips contravariant positions like function arguments back. *)
- let flip_contravariant =
- (* Is this frame part of a contravariant position? *)
- let is_contravariant = function
- | FunParam _, Frame (FunCompatibility _, _) -> (true, true)
- | FunRestParam _, Frame (FunCompatibility _, _) -> (true, true)
- | TypeArgCompatibility {polarity = Negative; _}, _ -> (true, false)
- | _ -> (false, false)
- in
- let is_contravariant_root = function
- | FunImplicitReturn _ -> true
- | _ -> false
- in
- (* Loop through the use_op and flip the contravariants. *)
- let rec loop = function
- | Op root_use_op as use_op -> (is_contravariant_root root_use_op, use_op)
- (* If the frame is contravariant then flip.
*) - | Frame (frame, use_op) -> - let (flip, use_op) = loop use_op in - let (contravariant, flip_self) = is_contravariant (frame, use_op) in - let flip = if contravariant then not flip else flip in - let flip_self = flip && (not contravariant || flip_self) in - let frame = if flip_self then flip_frame frame else frame in - (flip, Frame (frame, use_op)) - in - fun (lower, upper) use_op -> - let (flip, use_op) = loop use_op in - if flip - then ((upper, lower), use_op) - else ((lower, upper), use_op) - in - - let text = Friendly.text in - let code = Friendly.code in - let ref = Friendly.ref in - let desc = Friendly.ref ~loc:false in - - (* Unwrap a use_op for the friendly error format. Takes the smallest location - * where we found the error and a use_op which we will unwrap. *) - let unwrap_use_ops = - let open Friendly in - let rec loop loc frames use_op = - let action = match use_op with - | Op UnknownUse - | Op (Internal _) - -> `UnknownRoot false - - | Op (Speculation _) -> - `UnknownRoot true - - | Op (Addition {op; left; right}) -> - `Root (op, None, - [text "Cannot add "; desc left; text " and "; desc right]) - - | Op (AssignVar {var; init}) -> - `Root (init, None, match var with - | Some var -> [text "Cannot assign "; desc init; text " to "; desc var] - | None -> [text "Cannot assign "; desc init; text " to variable"]) - - | Op Cast {lower; upper} -> - `Root (lower, None, - [text "Cannot cast "; desc lower; text " to "; desc upper]) - - | Op ClassExtendsCheck {extends; def; _} -> - `Root (def, None, - [text "Cannot extend "; ref extends; text " with "; desc def]) - - | Op ClassImplementsCheck {implements; def; _} -> - `Root (def, None, - [text "Cannot implement "; ref implements; text " with "; desc def]) - - | Op ClassOwnProtoCheck {prop; own_loc; proto_loc} -> - (match own_loc, proto_loc with - | None, None -> `UnknownRoot true - | Some loc, None -> - let def = mk_reason (RProperty (Some prop)) loc in - `Root (def, None, [text "Cannot shadow proto property"]) - | None, Some loc -> - let def = mk_reason (RProperty (Some prop)) loc in - `Root (def, None, [text "Cannot define shadowed proto property"]) - | Some own_loc, Some proto_loc -> - let def = mk_reason (RProperty (Some prop)) own_loc in - let proto = mk_reason (RIdentifier prop) proto_loc in - `Root (def, None, [text "Cannot shadow proto property "; ref proto])) - - | Op Coercion {from; target} -> - `Root (from, None, - [text "Cannot coerce "; desc from; text " to "; desc target]) - - | Op (FunCall {op; fn; _}) -> - `Root (op, Some fn, [text "Cannot call "; desc fn]) - - | Op (FunCallMethod {op; fn; prop; _}) -> - `Root (op, Some prop, [text "Cannot call "; desc fn]) - - | Frame (FunParam _, ((Op (Speculation - (Op (FunCall _ | FunCallMethod _ | JSXCreateElement _)))) as use_op)) - -> `Next use_op - - | Frame (FunParam {n; name; lower = lower'; _}, - Op (FunCall {args; fn; _} | FunCallMethod {args; fn; _})) -> - let lower = if List.length args > n - 1 then List.nth args (n - 1) else lower' in - let param = match name with - | Some name -> code name - | None -> text (spf "the %s parameter" (Utils_js.ordinal n)) - in - `Root (lower, None, - [text "Cannot call "; desc fn; text " with "; desc lower; text " bound to "; param]) - - | Op (FunReturnStatement {value}) -> - `Root (value, None, - [text "Cannot return "; desc value]) - - | Op (FunImplicitReturn {upper; fn}) -> - `Root (upper, None, - [text "Cannot expect "; desc upper; text " as the return type of "; desc fn]) - - | Op (GeneratorYield {value}) -> - `Root (value, None, - 
[text "Cannot yield "; desc value]) - - | Op (GetProperty prop) -> - `Root (prop, None, - [text "Cannot get "; desc prop]) - - | Frame (FunParam _, Op (JSXCreateElement {op; component; _})) - | Op (JSXCreateElement {op; component; _}) -> - `Root (op, Some component, - [text "Cannot create "; desc component; text " element"]) - - | Op (ReactCreateElementCall {op; component; _}) -> - `Root (op, Some component, - [text "Cannot create "; desc component; text " element"]) - - | Op (ReactGetIntrinsic {literal}) -> - `Root (literal, None, - [text "Cannot create "; desc literal; text " element"]) - - | Op (TypeApplication {type'}) -> - `Root (type', None, - [text "Cannot instantiate "; desc type']) - - | Op (SetProperty {prop; value; lhs; _}) -> - let loc_reason = if Loc.contains (aloc_of_reason lhs |> ALoc.to_loc) loc then lhs else value in - `Root (loc_reason, None, - [text "Cannot assign "; desc value; text " to "; desc prop]) - - | Frame (ArrayElementCompatibility {lower; _}, use_op) -> - `Frame (lower, use_op, - [text "array element"]) - - | Frame (FunParam {n; lower; _}, use_op) -> - `Frame (lower, use_op, - [text "the "; text (Utils_js.ordinal n); text " argument"]) - - | Frame (FunRestParam _, use_op) -> - `Next use_op - - | Frame (FunReturn {lower; _}, use_op) -> - `Frame (repos_reason loc lower, use_op, - [text "the return value"]) - - | Frame (IndexerKeyCompatibility {lower; _}, use_op) -> - `Frame (lower, use_op, - [text "the indexer property's key"]) - - | Frame (PropertyCompatibility {prop=None | Some "$key" | Some "$value"; lower; _}, use_op) -> - `Frame (lower, use_op, - [text "the indexer property"]) - - | Frame (PropertyCompatibility {prop=Some "$call"; lower; _}, use_op) -> - `Frame (lower, use_op, - [text "the callable signature"]) - - | Frame (PropertyCompatibility {prop=Some prop; lower; _}, use_op) -> - let repos_small_reason loc reason = function - (* If we are checking class extensions or implementations then the - * object reason will point to the class name. So don't reposition with - * this reason. *) - | Op (ClassExtendsCheck _) -> repos_reason loc reason - | Op (ClassImplementsCheck _) -> repos_reason loc reason - | _ -> reason - in - let lower = repos_small_reason loc lower use_op in - let rec loop lower = function - (* Don't match $key/$value/$call properties since they have special - * meaning. As defined above. *) - | Frame (PropertyCompatibility {prop=Some prop; lower=lower'; _}, use_op) - when prop <> "$key" && prop <> "$value" && prop <> "$call" -> - let lower' = repos_small_reason (aloc_of_reason lower |> ALoc.to_loc) lower' use_op in - (* Perform the same frame location unwrapping as we do in our - * general code. *) - let lower = if - Loc.contains - (aloc_of_reason lower' |> ALoc.to_loc) - (aloc_of_reason lower |> ALoc.to_loc) - then lower else lower' in - let (lower, props, use_op) = loop lower use_op in - (lower, prop::props, use_op) - (* Perform standard iteration through these use_ops. *) - | use_op -> (lower, [], use_op) - in - (* Loop through our parent use_op to get our property path. *) - let (lower, props, use_op) = loop lower use_op in - (* Create our final action. *) - `Frame (lower, use_op, - [text "property "; code - (List.fold_left (fun acc prop -> prop ^ "." 
^ acc) prop props)])
-
- | Frame (TupleElementCompatibility {n; lower; _}, use_op) ->
- `Frame (lower, use_op,
- [text "index "; text (string_of_int (n - 1))])
-
- | Frame (TypeArgCompatibility {targ; lower; _}, use_op) ->
- `Frame (lower, use_op,
- [text "type argument "; ref targ])
-
- | Frame (TypeParamBound {name}, use_op) ->
- `FrameWithoutLoc (use_op,
- [text "type argument "; code name])
-
- | Frame (FunCompatibility {lower; _}, use_op) ->
- `NextWithLoc (lower, use_op)
-
- | Frame (FunMissingArg _, use_op)
- | Frame (ImplicitTypeParam _, use_op)
- | Frame (ReactConfigCheck, use_op)
- | Frame (UnifyFlip, use_op)
- -> `Next use_op
- in
- match action with
- (* Skip this use_op and go to the next one. *)
- | `Next use_op -> loop loc frames use_op
- (* Skip this use_op, don't add a frame, but do use the loc to reposition
- * our primary location. *)
- | `NextWithLoc (frame_reason, use_op) ->
- (* If our current loc is inside our frame_loc then use our current loc
- * since it is the smallest possible loc in our frame_loc. *)
- let frame_loc = aloc_of_reason frame_reason |> ALoc.to_loc in
- let loc = if Loc.contains frame_loc loc then loc else frame_loc in
- loop loc frames use_op
- (* Add our frame message and reposition the location if appropriate. *)
- | `Frame (frame_reason, use_op, frame) ->
- (* If our current loc is inside our frame_loc then use our current loc
- * since it is the smallest possible loc in our frame_loc. *)
- let frame_loc = aloc_of_reason frame_reason |> ALoc.to_loc in
- let frame_contains_loc = Loc.contains frame_loc loc in
- let loc = if frame_contains_loc then loc else frame_loc in
- (* Add our frame and recurse with the next use_op. *)
- let (all_frames, local_frames) = frames in
- let frames = (frame::all_frames,
- if frame_contains_loc then local_frames else frame::local_frames) in
- loop loc frames use_op
- (* Same logic as `Frame except we don't have a frame location. *)
- | `FrameWithoutLoc (use_op, frame) ->
- let (all_frames, local_frames) = frames in
- let frames = (frame::all_frames, frame::local_frames) in
- loop loc frames use_op
- (* We don't know what our root is! Return what we do know. *)
- | `UnknownRoot show_all_frames ->
- let (all_frames, local_frames) = frames in
- (None, loc, if show_all_frames then all_frames else local_frames)
- (* Finish up by returning our root location, root message, primary loc,
- * and frames. *)
- | `Root (root_reason, root_specific_reason, root_message) ->
- (* If our current loc is inside our root_loc then use our current loc
- * since it is the smallest possible loc in our root_loc. *)
- let root_loc = aloc_of_reason root_reason |> ALoc.to_loc in
- let root_specific_loc = Option.map root_specific_reason (aloc_of_reason %> ALoc.to_loc) in
- let loc = if Loc.contains root_loc loc && Loc.compare root_loc loc <> 0
- then loc
- else Option.value root_specific_loc ~default:root_loc
- in
- (* Return our root loc and message in addition to the true primary loc
- * and frames. *)
- let (all_frames, _) = frames in
- (Some (root_loc, root_message), loc, all_frames)
- in
- fun loc use_op ->
- let (root, loc, frames) = loop loc ([], []) use_op in
- let root = Option.map root (fun (root_loc, root_message) ->
- (root_loc, root_message @ [text " because"])) in
- (root, loc, frames)
- in
-
- (* Make a friendly error based on a use_op. The message we are provided should
- * not have any punctuation. Punctuation will be provided after the frames of
- * an error message.
*) - let mk_use_op_error loc use_op message = - let (root, loc, frames) = unwrap_use_ops (loc |> ALoc.to_loc) use_op in - mk_error - ~trace_infos - ?root - ~frames - (ALoc.of_loc loc) - message - in - - (* Make a friendly error based on failed speculation. *) - let mk_use_op_speculation_error loc use_op branches = - let (root, loc, frames) = unwrap_use_ops (loc |> ALoc.to_loc) use_op in - let speculation_errors = List.map (fun (_, msg) -> - let score = score_of_msg msg in - let error = error_of_msg ~trace_reasons:[] ~source_file msg in - (score, error) - ) branches in - mk_speculation_error - ~kind:InferError - ~trace_infos - ~loc - ~root - ~frames - ~speculation_errors - in - - (* An error between two incompatible types. A "lower" type and an "upper" - * type. The use_op describes the path which we followed to find - * this incompatibility. - * - * This is a specialization of mk_incompatible_use_error. *) - let mk_incompatible_error lower upper use_op = - let ((lower, upper), use_op) = dedupe_by_flip (lower, upper) use_op in - let ((lower, upper), use_op) = flip_contravariant (lower, upper) use_op in - match use_op with - (* Add a custom message for Coercion root_use_ops that does not include the - * upper bound. *) - | Op (Coercion {from; _}) -> - mk_use_op_error (aloc_of_reason from) use_op - [ref lower; text " should not be coerced"] - (* Ending with FunMissingArg gives us a different error message. Even though - * this error was generated by an incompatibility, we want to show a more - * descriptive error message. *) - | Frame (FunMissingArg { def; op; _ }, use_op) -> - let message = match use_op with - | Op (FunCall _ | FunCallMethod _) -> - let def = replace_reason (function - | RFunctionType -> RFunction RNormal - | desc -> desc - ) def in - [ref def; text " requires another argument"] - | _ -> - [ref def; text " requires another argument from "; ref op] - in - mk_use_op_error (aloc_of_reason op) use_op message - | _ -> - let root_use_op = root_of_use_op use_op in - (match root_use_op with - (* Further customize functions with an implicit return. Functions with an - * implicit return have a lower position which is not valuable. Also - * clarify that the type was implicitly-returned. - * - * In flip_contravariant we flip upper/lower for all FunImplicitReturn. So - * reverse those back as well. *) - | FunImplicitReturn {upper=return; _} -> - mk_use_op_error (aloc_of_reason lower) use_op ( - [ref lower; text " is incompatible with "] @ - if - Loc.compare (aloc_of_reason return |> ALoc.to_loc) (aloc_of_reason upper |> ALoc.to_loc) - = 0 - then - [text "implicitly-returned "; desc upper] - else - [ref upper] - ) - (* Default incompatibility. *) - | _ -> - begin match desc_of_reason lower, desc_of_reason upper with - | RPolyTest _, RPolyTest _ when aloc_of_reason lower = aloc_of_reason upper -> - mk_use_op_error (aloc_of_reason lower) use_op - [text "the expected type is not parametric in "; ref upper; - text ", perhaps due to the use of "; code "*"; - text " or the lack of a type annotation";] - | _ -> - mk_use_op_error (aloc_of_reason lower) use_op - [ref lower; text " is incompatible with "; ref upper] - end - ) - in - - let mk_prop_message = function - | None | Some "$key" | Some "$value" -> [text "an indexer property"] - | Some "$call" -> [text "a callable signature"] - | Some prop -> [text "property "; code prop] - in - - (* When we fail to find a property on an object we use this function to create - * an error. 
prop_loc should be the position of the use which caused this
- * error. The use_op represents how we got to this error.
- *
- * If the use_op is a PropertyCompatibility frame then we encountered this
- * error while subtyping two objects. In this case we add a bit more
- * information to the error message. *)
- let mk_prop_missing_error prop_loc prop lower use_op =
- let (loc, lower, upper, use_op) = match use_op with
- (* If we are missing a property while performing property compatibility
- * then we are subtyping. Record the upper reason. *)
- | Frame (PropertyCompatibility {prop=compat_prop; lower; upper; _}, use_op)
- when prop = compat_prop ->
- (aloc_of_reason lower, lower, Some upper, use_op)
- (* Otherwise this is a general property missing error. *)
- | _ -> (prop_loc, lower, None, use_op)
- in
- (* If we were subtyping, add to the error message so our user knows which
- * object required the missing property. *)
- let prop_message = mk_prop_message prop in
- let message = match upper with
- | Some upper ->
- prop_message @ [text " is missing in "; ref lower; text " but exists in "] @
- [ref upper]
- | None ->
- prop_message @ [text " is missing in "; ref lower]
- in
- (* Finally, create our error message. *)
- mk_use_op_error loc use_op message
- in
-
- (* An error that occurs when some arbitrary "use" is incompatible with the
- * "lower" type. The use_op describes the path which we followed to find this
- * incompatibility.
- *
- * Similar to mk_incompatible_error except with any arbitrary *use*
- * instead of specifically an upper type. This error handles all use
- * incompatibilities in general. *)
- let mk_incompatible_use_error use_loc use_kind lower use_op =
- let nope msg =
- mk_use_op_error use_loc use_op
- [ref lower; text (" " ^ msg)]
- in
- match use_kind with
- | IncompatibleElemTOfArrT
- -> nope "is not an array index"
- | IncompatibleGetPrivatePropT
- | IncompatibleSetPrivatePropT
- -> nope "is not a class with private properties"
- | IncompatibleCallT
- | IncompatibleConstructorT
- -> nope "is not a function"
- | IncompatibleObjAssignFromTSpread
- | IncompatibleArrRestT
- -> nope "is not an array"
- | IncompatibleObjAssignFromT
- | IncompatibleObjRestT
- | IncompatibleObjSealT
- | IncompatibleGetKeysT
- | IncompatibleGetValuesT
- | IncompatibleMapTypeTObject
- -> nope "is not an object"
- | IncompatibleMixinT
- | IncompatibleThisSpecializeT
- -> nope "is not a class"
- | IncompatibleSpecializeT
- | IncompatibleVarianceCheckT
- | IncompatibleTypeAppVarianceCheckT
- -> nope "is not a polymorphic type"
- | IncompatibleSuperT
- -> nope "is not inheritable"
- | IncompatibleUnaryMinusT
- -> nope "is not a number"
- | IncompatibleGetPropT (prop_loc, prop)
- | IncompatibleSetPropT (prop_loc, prop)
- | IncompatibleMatchPropT (prop_loc, prop)
- | IncompatibleHasOwnPropT (prop_loc, prop)
- | IncompatibleMethodT (prop_loc, prop)
- -> mk_prop_missing_error (prop_loc |> ALoc.of_loc) prop lower use_op
- | IncompatibleGetElemT prop_loc
- | IncompatibleSetElemT prop_loc
- | IncompatibleCallElemT prop_loc
- -> mk_prop_missing_error (prop_loc |> ALoc.of_loc) None lower use_op
- | IncompatibleGetStaticsT
- -> nope "is not an instance type"
- (* unreachable or unclassified use-types. until we have a mechanical way
- to verify that all legit use types are listed above, we can't afford
- to throw on a use type, so mark the error instead *)
- | IncompatibleUnclassified ctor
- -> nope (spf "is not supported by unclassified use %s" ctor)
- in
-
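(* Illustrative, self-contained sketch of what the unwrap_use_ops helper
 * above produces; the constructors here are simplified stand-ins, not
 * Flow's use_op. Frames peel off into human-readable breadcrumbs until the
 * Op root is reached, yielding a root message plus a frame list for the
 * friendly printer. *)
type crumb_op =
  | CrumbFunCall of string               (* root: the called function *)
  | CrumbFunParam of int * crumb_op      (* the nth argument *)
  | CrumbProperty of string * crumb_op   (* a property being checked *)

let rec unwrap acc = function
  | CrumbFunCall fn -> (Printf.sprintf "Cannot call `%s`" fn, List.rev acc)
  | CrumbFunParam (n, rest) -> unwrap (Printf.sprintf "the argument %d" n :: acc) rest
  | CrumbProperty (p, rest) -> unwrap (Printf.sprintf "property `%s`" p :: acc) rest

let () =
  let (root, frames) =
    unwrap [] (CrumbProperty ("p", CrumbFunParam (2, CrumbFunCall "f")))
  in
  (* Prints: Cannot call `f` in property `p` in the argument 2 *)
  print_endline (String.concat " in " (root :: frames))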
- (* When an object property has a polarity that is incompatible with another
- * property's polarity, we create one of these errors. We use terms like "read-only" and
- * "write-only" to better reflect how the user thinks about these properties.
- * Other terminology could include "contravariant", "covariant", and
- * "invariant". Generally these terms are impenetrable to the average
- * JavaScript developer. If we had more documentation explaining these terms
- * it may be fair to use them in error messages. *)
- let mk_prop_polarity_mismatch_error prop (lower, lpole) (upper, upole) use_op =
- (* Remove redundant PropertyCompatibility if one exists. *)
- let use_op = match use_op with
- | Frame (PropertyCompatibility c, use_op) when c.prop = prop -> use_op
- | _ ->
- use_op
- in
- let expected = match lpole with
- | Positive -> "read-only"
- | Negative -> "write-only"
- | Neutral ->
- (match upole with
- | Negative -> "readable"
- | Positive -> "writable"
- | Neutral -> failwith "unreachable")
- in
- let actual = match upole with
- | Positive -> "read-only"
- | Negative -> "write-only"
- | Neutral ->
- (match lpole with
- | Negative -> "readable"
- | Positive -> "writable"
- | Neutral -> failwith "unreachable")
- in
- mk_use_op_error (aloc_of_reason lower) use_op (
- mk_prop_message prop @
- [text (" is " ^ expected ^ " in "); ref lower; text " but "] @
- [text (actual ^ " in "); ref upper]
- )
- in
-
- let msg_export export_name =
- if export_name = "default" then
- text "the default export"
- else
- code export_name
- in
-
- let mk_signature_verification_error loc msgs =
- mk_error ~trace_infos loc
- ((text "Could not build a typed interface for this module. ")::msgs)
- in
-
- function
- | EIncompatible {
- lower = (reason_lower, _);
- upper = (reason_upper, upper_kind);
- use_op;
- branches;
- } ->
- if branches = [] then
- mk_incompatible_use_error
- (aloc_of_reason reason_upper)
- upper_kind
- reason_lower
- (Option.value ~default:unknown_use use_op)
- else
- mk_use_op_speculation_error
- (aloc_of_reason reason_upper)
- (Option.value ~default:unknown_use use_op)
- branches
-
- | EIncompatibleDefs { use_op; reason_lower; reason_upper; branches } ->
- if branches = [] then
- mk_incompatible_error
- reason_lower
- reason_upper
- use_op
- else
- mk_use_op_speculation_error
- (aloc_of_reason reason_upper)
- use_op
- branches
-
- | EIncompatibleProp { prop; reason_prop; reason_obj; special=_; use_op } ->
- mk_prop_missing_error
- (aloc_of_reason reason_prop) prop reason_obj (Option.value ~default:unknown_use use_op)
-
- | EDebugPrint (r, str) ->
- mk_error ~trace_infos (aloc_of_reason r) [text str]
-
- | EImportValueAsType (r, export_name) ->
- mk_error ~trace_infos (aloc_of_reason r) [
- text "Cannot import the value "; msg_export export_name; text " as a type. ";
- code "import type"; text " only works on type exports. Like type aliases, ";
- text "interfaces, and classes. If you intended to import the type of a ";
- text "value use "; code "import typeof"; text " instead.";
- ]
-
- | EImportTypeAsTypeof (r, export_name) ->
- mk_error ~trace_infos (aloc_of_reason r) [
- text "Cannot import the type "; msg_export export_name; text " as a type. ";
- code "import typeof"; text " only works on value exports. 
Like variables, "; - text "functions, and classes. If you intended to import a type use "; - code "import type"; text " instead."; - ] - - | EImportTypeAsValue (r, export_name) -> - mk_error ~trace_infos (aloc_of_reason r) [ - text "Cannot import the type "; msg_export export_name; text " as a value. "; - text "Use "; code "import type"; text " instead."; - ] - - | ERefineAsValue (r, name) -> - mk_error ~trace_infos (aloc_of_reason r) [ - text "Cannot refine "; msg_export name; text " as a value. "; - (* text "Use "; code "import type"; text " instead."; *) - ] - - | ENoDefaultExport (r, module_name, suggestion) -> - mk_error ~trace_infos (aloc_of_reason r) ( - [ - text "Cannot import a default export because there is no default export "; - text "in "; code module_name; text "."; - ] @ - match suggestion with - | None -> [] - | Some suggestion -> [text " "; - text "Did you mean "; - code (spf "import {%s} from \"%s\"" suggestion module_name); - text "?"; - ] - ) - - | EOnlyDefaultExport (r, module_name, export_name) -> - mk_error ~trace_infos (aloc_of_reason r) [ - text "Cannot import "; code export_name; text " because "; - text "there is no "; code export_name; text " export in "; - code module_name; text ". Did you mean "; - code (spf "import %s from \"...\"" export_name); text "?"; - ] - - | ENoNamedExport (r, module_name, export_name, suggestion) -> - mk_error ~trace_infos (aloc_of_reason r) ( - [ - text "Cannot import "; code export_name; text " because "; - text "there is no "; code export_name; text " export in "; - code module_name; text "."; - ] @ - match suggestion with - | None -> [] - | Some suggestion -> [text " Did you mean "; code suggestion; text "?"] - ) - - | EMissingTypeArgs { reason_tapp; reason_arity; min_arity; max_arity } -> - let arity, args = - if min_arity = max_arity then - spf "%d" max_arity, if max_arity = 1 then "argument" else "arguments" - else - spf "%d-%d" min_arity max_arity, "arguments" - in - let reason_arity = replace_reason_const (desc_of_reason reason_tapp) reason_arity in - mk_error ~trace_infos (aloc_of_reason reason_tapp) - [text "Cannot use "; ref reason_arity; text (spf " without %s type %s." arity args)] - - | ETooManyTypeArgs (reason_tapp, reason_arity, n) -> - let reason_arity = replace_reason_const (desc_of_reason reason_tapp) reason_arity in - mk_error ~trace_infos (aloc_of_reason reason_tapp) [ - text "Cannot use "; ref reason_arity; text " with more than "; - text (spf "%n type %s." n (if n == 1 then "argument" else "arguments")) - ] - - | ETooFewTypeArgs (reason_tapp, reason_arity, n) -> - let reason_arity = replace_reason_const (desc_of_reason reason_tapp) reason_arity in - mk_error ~trace_infos (aloc_of_reason reason_tapp) [ - text "Cannot use "; ref reason_arity; text " with fewer than "; - text (spf "%n type %s." n (if n == 1 then "argument" else "arguments")) - ] - - | EInvalidTypeArgs (reason_main, reason_tapp) -> - mk_error ~trace_infos (aloc_of_reason reason_tapp) [ - text "Cannot use "; ref reason_main; text " with "; ref reason_tapp; text " argument"; - ] - - | ETypeParamArity (loc, n) -> - if n = 0 then - mk_error ~trace_infos (loc |> ALoc.of_loc) - [text "Cannot apply type because it is not a polymorphic type."] - else - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use type without exactly "; - text (spf "%n type %s." 
n (if n == 1 then "argument" else "arguments")); - ] - - | ETypeParamMinArity (loc, n) -> - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use type without at least "; - text (spf "%n type %s." n (if n == 1 then "argument" else "arguments")); - ] - - | ECallTypeArity { call_loc; is_new; reason_arity; expected_arity = n } -> - let use = if is_new then "construct " else "call " in - if n = 0 then - mk_error ~trace_infos (call_loc |> ALoc.of_loc) [ - text "Cannot "; text use; text "non-polymorphic "; ref reason_arity; - text " with type arguments."; - ] - else - mk_error ~trace_infos (call_loc |> ALoc.of_loc) [ - text "Cannot "; text use; ref reason_arity; text " without exactly "; - text (spf "%n type argument%s." n (if n == 1 then "" else "s")); - ] - - | EValueUsedAsType reasons -> - let (value, _) = reasons in - mk_error ~trace_infos (aloc_of_reason value) [ - text "Cannot use "; desc value; text " as a type because "; - desc value; text " is a value. To get the type of "; - text "a value use "; code "typeof"; text "."; - ] - - | EExpectedStringLit (reasons, _, _, use_op) -> - let (reason_lower, reason_upper) = reasons in - mk_incompatible_error reason_lower reason_upper use_op - - | EExpectedNumberLit (reasons, _, _, use_op) -> - let (reason_lower, reason_upper) = reasons in - mk_incompatible_error reason_lower reason_upper use_op - - | EExpectedBooleanLit (reasons, _, _, use_op) -> - let (reason_lower, reason_upper) = reasons in - mk_incompatible_error reason_lower reason_upper use_op - - | EPropNotFound (prop, reasons, use_op) -> - let (reason_prop, reason_obj) = reasons in - mk_prop_missing_error - (aloc_of_reason reason_prop) prop reason_obj use_op - - | EPropAccess (reasons, x, _, rw, use_op) -> - let (reason_prop, _) = reasons in - let rw = match rw with - | Read -> "readable" - | Write _ -> "writable" - in - mk_use_op_error (aloc_of_reason reason_prop) use_op - (mk_prop_message x @ [text (spf " is not %s" rw)]) - - | EPropPolarityMismatch (reasons, x, (p1, p2), use_op) -> - let (lreason, ureason) = reasons in - mk_prop_polarity_mismatch_error - x (lreason, p1) (ureason, p2) use_op - - | EPolarityMismatch { reason; name; expected_polarity; actual_polarity } -> - let polarity_string = function - | Positive -> "output" - | Negative -> "input" - | Neutral -> "input/output" - in - let expected_polarity = polarity_string expected_polarity in - let actual_polarity = polarity_string actual_polarity in - let reason_targ = mk_reason (RIdentifier name) (def_loc_of_reason reason) in - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot use "; ref reason_targ; text (" in an " ^ actual_polarity ^ " "); - text "position because "; ref reason_targ; text " is expected to occur only in "; - text (expected_polarity ^ " positions."); - ] - - | EStrictLookupFailed (reasons, lreason, x, use_op) -> - (* if we're looking something up on the global/builtin object, then tweak - the error to say that `x` doesn't exist. We can tell this is the - global object because that should be the only object created with - `builtin_reason` instead of an actual location (see `Init_js.init`). 
*) - if is_builtin_reason lreason then - let (reason, _) = reasons in - let msg = match x with - | Some x when is_internal_module_name x -> - [text "Cannot resolve module "; code (uninternal_module_name x); text "."] - | None -> [text "Cannot resolve name "; desc reason; text "."] - | Some x when is_internal_name x -> [text "Cannot resolve name "; desc reason; text "."] - | Some x -> [text "Cannot resolve name "; code x; text "."] - in - mk_error ~trace_infos (aloc_of_reason reason) msg - else - let (reason_prop, reason_obj) = reasons in - mk_prop_missing_error - (aloc_of_reason reason_prop) x reason_obj (Option.value ~default:unknown_use use_op) - - | EPrivateLookupFailed (reasons, x, use_op) -> - mk_prop_missing_error - (aloc_of_reason (fst reasons)) (Some ("#" ^ x)) (snd reasons) use_op - - | EAdditionMixed (reason, use_op) -> - mk_use_op_error (aloc_of_reason reason) use_op - [ref reason; text " could either behave like a string or like a number"] - - | EComparison (lower, upper) -> - mk_error ~trace_infos (aloc_of_reason lower) - [text "Cannot compare "; ref lower; text " to "; ref upper; text "."] - - | ETupleArityMismatch (reasons, l1, l2, use_op) -> - let (lower, upper) = reasons in - mk_use_op_error (aloc_of_reason lower) use_op [ - ref lower; text (spf " has an arity of %d but " l1); ref upper; - text (spf " has an arity of %d" l2); - ] - - | ENonLitArrayToTuple (reasons, use_op) -> - let (lower, upper) = reasons in - mk_use_op_error (aloc_of_reason lower) use_op [ - ref lower; text " has an unknown number of elements, so is "; - text "incompatible with "; ref upper; - ] - - | ETupleOutOfBounds (reasons, length, index, use_op) -> - let (lower, upper) = reasons in - mk_use_op_error (aloc_of_reason lower) use_op [ - ref upper; - text (spf " only has %d element%s, so index %d is out of bounds" - length (if length == 1 then "" else "s") index); - ] - - | ETupleUnsafeWrite (reasons, use_op) -> - let (lower, _) = reasons in - mk_use_op_error (aloc_of_reason lower) use_op - [text "the index must be statically known to write a tuple element"] - - | EUnionSpeculationFailed { use_op; reason; reason_op=_; branches } -> - mk_use_op_speculation_error - (aloc_of_reason reason) - use_op - branches - - | ESpeculationAmbiguous ((union_r, _), (prev_i, prev_case), (i, case), case_rs) -> - let open Friendly in - let prev_case_r = - mk_reason (RCustom - ("case " ^ string_of_int (prev_i + 1))) (aloc_of_reason prev_case |> ALoc.to_loc) - in - let case_r = - mk_reason (RCustom - ("case " ^ string_of_int (i + 1))) (aloc_of_reason case |> ALoc.to_loc) - in - mk_error (aloc_of_reason union_r) ( - [ - text "Could not decide which case to select. Since "; ref prev_case_r; text " "; - text "may work but if it doesn't "; ref case_r; text " looks promising "; - text "too. 
To fix add a type annotation "; - ] @ - (conjunction_concat ~conjunction:"or" (List.map (fun case_r -> - let text = "to " ^ (string_of_desc (desc_of_reason case_r)) in - [ref (mk_reason (RCustom text) (aloc_of_reason case_r |> ALoc.to_loc))] - ) case_rs)) @ - [text "."] - ) - - | EIncompatibleWithExact (reasons, use_op) -> - let (lower, upper) = reasons in - mk_use_op_error (aloc_of_reason lower) use_op - [text "inexact "; ref lower; text " is incompatible with exact "; ref upper] - - | EUnsupportedExact (_, lower) -> - mk_error ~trace_infos (aloc_of_reason lower) - [text "Cannot create exact type from "; ref lower; text "."] - - | EIdxArity reason -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot call "; code "idx(...)"; text " because only exactly two "; - text "arguments are allowed." - ] - - | EIdxUse1 reason -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot call "; code "idx(...)"; text " because the callback "; - text "argument must not be annotated."; - ] - - | EIdxUse2 reason -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot call "; code "idx(...)"; text " because the callback must "; - text "only access properties on the callback parameter."; - ] - - | EUnexpectedThisType loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) - [text "Unexpected use of "; code "this"; text " type."] - - | EPropertyTypeAnnot loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use "; code "$PropertyType"; text " because the second "; - text "type argument must be a string literal."; - ] - - | EExportsAnnot loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use "; code "$Exports"; text " because the first type "; - text "argument must be a string literal."; - ] - - | ECharSetAnnot loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use "; code "$CharSet"; text " because the first type "; - text "argument must be a string literal."; - ] - - | EInvalidCharSet { - invalid = (invalid_reason, invalid_chars); - valid = valid_reason; - use_op; - } -> - let valid_reason = mk_reason (desc_of_reason valid_reason) (def_loc_of_reason valid_reason) in - let invalids = - InvalidCharSetSet.fold (fun c acc -> - match c with - | InvalidChar c -> - [code (String.make 1 c); text " is not a member of the set"] :: acc - | DuplicateChar c -> - [code (String.make 1 c); text " is duplicated"] :: acc - ) invalid_chars [] - |> List.rev - in - mk_use_op_error (aloc_of_reason invalid_reason) use_op ( - [ref invalid_reason; text " is incompatible with "; ref valid_reason; text " since "] @ - Friendly.conjunction_concat ~conjunction:"and" invalids - ) - - | EUnsupportedKeyInObjectType loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) - [text "Unsupported key in object type."] - - | EPredAnnot loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use "; code "$Pred"; text " because the first "; - text "type argument must be a number literal."; - ] - - | ERefineAnnot loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) [ - text "Cannot use "; code "$Refine"; text " because the third "; - text "type argument must be a number literal."; - ] - - | EUnexpectedTypeof loc -> - mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) - [code "typeof"; text " can only be used to get the type of variables."] - - | EFunPredCustom ((a, b), msg) -> - mk_error (aloc_of_reason a) - [ref a; text ". 
"; text msg; text " "; ref b; text "."] - - | EFunctionIncompatibleWithShape (lower, upper, use_op) -> - mk_use_op_error (aloc_of_reason lower) use_op [ - ref lower; text " is incompatible with "; code "$Shape"; text " of "; - ref upper; - ] - - | EInternal (loc, internal_error) -> - let msg = match internal_error with - | PackageHeapNotFound pkg -> - spf "package %S was not found in the PackageHeap!" pkg - | AbnormalControlFlow -> - "abnormal control flow" - | MethodNotAFunction -> - "expected function type" - | OptionalMethod -> - "optional methods are not supported" - | OpenPredWithoutSubst -> - "OpenPredT ~> OpenPredT without substitution" - | PredFunWithoutParamNames -> - "FunT -> FunT no params" - | UnsupportedGuardPredicate pred -> - spf "unsupported guard predicate (%s)" pred - | BreakEnvMissingForCase -> - "break env missing for case" - | PropertyDescriptorPropertyCannotBeRead -> - "unexpected property in properties object" - | ForInLHS -> - "unexpected LHS in for...in" - | ForOfLHS -> - "unexpected LHS in for...of" - | InstanceLookupComputed -> - "unexpected computed property lookup on InstanceT" - | PropRefComputedOpen -> - "unexpected open computed property element type" - | PropRefComputedLiteral -> - "unexpected literal computed property element type" - | ShadowReadComputed -> - "unexpected shadow read on computed property" - | ShadowWriteComputed -> - "unexpected shadow write on computed property" - | RestParameterNotIdentifierPattern -> - "unexpected rest parameter, expected an identifier pattern" - | InterfaceTypeSpread -> - "unexpected spread property in interface" - | DebugThrow -> - "debug throw" - | MergeTimeout s -> - spf "merge job timed out after %0.2f seconds" s - | MergeJobException exc -> - "uncaught exception: "^(Utils_js.fmt_exc exc) - | UnexpectedTypeapp s -> - "unexpected typeapp: "^s - in - mk_error ~trace_infos ~kind:InternalError (loc |> ALoc.of_loc) - [text (spf "Internal error: %s" msg)] - - | EUnsupportedSyntax (loc, unsupported_syntax) -> - let msg = match unsupported_syntax with - | ComprehensionExpression - | GeneratorExpression - | MetaPropertyExpression -> - [text "Not supported."] - | ObjectPropertyLiteralNonString -> - [text "Non-string literal property keys not supported."] - | ObjectPropertyGetSet -> - [text "Get/set properties not yet supported."] - | ObjectPropertyComputedGetSet -> - [text "Computed getters and setters are not yet supported."] - | InvariantSpreadArgument -> - [text "Unsupported arguments in call to "; code "invariant"; text "."] - | ClassPropertyLiteral -> - [text "Literal properties not yet supported."] - | ClassPropertyComputed -> - [text "Computed property keys not supported."] - | ReactCreateClassPropertyNonInit -> - [text "Unsupported property specification in "; code "createClass"; text "."] - | RequireDynamicArgument -> - [text "The parameter passed to "; code "require"; text " must be a string literal."] - | ImportDynamicArgument -> - [text "The parameter passed to "; code "import"; text " must be a string literal."] - | RequireLazyDynamicArgument -> [ - text "The first argument to "; code "requireLazy"; text " must be an "; - text "array literal of string literals and the second argument must "; - text "be a callback."; - ] - | CatchParameterAnnotation -> - [text "Type annotations for catch parameters are not yet supported."] - | CatchParameterDeclaration -> - [text "Unsupported catch parameter declaration."] - | DestructuringObjectPropertyLiteralNonString -> - [text "Unsupported non-string literal object property in 
destructuring."] - | DestructuringExpressionPattern -> - [text "Unsupported expression pattern in destructuring."] - | PredicateDeclarationForImplementation -> - [text "Cannot declare predicate when a function body is present."] - | PredicateDeclarationWithoutExpression -> [ - text "Predicate function declarations need to declare a "; - text "predicate expression." - ] - | PredicateDeclarationAnonymousParameters -> [ - text "Predicate function declarations cannot use anonymous "; - text "function parameters."; - ] - | PredicateInvalidBody -> [ - text "Invalid body for predicate function. Expected a simple return "; - text "statement as body." - ] - | PredicateVoidReturn -> - [text "Predicate functions need to return non-void."] - | MultipleIndexers -> - [text "Multiple indexers are not supported."] - | SpreadArgument -> - [text "A spread argument is unsupported here."] - | IllegalName -> - [text "Illegal name."] - | UnsupportedInternalSlot {name; static = false} -> - [text "Unsupported internal slot "; code name; text "."] - | UnsupportedInternalSlot {name; static = true} -> - [text "Unsupported static internal slot "; code name; text "."] - in - mk_error ~trace_infos (loc |> ALoc.of_loc) msg - - | EUseArrayLiteral loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) - [text "Use an array literal instead of "; code "new Array(...)"; text "."] - - | EMissingAnnotation (reason, trace_reasons) -> - let tail = match (desc_of_reason reason) with - | RTypeParam (_, (reason_op_desc, reason_op_loc), (reason_tapp_desc, reason_tapp_loc)) -> - let reason_op = mk_reason reason_op_desc reason_op_loc in - let reason_tapp = mk_reason reason_tapp_desc reason_tapp_loc in - [text " "; desc reason; text " is a type parameter declared in "; ref reason_tapp; - text " and was implicitly instantiated at "; ref reason_op; text "."] - | _ -> [] in - (* We don't collect trace info in the assert_ground_visitor because traces - * represent tests of lower bounds to upper bounds, and the assert_ground - * visitor is just visiting types. Instead, we collect a list of types we - * visited to get to the missing annotation error and report that as the - * trace *) - let trace_infos = List.map info_of_reason trace_reasons in - mk_error ~trace_infos (aloc_of_reason reason) - ([text "Missing type annotation for "; desc reason; text "."] @ tail) - - | EBindingError (binding_error, loc, x, entry) -> - let desc = - if x = internal_name "this" then RThis - else if x = internal_name "super" then RSuper - else RIdentifier x - in - let x = mk_reason desc (Scope.Entry.entry_loc entry) in - let msg = match binding_error with - | ENameAlreadyBound -> - [text "Cannot declare "; ref x; text " because the name is already bound."] - | EReferencedBeforeDeclaration -> - if desc = RThis || desc = RSuper then [ - text "Must call "; code "super"; text " before accessing "; ref x; - text " in a derived constructor." 
- ] else [
- text "Cannot use variable "; ref x; text " because the declaration ";
- text "either comes later or was skipped.";
- ]
- | ETypeInValuePosition
- | ETypeAliasInValuePosition
- -> [text "Cannot reference type "; ref x; text " from a value position."]
- | EConstReassigned
- | EConstParamReassigned
- -> [text "Cannot reassign constant "; ref x; text "."]
- | EImportReassigned ->
- [text "Cannot reassign import "; ref x; text "."]
- in
- mk_error ~trace_infos (loc |> ALoc.of_loc) msg
-
- | ERecursionLimit (r, _) ->
- mk_error ~kind:RecursionLimitError (aloc_of_reason r)
- [text "*** Recursion limit exceeded ***"]
-
- | EModuleOutsideRoot (loc, package_relative_to_root) ->
- mk_error ~trace_infos (loc |> ALoc.of_loc) [
- text "This module resolves to "; code package_relative_to_root; text " which ";
- text "is outside both your root directory and all of the entries in the ";
- code "[include]"; text " section of your "; code ".flowconfig"; text ". ";
- text "You should either add this directory to the "; code "[include]"; text " ";
- text "section of your "; code ".flowconfig"; text ", move your ";
- code ".flowconfig"; text " file higher in the project directory tree, or ";
- text "move this package under your Flow root directory.";
- ]
-
- | EExperimentalDecorators loc ->
- mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [
- text "Experimental decorator usage. Decorators are an early stage ";
- text "proposal that may change. Additionally, Flow does not account for ";
- text "the type implications of decorators at this time.";
- ]
-
- | EExperimentalClassProperties (loc, static) ->
- let config_name, config_key =
- if static
- then "class static field", "class_static_fields"
- else "class instance field", "class_instance_fields"
- in
- mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [
- text ("Experimental " ^ config_name ^ " usage. ");
- text (String.capitalize_ascii config_name ^ "s are an active early stage ");
- text "feature proposal that may change. You may opt-in to using them ";
- text "anyway in Flow by putting "; code ("esproposal." ^ config_key ^ "=enable"); text " ";
- text "into the "; code "[options]"; text " section of your ";
- code ".flowconfig"; text ".";
- ]
-
- | EUnsafeGetSet loc ->
- mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [
- text "Potentially unsafe get/set usage. Getters and setters with side ";
- text "effects are potentially unsafe and so disabled by default. You may ";
- text "opt-in to using them anyway by putting ";
- code "unsafe.enable_getters_and_setters"; text " into the ";
- code "[options]"; text " section of your "; code ".flowconfig"; text ".";
- ]
-
- | EExperimentalExportStarAs loc ->
- mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [
- text "Experimental "; code "export * as"; text " usage. ";
- code "export * as"; text " is an active early stage feature proposal that ";
- text "may change. 
You may opt-in to using it anyway by putting "; - code "esproposal.export_star_as=enable"; text " into the "; - code "[options]"; text " section of your "; code ".flowconfig"; text "."; - ] - - | EIndeterminateModuleType loc -> - mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [ - text "Unable to determine module type (CommonJS vs ES) if both an export "; - text "statement and "; code "module.exports"; text " are used in the "; - text "same module!"; - ] - - | EBadExportPosition loc -> - mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [ - text "Exports can only appear at the top level" - ] - - | EBadExportContext (name, loc) -> - mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [ - code name; - text " may only be used as part of a legal top level export statement"; - ] - - | ESignatureVerification sve -> - let open Signature_builder_deps.Error in - begin match sve with - | ExpectedSort (sort, x, loc) -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - code x; - text (spf " is not a %s." (Signature_builder_kind.Sort.to_string sort)) - ] - | ExpectedAnnotation loc -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - text "Missing type annotation:" - ] - | InvalidTypeParamUse loc -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - text "Invalid use of type parameter:" - ] - | UnexpectedObjectKey loc -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - text "Expected simple object key:" - ] - | UnexpectedExpression (loc, esort) -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - text ( - spf "Expected literal expression instead of %s, try using a type cast." - (Ast_utils.ExpressionSort.to_string esort) - ) - ] - | SketchyToplevelDef loc -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - text "Unexpected toplevel definition that needs hoisting:" - ] - | TODO (msg, loc) -> - mk_signature_verification_error (loc |> ALoc.of_loc) [ - text (spf "TODO: %s is not supported yet, try using a type cast." 
msg) - ] - end - - | EUnreachable loc -> - mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) - [text "Unreachable code."] - - | EInvalidObjectKit { tool=_; reason; reason_op=_; use_op } -> - mk_use_op_error (aloc_of_reason reason) use_op - [ref reason; text " is not an object"] - - | EInvalidTypeof (loc, typename) -> - mk_error ~trace_infos ~kind:InferWarning (loc |> ALoc.of_loc) [ - text "Cannot compare the result of "; code "typeof"; text " to string "; - text "literal "; code typename; text " because it is not a valid "; - code "typeof"; text " return value."; - ] - - | EArithmeticOperand reason -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot perform arithmetic operation because "; ref reason; text " "; - text "is not a number."; - ] - - | EBinaryInLHS reason -> - (* TODO: or symbol *) - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot use "; code "in"; text " because on the left-hand side, "; - ref reason; text " must be a string or number."; - ] - - | EBinaryInRHS reason -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot use "; code "in"; text " because on the right-hand side, "; - ref reason; text " must be an object or array."; - ] - - | EForInRHS reason -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot iterate using a "; code "for...in"; text " statement "; - text "because "; ref reason; text " is not an object, null, or undefined."; - ] - - | EObjectComputedPropertyAccess (_, reason_prop) -> - mk_error ~trace_infos (aloc_of_reason reason_prop) - [text "Cannot access computed property using "; ref reason_prop; text "."] - - | EObjectComputedPropertyAssign (_, reason_prop) -> - mk_error ~trace_infos (aloc_of_reason reason_prop) - [text "Cannot assign computed property using "; ref reason_prop; text "."] - - | EInvalidLHSInAssignment loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) - [text "Invalid left-hand side in assignment expression."] - - | EIncompatibleWithUseOp (l_reason, u_reason, use_op) -> - mk_incompatible_error l_reason u_reason use_op - - | EUnsupportedImplements reason -> - mk_error ~trace_infos (aloc_of_reason reason) - [text "Cannot implement "; desc reason; text " because it is not an interface."] - - | EReactKit (reasons, tool, use_op) -> - let open React in - let (_, reason) = reasons in - let is_not_prop_type = "is not a React propType" in - let msg = match tool with - | GetProps _ - | GetConfig _ - | GetRef _ - | CreateElement0 _ - | CreateElement _ - -> "is not a React component" - | SimplifyPropType (tool, _) -> - SimplifyPropType.(match tool with - | ArrayOf -> is_not_prop_type - | InstanceOf -> "is not a class" - | ObjectOf -> is_not_prop_type - | OneOf ResolveArray -> "is not an array" - | OneOf (ResolveElem _) -> "is not a literal" - | OneOfType ResolveArray -> "is not an array" - | OneOfType (ResolveElem _) -> is_not_prop_type - | Shape ResolveObject -> "is not an object" - | Shape (ResolveDict _) -> is_not_prop_type - | Shape (ResolveProp _) -> is_not_prop_type - ) - | CreateClass (tool, _, _) -> - CreateClass.(match tool with - | Spec _ -> "is not an exact object" - | Mixins _ -> "is not a tuple" - | Statics _ -> "is not an object" - | PropTypes (_, ResolveObject) -> "is not an object" - | PropTypes (_, ResolveDict _) -> is_not_prop_type - | PropTypes (_, ResolveProp _) -> is_not_prop_type - | DefaultProps _ -> "is not an object" - | InitialState _ -> "is not an object or null" - ) - in - mk_use_op_error (aloc_of_reason reason) use_op - [ref reason; text (" " ^ msg)] - - 
| EReactElementFunArity (reason, fn, n) -> - mk_error ~trace_infos (aloc_of_reason reason) [ - text "Cannot call "; code ("React." ^ fn); text " "; - text (spf "without at least %d argument%s." n (if n == 1 then "" else "s")); - ] - - | EFunctionCallExtraArg (unused_reason, def_reason, param_count, use_op) -> - let msg = match param_count with - | 0 -> "no arguments are expected by" - | 1 -> "no more than 1 argument is expected by" - | n -> spf "no more than %d arguments are expected by" n - in - mk_use_op_error (aloc_of_reason unused_reason) use_op - [text msg; text " "; ref def_reason] - - | EUnsupportedSetProto reason -> - mk_error ~trace_infos (aloc_of_reason reason) - [text "Mutating this prototype is unsupported."] - - | EDuplicateModuleProvider {module_name; provider; conflict} -> - let (loc1, loc2) = Loc.( - let pos = { line = 1; column = 0; offset = 0 } in - let loc1 = { source = Some conflict; start = pos; _end = pos } in - let loc2 = { source = Some provider; start = pos; _end = pos } in - (loc1, loc2) - ) in - mk_error ~trace_infos ~kind:DuplicateProviderError (loc1 |> ALoc.of_loc) [ - text "Duplicate module provider for "; code module_name; text ". Change "; - text "either this module provider or the "; - ref (mk_reason (RCustom "current module provider") loc2); - text "."; - ] - - | EParseError (loc, parse_error) -> - mk_error ~kind:ParseError (loc |> ALoc.of_loc) - (Friendly.message_of_string (Parse_error.PP.error parse_error)) - - | EDocblockError (loc, err) -> - let msg = match err with - | MultipleFlowAttributes -> [ - text "Unexpected "; code "@flow"; text " declaration. Only one per "; - text "file is allowed."; - ] - | MultipleProvidesModuleAttributes -> [ - text "Unexpected "; code "@providesModule"; text " declaration. "; - text "Only one per file is allowed."; - ] - | MultipleJSXAttributes -> [ - text "Unexpected "; code "@jsx"; text " declaration. Only one per "; - text "file is allowed."; - ] - | InvalidJSXAttribute first_error -> [ - text "Invalid "; code "@jsx"; text " declaration. Should have the form "; - code "@jsx LeftHandSideExpression"; text " with no spaces."; - ] @ - match first_error with - | None -> [] - | Some first_error -> [text (spf " Parse error: %s." first_error)] - in - mk_error ~kind:ParseError (loc |> ALoc.of_loc) msg - - | EUntypedTypeImport (loc, module_name) -> - mk_error ~trace_infos ~kind:(LintError Lints.UntypedTypeImport) (loc |> ALoc.of_loc) [ - text "Importing a type from an untyped module makes it "; code "any"; text " "; - text "and is not safe! Did you mean to add "; code "// @flow"; text " to "; - text "the top of "; code module_name; text "?"; - ] - - | EUntypedImport (loc, module_name) -> - mk_error ~trace_infos ~kind:(LintError Lints.UntypedImport) (loc |> ALoc.of_loc) [ - text "Importing from an untyped module makes it "; code "any"; text " "; - text "and is not safe! Did you mean to add "; code "// @flow"; text " "; - text "to the top of "; code module_name; text "?"; - ] - - | ENonstrictImport loc -> - mk_error ~trace_infos ~kind:(LintError Lints.NonstrictImport) (loc |> ALoc.of_loc) [ - text "Dependencies of a "; code "@flow strict"; text " module must "; - text "also be "; code "@flow strict"; text "!" - ] - - | EUnclearType loc -> - mk_error ~trace_infos ~kind:(LintError Lints.UnclearType) (loc |> ALoc.of_loc) [ - text "Unclear type. Using "; code "any"; text ", "; - code "Object"; text ", "; code "Function"; text ", "; - code "$Subtype<...>"; text ", or "; code "$Supertype<...>"; text " types is not safe!" 
- ] - - | EDeprecatedType loc -> - mk_error ~trace_infos ~kind:(LintError Lints.DeprecatedType) (loc |> ALoc.of_loc) [ - text "Deprecated type. Using "; code "*"; text " types is not recommended!" - ] - - | EUnsafeGettersSetters loc -> - mk_error ~trace_infos ~kind:(LintError Lints.UnsafeGettersSetters) (loc |> ALoc.of_loc) - [text "Getters and setters can have side effects and are unsafe."] - - | EDeprecatedCallSyntax loc -> - mk_error ~trace_infos ~kind:(LintError Lints.DeprecatedCallSyntax) (loc |> ALoc.of_loc) - [text "Deprecated $call syntax. Use callable property syntax instead."] - - | EUnusedSuppression loc -> - mk_error ~trace_infos (loc |> ALoc.of_loc) - [text "Unused suppression comment."] - - | ELintSetting (loc, kind) -> - let msg = match kind with - | LintSettings.Redundant_argument -> [ - text "Redundant argument. This argument doesn't change any lint settings." - ] - | LintSettings.Overwritten_argument -> [ - text "Redundant argument. The values set by this argument are "; - text "overwritten later in this comment."; - ] - | LintSettings.Naked_comment -> [ - text "Malformed lint rule. At least one argument is required." - ] - | LintSettings.Nonexistent_rule -> [ - text "Nonexistent/misspelled lint rule. Perhaps you have a "; - text "missing/extra "; code ","; text "?"; - ] - | LintSettings.Invalid_setting -> [ - text "Invalid setting. Valid settings are error, warn, and off." - ] - | LintSettings.Malformed_argument -> [ - text "Malformed lint rule. Properly formed rules contain a single "; - code ":"; text " character. Perhaps you have a missing/extra "; - code ","; text "?"; - ] - in - mk_error ~trace_infos ~kind:ParseError (loc |> ALoc.of_loc) msg - - | ESketchyNullLint { kind; loc; falsy_loc; null_loc } -> - let type_str, value_str = match kind with - | Lints.SketchyNullBool -> "boolean", "false" - | Lints.SketchyNullNumber -> "number", "0" - | Lints.SketchyNullString -> "string", "an empty string" - | Lints.SketchyNullMixed -> "mixed", "false" - in - mk_error ~trace_infos ~kind:(LintError (Lints.SketchyNull kind)) (loc |> ALoc.of_loc) [ - text "Sketchy null check on "; ref (mk_reason (RCustom type_str) falsy_loc); text " "; - text "which is potentially "; text value_str; text ". Perhaps you meant to "; - text "check for "; ref (mk_reason RNullOrVoid null_loc); text "?"; - ] - - | ESketchyNumberLint (Lints.SketchyNumberAnd, reason) -> - mk_error ~trace_infos ~kind:Lints.(LintError (SketchyNumber SketchyNumberAnd)) (aloc_of_reason reason) [ - text "Avoid using "; code "&&"; text " to check the value of "; ref reason; text ". "; - text "Consider handling falsy values (0 and NaN) by using a conditional to choose an "; - text "explicit default instead."; - ] - - | EInvalidPrototype reason -> - mk_error ~trace_infos (aloc_of_reason reason) - [text "Cannot use "; ref reason; text " as a prototype. Expected an object or null."] - - | EExperimentalOptionalChaining loc -> - mk_error ~trace_infos ~kind:ParseError (loc |> ALoc.of_loc) [ - text "Experimental optional chaining ("; code "?."; text ") usage. "; - text "Optional chaining is an active early-stage feature proposal that "; - text "may change. You may opt in to using it anyway by putting "; - code "esproposal.optional_chaining=enable"; text " into the "; - code "[options]"; text " section of your "; code ".flowconfig"; text "."; - ] - - | EOptionalChainingMethods loc -> - mk_error ~trace_infos ~kind:ParseError (loc |> ALoc.of_loc) [ - text "Flow does not yet support method or property calls in optional chains." 
- ] - - | EUnnecessaryOptionalChain (loc, lhs_reason) -> - mk_error ~trace_infos ~kind:(LintError Lints.UnnecessaryOptionalChain) (loc |> ALoc.of_loc) [ - text "This use of optional chaining ("; code "?."; text ") is unnecessary because "; - ref lhs_reason; text " cannot be nullish or because an earlier "; code "?."; - text " will short-circuit the nullish case."; - ] - - | EUnnecessaryInvariant (loc, reason) -> - mk_error ~trace_infos ~kind:(LintError Lints.UnnecessaryInvariant) (loc |> ALoc.of_loc) [ - text "This use of `invariant` is unnecessary because "; ref reason; - text " is always truthy." - ] - - | EInexactSpread (reason, reason_op) -> - mk_error ~kind:(LintError Lints.InexactSpread) (aloc_of_reason reason) [ - text "Cannot determine the type of "; ref reason_op; text " because "; - text "it contains a spread of inexact "; ref reason; text ". "; - text "Being inexact, "; ref reason; - text " might be missing the types of some properties that are being copied. "; - text "Perhaps you could make it exact?" - ] - -let is_lint_error = function - | EUntypedTypeImport _ - | EUntypedImport _ - | ENonstrictImport _ - | EUnclearType _ - | EDeprecatedType _ - | EUnsafeGettersSetters _ - | ESketchyNullLint _ - | ESketchyNumberLint _ - | EInexactSpread _ - | EUnnecessaryOptionalChain _ - | EUnnecessaryInvariant _ - -> true - | _ -> false diff --git a/src/typing/flow_js.ml b/src/typing/flow_js.ml index ce7fa55272b..cafe988af17 100644 --- a/src/typing/flow_js.ml +++ b/src/typing/flow_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -19,20 +19,22 @@ is guaranteed to exist, and is usually reached in very few steps. *) open Utils_js +open Loc_collections open Reason open Constraint open Type - +open Debug_js.Verbose module FlowError = Flow_error (* type exemplar set - reasons are not considered in compare *) -module TypeExSet = Set.Make(struct +module TypeExSet = Set.Make (struct include Type + let compare = reasonless_compare end) let matching_sentinel_prop reason key sentinel_value = - MatchingPropT (reason, key, DefT (reason, sentinel_value)) + MatchingPropT (reason, key, DefT (reason, bogus_trust (), sentinel_value)) (**************************************************************) @@ -70,73 +72,77 @@ let not_linked (id1, _bounds1) (_id2, bounds2) = of scopes they share, and havoc the variables in the called function's write set which live in those scopes. 
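For intuition, a minimal standalone sketch of the shared-scope computation described in the comment above, with plain integer scope ids standing in for Scope.t values; overlapped and the sample environments are illustrative names only, not part of this change.

(* A standalone model of overlapped_call_scopes: after List.rev the outermost
   scopes come first, and we keep scopes until the two environments diverge. *)
let overlapped func_env call_env =
  let rec loop = function
    | (f :: fs, c :: cs) when f = c -> c :: loop (fs, cs)
    | _ -> []
  in
  loop (List.rev func_env, List.rev call_env)

let () =
  (* both environments were built on top of the shared outer scopes 0 and 1 *)
  assert (overlapped [3; 2; 1; 0] [5; 4; 1; 0] = [0; 1])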
*) -let havoc_call_env = Scope.( - - let overlapped_call_scopes func_env call_env = - let rec loop = function - | func_scope :: func_scopes, call_scope :: call_scopes - when func_scope.id = call_scope.id -> - call_scope :: loop (func_scopes, call_scopes) - | _ -> [] +let havoc_call_env = + Scope.( + let overlapped_call_scopes func_env call_env = + let rec loop = function + | (func_scope :: func_scopes, call_scope :: call_scopes) when func_scope.id = call_scope.id + -> + call_scope :: loop (func_scopes, call_scopes) + | _ -> [] + in + loop (List.rev func_env, List.rev call_env) in - loop (List.rev func_env, List.rev call_env) - in - - let havoc_entry cx scope ((_, name, _) as entry_ref) = - (if Context.is_verbose cx then - prerr_endlinef "%shavoc_entry %s %s" - (Context.pid_prefix cx) - (Changeset.string_of_entry_ref entry_ref) - (Debug_js.string_of_scope cx scope) - ); - match get_entry name scope with - | Some _ -> - havoc_entry name scope; - Changeset.(if is_active () then change_var entry_ref) - | None -> - (* global scopes may lack entries, if function closes over + let havoc_entry cx scope ((_, name, _) as entry_ref) = + if Context.is_verbose cx then + prerr_endlinef + "%shavoc_entry %s %s" + (Context.pid_prefix cx) + (Changeset.string_of_entry_ref entry_ref) + (Debug_js.string_of_scope cx scope); + match get_entry name scope with + | Some _ -> + havoc_entry name scope; + Changeset.(if Global.is_active () then Global.change_var entry_ref) + | None -> + (* global scopes may lack entries, if function closes over path-refined global vars (artifact of deferred lookup) *) - if is_global scope then () - else assert_false (spf "missing entry %S in scope %d: { %s }" - name scope.id (String.concat ", " - (SMap.fold (fun n _ acc -> n :: acc) scope.entries []))) - in - - let havoc_refi cx scope ((_, key, _) as refi_ref) = - (if Context.is_verbose cx then - prerr_endlinef "%shavoc_refi %s" - (Context.pid_prefix cx) - (Changeset.string_of_refi_ref refi_ref)); - match get_refi key scope with - | Some _ -> - havoc_refi key scope; - Changeset.(if is_active () then change_refi refi_ref) - | None -> - (* global scopes may lack entries, if function closes over + if is_global scope then + () + else + assert_false + (spf + "missing entry %S in scope %d: { %s }" + name + scope.id + (String.concat ", " (SMap.fold (fun n _ acc -> n :: acc) scope.entries []))) + in + let havoc_refi cx scope ((_, key, _) as refi_ref) = + if Context.is_verbose cx then + prerr_endlinef + "%shavoc_refi %s" + (Context.pid_prefix cx) + (Changeset.string_of_refi_ref refi_ref); + match get_refi key scope with + | Some _ -> + havoc_refi key scope; + Changeset.(if Global.is_active () then Global.change_refi refi_ref) + | None -> + (* global scopes may lack entries, if function closes over path-refined global vars (artifact of deferred lookup) *) - if is_global scope then () - else assert_false (spf "missing refi %S in scope %d: { %s }" - (Key.string_of_key key) scope.id - (String.concat ", " (Key_map.fold ( - fun k _ acc -> (Key.string_of_key k) :: acc) scope.refis []))) - in - - fun cx func_frame call_frame changeset -> - if func_frame = 0 || call_frame = 0 || Changeset.is_empty changeset - then () - else - let func_env = IMap.get func_frame (Context.envs cx) in - let call_env = IMap.get call_frame (Context.envs cx) in - Option.iter (Option.both func_env call_env) ~f:(fun (func_env, call_env) -> - overlapped_call_scopes func_env call_env |> - List.iter (fun ({ id; _ } as scope) -> - Changeset.include_scopes [id] changeset |> - 
Changeset.iter_writes - (havoc_entry cx scope) - (havoc_refi cx scope) - ) - ); -) + if is_global scope then + () + else + assert_false + (spf + "missing refi %S in scope %d: { %s }" + (Key.string_of_key key) + scope.id + (String.concat + ", " + (Key_map.fold (fun k _ acc -> Key.string_of_key k :: acc) scope.refis []))) + in + fun cx func_frame call_frame changeset -> + if func_frame = 0 || call_frame = 0 || Changeset.is_empty changeset then + () + else + let func_env = IMap.get func_frame (Context.envs cx) in + let call_env = IMap.get call_frame (Context.envs cx) in + Option.iter (Option.both func_env call_env) ~f:(fun (func_env, call_env) -> + overlapped_call_scopes func_env call_env + |> List.iter (fun ({ id; _ } as scope) -> + Changeset.include_scopes [id] changeset + |> Changeset.iter_writes (havoc_entry cx scope) (havoc_refi cx scope)))) (********************************************************************) @@ -155,11 +161,12 @@ let visit_eval_id cx id f = let types_of constraints = match constraints with | Unresolved { lower; _ } -> TypeMap.keys lower - | Resolved t -> [t] + | Resolved (_, t) + | FullyResolved (_, t) -> + [t] (* Def types that describe the solution of a type variable. *) -let possible_types cx id = types_of (Context.find_graph cx id) - |> List.filter is_proper_def +let possible_types cx id = types_of (Context.find_graph cx id) |> List.filter is_proper_def let possible_types_of_type cx = function | OpenT (_, id) -> possible_types cx id @@ -168,333 +175,11 @@ let possible_types_of_type cx = function let uses_of constraints = match constraints with | Unresolved { upper; _ } -> UseTypeMap.keys upper - | Resolved t -> [UseT (unknown_use, t)] - -let possible_uses cx id = uses_of (Context.find_graph cx id) - -let rec list_map2 f ts1 ts2 = match (ts1,ts2) with - | ([],_) | (_,[]) -> [] - | (t1::ts1,t2::ts2) -> (f (t1,t2)):: (list_map2 f ts1 ts2) - -let rec merge_type cx = - let create_union rep = - DefT (locationless_reason (RCustom "union"), UnionT rep) - in - - let create_intersection rep = - DefT (locationless_reason (RCustom "intersection"), IntersectionT rep) - in - - function - | DefT (_, NumT _), (DefT (_, NumT _) as t) - | DefT (_, StrT _), (DefT (_, StrT _) as t) - | DefT (_, BoolT _), (DefT (_, BoolT _) as t) - | DefT (_, NullT), (DefT (_, NullT) as t) - | DefT (_, VoidT), (DefT (_, VoidT) as t) - | DefT (_, AnyObjT), (DefT (_, AnyObjT) as t) - -> t - - | (ObjProtoT _, (ObjProtoT _ as t)) - -> t - - | DefT (_, AnyT), t | t, DefT (_, AnyT) -> t - - | DefT (_, EmptyT), t | t, DefT (_, EmptyT) -> t - | _, (DefT (_, MixedT _) as t) | (DefT (_, MixedT _) as t), _ -> t - - | DefT (_, NullT), (DefT (_, MaybeT _) as t) | (DefT (_, MaybeT _) as t), DefT (_, NullT) - | DefT (_, VoidT), (DefT (_, MaybeT _) as t) | (DefT (_, MaybeT _) as t), DefT (_, VoidT) - -> t - - | (DefT (_, FunT (_,_,ft1)) as fun1), (DefT (_, FunT (_,_,ft2)) as fun2) -> - (* Functions with different number of parameters cannot be merged into a - * single function type. Instead, we should turn them into a union *) - let params = - if List.length ft1.params <> List.length ft2.params then None else - let params = List.map2 (fun (name1, t1) (name2, t2) -> - (* TODO: How to merge param names? 
*) - let name = match name1, name2 with - | None, None -> None - | Some name, _ - | _, Some name -> Some name - in - name, merge_type cx (t1, t2) - ) ft1.params ft2.params in - match ft1.rest_param, ft2.rest_param with - | None, Some _ - | Some _, None -> None - | None, None -> Some (params, None) - | Some r1, Some r2 -> Some (params, Some (r1, r2)) - in - begin match params with - | None -> create_union (UnionRep.make fun1 fun2 []) - | Some (params, rest_params) -> - let params_names, tins = List.split params in - let rest_param = match rest_params with - | None -> None - | Some ((name1, loc, rest_t1), (name2, _, rest_t2)) -> - (* TODO: How to merge rest names and locs? *) - let name = match name1, name2 with - | None, None -> None - | Some name, _ - | _, Some name -> Some name in - Some (name, loc, merge_type cx (rest_t1, rest_t2)) - in - let tout = merge_type cx (ft1.return_t, ft2.return_t) in - let reason = locationless_reason (RCustom "function") in - DefT (reason, FunT ( - dummy_static reason, - dummy_prototype, - mk_functiontype reason tins tout ~rest_param - ~def_reason:reason ~params_names - )) - end - - | (DefT (_, ObjT o1) as t1), (DefT (_, ObjT o2) as t2) -> - let map1 = Context.find_props cx o1.props_tmap in - let map2 = Context.find_props cx o2.props_tmap in - - (* Create an intermediate map of booleans indicating whether two objects can - * be merged, based on the properties in each map. *) - let merge_map = SMap.merge (fun _ p1_opt p2_opt -> - match p1_opt, p2_opt with - | None, None -> None - (* In general, even objects with disjoint key sets can not be merged due - * to width subtyping. For example, {x:T} and {y:U} is not the same as - * {x:T,y:U}, because {x,y} is a valid inhabitant of {x:T} and the type of - * y may != U. However, if either object type is exact, disjointness is - * sufficient. *) - | Some _, None | None, Some _ -> Some (o1.flags.exact || o2.flags.exact) - (* Covariant fields can be merged. *) - | Some (Field (_, _, Positive)), Some (Field (_, _, Positive)) -> Some true - (* Getters are covariant and thus can be merged. *) - | Some (Get _), Some (Get _) -> Some true - (* Anything else is can't be merged. *) - | _ -> Some false - ) map1 map2 in - - let merge_dict = match o1.dict_t, o2.dict_t with - (* If neither object has an indexer, neither will the merged object. *) - | None, None -> Some None - (* If both objects covariant indexers, we can merge them. However, if the - * key types are disjoint, the resulting dictionary is not useful. *) - | Some {key = k1; value = v1; dict_polarity = Positive; _}, - Some {key = k2; value = v2; dict_polarity = Positive; _} -> - (* TODO: How to merge indexer names? *) - Some (Some { - dict_name = None; - key = create_intersection (InterRep.make k1 k2 []); - value = merge_type cx (v1, v2); - dict_polarity = Positive; - }) - (* Don't merge objects with possibly incompatible indexers. *) - | _ -> None - in - - let merge_call = match o1.call_t, o2.call_t with - | None, None -> Some None - | Some _, None -> if o2.flags.exact then Some o1.call_t else None - | None, Some _ -> if o1.flags.exact then Some o2.call_t else None - | Some id1, Some id2 -> - let c1 = Context.find_call cx id1 in - let c2 = Context.find_call cx id2 in - let id = Context.make_call_prop cx (create_union (UnionRep.make c1 c2 [])) in - Some (Some id) - in - - (* Only merge objects if every property can be merged. *) - let should_merge = SMap.for_all (fun _ x -> x) merge_map in - - (* Don't merge objects with different prototypes. 
*) - let should_merge = should_merge && o1.proto_t = o2.proto_t in - - (match should_merge, merge_dict, merge_call with - | true, Some dict, Some call -> - let map = SMap.merge (fun _ p1_opt p2_opt -> - match p1_opt, p2_opt with - (* Merge disjoint+exact objects. *) - | Some t, None - | None, Some t -> Some t - (* Shouldn't happen, per merge_map above. *) - | _ -> None - ) map1 map2 in - let id = Context.make_property_map cx map in - let sealed = match o1.flags.sealed, o2.flags.sealed with - | Sealed, Sealed -> Sealed - | UnsealedInFile s1, UnsealedInFile s2 when s1 = s2 -> UnsealedInFile s1 - | _ -> UnsealedInFile None - in - let flags = { - sealed; - exact = o1.flags.exact && o2.flags.exact; - frozen = o1.flags.frozen && o2.flags.frozen; - } in - let reason = locationless_reason (RCustom "object") in - mk_object_def_type ~reason ~flags ~dict ~call id o1.proto_t - | _ -> - create_union (UnionRep.make t1 t2 [])) - - | DefT (_, ArrT (ArrayAT (t1, ts1))), - DefT (_, ArrT (ArrayAT (t2, ts2))) -> - let tuple_types = match ts1, ts2 with - | None, _ - | _, None -> None - | Some ts1, Some ts2 -> Some (list_map2 (merge_type cx) ts1 ts2) in - - DefT (locationless_reason (RCustom "array"), - ArrT (ArrayAT( merge_type cx (t1, t2), tuple_types)) - ) - - | DefT (_, ArrT (TupleAT (t1, ts1))), - DefT (_, ArrT (TupleAT(t2, ts2))) when List.length ts1 = List.length ts2 -> - - DefT (locationless_reason (RCustom "tuple"), - ArrT (TupleAT (merge_type cx (t1, t2), list_map2 (merge_type cx) ts1 ts2)) - ) - - | DefT (_, ArrT (ROArrayAT elemt1)), - DefT (_, ArrT (ROArrayAT elemt2)) -> - - DefT (locationless_reason (RCustom "read only array"), - ArrT (ROArrayAT (merge_type cx (elemt1, elemt2))) - ) - - | DefT (_, ArrT EmptyAT), - DefT (_, ArrT EmptyAT) -> - - DefT (locationless_reason (RCustom "empty array"), ArrT EmptyAT) - - | (DefT (_, MaybeT t1), DefT (_, MaybeT t2)) - | (DefT (_, MaybeT t1), t2) - | (t1, DefT (_, MaybeT t2)) -> - let t = merge_type cx (t1, t2) in - let reason = locationless_reason (RMaybe (desc_of_t t)) in - DefT (reason, MaybeT t) - - | DefT (_, UnionT rep1), DefT (_, UnionT rep2) -> - create_union (UnionRep.rev_append rep1 rep2) - - | (DefT (_, UnionT rep), t) - | (t, DefT (_, UnionT rep)) -> - create_union (UnionRep.cons t rep) - - (* TODO: do we need to do anything special for merging Null with Void, - Optional with other types, etc.? *) - - | (t1, t2) -> - create_union (UnionRep.make t1 t2 []) - -and resolve_type cx = function - | OpenT tvar -> resolve_tvar cx tvar - | t -> t - -and resolve_tvar cx (_, id) = - let ts = possible_types cx id in - (* The list of types returned by possible_types is often empty, and the - most common reason is that we don't have enough type coverage to - resolve id. Thus, we take the unit of merging to be `any`. (Something - similar happens when summarizing exports in ContextOptimizer.) - - In the future, we might report errors in some cases where - possible_types returns an empty list: e.g., when we detect unreachable - code, or even we don't have enough type coverage. Irrespective of these - changes, the above decision would continue to make sense: as errors - become stricter, type resolution should become even more lenient to - improve failure tolerance. *) - List.fold_left (fun u t -> - merge_type cx (t, u) - ) Locationless.AnyT.t ts - -(** The following functions do "shallow" walks over types, respectively from - requires and from exports, in order to report missing annotations. 
There are - some opportunities for future work: - - - Rewrite these functions using a type visitor class. - - - Consider using gc to crawl the graph further down from requires, and - maybe also up from exports. Preliminary experiments along those lines - suggest that a general walk doesn't always give expected results. As an - example in one direction, the signature of a class is reachable from a - `require`d superclass, but the corresponding constraint simply checks for - consistency of overrides, and should not relax reporting missing annotations - in the signature. As an example in the other direction, an exported function - may have an open `this` type that we cannot expect to be annotated. -**) - -(* To avoid complaining about "missing" annotations where external types are - used in the exported type, we mark requires and their uses as types. *) - -(* TODO: All said and done, this strategy to avoid complaining about missing - annotations that depend on requires is a hack intended to achieve the ideal - of being able to "look up" annotations in required modules, when they're - already provided. The latter should be possible if we switch reporting - missing annotations from early (during the "infer" phase) to late (during - the "merge" phase). *) - -let rec assume_ground cx ?(depth=1) ids t = - begin match Context.verbose cx with - | Some { Verbose.depth = verbose_depth; indent; enabled_during_flowlib=_; } -> - let pid = Context.pid_prefix cx in - let indent = String.make ((depth - 1) * indent) ' ' in - prerr_endlinef "\n%s%sassume_ground: %s" - indent pid (Debug_js.dump_use_t cx ~depth:verbose_depth t) - | None -> () - end; - begin match t with - | UseT (_, OpenT(_,id)) -> - assume_ground_id ~depth:(depth + 1) cx ids id - - (** The subset of operations to crawl. The type variables denoting the - results of these operations would be ignored by the is_required check in - `assert_ground`. - - These are intended to be exactly the operations that might be involved - when extracting (parts of) requires/imports. As such, they need to be - kept in sync as module system conventions evolve. *) - - | ReposLowerT (_, _, use_t) -> - assume_ground cx ~depth:(depth + 1) ids use_t - - | ImportModuleNsT (_, t, _) - | CJSRequireT (_, t, _) - | ImportTypeT (_, _, t) - | ImportTypeofT (_, _, t) - - (** Other common operations that might happen immediately after extracting - (parts of) requires/imports. 
*) + | Resolved (use_op, t) + | FullyResolved (use_op, t) -> + [UseT (use_op, t)] - | GetPropT (_, _, _, t) - | CallT (_, _, { call_tout = t; _ }) - | MethodT (_, _, _, _, { call_tout = t; _ }, _) - | ConstructorT (_, _, _, _, t) -> - assume_ground cx ~depth:(depth + 1) ids (UseT (unknown_use, t)) - - | _ -> () - end; - if Context.is_verbose cx then - let pid = Context.pid_prefix cx in - if depth = 1 then - prerr_endlinef "\n%sAssumed ground: %s" - pid - (!ids |> ISet.elements |> List.map string_of_int |> String.concat ", ") - -and assume_ground_id cx ~depth ids_ref id = - let root_id, constraints = Context.find_constraints cx id in - let ids = !ids_ref in - let ids' = ISet.add root_id ids in - if ids' != ids then ( - ids_ref := ids'; - match constraints with - | Unresolved { upper; uppertvars; _ } -> - upper |> UseTypeMap.iter (fun t _ -> - assume_ground cx ~depth ids_ref t - ); - uppertvars |> IMap.iter (fun id _ -> - assume_ground_id cx ~depth ids_ref id - ) - | Resolved _ -> - () - ) +let possible_uses cx id = uses_of (Context.find_graph cx id) |> List.filter is_proper_use (**************) (* builtins *) @@ -511,8 +196,7 @@ let mk_builtins cx = let lookup_module cx m = Context.find_module cx m (* The builtins reference is accessed just like references to other modules. *) -let builtins cx = - lookup_module cx Files.lib_module_ref +let builtins cx = lookup_module cx Files.lib_module_ref (***********************) (* instantiation utils *) @@ -528,17 +212,21 @@ module ImplicitTypeArgument = struct (* Create a reason that is positioned at reason_op, but has a def_loc at * typeparam.reason. *) let loc_op = aloc_of_reason reason_op in - let desc = RTypeParam (typeparam.name, (desc_of_reason reason_op, loc_op |> ALoc.to_loc), - (desc_of_reason reason_tapp, def_loc_of_reason reason_tapp)) in - let reason = mk_reason desc (def_loc_of_reason typeparam.reason) in - let reason = repos_reason (loc_op |> ALoc.to_loc) reason in + let desc = + RTypeParam + ( typeparam.name, + (desc_of_reason reason_op, loc_op), + (desc_of_reason reason_tapp, def_aloc_of_reason reason_tapp) ) + in + let reason = mk_reason desc (def_aloc_of_reason typeparam.reason) in + let reason = repos_reason loc_op reason in Tvar.mk cx reason (* Abstract a type argument that is created by implicit instantiation above. Sometimes, these type arguments are involved in type expansion loops, so we abstract them to detect such loops. *) let abstract_targ tvar = - let reason, _ = open_tvar tvar in + let (reason, _) = open_tvar tvar in let desc = desc_of_reason reason in match desc with | RTypeParam _ -> Some (OpenT (locationless_reason desc, 0)) @@ -561,53 +249,58 @@ end module TypeAppExpansion : sig type entry - val push_unless_loop : Context.t -> (Type.t * Type.t list) -> bool + + val push_unless_loop : Context.t -> Type.t * Type.t list -> bool + val pop : unit -> unit + val get : unit -> entry list + val set : entry list -> unit end = struct type entry = Type.t * TypeSet.t list - let stack = ref ([]: entry list) + + let stack = ref ([] : entry list) (* visitor to collect roots of type applications nested in a type *) - let roots_collector = object - inherit [TypeSet.t] Type_visitor.t as super - - method! 
type_ cx pole acc t = match t with - | DefT (_, TypeAppT (_, c, _)) -> super#type_ cx pole (TypeSet.add c acc) t - | OpenT _ -> (match ImplicitTypeArgument.abstract_targ t with - | None -> acc - | Some t -> TypeSet.add t acc - ) - | _ -> super#type_ cx pole acc t - end + let roots_collector = + object + inherit [TypeSet.t] Type_visitor.t as super + + method! type_ cx pole acc t = + match t with + | TypeAppT (_, _, c, _) -> super#type_ cx pole (TypeSet.add c acc) t + | OpenT _ -> + (match ImplicitTypeArgument.abstract_targ t with + | None -> acc + | Some t -> TypeSet.add t acc) + | _ -> super#type_ cx pole acc t + end - let collect_roots cx = roots_collector#type_ cx Neutral TypeSet.empty + let collect_roots cx = roots_collector#type_ cx Polarity.Neutral TypeSet.empty (* Util to stringify a list, given a separator string and a function that maps elements of the list to strings. Should probably be moved somewhere else for general reuse. *) - let string_of_list list sep f = - list |> List.map f |> String.concat sep + let string_of_list list sep f = list |> Core_list.map ~f |> String.concat sep let string_of_desc_of_t t = DescFormat.name_of_instance_reason (reason_of_t t) (* show entries in the stack *) let show_entry (c, tss) = - spf "%s<%s>" (string_of_desc_of_t c) ( - string_of_list tss "," (fun ts -> - let ts = TypeSet.elements ts in - spf "[%s]" (string_of_list ts ";" string_of_desc_of_t) - )) + spf + "%s<%s>" + (string_of_desc_of_t c) + (string_of_list tss "," (fun ts -> + let ts = TypeSet.elements ts in + spf "[%s]" (string_of_list ts ";" string_of_desc_of_t))) - let _dump_stack () = - string_of_list !stack "\n" show_entry + let _dump_stack () = string_of_list !stack "\n" show_entry (* Detect whether pushing would cause a loop. Push only if no loop is detected, and return whether push happened. *) let push_unless_loop = - (* Say that targs are possibly expanding when, given previous targs and current targs, each previously non-empty targ is contained in the corresponding current targ. *) @@ -615,48 +308,54 @@ end = struct (* The following helper carries around a bit that indicates whether prev_tss contains at least one non-empty set. 
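As a rough standalone illustration of this containment check, the following sketch uses integer sets in place of Flow's TypeSet; possibly_expanding and the sample sets are hypothetical names used only to show the idea.

(* A standalone model of the check: every previously seen targ set must be
   contained in the corresponding current one, and at least one previous set
   must be non-empty, for the push to count as "possibly expanding". *)
module IntSet = Set.Make (Int)

let possibly_expanding prev_tss tss =
  let rec loop seen_nonempty_prev = function
    | (prev_ts :: prev_tss, ts :: tss) ->
      IntSet.subset prev_ts ts
      && loop (seen_nonempty_prev || not (IntSet.is_empty prev_ts)) (prev_tss, tss)
    | ([], []) -> seen_nonempty_prev
    | ([], _) | (_, []) -> false
  in
  loop false (prev_tss, tss)

let () =
  let s = IntSet.of_list in
  (* [1] grew into [1; 2]: possibly expanding, so a push would be refused *)
  assert (possibly_expanding [s [1]] [s [1; 2]]);
  (* [1; 2] is not contained in [2; 3]: not expanding *)
  assert (not (possibly_expanding [s [1; 2]] [s [2; 3]]))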
*) let rec loop seen_nonempty_prev_ts = function - | prev_ts::prev_tss, ts::tss -> + | (prev_ts :: prev_tss, ts :: tss) -> (* if prev_ts is not a subset of ts, we have found a counterexample and we can bail out *) - TypeSet.subset prev_ts ts && - (* otherwise, we recurse on the remaining targs, updating the bit *) - loop (seen_nonempty_prev_ts || not (TypeSet.is_empty prev_ts)) - (prev_tss, tss) - | [], [] -> + TypeSet.subset prev_ts ts + && (* otherwise, we recurse on the remaining targs, updating the bit *) + loop (seen_nonempty_prev_ts || not (TypeSet.is_empty prev_ts)) (prev_tss, tss) + | ([], []) -> (* we have found no counterexamples, so it comes down to whether we've seen any non-empty prev_ts *) seen_nonempty_prev_ts - | [], _ | _, [] -> + | ([], _) + | (_, []) -> (* something's wrong around arities, but that's not our problem, so bail out *) false - in loop false (prev_tss, tss) - - in fun cx (c, ts) -> - let tss = List.map (collect_roots cx) ts in - let loop = !stack |> List.exists (fun (prev_c, prev_tss) -> - c = prev_c && possibly_expanding_targs prev_tss tss - ) in - if loop then false - else begin + in + loop false (prev_tss, tss) + in + fun cx (c, ts) -> + let tss = Core_list.map ~f:(collect_roots cx) ts in + let loop = + !stack + |> List.exists (fun (prev_c, prev_tss) -> + c = prev_c && possibly_expanding_targs prev_tss tss) + in + if loop then + false + else ( stack := (c, tss) :: !stack; if Context.is_verbose cx then prerr_endlinef "typeapp stack entry: %s" (show_entry (c, tss)); true - end + ) let pop () = stack := List.tl !stack + let get () = !stack + let set _stack = stack := _stack end module Cache = struct - module FlowSet = struct let empty = TypeMap.empty let add_not_found l us setr = - setr := TypeMap.add l us !setr; false + setr := TypeMap.add l us !setr; + false let cache (l, u) setr = match TypeMap.get l !setr with @@ -666,41 +365,49 @@ module Cache = struct let us' = UseTypeSet.add u us in us' == us || add_not_found l us' setr - let fold f = - TypeMap.fold (fun l -> UseTypeSet.fold (fun u -> f (l, u))) + let fold f = TypeMap.fold (fun l -> UseTypeSet.fold (fun u -> f (l, u))) end (* Cache that remembers pairs of types that are passed to __flow. *) module FlowConstraint = struct let cache = ref FlowSet.empty + let rec toplevel_use_op = function + | Frame (_frame, use_op) -> toplevel_use_op use_op + | Op (Speculation use_op) -> toplevel_use_op use_op + | use_op -> use_op + (* attempt to read LB/UB pair from cache, add if absent *) - let get cx (l, u) = match l, u with + let get cx (l, u) = + match (l, u) with (* Don't cache constraints involving type variables, since the corresponding typing rules are already sufficiently robust. *) - | OpenT _, _ | _, UseT (_, OpenT _) -> false + | (OpenT _, _) + | (_, UseT (_, OpenT _)) -> + false | _ -> (* Use ops are purely for better error messages: they should have no effect on type checking. However, recursively nested use ops can pose non-termination problems. To ensure proper caching, we hash use ops - to just their top-level structure. *) - let u = mod_use_op_of_use_t (function - | Frame (frame, use_op) when use_op <> unknown_use -> Frame (frame, unknown_use) - | Op (Speculation use_op) when use_op <> unknown_use -> Op (Speculation unknown_use) - | use_op -> use_op) u in + to just their toplevel structure. 
*) + let u = mod_use_op_of_use_t toplevel_use_op u in let found = FlowSet.cache (l, u) cache in if found && Context.is_verbose cx then - prerr_endlinef "%sFlowConstraint cache hit on (%s, %s)" + prerr_endlinef + "%sFlowConstraint cache hit on (%s, %s)" (Context.pid_prefix cx) - (string_of_ctor l) (string_of_use_ctor u); + (string_of_ctor l) + (string_of_use_ctor u); found end (* Cache that maps TypeApp(Poly (...id), ts) to its result. *) module Subst = struct let cache = Hashtbl.create 0 + let find = Hashtbl.find_opt cache - let add = Hashtbl.replace cache + + let add = Hashtbl.add cache end (* Cache that limits instantiation of polymorphic definitions. Intuitively, @@ -712,15 +419,15 @@ module Cache = struct those representing the result): the cache would be useless if we considered those type variables as part of the identity of the operation. *) module PolyInstantiation = struct - type cache_key = Loc.t * reason * op_reason + type cache_key = ALoc.t * reason * op_reason + and op_reason = reason Nel.t - let cache: (cache_key, Type.t) Hashtbl.t = Hashtbl.create 0 + let cache : (cache_key, Type.t) Hashtbl.t = Hashtbl.create 0 let find cx reason_tapp typeparam op_reason = - let loc = def_loc_of_reason reason_tapp in - try - Hashtbl.find cache (loc, typeparam.reason, op_reason) + let loc = def_aloc_of_reason reason_tapp in + try Hashtbl.find cache (loc, typeparam.reason, op_reason) with _ -> let t = ImplicitTypeArgument.mk_targ cx typeparam (Nel.hd op_reason) reason_tapp in Hashtbl.add cache (loc, typeparam.reason, op_reason) t; @@ -731,42 +438,56 @@ module Cache = struct module Eval = struct type id_cache_key = Type.t * Type.defer_use_t + type repos_cache_key = Type.t * Type.defer_use_t * int - let id_cache: (id_cache_key, int) Hashtbl.t = Hashtbl.create 0 - let repos_cache: (repos_cache_key, Type.t) Hashtbl.t = Hashtbl.create 0 + let eval_id_cache : (int, Type.t) Hashtbl.t = Hashtbl.create 0 + + let id_cache : (id_cache_key, int) Hashtbl.t = Hashtbl.create 0 + + let repos_cache : (repos_cache_key, Type.t) Hashtbl.t = Hashtbl.create 0 let id t defer_use = - let cache_key = t, defer_use in - try - Hashtbl.find id_cache cache_key - with _ -> - let i = mk_id () in - Hashtbl.add id_cache cache_key i; - i + match t with + | EvalT (_, d, i) when d = defer_use -> + (match Hashtbl.find_opt eval_id_cache i with + | Some t -> t + | None -> + let i = mk_id () in + Hashtbl.add eval_id_cache i t; + EvalT (t, defer_use, i)) + | _ -> + let cache_key = (t, defer_use) in + let id = + match Hashtbl.find_opt id_cache cache_key with + | Some i -> i + | None -> + let i = mk_id () in + Hashtbl.add id_cache cache_key i; + i + in + EvalT (t, defer_use, id) let find_repos t defer_use id = - let cache_key = t, defer_use, id in - try Some (Hashtbl.find repos_cache cache_key) - with _ -> None + let cache_key = (t, defer_use, id) in + Hashtbl.find_opt repos_cache cache_key let add_repos t defer_use id tvar = - let cache_key = t, defer_use, id in + let cache_key = (t, defer_use, id) in Hashtbl.add repos_cache cache_key tvar end module Fix = struct type cache_key = reason * Type.t - let cache: (cache_key, Type.t) Hashtbl.t = Hashtbl.create 0 + let cache : (cache_key, Type.t) Hashtbl.t = Hashtbl.create 0 let find reason i = - let cache_key = reason, i in - try Some (Hashtbl.find cache cache_key) - with _ -> None + let cache_key = (reason, i) in + Hashtbl.find_opt cache cache_key let add reason i tvar = - let cache_key = reason, i in + let cache_key = (reason, i) in Hashtbl.add cache cache_key tvar end @@ -775,371 +496,96 
@@ module Cache = struct Hashtbl.clear Subst.cache; Hashtbl.clear PolyInstantiation.cache; repos_cache := Repos_cache.empty; + Hashtbl.clear Eval.eval_id_cache; Hashtbl.clear Eval.id_cache; Hashtbl.clear Eval.repos_cache; Hashtbl.clear Fix.cache; () - let stats_poly_instantiation () = - Hashtbl.stats PolyInstantiation.cache + let stats_poly_instantiation () = Hashtbl.stats PolyInstantiation.cache (* debug util: please don't dead-code-eliminate *) (* Summarize flow constraints in cache as ctor/reason pairs, and return counts for each group. *) let summarize_flow_constraint () = - let group_counts = FlowSet.fold (fun (l,u) map -> - let key = spf "[%s] %s => [%s] %s" - (string_of_ctor l) (string_of_reason (reason_of_t l)) - (string_of_use_ctor u) (string_of_reason (reason_of_use_t u)) in - match SMap.get key map with - | None -> SMap.add key 0 map - | Some i -> SMap.add key (i+1) map - ) !FlowConstraint.cache SMap.empty in - SMap.elements group_counts |> List.sort - (fun (_,i1) (_,i2) -> Pervasives.compare i1 i2) - -end - -(* Iterate over properties of an object, prioritizing sentinel properties (if - any) and ignoring shadow properties (if any). - - The first argument to f is a boolean which denotes if the property is a - sentinel property. *) -let iter_real_props cx id f = - Context.find_props cx id - |> SMap.filter (fun x _ -> not (is_internal_name x)) - |> SMap.iter (f ~is_sentinel:false) - -(* Helper module for full type resolution as needed to check union and - intersection types. - - Given a type, we walk it to collect the parts of it we wish to resolve. Once - these parts are resolved, they must themselves be walked to collect further - parts to resolve, and so on. In other words, type resolution jobs are created - and processed in rounds, moving closer and closer to full resolution of the - original type. Needless to say, these jobs can be recursive, and so must be - managed carefully for termination and performance. The job management itself - is done in Graph_explorer. (The jobs are naturally modeled as a graph with - dynamically created nodes and edges.) - - Here, we define the function that creates a single round of such jobs. -*) - -module ResolvableTypeJob = struct - - (* A datatype describing type resolution jobs. - - We unfold types as we go, looking for parts that cannot be unfolded - immediately (thus needing resolution to proceed). - - The handling of these parts involve calls to `flow` and `unify`, and is - thus decoupled from the walker itself for clarity. Here, we just create - different jobs for different parts encountered. These jobs are further - processed by bindings_of_jobs. - - Briefly, jobs are created for the following cases. (1) Annotation sources - need to be resolved. (2) So do heads of type applications. (3) Resolved - tvars are recursively unfolded, but we need to remember which resolved - tvars have been unfolded to prevent infinite unfolding. (4) Unresolved - tvars are handled differently based on context: when they are expected - (e.g., when they are part of inferred types), they are logged; when they - are unexpected (e.g., when they are part of annotations), they are - converted to `any`. For more details see bindings_of_jobs. - - *) - type t = - | Binding of Type.tvar - | OpenResolved - | OpenUnresolved of int option * reason * Constraint.ident - - (* log_unresolved is a mode that determines whether to log unresolved tvars: - it is None when resolving annotations, and Some speculation_id when - resolving inferred types. 
*) - let rec collect_of_types ?log_unresolved cx reason = - List.fold_left (collect_of_type ?log_unresolved cx reason) - - and collect_of_type ?log_unresolved cx reason acc = function - | OpenT tvar -> - let r, id = tvar in - if IMap.mem id acc then acc - else if is_constant_reason r - (* It is important to consider reads of constant property names as fully - resolvable, especially since constant property names are often used to - store literals that serve as tags for disjoint unions. Unfortunately, - today we cannot distinguish such reads from others, so we rely on a - common style convention to recognize constant property names. For now - this hack pays for itself: we do not ask such reads to be annotated - with the corresponding literal types to decide membership in those - disjoint unions. *) - then IMap.add id (Binding tvar) acc - else begin match Context.find_graph cx id with - | Resolved t -> - let acc = IMap.add id OpenResolved acc in - collect_of_type ?log_unresolved cx reason acc t - | Unresolved _ -> - if is_instantiable_reason r || is_instantiable_reason reason - (* Instantiable reasons indicate unresolved tvars that are created - "fresh" for the sole purpose of binding to other types, e.g. as - instantiations of type parameters or as existentials. Constraining - them during speculative matching typically do not cause side effects - across branches, and help make progress. *) - then acc - else IMap.add id (OpenUnresolved (log_unresolved, r, id)) acc - end - - | AnnotT (_, t, _) -> - begin match t with - | OpenT ((_, id) as tvar) -> - if IMap.mem id acc then acc - else IMap.add id (Binding tvar) acc - | _ -> - collect_of_type ?log_unresolved cx reason acc t - end - - | ThisTypeAppT (_, poly_t, _, targs_opt) -> - let targs = match targs_opt with | None -> [] | Some targs -> targs in - begin match poly_t with - | OpenT tvar -> - let _, id = tvar in - if IMap.mem id acc then - collect_of_types ?log_unresolved cx reason acc targs - else begin - let acc = IMap.add id (Binding tvar) acc in - collect_of_types ?log_unresolved cx reason acc targs - end - - | _ -> - let ts = poly_t::targs in - collect_of_types ?log_unresolved cx reason acc ts - end - - | DefT (_, TypeAppT (_, poly_t, targs)) - -> - begin match poly_t with - | OpenT tvar -> - let _, id = tvar in - if IMap.mem id acc then - collect_of_types ?log_unresolved cx reason acc targs - else begin - let acc = IMap.add id (Binding tvar) acc in - collect_of_types ?log_unresolved cx reason acc targs - end - - | _ -> - let ts = poly_t::targs in - collect_of_types ?log_unresolved cx reason acc ts - end - - (* Some common kinds of types are quite overloaded: sometimes they - correspond to types written by the user, but sometimes they also model - internal types, and as such carry other bits of information. For now, we - walk only some parts of these types. These parts are chosen such that - they directly correspond to parts of the surface syntax of types. It is - less clear what it means to resolve other "internal" parts of these - types. In theory, ignoring them *might* lead to bugs, but we've not seen - examples of such bugs yet. Leaving further investigation of this point as - future work. 
*) - - | DefT (_, ObjT { props_tmap; dict_t; call_t; _ }) -> - let props_tmap = Context.find_props cx props_tmap in - let ts = SMap.fold (fun x p ts -> - (* avoid resolving types of shadow properties *) - if is_internal_name x then ts - else Property.fold_t (fun ts t -> t::ts) ts p - ) props_tmap [] in - let ts = match dict_t with - | None -> ts - | Some { key; value; _ } -> key::value::ts - in - let ts = match call_t with - | None -> ts - | Some id -> (Context.find_call cx id)::ts - in - collect_of_types ?log_unresolved cx reason acc ts - | DefT (_, FunT (_, _, { params; return_t; _ })) -> - let ts = List.fold_left (fun acc (_, t) -> t::acc) [return_t] params in - collect_of_types ?log_unresolved cx reason acc ts - | DefT (_, ArrT (ArrayAT (elemt, tuple_types))) -> - let ts = Option.value ~default:[] tuple_types in - let ts = elemt::ts in - collect_of_types ?log_unresolved cx reason acc ts - | DefT (_, ArrT (TupleAT (elemt, tuple_types))) -> - collect_of_types ?log_unresolved cx reason acc (elemt::tuple_types) - | DefT (_, ArrT (ROArrayAT (elemt))) -> - collect_of_type ?log_unresolved cx reason acc elemt - | DefT (_, ArrT EmptyAT) -> acc - | DefT (_, InstanceT (static, super, _, - { class_id; type_args; own_props; proto_props; inst_call_t; _ })) -> - let ts = if class_id = 0 then [] else [super; static] in - let ts = List.fold_left (fun ts (_, _, t, _) -> t::ts) ts type_args in - let props_tmap = SMap.union - (Context.find_props cx own_props) - (Context.find_props cx proto_props) - in - let ts = SMap.fold (fun _ p ts -> - Property.fold_t (fun ts t -> t::ts) ts p - ) props_tmap ts in - let ts = match inst_call_t with - | None -> ts - | Some id -> (Context.find_call cx id)::ts - in - collect_of_types ?log_unresolved cx reason acc ts - | DefT (_, PolyT (_, t, _)) -> - collect_of_type ?log_unresolved cx reason acc t - | BoundT _ -> - acc - - | EvalT (_, TypeDestructorT _, id) -> - (match IMap.get id (Context.evaluated cx) with - | Some (OpenT ((_, id) as tvar)) -> - IMap.add id (Binding tvar) acc - | _ -> acc) - - (* TODO: The following kinds of types are not walked out of laziness. It's - not immediately clear what we'd gain (or lose) by walking them. *) - - | EvalT _ - | InternalT (ChoiceKitT (_, _)) - | TypeDestructorTriggerT _ - | ModuleT (_, _, _) - | InternalT (ExtendsT _) - -> - acc - - (* The following cases exactly follow Type_visitor (i.e., they do the - standard walk). TODO: Rewriting this walker as a subclass of Type_visitor - would be quite nice (as long as we confirm that the resulting - virtualization of calls to this function doesn't lead to perf - degradation: this function is expected to be quite hot). 
*) - - | DefT (_, OptionalT t) | DefT (_, MaybeT t) -> - collect_of_type ?log_unresolved cx reason acc t - | DefT (_, UnionT rep) -> - let ts = UnionRep.members rep in - collect_of_types ?log_unresolved cx reason acc ts - | DefT (_, IntersectionT rep) -> - let ts = InterRep.members rep in - collect_of_types ?log_unresolved cx reason acc ts - - | OpaqueT (_, {underlying_t; super_t; _}) -> - let acc = Option.fold underlying_t ~init:acc ~f:(collect_of_type ?log_unresolved cx reason) in - let acc = Option.fold super_t ~init:acc ~f:(collect_of_type ?log_unresolved cx reason) in - acc - - | AnyWithUpperBoundT t - | AnyWithLowerBoundT t - | ExactT (_, t) - | DefT (_, TypeT (_, t)) - | DefT (_, ClassT t) - | ThisClassT (_, t) - -> - collect_of_type ?log_unresolved cx reason acc t - - | KeysT (_, t) -> - collect_of_type ?log_unresolved cx reason acc t - - | ShapeT (t) -> - collect_of_type ?log_unresolved cx reason acc t - - | MatchingPropT (_, _, t) -> - collect_of_type ?log_unresolved cx reason acc t - - | DefT (_, IdxWrapper t) -> - collect_of_type ?log_unresolved cx reason acc t - - | ReposT (_, t) - | InternalT (ReposUpperT (_, t)) -> - collect_of_type ?log_unresolved cx reason acc t - - | InternalT (OptionalChainVoidT _) -> acc - - | DefT (_, NumT _) - | DefT (_, StrT _) - | DefT (_, BoolT _) - | DefT (_, VoidT) - | DefT (_, NullT) - | DefT (_, EmptyT) - | DefT (_, MixedT _) - | DefT (_, SingletonBoolT _) - | DefT (_, SingletonNumT _) - | DefT (_, SingletonStrT _) - | DefT (_, AnyT) - | DefT (_, AnyObjT) - | DefT (_, AnyFunT) - | DefT (_, CharSetT _) - -> acc - - | MergedT (_, uses) -> - List.fold_left (collect_of_use ?log_unresolved cx reason) acc uses - - | FunProtoBindT _ - | FunProtoCallT _ - | FunProtoApplyT _ - | FunProtoT _ - | NullProtoT _ - | ObjProtoT _ - | CustomFunT (_, _) - - | ExistsT _ - | OpenPredT _ - -> - acc - - (* TODO: Support for use types is currently sketchy. Full resolution of use - types are only needed for choice-making on intersections. We care about - calls in particular because one of the biggest uses of intersections is - function overloading. More uses will be added over time. 
*) - and collect_of_use ?log_unresolved cx reason acc = function - | UseT (_, t) -> - collect_of_type ?log_unresolved cx reason acc t - | CallT (_, _, fct) -> - let arg_types = - List.map (function Arg t | SpreadArg t -> t) fct.call_args_tlist in - collect_of_types ?log_unresolved cx reason acc (arg_types @ [fct.call_tout]) - | GetPropT (_, _, _, t_out) -> - collect_of_type ?log_unresolved cx reason acc t_out - | _ -> acc - + let group_counts = + FlowSet.fold + (fun (l, u) map -> + let key = + spf + "[%s] %s => [%s] %s" + (string_of_ctor l) + (string_of_reason (reason_of_t l)) + (string_of_use_ctor u) + (string_of_reason (reason_of_use_t u)) + in + match SMap.get key map with + | None -> SMap.add key 0 map + | Some i -> SMap.add key (i + 1) map) + !FlowConstraint.cache + SMap.empty + in + SMap.elements group_counts |> List.sort (fun (_, i1) (_, i2) -> Pervasives.compare i1 i2) end (*********************************************************************) -exception SpeculativeError of FlowError.error_message +exception SpeculativeError of Error_message.t let add_output cx ?trace msg = - if Speculation.speculating () - then - if (FlowError.is_lint_error msg) - then ignore @@ Speculation.(defer_action cx (Action.Error msg)) - else begin - if Context.is_verbose cx then - prerr_endlinef "\nspeculative_error: %s" (Debug_js.dump_flow_error cx msg); - raise (SpeculativeError msg) - end - else begin - if Context.is_verbose cx then - prerr_endlinef "\nadd_output: %s" (Debug_js.dump_flow_error cx msg); - - let trace_reasons = match trace with + let trace_reasons = + match trace with | None -> [] | Some trace -> (* format a trace into list of (reason, desc) pairs used - downstream for obscure reasons, and then to messages *) + downstream for obscure reasons, and then to messages *) let max_trace_depth = Context.max_trace_depth cx in - if max_trace_depth = 0 then [] else + if max_trace_depth = 0 then + [] + else Trace.reasons_of_trace ~level:max_trace_depth trace - in + in + let is_enabled = + match Error_message.kind_of_msg msg with + | Errors.LintError lint_kind -> + begin + match Error_message.aloc_of_msg msg with + | Some loc -> + ALoc.to_loc_with_tables (Context.aloc_tables cx) loc + |> Error_suppressions.get_lint_settings (Context.severity_cover cx) + |> Option.value_map ~default:true ~f:(fun lint_settings -> + LintSettings.is_explicit lint_kind lint_settings + || LintSettings.get_value lint_kind lint_settings <> Severity.Off) + | _ -> true + end + | _ -> true + in + (* If the lint error isn't enabled at this location and isn't explicitly suppressed, just don't + even add it *) + if not is_enabled then + () + else if Speculation.speculating () then + if Error_message.is_lint_error msg then + ignore @@ Speculation.(defer_action cx (Action.Error msg)) + else ( + if Context.is_verbose cx then + prerr_endlinef "\nspeculative_error: %s" (Debug_js.dump_error_message cx msg); + raise (SpeculativeError msg) + ) + else ( + if Context.is_verbose cx then + prerr_endlinef "\nadd_output: %s" (Debug_js.dump_error_message cx msg); let error = FlowError.error_of_msg ~trace_reasons ~source_file:(Context.file cx) msg in - (* catch no-loc errors early, before they get into error map *) - if Loc.source (Errors.loc_of_error error) = None then - assert_false ( - spf "add_output: no source for error: %s" - (Debug_js.dump_flow_error cx msg)); + if + Flow_error.loc_of_error error + |> Option.value_map ~default:false ~f:(fun loc -> ALoc.source loc = None) + then + assert_false (spf "add_output: no source for error: %s" 
(Debug_js.dump_error_message cx msg)); Context.add_error cx error - end + ) (********************) (* subtype relation *) @@ -1155,19 +601,18 @@ let add_output cx ?trace msg = When check is called with a trace whose depth exceeds a constant limit, we throw a LimitExceeded exception. *) + module RecursionCheck : sig exception LimitExceeded of Trace.t - val check: Trace.t -> unit + val check : Context.t -> Trace.t -> unit end = struct exception LimitExceeded of Trace.t - let limit = 10000 (* check trace depth as a proxy for recursion depth and throw when limit is exceeded *) - let check trace = - if Trace.trace_depth trace >= limit - then raise (LimitExceeded trace) + let check cx trace = + if Trace.trace_depth trace >= Context.recursion_limit cx then raise (LimitExceeded trace) end (* The main problem with constant folding is infinite recursion. Consider a loop @@ -1177,314 +622,648 @@ end * doing constant folding. * * One solution is for constant-folding-location to keep count of how many times - * we have seen a reason. Then, when we've seen it multiple times, we can decide + * we have seen a reason at a given position in the array. + * Then, when we've seen it multiple times in the same place, we can decide * to stop doing constant folding. *) + +module ConstFoldMap = MyMap.Make (struct + type t = reason * int + + let compare = Pervasives.compare +end) + module ConstFoldExpansion : sig - val guard: int -> reason -> (int -> 't) -> 't + val guard : int -> reason * int -> (int -> 't) -> 't end = struct - let rmaps: int ReasonMap.t IMap.t ref = ref IMap.empty - - let get_rmap id = Option.value ~default:ReasonMap.empty (IMap.get id !rmaps) - - let increment reason rmap = - match ReasonMap.get reason rmap with - | None -> 0, ReasonMap.add reason 1 rmap - | Some count -> count, ReasonMap.add reason (count + 1) rmap - - let decrement reason rmap = - match ReasonMap.get reason rmap with - | Some count -> - if count > 1 - then ReasonMap.add reason (count - 1) rmap - else ReasonMap.remove reason rmap - | None -> rmap - - let push id reason = - let rmap = get_rmap id in - let old_value, new_reason_map = increment reason rmap in - rmaps := IMap.add id new_reason_map !rmaps; - old_value - - let pop id reason = - let rmap = - get_rmap id - |> decrement reason in - if ReasonMap.is_empty rmap - then rmaps := IMap.remove id !rmaps - else rmaps := IMap.add id rmap !rmaps - - let guard id reason f = - let count = push id reason in - let ret = f count in - pop id reason; - ret + let rmaps : int ConstFoldMap.t IMap.t ref = ref IMap.empty + + let get_rmap id = IMap.get id !rmaps |> Option.value ~default:ConstFoldMap.empty + + let increment reason_with_pos rmap = + match ConstFoldMap.get reason_with_pos rmap with + | None -> (0, ConstFoldMap.add reason_with_pos 1 rmap) + | Some count -> (count, ConstFoldMap.add reason_with_pos (count + 1) rmap) + + let guard id reason_with_pos f = + let (count, rmap) = get_rmap id |> increment reason_with_pos in + rmaps := IMap.add id rmap !rmaps; + f count end exception Not_expect_bound of string (* Sometimes we don't expect to see type parameters, e.g. when they should have been substituted away. 
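The counting scheme used by ConstFoldExpansion.guard can be illustrated in isolation; the snippet below uses string keys in place of (reason, index) pairs, and guard, counts and fold_once are illustrative names only.

(* A standalone model of the guard: remember how many times a key has been
   seen and hand that count to the callback, which can then stop folding. *)
module CountMap = Map.Make (String)

let counts : int CountMap.t ref = ref CountMap.empty

let guard key f =
  let count =
    match CountMap.find_opt key !counts with
    | Some n -> n
    | None -> 0
  in
  counts := CountMap.add key (count + 1) !counts;
  f count

let () =
  let fold_once count = if count > 2 then "stop folding" else "keep folding" in
  assert (guard "arr[0]" fold_once = "keep folding");
  assert (guard "arr[0]" fold_once = "keep folding");
  assert (guard "arr[0]" fold_once = "keep folding");
  assert (guard "arr[0]" fold_once = "stop folding")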
*) -let not_expect_bound t = match t with - | BoundT _ -> - raise (Not_expect_bound (spf "Did not expect %s" (string_of_ctor t))) +let not_expect_bound t = + match t with + | BoundT _ -> raise (Not_expect_bound (spf "Did not expect %s" (string_of_ctor t))) | _ -> () -let not_expect_bound_use t = - lift_to_use not_expect_bound t +let not_expect_bound_use t = lift_to_use not_expect_bound t (* Sometimes we expect to see only proper def types. Proper def types make sense as use types. *) let expect_proper_def t = - if not (is_proper_def t) then - assert_false (spf "Did not expect %s" (string_of_ctor t)) + if not (is_proper_def t) then assert_false (spf "Did not expect %s" (string_of_ctor t)) -let expect_proper_def_use t = - lift_to_use expect_proper_def t +let expect_proper_def_use t = lift_to_use expect_proper_def t let check_nonstrict_import cx trace is_strict imported_is_strict reason = - if is_strict && (not imported_is_strict) then - let loc = Reason.aloc_of_reason reason |> ALoc.to_loc in - let message = FlowError.ENonstrictImport loc in + if is_strict && not imported_is_strict then + let loc = Reason.aloc_of_reason reason in + let message = Error_message.ENonstrictImport loc in add_output cx ~trace message -let print_if_verbose_lazy cx trace - ?(delim = "") - ?(indent = 0) - (lines: string Lazy.t list) = - match Context.verbose cx with - | Some { Verbose.indent = num_spaces; _ } -> - let indent = indent + Trace.trace_depth trace - 1 in - let prefix = String.make (indent * num_spaces) ' ' in - let pid = Context.pid_prefix cx in - let add_prefix line = spf "\n%s%s%s" prefix pid (Lazy.force line) in - let lines = List.map add_prefix lines in - prerr_endline (String.concat delim lines) - | None -> - () - -let print_if_verbose cx trace ?(delim = "") ?(indent = 0) (lines: string list) = - match Context.verbose cx with - | Some _ -> - let lines = List.map (fun line -> lazy line) lines in - print_if_verbose_lazy cx trace ~delim ~indent lines - | None -> - () - -let print_types_if_verbose cx trace - ?(note: string option) - ((l: Type.t), (u: Type.use_t)) = - match Context.verbose cx with - | Some { Verbose.depth; _ } -> - let delim = match note with Some x -> spf " ~> %s" x | None -> " ~>" in - print_if_verbose cx trace ~delim [ - Debug_js.dump_t ~depth cx l; - Debug_js.dump_use_t ~depth cx u; - ] - | None -> - () - let subst = Subst.subst -(********************** start of slab **********************************) - -(** NOTE: Do not call this function directly. Instead, call the wrapper - functions `rec_flow`, `join_flow`, or `flow_opt` (described below) inside - this module, and the function `flow` outside this module. **) -let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = - if ground_subtype (l, u) then - print_types_if_verbose cx trace (l, u) - else if Cache.FlowConstraint.get cx (l, u) then - print_types_if_verbose cx trace ~note:"(cached)" (l, u) - else ( - print_types_if_verbose cx trace (l, u); - - (* limit recursion depth *) - RecursionCheck.check trace; - - (* Expect that l is a def type. On the other hand, u may be a use type or a - def type: the latter typically when we have annotations. 
*) +let check_canceled = + let count = ref 0 in + fun () -> + let n = (!count + 1) mod 128 in + count := n; + if n = 0 then WorkerCancel.check_should_exit () + +let error_message_kind_of_lower = function + | DefT (_, _, NullT) -> Some Error_message.Possibly_null + | DefT (_, _, VoidT) -> Some Error_message.Possibly_void + | MaybeT _ -> Some Error_message.Possibly_null_or_void + | IntersectionT _ + | _ -> + None - (* Type parameters should always be substituted out, and as such they should - never appear "exposed" in flows. (They can still appear bound inside - polymorphic definitions.) *) - not_expect_bound l; - not_expect_bound_use u; - (* Types that are classified as def types but don't make sense as use types - should not appear as use types. *) - expect_proper_def_use u; +let error_message_kind_of_upper = function + | GetPropT (_, _, Named (r, name), _) -> + Error_message.IncompatibleGetPropT (aloc_of_reason r, Some name) + | GetPropT (_, _, Computed t, _) -> Error_message.IncompatibleGetPropT (loc_of_t t, None) + | GetPrivatePropT (_, _, _, _, _, _) -> Error_message.IncompatibleGetPrivatePropT + | SetPropT (_, _, Named (r, name), _, _, _, _) -> + Error_message.IncompatibleSetPropT (aloc_of_reason r, Some name) + | SetPropT (_, _, Computed t, _, _, _, _) -> Error_message.IncompatibleSetPropT (loc_of_t t, None) + | MatchPropT (_, _, Named (r, name), _) -> + Error_message.IncompatibleMatchPropT (aloc_of_reason r, Some name) + | MatchPropT (_, _, Computed t, _) -> Error_message.IncompatibleMatchPropT (loc_of_t t, None) + | SetPrivatePropT (_, _, _, _, _, _, _, _) -> Error_message.IncompatibleSetPrivatePropT + | MethodT (_, _, _, Named (r, name), _, _) -> + Error_message.IncompatibleMethodT (aloc_of_reason r, Some name) + | MethodT (_, _, _, Computed t, _, _) -> Error_message.IncompatibleMethodT (loc_of_t t, None) + | CallT _ -> Error_message.IncompatibleCallT + | ConstructorT _ -> Error_message.IncompatibleConstructorT + | GetElemT (_, _, t, _) -> Error_message.IncompatibleGetElemT (loc_of_t t) + | SetElemT (_, _, t, _, _, _) -> Error_message.IncompatibleSetElemT (loc_of_t t) + | CallElemT (_, _, t, _) -> Error_message.IncompatibleCallElemT (loc_of_t t) + | ElemT (_, _, DefT (_, _, ArrT _), _) -> Error_message.IncompatibleElemTOfArrT + | ObjAssignFromT (_, _, _, _, ObjSpreadAssign) -> Error_message.IncompatibleObjAssignFromTSpread + | ObjAssignFromT _ -> Error_message.IncompatibleObjAssignFromT + | ObjRestT _ -> Error_message.IncompatibleObjRestT + | ObjSealT _ -> Error_message.IncompatibleObjSealT + | ArrRestT _ -> Error_message.IncompatibleArrRestT + | SuperT _ -> Error_message.IncompatibleSuperT + | MixinT _ -> Error_message.IncompatibleMixinT + | SpecializeT _ -> Error_message.IncompatibleSpecializeT + | ConcretizeTypeAppsT _ -> Error_message.IncompatibleSpecializeT + | ThisSpecializeT _ -> Error_message.IncompatibleThisSpecializeT + | VarianceCheckT _ -> Error_message.IncompatibleVarianceCheckT + | GetKeysT _ -> Error_message.IncompatibleGetKeysT + | HasOwnPropT (_, r, Literal (_, name)) -> + Error_message.IncompatibleHasOwnPropT (aloc_of_reason r, Some name) + | HasOwnPropT (_, r, _) -> Error_message.IncompatibleHasOwnPropT (aloc_of_reason r, None) + | GetValuesT _ -> Error_message.IncompatibleGetValuesT + | UnaryMinusT _ -> Error_message.IncompatibleUnaryMinusT + | MapTypeT (_, _, (ObjectMap _ | ObjectMapi _), _) -> Error_message.IncompatibleMapTypeTObject + | TypeAppVarianceCheckT _ -> Error_message.IncompatibleTypeAppVarianceCheckT + | GetStaticsT _ -> 
Error_message.IncompatibleGetStaticsT + | use_t -> Error_message.IncompatibleUnclassified (string_of_use_ctor use_t) + +let use_op_of_lookup_action = function + | ReadProp { use_op; _ } -> Some use_op + | WriteProp { use_op; _ } -> Some use_op + | LookupProp (use_op, _) -> Some use_op + | SuperProp (use_op, _) -> Some use_op + | MatchProp (use_op, _) -> Some use_op - (* Before processing the flow action, check that it is not deferred. If it - is, then when speculation is complete, the action either fires or is - discarded depending on whether the case that created the action is - selected or not. *) - if Speculation.(defer_action cx (Action.Flow (l, u))) then - print_if_verbose cx trace ~indent:1 ["deferred during speculation"] +(* some types need to be resolved before proceeding further *) +let needs_resolution = function + | OpenT _ + | UnionT _ + | OptionalT _ + | MaybeT _ + | AnnotT _ -> + true + | _ -> false - else match (l,u) with +let is_object_prototype_method = function + | "isPrototypeOf" + | "hasOwnProperty" + | "propertyIsEnumerable" + | "toLocaleString" + | "toString" + | "valueOf" -> + true + | _ -> false - (********) - (* eval *) - (********) +(* This must list all of the properties on Function.prototype. *) +let is_function_prototype = function + | "apply" + | "bind" + | "call" + | "arguments" + | "caller" + | "length" + | "name" -> + true + | x -> is_object_prototype_method x - | EvalT (t, TypeDestructorT (use_op', reason, d), id), _ -> - let _, result = mk_type_destructor cx ~trace use_op' reason t d id in - rec_flow cx trace (result, u) +(* neither object prototype methods nor callable signatures should be + * implied by an object indexer type *) +let is_dictionary_exempt = function + | x when is_object_prototype_method x -> true + | _ -> false - | _, UseT (use_op, EvalT (t, TypeDestructorT (use_op', reason, d), id)) -> - let slingshot, result = mk_type_destructor cx ~trace use_op' reason t d id in - if slingshot - then rec_flow cx trace (result, ReposUseT (reason, false, use_op, l)) - else rec_flow cx trace (l, UseT (use_op, result)) +(* common case checking a function as an object *) +let quick_error_fun_as_obj cx trace ~use_op reason statics reason_o props = + let statics_own_props = + match statics with + | DefT (_, _, ObjT { props_tmap; _ }) -> Some (Context.find_props cx props_tmap) + | AnyT _ + | DefT (_, _, MixedT _) -> + Some SMap.empty + | _ -> None + in + match statics_own_props with + | Some statics_own_props -> + let props_not_found = + SMap.filter + (fun x p -> + let optional = + match p with + | Field (_, OptionalT _, _) -> true + | _ -> false + in + not (optional || is_function_prototype x || SMap.mem x statics_own_props)) + props + in + SMap.iter + (fun x _ -> + let use_op = + Frame (PropertyCompatibility { prop = Some x; lower = reason; upper = reason_o }, use_op) + in + let reason_prop = update_desc_reason (fun desc -> RPropertyOf (x, desc)) reason_o in + let err = Error_message.EPropNotFound (Some x, (reason_prop, reason), use_op) in + add_output cx ~trace err) + props_not_found; + not (SMap.is_empty props_not_found) + | None -> false - | EvalT (t, DestructuringT (reason, s), i), _ -> - rec_flow cx trace (eval_selector cx ~trace reason t s i, u) - - (** NOTE: the rule with EvalT (_, DestructuringT _, _) as upper bound is - moved below the OpenT rules, so that we can take advantage of the - caching inherent in those rules (in particular, when OpenT is a lower - bound). This caching seems necessary to avoid non-termination. 
There - could be other, better ways of achieving the same effect. **) - - (******************) - (* process X ~> Y *) - (******************) - - | (OpenT(_, tvar1), UseT (use_op, OpenT(_, tvar2))) -> - let id1, constraints1 = Context.find_constraints cx tvar1 in - let id2, constraints2 = Context.find_constraints cx tvar2 in - - (match constraints1, constraints2 with - | Unresolved bounds1, Unresolved bounds2 -> - if not_linked (id1, bounds1) (id2, bounds2) then ( - add_upper_edges cx trace (id1, bounds1) (id2, bounds2); - add_lower_edges cx trace (id1, bounds1) (id2, bounds2); - flows_across cx trace ~use_op bounds1.lower bounds2.upper; - ); - - | Unresolved bounds1, Resolved t2 -> - edges_and_flows_to_t cx trace (id1, bounds1) (UseT (use_op, t2)) +(* NOTE: The following function looks similar to TypeUtil.quick_subtype, but is in fact more + complicated: it avoids deep structural checks, admits `any`, etc. It might be worth it to + simplify this function later. *) +let ground_subtype = function + (* tvars are not considered ground, so they're not part of this relation *) + | (OpenT _, _) + | (_, UseT (_, OpenT _)) -> + false + | (UnionT _, _) -> false + | (DefT (_, _, NumT _), UseT (_, DefT (_, _, NumT _))) + | (DefT (_, _, StrT _), UseT (_, DefT (_, _, StrT _))) + | (DefT (_, _, BoolT _), UseT (_, DefT (_, _, BoolT _))) + | (DefT (_, _, NullT), UseT (_, DefT (_, _, NullT))) + | (DefT (_, _, VoidT), UseT (_, DefT (_, _, VoidT))) -> + true + | (DefT (_, _, NullT), UseT (_, DefT (_, _, MixedT (Mixed_non_maybe | Mixed_non_null)))) + | (DefT (_, _, VoidT), UseT (_, DefT (_, _, MixedT (Mixed_non_maybe | Mixed_non_void)))) -> + false + | (_, UseT (_, DefT (_, _, MixedT _))) -> true + (* we handle the any propagation check later *) + | (AnyT _, _) -> false + | (_, UseT (_, AnyT _)) -> false + (* opt: avoid builtin lookups *) + | (ObjProtoT _, UseT (_, ObjProtoT _)) + | (FunProtoT _, UseT (_, FunProtoT _)) + | (FunProtoT _, UseT (_, ObjProtoT _)) + | (DefT (_, _, ObjT { proto_t = ObjProtoT _; _ }), UseT (_, ObjProtoT _)) + | (DefT (_, _, ObjT { proto_t = FunProtoT _; _ }), UseT (_, FunProtoT _)) + | (DefT (_, _, ObjT { proto_t = FunProtoT _; _ }), UseT (_, ObjProtoT _)) -> + true + | _ -> false - | Resolved t1, Unresolved bounds2 -> - edges_and_flows_from_t cx trace ~use_op t1 (id2, bounds2) +let numeric = function + | DefT (_, _, NumT _) -> true + | DefT (_, _, SingletonNumT _) -> true + | _ -> false - | Resolved t1, Resolved t2 -> - rec_flow cx trace (t1, UseT (use_op, t2)) - ); +let dateiform = function + | DefT (reason, _, InstanceT _) -> DescFormat.name_of_instance_reason reason = "Date" + | _ -> false - (******************) - (* process Y ~> U *) - (******************) +let numberesque = function + | x -> numeric x || dateiform x - | (OpenT(r, tvar), t2) -> - let t2 = match desc_of_reason r with - | RTypeParam (_, (_, loc), _) -> - mod_use_op_of_use_t (fun op -> Frame (ImplicitTypeParam loc, op)) t2 - | _ -> t2 - in +let function_like = function + | DefT (_, _, ClassT _) + | DefT (_, _, FunT _) + | CustomFunT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ -> + true + | _ -> false - let id1, constraints1 = Context.find_constraints cx tvar in - (match constraints1 with - | Unresolved bounds1 -> - edges_and_flows_to_t cx trace (id1, bounds1) t2 +let function_use = function + | UseT (_, DefT (_, _, FunT _)) -> true + | _ -> false - | Resolved t1 -> - rec_flow cx trace (t1, t2) - ); +let object_like = function + | DefT (_, _, (ObjT _ | InstanceT _)) + | AnyT _ -> + true + | t -> 
function_like t - (******************) - (* process L ~> X *) - (******************) +let object_use = function + | UseT (_, DefT (_, _, ObjT _)) -> true + | _ -> false - | (t1, UseT (use_op, OpenT(_, tvar))) -> - let id2, constraints2 = Context.find_constraints cx tvar in - (match constraints2 with - | Unresolved bounds2 -> - edges_and_flows_from_t cx trace ~use_op t1 (id2, bounds2) +let object_like_op = function + | SetPropT _ + | GetPropT _ + | TestPropT _ + | MethodT _ + | LookupT _ + | MatchPropT _ + | GetProtoT _ + | SetProtoT _ + | SuperT _ + | GetKeysT _ + | HasOwnPropT _ + | GetValuesT _ + | ObjAssignToT _ + | ObjAssignFromT _ + | ObjRestT _ + | SetElemT _ + | GetElemT _ + | UseT (_, AnyT _) -> + true + | _ -> false - | Resolved t2 -> - rec_flow cx trace (t1, UseT (use_op, t2)) - ); +let function_like_op = function + | CallT _ + | ConstructorT _ + | UseT (_, AnyT _) -> + true + | t -> object_like_op t - (*****************) - (* any with uses *) - (*****************) +let equatable = function + | (DefT (_, _, NumT _), DefT (_, _, NumT _)) + | (DefT (_, _, SingletonNumT _), DefT (_, _, SingletonNumT _)) + | (DefT (_, _, SingletonNumT _), DefT (_, _, NumT _)) + | (DefT (_, _, NumT _), DefT (_, _, SingletonNumT _)) + | (DefT (_, _, StrT _), DefT (_, _, StrT _)) + | (DefT (_, _, StrT _), DefT (_, _, SingletonStrT _)) + | (DefT (_, _, SingletonStrT _), DefT (_, _, StrT _)) + | (DefT (_, _, SingletonStrT _), DefT (_, _, SingletonStrT _)) + | (DefT (_, _, BoolT _), DefT (_, _, BoolT _)) + | (DefT (_, _, BoolT _), DefT (_, _, SingletonBoolT _)) + | (DefT (_, _, SingletonBoolT _), DefT (_, _, BoolT _)) + | (DefT (_, _, SingletonBoolT _), DefT (_, _, SingletonBoolT _)) + | (DefT (_, _, EmptyT _), _) + | (_, DefT (_, _, EmptyT _)) + | (_, DefT (_, _, MixedT _)) + | (DefT (_, _, MixedT _), _) + | (AnyT _, _) + | (_, AnyT _) + | (DefT (_, _, VoidT), _) + | (_, DefT (_, _, VoidT)) + | (DefT (_, _, NullT), _) + | (_, DefT (_, _, NullT)) -> + true + | ( DefT + (_, _, (NumT _ | StrT _ | BoolT _ | SingletonNumT _ | SingletonStrT _ | SingletonBoolT _)), + _ ) + | ( _, + DefT + (_, _, (NumT _ | StrT _ | BoolT _ | SingletonNumT _ | SingletonStrT _ | SingletonBoolT _)) + ) -> + false + | _ -> true - | _, UseT (_, MergedT (_, uses)) -> - List.iter (fun u -> rec_flow cx trace (l, u)) uses +(* Creates a union from a list of types. Since unions require a minimum of two + types this function will return an empty type when there are no types in the + list, or the list head when there is one type in the list. *) +let union_of_ts reason ts = + match ts with + (* If we have no types then this is an error. *) + | [] -> DefT (reason, bogus_trust (), EmptyT Bottom) + (* If we only have one type then only that should be used. *) + | [t0] -> t0 + (* If we have more than one type then we make a union type. *) + | t0 :: t1 :: ts -> UnionT (reason, UnionRep.make t0 t1 ts) + (* generics *) - | MergedT _, ReposUseT (reason, use_desc, use_op, l) -> - let loc = aloc_of_reason reason in - let desc = if use_desc then Some (desc_of_reason reason) else None in - let u = reposition cx ~trace (loc |> ALoc.to_loc) ?desc l in - rec_flow cx trace (l, UseT (use_op, u)) +(** Harness for testing parameterized types. Given a test function and a list + of type params, generate a bunch of argument maps and invoke the test + function on each, using Reason.TestID to keep the reasons generated by + each test disjoint from the others. 
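(* Editorial sketch, not part of the diff: the 0/1/2+ normalization performed
   by union_of_ts above, on a toy type representation. The constructor names
   (Bottom, Named, Union) are placeholders rather than Flow's types. *)
type toy_ty =
  | Bottom                                  (* like EmptyT: no members at all *)
  | Named of string
  | Union of toy_ty * toy_ty * toy_ty list  (* a union always has >= 2 members *)

let union_of_toys = function
  | [] -> Bottom                  (* no types: fall back to the empty type *)
  | [t0] -> t0                    (* one type: no union needed *)
  | t0 :: t1 :: ts -> Union (t0, t1, ts)

let () =
  match union_of_toys [Named "number"; Named "string"] with
  | Union (Named a, Named b, []) -> Printf.printf "%s | %s\n" a b
  | _ -> ()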
- | MergedT (reason, _), _ -> - rec_flow cx trace (EmptyT.why reason, u) + In the general case we simply test every combination of p = bot, p = bound + for each param p. For many parameter lists this will be more than strictly + necessary, but determining the minimal set of tests for interrelated params + is subtle. For now, our only refinement is to isolate all params with an + upper bound of MixedT (making them trivially unrelated to each other) and + generate a smaller set of argument maps for these which only cover a) bot, + bound for each param, and b) every pairwise bot/bound combination. These + maps are then used as seeds for powersets over the remaining params. - (****************) - (* eval, contd. *) - (****************) + NOTE: Since the same AST is traversed by each generated test, the order + of generated tests is important for the proper functioning of hooks that + record information on the side as ASTs are traversed. Adopting the + convention that the last traversal "wins" (which would happen, e.g, when + the recorded information at a location is replaced every time that + location is encountered), we want the last generated test to always be + the one where all type parameters are substituted by their bounds + (instead of Bottom), so that the recorded information is the same as if + all type parameters were indeed erased and replaced by their bounds. + *) +and generate_tests : 'a. Context.t -> Type.typeparam list -> (Type.t SMap.t -> 'a) -> 'a = + (* make bot type for given param *) + let mk_bot _ { name; reason; _ } = + let desc = RPolyTest (name, RIncompatibleInstantiation name) in + DefT (replace_desc_reason desc reason, bogus_trust (), EmptyT Zeroed) + in + (* make bound type for given param and argument map *) + let mk_bound cx prev_args { bound; name; reason = param_reason; _ } = + (* For the top bound, we match the reason locations that appear in the + * respective bot bound: + * - 'loc' is the location of the type parameter (may be repositioned later) + * - 'def_loc' is the location of the type parameter, and + * - 'annot_loc_opt' is the location of the bound (if present). 
+ *) + mod_reason_of_t + (fun bound_reason -> + let param_loc = Reason.aloc_of_reason param_reason in + let annot_loc = annot_aloc_of_reason bound_reason in + let desc = desc_of_reason ~unwrap:false bound_reason in + repos_reason param_loc ?annot_loc (mk_reason (RPolyTest (name, desc)) param_loc)) + (subst cx prev_args bound) + in + (* make argument map by folding mk_arg over param list *) + let mk_argmap mk_arg = + List.fold_left (fun acc ({ name; _ } as p) -> SMap.add name (mk_arg acc p) acc) SMap.empty + in + (* for each p, a map with p bot and others bound + map with all bound *) + let linear cx = function + | [] -> [SMap.empty] + | params -> + let all = mk_argmap (mk_bound cx) params in + let each = + Core_list.map ~f:(fun ({ name; _ } as p) -> SMap.add name (mk_bot SMap.empty p) all) params + in + List.rev (all :: each) + in + (* a map for every combo of bot/bound params *) + let powerset cx params arg_map = + let none = mk_argmap mk_bot params in + List.fold_left + (fun maps ({ name; _ } as p) -> + let bots = Core_list.map ~f:(SMap.add name (SMap.find_unsafe name none)) maps in + let bounds = Core_list.map ~f:(fun m -> SMap.add name (mk_bound cx m p) m) maps in + bots @ bounds) + [arg_map] + params + in + (* main - run f over a collection of arg maps generated for params *) + fun cx params f -> + if params = [] then + f SMap.empty + else + let is_free = function + | { bound = DefT (_, _, MixedT _); _ } -> true + | _ -> false + in + let (free_params, dep_params) = List.partition is_free params in + let free_sets = linear cx free_params in + let powersets = Core_list.map ~f:(powerset cx dep_params) free_sets in + let (hd_map, tl_maps) = + match List.flatten powersets with + | x :: xs -> (x, xs) + | [] -> assert false + in + Core_list.fold_left ~f:(Fn.const (TestID.run f)) ~init:(f hd_map) tl_maps - | _, UseT (use_op, EvalT (t, DestructuringT (reason, s), i)) -> - rec_flow cx trace (l, UseT (use_op, eval_selector cx ~trace reason t s i)) +let inherited_method x = x <> "constructor" - (***************************) - (* type destructor trigger *) - (***************************) +let match_this_binding map f = + match SMap.find_unsafe "this" map with + | ReposT (_, t) -> f t + | _ -> failwith "not a this binding" - (* For evaluating type destructors we add a trigger, TypeDestructorTriggerT, - * to both sides of a type. When TypeDestructorTriggerT sees a new upper or - * lower bound we destruct that bound and flow the result in the same - * direction to some tout type. *) +let poly_minimum_arity = + let f n typeparam = + if typeparam.default = None then + n + 1 + else + n + in + Nel.fold_left f 0 - (* Don't let two TypeDestructorTriggerTs reach each other or else we quickly - * run into non-termination scenarios. *) - | TypeDestructorTriggerT _, UseT (_, TypeDestructorTriggerT _) -> () +(********************** start of slab **********************************) +module M__flow + (ReactJs : React_kit.REACT) + (AssertGround : Flow_common.ASSERT_GROUND) + (TrustChecking : Flow_common.TRUST_CHECKING) + (CustomFunKit : Custom_fun_kit.CUSTOM_FUN) + (ObjectKit : Object_kit.OBJECT) = +struct + (** NOTE: Do not call this function directly. Instead, call the wrapper + functions `rec_flow`, `join_flow`, or `flow_opt` (described below) inside + this module, and the function `flow` outside this module. 
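(* Editorial sketch, not part of the diff: the argument-map generation that
   generate_tests above describes, reduced to a bot/bound flag per type
   parameter. For interrelated parameters every combination is produced, and
   the all-bound assignment comes last so the final traversal "wins",
   matching the convention spelled out in the comment. *)
module StrMap = Map.Make (String)

type arg = Bot | Bound

let powerset params =
  List.fold_left
    (fun maps name ->
      let bots = List.map (StrMap.add name Bot) maps in
      let bounds = List.map (StrMap.add name Bound) maps in
      bots @ bounds)
    [StrMap.empty]
    params

let () =
  powerset ["A"; "B"]
  |> List.iter (fun m ->
         StrMap.iter
           (fun name v ->
             Printf.printf "%s=%s " name (match v with Bot -> "bot" | Bound -> "bound"))
           m;
         print_newline ())
(* four assignments are printed; the last one is: A=bound B=bound *)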
**) + let rec __flow cx ((l : Type.t), (u : Type.use_t)) trace = + if ground_subtype (l, u) then ( + if Context.trust_tracking cx then TrustChecking.trust_flow_to_use_t cx trace l u; + print_types_if_verbose cx trace (l, u) + ) else if Cache.FlowConstraint.get cx (l, u) then + print_types_if_verbose cx trace ~note:"(cached)" (l, u) + else ( + print_types_if_verbose cx trace (l, u); + if Context.trust_tracking cx then TrustChecking.trust_flow_to_use_t cx trace l u; + + (* limit recursion depth *) + RecursionCheck.check cx trace; + + (* Check if this worker has been told to cancel *) + check_canceled (); + + (* Expect that l is a def type. On the other hand, u may be a use type or a + def type: the latter typically when we have annotations. *) - | l, UseT (_, TypeDestructorTriggerT (use_op', reason, repos, d, tout)) -> - let l = match repos with - | None -> l - | Some (reason, use_desc) -> reposition_reason cx ~trace reason ~use_desc l - in - eval_destructor cx ~trace use_op' reason l d tout - - | TypeDestructorTriggerT (use_op', reason, _, d, tout), UseT (use_op, AnnotT (r, t, use_desc)) -> - let tout' = Tvar.mk_where cx reason (fun tout' -> - let repos = Some (r, use_desc) in - rec_flow cx trace (t, UseT (use_op, TypeDestructorTriggerT (use_op', reason, repos, d, tout'))) - ) in - rec_flow cx trace (tout', ReposUseT (reason, false, use_op, tout)) - - | TypeDestructorTriggerT (use_op', reason, _, d, tout), UseT (use_op, u) -> - (* With the same "slingshot" trick used by AnnotT, hold the lower bound - * at bay until result itself gets concretized, and then flow the lower - * bound to that concrete type. *) - let t = Tvar.mk_where cx reason (fun t -> eval_destructor cx ~trace use_op' reason u d t) in - let use_desc = false in - rec_flow cx trace (t, ReposUseT (reason, use_desc, use_op, tout)) - - (* Ignore any non-type uses. The implementation of type destructors operate - * solely on types and not arbitrary uses. We also don't want to add errors - * for arbitrary uses that get added to the subject of our trigger in type - * destruction evaluation. - * - * This may be a risky behavior when considering tvars with *only* non-type - * uses. However, such tvars are rare and often come from non-sensical - * programs. - * - * Type destructors, currently, may only be created as type annotations. - * This means that the type is either always 0->1, or it is a polymorphic - * type argument which will be instantiated with an open tvar. Polymorphic - * type arguments will also always get some type upper bound with the - * default type being MixedT. We destruct these upper bounds. *) - | TypeDestructorTriggerT _, _ -> () + (* Type parameters should always be substituted out, and as such they should + never appear "exposed" in flows. (They can still appear bound inside + polymorphic definitions.) *) + not_expect_bound l; + not_expect_bound_use u; - (************************) - (* Full type resolution *) - (************************) + (* Types that are classified as def types but don't make sense as use types + should not appear as use types. *) + expect_proper_def_use u; - (* Full resolution of a type involves (1) walking the type to collect a + (* Before processing the flow action, check that it is not deferred. If it + is, then when speculation is complete, the action either fires or is + discarded depending on whether the case that created the action is + selected or not. 
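(* Editorial sketch, not part of the diff: the shape of the dispatch at the
   top of __flow above -- a cheap syntactic fast path, then a per-constraint
   cache, and only then the expensive rule matching. trivial, seen and work
   are stand-ins for ground_subtype, Cache.FlowConstraint.get and the big
   match on (l, u). *)
let process ~trivial ~seen ~work constr =
  if trivial constr then ()        (* ground_subtype-style fast path *)
  else if seen constr then ()      (* constraint already processed: skip *)
  else work constr                 (* full rule dispatch *)

let () =
  let cache : (string * string, unit) Hashtbl.t = Hashtbl.create 16 in
  let seen c = Hashtbl.mem cache c || (Hashtbl.add cache c (); false) in
  let trivial (l, u) = l = u in
  let work (l, u) = Printf.printf "flow %s ~> %s\n" l u in
  process ~trivial ~seen ~work ("number", "number");  (* fast path: no output *)
  process ~trivial ~seen ~work ("number", "mixed");   (* printed once *)
  process ~trivial ~seen ~work ("number", "mixed")    (* cached: no output *)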
*) + if Speculation.(defer_action cx (Action.Flow (l, u))) then + print_if_verbose cx trace ~indent:1 ["deferred during speculation"] + (* Either propagate AnyT through the use type, or short-circuit because any <: u trivially *) + else if + match l with + | AnyT _ -> any_propagated cx trace l u + | _ -> false + (* Either propagate AnyT through the def type, or short-circuit because l <: any trivially *) + then + () + else if + match u with + | UseT (use_op, (AnyT _ as any)) -> any_propagated_use cx trace use_op any l + | _ -> false + then + () + else if + match l with + | DefT (_, _, EmptyT flavor) -> empty_success flavor u + | _ -> false + then + () + else + match (l, u) with + (********) + (* eval *) + (********) + | (EvalT (t, TypeDestructorT (use_op', reason, d), id), _) -> + let (_, result) = mk_type_destructor cx ~trace use_op' reason t d id in + rec_flow cx trace (result, u) + | (_, UseT (use_op, EvalT (t, TypeDestructorT (use_op', reason, d), id))) -> + let (slingshot, result) = mk_type_destructor cx ~trace use_op' reason t d id in + if slingshot then + rec_flow cx trace (result, ReposUseT (reason, false, use_op, l)) + else + rec_flow cx trace (l, UseT (use_op, result)) + | (EvalT (t, LatentPredT (reason, p), i), _) -> + rec_flow cx trace (eval_latent_pred cx ~trace reason t p i, u) + (******************) + (* process X ~> Y *) + (******************) + | (OpenT (_, tvar1), UseT (use_op, OpenT (_, tvar2))) -> + let (id1, constraints1) = Context.find_constraints cx tvar1 in + let (id2, constraints2) = Context.find_constraints cx tvar2 in + (match (constraints1, constraints2) with + | (Unresolved bounds1, Unresolved bounds2) -> + if not_linked (id1, bounds1) (id2, bounds2) then ( + add_upper_edges cx trace (id1, bounds1) (id2, bounds2); + add_lower_edges cx trace (id1, bounds1) (id2, bounds2); + flows_across cx trace ~use_op bounds1.lower bounds2.upper + ) + | (Unresolved bounds1, (Resolved (use_op', t2) | FullyResolved (use_op', t2))) -> + let t2_use = flow_use_op use_op' (UseT (use_op, t2)) in + edges_and_flows_to_t cx trace (id1, bounds1) t2_use + | ((Resolved (_, t1) | FullyResolved (_, t1)), Unresolved bounds2) -> + edges_and_flows_from_t cx trace ~use_op t1 (id2, bounds2) + | ( (Resolved (_, t1) | FullyResolved (_, t1)), + (Resolved (use_op', t2) | FullyResolved (use_op', t2)) ) -> + let t2_use = flow_use_op use_op' (UseT (use_op, t2)) in + rec_flow cx trace (t1, t2_use)) + (******************) + (* process Y ~> U *) + (******************) + | (OpenT (r, tvar), t2) -> + let t2 = + match desc_of_reason r with + | RTypeParam _ -> mod_use_op_of_use_t (fun op -> Frame (ImplicitTypeParam, op)) t2 + | _ -> t2 + in + let (id1, constraints1) = Context.find_constraints cx tvar in + (match constraints1 with + | Unresolved bounds1 -> edges_and_flows_to_t cx trace (id1, bounds1) t2 + | Resolved (_, t1) + | FullyResolved (_, t1) -> + rec_flow cx trace (t1, t2)) + (******************) + (* process L ~> X *) + (******************) + | (t1, UseT (use_op, OpenT (_, tvar))) -> + let (id2, constraints2) = Context.find_constraints cx tvar in + (match constraints2 with + | Unresolved bounds2 -> edges_and_flows_from_t cx trace ~use_op t1 (id2, bounds2) + | Resolved (use_op', t2) + | FullyResolved (use_op', t2) -> + let t2_use = flow_use_op use_op' (UseT (use_op, t2)) in + rec_flow cx trace (t1, t2_use)) + (*****************) + (* any with uses *) + (*****************) + | (_, UseT (_, MergedT (_, uses))) -> List.iter (fun u -> rec_flow cx trace (l, u)) uses + | (MergedT (reason, _), _) -> rec_flow 
cx trace (Unsoundness.why Merged reason, u) + (****************) + (* eval, contd. *) + (****************) + | (_, UseT (use_op, EvalT (t, LatentPredT (reason, p), i))) -> + rec_flow cx trace (l, UseT (use_op, eval_latent_pred cx ~trace reason t p i)) + (***************************) + (* type destructor trigger *) + (***************************) + + (* For evaluating type destructors we add a trigger, TypeDestructorTriggerT, + * to both sides of a type. When TypeDestructorTriggerT sees a new upper or + * lower bound we destruct that bound and flow the result in the same + * direction to some tout type. *) + + (* Don't let two TypeDestructorTriggerTs reach each other or else we quickly + * run into non-termination scenarios. *) + | (TypeDestructorTriggerT _, UseT (_, TypeDestructorTriggerT _)) -> () + | (l, UseT (_, TypeDestructorTriggerT (use_op', reason, repos, d, tout))) -> + let l = + match repos with + | None -> l + | Some (reason, use_desc) -> reposition_reason cx ~trace reason ~use_desc l + in + eval_destructor cx ~trace use_op' reason l d tout + | ( TypeDestructorTriggerT (use_op', reason, _, d, tout), + UseT (use_op, AnnotT (r, t, use_desc)) ) -> + let tout' = + Tvar.mk_where cx reason (fun tout' -> + let repos = Some (r, use_desc) in + rec_flow + cx + trace + (t, UseT (use_op, TypeDestructorTriggerT (use_op', reason, repos, d, tout')))) + in + rec_flow cx trace (tout', ReposUseT (reason, false, use_op, tout)) + | (TypeDestructorTriggerT (use_op', reason, _, d, tout), UseT (use_op, u)) -> + (* With the same "slingshot" trick used by AnnotT, hold the lower bound + * at bay until result itself gets concretized, and then flow the lower + * bound to that concrete type. *) + let t = + Tvar.mk_where cx reason (fun t -> eval_destructor cx ~trace use_op' reason u d t) + in + let use_desc = false in + rec_flow cx trace (t, ReposUseT (reason, use_desc, use_op, tout)) + (* Ignore any non-type uses. The implementation of type destructors operate + * solely on types and not arbitrary uses. We also don't want to add errors + * for arbitrary uses that get added to the subject of our trigger in type + * destruction evaluation. + * + * This may be a risky behavior when considering tvars with *only* non-type + * uses. However, such tvars are rare and often come from non-sensical + * programs. + * + * Type destructors, currently, may only be created as type annotations. + * This means that the type is either always 0->1, or it is a polymorphic + * type argument which will be instantiated with an open tvar. Polymorphic + * type arguments will also always get some type upper bound with the + * default type being MixedT. We destruct these upper bounds. *) + | (TypeDestructorTriggerT _, _) -> () + (************************) + (* Full type resolution *) + (************************) + + (* Full resolution of a type involves (1) walking the type to collect a bunch of unresolved tvars (2) emitting constraints that, once those tvars are resolved, recursively trigger the process for the resolved types (3) finishing when no unresolved tvars remain. @@ -1512,14 +1291,11 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = in the check, which literally tries each branch of the union or intersection in turn, maintaining some matching state as it goes: see speculative_matches for details). 
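(* Editorial sketch, not part of the diff: the bookkeeping behind the
   OpenT ~> OpenT case above. When two unresolved tvars are linked, the edges
   are recorded in both directions (omitted here) and every lower bound
   already seen by the first flows into every upper bound already seen by the
   second; flow_lower_to_upper stands in for flows_across. *)
type tvar = {
  id : int;
  lowers : string list;  (* concrete lower bounds collected so far *)
  uppers : string list;  (* concrete upper bounds collected so far *)
}

let flow_lower_to_upper l u = Printf.printf "%s ~> %s\n" l u

let link t1 t2 =
  if t1.id <> t2.id then
    List.iter (fun l -> List.iter (flow_lower_to_upper l) t2.uppers) t1.lowers

let () =
  let a = { id = 1; lowers = ["number"]; uppers = [] } in
  let b = { id = 2; lowers = []; uppers = ["number | string"] } in
  link a b
(* prints: number ~> number | string *)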
*) - - | t, ChoiceKitUseT (reason, FullyResolveType id) -> - fully_resolve_type cx trace reason id t - - | InternalT (ChoiceKitT (_, Trigger)), ChoiceKitUseT (reason, TryFlow (i, spec)) -> - speculative_matches cx trace reason i spec - - (* Intersection types need a preprocessing step before they can be checked; + | (t, ChoiceKitUseT (reason, FullyResolveType id)) -> + fully_resolve_type cx trace reason id t + | (InternalT (ChoiceKitT (_, Trigger)), ChoiceKitUseT (reason, TryFlow (i, spec))) -> + speculative_matches cx trace reason i spec + (* Intersection types need a preprocessing step before they can be checked; this step brings it closer to parity with the checking of union types, where the preprocessing effectively happens "automatically." This apparent asymmetry is explained in prep_try_intersection. @@ -1532,172 +1308,140 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = it. Also, unlike full type resolution, the tvars that are concretized don't necessarily have the 0->1 property: they could be concretized at different types, as more and more lower bounds appear. *) - - | DefT (_, UnionT urep), IntersectionPreprocessKitT (_, ConcretizeTypes _) -> - UnionRep.members urep |> List.iter (fun t -> - rec_flow cx trace (t, u) - ) - - | DefT (lreason, MaybeT t), IntersectionPreprocessKitT (_, ConcretizeTypes _) -> - let lreason = replace_reason_const RNullOrVoid lreason in - rec_flow cx trace (NullT.make lreason, u); - rec_flow cx trace (VoidT.make lreason, u); - rec_flow cx trace (t, u); - - | DefT (r, OptionalT t), IntersectionPreprocessKitT (_, ConcretizeTypes _) -> - rec_flow cx trace (VoidT.why r, u); - rec_flow cx trace (t, u); - - | AnnotT (r, t, use_desc), IntersectionPreprocessKitT (_, ConcretizeTypes _) -> - (* TODO: directly derive loc and desc from the reason of tvar *) - let loc = aloc_of_reason r |> ALoc.to_loc in - let desc = if use_desc then Some (desc_of_reason r) else None in - rec_flow cx trace (reposition ~trace cx loc ?desc t, u) - - | t, IntersectionPreprocessKitT (reason, - ConcretizeTypes (unresolved, resolved, DefT (r, IntersectionT rep), u)) -> - prep_try_intersection cx trace reason unresolved (t::resolved) u r rep - - (*****************************) - (* Refinement type subtyping *) - (*****************************) - - | _, RefineT (reason, LatentP (fun_t, idx), tvar) -> - flow cx (fun_t, CallLatentPredT (reason, true, idx, l, tvar)) - - (*************) - (* Debugging *) - (*************) - - | _, DebugPrintT reason -> - let str = Debug_js.jstr_of_t cx l in - add_output cx ~trace (FlowError.EDebugPrint (reason, str)) - - | DefT (_, NumT (Literal (_, (n, _)))), DebugSleepT _ -> - let n = ref n in - while !n > 0.0 do - WorkerCancel.check_should_exit (); - Unix.sleepf (min (!n) 1.0); - n := !n -. 1. 
- done - - (*************************) - (* repositioning, part 1 *) - (*************************) - - (* if a ReposT is used as a lower bound, `reposition` can reposition it *) - | ReposT (reason, l), _ -> - rec_flow cx trace (reposition_reason cx ~trace reason l, u) - - (* if a ReposT is used as an upper bound, wrap the now-concrete lower bound + | (UnionT (_, urep), IntersectionPreprocessKitT (_, ConcretizeTypes _)) -> + UnionRep.members urep |> List.iter (fun t -> rec_flow cx trace (t, u)) + | (MaybeT (lreason, t), IntersectionPreprocessKitT (_, ConcretizeTypes _)) -> + let lreason = replace_desc_reason RNullOrVoid lreason in + rec_flow cx trace (NullT.make lreason |> with_trust Trust.bogus_trust, u); + rec_flow cx trace (VoidT.make lreason |> with_trust Trust.bogus_trust, u); + rec_flow cx trace (t, u) + | (OptionalT (r, t), IntersectionPreprocessKitT (_, ConcretizeTypes _)) -> + rec_flow cx trace (VoidT.why r |> with_trust Trust.bogus_trust, u); + rec_flow cx trace (t, u) + | (AnnotT (r, t, use_desc), IntersectionPreprocessKitT (_, ConcretizeTypes _)) -> + (* TODO: directly derive loc and desc from the reason of tvar *) + let loc = aloc_of_reason r in + let desc = + if use_desc then + Some (desc_of_reason r) + else + None + in + rec_flow cx trace (reposition ~trace cx loc ?desc t, u) + | ( t, + IntersectionPreprocessKitT + (reason, ConcretizeTypes (unresolved, resolved, IntersectionT (r, rep), u)) ) -> + prep_try_intersection cx trace reason unresolved (t :: resolved) u r rep + (*****************************) + (* Refinement type subtyping *) + (*****************************) + | (_, RefineT (reason, LatentP (fun_t, idx), tvar)) -> + flow cx (fun_t, CallLatentPredT (reason, true, idx, l, tvar)) + (*************) + (* Debugging *) + (*************) + | (_, DebugPrintT reason) -> + let str = Debug_js.jstr_of_t ~depth:10 cx l in + add_output cx ~trace (Error_message.EDebugPrint (reason, str)) + | (DefT (_, _, NumT (Literal (_, (n, _)))), DebugSleepT _) -> + let n = ref n in + while !n > 0.0 do + WorkerCancel.check_should_exit (); + Unix.sleepf (min !n 1.0); + n := !n -. 1. + done + (*************************) + (* repositioning, part 1 *) + (*************************) + + (* if a ReposT is used as a lower bound, `reposition` can reposition it *) + | (ReposT (reason, l), _) -> rec_flow cx trace (reposition_reason cx ~trace reason l, u) + (* if a ReposT is used as an upper bound, wrap the now-concrete lower bound in a `ReposUpperT`, which will repos `u` when `u` becomes concrete. *) - | _, UseT (use_op, ReposT (reason, u)) -> - rec_flow cx trace (InternalT (ReposUpperT (reason, l)), UseT (use_op, u)) - - | InternalT (ReposUpperT (reason, l)), UseT (use_op, u) -> - (* since this guarantees that `u` is not an OpenT, it's safe to use + | (_, UseT (use_op, ReposT (reason, u))) -> + rec_flow cx trace (InternalT (ReposUpperT (reason, l)), UseT (use_op, u)) + | (InternalT (ReposUpperT (reason, l)), UseT (use_op, u)) -> + (* since this guarantees that `u` is not an OpenT, it's safe to use `reposition` on the upper bound here. 
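(* Editorial sketch, not part of the diff: the "try each branch in turn" idea
   behind the speculative_matches call mentioned above, with an exception
   standing in for the real speculation state and rollback machinery; the
   check function and branch strings are placeholders. *)
exception Speculation_failed

(* succeed if the lower bound checks against at least one branch *)
let try_branches check l branches =
  let branch_ok u =
    try
      check l u;
      true
    with Speculation_failed -> false
  in
  if not (List.exists branch_ok branches) then raise Speculation_failed

let () =
  let check l u = if l <> u then raise Speculation_failed in
  try_branches check "number" ["string"; "number"];
  print_endline "matched the second branch"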
*) - let u = reposition_reason cx ~trace reason u in - rec_flow cx trace (l, UseT (use_op, u)) - - | InternalT (ReposUpperT (_, l)), _ -> - rec_flow cx trace (l, u) - - (***************) - (* annotations *) - (***************) - - (* Special cases where we want to recursively concretize types within the + let u = reposition_reason cx ~trace reason u in + rec_flow cx trace (l, UseT (use_op, u)) + | (InternalT (ReposUpperT (_, l)), _) -> rec_flow cx trace (l, u) + (***************) + (* annotations *) + (***************) + + (* Special cases where we want to recursively concretize types within the lower bound. *) - - | DefT (r, UnionT rep), ReposUseT (reason, use_desc, use_op, l) -> - let rep = UnionRep.ident_map (annot use_desc) rep in - let annot_loc = annot_loc_of_reason reason in - let r = repos_reason (aloc_of_reason reason |> ALoc.to_loc) ?annot_loc r in - let r = - if use_desc - then replace_reason_const (desc_of_reason reason) r - else r - in - rec_flow cx trace (l, UseT (use_op, DefT (r, UnionT rep))) - - | DefT (r, MaybeT u), ReposUseT (reason, use_desc, use_op, l) -> - let annot_loc = annot_loc_of_reason reason in - let r = repos_reason (aloc_of_reason reason |> ALoc.to_loc) ?annot_loc r in - let r = - if use_desc - then replace_reason_const (desc_of_reason reason) r - else r - in - rec_flow cx trace (l, UseT (use_op, DefT (r, MaybeT (annot use_desc u)))) - - | DefT (r, OptionalT u), ReposUseT (reason, use_desc, use_op, l) -> - let annot_loc = annot_loc_of_reason reason in - let r = repos_reason (aloc_of_reason reason |> ALoc.to_loc) ?annot_loc r in - let r = - if use_desc - then replace_reason_const (desc_of_reason reason) r - else r - in - rec_flow cx trace (l, UseT (use_op, DefT (r, OptionalT (annot use_desc u)))) - - (* Waits for a def type to become concrete, repositions it as an upper UseT + | (UnionT (r, rep), ReposUseT (reason, use_desc, use_op, l)) -> + let rep = UnionRep.ident_map (annot use_desc) rep in + let annot_loc = annot_aloc_of_reason reason in + let r = repos_reason (aloc_of_reason reason) ?annot_loc r in + let r = + if use_desc then + replace_desc_reason (desc_of_reason reason) r + else + r + in + rec_flow cx trace (l, UseT (use_op, UnionT (r, rep))) + | (MaybeT (r, u), ReposUseT (reason, use_desc, use_op, l)) -> + let annot_loc = annot_aloc_of_reason reason in + let r = repos_reason (aloc_of_reason reason) ?annot_loc r in + let r = + if use_desc then + replace_desc_reason (desc_of_reason reason) r + else + r + in + rec_flow cx trace (l, UseT (use_op, MaybeT (r, annot use_desc u))) + | (OptionalT (r, u), ReposUseT (reason, use_desc, use_op, l)) -> + let annot_loc = annot_aloc_of_reason reason in + let r = repos_reason (aloc_of_reason reason) ?annot_loc r in + let r = + if use_desc then + replace_desc_reason (desc_of_reason reason) r + else + r + in + rec_flow cx trace (l, UseT (use_op, OptionalT (r, annot use_desc u))) + (* Waits for a def type to become concrete, repositions it as an upper UseT using the stored reason. This can be used to store a reason as it flows through a tvar. 
*) - - | (u_def, ReposUseT (reason, use_desc, use_op, l)) -> - let u = reposition_reason cx ~trace reason ~use_desc u_def in - rec_flow cx trace (l, UseT (use_op, u)) - - (* The sink component of an annotation constrains values flowing + | (u_def, ReposUseT (reason, use_desc, use_op, l)) -> + let u = reposition_reason cx ~trace reason ~use_desc u_def in + rec_flow cx trace (l, UseT (use_op, u)) + (* The sink component of an annotation constrains values flowing into the annotated site. *) - - | _, UseT (use_op, AnnotT (r, t, use_desc)) -> - rec_flow cx trace (t, ReposUseT (r, use_desc, use_op, l)) - - (* The source component of an annotation flows out of the annotated + | (_, UseT (use_op, AnnotT (r, t, use_desc))) -> + rec_flow cx trace (t, ReposUseT (r, use_desc, use_op, l)) + (* The source component of an annotation flows out of the annotated site to downstream uses. *) - - | AnnotT (r, t, use_desc), u -> - let t = reposition_reason ~trace cx r ~use_desc t in - rec_flow cx trace (t, u) - - (****************************************************************) - (* BecomeT unifies a tvar with an incoming concrete lower bound *) - (****************************************************************) - - (* MatchingPropT is triggered by a refinement, which means that the - BecomeT has already fired and become the type being refined. We - prevent the refined type from unifying with the original type - because the former is necessarily a subtype of the latter and - attempting to unify them is a symptom of an issue with BecomeT. *) - | MatchingPropT _, BecomeT _ -> - () - - | _, BecomeT (reason, t) -> - let l = reposition ~trace cx (aloc_of_reason reason |> ALoc.to_loc) l in - rec_unify cx trace ~use_op:unknown_use ~unify_any:true l t - - (***********************) - (* guarded unification *) - (***********************) - - (** Utility to unify a pair of types based on a trigger. Triggers are + | (AnnotT (r, t, use_desc), u) -> + let t = reposition_reason ~trace cx r ~use_desc t in + rec_flow cx trace (t, u) + (****************************************************************) + (* BecomeT unifies a tvar with an incoming concrete lower bound *) + (****************************************************************) + | (_, BecomeT (reason, t)) when is_proper_def l -> + let l = reposition ~trace cx (aloc_of_reason reason) l in + rec_unify cx trace ~use_op:unknown_use ~unify_any:true l t + (***********************) + (* guarded unification *) + (***********************) + + (* Utility to unify a pair of types based on a trigger. Triggers are commonly type variables that are set up to record when certain operations have been processed: until then, they remain latent. For example, we can respond to events such as "a property is added," "a refinement succeeds," etc., by setting up unification constraints that are processed only when the corresponding triggers fire. *) + | (_, UnifyT (t, t_other)) -> + rec_unify cx trace ~use_op:unknown_use ~unify_any:true t t_other + (*********************************************************************) + (* `import type` creates a properly-parameterized type alias for the *) + (* remote type -- but only for particular, valid remote types. 
*) + (*********************************************************************) - | (_, UnifyT(t,t_other)) -> - rec_unify cx trace ~use_op:unknown_use ~unify_any:true t t_other - - (*********************************************************************) - (* `import type` creates a properly-parameterized type alias for the *) - (* remote type -- but only for particular, valid remote types. *) - (*********************************************************************) - - (** TODO: This rule allows interpreting an object as a type! + (* TODO: This rule allows interpreting an object as a type! It is currently used to work with modules that export named types, e.g. 'react' or 'immutable'. For example, one can do @@ -1726,87 +1470,94 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = type` to `import` followed by `type`. **) - | DefT (_, ObjT _), ImportTypeT(_, "default", t) -> - rec_flow_t cx trace (l, t) - - | (exported_type, ImportTypeT(reason, export_name, t)) -> - (match canonicalize_imported_type cx trace reason exported_type with - | Some imported_t -> rec_flow_t cx trace (imported_t, t) - | None -> add_output cx ~trace ( - FlowError.EImportValueAsType (reason, export_name) - ) - ) - - (************************************************************************) - (* `import typeof` creates a properly-parameterized type alias for the *) - (* "typeof" the remote export. *) - (************************************************************************) - | DefT (_, PolyT(typeparams, ((DefT (_, ClassT _) | DefT (_, FunT _)) as lower_t), id)), - ImportTypeofT(reason, _, t) -> - let typeof_t = mk_typeof_annotation cx ~trace reason lower_t in - rec_flow_t cx trace (poly_type id typeparams - (DefT (reason, TypeT (ImportTypeofKind, typeof_t))), t) - - | (DefT (_, TypeT _) | DefT (_, PolyT(_, DefT (_, TypeT _), _))), - ImportTypeofT(reason, export_name, _) -> - add_output cx ~trace (FlowError.EImportTypeAsTypeof (reason, export_name)) - - | (_, ImportTypeofT(reason, _, t)) -> - let typeof_t = mk_typeof_annotation cx ~trace reason l in - rec_flow_t cx trace (DefT (reason, TypeT (ImportTypeofKind, typeof_t)), t) - - (**************************************************************************) - (* Module exports *) - (* *) - (* Flow supports both CommonJS and standard ES modules as well as some *) - (* interoperability semantics for communicating between the two module *) - (* systems in both directions. *) - (* *) - (* In order to support both systems at once, Flow abstracts the notion of *) - (* module exports by storing a type map for each of the exports of a *) - (* given module, and for each module there is a ModuleT that maintains *) - (* this type map. The exported types are then considered immutable once *) - (* the module has finished inference. *) - (* *) - (* When a type is set for the CommonJS exports value, we store it *) - (* separately from the normal named exports tmap that ES exports are *) - (* stored within. This allows us to distinguish CommonJS modules from ES *) - (* modules when interpreting an ES import statement -- which is important *) - (* because ES ModuleNamespace objects built from CommonJS exports are a *) - (* little bit magic. 
*) - (* *) - (* For example: If a CommonJS module exports an object, we will extract *) - (* each of the properties of that object and consider them as "named" *) - (* exports for the purposes of an import statement elsewhere: *) - (* *) - (* // CJSModule.js *) - (* module.exports = { *) - (* someNumber: 42 *) - (* }; *) - (* *) - (* // ESModule.js *) - (* import {someNumber} from "CJSModule"; *) - (* var a: number = someNumber; *) - (* *) - (* We also map CommonJS export values to the "default" export for *) - (* purposes of import statements in other modules: *) - (* *) - (* // CJSModule.js *) - (* module.exports = { *) - (* someNumber: 42 *) - (* }; *) - (* *) - (* // ESModule.js *) - (* import CJSDefaultExport from "CJSModule"; *) - (* var a: number = CJSDefaultExport.someNumber; *) - (* *) - (* Note that the ModuleT type is not intended to be surfaced to any *) - (* userland-visible constructs. Instead it's meant as an internal *) - (* construct that is only *mapped* to/from userland constructs (such as a *) - (* CommonJS exports object or an ES ModuleNamespace object). *) - (**************************************************************************) - - (* In the following rules, ModuleT appears in two contexts: as imported + | ((ExactT (_, DefT (_, _, ObjT _)) | DefT (_, _, ObjT _)), ImportTypeT (_, "default", t)) + -> + rec_flow_t cx trace (l, t) + | (exported_type, ImportTypeT (reason, export_name, t)) -> + (match canonicalize_imported_type cx trace reason exported_type with + | Some imported_t -> rec_flow_t cx trace (imported_t, t) + | None -> add_output cx ~trace (Error_message.EImportValueAsType (reason, export_name))) + (************************************************************************) + (* `import typeof` creates a properly-parameterized type alias for the *) + (* "typeof" the remote export. *) + (************************************************************************) + | ( DefT + ( _, + _, + PolyT + ( tparams_loc, + typeparams, + ((DefT (_, _, ClassT _) | DefT (_, _, FunT _)) as lower_t), + id ) ), + ImportTypeofT (reason, _, t) ) -> + let typeof_t = mk_typeof_annotation cx ~trace reason lower_t in + rec_flow_t + cx + trace + ( poly_type + id + tparams_loc + typeparams + (DefT (reason, bogus_trust (), TypeT (ImportTypeofKind, typeof_t))), + t ) + | ( (DefT (_, _, TypeT _) | DefT (_, _, PolyT (_, _, DefT (_, _, TypeT _), _))), + ImportTypeofT (reason, export_name, _) ) -> + add_output cx ~trace (Error_message.EImportTypeAsTypeof (reason, export_name)) + | (_, ImportTypeofT (reason, _, t)) -> + let typeof_t = mk_typeof_annotation cx ~trace reason l in + rec_flow_t cx trace (DefT (reason, bogus_trust (), TypeT (ImportTypeofKind, typeof_t)), t) + (**************************************************************************) + (* Module exports *) + (* *) + (* Flow supports both CommonJS and standard ES modules as well as some *) + (* interoperability semantics for communicating between the two module *) + (* systems in both directions. *) + (* *) + (* In order to support both systems at once, Flow abstracts the notion of *) + (* module exports by storing a type map for each of the exports of a *) + (* given module, and for each module there is a ModuleT that maintains *) + (* this type map. The exported types are then considered immutable once *) + (* the module has finished inference. *) + (* *) + (* When a type is set for the CommonJS exports value, we store it *) + (* separately from the normal named exports tmap that ES exports are *) + (* stored within. 
This allows us to distinguish CommonJS modules from ES *) + (* modules when interpreting an ES import statement -- which is important *) + (* because ES ModuleNamespace objects built from CommonJS exports are a *) + (* little bit magic. *) + (* *) + (* For example: If a CommonJS module exports an object, we will extract *) + (* each of the properties of that object and consider them as "named" *) + (* exports for the purposes of an import statement elsewhere: *) + (* *) + (* // CJSModule.js *) + (* module.exports = { *) + (* someNumber: 42 *) + (* }; *) + (* *) + (* // ESModule.js *) + (* import {someNumber} from "CJSModule"; *) + (* var a: number = someNumber; *) + (* *) + (* We also map CommonJS export values to the "default" export for *) + (* purposes of import statements in other modules: *) + (* *) + (* // CJSModule.js *) + (* module.exports = { *) + (* someNumber: 42 *) + (* }; *) + (* *) + (* // ESModule.js *) + (* import CJSDefaultExport from "CJSModule"; *) + (* var a: number = CJSDefaultExport.someNumber; *) + (* *) + (* Note that the ModuleT type is not intended to be surfaced to any *) + (* userland-visible constructs. Instead it's meant as an internal *) + (* construct that is only *mapped* to/from userland constructs (such as a *) + (* CommonJS exports object or an ES ModuleNamespace object). *) + (**************************************************************************) + + (* In the following rules, ModuleT appears in two contexts: as imported modules, and as modules to be exported. As a module to be exported, ModuleT denotes a "growing" module. In this @@ -1820,232 +1571,276 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = take it apart and read it. The same rules could also be hit by modules that are not @flow, so the rules have to deal with `any`. *) - (* util that grows a module by adding named exports from a given map *) - | (ModuleT(_, exports, _), ExportNamedT(_, skip_dupes, tmap, t_out)) -> - tmap |> SMap.iter (fun name (loc, t) -> - if skip_dupes && Context.has_export cx exports.exports_tmap name - then () - else Context.set_export cx exports.exports_tmap name (loc, t) - ); - rec_flow_t cx trace (l, t_out) - - (** Copy the named exports from a source module into a target module. Used + (* util that grows a module by adding named exports from a given map *) + | (ModuleT (_, exports, _), ExportNamedT (reason, skip_dupes, tmap, export_kind, t_out)) -> + tmap + |> SMap.iter (fun name (loc, t) -> + if skip_dupes && Context.has_export cx exports.exports_tmap name then + () + else + let t' = + match export_kind with + | ExportValue + (* If it's a re-export, we can assume that the appropriate export checks have been + * applied in the original module. *) + + | ReExport -> + t + (* If it's of the form `export type` then check to make sure it's actually a type. *) + | ExportType -> + let t' = Tvar.mk cx (reason_of_t t) in + rec_flow cx trace (t, AssertExportIsTypeT (reason, name, t')); + t' + in + Context.set_export cx exports.exports_tmap name (loc, t')); + rec_flow_t cx trace (l, t_out) + | (_, AssertExportIsTypeT (_, name, t_out)) -> + if is_type l then + rec_flow_t cx trace (l, t_out) + else + let reason = reason_of_t l in + add_output cx ~trace Error_message.(EExportValueAsType (reason, name)); + rec_flow_t cx trace (AnyT.error reason, t_out) + (* Copy the named exports from a source module into a target module. Used to implement `export * from 'SomeModule'`, with the current module as the target and the imported module as the source. 
*) - | (ModuleT(_, source_exports, _), - CopyNamedExportsT(reason, target_module_t, t_out)) -> - let source_tmap = Context.find_exports cx source_exports.exports_tmap in - rec_flow cx trace ( - target_module_t, - ExportNamedT(reason, (*skip_dupes*)true, source_tmap, t_out) - ) - - (** - * Copy only the type exports from a source module into a target module. - * Used to implement `export type * from ...`. - *) - | ModuleT(_, source_exports, _), - CopyTypeExportsT(reason, target_module_t, t_out) -> - let source_exports = Context.find_exports cx source_exports.exports_tmap in - (* Remove locations. TODO at some point we may want to include them here. *) - let source_exports = SMap.map snd source_exports in - let target_module_t = - SMap.fold (fun export_name export_t target_module_t -> - Tvar.mk_where cx reason (fun t -> rec_flow cx trace ( - export_t, - ExportTypeT(reason, true, export_name, target_module_t, t) - )) - ) source_exports target_module_t - in - rec_flow_t cx trace (target_module_t, t_out) - - (** - * Export a type from a given ModuleT, but only if the type is compatible - * with `import type`/`export type`. When it is not compatible, it is simply - * not added to the exports map. - * - * Note that this is very similar to `ExportNamedT` except that it only - * exports one type at a time and it takes the type to be exported as a - * lower (so that the type can be filtered post-resolution). - *) - | l, ExportTypeT(reason, skip_dupes, export_name, target_module_t, t_out) -> - let is_type_export = ( - match l with - | DefT (_, ObjT _) when export_name = "default" -> true - | l -> canonicalize_imported_type cx trace reason l <> None - ) in - if is_type_export then - rec_flow cx trace (target_module_t, ExportNamedT( - reason, - skip_dupes, - (* TODO we may want to add location information here *) - SMap.singleton export_name (None, l), - t_out - )) - else - rec_flow_t cx trace (target_module_t, t_out) - - (* There is nothing to copy from a module exporting `any` or `Object`. 
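(* Editorial sketch, not part of the diff: the `export type * from ...`
   behaviour described above for CopyTypeExportsT / ExportTypeT, on a toy
   export map -- only exports that qualify as types are copied into the
   target module; value exports are simply not added. Names below are
   placeholders. *)
module StrMap = Map.Make (String)

type toy_export =
  | TypeExport of string
  | ValueExport of string

let copy_type_exports ~source ~target =
  StrMap.fold
    (fun name export acc ->
      match export with
      | TypeExport _ -> StrMap.add name export acc
      | ValueExport _ -> acc)
    source target

let () =
  let source =
    StrMap.of_seq (List.to_seq [("T", TypeExport "T"); ("f", ValueExport "() => void")])
  in
  copy_type_exports ~source ~target:StrMap.empty
  |> StrMap.iter (fun name _ -> Printf.printf "re-exported type: %s\n" name)
(* prints only: re-exported type: T *)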
*) - | DefT (_, (AnyT | AnyObjT)), CopyNamedExportsT(_, target_module, t) -> - rec_flow_t cx trace (target_module, t) - - (** - * ObjT CommonJS export values have their properties turned into named - * exports - *) - | DefT (_, ObjT {props_tmap; proto_t; _;}), - CJSExtractNamedExportsT( - reason, (module_t_reason, exporttypes, is_strict), t_out - ) -> - - (* Copy props from the prototype *) - let module_t = Tvar.mk_where cx reason (fun t -> - rec_flow cx trace ( - proto_t, - CJSExtractNamedExportsT(reason, (module_t_reason, exporttypes, is_strict), t) - ) - ) in - - (* Copy own props *) - rec_flow cx trace (module_t, ExportNamedT( - reason, - false, (* skip_dupes *) - Properties.extract_named_exports (Context.find_props cx props_tmap), - t_out - )) - - (** - * InstanceT CommonJS export values have their properties turned into named - * exports - *) - | DefT (_, InstanceT(_, _, _, {own_props; proto_props; _;})), - CJSExtractNamedExportsT( - reason, (module_t_reason, exporttypes, is_strict), t_out - ) -> - - let module_t = ModuleT (module_t_reason, exporttypes, is_strict) in - - let extract_named_exports id = - Context.find_props cx id - |> SMap.filter (fun x _ -> not (is_munged_prop_name cx x)) - |> Properties.extract_named_exports - in - - (* Copy own props *) - let module_t = Tvar.mk_where cx reason (fun t -> - rec_flow cx trace (module_t, ExportNamedT( - reason, - false, (* skip_dupes *) - extract_named_exports own_props, - t - )) - ) in - - (* Copy proto props *) - (* TODO: own props should take precedence *) - rec_flow cx trace (module_t, ExportNamedT( - reason, - false, (* skip_dupes *) - extract_named_exports proto_props, - t_out - )) - - (* If the module is exporting any or Object, then we allow any named - * import - *) - | DefT (_, (AnyT | AnyObjT)), - CJSExtractNamedExportsT(_, (module_t_reason, exporttypes, is_strict), t_out) -> - let module_t = ModuleT ( - module_t_reason, - { exporttypes with has_every_named_export = true; }, - is_strict - ) in - rec_flow_t cx trace (module_t, t_out) - - (** - * All other CommonJS export value types do not get merged into the named - * exports tmap in any special way. - *) - | (_, CJSExtractNamedExportsT(_, (module_t_reason, exporttypes, is_strict), t_out)) -> - let module_t = ModuleT (module_t_reason, exporttypes, is_strict) in - rec_flow_t cx trace (module_t, t_out) - - (**************************************************************************) - (* Module imports *) - (* *) - (* The process of importing from a module consists of reading from the *) - (* foreign ModuleT type and generating a user-visible construct from it. *) - (* *) - (* For CommonJS imports (AKA 'require()'), if the foreign module is an ES *) - (* module we generate an object whose properties correspond to each of *) - (* the named exports of the foreign module. If the foreign module is also *) - (* a CommonJS module, use the type of the foreign CommonJS exports value *) - (* directly. *) - (* *) - (* For ES imports (AKA `import` statements), simply generate a model of *) - (* an ES ModuleNamespace object from the individual named exports of the *) - (* foreign module. This object can then be passed up to "userland" *) - (* directly (via `import * as`) or it can be used to extract individual *) - (* exports from the foreign module (via `import {}` and `import X from`). 
*) - (**************************************************************************) - - (* require('SomeModule') *) - | (ModuleT(_, exports, imported_is_strict), CJSRequireT(reason, t, is_strict)) -> - check_nonstrict_import cx trace is_strict imported_is_strict reason; - let cjs_exports = ( - match exports.cjs_export with - | Some t -> - (* reposition the export to point at the require(), like the object + | (ModuleT (_, source_exports, _), CopyNamedExportsT (reason, target_module_t, t_out)) -> + let source_tmap = Context.find_exports cx source_exports.exports_tmap in + rec_flow + cx + trace + ( target_module_t, + ExportNamedT (reason, (*skip_dupes*) true, source_tmap, ReExport, t_out) ) + (* + * Copy only the type exports from a source module into a target module. + * Used to implement `export type * from ...`. + *) + | (ModuleT (_, source_exports, _), CopyTypeExportsT (reason, target_module_t, t_out)) -> + let source_exports = Context.find_exports cx source_exports.exports_tmap in + (* Remove locations. TODO at some point we may want to include them here. *) + let source_exports = SMap.map snd source_exports in + let target_module_t = + SMap.fold + (fun export_name export_t target_module_t -> + Tvar.mk_where cx reason (fun t -> + rec_flow + cx + trace + (export_t, ExportTypeT (reason, true, export_name, target_module_t, t)))) + source_exports + target_module_t + in + rec_flow_t cx trace (target_module_t, t_out) + (* + * Export a type from a given ModuleT, but only if the type is compatible + * with `import type`/`export type`. When it is not compatible, it is simply + * not added to the exports map. + * + * Note that this is very similar to `ExportNamedT` except that it only + * exports one type at a time and it takes the type to be exported as a + * lower (so that the type can be filtered post-resolution). + *) + | (l, ExportTypeT (reason, skip_dupes, export_name, target_module_t, t_out)) -> + let is_type_export = + match l with + | DefT (_, _, ObjT _) when export_name = "default" -> true + | l -> canonicalize_imported_type cx trace reason l <> None + in + if is_type_export then + rec_flow + cx + trace + ( target_module_t, + ExportNamedT + ( reason, + skip_dupes, + (* TODO we may want to add location information here *) + SMap.singleton export_name (None, l), + ReExport, + t_out ) ) + else + rec_flow_t cx trace (target_module_t, t_out) + (* There is nothing to copy from a module exporting `any` or `Object`. *) + | ( AnyT (lreason, _), + ( CopyNamedExportsT (reason, target_module, t) + | CopyTypeExportsT (reason, target_module, t) ) ) -> + let () = + match desc_of_reason lreason with + (* Use a special reason so we can tell the difference between an any-typed import + * from an untyped module and an any-typed import from a nonexistent module. 
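(* Editorial sketch, not part of the diff: the distinction drawn in the
   comment above -- an `any` whose reason says it came from an untyped
   (non-@flow) module triggers an untyped-import warning, while an `any`
   from a module that could not be resolved does not. The variant names
   below are placeholders for the reason descriptions. *)
type any_source =
  | UntypedModule of string       (* module exists but is not type-checked *)
  | NonexistentModule of string   (* module could not be resolved at all *)

let warn_untyped_import = function
  | UntypedModule module_name ->
    Printf.printf "importing from untyped module `%s`\n" module_name
  | NonexistentModule _ -> ()     (* a different error is reported elsewhere *)

let () =
  warn_untyped_import (UntypedModule "LegacyThing");
  warn_untyped_import (NonexistentModule "DoesNotExist")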
*) + | RUntypedModule module_name -> + let loc = Reason.aloc_of_reason reason in + let message = Error_message.EUntypedImport (loc, module_name) in + add_output cx ~trace message + | _ -> () + in + rec_flow_t cx trace (target_module, t) + (* + * ObjT CommonJS export values have their properties turned into named + * exports + *) + | ( ( DefT (_, _, ObjT { props_tmap; proto_t; _ }) + | ExactT (_, DefT (_, _, ObjT { props_tmap; proto_t; _ })) ), + CJSExtractNamedExportsT (reason, (module_t_reason, exporttypes, is_strict), t_out) ) -> + (* Copy props from the prototype *) + let module_t = + Tvar.mk_where cx reason (fun t -> + rec_flow + cx + trace + ( proto_t, + CJSExtractNamedExportsT (reason, (module_t_reason, exporttypes, is_strict), t) + )) + in + (* Copy own props *) + rec_flow + cx + trace + ( module_t, + ExportNamedT + ( reason, + false, + (* skip_dupes *) + Properties.extract_named_exports (Context.find_props cx props_tmap), + ExportValue, + t_out ) ) + (* + * InstanceT CommonJS export values have their properties turned into named + * exports + *) + | ( DefT (_, _, InstanceT (_, _, _, { own_props; proto_props; _ })), + CJSExtractNamedExportsT (reason, (module_t_reason, exporttypes, is_strict), t_out) ) -> + let module_t = ModuleT (module_t_reason, exporttypes, is_strict) in + let extract_named_exports id = + Context.find_props cx id + |> SMap.filter (fun x _ -> not (is_munged_prop_name cx x)) + |> Properties.extract_named_exports + in + (* Copy own props *) + let module_t = + Tvar.mk_where cx reason (fun t -> + rec_flow + cx + trace + ( module_t, + ExportNamedT + ( reason, + false, + (* skip_dupes *) + extract_named_exports own_props, + ExportValue, + t ) )) + in + (* Copy proto props *) + (* TODO: own props should take precedence *) + rec_flow + cx + trace + ( module_t, + ExportNamedT + ( reason, + false, + (* skip_dupes *) + extract_named_exports proto_props, + ExportValue, + t_out ) ) + (* If the module is exporting any or Object, then we allow any named + * import + *) + | (AnyT _, CJSExtractNamedExportsT (_, (module_t_reason, exporttypes, is_strict), t_out)) + -> + let module_t = + ModuleT (module_t_reason, { exporttypes with has_every_named_export = true }, is_strict) + in + rec_flow_t cx trace (module_t, t_out) + (* + * All other CommonJS export value types do not get merged into the named + * exports tmap in any special way. + *) + | (_, CJSExtractNamedExportsT (_, (module_t_reason, exporttypes, is_strict), t_out)) -> + let module_t = ModuleT (module_t_reason, exporttypes, is_strict) in + rec_flow_t cx trace (module_t, t_out) + (**************************************************************************) + (* Module imports *) + (* *) + (* The process of importing from a module consists of reading from the *) + (* foreign ModuleT type and generating a user-visible construct from it. *) + (* *) + (* For CommonJS imports (AKA 'require()'), if the foreign module is an ES *) + (* module we generate an object whose properties correspond to each of *) + (* the named exports of the foreign module. If the foreign module is also *) + (* a CommonJS module, use the type of the foreign CommonJS exports value *) + (* directly. *) + (* *) + (* For ES imports (AKA `import` statements), simply generate a model of *) + (* an ES ModuleNamespace object from the individual named exports of the *) + (* foreign module. 
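(* Editorial sketch, not part of the diff: the import-side interop described
   in the Module imports comment above, over toy string maps. require() of an
   ES module yields an object of its named exports, while a namespace import
   of a CommonJS module additionally exposes module.exports as "default".
   The record fields and helpers below are placeholders. *)
module StrMap = Map.Make (String)

type toy_module = {
  cjs_export : string option;  (* the type of module.exports, if set *)
  named : string StrMap.t;     (* ES named exports: name -> type *)
}

let require_ m =
  match m.cjs_export with
  | Some t -> t  (* CommonJS: the raw module.exports type *)
  | None -> "{ " ^ String.concat "; " (List.map fst (StrMap.bindings m.named)) ^ " }"

let import_star m =
  match m.cjs_export with
  | Some t -> StrMap.add "default" t m.named  (* CJS value shows up as default *)
  | None -> m.named

let () =
  let es = { cjs_export = None; named = StrMap.singleton "someNumber" "number" } in
  let cjs = { cjs_export = Some "{ someNumber: number }"; named = StrMap.empty } in
  print_endline (require_ es);
  import_star cjs |> StrMap.iter (fun name ty -> Printf.printf "%s: %s\n" name ty)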
This object can then be passed up to "userland" *) + (* directly (via `import * as`) or it can be used to extract individual *) + (* exports from the foreign module (via `import {}` and `import X from`). *) + (**************************************************************************) + + (* require('SomeModule') *) + | (ModuleT (_, exports, imported_is_strict), CJSRequireT (reason, t, is_strict)) -> + check_nonstrict_import cx trace is_strict imported_is_strict reason; + let cjs_exports = + match exports.cjs_export with + | Some t -> + (* reposition the export to point at the require(), like the object we create below for non-CommonJS exports *) - reposition ~trace cx (aloc_of_reason reason |> ALoc.to_loc) t - | None -> - (* convert ES module's named exports to an object *) - let proto = ObjProtoT reason in + reposition ~trace cx (aloc_of_reason reason) t + | None -> + (* convert ES module's named exports to an object *) + let proto = ObjProtoT reason in + let exports_tmap = Context.find_exports cx exports.exports_tmap in + let props = + SMap.map (fun (loc, t) -> Field (loc, t, Polarity.Positive)) exports_tmap + in + Obj_type.mk_with_proto cx reason ~sealed:true ~frozen:true ~props proto + in + rec_flow_t cx trace (cjs_exports, t) + (* import * as X from 'SomeModule'; *) + | (ModuleT (_, exports, imported_is_strict), ImportModuleNsT (reason, t, is_strict)) -> + check_nonstrict_import cx trace is_strict imported_is_strict reason; let exports_tmap = Context.find_exports cx exports.exports_tmap in - let props = SMap.map (fun (loc, t) -> Field (loc, t, Positive)) exports_tmap in - Obj_type.mk_with_proto cx reason - ~sealed:true ~frozen:true ~props proto - ) in - rec_flow_t cx trace (cjs_exports, t) - - (* import * as X from 'SomeModule'; *) - | (ModuleT(_, exports, imported_is_strict), ImportModuleNsT(reason, t, is_strict)) -> - check_nonstrict_import cx trace is_strict imported_is_strict reason; - let exports_tmap = Context.find_exports cx exports.exports_tmap in - let props = SMap.map (fun (loc, t) -> Field (loc, t, Positive)) exports_tmap in - let props = match exports.cjs_export with - | Some t -> - (* TODO this Field should probably have a location *) - let p = Field (None, t, Neutral) in - SMap.add "default" p props - | None -> props - in - let dict = if exports.has_every_named_export - then Some { - key = StrT.why reason; - value = AnyT.why reason; - dict_name = None; - dict_polarity = Neutral; - } - else None in - let proto = ObjProtoT reason in - let ns_obj = Obj_type.mk_with_proto cx reason - ~sealed:true ~frozen:true ?dict ~props proto - in - rec_flow_t cx trace (ns_obj, t) - - (* import [type] X from 'SomeModule'; *) - | ModuleT(module_reason, exports, imported_is_strict), - ImportDefaultT(reason, import_kind, (local_name, module_name), t, is_strict) -> - check_nonstrict_import cx trace is_strict imported_is_strict reason; - let export_t = match exports.cjs_export with - | Some t -> t - | None -> - let exports_tmap = Context.find_exports cx exports.exports_tmap in - match SMap.get "default" exports_tmap with + let props = SMap.map (fun (loc, t) -> Field (loc, t, Polarity.Positive)) exports_tmap in + let props = + match exports.cjs_export with + | Some t -> + (* TODO this Field should probably have a location *) + let p = Field (None, t, Polarity.Positive) in + SMap.add "default" p props + | None -> props + in + let dict = + if exports.has_every_named_export then + Some + { + key = StrT.why reason |> with_trust bogus_trust; + value = AnyT.untyped reason; + dict_name = None; + 
dict_polarity = Polarity.Neutral; + } + else + None + in + let proto = ObjProtoT reason in + let ns_obj = + Obj_type.mk_with_proto cx reason ~sealed:true ~frozen:true ?dict ~props proto + in + rec_flow_t cx trace (ns_obj, t) + (* import [type] X from 'SomeModule'; *) + | ( ModuleT (module_reason, exports, imported_is_strict), + ImportDefaultT (reason, import_kind, (local_name, module_name), t, is_strict) ) -> + check_nonstrict_import cx trace is_strict imported_is_strict reason; + let export_t = + match exports.cjs_export with + | Some t -> t + | None -> + let exports_tmap = Context.find_exports cx exports.exports_tmap in + (match SMap.get "default" exports_tmap with | Some (_, t) -> t | None -> - (** + (* * A common error while using `import` syntax is to forget or * misunderstand the difference between `import foo from ...` * and `import {foo} from ...`. The former means to import the @@ -2060,640 +1855,635 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = *) let known_exports = SMap.keys exports_tmap in let suggestion = typo_suggestion known_exports local_name in - add_output cx ~trace (FlowError.ENoDefaultExport - (reason, module_name, suggestion)); - AnyT.why module_reason - in - - let import_t = ( - match import_kind with - | ImportType -> - Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (export_t, ImportTypeT(reason, "default", tvar)) - ) - | ImportTypeof -> - Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (export_t, ImportTypeofT(reason, "default", tvar)) - ) - | ImportValue -> - rec_flow cx trace (export_t, AssertImportIsValueT(reason, "default")); - export_t - ) in - rec_flow_t cx trace (import_t, t) - - (* import {X} from 'SomeModule'; *) - | ModuleT(_, exports, imported_is_strict), - ImportNamedT(reason, import_kind, export_name, module_name, t, is_strict) -> - check_nonstrict_import cx trace is_strict imported_is_strict reason; - (** - * When importing from a CommonJS module, we shadow any potential named - * exports called "default" with a pointer to the raw `module.exports` - * object + add_output + cx + ~trace + (Error_message.ENoDefaultExport (reason, module_name, suggestion)); + AnyT.error module_reason) + in + let import_t = + match import_kind with + | ImportType -> + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (export_t, ImportTypeT (reason, "default", tvar))) + | ImportTypeof -> + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (export_t, ImportTypeofT (reason, "default", tvar))) + | ImportValue -> + rec_flow cx trace (export_t, AssertImportIsValueT (reason, "default")); + export_t + in + rec_flow_t cx trace (import_t, t) + (* import {X} from 'SomeModule'; *) + | ( ModuleT (_, exports, imported_is_strict), + ImportNamedT (reason, import_kind, export_name, module_name, t, is_strict) ) -> + check_nonstrict_import cx trace is_strict imported_is_strict reason; + + (* + * When importing from a CommonJS module, we shadow any potential named + * exports called "default" with a pointer to the raw `module.exports` + * object + *) + let exports_tmap = + let exports_tmap = Context.find_exports cx exports.exports_tmap in + (* Drop locations; they are not needed here *) + let exports_tmap = SMap.map snd exports_tmap in + match exports.cjs_export with + | Some t -> SMap.add "default" t exports_tmap + | None -> exports_tmap + in + let has_every_named_export = exports.has_every_named_export in + let import_t = + match (import_kind, SMap.get export_name exports_tmap) with + | (ImportType, Some t) -> + Tvar.mk_where cx reason (fun 
tvar -> + rec_flow cx trace (t, ImportTypeT (reason, export_name, tvar))) + | (ImportType, None) when has_every_named_export -> + let t = AnyT.untyped reason in + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (t, ImportTypeT (reason, export_name, tvar))) + | (ImportTypeof, Some t) -> + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (t, ImportTypeofT (reason, export_name, tvar))) + | (ImportTypeof, None) when has_every_named_export -> + let t = AnyT.untyped reason in + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (t, ImportTypeofT (reason, export_name, tvar))) + | (ImportValue, Some t) -> + rec_flow cx trace (t, AssertImportIsValueT (reason, export_name)); + t + | (ImportValue, None) when has_every_named_export -> + let t = AnyT.untyped reason in + rec_flow cx trace (t, AssertImportIsValueT (reason, export_name)); + t + | (_, None) -> + let num_exports = SMap.cardinal exports_tmap in + let has_default_export = SMap.get "default" exports_tmap <> None in + let msg = + if num_exports = 1 && has_default_export then + Error_message.EOnlyDefaultExport (reason, module_name, export_name) + else + let known_exports = SMap.keys exports_tmap in + let suggestion = typo_suggestion known_exports export_name in + Error_message.ENoNamedExport (reason, module_name, export_name, suggestion) + in + add_output cx ~trace msg; + AnyT.error reason + in + rec_flow_t cx trace (import_t, t) + | (AnyT (lreason, src), (CJSRequireT (reason, t, _) | ImportModuleNsT (reason, t, _))) -> + let () = + match desc_of_reason lreason with + (* Use a special reason so we can tell the difference between an any-typed import + * from an untyped module and an any-typed import from a nonexistent module. *) + | RUntypedModule module_name -> + let loc = Reason.aloc_of_reason reason in + let message = Error_message.EUntypedImport (loc, module_name) in + add_output cx ~trace message + | _ -> () + in + rec_flow_t cx trace (AnyT.why src reason, t) + | (AnyT (lreason, src), ImportDefaultT (reason, import_kind, _, t, _)) -> + let () = + match (import_kind, desc_of_reason lreason) with + (* Use a special reason so we can tell the difference between an any-typed type import + * from an untyped module and an any-typed import from a nonexistent module. *) + | ((ImportType | ImportTypeof), RUntypedModule module_name) -> + let loc = Reason.aloc_of_reason reason in + let message = Error_message.EUntypedTypeImport (loc, module_name) in + add_output cx ~trace message + | (ImportValue, RUntypedModule module_name) -> + let loc = Reason.aloc_of_reason reason in + let message = Error_message.EUntypedImport (loc, module_name) in + add_output cx ~trace message + | _ -> () + in + rec_flow_t cx trace (AnyT.why src reason, t) + | (AnyT (lreason, src), ImportNamedT (reason, import_kind, _, _, t, _)) -> + let () = + match (import_kind, desc_of_reason lreason) with + (* Use a special reason so we can tell the difference between an any-typed type import + * from an untyped module and an any-typed type import from a nonexistent module. 
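           * To illustrate the intended user-facing behavior (a sketch; `Untyped.js`
           * is a hypothetical file with no `@flow` pragma):
           *
           *   import {f} from './Untyped';        // f : any, EUntypedImport lint
           *   import type {T} from './Untyped';   // T : any, EUntypedTypeImport lint
           *
           * Both imports still succeed -- the bindings are typed `any` -- but the
           * lint makes the untyped dependency visible.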
*) + | ((ImportType | ImportTypeof), RUntypedModule module_name) -> + let loc = Reason.aloc_of_reason reason in + let message = Error_message.EUntypedTypeImport (loc, module_name) in + add_output cx ~trace message + | (ImportValue, RUntypedModule module_name) -> + let loc = Reason.aloc_of_reason reason in + let message = Error_message.EUntypedImport (loc, module_name) in + add_output cx ~trace message + | _ -> () + in + rec_flow_t cx trace (AnyT.why src reason, t) + | ( (DefT (_, _, PolyT (_, _, DefT (_, _, TypeT _), _)) | DefT (_, _, TypeT _)), + AssertImportIsValueT (reason, name) ) -> + add_output cx ~trace (Error_message.EImportTypeAsValue (reason, name)) + | (_, AssertImportIsValueT (_, _)) -> () + (*******************************) + (* common implicit conversions *) + (*******************************) + | (_, UseT (_, DefT (_, _, NumT _))) when numeric l -> () + | (_, UseT (_, AnyT _)) when function_like l -> () + | (AnyT _, GetPropT (_, _, Named (_, x), _)) + | (AnyT _, SetPropT (_, _, Named (_, x), _, _, _, _)) + | (AnyT _, LookupT (_, _, _, Named (_, x), _)) + | (AnyT _, MethodT (_, _, _, Named (_, x), _, _)) + when is_function_prototype x -> + () + | (AnyT _, UseT (_, u)) when function_like u -> () + | (AnyT _, UseT (_, u)) when object_like u -> () + | (_, UseT (_, AnyT _)) when object_like l -> () + | (AnyT _, UseT (_, u)) when object_like u -> () + (* + * Handling for the idx() custom function. + * + * idx(a, a => a.b.c) is a 2-arg function with semantics meant to simlify + * the process of extracting a property from a chain of maybe-typed property + * accesses. + * + * As an example, if you consider an object type such as: + * + * { + * me: ?{ + * firstName: string, + * lastName: string, + * friends: ?Array, + * } + * } + * + * The process of getting to the friends of my first friend (safely) looks + * something like this: + * + * let friendsOfFriend = obj.me && obj.me.friends && obj.me.friends[0] + * && obj.me.friends[0].friends; + * + * This is verbose to say the least. To simplify, we can define a function + * called idx() as: + * + * function idx(obj, callback) { + * try { return callback(obj); } catch (e) { + * if (isNullPropertyAccessError(e)) { + * return null; + * } else { + * throw e; + * } + * } + * } + * + * This function can then be used to safely dive into the aforementioned + * object tersely: + * + * let friendsOfFriend = idx(obj, obj => obj.me.friends[0].friends); + * + * If we assume these semantics, then we can model the type of this function + * by wrapping the `obj` parameter in a special signifying wrapper type that + * is only valid against use types associated with property accesses. Any + * time this specially wrapper type flows into a property access operation, + * we: + * + * 1) Strip away any potential MaybeT from the contained type + * 2) Forward the un-Maybe'd type on to the access operation + * 3) Wrap the result back in the special wrapper + * + * We can then flow this wrapped `obj` to a call on the callback function, + * remove the wrapper from the return type, and return that value wrapped in + * a MaybeT. + * + * ...of course having a `?.` operator in the language would be a nice + * reason to throw all of this clownerous hackery away... 
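       *
       * (A side note, as a rough sketch: the `?.` operator is handled by the
       * OptionalChainT cases further below, and the example above corresponds to
       * something like `obj.me?.friends?.[0]?.friends`.)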
*) - let exports_tmap = ( - let exports_tmap = Context.find_exports cx exports.exports_tmap in - (* Drop locations; they are not needed here *) - let exports_tmap = SMap.map snd exports_tmap in - match exports.cjs_export with - | Some t -> SMap.add "default" t exports_tmap - | None -> exports_tmap - ) in - let has_every_named_export = exports.has_every_named_export in - let import_t = ( - match (import_kind, SMap.get export_name exports_tmap) with - | (ImportType, Some t) -> - Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (t, ImportTypeT(reason, export_name, tvar)) - ) - | (ImportType, None) when has_every_named_export -> - let t = AnyT.why reason in - Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (t, ImportTypeT(reason, export_name, tvar)) - ) - | (ImportTypeof, Some t) -> - Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (t, ImportTypeofT(reason, export_name, tvar)) - ) - | (ImportTypeof, None) when has_every_named_export -> - let t = AnyT.why reason in - Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (t, ImportTypeofT(reason, export_name, tvar)) - ) - | (ImportValue, Some t) -> - rec_flow cx trace (t, AssertImportIsValueT(reason, export_name)); - t - | (ImportValue, None) when has_every_named_export -> - let t = AnyT.why reason in - rec_flow cx trace (t, AssertImportIsValueT(reason, export_name)); - t - | (_, None) -> - let num_exports = SMap.cardinal exports_tmap in - let has_default_export = SMap.get "default" exports_tmap <> None in - - let msg = - if num_exports = 1 && has_default_export - then - FlowError.EOnlyDefaultExport (reason, module_name, export_name) - else - let known_exports = SMap.keys exports_tmap in - let suggestion = typo_suggestion known_exports export_name in - FlowError.ENoNamedExport (reason, module_name, export_name, suggestion) - in - add_output cx ~trace msg; - AnyT.why reason - ) in - rec_flow_t cx trace (import_t, t) - - (* imports are `any`-typed when they are from (1) unchecked modules or (2) - modules with `any`-typed exports *) - | DefT (_, AnyObjT), - ( CJSRequireT(reason, t, _) - | ImportModuleNsT(reason, t, _) - | ImportDefaultT(reason, _, _, t, _) - | ImportNamedT(reason, _, _, _, t, _) - ) -> - rec_flow_t cx trace (AnyT.why reason, t) - - | DefT (lreason, AnyT), (CJSRequireT(reason, t, _) | ImportModuleNsT(reason, t, _)) -> - let () = match desc_of_reason lreason with - (* Use a special reason so we can tell the difference between an any-typed import - * from an untyped module and an any-typed import from a nonexistent module. *) - | RUntypedModule module_name -> - let loc = Reason.aloc_of_reason reason in - let message = FlowError.EUntypedImport (loc |> ALoc.to_loc, module_name) in - add_output cx ~trace message - | _ -> () - in - rec_flow_t cx trace (AnyT.why reason, t) - - | DefT (lreason, AnyT), ImportDefaultT(reason, import_kind, _, t, _) -> - let () = match import_kind, desc_of_reason lreason with - (* Use a special reason so we can tell the difference between an any-typed type import - * from an untyped module and an any-typed import from a nonexistent module. 
*) - | (ImportType | ImportTypeof), RUntypedModule module_name -> - let loc = Reason.aloc_of_reason reason in - let message = FlowError.EUntypedTypeImport (loc |> ALoc.to_loc, module_name) in - add_output cx ~trace message - | ImportValue, RUntypedModule module_name -> - let loc = Reason.aloc_of_reason reason in - let message = FlowError.EUntypedImport (loc |> ALoc.to_loc, module_name) in - add_output cx ~trace message - | _ -> () - in - rec_flow_t cx trace (AnyT.why reason, t) - - | DefT (lreason, AnyT), ImportNamedT(reason, import_kind, _, _, t, _) -> - let () = match import_kind, desc_of_reason lreason with - (* Use a special reason so we can tell the difference between an any-typed type import - * from an untyped module and an any-typed type import from a nonexistent module. *) - | (ImportType | ImportTypeof), RUntypedModule module_name -> - let loc = Reason.aloc_of_reason reason in - let message = FlowError.EUntypedTypeImport (loc |> ALoc.to_loc, module_name) in - add_output cx ~trace message - | ImportValue, RUntypedModule module_name -> - let loc = Reason.aloc_of_reason reason in - let message = FlowError.EUntypedImport (loc |> ALoc.to_loc, module_name) in - add_output cx ~trace message - | _ -> () - in - rec_flow_t cx trace (AnyT.why reason, t) - - | (DefT (_, PolyT (_, DefT (_, TypeT _), _)) | DefT (_, TypeT _)), - AssertImportIsValueT(reason, name) -> - add_output cx ~trace (FlowError.EImportTypeAsValue (reason, name)) - - | (_, AssertImportIsValueT(_, _)) -> () - - (*******************************) - (* common implicit conversions *) - (*******************************) - - | (_, UseT (_, DefT (_, NumT _))) when numeric l -> () - - | (_, UseT (_, DefT (_, AnyObjT))) when object_like l -> () - | (DefT (_, AnyObjT), UseT (_, u)) when object_like u -> () - - | (_, UseT (_, DefT (_, AnyFunT))) when function_like l -> () - - | DefT (reason, AnyFunT), GetPropT (_, _, Named (_, x), _) - | DefT (reason, AnyFunT), SetPropT (_, _, Named (_, x), _, _, _) - | DefT (reason, AnyFunT), LookupT (_, _, _, Named (_, x), _) - | DefT (reason, AnyFunT), MethodT (_, _, _, Named (_, x), _, _) - when is_function_prototype x -> - rec_flow cx trace (FunProtoT reason, u) - | DefT (_, AnyFunT), UseT (_, u) when function_like u -> () - | DefT (_, AnyFunT), UseT (_, u) when object_like u -> () - | DefT (_, AnyFunT), UseT (_, DefT (_, AnyFunT)) -> () - - (** - * Handling for the idx() custom function. - * - * idx(a, a => a.b.c) is a 2-arg function with semantics meant to simlify - * the process of extracting a property from a chain of maybe-typed property - * accesses. - * - * As an example, if you consider an object type such as: - * - * { - * me: ?{ - * firstName: string, - * lastName: string, - * friends: ?Array, - * } - * } - * - * The process of getting to the friends of my first friend (safely) looks - * something like this: - * - * let friendsOfFriend = obj.me && obj.me.friends && obj.me.friends[0] - * && obj.me.friends[0].friends; - * - * This is verbose to say the least. 
To simplify, we can define a function - * called idx() as: - * - * function idx(obj, callback) { - * try { return callback(obj); } catch (e) { - * if (isNullPropertyAccessError(e)) { - * return null; - * } else { - * throw e; - * } - * } - * } - * - * This function can then be used to safely dive into the aforementioned - * object tersely: - * - * let friendsOfFriend = idx(obj, obj => obj.me.friends[0].friends); - * - * If we assume these semantics, then we can model the type of this function - * by wrapping the `obj` parameter in a special signifying wrapper type that - * is only valid against use types associated with property accesses. Any - * time this specially wrapper type flows into a property access operation, - * we: - * - * 1) Strip away any potential MaybeT from the contained type - * 2) Forward the un-Maybe'd type on to the access operation - * 3) Wrap the result back in the special wrapper - * - * We can then flow this wrapped `obj` to a call on the callback function, - * remove the wrapper from the return type, and return that value wrapped in - * a MaybeT. - * - * ...of course having a `?.` operator in the language would be a nice - * reason to throw all of this clownerous hackery away... - *) - | CustomFunT (lreason, Idx), - CallT (use_op, reason_op, { - call_this_t; - call_targs; - call_args_tlist; - call_tout; - call_closure_t; - call_strict_arity; - }) -> - let tout = match call_targs, call_args_tlist with - | None, (Arg obj)::(Arg cb)::[] -> - let wrapped_obj = DefT (reason_op, IdxWrapper obj) in - let callback_result = Tvar.mk_where cx reason_op (fun t -> - rec_flow cx trace (cb, CallT (use_op, reason_op, { - call_this_t; - call_targs = None; - call_args_tlist = [Arg wrapped_obj]; - call_tout = t; - call_closure_t; - call_strict_arity; - })) - ) in - let unwrapped_t = Tvar.mk_where cx reason_op (fun t -> - rec_flow cx trace (callback_result, IdxUnwrap(reason_op, t)) - ) in - let maybe_r = replace_reason (fun desc -> RMaybe desc) reason_op in - DefT (maybe_r, MaybeT unwrapped_t) - | None, (SpreadArg t1)::(SpreadArg t2)::_ -> - add_output cx ~trace FlowError.( - EUnsupportedSyntax (loc_of_t t1, SpreadArgument)); - add_output cx ~trace FlowError.( - EUnsupportedSyntax (loc_of_t t2, SpreadArgument)); - AnyT.why reason_op - | None, (SpreadArg t)::_ - | None, _::(SpreadArg t)::_ -> - let spread_loc = loc_of_t t in - add_output cx ~trace FlowError.( - EUnsupportedSyntax (spread_loc, SpreadArgument)); - AnyT.why reason_op - | Some _, _ -> - add_output cx ~trace FlowError.(ECallTypeArity { - call_loc = aloc_of_reason reason_op |> ALoc.to_loc; - is_new = false; - reason_arity = lreason; - expected_arity = 0; - }); - AnyT.why reason_op - | _ -> - (* Why is idx strict about arity? No other functions are. *) - add_output cx ~trace FlowError.(EIdxArity reason_op); - AnyT.why reason_op - in - rec_flow_t cx trace (tout, call_tout) - - (* Unwrap idx() callback param *) - | DefT (_, IdxWrapper obj), IdxUnwrap (_, t) -> rec_flow_t cx trace (obj, t) - | (_, IdxUnwrap (_, t)) -> rec_flow_t cx trace (l, t) - - (* De-maybe-ify an idx() property access *) - | (DefT (_, MaybeT inner_t), IdxUnMaybeifyT _) - | (DefT (_, OptionalT inner_t), IdxUnMaybeifyT _) - -> rec_flow cx trace (inner_t, u) - | DefT (_, NullT), IdxUnMaybeifyT _ -> () - | DefT (_, VoidT), IdxUnMaybeifyT _ -> () - | _, IdxUnMaybeifyT (_, t) when ( - match l with - | DefT (_, (UnionT _ | IntersectionT _)) -> false - | _ -> true - ) -> - rec_flow_t cx trace (l, t) - - (* The set of valid uses of an idx() callback parameter. 
In general this + | ( CustomFunT (lreason, Idx), + CallT + ( use_op, + reason_op, + { + call_this_t; + call_targs; + call_args_tlist; + call_tout; + call_closure_t; + call_strict_arity; + } ) ) -> + let tout = + match (call_targs, call_args_tlist) with + | (None, [Arg obj; Arg cb]) -> + let wrapped_obj = DefT (reason_op, bogus_trust (), IdxWrapper obj) in + let callback_result = + Tvar.mk_where cx reason_op (fun t -> + rec_flow + cx + trace + ( cb, + CallT + ( use_op, + reason_op, + { + call_this_t; + call_targs = None; + call_args_tlist = [Arg wrapped_obj]; + call_tout = t; + call_closure_t; + call_strict_arity; + } ) )) + in + let unwrapped_t = + Tvar.mk_where cx reason_op (fun t -> + rec_flow cx trace (callback_result, IdxUnwrap (reason_op, t))) + in + let maybe_r = update_desc_reason (fun desc -> RMaybe desc) reason_op in + MaybeT (maybe_r, unwrapped_t) + | (None, SpreadArg t1 :: SpreadArg t2 :: _) -> + add_output cx ~trace Error_message.(EUnsupportedSyntax (loc_of_t t1, SpreadArgument)); + add_output cx ~trace Error_message.(EUnsupportedSyntax (loc_of_t t2, SpreadArgument)); + AnyT.error reason_op + | (None, SpreadArg t :: _) + | (None, _ :: SpreadArg t :: _) -> + let spread_loc = loc_of_t t in + add_output cx ~trace Error_message.(EUnsupportedSyntax (spread_loc, SpreadArgument)); + AnyT.error reason_op + | (Some _, _) -> + add_output + cx + ~trace + Error_message.( + ECallTypeArity + { + call_loc = aloc_of_reason reason_op; + is_new = false; + reason_arity = lreason; + expected_arity = 0; + }); + AnyT.error reason_op + | _ -> + (* Why is idx strict about arity? No other functions are. *) + add_output cx ~trace Error_message.(EIdxArity reason_op); + AnyT.error reason_op + in + rec_flow_t cx trace (tout, call_tout) + (* Unwrap idx() callback param *) + | (DefT (_, _, IdxWrapper obj), IdxUnwrap (_, t)) -> rec_flow_t cx trace (obj, t) + | (_, IdxUnwrap (_, t)) -> rec_flow_t cx trace (l, t) + (* De-maybe-ify an idx() property access *) + | (MaybeT (_, inner_t), IdxUnMaybeifyT _) + | (OptionalT (_, inner_t), IdxUnMaybeifyT _) -> + rec_flow cx trace (inner_t, u) + | (DefT (_, _, NullT), IdxUnMaybeifyT _) -> () + | (DefT (_, _, VoidT), IdxUnMaybeifyT _) -> () + | (_, IdxUnMaybeifyT (_, t)) + when match l with + | UnionT _ + | IntersectionT _ -> + false + | _ -> true -> + rec_flow_t cx trace (l, t) + (* The set of valid uses of an idx() callback parameter. In general this should be limited to the various forms of property access operations. 
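       For example (a sketch): in `idx(obj, o => o.a.b)` each member access flows
       the wrapper through the GetPropT/GetElemT cases below and the result stays
       wrapped, whereas using the parameter outside a property access -- say
       `idx(obj, o => someFn(o))`, where `someFn` is any other function -- falls
       through to the EIdxUse* error cases.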
*) - | DefT (idx_reason, IdxWrapper obj), ReposLowerT (reason_op, use_desc, u) -> - let repositioned_obj = Tvar.mk_where cx reason_op (fun t -> - rec_flow cx trace (obj, ReposLowerT (reason_op, use_desc, UseT (unknown_use, t))) - ) in - rec_flow cx trace (DefT (idx_reason, IdxWrapper repositioned_obj), u) - - | DefT (idx_reason, IdxWrapper obj), GetPropT (use_op, reason_op, propname, t_out) -> - let de_maybed_obj = Tvar.mk_where cx idx_reason (fun t -> - rec_flow cx trace (obj, IdxUnMaybeifyT (idx_reason, t)) - ) in - let prop_type = Tvar.mk_where cx reason_op (fun t -> - rec_flow cx trace (de_maybed_obj, GetPropT (use_op, reason_op, propname, t)) - ) in - rec_flow_t cx trace (DefT (idx_reason, IdxWrapper prop_type), t_out) - - | DefT (idx_reason, IdxWrapper obj), - GetPrivatePropT (use_op, reason_op, name, class_bindings, static, t_out) -> - let de_maybed_obj = Tvar.mk_where cx idx_reason (fun t -> - rec_flow cx trace (obj, IdxUnMaybeifyT (idx_reason, t)) - ) in - let prop_type = Tvar.mk_where cx reason_op (fun t -> - rec_flow cx trace (de_maybed_obj, - GetPrivatePropT (use_op, reason_op, name, class_bindings, static, t)) - ) in - rec_flow_t cx trace (DefT (idx_reason, IdxWrapper prop_type), t_out) - - | DefT (idx_reason, IdxWrapper obj), GetElemT (use_op, reason_op, prop, t_out) -> - let de_maybed_obj = Tvar.mk_where cx idx_reason (fun t -> - rec_flow cx trace (obj, IdxUnMaybeifyT (idx_reason, t)) - ) in - let prop_type = Tvar.mk_where cx reason_op (fun t -> - rec_flow cx trace (de_maybed_obj, GetElemT (use_op, reason_op, prop, t)) - ) in - rec_flow_t cx trace (DefT (idx_reason, IdxWrapper prop_type), t_out) - - | DefT (reason, IdxWrapper _), UseT _ -> - add_output cx ~trace (FlowError.EIdxUse1 reason) - - | DefT (reason, IdxWrapper _), _ -> - add_output cx ~trace (FlowError.EIdxUse2 reason) - - (*********************) - (* type assert calls *) - (*********************) - - | CustomFunT (fun_reason, TypeAssertIs), - CallT (use_op, reason_op, call_type) - | CustomFunT (fun_reason, TypeAssertThrows), - CallT (use_op, reason_op, call_type) - | CustomFunT (fun_reason, TypeAssertWraps), - CallT (use_op, reason_op, call_type) -> - - let call_loc = aloc_of_reason reason_op in - let fun_loc = aloc_of_reason fun_reason in - let fun_reason_new = mk_reason RFunctionType (fun_loc |> ALoc.to_loc) in - - (* Add Flow errors for calls that attempt to assert types that cannot be + | (DefT (idx_reason, trust, IdxWrapper obj), ReposLowerT (reason_op, use_desc, u)) -> + let repositioned_obj = + Tvar.mk_where cx reason_op (fun t -> + rec_flow cx trace (obj, ReposLowerT (reason_op, use_desc, UseT (unknown_use, t)))) + in + rec_flow cx trace (DefT (idx_reason, trust, IdxWrapper repositioned_obj), u) + | (DefT (idx_reason, trust, IdxWrapper obj), GetPropT (use_op, reason_op, propname, t_out)) + -> + let de_maybed_obj = + Tvar.mk_where cx idx_reason (fun t -> + rec_flow cx trace (obj, IdxUnMaybeifyT (idx_reason, t))) + in + let prop_type = + Tvar.mk_where cx reason_op (fun t -> + rec_flow cx trace (de_maybed_obj, GetPropT (use_op, reason_op, propname, t))) + in + rec_flow_t cx trace (DefT (idx_reason, trust, IdxWrapper prop_type), t_out) + | ( DefT (idx_reason, trust, IdxWrapper obj), + GetPrivatePropT (use_op, reason_op, name, class_bindings, static, t_out) ) -> + let de_maybed_obj = + Tvar.mk_where cx idx_reason (fun t -> + rec_flow cx trace (obj, IdxUnMaybeifyT (idx_reason, t))) + in + let prop_type = + Tvar.mk_where cx reason_op (fun t -> + rec_flow + cx + trace + ( de_maybed_obj, + GetPrivatePropT (use_op, 
reason_op, name, class_bindings, static, t) )) + in + rec_flow_t cx trace (DefT (idx_reason, trust, IdxWrapper prop_type), t_out) + | (DefT (idx_reason, trust, IdxWrapper obj), GetElemT (use_op, reason_op, prop, t_out)) -> + let de_maybed_obj = + Tvar.mk_where cx idx_reason (fun t -> + rec_flow cx trace (obj, IdxUnMaybeifyT (idx_reason, t))) + in + let prop_type = + Tvar.mk_where cx reason_op (fun t -> + rec_flow cx trace (de_maybed_obj, GetElemT (use_op, reason_op, prop, t))) + in + rec_flow_t cx trace (DefT (idx_reason, trust, IdxWrapper prop_type), t_out) + | (DefT (reason, _, IdxWrapper _), UseT _) -> + add_output cx ~trace (Error_message.EIdxUse1 reason) + | (DefT (reason, _, IdxWrapper _), _) -> + add_output cx ~trace (Error_message.EIdxUse2 reason) + (*********************) + (* type assert calls *) + (*********************) + | (CustomFunT (fun_reason, TypeAssertIs), CallT (use_op, reason_op, call_type)) + | (CustomFunT (fun_reason, TypeAssertThrows), CallT (use_op, reason_op, call_type)) + | (CustomFunT (fun_reason, TypeAssertWraps), CallT (use_op, reason_op, call_type)) -> + let call_loc = aloc_of_reason reason_op in + let fun_loc = aloc_of_reason fun_reason in + let fun_reason_new = mk_reason RFunctionType fun_loc in + (* Add Flow errors for calls that attempt to assert types that cannot be checked at runtime. *) - let reason = mk_reason (RCustom "TypeAssert library function") (call_loc |> ALoc.to_loc) in - let return_t = begin match call_type.call_targs with - | None -> - add_output cx ~trace (FlowError.ETooFewTypeArgs (reason, reason, 1)); - AnyT.at (fun_loc |> ALoc.to_loc) - | Some [t] -> - let kind, return_t = begin match l with - | CustomFunT (_, TypeAssertIs) -> Context.Is, BoolT.at (fun_loc |> ALoc.to_loc) - | CustomFunT (_, TypeAssertThrows) -> Context.Throws, t - | CustomFunT (_, TypeAssertWraps) -> - (* For TypeAssertWraps, return type is Result *) - let mk_bool b = DefT (mk_reason (RBooleanLit b) (fun_loc |> ALoc.to_loc), SingletonBoolT b) in - let pmap_fail = - Properties.add_field "error" Neutral None (StrT.at (fun_loc |> ALoc.to_loc)) - (Properties.add_field "success" Neutral None (mk_bool false) SMap.empty) in - let pmap_succ = - Properties.add_field "value" Neutral None t - (Properties.add_field "success" Neutral None (mk_bool true) SMap.empty) in - let id_succ, id_fail = - Context.make_property_map cx pmap_fail, - Context.make_property_map cx pmap_succ in - let reason = mk_reason (RCustom "Result") (fun_loc |> ALoc.to_loc) in - let obj_fail, obj_succ = - mk_object_def_type ~reason ~dict:None ~call:None id_fail dummy_prototype, - mk_object_def_type ~reason ~dict:None ~call:None id_succ dummy_prototype in - Context.Wraps, - DefT (mk_reason RUnion (fun_loc |> ALoc.to_loc), UnionT (UnionRep.make obj_fail obj_succ [])) - | _ -> failwith "cannot reach this case" - end in - Context.add_type_assert cx (call_loc |> ALoc.to_loc) (kind, TypeUtil.loc_of_t t); return_t - | Some _ -> - add_output cx ~trace (FlowError.ETooManyTypeArgs (reason, reason, 1)); - AnyT.at (fun_loc |> ALoc.to_loc) - end in - - let funtype = DefT (fun_reason_new, FunT ( - dummy_static reason, - DefT (mk_reason RPrototype (fun_loc |> ALoc.to_loc), AnyT), - { - this_t = DefT (mk_reason RThis (fun_loc |> ALoc.to_loc), AnyT); - params = [(Some "value", MixedT.at (fun_loc |> ALoc.to_loc))]; - rest_param = None; - return_t = return_t; - is_predicate = false; - closure_t = 0; - changeset = Changeset.empty; - def_reason = fun_reason_new; - })) - in - rec_flow cx trace ( - funtype, CallT (use_op, reason_op, 
{call_type with call_targs = None}) - ) - - (*********************) - (* optional chaining *) - (*********************) - - | DefT (r, (NullT | VoidT)), OptionalChainT (r', lhs_reason, chain) -> - Context.mark_optional_chain cx (aloc_of_reason r' |> ALoc.to_loc) lhs_reason ~useful:true; - Nel.iter (fun (_, t_out) -> rec_flow_t cx trace (InternalT (OptionalChainVoidT r), t_out)) chain; - - | InternalT (OptionalChainVoidT _), OptionalChainT (r', lhs_reason, chain) -> - Context.mark_optional_chain cx (aloc_of_reason r' |> ALoc.to_loc) lhs_reason ~useful:false; - Nel.iter (fun (_, t_out) -> rec_flow_t cx trace (l, t_out)) chain; - - | _, OptionalChainT (r', lhs_reason, chain) when ( - match l with - | DefT (_, (MaybeT _ | OptionalT _ | UnionT _ | IntersectionT _)) -> false - | _ -> true - ) -> - Context.mark_optional_chain cx (aloc_of_reason r' |> ALoc.to_loc) lhs_reason ~useful:( - match l with - | DefT (_, (MixedT _ | AnyT | AnyObjT | AnyFunT)) -> true - | _ -> false - ); - let lhs_t = ref l in - Nel.iter (fun (opt_use, t_out) -> - let t_out' = Tvar.mk cx (reason_of_t t_out) in - rec_flow cx trace (!lhs_t, apply_opt_use opt_use t_out'); - rec_flow_t cx trace (t_out', t_out); - lhs_t := t_out'; - ) chain; - - | InternalT (OptionalChainVoidT r), u -> - rec_flow cx trace (DefT (r, VoidT), u); - - (*************) - (* invariant *) - (*************) - - | _, InvariantT r' -> Context.mark_invariant cx (aloc_of_reason r' |> ALoc.to_loc) (reason_of_t l) ~useful:( - match Type_filter.not_exists l with - | DefT (_, EmptyT) -> false - | _ -> true - ) - - (***************) - (* maybe types *) - (***************) - - (** The type maybe(T) is the same as null | undefined | UseT *) - - | DefT (r, (NullT | VoidT)), UseT (use_op, DefT (_, MaybeT tout)) -> - rec_flow cx trace (EmptyT.why r, UseT (use_op, tout)) - - | DefT (_, MaybeT _), ReposLowerT (reason_op, use_desc, u) -> - (* Don't split the maybe type into its constituent members. 
Instead, + let reason = mk_reason (RCustom "TypeAssert library function") call_loc in + let return_t = + match call_type.call_targs with + | None -> + add_output cx ~trace (Error_message.ETooFewTypeArgs (reason, reason, 1)); + AnyT.at AnyError fun_loc + | Some [ExplicitArg t] -> + let (kind, return_t) = + match l with + | CustomFunT (_, TypeAssertIs) -> + (Context.Is, BoolT.at fun_loc |> with_trust bogus_trust) + | CustomFunT (_, TypeAssertThrows) -> (Context.Throws, t) + | CustomFunT (_, TypeAssertWraps) -> + (* For TypeAssertWraps, return type is Result *) + let mk_bool b = + DefT (mk_reason (RBooleanLit b) fun_loc, bogus_trust (), SingletonBoolT b) + in + let pmap_fail = + SMap.empty + |> Properties.add_field "success" Polarity.Neutral None (mk_bool false) + |> Properties.add_field + "error" + Polarity.Neutral + None + (StrT.at fun_loc |> with_trust bogus_trust) + in + let pmap_succ = + SMap.empty + |> Properties.add_field "success" Polarity.Neutral None (mk_bool true) + |> Properties.add_field "value" Polarity.Neutral None t + in + let (id_succ, id_fail) = + ( Context.generate_property_map cx pmap_fail, + Context.generate_property_map cx pmap_succ ) + in + let reason = mk_reason (RCustom "Result") fun_loc in + let (obj_fail, obj_succ) = + ( mk_object_def_type ~reason ~dict:None ~call:None id_fail dummy_prototype, + mk_object_def_type ~reason ~dict:None ~call:None id_succ dummy_prototype ) + in + ( Context.Wraps, + UnionT (mk_reason RUnion fun_loc, UnionRep.make obj_fail obj_succ []) ) + | _ -> failwith "cannot reach this case" + in + Context.add_type_assert cx call_loc (kind, TypeUtil.loc_of_t t); + return_t + | Some _ -> + add_output cx ~trace (Error_message.ETooManyTypeArgs (reason, reason, 1)); + AnyT.at AnyError fun_loc + in + let funtype = + DefT + ( fun_reason_new, + bogus_trust (), + FunT + ( dummy_static reason, + mk_reason RPrototype fun_loc |> Unsoundness.function_proto_any, + { + this_t = mk_reason RThis fun_loc |> MixedT.make |> with_trust bogus_trust; + params = [(Some "value", MixedT.at fun_loc |> with_trust bogus_trust)]; + rest_param = None; + return_t; + is_predicate = false; + closure_t = 0; + changeset = Changeset.empty; + def_reason = fun_reason_new; + } ) ) + in + rec_flow + cx + trace + (funtype, CallT (use_op, reason_op, { call_type with call_targs = None })) + (*********************) + (* optional chaining *) + (*********************) + | (DefT (r, _, (NullT | VoidT)), OptionalChainT (r', lhs_reason, chain)) -> + Context.mark_optional_chain cx (aloc_of_reason r') lhs_reason ~useful:true; + Nel.iter + (fun (_, t_out) -> rec_flow_t cx trace (InternalT (OptionalChainVoidT r), t_out)) + chain + | (InternalT (OptionalChainVoidT _), OptionalChainT (r', lhs_reason, chain)) -> + Context.mark_optional_chain cx (aloc_of_reason r') lhs_reason ~useful:false; + Nel.iter (fun (_, t_out) -> rec_flow_t cx trace (l, t_out)) chain + | (_, OptionalChainT (r', lhs_reason, chain)) + when match l with + | MaybeT _ + | OptionalT _ + | UnionT _ + | IntersectionT _ -> + false + | _ -> true -> + Context.mark_optional_chain + cx + (aloc_of_reason r') + lhs_reason + ~useful: + (match l with + | DefT (_, _, MixedT _) + | AnyT _ -> + true + | _ -> false); + let lhs_t = ref l in + Nel.iter + (fun (opt_use, t_out) -> + let t_out' = Tvar.mk cx (reason_of_t t_out) in + rec_flow cx trace (!lhs_t, apply_opt_use opt_use t_out'); + rec_flow_t cx trace (t_out', t_out); + lhs_t := t_out') + chain + | (InternalT (OptionalChainVoidT r), u) -> + rec_flow cx trace (DefT (r, bogus_trust (), VoidT), u) + 
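      (* Summarizing the three cases above with a sketch of the userland behavior:
       *
       *   declare var a: ?{b: {c: number}};
       *   a?.b.c;
       *
       * If `a` is null/void the whole chain short-circuits and every step receives
       * the chain's void result (the OptionalChainVoidT propagation); an already
       * short-circuited chain stays short-circuited; otherwise each opt_use in the
       * chain is applied to the previous step's result in order. *)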
(*************) + (* invariant *) + (*************) + | (_, InvariantT r') -> + Context.mark_invariant + cx + (aloc_of_reason r') + (reason_of_t l) + ~useful: + (match Type_filter.not_exists l with + | DefT (_, _, EmptyT Bottom) -> false + | _ -> true) + (***************) + (* maybe types *) + (***************) + + (* The type maybe(T) is the same as null | undefined | UseT *) + | (DefT (r, trust, MixedT Mixed_everything), UseT (use_op, MaybeT (_, tout))) -> + rec_flow cx trace (DefT (r, trust, MixedT Mixed_non_maybe), UseT (use_op, tout)) + | (DefT (r, trust, (NullT | VoidT)), UseT (use_op, MaybeT (_, tout))) -> + rec_flow cx trace (EmptyT.why r trust, UseT (use_op, tout)) + | (MaybeT _, ReposLowerT (reason_op, use_desc, u)) -> + (* Don't split the maybe type into its constituent members. Instead, reposition the entire maybe type. *) - let loc = aloc_of_reason reason_op in - let desc = if use_desc then Some (desc_of_reason reason_op) else None in - rec_flow cx trace (reposition cx ~trace (loc |> ALoc.to_loc) ?desc l, u) - - | DefT (_, MaybeT t), ObjAssignFromT (_, _, _, ObjAssign _) -> - (* This isn't correct, but matches the existing incorrectness of spreads - * today. In particular, spreading `null` and `void` become {}. The wrong - * part is that spreads should distribute through unions, so `{...?T}` - * should be `{...null}|{...void}|{...T}`, which simplifies to `{}`. *) - rec_flow cx trace (t, u) - - | DefT (_, MaybeT t), UseT (_, DefT (_, MaybeT _)) -> - rec_flow cx trace (t, u) - - | (DefT (reason, MaybeT t), _) -> - let reason = replace_reason_const ~keep_def_loc:true RNullOrVoid reason in - rec_flow cx trace (NullT.make reason, u); - rec_flow cx trace (VoidT.make reason, u); - rec_flow cx trace (t, u) - - (******************) - (* optional types *) - (******************) - - (** The type optional(T) is the same as undefined | UseT *) - - | DefT (r, VoidT), UseT (use_op, DefT (_, OptionalT tout)) -> - rec_flow cx trace (EmptyT.why r, UseT (use_op, tout)) - - | DefT (_, OptionalT _), ReposLowerT (reason, use_desc, u) -> - (* Don't split the optional type into its constituent members. Instead, + let loc = aloc_of_reason reason_op in + let desc = + if use_desc then + Some (desc_of_reason reason_op) + else + None + in + rec_flow cx trace (reposition cx ~trace loc ?desc l, u) + | (MaybeT (r, t), DestructuringT (reason, DestructAnnot, s, tout)) -> + let f t = + AnnotT + ( reason, + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (t, DestructuringT (reason, DestructAnnot, s, tvar))), + false ) + in + let void_t = VoidT.why r |> with_trust bogus_trust in + let null_t = NullT.why r |> with_trust bogus_trust in + let rep = UnionRep.make (f void_t) (f null_t) [f t] in + rec_unify cx trace ~use_op:unknown_use (UnionT (reason, rep)) tout + | (MaybeT (_, t), ObjAssignFromT (_, _, _, _, ObjAssign _)) -> + (* This isn't correct, but matches the existing incorrectness of spreads + * today. In particular, spreading `null` and `void` become {}. The wrong + * part is that spreads should distribute through unions, so `{...?T}` + * should be `{...null}|{...void}|{...T}`, which simplifies to `{}`. 
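         * Concretely (a sketch): with `declare var o: ?{x: number}`, a spread such
         * as `{...o}` is checked as though it were a spread of `{x: number}` --
         * the inner T is flowed straight to the ObjAssignFromT use below.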
*) + rec_flow cx trace (t, u) + | (MaybeT (_, t), UseT (_, MaybeT _)) -> rec_flow cx trace (t, u) + | (MaybeT (reason, t), _) -> + let reason = replace_desc_reason RNullOrVoid reason in + rec_flow cx trace (NullT.make reason |> with_trust Trust.bogus_trust, u); + rec_flow cx trace (VoidT.make reason |> with_trust Trust.bogus_trust, u); + rec_flow cx trace (t, u) + (******************) + (* optional types *) + (******************) + + (* The type optional(T) is the same as undefined | UseT *) + | (DefT (r, trust, VoidT), UseT (use_op, OptionalT (_, tout))) -> + rec_flow cx trace (EmptyT.why r trust, UseT (use_op, tout)) + | (OptionalT _, ReposLowerT (reason, use_desc, u)) -> + (* Don't split the optional type into its constituent members. Instead, reposition the entire optional type. *) - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - | DefT (_, OptionalT t), ObjAssignFromT (_, _, _, ObjAssign _) -> - (* This isn't correct, but matches the existing incorrectness of spreads - * today. In particular, spreading `null` and `void` become {}. The wrong - * part is that spreads should distribute through unions, so `{...?T}` - * should be `{...null}|{...void}|{...T}`, which simplifies to `{}`. *) - rec_flow cx trace (t, u) - - | DefT (_, OptionalT t), UseT (_, DefT (_, OptionalT _)) - | DefT (_, OptionalT t), UseT (_, DefT (_, MaybeT _)) -> - rec_flow cx trace (t, u) - - | DefT (r, OptionalT t), _ -> - rec_flow cx trace (VoidT.why r, u); - rec_flow cx trace (t, u) - - (*****************) - (* logical types *) - (*****************) - - | DefT (_, AnyT), NotT (reason, tout) -> - rec_flow_t cx trace (AnyT.why reason, tout) - - (* !x when x is of unknown truthiness *) - | DefT (_, BoolT None), NotT (reason, tout) - | DefT (_, StrT AnyLiteral), NotT (reason, tout) - | DefT (_, NumT AnyLiteral), NotT (reason, tout) -> - rec_flow_t cx trace (BoolT.at (aloc_of_reason reason |> ALoc.to_loc), tout) - - (* !x when x is falsy *) - | DefT (_, BoolT (Some false)), NotT (reason, tout) - | DefT (_, SingletonBoolT false), NotT (reason, tout) - | DefT (_, StrT (Literal (_, ""))), NotT (reason, tout) - | DefT (_, SingletonStrT ""), NotT (reason, tout) - | DefT (_, NumT (Literal (_, (0., _)))), NotT (reason, tout) - | DefT (_, SingletonNumT (0., _)), NotT (reason, tout) - | DefT (_, NullT), NotT (reason, tout) - | DefT (_, VoidT), NotT (reason, tout) -> - let reason = replace_reason_const (RBooleanLit true) reason in - rec_flow_t cx trace (DefT (reason, BoolT (Some true)), tout) - - (* !x when x is truthy *) - | (_, NotT(reason, tout)) -> - let reason = replace_reason_const (RBooleanLit false) reason in - rec_flow_t cx trace (DefT (reason, BoolT (Some false)), tout) - - | (left, AndT(_, right, u)) -> - begin match left with - | DefT (reason, NumT _) -> - add_output cx ~trace (FlowError.ESketchyNumberLint (Lints.SketchyNumberAnd, reason)) - | _ -> () - end; - (* a falsy && b ~> a + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + | (OptionalT (r, t), DestructuringT (reason, DestructAnnot, s, tout)) -> + let f t = + AnnotT + ( reason, + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (t, DestructuringT (reason, DestructAnnot, s, tvar))), + false ) + in + let void_t = VoidT.why r |> with_trust bogus_trust in + let rep = UnionRep.make (f void_t) (f t) [] in + rec_unify cx trace ~use_op:unknown_use (UnionT (reason, rep)) tout + | (OptionalT (_, t), ObjAssignFromT (_, _, _, _, ObjAssign _)) -> + (* This isn't correct, but matches the existing incorrectness of 
spreads + * today. In particular, spreading `null` and `void` become {}. The wrong + * part is that spreads should distribute through unions, so `{...?T}` + * should be `{...null}|{...void}|{...T}`, which simplifies to `{}`. *) + rec_flow cx trace (t, u) + | (OptionalT (_, t), UseT (_, OptionalT _)) + | (OptionalT (_, t), UseT (_, MaybeT _)) -> + rec_flow cx trace (t, u) + | (OptionalT (r, t), _) -> + rec_flow cx trace (VoidT.why r |> with_trust Trust.bogus_trust, u); + rec_flow cx trace (t, u) + (*****************) + (* logical types *) + (*****************) + + (* !x when x is of unknown truthiness *) + | (DefT (_, trust, BoolT None), NotT (reason, tout)) + | (DefT (_, trust, StrT AnyLiteral), NotT (reason, tout)) + | (DefT (_, trust, NumT AnyLiteral), NotT (reason, tout)) -> + rec_flow_t cx trace (BoolT.at (aloc_of_reason reason) trust, tout) + (* !x when x is falsy *) + | (DefT (_, trust, BoolT (Some false)), NotT (reason, tout)) + | (DefT (_, trust, SingletonBoolT false), NotT (reason, tout)) + | (DefT (_, trust, StrT (Literal (_, ""))), NotT (reason, tout)) + | (DefT (_, trust, SingletonStrT ""), NotT (reason, tout)) + | (DefT (_, trust, NumT (Literal (_, (0., _)))), NotT (reason, tout)) + | (DefT (_, trust, SingletonNumT (0., _)), NotT (reason, tout)) + | (DefT (_, trust, NullT), NotT (reason, tout)) + | (DefT (_, trust, VoidT), NotT (reason, tout)) -> + let reason = replace_desc_reason (RBooleanLit true) reason in + rec_flow_t cx trace (DefT (reason, trust, BoolT (Some true)), tout) + (* !x when x is truthy *) + | (_, NotT (reason, tout)) -> + let reason = replace_desc_reason (RBooleanLit false) reason in + rec_flow_t cx trace (DefT (reason, bogus_trust (), BoolT (Some false)), tout) + | (left, AndT (_, right, u)) -> + begin + match left with + | DefT (reason, _, NumT _) -> + add_output + cx + ~trace + (Error_message.ESketchyNumberLint (Lints.SketchyNumberAnd, reason)) + | _ -> () + end; + + (* a falsy && b ~> a a truthy && b ~> b a && b ~> a falsy | b *) - (match Type_filter.exists left with - | DefT (_, EmptyT) -> (* falsy *) - rec_flow cx trace (left, PredicateT (NotP (ExistsP None), u)) - | _ -> - (match Type_filter.not_exists left with - | DefT (_, EmptyT) -> (* truthy *) - rec_flow cx trace (right, UseT (unknown_use, u)) - | _ -> - rec_flow cx trace (left, PredicateT (NotP (ExistsP None), u)); - rec_flow cx trace (right, UseT (unknown_use, u)) - ) - ) - - | (left, OrT(_, right, u)) -> - (* a truthy || b ~> a + (match Type_filter.exists left with + | DefT (_, _, EmptyT Bottom) -> + (* falsy *) + rec_flow cx trace (left, PredicateT (NotP (ExistsP None), u)) + | _ -> + (match Type_filter.not_exists left with + | DefT (_, _, EmptyT Bottom) -> + (* truthy *) + rec_flow cx trace (right, UseT (unknown_use, u)) + | _ -> + rec_flow cx trace (left, PredicateT (NotP (ExistsP None), u)); + rec_flow cx trace (right, UseT (unknown_use, u)))) + | (left, OrT (_, right, u)) -> + (* a truthy || b ~> a a falsy || b ~> b a || b ~> a truthy | b *) - (match Type_filter.not_exists left with - | DefT (_, EmptyT) -> (* truthy *) - rec_flow cx trace (left, PredicateT (ExistsP None, u)) - | _ -> - (match Type_filter.exists left with - | DefT (_, EmptyT) -> (* falsy *) - rec_flow cx trace (right, UseT (unknown_use, u)) - | _ -> - rec_flow cx trace (left, PredicateT (ExistsP None, u)); - rec_flow cx trace (right, UseT (unknown_use, u)) - ) - ) - - | (left, NullishCoalesceT(_, right, u)) when ( - match left with - | DefT (_, ( - OptionalT _ - | MaybeT _ - | UnionT _ - | IntersectionT _ - )) -> false - | _ 
-> true - ) -> - begin match left with - | DefT (_, ( - NullT - | VoidT - )) -> rec_flow_t cx trace (right, u) - | _ -> rec_flow_t cx trace (left, u) - end - - (*****************************) - (* upper and lower any types *) - (*****************************) - - (** AnyWithLowerBoundT and AnyWithUpperBoundT are mildly useful types that - model subtyping constraints without introducing potentially unwanted - effects: they can appear on both sides of a type, but only constrain one - of those sides. In some sense, they are liked bounded AnyT: indeed, AnyT - has the same behavior as AnyWithLowerBound (EmptyT) and - AnyWithUpperBoundT (MixedT). Thus, these types can be used instead of - AnyT when some precise typechecking is required without overconstraining - the system. A completely static alternative would be achieved with - bounded type variables, which Flow does not support yet. **) - - | AnyWithLowerBoundT t, _ -> - rec_flow cx trace (t, u) - - | _, UseT (use_op, AnyWithLowerBoundT t) -> - rec_flow cx trace (l, UseT (use_op, MixedT.why (reason_of_t t))) - - | AnyWithUpperBoundT t, _ -> - rec_flow cx trace (EmptyT.why (reason_of_t t), u) - - | _, UseT (_, AnyWithUpperBoundT t) -> - rec_flow_t cx trace (l, t) - - - | _, ReactKitT (use_op, reason_op, React.CreateElement0 (clone, config, children, tout)) -> - let tool = React.CreateElement (clone, l, config, children, tout) in - rec_flow cx trace (l, ReactKitT (use_op, reason_op, tool)) - - (*********************) - (* type applications *) - (*********************) - - (* Sometimes a polymorphic class may have a polymorphic method whose return + (match Type_filter.not_exists left with + | DefT (_, _, EmptyT Bottom) -> + (* truthy *) + rec_flow cx trace (left, PredicateT (ExistsP None, u)) + | _ -> + (match Type_filter.exists left with + | DefT (_, _, EmptyT Bottom) -> + (* falsy *) + rec_flow cx trace (right, UseT (unknown_use, u)) + | _ -> + rec_flow cx trace (left, PredicateT (ExistsP None, u)); + rec_flow cx trace (right, UseT (unknown_use, u)))) + | (left, NullishCoalesceT (_, right, u)) + when match left with + | OptionalT _ + | MaybeT _ + | UnionT _ + | IntersectionT _ -> + false + | _ -> true -> + begin + match left with + | DefT (_, _, (NullT | VoidT)) -> rec_flow_t cx trace (right, u) + | _ -> rec_flow_t cx trace (left, u) + end + | (_, ReactKitT (use_op, reason_op, React.CreateElement0 (clone, config, children, tout))) + -> + let tool = React.CreateElement (clone, l, config, children, tout) in + rec_flow cx trace (l, ReactKitT (use_op, reason_op, tool)) + (*********************) + (* type applications *) + (*********************) + + (* Sometimes a polymorphic class may have a polymorphic method whose return type is a type application on the same polymorphic class, possibly expanded. See Array#map or Array#concat, e.g. It is not unusual for programmers to reuse variables, assigning the result of a method call on @@ -2711,139 +2501,158 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = Also worth noting is that we can never safely cache def types. This is because substitution of type parameters in def types does not affect their reasons, so we'd trivially lose precision. 
*) - - | (ThisTypeAppT(reason_tapp,c,this,ts), _) -> - let reason_op = reason_of_use_t u in - let tc = specialize_class cx trace ~reason_op ~reason_tapp c ts in - instantiate_this_class cx trace reason_tapp tc this (Upper u) - - | (_, UseT (use_op, ThisTypeAppT(reason_tapp,c,this,ts))) -> - let reason_op = reason_of_t l in - let tc = specialize_class cx trace ~reason_op ~reason_tapp c ts in - instantiate_this_class cx trace reason_tapp tc this (Lower (use_op, l)) - - | DefT (_, TypeAppT _), ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - | DefT (reason_tapp, TypeAppT(use_op, c, ts)), MethodT (_, _, _, _, _, _) -> - let reason_op = reason_of_use_t u in - let t = mk_typeapp_instance cx - ~trace ~use_op ~reason_op ~reason_tapp ~cache:[] c ts in - rec_flow cx trace (t, u) - - (* If we have a TypeAppT (c, ts) ~> TypeAppT (c, ts) then we want to - * concretize both cs to PolyTs so that we may referentially compare them. - * We cannot compare the non-concretized versions since they may have been - * reposition, they may be two OpenTs from different locations, or any other - * way you can access the same PolyT via different means that results in a - * different c being passed to TypeAppT. - * - * We use the ConcretizeTypeAppsT use type to concretize both the c of our - * upper and lower TypeAppT bound. We start by concretizing the upper bound - * which we signal by setting the final element in ConcretizeTypeAppsT to - * true. *) - | DefT (r1, TypeAppT (op1, c1, ts1)), - UseT (use_op, DefT (r2, TypeAppT (op2, c2, ts2))) -> - if TypeAppExpansion.push_unless_loop cx (c1, ts1) then ( - if TypeAppExpansion.push_unless_loop cx (c2, ts2) then ( - rec_flow cx trace (c2, ConcretizeTypeAppsT - (use_op, (ts2, op2, r2), (c1, ts1, op1, r1), true)); - TypeAppExpansion.pop (); - ); - TypeAppExpansion.pop (); - ); - - - (* When we have concretized the c for our upper bound TypeAppT then we want - * to concretize the lower bound. We flip all our arguments to - * ConcretizeTypeAppsT and set the final element to false to signal that we - * have concretized the upper bound's c. - * - * If the upper bound's c is not a PolyT then we will fall down to an - * incompatible use error. *) - | DefT (_, PolyT _) as c2, - ConcretizeTypeAppsT (use_op, (ts2, op2, r2), (c1, ts1, op1, r1), true) -> - rec_flow cx trace (c1, ConcretizeTypeAppsT - (use_op, (ts1, op1, r1), (c2, ts2, op2, r2), false)) - - (* When we have concretized the c for our lower bound TypeAppT then we can - * finally run our TypeAppT ~> TypeAppT logic. If we have referentially the - * same PolyT for each TypeAppT then we want to check the type arguments - * only. (Checked in the when condition.) If we do not have the same PolyT - * for each TypeAppT then we want to expand our TypeAppTs and compare the - * expanded results. - * - * If the lower bound's c is not a PolyT then we will fall down to an - * incompatible use error. - * - * The upper bound's c should always be a PolyT here since we could not have - * made it here if it was not given the logic of our earlier case. 
*) - | DefT (_, PolyT (_, _, id1)), - ConcretizeTypeAppsT (use_op, (ts1, _, r1), (DefT (_, PolyT (_, _, id2)), ts2, _, r2), false) - when id1 = id2 && List.length ts1 = List.length ts2 -> - let targs = List.map2 (fun t1 t2 -> (t1, t2)) ts1 ts2 in - rec_flow cx trace (l, - TypeAppVarianceCheckT (use_op, r1, r2, targs)) - - (* This is the case which implements the expansion for our - * TypeAppT (c, ts) ~> TypeAppT (c, ts) when the cs are unequal. *) - | DefT (_, PolyT (xs1, t1, id1)), - ConcretizeTypeAppsT (use_op, (ts1, op1, r1), (DefT (_, PolyT (xs2, t2, id2)), ts2, op2, r2), false) -> - let t1 = mk_typeapp_instance_of_poly cx trace ~use_op:op2 ~reason_op:r2 ~reason_tapp:r1 - id1 xs1 t1 ts1 in - let t2 = mk_typeapp_instance_of_poly cx trace ~use_op:op1 ~reason_op:r1 ~reason_tapp:r2 - id2 xs2 t2 ts2 in - rec_flow cx trace (t1, UseT (use_op, t2)) - - | DefT (reason_tapp, TypeAppT (use_op, c, ts)), _ -> - if TypeAppExpansion.push_unless_loop cx (c, ts) then ( - let reason_op = reason_of_use_t u in - let t = mk_typeapp_instance cx ~trace ~use_op ~reason_op ~reason_tapp c ts in - rec_flow cx trace (t, u); - TypeAppExpansion.pop (); - ); - - | _, UseT (use_op, DefT (reason_tapp, TypeAppT (use_op_tapp, c, ts))) -> - if TypeAppExpansion.push_unless_loop cx (c, ts) then ( - let reason_op = reason_of_t l in - let t = mk_typeapp_instance cx ~trace ~use_op:use_op_tapp ~reason_op ~reason_tapp c ts in - rec_flow cx trace (l, UseT (use_op, t)); - TypeAppExpansion.pop (); - ); - - (**********************) - (* opaque types *) - (**********************) - - (* If the ids are equal, we use flow_type_args to make sure that the type arguments of each - * are compatible with each other. If there are no type args, this doesn't do anything *) - | OpaqueT (lreason, {opaque_id = id1; opaque_type_args = ltargs; _}), - UseT (use_op, OpaqueT (ureason, {opaque_id = id2; opaque_type_args = utargs; _})) when id1 = id2 -> - flow_type_args cx trace ~use_op lreason ureason ltargs utargs - - (* Repositioning should happen before opaque types are considered so that we can - * have the "most recent" location when we do look at the opaque type *) - | OpaqueT _, ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - (* If the type is still in the same file it was defined, we allow it to - * expose its underlying type information *) - | OpaqueT (r, {underlying_t = Some t; _}), _ - when Loc.source (aloc_of_reason r |> ALoc.to_loc) = Loc.source (def_loc_of_reason r) -> - rec_flow cx trace (t, u) - - (* If the lower bound is in the same file as where the opaque type was defined, - * we expose the underlying type information *) - | _, UseT (use_op, OpaqueT (r, {underlying_t = Some t; _})) - when Loc.source (aloc_of_reason (reason_of_t l) |> ALoc.to_loc) = - Loc.source (def_loc_of_reason r) -> - rec_flow cx trace (l, UseT (use_op, t)) - - (*****************************************************************) - (* Intersection type preprocessing for certain object predicates *) - (*****************************************************************) - - (* Predicate refinements on intersections of object types need careful + | (ThisTypeAppT (reason_tapp, c, this, ts), _) -> + let reason_op = reason_of_use_t u in + let tc = specialize_class cx trace ~reason_op ~reason_tapp c ts in + instantiate_this_class cx trace reason_tapp tc this (Upper u) + | (_, UseT (use_op, ThisTypeAppT (reason_tapp, c, this, ts))) -> + let reason_op = reason_of_t l in + let tc = specialize_class cx trace 
~reason_op ~reason_tapp c ts in + instantiate_this_class cx trace reason_tapp tc this (Lower (use_op, l)) + | (TypeAppT _, ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + | (TypeAppT (reason_tapp, use_op, c, ts), MethodT (_, _, _, _, _, _)) -> + let reason_op = reason_of_use_t u in + let t = mk_typeapp_instance cx ~trace ~use_op ~reason_op ~reason_tapp ~cache:[] c ts in + rec_flow cx trace (t, u) + (* If we have a TypeAppT (c, ts) ~> TypeAppT (c, ts) then we want to + * concretize both cs to PolyTs so that we may referentially compare them. + * We cannot compare the non-concretized versions since they may have been + * reposition, they may be two OpenTs from different locations, or any other + * way you can access the same PolyT via different means that results in a + * different c being passed to TypeAppT. + * + * We use the ConcretizeTypeAppsT use type to concretize both the c of our + * upper and lower TypeAppT bound. We start by concretizing the upper bound + * which we signal by setting the final element in ConcretizeTypeAppsT to + * true. *) + | (TypeAppT (r1, op1, c1, ts1), UseT (use_op, TypeAppT (r2, op2, c2, ts2))) -> + if TypeAppExpansion.push_unless_loop cx (c1, ts1) then ( + if TypeAppExpansion.push_unless_loop cx (c2, ts2) then ( + rec_flow + cx + trace + (c2, ConcretizeTypeAppsT (use_op, (ts2, op2, r2), (c1, ts1, op1, r1), true)); + TypeAppExpansion.pop () + ); + TypeAppExpansion.pop () + ) + (* When we have concretized the c for our upper bound TypeAppT then we want + * to concretize the lower bound. We flip all our arguments to + * ConcretizeTypeAppsT and set the final element to false to signal that we + * have concretized the upper bound's c. + * + * If the upper bound's c is not a PolyT then we will fall down to an + * incompatible use error. *) + | ( (DefT (_, _, PolyT _) as c2), + ConcretizeTypeAppsT (use_op, (ts2, op2, r2), (c1, ts1, op1, r1), true) ) -> + rec_flow + cx + trace + (c1, ConcretizeTypeAppsT (use_op, (ts1, op1, r1), (c2, ts2, op2, r2), false)) + (* When we have concretized the c for our lower bound TypeAppT then we can + * finally run our TypeAppT ~> TypeAppT logic. If we have referentially the + * same PolyT for each TypeAppT then we want to check the type arguments + * only. (Checked in the when condition.) If we do not have the same PolyT + * for each TypeAppT then we want to expand our TypeAppTs and compare the + * expanded results. + * + * If the lower bound's c is not a PolyT then we will fall down to an + * incompatible use error. + * + * The upper bound's c should always be a PolyT here since we could not have + * made it here if it was not given the logic of our earlier case. *) + | ( DefT (_, _, PolyT (_, _, _, id1)), + ConcretizeTypeAppsT + (use_op, (ts1, _, r1), (DefT (_, _, PolyT (_, _, _, id2)), ts2, _, r2), false) ) + when id1 = id2 && List.length ts1 = List.length ts2 -> + let targs = List.map2 (fun t1 t2 -> (t1, t2)) ts1 ts2 in + rec_flow cx trace (l, TypeAppVarianceCheckT (use_op, r1, r2, targs)) + (* This is the case which implements the expansion for our + * TypeAppT (c, ts) ~> TypeAppT (c, ts) when the cs are unequal. 
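       * For example (a sketch): checking `Foo<string>` against `Bar<string>`,
       * where `Foo` and `Bar` stand for two different polymorphic definitions,
       * lands here; both applications are instantiated via
       * mk_typeapp_instance_of_poly and the two concrete results are then compared
       * with an ordinary UseT, instead of the pairwise type-argument check used in
       * the id1 = id2 case above.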
*) + | ( DefT (_, _, PolyT (tparams_loc1, xs1, t1, id1)), + ConcretizeTypeAppsT + ( use_op, + (ts1, op1, r1), + (DefT (_, _, PolyT (tparams_loc2, xs2, t2, id2)), ts2, op2, r2), + false ) ) -> + let (op1, op2) = + match root_of_use_op use_op with + | UnknownUse -> (op1, op2) + | _ -> (use_op, use_op) + in + let t1 = + mk_typeapp_instance_of_poly + cx + trace + ~use_op:op2 + ~reason_op:r2 + ~reason_tapp:r1 + id1 + tparams_loc1 + xs1 + t1 + ts1 + in + let t2 = + mk_typeapp_instance_of_poly + cx + trace + ~use_op:op1 + ~reason_op:r1 + ~reason_tapp:r2 + id2 + tparams_loc2 + xs2 + t2 + ts2 + in + rec_flow cx trace (t1, UseT (use_op, t2)) + | (TypeAppT (reason_tapp, use_op, c, ts), _) -> + if TypeAppExpansion.push_unless_loop cx (c, ts) then ( + let reason_op = reason_of_use_t u in + let t = mk_typeapp_instance cx ~trace ~use_op ~reason_op ~reason_tapp c ts in + rec_flow cx trace (t, u); + TypeAppExpansion.pop () + ) + | (_, UseT (use_op, TypeAppT (reason_tapp, use_op_tapp, c, ts))) -> + if TypeAppExpansion.push_unless_loop cx (c, ts) then ( + let reason_op = reason_of_t l in + let t = + mk_typeapp_instance cx ~trace ~use_op:use_op_tapp ~reason_op ~reason_tapp c ts + in + rec_flow cx trace (l, UseT (use_op, t)); + TypeAppExpansion.pop () + ) + (**********************) + (* opaque types *) + (**********************) + + (* If the ids are equal, we use flow_type_args to make sure that the type arguments of each + * are compatible with each other. If there are no type args, this doesn't do anything *) + | ( OpaqueT (lreason, { opaque_id = id1; opaque_type_args = ltargs; _ }), + UseT (use_op, OpaqueT (ureason, { opaque_id = id2; opaque_type_args = utargs; _ })) ) + when ALoc.concretize_equal (Context.aloc_tables cx) id1 id2 -> + flow_type_args cx trace ~use_op lreason ureason ltargs utargs + (* Repositioning should happen before opaque types are considered so that we can + * have the "most recent" location when we do look at the opaque type *) + | (OpaqueT _, ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + (* If the type is still in the same file it was defined, we allow it to + * expose its underlying type information *) + | (OpaqueT (r, { underlying_t = Some t; _ }), _) + when ALoc.source (aloc_of_reason r) = ALoc.source (def_aloc_of_reason r) -> + rec_flow cx trace (t, u) + (* If the lower bound is in the same file as where the opaque type was defined, + * we expose the underlying type information *) + | (_, UseT (use_op, OpaqueT (r, { underlying_t = Some t; _ }))) + when ALoc.source (aloc_of_reason (reason_of_t l)) = ALoc.source (def_aloc_of_reason r) -> + rec_flow cx trace (l, UseT (use_op, t)) + (*****************************************************************) + (* Intersection type preprocessing for certain object predicates *) + (*****************************************************************) + + (* Predicate refinements on intersections of object types need careful handling. An intersection of object types passes a predicate when any of those object types passes the predicate: however, the refined type must be the intersection as a whole, not the particular object type that @@ -2856,20 +2665,15 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = object type with all the properties of those object types. The added complication arises as an implementation detail, because we do not concatenate those object types explicitly. 
*) - - | _, IntersectionPreprocessKitT (_, - SentinelPropTest (sense, key, t, inter, tvar)) -> - sentinel_prop_test_generic key cx trace tvar inter (sense, l, t) - - | _, IntersectionPreprocessKitT (reason, - PropExistsTest (sense, key, inter, tvar)) -> - prop_exists_test_generic reason key cx trace tvar inter sense l - - (***********************) - (* Singletons and keys *) - (***********************) - - (** Finite keysets over arbitrary objects can be represented by KeysT. While + | (_, IntersectionPreprocessKitT (_, SentinelPropTest (sense, key, t, inter, tvar))) -> + sentinel_prop_test_generic key cx trace tvar inter (sense, l, t) + | (_, IntersectionPreprocessKitT (_, PropExistsTest (sense, key, inter, tvar))) -> + prop_exists_test_generic key cx trace tvar inter sense l + (***********************) + (* Singletons and keys *) + (***********************) + + (* Finite keysets over arbitrary objects can be represented by KeysT. While it is possible to also represent singleton string types using KeysT (by taking the keyset of an object with a single property whose key is that string and whose value is ignored), we can model them more directly @@ -2887,313 +2691,357 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = would preclude other strings to be stored in that location. Thus, by necessity we allow all string types to flow to StrT (whereas only exactly matching string literal types may flow to SingletonStrT). **) - - | DefT (rl, StrT actual), UseT (use_op, DefT (ru, SingletonStrT expected)) -> - if TypeUtil.literal_eq expected actual - then () - else - let reasons = FlowError.ordered_reasons (rl, ru) in - add_output cx ~trace - (FlowError.EExpectedStringLit (reasons, expected, actual, use_op)) - - | DefT (rl, NumT actual), UseT (use_op, DefT (ru, SingletonNumT expected)) -> - if TypeUtil.number_literal_eq expected actual - then () - else - let reasons = FlowError.ordered_reasons (rl, ru) in - add_output cx ~trace - (FlowError.EExpectedNumberLit (reasons, expected, actual, use_op)) - - | DefT (rl, BoolT actual), UseT (use_op, DefT (ru, SingletonBoolT expected)) -> - if TypeUtil.boolean_literal_eq expected actual - then () - else - let reasons = FlowError.ordered_reasons (rl, ru) in - add_output cx ~trace - (FlowError.EExpectedBooleanLit (reasons, expected, actual, use_op)) - - (*****************************************************) - (* keys (NOTE: currently we only support string keys *) - (*****************************************************) - - | DefT (reason_s, StrT literal), UseT (use_op, KeysT (reason_op, o)) -> - let reason_next = match literal with - | Literal (_, x) -> replace_reason_const (RProperty (Some x)) reason_s - | _ -> replace_reason_const RUnknownString reason_s in - (* check that o has key x *) - let u = HasOwnPropT(use_op, reason_next, literal) in - rec_flow cx trace (o, ReposLowerT(reason_op, false, u)) - - | KeysT _, ToStringT (_, t) -> - (* KeysT outputs strings, so we know ToStringT will be a no-op. 
*) - rec_flow cx trace (l, t) - - | KeysT (reason1, o1), _ -> - (* flow all keys of o1 to u *) - rec_flow cx trace (o1, GetKeysT (reason1, u)) - - (* helpers *) - - | DefT (reason_o, ObjT { props_tmap = mapr; dict_t; _; }), - HasOwnPropT (use_op, reason_op, x) -> - (match x, dict_t with - (* If we have a literal string and that property exists *) - | Literal (_, x), _ when Context.has_prop cx mapr x -> () - (* If we have a dictionary, try that next *) - | _, Some { key; _ } -> rec_flow_t cx trace (DefT (reason_op, StrT x), key) - | _ -> - let prop = match x with - | Literal (_, prop) -> Some prop - | _ -> None - in - let err = FlowError.EPropNotFound (prop, (reason_op, reason_o), use_op) in - add_output cx ~trace err) - - | DefT (reason_o, InstanceT (_, _, _, instance)), - HasOwnPropT(use_op, reason_op, Literal (_, x)) -> - let own_props = Context.find_props cx instance.own_props in - let proto_props = Context.find_props cx instance.proto_props in - let fields = SMap.union own_props proto_props in - (match SMap.get x fields with - | Some _ -> () - | None -> - let err = FlowError.EPropNotFound (Some x, (reason_op, reason_o), use_op) in - add_output cx ~trace err) - - | DefT (reason_o, InstanceT (_, _, _, _)), HasOwnPropT(use_op, reason_op, _) -> - let err = FlowError.EPropNotFound (None, (reason_op, reason_o), use_op) in - add_output cx ~trace err - - (* AnyObjT has every prop *) - | DefT (_, AnyObjT), HasOwnPropT _ -> () - - | DefT (_, ObjT { flags; props_tmap; dict_t; _ }), GetKeysT (reason_op, keys) -> - begin match flags.sealed with - | Sealed -> - (* flow each key of l to keys *) - Context.iter_props cx props_tmap (fun x _ -> - let reason = replace_reason_const (RStringLit x) reason_op in - let t = DefT (reason, StrT (Literal (None, x))) in - rec_flow cx trace (t, keys) - ); - Option.iter dict_t (fun { key; _ } -> - rec_flow cx trace (key, ToStringT (reason_op, keys)) - ); - | _ -> - rec_flow cx trace (StrT.why reason_op, keys) - end - - | DefT (_, InstanceT (_, _, _, instance)), GetKeysT (reason_op, keys) -> - (* methods are not enumerable, so only walk fields *) - let own_props = Context.find_props cx instance.own_props in - own_props |> SMap.iter (fun x _ -> - let reason = replace_reason_const (RStringLit x) reason_op in - let t = DefT (reason, StrT (Literal (None, x))) in - rec_flow cx trace (t, keys) - ) - - | DefT (reason, (AnyObjT | AnyFunT)), GetKeysT (_, keys) -> - rec_flow cx trace (StrT.why reason, keys) - - | DefT (_, AnyT), GetKeysT (reason_op, keys) -> - rec_flow cx trace (AnyT.why reason_op, keys) - - (** In general, typechecking is monotonic in the sense that more constraints + | (DefT (rl, _, StrT actual), UseT (use_op, DefT (ru, _, SingletonStrT expected))) -> + if TypeUtil.literal_eq expected actual then + () + else + (* TODO: ordered_reasons should not be necessary *) + let (rl, ru) = FlowError.ordered_reasons (rl, ru) in + add_output + cx + ~trace + (Error_message.EExpectedStringLit { reason_lower = rl; reason_upper = ru; use_op }) + | (DefT (rl, _, NumT actual), UseT (use_op, DefT (ru, _, SingletonNumT expected))) -> + if TypeUtil.number_literal_eq expected actual then + () + else + (* TODO: ordered_reasons should not be necessary *) + let (rl, ru) = FlowError.ordered_reasons (rl, ru) in + add_output + cx + ~trace + (Error_message.EExpectedNumberLit { reason_lower = rl; reason_upper = ru; use_op }) + | (DefT (rl, _, BoolT actual), UseT (use_op, DefT (ru, _, SingletonBoolT expected))) -> + if TypeUtil.boolean_literal_eq expected actual then + () + else + (* TODO: 
ordered_reasons should not be necessary *) + let (rl, ru) = FlowError.ordered_reasons (rl, ru) in + add_output + cx + ~trace + (Error_message.EExpectedBooleanLit { reason_lower = rl; reason_upper = ru; use_op }) + (*****************************************************) + (* keys (NOTE: currently we only support string keys *) + (*****************************************************) + | (DefT (reason_s, _, StrT literal), UseT (use_op, KeysT (reason_op, o))) -> + let reason_next = + match literal with + | Literal (_, x) -> replace_desc_new_reason (RProperty (Some x)) reason_s + | _ -> replace_desc_new_reason RUnknownString reason_s + in + (* check that o has key x *) + let u = HasOwnPropT (use_op, reason_next, literal) in + rec_flow cx trace (o, ReposLowerT (reason_op, false, u)) + | (KeysT _, ToStringT (_, t)) -> + (* KeysT outputs strings, so we know ToStringT will be a no-op. *) + rec_flow cx trace (l, t) + | (KeysT (reason1, o1), _) -> + (* flow all keys of o1 to u *) + rec_flow cx trace (o1, GetKeysT (reason1, u)) + (* helpers *) + | ( DefT (reason_o, _, ObjT { props_tmap = mapr; dict_t; _ }), + HasOwnPropT (use_op, reason_op, x) ) -> + (match (x, dict_t) with + (* If we have a literal string and that property exists *) + | (Literal (_, x), _) when Context.has_prop cx mapr x -> () + (* If we have a dictionary, try that next *) + | (_, Some { key; _ }) -> + rec_flow_t cx trace (DefT (reason_op, bogus_trust (), StrT x), key) + | _ -> + let prop = + match x with + | Literal (_, prop) -> Some prop + | _ -> None + in + let err = Error_message.EPropNotFound (prop, (reason_op, reason_o), use_op) in + add_output cx ~trace err) + | ( DefT (reason_o, _, InstanceT (_, _, _, instance)), + HasOwnPropT (use_op, reason_op, Literal (_, x)) ) -> + let own_props = Context.find_props cx instance.own_props in + let proto_props = Context.find_props cx instance.proto_props in + let fields = SMap.union own_props proto_props in + (match SMap.get x fields with + | Some _ -> () + | None -> + let err = Error_message.EPropNotFound (Some x, (reason_op, reason_o), use_op) in + add_output cx ~trace err) + | (DefT (reason_o, _, InstanceT (_, _, _, _)), HasOwnPropT (use_op, reason_op, _)) -> + let err = Error_message.EPropNotFound (None, (reason_op, reason_o), use_op) in + add_output cx ~trace err + (* AnyT has every prop *) + | (AnyT _, HasOwnPropT _) -> () + | (DefT (_, _, ObjT { flags; props_tmap; dict_t; _ }), GetKeysT (reason_op, keys)) -> + begin + match flags.sealed with + | Sealed -> + (* flow the union of keys of l to keys *) + let keylist = + SMap.fold + (fun x _ acc -> + let reason = replace_desc_new_reason (RStringLit x) reason_op in + DefT (reason, bogus_trust (), SingletonStrT x) :: acc) + (Context.find_props cx props_tmap) + [] + in + rec_flow cx trace (union_of_ts reason_op keylist, keys); + Option.iter dict_t (fun { key; _ } -> + rec_flow cx trace (key, ToStringT (reason_op, keys))) + | _ -> rec_flow cx trace (StrT.why reason_op |> with_trust bogus_trust, keys) + end + | (DefT (_, _, InstanceT (_, _, _, instance)), GetKeysT (reason_op, keys)) -> + (* methods are not enumerable, so only walk fields *) + let own_props = Context.find_props cx instance.own_props in + let keylist = + SMap.fold + (fun x _ acc -> + let reason = replace_desc_new_reason (RStringLit x) reason_op in + DefT (reason, bogus_trust (), SingletonStrT x) :: acc) + own_props + [] + in + rec_flow cx trace (union_of_ts reason_op keylist, keys) + | (AnyT _, GetKeysT (reason_op, keys)) -> + rec_flow cx trace (StrT.why reason_op |> 
with_trust literal_trust, keys) + (* In general, typechecking is monotonic in the sense that more constraints produce more errors. However, sometimes we may want to speculatively try out constraints, backtracking if they produce errors (and removing the errors produced). This is useful to typecheck union types and intersection types: see below. **) - (** NOTE: It is important that any def type that simplifies to a union or + (* NOTE: It is important that any def type that simplifies to a union or intersection of other def types be processed before we process unions and intersections: otherwise we may get spurious errors. **) - (**********) - (* values *) - (**********) - - | DefT (_, ObjT o), GetValuesT (reason, values) -> - let { - flags; - proto_t = _; - props_tmap = tmap; - dict_t; - call_t = _; (* call props excluded from values *) - } = o in - (* Find all of the props. *) - let props = Context.find_props cx tmap in - (* Get the read type for all readable properties and discard the rest. *) - let ts = SMap.fold (fun _ prop ts -> - match Property.read_t prop with - | Some t -> - let t = if flags.frozen then - match t with - | DefT (t_reason, StrT (Literal (_, lit))) -> - let t_reason = replace_reason_const (RStringLit lit) t_reason in - DefT (t_reason, SingletonStrT lit) - | DefT (t_reason, NumT (Literal (_, lit))) -> - let t_reason = replace_reason_const (RNumberLit (snd lit)) t_reason in - DefT (t_reason, SingletonNumT lit) - | DefT (t_reason, BoolT (Some lit)) -> - let t_reason = replace_reason_const (RBooleanLit lit) t_reason in - DefT (t_reason, SingletonBoolT lit) - | _ -> t - else t in - t :: ts - | None -> ts - ) props [] in - (* If the object has a dictionary value then add that to our types. *) - let ts = match dict_t with - | Some { value; _ } -> value :: ts - | None -> ts in - (* Create a union type from all our selected types. *) - let values_l = union_of_ts reason ts in - rec_flow_t cx trace (values_l, values) - - | DefT (_, InstanceT (_, _, _, { own_props; _ })), GetValuesT (reason, values) -> - (* Find all of the props. *) - let props = Context.find_props cx own_props in - (* Get the read type for all readable properties and discard the rest. *) - let ts = SMap.fold (fun key prop ts -> - match Property.read_t prop with - (* We don't want to include the property type if its name is the + (**********) + (* values *) + (**********) + | (DefT (_, _, ObjT o), GetValuesT (reason, values)) -> + let { + flags; + proto_t = _; + props_tmap = tmap; + dict_t; + call_t = _ (* call props excluded from values *); + } = + o + in + (* Find all of the props. *) + let props = Context.find_props cx tmap in + (* Get the read type for all readable properties and discard the rest. *) + let ts = + SMap.fold + (fun _ prop ts -> + match Property.read_t prop with + | Some t -> + let t = + if flags.frozen then + match t with + | DefT (t_reason, trust, StrT (Literal (_, lit))) -> + let t_reason = replace_desc_reason (RStringLit lit) t_reason in + DefT (t_reason, trust, SingletonStrT lit) + | DefT (t_reason, trust, NumT (Literal (_, lit))) -> + let t_reason = replace_desc_reason (RNumberLit (snd lit)) t_reason in + DefT (t_reason, trust, SingletonNumT lit) + | DefT (t_reason, trust, BoolT (Some lit)) -> + let t_reason = replace_desc_reason (RBooleanLit lit) t_reason in + DefT (t_reason, trust, SingletonBoolT lit) + | _ -> t + else + t + in + t :: ts + | None -> ts) + props + [] + in + (* If the object has a dictionary value then add that to our types. 
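+           * For example (informally), $Values<{ a: number, [key: string]: boolean }>
+           * includes boolean alongside number, because the dictionary's value type
+           * joins the union built below.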
*) + let ts = + match dict_t with + | Some { value; _ } -> value :: ts + | None -> ts + in + (* Create a union type from all our selected types. *) + let values_l = union_of_ts reason ts in + rec_flow_t cx trace (values_l, values) + | (DefT (_, _, InstanceT (_, _, _, { own_props; _ })), GetValuesT (reason, values)) -> + (* Find all of the props. *) + let props = Context.find_props cx own_props in + (* Get the read type for all readable properties and discard the rest. *) + let ts = + SMap.fold + (fun key prop ts -> + match Property.read_t prop with + (* We don't want to include the property type if its name is the internal value "$key" because that will be the type for the instance index and not the value. *) - | Some t when key != "$key" -> t :: ts - | _ -> ts - ) props [] in - (* Create a union type from all our selected types. *) - let values_l = union_of_ts reason ts in - rec_flow_t cx trace (values_l, values) - - (* Any will always be ok *) - | DefT (_, AnyT), GetValuesT (reason, values) - | DefT (_, AnyObjT), GetValuesT (reason, values) -> - rec_flow_t cx trace (AnyT.why reason, values) - - (********************************) - (* union and intersection types *) - (********************************) - - (* Don't split the union type into its constituent members. Instead, + | Some t when key != "$key" -> t :: ts + | _ -> ts) + props + [] + in + (* Create a union type from all our selected types. *) + let values_l = union_of_ts reason ts in + rec_flow_t cx trace (values_l, values) + (* Any will always be ok *) + | (AnyT (_, src), GetValuesT (reason, values)) -> + rec_flow_t cx trace (AnyT.why src reason, values) + (********************************) + (* union and intersection types *) + (********************************) + + (* Don't split the union type into its constituent members. Instead, reposition the entire union type. *) - | DefT (_, UnionT _), ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - | DefT (_, UnionT _), ObjKitT (use_op, reason, resolve_tool, tool, tout) -> - object_kit cx trace ~use_op reason resolve_tool tool tout l - - (* cases where there is no loss of precision *) - - (** Optimization where an union is a subset of another. Equality modulo - reasons is important for this optimization to be effective, since types - are repositioned everywhere. - - TODO: (1) Define a more general partial equality, that takes into - account unified type variables. (2) Get rid of UnionRep.quick_mem. 
**) - | DefT (_, UnionT rep1), UseT (_, DefT (_, UnionT rep2)) when - let ts2 = Type_mapper.union_flatten cx @@ UnionRep.members rep2 in - Type_mapper.union_flatten cx @@ UnionRep.members rep1 |> List.for_all (fun t1 -> - List.exists (TypeUtil.quick_subtype t1) ts2 - ) -> - () - - (* Optimization to treat maybe and optional types as special unions for subset comparision *) - - | DefT (reason, UnionT rep), UseT (use_op, DefT (r, MaybeT maybe)) -> - let void = (VoidT.why r) in - let null = (NullT.why r) in - let filter_void t = TypeUtil.quick_subtype t void in - let filter_null t = TypeUtil.quick_subtype t null in - let filter_null_and_void t = filter_void t || filter_null t in - let remove_predicate predicate = - UnionRep.members - %> Type_mapper.union_flatten cx - %> Core_list.rev_filter ~f:(predicate %> not) - %> union_of_ts reason in - (* if the union doesn't contain void or null, + | (UnionT _, ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + | (UnionT (_, rep), DestructuringT (reason, DestructAnnot, s, tout)) -> + let (t0, (t1, ts)) = UnionRep.members_nel rep in + let f t = + AnnotT + ( reason, + Tvar.mk_where cx reason (fun tvar -> + rec_flow cx trace (t, DestructuringT (reason, DestructAnnot, s, tvar))), + false ) + in + let rep = UnionRep.make (f t0) (f t1) (Core_list.map ts ~f) in + rec_unify cx trace ~use_op:unknown_use (UnionT (reason, rep)) tout + | (UnionT _, ObjKitT (use_op, reason, resolve_tool, tool, tout)) -> + ObjectKit.run cx trace ~use_op reason resolve_tool tool tout l + (* cases where there is no loss of precision *) + | (UnionT _, UseT (_, (UnionT _ as u))) when union_optimization_guard cx l u -> () + (* Optimization to treat maybe and optional types as special unions for subset comparision *) + | (UnionT (reason, rep), UseT (use_op, MaybeT (r, maybe))) -> + let checked_trust = Context.trust_errors cx in + let void = VoidT.why r |> with_trust bogus_trust in + let null = NullT.why r |> with_trust bogus_trust in + let filter_void t = TypeUtil.quick_subtype checked_trust t void in + let filter_null t = TypeUtil.quick_subtype checked_trust t null in + let filter_null_and_void t = filter_void t || filter_null t in + let remove_predicate predicate = + UnionRep.members + %> Type_mapper.union_flatten cx + %> Core_list.rev_filter ~f:(predicate %> not) + %> union_of_ts reason + in + (* if the union doesn't contain void or null, then everything in it must be upper-bounded by maybe *) - begin match UnionRep.quick_mem_enum void rep, UnionRep.quick_mem_enum null rep with - | UnionRep.No, UnionRep.No -> rec_flow_t ~use_op cx trace (l, maybe) - | UnionRep.Yes, UnionRep.No -> - rec_flow_t ~use_op cx trace (remove_predicate filter_void rep, maybe) - | UnionRep.No, UnionRep.Yes -> - rec_flow_t ~use_op cx trace (remove_predicate filter_null rep, maybe) - | UnionRep.Yes, UnionRep.Yes -> - rec_flow_t ~use_op cx trace (remove_predicate filter_null_and_void rep, maybe) - | _ -> UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u)) - end - - | DefT (reason, UnionT rep), UseT (use_op, DefT (r, OptionalT opt)) -> - let void = (VoidT.why r) in - let remove_void = - UnionRep.members - %> Type_mapper.union_flatten cx - %> Core_list.rev_filter ~f:(fun t -> TypeUtil.quick_subtype t void |> not) - %> union_of_ts reason in - (* if the union doesn't contain void, then everything in it must be upper-bounded by u *) - begin match UnionRep.quick_mem_enum void rep with - | UnionRep.No -> rec_flow_t ~use_op cx trace (l, opt) - | 
UnionRep.Yes -> rec_flow_t ~use_op cx trace (remove_void rep, opt) - | _ -> UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u)) - end - - | DefT (r, UnionT rep), SentinelPropTestT (_reason, l, _key, sense, sentinel, result) -> - (* we have the check l.key === sentinel where l.key is a union *) - if sense then - match sentinel with - | Enum.One enum -> begin - let def = match enum with - | Enum.Str v -> SingletonStrT v - | Enum.Num v -> SingletonNumT v - | Enum.Bool v -> SingletonBoolT v - | Enum.Void -> VoidT - | Enum.Null -> NullT in - match UnionRep.quick_mem_enum (DefT (r, def)) rep with - | UnionRep.No -> () (* provably unreachable, so prune *) - | UnionRep.Yes -> rec_flow_t cx trace (l, result) - | UnionRep.Conditional _ | UnionRep.Unknown -> (* inconclusive: the union is not concretized *) - UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t,u)) + match + ( UnionRep.quick_mem_enum checked_trust void rep, + UnionRep.quick_mem_enum checked_trust null rep ) + with + | (UnionRep.No, UnionRep.No) -> rec_flow_t ~use_op cx trace (l, maybe) + | (UnionRep.Yes, UnionRep.No) -> + rec_flow_t ~use_op cx trace (remove_predicate filter_void rep, maybe) + | (UnionRep.No, UnionRep.Yes) -> + rec_flow_t ~use_op cx trace (remove_predicate filter_null rep, maybe) + | (UnionRep.Yes, UnionRep.Yes) -> + rec_flow_t ~use_op cx trace (remove_predicate filter_null_and_void rep, maybe) + | _ -> UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u)) end - | Enum.Many enums -> - let acc = EnumSet.fold (fun enum acc -> - let def = match enum with - | Enum.Str v -> SingletonStrT v - | Enum.Num v -> SingletonNumT v - | Enum.Bool v -> SingletonBoolT v - | Enum.Void -> VoidT - | Enum.Null -> NullT in - UnionRep.join_quick_mem_results (acc, UnionRep.quick_mem_enum (DefT (r, def)) rep) - ) enums UnionRep.No in - begin match acc with - | UnionRep.No -> () (* provably unreachable, so prune *) - | UnionRep.Yes -> rec_flow_t cx trace (l, result) - | UnionRep.Conditional _ | UnionRep.Unknown -> (* inconclusive: the union is not concretized *) - UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t,u)) + | (UnionT (reason, rep), UseT (use_op, OptionalT (r, opt))) -> + let checked_trust = Context.trust_errors cx in + let void = VoidT.why r |> with_trust bogus_trust in + let remove_void = + UnionRep.members + %> Type_mapper.union_flatten cx + %> Core_list.rev_filter ~f:(fun t -> + TypeUtil.quick_subtype checked_trust t void |> not) + %> union_of_ts reason + in + (* if the union doesn't contain void, then everything in it must be upper-bounded by u *) + begin + match UnionRep.quick_mem_enum checked_trust void rep with + | UnionRep.No -> rec_flow_t ~use_op cx trace (l, opt) + | UnionRep.Yes -> rec_flow_t ~use_op cx trace (remove_void rep, opt) + | _ -> UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u)) end - else - (* for l.key !== sentinel where l.key is a union, we can't really prove + | (UnionT (_, rep1), EqT (_, _, UnionT (_, rep2))) -> + if + match (UnionRep.check_enum rep1, UnionRep.check_enum rep2) with + (* If both enums are subsets of each other, they contain the same elements. 
+ 2 n log n still grows slower than n^2 *) + | (Some enums1, Some enums2) -> + UnionEnumSet.subset enums1 enums2 && UnionEnumSet.subset enums2 enums1 + | _ -> false + then + () + else + UnionRep.members rep1 |> Core_list.iter ~f:(fun t -> rec_flow cx trace (t, u)) + | (UnionT _, EqT (reason, flip, t)) when needs_resolution t -> + rec_flow cx trace (t, EqT (reason, not flip, l)) + | (UnionT (r, rep), SentinelPropTestT (_reason, l, _key, sense, sentinel, result)) -> + (* we have the check l.key === sentinel where l.key is a union *) + if sense then + match sentinel with + | UnionEnum.One enum -> + let def = + match enum with + | UnionEnum.Str v -> SingletonStrT v + | UnionEnum.Num v -> SingletonNumT v + | UnionEnum.Bool v -> SingletonBoolT v + | UnionEnum.Void -> VoidT + | UnionEnum.Null -> NullT + in + (match + UnionRep.quick_mem_enum + (Context.trust_errors cx) + (DefT (r, Trust.bogus_trust (), def)) + rep + with + | UnionRep.No -> () (* provably unreachable, so prune *) + | UnionRep.Yes -> rec_flow_t cx trace (l, result) + | UnionRep.Conditional _ + | UnionRep.Unknown -> + (* inconclusive: the union is not concretized *) + UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u))) + | UnionEnum.Many enums -> + let acc = + UnionEnumSet.fold + (fun enum acc -> + let def = + match enum with + | UnionEnum.Str v -> SingletonStrT v + | UnionEnum.Num v -> SingletonNumT v + | UnionEnum.Bool v -> SingletonBoolT v + | UnionEnum.Void -> VoidT + | UnionEnum.Null -> NullT + in + UnionRep.join_quick_mem_results + ( acc, + UnionRep.quick_mem_enum + (Context.trust_errors cx) + (DefT (r, Trust.bogus_trust (), def)) + rep )) + enums + UnionRep.No + in + begin + match acc with + | UnionRep.No -> () (* provably unreachable, so prune *) + | UnionRep.Yes -> rec_flow_t cx trace (l, result) + | UnionRep.Conditional _ + | UnionRep.Unknown -> + (* inconclusive: the union is not concretized *) + UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u)) + end + else + (* for l.key !== sentinel where l.key is a union, we can't really prove that the check is guaranteed to fail (assuming the union doesn't degenerate to a singleton) *) - rec_flow_t cx trace (l, result) - - | DefT (_, UnionT rep), _ - when (match u with - (* For l.key !== sentinel when sentinel has a union type, don't split the union. This + rec_flow_t cx trace (l, result) + | (UnionT (_, rep), _) + when match u with + (* For l.key !== sentinel when sentinel has a union type, don't split the union. This prevents a drastic blowup of cases which can cause perf problems. 
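+             For instance (informally), when sentinel's type is a large union of
+             string literals, splitting it would re-run the sentinel test once per
+             member of that union.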
*) - | PredicateT (RightP (SentinelProp _, _), _) - | PredicateT (NotP (RightP (SentinelProp _, _)), _) -> false - | _ -> true - ) -> - UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t,u)) - - | _, UseT (use_op, DefT (_, IntersectionT rep)) -> - InterRep.members rep |> List.iter (fun t -> - rec_flow cx trace (l, UseT (use_op, t)) - ) - - (* When a subtyping question involves a union appearing on the right or an + | PredicateT (RightP (SentinelProp _, _), _) + | PredicateT (NotP (RightP (SentinelProp _, _)), _) -> + false + | _ -> true -> + UnionRep.members rep |> List.iter (fun t -> rec_flow cx trace (t, u)) + | (_, UseT (use_op, IntersectionT (_, rep))) -> + InterRep.members rep |> List.iter (fun t -> rec_flow cx trace (l, UseT (use_op, t))) + (* When a subtyping question involves a union appearing on the right or an intersection appearing on the left, the simplification rules are imprecise: we split the union / intersection into cases and try to prove that the subtyping question holds for one of the cases, but each of those @@ -3209,333 +3057,365 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = An orthogonal benefit is that for large unions or intersections, checking inclusion is significantly faster that splitting for proving simple inequalities (O(n) instead of O(n^2) for n cases). *) - - | DefT (_, IntersectionT rep), UseT (_, u) - when List.mem u (InterRep.members rep) -> - () - - | _, UseT (_, DefT (_, UnionT rep)) when - let ts = Type_mapper.union_flatten cx @@ UnionRep.members rep in - List.exists (TypeUtil.quick_subtype l) ts -> - () - - | _, UseT (use_op, DefT (r, UnionT rep)) -> - (* Try the branches of the union in turn, with the goal of selecting the correct branch. This + | (IntersectionT (_, rep), UseT (_, u)) when List.mem u (InterRep.members rep) -> () + (* String enum sets can be handled in logarithmic time by just + * checking for membership in the set. + *) + | (DefT (reason_l, _, StrT (Literal (_, x))), UseT (use_op, UnionT (reason_u, rep))) + when match UnionRep.check_enum rep with + | Some enums -> + if not (UnionEnumSet.mem (UnionEnum.Str x) enums) then + add_output + cx + ~trace + (Error_message.EIncompatibleWithUseOp + (reason_l, UnionRep.specialized_reason reason_u rep, use_op)); + true + | _ -> false -> + () + | (_, UseT (_, UnionT (_, rep))) + when let ts = Type_mapper.union_flatten cx @@ UnionRep.members rep in + List.exists (TypeUtil.quick_subtype (Context.trust_errors cx) l) ts -> + () + | (_, UseT (use_op, UnionT (r, rep))) -> + (* Try the branches of the union in turn, with the goal of selecting the correct branch. This process is reused for intersections as well. See comments on try_union and try_intersection. 
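+           A rough illustration: checking { x: 1 } against { x: number } | string
+           tries the { x: number } branch speculatively (discarding its errors if it
+           fails), then the string branch, keeping a branch that succeeds.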
*) - try_union cx trace use_op l r rep - - (* maybe and optional types are just special union types *) - - | t1, UseT (use_op, DefT (_, MaybeT t2)) -> - rec_flow cx trace (t1, UseT (use_op, t2)) - - | t1, UseT (use_op, DefT (_, OptionalT t2)) -> - rec_flow cx trace (t1, UseT (use_op, t2)) - - (** special treatment for some operations on intersections: these + try_union cx trace use_op l r rep + (* maybe and optional types are just special union types *) + | (t1, UseT (use_op, MaybeT (_, t2))) -> rec_flow cx trace (t1, UseT (use_op, t2)) + | (t1, UseT (use_op, OptionalT (_, t2))) -> rec_flow cx trace (t1, UseT (use_op, t2)) + (* special treatment for some operations on intersections: these rules fire for particular UBs whose constraints can (or must) be resolved against intersection LBs as a whole, instead of by decomposing the intersection into its parts. *) - (** lookup of properties **) - | DefT (_, IntersectionT rep), - LookupT (reason, strict, try_ts_on_failure, s, t) -> - let ts = InterRep.members rep in - assert (ts <> []); - (* Since s could be in any object type in the list ts, we try to look it + (* lookup of properties **) + | (IntersectionT (_, rep), LookupT (reason, strict, try_ts_on_failure, s, t)) -> + let ts = InterRep.members rep in + assert (ts <> []); + + (* Since s could be in any object type in the list ts, we try to look it up in the first element of ts, pushing the rest into the list try_ts_on_failure (see below). *) - rec_flow cx trace - (List.hd ts, - LookupT (reason, strict, (List.tl ts) @ try_ts_on_failure, s, t)) - - | DefT (_, IntersectionT _), TestPropT (reason, _, prop, tout) -> - rec_flow cx trace (l, GetPropT (unknown_use, reason, prop, tout)) - - (** extends **) - | DefT (_, IntersectionT rep), - ExtendsUseT (use_op, reason, try_ts_on_failure, l, u) -> - let t, ts = InterRep.members_nel rep in - let try_ts_on_failure = (Nel.to_list ts) @ try_ts_on_failure in - (* Since s could be in any object type in the list ts, we try to look it + rec_flow + cx + trace + (List.hd ts, LookupT (reason, strict, List.tl ts @ try_ts_on_failure, s, t)) + | (IntersectionT _, TestPropT (reason, _, prop, tout)) -> + rec_flow cx trace (l, GetPropT (unknown_use, reason, prop, tout)) + (* extends **) + | (IntersectionT (_, rep), ExtendsUseT (use_op, reason, try_ts_on_failure, l, u)) -> + let (t, ts) = InterRep.members_nel rep in + let try_ts_on_failure = Nel.to_list ts @ try_ts_on_failure in + (* Since s could be in any object type in the list ts, we try to look it up in the first element of ts, pushing the rest into the list try_ts_on_failure (see below). 
*) - rec_flow cx trace (t, ExtendsUseT (use_op, reason, try_ts_on_failure, l, u)) - - (** consistent override of properties **) - | DefT (_, IntersectionT rep), SuperT (use_op, reason, derived) -> - InterRep.members rep |> List.iter (fun t -> - let u = match use_op with - | Op (ClassExtendsCheck c) -> - let use_op = Op (ClassExtendsCheck { c with extends = reason_of_t t }) in - SuperT (use_op, reason, derived) - | _ -> - u - in - rec_flow cx trace (t, u)) - - (** structural subtype multiple inheritance **) - | DefT (_, IntersectionT rep), ImplementsT (use_op, this) -> - InterRep.members rep |> List.iter (fun t -> - let u = match use_op with - | Op (ClassImplementsCheck c) -> - let use_op = Op (ClassImplementsCheck { c with implements = reason_of_t t }) in - ImplementsT (use_op, this) - | _ -> - u - in - rec_flow cx trace (t, u)) - - (** object types: an intersection may satisfy an object UB without + rec_flow cx trace (t, ExtendsUseT (use_op, reason, try_ts_on_failure, l, u)) + (* consistent override of properties **) + | (IntersectionT (_, rep), SuperT (use_op, reason, derived)) -> + InterRep.members rep + |> List.iter (fun t -> + let u = + match use_op with + | Op (ClassExtendsCheck c) -> + let use_op = Op (ClassExtendsCheck { c with extends = reason_of_t t }) in + SuperT (use_op, reason, derived) + | _ -> u + in + rec_flow cx trace (t, u)) + (* structural subtype multiple inheritance **) + | (IntersectionT (_, rep), ImplementsT (use_op, this)) -> + InterRep.members rep + |> List.iter (fun t -> + let u = + match use_op with + | Op (ClassImplementsCheck c) -> + let use_op = + Op (ClassImplementsCheck { c with implements = reason_of_t t }) + in + ImplementsT (use_op, this) + | _ -> u + in + rec_flow cx trace (t, u)) + (* object types: an intersection may satisfy an object UB without any particular member of the intersection doing so completely. Here we trap object UBs with more than one property, and decompose them into singletons. Note: should be able to do this with LookupT rather than slices, but that approach behaves in nonobvious ways. TODO why? 
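+           (Informally, A & B ~> { x: X, y: Y } is decomposed below into
+           A & B ~> { x: X } and A & B ~> { y: Y }, plus a check against the
+           prototype, so different members of the intersection may satisfy
+           different properties.)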
*) - | DefT (_, IntersectionT _), - UseT (use_op, DefT (r, ObjT { flags; props_tmap; proto_t; dict_t; call_t })) - when SMap.cardinal (Context.find_props cx props_tmap) > 1 -> - iter_real_props cx props_tmap (fun ~is_sentinel:_ x p -> - let pmap = SMap.singleton x p in - let id = Context.make_property_map cx pmap in - let obj = mk_objecttype ~flags ~dict:dict_t ~call:call_t id dummy_prototype in - rec_flow cx trace (l, UseT (use_op, DefT (r, ObjT obj))) - ); - rec_flow cx trace (l, UseT (use_op, proto_t)) - - (** predicates: prevent a predicate upper bound from prematurely decomposing + | ( IntersectionT _, + UseT (use_op, DefT (r, _, ObjT { flags; props_tmap; proto_t; dict_t; call_t })) ) + when SMap.cardinal (Context.find_props cx props_tmap) > 1 -> + Context.iter_real_props cx props_tmap (fun x p -> + let pmap = SMap.singleton x p in + let id = Context.generate_property_map cx pmap in + let obj = mk_objecttype ~flags ~dict:dict_t ~call:call_t id dummy_prototype in + rec_flow cx trace (l, UseT (use_op, DefT (r, bogus_trust (), ObjT obj)))); + rec_flow cx trace (l, UseT (use_op, proto_t)) + (* predicates: prevent a predicate upper bound from prematurely decomposing an intersection lower bound *) - | DefT (_, IntersectionT _), PredicateT (pred, tout) -> - predicate cx trace tout l pred - - (* same for guards *) - | DefT (_, IntersectionT _), GuardT (pred, result, tout) -> - guard cx trace l pred result tout - - (** ObjAssignFromT copies multiple properties from its incoming LB. + | (IntersectionT _, PredicateT (pred, tout)) -> predicate cx trace tout l pred + (* same for guards *) + | (IntersectionT _, GuardT (pred, result, tout)) -> guard cx trace l pred result tout + (* ObjAssignFromT copies multiple properties from its incoming LB. Here we simulate a merged object type by iterating over the entire intersection. *) - | DefT (_, IntersectionT rep), - ObjAssignFromT (reason_op, proto, tout, kind) -> - let tvar = List.fold_left (fun tout t -> - let tvar = match Cache.Fix.find reason_op t with - | Some tvar -> tvar - | None -> - Tvar.mk_where cx reason_op (fun tvar -> - Cache.Fix.add reason_op t tvar; - rec_flow cx trace (t, ObjAssignFromT (reason_op, proto, tvar, kind)) - ) - in - rec_flow_t cx trace (tvar, tout); - tvar - ) (Tvar.mk cx reason_op) (InterRep.members rep) in - rec_flow_t cx trace (tvar, tout) - - (** This duplicates the (_, ReposLowerT u) near the end of this pattern + | (IntersectionT (_, rep), ObjAssignFromT (use_op, reason_op, proto, tout, kind)) -> + let tvar = + List.fold_left + (fun tout t -> + let tvar = + match Cache.Fix.find reason_op t with + | Some tvar -> tvar + | None -> + Tvar.mk_where cx reason_op (fun tvar -> + Cache.Fix.add reason_op t tvar; + rec_flow cx trace (t, ObjAssignFromT (use_op, reason_op, proto, tvar, kind))) + in + rec_flow_t cx ~use_op trace (tvar, tout); + tvar) + (Tvar.mk cx reason_op) + (InterRep.members rep) + in + rec_flow_t cx ~use_op trace (tvar, tout) + (* This duplicates the (_, ReposLowerT u) near the end of this pattern match but has to appear here to preempt the (IntersectionT, _) in between so that we reposition the entire intersection. 
*) - | DefT (_, IntersectionT _), ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - | DefT (_, IntersectionT _), ObjKitT (use_op, reason, resolve_tool, tool, tout) -> - object_kit cx trace ~use_op reason resolve_tool tool tout l - - (* CallT uses that arise from the CallType type destructor are processed + | (IntersectionT _, ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + | (IntersectionT _, ObjKitT (use_op, reason, resolve_tool, tool, tout)) -> + ObjectKit.run cx trace ~use_op reason resolve_tool tool tout l + (* CallT uses that arise from the CallType type destructor are processed without preparation (see below). This is because in these cases, the return type is intended to be 0-1, whereas preparation (as implemented currently) destroys 0-1 behavior. *) - | DefT (r, IntersectionT rep), CallT (_, reason, _) when is_calltype_reason reason -> - try_intersection cx trace u r rep - - (** All other pairs with an intersection lower bound come here. Before + | (IntersectionT (r, rep), CallT (_, reason, _)) when is_calltype_reason reason -> + try_intersection cx trace u r rep + (* All other pairs with an intersection lower bound come here. Before further processing, we ensure that the upper bound is concretized. See prep_try_intersection for details. **) - (* (After the above preprocessing step, try the branches of the intersection + (* (After the above preprocessing step, try the branches of the intersection in turn, with the goal of selecting the correct branch. This process is reused for unions as well. See comments on try_union and try_intersection.) *) - - | DefT (r, IntersectionT rep), u -> - prep_try_intersection cx trace - (reason_of_use_t u) (parts_to_replace u) [] u r rep - - (************) - (* matching *) - (************) - - | MatchingPropT (reason, x, t), UseT (_, l) -> - (* Things that can have properties are object-like (objects, instances, + | (IntersectionT (r, rep), u) -> + prep_try_intersection cx trace (reason_of_use_t u) (parts_to_replace cx u) [] u r rep + (************) + (* matching *) + (************) + | (MatchingPropT (reason, x, t), UseT (use_op, l)) -> + (* Things that can have properties are object-like (objects, instances, and their exact versions). Notably, "meta" types like union, annot, typeapp, eval, maybe, optional, and intersection should have boiled away by this point. *) - let propref = Named (reason, x) in - let strict = NonstrictReturning (None, None) in - let u = LookupT (reason, strict, [], propref, MatchProp (unknown_use, t)) in - rec_flow cx trace (l, u) - - | MatchingPropT _, _ when is_use u -> - () (* TODO: empty? *) - - (*************************) - (* Resolving rest params *) - (*************************) - - (* `any` is obviously fine as a spread element. `Object` is fine because - * any Iterable can be spread, and `Object` is the any type that covers - * iterable objects. 
*) - | DefT (r, (AnyT | AnyObjT)), - ResolveSpreadT (use_op, reason_op, { - rrt_resolved; - rrt_unresolved; - rrt_resolve_to; - }) -> - - let rrt_resolved = (ResolvedAnySpreadArg r)::rrt_resolved in - resolve_spread_list_rec - cx ~trace ~use_op ~reason_op - (rrt_resolved, rrt_unresolved) rrt_resolve_to - - | _, - ResolveSpreadT (use_op, reason_op, { - rrt_resolved; - rrt_unresolved; - rrt_resolve_to; - }) -> - let reason = reason_of_t l in - - let r, arrtype = match l with - | DefT (r, ArrT arrtype) -> - (* Arrays *) - r, arrtype - | _ -> - (* Non-array non-any iterables *) - let reason = reason_of_t l in - let element_tvar = Tvar.mk cx reason in - let iterable = - let targs = [element_tvar; AnyT.why reason; AnyT.why reason] in - get_builtin_typeapp cx - (replace_reason_const (RCustom "Iterable expected for spread") reason) - "$Iterable" targs - in - flow_t cx (l, iterable); - reason, ArrayAT (element_tvar, None) - in - - let elemt = elemt_of_arrtype r arrtype in - - begin match rrt_resolve_to with - (* Any ResolveSpreadsTo* which does some sort of constant folding needs to - * carry an id around to break the infinite recursion that constant - * constant folding can trigger *) - | ResolveSpreadsToTuple (id, elem_t, tout) - | ResolveSpreadsToArrayLiteral (id, elem_t, tout) -> - (* You might come across code like - * - * for (let x = 1; x < 3; x++) { foo = [...foo, x]; } - * - * where every time you spread foo, you flow another type into foo. So - * each time `l ~> ResolveSpreadT` is processed, it might produce a new - * `l ~> ResolveSpreadT` with a new `l`. - * - * Here is how we avoid this: - * - * 1. We use ConstFoldExpansion to detect when we see a ResolveSpreadT - * upper bound multiple times - * 2. When a ResolveSpreadT upper bound multiple times, we change it into - * a ResolveSpreadT upper bound that resolves to a more general type. - * This should prevent more distinct lower bounds from flowing in - * 3. rec_flow caches (l,u) pairs. - *) - - - let reason_elemt = reason_of_t elemt in - ConstFoldExpansion.guard id reason_elemt (fun recursion_depth -> - match recursion_depth with - | 0 -> - (* The first time we see this, we process it normally *) - let rrt_resolved = - ResolvedSpreadArg(reason, arrtype)::rrt_resolved in - resolve_spread_list_rec - cx ~trace ~use_op ~reason_op - (rrt_resolved, rrt_unresolved) rrt_resolve_to - | 1 -> - (* To avoid infinite recursion, let's deconstruct to a simpler case - * where we no longer resolve to a tuple but instead just resolve to - * an array. *) - rec_flow cx trace (l, ResolveSpreadT (use_op, reason_op, { - rrt_resolved; - rrt_unresolved; - rrt_resolve_to = ResolveSpreadsToArray (elem_t, tout); - })) - | _ -> - (* We've already deconstructed, so there's nothing left to do *) - () - ) - - | ResolveSpreadsToMultiflowCallFull (id, _) - | ResolveSpreadsToMultiflowSubtypeFull (id, _) - | ResolveSpreadsToCustomFunCall (id, _, _) - | ResolveSpreadsToMultiflowPartial (id, _, _, _) -> - let reason_elemt = reason_of_t elemt in - ConstFoldExpansion.guard id reason_elemt (fun recursion_depth -> - match recursion_depth with - | 0 -> - (* The first time we see this, we process it normally *) - let rrt_resolved = - ResolvedSpreadArg(reason, arrtype)::rrt_resolved in - resolve_spread_list_rec - cx ~trace ~use_op ~reason_op - (rrt_resolved, rrt_unresolved) rrt_resolve_to - | 1 -> - (* Consider - * - * function foo(...args) { foo(1, ...args); } - * foo(); - * - * Because args is unannotated, we try to infer it. 
However, due to - * the constant folding we do with spread arguments, we'll first - * infer that it is [], then [] | [1], then [] | [1] | [1,1] ...etc - * - * We can recognize that we're stuck in a constant folding loop. But - * how to break it? - * - * In this case, we are constant folding by recognizing when args is - * a tuple or an array literal. We can break the loop by turning - * tuples or array literals into simple arrays. - *) - - let new_arrtype = match arrtype with - (* These can get us into constant folding loops *) - | ArrayAT (elemt, Some _) - | TupleAT (elemt, _) -> ArrayAT (elemt, None) - (* These cannot *) - | ArrayAT (_, None) - | ROArrayAT _ - | EmptyAT -> arrtype in - - let rrt_resolved = - ResolvedSpreadArg(reason, new_arrtype)::rrt_resolved in - resolve_spread_list_rec - cx ~trace ~use_op ~reason_op - (rrt_resolved, rrt_unresolved) rrt_resolve_to - | _ -> () - ) - - (* no caching *) - | ResolveSpreadsToArray _ - | ResolveSpreadsToCallT _ - -> - let rrt_resolved = ResolvedSpreadArg(reason, arrtype)::rrt_resolved in - resolve_spread_list_rec - cx ~trace ~use_op ~reason_op - (rrt_resolved, rrt_unresolved) rrt_resolve_to - end - - (* singleton lower bounds are equivalent to the corresponding + let propref = Named (reason, x) in + let strict = NonstrictReturning (None, None) in + let u = LookupT (reason, strict, [], propref, MatchProp (use_op, t)) in + rec_flow cx trace (l, u) + | (MatchingPropT _, _) when is_use u -> () (* TODO: empty? *) + (*************************) + (* Resolving rest params *) + (*************************) + + (* `any` is obviously fine as a spread element. `Object` is fine because + * any Iterable can be spread, and `Object` is the any type that covers + * iterable objects. *) + | ( AnyT (r, _), + ResolveSpreadT (use_op, reason_op, { rrt_resolved; rrt_unresolved; rrt_resolve_to }) ) + -> + let rrt_resolved = ResolvedAnySpreadArg r :: rrt_resolved in + resolve_spread_list_rec + cx + ~trace + ~use_op + ~reason_op + (rrt_resolved, rrt_unresolved) + rrt_resolve_to + | (_, ResolveSpreadT (use_op, reason_op, { rrt_resolved; rrt_unresolved; rrt_resolve_to })) + -> + let reason = reason_of_t l in + let arrtype = + match l with + | DefT (_, _, ArrT arrtype) -> + (* Arrays *) + arrtype + | _ -> + (* Non-array non-any iterables *) + let resolve_array_like = + match rrt_resolve_to with + (* Spreading iterables in a type context is always OK *) + | ResolveSpreadsToMultiflowSubtypeFull _ -> false + (* Function.prototype.apply takes array-likes, not iterables *) + | ResolveSpreadsToCallT _ -> true + (* Otherwise we're spreading values, which we may need to warn about *) + | ResolveSpreadsToArray _ + | ResolveSpreadsToArrayLiteral _ + | ResolveSpreadsToCustomFunCall _ + | ResolveSpreadsToMultiflowCallFull _ + | ResolveSpreadsToMultiflowPartial _ + | ResolveSpreadsToTuple _ -> + add_output cx ~trace (Error_message.ENonArraySpread reason); + false + in + let reason = reason_of_t l in + let element_tvar = Tvar.mk cx reason in + let iterable_or_array_like = + if resolve_array_like then + let targs = [element_tvar] in + get_builtin_typeapp + cx + (replace_desc_new_reason + (RCustom "Array-like object expected for apply") + reason) + "$ArrayLike" + targs + else + let targs = + [ + element_tvar; + Unsoundness.why ResolveSpread reason; + Unsoundness.why ResolveSpread reason; + ] + in + get_builtin_typeapp + cx + (replace_desc_new_reason (RCustom "Iterable expected for spread") reason) + "$Iterable" + targs + in + flow_t cx (l, iterable_or_array_like); + ArrayAT 
(element_tvar, None)
+          in
+          let elemt = elemt_of_arrtype arrtype in
+          begin
+            match rrt_resolve_to with
+            (* Any ResolveSpreadsTo* which does some sort of constant folding needs to
+             * carry an id around to break the infinite recursion that
+             * constant folding can trigger *)
+            | ResolveSpreadsToTuple (id, elem_t, tout)
+            | ResolveSpreadsToArrayLiteral (id, elem_t, tout) ->
+              (* You might come across code like
+               *
+               * for (let x = 1; x < 3; x++) { foo = [...foo, x]; }
+               *
+               * where every time you spread foo, you flow another type into foo. So
+               * each time `l ~> ResolveSpreadT` is processed, it might produce a new
+               * `l ~> ResolveSpreadT` with a new `l`.
+               *
+               * Here is how we avoid this:
+               *
+               * 1. We use ConstFoldExpansion to detect when we see a ResolveSpreadT
+               *    upper bound multiple times
+               * 2. When we see a ResolveSpreadT upper bound multiple times, we change it into
+               *    a ResolveSpreadT upper bound that resolves to a more general type.
+               *    This should prevent more distinct lower bounds from flowing in
+               * 3. rec_flow caches (l,u) pairs.
+               *)
+              let reason_elemt = reason_of_t elemt in
+              let pos = Core_list.length rrt_resolved in
+              ConstFoldExpansion.guard id (reason_elemt, pos) (fun recursion_depth ->
+                  match recursion_depth with
+                  | 0 ->
+                    (* The first time we see this, we process it normally *)
+                    let rrt_resolved = ResolvedSpreadArg (reason, arrtype) :: rrt_resolved in
+                    resolve_spread_list_rec
+                      cx
+                      ~trace
+                      ~use_op
+                      ~reason_op
+                      (rrt_resolved, rrt_unresolved)
+                      rrt_resolve_to
+                  | 1 ->
+                    (* To avoid infinite recursion, let's deconstruct to a simpler case
+                     * where we no longer resolve to a tuple but instead just resolve to
+                     * an array. *)
+                    rec_flow
+                      cx
+                      trace
+                      ( l,
+                        ResolveSpreadT
+                          ( use_op,
+                            reason_op,
+                            {
+                              rrt_resolved;
+                              rrt_unresolved;
+                              rrt_resolve_to = ResolveSpreadsToArray (elem_t, tout);
+                            } ) )
+                  | _ ->
+                    (* We've already deconstructed, so there's nothing left to do *)
+                    ())
+            | ResolveSpreadsToMultiflowCallFull (id, _)
+            | ResolveSpreadsToMultiflowSubtypeFull (id, _)
+            | ResolveSpreadsToCustomFunCall (id, _, _)
+            | ResolveSpreadsToMultiflowPartial (id, _, _, _) ->
+              let reason_elemt = reason_of_t elemt in
+              let pos = Core_list.length rrt_resolved in
+              ConstFoldExpansion.guard id (reason_elemt, pos) (fun recursion_depth ->
+                  match recursion_depth with
+                  | 0 ->
+                    (* The first time we see this, we process it normally *)
+                    let rrt_resolved = ResolvedSpreadArg (reason, arrtype) :: rrt_resolved in
+                    resolve_spread_list_rec
+                      cx
+                      ~trace
+                      ~use_op
+                      ~reason_op
+                      (rrt_resolved, rrt_unresolved)
+                      rrt_resolve_to
+                  | 1 ->
+                    (* Consider
+                     *
+                     * function foo(...args) { foo(1, ...args); }
+                     * foo();
+                     *
+                     * Because args is unannotated, we try to infer it. However, due to
+                     * the constant folding we do with spread arguments, we'll first
+                     * infer that it is [], then [] | [1], then [] | [1] | [1,1] ...etc
+                     *
+                     * We can recognize that we're stuck in a constant folding loop. But
+                     * how to break it?
+                     *
+                     * In this case, we are constant folding by recognizing when args is
+                     * a tuple or an array literal. We can break the loop by turning
+                     * tuples or array literals into simple arrays.
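+                     *
+                     * Informally, a spread argument inferred as the tuple type [1] (or as
+                     * an array literal type) is treated as a plain Array<number> on this
+                     * second pass, so the inferred type stops growing.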
+ *) + let new_arrtype = + match arrtype with + (* These can get us into constant folding loops *) + | ArrayAT (elemt, Some _) + | TupleAT (elemt, _) -> + ArrayAT (elemt, None) + (* These cannot *) + | ArrayAT (_, None) + | ROArrayAT _ -> + arrtype + in + let rrt_resolved = ResolvedSpreadArg (reason, new_arrtype) :: rrt_resolved in + resolve_spread_list_rec + cx + ~trace + ~use_op + ~reason_op + (rrt_resolved, rrt_unresolved) + rrt_resolve_to + | _ -> ()) + (* no caching *) + | ResolveSpreadsToArray _ + | ResolveSpreadsToCallT _ -> + let rrt_resolved = ResolvedSpreadArg (reason, arrtype) :: rrt_resolved in + resolve_spread_list_rec + cx + ~trace + ~use_op + ~reason_op + (rrt_resolved, rrt_unresolved) + rrt_resolve_to + end + (* singleton lower bounds are equivalent to the corresponding primitive with a literal constraint. These conversions are low precedence to allow equality exploits above, such as the UnionT membership check, to fire. @@ -3548,93 +3428,79 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = from the latter kind of flow, but it's unclear how difficult it would be in practice. *) - - | DefT (_, (SingletonStrT _ | SingletonNumT _ | SingletonBoolT _)), - ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - | DefT (reason, SingletonStrT key), _ -> - rec_flow cx trace (DefT (reason, StrT (Literal (None, key))), u) - - | DefT (reason, SingletonNumT lit), _ -> - rec_flow cx trace (DefT (reason, NumT (Literal (None, lit))), u) - - | DefT (reason, SingletonBoolT b), _ -> - rec_flow cx trace (DefT (reason, BoolT (Some b)), u) - - (* NullProtoT is necessary as an upper bound, to distinguish between - (ObjT _, NullProtoT _) constraints and (ObjT _, DefT (_, NullT)), but as - a lower bound, it's the same as DefT (_, NullT) *) - | NullProtoT reason, _ -> - rec_flow cx trace (DefT (reason, NullT), u) - - (************************************************************************) - (* exact object types *) - (************************************************************************) - - (* ExactT comes from annotation, may behave as LB or UB *) - - (* when $Exact ~> UB, forward to MakeExactT *) - | ExactT (r, t), _ -> - rec_flow cx trace (t, MakeExactT (r, Upper u)) - - (* ObjT LB ~> $Exact. make exact if exact and unsealed *) - | DefT (_, ObjT { flags; _ }), UseT (use_op, ExactT (r, t)) -> - if flags.exact && (Obj_type.sealed_in_op r flags.sealed) - then rec_flow cx trace (t, MakeExactT (r, Lower (use_op, l))) - else begin - let reasons = FlowError.ordered_reasons (reason_of_t l, r) in - add_output cx ~trace (FlowError.EIncompatibleWithExact (reasons, use_op)); - (* Continue the Flow even after we've errored. Often, there is more that - * is different then just the fact that the upper bound is exact and the - * lower bound is not. This could easily hide errors in ObjT ~> ExactT *) - rec_flow cx trace (l, UseT (use_op, t)) - end - - (* any specializations ~> $Exact. unwrap exact *) - | DefT (_, AnyObjT), UseT (use_op, ExactT (_, t)) - | DefT (_, AnyFunT), UseT (use_op, ExactT (_, t)) -> - rec_flow cx trace (l, UseT (use_op, t)) - - (* inexact LB ~> $Exact. 
error *) - | _, UseT (use_op, ExactT (ru, _)) -> - let reasons = FlowError.ordered_reasons (reason_of_t l, ru) in - add_output cx ~trace (FlowError.EIncompatibleWithExact (reasons, use_op)) - - (* LB ~> MakeExactT (_, UB) exactifies LB, then flows result to UB *) - - (* exactify incoming LB object type, flow to UB *) - | DefT (r, ObjT obj), MakeExactT (_, Upper u) -> - let exactobj = { obj with flags = { obj.flags with exact = true } } in - rec_flow cx trace (DefT (r, ObjT exactobj), u) - - (* exactify incoming UB object type, flow to LB *) - | DefT (ru, ObjT obj_u), MakeExactT (reason_op, Lower (use_op, l)) -> - (* forward to standard obj ~> obj *) - let ru = repos_reason (aloc_of_reason reason_op |> ALoc.to_loc) ru in - let xu = { obj_u with flags = { obj_u.flags with exact = true } } in - rec_flow cx trace (l, UseT (use_op, DefT (ru, ObjT xu))) - - | DefT (_, AnyT), MakeExactT (reason_op, k) -> - continue cx trace (AnyT.why reason_op) k - - | DefT (_, VoidT), MakeExactT (reason_op, k) -> - continue cx trace (VoidT.why reason_op) k - - | DefT (_, EmptyT), MakeExactT (reason_op, k) -> - continue cx trace (EmptyT.why reason_op) k - - (* unsupported kind *) - | _, MakeExactT (ru, _) -> - add_output cx ~trace (FlowError.EUnsupportedExact (ru, reason_of_t l)) - - (*******************************************) - (* Refinement based on function predicates *) - (*******************************************) - - (** Call to predicated (latent) functions *) - - (* Calls to functions appearing in predicate refinement contexts dispatch + | ( DefT (_, _, (SingletonStrT _ | SingletonNumT _ | SingletonBoolT _)), + ReposLowerT (reason, use_desc, u) ) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + | (DefT (reason, trust, SingletonStrT key), _) -> + rec_flow cx trace (DefT (reason, trust, StrT (Literal (None, key))), u) + | (DefT (reason, trust, SingletonNumT lit), _) -> + rec_flow cx trace (DefT (reason, trust, NumT (Literal (None, lit))), u) + | (DefT (reason, trust, SingletonBoolT b), _) -> + rec_flow cx trace (DefT (reason, trust, BoolT (Some b)), u) + (* NullProtoT is necessary as an upper bound, to distinguish between + (ObjT _, NullProtoT _) constraints and (ObjT _, DefT (_, _, NullT)), but as + a lower bound, it's the same as DefT (_, _, NullT) *) + | (NullProtoT reason, _) -> rec_flow cx trace (DefT (reason, bogus_trust (), NullT), u) + (************************************************************************) + (* exact object types *) + (************************************************************************) + + (* ExactT comes from annotation, may behave as LB or UB *) + + (* when $Exact ~> UB, forward to MakeExactT *) + | (ExactT (r, t), _) -> rec_flow cx trace (t, MakeExactT (r, Upper u)) + (* ObjT LB ~> $Exact. make exact if exact and unsealed *) + | (DefT (_, _, ObjT { flags; _ }), UseT (use_op, ExactT (r, t))) -> + if flags.exact && Obj_type.sealed_in_op r flags.sealed then + rec_flow cx trace (t, MakeExactT (r, Lower (use_op, l))) + else + let reasons = FlowError.ordered_reasons (reason_of_t l, r) in + add_output cx ~trace (Error_message.EIncompatibleWithExact (reasons, use_op)); + + (* Continue the Flow even after we've errored. Often, there is more that + * is different then just the fact that the upper bound is exact and the + * lower bound is not. This could easily hide errors in ObjT ~> ExactT *) + rec_flow cx trace (l, UseT (use_op, t)) + (* any ~> $Exact. 
unwrap exact *) + | (AnyT _, UseT (use_op, ExactT (_, t))) -> rec_flow cx trace (l, UseT (use_op, t)) + | (DefT (_, _, EmptyT _), UseT (use_op, ExactT (_, t))) -> + rec_flow cx trace (l, UseT (use_op, t)) + (* Shapes need to be trapped here to avoid error-ing when used as exact types. Below (see + "matching shapes of objects"), we have a rule that allows ShapeT(o) to be used just as o is + allowed to be used. *) + | (ShapeT o, UseT (_, ExactT _)) -> rec_flow cx trace (o, u) + (* inexact LB ~> $Exact. error *) + | (_, UseT (use_op, ExactT (ru, _))) -> + let reasons = FlowError.ordered_reasons (reason_of_t l, ru) in + add_output cx ~trace (Error_message.EIncompatibleWithExact (reasons, use_op)) + (* LB ~> MakeExactT (_, UB) exactifies LB, then flows result to UB *) + + (* exactify incoming LB object type, flow to UB *) + | (DefT (r, trust, ObjT obj), MakeExactT (_, Upper u)) -> + let exactobj = { obj with flags = { obj.flags with exact = true } } in + rec_flow cx trace (DefT (r, trust, ObjT exactobj), u) + (* exactify incoming UB object type, flow to LB *) + | (DefT (ru, trust, ObjT obj_u), MakeExactT (reason_op, Lower (use_op, l))) -> + (* forward to standard obj ~> obj *) + let ru = repos_reason (aloc_of_reason reason_op) ru in + let xu = { obj_u with flags = { obj_u.flags with exact = true } } in + rec_flow cx trace (l, UseT (use_op, DefT (ru, trust, ObjT xu))) + | (AnyT (_, src), MakeExactT (reason_op, k)) -> + continue cx trace (AnyT.why src reason_op) k + | (DefT (_, trust, VoidT), MakeExactT (reason_op, k)) -> + continue cx trace (VoidT.why reason_op trust) k + | (DefT (_, trust, EmptyT _), MakeExactT (reason_op, k)) -> + continue cx trace (EmptyT.why reason_op trust) k + (* unsupported kind *) + | (_, MakeExactT (ru, _)) -> + add_output cx ~trace (Error_message.EUnsupportedExact (ru, reason_of_t l)) + (*******************************************) + (* Refinement based on function predicates *) + (*******************************************) + + (* Call to predicated (latent) functions *) + + (* Calls to functions appearing in predicate refinement contexts dispatch to this case. Here, the return type of the function holds the predicate that will refine the incoming `unrefined_t` and flow a filtered (refined) version of this type into `fresh_t`. @@ -3659,217 +3525,230 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = `params`) raise errors, but also propagate the unrefined types (as if the refinement never took place). 
*) - | DefT (lreason, FunT (_, _, { - params; - return_t; - is_predicate = true; - _ - })), - CallLatentPredT (reason, sense, index, unrefined_t, fresh_t) -> - (* TODO: for the moment we only support simple keys (empty projection) + | ( DefT (lreason, _, FunT (_, _, { params; return_t; is_predicate = true; _ })), + CallLatentPredT (reason, sense, index, unrefined_t, fresh_t) ) -> + (* TODO: for the moment we only support simple keys (empty projection) that exactly correspond to the function's parameters *) - let name_or_err = try - let (name, _) = List.nth params (index-1) in - Ok name - with - | Invalid_argument _ -> - Error ("Negative refinement index.", - (lreason, reason)) - | Failure msg when msg = "nth" -> - let r1 = replace_reason (fun desc -> RCustom ( - spf "%s that uses predicate on parameter at position %d" - (string_of_desc desc) - index - )) reason in - let r2 = replace_reason (fun desc -> RCustom ( - spf "%s with %d parameters" - (string_of_desc desc) - (List.length params) - )) lreason in - Error ("This is incompatible with", (r1, r2)) - in - (match name_or_err with - | Ok (Some name) -> - let key = name, [] in - rec_flow cx trace - (return_t, CallOpenPredT (reason, sense, key, unrefined_t, fresh_t)) - | Ok None -> - let loc = aloc_of_reason lreason in - add_output cx ~trace FlowError.(EInternal - (loc |> ALoc.to_loc, PredFunWithoutParamNames)) - | Error (msg, reasons) -> - add_output cx ~trace (FlowError.EFunPredCustom (reasons, msg)); - rec_flow_t cx trace (unrefined_t, fresh_t)) - - - (* Fall through all the remaining cases *) - | _, CallLatentPredT (_,_,_,unrefined_t, fresh_t) -> - rec_flow_t cx trace (unrefined_t, fresh_t) - - (** Trap the return type of a predicated function *) - - | OpenPredT (_, _, p_pos, p_neg), - CallOpenPredT (_, sense, key, unrefined_t, fresh_t) -> - begin - let preds = if sense then p_pos else p_neg in - match Key_map.get key preds with - | Some p -> rec_flow cx trace (unrefined_t, PredicateT (p, fresh_t)) - | _ -> rec_flow_t cx trace (unrefined_t, fresh_t) - end - - (* Any other flow to `CallOpenPredT` does not actually refine the + let name_or_err = + try + let (name, _) = List.nth params (index - 1) in + Ok name + with + | Invalid_argument _ -> Error ("Negative refinement index.", (lreason, reason)) + | Failure msg when msg = "nth" -> + let r1 = + update_desc_new_reason + (fun desc -> + RCustom + (spf + "%s that uses predicate on parameter at position %d" + (string_of_desc desc) + index)) + reason + in + let r2 = + update_desc_new_reason + (fun desc -> + RCustom + (spf "%s with %d parameters" (string_of_desc desc) (List.length params))) + lreason + in + Error ("This is incompatible with", (r1, r2)) + in + (match name_or_err with + | Ok (Some name) -> + let key = (name, []) in + rec_flow cx trace (return_t, CallOpenPredT (reason, sense, key, unrefined_t, fresh_t)) + | Ok None -> + let loc = aloc_of_reason lreason in + add_output cx ~trace Error_message.(EInternal (loc, PredFunWithoutParamNames)) + | Error (msg, reasons) -> + add_output cx ~trace (Error_message.EFunPredCustom (reasons, msg)); + rec_flow_t cx trace (unrefined_t, fresh_t)) + (* Fall through all the remaining cases *) + | (_, CallLatentPredT (_, _, _, unrefined_t, fresh_t)) -> + rec_flow_t cx trace (unrefined_t, fresh_t) + (* Trap the return type of a predicated function *) + | (OpenPredT (_, _, p_pos, p_neg), CallOpenPredT (_, sense, key, unrefined_t, fresh_t)) -> + let preds = + if sense then + p_pos + else + p_neg + in + (match Key_map.get key preds with + | Some p -> rec_flow 
cx trace (unrefined_t, PredicateT (p, fresh_t)) + | _ -> rec_flow_t cx trace (unrefined_t, fresh_t)) + (* Any other flow to `CallOpenPredT` does not actually refine the type in question so we just fall back to regular flow. *) - | _, CallOpenPredT (_, _, _, unrefined_t, fresh_t) -> - rec_flow_t cx trace (unrefined_t, fresh_t) - - (********************************) - (* Function-predicate subtyping *) - (********************************) - - (* When decomposing function subtyping for predicated functions we need to - * pair-up the predicates that each of the two functions established - * before we can check for predicate implication. The predicates encoded - * inside the two `OpenPredT`s refer to the formal parameters of the two - * functions (which are not the same). `SubstOnPredT` is a use that does - * this matching by carrying a substitution (`subst`) from keys from the - * function in the left-hand side to keys in the right-hand side. - * - * Each matched pair of predicates is subsequently checked for consistency. - *) - | OpenPredT (_, t1, _, _), - SubstOnPredT (_, _, OpenPredT (_, t2, p_pos_2, p_neg_2)) - when Key_map.(is_empty p_pos_2 && is_empty p_neg_2) -> - rec_flow_t cx trace (t1, t2) - - | OpenPredT _, UseT (_, OpenPredT _) -> - let loc = aloc_of_reason (reason_of_use_t u) in - add_output cx ~trace FlowError.(EInternal (loc |> ALoc.to_loc, OpenPredWithoutSubst)) - - (*********************************************) - (* Using predicate functions as regular ones *) - (*********************************************) - - | OpenPredT (_, l, _, _), _ -> rec_flow cx trace (l, u) - - (********************) - (* mixin conversion *) - (********************) - - (* A class can be viewed as a mixin by extracting its immediate properties, + | (_, CallOpenPredT (_, _, _, unrefined_t, fresh_t)) -> + rec_flow_t cx trace (unrefined_t, fresh_t) + (********************************) + (* Function-predicate subtyping *) + (********************************) + + (* When decomposing function subtyping for predicated functions we need to + * pair-up the predicates that each of the two functions established + * before we can check for predicate implication. The predicates encoded + * inside the two `OpenPredT`s refer to the formal parameters of the two + * functions (which are not the same). `SubstOnPredT` is a use that does + * this matching by carrying a substitution (`subst`) from keys from the + * function in the left-hand side to keys in the right-hand side. + * + * Each matched pair of predicates is subsequently checked for consistency. 
+ *) + | (OpenPredT (_, t1, _, _), SubstOnPredT (_, _, OpenPredT (_, t2, p_pos_2, p_neg_2))) + when Key_map.(is_empty p_pos_2 && is_empty p_neg_2) -> + rec_flow_t cx trace (t1, t2) + | (OpenPredT _, UseT (_, OpenPredT _)) -> + let loc = aloc_of_reason (reason_of_use_t u) in + add_output cx ~trace Error_message.(EInternal (loc, OpenPredWithoutSubst)) + (*********************************************) + (* Using predicate functions as regular ones *) + (*********************************************) + | (OpenPredT (_, l, _, _), _) -> rec_flow cx trace (l, u) + (********************) + (* mixin conversion *) + (********************) + + (* A class can be viewed as a mixin by extracting its immediate properties, and "erasing" its static and super *) - - | ThisClassT (_, DefT (_, InstanceT (_, _, _, instance))), MixinT (r, tvar) -> - let static = ObjProtoT r in - let super = ObjProtoT r in - rec_flow cx trace ( - this_class_type (DefT (r, InstanceT (static, super, [], instance))), - UseT (unknown_use, tvar) - ) - - | DefT (_, PolyT (xs, ThisClassT (_, DefT (_, InstanceT (_, _, _, insttype))), _)), - MixinT (r, tvar) -> - let static = ObjProtoT r in - let super = ObjProtoT r in - let instance = DefT (r, InstanceT (static, super, [], insttype)) in - rec_flow cx trace ( - poly_type (Context.make_nominal cx) xs (this_class_type instance), - UseT (unknown_use, tvar) - ) - - | DefT (_, AnyT), MixinT (r, tvar) -> - rec_flow_t cx trace (AnyT.why r, tvar) - - (* TODO: it is conceivable that other things (e.g. functions) could also be + | (ThisClassT (_, DefT (_, trust, InstanceT (_, _, _, instance))), MixinT (r, tvar)) -> + let static = ObjProtoT r in + let super = ObjProtoT r in + rec_flow + cx + trace + ( this_class_type (DefT (r, trust, InstanceT (static, super, [], instance))), + UseT (unknown_use, tvar) ) + | ( DefT + ( _, + _, + PolyT + ( tparams_loc, + xs, + ThisClassT (_, DefT (_, trust, InstanceT (_, _, _, insttype))), + _ ) ), + MixinT (r, tvar) ) -> + let static = ObjProtoT r in + let super = ObjProtoT r in + let instance = DefT (r, trust, InstanceT (static, super, [], insttype)) in + rec_flow + cx + trace + ( poly_type (Context.make_nominal cx) tparams_loc xs (this_class_type instance), + UseT (unknown_use, tvar) ) + | (AnyT (_, src), MixinT (r, tvar)) -> rec_flow_t cx trace (AnyT.why src r, tvar) + (* TODO: it is conceivable that other things (e.g. functions) could also be viewed as mixins (e.g. by extracting properties in their prototypes), but such enhancements are left as future work. *) - (***************************************) - (* generic function may be specialized *) - (***************************************) + (***************************************) + (* generic function may be specialized *) + (***************************************) - (* Instantiate a polymorphic definition using the supplied type + (* Instantiate a polymorphic definition using the supplied type arguments. Use the instantiation cache if directed to do so by the operation. (SpecializeT operations are created when processing TypeAppT types, so the decision to cache or not originates there.) 
*) - - | DefT (_, PolyT (xs,t,id)), SpecializeT(use_op,reason_op,reason_tapp,cache,ts,tvar) -> - let ts = Option.value ts ~default:[] in - let t_ = mk_typeapp_of_poly cx trace ~use_op ~reason_op ~reason_tapp ?cache id xs t ts in - rec_flow_t cx trace (t_, tvar) - - | DefT (_, PolyT (tps, _, _)), VarianceCheckT(_, ts, polarity) -> - variance_check cx ~trace polarity (tps, ts) - - (* When we are checking the polarity of a super class where the super class has no type + | ( DefT (_, _, PolyT (tparams_loc, xs, t, id)), + SpecializeT (use_op, reason_op, reason_tapp, cache, ts, tvar) ) -> + let ts = Option.value ts ~default:[] in + let t_ = + mk_typeapp_of_poly + cx + trace + ~use_op + ~reason_op + ~reason_tapp + ?cache + id + tparams_loc + xs + t + ts + in + rec_flow_t cx trace (t_, tvar) + | (DefT (_, _, PolyT (_, tps, _, _)), VarianceCheckT (_, ts, polarity)) -> + variance_check cx ~trace polarity (Nel.to_list tps, ts) + (* When we are checking the polarity of a super class where the super class has no type args, we end up generating this constraint. Since it has no type args, we never resolve to a PolyT, but we still want to check the polarity in this case. *) - | DefT (_, ClassT _), VarianceCheckT(_, [], polarity) -> - check_polarity cx ~trace polarity l - - | DefT (_, PolyT (tparams, _, _)), - TypeAppVarianceCheckT (use_op, reason_op, reason_tapp, targs) -> - let minimum_arity = poly_minimum_arity tparams in - let maximum_arity = List.length tparams in - let reason_arity = - let tp1, tpN = List.hd tparams, List.hd (List.rev tparams) in - let loc = Loc.btwn (aloc_of_reason tp1.reason |> ALoc.to_loc) (aloc_of_reason tpN.reason |> ALoc.to_loc) in - mk_reason (RCustom "See type parameters of definition here") loc in - if List.length targs > maximum_arity then ( - add_output cx ~trace - (FlowError.ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity)); - ) else ( - let unused_targs = List.fold_left (fun targs { name; default; polarity; reason; _ } -> - match default, targs with - | None, [] -> - (* fewer arguments than params but no default *) - add_output cx ~trace (FlowError.ETooFewTypeArgs - (reason_tapp, reason_arity, minimum_arity)); - [] - | _, [] -> [] - | _, (t1, t2)::targs -> - let use_op = Frame (TypeArgCompatibility { - name; - targ = reason; - lower = reason_op; - upper = reason_tapp; - polarity; - }, use_op) in - (match polarity with - | Positive -> rec_flow cx trace (t1, UseT (use_op, t2)) - | Negative -> rec_flow cx trace (t2, UseT (use_op, t1)) - | Neutral -> rec_unify cx trace ~use_op t1 t2); - targs - ) targs tparams in - assert (unused_targs = []); - ) - - (* empty targs specialization of non-polymorphic classes is a no-op *) - | (DefT (_, ClassT _) | ThisClassT _), SpecializeT(_,_,_,_,None,tvar) -> - rec_flow_t cx trace (l, tvar) - - | DefT (_, AnyT), SpecializeT (_, _, _, _, _, tvar) -> - rec_flow_t cx trace (l, tvar) - - (* this-specialize a this-abstracted class by substituting This *) - | ThisClassT (_, i), ThisSpecializeT(r, this, k) -> - let i = subst cx (SMap.singleton "this" this) i in - continue_repos cx trace r i k - - (* this-specialization of non-this-abstracted classes is a no-op *) - | DefT (_, ClassT i), ThisSpecializeT(r, _this, k) -> - (* TODO: check that this is a subtype of i? 
*) - continue_repos cx trace r i k - - | DefT (_, AnyT), ThisSpecializeT (r, _, k) -> - continue_repos cx trace r l k - - | DefT (_, PolyT _), ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - | (ThisClassT _, ReposLowerT (reason, use_desc, u)) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - (* When do we consider a polymorphic type T to be a subtype of another + | (DefT (_, _, ClassT _), VarianceCheckT (_, [], polarity)) -> + check_polarity cx ~trace polarity l + | ( DefT (_, _, PolyT (tparams_loc, tparams, _, _)), + TypeAppVarianceCheckT (use_op, reason_op, reason_tapp, targs) ) -> + let minimum_arity = poly_minimum_arity tparams in + let maximum_arity = Nel.length tparams in + let reason_arity = + mk_reason (RCustom "See type parameters of definition here") tparams_loc + in + if List.length targs > maximum_arity then + add_output + cx + ~trace + (Error_message.ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity)) + else + let (unused_targs, _, _) = + Nel.fold_left + (fun (targs, map1, map2) tparam -> + let { name; default; polarity; reason; _ } = tparam in + let flow_targs t1 t2 = + let use_op = + Frame + ( TypeArgCompatibility + { + name; + targ = reason; + lower = reason_op; + upper = reason_tapp; + polarity; + }, + use_op ) + in + match polarity with + | Polarity.Positive -> rec_flow cx trace (t1, UseT (use_op, t2)) + | Polarity.Negative -> rec_flow cx trace (t2, UseT (use_op, t1)) + | Polarity.Neutral -> rec_unify cx trace ~use_op t1 t2 + in + match (default, targs) with + | (None, []) -> + (* fewer arguments than params but no default *) + add_output + cx + ~trace + (Error_message.ETooFewTypeArgs (reason_tapp, reason_arity, minimum_arity)); + ([], map1, map2) + | (Some default, []) -> + let t1 = subst cx ~use_op map1 default in + let t2 = subst cx ~use_op map2 default in + flow_targs t1 t2; + ([], SMap.add name t1 map1, SMap.add name t2 map2) + | (_, (t1, t2) :: targs) -> + flow_targs t1 t2; + (targs, SMap.add name t1 map1, SMap.add name t2 map2)) + (targs, SMap.empty, SMap.empty) + tparams + in + assert (unused_targs = []) + (* empty targs specialization of non-polymorphic classes is a no-op *) + | ((DefT (_, _, ClassT _) | ThisClassT _), SpecializeT (_, _, _, _, None, tvar)) -> + rec_flow_t cx trace (l, tvar) + | (AnyT _, SpecializeT (_, _, _, _, _, tvar)) -> rec_flow_t cx trace (l, tvar) + (* this-specialize a this-abstracted class by substituting This *) + | (ThisClassT (_, i), ThisSpecializeT (r, this, k)) -> + let i = subst cx (SMap.singleton "this" this) i in + continue_repos cx trace r i k + (* this-specialization of non-this-abstracted classes is a no-op *) + | (DefT (_, _, ClassT i), ThisSpecializeT (r, _this, k)) -> + (* TODO: check that this is a subtype of i? *) + continue_repos cx trace r i k + | (AnyT _, ThisSpecializeT (r, _, k)) -> continue_repos cx trace r l k + | (DefT (_, _, PolyT _), ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + | (ThisClassT _, ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + (* When do we consider a polymorphic type T to be a subtype of another polymorphic type T'? This is the subject of a long line of research. A rule that works (Cardelli/Wegner) is: force U = U', and prove that T is a subtype of T' for any X:U'. 
A more general rule that proves @@ -3901,47 +3780,64 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = overridden with a generic method, as long as the non-generic signature can be derived as a specialization of the generic signature. *) - (** some shortcuts **) - | DefT (_, PolyT (_, _, id1)), UseT (_, DefT (_, PolyT (_, _, id2))) - when id1 = id2 -> () - - | DefT (r1, PolyT (params1, t1, id1)), UseT (use_op, DefT (r2, PolyT (params2, t2, id2))) -> - let n1 = List.length params1 in - let n2 = List.length params2 in - if n2 > n1 then - add_output cx ~trace (FlowError.ETooManyTypeArgs (r2, r1, n1)) - else if n2 < n1 then - add_output cx ~trace (FlowError.ETooFewTypeArgs (r2, r1, n1)) - else - (** for equal-arity polymorphic types, flow param upper bounds, then instances parameterized + (* some shortcuts **) + | (DefT (_, _, PolyT (_, _, _, id1)), UseT (_, DefT (_, _, PolyT (_, _, _, id2)))) + when id1 = id2 -> + () + | ( DefT (r1, _, PolyT (tparams_loc1, params1, t1, id1)), + UseT (use_op, DefT (r2, _, PolyT (tparams_loc2, params2, t2, id2))) ) -> + let n1 = Nel.length params1 in + let n2 = Nel.length params2 in + if n2 > n1 then + add_output cx ~trace (Error_message.ETooManyTypeArgs (r2, r1, n1)) + else if n2 < n1 then + add_output cx ~trace (Error_message.ETooFewTypeArgs (r2, r1, n1)) + else + (* for equal-arity polymorphic types, flow param upper bounds, then instances parameterized by these *) - let args1 = instantiate_poly_param_upper_bounds cx params1 in - let args2 = instantiate_poly_param_upper_bounds cx params2 in - List.iter2 (fun arg1 arg2 -> rec_flow_t cx trace ~use_op (arg2, arg1)) args1 args2; - let inst1 = - let r = reason_of_t t1 in - mk_typeapp_of_poly cx trace - ~use_op ~reason_op:r ~reason_tapp:r id1 params1 t1 args1 in - let inst2 = - let r = reason_of_t t2 in - mk_typeapp_of_poly cx trace - ~use_op ~reason_op:r ~reason_tapp:r id2 params2 t2 args2 in - rec_flow_t cx trace (inst1, inst2) - - (** general case **) - | _, UseT (use_op, DefT (_, PolyT (ids, t, _))) -> - generate_tests cx ids (fun map_ -> - rec_flow cx trace (l, UseT (use_op, subst cx ~use_op map_ t)) - ) - - (* TODO: ideally we'd do the same when lower bounds flow to a + let args1 = instantiate_poly_param_upper_bounds cx params1 in + let args2 = instantiate_poly_param_upper_bounds cx params2 in + List.iter2 (fun arg1 arg2 -> rec_flow_t cx trace ~use_op (arg2, arg1)) args1 args2; + let inst1 = + let r = reason_of_t t1 in + mk_typeapp_of_poly + cx + trace + ~use_op + ~reason_op:r + ~reason_tapp:r + id1 + tparams_loc1 + params1 + t1 + args1 + in + let inst2 = + let r = reason_of_t t2 in + mk_typeapp_of_poly + cx + trace + ~use_op + ~reason_op:r + ~reason_tapp:r + id2 + tparams_loc2 + params2 + t2 + args2 + in + rec_flow_t cx trace (inst1, inst2) + (* general case **) + | (_, UseT (use_op, DefT (_, _, PolyT (_, ids, t, _)))) -> + generate_tests cx (Nel.to_list ids) (fun map_ -> + rec_flow cx trace (l, UseT (use_op, subst cx ~use_op map_ t))) + (* TODO: ideally we'd do the same when lower bounds flow to a this-abstracted class, but fixing the class is easier; might need to revisit *) - | (_, UseT (use_op, ThisClassT (r, i))) -> - let reason = reason_of_t l in - rec_flow cx trace (l, UseT (use_op, fix_this_class cx trace reason (r, i))) - - (** This rule is hit when a polymorphic type appears outside a + | (_, UseT (use_op, ThisClassT (r, i))) -> + let reason = reason_of_t l in + rec_flow cx trace (l, UseT (use_op, fix_this_class cx trace reason (r, i))) + (* This rule is hit when a polymorphic type 
appears outside a type application expression - i.e. not followed by a type argument list delimited by angle brackets. We want to require full expressions in type positions like annotations, @@ -3949,31 +3845,54 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = extends clauses and at function call sites - without explicit type arguments, since typically they're easily inferred from context. *) - | DefT (reason_tapp, (PolyT (ids, t, _))), _ -> - let reason_op = reason_of_use_t u in - begin match u with - | UseT (use_op, DefT (_, TypeT _)) -> - ignore use_op; (* TODO: add use op to missing type arg error? *) - add_output cx ~trace (FlowError.EMissingTypeArgs { - reason_tapp = reason_tapp; - reason_arity = mk_poly_arity_reason ids; - min_arity = poly_minimum_arity ids; - max_arity = List.length ids; - }) - (* Special case for `_ instanceof C` where C is polymorphic *) - | PredicateT ((RightP (InstanceofTest, _) | NotP (RightP (InstanceofTest, _))), _) -> - let l = instantiate_poly_default_args cx trace - ~use_op:unknown_use ~reason_op ~reason_tapp (ids, t) in - rec_flow cx trace (l, u) - (* Special case for React.PropTypes.instanceOf arguments, which are an + | (DefT (reason_tapp, _, PolyT (tparams_loc, ids, t, _)), _) -> + let reason_op = reason_of_use_t u in + begin + match u with + | UseT (use_op, DefT (_, _, TypeT _)) -> + ignore use_op; + + (* TODO: add use op to missing type arg error? *) + add_output + cx + ~trace + (Error_message.EMissingTypeArgs + { + reason_tapp; + reason_arity = mk_poly_arity_reason tparams_loc; + min_arity = poly_minimum_arity ids; + max_arity = Nel.length ids; + }) + (* Special case for `_ instanceof C` where C is polymorphic *) + | PredicateT ((RightP (InstanceofTest, _) | NotP (RightP (InstanceofTest, _))), _) -> + let l = + instantiate_poly_default_args + cx + trace + ~use_op:unknown_use + ~reason_op + ~reason_tapp + (tparams_loc, ids, t) + in + rec_flow cx trace (l, u) + (* Special case for React.PropTypes.instanceOf arguments, which are an exception to type arg arity strictness, because it's not possible to provide args and we need to interpret the value as a type. *) - | ReactKitT (use_op, reason_op, (React.SimplifyPropType - (React.SimplifyPropType.InstanceOf, _) as tool)) -> - let l = instantiate_poly_default_args cx trace - ~use_op ~reason_op ~reason_tapp (ids, t) in - react_kit cx trace ~use_op reason_op l tool - (* Calls to polymorphic functions may cause non-termination, e.g. when the + | ReactKitT + ( use_op, + reason_op, + (React.SimplifyPropType (React.SimplifyPropType.InstanceOf, _) as tool) ) -> + let l = + instantiate_poly_default_args + cx + trace + ~use_op + ~reason_op + ~reason_tapp + (tparams_loc, ids, t) + in + ReactJs.run cx trace ~use_op reason_op l tool + (* Calls to polymorphic functions may cause non-termination, e.g. when the results of the calls feed back as subtle variations of the original arguments. This is similar to how we may have non-termination with method calls on type applications. Thus, it makes sense to replicate @@ -3998,61 +3917,264 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = positions we should be able to subject reasons to arbitrary tweaking, without fearing regressions in termination guarantees. 
*) - | CallT (use_op, _, calltype) when not (is_typemap_reason reason_op) -> - begin match calltype.call_targs with - | None -> - let arg_reasons = List.map (function - | Arg t -> reason_of_t t - | SpreadArg t -> reason_of_t t - ) calltype.call_args_tlist in - let t_ = instantiate_poly cx trace - ~use_op ~reason_op ~reason_tapp ~cache:arg_reasons (ids,t) in - rec_flow cx trace (t_, u) - | Some targs -> - let t_ = instantiate_poly_with_targs cx trace (ids, t) targs - ~use_op ~reason_op ~reason_tapp in - rec_flow cx trace (t_, - CallT (use_op, reason_op, {calltype with call_targs = None})) - end - | ConstructorT (use_op, reason_op, Some targs, args, tout) -> - let t_ = instantiate_poly_with_targs cx trace (ids, t) targs - ~use_op ~reason_op ~reason_tapp in - rec_flow cx trace (t_, ConstructorT (use_op, reason_op, None, args, tout)) - | _ -> - let t_ = instantiate_poly cx trace - ~use_op:unknown_use ~reason_op ~reason_tapp (ids,t) in - rec_flow cx trace (t_, u) - end + | CallT (use_op, _, calltype) when not (is_typemap_reason reason_op) -> + begin + match calltype.call_targs with + | None -> + let arg_reasons = + Core_list.map + ~f:(function + | Arg t -> reason_of_t t + | SpreadArg t -> reason_of_t t) + calltype.call_args_tlist + in + let t_ = + instantiate_poly + cx + trace + ~use_op + ~reason_op + ~reason_tapp + ~cache:arg_reasons + (tparams_loc, ids, t) + in + rec_flow cx trace (t_, u) + | Some targs -> + let t_ = + instantiate_poly_call_or_new + cx + trace + (tparams_loc, ids, t) + targs + ~use_op + ~reason_op + ~reason_tapp + in + rec_flow + cx + trace + (t_, CallT (use_op, reason_op, { calltype with call_targs = None })) + end + | ConstructorT (use_op, reason_op, Some targs, args, tout) -> + let t_ = + instantiate_poly_call_or_new + cx + trace + (tparams_loc, ids, t) + targs + ~use_op + ~reason_op + ~reason_tapp + in + rec_flow cx trace (t_, ConstructorT (use_op, reason_op, None, args, tout)) + | _ -> + let use_op = + match use_op_of_use_t u with + | Some use_op -> use_op + | None -> unknown_use + in + let t_ = + instantiate_poly cx trace ~use_op ~reason_op ~reason_tapp (tparams_loc, ids, t) + in + rec_flow cx trace (t_, u) + end + (* when a this-abstracted class flows to upper bounds, fix the class *) + | (ThisClassT (r, i), _) -> + let reason = reason_of_use_t u in + rec_flow cx trace (fix_this_class cx trace reason (r, i), u) + (*****************************) + (* React Abstract Components *) + (*****************************) + (* + * In all of these cases, we check: + * 1. configu <: configl + * 2. default_propsl = default_propsu + * 3. instancel <: instanceu + * + * 2. is necessary because we allow the default props of a component to be read and + * written. + * + * 1. Is necessary because we need to ensure that any config object that is passed to u + * is compatible with the config of l. This also is sufficient; unification is not required. + * We can think of AbstractComponents as some sort of callable that accepts a config object. + * The only place that the config object type would appear is in the callable signature, which + * is contravariant. + * + * In reality, a component is turned into an element via createElement, which accepts a + * component and a config object. From there, it creates an object that will become the + * props of a component by combining the config object with the component's default props. + * This process creates a new fresh unaliased props object, which is passed to the component. + * + * 3. 
Is necessary because we need to ensure the ref passed in is compatible with the instance + * type of the component. React will assign ref.current to the instance of the component, so we + * need to ensure that the type we assign is compatible with the type ref.current. + *) + + (* Class component ~> AbstractComponent *) + | ( DefT (reasonl, _, ClassT this), + UseT (use_op, DefT (_reasonu, _, ReactAbstractComponentT { config; instance })) ) -> + (* Contravariant config check *) + React_kit.get_config + cx + trace + l + ~use_op + ~reason_op:reasonl + ~rec_flow + ~rec_flow_t + ~rec_unify + ~get_builtin_type + ~add_output + (React.GetConfig l) + Polarity.Negative + config; + + (* check instancel <: instanceu *) + rec_flow_t cx trace ~use_op (this, instance) + (* Function Component ~> AbstractComponent *) + | ( DefT (reasonl, _, FunT (_, _, { return_t; _ })), + UseT (use_op, DefT (_reasonu, _, ReactAbstractComponentT { config; instance })) ) -> + (* Function components will not always have an annotation, so the config may + * never resolve. To determine config compatibility, we instead + * call createElement on the function with the given component to determine + * the compatibility. + * + * We use ConfigCheck instead of CreateElement because: + * 1. We can't perform the key check. If config is mixed, which can happen in + * polymorphic HOCs then the [string]: mixed indexer causes spurious errors. + * 2. We check the ref here, so we don't need to check it in the config as well. + *) + rec_flow cx trace (l, ReactKitT (use_op, reasonl, React.ConfigCheck config)); + + (* Ensure this is a function component *) + rec_flow_t ~use_op cx trace (return_t, get_builtin_type cx reasonl "React$Node"); + + (* A function component instance type is always void, so flow void to instance *) + rec_flow_t + cx + trace + ~use_op + (VoidT.make (replace_desc_new_reason RVoid reasonl) |> with_trust bogus_trust, instance) + (* Object Component ~> AbstractComponent *) + | ( DefT (reasonl, _, ObjT { call_t = Some id; _ }), + UseT (use_op, DefT (reasonu, trust, ReactAbstractComponentT { config; instance })) ) -> + rec_flow cx trace (l, ReactKitT (use_op, reasonl, React.ConfigCheck config)); + + (* Ensure the callable signature's return type is compatible with React.Node. 
We + * do this by flowing it to (...empty): React.Node *) + let funtype = + mk_functiontype + reasonu + [] + ~rest_param: + (Some + ( None, + aloc_of_reason reasonu, + EmptyT.why (replace_desc_new_reason REmpty reasonu) (bogus_trust ()) )) + ~def_reason:reasonl + (get_builtin_type cx reasonu "React$Node") + in + let mixed = MixedT.why reasonu (bogus_trust ()) in + rec_flow_t + ~use_op + cx + trace + (Context.find_call cx id, DefT (reasonu, trust, FunT (mixed, mixed, funtype))); + + (* An object component instance type is always void, so flow void to instance *) + rec_flow_t + cx + trace + ~use_op + (VoidT.make (replace_desc_new_reason RVoid reasonl) |> with_trust bogus_trust, instance) + (* AbstractComponent ~> AbstractComponent *) + | ( DefT (_reasonl, _, ReactAbstractComponentT { config = configl; instance = instancel }), + UseT + ( use_op, + DefT + (_reasonu, _, ReactAbstractComponentT { config = configu; instance = instanceu }) + ) ) -> + rec_flow_t cx trace ~use_op (configu, configl); + rec_flow_t cx trace ~use_op (instancel, instanceu) + (* When looking at properties of an AbstractComponent, we delegate to a union of + * function component and class component + *) + | ( DefT (r, _, ReactAbstractComponentT _), + (TestPropT _ | GetPropT _ | SetPropT _ | GetElemT _ | SetElemT _) ) -> + let statics = get_builtin_type cx ~trace r "React$AbstractComponentStatics" in + rec_flow cx trace (statics, u) + (******************) + (* React GetProps *) + (******************) + + (* props is invariant in the class *) + | (DefT (r, _, ClassT _), (ReactPropsToOut (_, props) | ReactInToProps (_, props))) -> + rec_flow_t cx trace (l, React_kit.component_class cx r ~get_builtin_typeapp props) + (* Functions with rest params or that are predicates cannot be React components *) + | ( DefT (reason, _, FunT (_, _, { params; rest_param = None; is_predicate = false; _ })), + ReactPropsToOut (_, props) ) -> + (* Contravariance *) + Core_list.hd params + |> Option.value_map ~f:snd ~default:(Obj_type.mk ~sealed:true cx reason) + |> (fun t -> rec_flow_t cx trace (t, props)) + | ( DefT + ( reason, + _, + FunT (_, _, { params; return_t; rest_param = None; is_predicate = false; _ }) ), + ReactInToProps (reason_op, props) ) -> + (* Contravariance *) + Core_list.hd params + |> Option.value_map ~f:snd ~default:(Obj_type.mk ~sealed:true cx reason) + |> fun t -> + rec_flow_t cx trace (props, t); + rec_flow_t cx trace (return_t, get_builtin_type cx reason_op "React$Node") + | (DefT (r, _, FunT _), (ReactInToProps (_, props) | ReactPropsToOut (_, props))) -> + React.GetProps props + |> React_kit.err_incompatible cx trace ~use_op:unknown_use ~add_output r + | ( DefT (r, _, ObjT { call_t = Some id; _ }), + (ReactInToProps (_, props) | ReactPropsToOut (_, props)) ) -> + begin + match Context.find_call cx id with + | ( DefT (_, _, FunT (_, _, { rest_param = None; is_predicate = false; _ })) + | DefT (_, _, PolyT (_, _, DefT (_, _, FunT _), _)) ) as fun_t -> + (* Keep the object's reason for better error reporting *) + rec_flow cx trace (Fn.const r |> Fn.flip mod_reason_of_t fun_t, u) + | _ -> + React.GetProps props + |> React_kit.err_incompatible cx trace ~use_op:unknown_use ~add_output r + end + | (AnyT _, ReactPropsToOut (_, props)) -> rec_flow_t cx trace (l, props) + | (AnyT _, ReactInToProps (_, props)) -> rec_flow_t cx trace (props, l) + | (DefT (r, _, _), (ReactPropsToOut (_, props) | ReactInToProps (_, props))) -> + React.GetProps props + |> React_kit.err_incompatible cx trace ~use_op:unknown_use ~add_output r + 
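Editor's note, not part of the patch: the ReactAbstractComponentT rules above amount to a variance discipline — the config type is checked contravariantly (the upper bound's config flows to the lower bound's config) while the instance type is checked covariantly (the lower bound's instance flows to the upper bound's instance). Below is a minimal standalone OCaml sketch of that discipline only; the names ty, Component, and subtype are hypothetical and are not Flow's actual representation or API.

(* Toy types: Num <: Mixed, plus a component type carrying a config and an
   instance, standing in for ReactAbstractComponentT { config; instance }. *)
type ty =
  | Mixed
  | Num
  | Component of { config : ty; instance : ty }

let rec subtype l u =
  match (l, u) with
  | (_, Mixed) -> true
  | (Num, Num) -> true
  | (Component cl, Component cu) ->
    (* config is contravariant: flip the sides, mirroring
       rec_flow_t (configu, configl) in the AbstractComponent ~> AbstractComponent case *)
    subtype cu.config cl.config
    (* instance is covariant: keep the sides, mirroring
       rec_flow_t (instancel, instanceu) *)
    && subtype cl.instance cu.instance
  | _ -> false

let () =
  (* A component accepting a broader config (Mixed) and exposing a more precise
     instance (Num) is usable where a narrower config / vaguer instance is expected,
     but not the other way around. *)
  let general = Component { config = Mixed; instance = Num } in
  let specific = Component { config = Num; instance = Mixed } in
  assert (subtype general specific);
  assert (not (subtype specific general));
  print_endline "component variance sketch ok"

The flipped check on config is what lets a component that tolerates arbitrary props be passed where a component with specific props is required, while the covariant instance check keeps refs sound.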
(***********************************************) + (* function types deconstruct into their parts *) + (***********************************************) + + (* FunT ~> FunT *) + | (DefT (lreason, _, FunT (_, _, ft1)), UseT (use_op, DefT (ureason, _, FunT (_, _, ft2)))) + -> + let use_op = + Frame + ( FunCompatibility { lower = lreason; upper = ureason }, + (* The $call PropertyCompatibility is redundant when we have a + * FunCompatibility use_op. *) + match use_op with + | Frame (PropertyCompatibility { prop = Some "$call"; _ }, use_op) -> use_op + | _ -> use_op ) + in + rec_flow cx trace (ft2.this_t, UseT (use_op, ft1.this_t)); + let args = List.rev_map (fun (_, t) -> Arg t) ft2.params in + let args = + List.rev + (match ft2.rest_param with + | Some (_, _, rest) -> SpreadArg rest :: args + | None -> args) + in + multiflow_subtype cx trace ~use_op ureason args ft1; - (* when a this-abstracted class flows to upper bounds, fix the class *) - | (ThisClassT (r, i), _) -> - let reason = reason_of_use_t u in - rec_flow cx trace (fix_this_class cx trace reason (r, i), u) - - (***********************************************) - (* function types deconstruct into their parts *) - (***********************************************) - - (* FunT ~> FunT *) - - | DefT (lreason, FunT (_, _, ft1)), - UseT (use_op, DefT (ureason, FunT (_, _, ft2))) -> - let use_op = Frame ( - FunCompatibility { lower = lreason; upper = ureason }, - (* The $call PropertyCompatibility is redundant when we have a - * FunCompatibility use_op. *) - match use_op with - | Frame (PropertyCompatibility {prop = Some "$call"; _}, use_op) -> use_op - | _ -> use_op - ) in - rec_flow cx trace (ft2.this_t, UseT (use_op, ft1.this_t)); - let args = List.rev_map (fun (_, t) -> Arg t) ft2.params in - let args = List.rev (match ft2.rest_param with - | Some (_, _, rest) -> (SpreadArg rest) :: args - | None -> args) in - multiflow_subtype cx trace ~use_op ureason args ft1; - - (* Well-formedness adjustment: If this is predicate function subtyping, + (* Well-formedness adjustment: If this is predicate function subtyping, make sure to apply a latent substitution on the right-hand use to bridge the mismatch of the parameter naming. Otherwise, proceed with the subtyping of the return types normally. In general it should @@ -4060,399 +4182,448 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = should not flow to other OpenPredTs without wrapping the latter in SubstOnPredT. 
*) - if ft2.is_predicate then - if not ft1.is_predicate then - (* Non-predicate functions are incompatible with predicate ones + if ft2.is_predicate then + if not ft1.is_predicate then + (* Non-predicate functions are incompatible with predicate ones TODO: somehow the original flow needs to be propagated as well *) - add_output cx ~trace (FlowError.EFunPredCustom ( - (lreason, ureason), - "Function is incompatible with")) - else - let reason = replace_reason (fun desc -> - RCustom (spf "predicate of %s" (string_of_desc desc)) - ) (reason_of_t ft2.return_t) in - let rec subst_map (n, map) = function - | (Some k, _)::ps1, (Some v, _)::ps2 -> - subst_map (n+1, SMap.add k (v,[]) map) (ps1, ps2) - | _, [] -> Ok map - | [], ps2 -> - (* Flag an error if predicate counts do not coincide + add_output + cx + ~trace + (Error_message.EFunPredCustom ((lreason, ureason), "Function is incompatible with")) + else + let reason = + update_desc_new_reason + (fun desc -> RCustom (spf "predicate of %s" (string_of_desc desc))) + (reason_of_t ft2.return_t) + in + let rec subst_map (n, map) = function + | ((Some k, _) :: ps1, (Some v, _) :: ps2) -> + subst_map (n + 1, SMap.add k (v, []) map) (ps1, ps2) + | (_, []) -> Ok map + | ([], ps2) -> + (* Flag an error if predicate counts do not coincide TODO: somehow the original flow needs to be propagated as well *) - let mod_reason n = replace_reason (fun _ -> - RCustom (spf "predicate function with %d arguments" n) - ) in - let n2 = n + (List.length ps2) in - Error (FlowError.EFunPredCustom ( - (mod_reason n lreason, - mod_reason n2 ureason), - "Predicate function is incompatible with")) - | (None, _)::_, _ | _, (None, _)::_ -> - let loc = aloc_of_reason ureason in - Error (FlowError.(EInternal (loc |> ALoc.to_loc, PredFunWithoutParamNames))) - in - match subst_map (0, SMap.empty) (ft1.params, ft2.params) with - | Error e -> add_output cx ~trace e - | Ok map -> - rec_flow cx trace (ft1.return_t, - SubstOnPredT (reason, map, ft2.return_t)) - else ( - let use_op = Frame (FunReturn { - lower = reason_of_t ft1.return_t; - upper = reason_of_t ft2.return_t; - }, use_op) in - rec_flow cx trace (ft1.return_t, UseT (use_op, ft2.return_t)) - ) + let mod_reason n = + update_desc_new_reason (fun _ -> + RCustom (spf "predicate function with %d arguments" n)) + in + let n2 = n + List.length ps2 in + Error + (Error_message.EFunPredCustom + ( (mod_reason n lreason, mod_reason n2 ureason), + "Predicate function is incompatible with" )) + | ((None, _) :: _, _) + | (_, (None, _) :: _) -> + let loc = aloc_of_reason ureason in + Error Error_message.(EInternal (loc, PredFunWithoutParamNames)) + in + match subst_map (0, SMap.empty) (ft1.params, ft2.params) with + | Error e -> add_output cx ~trace e + | Ok map -> rec_flow cx trace (ft1.return_t, SubstOnPredT (reason, map, ft2.return_t)) + else + let use_op = + Frame + ( FunReturn { lower = reason_of_t ft1.return_t; upper = reason_of_t ft2.return_t }, + use_op ) + in + rec_flow cx trace (ft1.return_t, UseT (use_op, ft2.return_t)) + (* FunT ~> CallT *) + | (DefT (reason_fundef, _, FunT (_, _, funtype)), CallT (use_op, reason_callsite, calltype)) + -> + let { this_t = o1; params = _; return_t = t1; closure_t = func_scope_id; changeset; _ } = + funtype + in + let { + call_this_t = o2; + call_targs; + call_args_tlist = tins2; + call_tout = t2; + call_closure_t = call_scope_id; + call_strict_arity; + } = + calltype + in + rec_flow cx trace (o2, UseT (use_op, o1)); + + Option.iter call_targs ~f:(fun _ -> + add_output + cx + ~trace + Error_message.( 
+ ECallTypeArity + { + call_loc = aloc_of_reason reason_callsite; + is_new = false; + reason_arity = reason_fundef; + expected_arity = 0; + })); + + if call_strict_arity then + multiflow_call cx trace ~use_op reason_callsite tins2 funtype + else + multiflow_subtype cx trace ~use_op reason_callsite tins2 funtype; - (* FunT ~> CallT *) - - | DefT (reason_fundef, FunT (_, _, funtype)), - CallT (use_op, reason_callsite, calltype) -> - let { - this_t = o1; - params = _; - return_t = t1; - closure_t = func_scope_id; - changeset; _ - } = funtype in - let { - call_this_t = o2; - call_targs; - call_args_tlist = tins2; - call_tout = t2; - call_closure_t = call_scope_id; - call_strict_arity - } = calltype in - - rec_flow cx trace (o2, UseT (use_op, o1)); - - Option.iter call_targs ~f:(fun _ -> - add_output cx ~trace FlowError.(ECallTypeArity { - call_loc = aloc_of_reason reason_callsite |> ALoc.to_loc; - is_new = false; - reason_arity = reason_fundef; - expected_arity = 0; - })); - - if call_strict_arity - then multiflow_call cx trace ~use_op reason_callsite tins2 funtype - else multiflow_subtype cx trace ~use_op reason_callsite tins2 funtype; - - (* flow return type of function to the tvar holding the return type of the + (* flow return type of function to the tvar holding the return type of the call. clears the op stack because the result of the call is not the call itself. *) - rec_flow_t cx trace ( - reposition cx ~trace (aloc_of_reason reason_callsite |> ALoc.to_loc) t1, - t2 - ); - - (if Context.is_verbose cx then - prerr_endlinef "%shavoc_call_env fundef %s callsite %s" - (Context.pid_prefix cx) - (Debug_js.string_of_reason cx reason_fundef) - (Debug_js.string_of_reason cx reason_callsite)); - havoc_call_env cx func_scope_id call_scope_id changeset; - - | DefT (reason_fundef, (AnyFunT | AnyT)), - CallT (use_op, reason_op, calltype) -> - let { - call_this_t; - call_targs = _; (* An untyped receiver can't do anything with type args *) - call_args_tlist; - call_tout; - call_closure_t = _; - call_strict_arity = _; - } = calltype in - let any = AnyT.why reason_fundef in - rec_flow_t cx trace (call_this_t, any); - call_args_iter (fun t -> rec_flow cx trace (t, UseT (use_op, any))) call_args_tlist; - rec_flow_t cx trace (AnyT.why reason_op, call_tout) - - (* Special handlers for builtin functions *) - - | CustomFunT (_, ObjectAssign), - CallT (_, reason_op, { call_targs = None; call_args_tlist = dest_t::ts; call_tout; _ }) -> - let dest_t = extract_non_spread cx ~trace dest_t in - let t = chain_objects cx ~trace reason_op dest_t ts in - rec_flow_t cx trace (t, call_tout) - - | CustomFunT (_, ObjectGetPrototypeOf), - CallT (_, reason_op, { call_targs = None; call_args_tlist = arg::_; call_tout; _ }) -> - let l = extract_non_spread cx ~trace arg in - rec_flow cx trace (l, GetProtoT (reason_op, call_tout)) - - | CustomFunT (_, ObjectSetPrototypeOf), - CallT (_, reason_op, { call_targs = None; call_args_tlist = arg1::arg2::_; call_tout; _ }) -> - let target = extract_non_spread cx ~trace arg1 in - let proto = extract_non_spread cx ~trace arg2 in - rec_flow cx trace (target, SetProtoT (reason_op, proto)); - rec_flow_t cx trace (BoolT.why reason_op, call_tout) - - | DefT (reason, StrT (Literal (_, str))), - UseT (use_op, DefT (reason_op, CharSetT chars)) -> - let module CharSet = String_utils.CharSet in - let open Flow_error in - let invalid, _ = String_utils.fold_left ~f:(fun (invalid, seen) chr -> - if not (CharSet.mem chr chars) then - InvalidCharSetSet.add (InvalidChar chr) invalid, seen - else if 
CharSet.mem chr seen then - InvalidCharSetSet.add (DuplicateChar chr) invalid, seen - else - invalid, CharSet.add chr seen - ) ~acc:(InvalidCharSetSet.empty, CharSet.empty) str in - if not (InvalidCharSetSet.is_empty invalid) then - add_output cx ~trace (FlowError.EInvalidCharSet { - invalid = ( - replace_reason_const ~keep_def_loc:true (RStringLit str) reason, - invalid - ); - valid = reason_op; - use_op; - }) - - | DefT (reason, CharSetT _), _ -> - rec_flow cx trace (StrT.why reason, u) - - | _, UseT (use_op, DefT (reason, CharSetT _)) -> - rec_flow cx trace (l, UseT (use_op, StrT.why reason)) - - (* React prop type functions are modeled as a custom function type in Flow, + rec_flow_t cx trace (reposition cx ~trace (aloc_of_reason reason_callsite) t1, t2); + + if Context.is_verbose cx then + prerr_endlinef + "%shavoc_call_env fundef %s callsite %s" + (Context.pid_prefix cx) + (Debug_js.string_of_reason cx reason_fundef) + (Debug_js.string_of_reason cx reason_callsite); + havoc_call_env cx func_scope_id call_scope_id changeset + | (AnyT (reason_fundef, _), CallT (use_op, reason_op, calltype)) -> + let { + call_this_t; + call_targs = _; + (* An untyped receiver can't do anything with type args *) + call_args_tlist; + call_tout; + call_closure_t = _; + call_strict_arity = _; + } = + calltype + in + let any = AnyT.untyped reason_fundef in + rec_flow_t cx ~use_op trace (call_this_t, any); + call_args_iter (fun t -> rec_flow cx trace (t, UseT (use_op, any))) call_args_tlist; + rec_flow_t cx ~use_op trace (AnyT.untyped reason_op, call_tout) + (* Special handlers for builtin functions *) + | ( CustomFunT (_, ObjectAssign), + CallT + ( use_op, + reason_op, + { call_targs = None; call_args_tlist = dest_t :: ts; call_tout; _ } ) ) -> + let dest_t = extract_non_spread cx ~trace dest_t in + let t = chain_objects cx ~trace reason_op dest_t ts in + rec_flow_t cx ~use_op trace (t, call_tout) + | ( CustomFunT (_, ObjectGetPrototypeOf), + CallT (_, reason_op, { call_targs = None; call_args_tlist = arg :: _; call_tout; _ }) + ) -> + let l = extract_non_spread cx ~trace arg in + rec_flow cx trace (l, GetProtoT (reason_op, call_tout)) + | ( CustomFunT (_, ObjectSetPrototypeOf), + CallT + ( use_op, + reason_op, + { call_targs = None; call_args_tlist = arg1 :: arg2 :: _; call_tout; _ } ) ) -> + let target = extract_non_spread cx ~trace arg1 in + let proto = extract_non_spread cx ~trace arg2 in + rec_flow cx trace (target, SetProtoT (reason_op, proto)); + rec_flow_t cx ~use_op trace (BoolT.why reason_op |> with_trust bogus_trust, call_tout) + | ( DefT (reason, _, StrT (Literal (_, str))), + UseT (use_op, DefT (reason_op, _, CharSetT chars)) ) -> + let module CharSet = String_utils.CharSet in + Error_message.( + let (invalid, _) = + String_utils.fold_left + ~f:(fun (invalid, seen) chr -> + if not (CharSet.mem chr chars) then + (InvalidCharSetSet.add (InvalidChar chr) invalid, seen) + else if CharSet.mem chr seen then + (InvalidCharSetSet.add (DuplicateChar chr) invalid, seen) + else + (invalid, CharSet.add chr seen)) + ~acc:(InvalidCharSetSet.empty, CharSet.empty) + str + in + if not (InvalidCharSetSet.is_empty invalid) then + add_output + cx + ~trace + (EInvalidCharSet + { + invalid = (replace_desc_reason (RStringLit str) reason, invalid); + valid = reason_op; + use_op; + })) + | (DefT (reason, trust, CharSetT _), _) -> rec_flow cx trace (StrT.why reason trust, u) + | (_, UseT (use_op, DefT (reason, trust, CharSetT _))) -> + rec_flow cx trace (l, UseT (use_op, StrT.why reason trust)) + (* React prop type 
functions are modeled as a custom function type in Flow, so that Flow can exploit the extra information to gratuitously hardcode best-effort static checking of dynamic prop type validation. A prop type is either a primitive or some complex type, which is a function that simplifies to a primitive prop type when called. *) - - | CustomFunT (_, ReactPropType (React.PropType.Primitive (false, t))), - GetPropT (_, reason_op, Named (_, "isRequired"), tout) -> - let prop_type = React.PropType.Primitive (true, t) in - rec_flow_t cx trace (CustomFunT (reason_op, ReactPropType prop_type), tout) - - | CustomFunT (reason, ReactPropType (React.PropType.Primitive (req, _))), _ - when object_use u || function_use u || function_like_op u -> - let builtin_name = - if req - then "ReactPropsCheckType" - else "ReactPropsChainableTypeChecker" - in - let l = get_builtin_type cx ~trace reason builtin_name in - rec_flow cx trace (l, u) - - | CustomFunT (_, ReactPropType React.PropType.Complex kind), - CallT (_, reason_op, { call_targs = None; call_args_tlist = arg1::_; call_tout; _ }) -> - let open React in - let tool = match kind with - | PropType.ArrayOf -> SimplifyPropType.ArrayOf - | PropType.InstanceOf -> SimplifyPropType.InstanceOf - | PropType.ObjectOf -> SimplifyPropType.ObjectOf - | PropType.OneOf -> SimplifyPropType.OneOf ResolveArray - | PropType.OneOfType -> SimplifyPropType.OneOfType ResolveArray - | PropType.Shape -> SimplifyPropType.Shape ResolveObject - in - let t = extract_non_spread cx ~trace arg1 in - rec_flow cx trace (t, ReactKitT (unknown_use, reason_op, - SimplifyPropType (tool, call_tout))) - - | CustomFunT (reason, ReactPropType React.PropType.Complex kind), _ - when object_use u || function_use u || function_like_op u -> - rec_flow cx trace (get_builtin_prop_type cx ~trace reason kind, u) - - | CustomFunT (_, ReactCreateClass), - CallT (use_op, reason_op, { call_targs = None; call_args_tlist = arg1::_; call_tout; _ }) -> - let loc_op = aloc_of_reason reason_op in - let loc_tapp = def_loc_of_reason (reason_of_t call_tout) in - let desc_tapp = desc_of_reason (reason_of_t call_tout) in - let spec = extract_non_spread cx ~trace arg1 in - let mk_tvar f = Tvar.mk cx (f reason_op) in - let knot = { React.CreateClass. 
- this = mk_tvar (replace_reason_const RThisType); - static = mk_tvar (replace_reason_const RThisType); - state_t = mk_tvar (replace_reason - (fun d -> RTypeParam ("State", (d, loc_op |> ALoc.to_loc), (desc_tapp, loc_tapp)))); - default_t = mk_tvar (replace_reason - (fun d -> RTypeParam ("Default", (d, loc_op |> ALoc.to_loc), (desc_tapp, loc_tapp)))); - } in - rec_flow cx trace (spec, ReactKitT (use_op, reason_op, - React.CreateClass (React.CreateClass.Spec [], knot, call_tout))); - - | _, ReactKitT (use_op, reason_op, tool) -> - react_kit cx trace ~use_op reason_op l tool - - (* Facebookisms are special Facebook-specific functions that are not + | ( CustomFunT (_, ReactPropType (React.PropType.Primitive (false, t))), + GetPropT (_, reason_op, Named (_, "isRequired"), tout) ) -> + let prop_type = React.PropType.Primitive (true, t) in + rec_flow_t cx trace (CustomFunT (reason_op, ReactPropType prop_type), tout) + | (CustomFunT (reason, ReactPropType (React.PropType.Primitive (req, _))), _) + when object_use u || function_use u || function_like_op u -> + let builtin_name = + if req then + "ReactPropsCheckType" + else + "ReactPropsChainableTypeChecker" + in + let l = get_builtin_type cx ~trace reason builtin_name in + rec_flow cx trace (l, u) + | ( CustomFunT (_, ReactPropType (React.PropType.Complex kind)), + CallT + (use_op, reason_op, { call_targs = None; call_args_tlist = arg1 :: _; call_tout; _ }) + ) -> + React.( + let tool = + match kind with + | PropType.ArrayOf -> SimplifyPropType.ArrayOf + | PropType.InstanceOf -> SimplifyPropType.InstanceOf + | PropType.ObjectOf -> SimplifyPropType.ObjectOf + | PropType.OneOf -> SimplifyPropType.OneOf ResolveArray + | PropType.OneOfType -> SimplifyPropType.OneOfType ResolveArray + | PropType.Shape -> SimplifyPropType.Shape ResolveObject + in + let t = extract_non_spread cx ~trace arg1 in + rec_flow cx trace (t, ReactKitT (use_op, reason_op, SimplifyPropType (tool, call_tout)))) + | (CustomFunT (reason, ReactPropType (React.PropType.Complex kind)), _) + when object_use u || function_use u || function_like_op u -> + rec_flow cx trace (get_builtin_prop_type cx ~trace reason kind, u) + | ( CustomFunT (_, ReactPropType (React.PropType.Primitive (is_req1, t1))), + UseT (use_op, CustomFunT (_, ReactPropType (React.PropType.Primitive (is_req2, t2)))) + ) + when (not is_req2) || is_req1 -> + rec_unify cx trace ~use_op t1 t2 + | ( CustomFunT (_, ReactCreateClass), + CallT + (use_op, reason_op, { call_targs = None; call_args_tlist = arg1 :: _; call_tout; _ }) + ) -> + let loc_op = aloc_of_reason reason_op in + let loc_tapp = def_aloc_of_reason (reason_of_t call_tout) in + let desc_tapp = desc_of_reason (reason_of_t call_tout) in + let spec = extract_non_spread cx ~trace arg1 in + let mk_tvar f = Tvar.mk cx (f reason_op |> derivable_reason) in + let knot = + { + React.CreateClass.this = mk_tvar (replace_desc_reason RThisType); + static = mk_tvar (replace_desc_reason RThisType); + state_t = + mk_tvar + (update_desc_reason (fun d -> + RTypeParam ("State", (d, loc_op), (desc_tapp, loc_tapp)))); + default_t = + mk_tvar + (update_desc_reason (fun d -> + RTypeParam ("Default", (d, loc_op), (desc_tapp, loc_tapp)))); + } + in + rec_flow + cx + trace + ( spec, + ReactKitT + (use_op, reason_op, React.CreateClass (React.CreateClass.Spec [], knot, call_tout)) + ) + | (_, ReactKitT (use_op, reason_op, tool)) -> ReactJs.run cx trace ~use_op reason_op l tool + (* Facebookisms are special Facebook-specific functions that are not expressable with our current type syntax, so 
we've hacked in special handling. Terminate with extreme prejudice. *) - - | CustomFunT (_, DebugPrint), - CallT (_, reason_op, { call_targs = None; call_args_tlist; call_tout; _ }) -> - List.iter (fun arg -> match arg with - | Arg t -> rec_flow cx trace (t, DebugPrintT reason_op) - | SpreadArg t -> - add_output cx ~trace - (FlowError.(EUnsupportedSyntax (loc_of_t t, SpreadArgument))); - ) call_args_tlist; - rec_flow_t cx trace (VoidT.why reason_op, call_tout); - - | CustomFunT (_, DebugThrow), CallT (_, reason_op, _) -> - raise (Flow_error.EDebugThrow (aloc_of_reason reason_op |> ALoc.to_loc)) - - | CustomFunT (_, DebugSleep), - CallT (_, reason_op, { call_targs = None; call_args_tlist=arg1::_; call_tout; _ }) -> - let t = extract_non_spread cx ~trace arg1 in - rec_flow cx trace (t, DebugSleepT reason_op); - rec_flow_t cx trace (VoidT.why reason_op, call_tout) - - | CustomFunT (lreason, ( - Compose _ - | ReactCreateElement - | ReactCloneElement - | ReactElementFactory _ - as kind)), - CallT (use_op, reason_op, calltype) -> - let { - call_targs; - call_args_tlist = args; - call_tout = tout; - call_this_t = _; - call_closure_t = _; - call_strict_arity = _; - } = calltype in - - (* None of the supported custom funs are polymorphic, so error here + | ( CustomFunT (_, DebugPrint), + CallT (use_op, reason_op, { call_targs = None; call_args_tlist; call_tout; _ }) ) -> + List.iter + (fun arg -> + match arg with + | Arg t -> rec_flow cx trace (t, DebugPrintT reason_op) + | SpreadArg t -> + add_output + cx + ~trace + Error_message.(EUnsupportedSyntax (loc_of_t t, SpreadArgument))) + call_args_tlist; + rec_flow_t cx ~use_op trace (VoidT.why reason_op |> with_trust bogus_trust, call_tout) + | (CustomFunT (_, DebugThrow), CallT (_, reason_op, _)) -> + raise (Error_message.EDebugThrow (aloc_of_reason reason_op)) + | ( CustomFunT (_, DebugSleep), + CallT + (use_op, reason_op, { call_targs = None; call_args_tlist = arg1 :: _; call_tout; _ }) + ) -> + let t = extract_non_spread cx ~trace arg1 in + rec_flow cx trace (t, DebugSleepT reason_op); + rec_flow_t cx ~use_op trace (VoidT.why reason_op |> with_trust bogus_trust, call_tout) + | ( CustomFunT + ( lreason, + ( (Compose _ | ReactCreateElement | ReactCloneElement | ReactElementFactory _) as + kind ) ), + CallT (use_op, reason_op, calltype) ) -> + let { + call_targs; + call_args_tlist = args; + call_tout = tout; + call_this_t = _; + call_closure_t = _; + call_strict_arity = _; + } = + calltype + in + (* None of the supported custom funs are polymorphic, so error here instead of threading targs into spread resolution. 
*) - Option.iter call_targs ~f:(fun _ -> - add_output cx ~trace FlowError.( - ECallTypeArity { - call_loc = aloc_of_reason reason_op |> ALoc.to_loc; - is_new = false; - reason_arity = lreason; - expected_arity = 0; - })); - - resolve_call_list cx ~trace ~use_op reason_op args ( - ResolveSpreadsToCustomFunCall (mk_id (), kind, tout)) - - | CustomFunT (reason, _), _ when function_like_op u -> - rec_flow cx trace (DefT (reason, AnyFunT), u) - - (*********************************************) - (* object types deconstruct into their parts *) - (*********************************************) - - (* ObjT -> ObjT *) - - | DefT (lreason, ObjT ({ props_tmap = lflds; _ } as l_obj)), - UseT (use_op, (DefT (ureason, ObjT ({ props_tmap = uflds; _ } as u_obj)) as u_deft)) -> - Type_inference_hooks_js.dispatch_obj_to_obj_hook cx l u_deft; - if lflds = uflds then () - else flow_obj_to_obj cx trace ~use_op (lreason, l_obj) (ureason, u_obj) - - - | DefT (_, ObjT _), UseT (_, NullProtoT _) -> () - - (* InstanceT -> ObjT *) - - | DefT (lreason, InstanceT (_, super, _, { - own_props = lown; - proto_props = lproto; - inst_call_t = lcall; _ })), - UseT (use_op, (DefT (ureason, ObjT { - props_tmap = uflds; - proto_t = uproto; - call_t = ucall; _ }) as u_deft)) -> - Type_inference_hooks_js.dispatch_instance_to_obj_hook cx l u_deft; - - let lflds = - let own_props = Context.find_props cx lown in - let proto_props = Context.find_props cx lproto in - SMap.union own_props proto_props - in - - Option.iter ucall ~f:(fun ucall -> - let prop_name = Some "$call" in - let use_op = Frame (PropertyCompatibility { - prop = prop_name; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, use_op) in - (match lcall with - | Some lcall -> - rec_flow cx trace (Context.find_call cx lcall, - UseT (use_op, Context.find_call cx ucall)) - | None -> - let reason_prop = replace_reason_const (RProperty prop_name) ureason in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, lreason), lreason, prop_name, Some use_op))) - ); - - iter_real_props cx uflds (fun ~is_sentinel s up -> - let use_op = Frame (PropertyCompatibility { - prop = Some s; - lower = lreason; - upper = ureason; - is_sentinel; - }, use_op) in - let propref = - let reason_prop = replace_reason_const (RProperty (Some s)) ureason in - Named (reason_prop, s) - in - match SMap.get s lflds with - | Some lp -> - rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) - | _ -> - let strict = match up with - | Field (_, DefT (_, OptionalT _), _) -> NonstrictReturning (None, None) - | _ -> Strict lreason in - rec_flow cx trace (super, ReposLowerT (lreason, false, - LookupT (ureason, strict, [], propref, LookupProp (use_op, up)))) - ); - - rec_flow cx trace (l, UseT (use_op, uproto)) - - (* For some object `x` and constructor `C`, if `x instanceof C`, then the + Option.iter call_targs ~f:(fun _ -> + add_output + cx + ~trace + Error_message.( + ECallTypeArity + { + call_loc = aloc_of_reason reason_op; + is_new = false; + reason_arity = lreason; + expected_arity = 0; + })); + let make_op_nonlocal = function + | FunCall op -> FunCall { op with local = false } + | FunCallMethod op -> FunCallMethod { op with local = false } + | op -> op + in + let use_op = mod_root_of_use_op make_op_nonlocal use_op in + resolve_call_list + cx + ~trace + ~use_op + reason_op + args + (ResolveSpreadsToCustomFunCall (mk_id (), kind, tout)) + | ( CustomFunT (_, (ObjectAssign | ObjectGetPrototypeOf | ObjectSetPrototypeOf)), + MethodT (use_op, reason_call, _, Named (_, "call"), 
calltype, _) ) -> + rec_flow cx trace (l, CallT (use_op, reason_call, calltype)) + (* Custom functions are still functions, so they have all the prototype properties *) + | (CustomFunT (r, _), _) when function_like_op u -> rec_flow cx trace (FunProtoT r, u) + (*********************************************) + (* object types deconstruct into their parts *) + (*********************************************) + + (* ObjT -> ObjT *) + | ( DefT (lreason, _, ObjT ({ props_tmap = lflds; _ } as l_obj)), + UseT (use_op, (DefT (ureason, _, ObjT ({ props_tmap = uflds; _ } as u_obj)) as u_deft)) + ) -> + Type_inference_hooks_js.dispatch_obj_to_obj_hook cx l u_deft; + let print_fast_path = + match Context.verbose cx with + | Some _ -> true + | _ -> false + in + if lflds = uflds then ( + if print_fast_path then prerr_endline "ObjT ~> ObjT fast path: yes" + ) else ( + if print_fast_path then prerr_endline "ObjT ~> ObjT fast path: no"; + flow_obj_to_obj cx trace ~use_op (lreason, l_obj) (ureason, u_obj) + ) + | (DefT (_, _, ObjT _), UseT (_, NullProtoT _)) -> () + (* InstanceT -> ObjT *) + | ( DefT + ( lreason, + _, + InstanceT + (_, super, _, { own_props = lown; proto_props = lproto; inst_call_t = lcall; _ }) + ), + UseT + ( use_op, + ( DefT + (ureason, _, ObjT { props_tmap = uflds; proto_t = uproto; call_t = ucall; _ }) + as u_deft ) ) ) -> + Type_inference_hooks_js.dispatch_instance_to_obj_hook cx l u_deft; + + let lflds = + let own_props = Context.find_props cx lown in + let proto_props = Context.find_props cx lproto in + SMap.union own_props proto_props + in + Option.iter ucall ~f:(fun ucall -> + let prop_name = Some "$call" in + let use_op = + Frame + ( PropertyCompatibility { prop = prop_name; lower = lreason; upper = ureason }, + use_op ) + in + match lcall with + | Some lcall -> + rec_flow + cx + trace + (Context.find_call cx lcall, UseT (use_op, Context.find_call cx ucall)) + | None -> + let reason_prop = replace_desc_reason (RProperty prop_name) ureason in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + ((reason_prop, lreason), lreason, prop_name, Some use_op))); + + Context.iter_real_props cx uflds (fun s up -> + let use_op = + Frame + ( PropertyCompatibility { prop = Some s; lower = lreason; upper = ureason }, + use_op ) + in + let propref = + let reason_prop = replace_desc_reason (RProperty (Some s)) ureason in + Named (reason_prop, s) + in + match SMap.get s lflds with + | Some lp -> rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) + | _ -> + let strict = + match up with + | Field (_, OptionalT _, _) -> NonstrictReturning (None, None) + | _ -> Strict lreason + in + rec_flow + cx + trace + ( super, + ReposLowerT + ( lreason, + false, + LookupT (ureason, strict, [], propref, LookupProp (use_op, up)) ) )); + + rec_flow cx trace (l, UseT (use_op, uproto)) + (* For some object `x` and constructor `C`, if `x instanceof C`, then the object is a subtype. We use `ExtendsT` to walk the proto chain of the object, in case it includes a nominal type. 
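(* Illustrative sketch (simplified, hypothetical types -- not Flow's real
   representation): the ExtendsT/ExtendsUseT machinery referred to here boils
   down to walking the prototype chain of the lower bound and succeeding as
   soon as a link with the expected nominal class id is found. *)
type proto_chain =
  | Root                        (* ObjProtoT: end of the chain *)
  | Link of int * proto_chain   (* class id of this link, then its super *)

let rec extends_class ~class_id = function
  | Root -> false
  | Link (id, super) -> id = class_id || extends_class ~class_id super

(* e.g. with `class B extends A`, an instance of B is accepted where an A is
   expected: extends_class ~class_id:1 (Link (2, Link (1, Root))) = true *)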
*) - | DefT (_, ObjT _), UseT (use_op, (DefT (_, InstanceT _) as u)) -> - rec_flow cx trace (l, extends_use_type use_op l u) - - (****************************************) - (* You can cast an object to a function *) - (****************************************) - - | DefT (reason, (ObjT _ | InstanceT _)), ( - UseT (use_op, DefT (reason_op, (FunT _ | AnyFunT))) | - CallT (use_op, reason_op, _) - ) -> - let prop_name = Some "$call" in - let use_op = match u with - | UseT (_, DefT (_, (FunT _ | AnyFunT))) -> - Frame (PropertyCompatibility { - prop = prop_name; - lower = reason; - upper = reason_op; - is_sentinel = false; - }, use_op) - | _ -> use_op in - let fun_t = match l with - | DefT (_, ObjT {call_t = Some id; _}) - | DefT (_, InstanceT (_, _, _, {inst_call_t = Some id; _})) -> - Context.find_call cx id - | _ -> - let reason_prop = replace_reason_const (RProperty prop_name) reason_op in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, reason), reason, prop_name, Some use_op)); - AnyT.why reason_op - in - (match u with - | UseT (_, (DefT (_, (FunT _ | AnyFunT)) as u_def)) -> - rec_flow cx trace (fun_t, UseT (use_op, u_def)) - | _ -> rec_flow cx trace (fun_t, u)) - - (******************************) - (* matching shapes of objects *) - (******************************) + | (DefT (_, _, ObjT _), UseT (use_op, (DefT (_, _, InstanceT _) as u))) -> + rec_flow cx trace (l, extends_use_type use_op l u) + (****************************************) + (* You can cast an object to a function *) + (****************************************) + | ( DefT (reason, _, (ObjT _ | InstanceT _)), + ( UseT (use_op, (DefT (reason_op, _, FunT _) | AnyT (reason_op, _))) + | CallT (use_op, reason_op, _) ) ) -> + let prop_name = Some "$call" in + let use_op = + match u with + | UseT (_, (DefT (_, _, FunT _) | AnyT _)) -> + Frame + ( PropertyCompatibility { prop = prop_name; lower = reason; upper = reason_op }, + use_op ) + | _ -> use_op + in + let fun_t = + match l with + | DefT (_, _, ObjT { call_t = Some id; _ }) + | DefT (_, _, InstanceT (_, _, _, { inst_call_t = Some id; _ })) -> + Context.find_call cx id + | _ -> + let reason_prop = replace_desc_reason (RProperty prop_name) reason_op in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + ((reason_prop, reason), reason, prop_name, Some use_op)); + AnyT.error reason_op + in + (match u with + | UseT (_, (DefT (_, _, FunT _) as u_def)) + | UseT (_, (AnyT _ as u_def)) -> + rec_flow cx trace (fun_t, UseT (use_op, u_def)) + | _ -> rec_flow cx trace (fun_t, u)) + (******************************) + (* matching shapes of objects *) + (******************************) - (** When something of type ShapeT(o) is used, it behaves like it had type o. + (* When something of type ShapeT(o) is used, it behaves like it had type o. On the other hand, things that can be passed to something of type ShapeT(o) must be "subobjects" of o: they may have fewer properties, but @@ -4466,497 +4637,477 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = TODO: The type constructors ShapeT, ObjAssignToT/ObjAssignFromT, ObjRestT express related meta-operations on objects. Consolidate these meta-operations and ensure consistency of their semantics. 
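(* Illustrative sketch (simplified, hypothetical types): a lower bound flowing
   into ShapeT(o) may omit properties of o, but every property it does carry
   must also exist on o with an agreeing type -- roughly the "subobject" check
   described above. *)
module PropMap = Map.Make (String)

type ty = Num | Str

let matches_shape ~(shape : ty PropMap.t) (candidate : ty PropMap.t) : bool =
  PropMap.for_all
    (fun name t ->
      match PropMap.find_opt name shape with
      | Some t' -> t = t'   (* property exists on the shape: types must agree *)
      | None -> false)      (* property the shape does not know about: reject *)
    candidate

(* A candidate with fewer properties than the shape is fine:
   matches_shape
     ~shape:(PropMap.add "x" Num (PropMap.add "y" Str PropMap.empty))
     (PropMap.singleton "x" Num)
   = true *)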
**) - - | (ShapeT (o), _) -> rec_flow cx trace (o, u) - - | DefT (reason, ObjT { props_tmap = mapr; call_t = None; _ }), UseT (use_op', ShapeT proto) -> - (* TODO: ShapeT should have its own reason *) - let reason_op = reason_of_t proto in - iter_real_props cx mapr (fun ~is_sentinel x p -> - let use_op = Frame (PropertyCompatibility { - prop = Some x; - lower = reason; - upper = reason_of_t proto; - is_sentinel; - }, use_op') in - let reason_prop = replace_reason (fun desc -> - RPropertyOf (x, desc) - ) reason in - match Property.read_t p with - | Some t -> - let propref = Named (reason_prop, x) in - let t = filter_optional cx ~trace reason_prop t in - rec_flow cx trace (proto, MatchPropT (use_op, reason_op, propref, t)) - | None -> - add_output cx ~trace (FlowError.EPropAccess ( - (reason_prop, reason_op), Some x, Property.polarity p, Read, use_op' - )) - ) - - (* Function definitions are incompatible with ShapeT. ShapeT is meant to - * match an object type with a subset of the props in the type being - * destructured. It would be complicated and confusing to use a function for - * this. - * - * This invariant is important for the React setState() type definition. *) - | DefT (_, (FunT _ | ObjT {call_t = Some _; _})), UseT (use_op, ShapeT o) -> - add_output cx ~trace - (FlowError.EFunctionIncompatibleWithShape (reason_of_t l, reason_of_t o, use_op)) - - | (_, UseT (_, ShapeT (o))) -> - let reason = reason_of_t o in - rec_flow cx trace (l, ObjAssignFromT (reason, o, Locationless.AnyT.t, default_obj_assign_kind)) - - | DefT (_, AnyT), ObjTestT (reason_op, _, u) -> - rec_flow_t cx trace (AnyT.why reason_op, u) - - | _, ObjTestT (reason_op, default, u) -> - let u = ReposLowerT (reason_op, false, UseT (unknown_use, u)) in - if object_like l - then rec_flow cx trace (l, u) - else rec_flow cx trace (default, u) - - | DefT (_, (AnyT | AnyObjT)), ObjTestProtoT (reason_op, u) -> - rec_flow_t cx trace (AnyT.why reason_op, u) - - | DefT (_, NullT), ObjTestProtoT (reason_op, u) -> - rec_flow_t cx trace (NullProtoT.why reason_op, u) - - | _, ObjTestProtoT (reason_op, u) -> - let proto = - if object_like l - then reposition cx ~trace (aloc_of_reason reason_op |> ALoc.to_loc) l - else - let () = add_output cx ~trace - (FlowError.EInvalidPrototype (reason_of_t l)) in - ObjProtoT.why reason_op - in - rec_flow_t cx trace (proto, u) - - (********************************************) - (* array types deconstruct into their parts *) - (********************************************) - - (* Arrays can flow to arrays *) - | DefT (r1, ArrT (ArrayAT (t1, ts1))), - UseT (use_op, DefT (r2, ArrT (ArrayAT (t2, ts2)))) -> - let use_op = Frame (ArrayElementCompatibility { - lower = r1; - upper = r2; - }, use_op) in - let lit1 = (desc_of_reason r1) = RArrayLit in - let ts1 = Option.value ~default:[] ts1 in - let ts2 = Option.value ~default:[] ts2 in - array_flow cx trace use_op lit1 r1 (ts1, t1, ts2, t2) - - (* Tuples can flow to tuples with the same arity *) - | DefT (r1, ArrT (TupleAT (_, ts1))), - UseT (use_op, DefT (r2, ArrT (TupleAT (_, ts2)))) -> - let fresh = (desc_of_reason r1) = RArrayLit in - let l1 = List.length ts1 in - let l2 = List.length ts2 in - if l1 <> l2 then - add_output cx ~trace (FlowError.ETupleArityMismatch - ((r1, r2), l1, l2, use_op)); - let n = ref 0 in - iter2opt (fun t1 t2 -> - match t1, t2 with - | Some t1, Some t2 -> - n := !n + 1; - let use_op = Frame (TupleElementCompatibility { - n = !n; - lower = r1; - upper = r2; - }, use_op) in - flow_to_mutable_child cx trace use_op fresh t1 t2 - | _ 
-> () - ) (ts1, ts2); - - (* Arrays with known elements can flow to tuples *) - | DefT (r1, ArrT (ArrayAT (t1, ts1))), - UseT (use_op, DefT (r2, ArrT (TupleAT _))) -> - begin match ts1 with - | None -> add_output cx ~trace (FlowError.ENonLitArrayToTuple ((r1, r2), use_op)) - | Some ts1 -> - rec_flow cx trace (DefT (r1, ArrT (TupleAT (t1, ts1))), u) - end - - (* EmptyAT arrays are the subtype of all arrays *) - | DefT (_, ArrT EmptyAT), UseT (_, DefT (_, ArrT _)) -> () - - (* Read only arrays are the super type of all tuples and arrays *) - | DefT (r1, ArrT (ArrayAT (t1, _) | TupleAT (t1, _) | ROArrayAT (t1))), - UseT (use_op, DefT (r2, ArrT (ROArrayAT (t2)))) -> - let use_op = Frame (ArrayElementCompatibility { - lower = r1; - upper = r2; - }, use_op) in - rec_flow cx trace (t1, UseT (use_op, t2)) - - (**************************************************) - (* instances of classes follow declared hierarchy *) - (**************************************************) - - | DefT (_, InstanceT _), UseT (use_op, (DefT (_, InstanceT _) as u)) -> - rec_flow cx trace (l, extends_use_type use_op l u) - - | DefT (reason, InstanceT (_, super, implements, instance)), - ExtendsUseT (use_op, reason_op, try_ts_on_failure, l, - (DefT (_, InstanceT (_, _, _, instance_super)) as u)) -> - if instance.class_id = instance_super.class_id - then begin - (if instance.class_id != instance_super.class_id then - assert_false "unexpected difference in class_ids in flow_instts"); - let { type_args = tmap1; _ } = instance in - let { type_args = tmap2; _ } = instance_super in - let ureason = replace_reason (function RExtends desc -> desc | desc -> desc) reason_op in - flow_type_args cx trace ~use_op reason ureason tmap1 tmap2 - end - else - (* If this instance type has declared implementations, any structural + | (ShapeT o, _) -> rec_flow cx trace (o, u) + | (DefT (reason, _, ObjT ({ call_t = None; _ } as o)), UseT (use_op, ShapeT proto)) -> + let props = Context.find_real_props cx o.props_tmap in + match_shape cx trace ~use_op proto reason props + | ( DefT (reason, _, InstanceT (_, _, _, ({ inst_call_t = None; _ } as i))), + UseT (use_op, ShapeT proto) ) -> + let own_props = Context.find_props cx i.own_props in + let proto_props = Context.find_props cx i.proto_props in + let proto_props = + match i.inst_kind with + | InterfaceKind _ -> proto_props + | ClassKind -> SMap.remove "constructor" proto_props + in + let props = SMap.union own_props proto_props in + match_shape cx trace ~use_op proto reason props + (* Function definitions are incompatible with ShapeT. ShapeT is meant to + * match an object type with a subset of the props in the type being + * destructured. It would be complicated and confusing to use a function for + * this. + * + * This invariant is important for the React setState() type definition. 
*) + | (_, UseT (use_op, ShapeT o)) -> + add_output + cx + ~trace + (Error_message.EIncompatibleWithShape (reason_of_t l, reason_of_t o, use_op)) + | (AnyT (_, src), ObjTestT (reason_op, _, u)) -> + rec_flow_t cx trace (AnyT.why src reason_op, u) + | (_, ObjTestT (reason_op, default, u)) -> + let u = ReposLowerT (reason_op, false, UseT (unknown_use, u)) in + if object_like l then + rec_flow cx trace (l, u) + else + rec_flow cx trace (default, u) + | (AnyT (_, src), ObjTestProtoT (reason_op, u)) -> + rec_flow_t cx trace (AnyT.why src reason_op, u) + | (DefT (_, trust, NullT), ObjTestProtoT (reason_op, u)) -> + rec_flow_t cx trace (NullProtoT.why reason_op trust, u) + | (_, ObjTestProtoT (reason_op, u)) -> + let proto = + if object_like l then + reposition cx ~trace (aloc_of_reason reason_op) l + else + let () = add_output cx ~trace (Error_message.EInvalidPrototype (reason_of_t l)) in + ObjProtoT.why reason_op |> with_trust bogus_trust + in + rec_flow_t cx trace (proto, u) + (********************************************) + (* array types deconstruct into their parts *) + (********************************************) + + (* Arrays can flow to arrays *) + | ( DefT (r1, _, ArrT (ArrayAT (t1, ts1))), + UseT (use_op, DefT (r2, _, ArrT (ArrayAT (t2, ts2)))) ) -> + let use_op = Frame (ArrayElementCompatibility { lower = r1; upper = r2 }, use_op) in + let lit1 = desc_of_reason r1 = RArrayLit in + let ts1 = Option.value ~default:[] ts1 in + let ts2 = Option.value ~default:[] ts2 in + array_flow cx trace use_op lit1 r1 (ts1, t1, ts2, t2) + (* Tuples can flow to tuples with the same arity *) + | ( DefT (r1, _, ArrT (TupleAT (_, ts1))), + UseT (use_op, DefT (r2, _, ArrT (TupleAT (_, ts2)))) ) -> + let fresh = desc_of_reason r1 = RArrayLit in + let l1 = List.length ts1 in + let l2 = List.length ts2 in + if l1 <> l2 then + add_output cx ~trace (Error_message.ETupleArityMismatch ((r1, r2), l1, l2, use_op)); + let n = ref 0 in + iter2opt + (fun t1 t2 -> + match (t1, t2) with + | (Some t1, Some t2) -> + n := !n + 1; + let use_op = + Frame (TupleElementCompatibility { n = !n; lower = r1; upper = r2 }, use_op) + in + flow_to_mutable_child cx trace use_op fresh t1 t2 + | _ -> ()) + (ts1, ts2) + (* Arrays with known elements can flow to tuples *) + | ( DefT (r1, trust, ArrT (ArrayAT (t1, ts1))), + UseT (use_op, DefT (r2, _, ArrT (TupleAT _))) ) -> + begin + match ts1 with + | None -> add_output cx ~trace (Error_message.ENonLitArrayToTuple ((r1, r2), use_op)) + | Some ts1 -> rec_flow cx trace (DefT (r1, trust, ArrT (TupleAT (t1, ts1))), u) + end + (* Read only arrays are the super type of all tuples and arrays *) + | ( DefT (r1, _, ArrT (ArrayAT (t1, _) | TupleAT (t1, _) | ROArrayAT t1)), + UseT (use_op, DefT (r2, _, ArrT (ROArrayAT t2))) ) -> + let use_op = Frame (ArrayElementCompatibility { lower = r1; upper = r2 }, use_op) in + rec_flow cx trace (t1, UseT (use_op, t2)) + | (DefT (_, _, InstanceT _), UseT (use_op, DefT (r2, _, ArrT (ArrayAT (elemt, _))))) -> + let arrt = get_builtin_typeapp cx ~trace r2 "Array" [elemt] in + rec_flow cx trace (l, UseT (use_op, arrt)) + | (DefT (_, _, InstanceT _), UseT (use_op, DefT (r2, _, ArrT (ROArrayAT elemt)))) -> + let arrt = get_builtin_typeapp cx ~trace r2 "$ReadOnlyArray" [elemt] in + rec_flow cx trace (l, UseT (use_op, arrt)) + (**************************************************) + (* instances of classes follow declared hierarchy *) + (**************************************************) + | (DefT (_, _, InstanceT _), UseT (use_op, (DefT (_, _, InstanceT _) as u))) -> 
+ rec_flow cx trace (l, extends_use_type use_op l u) + | ( DefT (reason, _, InstanceT (_, super, implements, instance)), + ExtendsUseT + ( use_op, + reason_op, + try_ts_on_failure, + l, + (DefT (_, _, InstanceT (_, _, _, instance_super)) as u) ) ) -> + if + ALoc.concretize_equal + (Context.aloc_tables cx) + instance.class_id + instance_super.class_id + then + let { type_args = tmap1; _ } = instance in + let { type_args = tmap2; _ } = instance_super in + let ureason = + update_desc_reason + (function + | RExtends desc -> desc + | desc -> desc) + reason_op + in + flow_type_args cx trace ~use_op reason ureason tmap1 tmap2 + else + (* If this instance type has declared implementations, any structural tests have already been performed at the declaration site. We can then use the ExtendsT use type to search for a nominally matching implementation, thereby short-circuiting a potentially expensive structural test at the use site. *) - let u = ExtendsUseT (use_op, reason_op, try_ts_on_failure @ implements, l, u) in - rec_flow cx trace (super, ReposLowerT (reason, false, u)) - - (********************************************************) - (* runtime types derive static types through annotation *) - (********************************************************) - - | DefT (_, ClassT it), UseT (_, DefT (r, TypeT (_, t))) -> - (* a class value annotation becomes the instance type *) - rec_flow cx trace (it, BecomeT (r, t)) - - | DefT (_, AnyT), UseT (_, DefT (reason, TypeT (_, t))) -> - (* any can function as class, hence ok for annotations *) - rec_flow cx trace (l, BecomeT (reason, t)) - - | DefT (_, TypeT (_, l)), UseT (use_op, DefT (_, TypeT (_, u))) -> - rec_unify cx trace ~use_op ~unify_any:true l u - - (* non-class/function values used in annotations are errors *) - | _, UseT (_, DefT (ru, TypeT _)) -> - add_output cx ~trace (FlowError.EValueUsedAsType (reason_of_t l, ru)) - - | DefT (rl, ClassT l), UseT (use_op, DefT (_, ClassT u)) -> - rec_flow cx trace ( - reposition cx ~trace (aloc_of_reason rl |> ALoc.to_loc) l, - UseT (use_op, u)) - - | DefT (_, FunT (static1, prototype, _)), - UseT (use_op, DefT (_, ClassT (DefT (_, InstanceT (static2, _, _, _)) as u_))) -> - rec_unify cx trace ~use_op static1 static2; - rec_unify cx trace ~use_op prototype u_ - - | DefT (_, AnyT), UseT (use_op, DefT (_, ClassT u)) -> - rec_flow cx trace (l, UseT (use_op, u)) - - (*********************************************************) - (* class types derive instance types (with constructors) *) - (*********************************************************) - - | DefT (reason, ClassT this), - ConstructorT (use_op, reason_op, targs, args, t) -> - let reason_o = replace_reason_const RConstructorReturn reason in - (* early error if type args passed to non-polymorphic class *) - Option.iter targs ~f:(fun _ -> - add_output cx ~trace FlowError.(ECallTypeArity { - call_loc = aloc_of_reason reason_op |> ALoc.to_loc; - is_new = true; - reason_arity = reason_of_t this; - expected_arity = 0; - })); - (* call this.constructor(args) *) - let ret = Tvar.mk_where cx reason_op (fun t -> - let funtype = mk_methodcalltype this None args t in - let propref = Named (reason_o, "constructor") in - rec_flow cx trace ( - this, - MethodT (use_op, reason_op, reason_o, propref, funtype, None) - ); - ) in - (* return this *) - rec_flow cx trace (ret, ObjTestT (annot_reason reason_op, this, t)) - - (****************************************************************) - (* function types derive objects through explicit instantiation *) - 
(****************************************************************) - - | DefT (lreason, FunT (_, proto, ({ - this_t = this; - return_t = ret; - _ } as ft))), - ConstructorT (use_op, reason_op, targs, args, t) -> - (* TODO: closure *) - (** create new object **) - let reason_c = replace_reason_const RNewObject reason_op in - let objtype = - let sealed = UnsealedInFile (Loc.source (loc_of_t proto)) in - let flags = { default_flags with sealed } in - let dict = None in - let call = None in - let pmap = Context.make_property_map cx SMap.empty in - mk_objecttype ~flags ~dict ~call pmap proto - in - let new_obj = DefT (reason_c, ObjT objtype) in - (** error if type arguments are provided to non-polymorphic constructor **) - Option.iter targs ~f:(fun _ -> - add_output cx ~trace FlowError.(ECallTypeArity { - call_loc = aloc_of_reason reason_op |> ALoc.to_loc; - is_new = true; - reason_arity = lreason; - expected_arity = 0; - })); - (** call function with this = new_obj, params = args **) - rec_flow_t cx trace (new_obj, this); - multiflow_call cx trace ~use_op reason_op args ft; - (** if ret is object-like, return ret; otherwise return new_obj **) - let reason_o = replace_reason_const RConstructorReturn reason_op in - rec_flow cx trace (ret, ObjTestT(reason_o, new_obj, t)) - - | DefT (_, AnyFunT), ConstructorT (use_op, reason_op, targs, args, t) -> - let reason_o = replace_reason_const RConstructorReturn reason_op in - ignore targs; (* An untyped receiver can't do anything with type args *) - call_args_iter - (fun t -> rec_flow cx trace (t, UseT (use_op, AnyT.why reason_op))) - args; - rec_flow_t cx trace (DefT (reason_o, AnyObjT), t); - - | DefT (_, AnyT), ConstructorT (use_op, reason_op, targs, args, t) -> - ignore targs; (* An untyped receiver can't do anything with type args *) - call_args_iter (fun t -> - rec_flow cx trace (t, UseT (use_op, AnyT.why reason_op)) - ) args; - rec_flow_t cx trace (AnyT.why reason_op, t); - - (* Since we don't know the signature of a method on AnyFunT, assume every + let u = ExtendsUseT (use_op, reason_op, try_ts_on_failure @ implements, l, u) in + rec_flow cx trace (super, ReposLowerT (reason, false, u)) + (********************************************************) + (* runtime types derive static types through annotation *) + (********************************************************) + | (DefT (_, _, ClassT it), UseT (_, DefT (r, _, TypeT (_, t)))) -> + (* a class value annotation becomes the instance type *) + rec_flow cx trace (it, BecomeT (r, t)) + | (DefT (_, _, TypeT (_, l)), UseT (use_op, DefT (_, _, TypeT (_, u)))) -> + rec_unify cx trace ~use_op ~unify_any:true l u + (* non-class/function values used in annotations are errors *) + | (_, UseT (_, DefT (reason_use, _, TypeT _))) -> + add_output cx ~trace Error_message.(EValueUsedAsType { reason_use }) + | (DefT (rl, _, ClassT l), UseT (use_op, DefT (_, _, ClassT u))) -> + rec_flow cx trace (reposition cx ~trace (aloc_of_reason rl) l, UseT (use_op, u)) + | ( DefT (_, _, FunT (static1, prototype, _)), + UseT (use_op, DefT (_, _, ClassT (DefT (_, _, InstanceT (static2, _, _, _)) as u_))) ) + -> + rec_unify cx trace ~use_op static1 static2; + rec_unify cx trace ~use_op prototype u_ + (*********************************************************) + (* class types derive instance types (with constructors) *) + (*********************************************************) + | (DefT (reason, _, ClassT this), ConstructorT (use_op, reason_op, targs, args, t)) -> + let reason_o = replace_desc_reason RConstructorReturn 
reason in + (* early error if type args passed to non-polymorphic class *) + Option.iter targs ~f:(fun _ -> + add_output + cx + ~trace + Error_message.( + ECallTypeArity + { + call_loc = aloc_of_reason reason_op; + is_new = true; + reason_arity = reason_of_t this; + expected_arity = 0; + })); + + (* call this.constructor(args) *) + let ret = + Tvar.mk_where cx reason_op (fun t -> + let funtype = mk_methodcalltype this None args t in + let propref = Named (reason_o, "constructor") in + rec_flow + cx + trace + (this, MethodT (use_op, reason_op, reason_o, propref, funtype, None))) + in + (* return this *) + rec_flow cx trace (ret, ObjTestT (annot_reason reason_op, this, t)) + (****************************************************************) + (* function types derive objects through explicit instantiation *) + (****************************************************************) + | ( DefT (lreason, _, FunT (_, proto, ({ this_t = this; return_t = ret; _ } as ft))), + ConstructorT (use_op, reason_op, targs, args, t) ) -> + (* TODO: closure *) + (* create new object **) + let reason_c = replace_desc_reason RNewObject reason_op in + let objtype = + let sealed = UnsealedInFile (ALoc.source (loc_of_t proto)) in + let flags = { default_flags with sealed } in + let dict = None in + let call = None in + let pmap = Context.generate_property_map cx SMap.empty in + mk_objecttype ~flags ~dict ~call pmap proto + in + let new_obj = DefT (reason_c, bogus_trust (), ObjT objtype) in + (* error if type arguments are provided to non-polymorphic constructor **) + Option.iter targs ~f:(fun _ -> + add_output + cx + ~trace + Error_message.( + ECallTypeArity + { + call_loc = aloc_of_reason reason_op; + is_new = true; + reason_arity = lreason; + expected_arity = 0; + })); + + (* call function with this = new_obj, params = args **) + rec_flow_t cx trace (new_obj, this); + multiflow_call cx trace ~use_op reason_op args ft; + + (* if ret is object-like, return ret; otherwise return new_obj **) + let reason_o = replace_desc_reason RConstructorReturn reason_op in + rec_flow cx trace (ret, ObjTestT (reason_o, new_obj, t)) + | (AnyT _, ConstructorT (use_op, reason_op, targs, args, t)) -> + ignore targs; + + (* An untyped receiver can't do anything with type args *) + call_args_iter + (fun t -> rec_flow cx trace (t, UseT (use_op, AnyT.untyped reason_op))) + args; + rec_flow_t cx trace (AnyT.untyped reason_op, t) + (* Since we don't know the signature of a method on AnyT, assume every parameter is an AnyT. 
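(* Illustrative sketch (simplified, hypothetical values): the FunT ~>
   ConstructorT case above mirrors what `new f(...)` does at runtime --
   allocate a fresh object whose prototype comes from f, run f with `this`
   bound to that object, and keep f's return value only when it is itself
   object-like; otherwise the freshly allocated object is the result. *)
type value =
  | Obj of (string * value) list
  | Num of float
  | Undefined

let construct (f : this:value -> value) : value =
  let new_obj = Obj [] in           (* fresh object; proto wiring elided *)
  match f ~this:new_obj with
  | Obj _ as returned -> returned   (* object-like return wins *)
  | Num _ | Undefined -> new_obj    (* otherwise `new` yields the receiver *)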
*) - | DefT (_, AnyFunT), - MethodT (use_op, reason_op, _, _, { call_args_tlist; call_tout; _}, prop_t) -> - let any = AnyT.why reason_op in - call_args_iter (fun t -> rec_flow cx trace (t, UseT (use_op, any))) call_args_tlist; - Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (any, prop_t)) prop_t; - rec_flow_t cx trace (any, call_tout) - - (*************************) - (* statics can be read *) - (*************************) - - | DefT (_, InstanceT (static, _, _, _)), GetStaticsT (reason_op, tout) -> - rec_flow cx trace (static, ReposLowerT (reason_op, false, - UseT (unknown_use, tout))) - - | DefT (_, AnyT), GetStaticsT (reason_op, tout) -> - rec_flow_t cx trace (AnyT.why reason_op, tout) - - | ObjProtoT _, GetStaticsT (reason_op, tout) -> - (* ObjProtoT not only serves as the instance type of the root class, but + | (AnyT _, MethodT (use_op, reason_op, _, _, { call_args_tlist; call_tout; _ }, prop_t)) -> + let any = AnyT.untyped reason_op in + call_args_iter (fun t -> rec_flow cx trace (t, UseT (use_op, any))) call_args_tlist; + Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (any, prop_t)) prop_t; + rec_flow_t cx trace (any, call_tout) + (*************************) + (* statics can be read *) + (*************************) + | (DefT (_, _, InstanceT (static, _, _, _)), GetStaticsT (reason_op, tout)) -> + rec_flow cx trace (static, ReposLowerT (reason_op, false, UseT (unknown_use, tout))) + | (AnyT (_, src), GetStaticsT (reason_op, tout)) -> + rec_flow_t cx trace (AnyT.why src reason_op, tout) + | (ObjProtoT _, GetStaticsT (reason_op, tout)) -> + (* ObjProtoT not only serves as the instance type of the root class, but also as the statics of the root class. *) - rec_flow cx trace (l, ReposLowerT (reason_op, false, - UseT (unknown_use, tout))) + rec_flow cx trace (l, ReposLowerT (reason_op, false, UseT (unknown_use, tout))) + (********************) + (* __proto__ getter *) + (********************) - (********************) - (* __proto__ getter *) - (********************) - - (* TODO: Fix GetProtoT for InstanceT (and ClassT). + (* TODO: Fix GetProtoT for InstanceT (and ClassT). The __proto__ object of an instance is an ObjT having the properties in insttype.methods_tmap, not the super instance. 
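(* Illustrative sketch (simplified model of the GetProtoT cases below):
   __proto__ reads resolve structurally -- an instance currently defers to its
   super type (see the TODO above), an object type exposes its stored proto,
   and the two root protos bottom out at Object.prototype and null. *)
type ty =
  | Instance of ty   (* payload: the super type *)
  | Obj of ty        (* payload: proto_t *)
  | ObjProto
  | FunProto
  | Null
  | Any

let get_proto = function
  | Instance super -> super
  | Obj proto -> proto
  | ObjProto -> Null       (* Object.prototype.__proto__ is null *)
  | FunProto -> ObjProto   (* Function.prototype.__proto__ is Object.prototype *)
  | (Null | Any) as t -> t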
*) - | DefT (_, InstanceT (_, super, _, _)), GetProtoT (reason_op, t) -> - let proto = reposition cx ~trace (aloc_of_reason reason_op |> ALoc.to_loc) super in - rec_flow_t cx trace (proto, t) - - | DefT (_, ObjT {proto_t; _}), GetProtoT (reason_op, t) -> - let proto = reposition cx ~trace (aloc_of_reason reason_op |> ALoc.to_loc) proto_t in - rec_flow_t cx trace (proto, t) - - | ObjProtoT _, GetProtoT (reason_op, t) -> - let proto = NullT.why reason_op in - rec_flow_t cx trace (proto, t) - - | FunProtoT reason, GetProtoT (reason_op, t) -> - let proto = ObjProtoT (repos_reason (aloc_of_reason reason_op |> ALoc.to_loc) reason) in - rec_flow_t cx trace (proto, t) - - | DefT (_, (AnyT | AnyObjT | AnyFunT)), GetProtoT (reason_op, t) -> - let proto = AnyT.why reason_op in - rec_flow_t cx trace (proto, t) - - (********************) - (* __proto__ setter *) - (********************) - - | DefT (_, (AnyT | AnyObjT | AnyFunT)), SetProtoT _ -> () - - | _, SetProtoT (reason_op, _) -> - add_output cx ~trace (FlowError.EUnsupportedSetProto reason_op) - - (********************************************************) - (* instances of classes may have their fields looked up *) - (********************************************************) - - | DefT (lreason, InstanceT (_, super, _, instance)), - LookupT (reason_op, kind, try_ts_on_failure, (Named (_, x) as propref), action) -> - let own_props = Context.find_props cx instance.own_props in - let proto_props = Context.find_props cx instance.proto_props in - let pmap = SMap.union own_props proto_props in - (match SMap.get x pmap with - | None -> - (* If there are unknown mixins, the lookup should become nonstrict, as + | (DefT (_, _, InstanceT (_, super, _, _)), GetProtoT (reason_op, t)) -> + let proto = reposition cx ~trace (aloc_of_reason reason_op) super in + rec_flow_t cx trace (proto, t) + | (DefT (_, _, ObjT { proto_t; _ }), GetProtoT (reason_op, t)) -> + let proto = reposition cx ~trace (aloc_of_reason reason_op) proto_t in + rec_flow_t cx trace (proto, t) + | (ObjProtoT _, GetProtoT (reason_op, t)) -> + let proto = NullT.why reason_op |> with_trust bogus_trust in + rec_flow_t cx trace (proto, t) + | (FunProtoT reason, GetProtoT (reason_op, t)) -> + let proto = ObjProtoT (repos_reason (aloc_of_reason reason_op) reason) in + rec_flow_t cx trace (proto, t) + | (AnyT _, GetProtoT (reason_op, t)) -> + let proto = AnyT.untyped reason_op in + rec_flow_t cx trace (proto, t) + (********************) + (* __proto__ setter *) + (********************) + | (AnyT _, SetProtoT _) -> () + | (_, SetProtoT (reason_op, _)) -> + add_output cx ~trace (Error_message.EUnsupportedSetProto reason_op) + (********************************************************) + (* instances of classes may have their fields looked up *) + (********************************************************) + | ( DefT (lreason, _, InstanceT (_, super, _, instance)), + LookupT (reason_op, kind, try_ts_on_failure, (Named (_, x) as propref), action) ) -> + let own_props = Context.find_props cx instance.own_props in + let proto_props = Context.find_props cx instance.proto_props in + let pmap = SMap.union own_props proto_props in + (match SMap.get x pmap with + | None -> + (* If there are unknown mixins, the lookup should become nonstrict, as the searched-for property may be found in a mixin. 
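(* Illustrative sketch (simplified, hypothetical helpers): the instance
   LookupT cases in this hunk search own and proto properties together and
   only continue to the super type on a miss; unknown React mixins downgrade a
   strict lookup so that the miss is not reported as an error too early. *)
module NameMap = Map.Make (String)

type strictness = Strict | Nonstrict

let lookup_on_instance ~own ~proto ~has_unknown_mixins ~strict ~lookup_super name =
  let props = NameMap.union (fun _ own_p _ -> Some own_p) own proto in
  match NameMap.find_opt name props with
  | Some p -> Some p
  | None ->
    let strict = if has_unknown_mixins then Nonstrict else strict in
    lookup_super ~strict name   (* keep searching the declared hierarchy *)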
*) - let kind = match instance.has_unknown_react_mixins, kind with - | true, Strict _ -> NonstrictReturning (None, None) - | _ -> kind - in - rec_flow cx trace (super, - LookupT (reason_op, kind, try_ts_on_failure, propref, action)) - | Some p -> - (match kind with - | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id - | _ -> ()); - perform_lookup_action cx trace propref p lreason reason_op action) - | DefT (_, InstanceT _), LookupT (reason_op, _, _, Computed _, _) -> - (* Instances don't have proper dictionary support. All computed accesses + let kind = + match (instance.has_unknown_react_mixins, kind) with + | (true, Strict _) -> NonstrictReturning (None, None) + | _ -> kind + in + rec_flow cx trace (super, LookupT (reason_op, kind, try_ts_on_failure, propref, action)) + | Some p -> + (match kind with + | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id + | _ -> ()); + perform_lookup_action cx trace propref p PropertyMapProperty lreason reason_op action) + | (DefT (_, _, InstanceT _), LookupT (reason_op, _, _, Computed _, _)) -> + (* Instances don't have proper dictionary support. All computed accesses are converted to named property access to `$key` and `$value` during element resolution in ElemT. *) - let loc = aloc_of_reason reason_op in - add_output cx ~trace FlowError.(EInternal (loc |> ALoc.to_loc, InstanceLookupComputed)) - - (********************************) - (* ... and their fields written *) - (********************************) - - | DefT (reason_c, InstanceT (_, super, _, instance)), - SetPropT (use_op, reason_op, Named (reason_prop, x), wr_ctx, tin, prop_t) -> - let own_props = Context.find_props cx instance.own_props in - let proto_props = Context.find_props cx instance.proto_props in - let fields = SMap.union own_props proto_props in - let strict = Strict reason_c in - set_prop cx ~wr_ctx trace ~use_op reason_prop reason_op strict l super x - fields tin prop_t; - - | DefT (reason_c, InstanceT _), - SetPrivatePropT (use_op, reason_op, x, [], _, _, _) -> - add_output cx ~trace (FlowError.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) - - | DefT (reason_c, InstanceT (_, _, _, instance)), - SetPrivatePropT (use_op, reason_op, x, scope::scopes, static, tin, prop_t) -> - if scope.class_binding_id != instance.class_id then - rec_flow cx trace ( - l, SetPrivatePropT (use_op, reason_op, x, scopes, static, tin, prop_t) - ) - else ( - let map = - if static - then scope.class_private_static_fields - else scope.class_private_fields - in - match SMap.get x (Context.find_props cx map) with - | None -> - add_output cx ~trace (FlowError.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) - | Some p -> - let action = RWProp (use_op, l, tin, Write (Normal, prop_t)) in - let propref = Named (reason_op, x) in - perform_lookup_action cx trace propref p reason_c reason_op action - ) - - | DefT (_, InstanceT _), SetPropT (_, reason_op, Computed _, _, _, _) -> - (* Instances don't have proper dictionary support. All computed accesses + let loc = aloc_of_reason reason_op in + add_output cx ~trace Error_message.(EInternal (loc, InstanceLookupComputed)) + (********************************) + (* ... 
and their fields written *) + (********************************) + | ( DefT (reason_c, _, InstanceT (_, super, _, instance)), + SetPropT (use_op, reason_op, Named (reason_prop, x), mode, wr_ctx, tin, prop_t) ) -> + let own_props = Context.find_props cx instance.own_props in + let proto_props = Context.find_props cx instance.proto_props in + let fields = SMap.union own_props proto_props in + let strict = Strict reason_c in + set_prop + cx + ~mode + ~wr_ctx + trace + ~use_op + reason_prop + reason_op + strict + l + super + x + fields + tin + prop_t + | (DefT (reason_c, _, InstanceT _), SetPrivatePropT (use_op, reason_op, x, _, [], _, _, _)) + -> + add_output + cx + ~trace + (Error_message.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) + | ( DefT (reason_c, _, InstanceT (_, _, _, instance)), + SetPrivatePropT (use_op, reason_op, x, mode, scope :: scopes, static, tin, prop_tout) + ) -> + if scope.class_binding_id != instance.class_id then + rec_flow + cx + trace + (l, SetPrivatePropT (use_op, reason_op, x, mode, scopes, static, tin, prop_tout)) + else + let map = + if static then + scope.class_private_static_fields + else + scope.class_private_fields + in + (match SMap.get x (Context.find_props cx map) with + | None -> + add_output + cx + ~trace + (Error_message.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) + | Some p -> + let action = + WriteProp { use_op; obj_t = l; prop_tout; tin; write_ctx = Normal; mode } + in + let propref = Named (reason_op, x) in + perform_lookup_action + cx + trace + propref + p + PropertyMapProperty + reason_c + reason_op + action) + | (DefT (_, _, InstanceT _), SetPropT (_, reason_op, Computed _, _, _, _, _)) -> + (* Instances don't have proper dictionary support. All computed accesses are converted to named property access to `$key` and `$value` during element resolution in ElemT. *) - let loc = aloc_of_reason reason_op in - add_output cx ~trace FlowError.(EInternal (loc |> ALoc.to_loc, InstanceLookupComputed)) - - | DefT (reason_c, InstanceT (_, super, _, instance)), - MatchPropT (use_op, reason_op, Named (reason_prop, x), prop_t) -> - let own_props = Context.find_props cx instance.own_props in - let proto_props = Context.find_props cx instance.proto_props in - let fields = SMap.union own_props proto_props in - let strict = Strict reason_c in - match_prop cx trace ~use_op reason_prop reason_op strict super x fields prop_t - - (*****************************) - (* ... 
and their fields read *) - (*****************************) - - | DefT (_, InstanceT _) as instance, GetPropT (_, _, Named (_, "constructor"), t) -> - rec_flow_t cx trace (class_type instance, t) - - | DefT (reason_c, InstanceT (_, super, _, instance)), - GetPropT (use_op, reason_op, Named (reason_prop, x), tout) -> - let own_props = Context.find_props cx instance.own_props in - let proto_props = Context.find_props cx instance.proto_props in - let fields = SMap.union own_props proto_props in - let strict = - if instance.has_unknown_react_mixins then NonstrictReturning (None, None) - else Strict reason_c - in - get_prop cx trace ~use_op reason_prop reason_op strict l super x fields tout - - | DefT (reason_c, InstanceT _), - GetPrivatePropT (use_op, reason_op, x, [], _, _) -> - add_output cx ~trace (FlowError.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) - - | DefT (reason_c, InstanceT (_, _, _, instance)), - GetPrivatePropT (use_op, reason_op, x, scope::scopes, static, tout) -> - if scope.class_binding_id <> instance.class_id then - rec_flow cx trace (l, GetPrivatePropT (use_op, reason_op, x, scopes, static, tout)) - else - let map = - if static - then scope.class_private_static_fields - else scope.class_private_fields - in - (match SMap.get x (Context.find_props cx map) with - | None -> - add_output cx ~trace (FlowError.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) - | Some p -> - let action = RWProp (use_op, l, tout, Read) in - let propref = Named (reason_op, x) in - perform_lookup_action cx trace propref p reason_c reason_op action) - - | DefT (_, InstanceT _), GetPropT (_, reason_op, Computed _, _) -> - (* Instances don't have proper dictionary support. All computed accesses + let loc = aloc_of_reason reason_op in + add_output cx ~trace Error_message.(EInternal (loc, InstanceLookupComputed)) + | ( DefT (reason_c, _, InstanceT (_, super, _, instance)), + MatchPropT (use_op, reason_op, Named (reason_prop, x), prop_t) ) -> + let own_props = Context.find_props cx instance.own_props in + let proto_props = Context.find_props cx instance.proto_props in + let fields = SMap.union own_props proto_props in + let strict = Strict reason_c in + match_prop cx trace ~use_op reason_prop reason_op strict super x fields prop_t + (*****************************) + (* ... 
and their fields read *) + (*****************************) + | ((DefT (r, _, InstanceT _) as instance), GetPropT (_, _, Named (_, "constructor"), t)) -> + rec_flow_t cx trace (class_type ?annot_loc:(annot_aloc_of_reason r) instance, t) + | ( DefT (reason_c, _, InstanceT (_, super, _, instance)), + GetPropT (use_op, reason_op, Named (reason_prop, x), tout) ) -> + let own_props = Context.find_props cx instance.own_props in + let proto_props = Context.find_props cx instance.proto_props in + let fields = SMap.union own_props proto_props in + let strict = + if instance.has_unknown_react_mixins then + NonstrictReturning (None, None) + else + Strict reason_c + in + get_prop cx trace ~use_op reason_prop reason_op strict l super x fields tout + | (DefT (reason_c, _, InstanceT _), GetPrivatePropT (use_op, reason_op, x, [], _, _)) -> + add_output + cx + ~trace + (Error_message.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) + | ( DefT (reason_c, _, InstanceT (_, _, _, instance)), + GetPrivatePropT (use_op, reason_op, x, scope :: scopes, static, tout) ) -> + if scope.class_binding_id <> instance.class_id then + rec_flow cx trace (l, GetPrivatePropT (use_op, reason_op, x, scopes, static, tout)) + else + let map = + if static then + scope.class_private_static_fields + else + scope.class_private_fields + in + (match SMap.get x (Context.find_props cx map) with + | None -> + add_output + cx + ~trace + (Error_message.EPrivateLookupFailed ((reason_op, reason_c), x, use_op)) + | Some p -> + let action = ReadProp { use_op; obj_t = l; tout } in + let propref = Named (reason_op, x) in + perform_lookup_action + cx + trace + propref + p + PropertyMapProperty + reason_c + reason_op + action) + | (DefT (_, _, InstanceT _), GetPropT (_, reason_op, Computed _, _)) -> + (* Instances don't have proper dictionary support. All computed accesses are converted to named property access to `$key` and `$value` during element resolution in ElemT. *) - let loc = aloc_of_reason reason_op in - add_output cx ~trace FlowError.(EInternal (loc |> ALoc.to_loc, InstanceLookupComputed)) - - (********************************) - (* ... and their methods called *) - (********************************) - - | DefT (reason_c, InstanceT (_, super, _, instance)), - MethodT (use_op, reason_call, reason_lookup, Named (reason_prop, x), - funtype, prop_t) - -> (* TODO: closure *) - let own_props = Context.find_props cx instance.own_props in - let proto_props = Context.find_props cx instance.proto_props in - let props = SMap.union own_props proto_props in - let funt = Tvar.mk cx reason_lookup in - let strict = - if instance.has_unknown_react_mixins then NonstrictReturning (None, None) - else Strict reason_c - in - get_prop cx trace ~use_op reason_prop reason_lookup strict l super x props funt; - Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (funt, prop_t)) prop_t; + let loc = aloc_of_reason reason_op in + add_output cx ~trace Error_message.(EInternal (loc, InstanceLookupComputed)) + (********************************) + (* ... 
and their methods called *) + (********************************) + | ( DefT (reason_c, _, InstanceT (_, super, _, instance)), + MethodT (use_op, reason_call, reason_lookup, Named (reason_prop, x), funtype, prop_t) + ) -> + (* TODO: closure *) + let own_props = Context.find_props cx instance.own_props in + let proto_props = Context.find_props cx instance.proto_props in + let props = SMap.union own_props proto_props in + let funt = Tvar.mk cx reason_lookup in + let strict = + if instance.has_unknown_react_mixins then + NonstrictReturning (None, None) + else + Strict reason_c + in + get_prop cx trace ~use_op reason_prop reason_lookup strict l super x props funt; + Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (funt, prop_t)) prop_t; - (* suppress ops while calling the function. if `funt` is a `FunT`, then + (* suppress ops while calling the function. if `funt` is a `FunT`, then `CallT` will set its own ops during the call. if `funt` is something else, then something like `VoidT ~> CallT` doesn't need the op either because we want to point at the call and undefined thing. *) - rec_flow cx trace (funt, CallT (use_op, reason_call, funtype)); - - | DefT (_, InstanceT _), MethodT (_, reason_call, _, Computed _, _, _) -> - (* Instances don't have proper dictionary support. All computed accesses + rec_flow cx trace (funt, CallT (use_op, reason_call, funtype)) + | (DefT (_, _, InstanceT _), MethodT (_, reason_call, _, Computed _, _, _)) -> + (* Instances don't have proper dictionary support. All computed accesses are converted to named property access to `$key` and `$value` during element resolution in ElemT. *) - let loc = aloc_of_reason reason_call in - add_output cx ~trace FlowError.(EInternal (loc |> ALoc.to_loc, InstanceLookupComputed)) - - (** In traditional type systems, object types are not extensible. E.g., an + let loc = aloc_of_reason reason_call in + add_output cx ~trace Error_message.(EInternal (loc, InstanceLookupComputed)) + (* In traditional type systems, object types are not extensible. E.g., an object {x: 0, y: ""} has type {x: number; y: string}. While it is possible to narrow the object's type to hide some of its properties (aka width subtyping), extending its type to model new properties is @@ -5010,14 +5161,16 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = **) - (**********************************************************************) - (* objects can be assigned, i.e., their properties can be set in bulk *) - (**********************************************************************) - - | to_obj, ObjAssignToT (reason, from_obj, t, kind) -> - rec_flow cx trace (from_obj, ObjAssignFromT (reason, to_obj, t, kind)) + (**********************************************************************) + (* objects can be assigned, i.e., their properties can be set in bulk *) + (**********************************************************************) - (** When some object-like type O1 flows to + (* Special case any. Otherwise this will lead to confusing errors when any tranforms to an + object type. 
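(* Illustrative sketch (simplified; a mutable map stands in for the tvar
   plumbing): the ObjAssignToT / ObjAssignFromT pair in this hunk models
   Object.assign(to, from) in two steps -- first wait for the source object to
   resolve, then copy each readable source property onto the target, and only
   afterwards release the target as the overall result. *)
module NameMap = Map.Make (String)

type obj = { mutable props : string NameMap.t }   (* prop name -> type (as a string) *)

let obj_assign_from ~(to_obj : obj) ~(from_obj : obj) : obj =
  NameMap.iter
    (fun name ty -> to_obj.props <- NameMap.add name ty to_obj.props)
    from_obj.props;
  to_obj   (* the result is only released once the copy has happened *)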
*) + | (AnyT _, ObjAssignToT (use_op, _, _, t, _)) -> rec_flow_t cx ~use_op trace (l, t) + | (to_obj, ObjAssignToT (use_op, reason, from_obj, t, kind)) -> + rec_flow cx trace (from_obj, ObjAssignFromT (use_op, reason, to_obj, t, kind)) + (* When some object-like type O1 flows to ObjAssignFromT(_,O2,X,ObjAssign), the properties of O1 are copied to O2, and O2 is linked to X to signal that the copying is done; the intention is that when those properties are read through X, they should @@ -5028,744 +5181,789 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = with reads of those properties through X as soon as O2 is resolved. To avoid this race, we make O2 flow to ObjAssignToT(_,O1,X,ObjAssign); when O2 is resolved, we make the switch. **) - - | DefT (lreason, ObjT { props_tmap = mapr; flags; dict_t; _ }), - ObjAssignFromT (reason_op, to_obj, t, ObjAssign error_flags) -> - Context.iter_props cx mapr (fun x p -> - (* move the reason to the call site instead of the definition, so + | ( DefT (lreason, _, ObjT { props_tmap = mapr; flags; dict_t; _ }), + ObjAssignFromT (use_op, reason_op, to_obj, t, ObjAssign error_flags) ) -> + Context.iter_props cx mapr (fun x p -> + (* move the reason to the call site instead of the definition, so that it is in the same scope as the Object.assign, so that strictness rules apply. *) - let reason_prop = - lreason - |> replace_reason (fun desc -> RPropertyOf (x, desc)) - |> repos_reason (aloc_of_reason reason_op |> ALoc.to_loc) - in - match Property.read_t p with - | Some t -> - let propref = Named (reason_prop, x) in - let t = filter_optional cx ~trace reason_prop t in - rec_flow cx trace (to_obj, SetPropT ( - unknown_use, reason_prop, propref, Normal, t, None - )) - | None -> - add_output cx ~trace (FlowError.EPropAccess ( - (reason_prop, reason_op), Some x, Property.polarity p, Read, unknown_use - )) - ); - if dict_t <> None then rec_flow_t cx trace (DefT (reason_op, AnyObjT), t) - else begin - if error_flags.assert_exact && not flags.exact - then add_output cx ~trace (FlowError.EInexactSpread (lreason, reason_op)); - rec_flow_t cx trace (to_obj, t) - end - - | DefT (lreason, InstanceT (_, _, _, { own_props; proto_props; _ })), - ObjAssignFromT (reason_op, to_obj, t, ObjAssign _) -> - let own_props = Context.find_props cx own_props in - let proto_props = Context.find_props cx proto_props in - let props = SMap.union own_props proto_props in - let props_to_skip = ["$key"; "$value"] in - props |> SMap.iter (fun x p -> - if not (List.mem x props_to_skip) then ( - match Property.read_t p with - | Some t -> - let propref = Named (reason_op, x) in - rec_flow cx trace (to_obj, SetPropT ( - unknown_use, reason_op, propref, Normal, t, None - )) - | None -> - add_output cx ~trace (FlowError.EPropAccess ( - (lreason, reason_op), Some x, Property.polarity p, Read, unknown_use - )) - ) - ); - rec_flow_t cx trace (to_obj, t) - - (* AnyObjT has every prop, each one typed as `any`, so spreading it into an + let reason_prop = + lreason + |> update_desc_reason (fun desc -> RPropertyOf (x, desc)) + |> repos_reason (aloc_of_reason reason_op) + in + match Property.read_t p with + | Some t -> + let propref = Named (reason_prop, x) in + let t = filter_optional cx ~trace reason_prop t in + rec_flow + cx + trace + (to_obj, SetPropT (use_op, reason_prop, propref, Assign, Normal, t, None)) + | None -> + add_output + cx + ~trace + (Error_message.EPropNotReadable { reason_prop; prop_name = Some x; use_op })); + if dict_t <> None then + rec_flow_t cx trace ~use_op (AnyT.make Untyped 
reason_op, t) + else ( + if error_flags.assert_exact && not flags.exact then + add_output cx ~trace (Error_message.EInexactSpread (lreason, reason_op)); + rec_flow_t cx trace ~use_op (to_obj, t) + ) + | ( DefT (lreason, _, InstanceT (_, _, _, { own_props; proto_props; _ })), + ObjAssignFromT (use_op, reason_op, to_obj, t, ObjAssign _) ) -> + let own_props = Context.find_props cx own_props in + let proto_props = Context.find_props cx proto_props in + let props = SMap.union own_props proto_props in + let props_to_skip = ["$key"; "$value"] in + props + |> SMap.iter (fun x p -> + if not (List.mem x props_to_skip) then + match Property.read_t p with + | Some t -> + let propref = Named (reason_op, x) in + rec_flow + cx + trace + (to_obj, SetPropT (use_op, reason_op, propref, Assign, Normal, t, None)) + | None -> + add_output + cx + ~trace + (Error_message.EPropNotReadable + { reason_prop = lreason; prop_name = Some x; use_op })); + rec_flow_t cx ~use_op trace (to_obj, t) + (* AnyT has every prop, each one typed as `any`, so spreading it into an existing object destroys all of the keys, turning the result into an - AnyObjT as well. TODO: wait for `to_obj` to be resolved, and then call + AnyT as well. TODO: wait for `to_obj` to be resolved, and then call `SetPropT (_, _, _, AnyT, _)` on all of its props. *) - | DefT (_, AnyObjT), ObjAssignFromT (reason, _, t, ObjAssign _) -> - rec_flow_t cx trace (DefT (reason, AnyObjT), t) - - | ObjProtoT _, ObjAssignFromT (_, to_obj, t, ObjAssign _) -> - rec_flow_t cx trace (to_obj, t) - - (* Object.assign semantics *) - | DefT (_, (NullT | VoidT)), ObjAssignFromT (_, to_obj, tout, ObjAssign _) -> - rec_flow_t cx trace (to_obj, tout) - - (* {...mixed} is the equivalent of {...{[string]: mixed}} *) - | DefT (reason, MixedT _), ObjAssignFromT (_, _, _, ObjAssign _) -> - let dict = { - dict_name = None; - key = StrT.make reason; - value = l; - dict_polarity = Neutral; - } in - let o = Obj_type.mk_with_proto cx reason - (ObjProtoT reason) - ~dict - ~sealed:true ~exact:true - in - rec_flow cx trace (o, u) - - | DefT (arr_r, ArrT arrtype), ObjAssignFromT (r, o, t, ObjSpreadAssign) -> - begin match arrtype with - | ArrayAT (elemt, None) - | ROArrayAT (elemt) -> - (* Object.assign(o, ...Array) -> Object.assign(o, x) *) - rec_flow cx trace (elemt, ObjAssignFromT (r, o, t, default_obj_assign_kind)) - | TupleAT (_, ts) - | ArrayAT (_, Some ts) -> - (* Object.assign(o, ...[x,y,z]) -> Object.assign(o, x, y, z) *) - List.iter (fun from -> - rec_flow cx trace (from, ObjAssignFromT (r, o, t, default_obj_assign_kind)) - ) ts - | EmptyAT -> - (* Object.assign(o, ...EmptyAT) -> Object.assign(o, empty) *) - rec_flow cx trace (DefT (arr_r, EmptyT), ObjAssignFromT (r, o, t, default_obj_assign_kind)) - end - - (*************************) - (* objects can be copied *) - (*************************) + | (AnyT (_, src), ObjAssignFromT (use_op, reason, _, t, ObjAssign _)) -> + rec_flow_t cx ~use_op trace (AnyT.make src reason, t) + | (AnyT _, ObjAssignFromT (use_op, _, _, t, _)) -> rec_flow_t cx ~use_op trace (l, t) + | (ObjProtoT _, ObjAssignFromT (use_op, _, to_obj, t, ObjAssign _)) -> + rec_flow_t cx ~use_op trace (to_obj, t) + (* Object.assign semantics *) + | (DefT (_, _, (NullT | VoidT)), ObjAssignFromT (use_op, _, to_obj, tout, ObjAssign _)) -> + rec_flow_t cx ~use_op trace (to_obj, tout) + (* {...mixed} is the equivalent of {...{[string]: mixed}} *) + | (DefT (reason, _, MixedT _), ObjAssignFromT (_, _, _, _, ObjAssign _)) -> + let dict = + { + dict_name = None; + key = StrT.make 
reason |> with_trust bogus_trust; + value = l; + dict_polarity = Polarity.Neutral; + } + in + let o = + Obj_type.mk_with_proto cx reason (ObjProtoT reason) ~dict ~sealed:true ~exact:true + in + rec_flow cx trace (o, u) + | (DefT (_, _, ArrT arrtype), ObjAssignFromT (use_op, r, o, t, ObjSpreadAssign)) -> + begin + match arrtype with + | ArrayAT (elemt, None) + | ROArrayAT elemt -> + (* Object.assign(o, ...Array) -> Object.assign(o, x) *) + rec_flow cx trace (elemt, ObjAssignFromT (use_op, r, o, t, default_obj_assign_kind)) + | TupleAT (_, ts) + | ArrayAT (_, Some ts) -> + (* Object.assign(o, ...[x,y,z]) -> Object.assign(o, x, y, z) *) + List.iter + (fun from -> + rec_flow + cx + trace + (from, ObjAssignFromT (use_op, r, o, t, default_obj_assign_kind))) + ts + end + (*************************) + (* objects can be copied *) + (*************************) - (* Note: The story around unsealed objects and rest is not great. One + (* Note: The story around unsealed objects and rest is not great. One thought is to insert a special kind of shadow property into the host object, which directs all writes (other than those in `xs`) to the unsealed rest result object. For now, the design here is incomplete. *) - - | DefT (_, ObjT { props_tmap; flags; _ }), ObjRestT (reason, xs, t) -> - let props = Context.find_props cx props_tmap in - let props = List.fold_left (fun map x -> SMap.remove x map) props xs in - (* Remove shadow properties from rest result *) - let props = SMap.filter (fun x _ -> not (is_internal_name x)) props in - let proto = ObjProtoT reason in - let sealed = Obj_type.sealed_in_op reason flags.sealed in - (* A rest result can not be exact if the source object is unsealed, + | (DefT (_, _, ObjT { props_tmap; flags; _ }), ObjRestT (reason, xs, t)) -> + let props = Context.find_props cx props_tmap in + let props = List.fold_left (fun map x -> SMap.remove x map) props xs in + (* Remove shadow properties from rest result *) + let props = SMap.filter (fun x _ -> not (is_internal_name x)) props in + let proto = ObjProtoT reason in + let sealed = Obj_type.sealed_in_op reason flags.sealed in + (* A rest result can not be exact if the source object is unsealed, because we may not have seen all the writes yet. 
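(* Illustrative sketch (simplified; the '$'-prefix test is a stand-in for
   is_internal_name): the ObjRestT case here builds the rest object by
   dropping the destructured keys plus internal shadow properties, and it can
   only mark the result exact when the source object was sealed, since an
   unsealed object may still acquire new properties later. *)
module NameMap = Map.Make (String)

let obj_rest ~(props : 'a NameMap.t) ~(skip : string list) ~sealed ~exact =
  let is_internal name = String.length name > 0 && name.[0] = '$' in
  let rest =
    NameMap.filter
      (fun name _ -> not (List.mem name skip) && not (is_internal name))
      props
  in
  (rest, sealed && exact)   (* remaining props, and whether the rest is exact *)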
*) - let exact = sealed && flags.exact in - let o = Obj_type.mk_with_proto cx reason ~props proto ~sealed ~exact in - rec_flow_t cx trace (o, t) - - | DefT (reason, InstanceT (_, super, _, insttype)), - ObjRestT (reason_op, xs, t) -> - (* Spread fields from super into an object *) - let obj_super = Tvar.mk_where cx reason_op (fun tvar -> - let u = ObjRestT (reason_op, xs, tvar) in - rec_flow cx trace (super, ReposLowerT (reason, false, u)) - ) in - - (* Spread own props from the instance into another object *) - let props = Context.find_props cx insttype.own_props in - let props = List.fold_left (fun props x -> SMap.remove x props) props xs in - let proto = ObjProtoT reason_op in - let obj_inst = Obj_type.mk_with_proto cx reason_op ~props proto in - - (* ObjAssign the inst-generated obj into the super-generated obj *) - let o = Tvar.mk_where cx reason_op (fun tvar -> - rec_flow cx trace ( - obj_inst, - ObjAssignFromT (reason_op, obj_super, tvar, default_obj_assign_kind) - ) - ) in - - rec_flow_t cx trace (o, t) - - | DefT (_, AnyT), ObjRestT (reason, _, t) -> - rec_flow_t cx trace (AnyT.why reason, t) - - (* ...AnyObjT and AnyFunT yield AnyObjT *) - | DefT (_, (AnyFunT | AnyObjT)), ObjRestT (reason, _, t) -> - rec_flow_t cx trace (DefT (reason, AnyObjT), t) - - | (ObjProtoT _, ObjRestT (reason, _, t)) -> - let obj = Obj_type.mk_with_proto cx reason l in - rec_flow_t cx trace (obj, t) - - | DefT (_, (NullT | VoidT)), ObjRestT (reason, _, t) -> - (* mirroring Object.assign semantics, treat null/void as empty objects *) - let o = Obj_type.mk cx reason in - rec_flow_t cx trace (o, t) - - (*************************************) - (* objects can be copied-then-sealed *) - (*************************************) - | DefT (_, ObjT { props_tmap = mapr; _ }), ObjSealT (reason, t) -> - let props = Context.find_props cx mapr in - let new_obj = - Obj_type.mk_with_proto cx reason ~sealed:true ~props l - in - rec_flow_t cx trace (new_obj, t) - - | DefT (_, AnyT), ObjSealT (reason, tout) -> - rec_flow_t cx trace (AnyT.why reason, tout) - - | DefT (_, AnyObjT), ObjSealT (reason, tout) -> - rec_flow_t cx trace (DefT (reason, AnyObjT), tout) - - (*************************) - (* objects can be frozen *) - (*************************) - - | DefT (reason_o, ObjT objtype), ObjFreezeT (reason_op, t) -> - (* make the reason describe the result (e.g. 
a frozen object literal), + let exact = sealed && flags.exact in + let o = Obj_type.mk_with_proto cx reason ~props proto ~sealed ~exact in + rec_flow_t cx trace (o, t) + | (DefT (reason, _, InstanceT (_, super, _, insttype)), ObjRestT (reason_op, xs, t)) -> + (* Spread fields from super into an object *) + let obj_super = + Tvar.mk_where cx reason_op (fun tvar -> + let u = ObjRestT (reason_op, xs, tvar) in + rec_flow cx trace (super, ReposLowerT (reason, false, u))) + in + (* Spread own props from the instance into another object *) + let props = Context.find_props cx insttype.own_props in + let props = List.fold_left (fun props x -> SMap.remove x props) props xs in + let proto = ObjProtoT reason_op in + let obj_inst = Obj_type.mk_with_proto cx reason_op ~props proto in + (* ObjAssign the inst-generated obj into the super-generated obj *) + let use_op = Op (ObjectSpread { op = reason_op }) in + let o = + Tvar.mk_where cx reason_op (fun tvar -> + rec_flow + cx + trace + ( obj_inst, + ObjAssignFromT (use_op, reason_op, obj_super, tvar, default_obj_assign_kind) )) + in + rec_flow_t cx ~use_op trace (o, t) + | (AnyT (_, src), ObjRestT (reason, _, t)) -> rec_flow_t cx trace (AnyT.why src reason, t) + | (ObjProtoT _, ObjRestT (reason, _, t)) -> + let obj = Obj_type.mk_with_proto cx reason l in + rec_flow_t cx trace (obj, t) + | (DefT (_, _, (NullT | VoidT)), ObjRestT (reason, _, t)) -> + (* mirroring Object.assign semantics, treat null/void as empty objects *) + let o = Obj_type.mk cx reason in + rec_flow_t cx trace (o, t) + (*************************************) + (* objects can be copied-then-sealed *) + (*************************************) + | (DefT (_, _, ObjT { props_tmap = mapr; _ }), ObjSealT (reason, t)) -> + let props = Context.find_props cx mapr in + let new_obj = Obj_type.mk_with_proto cx reason ~sealed:true ~props l in + rec_flow_t cx trace (new_obj, t) + | (AnyT (_, src), ObjSealT (reason, tout)) -> + rec_flow_t cx trace (AnyT.why src reason, tout) + (*************************) + (* objects can be frozen *) + (*************************) + | (DefT (reason_o, trust, ObjT objtype), ObjFreezeT (reason_op, t)) -> + (* make the reason describe the result (e.g. a frozen object literal), but point at the entire Object.freeze call. 
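         For instance, for `Object.freeze({ foo: 1 })` the result is still described as
         a frozen object literal, while its position points at the whole
         `Object.freeze(...)` call.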
*) - let desc = RFrozen (desc_of_reason reason_o) in - let reason = replace_reason_const desc reason_op in - - let flags = {frozen = true; sealed = Sealed; exact = true;} in - let new_obj = DefT (reason, ObjT {objtype with flags}) in - rec_flow_t cx trace (new_obj, t) - - | DefT (_, AnyT), ObjFreezeT (reason_op, t) -> - rec_flow_t cx trace (AnyT.why reason_op, t) - - | DefT (_, AnyObjT), ObjFreezeT (reason_op, t) -> - rec_flow_t cx trace (DefT (reason_op, AnyObjT), t) - - (*******************************************) - (* objects may have their fields looked up *) - (*******************************************) - - | DefT (reason_obj, ObjT o), - LookupT (reason_op, strict, try_ts_on_failure, propref, action) -> - (match get_obj_prop cx trace o propref reason_op with - | Some p -> - (match strict with - | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id - | _ -> ()); - perform_lookup_action cx trace propref p reason_obj reason_op action - | None -> - let strict = match Obj_type.sealed_in_op reason_op o.flags.sealed, strict with - | false, ShadowRead (strict, ids) -> - ShadowRead (strict, Nel.cons o.props_tmap ids) - | false, ShadowWrite ids -> - ShadowWrite (Nel.cons o.props_tmap ids) - | _ -> strict - in - rec_flow cx trace (o.proto_t, - LookupT (reason_op, strict, try_ts_on_failure, propref, action))); - - | DefT (reason, (AnyT | AnyObjT)), - LookupT (reason_op, kind, _, propref, action) -> - (match action with - | SuperProp (_, lp) when Property.write_t lp = None -> - (* Without this exception, we will call rec_flow_p where - * `write_t lp = None` and `write_t up = Some`, which is a polarity - * mismatch error. Instead of this, we could "read" `mixed` from - * covariant props, which would always flow into `any`. *) - () - | _ -> - let p = Field (None, AnyT.why reason_op, Neutral) in - (match kind with - | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id - | _ -> ()); - perform_lookup_action cx trace propref p reason reason_op action) - - (*****************************************) - (* ... and their fields written *) - (*****************************************) - - | DefT (_, ObjT {flags; _}), - SetPropT (use_op, reason_op, Named (prop, "constructor"), _, _, _) -> - if flags.frozen - then - add_output cx ~trace - (FlowError.EPropAccess ((prop, reason_op), Some "constructor", - Positive, Write (Normal, None), use_op)) - - (** o.x = ... has the additional effect of o[_] = ... **) - - | DefT (_, ObjT { flags; _ }), SetPropT (use_op, reason_op, prop, _, _, _) - when flags.frozen -> - let reason_prop, prop = match prop with - | Named (r, prop) -> r, Some prop - | Computed t -> reason_of_t t, None - in - add_output cx ~trace (FlowError.EPropAccess ((reason_prop, reason_op), prop, - Positive, Write (Normal, None), use_op)) - - | DefT (reason_obj, ObjT o), SetPropT (use_op, reason_op, propref, _, tin, prop_t) -> - write_obj_prop cx trace ~use_op o propref reason_obj reason_op tin prop_t - - (* Since we don't know the type of the prop, use AnyT. 
*) - | DefT (_, (AnyT | AnyObjT)), SetPropT (use_op, reason_op, _, _, t, prop_t) -> - Option.iter ~f:(fun t -> rec_flow_t cx trace (AnyT.why reason_op, t)) prop_t; - rec_flow cx trace (t, UseT (use_op, AnyT.why reason_op)) - - | DefT (reason_obj, ObjT o), MatchPropT (use_op, reason_op, propref, proptype) -> - match_obj_prop cx trace ~use_op o propref reason_obj reason_op proptype - - | DefT (_, (AnyT | AnyObjT | AnyFunT)), MatchPropT (use_op, reason_op, _, t) -> - rec_flow cx trace (t, UseT (use_op, AnyT.why reason_op)) - - (*****************************) - (* ... and their fields read *) - (*****************************) - - | DefT (_, ObjT _), GetPropT (_, reason_op, Named (_, "constructor"), tout) -> - rec_flow_t cx trace (AnyT.why reason_op, tout) - - | DefT (reason_obj, ObjT o), GetPropT (use_op, reason_op, propref, tout) -> - read_obj_prop cx trace ~use_op o propref reason_obj reason_op tout - - | DefT (_, (AnyObjT | AnyT)), GetPropT (_, reason_op, _, tout) -> - rec_flow_t cx trace (AnyT.why reason_op, tout) - - (********************************) - (* ... and their methods called *) - (********************************) - - | DefT (_, ObjT _), MethodT(_, _, _, Named (_, "constructor"), _, _) -> () - - | DefT (reason_obj, ObjT o), - MethodT (use_op, reason_call, reason_lookup, propref, funtype, prop_t) -> - let t = Tvar.mk_where cx reason_lookup (fun tout -> - read_obj_prop cx trace ~use_op o propref reason_obj reason_lookup tout - ) in - Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (t, prop_t)) prop_t; - rec_flow cx trace (t, CallT (use_op, reason_call, funtype)) - - (* Since we don't know the signature of a method on AnyObjT, assume every - parameter is an AnyT. *) - | DefT (_, (AnyObjT | AnyT)), - MethodT (use_op, reason_op, _, _, { call_args_tlist; call_tout; _}, prop_t) -> - let any = AnyT.why reason_op in - Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (any, prop_t)) prop_t; - call_args_iter (fun t -> rec_flow cx trace (t, UseT (use_op, any))) call_args_tlist; - rec_flow_t cx trace (any, call_tout) - - (******************************************) - (* strings may have their characters read *) - (******************************************) - - | DefT (reason_s, StrT _), GetElemT (use_op, reason_op, index, tout) -> - rec_flow cx trace (index, UseT (use_op, NumT.why reason_s)); - rec_flow_t cx trace (StrT.why reason_op, tout) - - (** Expressions may be used as keys to access objects and arrays. 
In + let desc = RFrozen (desc_of_reason reason_o) in + let reason = replace_desc_reason desc reason_op in + let flags = { frozen = true; sealed = Sealed; exact = true } in + let new_obj = DefT (reason, trust, ObjT { objtype with flags }) in + rec_flow_t cx trace (new_obj, t) + | (AnyT (_, src), ObjFreezeT (reason_op, t)) -> + rec_flow_t cx trace (AnyT.why src reason_op, t) + (*******************************************) + (* objects may have their fields looked up *) + (*******************************************) + | ( DefT (reason_obj, _, ObjT o), + LookupT (reason_op, strict, try_ts_on_failure, propref, action) ) -> + (match get_obj_prop cx trace o propref reason_op with + | Some (p, target_kind) -> + (match strict with + | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id + | _ -> ()); + perform_lookup_action cx trace propref p target_kind reason_obj reason_op action + | None -> + let strict = + match (Obj_type.sealed_in_op reason_op o.flags.sealed, strict) with + | (false, ShadowRead (strict, ids)) -> ShadowRead (strict, Nel.cons o.props_tmap ids) + | (false, ShadowWrite ids) -> ShadowWrite (Nel.cons o.props_tmap ids) + | _ -> strict + in + rec_flow + cx + trace + (o.proto_t, LookupT (reason_op, strict, try_ts_on_failure, propref, action))) + | (AnyT (reason, _), LookupT (reason_op, kind, _, propref, action)) -> + (match action with + | SuperProp (_, lp) when Property.write_t lp = None -> + (* Without this exception, we will call rec_flow_p where + * `write_t lp = None` and `write_t up = Some`, which is a polarity + * mismatch error. Instead of this, we could "read" `mixed` from + * covariant props, which would always flow into `any`. *) + () + | _ -> + let p = Field (None, AnyT.untyped reason_op, Polarity.Neutral) in + (match kind with + | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id + | _ -> ()); + perform_lookup_action cx trace propref p DynamicProperty reason reason_op action) + (*****************************************) + (* ... and their fields written *) + (*****************************************) + | ( DefT (_, _, ObjT { flags; _ }), + SetPropT (use_op, _, Named (prop, "constructor"), _, _, _, _) ) -> + if flags.frozen then + add_output + cx + ~trace + (Error_message.EPropNotWritable + { reason_prop = prop; prop_name = Some "constructor"; use_op }) + (* o.x = ... has the additional effect of o[_] = ... **) + | (DefT (_, _, ObjT { flags; _ }), SetPropT (use_op, _, prop, _, _, _, _)) + when flags.frozen -> + let (reason_prop, prop) = + match prop with + | Named (r, prop) -> (r, Some prop) + | Computed t -> (reason_of_t t, None) + in + add_output + cx + ~trace + (Error_message.EPropNotWritable { reason_prop; prop_name = prop; use_op }) + | ( DefT (reason_obj, _, ObjT o), + SetPropT (use_op, reason_op, propref, mode, _, tin, prop_t) ) -> + write_obj_prop cx trace ~use_op ~mode o propref reason_obj reason_op tin prop_t + (* Since we don't know the type of the prop, use AnyT. *) + | (AnyT _, SetPropT (use_op, reason_op, _, _, _, t, prop_t)) -> + Option.iter ~f:(fun t -> rec_flow_t cx trace (AnyT.untyped reason_op, t)) prop_t; + rec_flow cx trace (t, UseT (use_op, AnyT.untyped reason_op)) + | (DefT (reason_obj, _, ObjT o), MatchPropT (use_op, reason_op, propref, proptype)) -> + match_obj_prop cx trace ~use_op o propref reason_obj reason_op proptype + | (AnyT _, MatchPropT (use_op, reason_op, _, t)) -> + rec_flow cx trace (t, UseT (use_op, AnyT.untyped reason_op)) + (*****************************) + (* ... 
and their fields read *) + (*****************************) + | (DefT (_, _, ObjT _), GetPropT (_, reason_op, Named (_, "constructor"), tout)) -> + rec_flow_t cx trace (Unsoundness.why Constructor reason_op, tout) + | (DefT (reason_obj, _, ObjT o), GetPropT (use_op, reason_op, propref, tout)) -> + read_obj_prop cx trace ~use_op o propref reason_obj reason_op tout + | (AnyT _, GetPropT (_, reason_op, _, tout)) -> + rec_flow_t cx trace (AnyT.untyped reason_op, tout) + (********************************) + (* ... and their methods called *) + (********************************) + | (DefT (_, _, ObjT _), MethodT (_, _, _, Named (_, "constructor"), _, _)) -> () + | ( DefT (reason_obj, _, ObjT o), + MethodT (use_op, reason_call, reason_lookup, propref, funtype, prop_t) ) -> + let t = + Tvar.mk_where cx reason_lookup (fun tout -> + read_obj_prop cx trace ~use_op o propref reason_obj reason_lookup tout) + in + Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (t, prop_t)) prop_t; + rec_flow cx trace (t, CallT (use_op, reason_call, funtype)) + (******************************************) + (* strings may have their characters read *) + (******************************************) + | (DefT (reason_s, trust, StrT _), GetElemT (use_op, reason_op, index, tout)) -> + rec_flow cx trace (index, UseT (use_op, NumT.why reason_s |> with_trust bogus_trust)); + rec_flow_t cx trace (StrT.why reason_op trust, tout) + (* Expressions may be used as keys to access objects and arrays. In general, we cannot evaluate such expressions at compile time. However, in some idiomatic special cases, we can; in such cases, we know exactly which strings/numbers the keys may be, and thus, we can use precise properties and indices to resolve the accesses. *) - (**********************************************************************) - (* objects/arrays may have their properties/elements written and read *) - (**********************************************************************) - - | DefT (_, (ObjT _ | AnyObjT | ArrT _ | AnyT)), SetElemT (use_op, reason_op, key, tin, tout) -> - rec_flow cx trace (key, ElemT (use_op, reason_op, l, WriteElem (tin, tout))) - - | DefT (_, (ObjT _ | AnyObjT | ArrT _ | AnyT)), GetElemT (use_op, reason_op, key, tout) -> - rec_flow cx trace (key, ElemT (use_op, reason_op, l, ReadElem tout)) - - | DefT (_, (ObjT _ | AnyObjT | ArrT _ | AnyT)), - CallElemT (reason_call, reason_lookup, key, ft) -> - let action = CallElem (reason_call, ft) in - rec_flow cx trace (key, ElemT (unknown_use, reason_lookup, l, action)) - - | _, ElemT (use_op, reason_op, (DefT (_, ObjT _) as obj), action) -> - let propref = match l with - | DefT (reason_x, StrT (Literal (_, x))) -> - let reason_prop = replace_reason_const (RProperty (Some x)) reason_x in - Named (reason_prop, x) - | _ -> Computed l - in - (match action with - | ReadElem t -> - rec_flow cx trace (obj, GetPropT (use_op, reason_op, propref, t)) - | WriteElem (tin, tout) -> - rec_flow cx trace (obj, SetPropT (use_op, reason_op, propref, Normal, tin, None)); - Option.iter ~f:(fun t -> rec_flow_t cx trace (obj, t)) tout - | CallElem (reason_call, ft) -> - rec_flow cx trace (obj, MethodT (use_op, reason_call, reason_op, propref, ft, None))) - - | _, ElemT (use_op, reason_op, (DefT (_, (AnyObjT | AnyT)) as obj), action) -> - let value = AnyT.why reason_op in - perform_elem_action cx trace ~use_op reason_op obj value action - - (* It is not safe to write to an unknown index in a tuple. However, any is - * a source of unsoundness, so that's ok. 
`tup[(0: any)] = 123` should not - * error when `tup[0] = 123` does not. *) - | DefT (_, AnyT), - ElemT (use_op, reason_op, (DefT (r, ArrT arrtype) as arr), action) -> - let value = elemt_of_arrtype r arrtype in - perform_elem_action cx trace ~use_op reason_op arr value action - - | l, ElemT (use_op, reason, (DefT (reason_tup, ArrT arrtype) as arr), action) when numeric l -> - let value, ts, is_tuple = begin match arrtype with - | ArrayAT(value, ts) -> value, ts, false - | TupleAT(value, ts) -> value, Some ts, true - | ROArrayAT (value) -> value, None, true - | EmptyAT -> DefT (reason_tup, EmptyT), None, true - end in - let exact_index, value = match l with - | DefT (_, NumT (Literal (_, (float_value, _)))) -> - begin match ts with - | None -> false, value - | Some ts -> - let index = int_of_float float_value in + (**********************************************************************) + (* objects/arrays may have their properties/elements written and read *) + (**********************************************************************) + | ( (DefT (_, _, (ObjT _ | ArrT _)) | AnyT _), + SetElemT (use_op, reason_op, key, mode, tin, tout) ) -> + rec_flow cx trace (key, ElemT (use_op, reason_op, l, WriteElem (tin, tout, mode))) + | ((DefT (_, _, (ObjT _ | ArrT _)) | AnyT _), GetElemT (use_op, reason_op, key, tout)) -> + rec_flow cx trace (key, ElemT (use_op, reason_op, l, ReadElem tout)) + | ( (DefT (_, _, (ObjT _ | ArrT _)) | AnyT _), + CallElemT (reason_call, reason_lookup, key, ft) ) -> + let action = CallElem (reason_call, ft) in + rec_flow cx trace (key, ElemT (unknown_use, reason_lookup, l, action)) + | (_, ElemT (use_op, reason_op, (DefT (_, _, ObjT _) as obj), action)) -> + let propref = + match l with + | DefT (reason_x, _, StrT (Literal (_, x))) -> + let reason_prop = replace_desc_reason (RProperty (Some x)) reason_x in + Named (reason_prop, x) + | _ -> Computed l + in + (match action with + | ReadElem t -> rec_flow cx trace (obj, GetPropT (use_op, reason_op, propref, t)) + | WriteElem (tin, tout, mode) -> + rec_flow cx trace (obj, SetPropT (use_op, reason_op, propref, mode, Normal, tin, None)); + Option.iter ~f:(fun t -> rec_flow_t cx trace (obj, t)) tout + | CallElem (reason_call, ft) -> + rec_flow cx trace (obj, MethodT (use_op, reason_call, reason_op, propref, ft, None))) + | (_, ElemT (use_op, reason_op, (AnyT _ as obj), action)) -> + let value = AnyT.untyped reason_op in + perform_elem_action cx trace ~use_op ~restrict_deletes:false reason_op obj value action + (* It is not safe to write to an unknown index in a tuple. However, any is + * a source of unsoundness, so that's ok. `tup[(0: any)] = 123` should not + * error when `tup[0] = 123` does not. 
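+           * Writes through an `any` index are still flagged for read-only arrays,
+           * though: the `ROArrayAT` case below reports `EROArrayWrite`.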
*) + | (AnyT _, ElemT (use_op, reason_op, (DefT (reason_tup, _, ArrT arrtype) as arr), action)) + -> + begin + match (action, arrtype) with + | (WriteElem _, ROArrayAT _) -> + let reasons = (reason_op, reason_tup) in + add_output cx ~trace (Error_message.EROArrayWrite (reasons, use_op)) + | _ -> () + end; + let value = elemt_of_arrtype arrtype in + perform_elem_action cx trace ~use_op ~restrict_deletes:false reason_op arr value action + | (l, ElemT (use_op, reason, (DefT (reason_tup, _, ArrT arrtype) as arr), action)) + when numeric l -> + let (value, ts, is_index_restricted, is_tuple) = + match arrtype with + | ArrayAT (value, ts) -> (value, ts, false, false) + | TupleAT (value, ts) -> (value, Some ts, true, true) + | ROArrayAT value -> (value, None, true, false) + in + let (can_write_tuple, value) = + match l with + | DefT (index_reason, _, NumT (Literal (_, (float_value, _)))) -> begin - try true, List.nth ts index - with _ -> - if is_tuple then begin - let reasons = (reason, reason_tup) in - let error = - FlowError.ETupleOutOfBounds (reasons, List.length ts, index, use_op) - in - add_output cx ~trace error; - true, DefT (mk_reason RTupleOutOfBoundsAccess (aloc_of_reason reason |> ALoc.to_loc), VoidT) - end else true, value + match ts with + | None -> (false, value) + | Some ts -> + let index_string = Dtoa.ecma_string_of_float float_value in + begin + match int_of_string_opt index_string with + | Some index -> + let value_opt = + (try List.nth_opt ts index with Invalid_argument _ -> None) + in + begin + match value_opt with + | Some value -> (true, value) + | None -> + if is_tuple then ( + add_output + cx + ~trace + (Error_message.ETupleOutOfBounds + { + use_op; + reason; + reason_op = reason_tup; + length = List.length ts; + index = index_string; + }); + ( true, + AnyT.error + (mk_reason RTupleOutOfBoundsAccess (aloc_of_reason reason)) ) + ) else + (true, value) + end + | None -> + (* not an integer index *) + if is_tuple then ( + add_output + cx + ~trace + (Error_message.ETupleNonIntegerIndex + { use_op; reason = index_reason; index = index_string }); + (true, AnyT.error reason) + ) else + (true, value) + end end + | _ -> (false, value) + in + ( if is_index_restricted && not can_write_tuple then + match action with + (* These are safe to do with tuples and unknown indexes *) + | ReadElem _ + | CallElem _ -> + () + (* This isn't *) + | WriteElem _ -> + let error = + match ts with + | Some _ -> Error_message.ETupleUnsafeWrite { reason; use_op } + | None -> Error_message.EROArrayWrite ((reason, reason_tup), use_op) + in + add_output cx ~trace error ); + + perform_elem_action cx trace ~use_op ~restrict_deletes:is_tuple reason arr value action + | (DefT (_, _, ArrT _), GetPropT (_, reason_op, Named (_, "constructor"), tout)) -> + rec_flow_t cx trace (Unsoundness.why Constructor reason_op, tout) + | (DefT (_, _, ArrT _), SetPropT (_, _, Named (_, "constructor"), _, _, _, _)) + | (DefT (_, _, ArrT _), MethodT (_, _, _, Named (_, "constructor"), _, _)) -> + () + (**************************************************) + (* array pattern can consume the rest of an array *) + (**************************************************) + | (DefT (_, trust, ArrT arrtype), ArrRestT (_, reason, i, tout)) -> + let arrtype = + match arrtype with + | ArrayAT (_, None) + | ROArrayAT _ -> + arrtype + | ArrayAT (elemt, Some ts) -> ArrayAT (elemt, Some (Core_list.drop ts i)) + | TupleAT (elemt, ts) -> TupleAT (elemt, Core_list.drop ts i) + in + let a = DefT (reason, trust, ArrT arrtype) in + rec_flow_t cx trace (a, 
tout) + | (AnyT (_, src), ArrRestT (_, reason, _, tout)) -> + rec_flow_t cx trace (AnyT.why src reason, tout) + (*****************) + (* destructuring *) + (*****************) + | (_, DestructuringT (reason, kind, s, tout)) -> + begin + match kind with + | DestructAnnot -> + (* NB: BecomeT used to enforce that 0->1 property is preserved. Is + * currently necessary, since 0->1 annotations are not always + * recursively 0->1 -- e.g., class instance types. *) + let tvar = Tvar.mk cx reason in + eval_selector cx ~trace reason l s tvar; + rec_flow cx trace (tvar, BecomeT (reason, tout)) + | DestructInfer -> eval_selector cx ~trace reason l s tout end - | _ -> false, value - in - if is_tuple && not exact_index then begin - match action with - (* These are safe to do with tuples and unknown indexes *) - | ReadElem _ | CallElem _ -> () - (* This isn't *) - | WriteElem _ -> - let reasons = (reason, reason_tup) in - add_output - cx - ~trace - (FlowError.ETupleUnsafeWrite (reasons, use_op)) - end; - - perform_elem_action cx trace ~use_op reason arr value action - - - | DefT (_, ArrT _), GetPropT (_, reason_op, Named (_, "constructor"), tout) -> - rec_flow_t cx trace (AnyT.why reason_op, tout) - - | DefT (_, ArrT _), SetPropT (_, _, Named (_, "constructor"), _, _, _) - | DefT (_, ArrT _), MethodT (_, _, _, Named (_, "constructor"), _, _) -> - () - - (**************************************************) - (* array pattern can consume the rest of an array *) - (**************************************************) - - | DefT (_, ArrT arrtype), ArrRestT (_, reason, i, tout) -> - let arrtype = match arrtype with - | ArrayAT (_, None) - | ROArrayAT _ - | EmptyAT -> arrtype - | ArrayAT (elemt, Some ts) -> ArrayAT (elemt, Some (Core_list.drop ts i)) - | TupleAT (elemt, ts) -> TupleAT (elemt, Core_list.drop ts i) in - let a = DefT (reason, ArrT arrtype) in - rec_flow_t cx trace (a, tout) - - | DefT (_, AnyT), ArrRestT (_, reason, _, tout) -> - rec_flow_t cx trace (AnyT.why reason, tout) - - (**************) - (* object kit *) - (**************) - - | _, ObjKitT (use_op, reason, resolve_tool, tool, tout) -> - object_kit cx trace ~use_op reason resolve_tool tool tout l - - (**************************************************) - (* function types can be mapped over a structure *) - (**************************************************) - - | DefT (_, (AnyT | AnyObjT)), MapTypeT (reason_op, _, tout) -> - rec_flow_t cx trace (AnyT.why reason_op, tout) - - | DefT (_, ArrT arrtype), MapTypeT (reason_op, TupleMap funt, tout) -> - let f x = EvalT (funt, TypeDestructorT (unknown_use, reason_op, CallType [x]), mk_id ()) in - let arrtype = match arrtype with - | ArrayAT (elemt, ts) -> ArrayAT (f elemt, Option.map ~f:(List.map f) ts) - | TupleAT (elemt, ts) -> TupleAT (f elemt, List.map f ts) - | ROArrayAT (elemt) -> ROArrayAT (f elemt) - | EmptyAT -> EmptyAT in - let t = - let reason = replace_reason_const RArrayType reason_op in - DefT (reason, ArrT arrtype) - in - rec_flow_t cx trace (t, tout) - - | _, MapTypeT (reason, TupleMap funt, tout) -> - let iter = get_builtin cx ~trace "$iterate" reason in - let elemt = EvalT (iter, TypeDestructorT - (unknown_use, reason, CallType [l]), mk_id ()) in - let t = DefT (reason, ArrT (ROArrayAT elemt)) in - rec_flow cx trace (t, MapTypeT (reason, TupleMap funt, tout)) - - | DefT (_, ObjT o), MapTypeT (reason_op, ObjectMap funt, tout) -> - let map_t t = - let t, opt = match t with - | DefT (_, OptionalT t) -> t, true - | _ -> t, false - in - let t = EvalT (funt, TypeDestructorT - 
(unknown_use, reason_op, CallType [t]), mk_id ()) in - if opt - then optional t - else t - in - let props_tmap = - Context.find_props cx o.props_tmap - |> Properties.map_fields map_t - |> Context.make_property_map cx - in - let dict_t = Option.map ~f:(fun dict -> - let value = map_t dict.value in - {dict with value} - ) o.dict_t in - let mapped_t = - let reason = replace_reason_const RObjectType reason_op in - DefT (reason, ObjT {o with props_tmap; dict_t}) - in - rec_flow_t cx trace (mapped_t, tout) - - | DefT (_, ObjT o), MapTypeT (reason_op, ObjectMapi funt, tout) -> - let mapi_t key t = - let t, opt = match t with - | DefT (_, OptionalT t) -> t, true - | _ -> t, false - in - let t = EvalT (funt, TypeDestructorT - (unknown_use, reason_op, CallType [key; t]), mk_id ()) in - if opt - then optional t - else t - in - let mapi_field key t = - let reason = replace_reason_const (RStringLit key) reason_op in - mapi_t (DefT (reason, SingletonStrT key)) t - in - let props_tmap = - Context.find_props cx o.props_tmap - |> Properties.mapi_fields mapi_field - |> Context.make_property_map cx - in - let dict_t = Option.map ~f:(fun dict -> - let value = mapi_t dict.key dict.value in - {dict with value} - ) o.dict_t in - let mapped_t = - let reason = replace_reason_const RObjectType reason_op in - DefT (reason, ObjT {o with props_tmap; dict_t}) - in - rec_flow_t cx trace (mapped_t, tout) - - (***********************************************) - (* functions may have their prototypes written *) - (***********************************************) - - | DefT (_, FunT (_, t, _)), SetPropT (_, reason_op, Named (_, "prototype"), _, tin, _) -> - rec_flow cx trace (tin, ObjAssignFromT (reason_op, t, Locationless.AnyT.t, default_obj_assign_kind)) - - (*********************************) - (* ... and their prototypes read *) - (*********************************) - - | DefT (_, FunT (_, t, _)), GetPropT (_, _, Named (_, "prototype"), tout) -> - rec_flow_t cx trace (t, tout) - - | DefT (reason, ClassT instance), GetPropT (_, _, Named (_, "prototype"), tout) -> - let instance = reposition cx ~trace (aloc_of_reason reason |> ALoc.to_loc) instance in - rec_flow_t cx trace (instance, tout) - - (**************************************) - (* ... and their fields/elements read *) - (**************************************) - - | DefT (_, AnyFunT), ( - GetPropT (_, reason_op, _, tout) - | GetElemT (_, reason_op, _, tout) - ) -> - rec_flow_t cx trace (AnyT.why reason_op, tout) - - | DefT (reason_fun, AnyFunT), LookupT (reason_op, kind, _, x, action) -> - (match kind with - | NonstrictReturning (_, Some (id, _)) -> Context.test_prop_hit cx id - | _ -> ()); - let p = Field (None, AnyT.why reason_op, Neutral) in - perform_lookup_action cx trace x p reason_fun reason_op action - - (*****************************************) - (* ... 
and their fields/elements written *) - (*****************************************) - - | DefT (_, AnyFunT), SetPropT (use_op, reason_op, _, _, tin, prop_t) -> - let any = AnyT.why reason_op in - Option.iter ~f:(fun t -> rec_flow_t cx trace (any, t)) prop_t; - rec_flow cx trace (tin, UseT (use_op, any)); - - | DefT (_, AnyFunT), SetElemT (use_op, reason_op, _, tin, tout) -> - rec_flow cx trace (tin, UseT (use_op, AnyT.why reason_op)); - Option.iter ~f:(fun t -> rec_flow_t cx trace (l, t)) tout - - (***************************************************************) - (* functions may be called by passing a receiver and arguments *) - (***************************************************************) - - | FunProtoCallT _, - CallT (use_op, reason_op, ({call_this_t = func; call_args_tlist; _} as funtype)) -> - (* Drop the first argument in the use_op. *) - let use_op = match use_op with - | Op FunCall {op; fn; args = _ :: args} -> Op (FunCall {op; fn; args}) - | Op FunCallMethod {op; fn; prop; args = _ :: args} -> Op (FunCallMethod {op; fn; prop; args}) - | _ -> use_op - in - begin match call_args_tlist with - (* func.call() *) - | [] -> - let funtype = { funtype with - call_this_t = VoidT.why reason_op; - call_args_tlist = []; - } in - rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) - - (* func.call(this_t, ...call_args_tlist) *) - | (Arg call_this_t)::call_args_tlist -> - let funtype = { funtype with call_this_t; call_args_tlist } in - rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) - - (* func.call(...call_args_tlist) *) - | (SpreadArg _ as first_arg)::_ -> - let call_this_t = extract_non_spread cx ~trace first_arg in - - let funtype = { funtype with call_this_t; } in - rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) - end - - (*******************************************) - (* ... or a receiver and an argument array *) - (*******************************************) - - (* resolves the arguments... *) - | FunProtoApplyT _, - CallT (use_op, reason_op, ({call_this_t = func; call_args_tlist; _} as funtype)) -> - (* Drop the specific AST derived argument reasons. Our new arguments come - * from arbitrary positions in the array. *) - let use_op = match use_op with - | Op FunCall {op; fn; args = _} -> Op (FunCall {op; fn; args = []}) - | Op FunCallMethod {op; fn; prop; args = _} -> Op (FunCallMethod {op; fn; prop; args = []}) - | _ -> use_op - in - begin match call_args_tlist with - (* func.apply() *) - | [] -> - let funtype = { funtype with - call_this_t = VoidT.why reason_op; - call_args_tlist = []; - } in - rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) - - (* func.apply(this_arg) *) - | (Arg this_arg)::[] -> - let funtype = { funtype with call_this_t = this_arg; call_args_tlist = [] } in - rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) - - (* func.apply(this_arg, ts) *) - | first_arg::(Arg ts)::_ -> - let call_this_t = extract_non_spread cx ~trace first_arg in - let call_args_tlist = [ SpreadArg ts ] in - let funtype = { funtype with call_this_t; call_args_tlist; } in - (* Ignoring `this_arg`, we're basically doing func(...ts). Normally - * spread arguments are resolved for the multiflow application, however - * there are a bunch of special-cased functions like bind(), call(), - * apply, etc which look at the arguments a little earlier. If we delay - * resolving the spread argument, then we sabotage them. 
So we resolve - * it early *) - let t = Tvar.mk_where cx reason_op (fun t -> - let resolve_to = ResolveSpreadsToCallT (funtype, t) in - resolve_call_list cx ~trace ~use_op reason_op call_args_tlist resolve_to - ) in - rec_flow_t cx trace (func, t) - - | (SpreadArg t1)::(SpreadArg t2)::_ -> - add_output cx ~trace - (FlowError.(EUnsupportedSyntax (loc_of_t t1, SpreadArgument))); - add_output cx ~trace - (FlowError.(EUnsupportedSyntax (loc_of_t t2, SpreadArgument))) - | (SpreadArg t)::_ - | (Arg _)::(SpreadArg t)::_ -> - add_output cx ~trace - (FlowError.(EUnsupportedSyntax (loc_of_t t, SpreadArgument))) - end - - (************************************************************************) - (* functions may be bound by passing a receiver and (partial) arguments *) - (************************************************************************) - - | FunProtoBindT lreason, - CallT (use_op, reason_op, ({ - call_this_t = func; - call_targs; - call_args_tlist = first_arg::call_args_tlist; - _ - } as funtype)) -> - Option.iter call_targs ~f:(fun _ -> - add_output cx ~trace FlowError.(ECallTypeArity { - call_loc = aloc_of_reason reason_op |> ALoc.to_loc; - is_new = false; - reason_arity = lreason; - expected_arity = 0; - })); - let call_this_t = extract_non_spread cx ~trace first_arg in - let call_targs = None in - let funtype = { funtype with call_this_t; call_targs; call_args_tlist } in - rec_flow cx trace (func, BindT (use_op, reason_op, funtype, false)) - - | DefT (reason, FunT (_, _, ({this_t = o1; _} as ft))), - BindT (use_op, reason_op, calltype, _) -> - let { - call_this_t = o2; - call_targs = _; (* always None *) - call_args_tlist = tins2; - call_tout; - call_closure_t = _; - call_strict_arity = _; - } = calltype in - - (* TODO: closure *) - - rec_flow_t cx trace (o2,o1); - - let resolve_to = - ResolveSpreadsToMultiflowPartial (mk_id (), ft, reason_op, call_tout) in - resolve_call_list cx ~trace ~use_op reason tins2 resolve_to; - - | DefT (_, ObjT {call_t = Some id; _}), BindT _ -> - rec_flow cx trace (Context.find_call cx id, u) - - | DefT (_, InstanceT (_, _, _, {inst_call_t = Some id; _})), BindT _ -> - rec_flow cx trace (Context.find_call cx id, u) - - | DefT (_, (AnyT | AnyFunT)), - BindT (use_op, reason, calltype, _) -> - let { - call_this_t; - call_targs = _; (* always None *) - call_args_tlist; - call_tout; - call_closure_t = _; - call_strict_arity = _; - } = calltype in - rec_flow_t cx trace (AnyT.why reason, call_this_t); - call_args_iter (fun param_t -> - rec_flow cx trace (AnyT.why reason, UseT (use_op, param_t)) - ) call_args_tlist; - rec_flow_t cx trace (l, call_tout) - - | _, BindT (_, _, { call_tout; _ }, true) -> - rec_flow_t cx trace (l, call_tout) - - (***********************************************) - (* You can use a function as a callable object *) - (***********************************************) - - (* FunT ~> ObjT *) - - (* TODO: This rule doesn't interact very well with union-type checking. 
It + (**************) + (* object kit *) + (**************) + | (_, ObjKitT (use_op, reason, resolve_tool, tool, tout)) -> + ObjectKit.run cx trace ~use_op reason resolve_tool tool tout l + (**************************************************) + (* function types can be mapped over a structure *) + (**************************************************) + | (AnyT _, MapTypeT (_, reason_op, _, tout)) -> + rec_flow_t cx trace (AnyT.untyped reason_op, tout) + | (DefT (_, trust, ArrT arrtype), MapTypeT (use_op, reason_op, TupleMap funt, tout)) -> + let f x = + let use_op = Frame (TupleMapFunCompatibility { value = reason_of_t x }, use_op) in + EvalT (funt, TypeDestructorT (use_op, reason_op, CallType [x]), mk_id ()) + in + let arrtype = + match arrtype with + | ArrayAT (elemt, ts) -> ArrayAT (f elemt, Option.map ~f:(Core_list.map ~f) ts) + | TupleAT (elemt, ts) -> TupleAT (f elemt, Core_list.map ~f ts) + | ROArrayAT elemt -> ROArrayAT (f elemt) + in + let t = + let reason = replace_desc_reason RArrayType reason_op in + DefT (reason, trust, ArrT arrtype) + in + rec_flow_t cx trace (t, tout) + | (_, MapTypeT (use_op, reason, TupleMap funt, tout)) -> + let iter = get_builtin cx ~trace "$iterate" reason in + let elemt = EvalT (iter, TypeDestructorT (use_op, reason, CallType [l]), mk_id ()) in + let t = DefT (reason, bogus_trust (), ArrT (ROArrayAT elemt)) in + rec_flow cx trace (t, MapTypeT (use_op, reason, TupleMap funt, tout)) + | (DefT (_, trust, ObjT o), MapTypeT (use_op, reason_op, ObjectMap funt, tout)) -> + let map_t t = + let (t, opt) = + match t with + | OptionalT (_, t) -> (t, true) + | _ -> (t, false) + in + let use_op = Frame (ObjMapFunCompatibility { value = reason_of_t t }, use_op) in + let t = EvalT (funt, TypeDestructorT (use_op, reason_op, CallType [t]), mk_id ()) in + if opt then + optional t + else + t + in + let props_tmap = + Context.find_props cx o.props_tmap + |> Properties.map_fields map_t + |> Context.generate_property_map cx + in + let dict_t = + Option.map + ~f:(fun dict -> + let value = map_t dict.value in + { dict with value }) + o.dict_t + in + let mapped_t = + let reason = replace_desc_reason RObjectType reason_op in + let t = DefT (reason, trust, ObjT { o with props_tmap; dict_t }) in + if o.flags.exact then + ExactT (reason, t) + else + t + in + rec_flow_t cx trace (mapped_t, tout) + | (DefT (_, trust, ObjT o), MapTypeT (use_op, reason_op, ObjectMapi funt, tout)) -> + let mapi_t key t = + let (t, opt) = + match t with + | OptionalT (_, t) -> (t, true) + | _ -> (t, false) + in + let use_op = + Frame + (ObjMapiFunCompatibility { key = reason_of_t key; value = reason_of_t t }, use_op) + in + let t = + EvalT (funt, TypeDestructorT (use_op, reason_op, CallType [key; t]), mk_id ()) + in + if opt then + optional t + else + t + in + let mapi_field key t = + let reason = replace_desc_reason (RStringLit key) reason_op in + mapi_t (DefT (reason, bogus_trust (), SingletonStrT key)) t + in + let props_tmap = + Context.find_props cx o.props_tmap + |> Properties.mapi_fields mapi_field + |> Context.generate_property_map cx + in + let dict_t = + Option.map + ~f:(fun dict -> + let value = mapi_t dict.key dict.value in + { dict with value }) + o.dict_t + in + let mapped_t = + let reason = replace_desc_reason RObjectType reason_op in + let t = DefT (reason, trust, ObjT { o with props_tmap; dict_t }) in + if o.flags.exact then + ExactT (reason, t) + else + t + in + rec_flow_t cx trace (mapped_t, tout) + (***********************************************) + (* functions may have their 
prototypes written *) + (***********************************************) + | ( DefT (_, _, FunT (_, t, _)), + SetPropT (use_op, reason_op, Named (_, "prototype"), _, _, tin, _) ) -> + rec_flow + cx + trace + ( tin, + ObjAssignFromT + ( use_op, + reason_op, + t, + AnyT.locationless Unsoundness.function_proto, + default_obj_assign_kind ) ) + (*********************************) + (* ... and their prototypes read *) + (*********************************) + | (DefT (_, _, FunT (_, t, _)), GetPropT (_, _, Named (_, "prototype"), tout)) -> + rec_flow_t cx trace (t, tout) + | (DefT (reason, _, ClassT instance), GetPropT (_, _, Named (_, "prototype"), tout)) -> + let instance = reposition cx ~trace (aloc_of_reason reason) instance in + rec_flow_t cx trace (instance, tout) + (***************************************************************************) + (* assignment of properties to module.exports; *) + (* the only interesting case is where functions may have their statics set *) + (***************************************************************************) + | (_, ModuleExportsAssignT (_, assign, tout)) -> + let l' = + match l with + | DefT (r, trust, FunT (statics, proto, ft)) -> + let reason = reason_of_t statics in + let statics' = mod_reason_of_t (fun _ -> reason) assign in + DefT (r, trust, FunT (statics', proto, ft)) + | _ -> l + in + rec_flow_t cx trace (l', tout) + (***************************************************************) + (* functions may be called by passing a receiver and arguments *) + (***************************************************************) + | ( FunProtoCallT _, + CallT (use_op, reason_op, ({ call_this_t = func; call_args_tlist; _ } as funtype)) ) -> + (* Drop the first argument in the use_op. *) + let use_op = + match use_op with + | Op (FunCall { op; fn; args = _ :: args; local }) -> + Op (FunCall { op; fn; args; local }) + | Op (FunCallMethod { op; fn; prop; args = _ :: args; local }) -> + Op (FunCallMethod { op; fn; prop; args; local }) + | _ -> use_op + in + begin + match call_args_tlist with + (* func.call() *) + | [] -> + let funtype = + { + funtype with + call_this_t = VoidT.why reason_op |> with_trust bogus_trust; + call_args_tlist = []; + } + in + rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) + (* func.call(this_t, ...call_args_tlist) *) + | Arg call_this_t :: call_args_tlist -> + let funtype = { funtype with call_this_t; call_args_tlist } in + rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) + (* func.call(...call_args_tlist) *) + | (SpreadArg _ as first_arg) :: _ -> + let call_this_t = extract_non_spread cx ~trace first_arg in + let funtype = { funtype with call_this_t } in + rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) + end + (*******************************************) + (* ... or a receiver and an argument array *) + (*******************************************) + + (* resolves the arguments... *) + | ( FunProtoApplyT lreason, + CallT (use_op, reason_op, ({ call_this_t = func; call_args_tlist; _ } as funtype)) ) -> + (* Drop the specific AST derived argument reasons. Our new arguments come + * from arbitrary positions in the array. 
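+         * For example, in `f.apply(this, args)` the actual arguments are whatever
+         * `args` holds at runtime, so there is no per-argument expression to blame.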
*) + let use_op = + match use_op with + | Op (FunCall { op; fn; args = _; local }) -> Op (FunCall { op; fn; args = []; local }) + | Op (FunCallMethod { op; fn; prop; args = _; local }) -> + Op (FunCallMethod { op; fn; prop; args = []; local }) + | _ -> use_op + in + begin + match call_args_tlist with + (* func.apply() *) + | [] -> + let funtype = + { + funtype with + call_this_t = VoidT.why reason_op |> with_trust bogus_trust; + call_args_tlist = []; + } + in + rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) + (* func.apply(this_arg) *) + | [Arg this_arg] -> + let funtype = { funtype with call_this_t = this_arg; call_args_tlist = [] } in + rec_flow cx trace (func, CallT (use_op, reason_op, funtype)) + (* func.apply(this_arg, ts) *) + | [first_arg; Arg ts] -> + let call_this_t = extract_non_spread cx ~trace first_arg in + let call_args_tlist = [SpreadArg ts] in + let funtype = { funtype with call_this_t; call_args_tlist } in + (* Ignoring `this_arg`, we're basically doing func(...ts). Normally + * spread arguments are resolved for the multiflow application, however + * there are a bunch of special-cased functions like bind(), call(), + * apply, etc which look at the arguments a little earlier. If we delay + * resolving the spread argument, then we sabotage them. So we resolve + * it early *) + let t = + Tvar.mk_where cx reason_op (fun t -> + let resolve_to = ResolveSpreadsToCallT (funtype, t) in + resolve_call_list cx ~trace ~use_op reason_op call_args_tlist resolve_to) + in + rec_flow_t cx trace (func, t) + | [SpreadArg t1; SpreadArg t2] -> + add_output cx ~trace Error_message.(EUnsupportedSyntax (loc_of_t t1, SpreadArgument)); + add_output cx ~trace Error_message.(EUnsupportedSyntax (loc_of_t t2, SpreadArgument)) + | [SpreadArg t] + | [Arg _; SpreadArg t] -> + add_output cx ~trace Error_message.(EUnsupportedSyntax (loc_of_t t, SpreadArgument)) + | _ :: _ :: _ :: _ -> + Error_message.EFunctionCallExtraArg + (mk_reason RFunctionUnusedArgument (aloc_of_reason lreason), lreason, 2, use_op) + |> add_output cx ~trace + end + (************************************************************************) + (* functions may be bound by passing a receiver and (partial) arguments *) + (************************************************************************) + | ( FunProtoBindT lreason, + CallT + ( use_op, + reason_op, + ( { + call_this_t = func; + call_targs; + call_args_tlist = first_arg :: call_args_tlist; + _; + } as funtype ) ) ) -> + Option.iter call_targs ~f:(fun _ -> + add_output + cx + ~trace + Error_message.( + ECallTypeArity + { + call_loc = aloc_of_reason reason_op; + is_new = false; + reason_arity = lreason; + expected_arity = 0; + })); + let call_this_t = extract_non_spread cx ~trace first_arg in + let call_targs = None in + let funtype = { funtype with call_this_t; call_targs; call_args_tlist } in + rec_flow cx trace (func, BindT (use_op, reason_op, funtype, false)) + | ( DefT (reason, _, FunT (_, _, ({ this_t = o1; _ } as ft))), + BindT (use_op, reason_op, calltype, _) ) -> + let { + call_this_t = o2; + call_targs = _; + (* always None *) + call_args_tlist = tins2; + call_tout; + call_closure_t = _; + call_strict_arity = _; + } = + calltype + in + (* TODO: closure *) + rec_flow_t cx trace (o2, o1); + + let resolve_to = ResolveSpreadsToMultiflowPartial (mk_id (), ft, reason_op, call_tout) in + resolve_call_list cx ~trace ~use_op reason tins2 resolve_to + | (DefT (_, _, ObjT { call_t = Some id; _ }), BindT _) -> + rec_flow cx trace (Context.find_call cx id, u) + | (DefT (_, _, 
InstanceT (_, _, _, { inst_call_t = Some id; _ })), BindT _) -> + rec_flow cx trace (Context.find_call cx id, u) + | (AnyT _, BindT (use_op, reason, calltype, _)) -> + let { + call_this_t; + call_targs = _; + (* always None *) + call_args_tlist; + call_tout; + call_closure_t = _; + call_strict_arity = _; + } = + calltype + in + rec_flow_t cx trace (AnyT.untyped reason, call_this_t); + call_args_iter + (fun param_t -> rec_flow cx trace (AnyT.untyped reason, UseT (use_op, param_t))) + call_args_tlist; + rec_flow_t cx trace (l, call_tout) + | (_, BindT (_, _, { call_tout; _ }, true)) -> rec_flow_t cx trace (l, call_tout) + (***********************************************) + (* You can use a function as a callable object *) + (***********************************************) + | ( DefT (_, _, FunT _), + UseT + ( use_op, + DefT + ( _, + _, + ( ObjT { call_t = Some id; _ } + | InstanceT (_, _, _, { inst_call_t = Some id; _ }) ) ) ) ) -> + let t = Context.find_call cx id in + rec_flow cx trace (l, UseT (use_op, t)) + (* FunT ~> ObjT *) + + (* Previously, call properties were stored in the props map, and were + checked against dictionary upper bounds. This is wrong, but useful for + distinguishing between thunk-like types found in graphql-js. + + Now that call properties are stored separately, it is particularly + egregious to emit this constraint. This only serves to maintain buggy + behavior, which should be fixed, and this code removed. *) + | ( DefT (lreason, _, FunT _), + UseT (use_op, DefT (ureason, _, ObjT { dict_t = Some udict; _ })) ) -> + let { value; dict_polarity; _ } = udict in + let lit = is_literal_object_reason lreason in + let s = "$call" in + let use_op = + Frame + (PropertyCompatibility { prop = Some s; lower = lreason; upper = ureason }, use_op) + in + let lp = Field (None, l, Polarity.Positive) in + let up = Field (None, value, dict_polarity) in + if lit then + match (Property.read_t lp, Property.read_t up) with + | (Some lt, Some ut) -> rec_flow cx trace (lt, UseT (use_op, ut)) + | _ -> () + else + let reason_prop = replace_desc_reason (RProperty (Some s)) lreason in + let propref = Named (reason_prop, s) in + rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) + (* TODO: This rule doesn't interact very well with union-type checking. It looks up Function.prototype, which currently doesn't appear structurally in the function type, and thus may not be fully resolved when the function type is checked with a union containing the object @@ -5786,305 +5984,269 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = the union type contains both a function type and a object type as members, clearly intending for function types to match the former instead of the latter. 
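         An illustrative case: checking a function against a union like
         `(() => void) | { foo: string }`, where the function member is clearly the
         intended match.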
*) - | DefT (reason, FunT (statics, _, _)), - UseT (use_op, DefT (reason_o, ObjT { props_tmap; _ })) -> - if not - (quick_error_fun_as_obj cx trace ~use_op reason statics reason_o - (Context.find_props cx props_tmap)) - then - rec_flow cx trace (statics, u) - - (* TODO: similar concern as above *) - | DefT (reason, FunT (statics, _, _)), - UseT (use_op, DefT (reason_inst, InstanceT (_, _, _, { - own_props; - structural = true; - _; - }))) -> - if not - (quick_error_fun_as_obj cx trace ~use_op reason statics reason_inst - (SMap.filter (fun x _ -> x = "constructor") - (Context.find_props cx own_props))) - then - rec_flow cx trace (statics, u) - - (***************************************************************) - (* Enable structural subtyping for upperbounds like interfaces *) - (***************************************************************) - - | _, UseT (use_op, (DefT (_,InstanceT (_,_,_,{structural=true;_})) as i)) -> - rec_flow cx trace (i, ImplementsT (use_op, l)) - - | (ObjProtoT _ | FunProtoT _ | DefT (_, NullT)), ImplementsT _ -> () - - | DefT (reason_inst, InstanceT (_, super, _, { - own_props; - proto_props; - inst_call_t; - structural = true; - _; - })), - ImplementsT (use_op, t) -> - structural_subtype cx trace ~use_op t reason_inst - (own_props, proto_props, inst_call_t); - rec_flow cx trace (super, - ReposLowerT (reason_inst, false, ImplementsT (use_op, t))) - - | _, ImplementsT _ -> - add_output cx ~trace (FlowError.EUnsupportedImplements (reason_of_t l)) - - (*********************************************************************) - (* class A is a base class of class B iff *) - (* properties in B that override properties in A or its base classes *) - (* have the same signatures *) - (*********************************************************************) - - (** The purpose of SuperT is to establish consistency between overriding + | ( DefT (reason, _, FunT (statics, _, _)), + UseT (use_op, DefT (reason_o, _, ObjT { props_tmap; _ })) ) -> + if + not + (quick_error_fun_as_obj + cx + trace + ~use_op + reason + statics + reason_o + (Context.find_props cx props_tmap)) + then + rec_flow cx trace (statics, u) + (* TODO: similar concern as above *) + | ( DefT (reason, _, FunT (statics, _, _)), + UseT + ( use_op, + DefT + ( reason_inst, + _, + InstanceT (_, _, _, { own_props; inst_kind = InterfaceKind _; _ }) ) ) ) -> + if + not + (quick_error_fun_as_obj + cx + trace + ~use_op + reason + statics + reason_inst + (SMap.filter (fun x _ -> x = "constructor") (Context.find_props cx own_props))) + then + rec_flow cx trace (statics, u) + (***************************************************************) + (* Enable structural subtyping for upperbounds like interfaces *) + (***************************************************************) + | ( _, + UseT + (use_op, (DefT (_, _, InstanceT (_, _, _, { inst_kind = InterfaceKind _; _ })) as i)) + ) -> + rec_flow cx trace (i, ImplementsT (use_op, l)) + | ((ObjProtoT _ | FunProtoT _ | DefT (_, _, NullT)), ImplementsT _) -> () + | ( DefT + ( reason_inst, + _, + InstanceT + ( _, + super, + _, + { own_props; proto_props; inst_call_t; inst_kind = InterfaceKind _; _ } ) ), + ImplementsT (use_op, t) ) -> + structural_subtype cx trace ~use_op t reason_inst (own_props, proto_props, inst_call_t); + rec_flow cx trace (super, ReposLowerT (reason_inst, false, ImplementsT (use_op, t))) + | (_, ImplementsT _) -> + add_output cx ~trace (Error_message.EUnsupportedImplements (reason_of_t l)) + (*********************************************************************) 
+ (* class A is a base class of class B iff *) + (* properties in B that override properties in A or its base classes *) + (* have the same signatures *) + (*********************************************************************) + + (* The purpose of SuperT is to establish consistency between overriding properties with overridden properties. As such, the lookups performed for the inherited properties are non-strict: they are not required to exist. **) + | ( DefT (ureason, _, InstanceT (st, _, _, _)), + SuperT (use_op, reason, Derived { own; proto; static }) ) -> + let check_super l = check_super cx trace ~use_op reason ureason l in + SMap.iter (check_super l) own; + SMap.iter (fun x p -> if inherited_method x then check_super l x p) proto; - | DefT (ureason, InstanceT (st, _, _, _)), - SuperT (use_op, reason, Derived {own; proto; static}) -> - let check_super l = check_super cx trace ~use_op reason ureason l in - SMap.iter (check_super l) own; - SMap.iter (fun x p -> if inherited_method x then check_super l x p) proto; - (* TODO: inherited_method logic no longer applies for statics. It used to + (* TODO: inherited_method logic no longer applies for statics. It used to when call properties were included in the props, but that is no longer the case. All that remains is the "constructor" prop, which has no special meaning on the static object. *) - SMap.iter (fun x p -> if inherited_method x then check_super st x p) static; - - (***********************) - (* opaque types part 2 *) - (***********************) - - (* Don't refine opaque types based on its bound *) - | OpaqueT _, PredicateT (p, t) -> predicate cx trace t l p - | OpaqueT _, GuardT (pred, result, sink) -> guard cx trace l pred result sink - - (* Preserve OpaqueT as consequent, but branch based on the bound *) - | OpaqueT (_, {super_t = Some t; _}), CondT (r, then_t_opt, else_t, tout) -> - let then_t_opt = match then_t_opt with - | Some _ -> then_t_opt - | None -> Some l - in - rec_flow cx trace (t, CondT (r, then_t_opt, else_t, tout)) - - (* Opaque types may be treated as their supertype when they are a lower bound for a use *) - | OpaqueT (_, {super_t = Some t; _}), _ -> - rec_flow cx trace (t, u) - - (***********************************************************) - (* addition *) - (***********************************************************) - - | (l, AdderT (use_op, reason, flip, r, u)) -> - flow_addition cx trace use_op reason flip l r u - - (*********************************************************) - (* arithmetic/bitwise/update operations besides addition *) - (*********************************************************) - - | _, AssertArithmeticOperandT _ when numeric l -> () - | _, AssertArithmeticOperandT _ -> - add_output cx ~trace (FlowError.EArithmeticOperand (reason_of_t l)) - - (***********************************************************************) - (* Rest param annotations must be super types of the array bottom type *) - (***********************************************************************) - - | rest, AssertRestParamT r -> - (* This allows rest to be things like Iterable, mixed, Array, [1,2] - but disallows things like number, string, boolean *) - rec_flow_t cx trace (DefT (r, ArrT EmptyAT), rest) - - (***********************************************************) - (* coercion *) - (***********************************************************) - - (* string and number can be coerced to strings *) - | DefT (_, NumT _), UseT (Op Coercion _, DefT (_, StrT _)) -> () - - (**************************) - (* relational 
comparisons *) - (**************************) - - | (l, ComparatorT(reason, flip, r)) -> - flow_comparator cx trace reason flip l r; - - | (l, EqT(reason, flip, r)) -> - flow_eq cx trace reason flip l r; - - (************************) - (* unary minus operator *) - (************************) - - | DefT (_, NumT lit), UnaryMinusT (reason_op, t_out) -> - let num = match lit with - | Literal (_, (value, raw)) -> - let (value, raw) = Ast_utils.negate_number_literal (value, raw) in - DefT (replace_reason_const RNumber reason_op, NumT (Literal (None, (value, raw)))) - | AnyLiteral - | Truthy -> - l - in - rec_flow_t cx trace (num, t_out) - - | DefT (_, AnyT), UnaryMinusT (reason_op, t_out) -> - rec_flow_t cx trace (AnyT.why reason_op, t_out) - - (************************) - (* binary `in` operator *) - (************************) - - (* the left-hand side of a `(x in y)` expression is a string or number + SMap.iter (fun x p -> if inherited_method x then check_super st x p) static + (***********************) + (* opaque types part 2 *) + (***********************) + + (* Don't refine opaque types based on its bound *) + | (OpaqueT _, PredicateT (p, t)) -> predicate cx trace t l p + | (OpaqueT _, GuardT (pred, result, sink)) -> guard cx trace l pred result sink + (* Preserve OpaqueT as consequent, but branch based on the bound *) + | (OpaqueT (_, { super_t = Some t; _ }), CondT (r, then_t_opt, else_t, tout)) -> + let then_t_opt = + match then_t_opt with + | Some _ -> then_t_opt + | None -> Some l + in + rec_flow cx trace (t, CondT (r, then_t_opt, else_t, tout)) + (* Opaque types may be treated as their supertype when they are a lower bound for a use *) + | (OpaqueT (_, { super_t = Some t; _ }), _) -> rec_flow cx trace (t, u) + (***********************************************************) + (* addition *) + (***********************************************************) + | (l, AdderT (use_op, reason, flip, r, u)) -> + flow_addition cx trace use_op reason flip l r u + (*********************************************************) + (* arithmetic/bitwise/update operations besides addition *) + (*********************************************************) + | (_, AssertArithmeticOperandT _) when numberesque l -> () + | (_, AssertArithmeticOperandT _) -> + add_output cx ~trace (Error_message.EArithmeticOperand (reason_of_t l)) + (***********************************************************) + (* coercion *) + (***********************************************************) + + (* string and number can be coerced to strings *) + | (DefT (_, _, NumT _), UseT (Op (Coercion _), DefT (_, _, StrT _))) -> () + (**************************) + (* relational comparisons *) + (**************************) + | (l, ComparatorT (reason, flip, r)) -> flow_comparator cx trace reason flip l r + | (l, EqT (reason, flip, r)) -> flow_eq cx trace reason flip l r + (************************) + (* unary minus operator *) + (************************) + | (DefT (_, trust, NumT lit), UnaryMinusT (reason_op, t_out)) -> + let num = + match lit with + | Literal (_, (value, raw)) -> + let (value, raw) = Flow_ast_utils.negate_number_literal (value, raw) in + DefT + (replace_desc_reason RNumber reason_op, trust, NumT (Literal (None, (value, raw)))) + | AnyLiteral + | Truthy -> + l + in + rec_flow_t cx trace (num, t_out) + | (AnyT _, UnaryMinusT (reason_op, t_out)) -> + rec_flow_t cx trace (AnyT.untyped reason_op, t_out) + (************************) + (* binary `in` operator *) + (************************) + + (* the left-hand side of a `(x in y)` 
expression is a string or number TODO: also, symbols *) - | DefT (_, StrT _), AssertBinaryInLHST _ -> () - | DefT (_, NumT _), AssertBinaryInLHST _ -> () - | _, AssertBinaryInLHST _ -> - add_output cx ~trace (FlowError.EBinaryInLHS (reason_of_t l)) - - (* the right-hand side of a `(x in y)` expression must be object-like *) - | DefT (_, ArrT _), AssertBinaryInRHST _ -> () - | _, AssertBinaryInRHST _ when object_like l -> () - | _, AssertBinaryInRHST _ -> - add_output cx ~trace (FlowError.EBinaryInRHS (reason_of_t l)) - - (******************) - (* `for...in` RHS *) - (******************) - - (* objects are allowed. arrays _could_ be, but are not because it's + | (DefT (_, _, StrT _), AssertBinaryInLHST _) -> () + | (DefT (_, _, NumT _), AssertBinaryInLHST _) -> () + | (_, AssertBinaryInLHST _) -> + add_output cx ~trace (Error_message.EBinaryInLHS (reason_of_t l)) + (* the right-hand side of a `(x in y)` expression must be object-like *) + | (DefT (_, _, ArrT _), AssertBinaryInRHST _) -> () + | (_, AssertBinaryInRHST _) when object_like l -> () + | (_, AssertBinaryInRHST _) -> + add_output cx ~trace (Error_message.EBinaryInRHS (reason_of_t l)) + (******************) + (* `for...in` RHS *) + (******************) + + (* objects are allowed. arrays _could_ be, but are not because it's generally safer to use a for or for...of loop instead. *) - | _, AssertForInRHST _ when object_like l -> () - | (DefT (_, AnyObjT) | ObjProtoT _), AssertForInRHST _ -> () - - (* null/undefined are allowed *) - | DefT (_, (NullT | VoidT)), AssertForInRHST _ -> () - - | _, AssertForInRHST _ -> - add_output cx ~trace (FlowError.EForInRHS (reason_of_t l)) - - (**************************************) - (* types may be refined by predicates *) - (**************************************) - - | _, PredicateT(p,t) -> - predicate cx trace t l p - - | _, GuardT (pred, result, sink) -> - guard cx trace l pred result sink - - | DefT (_, StrT lit), - SentinelPropTestT (reason, l, key, sense, Enum.(One Str sentinel), result) -> - begin match lit with - | Literal (_, value) when (value = sentinel) != sense -> - if not sense - then () (* provably unreachable, so prune *) - else - let l = matching_sentinel_prop reason key (SingletonStrT sentinel) in - rec_flow_t cx trace (l, result) - | _ -> - rec_flow_t cx trace (l, result) - end - - | DefT (_, NumT lit), - SentinelPropTestT (reason, l, key, sense, Enum.(One Num sentinel_lit), result) -> - let sentinel, _ = sentinel_lit in - begin match lit with - | Literal (_, (value, _)) when (value = sentinel) != sense -> - if not sense - then () (* provably unreachable, so prune *) + | (_, AssertForInRHST _) when object_like l -> () + | ((AnyT _ | ObjProtoT _), AssertForInRHST _) -> () + (* null/undefined are allowed *) + | (DefT (_, _, (NullT | VoidT)), AssertForInRHST _) -> () + | (_, AssertForInRHST _) -> add_output cx ~trace (Error_message.EForInRHS (reason_of_t l)) + (**************************************) + (* types may be refined by predicates *) + (**************************************) + | (_, PredicateT (p, t)) -> predicate cx trace t l p + | (_, GuardT (pred, result, sink)) -> guard cx trace l pred result sink + | ( DefT (_, _, StrT lit), + SentinelPropTestT (reason, l, key, sense, UnionEnum.(One (Str sentinel)), result) ) -> + begin + match lit with + | Literal (_, value) when value = sentinel != sense -> + if not sense then + () + (* provably unreachable, so prune *) + else + let l = matching_sentinel_prop reason key (SingletonStrT sentinel) in + rec_flow_t cx trace (l, result) + 
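(* Editor's aside, not part of the patch: the string/number/boolean sentinel
   cases in this region all make the same three-way decision. The sketch below
   is a minimal standalone OCaml model of that decision over plain strings
   rather than Flow's internal types; the names `outcome` and `sentinel_test`
   are illustrative only. *)
type outcome =
  | Prune     (* test is provably false, so nothing flows to the branch *)
  | Mismatch  (* positive test against a different literal: a matching-sentinel
                 type flows instead, which may surface as an error *)
  | Keep      (* the test may succeed, so the tested type flows through *)

let sentinel_test ~(sense : bool) ~(lit : string) ~(sentinel : string) : outcome =
  if (lit = sentinel) <> sense then
    if not sense then
      Prune
    else
      Mismatch
  else
    Keep

(* e.g. `sentinel_test ~sense:false ~lit:"a" ~sentinel:"a"` is `Prune`: the
   `!==` branch cannot be taken when the property is known to equal the
   sentinel, while `~sense:true ~lit:"b" ~sentinel:"a"` is `Mismatch`. *)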
| _ -> rec_flow_t cx trace (l, result) + end + | ( DefT (_, _, NumT lit), + SentinelPropTestT (reason, l, key, sense, UnionEnum.(One (Num sentinel_lit)), result) + ) -> + let (sentinel, _) = sentinel_lit in + begin + match lit with + | Literal (_, (value, _)) when value = sentinel != sense -> + if not sense then + () + (* provably unreachable, so prune *) + else + let l = matching_sentinel_prop reason key (SingletonNumT sentinel_lit) in + rec_flow_t cx trace (l, result) + | _ -> rec_flow_t cx trace (l, result) + end + | ( DefT (_, _, BoolT lit), + SentinelPropTestT (reason, l, key, sense, UnionEnum.(One (Bool sentinel)), result) ) -> + begin + match lit with + | Some value when value = sentinel != sense -> + if not sense then + () + (* provably unreachable, so prune *) + else + let l = matching_sentinel_prop reason key (SingletonBoolT sentinel) in + rec_flow_t cx trace (l, result) + | _ -> rec_flow_t cx trace (l, result) + end + | ( DefT (_, _, NullT), + SentinelPropTestT (_reason, l, _key, sense, UnionEnum.(One Null), result) ) -> + if not sense then + () else - let l = matching_sentinel_prop reason key (SingletonNumT sentinel_lit) in rec_flow_t cx trace (l, result) - | _ -> - rec_flow_t cx trace (l, result) - end - - | DefT (_, BoolT lit), - SentinelPropTestT (reason, l, key, sense, Enum.(One Bool sentinel), result) -> - begin match lit with - | Some value when (value = sentinel) != sense -> - if not sense - then () (* provably unreachable, so prune *) + | ( DefT (_, _, VoidT), + SentinelPropTestT (_reason, l, _key, sense, UnionEnum.(One Void), result) ) -> + if not sense then + () else - let l = matching_sentinel_prop reason key (SingletonBoolT sentinel) in - rec_flow_t cx trace (l, result) - | _ -> rec_flow_t cx trace (l, result) - end - - | DefT (_, NullT), - SentinelPropTestT (_reason, l, _key, sense, Enum.(One Null), result) -> - if not sense - then () - else rec_flow_t cx trace (l, result) - - | DefT (_, VoidT), - SentinelPropTestT (_reason, l, _key, sense, Enum.(One Void), result) -> - if not sense - then () - else rec_flow_t cx trace (l, result) - - | DefT (_, (StrT _ | NumT _ | BoolT _ | NullT | VoidT)), - SentinelPropTestT (reason, obj, key, sense, Enum.Many enums, result) -> - if sense - then EnumSet.iter (fun enum -> - rec_flow cx trace (l, SentinelPropTestT (reason, obj, key, sense, Enum.One enum, result)) - ) enums - else rec_flow_t cx trace (obj, result) - - | DefT (_, (StrT _ | NumT _ | BoolT _ | NullT | VoidT)), - SentinelPropTestT (_reason, l, _key, sense, _, result) -> - (* types don't match (would've been matched above) *) - (* we don't prune other types like objects or instances, even though + | ( DefT (_, _, (StrT _ | NumT _ | BoolT _ | NullT | VoidT)), + SentinelPropTestT (_reason, l, _key, sense, _, result) ) -> + (* types don't match (would've been matched above) *) + (* we don't prune other types like objects or instances, even though a test like `if (ObjT === StrT)` seems obviously unreachable, but we have to be wary of toString and valueOf on objects/instances. 
*) - if sense - then () (* provably unreachable, so prune *) - else rec_flow_t cx trace (l, result) - - | _, SentinelPropTestT (_, l, _, _, _, result) -> - (* property exists, but is not something we can use for refinement *) - rec_flow_t cx trace (l, result) - - (*********************) - (* functions statics *) - (*********************) - - | DefT (reason, FunT (static, _, _)), _ when object_like_op u -> - rec_flow cx trace (static, ReposLowerT (reason, false, u)) - - (*****************) - (* class statics *) - (*****************) - - (* For GetPrivatePropT and SetPrivatePropT, the instance id is needed to determine whether - * or not the private static field exists on that class. Since we look through the scopes for - * the type of the field, there is no need to look at the static member of the instance. - * Instead, we just flip the boolean flag to true, indicating that when the - * InstanceT ~> Set/GetPrivatePropT constraint is processed that we should look at the - * private static fields instead of the private instance fields. *) - | DefT (reason, ClassT instance), GetPrivatePropT (use_op, reason_op, x, scopes, _, tout) -> - let u = GetPrivatePropT (use_op, reason_op, x, scopes, true, tout) in - rec_flow cx trace (instance, ReposLowerT (reason, false, u)) - - | DefT (reason, ClassT instance), SetPrivatePropT (use_op, reason_op, x, scopes, _, tout, tp) -> - let u = SetPrivatePropT (use_op, reason_op, x, scopes, true, tout, tp) in - rec_flow cx trace (instance, ReposLowerT (reason, false, u)) - - | DefT (reason, ClassT instance), _ when object_use u || object_like_op u -> - let statics = Tvar.mk cx reason in - rec_flow cx trace (instance, GetStaticsT (reason, statics)); - rec_flow cx trace (statics, u) - - (************************) - (* classes as functions *) - (************************) + if sense then + () + (* provably unreachable, so prune *) + else + rec_flow_t cx trace (l, result) + | (_, SentinelPropTestT (_, l, _, _, _, result)) -> + (* property exists, but is not something we can use for refinement *) + rec_flow_t cx trace (l, result) + (*********************) + (* functions statics *) + (*********************) + | (DefT (reason, _, FunT (static, _, _)), _) when object_like_op u -> + rec_flow cx trace (static, ReposLowerT (reason, false, u)) + (*****************) + (* class statics *) + (*****************) + + (* For GetPrivatePropT and SetPrivatePropT, the instance id is needed to determine whether + * or not the private static field exists on that class. Since we look through the scopes for + * the type of the field, there is no need to look at the static member of the instance. + * Instead, we just flip the boolean flag to true, indicating that when the + * InstanceT ~> Set/GetPrivatePropT constraint is processed that we should look at the + * private static fields instead of the private instance fields. 
*) + | ( DefT (reason, _, ClassT instance), + GetPrivatePropT (use_op, reason_op, x, scopes, _, tout) ) -> + let u = GetPrivatePropT (use_op, reason_op, x, scopes, true, tout) in + rec_flow cx trace (instance, ReposLowerT (reason, false, u)) + | ( DefT (reason, _, ClassT instance), + SetPrivatePropT (use_op, reason_op, x, mode, scopes, _, tout, tp) ) -> + let u = SetPrivatePropT (use_op, reason_op, x, mode, scopes, true, tout, tp) in + rec_flow cx trace (instance, ReposLowerT (reason, false, u)) + | (DefT (reason, _, ClassT instance), _) when object_use u || object_like_op u -> + let statics = Tvar.mk cx reason in + rec_flow cx trace (instance, GetStaticsT (reason, statics)); + rec_flow cx trace (statics, u) + (************************) + (* classes as functions *) + (************************) - (* When a class value flows to a function annotation or call site, check for + (* When a class value flows to a function annotation or call site, check for the presence of a call property in the former (as a static) compatible with the latter. @@ -6101,13 +6263,12 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = excluded from subclass compatibility checks, but are allowed on ClassT types. *) - | DefT (reason, ClassT instance), (UseT (_, DefT (_, FunT _)) | CallT _) -> - let statics = Tvar.mk cx reason in - rec_flow cx trace (instance, GetStaticsT (reason, statics)); - rec_flow cx trace (statics, u) - - (**************************************************************************) - (* TestPropT is emitted for property reads in the context of branch tests. + | (DefT (reason, _, ClassT instance), (UseT (_, DefT (_, _, FunT _)) | CallT _)) -> + let statics = Tvar.mk cx reason in + rec_flow cx trace (instance, GetStaticsT (reason, statics)); + rec_flow cx trace (statics, u) + (**************************************************************************) + (* TestPropT is emitted for property reads in the context of branch tests. Such tests are always non-strict, in that we don't immediately report an error if the property is not found not in the object type. Instead, if the property is not found, we control the result type of the read based @@ -6118,107 +6279,120 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = object types, the property *might* exist at run time, and since we don't know what the type of the property would be, we set things up so that the result of the read cannot be used in any interesting way. *) - (**************************************************************************) - - | DefT (_, NullT), TestPropT (reason_op, _, propref, tout) -> - (* The wildcard TestPropT implementation forwards the lower bound to + (**************************************************************************) + | (DefT (_, _, NullT), TestPropT (reason_op, _, propref, tout)) -> + (* The wildcard TestPropT implementation forwards the lower bound to LookupT. This is unfortunate, because LookupT is designed to terminate (successfully) on NullT, but property accesses on null should be type errors. Ideally, we should prevent LookupT constraints from being syntax-driven, in order to preserve the delicate invariants that surround it. 
*) - rec_flow cx trace (l, GetPropT (unknown_use, reason_op, propref, tout)) - - | _, TestPropT (reason_op, id, propref, tout) -> - (* NonstrictReturning lookups unify their result, but we don't want to + rec_flow cx trace (l, GetPropT (unknown_use, reason_op, propref, tout)) + | (DefT (r, trust, MixedT (Mixed_truthy | Mixed_non_maybe)), TestPropT (_, id, _, tout)) -> + (* Special-case property tests of definitely non-null/non-void values to + return mixed and treat them as a hit. *) + Context.test_prop_hit cx id; + rec_flow_t cx trace (DefT (r, trust, MixedT Mixed_everything), tout) + | (_, TestPropT (reason_op, id, propref, tout)) -> + (* NonstrictReturning lookups unify their result, but we don't want to unify with the tout tvar directly, so we create an indirection here to ensure we only supply lower bounds to tout. *) - let lookup_default = Tvar.mk_where cx reason_op (fun tvar -> - rec_flow_t cx trace (tvar, tout) - ) in - let name = name_of_propref propref in - let test_info = Some (id, (reason_op, reason_of_t l)) in - let lookup_default = match l with - | DefT (_, ObjT { flags; _ }) - when flags.exact -> - if Obj_type.sealed_in_op reason_op flags.sealed then - let r = replace_reason_const (RMissingProperty name) reason_op in - Some (DefT (r, VoidT), lookup_default) - else - (* This is an unsealed object. We don't now when (or even if) this - * property access will resolve, since reads and writes can happen - * in any order. - * - * Due to this, we never error on property accesses. TODO: Build a - * separate mechanism unsealed objects that errors after merge if a - * shadow prop is read but never written. - * - * We also should not return a default type on lookup failure, - * because a later write could make the lookup succeed. - *) - let () = Context.test_prop_hit cx id in - None - | _ -> - (* Note: a lot of other types could in principle be considered + let lookup_default = + Tvar.mk_where cx reason_op (fun tvar -> rec_flow_t cx trace (tvar, tout)) + in + let name = name_of_propref propref in + let test_info = Some (id, (reason_op, reason_of_t l)) in + let lookup_default = + match l with + | DefT (_, _, ObjT { flags; _ }) when flags.exact -> + if Obj_type.sealed_in_op reason_op flags.sealed then + let r = replace_desc_reason (RMissingProperty name) reason_op in + Some (DefT (r, bogus_trust (), VoidT), lookup_default) + else + (* This is an unsealed object. We don't now when (or even if) this + * property access will resolve, since reads and writes can happen + * in any order. + * + * Due to this, we never error on property accesses. TODO: Build a + * separate mechanism unsealed objects that errors after merge if a + * shadow prop is read but never written. + * + * We also should not return a default type on lookup failure, + * because a later write could make the lookup succeed. + *) + let () = Context.test_prop_hit cx id in + None + | _ -> + (* Note: a lot of other types could in principle be considered "exact". For example, new instances of classes could have exact types; so could `super` references (since they are statically rather than dynamically bound). However, currently we don't support any other exact types. Considering exact types inexact is sound, so there is no problem falling back to the same conservative approximation we use for inexact types in those cases. 
*) - let r = replace_reason_const (RUnknownProperty name) reason_op in - Some (DefT (r, MixedT Mixed_everything), lookup_default) - in - let lookup_kind = NonstrictReturning (lookup_default, test_info) in - rec_flow cx trace (l, - LookupT (reason_op, lookup_kind, [], propref, - RWProp (unknown_use, l, tout, Read))) - - (************) - (* indexing *) - (************) - - | DefT (_, InstanceT _), GetElemT (use_op, reason, i, t) -> - rec_flow cx trace (l, SetPropT (use_op, reason, Named (reason, "$key"), Normal, i, None)); - rec_flow cx trace (l, GetPropT (use_op, reason, Named (reason, "$value"), t)) - - | DefT (_, InstanceT _), SetElemT (use_op, reason, i, tin, tout) -> - rec_flow cx trace (l, SetPropT (use_op, reason, Named (reason, "$key"), Normal, i, None)); - rec_flow cx trace (l, SetPropT (use_op, reason, Named (reason, "$value"), Normal, tin, None)); - Option.iter ~f:(fun t -> rec_flow_t cx trace (l, t)) tout + let r = replace_desc_reason (RUnknownProperty name) reason_op in + Some (DefT (r, bogus_trust (), MixedT Mixed_everything), lookup_default) + in + let lookup_kind = NonstrictReturning (lookup_default, test_info) in + rec_flow + cx + trace + ( l, + LookupT + ( reason_op, + lookup_kind, + [], + propref, + ReadProp { use_op = unknown_use; obj_t = l; tout } ) ) + (************) + (* indexing *) + (************) + | (DefT (_, _, InstanceT _), GetElemT (use_op, reason, i, t)) -> + rec_flow + cx + trace + (l, SetPropT (use_op, reason, Named (reason, "$key"), Assign, Normal, i, None)); + rec_flow cx trace (l, GetPropT (use_op, reason, Named (reason, "$value"), t)) + | (DefT (_, _, InstanceT _), SetElemT (use_op, reason, i, mode, tin, tout)) -> + rec_flow + cx + trace + (l, SetPropT (use_op, reason, Named (reason, "$key"), mode, Normal, i, None)); + rec_flow + cx + trace + (l, SetPropT (use_op, reason, Named (reason, "$value"), mode, Normal, tin, None)); + Option.iter ~f:(fun t -> rec_flow_t cx trace (l, t)) tout + (***************************) + (* conditional type switch *) + (***************************) + + (* Use our alternate if our lower bound is empty. *) + | (DefT (_, _, EmptyT Bottom), CondT (_, _, else_t, tout)) -> + rec_flow_t cx trace (else_t, tout) + (* Otherwise continue by Flowing out lower bound to tout. *) + | (_, CondT (_, then_t_opt, _, tout)) -> + let then_t = + match then_t_opt with + | Some t -> t + | None -> l + in + rec_flow_t cx trace (then_t, tout) + (*************************) + (* repositioning, part 2 *) + (*************************) - (***************************) - (* conditional type switch *) - (***************************) - - (* Use our alternate if our lower bound is empty. *) - | DefT (_, EmptyT), CondT (_, _, else_t, tout) -> - rec_flow_t cx trace (else_t, tout) - - (* Otherwise continue by Flowing out lower bound to tout. *) - | _, CondT (_, then_t_opt, _, tout) -> - let then_t = match then_t_opt with - | Some t -> t - | None -> l - in - rec_flow_t cx trace (then_t, tout) - - (*************************) - (* repositioning, part 2 *) - (*************************) - - (* waits for a lower bound to become concrete, and then repositions it to + (* waits for a lower bound to become concrete, and then repositions it to the location stored in the ReposLowerT, which is usually the location where that lower bound was used; the lower bound's location (which is being overwritten) is where it was defined. 
*) - | _, ReposLowerT (reason, use_desc, u) -> - rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) - - (***************) - (* unsupported *) - (***************) + | (_, ReposLowerT (reason, use_desc, u)) -> + rec_flow cx trace (reposition_reason cx ~trace reason ~use_desc l, u) + (***************) + (* unsupported *) + (***************) - (** Lookups can be strict or non-strict, as denoted by the presence or + (* Lookups can be strict or non-strict, as denoted by the presence or absence of strict_reason in the following two pattern matches. Strictness derives from whether the object is sealed and was created in the same scope in which the lookup occurs - see @@ -6226,138 +6400,158 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = to find the desired property causes an error; a non-strict one does not. *) - - | (DefT (_, NullT) | ObjProtoT _), - LookupT (reason, strict, next::try_ts_on_failure, propref, t) -> - (* When s is not found, we always try to look it up in the next element in + | ( (DefT (_, _, NullT) | ObjProtoT _), + LookupT (reason, strict, next :: try_ts_on_failure, propref, t) ) -> + (* When s is not found, we always try to look it up in the next element in the list try_ts_on_failure. *) - rec_flow cx trace - (next, LookupT (reason, strict, try_ts_on_failure, propref, t)) - - | (ObjProtoT _ | FunProtoT _), - LookupT (reason_op, _, [], Named (_, "__proto__"), RWProp (_, l, t, rw)) -> - (* __proto__ is a getter/setter on Object.prototype *) - let u = match rw with - | Read -> GetProtoT (reason_op, t) - | Write _ -> SetProtoT (reason_op, t) - in - rec_flow cx trace (l, u) - - | ObjProtoT _, LookupT (reason_op, _, [], Named (_, x), _) - when is_object_prototype_method x -> - (** TODO: These properties should go in Object.prototype. Currently we + rec_flow cx trace (next, LookupT (reason, strict, try_ts_on_failure, propref, t)) + | ( (ObjProtoT _ | FunProtoT _), + LookupT + (reason_op, _, [], Named (_, "__proto__"), ReadProp { use_op = _; obj_t = l; tout }) + ) -> + (* __proto__ is a getter/setter on Object.prototype *) + rec_flow cx trace (l, GetProtoT (reason_op, tout)) + | ( (ObjProtoT _ | FunProtoT _), + LookupT + ( reason_op, + _, + [], + Named (_, "__proto__"), + WriteProp { use_op = _; obj_t = l; prop_tout = _; tin; write_ctx = _; mode = _ } ) + ) -> + (* __proto__ is a getter/setter on Object.prototype *) + rec_flow cx trace (l, SetProtoT (reason_op, tin)) + | (ObjProtoT _, LookupT (reason_op, _, [], Named (_, x), _)) + when is_object_prototype_method x -> + (* TODO: These properties should go in Object.prototype. Currently we model Object.prototype as a ObjProtoT, as an optimization against a possible deluge of shadow properties on Object.prototype, since it is shared by every object. 
**) - rec_flow cx trace (get_builtin_type cx ~trace reason_op "Object", u) - - | FunProtoT _, LookupT (reason_op, _, _, Named (_, x), _) - when is_function_prototype x -> - (** TODO: Ditto above comment for Function.prototype *) - rec_flow cx trace (get_builtin_type cx ~trace reason_op "Function", u) - - | (DefT (reason, NullT) | ObjProtoT reason | FunProtoT reason), - LookupT (reason_op, Strict strict_reason, [], - (Named (reason_prop, x) as propref), action) -> - let use_op = use_op_of_lookup_action action in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, strict_reason), reason, Some x, use_op)); - let p = Field (None, AnyT.why reason_op, Neutral) in - perform_lookup_action cx trace propref p reason reason_op action - - | (DefT (reason, NullT) | ObjProtoT reason | FunProtoT reason), - LookupT (reason_op, Strict strict_reason, [], - (Computed elem_t as propref), action) -> - (match elem_t with - | OpenT _ -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, PropRefComputedOpen)) - | DefT (_, StrT (Literal _)) -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, PropRefComputedLiteral)) - | DefT (_, AnyT) | DefT (_, StrT _) | DefT (_, NumT _) -> - (* any, string, and number keys are allowed, but there's nothing else to + rec_flow cx trace (get_builtin_type cx ~trace reason_op "Object", u) + | (FunProtoT _, LookupT (reason_op, _, _, Named (_, x), _)) when is_function_prototype x -> + (* TODO: Ditto above comment for Function.prototype *) + rec_flow cx trace (get_builtin_type cx ~trace reason_op "Function", u) + | ( (DefT (reason, _, NullT) | ObjProtoT reason | FunProtoT reason), + LookupT + (reason_op, Strict strict_reason, [], (Named (reason_prop, x) as propref), action) ) + -> + let use_op = use_op_of_lookup_action action in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + ((reason_prop, strict_reason), reason, Some x, use_op)); + let p = Field (None, AnyT.error reason_op, Polarity.Neutral) in + perform_lookup_action cx trace propref p DynamicProperty reason reason_op action + | ( (DefT (reason, _, NullT) | ObjProtoT reason | FunProtoT reason), + LookupT (reason_op, Strict strict_reason, [], (Computed elem_t as propref), action) ) + -> + (match elem_t with + | OpenT _ -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, PropRefComputedOpen)) + | DefT (_, _, StrT (Literal _)) -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, PropRefComputedLiteral)) + | AnyT _ -> + let p = Field (None, AnyT.untyped reason_op, Polarity.Neutral) in + perform_lookup_action cx trace propref p DynamicProperty reason reason_op action + | DefT (_, _, StrT _) + | DefT (_, _, NumT _) -> + (* string, and number keys are allowed, but there's nothing else to flow without knowing their literal values. *) - let p = Field (None, AnyT.why reason_op, Neutral) in - perform_lookup_action cx trace propref p reason reason_op action - | _ -> - let reason_prop = reason_of_t elem_t in - let use_op = use_op_of_lookup_action action in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, strict_reason), reason, None, use_op))) - - | (DefT (reason, NullT) | ObjProtoT reason | FunProtoT reason), - LookupT (reason_op, ShadowRead (strict, rev_proto_ids), [], - (Named (reason_prop, x) as propref), action) -> - (* Emit error if this is a strict read. See `lookup_kinds` in types.ml. 
*) - (match strict with - | None -> () - | Some strict_reason -> - let use_op = use_op_of_lookup_action action in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, strict_reason), reason, Some x, use_op))); - - (* Install shadow prop (if necessary) and link up proto chain. *) - let prop_loc = def_loc_of_reason reason_prop in - let p = find_or_intro_shadow_prop cx trace reason_op x prop_loc (Nel.rev rev_proto_ids) in - perform_lookup_action cx trace propref p reason reason_op action - - | (DefT (reason, NullT) | ObjProtoT reason | FunProtoT reason), LookupT (reason_op, - ShadowWrite rev_proto_ids, [], (Named (lookup_reason, x) as propref), action) -> - let id, proto_ids = Nel.rev rev_proto_ids in - let pmap = Context.find_props cx id in - (* Re-check written-to unsealed object to see if prop was added since we - * last looked. See comment above `find` in `find_or_intro_shadow_prop`. - *) - let p = match SMap.get x pmap with - | Some p -> p - | None -> - match SMap.get (internal_name x) pmap with - | Some p -> - (* unshadow *) - pmap - |> SMap.remove (internal_name x) - |> SMap.add x p - |> Context.add_property_map cx id; - p - | None -> - (* Create prop and link shadow props along the proto chain. *) - let reason_prop = replace_reason_const (RShadowProperty x) reason_op in - let t = Tvar.mk cx reason_prop in - let prop_loc = def_loc_of_reason lookup_reason in - (match proto_ids with - | [] -> () - | id::ids -> - let p_proto = find_or_intro_shadow_prop cx trace reason_op x prop_loc (id, ids) in - let t_proto = Property.assert_field p_proto in - rec_flow cx trace (t_proto, UnifyT (t_proto, t))); - (* Add prop *) - let p = Field (Some prop_loc, t, Neutral) in - pmap - |> SMap.add x p - |> Context.add_property_map cx id; - p - in - perform_lookup_action cx trace propref p reason reason_op action - - | (DefT (_, NullT) | ObjProtoT _ | FunProtoT _), - LookupT (_, ShadowRead _, [], Computed elem_t, _) -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, ShadowReadComputed)) - - | (DefT (_, NullT) | ObjProtoT _ | FunProtoT _), - LookupT (_, ShadowWrite _, [], Computed elem_t, _) -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, ShadowWriteComputed)) - - (* LookupT is a non-strict lookup *) - | (DefT (_, NullT) | - ObjProtoT _ | - FunProtoT _ | - (* TODO: why would mixed appear here? *) - DefT (_, MixedT (Mixed_truthy | Mixed_non_maybe))), - LookupT (_, NonstrictReturning (t_opt, test_opt), [], propref, action) -> - (* don't fire + let p = + Field (None, Unsoundness.why ComputedNonLiteralKey reason_op, Polarity.Neutral) + in + perform_lookup_action cx trace propref p PropertyMapProperty reason reason_op action + | _ -> + let reason_prop = reason_of_t elem_t in + let use_op = use_op_of_lookup_action action in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + ((reason_prop, strict_reason), reason, None, use_op))) + | ( (DefT (reason, _, NullT) | ObjProtoT reason | FunProtoT reason), + LookupT + ( reason_op, + ShadowRead (strict, rev_proto_ids), + [], + (Named (reason_prop, x) as propref), + action ) ) -> + (* Emit error if this is a strict read. See `lookup_kinds` in types.ml. *) + (match strict with + | None -> () + | Some strict_reason -> + let use_op = use_op_of_lookup_action action in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + ((reason_prop, strict_reason), reason, Some x, use_op))); + + (* Install shadow prop (if necessary) and link up proto chain. 
*) + let prop_loc = def_aloc_of_reason reason_prop in + let p = + find_or_intro_shadow_prop cx trace reason_op x prop_loc (Nel.rev rev_proto_ids) + in + perform_lookup_action cx trace propref p PropertyMapProperty reason reason_op action + | ( (DefT (reason, _, NullT) | ObjProtoT reason | FunProtoT reason), + LookupT + ( reason_op, + ShadowWrite rev_proto_ids, + [], + (Named (lookup_reason, x) as propref), + action ) ) -> + let (id, proto_ids) = Nel.rev rev_proto_ids in + let pmap = Context.find_props cx id in + (* Re-check written-to unsealed object to see if prop was added since we + * last looked. See comment above `find` in `find_or_intro_shadow_prop`. + *) + let p = + match SMap.get x pmap with + | Some p -> p + | None -> + (match SMap.get (internal_name x) pmap with + | Some p -> + (* unshadow *) + pmap + |> SMap.remove (internal_name x) + |> SMap.add x p + |> Context.add_property_map cx id; + p + | None -> + (* Create prop and link shadow props along the proto chain. *) + let reason_prop = replace_desc_new_reason (RShadowProperty x) reason_op in + let t = Tvar.mk cx reason_prop in + let prop_loc = def_aloc_of_reason lookup_reason in + (match proto_ids with + | [] -> () + | id :: ids -> + let p_proto = + find_or_intro_shadow_prop cx trace reason_op x prop_loc (id, ids) + in + let t_proto = Property.assert_field p_proto in + rec_flow cx trace (t_proto, UnifyT (t_proto, t))); + + (* Add prop *) + let p = Field (Some prop_loc, t, Polarity.Neutral) in + pmap |> SMap.add x p |> Context.add_property_map cx id; + p) + in + perform_lookup_action cx trace propref p PropertyMapProperty reason reason_op action + | ( (DefT (_, _, NullT) | ObjProtoT _ | FunProtoT _), + LookupT (_, ShadowRead _, [], Computed elem_t, _) ) -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, ShadowReadComputed)) + | ( (DefT (_, _, NullT) | ObjProtoT _ | FunProtoT _), + LookupT (_, ShadowWrite _, [], Computed elem_t, _) ) -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, ShadowWriteComputed)) + (* LookupT is a non-strict lookup *) + | ( (DefT (_, _, NullT) | ObjProtoT _ | FunProtoT _), + LookupT (_, NonstrictReturning (t_opt, test_opt), [], propref, action) ) -> + (* don't fire ...unless a default return value is given. Two examples: @@ -6369,228 +6563,168 @@ let rec __flow cx ((l: Type.t), (u: Type.use_t)) trace = a condition, in which case we consider the object's property to be `mixed`. *) - let use_op = Option.value ~default:unknown_use (use_op_of_lookup_action action) in - - Option.iter test_opt ~f:(fun (id, reasons) -> - (* TODO - as mentioned above, it's unclear why mixed is included in this case. Since you - * can read any property from mixed, we don't want to treat it as a miss *) - match l with - | DefT (_, MixedT _) -> Context.test_prop_hit cx id - | _ -> Context.test_prop_miss cx id (name_of_propref propref) reasons use_op - ); - - begin match t_opt with - | Some (not_found, t) -> - rec_unify cx trace ~use_op ~unify_any:true t not_found - | None -> () - end - - (* SuperT only involves non-strict lookups *) - | (DefT (_, NullT), SuperT _) - | (ObjProtoT _, SuperT _) - | (FunProtoT _, SuperT _) -> () + let use_op = Option.value ~default:unknown_use (use_op_of_lookup_action action) in + Option.iter test_opt ~f:(fun (id, reasons) -> + Context.test_prop_miss cx id (name_of_propref propref) reasons use_op); - (** ExtendsT searches for a nominal superclass. 
The search terminates with + begin + match t_opt with + | Some (not_found, t) -> rec_unify cx trace ~use_op ~unify_any:true t not_found + | None -> () + end + (* SuperT only involves non-strict lookups *) + | (DefT (_, _, NullT), SuperT _) + | (ObjProtoT _, SuperT _) + | (FunProtoT _, SuperT _) -> + () + (* ExtendsT searches for a nominal superclass. The search terminates with either failure at the root or a structural subtype check. **) - - | DefT (_, AnyObjT), ExtendsUseT _ -> () - - | DefT (lreason, ObjT { proto_t; _ }), ExtendsUseT _ -> - let l = reposition cx ~trace (aloc_of_reason lreason |> ALoc.to_loc) proto_t in - rec_flow cx trace (l, u) - - | DefT (reason, ClassT instance), ExtendsUseT _ -> - let statics = Tvar.mk cx reason in - rec_flow cx trace (instance, GetStaticsT (reason, statics)); - rec_flow cx trace (statics, u) - - | DefT (_, NullT), - ExtendsUseT (use_op, reason, next::try_ts_on_failure, l, u) -> - (* When seaching for a nominal superclass fails, we always try to look it + | (AnyT _, ExtendsUseT _) -> () + | (DefT (lreason, _, ObjT { proto_t; _ }), ExtendsUseT _) -> + let l = reposition cx ~trace (aloc_of_reason lreason) proto_t in + rec_flow cx trace (l, u) + | (DefT (reason, _, ClassT instance), ExtendsUseT _) -> + let statics = Tvar.mk cx reason in + rec_flow cx trace (instance, GetStaticsT (reason, statics)); + rec_flow cx trace (statics, u) + | (DefT (_, _, NullT), ExtendsUseT (use_op, reason, next :: try_ts_on_failure, l, u)) -> + (* When seaching for a nominal superclass fails, we always try to look it up in the next element in the list try_ts_on_failure. *) - rec_flow cx trace - (next, ExtendsUseT (use_op, reason, try_ts_on_failure, l, u)) - - | DefT (_, NullT), - ExtendsUseT (use_op, _, [], l, DefT (reason_inst, InstanceT (_, super, _, { - own_props; - proto_props; - inst_call_t; - structural = true; - _; - }))) -> - structural_subtype cx trace ~use_op l reason_inst - (own_props, proto_props, inst_call_t); - rec_flow cx trace (l, UseT (use_op, super)) - - | DefT (_, NullT), - ExtendsUseT (use_op, _, [], t, tc) -> - let reason_l, reason_u = Flow_error.ordered_reasons (reason_of_t t, reason_of_t tc) in - add_output cx ~trace (FlowError.EIncompatibleWithUseOp (reason_l, reason_u, use_op)) - - (*******************************) - (* ToString abstract operation *) - (*******************************) - - (* ToStringT passes through strings unchanged, and flows a generic StrT otherwise *) - - | DefT (_, StrT _), ToStringT (_, t_out) -> - rec_flow cx trace (l, t_out) - - | _, ToStringT (reason_op, t_out) -> - rec_flow cx trace (StrT.why reason_op, t_out) - - (**********************) - (* Array library call *) - (**********************) - - | DefT (reason, ArrT (ArrayAT(t, _))), - (GetPropT _ | SetPropT _ | MethodT _ | LookupT _) -> - rec_flow cx trace (get_builtin_typeapp cx ~trace reason "Array" [t], u) - - | DefT (reason, ArrT (TupleAT _ | ROArrayAT _ | EmptyAT as arrtype)), - (GetPropT _ | SetPropT _ | MethodT _ | LookupT _) -> - let t = elemt_of_arrtype reason arrtype in - rec_flow - cx trace (get_builtin_typeapp cx ~trace reason "$ReadOnlyArray" [t], u) - - (***********************) - (* String library call *) - (***********************) - - | DefT (reason, StrT _), u when primitive_promoting_use_t u -> - rec_flow cx trace (get_builtin_type cx ~trace reason "String",u) - - (***********************) - (* Number library call *) - (***********************) - - | DefT (reason, NumT _), u when primitive_promoting_use_t u -> - rec_flow cx trace (get_builtin_type cx 
~trace reason "Number",u) - - (***********************) - (* Boolean library call *) - (***********************) - - | DefT (reason, BoolT _), u when primitive_promoting_use_t u -> - rec_flow cx trace (get_builtin_type cx ~trace reason "Boolean",u) - - (*************************) - (* Function library call *) - (*************************) - - | FunProtoT reason, _ -> - let use_desc = true in - let fun_proto = get_builtin_type cx ~trace reason ~use_desc "Function" in - rec_flow cx trace (fun_proto, u) - - | _, UseT (use_op, FunProtoT reason) -> - let use_desc = true in - let fun_proto = get_builtin_type cx ~trace reason ~use_desc "Function" in - rec_flow cx trace (l, UseT (use_op, fun_proto)) - - (***********************) - (* Object library call *) - (***********************) - - | ObjProtoT reason, _ -> - let use_desc = true in - let obj_proto = get_builtin_type cx ~trace reason ~use_desc "Object" in - rec_flow cx trace (obj_proto, u) - - | _, UseT (use_op, ObjProtoT reason) -> - let use_desc = true in - let obj_proto = get_builtin_type cx ~trace reason ~use_desc "Object" in - rec_flow cx trace (l, UseT (use_op, obj_proto)) - - (* Special cases of FunT *) - | FunProtoApplyT reason, _ - | FunProtoBindT reason, _ - | FunProtoCallT reason, _ -> - rec_flow cx trace (FunProtoT reason, u) - - | _, LookupT (_, _, _, propref, lookup_action) -> - let use_op = use_op_of_lookup_action lookup_action in - add_output cx ~trace (FlowError.EIncompatibleProp { - prop = (match propref with Named (_, name) -> Some name | Computed _ -> None); - reason_prop = reason_of_propref propref; - reason_obj = reason_of_t l; - special = flow_error_kind_of_lower l; - use_op; - }) - - | _, UseT (use_op, u) -> - add_output cx ~trace (FlowError.EIncompatibleWithUseOp ( - reason_of_t l, reason_of_t u, use_op - )) - - | _ -> - add_output cx ~trace (FlowError.EIncompatible { - lower = (reason_of_t l, flow_error_kind_of_lower l); - upper = (reason_of_use_t u, flow_error_kind_of_upper u); - use_op = use_op_of_use_t u; - branches = []; - }) - ) - -and flow_error_kind_of_lower = function - | DefT (_, NullT) -> Some Flow_error.Possibly_null - | DefT (_, VoidT) -> Some Flow_error.Possibly_void - | DefT (_, MaybeT _) -> Some Flow_error.Possibly_null_or_void - | DefT (_, IntersectionT _) - | DefT (_, MixedT Empty_intersection) -> Some Flow_error.Incompatible_intersection - | _ -> None - -and flow_error_kind_of_upper = function - | GetPropT (_, _, Named (r, name), _) -> FlowError.IncompatibleGetPropT (aloc_of_reason r |> ALoc.to_loc, Some name) - | GetPropT (_, _, Computed t, _) -> FlowError.IncompatibleGetPropT (loc_of_t t, None) - | GetPrivatePropT (_, _, _, _, _, _) -> FlowError.IncompatibleGetPrivatePropT - | SetPropT (_, _, Named (r, name), _, _, _) -> FlowError.IncompatibleSetPropT (aloc_of_reason r |> ALoc.to_loc, Some name) - | SetPropT (_, _, Computed t, _, _, _) -> FlowError.IncompatibleSetPropT (loc_of_t t, None) - | MatchPropT (_, _, Named (r, name), _) -> FlowError.IncompatibleMatchPropT (aloc_of_reason r |> ALoc.to_loc, Some name) - | MatchPropT (_, _, Computed t, _) -> FlowError.IncompatibleMatchPropT (loc_of_t t, None) - | SetPrivatePropT (_, _, _, _, _, _, _) -> FlowError.IncompatibleSetPrivatePropT - | MethodT (_, _, _, Named (r, name), _, _) -> FlowError.IncompatibleMethodT (aloc_of_reason r |> ALoc.to_loc, Some name) - | MethodT (_, _, _, Computed t, _, _) -> FlowError.IncompatibleMethodT (loc_of_t t, None) - | CallT _ -> FlowError.IncompatibleCallT - | ConstructorT _ -> FlowError.IncompatibleConstructorT - | 
GetElemT (_, _, t, _) -> FlowError.IncompatibleGetElemT (loc_of_t t) - | SetElemT (_, _, t, _, _) -> FlowError.IncompatibleSetElemT (loc_of_t t) - | CallElemT (_, _, t, _) -> FlowError.IncompatibleCallElemT (loc_of_t t) - | ElemT (_, _, DefT (_, ArrT _), _) -> FlowError.IncompatibleElemTOfArrT - | ObjAssignFromT (_, _, _, ObjSpreadAssign) -> FlowError.IncompatibleObjAssignFromTSpread - | ObjAssignFromT _ -> FlowError.IncompatibleObjAssignFromT - | ObjRestT _ -> FlowError.IncompatibleObjRestT - | ObjSealT _ -> FlowError.IncompatibleObjSealT - | ArrRestT _ -> FlowError.IncompatibleArrRestT - | SuperT _ -> FlowError.IncompatibleSuperT - | MixinT _ -> FlowError.IncompatibleMixinT - | SpecializeT _ -> FlowError.IncompatibleSpecializeT - | ConcretizeTypeAppsT _ -> FlowError.IncompatibleSpecializeT - | ThisSpecializeT _ -> FlowError.IncompatibleThisSpecializeT - | VarianceCheckT _ -> FlowError.IncompatibleVarianceCheckT - | GetKeysT _ -> FlowError.IncompatibleGetKeysT - | HasOwnPropT (_, r, Literal (_, name)) -> FlowError.IncompatibleHasOwnPropT (aloc_of_reason r |> ALoc.to_loc, Some name) - | HasOwnPropT (_, r, _) -> FlowError.IncompatibleHasOwnPropT (aloc_of_reason r |> ALoc.to_loc, None) - | GetValuesT _ -> FlowError.IncompatibleGetValuesT - | UnaryMinusT _ -> FlowError.IncompatibleUnaryMinusT - | MapTypeT (_, (ObjectMap _ | ObjectMapi _), _) -> FlowError.IncompatibleMapTypeTObject - | TypeAppVarianceCheckT _ -> FlowError.IncompatibleTypeAppVarianceCheckT - | GetStaticsT _ -> FlowError.IncompatibleGetStaticsT - | use_t -> FlowError.IncompatibleUnclassified (string_of_use_ctor use_t) - -and use_op_of_lookup_action = function - | RWProp (use_op, _, _, _) -> Some use_op - | LookupProp (use_op, _) -> Some use_op - | SuperProp (use_op, _) -> Some use_op - | MatchProp (use_op, _) -> Some use_op - -(* some types need to be resolved before proceeding further *) -and needs_resolution = function - | OpenT _ | DefT (_, UnionT _) | DefT (_, OptionalT _) | DefT (_, MaybeT _) | AnnotT _ -> true - | _ -> false + rec_flow cx trace (next, ExtendsUseT (use_op, reason, try_ts_on_failure, l, u)) + | ( DefT (_, _, NullT), + ExtendsUseT + ( use_op, + _, + [], + l, + DefT + ( reason_inst, + _, + InstanceT + ( _, + super, + _, + { own_props; proto_props; inst_call_t; inst_kind = InterfaceKind _; _ } ) + ) ) ) -> + structural_subtype cx trace ~use_op l reason_inst (own_props, proto_props, inst_call_t); + rec_flow cx trace (l, UseT (use_op, super)) + (***********************) + (* Object library call *) + (***********************) + | (ObjProtoT reason, _) -> + let use_desc = true in + let obj_proto = get_builtin_type cx ~trace reason ~use_desc "Object" in + rec_flow cx trace (obj_proto, u) + | (_, UseT (use_op, ObjProtoT reason)) -> + let use_desc = true in + let obj_proto = get_builtin_type cx ~trace reason ~use_desc "Object" in + rec_flow cx trace (l, UseT (use_op, obj_proto)) + (*************************) + (* Function library call *) + (*************************) + | (FunProtoT reason, _) -> + let use_desc = true in + let fun_proto = get_builtin_type cx ~trace reason ~use_desc "Function" in + rec_flow cx trace (fun_proto, u) + | (_, UseT (use_op, FunProtoT reason)) -> + let use_desc = true in + let fun_proto = get_builtin_type cx ~trace reason ~use_desc "Function" in + rec_flow cx trace (l, UseT (use_op, fun_proto)) + | (_, ExtendsUseT (use_op, _, [], t, tc)) -> + let (reason_l, reason_u) = FlowError.ordered_reasons (reason_of_t t, reason_of_t tc) in + add_output cx ~trace (Error_message.EIncompatibleWithUseOp 
(reason_l, reason_u, use_op)) + (*******************************) + (* ToString abstract operation *) + (*******************************) + + (* ToStringT passes through strings unchanged, and flows a generic StrT otherwise *) + | (DefT (_, _, StrT _), ToStringT (_, t_out)) -> rec_flow cx trace (l, t_out) + | (_, ToStringT (reason_op, t_out)) -> + rec_flow cx trace (StrT.why reason_op |> with_trust bogus_trust, t_out) + (**********************) + (* Array library call *) + (**********************) + | ( DefT (reason, _, ArrT (ArrayAT (t, _))), + (GetPropT _ | SetPropT _ | MethodT _ | LookupT _) ) -> + rec_flow cx trace (get_builtin_typeapp cx ~trace reason "Array" [t], u) + | ( DefT (reason, _, ArrT ((TupleAT _ | ROArrayAT _) as arrtype)), + (GetPropT _ | SetPropT _ | MethodT _ | LookupT _) ) -> + let t = elemt_of_arrtype arrtype in + rec_flow cx trace (get_builtin_typeapp cx ~trace reason "$ReadOnlyArray" [t], u) + (***********************) + (* String library call *) + (***********************) + | (DefT (reason, _, StrT _), u) when primitive_promoting_use_t u -> + rec_flow cx trace (get_builtin_type cx ~trace reason "String", u) + (***********************) + (* Number library call *) + (***********************) + | (DefT (reason, _, NumT _), u) when primitive_promoting_use_t u -> + rec_flow cx trace (get_builtin_type cx ~trace reason "Number", u) + (***********************) + (* Boolean library call *) + (***********************) + | (DefT (reason, _, BoolT _), u) when primitive_promoting_use_t u -> + rec_flow cx trace (get_builtin_type cx ~trace reason "Boolean", u) + (*****************************************************) + (* Nice error messages for mixed function refinement *) + (*****************************************************) + | ( DefT (lreason, _, MixedT Mixed_function), + (MethodT _ | SetPropT _ | GetPropT _ | MatchPropT _ | LookupT _) ) -> + rec_flow cx trace (FunProtoT lreason, u) + | ( DefT (lreason, _, MixedT Mixed_function), + (CallT (use_op, ureason, _) | UseT (use_op, DefT (ureason, _, FunT _))) ) -> + add_output + cx + ~trace + (Error_message.EIncompatible + { + lower = (lreason, None); + upper = (ureason, Error_message.IncompatibleMixedCallT); + use_op = Some use_op; + branches = []; + }); + rec_flow cx trace (AnyT.make AnyError lreason, u) + (* Special cases of FunT *) + | (FunProtoApplyT reason, _) + | (FunProtoBindT reason, _) + | (FunProtoCallT reason, _) -> + rec_flow cx trace (FunProtoT reason, u) + | (_, LookupT (_, _, _, propref, lookup_action)) -> + let use_op = use_op_of_lookup_action lookup_action in + add_output + cx + ~trace + (Error_message.EIncompatibleProp + { + prop = + (match propref with + | Named (_, name) -> Some name + | Computed _ -> None); + reason_prop = reason_of_propref propref; + reason_obj = reason_of_t l; + special = error_message_kind_of_lower l; + use_op; + }) + | (_, UseT (use_op, u)) -> + add_output + cx + ~trace + (Error_message.EIncompatibleWithUseOp (reason_of_t l, reason_of_t u, use_op)) + | _ -> + add_output + cx + ~trace + (Error_message.EIncompatible + { + lower = (reason_of_t l, error_message_kind_of_lower l); + upper = (reason_of_use_t u, error_message_kind_of_upper u); + use_op = use_op_of_use_t u; + branches = []; + }) + ) -(** + (** * Addition * * According to the spec, given l + r: @@ -6618,1225 +6752,1505 @@ and needs_resolution = function * TODO: handle symbols (which raise a TypeError, so should be banned) * **) -and flow_addition cx trace use_op reason flip l r u = - if needs_resolution r then rec_flow cx trace 
(r, AdderT (use_op, reason, not flip, l, u)) else - let (l, r) = if flip then (r, l) else (l, r) in - let loc = aloc_of_reason reason in - begin match l, r with - | DefT (_, StrT _), DefT (_, StrT _) - | DefT (_, StrT _), DefT (_, NumT _) - | DefT (_, NumT _), DefT (_, StrT _) -> - rec_flow_t cx trace (StrT.at (loc |> ALoc.to_loc), u) - - (* unreachable additions are unreachable *) - | DefT (_, EmptyT), _ - | _, DefT (_, EmptyT) -> - rec_flow_t cx trace (EmptyT.at (loc |> ALoc.to_loc), u) - - | DefT (reason, MixedT _), _ - | _, DefT (reason, MixedT _) -> - add_output cx ~trace (FlowError.EAdditionMixed (reason, use_op)) - - | DefT (_, (NumT _ | BoolT _ | NullT | VoidT)), - DefT (_, (NumT _ | BoolT _ | NullT | VoidT)) -> - rec_flow_t cx trace (NumT.at (loc |> ALoc.to_loc), u) - - | DefT (_, StrT _), _ -> - rec_flow cx trace (r, UseT (use_op, l)); - rec_flow_t cx trace (StrT.at (loc |> ALoc.to_loc), u); - - | _, DefT (_, StrT _) -> - rec_flow cx trace (l, UseT (use_op, r)); - rec_flow_t cx trace (StrT.at (loc |> ALoc.to_loc), u); - - | DefT (_, AnyT), _ - | _, DefT (_, AnyT) -> - rec_flow_t cx trace (AnyT.at (loc |> ALoc.to_loc), u) - - | DefT (_, NumT _), _ -> - rec_flow cx trace (r, UseT (use_op, l)); - rec_flow_t cx trace (NumT.at (loc |> ALoc.to_loc), u); - - | _, DefT (_, NumT _) -> - rec_flow cx trace (l, UseT (use_op, r)); - rec_flow_t cx trace (NumT.at (loc |> ALoc.to_loc), u); - - | (_, _) -> - let fake_str = StrT.why reason in - rec_flow cx trace (l, UseT (use_op, fake_str)); - rec_flow cx trace (r, UseT (use_op, fake_str)); - rec_flow cx trace (fake_str, UseT (use_op, u)); - end; - -(** + and flow_addition cx trace use_op reason flip l r u = + if needs_resolution r then + rec_flow cx trace (r, AdderT (use_op, reason, not flip, l, u)) + else + let (l, r) = + if flip then + (r, l) + else + (l, r) + in + let loc = aloc_of_reason reason in + match (l, r) with + | (DefT (_, _, StrT _), DefT (_, _, StrT _)) + | (DefT (_, _, StrT _), DefT (_, _, NumT _)) + | (DefT (_, _, NumT _), DefT (_, _, StrT _)) -> + rec_flow_t cx trace (StrT.at loc |> with_trust bogus_trust, u) + (* unreachable additions are unreachable *) + | (DefT (_, _, EmptyT Bottom), _) + | (_, DefT (_, _, EmptyT Bottom)) -> + rec_flow_t cx trace (EmptyT.at loc |> with_trust bogus_trust, u) + | (DefT (reason, _, MixedT _), _) + | (_, DefT (reason, _, MixedT _)) -> + add_output cx ~trace (Error_message.EAdditionMixed (reason, use_op)) + | (DefT (_, _, EmptyT Zeroed), t) + | (t, DefT (_, _, EmptyT Zeroed)) -> + rec_flow_t cx trace (t, u) + | ( DefT (_, _, (NumT _ | BoolT _ | NullT | VoidT)), + DefT (_, _, (NumT _ | BoolT _ | NullT | VoidT)) ) -> + rec_flow_t cx trace (NumT.at loc |> with_trust bogus_trust, u) + | (DefT (_, _, StrT _), _) -> + rec_flow cx trace (r, UseT (use_op, l)); + rec_flow_t cx trace (StrT.at loc |> with_trust bogus_trust, u) + | (_, DefT (_, _, StrT _)) -> + rec_flow cx trace (l, UseT (use_op, r)); + rec_flow_t cx trace (StrT.at loc |> with_trust bogus_trust, u) + | (AnyT (_, src), _) + | (_, AnyT (_, src)) -> + rec_flow_t cx trace (AnyT.at src loc, u) + | (DefT (_, _, NumT _), _) -> + rec_flow cx trace (r, UseT (use_op, l)); + rec_flow_t cx trace (NumT.at loc |> with_trust bogus_trust, u) + | (_, DefT (_, _, NumT _)) -> + rec_flow cx trace (l, UseT (use_op, r)); + rec_flow_t cx trace (NumT.at loc |> with_trust bogus_trust, u) + | (_, _) -> + let fake_str = StrT.why reason |> with_trust bogus_trust in + rec_flow cx trace (l, UseT (use_op, fake_str)); + rec_flow cx trace (r, UseT (use_op, fake_str)); + 
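(* Editor's aside, not part of the patch: the arms of `flow_addition` around
   this point encode a result-type table for `+`. The sketch below is a
   minimal standalone OCaml model of that table over a simplified, closed
   type universe; it ignores the Bottom/Zeroed empty distinction and the
   subtyping checks performed on the operands, and the names `ty` and
   `add_result` are illustrative only. *)
type ty = Str | Num | Bool | Null | Void | Mixed | Empty | Any

let add_result (l : ty) (r : ty) : (ty, string) result =
  match (l, r) with
  | (Empty, _) | (_, Empty) -> Ok Empty  (* unreachable additions stay unreachable *)
  | (Mixed, _) | (_, Mixed) -> Error "may not be added"  (* cf. EAdditionMixed *)
  | (Str, _) | (_, Str) -> Ok Str        (* a string operand makes the result a string *)
  | (Any, _) | (_, Any) -> Ok Any
  | ((Num | Bool | Null | Void), (Num | Bool | Null | Void)) -> Ok Num

(* e.g. `add_result Str Num = Ok Str` and `add_result Null Bool = Ok Num`,
   mirroring `"a" + 1 : string` and `null + true : number` under these rules. *)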
rec_flow cx trace (fake_str, UseT (use_op, u)) + + (** * relational comparisons like <, >, <=, >= * * typecheck iff either of the following hold: * number <> number = number * string <> string = string **) -and flow_comparator cx trace reason flip l r = - if needs_resolution r then rec_flow cx trace (r, ComparatorT (reason, not flip, l)) else - let (l, r) = if flip then (r, l) else (l, r) in - match l, r with - | DefT (_, StrT _), DefT (_, StrT _) -> () - | (_, _) when numeric l && numeric r -> () - | (_, _) -> - let reasons = FlowError.ordered_reasons (reason_of_t l, reason_of_t r) in - add_output cx ~trace (FlowError.EComparison reasons) + and flow_comparator cx trace reason flip l r = + if needs_resolution r then + rec_flow cx trace (r, ComparatorT (reason, not flip, l)) + else + let (l, r) = + if flip then + (r, l) + else + (l, r) + in + match (l, r) with + | (DefT (_, _, StrT _), DefT (_, _, StrT _)) -> () + | (_, _) when numberesque l && numberesque r -> () + | (DefT (_, _, EmptyT _), _) + | (_, DefT (_, _, EmptyT _)) -> + () + | _ -> + let reasons = FlowError.ordered_reasons (reason_of_t l, reason_of_t r) in + add_output cx ~trace (Error_message.EComparison reasons) -(** + (** * == equality * * typecheck iff they intersect (otherwise, unsafe coercions may happen). * * note: any types may be compared with === (in)equality. **) -and flow_eq cx trace reason flip l r = - if needs_resolution r then rec_flow cx trace (r, EqT(reason, not flip, l)) else - let (l, r) = if flip then (r, l) else (l, r) in - if equatable (l, r) then () - else - let reasons = FlowError.ordered_reasons (reason_of_t l, reason_of_t r) in - add_output cx ~trace (FlowError.EComparison reasons) - - -and flow_obj_to_obj cx trace ~use_op (lreason, l_obj) (ureason, u_obj) = - let { - flags = lflags; - dict_t = ldict; - call_t = lcall; - props_tmap = lflds; - proto_t = lproto; - } = l_obj in - let { - flags = rflags; - dict_t = udict; - call_t = ucall; - props_tmap = uflds; - proto_t = uproto; - } = u_obj in - - (* if inflowing type is literal (thus guaranteed to be - unaliased), propertywise subtyping is sound *) - let lit = is_literal_object_reason lreason || lflags.frozen in + and flow_eq cx trace reason flip l r = + if needs_resolution r then + rec_flow cx trace (r, EqT (reason, not flip, l)) + else + let (l, r) = + if flip then + (r, l) + else + (l, r) + in + if equatable (l, r) then + () + else + let reasons = FlowError.ordered_reasons (reason_of_t l, reason_of_t r) in + add_output cx ~trace (Error_message.EComparison reasons) - (* If both are dictionaries, ensure the keys and values are compatible + and flow_obj_to_obj cx trace ~use_op (lreason, l_obj) (ureason, u_obj) = + let { flags = lflags; dict_t = ldict; call_t = lcall; props_tmap = lflds; proto_t = lproto } = + l_obj + in + let { flags = rflags; dict_t = udict; call_t = ucall; props_tmap = uflds; proto_t = uproto } = + u_obj + in + (* if inflowing type is literal (thus guaranteed to be + unaliased), propertywise subtyping is sound *) + let lit = is_literal_object_reason lreason || lflags.frozen in + (* If both are dictionaries, ensure the keys and values are compatible with each other. 
*) - (match ldict, udict with - | Some {key = lk; value = lv; dict_polarity = lpolarity; _}, - Some {key = uk; value = uv; dict_polarity = upolarity; _} -> + (match (ldict, udict) with + | ( Some { key = lk; value = lv; dict_polarity = lpolarity; _ }, + Some { key = uk; value = uv; dict_polarity = upolarity; _ } ) -> (* Don't report polarity errors when checking the indexer key. We would * report these errors again a second time when checking values. *) - rec_flow_p cx trace ~report_polarity:false ~use_op:(Frame (IndexerKeyCompatibility { - lower = lreason; - upper = ureason; - }, use_op)) lreason ureason (Computed uk) + rec_flow_p + cx + trace + ~report_polarity:false + ~use_op:(Frame (IndexerKeyCompatibility { lower = lreason; upper = ureason }, use_op)) + lreason + ureason + (Computed uk) (Field (None, lk, lpolarity), Field (None, uk, upolarity)); - rec_flow_p cx trace ~use_op:(Frame (PropertyCompatibility { - prop = None; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, use_op)) lreason ureason (Computed uv) + rec_flow_p + cx + trace + ~use_op: + (Frame (PropertyCompatibility { prop = None; lower = lreason; upper = ureason }, use_op)) + lreason + ureason + (Computed uv) (Field (None, lv, lpolarity), Field (None, uv, upolarity)) | _ -> ()); - if rflags.exact && rflags.sealed = Sealed && not (is_literal_object_reason ureason) - then ( - iter_real_props cx lflds (fun ~is_sentinel s _ -> - if not (Context.has_prop cx uflds s) - then ( - let use_op = Frame (PropertyCompatibility { - prop = Some s; - (* Lower and upper are reversed in this case since the lower object - * is the one requiring the prop. *) - lower = ureason; - upper = lreason; - is_sentinel; - }, use_op) in - let reason_prop = replace_reason_const (RProperty (Some s)) lreason in - let err = FlowError.EPropNotFound (Some s, (reason_prop, ureason), use_op) in - add_output cx ~trace err - ) + if rflags.exact && rflags.sealed = Sealed && not (is_literal_object_reason ureason) then ( + Context.iter_real_props cx lflds (fun s _ -> + if not (Context.has_prop cx uflds s) then + let use_op = + Frame + ( PropertyCompatibility + { + prop = Some s; + (* Lower and upper are reversed in this case since the lower object + * is the one requiring the prop. *) + lower = ureason; + upper = lreason; + }, + use_op ) + in + let reason_prop = replace_desc_reason (RProperty (Some s)) lreason in + let err = Error_message.EPropNotFound (Some s, (reason_prop, ureason), use_op) in + add_output cx ~trace err); + Option.iter lcall ~f:(fun _ -> + if Option.is_none ucall then + let prop = Some "$call" in + let use_op = + Frame + ( PropertyCompatibility + { + prop; + (* Lower and upper are reversed in this case since the lower object + * is the one requiring the prop. *) + lower = ureason; + upper = lreason; + }, + use_op ) + in + let reason_prop = replace_desc_reason (RProperty prop) lreason in + let err = Error_message.EPropNotFound (prop, (reason_prop, ureason), use_op) in + add_output cx ~trace err) ); - Option.iter lcall ~f:(fun _ -> - if Option.is_none ucall - then ( - let prop = Some "$call" in - let use_op = Frame (PropertyCompatibility { - prop; - (* Lower and upper are reversed in this case since the lower object - * is the one requiring the prop. 
*) - lower = ureason; - upper = lreason; - is_sentinel = false; - }, use_op) in - let reason_prop = replace_reason_const (RProperty prop) lreason in - let err = FlowError.EPropNotFound (prop, (reason_prop, ureason), use_op) in - add_output cx ~trace err - ) - ) - ); - - (match ucall with - | Some ucall -> - let prop_name = Some "$call" in - let use_op = Frame (PropertyCompatibility { - prop = prop_name; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, use_op) in - (match lcall with - | Some lcall -> - rec_flow cx trace (Context.find_call cx lcall, - UseT (use_op, Context.find_call cx ucall)) - | None -> - let reason_prop = replace_reason_const (RProperty prop_name) ureason in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, lreason), lreason, prop_name, Some use_op))) - | None -> ()); - - (* Properties in u must either exist in l, or match l's indexer. *) - iter_real_props cx uflds (fun ~is_sentinel s up -> - let reason_prop = replace_reason_const (RProperty (Some s)) ureason in - let propref = Named (reason_prop, s) in - let use_op' = use_op in - let use_op = Frame (PropertyCompatibility { - prop = Some s; - lower = lreason; - upper = ureason; - is_sentinel; - }, use_op') in - match Context.get_prop cx lflds s, ldict with - | Some lp, _ -> - if lit then ( - (* prop from unaliased LB: check <:, then make exact *) - (match Property.read_t lp, Property.read_t up with - | Some lt, Some ut -> rec_flow cx trace (lt, UseT (use_op, ut)) - | _ -> ()); - (* Band-aid to avoid side effect in speculation mode. Even in + + (match ucall with + | Some ucall -> + let prop_name = Some "$call" in + let use_op = + Frame (PropertyCompatibility { prop = prop_name; lower = lreason; upper = ureason }, use_op) + in + (match lcall with + | Some lcall -> + rec_flow cx trace (Context.find_call cx lcall, UseT (use_op, Context.find_call cx ucall)) + | None -> + let reason_prop = replace_desc_reason (RProperty prop_name) ureason in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + ((reason_prop, lreason), lreason, prop_name, Some use_op))) + | None -> ()); + + (* Properties in u must either exist in l, or match l's indexer. *) + Context.iter_real_props cx uflds (fun s up -> + let reason_prop = replace_desc_reason (RProperty (Some s)) ureason in + let propref = Named (reason_prop, s) in + let use_op' = use_op in + let use_op = + Frame (PropertyCompatibility { prop = Some s; lower = lreason; upper = ureason }, use_op') + in + match (Context.get_prop cx lflds s, ldict) with + | (Some lp, _) -> + if lit then ( + (* prop from unaliased LB: check <:, then make exact *) + (match (Property.read_t lp, Property.read_t up) with + | (Some lt, Some ut) -> rec_flow cx trace (lt, UseT (use_op, ut)) + | _ -> ()); + + (* Band-aid to avoid side effect in speculation mode. Even in non-speculation mode, the side effect here is racy, so it either needs to be taken out or replaced with something more robust. Tracked by #11299251. 
*) - if not (Speculation.speculating ()) then - Context.set_prop cx lflds s up - ) else ( - (* prop from aliased LB *) - rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) - ) - | None, Some { key; value; dict_polarity; _ } - when not (is_dictionary_exempt s) -> - rec_flow cx trace (string_key s reason_prop, UseT ( - Frame (IndexerKeyCompatibility {lower = lreason; upper = ureason}, use_op'), - key - )); - let lp = Field (None, value, dict_polarity) in - let up = match up with - | Field (loc, DefT (_, OptionalT ut), upolarity) -> - Field (loc, ut, upolarity) - | _ -> up - in - if lit - then - match Property.read_t lp, Property.read_t up with - | Some lt, Some ut -> rec_flow cx trace (lt, UseT (use_op, ut)) - | _ -> () - else - rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) - | _ -> - (* property doesn't exist in inflowing type *) - match up with - | Field (_, DefT (_, OptionalT _), _) when lit -> - (* if property is marked optional or otherwise has a maybe type, + if not (Speculation.speculating ()) then Context.set_prop cx lflds s up + ) else + (* prop from aliased LB *) + rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) + | (None, Some { key; value; dict_polarity; _ }) when not (is_dictionary_exempt s) -> + rec_flow + cx + trace + ( string_key s reason_prop, + UseT + (Frame (IndexerKeyCompatibility { lower = lreason; upper = ureason }, use_op'), key) + ); + let lp = Field (None, value, dict_polarity) in + let up = + match up with + | Field (loc, OptionalT (_, ut), upolarity) -> Field (loc, ut, upolarity) + | _ -> up + in + if lit then + match (Property.read_t lp, Property.read_t up) with + | (Some lt, Some ut) -> rec_flow cx trace (lt, UseT (use_op, ut)) + | _ -> () + else + rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) + | _ -> + (* property doesn't exist in inflowing type *) + (match up with + | Field (_, OptionalT _, _) when lit -> + (* if property is marked optional or otherwise has a maybe type, and if inflowing type is a literal (i.e., it is not an annotation), then we add it to the inflowing type as an optional property *) - (* Band-aid to avoid side effect in speculation mode. Even in + (* Band-aid to avoid side effect in speculation mode. Even in non-speculation mode, the side effect here is racy, so it either needs to be taken out or replaced with something more robust. Tracked by #11299251. 
*) - if not (Speculation.speculating ()) then - Context.set_prop cx lflds s up; - | Field (_, DefT (_, OptionalT _), Positive) - when lflags.exact && Obj_type.sealed_in_op ureason lflags.sealed -> - rec_flow cx trace (lproto, - LookupT (ureason, NonstrictReturning (None, None), [], propref, - LookupProp (use_op, up))) - | _ -> - (* otherwise, look up the property in the prototype *) - let strict = match Obj_type.sealed_in_op ureason lflags.sealed, ldict with - | false, None -> ShadowRead (Some lreason, Nel.one lflds) - | true, None -> Strict lreason - | _ -> NonstrictReturning (None, None) - in - rec_flow cx trace (lproto, - LookupT (ureason, strict, [], propref, - LookupProp (use_op, up))) - (* TODO: instead, consider extending inflowing type with s:t2 when it - is not sealed *) - ); - - (* Any properties in l but not u must match indexer *) - (match udict with - | None -> () - | Some { key; value; dict_polarity; _ } -> - iter_real_props cx lflds (fun ~is_sentinel s lp -> - if not (Context.has_prop cx uflds s) - then ( - rec_flow cx trace (string_key s lreason, UseT ( - Frame (IndexerKeyCompatibility {lower = lreason; upper = ureason}, use_op), - key - )); - let use_op = Frame (PropertyCompatibility { - prop = Some s; - lower = lreason; - upper = ureason; - is_sentinel; - }, use_op) in - let lp = match lp with - | Field (loc, DefT (_, OptionalT lt), lpolarity) -> - Field (loc, lt, lpolarity) - | _ -> lp - in - let up = Field (None, value, dict_polarity) in - if lit - then - match Property.read_t lp, Property.read_t up with - | Some lt, Some ut -> rec_flow cx trace (lt, UseT (use_op, ut)) - | _ -> () - else - let reason_prop = replace_reason_const (RProperty (Some s)) lreason in - let propref = Named (reason_prop, s) in - rec_flow_p cx trace ~use_op lreason ureason propref (lp, up))); + if not (Speculation.speculating ()) then Context.set_prop cx lflds s up + | Field (_, OptionalT _, Polarity.Positive) + when lflags.exact && Obj_type.sealed_in_op ureason lflags.sealed -> + rec_flow + cx + trace + ( lproto, + LookupT + (ureason, NonstrictReturning (None, None), [], propref, LookupProp (use_op, up)) + ) + | _ -> + (* When an object type is unsealed, typing it as another object type should add properties + of that object type to it as needed. We do this when not speculating, because adding + properties changes state, and the state change is necessary to enforce + consistency. + + TODO: adding properties to unsealed objects directly is done whether speculating or not, + and that should also be done when not speculating; during speculating, it should be a + deferred action. 
*) + if + (not (Obj_type.sealed_in_op ureason lflags.sealed)) + && not (Speculation.speculating ()) + then + Context.set_prop cx lflds s up + else + (* otherwise, look up the property in the prototype *) + let strict = + match (Obj_type.sealed_in_op ureason lflags.sealed, ldict) with + | (false, None) -> ShadowRead (Some lreason, Nel.one lflds) + | (true, None) -> Strict lreason + | _ -> NonstrictReturning (None, None) + in + rec_flow + cx + trace + (lproto, LookupT (ureason, strict, [], propref, LookupProp (use_op, up))))); + + (* Any properties in l but not u must match indexer *) + (match udict with + | None -> () + | Some { key; value; dict_polarity; _ } -> + Context.iter_real_props cx lflds (fun s lp -> + if not (Context.has_prop cx uflds s) then ( + rec_flow + cx + trace + ( string_key s lreason, + UseT + ( Frame (IndexerKeyCompatibility { lower = lreason; upper = ureason }, use_op), + key ) ); + let use_op = + Frame + (PropertyCompatibility { prop = Some s; lower = lreason; upper = ureason }, use_op) + in + let lp = + match lp with + | Field (loc, OptionalT (_, lt), lpolarity) -> Field (loc, lt, lpolarity) + | _ -> lp + in + let up = Field (None, value, dict_polarity) in + if lit then + match (Property.read_t lp, Property.read_t up) with + | (Some lt, Some ut) -> rec_flow cx trace (lt, UseT (use_op, ut)) + | _ -> () + else + let reason_prop = replace_desc_reason (RProperty (Some s)) lreason in + let propref = Named (reason_prop, s) in + rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) + )); (* Previously, call properties were stored in the props map, and were - checked against dictionary upper bounds. This is wrong, but useful for - distinguishing between thunk-like types found in graphql-js. - - Now that call properties are stored separately, it is particularly - egregious to emit this constraint. This only serves to maintain buggy - behavior, which should be fixed, and this code removed. *) - (match lcall, ucall with - | Some lcall, None -> + checked against dictionary upper bounds. This is wrong, but useful for + distinguishing between thunk-like types found in graphql-js. + + Now that call properties are stored separately, it is particularly + egregious to emit this constraint. This only serves to maintain buggy + behavior, which should be fixed, and this code removed. 
*) + (match (lcall, ucall) with + | (Some lcall, None) -> let s = "$call" in - let use_op = Frame (PropertyCompatibility { - prop = Some s; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, use_op) in - let lp = match Context.find_call cx lcall with - | DefT (_, OptionalT t) -> Field (None, t, Positive) - | t -> Field (None, t, Positive) + let use_op = + Frame (PropertyCompatibility { prop = Some s; lower = lreason; upper = ureason }, use_op) + in + let lp = + match Context.find_call cx lcall with + | OptionalT (_, t) -> Field (None, t, Polarity.Positive) + | t -> Field (None, t, Polarity.Positive) in let up = Field (None, value, dict_polarity) in - if lit - then - match Property.read_t lp, Property.read_t up with - | Some lt, Some ut -> rec_flow cx trace (lt, UseT (use_op, ut)) + if lit then + match (Property.read_t lp, Property.read_t up) with + | (Some lt, Some ut) -> rec_flow cx trace (lt, UseT (use_op, ut)) | _ -> () else - let reason_prop = replace_reason_const (RProperty (Some s)) lreason in + let reason_prop = replace_desc_reason (RProperty (Some s)) lreason in let propref = Named (reason_prop, s) in rec_flow_p cx trace ~use_op lreason ureason propref (lp, up) - | _ -> ()); - ); - - rec_flow cx trace (uproto, - ReposUseT (ureason, false, use_op, DefT (lreason, ObjT l_obj))) - -and is_object_prototype_method = function - | "isPrototypeOf" - | "hasOwnProperty" - | "propertyIsEnumerable" - | "toLocaleString" - | "toString" - | "valueOf" -> true - | _ -> false - -(* This must list all of the properties on Function.prototype. AnyFunT is a - function that lets you get/set any property you want on it in an untracked - way (like AnyObjT, but callable), except for these properties. - - Ideally we'd be able to look these up from the Function lib declaration, but - we don't have a good way to do that while still allowing AnyFunT to act like - a dictionary. *) -and is_function_prototype = function - | "apply" - | "bind" - | "call" - | "arguments" - | "caller" - | "length" - | "name" -> true - | x -> is_object_prototype_method x - -(* neither object prototype methods nor callable signatures should be - * implied by an object indexer type *) -and is_dictionary_exempt = function - | x when is_object_prototype_method x -> true - | _ -> false - -(* common case checking a function as an object *) -and quick_error_fun_as_obj cx trace ~use_op reason statics reason_o props = - let statics_own_props = match statics with - | DefT (_, ObjT { props_tmap; _ }) -> Some (Context.find_props cx props_tmap) - | DefT (_, AnyFunT) - | DefT (_, MixedT _) -> Some SMap.empty - | _ -> None - in - match statics_own_props with - | Some statics_own_props -> - let props_not_found = SMap.filter (fun x p -> - let optional = match p with - | Field (_, DefT (_, OptionalT _), _) -> true - |_ -> false + | _ -> ())); + + rec_flow + cx + trace + (uproto, ReposUseT (ureason, false, use_op, DefT (lreason, bogus_trust (), ObjT l_obj))) + + (* Returns true when __flow should succeed immediately if EmptyT of a given + flavor flows into u. *) + and empty_success flavor u = + match (flavor, u) with + (* Work has to happen when Empty flows to these types whether the EmptyT + originates from generic testing or elsewhere. This logic was previously + captured in ground_subtype. 
*) + | (_, UseT (_, OpenT _)) + | (_, UseT (_, TypeDestructorTriggerT _)) + | (_, ChoiceKitUseT _) + | (_, CondT _) + | (_, DestructuringT _) + | (_, MakeExactT _) + | (_, ObjKitT _) + | (_, ReposLowerT _) + | (_, ReposUseT _) + | (_, UnifyT _) -> + false + | (Bottom, _) -> true + (* After this line, flavor is always Zeroed. *) + (* Special cases: these cases actually utilize the fact that the LHS is Empty, + either by specially propagating it or selecting cases, etc. *) + | (_, UseT (_, ExactT _)) + | (_, AdderT _) + | (_, AndT _) + | (_, OrT _) + (* Propagation cases: these cases don't use the fact that the LHS is + empty, but they propagate the LHS to other types and trigger additional + flows that may need to occur. *) + + | (_, UseT (_, DefT (_, _, PolyT _))) + | (_, UseT (_, TypeAppT _)) + | (_, UseT (_, MaybeT _)) + | (_, UseT (_, MergedT _)) + | (_, UseT (_, OpaqueT _)) + | (_, UseT (_, OptionalT _)) + | (_, UseT (_, ReposT _)) + | (_, UseT (_, ThisClassT _)) + | (_, UseT (_, ThisTypeAppT _)) + | (_, UseT (_, UnionT _)) + | (_, AssertExportIsTypeT _) + | (_, AssertImportIsValueT _) + | (_, BecomeT _) + | (_, BindT _) + | (_, CallLatentPredT _) + | (_, CallOpenPredT _) + | (_, CJSExtractNamedExportsT _) + | (_, ComparatorT _) + | (_, DebugPrintT _) + | (_, EqT _) + | (_, ExportTypeT _) + | (_, IdxUnwrap _) + | (_, ImportTypeT _) + | (_, ImportTypeofT _) + | (_, IntersectionPreprocessKitT _) + | (_, InvariantT _) + | (_, MapTypeT (_, _, TupleMap _, _)) + | (_, NotT _) + | (_, NullishCoalesceT _) + | (_, ObjAssignToT _) + | (_, ObjTestT _) + | (_, ObjTestProtoT _) + | (_, OptionalChainT _) + | (_, SentinelPropTestT _) + | (_, TestPropT _) -> + false + (* Error prevention: we should succeed because otherwise we'll hit + a case with a wildcard on the LHS that raises an error, which in + this situation would be spurious *) + | (_, UseT (_, AnnotT _)) + | (_, UseT (_, EvalT _)) + | (_, UseT (_, DefT (_, _, TypeT _))) + | (_, UseT (_, ShapeT _)) + | (_, AssertArithmeticOperandT _) + | (_, AssertBinaryInLHST _) + | (_, AssertBinaryInRHST _) + | (_, AssertForInRHST _) + | (_, LookupT _) + | (_, ImplementsT _) + | (_, SetProtoT _) + (* No more work: we can succeed without flowing EmptyT any further + because the relevant cases don't propagate the LHS to any other + types; either the flow would succeed anyways or it would fall + through to the final catch-all error case and cause a spurious + error. 
*) + + | (_, UseT _) + | (_, ArrRestT _) + | (_, CallElemT _) + | (_, CallT _) + | (_, CJSRequireT _) + | (_, ConcretizeTypeAppsT _) + | (_, ConstructorT _) + | (_, CopyNamedExportsT _) + | (_, CopyTypeExportsT _) + | (_, DebugSleepT _) + | (_, ElemT _) + | (_, ExportNamedT _) + | (_, ExtendsUseT _) + | (_, GetElemT _) + | (_, GetKeysT _) + | (_, GetPrivatePropT _) + | (_, GetPropT _) + | (_, GetProtoT _) + | (_, GetStaticsT _) + | (_, GetValuesT _) + | (_, GuardT _) + | (_, HasOwnPropT _) + | (_, IdxUnMaybeifyT _) + | (_, ImportDefaultT _) + | (_, ImportModuleNsT _) + | (_, ImportNamedT _) + | (_, MatchPropT _) + | (_, MapTypeT _) (* Note the TupleMap case above *) + | (_, MethodT _) + | (_, MixinT _) + | (_, ObjAssignFromT _) + | (_, ObjFreezeT _) + | (_, ObjRestT _) + | (_, ObjSealT _) + | (_, PredicateT _) + | (_, ReactInToProps _) + | (_, ReactKitT _) + | (_, ReactPropsToOut _) + | (_, RefineT _) + | (_, ResolveSpreadT _) + | (_, SetElemT _) + | (_, SetPrivatePropT _) + | (_, SetPropT _) + | (_, SpecializeT _) + | (_, SubstOnPredT _) + | (_, SuperT _) + | (_, ThisSpecializeT _) + | (_, ToStringT _) + | (_, TypeAppVarianceCheckT _) + | (_, UnaryMinusT _) + | (_, VarianceCheckT _) + | (_, ModuleExportsAssignT _) -> + true + + (* "Expands" any to match the form of a type. Allows us to reuse our propagation rules for any + cases. Note that it is not always safe to do this (ie in the case of unions). + Note: we can get away with a shallow (i.e. non-recursive) expansion here because the flow between + the any-expanded type and the original will handle the any-propagation to any relevant positions, + some of which may invoke this function when they hit the any propagation functions in the + recusive call to __flow. *) + and expand_any _cx any t = + let only_any _ = any in + match t with + | DefT (r, trust, ArrT (ArrayAT _)) -> DefT (r, trust, ArrT (ArrayAT (any, None))) + | DefT (r, trust, ArrT (TupleAT (_, ts))) -> + DefT (r, trust, ArrT (TupleAT (any, Core_list.map ~f:only_any ts))) + | OpaqueT (r, ({ underlying_t; super_t; opaque_type_args; _ } as opaquetype)) -> + let opaquetype = + { + opaquetype with + underlying_t = Option.(underlying_t >>| only_any); + super_t = Option.(super_t >>| only_any); + opaque_type_args = + Core_list.( + opaque_type_args >>| (fun (str, r', _, polarity) -> (str, r', any, polarity))); + } in - not ( - optional || - is_function_prototype x || - SMap.mem x statics_own_props - ) - ) props in - SMap.iter (fun x _ -> - let use_op = Frame (PropertyCompatibility { - prop = Some x; - lower = reason; - upper = reason_o; - is_sentinel = false; - }, use_op) in - let reason_prop = - replace_reason (fun desc -> RPropertyOf (x, desc)) reason_o in - let err = FlowError.EPropNotFound (Some x, (reason_prop, reason), use_op) in - add_output cx ~trace err - ) props_not_found; - not (SMap.is_empty props_not_found) - | None -> false - -(* NOTE: The following function looks similar to TypeUtil.quick_subtype, but is in fact more - complicated: it avoids deep structural checks, admits `any`, etc. It might be worth it to - simplify this function later. 
*) -and ground_subtype = function - | TypeDestructorTriggerT _, UseT (_, TypeDestructorTriggerT _) -> false - (* tvars are not considered ground, so they're not part of this relation *) - | (OpenT _, _) | (_, UseT (_, OpenT _)) -> false + OpaqueT (r, opaquetype) + | _ -> + (* Just returning any would result in infinite recursion in most cases *) + failwith "no any expansion defined for this case" - (* Allow any lower bound to be repositioned *) - | (_, ReposLowerT _) -> false - | (_, ReposUseT _) -> false + and any_prop_to_function + use_op + { + this_t; + params; + rest_param; + return_t; + closure_t = _; + is_predicate = _; + changeset = _; + def_reason = _; + } + covariant + contravariant = + List.iter (snd %> contravariant ~use_op) params; + Option.iter ~f:(fun (_, _, t) -> contravariant ~use_op t) rest_param; + contravariant ~use_op this_t; + covariant ~use_op return_t + + (* types trapped for any propagation. Returns true if this function handles the any case, either + by propagating or by doing the trivial case. False if the usetype needs to be handled + separately. *) + and any_propagated cx trace any u = + let covariant_flow ~use_op t = rec_flow_t cx trace ~use_op (any, t) in + let contravariant_flow ~use_op t = rec_flow_t cx trace ~use_op (t, any) in + match u with + | NotT (reason, t) -> + rec_flow_t cx trace (AnyT.why (AnyT.source any) reason, t); + true + | SubstOnPredT (_, _, OpenPredT (_, t, _, _)) -> + covariant_flow ~use_op:unknown_use t; + true + | UseT (use_op, DefT (_, _, ArrT (ROArrayAT t))) (* read-only arrays are covariant *) + | UseT (use_op, DefT (_, _, ClassT t)) (* mk_instance ~for_type:false *) + | UseT (use_op, ExactT (_, t)) + | UseT (use_op, OpenPredT (_, t, _, _)) + | UseT (use_op, ShapeT t) -> + covariant_flow ~use_op t; + true + | UseT (use_op, DefT (_, _, ReactAbstractComponentT { config; instance })) -> + contravariant_flow ~use_op config; + covariant_flow ~use_op instance; + true + (* Some types just need to be expanded and filled with any types *) + | UseT (use_op, (DefT (_, _, ArrT (ArrayAT _)) as t)) + | UseT (use_op, (DefT (_, _, ArrT (TupleAT _)) as t)) + | UseT (use_op, (OpaqueT _ as t)) -> + rec_flow_t cx trace ~use_op (expand_any cx any t, t); + true + | UseT (use_op, DefT (_, _, FunT (_, _, funtype))) -> + (* function type *) + any_prop_to_function use_op funtype covariant_flow contravariant_flow; + true + | UseT (_, DefT (reason, _, TypeT (_, t))) -> + (* import type *) + (* any can function as class, hence ok for annotations *) + rec_flow cx trace (any, BecomeT (reason, t)); + true + | ReactKitT (_, _, React.CreateClass (React.CreateClass.PropTypes _, _, _)) + | ReactKitT (_, _, React.SimplifyPropType _) -> + (* Propagating through here causes exponential blowup. 
React PropTypes are deprecated + anyways, so it is not unreasonable to just not trust them *) + true + | AdderT _ + | AndT _ + | ArrRestT _ + | BecomeT _ + | BindT _ + | CallT _ + | CallElemT _ + | CallLatentPredT _ + | CallOpenPredT _ + | ChoiceKitUseT _ + | CJSExtractNamedExportsT _ + | CJSRequireT _ + | CondT _ + | ConstructorT _ + | CopyNamedExportsT _ + | CopyTypeExportsT _ + | DestructuringT _ + | ElemT _ + | ExportNamedT _ + | ExportTypeT _ + | AssertExportIsTypeT _ + | GetElemT _ + | GetKeysT _ + | GetPrivatePropT _ + | GetPropT _ + | GetProtoT _ + | GetStaticsT _ + | GetValuesT _ + | GuardT _ + | IdxUnMaybeifyT _ + | IdxUnwrap _ + | ImportDefaultT _ + | ImportModuleNsT _ + | ImportNamedT _ + | ImportTypeT _ + | ImportTypeofT _ + | IntersectionPreprocessKitT _ + | LookupT _ + | MatchPropT _ + | MakeExactT _ + | MapTypeT _ + | MethodT _ + | MixinT _ + | NullishCoalesceT _ + | ObjFreezeT _ + | ObjKitT _ + | ObjRestT _ + | ObjSealT _ + | ObjTestProtoT _ + | ObjTestT _ + | OptionalChainT _ + | OrT _ + | PredicateT _ + | ReactKitT _ + | RefineT _ + | ReposLowerT _ + | ReposUseT _ + | ResolveSpreadT _ + | SentinelPropTestT _ + | SetElemT _ + | SetPropT _ + | ModuleExportsAssignT _ + | SpecializeT _ + | SubstOnPredT _ + (* Should be impossible. We only generate these with OpenPredTs. *) + + | TestPropT _ + | ThisSpecializeT _ + | ToStringT _ + | UnaryMinusT _ + | UnifyT _ + | UseT (_, AnnotT _) (* this transforms into a ReposUseT *) + | UseT (_, MaybeT _) (* used to filter maybe *) + | UseT (_, MergedT _) (* Already handled in __flow *) + | UseT (_, OptionalT _) (* used to filter optional *) + | ObjAssignFromT _ + (* Handled in __flow *) + + | ObjAssignToT _ (* Handled in __flow *) + | UseT (_, ThisTypeAppT _) + (* Should never occur, so we just defer to __flow to handle errors *) + + | UseT (_, InternalT _) + | UseT (_, MatchingPropT _) + | UseT (_, DefT (_, _, IdxWrapper _)) + | UseT (_, ModuleT _) + | ReactPropsToOut _ + | ReactInToProps _ + (* Ideally, any would pollute every member of the union. However, it should be safe to only + taint the type in the branch that flow picks when generating constraints for this, so + this can be handled by the pre-existing rules *) + + | UseT (_, UnionT _) + | UseT (_, IntersectionT _) (* Already handled in the wildcard case in __flow *) + | UseT (_, OpenT _) -> + false + (* These types have no t_out, so can't propagate anything. 
Thus we short-circuit by returning + true *) + | AssertArithmeticOperandT _ + | AssertBinaryInLHST _ + | AssertBinaryInRHST _ + | AssertForInRHST _ + | AssertImportIsValueT _ + | ComparatorT _ + | DebugPrintT _ + | DebugSleepT _ + | EqT _ + | HasOwnPropT _ + | ImplementsT _ + | InvariantT _ + | SetPrivatePropT _ + | SetProtoT _ + | SuperT _ + | TypeAppVarianceCheckT _ + | VarianceCheckT _ + | ConcretizeTypeAppsT _ + | ExtendsUseT _ + | UseT (_, KeysT _) (* Any won't interact with the type inside KeysT, so it can't be tainted *) + -> + true + (* TODO: Punt on these for now, but figure out whether these should fall through or not *) + | UseT (_, CustomFunT (_, ReactElementFactory _)) + | UseT (_, CustomFunT (_, ReactPropType _)) + | UseT (_, CustomFunT (_, ObjectAssign)) + | UseT (_, CustomFunT (_, ObjectGetPrototypeOf)) + | UseT (_, CustomFunT (_, ObjectSetPrototypeOf)) + | UseT (_, CustomFunT (_, Compose _)) + | UseT (_, CustomFunT (_, ReactCreateClass)) + | UseT (_, CustomFunT (_, ReactCreateElement)) + | UseT (_, CustomFunT (_, ReactCloneElement)) + | UseT (_, CustomFunT (_, Idx)) + | UseT (_, CustomFunT (_, TypeAssertIs)) + | UseT (_, CustomFunT (_, TypeAssertThrows)) + | UseT (_, CustomFunT (_, TypeAssertWraps)) + | UseT (_, CustomFunT (_, DebugPrint)) + | UseT (_, CustomFunT (_, DebugThrow)) + | UseT (_, CustomFunT (_, DebugSleep)) + | UseT (_, DefT (_, _, ObjT _)) + | UseT (_, DefT (_, _, InstanceT _)) + | UseT _ -> + true + + (* Propagates any flows in case of contravariant/invariant subtypes: the any must pollute + all types in contravariant positions when t <: any. *) + and any_propagated_use cx trace use_op any l = + let covariant_flow ~use_op t = rec_flow_t cx trace ~use_op (t, any) in + let contravariant_flow ~use_op t = rec_flow_t cx trace ~use_op (any, t) in + match l with + | DefT (_, _, FunT (_, _, funtype)) -> + (* function types are contravariant in the arguments *) + any_prop_to_function use_op funtype covariant_flow contravariant_flow; + true + (* Some types just need to be expanded and filled with any types *) + | (DefT (_, _, ArrT (ArrayAT _)) as t) + | (DefT (_, _, ArrT (TupleAT _)) as t) + | (OpaqueT _ as t) -> + rec_flow_t cx trace ~use_op (t, expand_any cx any t); + true + | KeysT _ -> + (* Keys cannot be tainted by any *) + true + | DefT (_, _, ClassT t) + | DefT (_, _, ArrT (ROArrayAT t)) + | DefT (_, _, TypeT (_, t)) -> + covariant_flow ~use_op t; + true + | DefT (_, _, ReactAbstractComponentT { config; instance }) -> + contravariant_flow ~use_op config; + covariant_flow ~use_op instance; + true + (* These types have no negative positions in their lower bounds *) + | ExistsT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ + | FunProtoT _ + | ObjProtoT _ + | NullProtoT _ -> + true + (* Handled already in __flow *) + | AnnotT _ + | ExactT _ + | ThisClassT _ + | ReposT _ + | EvalT _ + | MergedT _ + | OpenPredT _ + | InternalT (ReposUpperT _) + | InternalT (OptionalChainVoidT _) + | MatchingPropT _ + | ShapeT _ + | OptionalT _ + | MaybeT _ + | DefT (_, _, PolyT _) + | TypeAppT _ + | UnionT _ + | IntersectionT _ + | ThisTypeAppT _ -> + false + (* Should never occur as the lower bound of any *) + | BoundT _ + | InternalT (ChoiceKitT _) + | InternalT (ExtendsT _) + | ModuleT _ -> + false + (* Need special action later *) + | OpenT _ -> false + (* TODO: Punt on these for now, but figure out whether these should fall through or not *) + | CustomFunT (_, ReactElementFactory _) + | CustomFunT (_, ReactPropType _) + | CustomFunT (_, ObjectAssign) + | 
CustomFunT (_, ObjectGetPrototypeOf) + | CustomFunT (_, ObjectSetPrototypeOf) + | CustomFunT (_, Compose _) + | CustomFunT (_, ReactCreateClass) + | CustomFunT (_, ReactCreateElement) + | CustomFunT (_, ReactCloneElement) + | CustomFunT (_, Idx) + | CustomFunT (_, TypeAssertIs) + | CustomFunT (_, TypeAssertThrows) + | CustomFunT (_, TypeAssertWraps) + | CustomFunT (_, DebugPrint) + | CustomFunT (_, DebugThrow) + | CustomFunT (_, DebugSleep) + | DefT (_, _, ObjT _) + | DefT (_, _, InstanceT _) + | DefT _ + | AnyT _ + | TypeDestructorTriggerT _ -> + true - | (_, ObjKitT _) -> false + (*********************) + (* inheritance utils *) + (*********************) + and flow_type_args cx trace ~use_op lreason ureason targs1 targs2 = + List.iter2 + (fun (x, targ_reason, t1, polarity) (_, _, t2, _) -> + let use_op = + Frame + ( TypeArgCompatibility + { name = x; targ = targ_reason; lower = lreason; upper = ureason; polarity }, + use_op ) + in + match polarity with + | Polarity.Negative -> rec_flow cx trace (t2, UseT (use_op, t1)) + | Polarity.Positive -> rec_flow cx trace (t1, UseT (use_op, t2)) + | Polarity.Neutral -> rec_unify cx trace ~use_op t1 t2) + targs1 + targs2 + + (* dispatch checks to verify that lower satisfies the structural + requirements given in the tuple. *) + (* TODO: own_props/proto_props is misleading, since they come from interfaces, + which don't have an own/proto distinction. *) + and structural_subtype cx trace ~use_op lower reason_struct (own_props, proto_props, call_id) = + let lreason = reason_of_t lower in + let own_props = Context.find_props cx own_props in + let proto_props = Context.find_props cx proto_props in + let call_t = Option.map call_id ~f:(Context.find_call cx) in + own_props + |> SMap.iter (fun s p -> + let use_op = + Frame + ( PropertyCompatibility { prop = Some s; lower = lreason; upper = reason_struct }, + use_op ) + in + match p with + | Field (_, OptionalT (_, t), polarity) -> + let propref = + let reason_prop = + update_desc_reason (fun desc -> ROptional (RPropertyOf (s, desc))) reason_struct + in + Named (reason_prop, s) + in + rec_flow + cx + trace + ( lower, + LookupT + ( reason_struct, + NonstrictReturning (None, None), + [], + propref, + LookupProp (use_op, Field (None, t, polarity)) ) ) + | _ -> + let propref = + let reason_prop = + update_desc_reason (fun desc -> RPropertyOf (s, desc)) reason_struct + in + Named (reason_prop, s) + in + rec_flow + cx + trace + (lower, LookupT (reason_struct, Strict lreason, [], propref, LookupProp (use_op, p)))); + proto_props + |> SMap.iter (fun s p -> + let use_op = + Frame + ( PropertyCompatibility { prop = Some s; lower = lreason; upper = reason_struct }, + use_op ) + in + let propref = + let reason_prop = + update_desc_reason (fun desc -> RPropertyOf (s, desc)) reason_struct + in + Named (reason_prop, s) + in + rec_flow + cx + trace + (lower, LookupT (reason_struct, Strict lreason, [], propref, LookupProp (use_op, p)))); + call_t + |> Option.iter ~f:(fun ut -> + let prop_name = Some "$call" in + let use_op = + Frame + ( PropertyCompatibility { prop = prop_name; lower = lreason; upper = reason_struct }, + use_op ) + in + match lower with + | DefT (_, _, ObjT { call_t = Some lid; _ }) + | DefT (_, _, InstanceT (_, _, _, { inst_call_t = Some lid; _ })) -> + let lt = Context.find_call cx lid in + rec_flow cx trace (lt, UseT (use_op, ut)) + | _ -> + let reason_prop = + update_desc_reason (fun desc -> RPropertyOf ("$call", desc)) reason_struct + in + add_output + cx + ~trace + (Error_message.EStrictLookupFailed + 
((reason_prop, lreason), lreason, prop_name, Some use_op))) + + and check_super cx trace ~use_op lreason ureason t x p = + let use_op = + Frame (PropertyCompatibility { prop = Some x; lower = lreason; upper = ureason }, use_op) + in + let strict = NonstrictReturning (None, None) in + let reason_prop = replace_desc_reason (RProperty (Some x)) lreason in + lookup_prop cx trace t reason_prop lreason strict x (SuperProp (use_op, p)) - | (_, ChoiceKitUseT _) -> false + and eval_latent_pred cx ?trace reason curr_t p i = + let evaluated = Context.evaluated cx in + match IMap.get i evaluated with + | None -> + Tvar.mk_where cx reason (fun tvar -> + Context.set_evaluated cx (IMap.add i tvar evaluated); + flow_opt cx ?trace (curr_t, RefineT (reason, p, tvar))) + | Some it -> it + + and eval_evalt cx ?trace t evaluator id = + match evaluator with + | LatentPredT (reason, pred) -> eval_latent_pred cx ?trace reason t pred id + | TypeDestructorT (use_op, reason, d) -> + let (_, result) = + mk_type_destructor + cx + ~trace:(Option.value ~default:Trace.dummy_trace trace) + use_op + reason + t + d + id + in + result + + and eval_selector cx ?trace reason curr_t s tvar = + flow_opt + cx + ?trace + ( curr_t, + match s with + | Prop (x, has_default) -> + let lookup_ub () = + let use_op = unknown_use in + let action = ReadProp { use_op; obj_t = curr_t; tout = tvar } in + (* LookupT unifies with the default with tvar. To get around that, we can create some + * indirection with a fresh tvar in between to ensure that we only add a lower bound + *) + let default_tout = + Tvar.mk_where cx reason (fun tout -> flow_opt cx ?trace (tout, UseT (use_op, tvar))) + in + let void_reason = replace_desc_reason RVoid (reason_of_t tvar) in + let strict = + NonstrictReturning + (Some (DefT (void_reason, bogus_trust (), VoidT), default_tout), None) + in + LookupT (reason, strict, [], Named (reason, x), action) + in + (* We use GetPropT instead of a strict lookup because a strict lookup directly on + * an unsealed object would cause an error. *) + let getprop_ub () = GetPropT (unknown_use, reason, Named (reason, x), tvar) in + if has_default then + match curr_t with + | DefT (_, _, NullT) -> getprop_ub () + | DefT (_, _, ObjT { flags = { exact = true; _ }; proto_t = ObjProtoT _; _ }) -> + lookup_ub () + | _ -> getprop_ub () + else + getprop_ub () + | Elem key -> GetElemT (unknown_use, reason, key, tvar) + | ObjRest xs -> ObjRestT (reason, xs, tvar) + | ArrRest i -> ArrRestT (unknown_use, reason, i, tvar) + | Default -> PredicateT (NotP VoidP, tvar) ) + + and mk_type_destructor cx ~trace use_op reason t d id = + let evaluated = Context.evaluated cx in + (* As an optimization, unwrap resolved tvars so that they are only evaluated + * once to an annotation instead of a tvar that gets a bound on both sides. *) + let t = + match t with + | OpenT (_, id) -> + let (_, constraints) = Context.find_constraints cx id in + (match constraints with + | Resolved (_, t) + | FullyResolved (_, t) -> + t + | Unresolved _ -> t) + | _ -> t + in + match (t, IMap.get id evaluated) with + (* The OpenT branch is a correct implementation of type destructors for all + * types. However, because it adds a constraint to both sides of a type we may + * end up doing some work twice. So as an optimization for concrete types + * we have a fall-through branch that only evaluates our type destructor once. 
+ * The second branch then uses AnnotT to both concretize the result for use + * as a lower or upper bound and prevent new bounds from being added to + * the result. + * + * MergedT should also get this treatment as it is a merged "description" of + * an OpenT. *) + | ((OpenT _ | MergedT _), Some t) -> (false, t) + | ((OpenT _ | MergedT _), None) -> + ( false, + Tvar.mk_where cx reason (fun tvar -> + Context.set_evaluated cx (IMap.add id tvar evaluated); + let x = TypeDestructorTriggerT (use_op, reason, None, d, tvar) in + rec_flow_t cx trace (t, x); + rec_flow_t cx trace (x, t)) ) + | (_, Some t) -> (true, t) + | (AnnotT (r, t, use_desc), None) -> + ( true, + Tvar.mk_where cx reason (fun tvar -> + Context.set_evaluated cx (IMap.add id tvar evaluated); + let repos = Some (r, use_desc) in + let x = TypeDestructorTriggerT (use_op, reason, repos, d, tvar) in + rec_flow_t cx trace (t, x)) ) + | (_, None) -> + ( true, + Tvar.mk_where cx reason (fun tvar -> + Context.set_evaluated cx (IMap.add id tvar evaluated); + eval_destructor cx ~trace use_op reason t d tvar) ) - (* Allow deferred unification with `any` *) - | (_, UnifyT _) -> false + and eval_destructor cx ~trace use_op reason t d tout = + match t with + (* Specialize TypeAppTs before evaluating them so that we can handle special + cases. Like the union case below. mk_typeapp_instance will return an AnnotT + which will be fully resolved using the AnnotT case above. *) + | TypeAppT (reason_tapp, use_op_tapp, c, ts) -> + let destructor = TypeDestructorT (use_op, reason, d) in + let t = + mk_typeapp_instance cx ~trace ~use_op:use_op_tapp ~reason_op:reason ~reason_tapp c ts + in + rec_flow_t cx trace (Cache.Eval.id t destructor, tout) + (* If we are destructuring a union, evaluating the destructor on the union + itself may have the effect of splitting the union into separate lower + bounds, which prevents the speculative match process from working. + Instead, we preserve the union by pushing down the destructor onto the + branches of the unions. *) + | UnionT (r, rep) -> + let destructor = TypeDestructorT (use_op, reason, d) in + rec_flow_t + cx + trace + (UnionT (r, rep |> UnionRep.ident_map (fun t -> Cache.Eval.id t destructor)), tout) + | MaybeT (r, t) -> + let destructor = TypeDestructorT (use_op, reason, d) in + let reason = replace_desc_new_reason RNullOrVoid r in + let rep = + UnionRep.make + (let null = NullT.make reason |> with_trust bogus_trust in + Cache.Eval.id null destructor) + (let void = VoidT.make reason |> with_trust bogus_trust in + Cache.Eval.id void destructor) + [Cache.Eval.id t destructor] + in + rec_flow_t cx trace (UnionT (r, rep), tout) + | AnnotT (r, t, use_desc) -> + let t = reposition_reason ~trace cx r ~use_desc t in + let destructor = TypeDestructorT (use_op, reason, d) in + rec_flow_t cx trace (Cache.Eval.id t destructor, tout) + | _ -> + rec_flow + cx + trace + ( t, + match d with + | NonMaybeType -> + let maybe_r = update_desc_reason (fun desc -> RMaybe desc) reason in + (* We intentionally use `unknown_use` here! When we flow to a tout we never + * want to carry a `use_op`. We want whatever `use_op` the tout is used with + * to win. 
*) + UseT (unknown_use, MaybeT (maybe_r, tout)) + | PropertyType x -> + let reason_op = replace_desc_reason (RProperty (Some x)) reason in + GetPropT (use_op, reason, Named (reason_op, x), tout) + | ElementType t -> GetElemT (use_op, reason, t, tout) + | Bind t -> BindT (use_op, reason, mk_methodcalltype t None [] tout, true) + | SpreadType (options, todo_rev, head_slice) -> + Object.( + Object.Spread.( + let tool = Resolve Next in + let state = + { + todo_rev; + acc = Option.value_map ~f:(fun x -> [InlineSlice x]) ~default:[] head_slice; + } + in + ObjKitT (use_op, reason, tool, Spread (options, state), tout))) + | RestType (options, t) -> + Object.( + Object.Rest.( + let tool = Resolve Next in + let state = One t in + ObjKitT (use_op, reason, tool, Rest (options, state), tout))) + | ReadOnlyType -> Object.(ObjKitT (use_op, reason, Resolve Next, ReadOnly, tout)) + | ValuesType -> GetValuesT (reason, tout) + | CallType args -> + let args = Core_list.map ~f:(fun arg -> Arg arg) args in + let call = mk_functioncalltype reason None args tout in + let call = { call with call_strict_arity = false } in + let use_op = + match use_op with + (* The following use ops are for operations that internally delegate to CallType. We + don't want to leak the internally delegation to error messages by pushing an + additional frame. Alternatively, we could have pushed here and filtered out when + rendering error messages, but that seems a bit wasteful. *) + | Frame (TupleMapFunCompatibility _, _) + | Frame (ObjMapFunCompatibility _, _) + | Frame (ObjMapiFunCompatibility _, _) -> + use_op + (* For external CallType operations, we push an additional frame to distinguish their + error messages from those of "normal" calls. *) + | _ -> Frame (CallFunCompatibility { n = List.length args }, use_op) + in + CallT (use_op, reason, call) + | TypeMap tmap -> MapTypeT (use_op, reason, tmap, tout) + | ReactElementPropsType -> ReactKitT (use_op, reason, React.GetProps tout) + | ReactElementConfigType -> ReactKitT (use_op, reason, React.GetConfig tout) + | ReactElementRefType -> ReactKitT (use_op, reason, React.GetRef tout) + | ReactConfigType default_props -> + ReactKitT (use_op, reason, React.GetConfigType (default_props, tout)) ) + + (* TODO: flesh this out *) + and check_polarity cx ?trace polarity = function + (* base case *) + | BoundT (reason, name, tp_polarity) -> + if not (Polarity.compat (tp_polarity, polarity)) then + add_output + cx + ?trace + (Error_message.EPolarityMismatch + { reason; name; expected_polarity = tp_polarity; actual_polarity = polarity }) + | OpenT _ + | DefT (_, _, NumT _) + | DefT (_, _, StrT _) + | DefT (_, _, BoolT _) + | DefT (_, _, EmptyT _) + | DefT (_, _, MixedT _) + | AnyT _ + | DefT (_, _, NullT) + | DefT (_, _, VoidT) + | DefT (_, _, SingletonStrT _) + | DefT (_, _, SingletonNumT _) + | DefT (_, _, SingletonBoolT _) + | DefT (_, _, CharSetT _) -> + () + | ExistsT _ -> () + | InternalT (OptionalChainVoidT _) -> () + | OptionalT (_, t) + | ExactT (_, t) + | MaybeT (_, t) + | ReposT (_, t) + | InternalT (ReposUpperT (_, t)) -> + check_polarity cx ?trace polarity t + | DefT (_, _, ClassT t) -> check_polarity cx ?trace polarity t + | DefT (_, _, TypeT (_, t)) -> check_polarity cx ?trace polarity t + | DefT (_, _, InstanceT (static, super, _, instance)) -> + check_polarity cx ?trace polarity static; + check_polarity cx ?trace polarity super; + check_polarity_propmap cx ?trace polarity instance.own_props; + check_polarity_propmap cx ?trace ~skip_ctor:true polarity instance.proto_props + | 
DefT (_, _, FunT (_, _, func)) -> + let f = check_polarity cx ?trace (Polarity.inv polarity) in + List.iter (fun (_, t) -> f t) func.params; + Option.iter ~f:(fun (_, _, t) -> f t) func.rest_param; + check_polarity cx ?trace polarity func.return_t + | DefT (_, _, ArrT (ArrayAT (elemt, _))) -> check_polarity cx ?trace Polarity.Neutral elemt + | DefT (_, _, ArrT (TupleAT (_, tuple_types))) -> + List.iter (check_polarity cx ?trace Polarity.Neutral) tuple_types + | DefT (_, _, ArrT (ROArrayAT elemt)) -> check_polarity cx ?trace polarity elemt + | DefT (_, _, ObjT obj) -> + check_polarity_propmap cx ?trace polarity obj.props_tmap; + (match obj.dict_t with + | Some { key; value; dict_polarity; _ } -> + check_polarity cx ?trace Polarity.Neutral key; + check_polarity cx ?trace (Polarity.mult (polarity, dict_polarity)) value + | None -> ()) + | DefT (_, _, IdxWrapper obj) -> check_polarity cx ?trace polarity obj + | UnionT (_, rep) -> List.iter (check_polarity cx ?trace polarity) (UnionRep.members rep) + | IntersectionT (_, rep) -> + List.iter (check_polarity cx ?trace polarity) (InterRep.members rep) + | DefT (_, _, PolyT (_, xs, t, _)) -> + Nel.iter (check_polarity_typeparam cx ?trace polarity) xs; + check_polarity cx ?trace polarity t + | ThisTypeAppT (_, c, _, None) -> check_polarity cx ?trace Polarity.Positive c + | ThisTypeAppT (_, c, _, Some ts) + | TypeAppT (_, _, c, ts) -> + check_polarity cx ?trace Polarity.Positive c; + check_polarity_typeapp cx ?trace polarity c ts + | DefT (_, _, ReactAbstractComponentT { config; instance }) -> + check_polarity cx ?trace Polarity.Negative config; + check_polarity cx ?trace Polarity.Positive instance + | OpaqueT (_, opaquetype) -> + Option.iter ~f:(check_polarity cx ?trace polarity) opaquetype.underlying_t; + Option.iter ~f:(check_polarity cx ?trace polarity) opaquetype.super_t + | ShapeT t -> check_polarity cx ?trace polarity t + | KeysT (_, t) -> check_polarity cx ?trace Polarity.Positive t + | ThisClassT _ + | ModuleT _ + | AnnotT _ + | MatchingPropT _ + | NullProtoT _ + | ObjProtoT _ + | FunProtoT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ + | EvalT _ + | InternalT (ExtendsT _) + | InternalT (ChoiceKitT _) + | TypeDestructorTriggerT _ + | CustomFunT _ + | OpenPredT _ + | MergedT _ -> + () - (* Allow any propagation to dictionaries *) - | DefT (_, AnyT), ElemT _ -> false + (* TODO *) + and check_polarity_propmap cx ?trace ?(skip_ctor = false) polarity id = + let pmap = Context.find_props cx id in + SMap.iter + (fun x p -> + if skip_ctor && x = "constructor" then + () + else + check_polarity_prop cx ?trace polarity p) + pmap + + and check_polarity_prop cx ?trace polarity = function + | Field (_, t, p) -> check_polarity cx ?trace (Polarity.mult (polarity, p)) t + | Get (_, t) -> check_polarity cx ?trace polarity t + | Set (_, t) -> check_polarity cx ?trace (Polarity.inv polarity) t + | GetSet (_, t1, _, t2) -> + check_polarity cx ?trace polarity t1; + check_polarity cx ?trace (Polarity.inv polarity) t2 + | Method (_, t) -> check_polarity cx ?trace polarity t + + and check_polarity_typeparam cx ?trace polarity tp = + let polarity = Polarity.mult (polarity, tp.polarity) in + check_polarity cx ?trace polarity tp.bound; + Option.iter ~f:(check_polarity cx ?trace polarity) tp.default + + and check_polarity_typeapp cx ?trace polarity c ts = + let reason = update_desc_reason (fun desc -> RVarianceCheck desc) (reason_of_t c) in + flow_opt cx ?trace (c, VarianceCheckT (reason, ts, polarity)) + + and variance_check cx ?trace polarity = 
function + | ([], _) + | (_, []) -> + (* ignore typeapp arity mismatch, since it's handled elsewhere *) + () + | (tp :: tps, t :: ts) -> + check_polarity cx ?trace (Polarity.mult (polarity, tp.polarity)) t; + variance_check cx ?trace polarity (tps, ts) + + (* Instantiate a polymorphic definition given tparam instantiations in a Call or + * New expression. *) + and instantiate_poly_call_or_new + cx trace ~use_op ~reason_op ~reason_tapp ?cache ?errs_ref (tparams_loc, xs, t) targs = + let (_, ts) = + Nel.fold_left + (fun (targs, ts) typeparam -> + match targs with + | [] -> ([], ts) + | ExplicitArg t :: targs -> (targs, t :: ts) + | ImplicitArg (r, id) :: targs -> + let reason = mk_reason RImplicitInstantiation (aloc_of_reason r) in + let t = ImplicitTypeArgument.mk_targ cx typeparam reason reason_tapp in + rec_flow_t cx trace ~use_op (t, OpenT (r, id)); + (targs, t :: ts)) + (targs, []) + xs + in + instantiate_poly_with_targs + cx + trace + ~use_op + ~reason_op + ~reason_tapp + ?cache + ?errs_ref + (tparams_loc, xs, t) + (List.rev ts) + + (* Instantiate a polymorphic definition given type arguments. *) + and instantiate_poly_with_targs + cx trace ~use_op ~reason_op ~reason_tapp ?cache ?errs_ref (tparams_loc, xs, t) ts = + let minimum_arity = poly_minimum_arity xs in + let maximum_arity = Nel.length xs in + let reason_arity = mk_poly_arity_reason tparams_loc in + if List.length ts > maximum_arity then ( + add_output + cx + ~trace + (Error_message.ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity)); + Option.iter errs_ref ~f:(fun errs_ref -> + errs_ref := `ETooManyTypeArgs (reason_arity, maximum_arity) :: !errs_ref) + ); + let (map, _) = + Nel.fold_left + (fun (map, ts) typeparam -> + let (t, ts) = + match (typeparam, ts) with + | ({ default = Some default; _ }, []) -> + (* fewer arguments than params and we have a default *) + (subst cx ~use_op map default, []) + | ({ default = None; _ }, []) -> + (* fewer arguments than params but no default *) + add_output + cx + ~trace + (Error_message.ETooFewTypeArgs (reason_tapp, reason_arity, minimum_arity)); + Option.iter errs_ref ~f:(fun errs_ref -> + errs_ref := `ETooFewTypeArgs (reason_arity, minimum_arity) :: !errs_ref); + (AnyT (reason_op, AnyError), []) + | (_, t :: ts) -> (t, ts) + in + let t_ = cache_instantiate cx trace ~use_op ?cache typeparam reason_op reason_tapp t in + let frame = Frame (TypeParamBound { name = typeparam.name }, use_op) in + rec_flow_t cx trace ~use_op:frame (t_, subst cx ~use_op map typeparam.bound); + (SMap.add typeparam.name t_ map, ts)) + (SMap.empty, ts) + xs + in + reposition cx ~trace (aloc_of_reason reason_tapp) (subst cx ~use_op map t) - | DefT (_, UnionT _), _ -> false - - (* Allow EmptyT ~> CondT *) - | (_, CondT _) -> false - - | _, MakeExactT _ -> false - - | DefT (_, NumT _), UseT (_, DefT (_, NumT _)) - | DefT (_, StrT _), UseT (_, DefT (_, StrT _)) - | DefT (_, BoolT _), UseT (_, DefT (_, BoolT _)) - | DefT (_, NullT), UseT (_, DefT (_, NullT)) - | DefT (_, VoidT), UseT (_, DefT (_, VoidT)) - | DefT (_, EmptyT), _ - | _, UseT (_, DefT (_, MixedT _)) - -> true - - | DefT (_, AnyT), u -> not (any_propagating_use_t u) - | _, UseT (_, DefT (_, AnyT)) -> true - - (* opt: avoid builtin lookups *) - | ObjProtoT _, UseT (_, ObjProtoT _) - | FunProtoT _, UseT (_, FunProtoT _) - | FunProtoT _, UseT (_, ObjProtoT _) - | DefT (_, ObjT {proto_t = ObjProtoT _; _}), UseT (_, ObjProtoT _) - | DefT (_, ObjT {proto_t = FunProtoT _; _}), UseT (_, FunProtoT _) - | DefT (_, ObjT {proto_t = FunProtoT _; _}), UseT (_, 
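
The hunk above ends with the reworked instantiate_poly_with_targs, which pairs each type parameter with a supplied type argument, falls back to the parameter's default when arguments run out, and reports ETooManyTypeArgs / ETooFewTypeArgs. The standalone OCaml sketch below models only that arity-and-defaults bookkeeping; the ty, tparam, and arity_error types are hypothetical stand-ins for Flow's Type.t, Type.typeparam, and Error_message, not the real definitions.

(* Editorial sketch, not part of the diff: simplified arity/default handling
   in the spirit of instantiate_poly_with_targs. *)
type ty =
  | Any
  | Ty of string

type tparam = {
  name : string;
  default : ty option;
}

type arity_error =
  | Too_many_type_args of int (* maximum arity *)
  | Too_few_type_args of int (* minimum arity *)

(* Pair each type parameter with a supplied argument, fall back to its default
   when the arguments run out, and record arity errors, mirroring the
   ETooManyTypeArgs / ETooFewTypeArgs reporting in the hunk above. *)
let instantiate (params : tparam list) (args : ty list) :
    (string * ty) list * arity_error list =
  let maximum = List.length params in
  let minimum = List.length (List.filter (fun p -> p.default = None) params) in
  let init_errs =
    if List.length args > maximum then [Too_many_type_args maximum] else []
  in
  let rec go acc errs params args =
    match (params, args) with
    | ([], _) -> (List.rev acc, List.rev errs)
    | (p :: ps, a :: rest) -> go ((p.name, a) :: acc) errs ps rest
    | (p :: ps, []) ->
      (match p.default with
      | Some d -> go ((p.name, d) :: acc) errs ps []
      | None ->
        (* no argument and no default: the real code substitutes an AnyT with
           an AnyError source; Any plays that role here *)
        go ((p.name, Any) :: acc) (Too_few_type_args minimum :: errs) ps [])
  in
  go [] init_errs params args

let () =
  let params =
    [{ name = "K"; default = None }; { name = "V"; default = Some (Ty "mixed") }]
  in
  let (subst, errs) = instantiate params [Ty "string"] in
  assert (subst = [("K", Ty "string"); ("V", Ty "mixed")]);
  assert (errs = [])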
ObjProtoT _) - -> true - - | _ -> - false - -and numeric = function - | DefT (_, NumT _) -> true - | DefT (_, SingletonNumT _) -> true - - | DefT (reason, InstanceT _) -> - DescFormat.name_of_instance_reason reason = "Date" - - | _ -> false - -and object_like = function - | DefT (_, (AnyObjT | ObjT _ | InstanceT _)) -> true - | t -> function_like t - -and object_use = function - | UseT (_, DefT (_, ObjT _)) -> true - | _ -> false - -and object_like_op = function - | SetPropT _ | GetPropT _ | TestPropT _ | MethodT _ | LookupT _ | MatchPropT _ - | GetProtoT _ | SetProtoT _ - | SuperT _ - | GetKeysT _ | HasOwnPropT _ | GetValuesT _ - | ObjAssignToT _ | ObjAssignFromT _ | ObjRestT _ - | SetElemT _ | GetElemT _ - | UseT (_, DefT (_, AnyObjT)) -> true - | _ -> false - -and function_use = function - | UseT (_, DefT (_, FunT _)) -> true - | _ -> false - -(* TODO: why is AnyFunT missing? *) -and function_like = function - | DefT (_, ClassT _) - | DefT (_, FunT _) - | CustomFunT _ - | FunProtoApplyT _ - | FunProtoBindT _ - | FunProtoCallT _ - -> true - | _ -> false - -and function_like_op = function - | CallT _ - | ConstructorT _ - | UseT (_, DefT (_, AnyFunT)) -> true - | t -> object_like_op t - -and equatable = function - | DefT (_, NumT _), DefT (_, NumT _) - | DefT (_, StrT _), DefT (_, StrT _) - | DefT (_, BoolT _), DefT (_, BoolT _) - | DefT (_, EmptyT), _ | _, DefT (_, EmptyT) - | _, DefT (_, MixedT _) | DefT (_, MixedT _), _ - | DefT (_, AnyT), _ | _, DefT (_, AnyT) - | DefT (_, VoidT), _ | _, DefT (_, VoidT) - | DefT (_, NullT), _ | _, DefT (_, NullT) - -> true - - | DefT (_, (NumT _ | StrT _ | BoolT _)), _ - | _, DefT (_, (NumT _ | StrT _ | BoolT _)) - -> false - - | _ -> true - -(* Creates a union from a list of types. Since unions require a minimum of two - types this function will return an empty type when there are no types in the - list, or the list head when there is one type in the list. *) -and union_of_ts reason ts = - match ts with - (* If we have no types then this is an error. *) - | [] -> DefT (reason, EmptyT) - (* If we only have one type then only that should be used. *) - | t0::[] -> t0 - (* If we have more than one type then we make a union type. *) - | t0::t1::ts -> - let union = UnionT (UnionRep.make t0 t1 ts) in - DefT (reason, union) - -(* generics *) - -(** Harness for testing parameterized types. Given a test function and a list - of type params, generate a bunch of argument maps and invoke the test - function on each, using Reason.TestID to keep the reasons generated by - each test disjoint from the others. - - In the general case we simply test every combination of p = bot, p = bound - for each param p. For many parameter lists this will be more than strictly - necessary, but determining the minimal set of tests for interrelated params - is subtle. For now, our only refinement is to isolate all params with an - upper bound of MixedT (making them trivially unrelated to each other) and - generate a smaller set of argument maps for these which only cover a) bot, - bound for each param, and b) every pairwise bot/bound combination. These - maps are then used as seeds for powersets over the remaining params. - - NOTE: Since the same AST is traversed by each generated test, the order - of generated tests is important for the proper functioning of hooks that - record information on the side as ASTs are traversed. 
Adopting the - convention that the last traversal "wins" (which would happen, e.g, when - the recorded information at a location is replaced every time that - location is encountered), we want the last generated test to always be - the one where all type parameters are substituted by their bounds - (instead of Bottom), so that the recorded information is the same as if - all type parameters were indeed erased and replaced by their bounds. - *) -and generate_tests : 'a . Context.t -> Type.typeparam list -> (Type.t SMap.t -> 'a) -> 'a = - (* make bot type for given param *) - let mk_bot _ { name; reason; _ } = - let desc = RPolyTest (name, RIncompatibleInstantiation name) in - DefT (replace_reason_const desc reason, EmptyT) - in - (* make bound type for given param and argument map *) - let mk_bound cx prev_args { bound; name; reason = param_reason; _ } = - (* For the top bound, we match the reason locations that appear in the - * respective bot bound: - * - 'loc' is the location of the type parameter (may be repositioned later) - * - 'def_loc' is the location of the type parameter, and - * - 'annot_loc_opt' is the location of the bound (if present). - *) - mod_reason_of_t (fun bound_reason -> - let param_loc = Reason.aloc_of_reason param_reason in - let annot_loc = annot_loc_of_reason bound_reason in - let desc = desc_of_reason ~unwrap:false bound_reason in - repos_reason (param_loc |> ALoc.to_loc) ?annot_loc (mk_reason (RPolyTest (name, desc)) (param_loc |> ALoc.to_loc)) - ) (subst cx prev_args bound) - in - (* make argument map by folding mk_arg over param list *) - let mk_argmap mk_arg = - List.fold_left (fun acc ({ name; _ } as p) -> - SMap.add name (mk_arg acc p) acc - ) SMap.empty - in - (* for each p, a map with p bot and others bound + map with all bound *) - let linear cx = function - | [] -> [SMap.empty] - | params -> - let all = mk_argmap (mk_bound cx) params in - let each = List.map (fun ({ name; _ } as p) -> - SMap.add name (mk_bot SMap.empty p) all - ) params in - List.rev (all :: each) - in - (* a map for every combo of bot/bound params *) - let powerset cx params arg_map = - let none = mk_argmap mk_bot params in - List.fold_left (fun maps ({ name; _ } as p) -> - let bots = List.map (SMap.add name (SMap.find_unsafe name none)) maps in - let bounds = List.map (fun m -> SMap.add name (mk_bound cx m p) m) maps in - bots @ bounds - ) [arg_map] params - in - (* main - run f over a collection of arg maps generated for params *) - fun cx params f -> - if params = [] then f SMap.empty else - let is_free = function { bound = DefT (_, MixedT _); _ } -> true | _ -> false in - let free_params, dep_params = List.partition is_free params in - let free_sets = linear cx free_params in - let powersets = List.map (powerset cx dep_params) free_sets in - let hd_map, tl_maps = - match List.flatten powersets with - | x::xs -> x, xs - | [] -> assert false + (* Given a type parameter, a supplied type argument for specializing it, and a + reason for specialization, either return the type argument or, when directed, + look up the instantiation cache for an existing type argument for the same + purpose and unify it with the supplied type argument. *) + and cache_instantiate cx trace ~use_op ?cache typeparam reason_op reason_tapp t = + match cache with + | None -> t + | Some rs -> + (match desc_of_reason reason_tapp with + (* This reason description cannot be trusted for caching purposes. 
*) + | RTypeAppImplicit _ -> t + | _ -> + let t_ = Cache.PolyInstantiation.find cx reason_tapp typeparam (reason_op, rs) in + rec_unify cx trace ~use_op ~unify_any:true t t_; + t_) + + (* Instantiate a polymorphic definition with stated bound or 'any' for args *) + (* Needed only for `instanceof` refis and React.PropTypes.instanceOf types *) + and instantiate_poly_default_args cx trace ~use_op ~reason_op ~reason_tapp (tparams_loc, xs, t) = + (* Remember: other_bound might refer to other type params *) + let (ts, _) = + Nel.fold_left + (fun (ts, map) typeparam -> + let t = Unsoundness.why InstanceOfRefinement reason_op in + (t :: ts, SMap.add typeparam.name t map)) + ([], SMap.empty) + xs in - List.fold_left (Fn.const (TestID.run f)) (f hd_map) tl_maps - -(*********************) -(* inheritance utils *) -(*********************) - -and flow_type_args cx trace ~use_op lreason ureason targs1 targs2 = - List.iter2 (fun (x, targ_reason, t1, polarity) (_, _, t2, _) -> - let use_op = Frame (TypeArgCompatibility { - name = x; - targ = targ_reason; - lower = lreason; - upper = ureason; - polarity; - }, use_op) in - match polarity with - | Negative -> rec_flow cx trace (t2, UseT (use_op, t1)) - | Positive -> rec_flow cx trace (t1, UseT (use_op, t2)) - | Neutral -> rec_unify cx trace ~use_op t1 t2 - ) targs1 targs2; - -and inherited_method x = x <> "constructor" - -(* dispatch checks to verify that lower satisfies the structural - requirements given in the tuple. *) -(* TODO: own_props/proto_props is misleading, since they come from interfaces, - which don't have an own/proto distinction. *) -and structural_subtype cx trace ?(use_op=unknown_use) lower reason_struct - (own_props, proto_props, call_id) = - let lreason = reason_of_t lower in - let own_props = Context.find_props cx own_props in - let proto_props = Context.find_props cx proto_props in - let call_t = Option.map call_id ~f:(Context.find_call cx) in - own_props |> SMap.iter (fun s p -> - let use_op = Frame (PropertyCompatibility { - prop = Some s; - lower = lreason; - upper = reason_struct; - is_sentinel = false; - }, use_op) in - match p with - | Field (_, DefT (_, OptionalT t), polarity) -> - let propref = - let reason_prop = replace_reason (fun desc -> - ROptional (RPropertyOf (s, desc)) - ) reason_struct in - Named (reason_prop, s) - in - rec_flow cx trace (lower, - LookupT (reason_struct, NonstrictReturning (None, None), [], propref, - LookupProp (use_op, Field (None, t, polarity)))) - | _ -> - let propref = - let reason_prop = replace_reason (fun desc -> - RPropertyOf (s, desc) - ) reason_struct in - Named (reason_prop, s) - in - rec_flow cx trace (lower, - LookupT (reason_struct, Strict lreason, [], propref, - LookupProp (use_op, p))) - ); - proto_props |> SMap.iter (fun s p -> - let use_op = Frame (PropertyCompatibility { - prop = Some s; - lower = lreason; - upper = reason_struct; - is_sentinel = false; - }, use_op) in - let propref = - let reason_prop = replace_reason (fun desc -> - RPropertyOf (s, desc) - ) reason_struct in - Named (reason_prop, s) + let ts = List.rev ts in + instantiate_poly_with_targs cx trace ~use_op ~reason_op ~reason_tapp (tparams_loc, xs, t) ts + + (* Instantiate a polymorphic definition by creating fresh type arguments. 
*) + and instantiate_poly cx trace ~use_op ~reason_op ~reason_tapp ?cache (tparams_loc, xs, t) = + let ts = + xs + |> Nel.map (fun typeparam -> ImplicitTypeArgument.mk_targ cx typeparam reason_op reason_tapp) in - rec_flow cx trace (lower, - LookupT (reason_struct, Strict lreason, [], propref, - LookupProp (use_op, p))) - ); - call_t |> Option.iter ~f:(fun ut -> - let prop_name = Some "$call" in - let use_op = Frame (PropertyCompatibility { - prop = prop_name; - lower = lreason; - upper = reason_struct; - is_sentinel = false; - }, use_op) in - match lower with - | DefT (_, ObjT {call_t = Some lid; _}) - | DefT (_, InstanceT (_, _, _, {inst_call_t = Some lid; _})) -> - let lt = Context.find_call cx lid in - rec_flow cx trace (lt, UseT (use_op, ut)) - | _ -> - let reason_prop = replace_reason (fun desc -> - RPropertyOf ("$call", desc) - ) reason_struct in - add_output cx ~trace (FlowError.EStrictLookupFailed - ((reason_prop, lreason), lreason, prop_name, Some use_op)) - ); - -and check_super cx trace ~use_op lreason ureason t x p = - let use_op = Frame (PropertyCompatibility { - prop = Some x; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, use_op) in - let strict = NonstrictReturning (None, None) in - let reason_prop = replace_reason_const (RProperty (Some x)) lreason in - lookup_prop cx trace t reason_prop lreason strict x (SuperProp (use_op, p)) - -and eval_selector cx ?trace reason curr_t s i = - let evaluated = Context.evaluated cx in - match IMap.get i evaluated with - | None -> - Tvar.mk_where cx reason (fun tvar -> - Context.set_evaluated cx (IMap.add i tvar evaluated); - flow_opt cx ?trace (curr_t, match s with - | Prop x -> GetPropT (unknown_use, reason, Named (reason, x), tvar) - | Elem key -> GetElemT (unknown_use, reason, key, tvar) - | ObjRest xs -> ObjRestT (reason, xs, tvar) - | ArrRest i -> ArrRestT (unknown_use, reason, i, tvar) - | Default -> PredicateT (NotP VoidP, tvar) - | Become -> BecomeT (reason, tvar) - | Refine p -> RefineT (reason, p, tvar) - ) - ) - | Some it -> - it - -and mk_type_destructor cx ~trace use_op reason t d id = - let evaluated = Context.evaluated cx in - (* As an optimization, unwrap resolved tvars so that they are only evaluated - * once to an annotation instead of a tvar that gets a bound on both sides. *) - let t = match t with - | OpenT (_, id) -> - let _, constraints = Context.find_constraints cx id in - (match constraints with - | Resolved t -> t - | _ -> t) - | _ -> t - in - match t, IMap.get id evaluated with - (* The OpenT branch is a correct implementation of type destructors for all - * types. However, because it adds a constraint to both sides of a type we may - * end up doing some work twice. So as an optimization for concrete types - * we have a fall-through branch that only evaluates our type destructor once. - * The second branch then uses AnnotT to both concretize the result for use - * as a lower or upper bound and prevent new bounds from being added to - * the result. - * - * MergedT should also get this treatment as it is a merged "description" of - * an OpenT. 
*) - | (OpenT _ | MergedT _), Some t -> false, t - | (OpenT _ | MergedT _), None -> - false, Tvar.mk_where cx reason (fun tvar -> - Context.set_evaluated cx (IMap.add id tvar evaluated); - let x = TypeDestructorTriggerT (use_op, reason, None, d, tvar) in - rec_flow_t cx trace (t, x); - rec_flow_t cx trace (x, t); - ) - | _, Some t -> true, t - | AnnotT (r, t, use_desc), None -> - true, Tvar.mk_where cx reason (fun tvar -> - Context.set_evaluated cx (IMap.add id tvar evaluated); - let repos = Some (r, use_desc) in - let x = TypeDestructorTriggerT (use_op, reason, repos, d, tvar) in - rec_flow_t cx trace (t, x); - ) - | _, None -> - true, Tvar.mk_where cx reason (fun tvar -> - Context.set_evaluated cx (IMap.add id tvar evaluated); - eval_destructor cx ~trace use_op reason t d tvar; - ) - -and eval_destructor cx ~trace use_op reason t d tout = match t with -(* Specialize TypeAppTs before evaluating them so that we can handle special - cases. Like the union case below. mk_typeapp_instance will return an AnnotT - which will be fully resolved using the AnnotT case above. *) -| DefT (reason_tapp, TypeAppT (use_op_tapp, c, ts)) -> - let destructor = TypeDestructorT (use_op, reason, d) in - let t = mk_typeapp_instance cx ~trace ~use_op:use_op_tapp ~reason_op:reason ~reason_tapp c ts in - rec_flow_t cx trace (EvalT (t, destructor, Cache.Eval.id t destructor), tout) -(* If we are destructuring a union, evaluating the destructor on the union - itself may have the effect of splitting the union into separate lower - bounds, which prevents the speculative match process from working. - Instead, we preserve the union by pushing down the destructor onto the - branches of the unions. *) -| DefT (r, UnionT rep) -> - rec_flow_t cx trace (DefT (r, UnionT (rep |> UnionRep.ident_map (fun t -> - let destructor = TypeDestructorT (use_op, reason, d) in - EvalT (t, destructor, Cache.Eval.id t destructor) - ))), tout) -| DefT (r, MaybeT t) -> - let destructor = TypeDestructorT (use_op, reason, d) in - let reason = replace_reason_const RNullOrVoid r in - let rep = UnionRep.make - (let null = NullT.make reason in EvalT (null, destructor, Cache.Eval.id null destructor)) - (let void = VoidT.make reason in EvalT (void, destructor, Cache.Eval.id void destructor)) - [EvalT (t, destructor, Cache.Eval.id t destructor)] - in - rec_flow_t cx trace (DefT (r, UnionT rep), tout) -| _ -> - rec_flow cx trace (t, match d with - | NonMaybeType -> - let maybe_r = replace_reason (fun desc -> RMaybe desc) reason in - (* We intentionally use `unknown_use` here! When we flow to a tout we never - * want to carry a `use_op`. We want whatever `use_op` the tout is used with - * to win. 
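As the comments above note, evaluating a destructor directly on a union would split it into separate lower bounds and defeat speculative matching, so eval_destructor pushes the destructor down onto each branch (and expands a maybe type into null, void, and the wrapped type). Here is a small standalone sketch of that distribution over a toy type language; the `ty` variant, `distribute`, and `non_maybe` are invented stand-ins, and the real code defers each application as an EvalT rather than applying it eagerly.

(* Toy type language: unions and maybes are the only structured cases. *)
type ty =
  | Str
  | Num
  | Null
  | Void
  | Maybe of ty
  | Union of ty list

(* A "destructor" is just a function on types here. Distributing it over
   unions and maybes before applying it keeps the branches separate, which
   mirrors the union and MaybeT cases of eval_destructor above. *)
let rec distribute (destruct : ty -> ty) (t : ty) : ty =
  match t with
  | Union ts -> Union (List.map (distribute destruct) ts)
  | Maybe t' -> Union [ destruct Null; destruct Void; distribute destruct t' ]
  | t' -> destruct t'

(* Example destructor: "non-maybe type" erases the null/void branches. *)
let non_maybe = function
  | Null | Void -> Union []   (* nothing survives *)
  | t -> t

let () =
  match distribute non_maybe (Maybe (Union [ Str; Num ])) with
  | Union _ -> print_endline "destructor distributed over the union"
  | _ -> ()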
*) - UseT (unknown_use, DefT (maybe_r, MaybeT tout)) - | PropertyType x -> - let reason_op = replace_reason_const (RProperty (Some x)) reason in - GetPropT (use_op, reason, Named (reason_op, x), tout) - | ElementType t -> GetElemT (use_op, reason, t, tout) - | Bind t -> BindT (use_op, reason, mk_methodcalltype t None [] tout, true) - | SpreadType (options, todo_rev) -> - let open Object in - let open Object.Spread in - let tool = Resolve Next in - let state = { todo_rev; acc = [] } in - ObjKitT (use_op, reason, tool, Spread (options, state), tout) - | RestType (options, t) -> - let open Object in - let open Object.Rest in - let tool = Resolve Next in - let state = One t in - ObjKitT (use_op, reason, tool, Rest (options, state), tout) - | ReadOnlyType -> - let open Object in - ObjKitT (use_op, reason, Resolve (Next), ReadOnly, tout) - | ValuesType -> GetValuesT (reason, tout) - | CallType args -> - let args = List.map (fun arg -> Arg arg) args in - let call = mk_functioncalltype reason None args tout in - let call = {call with call_strict_arity = false} in - CallT (use_op, reason, call) - | TypeMap tmap -> MapTypeT (reason, tmap, tout) - | ReactElementPropsType -> ReactKitT (use_op, reason, React.GetProps tout) - | ReactElementConfigType -> ReactKitT (use_op, reason, React.GetConfig tout) - | ReactElementRefType -> ReactKitT (use_op, reason, React.GetRef tout) - ) - -and match_this_binding map f = - match SMap.find_unsafe "this" map with - | ReposT (_, t) -> f t - | _ -> failwith "not a this binding" - -(* TODO: flesh this out *) -and check_polarity cx ?trace polarity = function - (* base case *) - | BoundT (reason, name, tp_polarity) -> - if not (Polarity.compat (tp_polarity, polarity)) - then add_output cx ?trace (FlowError.EPolarityMismatch { - reason = reason; - name = name; - expected_polarity = tp_polarity; - actual_polarity = polarity; - }) - - | OpenT _ - - | DefT (_, NumT _) - | DefT (_, StrT _) - | DefT (_, BoolT _) - | DefT (_, EmptyT) - | DefT (_, MixedT _) - | DefT (_, AnyT) - | DefT (_, NullT) - | DefT (_, VoidT) - | DefT (_, SingletonStrT _) - | DefT (_, SingletonNumT _) - | DefT (_, SingletonBoolT _) - | DefT (_, AnyObjT) - | DefT (_, AnyFunT) - | DefT (_, CharSetT _) - -> () - | ExistsT _ - -> () - - | InternalT (OptionalChainVoidT _) -> () - - | DefT (_, OptionalT t) - | ExactT (_, t) - | DefT (_, MaybeT t) - | AnyWithLowerBoundT t - | AnyWithUpperBoundT t - | ReposT (_, t) - | InternalT (ReposUpperT (_, t)) - -> check_polarity cx ?trace polarity t - - | DefT (_, ClassT t) - -> check_polarity cx ?trace polarity t - - | DefT (_, TypeT (_, t)) - -> check_polarity cx ?trace polarity t - - | DefT (_, InstanceT (static, super, _, instance)) -> - check_polarity cx ?trace polarity static; - check_polarity cx ?trace polarity super; - check_polarity_propmap cx ?trace polarity instance.own_props; - check_polarity_propmap cx ?trace ~skip_ctor:true polarity instance.proto_props - - | DefT (_, FunT (_, _, func)) -> - let f = check_polarity cx ?trace (Polarity.inv polarity) in - List.iter (fun (_, t) -> f t) func.params; - Option.iter ~f:(fun (_, _, t) -> f t) func.rest_param; - check_polarity cx ?trace polarity func.return_t - - | DefT (_, ArrT (ArrayAT (elemt, _))) -> - check_polarity cx ?trace Neutral elemt - - | DefT (_, ArrT (TupleAT (_, tuple_types))) -> - List.iter (check_polarity cx ?trace Neutral) tuple_types - - | DefT (_, ArrT (ROArrayAT (elemt))) -> - check_polarity cx ?trace polarity elemt - - | DefT (_, ArrT EmptyAT) -> () - - | DefT (_, ObjT obj) -> - 
check_polarity_propmap cx ?trace polarity obj.props_tmap; - (match obj.dict_t with - | Some { key; value; dict_polarity; _ } -> - check_polarity cx ?trace Neutral key; - check_polarity cx ?trace (Polarity.mult (polarity, dict_polarity)) value - | None -> ()) - - | DefT (_, IdxWrapper obj) -> check_polarity cx ?trace polarity obj - - | DefT (_, UnionT rep) -> - List.iter (check_polarity cx ?trace polarity) (UnionRep.members rep) - - | DefT (_, IntersectionT rep) -> - List.iter (check_polarity cx ?trace polarity) (InterRep.members rep) - - | DefT (_, PolyT (xs, t, _)) -> - List.iter (check_polarity_typeparam cx ?trace polarity) xs; - check_polarity cx ?trace polarity t - - | ThisTypeAppT (_, c, _, None) -> - check_polarity cx ?trace Positive c - - | ThisTypeAppT (_, c, _, Some ts) - | DefT (_, TypeAppT (_, c, ts)) - -> - check_polarity cx ?trace Positive c; - check_polarity_typeapp cx ?trace polarity c ts - - | OpaqueT (_, opaquetype) -> - Option.iter ~f:(check_polarity cx ?trace polarity) opaquetype.underlying_t; - Option.iter ~f:(check_polarity cx ?trace polarity) opaquetype.super_t - - | ShapeT t -> - check_polarity cx ?trace polarity t - - | KeysT (_, t) -> - check_polarity cx ?trace Positive t + instantiate_poly_with_targs + cx + trace + ~use_op + ~reason_op + ~reason_tapp + ?cache + (tparams_loc, xs, t) + (Nel.to_list ts) + + (* instantiate each param of a polymorphic type with its upper bound *) + and instantiate_poly_param_upper_bounds cx typeparams = + let (_, revlist) = + Nel.fold_left + (fun (map, list) { name; bound; _ } -> + let t = subst cx map bound in + (SMap.add name t map, t :: list)) + (SMap.empty, []) + typeparams + in + List.rev revlist - | ThisClassT _ - | ModuleT _ - | AnnotT _ - | MatchingPropT _ - | NullProtoT _ - | ObjProtoT _ - | FunProtoT _ - | FunProtoApplyT _ - | FunProtoBindT _ - | FunProtoCallT _ - | EvalT _ - | InternalT (ExtendsT _) - | InternalT (ChoiceKitT _) - | TypeDestructorTriggerT _ - | CustomFunT _ - | OpenPredT _ - | MergedT _ - -> () (* TODO *) - -and check_polarity_propmap cx ?trace ?(skip_ctor=false) polarity id = - let pmap = Context.find_props cx id in - SMap.iter (fun x p -> - if skip_ctor && x = "constructor" - then () - else check_polarity_prop cx ?trace polarity p - ) pmap - -and check_polarity_prop cx ?trace polarity = function - | Field (_, t, p) -> check_polarity cx ?trace (Polarity.mult (polarity, p)) t - | Get (_, t) -> check_polarity cx ?trace polarity t - | Set (_, t) -> check_polarity cx ?trace (Polarity.inv polarity) t - | GetSet (_, t1, _, t2) -> - check_polarity cx ?trace polarity t1; - check_polarity cx ?trace (Polarity.inv polarity) t2 - | Method (_, t) -> check_polarity cx ?trace polarity t - -and check_polarity_typeparam cx ?trace polarity tp = - let polarity = Polarity.mult (polarity, tp.polarity) in - check_polarity cx ?trace polarity tp.bound; - Option.iter ~f:(check_polarity cx ?trace polarity) tp.default - -and check_polarity_typeapp cx ?trace polarity c ts = - let reason = replace_reason (fun desc -> - RVarianceCheck desc - ) (reason_of_t c) in - flow_opt cx ?trace (c, VarianceCheckT(reason, ts, polarity)) - -and variance_check cx ?trace polarity = function - | [], _ | _, [] -> - (* ignore typeapp arity mismatch, since it's handled elsewhere *) - () - | tp::tps, t::ts -> - check_polarity cx ?trace (Polarity.mult (polarity, tp.polarity)) t; - variance_check cx ?trace polarity (tps, ts) - -and poly_minimum_arity xs = - List.filter (fun typeparam -> typeparam.default = None) xs - |> List.length - -(* Instantiate a 
polymorphic definition given type arguments. *) -and instantiate_poly_with_targs - cx - trace - ~use_op - ~reason_op - ~reason_tapp - ?cache - ?errs_ref - (xs,t) - ts - = - let minimum_arity = poly_minimum_arity xs in - let maximum_arity = List.length xs in - let reason_arity = mk_poly_arity_reason xs in - if List.length ts > maximum_arity - then begin - add_output cx ~trace (FlowError.ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity)); - Option.iter errs_ref - ~f:(fun errs_ref -> errs_ref := `ETooManyTypeArgs(reason_arity, maximum_arity)::!errs_ref) - end; - let map, _ = List.fold_left - (fun (map, ts) typeparam -> - let t, ts = match typeparam, ts with - | {default=Some default; _;}, [] -> - (* fewer arguments than params and we have a default *) - subst cx ~use_op map default, [] - | {default=None; _;}, [] -> - (* fewer arguments than params but no default *) - add_output cx ~trace (FlowError.ETooFewTypeArgs (reason_tapp, reason_arity, minimum_arity)); - Option.iter errs_ref - ~f:(fun errs_ref -> errs_ref := `ETooFewTypeArgs(reason_arity, minimum_arity)::!errs_ref); - DefT (reason_op, AnyT), [] - | _, t::ts -> - t, ts in - let t_ = cache_instantiate cx trace ?cache typeparam reason_op reason_tapp t in - let frame = Frame (TypeParamBound { - name = typeparam.name; - }, use_op) in - rec_flow_t cx trace ~use_op:frame (t_, subst cx ~use_op map typeparam.bound); - SMap.add typeparam.name t_ map, ts - ) - (SMap.empty, ts) - xs in - reposition cx ~trace (aloc_of_reason reason_tapp |> ALoc.to_loc) (subst cx ~use_op map t) + and mk_poly_arity_reason tparams_loc = + mk_reason (RCustom "See type parameters of definition here") tparams_loc -(* Given a type parameter, a supplied type argument for specializing it, and a - reason for specialization, either return the type argument or, when directed, - look up the instantiation cache for an existing type argument for the same - purpose and unify it with the supplied type argument. *) -and cache_instantiate cx trace ?cache typeparam reason_op reason_tapp t = - match cache with - | None -> t - | Some rs -> - let t_ = Cache.PolyInstantiation.find cx reason_tapp typeparam (reason_op, rs) in - rec_unify cx trace ~use_op:unknown_use ~unify_any:true t t_; - t_ - -(* Instantiate a polymorphic definition with stated bound or 'any' for args *) -(* Needed only for `instanceof` refis and React.PropTypes.instanceOf types *) -and instantiate_poly_default_args cx trace ~use_op ~reason_op ~reason_tapp (xs,t) = - (* Remember: other_bound might refer to other type params *) - let ts, _ = List.fold_left - (fun (ts, map) typeparam -> - let t = match typeparam.bound with - | DefT (_, MixedT _) -> AnyT.why reason_op - | other_bound -> AnyWithUpperBoundT (subst cx ~use_op map other_bound) in - (t::ts, SMap.add typeparam.name t map) - ) ([], SMap.empty) - xs in - let ts = List.rev ts in - instantiate_poly_with_targs cx trace ~use_op ~reason_op ~reason_tapp (xs,t) ts - -(* Instantiate a polymorphic definition by creating fresh type arguments. 
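The instantiate_poly_with_targs logic above threads the supplied type arguments through the parameter list: extra arguments produce a too-many-type-args error, a missing argument is filled from the parameter's default when one exists, and otherwise a too-few-type-args error is reported and an any-like filler is used. The following is a simplified standalone sketch of that fold, with invented names (`type_param`, `arity_error`, `instantiate`) and without the bound checks and instantiation caching of the real code.

type ty = Any | Named of string

type type_param = {
  name : string;
  default : ty option;
}

type arity_error =
  | Too_many_type_args of int   (* maximum arity *)
  | Too_few_type_args of int    (* minimum arity *)

(* Parameters without defaults must always be supplied. *)
let minimum_arity params =
  List.length (List.filter (fun p -> p.default = None) params)

(* Build a name -> type substitution: use the supplied argument if there is
   one, otherwise the default, otherwise Any plus an arity error. *)
let instantiate params args =
  let errors = ref [] in
  let maximum = List.length params in
  if List.length args > maximum then
    errors := Too_many_type_args maximum :: !errors;
  let subst, _ =
    List.fold_left
      (fun (subst, args) p ->
        let t, args =
          match args with
          | t :: rest -> t, rest
          | [] ->
            (match p.default with
             | Some d -> d, []
             | None ->
               errors := Too_few_type_args (minimum_arity params) :: !errors;
               Any, [])
        in
        ((p.name, t) :: subst, args))
      ([], args) params
  in
  (List.rev subst, List.rev !errors)

let () =
  let params = [ { name = "K"; default = None }; { name = "V"; default = Some Any } ] in
  let subst, errs = instantiate params [ Named "string" ] in
  Printf.printf "bound %d params, %d arity errors\n" (List.length subst) (List.length errs)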
*) -and instantiate_poly cx trace ~use_op ~reason_op ~reason_tapp ?cache (xs,t) = - let ts = xs |> List.map (fun typeparam -> - ImplicitTypeArgument.mk_targ cx typeparam reason_op reason_tapp - ) in - instantiate_poly_with_targs cx trace ~use_op ~reason_op ~reason_tapp ?cache (xs,t) ts - -(* instantiate each param of a polymorphic type with its upper bound *) -and instantiate_poly_param_upper_bounds cx typeparams = - let _, revlist = List.fold_left ( - fun (map, list) { name; bound; _ } -> - let t = subst cx map bound in - SMap.add name t map, t :: list - ) (SMap.empty, []) typeparams in - List.rev revlist - -and mk_poly_arity_reason xs = - let x1, xN = List.hd xs, List.hd (List.rev xs) in - let loc = Loc.btwn (aloc_of_reason x1.reason |> ALoc.to_loc) (aloc_of_reason xN.reason |> ALoc.to_loc) in - mk_reason (RCustom "See type parameters of definition here") loc - -(* Fix a this-abstracted instance type by tying a "knot": assume that the + (* Fix a this-abstracted instance type by tying a "knot": assume that the fixpoint is some `this`, substitute it as This in the instance type, and finally unify it with the instance type. Return the class type wrapping the instance type. *) -and fix_this_class cx trace reason (r, i) = - let i' = match Cache.Fix.find reason i with - | Some i' -> i' - | None -> - let this = Tvar.mk cx reason in - let i' = subst cx (SMap.singleton "this" this) i in - Cache.Fix.add reason i i'; - rec_unify cx trace ~use_op:unknown_use this i'; - i' - in - DefT (r, ClassT i') - -and canonicalize_imported_type cx trace reason t = - match t with - | DefT (_, ClassT inst) -> - Some (DefT (reason, TypeT (ImportClassKind, inst))) - - | DefT (_, FunT (_, prototype, _)) -> - Some (DefT (reason, TypeT (ImportFunKind, prototype))) - - | DefT (_, PolyT (typeparams, DefT (_, ClassT inst), id)) -> - Some (poly_type id typeparams (DefT (reason, TypeT (ImportClassKind, inst)))) - - | DefT (_, PolyT (typeparams, DefT (_, FunT (_, prototype, _)), id)) -> - Some (poly_type id typeparams (DefT (reason, TypeT (ImportFunKind, prototype)))) + and fix_this_class cx trace reason (r, i) = + let i' = + match Cache.Fix.find reason i with + | Some i' -> i' + | None -> + let this = Tvar.mk cx reason in + let i' = subst cx (SMap.singleton "this" this) i in + Cache.Fix.add reason i i'; + rec_unify cx trace ~use_op:unknown_use this i'; + i' + in + DefT (r, bogus_trust (), ClassT i') + + and is_type = function + | DefT (_, _, ClassT _) + | ThisClassT (_, _) + | DefT (_, _, TypeT _) + | AnyT _ -> + true + | DefT (_, _, PolyT (_, _, t', _)) -> is_type t' + | _ -> false - (* delay fixing a polymorphic this-abstracted class until it is specialized, + and canonicalize_imported_type cx trace reason t = + match t with + | DefT (_, trust, ClassT inst) -> Some (DefT (reason, trust, TypeT (ImportClassKind, inst))) + | DefT (_, _, PolyT (tparams_loc, typeparams, DefT (_, trust, ClassT inst), id)) -> + Some + (poly_type id tparams_loc typeparams (DefT (reason, trust, TypeT (ImportClassKind, inst)))) + (* delay fixing a polymorphic this-abstracted class until it is specialized, by transforming the instance type to a type application *) - | DefT (_, PolyT (typeparams, ThisClassT _, _)) -> - let targs = List.map (fun tp -> - BoundT (tp.reason, tp.name, tp.polarity) - ) typeparams in - Some (poly_type (mk_id ()) typeparams (class_type (typeapp t targs))) - - | DefT (_, PolyT (_, DefT (_, TypeT _), _)) -> - Some t - - (* fix this-abstracted class when used as a type *) - | ThisClassT(r, i) -> - Some (fix_this_class cx trace 
reason (r, i)) - - | DefT (_, TypeT _) -> - Some t - - | DefT (_, AnyT) -> - Some t - - | _ -> - None + | DefT (_, _, PolyT (tparams_loc, typeparams, ThisClassT _, _)) -> + let targs = + typeparams |> Nel.map (fun tp -> BoundT (tp.reason, tp.name, tp.polarity)) |> Nel.to_list + in + let tapp = typeapp ~implicit:true t targs in + Some (poly_type (Context.make_nominal cx) tparams_loc typeparams (class_type tapp)) + | DefT (_, _, PolyT (_, _, DefT (_, _, TypeT _), _)) -> Some t + (* fix this-abstracted class when used as a type *) + | ThisClassT (r, i) -> Some (fix_this_class cx trace reason (r, i)) + | DefT (_, _, TypeT _) -> Some t + | AnyT _ -> Some t + | _ -> None -(* Specialize This in a class. Eventually this causes substitution. *) -and instantiate_this_class cx trace reason tc this k = - rec_flow cx trace (tc, ThisSpecializeT (reason, this, k)) + (* Specialize This in a class. Eventually this causes substitution. *) + and instantiate_this_class cx trace reason tc this k = + rec_flow cx trace (tc, ThisSpecializeT (reason, this, k)) -(* Specialize targs in a class. This is somewhat different from + (* Specialize targs in a class. This is somewhat different from mk_typeapp_instance, in that it returns the specialized class type, not the specialized instance type. *) -and specialize_class cx trace ~reason_op ~reason_tapp c = function - | None -> c - | Some ts -> - Tvar.mk_where cx reason_tapp (fun tout -> - rec_flow cx trace (c, SpecializeT (unknown_use, reason_op, reason_tapp, None, Some ts, tout)) - ) - -(* Object assignment patterns. In the `Object.assign` model (chain_objects), an + and specialize_class cx trace ~reason_op ~reason_tapp c = function + | None -> c + | Some ts -> + Tvar.mk_where cx reason_tapp (fun tout -> + rec_flow + cx + trace + (c, SpecializeT (unknown_use, reason_op, reason_tapp, None, Some ts, tout))) + + (* Object assignment patterns. In the `Object.assign` model (chain_objects), an existing object receives properties from other objects. This pattern suffers from "races" in the type checker, since the object supposed to receive properties is available even when the other objects supplying the properties are not yet available. *) - -and chain_objects cx ?trace reason this those = - let result = List.fold_left (fun result that -> - let that, kind = match that with - | Arg t -> t, default_obj_assign_kind - | SpreadArg t -> - (* If someone does Object.assign({}, ...Array) we can treat it like + and chain_objects cx ?trace reason this those = + let result = + List.fold_left + (fun result that -> + let (that, kind) = + match that with + | Arg t -> (t, default_obj_assign_kind) + | SpreadArg t -> + (* If someone does Object.assign({}, ...Array) we can treat it like Object.assign({}, obj). *) - t, ObjSpreadAssign + (t, ObjSpreadAssign) + in + Tvar.mk_where cx reason (fun t -> + flow_opt + cx + ?trace + (result, ObjAssignToT (Op (ObjectChain { op = reason }), reason, that, t, kind)))) + this + those in - Tvar.mk_where cx reason (fun t -> - flow_opt cx ?trace (result, ObjAssignToT(reason, that, t, kind)); - ) - ) this those in - reposition cx ?trace (aloc_of_reason reason |> ALoc.to_loc) result - -(*******************************************************) -(* Entry points into the process of trying different *) -(* branches of union and intersection types. 
*) -(*******************************************************) + reposition cx ?trace (aloc_of_reason reason) result + (*******************************************************) + (* Entry points into the process of trying different *) + (* branches of union and intersection types. *) + (*******************************************************) -(* The problem we're trying to solve here is common to checking unions and + (* The problem we're trying to solve here is common to checking unions and intersections: how do we make a choice between alternatives, when (i) we have only partial information (i.e., while we're in the middle of type inference) and when (ii) we want to avoid regret (i.e., by not committing to an @@ -7917,46 +8331,46 @@ and chain_objects cx ?trace reason this those = determined by annotations). *) -(** Every choice-making process on a union or intersection type is assigned a + (** Every choice-making process on a union or intersection type is assigned a unique identifier, called the speculation_id. This identifier keeps track of unresolved tvars encountered when trying to fully resolve types. **) -and try_union cx trace use_op l reason rep = - let ts = UnionRep.members rep in - let speculation_id = mk_id() in - Speculation.init_speculation cx speculation_id; - - (* collect parts of the union type to be fully resolved *) - let imap = - (* since any final optimization must have happened after full resolution *) - if UnionRep.is_optimized_finally rep then IMap.empty - else ResolvableTypeJob.collect_of_types cx reason IMap.empty ts in - (* collect parts of the lower bound to be fully resolved, while logging + and try_union cx trace use_op l reason rep = + let ts = UnionRep.members rep in + let speculation_id = mk_id () in + Speculation.init_speculation cx speculation_id; + + (* collect parts of the union type to be fully resolved *) + let imap = + (* since any final optimization must have happened after full resolution *) + if UnionRep.is_optimized_finally rep then + IMap.empty + else + ResolvableTypeJob.collect_of_types cx IMap.empty ts + in + (* collect parts of the lower bound to be fully resolved, while logging unresolved tvars *) - let imap = ResolvableTypeJob.collect_of_type - ~log_unresolved:speculation_id cx reason imap l in - (* fully resolve the collected types *) - resolve_bindings_init cx trace reason (bindings_of_jobs cx trace imap) @@ - (* ...and then begin the choice-making process *) - try_flow_continuation cx trace reason speculation_id (UnionCases (use_op, l, rep, ts)) - -and try_intersection cx trace u reason rep = - let ts = InterRep.members rep in - let speculation_id = mk_id() in - Speculation.init_speculation cx speculation_id; - - (* collect parts of the intersection type to be fully resolved *) - let imap = ResolvableTypeJob.collect_of_types cx reason IMap.empty ts in - (* collect parts of the upper bound to be fully resolved, while logging + let imap = ResolvableTypeJob.collect_of_type ~log_unresolved:speculation_id cx imap l in + (* fully resolve the collected types *) + resolve_bindings_init cx trace reason (bindings_of_jobs cx trace imap) + @@ (* ...and then begin the choice-making process *) + try_flow_continuation cx trace reason speculation_id (UnionCases (use_op, l, rep, ts)) + + and try_intersection cx trace u reason rep = + let ts = InterRep.members rep in + let speculation_id = mk_id () in + Speculation.init_speculation cx speculation_id; + + (* collect parts of the intersection type to be fully resolved *) + let imap = 
ResolvableTypeJob.collect_of_types cx IMap.empty ts in + (* collect parts of the upper bound to be fully resolved, while logging unresolved tvars *) - let imap = ResolvableTypeJob.collect_of_use - ~log_unresolved:speculation_id cx reason imap u in - (* fully resolve the collected types *) - resolve_bindings_init cx trace reason (bindings_of_jobs cx trace imap) @@ - (* ...and then begin the choice-making process *) - try_flow_continuation cx trace reason speculation_id (IntersectionCases(ts, u)) - -(* Preprocessing for intersection types. + let imap = ResolvableTypeJob.collect_of_use ~log_unresolved:speculation_id cx imap u in + (* fully resolve the collected types *) + resolve_bindings_init cx trace reason (bindings_of_jobs cx trace imap) + @@ (* ...and then begin the choice-making process *) + try_flow_continuation cx trace reason speculation_id (IntersectionCases (ts, u)) + (* Preprocessing for intersection types. Before feeding into the choice-making machinery described above, we preprocess upper bounds of intersection types. This preprocessing seems @@ -7983,87 +8397,141 @@ and try_intersection cx trace u reason rep = for those parts, call the choice-making process. *) -(** The following function concretizes each tvar in unresolved in turn, + (** The following function concretizes each tvar in unresolved in turn, recording their corresponding concrete lower bounds in resolved as it goes. At each step, it emits a ConcretizeTypes constraint on an unresolved tvar, which in turn calls into this function when a concrete lower bound appears on that tvar. **) -and prep_try_intersection cx trace reason unresolved resolved u r rep = - match unresolved with - | [] -> try_intersection cx trace (replace_parts resolved u) r rep - | tvar::unresolved -> - rec_flow cx trace (tvar, intersection_preprocess_kit reason - (ConcretizeTypes (unresolved, resolved, DefT (r, IntersectionT rep), u))) - -(* some patterns need to be concretized before proceeding further *) -and patt_that_needs_concretization = function - | OpenT _ | DefT (_, UnionT _) | DefT (_, MaybeT _) | DefT (_, OptionalT _) | AnnotT _ -> true - | _ -> false + and prep_try_intersection cx trace reason unresolved resolved u r rep = + match unresolved with + | [] -> try_intersection cx trace (replace_parts cx resolved u) r rep + | tvar :: unresolved -> + rec_flow + cx + trace + ( tvar, + intersection_preprocess_kit + reason + (ConcretizeTypes (unresolved, resolved, IntersectionT (r, rep), u)) ) + + (* some patterns need to be concretized before proceeding further *) + and patt_that_needs_concretization = function + | OpenT _ + | UnionT _ + | MaybeT _ + | OptionalT _ + | AnnotT _ -> + true + | _ -> false -(* for now, we only care about concretizating parts of functions and calls *) -and parts_to_replace = function - | UseT (_, DefT (_, FunT (_, _, ft))) -> - let ts = List.fold_left (fun acc (_, t) -> - if patt_that_needs_concretization t - then t::acc - else acc - ) [] ft.params in - (match ft.rest_param with - | Some (_, _, t) when patt_that_needs_concretization t -> t::ts - | _ -> ts) - | CallT (_, _, callt) -> - List.fold_left (fun acc -> function - | Arg t | SpreadArg t when patt_that_needs_concretization t -> t::acc - | _ -> acc - ) [] callt.call_args_tlist - | _ -> [] + (* for now, we only care about concretizating parts of functions and calls *) + and parts_to_replace cx = function + | UseT (_, DefT (_, _, ObjT { call_t = Some id; _ })) -> + begin + match Context.find_call cx id with + | DefT (_, _, FunT (_, _, ft)) -> + let ts = + 
List.fold_left + (fun acc (_, t) -> + if patt_that_needs_concretization t then + t :: acc + else + acc) + [] + ft.params + in + (match ft.rest_param with + | Some (_, _, t) when patt_that_needs_concretization t -> t :: ts + | _ -> ts) + | _ -> [] + end + | UseT (_, DefT (_, _, FunT (_, _, ft))) -> + let ts = + List.fold_left + (fun acc (_, t) -> + if patt_that_needs_concretization t then + t :: acc + else + acc) + [] + ft.params + in + (match ft.rest_param with + | Some (_, _, t) when patt_that_needs_concretization t -> t :: ts + | _ -> ts) + | CallT (_, _, callt) -> + List.fold_left + (fun acc -> function + | Arg t + | SpreadArg t + when patt_that_needs_concretization t -> + t :: acc + | _ -> acc) + [] + callt.call_args_tlist + | _ -> [] + + (* replace unresolved types (xs) with resolved (ys) *) + and replace_parts = + let rec replace_params acc = function + | (ys, []) -> (ys, List.rev acc) + | (ys, ((name, x) as param) :: params) -> + if patt_that_needs_concretization x then + replace_params ((name, List.hd ys) :: acc) (List.tl ys, params) + else + replace_params (param :: acc) (ys, params) + in + let replace_rest_param = function + | (ys, None) -> (ys, None) + | (ys, (Some (name, loc, x) as param)) -> + if patt_that_needs_concretization x then + (List.tl ys, Some (name, loc, List.hd ys)) + else + (ys, param) + in + let replace_arg ys = function + | Arg x when patt_that_needs_concretization x -> (Arg (List.hd ys), List.tl ys) + | SpreadArg x when patt_that_needs_concretization x -> (SpreadArg (List.hd ys), List.tl ys) + | arg -> (arg, ys) + in + let rec replace_args acc = function + | (ys, []) -> (ys, List.rev acc) + | (ys, arg :: args) -> + let (arg, ys) = replace_arg ys arg in + replace_args (arg :: acc) (ys, args) + in + fun cx resolved -> function + | UseT (op, DefT (r1, t1, ObjT ({ call_t = Some id; _ } as o))) as u -> + begin + match Context.find_call cx id with + | DefT (r2, t2, FunT (static, proto, ft)) -> + let (resolved, params) = replace_params [] (resolved, ft.params) in + let (resolved, rest_param) = replace_rest_param (resolved, ft.rest_param) in + assert (resolved = []); + let id' = + Context.make_call_prop + cx + (DefT (r2, t2, FunT (static, proto, { ft with params; rest_param }))) + in + UseT (op, DefT (r1, t1, ObjT { o with call_t = Some id' })) + | _ -> u + end + | UseT (op, DefT (r, trust, FunT (t1, t2, ft))) -> + let (resolved, params) = replace_params [] (resolved, ft.params) in + let (resolved, rest_param) = replace_rest_param (resolved, ft.rest_param) in + assert (resolved = []); + UseT (op, DefT (r, trust, FunT (t1, t2, { ft with params; rest_param }))) + | CallT (op, r, callt) -> + let (resolved, call_args_tlist) = replace_args [] (resolved, callt.call_args_tlist) in + assert (resolved = []); + CallT (op, r, { callt with call_args_tlist }) + | u -> u -(* replace unresolved types (xs) with resolved (ys) *) -and replace_parts = - let rec replace_params acc = function - | ys, [] -> ys, List.rev acc - | ys, ((name, x) as param)::params -> - if patt_that_needs_concretization x - then replace_params ((name, List.hd ys)::acc) (List.tl ys, params) - else replace_params (param::acc) (ys, params) - in - let replace_rest_param = function - | ys, None -> ys, None - | ys, (Some (name, loc, x) as param) -> - if patt_that_needs_concretization x - then List.tl ys, Some (name, loc, List.hd ys) - else ys, param - in - let replace_arg ys = function - | Arg x when patt_that_needs_concretization x -> - Arg (List.hd ys), List.tl ys - | SpreadArg x when patt_that_needs_concretization 
x -> - SpreadArg (List.hd ys), List.tl ys - | arg -> arg, ys - in - let rec replace_args acc = function - | ys, [] -> ys, List.rev acc - | ys, arg::args -> - let arg, ys = replace_arg ys arg in - replace_args (arg::acc) (ys, args) - in - fun resolved -> function - | UseT (op, DefT (r, FunT (t1, t2, ft))) -> - let resolved, params = replace_params [] (resolved, ft.params) in - let resolved, rest_param = replace_rest_param (resolved, ft.rest_param) in - assert (resolved = []); - UseT (op, DefT (r, FunT (t1, t2, { ft with params; rest_param }))) - | CallT (op, r, callt) -> - let resolved, call_args_tlist = replace_args [] (resolved, callt.call_args_tlist) in - assert (resolved = []); - CallT (op, r, { callt with call_args_tlist }) - | u -> u - -(************************) -(* Full type resolution *) -(************************) - -(* Here we continue where we left off at ResolvableTypeJob. Once we have + (************************) + (* Full type resolution *) + (************************) + + (* Here we continue where we left off at ResolvableTypeJob. Once we have collected a set of type resolution jobs, we create so-called bindings from these jobs. A binding is a (id, tvar) pair, where tvar is what needs to be resolved, and id is an identifier that serves as an index for that job. @@ -8083,34 +8551,38 @@ and replace_parts = These decisions were made in ResolvableTypeJob.collect_of_types and are reflected in the use (or not) of OpenUnresolved (see below). *) + and bindings_of_jobs cx trace jobs = + IMap.fold + ResolvableTypeJob.( + fun id job bindings -> + match job with + | OpenResolved -> bindings + | Binding tvar -> (id, tvar) :: bindings + | OpenUnresolved (log_unresolved, reason, id) -> + begin + match log_unresolved with + | Some speculation_id -> + Speculation.add_unresolved_to_speculation cx speculation_id id + | None -> + Unsoundness.unresolved_any reason |> resolve_id cx trace ~use_op:unknown_use id + end; + bindings) + jobs + [] -and bindings_of_jobs cx trace jobs = - IMap.fold ResolvableTypeJob.(fun id job bindings -> match job with - | OpenResolved -> bindings - | Binding tvar -> (id, tvar)::bindings - | OpenUnresolved (log_unresolved, reason, id) -> - begin match log_unresolved with - | Some speculation_id -> - Speculation.add_unresolved_to_speculation cx speculation_id id - | None -> - resolve_id cx trace ~use_op:unknown_use id (AnyT.make reason) - end; - bindings - ) jobs [] - -(* Entry point into full type resolution. Create an identifier for the goal + (* Entry point into full type resolution. Create an identifier for the goal tvar, and call the general full type resolution function below. *) -and resolve_bindings_init cx trace reason bindings done_tvar = - let id = create_goal cx done_tvar in - resolve_bindings cx trace reason id bindings + and resolve_bindings_init cx trace reason bindings done_tvar = + let id = create_goal cx done_tvar in + resolve_bindings cx trace reason id bindings -and create_goal cx tvar = - let i = mk_id () in - Graph_explorer.node (Context.type_graph cx) i; - Context.set_evaluated cx (IMap.add i tvar (Context.evaluated cx)); - i + and create_goal cx tvar = + let i = mk_id () in + Graph_explorer.node (Context.type_graph cx) i; + Context.set_evaluated cx (IMap.add i tvar (Context.evaluated cx)); + i -(* Let id be the identifier associated with a tvar that is not yet + (* Let id be the identifier associated with a tvar that is not yet resolved. 
(Here, resolved/unresolved refer to the state of the tvar in the context graph: does it point to Resolved _ or Unresolved _?) As soon as the tvar is resolved to some type, we generate some bindings by walking that @@ -8144,113 +8616,102 @@ and create_goal cx tvar = called, which in turn calls back into this function (thus closing the recursive loop). *) - -and resolve_bindings cx trace reason id bindings = - let bindings = filter_bindings cx bindings in - let fully_resolve_ids = connect_id_to_bindings cx id bindings in - ISet.iter (fun id -> - match IMap.get id (Context.evaluated cx) with - | None -> () - | Some tvar -> trigger cx trace reason tvar - ) fully_resolve_ids; - List.iter (resolve_binding cx trace reason) bindings - -and fully_resolve_type cx trace reason id t = - if is_unexplored_source cx id then - let imap = ResolvableTypeJob.collect_of_type cx reason IMap.empty t in - let bindings = bindings_of_jobs cx trace imap in - (* NOTE: bindings_of_jobs might change the state of id because it resolves it, so check - again. TODO: there must be a better way *) + and resolve_bindings cx trace reason id bindings = + let bindings = filter_bindings cx bindings in + let fully_resolve_ids = connect_id_to_bindings cx id bindings in + ISet.iter + (fun id -> + match IMap.get id (Context.evaluated cx) with + | None -> () + | Some tvar -> trigger cx trace reason tvar) + fully_resolve_ids; + List.iter (resolve_binding cx trace reason) bindings + + and fully_resolve_type cx trace reason id t = if is_unexplored_source cx id then - resolve_bindings cx trace reason id bindings + let imap = ResolvableTypeJob.collect_of_type cx IMap.empty t in + let bindings = bindings_of_jobs cx trace imap in + (* NOTE: bindings_of_jobs might change the state of id because it resolves it, so check + again. TODO: there must be a better way *) + if is_unexplored_source cx id then resolve_bindings cx trace reason id bindings -and filter_bindings cx = - List.filter (fun (id, _) -> is_unfinished_target cx id) + and filter_bindings cx = List.filter (fun (id, _) -> is_unfinished_target cx id) -and connect_id_to_bindings cx id bindings = - let ids, _ = List.split bindings in - Graph_explorer.edges (Context.type_graph cx) (id, ids) + and connect_id_to_bindings cx id bindings = + let (ids, _) = List.split bindings in + Graph_explorer.edges (Context.type_graph cx) (id, ids) -(* Sanity conditions on source and target before adding edges to the + (* Sanity conditions on source and target before adding edges to the graph. Nodes are in one of three states, described in Graph_explorer: Not_found (corresponding to unresolved tvars), Found _ (corresponding to resolved but not yet fully resolved tvars), and Finished (corresponding to fully resolved tvars). 
*) + and is_unexplored_source cx id = + match Graph_explorer.stat_graph id (Context.type_graph cx) with + | Graph_explorer.Finished -> false + | Graph_explorer.Node_not_found -> false + | Graph_explorer.Found node -> Graph_explorer.is_unexplored_node node -and is_unexplored_source cx id = - match Graph_explorer.stat_graph id (Context.type_graph cx) with - | Graph_explorer.Finished -> false - | Graph_explorer.Not_found -> false - | Graph_explorer.Found node -> Graph_explorer.is_unexplored_node node - -and is_unfinished_target cx id = - let type_graph = Context.type_graph cx in - match Graph_explorer.stat_graph id type_graph with - | Graph_explorer.Finished -> false - | Graph_explorer.Not_found -> - Graph_explorer.node type_graph id; - true - | Graph_explorer.Found node -> - not (Graph_explorer.is_finished_node node) + and is_unfinished_target cx id = + let type_graph = Context.type_graph cx in + match Graph_explorer.stat_graph id type_graph with + | Graph_explorer.Finished -> false + | Graph_explorer.Node_not_found -> + Graph_explorer.node type_graph id; + true + | Graph_explorer.Found node -> not (Graph_explorer.is_finished_node node) -(** utils for creating toolkit types **) + (** utils for creating toolkit types **) -and choice_kit reason k = - InternalT (ChoiceKitT (reason, k)) + and choice_kit reason k = InternalT (ChoiceKitT (reason, k)) -and choice_kit_use reason k = - ChoiceKitUseT (reason, k) + and choice_kit_use reason k = ChoiceKitUseT (reason, k) -and intersection_preprocess_kit reason k = - IntersectionPreprocessKitT (reason, k) + and intersection_preprocess_kit reason k = IntersectionPreprocessKitT (reason, k) -(** utils for emitting toolkit constraints **) + (** utils for emitting toolkit constraints **) -and trigger cx trace reason done_tvar = - rec_flow cx trace (choice_kit reason Trigger, UseT (unknown_use, done_tvar)) + and trigger cx trace reason done_tvar = + rec_flow cx trace (choice_kit reason Trigger, UseT (unknown_use, done_tvar)) -and try_flow_continuation cx trace reason speculation_id spec = - tvar_with_constraint cx ~trace - (choice_kit_use reason (TryFlow (speculation_id, spec))) + and try_flow_continuation cx trace reason speculation_id spec = + tvar_with_constraint cx ~trace (choice_kit_use reason (TryFlow (speculation_id, spec))) -and resolve_binding cx trace reason (id, tvar) = - rec_flow cx trace ( - OpenT tvar, - choice_kit_use reason (FullyResolveType id) - ) + and resolve_binding cx trace reason (id, tvar) = + rec_flow cx trace (OpenT tvar, choice_kit_use reason (FullyResolveType id)) -(************************) -(* Speculative matching *) -(************************) + (************************) + (* Speculative matching *) + (************************) -(* Speculatively match a pair of types, returning whether some error was + (* Speculatively match a pair of types, returning whether some error was encountered or not. Speculative matching happens in the context of a particular "branch": this context controls how some constraints emitted during the matching might be processed. See comments in Speculation for details on branches. See also speculative_matches, which calls this function iteratively and processes its results. 
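speculative_match above snapshots the mutable state a trial can touch (the typeapp expansion stack and the flow-constraint cache), attempts the flow, and restores the snapshot on every exit path, turning a SpeculativeError into a returned error while re-raising anything else. A reduced sketch of that save/try/restore pattern follows; the state cells and `Speculative_error` are invented placeholders, not Flow's actual caches.

exception Speculative_error of string

(* Mutable state that a speculative trial might touch. *)
let constraint_cache : (string, unit) Hashtbl.t = Hashtbl.create 16
let typeapp_stack : string list ref = ref []

(* Run [attempt], restoring the snapshot on every exit path, and turn a
   speculative failure into [Some message] rather than an exception. *)
let speculative_match attempt =
  let saved_stack = !typeapp_stack in
  let saved_cache = Hashtbl.copy constraint_cache in
  let restore () =
    typeapp_stack := saved_stack;
    Hashtbl.reset constraint_cache;
    Hashtbl.iter (Hashtbl.replace constraint_cache) saved_cache
  in
  match attempt () with
  | () -> restore (); None
  | exception Speculative_error msg -> restore (); Some msg
  | exception exn -> restore (); raise exn

let () =
  typeapp_stack := [ "outer" ];
  let result =
    speculative_match (fun () ->
        typeapp_stack := "trial" :: !typeapp_stack;
        raise (Speculative_error "branch did not match"))
  in
  (match result with
   | Some msg -> Printf.printf "trial failed: %s\n" msg
   | None -> print_endline "trial succeeded");
  assert (!typeapp_stack = [ "outer" ])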
*) -and speculative_match cx trace branch l u = - let typeapp_stack = TypeAppExpansion.get () in - let cache = !Cache.FlowConstraint.cache in - Speculation.set_speculative branch; - let restore () = - Speculation.restore_speculative (); - Cache.FlowConstraint.cache := cache; - TypeAppExpansion.set typeapp_stack - in - try - rec_flow cx trace (l, u); - restore (); - None - with - | SpeculativeError err -> - restore (); - Some err - | exn -> - restore (); - raise exn - -(* Speculatively match several alternatives in turn, as presented when checking + and speculative_match cx trace branch l u = + let typeapp_stack = TypeAppExpansion.get () in + let cache = !Cache.FlowConstraint.cache in + Speculation.set_speculative branch; + let restore () = + Speculation.restore_speculative (); + Cache.FlowConstraint.cache := cache; + TypeAppExpansion.set typeapp_stack + in + try + rec_flow cx trace (l, u); + restore (); + None + with + | SpeculativeError err -> + restore (); + Some err + | exn -> + restore (); + raise exn + + (* Speculatively match several alternatives in turn, as presented when checking a union or intersection type. This process maintains a so-called "match state" that describes the best possible choice found so far, and can terminate in various ways: @@ -8293,19 +8754,21 @@ and speculative_match cx trace branch l u = error messages and to ignore unresolved tvars that are deemed irrelevant to choice-making. *) -and speculative_matches cx trace r speculation_id spec = - (* explore optimization opportunities *) - if optimize_spec_try_shortcut cx trace r spec then () - else long_path_speculative_matches cx trace r speculation_id spec - -and long_path_speculative_matches cx trace r speculation_id spec = Speculation.Case.( - (* extract stuff to ignore while considering actions *) - let ignore = ignore_of_spec spec in - (* split spec into a list of pairs of types to try speculative matching on *) - let trials = trials_of_spec spec in + and speculative_matches cx trace r speculation_id spec = + (* explore optimization opportunities *) + if optimize_spec_try_shortcut cx trace r spec then + () + else + long_path_speculative_matches cx trace r speculation_id spec - let rec loop match_state = function - (* Here match_state can take on various values: + and long_path_speculative_matches cx trace r speculation_id spec = + Speculation.Case.( + (* extract stuff to ignore while considering actions *) + let ignore = ignore_of_spec spec in + (* split spec into a list of pairs of types to try speculative matching on *) + let trials = trials_of_spec spec in + let rec loop match_state = function + (* Here match_state can take on various values: (a) (NoMatch errs) indicates that everything has failed up to this point, with errors recorded in errs. Note that the initial value of acc is @@ -8314,100 +8777,104 @@ and long_path_speculative_matches cx trace r speculation_id spec = Speculation.C (b) (ConditionalMatch case) indicates the a promising alternative has been found, but not chosen yet. *) - | [] -> return match_state - - | (case_id, case_r, l, u)::trials -> - let case = { case_id; unresolved = ISet.empty; actions = []} in - (* speculatively match the pair of types in this trial *) - let error = speculative_match cx trace - { Speculation.ignore; speculation_id; case } l u in - match error with - | None -> - (* no error, looking great so far... *) - begin match match_state with - | Speculation.NoMatch _ -> - (* everything had failed up to this point. so no ambiguity yet... 
*) - if ISet.is_empty case.unresolved - (* ...and no unresolved tvars encountered during the speculative + | [] -> return match_state + | (case_id, case_r, l, u) :: trials -> + let case = { case_id; unresolved = ISet.empty; actions = [] } in + (* speculatively match the pair of types in this trial *) + let error = + speculative_match cx trace { Speculation.ignore; speculation_id; case } l u + in + (match error with + | None -> + (* no error, looking great so far... *) + begin + match match_state with + | Speculation.NoMatch _ -> + (* everything had failed up to this point. so no ambiguity yet... *) + if + ISet.is_empty case.unresolved + (* ...and no unresolved tvars encountered during the speculative match! This is great news. It means that this alternative will definitely succeed. Fire any deferred actions and short-cut. *) - then fire_actions cx trace spec case.actions - (* Otherwise, record that we've found a promising alternative. *) - else loop (Speculation.ConditionalMatch case) trials - - | Speculation.ConditionalMatch prev_case -> - (* umm, there's another previously found promising alternative *) - (* so compute the difference in side effects between that alternative + then + fire_actions cx trace spec case.actions + (* Otherwise, record that we've found a promising alternative. *) + else + loop (Speculation.ConditionalMatch case) trials + | Speculation.ConditionalMatch prev_case -> + (* umm, there's another previously found promising alternative *) + (* so compute the difference in side effects between that alternative and this *) - let ts = diff prev_case case in - (* if the side effects of the previously found promising alternative + let ts = diff cx prev_case case in + (* if the side effects of the previously found promising alternative are fewer, then keep holding on to that alternative *) - if ts = [] then loop match_state trials - (* otherwise, we have an ambiguity; blame the unresolved tvars and + if ts = [] then + loop match_state trials + (* otherwise, we have an ambiguity; blame the unresolved tvars and short-cut *) - else begin - let prev_case_id = prev_case.case_id in - let cases: Type.t list = choices_of_spec spec in - blame_unresolved cx trace prev_case_id case_id cases case_r r ts - end - end - | Some err -> - (* if an error is found, then throw away this alternative... *) - begin match match_state with - | Speculation.NoMatch errs -> - (* ...adding to the error list if no promising alternative has been + else + let prev_case_id = prev_case.case_id in + let cases : Type.t list = choices_of_spec spec in + blame_unresolved cx trace prev_case_id case_id cases case_r ts + end + | Some err -> + (* if an error is found, then throw away this alternative... *) + begin + match match_state with + | Speculation.NoMatch errs -> + (* ...adding to the error list if no promising alternative has been found yet *) - loop (Speculation.NoMatch (err::errs)) trials - | _ -> loop match_state trials - end - - and return = function - | Speculation.ConditionalMatch case -> - (* best choice that survived, congrats! fire deferred actions *) - fire_actions cx trace spec case.actions - | Speculation.NoMatch msgs -> - (* everything failed; make a really detailed error message listing out the + loop (Speculation.NoMatch (err :: errs)) trials + | _ -> loop match_state trials + end) + and return = function + | Speculation.ConditionalMatch case -> + (* best choice that survived, congrats! 
fire deferred actions *) + fire_actions cx trace spec case.actions + | Speculation.NoMatch msgs -> + (* everything failed; make a really detailed error message listing out the error found for each alternative *) - let ts = choices_of_spec spec in - assert (List.length ts = List.length msgs); - let branches = List.mapi (fun i msg -> - let reason = reason_of_t (List.nth ts i) in - (reason, msg) - ) msgs in - (* Add the error. *) - begin match spec with - | UnionCases (use_op, l, _rep, us) -> - let reason = reason_of_t l in - let reason_op = mk_union_reason r us in - add_output cx ~trace - (Flow_error.EUnionSpeculationFailed { use_op; reason; reason_op; branches }) - - | IntersectionCases (ls, upper) -> - let err = - let reason_lower = mk_intersection_reason r ls in - match upper with - | UseT (use_op, t) -> - Flow_error.EIncompatibleDefs { - use_op; - reason_lower; - reason_upper = reason_of_t t; - branches; - } - | _ -> - Flow_error.EIncompatible { - use_op = use_op_of_use_t upper; - lower = (reason_lower, Some Flow_error.Incompatible_intersection); - upper = (reason_of_use_t upper, flow_error_kind_of_upper upper); - branches; - } - in - add_output cx ~trace err - end - - in loop (Speculation.NoMatch []) trials -) + let ts = choices_of_spec spec in + assert (List.length ts = List.length msgs); + let branches = + List.mapi + (fun i msg -> + let reason = reason_of_t (List.nth ts i) in + (reason, msg)) + msgs + in + (* Add the error. *) + begin + match spec with + | UnionCases (use_op, l, _rep, us) -> + let reason = reason_of_t l in + let reason_op = mk_union_reason r us in + add_output + cx + ~trace + (Error_message.EUnionSpeculationFailed { use_op; reason; reason_op; branches }) + | IntersectionCases (ls, upper) -> + let err = + let reason_lower = mk_intersection_reason r ls in + match upper with + | UseT (use_op, t) -> + Error_message.EIncompatibleDefs + { use_op; reason_lower; reason_upper = reason_of_t t; branches } + | _ -> + Error_message.EIncompatible + { + use_op = use_op_of_use_t upper; + lower = (reason_lower, Some Error_message.Incompatible_intersection); + upper = (reason_of_use_t upper, error_message_kind_of_upper upper); + branches; + } + in + add_output cx ~trace err + end + in + loop (Speculation.NoMatch []) trials) -(* Make an informative error message that points out the ambiguity, and where + (* Make an informative error message that points out the ambiguity, and where additional annotations can help disambiguate. Recall that an ambiguity arises precisely when: @@ -8421,41 +8888,40 @@ and long_path_speculative_matches cx trace r speculation_id spec = Speculation.C encounters potentially side-effectful constraints involving unresolved tvars during a trial. 
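To summarize the control flow spelled out above: the loop carries either NoMatch with the errors seen so far or ConditionalMatch with one promising branch; a branch that matches with no unresolved tvars wins immediately, a second promising branch leads to the ambiguity report (after the side-effect comparison, which the model below skips), and exhausting the trials either commits the conditional match or fails with one error per branch. The following is a compact standalone model with invented `trial_result` and `outcome` types, not the real Speculation module.

type 'err outcome =
  | Chosen of int              (* index of the branch that definitely matches *)
  | Ambiguous of int * int     (* two promising branches; needs annotations *)
  | Failed of 'err list        (* every branch failed; one error per branch *)

type trial_result =
  | Ok_no_unresolved           (* matched with no unresolved tvars: commit now *)
  | Ok_unresolved              (* matched, but only conditionally *)
  | Err of string

let speculate trials =
  let rec loop state i = function
    | [] ->
      (match state with
       | `Conditional j -> Chosen j
       | `No_match errs -> Failed (List.rev errs))
    | trial :: rest ->
      (match trial (), state with
       | Ok_no_unresolved, _ -> Chosen i                               (* short-cut *)
       | Ok_unresolved, `No_match _ -> loop (`Conditional i) (i + 1) rest
       | Ok_unresolved, `Conditional j -> Ambiguous (j, i)
       | Err e, `No_match errs -> loop (`No_match (e :: errs)) (i + 1) rest
       | Err _, `Conditional _ -> loop state (i + 1) rest)
  in
  loop (`No_match []) 0 trials

let () =
  let trials =
    [ (fun () -> Err "branch 0: number incompatible with string");
      (fun () -> Ok_unresolved);
      (fun () -> Ok_unresolved) ]
  in
  match speculate trials with
  | Ambiguous (a, b) -> Printf.printf "ambiguous between branches %d and %d\n" a b
  | Chosen i -> Printf.printf "chose branch %d\n" i
  | Failed errs -> Printf.printf "all %d branches failed\n" (List.length errs)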
*) -and blame_unresolved cx trace prev_i i cases case_r r tvars = - let rs = tvars |> List.map (fun (_, r) -> r) |> List.sort compare in - let prev_case = reason_of_t (List.nth cases prev_i) in - let case = reason_of_t (List.nth cases i) in - add_output cx ~trace (FlowError.ESpeculationAmbiguous ( - (case_r, r), - (prev_i, prev_case), - (i, case), - rs - )) - -and trials_of_spec = function - | UnionCases (use_op, l, _rep, us) -> - (* NB: Even though we know the use_op for the original constraint, don't + and blame_unresolved cx trace prev_i i cases case_r tvars = + let rs = tvars |> Core_list.map ~f:(fun (_, r) -> r) |> List.sort compare in + let prev_case = reason_of_t (List.nth cases prev_i) in + let case = reason_of_t (List.nth cases i) in + add_output + cx + ~trace + (Error_message.ESpeculationAmbiguous + { reason = case_r; prev_case = (prev_i, prev_case); case = (i, case); cases = rs }) + + and trials_of_spec = function + | UnionCases (use_op, l, _rep, us) -> + (* NB: Even though we know the use_op for the original constraint, don't embed it in the nested constraints to avoid unnecessary verbosity. We will unwrap the original use_op once in EUnionSpeculationFailed. *) - List.mapi (fun i u -> (i, reason_of_t l, l, UseT (Op (Speculation use_op), u))) us - | IntersectionCases (ls, u) -> - List.mapi (fun i l -> (i, reason_of_use_t u, l, - mod_use_op_of_use_t (fun use_op -> Op (Speculation use_op)) u)) ls - -and choices_of_spec = function - | UnionCases (_, _, _, ts) - | IntersectionCases (ts, _) - -> ts - -and ignore_of_spec = function - | IntersectionCases (_, CallT (_, _, { - call_tout = OpenT (_, id); _ - })) -> Some id - | IntersectionCases (_, GetPropT (_, _, _, OpenT (_, id))) -> Some id - | _ -> None - -(* spec optimization *) -(* Currently, the only optimizations we do are for enums and for disjoint unions. + List.mapi (fun i u -> (i, reason_of_t l, l, UseT (Op (Speculation use_op), u))) us + | IntersectionCases (ls, u) -> + List.mapi + (fun i l -> + (i, reason_of_use_t u, l, mod_use_op_of_use_t (fun use_op -> Op (Speculation use_op)) u)) + ls + + and choices_of_spec = function + | UnionCases (_, _, _, ts) + | IntersectionCases (ts, _) -> + ts + + and ignore_of_spec = function + | IntersectionCases (_, CallT (_, _, { call_tout = OpenT (_, id); _ })) -> Some id + | IntersectionCases (_, GetPropT (_, _, _, OpenT (_, id))) -> Some id + | _ -> None + + (* spec optimization *) + (* Currently, the only optimizations we do are for enums and for disjoint unions. When a literal type is checked against a union of literal types, we hope the union is an enum and try to optimize the representation of the union as such. We also try to use our optimization to @@ -8466,688 +8932,694 @@ and ignore_of_spec = function during speculative matching, by checking sentinel properties first we force immediate match failures in the vast majority of cases without having to do any useless additional work. 
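The optimization described above represents a union of literal/singleton types as a set so that a literal lower bound can be answered by membership: Yes and No short-circuit, and only Unknown (or the conditional case, omitted here) falls through to full speculative matching. A small sketch of the enum case for string literals, with an invented `union_rep`:

module SSet = Set.Make (String)

type quick_mem_result =
  | Yes        (* literal is definitely a member: done *)
  | No         (* definitely not a member: report the mismatch immediately *)
  | Unknown    (* representation is not an enum: fall back to speculation *)

(* A union optimized as an enum of string literals, or left unoptimized. *)
type union_rep =
  | Enum of SSet.t
  | Unoptimized

let quick_mem_enum rep lit =
  match rep with
  | Enum members -> if SSet.mem lit members then Yes else No
  | Unoptimized -> Unknown

let () =
  let rep = Enum (SSet.of_list [ "diff"; "commit"; "status" ]) in
  match quick_mem_enum rep "commit" with
  | Yes -> print_endline "shortcut: member, no speculation needed"
  | No -> print_endline "shortcut: definite mismatch"
  | Unknown -> print_endline "fall back to speculative matching"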
*) -and optimize_spec_try_shortcut cx trace reason_op = function - | UnionCases (use_op, l, rep, _ts) -> - if not (UnionRep.is_optimized_finally rep) - then UnionRep.optimize rep - ~flatten:(Type_mapper.union_flatten cx) - ~find_resolved:(Context.find_resolved cx) - ~find_props:(Context.find_props cx); - begin match l with - | DefT (_, - (StrT (Literal _) | NumT (Literal _) | BoolT (Some _) | - SingletonStrT _ | SingletonNumT _ | SingletonBoolT _ | - VoidT | NullT)) -> - shortcut_enum cx trace reason_op use_op l rep - | DefT (_, ObjT _) | ExactT (_, DefT (_, ObjT _)) -> - shortcut_disjoint_union cx trace reason_op use_op l rep - | _ -> false - end - | IntersectionCases _ -> false - -and shortcut_enum cx trace reason_op use_op l rep = - quick_mem_result cx trace reason_op use_op l rep @@ - UnionRep.quick_mem_enum l rep - -and shortcut_disjoint_union cx trace reason_op use_op l rep = - quick_mem_result cx trace reason_op use_op l rep @@ - UnionRep.quick_mem_disjoint_union l rep - ~find_resolved:(Context.find_resolved cx) - ~find_props:(Context.find_props cx) - -and quick_mem_result cx trace reason_op use_op l rep = function - | UnionRep.Yes -> (* membership check succeeded *) - true (* Our work here is done, so no need to continue. *) - | UnionRep.No -> (* membership check failed *) - let r = UnionRep.specialized_reason reason_op rep in - rec_flow cx trace (l, UseT (use_op, DefT (r, EmptyT))); - true (* Our work here is done, so no need to continue. *) - | UnionRep.Conditional t -> (* conditional match *) - rec_flow cx trace (l, UseT (use_op, t)); - true (* Our work here is done, so no need to continue. *) - | UnionRep.Unknown -> (* membership check was inconclusive *) - false (* Continue to speculative matching. *) - -(* When we fire_actions we also need to reconstruct the use_op for each action - * since before beginning speculation we replaced each use_op with - * an UnknownUse. *) -and fire_actions cx trace spec = List.iter (function - | _, Speculation.Action.Flow (l, u) -> (match spec with - | IntersectionCases (_, u') -> - let use_op = use_op_of_use_t u' in - (match use_op with - | None -> rec_flow cx trace (l, u) - | Some use_op -> - rec_flow cx trace (l, - mod_use_op_of_use_t (replace_speculation_root_use_op use_op) u)) - | UnionCases (use_op, _, _, _) -> - rec_flow cx trace (l, - mod_use_op_of_use_t (replace_speculation_root_use_op use_op) u) - ) - | _, Speculation.Action.Unify (use_op, t1, t2) -> (match spec with - | IntersectionCases (_, u') -> - let use_op' = use_op_of_use_t u' in - (match use_op' with - | None -> rec_unify cx trace t1 t2 ~use_op - | Some use_op' -> - rec_unify cx trace t1 t2 - ~use_op:(replace_speculation_root_use_op use_op' use_op) - ) - | UnionCases (use_op', _, _, _) -> - rec_unify cx trace t1 t2 - ~use_op:(replace_speculation_root_use_op use_op' use_op) - ) - | _, Speculation.Action.Error msg -> - add_output cx ~trace msg -) - -and mk_union_reason r us = - List.fold_left (fun reason t -> - let rdesc = string_of_desc (desc_of_reason ~unwrap:false reason) in - let tdesc = string_of_desc (desc_of_reason ~unwrap:false (reason_of_t t)) in - let udesc = if not (String_utils.string_starts_with rdesc "union:") - then spf "union: %s" tdesc - else if String_utils.string_ends_with rdesc "..." - then rdesc - else if String_utils.string_ends_with rdesc (tdesc ^ "(s)") - then rdesc - else if String.length rdesc >= 256 - then spf "%s | ..." 
rdesc - else if String_utils.string_ends_with rdesc tdesc - then spf "%s(s)" rdesc - else spf "%s | %s" rdesc tdesc - in - replace_reason_const (RCustom udesc) reason - ) r us - -and mk_intersection_reason r _ls = - replace_reason_const RIntersection r + and optimize_spec_try_shortcut cx trace reason_op = function + | UnionCases (use_op, l, rep, _ts) -> + if not (UnionRep.is_optimized_finally rep) then + UnionRep.optimize + rep + ~flatten:(Type_mapper.union_flatten cx) + ~find_resolved:(Context.find_resolved cx) + ~find_props:(Context.find_props cx); + begin + match l with + | DefT + ( _, + _, + ( StrT (Literal _) + | NumT (Literal _) + | BoolT (Some _) + | SingletonStrT _ | SingletonNumT _ | SingletonBoolT _ | VoidT | NullT ) ) -> + shortcut_enum cx trace reason_op use_op l rep + | DefT (_, _, ObjT _) + | ExactT (_, DefT (_, _, ObjT _)) -> + shortcut_disjoint_union cx trace reason_op use_op l rep + | _ -> false + end + | IntersectionCases _ -> false + + and shortcut_enum cx trace reason_op use_op l rep = + quick_mem_result cx trace reason_op use_op l rep + @@ UnionRep.quick_mem_enum (Context.trust_errors cx) l rep + + and shortcut_disjoint_union cx trace reason_op use_op l rep = + quick_mem_result cx trace reason_op use_op l rep + @@ UnionRep.quick_mem_disjoint_union + (Context.trust_errors cx) + l + rep + ~find_resolved:(Context.find_resolved cx) + ~find_props:(Context.find_props cx) + + and quick_mem_result cx trace reason_op use_op l rep = function + | UnionRep.Yes -> + (* membership check succeeded *) + true + (* Our work here is done, so no need to continue. *) + | UnionRep.No -> + (* membership check failed *) + let r = UnionRep.specialized_reason reason_op rep in + rec_flow cx trace (l, UseT (use_op, DefT (r, bogus_trust (), EmptyT Bottom))); + true + (* Our work here is done, so no need to continue. *) + | UnionRep.Conditional t -> + (* conditional match *) + rec_flow cx trace (l, UseT (use_op, t)); + true (* Our work here is done, so no need to continue. *) + | UnionRep.Unknown -> + (* membership check was inconclusive *) + false + + (* Continue to speculative matching. *) + + (* When we fire_actions we also need to reconstruct the use_op for each action + * since before beginning speculation we replaced each use_op with + * an UnknownUse. 
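As the comment above explains, constraints emitted during speculation are recorded as deferred actions on the current case, and fire_actions replays them once a branch is chosen (re-attaching the real use_op, which the sketch below leaves out). Here is a reduced defer-and-replay sketch with invented `action` and `mode` types; it illustrates the pattern, not Flow's Speculation.Action API.

(* While speculating we only record what we would have done. *)
type action =
  | Flow of string * string    (* lower, upper *)
  | Unify of string * string

type mode =
  | Speculating of action list ref
  | Committed

(* Emit either defers the action or performs it, depending on the mode. *)
let emit mode action =
  match mode with
  | Speculating deferred -> deferred := action :: !deferred
  | Committed ->
    (match action with
     | Flow (l, u) -> Printf.printf "flow %s ~> %s\n" l u
     | Unify (a, b) -> Printf.printf "unify %s = %s\n" a b)

(* Once a branch wins, replay its deferred actions for real. *)
let fire_actions deferred = List.iter (emit Committed) (List.rev !deferred)

let () =
  let deferred = ref [] in
  let mode = Speculating deferred in
  emit mode (Flow ("number", "number | string"));
  emit mode (Unify ("T", "number"));
  (* ...speculation decided this branch is the winner... *)
  fire_actions deferred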
*) + and fire_actions cx trace spec = + List.iter (function + | (_, Speculation.Action.Flow (l, u)) -> + (match spec with + | IntersectionCases (_, u') -> + let use_op = use_op_of_use_t u' in + (match use_op with + | None -> rec_flow cx trace (l, u) + | Some use_op -> + rec_flow cx trace (l, mod_use_op_of_use_t (replace_speculation_root_use_op use_op) u)) + | UnionCases (use_op, _, _, _) -> + rec_flow cx trace (l, mod_use_op_of_use_t (replace_speculation_root_use_op use_op) u)) + | (_, Speculation.Action.Unify (use_op, t1, t2)) -> + (match spec with + | IntersectionCases (_, u') -> + let use_op' = use_op_of_use_t u' in + (match use_op' with + | None -> rec_unify cx trace t1 t2 ~use_op + | Some use_op' -> + rec_unify cx trace t1 t2 ~use_op:(replace_speculation_root_use_op use_op' use_op)) + | UnionCases (use_op', _, _, _) -> + rec_unify cx trace t1 t2 ~use_op:(replace_speculation_root_use_op use_op' use_op)) + | (_, Speculation.Action.Error msg) -> add_output cx ~trace msg) + + and mk_union_reason r us = + List.fold_left + (fun reason t -> + let rdesc = string_of_desc (desc_of_reason ~unwrap:false reason) in + let tdesc = string_of_desc (desc_of_reason ~unwrap:false (reason_of_t t)) in + let udesc = + if not (String_utils.string_starts_with rdesc "union:") then + spf "union: %s" tdesc + else if String_utils.string_ends_with rdesc "..." then + rdesc + else if String_utils.string_ends_with rdesc (tdesc ^ "(s)") then + rdesc + else if String.length rdesc >= 256 then + spf "%s | ..." rdesc + else if String_utils.string_ends_with rdesc tdesc then + spf "%s(s)" rdesc + else + spf "%s | %s" rdesc tdesc + in + replace_desc_reason (RCustom udesc) reason) + r + us -(* property lookup functions in objects and instances *) + and mk_intersection_reason r _ls = + replace_desc_reason RIntersection r + (* property lookup functions in objects and instances *) -(** + (** * Determines whether a property name should be considered "munged"/private when * the `munge_underscores` config option is set. 
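For readers unfamiliar with the munging convention this comment refers to, here is a rough approximation of the predicate defined just after it. The single-leading-underscore rule below is an assumption for illustration; the real rule lives in Signature_utils.is_munged_property_name.

(* Approximation only: treat names with exactly one leading underscore as
   munged, and only when the config option is on. *)
let is_munged_approx ~should_munge_underscores name =
  should_munge_underscores
  && String.length name >= 1
  && name.[0] = '_'
  && not (String.length name >= 2 && name.[1] = '_')

let _example = is_munged_approx ~should_munge_underscores:true "_secret" (* true *)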
*) -and is_munged_prop_name cx name = - (Context.should_munge_underscores cx) - && Signature_utils.is_munged_property_name name - -and lookup_prop cx trace l reason_prop reason_op strict x action = - let l = - (* munge names beginning with single _ *) - if is_munged_prop_name cx x - then ObjProtoT (reason_of_t l) - else l - in - let propref = Named (reason_prop, x) in - rec_flow cx trace (l, LookupT (reason_op, strict, [], propref, action)) - -and access_prop cx trace reason_prop reason_op strict super x pmap action = - match SMap.get x pmap with - | Some p -> - perform_lookup_action cx trace (Named (reason_prop, x)) p reason_prop reason_op action - | None -> - lookup_prop cx trace super reason_prop reason_op strict x action - -and get_prop cx trace ~use_op reason_prop reason_op strict l super x map tout = - RWProp (use_op, l, tout, Read) - |> access_prop cx trace reason_prop reason_op strict super x map - -and match_prop cx trace ~use_op reason_prop reason_op strict super x pmap prop_t = - MatchProp (use_op, prop_t) - |> access_prop cx trace reason_prop reason_op strict super x pmap - -and set_prop cx ?(wr_ctx=Normal) trace ~use_op reason_prop reason_op strict l super x pmap tin prop_t = - RWProp (use_op, l, tin, Write (wr_ctx, prop_t)) - |> access_prop cx trace reason_prop reason_op strict super x pmap - -and get_obj_prop cx trace o propref reason_op = - let named_prop = match propref with - | Named (_, x) -> Context.get_prop cx o.props_tmap x - | Computed _ -> None - in - match propref, named_prop, o.dict_t with - | _, Some _, _ -> - (* Property exists on this property map *) - named_prop - | Named (_, x), None, Some { key; value; dict_polarity; _ } - when not (is_dictionary_exempt x) -> - (* Dictionaries match all property reads *) - rec_flow_t cx trace (string_key x reason_op, key); - Some (Field (None, value, dict_polarity)) - | Computed k, None, Some { key; value; dict_polarity; _ } -> - rec_flow_t cx trace (k, key); - Some (Field (None, value, dict_polarity)) - | _ -> None - -and read_obj_prop cx trace ~use_op o propref reason_obj reason_op tout = - let l = DefT (reason_obj, ObjT o) in - (match get_obj_prop cx trace o propref reason_op with - | Some p -> - let action = RWProp (use_op, l, tout, Read) in - perform_lookup_action cx trace propref p reason_obj reason_op action - | None -> - match propref with - | Named _ -> - let strict = - if Obj_type.sealed_in_op reason_op o.flags.sealed - then Strict reason_obj - else ShadowRead (None, Nel.one o.props_tmap) - in - rec_flow cx trace (o.proto_t, - LookupT (reason_op, strict, [], propref, RWProp (use_op, l, tout, Read))) - | Computed elem_t -> - match elem_t with - | OpenT _ -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, PropRefComputedOpen)) - | DefT (_, StrT Literal _) -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, PropRefComputedLiteral)) - | DefT (_, AnyT) | DefT (_, StrT _) | DefT (_, NumT _) -> - (* any, string, and number keys are allowed, but there's nothing else to - flow without knowing their literal values. 
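The lookup order implemented by get_obj_prop above (named properties first, then the dictionary/indexer for non-exempt keys) can be pictured with ordinary association lists. Everything below is a simplified model with hypothetical names (get_obj_prop_model, prop_source), not Flow's property maps.

type 'p prop_source =
  | FromPropertyMap of 'p   (* found in the object's own property map *)
  | FromIndexer of 'p       (* matched by the dictionary/indexer *)

let get_obj_prop_model ~named ~dict ~is_exempt key =
  match List.assoc_opt key named with
  | Some p -> Some (FromPropertyMap p)
  | None ->
    (match dict with
    | Some value when not (is_exempt key) -> Some (FromIndexer value)
    | _ -> None)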
*) - rec_flow_t cx trace (AnyT.why reason_op, tout) - | _ -> - let reason_prop = reason_of_t elem_t in - add_output cx ~trace (FlowError.EObjectComputedPropertyAccess - (reason_op, reason_prop))) - -and writelike_obj_prop cx trace ~use_op o propref reason_obj reason_op prop_t action = - match get_obj_prop cx trace o propref reason_op with - | Some p -> - perform_lookup_action cx trace propref p reason_obj reason_op action - | None -> - match propref with - | Named (reason_prop, prop) -> - let sealed = Obj_type.sealed_in_op reason_op o.flags.sealed in - if sealed && o.flags.exact - then - add_output cx ~trace (FlowError.EPropNotFound - (Some prop, (reason_prop, reason_obj), use_op)) + and is_munged_prop_name cx name = + is_munged_prop_name_with_munge + name + ~should_munge_underscores:(Context.should_munge_underscores cx) + + and is_munged_prop_name_with_munge name ~should_munge_underscores = + Signature_utils.is_munged_property_name name && should_munge_underscores + + and lookup_prop cx trace l reason_prop reason_op strict x action = + let l = + (* munge names beginning with single _ *) + if is_munged_prop_name cx x then + ObjProtoT (reason_of_t l) else + l + in + let propref = Named (reason_prop, x) in + rec_flow cx trace (l, LookupT (reason_op, strict, [], propref, action)) + + and access_prop cx trace reason_prop reason_op strict super x pmap action = + match SMap.get x pmap with + | Some p -> + perform_lookup_action + cx + trace + (Named (reason_prop, x)) + p + PropertyMapProperty + reason_prop + reason_op + action + | None -> lookup_prop cx trace super reason_prop reason_op strict x action + + and get_prop cx trace ~use_op reason_prop reason_op strict l super x map tout = + ReadProp { use_op; obj_t = l; tout } + |> access_prop cx trace reason_prop reason_op strict super x map + + and match_prop cx trace ~use_op reason_prop reason_op strict super x pmap prop_t = + MatchProp (use_op, prop_t) |> access_prop cx trace reason_prop reason_op strict super x pmap + + and set_prop + cx + ?(wr_ctx = Normal) + ~mode + trace + ~use_op + reason_prop + reason_op + strict + l + super + x + pmap + tin + prop_tout = + let action = WriteProp { use_op; obj_t = l; prop_tout; tin; write_ctx = wr_ctx; mode } in + access_prop cx trace reason_prop reason_op strict super x pmap action + + and get_obj_prop cx trace o propref reason_op = + let named_prop = + match propref with + | Named (_, x) -> Context.get_prop cx o.props_tmap x + | Computed _ -> None + in + match (propref, named_prop, o.dict_t) with + | (_, Some prop, _) -> + (* Property exists on this property map *) + Some (prop, PropertyMapProperty) + | (Named (_, x), None, Some { key; value; dict_polarity; _ }) when not (is_dictionary_exempt x) + -> + (* Dictionaries match all property reads *) + rec_flow_t cx trace (string_key x reason_op, key); + Some (Field (None, value, dict_polarity), IndexerProperty) + | (Computed k, None, Some { key; value; dict_polarity; _ }) -> + rec_flow_t cx trace (k, key); + Some (Field (None, value, dict_polarity), IndexerProperty) + | _ -> None + + and read_obj_prop cx trace ~use_op o propref reason_obj reason_op tout = + let l = DefT (reason_obj, bogus_trust (), ObjT o) in + match get_obj_prop cx trace o propref reason_op with + | Some (p, target_kind) -> + let action = ReadProp { use_op; obj_t = l; tout } in + perform_lookup_action cx trace propref p target_kind reason_obj reason_op action + | None -> + (match propref with + | Named _ -> let strict = - if sealed - then Strict reason_obj - else ShadowWrite (Nel.one 
o.props_tmap) + if Obj_type.sealed_in_op reason_op o.flags.sealed then + Strict reason_obj + else + ShadowRead (None, Nel.one o.props_tmap) in - rec_flow cx trace (o.proto_t, LookupT (reason_op, strict, [], propref, action)) - | Computed elem_t -> - match elem_t with - | OpenT _ -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, PropRefComputedOpen)) - | DefT (_, StrT Literal _) -> - let loc = loc_of_t elem_t in - add_output cx ~trace FlowError.(EInternal (loc, PropRefComputedLiteral)) - | DefT (_, AnyT) | DefT (_, StrT _) | DefT (_, NumT _) -> - (* any, string, and number keys are allowed, but there's nothing else to + rec_flow + cx + trace + ( o.proto_t, + LookupT (reason_op, strict, [], propref, ReadProp { use_op; obj_t = l; tout }) ) + | Computed elem_t -> + (match elem_t with + | OpenT _ -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, PropRefComputedOpen)) + | DefT (_, _, StrT (Literal _)) -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, PropRefComputedLiteral)) + | AnyT _ -> rec_flow_t cx trace (AnyT.untyped reason_op, tout) + | DefT (_, _, StrT _) + | DefT (_, _, NumT _) -> + (* string, and number keys are allowed, but there's nothing else to flow without knowing their literal values. *) - rec_flow_t cx trace (prop_t, AnyT.why reason_op) - | _ -> - let reason_prop = reason_of_t elem_t in - add_output cx ~trace (FlowError.EObjectComputedPropertyAssign - (reason_op, reason_prop)) - -and match_obj_prop cx trace ~use_op o propref reason_obj reason_op prop_t = - MatchProp (use_op, prop_t) - |> writelike_obj_prop cx trace ~use_op o propref reason_obj reason_op prop_t - -and write_obj_prop cx trace ~use_op o propref reason_obj reason_op tin prop_t = - RWProp (use_op, DefT (reason_obj, ObjT o), tin, Write (Normal, prop_t)) - |> writelike_obj_prop cx trace ~use_op o propref reason_obj reason_op tin - -and find_or_intro_shadow_prop cx trace reason_op x prop_loc = - let intro_shadow_prop id = - let reason_prop = replace_reason_const (RShadowProperty x) reason_op in - let t = Tvar.mk cx reason_prop in - let p = Field (Some prop_loc, t, Neutral) in - Context.set_prop cx id (internal_name x) p; - t, p - in - - (* Given some shadow property type and a prototype chain (o.proto, - * o.proto.proto, ...), link all types along the prototype chain together. - * If there is a write to the prototype later on, we unify the property types - * together. If there is no write, the property types are safely independent. - *) - let rec chain_link t = function - | [] -> () - | id::ids -> - let t_proto = Property.assert_field (find (id, ids)) in - rec_flow cx trace (t_proto, UnifyT (t_proto, t)) - - (* Check at each step to see if a prop was added since we looked. - * - * Imports and builtins are merged in after local inference, potentially - * deferring multiple shadow reads/writes on a tvar. If this shadow read - * follow a deferred shadow write, a property will exist. If it follows a - * deferred shadow read, a shadow property will exist. In either case, we - * don't need to create a shadow property, nor do we need to continue - * unifying up the proto chain, as the work is necessarily already done. 
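The shadow-property bookkeeping described in the comment above is, at its core, a find-or-introduce pattern: re-check for a (possibly shadow) property before creating one, so deferred reads and writes that meet at the same tvar share state. A generic sketch, with Hashtbl standing in for Flow's property maps and find_or_intro as a hypothetical name:

(* Generic find-or-introduce: only create the entry if neither a real nor a
   previously introduced one exists. *)
let find_or_intro tbl key make =
  match Hashtbl.find_opt tbl key with
  | Some v -> v
  | None ->
    let v = make () in
    Hashtbl.add tbl key v;
    v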
- *) - and find (id, proto_ids) = - match Context.get_prop cx id x with - | Some p -> p + rec_flow_t cx trace (Unsoundness.why ComputedNonLiteralKey reason_op, tout) + | _ -> + let reason_prop = reason_of_t elem_t in + add_output + cx + ~trace + (Error_message.EObjectComputedPropertyAccess (reason_op, reason_prop)))) + + and writelike_obj_prop cx trace ~use_op o propref reason_obj reason_op prop_t action = + match get_obj_prop cx trace o propref reason_op with + | Some (p, target_kind) -> + perform_lookup_action cx trace propref p target_kind reason_obj reason_op action | None -> - match Context.get_prop cx id (internal_name x) with + (match propref with + | Named (reason_prop, prop) -> + let sealed = Obj_type.sealed_in_op reason_op o.flags.sealed in + if sealed && o.flags.exact then + add_output + cx + ~trace + (Error_message.EPropNotFound (Some prop, (reason_prop, reason_obj), use_op)) + else + let strict = + if sealed then + Strict reason_obj + else + ShadowWrite (Nel.one o.props_tmap) + in + rec_flow cx trace (o.proto_t, LookupT (reason_op, strict, [], propref, action)) + | Computed elem_t -> + (match elem_t with + | OpenT _ -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, PropRefComputedOpen)) + | DefT (_, _, StrT (Literal _)) -> + let loc = loc_of_t elem_t in + add_output cx ~trace Error_message.(EInternal (loc, PropRefComputedLiteral)) + | AnyT _ -> rec_flow_t cx trace (prop_t, AnyT.untyped reason_op) + | DefT (_, _, StrT _) + | DefT (_, _, NumT _) -> + (* string and number keys are allowed, but there's nothing else to + flow without knowing their literal values. *) + rec_flow_t cx trace (prop_t, Unsoundness.why ComputedNonLiteralKey reason_op) + | _ -> + let reason_prop = reason_of_t elem_t in + add_output + cx + ~trace + (Error_message.EObjectComputedPropertyAssign (reason_op, reason_prop)))) + + and match_obj_prop cx trace ~use_op o propref reason_obj reason_op prop_t = + MatchProp (use_op, prop_t) + |> writelike_obj_prop cx trace ~use_op o propref reason_obj reason_op prop_t + + and write_obj_prop cx trace ~use_op ~mode o propref reason_obj reason_op tin prop_tout = + let obj_t = DefT (reason_obj, bogus_trust (), ObjT o) in + let action = WriteProp { use_op; obj_t; prop_tout; tin; write_ctx = Normal; mode } in + writelike_obj_prop cx trace ~use_op o propref reason_obj reason_op tin action + + and match_shape cx trace ~use_op proto reason props = + (* TODO: ShapeT should have its own reason *) + let reason_op = reason_of_t proto in + SMap.iter + (fun x p -> + let reason_prop = update_desc_reason (fun desc -> RPropertyOf (x, desc)) reason in + match Property.read_t p with + | Some t -> + let use_op = + Frame + (PropertyCompatibility { prop = Some x; upper = reason; lower = reason_op }, use_op) + in + let propref = Named (reason_prop, x) in + let t = filter_optional cx ~trace reason_prop t in + rec_flow cx trace (proto, MatchPropT (use_op, reason_op, propref, t)) + | None -> + add_output + cx + ~trace + (Error_message.EPropNotReadable { reason_prop; prop_name = Some x; use_op })) + props + + and find_or_intro_shadow_prop cx trace reason_op x prop_loc = + let intro_shadow_prop id = + let reason_prop = replace_desc_reason (RShadowProperty x) reason_op in + let t = Tvar.mk cx reason_prop in + let p = Field (Some prop_loc, t, Polarity.Neutral) in + Context.set_prop cx id (internal_name x) p; + (t, p) + in + (* Given some shadow property type and a prototype chain (o.proto, + * o.proto.proto, ...), link all types along the prototype chain together. 
+ * If there is a write to the prototype later on, we unify the property types + * together. If there is no write, the property types are safely independent. + *) + let rec chain_link t = function + | [] -> () + | id :: ids -> + let t_proto = Property.assert_field (find (id, ids)) in + rec_flow cx trace (t_proto, UnifyT (t_proto, t)) + (* Check at each step to see if a prop was added since we looked. + * + * Imports and builtins are merged in after local inference, potentially + * deferring multiple shadow reads/writes on a tvar. If this shadow read + * follow a deferred shadow write, a property will exist. If it follows a + * deferred shadow read, a shadow property will exist. In either case, we + * don't need to create a shadow property, nor do we need to continue + * unifying up the proto chain, as the work is necessarily already done. + *) + and find (id, proto_ids) = + match Context.get_prop cx id x with | Some p -> p | None -> - let t, p = intro_shadow_prop id in - chain_link t proto_ids; - p - - in find - -(* filter out undefined from a type *) -and filter_optional cx ?trace reason opt_t = - Tvar.mk_where cx reason (fun t -> - flow_opt_t cx ?trace (opt_t, DefT (reason, OptionalT t)) - ) - -(* filter out undefined and null from a type *) -and filter_maybe cx ?trace reason maybe_t = - Tvar.mk_where cx reason (fun t -> - flow_opt_t cx ?trace (maybe_t, DefT (reason, MaybeT t)) - ) - -and update_sketchy_null cx opt_loc t = - let open ExistsCheck in - match t with - (* Ignore AnyTs for sketchy null checks; otherwise they'd always trigger the lint. *) - | DefT (_, AnyT) -> () - | _ -> - match opt_loc with - | None -> () - | Some loc -> - let t_loc = - let reason = reason_of_t t in - match annot_loc_of_reason reason with - | Some loc -> Some loc - | None -> Some (def_loc_of_reason reason) - in - let exists_checks = Context.exists_checks cx in - let exists_check = LocMap.get loc exists_checks |> Option.value ~default:ExistsCheck.empty in - let exists_check = match Type_filter.maybe t with - | DefT (_, EmptyT) -> exists_check - | _ -> {exists_check with null_loc = t_loc} - in - let exists_check = - match t |> Type_filter.not_exists |> Type_filter.not_maybe with - | DefT (_, BoolT _) -> {exists_check with bool_loc = t_loc} - | DefT (_, StrT _) -> {exists_check with string_loc = t_loc} - | DefT (_, NumT _) -> {exists_check with number_loc = t_loc} - | DefT (_, MixedT _) -> {exists_check with mixed_loc = t_loc} - | _ -> exists_check - in - let exists_checks = if exists_check = ExistsCheck.empty - then exists_checks - else LocMap.add loc exists_check exists_checks - in - Context.set_exists_checks cx exists_checks - -(**********) -(* guards *) -(**********) - -and guard cx trace source pred result sink = match pred with + (match Context.get_prop cx id (internal_name x) with + | Some p -> p + | None -> + let (t, p) = intro_shadow_prop id in + chain_link t proto_ids; + p) + in + find -| ExistsP loc -> - update_sketchy_null cx loc source; - begin match Type_filter.exists source with - | DefT (_, EmptyT) -> () - | _ -> rec_flow_t cx trace (result, sink) - end + (* filter out undefined from a type *) + and filter_optional cx ?trace reason opt_t = + Tvar.mk_where cx reason (fun t -> flow_opt_t cx ?trace (opt_t, OptionalT (reason, t))) -| NotP (ExistsP loc) -> - update_sketchy_null cx loc source; - begin match Type_filter.not_exists source with - | DefT (_, EmptyT) -> () - | _ -> rec_flow_t cx trace (result, sink) - end + (* filter out undefined and null from a type *) + and filter_maybe cx ?trace reason 
maybe_t = + Tvar.mk_where cx reason (fun t -> flow_opt_t cx ?trace (maybe_t, MaybeT (reason, t))) -| _ -> - let loc = aloc_of_reason (reason_of_t sink) in - let pred_str = string_of_predicate pred in - add_output cx ~trace - FlowError.(EInternal (loc |> ALoc.to_loc, UnsupportedGuardPredicate pred_str)) + and update_sketchy_null cx opt_loc t = + ExistsCheck.( + match t with + (* Ignore AnyTs for sketchy null checks; otherwise they'd always trigger the lint. *) + | AnyT _ -> () + | _ -> + (match opt_loc with + | None -> () + | Some loc -> + let t_loc = + let reason = reason_of_t t in + match annot_aloc_of_reason reason with + | Some loc -> Some loc + | None -> Some (def_aloc_of_reason reason) + in + let exists_checks = Context.exists_checks cx in + let exists_check = + ALocMap.get loc exists_checks |> Option.value ~default:ExistsCheck.empty + in + let exists_check = + match Type_filter.maybe t with + | DefT (_, _, EmptyT _) -> exists_check + | _ -> { exists_check with null_loc = t_loc } + in + let exists_check = + match t |> Type_filter.not_exists |> Type_filter.not_maybe with + | DefT (_, _, BoolT _) -> { exists_check with bool_loc = t_loc } + | DefT (_, _, StrT _) -> { exists_check with string_loc = t_loc } + | DefT (_, _, NumT _) -> { exists_check with number_loc = t_loc } + | DefT (_, _, MixedT _) -> { exists_check with mixed_loc = t_loc } + | _ -> exists_check + in + let exists_checks = + if exists_check = ExistsCheck.empty then + exists_checks + else + ALocMap.add loc exists_check exists_checks + in + Context.set_exists_checks cx exists_checks)) + + (**********) + (* guards *) + (**********) + and guard cx trace source pred result sink = + match pred with + | ExistsP loc -> + update_sketchy_null cx loc source; + begin + match Type_filter.exists source with + | DefT (_, _, EmptyT _) -> () + | _ -> rec_flow_t cx trace (result, sink) + end + | NotP (ExistsP loc) -> + update_sketchy_null cx loc source; + begin + match Type_filter.not_exists source with + | DefT (_, _, EmptyT _) -> () + | _ -> rec_flow_t cx trace (result, sink) + end + | _ -> + let loc = aloc_of_reason (reason_of_t sink) in + let pred_str = string_of_predicate pred in + add_output cx ~trace Error_message.(EInternal (loc, UnsupportedGuardPredicate pred_str)) -(**************) -(* predicates *) -(**************) + (**************) + (* predicates *) + (**************) -(* t - predicate output recipient (normally a tvar) + (* t - predicate output recipient (normally a tvar) l - incoming concrete LB (predicate input) result - guard result in case of success p - predicate *) -and predicate cx trace t l p = match p with - - (************************) - (* deconstruction of && *) - (************************) - - | AndP (p1,p2) -> - let reason = replace_reason_const RAnd (reason_of_t t) in - let tvar = Tvar.mk cx reason in - rec_flow cx trace (l,PredicateT(p1,tvar)); - rec_flow cx trace (tvar,PredicateT(p2,t)) - - (************************) - (* deconstruction of || *) - (************************) - - | OrP (p1, p2) -> - rec_flow cx trace (l,PredicateT(p1,t)); - rec_flow cx trace (l,PredicateT(p2,t)) - - (*********************************) - (* deconstruction of binary test *) - (*********************************) - - (* when left is evaluated, store it and evaluate right *) - | LeftP (b, r) -> - rec_flow cx trace (r, PredicateT(RightP(b, l), t)) - | NotP LeftP (b, r) -> - rec_flow cx trace (r, PredicateT(NotP(RightP(b, l)), t)) - - (* when right is evaluated, call appropriate handler *) - | RightP (b, actual_l) -> - let r = l in - 
let l = actual_l in - binary_predicate cx trace true b l r t - | NotP RightP (b, actual_l) -> - let r = l in - let l = actual_l in - binary_predicate cx trace false b l r t - - (***********************) - (* typeof _ ~ "boolean" *) - (***********************) - - | BoolP -> - rec_flow_t cx trace (Type_filter.boolean l, t) - - | NotP BoolP -> - rec_flow_t cx trace (Type_filter.not_boolean l, t) - - (***********************) - (* typeof _ ~ "string" *) - (***********************) - - | StrP -> - rec_flow_t cx trace (Type_filter.string l, t) - - | NotP StrP -> - rec_flow_t cx trace (Type_filter.not_string l, t) - - (*********************) - (* _ ~ "some string" *) - (*********************) - - | SingletonStrP (expected_loc, sense, lit) -> - let filtered_str = Type_filter.string_literal expected_loc sense lit l in - rec_flow_t cx trace (filtered_str, t) - - | NotP SingletonStrP (_, _, lit) -> - let filtered_str = Type_filter.not_string_literal lit l in - rec_flow_t cx trace (filtered_str, t) - - (*********************) - (* _ ~ some number n *) - (*********************) - - | SingletonNumP (expected_loc, sense, lit) -> - let filtered_num = Type_filter.number_literal expected_loc sense lit l in - rec_flow_t cx trace (filtered_num, t) - - | NotP SingletonNumP (_, _, lit) -> - let filtered_num = Type_filter.not_number_literal lit l in - rec_flow_t cx trace (filtered_num, t) - - (***********************) - (* typeof _ ~ "number" *) - (***********************) - - | NumP -> - rec_flow_t cx trace (Type_filter.number l, t) - - | NotP NumP -> - rec_flow_t cx trace (Type_filter.not_number l, t) - - (***********************) - (* typeof _ ~ "function" *) - (***********************) - - | FunP -> - rec_flow_t cx trace (Type_filter.function_ l, t) - - | NotP FunP -> - rec_flow_t cx trace (Type_filter.not_function l, t) - - (***********************) - (* typeof _ ~ "object" *) - (***********************) - - | ObjP -> - rec_flow_t cx trace (Type_filter.object_ cx l, t) - - | NotP ObjP -> - rec_flow_t cx trace (Type_filter.not_object l, t) - - (*******************) - (* Array.isArray _ *) - (*******************) - - | ArrP -> - rec_flow_t cx trace (Type_filter.array l, t) - - | NotP ArrP -> - rec_flow_t cx trace (Type_filter.not_array l, t) - - (***********************) - (* typeof _ ~ "undefined" *) - (***********************) - - | VoidP -> - let filtered = Type_filter.undefined l in - rec_flow_t cx trace (filtered, t) - - | NotP VoidP -> - let filtered = Type_filter.not_undefined l in - rec_flow_t cx trace (filtered, t) - - (********) - (* null *) - (********) - - | NullP -> - let filtered = Type_filter.null l in - rec_flow_t cx trace (filtered, t) - - | NotP NullP -> - let filtered = Type_filter.not_null l in - rec_flow_t cx trace (filtered, t) - - (*********) - (* maybe *) - (*********) - - | MaybeP -> - let filtered = Type_filter.maybe l in - rec_flow_t cx trace (filtered, t) - - | NotP MaybeP -> - let filtered = Type_filter.not_maybe l in - rec_flow_t cx trace (filtered, t) - - (********) - (* true *) - (********) - - | SingletonBoolP true -> - let filtered = Type_filter.true_ l in - rec_flow_t cx trace (filtered, t) - - | NotP (SingletonBoolP true) -> - let filtered = Type_filter.not_true l in - rec_flow_t cx trace (filtered, t) - - (*********) - (* false *) - (*********) - - | SingletonBoolP false -> - let filtered = Type_filter.false_ l in - rec_flow_t cx trace (filtered, t) + and predicate cx trace t l p = + match p with + (************************) + (* deconstruction of && *) + 
(************************) + | AndP (p1, p2) -> + let reason = replace_desc_reason RAnd (reason_of_t t) in + let tvar = Tvar.mk cx reason in + rec_flow cx trace (l, PredicateT (p1, tvar)); + rec_flow cx trace (tvar, PredicateT (p2, t)) + (************************) + (* deconstruction of || *) + (************************) + | OrP (p1, p2) -> + rec_flow cx trace (l, PredicateT (p1, t)); + rec_flow cx trace (l, PredicateT (p2, t)) + (*********************************) + (* deconstruction of binary test *) + (*********************************) - | NotP (SingletonBoolP false) -> - let filtered = Type_filter.not_false l in - rec_flow_t cx trace (filtered, t) + (* when left is evaluated, store it and evaluate right *) + | LeftP (b, r) -> rec_flow cx trace (r, PredicateT (RightP (b, l), t)) + | NotP (LeftP (b, r)) -> rec_flow cx trace (r, PredicateT (NotP (RightP (b, l)), t)) + (* when right is evaluated, call appropriate handler *) + | RightP (b, actual_l) -> + let r = l in + let l = actual_l in + binary_predicate cx trace true b l r t + | NotP (RightP (b, actual_l)) -> + let r = l in + let l = actual_l in + binary_predicate cx trace false b l r t + (***********************) + (* typeof _ ~ "boolean" *) + (***********************) + | BoolP -> rec_flow_t cx trace (Type_filter.boolean l, t) + | NotP BoolP -> rec_flow_t cx trace (Type_filter.not_boolean l, t) + (***********************) + (* typeof _ ~ "string" *) + (***********************) + | StrP -> rec_flow_t cx trace (Type_filter.string l, t) + | NotP StrP -> rec_flow_t cx trace (Type_filter.not_string l, t) + (***********************) + (* typeof _ ~ "symbol" *) + (***********************) + | SymbolP -> rec_flow_t cx trace (Type_filter.symbol l, t) + | NotP SymbolP -> rec_flow_t cx trace (Type_filter.not_symbol l, t) + (*********************) + (* _ ~ "some string" *) + (*********************) + | SingletonStrP (expected_loc, sense, lit) -> + let filtered_str = Type_filter.string_literal expected_loc sense lit l in + rec_flow_t cx trace (filtered_str, t) + | NotP (SingletonStrP (_, _, lit)) -> + let filtered_str = Type_filter.not_string_literal lit l in + rec_flow_t cx trace (filtered_str, t) + (*********************) + (* _ ~ some number n *) + (*********************) + | SingletonNumP (expected_loc, sense, lit) -> + let filtered_num = Type_filter.number_literal expected_loc sense lit l in + rec_flow_t cx trace (filtered_num, t) + | NotP (SingletonNumP (_, _, lit)) -> + let filtered_num = Type_filter.not_number_literal lit l in + rec_flow_t cx trace (filtered_num, t) + (***********************) + (* typeof _ ~ "number" *) + (***********************) + | NumP -> rec_flow_t cx trace (Type_filter.number l, t) + | NotP NumP -> rec_flow_t cx trace (Type_filter.not_number l, t) + (***********************) + (* typeof _ ~ "function" *) + (***********************) + | FunP -> rec_flow_t cx trace (Type_filter.function_ l, t) + | NotP FunP -> rec_flow_t cx trace (Type_filter.not_function l, t) + (***********************) + (* typeof _ ~ "object" *) + (***********************) + | ObjP -> rec_flow_t cx trace (Type_filter.object_ cx l, t) + | NotP ObjP -> rec_flow_t cx trace (Type_filter.not_object l, t) + (*******************) + (* Array.isArray _ *) + (*******************) + | ArrP -> rec_flow_t cx trace (Type_filter.array l, t) + | NotP ArrP -> rec_flow_t cx trace (Type_filter.not_array l, t) + (***********************) + (* typeof _ ~ "undefined" *) + (***********************) + | VoidP -> + let filtered = Type_filter.undefined l in + rec_flow_t 
cx trace (filtered, t) + | NotP VoidP -> + let filtered = Type_filter.not_undefined l in + rec_flow_t cx trace (filtered, t) + (********) + (* null *) + (********) + | NullP -> + let filtered = Type_filter.null l in + rec_flow_t cx trace (filtered, t) + | NotP NullP -> + let filtered = Type_filter.not_null l in + rec_flow_t cx trace (filtered, t) + (*********) + (* maybe *) + (*********) + | MaybeP -> + let filtered = Type_filter.maybe l in + rec_flow_t cx trace (filtered, t) + | NotP MaybeP -> + let filtered = Type_filter.not_maybe l in + rec_flow_t cx trace (filtered, t) + (********) + (* true *) + (********) + | SingletonBoolP (_, true) -> + let filtered = Type_filter.true_ l in + rec_flow_t cx trace (filtered, t) + | NotP (SingletonBoolP (_, true)) -> + let filtered = Type_filter.not_true l in + rec_flow_t cx trace (filtered, t) + (*********) + (* false *) + (*********) + | SingletonBoolP (_, false) -> + let filtered = Type_filter.false_ l in + rec_flow_t cx trace (filtered, t) + | NotP (SingletonBoolP (_, false)) -> + let filtered = Type_filter.not_false l in + rec_flow_t cx trace (filtered, t) + (************************) + (* truthyness *) + (************************) + | ExistsP loc -> + update_sketchy_null cx loc l; + let filtered = Type_filter.exists l in + rec_flow_t cx trace (filtered, t) + | NotP (ExistsP loc) -> + update_sketchy_null cx loc l; + let filtered = Type_filter.not_exists l in + rec_flow_t cx trace (filtered, t) + | PropExistsP (key, loc) -> + update_sketchy_null cx loc l; + prop_exists_test cx trace key true l t + | NotP (PropExistsP (key, loc)) -> + update_sketchy_null cx loc l; + prop_exists_test cx trace key false l t + (* unreachable *) + | NotP (NotP _) + | NotP (AndP _) + | NotP (OrP _) -> + assert_false (spf "Unexpected predicate %s" (string_of_predicate p)) + (********************) + (* Latent predicate *) + (********************) + | LatentP (fun_t, idx) -> + let reason = update_desc_reason (fun desc -> RPredicateCall desc) (reason_of_t fun_t) in + rec_flow cx trace (fun_t, CallLatentPredT (reason, true, idx, l, t)) + | NotP (LatentP (fun_t, idx)) -> + let neg_reason = + update_desc_reason (fun desc -> RPredicateCallNeg desc) (reason_of_t fun_t) + in + rec_flow cx trace (fun_t, CallLatentPredT (neg_reason, false, idx, l, t)) - (************************) - (* truthyness *) - (************************) + and prop_exists_test cx trace key sense obj result = + prop_exists_test_generic key cx trace result obj sense obj - | ExistsP loc -> - update_sketchy_null cx loc l; - let filtered = Type_filter.exists l in - rec_flow_t cx trace (filtered, t) - - | NotP (ExistsP loc) -> - update_sketchy_null cx loc l; - let filtered = Type_filter.not_exists l in - rec_flow_t cx trace (filtered, t) - - | PropExistsP (reason, key, loc) -> - update_sketchy_null cx loc l; - prop_exists_test cx trace reason key true l t - - | NotP (PropExistsP (reason, key, loc)) -> - update_sketchy_null cx loc l; - prop_exists_test cx trace reason key false l t - - (* unreachable *) - | NotP (NotP _) - | NotP (AndP _) - | NotP (OrP _) -> - assert_false (spf "Unexpected predicate %s" (string_of_predicate p)) - - (********************) - (* Latent predicate *) - (********************) - - | LatentP (fun_t, idx) -> - let reason = replace_reason (fun desc -> - RPredicateCall desc - ) (reason_of_t fun_t) in - rec_flow cx trace (fun_t, CallLatentPredT (reason, true, idx, l, t)) - - | NotP (LatentP (fun_t, idx)) -> - let neg_reason = replace_reason (fun desc -> - RPredicateCallNeg desc - ) (reason_of_t 
fun_t) in - rec_flow cx trace (fun_t, - CallLatentPredT (neg_reason, false, idx, l, t)) - -and prop_exists_test cx trace reason key sense obj result = - prop_exists_test_generic reason key cx trace result obj sense obj - -and prop_exists_test_generic - reason key cx trace result orig_obj sense = function - | DefT (lreason, ObjT { flags; props_tmap; _}) as obj -> - (match Context.get_prop cx props_tmap key with - | Some p -> - (match Property.read_t p with - | Some t -> - (* prop is present on object type *) - let pred = if sense then ExistsP None else NotP (ExistsP None) in - rec_flow cx trace (t, GuardT (pred, orig_obj, result)) + and prop_exists_test_generic key cx trace result orig_obj sense = function + | DefT (lreason, _, ObjT { flags; props_tmap; _ }) as obj -> + (match Context.get_prop cx props_tmap key with + | Some p -> + (match Property.read_t p with + | Some t -> + (* prop is present on object type *) + let pred = + if sense then + ExistsP None + else + NotP (ExistsP None) + in + rec_flow cx trace (t, GuardT (pred, orig_obj, result)) + | None -> + (* prop cannot be read *) + add_output + cx + ~trace + (Error_message.EPropNotReadable + { reason_prop = lreason; prop_name = Some key; use_op = unknown_use })) + | None when flags.exact && Obj_type.sealed_in_op (reason_of_t result) flags.sealed -> + (* prop is absent from exact object type *) + if sense then + () + else + rec_flow_t cx trace (orig_obj, result) | None -> - (* prop cannot be read *) - add_output cx ~trace (FlowError.EPropAccess ( - (lreason, reason), Some key, Property.polarity p, Read, unknown_use - )) - ) - | None when flags.exact && Obj_type.sealed_in_op (reason_of_t result) flags.sealed -> - (* prop is absent from exact object type *) - if sense - then () - else rec_flow_t cx trace (orig_obj, result) - | None -> - (* prop is absent from inexact object type *) - (* TODO: possibly unsound to filter out orig_obj here, but if we don't, + (* prop is absent from inexact object type *) + (* TODO: possibly unsound to filter out orig_obj here, but if we don't, case elimination based on prop existence checking doesn't work for (disjoint unions of) intersections of objects, where the prop appears in a different branch of the intersection. It is easy to avoid this unsoundness with slightly more work, but will wait until a refactoring of property lookup lands to revisit. Tracked by #11301092. *) - if orig_obj = obj then rec_flow_t cx trace (orig_obj, result)) - - | DefT (_, IntersectionT rep) -> - (* For an intersection of object types, try the test for each object type in + if orig_obj = obj then rec_flow_t cx trace (orig_obj, result)) + | IntersectionT (_, rep) -> + (* For an intersection of object types, try the test for each object type in turn, while recording the original intersection so that we end up with the right refinement. See the comment on the implementation of IntersectionPreprocessKit for more details. 
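The three outcomes handled by prop_exists_test_generic above (a readable property, a property absent on an exact sealed object, and a property absent on an inexact object) reduce to a small decision table. The names below (prop_presence, classify_prop) are illustrative only.

type prop_presence =
  | Present        (* guard on the property's type *)
  | AbsentExact    (* exact + sealed: the sense-true branch is impossible *)
  | AbsentInexact  (* cannot conclude anything: keep the original object *)

let classify_prop ~has_prop ~exact ~sealed =
  if has_prop then Present
  else if exact && sealed then AbsentExact
  else AbsentInexact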
*) - let reason = reason_of_t result in - InterRep.members rep |> List.iter (fun obj -> - rec_flow cx trace (obj, - intersection_preprocess_kit reason - (PropExistsTest(sense, key, orig_obj, result)))) - - | _ -> - rec_flow_t cx trace (orig_obj, result) - -and binary_predicate cx trace sense test left right result = - let handler = - match test with - | InstanceofTest -> instanceof_test - | SentinelProp key -> sentinel_prop_test key - in - handler cx trace result (sense, left, right) + let reason = reason_of_t result in + InterRep.members rep + |> List.iter (fun obj -> + rec_flow + cx + trace + ( obj, + intersection_preprocess_kit reason (PropExistsTest (sense, key, orig_obj, result)) + )) + | _ -> rec_flow_t cx trace (orig_obj, result) + + and binary_predicate cx trace sense test left right result = + let handler = + match test with + | InstanceofTest -> instanceof_test + | SentinelProp key -> sentinel_prop_test key + in + handler cx trace result (sense, left, right) -and instanceof_test cx trace result = function - (** instanceof on an ArrT is a special case since we treat ArrT as its own + and instanceof_test cx trace result = function + (* instanceof on an ArrT is a special case since we treat ArrT as its own type, rather than an InstanceT of the Array builtin class. So, we resolve the ArrT to an InstanceT of Array, and redo the instanceof check. We do it at this stage instead of simply converting (ArrT, InstanceofP c) to (InstanceT(Array), InstanceofP c) because this allows c to be resolved first. *) - | true, - (DefT (reason, ArrT arrtype) as arr), - DefT (r, ClassT (DefT (_, (InstanceT _)) as a)) -> - - let elemt = elemt_of_arrtype reason arrtype in - - let right = extends_type r arr a in - let arrt = get_builtin_typeapp cx ~trace reason "Array" [elemt] in - rec_flow cx trace (arrt, PredicateT(LeftP(InstanceofTest, right), result)) - - | false, - (DefT (reason, ArrT arrtype) as arr), - DefT (r, ClassT (DefT (_, (InstanceT _)) as a)) -> - - let elemt = elemt_of_arrtype reason arrtype in - - let right = extends_type r arr a in - let arrt = get_builtin_typeapp cx ~trace reason "Array" [elemt] in - let pred = NotP(LeftP(InstanceofTest, right)) in - rec_flow cx trace (arrt, PredicateT (pred, result)) - - (** An object is considered `instanceof` a function F when it is constructed + | ( true, + (DefT (reason, _, ArrT arrtype) as arr), + DefT (r, _, ClassT (DefT (_, _, InstanceT _) as a)) ) -> + let elemt = elemt_of_arrtype arrtype in + let right = extends_type r arr a in + let arrt = get_builtin_typeapp cx ~trace reason "Array" [elemt] in + rec_flow cx trace (arrt, PredicateT (LeftP (InstanceofTest, right), result)) + | ( false, + (DefT (reason, _, ArrT arrtype) as arr), + DefT (r, _, ClassT (DefT (_, _, InstanceT _) as a)) ) -> + let elemt = elemt_of_arrtype arrtype in + let right = extends_type r arr a in + let arrt = get_builtin_typeapp cx ~trace reason "Array" [elemt] in + let pred = NotP (LeftP (InstanceofTest, right)) in + rec_flow cx trace (arrt, PredicateT (pred, result)) + (* An object is considered `instanceof` a function F when it is constructed by F. Note that this is incomplete with respect to the runtime semantics, where instanceof is transitive: if F.prototype `instanceof` G, then the object is `instanceof` G. There is nothing fundamentally difficult in modeling the complete semantics, but we haven't found a need to do it. 
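The deliberately non-transitive rule described in this comment can be restated in miniature; proto_of and prototype_of are assumed accessors introduced for the sketch, not Flow functions.

(* An object is treated as `instanceof` F exactly when its prototype is
   F.prototype; walking further up the proto chain is not modeled. *)
let instanceof_direct ~proto_of ~prototype_of obj f =
  proto_of obj = prototype_of f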
**) - | true, - (DefT (_, ObjT {proto_t = proto2; _}) as obj), - DefT (_, FunT (_, proto1, _)) + | (true, (DefT (_, _, ObjT { proto_t = proto2; _ }) as obj), DefT (_, _, FunT (_, proto1, _))) when proto1 = proto2 -> - - rec_flow_t cx trace (obj, result) - - (** Suppose that we have an instance x of class C, and we check whether x is + rec_flow_t cx trace (obj, result) + (* Suppose that we have an instance x of class C, and we check whether x is `instanceof` class A. To decide what the appropriate refinement for x should be, we need to decide whether C extends A, choosing either C or A based on the result. Thus, we generate a constraint to decide whether C @@ -9156,96 +9628,77 @@ and instanceof_test cx trace result = function class. (As a technical tool, we use Extends(_, _) to perform this recursion; it is also used elsewhere for running similar recursive subclass decisions.) **) - | true, - (DefT (_, InstanceT _) as c), - DefT (r, ClassT (DefT (_, (InstanceT _)) as a)) -> - predicate cx trace result - (extends_type r c a) - (RightP (InstanceofTest, c)) - - (** If C is a subclass of A, then don't refine the type of x. Otherwise, + | (true, (DefT (_, _, InstanceT _) as c), DefT (r, _, ClassT (DefT (_, _, InstanceT _) as a))) + -> + predicate cx trace result (extends_type r c a) (RightP (InstanceofTest, c)) + (* If C is a subclass of A, then don't refine the type of x. Otherwise, refine the type of x to A. (In general, the type of x should be refined to C & A, but that's hard to compute.) **) - | true, - DefT (reason, InstanceT (_, super_c, _, instance_c)), - (InternalT (ExtendsT (_, c, DefT (_, InstanceT (_, _, _, instance_a)))) as right) - -> (* TODO: intersection *) - - if instance_a.class_id = instance_c.class_id - then rec_flow_t cx trace (c, result) - else - (** Recursively check whether super(C) extends A, with enough context. **) - let pred = LeftP(InstanceofTest, right) in - let u = PredicateT(pred, result) in - rec_flow cx trace (super_c, ReposLowerT (reason, false, u)) - - (** If we are checking `instanceof Object` or `instanceof Function`, objects + | ( true, + DefT (reason, _, InstanceT (_, super_c, _, instance_c)), + (InternalT (ExtendsT (_, c, DefT (_, _, InstanceT (_, _, _, instance_a)))) as right) ) -> + (* TODO: intersection *) + if ALoc.concretize_equal (Context.aloc_tables cx) instance_a.class_id instance_c.class_id + then + rec_flow_t cx trace (c, result) + else + (* Recursively check whether super(C) extends A, with enough context. **) + let pred = LeftP (InstanceofTest, right) in + let u = PredicateT (pred, result) in + rec_flow cx trace (super_c, ReposLowerT (reason, false, u)) + (* If we are checking `instanceof Object` or `instanceof Function`, objects with `ObjProtoT` or `FunProtoT` should pass. 
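The subclass decision that the Extends(_, _) constraints encode can be read as a plain walk up the superclass chain. A conceptual sketch under that reading, where class_id and super are assumed accessors and extends_walk is a hypothetical name:

(* Walk C's superclass chain looking for A's class id; hitting the root
   class means C does not extend A. *)
let rec extends_walk ~class_id ~super c a_id =
  if class_id c = a_id then true
  else
    match super c with
    | None -> false
    | Some parent -> extends_walk ~class_id ~super parent a_id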
*) - | true, ObjProtoT reason, (InternalT (ExtendsT _) as right) -> - let obj_proto = get_builtin_type cx ~trace reason ~use_desc:true "Object" in - rec_flow cx trace (obj_proto, - PredicateT (LeftP (InstanceofTest, right), result)) - - | true, FunProtoT reason, (InternalT (ExtendsT _) as right) -> - let fun_proto = get_builtin_type cx ~trace reason ~use_desc:true "Function" in - rec_flow cx trace (fun_proto, - PredicateT (LeftP (InstanceofTest, right), result)) - - (** We hit the root class, so C is not a subclass of A **) - | true, DefT (_, NullT), InternalT (ExtendsT (r, _, a)) -> - rec_flow_t cx trace (reposition cx ~trace (aloc_of_reason r |> ALoc.to_loc) a, result) - - (** Prune the type when any other `instanceof` check succeeds (since this is + | (true, ObjProtoT reason, (InternalT (ExtendsT _) as right)) -> + let obj_proto = get_builtin_type cx ~trace reason ~use_desc:true "Object" in + rec_flow cx trace (obj_proto, PredicateT (LeftP (InstanceofTest, right), result)) + | (true, FunProtoT reason, (InternalT (ExtendsT _) as right)) -> + let fun_proto = get_builtin_type cx ~trace reason ~use_desc:true "Function" in + rec_flow cx trace (fun_proto, PredicateT (LeftP (InstanceofTest, right), result)) + (* We hit the root class, so C is not a subclass of A **) + | (true, DefT (_, _, NullT), InternalT (ExtendsT (r, _, a))) -> + rec_flow_t cx trace (reposition cx ~trace (aloc_of_reason r) a, result) + (* If we're refining mixed with instanceof A, then flow A to the result *) + | ( true, + DefT (_, _, MixedT _), + DefT (class_reason, _, ClassT (DefT (instance_reason, _, InstanceT _) as a)) ) -> + let desc = desc_of_reason instance_reason in + let loc = aloc_of_reason class_reason in + rec_flow_t cx trace (reposition cx ~trace ~desc loc a, result) + (* Prune the type when any other `instanceof` check succeeds (since this is impossible). *) - | true, _, _ -> - () - - | false, - DefT (_, ObjT {proto_t = proto2; _}), - DefT (_, FunT (_, proto1, _)) + | (true, _, _) -> () + | (false, DefT (_, _, ObjT { proto_t = proto2; _ }), DefT (_, _, FunT (_, proto1, _))) when proto1 = proto2 -> - () - - (** Like above, now suppose that we have an instance x of class C, and we + () + (* Like above, now suppose that we have an instance x of class C, and we check whether x is _not_ `instanceof` class A. To decide what the appropriate refinement for x should be, we need to decide whether C extends A, choosing either nothing or C based on the result. **) - | false, - (DefT (_, InstanceT _) as c), - DefT (r, ClassT (DefT (_, (InstanceT _)) as a)) -> - predicate cx trace result - (extends_type r c a) - (NotP(RightP(InstanceofTest, c))) - - (** If C is a subclass of A, then do nothing, since this check cannot + | (false, (DefT (_, _, InstanceT _) as c), DefT (r, _, ClassT (DefT (_, _, InstanceT _) as a))) + -> + predicate cx trace result (extends_type r c a) (NotP (RightP (InstanceofTest, c))) + (* If C is a subclass of A, then do nothing, since this check cannot succeed. Otherwise, don't refine the type of x. 
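Putting the positive and negative senses side by side, the refinement outcomes spelled out in these comments form a four-entry table. The type and function below (instanceof_outcome, instanceof_refinement) are illustrative, not part of Flow.

type instanceof_outcome =
  | Keep        (* leave x's type alone *)
  | RefineToA   (* narrow x to A (ideally C & A, which is hard to compute) *)
  | Prune       (* the branch is impossible; nothing flows to the result *)

let instanceof_refinement ~sense ~c_extends_a =
  match (sense, c_extends_a) with
  | (true, true) -> Keep        (* x is already known to be an A *)
  | (true, false) -> RefineToA
  | (false, true) -> Prune      (* `not (x instanceof A)` cannot succeed *)
  | (false, false) -> Keep      (* the negation adds no information *)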
**) - | false, - DefT (reason, InstanceT (_, super_c, _, instance_c)), - (InternalT (ExtendsT(_, _, DefT (_, InstanceT (_, _, _, instance_a)))) as right) - -> - - if instance_a.class_id = instance_c.class_id - then () - else - let u = PredicateT(NotP(LeftP(InstanceofTest, right)), result) in - rec_flow cx trace (super_c, ReposLowerT (reason, false, u)) - - | false, - ObjProtoT _, - InternalT (ExtendsT(r, c, _)) - -> - (** We hit the root class, so C is not a subclass of A **) - rec_flow_t cx trace (reposition cx ~trace (aloc_of_reason r |> ALoc.to_loc) c, result) - - (** Don't refine the type when any other `instanceof` check fails. **) - | false, left, _ -> - rec_flow_t cx trace (left, result) + | ( false, + DefT (reason, _, InstanceT (_, super_c, _, instance_c)), + (InternalT (ExtendsT (_, _, DefT (_, _, InstanceT (_, _, _, instance_a)))) as right) ) -> + if ALoc.concretize_equal (Context.aloc_tables cx) instance_a.class_id instance_c.class_id + then + () + else + let u = PredicateT (NotP (LeftP (InstanceofTest, right)), result) in + rec_flow cx trace (super_c, ReposLowerT (reason, false, u)) + | (false, ObjProtoT _, InternalT (ExtendsT (r, c, _))) -> + (* We hit the root class, so C is not a subclass of A **) + rec_flow_t cx trace (reposition cx ~trace (aloc_of_reason r) c, result) + (* Don't refine the type when any other `instanceof` check fails. **) + | (false, left, _) -> rec_flow_t cx trace (left, result) -and sentinel_prop_test key cx trace result (sense, obj, t) = - sentinel_prop_test_generic key cx trace result obj (sense, obj, t) + and sentinel_prop_test key cx trace result (sense, obj, t) = + sentinel_prop_test_generic key cx trace result obj (sense, obj, t) -and sentinel_prop_test_generic key cx trace result orig_obj = - (** Evaluate a refinement predicate of the form + and sentinel_prop_test_generic key cx trace result orig_obj = + (* Evaluate a refinement predicate of the form obj.key eq value @@ -9275,135 +9728,174 @@ and sentinel_prop_test_generic key cx trace result orig_obj = the predicate function and its callers to understand how the context is set up so that filtering ultimately only depends on what flows to result. 
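What the sentinel-property machinery ultimately computes is easiest to see on ordinary data: keep only the union members whose literal tag is consistent with the tested value. A toy model, where obj_desc, its kind field, and refine_by_sentinel are assumptions for illustration:

type obj_desc = { kind : string; payload : string }

(* refine_by_sentinel ~sense:true ~value:"circle" shapes keeps only the
   members whose kind is "circle"; sense:false keeps the others. *)
let refine_by_sentinel ~sense ~value members =
  List.filter
    (fun o -> if sense then o.kind = value else o.kind <> value)
    members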
**) - - let flow_sentinel sense props_tmap obj sentinel = - match Context.get_prop cx props_tmap key with - | Some p -> - (match Property.read_t p with - | Some t -> - let desc = RMatchingProp (key, match sentinel with - | Enum.(One Str s) -> RStringLit s - | Enum.(One Num (_, n)) -> RNumberLit n - | Enum.(One Bool b) -> RBooleanLit b - | Enum.(One Null) -> RNull - | Enum.(One Void) -> RVoid - | Enum.(Many _enums) -> REnum - ) in - let reason = replace_reason_const desc (reason_of_t result) in - let test = SentinelPropTestT (reason, orig_obj, key, sense, sentinel, result) in - rec_flow cx trace (t, test) + let flow_sentinel sense props_tmap obj sentinel = + match Context.get_prop cx props_tmap key with + | Some p -> + (match Property.read_t p with + | Some t -> + let desc = + RMatchingProp + ( key, + match sentinel with + | UnionEnum.(One (Str s)) -> RStringLit s + | UnionEnum.(One (Num (_, n))) -> RNumberLit n + | UnionEnum.(One (Bool b)) -> RBooleanLit b + | UnionEnum.(One Null) -> RNull + | UnionEnum.(One Void) -> RVoid + | UnionEnum.(Many _enums) -> RUnionEnum ) + in + let reason = replace_desc_reason desc (reason_of_t result) in + let test = SentinelPropTestT (reason, orig_obj, key, sense, sentinel, result) in + rec_flow cx trace (t, test) + | None -> + let reason_obj = reason_of_t obj in + add_output + cx + ~trace + (Error_message.EPropNotReadable + { reason_prop = reason_obj; prop_name = Some key; use_op = unknown_use })) | None -> - let reason_obj = reason_of_t obj in - let reason = reason_of_t result in - add_output cx ~trace (FlowError.EPropAccess ( - (reason_obj, reason), Some key, Property.polarity p, Read, unknown_use - )) - ) - | None -> - (* TODO: possibly unsound to filter out orig_obj here, but if we + (* TODO: possibly unsound to filter out orig_obj here, but if we don't, case elimination based on sentinel prop checking doesn't work for (disjoint unions of) intersections of objects, where the sentinel prop and the payload appear in different branches of the intersection. It is easy to avoid this unsoundness with slightly more work, but will wait until a refactoring of property lookup lands to revisit. Tracked by #11301092. 
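Only literal-enough types can drive this refinement, which is what the classifier defined just below (sentinel_of_literal) enforces. A stripped-down analogue over plain values, with sentinel and sentinel_of_value as illustrative names:

type sentinel =
  | Str of string
  | Num of float
  | Bool of bool
  | Null
  | Void

let sentinel_of_value = function
  | `String s -> Some (Str s)
  | `Number n -> Some (Num n)
  | `Bool b -> Some (Bool b)
  | `Null -> Some Null
  | `Undefined -> Some Void
  | _ -> None   (* anything else is not a usable sentinel *)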
*) - if orig_obj = obj then rec_flow_t cx trace (orig_obj, result) - in - let sentinel_of_literal = function - | DefT (_, StrT (Literal (_, value))) -> Some Enum.(One (Str value)) - | DefT (_, NumT (Literal (_, value))) -> Some Enum.(One (Num value)) - | DefT (_, BoolT (Some value)) -> Some Enum.(One (Bool value)) - | DefT (_, VoidT) -> Some Enum.(One Void) - | DefT (_, NullT) -> Some Enum.(One Null) - | DefT (_, UnionT rep) -> - begin match UnionRep.check_enum rep with - | Some enums -> Some Enum.(Many enums) - | None -> None - end - | _ -> None - in - fun (sense, obj, t) -> match sentinel_of_literal t with - | Some s -> - begin match obj with - (* obj.key ===/!== literal value *) - | DefT (_, ObjT { props_tmap; _}) -> - flow_sentinel sense props_tmap obj s - - (* instance.key ===/!== literal value *) - | DefT (_, InstanceT (_, _, _, { own_props; _})) -> - (* TODO: add test for sentinel test on implements *) - flow_sentinel sense own_props obj s - - | DefT (_, IntersectionT rep) -> - (* For an intersection of object types, try the test for each object + if orig_obj = obj then rec_flow_t cx trace (orig_obj, result) + in + let sentinel_of_literal = function + | DefT (_, _, StrT (Literal (_, value))) + | DefT (_, _, SingletonStrT value) -> + Some UnionEnum.(One (Str value)) + | DefT (_, _, NumT (Literal (_, value))) + | DefT (_, _, SingletonNumT value) -> + Some UnionEnum.(One (Num value)) + | DefT (_, _, BoolT (Some value)) + | DefT (_, _, SingletonBoolT value) -> + Some UnionEnum.(One (Bool value)) + | DefT (_, _, VoidT) -> Some UnionEnum.(One Void) + | DefT (_, _, NullT) -> Some UnionEnum.(One Null) + | UnionT (_, rep) -> + begin + match UnionRep.check_enum rep with + | Some enums -> Some UnionEnum.(Many enums) + | None -> None + end + | _ -> None + in + fun (sense, obj, t) -> + match sentinel_of_literal t with + | Some s -> + begin + match obj with + (* obj.key ===/!== literal value *) + | DefT (_, _, ObjT { props_tmap; _ }) -> flow_sentinel sense props_tmap obj s + (* instance.key ===/!== literal value *) + | DefT (_, _, InstanceT (_, _, _, { own_props; _ })) -> + (* TODO: add test for sentinel test on implements *) + flow_sentinel sense own_props obj s + | IntersectionT (_, rep) -> + (* For an intersection of object types, try the test for each object type in turn, while recording the original intersection so that we end up with the right refinement. See the comment on the implementation of IntersectionPreprocessKit for more details. *) - let reason = reason_of_t result in - InterRep.members rep |> List.iter (fun obj -> - rec_flow cx trace ( - obj, - intersection_preprocess_kit reason - (SentinelPropTest(sense, key, t, orig_obj, result)) - ) - ) - | _ -> + let reason = reason_of_t result in + InterRep.members rep + |> List.iter (fun obj -> + rec_flow + cx + trace + ( obj, + intersection_preprocess_kit + reason + (SentinelPropTest (sense, key, t, orig_obj, result)) )) + | _ -> + (* not enough info to refine *) + rec_flow_t cx trace (orig_obj, result) + end + | None -> (* not enough info to refine *) rec_flow_t cx trace (orig_obj, result) - end - | None -> - (* not enough info to refine *) - rec_flow_t cx trace (orig_obj, result) - -(*******************************************************************) -(* /predicate *) -(*******************************************************************) - -and flow_use_op op1 u = - let ignore_root = function - | UnknownUse -> true - (* If we are speculating then a Speculation use_op should be considered - * "opaque". 
If we are not speculating then Speculation use_ops that escaped - * (through benign tvars) should be ignored. - * - * Ideally we could replace the Speculation use_ops on benign tvars with their - * underlying use_op after speculation ends. *) - | Speculation _ -> not (Speculation.speculating ()) - | _ -> false - in - mod_use_op_of_use_t (fun op2 -> - let alt = fold_use_op - (* If the root of the previous use_op is UnknownUse and our alternate - * use_op does not have an UnknownUse root then we use our - * alternate use_op. *) - ignore_root - (fun alt -> function - (* If the use was added to an implicit type param then we want to use - * our alternate if the implicit type param use_op chain is inside - * the implicit type param instantiation. This means we always prefer - * pointing to the use_op chain outside of the type parameter - * instantiation. This ensures suppression comments are added to the - * correct location. - * - * Instead of using locs to pick our chain, there may be some type - * theory rule we can employ. However, such a rule would likely depend - * on tracking the inputs/outputs when type parameters are instantiated. - * For now, using locs is cleaner then adding unreliable bits to type - * parameter uses in inputs/outputs. *) - | ImplicitTypeParam loc_op when not alt -> - let loc2 = loc_of_root_use_op (root_of_use_op op2) in - Loc.contains loc_op loc2 - | _ -> alt) - op2 - in - if alt && not (ignore_root (root_of_use_op op1)) then op1 else op2 - ) u -(***********************) -(* bounds manipulation *) -(***********************) + (*******************************************************************) + (* /predicate *) + (*******************************************************************) + and flow_use_op op1 u = + let ignore_root = function + | UnknownUse -> true + | Internal _ -> true + (* If we are speculating then a Speculation use_op should be considered + * "opaque". If we are not speculating then Speculation use_ops that escaped + * (through benign tvars) should be ignored. + * + * Ideally we could replace the Speculation use_ops on benign tvars with their + * underlying use_op after speculation ends. *) + | Speculation _ -> not (Speculation.speculating ()) + | _ -> false + in + if ignore_root (root_of_use_op op1) then + u + else + mod_use_op_of_use_t + (fun op2 -> + let root_of_op2 = root_of_use_op op2 in + let should_replace = + fold_use_op + (* If the root of the previous use_op is UnknownUse and our alternate + * use_op does not have an UnknownUse root then we use our + * alternate use_op. *) + ignore_root + (fun should_replace -> function + (* If the use was added to an implicit type param then we want to use + * our alternate if the implicit type param use_op chain is inside + * the implicit type param instantiation. Since we can't directly compare + * abstract locations, we determine whether to do this using a heuristic + * based on the 'locality' of the use_op root. 
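Stripped of the ImplicitTypeParam locality heuristic, the replacement policy implemented by flow_use_op reduces to: a candidate with an "ignorable" root never overrides, and an existing ignorable root is always overridden. A schematic version, where choose_use_op and ignorable are illustrative names:

(* op1 is the candidate use_op being pushed; op2 is the one already on the
   use.  The ImplicitTypeParam heuristic is deliberately omitted here. *)
let choose_use_op ~ignorable op1 op2 =
  if ignorable op1 then op2          (* candidate has no useful root *)
  else if ignorable op2 then op1     (* existing root is UnknownUse-like *)
  else op2                           (* otherwise keep what is there *)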
*) + | ImplicitTypeParam when not should_replace -> + (match root_of_op2 with + | FunCall { local; _ } + | FunCallMethod { local; _ } -> + local + | Addition _ + | AssignVar _ + | Coercion _ + | DeleteVar _ + | DeleteProperty _ + | FunImplicitReturn _ + | FunReturnStatement _ + | GetProperty _ + | SetProperty _ + | JSXCreateElement _ + | ObjectSpread _ + | ObjectChain _ + | TypeApplication _ + | Speculation _ + | InitField _ -> + true + | Cast _ + | ClassExtendsCheck _ + | ClassImplementsCheck _ + | ClassOwnProtoCheck _ + | GeneratorYield _ + | Internal _ + | ReactCreateElementCall _ + | ReactGetIntrinsic _ + | UnknownUse -> + false) + | _ -> should_replace) + op2 + in + if should_replace then + op1 + else + op2) + u + (***********************) + (* bounds manipulation *) + (***********************) -(** The following general considerations apply when manipulating bounds. + (** The following general considerations apply when manipulating bounds. 1. All type variables start out as roots, but some of them eventually become goto nodes. As such, bounds of roots may contain goto nodes. However, we @@ -9432,255 +9924,246 @@ and flow_use_op op1 u = **) -(* for each l in ls: l => u *) -and flows_to_t cx trace ls u = - ls |> TypeMap.iter (fun l (trace_l, use_op) -> - let u = flow_use_op use_op u in - join_flow cx [trace_l; trace] (l, u) - ) - -(* for each u in us: l => u *) -and flows_from_t cx trace ~use_op l us = - us |> UseTypeMap.iter (fun u trace_u -> - let u = flow_use_op use_op u in - join_flow cx [trace; trace_u] (l, u) - ) - -(* for each l in ls, u in us: l => u *) -and flows_across cx trace ~use_op ls us = - ls |> TypeMap.iter (fun l (trace_l, use_op') -> - us |> UseTypeMap.iter (fun u trace_u -> - let u = flow_use_op use_op' (flow_use_op use_op u) in - join_flow cx [trace_l; trace; trace_u] (l, u) - ) - ) - -(* bounds.upper += u *) -and add_upper u trace bounds = - bounds.upper <- UseTypeMap.add u trace bounds.upper - -(* bounds.lower += l *) -and add_lower l (trace, use_op) bounds = - bounds.lower <- TypeMap.add l (trace, use_op) bounds.lower - -(* Helper for functions that follow. *) -(* Given a map of bindings from tvars to traces, a tvar to skip, and an `each` + (* for each l in ls: l => u *) + and flows_to_t cx trace ls u = + ls + |> TypeMap.iter (fun l (trace_l, use_op) -> + let u = flow_use_op use_op u in + join_flow cx [trace_l; trace] (l, u)) + + (* for each u in us: l => u *) + and flows_from_t cx trace ~use_op l us = + us + |> UseTypeMap.iter (fun u trace_u -> + let u = flow_use_op use_op u in + join_flow cx [trace; trace_u] (l, u)) + + (* for each l in ls, u in us: l => u *) + and flows_across cx trace ~use_op ls us = + ls + |> TypeMap.iter (fun l (trace_l, use_op') -> + us + |> UseTypeMap.iter (fun u trace_u -> + let u = flow_use_op use_op' (flow_use_op use_op u) in + join_flow cx [trace_l; trace; trace_u] (l, u))) + + (* bounds.upper += u *) + and add_upper u trace bounds = bounds.upper <- UseTypeMap.add u trace bounds.upper + + (* bounds.lower += l *) + and add_lower l (trace, use_op) bounds = + bounds.lower <- TypeMap.add l (trace, use_op) bounds.lower + + (* Helper for functions that follow. *) + (* Given a map of bindings from tvars to traces, a tvar to skip, and an `each` function taking a tvar and its associated trace, apply `each` to all unresolved root constraints reached from the bound tvars, except those of skip_tvar. (Typically skip_tvar is a tvar that will be processed separately, so we don't want to redo that work. 
We also don't want to consider any tvar that has already been resolved, because the resolved type will be processed separately, too, as part of the bounds of skip_tvar. **) -and iter_with_filter cx bindings skip_id each = - bindings |> IMap.iter (fun id trace -> - match Context.find_constraints cx id with - | root_id, Unresolved bounds when root_id <> skip_id -> - each (root_id, bounds) trace - | _ -> - () - ) - -(* for each id in id1 + bounds1.lowertvars: + and iter_with_filter cx bindings skip_id each = + bindings + |> IMap.iter (fun id trace -> + match Context.find_constraints cx id with + | (root_id, Unresolved bounds) when root_id <> skip_id -> each (root_id, bounds) trace + | _ -> ()) + (* for each id in id1 + bounds1.lowertvars: id.bounds.upper += t2 *) -(** When going through bounds1.lowertvars, filter out id1. **) -(** As an optimization, skip id1 when it will become either a resolved root or a - goto node (so that updating its bounds is unnecessary). **) -and edges_to_t cx trace ?(opt=false) (id1, bounds1) t2 = - if not opt then add_upper t2 trace bounds1; - iter_with_filter cx bounds1.lowertvars id1 (fun (_, bounds) trace_l -> - add_upper t2 (Trace.concat_trace [trace_l; trace]) bounds - ) + (* When going through bounds1.lowertvars, filter out id1. **) -(* for each id in id2 + bounds2.uppertvars: + (** As an optimization, skip id1 when it will become either a resolved root or a + goto node (so that updating its bounds is unnecessary). **) + and edges_to_t cx trace ?(opt = false) (id1, bounds1) t2 = + if not opt then add_upper t2 trace bounds1; + iter_with_filter cx bounds1.lowertvars id1 (fun (_, bounds) trace_l -> + add_upper t2 (Trace.concat_trace [trace_l; trace]) bounds) + (* for each id in id2 + bounds2.uppertvars: id.bounds.lower += t1 *) -(** When going through bounds2.uppertvars, filter out id2. **) -(** As an optimization, skip id2 when it will become either a resolved root or a + (* When going through bounds2.uppertvars, filter out id2. **) + + (** As an optimization, skip id2 when it will become either a resolved root or a goto node (so that updating its bounds is unnecessary). 
**) -and edges_from_t cx trace ~use_op ?(opt=false) t1 (id2, bounds2) = - if not opt then add_lower t1 (trace, use_op) bounds2; - iter_with_filter cx bounds2.uppertvars id2 (fun (_, bounds) trace_u -> - add_lower t1 (Trace.concat_trace [trace; trace_u], use_op) bounds - ) + and edges_from_t cx trace ~use_op ?(opt = false) t1 (id2, bounds2) = + if not opt then add_lower t1 (trace, use_op) bounds2; + iter_with_filter cx bounds2.uppertvars id2 (fun (_, bounds) trace_u -> + add_lower t1 (Trace.concat_trace [trace; trace_u], use_op) bounds) -(* for each id' in id + bounds.lowertvars: + (* for each id' in id + bounds.lowertvars: id'.bounds.upper += us *) -and edges_to_ts cx trace ?(opt=false) (id, bounds) us = - us |> UseTypeMap.iter (fun u trace_u -> - edges_to_t cx (Trace.concat_trace[trace;trace_u]) ~opt (id, bounds) u - ) + and edges_to_ts cx trace ?(opt = false) (id, bounds) us = + us + |> UseTypeMap.iter (fun u trace_u -> + edges_to_t cx (Trace.concat_trace [trace; trace_u]) ~opt (id, bounds) u) -(* for each id' in id + bounds.uppertvars: + (* for each id' in id + bounds.uppertvars: id'.bounds.lower += ls *) -and edges_from_ts cx trace ?(opt=false) ls (id, bounds) = - ls |> TypeMap.iter (fun l (trace_l, use_op) -> - edges_from_t cx (Trace.concat_trace [trace_l; trace]) ~use_op ~opt l (id, bounds) - ) - -(* for each id in id1 + bounds1.lowertvars: + and edges_from_ts cx trace ?(opt = false) ls (id, bounds) = + ls + |> TypeMap.iter (fun l (trace_l, use_op) -> + edges_from_t cx (Trace.concat_trace [trace_l; trace]) ~use_op ~opt l (id, bounds)) + (* for each id in id1 + bounds1.lowertvars: id.bounds.upper += t2 for each l in bounds1.lower: l => t2 *) -(** As an invariant, bounds1.lower should already contain id.bounds.lower for - each id in bounds1.lowertvars. **) -and edges_and_flows_to_t cx trace ?(opt=false) (id1, bounds1) t2 = - if not (UseTypeMap.mem t2 bounds1.upper) then ( - edges_to_t cx trace ~opt (id1, bounds1) t2; - flows_to_t cx trace bounds1.lower t2 - ) -(* for each id in id2 + bounds2.uppertvars: + (** As an invariant, bounds1.lower should already contain id.bounds.lower for + each id in bounds1.lowertvars. **) + and edges_and_flows_to_t cx trace ?(opt = false) (id1, bounds1) t2 = + if not (UseTypeMap.mem t2 bounds1.upper) then ( + edges_to_t cx trace ~opt (id1, bounds1) t2; + flows_to_t cx trace bounds1.lower t2 + ) + (* for each id in id2 + bounds2.uppertvars: id.bounds.lower += t1 for each u in bounds2.upper: t1 => u *) -(** As an invariant, bounds2.upper should already contain id.bounds.upper for - each id in bounds2.uppertvars. **) -and edges_and_flows_from_t cx trace ~use_op ?(opt=false) t1 (id2, bounds2) = - if not (TypeMap.mem t1 bounds2.lower) then ( - edges_from_t cx trace ~use_op ~opt t1 (id2, bounds2); - flows_from_t cx trace ~use_op t1 bounds2.upper - ) -(* bounds.uppertvars += id *) -and add_uppertvar id trace bounds = - bounds.uppertvars <- IMap.add id trace bounds.uppertvars + (** As an invariant, bounds2.upper should already contain id.bounds.upper for + each id in bounds2.uppertvars. 
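(* Editor's aside: a small standalone model of the invariant maintained by
   edges_to_t / edges_from_t above: when a new upper bound is added to a tvar,
   every tvar currently known to flow into it must see that upper bound too.
   The `node` record and `add_upper` below are illustrative only, not Flow's
   graph representation. *)
type node = {
  id : int;
  mutable upper : string list;    (* concrete upper-bound "types" *)
  mutable lowertvars : node list; (* tvars whose values flow into this one *)
}

let add_upper t n =
  n.upper <- t :: n.upper;
  (* keep the transitive picture consistent: lower tvars gain the bound too *)
  List.iter (fun m -> m.upper <- t :: m.upper) n.lowertvars

let () =
  let a = { id = 1; upper = []; lowertvars = [] } in
  let b = { id = 2; upper = []; lowertvars = [a] } in
  add_upper "string" b;
  assert (a.id <> b.id);
  assert (a.upper = ["string"] && b.upper = ["string"])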
**) + and edges_and_flows_from_t cx trace ~use_op ?(opt = false) t1 (id2, bounds2) = + if not (TypeMap.mem t1 bounds2.lower) then ( + edges_from_t cx trace ~use_op ~opt t1 (id2, bounds2); + flows_from_t cx trace ~use_op t1 bounds2.upper + ) -(* bounds.lowertvars += id *) -and add_lowertvar id trace bounds = - bounds.lowertvars <- IMap.add id trace bounds.lowertvars + (* bounds.uppertvars += id *) + and add_uppertvar id trace bounds = bounds.uppertvars <- IMap.add id trace bounds.uppertvars -(* for each id in id1 + bounds1.lowertvars: + (* bounds.lowertvars += id *) + and add_lowertvar id trace bounds = + bounds.lowertvars <- IMap.add id trace bounds.lowertvars + (* for each id in id1 + bounds1.lowertvars: id.bounds.uppertvars += id2 *) -(** When going through bounds1.lowertvars, filter out id1. **) -(** As an optimization, skip id1 when it will become either a resolved root or a - goto node (so that updating its bounds is unnecessary). **) -and edges_to_tvar cx trace ?(opt=false) (id1, bounds1) id2 = - if not opt then add_uppertvar id2 trace bounds1; - iter_with_filter cx bounds1.lowertvars id1 (fun (_, bounds) trace_l -> - add_uppertvar id2 (Trace.concat_trace[trace_l;trace]) bounds - ) + (* When going through bounds1.lowertvars, filter out id1. **) -(* for each id in id2 + bounds2.uppertvars: + (** As an optimization, skip id1 when it will become either a resolved root or a + goto node (so that updating its bounds is unnecessary). **) + and edges_to_tvar cx trace ?(opt = false) (id1, bounds1) id2 = + if not opt then add_uppertvar id2 trace bounds1; + iter_with_filter cx bounds1.lowertvars id1 (fun (_, bounds) trace_l -> + add_uppertvar id2 (Trace.concat_trace [trace_l; trace]) bounds) + (* for each id in id2 + bounds2.uppertvars: id.bounds.lowertvars += id1 *) -(** When going through bounds2.uppertvars, filter out id2. **) -(** As an optimization, skip id2 when it will become either a resolved root or a + (* When going through bounds2.uppertvars, filter out id2. **) + + (** As an optimization, skip id2 when it will become either a resolved root or a goto node (so that updating its bounds is unnecessary). 
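(* Editor's aside: the `if not (UseTypeMap.mem ...)` / `if not (TypeMap.mem ...)`
   guards in edges_and_flows_to_t / edges_and_flows_from_t above make the
   propagation idempotent, so already-recorded bounds are not re-flowed. A tiny
   standalone model of that pattern (the names below are illustrative, not
   Flow's): *)
let add_upper_once uppers u propagate =
  if not (List.mem u !uppers) then begin
    uppers := u :: !uppers; (* record the new upper bound first *)
    propagate u             (* then do the potentially recursive work, once *)
  end

let () =
  let uppers = ref [] in
  let work_done = ref 0 in
  add_upper_once uppers "string" (fun _ -> incr work_done);
  add_upper_once uppers "string" (fun _ -> incr work_done);
  assert (!work_done = 1 && !uppers = ["string"])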
**) -and edges_from_tvar cx trace ?(opt=false) id1 (id2, bounds2) = - if not opt then add_lowertvar id1 trace bounds2; - iter_with_filter cx bounds2.uppertvars id2 (fun (_, bounds) trace_u -> - add_lowertvar id1 (Trace.concat_trace[trace;trace_u]) bounds - ) + and edges_from_tvar cx trace ?(opt = false) id1 (id2, bounds2) = + if not opt then add_lowertvar id1 trace bounds2; + iter_with_filter cx bounds2.uppertvars id2 (fun (_, bounds) trace_u -> + add_lowertvar id1 (Trace.concat_trace [trace; trace_u]) bounds) -(* for each id in id1 + bounds1.lowertvars: + (* for each id in id1 + bounds1.lowertvars: id.bounds.upper += bounds2.upper id.bounds.uppertvars += id2 id.bounds.uppertvars += bounds2.uppertvars *) -and add_upper_edges cx trace ?(opt=false) (id1, bounds1) (id2, bounds2) = - edges_to_ts cx trace ~opt (id1, bounds1) bounds2.upper; - edges_to_tvar cx trace ~opt (id1, bounds1) id2; - iter_with_filter cx bounds2.uppertvars id2 (fun (tvar, _) trace_u -> - let trace = Trace.concat_trace [trace;trace_u] in - edges_to_tvar cx trace ~opt (id1, bounds1) tvar - ) - -(* for each id in id2 + bounds2.uppertvars: + and add_upper_edges cx trace ?(opt = false) (id1, bounds1) (id2, bounds2) = + edges_to_ts cx trace ~opt (id1, bounds1) bounds2.upper; + edges_to_tvar cx trace ~opt (id1, bounds1) id2; + iter_with_filter cx bounds2.uppertvars id2 (fun (tvar, _) trace_u -> + let trace = Trace.concat_trace [trace; trace_u] in + edges_to_tvar cx trace ~opt (id1, bounds1) tvar) + + (* for each id in id2 + bounds2.uppertvars: id.bounds.lower += bounds1.lower id.bounds.lowertvars += id1 id.bounds.lowertvars += bounds1.lowertvars *) -and add_lower_edges cx trace ?(opt=false) (id1, bounds1) (id2, bounds2) = - edges_from_ts cx trace ~opt bounds1.lower (id2, bounds2); - edges_from_tvar cx trace ~opt id1 (id2, bounds2); - iter_with_filter cx bounds1.lowertvars id1 (fun (tvar, _) trace_l -> - let trace = Trace.concat_trace [trace_l;trace] in - edges_from_tvar cx trace ~opt tvar (id2, bounds2) - ) - -(***************) -(* unification *) -(***************) + and add_lower_edges cx trace ?(opt = false) (id1, bounds1) (id2, bounds2) = + edges_from_ts cx trace ~opt bounds1.lower (id2, bounds2); + edges_from_tvar cx trace ~opt id1 (id2, bounds2); + iter_with_filter cx bounds1.lowertvars id1 (fun (tvar, _) trace_l -> + let trace = Trace.concat_trace [trace_l; trace] in + edges_from_tvar cx trace ~opt tvar (id2, bounds2)) -and unify_flip use_op = Frame (UnifyFlip, use_op) + (***************) + (* unification *) + (***************) + and unify_flip use_op = Frame (UnifyFlip, use_op) -(* Chain a root to another root. If both roots are unresolved, this amounts to + (* Chain a root to another root. If both roots are unresolved, this amounts to copying over the bounds of one root to another, and adding all the connections necessary when two non-unifiers flow to each other. If one or both of the roots are resolved, they effectively act like the corresponding concrete types. 
*) -and goto cx trace ~use_op (id1, root1) (id2, root2) = - match root1.constraints, root2.constraints with - | Unresolved bounds1, Unresolved bounds2 -> - let cond1 = not_linked (id1, bounds1) (id2, bounds2) in - let cond2 = not_linked (id2, bounds2) (id1, bounds1) in - if cond1 then - flows_across cx trace ~use_op bounds1.lower bounds2.upper; - if cond2 then - flows_across cx trace ~use_op:(unify_flip use_op) bounds2.lower bounds1.upper; - if cond1 then ( - add_upper_edges cx trace ~opt:true (id1, bounds1) (id2, bounds2); - add_lower_edges cx trace (id1, bounds1) (id2, bounds2); - ); - if cond2 then ( - add_upper_edges cx trace (id2, bounds2) (id1, bounds1); - add_lower_edges cx trace ~opt:true (id2, bounds2) (id1, bounds1); - ); - Context.add_tvar cx id1 (Goto id2); - - | Unresolved bounds1, Resolved t2 -> - let t2_use = UseT (use_op, t2) in - edges_and_flows_to_t cx trace ~opt:true (id1, bounds1) t2_use; - edges_and_flows_from_t cx trace ~use_op:(unify_flip use_op) ~opt:true t2 (id1, bounds1); - Context.add_tvar cx id1 (Goto id2); - - | Resolved t1, Unresolved bounds2 -> - let t1_use = UseT (unify_flip use_op, t1) in - edges_and_flows_to_t cx trace ~opt:true (id2, bounds2) t1_use; - edges_and_flows_from_t cx trace ~use_op ~opt:true t1 (id2, bounds2); - Context.add_tvar cx id2 (Goto id1); - - | Resolved t1, Resolved t2 -> - (* replace node first, in case rec_unify recurses back to these tvars *) - Context.add_tvar cx id1 (Goto id2); - rec_unify cx trace ~use_op t1 t2; - -(* Unify two type variables. This involves finding their roots, and making one + and goto cx trace ~use_op (id1, root1) (id2, root2) = + match (root1.constraints, root2.constraints) with + | (Unresolved bounds1, Unresolved bounds2) -> + let cond1 = not_linked (id1, bounds1) (id2, bounds2) in + let cond2 = not_linked (id2, bounds2) (id1, bounds1) in + if cond1 then flows_across cx trace ~use_op bounds1.lower bounds2.upper; + if cond2 then flows_across cx trace ~use_op:(unify_flip use_op) bounds2.lower bounds1.upper; + if cond1 then ( + add_upper_edges cx trace ~opt:true (id1, bounds1) (id2, bounds2); + add_lower_edges cx trace (id1, bounds1) (id2, bounds2) + ); + if cond2 then ( + add_upper_edges cx trace (id2, bounds2) (id1, bounds1); + add_lower_edges cx trace ~opt:true (id2, bounds2) (id1, bounds1) + ); + Context.add_tvar cx id1 (Goto id2) + | (Unresolved bounds1, (Resolved (_, t2) | FullyResolved (_, t2))) -> + let t2_use = UseT (use_op, t2) in + edges_and_flows_to_t cx trace ~opt:true (id1, bounds1) t2_use; + edges_and_flows_from_t cx trace ~use_op:(unify_flip use_op) ~opt:true t2 (id1, bounds1); + Context.add_tvar cx id1 (Goto id2) + | ((Resolved (_, t1) | FullyResolved (_, t1)), Unresolved bounds2) -> + let t1_use = UseT (unify_flip use_op, t1) in + edges_and_flows_to_t cx trace ~opt:true (id2, bounds2) t1_use; + edges_and_flows_from_t cx trace ~use_op ~opt:true t1 (id2, bounds2); + Context.add_tvar cx id2 (Goto id1) + | ((Resolved (_, t1) | FullyResolved (_, t1)), (Resolved (_, t2) | FullyResolved (_, t2))) -> + (* replace node first, in case rec_unify recurses back to these tvars *) + Context.add_tvar cx id1 (Goto id2); + rec_unify cx trace ~use_op t1 t2 + + (* Unify two type variables. This involves finding their roots, and making one point to the other. Ranks are used to keep chains short. 
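(* Editor's aside: a standalone model of the case analysis goto performs above,
   with the graph bookkeeping stripped away. `flow` and `unify_types` are
   placeholder callbacks, and `bounds` / `constraints` are simplified shapes,
   not Flow's. *)
type ty = string

type bounds = {
  lower : ty list; (* types known to flow into the tvar *)
  upper : ty list; (* types the tvar must flow into *)
}

type constraints =
  | Unresolved of bounds
  | Resolved of ty

let link flow unify_types c1 c2 =
  match (c1, c2) with
  | (Unresolved b1, Unresolved b2) ->
    (* each side's lower bounds must satisfy the other side's upper bounds *)
    List.iter (fun l -> List.iter (flow l) b2.upper) b1.lower;
    List.iter (fun l -> List.iter (flow l) b1.upper) b2.lower
  | (Unresolved b, Resolved t) | (Resolved t, Unresolved b) ->
    (* the resolved side acts like the corresponding concrete type *)
    List.iter (fun l -> flow l t) b.lower;
    List.iter (fun u -> flow t u) b.upper
  | (Resolved t1, Resolved t2) -> unify_types t1 t2

let () =
  let flow l u = Printf.printf "%s => %s\n" l u in
  let unify_types t1 t2 = Printf.printf "%s == %s\n" t1 t2 in
  link flow unify_types
    (Unresolved { lower = ["number"]; upper = ["number | string"] })
    (Resolved "number")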
*) -and merge_ids cx trace ~use_op id1 id2 = - let (id1, root1), (id2, root2) = Context.find_root cx id1, Context.find_root cx id2 in - if id1 = id2 then () - else if root1.rank < root2.rank - then goto cx trace ~use_op (id1, root1) (id2, root2) - else if root2.rank < root1.rank - then goto cx trace ~use_op:(unify_flip use_op) (id2, root2) (id1, root1) - else ( - Context.add_tvar cx id2 (Root { root2 with rank = root1.rank+1; }); - goto cx trace ~use_op (id1, root1) (id2, root2); - ) + and merge_ids cx trace ~use_op id1 id2 = + let ((id1, root1), (id2, root2)) = (Context.find_root cx id1, Context.find_root cx id2) in + if id1 = id2 then + () + else if root1.rank < root2.rank then + goto cx trace ~use_op (id1, root1) (id2, root2) + else if root2.rank < root1.rank then + goto cx trace ~use_op:(unify_flip use_op) (id2, root2) (id1, root1) + else ( + Context.add_tvar cx id2 (Root { root2 with rank = root1.rank + 1 }); + goto cx trace ~use_op (id1, root1) (id2, root2) + ) -(* Resolve a type variable to a type. This involves finding its root, and + (* Resolve a type variable to a type. This involves finding its root, and resolving to that type. *) -and resolve_id cx trace ~use_op id t = - let id, root = Context.find_root cx id in - match root.constraints with - | Unresolved bounds -> - Context.add_tvar cx id (Root { root with constraints = Resolved t }); - edges_and_flows_to_t cx trace ~opt:true (id, bounds) (UseT (use_op, t)); - edges_and_flows_from_t cx trace ~use_op ~opt:true t (id, bounds); - - | Resolved t_ -> - rec_unify cx trace ~use_op t_ t + and resolve_id cx trace ~use_op ?(fully_resolved = false) id t = + let (id, root) = Context.find_root cx id in + match root.constraints with + | Unresolved bounds -> + let constraints = + if fully_resolved then + FullyResolved (use_op, t) + else + Resolved (use_op, t) + in + Context.add_tvar cx id (Root { root with constraints }); + edges_and_flows_to_t cx trace ~opt:true (id, bounds) (UseT (use_op, t)); + edges_and_flows_from_t cx trace ~use_op ~opt:true t (id, bounds) + | Resolved (_, t_) + | FullyResolved (_, t_) -> + rec_unify cx trace ~use_op t_ t -(******************) + (******************) -(* Unification of two types *) + (* Unification of two types *) -(* It is potentially dangerous to unify a type variable to a type that "forgets" + (* It is potentially dangerous to unify a type variable to a type that "forgets" constraints during propagation. These types are "any-like": the canonical example of such a type is any. Overall, we want unification to be a sound "optimization," in the sense that replacing bidirectional flows with @@ -9691,254 +10174,299 @@ and resolve_id cx trace ~use_op id t = However, unifying with any-like types is sometimes desirable / intentional. Thus, we limit the set of types on which unification is banned - to just AnyWithUpperBoundT, AnyWithLowerBoundT, and MergedT which are - internal types. + to just MergedT which is an internal type. 
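(* Editor's aside: merge_ids above is union-by-rank over tvar roots, with Goto
   nodes playing the role of parent pointers. Here is a generic, standalone
   union-find in the same spirit (array-based; not Flow's Context graph): *)
type uf = {
  parent : int array;
  rank : int array;
}

let create n = { parent = Array.init n (fun i -> i); rank = Array.make n 0 }

let rec find u i =
  if u.parent.(i) = i then
    i
  else begin
    let r = find u u.parent.(i) in
    u.parent.(i) <- r; (* path compression, comparable to collapsing Goto chains *)
    r
  end

(* Attach the lower-rank root under the higher-rank one so chains stay short;
   on a tie, bump the surviving root's rank, as merge_ids does. *)
let union u i j =
  let ri = find u i and rj = find u j in
  if ri <> rj then
    if u.rank.(ri) < u.rank.(rj) then
      u.parent.(ri) <- rj
    else if u.rank.(rj) < u.rank.(ri) then
      u.parent.(rj) <- ri
    else begin
      u.parent.(rj) <- ri;
      u.rank.(ri) <- u.rank.(ri) + 1
    end

let () =
  let u = create 4 in
  union u 0 1;
  union u 2 3;
  union u 1 3;
  assert (find u 0 = find u 2)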
*) -and ok_unify ~unify_any desc = function - | DefT (_, AnyT) | AnyWithUpperBoundT _ | AnyWithLowerBoundT _ -> - (match desc with RExistential -> true | _ -> unify_any) - | MergedT _ -> false - | _ -> true - -and __unify cx ~use_op ~unify_any t1 t2 trace = - begin match Context.verbose cx with - | Some { Verbose.indent; depth; enabled_during_flowlib=_; } -> - let indent = String.make ((Trace.trace_depth trace - 1) * indent) ' ' in - let pid = Context.pid_prefix cx in - prerr_endlinef - "\n%s%s%s =\n%s%s%s" - indent pid (Debug_js.dump_t ~depth cx t1) - indent pid (Debug_js.dump_t ~depth cx t2) - | None -> () - end; - - (* If the type is the same type or we have already seen this type pair in our - * cache then do not continue. *) - if t1 = t2 then () else ( + and ok_unify ~unify_any desc = function + | AnyT _ -> + (match desc with + | RExistential -> true + | _ -> unify_any) + | MergedT _ -> false + | _ -> true + + and __unify cx ~use_op ~unify_any t1 t2 trace = + begin + match Context.verbose cx with + | Some { Verbose.indent; depth; enabled_during_flowlib = _ } -> + let indent = String.make ((Trace.trace_depth trace - 1) * indent) ' ' in + let pid = Context.pid_prefix cx in + prerr_endlinef + "\n%s%s%s =\n%s%s%s" + indent + pid + (Debug_js.dump_t ~depth cx t1) + indent + pid + (Debug_js.dump_t ~depth cx t2) + | None -> () + end; - (* limit recursion depth *) - RecursionCheck.check trace; + (* If the type is the same type or we have already seen this type pair in our + * cache then do not continue. *) + if t1 = t2 then + () + else ( + (* limit recursion depth *) + RecursionCheck.check cx trace; - (* In general, unifying t1 and t2 should have similar effects as flowing t1 to + (* In general, unifying t1 and t2 should have similar effects as flowing t1 to t2 and flowing t2 to t1. This also means that any restrictions on such flows should also be enforced here. In particular, we don't expect t1 or t2 to be type parameters, and we don't expect t1 or t2 to be def types that don't make sense as use types. See __flow for more details. *) - not_expect_bound t1; - not_expect_bound t2; - expect_proper_def t1; - expect_proper_def t2; + not_expect_bound t1; + not_expect_bound t2; + expect_proper_def t1; + expect_proper_def t2; - (* Before processing the unify action, check that it is not deferred. If it + (* Before processing the unify action, check that it is not deferred. If it is, then when speculation is complete, the action either fires or is discarded depending on whether the case that created the action is selected or not. 
*) - if not Speculation.(defer_action cx (Action.Unify (use_op, t1, t2))) then - - match t1, t2 with - - | OpenT (_, id1), OpenT (_, id2) -> - merge_ids cx trace ~use_op id1 id2 - - | OpenT (r, id), t when ok_unify ~unify_any (desc_of_reason r) t -> - resolve_id cx trace ~use_op id t - | t, OpenT (r, id) when ok_unify ~unify_any (desc_of_reason r) t -> - resolve_id cx trace ~use_op:(unify_flip use_op) id t - - | DefT (_, PolyT (_, _, id1)), DefT (_, PolyT (_, _, id2)) - when id1 = id2 -> () - - | DefT (r1, PolyT (params1, t1, id1)), DefT (r2, PolyT (params2, t2, id2)) -> - let n1 = List.length params1 in - let n2 = List.length params2 in - if n2 > n1 then - add_output cx ~trace (FlowError.ETooManyTypeArgs (r2, r1, n1)) - else if n2 < n1 then - add_output cx ~trace (FlowError.ETooFewTypeArgs (r2, r1, n1)) - else - (** for equal-arity polymorphic types, unify param upper bounds + if not Speculation.(defer_action cx (Action.Unify (use_op, t1, t2))) then + match (t1, t2) with + | (OpenT (_, id1), OpenT (_, id2)) -> merge_ids cx trace ~use_op id1 id2 + | (OpenT (r, id), t) when ok_unify ~unify_any (desc_of_reason r) t -> + resolve_id cx trace ~use_op id t + | (t, OpenT (r, id)) when ok_unify ~unify_any (desc_of_reason r) t -> + resolve_id cx trace ~use_op:(unify_flip use_op) id t + | (DefT (_, _, PolyT (_, _, _, id1)), DefT (_, _, PolyT (_, _, _, id2))) when id1 = id2 -> + () + | ( DefT (r1, _, PolyT (tparams_loc1, params1, t1, id1)), + DefT (r2, _, PolyT (tparams_loc2, params2, t2, id2)) ) -> + let n1 = Nel.length params1 in + let n2 = Nel.length params2 in + if n2 > n1 then + add_output cx ~trace (Error_message.ETooManyTypeArgs (r2, r1, n1)) + else if n2 < n1 then + add_output cx ~trace (Error_message.ETooFewTypeArgs (r2, r1, n1)) + else + (* for equal-arity polymorphic types, unify param upper bounds with each other, then instances parameterized by these *) - let args1 = instantiate_poly_param_upper_bounds cx params1 in - let args2 = instantiate_poly_param_upper_bounds cx params2 in - List.iter2 (rec_unify cx trace ~use_op) args1 args2; - let inst1 = - let r = reason_of_t t1 in - mk_typeapp_of_poly cx trace - ~use_op ~reason_op:r ~reason_tapp:r id1 params1 t1 args1 in - let inst2 = - let r = reason_of_t t2 in - mk_typeapp_of_poly cx trace - ~use_op ~reason_op:r ~reason_tapp:r id2 params2 t2 args2 in - rec_unify cx trace ~use_op inst1 inst2 - - | DefT (_, ArrT (ArrayAT(t1, ts1))), - DefT (_, ArrT (ArrayAT(t2, ts2))) -> - let ts1 = Option.value ~default:[] ts1 in - let ts2 = Option.value ~default:[] ts2 in - array_unify cx trace ~use_op (ts1, t1, ts2, t2) - - | DefT (r1, ArrT (TupleAT (_, ts1))), - DefT (r2, ArrT (TupleAT (_, ts2))) -> - let l1 = List.length ts1 in - let l2 = List.length ts2 in - if l1 <> l2 then - add_output cx ~trace (FlowError.ETupleArityMismatch - ((r1, r2), l1, l2, use_op)); - iter2opt (fun t1 t2 -> - match t1, t2 with - | Some t1, Some t2 -> rec_unify cx trace ~use_op t1 t2 - | _ -> () - ) (ts1, ts2) - - | DefT (lreason, ObjT { props_tmap = lflds; dict_t = ldict; _ }), - DefT (ureason, ObjT { props_tmap = uflds; dict_t = udict; _ }) -> - - (* ensure the keys and values are compatible with each other. 
*) - begin match ldict, udict with - | Some {key = lk; value = lv; _}, Some {key = uk; value = uv; _} -> - rec_unify cx trace lk uk ~use_op:(Frame (IndexerKeyCompatibility { - lower = lreason; - upper = ureason; - }, use_op)); - rec_unify cx trace lv uv ~use_op:(Frame (PropertyCompatibility { - prop = None; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, use_op)) - | Some _, None -> - let use_op = Frame (PropertyCompatibility { - prop = None; - lower = ureason; - upper = lreason; - is_sentinel = false; - }, use_op) in - let lreason = replace_reason_const RSomeProperty lreason in - let err = FlowError.EPropNotFound (None, (lreason, ureason), use_op) in - add_output cx ~trace err - | None, Some _ -> - let use_op = Frame (PropertyCompatibility { - prop = None; - lower = lreason; - upper = ureason; - is_sentinel = false; - }, Frame (UnifyFlip, use_op)) in - let ureason = replace_reason_const RSomeProperty ureason in - let err = FlowError.EPropNotFound (None, (ureason, lreason), use_op) in - add_output cx ~trace err - | None, None -> () - end; - - let lpmap = Context.find_props cx lflds in - let upmap = Context.find_props cx uflds in - SMap.merge (fun x lp up -> - if not (is_internal_name x || is_dictionary_exempt x) - then (match lp, up with - | Some p1, Some p2 -> - unify_props cx trace ~use_op x lreason ureason p1 p2 - | Some p1, None -> - unify_prop_with_dict cx trace ~use_op x p1 lreason ureason udict - | None, Some p2 -> - unify_prop_with_dict cx trace ~use_op x p2 ureason lreason ldict - | None, None -> ()); - None - ) lpmap upmap |> ignore + let args1 = instantiate_poly_param_upper_bounds cx params1 in + let args2 = instantiate_poly_param_upper_bounds cx params2 in + List.iter2 (rec_unify cx trace ~use_op) args1 args2; + let inst1 = + let r = reason_of_t t1 in + mk_typeapp_of_poly + cx + trace + ~use_op + ~reason_op:r + ~reason_tapp:r + id1 + tparams_loc1 + params1 + t1 + args1 + in + let inst2 = + let r = reason_of_t t2 in + mk_typeapp_of_poly + cx + trace + ~use_op + ~reason_op:r + ~reason_tapp:r + id2 + tparams_loc2 + params2 + t2 + args2 + in + rec_unify cx trace ~use_op inst1 inst2 + | (DefT (_, _, ArrT (ArrayAT (t1, ts1))), DefT (_, _, ArrT (ArrayAT (t2, ts2)))) -> + let ts1 = Option.value ~default:[] ts1 in + let ts2 = Option.value ~default:[] ts2 in + array_unify cx trace ~use_op (ts1, t1, ts2, t2) + | (DefT (r1, _, ArrT (TupleAT (_, ts1))), DefT (r2, _, ArrT (TupleAT (_, ts2)))) -> + let l1 = List.length ts1 in + let l2 = List.length ts2 in + if l1 <> l2 then + add_output cx ~trace (Error_message.ETupleArityMismatch ((r1, r2), l1, l2, use_op)); + iter2opt + (fun t1 t2 -> + match (t1, t2) with + | (Some t1, Some t2) -> rec_unify cx trace ~use_op t1 t2 + | _ -> ()) + (ts1, ts2) + | ( DefT (lreason, _, ObjT { props_tmap = lflds; dict_t = ldict; _ }), + DefT (ureason, _, ObjT { props_tmap = uflds; dict_t = udict; _ }) ) -> + (* ensure the keys and values are compatible with each other. 
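(* Editor's aside: the ObjT ~ ObjT case here walks two property maps with
   SMap.merge purely for its side effects: the callback sees every key present
   on either side, returns None, and the resulting (empty) map is ignored. A
   self-contained illustration of that pattern using the stdlib's string map;
   `unify_prop` and the string "types" are placeholders: *)
module StrMap = Map.Make (String)

let check_props unify_prop lprops uprops =
  StrMap.merge
    (fun name l u ->
      (match (l, u) with
      | (Some t1, Some t2) -> unify_prop name t1 t2
      | (Some _, None) | (None, Some _) ->
        Printf.printf "property %s exists on only one side\n" name
      | (None, None) -> ());
      None)
    lprops
    uprops
  |> ignore

let () =
  let l = StrMap.(empty |> add "x" "number" |> add "y" "string") in
  let u = StrMap.(empty |> add "x" "number") in
  check_props
    (fun name t1 t2 -> Printf.printf "unify %s: %s ~ %s\n" name t1 t2)
    l
    u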
*) + begin + match (ldict, udict) with + | (Some { key = lk; value = lv; _ }, Some { key = uk; value = uv; _ }) -> + rec_unify + cx + trace + lk + uk + ~use_op: + (Frame (IndexerKeyCompatibility { lower = lreason; upper = ureason }, use_op)); + rec_unify + cx + trace + lv + uv + ~use_op: + (Frame + ( PropertyCompatibility { prop = None; lower = lreason; upper = ureason }, + use_op )) + | (Some _, None) -> + let use_op = + Frame + (PropertyCompatibility { prop = None; lower = ureason; upper = lreason }, use_op) + in + let lreason = replace_desc_reason RSomeProperty lreason in + let err = Error_message.EPropNotFound (None, (lreason, ureason), use_op) in + add_output cx ~trace err + | (None, Some _) -> + let use_op = + Frame + ( PropertyCompatibility { prop = None; lower = lreason; upper = ureason }, + Frame (UnifyFlip, use_op) ) + in + let ureason = replace_desc_reason RSomeProperty ureason in + let err = Error_message.EPropNotFound (None, (ureason, lreason), use_op) in + add_output cx ~trace err + | (None, None) -> () + end; + + let lpmap = Context.find_props cx lflds in + let upmap = Context.find_props cx uflds in + SMap.merge + (fun x lp up -> + ( if not (is_internal_name x || is_dictionary_exempt x) then + match (lp, up) with + | (Some p1, Some p2) -> unify_props cx trace ~use_op x lreason ureason p1 p2 + | (Some p1, None) -> + unify_prop_with_dict cx trace ~use_op x p1 lreason ureason udict + | (None, Some p2) -> + unify_prop_with_dict cx trace ~use_op x p2 ureason lreason ldict + | (None, None) -> () ); + None) + lpmap + upmap + |> ignore + | (DefT (_, _, FunT (_, _, funtype1)), DefT (_, _, FunT (_, _, funtype2))) + when List.length funtype1.params = List.length funtype2.params -> + rec_unify cx trace ~use_op funtype1.this_t funtype2.this_t; + List.iter2 + (fun (_, t1) (_, t2) -> rec_unify cx trace ~use_op t1 t2) + funtype1.params + funtype2.params; + rec_unify cx trace ~use_op funtype1.return_t funtype2.return_t + | (TypeAppT (_, _, c1, ts1), TypeAppT (_, _, c2, ts2)) + when c1 = c2 && List.length ts1 = List.length ts2 -> + List.iter2 (rec_unify cx trace ~use_op) ts1 ts2 + | (AnnotT (_, OpenT (_, id1), _), AnnotT (_, OpenT (_, id2), _)) -> + (* It is tempting to unify the tvars here, but that would be problematic. These tvars should + eventually resolve to the type definitions that these annotations reference. By unifying + them, we might accidentally resolve one of the tvars to the type definition of the other, + which would lead to confusing behavior. + + On the other hand, if the tvars are already resolved, then we can do something + interesting... *) + begin + match (Context.find_graph cx id1, Context.find_graph cx id2) with + | ( (Resolved (_, t1) | FullyResolved (_, t1)), + (Resolved (_, t2) | FullyResolved (_, t2)) ) + (* Can we unify these types? Tempting, again, but annotations can refer to recursive type + definitions, and we might get into an infinite loop (which could perhaps be avoided by + a unification cache, but we'd rather not cache if we can get away with it). + + The alternative is to do naive unification, but we must be careful. In particular, it + could cause confusing errors: recall that the naive unification of annotations goes + through repositioning over these types. + + But if we simulate the same repositioning here, we won't really save anything. For + example, these types could be essentially the same union, and repositioning them would + introduce differences in their representations that would kill other + optimizations. 
Thus, we focus on the special case where these types have the same + reason, and then do naive unification. *) + when Reason.concretize_equal + (Context.aloc_tables cx) + (reason_of_t t1) + (reason_of_t t2) -> + naive_unify cx trace ~use_op t1 t2 + | _ -> naive_unify cx trace ~use_op t1 t2 + end + | _ -> naive_unify cx trace ~use_op t1 t2 + ) - | DefT (_, FunT (_, _, funtype1)), DefT (_, FunT (_, _, funtype2)) - when List.length funtype1.params = - List.length funtype2.params -> - rec_unify cx trace ~use_op funtype1.this_t funtype2.this_t; - List.iter2 (fun (_, t1) (_, t2) -> + and unify_props cx trace ~use_op x r1 r2 p1 p2 = + let use_op = Frame (PropertyCompatibility { prop = Some x; lower = r1; upper = r2 }, use_op) in + (* If both sides are neutral fields, we can just unify once *) + match (p1, p2) with + | (Field (_, t1, Polarity.Neutral), Field (_, t2, Polarity.Neutral)) -> rec_unify cx trace ~use_op t1 t2 - ) funtype1.params funtype2.params; - rec_unify cx trace ~use_op funtype1.return_t funtype2.return_t - - | DefT (_, TypeAppT (_, c1, ts1)), DefT (_, TypeAppT (_, c2, ts2)) - when c1 = c2 && List.length ts1 = List.length ts2 -> - List.iter2 (rec_unify cx trace ~use_op) ts1 ts2 + | _ -> + (* Otherwise, unify read/write sides separately. *) + (match (Property.read_t p1, Property.read_t p2) with + | (Some t1, Some t2) -> rec_unify cx trace ~use_op t1 t2 + | _ -> ()); + (match (Property.write_t p1, Property.write_t p2) with + | (Some t1, Some t2) -> rec_unify cx trace ~use_op t1 t2 + | _ -> ()); - | _ -> - naive_unify cx trace ~use_op t1 t2 - ) + (* Error if polarity is not compatible both ways. *) + let polarity1 = Property.polarity p1 in + let polarity2 = Property.polarity p2 in + if not (Polarity.compat (polarity1, polarity2) && Polarity.compat (polarity2, polarity1)) + then + add_output + cx + ~trace + (Error_message.EPropPolarityMismatch ((r1, r2), Some x, (polarity1, polarity2), use_op)) -and unify_props cx trace ~use_op x r1 r2 p1 p2 = - let use_op = Frame (PropertyCompatibility { - prop = Some x; - lower = r1; - upper = r2; - is_sentinel = false; - }, use_op) in - - (* If both sides are neutral fields, we can just unify once *) - match p1, p2 with - | Field (_, t1, Neutral), - Field (_, t2, Neutral) -> - rec_unify cx trace ~use_op t1 t2; - | _ -> - (* Otherwise, unify read/write sides separately. *) - (match Property.read_t p1, Property.read_t p2 with - | Some t1, Some t2 -> - rec_unify cx trace ~use_op t1 t2; - | _ -> ()); - (match Property.write_t p1, Property.write_t p2 with - | Some t1, Some t2 -> - rec_unify cx trace ~use_op t1 t2; - | _ -> ()); - (* Error if polarity is not compatible both ways. *) - let polarity1 = Property.polarity p1 in - let polarity2 = Property.polarity p2 in - if not ( - Polarity.compat (polarity1, polarity2) && - Polarity.compat (polarity2, polarity1) - ) then - add_output cx ~trace (FlowError.EPropPolarityMismatch - ((r1, r2), Some x, (polarity1, polarity2), use_op)) - -(* If some property `x` exists in one object but not another, ensure the + (* If some property `x` exists in one object but not another, ensure the property is compatible with a dictionary, or error if none. 
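(* Editor's aside: unify_props above accepts two properties only when each
   polarity tolerates the other, checking compatibility in both directions. A
   minimal standalone model of that check; it mirrors the intent of
   Polarity.compat, not necessarily its exact definition: *)
type polarity =
  | Positive (* read-only / covariant *)
  | Negative (* write-only / contravariant *)
  | Neutral  (* readable and writable / invariant *)

let compat = function
  | (Neutral, _) | (Positive, Positive) | (Negative, Negative) -> true
  | _ -> false

let unifiable p1 p2 = compat (p1, p2) && compat (p2, p1)

let () =
  assert (unifiable Neutral Neutral);
  assert (unifiable Positive Positive);
  assert (not (unifiable Positive Neutral));
  assert (not (unifiable Positive Negative))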
*) -and unify_prop_with_dict cx trace ~use_op x p prop_obj_reason dict_reason dict = - (* prop_obj_reason: reason of the object containing the prop + and unify_prop_with_dict cx trace ~use_op x p prop_obj_reason dict_reason dict = + (* prop_obj_reason: reason of the object containing the prop dict_reason: reason of the object potentially containing a dictionary prop_reason: reason of the prop itself *) - let prop_reason = replace_reason_const (RProperty (Some x)) prop_obj_reason in - match dict with - | Some { key; value; dict_polarity; _ } -> - rec_flow cx trace (string_key x prop_reason, UseT ( - Frame (IndexerKeyCompatibility {lower = dict_reason; upper = prop_obj_reason}, use_op), - key - )); - let p2 = Field (None, value, dict_polarity) in - unify_props cx trace ~use_op x prop_obj_reason dict_reason p p2 - | None -> - let use_op = Frame (PropertyCompatibility { - prop = Some x; - lower = dict_reason; - upper = prop_obj_reason; - is_sentinel = false; - }, use_op) in - let err = FlowError.EPropNotFound (Some x, (prop_reason, dict_reason), use_op) in - add_output cx ~trace err - -(* TODO: Unification between concrete types is still implemented as + let prop_reason = replace_desc_reason (RProperty (Some x)) prop_obj_reason in + match dict with + | Some { key; value; dict_polarity; _ } -> + rec_flow + cx + trace + ( string_key x prop_reason, + UseT + ( Frame + (IndexerKeyCompatibility { lower = dict_reason; upper = prop_obj_reason }, use_op), + key ) ); + let p2 = Field (None, value, dict_polarity) in + unify_props cx trace ~use_op x prop_obj_reason dict_reason p p2 + | None -> + let use_op = + Frame + ( PropertyCompatibility { prop = Some x; lower = dict_reason; upper = prop_obj_reason }, + use_op ) + in + let err = Error_message.EPropNotFound (Some x, (prop_reason, dict_reason), use_op) in + add_output cx ~trace err + + (* TODO: Unification between concrete types is still implemented as bidirectional flows. This means that the destructuring work is duplicated, and we're missing some opportunities for nested unification. *) + and naive_unify cx trace ~use_op t1 t2 = + rec_flow_t cx trace ~use_op (t1, t2); + rec_flow_t cx trace ~use_op:(unify_flip use_op) (t2, t1) -and naive_unify cx trace ~use_op t1 t2 = - rec_flow_t cx trace ~use_op (t1, t2); - rec_flow_t cx trace ~use_op:(unify_flip use_op) (t2, t1) - -(* mutable sites on parent values (i.e. object properties, + (* mutable sites on parent values (i.e. object properties, array elements) must be typed invariantly when a value flows to the parent, unless the incoming value is fresh, in which case covariant typing is sound (since no alias will break if the subtyped child value is replaced by a non-subtyped value *) -and flow_to_mutable_child cx trace use_op fresh t1 t2 = - if fresh - then rec_flow cx trace (t1, UseT (use_op, t2)) - else rec_unify cx trace ~use_op t1 t2 + and flow_to_mutable_child cx trace use_op fresh t1 t2 = + if fresh then + rec_flow cx trace (t1, UseT (use_op, t2)) + else + rec_unify cx trace ~use_op t1 t2 -(* Subtyping of arrays is complicated by tuples. Currently, there are three + (* Subtyping of arrays is complicated by tuples. Currently, there are three different kinds of types, all encoded by arrays: 1. 
Array (array type) @@ -9981,338 +10509,355 @@ and flow_to_mutable_child cx trace use_op fresh t1 t2 = * [T1] ~> Array[U1, U2] checks T1 ~> U1 * Array[T1, T2] ~> Array[U1, U2] checks [T1, T2] ~> Array[U1, U2] -*) -and array_flow cx trace use_op lit1 r1 ?(index=0) = function - (* empty array / array literal / tuple flowing to array / array literal / + *) + and array_flow cx trace use_op lit1 r1 ?(index = 0) = function + (* empty array / array literal / tuple flowing to array / array literal / tuple (includes several cases, analyzed below) *) - | [], e1, _, e2 -> - (* if lower bound is an empty array / array literal *) - if index = 0 then - (* general element1 = general element2 *) - flow_to_mutable_child cx trace use_op lit1 e1 e2 + | ([], e1, _, e2) -> + (* if lower bound is an empty array / array literal *) + if index = 0 then + (* general element1 = general element2 *) + flow_to_mutable_child cx trace use_op lit1 e1 e2 (* otherwise, lower bound is an empty tuple (nothing to do) *) - (* non-empty array literal / tuple ~> empty array / array literal / tuple *) - | _, e1, [], e2 -> - (* general element1 < general element2 *) - rec_flow cx trace (e1, UseT (use_op, e2)) - - (* non-empty array literal / tuple ~> non-empty array literal / tuple *) - | t1 :: ts1, e1, t2 :: ts2, e2 -> - (* specific element1 = specific element2 *) - flow_to_mutable_child cx trace use_op lit1 t1 t2; - array_flow cx trace use_op lit1 r1 ~index:(index+1) (ts1,e1, ts2,e2) - -(* TODO: either ensure that array_unify is the same as array_flow both ways, or + (* non-empty array literal / tuple ~> empty array / array literal / tuple *) + | (_, e1, [], e2) -> + (* general element1 < general element2 *) + rec_flow cx trace (e1, UseT (use_op, e2)) + (* non-empty array literal / tuple ~> non-empty array literal / tuple *) + | (t1 :: ts1, e1, t2 :: ts2, e2) -> + (* specific element1 = specific element2 *) + flow_to_mutable_child cx trace use_op lit1 t1 t2; + array_flow cx trace use_op lit1 r1 ~index:(index + 1) (ts1, e1, ts2, e2) + + (* TODO: either ensure that array_unify is the same as array_flow both ways, or document why not. 
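(* Editor's aside: a standalone mirror of the pairing that array_flow above
   performs, with the mutability/freshness logic reduced to two callbacks.
   `relate` stands in for the invariant check on paired elements and `flow`
   for the covariant check on general element types; both names are
   illustrative only. *)
let rec array_flow_sketch relate flow ?(index = 0) = function
  | ([], e1, _, e2) ->
    (* lower bound has no known element types left: if it never had any
       (index = 0), relate the general element types *)
    if index = 0 then relate e1 e2
  | (_, e1, [], e2) ->
    (* upper bound has no known element types left: fall back to e1 ~> e2 *)
    flow e1 e2
  | (t1 :: ts1, e1, t2 :: ts2, e2) ->
    relate t1 t2;
    array_flow_sketch relate flow ~index:(index + 1) (ts1, e1, ts2, e2)

let () =
  let relate a b = Printf.printf "%s ~ %s\n" a b in
  let flow a b = Printf.printf "%s => %s\n" a b in
  array_flow_sketch relate flow
    (["number"; "string"], "number | string", ["number"; "string"], "mixed")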
*) -(* array helper *) -and array_unify cx trace ~use_op = function - | [], e1, [], e2 -> - (* general element1 = general element2 *) - rec_unify cx trace ~use_op e1 e2 - - | ts1, _, [], e2 - | [], e2, ts1, _ -> - (* specific element1 = general element2 *) - List.iter (fun t1 -> rec_unify cx trace ~use_op t1 e2) ts1 - - | t1 :: ts1, e1, t2 :: ts2, e2 -> - (* specific element1 = specific element2 *) - rec_unify cx trace ~use_op t1 t2; - array_unify cx trace ~use_op (ts1, e1, ts2, e2) - - -(*******************************************************************) -(* subtyping a sequence of arguments with a sequence of parameters *) -(*******************************************************************) - -(* Process spread arguments and then apply the arguments to the parameters *) -and multiflow_call cx trace ~use_op reason_op args ft = - let resolve_to = ResolveSpreadsToMultiflowCallFull (mk_id (), ft) in - resolve_call_list cx ~trace ~use_op reason_op args resolve_to - -(* Process spread arguments and then apply the arguments to the parameters *) -and multiflow_subtype cx trace ~use_op reason_op args ft = - let resolve_to = ResolveSpreadsToMultiflowSubtypeFull (mk_id (), ft) in - resolve_call_list cx ~trace ~use_op reason_op args resolve_to - -(* Like multiflow_partial, but if there is no spread argument, it flows VoidT to - * all unused parameters *) -and multiflow_full - cx ~trace ~use_op reason_op ~is_strict ~def_reason - ~spread_arg ~rest_param (arglist, parlist) = - - let unused_parameters, _ = multiflow_partial - cx ~trace ~use_op reason_op ~is_strict ~def_reason - ~spread_arg ~rest_param (arglist, parlist) in - - let _ = List.fold_left (fun n (_, param) -> - let use_op = Frame (FunMissingArg { n; op = reason_op; def = def_reason }, use_op) in - rec_flow cx trace (VoidT.why reason_op, UseT (use_op, param)); - n + 1 - ) ((List.length parlist - List.length unused_parameters) + 1) unused_parameters in - - () - -(* This is a tricky function. The simple description is that it flows all the - * arguments to all the parameters. This function is used by - * Function.prototype.apply, so after the arguments are applied, it returns the - * unused parameters. - * - * It is a little trickier in that there may be a single spread argument after - * all the regular arguments. There may also be a rest parameter. - *) -and multiflow_partial = - let rec multiflow_non_spreads cx ~use_op n (arglist, parlist) = - match (arglist, parlist) with - (* Do not complain on too many arguments. 
+ (* array helper *) + and array_unify cx trace ~use_op = function + | ([], e1, [], e2) -> + (* general element1 = general element2 *) + rec_unify cx trace ~use_op e1 e2 + | (ts1, _, [], e2) + | ([], e2, ts1, _) -> + (* specific element1 = general element2 *) + List.iter (fun t1 -> rec_unify cx trace ~use_op t1 e2) ts1 + | (t1 :: ts1, e1, t2 :: ts2, e2) -> + (* specific element1 = specific element2 *) + rec_unify cx trace ~use_op t1 t2; + array_unify cx trace ~use_op (ts1, e1, ts2, e2) + + (*******************************************************************) + (* subtyping a sequence of arguments with a sequence of parameters *) + (*******************************************************************) + + (* Process spread arguments and then apply the arguments to the parameters *) + and multiflow_call cx trace ~use_op reason_op args ft = + let resolve_to = ResolveSpreadsToMultiflowCallFull (mk_id (), ft) in + resolve_call_list cx ~trace ~use_op reason_op args resolve_to + + (* Process spread arguments and then apply the arguments to the parameters *) + and multiflow_subtype cx trace ~use_op reason_op args ft = + let resolve_to = ResolveSpreadsToMultiflowSubtypeFull (mk_id (), ft) in + resolve_call_list cx ~trace ~use_op reason_op args resolve_to + + (* Like multiflow_partial, but if there is no spread argument, it flows VoidT to + * all unused parameters *) + and multiflow_full + cx ~trace ~use_op reason_op ~is_strict ~def_reason ~spread_arg ~rest_param (arglist, parlist) + = + let (unused_parameters, _) = + multiflow_partial + cx + ~trace + ~use_op + reason_op + ~is_strict + ~def_reason + ~spread_arg + ~rest_param + (arglist, parlist) + in + let _ = + List.fold_left + (fun n (_, param) -> + let use_op = Frame (FunMissingArg { n; op = reason_op; def = def_reason }, use_op) in + rec_flow cx trace (VoidT.why reason_op |> with_trust bogus_trust, UseT (use_op, param)); + n + 1) + (List.length parlist - List.length unused_parameters + 1) + unused_parameters + in + () + + (* This is a tricky function. The simple description is that it flows all the + * arguments to all the parameters. This function is used by + * Function.prototype.apply, so after the arguments are applied, it returns the + * unused parameters. + * + * It is a little trickier in that there may be a single spread argument after + * all the regular arguments. There may also be a rest parameter. + *) + and multiflow_partial = + let rec multiflow_non_spreads cx ~use_op n (arglist, parlist) = + match (arglist, parlist) with + (* Do not complain on too many arguments. This pattern is ubiqutous and causes a lot of noise when complained about. Note: optional/rest parameters do not provide a workaround in this case. *) - | (_, []) - (* No more arguments *) - | ([], _) -> [], arglist, parlist - - | (tin::tins, (name, tout)::touts) -> - (* flow `tin` (argument) to `tout` (param). normally, `tin` is passed - through a `ReposLowerT` to make sure that the concrete type points at - the arg's location. however, if `tin` is an implicit type argument - (e.g. the `x` in `function foo(x: T)`), then don't reposition it - because implicit type args have no explicit location to point at. - instead, let it flow through transparently, so that we point at the - place that constrained the type arg. this is pretty hacky. 
*) - let tout = - let use_op = Frame (FunParam { - n; - name; - lower=(reason_of_t tin); - upper=(reason_of_t tout); - }, use_op) in - let u = UseT (use_op, tout) in - match desc_of_t tin with - | RTypeParam _ -> u - | _ -> ReposLowerT (reason_of_t tin, false, u) - in - let used_pairs, unused_arglist, unused_parlist = - multiflow_non_spreads cx ~use_op (n + 1) (tins, touts) in - (tin, tout)::used_pairs, unused_arglist, unused_parlist - in - fun cx ~trace ~use_op ~is_strict ~def_reason ~spread_arg ~rest_param - reason_op (arglist, parlist) -> - - (* Handle all the non-spread arguments and all the non-rest parameters *) - let used_pairs, unused_arglist, unused_parlist = - multiflow_non_spreads cx ~use_op 1 (arglist, parlist) in - - (* If there is a spread argument, it will consume all the unused parameters *) - let used_pairs, unused_parlist = match spread_arg with - | None -> used_pairs, unused_parlist - | Some spread_arg_elemt -> - (* The spread argument may be an empty array and to be 100% correct, we - * should flow VoidT to every remaining parameter, however we don't. This - * is consistent with how we treat arrays almost everywhere else *) - used_pairs @ List.map - (fun (_, param) -> - let use_op = Frame (FunRestParam { - lower=(reason_of_t spread_arg_elemt); - upper=(reason_of_t param); - }, use_op) in - (spread_arg_elemt, UseT (use_op, param))) - unused_parlist, - [] - + | (_, []) + (* No more arguments *) + + | ([], _) -> + ([], arglist, parlist) + | (tin :: tins, (name, tout) :: touts) -> + (* flow `tin` (argument) to `tout` (param). *) + let tout = + let use_op = + Frame (FunParam { n; name; lower = reason_of_t tin; upper = reason_of_t tout }, use_op) + in + UseT (use_op, tout) + in + let (used_pairs, unused_arglist, unused_parlist) = + multiflow_non_spreads cx ~use_op (n + 1) (tins, touts) + in + ((tin, tout) :: used_pairs, unused_arglist, unused_parlist) in - - (* If there is a rest parameter, it will consume all the unused arguments *) - begin match rest_param with - | None -> - if is_strict && Context.enforce_strict_call_arity cx - then begin match unused_arglist with - | [] -> () - | first_unused_arg :: _ -> - FlowError.EFunctionCallExtraArg ( - mk_reason RFunctionUnusedArgument (loc_of_t first_unused_arg), - def_reason, - List.length parlist, - use_op - ) - |> add_output cx ~trace - end; - (* Flow the args and params after we add the EFunctionCallExtraArg error. - * This improves speculation error reporting. *) - List.iter (rec_flow cx trace) used_pairs; - - unused_parlist, rest_param - | Some (name, loc, rest_param) -> - List.iter (rec_flow cx trace) used_pairs; - - let orig_rest_reason = repos_reason loc (reason_of_t rest_param) in - - (* We're going to build an array literal with all the unused arguments - * (and the spread argument if it exists). Then we're going to flow that - * to the rest parameter *) - let rev_elems = - List.rev_map (fun arg -> UnresolvedArg arg) unused_arglist in - - let unused_rest_param = match spread_arg with - | None -> - (* If the rest parameter is consuming N elements, then drop N elements - * from the rest parameter *) - let rest_reason = reason_of_t rest_param in - Tvar.mk_derivable_where cx rest_reason (fun tout -> - let i = List.length rev_elems in - rec_flow cx trace (rest_param, ArrRestT (use_op, orig_rest_reason, i, tout)) - ) - | Some _ -> - (* If there is a spread argument, then a tuple rest parameter will error - * anyway. So let's assume that the rest param is an array with unknown - * arity. 
Dropping elements from it isn't worth doing *) - rest_param - in - - let elems = match spread_arg with - | None -> List.rev rev_elems - | Some spread_arg_elemt -> - let reason = reason_of_t spread_arg_elemt in - let spread_array = DefT (reason, ArrT (ArrayAT (spread_arg_elemt, None))) in - List.rev_append rev_elems [ UnresolvedSpreadArg (spread_array) ] + fun cx + ~trace + ~use_op + ~is_strict + ~def_reason + ~spread_arg + ~rest_param + reason_op + (arglist, parlist) -> + (* Handle all the non-spread arguments and all the non-rest parameters *) + let (used_pairs, unused_arglist, unused_parlist) = + multiflow_non_spreads cx ~use_op 1 (arglist, parlist) in - - let arg_array_reason = replace_reason_const - (RRestArray (desc_of_reason reason_op)) reason_op in - - let arg_array = Tvar.mk_where cx arg_array_reason (fun tout -> - let reason_op = arg_array_reason in - let element_reason = replace_reason_const Reason.inferred_union_elem_array_desc reason_op in - let elem_t = Tvar.mk cx element_reason in - let resolve_to = (ResolveSpreadsToArrayLiteral (mk_id (), elem_t, tout)) in - resolve_spread_list cx ~use_op ~reason_op elems resolve_to - ) in - let () = - let use_op = Frame (FunRestParam { - lower = reason_of_t arg_array; - upper = reason_of_t rest_param; - }, use_op) in - rec_flow cx trace (arg_array, UseT (use_op, rest_param)) + (* If there is a spread argument, it will consume all the unused parameters *) + let (used_pairs, unused_parlist) = + match spread_arg with + | None -> (used_pairs, unused_parlist) + | Some spread_arg_elemt -> + (* The spread argument may be an empty array and to be 100% correct, we + * should flow VoidT to every remaining parameter, however we don't. This + * is consistent with how we treat arrays almost everywhere else *) + ( used_pairs + @ List.map + (fun (_, param) -> + let use_op = + Frame + ( FunRestParam + { lower = reason_of_t spread_arg_elemt; upper = reason_of_t param }, + use_op ) + in + (spread_arg_elemt, UseT (use_op, param))) + unused_parlist, + [] ) in - - unused_parlist, Some (name, loc, unused_rest_param) - end - -and resolve_call_list cx ~trace ~use_op reason_op args resolve_to = - let unresolved = List.map - (function - | Arg t -> UnresolvedArg t - | SpreadArg t -> UnresolvedSpreadArg t) - args in - resolve_spread_list_rec cx ~trace ~use_op ~reason_op ([], unresolved) resolve_to - -and resolve_spread_list cx ~use_op ~reason_op list resolve_to = - resolve_spread_list_rec cx ~use_op ~reason_op ([], list) resolve_to - -(* This function goes through the unresolved elements to find the next rest - * element to resolve *) -and resolve_spread_list_rec - cx ?trace ~use_op ~reason_op (resolved, unresolved) resolve_to = - match resolved, unresolved with - | resolved, [] -> - finish_resolve_spread_list - cx ?trace ~use_op ~reason_op (List.rev resolved) resolve_to - | resolved, UnresolvedArg(next)::unresolved -> + (* If there is a rest parameter, it will consume all the unused arguments *) + match rest_param with + | None -> + ( if is_strict && Context.enforce_strict_call_arity cx then + match unused_arglist with + | [] -> () + | first_unused_arg :: _ -> + Error_message.EFunctionCallExtraArg + ( mk_reason RFunctionUnusedArgument (loc_of_t first_unused_arg), + def_reason, + List.length parlist, + use_op ) + |> add_output cx ~trace ); + + (* Flow the args and params after we add the EFunctionCallExtraArg error. + * This improves speculation error reporting. 
*) + List.iter (rec_flow cx trace) used_pairs; + + (unused_parlist, rest_param) + | Some (name, loc, rest_param) -> + List.iter (rec_flow cx trace) used_pairs; + + let orig_rest_reason = repos_reason loc (reason_of_t rest_param) in + (* We're going to build an array literal with all the unused arguments + * (and the spread argument if it exists). Then we're going to flow that + * to the rest parameter *) + let rev_elems = List.rev_map (fun arg -> UnresolvedArg arg) unused_arglist in + let unused_rest_param = + match spread_arg with + | None -> + (* If the rest parameter is consuming N elements, then drop N elements + * from the rest parameter *) + let rest_reason = reason_of_t rest_param in + Tvar.mk_derivable_where cx rest_reason (fun tout -> + let i = List.length rev_elems in + rec_flow cx trace (rest_param, ArrRestT (use_op, orig_rest_reason, i, tout))) + | Some _ -> + (* If there is a spread argument, then a tuple rest parameter will error + * anyway. So let's assume that the rest param is an array with unknown + * arity. Dropping elements from it isn't worth doing *) + rest_param + in + let elems = + match spread_arg with + | None -> List.rev rev_elems + | Some spread_arg_elemt -> + let reason = reason_of_t spread_arg_elemt in + let spread_array = + DefT (reason, bogus_trust (), ArrT (ArrayAT (spread_arg_elemt, None))) + in + List.rev_append rev_elems [UnresolvedSpreadArg spread_array] + in + let arg_array_reason = + replace_desc_reason (RRestArray (desc_of_reason reason_op)) reason_op + in + let arg_array = + Tvar.mk_where cx arg_array_reason (fun tout -> + let reason_op = arg_array_reason in + let element_reason = + replace_desc_reason Reason.inferred_union_elem_array_desc reason_op + in + let elem_t = Tvar.mk cx element_reason in + ResolveSpreadsToArrayLiteral (mk_id (), elem_t, tout) + |> resolve_spread_list cx ~use_op ~reason_op elems) + in + let () = + let use_op = + Frame + ( FunRestParam { lower = reason_of_t arg_array; upper = reason_of_t rest_param }, + use_op ) + in + rec_flow cx trace (arg_array, UseT (use_op, rest_param)) + in + (unused_parlist, Some (name, loc, unused_rest_param)) + + and resolve_call_list cx ~trace ~use_op reason_op args resolve_to = + let unresolved = + List.map + (function + | Arg t -> UnresolvedArg t + | SpreadArg t -> UnresolvedSpreadArg t) + args + in + resolve_spread_list_rec cx ~trace ~use_op ~reason_op ([], unresolved) resolve_to + + and resolve_spread_list cx ~use_op ~reason_op list resolve_to = + resolve_spread_list_rec cx ~use_op ~reason_op ([], list) resolve_to + + (* This function goes through the unresolved elements to find the next rest + * element to resolve *) + and resolve_spread_list_rec cx ?trace ~use_op ~reason_op (resolved, unresolved) resolve_to = + match (resolved, unresolved) with + | (resolved, []) -> + finish_resolve_spread_list cx ?trace ~use_op ~reason_op (List.rev resolved) resolve_to + | (resolved, UnresolvedArg next :: unresolved) -> resolve_spread_list_rec cx ?trace ~use_op ~reason_op - (ResolvedArg(next)::resolved, unresolved) + (ResolvedArg next :: resolved, unresolved) resolve_to - | resolved, UnresolvedSpreadArg(next)::unresolved -> - flow_opt cx ?trace (next, ResolveSpreadT (use_op, reason_op, { - rrt_resolved = resolved; - rrt_unresolved = unresolved; - rrt_resolve_to = resolve_to; - })) - -(* Now that everything is resolved, we can construct whatever type we're trying - * to resolve to. 
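(* Editor's aside: the heart of multiflow_partial above is a positional pairing
   of arguments with parameters that also reports what was left over on each
   side, so the caller can flow void into unused parameters, error on extra
   arguments in strict mode, or feed the leftovers to a rest parameter. A
   standalone model of just that pairing (names are illustrative): *)
let rec pair_args_params args params =
  match (args, params) with
  | (_, []) | ([], _) -> ([], args, params) (* one side exhausted *)
  | (a :: args', p :: params') ->
    let (pairs, extra_args, unused_params) = pair_args_params args' params' in
    ((a, p) :: pairs, extra_args, unused_params)

let () =
  let (pairs, extra_args, unused_params) =
    pair_args_params ["1"; "\"x\""] ["a: number"; "b: string"; "c?: boolean"]
  in
  assert (List.length pairs = 2);
  assert (extra_args = []);
  assert (unused_params = ["c?: boolean"])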
*) -and finish_resolve_spread_list = - (* Turn tuple rest params into single params *) - let flatten_spread_args list = - list - |> List.fold_left (fun acc param -> match param with - | ResolvedSpreadArg (_, arrtype) -> - begin match arrtype with - | ArrayAT (_, Some tuple_types) - | TupleAT (_, tuple_types) -> - List.fold_left - (fun acc elem -> ResolvedArg(elem)::acc) - acc - tuple_types - | ArrayAT (_, None) - | ROArrayAT (_) - | EmptyAT - -> param::acc - end - | ResolvedAnySpreadArg _ - | ResolvedArg _ -> param::acc - ) [] - |> List.rev - - in - - let spread_resolved_to_any = List.exists (function - | ResolvedAnySpreadArg _ -> true - | ResolvedArg _ | ResolvedSpreadArg _ -> false) - - in - - let finish_array cx ?trace ~reason_op ~resolve_to resolved elemt tout = - (* Did `any` flow to one of the rest parameters? If so, we need to resolve - * to a type that is both a subtype and supertype of the desired type. *) - let result = if spread_resolved_to_any resolved - then match resolve_to with - (* Array is a good enough any type for arrays *) - | `Array -> DefT (reason_op, ArrT (ArrayAT (AnyT.why reason_op, None))) - (* Array literals can flow to a tuple. Arrays can't. So if the presence - * of an `any` forces us to degrade an array literal to Array then - * we might get a new error. Since introducing `any`'s shouldn't cause - * errors, this is bad. Instead, let's degrade array literals to `any` *) - | `Literal - (* There is no AnyTupleT type, so let's degrade to `any`. *) - | `Tuple -> AnyT.why reason_op - else begin - (* Spreads that resolve to tuples are flattened *) - let elems = flatten_spread_args resolved in - - let tuple_types = match resolve_to with - | `Literal - | `Tuple -> - elems - (* If no spreads are left, then this is a tuple too! *) - |> List.fold_left (fun acc elem -> - match (acc, elem) with - | None, _ -> None - | _, ResolvedSpreadArg _ -> None - | Some tuple_types, ResolvedArg t -> Some (t::tuple_types) - | _, ResolvedAnySpreadArg _ -> failwith "Should not be hit" - ) (Some []) - |> Option.map ~f:List.rev - | `Array -> None in - - (* We infer the array's general element type by looking at the type of - * every element in the array *) - let tset = List.fold_left (fun tset elem -> - let elemt = match elem with - | ResolvedSpreadArg (r, arrtype) -> elemt_of_arrtype r arrtype - | ResolvedArg elemt -> elemt - | ResolvedAnySpreadArg _ -> failwith "Should not be hit" - in - - TypeExSet.add elemt tset - ) TypeExSet.empty elems in - - (* composite elem type is an upper bound of all element types *) - (* Should the element type of the array be the union of its element types? + | (resolved, UnresolvedSpreadArg next :: unresolved) -> + flow_opt + cx + ?trace + ( next, + ResolveSpreadT + ( use_op, + reason_op, + { rrt_resolved = resolved; rrt_unresolved = unresolved; rrt_resolve_to = resolve_to } + ) ) + + (* Now that everything is resolved, we can construct whatever type we're trying + * to resolve to. 
*) + and finish_resolve_spread_list = + (* Turn tuple rest params into single params *) + let flatten_spread_args list = + list + |> Core_list.fold_left + ~f:(fun acc param -> + match param with + | ResolvedSpreadArg (_, arrtype) -> + begin + match arrtype with + | ArrayAT (_, Some tuple_types) + | TupleAT (_, tuple_types) -> + Core_list.fold_left + ~f:(fun acc elem -> ResolvedArg elem :: acc) + ~init:acc + tuple_types + | ArrayAT (_, None) + | ROArrayAT _ -> + param :: acc + end + | ResolvedAnySpreadArg _ + | ResolvedArg _ -> + param :: acc) + ~init:[] + |> Core_list.rev + in + let spread_resolved_to_any = + List.exists (function + | ResolvedAnySpreadArg _ -> true + | ResolvedArg _ + | ResolvedSpreadArg _ -> + false) + in + let finish_array cx ~use_op ?trace ~reason_op ~resolve_to resolved elemt tout = + (* Did `any` flow to one of the rest parameters? If so, we need to resolve + * to a type that is both a subtype and supertype of the desired type. *) + let result = + if spread_resolved_to_any resolved then + match resolve_to with + (* Array is a good enough any type for arrays *) + | `Array -> + DefT (reason_op, bogus_trust (), ArrT (ArrayAT (AnyT.untyped reason_op, None))) + (* Array literals can flow to a tuple. Arrays can't. So if the presence + * of an `any` forces us to degrade an array literal to Array then + * we might get a new error. Since introducing `any`'s shouldn't cause + * errors, this is bad. Instead, let's degrade array literals to `any` *) + | `Literal + (* There is no AnyTupleT type, so let's degrade to `any`. *) + + | `Tuple -> + AnyT.untyped reason_op + else + (* Spreads that resolve to tuples are flattened *) + let elems = flatten_spread_args resolved in + let tuple_types = + match resolve_to with + | `Literal + | `Tuple -> + elems + (* If no spreads are left, then this is a tuple too! *) + |> List.fold_left + (fun acc elem -> + match (acc, elem) with + | (None, _) -> None + | (_, ResolvedSpreadArg _) -> None + | (Some tuple_types, ResolvedArg t) -> Some (t :: tuple_types) + | (_, ResolvedAnySpreadArg _) -> failwith "Should not be hit") + (Some []) + |> Option.map ~f:List.rev + | `Array -> None + in + (* We infer the array's general element type by looking at the type of + * every element in the array *) + let tset = + List.fold_left + (fun tset elem -> + let elemt = + match elem with + | ResolvedSpreadArg (_, arrtype) -> elemt_of_arrtype arrtype + | ResolvedArg elemt -> elemt + | ResolvedAnySpreadArg _ -> failwith "Should not be hit" + in + TypeExSet.add elemt tset) + TypeExSet.empty + elems + in + (* composite elem type is an upper bound of all element types *) + (* Should the element type of the array be the union of its element types? No. Instead of using a union, we use an unresolved tvar to represent the least upper bound of each element type. Effectively, @@ -10336,2156 +10881,753 @@ and finish_resolve_spread_list = have to do that pinning more carefully, and using an unresolved tvar instead of a union here doesn't conflict with those plans. 
*) - TypeExSet.elements tset |> List.iter (fun t -> - flow cx (t, UseT (unknown_use, elemt))); - - match tuple_types, resolve_to with - | _, `Array -> - DefT (reason_op, ArrT (ArrayAT (elemt, None))) - | _, `Literal -> - DefT (reason_op, ArrT (ArrayAT (elemt, tuple_types))) - | Some tuple_types, `Tuple -> - DefT (reason_op, ArrT (TupleAT (elemt, tuple_types))) - | None, `Tuple -> - DefT (reason_op, ArrT (ArrayAT (elemt, None))) - end in - - flow_opt_t cx ?trace (result, tout) - in - - (* If there are no spread elements or if all the spread elements resolved to - * tuples or array literals, then this is easy. We just flatten them all. - * - * However, if we have a spread that resolved to any or to an array of - * unknown length, then we're in trouble. Basically, any remaining argument - * might flow to any remaining parameter. - *) - let flatten_call_arg = - let rec flatten r args spread resolved = - if resolved = [] - then args, spread - else match spread with - | None -> - (match resolved with - | (ResolvedArg t)::rest -> - flatten r (t::args) spread rest - | (ResolvedSpreadArg - (_, (ArrayAT (_, Some ts) | TupleAT (_, ts))))::rest -> - let args = List.rev_append ts args in - flatten r args spread rest - | ResolvedSpreadArg (r, _)::_ - | ResolvedAnySpreadArg r :: _ -> - (* We weren't able to flatten the call argument list to remove all - * spreads. This means we need to build a spread argument, with - * unknown arity. *) - let tset = TypeExSet.empty in - flatten r args (Some (Nel.one r, tset)) resolved - | [] -> failwith "Empty list already handled" - ) - | Some (spread_reasons, tset) -> - let spread_reason, elemt, rest = (match resolved with - | (ResolvedArg t)::rest -> - reason_of_t t, t, rest - | (ResolvedSpreadArg (r, arrtype))::rest -> - r, elemt_of_arrtype r arrtype, rest - | (ResolvedAnySpreadArg reason)::rest -> - reason, AnyT.why reason, rest - | [] -> failwith "Empty list already handled") + TypeExSet.elements tset |> List.iter (fun t -> flow cx (t, UseT (use_op, elemt))); + + match (tuple_types, resolve_to) with + | (_, `Array) -> DefT (reason_op, bogus_trust (), ArrT (ArrayAT (elemt, None))) + | (_, `Literal) -> DefT (reason_op, bogus_trust (), ArrT (ArrayAT (elemt, tuple_types))) + | (Some tuple_types, `Tuple) -> + DefT (reason_op, bogus_trust (), ArrT (TupleAT (elemt, tuple_types))) + | (None, `Tuple) -> DefT (reason_op, bogus_trust (), ArrT (ArrayAT (elemt, None))) + in + flow_opt_t cx ~use_op ?trace (result, tout) + in + (* If there are no spread elements or if all the spread elements resolved to + * tuples or array literals, then this is easy. We just flatten them all. + * + * However, if we have a spread that resolved to any or to an array of + * unknown length, then we're in trouble. Basically, any remaining argument + * might flow to any remaining parameter. + *) + let flatten_call_arg = + let rec flatten r args spread resolved = + if resolved = [] then + (args, spread) + else + match spread with + | None -> + (match resolved with + | ResolvedArg t :: rest -> flatten r (t :: args) spread rest + | ResolvedSpreadArg (_, (ArrayAT (_, Some ts) | TupleAT (_, ts))) :: rest -> + let args = List.rev_append ts args in + flatten r args spread rest + | ResolvedSpreadArg (r, _) :: _ + | ResolvedAnySpreadArg r :: _ -> + (* We weren't able to flatten the call argument list to remove all + * spreads. This means we need to build a spread argument, with + * unknown arity. 
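+               *
+               * For instance, given `f(...rest, 1)` with `rest: Array<string>`,
+               * we cannot tell which parameter the trailing `1` lines up with,
+               * so the whole suffix is treated as one spread whose element type
+               * covers both `string` and `number`.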
*) + let tset = TypeExSet.empty in + flatten r args (Some tset) resolved + | [] -> failwith "Empty list already handled") + | Some tset -> + let (elemt, rest) = + match resolved with + | ResolvedArg t :: rest -> (t, rest) + | ResolvedSpreadArg (_, arrtype) :: rest -> (elemt_of_arrtype arrtype, rest) + | ResolvedAnySpreadArg reason :: rest -> (AnyT.untyped reason, rest) + | [] -> failwith "Empty list already handled" + in + let tset = TypeExSet.add elemt tset in + flatten r args (Some tset) rest + in + fun cx ~use_op r resolved -> + let (args, spread) = flatten r [] None resolved in + let spread = + Option.map + ~f:(fun tset -> + let r = mk_reason RArray (aloc_of_reason r) in + Tvar.mk_where cx r (fun tvar -> + TypeExSet.elements tset |> List.iter (fun t -> flow cx (t, UseT (use_op, tvar))))) + spread in - let spread_reasons = Nel.cons spread_reason spread_reasons in - let tset = TypeExSet.add elemt tset in - flatten r args (Some (spread_reasons, tset)) rest - + (List.rev args, spread) in - fun cx r resolved -> - let args, spread = flatten r [] None resolved in - let spread = Option.map - ~f:(fun (spread_reasons, tset) -> - let last = Nel.hd spread_reasons in - let first = Nel.(hd (rev spread_reasons)) in - let loc = Loc.btwn (aloc_of_reason first |> ALoc.to_loc) (aloc_of_reason last |> ALoc.to_loc) in - let r = mk_reason RArray loc in - Tvar.mk_where cx r (fun tvar -> - TypeExSet.elements tset - |> List.iter (fun t -> flow cx (t, UseT (unknown_use, tvar))) - ) - ) - spread + (* This is used for things like Function.prototype.bind, which partially + * apply arguments and then return the new function. *) + let finish_multiflow_partial cx ?trace ~use_op ~reason_op ft call_reason resolved tout = + (* Multiflows always come out of a flow *) + let trace = + match trace with + | Some trace -> trace + | None -> failwith "All multiflows show have a trace" in - List.rev args, spread - - in - - (* This is used for things like Function.prototype.bind, which partially - * apply arguments and then return the new function. *) - let finish_multiflow_partial - cx ?trace ~use_op ~reason_op ft call_reason resolved tout = - (* Multiflows always come out of a flow *) - let trace = match trace with - | Some trace -> trace - | None -> failwith "All multiflows show have a trace" in - - let {params; rest_param; return_t; def_reason; _} = ft in - - let args, spread_arg = flatten_call_arg cx reason_op resolved in - - let params, rest_param = multiflow_partial - cx ~trace ~use_op reason_op ~is_strict:true ~def_reason ~spread_arg ~rest_param - (args, params) in - let params_names, params_tlist = List.split params in - - (* e.g. "bound function type", positioned at reason_op *) - let bound_reason = - let desc = RBound (desc_of_reason reason_op) in - replace_reason_const desc call_reason + let { params; rest_param; return_t; def_reason; _ } = ft in + let (args, spread_arg) = flatten_call_arg cx ~use_op reason_op resolved in + let (params, rest_param) = + multiflow_partial + cx + ~trace + ~use_op + reason_op + ~is_strict:true + ~def_reason + ~spread_arg + ~rest_param + (args, params) + in + let (params_names, params_tlist) = List.split params in + (* e.g. 
"bound function type", positioned at reason_op *) + let bound_reason = + let desc = RBound (desc_of_reason reason_op) in + replace_desc_reason desc call_reason + in + let def_reason = reason_op in + let funt = + DefT + ( reason_op, + bogus_trust (), + FunT + ( dummy_static bound_reason, + dummy_prototype, + mk_methodtype + dummy_this + params_tlist + return_t + ~rest_param + ~def_reason + ~params_names ) ) + in + rec_flow_t cx trace (funt, tout) in - let def_reason = reason_op in - - let funt = DefT (reason_op, FunT ( - dummy_static bound_reason, - dummy_prototype, - mk_boundfunctiontype params_tlist return_t - ~rest_param ~def_reason ~params_names - )) in - rec_flow_t cx trace (funt, tout) - - in - - (* This is used for things like function application, where all the arguments - * are applied to a function *) - let finish_multiflow_full cx ?trace ~use_op ~reason_op ~is_strict ft resolved = - (* Multiflows always come out of a flow *) - let trace = match trace with - | Some trace -> trace - | None -> failwith "All multiflows show have a trace" in - - let {params; rest_param; def_reason; _} = ft in - - let args, spread_arg = flatten_call_arg cx reason_op resolved in - multiflow_full - cx ~trace ~use_op reason_op ~is_strict ~def_reason - ~spread_arg ~rest_param (args, params) - - in - - (* Similar to finish_multiflow_full but for custom functions. *) - let finish_custom_fun_call cx ?trace ~use_op ~reason_op kind tout resolved = - (* Multiflows always come out of a flow *) - let trace = match trace with - | Some trace -> trace - | None -> failwith "All multiflows show have a trace" in - - let args, spread_arg = flatten_call_arg cx reason_op resolved in - custom_fun_call cx trace ~use_op reason_op kind args spread_arg tout - in - - (* This is used for things like Function.prototype.apply, whose second arg is - * basically a spread argument that we'd like to resolve *) - let finish_call_t cx ?trace ~use_op ~reason_op funcalltype resolved tin = - let flattened = flatten_spread_args resolved in - let call_args_tlist = List.map (function - | ResolvedArg t -> Arg t - | ResolvedSpreadArg (r, arrtype) -> SpreadArg (DefT (r, ArrT arrtype)) - | ResolvedAnySpreadArg r -> SpreadArg (AnyT.why r)) flattened in - let call_t = CallT (use_op, reason_op, { funcalltype with call_args_tlist; }) in - flow_opt cx ?trace (tin, call_t) - - in - fun cx ?trace ~use_op ~reason_op resolved resolve_to -> ( - match resolve_to with - | ResolveSpreadsToTuple (_, elem_t, tout)-> - finish_array cx ?trace ~reason_op ~resolve_to:`Tuple resolved elem_t tout - | ResolveSpreadsToArrayLiteral (_, elem_t, tout) -> - finish_array cx ?trace ~reason_op ~resolve_to:`Literal resolved elem_t tout - | ResolveSpreadsToArray (elem_t, tout) -> - finish_array cx ?trace ~reason_op ~resolve_to:`Array resolved elem_t tout - | ResolveSpreadsToMultiflowPartial (_, ft, call_reason, tout) -> - finish_multiflow_partial cx ?trace ~use_op ~reason_op ft call_reason resolved tout - | ResolveSpreadsToMultiflowCallFull (_, ft) -> - finish_multiflow_full cx ?trace ~use_op ~reason_op ~is_strict:true ft resolved - | ResolveSpreadsToMultiflowSubtypeFull (_, ft) -> - finish_multiflow_full cx ?trace ~use_op ~reason_op ~is_strict:false ft resolved - | ResolveSpreadsToCustomFunCall (_, kind, tout) -> - finish_custom_fun_call cx ?trace ~use_op ~reason_op kind tout resolved - | ResolveSpreadsToCallT (funcalltype, tin) -> - finish_call_t cx ?trace ~use_op ~reason_op funcalltype resolved tin - ) - -and perform_lookup_action cx trace propref p lreason ureason = function 
- | LookupProp (use_op, up) -> - rec_flow_p cx trace ~use_op lreason ureason propref (p, up) - | SuperProp (use_op, lp) -> - rec_flow_p cx trace ~use_op ureason lreason propref (lp, p) - | RWProp (use_op, _, tout, rw) -> - begin match rw, Property.access rw p with - (* TODO: Sam, comment repositioning logic here *) - | Read, Some t -> - let loc = aloc_of_reason ureason in - rec_flow_t cx trace (reposition cx ~trace (loc |> ALoc.to_loc) t, tout) - | Write (_, prop_t), Some t -> - rec_flow cx trace (tout, UseT (use_op, t)); - Option.iter ~f:(fun prop_t -> rec_flow_t cx trace (t, prop_t)) prop_t - | _, None -> - let r, x = match propref with - | Named (r, x) -> r, Some x - | Computed t -> reason_of_t t, None + (* This is used for things like function application, where all the arguments + * are applied to a function *) + let finish_multiflow_full cx ?trace ~use_op ~reason_op ~is_strict ft resolved = + (* Multiflows always come out of a flow *) + let trace = + match trace with + | Some trace -> trace + | None -> failwith "All multiflows show have a trace" in - add_output cx ~trace - (FlowError.EPropAccess ((r, ureason), x, Property.polarity p, rw, use_op)) - end - | MatchProp (use_op, tin) -> - begin match Property.access Read p with - | Some t -> rec_flow cx trace (tin, UseT (use_op, t)) - | None -> - let r, x = match propref with - | Named (r, x) -> r, Some x - | Computed t -> reason_of_t t, None - in - add_output cx ~trace - (FlowError.EPropAccess ((r, ureason), x, Property.polarity p, Read, use_op)) - end + let { params; rest_param; def_reason; _ } = ft in + let (args, spread_arg) = flatten_call_arg cx ~use_op reason_op resolved in + multiflow_full + cx + ~trace + ~use_op + reason_op + ~is_strict + ~def_reason + ~spread_arg + ~rest_param + (args, params) + in + (* Similar to finish_multiflow_full but for custom functions. 
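+   * "Custom functions" here are the specially typed functions (for example
+   * `Object.assign`, `idx`, `compose` and `React.createElement`) whose
+   * argument lists need the same spread resolution before their custom call
+   * logic runs.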
*) + let finish_custom_fun_call cx ?trace ~use_op ~reason_op kind tout resolved = + (* Multiflows always come out of a flow *) + let trace = + match trace with + | Some trace -> trace + | None -> failwith "All multiflows show have a trace" + in + let (args, spread_arg) = flatten_call_arg cx ~use_op reason_op resolved in + CustomFunKit.run cx trace ~use_op reason_op kind args spread_arg tout + in + (* This is used for things like Function.prototype.apply, whose second arg is + * basically a spread argument that we'd like to resolve *) + let finish_call_t cx ?trace ~use_op ~reason_op funcalltype resolved tin = + let flattened = flatten_spread_args resolved in + let call_args_tlist = + Core_list.map + ~f:(function + | ResolvedArg t -> Arg t + | ResolvedSpreadArg (r, arrtype) -> SpreadArg (DefT (r, bogus_trust (), ArrT arrtype)) + | ResolvedAnySpreadArg r -> SpreadArg (AnyT.untyped r)) + flattened + in + let call_t = CallT (use_op, reason_op, { funcalltype with call_args_tlist }) in + flow_opt cx ?trace (tin, call_t) + in + fun cx ?trace ~use_op ~reason_op resolved resolve_to -> + match resolve_to with + | ResolveSpreadsToTuple (_, elem_t, tout) -> + finish_array cx ~use_op ?trace ~reason_op ~resolve_to:`Tuple resolved elem_t tout + | ResolveSpreadsToArrayLiteral (_, elem_t, tout) -> + finish_array cx ~use_op ?trace ~reason_op ~resolve_to:`Literal resolved elem_t tout + | ResolveSpreadsToArray (elem_t, tout) -> + finish_array cx ~use_op ?trace ~reason_op ~resolve_to:`Array resolved elem_t tout + | ResolveSpreadsToMultiflowPartial (_, ft, call_reason, tout) -> + finish_multiflow_partial cx ?trace ~use_op ~reason_op ft call_reason resolved tout + | ResolveSpreadsToMultiflowCallFull (_, ft) -> + finish_multiflow_full cx ?trace ~use_op ~reason_op ~is_strict:true ft resolved + | ResolveSpreadsToMultiflowSubtypeFull (_, ft) -> + finish_multiflow_full cx ?trace ~use_op ~reason_op ~is_strict:false ft resolved + | ResolveSpreadsToCustomFunCall (_, kind, tout) -> + finish_custom_fun_call cx ?trace ~use_op ~reason_op kind tout resolved + | ResolveSpreadsToCallT (funcalltype, tin) -> + finish_call_t cx ?trace ~use_op ~reason_op funcalltype resolved tin + + and perform_lookup_action cx trace propref p target_kind lreason ureason = function + | LookupProp (use_op, up) -> rec_flow_p cx trace ~use_op lreason ureason propref (p, up) + | SuperProp (use_op, lp) -> rec_flow_p cx trace ~use_op ureason lreason propref (lp, p) + | ReadProp { use_op; obj_t = _; tout } -> + begin + match Property.read_t p with + | Some t -> + let loc = aloc_of_reason ureason in + rec_flow_t cx trace (reposition cx ~trace loc t, tout) + | None -> + let (reason_prop, prop_name) = + match propref with + | Named (r, x) -> (r, Some x) + | Computed t -> (reason_of_t t, None) + in + let msg = Error_message.EPropNotReadable { reason_prop; prop_name; use_op } in + add_output cx ~trace msg + end + | WriteProp { use_op; obj_t = _; tin; write_ctx; prop_tout; mode } -> + begin + match (Property.write_t ~ctx:write_ctx p, target_kind, mode) with + | (Some t, IndexerProperty, Delete) -> + (* Always OK to delete a property we found via an indexer *) + let void = VoidT.why (reason_of_t t) |> with_trust literal_trust in + Option.iter ~f:(fun prop_tout -> rec_flow_t cx trace (void, prop_tout)) prop_tout + | (Some t, _, _) -> + rec_flow cx trace (tin, UseT (use_op, t)); + Option.iter ~f:(fun prop_tout -> rec_flow_t cx trace (t, prop_tout)) prop_tout + | (None, _, _) -> + let (reason_prop, prop_name) = + match propref with + | Named (r, x) -> (r, Some x) + 
| Computed t -> (reason_of_t t, None) + in + let msg = Error_message.EPropNotWritable { reason_prop; prop_name; use_op } in + add_output cx ~trace msg + end + | MatchProp (use_op, tin) -> + begin + match Property.read_t p with + | Some t -> rec_flow cx trace (tin, UseT (use_op, t)) + | None -> + let (reason_prop, prop_name) = + match propref with + | Named (r, x) -> (r, Some x) + | Computed t -> (reason_of_t t, None) + in + add_output cx ~trace (Error_message.EPropNotReadable { reason_prop; prop_name; use_op }) + end -and perform_elem_action cx trace ~use_op reason_op l value = function - | ReadElem t -> - let loc = aloc_of_reason reason_op in - rec_flow_t cx trace (reposition cx ~trace (loc |> ALoc.to_loc) value, t) - | WriteElem (tin, tout) -> - rec_flow cx trace (tin, UseT (use_op, value)); - Option.iter ~f:(fun t -> rec_flow_t cx trace (l, t)) tout - | CallElem (reason_call, ft) -> - rec_flow cx trace (value, CallT (use_op, reason_call, ft)) + and perform_elem_action cx trace ~use_op ~restrict_deletes reason_op l value action = + match (action, restrict_deletes) with + | (ReadElem t, _) -> + let loc = aloc_of_reason reason_op in + rec_flow_t cx trace (reposition cx ~trace loc value, t) + | (WriteElem (tin, tout, Assign), _) + | (WriteElem (tin, tout, Delete), true) -> + rec_flow cx trace (tin, UseT (use_op, value)); + Option.iter ~f:(fun t -> rec_flow_t cx trace (l, t)) tout + | (WriteElem (tin, tout, Delete), false) -> + (* Ok to delete arbitrary elements on arrays, not OK for tuples *) + rec_flow + cx + trace + (tin, UseT (use_op, VoidT.why (reason_of_t value) |> with_trust literal_trust)); + Option.iter ~f:(fun t -> rec_flow_t cx trace (l, t)) tout + | (CallElem (reason_call, ft), _) -> rec_flow cx trace (value, CallT (use_op, reason_call, ft)) -and string_key s reason = - let key_reason = replace_reason_const (RPropertyIsAString s) reason in - DefT (key_reason, StrT (Literal (None, s))) + and string_key s reason = + let key_reason = replace_desc_reason (RPropertyIsAString s) reason in + DefT (key_reason, bogus_trust (), StrT (Literal (None, s))) -(* builtins, contd. *) + (* builtins, contd. *) + and get_builtin cx ?trace x reason = + Tvar.mk_where cx reason (fun builtin -> + let propref = Named (reason, x) in + flow_opt cx ?trace (builtins cx, GetPropT (unknown_use, reason, propref, builtin))) -and get_builtin cx ?trace x reason = - Tvar.mk_where cx reason (fun builtin -> + and lookup_builtin cx ?trace x reason strict builtin = let propref = Named (reason, x) in - flow_opt cx ?trace (builtins cx, GetPropT (unknown_use, reason, propref, builtin)) - ) - -and lookup_builtin cx ?trace x reason strict builtin = - let propref = Named (reason, x) in - let l = builtins cx in - flow_opt cx ?trace (l, - LookupT (reason, strict, [], propref, RWProp (unknown_use, l, builtin, Read))) - -and get_builtin_typeapp cx ?trace reason x ts = - typeapp (get_builtin cx ?trace x reason) ts - -(* Specialize a polymorphic class, make an instance of the specialized class. 
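-   For example, an annotation such as `Promise<string>` is handled by
-   specializing the polymorphic `Promise` class at `string` and then taking
-   an instance of the specialized class.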
*) -and mk_typeapp_instance cx ?trace ~use_op ~reason_op ~reason_tapp ?cache c ts = - let t = Tvar.mk cx reason_tapp in - flow_opt cx ?trace (c, SpecializeT (use_op, reason_op, reason_tapp, cache, Some ts, t)); - mk_instance cx ?trace reason_tapp t - -and mk_typeapp_instance_of_poly cx trace ~use_op ~reason_op ~reason_tapp id xs t ts = - let t = mk_typeapp_of_poly cx trace ~use_op ~reason_op ~reason_tapp id xs t ts in - mk_instance cx ~trace reason_tapp t - -and mk_typeapp_of_poly cx trace ~use_op ~reason_op ~reason_tapp ?cache id xs t ts = - match cache with - | Some cache -> - instantiate_poly_with_targs cx trace ~use_op ~reason_op ~reason_tapp ~cache (xs,t) ts - | None -> - let key = id, ts in - match Cache.Subst.find key with + let l = builtins cx in + flow_opt + cx + ?trace + ( l, + LookupT + ( reason, + strict, + [], + propref, + ReadProp { use_op = unknown_use; obj_t = l; tout = builtin } ) ) + + and get_builtin_typeapp cx ?trace reason x ts = + typeapp ?annot_loc:(annot_aloc_of_reason reason) (get_builtin cx ?trace x reason) ts + + (* Specialize a polymorphic class, make an instance of the specialized class. *) + and mk_typeapp_instance cx ?trace ~use_op ~reason_op ~reason_tapp ?cache c ts = + let t = Tvar.mk cx reason_tapp in + flow_opt cx ?trace (c, SpecializeT (use_op, reason_op, reason_tapp, cache, Some ts, t)); + mk_instance_raw cx ?trace reason_tapp ~reason_type:(reason_of_t c) t + + and mk_typeapp_instance_of_poly cx trace ~use_op ~reason_op ~reason_tapp id tparams_loc xs t ts = + let t = mk_typeapp_of_poly cx trace ~use_op ~reason_op ~reason_tapp id tparams_loc xs t ts in + mk_instance cx ~trace reason_tapp t + + and mk_typeapp_of_poly cx trace ~use_op ~reason_op ~reason_tapp ?cache id tparams_loc xs t ts = + match cache with + | Some cache -> + instantiate_poly_with_targs + cx + trace + ~use_op + ~reason_op + ~reason_tapp + ~cache + (tparams_loc, xs, t) + ts | None -> - let errs_ref = ref [] in - let t = instantiate_poly_with_targs cx trace ~use_op ~reason_op ~reason_tapp - ~errs_ref (xs,t) ts in - Cache.Subst.add key (!errs_ref, t); - t - | Some (errs, t) -> - errs |> List.iter (function - | `ETooManyTypeArgs (reason_arity, maximum_arity) -> - let msg = FlowError.ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity) in - add_output cx ~trace msg - | `ETooFewTypeArgs (reason_arity, maximum_arity) -> - let msg = FlowError.ETooFewTypeArgs (reason_tapp, reason_arity, maximum_arity) in - add_output cx ~trace msg - ); + let key = (id, ts) in + (match Cache.Subst.find key with + | None -> + let errs_ref = ref [] in + let t = + instantiate_poly_with_targs + cx + trace + ~use_op + ~reason_op + ~reason_tapp + ~errs_ref + (tparams_loc, xs, t) + ts + in + Cache.Subst.add key (!errs_ref, t); + t + | Some (errs, t) -> + errs + |> List.iter (function + | `ETooManyTypeArgs (reason_arity, maximum_arity) -> + let msg = + Error_message.ETooManyTypeArgs (reason_tapp, reason_arity, maximum_arity) + in + add_output cx ~trace msg + | `ETooFewTypeArgs (reason_arity, maximum_arity) -> + let msg = + Error_message.ETooFewTypeArgs (reason_tapp, reason_arity, maximum_arity) + in + add_output cx ~trace msg); + t) + + and mk_instance cx ?trace instance_reason ?use_desc c = + mk_instance_raw cx ?trace instance_reason ?use_desc ~reason_type:instance_reason c + + and mk_instance_raw cx ?trace instance_reason ?(use_desc = false) ~reason_type c = + (* Make an annotation. 
*) + let source = + Tvar.mk_where cx instance_reason (fun t -> + (* this part is similar to making a runtime value *) + flow_opt_t cx ?trace (c, DefT (reason_type, bogus_trust (), TypeT (InstanceKind, t)))) + in + AnnotT (instance_reason, source, use_desc) + (* Optimization where an union is a subset of another. Equality modulo + reasons is important for this optimization to be effective, since types + are repositioned everywhere. *) + + (** TODO: (1) Define a more general partial equality, that takes into + account unified type variables. (2) Get rid of UnionRep.quick_mem. **) + and union_optimization_guard = + (* Check if l is a subset of u. Flatten both unions and then check that each element + of l appears somewhere in u *) + let union_subtype cx rep1 rep2 = + let ts2 = Type_mapper.union_flatten cx @@ UnionRep.members rep2 in + Type_mapper.union_flatten cx @@ UnionRep.members rep1 + |> Core_list.for_all ~f:(fun t1 -> + Core_list.exists ~f:(TypeUtil.quick_subtype (Context.trust_errors cx) t1) ts2) + in + let rec union_optimization_guard_impl seen cx l u = + match (l, u) with + | (UnionT (_, rep1), UnionT (_, rep2)) -> + rep1 = rep2 + || + (* Try n log n check before n^2 check *) + begin + match (UnionRep.check_enum rep1, UnionRep.check_enum rep2) with + | (Some enums1, Some enums2) -> UnionEnumSet.subset enums1 enums2 + | (_, _) -> + (* Check if u contains l after unwrapping annots, tvars and repos types. + This is faster than the n^2 case below because it avoids flattening both + unions *) + UnionRep.members rep2 + |> Core_list.map ~f:(Type_mapper.unwrap_type cx) + |> Core_list.exists ~f:(fun u -> + (not (TypeSet.mem u seen)) + && union_optimization_guard_impl (TypeSet.add u seen) cx l u) + || union_subtype cx rep1 rep2 + end + | _ -> false + in + union_optimization_guard_impl TypeSet.empty + + and reposition_reason cx ?trace reason ?(use_desc = false) t = + reposition + cx + ?trace + (aloc_of_reason reason) + ?desc: + ( if use_desc then + Some (desc_of_reason reason) + else + None ) + ?annot_loc:(annot_aloc_of_reason reason) t -and mk_instance cx ?trace instance_reason ?(use_desc=false) c = - (* Make an annotation. 
*) - let source = Tvar.mk_where cx instance_reason (fun t -> - (* this part is similar to making a runtime value *) - flow_opt_t cx ?trace (c, DefT (instance_reason, TypeT (InstanceKind, t))) - ) in - AnnotT (instance_reason, source, use_desc) - -and reposition_reason cx ?trace reason ?(use_desc=false) t = - reposition - cx - ?trace - (aloc_of_reason reason |> ALoc.to_loc) - ?desc:(if use_desc then Some (desc_of_reason reason) else None) - ?annot_loc:(annot_loc_of_reason reason) - t - -(* set the position of the given def type from a reason *) -and reposition cx ?trace (loc: Loc.t) ?desc ?annot_loc t = - let mod_reason reason = - let reason = repos_reason loc ?annot_loc reason in - match desc with - | Some d -> replace_reason_const d reason - | None -> reason - in - let rec recurse seen = function - | OpenT (r, id) as t -> - let reason = mod_reason r in - let use_desc = Option.is_some desc in - let constraints = Context.find_graph cx id in - begin match constraints with - | Resolved t -> - (* A tvar may be resolved to a type that has special repositioning logic, + (* set the position of the given def type from a reason *) + and reposition cx ?trace (loc : ALoc.t) ?desc ?annot_loc t = + let mod_reason reason = + let reason = repos_reason loc ?annot_loc reason in + match desc with + | Some d -> replace_desc_new_reason d reason + | None -> reason + in + let rec recurse seen = function + | OpenT (r, id) as t -> + let reason = mod_reason r in + let use_desc = Option.is_some desc in + let constraints = Context.find_graph cx id in + begin + match constraints with + (* TODO: In the FullyResolved case, repositioning will cause us to "lose" + the fully resolved status. We should be able to preserve it. *) + | Resolved (use_op, t) + | FullyResolved (use_op, t) -> + (* A tvar may be resolved to a type that has special repositioning logic, like UnionT. We want to recurse to pick up that logic, but must be careful as the union may refer back to the tvar itself, causing a loop. To break the loop, we pass down a map of "already seen" tvars. *) - (match IMap.get id seen with - | Some t -> t - | None -> - (* Create a fresh tvar which can be passed in `seen` *) - let mk_tvar_where = if is_derivable_reason r - then Tvar.mk_derivable_where - else Tvar.mk_where - in - mk_tvar_where cx reason (fun tvar -> - let t' = recurse (IMap.add id tvar seen) t in - (* All `t` in `Resolved t` are concrete. Because `t` is a concrete + (match IMap.get id seen with + | Some t -> t + | None -> + (* Create a fresh tvar which can be passed in `seen` *) + let mk_tvar_where = + if is_derivable_reason r then + Tvar.mk_derivable_where + else + Tvar.mk_where + in + (* The resulting tvar should be fully resolved if this one is *) + let fully_resolved = + match constraints with + | Resolved _ -> false + | FullyResolved _ -> true + | Unresolved _ -> assert_false "handled below" + in + mk_tvar_where cx reason (fun tvar -> + (* All `t` in `Resolved (_, t)` are concrete. Because `t` is a concrete type, `t'` is also necessarily concrete (i.e., reposition preserves open -> open, concrete -> concrete). The unification below thus results in resolving `tvar` to `t'`, so we end up with a resolved tvar whenever we started with one. *) - unify_opt cx ?trace ~unify_any:true tvar t'; - )) - | _ -> - (* Try to re-use an already created repositioning tvar. 
+ let t' = recurse (IMap.add id tvar seen) t in + (* resolve_id requires a trace param *) + let trace = + match trace with + | None -> Trace.unit_trace tvar (UseT (use_op, t')) + | Some trace -> + let max = Context.max_trace_depth cx in + Trace.rec_trace ~max tvar (UseT (use_op, t')) trace + in + let (_, id) = open_tvar tvar in + resolve_id cx trace ~use_op ~fully_resolved id t')) + | Unresolved _ -> + (* Try to re-use an already created repositioning tvar. See repos_cache.ml for details. *) - match Repos_cache.find id reason !Cache.repos_cache with - | Some t -> t - | None -> - let mk_tvar_where = if is_derivable_reason r - then Tvar.mk_derivable_where - else Tvar.mk_where - in - mk_tvar_where cx reason (fun tvar -> - Cache.(repos_cache := Repos_cache.add reason t tvar !repos_cache); - flow_opt cx ?trace (t, ReposLowerT (reason, use_desc, UseT (unknown_use, tvar))) - ) - end - | EvalT (root, defer_use_t, id) as t -> - (* Modifying the reason of `EvalT`, as we do for other types, is not + (match Repos_cache.find id reason !Cache.repos_cache with + | Some t -> t + | None -> + let mk_tvar_where = + if is_derivable_reason r then + Tvar.mk_derivable_where + else + Tvar.mk_where + in + mk_tvar_where cx reason (fun tvar -> + Cache.(repos_cache := Repos_cache.add reason t tvar !repos_cache); + flow_opt cx ?trace (t, ReposLowerT (reason, use_desc, UseT (unknown_use, tvar))))) + end + | EvalT (root, defer_use_t, id) as t -> + (* Modifying the reason of `EvalT`, as we do for other types, is not enough, since it will only affect the reason of the resulting tvar. Instead, repositioning a `EvalT` should simulate repositioning the resulting tvar, i.e., flowing repositioned *lower bounds* to the resulting tvar. (Another way of thinking about this is that a `EvalT` is just as transparent as its resulting tvar.) *) - let defer_use_t = mod_reason_of_defer_use_t mod_reason defer_use_t in - let reason = reason_of_defer_use_t defer_use_t in - let use_desc = Option.is_some desc in - begin match Cache.Eval.find_repos root defer_use_t id with - | Some tvar -> tvar - | None -> - Tvar.mk_where cx reason (fun tvar -> - Cache.Eval.add_repos root defer_use_t id tvar; - flow_opt cx ?trace (t, ReposLowerT (reason, use_desc, UseT (unknown_use, tvar))) - ) - end - | DefT (r, MaybeT t) -> - (* repositions both the MaybeT and the nested type. MaybeT represets `?T`. + let defer_use_t = mod_reason_of_defer_use_t mod_reason defer_use_t in + let reason = reason_of_defer_use_t defer_use_t in + let use_desc = Option.is_some desc in + begin + match Cache.Eval.find_repos root defer_use_t id with + | Some tvar -> tvar + | None -> + Tvar.mk_where cx reason (fun tvar -> + Cache.Eval.add_repos root defer_use_t id tvar; + flow_opt cx ?trace (t, ReposLowerT (reason, use_desc, UseT (unknown_use, tvar)))) + end + | MaybeT (r, t) -> + (* repositions both the MaybeT and the nested type. MaybeT represets `?T`. elsewhere, when we decompose into T | NullT | VoidT, we use the reason of the MaybeT for NullT and VoidT but don't reposition `t`, so that any errors on the NullT or VoidT point at ?T, but errors on the T point at T. 
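
           (So for an annotation like `?number`, complaints about `null` or
           `void` are reported against the whole `?number`, while complaints
           about the inner type are reported against `number` itself.)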
*) - let r = mod_reason r in - DefT (r, MaybeT (recurse seen t)) - | DefT (r, OptionalT t) -> - let r = mod_reason r in - DefT (r, OptionalT (recurse seen t)) - | DefT (r, UnionT rep) -> - let r = mod_reason r in - let rep = UnionRep.ident_map (recurse seen) rep in - DefT (r, UnionT rep) - | OpaqueT (r, opaquetype) -> - let r = mod_reason r in - OpaqueT (r, { opaquetype with - underlying_t = OptionUtils.ident_map (recurse seen) opaquetype.underlying_t; - super_t = OptionUtils.ident_map (recurse seen) opaquetype.super_t; }) - | ExactT (r, t) -> - let r = mod_reason r in - ExactT (r, recurse seen t) - | t -> - mod_reason_of_t mod_reason t - in - recurse IMap.empty t + let r = mod_reason r in + MaybeT (r, recurse seen t) + | OptionalT (r, t) -> + let r = mod_reason r in + OptionalT (r, recurse seen t) + | UnionT (r, rep) -> + let r = mod_reason r in + let rep = UnionRep.ident_map (recurse seen) rep in + UnionT (r, rep) + | OpaqueT (r, opaquetype) -> + let r = mod_reason r in + OpaqueT + ( r, + { + opaquetype with + underlying_t = OptionUtils.ident_map (recurse seen) opaquetype.underlying_t; + super_t = OptionUtils.ident_map (recurse seen) opaquetype.super_t; + } ) + | ExactT (r, t) -> + let r = mod_reason r in + ExactT (r, recurse seen t) + | t -> mod_reason_of_t mod_reason t + in + recurse IMap.empty t -(* Given the type of a value v, return the type term representing the `typeof v` + (* Given the type of a value v, return the type term representing the `typeof v` annotation expression. If the type of v is a tvar, we need to take extra care. Annotations are designed to constrain types, and therefore should not themselves grow when used. *) -and mk_typeof_annotation cx ?trace reason ?(use_desc=false) t = - let source = match t with - | OpenT _ -> - (* Ensure that `source` is a 0->1 type by creating a tvar that resolves to + and mk_typeof_annotation cx ?trace reason ?(use_desc = false) t = + let source = + match t with + | OpenT _ -> + (* Ensure that `source` is a 0->1 type by creating a tvar that resolves to the first lower bound. If there are multiple lower bounds, the typeof itself is an error. *) - Tvar.mk_where cx reason (fun t' -> - flow_opt cx ?trace (t, BecomeT (reason, t'))) - | _ -> - (* If this is not a tvar, then it should be 0->1 (see TODO). Note that + Tvar.mk_where cx reason (fun t' -> flow_opt cx ?trace (t, BecomeT (reason, t'))) + | _ -> + (* If this is not a tvar, then it should be 0->1 (see TODO). Note that BoundT types potentially appear unsubstituted at this point, so we can't emit constraints even if we wanted to. *) - (* TODO: Even in this case, the type might recursively include tvars, which + (* TODO: Even in this case, the type might recursively include tvars, which allows them to widen unexpectedly and may cause unpreditable behavior. 
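
           In other words, a `typeof v` annotation could then end up wider than
           what `v` held where the annotation was written, whereas the intent,
           as for any annotation, is that it stays fixed. A rough sketch:

             let v = 42;
             type T = typeof v;   // T is meant to stay pinned to `number`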
*) - t - in - AnnotT (annot_reason reason, source, use_desc) - -and get_builtin_type cx ?trace reason ?(use_desc=false) x = - let t = get_builtin cx ?trace x reason in - mk_instance cx ?trace reason ~use_desc t - -and get_builtin_prop_type cx ?trace reason tool = - let x = React.PropType.(match tool with - | ArrayOf -> "React$PropTypes$arrayOf" - | InstanceOf -> "React$PropTypes$instanceOf" - | ObjectOf -> "React$PropTypes$objectOf" - | OneOf -> "React$PropTypes$oneOf" - | OneOfType -> "React$PropTypes$oneOfType" - | Shape -> "React$PropTypes$shape" - ) in - get_builtin_type cx ?trace reason x - -and instantiate_poly_t cx t = function - | None -> (* nothing to do *) t - | Some types -> match t with - | DefT (_, PolyT (type_params, t_, _)) -> ( - try - let subst_map = List.fold_left2 (fun acc {name; _} type_ -> - SMap.add name type_ acc - ) SMap.empty type_params types in - subst cx subst_map t_ - with _ -> - prerr_endline "Instantiating poly type failed"; - t - ) - | DefT (_, (AnyT | AnyObjT)) - | DefT (_, (TypeT (_, DefT (_, (AnyT | AnyObjT))))) -> - t - | _ -> - assert_false ("unexpected args passed to instantiate_poly_t: " ^ (string_of_ctor t)) - -and instantiate_type t = - match t with - | ThisClassT (_, t) | DefT (_, ClassT t) -> t - | _ -> AnyT.why (reason_of_t t) (* ideally, assert false *) - -and call_args_iter f = List.iter (function Arg t | SpreadArg t -> f t) - -(* There's a lot of code that looks at a call argument list and tries to do - * something with one or two arguments. Usually this code assumes that the - * argument is not a spread argument. This utility function helps with that *) -and extract_non_spread cx ~trace = function -| Arg t -> t -| SpreadArg arr -> - let reason = reason_of_t arr in - let loc = loc_of_t arr in - add_output cx ~trace (FlowError.(EUnsupportedSyntax (loc, SpreadArgument))); - AnyT.why reason - -(** TODO: this should rather be moved close to ground_type_impl/resolve_type - etc. but Ocaml name resolution rules make that require a lot more moving - code around. 
**) -and resolve_builtin_class cx ?trace = function - | DefT (reason, BoolT _) -> - let bool_t = get_builtin_type cx ?trace reason "Boolean" in - resolve_type cx bool_t - | DefT (reason, NumT _) -> - let num_t = get_builtin_type cx ?trace reason "Number" in - resolve_type cx num_t - | DefT (reason, StrT _) -> - let string_t = get_builtin_type cx ?trace reason "String" in - resolve_type cx string_t - | DefT (reason, ArrT arrtype) -> - let builtin, elemt = match arrtype with - | ArrayAT (elemt, _) -> get_builtin cx ?trace "Array" reason, elemt - | TupleAT (elemt, _) - | ROArrayAT (elemt) -> get_builtin cx ?trace "$ReadOnlyArray" reason, elemt - | EmptyAT -> get_builtin cx ?trace "$ReadOnlyArray" reason, DefT (reason, EmptyT) + t + in + AnnotT (annot_reason reason, source, use_desc) + + and get_builtin_type cx ?trace reason ?(use_desc = false) x = + let t = get_builtin cx ?trace x reason in + mk_instance cx ?trace reason ~use_desc t + + and get_builtin_prop_type cx ?trace reason tool = + let x = + React.PropType.( + match tool with + | ArrayOf -> "React$PropTypes$arrayOf" + | InstanceOf -> "React$PropTypes$instanceOf" + | ObjectOf -> "React$PropTypes$objectOf" + | OneOf -> "React$PropTypes$oneOf" + | OneOfType -> "React$PropTypes$oneOfType" + | Shape -> "React$PropTypes$shape") in - let array_t = resolve_type cx builtin in - let array_t = instantiate_poly_t cx array_t (Some [elemt]) in - instantiate_type array_t - | t -> - t - -and set_builtin cx ?trace x t = - let reason = builtin_reason (RCustom x) in - let propref = Named (reason, x) in - flow_opt cx ?trace (builtins cx, SetPropT (unknown_use, reason, propref, Normal, t, None)) - -(* Wrapper functions around __flow that manage traces. Use these functions for + get_builtin_type cx ?trace reason x + + and call_args_iter f = + List.iter (function + | Arg t + | SpreadArg t + -> f t) + + (* There's a lot of code that looks at a call argument list and tries to do + * something with one or two arguments. Usually this code assumes that the + * argument is not a spread argument. This utility function helps with that *) + and extract_non_spread cx ~trace = function + | Arg t -> t + | SpreadArg arr -> + let reason = reason_of_t arr in + let loc = loc_of_t arr in + add_output cx ~trace Error_message.(EUnsupportedSyntax (loc, SpreadArgument)); + AnyT.error reason + + and set_builtin cx ?trace x t = + let reason = builtin_reason (RCustom x) in + let propref = Named (reason, x) in + flow_opt + cx + ?trace + (builtins cx, SetPropT (unknown_use, reason, propref, Assign, Normal, t, None)) + + (* Wrapper functions around __flow that manage traces. Use these functions for all recursive calls in the implementation of __flow. *) -(* Call __flow while concatenating traces. Typically this is used in code that + (* Call __flow while concatenating traces. Typically this is used in code that propagates bounds across type variables, where nothing interesting is going on other than concatenating subtraces to make longer traces to describe transitive data flows *) -and join_flow cx ts (t1, t2) = - __flow cx (t1, t2) (Trace.concat_trace ts) + and join_flow cx ts (t1, t2) = __flow cx (t1, t2) (Trace.concat_trace ts) -(* Call __flow while embedding traces. Typically this is used in code that + (* Call __flow while embedding traces. 
Typically this is used in code that simplifies a constraint to generate subconstraints: the current trace is "pushed" when recursing into the subconstraints, so that when we finally hit an error and walk back, we can know why the particular constraints that caused the immediate error were generated. *) -and rec_flow cx trace (t1, t2) = - let max = Context.max_trace_depth cx in - __flow cx (t1, t2) (Trace.rec_trace ~max t1 t2 trace) - -and rec_flow_t cx trace ?(use_op=unknown_use) (t1, t2) = - rec_flow cx trace (t1, UseT (use_op, t2)) - -and flow_opt_p cx ?trace ~use_op ~report_polarity lreason ureason propref = - function - (* unification cases *) - | Field (_, lt, Neutral), - Field (_, ut, Neutral) -> - unify_opt cx ?trace ~use_op lt ut - (* directional cases *) - | lp, up -> - let x = match propref with Named (_, x) -> Some x | Computed _ -> None in - (match Property.read_t lp, Property.read_t up with - | Some lt, Some ut -> - flow_opt cx ?trace (lt, UseT (use_op, ut)) - | None, Some _ when report_polarity -> - add_output cx ?trace (FlowError.EPropPolarityMismatch ( - (lreason, ureason), x, - (Property.polarity lp, Property.polarity up), - use_op)) - | _ -> ()); - (match Property.write_t lp, Property.write_t up with - | Some lt, Some ut -> - flow_opt cx ?trace (ut, UseT (use_op, lt)) - | None, Some _ when report_polarity -> - add_output cx ?trace (FlowError.EPropPolarityMismatch ( - (lreason, ureason), x, - (Property.polarity lp, Property.polarity up), - use_op)) - | _ -> ()) - -and rec_flow_p cx trace ?(use_op=unknown_use) ?(report_polarity=true) = - flow_opt_p cx ~trace ~use_op ~report_polarity - -(* Ideally this function would not be required: either we call `flow` from + and rec_flow cx trace (t1, t2) = + let max = Context.max_trace_depth cx in + __flow cx (t1, t2) (Trace.rec_trace ~max t1 t2 trace) + + and rec_flow_t cx trace ?(use_op = unknown_use) (t1, t2) = + rec_flow cx trace (t1, UseT (use_op, t2)) + + and flow_opt_p cx ?trace ~use_op ~report_polarity lreason ureason propref = function + (* unification cases *) + | (Field (_, lt, Polarity.Neutral), Field (_, ut, Polarity.Neutral)) -> + unify_opt cx ?trace ~use_op lt ut + (* directional cases *) + | (lp, up) -> + let x = + match propref with + | Named (_, x) -> Some x + | Computed _ -> None + in + (match (Property.read_t lp, Property.read_t up) with + | (Some lt, Some ut) -> flow_opt cx ?trace (lt, UseT (use_op, ut)) + | (None, Some _) when report_polarity -> + add_output + cx + ?trace + (Error_message.EPropPolarityMismatch + ((lreason, ureason), x, (Property.polarity lp, Property.polarity up), use_op)) + | _ -> ()); + (match (Property.write_t lp, Property.write_t up) with + | (Some lt, Some ut) -> flow_opt cx ?trace (ut, UseT (use_op, lt)) + | (None, Some _) when report_polarity -> + add_output + cx + ?trace + (Error_message.EPropPolarityMismatch + ((lreason, ureason), x, (Property.polarity lp, Property.polarity up), use_op)) + | _ -> ()) + + and rec_flow_p cx trace ?(use_op = unknown_use) ?(report_polarity = true) = + flow_opt_p cx ~trace ~use_op ~report_polarity + + (* Ideally this function would not be required: either we call `flow` from outside without a trace (see below), or we call one of the functions above with a trace. However, there are some functions that need to call __flow, which are themselves called both from outside and inside (with or without traces), so they call this function instead. 
*) -and flow_opt cx ?trace (t1, t2) = - let trace = match trace with - | None -> Trace.unit_trace t1 t2 - | Some trace -> + and flow_opt cx ?trace (t1, t2) = + let trace = + match trace with + | None -> Trace.unit_trace t1 t2 + | Some trace -> let max = Context.max_trace_depth cx in - Trace.rec_trace ~max t1 t2 trace in - __flow cx (t1, t2) trace - -and flow_opt_t cx ?trace (t1, t2) = - flow_opt cx ?trace (t1, UseT (unknown_use, t2)) - -(* Externally visible function for subtyping. *) -(* Calls internal entry point and traps runaway recursion. *) -and flow cx (lower, upper) = - try - flow_opt cx (lower, upper) - with - | RecursionCheck.LimitExceeded trace -> - (* log and continue *) - let rl = reason_of_t lower in - let ru = reason_of_use_t upper in - let reasons = - if is_use upper - then ru, rl - else FlowError.ordered_reasons (rl, ru) - in - add_output cx ~trace (FlowError.ERecursionLimit reasons) - | ex -> - (* rethrow *) - raise ex - -and flow_t cx (t1, t2) = - flow cx (t1, UseT (unknown_use, t2)) - -and flow_p cx ?(use_op=unknown_use) lreason ureason propref props = - flow_opt_p cx ~use_op ~report_polarity:true lreason ureason propref props - -and tvar_with_constraint cx ?trace ?(derivable=false) u = - let reason = reason_of_use_t u in - let mk_tvar_where = - if derivable - then Tvar.mk_derivable_where - else Tvar.mk_where - in - mk_tvar_where cx reason (fun tvar -> - flow_opt cx ?trace (tvar, u) - ) - -(* Wrapper functions around __unify that manage traces. Use these functions for - all recursive calls in the implementation of __unify. *) - -and rec_unify cx trace ~use_op ?(unify_any=false) t1 t2 = - let max = Context.max_trace_depth cx in - __unify cx ~use_op ~unify_any t1 t2 - (Trace.rec_trace ~max t1 (UseT (use_op, t2)) trace) - -and unify_opt cx ?trace ?(use_op=unknown_use) ?(unify_any=false) t1 t2 = - let trace = match trace with - | None -> Trace.unit_trace t1 (UseT (unknown_use, t2)) - | Some trace -> - let max = Context.max_trace_depth cx in - Trace.rec_trace ~max t1 (UseT (unknown_use, t2)) trace - in - __unify cx ~use_op ~unify_any t1 t2 trace - -(* Externally visible function for unification. *) -(* Calls internal entry point and traps runaway recursion. *) -and unify cx t1 t2 = - try - unify_opt cx ~unify_any:true t1 t2 - with - | RecursionCheck.LimitExceeded trace -> - (* log and continue *) - let reasons = FlowError.ordered_reasons (reason_of_t t1, reason_of_t t2) in - add_output cx ~trace (FlowError.ERecursionLimit reasons) - | ex -> - (* rethrow *) - raise ex - -and continue cx trace t = function - | Lower (use_op, l) -> rec_flow cx trace (l, UseT (use_op, t)) - | Upper u -> rec_flow cx trace (t, u) - -and continue_repos cx trace reason ?(use_desc=false) t = function - | Lower (use_op, l) -> rec_flow cx trace (t, ReposUseT (reason, use_desc, use_op, l)) - | Upper u -> rec_flow cx trace (t, ReposLowerT (reason, use_desc, u)) - -and react_kit cx trace ~use_op reason_op l u = - React_kit.run - ~add_output - ~reposition - ~rec_flow - ~rec_flow_t - ~get_builtin - ~get_builtin_type - ~get_builtin_typeapp - ~mk_instance - ~string_key - ~mk_type_destructor - ~sealed_in_op:Obj_type.sealed_in_op - ~union_of_ts - ~filter_maybe - cx trace ~use_op reason_op l u - -and custom_fun_call cx trace ~use_op reason_op kind args spread_arg tout = match kind with - | Compose reverse -> - (* Drop the specific argument reasons since run_compose will emit CallTs - * with completely unrelated argument reasons. 
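-     * (Otherwise an error raised by one of those intermediate CallTs would be
-     * blamed on the caller's original argument expressions, which have no
-     * direct connection to the intermediate values passed between the
-     * composed functions.)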
*) - let use_op = match use_op with - | Op FunCall {op; fn; args = _} -> Op (FunCall {op; fn; args = []}) - | Op FunCallMethod {op; fn; prop; args = _} -> Op (FunCallMethod {op; fn; prop; args = []}) - | _ -> use_op - in - let tin = Tvar.mk cx reason_op in - let tvar = Tvar.mk cx reason_op in - run_compose cx trace ~use_op reason_op reverse args spread_arg tin tvar; - let funt = FunT ( - dummy_static reason_op, - dummy_prototype, - mk_functiontype reason_op [tin] ~rest_param:None ~def_reason:reason_op tvar - ) in - rec_flow_t cx trace (DefT (reason_op, funt), tout) - - | ReactCreateElement -> - (match args with - (* React.createElement(component) *) - | component::[] -> - let config = - let r = replace_reason_const RReactProps reason_op in - Obj_type.mk_with_proto - cx r ~sealed:true ~exact:true ~frozen:true (ObjProtoT r) - in - rec_flow cx trace (component, ReactKitT (use_op, reason_op, - React.CreateElement0 (false, config, ([], None), tout))) - (* React.createElement(component, config, ...children) *) - | component::config::children -> - rec_flow cx trace (component, ReactKitT (use_op, reason_op, - React.CreateElement0 (false, config, (children, spread_arg), tout))) - (* React.createElement() *) - | _ -> - (* If we don't have the arguments we need, add an arity error. *) - add_output cx ~trace (FlowError.EReactElementFunArity (reason_op, "createElement", 1))) - - | ReactCloneElement -> (match args with - (* React.cloneElement(element) *) - | element::[] -> - (* Create the expected type for our element with a fresh tvar in the - * component position. *) - let expected_element = - get_builtin_typeapp cx ~trace (reason_of_t element) - "React$Element" [Tvar.mk cx reason_op] in - (* Flow the element arg to our expected element. *) - rec_flow_t cx trace (element, expected_element); - (* Flow our expected element to the return type. *) - rec_flow_t cx trace (expected_element, tout) - (* React.cloneElement(element, config, ...children) *) - | element::config::children -> - (* Create a tvar for our component. *) - let component = Tvar.mk cx reason_op in - (* Flow the element arg to the element type we expect. *) - rec_flow_t cx trace ( - element, - get_builtin_typeapp cx ~trace reason_op - "React$Element" [component] - ); - (* Create a React element using the config and children. *) - rec_flow cx trace (component, - ReactKitT (use_op, reason_op, - React.CreateElement0 (true, config, (children, spread_arg), tout))) - (* React.cloneElement() *) - | _ -> - (* If we don't have the arguments we need, add an arity error. 
*) - add_output cx ~trace (FlowError.EReactElementFunArity (reason_op, "cloneElement", 1))) - - | ReactElementFactory component -> (match args with - (* React.createFactory(component)() *) - | [] -> - let config = - let r = replace_reason_const RReactProps reason_op in - Obj_type.mk_with_proto - cx r ~sealed:true ~exact:true ~frozen:true (ObjProtoT r) - in - rec_flow cx trace (component, - ReactKitT (use_op, reason_op, - React.CreateElement0 (false, config, ([], None), tout))) - (* React.createFactory(component)(config, ...children) *) - | config::children -> - rec_flow cx trace (component, - ReactKitT (use_op, reason_op, - React.CreateElement0 (false, config, (children, spread_arg), tout)))) - - | ObjectAssign - | ObjectGetPrototypeOf - | ObjectSetPrototypeOf - | ReactPropType _ - | ReactCreateClass - | Idx - | TypeAssertIs - | TypeAssertThrows - | TypeAssertWraps - | DebugPrint - | DebugThrow - | DebugSleep - -> failwith "implemented elsewhere" - -(* Creates the appropriate constraints for the compose() function and its - * reversed variant. *) -and run_compose cx trace ~use_op reason_op reverse fns spread_fn tin tout = - match reverse, fns, spread_fn with - (* Call the tail functions in our array first and call our head function - * last after that. *) - | false, fn::fns, _ -> - let reason = replace_reason_const (RCustom "compose intermediate value") - (reason_of_t fn) in - let tvar = Tvar.mk_where cx reason (fun tvar -> - run_compose cx trace ~use_op reason_op reverse fns spread_fn tin tvar) in - rec_flow cx trace (fn, - CallT (use_op, reason, mk_functioncalltype reason_op None [Arg tvar] tout)) - - (* If the compose function is reversed then we want to call the tail - * functions in our array after we call the head function. *) - | true, fn::fns, _ -> - let reason = replace_reason_const (RCustom "compose intermediate value") - (reason_of_t fn) in - let tvar = Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (fn, - CallT (use_op, reason, mk_functioncalltype reason_op None [Arg tin] tvar))) in - run_compose cx trace ~use_op reason_op reverse fns spread_fn tvar tout - - (* If there are no functions and no spread function then we are an identity - * function. *) - | _, [], None -> - rec_flow_t cx trace (tin, tout) - - (* Correctly implementing spreads of unknown arity for the compose function - * is a little tricky. Let's look at a couple of cases. - * - * const fn = (x: number): string => x.toString(); - * declare var fns: Array; - * const x = 42; - * compose(...fns)(x); - * - * This would be invalid. We could have 0 or 1 fn in our fns array, but 2 fn - * would be wrong because string is incompatible with number. It breaks down - * as such: - * - * 1. x = 42 - * 2. fn(x) = '42' - * 3. fn(fn(x)) is an error because '42' is not a number. - * - * To get an error in this case we would only need to call the spread - * argument twice. Now let's look at a case where things get recursive: - * - * type Fn = (O) => $PropertyType; - * declare var fns: Array; - * const x = { p: { p: 42 } }; - * compose(...fns)(x); - * - * 1. x = { p: { p: 42 } } - * 2. fn(x) = { p: 42 } - * 3. fn(fn(x)) = 42 - * 4. fn(fn(fn(x))) throws an error because the p property is not in 42. - * - * Here we would need to call fn 3 times before getting an error. Now - * consider: - * - * type Fn = (O) => $PropertyType; - * declare var fns: Array; - * type X = { p: X }; - * declare var x: X; - * compose(...fns)(x); - * - * This is valid. 
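-   * (It is valid because `X = { p: X }` makes `$PropertyType<X, 'p'>` equal
-   * to `X` itself, so however many functions `fns` turns out to contain,
-   * each application maps `X` back to `X`.)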
- * - * To implement spreads in compose functions we first add a constraint based - * on tin and tout assuming that the spread is empty. Then we emit recursive - * constraints: - * - * spread_fn(tin) ~> tout - * spread_fn(tout) ~> tin - * - * The implementation of Flow should be able to terminate these recursive - * constraints. If it doesn't then we have a bug. *) - | _, [], Some spread_fn -> - run_compose cx trace ~use_op reason_op reverse [] None tin tout; - run_compose cx trace ~use_op reason_op reverse [spread_fn] None tin tout; - run_compose cx trace ~use_op reason_op reverse [spread_fn] None tout tin - -and object_kit = - let open Object in - - (*******************************) - (* Shared Object Kit Utilities *) - (*******************************) - - let read_prop r flags x p = - let t = match Property.read_t p with - | Some t -> t - | None -> - let reason = replace_reason_const (RUnknownProperty (Some x)) r in - let t = DefT (reason, MixedT Mixed_everything) in - t - in - t, flags.exact - in - - let read_dict r {value; dict_polarity; _} = - if Polarity.compat (dict_polarity, Positive) - then value - else - let reason = replace_reason_const (RUnknownProperty None) r in - DefT (reason, MixedT Mixed_everything) - in - - (* Treat dictionaries as optional, own properties. Dictionary reads should - * be exact. TODO: Forbid writes to indexers through the photo chain. - * Property accesses which read from dictionaries normally result in a - * non-optional result, but that leads to confusing spread results. For - * example, `p` in `{...{|p:T|},...{[]:U}` should `T|U`, not `U`. *) - let get_prop r p dict = - match p, dict with - | Some _, _ -> p - | None, Some d -> Some (optional (read_dict r d), true) - | None, None -> None - in - - (* Lift a pairwise function like spread2 to a function over a resolved list *) - let merge (f: slice -> slice -> slice) = - let f' (x0: resolved) (x1: resolved) = - Nel.map_concat (fun slice1 -> - Nel.map (f slice1) x0 - ) x1 - in - let rec loop x0 = function - | [] -> x0 - | x1::xs -> loop (f' x0 x1) xs - in - fun x0 (x1,xs) -> loop (f' x0 x1) xs - in - - (*****************) - (* Object Spread *) - (*****************) - - let object_spread = - let open Object.Spread in - - (* Compute spread result: slice * slice -> slice *) - let spread2 reason (r1, props1, dict1, flags1) (r2, props2, dict2, flags2) = - let union t1 t2 = DefT (reason, UnionT (UnionRep.make t1 t2 [])) in - let merge_props (t1, own1) (t2, own2) = - let t1, opt1 = match t1 with DefT (_, OptionalT t) -> t, true | _ -> t1, false in - let t2, opt2 = match t2 with DefT (_, OptionalT t) -> t, true | _ -> t2, false in - (* An own, non-optional property definitely overwrites earlier properties. - Otherwise, the type might come from either side. *) - let t, own = - if own2 && not opt2 then t2, own2 - else union t1 t2, own1 || own2 - in - (* If either property is own, the result is non-optional unless the own - property is itself optional. Non-own implies optional (see mk_object), - so we don't need to handle those cases here. *) - let opt = - if own1 && own2 then opt1 && opt2 - else own1 && opt1 || own2 && opt2 - in - let t = if opt then optional t else t in - t, own - in - let props = SMap.merge (fun x p1 p2 -> - (* Due to width subtyping, failing to read from an inexact object does not - imply non-existence, but rather an unknown result. 
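-          For example, when an inexact `{a: number}` is spread over
-          `{b: string}`, the inexact object may turn out to have its own `b`
-          of some arbitrary type, so the `b` in the result has to account for
-          that unknown contribution rather than staying `string`.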
*) - let unknown r = - let r = replace_reason_const (RUnknownProperty (Some x)) r in - DefT (r, MixedT Mixed_everything), false - in - match get_prop r1 p1 dict1, get_prop r2 p2 dict2 with - | None, None -> None - | Some p1, Some p2 -> Some (merge_props p1 p2) - | Some p1, None -> - if flags2.exact - then Some p1 - else Some (merge_props p1 (unknown r2)) - | None, Some p2 -> - if flags1.exact - then Some p2 - else Some (merge_props (unknown r1) p2) - ) props1 props2 in - let dict = Option.merge dict1 dict2 (fun d1 d2 -> { - dict_name = None; - key = union d1.key d2.key; - value = union (read_dict r1 d1) (read_dict r2 d2); - dict_polarity = Neutral - }) in - let flags = { - frozen = flags1.frozen && flags2.frozen; - sealed = Sealed; - exact = - flags1.exact && flags2.exact && - Obj_type.sealed_in_op reason flags1.sealed && - Obj_type.sealed_in_op reason flags2.sealed; - } in - reason, props, dict, flags - in - - let spread reason = function - | x, [] -> x - | x0, x1::xs -> merge (spread2 reason) x0 (x1, xs) - in - - let mk_object cx reason target (r, props, dict, flags) = - let props = SMap.map (fun (t, own) -> - (* Spread only copies over own properties. If `not own`, then the property - might be on a proto object instead, so make the result optional. *) - let t = match t with - | DefT (_, OptionalT _) -> t - | _ -> if own then t else optional t - in - Field (None, t, Neutral) - ) props in - let id = Context.make_property_map cx props in - let proto = ObjProtoT reason in - let flags = - let exact = match target with - (* Type spread result is exact if annotated to be exact *) - | Annot { make_exact } -> make_exact - (* Value spread result is exact if all inputs are exact *) - | Value -> flags.exact - in - { sealed = Sealed; frozen = false; exact } - in - let call = None in - let t = mk_object_def_type ~reason:r ~flags ~dict ~call id proto in - (* Wrap the final type in an `ExactT` if we have an exact flag *) - if flags.exact then ExactT (reason, t) else t - in - - fun options state cx trace use_op reason tout x -> - let reason = replace_reason invalidate_rtype_alias reason in - let {todo_rev; acc} = state in - Nel.iter (fun (r, _, _, {exact; _}) -> - match options with - | Annot { make_exact } when make_exact && not exact -> - add_output cx ~trace (FlowError. - EIncompatibleWithExact ((r, reason), unknown_use)) - | _ -> () - ) x; - match todo_rev with - | [] -> - let t = match spread reason (Nel.rev (x, acc)) with - | x, [] -> mk_object cx reason options x - | x0, x1::xs -> - DefT (reason, UnionT (UnionRep.make - (mk_object cx reason options x0) - (mk_object cx reason options x1) - (List.map (mk_object cx reason options) xs))) - in - (* Intentional UnknownUse here. *) - rec_flow_t cx trace (t, tout) - | t::todo_rev -> - let tool = Resolve Next in - let state = {todo_rev; acc = x::acc} in - rec_flow cx trace (t, ObjKitT (use_op, reason, tool, Spread (options, state), tout)) - in - - (***************) - (* Object Rest *) - (***************) - - let object_rest = - let open Object.Rest in - - let optional = function - | (DefT (_, OptionalT _)) as t -> t - | t -> Type.optional t + Trace.rec_trace ~max t1 t2 trace in - - (* Subtract the second slice from the first slice and return the difference - * slice. 
The runtime implementation of this type operation is: - * - * const result = {}; - * - * for (const p in props1) { - * if (hasOwnProperty(props1, p)) { - * if (!hasOwnProperty(props2, p)) { - * result[p] = props1[p]; - * } - * } - * } - * - * The resulting object only has a property if the property is own in props1 and - * it is not an own property of props2. - *) - let rest cx trace ~use_op reason merge_mode - (r1, props1, dict1, flags1) - (r2, props2, dict2, flags2) = - let props = SMap.merge (fun k p1 p2 -> - match merge_mode, get_prop r1 p1 dict1, get_prop r2 p2 dict2, flags2.exact with - (* If the object we are using to subtract has an optional property, non-own - * property, or is inexact then we should add this prop to our result, but - * make it optional as we cannot know for certain whether or not at runtime - * the property would be subtracted. - * - * Sound subtraction also considers exactness and owness to determine - * optionality. If p2 is maybe-own then sometimes it may not be - * subtracted and so is optional. If props2 is not exact then we may - * optionally have some undocumented prop. *) - | (Sound | IgnoreExactAndOwn), - Some (t1, _), Some ((DefT (_, OptionalT _) as t2), _), _ - | Sound, - Some (t1, _), Some (t2, false), _ - | Sound, - Some (t1, _), Some (t2, _), false -> - rec_flow cx trace (t1, UseT (use_op, optional t2)); - Some (Field (None, optional t1, Neutral)) - - (* Otherwise if the object we are using to subtract has a non-optional own - * property and the object is exact then we never add that property to our - * source object. *) - | (Sound | IgnoreExactAndOwn), - None, Some (t2, _), _ -> - let reason = replace_reason_const (RUndefinedProperty k) r1 in - rec_flow cx trace (VoidT.make reason, UseT (use_op, t2)); - None - | (Sound | IgnoreExactAndOwn), - Some (t1, _), Some (t2, _), _ -> - rec_flow cx trace (t1, UseT (use_op, t2)); - None - - (* If we have some property in our first object and none in our second - * object, but our second object is inexact then we want to make our - * property optional and flow that type to mixed. *) - | Sound, - Some (t1, _), None, false -> - rec_flow cx trace (t1, UseT (use_op, MixedT.make r2)); - Some (Field (None, optional t1, Neutral)) - - (* If neither object has the prop then we don't add a prop to our - * result here. *) - | (Sound | IgnoreExactAndOwn | ReactConfigMerge), - None, None, _ - -> None - - (* If our first object has a prop and our second object does not have that - * prop then we will copy over that prop. If the first object's prop is - * non-own then sometimes we may not copy it over so we mark it - * as optional. *) - | IgnoreExactAndOwn, Some (t, _), None, _ -> Some (Field (None, t, Neutral)) - | ReactConfigMerge, Some (t, _), None, _ -> Some (Field (None, t, Positive)) - | Sound, Some (t, true), None, _ -> Some (Field (None, t, Neutral)) - | Sound, Some (t, false), None, _ -> Some (Field (None, optional t, Neutral)) - - (* React config merging is special. We are trying to solve for C - * in the equation (where ... represents spread instead of rest): - * - * {...DP, ...C} = P - * - * Where DP and P are known. Consider this case: - * - * {...{p?}, ...C} = {p} - * - * The solution for C here is {p} instead of {p?} since - * {...{p?}, ...{p?}} is {p?} instead of {p}. This is inconsistent with - * the behavior of other object rest merge modes implemented in this - * pattern match. 
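(* A self-contained sketch of the runtime behaviour in the hasOwnProperty
   loop quoted above, with plain string maps standing in for Flow's prop
   maps; the static rules above then decide, per merge mode, how
   conservatively this operation may be typed. *)
module StrMap = Map.Make (String)

(* Keep exactly the properties that are own in the first object and not own
   in the second. *)
let object_rest props1 props2 =
  StrMap.filter (fun k _ -> not (StrMap.mem k props2)) props1

let () =
  let o1 = StrMap.(empty |> add "p" 1 |> add "q" 2) in
  let o2 = StrMap.(empty |> add "q" 0) in
  assert (StrMap.bindings (object_rest o1 o2) = [ ("p", 1) ])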
*) - | ReactConfigMerge, - Some (t1, _), Some (DefT (_, OptionalT t2), _), _ -> - (* We only test the subtyping relation of t1 and t2 if both t1 and t2 - * are optional types. If t1 is required then t2 will always - * be overwritten. *) - (match t1 with - | DefT (_, OptionalT t1) -> rec_flow_t cx trace (t2, t1) - | _ -> ()); - Some (Field (None, t1, Positive)) - (* Using our same equation. Consider this case: - * - * {...{p}, ...C} = {p} - * - * The solution for C here is {p?}. An empty object, {}, is not a valid - * solution unless that empty object is exact. Even for exact objects, - * {|p?|} is the best solution since it accepts more valid - * programs then {||}. *) - | ReactConfigMerge, - Some (t1, _), Some (t2, _), _ -> - (* The DP type for p must be a subtype of the P type for p. *) - rec_flow_t cx trace (t2, t1); - Some (Field (None, optional t1, Positive)) - (* Consider this case: - * - * {...{p}, ...C} = {} - * - * For C there will be no prop. However, if the props object is exact - * then we need to throw an error. *) - | ReactConfigMerge, - None, Some (_, _), _ -> - if flags1.exact then ( - let use_op = Frame (PropertyCompatibility { - prop = Some k; - lower = r2; - upper = r1; - is_sentinel = false; - }, unknown_use) in - let r2 = replace_reason_const (RProperty (Some k)) r2 in - let err = FlowError.EPropNotFound (Some k, (r2, r1), use_op) in - add_output cx ~trace err - ); - None - - ) props1 props2 in - let dict = match dict1, dict2 with - | None, None -> None - | Some dict, None -> Some dict - | None, Some _ -> None - (* If our first and second objects have a dictionary then we use our first - * dictionary, but we make the value optional since any set of keys may have - * been removed. *) - | Some dict1, Some dict2 -> - rec_flow cx trace (dict1.value, UseT (use_op, dict2.value)); - Some ({ - dict_name = None; - key = dict1.key; - value = optional dict1.value; - dict_polarity = Neutral; - }) - in - let flags = { - frozen = false; - sealed = Sealed; - exact = flags1.exact && Obj_type.sealed_in_op reason flags1.sealed; - } in - let id = Context.make_property_map cx props in - let proto = ObjProtoT r1 in - let call = None in - let t = mk_object_def_type ~reason:r1 ~flags ~dict ~call id proto in - (* Wrap the final type in an `ExactT` if we have an exact flag *) - if flags.exact then ExactT (r1, t) else t - in - - fun options state cx trace use_op reason tout x -> - match state with - | One t -> - let tool = Resolve Next in - let state = Done x in - rec_flow cx trace (t, ObjKitT (use_op, reason, tool, Rest (options, state), tout)) - | Done base -> - let xs = Nel.map_concat (fun slice -> - Nel.map (rest cx trace ~use_op reason options slice) x - ) base in - let t = match xs with - | (x, []) -> x - | (x0, x1::xs) -> DefT (reason, UnionT (UnionRep.make x0 x1 xs)) - in - (* Intentional UnknownUse here. 
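(* A toy model, not Flow itself, of the ReactConfigMerge cases above: for a
   single property, given its type in the props P and in defaultProps DP,
   compute what a caller must supply in the config C so that
   {...DP, ...C} = P. The subtyping checks between P and DP performed by the
   real cases are omitted. *)
type 'a maybe_opt =
  | Required of 'a
  | Optional of 'a

let config_prop ~props ~default =
  match (props, default) with
  (* No default for this prop: the caller must satisfy P as declared. *)
  | Some p, None -> Some p
  (* Optional default: it may or may not fill the prop in at runtime, so the
     caller still has to satisfy P as declared. *)
  | Some p, Some (Optional _) -> Some p
  (* Required default: the default always fills the prop in, so the caller
     may omit it. *)
  | Some (Required t | Optional t), Some (Required _) -> Some (Optional t)
  (* P has no such prop; with an exact P this is an error, which the sketch
     simply reports as None. *)
  | None, _ -> None

let () =
  (* {...{p?: number}, ...C} = {p: number}  ==>  C must still provide p. *)
  assert (
    config_prop ~props:(Some (Required "number")) ~default:(Some (Optional "number"))
    = Some (Required "number"));
  (* {...{p: number}, ...C} = {p: number}  ==>  C may omit p. *)
  assert (
    config_prop ~props:(Some (Required "number")) ~default:(Some (Required "number"))
    = Some (Optional "number"))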
*) - rec_flow_t cx trace (t, tout) - in - - (********************) - (* Object Read Only *) - (********************) - - let object_read_only = - let polarity = Positive in - - let mk_read_only_object cx reason slice = - let (r, props, dict, flags) = slice in - - let props = SMap.map (fun (t, _) -> Field (None, t, polarity)) props in - let dict = Option.map dict (fun dict -> { dict with dict_polarity = polarity }) in - let call = None in - let id = Context.make_property_map cx props in - let proto = ObjProtoT reason in - let t = mk_object_def_type ~reason:r ~flags ~dict ~call id proto in - if flags.exact then ExactT (reason, t) else t - in - - fun cx trace _ reason tout x -> - let t = match Nel.map (mk_read_only_object cx reason) x with - | (t, []) -> t - | (t0, t1::ts) -> DefT (reason, UnionT (UnionRep.make t0 t1 ts)) - in - (* Intentional UnknownUse here. *) - rec_flow_t cx trace (t, tout) - in - - (****************) - (* React Config *) - (****************) - - let react_config = - let open Object.ReactConfig in - - (* All props currently have a neutral polarity. However, they should have a - * positive polarity (or even better, constant) since React.createElement() - * freezes the type of props. We use a neutral polarity today because the - * props type we flow the config into is written by users who very rarely - * add a positive variance annotation. We may consider marking that type as - * constant in the future as well. *) - let prop_polarity = Neutral in - - let finish cx trace reason config defaults children = - let (config_reason, config_props, config_dict, config_flags) = config in - (* If we have some type for children then we want to add a children prop - * to our config props. *) - let config_props = - Option.value_map children ~default:config_props ~f:(fun children -> - SMap.add "children" (children, true) config_props - ) - in - (* Remove the key and ref props from our config. We check key and ref - * independently of our config. So we must remove them so the user can't - * see them. *) - let config_props = SMap.remove "key" config_props in - let config_props = SMap.remove "ref" config_props in - (* Create the final props map and dict. - * - * NOTE: React will copy any enumerable prop whether or not it - * is own to the config. *) - let props, dict, flags = match defaults with - (* If we have some default props then we want to add the types for those - * default props to our final props object. *) - | Some (defaults_reason, defaults_props, defaults_dict, defaults_flags) -> - (* Merge our props and default props. *) - let props = SMap.merge (fun _ p1 p2 -> - let p1 = get_prop config_reason p1 config_dict in - let p2 = get_prop defaults_reason p2 defaults_dict in - match p1, p2 with - | None, None -> None - | Some (t, _), None -> Some (Field (None, t, prop_polarity)) - | None, Some (t, _) -> Some (Field (None, t, prop_polarity)) - (* If a property is defined in both objects, and the first property's - * type includes void then we want to replace every occurrence of void - * with the second property's type. This is consistent with the behavior - * of function default arguments. If you call a function, `f`, like: - * `f(undefined)` and there is a default value for the first argument, - * then we will ignore the void type and use the type for the default - * parameter instead. *) - | Some (t1, _), Some (t2, _) -> - (* Use CondT to replace void with t1. 
*) - let t = Tvar.mk_where cx reason (fun tvar -> - rec_flow cx trace (filter_optional cx ~trace reason t1, - CondT (reason, None, t2, tvar)) - ) in - Some (Field (None, t, prop_polarity)) - ) config_props defaults_props in - (* Merge the dictionary from our config with the defaults dictionary. *) - let dict = Option.merge config_dict defaults_dict (fun d1 d2 -> { - dict_name = None; - key = DefT (reason, UnionT (UnionRep.make d1.key d2.key [])); - value = DefT (reason, UnionT (UnionRep.make - (read_dict config_reason d1) - (read_dict defaults_reason d2) [])); - dict_polarity = prop_polarity; - }) in - (* React freezes the config so we set the frozen flag to true. The - * final object is only exact if both the config and defaults objects - * are exact. *) - let flags = { - frozen = true; - sealed = Sealed; - exact = - config_flags.exact && defaults_flags.exact && - Obj_type.sealed_in_op reason config_flags.sealed && - Obj_type.sealed_in_op reason defaults_flags.sealed; - } in - props, dict, flags - (* Otherwise turn our slice props map into an object props. *) - | None -> - (* All of the fields are read-only so we create positive fields. *) - let props = SMap.map (fun (t, _) -> Field (None, t, prop_polarity)) config_props in - (* Create a new dictionary from our config's dictionary with a - * positive polarity. *) - let dict = Option.map config_dict (fun d -> { - dict_name = None; - key = d.key; - value = d.value; - dict_polarity = prop_polarity; - }) in - (* React freezes the config so we set the frozen flag to true. The - * final object is only exact if the config object is exact. *) - let flags = { - frozen = true; - sealed = Sealed; - exact = config_flags.exact && Obj_type.sealed_in_op reason config_flags.sealed; - } in - props, dict, flags + __flow cx (t1, t2) trace + + and flow_opt_t cx ?(use_op = unknown_use) ?trace (t1, t2) = + flow_opt cx ?trace (t1, UseT (use_op, t2)) + + (* Externally visible function for subtyping. *) + (* Calls internal entry point and traps runaway recursion. *) + and flow cx (lower, upper) = + try flow_opt cx (lower, upper) with + | RecursionCheck.LimitExceeded trace -> + (* log and continue *) + let rl = reason_of_t lower in + let ru = reason_of_use_t upper in + let reasons = + if is_use upper then + (ru, rl) + else + FlowError.ordered_reasons (rl, ru) in - let call = None in - (* Finish creating our props object. *) - let id = Context.make_property_map cx props in - let proto = ObjProtoT reason in - let t = DefT (reason, ObjT (mk_objecttype ~flags ~dict ~call id proto)) in - if flags.exact then ExactT (reason, t) else t - in + add_output cx ~trace (Error_message.ERecursionLimit reasons) + | ex -> + (* rethrow *) + raise ex - fun state cx trace use_op reason tout x -> - match state with - (* If we have some type for default props then we need to wait for that - * type to resolve before finishing our props type. *) - | Config { defaults = Some t; children } -> - let tool = Resolve Next in - let state = Defaults { config = x; children } in - rec_flow cx trace (t, ObjKitT (use_op, reason, tool, ReactConfig state, tout)) - (* If we have no default props then finish our object and flow it to our - * tout type. 
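(* A standalone sketch, with a toy `ty` type instead of Flow's, of the
   defaults rule described above (realised with filter_optional and CondT in
   the real code): every occurrence of void in the config prop's type is
   replaced by the defaultProps type, just as a default parameter takes over
   when a function is called with `undefined`. *)
type ty =
  | Void
  | Ty of string
  | Union of ty * ty

let rec apply_default ~default = function
  | Void -> default
  | Union (a, b) -> Union (apply_default ~default a, apply_default ~default b)
  | t -> t

let () =
  (* config prop: void | string, defaultProps prop: number
     resulting prop: number | string *)
  assert (
    apply_default ~default:(Ty "number") (Union (Void, Ty "string"))
    = Union (Ty "number", Ty "string"))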
*) - | Config { defaults = None; children } -> - let ts = Nel.map (fun x -> finish cx trace reason x None children) x in - let t = match ts with - | t, [] -> t - | t0, t1::ts -> DefT (reason, UnionT (UnionRep.make t0 t1 ts)) - in - rec_flow cx trace (t, UseT (use_op, tout)) - (* If we had default props and those defaults resolved then finish our - * props object with those default props. *) - | Defaults { config; children } -> - let ts = Nel.map_concat (fun c -> - Nel.map (fun d -> finish cx trace reason c (Some d) children) x - ) config in - let t = match ts with - | t, [] -> t - | t0, t1::ts -> DefT (reason, UnionT (UnionRep.make t0 t1 ts)) - in - rec_flow cx trace (t, UseT (use_op, tout)) - in + and flow_t cx (t1, t2) = flow cx (t1, UseT (unknown_use, t2)) - (*********************) - (* Object Resolution *) - (*********************) - - let next = function - | Spread (options, state) -> object_spread options state - | Rest (options, state) -> object_rest options state - | ReactConfig state -> react_config state - | ReadOnly -> object_read_only - in + and flow_p cx ?(use_op = unknown_use) lreason ureason propref props = + flow_opt_p cx ~use_op ~report_polarity:true lreason ureason propref props - (* Intersect two object slices: slice * slice -> slice - * - * In general it is unsound to combine intersection types, but since object - * kit utilities never write to their arguments, it is safe in this specific - * case. - * - * {...{p:T}&{q:U}} = {...{p:T,q:U}} - * {...{p:T}&{p:U}} = {...{p:T&U}} - * {...A&(B|C)} = {...(A&B)|(A&C)} - * {...(A|B)&C} = {...(A&C)|(B&C)} - *) - let intersect2 reason (r1, props1, dict1, flags1) (r2, props2, dict2, flags2) = - let intersection t1 t2 = DefT (reason, IntersectionT (InterRep.make t1 t2 [])) in - let merge_props (t1, own1) (t2, own2) = - let t1, t2, opt = match t1, t2 with - | DefT (_, OptionalT t1), DefT (_, OptionalT t2) -> t1, t2, true - | DefT (_, OptionalT t1), t2 | t1, DefT (_, OptionalT t2) | t1, t2 -> t1, t2, false - in - let t = intersection t1 t2 in - let t = if opt then optional t else t in - t, own1 || own2 - in - let r = - let loc = Loc.btwn (aloc_of_reason r1 |> ALoc.to_loc) (aloc_of_reason r2 |> ALoc.to_loc) in - mk_reason RObjectType loc + and tvar_with_constraint cx ?trace ?(derivable = false) u = + let reason = reason_of_use_t u in + let mk_tvar_where = + if derivable then + Tvar.mk_derivable_where + else + Tvar.mk_where in - let props = SMap.merge (fun _ p1 p2 -> - let read_dict r d = optional (read_dict r d), true in - match p1, p2 with - | None, None -> None - | Some p1, Some p2 -> Some (merge_props p1 p2) - | Some p1, None -> - (match dict2 with - | Some d2 -> Some (merge_props p1 (read_dict r2 d2)) - | None -> Some p1) - | None, Some p2 -> - (match dict1 with - | Some d1 -> Some (merge_props (read_dict r1 d1) p2) - | None -> Some p2) - ) props1 props2 in - let dict = Option.merge dict1 dict2 (fun d1 d2 -> { - dict_name = None; - key = intersection d1.key d2.key; - value = intersection (read_dict r1 d1) (read_dict r2 d2); - dict_polarity = Neutral; - }) in - let flags = { - frozen = flags1.frozen || flags2.frozen; - sealed = Sealed; - exact = flags1.exact || flags2.exact; - } in - r, props, dict, flags - in + mk_tvar_where cx reason (fun tvar -> flow_opt cx ?trace (tvar, u)) - let resolved cx trace use_op reason resolve_tool tool tout x = - match resolve_tool with - | Next -> next tool cx trace use_op reason tout x - | List0 ((t, todo), join) -> - let resolve_tool = Resolve (List (todo, Nel.one x, join)) in - rec_flow cx trace 
(t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) - | List (todo, done_rev, join) -> - match todo with - | [] -> - let x = match join with - | Or -> Nel.cons x done_rev |> Nel.concat - | And -> merge (intersect2 reason) x done_rev - in - next tool cx trace use_op reason tout x - | t::todo -> - let done_rev = Nel.cons x done_rev in - let resolve_tool = Resolve (List (todo, done_rev, join)) in - rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) - in - - let object_slice cx r id dict flags = - let props = Context.find_props cx id in - let props = SMap.mapi (read_prop r flags) props in - let dict = Option.map dict (fun d -> { - dict_name = None; - key = d.key; - value = read_dict r d; - dict_polarity = Neutral; - }) in - (r, props, dict, flags) - in + (* Wrapper functions around __unify that manage traces. Use these functions for + all recursive calls in the implementation of __unify. *) + and rec_unify cx trace ~use_op ?(unify_any = false) t1 t2 = + let max = Context.max_trace_depth cx in + __unify cx ~use_op ~unify_any t1 t2 (Trace.rec_trace ~max t1 (UseT (use_op, t2)) trace) - let interface_slice cx r id = - let flags = {frozen=false; exact=false; sealed=Sealed} in - let id, dict = - let props = Context.find_props cx id in - match SMap.get "$key" props, SMap.get "$value" props with - | Some (Field (_, key, polarity)), Some (Field (_, value, polarity')) - when polarity = polarity' -> - let props = props |> SMap.remove "$key" |> SMap.remove "$value" in - let id = Context.make_property_map cx props in - let dict = {dict_name = None; key; value; dict_polarity = polarity} in - id, Some dict - | _ -> id, None + and unify_opt cx ?trace ?(use_op = unknown_use) ?(unify_any = false) t1 t2 = + let trace = + match trace with + | None -> Trace.unit_trace t1 (UseT (unknown_use, t2)) + | Some trace -> + let max = Context.max_trace_depth cx in + Trace.rec_trace ~max t1 (UseT (unknown_use, t2)) trace in - object_slice cx r id dict flags - in - - let resolve cx trace use_op reason resolve_tool tool tout = function - (* We extract the props from an ObjT. *) - | DefT (r, ObjT {props_tmap; dict_t; flags; _}) -> - let x = Nel.one (object_slice cx r props_tmap dict_t flags) in - resolved cx trace use_op reason resolve_tool tool tout x - (* We take the fields from an InstanceT excluding methods (because methods - * are always on the prototype). We also want to resolve fields from the - * InstanceT's super class so we recurse. *) - | DefT (r, InstanceT (_, super, _, {own_props; _})) -> - let resolve_tool = Super (interface_slice cx r own_props, resolve_tool) in - rec_flow cx trace (super, ObjKitT (use_op, reason, resolve_tool, tool, tout)) - (* Statics of a class. TODO: This logic is unfortunately duplicated from the - * top-level pattern matching against class lower bounds to object-like - * uses. This duplication should be removed. *) - | DefT (r, ClassT i) -> - let t = Tvar.mk cx r in - rec_flow cx trace (i, GetStaticsT (r, t)); - rec_flow cx trace (t, ObjKitT (use_op, reason, Resolve resolve_tool, tool, tout)) - (* Resolve each member of a union. *) - | DefT (_, UnionT rep) -> - let t, todo = UnionRep.members_nel rep in - let resolve_tool = Resolve (List0 (todo, Or)) in - rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) - (* Resolve each member of an intersection. 
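(* A standalone sketch of how a pairwise slice combinator such as intersect2
   is lifted over already-resolved unions during the List/And/Or resolution
   above: every slice from one side is combined with every slice from the
   other, which is what realises {...(A|B)&C} = {...(A&C)|(B&C)}. Plain
   lists stand in for Flow's non-empty lists. *)
let lift_pairwise (f : 'a -> 'a -> 'a) (xs : 'a list) (ys : 'a list) : 'a list =
  List.concat_map (fun x -> List.map (fun y -> f x y) ys) xs

let () =
  (* With string concatenation standing in for slice intersection, each of
     the two union members on the left is combined with the single slice on
     the right. *)
  assert (lift_pairwise ( ^ ) [ "A"; "B" ] [ "C" ] = [ "AC"; "BC" ])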
*) - | DefT (_, IntersectionT rep) -> - let t, todo = InterRep.members_nel rep in - let resolve_tool = Resolve (List0 (todo, And)) in - rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) - (* Mirroring Object.assign() and {...null} semantics, treat null/void as - * empty objects. *) - | DefT (_, (NullT | VoidT)) -> - let flags = { frozen = true; sealed = Sealed; exact = true } in - let x = Nel.one (reason, SMap.empty, None, flags) in - resolved cx trace use_op reason resolve_tool tool tout x - (* mixed is treated as {[string]: mixed}. Any JavaScript value may be - * treated as an object and so this is safe. *) - | DefT (r, MixedT _) as t -> - let flags = { frozen = true; sealed = Sealed; exact = true } in - let x = Nel.one (reason, SMap.empty, Some ({ - dict_name = None; - key = StrT.make r; - value = t; - dict_polarity = Neutral; - }), flags) in - resolved cx trace use_op reason resolve_tool tool tout x - (* If we see an empty then propagate empty to tout. *) - | DefT (r, EmptyT) -> - rec_flow cx trace (EmptyT.make r, UseT (use_op, tout)) - (* Propagate any. *) - | DefT (_, (AnyT | AnyObjT)) -> - rec_flow cx trace (AnyT.why reason, UseT (use_op, tout)) - (* Other types have reasonable object representations that may be added as - * new uses of the object kit resolution code is found. *) - | t -> - add_output cx ~trace (FlowError.EInvalidObjectKit { - tool; - reason = reason_of_t t; - reason_op = reason; - use_op; - }) - in - - let super cx trace use_op reason resolve_tool tool tout acc = function - | DefT (r, InstanceT (_, super, _, {own_props; _})) -> - let slice = interface_slice cx r own_props in - let acc = intersect2 reason acc slice in - let resolve_tool = Super (acc, resolve_tool) in - rec_flow cx trace (super, ObjKitT (use_op, reason, resolve_tool, tool, tout)) - | DefT (_, (AnyT | AnyObjT)) -> - rec_flow cx trace (AnyT.why reason, UseT (use_op, tout)) - | _ -> - next tool cx trace use_op reason tout (Nel.one acc) - in - - fun cx trace ~use_op reason resolve_tool tool tout l -> - match resolve_tool with - | Resolve resolve_tool -> resolve cx trace use_op reason resolve_tool tool tout l - | Super (acc, resolve_tool) -> super cx trace use_op reason resolve_tool tool tout acc l - -(************* end of slab **************************************************) - -let intersect_members cx members = - match members with - | [] -> SMap.empty - | _ -> - let map = SMap.map (fun x -> [x]) (List.hd members) in - let map = List.fold_left (fun acc x -> - SMap.merge (fun _ tl t -> - match (tl, t) with - | (None, None) -> None - | (None, Some _) -> None - | (Some _, None) -> None - | (Some tl, Some t) -> Some (t :: tl) - ) acc x - ) map (List.tl members) in - SMap.map (List.fold_left (fun (_, acc) (loc, t) -> - (* Arbitrarily use the last location encountered *) - loc, merge_type cx (acc, t) - ) (None, Locationless.EmptyT.t)) map - -(* It's kind of lame that Members is in this module, but it uses a bunch of - internal APIs so for now it's easier to keep it here than to expose those - APIs *) -module Members : sig - type ('success, 'success_module) generic_t = - | Success of 'success - | SuccessModule of 'success_module - | FailureNullishType - | FailureAnyType - | FailureUnhandledType of Type.t - - type t = ( - (* Success *) (Loc.t option * Type.t) SMap.t, - (* SuccessModule *) (Loc.t option * Type.t) SMap.t * (Type.t option) - ) generic_t - - (* For debugging purposes *) - val string_of_extracted_type: (Type.t, Type.t) generic_t -> string - - val to_command_result: t -> 
((Loc.t option * Type.t) SMap.t, string) result - - val extract: Context.t -> Type.t -> t - - val extract_type: Context.t -> Type.t -> (Type.t, Type.t) generic_t - val extract_members: Context.t -> (Type.t, Type.t) generic_t -> t - -end = struct - - type ('success, 'success_module) generic_t = - | Success of 'success - | SuccessModule of 'success_module - | FailureNullishType - | FailureAnyType - | FailureUnhandledType of Type.t - - type t = ( - (* Success *) (Loc.t option * Type.t) SMap.t, - (* SuccessModule *) (Loc.t option * Type.t) SMap.t * (Type.t option) - ) generic_t - - let string_of_extracted_type = function - | Success t -> Printf.sprintf "Success (%s)" (Type.string_of_ctor t) - | SuccessModule t -> Printf.sprintf "SuccessModule (%s)" (Type.string_of_ctor t) - | FailureNullishType -> "FailureNullishType" - | FailureAnyType -> "FailureAnyType" - | FailureUnhandledType t -> Printf.sprintf "FailureUnhandledType (%s)" (Type.string_of_ctor t) - - let to_command_result = function - | Success map - | SuccessModule (map, None) -> - Ok map - | SuccessModule (named_exports, Some cjs_export) -> - Ok (SMap.add "default" (None, cjs_export) named_exports) - | FailureNullishType -> - Error "autocomplete on possibly null or undefined value" - | FailureAnyType -> - Error "not enough type information to autocomplete" - | FailureUnhandledType t -> - Error (spf - "autocomplete on unexpected type of value %s (please file a task!)" - (string_of_ctor t)) - - let find_props cx fields = - SMap.filter (fun key _ -> - (* Filter out keys that start with "$" *) - not (String.length key >= 1 && key.[0] = '$') - ) (Context.find_props cx fields) - - let rec extract_type cx this_t = match this_t with - | DefT (_, MaybeT ty) -> - extract_type cx ty - | DefT (_, (NullT | VoidT)) - | InternalT (OptionalChainVoidT _) -> - FailureNullishType - | DefT (_, AnyT) -> - FailureAnyType - | DefT (reason, AnyObjT) -> - extract_type cx (get_builtin_type cx reason "Object") - | DefT (reason, AnyFunT) -> - let rep = InterRep.make - (get_builtin_type cx reason "Function") - (get_builtin_type cx reason "Object") - [] - in - extract_type cx (DefT (reason, IntersectionT rep)) - | AnnotT (_, source_t, _) -> - let source_t = resolve_type cx source_t in - extract_type cx source_t - | DefT (_, InstanceT _ ) as t -> - Success t - | DefT (_, ObjT _) as t -> - Success t - | ExactT (_, t) -> - let t = resolve_type cx t in - extract_type cx t - | ModuleT _ as t -> - SuccessModule t - | ThisTypeAppT (_, c, _, ts_opt) -> - let c = resolve_type cx c in - let inst_t = instantiate_poly_t cx c ts_opt in - let inst_t = instantiate_type inst_t in - extract_type cx inst_t - | DefT (_, TypeAppT (_, c, ts)) -> - let c = resolve_type cx c in - let inst_t = instantiate_poly_t cx c (Some ts) in - let inst_t = instantiate_type inst_t in - extract_type cx inst_t - | DefT (_, PolyT (_, sub_type, _)) -> - (* TODO: replace type parameters with stable/proper names? 
*) - extract_type cx sub_type - | ThisClassT (_, DefT (_, InstanceT (static, _, _, _))) - | DefT (_, ClassT (DefT (_, InstanceT (static, _, _, _)))) -> - let static_t = resolve_type cx static in - extract_type cx static_t - | DefT (_, FunT _) as t -> - Success t - | DefT (_, IntersectionT _ ) as t -> - Success t - | DefT (_, UnionT _ ) as t -> - Success t - | DefT (reason, SingletonStrT _) - | DefT (reason, StrT _) -> - extract_type cx (get_builtin_type cx reason "String") - | DefT (reason, SingletonNumT _) - | DefT (reason, NumT _) -> - extract_type cx (get_builtin_type cx reason "Number") - | DefT (reason, SingletonBoolT _) - | DefT (reason, BoolT _) -> - extract_type cx (get_builtin_type cx reason "Boolean") - - | DefT (reason, CharSetT _) -> - extract_type cx (get_builtin_type cx reason "String") - - | DefT (_, IdxWrapper t) -> - let t = resolve_type cx t in - extract_type cx t - - | ReposT (_, t) - | InternalT (ReposUpperT (_, t)) -> - extract_type cx t - - | OpaqueT (_, {underlying_t = Some t; _}) - | OpaqueT (_, {super_t = Some t; _}) - -> extract_type cx t - - | AnyWithLowerBoundT _ - | AnyWithUpperBoundT _ - | MergedT _ - | DefT (_, ArrT _) - | BoundT _ - | InternalT (ChoiceKitT (_, _)) - | TypeDestructorTriggerT _ - | DefT (_, ClassT _) - | CustomFunT (_, _) - | MatchingPropT (_, _, _) - | DefT (_, EmptyT) - | EvalT (_, _, _) - | ExistsT _ - | InternalT (ExtendsT _) - | FunProtoApplyT _ - | FunProtoBindT _ - | FunProtoCallT _ - | FunProtoT _ - | KeysT (_, _) - | DefT (_, MixedT _) - | NullProtoT _ - | ObjProtoT _ - | OpaqueT _ - | OpenPredT (_, _, _, _) - | OpenT _ - | DefT (_, OptionalT _) - | ShapeT _ - | ThisClassT _ - | DefT (_, TypeT _) - -> - FailureUnhandledType this_t - - let rec extract_members cx = function - | FailureNullishType -> FailureNullishType - | FailureAnyType -> FailureAnyType - | FailureUnhandledType t -> FailureUnhandledType t - | Success (DefT (_, InstanceT (_, super, _, {own_props; proto_props; _}))) -> - let members = SMap.fold (fun x p acc -> - (* TODO: It isn't currently possible to return two types for a given - * property in autocomplete, so for now we just return the getter - * type. *) - let loc, t = match p with - | Field (loc, t, _) - | Get (loc, t) - | Set (loc, t) - (* arbitrarily use the location for the getter. 
maybe we can send both in the future *) - | GetSet (loc, t, _, _) - | Method (loc, t) -> - (loc, t) - in - SMap.add x (loc, t) acc - ) (find_props cx own_props) SMap.empty in - (* TODO: own props should take precedence *) - let members = SMap.fold (fun x p acc -> - match Property.read_t p with - | Some t -> - let loc = Property.read_loc p in - SMap.add x (loc, t) acc - | None -> acc - ) (find_props cx proto_props) members in - let super_t = resolve_type cx super in - let super_flds = extract_members_as_map cx super_t in - Success (AugmentableSMap.augment super_flds ~with_bindings:members) - | Success (DefT (_, ObjT {props_tmap = flds; proto_t = proto; _})) -> - let proto_reason = reason_of_t proto in - let rep = InterRep.make - proto - (get_builtin_type cx proto_reason "Object") - [] - in - let proto_t = resolve_type cx (DefT (proto_reason, IntersectionT rep)) in - let prot_members = extract_members_as_map cx proto_t in - let members = SMap.fold (fun x p acc -> - match Property.read_t p with - | Some t -> - let loc = Property.read_loc p in - SMap.add x (loc, t) acc - | None -> acc - ) (find_props cx flds) SMap.empty in - Success (AugmentableSMap.augment prot_members ~with_bindings:members) - | SuccessModule (ModuleT (_, {exports_tmap; cjs_export; has_every_named_export = _;}, _)) -> - let named_exports = Context.find_exports cx exports_tmap in - let cjs_export = - match cjs_export with - | Some t -> Some (resolve_type cx t) - | None -> None - in - SuccessModule (named_exports, cjs_export) - | Success (DefT (_, FunT (static, proto, _))) -> - let static_t = resolve_type cx static in - let proto_t = resolve_type cx proto in - let members = extract_members_as_map cx static_t in - let prot_members = extract_members_as_map cx proto_t in - Success (AugmentableSMap.augment prot_members ~with_bindings:members) - | Success (DefT (_, IntersectionT rep)) -> - (* Intersection type should autocomplete for every property of - every type in the intersection *) - let ts = InterRep.members rep in - let ts = List.map (resolve_type cx) ts in - let members = List.map (extract_members_as_map cx) ts in - Success (List.fold_left (fun acc members -> - AugmentableSMap.augment acc ~with_bindings:members - ) SMap.empty members) - | Success (DefT (_, UnionT rep)) -> - (* Union type should autocomplete for only the properties that are in - * every type in the intersection *) - let ts = List.map (resolve_type cx) (UnionRep.members rep) in - let members = ts - (* Although we'll ignore the any-ish and nullish members of the union *) - |> List.filter (function - | DefT (_, (AnyT | AnyObjT | AnyFunT | NullT | VoidT)) -> false - | _ -> true - ) - |> List.map (extract_members_as_map cx) - |> intersect_members cx in - Success members - | Success t | SuccessModule t -> - FailureUnhandledType t - - (* TODO: Think of a better place to put this *) - and extract cx this_t = - let t = extract_type cx this_t in - extract_members cx t - - and extract_members_as_map cx this_t = - let members = extract cx this_t in - match to_command_result members with - | Ok map -> map - | Error _ -> SMap.empty - + __unify cx ~use_op ~unify_any t1 t2 trace + + (* Externally visible function for unification. *) + (* Calls internal entry point and traps runaway recursion. 
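(* A small standalone model of the Members rules above: an intersection
   offers the members of every branch, while a union offers only the members
   common to every branch. String-keyed maps stand in for the typed prop
   maps. *)
module StrMap = Map.Make (String)

let members_of_intersection (branches : 'a StrMap.t list) : 'a StrMap.t =
  (* every branch contributes all of its members *)
  List.fold_left (StrMap.union (fun _ v _ -> Some v)) StrMap.empty branches

let members_of_union (branches : 'a StrMap.t list) : 'a StrMap.t =
  (* keep only the members present in every branch *)
  match branches with
  | [] -> StrMap.empty
  | m :: rest ->
    List.fold_left (fun acc m' -> StrMap.filter (fun k _ -> StrMap.mem k m') acc) m rest

let () =
  let m1 = StrMap.(empty |> add "p" () |> add "q" ()) in
  let m2 = StrMap.(empty |> add "q" () |> add "r" ()) in
  assert (List.map fst (StrMap.bindings (members_of_intersection [ m1; m2 ])) = [ "p"; "q"; "r" ]);
  assert (List.map fst (StrMap.bindings (members_of_union [ m1; m2 ])) = [ "q" ])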
*) + and unify cx t1 t2 = + try unify_opt cx ~unify_any:true t1 t2 with + | RecursionCheck.LimitExceeded trace -> + (* log and continue *) + let reasons = FlowError.ordered_reasons (reason_of_t t1, reason_of_t t2) in + add_output cx ~trace (Error_message.ERecursionLimit reasons) + | ex -> + (* rethrow *) + raise ex + + and continue cx trace t = function + | Lower (use_op, l) -> rec_flow cx trace (l, UseT (use_op, t)) + | Upper u -> rec_flow cx trace (t, u) + + and continue_repos cx trace reason ?(use_desc = false) t = function + | Lower (use_op, l) -> rec_flow cx trace (t, ReposUseT (reason, use_desc, use_op, l)) + | Upper u -> rec_flow cx trace (t, ReposLowerT (reason, use_desc, u)) + + include AssertGround + include TrustChecking end -class type_finder t = object (_self) - inherit [bool] Type_visitor.t as super - method! type_ cx pole found = function - | t' -> (t = t') || super#type_ cx pole found t -end +module rec FlowJs : Flow_common.S = struct + module React = React_kit.Kit (FlowJs) + module AssertGround = Assert_ground.Kit (FlowJs) + module TrustKit = Trust_checking.TrustKit (FlowJs) + module CustomFun = Custom_fun_kit.Kit (FlowJs) + module ObjectKit = Object_kit.Kit (FlowJs) + include M__flow (React) (AssertGround) (TrustKit) (CustomFun) (ObjectKit) -module Marked = Marked.IdMarked - -class assert_ground_visitor skip r context = object (self) - inherit [Marked.t] Type_visitor.t as super - - (* Track prop maps which correspond to object literals. We don't ask for - annotations for object literals which reach exports. Instead, we walk the - properties covariantly. *) - val mutable objlits: int Properties.Map.t = Properties.Map.empty - - (* Track prop maps which correspond to instance fields and methods, indicating - any fields which are initialized. We don't ask for annotations for (a) - munged property names, which are private and thus not inputs, and (b) - initialized field names. *) - val mutable insts: (int * SSet.t) Properties.Map.t = Properties.Map.empty - - val depth = ref 0 - val reason_stack = ref [r] - val max_reasons = Context.max_trace_depth context - - method private push_frame r = - incr depth; - if max_reasons > 0 then ( - let head_loc = def_loc_of_reason (List.hd !reason_stack) in - let curr_loc = def_loc_of_reason r in - let should_add = Loc.span_compare head_loc curr_loc = 0 in - reason_stack := if should_add - then r::(List.tl !reason_stack) - else r::!reason_stack; - if max_reasons > 0 && List.length !reason_stack > max_reasons then ( - let top_half = ListUtils.first_n (max_reasons / 2) !reason_stack in - let bottom_half_num = if max_reasons mod 2 = 1 - then max_reasons / 2 + 1 - else max_reasons / 2 in - let bottom_half = ListUtils.last_n bottom_half_num !reason_stack in - reason_stack := top_half @ bottom_half); - should_add - ) else false - - method private pop_frame did_add = - decr depth; - if max_reasons > 0 then ( - if did_add then - reason_stack := List.tl !reason_stack; - ) + let add_output = add_output - method private with_frame r f = - let did_add = self#push_frame r in - let result = f () in - self#pop_frame did_add; - result - - - (* Tvars with reasons that match should not be missing annotation errors. *) - method private skip_reason r = - match desc_of_reason r with - (* No possible annotation for `this` type. 
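(* A standalone sketch of the reason-stack trimming performed by push_frame
   above: once the stack grows past max_reasons, keep the first half and the
   last half and drop the middle frames. first_n and last_n are small local
   stand-ins for the ListUtils helpers. *)
let first_n n xs = List.filteri (fun i _ -> i < n) xs

let last_n n xs =
  let len = List.length xs in
  List.filteri (fun i _ -> i >= len - n) xs

let trim_reasons max stack =
  if max = 0 || List.length stack <= max then stack
  else
    let top_half = first_n (max / 2) stack in
    let bottom_half_num = if max mod 2 = 1 then (max / 2) + 1 else max / 2 in
    top_half @ last_n bottom_half_num stack

let () = assert (trim_reasons 4 [ "r1"; "r2"; "r3"; "r4"; "r5"; "r6" ] = [ "r1"; "r2"; "r5"; "r6" ])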
*) - | RThis -> true - | _ -> false + let union_of_ts = union_of_ts - method private derivable_reason r = - match desc_of_reason r with - | RExistential -> true - | RShadowProperty _ -> true - | _ -> is_derivable_reason r + let generate_tests = generate_tests - method! tvar cx pole seen r id = - let root_id, constraints = Context.find_constraints cx id in - if id != root_id - then self#tvar cx pole seen r root_id - else - if ISet.mem id skip then seen else - if self#skip_reason r then seen else - let pole = if self#derivable_reason r then Positive else pole in - match Marked.add id pole seen with - | None -> seen - | Some (pole, seen) -> - if Polarity.compat (pole, Negative) - then ( - unify_opt cx ~unify_any:true (OpenT (r, id)) Locationless.AnyT.t; - let trace_reasons = if max_reasons = 0 - then [] - else List.map (fun reason -> - repos_reason (def_loc_of_reason reason) reason) - !reason_stack in - add_output cx (FlowError.EMissingAnnotation (r, trace_reasons)) - ); - if Polarity.compat (pole, Positive) - then List.fold_left (self#type_ cx Positive) seen (types_of constraints) - else seen - - method! type_ cx pole seen t = - Option.iter ~f:(fun { Verbose.depth = verbose_depth; indent; enabled_during_flowlib=_; } -> - let pid = Context.pid_prefix cx in - let indent = String.make (!depth * indent) ' ' in - prerr_endlinef "\n%s%sassert_ground (%s): %s" indent pid - (Polarity.string pole) - (Debug_js.dump_t cx ~depth:verbose_depth t) - ) (Context.verbose cx); - self#with_frame (reason_of_t t) (fun () -> - let seen = - match t with - | BoundT _ -> seen - | AnnotT _ -> seen - | MergedT _ -> - (* The base class implementation will walk uses here, but there's no - reasonable way to complain about missing annotations for MergedT, - which was added to avoid missing annotations. *) - seen - | EvalT (_, TypeDestructorT _, _) -> - (* Type destructors are annotations, so we should never complain about - missing annotations due them. The default visitor _should_ never - visit a tvar in an input position, but do to some wacky stuff in - eval, it's possible today. *) - seen - | KeysT _ -> - (* Same idea as type destructors. 
*) - seen - | DefT (_, TypeAppT (_, c, ts)) -> - self#typeapp ts cx pole seen c - | DefT (r, ArrT (ArrayAT (t, ts))) when is_literal_array_reason r -> - self#arrlit cx pole seen t ts - | DefT (r, ObjT o) when is_literal_object_reason r -> - let refcnt = - try Properties.Map.find_unsafe o.props_tmap objlits - with Not_found -> 0 - in - objlits <- Properties.Map.add o.props_tmap (refcnt+1) objlits; - let seen = super#type_ cx pole seen t in - objlits <- ( - if refcnt = 0 - then Properties.Map.remove o.props_tmap objlits - else Properties.Map.add o.props_tmap refcnt objlits - ); - seen - | DefT (_, InstanceT (static, _, _, i)) -> - let static_props_id = match static with - | DefT (_, ObjT o) -> Some o.props_tmap - | _ -> None - in - let own_refcnt = - try fst (Properties.Map.find_unsafe i.own_props insts) - with Not_found -> 0 - in - let proto_refcnt = - try fst (Properties.Map.find_unsafe i.proto_props insts) - with Not_found -> 0 - in - let static_refcnt = Option.value_map static_props_id ~default:0 ~f:(fun id -> - try fst (Properties.Map.find_unsafe id insts) - with Not_found -> 0 - ) in - insts <- Properties.Map.add i.own_props (own_refcnt+1, i.initialized_fields) insts; - insts <- Properties.Map.add i.proto_props (proto_refcnt+1, SSet.empty) insts; - Option.iter static_props_id (fun id -> - insts <- Properties.Map.add id (static_refcnt+1, i.initialized_static_fields) insts - ); - let seen = super#type_ cx pole seen t in - insts <- ( - if own_refcnt = 0 - then Properties.Map.remove i.own_props insts - else Properties.Map.add i.own_props (own_refcnt, i.initialized_fields) insts - ); - insts <- ( - if proto_refcnt = 0 - then Properties.Map.remove i.proto_props insts - else Properties.Map.add i.proto_props (own_refcnt, SSet.empty) insts - ); - Option.iter static_props_id (fun id -> - insts <- ( - if static_refcnt = 0 - then Properties.Map.remove id insts - else Properties.Map.add id (static_refcnt, i.initialized_static_fields) insts - ) - ); - seen - | DefT (r, FunT (static, prototype, ft)) -> - let any = Locationless.AnyT.t in - unify_opt cx ~unify_any:true static any; - unify_opt cx ~unify_any:true prototype any; - unify_opt cx ~unify_any:true ft.this_t any; - super#type_ cx pole seen - (DefT (r, FunT (any, any, {ft with this_t = any}))) - | _ -> super#type_ cx pole seen t - in - seen) - - method! 
props cx pole seen id = - if Properties.Map.mem id objlits - then self#objlit_props cx pole seen id - else match Properties.Map.get id insts with - | Some (_, init) -> self#inst_props cx pole seen id init - | _ -> super#props cx pole seen id - - method private arrlit cx pole seen t ts = - let seen = self#type_ cx pole seen t in - let seen = Option.fold ts ~init:seen ~f:(List.fold_left (self#type_ cx pole)) in - seen - - method private objlit_props cx pole seen id = - let props = Context.find_props cx id in - SMap.fold (fun _ p acc -> - Property.read_t p |> Option.fold ~f:(self#type_ cx pole) ~init:acc - ) props seen - - method private inst_props cx pole seen id init = - let props = Context.find_props cx id in - SMap.fold (fun x p acc -> - if is_munged_prop_name cx x - then acc - else if SSet.mem x init - then Property.read_t p |> Option.fold ~f:(self#type_ cx pole) ~init:acc - else self#prop cx pole acc p - ) props seen - - method private typeapp = - let rec loop ?constant_polarity_param cx pole seen = function - | _, [] -> seen - | [], _ -> seen - | tparam::tparams, targ::targs -> - let param_polarity = match constant_polarity_param with - | Some (s, p) when tparam.name = s -> p - | _ -> Polarity.mult (pole, tparam.polarity) in - let seen = self#type_ cx param_polarity seen targ in - loop cx pole seen (tparams, targs) - in - fun targs cx pole seen -> function - | OpenT (r, id) -> - let seen = self#tvar cx Positive seen r id in - (match Context.find_graph cx id with - | Resolved t -> self#typeapp targs cx pole seen t - | Unresolved { lower; _ } -> - TypeMap.fold (fun t _ acc -> - self#typeapp targs cx pole acc t - ) lower seen) - | AnnotT (_, t, _) -> self#typeapp targs cx pole seen t - (* Shallowly check to see if it is an EvalT. If the EvalT's first - * value is a BoundT, we can visit that parameter with a constant - * positive polarity if it does not appear in the defer_use_t. - *) - | DefT (_, PolyT (tparams, DefT (_, TypeT (_, - EvalT (BoundT (_, s, _) as t, (TypeDestructorT (_, _, destructor)), _))), _)) -> - if (new type_finder t)#destructor cx false destructor - then loop cx pole seen (tparams, targs) - else loop ~constant_polarity_param:(s, Positive) cx pole seen (tparams, targs) - | DefT (_, PolyT (tparams, _, _)) -> loop cx pole seen (tparams, targs) - | DefT (_, EmptyT) -> seen - | DefT (_, AnyT) -> seen - | _ -> - (* We don't error here on an unexpected typeapp because we would have already - * caught that this type is not polymorphic earlier *) - seen + let match_this_binding = match_this_binding end -let enforce_strict cx t = - (* First, compute a set of ids to be skipped by calling `assume_ground`. After - the call, skip_ids contains precisely those ids that correspond to - requires/imports. *) - let skip_ids = ref ISet.empty in - LocMap.iter (fun _ tvar -> - assume_ground cx skip_ids (UseT (unknown_use, tvar)) - ) (Context.require_map cx); - - (* With the computed skip_ids, call `assert_ground` to force annotations while - walking the graph starting from id. Typically, id corresponds to - exports. *) - let seen = - let visitor = new assert_ground_visitor !skip_ids (reason_of_t t) cx in - visitor#type_ cx Positive Marked.empty t - in - ignore (seen: Marked.t) +include FlowJs + +(************* end of slab **************************************************) (* Would rather this live elsewhere, but here because module DAG. 
*) -let mk_default cx reason ~expr = Default.fold - ~expr:(expr cx) - ~cons:(fun t1 t2 -> - Tvar.mk_where cx reason (fun tvar -> - flow_t cx (t1, tvar); - flow_t cx (t2, tvar))) - ~selector:(fun r t sel -> - let id = mk_id () in - eval_selector cx r t sel id) +let mk_default cx reason = + Default.fold + ~expr:(fun t -> t) + ~cons:(fun t1 t2 -> + Tvar.mk_where cx reason (fun tvar -> + flow_t cx (t1, tvar); + flow_t cx (t2, tvar))) + ~selector:(fun r t sel -> Tvar.mk_where cx r (fun tvar -> eval_selector cx r t sel tvar)) diff --git a/src/typing/flow_js.mli b/src/typing/flow_js.mli index 1e0ad717929..16073ca7dd9 100644 --- a/src/typing/flow_js.mli +++ b/src/typing/flow_js.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,36 +8,49 @@ open Reason (* propagates sources to sinks following a subtype relation *) -val flow: Context.t -> (Type.t * Type.use_t) -> unit -val flow_t: Context.t -> (Type.t * Type.t) -> unit +val flow : Context.t -> Type.t * Type.use_t -> unit + +val flow_t : Context.t -> Type.t * Type.t -> unit (* given a use type, return a tvar constrained by the use type *) -val tvar_with_constraint: Context.t -> ?trace:Trace.t -> ?derivable:bool -> Type.use_t -> Type.t +val tvar_with_constraint : Context.t -> ?trace:Trace.t -> ?derivable:bool -> Type.use_t -> Type.t -val unify: Context.t -> Type.t -> Type.t -> unit +val unify : Context.t -> Type.t -> Type.t -> unit -val flow_p: +val flow_p : Context.t -> ?use_op:Type.use_op -> - reason -> (* lreason *) - reason -> (* ureason *) + reason -> + (* lreason *) + reason -> + (* ureason *) Type.propref -> - (Type.property * Type.property) -> unit + Type.property * Type.property -> + unit -val reposition: Context.t -> ?trace:Trace.t -> Loc.t -> ?desc:reason_desc -> ?annot_loc:Loc.t -> Type.t -> Type.t +val reposition : + Context.t -> + ?trace:Trace.t -> + ALoc.t -> + ?desc:reason_desc -> + ?annot_loc:ALoc.t -> + Type.t -> + Type.t (* constraint utils *) -val filter_optional: Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.t +val filter_optional : Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.t -module Cache: sig - val clear: unit -> unit - val stats_poly_instantiation: unit -> Hashtbl.statistics - val summarize_flow_constraint: unit -> (string * int) list +module Cache : sig + val clear : unit -> unit + + val stats_poly_instantiation : unit -> Hashtbl.statistics + + val summarize_flow_constraint : unit -> (string * int) list end -val get_builtin_typeapp: Context.t -> ?trace:Trace.t -> reason -> string -> Type.t list -> Type.t +val get_builtin_typeapp : Context.t -> ?trace:Trace.t -> reason -> string -> Type.t list -> Type.t -val resolve_spread_list: +val resolve_spread_list : Context.t -> use_op:Type.use_op -> reason_op:Reason.t -> @@ -47,77 +60,68 @@ val resolve_spread_list: (* polymorphism *) -val subst: Context.t -> ?use_op:Type.use_op -> ?force:bool -> (Type.t SMap.t) -> Type.t -> Type.t -val generate_tests: Context.t -> Type.typeparam list -> (Type.t SMap.t -> 'a) -> 'a -val match_this_binding: Type.t SMap.t -> (Type.t -> bool) -> bool +val subst : Context.t -> ?use_op:Type.use_op -> ?force:bool -> Type.t SMap.t -> Type.t -> Type.t + +val generate_tests : Context.t -> Type.typeparam list -> (Type.t SMap.t -> 'a) -> 'a -val check_polarity: - Context.t -> ?trace:Trace.t -> Type.polarity -> Type.t -> unit +val 
match_this_binding : Type.t SMap.t -> (Type.t -> bool) -> bool + +val check_polarity : Context.t -> ?trace:Trace.t -> Polarity.t -> Type.t -> unit (* selectors *) -val eval_selector : Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.selector -> int -> Type.t +val eval_selector : + Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.selector -> Type.t -> unit + val visit_eval_id : Context.t -> int -> (Type.t -> unit) -> unit (* destructors *) exception Not_expect_bound of string -val mk_type_destructor : Context.t -> trace:Trace.t -> Type.use_op -> Reason.t -> Type.t -> - Type.destructor -> int -> bool * Type.t +val eval_evalt : Context.t -> ?trace:Trace.t -> Type.t -> Type.defer_use_t -> int -> Type.t (* ... *) -val mk_default: Context.t -> reason -> - expr:(Context.t -> 'a -> Type.t) -> - 'a Default.t -> Type.t +val mk_default : Context.t -> reason -> Type.t Default.t -> Type.t (* val graph: bounds IMap.t ref *) -val lookup_module: Context.t -> string -> Type.t +val lookup_module : Context.t -> string -> Type.t (* contexts *) -val mk_builtins: Context.t -> unit -val add_output: Context.t -> ?trace:Trace.t -> Flow_error.error_message -> unit +val mk_builtins : Context.t -> unit + +val add_output : Context.t -> ?trace:Trace.t -> Error_message.t -> unit (* builtins *) -val builtins: Context.t -> Type.t -val get_builtin: Context.t -> ?trace:Trace.t -> string -> reason -> Type.t -val lookup_builtin: Context.t -> ?trace:Trace.t -> string -> reason -> Type.lookup_kind -> Type.t -> unit -val get_builtin_type: Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> string -> Type.t -val resolve_builtin_class: Context.t -> ?trace:Trace.t -> Type.t -> Type.t -val set_builtin: Context.t -> ?trace:Trace.t -> string -> Type.t -> unit +val builtins : Context.t -> Type.t + +val get_builtin : Context.t -> ?trace:Trace.t -> string -> reason -> Type.t + +val lookup_builtin : + Context.t -> ?trace:Trace.t -> string -> reason -> Type.lookup_kind -> Type.t -> unit -val mk_instance: Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t -val mk_typeof_annotation: Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t +val get_builtin_type : Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> string -> Type.t + +val set_builtin : Context.t -> ?trace:Trace.t -> string -> Type.t -> unit + +val mk_instance : Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t + +val mk_typeof_annotation : + Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t (* strict *) -val enforce_strict: Context.t -> Type.t -> unit -val merge_type: Context.t -> (Type.t * Type.t) -> Type.t -val resolve_type: Context.t -> Type.t -> Type.t -val resolve_tvar: Context.t -> Type.tvar -> Type.t -val possible_types: Context.t -> Constraint.ident -> Type.t list -val possible_types_of_type: Context.t -> Type.t -> Type.t list -val possible_uses: Context.t -> Constraint.ident -> Type.use_t list - -module Members : sig - type ('success, 'success_module) generic_t = - | Success of 'success - | SuccessModule of 'success_module - | FailureNullishType - | FailureAnyType - | FailureUnhandledType of Type.t - - type t = ( - (* Success *) (Loc.t option * Type.t) SMap.t, - (* SuccessModule *) (Loc.t option * Type.t) SMap.t * (Type.t option) - ) generic_t - - (* For debugging purposes *) - val string_of_extracted_type: (Type.t, Type.t) generic_t -> string - - val to_command_result: t -> ((Loc.t option * Type.t) SMap.t, string) result - - val extract: 
Context.t -> Type.t -> t - val extract_type: Context.t -> Type.t -> (Type.t, Type.t) generic_t - val extract_members: Context.t -> (Type.t, Type.t) generic_t -> t -end +val types_of : Constraint.constraints -> Type.t list + +val enforce_strict : Context.t -> Type.t -> should_munge_underscores:bool -> unit + +val possible_types : Context.t -> Constraint.ident -> Type.t list + +val possible_types_of_type : Context.t -> Type.t -> Type.t list + +val possible_uses : Context.t -> Constraint.ident -> Type.use_t list + +(* trust *) +val mk_trust_var : Context.t -> ?initial:Trust.trust_qualifier -> unit -> Type.ident + +val strengthen_trust : Context.t -> Type.ident -> Trust.trust_qualifier -> Error_message.t -> unit diff --git a/src/typing/func_params.ml b/src/typing/func_params.ml index 87b917aaa3e..ba22d948dcc 100644 --- a/src/typing/func_params.ml +++ b/src/typing/func_params.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -14,90 +14,56 @@ body of a function. These may not be the same due to default values and destructuring. *) -module Flow = Flow_js - -open Reason -open Type -open Destructuring - -type param = string option * Type.t -type rest = string option * Loc.t * Type.t -type default = (Loc.t, Loc.t) Flow_ast.Expression.t Default.t -type binding = string * Loc.t * Type.t * default option - -type t = { - params_rev: param list; - rest: rest option; - bindings_rev: binding list; -} - -let empty = { - params_rev = []; - rest = None; - bindings_rev = []; -} - -let add_simple cx ~optional ?default loc id t x = - let param_t = if optional || default <> None then Type.optional t else t in - let bound_t = if default <> None then t else param_t in - Type_table.set (Context.type_table cx) loc t; - let name = Option.map id ~f:(fun (id_loc, name) -> - let id_info = name, bound_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - name - ) in - let params_rev = (name, param_t) :: x.params_rev in - let bindings_rev = match id with - | None -> x.bindings_rev - | Some (_, name) -> - let default = Option.map default Default.expr in - (name, loc, bound_t, default) :: x.bindings_rev - in - { x with params_rev; bindings_rev } - -let add_complex cx ~expr ?default patt t x = - let default = Option.map default Default.expr in - let bindings_rev = ref x.bindings_rev in - let patt = destructuring cx ~expr t None default patt ~f:(fun ~use_op:_ loc name default t -> - let t = match type_of_pattern patt with - | None -> t - | Some _ -> - let reason = mk_reason (RIdentifier name) loc in - EvalT (t, DestructuringT (reason, Become), mk_id()) - in - Type_table.set (Context.type_table cx) loc t; - bindings_rev := (name, loc, t, default) :: !bindings_rev - ) in - let t = if default <> None then Type.optional t else t in - let params_rev = (None, t) :: x.params_rev in - let bindings_rev = !bindings_rev in - { x with params_rev; bindings_rev }, patt - -let add_rest cx loc id t x = - let name = Option.map id ~f:(fun (id_loc, name) -> - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - name - ) in - let rest = Some (name, loc, t) in - let bindings_rev = match id with - | None -> x.bindings_rev - | Some (_, name) -> (name, loc, t, None) :: x.bindings_rev - in - { x with rest; bindings_rev } - -let value {params_rev; _} = List.rev 
params_rev - -let rest {rest; _} = rest - -let iter f {bindings_rev; _} = List.iter f (List.rev bindings_rev) - -let subst_param cx map (name, t) = (name, Flow.subst cx map t) -let subst_rest cx map (name, loc, t) = (name, loc, Flow.subst cx map t) -let subst_binding cx map (name, loc, t, default) = (name, loc, Flow.subst cx map t, default) - -let subst cx map { params_rev; rest; bindings_rev } = { - params_rev = List.map (subst_param cx map) params_rev; - rest = Option.map ~f:(subst_rest cx map) rest; - bindings_rev = List.map (subst_binding cx map) bindings_rev; -} +include Func_params_intf + +module Make (C : Config) = struct + type 'T ast = 'T C.ast + + type 'T param_ast = 'T C.param_ast + + type 'T rest_ast = 'T C.rest_ast + + type param = C.param + + type rest = C.rest + + type reconstruct = + (ALoc.t * Type.t) param_ast list -> + (ALoc.t * Type.t) rest_ast option -> + (ALoc.t * Type.t) ast option + + type t = { + params_rev: param list; + rest: rest option; + reconstruct: reconstruct; + } + + let empty reconstruct = { params_rev = []; rest = None; reconstruct } + + let add_param p x = { x with params_rev = p :: x.params_rev } + + let add_rest r x = { x with rest = Some r } + + let value { params_rev; _ } = + List.fold_left + (fun acc p -> + let t = C.param_type p in + t :: acc) + [] + params_rev + + let rest { rest; _ } = Option.map ~f:C.rest_type rest + + let subst cx map { params_rev; rest; reconstruct } = + { + params_rev = Core_list.map ~f:(C.subst_param cx map) params_rev; + rest = Option.map ~f:(C.subst_rest cx map) rest; + reconstruct; + } + + let eval cx { params_rev; rest; reconstruct } = + let params = List.rev params_rev in + let param_tasts_rev = List.rev_map (C.eval_param cx) params in + let rest_tast = Option.map ~f:(C.eval_rest cx) rest in + reconstruct (List.rev param_tasts_rev) rest_tast +end diff --git a/src/typing/func_params.mli b/src/typing/func_params.mli index 3ca9b7c8988..3fddf40a957 100644 --- a/src/typing/func_params.mli +++ b/src/typing/func_params.mli @@ -1,49 +1,16 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
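(* A pared-down sketch of the functor shape introduced here: a Config module
   supplies the concrete parameter representation, and Make builds the
   accumulator (empty / add_param / value) on top of it. The Config below is
   invented purely for the example. *)
module type CONFIG = sig
  type param

  val param_type : param -> string
end

module Make (C : CONFIG) = struct
  type t = { params_rev: C.param list }

  let empty = { params_rev = [] }

  let add_param p x = { params_rev = p :: x.params_rev }

  (* Folding left over the reversed list rebuilds the parameters in source
     order, as Func_params.value does above. *)
  let value { params_rev } = List.fold_left (fun acc p -> C.param_type p :: acc) [] params_rev
end

module Example_config = struct
  type param = string * string

  let param_type (name, ty) = name ^ ": " ^ ty
end

module Example_params = Make (Example_config)

let () =
  let ps = Example_params.(empty |> add_param ("x", "number") |> add_param ("y", "string")) in
  assert (Example_params.value ps = [ "x: number"; "y: string" ])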
*) -type t +include module type of Func_params_intf -type default = (Loc.t, Loc.t) Flow_ast.Expression.t Default.t -type binding = string * Loc.t * Type.t * default option - -(* build up a params value *) -val empty: t - -val add_simple: Context.t -> - optional: bool -> - ?default: (Loc.t, Loc.t) Flow_ast.Expression.t -> - Loc.t -> (Loc.t * string) option -> Type.t -> - t -> t - -val add_complex: Context.t -> - expr:( - Context.t -> (Loc.t, Loc.t) Flow_ast.Expression.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t - ) -> - ?default: (Loc.t, Loc.t) Flow_ast.Expression.t -> - (Loc.t, Loc.t) Flow_ast.Pattern.t -> Type.t -> - t -> - t * (Loc.t, Loc.t * Type.t) Flow_ast.Pattern.t - -val add_rest: Context.t -> - Loc.t -> (Loc.t * string) option -> Type.t -> - t -> t - -(* (name, type) of each param, in order *) -(* destructured params will be unnamed *) -val value: t -> (string option * Type.t) list - -(* The rest param *) -val rest: t -> (string option * Loc.t * Type.t) option - -(* iterates over all bindings, traversing through any destructued - bindings as well, in source order of declaration *) -val iter: (binding -> unit) -> t -> unit - -val subst: Context.t -> - (Type.t SMap.t) -> (* type params map *) - t -> t +module Make (C : Config) : + S + with type 'T ast = 'T C.ast + and type 'T param_ast = 'T C.param_ast + and type 'T rest_ast = 'T C.rest_ast + and type param = C.param + and type rest = C.rest diff --git a/src/typing/func_params_intf.ml b/src/typing/func_params_intf.ml new file mode 100644 index 00000000000..454a6a984bb --- /dev/null +++ b/src/typing/func_params_intf.ml @@ -0,0 +1,63 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module type S = sig + type 'T ast + + type 'T param_ast + + type 'T rest_ast + + type t + + type param + + type rest + + type reconstruct = + (ALoc.t * Type.t) param_ast list -> + (ALoc.t * Type.t) rest_ast option -> + (ALoc.t * Type.t) ast option + + val empty : reconstruct -> t + + val add_param : param -> t -> t + + val add_rest : rest -> t -> t + + val value : t -> Type.fun_param list + + val rest : t -> Type.fun_rest_param option + + val subst : Context.t -> Type.t SMap.t -> t -> t + + val eval : Context.t -> t -> (ALoc.t * Type.t) ast option +end + +module type Config = sig + type 'T ast + + type 'T param_ast + + type 'T rest_ast + + type param + + type rest + + val param_type : param -> Type.fun_param + + val rest_type : rest -> Type.fun_rest_param + + val subst_param : Context.t -> Type.t SMap.t -> param -> param + + val subst_rest : Context.t -> Type.t SMap.t -> rest -> rest + + val eval_param : Context.t -> param -> (ALoc.t * Type.t) param_ast + + val eval_rest : Context.t -> rest -> (ALoc.t * Type.t) rest_ast +end diff --git a/src/typing/func_sig.ml b/src/typing/func_sig.ml index c223624e210..1a3967d3e01 100644 --- a/src/typing/func_sig.ml +++ b/src/typing/func_sig.ml @@ -1,348 +1,358 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) module Ast = Flow_ast - module Flow = Flow_js - open Reason open Type - -type kind = - | Ordinary - | Async - | Generator - | AsyncGenerator - | FieldInit of (Loc.t, Loc.t) Ast.Expression.t - | Predicate - | Ctor - -type t = { - reason: reason; - kind: kind; - tparams: Type.typeparam list; - tparams_map: Type.t SMap.t; - fparams: Func_params.t; - body: (Loc.t, Loc.t) Ast.Function.body option; - return_t: Type.t; -} - -let return_loc = - let module F = Ast.Function in - let open F in function - | {return = Available (_, (loc, _)); _} - | {F.body = BodyExpression (loc, _); _} -> loc - | {F.body = BodyBlock (loc, _); _} -> Loc.char_before loc - -let default_constructor reason = { - reason; - kind = Ctor; - tparams = []; - tparams_map = SMap.empty; - fparams = Func_params.empty; - body = None; - return_t = VoidT.why reason; -} - -let field_initializer tparams_map reason expr return_t = { - reason; - kind = FieldInit expr; - tparams = []; - tparams_map; - fparams = Func_params.empty; - body = None; - return_t; -} - -let subst cx map x = - let {tparams; tparams_map; fparams; return_t; _} = x in - (* Remove shadowed type params from `map`, but allow bounds/defaults to be +include Func_sig_intf + +module Make (F : Func_params.S) = struct + type func_params = F.t + + type func_params_tast = (ALoc.t * Type.t) F.ast + + type t = { + reason: reason; + kind: kind; + tparams: Type.typeparams; + tparams_map: Type.t SMap.t; + fparams: func_params; + body: (ALoc.t, ALoc.t) Ast.Function.body option; + return_t: Type.t; + (* To be unified with the type of the function. *) + knot: Type.t; + } + + let default_constructor reason = + { + reason; + kind = Ctor; + tparams = None; + tparams_map = SMap.empty; + fparams = F.empty (fun _ _ -> None); + body = None; + return_t = VoidT.why reason |> with_trust bogus_trust; + (* This can't be directly recursively called. In case this type is accidentally used downstream, + * stub it out with mixed. *) + knot = MixedT.why reason |> with_trust bogus_trust; + } + + let field_initializer tparams_map reason expr return_t = + { + reason; + kind = FieldInit expr; + tparams = None; + tparams_map; + fparams = F.empty (fun _ _ -> None); + body = None; + return_t; + (* This can't be recursively called. In case this type is accidentally used downstream, stub it + * out with mixed. *) + knot = MixedT.why reason |> with_trust bogus_trust; + } + + let subst cx map x = + let { tparams; tparams_map; fparams; return_t; _ } = x in + (* Remove shadowed type params from `map`, but allow bounds/defaults to be substituted if they refer to a type param before it is shadowed. 
*) - let tparams, map = tparams |> List.fold_left (fun (tparams, map) tp -> - let bound = Flow.subst cx map tp.bound in - let default = Option.map ~f:(Flow.subst cx map) tp.default in - {tp with bound; default}::tparams, - SMap.remove tp.name map - ) ([], map) in - let tparams = List.rev tparams in - let tparams_map = SMap.map (Flow.subst cx map) tparams_map in - let fparams = Func_params.subst cx map fparams in - let return_t = Flow.subst cx map return_t in - {x with tparams; tparams_map; fparams; return_t} - -let generate_tests cx f x = - let {tparams; tparams_map; fparams; return_t; _} = x in - Flow.generate_tests cx tparams (fun map -> f { - x with - tparams_map = SMap.map (Flow.subst cx map) tparams_map; - fparams = Func_params.subst cx map fparams; - return_t = Flow.subst cx map return_t; - }) - -let functiontype cx this_t {reason; kind; tparams; fparams; return_t; _} = - let knot = Tvar.mk cx reason in - let static = - let proto = FunProtoT reason in - Obj_type.mk_with_proto cx reason ~call:knot proto - in - let prototype = - let reason = replace_reason_const RPrototype reason in - Obj_type.mk cx reason - in - let funtype = { Type. - this_t; - params = Func_params.value fparams; - rest_param = Func_params.rest fparams; - return_t; - is_predicate = kind = Predicate; - closure_t = Env.peek_frame (); - changeset = Env.retrieve_closure_changeset (); - def_reason = reason; - } in - let t = DefT (reason, FunT (static, prototype, funtype)) in - let t = poly_type (Context.make_nominal cx) tparams t in - Flow.unify cx t knot; - t - -let methodtype cx {reason; tparams; fparams; return_t; _} = - let params = Func_params.value fparams in - let params_names, params_tlist = List.split params in - let rest_param = Func_params.rest fparams in - let def_reason = reason in - let t = DefT (reason, FunT ( - dummy_static reason, - dummy_prototype, - mk_boundfunctiontype - params_tlist ~rest_param ~def_reason ~params_names return_t - )) in - poly_type (Context.make_nominal cx) tparams t - -let gettertype ({return_t; _}: t) = return_t - -let settertype {fparams; _} = - match Func_params.value fparams with - | [(_, param_t)] -> param_t - | _ -> failwith "Setter property with unexpected type" - -let toplevels id cx this super ~decls ~stmts ~expr - {reason=reason_fn; kind; tparams_map; fparams; body; return_t; _} = - - let loc = Ast.Function.(match body with - | Some (BodyBlock (loc, _)) -> loc - | Some (BodyExpression (loc, _)) -> loc - | None -> Loc.none - ) in - let reason = mk_reason RFunctionBody loc in - - let env = Env.peek_env () in - let new_env = Env.clone_env env in - - Env.update_env cx loc new_env; - Env.havoc_all(); - - (* create and prepopulate function scope *) - let function_scope = - let var_scope_kind = - match kind with - | Ordinary - | FieldInit _ -> Scope.Ordinary - | Predicate -> Scope.Predicate - | Async -> Scope.Async - | Generator -> Scope.Generator - | AsyncGenerator -> Scope.AsyncGenerator - | Ctor -> Scope.Ctor + let tparams = + tparams + |> TypeParams.map (fun tp -> + let bound = Flow.subst cx map tp.bound in + let default = Option.map ~f:(Flow.subst cx map) tp.default in + { tp with bound; default }) in - Scope.fresh ~var_scope_kind () - in - - (* push the scope early so default exprs can reference earlier params *) - Env.push_var_scope cx function_scope; - - (* add `this` and `super` before looking at parameter bindings as when using - * `this` in default parameter values it refers to the function scope and - * `super` should resolve to the method's [[HomeObject]] - *) - 
Scope.add_entry (internal_name "this") this function_scope; - Scope.add_entry (internal_name "super") super function_scope; - - (* bind type params *) - SMap.iter (fun name t -> - let r = reason_of_t t in - let loc = aloc_of_reason r |> ALoc.to_loc in - Env.bind_type cx name (DefT (r, TypeT (TypeParamKind, t))) loc - ~state:Scope.State.Initialized - ) tparams_map; - - (* Check the rest parameter annotation *) - Option.iter - ~f:(fun (_, loc, t) -> - let rest_reason = - mk_reason (RCustom "Rest params are always arrays") loc in - Flow_js.flow cx (t, AssertRestParamT rest_reason) - ) - (Func_params.rest fparams); - - (* add param bindings *) - let const_params = Context.enable_const_params cx in - fparams |> Func_params.iter Scope.(fun (name, loc, t, default) -> - let reason = mk_reason (RParameter (Some name)) loc in - (* add default value as lower bound, if provided *) - Option.iter ~f:(fun default -> - let default_t = Flow.mk_default cx reason default - ~expr:(fun cx e -> snd (fst (expr cx e))) in - Flow.flow_t cx (default_t, t) - ) default; - (* add to scope *) - if const_params - then Env.bind_implicit_const ~state:State.Initialized - Entry.ConstParamBinding cx name t loc - else - let new_kind = - if Env.promote_to_const_like cx loc then Entry.ConstlikeParamBinding - else Entry.ParamBinding in - Env.bind_implicit_let ~state:State.Initialized - new_kind cx name t loc - ); - - (* early-add our own name binding for recursive calls *) - Option.iter id ~f:(fun (loc, name) -> - let entry = Scope.Entry.new_var ~loc (AnyT.at loc) in - Scope.add_entry name entry function_scope - ); - - let yield_t, next_t = - if kind = Generator || kind = AsyncGenerator then - Tvar.mk cx (replace_reason_const (RCustom "yield") reason), - Tvar.mk cx (replace_reason_const (RCustom "next") reason) - else - DefT (replace_reason_const (RCustom "no yield") reason, MixedT Mixed_everything), - DefT (replace_reason_const (RCustom "no next") reason, MixedT Mixed_everything) - in - - let yield, next, return = Scope.( - let new_entry t = Entry.( - let loc = loc_of_t t in - let state = State.Initialized in - new_const ~loc ~state t - ) in - new_entry yield_t, new_entry next_t, new_entry return_t - ) in - - Scope.add_entry (internal_name "yield") yield function_scope; - Scope.add_entry (internal_name "next") next function_scope; - Scope.add_entry (internal_name "return") return function_scope; - - let statements, reconstruct_body = Ast.Statement.( - match body with - | None -> [], Fn.const None - | Some (Ast.Function.BodyBlock (loc, { Block.body })) -> - body, (fun body -> Some (Ast.Function.BodyBlock (loc, { Block.body }))) - | Some (Ast.Function.BodyExpression expr) -> - [fst expr, Return {Return.argument = Some expr}], - (function - | [_, Return { Return.argument = Some expr }] - | [_, Expression { Expression.expression = expr; _ }] -> - Some (Ast.Function.BodyExpression expr) - | _ -> failwith "expected return body") - ) in - - (* NOTE: Predicate functions can currently only be of the form: + let map = + TypeParams.to_list tparams |> List.fold_left (fun map tp -> SMap.remove tp.name map) map + in + let tparams_map = SMap.map (Flow.subst cx map) tparams_map in + let fparams = F.subst cx map fparams in + let return_t = Flow.subst cx map return_t in + { x with tparams; tparams_map; fparams; return_t } + + let generate_tests cx f x = + let { tparams; tparams_map; fparams; return_t; _ } = x in + Flow.generate_tests cx (tparams |> TypeParams.to_list) (fun map -> + f + { + x with + tparams_map = SMap.map (Flow.subst cx map) 
tparams_map; + fparams = F.subst cx map fparams; + return_t = Flow.subst cx map return_t; + }) + + let functiontype cx this_t { reason; kind; tparams; fparams; return_t; knot; _ } = + let make_trust = Context.trust_constructor cx in + let static = + let proto = FunProtoT reason in + Obj_type.mk_with_proto cx reason proto + in + let prototype = + let reason = replace_desc_reason RPrototype reason in + Obj_type.mk cx reason + in + let funtype = + { + Type.this_t; + params = F.value fparams; + rest_param = F.rest fparams; + return_t; + is_predicate = kind = Predicate; + closure_t = Env.peek_frame (); + changeset = Env.retrieve_closure_changeset (); + def_reason = reason; + } + in + let t = DefT (reason, make_trust (), FunT (static, prototype, funtype)) in + let t = poly_type_of_tparams (Context.make_nominal cx) tparams t in + Flow.unify cx t knot; + t + + let methodtype cx { reason; tparams; fparams; return_t; _ } = + let params = F.value fparams in + let (params_names, params_tlist) = List.split params in + let rest_param = F.rest fparams in + let def_reason = reason in + let t = + DefT + ( reason, + bogus_trust (), + FunT + ( dummy_static reason, + dummy_prototype, + mk_boundfunctiontype params_tlist ~rest_param ~def_reason ~params_names return_t ) ) + in + poly_type_of_tparams (Context.make_nominal cx) tparams t + + let gettertype ({ return_t; _ } : t) = return_t + + let settertype { fparams; _ } = + match F.value fparams with + | [(_, param_t)] -> param_t + | _ -> failwith "Setter property with unexpected type" + + let toplevels + id + cx + this + super + ~decls + ~stmts + ~expr + { reason = reason_fn; kind; tparams_map; fparams; body; return_t; knot; _ } = + let loc = + Ast.Function.( + match body with + | Some (BodyBlock (loc, _)) -> loc + | Some (BodyExpression (loc, _)) -> loc + | None -> ALoc.none) + in + let reason = mk_reason RFunctionBody loc in + let env = Env.peek_env () in + let new_env = Env.clone_env env in + Env.update_env cx loc new_env; + Env.havoc_all (); + + (* create and prepopulate function scope *) + let function_scope = + let var_scope_kind = + match kind with + | Ordinary + | FieldInit _ -> + Scope.Ordinary + | Predicate -> Scope.Predicate + | Async -> Scope.Async + | Generator -> Scope.Generator + | AsyncGenerator -> Scope.AsyncGenerator + | Ctor -> Scope.Ctor + in + Scope.fresh ~var_scope_kind () + in + (* push the scope early so default exprs can reference earlier params *) + Env.push_var_scope cx function_scope; + + (* add `this` and `super` before looking at parameter bindings as when using + * `this` in default parameter values it refers to the function scope and + * `super` should resolve to the method's [[HomeObject]] + *) + Scope.add_entry (internal_name "this") this function_scope; + Scope.add_entry (internal_name "super") super function_scope; + + (* bind type params *) + SMap.iter + (fun name t -> + let r = reason_of_t t in + let loc = aloc_of_reason r in + Env.bind_type + cx + name + (DefT (r, bogus_trust (), TypeT (TypeParamKind, t))) + loc + ~state:Scope.State.Initialized) + tparams_map; + + (* add param bindings *) + let params_ast = F.eval cx fparams in + (* early-add our own name binding for recursive calls. 
*) + Option.iter id ~f:(fun (loc, { Ast.Identifier.name; comments = _ }) -> + let entry = knot |> Scope.Entry.new_var ~loc in + Scope.add_entry name entry function_scope); + + let (yield_t, next_t) = + if kind = Generator || kind = AsyncGenerator then + ( Tvar.mk cx (replace_desc_reason (RCustom "yield") reason), + Tvar.mk cx (replace_desc_reason (RCustom "next") reason) ) + else + ( DefT + ( replace_desc_reason (RCustom "no yield") reason, + bogus_trust (), + MixedT Mixed_everything ), + DefT + ( replace_desc_reason (RCustom "no next") reason, + bogus_trust (), + MixedT Mixed_everything ) ) + in + let (yield, next, return) = + Scope.( + let new_entry t = + Entry.( + let loc = loc_of_t t in + let state = State.Initialized in + new_const ~loc ~state t) + in + (new_entry yield_t, new_entry next_t, new_entry return_t)) + in + Scope.add_entry (internal_name "yield") yield function_scope; + Scope.add_entry (internal_name "next") next function_scope; + Scope.add_entry (internal_name "return") return function_scope; + + let (statements, reconstruct_body) = + Ast.Statement.( + match body with + | None -> ([], Fn.const None) + | Some (Ast.Function.BodyBlock (loc, { Block.body })) -> + (body, (fun body -> Some (Ast.Function.BodyBlock (loc, { Block.body })))) + | Some (Ast.Function.BodyExpression expr) -> + ( [ + ( fst expr, + Return + { Return.argument = Some expr; comments = Flow_ast_utils.mk_comments_opt () } ); + ], + (function + | [(_, Return { Return.argument = Some expr; comments = _ })] + | [(_, Expression { Expression.expression = expr; _ })] -> + Some (Ast.Function.BodyExpression expr) + | _ -> failwith "expected return body") )) + in + (* NOTE: Predicate functions can currently only be of the form: function f(...) { return ; } *) - Ast.Statement.( - match kind with - | Predicate -> begin - match statements with - | [(_, Return { Return.argument = Some _})] -> () - | _ -> - let loc = aloc_of_reason reason in - Flow_js.add_output cx - Flow_error.(EUnsupportedSyntax (loc |> ALoc.to_loc, PredicateInvalidBody)) - end - | _ -> () - ); - - (* decl/type visit pre-pass *) - decls cx statements; - - (* statement visit pass *) - let statements_ast, statements_abnormal = - Abnormal.catch_stmts_control_flow_exception (fun () -> stmts cx statements) in - let is_void = Abnormal.( - match statements_abnormal with - | Some Return -> false - | Some Throw -> false (* NOTE *) - | Some exn -> - (* TODO: look into where this throws to. Is it ok that this throws? *) - throw_stmt_control_flow_exception Typed_ast.Statement.error exn - | None -> true - ) in - let body_ast = reconstruct_body statements_ast in - - (* build return type for void funcs *) - let init_ast = if is_void then - let loc = loc_of_t return_t in - (* Some branches add an ImplicitTypeParam frame to force our flow_use_op - * algorithm to pick use_ops outside the provided loc. 
*) - let use_op, void_t, init_ast = match kind with - | Ordinary - | Ctor -> - let t = VoidT.at loc in - let use_op = Op (FunImplicitReturn {fn = reason_fn; upper = reason_of_t return_t}) in - use_op, t, None - | Async -> - let reason = annot_reason (mk_reason (RType "Promise") loc) in - let void_t = VoidT.at loc in - let t = Flow.get_builtin_typeapp cx reason "Promise" [void_t] in - let use_op = Op (FunImplicitReturn {fn = reason_fn; upper = reason_of_t return_t}) in - let use_op = Frame (ImplicitTypeParam (loc_of_t return_t), use_op) in - use_op, t, None - | Generator -> - let reason = annot_reason (mk_reason (RType "Generator") loc) in - let void_t = VoidT.at loc in - let t = Flow.get_builtin_typeapp cx reason "Generator" [yield_t; void_t; next_t] in - let use_op = Op (FunImplicitReturn {fn = reason_fn; upper = reason_of_t return_t}) in - let use_op = Frame (ImplicitTypeParam (loc_of_t return_t), use_op) in - use_op, t, None - | AsyncGenerator -> - let reason = annot_reason (mk_reason (RType "AsyncGenerator") loc) in - let void_t = VoidT.at loc in - let t = Flow.get_builtin_typeapp cx reason "AsyncGenerator" [yield_t; void_t; next_t] in - let use_op = Op (FunImplicitReturn {fn = reason_fn; upper = reason_of_t return_t}) in - let use_op = Frame (ImplicitTypeParam (loc_of_t return_t), use_op) in - use_op, t, None - | FieldInit e -> - let (_, t), _ as ast = expr cx e in - unknown_use, t, Some ast - | Predicate -> - let loc = aloc_of_reason reason |> ALoc.to_loc in - Flow_js.add_output cx - Flow_error.(EUnsupportedSyntax (loc, PredicateVoidReturn)); - let t = VoidT.at loc in - let use_op = Op (FunImplicitReturn {fn = reason_fn; upper = reason_of_t return_t}) in - use_op, t, None + Ast.Statement.( + match kind with + | Predicate -> + begin + match statements with + | [(_, Return { Return.argument = Some _; comments = _ })] -> () + | _ -> + let loc = aloc_of_reason reason in + Flow_js.add_output cx Error_message.(EUnsupportedSyntax (loc, PredicateInvalidBody)) + end + | _ -> ()); + + (* decl/type visit pre-pass *) + decls cx statements; + + (* statement visit pass *) + let (statements_ast, statements_abnormal) = + Abnormal.catch_stmts_control_flow_exception (fun () -> stmts cx statements) in - Flow.flow cx (void_t, UseT (use_op, return_t)); - init_ast - else None in - - Env.pop_var_scope (); + let is_void = + Abnormal.( + match statements_abnormal with + | Some Return -> false + | Some Throw -> false (* NOTE *) + | Some (Break _) + | Some (Continue _) -> + failwith "Illegal toplevel abnormal directive" + | None -> true) + in + let body_ast = reconstruct_body statements_ast in + (* build return type for void funcs *) + let init_ast = + if is_void then ( + let loc = loc_of_t return_t in + (* Some branches add an ImplicitTypeParam frame to force our flow_use_op + * algorithm to pick use_ops outside the provided loc. 
*) + let (use_op, void_t, init_ast) = + match kind with + | Ordinary + | Ctor -> + let t = VoidT.at loc |> with_trust bogus_trust in + let use_op = Op (FunImplicitReturn { fn = reason_fn; upper = reason_of_t return_t }) in + (use_op, t, None) + | Async -> + let reason = annot_reason (mk_reason (RType "Promise") loc) in + let void_t = VoidT.at loc |> with_trust bogus_trust in + let t = Flow.get_builtin_typeapp cx reason "Promise" [void_t] in + let use_op = Op (FunImplicitReturn { fn = reason_fn; upper = reason_of_t return_t }) in + let use_op = Frame (ImplicitTypeParam, use_op) in + (use_op, t, None) + | Generator -> + let reason = annot_reason (mk_reason (RType "Generator") loc) in + let void_t = VoidT.at loc |> with_trust bogus_trust in + let t = Flow.get_builtin_typeapp cx reason "Generator" [yield_t; void_t; next_t] in + let use_op = Op (FunImplicitReturn { fn = reason_fn; upper = reason_of_t return_t }) in + let use_op = Frame (ImplicitTypeParam, use_op) in + (use_op, t, None) + | AsyncGenerator -> + let reason = annot_reason (mk_reason (RType "AsyncGenerator") loc) in + let void_t = VoidT.at loc |> with_trust bogus_trust in + let t = + Flow.get_builtin_typeapp cx reason "AsyncGenerator" [yield_t; void_t; next_t] + in + let use_op = Op (FunImplicitReturn { fn = reason_fn; upper = reason_of_t return_t }) in + let use_op = Frame (ImplicitTypeParam, use_op) in + (use_op, t, None) + | FieldInit e -> + let (((_, t), _) as ast) = expr cx e in + let body = mk_expression_reason e in + let use_op = Op (InitField { op = reason_fn; body }) in + (use_op, t, Some ast) + | Predicate -> + let loc = aloc_of_reason reason in + Flow_js.add_output cx Error_message.(EUnsupportedSyntax (loc, PredicateVoidReturn)); + let t = VoidT.at loc |> with_trust bogus_trust in + let use_op = Op (FunImplicitReturn { fn = reason_fn; upper = reason_of_t return_t }) in + (use_op, t, None) + in + Flow.flow cx (void_t, UseT (use_op, return_t)); + init_ast + ) else + None + in + Env.pop_var_scope (); - Env.update_env cx loc env; + Env.update_env cx loc env; - (* return a tuple of (function body AST option, field initializer AST option). + (* return a tuple of (function body AST option, field initializer AST option). - the function body option is Some _ if the func sig's body was Some, and None if the func sig's body was None. - the field initializer is Some expr' if the func sig's kind was FieldInit expr, where expr' is the typed AST translation of expr. 
*) - body_ast, init_ast + (params_ast, body_ast, init_ast) -let to_ctor_sig f = { f with kind = Ctor } + let to_ctor_sig f = { f with kind = Ctor } +end -let with_typeparams cx f x = - Type_table.with_typeparams x.tparams (Context.type_table cx) f +let return_loc = function + | { Ast.Function.return = Ast.Type.Available (_, (loc, _)); _ } + | { Ast.Function.body = Ast.Function.BodyExpression (loc, _); _ } -> + loc + | { Ast.Function.body = Ast.Function.BodyBlock (loc, _); _ } -> + loc |> ALoc.to_loc_exn |> Loc.char_before |> ALoc.of_loc diff --git a/src/typing/func_sig.mli b/src/typing/func_sig.mli index 926a18e3f0f..bba1eddf9b9 100644 --- a/src/typing/func_sig.mli +++ b/src/typing/func_sig.mli @@ -1,113 +1,16 @@ -(** Intermediate representation for functions *) - -type kind = - | Ordinary - | Async - | Generator - | AsyncGenerator - | FieldInit of (Loc.t, Loc.t) Flow_ast.Expression.t - | Predicate - | Ctor - -type t = { - reason: Reason.t; - kind: kind; - tparams: Type.typeparam list; - tparams_map: Type.t SMap.t; - fparams: Func_params.t; - body: (Loc.t, Loc.t) Flow_ast.Function.body option; - return_t: Type.t; -} - -(** 1. Constructors *) - -(** Create signature for a default constructor. - - Flow represents default constructors as empty functions, i.e., functions - with no type parameters, no formal parameters, an empty body, and a void - return type. *) -val default_constructor: - Reason.t -> - t - -(** Create signature for a class field initializer. - - Field initializers are evaluated in the context of the class body. - Representing the initializer as a function means we can reuse `toplevels` - from this module to evaluate the initializer in the appropriate context, - where `this` and `super` point to the appropriate types. *) -val field_initializer: - Type.t SMap.t -> (* type params map *) - Reason.t -> - (Loc.t, Loc.t) Flow_ast.Expression.t -> (* init *) - Type.t -> (* return *) - t - -(** 1. Manipulation *) - -(** Return a signature with types from provided map substituted. - - Note that this function does not substitute type parameters declared by the - function itself, which may shadow the names of type parameters in the - provided map. +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) - This signature's own type parameters will be subtituted by the - `generate-tests` function. *) -val subst: Context.t -> - Type.t SMap.t -> (* type params map *) - t -> t - -(** Invoke callback with type parameters substituted by upper/lower bounds. *) -val generate_tests: Context.t -> - (t -> 'a) -> t -> 'a - -(** Evaluate the function. - - This function creates a new scope, installs bindings for the function's - parameters and internal bindings (e.g., this, yield), processes the - statements in the function body, and provides an implicit return type if - necessary. This is when the body of the function gets checked, so it also - returns a typed AST of the function body. 
*) -val toplevels: - Loc.t Flow_ast.Identifier.t option -> (* id *) - Context.t -> - Scope.Entry.t -> (* this *) - Scope.Entry.t -> (* super *) - decls:(Context.t -> (Loc.t, Loc.t) Flow_ast.Statement.t list -> unit) -> - stmts:(Context.t -> (Loc.t, Loc.t) Flow_ast.Statement.t list -> - (Loc.t, Loc.t * Type.t) Flow_ast.Statement.t list) -> - expr:(Context.t -> (Loc.t, Loc.t) Flow_ast.Expression.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t) -> - t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Function.body option * - (Loc.t, Loc.t * Type.t) Flow_ast.Expression.t option - -(** 1. Type Conversion *) - -(** Create a function type for function declarations/expressions. *) -val functiontype: Context.t -> - Type.t -> (* this *) - t -> Type.t - -(** Create a function type for class/interface methods. *) -val methodtype: Context.t -> t -> Type.t - -(** Create a type of the return expression of a getter function. - - Note that this is a partial function. If the signature does not represent a - getter, this function will raise an exception. *) -val gettertype: t -> Type.t - -(** Create a type of the single parameter of a setter function. +(** Intermediate representation for functions *) - Note that this is a partial function. If the signature does not represent a - setter, this function will raise an exception. *) -val settertype: t -> Type.t +include module type of Func_sig_intf -(** 1. Util *) +module Make (F : Func_params.S) : + S with type func_params = F.t and type func_params_tast = (ALoc.t * Type.t) F.ast +val return_loc : (ALoc.t, ALoc.t) Flow_ast.Function.t -> ALoc.t (** The location of the return type for a function. *) -val return_loc: (Loc.t, Loc.t) Flow_ast.Function.t -> Loc.t -val to_ctor_sig: t -> t - -val with_typeparams: Context.t -> (unit -> 'a) -> t -> 'a diff --git a/src/typing/func_sig_intf.ml b/src/typing/func_sig_intf.ml new file mode 100644 index 00000000000..40fc306094d --- /dev/null +++ b/src/typing/func_sig_intf.ml @@ -0,0 +1,127 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type kind = + | Ordinary + | Async + | Generator + | AsyncGenerator + | FieldInit of (ALoc.t, ALoc.t) Flow_ast.Expression.t + | Predicate + | Ctor + +module type S = sig + type func_params + + type func_params_tast + + type t = { + reason: Reason.t; + kind: kind; + tparams: Type.typeparams; + tparams_map: Type.t SMap.t; + fparams: func_params; + body: (ALoc.t, ALoc.t) Flow_ast.Function.body option; + return_t: Type.t; + knot: Type.t; + } + + (** 1. Constructors *) + + val default_constructor : Reason.t -> t + (** Create signature for a default constructor. + + Flow represents default constructors as empty functions, i.e., functions + with no type parameters, no formal parameters, an empty body, and a void + return type. *) + + val field_initializer : + Type.t SMap.t -> + (* type params map *) + Reason.t -> + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (* init *) + Type.t -> + (* return *) + t + (** Create signature for a class field initializer. + + Field initializers are evaluated in the context of the class body. + Representing the initializer as a function means we can reuse `toplevels` + from this module to evaluate the initializer in the appropriate context, + where `this` and `super` point to the appropriate types. *) + + (** 1. 
Manipulation *) + + val subst : Context.t -> Type.t SMap.t -> (* type params map *) + t -> t + (** Return a signature with types from provided map substituted. + + Note that this function does not substitute type parameters declared by the + function itself, which may shadow the names of type parameters in the + provided map. + + This signature's own type parameters will be substituted by the + `generate-tests` function. *) + + val generate_tests : Context.t -> (t -> 'a) -> t -> 'a + (** Invoke callback with type parameters substituted by upper/lower bounds. *) + + val toplevels : + (ALoc.t, ALoc.t) Flow_ast.Identifier.t option -> + (* id *) + Context.t -> + Scope.Entry.t -> + (* this *) + Scope.Entry.t -> + decls:((* super *) + Context.t -> (ALoc.t, ALoc.t) Flow_ast.Statement.t list -> unit) -> + stmts: + (Context.t -> + (ALoc.t, ALoc.t) Flow_ast.Statement.t list -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Statement.t list) -> + expr: + (Context.t -> + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t) -> + t -> + func_params_tast option + * (ALoc.t, ALoc.t * Type.t) Flow_ast.Function.body option + * (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t option + (** Evaluate the function. + + This function creates a new scope, installs bindings for the function's + parameters and internal bindings (e.g., this, yield), processes the + statements in the function body, and provides an implicit return type if + necessary. This is when the body of the function gets checked, so it also + returns a typed AST of the function body. *) + + (** 1. Type Conversion *) + + val functiontype : Context.t -> Type.t -> (* this *) + t -> Type.t + (** Create a function type for function declarations/expressions. *) + + val methodtype : Context.t -> t -> Type.t + (** Create a function type for class/interface methods. *) + + val gettertype : t -> Type.t + (** Create a type of the return expression of a getter function. + + Note that this is a partial function. If the signature does not represent a + getter, this function will raise an exception. *) + + val settertype : t -> Type.t + (** Create a type of the single parameter of a setter function. + + Note that this is a partial function. If the signature does not represent a + setter, this function will raise an exception. *) + + (** 1. Util *) + + val to_ctor_sig : t -> t +end diff --git a/src/typing/gc_js.ml b/src/typing/gc_js.ml deleted file mode 100644 index 2a75e9af42d..00000000000 --- a/src/typing/gc_js.ml +++ /dev/null @@ -1,185 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -open Constraint -open Type - -module LocMap = Utils_js.LocMap -module P = Type.Polarity -module Marked = Marked.IdMarked - -(** Garbage collection (GC) for graphs refers to the act of "marking" reachable - type variables from a given set of "roots," by following links between type - variables and traversing their concrete bounds. - - We mark only those dependencies that may contribute to errors. In - particular, only type variables that are indirectly reachable via concrete - bounds are marked; directly reachable type variables via links are not - marked, since Flow's algorithm ensures that their concrete bounds are - already propagated.
- - This is useful for pruning the graph, i.e., removing type variables in a - graph that make no difference when the graph is merged with other graphs - through its requires and exports. **) - -(* GC can be made more precise by respecting "polarity," which is just a fancy - name that indicates the direction of walking: when a type variable is - reached, we can walk only its lower bounds or its upper bounds based on the - direction of the walk at that point. - - However, a directed walk requires determining the polarity of every part of - every type. For some types, like those for functions, objects, arrays, - etc. this is fairly standard. But for several other types, it is non-trivial - to determine polarity: to do so, we need to carefully analyze how they appear - in the flow rules, and whether their parts switch sides when those rules are - simplified. Determining the wrong polarity in even one case can lead to - hard-to-find bugs: at best, things crash because a type variable is reached - that was marked unreachable, leading to a crash; at worst, a dependency is - missed, leading to missed errors. - - The type visitor has an in-progress, conservative polarity calculation. It is - conservative in that any "unknown" or unimplemented polarity is treated as - Neutral, so we will preserve both lower bounds and upper bounds. *) - -type state = Marked.t - -let gc = object (self) - inherit [state] Type_visitor.t as super - - val depth = ref 0; - - method! type_ cx pole marked t = - Option.iter ~f:(fun { Verbose.depth = verbose_depth; indent; enabled_during_flowlib=_;} -> - let pid = Context.pid_prefix cx in - let indent = String.make (!depth * indent) ' ' in - Utils_js.prerr_endlinef "\n%s%sGC (%s): %s" indent pid - (Polarity.string pole) - (Debug_js.dump_t cx ~depth:verbose_depth t) - ) (Context.verbose cx); - incr depth; - let marked = super#type_ cx pole marked t in - decr depth; - marked - - method! 
tvar cx pole marked r id = - match Marked.add id pole marked with - | None -> marked - | Some (pole, marked) -> - let root_id, constraints = Context.find_constraints cx id in - if id != root_id then - self#tvar cx pole marked r root_id - else - match constraints with - | Resolved t -> self#type_ cx pole marked t - | Unresolved bounds -> - let marked = - if P.compat (pole, Positive) then - let marked = TypeMap.fold (fun l _ acc -> - self#type_ cx Positive acc l - ) bounds.lower marked in - let marked = IMap.fold (fun id _ acc -> - self#tvar_bounds cx Positive acc id - ) bounds.lowertvars marked in - marked - else marked - in - let marked = - if P.compat (pole, Negative) then - let marked = UseTypeMap.fold (fun u _ acc -> - self#use_type_ cx acc u - ) bounds.upper marked in - let marked = IMap.fold (fun id _ acc -> - self#tvar_bounds cx Negative acc id - ) bounds.uppertvars marked in - marked - else marked - in - marked - - method private tvar_bounds cx pole marked id = - match Marked.add id pole marked with - | None -> marked - | Some (pole, marked) -> - let root_id, constraints = Context.find_constraints cx id in - if id != root_id then - self#tvar_bounds cx pole marked root_id - else - match constraints with - | Resolved _ -> marked - | Unresolved bounds -> - let marked = - if P.compat (pole, Positive) then - IMap.fold (fun id _ acc -> - self#tvar_bounds cx Positive acc id - ) bounds.lowertvars marked - else marked - in - let marked = - if P.compat (pole, Negative) then - IMap.fold (fun id _ acc -> - self#tvar_bounds cx Negative acc id - ) bounds.uppertvars marked - else marked - in - marked -end - -(* Keep a reachable type variable around. *) -let live cx marked p id = - let constraints = Context.find_graph cx id in - match constraints with - | Resolved _ -> () - | Unresolved bounds -> - let lower, lowertvars = - if P.compat (p, Positive) then - bounds.lower, - IMap.filter (fun id _ -> Marked.mem id Positive marked) bounds.lowertvars - else - TypeMap.empty, IMap.empty - in - let upper, uppertvars = - if P.compat (p, Negative) then - bounds.upper, - IMap.filter (fun id _ -> Marked.mem id Negative marked) bounds.uppertvars - else - UseTypeMap.empty, IMap.empty - in - bounds.lower <- lower; - bounds.upper <- upper; - bounds.lowertvars <- lowertvars; - bounds.uppertvars <- uppertvars; - () - -(* Kill an unreachable type variable. *) -let die cx id = - Context.remove_tvar cx id - -let init ~master_cx = - Marked.empty - (* Exclude tvars from the master cx. Adding these ids to the marked set - * prevents the visitor from walking their bounds. *) - |> IMap.fold (fun id _ acc -> Marked.exclude id acc) (Context.graph_sig master_cx) - -let mark cx state = - state - (* Mark tvars reachable from imports. *) - |> LocMap.fold (fun _ t acc -> gc#type_ cx Negative acc t) (Context.require_map cx) - (* Mark tvars reachable from exports. *) - |> SMap.fold (fun _ t acc -> gc#type_ cx Positive acc t) (Context.module_map cx) - -let sweep ~master_cx cx state = - let master_graph = Context.graph_sig master_cx in - (* Collect unmarked tvars from the graph. *) - IMap.iter (fun id _ -> - (* Don't collect tvars from the master cx, which are explicitly excluded - * from GC. Because the master cx is part of every cx, we can simply remove - * all parts of the master from any cx. This is done separately in merge. 
*) - if IMap.mem id master_graph then () else - match Marked.get id state with - | None -> die cx id - | Some p -> live cx state p id - ) (Context.graph cx) diff --git a/src/typing/gc_js.mli b/src/typing/gc_js.mli deleted file mode 100644 index 0ebdbb23649..00000000000 --- a/src/typing/gc_js.mli +++ /dev/null @@ -1,14 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type state - -val init: master_cx:Context.sig_t -> state - -val mark: Context.t -> state -> state - -val sweep: master_cx:Context.sig_t -> Context.t -> state -> unit diff --git a/src/typing/graph.mli b/src/typing/graph.mli index cc50c8135a0..825a296914f 100644 --- a/src/typing/graph.mli +++ b/src/typing/graph.mli @@ -1,8 +1,8 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val format: Context.t -> string list +val format : Context.t -> string list diff --git a/src/typing/graph_explorer.ml b/src/typing/graph_explorer.ml index 0f3677796e5..fb9ad003e53 100644 --- a/src/typing/graph_explorer.ml +++ b/src/typing/graph_explorer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -31,99 +31,73 @@ open Utils_js is guaranteed to receive one and only one lower bound over its lifetime. *) -(* There are two kinds of nodes: unexplored and explored. Explored nodes have +(* The graph maintains the two sets of nodes separately, with edges going across + in both directions. In other words, the graph is bipartite. + + There are two kinds of nodes: unexplored and explored. Explored nodes have dependencies to unexplored nodes. Unexplored nodes have reverse dependencies to explored nodes. Note that we always maintain dependencies (and reverse - dependencies) in transitively closed form. *) + dependencies) in transitively closed form. + + Logically these sets are disjoint. As explained above, each node starts out + as unexplored, and becomes explored when a set of edges are introduced from + the node to other nodes. +*) -type unexplored = { - mutable rev_deps: ISet.t; -} +type unexplored = { mutable rev_deps: ISet.t } -type explored = { - mutable deps: ISet.t; -} +type explored = { mutable deps: ISet.t } -(* union type of unexplored and explored nodes; useful for aggregation. *) type node = -| Unexplored of unexplored -| Explored of explored + | Unexplored of unexplored + | Explored of explored -(* The graph maintains the two sets of nodes separately, with edges going across - in both directions. In other words, the graph is bipartite. +module Tbl = Hashtbl.Make (struct + type t = int - For efficiency, we also maintain a third set of nodes, namely the nodes that - have been fully explored. + let equal a b = a = b - Logically these sets are disjoint. As explained above, each node starts out - as unexplored, and becomes explored when a set of edges are introduced from - the node to other nodes. When an explored node has no more dependencies, it - is moved to the finished set. 
+ let hash = Hashtbl.hash +end) - That said, we sometimes aggressively mark nodes as finished (e.g., once we - reduce signature context graphs after merging), without taking the time to - clear out those nodes from the other sets. This is fine because we always - check membership in the set of finished nodes before doing any work. -*) -type graph = { - mutable unexplored_nodes: unexplored IMap.t; - mutable explored_nodes: explored IMap.t; - mutable finished: ISet.t; -} - -let new_graph finished = { - unexplored_nodes = IMap.empty; - explored_nodes = IMap.empty; - finished; -} - -(* When other_graph belongs to a dependency, merge finished from other_graph to - graph. We don't care about merging explored_nodes and unexplored_nodes from - other_graph, since those were local to other_graph and should have been - cleared in any case to optimize space. On the other hand, we do care about - preserving the explored_nodes and unexplored_nodes in graph, since they may - still be in use. - - When other_graph does *not* belong to a dependency (instead, it belongs to a - file in the same cycle as graph), we need to merge explored_nodes and - unexplored_nodes as well. -*) -let union other_graph graph = - { finished = ISet.union other_graph.finished graph.finished; - unexplored_nodes = IMap.union other_graph.unexplored_nodes graph.unexplored_nodes; - explored_nodes = IMap.union other_graph.explored_nodes graph.explored_nodes; - } +type graph = node Tbl.t + +let new_graph () = Tbl.create (1 lsl 12) let find_unexplored id graph = - IMap.find_unsafe id graph.unexplored_nodes + match Tbl.find graph id with + | Unexplored unexplored -> unexplored + | Explored _ -> raise Not_found let find_explored id graph = - IMap.find_unsafe id graph.explored_nodes + match Tbl.find graph id with + | Unexplored _ -> raise Not_found + | Explored explored -> explored + +let is_finished explored = ISet.is_empty explored.deps (* status of a node *) type stat = -| Found of node -| Finished -| Not_found + | Found of node + | Finished + | Node_not_found (* look up status of a node in a graph *) let stat_graph id graph = - if ISet.mem id graph.finished then Finished else - match IMap.get id graph.unexplored_nodes with - | Some unexplored -> Found (Unexplored unexplored) - | None -> begin match IMap.get id graph.explored_nodes with - | Some explored -> Found (Explored explored) - | None -> Not_found - end + match Tbl.find graph id with + | exception Not_found -> Node_not_found + | Unexplored _ as node -> Found node + | Explored explored as node -> + if is_finished explored then + Finished + else + Found node let find_graph id graph = match stat_graph id graph with | Found node -> node | _ -> failwith (spf "expected node %d to exist" id) -let is_finished explored = - ISet.is_empty explored.deps - let is_finished_node = function | Unexplored _ -> false | Explored explored -> is_finished explored @@ -134,68 +108,63 @@ let is_unexplored_node = function (* Adding edges from node id1 to nodes in ids2. We assume that id1 is unexplored, whereas ids2 may be explored or unexplored (but not finished). *) + (** NOTE: This process has a lot of similarities with how constraints on usual tvars are handled in the context graph. In the future, we might move this processing back into that framework, by introducing new kinds of tvars on which this processing can apply. 
**) let edges graph (id1, ids2) = - let unexplored1 = find_unexplored id1 graph in - graph.unexplored_nodes <- IMap.remove id1 graph.unexplored_nodes; + let { rev_deps = unexplored1_rev_deps } = find_unexplored id1 graph in let explored1 = { deps = ISet.empty } in - graph.explored_nodes <- IMap.add id1 explored1 graph.explored_nodes; + Tbl.replace graph id1 (Explored explored1); let finished_ids = ref ISet.empty in - - let ids2 = List.fold_left (fun ids2 id2 -> - match find_graph id2 graph with - | Unexplored unexplored2 -> - explored1.deps <- ISet.add id2 explored1.deps; - unexplored1.rev_deps |> ISet.iter (fun id0 -> - let explored0 = find_explored id0 graph in - explored0.deps <- ISet.add id2 explored0.deps; - ); - - unexplored2.rev_deps <- ISet.add id1 unexplored2.rev_deps; - unexplored2.rev_deps <- - ISet.union unexplored1.rev_deps unexplored2.rev_deps; - - ids2 - - | Explored explored2 -> - ISet.union explored2.deps ids2 - - ) ISet.empty ids2 in - + let ids2 = + List.fold_left + (fun ids2 id2 -> + if id2 = id1 then + ids2 + else + match find_graph id2 graph with + | Unexplored unexplored2 -> + explored1.deps <- ISet.add id2 explored1.deps; + unexplored1_rev_deps + |> ISet.iter (fun id0 -> + let explored0 = find_explored id0 graph in + explored0.deps <- ISet.add id2 explored0.deps); + + unexplored2.rev_deps <- ISet.add id1 unexplored2.rev_deps; + unexplored2.rev_deps <- ISet.union unexplored1_rev_deps unexplored2.rev_deps; + + ids2 + | Explored explored2 -> ISet.union explored2.deps ids2) + ISet.empty + ids2 + in let ids2 = ISet.remove id1 ids2 in - explored1.deps <- ISet.union ids2 explored1.deps; - unexplored1.rev_deps |> ISet.iter (fun id0 -> - let explored0 = find_explored id0 graph in - explored0.deps <- ISet.union ids2 explored0.deps; - ); - - ids2 |> ISet.iter (fun id2 -> - let unexplored2 = find_unexplored id2 graph in - unexplored2.rev_deps <- ISet.add id1 unexplored2.rev_deps; - unexplored2.rev_deps <- - ISet.union unexplored1.rev_deps unexplored2.rev_deps; - ); + unexplored1_rev_deps + |> ISet.iter (fun id0 -> + let explored0 = find_explored id0 graph in + explored0.deps <- ISet.union ids2 explored0.deps); + + ids2 + |> ISet.iter (fun id2 -> + let unexplored2 = find_unexplored id2 graph in + unexplored2.rev_deps <- ISet.add id1 unexplored2.rev_deps; + unexplored2.rev_deps <- ISet.union unexplored1_rev_deps unexplored2.rev_deps); explored1.deps <- ISet.remove id1 explored1.deps; - if is_finished explored1 - then finished_ids := ISet.add id1 !finished_ids; - unexplored1.rev_deps |> ISet.iter (fun id0 -> - let explored0 = find_explored id0 graph in - explored0.deps <- ISet.remove id1 explored0.deps; - if is_finished explored0 - then finished_ids := ISet.add id0 !finished_ids; - ); - unexplored1.rev_deps <- ISet.empty; - - graph.finished <- ISet.union !finished_ids graph.finished; + if is_finished explored1 then finished_ids := ISet.add id1 !finished_ids; + unexplored1_rev_deps + |> ISet.iter (fun id0 -> + let explored0 = find_explored id0 graph in + explored0.deps <- ISet.remove id1 explored0.deps; + if is_finished explored0 then finished_ids := ISet.add id0 !finished_ids); + !finished_ids (* Add a node to the graph *) let node graph id = let unexplored = { rev_deps = ISet.empty } in - graph.unexplored_nodes <- IMap.add id unexplored graph.unexplored_nodes + Tbl.add graph id (Unexplored unexplored) diff --git a/src/typing/import_export.ml b/src/typing/import_export.ml index 5467285ac84..1082a0e651d 100644 --- a/src/typing/import_export.ml +++ b/src/typing/import_export.ml @@ 
-1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,19 +8,18 @@ (* AST handling and type setup for import/export *) module Flow = Flow_js - open Reason open Type -let mk_module_t cx reason = ModuleT( - reason, - { - exports_tmap = Context.make_export_map cx SMap.empty; - cjs_export = None; - has_every_named_export = false; - }, - Context.is_strict cx -) +let mk_module_t cx reason = + ModuleT + ( reason, + { + exports_tmap = Context.make_export_map cx SMap.empty; + cjs_export = None; + has_every_named_export = false; + }, + Context.is_strict cx ) (** * When CommonJS modules set their export type, we do two things: @@ -32,49 +31,44 @@ let mk_module_t cx reason = ModuleT( * ES <-> CJS module interop semantics) *) let mk_commonjs_module_t cx reason_exports_module reason export_t = - let exporttypes = { - exports_tmap = Context.make_export_map cx SMap.empty; - cjs_export = Some export_t; - has_every_named_export = false; - } in + let exporttypes = + { + exports_tmap = Context.make_export_map cx SMap.empty; + cjs_export = Some export_t; + has_every_named_export = false; + } + in Tvar.mk_where cx reason (fun t -> - Flow.flow cx ( - export_t, - CJSExtractNamedExportsT( - reason, - (reason_exports_module, exporttypes, (Context.is_strict cx)), - t - ) - ) - ) + Flow.flow + cx + ( export_t, + CJSExtractNamedExportsT + (reason, (reason_exports_module, exporttypes, Context.is_strict cx), t) )) let mk_resource_module_t cx loc f = - let reason, exports_t = match Utils_js.extension_of_filename f with - | Some ".css" -> - let reason = Reason.mk_reason RObjectType loc in - reason, Type.DefT (reason, Type.AnyObjT) - | Some _ -> - let reason = Reason.mk_reason RString loc in - reason, Type.StrT.why reason - | _ -> failwith "How did we find a resource file without an extension?!" + let (reason, exports_t) = + match Utils_js.extension_of_filename f with + | Some ".css" -> + let reason = Reason.mk_reason RObjectType loc in + (reason, Type.AnyT.make Type.Untyped reason) + | Some _ -> + let reason = Reason.mk_reason RString loc in + (reason, Type.StrT.why reason |> with_trust bogus_trust) + | _ -> failwith "How did we find a resource file without an extension?!" 
in - mk_commonjs_module_t cx reason reason exports_t - (* given a module name, return associated tvar in module map (failing if not found); e.g., used to find tvars associated with requires *after* all requires already have entries in the module map *) -let require_t_of_ref_unsafe cx (loc, _) = - Context.find_require cx loc +let require_t_of_ref_unsafe cx (loc, _) = Context.find_require cx loc let require cx ((_, module_ref) as source) require_loc = Type_inference_hooks_js.dispatch_import_hook cx source require_loc; let module_t = require_t_of_ref_unsafe cx source in let reason = mk_reason (RCommonJSExports module_ref) require_loc in Tvar.mk_where cx reason (fun t -> - Flow.flow cx (module_t, CJSRequireT(reason, t, Context.is_strict cx)) - ) + Flow.flow cx (module_t, CJSRequireT (reason, t, Context.is_strict cx))) let import cx source import_loc = Type_inference_hooks_js.dispatch_import_hook cx source import_loc; @@ -84,54 +78,7 @@ let import_ns cx reason source import_loc = Type_inference_hooks_js.dispatch_import_hook cx source import_loc; let module_t = require_t_of_ref_unsafe cx source in Tvar.mk_where cx reason (fun t -> - Flow.flow cx (module_t, ImportModuleNsT(reason, t, Context.is_strict cx)) - ) - -let module_t_of_cx cx = - let m = Context.module_ref cx in - match SMap.get m (Context.module_map cx) with - | Some t -> t - | None -> - let loc = Loc.({ none with source = Some (Context.file cx) }) in - let reason = (Reason.mk_reason (RCustom "exports") loc) in - Tvar.mk_where cx reason (fun t -> Context.add_module cx m t) - -let set_module_t cx reason f = - let module_ref = Context.module_ref cx in - Context.add_module cx module_ref (Tvar.mk_where cx reason f) - -(** - * Before running inference, we assume that we're dealing with a CommonJS - * module that has a built-in, initialized `exports` object (i.e. it is not an - * ES module). - * - * During inference, if we encounter an assignment to module.exports then we - * use this as an indicator that the module is definitely a CommonJS module -- - * but that the bult-in `exports` value is no longer the exported variable. - * Instead, whatever was assigned to `module.exports` is now that CJS exported - * value. - * - * On the other hand, if we encounter an ES `export` statement during inference, - * we use this as an indicator that the module is an ES module. The one - * exception to this rule is that we do not use `export type` as an indicator of - * an ES module (since we want CommonJS modules to be able to use `export type` - * as well). - * - * At the end of inference, we make use of this information to decide which - * types to store as the expors of the module (i.e. Do we use the built-in - * `exports` value? Do we use the type that clobbered `module.exports`? Or do we - * use neither because the module only has direct ES exports?). 
- *) -let set_module_kind cx loc new_exports_kind = Context.( - (match (Context.module_kind cx, new_exports_kind) with - | (ESModule, CommonJSModule(Some _)) - | (CommonJSModule(Some _), ESModule) - -> - Flow.add_output cx (Flow_error.EIndeterminateModuleType loc) - | _ -> () - ); - Context.set_module_kind cx new_exports_kind -) + Flow.flow cx (module_t, ImportModuleNsT (reason, t, Context.is_strict cx))) (** * Given an exported default declaration, identify nameless declarations and @@ -140,42 +87,50 @@ let set_module_kind cx loc new_exports_kind = Context.( * * Paired with function which undoes this, for typed AST construction *) -let nameify_default_export_decl decl = Flow_ast.Statement.( - let identity x = x in - match decl with - | loc, FunctionDeclaration func_decl -> Flow_ast.Function.( - if func_decl.id <> None then decl, identity else - (loc, FunctionDeclaration { - func_decl with - id = Some (loc, internal_name "*default*"); - }), (function - | x, FunctionDeclaration func_decl -> - x, FunctionDeclaration { func_decl with id = None } - | _ -> failwith "expected FunctionDeclaration" - ) - ) - - | loc, ClassDeclaration class_decl -> Flow_ast.Class.( - if class_decl.id <> None then decl, identity else - (loc, ClassDeclaration { - class_decl with - id = Some (loc, internal_name "*default*"); - }), (function - | x, ClassDeclaration class_decl -> - x, ClassDeclaration { class_decl with id = None } - | _ -> failwith "expected ClassDeclaration" - ) - ) - - | _ -> decl, identity -) +let nameify_default_export_decl decl = + Flow_ast.Statement.( + let identity x = x in + match decl with + | (loc, FunctionDeclaration func_decl) -> + Flow_ast.Function.( + if func_decl.id <> None then + (decl, identity) + else + ( ( loc, + FunctionDeclaration + { + func_decl with + id = Some (Flow_ast_utils.ident_of_source (loc, internal_name "*default*")); + } ), + (function + | (x, FunctionDeclaration func_decl) -> + (x, FunctionDeclaration { func_decl with id = None }) + | _ -> failwith "expected FunctionDeclaration") )) + | (loc, ClassDeclaration class_decl) -> + Flow_ast.Class.( + if class_decl.id <> None then + (decl, identity) + else + ( ( loc, + ClassDeclaration + { + class_decl with + id = Some (Flow_ast_utils.ident_of_source (loc, internal_name "*default*")); + } ), + (function + | (x, ClassDeclaration class_decl) -> + (x, ClassDeclaration { class_decl with id = None }) + | _ -> failwith "expected ClassDeclaration") )) + | _ -> (decl, identity)) let warn_or_ignore_export_star_as cx name = - if name = None then () else - match Context.esproposal_export_star_as cx, name with - | Options.ESPROPOSAL_WARN, Some(loc, _) -> - Flow.add_output cx (Flow_error.EExperimentalExportStarAs loc) - | _ -> () + if name = None then + () + else + match (Context.esproposal_export_star_as cx, name) with + | (Options.ESPROPOSAL_WARN, Some (loc, _)) -> + Flow.add_output cx (Error_message.EExperimentalExportStarAs loc) + | _ -> () (* Module exports are treated differently than `exports`. The latter is a variable that is implicitly set to the empty object at the top of a @@ -189,10 +144,82 @@ let warn_or_ignore_export_star_as cx name = module, and then flowing module.exports to exports, so that whatever its final value is (initial object or otherwise) is checked against the type declared for exports or any other use of exports. 
*) -let get_module_exports cx loc = - Env.get_internal_var cx "exports" loc +let get_module_exports cx loc = Env.get_internal_var cx "exports" loc let set_module_exports cx loc t = - let change: Changeset.EntryRef.t option = - Env.set_internal_var cx "exports" t loc in + let change : Changeset.EntryRef.t option = Env.set_internal_var cx "exports" t loc in ignore change + +let cjs_clobber cx loc t = + match Module_info.cjs_clobber (Context.module_info cx) loc with + | Ok () -> set_module_exports cx loc t + | Error msg -> Flow.add_output cx msg + +let export cx name loc t = + match Module_info.export (Context.module_info cx) name loc t with + | Ok () -> () + | Error msg -> Flow.add_output cx msg + +let export_star cx loc ns = + match Module_info.export_star (Context.module_info cx) loc ns with + | Ok () -> () + | Error msg -> Flow.add_output cx msg + +let export_type cx = Module_info.export_type (Context.module_info cx) + +let export_type_star cx = Module_info.export_type_star (Context.module_info cx) + +(* After we have seen all the export statements in a module, this function will + * calculate a ModuleT type (or a tvar that resolves to one) describing the + * exports of a file. + * + * For CommonJS modules, this is fairly simple. We have the exported value + * itself, plus any type exports. If the exported value is an object, we treat + * the fields as named exports for ES module dependents. + * + * For ES modules, we have both named exports and "star" exports, which copy the + * exports of one file into another. This can lead to conflicts, which are + * resolved carefully. Note that locally named exports always win, even if they + * are followed by a star export that includes a conflicting name. + * + * Finally, both CJS and ES modules can export types, which also has a star + * export variant. Conflicts are handled in the same way. + *) +let mk_module_t = + Module_info.( + let copy_named_exports cx reason module_t (loc, from_ns) = + let reason = repos_reason loc reason in + Tvar.mk_where cx reason (fun tout -> + Flow.flow cx (from_ns, CopyNamedExportsT (reason, module_t, tout))) + in + let copy_type_exports cx reason module_t (loc, from_ns) = + let reason = repos_reason loc reason in + Tvar.mk_where cx reason (fun tout -> + Flow.flow cx (from_ns, CopyTypeExportsT (reason, module_t, tout))) + in + let copy_star_exports cx reason exports module_t = + Module_info.fold_star2 + (copy_named_exports cx reason) + (copy_type_exports cx reason) + module_t + exports + in + let export_named cx reason kind named module_t = + Tvar.mk_where cx reason (fun tout -> + Flow.flow cx (module_t, ExportNamedT (reason, false, named, kind, tout))) + in + fun cx reason -> + let info = Context.module_info cx in + match info.kind with + | CJS _ -> + Loc.{ none with source = Some (Context.file cx) } + |> ALoc.of_loc + |> get_module_exports cx + |> mk_commonjs_module_t cx reason reason + |> export_named cx reason ExportType info.type_named + |> copy_star_exports cx reason ([], info.type_star) + | ES { named; star } -> + mk_module_t cx reason + |> export_named cx reason ExportValue named + |> export_named cx reason ExportType info.type_named + |> copy_star_exports cx reason (star, info.type_star)) diff --git a/src/typing/import_export.mli b/src/typing/import_export.mli index 1fd7553bc46..6b44dac137c 100644 --- a/src/typing/import_export.mli +++ b/src/typing/import_export.mli @@ -1,23 +1,36 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) val mk_module_t : Context.t -> Reason.t -> Type.t -val mk_commonjs_module_t : - Context.t -> Reason.t -> Reason.t -> Type.t -> Type.t -val mk_resource_module_t : Context.t -> Loc.t -> string -> Type.t -val require : Context.t -> (Loc.t * string) -> Loc.t -> Type.t -val import : Context.t -> (Loc.t * string) -> Loc.t -> Type.t -val import_ns : Context.t -> Reason.t -> (Loc.t * string) -> Loc.t -> Type.t -val module_t_of_cx : Context.t -> Type.t -val set_module_t : Context.t -> Reason.t -> (Type.t -> unit) -> unit -val set_module_kind : Context.t -> Loc.t -> Context.module_kind -> unit + +val mk_resource_module_t : Context.t -> ALoc.t -> string -> Type.t + +val require : Context.t -> ALoc.t * string -> ALoc.t -> Type.t + +val import : Context.t -> ALoc.t * string -> ALoc.t -> Type.t + +val import_ns : Context.t -> Reason.t -> ALoc.t * string -> ALoc.t -> Type.t + val nameify_default_export_decl : ('M, 'M) Flow_ast.Statement.t -> ('M, 'M) Flow_ast.Statement.t * (('N, 'U) Flow_ast.Statement.t -> ('N, 'U) Flow_ast.Statement.t) -val warn_or_ignore_export_star_as : Context.t -> (Loc.t * 'a) option -> unit -val get_module_exports : Context.t -> Loc.t -> Type.t -val set_module_exports : Context.t -> Loc.t -> Type.t -> unit + +val warn_or_ignore_export_star_as : Context.t -> (ALoc.t * 'a) option -> unit + +val get_module_exports : Context.t -> ALoc.t -> Type.t + +val set_module_exports : Context.t -> ALoc.t -> Type.t -> unit + +val cjs_clobber : Context.t -> ALoc.t -> Type.t -> unit + +val export : Context.t -> string -> ALoc.t -> Type.t -> unit + +val export_type : Context.t -> string -> ALoc.t option -> Type.t -> unit + +val export_star : Context.t -> ALoc.t -> Type.t -> unit + +val export_type_star : Context.t -> ALoc.t -> Type.t -> unit diff --git a/src/typing/key.ml b/src/typing/key.ml index b8948ae542e..d390e6139c0 100644 --- a/src/typing/key.ml +++ b/src/typing/key.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -8,42 +8,47 @@ open Utils_js (* keys for refinements *) -type proj = Prop of string | Elem of t | PrivateField of string +type proj = + | Prop of string + | Elem of t + | PrivateField of string + and t = string * proj list let rec string_of_key (base, projs) = - base ^ String.concat "" ( - (List.rev projs) |> List.map (function - | Prop name -> spf ".%s" name - | PrivateField name -> spf "private.%s" name - | Elem expr -> spf "[%s]" (string_of_key expr) - )) + base + ^ String.concat + "" + ( List.rev projs + |> Core_list.map ~f:(function + | Prop name -> spf ".%s" name + | PrivateField name -> spf "private.%s" name + | Elem expr -> spf "[%s]" (string_of_key expr)) ) (* true if the given key uses the given property name *) -let rec uses_propname propname ~private_ (_base, proj) = - proj_uses_propname ~private_ propname proj +let rec uses_propname propname ~private_ (_base, proj) = proj_uses_propname ~private_ propname proj (* true if the given projection list uses the given property name *) and proj_uses_propname ~private_ propname = function -| Prop name :: tail -> - name = propname && not private_ || proj_uses_propname ~private_ propname tail -| PrivateField name :: tail -> - name = propname && private_ || proj_uses_propname ~private_ propname tail -| Elem key :: tail -> - uses_propname ~private_ propname key || proj_uses_propname ~private_ propname tail -| [] -> - false + | Prop name :: tail -> + (name = propname && not private_) || proj_uses_propname ~private_ propname tail + | PrivateField name :: tail -> + (name = propname && private_) || proj_uses_propname ~private_ propname tail + | Elem key :: tail -> + uses_propname ~private_ propname key || proj_uses_propname ~private_ propname tail + | [] -> false let compare = Pervasives.compare let is_simple (_, ps) = List.length ps = 0 -let reason_desc = Reason.(function -| name, [] when not (is_internal_name name) -> RIdentifier name -| name, [] -> RCustom name -| _, projs -> - (match List.hd (List.rev projs) with - | Prop x -> RProperty (Some x) - | PrivateField x -> RPrivateProperty x - | Elem _ -> RProperty None) -) +let reason_desc = + Reason.( + function + | (name, []) when not (is_internal_name name) -> RIdentifier name + | (name, []) -> RCustom name + | (_, projs) -> + (match List.hd (List.rev projs) with + | Prop x -> RProperty (Some x) + | PrivateField x -> RPrivateProperty x + | Elem _ -> RProperty None)) diff --git a/src/typing/key_map.ml b/src/typing/key_map.ml index ae4b0c8b425..69c987d79de 100644 --- a/src/typing/key_map.ml +++ b/src/typing/key_map.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. diff --git a/src/typing/marked.ml b/src/typing/marked.ml index 384ac08b6b2..6cf40ae3520 100644 --- a/src/typing/marked.ml +++ b/src/typing/marked.ml @@ -1,48 +1,57 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -open Type +open Polarity module type S = sig type t + type key - val empty: t - val add: key -> Type.polarity -> t -> (Type.polarity * t) option - val get: key -> t -> Type.polarity option - val mem: key -> Type.polarity -> t -> bool - val exclude: key -> t -> t + + val empty : t + + val add : key -> Polarity.t -> t -> (Polarity.t * t) option + + val get : key -> t -> Polarity.t option + + val mem : key -> Polarity.t -> t -> bool + + val exclude : key -> t -> t end -module Make(Key: Map.OrderedType): S with type key = Key.t = struct - module Map = MyMap.Make(Key) - type t = polarity Map.t +module Make (Key : Map.OrderedType) : S with type key = Key.t = struct + module Map = MyMap.Make (Key) + + type t = Polarity.t Map.t + type key = Map.key let empty = Map.empty let add id p x = match Map.get id x with - | None -> Some (p, Map.add id p x) - | Some p' -> - match p, p' with - | Positive, Negative - | Negative, Positive -> Some (p, Map.add id Neutral x) - | Neutral, Negative -> Some (Positive, Map.add id p x) - | Neutral, Positive -> Some (Negative, Map.add id p x) - | _ -> None + | None -> Some (p, Map.add id p x) + | Some p' -> + (match (p, p') with + | (Positive, Negative) + | (Negative, Positive) -> + Some (p, Map.add id Neutral x) + | (Neutral, Negative) -> Some (Positive, Map.add id p x) + | (Neutral, Positive) -> Some (Negative, Map.add id p x) + | _ -> None) let get = Map.get let mem id p x = match Map.get id x with - | None -> false - | Some p' -> Polarity.compat (p', p) + | None -> false + | Some p' -> Polarity.compat (p', p) let exclude id x = Map.add id Neutral x end -module IdMarked = Make(IntKey) +module IdMarked = Make (IntKey) diff --git a/src/typing/marked.mli b/src/typing/marked.mli index 2262e0b219b..4b21624b494 100644 --- a/src/typing/marked.mli +++ b/src/typing/marked.mli @@ -1,19 +1,25 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module type S = sig type t + type key - val empty: t - val add: key -> Type.polarity -> t -> (Type.polarity * t) option - val get: key -> t -> Type.polarity option - val mem: key -> Type.polarity -> t -> bool - val exclude: key -> t -> t + + val empty : t + + val add : key -> Polarity.t -> t -> (Polarity.t * t) option + + val get : key -> t -> Polarity.t option + + val mem : key -> Polarity.t -> t -> bool + + val exclude : key -> t -> t end -module Make(Key: Map.OrderedType): S with type key = Key.t +module Make (Key : Map.OrderedType) : S with type key = Key.t -module IdMarked: S with type key = int +module IdMarked : S with type key = int diff --git a/src/typing/members.ml b/src/typing/members.ml new file mode 100644 index 00000000000..0927415c96f --- /dev/null +++ b/src/typing/members.ml @@ -0,0 +1,599 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +open Utils_js +open Type +open Reason +open Flow_js + +type ('success, 'success_module) generic_t = + | Success of 'success + | SuccessModule of 'success_module + | FailureNullishType + | FailureAnyType + | FailureUnhandledType of Type.t + | FailureUnhandledMembers of Type.t + +type t = + ( (* Success *) + (ALoc.t option * Type.t) SMap.t, + (* SuccessModule *) + (ALoc.t option * Type.t) SMap.t * Type.t option ) + generic_t + +let rec merge_type cx = + let create_union rep = UnionT (locationless_reason (RCustom "union"), rep) in + function + | (DefT (_, _, NumT _), (DefT (_, _, NumT _) as t)) + | (DefT (_, _, StrT _), (DefT (_, _, StrT _) as t)) + | (DefT (_, _, BoolT _), (DefT (_, _, BoolT _) as t)) + | (DefT (_, _, NullT), (DefT (_, _, NullT) as t)) + | (DefT (_, _, VoidT), (DefT (_, _, VoidT) as t)) -> + t + | (ObjProtoT _, (ObjProtoT _ as t)) -> t + | (AnyT _, t) + | (t, AnyT _) -> + t + | (DefT (_, _, EmptyT _), t) + | (t, DefT (_, _, EmptyT _)) -> + t + | (_, (DefT (_, _, MixedT _) as t)) + | ((DefT (_, _, MixedT _) as t), _) -> + t + | (DefT (_, _, NullT), (MaybeT _ as t)) + | ((MaybeT _ as t), DefT (_, _, NullT)) + | (DefT (_, _, VoidT), (MaybeT _ as t)) + | ((MaybeT _ as t), DefT (_, _, VoidT)) -> + t + | ((DefT (_, _, FunT (_, _, ft1)) as fun1), (DefT (_, _, FunT (_, _, ft2)) as fun2)) -> + (* Functions with different number of parameters cannot be merged into a + * single function type. Instead, we should turn them into a union *) + let params = + if List.length ft1.params <> List.length ft2.params then + None + else + let params = + List.map2 + (fun (name1, t1) (name2, t2) -> + (* TODO: How to merge param names? *) + let name = + match (name1, name2) with + | (None, None) -> None + | (Some name, _) + | (_, Some name) -> + Some name + in + (name, merge_type cx (t1, t2))) + ft1.params + ft2.params + in + match (ft1.rest_param, ft2.rest_param) with + | (None, Some _) + | (Some _, None) -> + None + | (None, None) -> Some (params, None) + | (Some r1, Some r2) -> Some (params, Some (r1, r2)) + in + begin + match params with + | None -> create_union (UnionRep.make fun1 fun2 []) + | Some (params, rest_params) -> + let (params_names, tins) = List.split params in + let rest_param = + match rest_params with + | None -> None + | Some ((name1, loc, rest_t1), (name2, _, rest_t2)) -> + (* TODO: How to merge rest names and locs? *) + let name = + match (name1, name2) with + | (None, None) -> None + | (Some name, _) + | (_, Some name) -> + Some name + in + Some (name, loc, merge_type cx (rest_t1, rest_t2)) + in + let tout = merge_type cx (ft1.return_t, ft2.return_t) in + let reason = locationless_reason (RCustom "function") in + DefT + ( reason, + bogus_trust (), + FunT + ( dummy_static reason, + dummy_prototype, + mk_functiontype reason tins tout ~rest_param ~def_reason:reason ~params_names ) ) + end + | ((DefT (_, _, ObjT o1) as t1), (DefT (_, _, ObjT o2) as t2)) -> + let map1 = Context.find_props cx o1.props_tmap in + let map2 = Context.find_props cx o2.props_tmap in + (* Create an intermediate map of booleans indicating whether two objects can + * be merged, based on the properties in each map. *) + let merge_map = + SMap.merge + (fun _ p1_opt p2_opt -> + match (p1_opt, p2_opt) with + | (None, None) -> None + (* In general, even objects with disjoint key sets can not be merged due + * to width subtyping. For example, {x:T} and {y:U} is not the same as + * {x:T,y:U}, because {x,y} is a valid inhabitant of {x:T} and the type of + * y may != U. 
However, if either object type is exact, disjointness is
+           * sufficient. *)
+          | (Some _, None)
+          | (None, Some _) ->
+            Some (o1.flags.exact || o2.flags.exact)
+          (* Covariant fields can be merged. *)
+          | (Some (Field (_, _, Polarity.Positive)), Some (Field (_, _, Polarity.Positive))) ->
+            Some true
+          (* Getters are covariant and thus can be merged. *)
+          | (Some (Get _), Some (Get _)) -> Some true
+          (* Anything else can't be merged. *)
+          | _ -> Some false)
+        map1
+        map2
+    in
+    let merge_dict =
+      match (o1.dict_t, o2.dict_t) with
+      (* If neither object has an indexer, neither will the merged object. *)
+      | (None, None) -> Some None
+      (* If both objects have covariant indexers, we can merge them. However, if the
+       * key types are disjoint, the resulting dictionary is not useful. *)
+      | ( Some { key = k1; value = v1; dict_polarity = Polarity.Positive; _ },
+          Some { key = k2; value = v2; dict_polarity = Polarity.Positive; _ } ) ->
+        (* TODO: How to merge indexer names? *)
+        Some
+          (Some
+             {
+               dict_name = None;
+               key = create_intersection (InterRep.make k1 k2 []);
+               value = merge_type cx (v1, v2);
+               dict_polarity = Polarity.Positive;
+             })
+      (* Don't merge objects with possibly incompatible indexers. *)
+      | _ -> None
+    in
+    let merge_call =
+      match (o1.call_t, o2.call_t) with
+      | (None, None) -> Some None
+      | (Some _, None) ->
+        if o2.flags.exact then
+          Some o1.call_t
+        else
+          None
+      | (None, Some _) ->
+        if o1.flags.exact then
+          Some o2.call_t
+        else
+          None
+      | (Some id1, Some id2) ->
+        let c1 = Context.find_call cx id1 in
+        let c2 = Context.find_call cx id2 in
+        let id = Context.make_call_prop cx (create_union (UnionRep.make c1 c2 [])) in
+        Some (Some id)
+    in
+    (* Only merge objects if every property can be merged. *)
+    let should_merge = SMap.for_all (fun _ x -> x) merge_map in
+    (* Don't merge objects with different prototypes. *)
+    let should_merge = should_merge && o1.proto_t = o2.proto_t in
+    (match (should_merge, merge_dict, merge_call) with
+    | (true, Some dict, Some call) ->
+      let map =
+        SMap.merge
+          (fun _ p1_opt p2_opt ->
+            match (p1_opt, p2_opt) with
+            (* Merge disjoint+exact objects. *)
+            | (Some t, None)
+            | (None, Some t) ->
+              Some t
+            (* Shouldn't happen, per merge_map above. 
*) + | _ -> None) + map1 + map2 + in + let id = Context.generate_property_map cx map in + let sealed = + match (o1.flags.sealed, o2.flags.sealed) with + | (Sealed, Sealed) -> Sealed + | (UnsealedInFile s1, UnsealedInFile s2) when s1 = s2 -> UnsealedInFile s1 + | _ -> UnsealedInFile None + in + let flags = + { + sealed; + exact = o1.flags.exact && o2.flags.exact; + frozen = o1.flags.frozen && o2.flags.frozen; + } + in + let reason = locationless_reason (RCustom "object") in + mk_object_def_type ~reason ~flags ~dict ~call id o1.proto_t + | _ -> create_union (UnionRep.make t1 t2 [])) + | (DefT (_, _, ArrT (ArrayAT (t1, ts1))), DefT (_, _, ArrT (ArrayAT (t2, ts2)))) -> + let tuple_types = + match (ts1, ts2) with + | (None, _) + | (_, None) -> + None + | (Some ts1, Some ts2) -> Some (Core_list.map2_exn ~f:(merge_type cx |> curry) ts1 ts2) + in + DefT + ( locationless_reason (RCustom "array"), + bogus_trust (), + ArrT (ArrayAT (merge_type cx (t1, t2), tuple_types)) ) + | (DefT (_, _, ArrT (TupleAT (t1, ts1))), DefT (_, _, ArrT (TupleAT (t2, ts2)))) + when List.length ts1 = List.length ts2 -> + DefT + ( locationless_reason (RCustom "tuple"), + bogus_trust (), + ArrT + (TupleAT (merge_type cx (t1, t2), Core_list.map2_exn ~f:(merge_type cx |> curry) ts1 ts2)) + ) + | (DefT (_, _, ArrT (ROArrayAT elemt1)), DefT (_, _, ArrT (ROArrayAT elemt2))) -> + DefT + ( locationless_reason (RCustom "read only array"), + bogus_trust (), + ArrT (ROArrayAT (merge_type cx (elemt1, elemt2))) ) + | (MaybeT (_, t1), MaybeT (_, t2)) + | (MaybeT (_, t1), t2) + | (t1, MaybeT (_, t2)) -> + let t = merge_type cx (t1, t2) in + let reason = locationless_reason (RMaybe (desc_of_t t)) in + MaybeT (reason, t) + | (UnionT (_, rep1), UnionT (_, rep2)) -> create_union (UnionRep.rev_append rep1 rep2) + | (UnionT (_, rep), t) + | (t, UnionT (_, rep)) -> + create_union (UnionRep.cons t rep) + (* TODO: do we need to do anything special for merging Null with Void, + Optional with other types, etc.? 
*) + | (t1, t2) -> create_union (UnionRep.make t1 t2 []) + +let instantiate_poly_t cx t args = + match t with + | DefT (_, _, PolyT (_, type_params, t_, _)) -> + let args = Option.value ~default:[] args in + let maximum_arity = Nel.length type_params in + if List.length args > maximum_arity then ( + Hh_logger.error "Instantiating poly type failed"; + t + ) else + let (map, _, too_few_args) = + Nel.fold_left + (fun (map, ts, too_few_args) typeparam -> + let (t, ts, too_few_args) = + match (typeparam, ts) with + | ({ default = Some default; _ }, []) -> + (* fewer arguments than params and we have a default *) + (subst cx map default, [], too_few_args) + | ({ default = None; _ }, []) -> (AnyT.error (reason_of_t t), [], true) + | (_, t :: ts) -> (t, ts, too_few_args) + in + (SMap.add typeparam.name t map, ts, too_few_args)) + (SMap.empty, args, false) + type_params + in + if too_few_args then ( + Hh_logger.error "Instantiating poly type failed"; + t + ) else + subst cx map t_ + | DefT (_, _, EmptyT _) + | DefT (_, _, MixedT _) + | AnyT _ + | DefT (_, _, TypeT (_, AnyT _)) -> + t + | _ -> + (match args with + | None -> t + | Some _ -> assert_false ("unexpected args passed to instantiate_poly_t: " ^ string_of_ctor t)) + +let intersect_members cx members = + match members with + | [] -> SMap.empty + | _ -> + let map = SMap.map (fun x -> [x]) (List.hd members) in + let map = + List.fold_left + (fun acc x -> + SMap.merge + (fun _ tl t -> + match (tl, t) with + | (None, None) -> None + | (None, Some _) -> None + | (Some _, None) -> None + | (Some tl, Some t) -> Some (t :: tl)) + acc + x) + map + (List.tl members) + in + SMap.map + (List.fold_left + (fun (_, acc) (loc, t) -> + (* Arbitrarily use the last location encountered *) + (loc, merge_type cx (acc, t))) + (None, Locationless.EmptyT.t |> with_trust bogus_trust)) + map + +and instantiate_type = function + | ThisClassT (_, t) + | DefT (_, _, ClassT t) + | (AnyT _ as t) + | DefT (_, _, TypeT (_, t)) + | (DefT (_, _, EmptyT _) as t) -> + t + | t -> "cannot instantiate non-class type " ^ string_of_ctor t |> assert_false + +let possible_types_of_use cx = function + | UseT (_, t) -> possible_types_of_type cx t + | _ -> [] + +let string_of_extracted_type = function + | Success t -> Printf.sprintf "Success (%s)" (Type.string_of_ctor t) + | SuccessModule t -> Printf.sprintf "SuccessModule (%s)" (Type.string_of_ctor t) + | FailureNullishType -> "FailureNullishType" + | FailureAnyType -> "FailureAnyType" + | FailureUnhandledType t -> Printf.sprintf "FailureUnhandledType (%s)" (Type.string_of_ctor t) + | FailureUnhandledMembers t -> + Printf.sprintf "FailureUnhandledMembers (%s)" (Type.string_of_ctor t) + +let to_command_result = function + | Success map + | SuccessModule (map, None) -> + Ok map + | SuccessModule (named_exports, Some cjs_export) -> + Ok (SMap.add "default" (None, cjs_export) named_exports) + | FailureNullishType -> Error "autocomplete on possibly null or undefined value" + | FailureAnyType -> Error "not enough type information to autocomplete" + | FailureUnhandledType t -> + Error + (spf "autocomplete on unexpected type of value %s (please file a task!)" (string_of_ctor t)) + | FailureUnhandledMembers t -> + Error + (spf + "autocomplete on unexpected members of value %s (please file a task!)" + (string_of_ctor t)) + +let find_props cx = + Context.find_props cx + %> SMap.filter (fun key _ -> + (* Filter out keys that start with "$" *) + not (String.length key >= 1 && key.[0] = '$')) + +let resolve_tvar cx (_, id) = + let ts = possible_types 
cx id in + (* The list of types returned by possible_types is often empty, and the + most common reason is that we don't have enough type coverage to + resolve id. Thus, we take the unit of merging to be `any`. (Something + similar happens when summarizing exports in ContextOptimizer.) + + In the future, we might report errors in some cases where + possible_types returns an empty list: e.g., when we detect unreachable + code, or even we don't have enough type coverage. Irrespective of these + changes, the above decision would continue to make sense: as errors + become stricter, type resolution should become even more lenient to + improve failure tolerance. *) + List.fold_left + (fun u t -> merge_type cx (t, u)) + (RAnyImplicit |> locationless_reason |> Unsoundness.unresolved_any) + ts + +let rec resolve_type cx = function + | OpenT tvar -> resolve_tvar cx tvar |> resolve_type cx + | AnnotT (_, t, _) -> resolve_type cx t + | MergedT (_, uses) -> + begin + match Core_list.(uses >>= possible_types_of_use cx) with + (* The unit of intersection is normally mixed, but MergedT is hacky and empty + fits better here *) + | [] -> locationless_reason REmpty |> EmptyT.make |> with_trust bogus_trust + | [x] -> x + | x :: y :: ts -> InterRep.make x y ts |> create_intersection + end + | t -> t + +let rec extract_type cx this_t = + match this_t with + | OpenT _ + | AnnotT _ + | MergedT _ -> + resolve_type cx this_t |> extract_type cx + | OptionalT (_, ty) + | MaybeT (_, ty) -> + extract_type cx ty + | DefT (_, _, (NullT | VoidT)) + | InternalT (OptionalChainVoidT _) -> + FailureNullishType + | AnyT _ -> FailureAnyType + | DefT (_, _, InstanceT _) as t -> Success t + | DefT (_, _, ObjT _) as t -> Success t + | ExactT (_, t) -> extract_type cx t + | ModuleT _ as t -> SuccessModule t + | ThisTypeAppT (_, c, _, ts_opt) -> + let c = resolve_type cx c in + let inst_t = instantiate_poly_t cx c ts_opt in + let inst_t = instantiate_type inst_t in + extract_type cx inst_t + | TypeAppT (_, _, c, ts) -> + let c = resolve_type cx c in + let inst_t = instantiate_poly_t cx c (Some ts) in + let inst_t = instantiate_type inst_t in + extract_type cx inst_t + | DefT (_, _, PolyT (_, _, sub_type, _)) -> + (* TODO: replace type parameters with stable/proper names? 
*) + extract_type cx sub_type + | ThisClassT (_, DefT (_, _, InstanceT (static, _, _, _))) + | DefT (_, _, ClassT (DefT (_, _, InstanceT (static, _, _, _)))) -> + extract_type cx static + | DefT (_, _, FunT _) as t -> Success t + | IntersectionT _ as t -> Success t + | UnionT _ as t -> Success t + | DefT (reason, _, SingletonStrT _) + | DefT (reason, _, StrT _) -> + get_builtin_type cx reason "String" |> extract_type cx + | DefT (reason, _, SingletonNumT _) + | DefT (reason, _, NumT _) -> + get_builtin_type cx reason "Number" |> extract_type cx + | DefT (reason, _, SingletonBoolT _) + | DefT (reason, _, BoolT _) -> + get_builtin_type cx reason "Boolean" |> extract_type cx + | DefT (reason, _, CharSetT _) -> get_builtin_type cx reason "String" |> extract_type cx + | DefT (_, _, IdxWrapper t) -> extract_type cx t + | DefT (_, _, ReactAbstractComponentT _) as t -> Success t + | ReposT (_, t) + | InternalT (ReposUpperT (_, t)) -> + extract_type cx t + | OpaqueT (_, { underlying_t = Some t; _ }) + | OpaqueT (_, { super_t = Some t; _ }) -> + extract_type cx t + | DefT (reason, _, ArrT arrtype) -> + let (builtin, elemt) = + match arrtype with + | ArrayAT (elemt, _) -> (get_builtin cx "Array" reason, elemt) + | TupleAT (elemt, _) + | ROArrayAT elemt -> + (get_builtin cx "$ReadOnlyArray" reason, elemt) + in + let array_t = resolve_type cx builtin in + Some [elemt] |> instantiate_poly_t cx array_t |> instantiate_type |> extract_type cx + | EvalT (t, defer, id) -> eval_evalt cx t defer id |> extract_type cx + | BoundT _ + | InternalT (ChoiceKitT (_, _)) + | TypeDestructorTriggerT _ + | DefT (_, _, ClassT _) + | CustomFunT (_, _) + | MatchingPropT (_, _, _) + | DefT (_, _, EmptyT _) + | ExistsT _ + | InternalT (ExtendsT _) + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ + | FunProtoT _ + | KeysT (_, _) + | DefT (_, _, MixedT _) + | NullProtoT _ + | ObjProtoT _ + | OpaqueT _ + | OpenPredT (_, _, _, _) + | ShapeT _ + | ThisClassT _ + | DefT (_, _, TypeT _) -> + FailureUnhandledType this_t + +let rec extract_members ?(exclude_proto_members = false) cx = function + | FailureNullishType -> FailureNullishType + | FailureAnyType -> FailureAnyType + | FailureUnhandledType t -> FailureUnhandledType t + | FailureUnhandledMembers t -> FailureUnhandledMembers t + | Success (DefT (_, _, InstanceT (_, super, _, { own_props; proto_props; _ }))) -> + let members = + SMap.fold + (fun x p acc -> + (* TODO: It isn't currently possible to return two types for a given + * property in autocomplete, so for now we just return the getter + * type. *) + let (loc, t) = + match p with + | Field (loc, t, _) + | Get (loc, t) + | Set (loc, t) + (* arbitrarily use the location for the getter. 
maybe we can send both in the future *) + + | GetSet (loc, t, _, _) + | Method (loc, t) -> + (loc, t) + in + SMap.add x (loc, t) acc) + (find_props cx own_props) + SMap.empty + in + if exclude_proto_members then + Success members + else + (* TODO: own props should take precedence *) + let members = + SMap.fold + (fun x p acc -> + match Property.read_t p with + | Some t -> + let loc = Property.read_loc p in + SMap.add x (loc, t) acc + | None -> acc) + (find_props cx proto_props) + members + in + let super_flds = extract_members_as_map ~exclude_proto_members cx super in + Success (AugmentableSMap.augment super_flds ~with_bindings:members) + | Success (DefT (_, _, ObjT { props_tmap = flds; proto_t = proto; _ })) -> + let proto_reason = reason_of_t proto in + let rep = InterRep.make proto (get_builtin_type cx proto_reason "Object") [] in + let proto_t = IntersectionT (proto_reason, rep) in + let prot_members = + if exclude_proto_members then + SMap.empty + else + extract_members_as_map ~exclude_proto_members cx proto_t + in + let members = + SMap.fold + (fun x p acc -> + match Property.read_t p with + | Some t -> + let loc = Property.read_loc p in + SMap.add x (loc, t) acc + | None -> acc) + (find_props cx flds) + SMap.empty + in + Success (AugmentableSMap.augment prot_members ~with_bindings:members) + | SuccessModule (ModuleT (_, { exports_tmap; cjs_export; has_every_named_export = _ }, _)) -> + let named_exports = Context.find_exports cx exports_tmap in + let cjs_export = + match cjs_export with + | Some t -> Some (resolve_type cx t) + | None -> None + in + SuccessModule (named_exports, cjs_export) + | Success (DefT (_, _, FunT (static, proto, _))) -> + let members = extract_members_as_map ~exclude_proto_members cx static in + let prot_members = extract_members_as_map ~exclude_proto_members cx proto in + Success (AugmentableSMap.augment prot_members ~with_bindings:members) + | Success (IntersectionT (_, rep)) -> + (* Intersection type should autocomplete for every property of + every type in the intersection *) + let ts = InterRep.members rep in + let members = Core_list.map ~f:(extract_members_as_map ~exclude_proto_members cx) ts in + Success + (List.fold_left + (fun acc members -> AugmentableSMap.augment acc ~with_bindings:members) + SMap.empty + members) + | Success (UnionT (_, rep)) -> + (* Union type should autocomplete for only the properties that are in + * every type in the intersection *) + let ts = UnionRep.members rep in + let members = + ts + (* Although we'll ignore the any-ish and nullish members of the union *) + |> List.filter (function + | DefT (_, _, (NullT | VoidT)) + | AnyT _ -> + false + | _ -> true) + |> Core_list.map ~f:(extract_members_as_map ~exclude_proto_members cx) + |> intersect_members cx + in + Success members + | Success t + | SuccessModule t -> + FailureUnhandledMembers t + +and extract ?exclude_proto_members cx = + extract_type cx %> extract_members ?exclude_proto_members cx + +and extract_members_as_map ~exclude_proto_members cx this_t = + match extract ~exclude_proto_members cx this_t |> to_command_result with + | Ok map -> map + | Error _ -> SMap.empty diff --git a/src/typing/members.mli b/src/typing/members.mli new file mode 100644 index 00000000000..68ce599ea16 --- /dev/null +++ b/src/typing/members.mli @@ -0,0 +1,34 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type ('success, 'success_module) generic_t = + | Success of 'success + | SuccessModule of 'success_module + | FailureNullishType + | FailureAnyType + | FailureUnhandledType of Type.t + | FailureUnhandledMembers of Type.t + +type t = + ( (* Success *) + (ALoc.t option * Type.t) SMap.t, + (* SuccessModule *) + (ALoc.t option * Type.t) SMap.t * Type.t option ) + generic_t + +(* For debugging purposes *) +val string_of_extracted_type : (Type.t, Type.t) generic_t -> string + +val to_command_result : t -> ((ALoc.t option * Type.t) SMap.t, string) result + +val extract : ?exclude_proto_members:bool -> Context.t -> Type.t -> t + +val extract_type : Context.t -> Type.t -> (Type.t, Type.t) generic_t + +val extract_members : ?exclude_proto_members:bool -> Context.t -> (Type.t, Type.t) generic_t -> t + +val resolve_type : Context.t -> Type.t -> Type.t diff --git a/src/typing/merge_js.ml b/src/typing/merge_js.ml index 89f27b5f621..ad992bf9453 100644 --- a/src/typing/merge_js.ml +++ b/src/typing/merge_js.ml @@ -1,10 +1,12 @@ (** - * Copyright (c) 2014-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +type get_ast_return = Loc.t Flow_ast.Comment.t list * (ALoc.t, ALoc.t) Flow_ast.program + module RequireMap = MyMap.Make (struct (* If file A.js imports module 'Foo', this will be ('Foo' * A.js) *) type t = string * File_key.t @@ -16,14 +18,19 @@ module RequireMap = MyMap.Make (struct end) module FilenameMap = Utils_js.FilenameMap -module LocSet = Utils_js.LocSet +module ALocSet = Loc_collections.ALocSet module Reqs = struct - type impl = LocSet.t - type dep_impl = Context.sig_t * LocSet.t - type unchecked = LocSet.t - type res = LocSet.t - type decl = LocSet.t * Modulename.t + type impl = ALocSet.t + + type dep_impl = Context.sig_t * ALocSet.t + + type unchecked = ALocSet.t + + type res = ALocSet.t + + type decl = ALocSet.t * Modulename.t + type t = { impls: impl RequireMap.t; dep_impls: dep_impl RequireMap.t; @@ -32,20 +39,23 @@ module Reqs = struct decls: decl RequireMap.t; } - let empty = { - impls = RequireMap.empty; - dep_impls = RequireMap.empty; - unchecked = RequireMap.empty; - res = RequireMap.empty; - decls = RequireMap.empty; - } + let empty = + { + impls = RequireMap.empty; + dep_impls = RequireMap.empty; + unchecked = RequireMap.empty; + res = RequireMap.empty; + decls = RequireMap.empty; + } let add_impl require requirer require_locs reqs = - let impls = RequireMap.add ~combine:LocSet.union (require, requirer) require_locs reqs.impls in + let impls = + RequireMap.add ~combine:ALocSet.union (require, requirer) require_locs reqs.impls + in { reqs with impls } let add_dep_impl = - let combine (from_cx, locs1) (_, locs2) = from_cx, LocSet.union locs1 locs2 in + let combine (from_cx, locs1) (_, locs2) = (from_cx, ALocSet.union locs1 locs2) in fun require requirer (from_cx, require_locs) reqs -> let dep_impls = RequireMap.add ~combine (require, requirer) (from_cx, require_locs) reqs.dep_impls @@ -54,31 +64,33 @@ module Reqs = struct let add_unchecked require requirer require_locs reqs = let unchecked = - RequireMap.add ~combine:LocSet.union (require, requirer) require_locs reqs.unchecked + RequireMap.add ~combine:ALocSet.union (require, requirer) require_locs reqs.unchecked in { reqs with unchecked } let add_res require requirer require_locs reqs = - let res = RequireMap.add ~combine:LocSet.union (require, requirer) require_locs 
reqs.res in + let res = RequireMap.add ~combine:ALocSet.union (require, requirer) require_locs reqs.res in { reqs with res } let add_decl = - let combine (locs1, modulename) (locs2, _) = LocSet.union locs1 locs2, modulename in + let combine (locs1, modulename) (locs2, _) = (ALocSet.union locs1 locs2, modulename) in fun require requirer (require_locs, modulename) reqs -> - let decls = RequireMap.add ~combine (require, requirer) (require_locs, modulename) reqs.decls in - { reqs with decls } + let decls = + RequireMap.add ~combine (require, requirer) (require_locs, modulename) reqs.decls + in + { reqs with decls } end (* Connect the builtins object in master_cx to the builtins reference in some arbitrary cx. *) -let implicit_require_strict cx master_cx cx_to = +let implicit_require cx master_cx cx_to = let from_t = Context.find_module_sig master_cx Files.lib_module_ref in let to_t = Context.find_module cx_to Files.lib_module_ref in Flow_js.flow_t cx (from_t, to_t) (* Connect the export of cx_from to its import in cx_to. This happens in some arbitrary cx, so cx_from and cx_to should have already been copied to cx. *) -let explicit_impl_require_strict cx (cx_from, m, loc, cx_to) = +let explicit_impl_require cx (cx_from, m, loc, cx_to) = let from_t = Context.find_module_sig cx_from m in let to_t = Context.find_require cx_to loc in Flow_js.flow_t cx (from_t, to_t) @@ -86,7 +98,7 @@ let explicit_impl_require_strict cx (cx_from, m, loc, cx_to) = (* Create the export of a resource file on the fly and connect it to its import in cxs_to. This happens in some arbitrary cx, so cx_to should have already been copied to cx. *) -let explicit_res_require_strict cx (loc, f, cx_to) = +let explicit_res_require cx (loc, f, cx_to) = (* Recall that a resource file is not parsed, so its export doesn't depend on its contents, just its extension. So, we create the export of a resource file on the fly by looking at its extension. The general alternative of @@ -101,18 +113,12 @@ let explicit_res_require_strict cx (loc, f, cx_to) = (* Connect a export of a declared module to its import in cxs_to. This happens in some arbitrary cx, so cx_to should have already been copied to cx. *) -let explicit_decl_require_strict cx (m, loc, resolved_m, cx_to) = +let explicit_decl_require cx (m, loc, resolved_m, cx_to) = let reason = Reason.(mk_reason (RCustom m) loc) in - (* lookup module declaration from builtin context *) - let m_name = - resolved_m - |> Modulename.to_string - |> Reason.internal_module_name - in + let m_name = resolved_m |> Modulename.to_string |> Reason.internal_module_name in let from_t = Tvar.mk cx reason in - Flow_js.lookup_builtin cx m_name reason - (Type.Strict reason) from_t; + Flow_js.lookup_builtin cx m_name reason (Type.Strict reason) from_t; (* flow the declared module type to importing context *) let to_t = Context.find_require cx_to loc in @@ -122,14 +128,18 @@ let explicit_decl_require_strict cx (m, loc, resolved_m, cx_to) = still lookup the module instead of returning `any` directly. This is because a resolved-unchecked dependency is superceded by a possibly-checked libdef. See unchecked_*_module_vs_lib tests for examples. *) -let explicit_unchecked_require_strict cx (m, loc, cx_to) = +let explicit_unchecked_require cx (m, loc, cx_to) = (* Use a special reason so we can tell the difference between an any-typed type import * from an untyped module and an any-typed type import from a nonexistent module. 
*) let reason = Reason.(mk_reason (RUntypedModule m) loc) in let m_name = Reason.internal_module_name m in let from_t = Tvar.mk cx reason in - Flow_js.lookup_builtin cx m_name reason - (Type.NonstrictReturning (Some (Type.DefT (reason, Type.AnyT), from_t), None)) from_t; + Flow_js.lookup_builtin + cx + m_name + reason + (Type.NonstrictReturning (Some (Type.AnyT (reason, Type.Untyped), from_t), None)) + from_t; (* flow the declared module type to importing context *) let to_t = Context.find_require cx_to loc in @@ -137,152 +147,170 @@ let explicit_unchecked_require_strict cx (m, loc, cx_to) = let detect_sketchy_null_checks cx = let add_error ~loc ~null_loc kind falsy_loc = - let msg = Flow_error.ESketchyNullLint { kind; loc; null_loc; falsy_loc } in - Flow_error.error_of_msg ~trace_reasons:[] ~source_file:(Context.file cx) msg - |> Context.add_error cx + Error_message.ESketchyNullLint { kind; loc; null_loc; falsy_loc } |> Flow_js.add_output cx in - let detect_function exists_excuses loc exists_check = - let open ExistsCheck in - - let exists_excuse = Utils_js.LocMap.get loc exists_excuses - |> Option.value ~default:empty in - - begin match exists_check.null_loc with + ExistsCheck.( + let exists_excuse = + Loc_collections.ALocMap.get loc exists_excuses |> Option.value ~default:empty + in + match exists_check.null_loc with | None -> () | Some null_loc -> let add_error = add_error ~loc ~null_loc in - if (Option.is_none exists_excuse.bool_loc) then + if Option.is_none exists_excuse.bool_loc then Option.iter exists_check.bool_loc ~f:(add_error Lints.SketchyNullBool); - if (Option.is_none exists_excuse.number_loc) then + if Option.is_none exists_excuse.number_loc then Option.iter exists_check.number_loc ~f:(add_error Lints.SketchyNullNumber); - if (Option.is_none exists_excuse.string_loc) then + if Option.is_none exists_excuse.string_loc then Option.iter exists_check.string_loc ~f:(add_error Lints.SketchyNullString); - if (Option.is_none exists_excuse.mixed_loc) then + if Option.is_none exists_excuse.mixed_loc then Option.iter exists_check.mixed_loc ~f:(add_error Lints.SketchyNullMixed); - () - end + ()) in - - Utils_js.LocMap.iter (detect_function (Context.exists_excuses cx)) (Context.exists_checks cx) + Loc_collections.ALocMap.iter + (detect_function (Context.exists_excuses cx)) + (Context.exists_checks cx) let detect_test_prop_misses cx = let misses = Context.test_prop_get_never_hit cx in - List.iter (fun (name, reasons, use_op) -> - Flow_js.add_output cx (Flow_error.EPropNotFound (name, reasons, use_op)) - ) misses + Core_list.iter + ~f:(fun (name, reasons, use_op) -> + Flow_js.add_output cx (Error_message.EPropNotFound (name, reasons, use_op))) + misses let detect_unnecessary_optional_chains cx = - List.iter (fun (loc, lhs_reason) -> - Flow_js.add_output cx (Flow_error.EUnnecessaryOptionalChain (loc, lhs_reason)) - ) (Context.unnecessary_optional_chains cx) + Core_list.iter + ~f:(fun (loc, lhs_reason) -> + Flow_js.add_output cx (Error_message.EUnnecessaryOptionalChain (loc, lhs_reason))) + (Context.unnecessary_optional_chains cx) let detect_unnecessary_invariants cx = - List.iter (fun (loc, reason) -> - Flow_js.add_output cx (Flow_error.EUnnecessaryInvariant (loc, reason)) - ) (Context.unnecessary_invariants cx) - -let check_type_visitor wrap = - let open Ty in - object(self) - inherit [_] iter_ty as super - - method! 
private on_prop env = function - | NamedProp (_, p) -> self#on_named_prop env p - | IndexProp d -> self#on_dict env d - | CallProp _ -> wrap (Reason.RCustom "object Call Property") - - method! private on_named_prop env = function - | Field (t, _) -> self#on_t env t - | Method _ -> wrap (Reason.RMethod None) - | Get _ | Set _ -> wrap (Reason.RGetterSetterProperty) - - method! on_t env = function - | TVar _ -> wrap (Reason.RCustom "recursive type") - | Fun _ -> wrap Reason.RFunctionType - | Generic (_, _, Some _) -> wrap (Reason.RCustom "class with generics") - | Mu _ -> wrap (Reason.RCustom "recursive type") - | Any -> wrap Reason.RAny - | AnyObj -> wrap Reason.RAnyObject - | AnyFun -> wrap Reason.RAnyFunction - | Bound (Ty_symbol.Symbol (_, id)) -> wrap (Reason.RCustom ("bound type var " ^ id)) - | Top -> wrap Reason.RMixed - | Bot -> wrap Reason.REmpty - | Exists -> wrap Reason.RExistential - | Module (Ty_symbol.Symbol (_, x)) -> wrap (Reason.RModule x) - | TypeAlias {ta_name = Ty_symbol.Symbol (_, id); _} -> - wrap (Reason.RCustom ("type alias " ^ id)) - | (Obj _ | Arr _ | Tup _ | Union _ | Inter _) as t -> super#on_t env t - | (Void|Null|Num|Str|Bool|NumLit _|StrLit _|BoolLit _|TypeOf _|Generic _|Class _) -> () - - end - -let detect_invalid_type_assert_calls ~full_cx file_sigs cxs = - let options = { - Ty_normalizer_env. - fall_through_merged = false; - expand_internal_types = false; - expand_type_aliases = true; - flag_shadowed_type_params = false; - } in - let check_valid_call ~genv ~targs_map call_loc (_, targ_loc) = - Option.iter (Hashtbl.find_opt targs_map targ_loc) ~f:(fun scheme -> - let desc = Reason.RCustom "TypeAssert library function" in - let reason_main = Reason.mk_reason desc call_loc in - let wrap reason = Flow_js.add_output full_cx (Flow_error.EInvalidTypeArgs ( - reason_main, Reason.mk_reason reason call_loc - )) in - match Ty_normalizer.from_scheme ~options ~genv scheme with - | Ok ty -> - (check_type_visitor wrap)#on_t () ty - | Error _ -> - let { Type.TypeScheme.type_ = t; _ } = scheme in - wrap (Type.desc_of_t t) - ) - in - List.iter (fun (cx, _typed_ast) -> - let file = Context.file cx in - let type_table = Context.type_table cx in - let targs_map = Type_table.targs_hashtbl type_table in - let file_sig = FilenameMap.find_unsafe file file_sigs in - let genv = Ty_normalizer_env.mk_genv ~full_cx ~file ~type_table ~file_sig in - Utils_js.LocMap.iter (check_valid_call ~genv ~targs_map) (Context.type_asserts cx) - ) cxs - -let apply_docblock_overrides (metadata: Context.metadata) docblock_info = - let open Context in - - let metadata = - let jsx = match Docblock.jsx docblock_info with - | Some (Docblock.Jsx_pragma (expr, jsx_expr)) -> Options.Jsx_pragma (expr, jsx_expr) - | Some Docblock.Csx_pragma -> Options.Jsx_csx - | None -> Options.Jsx_react + Core_list.iter + ~f:(fun (loc, reason) -> + Flow_js.add_output cx (Error_message.EUnnecessaryInvariant (loc, reason))) + (Context.unnecessary_invariants cx) + +let detect_invalid_type_assert_calls cx file_sigs cxs tasts = + if Context.type_asserts cx then Type_asserts.detect_invalid_calls ~full_cx:cx file_sigs cxs tasts + +let force_annotations leader_cx other_cxs = + Core_list.iter + ~f:(fun cx -> + let should_munge_underscores = Context.should_munge_underscores cx in + Context.module_ref cx + |> Flow_js.lookup_module leader_cx + |> Flow_js.enforce_strict leader_cx ~should_munge_underscores) + (leader_cx :: other_cxs) + +let is_builtin_or_flowlib cx = + File_key.( + function + | Builtins -> true + | LibFile f + | SourceFile f 
-> + begin + match Context.default_lib_dir cx with + | Some path -> Files.is_prefix (Path.to_string path) f + | None -> false + end + | _ -> false) + +let apply_docblock_overrides (mtdt : Context.metadata) docblock_info = + Context.( + let metadata = + let jsx = + match Docblock.jsx docblock_info with + | Some (Docblock.Jsx_pragma (expr, jsx_expr)) -> + let jsx_expr = Ast_loc_utils.loc_to_aloc_mapper#expression jsx_expr in + Options.Jsx_pragma (expr, jsx_expr) + | Some Docblock.Csx_pragma -> Options.Jsx_csx + | None -> Options.Jsx_react + in + { mtdt with jsx } in - { metadata with jsx } - in - - let metadata = match Docblock.flow docblock_info with - | None -> metadata - | Some Docblock.OptIn -> { metadata with checked = true; } - | Some Docblock.OptInStrict -> { metadata with checked = true; strict = true; } - | Some Docblock.OptInStrictLocal -> { metadata with checked = true; strict_local = true; } - | Some Docblock.OptInWeak -> { metadata with checked = true; weak = true } - - (* --all (which sets metadata.checked = true) overrides @noflow, so there are + let metadata = + match Docblock.flow docblock_info with + | None -> metadata + | Some Docblock.OptIn -> { metadata with checked = true } + | Some Docblock.OptInStrict -> { metadata with checked = true; strict = true } + | Some Docblock.OptInStrictLocal -> { metadata with checked = true; strict_local = true } + | Some Docblock.OptInWeak -> { metadata with checked = true; weak = true } + (* --all (which sets metadata.checked = true) overrides @noflow, so there are currently no scenarios where we'd change checked = true to false. in the future, there may be a case where checked defaults to true (but is not forced to be true ala --all), but for now we do *not* want to force checked = false here. *) - | Some Docblock.OptOut -> metadata - in + | Some Docblock.OptOut -> metadata + in + let metadata = + match Docblock.preventMunge docblock_info with + | Some value -> { metadata with munge_underscores = not value } + | None -> metadata + in + metadata) - let metadata = match Docblock.preventMunge docblock_info with - | Some value -> { metadata with munge_underscores = not value; } - | None -> metadata +let detect_non_voidable_properties cx = + (* This function approximately checks whether VoidT can flow to the provided + * type without actually creating the flow so as not to disturb type inference. + * Even though this is happening post-merge, it is possible to encounter an + * unresolved tvar, in which case it conservatively returns false. 
+ *) + let rec is_voidable seen_ids = + Type.( + function + | OpenT (_, id) -> + (* tvar is recursive: conservatively assume it is non-voidable *) + if ISet.mem id seen_ids then + false + else ( + match Flow_js.possible_types cx id with + (* tvar has no lower bounds: we conservatively assume it's non-voidable + * except in the special case when it also has no upper bounds + *) + | [] -> Flow_js.possible_uses cx id = [] + (* tvar is resolved: look at voidability of the resolved type *) + | [t] -> is_voidable (ISet.add id seen_ids) t + (* tvar is unresolved: conservatively assume it is non-voidable *) + | _ -> false + ) + (* a union is voidable if any of its members are voidable *) + | UnionT (_, rep) -> UnionRep.members rep |> List.exists (is_voidable seen_ids) + (* an intersection is voidable if all of its members are voidable *) + | IntersectionT (_, rep) -> InterRep.members rep |> List.for_all (is_voidable seen_ids) + (* trivially voidable *) + | MaybeT _ + | DefT (_, _, (VoidT | MixedT (Mixed_everything | Mixed_non_null))) + | OptionalT _ + | AnyT _ -> + true + (* conservatively assume all other types are non-voidable *) + | _ -> false) in - - metadata - + let check_properties (property_map : Type.Properties.id) : + ALoc.t Property_assignment.error list SMap.t -> unit = + let pmap = Context.find_props cx property_map in + SMap.iter (fun name errors -> + let should_error = + match SMap.get name pmap with + | Some (Type.Field (_, t, _)) -> not @@ is_voidable ISet.empty t + | _ -> true + in + if should_error then + List.iter + (fun { Property_assignment.loc; desc } -> + Flow_js.add_output cx (Error_message.EUninitializedInstanceProperty (loc, desc))) + errors) + in + List.iter + (fun { + Context.public_property_map; + private_property_map; + errors = { Property_assignment.public_property_errors; private_property_errors }; + } -> + check_properties public_property_map public_property_errors; + check_properties private_property_map private_property_errors) + (Context.voidable_checks cx) (* Merge a component with its "implicit requires" and "explicit requires." The implicit requires are those defined in libraries. For the explicit @@ -316,145 +344,190 @@ let apply_docblock_overrides (metadata: Context.metadata) docblock_info = 5. Link the local references to libraries in master_cx and component_cxs. 
*) -let merge_component_strict ~metadata ~lint_severities ~file_options ~strict_mode ~file_sigs - ~get_ast_unsafe ~get_docblock_unsafe ?(do_gc=false) - component reqs dep_cxs (master_cx: Context.sig_t) = - +let merge_component + ~metadata + ~lint_severities + ~file_options + ~strict_mode + ~file_sigs + ~get_ast_unsafe + ~get_aloc_table_unsafe + ~get_docblock_unsafe + ~phase + component + reqs + dep_cxs + (master_cx : Context.sig_t) = let sig_cx = Context.make_sig () in let need_merge_master_cx = ref true in - - let init_gc_state = if do_gc then Some (Gc_js.init ~master_cx) else None in - - let rev_cxs, impl_cxs, _ = Nel.fold_left (fun (cxs, impl_cxs, gc_state) filename -> - (* create cx *) - let info = get_docblock_unsafe filename in - let metadata = apply_docblock_overrides metadata info in - let module_ref = Files.module_ref filename in - let cx = Context.make sig_cx metadata filename module_ref in - - (* create builtins *) - if !need_merge_master_cx then ( - need_merge_master_cx := false; - Flow_js.mk_builtins cx; - Context.merge_into sig_cx master_cx; - implicit_require_strict cx master_cx cx - ); - - (* local inference *) - let ast = get_ast_unsafe filename in - let lint_severities = - if metadata.Context.strict || metadata.Context.strict_local - then StrictModeSettings.fold - (fun lint_kind lint_severities -> - LintSettings.set_value lint_kind (Severity.Err, None) lint_severities - ) strict_mode lint_severities - else lint_severities in - let file_sig = FilenameMap.find_unsafe filename file_sigs in - let typed_ast = - Type_inference_js.infer_ast cx filename ast - ~lint_severities ~file_options ~file_sig - in - - let gc_state = Option.map gc_state Gc_js.(fun gc_state -> - let gc_state = mark cx gc_state in - sweep ~master_cx cx gc_state; - gc_state - ) in - - (cx, typed_ast)::cxs, FilenameMap.add filename cx impl_cxs, gc_state - ) ([], FilenameMap.empty, init_gc_state) component in - let cxs = List.rev rev_cxs in - - let (cx, typed_ast), other_cxs = List.hd cxs, List.tl cxs in - - Flow_js.Cache.clear(); - - dep_cxs |> List.iter (Context.merge_into sig_cx); - - let open Reqs in - - reqs.impls - |> RequireMap.iter (fun (m, fn_to) locs -> - let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in - LocSet.iter (fun loc -> - explicit_impl_require_strict cx (sig_cx, m, loc, cx_to); - ) locs; - ); - - reqs.dep_impls - |> RequireMap.iter (fun (m, fn_to) (cx_from, locs) -> - let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in - LocSet.iter (fun loc -> - explicit_impl_require_strict cx (cx_from, m, loc, cx_to) - ) locs - ); - - reqs.res - |> RequireMap.iter (fun (f, fn_to) locs -> - let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in - LocSet.iter (fun loc -> - explicit_res_require_strict cx (loc, f, cx_to) - ) locs - ); - - reqs.decls - |> RequireMap.iter (fun (m, fn_to) (locs, resolved_m) -> - let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in - LocSet.iter (fun loc -> - explicit_decl_require_strict cx (m, loc, resolved_m, cx_to) - ) locs - ); - - reqs.unchecked - |> RequireMap.iter (fun (m, fn_to) locs -> - let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in - LocSet.iter (fun loc -> - explicit_unchecked_require_strict cx (m, loc, cx_to) - ) locs - ); - - (* Post-merge errors. - * - * At this point, all dependencies have been merged and the component has been - * linked together. Any constraints should have already been evaluated, which - * means we can complain about things that either haven't happened yet, or - * which require complete knowledge of tvar bounds. 
- *) - detect_sketchy_null_checks cx; - detect_test_prop_misses cx; - detect_unnecessary_optional_chains cx; - detect_unnecessary_invariants cx; - detect_invalid_type_assert_calls ~full_cx:cx file_sigs cxs; - - (cx, typed_ast), other_cxs + let (aloc_tables, rev_aloc_tables) = + Nel.fold_left + (fun (tables, rev_tables) filename -> + let table = lazy (get_aloc_table_unsafe filename) in + let rev_table = + lazy + (try Lazy.force table |> ALoc.reverse_table + with + (* If we aren't in abstract locations mode, or are in a libdef, we + won't have an aloc table, so we just create an empty reverse table. We + handle this exception here rather than explicitly making an optional + version of the get_aloc_table function for simplicity. *) + | Parsing_heaps_exceptions.Sig_ast_ALoc_table_not_found _ -> + ALoc.make_empty_reverse_table ()) + in + (FilenameMap.add filename table tables, FilenameMap.add filename rev_table rev_tables)) + (FilenameMap.empty, FilenameMap.empty) + component + in + let (rev_cxs, rev_tasts, impl_cxs) = + Nel.fold_left + (fun (cxs, tasts, impl_cxs) filename -> + (* create cx *) + let info = get_docblock_unsafe filename in + let metadata = apply_docblock_overrides metadata info in + let module_ref = Files.module_ref filename in + let rev_table = FilenameMap.find filename rev_aloc_tables in + let cx = Context.make sig_cx metadata filename aloc_tables rev_table module_ref phase in + (* create builtins *) + if !need_merge_master_cx then ( + need_merge_master_cx := false; + Flow_js.mk_builtins cx; + Context.merge_into sig_cx master_cx; + implicit_require cx master_cx cx + ); + + (* local inference *) + let (comments, ast) = get_ast_unsafe filename in + let lint_severities = + if metadata.Context.strict || metadata.Context.strict_local then + StrictModeSettings.fold + (fun lint_kind lint_severities -> + LintSettings.set_value lint_kind (Severity.Err, None) lint_severities) + strict_mode + lint_severities + else + lint_severities + in + let file_sig = FilenameMap.find_unsafe filename file_sigs in + let tast = + Type_inference_js.infer_ast + cx + filename + comments + ast + ~lint_severities + ~file_options + ~file_sig + in + (cx :: cxs, tast :: tasts, FilenameMap.add filename cx impl_cxs)) + ([], [], FilenameMap.empty) + component + in + let cxs = Core_list.rev rev_cxs in + let tasts = Core_list.rev rev_tasts in + let (cx, other_cxs) = (Core_list.hd_exn cxs, Core_list.tl_exn cxs) in + Flow_js.Cache.clear (); + + dep_cxs |> Core_list.iter ~f:(Context.merge_into sig_cx); + + Reqs.( + reqs.impls + |> RequireMap.iter (fun (m, fn_to) locs -> + let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in + ALocSet.iter (fun loc -> explicit_impl_require cx (sig_cx, m, loc, cx_to)) locs); + + reqs.dep_impls + |> RequireMap.iter (fun (m, fn_to) (cx_from, locs) -> + let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in + ALocSet.iter (fun loc -> explicit_impl_require cx (cx_from, m, loc, cx_to)) locs); + + reqs.res + |> RequireMap.iter (fun (f, fn_to) locs -> + let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in + ALocSet.iter (fun loc -> explicit_res_require cx (loc, f, cx_to)) locs); + + reqs.decls + |> RequireMap.iter (fun (m, fn_to) (locs, resolved_m) -> + let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in + ALocSet.iter (fun loc -> explicit_decl_require cx (m, loc, resolved_m, cx_to)) locs); + + reqs.unchecked + |> RequireMap.iter (fun (m, fn_to) locs -> + let cx_to = FilenameMap.find_unsafe fn_to impl_cxs in + ALocSet.iter (fun loc -> explicit_unchecked_require cx (m, loc, cx_to)) locs); + + 
let coverages = Query_types.component_coverage ~full_cx:cx tasts in + (* Post-merge errors. + * + * At this point, all dependencies have been merged and the component has been + * linked together. Any constraints should have already been evaluated, which + * means we can complain about things that either haven't happened yet, or + * which require complete knowledge of tvar bounds. + *) + detect_sketchy_null_checks cx; + detect_non_voidable_properties cx; + detect_test_prop_misses cx; + detect_unnecessary_optional_chains cx; + detect_unnecessary_invariants cx; + detect_invalid_type_assert_calls cx file_sigs cxs tasts; + + force_annotations cx other_cxs; + + match ListUtils.combine3 (cxs, tasts, coverages) with + | [] -> failwith "there is at least one cx" + | x :: xs -> (x, xs)) let merge_tvar = - let open Type in - let possible_types = Flow_js.possible_types in - let rec collect_lowers cx seen acc = function - | [] -> List.rev acc - | t::ts -> - match t with - (* Recursively unwrap unseen tvars *) - | OpenT (_, id) -> - if ISet.mem id seen - then collect_lowers cx seen acc ts (* already unwrapped *) - else collect_lowers cx (ISet.add id seen) acc (possible_types cx id @ ts) - (* Ignore empty *) - | DefT (_, EmptyT) -> collect_lowers cx seen acc ts - (* Everything else becomes part of the merge typed *) - | _ -> collect_lowers cx seen (t::acc) ts - in - fun cx r id -> - let lowers = collect_lowers cx (ISet.singleton id) [] (possible_types cx id) in - match lowers with + Type.( + let possible_types = Flow_js.possible_types in + let rec collect_lowers ~filter_empty cx seen acc = function + | [] -> Core_list.rev acc + | t :: ts -> + (match t with + (* Recursively unwrap unseen tvars *) + | OpenT (_, id) -> + if ISet.mem id seen then + collect_lowers ~filter_empty cx seen acc ts + (* already unwrapped *) + else + let seen = ISet.add id seen in + collect_lowers ~filter_empty cx seen acc (possible_types cx id @ ts) + (* Ignore empty in existentials. This behavior is sketchy, but the error + behavior without this filtering is worse. If an existential accumulates + an empty, we error but it's very non-obvious how the empty arose. *) + | DefT (_, _, EmptyT _) when filter_empty -> collect_lowers ~filter_empty cx seen acc ts + (* Everything else becomes part of the merge typed *) + | _ -> collect_lowers ~filter_empty cx seen (t :: acc) ts) + in + fun cx r id -> + (* Because the behavior of existentials are so difficult to predict, they + enjoy some special casing here. When existential types are finally + removed, this logic can be removed. 
*) + let existential = + Reason.( + match desc_of_reason r with + | RExistential -> true + | _ -> false) + in + let lowers = + let seen = ISet.singleton id in + collect_lowers cx seen [] (possible_types cx id) ~filter_empty:existential + in + match lowers with | [t] -> t - | t0::t1::ts -> DefT (r, UnionT (UnionRep.make t0 t1 ts)) + | t0 :: t1 :: ts -> UnionT (r, UnionRep.make t0 t1 ts) | [] -> let uses = Flow_js.possible_uses cx id in - if uses = [] - then Locationless.AnyT.t - else MergedT (r, uses) + if uses = [] || existential then + AnyT.locationless Unsoundness.existential + else + MergedT (r, uses)) + +let merge_trust_var constr = + Trust_constraint.( + match constr with + | TrustResolved t -> t + | TrustUnresolved bound -> get_trust bound |> Trust.fix) (****************** signature contexts *********************) @@ -502,237 +575,337 @@ module ContextOptimizer = struct open Constraint open Type - class context_optimizer = object(self) - inherit [Polarity.t] Type_mapper.t_with_uses as super - - val sig_hash = Xx.init () - method sig_hash () = Xx.digest sig_hash - - val mutable next_stable_id = 0 - method fresh_stable_id = - let stable_id = next_stable_id in - next_stable_id <- next_stable_id + 1; - stable_id - - val mutable stable_tvar_ids = IMap.empty - val mutable stable_nominal_ids = IMap.empty - val mutable stable_eval_ids = IMap.empty - val mutable stable_opaque_ids = IMap.empty - val mutable stable_poly_ids = IMap.empty - val mutable reduced_module_map = SMap.empty; - val mutable reduced_graph = IMap.empty; - val mutable reduced_property_maps = Properties.Map.empty; - val mutable reduced_call_props = IMap.empty; - val mutable reduced_export_maps = Exports.Map.empty; - val mutable reduced_evaluated = IMap.empty; - - method reduce cx module_ref = - let export = Context.find_module cx module_ref in - let export' = self#type_ cx Neutral export in - reduced_module_map <- SMap.add module_ref export' reduced_module_map - - method tvar cx pole r id = - let root_id, _ = Context.find_constraints cx id in - if id == root_id then - if IMap.mem id reduced_graph then - let stable_id = IMap.find_unsafe root_id stable_tvar_ids in - SigHash.add_int sig_hash stable_id; + class context_optimizer = + object (self) + inherit [Polarity.t] Type_mapper.t_with_uses as super + + val sig_hash = Xx.init () + + method sig_hash () = Xx.digest sig_hash + + val mutable next_stable_id = 0 + + method fresh_stable_id = + let stable_id = next_stable_id in + next_stable_id <- next_stable_id + 1; + stable_id + + val mutable stable_tvar_ids = IMap.empty + + val mutable stable_trust_var_ids = IMap.empty + + val mutable stable_eval_ids = IMap.empty + + val mutable stable_poly_ids = IMap.empty + + val mutable stable_props_ids = IMap.empty + + val mutable stable_call_prop_ids = IMap.empty + + val mutable reduced_module_map = SMap.empty + + val mutable reduced_graph = IMap.empty + + val mutable reduced_trust_graph = IMap.empty + + val mutable reduced_property_maps = Properties.Map.empty + + val mutable reduced_call_props = IMap.empty + + val mutable reduced_export_maps = Exports.Map.empty + + val mutable reduced_evaluated = IMap.empty + + val mutable export_reason = None + + val mutable export_file = None + + method private warn_dynamic_exports cx r reason_exp = + match Reason.aloc_of_reason reason_exp |> ALoc.source with + (* The second check here may seem unnecessary, but if the exports of a file are exactly what + * it imports from another file this can cause positioning issues. 
Consider + * + * module.exports = require('lib'); + * + * In this case the reason produced by the require statement is actually positioned in the + * 'lib' file where the exports were defined; this can break our invariant that all lints have + * their primary position in the file where the lint occurs. We don't want to change the + * positioning of the require, because this increases the verbosity of all error messages that + * reference types or values defined in other files. Instead, we just don't report any export + * warnings that arise when the reason isn't in the current file. Given that this warning is + * only reported when the type we are exporting contains an any, and we are exporting exactly + * what we import from another file, it must follow that the imported file itself exported an + * any and the warning was raised there. + + * The alternative check that export_file is builtin allows this lint to appear in libdefs, + * since the source of the export of a libdef is set to `Builtins` even if the libdef is + * user-provided. Actual builtin files will be filtered out by the first check. + *) + | Some file + when (not @@ is_builtin_or_flowlib cx file) + && (export_file = Some file || export_file = Some File_key.Builtins) -> + Error_message.EDynamicExport (r, reason_exp) |> Flow_js.add_output cx + | _ -> () + + method reduce cx module_ref = + let export = Context.find_module cx module_ref in + export_file <- reason_of_t export |> Reason.aloc_of_reason |> ALoc.source; + let export' = self#type_ cx Polarity.Neutral export in + reduced_module_map <- SMap.add module_ref export' reduced_module_map + + method tvar cx pole r id = + let (root_id, _) = Context.find_constraints cx id in + if id == root_id then ( + if IMap.mem id reduced_graph then ( + let stable_id = IMap.find_unsafe root_id stable_tvar_ids in + SigHash.add_int sig_hash stable_id; + id + ) else + let t = merge_tvar cx r id in + let node = Root { rank = 0; constraints = FullyResolved (unknown_use, t) } in + reduced_graph <- IMap.add id node reduced_graph; + let () = + let stable_id = self#fresh_stable_id in + stable_tvar_ids <- IMap.add id stable_id stable_tvar_ids + in + let t = self#type_ cx pole t in + let node = Root { rank = 0; constraints = FullyResolved (unknown_use, t) } in + reduced_graph <- IMap.add id node reduced_graph; + id + ) else ( + ignore (self#tvar cx pole r root_id); + let node = Goto root_id in + reduced_graph <- IMap.add id node reduced_graph; + id + ) + + method trust_var cx pole id = + let (root_id, constr) = Context.find_trust_constraints cx id in + if id == root_id then ( + if IMap.mem id reduced_trust_graph then ( + let stable_id = IMap.find_unsafe root_id stable_trust_var_ids in + SigHash.add_int sig_hash stable_id; + id + ) else + let t = merge_trust_var constr in + let node = Trust_constraint.new_resolved_root t in + reduced_trust_graph <- IMap.add id node reduced_trust_graph; + let () = + let stable_id = self#fresh_stable_id in + stable_trust_var_ids <- IMap.add id stable_id stable_trust_var_ids + in + id + ) else ( + ignore (self#trust_var cx pole root_id); + let node = Trust_constraint.TrustGoto root_id in + reduced_trust_graph <- IMap.add id node reduced_trust_graph; + id + ) + + method props cx pole id = + if Properties.Map.mem id reduced_property_maps then + let () = + Option.iter + ~f:(fun id_int -> + let stable_id = + if Context.mem_nominal_id cx id_int then + IMap.find_unsafe id_int stable_props_ids + else + id_int + in + SigHash.add_int sig_hash stable_id) + (Properties.id_as_int id) + 
in id else - let t = merge_tvar cx r id in - let node = Root { rank = 0; constraints = Resolved t } in - reduced_graph <- IMap.add id node reduced_graph; let () = - let stable_id = self#fresh_stable_id in - stable_tvar_ids <- IMap.add id stable_id stable_tvar_ids + Option.iter + ~f:(fun id_int -> + let stable_id = + if Context.mem_nominal_id cx id_int then ( + let stable_id = self#fresh_stable_id in + stable_props_ids <- IMap.add id_int stable_id stable_props_ids; + stable_id + ) else + id_int + in + SigHash.add_int sig_hash stable_id) + (Properties.id_as_int id) in - let t = (self#type_ cx pole t) in - let node = Root { rank = 0; constraints = Resolved t } in - reduced_graph <- IMap.add id node reduced_graph; + let pmap = Context.find_props cx id in + let () = SigHash.add_props_map sig_hash pmap in + reduced_property_maps <- Properties.Map.add id pmap reduced_property_maps; + let pmap' = SMap.ident_map (self#prop cx pole) pmap in + reduced_property_maps <- Properties.Map.add id pmap' reduced_property_maps; id - else ( - ignore (self#tvar cx pole r root_id); - let node = Goto root_id in - reduced_graph <- IMap.add id node reduced_graph; - id - ) - - method props cx pole id = - if (Properties.Map.mem id reduced_property_maps) - then - let () = SigHash.add_int sig_hash (id :> int) in - id - else - let pmap = Context.find_props cx id in - let () = SigHash.add_props_map sig_hash pmap in - reduced_property_maps <- Properties.Map.add id pmap reduced_property_maps; - let pmap' = SMap.ident_map (self#prop cx pole) pmap in - reduced_property_maps <- Properties.Map.add id pmap' reduced_property_maps; - id - - method call_prop cx pole id = - if (IMap.mem id reduced_call_props) - then - let () = SigHash.add_int sig_hash id in - id - else - let t = Context.find_call cx id in - reduced_call_props <- IMap.add id t reduced_call_props; - let t' = self#type_ cx pole t in - reduced_call_props <- IMap.add id t' reduced_call_props; - id - - method exports cx pole id = - if (Exports.Map.mem id reduced_export_maps) then id - else - let tmap = Context.find_exports cx id in - let map_pair (loc, t) = (loc, self#type_ cx pole t) in - reduced_export_maps <- Exports.Map.add id tmap reduced_export_maps; - let tmap' = SMap.ident_map map_pair tmap in - reduced_export_maps <- Exports.Map.add id tmap' reduced_export_maps; - SigHash.add_exports_map sig_hash tmap'; - id - - method eval_id cx pole id = - if IMap.mem id reduced_evaluated - then - let stable_id = IMap.find_unsafe id stable_eval_ids in - SigHash.add_int sig_hash stable_id; - id - else - let stable_id = self#fresh_stable_id in - stable_eval_ids <- IMap.add id stable_id stable_eval_ids; - match IMap.get id (Context.evaluated cx) with - | None -> id - | Some t -> - reduced_evaluated <- IMap.add id t reduced_evaluated; + + method call_prop cx pole id = + if IMap.mem id reduced_call_props then + let stable_id = IMap.find_unsafe id stable_call_prop_ids in + let () = SigHash.add_int sig_hash stable_id in + id + else + let () = + let stable_id = self#fresh_stable_id in + stable_call_prop_ids <- IMap.add id stable_id stable_call_prop_ids + in + let t = Context.find_call cx id in + reduced_call_props <- IMap.add id t reduced_call_props; let t' = self#type_ cx pole t in - reduced_evaluated <- IMap.add id t' reduced_evaluated; + reduced_call_props <- IMap.add id t' reduced_call_props; id - method! dict_type cx pole dicttype = - let dicttype' = super#dict_type cx pole dicttype in - SigHash.add_polarity sig_hash dicttype'.dict_polarity; - dicttype' - - method! 
type_ cx pole t = - SigHash.add_reason sig_hash (reason_of_t t); - match t with - | InternalT _ -> Utils_js.assert_false "internal types should not appear in signatures" - | OpenT _ -> super#type_ cx pole t - | DefT (_, InstanceT (_, _, _, { class_id; _ })) -> - let id = - if Context.mem_nominal_id cx class_id - then match IMap.get class_id stable_nominal_ids with - | None -> - let id = self#fresh_stable_id in - stable_nominal_ids <- IMap.add class_id id stable_nominal_ids; - id - | Some id -> id - else class_id in - SigHash.add_int sig_hash id; - super#type_ cx pole t - | OpaqueT (_, opaquetype) -> - let id = - let {opaque_id; _} = opaquetype in - if Context.mem_nominal_id cx opaque_id - then match IMap.get opaque_id stable_opaque_ids with - | None -> - let id = self#fresh_stable_id in - stable_opaque_ids <- IMap.add opaque_id id stable_opaque_ids; - id - | Some id -> id - else opaque_id + method! export_types cx map_cx e = + let { exports_tmap; cjs_export; has_every_named_export } = e in + let exports_tmap' = self#exports cx map_cx exports_tmap in + let cjs_export' = + OptionUtils.ident_map + (fun exp -> + export_reason <- Some (reason_of_t exp); + self#type_ cx map_cx exp) + cjs_export in - SigHash.add_int sig_hash id; - super#type_ cx pole t - | DefT (_, PolyT (_, _, poly_id)) -> - let id = - if Context.mem_nominal_id cx poly_id - then match IMap.get poly_id stable_poly_ids with - | None -> - let id = self#fresh_stable_id in - stable_poly_ids <- IMap.add poly_id id stable_poly_ids; + if exports_tmap == exports_tmap' && cjs_export == cjs_export' then + e + else + { exports_tmap = exports_tmap'; cjs_export = cjs_export'; has_every_named_export } + + method exports cx pole id = + if Exports.Map.mem id reduced_export_maps then + id + else + let tmap = Context.find_exports cx id in + let map_pair p = + let (loc, t) = p in + export_reason <- Some (reason_of_t t); + let t' = self#type_ cx pole t in + if t == t' then + p + else + (loc, t') + in + reduced_export_maps <- Exports.Map.add id tmap reduced_export_maps; + let tmap' = SMap.ident_map map_pair tmap in + reduced_export_maps <- Exports.Map.add id tmap' reduced_export_maps; + SigHash.add_exports_map sig_hash tmap'; + id + + method eval_id cx pole id = + if IMap.mem id reduced_evaluated then ( + let stable_id = IMap.find_unsafe id stable_eval_ids in + SigHash.add_int sig_hash stable_id; + id + ) else + let stable_id = self#fresh_stable_id in + stable_eval_ids <- IMap.add id stable_id stable_eval_ids; + match IMap.get id (Context.evaluated cx) with + | None -> id + | Some t -> + reduced_evaluated <- IMap.add id t reduced_evaluated; + let t' = self#type_ cx pole t in + reduced_evaluated <- IMap.add id t' reduced_evaluated; id - | Some id -> id - else poly_id - in - SigHash.add_int sig_hash id; - super#type_ cx pole t - | _ -> - let t' = super#type_ cx pole t in - SigHash.add_type sig_hash t'; - t' - - method! use_type cx pole use = - SigHash.add_reason sig_hash (reason_of_use_t use); - match use with - | UseT (u, t) -> - let t' = self#type_ cx Neutral t in - if t' == t then use - else UseT (u, t') - | _ -> - SigHash.add_use sig_hash use; - super#use_type cx pole use - - method! 
choice_use_tool cx pole t = - match t with - | FullyResolveType id -> - ignore @@ self#type_graph cx pole ISet.empty id; - t - | _ -> super#choice_use_tool cx pole t - - method private type_graph cx pole seen id = - let open Graph_explorer in - let seen' = ISet.add id seen in - if seen' == seen then (seen, id) else - let graph = Context.type_graph cx in - ignore @@ self#eval_id cx pole id; - let seen' = match IMap.get id graph.explored_nodes with - | None -> seen' - | Some {deps} -> - ISet.fold (fun id seen -> fst @@ self#type_graph cx pole seen id) deps seen' - in - let seen' = - match IMap.get id graph.unexplored_nodes with - | None -> seen' - | Some {rev_deps} -> - ISet.fold (fun id seen -> fst @@ self#type_graph cx pole seen id) rev_deps seen' - in - (seen', id) - - method get_stable_tvar_ids = stable_tvar_ids - method get_stable_nominal_ids = stable_nominal_ids - method get_stable_eval_ids = stable_eval_ids - method get_stable_opaque_ids = stable_opaque_ids - method get_stable_poly_ids = stable_poly_ids - method get_reduced_module_map = reduced_module_map - method get_reduced_graph = reduced_graph - method get_reduced_property_maps = reduced_property_maps - method get_reduced_call_props = reduced_call_props - method get_reduced_export_maps = reduced_export_maps - method get_reduced_evaluated = reduced_evaluated - end + + method! dict_type cx pole dicttype = + let dicttype' = super#dict_type cx pole dicttype in + SigHash.add_polarity sig_hash dicttype'.dict_polarity; + dicttype' + + method! type_ cx pole t = + SigHash.add_reason sig_hash (reason_of_t t); + begin + match t with + | DefT (_, trust, _) when Context.trust_tracking cx && is_ident trust -> + ignore (self#trust_var cx pole (as_ident trust)) + | _ -> () + end; + match t with + | InternalT _ -> Utils_js.assert_false "internal types should not appear in signatures" + | OpenT _ -> super#type_ cx pole t + | DefT (_, _, InstanceT (_, _, _, { class_id; _ })) -> + let id = class_id in + SigHash.add_aloc sig_hash id; + super#type_ cx pole t + | OpaqueT (_, { opaque_id; _ }) -> + let id = opaque_id in + SigHash.add_aloc sig_hash id; + super#type_ cx pole t + | AnyT (r, src) when Unsoundness.banned_in_exports src && Option.is_some export_reason -> + self#warn_dynamic_exports cx r (Option.value_exn export_reason); + let t' = super#type_ cx pole t in + SigHash.add_type sig_hash t'; + t' + | DefT (_, _, PolyT (_, _, _, poly_id)) -> + let id = + if Context.mem_nominal_id cx poly_id then + match IMap.get poly_id stable_poly_ids with + | None -> + let id = self#fresh_stable_id in + stable_poly_ids <- IMap.add poly_id id stable_poly_ids; + id + | Some id -> id + else + poly_id + in + SigHash.add_int sig_hash id; + super#type_ cx pole t + | _ -> + let t' = super#type_ cx pole t in + SigHash.add_type sig_hash t'; + t' + + method! use_type cx pole use = + SigHash.add_reason sig_hash (reason_of_use_t use); + match use with + | UseT (u, t) -> + let t' = self#type_ cx Polarity.Neutral t in + if t' == t then + use + else + UseT (u, t') + | _ -> + SigHash.add_use sig_hash use; + super#use_type cx pole use + + method! choice_use_tool = + (* Even with MergedT, any choice kit constraints should be fully + discharged by this point. This preserves a key invariant, that type + graphs are local to a single merge job. In other words, we will not see + a FullyResolveType constraint that corresponds to a tvar from another + context. This makes it possible to clear the type graph before storing + in the heap. 
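(* A minimal, self-contained sketch (illustrative only, not part of this diff)
   of the stable-id scheme used throughout the optimizer above: ids that are
   specific to a particular merge run (tvar, trust-var, eval, property-map and
   call-prop ids) are remapped to densely allocated ids in first-visit order
   before being added to the signature hash, so the hash only changes when the
   reachable structure changes, not when raw ids shift between runs. *)
let stable_id_of : int -> int =
  let table : (int, int) Hashtbl.t = Hashtbl.create 16 in
  let next = ref 0 in
  fun id ->
    match Hashtbl.find_opt table id with
    | Some stable -> stable
    | None ->
      let stable = !next in
      incr next;
      Hashtbl.add table id stable;
      stable

let () =
  let a = stable_id_of 4242 in
  let b = stable_id_of 17 in
  let c = stable_id_of 4242 in
  (* First-visit order decides the stable ids: 0, 1, then 0 again. *)
  assert ((a, b, c) = (0, 1, 0))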
*) + Utils_js.assert_false "choice kit uses should not appear in signatures" + + method get_reduced_module_map = reduced_module_map + + method get_reduced_graph = reduced_graph + + method get_reduced_trust_graph = reduced_trust_graph + + method get_reduced_property_maps = reduced_property_maps + + method get_reduced_call_props = reduced_call_props + + method get_reduced_export_maps = reduced_export_maps + + method get_reduced_evaluated = reduced_evaluated + end (* walk a context from a list of exports *) let reduce_context cx module_refs = let reducer = new context_optimizer in - List.iter (reducer#reduce cx) module_refs; - reducer#sig_hash (), reducer + Core_list.iter ~f:(reducer#reduce cx) module_refs; + (reducer#sig_hash (), reducer) (* reduce a context to a "signature context" *) let sig_context cx module_refs = - let sig_hash, reducer = reduce_context cx module_refs in + let (sig_hash, reducer) = reduce_context cx module_refs in Context.set_module_map cx reducer#get_reduced_module_map; Context.set_graph cx reducer#get_reduced_graph; + Context.set_trust_graph cx reducer#get_reduced_trust_graph; Context.set_property_maps cx reducer#get_reduced_property_maps; Context.set_call_props cx reducer#get_reduced_call_props; Context.set_export_maps cx reducer#get_reduced_export_maps; Context.set_evaluated cx reducer#get_reduced_evaluated; - Context.set_type_graph cx ( - Graph_explorer.new_graph - (IMap.fold (fun k _ -> ISet.add k) reducer#get_reduced_graph ISet.empty) - ); sig_hash - end diff --git a/src/typing/merge_js.mli b/src/typing/merge_js.mli index 74fb22c6edf..bce95336faa 100644 --- a/src/typing/merge_js.mli +++ b/src/typing/merge_js.mli @@ -1,29 +1,38 @@ (** - * Copyright (c) 2014-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) +type get_ast_return = Loc.t Flow_ast.Comment.t list * (ALoc.t, ALoc.t) Flow_ast.program + module Reqs : sig type t - val empty: t - val add_impl: string -> File_key.t -> Utils_js.LocSet.t -> t -> t - val add_dep_impl: string -> File_key.t -> (Context.sig_t * Utils_js.LocSet.t) -> t -> t - val add_unchecked: string -> File_key.t -> Utils_js.LocSet.t -> t -> t - val add_res: string -> File_key.t -> Utils_js.LocSet.t -> t -> t - val add_decl:string -> File_key.t -> (Utils_js.LocSet.t * Modulename.t) -> t -> t + + val empty : t + + val add_impl : string -> File_key.t -> Loc_collections.ALocSet.t -> t -> t + + val add_dep_impl : string -> File_key.t -> Context.sig_t * Loc_collections.ALocSet.t -> t -> t + + val add_unchecked : string -> File_key.t -> Loc_collections.ALocSet.t -> t -> t + + val add_res : string -> File_key.t -> Loc_collections.ALocSet.t -> t -> t + + val add_decl : string -> File_key.t -> Loc_collections.ALocSet.t * Modulename.t -> t -> t end -val merge_component_strict: - metadata: Context.metadata -> - lint_severities: Severity.severity LintSettings.t -> - file_options: Files.options option -> - strict_mode: StrictModeSettings.t -> - file_sigs: File_sig.t Utils_js.FilenameMap.t -> - get_ast_unsafe: (File_key.t -> (Loc.t, Loc.t) Flow_ast.program) -> - get_docblock_unsafe: (File_key.t -> Docblock.t) -> - ?do_gc: bool -> +val merge_component : + metadata:Context.metadata -> + lint_severities:Severity.severity LintSettings.t -> + file_options:Files.options option -> + strict_mode:StrictModeSettings.t -> + file_sigs:File_sig.With_ALoc.t Utils_js.FilenameMap.t -> + get_ast_unsafe:(File_key.t -> get_ast_return) -> + get_aloc_table_unsafe:(File_key.t -> ALoc.table) -> + get_docblock_unsafe:(File_key.t -> Docblock.t) -> + phase:Context.phase -> (* component *) File_key.t Nel.t -> (* requires *) @@ -32,11 +41,11 @@ val merge_component_strict: Context.sig_t list -> (* master cx *) Context.sig_t -> - (* cxs in component order, hd is merged leader *) - (Context.t * (Loc.t, Loc.t * Type.t) Flow_ast.program) Nel.t + (* cxs in component order, hd is merged leader, along with a coverage summary for each file *) + (Context.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.program * Coverage_response.file_coverage) Nel.t -val merge_tvar: Context.t -> Reason.t -> Constraint.ident -> Type.t +val merge_tvar : Context.t -> Reason.t -> Constraint.ident -> Type.t -module ContextOptimizer: sig +module ContextOptimizer : sig val sig_context : Context.t -> string list -> Xx.hash end diff --git a/src/typing/module_info.ml b/src/typing/module_info.ml new file mode 100644 index 00000000000..87a639ec719 --- /dev/null +++ b/src/typing/module_info.ml @@ -0,0 +1,77 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(** + * This module tracks the exports from a given file, or declared module. + * + * Initially, we assume we're dealing with a CommonJS module. If we see an + * ES-style import/export, we switch over. If we see a combination of CommonJS + * and ES-style, we complain that the module type is indeterminate. + * + * Note that both CommonJS and ES modules can import and export types. 
+ *) + +type t = { + ref: string; + mutable kind: kind; + mutable type_named: Type.Exports.t; + mutable type_star: (ALoc.t * Type.t) list; +} + +and kind = + | CJS of ALoc.t option + | ES of { + named: Type.Exports.t; + star: (ALoc.t * Type.t) list; + } + +let empty_cjs_module ref = { ref; kind = CJS None; type_named = SMap.empty; type_star = [] } + +let export info name loc t = + match info.kind with + | CJS None -> + info.kind <- ES { named = SMap.singleton name (Some loc, t); star = [] }; + Ok () + | ES { named; star } -> + info.kind <- ES { named = SMap.add name (Some loc, t) named; star }; + Ok () + | CJS (Some _) -> Error (Error_message.EIndeterminateModuleType loc) + +let export_star info loc ns = + match info.kind with + | CJS None -> + info.kind <- ES { named = SMap.empty; star = [(loc, ns)] }; + Ok () + | ES { named; star } -> + info.kind <- ES { named; star = (loc, ns) :: star }; + Ok () + | CJS (Some _) -> Error (Error_message.EIndeterminateModuleType loc) + +let export_type info name loc t = info.type_named <- SMap.add name (loc, t) info.type_named + +let export_type_star info loc ns = info.type_star <- (loc, ns) :: info.type_star + +let cjs_clobber info loc = + match info.kind with + | CJS _ -> + info.kind <- CJS (Some loc); + Ok () + | ES _ -> Error (Error_message.EIndeterminateModuleType loc) + +(* Re-exporting names from another file can lead to conflicts. We resolve + * conflicts on a last-export-wins basis. Star exports are accumulated in + * source order, so the head of each list is the last export. This helper + * function interleaves the two reverse-sorted lists. *) +let rec fold_star2 f g acc = function + | ([], []) -> acc + | (xs, []) -> List.fold_left f acc xs + | ([], ys) -> List.fold_left g acc ys + | ((x :: xs' as xs), (y :: ys' as ys)) -> + if ALoc.compare (fst x) (fst y) > 0 then + fold_star2 f g (f acc x) (xs', ys) + else + fold_star2 f g (g acc y) (xs, ys') diff --git a/src/typing/obj_type.ml b/src/typing/obj_type.ml index 087e304580f..4b96d956382 100644 --- a/src/typing/obj_type.ml +++ b/src/typing/obj_type.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
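(* A self-contained sketch (illustrative only, not part of this diff) of the
   CommonJS/ES state machine that Module_info above implements: a module
   starts out as CJS, switches to ES on the first ES-style export, and any
   later mix of the two styles is reported as EIndeterminateModuleType. The
   types below are simplified stand-ins for ALoc.t and Type.Exports.t. *)
type kind =
  | CJS of int option (* Some loc once `module.exports` has been assigned *)
  | ES of string list (* names exported so far *)

let export name = function
  | CJS None -> Ok (ES [name])
  | ES named -> Ok (ES (name :: named))
  | CJS (Some _) -> Error "EIndeterminateModuleType"

let cjs_clobber loc = function
  | CJS _ -> Ok (CJS (Some loc))
  | ES _ -> Error "EIndeterminateModuleType"

let () =
  (* An ES-style export after `module.exports = ...` is rejected. *)
  match cjs_clobber 1 (CJS None) with
  | Ok clobbered ->
    (match export "x" clobbered with
    | Error msg -> print_endline msg
    | Ok _ -> assert false)
  | Error _ -> assert false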
@@ -7,20 +7,37 @@ open Type -let mk_with_proto cx reason - ?(sealed=false) ?(exact=true) ?(frozen=false) ?dict ?call ?(props=SMap.empty) proto = +let mk_with_proto + cx + reason + ?(sealed = false) + ?(exact = true) + ?(frozen = false) + ?dict + ?call + ?(props = SMap.empty) + ?loc + proto = let sealed = - if sealed then Sealed - else UnsealedInFile (Loc.source (Reason.aloc_of_reason reason |> ALoc.to_loc)) + if sealed then + Sealed + else + UnsealedInFile (ALoc.source (Reason.aloc_of_reason reason)) in let flags = { sealed; exact; frozen } in let call = Option.map call ~f:(Context.make_call_prop cx) in - let pmap = Context.make_property_map cx props in - DefT (reason, ObjT (mk_objecttype ~flags ~dict ~call pmap proto)) + let pmap = + match loc with + | None -> Context.generate_property_map cx props + | Some loc -> Context.make_source_property_map cx props loc + in + DefT (reason, bogus_trust (), ObjT (mk_objecttype ~flags ~dict ~call pmap proto)) + +let mk_exact_empty cx reason = + ObjProtoT reason |> mk_with_proto cx reason ~sealed:true ~exact:true ~frozen:true -let mk cx reason = - mk_with_proto cx reason (ObjProtoT reason) +let mk ?(sealed = false) cx reason = mk_with_proto cx reason ~sealed (ObjProtoT reason) and sealed_in_op reason_op = function | Sealed -> true - | UnsealedInFile source -> source <> (Loc.source (Reason.aloc_of_reason reason_op |> ALoc.to_loc)) + | UnsealedInFile source -> source <> ALoc.source (Reason.aloc_of_reason reason_op) diff --git a/src/typing/object_kit.ml b/src/typing/object_kit.ml new file mode 100644 index 00000000000..1dcebea5f7e --- /dev/null +++ b/src/typing/object_kit.ml @@ -0,0 +1,994 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Reason +open Type + +module type OBJECT = sig + val run : + Context.t -> + Trace.t -> + use_op:Type.use_op -> + Reason.t -> + Type.Object.resolve_tool -> + Type.Object.tool -> + Type.t -> + Type.t -> + unit +end + +module Kit (Flow : Flow_common.S) : OBJECT = struct + include Flow + + exception CannotSpreadError of Error_message.t + + let run = + Object.( + (*******************************) + (* Shared Object Kit Utilities *) + (*******************************) + let read_prop r flags x p = + let t = + match Property.read_t p with + | Some t -> t + | None -> + let reason = replace_desc_reason (RUnknownProperty (Some x)) r in + let t = DefT (reason, bogus_trust (), MixedT Mixed_everything) in + t + in + (t, flags.exact) + in + let read_dict r { value; dict_polarity; _ } = + if Polarity.compat (dict_polarity, Polarity.Positive) then + value + else + let reason = replace_desc_reason (RUnknownProperty None) r in + DefT (reason, bogus_trust (), MixedT Mixed_everything) + in + (* Treat dictionaries as optional, own properties. Dictionary reads should + * be exact. TODO: Forbid writes to indexers through the photo chain. + * Property accesses which read from dictionaries normally result in a + * non-optional result, but that leads to confusing spread results. For + * example, `p` in `{...{|p:T|},...{[]:U}` should `T|U`, not `U`. 
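(* A toy model (illustrative only, not part of this diff) of the rule in the
   comment above: a property that is covered only by an indexer is read as an
   optional own property, so the merge that follows unions it with a matching
   property from the other slice instead of overwriting it. That is the
   `T | U` outcome the `{...{|p:T|},...{[]:U}}` example calls for. The `ty`
   type is a stand-in for Flow's internal types. *)
type ty =
  | T of string
  | Union of ty * ty
  | Optional of ty

(* Explicit props win; otherwise fall back to the indexer's value type, made
   optional because the key may or may not be present at runtime. *)
let get_prop (prop : ty option) (dict_value : ty option) : ty option =
  match (prop, dict_value) with
  | (Some _, _) -> prop
  | (None, Some value) -> Some (Optional value)
  | (None, None) -> None

let split = function
  | Optional t -> (t, true)
  | t -> (t, false)

(* Follows the optionality rule used by the spread code below: a required
   right-hand property wins outright; an optional one unions with the left. *)
let merge_props t1 t2 =
  let (t1, opt1) = split t1 in
  let (t2, opt2) = split t2 in
  if not opt2 then
    t2
  else if opt1 then
    Optional (Union (t1, t2))
  else
    Union (t1, t2)

let () =
  (* Spreading {[string]: U} over {|p: T|}: p becomes T | U, not U. *)
  match get_prop None (Some (T "U")) with
  | Some rhs -> assert (merge_props (T "T") rhs = Union (T "T", T "U"))
  | None -> assert false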
*) + let get_prop r p dict = + match (p, dict) with + | (Some _, _) -> p + | (None, Some d) -> Some (optional (read_dict r d), true) + | (None, None) -> None + in + (* Lift a pairwise function to a function over a resolved list *) + let merge (f : slice -> slice -> slice) = + let f' (x0 : resolved) (x1 : resolved) = + Nel.map_concat (fun slice1 -> Nel.map (f slice1) x0) x1 + in + let rec loop x0 = function + | [] -> x0 + | x1 :: xs -> loop (f' x0 x1) xs + in + (fun x0 (x1, xs) -> loop (f' x0 x1) xs) + in + (* Lift a pairwise function that may return an error to a function over a resolved list + * that may return an error, like spread2 *) + let merge_result (f : 'a -> 'a -> ('a, Error_message.t) result) (conv : 'b -> 'a Nel.t) = + let bind f = function + | Ok data -> f data + | Error e -> Error e + in + let mapM f = function + | Ok data -> Ok (f data) + | Error e -> Error e + in + let f' (x0 : 'a Nel.t) (x1 : 'b) : ('a Nel.t, Error_message.t) result = + let x1 = conv x1 in + let resolved_list = + Nel.fold_left + (fun acc slice1 -> + bind + (fun acc -> + let resolved = + Nel.fold_left + (fun acc x -> + bind (fun acc -> bind (fun slice -> Ok (slice :: acc)) (f x slice1)) acc) + (Ok []) + x0 + in + let resolved = resolved |> mapM List.rev in + bind (fun resolved -> Ok (resolved :: acc)) resolved) + acc) + (Ok []) + x1 + in + let resolved_list = resolved_list |> mapM List.rev in + (* Each of the lists were non-empty, so concatenating them creates a non-empty list. Thus, + * Nel.of_list_exn is safe *) + bind (fun lists -> Ok (Nel.of_list_exn (Core_list.join lists))) resolved_list + in + let rec loop (x0 : 'a Nel.t) (xs : 'b list) = + match xs with + | [] -> Ok x0 + | x1 :: xs -> bind (fun resolved -> loop resolved xs) (f' x0 x1) + in + (fun x0 (x1, xs) -> bind (fun resolved -> loop resolved xs) (f' (conv x0) x1)) + in + (*****************) + (* Object Spread *) + (*****************) + let object_spread = + Object.Spread.( + (* Compute spread result: slice * slice -> slice *) + let spread2 + reason + (_inline1, { Object.reason = r1; props = props1; dict = dict1; flags = flags1 }) + (inline2, { Object.reason = r2; props = props2; dict = dict2; flags = flags2 }) = + let dict = + match (dict1, dict2) with + | (None, Some _) when inline2 -> Ok dict2 + | (_, Some { key; value = _; dict_name = _; dict_polarity = _ }) -> + Error + (Error_message.ECannotSpreadIndexerOnRight + { spread_reason = reason; object_reason = r2; key_reason = reason_of_t key }) + | (Some { key; value; dict_name = _; dict_polarity = _ }, _) when not flags2.exact -> + Error + (Error_message.EInexactMayOverwriteIndexer + { + spread_reason = reason; + key_reason = reason_of_t key; + value_reason = reason_of_t value; + object2_reason = r2; + }) + | _ -> Ok dict1 + in + match dict with + | Error e -> Error e + | Ok dict -> + let union t1 t2 = UnionT (reason, UnionRep.make t1 t2 []) in + let type_and_optionality t = + match t with + | OptionalT (_, t) -> (t, true) + | _ -> (t, false) + in + let merge_props (t1, _) (t2, _) = + let (t1, opt1) = type_and_optionality t1 in + let (t2, opt2) = type_and_optionality t2 in + if not opt2 then + (t2, true) + else if opt1 && opt2 then + (optional (union t1 t2), true) + (* In this case, we know opt2 is true and opt1 is false *) + else + (union t1 t2, true) + in + let props = + try + Ok + (SMap.merge + (fun x p1 p2 -> + match (p1, p2) with + | (None, None) -> None + | (_, Some p2) when inline2 -> Some (fst p2, true) + | (Some p1, Some p2) -> Some (merge_props p1 p2) + | (Some p1, None) -> + if 
flags2.exact then + Some (fst p1, true) + else + raise + (CannotSpreadError + (Error_message.EUnableToSpread + { + spread_reason = reason; + object1_reason = r1; + object2_reason = r2; + propname = x; + error_kind = Error_message.Inexact; + })) + (* We care about a few cases here. We want to make sure that we can + * infer a precise type. This is tricky when the left-hand slice is inexact, + * since it may contain p2 even though it's not explicitly specified. + * + * If p2 is not optional, then we won't have to worry about anything because it will + * definitely overwrite a property with a key matching p2's on the left. + * + * If p2 is optional, then we can split into a few more cases: + * 1. o1 is inexact: error, we cannot infer a precise type since o1 might contain p2 + * 2. o1 has an indexer: error, we would have to infer a union with the indexer type. + * This would be sound, but it's not likely that anyone would intend it. If that + * assumption turns out to be false, we can easily add support for it later. + * 3. o1 is exact: no problem, we don't need to worry about o1 having the + * same property. + * + * The if statement below handles 1. and 2., and the else statement + * handles 3. and the case when p2 is not optional. + *) + | (None, Some p2) -> + let (_, opt2) = type_and_optionality (fst p2) in + if (dict1 <> None || not flags1.exact) && opt2 then + let error_kind = + if dict1 <> None then + Error_message.Indexer + else + Error_message.Inexact + in + raise + (CannotSpreadError + (Error_message.EUnableToSpread + { + spread_reason = reason; + (* in this case, the object on the left is inexact. the error will say + * that object2_reason is inexact and may contain propname, so + * we should assign r2 to object1_reason and r1 to object2_reason *) + object1_reason = r2; + object2_reason = r1; + propname = x; + error_kind; + })) + else + Some (fst p2, true)) + props1 + props2) + with CannotSpreadError e -> Error e + in + let flags = + { + frozen = flags1.frozen && flags2.frozen; + sealed = Sealed; + exact = + flags1.exact + && flags2.exact + && Obj_type.sealed_in_op reason flags1.sealed + && Obj_type.sealed_in_op reason flags2.sealed; + } + in + (match props with + | Ok props -> Ok (false, { Object.reason; props; dict; flags }) + | Error e -> Error e) + in + let resolved_of_acc_element = function + | Object.Spread.ResolvedSlice resolved -> Nel.map (fun x -> (false, x)) resolved + | Object.Spread.InlineSlice { Object.Spread.reason; prop_map; dict } -> + let flags = { exact = true; frozen = false; sealed = Sealed } in + let props = SMap.mapi (read_prop reason flags) prop_map in + Nel.one (true, { Object.reason; props; dict; flags }) + in + let spread reason = function + | (x, []) -> Ok (resolved_of_acc_element x) + | (x0, (x1 :: xs : Object.Spread.acc_element list)) -> + merge_result (spread2 reason) resolved_of_acc_element x0 (x1, xs) + in + let mk_object cx reason target { Object.reason = r; props; dict; flags } = + let props = SMap.map (fun (t, _) -> Field (None, t, Polarity.Neutral)) props in + let id = Context.generate_property_map cx props in + let proto = ObjProtoT reason in + let flags = + let exact = + match target with + (* Type spread result is exact if annotated to be exact *) + | Annot { make_exact } -> make_exact + (* Value spread result is exact if all inputs are exact *) + | Value -> flags.exact + in + { sealed = Sealed; frozen = false; exact } + in + let call = None in + let t = mk_object_def_type ~reason:r ~flags ~dict ~call id proto in + (* Wrap the final type in an 
`ExactT` if we have an exact flag *) + if flags.exact then + ExactT (reason, t) + else + t + in + fun options state cx trace use_op reason tout x -> + let reason = update_desc_reason invalidate_rtype_alias reason in + let { todo_rev; acc } = state in + Nel.iter + (fun { Object.reason = r; props = _; dict = _; flags = { exact; _ } } -> + match options with + | Annot { make_exact } when make_exact && not exact -> + add_output cx ~trace (Error_message.EIncompatibleWithExact ((r, reason), use_op)) + | _ -> ()) + x; + let x = Object.Spread.ResolvedSlice x in + let rec continue acc (x : Object.Spread.acc_element) = function + | [] -> + let t = + match spread reason (x, acc) with + | Ok ((_, x), []) -> mk_object cx reason options x + | Ok ((_, x0), (_, x1) :: xs) -> + let xs = List.map snd xs in + UnionT + ( reason, + UnionRep.make + (mk_object cx reason options x0) + (mk_object cx reason options x1) + (Core_list.map ~f:(mk_object cx reason options) xs) ) + | Error e -> + add_output cx ~trace e; + AnyT.why AnyError reason + in + (* Intentional UnknownUse here. *) + rec_flow_t cx ~use_op trace (t, tout) + | Type t :: todo_rev -> + let tool = Resolve Next in + let state = { todo_rev; acc = x :: acc } in + rec_flow cx trace (t, ObjKitT (use_op, reason, tool, Spread (options, state), tout)) + | Slice operand_slice :: todo_rev -> + let acc = x :: acc in + continue acc (InlineSlice operand_slice) todo_rev + in + continue acc x todo_rev) + in + (***************) + (* Object Rest *) + (***************) + let object_rest = + Object.Rest.( + let optional = function + | OptionalT _ as t -> t + | t -> Type.optional t + in + (* Subtract the second slice from the first slice and return the difference + * slice. The runtime implementation of this type operation is: + * + * const result = {}; + * + * for (const p in props1) { + * if (hasOwnProperty(props1, p)) { + * if (!hasOwnProperty(props2, p)) { + * result[p] = props1[p]; + * } + * } + * } + * + * The resulting object only has a property if the property is own in props1 and + * it is not an own property of props2. + *) + let rest + cx + trace + ~use_op + reason + merge_mode + { Object.reason = r1; props = props1; dict = dict1; flags = flags1 } + { Object.reason = r2; props = props2; dict = dict2; flags = flags2 } = + let props = + SMap.merge + (fun k p1 p2 -> + match (merge_mode, get_prop r1 p1 dict1, get_prop r2 p2 dict2, flags2.exact) with + (* If the object we are using to subtract has an optional property, non-own + * property, or is inexact then we should add this prop to our result, but + * make it optional as we cannot know for certain whether or not at runtime + * the property would be subtracted. + * + * Sound subtraction also considers exactness and owness to determine + * optionality. If p2 is maybe-own then sometimes it may not be + * subtracted and so is optional. If props2 is not exact then we may + * optionally have some undocumented prop. *) + | ((Sound | IgnoreExactAndOwn), Some (t1, _), Some ((OptionalT _ as t2), _), _) + | (Sound, Some (t1, _), Some (t2, false), _) + | (Sound, Some (t1, _), Some (t2, _), false) -> + rec_flow cx trace (t1, UseT (use_op, optional t2)); + Some (Field (None, optional t1, Polarity.Neutral)) + (* Otherwise if the object we are using to subtract has a non-optional own + * property and the object is exact then we never add that property to our + * source object. 
*) + | ((Sound | IgnoreExactAndOwn), None, Some (t2, _), _) -> + let reason = replace_desc_reason (RUndefinedProperty k) r1 in + rec_flow + cx + trace + (VoidT.make reason |> with_trust bogus_trust, UseT (use_op, t2)); + None + | ((Sound | IgnoreExactAndOwn), Some (t1, _), Some (t2, _), _) -> + rec_flow cx trace (t1, UseT (use_op, t2)); + None + (* If we have some property in our first object and none in our second + * object, but our second object is inexact then we want to make our + * property optional and flow that type to mixed. *) + | (Sound, Some (t1, _), None, false) -> + rec_flow cx trace (t1, UseT (use_op, MixedT.make r2 |> with_trust bogus_trust)); + Some (Field (None, optional t1, Polarity.Neutral)) + (* If neither object has the prop then we don't add a prop to our + * result here. *) + | ((Sound | IgnoreExactAndOwn | ReactConfigMerge _), None, None, _) -> None + (* If our first object has a prop and our second object does not have that + * prop then we will copy over that prop. If the first object's prop is + * non-own then sometimes we may not copy it over so we mark it + * as optional. *) + | (IgnoreExactAndOwn, Some (t, _), None, _) -> + Some (Field (None, t, Polarity.Neutral)) + | (ReactConfigMerge _, Some (t, _), None, _) -> + Some (Field (None, t, Polarity.Positive)) + | (Sound, Some (t, true), None, _) -> Some (Field (None, t, Polarity.Neutral)) + | (Sound, Some (t, false), None, _) -> + Some (Field (None, optional t, Polarity.Neutral)) + (* React config merging is special. We are trying to solve for C + * in the equation (where ... represents spread instead of rest): + * + * {...DP, ...C} = P + * + * Where DP and P are known. Consider this case: + * + * {...{p?}, ...C} = {p} + * + * The solution for C here is {p} instead of {p?} since + * {...{p?}, ...{p?}} is {p?} instead of {p}. This is inconsistent with + * the behavior of other object rest merge modes implemented in this + * pattern match. *) + | (ReactConfigMerge _, Some (t1, _), Some (OptionalT (_, t2), _), _) -> + (* We only test the subtyping relation of t1 and t2 if both t1 and t2 + * are optional types. If t1 is required then t2 will always + * be overwritten. *) + (match t1 with + | OptionalT (_, t1) -> rec_flow_t cx trace (t2, t1) + | _ -> ()); + Some (Field (None, t1, Polarity.Positive)) + (* Using our same equation. Consider this case: + * + * {...{p}, ...C} = {p} + * + * The solution for C here is {p?}. An empty object, {}, is not a valid + * solution unless that empty object is exact. Even for exact objects, + * {|p?|} is the best solution since it accepts more valid + * programs then {||}. *) + | (ReactConfigMerge _, Some (t1, _), Some (t2, _), _) -> + (* The DP type for p must be a subtype of the P type for p. *) + rec_flow_t cx trace (t2, t1); + Some (Field (None, optional t1, Polarity.Positive)) + (* Consider this case: + * + * {...{p}, ...C} = {} + * + * For C there will be no prop. However, if the props object is exact + * then we need to throw an error. 
*) + | (ReactConfigMerge _, None, Some (_, _), _) -> + ( if flags1.exact then + let use_op = + Frame + ( PropertyCompatibility { prop = Some k; lower = r2; upper = r1 }, + unknown_use ) + in + let r2 = replace_desc_reason (RProperty (Some k)) r2 in + let err = Error_message.EPropNotFound (Some k, (r2, r1), use_op) in + add_output cx ~trace err ); + None) + props1 + props2 + in + let dict = + match (dict1, dict2) with + | (None, None) -> None + | (Some dict, None) -> Some dict + | (None, Some _) -> None + (* If our first and second objects have a dictionary then we use our first + * dictionary, but we make the value optional since any set of keys may have + * been removed. *) + | (Some dict1, Some dict2) -> + rec_flow cx trace (dict1.value, UseT (use_op, dict2.value)); + Some + { + dict_name = None; + key = dict1.key; + value = optional dict1.value; + dict_polarity = Polarity.Neutral; + } + in + let flags = + { + frozen = false; + sealed = Sealed; + exact = flags1.exact && Obj_type.sealed_in_op reason flags1.sealed; + } + in + let id = Context.generate_property_map cx props in + let proto = ObjProtoT r1 in + let call = None in + let t = mk_object_def_type ~reason:r1 ~flags ~dict ~call id proto in + (* Wrap the final type in an `ExactT` if we have an exact flag *) + if flags.exact then + ExactT (r1, t) + else + t + in + fun options state cx trace use_op reason tout x -> + match state with + | One t -> + let tool = Resolve Next in + let state = Done x in + rec_flow cx trace (t, ObjKitT (use_op, reason, tool, Rest (options, state), tout)) + | Done base -> + let xs = + Nel.map_concat + (fun slice -> Nel.map (rest cx trace ~use_op reason options slice) x) + base + in + let t = + match xs with + | (x, []) -> x + | (x0, x1 :: xs) -> UnionT (reason, UnionRep.make x0 x1 xs) + in + let use_op p = Frame (ReactGetConfig { polarity = p }, use_op) in + (match options with + | ReactConfigMerge Polarity.Neutral -> + rec_unify cx trace ~use_op:(use_op Polarity.Neutral) t tout + | ReactConfigMerge Polarity.Negative -> + rec_flow_t cx trace ~use_op:(use_op Polarity.Negative) (tout, t) + | ReactConfigMerge Polarity.Positive -> + rec_flow_t cx trace ~use_op:(use_op Polarity.Positive) (t, tout) + | _ -> + (* Intentional UnknownUse here. *) + rec_flow_t cx trace (t, tout))) + in + (********************) + (* Object Read Only *) + (********************) + let object_read_only = + let polarity = Polarity.Positive in + let mk_read_only_object cx reason slice = + let { Object.reason = r; props; dict; flags } = slice in + let props = SMap.map (fun (t, _) -> Field (None, t, polarity)) props in + let dict = Option.map dict (fun dict -> { dict with dict_polarity = polarity }) in + let call = None in + let id = Context.generate_property_map cx props in + let proto = ObjProtoT reason in + let t = mk_object_def_type ~reason:r ~flags ~dict ~call id proto in + if flags.exact then + ExactT (reason, t) + else + t + in + fun cx trace _ reason tout x -> + let t = + match Nel.map (mk_read_only_object cx reason) x with + | (t, []) -> t + | (t0, t1 :: ts) -> UnionT (reason, UnionRep.make t0 t1 ts) + in + (* Intentional UnknownUse here. 
*) + rec_flow_t cx trace (t, tout) + in + (**************) + (* Object Rep *) + (**************) + let object_rep = + let mk_object cx reason { Object.reason = r; props; dict; flags } = + (* TODO(jmbrown): Add polarity information to props *) + let polarity = Polarity.Neutral in + let props = SMap.map (fun (t, _) -> Field (None, t, polarity)) props in + let dict = Option.map dict (fun dict -> { dict with dict_polarity = polarity }) in + let call = None in + let id = Context.generate_property_map cx props in + let proto = ObjProtoT reason in + let t = mk_object_def_type ~reason:r ~flags ~dict ~call id proto in + if flags.exact then + ExactT (reason, t) + else + t + in + fun cx trace use_op reason tout x -> + let t = + match Nel.map (mk_object cx reason) x with + | (t, []) -> t + | (t0, t1 :: ts) -> UnionT (reason, UnionRep.make t0 t1 ts) + in + rec_flow_t cx trace ~use_op (t, tout) + in + (****************) + (* React Config *) + (****************) + let react_config = + Object.ReactConfig.( + (* All props currently have a neutral polarity. However, they should have a + * positive polarity (or even better, constant) since React.createElement() + * freezes the type of props. We use a neutral polarity today because the + * props type we flow the config into is written by users who very rarely + * add a positive variance annotation. We may consider marking that type as + * constant in the future as well. *) + let prop_polarity = Polarity.Neutral in + let finish cx trace reason config defaults children = + let { + Object.reason = config_reason; + props = config_props; + dict = config_dict; + flags = config_flags; + } = + config + in + (* If we have some type for children then we want to add a children prop + * to our config props. *) + let config_props = + Option.value_map children ~default:config_props ~f:(fun children -> + SMap.add "children" (children, true) config_props) + in + (* Remove the key and ref props from our config. We check key and ref + * independently of our config. So we must remove them so the user can't + * see them. *) + let config_props = SMap.remove "key" config_props in + let config_props = SMap.remove "ref" config_props in + (* Create the final props map and dict. + * + * NOTE: React will copy any enumerable prop whether or not it + * is own to the config. *) + let (props, dict, flags) = + match defaults with + (* If we have some default props then we want to add the types for those + * default props to our final props object. *) + | Some + { + Object.reason = defaults_reason; + props = defaults_props; + dict = defaults_dict; + flags = defaults_flags; + } -> + (* Merge our props and default props. *) + let props = + SMap.merge + (fun _ p1 p2 -> + let p1 = get_prop config_reason p1 config_dict in + let p2 = get_prop defaults_reason p2 defaults_dict in + match (p1, p2) with + | (None, None) -> None + | (Some (t, _), None) -> Some (Field (None, t, prop_polarity)) + | (None, Some (t, _)) -> Some (Field (None, t, prop_polarity)) + (* If a property is defined in both objects, and the first property's + * type includes void then we want to replace every occurrence of void + * with the second property's type. This is consistent with the behavior + * of function default arguments. If you call a function, `f`, like: + * `f(undefined)` and there is a default value for the first argument, + * then we will ignore the void type and use the type for the default + * parameter instead. *) + | (Some (t1, _), Some (t2, _)) -> + (* Use CondT to replace void with t1. 
*) + let t = + Tvar.mk_where cx reason (fun tvar -> + rec_flow + cx + trace + ( filter_optional cx ~trace reason t1, + CondT (reason, None, t2, tvar) )) + in + Some (Field (None, t, prop_polarity))) + config_props + defaults_props + in + (* Merge the dictionary from our config with the defaults dictionary. *) + let dict = + Option.merge config_dict defaults_dict (fun d1 d2 -> + { + dict_name = None; + key = UnionT (reason, UnionRep.make d1.key d2.key []); + value = + UnionT + ( reason, + UnionRep.make + (read_dict config_reason d1) + (read_dict defaults_reason d2) + [] ); + dict_polarity = prop_polarity; + }) + in + (* React freezes the config so we set the frozen flag to true. The + * final object is only exact if both the config and defaults objects + * are exact. *) + let flags = + { + frozen = true; + sealed = Sealed; + exact = + config_flags.exact + && defaults_flags.exact + && Obj_type.sealed_in_op reason config_flags.sealed + && Obj_type.sealed_in_op reason defaults_flags.sealed; + } + in + (props, dict, flags) + (* Otherwise turn our slice props map into an object props. *) + | None -> + (* All of the fields are read-only so we create positive fields. *) + let props = SMap.map (fun (t, _) -> Field (None, t, prop_polarity)) config_props in + (* Create a new dictionary from our config's dictionary with a + * positive polarity. *) + let dict = + Option.map config_dict (fun d -> + { + dict_name = None; + key = d.key; + value = d.value; + dict_polarity = prop_polarity; + }) + in + (* React freezes the config so we set the frozen flag to true. The + * final object is only exact if the config object is exact. *) + let flags = + { + frozen = true; + sealed = Sealed; + exact = config_flags.exact && Obj_type.sealed_in_op reason config_flags.sealed; + } + in + (props, dict, flags) + in + let call = None in + (* Finish creating our props object. *) + let id = Context.generate_property_map cx props in + let proto = ObjProtoT reason in + let t = + DefT (reason, bogus_trust (), ObjT (mk_objecttype ~flags ~dict ~call id proto)) + in + if flags.exact then + ExactT (reason, t) + else + t + in + fun state cx trace use_op reason tout x -> + match state with + (* If we have some type for default props then we need to wait for that + * type to resolve before finishing our props type. *) + | Config { defaults = Some t; children } -> + let tool = Resolve Next in + let state = Defaults { config = x; children } in + rec_flow cx trace (t, ObjKitT (use_op, reason, tool, ReactConfig state, tout)) + (* If we have no default props then finish our object and flow it to our + * tout type. *) + | Config { defaults = None; children } -> + let ts = Nel.map (fun x -> finish cx trace reason x None children) x in + let t = + match ts with + | (t, []) -> t + | (t0, t1 :: ts) -> UnionT (reason, UnionRep.make t0 t1 ts) + in + rec_flow cx trace (t, UseT (use_op, tout)) + (* If we had default props and those defaults resolved then finish our + * props object with those default props. 
*) + | Defaults { config; children } -> + let ts = + Nel.map_concat + (fun c -> Nel.map (fun d -> finish cx trace reason c (Some d) children) x) + config + in + let t = + match ts with + | (t, []) -> t + | (t0, t1 :: ts) -> UnionT (reason, UnionRep.make t0 t1 ts) + in + rec_flow cx trace (t, UseT (use_op, tout))) + in + (*********************) + (* Object Resolution *) + (*********************) + let next = function + | Spread (options, state) -> object_spread options state + | Rest (options, state) -> object_rest options state + | ReactConfig state -> react_config state + | ReadOnly -> object_read_only + | ObjectRep -> object_rep + in + (* Intersect two object slices: slice * slice -> slice + * + * In general it is unsound to combine intersection types, but since object + * kit utilities never write to their arguments, it is safe in this specific + * case. + * + * {...{p:T}&{q:U}} = {...{p:T,q:U}} + * {...{p:T}&{p:U}} = {...{p:T&U}} + * {...A&(B|C)} = {...(A&B)|(A&C)} + * {...(A|B)&C} = {...(A&C)|(B&C)} + *) + let intersect2 + reason + { Object.reason = r1; props = props1; dict = dict1; flags = flags1 } + { Object.reason = r2; props = props2; dict = dict2; flags = flags2 } = + let intersection t1 t2 = IntersectionT (reason, InterRep.make t1 t2 []) in + let merge_props (t1, own1) (t2, own2) = + let (t1, t2, opt) = + match (t1, t2) with + | (OptionalT (_, t1), OptionalT (_, t2)) -> (t1, t2, true) + | (OptionalT (_, t1), t2) + | (t1, OptionalT (_, t2)) + | (t1, t2) -> + (t1, t2, false) + in + let t = intersection t1 t2 in + let t = + if opt then + optional t + else + t + in + (t, own1 || own2) + in + let props = + SMap.merge + (fun _ p1 p2 -> + let read_dict r d = (optional (read_dict r d), true) in + match (p1, p2) with + | (None, None) -> None + | (Some p1, Some p2) -> Some (merge_props p1 p2) + | (Some p1, None) -> + (match dict2 with + | Some d2 -> Some (merge_props p1 (read_dict r2 d2)) + | None -> Some p1) + | (None, Some p2) -> + (match dict1 with + | Some d1 -> Some (merge_props (read_dict r1 d1) p2) + | None -> Some p2)) + props1 + props2 + in + let dict = + Option.merge dict1 dict2 (fun d1 d2 -> + { + dict_name = None; + key = intersection d1.key d2.key; + value = intersection (read_dict r1 d1) (read_dict r2 d2); + dict_polarity = Polarity.Neutral; + }) + in + let flags = + { + frozen = flags1.frozen || flags2.frozen; + sealed = Sealed; + exact = flags1.exact || flags2.exact; + } + in + (props, dict, flags) + in + let intersect2_with_reason reason intersection_loc x1 x2 = + let (props, dict, flags) = intersect2 reason x1 x2 in + let reason = mk_reason RObjectType intersection_loc in + Object.{ reason; props; dict; flags } + in + let resolved cx trace use_op reason resolve_tool tool tout x = + match resolve_tool with + | Next -> next tool cx trace use_op reason tout x + | List0 ((t, todo), join) -> + let resolve_tool = Resolve (List (todo, Nel.one x, join)) in + rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) + | List (todo, done_rev, join) -> + (match todo with + | [] -> + let x = + match join with + | (_, Or) -> Nel.cons x done_rev |> Nel.concat + | (loc, And) -> merge (intersect2_with_reason reason loc) x done_rev + in + next tool cx trace use_op reason tout x + | t :: todo -> + let done_rev = Nel.cons x done_rev in + let resolve_tool = Resolve (List (todo, done_rev, join)) in + rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout))) + in + let object_slice cx r id dict flags = + let props = Context.find_props cx id in + let props = 
SMap.mapi (read_prop r flags) props in + let dict = + Option.map dict (fun d -> + { + dict_name = None; + key = d.key; + value = read_dict r d; + dict_polarity = Polarity.Neutral; + }) + in + { Object.reason = r; props; dict; flags } + in + let interface_slice cx r id = + let flags = { frozen = false; exact = false; sealed = Sealed } in + let (id, dict) = + let props = Context.find_props cx id in + match (SMap.get "$key" props, SMap.get "$value" props) with + | (Some (Field (_, key, polarity)), Some (Field (_, value, polarity'))) + when polarity = polarity' -> + let props = props |> SMap.remove "$key" |> SMap.remove "$value" in + let id = Context.generate_property_map cx props in + let dict = { dict_name = None; key; value; dict_polarity = polarity } in + (id, Some dict) + | _ -> (id, None) + in + object_slice cx r id dict flags + in + let resolve cx trace use_op reason resolve_tool tool tout = function + (* We extract the props from an ObjT. *) + | DefT (r, _, ObjT { props_tmap; dict_t; Type.flags; _ }) -> + let x = Nel.one (object_slice cx r props_tmap dict_t flags) in + resolved cx trace use_op reason resolve_tool tool tout x + (* We take the fields from an InstanceT excluding methods (because methods + * are always on the prototype). We also want to resolve fields from the + * InstanceT's super class so we recurse. *) + | DefT (r, _, InstanceT (_, super, _, { own_props; inst_kind; _ })) -> + let resolve_tool = Super (interface_slice cx r own_props, resolve_tool) in + begin + match (tool, inst_kind) with + | (Spread _, InterfaceKind _) -> + add_output + cx + ~trace + (Error_message.ECannotSpreadInterface + { spread_reason = reason; interface_reason = r }); + rec_flow cx trace (AnyT.why AnyError reason, UseT (use_op, tout)) + | _ -> rec_flow cx trace (super, ObjKitT (use_op, reason, resolve_tool, tool, tout)) + end + (* Statics of a class. TODO: This logic is unfortunately duplicated from the + * top-level pattern matching against class lower bounds to object-like + * uses. This duplication should be removed. *) + | DefT (r, _, ClassT i) -> + let t = Tvar.mk cx r in + rec_flow cx trace (i, GetStaticsT (r, t)); + rec_flow cx trace (t, ObjKitT (use_op, reason, Resolve resolve_tool, tool, tout)) + (* Resolve each member of a union. *) + | UnionT (union_reason, rep) -> + let union_loc = aloc_of_reason union_reason in + let (t, todo) = UnionRep.members_nel rep in + let resolve_tool = Resolve (List0 (todo, (union_loc, Or))) in + rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) + (* Resolve each member of an intersection. *) + | IntersectionT (intersection_reason, rep) -> + let intersection_loc = aloc_of_reason intersection_reason in + let (t, todo) = InterRep.members_nel rep in + let resolve_tool = Resolve (List0 (todo, (intersection_loc, And))) in + rec_flow cx trace (t, ObjKitT (use_op, reason, resolve_tool, tool, tout)) + (* Mirroring Object.assign() and {...null} semantics, treat null/void as + * empty objects. *) + | DefT (_, _, (NullT | VoidT)) -> + let flags = { frozen = true; sealed = Sealed; exact = true } in + let x = Nel.one { Object.reason; props = SMap.empty; dict = None; flags } in + resolved cx trace use_op reason resolve_tool tool tout x + (* mixed is treated as {[string]: mixed} except in type spread, where it's treated as + * {}. Any JavaScript value may be treated as an object and so this is safe. + * + * We ought to use {} for everything since it is a more sound representation + * of `mixed` as an object. 
+ *) + | DefT (r, _, MixedT _) as t -> + let flags = { frozen = true; sealed = Sealed; exact = true } in + let x = + match tool with + | Spread _ -> Nel.one { Object.reason; props = SMap.empty; dict = None; flags } + | _ -> + Nel.one + { + Object.reason; + props = SMap.empty; + dict = + Some + { + dict_name = None; + key = StrT.make r |> with_trust bogus_trust; + value = t; + dict_polarity = Polarity.Neutral; + }; + flags; + } + in + resolved cx trace use_op reason resolve_tool tool tout x + (* If we see an empty then propagate empty to tout. *) + | DefT (r, trust, EmptyT _) -> rec_flow cx trace (EmptyT.make r trust, UseT (use_op, tout)) + (* Propagate any. *) + | AnyT (_, src) -> rec_flow cx trace (AnyT.why src reason, UseT (use_op, tout)) + (* Other types have reasonable object representations that may be added as + * new uses of the object kit resolution code is found. *) + | t -> + add_output + cx + ~trace + (Error_message.EInvalidObjectKit { reason = reason_of_t t; reason_op = reason; use_op }) + in + let super cx trace use_op reason resolve_tool tool tout acc = function + | DefT (r, _, InstanceT (_, super, _, { own_props; _ })) -> + let slice = interface_slice cx r own_props in + let acc = intersect2 reason acc slice in + let acc = + let (props, dict, flags) = acc in + { Object.reason; props; dict; flags } + in + let resolve_tool = Super (acc, resolve_tool) in + rec_flow cx trace (super, ObjKitT (use_op, reason, resolve_tool, tool, tout)) + | AnyT _ -> rec_flow cx trace (AnyT.untyped reason, UseT (use_op, tout)) + | _ -> next tool cx trace use_op reason tout (Nel.one acc) + in + fun cx trace ~use_op reason resolve_tool tool tout l -> + match resolve_tool with + | Resolve resolve_tool -> resolve cx trace use_op reason resolve_tool tool tout l + | Super (acc, resolve_tool) -> super cx trace use_op reason resolve_tool tool tout acc l) +end diff --git a/src/typing/partition.mli b/src/typing/partition.mli index ca30896a3f4..b735f48ef03 100644 --- a/src/typing/partition.mli +++ b/src/typing/partition.mli @@ -1,14 +1,20 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) type 'a t + val empty : ('a -> Type.t) -> 'a t + val cell : 'a -> 'a t -> 'a list + val mem : 'a -> 'a t -> 'a + val add : 'a -> 'a t -> 'a t + val is_discrete : 'a t -> bool + val from : ('a -> Type.t) -> 'a list -> 'a t diff --git a/src/typing/polarity/dune b/src/typing/polarity/dune new file mode 100644 index 00000000000..a5b5e52f768 --- /dev/null +++ b/src/typing/polarity/dune @@ -0,0 +1,4 @@ +(library + (name flow_typing_polarity) + (wrapped false) +) diff --git a/src/typing/polarity/polarity.ml b/src/typing/polarity/polarity.ml new file mode 100644 index 00000000000..fc0a4b931c2 --- /dev/null +++ b/src/typing/polarity/polarity.ml @@ -0,0 +1,45 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t = + | Negative + | Neutral + | Positive + +(* Subtype relation for polarities, interpreting neutral as positive & + negative: whenever compat(p1,p2) holds, things that have polarity p1 can + appear in positions that have polarity p2. 
*) +let compat = function + | (Positive, Positive) + | (Negative, Negative) + | (Neutral, _) -> + true + | _ -> false + +let inv = function + | Positive -> Negative + | Negative -> Positive + | Neutral -> Neutral + +let mult = function + | (Positive, Positive) -> Positive + | (Negative, Negative) -> Positive + | (Neutral, _) + | (_, Neutral) -> + Neutral + | _ -> Negative + +(* printer *) +let string = function + | Positive -> "covariant" + | Negative -> "contravariant" + | Neutral -> "invariant" + +let sigil = function + | Positive -> "+" + | Negative -> "-" + | Neutral -> "" diff --git a/src/typing/polarity/polarity.mli b/src/typing/polarity/polarity.mli new file mode 100644 index 00000000000..6cd05a5c156 --- /dev/null +++ b/src/typing/polarity/polarity.mli @@ -0,0 +1,21 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t = + | Negative + | Neutral + | Positive + +val compat : t * t -> bool + +val inv : t -> t + +val mult : t * t -> t + +val string : t -> string + +val sigil : t -> string diff --git a/src/typing/query_types.ml b/src/typing/query_types.ml index d158bb6e789..64ef3352db5 100644 --- a/src/typing/query_types.ml +++ b/src/typing/query_types.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,6 +7,7 @@ module Ast = Flow_ast open Typed_ast_utils +open Utils_js (*****************) (* Query/Suggest *) @@ -35,121 +36,154 @@ open Typed_ast_utils *) type result = -| FailureNoMatch -| FailureUnparseable of Loc.t * Type.t * string -| Success of Loc.t * Ty.t - -let sort_loc_pairs pair_list = - List.sort (fun (a, _) (b, _) -> Loc.compare a b) pair_list - -let types_in_file ~full_cx ~file ~file_sig ~expand_aliases ~type_table typed_ast = - let options = { - Ty_normalizer_env. 
- fall_through_merged = false; - expand_internal_types = false; - expand_type_aliases = expand_aliases; - flag_shadowed_type_params = false; - } in - let type_scheme_list = Typed_ast_utils.typed_ast_to_list typed_ast in - let genv = Ty_normalizer_env.mk_genv ~full_cx ~file ~file_sig ~type_table in - let ty_list = Ty_normalizer.from_schemes - ~options - ~genv - type_scheme_list + | FailureNoMatch + | FailureUnparseable of Loc.t * Type.t * string + | Success of Loc.t * Ty.t + +let concretize_loc_pairs pair_list = + Core_list.map ~f:(fun (loc, x) -> (ALoc.to_loc_exn loc, x)) pair_list + +let sort_loc_pairs pair_list = List.sort (fun (a, _) (b, _) -> Loc.compare a b) pair_list + +let type_of_scheme ~options ~full_cx ~file ~file_sig typed_ast loc scheme = + let genv = Ty_normalizer_env.mk_genv ~full_cx ~file ~file_sig ~typed_ast in + match Ty_normalizer.from_scheme ~options ~genv scheme with + | Ok ty -> Success (loc, ty) + | Error err -> + let msg = Ty_normalizer.error_to_string err in + FailureUnparseable (loc, scheme.Type.TypeScheme.type_, msg) + +let type_at_pos_type ~full_cx ~file ~file_sig ~expand_aliases ~omit_targ_defaults ~typed_ast loc = + let options = + { + Ty_normalizer_env.fall_through_merged = false; + expand_internal_types = false; + expand_type_aliases = expand_aliases; + flag_shadowed_type_params = false; + preserve_inferred_literal_types = false; + evaluate_type_destructors = false; + optimize_types = true; + omit_targ_defaults; + merge_bot_and_any_kinds = true; + } in - List.fold_left (fun map (loc, result) -> - match result with - | Ok ty -> LocMap.add loc ty map - | Error _ -> map - ) LocMap.empty ty_list - -let type_at_pos_type ~full_cx ~file ~file_sig ~expand_aliases ~type_table ~typed_ast loc = - let options = { - Ty_normalizer_env. 
- fall_through_merged = false; - expand_internal_types = false; - expand_type_aliases = expand_aliases; - flag_shadowed_type_params = false; - } in match find_type_at_pos_annotation typed_ast loc with | None -> FailureNoMatch - | Some (loc, scheme) -> - let genv = Ty_normalizer_env.mk_genv ~full_cx ~file ~file_sig ~type_table in - (match Ty_normalizer.from_scheme ~options ~genv scheme with - | Ok ty -> Success (loc, ty) - | Error err -> - let msg = Ty_normalizer.error_to_string err in - FailureUnparseable (loc, scheme.Type.TypeScheme.type_, msg)) - -let dump_types cx file_sig ~printer = - let options = Ty_normalizer_env.default_opts in + | Some (loc, scheme) -> type_of_scheme ~options ~full_cx ~file ~file_sig typed_ast loc scheme + +let dump_types ~printer cx file_sig typed_ast = + let options = + { + Ty_normalizer_env.fall_through_merged = false; + expand_internal_types = false; + expand_type_aliases = false; + flag_shadowed_type_params = false; + preserve_inferred_literal_types = false; + evaluate_type_destructors = false; + optimize_types = true; + omit_targ_defaults = false; + merge_bot_and_any_kinds = true; + } + in let file = Context.file cx in - let type_table = Context.type_table cx in - let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~type_table ~file_sig in - let result = Ty_normalizer.from_schemes ~options ~genv - (Type_table.coverage_to_list type_table) in + let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~typed_ast ~file_sig in + let result = + Ty_normalizer.from_schemes ~options ~genv (Typed_ast_utils.typed_ast_to_list typed_ast) + in let print_ok = function - | l, Ok t -> Some (l, printer t) + | (l, Ok t) -> Some (l, printer t) | _ -> None in - sort_loc_pairs (Core_list.filter_map result ~f:print_ok) - -let is_covered = function - | Ty.Any - | Ty.Bot -> false - | _ -> true - -let covered_types cx file_sig ~should_check = - let options = { - Ty_normalizer_env. 
- fall_through_merged = true; - expand_internal_types = false; - expand_type_aliases = false; - flag_shadowed_type_params = false; - } in - let file = Context.file cx in - let type_table = Context.type_table cx in - let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~type_table ~file_sig in - let f = + Core_list.filter_map result ~f:print_ok |> concretize_loc_pairs |> sort_loc_pairs + +let covered_types ~should_check ~check_trust cx tast = + let check_trust = + if check_trust then + fun x -> + x + else + function + | Coverage_response.Tainted -> Coverage_response.Untainted + | x -> x + in + let compute_cov = if should_check then - fun acc (loc, result) -> - match result with - | Ok t -> (loc, is_covered t)::acc - | _ -> (loc, false)::acc + (new Coverage.visitor)#type_ cx %> Coverage.result_of_coverage %> check_trust else - fun acc (loc, _) -> (loc, false)::acc + fun _ -> + Coverage_response.Empty + in + let step loc t acc = (ALoc.to_loc_exn loc, compute_cov t) :: acc in + coverage_fold_tast ~f:step ~init:[] tast |> sort_loc_pairs + +let component_coverage : + full_cx:Context.t -> + (ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.Ast.program list -> + Coverage_response.file_coverage list = + Coverage_response.( + Coverage.( + let coverage_computer = new visitor in + let step cx _ t acc = + let coverage = coverage_computer#type_ cx t in + match result_of_coverage coverage with + | Uncovered -> { acc with uncovered = acc.uncovered + 1 } + | Untainted -> { acc with untainted = acc.untainted + 1 } + | Tainted -> { acc with tainted = acc.tainted + 1 } + | Empty -> { acc with empty = acc.empty + 1 } + in + fun ~full_cx tasts -> + let step = step full_cx in + Core_list.map ~f:(Typed_ast_utils.coverage_fold_tast ~f:step ~init:initial_coverage) tasts)) + +let suggest_types cx file_sig typed_ast loc = + let options = + { + Ty_normalizer_env.fall_through_merged = false; + expand_internal_types = false; + expand_type_aliases = false; + flag_shadowed_type_params = true; + preserve_inferred_literal_types = false; + evaluate_type_destructors = false; + optimize_types = true; + omit_targ_defaults = false; + merge_bot_and_any_kinds = true; + } in - let g x = x in - let htbl = Type_table.coverage_hashtbl (Context.type_table cx) in - let coverage = Ty_normalizer.fold_hashtbl ~options ~genv ~f ~g ~htbl [] in - sort_loc_pairs coverage - -(* 'suggest' can use as many types in the type tables as possible, which is why - we are querying the tables from both "coverage" and "type_info". Coverage - should be enough on its own, but "type_info" stores method types more - reliably. On the other hand "type_info" only stores information about - identifiers, so anonymous functions and arrows are not captured. -*) -let suggest_types cx file_sig = - let options = { - Ty_normalizer_env. 
-    fall_through_merged = false;
-    expand_internal_types = false;
-    expand_type_aliases = false;
-    flag_shadowed_type_params = true;
-  } in
-  let type_table = Context.type_table cx in
   let file = Context.file cx in
-  let genv = Ty_normalizer_env.mk_genv ~full_cx:cx ~file ~type_table ~file_sig in
-  let result = Utils_js.LocMap.empty in
-  let result = Ty_normalizer.fold_hashtbl
-    ~options ~genv
-    ~f:(fun acc (loc, t) -> Utils_js.LocMap.add loc t acc)
-    ~g:(fun t -> t)
-    ~htbl:(Type_table.coverage_hashtbl type_table) result in
-  let result = Ty_normalizer.fold_hashtbl
-    ~options ~genv
-    ~f:(fun acc (loc, t) -> Utils_js.LocMap.add loc t acc)
-    ~g:(fun (_, t, _) -> t)
-    ~htbl:(Type_table.type_info_hashtbl type_table) result in
-  result
+  let aLoc = ALoc.of_loc loc in
+  match Typed_ast_utils.find_exact_match_annotation typed_ast aLoc with
+  | None -> FailureNoMatch
+  | Some scheme -> type_of_scheme ~options ~full_cx:cx ~file ~file_sig typed_ast loc scheme
+
+let insert_type_normalize
+    ~full_cx
+    ?(file = Context.file full_cx)
+    ~file_sig
+    ~expand_aliases
+    ~omit_targ_defaults
+    ~typed_ast
+    loc
+    scheme =
+  let options =
+    {
+      Ty_normalizer_env.fall_through_merged = false;
+      expand_internal_types = false;
+      expand_type_aliases = expand_aliases;
+      (* Shadowed type parameters won't be valid for type insertion *)
+      flag_shadowed_type_params = true;
+      (* Insert-Types filters out literals at the user's request.
+       * Setting this flag preserves literal information so that we later
+       * have the option of presenting it to the user in specialized types. *)
+      preserve_inferred_literal_types = true;
+      (* Utility types are not serialized, so it may be worth evaluating them away
+       * if we find them in the resulting Ty.t. The trade-off is that types might get
+       * larger. *)
+      evaluate_type_destructors = false;
+      (* optimize_types is false because Insert_types manually calls the simplifier with
+         a custom comparison operation *)
+      optimize_types = false;
+      omit_targ_defaults;
+      merge_bot_and_any_kinds = true;
+    }
+  in
+  type_of_scheme ~options ~full_cx ~file ~file_sig typed_ast loc scheme
diff --git a/src/typing/react_kit.ml b/src/typing/react_kit.ml
index b41e2c1bd18..b93436f36da 100644
--- a/src/typing/react_kit.ml
+++ b/src/typing/react_kit.ml
@@ -1,5 +1,5 @@
 (**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree. 
@@ -9,1124 +9,1281 @@ open Reason open Type open React -let run cx trace ~use_op reason_op l u - ~(add_output: Context.t -> ?trace:Trace.t -> Flow_error.error_message -> unit) - ~(reposition: Context.t -> ?trace:Trace.t -> Loc.t -> ?desc:reason_desc -> ?annot_loc:Loc.t -> Type.t -> Type.t) - ~(rec_flow: Context.t -> Trace.t -> (Type.t * Type.use_t) -> unit) - ~(rec_flow_t: Context.t -> Trace.t -> ?use_op:Type.use_op -> (Type.t * Type.t) -> unit) - ~(get_builtin: Context.t -> ?trace:Trace.t -> string -> reason -> Type.t) - ~(get_builtin_type: Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> string -> Type.t) - ~(get_builtin_typeapp: Context.t -> ?trace:Trace.t -> reason -> string -> Type.t list -> Type.t) - ~(mk_instance: Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> Type.t -> Type.t) - ~(string_key: string -> reason -> Type.t) - ~(mk_type_destructor: Context.t -> trace:Trace.t -> use_op -> reason -> t -> Type.destructor -> int -> bool * Type.t) - ~(sealed_in_op: reason -> Type.sealtype -> bool) - ~(union_of_ts: reason -> Type.t list -> Type.t) - ~(filter_maybe: Context.t -> ?trace:Trace.t -> reason -> Type.t -> Type.t) - = - let err_incompatible reason = - add_output cx ~trace (Flow_error.EReactKit - ((reason_op, reason), u, use_op)) - in - - (* ReactKit can't stall, so even if `l` is an unexpected type, we must produce - some outflow, usually some flavor of `any`, along with an error. However, - not every unexpected inflow should cause an error. For example, `any` - inflows shouldn't cause additional errors. Also, if we expect an array, but - we get one without any static information, we should fall back without - erroring. This is best-effort, after all. *) - - let coerce_object = function - | DefT (reason, ObjT { props_tmap; dict_t; flags; _ }) -> - Ok (reason, Context.find_props cx props_tmap, dict_t, flags) - | DefT (reason, AnyT) | DefT (reason, AnyObjT) -> - Error reason - | _ -> - let reason = reason_of_t l in - err_incompatible reason; - Error reason - in - - let coerce_prop_type = function - | CustomFunT (reason, ReactPropType (PropType.Primitive (required, t))) -> - let loc = aloc_of_reason reason |> ALoc.to_loc in - Ok (required, reposition cx ~trace loc t) - | DefT (reason, FunT _) as t -> - rec_flow_t cx trace (t, - get_builtin_type cx reason_op "ReactPropsCheckType"); - Error reason - | DefT (reason, AnyT) | DefT (reason, AnyFunT) -> - Error reason - | t -> - let reason = reason_of_t t in - err_incompatible reason; - Error reason - in - - let coerce_array = function - | DefT (_, ArrT (ArrayAT (_, Some ts) | TupleAT (_, ts))) -> - Ok ts - | DefT (reason, ArrT _) | DefT (reason, AnyT) -> - Error reason - | t -> - let reason = reason_of_t t in - err_incompatible reason; - Error reason - in - - (* Unlike other coercions, don't add a Flow error if the incoming type doesn't - have a singleton type representation. 
*) - let coerce_singleton = function - | DefT (reason, StrT (Literal (_, x))) -> - let reason = replace_reason_const (RStringLit x) reason in - Ok (DefT (reason, SingletonStrT x)) - - | DefT (reason, NumT (Literal (_, x))) -> - let reason = replace_reason_const (RNumberLit (snd x)) reason in - Ok (DefT (reason, SingletonNumT x)) - - | DefT (reason, BoolT (Some x)) -> - let reason = replace_reason_const (RBooleanLit x) reason in - Ok (DefT (reason, SingletonBoolT x)) - | DefT (_, NullT) | DefT (_, VoidT) as t -> - Ok t - | t -> - Error (reason_of_t t) - in - - let component_class props = - let reason = reason_of_t l in - DefT (reason, ClassT (get_builtin_typeapp cx reason - "React$Component" [props; AnyT.why reason])) +let err_incompatible + cx + trace + ~use_op + ~(add_output : Context.t -> ?trace:Trace.t -> Error_message.t -> unit) + reason + tool = + React.( + let err = + match tool with + | GetProps _ + | GetConfig _ + | GetRef _ + | CreateElement0 _ + | CreateElement _ + | ConfigCheck _ -> + Error_message.ENotAReactComponent { reason; use_op } + | GetConfigType _ -> Error_message.EInvalidReactConfigType { reason; use_op } + | SimplifyPropType (tool, _) -> Error_message.EInvalidReactPropType { reason; use_op; tool } + | CreateClass (tool, _, _) -> Error_message.EInvalidReactCreateClass { reason; use_op; tool } + in + add_output cx ~trace err) + +let component_class + cx + reason + ~(get_builtin_typeapp : + Context.t -> ?trace:Trace.t -> reason -> string -> Type.t list -> Type.t) + props = + DefT + ( reason, + bogus_trust (), + ClassT (get_builtin_typeapp cx reason "React$Component" [props; Tvar.mk cx reason]) ) + +let get_intrinsic + cx + trace + component + ~reason_op + artifact + literal + prop + ~rec_flow + ~(get_builtin_type : + Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> string -> Type.t) = + let reason = reason_of_t component in + (* Get the internal $JSXIntrinsics map. *) + let intrinsics = + let reason = mk_reason (RType "$JSXIntrinsics") (loc_of_t component) in + get_builtin_type cx ~trace reason "$JSXIntrinsics" in - - (* We create our own FunT instead of using - * React$StatelessFunctionalComponent in the same way as for class components - * because there seems to be a bug where reasons get mixed up when this - * function is called multiple times *) - let component_function ?(with_return_t=true) props = - let reason = replace_reason_const RReactSFC reason_op in - let any = DefT (reason_op, AnyT) in - DefT (reason, FunT ( - any, - any, - { - this_t = any; - params = [(None, props)]; - rest_param = Some (None, aloc_of_reason reason_op |> ALoc.to_loc, any); - return_t = if with_return_t - then get_builtin_type cx reason_op "React$Node" - else any; - closure_t = 0; - is_predicate = false; - changeset = Changeset.empty; - def_reason = reason_op; - } - )) + (* Create a use_op for the upcoming operations. *) + let use_op = + Op + (ReactGetIntrinsic + { + literal = + (match literal with + | Literal (_, name) -> replace_desc_reason (RIdentifier name) reason + | _ -> reason); + }) in - - let get_intrinsic artifact literal prop = - let reason = reason_of_t l in - (* Get the internal $JSXIntrinsics map. *) - let intrinsics = - let reason = mk_reason (RType "$JSXIntrinsics") (loc_of_t l) in - get_builtin_type cx ~trace reason "$JSXIntrinsics" - in - (* Create a use_op for the upcoming operations. 
*) - let use_op = Op (ReactGetIntrinsic { - literal = (match literal with - | Literal (_, name) -> replace_reason_const (RIdentifier name) reason - | _ -> reason); - }) in - (* GetPropT with a non-literal when there is not a dictionary will propagate - * any. Run the HasOwnPropT check to give the user an error if they use a - * non-literal without a dictionary. *) - (match literal with - | Literal _ -> () - | _ -> rec_flow cx trace (intrinsics, HasOwnPropT (use_op, reason, literal))); - (* Create a type variable which will represent the specific intrinsic we - * find in the intrinsics map. *) - let intrinsic = Tvar.mk cx reason in - (* Get the intrinsic from the map. *) - rec_flow cx trace (intrinsics, GetPropT (use_op, reason, (match literal with - | Literal (_, name) -> - Named (replace_reason_const (RReactElement (Some name)) reason, name) - | _ -> Computed l - ), intrinsic)); - (* Get the artifact from the intrinsic. *) - let propref = - let name = match artifact with + (* GetPropT with a non-literal when there is not a dictionary will propagate + * any. Run the HasOwnPropT check to give the user an error if they use a + * non-literal without a dictionary. *) + (match literal with + | Literal _ -> () + | _ -> rec_flow cx trace (intrinsics, HasOwnPropT (use_op, reason, literal))); + + (* Create a type variable which will represent the specific intrinsic we + * find in the intrinsics map. *) + let intrinsic = Tvar.mk cx reason in + (* Get the intrinsic from the map. *) + rec_flow + cx + trace + ( intrinsics, + GetPropT + ( use_op, + reason, + (match literal with + | Literal (_, name) -> + Named (replace_desc_reason (RReactElement (Some name)) reason, name) + | _ -> Computed component), + intrinsic ) ); + + (* Get the artifact from the intrinsic. *) + let propref = + let name = + match artifact with | `Props -> "props" | `Instance -> "instance" - in - Named (replace_reason_const (RCustom name) reason_op, name) in - (* TODO: if intrinsic is null, we will treat it like prototype termination, - * but we should error like a GetPropT would instead. *) - rec_flow cx trace (intrinsic, LookupT ( - reason_op, - Strict reason_op, - [], - propref, - LookupProp (unknown_use, prop) - )) + Named (replace_desc_reason (RCustom name) reason_op, name) in - - (* This function creates a constraint *from* tin *to* props so that props is - * an upper bound on tin. This is important because when the type of a - * component's props is inferred (such as when a stateless functional - * component has an unannotated props argument) we want to create a constraint - * *from* the props input *to* tin which should then be propagated to the - * inferred props type. *) - let tin_to_props tin = - let component = l in - match component with - (* Class components or legacy components. *) - | DefT (_, ClassT _) -> - (* The Props type parameter is invariant, but we only want to create a - * constraint tin <: props. *) - let props = Tvar.mk cx reason_op in - rec_flow_t cx trace (tin, props); - rec_flow_t cx trace (component, component_class props) - - (* Stateless functional components. *) - | DefT (_, FunT _) -> - (* This direction works because function arguments are flowed in the - * opposite direction. *) - rec_flow_t cx trace (component, component_function tin) - - (* Stateless functional components, again. This time for callable `ObjT`s. *) - | DefT (_, ObjT { call_t = Some _; _ }) -> - (* This direction works because function arguments are flowed in the - * opposite direction. 
*) - rec_flow_t cx trace (component, component_function tin) - - (* Intrinsic components. *) - | DefT (_, StrT lit) -> get_intrinsic `Props lit (Field (None, tin, Negative)) - - (* any and any specializations *) - | DefT (reason, (AnyT | AnyObjT | AnyFunT)) -> - rec_flow_t cx trace (tin, AnyT.why reason) - - (* ...otherwise, error. *) - | _ -> err_incompatible (reason_of_t component) - in - - let props_to_tout tout = - let component = l in - match component with - (* Class components or legacy components. *) - | DefT (_, ClassT _) -> - let props = Tvar.mk cx reason_op in - rec_flow_t cx trace (props, tout); - rec_flow_t cx trace (component, component_class props) - - (* Stateless functional components. *) - | DefT (_, FunT _) -> - (* This direction works because function arguments are flowed in the - * opposite direction. *) - rec_flow_t cx trace (component_function ~with_return_t:false tout, component) - - (* Stateless functional components, again. This time for callable `ObjT`s. *) - | DefT (_, ObjT { call_t = Some _; _ }) -> - (* This direction works because function arguments are flowed in the - * opposite direction. *) - rec_flow_t cx trace (component_function ~with_return_t:false tout, component) - - (* Special case for intrinsic components. *) - | DefT (_, StrT lit) -> get_intrinsic `Props lit (Field (None, tout, Positive)) - - (* any and any specializations *) - | DefT (reason, (AnyT | AnyObjT | AnyFunT)) -> - rec_flow_t cx trace (AnyT.why reason, tout) - - (* ...otherwise, error. *) - | _ -> err_incompatible (reason_of_t component) - in - - (* Get a type for the default props of a component. If a component has no - * default props then either the type will be Some void or we will - * return None. *) - let get_defaults () = - let component = l in - match component with - | DefT (_, ClassT _) - | DefT (_, FunT _) -> - Some (Tvar.mk_where cx reason_op (fun tvar -> - let name = "defaultProps" in - let reason_missing = - replace_reason_const (RMissingProperty (Some name)) reason_op in - let reason_prop = - replace_reason_const (RProperty (Some name)) reason_op in - (* NOTE: This is intentionally unsound. Function statics are modeled - * as an unsealed object and so a `GetPropT` would perform a shadow - * lookup since a write to an unsealed property may happen at any - * time. If we were to perform a shadow lookup for `defaultProps` and - * `defaultProps` was never written then our lookup would stall and - * therefore so would our props analysis. So instead we make the - * stateful assumption that `defaultProps` was already written to - * the component statics which may not always be true. *) - let strict = NonstrictReturning (Some - (DefT (reason_missing, VoidT), tvar), None) in - let propref = Named (reason_prop, name) in - let action = LookupProp (unknown_use, Field (None, tvar, Positive)) in - (* Lookup the `defaultProps` property. *) - rec_flow cx trace (component, - LookupT (reason_op, strict, [], propref, action)) - )) - (* Everything else will not have default props we should diff out. *) - | _ -> None - in - - let coerce_children_args (children, children_spread) = - match children, children_spread with - (* If we have no children and no variable spread argument then React will - * not pass in any value for children. *) - | [], None -> None - (* If we know that we have exactly one argument and no variable spread - * argument then React will pass in that single value. Notable we do not - * wrap the type in an array as React returns the single value. 
*) - | t::[], None -> Some t - (* If we have two or more known arguments and no spread argument then we - * want to create a tuple array type for our children. *) - | t::ts, None -> - (* Create a reason where the location is between our first and last known - * argument. *) - let r = mk_reason RReactChildren (match use_op with - | Op (ReactCreateElementCall {children; _}) -> children - | _ -> aloc_of_reason reason_op |> ALoc.to_loc) - in - Some (DefT (r, ArrT (ArrayAT (union_of_ts r (t::ts), Some (t::ts))))) - (* If we only have a spread of unknown length then React may not pass in - * children, React may pass in a single child, or React may pass in an array - * of children. We need to model all of these possibilities. *) - | [], Some spread -> - let r = replace_reason - (fun desc -> RReactChildrenOrUndefinedOrType desc) - (reason_of_t spread) - in - Some (DefT (r, OptionalT ( - union_of_ts r [ - spread; - (DefT (r, ArrT (ArrayAT (spread, None)))); - ] - ))) - (* If we have one children argument and a spread of unknown length then - * React may either pass in the unwrapped argument, or an array where the - * element type is the union of the known argument and the spread type. *) - | t::[], Some spread -> - (* Create a reason between our known argument and the spread argument. *) - let r = mk_reason - (RReactChildrenOrType (t |> reason_of_t |> desc_of_reason)) - (match use_op with - | Op (ReactCreateElementCall {children; _}) -> children - | _ -> aloc_of_reason reason_op |> ALoc.to_loc) - in - Some (union_of_ts r [ - t; - (DefT (r, ArrT (ArrayAT (union_of_ts r [spread; t], Some [t])))) - ]) - (* If we have two or more arguments and a spread argument of unknown length - * then we want to return an array type where the element type is the union - * of all argument types and the spread argument type. *) - | t::ts, Some spread -> - (* Create a reason between our known argument and the spread argument. *) - let r = mk_reason RReactChildren (match use_op with - | Op (ReactCreateElementCall {children; _}) -> children - | _ -> aloc_of_reason reason_op |> ALoc.to_loc) - in - Some (DefT (r, ArrT (ArrayAT (union_of_ts r (spread::t::ts), Some (t::ts))))) + (* TODO: if intrinsic is null, we will treat it like prototype termination, + * but we should error like a GetPropT would instead. *) + rec_flow + cx + trace + (intrinsic, LookupT (reason_op, Strict reason_op, [], propref, LookupProp (unknown_use, prop))) + +(* Lookup the defaultProps of a component and flow with upper depending + * on the given polarity. + *) +let lookup_defaults cx trace component ~reason_op ~rec_flow upper pole = + let name = "defaultProps" in + let reason_missing = replace_desc_reason RReactDefaultProps (reason_of_t component) in + let reason_prop = replace_desc_reason (RProperty (Some name)) reason_op in + (* NOTE: This is intentionally unsound. Function statics are modeled + * as an unsealed object and so a `GetPropT` would perform a shadow + * lookup since a write to an unsealed property may happen at any + * time. If we were to perform a shadow lookup for `defaultProps` and + * `defaultProps` was never written then our lookup would stall and + * therefore so would our props analysis. So instead we make the + * stateful assumption that `defaultProps` was already written to + * the component statics which may not always be true. 
*) + let strict = + NonstrictReturning (Some (DefT (reason_missing, bogus_trust (), VoidT), upper), None) in - - let create_element clone component config children_args tout = - (* If our config is void or null then we want to replace it with an - * empty object. *) - let config = - let reason = reason_of_t config in - let empty_object = Obj_type.mk_with_proto - cx reason - ~sealed:true ~exact:true ~frozen:true - (ObjProtoT reason) - in - Tvar.mk_where cx reason (fun tout -> - rec_flow cx trace (filter_maybe cx ~trace reason config, - CondT (reason, None, empty_object, tout)) - ) + let propref = Named (reason_prop, name) in + let action = LookupProp (unknown_use, Field (None, upper, pole)) in + (* Lookup the `defaultProps` property. *) + rec_flow cx trace (component, LookupT (reason_op, strict, [], propref, action)) + +(* Get a type for the default props of a component. If a component has no + * default props then either the type will be Some {||} or we will + * return None. *) +let get_defaults cx trace component ~reason_op ~rec_flow = + match component with + | DefT (_, _, ClassT _) + | DefT (_, _, FunT _) + | DefT (_, _, ObjT _) -> + let tvar = Tvar.mk cx reason_op in + lookup_defaults cx trace component ~reason_op ~rec_flow tvar Polarity.Positive; + Some tvar + | DefT (_, _, ReactAbstractComponentT _) -> None + (* Everything else will not have default props we should diff out. *) + | _ -> None + +let props_to_tout + cx + trace + component + ~use_op + ~reason_op + ~(rec_flow_t : Context.t -> Trace.t -> ?use_op:Type.use_op -> Type.t * Type.t -> unit) + ~rec_flow + ~(get_builtin_type : + Context.t -> ?trace:Trace.t -> reason -> ?use_desc:bool -> string -> Type.t) + ~(add_output : Context.t -> ?trace:Trace.t -> Error_message.t -> unit) + u + tout = + match component with + (* Class components or legacy components. *) + | DefT (_, _, ClassT _) -> + let props = Tvar.mk cx reason_op in + rec_flow_t cx trace (props, tout); + rec_flow cx trace (component, ReactPropsToOut (reason_op, props)) + (* Stateless functional components. *) + | DefT (_, _, FunT _) + | DefT (_, _, ObjT { call_t = Some _; _ }) -> + rec_flow cx trace (component, ReactPropsToOut (reason_op, tout)) + (* Special case for intrinsic components. *) + | DefT (_, _, StrT lit) -> + get_intrinsic + cx + trace + component + ~reason_op + ~rec_flow + ~get_builtin_type + `Props + lit + (Field (None, tout, Polarity.Positive)) + (* any and any specializations *) + | AnyT (reason, src) -> rec_flow_t cx trace (AnyT.why src reason, tout) + | DefT (reason, trust, ReactAbstractComponentT _) -> + rec_flow_t cx trace (MixedT.why reason trust, tout) + (* ...otherwise, error. *) + | _ -> err_incompatible cx trace ~use_op ~add_output (reason_of_t component) u + +(* Creates the type that we expect for a React config by diffing out default + * props with ObjKitT(Rest). The config does not include types for `key` + * or `ref`. + * + * There is some duplication between the logic used here to get a config type + * and ObjKitT(ReactConfig). In create_element, we want to produce a props + * object from the config object and the defaultProps object. This way we can + * add a lower bound to components who have a type variable for props. e.g. + * + * const MyComponent = props => null; + * ; + * + * Here, MyComponent has no annotation for props so Flow must infer a type. + * However, get_config must produce a valid type from only the component type. + * + * This approach may stall if props never gets a lower bound. 
Using the result + * of get_config as an upper bound won't give props a lower bound. However, + * the places in which this approach stalls are the same places as other type + * destructor annotations. Like object spread, $Diff, and $Rest. *) +let get_config + cx + trace + component + ~use_op + ~reason_op + ~(rec_flow_t : Context.t -> Trace.t -> ?use_op:Type.use_op -> Type.t * Type.t -> unit) + ~rec_flow + ~(rec_unify : + Context.t -> Trace.t -> use_op:Type.use_op -> ?unify_any:bool -> Type.t -> Type.t -> unit) + ~get_builtin_type + ~(add_output : Context.t -> ?trace:Trace.t -> Error_message.t -> unit) + u + pole + tout = + match component with + | DefT (_, _, ReactAbstractComponentT { config; _ }) -> + let use_op = Frame (ReactGetConfig { polarity = pole }, use_op) in + begin + match pole with + | Polarity.Positive -> rec_flow_t ~use_op cx trace (config, tout) + | Polarity.Negative -> rec_flow_t ~use_op cx trace (tout, config) + | Polarity.Neutral -> rec_unify cx trace ~use_op tout config + end + | _ -> + let reason_component = reason_of_t component in + let props = + Tvar.mk_where + cx + (replace_desc_reason RReactProps reason_component) + (props_to_tout + cx + trace + component + ~use_op + ~reason_op:reason_component + ~rec_flow_t + ~rec_flow + ~get_builtin_type + ~add_output + u) + in + let defaults = get_defaults cx trace component ~reason_op ~rec_flow in + (match defaults with + | None -> rec_flow cx trace (props, UseT (use_op, tout)) + | Some defaults -> + Object.( + Object.Rest.( + let tool = Resolve Next in + let state = One defaults in + rec_flow + cx + trace + (props, ObjKitT (use_op, reason_op, tool, Rest (ReactConfigMerge pole, state), tout))))) + +module type REACT = sig + val run : Context.t -> Trace.t -> use_op:use_op -> reason -> Type.t -> Type.React.tool -> unit +end + +module Kit (Flow : Flow_common.S) : REACT = struct + include Flow + + let sealed_in_op = Obj_type.sealed_in_op + + let run cx trace ~use_op reason_op l u = + let err_incompatible reason = err_incompatible cx trace ~use_op ~add_output reason u in + (* ReactKit can't stall, so even if `l` is an unexpected type, we must produce + some outflow, usually some flavor of `any`, along with an error. However, + not every unexpected inflow should cause an error. For example, `any` + inflows shouldn't cause additional errors. Also, if we expect an array, but + we get one without any static information, we should fall back without + erroring. This is best-effort, after all. *) + let coerce_object = function + | DefT (reason, _, ObjT { props_tmap; dict_t; flags; _ }) -> + Ok (reason, Context.find_props cx props_tmap, dict_t, flags) + | AnyT (reason, _) -> Error reason + | _ -> + let reason = reason_of_t l in + err_incompatible reason; + Error reason in - (* Create the optional children input type from the children arguments. *) - let children = coerce_children_args children_args in - (* Create a type variable for our props. *) - (* If we are cloning an existing element, the config does not need to - * provide the entire props type. 
*) - let props = if clone - then ShapeT (Tvar.mk_where cx reason_op props_to_tout) - else Tvar.mk_where cx reason_op tin_to_props + let coerce_prop_type = function + | CustomFunT (reason, ReactPropType (PropType.Primitive (required, t))) -> + let loc = aloc_of_reason reason in + Ok (required, reposition cx ~trace loc t) + | DefT (reason, _, FunT _) as t -> + rec_flow_t cx trace (t, get_builtin_type cx reason_op "ReactPropsCheckType"); + Error reason + | AnyT (reason, _) -> Error reason + | t -> + let reason = reason_of_t t in + err_incompatible reason; + Error reason in - (* Check the type of React keys in the config input. - * - * NOTE: We are intentionally being unsound here. If config is inexact - * and we can't find a key prop in config then the sound thing to do - * would be to assume that the type of key is mixed. Instead we are unsound - * and don't check a type for key. Otherwise we would cause a lot of issues - * in existing React code. *) - let () = - let reason_key = - (replace_reason_const (RCustom "React key") (reason_of_t config)) in - (* Create the key type. *) - let key_t = optional (maybe (get_builtin_type cx reason_key "React$Key")) in - (* Flow the config input key type to the key type. *) - let kind = NonstrictReturning (None, None) in - let propref = Named (reason_key, "key") in - let use_op = Frame (PropertyCompatibility { - prop = Some "key"; - lower = reason_of_t config; - upper = reason_key; - is_sentinel = false; - }, use_op) in - let action = LookupProp (use_op, Field (None, key_t, Positive)) in - rec_flow cx trace (config, - LookupT (reason_key, kind, [], propref, action)) + let coerce_array = function + | DefT (_, _, ArrT (ArrayAT (_, Some ts) | TupleAT (_, ts))) -> Ok ts + | DefT (reason, _, ArrT _) + | AnyT (reason, _) -> + Error reason + | t -> + let reason = reason_of_t t in + err_incompatible reason; + Error reason in - (* Check the type of React refs in the config input. - * - * NOTE: We are intentionally being unsound here. If config is inexact - * and we can't find a ref prop in config then the sound thing to do - * would be to assume that the type of ref is mixed. Instead we are unsound - * and don't check a type for key. Otherwise we would cause a lot of issues - * in existing React code. *) - let () = - let reason_ref = - (replace_reason_const (RCustom "React ref") (reason_of_t config)) in - (* Create the ref type. *) - let ref_t = optional (maybe (get_builtin_typeapp cx reason_ref "React$Ref" [l])) in - (* Flow the config input ref type to the ref type. *) - let kind = NonstrictReturning (None, None) in - let propref = Named (reason_ref, "ref") in - let use_op = Frame (PropertyCompatibility { - prop = Some "ref"; - lower = reason_of_t config; - upper = reason_ref; - is_sentinel = false; - }, use_op) in - let action = LookupProp (use_op, Field (None, ref_t, Positive)) in - rec_flow cx trace (config, - LookupT (reason_ref, kind, [], propref, action)) + (* Unlike other coercions, don't add a Flow error if the incoming type doesn't + have a singleton type representation. 
*) + let coerce_singleton = function + | DefT (reason, trust, StrT (Literal (_, x))) -> + let reason = replace_desc_reason (RStringLit x) reason in + Ok (DefT (reason, trust, SingletonStrT x)) + | DefT (reason, trust, NumT (Literal (_, x))) -> + let reason = replace_desc_reason (RNumberLit (snd x)) reason in + Ok (DefT (reason, trust, SingletonNumT x)) + | DefT (reason, trust, BoolT (Some x)) -> + let reason = replace_desc_reason (RBooleanLit x) reason in + Ok (DefT (reason, trust, SingletonBoolT x)) + | (DefT (_, _, NullT) | DefT (_, _, VoidT)) as t -> Ok t + | t -> Error (reason_of_t t) in - (* For class components and function components we want to lookup the - * static default props property so that we may add it to our config input. *) - let defaults = get_defaults () in - (* Use object spread to add children to config (if we have children) - * and remove key and ref since we already checked key and ref. Finally in - * this block we will flow the final config to our props type. *) - let () = - let open Object in - let open Object.ReactConfig in - (* We need to treat config input as a literal here so we ensure it has the - * RReactProps reason description. *) - let reason = replace_reason_const RReactProps (reason_of_t config) in - (* Create the final config object using the ReactConfig object kit tool - * and flow it to our type for props. - * - * We wrap our use_op in a ReactConfigCheck frame to increment the - * speculation error message score. Usually we will already have a - * ReactCreateElementCall use_op, but we want errors after this point to - * win when picking the best errors speculation discovered. *) - let use_op = Frame (ReactConfigCheck, use_op) in - rec_flow cx trace (config, - ObjKitT (use_op, reason, Resolve Next, - ReactConfig (Config { defaults; children }), props)) + let get_intrinsic = get_intrinsic cx trace l ~reason_op ~rec_flow ~get_builtin_type in + (* This function creates a constraint *from* tin *to* props so that props is + * an upper bound on tin. This is important because when the type of a + * component's props is inferred (such as when a stateless functional + * component has an unannotated props argument) we want to create a constraint + * *from* the props input *to* tin which should then be propagated to the + * inferred props type. *) + let tin_to_props tin = + let component = l in + match component with + (* Class components or legacy components. *) + | DefT (_, _, ClassT _) -> + (* The Props type parameter is invariant, but we only want to create a + * constraint tin <: props. *) + let props = Tvar.mk cx reason_op in + rec_flow_t cx trace (tin, props); + rec_flow cx trace (component, ReactInToProps (reason_op, props)) + (* Stateless functional components. *) + | DefT (_, _, FunT _) + (* Stateless functional components, again. This time for callable `ObjT`s. *) + + | DefT (_, _, ObjT { call_t = Some _; _ }) -> + rec_flow cx trace (component, ReactInToProps (reason_op, tin)) + (* Abstract components. *) + | DefT (reason, trust, ReactAbstractComponentT _) -> + rec_flow_t cx trace (tin, MixedT.why reason trust) + (* Intrinsic components. *) + | DefT (_, _, StrT lit) -> get_intrinsic `Props lit (Field (None, tin, Polarity.Negative)) + | AnyT (reason, source) -> rec_flow_t cx trace (tin, AnyT.why source reason) + (* ...otherwise, error. *) + | _ -> err_incompatible (reason_of_t component) in - (* Set the return type as a React element. 
*) - let elem_reason = annot_reason (replace_reason_const (RType "React$Element") reason_op) in - rec_flow_t cx trace ( - get_builtin_typeapp cx ~trace elem_reason "React$Element" [component], - tout - ) - in - - (* Creates the type that we expect for a React config by diffing out default - * props with ObjKitT(Rest). The config does not include types for `key` - * or `ref`. - * - * There is some duplication between the logic used here to get a config type - * and ObjKitT(ReactConfig). In create_element, we want to produce a props - * object from the config object and the defaultProps object. This way we can - * add a lower bound to components who have a type variable for props. e.g. - * - * const MyComponent = props => null; - * ; - * - * Here, MyComponent has no annotation for props so Flow must infer a type. - * However, get_config must produce a valid type from only the component type. - * - * This approach may stall if props never gets a lower bound. Using the result - * of get_config as an upper bound won't give props a lower bound. However, - * the places in which this approach stalls are the same places as other type - * destructor annotations. Like object spread, $Diff, and $Rest. *) - let get_config tout = - let props = Tvar.mk_where cx reason_op props_to_tout in - let defaults = get_defaults () in - match defaults with - | None -> rec_flow cx trace (props, UseT (use_op, tout)) - | Some defaults -> - let open Object in - let open Object.Rest in - let tool = Resolve Next in - let state = One defaults in - rec_flow cx trace (props, - ObjKitT (use_op, reason_op, tool, Rest (ReactConfigMerge, state), tout)) - in - - let get_instance tout = - let component = l in - match component with - (* Class components or legacy components. *) - | DefT (_, ClassT component) -> rec_flow_t cx trace (component, tout) - - (* Stateless functional components. *) - | DefT (r, FunT _) -> - rec_flow_t cx trace (VoidT.make (replace_reason_const RVoid r), tout) - - (* Stateless functional components, again. This time for callable `ObjT`s. *) - | DefT (r, ObjT { call_t = Some _; _ }) -> - rec_flow_t cx trace (VoidT.make (replace_reason_const RVoid r), tout) - - (* Intrinsic components. *) - | DefT (_, StrT lit) -> get_intrinsic `Instance lit (Field (None, tout, Positive)) - - (* any and any specializations *) - | DefT (reason, (AnyT | AnyObjT | AnyFunT)) -> - rec_flow_t cx trace (AnyT.why reason, tout) - - (* ...otherwise, error. *) - | _ -> err_incompatible (reason_of_t component) - in - - (* In order to create a useful type from the `propTypes` property of a React - class specification, Flow needs the ReactPropType CustomFunT type. This - tool evaluates a complex prop type such that a specific CustomFunT is - returned when there is enough static information. *) - - let simplify_prop_type tout = - let resolve t = rec_flow_t cx trace ( - CustomFunT (reason_op, ReactPropType (PropType.Primitive (false, t))), - tout - ) in - - let mk_union reason = function - | [] -> DefT (replace_reason_const REmpty reason, EmptyT) - | [t] -> t - | t0::t1::ts -> - let reason = replace_reason_const RUnionType reason in - DefT (reason, UnionT (UnionRep.make t0 t1 ts)) + let props_to_tout = + props_to_tout + cx + trace + l + ~use_op + ~reason_op + ~rec_flow_t + ~rec_flow + ~get_builtin_type + ~add_output + u in - - let open SimplifyPropType in - function - | ArrayOf -> - (* TODO: Don't ignore the required flag. 
*) - let elem_t = match coerce_prop_type l with - | Ok (_required, t) -> t - | Error reason -> DefT (reason, AnyT) - in - let reason = replace_reason_const RArrayType reason_op in - let t = DefT (reason, ArrT (ArrayAT (elem_t, None))) in - resolve t - - | InstanceOf -> - let t = mk_instance cx (annot_reason reason_op) l in - resolve t - - | ObjectOf -> - (* TODO: Don't ignore the required flag. *) - let value = match coerce_prop_type l with - | Ok (_required, t) -> t - | Error reason -> DefT (reason, AnyT) - in - let props = SMap.empty in - let dict = { - dict_name = None; - key = Locationless.AnyT.t; - value; - dict_polarity = Neutral; - } in - let proto = ObjProtoT (locationless_reason RObjectClassName) in - let reason = replace_reason_const RObjectType reason_op in - let t = Obj_type.mk_with_proto cx reason ~props proto - ~dict ~sealed:true ~exact:false in - resolve t - - | OneOf tool -> - let next todo done_rev = match todo with - | [] -> - let t = mk_union reason_op (List.rev done_rev) in - resolve t - | t::todo -> - rec_flow cx trace (t, ReactKitT (unknown_use, reason_op, - SimplifyPropType (OneOf - (ResolveElem (todo, done_rev)), tout))) - in - (match tool with - | ResolveArray -> - (match coerce_array l with - | Ok todo -> next todo [] - | Error _ -> resolve (DefT (reason_op, AnyT))) - | ResolveElem (todo, done_rev) -> - (match coerce_singleton l with - | Ok t -> next todo (t::done_rev) - | Error _ -> resolve (DefT (reason_op, AnyT)))) - - | OneOfType tool -> - (* TODO: This is _very_ similar to `one_of` above. *) - let next todo done_rev = match todo with - | [] -> - let t = mk_union reason_op (List.rev done_rev) in - resolve t - | t::todo -> - rec_flow cx trace (t, ReactKitT (unknown_use, reason_op, - SimplifyPropType (OneOfType - (ResolveElem (todo, done_rev)), tout))) + let coerce_children_args (children, children_spread) = + match (children, children_spread) with + (* If we have no children and no variable spread argument then React will + * not pass in any value for children. *) + | ([], None) -> None + (* If we know that we have exactly one argument and no variable spread + * argument then React will pass in that single value. Notable we do not + * wrap the type in an array as React returns the single value. *) + | ([t], None) -> Some t + (* If we have two or more known arguments and no spread argument then we + * want to create a tuple array type for our children. *) + | (t :: ts, None) -> + (* Create a reason where the location is between our first and last known + * argument. *) + let r = + mk_reason + RReactChildren + (match use_op with + | Op (ReactCreateElementCall { children; _ }) -> children + | _ -> aloc_of_reason reason_op) + in + Some (DefT (r, bogus_trust (), ArrT (ArrayAT (union_of_ts r (t :: ts), Some (t :: ts))))) + (* If we only have a spread of unknown length then React may not pass in + * children, React may pass in a single child, or React may pass in an array + * of children. We need to model all of these possibilities. *) + | ([], Some spread) -> + let r = + update_desc_reason + (fun desc -> RReactChildrenOrUndefinedOrType desc) + (reason_of_t spread) + in + Some + (OptionalT + (r, union_of_ts r [spread; DefT (r, bogus_trust (), ArrT (ArrayAT (spread, None)))])) + (* If we have one children argument and a spread of unknown length then + * React may either pass in the unwrapped argument, or an array where the + * element type is the union of the known argument and the spread type. 
*) + | ([t], Some spread) -> + (* Create a reason between our known argument and the spread argument. *) + let r = + mk_reason + (RReactChildrenOrType (t |> reason_of_t |> desc_of_reason)) + (match use_op with + | Op (ReactCreateElementCall { children; _ }) -> children + | _ -> aloc_of_reason reason_op) + in + Some + (union_of_ts + r + [t; DefT (r, bogus_trust (), ArrT (ArrayAT (union_of_ts r [spread; t], Some [t])))]) + (* If we have two or more arguments and a spread argument of unknown length + * then we want to return an array type where the element type is the union + * of all argument types and the spread argument type. *) + | (t :: ts, Some spread) -> + (* Create a reason between our known argument and the spread argument. *) + let r = + mk_reason + RReactChildren + (match use_op with + | Op (ReactCreateElementCall { children; _ }) -> children + | _ -> aloc_of_reason reason_op) + in + Some + (DefT + (r, bogus_trust (), ArrT (ArrayAT (union_of_ts r (spread :: t :: ts), Some (t :: ts))))) + in + let config_check clone config children_args = + (* Create the optional children input type from the children arguments. *) + let children = coerce_children_args children_args in + (* Create a type variable for our props. *) + (* If we are cloning an existing element, the config does not need to + * provide the entire props type. *) + let (props, defaults) = + match l with + | DefT (_, _, ReactAbstractComponentT { config; _ }) -> + (* This is a bit of a hack. We will be passing these props and + * default props to react_config in flow_js.ml to calculate the + * config and check the passed config against it. Since our config is + * already calculated, we can pretend the props type is the config + * type and that we have no defaultProps for identical behavior. + * + * This hack is necessary because we (by design) do not calculate + * props from Config and DefaultProps. Even if we did do that, we would + * just introduce unnecessary work here-- we would calculate the props from + * the config and defaultProps just so that we could re-calculate the config + * down the line. + * + * Additionally, this hack enables us to not have to explicitly handle + * AbstractComponent past this point. *) + ( ( if clone then + ShapeT config + else + config ), + None ) + | _ -> + ( ( if clone then + ShapeT (Tvar.mk_where cx reason_op props_to_tout) + else + Tvar.mk_where cx reason_op tin_to_props ), + (* For class components and function components we want to lookup the + * static default props property so that we may add it to our config input. *) + get_defaults cx trace l ~reason_op ~rec_flow ) in - (match tool with - | ResolveArray -> - (match coerce_array l with - | Ok todo -> next todo [] - | Error _ -> resolve (DefT (reason_op, AnyT))) - | ResolveElem (todo, done_rev) -> - (* TODO: Don't ignore the required flag. *) - (match coerce_prop_type l with - | Ok (_required, t) -> next todo (t::done_rev) - | Error _ -> resolve (DefT (reason_op, AnyT)))) + (* Use object spread to add children to config (if we have children) + * and remove key and ref since we already checked key and ref. Finally in + * this block we will flow the final config to our props type. + * + * NOTE: We don't eagerly run this check so that create_element can constrain the + * ref and key pseudoprops before we run the config check. + *) + Object.( + Object.ReactConfig.( + (* We need to treat config input as a literal here so we ensure it has the + * RReactProps reason description. 
*) + let reason = replace_desc_new_reason RReactProps (reason_of_t config) in + (* Create the final config object using the ReactConfig object kit tool + * and flow it to our type for props. + * + * We wrap our use_op in a ReactConfigCheck frame to increment the + * speculation error message score. Usually we will already have a + * ReactCreateElementCall use_op, but we want errors after this point to + * win when picking the best errors speculation discovered. *) + let use_op = Frame (ReactConfigCheck, use_op) in + rec_flow + cx + trace + ( config, + ObjKitT + (use_op, reason, Resolve Next, ReactConfig (Config { defaults; children }), props) + ))) + in + let create_element clone component config children_args tout = + config_check clone config children_args; - | Shape tool -> - (* TODO: This is _very_ similar to `CreateClass.PropTypes` below, except - for reasons descriptions/locations, recursive ReactKit constraints, and - `resolve` behavior. *) - let add_prop k t (reason, props, dict, flags) = - let props = SMap.add k (Field (None, t, Neutral)) props in - reason, props, dict, flags - in - let add_dict dict (reason, props, _, flags) = - reason, props, Some dict, flags + (* If our config is void or null then we want to replace it with an + * empty object. + * + * NOTE: We only need the normalized config to look up the key + * and ref. + *) + let normalized_config = + Tvar.mk_where cx (reason_of_t config) (fun normalized_config -> + Object.( + let reason = reason_of_t config in + rec_flow + cx + trace + (config, ObjKitT (use_op, reason, Resolve Next, ObjectRep, normalized_config)))) in - let rec next todo shape = - match SMap.choose todo with - | None -> - let reason = replace_reason_const RObjectType reason_op in - let proto = ObjProtoT (locationless_reason RObjectClassName) in - let _, props, dict, _ = shape in - let t = Obj_type.mk_with_proto cx reason ~props proto - ?dict ~sealed:true ~exact:false - in - resolve t - | Some (k, p) -> - let todo = SMap.remove k todo in - match Property.read_t p with - | None -> next todo shape - | Some t -> - rec_flow cx trace (t, ReactKitT (unknown_use, reason_op, - SimplifyPropType (Shape - (ResolveProp (k, todo, shape)), tout))) + (* Check the type of React keys in the config input. + * + * NOTE: We are intentionally being unsound here. If config is inexact + * and we can't find a key prop in config then the sound thing to do + * would be to assume that the type of key is mixed. Instead we are unsound + * and don't check a type for key. Otherwise we would cause a lot of issues + * in existing React code. *) + let () = + let reason_key = + replace_desc_reason (RCustom "React key") (reason_of_t normalized_config) + in + (* Create the key type. *) + let key_t = optional (maybe (get_builtin_type cx reason_key "React$Key")) in + (* Flow the config input key type to the key type. *) + let kind = NonstrictReturning (None, None) in + let propref = Named (reason_key, "key") in + let use_op = + Frame + ( PropertyCompatibility + { prop = Some "key"; lower = reason_of_t normalized_config; upper = reason_key }, + use_op ) + in + let action = LookupProp (use_op, Field (None, key_t, Polarity.Positive)) in + rec_flow cx trace (normalized_config, LookupT (reason_key, kind, [], propref, action)) in - (match tool with - | ResolveObject -> - (match coerce_object l with - (* TODO: If the resolved object is not exact and sealed, or if it does - * not have a dictionary -- that is, it may be wider in an unknown way, - * we should error and resolve to any. 
However, since all object spreads - * are currently unsealed, we must wait for precise spread support. - * Otherwise, we will cause too many spurious errors. *) - | Ok (reason, todo, dict, flags) -> - let shape = reason, SMap.empty, None, flags in - (match dict with - | None -> next todo shape - | Some dicttype -> - rec_flow cx trace (dicttype.value, ReactKitT (unknown_use, reason_op, - SimplifyPropType (Shape - (ResolveDict (dicttype, todo, shape)), tout)))) - | Error _ -> resolve (DefT (reason_op, AnyT))) - | ResolveDict (dicttype, todo, shape) -> - let dict = match coerce_prop_type l with - | Ok (_, t) -> {dicttype with value = t} - | Error reason -> {dicttype with value = DefT (reason, AnyT)} + (* Check the type of React refs in the config input. + * + * NOTE: We are intentionally being unsound here. If config is inexact + * and we can't find a ref prop in config then the sound thing to do + * would be to assume that the type of ref is mixed. Instead we are unsound + * and don't check a type for key. Otherwise we would cause a lot of issues + * in existing React code. *) + let () = + let reason_ref = + replace_desc_reason (RCustom "React ref") (reason_of_t normalized_config) in - next todo (add_dict dict shape) - | ResolveProp (k, todo, shape) -> - let t = match coerce_prop_type l with - | Ok (required, t) -> if required then t else Type.optional t - | Error _ -> Type.optional (DefT (reason_op, AnyT)) + (* Create the ref type. *) + let ref_t = optional (maybe (get_builtin_typeapp cx reason_ref "React$Ref" [l])) in + (* Flow the config input ref type to the ref type. *) + let kind = NonstrictReturning (None, None) in + let propref = Named (reason_ref, "ref") in + let use_op = + Frame + ( PropertyCompatibility + { prop = Some "ref"; lower = reason_of_t normalized_config; upper = reason_ref }, + use_op ) in - next todo (add_prop k t shape)) - in - - let create_class knot tout = - let open CreateClass in - - let maybe_known_of_result = function - | Ok x -> Known x - | Error e -> Unknown e - in - - let map_known f = function - | Known x -> Known (f x) - | Unknown e -> Unknown e - in - - let get_prop x (_, props, dict, _) = - match SMap.get x props with - | Some _ as p -> p - | None -> - Option.map dict (fun { key; value; dict_polarity; _ } -> - rec_flow_t cx trace (string_key x reason_op, key); - Field (None, value, dict_polarity)) - in - - let read_prop x obj = Option.bind (get_prop x obj) Property.read_t in - - let read_stack x ((obj, _), _) = read_prop x obj in - - let map_spec f ((obj, spec), tail) = ((obj, f spec), tail) in - - (* This tool recursively resolves types until the spec is resolved enough to - * compute the instance type. `resolve` and `resolve_call` actually emit the - * recursive constraints. 
The latter is for `getInitialState` and - * `getDefaultProps`, where the type we want to resolve is the return type - * of the bound function call *) - - let resolve tool t = - rec_flow cx trace (t, ReactKitT (unknown_use, reason_op, - CreateClass (tool, knot, tout))) + let action = LookupProp (use_op, Field (None, ref_t, Polarity.Positive)) in + rec_flow cx trace (normalized_config, LookupT (reason_ref, kind, [], propref, action)) + in + let elem_reason = annot_reason (replace_desc_reason (RType "React$Element") reason_op) in + rec_flow_t + cx + trace + (get_builtin_typeapp cx ~trace elem_reason "React$Element" [component], tout) in - - let resolve_call this tool t = - let reason = reason_of_t t in - let return_t = Tvar.mk cx reason in - let funcall = mk_methodcalltype this None [] return_t in - rec_flow cx trace (t, CallT (unknown_use, reason, funcall)); - resolve tool return_t + let get_config = + get_config + cx + trace + l + ~use_op + ~reason_op + ~rec_flow + ~rec_flow_t + ~rec_unify + ~get_builtin_type + ~add_output + u + Polarity.Positive in - - let merge_nullable f a b = - match a, b with - | NotNull a, NotNull b -> NotNull (f a b) - | Null _, x | x, Null _ -> x + let get_config_with_props_and_defaults default_props tout = + Object.( + Object.Rest.( + let props = l in + let tool = Resolve Next in + let state = One default_props in + rec_flow + cx + trace + ( props, + ObjKitT + ( Op UnknownUse, + reason_op, + tool, + Rest (ReactConfigMerge Polarity.Neutral, state), + tout ) ))) in - - let merge_unknown f a b = - match a, b with - | Known a, Known b -> Known (f a b) - | Unknown r, _ | _, Unknown r -> Unknown r + let get_instance tout = + let component = l in + match component with + (* Class components or legacy components. *) + | DefT (_, _, ClassT component) -> rec_flow_t cx trace (component, tout) + (* Stateless functional components. *) + | DefT (r, trust, FunT _) -> + rec_flow_t cx trace (VoidT.make (replace_desc_reason RVoid r) trust, tout) + (* Stateless functional components, again. This time for callable `ObjT`s. *) + | DefT (r, trust, ObjT { call_t = Some _; _ }) -> + rec_flow_t cx trace (VoidT.make (replace_desc_reason RVoid r) trust, tout) + (* Abstract components. *) + | DefT (_, _, ReactAbstractComponentT { instance; _ }) -> rec_flow_t cx trace (instance, tout) + (* Intrinsic components. *) + | DefT (_, _, StrT lit) -> + get_intrinsic `Instance lit (Field (None, tout, Polarity.Positive)) + | AnyT (reason, source) -> rec_flow_t cx trace (AnyT.why source reason, tout) + (* ...otherwise, error. *) + | _ -> err_incompatible (reason_of_t component) in - - let merge_flags a b = - let { frozen = f1; sealed = s1; exact = e1 } = a in - let { frozen = f2; sealed = s2; exact = e2 } = b in - let frozen = f1 && f2 in - let exact = e1 && e2 in - let sealed = - let s1 = sealed_in_op reason_op s1 in - let s2 = sealed_in_op reason_op s2 in - if exact && not (s1 || s2) - then UnsealedInFile (Loc.source (aloc_of_reason reason_op |> ALoc.to_loc)) - else Sealed + (* In order to create a useful type from the `propTypes` property of a React + class specification, Flow needs the ReactPropType CustomFunT type. This + tool evaluates a complex prop type such that a specific CustomFunT is + returned when there is enough static information. 
*) + let simplify_prop_type tout = + let resolve t = + rec_flow_t + cx + trace + (CustomFunT (reason_op, ReactPropType (PropType.Primitive (false, t))), tout) in - { frozen; exact; sealed } - in - - let merge_objs (r1, ps1, dict1, flags1) (_, ps2, dict2, flags2) = - let props = SMap.union ps1 ps2 in - let dict = Option.first_some dict1 dict2 in - let flags = merge_flags flags1 flags2 in - (r1, props, dict, flags) + let mk_union reason = function + | [] -> DefT (replace_desc_reason REmpty reason, bogus_trust (), EmptyT Bottom) + | [t] -> t + | t0 :: t1 :: ts -> + let reason = replace_desc_reason RUnionType reason in + UnionT (reason, UnionRep.make t0 t1 ts) + in + SimplifyPropType.( + function + | ArrayOf -> + (* TODO: Don't ignore the required flag. *) + let elem_t = + match coerce_prop_type l with + | Ok (_required, t) -> t + | Error reason -> AnyT.make AnyError reason + in + let reason = replace_desc_reason RArrayType reason_op in + let t = DefT (reason, bogus_trust (), ArrT (ArrayAT (elem_t, None))) in + resolve t + | InstanceOf -> + let t = mk_instance cx (annot_reason reason_op) l in + resolve t + | ObjectOf -> + (* TODO: Don't ignore the required flag. *) + let value = + match coerce_prop_type l with + | Ok (_required, t) -> t + | Error reason -> AnyT.make AnyError reason + in + let props = SMap.empty in + let dict = + { + dict_name = None; + key = tout |> reason_of_t |> StrT.why |> with_trust bogus_trust; + value; + dict_polarity = Polarity.Neutral; + } + in + let proto = ObjProtoT (locationless_reason RObjectClassName) in + let reason = replace_desc_reason RObjectType reason_op in + let t = Obj_type.mk_with_proto cx reason ~props proto ~dict ~sealed:true ~exact:false in + resolve t + | OneOf tool -> + let next todo done_rev = + match todo with + | [] -> + let t = mk_union reason_op (List.rev done_rev) in + resolve t + | t :: todo -> + rec_flow + cx + trace + ( t, + ReactKitT + ( unknown_use, + reason_op, + SimplifyPropType (OneOf (ResolveElem (todo, done_rev)), tout) ) ) + in + (match tool with + | ResolveArray -> + (match coerce_array l with + | Ok todo -> next todo [] + | Error _ -> AnyT.make AnyError reason_op |> resolve) + | ResolveElem (todo, done_rev) -> + (match coerce_singleton l with + | Ok t -> next todo (t :: done_rev) + | Error _ -> AnyT.make AnyError reason_op |> resolve)) + | OneOfType tool -> + (* TODO: This is _very_ similar to `one_of` above. *) + let next todo done_rev = + match todo with + | [] -> + let t = mk_union reason_op (List.rev done_rev) in + resolve t + | t :: todo -> + rec_flow + cx + trace + ( t, + ReactKitT + ( unknown_use, + reason_op, + SimplifyPropType (OneOfType (ResolveElem (todo, done_rev)), tout) ) ) + in + (match tool with + | ResolveArray -> + (match coerce_array l with + | Ok todo -> next todo [] + | Error _ -> AnyT.make AnyError reason_op |> resolve) + | ResolveElem (todo, done_rev) -> + (* TODO: Don't ignore the required flag. *) + (match coerce_prop_type l with + | Ok (_required, t) -> next todo (t :: done_rev) + | Error _ -> AnyT.make AnyError reason_op |> resolve)) + | Shape tool -> + (* TODO: This is _very_ similar to `CreateClass.PropTypes` below, except + for reasons descriptions/locations, recursive ReactKit constraints, and + `resolve` behavior. 
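For reference, the `next todo done_rev` helpers under `OneOf` and `OneOfType` above are instances of a plain accumulate-and-reverse traversal; the only twist in the real code is that each element is resolved through a recursive `ReactKitT` flow rather than a direct call. A minimal standalone sketch of the same shape, where the hypothetical `resolve_elem` stands in for that flow:

  (* Accumulate results in reverse order, then flip once at the end.
     [resolve_elem] is a placeholder for the per-element resolution step. *)
  let simplify_all resolve_elem todo =
    let rec next todo done_rev =
      match todo with
      | [] -> List.rev done_rev
      | t :: rest -> next rest (resolve_elem t :: done_rev)
    in
    next todo []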
*) + let add_prop k t (reason, props, dict, flags) = + let props = SMap.add k (Field (None, t, Polarity.Neutral)) props in + (reason, props, dict, flags) + in + let add_dict dict (reason, props, _, flags) = (reason, props, Some dict, flags) in + let rec next todo shape = + match SMap.choose todo with + | None -> + let reason = replace_desc_reason RObjectType reason_op in + let proto = ObjProtoT (locationless_reason RObjectClassName) in + let (_, props, dict, _) = shape in + let t = + Obj_type.mk_with_proto cx reason ~props proto ?dict ~sealed:true ~exact:false + in + resolve t + | Some (k, p) -> + let todo = SMap.remove k todo in + (match Property.read_t p with + | None -> next todo shape + | Some t -> + rec_flow + cx + trace + ( t, + ReactKitT + ( unknown_use, + reason_op, + SimplifyPropType (Shape (ResolveProp (k, todo, shape)), tout) ) )) + in + (match tool with + | ResolveObject -> + (match coerce_object l with + (* TODO: If the resolved object is not exact and sealed, or if it does + * not have a dictionary -- that is, it may be wider in an unknown way, + * we should error and resolve to any. However, since all object spreads + * are currently unsealed, we must wait for precise spread support. + * Otherwise, we will cause too many spurious errors. *) + | Ok (reason, todo, dict, flags) -> + let shape = (reason, SMap.empty, None, flags) in + (match dict with + | None -> next todo shape + | Some dicttype -> + rec_flow + cx + trace + ( dicttype.value, + ReactKitT + ( unknown_use, + reason_op, + SimplifyPropType (Shape (ResolveDict (dicttype, todo, shape)), tout) ) )) + | Error _ -> AnyT.make AnyError reason_op |> resolve) + | ResolveDict (dicttype, todo, shape) -> + let dict = + match coerce_prop_type l with + | Ok (_, t) -> { dicttype with value = t } + | Error reason -> { dicttype with value = AnyT.make AnyError reason } + in + next todo (add_dict dict shape) + | ResolveProp (k, todo, shape) -> + let t = + match coerce_prop_type l with + | Ok (required, t) -> + if required then + t + else + Type.optional ?annot_loc:(annot_aloc_of_reason @@ reason_of_t t) t + | Error _ -> AnyT.make AnyError reason_op |> Type.optional + in + next todo (add_prop k t shape))) in - - (* When a type is resolved, we move on to the next field. If the field is - * not found on the spec, we skip ahead. Otherwise we emit a constraint to - * resolve the property type. *) - - let rec on_resolve_spec stack = - match read_stack "mixins" stack with - | None -> on_resolve_mixins stack - | Some t -> resolve (Mixins stack) t - - and on_resolve_mixins stack = - match read_stack "statics" stack with - | None -> on_resolve_statics stack - | Some t -> resolve (Statics stack) t - - and on_resolve_statics stack = - match read_stack "propTypes" stack with - | None -> on_resolve_prop_types stack - | Some t -> resolve (PropTypes (stack, ResolveObject)) t - - and on_resolve_prop_types stack = - match stack with - | (_, spec), [] -> - (* Done resolving class spec and mixin specs. 
*) - on_resolve_default_props None spec.get_default_props; - on_resolve_initial_state None spec.get_initial_state; - mk_class spec - | (_, mixin), ((obj, spec), todo, mixins_rev)::stack' -> - (* Done resolving a mixin *) - let mixins_rev = Known mixin :: mixins_rev in - match todo with - | [] -> - (* No more mixins, resume parent stack with accumulated mixins *) - let stack = (obj, flatten_mixins mixins_rev spec), stack' in - on_resolve_mixins stack - | t::todo -> - (* Resolve next mixin in parent's mixin list *) - let stack' = ((obj, spec), todo, mixins_rev)::stack' in - resolve (Spec stack') t - - and on_resolve_default_props acc = function - | [] -> - let t = match acc with - | None -> - let reason = replace_reason_const RReactDefaultProps reason_op in - Obj_type.mk cx reason - | Some (Unknown reason) -> DefT (reason, AnyObjT) - | Some (Known (reason, props, dict, _)) -> - Obj_type.mk_with_proto cx reason ~props (ObjProtoT reason) - ?dict ~sealed:true ~exact:false + let create_class knot tout = + CreateClass.( + let maybe_known_of_result = function + | Ok x -> Known x + | Error e -> Unknown e in - rec_flow_t cx trace (t, knot.default_t) - | t::todo -> - let tool = DefaultProps (todo, acc) in - resolve_call knot.static tool t - - and on_resolve_initial_state acc = function - | [] -> - let t = match acc with - | None -> - let reason = replace_reason_const RReactState reason_op in - Obj_type.mk cx reason - | Some (Unknown reason) -> DefT (reason, AnyObjT) - | Some (Known (Null reason)) -> DefT (reason, NullT) - | Some (Known (NotNull (reason, props, dict, { exact; sealed; _ }))) -> - let sealed = not (exact && sealed_in_op reason_op sealed) in - Obj_type.mk_with_proto cx reason ~props (ObjProtoT reason) - ?dict ~sealed ~exact + let map_known f = function + | Known x -> Known (f x) + | Unknown e -> Unknown e in - rec_flow_t cx trace (t, knot.state_t) - | t::todo -> - let tool = InitialState (todo, acc) in - resolve_call knot.this tool t - - and flatten_mixins mixins_rev spec = - List.fold_right (fun mixin acc -> - match mixin with - | Known spec -> - merge_specs acc spec - | Unknown reason -> - {acc with unknown_mixins = reason::acc.unknown_mixins} - ) mixins_rev spec - - and merge_statics = - Option.merge ~f:(merge_unknown merge_objs) - - and merge_prop_types = - Option.merge ~f:(merge_unknown merge_objs) - - and merge_default_props = - Option.merge ~f:(merge_unknown merge_objs) - - and merge_initial_state = - Option.merge ~f:(merge_unknown (merge_nullable merge_objs)) - - and merge_specs a b = { - obj = merge_objs a.obj b.obj; - statics = merge_statics a.statics b.statics; - prop_types = merge_prop_types a.prop_types b.prop_types; - get_default_props = a.get_default_props @ b.get_default_props; - get_initial_state = a.get_initial_state @ b.get_initial_state; - unknown_mixins = a.unknown_mixins @ b.unknown_mixins; - } - - and mk_class spec = - (* If the component doesn't specify propTypes, allow anything. 
To be - stricter, we could use an empty object type, but that would require all - components to specify propTypes *) - let props_t = match spec.prop_types with - | None -> DefT (reason_op, AnyObjT) - | Some (Unknown reason) -> DefT (reason, AnyObjT) - | Some (Known (reason, props, dict, _)) -> - Obj_type.mk_with_proto cx reason ~props (ObjProtoT reason) - ?dict ~sealed:true ~exact:false - in - let props_t = - mod_reason_of_t (replace_reason_const RReactPropTypes) props_t - in - - let props = - SMap.empty - |> SMap.add "props" (Field (None, props_t, Neutral)) - |> SMap.add "state" (Field (None, knot.state_t, Neutral)) - in - - (* Some spec fields are used to create the instance type, but are not - present on the resulting prototype or statics. Other spec fields should - become static props. Everything else should be on the prototype. *) - let _, spec_props, _, _ = spec.obj in - let props, static_props = SMap.fold (fun k v (props, static_props) -> - match k with - | "autobind" - | "mixins" - | "statics" -> - props, static_props - - | "childContextTypes" - | "contextTypes" - | "displayName" - | "getDefaultProps" - | "propTypes" -> - props, SMap.add k v static_props - - (* Don't autobind ReactClassInterface props, like getInitialState. - Instead, call with the correct this when resolving types. *) - | "getInitialState" - | "getChildContext" - | "render" - | "componentWillMount" - | "componentDidMount" - | "componentWillReceiveProps" - | "shouldComponentUpdate" - | "componentWillUpdate" - | "componentDidUpdate" - | "componentWillUnmount" - | "updateComponent" -> - let loc = Property.read_loc v in - let v = match Property.read_t v with - | None -> v - | Some t -> - (* Tie the `this` knot with BindT *) - let dummy_return = DefT (reason_op, AnyT) in - let calltype = mk_methodcalltype knot.this None [] dummy_return in - rec_flow cx trace (t, BindT (unknown_use, reason_op, calltype, true)); - (* Because we are creating an instance type, which can be used as an - upper bound (e.g., as a super class), it's more flexible to - create covariant methods. Otherwise, a subclass could not - override the `render` method, say. *) - Method (loc, t) + let get_prop x (_, props, dict, _) = + match SMap.get x props with + | Some _ as p -> p + | None -> + Option.map dict (fun { key; value; dict_polarity; _ } -> + rec_flow_t cx trace (string_key x reason_op, key); + Field (None, value, dict_polarity)) + in + let read_prop x obj = Option.bind (get_prop x obj) Property.read_t in + let read_stack x ((obj, _), _) = read_prop x obj in + let map_spec f ((obj, spec), tail) = ((obj, f spec), tail) in + (* This tool recursively resolves types until the spec is resolved enough to + * compute the instance type. `resolve` and `resolve_call` actually emit the + * recursive constraints. 
The latter is for `getInitialState` and + * `getDefaultProps`, where the type we want to resolve is the return type + * of the bound function call *) + let resolve tool t = + rec_flow cx trace (t, ReactKitT (unknown_use, reason_op, CreateClass (tool, knot, tout))) + in + let resolve_call this tool t = + let reason = reason_of_t t in + let return_t = Tvar.mk cx reason in + let funcall = mk_methodcalltype this None [] return_t in + rec_flow cx trace (t, CallT (unknown_use, reason, funcall)); + resolve tool return_t + in + let merge_nullable f a b = + match (a, b) with + | (NotNull a, NotNull b) -> NotNull (f a b) + | (Null _, x) + | (x, Null _) -> + x + in + let merge_unknown f a b = + match (a, b) with + | (Known a, Known b) -> Known (f a b) + | (Unknown r, _) + | (_, Unknown r) -> + Unknown r + in + let merge_flags a b = + let { frozen = f1; sealed = s1; exact = e1 } = a in + let { frozen = f2; sealed = s2; exact = e2 } = b in + let frozen = f1 && f2 in + let exact = e1 && e2 in + let sealed = + let s1 = sealed_in_op reason_op s1 in + let s2 = sealed_in_op reason_op s2 in + if exact && not (s1 || s2) then + UnsealedInFile (ALoc.source (aloc_of_reason reason_op)) + else + Sealed in - SMap.add k v props, static_props - - | _ -> - let bound_v = Property.map_t (fun t -> - let use_op = unknown_use in - let destructor = Bind knot.this in - let id = mk_id () in - ignore (mk_type_destructor cx ~trace use_op reason_op t destructor id); - EvalT (t, TypeDestructorT (use_op, reason_op, destructor), id) - ) v in - SMap.add k bound_v props, static_props - ) spec_props (props, SMap.empty) in - - let static_props = static_props - |> SMap.add "defaultProps" (Field (None, knot.default_t, Neutral)) - in - - let reason_component = replace_reason_const RReactComponent reason_op in - - let super = - let reason = replace_reason (fun x -> RSuperOf x) reason_component in - let c = get_builtin cx "LegacyReactComponent" reason in - this_typeapp c knot.this (Some [props_t; knot.state_t]) - in - - let static = - let reason, props, dict, exact, sealed = match spec.statics with - | None -> - reason_op, static_props, None, true, false - | Some (Unknown reason) -> - let dict = Some { - dict_name = None; - key = StrT.why reason; - value = AnyT.why reason; - dict_polarity = Neutral; - } in - reason, static_props, dict, false, true - | Some (Known (reason, props, dict, { exact; sealed; _ })) -> - let static_props = SMap.union props static_props in - let sealed = not (exact && sealed_in_op reason_op sealed) in - reason, static_props, dict, exact, sealed + { frozen; exact; sealed } in - let reason = replace_reason_const RReactStatics reason in - Obj_type.mk_with_proto cx reason ~props (class_type super) - ?dict ~exact ~sealed - in - - let insttype = { - class_id = 0; - type_args = []; - (* TODO: props are actually installed on the prototype *) - own_props = Context.make_property_map cx props; - proto_props = Context.make_property_map cx SMap.empty; - initialized_fields = SSet.empty; - initialized_static_fields = SSet.empty; - inst_call_t = None; - has_unknown_react_mixins = spec.unknown_mixins <> []; - structural = false; - } in - rec_flow cx trace (super, SuperT (use_op, reason_op, Derived { - own = props; - proto = SMap.empty; - (* TODO: check static signature against base class *) - static = SMap.empty; - })); - - let instance = DefT (reason_component, InstanceT (static, super, [], insttype)) in - rec_flow_t cx trace (instance, knot.this); - rec_flow_t cx trace (static, knot.static); - rec_flow_t cx trace (class_type 
instance, tout) - in - - let empty_spec obj = { - obj; - statics = None; - prop_types = None; - get_default_props = Option.to_list (read_prop "getDefaultProps" obj); - get_initial_state = Option.to_list (read_prop "getInitialState" obj); - unknown_mixins = []; - } in - - function - | Spec stack' -> - let result = match coerce_object l with - | Ok (reason, _, _, { exact; sealed; _ }) - when not (exact && sealed_in_op reason_op sealed) -> - err_incompatible reason; - Error reason - | result -> result - in - (match result with - | Ok obj -> - on_resolve_spec ((obj, empty_spec obj), stack') - | Error reason -> - (match stack' with - | [] -> - (* The root spec is unknown *) - rec_flow_t cx trace (AnyT.why reason_op, tout) - | ((obj, spec), todo, mixins_rev)::stack' -> - (* A mixin is unknown *) - let mixins_rev = Unknown reason :: mixins_rev in - (match todo with + let merge_objs (r1, ps1, dict1, flags1) (_, ps2, dict2, flags2) = + let props = SMap.union ps1 ps2 in + let dict = Option.first_some dict1 dict2 in + let flags = merge_flags flags1 flags2 in + (r1, props, dict, flags) + in + (* When a type is resolved, we move on to the next field. If the field is + * not found on the spec, we skip ahead. Otherwise we emit a constraint to + * resolve the property type. *) + let rec on_resolve_spec stack = + match read_stack "mixins" stack with + | None -> on_resolve_mixins stack + | Some t -> resolve (Mixins stack) t + and on_resolve_mixins stack = + match read_stack "statics" stack with + | None -> on_resolve_statics stack + | Some t -> resolve (Statics stack) t + and on_resolve_statics stack = + match read_stack "propTypes" stack with + | None -> on_resolve_prop_types stack + | Some t -> resolve (PropTypes (stack, ResolveObject)) t + and on_resolve_prop_types stack = + match stack with + | ((_, spec), []) -> + (* Done resolving class spec and mixin specs. *) + on_resolve_default_props None spec.get_default_props; + on_resolve_initial_state None spec.get_initial_state; + mk_class spec + | ((_, mixin), ((obj, spec), todo, mixins_rev) :: stack') -> + (* Done resolving a mixin *) + let mixins_rev = Known mixin :: mixins_rev in + (match todo with + | [] -> + (* No more mixins, resume parent stack with accumulated mixins *) + let stack = ((obj, flatten_mixins mixins_rev spec), stack') in + on_resolve_mixins stack + | t :: todo -> + (* Resolve next mixin in parent's mixin list *) + let stack' = ((obj, spec), todo, mixins_rev) :: stack' in + resolve (Spec stack') t) + and on_resolve_default_props acc = function | [] -> - (* No more mixins, resume parent stack with accumulated mixin *) - let stack = (obj, flatten_mixins mixins_rev spec), stack' in - on_resolve_mixins stack - | t::todo -> - (* Resolve next mixin in parent's mixin list *) - let stack' = ((obj, spec), todo, mixins_rev)::stack' in - resolve (Spec stack') t))) - - | Mixins stack -> - (match coerce_array l with - | Error reason -> - let stack = map_spec (fun spec -> { - spec with - unknown_mixins = reason::spec.unknown_mixins - }) stack in - on_resolve_mixins stack - | Ok [] -> on_resolve_mixins stack - | Ok (t::todo) -> - (* We need to resolve every mixin before we can continue resolving this - * spec. Push the stack and start resolving the first mixin. Once the - * mixins are done, we'll pop the stack and continue. 
*) - let head, tail = stack in - let tail = (head, todo, [])::tail in - resolve (Spec tail) t) - - | Statics stack -> - let statics = Some (maybe_known_of_result (coerce_object l)) in - map_spec (fun spec -> { - spec with - statics = merge_statics statics spec.statics - }) stack |> on_resolve_statics - - | PropTypes (stack, tool) -> - let add_prop k t (reason, props, dict, flags) = - let props = SMap.add k (Field (None, t, Neutral)) props in - reason, props, dict, flags - in - let add_dict dict (reason, props, _, flags) = - reason, props, Some dict, flags - in - let rec next todo prop_types = - match SMap.choose todo with - | None -> - let prop_types = Some (Known prop_types) in - map_spec (fun spec -> { - spec with - prop_types = merge_prop_types prop_types spec.prop_types - }) stack |> on_resolve_prop_types - | Some (k, p) -> - let todo = SMap.remove k todo in - match Property.read_t p with - | None -> next todo prop_types - | Some t -> - let tool = PropTypes (stack, - ResolveProp (k, todo, prop_types)) in - resolve tool t - in - (match tool with - | ResolveObject -> - (match coerce_object l with - (* TODO: If the resolved object is not exact and sealed, or if it does - * not have a dictionary -- that is, it may be wider in an unknown way, - * we should error and resolve to any. However, since all object spreads - * are currently unsealed, we must wait for precise spread support. - * Otherwise, we will cause too many spurious errors. *) - | Ok (reason, todo, dict, flags) -> - let prop_types = reason, SMap.empty, None, flags in - (match dict with - | None -> next todo prop_types - | Some dicttype -> - let tool = PropTypes (stack, - ResolveDict (dicttype, todo, prop_types)) in - resolve tool dicttype.value) - | Error reason -> - let prop_types = Some (Unknown reason) in - map_spec (fun spec -> { - spec with - prop_types = merge_prop_types prop_types spec.prop_types - }) stack |> on_resolve_prop_types) - | ResolveDict (dicttype, todo, prop_types) -> - let dict = match coerce_prop_type l with - | Ok (_, t) -> {dicttype with value = t} - | Error reason -> {dicttype with value = DefT (reason, AnyT)} + let t = + match acc with + | None -> + let reason = replace_desc_reason RReactDefaultProps reason_op in + VoidT.make reason (bogus_trust ()) + | Some (Unknown reason) -> AnyT.make Untyped reason + | Some (Known (reason, props, dict, _)) -> + Obj_type.mk_with_proto + cx + reason + ~props + (ObjProtoT reason) + ?dict + ~sealed:true + ~exact:false + in + rec_flow_t cx trace (t, knot.default_t) + | t :: todo -> + let tool = DefaultProps (todo, acc) in + resolve_call knot.static tool t + and on_resolve_initial_state acc = function + | [] -> + let t = + match acc with + | None -> + let reason = replace_desc_reason RReactState reason_op in + Obj_type.mk cx reason + | Some (Unknown reason) -> AnyT.make Untyped reason + | Some (Known (Null reason)) -> DefT (reason, bogus_trust (), NullT) + | Some (Known (NotNull (reason, props, dict, { exact; sealed; _ }))) -> + let sealed = not (exact && sealed_in_op reason_op sealed) in + Obj_type.mk_with_proto cx reason ~props (ObjProtoT reason) ?dict ~sealed ~exact + in + rec_flow_t cx trace (t, knot.state_t) + | t :: todo -> + let tool = InitialState (todo, acc) in + resolve_call knot.this tool t + and flatten_mixins mixins_rev spec = + List.fold_right + (fun mixin acc -> + match mixin with + | Known spec -> merge_specs acc spec + | Unknown reason -> { acc with unknown_mixins = reason :: acc.unknown_mixins }) + mixins_rev + spec + and merge_statics = Option.merge 
~f:(merge_unknown merge_objs) + and merge_prop_types = Option.merge ~f:(merge_unknown merge_objs) + and merge_default_props = Option.merge ~f:(merge_unknown merge_objs) + and merge_initial_state = Option.merge ~f:(merge_unknown (merge_nullable merge_objs)) + and merge_specs a b = + { + obj = merge_objs a.obj b.obj; + statics = merge_statics a.statics b.statics; + prop_types = merge_prop_types a.prop_types b.prop_types; + get_default_props = a.get_default_props @ b.get_default_props; + get_initial_state = a.get_initial_state @ b.get_initial_state; + unknown_mixins = a.unknown_mixins @ b.unknown_mixins; + } + and mk_class spec = + (* If the component doesn't specify propTypes, allow anything. To be + stricter, we could use an empty object type, but that would require all + components to specify propTypes *) + let props_t = + match spec.prop_types with + | None -> AnyT.make Untyped reason_op + | Some (Unknown reason) -> AnyT.make Untyped reason + | Some (Known (reason, props, dict, _)) -> + Obj_type.mk_with_proto + cx + reason + ~props + (ObjProtoT reason) + ?dict + ~sealed:true + ~exact:false + in + let props_t = mod_reason_of_t (replace_desc_reason RReactPropTypes) props_t in + let props = + SMap.empty + |> SMap.add "props" (Field (None, props_t, Polarity.Neutral)) + |> SMap.add "state" (Field (None, knot.state_t, Polarity.Neutral)) + in + (* Some spec fields are used to create the instance type, but are not + present on the resulting prototype or statics. Other spec fields should + become static props. Everything else should be on the prototype. *) + let (_, spec_props, _, _) = spec.obj in + let (props, static_props) = + SMap.fold + (fun k v (props, static_props) -> + match k with + | "autobind" + | "mixins" + | "statics" -> + (props, static_props) + | "childContextTypes" + | "contextTypes" + | "displayName" + | "getDefaultProps" + | "propTypes" -> + let v = + match Property.read_t v with + | None -> v + | Some t -> + let loc = Property.read_loc v in + Field (loc, t, Polarity.Positive) + in + (props, SMap.add k v static_props) + (* Don't autobind ReactClassInterface props, like getInitialState. + Instead, call with the correct this when resolving types. *) + | "getInitialState" + | "getChildContext" + | "render" + | "componentWillMount" + | "componentDidMount" + | "componentWillReceiveProps" + | "shouldComponentUpdate" + | "componentWillUpdate" + | "componentDidUpdate" + | "componentWillUnmount" + | "updateComponent" -> + let loc = Property.read_loc v in + let v = + match Property.read_t v with + | None -> v + | Some t -> + (* Tie the `this` knot with BindT *) + let dummy_return = MixedT.make reason_op |> with_trust bogus_trust in + let calltype = mk_methodcalltype knot.this None [] dummy_return in + rec_flow cx trace (t, BindT (unknown_use, reason_op, calltype, true)); + + (* Because we are creating an instance type, which can be used as an + upper bound (e.g., as a super class), it's more flexible to + create covariant methods. Otherwise, a subclass could not + override the `render` method, say. 
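As a loose, standalone OCaml analogue of this point (not Flow's own semantics): when a method position is treated covariantly, a value whose method returns a more precise type can still be used where the general interface is expected, which is what lets an override specialize `render`. All names below are illustrative only.

  (* Hypothetical object types for illustration. *)
  type base = < tag : string >
  type sub = < tag : string ; extra : int >

  (* Consumers rely only on the general interface... *)
  let use (o : < render : unit -> base >) = (o#render ())#tag

  (* ...so an object whose [render] returns the more precise [sub] can be
     coerced to that interface, thanks to covariance in the result position. *)
  let specialized : < render : unit -> sub > =
    object
      method render () = object method tag = "x" method extra = 1 end
    end

  let _ = use (specialized :> < render : unit -> base >)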
*) + Method (loc, t) + in + (SMap.add k v props, static_props) + | _ -> + let bound_v = + Property.map_t + (fun t -> + let use_op = unknown_use in + let destructor = Bind knot.this in + let id = mk_id () in + ignore (mk_type_destructor cx ~trace use_op reason_op t destructor id); + EvalT (t, TypeDestructorT (use_op, reason_op, destructor), id)) + v + in + (SMap.add k bound_v props, static_props)) + spec_props + (props, SMap.empty) + in + let static_props = + static_props + |> SMap.add "defaultProps" (Field (None, knot.default_t, Polarity.Neutral)) + in + let reason_component = replace_desc_reason RReactComponent reason_op in + let super = + let reason = update_desc_reason (fun x -> RSuperOf x) reason_component in + let c = get_builtin cx "LegacyReactComponent" reason in + this_typeapp c knot.this (Some [props_t; knot.state_t]) + in + let static = + let (reason, props, dict, exact, sealed) = + match spec.statics with + | None -> (reason_op, static_props, None, true, false) + | Some (Unknown reason) -> + let dict = + Some + { + dict_name = None; + key = StrT.why reason (bogus_trust ()); + value = EmptyT.why reason (bogus_trust ()); + dict_polarity = Polarity.Neutral; + } + in + (reason, static_props, dict, false, true) + | Some (Known (reason, props, dict, { exact; sealed; _ })) -> + let static_props = SMap.union props static_props in + let sealed = not (exact && sealed_in_op reason_op sealed) in + (reason, static_props, dict, exact, sealed) + in + let reason = replace_desc_reason RReactStatics reason in + Obj_type.mk_with_proto cx reason ~props (class_type super) ?dict ~exact ~sealed + in + let insttype = + { + class_id = ALoc.none; + type_args = []; + (* TODO: props are actually installed on the prototype *) + own_props = Context.generate_property_map cx props; + proto_props = Context.generate_property_map cx SMap.empty; + initialized_fields = SSet.empty; + initialized_static_fields = SSet.singleton "propTypes"; + inst_call_t = None; + has_unknown_react_mixins = spec.unknown_mixins <> []; + inst_kind = ClassKind; + } + in + rec_flow + cx + trace + ( super, + SuperT + ( use_op, + reason_op, + Derived + { + own = props; + proto = SMap.empty; + (* TODO: check static signature against base class *) + static = SMap.empty; + } ) ); + + let instance = + DefT (reason_component, bogus_trust (), InstanceT (static, super, [], insttype)) + in + rec_flow_t cx trace (instance, knot.this); + rec_flow_t cx trace (static, knot.static); + rec_flow_t cx trace (class_type instance, tout) in - next todo (add_dict dict prop_types) - | ResolveProp (k, todo, prop_types) -> - let t = match coerce_prop_type l with - | Ok (required, t) -> if required then t else Type.optional t - | Error reason -> Type.optional (DefT (reason, AnyT)) + let empty_spec obj = + { + obj; + statics = None; + prop_types = None; + get_default_props = Option.to_list (read_prop "getDefaultProps" obj); + get_initial_state = Option.to_list (read_prop "getInitialState" obj); + unknown_mixins = []; + } in - next todo (add_prop k t prop_types)) - - | DefaultProps (todo, acc) -> - let default_props = Some (maybe_known_of_result (coerce_object l)) in - let acc = merge_default_props default_props acc in - on_resolve_default_props acc todo - - | InitialState (todo, acc) -> - let initial_state = Some (match l with - | DefT (reason, NullT) -> Known (Null reason) - | _ -> - coerce_object l - |> maybe_known_of_result - |> map_known (fun x -> NotNull x) - ) in - let acc = merge_initial_state initial_state acc in - on_resolve_initial_state acc todo - in - - 
match u with - | CreateElement0 _ -> failwith "handled elsewhere" - | CreateElement (clone, component, config, children, tout) -> - create_element clone component config children tout - | GetProps tout -> props_to_tout tout - | GetConfig tout -> get_config tout - | GetRef tout -> get_instance tout - | SimplifyPropType (tool, tout) -> simplify_prop_type tout tool - | CreateClass (tool, knot, tout) -> create_class knot tout tool + function + | Spec stack' -> + let result = + match coerce_object l with + | Ok (reason, _, _, { exact; sealed; _ }) + when not (exact && sealed_in_op reason_op sealed) -> + err_incompatible reason; + Error reason + | result -> result + in + (match result with + | Ok obj -> on_resolve_spec ((obj, empty_spec obj), stack') + | Error reason -> + (match stack' with + | [] -> + (* The root spec is unknown *) + rec_flow_t cx trace (AnyT.error reason_op, tout) + | ((obj, spec), todo, mixins_rev) :: stack' -> + (* A mixin is unknown *) + let mixins_rev = Unknown reason :: mixins_rev in + (match todo with + | [] -> + (* No more mixins, resume parent stack with accumulated mixin *) + let stack = ((obj, flatten_mixins mixins_rev spec), stack') in + on_resolve_mixins stack + | t :: todo -> + (* Resolve next mixin in parent's mixin list *) + let stack' = ((obj, spec), todo, mixins_rev) :: stack' in + resolve (Spec stack') t))) + | Mixins stack -> + (match coerce_array l with + | Error reason -> + let stack = + map_spec + (fun spec -> { spec with unknown_mixins = reason :: spec.unknown_mixins }) + stack + in + on_resolve_mixins stack + | Ok [] -> on_resolve_mixins stack + | Ok (t :: todo) -> + (* We need to resolve every mixin before we can continue resolving this + * spec. Push the stack and start resolving the first mixin. Once the + * mixins are done, we'll pop the stack and continue. *) + let (head, tail) = stack in + let tail = (head, todo, []) :: tail in + resolve (Spec tail) t) + | Statics stack -> + let statics = Some (maybe_known_of_result (coerce_object l)) in + map_spec (fun spec -> { spec with statics = merge_statics statics spec.statics }) stack + |> on_resolve_statics + | PropTypes (stack, tool) -> + let add_prop k t (reason, props, dict, flags) = + let props = SMap.add k (Field (None, t, Polarity.Neutral)) props in + (reason, props, dict, flags) + in + let add_dict dict (reason, props, _, flags) = (reason, props, Some dict, flags) in + let rec next todo prop_types = + match SMap.choose todo with + | None -> + let prop_types = Some (Known prop_types) in + map_spec + (fun spec -> + { spec with prop_types = merge_prop_types prop_types spec.prop_types }) + stack + |> on_resolve_prop_types + | Some (k, p) -> + let todo = SMap.remove k todo in + (match Property.read_t p with + | None -> next todo prop_types + | Some t -> + let tool = PropTypes (stack, ResolveProp (k, todo, prop_types)) in + resolve tool t) + in + (match tool with + | ResolveObject -> + (match coerce_object l with + (* TODO: If the resolved object is not exact and sealed, or if it does + * not have a dictionary -- that is, it may be wider in an unknown way, + * we should error and resolve to any. However, since all object spreads + * are currently unsealed, we must wait for precise spread support. + * Otherwise, we will cause too many spurious errors. 
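The `next` helper above (like the analogous one under `Shape` earlier) drains a string map one binding at a time, threading an accumulator as it goes. A standalone sketch of that recursion, using the stdlib `Map` rather than Flow's `SMap` and a synchronous fold step in place of the recursive constraint:

  module StrMap = Map.Make (String)

  (* Pick an arbitrary binding, remove it, fold it into [acc], and repeat
     until the map is empty. *)
  let rec drain f acc todo =
    match StrMap.choose_opt todo with
    | None -> acc
    | Some (k, v) -> drain f (f k v acc) (StrMap.remove k todo)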
*) + | Ok (reason, todo, dict, flags) -> + let prop_types = (reason, SMap.empty, None, flags) in + (match dict with + | None -> next todo prop_types + | Some dicttype -> + let tool = PropTypes (stack, ResolveDict (dicttype, todo, prop_types)) in + resolve tool dicttype.value) + | Error reason -> + let prop_types = Some (Unknown reason) in + map_spec + (fun spec -> + { spec with prop_types = merge_prop_types prop_types spec.prop_types }) + stack + |> on_resolve_prop_types) + | ResolveDict (dicttype, todo, prop_types) -> + let dict = + match coerce_prop_type l with + | Ok (_, t) -> { dicttype with value = t } + | Error reason -> { dicttype with value = AnyT.make AnyError reason } + in + next todo (add_dict dict prop_types) + | ResolveProp (k, todo, prop_types) -> + let t = + match coerce_prop_type l with + | Ok (required, t) -> + if required then + t + else + Type.optional ?annot_loc:(annot_aloc_of_reason @@ reason_of_t t) t + | Error reason -> AnyT.make AnyError reason |> Type.optional + in + next todo (add_prop k t prop_types)) + | DefaultProps (todo, acc) -> + let default_props = Some (maybe_known_of_result (coerce_object l)) in + let acc = merge_default_props default_props acc in + on_resolve_default_props acc todo + | InitialState (todo, acc) -> + let initial_state = + Some + (match l with + | DefT (reason, _, NullT) -> Known (Null reason) + | _ -> coerce_object l |> maybe_known_of_result |> map_known (fun x -> NotNull x)) + in + let acc = merge_initial_state initial_state acc in + on_resolve_initial_state acc todo) + in + match u with + | CreateElement0 _ -> failwith "handled elsewhere" + | CreateElement (clone, component, config, children, tout) -> + create_element clone component config children tout + | ConfigCheck config -> config_check false config ([], None) + | GetProps tout -> props_to_tout tout + | GetConfig tout -> get_config tout + | GetConfigType (default_props, tout) -> get_config_with_props_and_defaults default_props tout + | GetRef tout -> get_instance tout + | SimplifyPropType (tool, tout) -> simplify_prop_type tout tool + | CreateClass (tool, knot, tout) -> create_class knot tout tool +end diff --git a/src/typing/refinement.ml b/src/typing/refinement.ml index 83963286053..ba933f79591 100644 --- a/src/typing/refinement.ml +++ b/src/typing/refinement.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -16,68 +16,72 @@ module Ast = Flow_ast Eligible expressions are simple ids and chains of property|index lookups from an id base *) -let rec key = Ast.Expression.(function +let rec key = + Ast.Expression.( + function + | (_, This) -> + (* treat this as a property chain, in terms of refinement lifetime *) + Some (Reason.internal_name "this", []) + | (_, Super) -> + (* treat this as a property chain, in terms of refinement lifetime *) + Some (Reason.internal_name "super", []) + | (_, Identifier id) -> key_of_identifier id + | (_, Member member) -> key_of_member member + | _ -> + (* other LHSes unsupported currently/here *) + None) -| _, This -> - (* treat this as a property chain, in terms of refinement lifetime *) - Some (Reason.internal_name "this", []) +and key_of_identifier (_, { Ast.Identifier.name; comments = _ }) = + if name = "undefined" then + None + else + Some (name, []) -| _, Super -> - (* treat this as a property chain, in terms of refinement lifetime *) - Some (Reason.internal_name "super", []) +and key_of_member { Ast.Expression.Member._object; property; _ } = + Ast.Expression.Member.( + match property with + | PropertyIdentifier (_, { Ast.Identifier.name; comments = _ }) + | PropertyExpression + (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String name; _ }) + | PropertyExpression + ( _, + Ast.Expression.Literal + { Ast.Literal.value = Ast.Literal.Number _; raw = name; comments = _ } ) -> + (match key _object with + | Some (base, chain) -> Some (base, Key.Prop name :: chain) + | None -> None) + | PropertyPrivateName (_, (_, { Ast.Identifier.name; comments = _ })) -> + (match key _object with + | Some (base, chain) -> Some (base, Key.PrivateField name :: chain) + | None -> None) + | PropertyExpression index -> + (* foo.bar[baz] -> Chain [Index baz; Id bar; Id foo] *) + (match key _object with + | Some (base, chain) -> + (match key index with + | Some key -> Some (base, Key.Elem key :: chain) + | None -> None) + | None -> None)) -| _, Identifier (_, name) when name != "undefined" -> - Some (name, []) - -| _, Member { Member._object; - (* foo.bar.baz -> Chain [Id baz; Id bar; Id foo] *) - property = ( - Member.PropertyIdentifier (_, name) - | Member.PropertyExpression (_, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.String name; - _; - }) - | Member.PropertyExpression (_, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.Number _; - raw = name; - }) - ); _; } -> ( - match key _object with - | Some (base, chain) -> - Some (base, Key.Prop name :: chain) - | None -> None - ) - -| _, Member { - Member._object; property = Member.PropertyPrivateName (_, (_, name)); _ - } -> ( - match key _object with - | Some (base, chain) -> - Some (base, Key.PrivateField name :: chain) - | None -> None - ) - -| _, Member { - Member._object; property = Member.PropertyExpression index; _ - } -> ( - (* foo.bar[baz] -> Chain [Index baz; Id bar; Id foo] *) - match key _object with - | Some (base, chain) -> ( - match key index with - | Some key -> - Some (base, Key.Elem key :: chain) - | None -> None - ) - | None -> None - ) - -| _ -> - (* other LHSes unsupported currently/here *) - None -) +let key_of_pattern patt = + match patt with + | (_, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name; _ }) -> key_of_identifier name + | (_, Ast.Pattern.Expression (_, Ast.Expression.Member member)) -> key_of_member member + | (_, Ast.Pattern.Array _) + | (_, Ast.Pattern.Object _) -> + (* other LHSes unsupported currently/here *) + None + | (_, Ast.Pattern.Expression _) -> + 
(* non-member expression patterns are bogus *) + None (* get type refinement for expression, if it exists *) let get cx expr loc = match key expr with | Some k -> Env.get_refinement cx k loc | None -> None + +let get_of_pattern cx patt loc = + match key_of_pattern patt with + | Some k -> Env.get_refinement cx k loc + | None -> None diff --git a/src/typing/refinement.mli b/src/typing/refinement.mli index 084b68a2894..9982f32039c 100644 --- a/src/typing/refinement.mli +++ b/src/typing/refinement.mli @@ -1,13 +1,14 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val key: (Loc.t, Loc.t) Flow_ast.Expression.t -> Key.t option -val get: - Context.t -> - (Loc.t, Loc.t) Flow_ast.Expression.t -> - Loc.t -> - Type.t option +val key : ('loc, 'loc) Flow_ast.Expression.t -> Key.t option + +val get : Context.t -> ('loc, 'loc) Flow_ast.Expression.t -> ALoc.t -> Type.t option + +val key_of_pattern : ('loc, 'loc) Flow_ast.Pattern.t -> Key.t option + +val get_of_pattern : Context.t -> ('loc, 'loc) Flow_ast.Pattern.t -> ALoc.t -> Type.t option diff --git a/src/typing/repos_cache.ml b/src/typing/repos_cache.ml index 3cc9b413f3c..ccf77cf17ad 100644 --- a/src/typing/repos_cache.ml +++ b/src/typing/repos_cache.ml @@ -1,3 +1,10 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + (** Reposition cache to prevent repositioning loops Repositioning improves error messages by changing reasons to positions close @@ -149,19 +156,18 @@ type ident = Constraint.ident module ReposMap = MyMap.Make (struct type key = ident * Reason.t + type t = key + let compare = Pervasives.compare end) type t = { cache: Type.t ReposMap.t; - back: ident list IMap.t + back: ident list IMap.t; } -let empty = { - cache = ReposMap.empty; - back = IMap.empty; -} +let empty = { cache = ReposMap.empty; back = IMap.empty } let breadcrumb id x = match IMap.get id x.back with @@ -172,13 +178,16 @@ let find id reason x = let rec loop hd tl = match ReposMap.get (hd, reason) x.cache with | Some _ as found -> found - | None -> match tl with [] -> None | hd::tl -> loop hd tl + | None -> + (match tl with + | [] -> None + | hd :: tl -> loop hd tl) in loop id (breadcrumb id x) let add reason t t' x = - let _, id = Type.open_tvar t in - let _, id' = Type.open_tvar t' in + let (_, id) = Type.open_tvar t in + let (_, id') = Type.open_tvar t' in { cache = ReposMap.add (id, reason) t' x.cache; back = IMap.add id' (id :: breadcrumb id x) x.back; diff --git a/src/typing/resolvableTypeJob.ml b/src/typing/resolvableTypeJob.ml new file mode 100644 index 00000000000..db556cac6f6 --- /dev/null +++ b/src/typing/resolvableTypeJob.ml @@ -0,0 +1,326 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Constraint +open Reason +open Type + +(* Helper module for full type resolution as needed to check union and + intersection types. + + Given a type, we walk it to collect the parts of it we wish to resolve. Once + these parts are resolved, they must themselves be walked to collect further + parts to resolve, and so on. 
In other words, type resolution jobs are created + and processed in rounds, moving closer and closer to full resolution of the + original type. Needless to say, these jobs can be recursive, and so must be + managed carefully for termination and performance. The job management itself + is done in Graph_explorer. (The jobs are naturally modeled as a graph with + dynamically created nodes and edges.) + + Here, we define the function that creates a single round of such jobs. +*) + +(* A datatype describing type resolution jobs. + + We unfold types as we go, looking for parts that cannot be unfolded + immediately (thus needing resolution to proceed). + + The handling of these parts involve calls to `flow` and `unify`, and is + thus decoupled from the walker itself for clarity. Here, we just create + different jobs for different parts encountered. These jobs are further + processed by bindings_of_jobs. + + Briefly, jobs are created for the following cases. (1) Annotation sources + need to be resolved. (2) So do heads of type applications. (3) Resolved + tvars are recursively unfolded, but we need to remember which resolved + tvars have been unfolded to prevent infinite unfolding. (4) Unresolved + tvars are handled differently based on context: when they are expected + (e.g., when they are part of inferred types), they are logged; when they + are unexpected (e.g., when they are part of annotations), they are + converted to `any`. For more details see bindings_of_jobs. + +*) +type t = + | Binding of Type.tvar + | OpenResolved + | OpenUnresolved of int option * reason * Constraint.ident + +(* log_unresolved is a mode that determines whether to log unresolved tvars: + it is None when resolving annotations, and Some speculation_id when + resolving inferred types. *) +let rec collect_of_types ?log_unresolved cx = List.fold_left (collect_of_type ?log_unresolved cx) + +and collect_of_type ?log_unresolved cx acc = function + | OpenT (r, id) -> + let (id, constraints) = Context.find_constraints cx id in + if IMap.mem id acc then + acc + else ( + match constraints with + | FullyResolved _ -> + (* Everything reachable from this type is certainly resolved, so we can + avoid walking the type entirely. *) + acc + | Resolved (_, t) -> + let acc = IMap.add id OpenResolved acc in + collect_of_type ?log_unresolved cx acc t + | Unresolved _ -> + (* It is important to consider reads of constant property names as fully + resolvable, especially since constant property names are often used to + store literals that serve as tags for disjoint unions. Unfortunately, + today we cannot distinguish such reads from others, so we rely on a + common style convention to recognize constant property names. For now + this hack pays for itself: we do not ask such reads to be annotated + with the corresponding literal types to decide membership in those + disjoint unions. *) + if is_constant_reason r then + IMap.add id (Binding (r, id)) acc + (* Instantiable reasons indicate unresolved tvars that are created + "fresh" for the sole purpose of binding to other types, e.g. as + instantiations of type parameters or as existentials. Constraining + them during speculative matching typically do not cause side effects + across branches, and help make progress. 
*) + else if is_instantiable_reason r then + acc + else + IMap.add id (OpenUnresolved (log_unresolved, r, id)) acc + ) + | AnnotT (_, t, _) -> collect_of_binding ?log_unresolved cx acc t + | ThisTypeAppT (_, t, _, targs_opt) -> + let acc = collect_of_binding ?log_unresolved cx acc t in + let acc = + match targs_opt with + | None -> acc + | Some targs -> collect_of_types ?log_unresolved cx acc targs + in + acc + | TypeAppT (_, _, t, targs) -> + let acc = collect_of_binding ?log_unresolved cx acc t in + let acc = collect_of_types ?log_unresolved cx acc targs in + acc + | EvalT (t, TypeDestructorT (_, _, d), _) -> + let acc = collect_of_type ?log_unresolved cx acc t in + collect_of_destructor ?log_unresolved cx acc d + (* Some common kinds of types are quite overloaded: sometimes they + correspond to types written by the user, but sometimes they also model + internal types, and as such carry other bits of information. For now, we + walk only some parts of these types. These parts are chosen such that + they directly correspond to parts of the surface syntax of types. It is + less clear what it means to resolve other "internal" parts of these + types. In theory, ignoring them *might* lead to bugs, but we've not seen + examples of such bugs yet. Leaving further investigation of this point as + future work. *) + | DefT (_, _, ObjT { props_tmap; dict_t; call_t; _ }) -> + let props_tmap = Context.find_props cx props_tmap in + let acc = SMap.fold (collect_of_property ?log_unresolved cx) props_tmap acc in + let ts = + match dict_t with + | None -> [] + | Some { key; value; _ } -> [key; value] + in + let ts = + match call_t with + | None -> ts + | Some id -> Context.find_call cx id :: ts + in + collect_of_types ?log_unresolved cx acc ts + | DefT (_, _, FunT (_, _, { params; return_t; _ })) -> + let ts = List.fold_left (fun acc (_, t) -> t :: acc) [return_t] params in + collect_of_types ?log_unresolved cx acc ts + | DefT (_, _, ArrT (ArrayAT (elemt, tuple_types))) -> + let ts = Option.value ~default:[] tuple_types in + let ts = elemt :: ts in + collect_of_types ?log_unresolved cx acc ts + | DefT (_, _, ArrT (TupleAT (elemt, tuple_types))) -> + collect_of_types ?log_unresolved cx acc (elemt :: tuple_types) + | DefT (_, _, ArrT (ROArrayAT elemt)) -> collect_of_type ?log_unresolved cx acc elemt + | DefT + ( _, + _, + InstanceT + (static, super, _, { class_id; type_args; own_props; proto_props; inst_call_t; _ }) ) -> + let ts = + if class_id = ALoc.none then + [] + else + [super; static] + in + let ts = List.fold_left (fun ts (_, _, t, _) -> t :: ts) ts type_args in + let props_tmap = + SMap.union (Context.find_props cx own_props) (Context.find_props cx proto_props) + in + let ts = SMap.fold (fun _ p ts -> Property.fold_t (fun ts t -> t :: ts) ts p) props_tmap ts in + let ts = + match inst_call_t with + | None -> ts + | Some id -> Context.find_call cx id :: ts + in + collect_of_types ?log_unresolved cx acc ts + | DefT (_, _, PolyT (_, _, t, _)) -> collect_of_type ?log_unresolved cx acc t + | BoundT _ -> acc + (* TODO: The following kinds of types are not walked out of laziness. It's + not immediately clear what we'd gain (or lose) by walking them. *) + | EvalT _ + | InternalT (ChoiceKitT (_, _)) + | TypeDestructorTriggerT _ + | ModuleT (_, _, _) + | InternalT (ExtendsT _) -> + acc + (* The following cases exactly follow Type_visitor (i.e., they do the + standard walk). 
TODO: Rewriting this walker as a subclass of Type_visitor + would be quite nice (as long as we confirm that the resulting + virtualization of calls to this function doesn't lead to perf + degradation: this function is expected to be quite hot). *) + | OptionalT (_, t) + | MaybeT (_, t) -> + collect_of_type ?log_unresolved cx acc t + | UnionT (_, rep) -> + let ts = UnionRep.members rep in + collect_of_types ?log_unresolved cx acc ts + | IntersectionT (_, rep) -> + let ts = InterRep.members rep in + collect_of_types ?log_unresolved cx acc ts + | DefT (_, _, ReactAbstractComponentT { config; instance }) -> + collect_of_types ?log_unresolved cx acc [config; instance] + | OpaqueT (_, { underlying_t; super_t; _ }) -> + let acc = Option.fold underlying_t ~init:acc ~f:(collect_of_type ?log_unresolved cx) in + let acc = Option.fold super_t ~init:acc ~f:(collect_of_type ?log_unresolved cx) in + acc + | ExactT (_, t) + | DefT (_, _, TypeT (_, t)) + | DefT (_, _, ClassT t) + | ThisClassT (_, t) -> + collect_of_type ?log_unresolved cx acc t + | KeysT (_, t) -> collect_of_type ?log_unresolved cx acc t + | ShapeT t -> collect_of_type ?log_unresolved cx acc t + | MatchingPropT (_, _, t) -> collect_of_type ?log_unresolved cx acc t + | DefT (_, _, IdxWrapper t) -> collect_of_type ?log_unresolved cx acc t + | ReposT (_, t) + | InternalT (ReposUpperT (_, t)) -> + collect_of_type ?log_unresolved cx acc t + | InternalT (OptionalChainVoidT _) -> acc + | DefT (_, _, NumT _) + | DefT (_, _, StrT _) + | DefT (_, _, BoolT _) + | DefT (_, _, VoidT) + | DefT (_, _, NullT) + | DefT (_, _, EmptyT _) + | DefT (_, _, MixedT _) + | DefT (_, _, SingletonBoolT _) + | DefT (_, _, SingletonNumT _) + | DefT (_, _, SingletonStrT _) + | DefT (_, _, CharSetT _) + | AnyT _ -> + acc + (* Since MergedT only arises from context opt, we can be certain that its + * uses are all fully resolved. No need to traverse the structure. 
*) + | MergedT _ -> acc + | FunProtoBindT _ + | FunProtoCallT _ + | FunProtoApplyT _ + | FunProtoT _ + | NullProtoT _ + | ObjProtoT _ + | CustomFunT (_, _) + | ExistsT _ + | OpenPredT _ -> + acc + +and collect_of_destructor ?log_unresolved cx acc = function + | NonMaybeType -> acc + | PropertyType _ -> acc + | ElementType t -> collect_of_type ?log_unresolved cx acc t + | Bind t -> collect_of_type ?log_unresolved cx acc t + | ReadOnlyType -> acc + | SpreadType (_, ts, head_slice) -> + let acc = collect_of_object_kit_spread_operands ?log_unresolved cx acc ts in + begin + match head_slice with + | None -> acc + | Some head_slice -> + collect_of_object_kit_spread_operand_slice ?log_unresolved cx acc head_slice + end + | RestType (_, t) -> collect_of_type ?log_unresolved cx acc t + | ValuesType -> acc + | CallType ts -> collect_of_types ?log_unresolved cx acc ts + | TypeMap tmap -> collect_of_type_map ?log_unresolved cx acc tmap + | ReactConfigType default_props -> collect_of_type ?log_unresolved cx acc default_props + | ReactElementPropsType + | ReactElementConfigType + | ReactElementRefType -> + acc + +and collect_of_property ?log_unresolved cx name property acc = + if is_internal_name name then + acc + else + Property.fold_t (fun acc -> collect_of_type ?log_unresolved cx acc) acc property + +and collect_of_object_kit_spread_operand_slice + ?log_unresolved cx acc { Object.Spread.reason = _; prop_map; dict } = + let acc = SMap.fold (collect_of_property ?log_unresolved cx) prop_map acc in + let ts = + match dict with + | Some { key; value; dict_polarity = _; dict_name = _ } -> [key; value] + | None -> [] + in + collect_of_types ?log_unresolved cx acc ts + +and collect_of_object_kit_spread_operand ?log_unresolved cx acc = function + | Object.Spread.Slice operand_slice -> + collect_of_object_kit_spread_operand_slice ?log_unresolved cx acc operand_slice + | Object.Spread.Type t -> collect_of_type ?log_unresolved cx acc t + +and collect_of_object_kit_spread_operands ?log_unresolved cx acc operands = + List.fold_left + (fun acc op -> collect_of_object_kit_spread_operand ?log_unresolved cx acc op) + acc + operands + +and collect_of_type_map ?log_unresolved cx acc = function + | TupleMap t + | ObjectMap t + | ObjectMapi t -> + collect_of_type ?log_unresolved cx acc t + +(* In some positions, like annots, we trust that tvars are 0->1. *) +and collect_of_binding ?log_unresolved cx acc = function + | OpenT ((_, id) as tvar) -> + let (id, constraints) = Context.find_constraints cx id in + if IMap.mem id acc then + acc + else ( + match constraints with + | FullyResolved _ -> + (* Everything reachable from this type is certainly resolved, so we can + avoid walking the type entirely. *) + acc + | Resolved (_, t) -> + let acc = IMap.add id OpenResolved acc in + collect_of_type ?log_unresolved cx acc t + | Unresolved _ -> IMap.add id (Binding tvar) acc + ) + | t -> collect_of_type ?log_unresolved cx acc t + +(* TODO: Support for use types is currently sketchy. Full resolution of use + types are only needed for choice-making on intersections. We care about + calls in particular because one of the biggest uses of intersections is + function overloading. More uses will be added over time. 
*) +and collect_of_use ?log_unresolved cx acc = function + | UseT (_, t) -> collect_of_type ?log_unresolved cx acc t + | CallT (_, _, fct) -> + let arg_types = + Core_list.map + ~f:(function + | Arg t + | SpreadArg t -> + t) + fct.call_args_tlist + in + collect_of_types ?log_unresolved cx acc (arg_types @ [fct.call_tout]) + | GetPropT (_, _, _, t_out) -> collect_of_type ?log_unresolved cx acc t_out + | _ -> acc diff --git a/src/typing/resolvable_type_job.mli b/src/typing/resolvable_type_job.mli new file mode 100644 index 00000000000..995933224ec --- /dev/null +++ b/src/typing/resolvable_type_job.mli @@ -0,0 +1,20 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +type t = + | Binding of Type.tvar + | OpenResolved + | OpenUnresolved of int option * Reason.reason * Constraint.ident + +val collect_of_types : + ?log_unresolved:int -> Context.t -> Reason.reason -> t IMap.t -> Type.t list -> t IMap.t + +val collect_of_type : + ?log_unresolved:int -> Context.t -> Reason.reason -> t IMap.t -> Type.t -> t IMap.t + +val collect_of_use : + ?log_unresolved:int -> Context.t -> Reason.reason -> t IMap.t -> Type.use_t -> t IMap.t diff --git a/src/typing/scope.ml b/src/typing/scope.ml index d1df3c9f90d..a986deae63c 100644 --- a/src/typing/scope.ml +++ b/src/typing/scope.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -19,26 +19,29 @@ let mk_id = Reason.mk_id (* entry state *) module State = struct - type t = Undeclared | Declared | MaybeInitialized | Initialized + type t = + | Undeclared + | Declared + | MaybeInitialized + | Initialized let to_int = function - | Undeclared -> 0 - | Declared -> 1 - | MaybeInitialized -> 2 - | Initialized -> 3 + | Undeclared -> 0 + | Declared -> 1 + | MaybeInitialized -> 2 + | Initialized -> 3 let to_string = function - | Undeclared -> "Undeclared" - | Declared -> "Declared" - | MaybeInitialized -> "MaybeInitialized" - | Initialized -> "Initialized" + | Undeclared -> "Undeclared" + | Declared -> "Declared" + | MaybeInitialized -> "MaybeInitialized" + | Initialized -> "Initialized" let compare x y = Pervasives.compare (to_int x) (to_int y) end (* entries for vars/lets, consts and types *) module Entry = struct - type value_kind = (* consts are either explicit bindings or e.g. 
const params *) | Const of const_binding_kind @@ -52,6 +55,7 @@ module Entry = struct | ConstImportBinding | ConstParamBinding | ConstVarBinding + | EnumNameBinding and let_binding_kind = | LetVarBinding @@ -67,29 +71,27 @@ module Entry = struct | ConstlikeVarBinding let string_of_value_kind = function - | Const ConstImportBinding -> "import" - | Const ConstParamBinding -> "const param" - | Const ConstVarBinding -> "const" - | Let LetVarBinding -> "let" - | Let ConstlikeLetVarBinding -> "let" - | Let ClassNameBinding -> "class" - | Let CatchParamBinding -> "catch" - | Let FunctionBinding -> "function" - | Let ParamBinding -> "param" - | Let ConstlikeParamBinding -> "param" - | Var VarBinding -> "var" - | Var ConstlikeVarBinding -> "var" + | Const ConstImportBinding -> "import" + | Const ConstParamBinding -> "const param" + | Const ConstVarBinding -> "const" + | Const EnumNameBinding -> "enum" + | Let LetVarBinding -> "let" + | Let ConstlikeLetVarBinding -> "let" + | Let ClassNameBinding -> "class" + | Let CatchParamBinding -> "catch" + | Let FunctionBinding -> "function" + | Let ParamBinding -> "param" + | Let ConstlikeParamBinding -> "param" + | Var VarBinding -> "var" + | Var ConstlikeVarBinding -> "var" type value_binding = { kind: value_kind; value_state: State.t; - (* The location where the binding was declared/created *) - value_declare_loc: Loc.t; - + value_declare_loc: ALoc.t; (* The last location (in this scope) where the entry value was assigned *) - value_assign_loc: Loc.t; - + value_assign_loc: ALoc.t; specific: Type.t; general: Type.t; } @@ -101,85 +103,84 @@ module Entry = struct type type_binding = { type_binding_kind: type_binding_kind; type_state: State.t; - type_loc: Loc.t; - _type: Type.t; + type_loc: ALoc.t; + type_: Type.t; } type t = - | Value of value_binding - | Type of type_binding - | Class of Type.class_binding + | Value of value_binding + | Type of type_binding + | Class of Type.class_binding (* constructors *) let new_class class_binding_id class_private_fields class_private_static_fields = Class { Type.class_binding_id; Type.class_private_fields; Type.class_private_static_fields } let new_value kind state specific general value_declare_loc = - Value { - kind; - value_state = state; - value_declare_loc; - value_assign_loc = value_declare_loc; - specific; - general; - } - - let new_const ~loc ?(state=State.Undeclared) ?(kind=ConstVarBinding) t = + Value + { + kind; + value_state = state; + value_declare_loc; + value_assign_loc = value_declare_loc; + specific; + general; + } + + let new_const ~loc ?(state = State.Undeclared) ?(kind = ConstVarBinding) t = new_value (Const kind) state t t loc - let new_import ~loc t = - new_value (Const ConstImportBinding) State.Initialized t t loc + let new_import ~loc t = new_value (Const ConstImportBinding) State.Initialized t t loc - let new_let ~loc ?(state=State.Undeclared) ?(kind=LetVarBinding) t = + let new_let ~loc ?(state = State.Undeclared) ?(kind = LetVarBinding) t = new_value (Let kind) state t t loc - let new_var ~loc ?(state=State.Undeclared) ?(kind=VarBinding) ?specific general = - let specific = match specific with Some t -> t | None -> general in + let new_var ~loc ?(state = State.Undeclared) ?(kind = VarBinding) ?specific general = + let specific = + match specific with + | Some t -> t + | None -> general + in new_value (Var kind) state specific general loc - let new_type_ type_binding_kind state loc _type = - Type { - type_binding_kind; - type_state = state; - type_loc = loc; - _type - } + let new_type_ 
type_binding_kind state loc type_ = + Type { type_binding_kind; type_state = state; type_loc = loc; type_ } - let new_type ~loc ?(state=State.Undeclared) _type = - new_type_ TypeBinding state loc _type + let new_type ~loc ?(state = State.Undeclared) type_ = new_type_ TypeBinding state loc type_ - let new_import_type ~loc _type = - new_type_ ImportTypeBinding State.Initialized loc _type + let new_import_type ~loc type_ = new_type_ ImportTypeBinding State.Initialized loc type_ (* accessors *) let entry_loc = function - | Value v -> v.value_declare_loc - | Type t -> t.type_loc - | Class _ -> Loc.none + | Value v -> v.value_declare_loc + | Type t -> t.type_loc + | Class _ -> ALoc.none let assign_loc = function - | Value v -> v.value_assign_loc - | Type t -> t.type_loc - | Class _ -> Loc.none + | Value v -> v.value_assign_loc + | Type t -> t.type_loc + | Class _ -> ALoc.none let declared_type = function - | Value v -> v.general - | Type t -> t._type - | Class _ -> assert_false "Internal Error: Class bindings have no type" + | Value v -> v.general + | Type t -> t.type_ + | Class _ -> assert_false "Internal Error: Class bindings have no type" let actual_type = function - | Value v -> v.specific - | Type t -> t._type - | Class _ -> assert_false "Internal Error: Class bindings have no type" + | Value v -> v.specific + | Type t -> t.type_ + | Class _ -> assert_false "Internal Error: Class bindings have no type" let string_of_kind = function - | Value v -> string_of_value_kind v.kind - | Type _ -> "type" - | Class c -> spf "Class %i" c.Type.class_binding_id + | Value v -> string_of_value_kind v.kind + | Type _ -> "type" + | Class c -> spf "Class %s" (ALoc.debug_to_string c.Type.class_binding_id) - let kind_of_value (value: value_binding) = value.kind - let general_of_value (value: value_binding) = value.general - let state_of_value (value: value_binding) = value.value_state + let kind_of_value (value : value_binding) = value.kind + + let general_of_value (value : value_binding) = value.general + + let state_of_value (value : value_binding) = value.value_state (** Given a named entry, return a new Value entry with specific type replaced with general type for non-internal, non-Const value entries. 
Types, consts @@ -188,25 +189,22 @@ module Entry = struct *) let havoc name entry = match entry with - | Type _ -> - entry - | Value ({ kind = Const _; specific = Type.DefT (_, Type.EmptyT); _ } as v) -> + | Type _ -> entry + | Value ({ kind = Const _; specific = Type.DefT (_, _, Type.EmptyT _); _ } as v) -> (* cleared consts: see note on Env.reset_current_activation *) - if Reason.is_internal_name name - then entry - else Value { v with specific = v.general } - | Value { kind = Const _; _ } -> - entry - | Value { kind = Var ConstlikeVarBinding; _ } -> - entry - | Value { kind = Let ConstlikeLetVarBinding; _ } -> - entry - | Value { kind = Let ConstlikeParamBinding; _ } -> - entry + if Reason.is_internal_name name then + entry + else + Value { v with specific = v.general } + | Value { kind = Const _; _ } -> entry + | Value { kind = Var ConstlikeVarBinding; _ } -> entry + | Value { kind = Let ConstlikeLetVarBinding; _ } -> entry + | Value { kind = Let ConstlikeParamBinding; _ } -> entry | Value v -> - if Reason.is_internal_name name - then entry - else Value { v with specific = v.general } + if Reason.is_internal_name name then + entry + else + Value { v with specific = v.general } | Class _ -> entry let reset loc name entry = @@ -215,53 +213,56 @@ module Entry = struct | Type _ -> entry | Value v -> - if Reason.is_internal_name name - then entry - else Value { v with specific = Type.EmptyT.at loc } + if Reason.is_internal_name name then + entry + else + Value { v with specific = Type.EmptyT.at loc |> Type.with_trust Trust.bogus_trust } let is_lex = function | Type _ -> false | Class _ -> true | Value v -> - match v.kind with + (match v.kind with | Const _ -> true | Let _ -> true - | _ -> false + | _ -> false) end type var_scope_kind = - | Ordinary (* function or module *) - | Async (* async function *) - | Generator (* generator function *) - | AsyncGenerator (* async generator function *) - | Module (* module scope *) - | Global (* global scope *) - | Predicate (* predicate function *) - | Ctor (* constructor *) + | Ordinary (* function or module *) + | Async (* async function *) + | Generator (* generator function *) + | AsyncGenerator (* async generator function *) + | Module (* module scope *) + | Global (* global scope *) + | Predicate (* predicate function *) + | Ctor + +(* constructor *) let string_of_var_scope_kind = function -| Ordinary -> "Ordinary" -| Async -> "Async" -| Generator -> "Generator" -| AsyncGenerator -> "AsyncGenerator" -| Module -> "Module" -| Global -> "Global" -| Predicate -> "Predicate" -| Ctor -> "Constructor" + | Ordinary -> "Ordinary" + | Async -> "Async" + | Generator -> "Generator" + | AsyncGenerator -> "AsyncGenerator" + | Module -> "Module" + | Global -> "Global" + | Predicate -> "Predicate" + | Ctor -> "Constructor" (* var and lexical scopes differ in hoisting behavior and auxiliary properties *) (* TODO lexical scope support *) type kind = -| VarScope of var_scope_kind -| LexScope + | VarScope of var_scope_kind + | LexScope let string_of_kind = function -| VarScope kind -> spf "VarScope %s" (string_of_var_scope_kind kind) -| LexScope -> "LexScope" + | VarScope kind -> spf "VarScope %s" (string_of_var_scope_kind kind) + | LexScope -> "LexScope" type refi_binding = { - refi_loc: Loc.t; + refi_loc: ALoc.t; refined: Type.t; original: Type.t; } @@ -283,21 +284,21 @@ type t = { kind: kind; mutable entries: Entry.t SMap.t; mutable refis: refi_binding Key_map.t; - mutable declare_func_annots: (Loc.t, Loc.t * Type.t) Flow_ast.Type.annotation SMap.t; + mutable 
declare_func_annots: (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation SMap.t; } (* ctor helper *) -let fresh_impl kind = { - id = mk_id (); - kind; - entries = SMap.empty; - refis = Key_map.empty; - declare_func_annots = SMap.empty; -} +let fresh_impl kind = + { + id = mk_id (); + kind; + entries = SMap.empty; + refis = Key_map.empty; + declare_func_annots = SMap.empty; + } (* return a fresh scope of the most common kind (var) *) -let fresh ?(var_scope_kind=Ordinary) () = - fresh_impl (VarScope var_scope_kind) +let fresh ?(var_scope_kind = Ordinary) () = fresh_impl (VarScope var_scope_kind) (* return a fresh lexical scope *) let fresh_lex () = fresh_impl LexScope @@ -309,24 +310,19 @@ let clone { id; kind; entries; refis; declare_func_annots } = { id; kind; entries; refis; declare_func_annots } (* use passed f to iterate over all scope entries *) -let iter_entries f scope = - SMap.iter f scope.entries +let iter_entries f scope = SMap.iter f scope.entries (* use passed f to update all scope entries *) -let update_entries f scope = - scope.entries <- SMap.mapi f scope.entries +let update_entries f scope = scope.entries <- SMap.mapi f scope.entries (* add entry to scope *) -let add_entry name entry scope = - scope.entries <- SMap.add name entry scope.entries +let add_entry name entry scope = scope.entries <- SMap.add name entry scope.entries (* remove entry from scope *) -let remove_entry name scope = - scope.entries <- SMap.remove name scope.entries +let remove_entry name scope = scope.entries <- SMap.remove name scope.entries (* get entry from scope, or None *) -let get_entry name scope = - SMap.get name scope.entries +let get_entry name scope = SMap.get name scope.entries (* havoc entry *) let havoc_entry name scope = @@ -335,47 +331,42 @@ let havoc_entry name scope = let entry = Entry.havoc name entry in scope.entries <- SMap.add name entry scope.entries | None -> - assert_false (spf "entry %S not found in scope %d: { %s }" - name scope.id (String.concat ", " - (SMap.fold (fun n _ acc -> n :: acc) scope.entries []))) + assert_false + (spf + "entry %S not found in scope %d: { %s }" + name + scope.id + (String.concat ", " (SMap.fold (fun n _ acc -> n :: acc) scope.entries []))) (* use passed f to update all scope refis *) -let update_refis f scope = - scope.refis <- Key_map.mapi f scope.refis +let update_refis f scope = scope.refis <- Key_map.mapi f scope.refis (* add refi to scope *) -let add_refi key refi scope = - scope.refis <- Key_map.add key refi scope.refis +let add_refi key refi scope = scope.refis <- Key_map.add key refi scope.refis (* remove entry from scope *) -let remove_refi key scope = - scope.refis <- Key_map.remove key scope.refis +let remove_refi key scope = scope.refis <- Key_map.remove key scope.refis (* get entry from scope, or None *) -let get_refi name scope = - Key_map.get name scope.refis +let get_refi name scope = Key_map.get name scope.refis (* havoc a refi *) let havoc_refi key scope = - scope.refis <- scope.refis |> - Key_map.filter (fun k _ -> Key.compare key k != 0) + scope.refis <- scope.refis |> Key_map.filter (fun k _ -> Key.compare key k != 0) (* helper: filter all refis whose expressions involve the given name *) let filter_refis_using_propname ~private_ propname refis = - refis |> Key_map.filter (fun key _ -> - not (Key.uses_propname ~private_ propname key) - ) + refis |> Key_map.filter (fun key _ -> not (Key.uses_propname ~private_ propname key)) (* havoc a scope's refinements: if name is passed, clear refis whose expressions involve it. 
otherwise, clear them all *) let havoc_refis ?name ~private_ scope = - scope.refis <- match name with - | Some name -> - scope.refis |> (filter_refis_using_propname ~private_ name) - | None -> - Key_map.empty + scope.refis <- + (match name with + | Some name -> scope.refis |> filter_refis_using_propname ~private_ name + | None -> Key_map.empty) let havoc_all_refis ?name scope = havoc_refis ?name ~private_:false scope; @@ -396,8 +387,7 @@ let reset loc scope = let add_declare_func_annot name annot scope = scope.declare_func_annots <- SMap.add name annot scope.declare_func_annots -let get_declare_func_annot name scope = - SMap.get name scope.declare_func_annots +let get_declare_func_annot name scope = SMap.get name scope.declare_func_annots let is_lex scope = match scope.kind with @@ -408,3 +398,10 @@ let is_global scope = match scope.kind with | VarScope Global -> true | _ -> false + +let is_toplevel scope = + match scope.kind with + | VarScope Global + | VarScope Module -> + true + | _ -> false diff --git a/src/typing/scope.mli b/src/typing/scope.mli index 567a80431f0..e7d9308273f 100644 --- a/src/typing/scope.mli +++ b/src/typing/scope.mli @@ -1,80 +1,114 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module State : - sig - type t = Undeclared | Declared | MaybeInitialized | Initialized - val to_string : t -> string - val compare : t -> t -> int - end -module Entry : - sig - type value_kind = - | Const of const_binding_kind - | Let of let_binding_kind - | Var of var_binding_kind - and const_binding_kind = - | ConstImportBinding - | ConstParamBinding - | ConstVarBinding - and let_binding_kind = - | LetVarBinding - | ConstlikeLetVarBinding - | ClassNameBinding - | CatchParamBinding - | FunctionBinding - | ParamBinding - | ConstlikeParamBinding - and var_binding_kind = - | VarBinding - | ConstlikeVarBinding - val string_of_value_kind : value_kind -> string - type value_binding = { - kind : value_kind; - value_state : State.t; - value_declare_loc : Loc.t; - value_assign_loc : Loc.t; - specific : Type.t; - general : Type.t; - } - type type_binding_kind = - | ImportTypeBinding - | TypeBinding - type type_binding = { - type_binding_kind: type_binding_kind; - type_state : State.t; - type_loc : Loc.t; - _type : Type.t; - } - type t = Value of value_binding | Type of type_binding | Class of Type.class_binding - val new_class : int -> Type.Properties.id -> Type.Properties.id -> t - val new_value : value_kind -> State.t -> Type.t -> Type.t -> Loc.t -> t - val new_const : - loc:Loc.t -> ?state:State.t -> ?kind:const_binding_kind -> Type.t -> t - val new_import : - loc:Loc.t -> Type.t -> t - val new_let : - loc:Loc.t -> ?state:State.t -> ?kind:let_binding_kind -> Type.t -> t - val new_var : - loc:Loc.t -> ?state:State.t -> ?kind:var_binding_kind -> ?specific:Type.t -> Type.t -> t - val new_type : loc:Loc.t -> ?state:State.t -> Type.t -> t - val new_import_type : loc:Loc.t -> Type.t -> t - val entry_loc : t -> Loc.t - val assign_loc : t -> Loc.t - val declared_type : t -> Type.t - val actual_type : t -> Type.t - val string_of_kind : t -> string - val kind_of_value : value_binding -> value_kind - val general_of_value : value_binding -> Type.t - val state_of_value : value_binding -> State.t - val havoc : string -> t -> t - val reset : Loc.t -> string -> t -> t - val is_lex : t -> bool - end +module State : sig + type t 
= + | Undeclared + | Declared + | MaybeInitialized + | Initialized + + val to_string : t -> string + + val compare : t -> t -> int +end + +module Entry : sig + type value_kind = + | Const of const_binding_kind + | Let of let_binding_kind + | Var of var_binding_kind + + and const_binding_kind = + | ConstImportBinding + | ConstParamBinding + | ConstVarBinding + | EnumNameBinding + + and let_binding_kind = + | LetVarBinding + | ConstlikeLetVarBinding + | ClassNameBinding + | CatchParamBinding + | FunctionBinding + | ParamBinding + | ConstlikeParamBinding + + and var_binding_kind = + | VarBinding + | ConstlikeVarBinding + + val string_of_value_kind : value_kind -> string + + type value_binding = { + kind: value_kind; + value_state: State.t; + value_declare_loc: ALoc.t; + value_assign_loc: ALoc.t; + specific: Type.t; + general: Type.t; + } + + type type_binding_kind = + | ImportTypeBinding + | TypeBinding + + type type_binding = { + type_binding_kind: type_binding_kind; + type_state: State.t; + type_loc: ALoc.t; + type_: Type.t; + } + + type t = + | Value of value_binding + | Type of type_binding + | Class of Type.class_binding + + val new_class : ALoc.t -> Type.Properties.id -> Type.Properties.id -> t + + val new_value : value_kind -> State.t -> Type.t -> Type.t -> ALoc.t -> t + + val new_const : loc:ALoc.t -> ?state:State.t -> ?kind:const_binding_kind -> Type.t -> t + + val new_import : loc:ALoc.t -> Type.t -> t + + val new_let : loc:ALoc.t -> ?state:State.t -> ?kind:let_binding_kind -> Type.t -> t + + val new_var : + loc:ALoc.t -> ?state:State.t -> ?kind:var_binding_kind -> ?specific:Type.t -> Type.t -> t + + val new_type : loc:ALoc.t -> ?state:State.t -> Type.t -> t + + val new_import_type : loc:ALoc.t -> Type.t -> t + + val entry_loc : t -> ALoc.t + + val assign_loc : t -> ALoc.t + + val declared_type : t -> Type.t + + val actual_type : t -> Type.t + + val string_of_kind : t -> string + + val kind_of_value : value_binding -> value_kind + + val general_of_value : value_binding -> Type.t + + val state_of_value : value_binding -> State.t + + val havoc : string -> t -> t + + val reset : ALoc.t -> string -> t -> t + + val is_lex : t -> bool +end + type var_scope_kind = | Ordinary | Async @@ -84,42 +118,77 @@ type var_scope_kind = | Global | Predicate | Ctor + val string_of_var_scope_kind : var_scope_kind -> string -type kind = VarScope of var_scope_kind | LexScope + +type kind = + | VarScope of var_scope_kind + | LexScope + val string_of_kind : kind -> string + type refi_binding = { - refi_loc : Loc.t; - refined : Type.t; - original : Type.t; + refi_loc: ALoc.t; + refined: Type.t; + original: Type.t; } + type t = { - id : int; - kind : kind; - mutable entries : Entry.t SMap.t; - mutable refis : refi_binding Key_map.t; - mutable declare_func_annots: (Loc.t, Loc.t * Type.t) Flow_ast.Type.annotation SMap.t; + id: int; + kind: kind; + mutable entries: Entry.t SMap.t; + mutable refis: refi_binding Key_map.t; + mutable declare_func_annots: (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation SMap.t; } + val fresh_impl : kind -> t + val fresh : ?var_scope_kind:var_scope_kind -> unit -> t + val fresh_lex : unit -> t + val clone : t -> t + val iter_entries : (SMap.key -> Entry.t -> unit) -> t -> unit + val update_entries : (SMap.key -> Entry.t -> Entry.t) -> t -> unit + val add_entry : SMap.key -> Entry.t -> t -> unit + val remove_entry : SMap.key -> t -> unit + val get_entry : SMap.key -> t -> Entry.t option + val havoc_entry : SMap.key -> t -> unit + val update_refis : (Key_map.key -> refi_binding -> 
refi_binding) -> t -> unit + val add_refi : Key_map.key -> refi_binding -> t -> unit + val remove_refi : Key_map.key -> t -> unit + val get_refi : Key_map.key -> t -> refi_binding option + val havoc_refi : Key_map.key -> t -> unit + val filter_refis_using_propname : private_:bool -> string -> 'a Key_map.t -> 'a Key_map.t + val havoc_refis : ?name:string -> private_:bool -> t -> unit + val havoc_all_refis : ?name:string -> t -> unit + val havoc : t -> unit -val reset : Loc.t -> t -> unit -val add_declare_func_annot : string -> (Loc.t, Loc.t * Type.t) Flow_ast.Type.annotation -> t -> unit -val get_declare_func_annot : string -> t -> (Loc.t, Loc.t * Type.t) Flow_ast.Type.annotation option + +val reset : ALoc.t -> t -> unit + +val add_declare_func_annot : + string -> (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation -> t -> unit + +val get_declare_func_annot : + string -> t -> (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation option + val is_lex : t -> bool + val is_global : t -> bool + +val is_toplevel : t -> bool diff --git a/src/typing/sigHash.ml b/src/typing/sigHash.ml index c38e522b127..36a857e6776 100644 --- a/src/typing/sigHash.ml +++ b/src/typing/sigHash.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -27,12 +27,13 @@ changed meaningfully. Finally, these structures may be huge, so we instead compare their digests, - tolerating improbable collisions (cf. SharedMem). + tolerating improbable collisions (cf. SharedMem_js). *) (* NOTE: it's critical that these are all constant constructors, which are - * represented as ints, because we hash in C assuming they are ints. Any - * non-constant constructors will be blocks, and fail to hash properly. *) + * represented as ints, because we hash in C assuming they are ints. (Any + * non-constant constructors will be blocks, and fail to hash properly.) This + * also means that there is effectively no limit on the number of cases. 
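 * As a standalone illustration of that point (not part of this module; the
 * type below is made up), a constant constructor is an unboxed immediate at
 * runtime, while a constructor that carries a payload is a heap block, which
 * is observable with Obj.is_int:
 *
 *     type example =
 *       | ConstantH
 *       | WithPayloadH of int
 *
 *     let () =
 *       (* constant constructor: represented as an int *)
 *       assert (Obj.is_int (Obj.repr ConstantH));
 *       (* constructor with an argument: represented as a block *)
 *       assert (not (Obj.is_int (Obj.repr (WithPayloadH 0))))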
*) type hash = (* def types *) | NumH @@ -48,11 +49,20 @@ type hash = | FunProtoApplyH | FunProtoBindH | FunProtoCallH - | ObjH + | ObjFrozenSealedExactH + | ObjFrozenSealedNotExactH + | ObjFrozenNotSealedExactH + | ObjFrozenNotSealedNotExactH + | ObjNotFrozenSealedExactH + | ObjNotFrozenSealedNotExactH + | ObjNotFrozenNotSealedExactH + | ObjNotFrozenNotSealedNotExactH | ObjProtoH | MatchingPropH | NullProtoH - | ArrH + | ArrATH + | ArrTupleATH + | ArrROArrayATH | ClassH | OptionalH | EvalH @@ -65,11 +75,7 @@ type hash = | MaybeH | IntersectionH | UnionH - | AnyWithLowerBoundH - | AnyWithUpperBoundH | MergedH - | AnyObjH - | AnyFunH | ShapeH | KeysH | SingletonStrH @@ -78,8 +84,32 @@ type hash = | TypeH | AnnotH | ModuleH + | ModuleStrictH | TvarDestructorH - | CustomFunH + | CustomFunObjectAssignH + | CustomFunObjectGetPrototypeOfH + | CustomFunObjectSetPrototypeOfH + | CustomFunComposeH + | CustomFunReverseComposeH + | CustomFunReactPropTypePrimitiveRequiredH + | CustomFunReactPropTypePrimitiveNotRequiredH + | CustomFunReactPropTypeComplexArrayOfH + | CustomFunReactPropTypeComplexInstanceOfH + | CustomFunReactPropTypeComplexObjectOfH + | CustomFunReactPropTypeComplexOneOfH + | CustomFunReactPropTypeComplexOneOfTypeH + | CustomFunReactPropTypeComplexShapeH + | CustomFunReactCreateClassH + | CustomFunReactCreateElementH + | CustomFunReactCloneElementH + | CustomFunReactElementFactoryH + | CustomFunIdxH + | CustomFunTypeAssertIsH + | CustomFunTypeAssertThrowsH + | CustomFunTypeAssertWrapsH + | CustomFunDebugPrintH + | CustomFunDebugThrowH + | CustomFunDebugSleepH | OpenPredH | CharSetH | ReposH @@ -112,7 +142,6 @@ type hash = | AssertBinaryInLHSH | AssertBinaryInRHSH | AssertForInRHSH - | AssertRestParamH | PredicateH | GuardH | EqH @@ -153,6 +182,7 @@ type hash = | CopyTypeExportsH | ExportNamedH | ExportTypeH + | TypeExportifyH | MapTypeH | ReactKitH | ObjKitH @@ -173,243 +203,313 @@ type hash = | ExtendsUseH | ToStringH | InvariantH + | ReactAbstractComponentH + | ReactPropsToOutH + | ReactInToPropsH + | DestructuringH + | ModuleExportsAssignH -let hash_of_def_ctor = Type.(function - | InstanceT _ -> failwith "undefined hash of InstanceT" - | PolyT _ -> failwith "undefined hash of PolyT" - | IdxWrapper _ -> failwith "undefined hash of IdxWrapper" +let hash_of_def_ctor = + Type.( + function + | InstanceT _ -> failwith "undefined hash of InstanceT" + | PolyT _ -> failwith "undefined hash of PolyT" + | IdxWrapper _ -> failwith "undefined hash of IdxWrapper" + | ArrT (ArrayAT _) -> ArrATH + | ArrT (TupleAT _) -> ArrTupleATH + | ArrT (ROArrayAT _) -> ArrROArrayATH + | BoolT _ -> BoolH + | CharSetT _ -> CharSetH + | ClassT _ -> ClassH + | EmptyT _ -> EmptyH + | FunT _ -> FunH + | MixedT _ -> MixedH + | NullT -> NullH + | NumT _ -> NumH + | ObjT { flags = { frozen; sealed; exact }; _ } -> + begin + match (frozen, sealed, exact) with + | (true, Sealed, true) -> ObjFrozenSealedExactH + | (true, Sealed, false) -> ObjFrozenSealedNotExactH + | (true, UnsealedInFile _, true) -> ObjFrozenNotSealedExactH + | (true, UnsealedInFile _, false) -> ObjFrozenNotSealedNotExactH + | (false, Sealed, true) -> ObjNotFrozenSealedExactH + | (false, Sealed, false) -> ObjNotFrozenSealedNotExactH + | (false, UnsealedInFile _, true) -> ObjNotFrozenNotSealedExactH + | (false, UnsealedInFile _, false) -> ObjNotFrozenNotSealedNotExactH + end + | ReactAbstractComponentT _ -> ReactAbstractComponentH + | SingletonBoolT _ -> SingletonBoolH + | SingletonNumT _ -> SingletonNumH + | SingletonStrT _ -> SingletonStrH + | StrT _ -> 
StrH + | TypeT _ -> TypeH + | VoidT -> VoidH) - | AnyFunT -> AnyFunH - | AnyObjT -> AnyObjH - | AnyT -> AnyH - | ArrT _ -> ArrH - | BoolT _ -> BoolH - | CharSetT _ -> CharSetH - | ClassT _ -> ClassH - | EmptyT -> EmptyH - | FunT _ -> FunH - | IntersectionT _ -> IntersectionH - | MaybeT _ -> MaybeH - | MixedT _ -> MixedH - | NullT -> NullH - | NumT _ -> NumH - | ObjT _ -> ObjH - | OptionalT _ -> OptionalH - | SingletonBoolT _ -> SingletonBoolH - | SingletonNumT _ -> SingletonNumH - | SingletonStrT _ -> SingletonStrH - | StrT _ -> StrH - | TypeT _ -> TypeH - | TypeAppT _ -> TypeAppH - | VoidT -> VoidH - | UnionT _ -> UnionH -) +let hash_of_ctor = + Type.( + function + | OpenT _ -> failwith "undefined hash of OpenT" + | InternalT _ -> failwith "undefined hash of InternalT" + | OpaqueT _ -> failwith "undefined hash of OpaqueT" + | AnyT _ -> AnyH + | AnnotT _ -> AnnotH + | MergedT _ -> MergedH + | BoundT _ -> BoundH + | TypeDestructorTriggerT _ -> TvarDestructorH + | CustomFunT (_, ObjectAssign) -> CustomFunObjectAssignH + | CustomFunT (_, ObjectGetPrototypeOf) -> CustomFunObjectGetPrototypeOfH + | CustomFunT (_, ObjectSetPrototypeOf) -> CustomFunObjectSetPrototypeOfH + | CustomFunT (_, Compose true) -> CustomFunComposeH + | CustomFunT (_, Compose false) -> CustomFunReverseComposeH + | CustomFunT (_, ReactPropType rpt) -> + React.PropType.( + begin + match rpt with + | Primitive (true, _) -> CustomFunReactPropTypePrimitiveRequiredH + | Primitive (false, _) -> CustomFunReactPropTypePrimitiveNotRequiredH + | Complex ArrayOf -> CustomFunReactPropTypeComplexArrayOfH + | Complex InstanceOf -> CustomFunReactPropTypeComplexInstanceOfH + | Complex ObjectOf -> CustomFunReactPropTypeComplexObjectOfH + | Complex OneOf -> CustomFunReactPropTypeComplexOneOfH + | Complex OneOfType -> CustomFunReactPropTypeComplexOneOfTypeH + | Complex Shape -> CustomFunReactPropTypeComplexShapeH + end) + | CustomFunT (_, ReactCreateClass) -> CustomFunReactCreateClassH + | CustomFunT (_, ReactCreateElement) -> CustomFunReactCreateElementH + | CustomFunT (_, ReactCloneElement) -> CustomFunReactCloneElementH + | CustomFunT (_, ReactElementFactory _) -> CustomFunReactElementFactoryH + | CustomFunT (_, Idx) -> CustomFunIdxH + | CustomFunT (_, TypeAssertIs) -> CustomFunTypeAssertIsH + | CustomFunT (_, TypeAssertThrows) -> CustomFunTypeAssertThrowsH + | CustomFunT (_, TypeAssertWraps) -> CustomFunTypeAssertWrapsH + | CustomFunT (_, DebugPrint) -> CustomFunDebugPrintH + | CustomFunT (_, DebugThrow) -> CustomFunDebugThrowH + | CustomFunT (_, DebugSleep) -> CustomFunDebugSleepH + | DefT (_, _, t) -> hash_of_def_ctor t + | EvalT _ -> EvalH + | ExactT _ -> ExactH + | ExistsT _ -> ExistsH + | FunProtoT _ -> FunProtoH + | FunProtoApplyT _ -> FunProtoApplyH + | FunProtoBindT _ -> FunProtoBindH + | FunProtoCallT _ -> FunProtoCallH + | IntersectionT _ -> IntersectionH + | KeysT _ -> KeysH + | MaybeT _ -> MaybeH + | ModuleT (_, _, false) -> ModuleH + | ModuleT (_, _, true) -> ModuleStrictH + | NullProtoT _ -> NullProtoH + | ObjProtoT _ -> ObjProtoH + | OptionalT _ -> OptionalH + | MatchingPropT _ -> MatchingPropH + | OpenPredT _ -> OpenPredH + | ReposT _ -> ReposH + | ShapeT _ -> ShapeH + | ThisClassT _ -> ThisClassH + | ThisTypeAppT _ -> ThisTypeAppH + | TypeAppT _ -> TypeAppH + | UnionT _ -> UnionH) -let hash_of_ctor = Type.(function - | OpenT _ -> failwith "undefined hash of OpenT" - | InternalT _ -> failwith "undefined hash of InternalT" - | OpaqueT _ -> failwith "undefined hash of OpaqueT" - - | AnnotT _ -> AnnotH - | AnyWithLowerBoundT 
_ -> AnyWithLowerBoundH - | AnyWithUpperBoundT _ -> AnyWithUpperBoundH - | MergedT _ -> MergedH - | BoundT _ -> BoundH - | TypeDestructorTriggerT _ -> TvarDestructorH - | CustomFunT _ -> CustomFunH - | DefT (_, t) -> hash_of_def_ctor t - | EvalT _ -> EvalH - | ExactT _ -> ExactH - | ExistsT _ -> ExistsH - | FunProtoT _ -> FunProtoH - | FunProtoApplyT _ -> FunProtoApplyH - | FunProtoBindT _ -> FunProtoBindH - | FunProtoCallT _ -> FunProtoCallH - | KeysT _ -> KeysH - | ModuleT _ -> ModuleH - | NullProtoT _ -> NullProtoH - | ObjProtoT _ -> ObjProtoH - | MatchingPropT _ -> MatchingPropH - | OpenPredT _ -> OpenPredH - | ReposT _ -> ReposH - | ShapeT _ -> ShapeH - | ThisClassT _ -> ThisClassH - | ThisTypeAppT _ -> ThisTypeAppH -) - -let hash_of_use_ctor = Type.(function - | UseT _ -> failwith "undefined hash of UseT" - - | BindT _ -> BindH - | CallT _ -> CallH - | MethodT _ -> MethodH - | SetPropT _ -> SetPropH - | SetPrivatePropT _ -> SetPrivatePropH - | GetPropT _ -> GetPropH - | MatchPropT _ -> MatchPropH - | GetPrivatePropT _ -> GetPrivatePropH - | TestPropT _ -> TestPropH - | SetElemT _ -> SetElemH - | GetElemT _ -> GetElemH - | CallElemT _ -> CallElemH - | GetStaticsT _ -> GetStaticsH - | GetProtoT _ -> GetProtoH - | SetProtoT _ -> SetProtoH - | ReposLowerT _ -> ReposLowerH - | ReposUseT _ -> ReposUseH - | ConstructorT _ -> ConstructorH - | SuperT _ -> SuperH - | ImplementsT _ -> ImplementsH - | MixinT _ -> MixinH - | AdderT _ -> AdderH - | ComparatorT _ -> ComparatorH - | UnaryMinusT _ -> UnaryMinusH - | AssertArithmeticOperandT _ -> AssertArithmeticOperandH - | AssertBinaryInLHST _ -> AssertBinaryInLHSH - | AssertBinaryInRHST _ -> AssertBinaryInRHSH - | AssertForInRHST _ -> AssertForInRHSH - | AssertRestParamT _ -> AssertRestParamH - | PredicateT _ -> PredicateH - | GuardT _ -> GuardH - | EqT _ -> EqH - | AndT _ -> AndH - | OrT _ -> OrH - | NullishCoalesceT _ -> NullishCoalesceH - | NotT _ -> NotH - | SpecializeT _ -> SpecializeH - | ThisSpecializeT _ -> ThisSpecializeH - | VarianceCheckT _ -> VarianceCheckH - | TypeAppVarianceCheckT _ -> TypeAppVarianceCheckH - | ConcretizeTypeAppsT _ -> ConcretizeTypeAppsH - | LookupT _ -> LookupH - | ObjAssignToT _ -> ObjAssignToH - | ObjAssignFromT _ -> ObjAssignFromH - | ObjFreezeT _ -> ObjFreezeH - | ObjRestT _ -> ObjRestH - | ObjSealT _ -> ObjSealH - | ObjTestT _ -> ObjTestH - | ObjTestProtoT _ -> ObjTestProtoH - | ArrRestT _ -> ArrRestH - | UnifyT _ -> UnifyH - | BecomeT _ -> BecomeH - | GetKeysT _ -> GetKeysH - | HasOwnPropT _ -> HasOwnPropH - | GetValuesT _ -> GetValuesH - | ElemT _ -> ElemH - | MakeExactT _ -> MakeExactH - | CJSRequireT _ -> CJSRequireH - | ImportModuleNsT _ -> ImportModuleNsH - | ImportDefaultT _ -> ImportDefaultH - | ImportNamedT _ -> ImportNamedH - | ImportTypeT _ -> ImportTypeH - | ImportTypeofT _ -> ImportTypeofH - | AssertImportIsValueT _ -> AssertImportIsValueH - | CJSExtractNamedExportsT _ -> CJSExtractNamedExportsH - | CopyNamedExportsT _ -> CopyNamedExportsH - | CopyTypeExportsT _ -> CopyTypeExportsH - | ExportNamedT _ -> ExportNamedH - | ExportTypeT _ -> ExportTypeH - | MapTypeT _ -> MapTypeH - | ReactKitT _ -> ReactKitH - | ObjKitT _ -> ObjKitH - | ChoiceKitUseT _ -> ChoiceKitUseH - | IntersectionPreprocessKitT _ -> IntersectionPreprocessKitH - | DebugPrintT _ -> DebugPrintH - | DebugSleepT _ -> DebugSleepH - | SentinelPropTestT _ -> SentinelPropTestH - | IdxUnwrap _ -> IdxUnwrapH - | IdxUnMaybeifyT _ -> IdxUnMaybeifyH - | OptionalChainT _ -> OptionalChainH - | CallLatentPredT _ -> CallLatentPredH - | CallOpenPredT 
_ -> CallOpenPredH - | SubstOnPredT _ -> SubstOnPredH - | RefineT _ -> RefineH - | ResolveSpreadT _ -> ResolveSpreadH - | CondT _ -> CondH - | ExtendsUseT _ -> ExtendsUseH - | ToStringT _ -> ToStringH - | InvariantT _ -> InvariantH -) +let hash_of_use_ctor = + Type.( + function + | UseT _ -> failwith "undefined hash of UseT" + | BindT _ -> BindH + | CallT _ -> CallH + | MethodT _ -> MethodH + | SetPropT _ -> SetPropH + | SetPrivatePropT _ -> SetPrivatePropH + | GetPropT _ -> GetPropH + | MatchPropT _ -> MatchPropH + | GetPrivatePropT _ -> GetPrivatePropH + | TestPropT _ -> TestPropH + | SetElemT _ -> SetElemH + | GetElemT _ -> GetElemH + | CallElemT _ -> CallElemH + | GetStaticsT _ -> GetStaticsH + | GetProtoT _ -> GetProtoH + | SetProtoT _ -> SetProtoH + | ReposLowerT _ -> ReposLowerH + | ReposUseT _ -> ReposUseH + | ConstructorT _ -> ConstructorH + | SuperT _ -> SuperH + | ImplementsT _ -> ImplementsH + | MixinT _ -> MixinH + | AdderT _ -> AdderH + | ComparatorT _ -> ComparatorH + | UnaryMinusT _ -> UnaryMinusH + | AssertArithmeticOperandT _ -> AssertArithmeticOperandH + | AssertBinaryInLHST _ -> AssertBinaryInLHSH + | AssertBinaryInRHST _ -> AssertBinaryInRHSH + | AssertForInRHST _ -> AssertForInRHSH + | PredicateT _ -> PredicateH + | GuardT _ -> GuardH + | EqT _ -> EqH + | AndT _ -> AndH + | OrT _ -> OrH + | NullishCoalesceT _ -> NullishCoalesceH + | NotT _ -> NotH + | SpecializeT _ -> SpecializeH + | ThisSpecializeT _ -> ThisSpecializeH + | VarianceCheckT _ -> VarianceCheckH + | TypeAppVarianceCheckT _ -> TypeAppVarianceCheckH + | ConcretizeTypeAppsT _ -> ConcretizeTypeAppsH + | LookupT _ -> LookupH + | ObjAssignToT _ -> ObjAssignToH + | ObjAssignFromT _ -> ObjAssignFromH + | ObjFreezeT _ -> ObjFreezeH + | ObjRestT _ -> ObjRestH + | ObjSealT _ -> ObjSealH + | ObjTestT _ -> ObjTestH + | ObjTestProtoT _ -> ObjTestProtoH + | ArrRestT _ -> ArrRestH + | UnifyT _ -> UnifyH + | BecomeT _ -> BecomeH + | GetKeysT _ -> GetKeysH + | HasOwnPropT _ -> HasOwnPropH + | GetValuesT _ -> GetValuesH + | ElemT _ -> ElemH + | MakeExactT _ -> MakeExactH + | CJSRequireT _ -> CJSRequireH + | ImportModuleNsT _ -> ImportModuleNsH + | ImportDefaultT _ -> ImportDefaultH + | ImportNamedT _ -> ImportNamedH + | ImportTypeT _ -> ImportTypeH + | ImportTypeofT _ -> ImportTypeofH + | AssertImportIsValueT _ -> AssertImportIsValueH + | CJSExtractNamedExportsT _ -> CJSExtractNamedExportsH + | CopyNamedExportsT _ -> CopyNamedExportsH + | CopyTypeExportsT _ -> CopyTypeExportsH + | ExportNamedT _ -> ExportNamedH + | ExportTypeT _ -> ExportTypeH + | AssertExportIsTypeT _ -> TypeExportifyH + | MapTypeT _ -> MapTypeH + | ReactKitT _ -> ReactKitH + | ObjKitT _ -> ObjKitH + | ChoiceKitUseT _ -> ChoiceKitUseH + | IntersectionPreprocessKitT _ -> IntersectionPreprocessKitH + | DebugPrintT _ -> DebugPrintH + | DebugSleepT _ -> DebugSleepH + | SentinelPropTestT _ -> SentinelPropTestH + | IdxUnwrap _ -> IdxUnwrapH + | IdxUnMaybeifyT _ -> IdxUnMaybeifyH + | OptionalChainT _ -> OptionalChainH + | CallLatentPredT _ -> CallLatentPredH + | CallOpenPredT _ -> CallOpenPredH + | SubstOnPredT _ -> SubstOnPredH + | RefineT _ -> RefineH + | ResolveSpreadT _ -> ResolveSpreadH + | CondT _ -> CondH + | ExtendsUseT _ -> ExtendsUseH + | ToStringT _ -> ToStringH + | InvariantT _ -> InvariantH + | ReactPropsToOut _ -> ReactPropsToOutH + | ReactInToProps _ -> ReactInToPropsH + | DestructuringT _ -> DestructuringH + | ModuleExportsAssignT _ -> ModuleExportsAssignH) let add = Xx.update + let add_int = Xx.update_int + let add_bool = Xx.update_int (* bools 
are ints *) let add_option state f = function | None -> add_int state 0 - | Some x -> add_int state 1; f state x + | Some x -> + add_int state 1; + f state x -let add_literal state f = Type.(function - | Literal (_, x) -> add_int state 0; f state x - | Truthy -> add_int state 1 - | AnyLiteral -> add_int state 2 -) +let add_literal state f = + Type.( + function + | Literal (_, x) -> + add_int state 0; + f state x + | Truthy -> add_int state 1 + | AnyLiteral -> add_int state 2) let add_number_literal state (_, x) = add state x let add_type state t = add_int state (hash_of_ctor t); - let open Type in - match t with - | DefT (_, BoolT b) -> - add_option state add_bool b - | DefT (_, MixedT m) -> - add_int state m - | DefT (_, NumT n) -> - add_literal state add_number_literal n - | DefT (_, SingletonBoolT b) -> - add_bool state b - | DefT (_, SingletonNumT n) -> - add_number_literal state n - | DefT (_, SingletonStrT s) -> - add state s - | DefT (_, StrT s) -> - add_literal state add s - | _ -> () + Type.( + match t with + | DefT (_, _, BoolT b) -> add_option state add_bool b + | DefT (_, _, MixedT m) -> add_int state m + | DefT (_, _, NumT n) -> add_literal state add_number_literal n + | DefT (_, _, SingletonBoolT b) -> add_bool state b + | DefT (_, _, SingletonNumT n) -> add_number_literal state n + | DefT (_, _, SingletonStrT s) -> add state s + | DefT (_, _, StrT s) -> add_literal state add s + | _ -> ()) -let add_use state use = - add_int state (hash_of_use_ctor use) +let add_use state use = add_int state (hash_of_use_ctor use) -let add_file_key state = File_key.(function - | LibFile f -> - add_int state 0; add state f - | SourceFile f -> - add_int state 1; add state f - | JsonFile f -> - add_int state 2; add state f - | ResourceFile f -> - add_int state 3; add state f - | Builtins -> - add_int state 4 -) +let add_file_key state = + File_key.( + function + | LibFile f -> + add_int state 0; + add state f + | SourceFile f -> + add_int state 1; + add state f + | JsonFile f -> + add_int state 2; + add state f + | ResourceFile f -> + add_int state 3; + add state f + | Builtins -> add_int state 4) let add_loc state loc = - let open Loc in - add_option state add_file_key loc.source; - add_int state loc.start.line; - add_int state loc.start.column; - add_int state loc._end.line; - add_int state loc._end.column + Loc.( + add_option state add_file_key loc.source; + add_int state loc.start.line; + add_int state loc.start.column; + add_int state loc._end.line; + add_int state loc._end.column) + +let add_aloc state aloc = + (* When abstract locations (and types-first) are enabled, this should always be true. This is + * because the sig AST contains only abstract locations, and the sig context, under types-first, + * is built from the sig AST. + * + * When they are not enabled, this should always be false. + * + * TODO assert this based on config flags rather than checking it. 
+ *) + if ALoc.ALocRepresentationDoNotUse.is_abstract aloc then ( + let source = ALoc.source aloc in + let key = ALoc.ALocRepresentationDoNotUse.get_key_exn aloc in + add_option state add_file_key source; + add_int state key + ) else + add_loc state (ALoc.to_loc_exn aloc) let add_reason state r = - let open Reason in - add_loc state (aloc_of_reason r |> ALoc.to_loc); - add_loc state (def_loc_of_reason r) + Reason.( + add_aloc state (aloc_of_reason r); + add_aloc state (def_aloc_of_reason r)) let add_polarity = add_int -let add_prop state = Type.(function - | Field (_, _, polarity) -> - add_int state 0; - add_int state polarity - | Get _ -> add_int state 1 - | Set _ -> add_int state 2 - | GetSet _ -> add_int state 3 - | Method _ -> add_int state 4 -) +let add_prop state = + Type.( + function + | Field (_, _, polarity) -> + add_int state 0; + add_int state polarity + | Get _ -> add_int state 1 + | Set _ -> add_int state 2 + | GetSet _ -> add_int state 3 + | Method _ -> add_int state 4) let add_props_map state = - SMap.iter (fun k p -> add state k; add_prop state p) + SMap.iter (fun k p -> + add state k; + add_prop state p) -let add_exports_map state = - SMap.iter (fun k _ -> add state k) +let add_exports_map state = SMap.iter (fun k _ -> add state k) diff --git a/src/typing/sigHash.mli b/src/typing/sigHash.mli index e42aef3a644..252facaa157 100644 --- a/src/typing/sigHash.mli +++ b/src/typing/sigHash.mli @@ -1,15 +1,24 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val add: Xx.state -> string -> unit -val add_int: Xx.state -> int -> unit -val add_type: Xx.state -> Type.t -> unit -val add_use: Xx.state -> Type.use_t -> unit -val add_reason: Xx.state -> Reason.t -> unit -val add_polarity: Xx.state -> Type.polarity -> unit -val add_props_map: Xx.state -> Type.Properties.t -> unit -val add_exports_map: Xx.state -> Type.Exports.t -> unit +val add : Xx.state -> string -> unit + +val add_int : Xx.state -> int -> unit + +val add_aloc : Xx.state -> ALoc.t -> unit + +val add_type : Xx.state -> Type.t -> unit + +val add_use : Xx.state -> Type.use_t -> unit + +val add_reason : Xx.state -> Reason.t -> unit + +val add_polarity : Xx.state -> Polarity.t -> unit + +val add_props_map : Xx.state -> Type.Properties.t -> unit + +val add_exports_map : Xx.state -> Type.Exports.t -> unit diff --git a/src/typing/sort_js.ml b/src/typing/sort_js.ml index 2f6ec1f1630..62db8c8a63b 100644 --- a/src/typing/sort_js.ml +++ b/src/typing/sort_js.ml @@ -1,10 +1,9 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open Utils_js - include Tarjan.Make (File_key) (FilenameMap) (FilenameSet) diff --git a/src/typing/sort_js.mli b/src/typing/sort_js.mli index 635a07c5998..9876e3ac483 100644 --- a/src/typing/sort_js.mli +++ b/src/typing/sort_js.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -11,6 +11,6 @@ open Utils_js cyclic, as well as a topologically sorted list of key lists where any keys in a list only depend on keys in a subsequent list *) -val topsort: roots:FilenameSet.t -> FilenameSet.t FilenameMap.t -> File_key.t Nel.t list -val log: File_key.t Nel.t list -> unit -val reverse: FilenameSet.t FilenameMap.t -> FilenameSet.t FilenameMap.t +val topsort : roots:FilenameSet.t -> FilenameSet.t FilenameMap.t -> File_key.t Nel.t list + +val log : File_key.t Nel.t list -> unit diff --git a/src/typing/speculation.ml b/src/typing/speculation.ml index 3b3aa393734..0948ca5874b 100644 --- a/src/typing/speculation.ml +++ b/src/typing/speculation.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,42 +11,41 @@ (* First up, a model for flow and unify actions that are deferred during speculative matching (and possibly fired afterwards). *) module Action = struct - type t = - | Flow of Type.t * Type.use_t - | Unify of Type.use_op * Type.t * Type.t - | Error of Flow_error.error_message + | Flow of Type.t * Type.use_t + | Unify of Type.use_op * Type.t * Type.t + | Error of Error_message.t (* Extract tvars involved in an action. *) - let tvars = - let open Type in - let f t acc = match t with - | OpenT (r, id) -> IMap.add id r acc - | _ -> acc - in - function - | Flow ((DefT (_, AnyT) | DefT (_, EmptyT)), _) - | Flow (_, UseT (_, (DefT (_, AnyT) | DefT (_, MixedT _)))) - -> IMap.empty - | Flow (t1, UseT (_, t2)) -> f t1 (f t2 IMap.empty) - | Flow (t1, _) -> f t1 IMap.empty - | Unify (_, t1, t2) -> f t1 (f t2 IMap.empty) - | Error _ -> failwith "tvars of error actions don't make sense" + let tvars cx = + Type.( + let f cx t acc = + match t with + | OpenT (r, id) -> + let (root_id, _) = Context.find_root cx id in + IMap.add root_id r acc + | _ -> acc + in + function + | Flow ((AnyT _ | DefT (_, _, EmptyT _)), _) + | Flow (_, UseT (_, (AnyT _ | DefT (_, _, MixedT _)))) -> + IMap.empty + | Flow (t1, UseT (_, t2)) -> f cx t1 (f cx t2 IMap.empty) + | Flow (t1, _) -> f cx t1 IMap.empty + | Unify (_, t1, t2) -> f cx t1 (f cx t2 IMap.empty) + | Error _ -> failwith "tvars of error actions don't make sense") (* Decide when two actions are the same. We use reasonless compare for types involved in the actions. *) let rec eq = function - | Flow (t1, t2), Flow (t1_, t2_) -> - eq_t (t1, t1_) && eq_use_t (t2, t2_) - | Unify (_, t1, t2), Unify (_, t1_, t2_) -> - eq_t (t1, t1_) && eq_t (t2, t2_) + | (Flow (t1, t2), Flow (t1_, t2_)) -> eq_t (t1, t1_) && eq_use_t (t2, t2_) + | (Unify (_, t1, t2), Unify (_, t1_, t2_)) -> eq_t (t1, t1_) && eq_t (t2, t2_) | _ -> false - and eq_t (t, t_) = - Type.reasonless_compare t t_ = 0 + and eq_t (t, t_) = Type.reasonless_compare t t_ = 0 and eq_use_t = function - | Type.UseT (_, t), Type.UseT (_, t_) -> eq_t (t, t_) + | (Type.UseT (_, t), Type.UseT (_, t_)) -> eq_t (t, t_) | _ -> false (* Action extended with a bit that determines whether the action is "benign." @@ -54,7 +53,6 @@ module Action = struct benign. See ignore, ignore_type, and defer_if_relevant below for details. *) type extended_t = bool * t - end type unresolved = ISet.t @@ -63,7 +61,6 @@ type unresolved = ISet.t match. In other words, while we're trying to execute a flow in speculation mode, we use this data structure to record stuff. 
*) module Case = struct - (* A case carries a (local) index that identifies which type we're currently considering among the members of a union or intersection type. This is used only for error reporting. @@ -84,27 +81,34 @@ module Case = struct constrained," i.e., whether it's failure would also imply the failure of the later case. This is approximated by diff'ing the set of unresolved tvars that are involved in non-benign actions in the two cases. *) - let diff case1 case2 = + let diff cx case1 case2 = let { unresolved = ts1; actions = actions1; _ } = case1 in let { actions = actions2; _ } = case2 in (* collect those actions in actions1 that are not benign and don't appear in actions2 *) let diff_actions1 = - List.filter (fun (benign, action1) -> - not benign && - List.for_all (fun (_, action2) -> not (Action.eq (action1, action2))) - actions2 - ) actions1 in + List.filter + (fun (benign, action1) -> + (not benign) + && List.for_all (fun (_, action2) -> not (Action.eq (action1, action2))) actions2) + actions1 + in (* collect those unresolved tvars in ts1 that are involved in actions in diff_actions1 *) let diff_ts1 = - List.fold_left (fun acc (_, diff_action1) -> - IMap.fold (fun id1 r1 acc -> - if ISet.mem id1 ts1 - then IMap.add id1 r1 acc - else acc - ) (Action.tvars diff_action1) acc - ) IMap.empty diff_actions1 in + List.fold_left + (fun acc (_, diff_action1) -> + IMap.fold + (fun id1 r1 acc -> + if ISet.mem id1 ts1 then + IMap.add id1 r1 acc + else + acc) + (Action.tvars cx diff_action1) + acc) + IMap.empty + diff_actions1 + in (* return *) IMap.elements diff_ts1 end @@ -113,12 +117,12 @@ end of lower/upper bounds of union/intersection types, respectively *) let init_speculation cx speculation_id = - Context.set_all_unresolved cx - (IMap.add speculation_id ISet.empty (Context.all_unresolved cx)) + Context.set_all_unresolved cx (IMap.add speculation_id ISet.empty (Context.all_unresolved cx)) let add_unresolved_to_speculation cx speculation_id id = + let (root_id, _) = Context.find_root cx id in Context.all_unresolved cx - |> IMap.add speculation_id (ISet.singleton id) ~combine:ISet.union + |> IMap.add speculation_id (ISet.singleton root_id) ~combine:ISet.union |> Context.set_all_unresolved cx (* Actions that involve some "ignored" unresolved tvars are considered @@ -126,6 +130,7 @@ let add_unresolved_to_speculation cx speculation_id id = that instantiate type parameters, this types, existentials, etc. are ignored. *) type ignore = Constraint.ident option + let ignore_type ignore id r = match ignore with | Some ignore_id when ignore_id = id -> true @@ -143,8 +148,8 @@ type branch = { (* The state maintained by speculative_matches when trying each case of a union/intersection in turn. *) type match_state = -| NoMatch of Flow_error.error_message list -| ConditionalMatch of Case.t + | NoMatch of Error_message.t list + | ConditionalMatch of Case.t module State : sig (* Maintain a stack of speculative branches. See Speculation for the contents @@ -156,19 +161,22 @@ module State : sig (1) flow and unify actions on unresolved tvars are deferred (2) any errors cause short-cutting *) - val set_speculative: branch -> unit - val restore_speculative: unit -> unit - val speculating: unit -> bool + val set_speculative : branch -> unit + + val restore_speculative : unit -> unit + + val speculating : unit -> bool (* decide whether an action should be deferred. when speculating, actions that involve unresolved tvars are deferred. 
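     A sketch of the intended calling pattern (illustrative only; emit_flow
     stands in for whatever actually processes the constraint and is not
     defined here):

       let try_flow cx (l, u) =
         if Speculation.defer_action cx (Speculation.Action.Flow (l, u)) then
           (* recorded on the current case; possibly fired after the match *)
           ()
         else
           emit_flow cx (l, u)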
*) - val defer_action: Context.t -> Action.t -> bool + val defer_action : Context.t -> Action.t -> bool end = struct let speculations = ref [] - let set_speculative branch = - speculations := branch::!speculations - let restore_speculative () = - speculations := List.tl !speculations + + let set_speculative branch = speculations := branch :: !speculations + + let restore_speculative () = speculations := List.tl !speculations + let speculating () = !speculations <> [] (* Decide, for a flow or unify action encountered during a speculative match, @@ -183,32 +191,33 @@ end = struct *) let defer_if_relevant cx branch action = let { ignore; speculation_id; case } = branch in - let open Case in - match action with - | Action.Error _ -> - case.actions <- case.actions @ [true, action]; - true - | _ -> - let action_tvars = Action.tvars action in - let all_unresolved = - IMap.find_unsafe speculation_id (Context.all_unresolved cx) in - let relevant_action_tvars = - IMap.filter (fun id _ -> ISet.mem id all_unresolved) action_tvars in - let defer = not (IMap.is_empty relevant_action_tvars) in - if defer then Case.( - let is_benign = IMap.exists (ignore_type ignore) action_tvars in - if not is_benign - then case.unresolved <- - IMap.fold (fun id _ acc -> ISet.add id acc) - relevant_action_tvars case.unresolved; - case.actions <- case.actions @ [is_benign, action] - ); - defer + Case.( + match action with + | Action.Error _ -> + case.actions <- case.actions @ [(true, action)]; + true + | _ -> + let action_tvars = Action.tvars cx action in + let all_unresolved = IMap.find_unsafe speculation_id (Context.all_unresolved cx) in + let relevant_action_tvars = + IMap.filter (fun id _ -> ISet.mem id all_unresolved) action_tvars + in + let defer = not (IMap.is_empty relevant_action_tvars) in + if defer then ( + Case.( + let is_benign = IMap.exists (ignore_type ignore) action_tvars in + if not is_benign then + case.unresolved <- + IMap.fold (fun id _ acc -> ISet.add id acc) relevant_action_tvars case.unresolved; + case.actions <- case.actions @ [(is_benign, action)]) + ); + defer) let defer_action cx action = - speculating() && - let branch = List.hd !speculations in - defer_if_relevant cx branch action + speculating () + && + let branch = List.hd !speculations in + defer_if_relevant cx branch action end include State diff --git a/src/typing/statement.ml b/src/typing/statement.ml index b51cc56fb18..849548ccac6 100644 --- a/src/typing/statement.ml +++ b/src/typing/statement.ml @@ -1,11 +1,12 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast +module Tast_utils = Typed_ast_utils (* This module contains the traversal functions which set up subtyping constraints for every expression, statement, and declaration form in a @@ -16,379 +17,532 @@ module Ast = Flow_ast point inside a function (and when to narrow or widen their types). 
*) module Anno = Type_annotation +module Class_type_sig = Anno.Class_type_sig +module Object_freeze = Anno.Object_freeze module Flow = Flow_js module T = Type - open Utils_js open Reason open Type open Env.LookupMode -open Destructuring -open Import_export - (*************) (* Utilities *) (*************) -let ident_name (_, name) = name +let ident_name = Flow_ast_utils.name_of_ident + +let mk_ident ~comments name = { Ast.Identifier.name; comments } + +class loc_mapper (typ : Type.t) = + object + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot (x : ALoc.t) = x + + method on_type_annot (x : ALoc.t) = (x, typ) + end let snd_fst ((_, x), _) = x -let translate_identifier_or_literal_key t = Ast.Expression.Object.(function - | Property.Identifier (loc, name) -> Property.Identifier ((loc, t), name) - | Property.Literal (loc, lit) -> Property.Literal ((loc, t), lit) - | Property.PrivateName _ | Property.Computed _ -> assert_false "precondition not met") +let translate_identifier_or_literal_key t = + Ast.Expression.Object.( + function + | Property.Identifier (loc, name) -> Property.Identifier ((loc, t), name) + | Property.Literal (loc, lit) -> Property.Literal ((loc, t), lit) + | Property.PrivateName _ + | Property.Computed _ -> + assert_false "precondition not met") + +let is_call_to_invariant callee = + match callee with + | (_, Ast.Expression.Identifier (_, { Ast.Identifier.name = "invariant"; _ })) -> true + | _ -> false + +let convert_tparam_instantiations cx tparams_map instantiations = + Ast.Expression.TypeParameterInstantiation.( + let rec loop ts tasts cx tparams_map = function + | [] -> (List.rev ts, List.rev tasts) + | ast :: asts -> + begin + match ast with + | Explicit ast -> + let (((_, t), _) as tast) = Anno.convert cx tparams_map ast in + loop (ExplicitArg t :: ts) (Explicit tast :: tasts) cx tparams_map asts + | Implicit loc -> + let reason = mk_reason RImplicitInstantiation loc in + let id = Tvar.mk_no_wrap cx reason in + loop + (ImplicitArg (reason, id) :: ts) + (Implicit (loc, OpenT (reason, id)) :: tasts) + cx + tparams_map + asts + end + in + loop [] [] cx tparams_map instantiations) let convert_targs cx = function - | None -> None, None + | None -> (None, None) | Some (loc, args) -> - let targts, targs_ast = Anno.convert_list cx SMap.empty args in - List.iter (fun t -> - Type_table.set_targ (Context.type_table cx) (TypeUtil.loc_of_t t) t - ) targts; - Some targts, Some (loc, targs_ast) + let (targts, targs_ast) = convert_tparam_instantiations cx SMap.empty args in + (Some targts, Some (loc, targs_ast)) -(************) -(* Visitors *) -(************) +class return_finder = + object (this) + inherit [bool, ALoc.t] Flow_ast_visitor.visitor ~init:false as super -(******************************************************************** - * local inference preliminary pass: traverse AST, collecting - * declarations and populating variable environment (scope stack) - * in prep for main pass - ********************************************************************) + method! return _ node = + (* TODO we could pass over `return;` since it's definitely returning `undefined`. It will likely + * reposition existing errors from the `return;` to the location of the type annotation. 
*) + this#set_acc true; + node -let rec variable_decl cx entry = Ast.Statement.( - let value_kind, bind = match entry.VariableDeclaration.kind with - | VariableDeclaration.Const -> - Scope.Entry.(Const ConstVarBinding), Env.bind_const - | VariableDeclaration.Let -> - Scope.Entry.(Let LetVarBinding), Env.bind_let - | VariableDeclaration.Var -> - Scope.Entry.(Var VarBinding), Env.bind_var - in + method! call _loc expr = + if is_call_to_invariant Ast.Expression.Call.(expr.callee) then this#set_acc true; + expr - let str_of_kind = Scope.Entry.string_of_value_kind value_kind in - - let declarator = Ast.(function - | (loc, Pattern.Identifier { Pattern.Identifier.name=(id_loc, name); _ }) -> - let desc = RIdentifier name in - let r = mk_reason desc id_loc in - (* A variable declaration may have a type annotation, but trying to - resolve the type annotation now may lead to errors, since in general it - may contain types that will be declared later in this scope. So for - now, we create a tvar that will serve as the declared type. Later, we - will resolve the type annotation and unify it with this tvar. *) - let t = Tvar.mk cx r in - Type_table.set (Context.type_table cx) loc t; - bind cx name t id_loc - | (loc, _) as p -> - let pattern_name = internal_pattern_name loc in - let desc = RCustom (spf "%s _" str_of_kind) in - let r = mk_reason desc loc in - let annot = type_of_pattern p in - (* TODO: delay resolution of type annotation like above? *) - let t, _ = Anno.mk_type_annotation cx SMap.empty r annot in - bind cx pattern_name t loc; - let expr _ _ = - (* don't eval computed property keys *) - Typed_ast.error_annot, Typed_ast.Expression.error in - (destructuring cx ~expr t None None p ~f:(fun ~use_op:_ loc name _default t -> - let t = match annot with - | None -> t - | Some _ -> - let r = mk_reason (RIdentifier name) loc in - EvalT (t, DestructuringT (r, Become), mk_id()) - in - Type_table.set (Context.type_table cx) loc t; - bind cx name t loc - ) : (Loc.t, Loc.t * T.t) Ast.Pattern.t) |> ignore; - ) in + method! throw _loc stmt = + this#set_acc true; + stmt - VariableDeclaration.(entry.declarations |> List.iter (function - | (_, { Declarator.id; _; }) -> declarator id - )); -) + method! function_body_any body = + begin + match body with + (* If it's a body expression, some value is implicitly returned *) + | Flow_ast.Function.BodyExpression _ -> this#set_acc true + | _ -> () + end; + super#function_body_any body -and toplevel_decls cx = - List.iter (statement_decl cx) + (* Any returns in these constructs would be for nested function definitions, so we short-circuit + *) + method! class_ _ x = x -(* TODO: detect structural misuses abnormal control flow constructs *) -and statement_decl cx = Ast.Statement.( - let block_body cx { Block.body } = - Env.in_lex_scope cx (fun () -> - toplevel_decls cx body - ) - in + method! 
function_declaration _ x = x + end - let catch_clause cx { Try.CatchClause.body = (_, b); _ } = - block_body cx b - in +let might_have_nonvoid_return loc function_ast = + let finder = new return_finder in + finder#eval (finder#function_ loc) function_ast - function +module Func_stmt_config = struct + type 'T ast = (ALoc.t, 'T) Ast.Function.Params.t - | (_, Empty) -> () + type 'T param_ast = (ALoc.t, 'T) Ast.Function.Param.t - | (_, Block b) -> - block_body cx b + type 'T rest_ast = (ALoc.t, 'T) Ast.Function.RestParam.t - | (_, Expression _) -> () + type expr = + Context.t -> + (ALoc.t, ALoc.t) Flow_ast.Expression.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Expression.t - | (_, If { If.consequent; alternate; _ }) -> - statement_decl cx consequent; - (match alternate with + type pattern = + | Id of (ALoc.t, ALoc.t * Type.t) Ast.Pattern.Identifier.t + | Object of { + annot: (ALoc.t, ALoc.t * Type.t) Ast.Type.annotation_or_hint; + properties: (ALoc.t, ALoc.t) Ast.Pattern.Object.property list; + } + | Array of { + annot: (ALoc.t, ALoc.t * Type.t) Ast.Type.annotation_or_hint; + elements: (ALoc.t, ALoc.t) Ast.Pattern.Array.element option list; + comments: (ALoc.t, unit) Ast.Syntax.t option; + } + + type param = + | Param of { + t: Type.t; + loc: ALoc.t; + ploc: ALoc.t; + pattern: pattern; + default: (ALoc.t, ALoc.t) Ast.Expression.t option; + expr: expr; + } + + type rest = + | Rest of { + t: Type.t; + loc: ALoc.t; + ploc: ALoc.t; + id: (ALoc.t, ALoc.t * Type.t) Ast.Pattern.Identifier.t; + } + + let param_type (Param { t; pattern; default; _ }) = + match pattern with + | Id id -> + let { Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name; _ }); optional; _ } = id in + let t = + if optional || default <> None then + Type.optional t + else + t + in + (Some name, t) + | _ -> + let t = + if default <> None then + Type.optional t + else + t + in + (None, t) + + let rest_type (Rest { t; loc; id; _ }) = + let { Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name; _ }); _ } = id in + (Some name, loc, t) + + let subst_param cx map param = + let (Param { t; loc; ploc; pattern; default; expr }) = param in + let t = Flow.subst cx map t in + Param { t; loc; ploc; pattern; default; expr } + + let subst_rest cx map rest = + let (Rest { t; loc; ploc; id }) = rest in + let t = Flow.subst cx map t in + Rest { t; loc; ploc; id } + + let bind cx name t loc = + Scope.( + if Context.enable_const_params cx then + let kind = Entry.ConstParamBinding in + Env.bind_implicit_const ~state:State.Initialized kind cx name t loc + else + let kind = + if Env.promote_to_const_like cx loc then + Entry.ConstlikeParamBinding + else + Entry.ParamBinding + in + Env.bind_implicit_let ~state:State.Initialized kind cx name t loc) + + let destruct cx ~use_op:_ loc name default t = + Option.iter + ~f:(fun d -> + let reason = mk_reason (RIdentifier name) loc in + let default_t = Flow.mk_default cx reason d in + Flow.flow_t cx (default_t, t)) + default; + bind cx name t loc + + let eval_default cx ~expr = function + | None -> None + | Some e -> Some (expr cx e) + + let eval_param cx (Param { t; loc; ploc; pattern; default; expr }) = + match pattern with + | Id id -> + let default = eval_default cx ~expr default in + let () = + match default with | None -> () - | Some st -> statement_decl cx st - ) + | Some ((_, default_t), _) -> Flow.flow_t cx (default_t, t) + in + let () = + let { Ast.Pattern.Identifier.name = ((loc, _), { Ast.Identifier.name; _ }); optional; _ } = + id + in + let t = + if optional && default = None then + 
Type.optional t + else + t + in + bind cx name t loc + in + (loc, { Ast.Function.Param.argument = ((ploc, t), Ast.Pattern.Identifier id); default }) + | Object { annot; properties } -> + let default = eval_default cx ~expr default in + let properties = + let default = Option.map default (fun ((_, t), _) -> Default.expr t) in + let init = + Destructuring.empty + ?default + t + ~annot: + (match annot with + | Ast.Type.Missing _ -> false + | Ast.Type.Available _ -> true) + in + let f = destruct cx in + Destructuring.object_properties cx ~expr ~f init properties + in + ( loc, + { + Ast.Function.Param.argument = + ((ploc, t), Ast.Pattern.Object { Ast.Pattern.Object.properties; annot }); + default; + } ) + | Array { annot; elements; comments } -> + let default = eval_default cx ~expr default in + let elements = + let default = Option.map default (fun ((_, t), _) -> Default.expr t) in + let init = + Destructuring.empty + ?default + t + ~annot: + (match annot with + | Ast.Type.Missing _ -> false + | Ast.Type.Available _ -> true) + in + let f = destruct cx in + Destructuring.array_elements cx ~expr ~f init elements + in + ( loc, + { + Ast.Function.Param.argument = + ((ploc, t), Ast.Pattern.Array { Ast.Pattern.Array.elements; annot; comments }); + default; + } ) + + let eval_rest cx (Rest { t; loc; ploc; id }) = + let () = + let { Ast.Pattern.Identifier.name = ((loc, _), { Ast.Identifier.name; _ }); _ } = id in + bind cx name t loc + in + (loc, { Ast.Function.RestParam.argument = ((ploc, t), Ast.Pattern.Identifier id) }) +end + +module Func_stmt_params = Func_params.Make (Func_stmt_config) +module Func_stmt_sig = Func_sig.Make (Func_stmt_params) +module Class_stmt_sig = Class_sig.Make (Func_stmt_sig) + +(************) +(* Visitors *) +(************) - | (_, Labeled { Labeled.body; _ }) -> - statement_decl cx body +(******************************************************************** + * local inference preliminary pass: traverse AST, collecting + * declarations and populating variable environment (scope stack) + * in prep for main pass + ********************************************************************) - | (_, Break _) -> () +let rec variable_decl cx { Ast.Statement.VariableDeclaration.kind; declarations } = + let bind = + match kind with + | Ast.Statement.VariableDeclaration.Const -> Env.bind_const + | Ast.Statement.VariableDeclaration.Let -> Env.bind_let + | Ast.Statement.VariableDeclaration.Var -> Env.bind_var + in + Flow_ast_utils.fold_bindings_of_variable_declarations + (fun () (loc, { Ast.Identifier.name; comments = _ }) -> + let reason = mk_reason (RIdentifier name) loc in + let t = Tvar.mk cx reason in + bind cx name t loc) + () + declarations - | (_, Continue _) -> () +and toplevel_decls cx = List.iter (statement_decl cx) - | (_, With _) -> +(* TODO: detect structural misuses abnormal control flow constructs *) +and statement_decl cx = + Ast.Statement.( + let block_body cx { Block.body } = Env.in_lex_scope cx (fun () -> toplevel_decls cx body) in + let catch_clause cx { Try.CatchClause.body = (_, b); _ } = block_body cx b in + function + | (_, Empty) -> () + | (_, Block b) -> block_body cx b + | (_, Expression _) -> () + | (_, If { If.consequent; alternate; _ }) -> + statement_decl cx consequent; + (match alternate with + | None -> () + | Some st -> statement_decl cx st) + | (_, Labeled { Labeled.body; _ }) -> statement_decl cx body + | (_, Break _) -> () + | (_, Continue _) -> () + | (_, With _) -> (* TODO disallow or push vars into env? 
*) () - - | (_, DeclareTypeAlias { TypeAlias.id = (name_loc, name); _ } ) - | (_, TypeAlias { TypeAlias.id = (name_loc, name); _ } ) -> + | (_, DeclareTypeAlias { TypeAlias.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ }) + | (_, TypeAlias { TypeAlias.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ }) -> let r = DescFormat.type_reason name name_loc in let tvar = Tvar.mk cx r in Env.bind_type cx name tvar name_loc - - | (_, DeclareOpaqueType { OpaqueType.id = (name_loc, name); _ } ) - | (_, OpaqueType { OpaqueType.id = (name_loc, name); _ } ) -> + | ( _, + DeclareOpaqueType { OpaqueType.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } + ) + | (_, OpaqueType { OpaqueType.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ }) -> let r = DescFormat.type_reason name name_loc in let tvar = Tvar.mk cx r in Env.bind_type cx name tvar name_loc - - | (_, Switch { Switch.cases; _ }) -> + | (_, Switch { Switch.cases; _ }) -> Env.in_lex_scope cx (fun () -> - cases |> List.iter (fun (_, { Switch.Case.consequent; _ }) -> - toplevel_decls cx consequent - ) - ) - - | (_, Return _) -> () - - | (_, Throw _) -> () - - | (_, Try { Try.block = (_, b); handler; finalizer }) -> + cases + |> List.iter (fun (_, { Switch.Case.consequent; _ }) -> toplevel_decls cx consequent)) + | (_, Return _) -> () + | (_, Throw _) -> () + | (_, Try { Try.block = (_, b); handler; finalizer; comments = _ }) -> block_body cx b; (match handler with - | None -> () - | Some (_, h) -> catch_clause cx h - ); + | None -> () + | Some (_, h) -> catch_clause cx h); (match finalizer with - | None -> () - | Some (_, b) -> block_body cx b - ) - - | (_, While { While.body; _ }) -> - statement_decl cx body - - | (_, DoWhile { DoWhile.body; _ }) -> - statement_decl cx body - - | (_, For { For.init; body; _ }) -> + | None -> () + | Some (_, b) -> block_body cx b) + | (_, While { While.body; _ }) -> statement_decl cx body + | (_, DoWhile { DoWhile.body; _ }) -> statement_decl cx body + | (_, For { For.init; body; _ }) -> Env.in_lex_scope cx (fun () -> - (match init with - | Some (For.InitDeclaration (_, decl)) -> - variable_decl cx decl - | _ -> () - ); - statement_decl cx body - ) - - | (_, ForIn { ForIn.left; body; _ }) -> + (match init with + | Some (For.InitDeclaration (_, decl)) -> variable_decl cx decl + | _ -> ()); + statement_decl cx body) + | (_, ForIn { ForIn.left; body; _ }) -> Env.in_lex_scope cx (fun () -> - (match left with - | ForIn.LeftDeclaration (_, decl) -> - variable_decl cx decl - | _ -> () - ); - statement_decl cx body - ) - - | (_, ForOf { ForOf.left; body; _ }) -> + (match left with + | ForIn.LeftDeclaration (_, decl) -> variable_decl cx decl + | _ -> ()); + statement_decl cx body) + | (_, ForOf { ForOf.left; body; _ }) -> Env.in_lex_scope cx (fun () -> - (match left with - | ForOf.LeftDeclaration (_, decl) -> - variable_decl cx decl - | _ -> () - ); - statement_decl cx body - ) - - | (_, Debugger) -> () - - | (loc, FunctionDeclaration func) -> - (match func.Ast.Function.id with - | Some (_, name) -> - let r = func_reason func loc in + (match left with + | ForOf.LeftDeclaration (_, decl) -> variable_decl cx decl + | _ -> ()); + statement_decl cx body) + | (_, Debugger) -> () + | (loc, FunctionDeclaration { Ast.Function.id; async; generator; _ }) -> + (match id with + | Some (_, { Ast.Identifier.name; comments = _ }) -> + let r = func_reason ~async ~generator loc in let tvar = Tvar.mk cx r in Env.bind_fun cx name tvar loc | None -> - failwith ( - "Flow Error: Nameless function declarations 
should always be given " ^ - "an implicit name before they get hoisted!" - ) - ) - - | (loc, DeclareVariable { DeclareVariable.id = (id_loc, name); _ }) -> + failwith + ( "Flow Error: Nameless function declarations should always be given " + ^ "an implicit name before they get hoisted!" )) + | (_, EnumDeclaration { EnumDeclaration.id = (name_loc, { Ast.Identifier.name; _ }); _ }) -> + let r = DescFormat.type_reason name name_loc in + let tvar = Tvar.mk cx r in + Env.bind_implicit_const Scope.Entry.EnumNameBinding cx name tvar name_loc + | ( loc, + DeclareVariable { DeclareVariable.id = (id_loc, { Ast.Identifier.name; comments = _ }); _ } + ) -> let r = mk_reason (RCustom (spf "declare %s" name)) loc in let t = Tvar.mk cx r in - Type_table.set (Context.type_table cx) id_loc t; Env.bind_declare_var cx name t id_loc - - | (loc, DeclareFunction ({ DeclareFunction. - id = (id_loc, name); - annot; - _; } as declare_function)) -> - (match declare_function_to_function_declaration cx declare_function with + | ( loc, + DeclareFunction + ( { DeclareFunction.id = (id_loc, { Ast.Identifier.name; comments = _ }); _ } as + declare_function ) ) -> + (match declare_function_to_function_declaration cx loc declare_function with | None -> - let r = mk_reason (RCustom (spf "declare %s" name)) loc in - let t, annot' = - Anno.mk_type_annotation cx SMap.empty r (Some annot) in - Type_table.set (Context.type_table cx) id_loc t; - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - Env.bind_declare_fun cx name t id_loc; - Option.iter - ~f:(fun annot' -> Scope.add_declare_func_annot name annot' (Env.peek_scope ())) - annot'; - | Some (func_decl, _) -> - statement_decl cx (loc, func_decl) - ) - - | (_, VariableDeclaration decl) -> - variable_decl cx decl - - | (_, ClassDeclaration { Ast.Class.id; _ }) -> ( - match id with - | Some (name_loc, name) -> + let r = mk_reason (RCustom (spf "declare %s" name)) loc in + let t = Tvar.mk cx r in + Env.bind_declare_fun cx name t id_loc + | Some (func_decl, _) -> statement_decl cx (loc, func_decl)) + | (_, VariableDeclaration decl) -> variable_decl cx decl + | (_, ClassDeclaration { Ast.Class.id; _ }) -> + (match id with + | Some (name_loc, { Ast.Identifier.name; comments = _ }) -> let r = mk_reason (RType name) name_loc in let tvar = Tvar.mk cx r in Env.bind_implicit_let Scope.Entry.ClassNameBinding cx name tvar name_loc - | None -> () - ) - - | (_, DeclareClass { DeclareClass.id = (name_loc, name); _ }) - | (_, DeclareInterface { Interface.id = (name_loc, name); _ }) - | (_, InterfaceDeclaration { Interface.id = (name_loc, name); _ }) as stmt -> - let is_interface = match stmt with - | (_, DeclareInterface _) -> true - | (_, InterfaceDeclaration _) -> true - | _ -> false in + | None -> ()) + | ( (_, DeclareClass { DeclareClass.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ }) + | ( _, + DeclareInterface { Interface.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } + ) + | ( _, + InterfaceDeclaration + { Interface.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } ) ) as stmt -> + let is_interface = + match stmt with + | (_, DeclareInterface _) -> true + | (_, InterfaceDeclaration _) -> true + | _ -> false + in let r = mk_reason (RType name) name_loc in let tvar = Tvar.mk cx r in (* interface is a type alias, declare class is a var *) - if is_interface - then Env.bind_type cx name tvar name_loc - else Env.bind_declare_var cx name tvar name_loc - - | (loc, DeclareModule { DeclareModule.id; _ }) 
-> - let name = match id with - | DeclareModule.Identifier (_, value) - | DeclareModule.Literal (_, { Ast.StringLiteral.value; _ }) -> value + if is_interface then + Env.bind_type cx name tvar name_loc + else + Env.bind_declare_var cx name tvar name_loc + | (loc, DeclareModule { DeclareModule.id; _ }) -> + let name = + match id with + | DeclareModule.Identifier (_, { Ast.Identifier.name = value; comments = _ }) + | DeclareModule.Literal (_, { Ast.StringLiteral.value; _ }) -> + value in let r = mk_reason (RModule name) loc in let t = Tvar.mk cx r in - Type_table.set (Context.type_table cx) loc t; Env.bind_declare_var cx (internal_module_name name) t loc - - | _, - DeclareExportDeclaration { - DeclareExportDeclaration.default; declaration; _ - } -> - DeclareExportDeclaration.(match declaration with - | Some (Variable (loc, v)) -> - statement_decl cx (loc, DeclareVariable v) - | Some (Function (loc, f)) -> - statement_decl cx (loc, DeclareFunction f) - | Some (Class (loc, c)) -> - statement_decl cx (loc, DeclareClass c) + | (_, DeclareExportDeclaration { DeclareExportDeclaration.default; declaration; _ }) -> + DeclareExportDeclaration.( + (match declaration with + | Some (Variable (loc, v)) -> statement_decl cx (loc, DeclareVariable v) + | Some (Function (loc, f)) -> statement_decl cx (loc, DeclareFunction f) + | Some (Class (loc, c)) -> statement_decl cx (loc, DeclareClass c) | Some (DefaultType _) -> () - | Some (NamedType (loc, t)) -> - statement_decl cx (loc, TypeAlias t) - | Some (NamedOpaqueType (loc, t)) -> - statement_decl cx (loc, OpaqueType t) - | Some (Interface (loc, i)) -> - statement_decl cx (loc, InterfaceDeclaration i) + | Some (NamedType (loc, t)) -> statement_decl cx (loc, TypeAlias t) + | Some (NamedOpaqueType (loc, t)) -> statement_decl cx (loc, OpaqueType t) + | Some (Interface (loc, i)) -> statement_decl cx (loc, InterfaceDeclaration i) | None -> - if Option.is_none default - then () - else failwith ( - "Parser Error: declare export default must always have an " ^ - "associated declaration or type!" - ) - ) - - | (_, DeclareModuleExports _) -> () - - | (_, ExportNamedDeclaration { ExportNamedDeclaration.declaration; _ }) -> ( - match declaration with + if Option.is_none default then + () + else + failwith + ( "Parser Error: declare export default must always have an " + ^ "associated declaration or type!" 
))) + | (_, DeclareModuleExports _) -> () + | (_, ExportNamedDeclaration { ExportNamedDeclaration.declaration; _ }) -> + (match declaration with | Some stmt -> statement_decl cx stmt - | None -> () - ) - | _, ExportDefaultDeclaration { ExportDefaultDeclaration.declaration; _ } -> ( - match declaration with + | None -> ()) + | (_, ExportDefaultDeclaration { ExportDefaultDeclaration.declaration; _ }) -> + (match declaration with | ExportDefaultDeclaration.Declaration stmt -> - statement_decl cx (fst (nameify_default_export_decl stmt)) - | ExportDefaultDeclaration.Expression _ -> () - ) - | (_, ImportDeclaration { ImportDeclaration.importKind; specifiers; default; source = _ }) -> + let (stmt, _) = Import_export.nameify_default_export_decl stmt in + statement_decl cx stmt + | ExportDefaultDeclaration.Expression _ -> ()) + | (_, ImportDeclaration { ImportDeclaration.importKind; specifiers; default; source = _ }) -> let isType = match importKind with | ImportDeclaration.ImportType -> true | ImportDeclaration.ImportTypeof -> true | ImportDeclaration.ImportValue -> false in - - let bind_import local_name loc isType = - let reason = if isType - then DescFormat.type_reason local_name loc - else mk_reason (RIdentifier local_name) loc in + let bind_import local_name (loc : ALoc.t) isType = + let reason = + if isType then + DescFormat.type_reason local_name loc + else + mk_reason (RIdentifier local_name) loc + in let tvar = Tvar.mk cx reason in - if isType - then Env.bind_import_type cx local_name tvar loc - else Env.bind_import cx local_name tvar loc + if isType then + Env.bind_import_type cx local_name tvar loc + else + Env.bind_import cx local_name tvar loc in - - Option.iter ~f:(fun local -> - bind_import (ident_name local) (fst local) isType - ) default; - - Option.iter ~f:(function - | ImportDeclaration.ImportNamespaceSpecifier (_, local) -> - bind_import (ident_name local) (fst local) isType - - | ImportDeclaration.ImportNamedSpecifiers named_specifiers -> - List.iter (fun { ImportDeclaration.local; remote; kind;} -> - let remote_name = ident_name remote in - let (local_name, loc) = ( - match local with - | Some local -> - (ident_name local, Loc.btwn (fst remote) (fst local)) - | None -> - (remote_name, fst remote) - ) in - let isType = isType || ( - match kind with - | None -> isType - | Some kind -> - kind = ImportDeclaration.ImportType - || kind = ImportDeclaration.ImportTypeof - ) in - bind_import local_name loc isType - ) named_specifiers - ) specifiers -) + Option.iter ~f:(fun local -> bind_import (ident_name local) (fst local) isType) default; + + Option.iter + ~f:(function + | ImportDeclaration.ImportNamespaceSpecifier (_, local) -> + bind_import (ident_name local) (fst local) isType + | ImportDeclaration.ImportNamedSpecifiers named_specifiers -> + List.iter + (fun { ImportDeclaration.local; remote; kind } -> + let (loc, { Ast.Identifier.name = local_name; comments = _ }) = + Option.value ~default:remote local + in + let isType = + isType + || + match kind with + | None -> isType + | Some kind -> + kind = ImportDeclaration.ImportType || kind = ImportDeclaration.ImportTypeof + in + bind_import local_name loc isType) + named_specifiers) + specifiers) (*************************************************************** * local inference main pass: visit AST statement list, calling @@ -399,164 +553,160 @@ and statement_decl cx = Ast.Statement.( (* can raise Abnormal.(Exn (Stmts _, _)). 
*) and toplevels = let rec loop acc cx = function - | [] -> List.rev acc - | (loc, Ast.Statement.Empty)::stmts -> - loop ((loc, Ast.Statement.Empty)::acc) cx stmts - | stmt::stmts -> - match Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx stmt) with - | stmt, Some abnormal -> - (* control flow exit out of a flat list: + | [] -> List.rev acc + | (loc, Ast.Statement.Empty) :: stmts -> loop ((loc, Ast.Statement.Empty) :: acc) cx stmts + | stmt :: stmts -> + (match Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx stmt) with + | (stmt, Some abnormal) -> + (* control flow exit out of a flat list: check for unreachable code and rethrow *) - let warn_unreachable loc = - Flow.add_output cx (Flow_error.EUnreachable loc) in - let rest_opts = List.map Ast.Statement.(fun stmt -> - match stmt with - | (_, Empty) as stmt -> Some stmt - (* function declarations are hoisted, so not unreachable *) - | (_, FunctionDeclaration _ ) -> Some (statement cx stmt) - (* variable declarations are hoisted, but associated assignments are + let warn_unreachable loc = Flow.add_output cx (Error_message.EUnreachable loc) in + let rest = + Core_list.map + ~f: + Ast.Statement.( + fun stmt -> + match stmt with + | (_, Empty) as stmt -> stmt + (* function declarations are hoisted, so not unreachable *) + | (_, FunctionDeclaration _) -> statement cx stmt + (* variable declarations are hoisted, but associated assignments are not, so skip variable declarations with no assignments. Note: this does not seem like a practice anyone would use *) - | (_, VariableDeclaration d) -> VariableDeclaration.(d.declarations |> - List.iter Declarator.(function - | (_, { init = Some (loc, _); _ } ) -> warn_unreachable loc - | _ -> () - )); - None - | (loc, _) -> warn_unreachable loc; None - ) stmts in - let rest = - rest_opts - |> List.filter Option.is_some - |> List.map (fun stmt_opt -> Option.value_exn stmt_opt) - in - Abnormal.throw_stmts_control_flow_exception - (List.rev_append acc (stmt::rest)) - abnormal - | stmt, None -> loop (stmt::acc) cx stmts + | (_, VariableDeclaration d) as stmt -> + VariableDeclaration.( + d.declarations + |> List.iter + Declarator.( + function + | (_, { init = Some (loc, _); _ }) -> warn_unreachable loc + | _ -> ())); + Tast_utils.unreachable_mapper#statement stmt + | (loc, _) as stmt -> + warn_unreachable loc; + Tast_utils.unreachable_mapper#statement stmt) + stmts + in + Abnormal.throw_stmts_control_flow_exception (List.rev_append acc (stmt :: rest)) abnormal + | (stmt, None) -> loop (stmt :: acc) cx stmts) in - fun cx -> loop [] cx + (fun cx -> loop [] cx) (* can raise Abnormal.(Exn (Stmt _, _)) *) -and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement.( - let variables cx { VariableDeclaration.declarations; kind } = - let declarations = List.map (fun vdecl -> variable cx kind vdecl) declarations in - { VariableDeclaration.declarations; kind; } - in - - let interface_helper cx loc (iface_sig, self) = - let def_reason = mk_reason (desc_of_t self) loc in - iface_sig |> Class_sig.generate_tests cx (fun iface_sig -> - Class_sig.check_super cx def_reason iface_sig; - Class_sig.check_implements cx def_reason iface_sig - ) |> ignore (* TODO(vijayramamurthy) *); - let t = Class_sig.classtype ~check_polarity:false cx iface_sig in - Flow.unify cx self t; - Type_table.set (Context.type_table cx) loc t; - t - in - - let interface cx loc decl = - let { Interface.id = (name_loc, name); _ } = decl in - let reason = DescFormat.instance_reason name name_loc in - 
let iface_sig, iface_t, decl_ast = Anno.mk_interface_sig cx reason decl in - let t = interface_helper cx loc (iface_sig, iface_t) in - Env.init_type cx name t loc; - decl_ast - in - - let declare_class cx loc decl = - let { DeclareClass.id = (name_loc, name); _ } = decl in - let reason = DescFormat.instance_reason name name_loc in - let class_sig, class_t, decl_ast = Anno.mk_declare_class_sig cx reason decl in - let t = interface_helper cx loc (class_sig, class_t) in - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) loc); - init = reason_of_t t; - }) in - Env.init_var ~has_anno:false cx ~use_op name t loc; - decl_ast - in - - let check cx b = Abnormal.catch_stmts_control_flow_exception(fun () -> - toplevel_decls cx b.Block.body; - toplevels cx b.Block.body) in - - let catch_clause cx { Try.CatchClause.param; body = (b_loc, b) } = - Ast.Pattern.(match param with - | Some p -> (match p with - | loc, Identifier { - Identifier.name = (name_loc, name); annot = None; optional; - } -> +and statement cx : 'a -> (ALoc.t, ALoc.t * Type.t) Ast.Statement.t = + Ast.Statement.( + let variables cx decls = + VariableDeclaration.( + let { declarations; kind } = decls in + let declarations = + Core_list.map + ~f:(fun (loc, { Declarator.id; init }) -> + let (id, init) = variable cx kind id init in + (loc, { Declarator.id; init })) + declarations + in + { declarations; kind }) + in + let interface_helper cx loc (iface_sig, self) = + let def_reason = mk_reason (desc_of_t self) loc in + Class_type_sig.generate_tests + cx + (fun iface_sig -> + Class_type_sig.check_super cx def_reason iface_sig; + Class_type_sig.check_implements cx def_reason iface_sig) + iface_sig; + let t = Class_type_sig.classtype ~check_polarity:false cx iface_sig in + Flow.unify cx self t; + t + in + let interface cx loc decl = + let { Interface.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } = decl in + let reason = DescFormat.instance_reason name name_loc in + let (iface_sig, iface_t, decl_ast) = Anno.mk_interface_sig cx reason decl in + let t = interface_helper cx loc (iface_sig, iface_t) in + Env.init_type cx name t loc; + decl_ast + in + let declare_class cx loc decl = + let { DeclareClass.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } = decl in + let reason = DescFormat.instance_reason name name_loc in + let (class_sig, class_t, decl_ast) = Anno.mk_declare_class_sig cx reason decl in + let t = interface_helper cx loc (class_sig, class_t) in + let use_op = + Op (AssignVar { var = Some (mk_reason (RIdentifier name) loc); init = reason_of_t t }) + in + Env.init_var ~has_anno:false cx ~use_op name t loc; + decl_ast + in + let check cx b = + Abnormal.catch_stmts_control_flow_exception (fun () -> + toplevel_decls cx b.Block.body; + toplevels cx b.Block.body) + in + let catch_clause cx catch_clause = + let { Try.CatchClause.param; body = (b_loc, b) } = catch_clause in + Ast.Pattern.( + match param with + | Some p -> + (match p with + | ( loc, + Identifier + { + Identifier.name = (name_loc, ({ Ast.Identifier.name; comments = _ } as id)); + annot = Ast.Type.Missing mloc; + optional; + } ) -> let r = mk_reason (RCustom "catch") loc in let t = Tvar.mk cx r in - - Type_table.set (Context.type_table cx) loc t; - - let stmts, abnormal_opt = Env.in_lex_scope cx (fun () -> - Scope.(Env.bind_implicit_let - ~state:State.Initialized Entry.CatchParamBinding cx name t loc); - - check cx b - ) in - { Try.CatchClause. - param = Some ((loc, t), Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = (name_loc, t), name; - annot = None; - optional; - }); - body = b_loc, { Block.body = stmts }; - }, - abnormal_opt - - - | loc, Identifier _ -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, CatchParameterAnnotation)); - Typed_ast.Statement.Try.CatchClause.error, None - - | loc, _ -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, CatchParameterDeclaration)); - Typed_ast.Statement.Try.CatchClause.error, None - ) - | None -> - let stmts, abnormal_opt = Env.in_lex_scope cx (fun () -> - check cx b - ) in - { Try.CatchClause. - param = None; - body = b_loc, { Block.body = stmts }; - }, - abnormal_opt - ) - in - - function - - | (_, Empty) as stmt -> stmt - - | (loc, Block { Block.body }) -> - let body, abnormal_opt = - Abnormal.catch_stmts_control_flow_exception (fun () -> - Env.in_lex_scope cx (fun () -> - toplevel_decls cx body; - toplevels cx body - ) - ) + let (stmts, abnormal_opt) = + Env.in_lex_scope cx (fun () -> + Scope.( + Env.bind_implicit_let + ~state:State.Initialized + Entry.CatchParamBinding + cx + name + t + loc); + + check cx b) + in + ( { + Try.CatchClause.param = + Some + ( (loc, t), + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = ((name_loc, t), id); + annot = Ast.Type.Missing (mloc, t); + optional; + } ); + body = (b_loc, { Block.body = stmts }); + }, + abnormal_opt ) + | (loc, Identifier _) -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, CatchParameterAnnotation)); + (Tast_utils.error_mapper#catch_clause catch_clause, None) + | (loc, _) -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, CatchParameterDeclaration)); + (Tast_utils.error_mapper#catch_clause catch_clause, None)) + | None -> + let (stmts, abnormal_opt) = Env.in_lex_scope cx (fun () -> check cx b) in + ({ Try.CatchClause.param = None; body = (b_loc, { Block.body = stmts }) }, abnormal_opt)) in - Abnormal.check_stmt_control_flow_exception ( - (loc, Block { Block.body }), - abnormal_opt - ) - - | (loc, Expression { Expression.expression = e; directive; }) -> - loc, Expression { Expression. 
- expression = expression cx e; - directive; - } - - (* Refinements for `if` are derived by the following Hoare logic rule: + function + | (_, Empty) as stmt -> stmt + | (loc, Block { Block.body }) -> + let (body, abnormal_opt) = + Abnormal.catch_stmts_control_flow_exception (fun () -> + Env.in_lex_scope cx (fun () -> + toplevel_decls cx body; + toplevels cx body)) + in + Abnormal.check_stmt_control_flow_exception ((loc, Block { Block.body }), abnormal_opt) + | (loc, Expression { Expression.expression = e; directive }) -> + (loc, Expression { Expression.expression = expression cx e; directive }) + (* Refinements for `if` are derived by the following Hoare logic rule: [Pre & c] S1 [Post1] [Pre & ~c] S2 [Post2] @@ -564,531 +714,536 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement ---------------------------- [Pre] if c S1 else S2 [Post] *) - | (loc, If { If.test; consequent; alternate }) -> - let loc_test, _ = test in - let test_ast, preds, not_preds, xts = - predicates_of_condition cx test in - + | (loc, If { If.test; consequent; alternate; comments }) -> + let (loc_test, _) = test in + let (test_ast, preds, not_preds, xts) = predicates_of_condition cx test in (* grab a reference to the incoming env - we'll restore it and merge branched envs later *) - let start_env = Env.peek_env () in - let oldset = Changeset.clear () in - + let start_env = Env.peek_env () in + let oldset = Changeset.Global.clear () in (* swap in a refined clone of initial env for then *) Env.( update_env cx loc (clone_env start_env); - ignore (refine_with_preds cx loc_test preds xts) - ); + ignore (refine_with_preds cx loc_test preds xts)); - let then_ast, then_abnormal = Abnormal.catch_stmt_control_flow_exception - (fun () -> statement cx consequent) + let (then_ast, then_abnormal) = + Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx consequent) in - (* grab a reference to env after then branch *) let then_env = Env.peek_env () in - (* then swap in a refined clone of initial env for else *) Env.( update_env cx loc (clone_env start_env); - ignore (refine_with_preds cx loc_test not_preds xts) - ); + ignore (refine_with_preds cx loc_test not_preds xts)); - let else_ast, else_abnormal = match alternate with - | None -> None, None + let (else_ast, else_abnormal) = + match alternate with + | None -> (None, None) | Some st -> - let else_ast, else_abnormal = - Abnormal.catch_stmt_control_flow_exception - (fun () -> statement cx st) - in Some else_ast, else_abnormal + let (else_ast, else_abnormal) = + Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx st) + in + (Some else_ast, else_abnormal) in - (* grab a reference to env after else branch *) let else_env = Env.peek_env () in - (* snapshot if-else changes and merge old changes back into state *) - let newset = Changeset.merge oldset in - + let newset = Changeset.Global.merge oldset in (* adjust post-if environment. 
if we've returned from one arm, swap in the env generated by the other, otherwise merge *) - let end_env = match then_abnormal, else_abnormal with - | Some Abnormal.Return, None - | Some Abnormal.Throw, None -> - else_env - - | None, Some Abnormal.Return - | None, Some Abnormal.Throw -> - then_env - - | None, Some _ - | Some _, None - | Some _, Some _ -> - Env.merge_env cx loc (start_env, then_env, else_env) newset; - start_env - - | None, None -> - (* if neither branch has abnormal flow, then refinements that happen in + let end_env = + match (then_abnormal, else_abnormal) with + | (Some Abnormal.Return, None) + | (Some Abnormal.Throw, None) -> + else_env + | (None, Some Abnormal.Return) + | (None, Some Abnormal.Throw) -> + then_env + | (None, Some _) + | (Some _, None) + | (Some _, Some _) -> + Env.merge_env cx loc (start_env, then_env, else_env) newset; + start_env + | (None, None) -> + (* if neither branch has abnormal flow, then refinements that happen in the branches should be forgotten since the original type covers all of the options. *) - Env.merge_env cx loc - (start_env, then_env, else_env) - (Changeset.exclude_refines newset); - start_env + Env.merge_env cx loc (start_env, then_env, else_env) (Changeset.exclude_refines newset); + start_env in Env.update_env cx loc end_env; - let ast = loc, If { If. - test = test_ast; - consequent = then_ast; - alternate = else_ast; - } in - + let ast = + (loc, If { If.test = test_ast; consequent = then_ast; alternate = else_ast; comments }) + in (* handle control flow in cases where we've thrown from both sides *) - begin match then_abnormal, else_abnormal with - | Some Abnormal.Throw, Some Abnormal.Return - | Some Abnormal.Return, Some Abnormal.Throw -> - Abnormal.throw_stmt_control_flow_exception ast Abnormal.Return; - - | Some then_exn, Some else_exn when then_exn = else_exn -> - Abnormal.throw_stmt_control_flow_exception ast then_exn - - | _ -> ast + begin + match (then_abnormal, else_abnormal) with + | (Some Abnormal.Throw, Some Abnormal.Return) + | (Some Abnormal.Return, Some Abnormal.Throw) -> + Abnormal.throw_stmt_control_flow_exception ast Abnormal.Return + | (Some then_exn, Some else_exn) when then_exn = else_exn -> + Abnormal.throw_stmt_control_flow_exception ast then_exn + | _ -> ast end - - | (top_loc, Labeled { Labeled.label = _, name as lab_ast; body }) -> + | ( top_loc, + Labeled { Labeled.label = (_, { Ast.Identifier.name; comments = _ }) as lab_ast; body } ) + -> (match body with | (loc, While _) | (loc, DoWhile _) | (loc, For _) - | (loc, ForIn _) - -> - let oldset = Changeset.clear () in + | (loc, ForIn _) -> + let oldset = Changeset.Global.clear () in let label = Some name in let save_break = Abnormal.clear_saved (Abnormal.Break label) in let save_continue = Abnormal.clear_saved (Abnormal.Continue label) in - let env = Env.peek_env () in Env.widen_env cx loc; let loop_env = Env.clone_env env in Env.update_env cx loc loop_env; - let body_ast, body_abnormal = + let (body_ast, body_abnormal) = Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) |> Abnormal.ignore_break_or_continue_to_label label in - let ast = top_loc, Labeled { Labeled.label = lab_ast; body = body_ast } in - ignore (Abnormal.check_stmt_control_flow_exception (ast, body_abnormal) - : (Loc.t, Loc.t * Type.t) Ast.Statement.t); + let ast = (top_loc, Labeled { Labeled.label = lab_ast; body = body_ast }) in + ignore + ( Abnormal.check_stmt_control_flow_exception (ast, body_abnormal) + : (ALoc.t, ALoc.t * Type.t) Ast.Statement.t ); - let newset 
= Changeset.merge oldset in + let newset = Changeset.Global.merge oldset in + if Abnormal.swap_saved (Abnormal.Continue label) save_continue <> None then + Env.havoc_vars newset; - if Abnormal.swap_saved (Abnormal.Continue label) save_continue <> None - then Env.havoc_vars newset; + Env.copy_env cx loc (env, loop_env) newset; - Env.copy_env cx loc (env,loop_env) newset; - - if Abnormal.swap_saved (Abnormal.Break label) save_break <> None - then Env.havoc_vars newset; + if Abnormal.swap_saved (Abnormal.Break label) save_break <> None then Env.havoc_vars newset; ast - | _ -> - let oldset = Changeset.clear () in + let oldset = Changeset.Global.clear () in let label = Some name in let save_break = Abnormal.clear_saved (Abnormal.Break label) in - - let body_ast, body_abnormal = + let (body_ast, body_abnormal) = Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) |> Abnormal.ignore_break_to_label label in - let ast = top_loc, Labeled { Labeled.label = lab_ast; body = body_ast } in - ignore (Abnormal.check_stmt_control_flow_exception (ast, body_abnormal) - : (Loc.t, Loc.t * Type.t) Ast.Statement.t); - - let newset = Changeset.merge oldset in - if Abnormal.swap_saved (Abnormal.Break label) save_break <> None - then Env.havoc_vars newset; + let ast = (top_loc, Labeled { Labeled.label = lab_ast; body = body_ast }) in + ignore + ( Abnormal.check_stmt_control_flow_exception (ast, body_abnormal) + : (ALoc.t, ALoc.t * Type.t) Ast.Statement.t ); - ast - ) + let newset = Changeset.Global.merge oldset in + if Abnormal.swap_saved (Abnormal.Break label) save_break <> None then Env.havoc_vars newset; - | (loc, Break { Break.label }) -> + ast) + | (loc, Break { Break.label; comments }) -> (* save environment at unlabeled breaks, prior to activation clearing *) - let label_opt, env, label_ast = match label with - | None -> None, Env.(clone_env (peek_env ())), None - | Some (_, name as lab_ast) -> Some name, [], Some lab_ast + let (label_opt, env, label_ast) = + match label with + | None -> (None, Env.(clone_env (peek_env ())), None) + | Some ((_, { Ast.Identifier.name; comments = _ }) as lab_ast) -> + (Some name, [], Some lab_ast) in Env.reset_current_activation loc; - let ast = loc, Break { Break.label = label_ast } in + let ast = (loc, Break { Break.label = label_ast; comments }) in let abnormal = Abnormal.Break label_opt in Abnormal.save abnormal ~env; Abnormal.throw_stmt_control_flow_exception ast abnormal - - | (loc, Continue { Continue.label }) -> - let label_opt, label_ast = match label with - | None -> None, None - | Some (_, name as lab_ast) -> Some name, Some lab_ast + | (loc, Continue { Continue.label; comments }) -> + let (label_opt, label_ast) = + match label with + | None -> (None, None) + | Some ((_, { Ast.Identifier.name; comments = _ }) as lab_ast) -> (Some name, Some lab_ast) in Env.reset_current_activation loc; - let ast = loc, Continue { Continue.label = label_ast } in + let ast = (loc, Continue { Continue.label = label_ast; comments }) in let abnormal = Abnormal.Continue label_opt in Abnormal.save abnormal; Abnormal.throw_stmt_control_flow_exception ast abnormal - - | (loc, With _) -> + | (_, With _) as s -> (* TODO or disallow? 
*) - loc, snd Typed_ast.Statement.error - - |((loc, DeclareTypeAlias {TypeAlias.id=(name_loc, name); tparams; right;}) - | (loc, TypeAlias {TypeAlias.id=(name_loc, name); tparams; right;})) as stmt -> + Tast_utils.error_mapper#statement s + | ( ( loc, + DeclareTypeAlias + { + TypeAlias.id = (name_loc, ({ Ast.Identifier.name; comments = _ } as id)); + tparams; + right; + } ) + | ( loc, + TypeAlias + { + TypeAlias.id = (name_loc, ({ Ast.Identifier.name; comments = _ } as id)); + tparams; + right; + } ) ) as stmt -> let r = DescFormat.type_reason name name_loc in - let typeparams, typeparams_map, tparams_ast = - Anno.mk_type_param_declarations cx tparams in - let (_, t), _ as right_ast = Anno.convert cx typeparams_map right in + let (typeparams, typeparams_map, tparams_ast) = Anno.mk_type_param_declarations cx tparams in + let (((_, t), _) as right_ast) = Anno.convert cx typeparams_map right in let t = - let mod_reason = replace_reason ~keep_def_loc:true - (fun desc -> RTypeAlias (name, true, desc)) in + let mod_reason = update_desc_reason (fun desc -> RTypeAlias (name, true, desc)) in let rec loop = function - | ExactT (r, t) -> ExactT (mod_reason r, loop t) - | DefT (r, MaybeT t) -> DefT (mod_reason r, MaybeT (loop t)) - | t -> mod_reason_of_t mod_reason t + | ExactT (r, t) -> ExactT (mod_reason r, loop t) + | MaybeT (r, t) -> MaybeT (mod_reason r, loop t) + | t -> mod_reason_of_t mod_reason t in loop t in - let type_ = poly_type (Context.make_nominal cx) typeparams - (DefT (r, TypeT (TypeAliasKind, t))) in - Flow.check_polarity cx Positive t; - Type_table.set (Context.type_table cx) loc type_; - let id_info = name, type_, Type_table.Other in - Type_table.set_info name_loc id_info (Context.type_table cx); + let type_ = + poly_type_of_tparams + (Context.make_nominal cx) + typeparams + (DefT (r, bogus_trust (), TypeT (TypeAliasKind, t))) + in + Flow.check_polarity cx Polarity.Positive t; + Env.init_type cx name type_ name_loc; - let type_alias_ast = { TypeAlias. 
- id = (name_loc, type_), name; - tparams = tparams_ast; - right = right_ast; - } in + let type_alias_ast = + { TypeAlias.id = ((name_loc, type_), id); tparams = tparams_ast; right = right_ast } + in (match stmt with - | _, DeclareTypeAlias _ -> loc, DeclareTypeAlias type_alias_ast - | _, TypeAlias _ -> loc, TypeAlias type_alias_ast + | (_, DeclareTypeAlias _) -> (loc, DeclareTypeAlias type_alias_ast) + | (_, TypeAlias _) -> (loc, TypeAlias type_alias_ast) | _ -> assert false) - - |((loc, DeclareOpaqueType - {OpaqueType.id=(name_loc, name); tparams; impltype; supertype}) - | (loc, OpaqueType {OpaqueType.id=(name_loc, name); tparams; impltype; supertype})) - as stmt -> + | ( ( loc, + DeclareOpaqueType + { + OpaqueType.id = (name_loc, ({ Ast.Identifier.name; comments = _ } as id)); + tparams; + impltype; + supertype; + } ) + | ( loc, + OpaqueType + { + OpaqueType.id = (name_loc, ({ Ast.Identifier.name; comments = _ } as id)); + tparams; + impltype; + supertype; + } ) ) as stmt -> let r = DescFormat.type_reason name name_loc in - let typeparams, typeparams_map, tparams_ast = - Anno.mk_type_param_declarations cx tparams in - let underlying_t, impltype_ast = Anno.convert_opt cx typeparams_map impltype in - let opaque_type_args = List.map (fun {name; reason; polarity; _} -> - let t = SMap.find_unsafe name typeparams_map in - name, reason, t, polarity - ) typeparams in - let super_t, supertype_ast = Anno.convert_opt cx typeparams_map supertype in - let opaquetype = { - underlying_t; - super_t; - opaque_id = Context.make_nominal cx; - opaque_type_args; - opaque_name = name - } in + let (typeparams, typeparams_map, tparams_ast) = Anno.mk_type_param_declarations cx tparams in + let (underlying_t, impltype_ast) = Anno.convert_opt cx typeparams_map impltype in + let opaque_type_args = + Core_list.map + ~f:(fun { name; reason; polarity; _ } -> + let t = SMap.find_unsafe name typeparams_map in + (name, reason, t, polarity)) + (TypeParams.to_list typeparams) + in + let (super_t, supertype_ast) = Anno.convert_opt cx typeparams_map supertype in + let opaquetype = + { underlying_t; super_t; opaque_id = name_loc; opaque_type_args; opaque_name = name } + in let t = OpaqueT (mk_reason (ROpaqueType name) loc, opaquetype) in - Flow.check_polarity cx Positive t; - let type_ = poly_type (Context.make_nominal cx) typeparams - (DefT (r, TypeT (OpaqueKind, t))) in - let open Flow in - let () = match underlying_t, super_t with - | Some l, Some u -> - generate_tests cx typeparams (fun map_ -> - flow_t cx (subst cx map_ l, subst cx map_ u) - ) |> ignore - | _ -> () + Flow.check_polarity cx Polarity.Positive t; + let type_ = + poly_type_of_tparams + (Context.make_nominal cx) + typeparams + (DefT (r, bogus_trust (), TypeT (OpaqueKind, t))) in - Type_table.set (Context.type_table cx) loc type_; - let id_info = name, type_, Type_table.Other in - Type_table.set_info name_loc id_info (Context.type_table cx); - Env.init_type cx name type_ name_loc; - let opaque_type_ast = { OpaqueType. 
- id = (name_loc, type_), name; - tparams = tparams_ast; - impltype = impltype_ast; - supertype = supertype_ast; - } in - (match stmt with - | _, DeclareOpaqueType _ -> loc, DeclareOpaqueType opaque_type_ast - | _, OpaqueType _ -> loc, OpaqueType opaque_type_ast - | _ -> assert false) - - (*******************************************************) - - | (switch_loc, Switch { Switch.discriminant; cases; }) -> + Flow.( + let () = + match (underlying_t, super_t) with + | (Some l, Some u) -> + generate_tests cx (TypeParams.to_list typeparams) (fun map_ -> + flow_t cx (subst cx map_ l, subst cx map_ u)) + |> ignore + | _ -> () + in + Env.init_type cx name type_ name_loc; + let opaque_type_ast = + { + OpaqueType.id = ((name_loc, type_), id); + tparams = tparams_ast; + impltype = impltype_ast; + supertype = supertype_ast; + } + in + (match stmt with + | (_, DeclareOpaqueType _) -> (loc, DeclareOpaqueType opaque_type_ast) + | (_, OpaqueType _) -> (loc, OpaqueType opaque_type_ast) + | _ -> assert false)) + (*******************************************************) + | (switch_loc, Switch { Switch.discriminant; cases }) -> + (* add default if absent *) + let (cases, added_default) = + Switch.Case.( + if List.exists (fun (_, { test; _ }) -> test = None) cases then + (cases, false) + else + (cases @ [(switch_loc, { test = None; consequent = [] })], true)) + in + (* typecheck discriminant *) + let discriminant_ast = expression cx discriminant in + (* switch body is a single lexical scope *) + Env.in_lex_scope cx (fun () -> + (* save incoming env state, clear changeset *) + let incoming_changes = Changeset.Global.clear () in + let incoming_env = Env.peek_env () in + let incoming_depth = List.length incoming_env in + (* set up all bindings *) + cases + |> List.iter (fun (_, { Switch.Case.consequent; _ }) -> toplevel_decls cx consequent); + + (* each case starts with this env - begins as clone of incoming_env + plus bindings, also accumulates negative refis from case tests *) + let case_start_env = Env.clone_env incoming_env in + (* Some (env, writes, refis, reason) when a case falls through *) + let fallthrough_case = ref None in + (* switch_state tracks case effects and is used to create outgoing env *) + let switch_state = ref None in + let update_switch_state (case_env, case_writes, _, loc) = + let case_env = ListUtils.last_n incoming_depth case_env in + let state = + match !switch_state with + | None -> (case_env, Changeset.empty, case_writes) + | Some (env, partial_writes, total_writes) -> + let case_diff = Changeset.comp case_writes total_writes in + let partial_writes = Changeset.union partial_writes case_diff in + let total_writes = Changeset.inter case_writes total_writes in + (* merge new case into switch env *) + Env.merge_env cx loc (env, env, case_env) case_writes; + (env, partial_writes, total_writes) + in + switch_state := Some state + in + (* traverse case list, get list of control flow exits and list of ASTs *) + let (exits, cases_ast) = + cases + |> Core_list.map ~f:(fun (loc, { Switch.Case.test; consequent }) -> + (* compute predicates implied by case expr or default *) + let (test_ast, preds, not_preds, xtypes) = + match test with + | None -> (None, Key_map.empty, Key_map.empty, Key_map.empty) + | Some expr -> + let fake = + ( loc, + Ast.Expression.( + Binary + { + Binary.operator = Binary.StrictEqual; + left = discriminant; + right = expr; + }) ) + in + let ((_, fake_ast), preds, not_preds, xtypes) = + predicates_of_condition cx fake + in + let expr_ast = + match fake_ast with + | 
Ast.Expression.(Binary { Binary.right; _ }) -> right + | _ -> assert false + in + (Some expr_ast, preds, not_preds, xtypes) + in + (* swap in case's starting env and clear changeset *) + let case_env = Env.clone_env case_start_env in + Env.update_env cx loc case_env; + let save_changes = Changeset.Global.clear () in + (* add test refinements - save changelist for later *) + let test_refis = Env.refine_with_preds cx loc preds xtypes in + (* merge env changes from fallthrough case, if present *) + Option.iter !fallthrough_case ~f:(fun (env, writes, refis, _) -> + let changes = Changeset.union writes refis in + Env.merge_env cx loc (case_env, case_env, env) changes); + + (* process statements, track control flow exits: exit will be an + unconditional exit, break_opt will be any break *) + let save_break = Abnormal.clear_saved (Abnormal.Break None) in + let (consequent_ast, exit) = + Abnormal.catch_stmts_control_flow_exception (fun () -> + toplevels cx consequent) + in + let break_opt = Abnormal.swap_saved (Abnormal.Break None) save_break in + (* restore ambient changes and save case writes *) + let case_writes = + Changeset.include_writes save_changes |> Changeset.Global.merge + in + (* track fallthrough to next case and/or break to switch end *) + let (falls_through, breaks_to_end) = + match exit with + | Some Abnormal.Throw + | Some Abnormal.Return + | Some (Abnormal.Break (Some _)) + | Some (Abnormal.Continue _) -> + (false, false) + | Some (Abnormal.Break None) -> (false, true) + | None -> (true, Option.is_some break_opt) + in + (* save state for fallthrough *) + fallthrough_case := + if falls_through then + Some (case_env, case_writes, test_refis, loc) + else + None; + + (* if we break to end, add effects to terminal state *) + ( if breaks_to_end then + match break_opt with + | None -> + Flow.add_output cx Error_message.(EInternal (loc, BreakEnvMissingForCase)) + | Some break_env -> + update_switch_state (break_env, case_writes, test_refis, loc) ); + + (* add negative refis of this case's test to common start env *) + (* TODO add API to do this without having to swap in env *) + Env.update_env cx loc case_start_env; + let _ = Env.refine_with_preds cx loc not_preds xtypes in + (exit, (loc, { Switch.Case.test = test_ast; consequent = consequent_ast }))) + |> List.split + in + let cases_ast = + List.( + if added_default then + cases_ast |> rev |> tl |> rev + else + cases_ast) + in + (* if last case fell out, update terminal switch state with it *) + Option.iter !fallthrough_case ~f:update_switch_state; - (* add default if absent *) - let cases, added_default = Switch.Case.( - if List.exists (fun (_, { test; _ }) -> test = None) cases - then cases, false - else cases @ [switch_loc, { test = None; consequent = [] }], true - ) in + (* env in switch_state has accumulated switch effects. 
now merge in + original types for partially written values, and swap env in *) + Option.iter !switch_state ~f:(fun (env, partial_writes, _) -> + Env.merge_env cx switch_loc (env, env, incoming_env) partial_writes; + Env.update_env cx switch_loc env); - (* typecheck discriminant *) - let discriminant_ast = expression cx discriminant in - - (* switch body is a single lexical scope *) - Env.in_lex_scope cx (fun () -> - - (* save incoming env state, clear changeset *) - let incoming_changes = Changeset.clear () in - let incoming_env = Env.peek_env () in - let incoming_depth = List.length incoming_env in - - (* set up all bindings *) - cases |> List.iter (fun (_, { Switch.Case.consequent; _ }) -> - toplevel_decls cx consequent - ); - - (** each case starts with this env - begins as clone of incoming_env - plus bindings, also accumulates negative refis from case tests *) - let case_start_env = Env.clone_env incoming_env in - - (* Some (env, writes, refis, reason) when a case falls through *) - let fallthrough_case = ref None in - - (* switch_state tracks case effects and is used to create outgoing env *) - let switch_state = ref None in - let update_switch_state (case_env, case_writes, _test_refis, loc) = - let case_env = ListUtils.last_n incoming_depth case_env in - let state = match !switch_state with - | None -> - case_env, Changeset.empty, case_writes - | Some (env, partial_writes, total_writes) -> - let case_diff = Changeset.comp case_writes total_writes in - let partial_writes = Changeset.union partial_writes case_diff in - let total_writes = Changeset.inter case_writes total_writes in - (* merge new case into switch env *) - Env.merge_env cx loc (env, env, case_env) case_writes; - env, partial_writes, total_writes - in switch_state := Some state - in - - (* traverse case list, get list of control flow exits and list of ASTs *) - let exits, cases_ast = cases |> List.map ( - fun (loc, { Switch.Case.test; consequent }) -> - - (* compute predicates implied by case expr or default *) - let test_ast, preds, not_preds, xtypes = match test with - | None -> - None, Key_map.empty, Key_map.empty, Key_map.empty - | Some expr -> - let fake = loc, Ast.Expression.(Binary { - Binary.operator = Binary.StrictEqual; - left = discriminant; right = expr - }) in - let (_, fake_ast), preds, not_preds, xtypes = predicates_of_condition cx fake in - let expr_ast = match fake_ast with - | Ast.Expression.(Binary { Binary.right; _ }) -> right - | _ -> assert false - in - Some expr_ast, preds, not_preds, xtypes - in - - (* swap in case's starting env and clear changeset *) - let case_env = Env.clone_env case_start_env in - Env.update_env cx loc case_env; - let save_changes = Changeset.clear () in - - (* add test refinements - save changelist for later *) - let test_refis = Env.refine_with_preds cx loc preds xtypes in - - (* merge env changes from fallthrough case, if present *) - Option.iter !fallthrough_case ~f:(fun (env, writes, refis, _) -> - let chg = Changeset.union writes refis in - Env.merge_env cx loc (case_env, case_env, env) chg - ); - - (** process statements, track control flow exits: exit will be an - unconditional exit, break_opt will be any break *) - let save_break = Abnormal.clear_saved (Abnormal.Break None) in - let consequent_ast, exit = Abnormal.catch_stmts_control_flow_exception ( - fun () -> toplevels cx consequent - ) in - let break_opt = Abnormal.swap_saved (Abnormal.Break None) save_break in - - (* restore ambient changes and save case writes *) - let case_writes = - let case_changes = 
Changeset.merge save_changes in - Changeset.include_writes case_changes - in - - (* track fallthrough to next case and/or break to switch end *) - let falls_through, breaks_to_end = match exit with - | Some Abnormal.Throw - | Some Abnormal.Return - | Some Abnormal.Break (Some _) - | Some Abnormal.Continue _ -> - false, false - | Some Abnormal.Break None -> - false, true - | None -> - true, Option.is_some break_opt - in - - (* save state for fallthrough *) - fallthrough_case := if falls_through - then Some (case_env, case_writes, test_refis, loc) - else None; - - (* if we break to end, add effects to terminal state *) - if breaks_to_end then begin match break_opt with - | None -> - Flow.add_output cx - Flow_error.(EInternal (loc, BreakEnvMissingForCase)) - | Some break_env -> - update_switch_state (break_env, case_writes, test_refis, loc) - end; - - (* add negative refis of this case's test to common start env *) - (* TODO add API to do this without having to swap in env *) - Env.update_env cx loc case_start_env; - let _ = Env.refine_with_preds cx loc not_preds xtypes in - - exit, (loc, { Switch.Case.test = test_ast; consequent = consequent_ast}) - ) |> List.split in - - let cases_ast = List.( - if added_default - then cases_ast |> rev |> tl |> rev - else cases_ast - ) in - - (* if last case fell out, update terminal switch state with it *) - Option.iter !fallthrough_case ~f:update_switch_state; - - (** env in switch_state has accumulated switch effects. now merge in - original types for partially written values, and swap env in *) - Option.iter !switch_state ~f:(fun (env, partial_writes, _) -> - Env.merge_env cx switch_loc (env, env, incoming_env) partial_writes; - Env.update_env cx switch_loc env); - - (* merge original changeset back in *) - let _ = Changeset.merge incoming_changes in - - (** abnormal exit: if every case exits abnormally the same way (or falls + (* merge original changeset back in *) + let _ = Changeset.Global.merge incoming_changes in + (* abnormal exit: if every case exits abnormally the same way (or falls through to a case that does), then the switch as a whole exits that way. (as with if/else, we merge `throw` into `return` when both appear) *) - let uniform_switch_exit case_exits = - let rec loop = function - | acc, fallthrough, [] -> - (* end of cases: if nothing is falling through, we made it *) - if fallthrough then None else acc - | _, _, Some (Abnormal.Break _) :: _ -> - (* break wrecks everything *) - None - | acc, _, None :: exits -> - (* begin or continue to fall through *) - loop (acc, true, exits) - | acc, _, exit :: exits when exit = acc -> - (* current case exits the same way as prior cases *) - loop (acc, acc = None, exits) - | Some Abnormal.Throw, _, Some Abnormal.Return :: exits - | Some Abnormal.Return, _, Some Abnormal.Throw :: exits -> - (* fuzz throw into return *) - loop (Some Abnormal.Return, false, exits) - | None, _, exit :: exits -> - (* terminate an initial sequence of fall-thruugh cases *) - (* (later sequences will have acc = Some _ ) *) - loop (exit, false, exits) - | _, _, _ -> - (* the new case exits differently from previous ones - fail *) - None - in loop (None, false, case_exits) - in - let ast = switch_loc, Switch { Switch. 
- discriminant = discriminant_ast; - cases = cases_ast; - } in - begin match uniform_switch_exit exits with - | None -> ast - | Some abnormal -> Abnormal.throw_stmt_control_flow_exception ast abnormal - end - ) - - (*******************************************************) - - | (loc, Return { Return.argument }) -> + let uniform_switch_exit case_exits = + let rec loop = function + | (acc, fallthrough, []) -> + (* end of cases: if nothing is falling through, we made it *) + if fallthrough then + None + else + acc + | (_, _, Some (Abnormal.Break _) :: _) -> + (* break wrecks everything *) + None + | (acc, _, None :: exits) -> + (* begin or continue to fall through *) + loop (acc, true, exits) + | (acc, _, exit :: exits) when exit = acc -> + (* current case exits the same way as prior cases *) + loop (acc, acc = None, exits) + | (Some Abnormal.Throw, _, Some Abnormal.Return :: exits) + | (Some Abnormal.Return, _, Some Abnormal.Throw :: exits) -> + (* fuzz throw into return *) + loop (Some Abnormal.Return, false, exits) + | (None, _, exit :: exits) -> + (* terminate an initial sequence of fall-thruugh cases *) + (* (later sequences will have acc = Some _ ) *) + loop (exit, false, exits) + | (_, _, _) -> + (* the new case exits differently from previous ones - fail *) + None + in + loop (None, false, case_exits) + in + let ast = + (switch_loc, Switch { Switch.discriminant = discriminant_ast; cases = cases_ast }) + in + match uniform_switch_exit exits with + | None -> ast + | Some abnormal -> Abnormal.throw_stmt_control_flow_exception ast abnormal) + (*******************************************************) + | (loc, Return { Return.argument; comments }) -> let reason = mk_reason (RCustom "return") loc in let ret = Env.get_internal_var cx "return" loc in - let t, argument_ast = match argument with - | None -> VoidT.at loc, None + let (t, argument_ast) = + match argument with + | None -> (VoidT.at loc |> with_trust literal_trust, None) | Some expr -> if Env.in_predicate_scope () then - let ((_, t), _ as ast, p_map, n_map, _) = predicates_of_condition cx expr in - let pred_reason = replace_reason (fun desc -> - RPredicateOf desc - ) reason in - OpenPredT (pred_reason, t, p_map, n_map), Some ast + let ((((_, t), _) as ast), p_map, n_map, _) = predicates_of_condition cx expr in + let pred_reason = update_desc_reason (fun desc -> RPredicateOf desc) reason in + (OpenPredT (pred_reason, t, p_map, n_map), Some ast) else - let (_, t), _ as ast = expression cx expr in - t, Some ast + let (((_, t), _) as ast) = expression cx expr in + (t, Some ast) + in + let t = + match Env.var_scope_kind () with + | Scope.Async -> + (* Convert the return expression's type T to Promise. If the + * expression type is itself a Promise, ensure we still return + * a Promise via Promise.resolve. *) + let reason = mk_reason (RCustom "async return") loc in + let t' = + Flow.get_builtin_typeapp + cx + reason + "Promise" + [ + Tvar.mk_derivable_where cx reason (fun tvar -> + let funt = Flow.get_builtin cx "$await" reason in + let callt = mk_functioncalltype reason None [Arg t] tvar in + let reason = repos_reason (aloc_of_reason (reason_of_t t)) reason in + Flow.flow cx (funt, CallT (unknown_use, reason, callt))); + ] + in + Flow.reposition cx ~desc:(desc_of_t t) loc t' + | Scope.Generator -> + (* Convert the return expression's type R to Generator, where + * Y and R are internals, installed earlier. 
*) + let reason = mk_reason (RCustom "generator return") loc in + let t' = + Flow.get_builtin_typeapp + cx + reason + "Generator" + [ + Env.get_internal_var cx "yield" loc; + Tvar.mk_derivable_where cx reason (fun tvar -> Flow.flow_t cx (t, tvar)); + Env.get_internal_var cx "next" loc; + ] + in + Flow.reposition cx ~desc:(desc_of_t t) loc t' + | Scope.AsyncGenerator -> + let reason = mk_reason (RCustom "async generator return") loc in + let t' = + Flow.get_builtin_typeapp + cx + reason + "AsyncGenerator" + [ + Env.get_internal_var cx "yield" loc; + Tvar.mk_derivable_where cx reason (fun tvar -> Flow.flow_t cx (t, tvar)); + Env.get_internal_var cx "next" loc; + ] + in + Flow.reposition cx ~desc:(desc_of_t t) loc t' + | _ -> t in - let t = match Env.var_scope_kind () with - | Scope.Async -> - (* Convert the return expression's type T to Promise. If the - * expression type is itself a Promise, ensure we still return - * a Promise via Promise.resolve. *) - let reason = mk_reason (RCustom "async return") loc in - let t' = Flow.get_builtin_typeapp cx reason "Promise" [ - Tvar.mk_derivable_where cx reason (fun tvar -> - let funt = Flow.get_builtin cx "$await" reason in - let callt = mk_functioncalltype reason None [Arg t] tvar in - let reason = repos_reason (aloc_of_reason (reason_of_t t) |> ALoc.to_loc) reason in - Flow.flow cx (funt, CallT (unknown_use, reason, callt)) - ) - ] in - Flow.reposition cx ~desc:(desc_of_t t) loc t' - | Scope.Generator -> - (* Convert the return expression's type R to Generator, where - * Y and R are internals, installed earlier. *) - let reason = mk_reason (RCustom "generator return") loc in - let t' = Flow.get_builtin_typeapp cx reason "Generator" [ - Env.get_internal_var cx "yield" loc; - Tvar.mk_derivable_where cx reason (fun tvar -> - Flow.flow_t cx (t, tvar) - ); - Env.get_internal_var cx "next" loc - ] in - Flow.reposition cx ~desc:(desc_of_t t) loc t' - | Scope.AsyncGenerator -> - let reason = mk_reason (RCustom "async generator return") loc in - let t' = Flow.get_builtin_typeapp cx reason "AsyncGenerator" [ - Env.get_internal_var cx "yield" loc; - Tvar.mk_derivable_where cx reason (fun tvar -> - Flow.flow_t cx (t, tvar) - ); - Env.get_internal_var cx "next" loc - ] in - Flow.reposition cx ~desc:(desc_of_t t) loc t' - | _ -> t + let use_op = + Op + (FunReturnStatement + { value = Option.value_map argument ~default:(reason_of_t t) ~f:mk_expression_reason }) in - let use_op = Op (FunReturnStatement { - value = Option.value_map argument ~default:(reason_of_t t) ~f:mk_expression_reason; - }) in Flow.flow cx (t, UseT (use_op, ret)); Env.reset_current_activation loc; Abnormal.save Abnormal.Return; Abnormal.throw_stmt_control_flow_exception - (loc, Return { Return.argument = argument_ast }) + (loc, Return { Return.argument = argument_ast; comments }) Abnormal.Return - - | (loc, Throw { Throw.argument }) -> + | (loc, Throw { Throw.argument }) -> let argument_ast = expression cx argument in Env.reset_current_activation loc; Abnormal.save Abnormal.Throw; Abnormal.throw_stmt_control_flow_exception (loc, Throw { Throw.argument = argument_ast }) Abnormal.Throw - - (***************************************************************************) - (* Try-catch-finally statements have a lot of control flow possibilities. (To + (***************************************************************************) + (* Try-catch-finally statements have a lot of control flow possibilities. 
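The Return case above dispatches on Env.var_scope_kind () to decide how the returned type is wrapped before it flows into the function's return type. A hedged summary of that dispatch; scope_kind and return_wrapper are invented names, and only the wrapper names ("Promise", "Generator", "AsyncGenerator") come from the code above.

type scope_kind = Ordinary | Async | Generator | AsyncGenerator

(* Which builtin type constructor wraps the returned type, if any. *)
let return_wrapper = function
  | Async -> Some "Promise"                (* the $await-ed result, in a Promise *)
  | Generator -> Some "Generator"          (* Generator<Y, R, N> *)
  | AsyncGenerator -> Some "AsyncGenerator"
  | Ordinary -> None                       (* plain functions return T unchanged *)

In the real code the wrapping is built with Flow.get_builtin_typeapp, and the generator cases thread the internal "yield" and "next" variables in as the remaining type arguments.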
(To simplify matters, a missing catch block is considered to to be a catch block that throws, and a missing finally block is considered to be an empty block.) @@ -1146,129 +1301,122 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement But since (2) models exactly the states from which subsequent code is reachable, we can use its tighter approximation as the basis for subsequent analysis without loss of soundness. - *) - (***************************************************************************) - | (loc, Try { Try.block = (b_loc, b); handler; finalizer }) -> - let oldset = Changeset.clear () in - + *) + (***************************************************************************) + | (loc, Try { Try.block = (b_loc, b); handler; finalizer; comments }) -> + let oldset = Changeset.Global.clear () in (* save ref to initial env and swap in a clone *) let start_env = Env.peek_env () in Env.(update_env cx loc (clone_env start_env)); - let try_block_ast, try_abnormal = Env.in_lex_scope cx (fun () -> - Abnormal.catch_stmts_control_flow_exception (fun () -> - toplevel_decls cx b.Block.body; - toplevels cx b.Block.body - ) - ) in - + let (try_block_ast, try_abnormal) = + Env.in_lex_scope cx (fun () -> + Abnormal.catch_stmts_control_flow_exception (fun () -> + toplevel_decls cx b.Block.body; + toplevels cx b.Block.body)) + in (* save ref to env at end of try *) let try_env = Env.peek_env () in - (* traverse catch block, save exceptions *) - let catch_ast, catch_abnormal = match handler with - | None -> - (* a missing catch is equivalent to a catch that always throws *) - None, Some Abnormal.Throw - - | Some (h_loc, h) -> - (* if try throws to here, we need an env that's conservative + let (catch_ast, catch_abnormal) = + match handler with + | None -> + (* a missing catch is equivalent to a catch that always throws *) + (None, Some Abnormal.Throw) + | Some (h_loc, h) -> + (* if try throws to here, we need an env that's conservative over everything that happened from start_env to try_env *) - Env.( - let e = clone_env start_env in - merge_env cx loc (e, e, try_env) (Changeset.peek ()); - update_env cx loc e - ); + Env.( + let e = clone_env start_env in + merge_env cx loc (e, e, try_env) (Changeset.Global.peek ()); + update_env cx loc e); - let catch_block_ast, catch_abnormal = catch_clause cx h in - Some (h_loc, catch_block_ast), catch_abnormal + let (catch_block_ast, catch_abnormal) = catch_clause cx h in + (Some (h_loc, catch_block_ast), catch_abnormal) in - (* save ref to env at end of catch *) let catch_env = Env.peek_env () in - (* build initial env for non-throwing finally *) - let nonthrow_finally_env = Env.(match catch_abnormal with - | None -> - (* if catch ends normally, then non-throwing finally can be + let nonthrow_finally_env = + Env.( + match catch_abnormal with + | None -> + (* if catch ends normally, then non-throwing finally can be reached via it or a non-throwing try. 
merge terminal states *) - let e = clone_env try_env in - merge_env cx loc (e, e, catch_env) (Changeset.peek ()); - e - | Some _ -> - (* if catch throws, then the only way into non-throwing finally + let e = clone_env start_env in + merge_env cx loc (e, try_env, catch_env) (Changeset.Global.peek ()); + e + | Some _ -> + (* if catch throws, then the only way into non-throwing finally is via non-throwing try *) - try_env - ) in - + try_env) + in (* traverse finally block, save exceptions, and leave in place the terminal env of the non-throwing case (in which subsequent code is reachable) *) - let finally_ast, finally_abnormal = match finalizer with - | None -> - Env.update_env cx loc nonthrow_finally_env; - None, None - - | Some (f_loc, { Block.body }) -> - (* analyze twice, with different start states *) - - (* 1. throwing-finally case. *) - (* env may be in any state from start of try through end of catch *) - Env.( - let e = clone_env start_env in - merge_env cx loc (e, e, catch_env) (Changeset.peek ()); - update_env cx loc e - ); - - let _, finally_abnormal = Env.in_lex_scope cx (fun () -> - Abnormal.catch_stmts_control_flow_exception (fun () -> - toplevel_decls cx body; - toplevels cx body - ) - ) in - - (* 2. non-throwing finally case. *) - Env.update_env cx loc nonthrow_finally_env; - - (* (exceptions will be the same in both cases) *) - let finally_block_ast, _ = Env.in_lex_scope cx (fun () -> - Abnormal.catch_stmts_control_flow_exception (fun () -> - toplevel_decls cx body; - toplevels cx body - ) - ) in - - Some (f_loc, { Block.body = finally_block_ast }), finally_abnormal + let (finally_ast, finally_abnormal) = + match finalizer with + | None -> + Env.update_env cx loc nonthrow_finally_env; + (None, None) + | Some (f_loc, { Block.body }) -> + (* analyze twice, with different start states *) + + (* 1. throwing-finally case. *) + (* env may be in any state from start of try through end of catch *) + Env.( + let e = clone_env start_env in + merge_env cx loc (e, e, catch_env) (Changeset.Global.peek ()); + update_env cx loc e); + + let (_, finally_abnormal) = + Env.in_lex_scope cx (fun () -> + Abnormal.catch_stmts_control_flow_exception (fun () -> + toplevel_decls cx body; + toplevels cx body)) + in + (* 2. non-throwing finally case. *) + Env.update_env cx loc nonthrow_finally_env; + + (* (exceptions will be the same in both cases) *) + let (finally_block_ast, _) = + Env.in_lex_scope cx (fun () -> + Abnormal.catch_stmts_control_flow_exception (fun () -> + toplevel_decls cx body; + toplevels cx body)) + in + (Some (f_loc, { Block.body = finally_block_ast }), finally_abnormal) in - - let newset = Changeset.merge oldset in + let newset = Changeset.Global.merge oldset in ignore newset; - let ast = loc, Try { Try. 
- block = b_loc, { Block.body = try_block_ast }; - handler = catch_ast; - finalizer = finally_ast; - } in - + let ast = + ( loc, + Try + { + Try.block = (b_loc, { Block.body = try_block_ast }); + handler = catch_ast; + finalizer = finally_ast; + comments; + } ) + in (* if finally has abnormal control flow, we throw here *) - ignore (Abnormal.check_stmt_control_flow_exception (ast, finally_abnormal) - : (Loc.t, Loc.t * Type.t) Ast.Statement.t); + ignore + ( Abnormal.check_stmt_control_flow_exception (ast, finally_abnormal) + : (ALoc.t, ALoc.t * Type.t) Ast.Statement.t ); (* other ways we throw due to try/catch abends *) - begin match try_abnormal, catch_abnormal with - | Some (Abnormal.Throw as try_abnormal), Some Abnormal.Throw - | Some (Abnormal.Return as try_abnormal), Some _ -> + begin + match (try_abnormal, catch_abnormal) with + | (Some (Abnormal.Throw as try_abnormal), Some Abnormal.Throw) + | (Some (Abnormal.Return as try_abnormal), Some _) -> Abnormal.throw_stmt_control_flow_exception ast try_abnormal - - | Some Abnormal.Throw, Some (Abnormal.Return as catch_abnormal) -> + | (Some Abnormal.Throw, Some (Abnormal.Return as catch_abnormal)) -> Abnormal.throw_stmt_control_flow_exception ast catch_abnormal - - | _ -> ast + | _ -> ast end - - - (***************************************************************************) - (* Refinements for `while` are derived by the following Hoare logic rule: + (***************************************************************************) + (* Refinements for `while` are derived by the following Hoare logic rule: [Pre' & c] S [Post'] Pre' = Pre | Post' @@ -1276,43 +1424,35 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement ---------------------- [Pre] while c S [Post] *) - (***************************************************************************) - | (loc, While { While.test; body }) -> + (***************************************************************************) + | (loc, While { While.test; body }) -> let save_break = Abnormal.clear_saved (Abnormal.Break None) in let save_continue = Abnormal.clear_saved (Abnormal.Continue None) in - (* generate loop test preds and their complements *) - let test_ast, preds, not_preds, orig_types = - predicates_of_condition cx test in - + let (test_ast, preds, not_preds, orig_types) = predicates_of_condition cx test in (* save current changeset and install an empty one *) - let oldset = Changeset.clear () in - + let oldset = Changeset.Global.clear () in (* widen_env wraps specifics in tvars, anticipating widening inflows *) Env.widen_env cx loc; (* start_env is Pre above: env as of loop top *) let start_env = Env.peek_env () in - (* swap in Pre & c *) Env.( update_env cx loc (clone_env start_env); - ignore (refine_with_preds cx loc preds orig_types) - ); + ignore (refine_with_preds cx loc preds orig_types)); (* traverse loop body - after this, body_env = Post' *) - let body_ast, _ = Abnormal.catch_stmt_control_flow_exception - (fun () -> statement cx body) in - + let (body_ast, _) = + Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) + in (* save ref to env after loop body *) let body_env = Env.peek_env () in - (* save loop body changeset to newset, install merged changes *) - let newset = Changeset.merge oldset in - + let newset = Changeset.Global.merge oldset in (* if we continued out of the loop, havoc vars changed by loop body *) - if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None - then Env.havoc_vars newset; + if Abnormal.swap_saved 
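The match at the end of the try/catch/finally case above combines the abnormal results of the try and catch blocks into an abnormal result for the whole statement. A self-contained model of that combination (Break and Continue are omitted and try_catch_abnormal is an invented name):

type abnormal = Return | Throw

let try_catch_abnormal (try_ab : abnormal option) (catch_ab : abnormal option) : abnormal option =
  match (try_ab, catch_ab) with
  | (Some Throw, Some Throw) -> Some Throw   (* both blocks throw *)
  | (Some Return, Some _) -> Some Return     (* try returns and catch never completes *)
  | (Some Throw, Some Return) -> Some Return (* try throws into a returning catch *)
  | _ -> None                                (* some path completes normally *)

Anything other than None is rethrown as a statement-level control-flow exception, exactly as the code above does with Abnormal.throw_stmt_control_flow_exception.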
(Abnormal.Continue None) save_continue <> None then + Env.havoc_vars newset; (* widen start_env with new specifics from body_env (turning Pre into Pre' = Pre | Post') @@ -1320,17 +1460,14 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement Env.( copy_env cx loc (start_env, body_env) newset; update_env cx loc start_env; - ignore (refine_with_preds cx loc not_preds orig_types) - ); + ignore (refine_with_preds cx loc not_preds orig_types)); (* if we broke out of the loop, havoc vars changed by loop body *) - if Abnormal.swap_saved (Abnormal.Break None) save_break <> None - then Env.havoc_vars newset; - - loc, While { While.test = test_ast; body = body_ast } + if Abnormal.swap_saved (Abnormal.Break None) save_break <> None then Env.havoc_vars newset; - (***************************************************************************) - (* Refinements for `do-while` are derived by the following Hoare logic rule: + (loc, While { While.test = test_ast; body = body_ast }) + (***************************************************************************) + (* Refinements for `do-while` are derived by the following Hoare logic rule: [Pre'] S [Post'] Pre' = Pre | (Post' & c) @@ -1338,57 +1475,49 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement ------------------------- [Pre] do S while c [Post] *) - (***************************************************************************) - | (loc, DoWhile { DoWhile.body; test }) -> + (***************************************************************************) + | (loc, DoWhile { DoWhile.body; test; comments }) -> let save_break = Abnormal.clear_saved (Abnormal.Break None) in let save_continue = Abnormal.clear_saved (Abnormal.Continue None) in - let env = Env.peek_env () in - let oldset = Changeset.clear () in + let env = Env.peek_env () in + let oldset = Changeset.Global.clear () in (* env = Pre *) (* ENV = [env] *) - Env.widen_env cx loc; - (* env = Pre', Pre' > Pre *) + (* env = Pre', Pre' > Pre *) let body_env = Env.clone_env env in Env.update_env cx loc body_env; + (* body_env = Pre' *) (* ENV = [body_env] *) - - let body_ast, body_abnormal = + let (body_ast, body_abnormal) = Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) |> Abnormal.ignore_break_or_continue_to_label None in + if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None then + Env.havoc_vars (Changeset.Global.peek ()); - if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None - then Env.havoc_vars (Changeset.peek ()); - - let test_ast, preds, not_preds, xtypes = - predicates_of_condition cx test in + let (test_ast, preds, not_preds, xtypes) = predicates_of_condition cx test in (* body_env = Post' *) - let done_env = Env.clone_env body_env in (* done_env = Post' *) - let _ = Env.refine_with_preds cx loc preds xtypes in (* body_env = Post' & c *) - - let newset = Changeset.merge oldset in + let newset = Changeset.Global.merge oldset in Env.copy_env cx loc (env, body_env) newset; - (* Pre' > Post' & c *) + (* Pre' > Post' & c *) Env.update_env cx loc done_env; let _ = Env.refine_with_preds cx loc not_preds xtypes in - if Abnormal.swap_saved (Abnormal.Break None) save_break <> None - then Env.havoc_vars newset; + if Abnormal.swap_saved (Abnormal.Break None) save_break <> None then Env.havoc_vars newset; + (* ENV = [done_env] *) (* done_env = Post' & ~c *) - - let ast = loc, DoWhile { DoWhile.body = body_ast; test = test_ast } in + let ast = (loc, DoWhile { DoWhile.body = body_ast; test = 
test_ast; comments }) in Abnormal.check_stmt_control_flow_exception (ast, body_abnormal) - - (***************************************************************************) - (* Refinements for `for` are derived by the following Hoare logic rule: + (***************************************************************************) + (* Refinements for `for` are derived by the following Hoare logic rule: [Pre] i [Init] [Pre' & c] S;u [Post'] @@ -1399,68 +1528,55 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement NOTE: This rule is similar to that for `while`. *) - (***************************************************************************) - | (loc, For { For.init; test; update; body }) -> + (***************************************************************************) + | (loc, For { For.init; test; update; body }) -> Env.in_lex_scope cx (fun () -> - let save_break = Abnormal.clear_saved (Abnormal.Break None) in - let save_continue = Abnormal.clear_saved (Abnormal.Continue None) in - let init_ast = match init with - | None -> None - | Some (For.InitDeclaration (decl_loc, decl)) -> + let save_break = Abnormal.clear_saved (Abnormal.Break None) in + let save_continue = Abnormal.clear_saved (Abnormal.Continue None) in + let init_ast = + match init with + | None -> None + | Some (For.InitDeclaration (decl_loc, decl)) -> variable_decl cx decl; Some (For.InitDeclaration (decl_loc, variables cx decl)) - | Some (For.InitExpression expr) -> - Some (For.InitExpression (expression cx expr)) - in - - let env = Env.peek_env () in - let oldset = Changeset.clear () in - Env.widen_env cx loc; - - let do_env = Env.clone_env env in - Env.update_env cx loc do_env; - - let test_ast, preds, not_preds, xtypes = match test with - | None -> - None, Key_map.empty, Key_map.empty, - Key_map.empty (* TODO: prune the "not" case *) - | Some expr -> - let expr_ast, preds, not_preds, xtypes = - predicates_of_condition cx expr in - Some expr_ast, preds, not_preds, xtypes - in - - let body_env = Env.clone_env do_env in - Env.update_env cx loc body_env; - let _ = Env.refine_with_preds cx loc preds xtypes in - - let body_ast, _ = Abnormal.catch_stmt_control_flow_exception - (fun () -> statement cx body) in - - if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None - then Env.havoc_vars (Changeset.peek ()); + | Some (For.InitExpression expr) -> Some (For.InitExpression (expression cx expr)) + in + let env = Env.peek_env () in + let oldset = Changeset.Global.clear () in + Env.widen_env cx loc; - let update_ast = - Option.map ~f:(expression cx) update in + let do_env = Env.clone_env env in + Env.update_env cx loc do_env; - let newset = Changeset.merge oldset in - Env.copy_env cx loc (env, body_env) newset; + let (test_ast, preds, not_preds, xtypes) = + match test with + | None -> + (None, Key_map.empty, Key_map.empty, Key_map.empty) (* TODO: prune the "not" case *) + | Some expr -> + let (expr_ast, preds, not_preds, xtypes) = predicates_of_condition cx expr in + (Some expr_ast, preds, not_preds, xtypes) + in + let body_env = Env.clone_env do_env in + Env.update_env cx loc body_env; + let _ = Env.refine_with_preds cx loc preds xtypes in + let (body_ast, _) = + Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) + in + if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None then + Env.havoc_vars (Changeset.Global.peek ()); - Env.update_env cx loc do_env; - let _ = Env.refine_with_preds cx loc not_preds xtypes in - if Abnormal.swap_saved (Abnormal.Break 
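The loop and try cases in this hunk share a changeset discipline, which the diff renames from Changeset.clear/peek/merge to Changeset.Global.clear/peek/merge: save and reset the ambient changeset before analyzing a sub-statement, then merge the saved set back in and use the returned delta to decide what to havoc or copy between environments. A hedged sketch of that pattern as a wrapper; with_fresh_changeset is invented, while the Changeset.Global calls are used as in the new code.

(* Run [f] with a fresh global changeset; return its result together with the
   set of writes and refinements recorded while it ran. *)
let with_fresh_changeset (f : unit -> 'a) : 'a * Changeset.t =
  let oldset = Changeset.Global.clear () in
  let result = f () in
  let newset = Changeset.Global.merge oldset in
  (result, newset)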
None) save_break <> None - then Env.havoc_vars newset; + let update_ast = Option.map ~f:(expression cx) update in + let newset = Changeset.Global.merge oldset in + Env.copy_env cx loc (env, body_env) newset; - loc, For { For. - init = init_ast; - test = test_ast; - update = update_ast; - body = body_ast; - } - ) + Env.update_env cx loc do_env; + let _ = Env.refine_with_preds cx loc not_preds xtypes in + if Abnormal.swap_saved (Abnormal.Break None) save_break <> None then + Env.havoc_vars newset; - (***************************************************************************) - (* Refinements for `for-in` are derived by the following Hoare logic rule: + (loc, For { For.init = init_ast; test = test_ast; update = update_ast; body = body_ast })) + (***************************************************************************) + (* Refinements for `for-in` are derived by the following Hoare logic rule: [Pre] o [Init] [Pre'] S [Post'] @@ -1469,1089 +1585,1073 @@ and statement cx : 'a -> (Loc.t, Loc.t * Type.t) Ast.Statement.t = Ast.Statement -------------------------- [Pre] for (i in o) S [Post] *) - (***************************************************************************) - | (loc, ForIn { ForIn.left; right; body; each; }) -> + (***************************************************************************) + | (loc, ForIn { ForIn.left; right; body; each }) -> let reason = mk_reason (RCustom "for-in") loc in let save_break = Abnormal.clear_saved (Abnormal.Break None) in let save_continue = Abnormal.clear_saved (Abnormal.Continue None) in - - let (_, right_t), _ as right_ast = expression cx right in - Flow.flow cx (right_t, AssertForInRHST reason); - Env.in_lex_scope cx (fun () -> + let env = Env.peek_env () in + let oldset = Changeset.Global.clear () in + Env.widen_env cx loc; - let env = Env.peek_env () in - let oldset = Changeset.clear () in - Env.widen_env cx loc; - - let body_env = Env.clone_env env in - Env.update_env cx loc body_env; - - let _, preds, _, xtypes = - predicates_of_condition cx right in - ignore (Env.refine_with_preds cx loc preds xtypes : Changeset.t); + let body_env = Env.clone_env env in + Env.update_env cx loc body_env; - let left_ast = match left with - | ForIn.LeftDeclaration (decl_loc, ({ VariableDeclaration. - kind; declarations = [vdecl] - } as decl)) -> + let eval_right () = + let ((((right_loc, _), _) as right_ast), preds, _, xtypes) = + predicates_of_condition cx right + in + let (_ : Changeset.t) = Env.refine_with_preds cx right_loc preds xtypes in + right_ast + in + let (left_ast, right_ast) = + match left with + | ForIn.LeftDeclaration + ( decl_loc, + ( { + VariableDeclaration.kind; + declarations = + [(vdecl_loc, { VariableDeclaration.Declarator.id; init = None })]; + } as decl ) ) -> variable_decl cx decl; - let vdecl_ast = variable cx kind ~if_uninitialized:StrT.at vdecl in - ForIn.LeftDeclaration (decl_loc, { VariableDeclaration. - kind; - declarations = [vdecl_ast]; - }) - - | ForIn.LeftPattern (pat_loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = (name_loc, name_str); optional; annot; - }) -> - let t = StrT.at pat_loc in - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name_str) pat_loc); - init = reason_of_t t; - }) in + let right_ast = eval_right () in + let (id_ast, _) = + variable cx kind id None ~if_uninitialized:(StrT.at %> with_trust bogus_trust) + in + ( ForIn.LeftDeclaration + ( decl_loc, + { + VariableDeclaration.kind; + declarations = + [(vdecl_loc, { VariableDeclaration.Declarator.id = id_ast; init = None })]; + } ), + right_ast ) + | ForIn.LeftPattern + ( pat_loc, + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = + (name_loc, ({ Ast.Identifier.name = name_str; comments = _ } as id)); + optional; + annot; + } ) -> + let right_ast = eval_right () in + let t = StrT.at pat_loc |> with_trust bogus_trust in + let use_op = + Op + (AssignVar + { + var = Some (mk_reason (RIdentifier name_str) pat_loc); + init = reason_of_t t; + }) + in ignore Env.(set_var cx ~use_op name_str t pat_loc); - ForIn.LeftPattern ((pat_loc, t), Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = ((name_loc, t), name_str); - annot = Option.map - ~f:(fun (a_loc, _) -> a_loc, (Typed_ast.error_annot, Typed_ast.Type.error)) - annot; - optional; - }) - - | _ -> - Flow.add_output cx Flow_error.(EInternal (loc, ForInLHS)); - Typed_ast.Statement.ForIn.left_error - in - - let body_ast, _ = Abnormal.catch_stmt_control_flow_exception - (fun () -> statement cx body) in - - let newset = Changeset.merge oldset in - - if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None - then Env.havoc_vars newset; - Env.copy_env cx loc (env,body_env) newset; - - Env.update_env cx loc env; - if Abnormal.swap_saved (Abnormal.Break None) save_break <> None - then Env.havoc_vars newset; - - loc, ForIn { ForIn. 
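Both left-hand-side forms in the for-in case above are initialized to a string type, since for-in enumerates property keys, which are strings. A one-line restatement of the expression used for that; for_in_element_type is an invented name.

(* for (k in o) binds k to the enumerated property keys of o. *)
let for_in_element_type pat_loc = StrT.at pat_loc |> with_trust bogus_trust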
- left = left_ast; - right = right_ast; - body = body_ast; - each; - } - ) + ( ForIn.LeftPattern + ( (pat_loc, t), + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = ((name_loc, t), id); + annot = + (match annot with + | Ast.Type.Available _ -> + Tast_utils.unchecked_mapper#type_annotation_hint annot + | Ast.Type.Missing loc -> Ast.Type.Missing (loc, t)); + optional; + } ), + right_ast ) + | _ -> + let right_ast = eval_right () in + Flow.add_output cx Error_message.(EInternal (loc, ForInLHS)); + (Tast_utils.error_mapper#for_in_statement_lhs left, right_ast) + in + let ((_, right_t), _) = right_ast in + Flow.flow cx (right_t, AssertForInRHST reason); - | (loc, ForOf { ForOf.left; right; body; async; }) -> - let reason_desc = match left with - | ForOf.LeftDeclaration (_, {VariableDeclaration.declarations = - [(_, {VariableDeclaration.Declarator.id = (_, Ast.Pattern.Identifier - {Ast.Pattern.Identifier.name=(_, x); _}); _})]; _}) -> RIdentifier x - | ForOf.LeftPattern (_, Ast.Pattern.Identifier - {Ast.Pattern.Identifier.name=(_, x); _}) -> RIdentifier x - | _ -> RCustom "for-of element" + let (body_ast, _) = + Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) + in + let newset = Changeset.Global.merge oldset in + if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None then + Env.havoc_vars newset; + Env.copy_env cx loc (env, body_env) newset; + + Env.update_env cx loc env; + if Abnormal.swap_saved (Abnormal.Break None) save_break <> None then + Env.havoc_vars newset; + + (loc, ForIn { ForIn.left = left_ast; right = right_ast; body = body_ast; each })) + | (loc, ForOf { ForOf.left; right; body; async }) -> + let reason_desc = + match left with + | ForOf.LeftDeclaration + ( _, + { + VariableDeclaration.declarations = + [ + ( _, + { + VariableDeclaration.Declarator.id = + ( _, + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = + (_, { Ast.Identifier.name; comments = _ }); + _; + } ); + _; + } ); + ]; + _; + } ) -> + RIdentifier name + | ForOf.LeftPattern + ( _, + Ast.Pattern.Identifier + { Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name; comments = _ }); _ } ) + -> + RIdentifier name + | _ -> RCustom "for-of element" in let reason = mk_reason reason_desc loc in let save_break = Abnormal.clear_saved (Abnormal.Break None) in let save_continue = Abnormal.clear_saved (Abnormal.Continue None) in - let (_, t), _ = expression cx right in - - let element_tvar = Tvar.mk cx reason in - let o = - let targs = [element_tvar; AnyT.at loc; AnyT.at loc] in - if async then - let reason = mk_reason - (RCustom "async iteration expected on AsyncIterable") loc in - Flow.get_builtin_typeapp cx reason "$AsyncIterable" targs - else - Flow.get_builtin_typeapp cx - (mk_reason (RCustom "iteration expected on Iterable") loc) - "$Iterable" targs - in - - Flow.flow_t cx (t, o); (* null/undefined are NOT allowed *) - - Env.in_lex_scope cx (fun () -> - - let env = Env.peek_env () in - let oldset = Changeset.clear () in - Env.widen_env cx loc; - - let body_env = Env.clone_env env in - Env.update_env cx loc body_env; - - let right_ast, preds, _, xtypes = - predicates_of_condition cx right in - let _ = Env.refine_with_preds cx loc preds xtypes in - - let left_ast = match left with - | ForOf.LeftDeclaration (decl_loc, ({ VariableDeclaration. 
- kind; declarations = [vdecl] - } as decl)) -> - let repos_tvar _ = Flow.reposition cx (loc_of_t t) element_tvar in - variable_decl cx decl; - let vdecl_ast = variable cx kind ~if_uninitialized:repos_tvar vdecl in - ForOf.LeftDeclaration (decl_loc, { VariableDeclaration. - kind; - declarations = [vdecl_ast] - }) - - | ForOf.LeftPattern (pat_loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = (name_loc, name_str); optional; annot; - }) -> - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name_str) pat_loc); - init = reason_of_t element_tvar; - }) in - ignore Env.(set_var cx ~use_op name_str element_tvar pat_loc); - ForOf.LeftPattern ( - (pat_loc, element_tvar), - Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = ((name_loc, element_tvar), name_str); - annot = Option.map - ~f:(fun (loc, _) -> loc, (Typed_ast.error_annot, Typed_ast.Type.error)) - annot; - optional; - } - ) - - | _ -> - Flow.add_output cx Flow_error.(EInternal (loc, ForOfLHS)); - Typed_ast.Statement.ForOf.left_error + let eval_right () = + let ((((right_loc, t), _) as right_ast), preds, _, xtypes) = + predicates_of_condition cx right in + let (_ : Changeset.t) = Env.refine_with_preds cx right_loc preds xtypes in + let elem_t = Tvar.mk cx reason in + let o = + (* Second and third args here are never relevant to the loop, but they should be as + general as possible to allow iterating over arbitrary generators *) + let targs = + [ + elem_t; + MixedT.why reason |> with_trust bogus_trust; + EmptyT.why reason |> with_trust bogus_trust; + ] + in + if async then + let reason = mk_reason (RCustom "async iteration expected on AsyncIterable") loc in + Flow.get_builtin_typeapp cx reason "$AsyncIterable" targs + else + Flow.get_builtin_typeapp + cx + (mk_reason (RCustom "iteration expected on Iterable") loc) + "$Iterable" + targs + in + Flow.flow_t cx (t, o); - let body_ast, _ = Abnormal.catch_stmt_control_flow_exception - (fun () -> statement cx body) in - - let newset = Changeset.merge oldset in - - if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None - then Env.havoc_vars newset; - Env.copy_env cx loc (env,body_env) newset; - - Env.update_env cx loc env; - if Abnormal.swap_saved (Abnormal.Break None) save_break <> None - then Env.havoc_vars newset; - - loc, ForOf { ForOf. 
- left = left_ast; - right = right_ast; - body = body_ast; - async; - } - ) - - | (_, Debugger) as stmt -> stmt - - | (loc, FunctionDeclaration func) -> - let {Ast.Function.id; params; return; _} = func in - let sig_loc = match params, return with - | _, Ast.Function.Available (end_loc, _) - | (end_loc, _), Ast.Function.Missing _ - -> Loc.btwn loc end_loc + (* null/undefined are NOT allowed *) + (Flow.reposition cx (loc_of_t t) elem_t, right_ast) in - let fn_type, func_ast = mk_function None cx sig_loc func in - let type_table_loc = Type_table.function_decl_loc id loc in - Type_table.set (Context.type_table cx) type_table_loc fn_type; + Env.in_lex_scope cx (fun () -> + let env = Env.peek_env () in + let oldset = Changeset.Global.clear () in + Env.widen_env cx loc; + + let body_env = Env.clone_env env in + Env.update_env cx loc body_env; + + let (left_ast, right_ast) = + match left with + | ForOf.LeftDeclaration + ( decl_loc, + ( { + VariableDeclaration.kind; + declarations = + [(vdecl_loc, { VariableDeclaration.Declarator.id; init = None })]; + } as decl ) ) -> + variable_decl cx decl; + let (elem_t, right_ast) = eval_right () in + let (id_ast, _) = variable cx kind id None ~if_uninitialized:(fun _ -> elem_t) in + ( ForOf.LeftDeclaration + ( decl_loc, + { + VariableDeclaration.kind; + declarations = + [(vdecl_loc, { VariableDeclaration.Declarator.id = id_ast; init = None })]; + } ), + right_ast ) + | ForOf.LeftPattern + ( pat_loc, + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = + (name_loc, ({ Ast.Identifier.name = name_str; comments = _ } as id)); + optional; + annot; + } ) -> + let (elem_t, right_ast) = eval_right () in + let use_op = + Op + (AssignVar + { + var = Some (mk_reason (RIdentifier name_str) pat_loc); + init = reason_of_t elem_t; + }) + in + ignore Env.(set_var cx ~use_op name_str elem_t pat_loc); + ( ForOf.LeftPattern + ( (pat_loc, elem_t), + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = ((name_loc, elem_t), id); + annot = + (match annot with + | Ast.Type.Available annot -> + Ast.Type.Available (Tast_utils.error_mapper#type_annotation annot) + | Ast.Type.Missing loc -> Ast.Type.Missing (loc, elem_t)); + optional; + } ), + right_ast ) + | _ -> + let (_, right_ast) = eval_right () in + Flow.add_output cx Error_message.(EInternal (loc, ForOfLHS)); + (Tast_utils.error_mapper#for_of_statement_lhs left, right_ast) + in + let (body_ast, _) = + Abnormal.catch_stmt_control_flow_exception (fun () -> statement cx body) + in + let newset = Changeset.Global.merge oldset in + if Abnormal.swap_saved (Abnormal.Continue None) save_continue <> None then + Env.havoc_vars newset; + Env.copy_env cx loc (env, body_env) newset; + + Env.update_env cx loc env; + if Abnormal.swap_saved (Abnormal.Break None) save_break <> None then + Env.havoc_vars newset; + + (loc, ForOf { ForOf.left = left_ast; right = right_ast; body = body_ast; async })) + | (_, Debugger) as stmt -> stmt + | (loc, FunctionDeclaration func) -> + let { Ast.Function.id; sig_loc; _ } = func in + let (fn_type, func_ast) = mk_function_declaration None cx sig_loc func in (match id with - | Some(id_loc, name) -> - let id_info = name, fn_type, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) loc); - init = reason_of_t fn_type - }) in + | Some (_, { Ast.Identifier.name; comments = _ }) -> + let use_op = + Op + (AssignVar + { var = Some (mk_reason (RIdentifier name) loc); init = reason_of_t fn_type }) + in 
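In the for-of case above, only the element type of the iterated value matters to the loop body, so the remaining type arguments of $Iterable / $AsyncIterable are made as permissive as possible. A hedged extraction of just that argument list; iterable_targs is an invented name, while the expressions are the ones used above.

(* Element type first; mixed/empty for the other parameters keep arbitrary
   generators assignable to the iterable typeapp. *)
let iterable_targs reason elem_t =
  [ elem_t;
    MixedT.why reason |> with_trust bogus_trust;
    EmptyT.why reason |> with_trust bogus_trust ]

The right-hand side's type is then required to flow into the resulting typeapp, which is why null and undefined are rejected as for-of subjects.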
Env.init_fun cx ~use_op name fn_type loc | None -> ()); - loc, FunctionDeclaration func_ast - - | (loc, DeclareVariable { DeclareVariable. - id = id_loc, name; - annot; - }) -> + (loc, FunctionDeclaration func_ast) + | (loc, EnumDeclaration enum) -> + if not @@ Context.enable_enums cx then + Flow.add_output cx (Error_message.EExperimentalEnums loc); + EnumDeclaration.( + let { id = (name_loc, ident); body } = enum in + let { Ast.Identifier.name; _ } = ident in + let t = AnyT.untyped @@ mk_reason REnum loc in + let id' = ((name_loc, t), ident) in + Env.declare_implicit_const Scope.Entry.EnumNameBinding cx name name_loc; + let use_op = + Op + (AssignVar { var = Some (mk_reason (RIdentifier name) name_loc); init = reason_of_t t }) + in + Env.init_implicit_const + Scope.Entry.EnumNameBinding + cx + ~use_op + name + ~has_anno:false + t + name_loc; + (loc, EnumDeclaration { id = id'; body })) + | ( loc, + DeclareVariable + { DeclareVariable.id = (id_loc, ({ Ast.Identifier.name; comments = _ } as id)); annot } + ) -> let r = mk_reason (RCustom (spf "declare %s" name)) loc in - let t, annot_ast = Anno.mk_type_annotation cx SMap.empty r annot in - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); + let (t, annot_ast) = Anno.mk_type_annotation cx SMap.empty r annot in Env.unify_declared_type cx name t; - loc, DeclareVariable { DeclareVariable. - id = (id_loc, t), name; - annot = annot_ast; - } - - | (loc, DeclareFunction declare_function) -> - (match declare_function_to_function_declaration cx declare_function with + (loc, DeclareVariable { DeclareVariable.id = ((id_loc, t), id); annot = annot_ast }) + | (loc, DeclareFunction declare_function) -> + (match declare_function_to_function_declaration cx loc declare_function with | Some (func_decl, reconstruct_ast) -> - loc, DeclareFunction (reconstruct_ast (statement cx (loc, func_decl))) + (loc, DeclareFunction (reconstruct_ast (statement cx (loc, func_decl)))) | None -> - let { DeclareFunction.id = _, name as id; _ } = declare_function in - let annot = Option.value - (Scope.get_declare_func_annot name (Env.peek_scope ())) - ~default:(Loc.none, (Typed_ast.error_annot, Typed_ast.Type.error)) - in - loc, DeclareFunction { DeclareFunction. 
- id; - annot; - predicate = None; - } - ) - - | (loc, VariableDeclaration decl) -> - loc, VariableDeclaration (variables cx decl) - - | (class_loc, ClassDeclaration c) -> + (* error case *) + let { DeclareFunction.id = (id_loc, id_name); annot; predicate; _ } = declare_function in + let { Ast.Identifier.name; comments = _ } = id_name in + let (t, annot_ast) = Anno.mk_type_available_annotation cx SMap.empty annot in + Env.unify_declared_fun_type cx name loc t; + let predicate = Option.map ~f:Tast_utils.error_mapper#type_predicate predicate in + ( loc, + DeclareFunction + { DeclareFunction.id = ((id_loc, t), id_name); annot = annot_ast; predicate } )) + | (loc, VariableDeclaration decl) -> (loc, VariableDeclaration (variables cx decl)) + | (class_loc, ClassDeclaration c) -> let (name_loc, name) = extract_class_name class_loc c in let reason = DescFormat.instance_reason name name_loc in Env.declare_implicit_let Scope.Entry.ClassNameBinding cx name name_loc; - let class_t, c_ast = mk_class cx class_loc reason c in - Type_table.set (Context.type_table cx) class_loc class_t; - Option.iter c.Ast.Class.id ~f:(fun (id_loc, id_name) -> - let id_info = id_name, class_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - ); + let (class_t, c_ast) = mk_class cx class_loc ~name_loc reason c in Env.init_implicit_let Scope.Entry.ClassNameBinding cx - ~use_op:(Op (AssignVar { - var = Some (mk_reason (RIdentifier name) name_loc); - init = reason_of_t class_t; - })) + ~use_op: + (Op + (AssignVar + { var = Some (mk_reason (RIdentifier name) name_loc); init = reason_of_t class_t })) name ~has_anno:false class_t name_loc; - class_loc, ClassDeclaration c_ast - - | (loc, DeclareClass decl) -> - loc, DeclareClass (declare_class cx loc decl) - - | (loc, DeclareInterface decl) -> - loc, DeclareInterface (interface cx loc decl) - | (loc, InterfaceDeclaration decl) -> - loc, InterfaceDeclaration (interface cx loc decl) - - | (loc, DeclareModule { DeclareModule.id; body; kind; }) -> - let id_loc, name = match id with - | DeclareModule.Identifier (id_loc, value) - | DeclareModule.Literal (id_loc, { Ast.StringLiteral.value; _ }) -> - id_loc, value - in - let body_loc, { Ast.Statement.Block.body = elements } = body in - - let module_ref = Reason.internal_module_name name in - - let module_scope = Scope.fresh () in - Scope.add_entry - (Reason.internal_name "exports") - (Scope.Entry.new_var - ~loc:Loc.none - ~specific:Locationless.EmptyT.t - Locationless.MixedT.t) - module_scope; - - Env.push_var_scope cx module_scope; - let outer_module_exports_kind = Context.module_kind cx in - Context.set_module_kind cx (Context.CommonJSModule None); - Context.push_declare_module cx module_ref; - - let initial_module_t = module_t_of_cx cx in - - let elements_ast, elements_abnormal = - Abnormal.catch_stmts_control_flow_exception (fun () -> - toplevel_decls cx elements; - toplevels cx elements; - ) - in - - let reason = mk_reason (RModule name) loc in - let module_t = match Context.module_kind cx with - | Context.ESModule -> mk_module_t cx reason - | Context.CommonJSModule clobbered -> - let open Scope in - let open Entry in - let cjs_exports = match clobbered with - | Some loc -> get_module_exports cx loc - | None -> - let props = SMap.fold (fun x entry acc -> - match entry with - | Value {specific; _} -> - let loc = Some (entry_loc entry) in - Properties.add_field x Positive loc specific acc - | Type _ | Class _ -> acc - ) module_scope.entries SMap.empty in - let proto = ObjProtoT reason in - 
Obj_type.mk_with_proto cx reason ~props proto + (class_loc, ClassDeclaration c_ast) + | (loc, DeclareClass decl) -> (loc, DeclareClass (declare_class cx loc decl)) + | (loc, DeclareInterface decl) -> (loc, DeclareInterface (interface cx loc decl)) + | (loc, InterfaceDeclaration decl) -> (loc, InterfaceDeclaration (interface cx loc decl)) + | (loc, DeclareModule { DeclareModule.id; body; kind }) -> + let (_, name) = + match id with + | DeclareModule.Identifier (id_loc, { Ast.Identifier.name = value; comments = _ }) + | DeclareModule.Literal (id_loc, { Ast.StringLiteral.value; _ }) -> + (id_loc, value) in - let type_exports = SMap.fold (fun x entry acc -> - match entry with - (* TODO we may want to provide a location here *) - | Type {_type; _} -> SMap.add x (None, _type) acc - | Value _ | Class _ -> acc - ) module_scope.entries SMap.empty in - set_module_t cx reason (fun t -> - Flow.flow cx ( - module_t_of_cx cx, - ExportNamedT (reason, false, type_exports, t) - ) - ); - mk_commonjs_module_t cx reason reason cjs_exports - in - - let ast = loc, DeclareModule { DeclareModule. - id = begin match id with - | DeclareModule.Identifier (id_loc, name) -> - DeclareModule.Identifier ((id_loc, module_t), name) - | DeclareModule.Literal (id_loc, lit) -> - DeclareModule.Literal ((id_loc, module_t), lit) - end; - body = body_loc, { Block.body = elements_ast }; - kind; - } in - ignore (Abnormal.check_stmt_control_flow_exception (ast, elements_abnormal) - : (Loc.t, Loc.t * Type.t) Ast.Statement.t); - - let id_info = name, module_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - - Flow.flow_t cx (module_t, initial_module_t); - - let t = Env.get_var_declared_type cx module_ref loc in - Flow.flow_t cx (initial_module_t, t); - - Context.pop_declare_module cx; - Context.set_module_kind cx outer_module_exports_kind; - Env.pop_var_scope (); - - ast - - - - | (loc, DeclareExportDeclaration ({ DeclareExportDeclaration. 
- default; declaration; specifiers; source; - } as decl)) -> - let open DeclareExportDeclaration in - let export_info, export_kind, declaration = - (* error-handling around calls to `statement` is omitted here because we + let (body_loc, { Ast.Statement.Block.body = elements }) = body in + let module_ref = Reason.internal_module_name name in + let module_scope = Scope.fresh () in + Scope.add_entry + (Reason.internal_name "exports") + (Scope.Entry.new_var + ~loc:ALoc.none + ~specific:(Locationless.EmptyT.t |> with_trust bogus_trust) + (Locationless.MixedT.t |> with_trust bogus_trust)) + module_scope; + + Env.push_var_scope cx module_scope; + Context.push_declare_module cx (Module_info.empty_cjs_module module_ref); + + let (elements_ast, elements_abnormal) = + Abnormal.catch_stmts_control_flow_exception (fun () -> + toplevel_decls cx elements; + toplevels cx elements) + in + let reason = mk_reason (RModule name) loc in + let () = + match Context.module_kind cx with + | Module_info.ES _ -> () + | Module_info.CJS clobbered -> + Scope.( + Entry.( + let () = + match clobbered with + | Some _ -> () + | None -> + let props = + SMap.fold + (fun x entry acc -> + match entry with + | Value { specific; _ } -> + let loc = Some (entry_loc entry) in + Properties.add_field x Polarity.Positive loc specific acc + | Type _ + | Class _ -> + acc) + module_scope.entries + SMap.empty + in + let proto = ObjProtoT reason in + let t = Obj_type.mk_with_proto cx reason ~props proto in + Import_export.set_module_exports cx loc t + in + SMap.iter + (fun x entry -> + match entry with + | Type { type_; type_binding_kind = TypeBinding; _ } -> + (* TODO we may want to provide a location here *) + Import_export.export_type cx x None type_ + | Type { type_binding_kind = ImportTypeBinding; _ } + | Value _ + | Class _ -> + ()) + module_scope.entries)) + in + let module_t = Import_export.mk_module_t cx reason in + let ast = + ( loc, + DeclareModule + { + DeclareModule.id = + begin + match id with + | DeclareModule.Identifier (id_loc, id) -> + DeclareModule.Identifier ((id_loc, module_t), id) + | DeclareModule.Literal (id_loc, lit) -> + DeclareModule.Literal ((id_loc, module_t), lit) + end; + body = (body_loc, { Block.body = elements_ast }); + kind; + } ) + in + ignore + ( Abnormal.check_stmt_control_flow_exception (ast, elements_abnormal) + : (ALoc.t, ALoc.t * Type.t) Ast.Statement.t ); + + let t = Env.get_var_declared_type cx module_ref loc in + Flow.flow_t cx (module_t, t); + + Context.pop_declare_module cx; + Env.pop_var_scope (); + + ast + | ( loc, + DeclareExportDeclaration + ({ DeclareExportDeclaration.default; declaration; specifiers; source } as decl) ) -> + DeclareExportDeclaration.( + let (export_info, export_kind, declaration) = + (* error-handling around calls to `statement` is omitted here because we don't expect declarations to have abnormal control flow *) - match declaration with - | Some (Variable (loc, v)) -> - let { DeclareVariable.id = (_, name); _; } = v in + match declaration with + | Some (Variable (loc, v)) -> + let { DeclareVariable.id = (_, { Ast.Identifier.name; comments = _ }); _ } = v in let dec_var = statement cx (loc, DeclareVariable v) in - let ast = match dec_var with - | _, DeclareVariable v_ast -> Some (Variable (loc, v_ast)) + let ast = + match dec_var with + | (_, DeclareVariable v_ast) -> Some (Variable (loc, v_ast)) | _ -> assert_false "DeclareVariable typed AST doesn't preserve structure" in - [(spf "var %s" name, loc, name, None)], ExportValue, ast - | Some (Function (loc, f)) -> - let { 
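In the DeclareModule case above, the new code folds the module scope's entries into a CommonJS exports object (value entries become fields) and exports type bindings separately through Import_export.export_type. A simplified, self-contained model of that split, with plain strings standing in for Flow types and partition_exports as an invented name:

module SMap = Map.Make (String)

type entry =
  | Value of string   (* the specific type of a value binding *)
  | Type of string    (* a type binding, exported as a type *)
  | Class of string   (* skipped by both folds above *)

let partition_exports (entries : entry SMap.t) : string SMap.t * string SMap.t =
  SMap.fold
    (fun name entry (props, types) ->
      match entry with
      | Value t -> (SMap.add name t props, types)
      | Type t -> (props, SMap.add name t types)
      | Class _ -> (props, types))
    entries
    (SMap.empty, SMap.empty)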
DeclareFunction.id = (_, name); _ } = f in + ([(spf "var %s" name, loc, name, None)], Ast.Statement.ExportValue, ast) + | Some (Function (loc, f)) -> + let { DeclareFunction.id = (_, { Ast.Identifier.name; comments = _ }); _ } = f in let dec_fun = statement cx (loc, DeclareFunction f) in - let ast = match dec_fun with - | _, DeclareFunction f_ast -> Some (Function (loc, f_ast)) + let ast = + match dec_fun with + | (_, DeclareFunction f_ast) -> Some (Function (loc, f_ast)) | _ -> assert_false "DeclareFunction typed AST doesn't preserve structure" in - [(spf "function %s() {}" name, loc, name, None)], ExportValue, ast - | Some (Class (loc, c)) -> - let { DeclareClass.id = (name_loc, name); _; } = c in + ([(spf "function %s() {}" name, loc, name, None)], Ast.Statement.ExportValue, ast) + | Some (Class (loc, c)) -> + let { DeclareClass.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } = c in let dec_class = statement cx (loc, DeclareClass c) in - let ast = match dec_class with - | _, DeclareClass c_ast -> Some (Class (loc, c_ast)) + let ast = + match dec_class with + | (_, DeclareClass c_ast) -> Some (Class (loc, c_ast)) | _ -> assert_false "DeclareClass typed AST doesn't preserve structure" in - [(spf "class %s {}" name, name_loc, name, None)], ExportValue, ast - | Some (DefaultType (loc, t)) -> - let (_, _type), _ as t_ast = Anno.convert cx SMap.empty (loc, t) in + ([(spf "class %s {}" name, name_loc, name, None)], Ast.Statement.ExportValue, ast) + | Some (DefaultType (loc, t)) -> + let (((_, _type), _) as t_ast) = Anno.convert cx SMap.empty (loc, t) in let ast = Some (DefaultType t_ast) in - [( "<>", loc, "default", Some _type)], ExportValue, ast - | Some (NamedType (talias_loc, ({ - TypeAlias. - id = (name_loc, name); - _; - } as talias))) -> + ([("<>", loc, "default", Some _type)], Ast.Statement.ExportValue, ast) + | Some + (NamedType + ( talias_loc, + ( { TypeAlias.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } as + talias ) )) -> let type_alias = statement cx (talias_loc, TypeAlias talias) in - let ast = match type_alias with - | _, TypeAlias talias -> Some (NamedType (talias_loc, talias)) + let ast = + match type_alias with + | (_, TypeAlias talias) -> Some (NamedType (talias_loc, talias)) | _ -> assert_false "TypeAlias typed AST doesn't preserve structure" in - [(spf "type %s = ..." name, name_loc, name, None)], ExportType, ast - | Some (NamedOpaqueType (opaque_loc, ({ - OpaqueType. - id = (name_loc, name); - _; - } as opaque_t))) -> + ([(spf "type %s = ..." name, name_loc, name, None)], Ast.Statement.ExportType, ast) + | Some + (NamedOpaqueType + ( opaque_loc, + ( { OpaqueType.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } as + opaque_t ) )) -> let opaque_type = statement cx (opaque_loc, OpaqueType opaque_t) in - let ast = match opaque_type with - | _, OpaqueType opaque_t -> Some (NamedOpaqueType (opaque_loc, opaque_t)) + let ast = + match opaque_type with + | (_, OpaqueType opaque_t) -> Some (NamedOpaqueType (opaque_loc, opaque_t)) | _ -> assert_false "OpaqueType typed AST doesn't preserve structure" in - [(spf "opauqe type %s = ..." name, name_loc, name, None)], ExportType, ast - | Some (Interface (loc, i)) -> - let {Interface.id = (name_loc, name); _;} = i in + ( [(spf "opaque type %s = ..." 
name, name_loc, name, None)], + Ast.Statement.ExportType, + ast ) + | Some (Interface (loc, i)) -> + let { Interface.id = (name_loc, { Ast.Identifier.name; comments = _ }); _ } = i in let int_dec = statement cx (loc, InterfaceDeclaration i) in - let ast = match int_dec with - | _, InterfaceDeclaration i_ast -> Some (Interface (loc, i_ast)) + let ast = + match int_dec with + | (_, InterfaceDeclaration i_ast) -> Some (Interface (loc, i_ast)) | _ -> assert_false "InterfaceDeclaration typed AST doesn't preserve structure" in - [(spf "interface %s {}" name, name_loc, name, None)], ExportType, ast - | None -> - [], ExportValue, None + ([(spf "interface %s {}" name, name_loc, name, None)], Ast.Statement.ExportType, ast) + | None -> ([], Ast.Statement.ExportValue, None) + in + export_statement cx loc ~default export_info specifiers source export_kind; + + (loc, DeclareExportDeclaration { decl with DeclareExportDeclaration.declaration })) + | (loc, DeclareModuleExports (t_loc, t)) -> + let (((_, t), _) as t_ast) = Anno.convert cx SMap.empty t in + Import_export.cjs_clobber cx loc t; + (loc, DeclareModuleExports (t_loc, t_ast)) + | ( loc, + ExportNamedDeclaration + ({ ExportNamedDeclaration.declaration; specifiers; source; exportKind } as export_decl) + ) -> + let (declaration, export_info) = + match declaration with + | Some decl -> + ( Some (statement cx decl), + (match decl with + | (_, FunctionDeclaration { Ast.Function.id = None; _ }) -> + failwith + ( "Parser Error: Immediate exports of nameless functions can " + ^ "only exist for default exports!" ) + | ( _, + FunctionDeclaration + { Ast.Function.id = Some (id_loc, { Ast.Identifier.name; comments = _ }); _ } ) + -> + Type_inference_hooks_js.dispatch_export_named_hook name id_loc; + [(spf "function %s() {}" name, id_loc, name, None)] + | (_, ClassDeclaration { Ast.Class.id = None; _ }) -> + failwith + ( "Parser Error: Immediate exports of nameless classes can " + ^ "only exist for default exports" ) + | ( _, + ClassDeclaration + { Ast.Class.id = Some (id_loc, { Ast.Identifier.name; comments = _ }); _ } ) -> + Type_inference_hooks_js.dispatch_export_named_hook name id_loc; + [(spf "class %s {}" name, id_loc, name, None)] + | (_, VariableDeclaration { VariableDeclaration.declarations; _ }) -> + Flow_ast_utils.fold_bindings_of_variable_declarations + (fun acc (loc, { Ast.Identifier.name; comments = _ }) -> + Type_inference_hooks_js.dispatch_export_named_hook name loc; + (spf "var %s" name, loc, name, None) :: acc) + [] + declarations + |> List.rev + | (_, TypeAlias { TypeAlias.id; _ }) -> + let name = ident_name id in + [(spf "type %s = ..." name, loc, name, None)] + | (_, OpaqueType { OpaqueType.id; _ }) -> + let name = ident_name id in + [(spf "opaque type %s = ..." name, loc, name, None)] + | (_, InterfaceDeclaration { Interface.id; _ }) -> + let name = ident_name id in + [(spf "interface %s = ..." name, loc, name, None)] + | _ -> failwith "Parser Error: Invalid export-declaration type!") ) + | None -> (None, []) in - - export_statement cx loc ~default export_info specifiers source export_kind; - - loc, DeclareExportDeclaration { decl with DeclareExportDeclaration.declaration } - - | (loc, DeclareModuleExports (t_loc, t)) -> - let (_, t), _ as t_ast = Anno.convert cx SMap.empty t in - set_module_kind cx loc (Context.CommonJSModule(Some loc)); - set_module_exports cx loc t; - loc, DeclareModuleExports (t_loc, t_ast) - - | (loc, ExportNamedDeclaration ({ ExportNamedDeclaration. 
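Each declaration branch in the DeclareExportDeclaration case above (and in the export statements that follow) builds an export_info list of the same shape, which export_statement then consumes. The type alias below is written out only for illustration; the real code just builds the tuples inline.

(* A source-like rendering of the binding (e.g. "var x"), its location, the
   exported name, and an optional precomputed type, present only for the
   default-type and default-expression branches. *)
type export_info = (string * ALoc.t * string * Type.t option) list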
- declaration; specifiers; source; exportKind; - } as export_decl)) -> - let declaration, export_info = match declaration with - | Some decl -> - Some (statement cx decl), - (match decl with - | _, FunctionDeclaration {Ast.Function.id = None; _} -> - failwith ( - "Parser Error: Immediate exports of nameless functions can " ^ - "only exist for default exports!" - ) - | _, FunctionDeclaration {Ast.Function.id = Some (id_loc, name); _} -> - Type_inference_hooks_js.dispatch_export_named_hook name id_loc; - [(spf "function %s() {}" name, id_loc, name, None)] - | _, ClassDeclaration {Ast.Class.id = None; _} -> - failwith ( - "Parser Error: Immediate exports of nameless classes can " ^ - "only exist for default exports" - ) - | _, ClassDeclaration {Ast.Class.id = Some (id_loc, name); _} -> - Type_inference_hooks_js.dispatch_export_named_hook name id_loc; - [(spf "class %s {}" name, id_loc, name, None)] - | _, VariableDeclaration {VariableDeclaration.declarations; _} -> - let decl_to_bindings accum (_, decl) = - let id = snd decl.VariableDeclaration.Declarator.id in - List.rev (Ast_utils.bindings_of_pattern accum id) - in - let bound_names = List.fold_left decl_to_bindings [] declarations in - bound_names |> List.map (fun (loc, name) -> - Type_inference_hooks_js.dispatch_export_named_hook name loc; - (spf "var %s" name, loc, name, None) - ) - | _, TypeAlias {TypeAlias.id; _} -> - let name = ident_name id in - [(spf "type %s = ..." name, loc, name, None)] - | _, OpaqueType {OpaqueType.id; _} -> - let name = ident_name id in - [(spf "opaque type %s = ..." name, loc, name, None)] - | _, InterfaceDeclaration {Interface.id; _} -> - let name = ident_name id in - [(spf "interface %s = ..." name, loc, name, None)] - | _ -> failwith "Parser Error: Invalid export-declaration type!") - - | None -> None, [] in - export_statement cx loc ~default:None export_info specifiers source exportKind; - loc, ExportNamedDeclaration { export_decl with ExportNamedDeclaration.declaration } - - - | (loc, ExportDefaultDeclaration { ExportDefaultDeclaration.default; declaration }) -> + (loc, ExportNamedDeclaration { export_decl with ExportNamedDeclaration.declaration }) + | (loc, ExportDefaultDeclaration { ExportDefaultDeclaration.default; declaration }) -> Type_inference_hooks_js.dispatch_export_named_hook "default" default; - let declaration, export_info = match declaration with - | ExportDefaultDeclaration.Declaration decl -> - let decl, undo_nameify = nameify_default_export_decl decl in - ExportDefaultDeclaration.Declaration (undo_nameify (statement cx decl)), - (match decl with - | loc, FunctionDeclaration {Ast.Function.id = None; _} -> - [("function() {}", loc, internal_name "*default*", None)] - | loc, FunctionDeclaration {Ast.Function.id = Some ident; _} -> - let name = ident_name ident in - [(spf "function %s() {}" name, loc, name, None)] - | loc, ClassDeclaration {Ast.Class.id = None; _} -> - [("class {}", loc, internal_name "*default*", None)] - | _, ClassDeclaration {Ast.Class.id = Some ident; _} -> - let name = ident_name ident in - [(spf "class %s {}" name, (fst ident), name, None)] - | _, VariableDeclaration {VariableDeclaration.declarations; _} -> - let decl_to_bindings accum (_, decl) = - let id = snd decl.VariableDeclaration.Declarator.id in - List.rev (Ast_utils.bindings_of_pattern accum id) - in - let bound_names = List.fold_left decl_to_bindings [] declarations in - bound_names |> List.map (fun (loc, name) -> - (spf "var %s" name, loc, name, None) - ) - | _, TypeAlias {TypeAlias.id; _} -> - let name = 
ident_name id in - [(spf "type %s = ..." name, loc, name, None)] - | _, OpaqueType {OpaqueType .id; _} -> - let name = ident_name id in - [(spf "opaque type %s = ..." name, loc, name, None)] - | _, InterfaceDeclaration {Interface.id; _} -> - let name = ident_name id in - [(spf "interface %s = ..." name, loc, name, None)] - | _ -> failwith "Parser Error: Invalid export-declaration type!") - - | ExportDefaultDeclaration.Expression expr -> - let (_, expr_t), _ as expr_ast = expression cx expr in - ExportDefaultDeclaration.Expression expr_ast, - [( "<>", fst expr, "default", Some expr_t)] + let (declaration, export_info) = + match declaration with + | ExportDefaultDeclaration.Declaration decl -> + let (decl, undo_nameify) = Import_export.nameify_default_export_decl decl in + ( ExportDefaultDeclaration.Declaration (undo_nameify (statement cx decl)), + (match decl with + | (loc, FunctionDeclaration { Ast.Function.id = None; _ }) -> + [("function() {}", loc, internal_name "*default*", None)] + | (loc, FunctionDeclaration { Ast.Function.id = Some ident; _ }) -> + let name = ident_name ident in + [(spf "function %s() {}" name, loc, name, None)] + | (loc, ClassDeclaration { Ast.Class.id = None; _ }) -> + [("class {}", loc, internal_name "*default*", None)] + | (_, ClassDeclaration { Ast.Class.id = Some ident; _ }) -> + let name = ident_name ident in + [(spf "class %s {}" name, fst ident, name, None)] + | (_, VariableDeclaration { VariableDeclaration.declarations; _ }) -> + Flow_ast_utils.fold_bindings_of_variable_declarations + (fun acc (loc, { Ast.Identifier.name; comments = _ }) -> + (spf "var %s" name, loc, name, None) :: acc) + [] + declarations + |> List.rev + | (_, TypeAlias { TypeAlias.id; _ }) -> + let name = ident_name id in + [(spf "type %s = ..." name, loc, name, None)] + | (_, OpaqueType { OpaqueType.id; _ }) -> + let name = ident_name id in + [(spf "opaque type %s = ..." name, loc, name, None)] + | (_, InterfaceDeclaration { Interface.id; _ }) -> + let name = ident_name id in + [(spf "interface %s = ..." 
name, loc, name, None)] + | _ -> failwith "Parser Error: Invalid export-declaration type!") ) + | ExportDefaultDeclaration.Expression expr -> + let (((_, expr_t), _) as expr_ast) = expression cx expr in + ( ExportDefaultDeclaration.Expression expr_ast, + [("<>", fst expr, "default", Some expr_t)] ) in - (* export default is always a value *) let exportKind = Ast.Statement.ExportValue in - export_statement cx loc ~default:(Some default) export_info None None exportKind; - loc, ExportDefaultDeclaration { ExportDefaultDeclaration.default; declaration; } - - | (import_loc, ImportDeclaration import_decl) -> - Context.add_import_stmt cx import_decl; - - let { ImportDeclaration.source; specifiers; default; importKind } = import_decl in - - let source_loc, { Ast.StringLiteral.value = module_name; _ } = source in - - let type_kind_of_kind = function - | ImportDeclaration.ImportType -> Type.ImportType - | ImportDeclaration.ImportTypeof -> Type.ImportTypeof - | ImportDeclaration.ImportValue -> Type.ImportValue - in - - let module_t = import cx (source_loc, module_name) import_loc in - - let get_imported_t get_reason import_kind remote_export_name local_name = - Tvar.mk_where cx get_reason (fun t -> - let import_type = - if remote_export_name = "default" - then ImportDefaultT - (get_reason, import_kind, (local_name, module_name), t, Context.is_strict cx) - else ImportNamedT - (get_reason, import_kind, remote_export_name, module_name, t, Context.is_strict cx) - in - Context.add_imported_t cx local_name t; - Flow.flow cx (module_t, import_type) - ) - in + (loc, ExportDefaultDeclaration { ExportDefaultDeclaration.default; declaration }) + | (import_loc, ImportDeclaration import_decl) -> + Context.add_import_stmt cx import_decl; - let specifiers, specifiers_ast = match specifiers with - | Some (ImportDeclaration.ImportNamedSpecifiers named_specifiers) -> - let named_specifiers, named_specifiers_ast = - named_specifiers - |> List.map (function { ImportDeclaration.local; remote; kind;} -> - let (remote_name_loc, remote_name) = remote in - let (loc, local_name) = ( - match local with - | Some local -> - (Loc.btwn (fst remote) (fst local), ident_name local) - | None -> - (fst remote, remote_name) - ) in - let imported_t = - let import_reason = - mk_reason (RNamedImportedType (module_name, local_name)) (fst remote) + let { ImportDeclaration.source; specifiers; default; importKind } = import_decl in + let (source_loc, { Ast.StringLiteral.value = module_name; _ }) = source in + let type_kind_of_kind = function + | ImportDeclaration.ImportType -> Type.ImportType + | ImportDeclaration.ImportTypeof -> Type.ImportTypeof + | ImportDeclaration.ImportValue -> Type.ImportValue + in + let module_t = Import_export.import cx (source_loc, module_name) import_loc in + let get_imported_t get_reason import_kind remote_export_name local_name = + Tvar.mk_where cx get_reason (fun t -> + let import_type = + if remote_export_name = "default" then + ImportDefaultT + (get_reason, import_kind, (local_name, module_name), t, Context.is_strict cx) + else + ImportNamedT + ( get_reason, + import_kind, + remote_export_name, + module_name, + t, + Context.is_strict cx ) in - if Type_inference_hooks_js.dispatch_member_hook - cx remote_name remote_name_loc module_t - then AnyT.why import_reason - else - let import_kind = type_kind_of_kind (Option.value ~default:importKind kind) in - get_imported_t import_reason import_kind remote_name local_name - in - let id_kind = Type_table.Import (remote_name, module_t) in - let id_info = remote_name, 
imported_t, id_kind in - Type_table.set_info remote_name_loc id_info (Context.type_table cx); - let remote_ast = (remote_name_loc, imported_t), remote_name in - let local_ast = Option.map local ~f:(fun (local_loc, local_name) -> - let id_info = local_name, imported_t, id_kind in - Type_table.set_info local_loc id_info (Context.type_table cx); - (local_loc, imported_t), local_name - ) in - (loc, local_name, imported_t, kind), - { ImportDeclaration. - local = local_ast; - remote = remote_ast; - kind; - } - ) - |> List.split - in - named_specifiers, - Some (ImportDeclaration.ImportNamedSpecifiers named_specifiers_ast) - - | Some (ImportDeclaration.ImportNamespaceSpecifier (ns_loc, local)) as specifiers -> - let local_name = ident_name local in - - Type_inference_hooks_js.dispatch_import_hook cx (source_loc, module_name) ns_loc; - - let import_reason = - let import_reason_desc = - match importKind with - | ImportDeclaration.ImportType -> RImportStarType local_name - | ImportDeclaration.ImportTypeof -> RImportStarTypeOf local_name - | ImportDeclaration.ImportValue -> RImportStar local_name + Context.add_imported_t cx local_name t; + Flow.flow cx (module_t, import_type)) + in + let (specifiers, specifiers_ast) = + match specifiers with + | Some (ImportDeclaration.ImportNamedSpecifiers named_specifiers) -> + let (named_specifiers, named_specifiers_ast) = + named_specifiers + |> Core_list.map ~f:(function { ImportDeclaration.local; remote; kind } -> + let ( remote_name_loc, + ({ Ast.Identifier.name = remote_name; comments = _ } as rmt) ) = + remote + in + let (loc, { Ast.Identifier.name = local_name; comments = _ }) = + Option.value ~default:remote local + in + let imported_t = + let import_reason = + mk_reason (RNamedImportedType (module_name, local_name)) (fst remote) + in + if + Type_inference_hooks_js.dispatch_member_hook + cx + remote_name + remote_name_loc + module_t + then + Unsoundness.why InferenceHooks import_reason + else + let import_kind = + type_kind_of_kind (Option.value ~default:importKind kind) + in + get_imported_t import_reason import_kind remote_name local_name + in + let remote_ast = ((remote_name_loc, imported_t), rmt) in + let local_ast = + Option.map local ~f:(fun (local_loc, local_id) -> + let { Ast.Identifier.name = local_name; comments } = local_id in + ((local_loc, imported_t), mk_ident ~comments local_name)) + in + ( (loc, local_name, imported_t, kind), + { ImportDeclaration.local = local_ast; remote = remote_ast; kind } )) + |> List.split in - mk_reason import_reason_desc import_loc - in - - begin match importKind with - | ImportDeclaration.ImportType -> - assert_false "import type * is a parse error" - | ImportDeclaration.ImportTypeof -> - let bind_reason = repos_reason (fst local) import_reason in - let module_ns_t = - import_ns cx import_reason (fst source, module_name) import_loc - in - let module_ns_typeof = - Tvar.mk_where cx bind_reason (fun t -> - Context.add_imported_t cx local_name t; - Flow.flow cx (module_ns_t, - ImportTypeofT (bind_reason, "*", t)) - ) - in - [import_loc, local_name, module_ns_typeof, None] - | ImportDeclaration.ImportValue -> - let reason = - mk_reason (RModule module_name) import_loc - in - let module_ns_t = - import_ns cx reason (fst source, module_name) import_loc - in - Context.add_imported_t cx local_name module_ns_t; - [fst local, local_name, module_ns_t, None] - end, - specifiers - | None -> [], None - in - - let specifiers, default_ast = match default with - | Some local -> + (named_specifiers, Some 
(ImportDeclaration.ImportNamedSpecifiers named_specifiers_ast)) + | Some (ImportDeclaration.ImportNamespaceSpecifier (ns_loc, local)) as specifiers -> let local_name = ident_name local in - let loc = fst local in + Type_inference_hooks_js.dispatch_import_hook cx (source_loc, module_name) ns_loc; + + let import_reason = + let import_reason_desc = + match importKind with + | ImportDeclaration.ImportType -> RImportStarType local_name + | ImportDeclaration.ImportTypeof -> RImportStarTypeOf local_name + | ImportDeclaration.ImportValue -> RImportStar local_name + in + mk_reason import_reason_desc import_loc + in + ( begin + match importKind with + | ImportDeclaration.ImportType -> assert_false "import type * is a parse error" + | ImportDeclaration.ImportTypeof -> + let bind_reason = repos_reason (fst local) import_reason in + let module_ns_t = + Import_export.import_ns cx import_reason (fst source, module_name) import_loc + in + let module_ns_typeof = + Tvar.mk_where cx bind_reason (fun t -> + Context.add_imported_t cx local_name t; + Flow.flow cx (module_ns_t, ImportTypeofT (bind_reason, "*", t))) + in + [(import_loc, local_name, module_ns_typeof, None)] + | ImportDeclaration.ImportValue -> + let reason = mk_reason (RModule module_name) import_loc in + let module_ns_t = + Import_export.import_ns cx reason (fst source, module_name) import_loc + in + Context.add_imported_t cx local_name module_ns_t; + [(fst local, local_name, module_ns_t, None)] + end, + specifiers ) + | None -> ([], None) + in + let (specifiers, default_ast) = + match default with + | Some local -> + let (loc, ({ Ast.Identifier.name = local_name; comments = _ } as id)) = local in let import_reason = mk_reason (RDefaultImportedType (local_name, module_name)) loc in let imported_t = - if Type_inference_hooks_js.dispatch_member_hook - cx "default" loc module_t - then AnyT.why import_reason + if Type_inference_hooks_js.dispatch_member_hook cx "default" loc module_t then + Unsoundness.why InferenceHooks import_reason else let import_kind = type_kind_of_kind importKind in get_imported_t import_reason import_kind "default" local_name in - let id_info = local_name, imported_t, Type_table.Import ("default", module_t) in - Type_table.set_info loc id_info (Context.type_table cx); - (loc, local_name, imported_t, None) :: specifiers, - Some ((loc, imported_t), local_name) - | None -> specifiers, None + ((loc, local_name, imported_t, None) :: specifiers, Some ((loc, imported_t), id)) + | None -> (specifiers, None) + in + List.iter + (fun (loc, local_name, t, specifier_kind) -> + let t_generic = + let lookup_mode = + match Option.value ~default:importKind specifier_kind with + | ImportDeclaration.ImportType -> ForType + | ImportDeclaration.ImportTypeof -> ForType + | ImportDeclaration.ImportValue -> ForValue + in + Env.get_var_declared_type ~lookup_mode cx local_name loc + in + Flow.unify cx t t_generic) + specifiers; + + ( import_loc, + ImportDeclaration + { + ImportDeclaration.source; + specifiers = specifiers_ast; + default = default_ast; + importKind; + } )) + +and export_statement cx loc ~default declaration_export_info specifiers source exportKind = + Ast.Statement.( + let lookup_mode = + match exportKind with + | Ast.Statement.ExportValue -> ForValue + | Ast.Statement.ExportType -> ForType in - - List.iter (fun (loc, local_name, t, specifier_kind) -> - let t_generic = - let lookup_mode = - match Option.value ~default:importKind specifier_kind with - | ImportDeclaration.ImportType -> ForType - | ImportDeclaration.ImportTypeof -> ForType 
- | ImportDeclaration.ImportValue -> ForValue - in - Env.get_var_declared_type ~lookup_mode cx local_name loc + let export_from_local (_, loc, local_name, local_tvar) = + let local_tvar = + match local_tvar with + | None -> Env.var_ref ~lookup_mode cx local_name loc + | Some t -> t + in + let local_name = + if Option.is_some default then + "default" + else + local_name in - Flow.unify cx t t_generic - ) specifiers; - - import_loc, - ImportDeclaration { ImportDeclaration. - source; - specifiers = specifiers_ast; - default = default_ast; - importKind; - } -) - - -and export_statement cx loc - ~default declaration_export_info specifiers source exportKind = - - let open Ast.Statement in - let (lookup_mode, export_kind_start) = ( - match exportKind with - | ExportValue -> (ForValue, "export") - | ExportType -> (ForType, "export type") - ) in - - let export_reason_start = spf "%s%s" export_kind_start ( - if (Option.is_some default) then " default" else "" - ) in - - let export_from_local (export_reason, loc, local_name, local_tvar) = ( - let reason = - mk_reason (RCustom (spf "%s %s" export_reason_start export_reason)) loc + (* Use the location of the "default" keyword if this is a default export. For named exports, + * use the location of the identifier. *) + let loc = Option.value ~default:loc default in + match exportKind with + | Ast.Statement.ExportType -> Import_export.export_type cx local_name (Some loc) local_tvar + | Ast.Statement.ExportValue -> Import_export.export cx local_name loc local_tvar in - let local_tvar = match local_tvar with - | None -> Env.var_ref ~lookup_mode cx local_name loc - | Some t -> t in - - (** - * NOTE: We do not use type-only exports as an indicator of an - * ES module in order to allow CommonJS modules to export types. - * - * Note that this means that modules that consist only of - * type-only exports will be internally considered a CommonJS - * module, but this should have minimal observable effect to the - * user given CommonJS<->ESModule interop. - *) - (if lookup_mode != ForType then - set_module_kind cx loc Context.ESModule); - - let local_name = if (Option.is_some default) then "default" else local_name in - set_module_t cx reason (fun t -> - Flow.flow cx ( - module_t_of_cx cx, - (* Use the location of the "default" keyword if this is a default export. For named exports, - * use the location of the identifier. *) - let loc = Option.value ~default:loc default in - ExportNamedT(reason, false, SMap.singleton local_name (Some loc, local_tvar), t) - ) - ) - ) in - - (match (declaration_export_info, specifiers) with + match (declaration_export_info, specifiers) with (* [declare] export [type] {foo, bar} [from ...]; *) | ([], Some (ExportNamedDeclaration.ExportSpecifiers specifiers)) -> - let export_specifier specifier = ( - let loc, reason, local_name, remote_name = + let export_specifier specifier = + let (loc, reason, local_name, remote_name) = match specifier with - | loc, { ExportNamedDeclaration.ExportSpecifier. - local = (_, id); - exported = None; - } -> + | ( loc, + { + ExportNamedDeclaration.ExportSpecifier.local = + (_, { Ast.Identifier.name = id; comments = _ }); + exported = None; + } ) -> let reason = mk_reason (RCustom (spf "export {%s}" id)) loc in (loc, reason, id, id) - | loc, { ExportNamedDeclaration.ExportSpecifier. 
- local = (_, local); - exported = Some (_, exported); - } -> - let reason = - mk_reason (RCustom (spf "export {%s as %s}" local exported)) loc - in + | ( loc, + { + ExportNamedDeclaration.ExportSpecifier.local = + (_, { Ast.Identifier.name = local; comments = _ }); + exported = Some (_, { Ast.Identifier.name = exported; comments = _ }); + } ) -> + let reason = mk_reason (RCustom (spf "export {%s as %s}" local exported)) loc in (loc, reason, local, exported) in - - (** + (* * Determine if we're dealing with the `export {} from` form * (and if so, retrieve the ModuleNamespaceObject tvar for the * source module) *) - let source_module_tvar = ( + let source_module_tvar = match source with | Some (src_loc, { Ast.StringLiteral.value = module_name; _ }) -> - let reason = - mk_reason (RCustom "ModuleNamespace for export {} from") src_loc - in - Some (import_ns cx reason (src_loc, module_name) loc) + let reason = mk_reason (RCustom "ModuleNamespace for export {} from") src_loc in + Some (Import_export.import_ns cx reason (src_loc, module_name) loc) | None -> None - ) in - - let local_tvar = ( + in + let local_tvar = match source_module_tvar with - | Some(tvar) -> + | Some tvar -> Tvar.mk_where cx reason (fun t -> - Flow.flow cx (tvar, GetPropT (unknown_use, reason, Named (reason, local_name), t)) - ) - | None -> - Env.var_ref ~lookup_mode cx local_name loc - ) in - - (** - * NOTE: We do not use type-only exports as an indicator of an - * ES module in order to allow CommonJS modules to export - * types. - * - * Note that this means that modules that consist only of - * type-only exports will be internally considered a - * CommonJS module, but this should have minimal observable - * effect to the user given CommonJS<->ESModule interop. - *) - (if lookup_mode != ForType - then set_module_kind cx loc Context.ESModule); - - set_module_t cx reason (fun t -> - Flow.flow cx ( - module_t_of_cx cx, - (* TODO we may need a more precise loc here *) - ExportNamedT(reason, false, SMap.singleton remote_name (Some loc, local_tvar), t) - ) - ) - ) in + Flow.flow cx (tvar, GetPropT (unknown_use, reason, Named (reason, local_name), t))) + | None -> Env.var_ref ~lookup_mode cx local_name loc + in + match exportKind with + | Ast.Statement.ExportType -> + Import_export.export_type cx remote_name (Some loc) local_tvar + | Ast.Statement.ExportValue -> Import_export.export cx remote_name loc local_tvar + in List.iter export_specifier specifiers - (* [declare] export [type] * from "source"; *) - | [], - Some (ExportNamedDeclaration.ExportBatchSpecifier - (batch_loc, star_as_name) - ) -> - let source_loc, source_module_name = ( + | ([], Some (ExportNamedDeclaration.ExportBatchSpecifier (batch_loc, star_as_name))) -> + let (source_loc, source_module_name) = match source with - | Some (loc, { Ast.StringLiteral.value; _ }) -> loc, value - | None -> failwith ( - "Parser Error: `export * from` must specify a string " ^ - "literal for the source module name!" - ) - ) in - - warn_or_ignore_export_star_as cx star_as_name; + | Some (loc, { Ast.StringLiteral.value; _ }) -> (loc, value) + | None -> + failwith + ( "Parser Error: `export * from` must specify a string " + ^ "literal for the source module name!" 
) + in + Import_export.warn_or_ignore_export_star_as cx star_as_name; let parse_export_star_as = Context.esproposal_export_star_as cx in (match star_as_name with | Some ident -> - let (_, name) = ident in + let (_, { Ast.Identifier.name; comments = _ }) = ident in let reason = - mk_reason - (RCustom (spf "export * as %s from %S" name source_module_name)) - loc + mk_reason (RCustom (spf "export * as %s from %S" name source_module_name)) loc in - set_module_kind cx loc Context.ESModule; - let remote_namespace_t = - if parse_export_star_as = Options.ESPROPOSAL_ENABLE - then import_ns cx reason (source_loc, source_module_name) loc - else AnyT.why ( - let config_value = - if parse_export_star_as = Options.ESPROPOSAL_IGNORE - then "ignore" - else "warn" - in - let reason = - spf "flowconfig: esproposal.export_star_as=%s" config_value in - mk_reason (RCustom reason) batch_loc - ) + if parse_export_star_as = Options.ESPROPOSAL_ENABLE then + Import_export.import_ns cx reason (source_loc, source_module_name) loc + else + AnyT.untyped + (let config_value = + if parse_export_star_as = Options.ESPROPOSAL_IGNORE then + "ignore" + else + "warn" + in + let reason = spf "flowconfig: esproposal.export_star_as=%s" config_value in + mk_reason (RCustom reason) batch_loc) in - set_module_t cx reason (fun t -> - Flow.flow cx ( - module_t_of_cx cx, - (* TODO we may need a more precise loc here *) - ExportNamedT(reason, false, SMap.singleton name (Some loc, remote_namespace_t), t) - ) - ) + Import_export.export cx name loc remote_namespace_t | None -> - let reason = - mk_reason - (RCustom (spf "%s * from %S" export_kind_start source_module_name)) - loc - in - - (* It's legal to export types from a CommonJS module. *) - if exportKind != ExportType - then set_module_kind cx loc Context.ESModule; - - set_module_t cx reason (fun t -> Flow.flow cx ( - import cx (source_loc, source_module_name) loc, - let module_t = module_t_of_cx cx in - match exportKind with - | ExportValue -> CopyNamedExportsT(reason, module_t, t) - | ExportType -> CopyTypeExportsT(reason, module_t, t) - )) - ) - - | ([], None) -> failwith ( - "Parser Error: Export statement missing one of: Declaration, " ^ - "Expression, or Specifier list!" - ) - | (_, Some _) -> failwith ( - "Parser Error: Export statement with a declaration/expression " ^ - "cannot also include a list of specifiers!" - ) - + let source_module_t = Import_export.import cx (source_loc, source_module_name) loc in + (match exportKind with + | Ast.Statement.ExportValue -> Import_export.export_star cx loc source_module_t + | Ast.Statement.ExportType -> Import_export.export_type_star cx loc source_module_t)) + | ([], None) -> + failwith + ( "Parser Error: Export statement missing one of: Declaration, " + ^ "Expression, or Specifier list!" ) + | (_, Some _) -> + failwith + ( "Parser Error: Export statement with a declaration/expression " + ^ "cannot also include a list of specifiers!" ) (* [declare] export [type] [default] <>; *) | (export_info, None) -> - (** + (* * Export each declared binding. Some declarations export multiple * bindings, like a multi-declarator variable declaration. 
*) - List.iter export_from_local export_info - ) - -and object_prop cx map = Ast.Expression.Object.(function - (* named prop *) - | Property (prop_loc, Property.Init { - key = - (Property.Identifier (loc, name) | - Property.Literal (loc, { - Ast.Literal.value = Ast.Literal.String name; - _; - })) as key; - value = v; shorthand; }) -> - let (_, t), _ as v = expression cx v in - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - Properties.add_field name Neutral (Some loc) t map, - Property (prop_loc, Property.Init { - key = translate_identifier_or_literal_key t key; - value = v; - shorthand - }) - - (* named method *) - | Property (prop_loc, Property.Method { - key = - (Property.Identifier (loc, name) | - Property.Literal (loc, { - Ast.Literal.value = Ast.Literal.String name; - _; - })) as key; - value = (fn_loc, func); - }) -> - let (_, t), v = expression cx (fn_loc, Ast.Expression.Function func) in - let func = match v with Ast.Expression.Function func -> func | _ -> assert false in - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - Properties.add_field name Neutral (Some loc) t map, - Property (prop_loc, Property.Method { - key = translate_identifier_or_literal_key t key; - value = fn_loc, func - }) - - (* We enable some unsafe support for getters and setters. The main unsafe bit + List.iter export_from_local export_info) + +and object_prop cx map prop = + Ast.Expression.Object.( + match prop with + (* named prop *) + | Property + ( prop_loc, + Property.Init + { + key = Property.Identifier (loc, { Ast.Identifier.name; comments }) as key; + value = v; + shorthand; + } ) -> + let (map, key, value) = + if Type_inference_hooks_js.dispatch_obj_prop_decl_hook cx name loc then + let t = Unsoundness.at InferenceHooks loc in + let key = translate_identifier_or_literal_key t key in + (* don't add `name` to `map` because `name` is the autocomplete token *) + if shorthand then + let value = + ((loc, t), Ast.Expression.Identifier ((loc, t), { Ast.Identifier.name; comments })) + in + (map, key, value) + else + let (((_, _t), _) as value) = expression cx v in + (map, key, value) + else + let (((_, t), _) as value) = expression cx v in + let key = translate_identifier_or_literal_key t key in + let map = Properties.add_field name Polarity.Neutral (Some loc) t map in + (map, key, value) + in + (map, Property (prop_loc, Property.Init { key; value; shorthand })) + (* string literal prop *) + | Property + ( prop_loc, + Property.Init + { + key = + Property.Literal (loc, { Ast.Literal.value = Ast.Literal.String name; _ }) as key; + value = v; + shorthand; + } ) -> + let (((_, t), _) as v) = expression cx v in + ( Properties.add_field name Polarity.Neutral (Some loc) t map, + Property + ( prop_loc, + Property.Init { key = translate_identifier_or_literal_key t key; value = v; shorthand } + ) ) + (* named method *) + | Property + ( prop_loc, + Property.Method + { + key = + ( Property.Identifier (loc, { Ast.Identifier.name; comments = _ }) + | Property.Literal (loc, { Ast.Literal.value = Ast.Literal.String name; _ }) ) as + key; + value = (fn_loc, func); + } ) -> + let ((_, t), v) = expression cx (fn_loc, Ast.Expression.Function func) in + let func = + match v with + | Ast.Expression.Function func -> func + | _ -> assert false + in + ( Properties.add_field name Polarity.Neutral (Some loc) t map, + Property + ( prop_loc, + Property.Method + { key = translate_identifier_or_literal_key t key; value = (fn_loc, 
func) } ) ) + (* We enable some unsafe support for getters and setters. The main unsafe bit * is that we don't properly havok refinements when getter and setter methods * are called. *) - (* unsafe getter property *) - | Property (loc, Property.Get { - key = - (Property.Identifier (id_loc, name) | - Property.Literal (id_loc, { - Ast.Literal.value = Ast.Literal.String name; - _; - })) as key; - value = (vloc, func); - }) -> - Flow_js.add_output cx (Flow_error.EUnsafeGettersSetters loc); - let function_type, func = mk_function None cx vloc func in - let return_t = Type.extract_getter_type function_type in - let id_info = name, return_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - Properties.add_getter name (Some id_loc) return_t map, - Property (loc, Property.Get { - key = translate_identifier_or_literal_key return_t key; - value = vloc, func; - }) - - (* unsafe setter property *) - | Property (loc, Property.Set { - key = - (Property.Identifier (id_loc, name) | - Property.Literal (id_loc, { - Ast.Literal.value = Ast.Literal.String name; - _; - })) as key; - value = vloc, func; - }) -> - Flow_js.add_output cx (Flow_error.EUnsafeGettersSetters loc); - let function_type, func = mk_function None cx vloc func in - let param_t = Type.extract_setter_type function_type in - let id_info = name, param_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - Properties.add_setter name (Some id_loc) param_t map, - Property (loc, Property.Set { - key = translate_identifier_or_literal_key param_t key; - value = vloc, func; - }) - - (* non-string literal LHS *) - | Property (loc, Property.Init { key = Property.Literal _; _ }) - | Property (loc, Property.Method { key = Property.Literal _; _ }) - | Property (loc, Property.Get { key = Property.Literal _; _ }) - | Property (loc, Property.Set { key = Property.Literal _; _ }) -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ObjectPropertyLiteralNonString)); - map, Typed_ast.Expression.Object.property_error - - (* computed getters and setters aren't supported yet regardless of the + (* unsafe getter property *) + | Property + ( loc, + Property.Get + { + key = + ( Property.Identifier (id_loc, { Ast.Identifier.name; comments = _ }) + | Property.Literal (id_loc, { Ast.Literal.value = Ast.Literal.String name; _ }) ) + as key; + value = (vloc, func); + } ) -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc); + let (function_type, func) = mk_function_expression None cx vloc func in + let return_t = Type.extract_getter_type function_type in + ( Properties.add_getter name (Some id_loc) return_t map, + Property + ( loc, + Property.Get + { key = translate_identifier_or_literal_key return_t key; value = (vloc, func) } ) ) + (* unsafe setter property *) + | Property + ( loc, + Property.Set + { + key = + ( Property.Identifier (id_loc, { Ast.Identifier.name; comments = _ }) + | Property.Literal (id_loc, { Ast.Literal.value = Ast.Literal.String name; _ }) ) + as key; + value = (vloc, func); + } ) -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc); + let (function_type, func) = mk_function_expression None cx vloc func in + let param_t = Type.extract_setter_type function_type in + ( Properties.add_setter name (Some id_loc) param_t map, + Property + ( loc, + Property.Set + { key = translate_identifier_or_literal_key param_t key; value = (vloc, func) } ) ) + (* non-string literal LHS *) + | Property (loc, Property.Init { key = Property.Literal _; _ }) + | Property 
(loc, Property.Method { key = Property.Literal _; _ }) + | Property (loc, Property.Get { key = Property.Literal _; _ }) + | Property (loc, Property.Set { key = Property.Literal _; _ }) -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ObjectPropertyLiteralNonString)); + (map, Tast_utils.error_mapper#object_property_or_spread_property prop) + (* computed getters and setters aren't supported yet regardless of the `enable_getters_and_setters` config option *) - | Property (loc, Property.Get { key = Property.Computed _; _ }) - | Property (loc, Property.Set { key = Property.Computed _; _ }) -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ObjectPropertyComputedGetSet)); - map, Typed_ast.Expression.Object.property_error - - (* computed LHS silently ignored for now *) - | Property (_, Property.Init { key = Property.Computed _; _ }) - | Property (_, Property.Method { key = Property.Computed _; _ }) -> - map, Typed_ast.Expression.Object.property_error - - (* spread prop *) - | SpreadProperty _ -> - map, Typed_ast.Expression.Object.property_error - - | Property (_, Property.Init { key = Property.PrivateName _; _ }) - | Property (_, Property.Method { key = Property.PrivateName _; _ }) - | Property (_, Property.Get { key = Property.PrivateName _; _ }) - | Property (_, Property.Set { key = Property.PrivateName _; _ }) -> - failwith "Internal Error: Non-private field with private name" -) + | Property (loc, Property.Get { key = Property.Computed _; _ }) + | Property (loc, Property.Set { key = Property.Computed _; _ }) -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ObjectPropertyComputedGetSet)); + (map, Tast_utils.error_mapper#object_property_or_spread_property prop) + (* computed LHS silently ignored for now *) + | Property (_, Property.Init { key = Property.Computed _; _ }) + | Property (_, Property.Method { key = Property.Computed _; _ }) -> + (map, Tast_utils.error_mapper#object_property_or_spread_property prop) + (* spread prop *) + | SpreadProperty _ -> (map, Tast_utils.error_mapper#object_property_or_spread_property prop) + | Property (_, Property.Init { key = Property.PrivateName _; _ }) + | Property (_, Property.Method { key = Property.PrivateName _; _ }) + | Property (_, Property.Get { key = Property.PrivateName _; _ }) + | Property (_, Property.Set { key = Property.PrivateName _; _ }) -> + failwith "Internal Error: Non-private field with private name") and prop_map_of_object cx props = - let map, rev_prop_asts = List.fold_left (fun (map, rev_prop_asts) prop -> - let map, prop = object_prop cx map prop in map, prop::rev_prop_asts - ) (SMap.empty, []) props in map, List.rev rev_prop_asts - -and object_ cx reason ?(allow_sealed=true) props = - let open Ast.Expression.Object in - - (* Use the same reason for proto and the ObjT so we can walk the proto chain - and use the root proto reason to build an error. *) - let obj_proto = ObjProtoT reason in - - (* Return an object with specified sealing. *) - let mk_object ?(proto=obj_proto) ?(sealed=false) props = - Obj_type.mk_with_proto cx reason ~sealed ~props proto + let (map, rev_prop_asts) = + List.fold_left + (fun (map, rev_prop_asts) prop -> + let (map, prop) = object_prop cx map prop in + (map, prop :: rev_prop_asts)) + (SMap.empty, []) + props in + (map, List.rev rev_prop_asts) - (* Copy properties from from_obj to to_obj. 
We should ensure that to_obj is +and object_ cx reason ?(allow_sealed = true) props = + Ast.Expression.Object.( + (* Use the same reason for proto and the ObjT so we can walk the proto chain + and use the root proto reason to build an error. *) + let obj_proto = ObjProtoT reason in + (* Return an object with specified sealing. *) + let mk_object ?(proto = obj_proto) ?(sealed = false) props = + Obj_type.mk_with_proto cx reason ~sealed ~props proto + in + (* Copy properties from from_obj to to_obj. We should ensure that to_obj is not sealed. *) - let mk_spread from_obj to_obj ~assert_exact = - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (to_obj, ObjAssignToT(reason, from_obj, t, ObjAssign { assert_exact })); - ) - in - - (* Add property to object, using optional tout argument to SetElemT to wait + let mk_spread from_obj to_obj ~assert_exact = + let use_op = Op (ObjectSpread { op = reason_of_t from_obj }) in + Tvar.mk_where cx reason (fun t -> + Flow.flow + cx + (to_obj, ObjAssignToT (use_op, reason, from_obj, t, ObjAssign { assert_exact }))) + in + (* Add property to object, using optional tout argument to SetElemT to wait for the write to happen. This defers any reads until writes have happened, to avoid race conditions. *) - let mk_computed key value obj = - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (obj, SetElemT (unknown_use, reason, key, value, Some t)) - ) - in - - (* When there's no result, return a new object with specified sealing. When + let mk_computed key value obj = + Tvar.mk_where cx reason (fun t -> + Flow.flow cx (obj, SetElemT (unknown_use, reason, key, Assign, value, Some t))) + in + (* When there's no result, return a new object with specified sealing. When there's result, copy a new object into it, sealing the result when necessary. When building an object incrementally, only the final call to this function may be with sealed=true, so we will always have an unsealed object to copy properties to. *) - let eval_object ?(proto=obj_proto) ?(sealed=false) (map, result) = - match result with - | None -> mk_object ~proto ~sealed map - | Some result -> - let result = - if not (SMap.is_empty map) - then mk_spread (mk_object ~proto map) result ~assert_exact:false - else result - in - if not sealed then result else - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (result, ObjSealT (reason, t)) - ) - in - - let sealed, map, proto, result, rev_prop_asts = List.fold_left ( - fun (sealed, map, proto, result, rev_prop_asts) -> function - (* Enforce that the only way to make unsealed object literals is ...{} (spreading empty object + let eval_object ?(proto = obj_proto) ?(sealed = false) (map, result) = + match result with + | None -> mk_object ~proto ~sealed map + | Some result -> + let result = + if not (SMap.is_empty map) then + mk_spread (mk_object ~proto map) result ~assert_exact:false + else + result + in + if not sealed then + result + else + Tvar.mk_where cx reason (fun t -> Flow.flow cx (result, ObjSealT (reason, t))) + in + let (sealed, map, proto, result, rev_prop_asts) = + List.fold_left + (fun (sealed, map, proto, result, rev_prop_asts) -> function + (* Enforce that the only way to make unsealed object literals is ...{} (spreading empty object literals). Otherwise, spreading always returns sealed object literals. 
Also enforce that a spread of an inexact object can only appear as the first element of an @@ -2563,743 +2663,780 @@ and object_ cx reason ?(allow_sealed=true) props = TODO: This treatment of spreads is oblivious to issues that arise when spreading expressions of union type. *) - | SpreadProperty (prop_loc, { SpreadProperty.argument }) -> - let (_, spread), _ as argument = expression cx argument in - let not_empty_object_literal_argument = match spread with - | DefT (_, ObjT { flags; _ }) -> Obj_type.sealed_in_op reason flags.sealed - | _ -> true in - let obj = eval_object (map, result) in - let result = mk_spread spread obj - ~assert_exact:(not (SMap.is_empty map && result = None)) in - sealed && not_empty_object_literal_argument, - SMap.empty, - proto, - Some result, - SpreadProperty (prop_loc, { SpreadProperty. - argument; - })::rev_prop_asts - | Property (prop_loc, Property.Init { - key = Property.Computed k; - value = v; - shorthand; - }) -> - let (_, kt), _ as k = expression cx k in - let (_, vt), _ as v = expression cx v in - let obj = eval_object (map, result) in - let result = mk_computed kt vt obj in - sealed, - SMap.empty, - proto, - Some result, - Property (prop_loc, Property.Init { - key = Property.Computed k; - value = v; - shorthand; - })::rev_prop_asts - | Property (prop_loc, Property.Method { - key = Property.Computed k; - value = fn_loc, fn; - }) -> - let (_, kt), _ as k = expression cx k in - let (_, vt), v = expression cx (fn_loc, Ast.Expression.Function fn) in - let fn = match v with Ast.Expression.Function fn -> fn | _ -> assert false in - let obj = eval_object (map, result) in - let result = mk_computed kt vt obj in - sealed, - SMap.empty, - proto, - Some result, - Property (prop_loc, Property.Method { - key = Property.Computed k; - value = fn_loc, fn; - })::rev_prop_asts - | Property (prop_loc, Property.Init { - key = - (Property.Identifier (_, "__proto__") | - Property.Literal (_, { - Ast.Literal.value = Ast.Literal.String "__proto__"; - _; - })) as key; - value = v; - shorthand = false; - }) -> - let reason = mk_reason RPrototype (fst v) in - let (_, vt), _ as v = expression cx v in - let t = Tvar.mk_where cx reason (fun t -> - Flow.flow cx (vt, ObjTestProtoT (reason, t)) - ) in - sealed, - map, - Some t, - result, - Property (prop_loc, Property.Init { - key = translate_identifier_or_literal_key vt key; - value = v; - shorthand = false; - })::rev_prop_asts - | prop -> - let map, prop = object_prop cx map prop in - sealed, map, proto, result, prop::rev_prop_asts - ) (allow_sealed, SMap.empty, None, None, []) props in - - let sealed = match result with - | Some _ -> sealed - | None -> sealed && not (SMap.is_empty map) - in - eval_object ?proto ~sealed (map, result), - List.rev rev_prop_asts - -and variable cx kind ?if_uninitialized (vdecl_loc, vdecl) = Ast.Statement.( - let init_var, declare_var = Env.(match kind with - | VariableDeclaration.Const -> init_const, declare_const - | VariableDeclaration.Let -> init_let, declare_let - | VariableDeclaration.Var -> init_var, (fun _ _ _ -> ()) - ) in - let { VariableDeclaration.Declarator.id; init } = vdecl in - Ast.Expression.(match init with - | Some (_, Call { Call.callee = _, Identifier (_, "require"); _ }) + | SpreadProperty (prop_loc, { SpreadProperty.argument }) -> + let (((_, spread), _) as argument) = expression cx argument in + let not_empty_object_literal_argument = + match spread with + | DefT (_, _, ObjT { flags; _ }) -> Obj_type.sealed_in_op reason flags.sealed + | _ -> true + in + let obj = eval_object (map, 
result) in + let result = + mk_spread spread obj ~assert_exact:(not (SMap.is_empty map && result = None)) + in + ( sealed && not_empty_object_literal_argument, + SMap.empty, + proto, + Some result, + SpreadProperty (prop_loc, { SpreadProperty.argument }) :: rev_prop_asts ) + | Property (prop_loc, Property.Init { key = Property.Computed k; value = v; shorthand }) + -> + let (((_, kt), _) as k) = expression cx k in + let (((_, vt), _) as v) = expression cx v in + let obj = eval_object (map, result) in + let result = mk_computed kt vt obj in + ( sealed, + SMap.empty, + proto, + Some result, + Property (prop_loc, Property.Init { key = Property.Computed k; value = v; shorthand }) + :: rev_prop_asts ) + | Property (prop_loc, Property.Method { key = Property.Computed k; value = (fn_loc, fn) }) + -> + let (((_, kt), _) as k) = expression cx k in + let ((_, vt), v) = expression cx (fn_loc, Ast.Expression.Function fn) in + let fn = + match v with + | Ast.Expression.Function fn -> fn + | _ -> assert false + in + let obj = eval_object (map, result) in + let result = mk_computed kt vt obj in + ( sealed, + SMap.empty, + proto, + Some result, + Property + (prop_loc, Property.Method { key = Property.Computed k; value = (fn_loc, fn) }) + :: rev_prop_asts ) + | Property + ( prop_loc, + Property.Init + { + key = + ( Property.Identifier (_, { Ast.Identifier.name = "__proto__"; comments = _ }) + | Property.Literal + (_, { Ast.Literal.value = Ast.Literal.String "__proto__"; _ }) ) as key; + value = v; + shorthand = false; + } ) -> + let reason = mk_reason RPrototype (fst v) in + let (((_, vt), _) as v) = expression cx v in + let t = + Tvar.mk_where cx reason (fun t -> Flow.flow cx (vt, ObjTestProtoT (reason, t))) + in + ( sealed, + map, + Some t, + result, + Property + ( prop_loc, + Property.Init + { + key = translate_identifier_or_literal_key vt key; + value = v; + shorthand = false; + } ) + :: rev_prop_asts ) + | prop -> + let (map, prop) = object_prop cx map prop in + (sealed, map, proto, result, prop :: rev_prop_asts)) + (allow_sealed, SMap.empty, None, None, []) + props + in + let sealed = + match result with + | Some _ -> sealed + | None -> sealed && not (SMap.is_empty map) + in + (eval_object ?proto ~sealed (map, result), List.rev rev_prop_asts)) + +and variable cx kind ?if_uninitialized id init = + Ast.Statement.( + let (init_var, declare_var) = + match kind with + | VariableDeclaration.Const -> (Env.init_const, Env.declare_const) + | VariableDeclaration.Let -> (Env.init_let, Env.declare_let) + | VariableDeclaration.Var -> (Env.init_var, (fun _ _ _ -> ())) + in + Ast.Expression.( + match init with + | Some + ( _, + Call + { + Call.callee = (_, Identifier (_, { Ast.Identifier.name = "require"; comments = _ })); + _; + } ) when not (Env.local_scope_entry_exists "require") -> - let loc, _ = id in - (* Record the loc of the pattern, which contains the locations of any + let (loc, _) = id in + (* Record the loc of the pattern, which contains the locations of any local definitions introduced by the pattern. This information is used by commands to automatically "follow" such definitions to the actual definitions in the required module. *) - Type_inference_hooks_js.dispatch_require_pattern_hook loc - | _ -> () - ); - match id with - | (loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = (id_loc, name); annot; optional - }) -> - (* simple lvalue *) - (* make annotation, unify with declared type created in variable_decl *) - let t, annot_ast = - let desc = RIdentifier name in - let anno_reason = mk_reason desc loc in - Anno.mk_type_annotation cx SMap.empty anno_reason annot in - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - Env.unify_declared_type cx name t; - let has_anno = not (annot = None) in - Type_inference_hooks_js.(dispatch_lval_hook cx name loc (Val t)); - let id = (loc, t), Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = (id_loc, t), name; - annot = annot_ast; - optional; - } in - (match init with - | Some ((rhs_loc, _) as expr) -> - let (_, rhs_t), _ as rhs_ast = expression cx expr in - (** - * Const and let variables are not declared during evaluation of - * their initializer expressions. - *) - declare_var cx name id_loc; - let rhs = Flow.reposition cx rhs_loc rhs_t in - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) id_loc); - init = mk_expression_reason expr; - }) in - init_var cx ~use_op name ~has_anno rhs id_loc; - vdecl_loc, { VariableDeclaration.Declarator.id; init = Some rhs_ast } - | None -> - (match if_uninitialized with - | Some f -> - if not optional then - let t = f loc in - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) id_loc); - init = reason_of_t t; - }) in - init_var cx ~use_op name ~has_anno t id_loc - | None -> - if has_anno - then Env.pseudo_init_declared_type cx name id_loc - else declare_var cx name id_loc); - vdecl_loc, { VariableDeclaration.Declarator.id; init = None } - ) - | loc, _ -> - (* compound lvalue *) - let pattern_name = internal_pattern_name loc in - let annot = type_of_pattern id in - let has_anno = not (annot = None) in - let t, init_ast, init_reason = match init with - | Some expr -> - let (_, t), _ as expr_ast = expression cx expr in - t, Some expr_ast, mk_expression_reason expr - | None -> ( - let t = match if_uninitialized with - | Some f -> f loc - | None -> VoidT.at loc in - t, None, reason_of_t t - ) + Type_inference_hooks_js.dispatch_require_pattern_hook loc + | _ -> ()); + + (* Identifiers do not need to be initialized at the declaration site as long + * as they are definitely initialized before use. Destructuring patterns must + * be initialized, since their declaration involves some operation on the + * right hand side, like a property access. 
*) + let (init_opt, init_ast) = + match (id, init, if_uninitialized) with + | ((_, Ast.Pattern.Identifier _), None, None) -> (None, None) + | (_, Some expr, _) -> + let (((_, t), _) as init_ast) = expression cx expr in + let r = mk_expression_reason expr in + (Some (t, r), Some init_ast) + | ((ploc, _), None, Some f) -> + let t = f ploc in + let r = reason_of_t t in + (Some (t, r), None) + | ((ploc, _), None, None) -> + let t = VoidT.at ploc |> with_trust bogus_trust in + let r = reason_of_t t in + (Some (t, r), None) + in + let id_reason = + match id with + | (_, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name; _ }) -> + let (id_loc, { Ast.Identifier.name; _ }) = name in + mk_reason (RIdentifier name) id_loc + | (ploc, _) -> mk_reason RDestructuring ploc + in + let annot = Destructuring.type_of_pattern id in + let (annot_t, annot_ast) = Anno.mk_type_annotation cx SMap.empty id_reason annot in + let has_anno = + match annot with + | Ast.Type.Missing _ -> false + | Ast.Type.Available _ -> true + in + let id_ast = + match id with + | (ploc, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name; annot = _; optional }) -> + let (id_loc, { Ast.Identifier.name; comments }) = name in + (* move const/let bindings from undeclared to declared *) + declare_var cx name id_loc; + Env.unify_declared_type cx name annot_t; + Option.iter init_opt ~f:(fun (init_t, init_reason) -> + let use_op = Op (AssignVar { var = Some id_reason; init = init_reason }) in + init_var cx ~use_op name ~has_anno init_t id_loc); + Type_inference_hooks_js.(dispatch_lval_hook cx name id_loc (Val annot_t)); + ( (ploc, annot_t), + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = ((id_loc, annot_t), { Ast.Identifier.name; comments }); + annot = annot_ast; + optional; + } ) + | _ -> + Option.iter init_opt ~f:(fun (init_t, init_reason) -> + let use_op = Op (AssignVar { var = Some id_reason; init = init_reason }) in + Flow.flow cx (init_t, UseT (use_op, annot_t))); + let init = + Destructuring.empty + ?init + annot_t + ~annot: + (match annot with + | Ast.Type.Missing _ -> false + | Ast.Type.Available _ -> true) in - let use_op = Op (AssignVar { - var = None; - init = init_reason; - }) in - init_var cx ~use_op pattern_name ~has_anno t loc; - let id_ast = - destructuring cx ~expr:expression ~f:(fun ~use_op loc name default t -> + Destructuring.pattern cx ~expr:expression init id ~f:(fun ~use_op loc name default t -> let reason = mk_reason (RIdentifier name) loc in - Option.iter default (fun d -> - let default_t = Flow.mk_default cx reason d - ~expr:(fun cx e -> snd_fst (expression cx e)) in - Flow.flow_t cx (default_t, t) - ); - Flow.flow cx (t, AssertImportIsValueT(reason, name)); - init_var cx ~use_op name ~has_anno t loc - ) t init None id - in - vdecl_loc, { VariableDeclaration.Declarator. 
- id = id_ast; - init = init_ast; - } -) - -and expression_or_spread cx = Ast.Expression.(function - | Expression e -> - let (_, t), _ as e' = expression cx e in - Arg t, Expression e' - | Spread (loc, { SpreadElement.argument }) -> - let (_, t), _ as e' = expression cx argument in - SpreadArg t, Spread (loc, { SpreadElement.argument = e' }) -) - -and expression_or_spread_list cx undef_loc = Ast.Expression.( - Fn.compose List.split (List.map (function - | Some (Expression e) -> - let (_, t), _ as e = expression cx e in - UnresolvedArg t, Some (Expression e) - | None -> - UnresolvedArg (EmptyT.at undef_loc), None - | Some (Spread (loc, { SpreadElement.argument })) -> - let (_, t), _ as argument = expression cx argument in - UnresolvedSpreadArg t, - Some (Spread (loc, { SpreadElement.argument = argument })) - )) -) + declare_var cx name loc; + + (* The bindings introduced by destructuring an annotation should themselves behave + * like annotations. That is, subsequent writes to this binding should be compatible + * with the relevant part of the annotation. *) + let t = + if has_anno then + AnnotT + ( reason, + Tvar.mk_where cx reason (fun t' -> Flow.flow cx (t, BecomeT (reason, t'))), + false ) + else + t + in + let () = + if has_anno then ( + Env.unify_declared_type cx name t; + Env.pseudo_init_declared_type cx name loc + ) else + init_var cx ~use_op name ~has_anno t loc + in + Flow.flow cx (t, AssertImportIsValueT (reason, name)); + Option.iter default ~f:(fun d -> + let default_t = Flow.mk_default cx reason d in + Flow.flow cx (default_t, UseT (use_op, t)))) + in + (id_ast, init_ast)) + +and expression_or_spread cx = + Ast.Expression.( + function + | Expression e -> + let (((_, t), _) as e') = expression cx e in + (Arg t, Expression e') + | Spread (loc, { SpreadElement.argument }) -> + let (((_, t), _) as e') = expression cx argument in + (SpreadArg t, Spread (loc, { SpreadElement.argument = e' }))) + +and expression_or_spread_list cx undef_loc = + Ast.Expression.( + Fn.compose + List.split + (Core_list.map ~f:(function + | Some (Expression e) -> + let (((_, t), _) as e) = expression cx e in + (UnresolvedArg t, Some (Expression e)) + | None -> (UnresolvedArg (EmptyT.at undef_loc |> with_trust bogus_trust), None) + | Some (Spread (loc, { SpreadElement.argument })) -> + let (((_, t), _) as argument) = expression cx argument in + (UnresolvedSpreadArg t, Some (Spread (loc, { SpreadElement.argument })))))) (* can raise Abnormal.(Exn (Stmt _, _)) *) -and expression ?(is_cond=false) cx (loc, e) : (Loc.t, Loc.t * Type.t) Ast.Expression.t = - let (_, t), _ as e = expression_ ~is_cond cx loc e in - Type_table.set (Context.type_table cx) loc t; - e - -and this_ cx loc = Ast.Expression.( - match Refinement.get cx (loc, This) loc with - | Some t -> t - | None -> Env.var_ref cx (internal_name "this") loc -) - -and super_ cx loc = - Env.var_ref cx (internal_name "super") loc - -and expression_ ~is_cond cx loc e : (Loc.t, Loc.t * Type.t) Ast.Expression.t = - let ex = (loc, e) in Ast.Expression.(match e with - - | Ast.Expression.Literal lit -> - (loc, literal cx loc lit), Ast.Expression.Literal lit - - (* Treat the identifier `undefined` as an annotation for error reporting - * purposes. Like we do with other literals. Otherwise we end up pointing to - * `void` in `core.js`. While possible to re-declare `undefined`, it is - * unlikely. The tradeoff is worth it. 
*) - | Identifier (id_loc, ("undefined" as name)) -> - let t = mod_reason_of_t annot_reason (identifier cx name loc) in - (loc, t), Identifier ((id_loc, t), name) - - | Identifier (id_loc, name) -> +and expression ?(is_cond = false) cx (loc, e) = expression_ ~is_cond cx loc e + +and this_ cx loc = + Ast.Expression.( + match Refinement.get cx (loc, This) loc with + | Some t -> t + | None -> Env.var_ref cx (internal_name "this") loc) + +and super_ cx loc = Env.var_ref cx (internal_name "super") loc + +and expression_ ~is_cond cx loc e : (ALoc.t, ALoc.t * Type.t) Ast.Expression.t = + let make_trust = Context.trust_constructor cx in + let ex = (loc, e) in + Ast.Expression.( + match e with + | Ast.Expression.Literal lit -> ((loc, literal cx loc lit), Ast.Expression.Literal lit) + (* Treat the identifier `undefined` as an annotation for error reporting + * purposes. Like we do with other literals. Otherwise we end up pointing to + * `void` in `core.js`. While possible to re-declare `undefined`, it is + * unlikely. The tradeoff is worth it. *) + | Identifier (id_loc, ({ Ast.Identifier.name = "undefined"; comments = _ } as name)) -> + let t = Flow.reposition cx loc ~annot_loc:loc (identifier cx name loc) in + ((loc, t), Identifier ((id_loc, t), name)) + | Identifier (id_loc, name) -> let t = identifier cx name loc in - (loc, t), Identifier ((id_loc, t), name) - - | This -> + ((loc, t), Identifier ((id_loc, t), name)) + | This -> let t = this_ cx loc in - let id_info = "this", t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - (loc, t), This - - | Super -> - (loc, identifier cx "super" loc), Super - - | Unary u -> - let t, u = unary cx loc u in - (loc, t), Unary u - - | Update u -> - let t, u = update cx loc u in - (loc, t), Update u - - | Binary b -> - let t, b = binary cx loc b in - (loc, t), Binary b - - | Logical l -> - let t, l = logical cx loc l in - (loc, t), Logical l - - | TypeCast { TypeCast.expression = e; annot } -> - let r = mk_reason (RCustom "typecast") loc in - let t, annot' = Anno.mk_type_annotation cx SMap.empty r (Some annot) in - let annot' = Option.value_exn annot' in - Type_table.set (Context.type_table cx) loc t; - let (_, infer_t), _ as e' = expression cx e in - let use_op = Op (Cast { - lower = mk_expression_reason e; - upper = reason_of_t t; - }) in + ((loc, t), This) + | Super -> ((loc, identifier cx (mk_ident ~comments:None "super") loc), Super) + | Unary u -> + let (t, u) = unary cx loc u in + ((loc, t), Unary u) + | Update u -> + let (t, u) = update cx loc u in + ((loc, t), Update u) + | Binary b -> + let (t, b) = binary cx loc b in + ((loc, t), Binary b) + | Logical l -> + let (t, l) = logical cx loc l in + ((loc, t), Logical l) + | TypeCast { TypeCast.expression = e; annot } -> + let (t, annot') = Anno.mk_type_available_annotation cx SMap.empty annot in + let (((_, infer_t), _) as e') = expression cx e in + let use_op = Op (Cast { lower = mk_expression_reason e; upper = reason_of_t t }) in Flow.flow cx (infer_t, UseT (use_op, t)); - (loc, t), - TypeCast { TypeCast.expression = e'; annot = annot' } - - | Member _ -> subscript ~is_cond cx ex - - | OptionalMember _ -> subscript ~is_cond cx ex - - | Object { Object.properties } -> - let reason = mk_reason RObjectLit loc in - let t, properties = object_ cx reason properties in - (loc, t), Object { Object.properties } - - | Array { Array.elements } -> ( - let reason = mk_reason RArrayLit loc in - match elements with - | [] -> + ((loc, t), TypeCast { TypeCast.expression = e'; annot = annot' 
}) + | Member _ -> subscript ~is_cond cx ex + | OptionalMember _ -> subscript ~is_cond cx ex + | Object { Object.properties; comments } -> + let reason = mk_reason RObjectLit loc in + let (t, properties) = object_ cx reason properties in + ((loc, t), Object { Object.properties; comments }) + | Array { Array.elements; comments } -> + let reason = mk_reason RArrayLit loc in + (match elements with + | [] -> (* empty array, analogous to object with implicit properties *) let element_reason = mk_reason Reason.unknown_elem_empty_array_desc loc in let elemt = Tvar.mk cx element_reason in - let reason = replace_reason_const REmptyArrayLit reason in - (loc, DefT (reason, ArrT (ArrayAT (elemt, Some [])))), - Array { Array.elements = [] } - | elems -> - let elem_spread_list, elements = expression_or_spread_list cx loc elems in - ( - loc, - Tvar.mk_where cx reason (fun tout -> - let reason_op = reason in - let element_reason = - replace_reason_const Reason.inferred_union_elem_array_desc reason_op in - let elem_t = Tvar.mk cx element_reason in - let resolve_to = (ResolveSpreadsToArrayLiteral (mk_id (), elem_t, tout)) in - - Flow.resolve_spread_list cx ~use_op:unknown_use ~reason_op elem_spread_list resolve_to - ) - ), - Array { Array.elements; } - ) - - | New { - New.callee = callee_loc, Identifier (id_loc, ("Function" as name)); - targs; - arguments - } -> ( - let targts_opt = Option.map targs (fun (targts_loc, args) -> - targts_loc, List.map (Anno.convert cx SMap.empty) args - ) in - let argts, arges = arguments - |> List.map (expression_or_spread cx) - |> List.split in + let reason = replace_desc_reason REmptyArrayLit reason in + ( (loc, DefT (reason, make_trust (), ArrT (ArrayAT (elemt, Some [])))), + Array { Array.elements = []; comments } ) + | elems -> + let (elem_spread_list, elements) = expression_or_spread_list cx loc elems in + ( ( loc, + Tvar.mk_where cx reason (fun tout -> + let reason_op = reason in + let element_reason = + replace_desc_reason Reason.inferred_union_elem_array_desc reason_op + in + let elem_t = Tvar.mk cx element_reason in + let resolve_to = ResolveSpreadsToArrayLiteral (mk_id (), elem_t, tout) in + Flow.resolve_spread_list + cx + ~use_op:unknown_use + ~reason_op + elem_spread_list + resolve_to) ), + Array { Array.elements; comments } )) + | New + { + New.callee = + ( callee_loc, + Identifier (id_loc, ({ Ast.Identifier.name = "Function"; comments = _ } as name)) ); + targs; + arguments; + comments; + } -> + let targts_opt = + Option.map targs (fun (targts_loc, args) -> + (targts_loc, convert_tparam_instantiations cx SMap.empty args)) + in + let (argts, arges) = arguments |> Core_list.map ~f:(expression_or_spread cx) |> List.split in let id_t = identifier cx name callee_loc in - let callee_annot = callee_loc, id_t in - match targts_opt with + let callee_annot = (callee_loc, id_t) in + (match targts_opt with | None -> - List.iter (function Arg t | SpreadArg t -> - Flow.flow_t cx (t, StrT.at loc) - ) argts; + List.iter + (function + | Arg t + | SpreadArg t -> + Flow.flow_t cx (t, StrT.at loc |> with_trust bogus_trust)) + argts; let reason = mk_reason (RCustom "new Function(..)") loc in let proto = ObjProtoT reason in - ( - loc, - DefT (reason, FunT ( - dummy_static reason, - dummy_prototype, - mk_functiontype reason - [] ~rest_param:None ~def_reason:reason ~params_names:[] proto - )) - ), - New { - New.callee = callee_annot, Identifier ((id_loc, id_t), name); - targs = None; - arguments = arges - } + ( ( loc, + DefT + ( reason, + bogus_trust (), + FunT + ( dummy_static 
reason, + dummy_prototype, + mk_functiontype + reason + [] + ~rest_param:None + ~def_reason:reason + ~params_names:[] + proto ) ) ), + New + { + New.callee = (callee_annot, Identifier ((id_loc, id_t), name)); + targs = None; + arguments = arges; + comments; + } ) | Some (targts_loc, targts) -> - Flow.add_output cx Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = true; - reason_arity = Reason.(locationless_reason (RType "Function")); - expected_arity = 0; - }); - (loc, AnyT.at loc), - New { - New.callee = callee_annot, Identifier ((id_loc, id_t), name); - targs = Some (targts_loc, targts); - arguments = arges - } - ) - - | New { - New.callee = callee_loc, Identifier (id_loc, ("Array" as name)); - targs; - arguments - } -> ( - let targts = Option.map targs (fun (loc, args) -> - loc, List.map (Anno.convert cx SMap.empty) args - ) in - let args = List.map (expression_or_spread cx) arguments in - let result = match targts, args with - | Some (loc, [elem_t]), [Arg argt, arg] -> Ok (Some (loc, elem_t), argt, arg) - | None, [Arg argt, arg] -> Ok (None, argt, arg) - | None, _ -> Error (Flow_error.EUseArrayLiteral loc) - | Some _, _ -> - Error Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = true; - reason_arity = Reason.(locationless_reason (RType name)); - expected_arity = 1; - }) + Flow.add_output + cx + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = true; + reason_arity = Reason.(locationless_reason (RType "Function")); + expected_arity = 0; + }); + ( (loc, AnyT.at AnyError loc), + New + { + New.callee = (callee_annot, Identifier ((id_loc, id_t), name)); + targs = Some (targts_loc, snd targts); + arguments = arges; + comments; + } )) + | New + { + New.callee = + ( callee_loc, + Identifier (id_loc, ({ Ast.Identifier.name = "Array" as n; comments = _ } as name)) + ); + targs; + arguments; + comments; + } -> + let targts = + Option.map targs (fun (loc, args) -> + (loc, convert_tparam_instantiations cx SMap.empty args)) in - match result with + let args = Core_list.map ~f:(expression_or_spread cx) arguments in + let result = + match (targts, args) with + | (Some (loc, ([t], [elem_t])), [(Arg argt, arg)]) -> Ok (Some (loc, elem_t, t), argt, arg) + | (None, [(Arg argt, arg)]) -> Ok (None, argt, arg) + | (None, _) -> Error (Error_message.EUseArrayLiteral loc) + | (Some _, _) -> + Error + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = true; + reason_arity = Reason.(locationless_reason (RType n)); + expected_arity = 1; + }) + in + (match result with | Ok (targ_t, arg_t, arg) -> let reason = mk_reason (RCustom "new Array(..)") loc in - let length_reason = - replace_reason_const (RCustom "array length") reason in - Flow.flow_t cx (arg_t, DefT (length_reason, NumT AnyLiteral)); - let t, targs = match targ_t with - | Some (loc, ((_, t), _ as targ)) -> t, Some (loc, [targ]) - | None -> - let element_reason = - replace_reason_const (RCustom "array element") reason in - Tvar.mk cx element_reason, None + let length_reason = replace_desc_reason (RCustom "array length") reason in + Flow.flow_t cx (arg_t, DefT (length_reason, bogus_trust (), NumT AnyLiteral)); + let (t, targs) = + match targ_t with + | Some (loc, ast, ExplicitArg t) -> (t, Some (loc, [ast])) + | Some (_, _, ImplicitArg _) + | None -> + let element_reason = replace_desc_reason (RCustom "array element") reason in + (Tvar.mk cx element_reason, None) in let id_t = identifier cx name callee_loc in (* TODO - tuple_types could be undefined x N if given a literal *) - (loc, DefT (reason, ArrT (ArrayAT 
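+              (* For example, `new Array(3)` flows the argument into `number`
+                 via the "array length" constraint above; the element type is
+                 taken from a single explicit type argument when one is given,
+                 and otherwise left as a fresh tvar for inference. Any other
+                 argument shape is reported through EUseArrayLiteral or
+                 ECallTypeArity. *)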
(t, None)))), - New { New. - callee = (callee_loc, id_t), Identifier ((id_loc, id_t), name); - targs; - arguments = [arg]; - } + ( (loc, DefT (reason, bogus_trust (), ArrT (ArrayAT (t, None)))), + New + { + New.callee = ((callee_loc, id_t), Identifier ((id_loc, id_t), name)); + targs; + arguments = [arg]; + comments; + } ) | Error err -> Flow.add_output cx err; - Typed_ast.error_annot, - Typed_ast.Expression.error - ) - - | New { New.callee; targs; arguments } -> - let (_, class_), _ as callee_ast = expression cx callee in - let targts, targs_ast = convert_targs cx targs in - let argts, arguments_ast = - arguments - |> List.map (expression_or_spread cx) - |> List.split in + Tast_utils.error_mapper#expression ex) + | New { New.callee; targs; arguments; comments } -> + let (((_, class_), _) as callee_ast) = expression cx callee in + let (targts, targs_ast) = convert_targs cx targs in + let (argts, arguments_ast) = + arguments |> Core_list.map ~f:(expression_or_spread cx) |> List.split + in let reason = mk_reason (RConstructorCall (desc_of_t class_)) loc in - let use_op = Op (FunCall { - op = mk_expression_reason ex; - fn = mk_expression_reason callee; - args = mk_initial_arguments_reason arguments; - }) in - (loc, new_call cx reason ~use_op class_ targts argts), - New {New. - callee = callee_ast; - targs = targs_ast; - arguments = arguments_ast; - } - - | Call _ -> subscript ~is_cond cx ex - - | OptionalCall _ -> subscript ~is_cond cx ex - - | Conditional { Conditional.test; consequent; alternate } -> + let use_op = + Op + (FunCall + { + op = mk_expression_reason ex; + fn = mk_expression_reason callee; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in + ( (loc, new_call cx reason ~use_op class_ targts argts), + New { New.callee = callee_ast; targs = targs_ast; arguments = arguments_ast; comments } ) + | Call _ -> subscript ~is_cond cx ex + | OptionalCall _ -> subscript ~is_cond cx ex + | Conditional { Conditional.test; consequent; alternate } -> let reason = mk_reason RConditional loc in - let test, preds, not_preds, xtypes = predicates_of_condition cx test in - let env = Env.peek_env () in - let oldset = Changeset.clear () in - + let (test, preds, not_preds, xtypes) = predicates_of_condition cx test in + let env = Env.peek_env () in + let oldset = Changeset.Global.clear () in let then_env = Env.clone_env env in Env.update_env cx loc then_env; let _ = Env.refine_with_preds cx loc preds xtypes in - let (_, t1), _ as consequent = expression cx consequent in - + let ((((_, t1), _) as consequent), then_abnormal) = + Abnormal.catch_expr_control_flow_exception (fun () -> expression cx consequent) + in let else_env = Env.clone_env env in Env.update_env cx loc else_env; let _ = Env.refine_with_preds cx loc not_preds xtypes in - let (_, t2), _ as alternate = expression cx alternate in + let ((((_, t2), _) as alternate), else_abnormal) = + Abnormal.catch_expr_control_flow_exception (fun () -> expression cx alternate) + in + let newset = Changeset.Global.merge oldset in + let (end_env, combined_type) = + match (then_abnormal, else_abnormal) with + (* If one side throws (using invariant()) only refine with the other + side.*) + | (Some Abnormal.Throw, None) -> (else_env, t2) + | (None, Some Abnormal.Throw) -> (then_env, t1) + | (Some Abnormal.Throw, Some Abnormal.Throw) -> + Env.merge_env cx loc (env, then_env, else_env) newset; + (env, EmptyT.at loc |> with_trust bogus_trust) + (* Both sides threw--see below for where we re-raise *) + | (None, None) -> + Env.merge_env cx loc 
(env, then_env, else_env) (Changeset.exclude_refines newset); + (env, UnionT (reason, UnionRep.make t1 t2 [])) + (* NOTE: In general it is dangerous to express the least upper bound of + some types as a union: it might pin down the least upper bound + prematurely (before all the types have been inferred), and when the + union appears as an upper bound, it might lead to speculative matching. + + However, here a union is safe, because this union is guaranteed to only + appear as a lower bound. + + In such "covariant" positions, avoiding unnecessary indirection via + tvars is a good thing, because it improves precision. In particular, it + enables more types to be fully resolvable, which improves results of + speculative matching. + + It should be possible to do this more broadly and systematically. For + example, results of operations on annotations (like property gets on + objects, calls on functions) are often represented as unresolved tvars, + where they could be pinned down to resolved types. + *) + | _ -> + (* The only kind of abnormal control flow that should be raised from + an expression is a Throw. The other kinds (return, break, continue) + can only arise from statements, and while statements can appear within + expressions (eg function expressions), any abnormals will be handled + before they get here. *) + assert_false "Unexpected abnormal control flow from within expression" + in + Env.update_env cx loc end_env; - let newset = Changeset.merge oldset in - Env.merge_env cx loc (env, then_env, else_env) - (Changeset.exclude_refines newset); - Env.update_env cx loc env; (* TODO call loc_of_predicate on some pred? t1 is wrong but hopefully close *) - - (* NOTE: In general it is dangerous to express the least upper bound of - some types as a union: it might pin down the least upper bound - prematurely (before all the types have been inferred), and when the - union appears as an upper bound, it might lead to speculative matching. - - However, here a union is safe, because this union is guaranteed to only - appear as a lower bound. - - In such "covariant" positions, avoiding unnecessary indirection via - tvars is a good thing, because it improves precision. In particular, it - enables more types to be fully resolvable, which improves results of - speculative matching. - - It should be possible to do this more broadly and systematically. For - example, results of operations on annotations (like property gets on - objects, calls on functions) are often represented as unresolved tvars, - where they could be pinned down to resolved types. - *) - (loc, DefT (reason, UnionT (UnionRep.make t1 t2 []))), - Conditional { Conditional. 
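+        (* For example, in `const x = f() ? invariant(false) : 0;` the
+           consequent completes abnormally with Throw, so the
+           (Some Throw, None) arm above keeps only the alternate's
+           environment and the conditional is typed from t2 alone, rather
+           than as a union with the dead consequent. *)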
- test = test; - consequent = consequent; - alternate = alternate; - } - - | Assignment { Assignment.operator; left; right } -> - let t, left, right = assignment cx loc (left, operator, right) in - (loc, t), Assignment { Assignment.operator; left; right; } - - | Sequence { Sequence.expressions } -> - let expressions = List.map (expression cx) expressions in + let ast = ((loc, combined_type), Conditional { Conditional.test; consequent; alternate }) in + (* handle control flow in cases where we've thrown from both sides *) + begin + match (then_abnormal, else_abnormal) with + | (Some then_exn, Some else_exn) when then_exn = else_exn -> + Abnormal.throw_expr_control_flow_exception loc ast then_exn + | _ -> ast + end + | Assignment { Assignment.operator; left; right } -> + let (t, left, right) = assignment cx loc (left, operator, right) in + ((loc, t), Assignment { Assignment.operator; left; right }) + | Sequence { Sequence.expressions } -> + let expressions = Core_list.map ~f:(expression cx) expressions in (* t = last element of ts. The parser guarantees sequence expressions are nonempty. *) let t = List.(expressions |> map snd_fst |> rev |> hd) in - (loc, t), Sequence { Sequence.expressions } - - | Function func -> - let {Ast.Function.id; params; return; predicate; _} = func in - let sig_loc = match params, return with - | _, Ast.Function.Available (end_loc, _) - | (end_loc, _), Ast.Function.Missing _ - -> Loc.btwn loc end_loc - in - + ((loc, t), Sequence { Sequence.expressions }) + | Function func -> + let { Ast.Function.id; predicate; sig_loc; _ } = func in (match predicate with | Some (_, Ast.Type.Predicate.Inferred) -> - Flow.add_output cx Flow_error.( - EUnsupportedSyntax (loc, PredicateDeclarationWithoutExpression) - ) - | _ -> ()); - - let t, func = mk_function id cx sig_loc func in - (match id with - | Some (id_loc, name) -> - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx) + Flow.add_output + cx + Error_message.(EUnsupportedSyntax (loc, PredicateDeclarationWithoutExpression)) | _ -> ()); - (loc, t), Function func - | ArrowFunction func -> - let t, f = mk_arrow cx loc func in (loc, t), ArrowFunction f - - | TaggedTemplate { - TaggedTemplate.tag = tag_loc, (Identifier (id_loc, ("query" as name))); - (* TODO: walk quasis? *) - quasi = quasi_loc, { TemplateLiteral.quasis; expressions } - } -> - let expressions = List.map (expression cx) expressions in - (* TODO what is the type of "query"? is it in the environment? *) - let id_t = AnyFunT.at tag_loc in - (*parse_graphql cx encaps;*) - (loc, VoidT.at loc), - TaggedTemplate { TaggedTemplate. - tag = (tag_loc, id_t), Identifier ((id_loc, id_t), name); - quasi = quasi_loc, { TemplateLiteral.quasis; expressions; }; - } - - | TaggedTemplate { - TaggedTemplate.tag; - (* TODO: walk quasis? *) - quasi = quasi_loc, { TemplateLiteral.quasis; expressions } - } -> - let expressions = List.map (expression cx) expressions in - let (_, t), _ as tag_ast = expression cx tag in + let (t, func) = mk_function_expression id cx sig_loc func in + ((loc, t), Function func) + | ArrowFunction func -> + let (t, f) = mk_arrow cx loc func in + ((loc, t), ArrowFunction f) + | TaggedTemplate + { + TaggedTemplate.tag; + (* TODO: walk quasis? 
*) + quasi = (quasi_loc, { TemplateLiteral.quasis; expressions }); + } -> + let expressions = Core_list.map ~f:(expression cx) expressions in + let (((_, t), _) as tag_ast) = expression cx tag in let reason = mk_reason (RCustom "encaps tag") loc in - let reason_array = replace_reason_const RArray reason in + let reason_array = replace_desc_reason RArray reason in let ret = Tvar.mk cx reason in - let args = - [ Arg (DefT (reason_array, ArrT (ArrayAT (StrT.why reason, None)))); - SpreadArg (AnyT.why reason) ] in - let ft = mk_functioncalltype reason None args ret in - let use_op = Op (FunCall { - op = mk_expression_reason ex; - fn = mk_expression_reason tag; - args = []; - }) in - Flow.flow cx (t, CallT (use_op, reason, ft)); - (loc, ret), - TaggedTemplate { TaggedTemplate. - tag = tag_ast; - quasi = quasi_loc, { TemplateLiteral.quasis; expressions; }; - } - - | TemplateLiteral { - TemplateLiteral.quasis; - expressions - } -> - let t, expressions = match quasis with - | [head] -> - let elem_loc, { TemplateLiteral.Element. - value = { TemplateLiteral.Element.raw; cooked; }; _ - } = head in - let lit = { Ast.Literal.value = Ast.Literal.String cooked; raw; } in - literal cx elem_loc lit, [] - | _ -> - let t_out = StrT.at loc in - let expressions = List.map (fun expr -> - let (_, t), _ as e = expression cx expr in - Flow.flow cx (t, UseT (Op (Coercion { - from = mk_expression_reason expr; - target = reason_of_t t_out; - }), t_out)); - e - ) expressions in - t_out, expressions + (* tag`a${b}c${d}` -> tag(['a', 'c'], b, d) *) + let call_t = + let args = + let quasi_t = + DefT + ( reason_array, + bogus_trust (), + ArrT (ArrayAT (StrT.why reason |> with_trust bogus_trust, None)) ) + in + let exprs_t = Core_list.map ~f:(fun ((_, t), _) -> Arg t) expressions in + Arg quasi_t :: exprs_t + in + let ft = mk_functioncalltype reason None args ret in + let use_op = + Op + (FunCall + { + op = mk_expression_reason ex; + fn = mk_expression_reason tag; + args = []; + local = true; + }) + in + CallT (use_op, reason, ft) in - (loc, t), TemplateLiteral { TemplateLiteral.quasis; expressions } - - | JSXElement e -> - let t, e = jsx cx e in - (loc, t), JSXElement e - - | JSXFragment f -> - let t, f = jsx_fragment cx f in - (loc, t), JSXFragment f - - | Class c -> - let (name_loc, name) = extract_class_name loc c in - let reason = mk_reason (RIdentifier name) loc in + Flow.flow cx (t, call_t); + + ( (loc, ret), + TaggedTemplate + { + TaggedTemplate.tag = tag_ast; + quasi = (quasi_loc, { TemplateLiteral.quasis; expressions }); + } ) + | TemplateLiteral { TemplateLiteral.quasis; expressions } -> + let (t, expressions) = + match quasis with + | [head] -> + let ( elem_loc, + { TemplateLiteral.Element.value = { TemplateLiteral.Element.raw; cooked }; _ } ) = + head + in + let lit = + { + Ast.Literal.value = Ast.Literal.String cooked; + raw; + comments = Flow_ast_utils.mk_comments_opt (); + } + in + (literal cx elem_loc lit, []) + | _ -> + let t_out = StrT.at loc |> with_trust bogus_trust in + let expressions = + Core_list.map + ~f:(fun expr -> + let (((_, t), _) as e) = expression cx expr in + Flow.flow + cx + ( t, + UseT + ( Op + (Coercion + { from = mk_expression_reason expr; target = reason_of_t t_out }), + t_out ) ); + e) + expressions + in + (t_out, expressions) + in + ((loc, t), TemplateLiteral { TemplateLiteral.quasis; expressions }) + | JSXElement e -> + let (t, e) = jsx cx loc e in + ((loc, t), JSXElement e) + | JSXFragment f -> + let (t, f) = jsx_fragment cx loc f in + ((loc, t), JSXFragment f) + | Class c -> + 
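(* A named class expression, e.g.
   `const C = class Self { m() { return new Self(); } };`,
   is checked below in a fresh scope carrying a ClassNameBinding for `Self`,
   so the body can refer to the class by its own name; an anonymous class
   expression skips that extra scope. *)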
let class_loc = loc in + let (name_loc, name) = extract_class_name class_loc c in + let reason = mk_reason (RIdentifier name) class_loc in (match c.Ast.Class.id with | Some _ -> - let tvar = Tvar.mk cx reason in - let id_info = name, tvar, Type_table.Other in - Type_table.set_info name_loc id_info (Context.type_table cx); - let scope = Scope.fresh () in - Scope.( - let kind = Entry.ClassNameBinding in - let entry = Entry.( - new_let tvar ~loc:name_loc ~state:State.Declared ~kind - ) in - add_entry name entry scope - ); - Env.push_var_scope cx scope; - let class_t, c = mk_class cx loc reason c in - Env.pop_var_scope (); - Flow.flow_t cx (class_t, tvar); - (loc, class_t), Class c + let tvar = Tvar.mk cx reason in + let scope = Scope.fresh () in + Scope.( + let kind = Entry.ClassNameBinding in + let entry = Entry.(new_let tvar ~loc:name_loc ~state:State.Declared ~kind) in + add_entry name entry scope); + Env.push_var_scope cx scope; + let (class_t, c) = mk_class cx class_loc ~name_loc reason c in + Env.pop_var_scope (); + Flow.flow_t cx (class_t, tvar); + ((class_loc, class_t), Class c) | None -> - let class_t, c = mk_class cx loc reason c in - (loc, class_t), Class c - ) - - | Yield { Yield.argument; delegate = false } -> + let (class_t, c) = mk_class cx class_loc ~name_loc reason c in + ((class_loc, class_t), Class c)) + | Yield { Yield.argument; delegate = false; comments } -> let yield = Env.get_internal_var cx "yield" loc in - let t, argument_ast = match argument with - | Some expr -> - let (_, t), _ as expr = expression cx expr in - t, Some expr - | None -> VoidT.at loc, None in + let (t, argument_ast) = + match argument with + | Some expr -> + let (((_, t), _) as expr) = expression cx expr in + (t, Some expr) + | None -> (VoidT.at loc |> with_trust bogus_trust, None) + in Env.havoc_heap_refinements (); - let use_op = Op (GeneratorYield { - value = (match argument with - | Some expr -> mk_expression_reason expr - | None -> reason_of_t t); - }) in + let use_op = + Op + (GeneratorYield + { + value = + (match argument with + | Some expr -> mk_expression_reason expr + | None -> reason_of_t t); + }) + in Flow.flow cx (t, UseT (use_op, yield)); - (loc, Env.get_internal_var cx "next" loc), - Yield { Yield.argument = argument_ast; delegate = false } - - | Yield { Yield.argument; delegate = true } -> + ( (loc, Env.get_internal_var cx "next" loc), + Yield { Yield.argument = argument_ast; delegate = false; comments } ) + | Yield { Yield.argument; delegate = true; comments } -> let reason = mk_reason (RCustom "yield* delegate") loc in let next = Env.get_internal_var cx "next" loc in let yield = Env.get_internal_var cx "yield" loc in - let t, argument_ast = match argument with - | Some expr -> - let (_, t), _ as expr = expression cx expr in - t, Some expr - | None -> assert_false "delegate yield without argument" in - - let ret_reason = replace_reason (fun desc -> RCustom ( - spf "return of child generator in %s" (string_of_desc desc) - )) reason in + let (t, argument_ast) = + match argument with + | Some expr -> + let (((_, t), _) as expr) = expression cx expr in + (t, Some expr) + | None -> assert_false "delegate yield without argument" + in + let ret_reason = + update_desc_reason + (fun desc -> RCustom (spf "return of child generator in %s" (string_of_desc desc))) + reason + in let ret = Tvar.mk cx ret_reason in - (* widen yield with the element type of the delegated-to iterable *) let iterable = let targs = [yield; ret; next] in if Env.in_async_scope () then - let reason = - mk_reason (RCustom 
"async iteration expected on AsyncIterable") loc - in + let reason = mk_reason (RCustom "async iteration expected on AsyncIterable") loc in Flow.get_builtin_typeapp cx reason "$AsyncIterable" targs else - Flow.get_builtin_typeapp cx + Flow.get_builtin_typeapp + cx (mk_reason (RCustom "iteration expected on Iterable") loc) - "$Iterable" targs + "$Iterable" + targs in Env.havoc_heap_refinements (); - let use_op = Op (GeneratorYield { - value = (match argument with - | Some expr -> mk_expression_reason expr - | None -> reason_of_t t); - }) in - Flow.flow cx (t, UseT (use_op, iterable)); - - (loc, ret), - Yield { Yield.argument = argument_ast; delegate = true } - - (* TODO *) - | Comprehension _ -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ComprehensionExpression)); - (loc, EmptyT.at loc), Typed_ast.Expression.error - - | Generator _ -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, GeneratorExpression)); - (loc, EmptyT.at loc), Typed_ast.Expression.error - - | MetaProperty _-> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, MetaPropertyExpression)); - (loc, EmptyT.at loc), Typed_ast.Expression.error - - | Import arg -> ( - match arg with - | source_loc, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.String module_name; raw; - } - | source_loc, TemplateLiteral { - TemplateLiteral.quasis = [_, { - TemplateLiteral.Element.value = { - TemplateLiteral.Element.cooked = module_name; raw; - }; _ - }]; - expressions = []; - } -> - - let imported_module_t = - let import_reason = mk_reason (RModule module_name) loc in - import_ns cx import_reason (source_loc, module_name) loc + let use_op = + Op + (GeneratorYield + { + value = + (match argument with + | Some expr -> mk_expression_reason expr + | None -> reason_of_t t); + }) in + Flow.flow cx (t, UseT (use_op, iterable)); - let reason = annot_reason (mk_reason (RCustom "async import") loc) in - (loc, Flow.get_builtin_typeapp cx reason "Promise" [imported_module_t]), - Ast.Expression.Literal { Ast.Literal. 
- value = Ast.Literal.String module_name; - raw; - } - | _ -> - let ignore_non_literals = - Context.should_ignore_non_literal_requires cx in - if not ignore_non_literals - then - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ImportDynamicArgument)); - (loc, AnyT.at loc), Typed_ast.Expression.error - ) -) + ((loc, ret), Yield { Yield.argument = argument_ast; delegate = true; comments }) + (* TODO *) + | Comprehension _ -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ComprehensionExpression)); + Tast_utils.error_mapper#expression ex + | Generator _ -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, GeneratorExpression)); + Tast_utils.error_mapper#expression ex + | MetaProperty _ -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, MetaPropertyExpression)); + Tast_utils.error_mapper#expression ex + | Import arg -> + (match arg with + | ( source_loc, + Ast.Expression.Literal + { Ast.Literal.value = Ast.Literal.String module_name; raw; comments = _ } ) + | ( source_loc, + TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = module_name; raw }; + _; + } ); + ]; + expressions = []; + } ) -> + let comments = + match arg with + | (_, Ast.Expression.Literal { Ast.Literal.comments; _ }) -> comments + | _ -> None + in + let imported_module_t = + let import_reason = mk_reason (RModule module_name) loc in + Import_export.import_ns cx import_reason (source_loc, module_name) loc + in + let reason = annot_reason (mk_reason (RCustom "async import") loc) in + let t = Flow.get_builtin_typeapp cx reason "Promise" [imported_module_t] in + ( (loc, t), + Import + ( (source_loc, t), + Ast.Expression.Literal + { Ast.Literal.value = Ast.Literal.String module_name; raw; comments } ) ) + | _ -> + let ignore_non_literals = Context.should_ignore_non_literal_requires cx in + if not ignore_non_literals then ( + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ImportDynamicArgument)); + Tast_utils.error_mapper#expression ex + ) else + Tast_utils.unchecked_mapper#expression ex)) (* Handles subscript operations. Whereas `expression` recursively computes the type of the LHS, `subscript` instead walks the AST to first build up a @@ -3307,9 +3444,8 @@ and expression_ ~is_cond cx loc e : (Loc.t, Loc.t * Type.t) Ast.Expression.t = and emit constraints as we go along and omit the recursion. *) and subscript = - let open Ast.Expression in - - (* As long as we encounter AST nodes for optional subscript operations (which + Ast.Expression.( + (* As long as we encounter AST nodes for optional subscript operations (which require recursion on the LHS), prepend those nodes to acc and recursively call `build_chain` on the LHS. @@ -3317,732 +3453,796 @@ and subscript = all in one place and so that the optional nodes can leverage the non-optional pattern matching. *) - let rec build_chain ~is_cond cx ((loc, e) as ex) acc = - let opt_state, e' = match e with - | OptionalCall { OptionalCall. 
- call = { Call.callee; targs = _; arguments = _ } as call; - optional; - } -> - warn_or_ignore_optional_chaining optional cx loc; - begin match callee with - | _, Member _ - | _, OptionalMember _ -> - Flow.add_output cx Flow_error.(EOptionalChainingMethods loc) - | _ -> () - end; - let opt_state = if optional then NewChain else ContinueChain in - opt_state, Call call - - | OptionalMember { OptionalMember.member; optional } -> - warn_or_ignore_optional_chaining optional cx loc; - let opt_state = if optional then NewChain else ContinueChain in - opt_state, Member member - | _ -> NonOptional, e - in - let call_ast call = match opt_state with - | NewChain -> OptionalCall { OptionalCall.call; optional = true } - | ContinueChain -> OptionalCall { OptionalCall.call; optional = false } - | NonOptional -> Call call - in - let member_ast member = match opt_state with - | NewChain -> OptionalMember { OptionalMember.member; optional = true } - | ContinueChain -> OptionalMember { OptionalMember.member; optional = true } - | NonOptional -> Member member - in - - match e' with - | Call { - Call.callee = callee_loc, Identifier (id_loc, ("require" as name)); - targs; - arguments; - } when not (Env.local_scope_entry_exists name) -> - let lhs_t, arguments = ( - let targts = Option.map targs (fun (_, args) -> - List.map (Anno.convert cx SMap.empty) args - ) in - match targts, arguments with - | None, [ Expression (source_loc, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.String module_name; _; - } as lit_exp) ] -> - require cx (source_loc, module_name) loc, - [ Expression (expression cx lit_exp) ] - | None, [ Expression (source_loc, TemplateLiteral { - TemplateLiteral.quasis = [ _, { - TemplateLiteral.Element.value = { - TemplateLiteral.Element.cooked = module_name; _; - }; _; - } ]; - expressions = []; - } as lit_exp) ] -> - require cx (source_loc, module_name) loc, - [ Expression (expression cx lit_exp) ] - | Some _, _ -> - List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; - Flow.add_output cx Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = false; - reason_arity = Reason.(locationless_reason (RFunction RNormal)); - expected_arity = 0; - }); - AnyT.at loc, Typed_ast.Expression.expression_or_spread_list_error - | _ -> - List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; - let ignore_non_literals = - Context.should_ignore_non_literal_requires cx in - if not ignore_non_literals - then - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, RequireDynamicArgument)); - AnyT.at loc, Typed_ast.Expression.expression_or_spread_list_error - ) in - (* TODO(vijayramamurthy) type of require ? *) - let id_t = AnyFunT.at callee_loc in - ex, lhs_t, acc, ( - (loc, lhs_t), - call_ast { Call. 
- callee = (callee_loc, id_t), Identifier ((id_loc, id_t), name); - targs = None; - arguments; - } - ) - - | Call { - Call.callee = callee_loc, Identifier (id_loc, ("requireLazy" as name)); - targs; - arguments; - } when not (Env.local_scope_entry_exists name) -> - let lhs_t, arguments = ( - let targts = Option.map targs (fun (_, args) -> - List.map (Anno.convert cx SMap.empty) args - ) in - match targts, arguments with - | None, [ - Expression(_, Array({Array.elements;}) as elems_exp); - Expression(callback_expr); - ] -> - (** - * From a static perspective (and as long as side-effects aren't - * considered in Flow), a requireLazy call can be viewed as an immediate - * call to require() for each of the modules, and then an immediate call - * to the requireLazy() callback with the results of each of the prior - * calls to require(). - * - * TODO: requireLazy() is FB-specific. Let's find a way to either - * generalize or toggle this only for the FB environment. - *) - - let element_to_module_tvar tvars = (function - | Some(Expression(source_loc, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.String module_name; - _; - })) -> - let module_tvar = require cx (source_loc, module_name) loc in - module_tvar::tvars - | _ -> - Flow.add_output cx Flow_error.( - EUnsupportedSyntax (loc, RequireLazyDynamicArgument) - ); - tvars - ) in - let rev_module_tvars = - List.fold_left element_to_module_tvar [] elements in - let module_tvars = List.rev_map (fun e -> Arg e) rev_module_tvars in - - let (_, callback_expr_t), _ as callback_ast = expression cx callback_expr in - let reason = mk_reason (RCustom "requireLazy() callback") loc in - let use_op = Op (FunCall { - op = mk_expression_reason ex; - fn = mk_expression_reason callback_expr; - args = []; - }) in - let _ = func_call cx reason ~use_op callback_expr_t None module_tvars in - - NullT.at loc, - [ Expression (expression cx elems_exp); Expression callback_ast ] - - | Some _, _ -> - List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; - Flow.add_output cx Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = false; - reason_arity = Reason.(locationless_reason (RFunction RNormal)); - expected_arity = 0; - }); - AnyT.at loc, Typed_ast.Expression.expression_or_spread_list_error - | _ -> - List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, RequireLazyDynamicArgument)); - AnyT.at loc, Typed_ast.Expression.expression_or_spread_list_error - ) in - (* TODO(vijayramamurthy) does "requireLazy" have a type? *) - let id_t = AnyFunT.at callee_loc in - ex, lhs_t, acc, ( - (loc, lhs_t), - call_ast { Call. - callee = (callee_loc, id_t), Identifier ((id_loc, id_t), name); - targs = None; - arguments; - } - ) - - | Call { - Call.callee = (callee_loc, Member { - Member._object = (_, Identifier (_, "Object") as obj); - property = Member.PropertyIdentifier (prop_loc, name); - computed; - } as expr); - targs; - arguments; - } -> - let (_, obj_t), _ as obj_ast = expression cx obj in - let lhs_t, targs, arguments = - static_method_call_Object cx loc callee_loc prop_loc expr obj_t name targs arguments - in - ex, lhs_t, acc, ( - (loc, lhs_t), - let t = AnyFunT.at callee_loc in - call_ast { Call. - (* TODO(vijayramamurthy): what is the type of `Object.name` ? *) - callee = (callee_loc, t), Member { Member. 
- _object = obj_ast; - property = Member.PropertyIdentifier ((prop_loc, t), name); - computed; - }; + let rec build_chain ~is_cond cx ((loc, e) as ex) acc = + let (opt_state, e') = + match e with + | OptionalCall + { OptionalCall.call = { Call.callee; targs = _; arguments = _ } as call; optional } -> + warn_or_ignore_optional_chaining optional cx loc; + begin + match callee with + | (_, Member _) + | (_, OptionalMember _) -> + Flow.add_output cx Error_message.(EOptionalChainingMethods loc) + | _ -> () + end; + let opt_state = + if optional then + NewChain + else + ContinueChain + in + (opt_state, Call call) + | OptionalMember { OptionalMember.member; optional } -> + warn_or_ignore_optional_chaining optional cx loc; + let opt_state = + if optional then + NewChain + else + ContinueChain + in + (opt_state, Member member) + | _ -> (NonOptional, e) + in + let call_ast call = + match opt_state with + | NewChain -> OptionalCall { OptionalCall.call; optional = true } + | ContinueChain -> OptionalCall { OptionalCall.call; optional = false } + | NonOptional -> Call call + in + let member_ast member = + match opt_state with + | NewChain -> OptionalMember { OptionalMember.member; optional = true } + | ContinueChain -> OptionalMember { OptionalMember.member; optional = false } + | NonOptional -> Member member + in + match e' with + | Call + { + Call.callee = + ( callee_loc, + Identifier + (id_loc, ({ Ast.Identifier.name = "require" as n; comments = _ } as name)) ); targs; arguments; } - ) - - | Call { - Call.callee = (callee_loc, Member { - Member._object = super_loc, Super; - property = Member.PropertyIdentifier (ploc, name); - computed; - }) as callee; - targs; - arguments; - } -> + when not (Env.local_scope_entry_exists n) -> + let targs = + Option.map targs (fun (args_loc, args) -> + (args_loc, snd (convert_tparam_instantiations cx SMap.empty args))) + in + let (lhs_t, arguments) = + match (targs, arguments) with + | ( None, + [ + Expression + ( ( source_loc, + Ast.Expression.Literal + { Ast.Literal.value = Ast.Literal.String module_name; _ } ) as lit_exp ); + ] ) -> + ( Import_export.require cx (source_loc, module_name) loc, + [Expression (expression cx lit_exp)] ) + | ( None, + [ + Expression + ( ( source_loc, + TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = module_name; _ }; + _; + } ); + ]; + expressions = []; + } ) as lit_exp ); + ] ) -> + ( Import_export.require cx (source_loc, module_name) loc, + [Expression (expression cx lit_exp)] ) + | (Some _, _) -> + List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; + Flow.add_output + cx + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = false; + reason_arity = Reason.(locationless_reason (RFunction RNormal)); + expected_arity = 0; + }); + (AnyT.at AnyError loc, List.map Tast_utils.error_mapper#expression_or_spread arguments) + | _ -> + List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; + let ignore_non_literals = Context.should_ignore_non_literal_requires cx in + if not ignore_non_literals then + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, RequireDynamicArgument)); + (AnyT.at AnyError loc, List.map Tast_utils.error_mapper#expression_or_spread arguments) + in + let id_t = bogus_trust () |> MixedT.at callee_loc in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + call_ast + { + Call.callee = ((callee_loc, id_t), Identifier ((id_loc, id_t), name)); + targs; + arguments; + } ) ) + | Call + { + Call.callee 
= + ( callee_loc, + Identifier + (id_loc, ({ Ast.Identifier.name = "requireLazy" as n; comments = _ } as name)) ); + targs; + arguments; + } + when not (Env.local_scope_entry_exists n) -> + let targs = + Option.map targs (fun (loc, args) -> + (loc, snd (convert_tparam_instantiations cx SMap.empty args))) + in + let (lhs_t, arguments) = + match (targs, arguments) with + | ( None, + [ + Expression ((_, Array { Array.elements; comments = _ }) as elems_exp); + Expression callback_expr; + ] ) -> + (* + * From a static perspective (and as long as side-effects aren't + * considered in Flow), a requireLazy call can be viewed as an immediate + * call to require() for each of the modules, and then an immediate call + * to the requireLazy() callback with the results of each of the prior + * calls to require(). + * + * TODO: requireLazy() is FB-specific. Let's find a way to either + * generalize or toggle this only for the FB environment. + *) + let element_to_module_tvar tvars = function + | Some + (Expression + ( source_loc, + Ast.Expression.Literal + { Ast.Literal.value = Ast.Literal.String module_name; _ } )) -> + let module_tvar = Import_export.require cx (source_loc, module_name) loc in + module_tvar :: tvars + | _ -> + Flow.add_output + cx + Error_message.(EUnsupportedSyntax (loc, RequireLazyDynamicArgument)); + tvars + in + let rev_module_tvars = List.fold_left element_to_module_tvar [] elements in + let module_tvars = List.rev_map (fun e -> Arg e) rev_module_tvars in + let (((_, callback_expr_t), _) as callback_ast) = expression cx callback_expr in + let reason = mk_reason (RCustom "requireLazy() callback") loc in + let use_op = + Op + (FunCall + { + op = mk_expression_reason ex; + fn = mk_expression_reason callback_expr; + args = []; + local = true; + }) + in + let _ = func_call cx reason ~use_op callback_expr_t None module_tvars in + ( NullT.at loc |> with_trust bogus_trust, + [Expression (expression cx elems_exp); Expression callback_ast] ) + | (Some _, _) -> + List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; + Flow.add_output + cx + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = false; + reason_arity = Reason.(locationless_reason (RFunction RNormal)); + expected_arity = 0; + }); + (AnyT.at AnyError loc, List.map Tast_utils.error_mapper#expression_or_spread arguments) + | _ -> + List.iter (fun arg -> ignore (expression_or_spread cx arg)) arguments; + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, RequireLazyDynamicArgument)); + (AnyT.at AnyError loc, List.map Tast_utils.error_mapper#expression_or_spread arguments) + in + let id_t = bogus_trust () |> MixedT.at callee_loc in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + call_ast + { + Call.callee = ((callee_loc, id_t), Identifier ((id_loc, id_t), name)); + targs; + arguments; + } ) ) + | Call + { + Call.callee = + ( callee_loc, + Member + { + Member._object = + (_, Identifier (_, { Ast.Identifier.name = "Object"; comments = _ })) as obj; + property = + Member.PropertyIdentifier + (prop_loc, ({ Ast.Identifier.name; comments = _ } as id)); + } ) as expr; + targs; + arguments; + } -> + let (((_, obj_t), _) as obj_ast) = expression cx obj in + let (lhs_t, targs, arguments) = + static_method_call_Object cx loc callee_loc prop_loc expr obj_t name targs arguments + in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + let t = bogus_trust () |> MixedT.at callee_loc in + call_ast + { + Call.callee (* TODO(vijayramamurthy): what is the type of `Object.name` ? 
*) = + ( (callee_loc, t), + Member + { + Member._object = obj_ast; + property = Member.PropertyIdentifier ((prop_loc, t), id); + } ); + targs; + arguments; + } ) ) + | Call + { + Call.callee = + ( callee_loc, + Member + { + Member._object = (super_loc, Super); + property = + Member.PropertyIdentifier + (ploc, ({ Ast.Identifier.name; comments = _ } as id)); + } ) as callee; + targs; + arguments; + } -> let reason = mk_reason (RMethodCall (Some name)) loc in let reason_lookup = mk_reason (RProperty (Some name)) callee_loc in let reason_prop = mk_reason (RProperty (Some name)) ploc in let super = super_ cx super_loc in - let id_info = "super", super, Type_table.Other in - Type_table.set_info super_loc id_info (Context.type_table cx); - let targts, targs = convert_targs cx targs in - let argts, argument_asts = arguments - |> List.map (expression_or_spread cx) - |> List.split in + let (targts, targs) = convert_targs cx targs in + let (argts, argument_asts) = + arguments |> Core_list.map ~f:(expression_or_spread cx) |> List.split + in Type_inference_hooks_js.dispatch_call_hook cx name ploc super; let prop_t = Tvar.mk cx reason_prop in - let lhs_t = Tvar.mk_where cx reason (fun t -> - let funtype = mk_methodcalltype super targts argts t in - let use_op = Op (FunCallMethod { - op = mk_expression_reason ex; - fn = mk_expression_reason callee; - prop = reason_prop; - args = mk_initial_arguments_reason arguments; - }) in - let id_info = name, prop_t, Type_table.PropertyAccess super in - Type_table.set_info ploc id_info (Context.type_table cx); - Flow.flow cx ( - super, - MethodT (use_op, reason, reason_lookup, Named (reason_prop, name), - funtype, Some prop_t) - ) - ) in - ex, lhs_t, acc, ( - (loc, lhs_t), - call_ast { Call. - callee = (callee_loc, prop_t), Member { Member. - _object = (super_loc, super), Super; - property = Member.PropertyIdentifier ((ploc, prop_t), name); - computed; - }; + let lhs_t = + Tvar.mk_where cx reason (fun t -> + let funtype = mk_methodcalltype super targts argts t in + let use_op = + Op + (FunCallMethod + { + op = mk_expression_reason ex; + fn = mk_expression_reason callee; + prop = reason_prop; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in + Flow.flow + cx + ( super, + MethodT + (use_op, reason, reason_lookup, Named (reason_prop, name), funtype, Some prop_t) + )) + in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + call_ast + { + Call.callee = + ( (callee_loc, prop_t), + Member + { + Member._object = ((super_loc, super), Super); + property = Member.PropertyIdentifier ((ploc, prop_t), id); + } ); + targs; + arguments = argument_asts; + } ) ) + | Call + { + Call.callee = (lookup_loc, Member { Member._object; property }) as callee; targs; - arguments = argument_asts; - } - ) - - | Call { - Call.callee = (lookup_loc, Member { Member. 
- _object; - property; - computed; - }) as callee; - targs; - arguments; - } -> + arguments; + } -> (* method call *) - let (_, ot), _ as _object = expression cx _object in - let targts, targs = convert_targs cx targs in - let argts, argument_asts = arguments - |> List.map (expression_or_spread cx) - |> List.split in - let (prop_t, lhs_t), property = (match property with - | Member.PropertyPrivateName (prop_loc, (name_loc, name)) -> - let reason_call = mk_reason (RMethodCall (Some name)) loc in - let use_op = Op (FunCallMethod { - op = mk_expression_reason ex; - fn = mk_expression_reason callee; - prop = mk_reason (RProperty (Some name)) prop_loc; - args = mk_initial_arguments_reason arguments; - }) in - method_call cx reason_call ~use_op prop_loc (callee, ot, name) targts argts, - Member.PropertyPrivateName (prop_loc, (name_loc, name)) - | Member.PropertyIdentifier (prop_loc, name) -> - let reason_call = mk_reason (RMethodCall (Some name)) loc in - let use_op = Op (FunCallMethod { - op = mk_expression_reason ex; - fn = mk_expression_reason callee; - prop = mk_reason (RProperty (Some name)) prop_loc; - args = mk_initial_arguments_reason arguments; - }) in - let (prop_t, _) as x = - method_call cx reason_call ~use_op prop_loc (callee, ot, name) targts argts in - x, Member.PropertyIdentifier ((prop_loc, prop_t), name) - | Member.PropertyExpression expr -> - let reason_call = mk_reason (RMethodCall None) loc in - let reason_lookup = mk_reason (RProperty None) lookup_loc in - let (_, elem_t), _ as expr = expression cx expr in - (* TODO: (pvekris) T33113417 make type more precise *) - (AnyFunT.at lookup_loc, - Tvar.mk_where cx reason_call (fun t -> - let frame = Env.peek_frame () in - let funtype = mk_methodcalltype ot targts argts t ~frame in - Flow.flow cx (ot, - CallElemT (reason_call, reason_lookup, elem_t, funtype)) - )), - Member.PropertyExpression expr - ) in - ex, lhs_t, acc, ( - (loc, lhs_t), - call_ast { Call. - callee = (lookup_loc, prop_t), Member { Member. 
- _object; - property; - computed; - }; - targs; - arguments = argument_asts; - } - ) - - | Call { - Call.callee = (super_loc, Super) as callee; - targs; - arguments; - } -> - let targts, targs = convert_targs cx targs in - let argts, argument_asts = arguments - |> List.map (expression_or_spread cx) - |> List.split in + let (((_, ot), _) as _object) = expression cx _object in + let (targts, targs) = convert_targs cx targs in + let (argts, argument_asts) = + arguments |> Core_list.map ~f:(expression_or_spread cx) |> List.split + in + let ((prop_t, lhs_t), property) = + match property with + | Member.PropertyPrivateName + (prop_loc, (name_loc, ({ Ast.Identifier.name; comments = _ } as id))) -> + let reason_call = mk_reason (RMethodCall (Some name)) loc in + let use_op = + Op + (FunCallMethod + { + op = mk_expression_reason ex; + fn = mk_expression_reason callee; + prop = mk_reason (RProperty (Some name)) prop_loc; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in + ( method_call cx reason_call ~use_op prop_loc (callee, ot, name) targts argts, + Member.PropertyPrivateName (prop_loc, (name_loc, id)) ) + | Member.PropertyIdentifier (prop_loc, ({ Ast.Identifier.name; comments = _ } as id)) -> + let reason_call = mk_reason (RMethodCall (Some name)) loc in + let use_op = + Op + (FunCallMethod + { + op = mk_expression_reason ex; + fn = mk_expression_reason callee; + prop = mk_reason (RProperty (Some name)) prop_loc; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in + let ((prop_t, _) as x) = + method_call cx reason_call ~use_op prop_loc (callee, ot, name) targts argts + in + (x, Member.PropertyIdentifier ((prop_loc, prop_t), id)) + | Member.PropertyExpression expr -> + let reason_call = mk_reason (RMethodCall None) loc in + let reason_lookup = mk_reason (RProperty None) lookup_loc in + let (((_, elem_t), _) as expr) = expression cx expr in + ( ( bogus_trust () |> MixedT.at lookup_loc, + Tvar.mk_where cx reason_call (fun t -> + let frame = Env.peek_frame () in + let funtype = mk_methodcalltype ot targts argts t ~frame in + Flow.flow cx (ot, CallElemT (reason_call, reason_lookup, elem_t, funtype))) ), + Member.PropertyExpression expr ) + in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + call_ast + { + Call.callee = ((lookup_loc, prop_t), Member { Member._object; property }); + targs; + arguments = argument_asts; + } ) ) + | Call { Call.callee = (super_loc, Super) as callee; targs; arguments } -> + let (targts, targs) = convert_targs cx targs in + let (argts, argument_asts) = + arguments |> Core_list.map ~f:(expression_or_spread cx) |> List.split + in let reason = mk_reason (RFunctionCall RSuper) loc in - (* switch back env entries for this and super from undefined *) define_internal cx reason "this"; define_internal cx reason "super"; let this = this_ cx loc in let super = super_ cx super_loc in - let id_info = "super", super, Type_table.Other in - Type_table.set_info super_loc id_info (Context.type_table cx); let super_reason = reason_of_t super in - let lhs_t = Tvar.mk_where cx reason (fun t -> - let funtype = mk_methodcalltype this targts argts t in - let propref = Named (super_reason, "constructor") in - let use_op = Op (FunCall { - op = mk_expression_reason ex; - fn = mk_expression_reason callee; - args = mk_initial_arguments_reason arguments; - }) in - Flow.flow cx (super, MethodT (use_op, reason, super_reason, propref, funtype, None)) - ) in - ex, lhs_t, acc, ( - (loc, lhs_t), - call_ast { Call. 
- callee = (super_loc, super), Super; - targs; - arguments = argument_asts; - } - ) - - (******************************************) - (* See ~/www/static_upstream/core/ *) - - | Call { - Call.callee = (_, Identifier (_, "invariant")) as callee; - targs; - arguments; - } -> + let lhs_t = + Tvar.mk_where cx reason (fun t -> + let funtype = mk_methodcalltype this targts argts t in + let propref = Named (super_reason, "constructor") in + let use_op = + Op + (FunCall + { + op = mk_expression_reason ex; + fn = mk_expression_reason callee; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in + Flow.flow cx (super, MethodT (use_op, reason, super_reason, propref, funtype, None))) + in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + call_ast + { Call.callee = ((super_loc, super), Super); targs; arguments = argument_asts } ) ) + (******************************************) + (* See ~/www/static_upstream/core/ *) + | Call { Call.callee; targs; arguments } when is_call_to_invariant callee -> (* TODO: require *) - let (_, callee_t), _ as callee = expression cx callee in - let targs = Option.map targs (fun (loc, args) -> - loc, List.map (Anno.convert cx SMap.empty) args - ) in + let (((_, callee_t), _) as callee) = expression cx callee in + let targs = + Option.map targs (fun (loc, args) -> + (loc, snd (convert_tparam_instantiations cx SMap.empty args))) + in (* NOTE: if an invariant expression throws abnormal control flow, the entire statement it was in is reconstructed in the typed AST as an expression statement containing just the invariant call. This should be ok for the most part since this is the most common way to call invariant. It's worth experimenting with whether people use invariant in other ways, and if not, restricting it to this pattern. *) - let arguments = match targs, arguments with - | None, [] -> + let arguments = + match (targs, arguments) with + | (None, []) -> (* invariant() is treated like a throw *) Env.reset_current_activation loc; Abnormal.save Abnormal.Throw; - Abnormal.throw_stmt_control_flow_exception - (loc, Ast.Statement.Expression { Ast.Statement.Expression. - expression = (loc, VoidT.at loc), Call { Call. - callee = callee; - targs; - arguments = []; - }; - directive = None; - }) + Abnormal.throw_expr_control_flow_exception + loc + ( (loc, VoidT.at loc |> with_trust bogus_trust), + Ast.Expression.Call { Call.callee; targs; arguments = [] } ) Abnormal.Throw - | None, (Expression (_, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.Boolean false; _ - } as lit_exp))::arguments -> + | ( None, + Expression + ( (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.Boolean false; _ }) + as lit_exp ) + :: arguments ) -> (* invariant(false, ...) is treated like a throw *) let arguments = - List.map (Fn.compose snd (expression_or_spread cx)) arguments in + Core_list.map ~f:(Fn.compose snd (expression_or_spread cx)) arguments + in Env.reset_current_activation loc; Abnormal.save Abnormal.Throw; let lit_exp = expression cx lit_exp in - Abnormal.throw_stmt_control_flow_exception - (loc, Ast.Statement.Expression { Ast.Statement.Expression. - expression = (loc, VoidT.at loc), Call { Call. 
- callee = callee; - targs; - arguments = Expression lit_exp :: arguments; - }; - directive = None; - }) + Abnormal.throw_expr_control_flow_exception + loc + ( (loc, VoidT.at loc |> with_trust bogus_trust), + Ast.Expression.Call + { Call.callee; targs; arguments = Expression lit_exp :: arguments } ) Abnormal.Throw - | None, (Expression cond)::arguments -> - let arguments = List.map (Fn.compose snd (expression_or_spread cx)) arguments in - let ((_, cond_t), _ as cond), preds, _, xtypes = predicates_of_condition cx cond in + | (None, Expression cond :: arguments) -> + let arguments = + Core_list.map ~f:(Fn.compose snd (expression_or_spread cx)) arguments + in + let ((((_, cond_t), _) as cond), preds, _, xtypes) = predicates_of_condition cx cond in let _ = Env.refine_with_preds cx loc preds xtypes in let reason = mk_reason (RFunctionCall (desc_of_t callee_t)) loc in Flow.flow cx (cond_t, InvariantT reason); Expression cond :: arguments - | _, (Spread _)::_ -> - ignore (List.map (expression_or_spread cx) arguments); - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, InvariantSpreadArgument)); - Typed_ast.Expression.expression_or_spread_list_error - | Some _, _ -> - ignore (List.map (expression_or_spread cx) arguments); - Flow.add_output cx Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = false; - reason_arity = Reason.(locationless_reason (RFunction RNormal)); - expected_arity = 0; - }); - Typed_ast.Expression.expression_or_spread_list_error + | (_, Spread _ :: _) -> + ignore (Core_list.map ~f:(expression_or_spread cx) arguments); + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, InvariantSpreadArgument)); + List.map Tast_utils.error_mapper#expression_or_spread arguments + | (Some _, _) -> + ignore (Core_list.map ~f:(expression_or_spread cx) arguments); + Flow.add_output + cx + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = false; + reason_arity = Reason.(locationless_reason (RFunction RNormal)); + expected_arity = 0; + }); + List.map Tast_utils.error_mapper#expression_or_spread arguments in - let lhs_t = VoidT.at loc in - ex, lhs_t, acc, ((loc, lhs_t), call_ast { Call.callee; targs; arguments; }) - - | Call { Call.callee; targs; arguments } -> - begin match callee with - | _, OptionalMember _ -> - Flow.add_output cx Flow_error.(EOptionalChainingMethods loc) - | _ -> () + let lhs_t = VoidT.at loc |> with_trust bogus_trust in + (ex, lhs_t, acc, ((loc, lhs_t), call_ast { Call.callee; targs; arguments })) + | Call { Call.callee; targs; arguments } -> + begin + match callee with + | (_, OptionalMember _) -> + Flow.add_output cx Error_message.(EOptionalChainingMethods loc) + | _ -> () end; - let targts, targs = convert_targs cx targs in - let argts, argument_asts = arguments - |> List.map (expression_or_spread cx) - |> List.split in - let use_op = Op (FunCall { - op = mk_expression_reason ex; - fn = mk_expression_reason callee; - args = mk_initial_arguments_reason arguments; - }) in + let (targts, targs) = convert_targs cx targs in + let (argts, argument_asts) = + arguments |> Core_list.map ~f:(expression_or_spread cx) |> List.split + in + let use_op = + Op + (FunCall + { + op = mk_expression_reason ex; + fn = mk_expression_reason callee; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in let exp callee = call_ast { Call.callee; targs; arguments = argument_asts } in - begin match opt_state with - | NonOptional -> - let (_, f), _ as callee = expression cx callee in - let reason = mk_reason (RFunctionCall (desc_of_t f)) loc in - 
let lhs_t = func_call cx reason ~use_op f targts argts in - ex, lhs_t, acc, ((loc, lhs_t), exp callee) - | NewChain -> - let (_, lhs_t), _ as calleee = expression cx callee in - let reason = mk_reason (RFunctionCall (desc_of_t lhs_t)) loc in - let tout = Tvar.mk cx reason in - let opt_use = func_call_opt_use reason ~use_op targts argts in - callee, lhs_t, ref (loc, opt_use, tout) :: acc, ((loc, tout), exp calleee) - | ContinueChain -> - (* Hacky reason handling *) - let reason = mk_reason ROptionalChain loc in - let tout = Tvar.mk cx reason in - let opt_use = func_call_opt_use reason ~use_op targts argts in - let step = ref (loc, opt_use, tout) in - let lhs, lhs_t, chain, ((_, f), _ as callee) = - build_chain ~is_cond cx callee (step :: acc) in - let reason = replace_reason_const (RFunctionCall (desc_of_t f)) reason in - let tout = mod_reason_of_t (Fn.const reason) tout in - let opt_use = mod_reason_of_opt_use_t (Fn.const reason) opt_use in - step := (loc, opt_use, tout); - lhs, lhs_t, chain, ((loc, tout), exp callee) + begin + match opt_state with + | NonOptional -> + let (((_, f), _) as callee) = expression cx callee in + let reason = mk_reason (RFunctionCall (desc_of_t f)) loc in + let lhs_t = func_call cx reason ~use_op f targts argts in + (ex, lhs_t, acc, ((loc, lhs_t), exp callee)) + | NewChain -> + let (((_, lhs_t), _) as calleee) = expression cx callee in + let reason = mk_reason (RFunctionCall (desc_of_t lhs_t)) loc in + let tout = Tvar.mk cx reason in + let opt_use = func_call_opt_use reason ~use_op targts argts in + (callee, lhs_t, ref (loc, opt_use, tout) :: acc, ((loc, tout), exp calleee)) + | ContinueChain -> + (* Hacky reason handling *) + let reason = mk_reason ROptionalChain loc in + let tout = Tvar.mk cx reason in + let opt_use = func_call_opt_use reason ~use_op targts argts in + let step = ref (loc, opt_use, tout) in + let (lhs, lhs_t, chain, (((_, f), _) as callee)) = + build_chain ~is_cond cx callee (step :: acc) + in + let reason = replace_desc_reason (RFunctionCall (desc_of_t f)) reason in + let tout = mod_reason_of_t (Fn.const reason) tout in + let opt_use = mod_reason_of_opt_use_t (Fn.const reason) opt_use in + step := (loc, opt_use, tout); + (lhs, lhs_t, chain, ((loc, tout), exp callee)) end - - | Member { - Member._object; - property = Member.PropertyExpression index; - computed; - } -> + | Member { Member._object; property = Member.PropertyExpression index } -> let reason = mk_reason (RProperty None) loc in - let (_, tind), _ as index = expression cx index in + let (((_, tind), _) as index) = expression cx index in let use_op = Op (GetProperty (mk_expression_reason ex)) in let opt_use = OptGetElemT (use_op, reason, tind) in - begin match opt_state with - | NonOptional -> - let (_, tobj), _ as _object_ast = expression cx _object in - let lhs_t = (match Refinement.get cx (loc, e) loc with - | Some t -> t - | None -> - Tvar.mk_where cx reason (fun t -> - let use = apply_opt_use opt_use t in - Flow.flow cx (tobj, use) - ) - ) in - ex, lhs_t, acc, ( - (loc, lhs_t), - member_ast { Member. - _object = _object_ast; - property = Member.PropertyExpression index; - computed; - } - ) - | NewChain -> - let tout = Tvar.mk cx reason in - let (_, lhs_t), _ as _object_ast = expression cx _object in - _object, lhs_t, ref (loc, opt_use, tout) :: acc, ( - (loc, tout), - member_ast { Member. 
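+          (* For a computed access inside an optional chain, e.g. `obj?.arr[i]`,
+             the inner `obj?.arr` link is a NewChain, the trailing `[i]` is a
+             ContinueChain link, and a plain `obj.arr[i]` stays NonOptional;
+             the three arms below thread the element read (OptGetElemT)
+             through the chain accordingly. *)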
- _object = _object_ast; - property = Member.PropertyExpression index; - computed; - } - ) - | ContinueChain -> - let tout = Tvar.mk cx reason in - let lhs, lhs_t, chain, _object_ast = - build_chain ~is_cond cx _object (ref (loc, opt_use, tout) :: acc) in - lhs, lhs_t, chain, ( - (loc, tout), - member_ast { Member. - _object = _object_ast; - property = Member.PropertyExpression index; - computed; - } - ) + begin + match opt_state with + | NonOptional -> + let (((_, tobj), _) as _object_ast) = expression cx _object in + let lhs_t = + match Refinement.get cx (loc, e) loc with + | Some t -> t + | None -> + Tvar.mk_where cx reason (fun t -> + let use = apply_opt_use opt_use t in + Flow.flow cx (tobj, use)) + in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + member_ast + { Member._object = _object_ast; property = Member.PropertyExpression index } ) ) + | NewChain -> + let tout = Tvar.mk cx reason in + let (((_, lhs_t), _) as _object_ast) = expression cx _object in + ( _object, + lhs_t, + ref (loc, opt_use, tout) :: acc, + ( (loc, tout), + member_ast + { Member._object = _object_ast; property = Member.PropertyExpression index } ) ) + | ContinueChain -> + let tout = Tvar.mk cx reason in + let (lhs, lhs_t, chain, _object_ast) = + build_chain ~is_cond cx _object (ref (loc, opt_use, tout) :: acc) + in + ( lhs, + lhs_t, + chain, + ( (loc, tout), + member_ast + { Member._object = _object_ast; property = Member.PropertyExpression index } ) ) end - - | Member { - Member._object = object_loc, Identifier (id_loc, "module"); - property = Member.PropertyIdentifier (ploc, ("exports" as name)); - computed; - } -> let lhs_t = get_module_exports cx loc in - ex, lhs_t, acc, ( - (loc, lhs_t), - (* TODO(vijayramamurthy) like in assignment, revisit the type of `module` *) - let t = AnyObjT.at object_loc in - let property = Member.PropertyIdentifier ((ploc, t), name) in - member_ast { Member. - _object = (object_loc, t), Identifier ((id_loc, t), name); - property; - computed; - } - ) - - | Member { - Member._object = - object_loc, Identifier (id_loc, ("ReactGraphQL" | "ReactGraphQLLegacy")); - property = Member.PropertyIdentifier (ploc, ("Mixin" as name)); - computed; - } -> + | Member + { + Member._object = + ( object_loc, + Identifier (id_loc, ({ Ast.Identifier.name = "module"; comments = _ } as id_name)) + ); + property = + Member.PropertyIdentifier + (ploc, ({ Ast.Identifier.name = "exports"; comments = _ } as exports_name)); + } -> + let lhs_t = Import_export.get_module_exports cx loc in + let module_reason = mk_reason (RCustom "module") object_loc in + let module_t = MixedT.why module_reason |> with_trust bogus_trust in + let _object = + ((object_loc, module_t), Ast.Expression.Identifier ((id_loc, module_t), id_name)) + in + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + member_ast + { + Member._object; + property = Member.PropertyIdentifier ((ploc, lhs_t), exports_name); + } ) ) + | Member + { + Member._object = + ( object_loc, + Identifier + ( id_loc, + { Ast.Identifier.name = "ReactGraphQL" | "ReactGraphQLLegacy"; comments = _ } + ) ); + property = + Member.PropertyIdentifier + (ploc, ({ Ast.Identifier.name = "Mixin"; comments = _ } as name)); + } -> let reason = mk_reason (RCustom "ReactGraphQLMixin") loc in let lhs_t = Flow.get_builtin cx "ReactGraphQLMixin" reason in - ex, lhs_t, acc, ( - (loc, lhs_t), - (* TODO(vijayramamurthy) what's the type of "ReactGraphQL"? *) - let t = AnyObjT.at object_loc in - let property = Member.PropertyIdentifier ((ploc, t), name) in - member_ast { Member. 
- _object = (object_loc, t), Identifier ((id_loc, t), name); - property; - computed; - } - ) - - | Member { - Member._object = super_loc, Super; - property = Member.PropertyIdentifier (ploc, name); - computed; - } -> + ( ex, + lhs_t, + acc, + ( (loc, lhs_t), + (* TODO(vijayramamurthy) what's the type of "ReactGraphQL"? *) + let t = AnyT.at Untyped object_loc in + let property = Member.PropertyIdentifier ((ploc, t), name) in + member_ast + { Member._object = ((object_loc, t), Identifier ((id_loc, t), name)); property } ) ) + | Member + { + Member._object = (super_loc, Super); + property = + Member.PropertyIdentifier (ploc, ({ Ast.Identifier.name; comments = _ } as id)); + } -> let super = super_ cx super_loc in - let id_info = "super", super, Type_table.Other in - Type_table.set_info super_loc id_info (Context.type_table cx); - let expr_reason = mk_reason (RProperty (Some name)) loc in - let lhs_t = (match Refinement.get cx (loc, e) loc with - | Some t -> t - | None -> - let prop_reason = mk_reason (RProperty (Some name)) ploc in - if Type_inference_hooks_js.dispatch_member_hook cx name ploc super - then AnyT.at ploc - else Tvar.mk_where cx expr_reason (fun tvar -> - let use_op = Op (GetProperty (mk_expression_reason ex)) in - Flow.flow cx ( - super, GetPropT (use_op, expr_reason, Named (prop_reason, name), tvar) - ) - ) - ) - |> begin fun t -> - let id_info = name, t, Type_table.PropertyAccess super in - Type_table.set_info ploc id_info (Context.type_table cx); - t - end in - let property = Member.PropertyIdentifier ((ploc, super), name) in - ex, lhs_t, acc, ( - (loc, lhs_t), - member_ast { Member. - _object = (super_loc, super), Super; - property; - computed; - } - ) - - | Member { - Member._object; - property = Member.PropertyIdentifier (ploc, name); - computed; - } -> let expr_reason = mk_reason (RProperty (Some name)) loc in - let prop_reason = mk_reason (RProperty (Some name)) ploc in - let use_op = Op (GetProperty (mk_expression_reason ex)) in - begin match opt_state with - | NonOptional -> - let (_, tobj), _ as _object_ast = expression cx _object in - let lhs_t = if Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj - then AnyT.at ploc - else begin match Refinement.get cx (loc, e) loc with + let lhs_t = + match Refinement.get cx (loc, e) loc with | Some t -> t | None -> - get_prop ~is_cond cx expr_reason ~use_op tobj (prop_reason, name) - end in - let property = Member.PropertyIdentifier ((ploc, lhs_t), name) in - ex, lhs_t, acc, tobj, ( - (loc, lhs_t), - member_ast { Member._object = _object_ast; property; computed; } - ) - | NewChain -> - let (_, lhs_t), _ as _object_ast = expression cx _object in - let tout = if Type_inference_hooks_js.dispatch_member_hook cx name ploc lhs_t - then AnyT.at ploc else Tvar.mk cx expr_reason in - let opt_use = get_prop_opt_use ~is_cond expr_reason ~use_op (prop_reason, name) in - let property = Member.PropertyIdentifier ((ploc, tout), name) in - _object, lhs_t, ref (loc, opt_use, tout) :: acc, lhs_t, ( - (loc, tout), - member_ast { Member._object = _object_ast; property; computed; } - ) - | ContinueChain -> - let tout = AnyT.at ploc in - let opt_use = get_prop_opt_use ~is_cond expr_reason ~use_op (prop_reason, name) in - let step = ref (loc, opt_use, tout) in - let lhs, lhs_t, chain, ((_, tobj), _ as _object_ast) = - build_chain ~is_cond cx _object (step :: acc) in - let tout = if (Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj) - then tout - else let tout = Tvar.mk cx expr_reason in step := (loc, opt_use, tout); tout - 
in - let property = Member.PropertyIdentifier ((ploc, tout), name) in - lhs, lhs_t, chain, tobj, ( - (loc, tout), - member_ast { Member._object = _object_ast; property; computed; } - ) - end - |> begin fun (lhs, lhs_t, chain, tobj, ((_, tout), _ as ast)) -> - let id_info = name, tout, Type_table.PropertyAccess tobj in - Type_table.set_info ploc id_info (Context.type_table cx); - lhs, lhs_t, chain, ast + let prop_reason = mk_reason (RProperty (Some name)) ploc in + if Type_inference_hooks_js.dispatch_member_hook cx name ploc super then + Unsoundness.at InferenceHooks ploc + else + Tvar.mk_where cx expr_reason (fun tvar -> + let use_op = Op (GetProperty (mk_expression_reason ex)) in + Flow.flow + cx + (super, GetPropT (use_op, expr_reason, Named (prop_reason, name), tvar))) + in + let property = Member.PropertyIdentifier ((ploc, super), id) in + ( ex, + lhs_t, + acc, + ((loc, lhs_t), member_ast { Member._object = ((super_loc, super), Super); property }) ) + | Member + { + Member._object; + property = + Member.PropertyIdentifier (ploc, ({ Ast.Identifier.name; comments = _ } as id)); + } -> + let expr_reason = mk_expression_reason ex in + let prop_reason = mk_reason (RProperty (Some name)) ploc in + let use_op = Op (GetProperty expr_reason) in + begin + match opt_state with + | NonOptional -> + let (((_, tobj), _) as _object_ast) = expression cx _object in + let lhs_t = + if Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj then + Unsoundness.at InferenceHooks ploc + else + match Refinement.get cx (loc, e) loc with + | Some t -> t + | None -> get_prop ~is_cond cx expr_reason ~use_op tobj (prop_reason, name) + in + let property = Member.PropertyIdentifier ((ploc, lhs_t), id) in + (ex, lhs_t, acc, ((loc, lhs_t), member_ast { Member._object = _object_ast; property })) + | NewChain -> + let (((_, lhs_t), _) as _object_ast) = expression cx _object in + let tout = + if Type_inference_hooks_js.dispatch_member_hook cx name ploc lhs_t then + Unsoundness.at InferenceHooks ploc + else + Tvar.mk cx expr_reason + in + let opt_use = get_prop_opt_use ~is_cond expr_reason ~use_op (prop_reason, name) in + let property = Member.PropertyIdentifier ((ploc, tout), id) in + ( _object, + lhs_t, + ref (loc, opt_use, tout) :: acc, + ((loc, tout), member_ast { Member._object = _object_ast; property }) ) + | ContinueChain -> + let tout = bogus_trust () |> MixedT.at ploc in + let opt_use = get_prop_opt_use ~is_cond expr_reason ~use_op (prop_reason, name) in + let step = ref (loc, opt_use, tout) in + let (lhs, lhs_t, chain, (((_, tobj), _) as _object_ast)) = + build_chain ~is_cond cx _object (step :: acc) + in + let tout = + if Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj then + tout + else + let tout = Tvar.mk cx expr_reason in + step := (loc, opt_use, tout); + tout + in + let property = Member.PropertyIdentifier ((ploc, tout), id) in + ( lhs, + lhs_t, + chain, + ((loc, tout), member_ast { Member._object = _object_ast; property }) ) end - - | Member { - Member._object; - property = Member.PropertyPrivateName (ploc, (_, name)) as property; - computed; - } -> + | Member + { + Member._object; + property = + Member.PropertyPrivateName (ploc, (_, { Ast.Identifier.name; comments = _ })) as + property; + } -> let expr_reason = mk_reason (RPrivateProperty name) loc in let use_op = Op (GetProperty (mk_expression_reason ex)) in - begin match opt_state with - | NonOptional -> - let (_, tobj), _ as _object_ast = expression cx _object in - let lhs_t = ( - match Refinement.get cx (loc, e) loc with - | 
Some t -> t - | None -> - if Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj - then AnyT.at ploc - else get_private_field cx expr_reason ~use_op tobj name - ) in - ex, lhs_t, acc, ( - (loc, lhs_t), - member_ast { Member._object = _object_ast; property; computed; } - ) - | NewChain -> - let (_, lhs_t), _ as _object_ast = expression cx _object in - let tout = if Type_inference_hooks_js.dispatch_member_hook cx name ploc lhs_t - then AnyT.at ploc else Tvar.mk cx expr_reason in - let opt_use = get_private_field_opt_use expr_reason ~use_op name in - _object, lhs_t, ref (loc, opt_use, tout) :: acc, ( - (loc, tout), - member_ast { Member._object = _object_ast; property; computed; } - ) - | ContinueChain -> - let tout = AnyT.at ploc in - let opt_use = get_private_field_opt_use expr_reason ~use_op name in - let step = ref (loc, opt_use, tout) in - let lhs, lhs_t, chain, ((_, tobj), _ as _object_ast) = - build_chain ~is_cond cx _object (step :: acc) in - let tout = if (Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj) - then tout - else let tout = Tvar.mk cx expr_reason in step := (loc, opt_use, tout); tout - in - lhs, lhs_t, chain, ( - (loc, tout), - member_ast { Member._object = _object_ast; property; computed; } - ) - end - |> begin fun (lhs, lhs_t, chain, ((_, t), _ as ast)) -> - (* TODO use PropertyAccess *) - let id_info = name, t, Type_table.Other in - Type_table.set_info ploc id_info (Context.type_table cx); - lhs, lhs_t, chain, ast + begin + match opt_state with + | NonOptional -> + let (((_, tobj), _) as _object_ast) = expression cx _object in + let lhs_t = + match Refinement.get cx (loc, e) loc with + | Some t -> t + | None -> + if Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj then + Unsoundness.at InferenceHooks ploc + else + get_private_field cx expr_reason ~use_op tobj name + in + (ex, lhs_t, acc, ((loc, lhs_t), member_ast { Member._object = _object_ast; property })) + | NewChain -> + let (((_, lhs_t), _) as _object_ast) = expression cx _object in + let tout = + if Type_inference_hooks_js.dispatch_member_hook cx name ploc lhs_t then + Unsoundness.at InferenceHooks ploc + else + Tvar.mk cx expr_reason + in + let opt_use = get_private_field_opt_use expr_reason ~use_op name in + ( _object, + lhs_t, + ref (loc, opt_use, tout) :: acc, + ((loc, tout), member_ast { Member._object = _object_ast; property }) ) + | ContinueChain -> + let tout = bogus_trust () |> MixedT.at ploc in + let opt_use = get_private_field_opt_use expr_reason ~use_op name in + let step = ref (loc, opt_use, tout) in + let (lhs, lhs_t, chain, (((_, tobj), _) as _object_ast)) = + build_chain ~is_cond cx _object (step :: acc) + in + let tout = + if Type_inference_hooks_js.dispatch_member_hook cx name ploc tobj then + tout + else + let tout = Tvar.mk cx expr_reason in + step := (loc, opt_use, tout); + tout + in + ( lhs, + lhs_t, + chain, + ((loc, tout), member_ast { Member._object = _object_ast; property }) ) end - | _ -> - let (_, lhs_t), _ as ast = expression cx ex in - ex, lhs_t, acc, ast + | _ -> + let (((_, lhs_t), _) as ast) = expression cx ex in + (ex, lhs_t, acc, ast) in - - fun ~is_cond cx ex -> - let lhs, lhs_t, chain, ast = build_chain ~is_cond cx ex [] in - begin match chain with - | [] -> () - | hd :: tl -> - let (hd_loc, _, _) = !hd in - let chain = Nel.map (fun step -> - let (loc, use, t) = !step in - Type_table.set (Context.type_table cx) loc t; - use, t - ) (hd, tl) in - let reason = mk_reason ROptionalChain hd_loc in - let lhs_reason = mk_expression_reason lhs in - 
Flow.flow cx (lhs_t, OptionalChainT (reason, lhs_reason, chain)); - end; - ast + fun ~is_cond cx ex -> + let (lhs, lhs_t, chain, ast) = build_chain ~is_cond cx ex [] in + begin + match chain with + | [] -> () + | hd :: tl -> + let (hd_loc, _, _) = !hd in + let chain = + Nel.map + (fun step -> + let (_, use, t) = !step in + (use, t)) + (hd, tl) + in + let reason = mk_reason ROptionalChain hd_loc in + let lhs_reason = mk_expression_reason lhs in + Flow.flow cx (lhs_t, OptionalChainT (reason, lhs_reason, chain)) + end; + ast) (* Handles function calls that appear in conditional contexts. The main distinction from the case handled in `expression_` is that we also return @@ -4050,10 +4250,8 @@ and subscript = potenially the keys that correspond to the supplied arguments. *) and predicated_call_expression cx loc call = - let f, argks, argts, t, call = - predicated_call_expression_ cx loc call in - Type_table.set (Context.type_table cx) loc t; - f, argks, argts, t, call + let (f, argks, argts, t, call) = predicated_call_expression_ cx loc call in + (f, argks, argts, t, call) (* Returns a quadruple containing: - the function type @@ -4062,27 +4260,38 @@ and predicated_call_expression cx loc call = - the returned type *) and predicated_call_expression_ cx loc { Ast.Expression.Call.callee; targs; arguments } = - let targts, targ_asts = convert_targs cx targs in - let args = arguments |> List.map (function - | Ast.Expression.Expression e -> e - | _ -> Utils_js.assert_false "No spreads should reach here" - ) in - let (_, f), _ as callee_ast = expression cx callee in + let (targts, targ_asts) = convert_targs cx targs in + let args = + arguments + |> Core_list.map ~f:(function + | Ast.Expression.Expression e -> e + | _ -> Utils_js.assert_false "No spreads should reach here") + in + let (((_, f), _) as callee_ast) = expression cx callee in let reason = mk_reason (RFunctionCall (desc_of_t f)) loc in - let arg_asts = List.map (expression cx) args in - let argts = List.map snd_fst arg_asts in - let argks = List.map Refinement.key args in - let use_op = Op (FunCall { - op = reason; - fn = mk_expression_reason callee; - args = mk_initial_arguments_reason arguments; - }) in - let t = func_call cx reason ~use_op f targts (List.map (fun e -> Arg e) argts) in - f, argks, argts, t, { Ast.Expression.Call. - callee = callee_ast; - targs = targ_asts; - arguments = List.map (fun e -> Ast.Expression.Expression e) arg_asts; - } + let arg_asts = Core_list.map ~f:(expression cx) args in + let argts = Core_list.map ~f:snd_fst arg_asts in + let argks = Core_list.map ~f:Refinement.key args in + let use_op = + Op + (FunCall + { + op = reason; + fn = mk_expression_reason callee; + args = mk_initial_arguments_reason arguments; + local = true; + }) + in + let t = func_call cx reason ~use_op f targts (Core_list.map ~f:(fun e -> Arg e) argts) in + ( f, + argks, + argts, + t, + { + Ast.Expression.Call.callee = callee_ast; + targs = targ_asts; + arguments = Core_list.map ~f:(fun e -> Ast.Expression.Expression e) arg_asts; + } ) (* We assume that constructor functions return void and constructions return objects. 
@@ -4092,65 +4301,54 @@ and predicated_call_expression_ cx loc { Ast.Expression.Call.callee; targs; argu *) and new_call cx reason ~use_op class_ targs args = Tvar.mk_where cx reason (fun t -> - Flow.flow cx (class_, ConstructorT (use_op, reason, targs, args, t)); - ) + Flow.flow cx (class_, ConstructorT (use_op, reason, targs, args, t))) -and func_call_opt_use reason ~use_op ?(call_strict_arity=true) targts argts = +and func_call_opt_use reason ~use_op ?(call_strict_arity = true) targts argts = Env.havoc_heap_refinements (); let frame = Env.peek_frame () in let opt_app = mk_opt_functioncalltype reason targts argts frame call_strict_arity in OptCallT (use_op, reason, opt_app) -and func_call cx reason ~use_op ?(call_strict_arity=true) func_t targts argts = +and func_call cx reason ~use_op ?(call_strict_arity = true) func_t targts argts = let opt_use = func_call_opt_use reason ~use_op ~call_strict_arity targts argts in - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (func_t, apply_opt_use opt_use t) - ) + Tvar.mk_where cx reason (fun t -> Flow.flow cx (func_t, apply_opt_use opt_use t)) (* returns (type of method itself, type returned from method) *) -and method_call cx reason ~use_op ?(call_strict_arity=true) prop_loc - (expr, obj_t, name) targts argts = +and method_call + cx reason ~use_op ?(call_strict_arity = true) prop_loc (expr, obj_t, name) targts argts = Type_inference_hooks_js.dispatch_call_hook cx name prop_loc obj_t; - (match Refinement.get cx expr (aloc_of_reason reason |> ALoc.to_loc) with + let (expr_loc, _) = expr in + match Refinement.get cx expr (aloc_of_reason reason) with | Some f -> - (* note: the current state of affairs is that we understand + (* note: the current state of affairs is that we understand member expressions as having refined types, rather than understanding receiver objects as carrying refined properties. generalizing this properly is a todo, and will deliver goodness. meanwhile, here we must hijack the property selection normally performed by the flow algorithm itself. 
*) - Env.havoc_heap_refinements (); - let id_info = name, f, Type_table.PropertyAccess obj_t in - Type_table.set_info prop_loc id_info (Context.type_table cx); - f, + Env.havoc_heap_refinements (); + ( f, Tvar.mk_where cx reason (fun t -> - let frame = Env.peek_frame () in - let app = - mk_methodcalltype obj_t targts argts t ~frame ~call_strict_arity in - Flow.flow cx (f, CallT (use_op, reason, app)); - ) + let frame = Env.peek_frame () in + let app = mk_methodcalltype obj_t targts argts t ~frame ~call_strict_arity in + Flow.flow cx (f, CallT (use_op, reason, app))) ) | None -> - Env.havoc_heap_refinements (); - let reason_prop = mk_reason (RProperty (Some name)) prop_loc in - let prop_t = Tvar.mk cx reason_prop in - prop_t, + Env.havoc_heap_refinements (); + let reason_prop = mk_reason (RProperty (Some name)) prop_loc in + let prop_t = Tvar.mk cx reason_prop in + ( prop_t, Tvar.mk_where cx reason (fun t -> - let frame = Env.peek_frame () in - let expr_loc, _ = expr in - let reason_expr = mk_reason (RProperty (Some name)) expr_loc in - let app = - mk_methodcalltype obj_t targts argts t ~frame ~call_strict_arity in - let propref = Named (reason_prop, name) in - let id_info = name, prop_t, Type_table.PropertyAccess obj_t in - Type_table.set_info prop_loc id_info (Context.type_table cx); - Flow.flow cx (obj_t, MethodT (use_op, reason, reason_expr, propref, app, Some prop_t)) - ) - ) + let frame = Env.peek_frame () in + let reason_expr = mk_reason (RProperty (Some name)) expr_loc in + let app = mk_methodcalltype obj_t targts argts t ~frame ~call_strict_arity in + let propref = Named (reason_prop, name) in + Flow.flow cx (obj_t, MethodT (use_op, reason, reason_expr, propref, app, Some prop_t))) + ) and identifier_ cx name loc = - if Type_inference_hooks_js.dispatch_id_hook cx name loc - then AnyT.at loc - else ( + if Type_inference_hooks_js.dispatch_id_hook cx name loc then + Unsoundness.at InferenceHooks loc + else let t = Env.var_ref ~lookup_mode:ForValue cx name loc in (* We want to make sure that the reason description for the type we return * is always `RIdentifier name`. *) @@ -4159,956 +4357,962 @@ and identifier_ cx name loc = | _ -> (match t with (* If this is an `OpenT` we can change its reason description directly. *) - | OpenT _ -> mod_reason_of_t (replace_reason_const (RIdentifier name)) t + | OpenT _ -> mod_reason_of_t (replace_desc_new_reason (RIdentifier name)) t (* If this is not an `OpenT` then create a new type variable with our * desired reason and unify it with our type. This adds a level of * indirection so that we don't modify the underlying reason of our type. 
*) | _ -> let reason = mk_reason (RIdentifier name) loc in - Tvar.mk_where cx reason (Flow.unify cx t) - ) - ) + Tvar.mk_where cx reason (Flow.unify cx t)) -and identifier cx name loc = +and identifier cx { Ast.Identifier.name; comments = _ } loc = let t = identifier_ cx name loc in - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); t (* traverse a literal expression, return result type *) -and literal cx loc lit = Ast.Literal.(match lit.Ast.Literal.value with - | String s -> - (* It's too expensive to track literal information for large strings.*) - let max_literal_length = Context.max_literal_length cx in - let lit = - if max_literal_length = 0 || String.length s < max_literal_length - then Literal (None, s) - else AnyLiteral - in - DefT (annot_reason (mk_reason RString loc), StrT lit) - - | Boolean b -> - DefT (annot_reason (mk_reason RBoolean loc), BoolT (Some b)) - - | Null -> - NullT.at loc - - | Number f -> - DefT (annot_reason (mk_reason RNumber loc), NumT (Literal (None, (f, lit.raw)))) - - | RegExp _ -> - Flow.get_builtin_type cx (annot_reason (mk_reason RRegExp loc)) "RegExp" -) +and literal cx loc lit = + let make_trust = Context.trust_constructor cx in + Ast.Literal.( + match lit.Ast.Literal.value with + | String s -> + begin + match Context.haste_module_ref_prefix cx with + | Some prefix when String_utils.string_starts_with s prefix -> + let m = String_utils.lstrip s prefix in + let t = Import_export.require cx (loc, m) loc in + let reason = mk_reason (RCustom "module reference") loc in + Flow.get_builtin_typeapp cx reason "$Flow$ModuleRef" [t] + | _ -> + (* It's too expensive to track literal information for large strings.*) + let max_literal_length = Context.max_literal_length cx in + let (lit, r_desc) = + if max_literal_length = 0 || String.length s < max_literal_length then + (Literal (None, s), RString) + else + (AnyLiteral, RLongStringLit max_literal_length) + in + DefT (annot_reason (mk_reason r_desc loc), make_trust (), StrT lit) + end + | Boolean b -> DefT (annot_reason (mk_reason RBoolean loc), make_trust (), BoolT (Some b)) + | Null -> NullT.at loc |> with_trust make_trust + | Number f -> + DefT + (annot_reason (mk_reason RNumber loc), make_trust (), NumT (Literal (None, (f, lit.raw)))) + | BigInt _ -> + let reason = annot_reason (mk_reason (RBigIntLit lit.raw) loc) in + Flow.add_output cx (Error_message.EBigIntNotYetSupported reason); + AnyT.why AnyError reason + | RegExp _ -> Flow.get_builtin_type cx (annot_reason (mk_reason RRegExp loc)) "RegExp") (* traverse a unary expression, return result type *) -and unary cx loc = Ast.Expression.Unary.(function - | { operator = Not; argument; prefix } -> - let (_, arg), _ as argument = expression cx argument in +and unary cx loc = + Ast.Expression.Unary.( + function + | { operator = Not; argument; comments } -> + let (((_, arg), _) as argument) = expression cx argument in let reason = mk_reason (RUnaryOperator ("not", desc_of_t arg)) loc in - Tvar.mk_where cx reason (fun t -> Flow.flow cx (arg, NotT (reason, t))), - { operator = Not; argument; prefix; } - - | { operator = Plus; argument; prefix } -> + ( Tvar.mk_where cx reason (fun t -> Flow.flow cx (arg, NotT (reason, t))), + { operator = Not; argument; comments } ) + | { operator = Plus; argument; comments } -> let argument = expression cx argument in - NumT.at loc, { operator = Plus; argument; prefix; } - - | { operator = Minus; argument; prefix } -> - let (_, argt), _ as argument = expression cx argument in - begin 
match argt with - | DefT (reason, NumT (Literal (sense, (value, raw)))) -> - (* special case for negative number literals, to avoid creating an unnecessary tvar. not + (NumT.at loc |> with_trust literal_trust, { operator = Plus; argument; comments }) + | { operator = Minus; argument; comments } -> + let (((_, argt), _) as argument) = expression cx argument in + ( begin + match argt with + | DefT (reason, trust, NumT (Literal (sense, (value, raw)))) -> + (* special case for negative number literals, to avoid creating an unnecessary tvar. not having a tvar allows other special cases that match concrete lower bounds to proceed (notably, Object.freeze upgrades literal props to singleton types, and a tvar would make a negative number not look like a literal.) *) - let reason = repos_reason loc ~annot_loc:loc reason in - let (value, raw) = Ast_utils.negate_number_literal (value, raw) in - DefT (reason, NumT (Literal (sense, (value, raw)))) - | arg -> - let reason = mk_reason (desc_of_t arg) loc in - Tvar.mk_derivable_where cx reason (fun t -> - Flow.flow cx (arg, UnaryMinusT (reason, t)); - ) - end, - { operator = Minus; argument; prefix; } - - | { operator = BitNot; argument; prefix } -> - let t = NumT.at loc in - let (_, argt), _ as argument = expression cx argument in + let reason = repos_reason loc ~annot_loc:loc reason in + let (value, raw) = Flow_ast_utils.negate_number_literal (value, raw) in + DefT (reason, trust, NumT (Literal (sense, (value, raw)))) + | arg -> + let reason = mk_reason (desc_of_t arg) loc in + Tvar.mk_derivable_where cx reason (fun t -> + Flow.flow cx (arg, UnaryMinusT (reason, t))) + end, + { operator = Minus; argument; comments } ) + | { operator = BitNot; argument; comments } -> + let t = NumT.at loc |> with_trust literal_trust in + let (((_, argt), _) as argument) = expression cx argument in Flow.flow_t cx (argt, t); - t, { operator = BitNot; argument; prefix; } - - | { operator = Typeof; argument; prefix } -> + (t, { operator = BitNot; argument; comments }) + | { operator = Typeof; argument; comments } -> let argument = expression cx argument in - StrT.at loc, { operator = Typeof; argument = argument; prefix } - - | { operator = Void; argument; prefix } -> + (StrT.at loc |> with_trust literal_trust, { operator = Typeof; argument; comments }) + | { operator = Void; argument; comments } -> let argument = expression cx argument in - VoidT.at loc, { operator = Void; argument; prefix } - - | { operator = Delete; argument; prefix } -> - let argument = expression cx argument in - BoolT.at loc, { operator = Delete; argument; prefix } - - | { operator = Await; argument; prefix } -> - (** TODO: await should look up Promise in the environment instead of going + (VoidT.at loc |> with_trust literal_trust, { operator = Void; argument; comments }) + | { operator = Ast.Expression.Unary.Delete; argument; comments } -> + let argument = delete cx loc argument in + ( BoolT.at loc |> with_trust literal_trust, + { operator = Ast.Expression.Unary.Delete; argument; comments } ) + | { operator = Await; argument; comments } -> + (* TODO: await should look up Promise in the environment instead of going directly to the core definition. Otherwise, the following won't work with a polyfilled Promise! **) - (* see declaration of $await in core.js: + (* see declaration of $await in core.js: if argument is a Promise, then (await argument) returns T. otherwise it just returns the argument type. TODO update this comment when recursive unwrapping of Promise is done. 
*) - let reason = mk_reason (RCustom "await") loc in - let await = Flow.get_builtin cx "$await" reason in - let (_, arg), _ as argument_ast = expression cx argument in - let use_op = Op (FunCall { - op = reason; - fn = reason_of_t await; - args = [mk_expression_reason argument]; - }) in - func_call cx reason ~use_op await None [Arg arg], - { operator = Await; argument = argument_ast; prefix } -) + let reason = mk_reason (RCustom "await") loc in + let await = Flow.get_builtin cx "$await" reason in + let (((_, arg), _) as argument_ast) = expression cx argument in + let use_op = + Op + (FunCall + { + op = reason; + fn = reason_of_t await; + args = [mk_expression_reason argument]; + local = true; + }) + in + ( func_call cx reason ~use_op await None [Arg arg], + { operator = Await; argument = argument_ast; comments } )) (* numeric pre/post inc/dec *) -and update cx loc expr = Ast.Expression.Update.( - let reason = mk_reason (RCustom "update") loc in - let result_t = NumT.at loc in - result_t, - (match expr.argument with - | arg_loc, Ast.Expression.Identifier (id_loc, name) -> - Flow.flow cx (identifier cx name id_loc, AssertArithmeticOperandT reason); - (* enforce state-based guards for binding update, e.g., const *) - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) id_loc); - init = reason_of_t result_t; - }) in - ignore (Env.set_var cx ~use_op name result_t id_loc); - let t = NumT.at arg_loc in - { expr with - argument = (arg_loc, t), Ast.Expression.Identifier ((id_loc, t), name) } - | argument -> - let (_, arg_t), _ as arg_ast = expression cx argument in - Flow.flow cx (arg_t, AssertArithmeticOperandT reason); - { expr with argument = arg_ast } - ) -) +and update cx loc expr = + Ast.Expression.Update.( + let reason = mk_reason (RCustom "update") loc in + let result_t = NumT.at loc |> with_trust literal_trust in + ( result_t, + match expr.argument with + | ( arg_loc, + Ast.Expression.Identifier (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)) ) + -> + Flow.flow cx (identifier cx id_name id_loc, AssertArithmeticOperandT reason); + + (* enforce state-based guards for binding update, e.g., const *) + let use_op = + Op + (AssignVar + { var = Some (mk_reason (RIdentifier name) id_loc); init = reason_of_t result_t }) + in + ignore (Env.set_var cx ~use_op name result_t id_loc); + let t = NumT.at arg_loc |> with_trust bogus_trust in + { expr with argument = ((arg_loc, t), Ast.Expression.Identifier ((id_loc, t), id_name)) } + | argument -> + let (((_, arg_t), _) as arg_ast) = expression cx argument in + Flow.flow cx (arg_t, AssertArithmeticOperandT reason); + { expr with argument = arg_ast } )) (* traverse a binary expression, return result type *) and binary cx loc { Ast.Expression.Binary.operator; left; right } = - let open Ast.Expression.Binary in - match operator with - | Equal - | NotEqual -> - let (_, t1), _ as left = expression cx left in - let (_, t2), _ as right = expression cx right in - let desc = RBinaryOperator ( - (match operator with - | Equal -> "==" - | NotEqual -> "!=" - | _ -> failwith "unreachable"), - desc_of_reason (reason_of_t t1), - desc_of_reason (reason_of_t t2) - ) in + Ast.Expression.Binary.( + match operator with + | Equal + | NotEqual -> + let (((_, t1), _) as left) = expression cx left in + let (((_, t2), _) as right) = expression cx right in + let desc = + RBinaryOperator + ( (match operator with + | Equal -> "==" + | NotEqual -> "!=" + | _ -> failwith "unreachable"), + desc_of_reason (reason_of_t t1), + desc_of_reason (reason_of_t 
t2) ) + in let reason = mk_reason desc loc in Flow.flow cx (t1, EqT (reason, false, t2)); - BoolT.at loc, { operator; left; right; } - - | In -> + (BoolT.at loc |> with_trust literal_trust, { operator; left; right }) + | In -> let (loc1, _) = left in let (loc2, _) = right in - let (_, t1), _ as left = expression cx left in - let (_, t2), _ as right = expression cx right in + let (((_, t1), _) as left) = expression cx left in + let (((_, t2), _) as right) = expression cx right in let reason_lhs = mk_reason (RCustom "LHS of `in` operator") loc1 in let reason_rhs = mk_reason (RCustom "RHS of `in` operator") loc2 in Flow.flow cx (t1, AssertBinaryInLHST reason_lhs); Flow.flow cx (t2, AssertBinaryInRHST reason_rhs); - BoolT.at loc, { operator; left; right; } - - | StrictEqual - | StrictNotEqual - | Instanceof -> + (BoolT.at loc |> with_trust literal_trust, { operator; left; right }) + | StrictEqual + | StrictNotEqual + | Instanceof -> let left = expression cx left in let right = expression cx right in - BoolT.at loc, { operator; left; right; } - - | LessThan - | LessThanEqual - | GreaterThan - | GreaterThanEqual -> - let (_, t1), _ as left = expression cx left in - let (_, t2), _ as right = expression cx right in - let desc = RBinaryOperator ( - (match operator with - | LessThan -> "<" - | LessThanEqual -> "<=" - | GreaterThan -> ">" - | GreaterThanEqual -> ">=" - | _ -> failwith "unreachable"), - desc_of_reason (reason_of_t t1), - desc_of_reason (reason_of_t t2) - ) in + (BoolT.at loc |> with_trust literal_trust, { operator; left; right }) + | LessThan + | LessThanEqual + | GreaterThan + | GreaterThanEqual -> + let (((_, t1), _) as left) = expression cx left in + let (((_, t2), _) as right) = expression cx right in + let desc = + RBinaryOperator + ( (match operator with + | LessThan -> "<" + | LessThanEqual -> "<=" + | GreaterThan -> ">" + | GreaterThanEqual -> ">=" + | _ -> failwith "unreachable"), + desc_of_reason (reason_of_t t1), + desc_of_reason (reason_of_t t2) ) + in let reason = mk_reason desc loc in Flow.flow cx (t1, ComparatorT (reason, false, t2)); - BoolT.at loc, { operator; left; right; } - - | LShift - | RShift - | RShift3 - | Minus - | Mult - | Exp - | Div - | Mod - | BitOr - | Xor - | BitAnd -> + (BoolT.at loc |> with_trust literal_trust, { operator; left; right }) + | LShift + | RShift + | RShift3 + | Minus + | Mult + | Exp + | Div + | Mod + | BitOr + | Xor + | BitAnd -> let reason = mk_reason (RCustom "arithmetic operation") loc in - let (_, t1), _ as left = expression cx left in - let (_, t2), _ as right = expression cx right in + let (((_, t1), _) as left) = expression cx left in + let (((_, t2), _) as right) = expression cx right in Flow.flow cx (t1, AssertArithmeticOperandT reason); Flow.flow cx (t2, AssertArithmeticOperandT reason); - NumT.at loc, { operator; left; right; } - - | Plus -> - let (_, t1), _ as left_ast = expression cx left in - let (_, t2), _ as right_ast = expression cx right in - let desc = RBinaryOperator ( - "+", - desc_of_reason (reason_of_t t1), - desc_of_reason (reason_of_t t2) - ) in + (NumT.at loc |> with_trust literal_trust, { operator; left; right }) + | Plus -> + let (((_, t1), _) as left_ast) = expression cx left in + let (((_, t2), _) as right_ast) = expression cx right in + let desc = + RBinaryOperator ("+", desc_of_reason (reason_of_t t1), desc_of_reason (reason_of_t t2)) + in let reason = mk_reason desc loc in - Tvar.mk_where cx reason (fun t -> - let use_op = Op (Addition { - op = reason; - left = mk_expression_reason left; - right = 
mk_expression_reason right; - }) in - Flow.flow cx (t1, AdderT (use_op, reason, false, t2, t)); - ), - { operator; left = left_ast; right = right_ast } + ( Tvar.mk_where cx reason (fun t -> + let use_op = + Op + (Addition + { + op = reason; + left = mk_expression_reason left; + right = mk_expression_reason right; + }) + in + Flow.flow cx (t1, AdderT (use_op, reason, false, t2, t))), + { operator; left = left_ast; right = right_ast } )) and logical cx loc { Ast.Expression.Logical.operator; left; right } = - let open Ast.Expression.Logical in - match operator with - | Or -> + Ast.Expression.Logical.( + match operator with + | Or -> let () = check_default_pattern cx left right in - let ((_, t1), _ as left), _, not_map, xtypes = predicates_of_condition cx left in - let (_, t2), _ as right = Env.in_refined_env cx loc not_map xtypes - (fun () -> expression cx right) + let ((((_, t1), _) as left), _, not_map, xtypes) = predicates_of_condition cx left in + let (((_, t2), _) as right) = + Env.in_refined_env cx loc not_map xtypes (fun () -> expression cx right) in let reason = mk_reason (RLogical ("||", desc_of_t t1, desc_of_t t2)) loc in - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (t1, OrT (reason, t2, t)); - ), - { operator = Or; left; right; } - - | And -> - let ((_, t1), _ as left), map, _, xtypes = predicates_of_condition cx left in - let (_, t2), _ as right = Env.in_refined_env cx loc map xtypes - (fun () -> expression cx right) + ( Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, OrT (reason, t2, t))), + { operator = Or; left; right } ) + | And -> + let ((((_, t1), _) as left), map, _, xtypes) = predicates_of_condition cx left in + let (((_, t2), _) as right) = + Env.in_refined_env cx loc map xtypes (fun () -> expression cx right) in let reason = mk_reason (RLogical ("&&", desc_of_t t1, desc_of_t t2)) loc in - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (t1, AndT (reason, t2, t)); - ), - { operator = And; left; right; } - | NullishCoalesce -> - let (_, t1), _ as left = expression cx left in - let (_, t2), _ as right = expression cx right in + ( Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, AndT (reason, t2, t))), + { operator = And; left; right } ) + | NullishCoalesce -> + let (((_, t1), _) as left) = expression cx left in + let (((_, t2), _) as right) = expression cx right in let reason = mk_reason (RLogical ("??", desc_of_t t1, desc_of_t t2)) loc in - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (t1, NullishCoalesceT (reason, t2, t)); - ), - { operator = NullishCoalesce; left; right; } - -and assignment_lhs cx = Ast.Pattern.(function - | loc, Object _ - | loc, Array _ -> - Flow.add_output cx (Flow_error.EInvalidLHSInAssignment loc); - ((loc, AnyT.at loc), Typed_ast.Pattern.error) - - | pat_loc, Identifier { Identifier.name = (loc, name); optional; annot; } -> - let t = identifier cx name loc in - ((pat_loc, t), Identifier { Identifier. - name = (loc, t), name; - annot = Option.map - ~f:(fun (loc, _) -> loc, (Typed_ast.error_annot, Typed_ast.Type.error)) - annot; - optional; - }) - - | loc, Expression ((_, Ast.Expression.Member _) as m) -> - let (_, t), _ as m = expression cx m in - ((loc, t), Expression m) - - (* parser will error before we get here *) - | _ -> assert false -) - -(* traverse assignment expressions *) -and assignment cx loc = Ast.Expression.(function - (* r = e *) - | (r, Assignment.Assign, e) -> - - let (_, t), _ as rhs = expression cx e in - - (* update env, add constraints arising from LHS structure, - handle special cases, etc. 
*) - let lhs = match r with - - (* module.exports = e *) - | lhs_loc, Ast.Pattern.Expression (pat_loc, Member { - Member._object = object_loc, Ast.Expression.Identifier (id_loc, ("module" as mod_name)); - property = Member.PropertyIdentifier (ploc, ("exports" as name)); - computed - }) -> - set_module_kind cx lhs_loc (Context.CommonJSModule(Some(lhs_loc))); - set_module_exports cx lhs_loc t; - (* TODO: we should revisit what the type of "module" is once we make - the treatment of module.exports accurate (this isn't sensitive to - shadowing of the "module" variable, etc.) *) - let t = AnyObjT.at object_loc in - let property = Member.PropertyIdentifier ((ploc, t), name) in - (lhs_loc, t), Ast.Pattern.Expression ((pat_loc, t), Member { Member. - _object = (object_loc, t), Ast.Expression.Identifier ((id_loc, t), mod_name); - property; - computed; - }) - - (* super.name = e *) - | lhs_loc, Ast.Pattern.Expression ((pat_loc, Member { - Member._object = super_loc, Super; - property = Member.PropertyIdentifier (prop_loc, name); - computed - }) as rx) -> - let reason = - mk_reason (RPropertyAssignment (Some name)) lhs_loc in - let prop_reason = mk_reason (RProperty (Some name)) prop_loc in - let super = super_ cx lhs_loc in - let id_info = "super", super, Type_table.Other in - Type_table.set_info super_loc id_info (Context.type_table cx); - let prop_t = Tvar.mk cx prop_reason in - let id_info = name, prop_t, Type_table.PropertyAccess super in - Type_table.set_info prop_loc id_info (Context.type_table cx); - let use_op = Op (SetProperty { - lhs = reason; - prop = mk_reason (desc_of_reason (mk_expression_reason rx)) prop_loc; - value = mk_expression_reason e; - }) in - Flow.flow cx (super, SetPropT ( - use_op, reason, Named (prop_reason, name), Normal, t, Some prop_t - )); - let property = Member.PropertyIdentifier ((prop_loc, prop_t), name) in - (lhs_loc, prop_t), Ast.Pattern.Expression ((pat_loc, prop_t), Member { Member. - _object = (super_loc, super), Super; - property; - computed; - }) - - (* _object.#name = e *) - | lhs_loc, Ast.Pattern.Expression ((pat_loc, Member { - Member._object; - property = Member.PropertyPrivateName (prop_loc, (_, name)) as property; - computed; - }) as expr) -> - let (_, o), _ as _object = expression cx _object in - let prop_t = - (* if we fire this hook, it means the assignment is a sham. *) - if Type_inference_hooks_js.dispatch_member_hook cx name prop_loc o - then AnyT.at prop_loc - else - let reason = mk_reason (RPropertyAssignment (Some name)) lhs_loc in - - (* flow type to object property itself *) - let class_entries = Env.get_class_entries () in - let prop_reason = mk_reason (RPrivateProperty name) prop_loc in - let prop_t = Tvar.mk cx prop_reason in - let id_info = name, prop_t, Type_table.PropertyAccess o in - Type_table.set_info prop_loc id_info (Context.type_table cx); - let use_op = Op (SetProperty { - lhs = reason; - prop = mk_reason (desc_of_reason (mk_expression_reason expr)) prop_loc; - value = mk_expression_reason e; - }) in - Flow.flow cx (o, SetPrivatePropT ( - use_op, reason, name, class_entries, false, t, Some prop_t - )); - post_assignment_havoc ~private_:true name expr lhs_loc t; - prop_t - in - (lhs_loc, prop_t), Ast.Pattern.Expression ((pat_loc, prop_t), Member { Member. 
- _object; - property; - computed; - }) - - (* _object.name = e *) - | lhs_loc, Ast.Pattern.Expression ((pat_loc, Member { - Member._object; - property = Member.PropertyIdentifier (prop_loc, name); - computed; - }) as expr) -> - let wr_ctx = match _object, Env.var_scope_kind () with - | (_, This), Scope.Ctor -> ThisInCtor - | _ -> Normal - in - let (_, o), _ as _object = expression cx _object in - let prop_t = - (* if we fire this hook, it means the assignment is a sham. *) - if Type_inference_hooks_js.dispatch_member_hook cx name prop_loc o - then AnyT.at prop_loc - else - let reason = mk_reason (RPropertyAssignment (Some name)) lhs_loc in - let prop_reason = mk_reason (RProperty (Some name)) prop_loc in - - (* flow type to object property itself *) - let prop_t = Tvar.mk cx prop_reason in - let id_info = name, prop_t, Type_table.PropertyAccess o in - Type_table.set_info prop_loc id_info (Context.type_table cx); - let use_op = Op (SetProperty { - lhs = reason; - prop = mk_reason (desc_of_reason (mk_expression_reason expr)) prop_loc; - value = mk_expression_reason e; - }) in - Flow.flow cx (o, SetPropT ( - use_op, reason, Named (prop_reason, name), wr_ctx, t, Some prop_t - )); - post_assignment_havoc ~private_:false name expr lhs_loc t; - prop_t - in - let property = Member.PropertyIdentifier ((prop_loc, prop_t), name) in - (lhs_loc, prop_t), Ast.Pattern.Expression ((pat_loc, prop_t), Member { Member. - _object; - property; - computed; - }) - - (* _object[index] = e *) - | lhs_loc, Ast.Pattern.Expression ((pat_loc, Member { - Member._object; - property = Member.PropertyExpression ((iloc, _) as index); - computed; - }) as rx) -> - let reason = mk_reason (RPropertyAssignment None) lhs_loc in - let (_, a), _ as _object = expression cx _object in - let (_, i), _ as index = expression cx index in - let use_op = Op (SetProperty { - lhs = reason; - prop = mk_reason (desc_of_reason (mk_expression_reason rx)) iloc; - value = mk_expression_reason e; - }) in - Flow.flow cx (a, SetElemT (use_op, reason, i, t, None)); - - (* types involved in the assignment itself are computed - in pre-havoc environment. it's the assignment itself - which clears refis *) - Env.havoc_heap_refinements (); - (lhs_loc, t), Ast.Pattern.Expression ((pat_loc, t), Member { Member. 
- _object; - property = Member.PropertyExpression index; - computed; - }) - - (* other r structures are handled as destructuring assignments *) - | _ -> - destructuring_assignment cx ~expr:expression t e r - - - + ( Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, NullishCoalesceT (reason, t2, t))), + { operator = NullishCoalesce; left; right } )) +and assignment_lhs cx patt = + match patt with + | (pat_loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier.name = (loc, name); optional; annot }) + -> + let t = identifier cx name loc in + ( (pat_loc, t), + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = ((loc, t), name); + annot = + (match annot with + | Ast.Type.Available annot -> + Ast.Type.Available (Tast_utils.error_mapper#type_annotation annot) + | Ast.Type.Missing hint -> Ast.Type.Missing (hint, AnyT.locationless Untyped)); + optional; + } ) + | (loc, Ast.Pattern.Expression ((_, Ast.Expression.Member _) as m)) -> + let (((_, t), _) as m) = expression cx m in + ((loc, t), Ast.Pattern.Expression m) + (* TODO: object, array and non-member expression patterns are invalid + (should be a parse error but isn't yet) *) + | (lhs_loc, Ast.Pattern.Object _) + | (lhs_loc, Ast.Pattern.Array _) + | (lhs_loc, Ast.Pattern.Expression _) -> + Flow.add_output cx (Error_message.EInvalidLHSInAssignment lhs_loc); + Tast_utils.error_mapper#pattern patt + +(* write a type into a member *) +and assign_member cx ~make_op t ~lhs_loc ~lhs_prop_reason ~mode lhs = + Ast.Expression.( + match lhs with + (* module.exports = e *) + | { + Member._object = + ( object_loc, + Ast.Expression.Identifier + (id_loc, ({ Ast.Identifier.name = "module"; comments = _ } as mod_name)) ); + property = + Member.PropertyIdentifier (ploc, ({ Ast.Identifier.name = "exports"; comments = _ } as name)); + } -> + Import_export.cjs_clobber cx lhs_loc t; + let module_reason = mk_reason (RCustom "module") object_loc in + let module_t = MixedT.why module_reason |> with_trust bogus_trust in + let _object = + ((object_loc, module_t), Ast.Expression.Identifier ((id_loc, module_t), mod_name)) + in + let property = Member.PropertyIdentifier ((ploc, t), name) in + ((lhs_loc, t), Member { Member._object; property }) + (* super.name = e *) + | { + Member._object = (super_loc, Super); + property = Member.PropertyIdentifier (prop_loc, ({ Ast.Identifier.name; comments = _ } as id)); + } -> + let reason = mk_reason (RPropertyAssignment (Some name)) lhs_loc in + let prop_reason = mk_reason (RProperty (Some name)) prop_loc in + let super = super_ cx lhs_loc in + let prop_t = Tvar.mk cx prop_reason in + let use_op = + make_op ~lhs:reason ~prop:(mk_reason (desc_of_reason lhs_prop_reason) prop_loc) + in + Flow.flow + cx + (super, SetPropT (use_op, reason, Named (prop_reason, name), mode, Normal, t, Some prop_t)); + let property = Member.PropertyIdentifier ((prop_loc, prop_t), id) in + ((lhs_loc, prop_t), Member { Member._object = ((super_loc, super), Super); property }) + (* _object.#name = e *) + | { + Member._object; + property = + Member.PropertyPrivateName (prop_loc, (_, { Ast.Identifier.name; comments = _ })) as + property; + } -> + let (((_, o), _) as _object) = expression cx _object in + let prop_t = + (* if we fire this hook, it means the assignment is a sham. 
*) + if Type_inference_hooks_js.dispatch_member_hook cx name prop_loc o then + Unsoundness.at InferenceHooks prop_loc + else + let reason = mk_reason (RPropertyAssignment (Some name)) lhs_loc in + (* flow type to object property itself *) + let class_entries = Env.get_class_entries () in + let prop_reason = mk_reason (RPrivateProperty name) prop_loc in + let prop_t = Tvar.mk cx prop_reason in + let use_op = + make_op ~lhs:reason ~prop:(mk_reason (desc_of_reason lhs_prop_reason) prop_loc) + in + Flow.flow + cx + (o, SetPrivatePropT (use_op, reason, name, mode, class_entries, false, t, Some prop_t)); + post_assignment_havoc ~private_:true name (lhs_loc, Member lhs) prop_t t; + prop_t + in + ((lhs_loc, prop_t), Member { Member._object; property }) + (* _object.name = e *) + | { + Member._object; + property = Member.PropertyIdentifier (prop_loc, ({ Ast.Identifier.name; comments = _ } as id)); + } -> + let wr_ctx = + match (_object, Env.var_scope_kind ()) with + | ((_, This), Scope.Ctor) -> ThisInCtor + | _ -> Normal + in + let (((_, o), _) as _object) = expression cx _object in + let prop_t = + (* if we fire this hook, it means the assignment is a sham. *) + if Type_inference_hooks_js.dispatch_member_hook cx name prop_loc o then + Unsoundness.at InferenceHooks prop_loc + else + let reason = mk_reason (RPropertyAssignment (Some name)) lhs_loc in + let prop_reason = mk_reason (RProperty (Some name)) prop_loc in + (* flow type to object property itself *) + let prop_t = Tvar.mk cx prop_reason in + let use_op = + make_op ~lhs:reason ~prop:(mk_reason (desc_of_reason lhs_prop_reason) prop_loc) + in + Flow.flow + cx + (o, SetPropT (use_op, reason, Named (prop_reason, name), mode, wr_ctx, t, Some prop_t)); + post_assignment_havoc ~private_:false name (lhs_loc, Member lhs) prop_t t; + prop_t + in + let property = Member.PropertyIdentifier ((prop_loc, prop_t), id) in + ((lhs_loc, prop_t), Member { Member._object; property }) + (* _object[index] = e *) + | { Member._object; property = Member.PropertyExpression ((iloc, _) as index) } -> + let reason = mk_reason (RPropertyAssignment None) lhs_loc in + let (((_, a), _) as _object) = expression cx _object in + let (((_, i), _) as index) = expression cx index in + let use_op = make_op ~lhs:reason ~prop:(mk_reason (desc_of_reason lhs_prop_reason) iloc) in + Flow.flow cx (a, SetElemT (use_op, reason, i, mode, t, None)); + + (* types involved in the assignment itself are computed + in pre-havoc environment. it's the assignment itself + which clears refis *) + Env.havoc_heap_refinements (); + ((lhs_loc, t), Member { Member._object; property = Member.PropertyExpression index })) + +(* traverse simple assignment expressions (`lhs = rhs`) *) +and simple_assignment cx _loc lhs rhs = + let (((_, t), _) as typed_rhs) = expression cx rhs in + (* update env, add constraints arising from LHS structure, + handle special cases, etc. 
*) + let lhs = + match lhs with + | (lhs_loc, Ast.Pattern.Expression (pat_loc, Ast.Expression.Member mem)) -> + let lhs_prop_reason = mk_pattern_reason lhs in + let make_op ~lhs ~prop = Op (SetProperty { lhs; prop; value = mk_expression_reason rhs }) in + let ((lhs_loc, t), lhs) = + assign_member cx ~make_op t ~lhs_loc ~lhs_prop_reason ~mode:Assign mem in - t, lhs, rhs + ((lhs_loc, t), Ast.Pattern.Expression ((pat_loc, t), lhs)) + (* other r structures are handled as destructuring assignments *) + | _ -> Destructuring.assignment cx ~expr:expression t rhs lhs + in + (t, lhs, typed_rhs) - | (lhs, Assignment.PlusAssign, rhs) -> +(* traverse assignment expressions with operators (`lhs += rhs`, `lhs *= rhs`, etc) *) +and op_assignment cx loc lhs op rhs = + Ast.Expression.( + match op with + | Assignment.PlusAssign -> (* lhs += rhs *) let reason = mk_reason (RCustom "+=") loc in - let (_, lhs_t), _ as lhs_ast = assignment_lhs cx lhs in - let (_, rhs_t), _ as rhs_ast = expression cx rhs in + let (((_, lhs_t), _) as lhs_ast) = assignment_lhs cx lhs in + let (((_, rhs_t), _) as rhs_ast) = expression cx rhs in let result_t = Tvar.mk cx reason in (* lhs = lhs + rhs *) let () = - let use_op = Op (Addition { - op = reason; - left = (match lhs with - | (_, Ast.Pattern.Expression lhs) -> mk_expression_reason lhs - | _ -> reason_of_t lhs_t); - right = mk_expression_reason rhs; - }) in + let use_op = + Op + (Addition + { op = reason; left = mk_pattern_reason lhs; right = mk_expression_reason rhs }) + in Flow.flow cx (lhs_t, AdderT (use_op, reason, false, rhs_t, result_t)) in - let () = - let use_op = Op (Addition { - op = reason; - left = mk_expression_reason rhs; - right = (match lhs with - | (_, Ast.Pattern.Expression lhs) -> mk_expression_reason lhs - | _ -> reason_of_t lhs_t); - }) in - Flow.flow cx (rhs_t, AdderT (use_op, reason, false, lhs_t, result_t)) - in (* enforce state-based guards for binding update, e.g., const *) (match lhs with - | _, Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = id_loc, name; - _; - } -> - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) id_loc); - init = reason; - }) in + | ( _, + Ast.Pattern.Identifier + { Ast.Pattern.Identifier.name = (id_loc, { Ast.Identifier.name; comments = _ }); _ } ) + -> + let use_op = + Op (AssignVar { var = Some (mk_reason (RIdentifier name) id_loc); init = reason }) + in ignore Env.(set_var cx ~use_op name result_t id_loc) - | _ -> () - ); - lhs_t, lhs_ast, rhs_ast - - | (lhs, Assignment.MinusAssign, rhs) - | (lhs, Assignment.MultAssign, rhs) - | (lhs, Assignment.ExpAssign, rhs) - | (lhs, Assignment.DivAssign, rhs) - | (lhs, Assignment.ModAssign, rhs) - | (lhs, Assignment.LShiftAssign, rhs) - | (lhs, Assignment.RShiftAssign, rhs) - | (lhs, Assignment.RShift3Assign, rhs) - | (lhs, Assignment.BitOrAssign, rhs) - | (lhs, Assignment.BitXorAssign, rhs) - | (lhs, Assignment.BitAndAssign, rhs) - -> + | _ -> ()); + (lhs_t, lhs_ast, rhs_ast) + | Assignment.MinusAssign + | Assignment.MultAssign + | Assignment.ExpAssign + | Assignment.DivAssign + | Assignment.ModAssign + | Assignment.LShiftAssign + | Assignment.RShiftAssign + | Assignment.RShift3Assign + | Assignment.BitOrAssign + | Assignment.BitXorAssign + | Assignment.BitAndAssign -> (* lhs (numop)= rhs *) let reason = mk_reason (RCustom "(numop)=") loc in - let (_, lhs_t), _ as lhs_ast = assignment_lhs cx lhs in - let (_, rhs_t), _ as rhs_ast = expression cx rhs in + let (((_, lhs_t), _) as lhs_ast) = assignment_lhs cx lhs in + let (((_, rhs_t), _) as rhs_ast) = expression cx rhs in (* lhs = lhs (numop) rhs *) Flow.flow cx (lhs_t, AssertArithmeticOperandT reason); Flow.flow cx (rhs_t, AssertArithmeticOperandT reason); + (* enforce state-based guards for binding update, e.g., const *) (match lhs with - | _, Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = id_loc, name; - _; - } -> - let t = NumT.at loc in - let use_op = Op (AssignVar { - var = Some (mk_reason (RIdentifier name) id_loc); - init = reason_of_t t; - }) in + | ( _, + Ast.Pattern.Identifier + { Ast.Pattern.Identifier.name = (id_loc, { Ast.Identifier.name; comments = _ }); _ } ) + -> + let t = NumT.at loc |> with_trust literal_trust in + let use_op = + Op (AssignVar { var = Some (mk_reason (RIdentifier name) id_loc); init = reason_of_t t }) + in ignore Env.(set_var cx ~use_op name t id_loc) - | _ -> () - ); - lhs_t, lhs_ast, rhs_ast -) + | _ -> ()); + (lhs_t, lhs_ast, rhs_ast)) -and clone_object cx reason this that = +(* traverse assignment expressions *) +and assignment cx loc (lhs, op, rhs) = + match op with + | None -> simple_assignment cx loc lhs rhs + | Some op -> op_assignment cx loc lhs op rhs + +(* delete variables and properties *) +and delete cx loc target = + Ast.Expression.( + let void = VoidT.at loc |> with_trust literal_trust in + let (lhs_loc, targ_exp) = target in + match targ_exp with + | Member mem -> + let lhs_prop_reason = mk_expression_reason target in + let make_op ~lhs ~prop = Op (DeleteProperty { lhs; prop }) in + assign_member cx ~make_op void ~lhs_loc ~lhs_prop_reason ~mode:Type.Delete mem + | Identifier (_, { Ast.Identifier.name; _ }) -> + let use_op = Op (DeleteVar { var = mk_expression_reason target }) in + ignore Env.(set_var cx ~use_op name void loc); + expression cx target + | _ -> + let (((_, t), _) as target) = expression cx target in + Flow.add_output cx Error_message.(ECannotDelete (loc, reason_of_t t)); + target) + +and clone_object cx reason this that use_op = Tvar.mk_where cx reason (fun tvar -> - let u = ObjRestT (reason, [], tvar) in - let t = Flow.tvar_with_constraint cx u in - Flow.flow cx ( - this, - ObjAssignToT (reason, that, t, default_obj_assign_kind) - ) - ) - -and collapse_children cx children: - Type.unresolved_param list * - (Loc.t, Loc.t * Type.t) Ast.JSX.child list = Ast.JSX.( - children - |> List.fold_left (fun (unres_params, children) -> function - | ExpressionContainer.( - loc, - ExpressionContainer { expression = EmptyExpression empty_loc } - ) -> - unres_params, (loc, - ExpressionContainer.(ExpressionContainer { - expression = EmptyExpression empty_loc - }) - )::children - | child -> - let unres_param_opt, child = jsx_body cx child in - Option.value_map unres_param_opt - ~default:unres_params ~f:(fun x -> x::unres_params), - child::children - ) ([], []) - |> map_pair List.rev List.rev) - -and jsx cx e: Type.t * (Loc.t, Loc.t * Type.t) Ast.JSX.element = Ast.JSX.( - let { openingElement; children; closingElement } = e in - let locs = - let open_, _ = openingElement in - match closingElement with - | Some (close, _) -> Loc.btwn open_ close, open_, Loc.btwn_exclusive open_ close - | _ -> open_, open_, open_ - in - let unresolved_params, children = collapse_children cx children in - let t, openingElement, closingElement = - jsx_title cx openingElement closingElement unresolved_params locs in - t, { openingElement; children; closingElement } -) - -and jsx_fragment cx fragment: Type.t * (Loc.t, Loc.t * Type.t) Ast.JSX.fragment = - let open Ast.JSX in - let { frag_openingElement; frag_children; frag_closingElement } = fragment in - let locs = - let open_ = frag_openingElement in - match frag_closingElement with - | Some close -> Loc.btwn open_ close, open_, Loc.btwn_exclusive open_ close - | _ -> open_, open_, open_ + let u = ObjRestT (reason, [], tvar) in + let t = Flow.tvar_with_constraint cx u in + Flow.flow cx (this, 
ObjAssignToT (use_op, reason, that, t, default_obj_assign_kind))) + +and collapse_children cx (children_loc, children) : + Type.unresolved_param list * (ALoc.t * (ALoc.t, ALoc.t * Type.t) Ast.JSX.child list) = + let (unresolved_params, children') = + children + |> List.fold_left + (fun (unres_params, children) child -> + let (unres_param_opt, child) = jsx_body cx child in + ( Option.value_map unres_param_opt ~default:unres_params ~f:(fun x -> x :: unres_params), + child :: children )) + ([], []) + |> map_pair List.rev List.rev in - let _, loc_opening, _ = locs in - let unresolved_params, frag_children = collapse_children cx frag_children in - let fragment_t = - let reason = mk_reason (RIdentifier "React.Fragment") loc_opening in - let react = Env.var_ref ~lookup_mode:ForValue cx "React" loc_opening in - let use_op = Op (GetProperty reason) in - get_prop ~is_cond:false cx reason ~use_op react (reason, "Fragment") - in - let t = jsx_desugar cx "React.Fragment" fragment_t (NullT.at loc_opening) [] - unresolved_params locs in - t, { frag_openingElement; frag_children; frag_closingElement } - -and jsx_title cx openingElement closingElement children locs = Ast.JSX.( - let loc_element, _, _ = locs in - let loc, { Opening.name; attributes; selfClosing } = openingElement in - let facebook_fbt = Context.facebook_fbt cx in - let jsx_mode = Context.jsx cx in - - let t, name, attributes = match (name, facebook_fbt, jsx_mode) with - | Identifier (loc_id, ({ Identifier.name = "fbt" } as id)), Some facebook_fbt, _ -> - let fbt_reason = mk_reason RFbt loc_element in - let t = Flow.get_builtin_type cx fbt_reason facebook_fbt in - let name = Identifier ((loc_id, t), id) in - let attributes = Typed_ast.JSX.Opening.error_attribute_list attributes in - t, name, attributes - - | Identifier (loc, { Identifier.name }), _, Options.Jsx_react -> - if Type_inference_hooks_js.dispatch_id_hook cx name loc then - let t = AnyT.at loc_element in - let name = Identifier ((loc, t), { Identifier.name }) in - let attributes = Typed_ast.JSX.Opening.error_attribute_list attributes in - t, name, attributes - else - let reason = mk_reason (RReactElement (Some name)) loc_element in - let c = - if name = String.capitalize_ascii name then - identifier cx name loc + (unresolved_params, (children_loc, children')) + +and jsx cx expr_loc e : Type.t * (ALoc.t, ALoc.t * Type.t) Ast.JSX.element = + Ast.JSX.( + let { openingElement; children; closingElement } = e in + let (children_loc, _) = children in + let locs = + let (open_, _) = openingElement in + match closingElement with + | Some _ -> (expr_loc, open_, children_loc) + | _ -> (open_, open_, open_) + in + let (unresolved_params, children) = collapse_children cx children in + let (t, openingElement, closingElement) = + jsx_title cx openingElement closingElement unresolved_params locs + in + (t, { openingElement; children; closingElement })) + +and jsx_fragment cx expr_loc fragment : Type.t * (ALoc.t, ALoc.t * Type.t) Ast.JSX.fragment = + Ast.JSX.( + let { frag_openingElement; frag_children; frag_closingElement } = fragment in + let (children_loc, _) = frag_children in + let loc_opening = frag_openingElement in + let (unresolved_params, frag_children) = collapse_children cx frag_children in + let fragment_t = + let reason = mk_reason (RIdentifier "React.Fragment") loc_opening in + let react = Env.var_ref ~lookup_mode:ForValue cx "React" loc_opening in + let use_op = Op (GetProperty reason) in + get_prop ~is_cond:false cx reason ~use_op react (reason, "Fragment") + in + let locs = 
(expr_loc, frag_openingElement, children_loc) in + let t = + jsx_desugar + cx + "React.Fragment" + fragment_t + (NullT.at loc_opening |> with_trust bogus_trust) + [] + unresolved_params + locs + in + (t, { frag_openingElement; frag_children; frag_closingElement })) + +and jsx_title cx openingElement closingElement children locs = + Ast.JSX.( + let make_trust = Context.trust_constructor cx in + let (loc_element, _, _) = locs in + let (loc, { Opening.name; attributes; selfClosing }) = openingElement in + let facebook_fbs = Context.facebook_fbs cx in + let facebook_fbt = Context.facebook_fbt cx in + let jsx_mode = Context.jsx cx in + let (t, name, attributes) = + match (name, jsx_mode, (facebook_fbs, facebook_fbt)) with + | (Identifier (loc_id, ({ Identifier.name = "fbs" } as id)), _, (Some custom_jsx_type, _)) + | (Identifier (loc_id, ({ Identifier.name = "fbt" } as id)), _, (_, Some custom_jsx_type)) -> + let fbt_reason = mk_reason RFbt loc_element in + let t = Flow.get_builtin_type cx fbt_reason custom_jsx_type in + let name = Identifier ((loc_id, t), id) in + let attributes = List.map Tast_utils.error_mapper#jsx_opening_attribute attributes in + (t, name, attributes) + | (Identifier (loc, { Identifier.name }), Options.Jsx_react, _) -> + if Type_inference_hooks_js.dispatch_id_hook cx name loc then + let t = Unsoundness.at InferenceHooks loc_element in + let name = Identifier ((loc, t), { Identifier.name }) in + let attributes = List.map Tast_utils.error_mapper#jsx_opening_attribute attributes in + (t, name, attributes) else - DefT (mk_reason (RIdentifier name) loc, SingletonStrT name) - in - let o, attributes' = jsx_mk_props cx reason c name attributes children in - let t = jsx_desugar cx name c o attributes children locs in - let name = Identifier ((loc, t), { Identifier.name }) in - t, name, attributes' - - | Identifier (loc, { Identifier.name }), _, Options.Jsx_pragma _ -> - if Type_inference_hooks_js.dispatch_id_hook cx name loc then - let t = AnyT.at loc_element in - let name = Identifier ((loc, t), { Identifier.name }) in - let attributes = Typed_ast.JSX.Opening.error_attribute_list attributes in - t, name, attributes - else - let reason = mk_reason (RJSXElement (Some name)) loc_element in - let c = - if name = String.capitalize_ascii name then - identifier cx name loc + let reason = mk_reason (RReactElement (Some name)) loc_element in + let c = + if name = String.capitalize_ascii name then + identifier cx (mk_ident ~comments:None name) loc + else + DefT (mk_reason (RIdentifier name) loc, make_trust (), SingletonStrT name) + in + let (o, attributes') = jsx_mk_props cx reason c name attributes children in + let t = jsx_desugar cx name c o attributes children locs in + let name = Identifier ((loc, t), { Identifier.name }) in + (t, name, attributes') + | (Identifier (loc, { Identifier.name }), Options.Jsx_pragma _, _) -> + if Type_inference_hooks_js.dispatch_id_hook cx name loc then + let t = Unsoundness.at InferenceHooks loc_element in + let name = Identifier ((loc, t), { Identifier.name }) in + let attributes = List.map Tast_utils.error_mapper#jsx_opening_attribute attributes in + (t, name, attributes) else - DefT (mk_reason (RIdentifier name) loc, StrT (Literal (None, name))) - in - let o, attributes' = jsx_mk_props cx reason c name attributes children in - let t = jsx_desugar cx name c o attributes children locs in - let name = Identifier ((loc, t), { Identifier.name }) in - t, name, attributes' - - | Identifier (loc, { Identifier.name }), _, Options.Jsx_csx -> - (** - * It's a 
bummer to duplicate this case, but CSX does not want the - * "if name = String.capitalize name" restriction. - *) - if Type_inference_hooks_js.dispatch_id_hook cx name loc - then - let t = AnyT.at loc_element in - let name = Identifier ((loc, t), { Identifier.name }) in - let attributes' = Typed_ast.JSX.Opening.error_attribute_list attributes in - t, name, attributes' - else - let reason = mk_reason (RJSXElement (Some name)) loc_element in - let c = identifier cx name loc in - let o, attributes' = jsx_mk_props cx reason c name attributes children in - let t = jsx_desugar cx name c o attributes children locs in - let name = Identifier ((loc, t), { Identifier.name }) in - t, name, attributes' - - | MemberExpression member, _, Options.Jsx_react -> - let name = jsx_title_member_to_string member in - let el = RReactElement (Some name) in - let reason = mk_reason el loc_element in - let m_expr = jsx_title_member_to_expression member in - let (_, t), m_expr' = expression cx m_expr in - let c = mod_reason_of_t (replace_reason_const (RIdentifier name)) t in - let o, attributes' = jsx_mk_props cx reason c name attributes children in - let t = jsx_desugar cx name c o attributes children locs in - let name' = MemberExpression (expression_to_jsx_title_member m_expr') in - (t, name', attributes') - - | _ -> - (* TODO? covers namespaced names as element names *) - let t = AnyT.at loc_element in - let name = Typed_ast.JSX.error_name in - let attributes = Typed_ast.JSX.Opening.error_attribute_list attributes in - t, name, attributes - in - - let closingElement = - match closingElement with - | Some (c_loc, { Closing.name = cname }) -> - Some (c_loc, { Closing.name = jsx_match_closing_element name cname }) - | None -> None - in - t, (loc, { Opening.name; selfClosing; attributes; }), closingElement -) + let reason = mk_reason (RJSXElement (Some name)) loc_element in + let c = + if name = String.capitalize_ascii name then + identifier cx (mk_ident ~comments:None name) loc + else + DefT (mk_reason (RIdentifier name) loc, make_trust (), StrT (Literal (None, name))) + in + let (o, attributes') = jsx_mk_props cx reason c name attributes children in + let t = jsx_desugar cx name c o attributes children locs in + let name = Identifier ((loc, t), { Identifier.name }) in + (t, name, attributes') + | (Identifier (loc, { Identifier.name }), Options.Jsx_csx, _) -> + (* + * It's a bummer to duplicate this case, but CSX does not want the + * "if name = String.capitalize name" restriction. 
+ *) + if Type_inference_hooks_js.dispatch_id_hook cx name loc then + let t = Unsoundness.at InferenceHooks loc_element in + let name = Identifier ((loc, t), { Identifier.name }) in + let attributes' = List.map Tast_utils.error_mapper#jsx_opening_attribute attributes in + (t, name, attributes') + else + let reason = mk_reason (RJSXElement (Some name)) loc_element in + let c = identifier cx (mk_ident ~comments:None name) loc in + let (o, attributes') = jsx_mk_props cx reason c name attributes children in + let t = jsx_desugar cx name c o attributes children locs in + let name = Identifier ((loc, t), { Identifier.name }) in + (t, name, attributes') + | (MemberExpression member, Options.Jsx_react, _) -> + let name = jsx_title_member_to_string member in + let el = RReactElement (Some name) in + let reason = mk_reason el loc_element in + let m_expr = jsx_title_member_to_expression member in + let ((m_loc, t), m_expr') = expression cx m_expr in + let c = mod_reason_of_t (replace_desc_reason (RIdentifier name)) t in + let (o, attributes') = jsx_mk_props cx reason c name attributes children in + let t = jsx_desugar cx name c o attributes children locs in + let member' = + match expression_to_jsx_title_member m_loc m_expr' with + | Some member -> member + | None -> Tast_utils.error_mapper#jsx_member_expression member + in + (t, MemberExpression member', attributes') + | (MemberExpression member, Options.(Jsx_csx | Jsx_pragma _), _) -> + let t = Unsoundness.at InferenceHooks loc_element in + let name' = Tast_utils.error_mapper#jsx_name name in + let el_name = jsx_title_member_to_string member in + let reason = mk_reason (RJSXElement (Some el_name)) loc_element in + let c = mod_reason_of_t (replace_desc_reason (RIdentifier el_name)) t in + let (_o, attributes') = jsx_mk_props cx reason c el_name attributes children in + (t, name', attributes') + | (NamespacedName namespace, _, _) -> + (* TODO? 
covers namespaced names as element names *) + let t = Unsoundness.at InferenceHooks loc_element in + let name' = Tast_utils.error_mapper#jsx_name name in + let el_name = jsx_title_namespaced_name_to_string namespace in + let reason = mk_reason (RJSXElement (Some el_name)) loc_element in + let c = mod_reason_of_t (replace_desc_reason (RIdentifier el_name)) t in + let (_o, attributes') = jsx_mk_props cx reason c el_name attributes children in + (t, name', attributes') + in + let closingElement = + match closingElement with + | Some (c_loc, { Closing.name = cname }) -> + Some (c_loc, { Closing.name = jsx_match_closing_element name cname }) + | None -> None + in + (t, (loc, { Opening.name; selfClosing; attributes }), closingElement)) and jsx_match_closing_element = let match_identifiers o_id c_id = - let (_, t), _ = o_id in - let loc, name = c_id in - (loc, t), name + let ((_, t), _) = o_id in + let (loc, name) = c_id in + ((loc, t), name) in let rec match_member_expressions o_mexp c_mexp = - let open Ast.JSX.MemberExpression in - let _, { _object = o_obj; property = o_prop; } = o_mexp in - let loc, { _object = c_obj; property = c_prop; } = c_mexp in - let _object = match_objects o_obj c_obj in - let property = match_identifiers o_prop c_prop in - loc, { _object; property; } - + Ast.JSX.MemberExpression.( + let (_, { _object = o_obj; property = o_prop }) = o_mexp in + let (loc, { _object = c_obj; property = c_prop }) = c_mexp in + let _object = match_objects o_obj c_obj in + let property = match_identifiers o_prop c_prop in + (loc, { _object; property })) and match_objects o_obj c_obj = - match o_obj, c_obj with - | Ast.JSX.MemberExpression.Identifier o_id, - Ast.JSX.MemberExpression.Identifier c_id -> + match (o_obj, c_obj) with + | (Ast.JSX.MemberExpression.Identifier o_id, Ast.JSX.MemberExpression.Identifier c_id) -> Ast.JSX.MemberExpression.Identifier (match_identifiers o_id c_id) - | Ast.JSX.MemberExpression.MemberExpression o_exp, - Ast.JSX.MemberExpression.MemberExpression c_exp -> + | ( Ast.JSX.MemberExpression.MemberExpression o_exp, + Ast.JSX.MemberExpression.MemberExpression c_exp ) -> Ast.JSX.MemberExpression.MemberExpression (match_member_expressions o_exp c_exp) - | _, _ -> Typed_ast.JSX.MemberExpression.error_object + | (_, _) -> Tast_utils.error_mapper#jsx_member_expression_object c_obj in let match_namespaced_names o_id c_id = - let _, { Ast.JSX.NamespacedName.namespace = o_ns; name = o_name } = o_id in - let loc, { Ast.JSX.NamespacedName.namespace = c_ns; name = c_name } = c_id in + let (_, { Ast.JSX.NamespacedName.namespace = o_ns; name = o_name }) = o_id in + let (loc, { Ast.JSX.NamespacedName.namespace = c_ns; name = c_name }) = c_id in let namespace = match_identifiers o_ns c_ns in let name = match_identifiers o_name c_name in - loc, { Ast.JSX.NamespacedName.namespace; name; } + (loc, { Ast.JSX.NamespacedName.namespace; name }) in (* Transfer open types to close types *) - fun o_name c_name -> Ast.JSX.( - match o_name, c_name with - | Identifier o_id, Identifier c_id -> - Identifier (match_identifiers o_id c_id) - | NamespacedName o_nname, NamespacedName c_nname -> + fun o_name c_name -> + Ast.JSX.( + match (o_name, c_name) with + | (Identifier o_id, Identifier c_id) -> Identifier (match_identifiers o_id c_id) + | (NamespacedName o_nname, NamespacedName c_nname) -> NamespacedName (match_namespaced_names o_nname c_nname) - | MemberExpression o_mexp, MemberExpression c_mexp -> + | (MemberExpression o_mexp, MemberExpression c_mexp) -> MemberExpression 
(match_member_expressions o_mexp c_mexp) - | _, _ -> - Typed_ast.JSX.error_name - ) - -and jsx_mk_props cx reason c name attributes children = Ast.JSX.( - let is_react = Context.jsx cx = Options.Jsx_react in - let reason_props = replace_reason_const - (if is_react then RReactProps else RJSXElementProps name) - reason in - (* Use the same reason for proto and the ObjT so we can walk the proto chain + | (_, _) -> Tast_utils.error_mapper#jsx_name c_name) + +and jsx_mk_props cx reason c name attributes children = + Ast.JSX.( + let is_react = Context.jsx cx = Options.Jsx_react in + let reason_props = + replace_desc_reason + ( if is_react then + RReactProps + else + RJSXElementProps name ) + reason + in + (* Use the same reason for proto and the ObjT so we can walk the proto chain and use the root proto reason to build an error. *) - let proto = (ObjProtoT reason_props) in - (* Return an object with specified sealing. *) - let mk_object ?(sealed=false) props = - Obj_type.mk_with_proto cx reason_props ~sealed ~props proto - in - (* Copy properties from from_obj to to_obj. We should ensure that to_obj is + let proto = ObjProtoT reason_props in + (* Return an object with specified sealing. *) + let mk_object ?(sealed = false) props = + Obj_type.mk_with_proto cx reason_props ~sealed ~props proto + in + (* Copy properties from from_obj to to_obj. We should ensure that to_obj is not sealed. *) - let mk_spread from_obj to_obj ~assert_exact = - Tvar.mk_where cx reason_props (fun t -> - Flow.flow cx (to_obj, - ObjAssignToT (reason_props, from_obj, t, ObjAssign { assert_exact })); - ) - in - (* When there's no result, return a new object with specified sealing. When + let mk_spread from_obj to_obj ~assert_exact = + let use_op = Op (ObjectSpread { op = reason_of_t from_obj }) in + Tvar.mk_where cx reason_props (fun t -> + Flow.flow + cx + (to_obj, ObjAssignToT (use_op, reason_props, from_obj, t, ObjAssign { assert_exact }))) + in + (* When there's no result, return a new object with specified sealing. When there's result, copy a new object into it, sealing the result when necessary. When building an object incrementally, only the final call to this function may be with sealed=true, so we will always have an unsealed object to copy properties to. *) - let eval_props ?(sealed=false) (map, result) = - match result with - | None -> mk_object ~sealed map - | Some result -> - let result = - if not (SMap.is_empty map) - then mk_spread (mk_object map) result ~assert_exact:false - else result - in - if not sealed then result else - Tvar.mk_where cx reason_props (fun t -> - Flow.flow cx (result, ObjSealT (reason_props, t)) - ) - in - - let sealed, map, result, atts = List.fold_left (fun (sealed, map, result, atts) att -> - match att with - (* All attributes with a non-namespaced name that are not a react ignored - * attribute. *) - | Opening.Attribute (aloc, { Attribute. - name = Attribute.Identifier (id_loc, { Identifier.name = aname }); - value - }) -> - (* Get the type for the attribute's value. 
*) - let atype, value = - if Type_inference_hooks_js.dispatch_jsx_hook cx aname aloc c - then AnyT.at aloc, None + let eval_props ?(sealed = false) (map, result) = + match result with + | None -> mk_object ~sealed map + | Some result -> + let result = + if not (SMap.is_empty map) then + mk_spread (mk_object map) result ~assert_exact:false + else + result + in + if not sealed then + result else - match value with - (* *) - | Some (Attribute.Literal (loc, lit)) -> - let t = literal cx loc lit in - t, Some (Attribute.Literal ((loc, t), lit)) - (* *) - | Some (Attribute.ExpressionContainer (ec_loc, { - ExpressionContainer.expression = - ExpressionContainer.Expression (loc, e) - })) -> - let (_, t), _ as e = expression cx (loc, e) in - t, Some (Attribute.ExpressionContainer ((ec_loc, t), { - ExpressionContainer.expression = - ExpressionContainer.Expression e - })) - (* *) - | Some (Attribute.ExpressionContainer (ec_loc, _)) -> - let t = EmptyT.at aloc in - t, Some (Attribute.ExpressionContainer ( - (ec_loc, t), ExpressionContainer.({ - expression = Expression Typed_ast.(error_annot, Expression.error) - }))) - (* *) - | None -> - DefT (mk_reason RBoolean aloc, BoolT (Some true)), None - in - let p = Field (Some id_loc, atype, Neutral) in - let att = Opening.Attribute (aloc, { Attribute. - name = Attribute.Identifier ((id_loc, atype), { Identifier.name = aname }); - value - }) in - (sealed, SMap.add aname p map, result, att::atts) - (* Do nothing for namespaced attributes or ignored React attributes. *) - | Opening.Attribute _ -> - (* TODO: attributes with namespaced names *) - (sealed, map, result, atts) - (* *) - | Opening.SpreadAttribute (spread_loc, { SpreadAttribute.argument }) -> - let (_, spread), _ as argument = expression cx argument in - let obj = eval_props (map, result) in - let result = mk_spread spread obj - ~assert_exact:(not (SMap.is_empty map && result = None)) in - let att = Opening.SpreadAttribute (spread_loc, { SpreadAttribute.argument }) in - sealed, SMap.empty, Some result, att::atts - ) (true, SMap.empty, None, []) attributes in - let attributes = List.rev atts in - let map = - match children with - | [] -> map - (* We add children to the React.createElement() call for React. Not to the - * props as other JSX users may support. *) - | _ when is_react -> map - | _ -> - let arr = Tvar.mk_where cx reason (fun tout -> - let reason_op = reason in - let element_reason = - replace_reason_const Reason.inferred_union_elem_array_desc reason_op in - let elem_t = Tvar.mk cx element_reason in - Flow.resolve_spread_list - cx - ~use_op:unknown_use - ~reason_op:reason - children - (ResolveSpreadsToArrayLiteral (mk_id (), elem_t, tout)) - ) in - let p = Field (None, arr, Neutral) in + Tvar.mk_where cx reason_props (fun t -> + Flow.flow cx (result, ObjSealT (reason_props, t))) + in + let (sealed, map, result, atts) = + List.fold_left + (fun (sealed, map, result, atts) att -> + match att with + (* All attributes with a non-namespaced name that are not a react ignored + * attribute. *) + | Opening.Attribute + ( attr_loc, + { + Attribute.name = Attribute.Identifier (id_loc, { Identifier.name = aname }); + value; + } ) -> + (* Get the type for the attribute's value. 
*) + let (atype, value) = + if Type_inference_hooks_js.dispatch_jsx_hook cx aname attr_loc c then + (Unsoundness.at InferenceHooks attr_loc, None) + else + match value with + (* *) + | Some (Attribute.Literal (loc, lit)) -> + let t = literal cx loc lit in + (t, Some (Attribute.Literal ((loc, t), lit))) + (* *) + | Some + (Attribute.ExpressionContainer + ( ec_loc, + { + ExpressionContainer.expression = ExpressionContainer.Expression (loc, e); + } )) -> + let (((_, t), _) as e) = expression cx (loc, e) in + ( t, + Some + (Attribute.ExpressionContainer + ( (ec_loc, t), + { ExpressionContainer.expression = ExpressionContainer.Expression e } )) + ) + (* *) + | Some (Attribute.ExpressionContainer _ as ec) -> + let t = EmptyT.at attr_loc |> with_trust bogus_trust in + (t, Some (Tast_utils.unchecked_mapper#jsx_attribute_value ec)) + (* *) + | None -> + (DefT (mk_reason RBoolean attr_loc, bogus_trust (), BoolT (Some true)), None) + in + let p = Field (Some id_loc, atype, Polarity.Neutral) in + let att = + Opening.Attribute + ( attr_loc, + { + Attribute.name = + Attribute.Identifier ((id_loc, atype), { Identifier.name = aname }); + value; + } ) + in + (sealed, SMap.add aname p map, result, att :: atts) + (* Do nothing for namespaced attributes or ignored React attributes. *) + | Opening.Attribute _ -> + (* TODO: attributes with namespaced names *) + (sealed, map, result, atts) + (* *) + | Opening.SpreadAttribute (spread_loc, { SpreadAttribute.argument }) -> + let (((_, spread), _) as argument) = expression cx argument in + let obj = eval_props (map, result) in + let result = + mk_spread spread obj ~assert_exact:(not (SMap.is_empty map && result = None)) + in + let att = Opening.SpreadAttribute (spread_loc, { SpreadAttribute.argument }) in + (sealed, SMap.empty, Some result, att :: atts)) + (true, SMap.empty, None, []) + attributes + in + let attributes = List.rev atts in + let map = + match children with + | [] -> map + (* We add children to the React.createElement() call for React. Not to the + * props as other JSX users may support. 
*) + | _ when is_react -> map + | _ -> + let arr = + Tvar.mk_where cx reason (fun tout -> + let reason_op = reason in + let element_reason = + replace_desc_reason Reason.inferred_union_elem_array_desc reason_op + in + let elem_t = Tvar.mk cx element_reason in + Flow.resolve_spread_list + cx + ~use_op:unknown_use + ~reason_op:reason + children + (ResolveSpreadsToArrayLiteral (mk_id (), elem_t, tout))) + in + let p = Field (None, arr, Polarity.Neutral) in SMap.add "children" p map - in - let t = eval_props ~sealed (map, result) in - t, attributes -) + in + let t = eval_props ~sealed (map, result) in + (t, attributes)) and jsx_desugar cx name component_t props attributes children locs = - let loc_element, loc_opening, loc_children = locs in + let (loc_element, loc_opening, loc_children) = locs in match Context.jsx cx with | Options.Jsx_react -> - let reason = mk_reason (RReactElement (Some name)) loc_element in - let react = Env.var_ref ~lookup_mode:ForValue cx "React" loc_opening in - let children = List.map (function - | UnresolvedArg a -> a - | UnresolvedSpreadArg a -> - Flow.add_output cx Flow_error.(EUnsupportedSyntax (loc_children, SpreadArgument)); - AnyT.why (reason_of_t a) - ) children in - Tvar.mk_where cx reason (fun tvar -> - let reason_createElement = - mk_reason (RProperty (Some "createElement")) loc_element in - let use_op = Op (ReactCreateElementCall { - op = reason_createElement; - component = reason_of_t component_t; - children = loc_children; - }) in - Flow.flow cx (react, MethodT ( - use_op, - reason, - reason_createElement, - Named (reason_createElement, "createElement"), - mk_methodcalltype - react - None - ([Arg component_t; Arg props] @ List.map (fun c -> Arg c) children) - tvar, - None - )) - ) + let reason = mk_reason (RReactElement (Some name)) loc_element in + let react = Env.var_ref ~lookup_mode:ForValue cx "React" loc_opening in + let children = + Core_list.map + ~f:(function + | UnresolvedArg a -> a + | UnresolvedSpreadArg a -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc_children, SpreadArgument)); + reason_of_t a |> AnyT.error) + children + in + Tvar.mk_where cx reason (fun tvar -> + let reason_createElement = mk_reason (RProperty (Some "createElement")) loc_element in + let use_op = + Op + (ReactCreateElementCall + { + op = reason_createElement; + component = reason_of_t component_t; + children = loc_children; + }) + in + Flow.flow + cx + ( react, + MethodT + ( use_op, + reason, + reason_createElement, + Named (reason_createElement, "createElement"), + mk_methodcalltype + react + None + ([Arg component_t; Arg props] @ Core_list.map ~f:(fun c -> Arg c) children) + tvar, + None ) )) | Options.Jsx_pragma (raw_jsx_expr, jsx_expr) -> - let reason = mk_reason (RJSXFunctionCall raw_jsx_expr) loc_element in - - (* A JSX element with no attributes should pass in null as the second - * arg *) - let props = match attributes with - | [] -> NullT.at loc_opening - | _ -> props in - let argts = - [Arg component_t; Arg props] @ - (List.map (function - | UnresolvedArg c -> Arg c - | UnresolvedSpreadArg c -> SpreadArg c - ) children) in - let use_op = Op (JSXCreateElement { - op = reason; - component = reason_of_t component_t; - }) in - Ast.Expression.(match jsx_expr with - | _, Member { - Member._object; - property = Member.PropertyIdentifier (prop_loc, name); - _; - } -> - let ot = jsx_pragma_expression cx raw_jsx_expr loc_element _object in - snd (method_call cx reason ~use_op ~call_strict_arity:false prop_loc - (jsx_expr, ot, name) None argts) + let reason = 
mk_reason (RJSXFunctionCall raw_jsx_expr) loc_element in + (* A JSX element with no attributes should pass in null as the second + * arg *) + let props = + match attributes with + | [] -> NullT.at loc_opening |> with_trust bogus_trust + | _ -> props + in + let argts = + [Arg component_t; Arg props] + @ Core_list.map + ~f:(function + | UnresolvedArg c -> Arg c + | UnresolvedSpreadArg c -> SpreadArg c) + children + in + let use_op = Op (JSXCreateElement { op = reason; component = reason_of_t component_t }) in + Ast.Expression.( + (match jsx_expr with + | ( _, + Member + { + Member._object; + property = Member.PropertyIdentifier (prop_loc, { Ast.Identifier.name; comments = _ }); + _; + } ) -> + let ot = jsx_pragma_expression cx raw_jsx_expr loc_element _object in + snd + (method_call + cx + reason + ~use_op + ~call_strict_arity:false + prop_loc + (jsx_expr, ot, name) + None + argts) | _ -> - let f = jsx_pragma_expression cx raw_jsx_expr loc_element jsx_expr in - func_call cx reason ~use_op ~call_strict_arity:false f None argts - ) + let f = jsx_pragma_expression cx raw_jsx_expr loc_element jsx_expr in + func_call cx reason ~use_op ~call_strict_arity:false f None argts)) | Options.Jsx_csx -> - let reason = mk_reason (RJSXFunctionCall name) loc_element in - let use_op = Op (JSXCreateElement { - op = reason; - component = reason_of_t component_t; - }) in - func_call cx reason ~use_op ~call_strict_arity:false component_t None [Arg props] + let reason = mk_reason (RJSXFunctionCall name) loc_element in + let use_op = Op (JSXCreateElement { op = reason; component = reason_of_t component_t }) in + func_call cx reason ~use_op ~call_strict_arity:false component_t None [Arg props] (* The @jsx pragma specifies a left hand side expression EXPR such that * @@ -5125,103 +5329,109 @@ and jsx_desugar cx name component_t props attributes children locs = * We can cover almost all the cases by just explicitly handling identifiers, * since the common error is that the identifier is not in scope. 
*) -and jsx_pragma_expression cx raw_jsx_expr loc = Ast.Expression.( - function - | _, Identifier (_, name) -> +and jsx_pragma_expression cx raw_jsx_expr loc = + Ast.Expression.( + function + | (_, Identifier (_, { Ast.Identifier.name; comments = _ })) -> let desc = RJSXIdentifier (raw_jsx_expr, name) in Env.var_ref ~lookup_mode:ForValue cx name loc ~desc - | expr -> + | expr -> (* Oh well, we tried *) - let (_, t), _ = expression cx expr in - t -) - -and jsx_body cx (loc, child) = Ast.JSX.( - match child with - | Element e -> - let t, e = jsx cx e in - Some (UnresolvedArg t), (loc, Element e) - | Fragment f -> - let t, f = jsx_fragment cx f in - Some (UnresolvedArg t), (loc, Fragment f) - | ExpressionContainer ec -> - let open ExpressionContainer in - let { expression = ex } = ec in - let unresolved_param, ex = - match ex with - | Expression e -> - let (_, t), _ as e = expression cx e in - UnresolvedArg t, Expression e - | EmptyExpression loc -> - let reason = mk_reason (RCustom "empty jsx body") loc in - let t = DefT (reason, EmptyT) in - UnresolvedArg t, EmptyExpression loc - in - Some unresolved_param, (loc, ExpressionContainer { expression = ex }) - | SpreadChild expr -> - let (_, t), _ as e = expression cx expr in - Some (UnresolvedSpreadArg t), (loc, SpreadChild e) - | Text { Text.value; raw; } -> - let unresolved_param_opt = - match jsx_trim_text loc value with - | Some c -> Some (UnresolvedArg c) - | None -> None - in - unresolved_param_opt, (loc, Text { Text.value; raw; }) -) + let ((_, t), _) = expression cx expr in + t) + +and jsx_body cx (loc, child) = + Ast.JSX.( + let make_trust = Context.trust_constructor cx in + match child with + | Element e -> + let (t, e) = jsx cx loc e in + (Some (UnresolvedArg t), (loc, Element e)) + | Fragment f -> + let (t, f) = jsx_fragment cx loc f in + (Some (UnresolvedArg t), (loc, Fragment f)) + | ExpressionContainer ec -> + ExpressionContainer.( + let { expression = ex } = ec in + let (unresolved_param, ex) = + match ex with + | Expression e -> + let (((_, t), _) as e) = expression cx e in + (Some (UnresolvedArg t), Expression e) + | EmptyExpression -> (None, EmptyExpression) + in + (unresolved_param, (loc, ExpressionContainer { expression = ex }))) + | SpreadChild expr -> + let (((_, t), _) as e) = expression cx expr in + (Some (UnresolvedSpreadArg t), (loc, SpreadChild e)) + | Text { Text.value; raw } -> + let unresolved_param_opt = + match jsx_trim_text make_trust loc value with + | Some c -> Some (UnresolvedArg c) + | None -> None + in + (unresolved_param_opt, (loc, Text { Text.value; raw }))) -and jsx_trim_text loc value = - match (Utils_jsx.trim_jsx_text loc value) with +and jsx_trim_text make_trust loc value = + match Utils_jsx.trim_jsx_text (ALoc.to_loc_exn loc) value with | Some (loc, trimmed) -> - Some (DefT (mk_reason RJSXText loc, StrT (Type.Literal (None, trimmed)))) + Some + (DefT + ( mk_reason RJSXText (loc |> ALoc.of_loc), + make_trust (), + StrT (Type.Literal (None, trimmed)) )) | None -> None -and jsx_title_member_to_string (_, member) = Ast.JSX.MemberExpression.( - let (_, { Ast.JSX.Identifier.name }) = member.property in - match member._object with - | MemberExpression member -> (jsx_title_member_to_string member) ^ "." ^ name - | Identifier (_, { Ast.JSX.Identifier.name = obj }) -> obj ^ "." 
^ name -) +and jsx_title_member_to_string (_, member) = + Ast.JSX.MemberExpression.( + let (_, { Ast.JSX.Identifier.name }) = member.property in + match member._object with + | MemberExpression member -> jsx_title_member_to_string member ^ "." ^ name + | Identifier (_, { Ast.JSX.Identifier.name = obj }) -> obj ^ "." ^ name) + +and jsx_title_namespaced_name_to_string namespaced_name = + let (_, { Ast.JSX.NamespacedName.namespace = (_, namespace); name = (_, name) }) = + namespaced_name + in + namespace.Ast.JSX.Identifier.name ^ name.Ast.JSX.Identifier.name and jsx_title_member_to_expression member = let (mloc, member) = member in - let _object = Ast.JSX.MemberExpression.( - match member._object with - | MemberExpression member -> jsx_title_member_to_expression member - | Identifier (loc, { Ast.JSX.Identifier.name }) -> - (loc, Ast.Expression.Identifier (loc, name)) - ) in - let property = Ast.JSX.MemberExpression.( - let (loc, { Ast.JSX.Identifier.name }) = member.property in - (loc, name) - ) in - Ast.Expression.Member.( - (mloc, Ast.Expression.Member { - _object; - property = PropertyIdentifier property; - computed = false; - }) - ) + let _object = + Ast.JSX.MemberExpression.( + match member._object with + | MemberExpression member -> jsx_title_member_to_expression member + | Identifier (loc, { Ast.JSX.Identifier.name }) -> + (loc, Ast.Expression.Identifier (loc, mk_ident ~comments:None name))) + in + let property = + Ast.JSX.MemberExpression.( + let (loc, { Ast.JSX.Identifier.name }) = member.property in + (loc, mk_ident ~comments:None name)) + in + Ast.Expression.Member. + (mloc, Ast.Expression.Member { _object; property = PropertyIdentifier property }) (* reverses jsx_title_member_to_expression *) -and expression_to_jsx_title_member = function +and expression_to_jsx_title_member loc member = + match member with | Ast.Expression.Member.( - Ast.Expression.Member { - _object = (mloc, _), obj_expr; - property = PropertyIdentifier (pannot, name); - computed = false; - }) -> - let _object = match obj_expr with - | Ast.Expression.Identifier ((id_loc, t), name) -> - Ast.JSX.MemberExpression.Identifier ((id_loc, t), { Ast.JSX.Identifier.name }) - | _ -> - let jsx_expr = expression_to_jsx_title_member obj_expr in - Ast.JSX.MemberExpression.MemberExpression jsx_expr + Ast.Expression.Member + { + _object = ((mloc, _), obj_expr); + property = PropertyIdentifier (pannot, { Ast.Identifier.name; comments = _ }); + }) -> + let _object = + match obj_expr with + | Ast.Expression.Identifier ((id_loc, t), { Ast.Identifier.name; comments = _ }) -> + Some (Ast.JSX.MemberExpression.Identifier ((id_loc, t), { Ast.JSX.Identifier.name })) + | _ -> + expression_to_jsx_title_member mloc obj_expr + |> Option.map ~f:(fun e -> Ast.JSX.MemberExpression.MemberExpression e) in - let property = pannot, { Ast.JSX.Identifier.name = name } in - mloc, Ast.JSX.MemberExpression.{ _object; property; } - | _ -> - Typed_ast.JSX.MemberExpression.error + let property = (pannot, { Ast.JSX.Identifier.name }) in + Option.map _object ~f:(fun _object -> (loc, Ast.JSX.MemberExpression.{ _object; property })) + | _ -> None (* Given an expression found in a test position, notices certain type refinements which follow from the test's success or failure, @@ -5233,631 +5443,707 @@ and expression_to_jsx_title_member = function - map of unrefined types for lvalues found in refinement maps - typed AST of the test expression *) -and predicates_of_condition cx e = Ast.(Expression.( - (* refinement key if expr is eligible, along with unrefined type 
*) - let refinable_lvalue e = - Refinement.key e, condition cx e - in - - (* package empty result (no refinements derived) from test type *) - let empty_result test_tast = - (test_tast, Key_map.empty, Key_map.empty, Key_map.empty) - in - - let add_predicate key unrefined_t pred sense (test_tast, ps, notps, tmap) = - let p, notp = if sense - then pred, NotP pred - else NotP pred, pred - in - (test_tast, - Key_map.add key p ps, - Key_map.add key notp notps, - Key_map.add key unrefined_t tmap) - in - - let flow_eqt ~strict loc (t1, t2) = - if not strict then - let reason = mk_reason (RCustom "non-strict equality comparison") loc in - Flow.flow cx (t1, EqT (reason, false, t2)) - in - - (* package result quad from test typed ast, refi key, unrefined type, - predicate, and predicate's truth sense *) - let result test_tast key unrefined_t pred sense = - empty_result test_tast |> add_predicate key unrefined_t pred sense - in - - (* a wrapper around `condition` (which is a wrapper around `expression`) that - evaluates `expr`. if this is a sentinel property check (determined by - a strict equality check against a member expression `_object.prop_name`), - then also returns the refinement of `_object`. - - this is used by other tests such as `bool_test` such that if given - `foo.bar === false`, `foo.bar` is refined to be `false` (by `bool_test`) - and `foo` is refined to eliminate branches that don't have a `false` bar - property (by this function). *) - let condition_of_maybe_sentinel cx ~sense ~strict expr val_t = - match strict, expr with - | true, - (expr_loc, Member { - Member._object; - property = Member.PropertyIdentifier (prop_loc, prop_name); - computed; - }) -> - (* use `expression` instead of `condition` because `_object` is the object - in a member expression; if it itself is a member expression, it must - exist (so ~is_cond:false). e.g. `foo.bar.baz` shows up here as - `_object = foo.bar`, `prop_name = baz`, and `bar` must exist. *) - let (_, obj_t), _ as _object_ast = expression cx _object in - - let prop_reason = mk_reason (RProperty (Some prop_name)) prop_loc in - let expr_reason = mk_reason (RProperty (Some prop_name)) expr_loc in - let prop_t = match Refinement.get cx expr expr_loc with - | Some t -> t - | None -> - if Type_inference_hooks_js.dispatch_member_hook cx - prop_name prop_loc obj_t - then AnyT.at prop_loc - else - let use_op = Op (GetProperty prop_reason) in - get_prop ~is_cond:true cx - expr_reason ~use_op obj_t (prop_reason, prop_name) - in - let id_info = prop_name, prop_t, Type_table.PropertyAccess obj_t in - Type_table.set_info prop_loc id_info (Context.type_table cx); - - (* refine the object (`foo.bar` in the example) based on the prop. *) - let refinement = match Refinement.key _object with - | None -> None - | Some name -> - let pred = LeftP (SentinelProp prop_name, val_t) in - Some (name, obj_t, pred, sense) - in - - (* since we never called `expression cx expr`, we have to add to the - type table ourselves *) - Type_table.set (Context.type_table cx) expr_loc prop_t; - let property = Member.PropertyIdentifier ((prop_loc, prop_t), prop_name) in - - ( (expr_loc, prop_t), - Member { Member. 
- _object = _object_ast; - property; - computed; - } - ), refinement - | _ -> - condition cx expr, None - in - - (* inspect a null equality test *) - let null_test loc ~sense ~strict e null_t reconstruct_ast = - let ((_, t), _ as e_ast), sentinel_refinement = - condition_of_maybe_sentinel cx ~sense ~strict e null_t in - let ast = reconstruct_ast e_ast in - flow_eqt ~strict loc (t, null_t); - let out = match Refinement.key e with - | None -> empty_result ((loc, BoolT.at loc), ast) - | Some name -> - let pred = if strict then NullP else MaybeP in - result ((loc, BoolT.at loc), ast) name t pred sense - in - match sentinel_refinement with - | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense - | None -> out - in - - let void_test loc ~sense ~strict e void_t reconstruct_ast = - (* if `void_t` is not a VoidT, make it one so that the sentinel test has a - literal type to test against. It's not appropriate to call `void_test` - with a `void_t` that you don't want to treat like an actual `void`! *) - let void_t = match void_t with - | DefT (_, VoidT) -> void_t - | _ -> VoidT.why (reason_of_t void_t) - in - let ((_, t), _ as e_ast), sentinel_refinement = - condition_of_maybe_sentinel cx ~sense ~strict e void_t in - let ast = reconstruct_ast e_ast in - flow_eqt ~strict loc (t, void_t); - let out = match Refinement.key e with - | None -> empty_result ((loc, BoolT.at loc), ast) - | Some name -> - let pred = if strict then VoidP else MaybeP in - result ((loc, BoolT.at loc), ast) name t pred sense - in - match sentinel_refinement with - | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense - | None -> out - in - - (* inspect an undefined equality test *) - let undef_test loc ~sense ~strict e void_t reconstruct_ast = - (* if `undefined` isn't redefined in scope, then we assume it is `void` *) - if Env.is_global_var cx "undefined" - then void_test loc ~sense ~strict e void_t reconstruct_ast - else - let e_ast = expression cx e in - empty_result ((loc, BoolT.at loc), reconstruct_ast e_ast) - in - - let literal_test loc ~strict ~sense expr val_t pred reconstruct_ast = - let ((_, t), _ as expr_ast), sentinel_refinement = - condition_of_maybe_sentinel cx ~sense ~strict expr val_t in - let ast = reconstruct_ast expr_ast in - flow_eqt ~strict loc (t, val_t); - let refinement = if strict then Refinement.key expr else None in - let out = match refinement with - | Some name -> result ((loc, BoolT.at loc), ast) name t pred sense - | None -> empty_result ((loc, BoolT.at loc), ast) - in - match sentinel_refinement with - | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense - | None -> out - in - - (* inspect a typeof equality test *) - let typeof_test loc sense arg typename str_loc reconstruct_ast = - let bool = BoolT.at loc in - match refinable_lvalue arg with - | Some name, ((_, t), _ as arg) -> - let pred = match typename with - | "boolean" -> Some BoolP - | "function" -> Some FunP - | "number" -> Some NumP - | "object" -> Some ObjP - | "string" -> Some StrP - | "undefined" -> Some VoidP - | _ -> None +and predicates_of_condition cx e = + Ast.( + Expression.( + (* refinement key if expr is eligible, along with unrefined type *) + let refinable_lvalue e = (Refinement.key e, condition cx e) in + (* package empty result (no refinements derived) from test type *) + let empty_result test_tast = (test_tast, Key_map.empty, Key_map.empty, Key_map.empty) in + let add_predicate key unrefined_t pred sense (test_tast, ps, notps, tmap) = + let (p, notp) = + if sense 
then + (pred, NotP pred) + else + (NotP pred, pred) in - begin match pred with - | Some pred -> result ((loc, bool), reconstruct_ast arg) name t pred sense - | None -> - Flow.add_output cx Flow_error.(EInvalidTypeof (str_loc, typename)); - empty_result ((loc, bool), reconstruct_ast arg) - end - | None, arg -> empty_result ((loc, bool), reconstruct_ast arg) - in - - let sentinel_prop_test loc ~sense ~strict expr val_t reconstruct_ast = - let ((_, t), _ as expr_ast), sentinel_refinement = - condition_of_maybe_sentinel cx ~sense ~strict expr val_t in - let ast = reconstruct_ast expr_ast in - flow_eqt ~strict loc (t, val_t); - let out = empty_result ((loc, BoolT.at loc), ast) in - match sentinel_refinement with - | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense - | None -> out - in - - let eq_test loc ~sense ~strict left right reconstruct_ast = - match left, right with - (* typeof expr ==/=== string *) - (* this must happen before the case below involving Literal.String in order - to match anything. *) - | (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; }), - (str_loc, (Expression.Literal { Literal.value = Literal.String s; _ } as lit_exp)) -> - typeof_test loc sense argument s str_loc (fun argument -> - reconstruct_ast ( - (typeof_loc, StrT.at typeof_loc), - Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; } - ) ((str_loc, StrT.at str_loc), lit_exp) - ) - | (str_loc, (Expression.Literal { Literal.value = Literal.String s; _ } as lit_exp)), - (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; }) -> - typeof_test loc sense argument s str_loc (fun argument -> - reconstruct_ast ((str_loc, StrT.at str_loc), lit_exp) ( - (typeof_loc, StrT.at typeof_loc), - Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; } - ) - ) - | (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; }), - (str_loc, (Expression.TemplateLiteral { - TemplateLiteral.quasis = [_, { - TemplateLiteral.Element.value = { - TemplateLiteral.Element.cooked = s; _ - }; _ - }]; - expressions = []; - } as lit_exp)) -> - typeof_test loc sense argument s str_loc (fun argument -> - reconstruct_ast ( - (typeof_loc, StrT.at typeof_loc), - Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; } - ) ((str_loc, StrT.at str_loc), lit_exp) - ) - | (str_loc, (Expression.TemplateLiteral { - TemplateLiteral.quasis = [_, { - TemplateLiteral.Element.value = { - TemplateLiteral.Element.cooked = s; _ - }; _ - }]; - expressions = []; - } as lit_exp)), - (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; }) -> - typeof_test loc sense argument s str_loc (fun argument -> - reconstruct_ast ((str_loc, StrT.at str_loc), lit_exp) ( - (typeof_loc, StrT.at typeof_loc), - Expression.Unary { Unary.operator = Unary.Typeof; argument; prefix; } - ) - ) - - (* special case equality relations involving booleans *) - | (_, Expression.Literal { Literal.value = Literal.Boolean lit; _}) as value, - expr -> - let (_, val_t), _ as val_ast = expression cx value in - literal_test loc ~sense ~strict expr val_t (SingletonBoolP lit) - (fun expr -> reconstruct_ast val_ast expr) - | expr, - ((_, Expression.Literal { Literal.value = Literal.Boolean lit; _}) as value) -> - let (_, val_t), _ as val_ast = expression cx value in - literal_test loc ~sense ~strict expr val_t (SingletonBoolP lit) - (fun expr -> reconstruct_ast expr val_ast) - - (* special case equality relations involving strings *) - 
| ((lit_loc, Expression.Literal { Literal.value = Literal.String lit; _}) as value), - expr - | ((_, Expression.TemplateLiteral { - TemplateLiteral.quasis = [lit_loc, { - TemplateLiteral.Element.value = { - TemplateLiteral.Element.cooked = lit; _ - }; _ - }]; _ - }) as value), expr -> - let (_, val_t), _ as val_ast = expression cx value in - literal_test loc ~sense ~strict expr val_t (SingletonStrP (lit_loc, sense, lit)) - (fun expr -> reconstruct_ast val_ast expr) - | expr, - ((lit_loc, Expression.Literal { Literal.value = Literal.String lit; _}) as value) - | expr, ((_, Expression.TemplateLiteral { - TemplateLiteral.quasis = [lit_loc, { - TemplateLiteral.Element.value = { - TemplateLiteral.Element.cooked = lit; _ - }; _ - }]; _ - }) as value) -> - let (_, val_t), _ as val_ast = expression cx value in - literal_test loc ~sense ~strict expr val_t (SingletonStrP (lit_loc, sense, lit)) - (fun expr -> reconstruct_ast expr val_ast) - - (* special case equality relations involving numbers *) - | ((lit_loc, Expression.Literal { Literal.value = Literal.Number lit; raw }) as value), - expr -> - let (_, val_t), _ as val_ast = expression cx value in - literal_test loc ~sense ~strict expr val_t (SingletonNumP (lit_loc, sense, (lit, raw))) - (fun expr -> reconstruct_ast val_ast expr) - | expr, - ((lit_loc, Expression.Literal { Literal.value = Literal.Number lit; raw }) as value) -> - let (_, val_t), _ as val_ast = expression cx value in - literal_test loc ~sense ~strict expr val_t (SingletonNumP (lit_loc, sense, (lit, raw))) - (fun expr -> reconstruct_ast expr val_ast) - - (* TODO: add Type.predicate variant that tests number equality *) - - (* expr op null *) - | (_, Expression.Literal { Literal.value = Literal.Null; _ } as null), expr -> - let (_, null_t), _ as null_ast = expression cx null in - null_test loc ~sense ~strict expr null_t - (fun expr -> reconstruct_ast null_ast expr) - | expr, (_, Expression.Literal { Literal.value = Literal.Null; _ } as null) -> - let (_, null_t), _ as null_ast = expression cx null in - null_test loc ~sense ~strict expr null_t - (fun expr -> reconstruct_ast expr null_ast) - - (* expr op undefined *) - | (_, Identifier (_, "undefined") as void), expr -> - let (_, void_t), _ as void_ast = expression cx void in - undef_test loc ~sense ~strict expr void_t - (fun expr -> reconstruct_ast void_ast expr) - | expr, (_, Identifier (_, "undefined") as void) -> - let (_, void_t), _ as void_ast = expression cx void in - undef_test loc ~sense ~strict expr void_t - (fun expr -> reconstruct_ast expr void_ast) - - (* expr op void(...) 
*) - | (_, Unary ({ Unary.operator = Unary.Void; _ }) as void), expr -> - let (_, void_t), _ as void_ast = expression cx void in - void_test loc ~sense ~strict expr void_t - (fun expr -> reconstruct_ast void_ast expr) - | expr, (_, Unary ({ Unary.operator = Unary.Void; _ }) as void) -> - let (_, void_t), _ as void_ast = expression cx void in - void_test loc ~sense ~strict expr void_t - (fun expr -> reconstruct_ast expr void_ast) - - (* fallback case for equality relations involving sentinels (this should be - lower priority since it refines the object but not the property) *) - | (_, Expression.Member _ as expr), value -> - let (_, value_t), _ as value_ast = expression cx value in - sentinel_prop_test loc ~sense ~strict expr value_t - (fun expr -> reconstruct_ast expr value_ast) - | value, (_, Expression.Member _ as expr) -> - let (_, value_t), _ as value_ast = expression cx value in - sentinel_prop_test loc ~sense ~strict expr value_t - (fun expr -> reconstruct_ast value_ast expr) - - (* for all other cases, walk the AST but always return bool *) - | expr, value -> - let (_, t1), _ as expr = expression cx expr in - let (_, t2), _ as value = expression cx value in - flow_eqt ~strict loc (t1, t2); - let ast = reconstruct_ast expr value in - empty_result ((loc, BoolT.at loc), ast) - in - - let mk_and map1 map2 = Key_map.merge - (fun _ p1 p2 -> match (p1,p2) with - | (None, None) -> None - | (Some p, None) - | (None, Some p) -> Some p - | (Some p1, Some p2) -> Some (AndP(p1,p2)) - ) - map1 map2 - in - - let mk_or map1 map2 = Key_map.merge - (fun _ p1 p2 -> match (p1,p2) with - | (None, None) -> None - | (Some _, None) - | (None, Some _) -> None - | (Some p1, Some p2) -> Some (OrP(p1,p2)) - ) - map1 map2 - in - - (* main *) - match e with - - (* member expressions *) - | loc, Member { - Member._object; - property = Member.PropertyIdentifier (prop_loc, prop_name); - computed; - } -> - let (_, obj_t), _ as _object_ast = match _object with - | super_loc, Super -> - let t = super_ cx super_loc in - let id_info = "super", t, Type_table.Other in - Type_table.set_info super_loc id_info (Context.type_table cx); - (super_loc, t), Super - - | _ -> - (* use `expression` instead of `condition` because `_object` is the - object in a member expression; if it itself is a member expression, - it must exist (so ~is_cond:false). e.g. `foo.bar.baz` shows up here - as `_object = foo.bar`, `prop_name = baz`, and `bar` must exist. 
*) - expression cx _object + ( test_tast, + Key_map.add key p ps, + Key_map.add key notp notps, + Key_map.add key unrefined_t tmap ) in - let expr_reason = mk_reason (RProperty (Some prop_name)) loc in - let prop_reason = mk_reason (RProperty (Some prop_name)) prop_loc in - let t = match Refinement.get cx e loc with - | Some t -> t - | None -> - if Type_inference_hooks_js.dispatch_member_hook cx - prop_name prop_loc obj_t - then AnyT.at prop_loc - else - let use_op = Op (GetProperty (mk_expression_reason e)) in - get_prop ~is_cond:true cx - expr_reason ~use_op obj_t (prop_reason, prop_name) + let flow_eqt ~strict loc (t1, t2) = + if not strict then + let reason = mk_reason (RCustom "non-strict equality comparison") loc in + Flow.flow cx (t1, EqT (reason, false, t2)) + in + (* package result quad from test typed ast, refi key, unrefined type, + predicate, and predicate's truth sense *) + let result test_tast key unrefined_t pred sense = + empty_result test_tast |> add_predicate key unrefined_t pred sense in - let property = Member.PropertyIdentifier ((prop_loc, t), prop_name) in - let ast = (loc, t), Member { Member._object = _object_ast; property; computed; } in + (* a wrapper around `condition` (which is a wrapper around `expression`) that + evaluates `expr`. if this is a sentinel property check (determined by + a strict equality check against a member expression `_object.prop_name`), + then also returns the refinement of `_object`. - (* since we never called `expression cx e`, we have to add to the + this is used by other tests such as `bool_test` such that if given + `foo.bar === false`, `foo.bar` is refined to be `false` (by `bool_test`) + and `foo` is refined to eliminate branches that don't have a `false` bar + property (by this function). *) + let condition_of_maybe_sentinel cx ~sense ~strict expr val_t = + match (strict, expr) with + | ( true, + ( expr_loc, + Member + { + Member._object; + property = + ( Member.PropertyIdentifier + (prop_loc, { Ast.Identifier.name = prop_name; comments = _ }) + | Member.PropertyExpression + ( prop_loc, + Ast.Expression.Literal + { Ast.Literal.value = Ast.Literal.String prop_name; _ } ) ) as property; + } ) ) -> + (* use `expression` instead of `condition` because `_object` is the object + in a member expression; if it itself is a member expression, it must + exist (so ~is_cond:false). e.g. `foo.bar.baz` shows up here as + `_object = foo.bar`, `prop_name = baz`, and `bar` must exist. *) + let (((_, obj_t), _) as _object_ast) = expression cx _object in + let prop_reason = mk_reason (RProperty (Some prop_name)) prop_loc in + let expr_reason = mk_expression_reason expr in + let prop_t = + match Refinement.get cx expr expr_loc with + | Some t -> t + | None -> + if Type_inference_hooks_js.dispatch_member_hook cx prop_name prop_loc obj_t then + Unsoundness.at InferenceHooks prop_loc + else + let use_op = Op (GetProperty prop_reason) in + get_prop ~is_cond:true cx expr_reason ~use_op obj_t (prop_reason, prop_name) + in + (* refine the object (`foo.bar` in the example) based on the prop. 
*) + let refinement = + match Refinement.key _object with + | None -> None + | Some name -> + let pred = LeftP (SentinelProp prop_name, val_t) in + Some (name, obj_t, pred, sense) + in + (* since we never called `expression cx expr`, we have to add to the type table ourselves *) - Type_table.set (Context.type_table cx) loc t; - let id_info = prop_name, t, Type_table.PropertyAccess obj_t in - Type_table.set_info prop_loc id_info (Context.type_table cx); - - let out = match Refinement.key e with - | Some name -> result ast name t (ExistsP (Some loc)) true - | None -> empty_result ast + let m = new loc_mapper prop_t in + let property = m#member_property property in + (((expr_loc, prop_t), Member { Member._object = _object_ast; property }), refinement) + | _ -> (condition cx expr, None) in - - (* refine the object (`foo.bar` in the example) based on the prop. *) - begin match Refinement.key _object with - | Some name -> - let predicate = PropExistsP (expr_reason, prop_name, Some prop_loc) in - out |> add_predicate name obj_t predicate true - | None -> - out - end - - (* assignments *) - | _, Assignment { Assignment.left = loc, Ast.Pattern.Identifier id; _ } -> ( - let (_, expr), _ as tast = expression cx e in - let id = id.Ast.Pattern.Identifier.name in - match refinable_lvalue (loc, Ast.Expression.Identifier id) with - | Some name, _ -> result tast name expr (ExistsP (Some loc)) true - | None, _ -> empty_result tast - ) - - (* expr instanceof t *) - | loc, Binary { Binary.operator = Binary.Instanceof; left; right } -> ( - let bool = BoolT.at loc in - let name_opt, ((_, left_t), _ as left_ast) = refinable_lvalue left in - let (_, right_t), _ as right_ast = expression cx right in - let ast = - (loc, bool), - Binary { Binary. - operator = Binary.Instanceof; - left = left_ast; - right = right_ast; - } + (* inspect a null equality test *) + let null_test loc ~sense ~strict e null_t reconstruct_ast = + let ((((_, t), _) as e_ast), sentinel_refinement) = + condition_of_maybe_sentinel cx ~sense ~strict e null_t + in + let ast = reconstruct_ast e_ast in + flow_eqt ~strict loc (t, null_t); + let out = + match Refinement.key e with + | None -> + let t_out = BoolT.at loc |> with_trust bogus_trust in + empty_result ((loc, t_out), ast) + | Some name -> + let pred = + if strict then + NullP + else + MaybeP + in + let t_out = BoolT.at loc |> with_trust bogus_trust in + result ((loc, t_out), ast) name t pred sense + in + match sentinel_refinement with + | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense + | None -> out + in + let void_test loc ~sense ~strict e void_t reconstruct_ast = + (* if `void_t` is not a VoidT, make it one so that the sentinel test has a + literal type to test against. It's not appropriate to call `void_test` + with a `void_t` that you don't want to treat like an actual `void`! 
*) + let void_t = + match void_t with + | DefT (_, _, VoidT) -> void_t + | _ -> VoidT.why (reason_of_t void_t) |> with_trust bogus_trust + in + let ((((_, t), _) as e_ast), sentinel_refinement) = + condition_of_maybe_sentinel cx ~sense ~strict e void_t + in + let ast = reconstruct_ast e_ast in + flow_eqt ~strict loc (t, void_t); + let out = + match Refinement.key e with + | None -> + let t_out = BoolT.at loc |> with_trust bogus_trust in + empty_result ((loc, t_out), ast) + | Some name -> + let pred = + if strict then + VoidP + else + MaybeP + in + let t_out = BoolT.at loc |> with_trust bogus_trust in + result ((loc, t_out), ast) name t pred sense + in + match sentinel_refinement with + | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense + | None -> out + in + (* inspect an undefined equality test *) + let undef_test loc ~sense ~strict e void_t reconstruct_ast = + (* if `undefined` isn't redefined in scope, then we assume it is `void` *) + if Env.is_global_var cx "undefined" then + void_test loc ~sense ~strict e void_t reconstruct_ast + else + let e_ast = expression cx e in + empty_result ((loc, BoolT.at loc |> with_trust bogus_trust), reconstruct_ast e_ast) + in + let literal_test loc ~strict ~sense expr val_t pred reconstruct_ast = + let ((((_, t), _) as expr_ast), sentinel_refinement) = + condition_of_maybe_sentinel cx ~sense ~strict expr val_t + in + let ast = reconstruct_ast expr_ast in + flow_eqt ~strict loc (t, val_t); + let refinement = + if strict then + Refinement.key expr + else + None + in + let out = + match refinement with + | Some name -> + let t_out = BoolT.at loc |> with_trust bogus_trust in + result ((loc, t_out), ast) name t pred sense + | None -> + let t = BoolT.at loc |> with_trust bogus_trust in + empty_result ((loc, t), ast) + in + match sentinel_refinement with + | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense + | None -> out + in + (* inspect a typeof equality test *) + let typeof_test loc sense arg typename str_loc reconstruct_ast = + let bool = BoolT.at loc |> with_trust bogus_trust in + match refinable_lvalue arg with + | (Some name, (((_, t), _) as arg)) -> + let pred = + match typename with + | "boolean" -> Some BoolP + | "function" -> Some FunP + | "number" -> Some NumP + | "object" -> Some ObjP + | "string" -> Some StrP + | "symbol" -> Some SymbolP + | "undefined" -> Some VoidP + | _ -> None + in + begin + match pred with + | Some pred -> result ((loc, bool), reconstruct_ast arg) name t pred sense + | None -> + Flow.add_output cx Error_message.(EInvalidTypeof (str_loc, typename)); + empty_result ((loc, bool), reconstruct_ast arg) + end + | (None, arg) -> empty_result ((loc, bool), reconstruct_ast arg) + in + let sentinel_prop_test loc ~sense ~strict expr val_t reconstruct_ast = + let ((((_, t), _) as expr_ast), sentinel_refinement) = + condition_of_maybe_sentinel cx ~sense ~strict expr val_t + in + let ast = reconstruct_ast expr_ast in + flow_eqt ~strict loc (t, val_t); + let t_out = BoolT.at loc |> with_trust bogus_trust in + let out = empty_result ((loc, t_out), ast) in + match sentinel_refinement with + | Some (name, obj_t, p, sense) -> out |> add_predicate name obj_t p sense + | None -> out + in + let eq_test loc ~sense ~strict left right reconstruct_ast = + let is_number_literal node = + match node with + | Expression.Literal { Literal.value = Literal.Number _; _ } + | Expression.Unary + { + Unary.operator = Unary.Minus; + argument = (_, Expression.Literal { Literal.value = Literal.Number _; _ }); + 
comments = _; + } -> + true + | _ -> false + in + let extract_number_literal node = + match node with + | Expression.Literal { Literal.value = Literal.Number lit; raw; comments = _ } -> + (lit, raw) + | Expression.Unary + { + Unary.operator = Unary.Minus; + argument = (_, Expression.Literal { Literal.value = Literal.Number lit; raw; _ }); + comments = _; + } -> + (-.lit, "-" ^ raw) + | _ -> Utils_js.assert_false "not a number literal" + in + match (left, right) with + (* typeof expr ==/=== string *) + (* this must happen before the case below involving Literal.String in order + to match anything. *) + | ( (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; comments }), + (str_loc, (Expression.Literal { Literal.value = Literal.String s; _ } as lit_exp)) ) -> + typeof_test loc sense argument s str_loc (fun argument -> + let left_t = StrT.at typeof_loc |> with_trust bogus_trust in + let left = + ( (typeof_loc, left_t), + Expression.Unary { Unary.operator = Unary.Typeof; argument; comments } ) + in + let right_t = StrT.at str_loc |> with_trust bogus_trust in + let right = ((str_loc, right_t), lit_exp) in + reconstruct_ast left right) + | ( (str_loc, (Expression.Literal { Literal.value = Literal.String s; _ } as lit_exp)), + (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; comments }) ) + -> + typeof_test loc sense argument s str_loc (fun argument -> + let left_t = StrT.at str_loc |> with_trust bogus_trust in + let left = ((str_loc, left_t), lit_exp) in + let right_t = StrT.at typeof_loc |> with_trust bogus_trust in + let right = + ( (typeof_loc, right_t), + Expression.Unary { Unary.operator = Unary.Typeof; argument; comments } ) + in + reconstruct_ast left right) + | ( (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; comments }), + ( str_loc, + ( Expression.TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = s; _ }; + _; + } ); + ]; + expressions = []; + } as lit_exp ) ) ) -> + typeof_test loc sense argument s str_loc (fun argument -> + let left_t = StrT.at typeof_loc |> with_trust bogus_trust in + let left = + ( (typeof_loc, left_t), + Expression.Unary { Unary.operator = Unary.Typeof; argument; comments } ) + in + let right_t = StrT.at str_loc |> with_trust bogus_trust in + let right = ((str_loc, right_t), lit_exp) in + reconstruct_ast left right) + | ( ( str_loc, + ( Expression.TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( _, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = s; _ }; + _; + } ); + ]; + expressions = []; + } as lit_exp ) ), + (typeof_loc, Expression.Unary { Unary.operator = Unary.Typeof; argument; comments }) ) + -> + typeof_test loc sense argument s str_loc (fun argument -> + let left_t = StrT.at str_loc |> with_trust bogus_trust in + let left = ((str_loc, left_t), lit_exp) in + let right_t = StrT.at typeof_loc |> with_trust bogus_trust in + let right = + ( (typeof_loc, right_t), + Expression.Unary { Unary.operator = Unary.Typeof; argument; comments } ) + in + reconstruct_ast left right) + (* special case equality relations involving booleans *) + | ( ((lit_loc, Expression.Literal { Literal.value = Literal.Boolean lit; _ }) as value), + expr ) -> + let (((_, val_t), _) as val_ast) = expression cx value in + literal_test + loc + ~sense + ~strict + expr + val_t + (SingletonBoolP (lit_loc, lit)) + (fun expr -> reconstruct_ast val_ast expr) + | ( expr, + ((lit_loc, Expression.Literal { Literal.value 
= Literal.Boolean lit; _ }) as value) ) + -> + let (((_, val_t), _) as val_ast) = expression cx value in + literal_test + loc + ~sense + ~strict + expr + val_t + (SingletonBoolP (lit_loc, lit)) + (fun expr -> reconstruct_ast expr val_ast) + (* special case equality relations involving strings *) + | (((lit_loc, Expression.Literal { Literal.value = Literal.String lit; _ }) as value), expr) + | ( ( ( _, + Expression.TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( lit_loc, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = lit; _ }; + _; + } ); + ]; + _; + } ) as value ), + expr ) -> + let (((_, val_t), _) as val_ast) = expression cx value in + literal_test + loc + ~sense + ~strict + expr + val_t + (SingletonStrP (lit_loc, sense, lit)) + (fun expr -> reconstruct_ast val_ast expr) + | (expr, ((lit_loc, Expression.Literal { Literal.value = Literal.String lit; _ }) as value)) + | ( expr, + ( ( _, + Expression.TemplateLiteral + { + TemplateLiteral.quasis = + [ + ( lit_loc, + { + TemplateLiteral.Element.value = + { TemplateLiteral.Element.cooked = lit; _ }; + _; + } ); + ]; + _; + } ) as value ) ) -> + let (((_, val_t), _) as val_ast) = expression cx value in + literal_test + loc + ~sense + ~strict + expr + val_t + (SingletonStrP (lit_loc, sense, lit)) + (fun expr -> reconstruct_ast expr val_ast) + (* special case equality relations involving numbers *) + | (((lit_loc, number_literal) as value), expr) when is_number_literal number_literal -> + let (lit, raw) = extract_number_literal number_literal in + let (((_, val_t), _) as val_ast) = expression cx value in + literal_test + loc + ~sense + ~strict + expr + val_t + (SingletonNumP (lit_loc, sense, (lit, raw))) + (fun expr -> reconstruct_ast val_ast expr) + | (expr, ((lit_loc, number_literal) as value)) when is_number_literal number_literal -> + let (lit, raw) = extract_number_literal number_literal in + let (((_, val_t), _) as val_ast) = expression cx value in + literal_test + loc + ~sense + ~strict + expr + val_t + (SingletonNumP (lit_loc, sense, (lit, raw))) + (fun expr -> reconstruct_ast expr val_ast) + (* TODO: add Type.predicate variant that tests number equality *) + + (* expr op null *) + | (((_, Expression.Literal { Literal.value = Literal.Null; _ }) as null), expr) -> + let (((_, null_t), _) as null_ast) = expression cx null in + null_test loc ~sense ~strict expr null_t (fun expr -> reconstruct_ast null_ast expr) + | (expr, ((_, Expression.Literal { Literal.value = Literal.Null; _ }) as null)) -> + let (((_, null_t), _) as null_ast) = expression cx null in + null_test loc ~sense ~strict expr null_t (fun expr -> reconstruct_ast expr null_ast) + (* expr op undefined *) + | (((_, Identifier (_, { Ast.Identifier.name = "undefined"; comments = _ })) as void), expr) + -> + let (((_, void_t), _) as void_ast) = expression cx void in + undef_test loc ~sense ~strict expr void_t (fun expr -> reconstruct_ast void_ast expr) + | (expr, ((_, Identifier (_, { Ast.Identifier.name = "undefined"; comments = _ })) as void)) + -> + let (((_, void_t), _) as void_ast) = expression cx void in + undef_test loc ~sense ~strict expr void_t (fun expr -> reconstruct_ast expr void_ast) + (* expr op void(...) 
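     Editor's note, illustrative only, not part of this patch: together with the
     `undefined` cases above, this covers the common ways code checks for undefined:
       x === undefined    handled by undef_test, which only refines when the global
                          `undefined` has not been shadowed in scope
       x === void 0       handled by void_test, refining with VoidP
       x == null          loose forms refine with MaybeP instead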
*) + | (((_, Unary { Unary.operator = Unary.Void; _ }) as void), expr) -> + let (((_, void_t), _) as void_ast) = expression cx void in + void_test loc ~sense ~strict expr void_t (fun expr -> reconstruct_ast void_ast expr) + | (expr, ((_, Unary { Unary.operator = Unary.Void; _ }) as void)) -> + let (((_, void_t), _) as void_ast) = expression cx void in + void_test loc ~sense ~strict expr void_t (fun expr -> reconstruct_ast expr void_ast) + (* fallback case for equality relations involving sentinels (this should be + lower priority since it refines the object but not the property) *) + | (((_, Expression.Member _) as expr), value) -> + let (((_, value_t), _) as value_ast) = expression cx value in + sentinel_prop_test loc ~sense ~strict expr value_t (fun expr -> + reconstruct_ast expr value_ast) + | (value, ((_, Expression.Member _) as expr)) -> + let (((_, value_t), _) as value_ast) = expression cx value in + sentinel_prop_test loc ~sense ~strict expr value_t (fun expr -> + reconstruct_ast value_ast expr) + (* for all other cases, walk the AST but always return bool *) + | (expr, value) -> + let (((_, t1), _) as expr) = expression cx expr in + let (((_, t2), _) as value) = expression cx value in + flow_eqt ~strict loc (t1, t2); + let ast = reconstruct_ast expr value in + let t_out = BoolT.at loc |> with_trust bogus_trust in + empty_result ((loc, t_out), ast) in - match name_opt with - | Some name -> + let mk_and map1 map2 = + Key_map.merge + (fun _ p1 p2 -> + match (p1, p2) with + | (None, None) -> None + | (Some p, None) + | (None, Some p) -> + Some p + | (Some p1, Some p2) -> Some (AndP (p1, p2))) + map1 + map2 + in + let mk_or map1 map2 = + Key_map.merge + (fun _ p1 p2 -> + match (p1, p2) with + | (None, None) -> None + | (Some _, None) + | (None, Some _) -> + None + | (Some p1, Some p2) -> Some (OrP (p1, p2))) + map1 + map2 + in + (* main *) + match e with + (* member expressions *) + | ( loc, + Member + { + Member._object; + property = + Member.PropertyIdentifier + (prop_loc, ({ Ast.Identifier.name = prop_name; comments = _ } as id)); + } ) -> + let (((_, obj_t), _) as _object_ast) = + match _object with + | (super_loc, Super) -> + let t = super_ cx super_loc in + ((super_loc, t), Super) + | _ -> + (* use `expression` instead of `condition` because `_object` is the + object in a member expression; if it itself is a member expression, + it must exist (so ~is_cond:false). e.g. `foo.bar.baz` shows up here + as `_object = foo.bar`, `prop_name = baz`, and `bar` must exist. *) + expression cx _object + in + let expr_reason = mk_expression_reason e in + let prop_reason = mk_reason (RProperty (Some prop_name)) prop_loc in + let t = + match Refinement.get cx e loc with + | Some t -> t + | None -> + if Type_inference_hooks_js.dispatch_member_hook cx prop_name prop_loc obj_t then + Unsoundness.at InferenceHooks prop_loc + else + let use_op = Op (GetProperty (mk_expression_reason e)) in + get_prop ~is_cond:true cx expr_reason ~use_op obj_t (prop_reason, prop_name) + in + let property = Member.PropertyIdentifier ((prop_loc, t), id) in + let ast = ((loc, t), Member { Member._object = _object_ast; property }) in + let out = + match Refinement.key e with + | Some name -> result ast name t (ExistsP (Some loc)) true + | None -> empty_result ast + in + (* refine the object (`foo.bar` in the example) based on the prop. 
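     Editor's note, illustrative only, not part of this patch: for
       if (foo.bar) { ... }
     the member expression itself is refined with ExistsP, and `foo` additionally gets
     PropExistsP bar, so inside the branch both the value of `foo.bar` and the presence
     of a truthy `bar` property on `foo` are known.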
*) + begin + match Refinement.key _object with + | Some name -> + let predicate = PropExistsP (prop_name, Some prop_loc) in + out |> add_predicate name obj_t predicate true + | None -> out + end + (* assignments *) + | (_, Assignment { Assignment.left = (loc, Ast.Pattern.Identifier id); _ }) -> + let (((_, expr), _) as tast) = expression cx e in + let id = id.Ast.Pattern.Identifier.name in + (match refinable_lvalue (loc, Ast.Expression.Identifier id) with + | (Some name, _) -> result tast name expr (ExistsP (Some loc)) true + | (None, _) -> empty_result tast) + (* expr instanceof t *) + | (loc, Binary { Binary.operator = Binary.Instanceof; left; right }) -> + let bool = BoolT.at loc |> with_trust bogus_trust in + let (name_opt, (((_, left_t), _) as left_ast)) = refinable_lvalue left in + let (((_, right_t), _) as right_ast) = expression cx right in + let ast = + ( (loc, bool), + Binary { Binary.operator = Binary.Instanceof; left = left_ast; right = right_ast } ) + in + (match name_opt with + | Some name -> let pred = LeftP (InstanceofTest, right_t) in result ast name left_t pred true - | None -> - empty_result ast - ) - - (* expr op expr *) - | loc, Binary { Binary.operator = Binary.Equal; left; right; } -> - eq_test loc ~sense:true ~strict:false left right - (fun left right -> Binary { Binary.operator = Binary.Equal; left; right; }) - | loc, Binary { Binary.operator = Binary.StrictEqual; left; right; } -> - eq_test loc ~sense:true ~strict:true left right - (fun left right -> Binary { Binary.operator = Binary.StrictEqual; left; right; }) - | loc, Binary { Binary.operator = Binary.NotEqual; left; right; } -> - eq_test loc ~sense:false ~strict:false left right - (fun left right -> Binary { Binary.operator = Binary.NotEqual; left; right; }) - | loc, Binary { Binary.operator = Binary.StrictNotEqual; left; right; } -> - eq_test loc ~sense:false ~strict:true left right - (fun left right -> Binary { Binary.operator = Binary.StrictNotEqual; left; right; }) - - (* Array.isArray(expr) *) - | loc, Call { - Call.callee = callee_loc, Member { - Member._object = (_, Identifier (_, "Array") as o); - property = Member.PropertyIdentifier (prop_loc, ("isArray" as prop_name)); - computed; - }; - targs; - arguments = [Expression arg]; - } -> ( - Option.iter targs ~f:(fun _ -> - Flow.add_output cx Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = false; - reason_arity = Reason.(locationless_reason (RFunction RNormal)); - expected_arity = 0; - })); - (* get Array.isArray in order to populate the type tables, but we don't + | None -> empty_result ast) + (* expr op expr *) + | (loc, Binary { Binary.operator = Binary.Equal; left; right }) -> + eq_test loc ~sense:true ~strict:false left right (fun left right -> + Binary { Binary.operator = Binary.Equal; left; right }) + | (loc, Binary { Binary.operator = Binary.StrictEqual; left; right }) -> + eq_test loc ~sense:true ~strict:true left right (fun left right -> + Binary { Binary.operator = Binary.StrictEqual; left; right }) + | (loc, Binary { Binary.operator = Binary.NotEqual; left; right }) -> + eq_test loc ~sense:false ~strict:false left right (fun left right -> + Binary { Binary.operator = Binary.NotEqual; left; right }) + | (loc, Binary { Binary.operator = Binary.StrictNotEqual; left; right }) -> + eq_test loc ~sense:false ~strict:true left right (fun left right -> + Binary { Binary.operator = Binary.StrictNotEqual; left; right }) + (* Array.isArray(expr) *) + | ( loc, + Call + { + Call.callee = + ( callee_loc, + Member + { + Member._object = + (_, 
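  (* Editor's note, illustrative only, not part of this patch: this case means that
       if (Array.isArray(x)) { ... }
     refines `x` with ArrP when `x` is a refinable l-value, and that explicit type
     arguments on the call are rejected with ECallTypeArity. *)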
Identifier (_, { Ast.Identifier.name = "Array"; comments = _ })) as o; + property = + Member.PropertyIdentifier + (prop_loc, ({ Ast.Identifier.name = "isArray"; comments = _ } as id)); + } ); + targs; + arguments = [Expression arg]; + } ) -> + Option.iter targs ~f:(fun _ -> + Flow.add_output + cx + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = false; + reason_arity = Reason.(locationless_reason (RFunction RNormal)); + expected_arity = 0; + })); + + (* get Array.isArray in order to populate the type tables, but we don't care about the result. *) - (* TODO: one day we can replace this with a call to `method_call`, and + (* TODO: one day we can replace this with a call to `method_call`, and then discard the result. currently MethodT does not update type_table properly. *) - let (_, obj_t), _ as _object = expression cx o in - let reason = mk_reason (RCustom "`Array.isArray(...)`") callee_loc in - let fn_t = Tvar.mk_where cx reason (fun t -> - let prop_reason = mk_reason (RProperty (Some "isArray")) prop_loc in - let use_op = Op (GetProperty (mk_expression_reason e)) in - Flow.flow cx (obj_t, GetPropT (use_op, reason, Named (prop_reason, "isArray"), t)) - ) in - Type_table.set (Context.type_table cx) prop_loc fn_t; - let id_info = "isArray", fn_t, Type_table.Other in - Type_table.set_info prop_loc id_info (Context.type_table cx); - - let bool = BoolT.at loc in - let name_opt, ((_, t), _ as arg) = refinable_lvalue arg in - let property = Member.PropertyIdentifier ((prop_loc, fn_t), prop_name) in - let ast = - (loc, bool), - Call { Call. - callee = (callee_loc, fn_t), Member { Member._object; property; computed; }; - targs = None; - arguments = [ Expression arg ]; - } - in - match name_opt with - | Some name -> - result ast name t ArrP true - | None -> - empty_result ast - ) - - (* test1 && test2 *) - | loc, Logical { Logical.operator = Logical.And; left; right } -> - let ((_, t1), _ as left_ast), map1, not_map1, xts1 = - predicates_of_condition cx left in - let ((_, t2), _ as right_ast), map2, not_map2, xts2 = Env.in_refined_env cx loc map1 xts1 - (fun () -> predicates_of_condition cx right) in - let reason = mk_reason (RLogical ("&&", desc_of_t t1, desc_of_t t2)) loc in - ( - ( - (loc, Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, AndT (reason, t2, t));)), - Logical { Logical. - operator = Logical.And; - left = left_ast; - right = right_ast; - } - ), - mk_and map1 map2, - mk_or not_map1 not_map2, - Key_map.union xts1 xts2 - ) - - (* test1 || test2 *) - | loc, Logical { Logical.operator = Logical.Or; left; right } -> - let () = check_default_pattern cx left right in - let ((_, t1), _ as left_ast), map1, not_map1, xts1 = - predicates_of_condition cx left in - let ((_, t2), _ as right_ast), map2, not_map2, xts2 = Env.in_refined_env cx loc not_map1 xts1 - (fun () -> predicates_of_condition cx right) in - let reason = mk_reason (RLogical ("||", desc_of_t t1, desc_of_t t2)) loc in - ( - ( - (loc, Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, OrT (reason, t2, t)))), - Logical { Logical. 
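  (* Editor's sketch, not part of this patch: a minimal, standalone model of how
     `mk_and` / `mk_or` above combine the refinement maps produced by the two sides of
     `&&` / `||`, using a plain string-keyed map and a toy predicate type in place of
     Key_map and Flow's predicates. Under `a && b` a key refined by either side
     survives; under `a || b` only keys refined by both sides do. In the real code the
     right operand is also checked in an environment already refined by the left side,
     via Env.in_refined_env. *)
  module M = Map.Make (String)

  type pred =
    | P of string
    | And of pred * pred
    | Or of pred * pred

  let mk_and m1 m2 =
    M.merge
      (fun _ p1 p2 ->
        match (p1, p2) with
        | (None, None) -> None
        | (Some p, None)
        | (None, Some p) ->
          Some p
        | (Some p1, Some p2) -> Some (And (p1, p2)))
      m1
      m2

  let mk_or m1 m2 =
    M.merge
      (fun _ p1 p2 ->
        match (p1, p2) with
        | (Some p1, Some p2) -> Some (Or (p1, p2))
        | _ -> None)
      m1
      m2

  let () =
    (* e.g. `x !== null && typeof x === 'object'` keeps both facts about x, while
       `a.p || b.q` keeps neither, since no key is refined on both sides *)
    let left = M.singleton "x" (P "non-null") in
    let right = M.add "y" (P "truthy") (M.singleton "x" (P "object")) in
    assert (M.cardinal (mk_and left right) = 2);
    assert (M.cardinal (mk_or left right) = 1)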
- operator = Logical.Or; - left = left_ast; - right = right_ast; - } - ), - mk_or map1 map2, - mk_and not_map1 not_map2, - Key_map.union xts1 xts2 - ) - - (* !test *) - | loc, Unary { Unary.operator = Unary.Not; argument; prefix; } -> - let (arg, map, not_map, xts) = predicates_of_condition cx argument in - let ast' = Unary { Unary.operator = Unary.Not; argument = arg; prefix; } in - let ast = (loc, BoolT.at loc), ast' in - (ast, not_map, map, xts) - - (* ids *) - | loc, This - | loc, Identifier _ - | loc, Member _ -> ( - match refinable_lvalue e with - | Some name, ((_, t), _ as e) -> result e name t (ExistsP (Some loc)) true - | None, e -> empty_result e - ) - - (* e.m(...) *) - (* TODO: Don't trap method calls for now *) - | _, Call { Call.callee = (_, Member _); _ } -> - empty_result (expression cx e) - - (* f(...) *) - (* The concrete predicate is not known at this point. We attach a "latent" + let (((_, obj_t), _) as _object) = expression cx o in + let reason = mk_reason (RCustom "`Array.isArray(...)`") callee_loc in + let fn_t = + Tvar.mk_where cx reason (fun t -> + let prop_reason = mk_reason (RProperty (Some "isArray")) prop_loc in + let use_op = Op (GetProperty (mk_expression_reason e)) in + Flow.flow cx (obj_t, GetPropT (use_op, reason, Named (prop_reason, "isArray"), t))) + in + let bool = BoolT.at loc |> with_trust bogus_trust in + let (name_opt, (((_, t), _) as arg)) = refinable_lvalue arg in + let property = Member.PropertyIdentifier ((prop_loc, fn_t), id) in + let ast = + ( (loc, bool), + Call + { + Call.callee = ((callee_loc, fn_t), Member { Member._object; property }); + targs = None; + arguments = [Expression arg]; + } ) + in + (match name_opt with + | Some name -> result ast name t ArrP true + | None -> empty_result ast) + (* test1 && test2 *) + | (loc, Logical { Logical.operator = Logical.And; left; right }) -> + let ((((_, t1), _) as left_ast), map1, not_map1, xts1) = predicates_of_condition cx left in + let ((((_, t2), _) as right_ast), map2, not_map2, xts2) = + Env.in_refined_env cx loc map1 xts1 (fun () -> predicates_of_condition cx right) + in + let reason = mk_reason (RLogical ("&&", desc_of_t t1, desc_of_t t2)) loc in + let t_out = Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, AndT (reason, t2, t))) in + ( ( (loc, t_out), + Logical { Logical.operator = Logical.And; left = left_ast; right = right_ast } ), + mk_and map1 map2, + mk_or not_map1 not_map2, + Key_map.union xts1 xts2 ) + (* test1 || test2 *) + | (loc, Logical { Logical.operator = Logical.Or; left; right }) -> + let () = check_default_pattern cx left right in + let ((((_, t1), _) as left_ast), map1, not_map1, xts1) = predicates_of_condition cx left in + let ((((_, t2), _) as right_ast), map2, not_map2, xts2) = + Env.in_refined_env cx loc not_map1 xts1 (fun () -> predicates_of_condition cx right) + in + let reason = mk_reason (RLogical ("||", desc_of_t t1, desc_of_t t2)) loc in + let t_out = Tvar.mk_where cx reason (fun t -> Flow.flow cx (t1, OrT (reason, t2, t))) in + ( ( (loc, t_out), + Logical { Logical.operator = Logical.Or; left = left_ast; right = right_ast } ), + mk_or map1 map2, + mk_and not_map1 not_map2, + Key_map.union xts1 xts2 ) + (* !test *) + | (loc, Unary { Unary.operator = Unary.Not; argument; comments }) -> + let (arg, map, not_map, xts) = predicates_of_condition cx argument in + let ast' = Unary { Unary.operator = Unary.Not; argument = arg; comments } in + let t_out = BoolT.at loc |> with_trust bogus_trust in + let ast = ((loc, t_out), ast') in + (ast, not_map, map, xts) + (* ids 
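     Editor's note, illustrative only, not part of this patch: bare tests such as
       if (x) { ... }   or   if (o.p) { ... }
     refine the tested key with ExistsP (truthiness), while the `!test` case just above
     simply swaps the two predicate maps, so facts known when `test` is truthy become
     the falsy-branch facts of `!test`.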
*) + | (loc, This) + | (loc, Identifier _) + | (loc, Member _) -> + (match refinable_lvalue e with + | (Some name, (((_, t), _) as e)) -> result e name t (ExistsP (Some loc)) true + | (None, e) -> empty_result e) + (* e.m(...) *) + (* TODO: Don't trap method calls for now *) + | (_, Call { Call.callee = (_, Member _); _ }) -> empty_result (expression cx e) + (* f(...) *) + (* The concrete predicate is not known at this point. We attach a "latent" predicate pointing to the type of the function that will supply this predicated when it is resolved. *) - | loc, Call ({ Call.arguments; _ } as call) -> - let is_spread = function | Spread _ -> true | _ -> false in - if List.exists is_spread arguments then - empty_result (expression cx e) - else - let fun_t, keys, arg_ts, ret_t, call_ast = - predicated_call_expression cx loc call in - let ast = (loc, ret_t), Call call_ast in - let args_with_offset = ListUtils.zipi keys arg_ts in - let emp_pred_map = empty_result ast in - List.fold_left (fun pred_map arg_info -> match arg_info with - | (idx, Some key, unrefined_t) -> - let pred = LatentP (fun_t, idx+1) in - add_predicate key unrefined_t pred true pred_map - | _ -> - pred_map - ) emp_pred_map args_with_offset - - (* fallthrough case: evaluate test expr, no refinements *) - | e -> - empty_result (expression cx e) -)) - - - - + | (loc, Call ({ Call.arguments; _ } as call)) -> + let is_spread = function + | Spread _ -> true + | _ -> false + in + if List.exists is_spread arguments then + empty_result (expression cx e) + else + let (fun_t, keys, arg_ts, ret_t, call_ast) = predicated_call_expression cx loc call in + let ast = ((loc, ret_t), Call call_ast) in + let args_with_offset = ListUtils.zipi keys arg_ts in + let emp_pred_map = empty_result ast in + List.fold_left + (fun pred_map arg_info -> + match arg_info with + | (idx, Some key, unrefined_t) -> + let pred = LatentP (fun_t, idx + 1) in + add_predicate key unrefined_t pred true pred_map + | _ -> pred_map) + emp_pred_map + args_with_offset + (* fallthrough case: evaluate test expr, no refinements *) + | e -> empty_result (expression cx e))) (* Conditional expressions are checked like expressions, except that property accesses are provisionally allowed even when such properties do not exist. This accommodates the common JavaScript idiom of testing for the existence of a property before using that property. *) -and condition cx e : (Loc.t, Loc.t * Type.t) Ast.Expression.t = - expression ~is_cond:true cx e +and condition cx e : (ALoc.t, ALoc.t * Type.t) Ast.Expression.t = expression ~is_cond:true cx e and get_private_field_opt_use reason ~use_op name = let class_entries = Env.get_class_entries () in @@ -5865,10 +6151,9 @@ and get_private_field_opt_use reason ~use_op name = and get_private_field cx reason ~use_op tobj name = Tvar.mk_where cx reason (fun t -> - let opt_use = get_private_field_opt_use reason ~use_op name in - let get_prop_u = apply_opt_use opt_use t in - Flow.flow cx (tobj, get_prop_u) - ) + let opt_use = get_private_field_opt_use reason ~use_op name in + let get_prop_u = apply_opt_use opt_use t in + Flow.flow cx (tobj, get_prop_u)) (* Property lookups become non-strict when processing conditional expressions (see above). @@ -5878,219 +6163,277 @@ and get_private_field cx reason ~use_op tobj name = would make everything involving Refinement be in the same place. 
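   Editor's note, illustrative only, not part of this patch: with ~is_cond:true the
   lookup below is issued as OptTestPropT rather than OptGetPropT, so a test such as
     if (obj.maybeMissing) { ... }
   does not error when the property is not declared on `obj`; the same read outside a
   conditional still does. `maybeMissing` is a made-up name for illustration.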
*) and get_prop_opt_use ~is_cond reason ~use_op (prop_reason, name) = - if is_cond - then OptTestPropT (reason, mk_id (), Named (prop_reason, name)) - else OptGetPropT (use_op, reason, Named (prop_reason, name)) + if is_cond then + OptTestPropT (reason, mk_id (), Named (prop_reason, name)) + else + OptGetPropT (use_op, reason, Named (prop_reason, name)) and get_prop ~is_cond cx reason ~use_op tobj (prop_reason, name) = let opt_use = get_prop_opt_use ~is_cond reason ~use_op (prop_reason, name) in Tvar.mk_where cx reason (fun t -> - let get_prop_u = apply_opt_use opt_use t in - Flow.flow cx (tobj, get_prop_u) - ) + let get_prop_u = apply_opt_use opt_use t in + Flow.flow cx (tobj, get_prop_u)) (* TODO: switch to TypeScript specification of Object *) and static_method_call_Object cx loc callee_loc prop_loc expr obj_t m targs args = - let open Ast.Expression in - - let reason = mk_reason (RCustom (spf "`Object.%s`" m)) loc in - match (m, targs, args) with - | "create", None, [Expression e] -> - let (_, e_t), _ as e_ast = expression cx e in - let proto = - let reason = mk_reason RPrototype (fst e) in - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (e_t, ObjTestProtoT (reason, t)) - ) - in - Obj_type.mk_with_proto cx reason proto, - None, - [Expression e_ast] - - | "create", None, [Expression e; Expression (obj_loc, Object { Object.properties })] -> - let (_, e_t), _ as e_ast = expression cx e in - let proto = - let reason = mk_reason RPrototype (fst e) in - Tvar.mk_where cx reason (fun t -> - Flow.flow cx (e_t, ObjTestProtoT (reason, t)) - ) + Ast.Expression.( + let reason = mk_reason (RCustom (spf "`Object.%s`" m)) loc in + let use_op = + Op + (FunCallMethod + { + op = reason; + fn = mk_reason (RMethod (Some m)) callee_loc; + prop = mk_reason (RProperty (Some m)) prop_loc; + args = mk_initial_arguments_reason args; + local = true; + }) in - let pmap, properties = prop_map_of_object cx properties in - let props = SMap.fold (fun x p acc -> - let loc = Property.read_loc p in - match Property.read_t p with - | None -> - (* Since the properties object must be a literal, and literal objects + match (m, targs, args) with + | ("create", None, [Expression e]) -> + let (((_, e_t), _) as e_ast) = expression cx e in + let proto = + let reason = mk_reason RPrototype (fst e) in + Tvar.mk_where cx reason (fun t -> Flow.flow cx (e_t, ObjTestProtoT (reason, t))) + in + (Obj_type.mk_with_proto cx reason proto, None, [Expression e_ast]) + | ("create", None, [Expression e; Expression (obj_loc, Object { Object.properties; comments })]) + -> + let (((_, e_t), _) as e_ast) = expression cx e in + let proto = + let reason = mk_reason RPrototype (fst e) in + Tvar.mk_where cx reason (fun t -> Flow.flow cx (e_t, ObjTestProtoT (reason, t))) + in + let (pmap, properties) = prop_map_of_object cx properties in + let propdesc_type = Flow.get_builtin cx "PropertyDescriptor" reason in + let props = + SMap.fold + (fun x p acc -> + let loc = Property.read_loc p in + match Property.read_t p with + | None -> + (* Since the properties object must be a literal, and literal objects can only ever contain neutral fields, this should not happen. 
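       Editor's note, illustrative only, not part of this patch: each readable property
       of the descriptor literal is checked against the builtin PropertyDescriptor type
       and contributes a neutral field to the result, whose prototype is the first
       argument; roughly, Object.create(proto, { x: { value: 3 } }) yields an object
       with an `x` field typed from the descriptor and proto as its prototype.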
*) - Flow.add_output cx Flow_error.( - EInternal (prop_loc, PropertyDescriptorPropertyCannotBeRead) - ); - acc - | Some spec -> - let reason = replace_reason (fun desc -> - RCustom (spf ".%s of %s" x (string_of_desc desc)) - ) reason in - let t = Tvar.mk_where cx reason (fun tvar -> - Flow.flow cx (spec, GetPropT (unknown_use, reason, Named (reason, "value"), tvar)) - ) in - let p = Field (loc, t, Neutral) in - SMap.add x p acc - ) pmap SMap.empty in - Obj_type.mk_with_proto cx reason ~props proto, - None, - [ - Expression e_ast; - (* TODO(vijayramamurthy) construct object type *) - Expression ((obj_loc, AnyObjT.at obj_loc), Object { Object.properties }) - ] - - | ("getOwnPropertyNames" | "keys"), None, [Expression e] -> - let arr_reason = mk_reason RArrayType loc in - let (_, o), _ as e_ast = expression cx e in - DefT (arr_reason, ArrT ( - ArrayAT ( - Tvar.mk_where cx arr_reason (fun tvar -> - let keys_reason = replace_reason (fun desc -> - RCustom (spf "element of %s" (string_of_desc desc)) - ) reason in - Flow.flow cx (o, GetKeysT (keys_reason, UseT (unknown_use, tvar))); - ), - None - ) - )), - None, - [Expression e_ast] - - | "defineProperty", None, [ - Expression e; - Expression ((ploc, Ast.Expression.Literal { - Ast.Literal.value = Ast.Literal.String x; _ } - ) as key); - Expression config; - ] -> - let (_, o), _ as e_ast = expression cx e in - let key_ast = expression cx key in - let (_, spec), _ as config_ast = expression cx config in - let tvar = Tvar.mk cx reason in - let prop_reason = mk_reason (RProperty (Some x)) ploc in - Flow.flow cx (spec, GetPropT (unknown_use, reason, Named (reason, "value"), tvar)); - let prop_t = Tvar.mk cx prop_reason in - let id_info = x, prop_t, Type_table.Other in - Type_table.set_info ploc id_info (Context.type_table cx); - Flow.flow cx (o, SetPropT ( - unknown_use, reason, Named (prop_reason, x), Normal, tvar, Some prop_t - )); - o, - None, - [Expression e_ast; Expression key_ast; Expression config_ast] - - | "defineProperties", None, [Expression e; Expression (obj_loc, Object { Object.properties })] -> - let (_, o), _ as e_ast = expression cx e in - let pmap, properties = prop_map_of_object cx properties in - pmap |> SMap.iter (fun x p -> - match Property.read_t p with - | None -> - (* Since the properties object must be a literal, and literal objects + Flow.add_output + cx + Error_message.(EInternal (prop_loc, PropertyDescriptorPropertyCannotBeRead)); + acc + | Some spec -> + let reason = + update_desc_reason + (fun desc -> RCustom (spf ".%s of %s" x (string_of_desc desc))) + reason + in + let t = + Tvar.mk_where cx reason (fun tvar -> + let loc = aloc_of_reason reason in + let propdesc = typeapp ~implicit:true ~annot_loc:loc propdesc_type [tvar] in + Flow.flow cx (spec, UseT (use_op, propdesc))) + in + let p = Field (loc, t, Polarity.Neutral) in + SMap.add x p acc) + pmap + SMap.empty + in + ( Obj_type.mk_with_proto cx reason ~props proto, + None, + [ + Expression e_ast; + (* TODO(vijayramamurthy) construct object type *) + Expression ((obj_loc, AnyT.at Untyped obj_loc), Object { Object.properties; comments }); + ] ) + | (("getOwnPropertyNames" | "keys"), None, [Expression e]) -> + let arr_reason = mk_reason RArrayType loc in + let (((_, o), _) as e_ast) = expression cx e in + ( DefT + ( arr_reason, + bogus_trust (), + ArrT + (ArrayAT + ( Tvar.mk_where cx arr_reason (fun tvar -> + let keys_reason = + update_desc_reason + (fun desc -> RCustom (spf "element of %s" (string_of_desc desc))) + reason + in + Flow.flow cx (o, GetKeysT (keys_reason, 
UseT (use_op, tvar)))), + None )) ), + None, + [Expression e_ast] ) + | ( "defineProperty", + (None | Some (_, [Ast.Expression.TypeParameterInstantiation.Explicit _])), + [ + Expression e; + Expression + ((ploc, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String x; _ }) as key); + Expression config; + ] ) -> + let (ty, targs) = + match targs with + | None -> (Tvar.mk cx reason, None) + | Some (targs_loc, [Ast.Expression.TypeParameterInstantiation.Explicit targ]) -> + let (((_, ty), _) as targ) = Anno.convert cx SMap.empty targ in + (ty, Some (targs_loc, [Ast.Expression.TypeParameterInstantiation.Explicit targ])) + | _ -> assert_false "unexpected type argument to Object.defineProperty, match guard failed" + in + let loc = aloc_of_reason reason in + let propdesc_type = Flow.get_builtin cx "PropertyDescriptor" reason in + let propdesc = typeapp ~implicit:true ~annot_loc:loc propdesc_type [ty] in + let (((_, o), _) as e_ast) = expression cx e in + let key_ast = expression cx key in + let (((_, spec), _) as config_ast) = expression cx config in + let prop_reason = mk_reason (RProperty (Some x)) ploc in + Flow.flow cx (spec, UseT (use_op, propdesc)); + let prop_t = Tvar.mk cx prop_reason in + Flow.flow + cx + (o, SetPropT (use_op, reason, Named (prop_reason, x), Assign, Normal, ty, Some prop_t)); + (o, targs, [Expression e_ast; Expression key_ast; Expression config_ast]) + | ( "defineProperties", + None, + [Expression e; Expression (obj_loc, Object { Object.properties; comments })] ) -> + let (((_, o), _) as e_ast) = expression cx e in + let (pmap, properties) = prop_map_of_object cx properties in + let propdesc_type = Flow.get_builtin cx "PropertyDescriptor" reason in + pmap + |> SMap.iter (fun x p -> + match Property.read_t p with + | None -> + (* Since the properties object must be a literal, and literal objects can only ever contain neutral fields, this should not happen. 
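       Editor's note, illustrative only, not part of this patch: as with defineProperty
       above, each descriptor here is checked against the builtin PropertyDescriptor
       type and the corresponding property is then written onto the target with
       SetPropT; e.g. Object.defineProperties(o, { x: { value: 3 } }) both validates
       the descriptor for `x` and assigns `x` on `o`.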
*) - Flow.add_output cx Flow_error.( - EInternal (prop_loc, PropertyDescriptorPropertyCannotBeRead) - ); - | Some spec -> - let reason = replace_reason (fun desc -> - RCustom (spf ".%s of %s" x (string_of_desc desc)) - ) reason in - let tvar = Tvar.mk cx reason in - Flow.flow cx (spec, GetPropT (unknown_use, reason, Named (reason, "value"), tvar)); - Flow.flow cx (o, SetPropT ( - unknown_use, reason, Named (reason, x), Normal, tvar, None - )); - ); - o, - None, - [ - Expression e_ast; - (* TODO(vijayramamurthy) construct object type *) - Expression ((obj_loc, AnyObjT.at obj_loc), Object { Object.properties }) - ] - - (* Freezing an object literal is supported since there's no way it could + Flow.add_output + cx + Error_message.(EInternal (prop_loc, PropertyDescriptorPropertyCannotBeRead)) + | Some spec -> + let reason = + update_desc_reason + (fun desc -> RCustom (spf ".%s of %s" x (string_of_desc desc))) + reason + in + let tvar = Tvar.mk cx reason in + let loc = aloc_of_reason reason in + let propdesc = typeapp ~implicit:true ~annot_loc:loc propdesc_type [tvar] in + Flow.flow cx (spec, UseT (use_op, propdesc)); + Flow.flow + cx + (o, SetPropT (use_op, reason, Named (reason, x), Assign, Normal, tvar, None))); + ( o, + None, + [ + Expression e_ast; + (* TODO(vijayramamurthy) construct object type *) + Expression ((obj_loc, AnyT.at Untyped obj_loc), Object { Object.properties; comments }); + ] ) + (* Freezing an object literal is supported since there's no way it could have been mutated elsewhere *) - | "freeze", None, [Expression ((arg_loc, Object _) as e)] -> - let (_, arg_t), _ as e_ast = expression cx e in - - let reason_arg = mk_reason (RFrozen RObjectLit) arg_loc in - let arg_t = Tvar.mk_where cx reason_arg (fun tvar -> - Flow.flow cx (arg_t, ObjFreezeT (reason_arg, tvar)); - ) in - - let reason = mk_reason (RMethodCall (Some m)) loc in - snd (method_call cx reason prop_loc ~use_op:unknown_use (expr, obj_t, m) None [Arg arg_t]), - None, - [Expression e_ast] - - | ( "create" - | "getOwnPropertyNames" - | "keys" - | "defineProperty" - | "defineProperties" - | "freeze" ), - Some (targs_loc, targs), - _ -> - let targs = List.map (Anno.convert cx SMap.empty) targs in - let args = List.map (fun arg -> snd (expression_or_spread cx arg)) args in - Flow.add_output cx Flow_error.(ECallTypeArity { - call_loc = loc; - is_new = false; - reason_arity = Reason.(locationless_reason (RFunction RNormal)); - expected_arity = 0; - }); - AnyT.at loc, - Some (targs_loc, targs), - args - - (* TODO *) - | _ -> - let targts, targ_asts = convert_targs cx targs in - let argts, arg_asts = - args - |> List.map (expression_or_spread cx) - |> List.split in - let reason = mk_reason (RMethodCall (Some m)) loc in - let use_op = Op (FunCallMethod { - op = reason; - fn = mk_reason (RMethod (Some m)) callee_loc; - prop = mk_reason (RProperty (Some m)) prop_loc; - args = mk_initial_arguments_reason args; - }) in - snd (method_call cx reason ~use_op prop_loc (expr, obj_t, m) targts argts), - targ_asts, - arg_asts - -and extract_class_name class_loc = Ast.Class.(function {id; _;} -> - match id with - | Some(name_loc, name) -> (name_loc, name) - | None -> (class_loc, "<>") -) - -and mk_class cx loc reason c = - let def_reason = repos_reason loc reason in - let this_in_class = Class_sig.This.in_class c in + | ("freeze", ((None | Some (_, [_])) as targs), [Expression ((arg_loc, Object _) as e)]) -> + let targs = + Option.map + ~f:(fun (loc, targs) -> (loc, convert_tparam_instantiations cx SMap.empty targs)) + targs + in + let 
(((_, arg_t), _) as e_ast) = expression cx e in + let arg_t = Object_freeze.freeze_object cx arg_loc arg_t in + let reason = mk_reason (RMethodCall (Some m)) loc in + ( snd + (method_call + cx + reason + prop_loc + ~use_op + (expr, obj_t, m) + (Option.map ~f:(snd %> fst) targs) + [Arg arg_t]), + Option.map ~f:(fun (loc, targs) -> (loc, snd targs)) targs, + [Expression e_ast] ) + | ( ( "create" | "getOwnPropertyNames" | "keys" | "defineProperty" | "defineProperties" + | "freeze" ), + Some (targs_loc, targs), + _ ) -> + let targs = snd (convert_tparam_instantiations cx SMap.empty targs) in + let args = Core_list.map ~f:(fun arg -> snd (expression_or_spread cx arg)) args in + let arity = + if m = "freeze" || m = "defineProperty" then + 1 + else + 0 + in + Flow.add_output + cx + Error_message.( + ECallTypeArity + { + call_loc = loc; + is_new = false; + reason_arity = Reason.(locationless_reason (RFunction RNormal)); + expected_arity = arity; + }); + (AnyT.at AnyError loc, Some (targs_loc, targs), args) + (* TODO *) + | _ -> + let (targts, targ_asts) = convert_targs cx targs in + let (argts, arg_asts) = args |> Core_list.map ~f:(expression_or_spread cx) |> List.split in + let reason = mk_reason (RMethodCall (Some m)) loc in + let use_op = + Op + (FunCallMethod + { + op = reason; + fn = mk_reason (RMethod (Some m)) callee_loc; + prop = mk_reason (RProperty (Some m)) prop_loc; + args = mk_initial_arguments_reason args; + local = true; + }) + in + ( snd (method_call cx reason ~use_op prop_loc (expr, obj_t, m) targts argts), + targ_asts, + arg_asts )) + +and extract_class_name class_loc = + Ast.Class.( + function + | { id; _ } -> + (match id with + | Some (name_loc, { Ast.Identifier.name; comments = _ }) -> (name_loc, name) + | None -> (class_loc, "<>"))) + +and mk_class cx class_loc ~name_loc reason c = + let def_reason = repos_reason class_loc reason in + let this_in_class = Class_stmt_sig.This.in_class c in let self = Tvar.mk cx reason in - let class_sig, class_ast_f = mk_class_sig cx loc reason self c in - class_sig |> Class_sig.with_typeparams cx (fun () -> - class_sig |> Class_sig.generate_tests cx (fun class_sig -> - Class_sig.check_super cx def_reason class_sig; - Class_sig.check_implements cx def_reason class_sig; - if this_in_class || not (Class_sig.This.is_bound_to_empty class_sig) then - Class_sig.toplevels cx class_sig - ~decls:toplevel_decls - ~stmts:toplevels - ~expr:expression - ); - let class_t = Class_sig.classtype cx class_sig in - Flow.unify cx self class_t; - class_t, class_ast_f class_t - ) + let (class_sig, class_ast_f) = mk_class_sig cx name_loc reason self c in + class_sig + |> Class_stmt_sig.generate_tests cx (fun class_sig -> + let public_property_map = + Class_stmt_sig.to_prop_map cx + @@ Class_stmt_sig.public_fields_of_signature ~static:false class_sig + in + let private_property_map = + Class_stmt_sig.to_prop_map cx + @@ Class_stmt_sig.private_fields_of_signature ~static:false class_sig + in + Class_stmt_sig.check_super cx def_reason class_sig; + Class_stmt_sig.check_implements cx def_reason class_sig; + if this_in_class || not (Class_stmt_sig.This.is_bound_to_empty class_sig) then + Class_stmt_sig.toplevels + cx + class_sig + ~decls:toplevel_decls + ~stmts:toplevels + ~expr:expression + ~private_property_map; + + let class_body = Ast.Class.((snd c.body).Body.body) in + Context.add_voidable_check + cx + { + Context.public_property_map; + private_property_map; + errors = Property_assignment.eval_property_assignment class_body; + }); + let class_t = 
Class_stmt_sig.classtype cx class_sig in + Flow.unify cx self class_t; + (class_t, class_ast_f class_t) (* Process a class definition, returning a (polymorphic) class type. A class type is a wrapper around an instance type, which contains types of instance @@ -6099,9 +6442,8 @@ and mk_class cx loc reason c = "metaclass": thus, the static type is itself implemented as an instance type. *) and mk_class_sig = - let open Class_sig in - - (* Given information about a field, returns: + Class_stmt_sig.( + (* Given information about a field, returns: - Class_sig.field representation of this field - typed AST of the field's type annotation - a function which will return a typed AST of the field's initializer expression. @@ -6109,124 +6451,119 @@ and mk_class_sig = Class_sig.t containing this field, as that is when the initializer expression gets checked. *) - let mk_field cx tparams_map reason annot init = - let annot_t, annot_ast = Anno.mk_type_annotation cx tparams_map reason annot in - let field, get_init = - match init with - | None -> Annot annot_t, Fn.const None - | Some expr -> - let value_ref : (Loc.t, Loc.t * Type.t) Ast.Expression.t option ref = ref None in - Infer ( - Func_sig.field_initializer tparams_map reason expr annot_t, - (fun (_, value_opt) -> value_ref := Some (Option.value_exn value_opt)) - ), - (fun () -> Some (Option.value (!value_ref) - ~default:(Typed_ast.error_annot, Typed_ast.Expression.error))) + let mk_field cx tparams_map reason annot init = + let (annot_t, annot_ast) = Anno.mk_type_annotation cx tparams_map reason annot in + let (field, get_init) = + match init with + | None -> (Annot annot_t, Fn.const None) + | Some expr -> + let value_ref : (ALoc.t, ALoc.t * Type.t) Ast.Expression.t option ref = ref None in + ( Infer + ( Func_stmt_sig.field_initializer tparams_map reason expr annot_t, + (fun (_, _, value_opt) -> value_ref := Some (Option.value_exn value_opt)) ), + fun () -> + Some (Option.value !value_ref ~default:(Tast_utils.error_mapper#expression expr)) ) + in + (field, annot_t, annot_ast, get_init) in - field, annot_t, annot_ast, get_init - in - - let mk_method = mk_func_sig in - - let mk_extends cx tparams_map = function - | None -> Implicit { null = false }, None - | Some (loc, { Ast.Class.Extends.expr; targs }) -> - let (_, c), _ as expr = expression cx expr in - let t, targs = Anno.mk_super cx tparams_map loc c targs in - Explicit t, Some (loc, { Ast.Class.Extends.expr; targs }) - in - - let warn_or_ignore_decorators cx = function - | [] -> () - | (start_loc, _)::ds -> - let loc = List.fold_left (fun start_loc (end_loc, _) -> - Loc.btwn start_loc end_loc - ) start_loc ds in - match Context.esproposal_decorators cx with - | Options.ESPROPOSAL_ENABLE -> failwith "Decorators cannot be enabled!" 
- | Options.ESPROPOSAL_IGNORE -> () - | Options.ESPROPOSAL_WARN -> - Flow.add_output cx (Flow_error.EExperimentalDecorators loc) - in - - let warn_or_ignore_class_properties cx ~static loc = - let config_setting = - if static - then Context.esproposal_class_static_fields cx - else Context.esproposal_class_instance_fields cx + let mk_method = mk_func_sig in + let mk_extends cx tparams_map = function + | None -> (Implicit { null = false }, None) + | Some (loc, { Ast.Class.Extends.expr; targs }) -> + let (((_, c), _) as expr) = expression cx expr in + let (t, targs) = Anno.mk_super cx tparams_map loc c targs in + (Explicit t, Some (loc, { Ast.Class.Extends.expr; targs })) in - match config_setting with - | Options.ESPROPOSAL_ENABLE - | Options.ESPROPOSAL_IGNORE -> () - | Options.ESPROPOSAL_WARN -> - Flow.add_output cx - (Flow_error.EExperimentalClassProperties (loc, static)) - in - - fun cx _loc reason self { Ast.Class. - id; - body = (body_loc, { Ast.Class.Body.body = elements }); - tparams; - extends; - implements; - classDecorators; - } -> - - warn_or_ignore_decorators cx classDecorators; - - let tparams, tparams_map, tparams_ast = - Anno.mk_type_param_declarations cx tparams - in - - let self', tparams, tparams_map = - add_this self cx reason tparams tparams_map - in - - let class_sig, extends_ast, implements_ast = - let id = Context.make_nominal cx in - let extends, extends_ast = mk_extends cx tparams_map extends in - let implements, implements_ast = implements |> List.map (fun (loc, i) -> - let { Ast.Class.Implements.id = (id_loc, name); targs } = i in - let c = Env.get_var ~lookup_mode:Env.LookupMode.ForType cx name id_loc in - let typeapp, targs = match targs with - | None -> (loc, c, None), None - | Some (targs_loc, targs) -> - let ts, targs_ast = Anno.convert_list cx tparams_map targs in - (loc, c, Some ts), Some (targs_loc, targs_ast) + let warn_or_ignore_decorators cx = function + | [] -> [] + | decorators -> + (match Context.esproposal_decorators cx with + | Options.ESPROPOSAL_ENABLE -> failwith "Decorators cannot be enabled!" + | Options.ESPROPOSAL_IGNORE -> + Core_list.map ~f:Tast_utils.unchecked_mapper#class_decorator decorators + | Options.ESPROPOSAL_WARN -> + List.iter + (fun (loc, _) -> Flow.add_output cx (Error_message.EExperimentalDecorators loc)) + decorators; + Core_list.map ~f:Tast_utils.error_mapper#class_decorator decorators) + in + let warn_or_ignore_class_properties cx ~static loc = + let config_setting = + if static then + Context.esproposal_class_static_fields cx + else + Context.esproposal_class_instance_fields cx in - let id_info = name, c, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - typeapp, (loc, { Ast.Class.Implements.id = (id_loc, c), name; targs }) - ) |> List.split in - let super = Class { extends; mixins = []; implements } in - empty id reason tparams tparams_map super, extends_ast, implements_ast - in - - (* In case there is no constructor, pick up a default one. 
*) - let class_sig = - if extends <> None - then - (* Subclass default constructors are technically of the form (...args) => + match config_setting with + | Options.ESPROPOSAL_ENABLE + | Options.ESPROPOSAL_IGNORE -> + () + | Options.ESPROPOSAL_WARN -> + Flow.add_output cx (Error_message.EExperimentalClassProperties (loc, static)) + in + fun cx + name_loc + reason + self + { + Ast.Class.id; + body = (body_loc, { Ast.Class.Body.body = elements }); + tparams; + extends; + implements; + classDecorators; + } -> + let classDecorators_ast = warn_or_ignore_decorators cx classDecorators in + let (tparams, tparams_map, tparams_ast) = Anno.mk_type_param_declarations cx tparams in + let (self', tparams, tparams_map) = add_this self cx reason tparams tparams_map in + let (class_sig, extends_ast, implements_ast) = + let id = name_loc in + let (extends, extends_ast) = mk_extends cx tparams_map extends in + let (implements, implements_ast) = + implements + |> Core_list.map ~f:(fun (loc, i) -> + let { + Ast.Class.Implements.id = (id_loc, ({ Ast.Identifier.name; comments = _ } as id)); + targs; + } = + i + in + let c = Env.get_var ~lookup_mode:Env.LookupMode.ForType cx name id_loc in + let (typeapp, targs) = + match targs with + | None -> ((loc, c, None), None) + | Some (targs_loc, targs) -> + let (ts, targs_ast) = Anno.convert_list cx tparams_map targs in + ((loc, c, Some ts), Some (targs_loc, targs_ast)) + in + (typeapp, (loc, { Ast.Class.Implements.id = ((id_loc, c), id); targs }))) + |> List.split + in + let super = Class { extends; mixins = []; implements } in + (empty id reason tparams tparams_map super, extends_ast, implements_ast) + in + (* In case there is no constructor, pick up a default one. *) + let class_sig = + if extends <> None then + (* Subclass default constructors are technically of the form (...args) => { super(...args) }, but we can approximate that using flow's existing inheritance machinery. *) - (* TODO: Does this distinction matter for the type checker? *) - class_sig - else - let reason = replace_reason_const RDefaultConstructor reason in - add_default_constructor reason class_sig - in - - (* All classes have a static "name" property. *) - let class_sig = add_name_field class_sig in - - (* NOTE: We used to mine field declarations from field assignments in a + (* TODO: Does this distinction matter for the type checker? *) + class_sig + else + let reason = replace_desc_reason RDefaultConstructor reason in + add_default_constructor reason class_sig + in + (* All classes have a static "name" property. *) + let class_sig = add_name_field class_sig in + (* NOTE: We used to mine field declarations from field assignments in a constructor as a convenience, but it was not worth it: often, all that did was exchange a complaint about a missing field for a complaint about a missing annotation. Moreover, it caused fields declared in the super class to be redeclared if they were assigned in the constructor. So we don't do it. In the future, we could do it again, but only for private fields. *) - (* NOTE: field initializer expressions and method bodies don't get checked + (* NOTE: field initializer expressions and method bodies don't get checked until Class_sig.toplevels is called on class_sig. 
For this reason rather than returning a typed AST, we'll return a function which returns a typed AST, and this function shouldn't be called until after Class_sig.toplevels @@ -6236,312 +6573,418 @@ and mk_class_sig = initializer/body (respectively) will not get checked, and the corresponding nodes of the typed AST will be filled in with error nodes. *) - let class_sig, rev_elements = List.fold_left Ast.Class.(fun (c, rev_elements) -> function - (* instance and static methods *) - | Body.Property (_, { - Property.key = Ast.Expression.Object.Property.PrivateName _; - _ - }) -> failwith "Internal Error: Found non-private field with private name" - - | Body.Method (_, { - Method.key = Ast.Expression.Object.Property.PrivateName _; - _ - }) -> failwith "Internal Error: Found method with private name" - - | Body.Method (loc, { - Method.key = Ast.Expression.Object.Property.Identifier (id_loc, name); - value = (func_loc, func); - kind; - static; - decorators; - }) -> - - Type_inference_hooks_js.dispatch_class_member_decl_hook cx self static name id_loc; - warn_or_ignore_decorators cx decorators; - - (match kind with - | Method.Get | Method.Set -> Flow_js.add_output cx (Flow_error.EUnsafeGettersSetters loc) - | _ -> ()); - - let method_sig, reconstruct_func = mk_method cx tparams_map loc func in - (* The body of a class method doesn't get checked until Class_sig.toplevels + let (class_sig, rev_elements) = + List.fold_left + Ast.Class.( + fun (c, rev_elements) -> function + (* instance and static methods *) + | Body.Property + (_, { Property.key = Ast.Expression.Object.Property.PrivateName _; _ }) -> + failwith "Internal Error: Found non-private field with private name" + | Body.Method (_, { Method.key = Ast.Expression.Object.Property.PrivateName _; _ }) + -> + failwith "Internal Error: Found method with private name" + | Body.Method + ( loc, + { + Method.key = + Ast.Expression.Object.Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id)); + value = (func_loc, func); + kind; + static; + decorators; + } ) -> + Type_inference_hooks_js.dispatch_class_member_decl_hook cx self static name id_loc; + let decorators = warn_or_ignore_decorators cx decorators in + (match kind with + | Method.Get + | Method.Set -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc) + | _ -> ()); + + let (method_sig, reconstruct_func) = mk_method cx tparams_map loc func in + (* The body of a class method doesn't get checked until Class_sig.toplevels is called on the class sig (in this case c). The order of how the methods were arranged in the class is lost by the time this happens, so rather than attempting to return a list of method bodies from the Class_sig.toplevels function, we have it place the function bodies into a list via side effects. We use a similar approach for method types *) - let body_ref : (Loc.t, Loc.t * Type.t) Ast.Function.body option ref = ref None in - let set_asts (body_opt, _) = body_ref := Some (Option.value_exn body_opt) in - let func_t_ref : Type.t option ref = ref None in - let set_type t = func_t_ref := Some t in - let get_element () = - let body = Option.value (!body_ref) ~default:Typed_ast.Function.body_error in - let func_t = Option.value (!func_t_ref) ~default:(AnyFunT.at id_loc) in - let func = reconstruct_func body func_t in - Body.Method ((loc, func_t), { Method. 
- key = Ast.Expression.Object.Property.Identifier ((id_loc, func_t), name); - value = func_loc, func; - kind; - static; - decorators = []; (* we don't currently typecheck decorators *) - }) - in - let add = match kind with - | Method.Constructor -> add_constructor (Some id_loc) - | Method.Method -> add_method ~static name id_loc - | Method.Get -> add_getter ~static name id_loc - | Method.Set -> add_setter ~static name id_loc + let params_ref : (ALoc.t, ALoc.t * Type.t) Ast.Function.Params.t option ref = + ref None + in + let body_ref : (ALoc.t, ALoc.t * Type.t) Ast.Function.body option ref = ref None in + let set_asts (params_opt, body_opt, _) = + params_ref := Some (Option.value_exn params_opt); + body_ref := Some (Option.value_exn body_opt) + in + let func_t_ref : Type.t option ref = ref None in + let set_type t = func_t_ref := Some t in + let get_element () = + let params = + Option.value + !params_ref + ~default:(Tast_utils.error_mapper#function_params func.Ast.Function.params) + in + let body = + Option.value + !body_ref + ~default:(Tast_utils.error_mapper#function_body func.Ast.Function.body) + in + let func_t = + Option.value !func_t_ref ~default:(EmptyT.at id_loc |> with_trust bogus_trust) + in + let func = reconstruct_func params body func_t in + Body.Method + ( (loc, func_t), + { + Method.key = + Ast.Expression.Object.Property.Identifier ((id_loc, func_t), id); + value = (func_loc, func); + kind; + static; + decorators; + } ) + in + let add = + match kind with + | Method.Constructor -> add_constructor (Some id_loc) + | Method.Method -> add_method ~static name id_loc + | Method.Get -> add_getter ~static name id_loc + | Method.Set -> add_setter ~static name id_loc + in + (add method_sig ~set_asts ~set_type c, get_element :: rev_elements) + (* fields *) + | Body.PrivateField + ( loc, + { + PrivateField.key = + (_, (id_loc, { Ast.Identifier.name; comments = _ })) as key; + annot; + value; + static; + variance; + } ) -> + Type_inference_hooks_js.dispatch_class_member_decl_hook cx self static name id_loc; + + if value <> None then warn_or_ignore_class_properties cx ~static loc; + + let reason = mk_reason (RProperty (Some name)) loc in + let polarity = Anno.polarity variance in + let (field, annot_t, annot_ast, get_value) = + mk_field cx tparams_map reason annot value + in + let get_element () = + Body.PrivateField + ( (loc, annot_t), + { + PrivateField.key; + annot = annot_ast; + value = get_value (); + static; + variance; + } ) + in + ( add_private_field ~static name id_loc polarity field c, + get_element :: rev_elements ) + | Body.Property + ( loc, + { + Property.key = + Ast.Expression.Object.Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id)); + annot; + value; + static; + variance; + } ) -> + Type_inference_hooks_js.dispatch_class_member_decl_hook cx self static name id_loc; + + if value <> None then warn_or_ignore_class_properties cx ~static loc; + + let reason = mk_reason (RProperty (Some name)) loc in + let polarity = Anno.polarity variance in + let (field, annot_t, annot, get_value) = + mk_field cx tparams_map reason annot value + in + let get_element () = + Body.Property + ( (loc, annot_t), + { + Property.key = + Ast.Expression.Object.Property.Identifier ((id_loc, annot_t), id); + annot; + value = get_value (); + static; + variance; + } ) + in + (add_field ~static name id_loc polarity field c, get_element :: rev_elements) + (* literal LHS *) + | ( Body.Method (loc, { Method.key = Ast.Expression.Object.Property.Literal _; _ }) + | Body.Property + (loc, 
{ Property.key = Ast.Expression.Object.Property.Literal _; _ }) ) as elem + -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ClassPropertyLiteral)); + (c, (fun () -> Tast_utils.error_mapper#class_element elem) :: rev_elements) + (* computed LHS *) + | ( Body.Method (loc, { Method.key = Ast.Expression.Object.Property.Computed _; _ }) + | Body.Property + (loc, { Property.key = Ast.Expression.Object.Property.Computed _; _ }) ) as + elem -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ClassPropertyComputed)); + (c, (fun () -> Tast_utils.error_mapper#class_element elem) :: rev_elements)) + (class_sig, []) + elements in - add method_sig ~set_asts ~set_type c, get_element::rev_elements - - (* fields *) - | Body.PrivateField(loc, { - PrivateField.key = (_, (id_loc, name)) as key; - annot; - value; - static; - variance; - }) -> - Type_inference_hooks_js.dispatch_class_member_decl_hook cx self static name id_loc; - - if value <> None - then warn_or_ignore_class_properties cx ~static loc; - - let reason = mk_reason (RProperty (Some name)) loc in - let polarity = Anno.polarity variance in - let field, annot_t, annot_ast, get_value = mk_field cx tparams_map reason annot value in - let get_element () = Body.PrivateField ((loc, annot_t), { PrivateField. - key; - annot = annot_ast; - value = get_value (); - static; - variance; - }) in - add_private_field ~static name id_loc polarity field c, get_element::rev_elements - - | Body.Property (loc, { - Property.key = Ast.Expression.Object.Property.Identifier (id_loc, name); - annot; - value; - static; - variance; - }) -> - Type_inference_hooks_js.dispatch_class_member_decl_hook cx self static name id_loc; - - if value <> None - then warn_or_ignore_class_properties cx ~static loc; - - let reason = mk_reason (RProperty (Some name)) loc in - let polarity = Anno.polarity variance in - let field, annot_t, annot, get_value = mk_field cx tparams_map reason annot value in - let get_element () = Body.Property ((loc, annot_t), { Property. - key = Ast.Expression.Object.Property.Identifier ((id_loc, annot_t), name); - annot; - value = get_value (); - static; - variance; - }) in - add_field ~static name id_loc polarity field c, get_element::rev_elements - - (* literal LHS *) - | Body.Method (loc, { - Method.key = Ast.Expression.Object.Property.Literal _; - _ - }) - | Body.Property (loc, { - Property.key = Ast.Expression.Object.Property.Literal _; - _ - }) -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ClassPropertyLiteral)); - c, (fun () -> Typed_ast.Class.Body.element_error)::rev_elements - - (* computed LHS *) - | Body.Method (loc, { - Method.key = Ast.Expression.Object.Property.Computed _; - _ - }) - | Body.Property (loc, { - Property.key = Ast.Expression.Object.Property.Computed _; - _ - }) -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ClassPropertyComputed)); - c, (fun () -> Typed_ast.Class.Body.element_error)::rev_elements - ) (class_sig, []) elements - in - let elements = List.rev rev_elements in - class_sig, - (fun class_t -> { Ast.Class. - id = Option.map ~f:(fun (loc, name) -> (loc, class_t), name) id; - body = (body_loc, self'), { Ast.Class.Body. 
- body = List.map (fun f -> f ()) elements; - }; - tparams = tparams_ast; - extends = extends_ast; - implements = implements_ast; - classDecorators = []; (* class decorators not yet supported *) - }) + let elements = List.rev rev_elements in + ( class_sig, + fun class_t -> + { + Ast.Class.id = Option.map ~f:(fun (loc, name) -> ((loc, class_t), name)) id; + body = + ( (body_loc, self'), + { Ast.Class.Body.body = Core_list.map ~f:(fun f -> f ()) elements } ); + tparams = tparams_ast; + extends = extends_ast; + implements = implements_ast; + classDecorators = classDecorators_ast; + } )) and mk_func_sig = - let open Func_sig in - - let function_kind {Ast.Function.async; generator; predicate; _ } = - Ast.Type.Predicate.(match async, generator, predicate with - | true, true, None -> AsyncGenerator - | true, false, None -> Async - | false, true, None -> Generator - | false, false, None -> Ordinary - | false, false, Some (_, Declared _) -> Predicate - | false, false, Some (_ , Inferred) -> Predicate - | _, _, _ -> Utils_js.assert_false "(async || generator) && pred") + let function_kind ~async ~generator ~predicate = + Func_sig.( + Ast.Type.Predicate.( + match (async, generator, predicate) with + | (true, true, None) -> AsyncGenerator + | (true, false, None) -> Async + | (false, true, None) -> Generator + | (false, false, None) -> Ordinary + | (false, false, Some (_, Declared _)) -> Predicate + | (false, false, Some (_, Ast.Type.Predicate.Inferred)) -> Predicate + | (_, _, _) -> Utils_js.assert_false "(async || generator) && pred")) in - - let mk_params cx tparams_map params = - let add_param_with_default default patt params = match patt with - | loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = (name_loc, name_str) as name; - annot; - optional; - } -> - let reason = mk_reason (RParameter (Some name_str)) loc in - let t, annot = Anno.mk_type_annotation cx tparams_map reason annot in - Func_params.add_simple cx ~optional ?default loc (Some name) t params, - ((loc, t), Ast.Pattern.Identifier { - Ast.Pattern.Identifier.name = ((name_loc, t), name_str); - annot; - optional; - }) - | loc, _ -> - let reason = mk_reason RDestructuring loc in - let annot = Destructuring.type_of_pattern patt in - let t, _ = Anno.mk_type_annotation cx tparams_map reason annot in - Func_params.add_complex cx ~expr:expression ?default patt t params - in - let add_rest patt params = + let id_param cx tparams_map id mk_reason = + let { Ast.Pattern.Identifier.name; annot; optional } = id in + let (id_loc, ({ Ast.Identifier.name; comments = _ } as id)) = name in + let reason = mk_reason name in + let (t, annot) = Anno.mk_type_annotation cx tparams_map reason annot in + let name = ((id_loc, t), id) in + (t, { Ast.Pattern.Identifier.name; annot; optional }) + in + let mk_param cx tparams_map param = + let (loc, { Ast.Function.Param.argument = (ploc, patt); default }) = param in + let expr = expression in + let (t, pattern) = match patt with - | loc, Ast.Pattern.Identifier { Ast.Pattern.Identifier. 
- name = (name_loc, name_str) as name; - annot; - optional; - } -> - let reason = mk_reason (RRestParameter (Some name_str)) loc in - let t, annot = Anno.mk_type_annotation cx tparams_map reason annot in - Func_params.add_rest cx loc (Some name) t params, - ((loc, t), Ast.Pattern.Identifier { - Ast.Pattern.Identifier.name = ((name_loc, t), name_str); - annot; - optional; - }) - | loc, _ -> - Flow_js.add_output cx - Flow_error.(EInternal (loc, RestParameterNotIdentifierPattern)); - params, ((loc, AnyT.at loc), Typed_ast.Pattern.error) + | Ast.Pattern.Identifier id -> + let (t, id) = + id_param cx tparams_map id (fun name -> mk_reason (RParameter (Some name)) ploc) + in + (t, Func_stmt_config.Id id) + | Ast.Pattern.Object { Ast.Pattern.Object.annot; properties } -> + let reason = mk_reason RDestructuring ploc in + let (t, annot) = Anno.mk_type_annotation cx tparams_map reason annot in + (t, Func_stmt_config.Object { annot; properties }) + | Ast.Pattern.Array { Ast.Pattern.Array.annot; elements; comments } -> + let reason = mk_reason RDestructuring ploc in + let (t, annot) = Anno.mk_type_annotation cx tparams_map reason annot in + (t, Func_stmt_config.Array { annot; elements; comments }) + | Ast.Pattern.Expression _ -> failwith "unexpected expression pattern in param" + in + Func_stmt_config.Param { t; loc; ploc; pattern; default; expr } + in + let mk_rest cx tparams_map rest = + let (loc, { Ast.Function.RestParam.argument = (ploc, patt) }) = rest in + match patt with + | Ast.Pattern.Identifier id -> + let (t, id) = + id_param cx tparams_map id (fun name -> mk_reason (RRestParameter (Some name)) ploc) + in + Ok (Func_stmt_config.Rest { t; loc; ploc; id }) + | Ast.Pattern.Object _ + | Ast.Pattern.Array _ + | Ast.Pattern.Expression _ -> + (* TODO: this should be a parse error, unrepresentable AST *) + Error Error_message.(EInternal (ploc, RestParameterNotIdentifierPattern)) + in + let mk_params cx tparams_map (loc, { Ast.Function.Params.params; rest }) = + let fparams = + Func_stmt_params.empty (fun params rest -> Some (loc, { Ast.Function.Params.params; rest })) in - let add_param = function - | _, Ast.Pattern.Assignment { Ast.Pattern.Assignment.left; right; } -> - add_param_with_default (Some right) left - | patt -> - add_param_with_default None patt + let fparams = + List.fold_left + (fun acc param -> Func_stmt_params.add_param (mk_param cx tparams_map param) acc) + fparams + params in - let (params_loc, { Ast.Function.Params.params; rest }) = params in - let params, rev_param_asts = - List.fold_left (fun (params, rev_param_asts) param -> - let acc, param_ast = add_param param params in - acc, param_ast::rev_param_asts - ) (Func_params.empty, []) params + let fparams = + Option.fold + ~f:(fun acc rest -> + match mk_rest cx tparams_map rest with + | Ok rest -> Func_stmt_params.add_rest rest acc + | Error err -> + Flow_js.add_output cx err; + acc) + ~init:fparams + rest in - match rest with - | Some (rest_loc, { Ast.Function.RestElement.argument }) -> - let params, rest = add_rest argument params in - params, (params_loc, { Ast.Function.Params. - params = List.rev rev_param_asts; - rest = Some (rest_loc, { Ast.Function.RestElement.argument = rest }); - }) - | None -> params, (params_loc, { Ast.Function.Params. - params = List.rev rev_param_asts; - rest = None; - }) + fparams + in + let free_bound_ts cx t = + let finder = + object (_self) + inherit [Loc_collections.ALocSet.t] Type_visitor.t as super + + val mutable tparams : string list = [] + + method! 
type_ cx pole acc t = + match t with + | DefT (_, _, PolyT (_, tps, _, _)) -> + let old_tparams = tparams in + Nel.iter (fun tp -> tparams <- tp.name :: tparams) tps; + let acc = super#type_ cx pole acc t in + tparams <- old_tparams; + acc + | BoundT (_, name, _) when not (List.exists (fun x -> x = name) tparams) -> + Loc_collections.ALocSet.add (TypeUtil.loc_of_t t) acc + | _ -> super#type_ cx pole acc t + end + in + finder#type_ cx Polarity.Neutral Loc_collections.ALocSet.empty t in - fun cx tparams_map loc func -> - let {Ast.Function.tparams; return; body; predicate; params; id; _} = func in - let reason = func_reason func loc in - let kind = function_kind func in - let tparams, tparams_map, tparams_ast = + let { + Ast.Function.tparams; + return; + body; + predicate; + params; + id; + async; + generator; + sig_loc = _; + } = + func + in + let reason = func_reason ~async ~generator loc in + let kind = function_kind ~async ~generator ~predicate in + let (tparams, tparams_map, tparams_ast) = Anno.mk_type_param_declarations cx ~tparams_map tparams in - Type_table.with_typeparams tparams (Context.type_table cx) @@ fun _ -> - let fparams, params = mk_params cx tparams_map params in + let fparams = mk_params cx tparams_map params in let body = Some body in - let ret_reason = mk_reason RReturn (return_loc func) in - let return_t, return, loc = match return with - | Ast.Function.Available annot -> - let return_t, return = Anno.mk_type_annotation cx tparams_map ret_reason (Some annot) in - let (loc, _) = annot in - return_t, return, loc - | Ast.Function.Missing loc -> - let return_t, return = Anno.mk_type_annotation cx tparams_map ret_reason None in - return_t, return, loc + let ret_reason = mk_reason RReturn (Func_sig.return_loc func) in + let (return_t, return) = + let has_nonvoid_return = might_have_nonvoid_return loc func in + let definitely_returns_void = kind = Func_sig.Ordinary && not has_nonvoid_return in + Anno.mk_return_type_annotation cx tparams_map ret_reason ~definitely_returns_void return in - let return_t, predicate = Ast.Type.Predicate.(match predicate with - | None -> - return_t, None - | Some (loc, Inferred) -> - (* Restrict the fresh condition type by the declared return type *) - let fresh_t, _ = Anno.mk_type_annotation cx tparams_map ret_reason None in - Flow.flow_t cx (fresh_t, return_t); - fresh_t, Some (loc, Inferred) - | Some (loc, Declared _) -> - Flow_js.add_output cx Flow_error.( - EUnsupportedSyntax (loc, PredicateDeclarationForImplementation) - ); - fst (Anno.mk_type_annotation cx tparams_map ret_reason None), - Some (loc, Declared (Typed_ast.error_annot, Typed_ast.Expression.error)) - ) in - let return = match return with - | Some type_annot -> Ast.Function.Available type_annot - | None -> Ast.Function.Missing (loc, return_t) + let (return_t, predicate) = + Ast.Type.Predicate.( + match predicate with + | None -> (return_t, None) + | Some ((loc, Ast.Type.Predicate.Inferred) as pred) -> + (* Predicate Functions + * + * function f(x: S): [T] %checks { return e; } + * + * The return type we assign to this function will be used for refining the + * input x. The type annotation T may not have this ability (if it's an + * annotation). Instead we introduce a fresh type T'. 
T' will receive lower + * bounds from the return expression e, but is also checked against the + * return type (annotation) T: + * + * OpenPred(typeof e, preds) ~> T' + * T' ~> T + * + * The function signature will be + * + * (x: S) => T' (%checks) + *) + let bounds = free_bound_ts cx return_t in + if Loc_collections.ALocSet.is_empty bounds then + let return_t' = Tvar.mk_where cx reason (fun t -> Flow.flow_t cx (t, return_t)) in + (return_t', Some (loc, Ast.Type.Predicate.Inferred)) + else + (* If T is a polymorphic type P, this approach can lead to some + * complications. The 2nd constraint from above would become + * + * T' ~> P + * + * which is potentially ill-formed since it appears outside a generate_tests + * call (leads to Not_expect_bounds exception). We disallow this case + * and instead propagate the original return type T. + *) + let () = + Loc_collections.ALocSet.iter + (fun loc -> + Flow_js.add_output + cx + Error_message.(EUnsupportedSyntax (loc, PredicateFunctionAbstractReturnType))) + bounds + in + (return_t, Some (Tast_utils.error_mapper#type_predicate pred)) + | Some ((loc, Declared _) as pred) -> + Flow_js.add_output + cx + Error_message.(EUnsupportedSyntax (loc, PredicateDeclarationForImplementation)); + ( fst (Anno.mk_type_annotation cx tparams_map ret_reason (Ast.Type.Missing loc)), + Some (Tast_utils.error_mapper#type_predicate pred) )) in - {Func_sig.reason; kind; tparams; tparams_map; fparams; body; return_t}, - (fun body fun_type -> { func with Ast.Function. - id = Option.map ~f:(fun (id_loc, name) -> (id_loc, fun_type), name) id; - params; - body; - predicate; - return; - tparams = tparams_ast; - }) + let knot = Tvar.mk cx reason in + ( { Func_stmt_sig.reason; kind; tparams; tparams_map; fparams; body; return_t; knot }, + fun params body fun_type -> + { + func with + Ast.Function.id = Option.map ~f:(fun (id_loc, name) -> ((id_loc, fun_type), name)) id; + params; + body; + predicate; + return; + tparams = tparams_ast; + } ) (* Given a function declaration and types for `this` and `super`, extract a signature consisting of type parameters, parameter types, parameter names, and return type, check the body against that signature by adding `this` and super` to the environment, and return the signature. *) and function_decl id cx loc func this super = - let func_sig, reconstruct_func = mk_func_sig cx SMap.empty loc func in + let (func_sig, reconstruct_func) = mk_func_sig cx SMap.empty loc func in let save_return = Abnormal.clear_saved Abnormal.Return in let save_throw = Abnormal.clear_saved Abnormal.Throw in - let body = func_sig |> Func_sig.with_typeparams cx (fun () -> - func_sig |> Func_sig.generate_tests cx ( - Func_sig.toplevels id cx this super - ~decls:toplevel_decls - ~stmts:toplevels - ~expr:expression - ) - ) in + let (params_ast, body_ast, _) = + func_sig + |> Func_stmt_sig.generate_tests + cx + (Func_stmt_sig.toplevels + id + cx + this + super + ~decls:toplevel_decls + ~stmts:toplevels + ~expr:expression) + in ignore (Abnormal.swap_saved Abnormal.Return save_return); ignore (Abnormal.swap_saved Abnormal.Throw save_throw); - func_sig, reconstruct_func (Option.value_exn (fst body)) + (func_sig, reconstruct_func (Option.value_exn params_ast) (Option.value_exn body_ast)) (* Switch back to the declared type for an internal name. 
*) and define_internal cx reason x = let ix = internal_name x in - let loc = aloc_of_reason reason |> ALoc.to_loc in + let loc = aloc_of_reason reason in Env.declare_let cx ix loc; let t = Env.get_var_declared_type cx ix loc in Env.init_let cx ~use_op:unknown_use ix ~has_anno:false t loc -(* Process a function definition, returning a (polymorphic) function type. *) +(* Process a function declaration, returning a (polymorphic) function type. *) +and mk_function_declaration id cx loc func = mk_function id cx loc func + +(* Process a function expression, returning a (polymorphic) function type. *) +and mk_function_expression id cx loc func = mk_function id cx loc func + +(* Internal helper function. Use `mk_function_declaration` and `mk_function_expression` instead. *) and mk_function id cx loc func = let this_t = Tvar.mk cx (mk_reason RThis loc) in let this = Scope.Entry.new_let this_t ~loc ~state:Scope.State.Initialized in @@ -6550,172 +6993,220 @@ and mk_function id cx loc func = let t = ObjProtoT (mk_reason RNoSuper loc) in Scope.Entry.new_let t ~loc ~state:Scope.State.Initialized in - let func_sig, reconstruct_ast = function_decl id cx loc func this super in - let fun_type = Func_sig.functiontype cx this_t func_sig in - fun_type, reconstruct_ast fun_type + let (func_sig, reconstruct_ast) = function_decl id cx loc func this super in + let fun_type = Func_stmt_sig.functiontype cx this_t func_sig in + (fun_type, reconstruct_ast fun_type) (* Process an arrow function, returning a (polymorphic) function type. *) and mk_arrow cx loc func = - let _, this = Env.find_entry cx (internal_name "this") loc in - let _, super = Env.find_entry cx (internal_name "super") loc in - let {Ast.Function.id; _} = func in - let func_sig, reconstruct_ast = function_decl id cx loc func this super in + let (_, this) = Env.find_entry cx (internal_name "this") loc in + let (_, super) = Env.find_entry cx (internal_name "super") loc in + let { Ast.Function.id; _ } = func in + let (func_sig, reconstruct_ast) = function_decl id cx loc func this super in (* Do not expose the type of `this` in the function's type. The call to function_decl above has already done the necessary checking of `this` in the body of the function. Now we want to avoid re-binding `this` to objects through which the function may be called. *) - let fun_type = Func_sig.functiontype cx dummy_this func_sig in - fun_type, reconstruct_ast fun_type + let fun_type = Func_stmt_sig.functiontype cx dummy_this func_sig in + (fun_type, reconstruct_ast fun_type) (* Transform predicate declare functions to functions whose body is the predicate declared for the funcion *) (* Also returns a function for reversing this process, for the sake of typed AST construction. 
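A pattern worth calling out here, since it recurs throughout this patch (the reverse function mentioned in the comment above, mk_func_sig's reconstruct closure, the get_element thunks in the class body): elaboration returns its result together with a closure that rebuilds a typed AST once the final types are known. A minimal, self-contained editor's sketch of that shape, using toy types that are purely illustrative and not Flow's AST:

type untyped =
  | Num of int
  | Add of untyped * untyped

type typed =
  | TNum of int * string
  | TAdd of typed * typed * string

(* Return the inferred "type" for a node together with a thunk that rebuilds
   the typed tree once the caller has settled on the node's final type. *)
let rec elaborate (e : untyped) : string * (string -> typed) =
  match e with
  | Num n -> ("number", fun final -> TNum (n, final))
  | Add (a, b) ->
    let (ta, rebuild_a) = elaborate a in
    let (tb, rebuild_b) = elaborate b in
    ("number", fun final -> TAdd (rebuild_a ta, rebuild_b tb, final))

let () =
  let (ty, rebuild) = elaborate (Add (Num 1, Num 2)) in
  ignore (rebuild ty : typed)

This is the same shape that reconstruct_func and the get_element thunks take above: the typed tree is only materialized after checking has produced the types it depends on.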
*) -and declare_function_to_function_declaration cx - { Ast.Statement.DeclareFunction.id; annot; predicate; } = +and declare_function_to_function_declaration cx declare_loc func_decl = + let { Ast.Statement.DeclareFunction.id; annot; predicate } = func_decl in match predicate with | Some (loc, Ast.Type.Predicate.Inferred) -> - Flow.add_output cx Flow_error.( - EUnsupportedSyntax (loc, PredicateDeclarationWithoutExpression) - ); - None - - | Some (loc, Ast.Type.Predicate.Declared e) -> begin + Flow.add_output + cx + Error_message.(EUnsupportedSyntax (loc, PredicateDeclarationWithoutExpression)); + None + | Some (loc, Ast.Type.Predicate.Declared e) -> + begin match annot with - | (annot_loc, (func_annot_loc, Ast.Type.Function - { Ast.Type.Function.params = (params_loc, { Ast.Type.Function.Params.params; rest }); - Ast.Type.Function.return; - Ast.Type.Function.tparams; - })) -> - let param_type_to_param = Ast.Type.Function.( + | ( annot_loc, + ( func_annot_loc, + Ast.Type.Function + { + Ast.Type.Function.params = (params_loc, { Ast.Type.Function.Params.params; rest }); + Ast.Type.Function.return; + Ast.Type.Function.tparams; + } ) ) -> + let param_type_to_param = + Ast.Type.Function.( fun (l, { Param.name; Param.annot; _ }) -> - let name = match name with - | Some name -> name - | None -> + let name = + match name with + | Some name -> name + | None -> let name_loc = fst annot in - Flow.add_output cx Flow_error.(EUnsupportedSyntax - (loc, PredicateDeclarationAnonymousParameters)); - (name_loc, "_") + Flow.add_output + cx + Error_message.( + EUnsupportedSyntax (loc, PredicateDeclarationAnonymousParameters)); + (name_loc, mk_ident ~comments:None "_") in - let name' = ({ Ast.Pattern.Identifier. - name; - annot = Some (fst annot, annot); - optional = false; - }) in - (l, Ast.Pattern.Identifier name') - ) in - let params = List.map param_type_to_param params in - let rest = Ast.Type.Function.( + let name' = + { + Ast.Pattern.Identifier.name; + annot = Ast.Type.Available (fst annot, annot); + optional = false; + } + in + (l, Ast.Pattern.Identifier name')) + in + let params = + Core_list.map + ~f:(fun param -> + let ((loc, _) as argument) = param_type_to_param param in + (loc, { Ast.Function.Param.argument; default = None })) + params + in + let rest = + Ast.Type.Function.( match rest with - | Some (rest_loc, { RestParam.argument; }) -> + | Some (rest_loc, { RestParam.argument }) -> let argument = param_type_to_param argument in - Some (rest_loc, { Ast.Function.RestElement.argument; }) - | None -> None - ) in - let body = Ast.Function.BodyBlock (loc, {Ast.Statement.Block.body = [ - (loc, Ast.Statement.Return { - Ast.Statement.Return.argument = Some e - }) - ]}) in - let return = Ast.Function.Available (loc, return) in - Some (Ast.Statement.FunctionDeclaration { Ast.Function. - id = Some id; - params = (params_loc, { Ast.Function.Params.params; rest }); - body; - async = false; - generator = false; - predicate = Some (loc, Ast.Type.Predicate.Inferred); - expression = false; - return; - tparams; - }, function - | _, Ast.Statement.FunctionDeclaration { Ast.Function. - id = Some ((id_loc, fun_type), id_name); - tparams; - params = params_loc, { Ast.Function.Params.params; rest }; - return = Ast.Function.Available (_, return); - body = Ast.Function.BodyBlock (pred_loc, { Ast.Statement.Block. - body = [_, Ast.Statement.Return { Ast.Statement.Return. 
- argument = Some e; - }] - }); - _; - } -> + Some (rest_loc, { Ast.Function.RestParam.argument }) + | None -> None) + in + let body = + Ast.Function.BodyBlock + ( loc, + { + Ast.Statement.Block.body = + [ + ( loc, + Ast.Statement.Return + { + Ast.Statement.Return.argument = Some e; + comments = Flow_ast_utils.mk_comments_opt (); + } ); + ]; + } ) + in + let return = Ast.Type.Available (loc, return) in + Some + ( Ast.Statement.FunctionDeclaration + { + Ast.Function.id = Some id; + params = (params_loc, { Ast.Function.Params.params; rest }); + body; + async = false; + generator = false; + predicate = Some (loc, Ast.Type.Predicate.Inferred); + return; + tparams; + sig_loc = declare_loc; + }, + function + | ( _, + Ast.Statement.FunctionDeclaration + { + Ast.Function.id = Some ((id_loc, fun_type), id_name); + tparams; + params = (params_loc, { Ast.Function.Params.params; rest }); + return = Ast.Type.Available (_, return); + body = + Ast.Function.BodyBlock + ( pred_loc, + { + Ast.Statement.Block.body = + [ + ( _, + Ast.Statement.Return + { Ast.Statement.Return.argument = Some e; comments = _ } ); + ]; + } ); + _; + } ) -> let param_to_param_type = function - | (loc, t), Ast.Pattern.Identifier { Ast.Pattern.Identifier. - name = (name_loc, _), name; - annot = Some (_, annot); - optional; - } -> - loc, - { Ast.Type.Function.Param.name = Some ((name_loc, t), name); annot; optional; } + | ( (loc, t), + Ast.Pattern.Identifier + { + Ast.Pattern.Identifier.name = ((name_loc, _), name); + annot = Ast.Type.Available (_, annot); + optional; + } ) -> + ( loc, + { Ast.Type.Function.Param.name = Some ((name_loc, t), name); annot; optional } + ) | _ -> assert_false "Function declaration AST has unexpected shape" in - let params = List.map param_to_param_type params in - let rest = Option.map - ~f:(fun (rest_loc, { Ast.Function.RestElement.argument }) -> - rest_loc, { Ast.Type.Function.RestParam.argument = param_to_param_type argument } - ) rest in - let annot : (Loc.t, Loc.t * Type.t) Ast.Type.annotation = - annot_loc, ( - (func_annot_loc, fun_type), - Ast.Type.Function { Ast.Type.Function. - params = params_loc, { Ast.Type.Function.Params.params; rest; }; - return; - tparams; - } - ) + let params = + Core_list.map + ~f:(fun (_, { Ast.Function.Param.argument; default }) -> + if default <> None then + assert_false "Function declaration AST has unexpected shape"; + param_to_param_type argument) + params in - { Ast.Statement.DeclareFunction. 
- id = id_loc, id_name; + let rest = + Option.map + ~f:(fun (rest_loc, { Ast.Function.RestParam.argument }) -> + ( rest_loc, + { Ast.Type.Function.RestParam.argument = param_to_param_type argument } )) + rest + in + let annot : (ALoc.t, ALoc.t * Type.t) Ast.Type.annotation = + ( annot_loc, + ( (func_annot_loc, fun_type), + Ast.Type.Function + { + Ast.Type.Function.params = + (params_loc, { Ast.Type.Function.Params.params; rest }); + return; + tparams; + } ) ) + in + { + Ast.Statement.DeclareFunction.id = ((id_loc, fun_type), id_name); annot; - predicate = Some (pred_loc, Ast.Type.Predicate.Declared e) + predicate = Some (pred_loc, Ast.Type.Predicate.Declared e); } - | _ -> Typed_ast.Statement.DeclareFunction.error - ) - - | _ -> - None - end - | _ -> - None + | _ -> failwith "Internal error: malformed predicate declare function" ) + | _ -> None + end + | _ -> None and check_default_pattern cx left right = let left_loc = fst left in let right_loc = fst right in - let update_excuses update_fun = let exists_excuses = Context.exists_excuses cx in - let exists_excuse = Utils_js.LocMap.get left_loc exists_excuses + let exists_excuse = + Loc_collections.ALocMap.get left_loc exists_excuses |> Option.value ~default:ExistsCheck.empty - |> update_fun in - let exists_excuses = Utils_js.LocMap.add left_loc exists_excuse exists_excuses in + |> update_fun + in + let exists_excuses = Loc_collections.ALocMap.add left_loc exists_excuse exists_excuses in Context.set_exists_excuses cx exists_excuses in - match snd right with - | Ast.Expression.Literal literal -> - let open ExistsCheck in - begin match literal.Ast.Literal.value with + | Ast.Expression.Literal literal -> + ExistsCheck.( + begin + match literal.Ast.Literal.value with | Ast.Literal.String "" -> - update_excuses (fun excuse -> {excuse with string_loc = Some right_loc}) + update_excuses (fun excuse -> { excuse with string_loc = Some right_loc }) | Ast.Literal.Boolean false -> - update_excuses (fun excuse -> {excuse with bool_loc = Some right_loc}) + update_excuses (fun excuse -> { excuse with bool_loc = Some right_loc }) | Ast.Literal.Number 0. -> - update_excuses (fun excuse -> {excuse with number_loc = Some right_loc}) + update_excuses (fun excuse -> { excuse with number_loc = Some right_loc }) (* There's no valid default value for mixed to create an excuse. *) | _ -> () - end - | _ -> () + end) + | _ -> () -and post_assignment_havoc ~private_ name expr lhs_loc t = +and post_assignment_havoc ~private_ name exp orig_t t = (* types involved in the assignment are computed in pre-havoc environment. it's the assignment itself which clears refis *) Env.havoc_heap_refinements_with_propname ~private_ name; (* add type refinement if LHS is a pattern we handle *) - match Refinement.key expr with + match Refinement.key exp with | Some key -> (* NOTE: currently, we allow property refinements to propagate even if they may turn out to be invalid w.r.t. the @@ -6727,20 +7218,23 @@ and post_assignment_havoc ~private_ name expr lhs_loc t = object and refinement types - `o` and `t` here - are fully resolved. 
*) - ignore Env.(set_expr key lhs_loc t t) - | None -> - () + ignore Env.(set_expr key (fst exp) t orig_t) + | None -> () -and mk_initial_arguments_reason = Ast.Expression.(function -| [] -> [] -| Expression x :: args -> mk_expression_reason x :: mk_initial_arguments_reason args -| Spread _ :: _ -> [] -) +and mk_initial_arguments_reason = + Ast.Expression.( + function + | [] -> [] + | Expression x :: args -> mk_expression_reason x :: mk_initial_arguments_reason args + | Spread _ :: _ -> []) and warn_or_ignore_optional_chaining optional cx loc = - if optional - then match Context.esproposal_optional_chaining cx with - | Options.ESPROPOSAL_ENABLE - | Options.ESPROPOSAL_IGNORE -> () - | Options.ESPROPOSAL_WARN -> Flow.add_output cx (Flow_error.EExperimentalOptionalChaining loc) - else () + if optional then + match Context.esproposal_optional_chaining cx with + | Options.ESPROPOSAL_ENABLE + | Options.ESPROPOSAL_IGNORE -> + () + | Options.ESPROPOSAL_WARN -> + Flow.add_output cx (Error_message.EExperimentalOptionalChaining loc) + else + () diff --git a/src/typing/subst.ml b/src/typing/subst.ml index cb0727e893b..19543878915 100644 --- a/src/typing/subst.ml +++ b/src/typing/subst.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,6 +7,7 @@ open Type open Reason + (*****************) (* substitutions *) (*****************) @@ -15,128 +16,179 @@ open Reason force substitution under polymorphic types. This ensures that existential type variables under a polymorphic type remain unevaluated until the polymorphic type is applied. **) -let substituter = object(self) - inherit [Type.t SMap.t * bool * use_op option] Type_mapper.t_with_uses as super - - method tvar _cx _map_cx _r id = id +let substituter = + object (self) + inherit [Type.t SMap.t * bool * use_op option] Type_mapper.t_with_uses as super - method call_prop cx map_cx id = - let t = Context.find_call cx id in - let t' = self#type_ cx map_cx t in - if t == t' then id else Context.make_call_prop cx t' + method tvar _cx _map_cx _r id = id - method props cx map_cx id = - let props_map = Context.find_props cx id in - let props_map' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props_map in - let id' = if props_map == props_map' then id - else Context.make_property_map cx props_map' in - id' - - method exports cx map_cx id = - let exps = Context.find_exports cx id in - let map_loc_type_pair ((loc, t) as orig) = + method call_prop cx map_cx id = + let t = Context.find_call cx id in let t' = self#type_ cx map_cx t in - if t == t' then orig else (loc, t') - in - let exps' = SMap.ident_map map_loc_type_pair exps in - if exps == exps' then id - else Context.make_export_map cx exps' - - method! 
type_ cx map_cx t = - let (map, force, use_op) = map_cx in - if SMap.is_empty map then t - else match t with - | BoundT (tp_reason, name, _) -> - begin match SMap.get name map with - | None -> t - | Some param_t when name = "this" -> - ReposT (annot_reason tp_reason, param_t) - | Some param_t -> - (match desc_of_reason ~unwrap:false (reason_of_t param_t) with - | RPolyTest _ -> - mod_reason_of_t (fun reason -> - annot_reason (repos_reason (aloc_of_reason tp_reason |> ALoc.to_loc) reason) - ) param_t - | _ -> - param_t - ) - end - - | ExistsT reason -> - if force then Tvar.mk cx reason - else t - - | DefT (reason, PolyT (xs, inner, _)) -> - let xs, map, changed = List.fold_left (fun (xs, map, changed) typeparam -> - let bound = self#type_ cx (map, force, use_op) typeparam.bound in - let default = match typeparam.default with - | None -> None - | Some default -> - let default_ = self#type_ cx (map, force, use_op) default in - if default_ == default then typeparam.default else Some default_ + if t == t' then + id + else + Context.make_call_prop cx t' + + method props cx map_cx id = + let props_map = Context.find_props cx id in + let props_map' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props_map in + let id' = + if props_map == props_map' then + id + (* When substitution results in a new property map, we have to use a + generated id, rather than a location from source. The substituted + object will have the same location as the generic version, meaning + that this location will not serve as a unique identifier. *) + else + Context.generate_property_map cx props_map' + in + id' + + method exports cx map_cx id = + let exps = Context.find_exports cx id in + let map_loc_type_pair ((loc, t) as orig) = + let t' = self#type_ cx map_cx t in + if t == t' then + orig + else + (loc, t') + in + let exps' = SMap.ident_map map_loc_type_pair exps in + if exps == exps' then + id + else + Context.make_export_map cx exps' + + method! type_ cx map_cx t = + let (map, force, use_op) = map_cx in + if SMap.is_empty map then + t + else + let t_out = + match t with + | BoundT (tp_reason, name, _) -> + begin + match SMap.get name map with + | None -> t + | Some (ReposT (_, param_t)) when name = "this" -> + ReposT (annot_reason tp_reason, param_t) + | Some param_t when name = "this" -> ReposT (annot_reason tp_reason, param_t) + | Some param_t -> + (match desc_of_reason ~unwrap:false (reason_of_t param_t) with + | RPolyTest _ -> + mod_reason_of_t + (fun reason -> + let loc = aloc_of_reason tp_reason in + repos_reason loc ~annot_loc:loc reason) + param_t + | _ -> param_t) + end + | ExistsT reason -> + if force then + Tvar.mk cx reason + else + t + | DefT (reason, trust, PolyT (tparams_loc, xs, inner, _)) -> + let (xs, map, changed) = + Nel.fold_left + (fun (xs, map, changed) typeparam -> + let bound = self#type_ cx (map, force, use_op) typeparam.bound in + let default = + match typeparam.default with + | None -> None + | Some default -> + let default_ = self#type_ cx (map, force, use_op) default in + if default_ == default then + typeparam.default + else + Some default_ + in + ( { typeparam with bound; default } :: xs, + SMap.remove typeparam.name map, + changed || bound != typeparam.bound || default != typeparam.default )) + ([], map, false) + xs + in + let xs = xs |> List.rev |> Nel.of_list in + (* The constructed list will always be nonempty because we fold over a nonempty list and add + * an element to the resulting list for every element in the original list. 
It's just a bit + * tricky to show this by construction while preserving the exact semantics of the above code. + *) + let xs = Option.value_exn xs in + let inner_ = self#type_ cx (map, false, None) inner in + let changed = changed || inner_ != inner in + if changed then + DefT (reason, trust, PolyT (tparams_loc, xs, inner_, Context.make_nominal cx)) + else + t + | ThisClassT (reason, this) -> + let map = SMap.remove "this" map in + let this_ = self#type_ cx (map, force, use_op) this in + if this_ == this then + t + else + ThisClassT (reason, this_) + | TypeAppT (r, op, c, ts) -> + let c' = self#type_ cx map_cx c in + let ts' = ListUtils.ident_map (self#type_ cx map_cx) ts in + if c == c' && ts == ts' then + t + else + (* If the TypeAppT changed then one of the type arguments had a + * BoundT that was substituted. In this case, also change the use_op + * so we can point at the op which instantiated the types that + * were substituted. *) + let use_op = Option.value use_op ~default:op in + TypeAppT (r, use_op, c', ts') + | EvalT (x, TypeDestructorT (op, r, d), _) -> + let x' = self#type_ cx map_cx x in + let d' = self#destructor cx map_cx d in + if x == x' && d == d' then + t + else + (* If the EvalT changed then either the target or destructor had a + * BoundT that was substituted. In this case, also change the use_op + * so we can point at the op which instantiated the types that + * were substituted. *) + let use_op = Option.value use_op ~default:op in + EvalT (x', TypeDestructorT (use_op, r, d'), Reason.mk_id ()) + (* We only want to change the EvalT id if the rest of the EvalT actually changed *) + | EvalT (t', dt, _id) -> + let t'' = self#type_ cx map_cx t' in + let dt' = self#defer_use_type cx map_cx dt in + if t' == t'' && dt == dt' then + t + else + EvalT (t'', dt', Reason.mk_id ()) + | ModuleT _ + | InternalT (ExtendsT _) -> + failwith (Utils_js.spf "Unhandled type ctor: %s" (string_of_ctor t)) + (* TODO *) + | t -> super#type_ cx map_cx t in - { typeparam with bound; default; }::xs, - SMap.remove typeparam.name map, - changed || bound != typeparam.bound || default != typeparam.default - ) ([], map, false) xs in - let inner_ = self#type_ cx (map, false, None) inner in - let changed = changed || inner_ != inner in - if changed then DefT (reason, PolyT (List.rev xs, inner_, mk_id ())) else t - - | ThisClassT (reason, this) -> - let map = SMap.remove "this" map in - let this_ = self#type_ cx (map, force, use_op) this in - if this_ == this then t else ThisClassT (reason, this_) - - | DefT (r, TypeAppT (op, c, ts)) -> - let c' = self#type_ cx map_cx c in - let ts' = ListUtils.ident_map (self#type_ cx map_cx) ts in - if c == c' && ts == ts' then t else ( - (* If the TypeAppT changed then one of the type arguments had a - * BoundT that was substituted. In this case, also change the use_op - * so we can point at the op which instantiated the types that - * were substituted. *) - let use_op = Option.value use_op ~default:op in - DefT (r, TypeAppT (use_op, c', ts')) - ) - - | EvalT (x, TypeDestructorT (op, r, d), _) -> - let x' = self#type_ cx map_cx x in - let d' = self#destructor cx map_cx d in - if x == x' && d == d' then t - else ( - (* If the EvalT changed then either the target or destructor had a - * BoundT that was substituted. In this case, also change the use_op - * so we can point at the op which instantiated the types that - * were substituted. 
*) - let use_op = Option.value use_op ~default:op in - EvalT (x', TypeDestructorT (use_op, r, d'), Reason.mk_id ()) - ) - - (* We only want to change the EvalT id if the rest of the EvalT actually changed *) - | EvalT (t', dt, _id) -> - let t'' = self#type_ cx map_cx t' in - let dt' = self#defer_use_type cx map_cx dt in - if t' == t'' && dt == dt' then t - else EvalT (t'', dt', Reason.mk_id ()) - - | ModuleT _ - | InternalT (ExtendsT _) - -> - failwith (Utils_js.spf "Unhandled type ctor: %s" (string_of_ctor t)) (* TODO *) - - | t -> super#type_ cx map_cx t - - method! predicate cx (map, force, use_op) p = match p with - | LatentP (t, i) -> - let t' = self#type_ cx (map, force, use_op) t in - if t == t' then p else LatentP (t', i) - | p -> p - - (* The EvalT case is the only case that calls this function. We've explicitly overrided it - * in all cases, so this should never be called *) - method eval_id _cx _map_cx _id = assert false -end - -let subst cx ?use_op ?(force=true) map = - substituter#type_ cx (map, force, use_op) + if t == t_out then + t + else + match Reason.desc_of_reason ~unwrap:false (reason_of_t t_out) with + | Reason.RTypeAlias (name, true, d) -> + let desc = Reason.RTypeAlias (name, false, d) in + mod_reason_of_t (Reason.replace_desc_reason desc) t_out + | _ -> t_out + + method! predicate cx (map, force, use_op) p = + match p with + | LatentP (t, i) -> + let t' = self#type_ cx (map, force, use_op) t in + if t == t' then + p + else + LatentP (t', i) + | p -> p + + (* The EvalT case is the only case that calls this function. We've explicitly overrided it + * in all cases, so this should never be called *) + method eval_id _cx _map_cx _id = assert false + end + +let subst cx ?use_op ?(force = true) map = substituter#type_ cx (map, force, use_op) diff --git a/src/typing/subst.mli b/src/typing/subst.mli index 68ee2ca574d..edf63aed921 100644 --- a/src/typing/subst.mli +++ b/src/typing/subst.mli @@ -1,8 +1,8 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -val subst: Context.t -> ?use_op:Type.use_op -> ?force:bool -> (Type.t SMap.t) -> Type.t -> Type.t +val subst : Context.t -> ?use_op:Type.use_op -> ?force:bool -> Type.t SMap.t -> Type.t -> Type.t diff --git a/src/typing/trace.ml b/src/typing/trace.ml index 52ee28e6326..ffc6f408d51 100644 --- a/src/typing/trace.ml +++ b/src/typing/trace.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -42,7 +42,9 @@ open Type structure for readability.) *) type step = Type.t * Type.use_t * parent * int + and t = step list + and parent = Parent of t let compare = Pervasives.compare @@ -52,14 +54,12 @@ let compare = Pervasives.compare may be thrown away due to externally imposed limits on trace depth; b) the recursion limiter in the flow function checks this on every call. *) -let trace_depth trace = - List.fold_left (fun acc (_, _, _, d) -> max acc d) 0 trace +let trace_depth trace = List.fold_left (fun acc (_, _, _, d) -> max acc d) 0 trace (* Single-step trace with no parent. This corresponds to a top-level invocation of the flow function, e.g. 
due to a constraint generated in Type_inference_js *) -let unit_trace lower upper = - [lower, upper, Parent [], 1] +let unit_trace lower upper = [(lower, upper, Parent [], 1)] let dummy_trace = [] @@ -70,8 +70,13 @@ let dummy_trace = [] *) let rec_trace ~max lower upper parent = let parent_depth = trace_depth parent in - let parent = if max > 0 then parent else [] in - [lower, upper, Parent parent, parent_depth + 1] + let parent = + if max > 0 then + parent + else + [] + in + [(lower, upper, Parent parent, parent_depth + 1)] (* join a list of traces *) let concat_trace = List.concat @@ -79,7 +84,9 @@ let concat_trace = List.concat (* used to index trace nodes *) module TraceMap : MyMap.S with type key = t = MyMap.Make (struct type key = t + type t = key + let compare = compare end) @@ -88,52 +95,55 @@ end) *) let index_trace = let rec f (level, tmap, imap) trace = - if level <= 0 || TraceMap.mem trace tmap - then level, tmap, imap - else ( - let tmap, imap = + if level <= 0 || TraceMap.mem trace tmap then + (level, tmap, imap) + else + let (tmap, imap) = let i = TraceMap.cardinal tmap in - TraceMap.(add trace i tmap), IMap.(add i trace imap) + (TraceMap.(add trace i tmap), IMap.(add i trace imap)) in - List.fold_left (fun acc (_, _, Parent parent, _) -> - match parent with [] -> acc | _ -> f acc parent - ) (level - 1, tmap, imap) trace - ) + List.fold_left + (fun acc (_, _, Parent parent, _) -> + match parent with + | [] -> acc + | _ -> f acc parent) + (level - 1, tmap, imap) + trace in fun level trace -> - let _, tmap, imap = f (level, TraceMap.empty, IMap.empty) trace in - tmap, imap - + let (_, tmap, imap) = f (level, TraceMap.empty, IMap.empty) trace in + (tmap, imap) (* scan a trace tree, return maximum position length of reasons at or above the given depth limit, and min of that limit and actual max depth *) let max_depth_of_trace limit trace = let rec f depth (_, _, parent, _) = - if depth > limit then depth - else ( + if depth > limit then + depth + else match parent with | Parent [] -> depth | Parent trace -> List.fold_left f (depth + 1) trace - ) - in List.fold_left f 0 trace - + in + List.fold_left f 1 trace (* reformat a reason's description with - the given prefix and suffix: if either is nonempty, "desc" becomes "prefix[desc]suffix" *) let pretty_r r prefix suffix = - replace_reason (fun desc -> - let desc_str = string_of_desc desc in - let custom = - if prefix = "" && suffix = "" - then desc_str - else spf "%s[%s]%s" prefix desc_str suffix - in - RCustom custom - ) r - + update_desc_new_reason + (fun desc -> + let desc_str = string_of_desc desc in + let custom = + if prefix = "" && suffix = "" then + desc_str + else + spf "%s[%s]%s" prefix desc_str suffix + in + RCustom custom) + r (* prettyprint a trace. what we print: @@ -146,45 +156,41 @@ let pretty_r r prefix suffix = if the step was derived from another path, we append a note to that effect. 
*) -let reasons_of_trace ?(level=0) trace = +let reasons_of_trace ?(level = 0) trace = let max_depth = max_depth_of_trace level trace in let level = min level max_depth in - - let tmap, imap = index_trace level trace in - + let (tmap, imap) = index_trace level trace in let is_pipelined_tvar ~steps ~i lower = - i > 0 && ( - let upper = match List.nth steps (i - 1) with (_, upper, _, _) -> upper in - match upper with - | UseT (_, upper) -> lower = upper - | _ -> false - ) + i > 0 + && + let upper = + match List.nth steps (i - 1) with + | (_, upper, _, _) -> upper + in + match upper with + | UseT (_, upper) -> lower = upper + | _ -> false in - let print_step (steps: step list) i (lower, upper, Parent parent, _) = + let print_step (steps : step list) i (lower, upper, Parent parent, _) = (* omit lower if it's a pipelined tvar *) - (if is_pipelined_tvar ~steps ~i lower - then [] - else [pretty_r (reason_of_t_add_id lower) - (spf "%s " (string_of_ctor lower)) ""] - ) - @ - [pretty_r (reason_of_use_t_add_id upper) - (spf "~> %s " (string_of_use_ctor upper)) - (if parent = [] - then "" - else match TraceMap.get parent tmap with - | Some i -> spf " (from path %d)" (i + 1) - | None -> " (from [not shown])" - ) - ] + ( if is_pipelined_tvar ~steps ~i lower then + [] + else + [pretty_r (reason_of_t_add_id lower) (spf "%s " (string_of_ctor lower)) ""] ) + @ [ + pretty_r + (reason_of_use_t_add_id upper) + (spf "~> %s " (string_of_use_ctor upper)) + ( if parent = [] then + "" + else + match TraceMap.get parent tmap with + | Some i -> spf " (from path %d)" (i + 1) + | None -> " (from [not shown])" ); + ] in - - let print_path i (steps: step list) = + let print_path i (steps : step list) = let desc = RCustom (spf "* path %d:" (i + 1)) in - (locationless_reason desc) :: - List.concat (List.mapi (print_step steps) steps) + locationless_reason desc :: List.concat (List.mapi (print_step steps) steps) in - - List.concat (List.rev (IMap.fold ( - fun i flow acc -> (print_path i flow) :: acc - ) imap [])) + List.concat (List.rev (IMap.fold (fun i flow acc -> print_path i flow :: acc) imap [])) diff --git a/src/typing/trace.mli b/src/typing/trace.mli index 621cb5f3cec..46292a21401 100644 --- a/src/typing/trace.mli +++ b/src/typing/trace.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,15 +7,16 @@ type t -val compare: t -> t -> int +val compare : t -> t -> int -val trace_depth: t -> int -val unit_trace: Type.t -> Type.use_t -> t -val rec_trace: max: int -> Type.t -> Type.use_t -> t -> t -val concat_trace: t list -> t -val dummy_trace: t +val trace_depth : t -> int -val reasons_of_trace: - ?level:int -> - t -> - Reason.reason list +val unit_trace : Type.t -> Type.use_t -> t + +val rec_trace : max:int -> Type.t -> Type.use_t -> t -> t + +val concat_trace : t list -> t + +val dummy_trace : t + +val reasons_of_trace : ?level:int -> t -> Reason.reason list diff --git a/src/typing/trust.ml b/src/typing/trust.ml new file mode 100644 index 00000000000..61d83fbc9ea --- /dev/null +++ b/src/typing/trust.ml @@ -0,0 +1,429 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +(* trust_qualifier information describes a type's relationship with the any type. 
+ These may be applied to any DefT (except AnyT), and determine whether + the type is a subtype, supertype, both, or neither of "any". + To determine whether something is trusted we just need to care about whether + any is a subtype of it or not: if any is a subtype, then we can't trust_qualifier it, + otherwise we can. However, we care about the dual notion of "privacy" because + of contravariance: if we were allowed to pass (ex.) an array of trusted ints + to any, we would lose our guarantees of soundness, because the any-typed code + could arbitrarily mutate the array to contain strings. To prevent this, such + an array must be private. + + In addition, the trustedness of some types are not always known. For example, + we may wish to infer whether some type annotation is trusted or not in order + to report the trust_qualifier coverage of a module. + To do this inference, we need to be able to track interactions between types. + For example, if any flows into number, then that number needs to be marked as + untrusted. We'll accomplish this by reusing some of the ideas behind Flow's + tvar inference, but in order to do so, the type whose trust_qualifier is being inferred + (in this case number) needs to have an index into a graph of trust_qualifier inference + variables, just like tvars contain an index into the context graph. + + This module defines trust_qualifier information for Flow types. Since trust_qualifier information + is stored on ALL DefTs in the system (see type.ml), we want trust_qualifier to have a + very compact representation, and we need to represent both + trustedness/privacy and pointers into the trust_qualifier graph. + + The primary types that this module exports are the `trust` type and the + `trust_rep` type. A value of type `trust` represents trustedness and privacy: + these are the values that are directly compared to raise errors if an any + type flows into a type that is annotated as being trusted, for example. + A value of type 'trust_rep' is what is stored in DefTs, and represents EITHER + a `trust` value, OR a pointer into the trust_qualifier variable graph. + + ** trust_rep ** + In principle, we'd love to define the `trust_rep` type as such: + type trust_rep = Qualifier of trust_qualifier | Inferred of ident + but such a representation in every DefT would cause a pretty huge memory + overhead. Instead, we use a bitwise representation: a trust_rep value is a + 63bit int with the layout + (lower bits) [ tag | data ...... ] (upper bits) + ^ ^ + 1 bit 62 bits + + If tag is 0, then data should be interpreted as trust_qualifier information, and can be + converted to type `trust`. (See below for the representation of `trust`.) + + If tag is 1, then data should be interpreted as an identifier, which will be + a pointer into a graph for trust_qualifier inference, and be converted to an ident. + We're assuming we'll never have more than 2^62 vars :) + + This is all internal implementation details within this module, of course. + Externally, the core API for using trust_rep is: + is_qualifier: Does a trust_rep represent trust_qualifier data? + is_ident: Does a trust_rep represent and inference ident? 
+ as_qualifier, + as_ident: convert to trust_qualifier or ident and assert_false if the value does not + correspond + from_qualifier, + from_ident: convert from trust_qualifier or ident to trust_rep + expand: convert from a trust_rep into the expanded trust_qualifier representation as + above (qualifier of trust_qualifier | Inferred of ident) + compress: convert from the expanded representation into the compact one. + + ** trust_qualifier ** + A value of type `trust` represents whether a type is trusted or tainted, + and public or private. Each trust_qualifier value has two components, represented by + the `trust_level` type, which represent trust_qualifier and privacy. Each trust_qualifier element + can be "top" (representing trusted or private) or "bot" (representing + tainted or public). The `trust_level` type is also inhabited by "unk" for when + the trustedness or privacy of a trust_qualifier value is unknown, as in the initial + state of a trust_qualifier variable, and "enf", which represents when runtime + checks are ensuring that a value is trusted. + + Internally a trust_qualifier value is represented as an int, with the `trust_level`s each + consisting of two bits: + (lower bits) [ privacy | trust_qualifier | unused... ] (upper bits) + ^ ^ + trust_level trust_level + 2 bits 2 bits + + trust_levels do not need to be used externally: instead, the API for `trust` + values allows clients to check if a trust_qualifier value is trusted or private, + whether two trust_qualifier values are related by subtyping, etc. + + ** trust_qualifier constructors ** + + This module exports several constructors for trusts and trust_reps which are + described below. The basic trust_qualifier values they refer to are + + * Initial = (Top, Bot) = T < any, any string, unit, string) format -> trust_qualifier -> 'a + + val fail_trust_rep : (int -> string, unit, string) format -> trust_rep -> 'a + + val untag_ident : trust_rep -> ident + + val untag_qualifier : trust_rep -> trust_qualifier + + val tag_ident : ident -> trust_rep + + val tag_qualifier : trust_qualifier -> trust_rep + + val is_ident : trust_rep -> bool + + val is_qualifier : trust_rep -> bool + + val bot : trust_level + + val top : trust_level + + val unk : trust_level + + val enf : trust_level + + val get_taint : trust_qualifier -> trust_level + + val get_pub : trust_qualifier -> trust_level + + val make_trust : trust_level -> trust_level -> trust_qualifier +end = struct + type bitrep = int + + type trust_qualifier = bitrep + + type trust_rep = bitrep + + let fail s (n : int) = Utils_js.assert_false (Utils_js.spf s n) + + let fail_trust s (n : trust_qualifier) = fail s n + + let fail_trust_rep s (n : trust_rep) = fail s n + + module Tag : sig + type tag + + val qualifier : tag + + val ident : tag + + val get_tag : trust_rep -> tag + + val untag : trust_rep -> bitrep + + val tag : tag -> bitrep -> trust_rep + end = struct + type tag = int + + let tag_size = 1 + + let tag_mask = mask tag_size + + let qualifier = 0 + + let ident = 1 + + let get_tag n = n land tag_mask + + let untag n = n lsr tag_size + + let tag t n = (n lsl tag_size) lor t + end + + let is_ident (n : trust_rep) : bool = Tag.get_tag n = Tag.ident + + let is_qualifier (n : trust_rep) : bool = Tag.get_tag n = Tag.qualifier + + let untag_ident : trust_rep -> ident = Tag.untag + + let untag_qualifier : trust_rep -> trust_qualifier = Tag.untag + + let tag_qualifier : trust_qualifier -> trust_rep = Tag.tag Tag.qualifier + + let tag_ident : ident -> trust_rep = Tag.tag Tag.ident + + 
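(* Editor's aside: a standalone sketch of the packing arithmetic described in
   the comment at the top of this file, so the bit layout is concrete. The
   constants mirror the ones used in this module; [mask] is a local stand-in
   for the module's own helper, and none of this is the production
   implementation. *)
let mask n = (1 lsl n) - 1

(* trust_level encoding: bot = 0, top = 1, unk = 2, enf = 3 *)
let make_trust taint pub = (taint lsl 2) lor pub

let get_taint t = (t lsr 2) land mask 2

let get_pub t = t land mask 2

(* trust_rep encoding: the low bit is the tag (0 = qualifier, 1 = ident) *)
let tag_qualifier q = (q lsl 1) lor 0

let tag_ident i = (i lsl 1) lor 1

let is_ident r = r land 1 = 1

let untag r = r lsr 1

let () =
  (* "initial" trust = (Top, Bot): trusted but public *)
  let initial = make_trust 1 0 in (* 0b100 = 4 *)
  let rep = tag_qualifier initial in (* 0b1000 = 8, tag bit clear *)
  assert (not (is_ident rep));
  assert (get_taint (untag rep) = 1);
  assert (get_pub (untag rep) = 0);
  (* an inference-variable id packs with the low bit set *)
  assert (is_ident (tag_ident 42))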
module Elt : sig + type trust_level + + val bot : trust_level + + val top : trust_level + + val unk : trust_level + + val enf : trust_level + + val get_taint : trust_qualifier -> trust_level + + val get_pub : trust_qualifier -> trust_level + + val make_trust : trust_level -> trust_level -> trust_qualifier + end = struct + type trust_level = int + + let elt_size = 2 + + let elt_mask = mask elt_size + + let bot : trust_level = 0 + + let top : trust_level = 1 + + let unk : trust_level = 2 + + let enf : trust_level = 3 + + let get_taint (n : trust_qualifier) : trust_level = (n lsr elt_size) land elt_mask + + let get_pub (n : trust_qualifier) : trust_level = n land elt_mask + + let make_trust (taint : trust_level) (pub : trust_level) : trust_qualifier = + (taint lsl elt_size) lor pub + end + + include Elt +end + +include TrustRepresentation + +type expanded_trust = + | Qualifier of trust_qualifier + | Inferred of ident + +let dynamic = make_trust bot bot + +let initial = make_trust top bot + +let terminal = make_trust bot top + +let static = make_trust top top + +let infertrust = make_trust unk unk + +let dynamic_info = tag_qualifier dynamic + +let _initial_info = tag_qualifier initial + +let _terminal_info = tag_qualifier terminal + +let _static_info = tag_qualifier static + +let infer_info = tag_qualifier infertrust + +let bad_trust_rep n = fail_trust "invalid trust_qualifier representation: %d" n + +let expand n = + if is_ident n then + Inferred (untag_ident n) + else + Qualifier (untag_qualifier n) + +let compress x = + match x with + | Inferred n -> tag_ident n + | Qualifier trust_qualifier -> tag_qualifier trust_qualifier + +let as_qualifier n = + if is_qualifier n then + untag_qualifier n + else + fail_trust_rep "trust_rep value does not represent trust: %d" n + +let as_ident n = + if is_ident n then + untag_ident n + else + fail_trust_rep "trust_rep value does not represent inference ident: %d" n + +let from_ident ident = tag_ident ident + +let from_qualifier n = tag_qualifier n + +let trust_value ~default n = + if is_qualifier n then + untag_qualifier n + else + default + +let trust_value_map ~default ~f n = + if is_qualifier n then + untag_qualifier n |> f + else + default + +let string_of_taint t = + let n = get_taint t in + if n = bot then + "Tainted" + else if n = top then + "Trusted" + else if n = unk then + "?" + else if n = enf then + "Enforced" + else + bad_trust_rep t + +let string_of_pub t = + let n = get_pub t in + if n = bot then + "Public" + else if n = top then + "Private" + else if n = unk then + "?" + else if n = enf then + "Enforced" + else + bad_trust_rep t + +let string_of_trust n = Printf.sprintf "<%s/%s>" (string_of_taint n) (string_of_pub n) + +let string_of_trust_rep get_trust n = + if is_qualifier n then + untag_qualifier n |> string_of_trust + else + Printf.sprintf "%d -> [%s]" (untag_ident n) (untag_ident n |> get_trust |> string_of_trust) + +(* trust_qualifier creation functions and modules: + These functions will be used to generate trust_qualifier information when DefTs are + instantiated (unless the trust_qualifier information ought to be propagated from some + other type. *) +(* bogus_trust is a development placeholder, used to invent trust_qualifier when we + haven't yet figured out what the right trust_qualifier information to use is. The + number of calls to bogus_trust in the codebase is likely to be a decent + measure of how complete the trusted types project is. 
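As a concrete check on the qualifiers described above, the two guarantees the scheme is built around can be asserted against a local restatement of the encoding and of the subtype_trust test defined later in this file. This is an editor's sketch, not the module's code:

let bot = 0 and top = 1 and unk = 2 and enf = 3

let make_trust taint pub = (taint lsl 2) lor pub
let get_taint t = (t lsr 2) land 3
let get_pub t = t land 3

let subtype_bit l u =
  l = u || (l = bot && u = top) || l = unk || u = unk || l = enf || u = enf

(* taint is compared contravariantly, privacy covariantly *)
let subtype_trust l u =
  subtype_bit (get_taint u) (get_taint l) && subtype_bit (get_pub l) (get_pub u)

let () =
  let dynamic = make_trust bot bot in (* tainted and public *)
  let initial = make_trust top bot in (* trusted and public *)
  (* a trusted value may still flow to an any-compatible position... *)
  assert (subtype_trust initial dynamic);
  (* ...but any may not flow into a trusted position *)
  assert (not (subtype_trust dynamic initial))

The flipped arguments on the taint component are what make taint flow in the opposite direction to privacy.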
*) +let bogus_trust () = dynamic_info + +(* literal_trust is the trust_qualifier of literals: they're known to be trusted, since + we can see their entire (trivial) history, but they're by default free to + flow anywhere. *) +let literal_trust () = infer_info + +(* annot_trust is the trust_qualifier of standard type annotations; currently, unless they're made to be + trusted with the below operators, they accept untrusted values. *) +let annot_trust () = dynamic_info + +let unknown_qualifier () = infertrust + +let dynamic_qualifier () = dynamic + +(* Given a trust_qualifier datum, add (if not already present) the requirement that it + be trusted or private. *) +let make_trusted n = make_trust top (get_pub n) + +let make_private n = make_trust (get_taint n) top + +let make_enforced n = make_trust enf (get_pub n) + +let is_tainted n = get_taint n = bot + +let is_public n = get_pub n = bot + +let subtype_bit l u = l = u || (l = bot && u = top) || l = unk || u = unk || l = enf || u = enf + +let subtype_trust l u = + subtype_bit (get_taint u) (get_taint l) && subtype_bit (get_pub l) (get_pub u) + +let taint_with tainted other = + if get_taint other <> enf && is_tainted tainted then + make_trust bot (get_pub other) + else + other + +let publicize_with public other = + if is_public public then + make_trust (get_taint other) bot + else + other + +let join_bit l r = + if l = enf || r = enf then + enf + else if l = top || r = top then + top + else if l = bot || r = bot then + bot + else + unk + +let join_trust l r = + make_trust (join_bit (get_taint l) (get_taint r)) (join_bit (get_pub l) (get_pub r)) + +let fix_bit n = + if n = unk then + bot + else + n + +let fix t = make_trust (fix_bit (get_taint t)) (fix_bit (get_pub t)) diff --git a/src/typing/trust.mli b/src/typing/trust.mli new file mode 100644 index 00000000000..e3f03c2b913 --- /dev/null +++ b/src/typing/trust.mli @@ -0,0 +1,68 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ *) + +type trust_qualifier + +type trust_rep + +type expanded_trust = + | Qualifier of trust_qualifier + | Inferred of int + +val is_ident : trust_rep -> bool + +val is_qualifier : trust_rep -> bool + +val expand : trust_rep -> expanded_trust + +val compress : expanded_trust -> trust_rep + +val as_qualifier : trust_rep -> trust_qualifier + +val as_ident : trust_rep -> int + +val from_ident : int -> trust_rep + +val from_qualifier : trust_qualifier -> trust_rep + +val trust_value : default:trust_qualifier -> trust_rep -> trust_qualifier + +val trust_value_map : default:'t -> f:(trust_qualifier -> 't) -> trust_rep -> 't + +val string_of_trust : trust_qualifier -> string + +val string_of_trust_rep : (int -> trust_qualifier) -> trust_rep -> string + +val bogus_trust : unit -> trust_rep + +val literal_trust : unit -> trust_rep + +val annot_trust : unit -> trust_rep + +val unknown_qualifier : unit -> trust_qualifier + +val dynamic_qualifier : unit -> trust_qualifier + +val make_trusted : trust_qualifier -> trust_qualifier + +val make_private : trust_qualifier -> trust_qualifier + +val make_enforced : trust_qualifier -> trust_qualifier + +val is_public : trust_qualifier -> bool + +val is_tainted : trust_qualifier -> bool + +val subtype_trust : trust_qualifier -> trust_qualifier -> bool + +val publicize_with : trust_qualifier -> trust_qualifier -> trust_qualifier + +val taint_with : trust_qualifier -> trust_qualifier -> trust_qualifier + +val join_trust : trust_qualifier -> trust_qualifier -> trust_qualifier + +val fix : trust_qualifier -> trust_qualifier diff --git a/src/typing/trust_checking.ml b/src/typing/trust_checking.ml new file mode 100644 index 00000000000..df15e09e740 --- /dev/null +++ b/src/typing/trust_checking.ml @@ -0,0 +1,269 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Type +open Trust_constraint +open Debug_js.Verbose + +(* Equivalent to `forall x in set, fn x`, but unlike ISet.for_all, is not + short-circuiting, so fn can be side-effectful. *) +let for_all_iter fn set = + let r = ref true in + let fn' x = if (not (fn x)) && !r then r := false in + ISet.iter fn' set; + !r + +module TrustKit (Flow : Flow_common.S) : Flow_common.TRUST_CHECKING = struct + include Flow + + (* Create a new trust variable with an optional initial trust setting and install + it in the trust graph. *) + let mk_trust_var cx ?initial () = + let tvar = Reason.mk_id () in + let initial = Option.value initial ~default:(dynamic_qualifier ()) in + Context.add_trust_var cx tvar (new_unresolved_root initial); + tvar + + (* + See below for the algorithm that uses these helpers. + *) + let set_new_lower_bound cx trace id ltrust utrust new_trust ubounds = + print_if_verbose cx trace [Printf.sprintf "Tainting %d to %s" id (string_of_trust new_trust)]; + if subtype_trust ltrust utrust then ( + set_trust ubounds new_trust; + true + ) else + false + + let set_new_lower_bound_of_id cx trace ltrust id = + match Context.find_trust_graph cx id with + | TrustResolved utrust -> subtype_trust ltrust utrust + | TrustUnresolved bounds -> + let utrust = get_trust bounds in + let new_trust = taint_with ltrust utrust in + set_new_lower_bound cx trace id ltrust utrust new_trust bounds + + (* + This is the main event for adding a new lower trust bound to an unresolved + trust variable. 
Recall that all unresolved trust variables point to a + bounds in the trust graph; this is the `bounds` parameter to this function, + while id is the pointer into the trust graph and ltrust is the new lower bound + being added. The bounds value contains the variable's current trust value, and + its upper and lower variable bounds. + + Part of the work this algorithm does is ensure that the upper and lower + variable bounds of every variable are closed: if Z is an upper bound of Y and + Y is an upper bound of X, then Z is in the upper bounds of X. This means that + this algorithm does not have to be recursive. + + The algorithm tries to mark the variable receiving the new trust lower bound, and + all of the variable's upper bound variables, as being tainted if the new lower + bound is also tainted. This attempt will fail if the new lower bound is not a trust + subtype of any of these trusts: this would occur if a tainted type like any + flowed (directly or transitively) into a type that was marked as trusted. + If the lower bound is not a subtype of the trust of the variable or any of its + upper bounds, the algorithm returns false, and the caller may raise a type error. + However, this attempt-to-taint process is NOT short-circuiting: even if the variable + itself is trusted, it may have upper bound variables whose trust is unknown, and + we want to taint them before raising an error. In other words, a type + annotated as being trusted is not a barrier to taint: if a sequence of flows like + any -> number -> $Trusted -> number + happens, we want to taint both non-trusted numbers, in order to better understand + trust coverage. + *) + let flow_new_lower_bound cx trace ltrust id bounds = + let utrust = get_trust bounds in + let (_, uppervars) = get_bounds bounds in + let new_trust = taint_with ltrust utrust in + if new_trust = utrust then + true + else if set_new_lower_bound cx trace id ltrust utrust new_trust bounds then + for_all_iter (set_new_lower_bound_of_id cx trace new_trust) uppervars + else ( + ignore (for_all_iter (set_new_lower_bound_of_id cx trace new_trust) uppervars); + false + ) + + let add_trust_lower_bound cx trace ltrust id = + match Context.find_trust_graph cx id with + | TrustResolved utrust -> subtype_trust ltrust utrust + | TrustUnresolved bounds -> flow_new_lower_bound cx trace ltrust id bounds + + (* These functions work exactly as above, except for upper trust bounds + propagating to lower variable bounds, and with publicity instead of taint.
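(* Editor's note: an illustrative sketch, not part of this diff; x_id, y_id
   and z_id below are hypothetical trust variables. Suppose x_id's (closed)
   upper bounds are {y_id; z_id}, where y_id was previously made trusted via
   strengthen_trust and z_id is an ordinary inferred number. Then

     add_trust_lower_bound cx trace (dynamic_qualifier ()) x_id

   taints x_id, still visits z_id (tainting it as well) and y_id (where
   subtype_trust fails), and finally returns false so the caller can report
   a trust error. This is the any -> number -> $Trusted<number> -> number
   situation described above; the upper-bound functions below mirror it with
   publicize_with in place of taint_with. *)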
*) + let set_new_upper_bound cx trace id ltrust utrust new_trust lbounds = + print_if_verbose + cx + trace + [Printf.sprintf "Publicizing %d to %s" id (string_of_trust new_trust)]; + if subtype_trust ltrust utrust then ( + set_trust lbounds new_trust; + true + ) else + false + + let set_new_upper_bound_of_id cx trace utrust id = + match Context.find_trust_graph cx id with + | TrustResolved ltrust -> subtype_trust ltrust utrust + | TrustUnresolved bounds -> + let ltrust = get_trust bounds in + let new_trust = publicize_with utrust ltrust in + set_new_upper_bound cx trace id ltrust utrust new_trust bounds + + let flow_new_upper_bound cx trace utrust id bounds = + let ltrust = get_trust bounds in + let (lowervars, _) = get_bounds bounds in + let new_trust = publicize_with utrust ltrust in + if new_trust = ltrust then + true + else if set_new_upper_bound cx trace id ltrust utrust new_trust bounds then + for_all_iter (set_new_upper_bound_of_id cx trace new_trust) lowervars + else ( + ignore (for_all_iter (set_new_upper_bound_of_id cx trace new_trust) lowervars); + false + ) + + let add_trust_upper_bound cx trace utrust id = + match Context.find_trust_graph cx id with + | TrustResolved ltrust -> subtype_trust ltrust utrust + | TrustUnresolved bounds -> flow_new_upper_bound cx trace utrust id bounds + + let extend_bounds cx extender news id = + match Context.find_trust_graph cx id with + | TrustResolved _ -> () + | TrustUnresolved b -> extender b news + + (* + When one trust variable flows into another, like X ~> Y, we need to make sure + that (Y union upperbounds(Y)) appears in the upper bounds of X and all of X's + lowerbounds, and the reverse for Y and Y's lower bounds. We also flow the current + trust of X to Y as a new trust lower bound (as above) and the current trust of + Y to X as a new trust upper bound (likewise). + *) + let link_trust_variables cx trace id1 id2 = + let lc = Context.find_trust_graph cx id1 in + let uc = Context.find_trust_graph cx id2 in + match (lc, uc) with + | (TrustResolved lt, TrustResolved ut) -> subtype_trust lt ut + | (TrustResolved lt, TrustUnresolved ub) -> flow_new_lower_bound cx trace lt id2 ub + | (TrustUnresolved lb, TrustResolved ut) -> flow_new_upper_bound cx trace ut id1 lb + | (TrustUnresolved lb, TrustUnresolved ub) -> + let ltrust = get_trust lb in + let (l_lowervars, _) = get_bounds lb in + let utrust = get_trust ub in + let (u_lowervars, u_uppervars) = get_bounds ub in + if not (ISet.mem id1 u_lowervars) then ( + print_if_verbose cx trace [Printf.sprintf "Trust linking %d to %d" id1 id2]; + if flow_new_lower_bound cx trace ltrust id2 ub then + if flow_new_upper_bound cx trace utrust id1 lb then ( + let new_upper = ISet.add id2 u_uppervars in + let new_lower = ISet.add id1 l_lowervars in + extend_uppervars lb new_upper; + extend_lowervars ub new_lower; + ISet.iter (extend_bounds cx extend_uppervars new_upper) l_lowervars; + ISet.iter (extend_bounds cx extend_lowervars new_lower) u_uppervars; + true + ) else + false + else + false + ) else + true + + (* + This function strengthens a trust variable in place, flowing the new trust value + to both its upper and lower bound variables. This is used in e.g. statement, when + a $Trusted annotation takes the current trust of T and makes it trusted. 
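(* Editor's note: an illustrative sketch, not part of this diff. For an
   inferred trust_rep tr that carries an ident, Trust_helpers.make_trusted
   (later in this diff) reduces to roughly

     Flow_js.strengthen_trust cx (as_ident tr) (make_trusted (unknown_qualifier ())) err

   which joins the trusted qualifier into the variable's current trust and,
   if that changes anything, pushes the joined value to the variable's lower
   bounds (as a new upper bound) and upper bounds (as a new lower bound),
   reporting err when either propagation fails. *)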
+ *) + let strengthen_trust cx id new_trust message = + let constraints = Context.find_trust_graph cx id in + match constraints with + | TrustResolved _ -> + let msg = + Utils_js.spf + "strengthen_trust: attempting to strengthen fully resolved trust var %d in file %s" + id + (File_key.to_string @@ Context.file cx) + in + Utils_js.assert_false msg + | TrustUnresolved bound -> + let (lowervars, uppervars) = get_bounds bound in + let trust = get_trust bound in + let new_trust = join_trust trust new_trust in + if new_trust <> trust then ( + print_if_verbose + cx + Trace.dummy_trace + [ + Printf.sprintf + "Strengthening %d from %s to %s" + id + (string_of_trust trust) + (string_of_trust new_trust); + ]; + set_trust bound new_trust; + if + not + ( for_all_iter (add_trust_upper_bound cx Trace.dummy_trace new_trust) lowervars + && for_all_iter (add_trust_lower_bound cx Trace.dummy_trace new_trust) uppervars ) + then + add_output cx message + ) + + let trust_flow cx trace use_op l u = + let add_error lr ur = + if Context.trust_errors cx then + add_output cx ~trace (Error_message.ETrustIncompatibleWithUseOp (lr, ur, use_op)) + in + let info_of = function + | DefT (r, trust, _) -> Some (r, expand trust) + | AnyT (r, _) -> Some (r, Qualifier (dynamic_qualifier ())) + | _ -> None + in + let ldata = info_of l in + let udata = info_of u in + match (ldata, udata) with + (* When a trust-carrying type flows into another trust-carrying type, + we expand the trustdata into either a pointer into the trust graph or a + trust value, as per the expand function described in trust.ml. + + If we see a flow: + trust ~> ident + then we add trust as a new lower bound to the variable ident, and raise + an error if that fails. + *) + | (Some (lr, Qualifier ltrust), Some (ur, Inferred id)) -> + if not (add_trust_lower_bound cx trace ltrust id) then add_error lr ur + (* If we see a flow: + ident ~> trust + then we add trust as a new upper bound to the variable ident, and raise + an error if that fails. + *) + | (Some (lr, Inferred id), Some (ur, Qualifier utrust)) -> + if not (add_trust_upper_bound cx trace utrust id) then add_error lr ur + (* If we see a flow: + ident1 ~> ident2 + we link the two variables and propagate bounds and trust between them, + and raise an error if this fails. + *) + | (Some (lr, Inferred lid), Some (ur, Inferred uid)) -> + if not (link_trust_variables cx trace lid uid) then add_error lr ur + (* If we see a flow: + trust1 ~> trust2 + all we do is raise an error if trust1 is not a subtype of trust2. + *) + | (Some (lr, Qualifier ltrust), Some (ur, Qualifier utrust)) -> + if not (subtype_trust ltrust utrust) then add_error lr ur + | (None, _) + | (_, None) -> + () + + let trust_flow_to_use_t cx trace l u = + match u with + | UseT (use_op, u) -> trust_flow cx trace use_op l u + | _ -> () +end diff --git a/src/typing/trust_constraint.ml b/src/typing/trust_constraint.ml new file mode 100644 index 00000000000..c8d65f8a3df --- /dev/null +++ b/src/typing/trust_constraint.ml @@ -0,0 +1,57 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree.
+ *) + +open Type + +type ident = Type.ident + +type node = + | TrustGoto of ident + | TrustRoot of root + +and root = { + rank: int; + constraints: constraints; +} + +and constraints = + | TrustResolved of trust_qualifier + | TrustUnresolved of bounds + +and bounds = { + mutable trust: trust_qualifier; + mutable lowervars: ISet.t; + mutable uppervars: ISet.t; +} + +let get_constraints { constraints; _ } = constraints + +let get_bounds { lowervars; uppervars; _ } = (lowervars, uppervars) + +let get_trust { trust; _ } = trust + +let resolved_trust_constraint _ trust = TrustResolved trust + +let new_unresolved_root initial = + TrustRoot + { + rank = 0; + constraints = + TrustUnresolved { trust = initial; lowervars = ISet.empty; uppervars = ISet.empty }; + } + +let new_resolved_root trust = TrustRoot { rank = 0; constraints = TrustResolved trust } + +let new_goto id = TrustGoto id + +let set_trust bounds trust = bounds.trust <- trust + +let extend_uppervars bounds new_uppervars = + bounds.uppervars <- ISet.union bounds.uppervars new_uppervars + +let extend_lowervars bounds new_lowervars = + bounds.lowervars <- ISet.union bounds.lowervars new_lowervars diff --git a/src/typing/trust_constraint.mli b/src/typing/trust_constraint.mli new file mode 100644 index 00000000000..84e22da5749 --- /dev/null +++ b/src/typing/trust_constraint.mli @@ -0,0 +1,42 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Type + +type ident = Type.ident + +type bounds + +type root + +type node = + | TrustGoto of ident + | TrustRoot of root + +type constraints = + | TrustResolved of trust_qualifier + | TrustUnresolved of bounds + +val get_constraints : root -> constraints + +val get_bounds : bounds -> ISet.t * ISet.t + +val get_trust : bounds -> trust_qualifier + +val resolved_trust_constraint : Reason.t -> trust_qualifier -> constraints + +val new_unresolved_root : trust_qualifier -> node + +val new_resolved_root : trust_qualifier -> node + +val new_goto : ident -> node + +val set_trust : bounds -> trust_qualifier -> unit + +val extend_uppervars : bounds -> ISet.t -> unit + +val extend_lowervars : bounds -> ISet.t -> unit diff --git a/src/typing/trust_helpers.ml b/src/typing/trust_helpers.ml new file mode 100644 index 00000000000..cce3d033439 --- /dev/null +++ b/src/typing/trust_helpers.ml @@ -0,0 +1,40 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +open Trust + +let infer_trust cx = + if Context.trust_tracking cx then + Flow_js.mk_trust_var cx ~initial:(unknown_qualifier ()) () |> from_ident + else + bogus_trust () + +let with_trust_inference cx constructor = infer_trust cx |> constructor + +let strengthen newtrust cx trust err = + if is_qualifier trust then + as_qualifier trust |> join_trust newtrust |> from_qualifier + else ( + Flow_js.strengthen_trust cx (as_ident trust) newtrust err; + trust + ) + +let make_trusted = unknown_qualifier () |> make_trusted |> strengthen + +let make_private = unknown_qualifier () |> make_private |> strengthen + +(* Get the trust of a trust_rep, whether it's an ident or a fixed trust. 
*) +let actual_trust cx t = + Trust_constraint.( + match expand t with + | Qualifier trust -> trust + | Inferred ident -> + begin + match Context.find_trust_graph cx ident with + | TrustResolved trust -> trust + | TrustUnresolved bounds -> get_trust bounds + end) diff --git a/src/typing/tvar.ml b/src/typing/tvar.ml index 1b6de20d5af..17d9958bf9e 100644 --- a/src/typing/tvar.ml +++ b/src/typing/tvar.ml @@ -1,18 +1,23 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -let mk cx reason = +let mk_no_wrap cx reason = let tvar = Reason.mk_id () in let graph = Context.graph cx in Context.add_tvar cx tvar (Constraint.new_unresolved_root ()); - (if Context.is_verbose cx then Utils_js.prerr_endlinef - "TVAR %d (%d): %s" tvar (IMap.cardinal graph) - (Debug_js.string_of_reason cx reason)); - Type.OpenT (reason, tvar) + if Context.is_verbose cx then + Utils_js.prerr_endlinef + "TVAR %d (%d): %s" + tvar + (IMap.cardinal graph) + (Debug_js.string_of_reason cx reason); + tvar + +let mk cx reason = Type.OpenT (reason, mk_no_wrap cx reason) let mk_where cx reason f = let tvar = mk cx reason in diff --git a/src/typing/ty_normalizer.ml b/src/typing/ty_normalizer.ml index a3e585ef505..9a8d2d20332 100644 --- a/src/typing/ty_normalizer.ml +++ b/src/typing/ty_normalizer.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,10 +8,11 @@ open Pervasives open Utils_js open Reason - +open Loc_collections module Env = Ty_normalizer_env module T = Type module VSet = ISet +module File_sig = File_sig.With_ALoc (* The type normalizer converts infered types (of type `Type.t`) under a context cx to the simplified form of type `Ty.t`. 
It is called by various modules, @@ -30,9 +31,11 @@ type error_kind = | BadBoundT | BadCallProp | BadClassT + | BadThisClassT | BadPoly | BadTypeAlias | BadTypeApp + | BadInlineInterfaceExtends | BadInternalT | BadInstanceT | BadEvalT @@ -40,6 +43,7 @@ type error_kind = | ShadowTypeParam | UnsupportedTypeCtor | UnsupportedUseCtor + | TypeTooBig type error = error_kind * string @@ -48,9 +52,11 @@ let error_kind_to_string = function | BadBoundT -> "Unbound type parameter" | BadCallProp -> "Bad call property" | BadClassT -> "Bad class" + | BadThisClassT -> "Bad this class" | BadPoly -> "Bad polymorphic type" | BadTypeAlias -> "Bad type alias" | BadTypeApp -> "Bad type application" + | BadInlineInterfaceExtends -> "Bad inline interface extends" | BadInternalT -> "Bad internal type" | BadInstanceT -> "Bad instance type" | BadEvalT -> "Bad eval" @@ -58,42 +64,32 @@ let error_kind_to_string = function | ShadowTypeParam -> "Shadowed type parameters" | UnsupportedTypeCtor -> "Unsupported type constructor" | UnsupportedUseCtor -> "Unsupported use constructor" + | TypeTooBig -> "Type too big" -let error_to_string (kind, msg) = - spf "[%s] %s" (error_kind_to_string kind) msg - +let error_to_string (kind, msg) = spf "[%s] %s" (error_kind_to_string kind) msg module NormalizerMonad : sig - module State : sig type t - val empty: t + + val empty : t end val run_type : options:Env.options -> genv:Env.genv -> - imported_names:Loc.t SMap.t -> + imported_names:Ty.imported_ident ALocMap.t -> tparams:Type.typeparam list -> State.t -> Type.t -> (Ty.t, error) result * State.t - val run_imports: - options:Env.options -> - genv:Env.genv -> - State.t -> - Loc.t SMap.t * State.t - + val run_imports : options:Env.options -> genv:Env.genv -> Ty.imported_ident ALocMap.t end = struct - module State = struct - type t = { - (* Source of fresh ints for creating new Ty.tvar's *) counter: int; - (* A cache for resolved type variables. We cache the result even when the output is an error, to avoid @@ -103,17 +99,6 @@ end = struct The key to this map is the Type.tvar `ident`. *) tvar_cache: (Ty.t, error) result IMap.t; - - (* A cache for resolved EvalTs - - It is important to cache these results because we should only invoke - Flow_js.evaluate_type_destructor once for every EvalT. If we fail to do - so the result for calls that result to exceptions will not be cached in - Context.evaluated and subsequent calls may result to different results. - In particular we might get Empty instead of an exception. - *) - eval_t_cache: (Ty.t, error) result IMap.t; - (* This set is useful for synthesizing recursive types. It holds the set of type variables that are encountered "free". We say that a type variable is free when it appears in the body of its own definition. 
@@ -125,59 +110,52 @@ end = struct free_tvars: VSet.t; } - let empty = { - counter = 0; - tvar_cache = IMap.empty; - eval_t_cache = IMap.empty; - free_tvars = VSet.empty; - } - + let empty = { counter = 0; tvar_cache = IMap.empty; free_tvars = VSet.empty } end - include StateResult.Make(State) + include StateResult.Make (State) (* Monadic helper functions *) - let mapM f xs = all (List.map f xs) - let concat_fold_m f xs = mapM f xs >>| List.concat + let mapM f xs = all (Core_list.map ~f xs) + + let optMapM f = function + | Some xs -> mapM f xs >>| Option.return + | None as y -> return y + + let optM f = function + | Some x -> f x >>| Option.return + | None as y -> return y + + let _fstMapM f (x, y) = f x >>| mk_tuple_swapped y + + let sndMapM f (x, y) = f y >>| mk_tuple x + + let concat_fold_m f xs = mapM f xs >>| Core_list.concat let fresh_num = - let open State in - get >>= fun st -> - let n = st.counter in - put { st with counter = n + 1 } >>| fun _ -> n + State.( + let%bind st = get in + let n = st.counter in + let%map _ = put { st with counter = n + 1 } in + n) let terr ~kind ?msg t = - let t_str = Option.map t - ~f:(fun t -> spf "Raised on type: %s" (Type.string_of_ctor t)) - in + let t_str = Option.map t ~f:(fun t -> spf "Raised on type: %s" (Type.string_of_ctor t)) in let msg = ListUtils.cat_maybes [msg; t_str] |> String.concat "\n" in error (kind, msg) - (* Type caches *) let update_tvar_cache i t = - let open State in - get >>= fun st -> - let tvar_cache = IMap.add i t st.tvar_cache in - put { st with tvar_cache } + State.( + let%bind st = get in + let tvar_cache = IMap.add i t st.tvar_cache in + put { st with tvar_cache }) let find_tvar root_id = - let open State in - get >>| fun st -> - IMap.get root_id st.tvar_cache - - let update_eval_t_cache i t = - let open State in - get >>= fun st -> - let eval_t_cache = IMap.add i t st.eval_t_cache in - put { st with eval_t_cache } - - let find_eval_t id = - let open State in - get >>| fun st -> - IMap.get id st.eval_t_cache - + State.( + let%map st = get in + IMap.get root_id st.tvar_cache) (* Lookup a type parameter T in the current environment. There are three outcomes: 1. T appears in env and for its first occurence locations match. This means it @@ -191,96 +169,47 @@ end = struct 3. The type parameter is not in env. Do the default action. 
*) let lookup_tparam ~default env t tp_name tp_loc = - let open Type in - let pred { name; reason; _ } = ( - name = tp_name && - Reason.def_loc_of_reason reason = tp_loc - ) in - match List.find_opt pred env.Env.tparams with - | Some _ -> - (* If we care about shadowing of type params, then flag an error *) - if Env.flag_shadowed_type_params env then - let shadow_pred { name; _ } = (name = tp_name) in - match List.find_opt shadow_pred env.Env.tparams with - | Some { reason; _ } - when Reason.def_loc_of_reason reason <> tp_loc -> - terr ~kind:ShadowTypeParam (Some t) - | Some _ -> - return Ty.(Bound (Symbol (Local tp_loc, tp_name))) - | None -> assert false - else - return Ty.(Bound (Symbol (Local tp_loc, tp_name))) - | None -> - default t - - + Type.( + let pred { name; reason; _ } = name = tp_name && Reason.def_aloc_of_reason reason = tp_loc in + match List.find_opt pred env.Env.tparams with + | Some _ -> + (* If we care about shadowing of type params, then flag an error *) + if Env.flag_shadowed_type_params env then + let shadow_pred { name; _ } = name = tp_name in + match List.find_opt shadow_pred env.Env.tparams with + | Some { reason; _ } when Reason.def_aloc_of_reason reason <> tp_loc -> + terr ~kind:ShadowTypeParam (Some t) + | Some _ -> return (Ty.Bound (tp_loc, tp_name)) + | None -> assert false + else + return (Ty.Bound (tp_loc, tp_name)) + | None -> default t) (**************) - (* Type ops *) + (* Type ctors *) (**************) - (* Simplify union/intersection types + let generic_class name targs = Ty.mk_generic_class name targs - This visitor: - - removes identical nodes from union and intersection types. (At the moment - the comparison used is `Pervasives.compare`, but perhaps something more - clever can replace this.) - - removes the neutral element for union (resp. intersection) types, which - is the bottom (resp. top) type. + let generic_interface name targs = Ty.mk_generic_interface name targs - The Any state of this visitor is used to capture any change to the type - structure. - *) - let simplify_unions_inters = - let open Ty in - let simplify_zero_one ~zero ~one = - let rec simplify_aux acc = function - | [] -> acc - | t::ts -> - if t = zero then [t] - else if t = one then simplify_aux acc ts - else simplify_aux (t::acc) ts - in - simplify_aux [] - in - let o = object (self) - inherit [_] endo_ty - method private simplify env ~break ~zero ~one ~make ts = - let ts' = self#on_list self#on_t env ts in - let ts' = List.concat (List.map break ts') in - let ts' = List.sort Pervasives.compare ts' in - let ts' = ListUtils.uniq ts' in - let ts' = simplify_zero_one ~zero ~one ts' in - if List.length ts = List.length ts' - then None (* no change *) - else Some (make ts') - - method! on_t env t = - match t with - | Union (t0,t1,ts) -> - let opt = self#simplify ~break:Ty.bk_union ~zero:Ty.Top - ~one:Ty.Bot ~make:Ty.mk_union env (t0::t1::ts) in - Option.value ~default:t opt - | Inter (t0,t1,ts) -> - let opt = self#simplify ~break:Ty.bk_inter ~zero:Ty.Bot - ~one:Ty.Top ~make:Ty.mk_inter env (t0::t1::ts) in - Option.value ~default:t opt - (* WARNING: do not descend to other constructors or this will get slow *) - | _ -> t - end in - let rec go t = - let t' = o#on_t () t in - if t == t' then t else go t' - in - fun t -> go t + let generic_talias name targs = Ty.mk_generic_talias name targs - (* We wrap the union and intersection constructors with the following - functions that keep types as small as possible. 
- *) - let uniq_union ts = ts |> Ty.mk_union |> simplify_unions_inters - let uniq_inter ts = ts |> Ty.mk_inter |> simplify_unions_inters + let builtin_t name = generic_talias (Ty.builtin_symbol name) None + + let generic_builtin_t name ts = generic_talias (Ty.builtin_symbol name) (Some ts) + let empty_type = Ty.Bot Ty.EmptyType + let empty_matching_prop_t = Ty.Bot Ty.EmptyMatchingPropT + + let mk_empty bot_kind = + match bot_kind with + | Ty.EmptyType -> empty_type + | Ty.EmptyMatchingPropT -> empty_matching_prop_t + | Ty.EmptyTypeDestructorTriggerT _ + | Ty.NoLowerWithUpper _ -> + Ty.Bot bot_kind (*********************) (* Recursive types *) @@ -304,21 +233,25 @@ end = struct *) module Recursive = struct - (* Helper functions *) (* Replace a recursive type variable r with a symbol sym in the type t. *) - let subst = - let o = Ty.(object - inherit [_] map_ty as super - method! on_t env t = - let t = match t, env with - | TVar (i, ts), (r, sym) when r = i -> Generic (sym, true, ts) - | _ -> t in - super#on_t env t - end) in - fun r sym t -> - o#on_t (r, sym) t + let subst = + let o = + Ty.( + object + inherit [_] map_ty as super + + method! on_t env t = + let t = + match (t, env) with + | (TVar (i, ts), (r, sym)) when r = i -> generic_talias sym ts + | _ -> t + in + super#on_t env t + end) + in + (fun r sym t -> o#on_t (r, sym) t) (* We shouldn't really create bare Mu types for two reasons. @@ -337,12 +270,13 @@ end = struct in a "module element" category rather than the Ty.t structure. *) let mk_mu ~definitely_appears i t = - let open Ty in - if definitely_appears || Ty_utils.appears_in_t ~is_top:true i t then - match t with - | TypeAlias { ta_name; _ } -> subst (RVar i) ta_name t - | _ -> Mu (i, t) - else t + Ty.( + if definitely_appears || Ty_utils.tvar_appears_in_type ~is_toplevel:true (Ty.RVar i) t then + match t with + | TypeAlias { ta_name; _ } -> subst (RVar i) ta_name t + | _ -> Mu (i, t) + else + t) (* When inferring recursive types, the top-level appearances of the recursive variable should be eliminated. This visitor performs the following @@ -361,31 +295,43 @@ end = struct Bot and Top can be eliminated. *) let remove_toplevel_tvar = - let open Ty in - let o = object (self) - inherit [_] endo_ty - method env_zero = - function `Union -> Bot | `Inter -> Top - method! on_t env t = - match env, t with - | (v, _), Union (t0,t1,ts) -> - let ts = t0::t1::ts in - let ts' = self#on_list self#on_t (v, `Union) ts in - if ts == ts' then t else Ty.mk_union ts' - | (v, _), Inter (t0,t1,ts) -> - let ts = t0::t1::ts in - let ts' = self#on_list self#on_t (v, `Inter) ts in - if ts == ts' then t else Ty.mk_inter ts' - | (v, mode), TVar (Ty.RVar v', _) when v = v' -> - self#env_zero mode - | _, Mu (v, rt) -> - let rt' = self#on_t env rt in - if rt == rt' then t - else mk_mu ~definitely_appears:false v rt' - | _, _ -> t - end in - fun v t -> o#on_t (v, `Union) t - + Ty.( + let o = + object (self) + inherit [_] endo_ty + + method env_zero = + function + | `Union -> Bot (NoLowerWithUpper NoUpper) + | `Inter -> Top + + method! 
on_t env t = + match (env, t) with + | ((v, _), Union (t0, t1, ts)) -> + let t0' = self#on_t (v, `Union) t0 in + let ts' = self#on_list self#on_t (v, `Union) (t1 :: ts) in + if t0 == t0' && ts == ts' then + t + else + Ty.mk_union (t0', ts') + | ((v, _), Inter (t0, t1, ts)) -> + let t0' = self#on_t (v, `Inter) t0 in + let ts' = self#on_list self#on_t (v, `Inter) (t1 :: ts) in + if t0 == t0' && ts == ts' then + t + else + Ty.mk_inter (t0', ts') + | ((v, mode), TVar (Ty.RVar v', _)) when v = v' -> self#env_zero mode + | (_, Mu (v, rt)) -> + let rt' = self#on_t env rt in + if rt == rt' then + t + else + mk_mu ~definitely_appears:false v rt' + | (_, _) -> t + end + in + (fun v t -> o#on_t (v, `Union) t)) (* Constructing recursive types. @@ -416,105 +362,134 @@ end = struct not a recursive type. *) let make free_vars i t = - if not (VSet.mem i free_vars) then t else - (* Recursive, but still might be a degenerate case *) - let t' = remove_toplevel_tvar i t in - let changed = not (t == t') in - let t' = if changed then simplify_unions_inters t' else t' in - (* If not changed then all free_vars are still in, o.w. recompute free vars *) - mk_mu ~definitely_appears:(not changed) i t' + if not (VSet.mem i free_vars) then + t + else + (* Recursive, but still might be a degenerate case *) + let t' = remove_toplevel_tvar i t in + let changed = not (t == t') in + (* If not changed then all free_vars are still in, o.w. recompute free vars *) + mk_mu ~definitely_appears:(not changed) i t' end - module Substitution = struct open Ty + (* NOTE Traversing huge types may lead to merge-timeouts. We cut off the size + * of the recursion at 10K nodes. *) + let max_size = 10000 + + exception SizeCutOff + + let size = ref 0 + let init_env tparams types = let rec step acc = function - | [], _ | _, [] -> acc - | p::ps, t::ts -> step (SMap.add p.tp_name t acc) (ps, ts) + | ([], _) + | (_, []) -> + acc + | (p :: ps, t :: ts) -> step (SMap.add p.tp_name t acc) (ps, ts) in step SMap.empty (tparams, types) let remove_params env = function - | None -> env - | Some ps -> List.fold_left (fun e p -> SMap.remove p.tp_name e) env ps + | None -> env + | Some ps -> List.fold_left (fun e p -> SMap.remove p.tp_name e) env ps - let visitor = object - inherit [_] endo_ty as super - method! on_t env t = - match t with - | TypeAlias { ta_tparams = ps; _ } - | Fun { fun_type_params = ps; _ } -> - let env = remove_params env ps in - super#on_t env t - | Bound (Symbol (_, name)) -> - let t' = Option.value (SMap.get name env) ~default:t in - super#on_t env t' - | _ -> - super#on_t env t - end - (* Replace a list of type parameters with a list of types in the given type. - * These lists might not match exactly in length. *) + let visitor = + object + inherit [_] endo_ty as super + + method! on_t env t = + size := !size + 1; + if !size > max_size then raise SizeCutOff; + match t with + | TypeAlias { ta_tparams = ps; _ } + | Fun { fun_type_params = ps; _ } -> + let env = remove_params env ps in + super#on_t env t + | Bound (_, name) -> + begin + match SMap.get name env with + | Some t' -> t' + | None -> super#on_t env t + end + | _ -> super#on_t env t + end + + (* Replace a list of type parameters with a list of types in the given type. + * These lists might not match exactly in length. 
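(* Editor's note: an illustrative sketch, not part of this diff. For a
   hypothetical alias `type Box<T> = { value: T }`, expanding `Box<number>`
   boils down to something like

     run box_tparams [Ty.Num None] box_body

   where the visitor rewrites the Bound occurrence of T to `Ty.Num None`.
   With this diff the traversal also counts visited nodes; past max_size
   (10,000 nodes) it raises SizeCutOff, which `run` turns into the new
   TypeTooBig error instead of producing an enormous normalized type. *)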
*) let run vs ts t = let env = init_env vs ts in - let t' = visitor#on_t env t in - if t != t' then simplify_unions_inters t' else t' + size := 0; + match visitor#on_t env t with + | exception SizeCutOff -> terr ~kind:TypeTooBig None + | t' -> return t' end (***********************) (* Construct built-ins *) (***********************) - let opt_param = Ty.({ prm_optional = true }) - let non_opt_param = Ty.({ prm_optional = false }) - - let mk_fun ?(params=[]) ?rest ?tparams ret = Ty.( - Fun { - fun_params = params; - fun_rest_param = rest; - fun_return = ret; - fun_type_params = tparams; - } - ) - - let mk_tparam ?bound ?(pol=Ty.Neutral) ?default name = Ty.({ - tp_name = name; - tp_bound = bound; - tp_polarity = pol; - tp_default = default; - }) - - let symbol_from_loc env loc name = - let open File_key in - let symbol_source = Loc.source loc in - let provenance = - match symbol_source with - | Some LibFile _ -> Ty.Library loc - | Some (SourceFile _) -> - let current_source = Env.(env.genv.file) in - (* Locally defined name *) - if Some current_source = symbol_source - then Ty.Local loc - (* Otherwise it is one of: - - Imported, or - - Remote (defined in a different file but not imported in this one) *) - else (match SMap.get name env.Env.imported_names with - | Some loc' when loc = loc' -> Ty.Imported loc - | _ -> Ty.Remote loc) - | Some (JsonFile _) - | Some (ResourceFile _) -> Ty.Local loc - | Some Builtins -> Ty.Builtin - | None -> Ty.Local loc - in - Ty.Symbol (provenance, name) + let opt_param = Ty.{ prm_optional = true } + + let non_opt_param = Ty.{ prm_optional = false } + + let mk_fun ?(params = []) ?rest ?tparams ret = + Ty.( + Fun + { fun_params = params; fun_rest_param = rest; fun_return = ret; fun_type_params = tparams }) + + let mk_tparam ?bound ?(pol = Ty.Neutral) ?default name = + Ty.{ tp_name = name; tp_bound = bound; tp_polarity = pol; tp_default = default } + + let symbol_from_loc env def_loc name = + File_key.( + let symbol_source = ALoc.source def_loc in + let provenance = + match symbol_source with + | Some (LibFile _) -> Ty.Library + | Some (SourceFile def_source) -> + let current_source = Env.(env.genv.file) in + if File_key.to_string current_source = def_source then + Ty.Local + else + Ty.Remote { Ty.imported_as = ALocMap.get def_loc env.Env.imported_names } + | Some (JsonFile _) + | Some (ResourceFile _) -> + Ty.Local + | Some Builtins -> Ty.Builtin + | None -> Ty.Local + in + let anonymous = name = "<>" in + { Ty.provenance; name; anonymous; def_loc }) (* TODO due to repositioninig `reason_loc` may not point to the actual location where `name` was defined. *) let symbol_from_reason env reason name = - let def_loc = Reason.def_loc_of_reason reason in + let def_loc = Reason.def_aloc_of_reason reason in symbol_from_loc env def_loc name + let remove_targs_matching_defaults targs tparams = + let matches_default targ tparam = Some targ = Ty.(tparam.tp_default) in + let rec remove_if_able targ_lst tparam_lst = + match (targ_lst, tparam_lst) with + (* Recursive case. Recurse, then if this is now the last targ (if later ones were eliminated), + * remove it if it matches the tparam default. *) + | (targ :: targ_rst, tparam :: tparam_rst) -> + let targ_rst = remove_if_able targ_rst tparam_rst in + if ListUtils.is_empty targ_rst && matches_default targ tparam then + [] + else + targ :: targ_rst + | ([], []) (* Base case *) + | ([], _ :: _) (* Fewer targs than tparams to begin with. *) + | (_ :: _, []) (* More targs than tparams. This shouldn't happen. 
*) -> + targ_lst + in + match (targs, tparams) with + | (Some targ_lst, Some tparam_lst) -> Some (remove_if_able targ_lst tparam_lst) + | _ -> targs (*************************) (* Main transformation *) @@ -526,7 +501,6 @@ end = struct reconstruct some types based on attached reasons. Two cases are of interest here: - Type parameters: we use RPolyTest reasons for these - Type aliases: we use RTypeAlias reasons for these *) - and type_poly ~env t = (* The RPolyTest description is used for types that represent type parameters. When normalizing, we want such types to be replaced by the type parameter, @@ -537,159 +511,182 @@ end = struct let reason = Type.reason_of_t t in match desc_of_reason ~unwrap:false reason with | RPolyTest (name, _) -> - let loc = Reason.def_loc_of_reason reason in + let loc = Reason.def_aloc_of_reason reason in let default t = type_with_alias_reason ~env t in lookup_tparam ~default env t name loc - | _ -> - type_with_alias_reason ~env t + | _ -> type_with_alias_reason ~env t and type_with_alias_reason ~env t = - if Env.expand_type_aliases env then type_after_reason ~env t else - let reason = Type.reason_of_t t in - let open Type in - (* These type are treated as transparent when it comes to the type alias - annotation. *) - match t with - | OpenT _ | EvalT _ -> + if Env.expand_type_aliases env then type_after_reason ~env t - | _ -> - begin match desc_of_reason ~unwrap:false reason with - | RTypeAlias (name, true, _) -> - (* The default action is to avoid expansion by using the type alias name, + else + let reason = Type.reason_of_t t in + Type.( + (* These type are treated as transparent when it comes to the type alias + * annotation. + * + * TypeDestructorTriggerT might hold a type-app, so avoid using the type here. + * Instead, do the fallback action which is to normalize to Bot. The trigger + * should have actually produced another concrete type as a lower bound. *) + match t with + | OpenT _ + | TypeDestructorTriggerT _ -> + type_after_reason ~env t + | EvalT _ when Env.evaluate_type_destructors env -> type_after_reason ~env t + | _ -> + begin + match desc_of_reason ~unwrap:false reason with + | RTypeAlias (name, true, _) -> + (* The default action is to avoid expansion by using the type alias name, when this can be trusted. The one case where we want to skip this process is when recovering the body of a type alias A. In that case the environment field under_type_alias will be 'Some A'. If the type alias name in the reason is also A, then we are still at the top-level of the type-alias, so we proceed by expanding one level preserving the same environment. *) - let continue = - match env.Env.under_type_alias with - | Some name' -> name = name' - | None -> false - in - if continue then type_after_reason ~env t else - let symbol = symbol_from_reason env reason name in - return (Ty.named_t symbol) - | _ -> - (* We are now beyond the point of the one-off expansion. Reset the environment + let continue = + match env.Env.under_type_alias with + | Some name' -> name = name' + | None -> false + in + if continue then + type_after_reason ~env t + else + let symbol = symbol_from_reason env reason name in + return (generic_talias symbol None) + | _ -> + (* We are now beyond the point of the one-off expansion. Reset the environment assigning None to under_type_alias, so that aliases are used in subsequent invocations. 
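(* Editor's note: an illustrative aside, not part of this diff. For a
   hypothetical recursive alias `type Foo = { prev: Foo | null }`, recovering
   the body of Foo starts with under_type_alias = Some "Foo", so the outer
   RTypeAlias reason is expanded one level; this branch then resets the field
   to None, and the nested occurrence of Foo is therefore rendered by name
   (generic_talias) rather than being expanded again. *)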
*) - let env = Env.{ env with under_type_alias = None } in - type_after_reason ~env t - end + let env = Env.{ env with under_type_alias = None } in + type_after_reason ~env t + end) and type_after_reason ~env t = - let open Type in - let env = Env.descend env in - match t with - | OpenT (_, id) -> type_variable ~env id - | BoundT (reason, name, _) -> bound_t ~env reason name - | AnnotT (_, t, _) -> type__ ~env t - | EvalT (t, d, id) -> eval_t ~env t id d - | ExactT (_, t) -> exact_t ~env t - | CustomFunT (_, f) -> custom_fun ~env f - | InternalT i -> internal_t ~env t i - | MatchingPropT _ -> return Ty.Bot - | AnyWithUpperBoundT t -> - type__ ~env t >>| fun ty -> - Ty.generic_builtin_t "$Subtype" [ty] - | AnyWithLowerBoundT t -> - type__ ~env t >>| fun ty -> - Ty.generic_builtin_t "$Supertype" [ty] - | DefT (_, MixedT _) -> return Ty.Top - | DefT (_, AnyT) -> return Ty.Any - | DefT (_, AnyObjT) -> return Ty.AnyObj - | DefT (_, AnyFunT) -> return Ty.AnyFun - | DefT (_, VoidT) -> return Ty.Void - | DefT (_, NumT _) -> return Ty.Num - | DefT (_, StrT _) -> return Ty.Str - | DefT (_, BoolT _) -> return Ty.Bool - | DefT (_, EmptyT) -> return Ty.Bot - | DefT (_, NullT) -> return Ty.Null - | DefT (_, SingletonNumT (_, lit)) -> return (Ty.NumLit lit) - | DefT (_, SingletonStrT lit) -> return (Ty.StrLit lit) - | DefT (_, SingletonBoolT lit) -> return (Ty.BoolLit lit) - | DefT (_, MaybeT t) -> - type__ ~env t >>| fun t -> uniq_union [Ty.Void; Ty.Null; t] - | DefT (_, OptionalT t) -> - type__ ~env t >>| fun t -> uniq_union [Ty.Void; t] - | DefT (_, FunT (_, _, f)) -> - fun_ty ~env f None >>| fun t -> Ty.Fun t - | DefT (_, ObjT o) -> - obj_ty ~env o >>| fun t -> Ty.Obj t - | DefT (_, ArrT a) -> arr_ty ~env a - | DefT (_, UnionT rep) -> - let t0, (t1, ts) = UnionRep.members_nel rep in - type__ ~env t0 >>= fun t0 -> - type__ ~env t1 >>= fun t1 -> - mapM (type__ ~env) ts >>| fun ts -> - uniq_union (t0::t1::ts) - | DefT (_, IntersectionT rep) -> - let t0, (t1, ts) = InterRep.members_nel rep in - type__ ~env t0 >>= fun t0 -> - type__ ~env t1 >>= fun t1 -> - mapM (type__ ~env) ts >>| fun ts -> - uniq_inter (t0::t1::ts) - | DefT (_, PolyT (ps, t, _)) -> poly_ty ~env t ps - | DefT (r, TypeT (kind, t)) -> type_t ~env r kind t None - | DefT (_, TypeAppT (_, t, ts)) -> type_app ~env t ts - | DefT (r, InstanceT (_, _, _, t)) -> instance_t ~env r t - | DefT (_, ClassT t) -> class_t ~env t None - | DefT (_, IdxWrapper t) -> type__ ~env t - | ThisClassT (_, t) -> this_class_t ~env t None - (* NOTE For now we are ignoring the "this" type here. 
*) - | ThisTypeAppT (_, c, _, None) -> type__ ~env c - | ThisTypeAppT (_, c, _, Some ts) -> type_app ~env c ts - | KeysT (_, t) -> - type__ ~env t >>| fun ty -> - Ty.generic_builtin_t "$Keys" [ty] - | OpaqueT (r, o) -> opaque_t ~env r o - | ReposT (_, t) -> type__ ~env t - | ShapeT t -> type__ ~env t - | TypeDestructorTriggerT _ -> return Ty.Bot - | MergedT (_, uses) -> merged_t ~env uses - | ExistsT _ -> return Ty.Exists - | ObjProtoT _ -> return (Ty.builtin_t "Object.prototype") - | FunProtoT _ -> return (Ty.builtin_t "Function.prototype") - | OpenPredT (_, t, _, _) -> type__ ~env t - - | FunProtoApplyT _ -> - if Env.expand_internal_types env then - (* Function.prototype.apply: (thisArg: any, argArray?: any): any *) - return Ty.(mk_fun - ~params:[ - (Some "thisArg", Any, non_opt_param); - (Some "argArray", Any, opt_param); - ] - Any) - else - return Ty.(TypeOf (Ty.builtin_symbol "Function.prototype.apply")) - - | FunProtoBindT _ -> - if Env.expand_internal_types env then - (* Function.prototype.bind: (thisArg: any, ...argArray: Array): any *) - return Ty.(mk_fun - ~params:[(Some "thisArg", Any, non_opt_param)] - ~rest:(Some "argArray", Arr { arr_readonly = false; arr_elt_t = Any }) - Any) - else - return Ty.(TypeOf (Ty.builtin_symbol "Function.prototype.bind")) - - | FunProtoCallT _ -> - if Env.expand_internal_types env then - (* Function.prototype.call: (thisArg: any, ...argArray: Array): any *) - return Ty.(mk_fun - ~params:[(Some "thisArg", Any, non_opt_param)] - ~rest:(Some "argArray", Arr { arr_readonly = false; arr_elt_t = Any }) - Any) - else - return Ty.(TypeOf (Ty.builtin_symbol "Function.prototype.call")) - - | ModuleT (reason, _, _) -> module_t env reason t - - | DefT (_, CharSetT _) - | NullProtoT _ -> - terr ~kind:UnsupportedTypeCtor (Some t) - + Type.( + let env = Env.descend env in + match t with + | OpenT (_, id) -> type_variable ~env id + | BoundT (reason, name, _) -> bound_t ~env reason name + | AnnotT (_, t, _) -> type__ ~env t + | EvalT (t, d, id) -> eval_t ~env t id d + | ExactT (_, t) -> exact_t ~env t + | CustomFunT (_, f) -> custom_fun ~env f + | InternalT i -> internal_t ~env t i + | MatchingPropT _ -> return (mk_empty Ty.EmptyMatchingPropT) + | DefT (_, _, MixedT _) -> return Ty.Top + | AnyT (_, kind) -> return (Ty.Any (any_t kind)) + | DefT (_, _, VoidT) -> return Ty.Void + | DefT (_, _, NumT (Literal (_, (_, x)))) when Env.preserve_inferred_literal_types env -> + return (Ty.Num (Some x)) + | DefT (_, _, NumT (Truthy | AnyLiteral | Literal _)) -> return (Ty.Num None) + | DefT (_, _, StrT (Literal (_, x))) when Env.preserve_inferred_literal_types env -> + return (Ty.Str (Some x)) + | DefT (_, _, StrT (Truthy | AnyLiteral | Literal _)) -> return (Ty.Str None) + | DefT (_, _, BoolT (Some x)) when Env.preserve_inferred_literal_types env -> + return (Ty.Bool (Some x)) + | DefT (_, _, BoolT _) -> return (Ty.Bool None) + | DefT (_, _, EmptyT _) -> return (mk_empty Ty.EmptyType) + | DefT (_, _, NullT) -> return Ty.Null + | DefT (_, _, SingletonNumT (_, lit)) -> return (Ty.NumLit lit) + | DefT (_, _, SingletonStrT lit) -> return (Ty.StrLit lit) + | DefT (_, _, SingletonBoolT lit) -> return (Ty.BoolLit lit) + | MaybeT (_, t) -> + let%map t = type__ ~env t in + Ty.mk_union (Ty.Void, [Ty.Null; t]) + | OptionalT (_, t) -> + let%map t = type__ ~env t in + Ty.mk_union (Ty.Void, [t]) + | DefT (_, _, FunT (_, _, f)) -> + let%map t = fun_ty ~env f None in + Ty.Fun t + | DefT (r, _, ObjT o) -> obj_ty ~env r o + | DefT (r, _, ArrT a) -> arr_ty ~env r a + | UnionT (_, rep) -> + let 
(t0, (t1, ts)) = UnionRep.members_nel rep in + let%bind t0 = type__ ~env t0 in + let%bind t1 = type__ ~env t1 in + let%map ts = mapM (type__ ~env) ts in + Ty.mk_union (t0, t1 :: ts) + | IntersectionT (_, rep) -> + let (t0, (t1, ts)) = InterRep.members_nel rep in + let%bind t0 = type__ ~env t0 in + let%bind t1 = type__ ~env t1 in + let%map ts = mapM (type__ ~env) ts in + Ty.mk_inter (t0, t1 :: ts) + | DefT (_, _, PolyT (_, ps, t, _)) -> poly_ty ~env t ps + | DefT (r, _, TypeT (kind, t)) -> type_t ~env r kind t None + | TypeAppT (_, _, t, ts) -> type_app ~env t (Some ts) + | DefT (r, _, InstanceT (_, super, _, t)) -> instance_t ~env r super t + | DefT (_, _, ClassT t) -> class_t ~env t None + | DefT (_, _, IdxWrapper t) -> type__ ~env t + | DefT (_, _, ReactAbstractComponentT { config; instance }) -> + let%bind config = type__ ~env config in + let%bind instance = type__ ~env instance in + return + (generic_talias + (Ty_symbol.builtin_symbol "React$AbstractComponent") + (Some [config; instance])) + | ThisClassT (_, t) -> this_class_t ~env t None + | ThisTypeAppT (_, c, _, ts) -> type_app ~env c ts + | KeysT (_, t) -> + let%map ty = type__ ~env t in + Ty.Utility (Ty.Keys ty) + | OpaqueT (r, o) -> opaque_t ~env r o + | ReposT (_, t) -> type__ ~env t + | ShapeT t -> + let%map t = type__ ~env t in + Ty.Utility (Ty.Shape t) + | TypeDestructorTriggerT (_, r, _, _, _) -> + let loc = Reason.def_aloc_of_reason r in + return (mk_empty (Ty.EmptyTypeDestructorTriggerT loc)) + | MergedT (_, uses) -> merged_t ~env uses + | ExistsT _ -> return Ty.(Utility Exists) + | ObjProtoT _ -> return Ty.(TypeOf ObjProto) + | FunProtoT _ -> return Ty.(TypeOf FunProto) + | OpenPredT (_, t, _, _) -> type__ ~env t + | FunProtoApplyT _ -> + if Env.expand_internal_types env then + (* Function.prototype.apply: (thisArg: any, argArray?: any): any *) + return + Ty.( + mk_fun + ~params: + [ + (Some "thisArg", explicit_any, non_opt_param); + (Some "argArray", explicit_any, opt_param); + ] + explicit_any) + else + return Ty.(TypeOf FunProtoApply) + | FunProtoBindT _ -> + if Env.expand_internal_types env then + (* Function.prototype.bind: (thisArg: any, ...argArray: Array): any *) + return + Ty.( + mk_fun + ~params:[(Some "thisArg", explicit_any, non_opt_param)] + ~rest: + ( Some "argArray", + Arr { arr_readonly = false; arr_literal = false; arr_elt_t = explicit_any } ) + explicit_any) + else + return Ty.(TypeOf FunProtoBind) + | FunProtoCallT _ -> + if Env.expand_internal_types env then + (* Function.prototype.call: (thisArg: any, ...argArray: Array): any *) + return + Ty.( + mk_fun + ~params:[(Some "thisArg", explicit_any, non_opt_param)] + ~rest: + ( Some "argArray", + Arr { arr_readonly = false; arr_literal = false; arr_elt_t = explicit_any } ) + explicit_any) + else + return Ty.(TypeOf FunProtoCall) + | ModuleT (reason, exports, _) -> module_t env reason exports t + | NullProtoT _ -> return Ty.Null + | DefT (_, _, CharSetT _) -> terr ~kind:UnsupportedTypeCtor (Some t)) (* Type variable normalization (input: a type variable `id`) @@ -707,19 +704,24 @@ end = struct Step 3: Start variable resolution. 
*) and type_variable ~env id = - let root_id, constraints = (* step 1 *) - Context.find_constraints Env.(env.genv.cx) id in - find_tvar root_id >>= function (* step 2 *) - | Some (Ok Ty.(TVar (Ty.RVar v, _) as t)) -> (* step 2a *) - modify State.(fun st -> { st with - free_tvars = VSet.add v st.free_tvars - }) >>= fun _ -> - return t - | Some (Ok t) -> return t (* step 2b *) - - | Some (Error s) -> error s (* step 2c *) - | None -> (* step 2d *) - resolve_tvar ~env constraints root_id (* step 3 *) + let (root_id, constraints) = + (* step 1 *) + Context.find_constraints Env.(env.genv.cx) id + in + match%bind find_tvar root_id with + (* step 2 *) + | Some (Ok Ty.(TVar (RVar v, _) as t)) -> + (* step 2a *) + let mod_state st = State.{ st with free_tvars = VSet.add v st.free_tvars } in + let%map () = modify mod_state in + t + | Some (Ok t) -> return t (* step 2b *) + | Some (Error s) -> error s (* step 2c *) + | None -> + (* step 2d *) + resolve_tvar ~env constraints root_id + + (* step 3 *) (* Resolve a type variable (encountered for the first time) @@ -729,310 +731,492 @@ end = struct of the monad under the current state and cache the "monadic" result. *) and resolve_tvar ~env cons root_id = - let open State in - fresh_num >>= fun rid -> - let rvar = Ty.RVar rid in - (* Set current variable "under resolution" *) - update_tvar_cache root_id (Ok (Ty.TVar (rvar, None))) >>= fun _ -> - get >>= fun in_st -> - - (* Resolve the tvar *) - let ty_res, out_st = run in_st (resolve_bounds ~env cons) in - - (* Create a recursive type (if needed) *) - let ty_res = Core_result.map - ~f:(Recursive.make out_st.free_tvars rid) ty_res - in - - (* Reset state by removing the current tvar from the free vars set *) - let out_st = - { out_st with free_tvars = VSet.remove rid out_st.free_tvars } - in - put out_st >>= fun _ -> - - (* Update cache with final result *) - update_tvar_cache root_id ty_res >>= fun _ -> - - (* Throw the error if one was encountered *) - match ty_res with - | Ok ty -> return ty - | Error e -> error e + State.( + let%bind rid = fresh_num in + let rvar = Ty.RVar rid in + (* Set current variable "under resolution" *) + let%bind _ = update_tvar_cache root_id (Ok (Ty.TVar (rvar, None))) in + let%bind in_st = get in + (* Resolve the tvar *) + let (ty_res, out_st) = run in_st (resolve_bounds ~env cons) in + (* Create a recursive type (if needed) *) + let ty_res = Core_result.map ~f:(Recursive.make out_st.free_tvars rid) ty_res in + (* Reset state by removing the current tvar from the free vars set *) + let out_st = { out_st with free_tvars = VSet.remove rid out_st.free_tvars } in + let%bind _ = put out_st in + (* Update cache with final result *) + let%bind _ = update_tvar_cache root_id ty_res in + (* Throw the error if one was encountered *) + match ty_res with + | Ok ty -> return ty + | Error e -> error e) (* Resolving a type variable amounts to normalizing its lower bounds and taking their union. 
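(* Editor's note: an illustrative aside, not part of this diff. With these
   changes the normalized lower bounds are flattened and deduplicated before
   the union is built, and a variable that has no lower bounds at all is
   rendered as

     Ty.Bot (Ty.NoLowerWithUpper use_kind)

   where use_kind is derived from the variable's upper bounds by
   empty_with_upper_bounds. *)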
*) - and resolve_bounds ~env = - let open Constraint in - function - | Resolved t -> type__ ~env t - | Unresolved bounds -> - let ts = T.TypeMap.keys bounds.lower in - mapM (type__ ~env) ts >>| - uniq_union + and resolve_bounds ~env = function + | Constraint.Resolved (_, t) + | Constraint.FullyResolved (_, t) -> + type__ ~env t + | Constraint.Unresolved bounds -> + (match%bind resolve_from_lower_bounds ~env bounds with + | [] -> empty_with_upper_bounds ~env bounds + | hd :: tl -> return (Ty.mk_union ~flattened:true (hd, tl))) + + and resolve_from_lower_bounds ~env bounds = + T.TypeMap.keys bounds.Constraint.lower + |> mapM (fun t -> + let%map ty = type__ ~env t in + Nel.to_list (Ty.bk_union ty)) + >>| Core_list.concat + >>| Core_list.dedup + + and empty_with_upper_bounds ~env bounds = + let uses = T.UseTypeMap.keys bounds.Constraint.upper in + let%map use_kind = uses_t ~env uses in + Ty.Bot (Ty.NoLowerWithUpper use_kind) + + and any_t = function + | T.Annotated -> Ty.Annotated + | T.AnyError -> Ty.AnyError + | T.Unsound k -> Ty.Unsound (unsoundness_any_t k) + | T.Untyped -> Ty.Untyped + + and unsoundness_any_t = function + | T.BoundFunctionThis -> Ty.BoundFunctionThis + | T.ComputedNonLiteralKey -> Ty.ComputedNonLiteralKey + | T.Constructor -> Ty.Constructor + | T.DummyStatic -> Ty.DummyStatic + | T.Existential -> Ty.Existential + | T.Exports -> Ty.Exports + | T.FunctionPrototype -> Ty.FunctionPrototype + | T.InferenceHooks -> Ty.InferenceHooks + | T.InstanceOfRefinement -> Ty.InstanceOfRefinement + | T.Merged -> Ty.Merged + | T.ResolveSpread -> Ty.ResolveSpread + | T.Unchecked -> Ty.Unchecked + | T.Unimplemented -> Ty.Unimplemented + | T.UnresolvedType -> Ty.UnresolvedType + | T.WeakContext -> Ty.WeakContext and bound_t ~env reason name = - let symbol = symbol_from_reason env reason name in - return (Ty.Bound symbol) + let { Ty.def_loc; name; _ } = symbol_from_reason env reason name in + return (Ty.Bound (def_loc, name)) and fun_ty ~env f fun_type_params = - let {T.params; rest_param; return_t; _} = f in - mapM (fun_param ~env) params >>= fun fun_params -> - fun_rest_param_t ~env rest_param >>= fun fun_rest_param -> - type__ ~env return_t >>= fun fun_return -> - return {Ty.fun_params; fun_rest_param; fun_return; fun_type_params} + let { T.params; rest_param; return_t; _ } = f in + let%bind fun_params = mapM (fun_param ~env) params in + let%bind fun_rest_param = fun_rest_param_t ~env rest_param in + let%bind fun_return = type__ ~env return_t in + return { Ty.fun_params; fun_rest_param; fun_return; fun_type_params } and method_ty ~env t = - let open Type in - match t with - | DefT (_, FunT (_, _, f)) -> - fun_ty ~env f None - | DefT (_, PolyT (ps, DefT (_, FunT (_, _, f)), _)) -> - mapM (type_param ~env) ps >>= fun ps -> - fun_ty ~env f (Some ps) - | _ -> - terr ~kind:BadMethodType (Some t) + Type.( + match t with + | DefT (_, _, FunT (_, _, f)) -> fun_ty ~env f None + | DefT (_, _, PolyT (_, ps, DefT (_, _, FunT (_, _, f)), _)) -> + let%bind ps = mapM (type_param ~env) (Nel.to_list ps) in + fun_ty ~env f (Some ps) + | _ -> terr ~kind:BadMethodType (Some t)) and fun_param ~env (x, t) = - opt_t ~env t >>= fun (t, prm_optional) -> + let%bind (t, prm_optional) = opt_t ~env t in return (x, t, { Ty.prm_optional }) and fun_rest_param_t ~env = function - | Some (x, _, t) -> type__ ~env t >>| fun t -> Some (x,t) + | Some (x, _, t) -> + let%map t = type__ ~env t in + Some (x, t) | _ -> return None - and obj_ty ~env {T.flags = {T.exact; T.frozen; _}; props_tmap; dict_t; _} = - let obj_exact = exact 
in - let obj_frozen = frozen in - obj_props ~env props_tmap dict_t >>| fun obj_props -> - {Ty.obj_exact; obj_frozen; obj_props} - - and obj_props ~env id dict = - let dispatch (x, p) = - if x = "$call" - then call_prop ~env p - else obj_prop ~env x p + and obj_ty ~env reason o = + let { T.flags; props_tmap; call_t; dict_t; _ } = o in + let { T.exact = obj_exact; T.frozen = obj_frozen; _ } = flags in + let obj_literal = + match Reason.desc_of_reason reason with + | Reason.RObjectLit -> true + | _ -> false in - let cx = Env.get_cx env in - let props = SMap.bindings (Context.find_props cx id) in - concat_fold_m dispatch props >>= fun obj_props -> - match dict with - | Some d -> index_prop ~env d >>| fun i -> i::obj_props - | None -> return obj_props + let%map obj_props = obj_props ~env props_tmap call_t dict_t in + Ty.Obj { Ty.obj_exact; obj_frozen; obj_literal; obj_props } - and obj_prop ~env x p = + and obj_prop ~env (x, p) = match p with | T.Field (_, t, polarity) -> let fld_polarity = type_polarity polarity in - opt_t ~env t >>| fun (t, fld_optional) -> - [Ty.(NamedProp (x, Field (t, {fld_polarity; fld_optional})))] + let%map (t, fld_optional) = opt_t ~env t in + [Ty.(NamedProp (x, Field (t, { fld_polarity; fld_optional })))] | T.Method (_, t) -> - method_ty ~env t >>| fun t -> [Ty.NamedProp (x, Ty.Method t)] + let%map t = method_ty ~env t in + [Ty.NamedProp (x, Ty.Method t)] | T.Get (_, t) -> - type__ ~env t >>| fun t -> [Ty.NamedProp (x, Ty.Get t)] + let%map t = type__ ~env t in + [Ty.NamedProp (x, Ty.Get t)] | T.Set (_, t) -> - type__ ~env t >>| fun t -> [Ty.NamedProp (x, Ty.Set t)] + let%map t = type__ ~env t in + [Ty.NamedProp (x, Ty.Set t)] | T.GetSet (loc1, t1, loc2, t2) -> - obj_prop ~env x (T.Get (loc1, t1)) >>= fun p1 -> - obj_prop ~env x (T.Set (loc2, t2)) >>| fun p2 -> - p1@p2 - - and call_prop ~env = - let intersection = function - | T.DefT (_, T.IntersectionT rep) -> T.InterRep.members rep - | t -> [t] + let%bind p1 = obj_prop ~env (x, T.Get (loc1, t1)) in + let%map p2 = obj_prop ~env (x, T.Set (loc2, t2)) in + p1 @ p2 + + and call_prop_from_t ~env t = + let ts = + match t with + | T.IntersectionT (_, rep) -> T.InterRep.members rep + | t -> [t] in - let multi_call ts = - mapM (method_ty ~env) ts >>| fun ts -> - List.map (fun t -> Ty.CallProp t) ts + let%map ts = mapM (method_ty ~env) ts in + Core_list.map ~f:(fun t -> Ty.CallProp t) ts + + and obj_props = + (* call property *) + let do_calls ~env = function + | Some call_id -> + let cx = Env.get_cx env in + let ft = Context.find_call cx call_id in + call_prop_from_t ~env ft + | None -> return [] in - function - | T.Method (_, t) -> intersection t |> multi_call - | T.Field (_, t, _) -> intersection t |> multi_call - | _ -> terr ~kind:BadCallProp None - - and index_prop ~env d = - let {T.dict_polarity; dict_name; key; value} = d in - let dict_polarity = type_polarity dict_polarity in - type__ ~env key >>= fun dict_key -> - type__ ~env value >>| fun dict_value -> - Ty.(IndexProp {dict_polarity; dict_name; dict_key; dict_value}) - - and arr_ty ~env = function + let do_props ~env props = concat_fold_m (obj_prop ~env) props in + let do_dict ~env = function + | Some d -> + let { T.dict_polarity; dict_name; key; value } = d in + let dict_polarity = type_polarity dict_polarity in + let%bind dict_key = type__ ~env key in + let%map dict_value = type__ ~env value in + [Ty.IndexProp { Ty.dict_polarity; dict_name; dict_key; dict_value }] + | None -> return [] + in + fun ~env props_id call_id_opt dict -> + let cx = Env.get_cx env in + let 
props = SMap.bindings (Context.find_props cx props_id) in + let%bind call_props = do_calls ~env call_id_opt in + let%bind props = do_props ~env props in + let%map dict = do_dict ~env dict in + call_props @ props @ dict + + and arr_ty ~env reason elt_t = + let arr_literal = + match Reason.desc_of_reason reason with + | Reason.RArrayLit -> true + | _ -> false + in + match elt_t with | T.ArrayAT (t, _) -> - type__ ~env t >>| fun t -> - Ty.(Arr { arr_readonly = false; arr_elt_t = t}) + let%map t = type__ ~env t in + Ty.Arr { Ty.arr_readonly = false; arr_literal; arr_elt_t = t } | T.ROArrayAT t -> - type__ ~env t >>| fun t -> - Ty.(Arr { arr_readonly = true; arr_elt_t = t}) + let%map t = type__ ~env t in + Ty.Arr { Ty.arr_readonly = true; arr_literal; arr_elt_t = t } | T.TupleAT (_, ts) -> - mapM (type__ ~env) ts >>| fun ts -> Ty.Tup ts - | T.EmptyAT -> - return Ty.Bot - - and name_of_instance_reason r = - (* This should cover all cases but throw an error just in case. *) - match desc_of_reason ~unwrap:false r with - | RType name - | RIdentifier name -> return name - | RReactComponent -> return "React$Component" - | r -> - let msg = spf "could not extract name from reason: %s" - (Reason.string_of_desc r) in - terr ~kind:BadInstanceT ~msg None - - and instance_t ~env r inst = - let open Type in - name_of_instance_reason r >>= fun name -> - let symbol = symbol_from_reason env r name in - mapM (fun (_, _, t, _) -> type__ ~env t) inst.type_args >>| function - | [] -> Ty.Generic (symbol, inst.structural, None) - | xs -> Ty.Generic (symbol, inst.structural, Some xs) - - and class_t ~env t ps = - let rec class_t_aux = function - | Ty.Class (name, structural, _) -> - return (Ty.Class (name, structural, ps)) - | Ty.Generic (name, structural, _) -> - return (Ty.Class (name, structural, ps)) - | (Ty.Bot | Ty.Exists | Ty.Any | Ty.AnyObj | Ty.Top) as b -> - return b - | Ty.Union (t0,t1,ts) -> - class_t_aux t0 >>= fun t0 -> - class_t_aux t1 >>= fun t1 -> - mapM class_t_aux ts >>| fun ts -> - uniq_union (t0::t1::ts) - | Ty.Inter (t0,t1,ts) -> - class_t_aux t0 >>= fun t0 -> - class_t_aux t1 >>= fun t1 -> - mapM class_t_aux ts >>| fun ts -> - uniq_inter (t0::t1::ts) - | Ty.Bound (Ty.Symbol (prov, sym_name)) -> - let pred Type.{ name; reason; _ } = ( - name = sym_name && - Reason.def_loc_of_reason reason = Ty.loc_of_provenance prov - ) in - begin match List.find_opt pred env.Env.tparams with - | Some Type.{ bound; _ } -> type__ ~env bound >>= class_t_aux - | _ -> terr ~kind:BadClassT ~msg:"bound" (Some t) - end - | ty -> - terr ~kind:BadClassT ~msg:(Ty_debug.string_of_ctor ty) (Some t) + let%map ts = mapM (type__ ~env) ts in + Ty.Tup ts + + (* Used for instances of React.createClass(..) 
*) + and react_component = + let react_props ~env ~default props name = + match SMap.find name props with + | exception Not_found -> return default + | Type.Field (_, t, _) -> type__ ~env t + | _ -> return default in - type__ ~env t >>= class_t_aux + let react_static_props ~env static = + let cx = Env.(env.genv.cx) in + match static with + | T.DefT (_, _, T.ObjT { T.props_tmap; _ }) -> + Context.find_props cx props_tmap + |> SMap.bindings + |> mapM (fun (name, p) -> obj_prop ~env (name, p)) + >>| List.concat + | _ -> return [] + in + let inexactify = function + | Ty.Obj ({ Ty.obj_exact = true; _ } as obj) -> Ty.Obj { obj with Ty.obj_exact = false } + | ty -> ty + in + fun ~env static own_props -> + let cx = Env.(env.genv.cx) in + let own_props = Context.find_props cx own_props in + let%bind props_ty = react_props ~env ~default:Ty.explicit_any own_props "props" in + let%bind state_ty = react_props ~env ~default:Ty.explicit_any own_props "state" in + let%map static_flds = react_static_props ~env static in + (* The inferred type for state is unsealed, which has its exact bit set. + * However, Ty.t does not account for unsealed and exact sealed objects are + * incompatible with exact and unsealed, so making state inexact here. *) + let state_ty = inexactify state_ty in + let parent_instance = generic_builtin_t "React$Component" [props_ty; state_ty] in + let parent_class = Ty.Utility (Ty.Class parent_instance) in + (* + * { + * +propTypes: { + * foo: React$PropType$Primitive$Required, + * bar: React$PropType$Primitive, + * }, + * defaultProps: { ... }, + * } + *) + let props_obj = + Ty.Obj + { + Ty.obj_exact = false; + obj_frozen = false; + obj_literal = false; + obj_props = static_flds; + } + in + Ty.mk_inter (parent_class, [props_obj]) + + and instance_t = + let to_generic ~env kind r inst = + match desc_of_reason ~unwrap:false r with + | RType name + | RIdentifier name -> + (* class or interface declaration *) + let symbol = symbol_from_reason env r name in + let%map tys = mapM (fun (_, _, t, _) -> type__ ~env t) inst.T.type_args in + let targs = + match tys with + | [] -> None + | _ -> Some tys + in + Ty.Generic (symbol, kind, targs) + | r -> + let desc = Reason.string_of_desc r in + let msg = "could not extract name from reason: " ^ desc in + terr ~kind:BadInstanceT ~msg None + in + fun ~env r super inst -> + let { T.inst_kind; own_props; inst_call_t; _ } = inst in + match inst_kind with + | T.InterfaceKind { inline = true } -> inline_interface ~env super own_props inst_call_t + | T.InterfaceKind { inline = false } -> to_generic ~env Ty.InterfaceKind r inst + | T.ClassKind -> to_generic ~env Ty.ClassKind r inst + + and inline_interface = + let rec extends = function + | Ty.Generic g -> return [g] + | Ty.Inter (t1, t2, ts) -> mapM extends (t1 :: t2 :: ts) >>| Core_list.concat + | Ty.TypeOf Ty.ObjProto (* interface {} *) + | Ty.TypeOf Ty.FunProto (* interface { (): void } *) -> + (* Do not contribute to the extends clause *) + return [] + | _ -> + (* Top-level syntax only allows generics in extends *) + terr ~kind:BadInlineInterfaceExtends None + in + let fix_dict_props props = + let (key, value, pole, props) = + List.fold_left + (fun (key, value, pole, ps) p -> + match p with + | Ty.NamedProp ("$key", Ty.Field (t, _)) -> + (* The $key's polarity is fixed to neutral so we ignore it *) + (Some t, value, pole, ps) + | Ty.NamedProp ("$value", Ty.Field (t, { Ty.fld_polarity; _ })) -> + (* The dictionary's polarity is determined by that of $value *) + (key, Some t, Some fld_polarity, ps) + | _ -> 
(key, value, pole, p :: ps)) + (None, None, None, []) + props + in + let props = List.rev props in + match (key, value, pole) with + | (Some dict_key, Some dict_value, Some dict_polarity) -> + let ind_prop = + Ty.IndexProp + { Ty.dict_polarity; dict_name = None; (* This seems to be lost *) + dict_key; dict_value } + in + ind_prop :: props + | (_, _, _) -> props + in + fun ~env super own_props inst_call_t -> + let%bind super = type__ ~env super in + let%bind if_extends = extends super in + let%map obj_props = obj_props ~env own_props inst_call_t None (* dict *) in + let obj_props = fix_dict_props obj_props in + let if_body = { Ty.obj_exact = false; obj_frozen = false; obj_literal = false; obj_props } in + Ty.InlineInterface { Ty.if_extends; if_body } + + and class_t = + let rec go ~env ps ty = + match ty with + | Ty.Generic (name, kind, _) -> + begin + match kind with + | Ty.InterfaceKind -> return (Ty.InterfaceDecl (name, ps)) + | Ty.TypeAliasKind -> return (Ty.Utility (Ty.Class ty)) + | Ty.ClassKind -> + (* If some parameters have been passed, then we are in the `PolyT-ThisClassT` + * case of Flow_js.canonicalize_imported_type. This case should still be + * normalized to an abstract class declaration. If no parameters are passed + * then this is a `Class` with an instance T. *) + begin + match ps with + | Some _ -> return (Ty.ClassDecl (name, ps)) + | None -> return (Ty.Utility (Ty.Class ty)) + end + end + | Ty.Utility (Ty.Class _ | Ty.Exists) + | Ty.Bot _ + | Ty.Any _ + | Ty.Top + | Ty.Union _ + | Ty.Inter _ -> + return (Ty.Utility (Ty.Class ty)) + | Ty.Bound (loc, bname) -> + let pred Type.{ name; reason; _ } = + name = bname && Reason.def_aloc_of_reason reason = loc + in + begin + match List.find_opt pred env.Env.tparams with + | Some Type.{ bound; _ } -> + let%bind b = type__ ~env bound in + go ~env ps b + | _ -> terr ~kind:BadClassT ~msg:"bound" None + end + | ty -> terr ~kind:BadClassT ~msg:(Ty_debug.string_of_ctor ty) None + in + fun ~env t ps -> + match t with + | T.DefT (r, _, T.InstanceT (static, _, _, inst)) + when desc_of_reason ~unwrap:false r = RReactComponent -> + let { Type.own_props; _ } = inst in + react_component ~env static own_props + | _ -> + let%bind t = type__ ~env t in + go ~env ps t and this_class_t ~env t ps = - class_t ~env t ps + let%bind ty = type__ ~env t in + match ty with + | Ty.Generic (name, Ty.ClassKind, _) -> return (Ty.ClassDecl (name, ps)) + | _ -> terr ~kind:BadThisClassT ~msg:(Ty_debug.string_of_ctor ty) (Some t) and poly_ty ~env t typeparams = - let env, results = List.fold_left (fun (env, rs) typeparam -> - let r = type_param ~env typeparam in - (Env.add_typeparam env typeparam, r::rs) - ) (env, []) typeparams in - List.rev results |> all >>= fun ps -> - let ps = match ps with [] -> None | _ -> Some ps in + let (env, results) = + Nel.fold_left + (fun (env, rs) typeparam -> + let r = type_param ~env typeparam in + (Env.add_typeparam env typeparam, r :: rs)) + (env, []) + typeparams + in + let%bind ps = List.rev results |> all in + let ps = + match ps with + | [] -> None + | _ -> Some ps + in match t with - | T.DefT (_, T.ClassT t) -> class_t ~env t ps + | T.DefT (_, _, T.ClassT t) -> class_t ~env t ps | T.ThisClassT (_, t) -> this_class_t ~env t ps - | T.DefT (r, T.TypeT (kind, t)) -> type_t ~env r kind t ps - | T.DefT (_, T.FunT (_, _, f)) -> - fun_ty ~env f ps >>| fun fun_t -> Ty.Fun fun_t - | _ -> - terr ~kind:BadPoly (Some t) + | T.DefT (r, _, T.TypeT (kind, t)) -> type_t ~env r kind t ps + | T.DefT (_, _, T.FunT (_, _, f)) -> + let%map fun_t 
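(* Informal example for this branch: a polymorphic function type such as
   `<T>(x: T) => T` lands here, and the type parameters collected in `ps`
   above are attached to the resulting Ty.Fun. *)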
= fun_ty ~env f ps in + Ty.Fun fun_t + | _ -> terr ~kind:BadPoly (Some t) (* Type Aliases *) and type_t = - let open Type in - (* NOTE the use of the reason within `t` instead of the one passed with + Type.( + (* NOTE the use of the reason within `t` instead of the one passed with the constructor TypeT. The latter is an RType, which is somewhat more unwieldy as it is used more pervasively. *) - let local env t ta_tparams = - let reason = TypeUtil.reason_of_t t in - match desc_of_reason ~unwrap:false reason with - | RTypeAlias (name, true, _) -> + let local env t ta_tparams = + let reason = TypeUtil.reason_of_t t in + match desc_of_reason ~unwrap:false reason with + | RTypeAlias (name, true, _) -> + let env = Env.{ env with under_type_alias = Some name } in + let%bind ta_type = type__ ~env t in + let symbol = symbol_from_reason env reason name in + return (Ty.named_alias symbol ?ta_tparams ~ta_type) + | _ -> terr ~kind:BadTypeAlias ~msg:"local" (Some t) + in + let import_symbol env r t = + match desc_of_reason ~unwrap:false r with + | RNamedImportedType (name, _) + | RDefaultImportedType (name, _) + | RImportStarType name + | RImportStarTypeOf name + | RImportStar name -> + return (symbol_from_reason env r name) + | _ -> terr ~kind:BadTypeAlias ~msg:"import" (Some t) + in + let import env r t ps = + let%bind symbol = import_symbol env r t in + let { Ty.name; _ } = symbol in let env = Env.{ env with under_type_alias = Some name } in - type__ ~env t >>= fun ta_type -> - let symbol = symbol_from_reason env reason name in - return (Ty.named_alias symbol ?ta_tparams ~ta_type) - | _ -> terr ~kind:BadTypeAlias ~msg:"local" (Some t) - in - let import_symbol env r t = - match desc_of_reason ~unwrap:false r with - | RNamedImportedType (name, _) - | RDefaultImportedType (name, _) - | RImportStarType name - | RImportStarTypeOf name - | RImportStar name -> - return (symbol_from_reason env r name) - | _ -> terr ~kind:BadTypeAlias ~msg:"import" (Some t) - in - let import env r t ps = - import_symbol env r t >>= fun symbol -> - let Ty.Symbol (_, name) = symbol in - let env = Env.{ env with under_type_alias = Some name } in - type__ ~env t >>= function - | Ty.TypeAlias _ -> - terr ~kind:BadTypeAlias ~msg:"nested type alias" None - | Ty.Class _ as t -> - (* Normalize imports of the form "import typeof { C } from 'm';" (where C - is defined as a class in 'm') as a Ty.Class, instead of Ty.TypeAlias. + let%bind ty = type__ ~env t in + match ty with + | Ty.TypeAlias _ -> terr ~kind:BadTypeAlias ~msg:"nested type alias" None + | Ty.ClassDecl _ + | Ty.InterfaceDecl _ -> + (* Normalize imports of the form "import typeof { C } from 'm';" (where C + is defined as a class/interface in 'm') as a Ty.ClassDecl/InterfaceDecl, + instead of Ty.TypeAlias. The provenance information on the class should point to the defining location. This way we avoid the indirection of the import location on the alias symbol. 
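       For instance (an informal sketch): if 'm' declares `class C {}`, then
       `import typeof { C } from 'm'` is reported as the class declaration of C,
       whose def_loc points into 'm', rather than as an alias wrapping it.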
*) - return t - | t -> - return (Ty.named_alias symbol ?ta_tparams:ps ~ta_type:t) - in - let import_typeof env r t ps = import env r t ps in - let import_fun env r t ps = import env r t ps in - let opaque env t ps = - match t with - | OpaqueT (r, o) -> opaque_type_t ~env r o ps - | _ -> terr ~kind:BadTypeAlias ~msg:"opaque" (Some t) + return ty + | _ -> return (Ty.named_alias symbol ?ta_tparams:ps ~ta_type:ty) + in + let import_typeof env r t ps = import env r t ps in + let opaque env t ps = + match t with + | OpaqueT (r, o) -> opaque_type_t ~env r o ps + | _ -> terr ~kind:BadTypeAlias ~msg:"opaque" (Some t) + in + fun ~env r kind t ps -> + match kind with + | TypeAliasKind -> local env t ps + | ImportClassKind -> class_t ~env t ps + | ImportTypeofKind -> import_typeof env r t ps + | OpaqueKind -> opaque env t ps + (* The following cases are not common *) + | TypeParamKind -> terr ~kind:BadTypeAlias ~msg:"typeparam" (Some t) + | InstanceKind -> terr ~kind:BadTypeAlias ~msg:"instance" (Some t)) + + and exact_t ~env t = type__ ~env t >>| Ty.mk_exact + + and type_app = + let go ~env targs = function + | Ty.ClassDecl (name, _) -> return (generic_class name targs) + | Ty.InterfaceDecl (name, _) -> return (generic_interface name targs) + | Ty.TypeAlias { Ty.ta_name; ta_tparams; ta_type } -> + begin + match ta_type with + | Some ta_type when Env.expand_type_aliases env -> + begin + match Option.both ta_tparams targs with + | Some (ps, ts) -> Substitution.run ps ts ta_type + | None -> return ta_type + end + | _ -> + let targs = + if Env.omit_targ_defaults env then + remove_targs_matching_defaults targs ta_tparams + else + targs + in + return (generic_talias ta_name targs) + end + | Ty.(Any _ | Bot _ | Top) as ty -> return ty + (* "Fix" type application on recursive types *) + | Ty.TVar (Ty.RVar v, None) -> return (Ty.TVar (Ty.RVar v, targs)) + | Ty.Utility (Ty.Class _) as ty when Option.is_none targs -> return ty + | ty -> terr ~kind:BadTypeApp ~msg:(Ty_debug.string_of_ctor ty) None in - fun ~env r kind t ps -> - match kind with - | TypeAliasKind -> local env t ps - | ImportClassKind -> class_t ~env t ps - | ImportTypeofKind -> import_typeof env r t ps - | ImportFunKind -> import_fun env r t ps - | OpaqueKind -> opaque env t ps - (* The following cases are not common *) - | TypeParamKind -> terr ~kind:BadTypeAlias ~msg:"typeparam" (Some t) - | InstanceKind -> terr ~kind:BadTypeAlias ~msg:"instance" (Some t) - - and exact_t ~env t = - type__ ~env t >>| Ty.mk_exact - - and type_app ~env t targs = - type__ ~env t >>= fun ty -> - mapM (type__ ~env) targs >>= fun targs -> - match ty with - | Ty.Class (name, _, _) -> - return (Ty.generic_t name targs) - | Ty.TypeAlias { Ty.ta_name; ta_tparams; ta_type } -> - let t = if Env.expand_type_aliases env then - match ta_tparams, ta_type with - | Some ps, Some t -> Substitution.run ps targs t - | None, Some t -> t - | _ -> Ty.generic_t ta_name targs - else - Ty.generic_t ta_name targs - in return t - | Ty.Any -> - return Ty.Any - (* "Fix" type application on recursive types *) - | Ty.TVar (Ty.RVar v, None) -> - return (Ty.TVar (Ty.RVar v, Some targs)) - | Ty.Bot -> - return Ty.Bot - | _ -> - let msg = spf "Normalized receiver type: %s" (Ty_debug.dump_t ty) in - terr ~kind:BadTypeApp ~msg (Some t) + fun ~env t targs -> + let%bind ty = type__ ~env t in + let%bind targs = optMapM (type__ ~env) targs in + go ~env targs ty and opaque_t ~env reason opaque_type = let name = opaque_type.Type.opaque_name in let opaque_symbol = symbol_from_reason env reason name in - 
return (Ty.named_t opaque_symbol) + return (generic_talias opaque_symbol None) (* We are being a bit lax here with opaque types so that we don't have to introduce a new constructor in Ty.t to support all kinds of OpaqueT. @@ -1041,377 +1225,471 @@ end = struct Otherwise, we fall back to a bodyless TypeAlias. *) and opaque_type_t ~env reason opaque_type ta_tparams = - let open Type in - let name = opaque_type.opaque_name in - let current_source = Env.current_file env in - let opaque_source = Loc.source (def_loc_of_reason reason) in - let opaque_symbol = symbol_from_reason env reason name in - (* Compare the current file (of the query) and the file that the opaque + Type.( + let name = opaque_type.opaque_name in + let current_source = Env.current_file env in + let opaque_source = ALoc.source (def_aloc_of_reason reason) in + let opaque_symbol = symbol_from_reason env reason name in + (* Compare the current file (of the query) and the file that the opaque type is defined. If they differ, then hide the underlying/super type. Otherwise, display the underlying/super type. *) - if Some current_source <> opaque_source then - return (Ty.named_alias ?ta_tparams opaque_symbol) - else - let t_opt = match opaque_type with - | { underlying_t = Some t; _ } (* opaque type A = number; *) - | { super_t = Some t; _ } -> Some t (* declare opaque type B: number; *) - | _ -> None (* declare opaque type C; *) - (* TODO: This will potentially report a remote name. + if Some current_source <> opaque_source then + return (Ty.named_alias ?ta_tparams opaque_symbol) + else + let t_opt = + match opaque_type with + | { underlying_t = Some t; _ } (* opaque type A = number; *) + | { super_t = Some t; _ } -> + Some t (* declare opaque type B: number; *) + | _ -> None + (* declare opaque type C; *) + (* TODO: This will potentially report a remote name. The same fix for T25963804 should be applied here as well. 
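       (For intuition, a sketch of the visibility rule implemented above: given
       `opaque type ID = string` defined in id.js, a query issued from id.js sees
       `type ID = string`, while a query from any other file sees only the
       bodyless `type ID`.)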
*) - in - option (type__ ~env) t_opt >>| fun ta_type -> - Ty.named_alias ?ta_tparams ?ta_type opaque_symbol + in + let%map ta_type = option (type__ ~env) t_opt in + Ty.named_alias ?ta_tparams ?ta_type opaque_symbol) and custom_fun_expanded ~env = - let open Type in - function - (* Object.assign: (target: any, ...sources: Array): any *) - | ObjectAssign -> return Ty.(mk_fun - ~params:[(Some "target", Any, non_opt_param)] - ~rest:(Some "sources", Arr { arr_readonly = false; arr_elt_t = Any }) - Any - ) - - (* Object.getPrototypeOf: (o: any): any *) - | ObjectGetPrototypeOf -> - return Ty.(mk_fun ~params:[(Some "o", Any, non_opt_param)] Any) - - (* Object.setPrototypeOf: (o: any, p: any): any *) - | ObjectSetPrototypeOf -> - let params = [ - (Some "o", Ty.Any, non_opt_param); - (Some "p", Ty.Any, non_opt_param); - ] in - return (mk_fun ~params Ty.Any) - - (* var idx: - + Type.( + function + (* Object.assign: (target: any, ...sources: Array): any *) + | ObjectAssign -> + return + Ty.( + mk_fun + ~params:[(Some "target", explicit_any, non_opt_param)] + ~rest: + ( Some "sources", + Arr { arr_readonly = false; arr_literal = false; arr_elt_t = explicit_any } ) + explicit_any) + (* Object.getPrototypeOf: (o: any): any *) + | ObjectGetPrototypeOf -> + return Ty.(mk_fun ~params:[(Some "o", explicit_any, non_opt_param)] explicit_any) + (* Object.setPrototypeOf: (o: any, p: any): any *) + | ObjectSetPrototypeOf -> + let params = + [(Some "o", Ty.explicit_any, non_opt_param); (Some "p", Ty.explicit_any, non_opt_param)] + in + return (mk_fun ~params Ty.explicit_any) + (* var idx: + (obj: IdxObject, pathCallback: (demaybefiedObj: IdxObject) => IdxResult) => ?IdxResult; *) - | Idx -> - let idxObject = Ty.builtin_t "IdxObject" in - let idxResult = Ty.builtin_t "IdxResult" in - let tparams = [ - mk_tparam ~bound:Ty.AnyObj "IdxObject"; - mk_tparam "IdxResult"; - ] - in - let pathCallback = mk_fun - ~params:[(Some "demaybefiedObj", idxObject, non_opt_param)] - idxResult - in - let params = [ - (Some "obj", idxObject, non_opt_param); - (Some "pathCallback", pathCallback, non_opt_param); - ] - in - return (mk_fun ~tparams ~params (Ty.mk_maybe idxResult)) - - (* var TypeAssertIs: (value: mixed) => boolean *) - | TypeAssertIs -> - let tparams = [ mk_tparam "TypeAssertT" ] in - let params = [ (Some "value", Ty.Top, non_opt_param) ] in - return (mk_fun ~tparams ~params Ty.Bool) - - (* var TypeAssertThrows: (value: mixed) => TypeAssertT *) - | TypeAssertThrows -> - let tparams = [ mk_tparam "TypeAssertT" ] in - let params = [ (Some "value", Ty.Top, non_opt_param) ] in - let ret = Ty.builtin_t "TypeAssertT" in - return (mk_fun ~tparams ~params ret) - - (* Result = {success: true, value: T} | {success: false, error: string} - var TypeAssertWraps: (value: mixed) => Result *) - | TypeAssertWraps -> - let tparams = [ mk_tparam "TypeAssertT" ] in - let params = [ (Some "value", Ty.Top, non_opt_param) ] in - let result_fail_ty = Ty.mk_object (Ty.mk_field_props [ - ("success", Ty.BoolLit false, false); ("error", Ty.Str, false) - ]) in - let result_succ_ty = Ty.mk_object (Ty.mk_field_props [ - ("success", Ty.BoolLit true, false); ("value", Ty.builtin_t "TypeAssertT", false) - ]) in - let ret = Ty.mk_union [result_fail_ty; result_succ_ty] in - return (mk_fun ~tparams ~params ret) - - (* debugPrint: (_: any[]) => void *) - | DebugPrint -> return Ty.( - mk_fun ~params:[ - (Some "_", Arr { arr_readonly = false; arr_elt_t = Any }, non_opt_param) - ] Void - ) - - (* debugThrow: () => empty *) - | DebugThrow -> return (mk_fun 
Ty.Bot) - - (* debugSleep: (seconds: number) => void *) - | DebugSleep -> return Ty.( - mk_fun ~params:[(Some "seconds", Num, non_opt_param)] Void - ) - - (* reactPropType: any (TODO) *) - | ReactPropType _ -> return Ty.Any - - (* reactCreateClass: (spec: any) => ReactClass *) - | ReactCreateClass -> return Ty.(mk_fun - ~params:[(Some "spec", Any, non_opt_param)] - (generic_builtin_t "ReactClass" [Any]) - ) - - (* 1. Component class: - (name: ReactClass, config: T, children?: any) => React$Element - 2. Stateless functional component - type SFC = (config: T, context: any) => React$Element - (fn: SFC, config: T, children?: any) => React$Element - *) - | ReactCreateElement - | ReactCloneElement - | ReactElementFactory _ -> return Ty.( - let param_t = mk_tparam "T" in - let tparams = [param_t] in - let t = Bound (Symbol (Builtin, "T")) in - let params = [ - (Some "name", generic_builtin_t "ReactClass" [t], non_opt_param); - (Some "config", t, non_opt_param); - (Some "children", Any, opt_param); - ] + | Idx -> + let idxObject = Ty.Bound (ALoc.none, "IdxObject") in + let idxResult = Ty.Bound (ALoc.none, "IdxResult") in + let tparams = [mk_tparam ~bound:Ty.explicit_any "IdxObject"; mk_tparam "IdxResult"] in + let pathCallback = + mk_fun ~params:[(Some "demaybefiedObj", idxObject, non_opt_param)] idxResult + in + let params = + [ + (Some "obj", idxObject, non_opt_param); + (Some "pathCallback", pathCallback, non_opt_param); + ] in - let reactElement = generic_builtin_t "React$Element" [t] in - let f1 = mk_fun ~tparams ~params reactElement in - let params = [ - (Some "config", t, non_opt_param); - (Some "context", Any, non_opt_param); - ] + return (mk_fun ~tparams ~params (Ty.mk_maybe idxResult)) + (* var TypeAssertIs: (value: mixed) => boolean *) + | TypeAssertIs -> + let tparams = [mk_tparam "TypeAssertT"] in + let params = [(Some "value", Ty.Top, non_opt_param)] in + return (mk_fun ~tparams ~params (Ty.Bool None)) + (* var TypeAssertThrows: (value: mixed) => TypeAssertT *) + | TypeAssertThrows -> + let tparams = [mk_tparam "TypeAssertT"] in + let params = [(Some "value", Ty.Top, non_opt_param)] in + let ret = Ty.Bound (ALoc.none, "TypeAssertT") in + return (mk_fun ~tparams ~params ret) + (* Result = {success: true, value: T} | {success: false, error: string} + var TypeAssertWraps: (value: mixed) => Result *) + | TypeAssertWraps -> + let tparams = [mk_tparam "TypeAssertT"] in + let params = [(Some "value", Ty.Top, non_opt_param)] in + let result_fail_ty = + Ty.mk_object + (Ty.mk_field_props + [("success", Ty.BoolLit false, false); ("error", Ty.Str None, false)]) in - let sfc = mk_fun ~tparams ~params reactElement in - let params = [ - (Some "fn", sfc, non_opt_param); - (Some "config", t, non_opt_param); - (Some "children", Any, opt_param); - ] + let result_succ_ty = + Ty.mk_object + (Ty.mk_field_props + [("success", Ty.BoolLit true, false); ("value", builtin_t "TypeAssertT", false)]) in - let f2 = mk_fun ~tparams ~params reactElement in - mk_inter [f1; f2] - ) - - (* Fallback *) - | t -> custom_fun_short ~env t + let ret = Ty.mk_union (result_fail_ty, [result_succ_ty]) in + return (mk_fun ~tparams ~params ret) + (* debugPrint: (_: any[]) => void *) + | DebugPrint -> + return + Ty.( + mk_fun + ~params: + [ + ( Some "_", + Arr { arr_readonly = false; arr_literal = false; arr_elt_t = explicit_any }, + non_opt_param ); + ] + Void) + (* debugThrow: () => empty *) + | DebugThrow -> return (mk_fun (mk_empty Ty.EmptyType)) + (* debugSleep: (seconds: number) => void *) + | DebugSleep -> return Ty.(mk_fun 
~params:[(Some "seconds", Num None, non_opt_param)] Void) + (* reactPropType: any (TODO) *) + | ReactPropType _ -> return Ty.explicit_any + (* reactCreateClass: (spec: any) => ReactClass *) + | ReactCreateClass -> + let params = [(Some "spec", Ty.explicit_any, non_opt_param)] in + let x = Ty.builtin_symbol "ReactClass" in + return (mk_fun ~params (generic_talias x (Some [Ty.explicit_any]))) + (* + * 1. Component class: + * (name: ReactClass, config: T, children?: any) => React$Element + * + * 2. Stateless functional component + * type SFC = (config: T, context: any) => React$Element + * (fn: SFC, config: T, children?: any) => React$Element + *) + | ReactCreateElement + | ReactCloneElement + | ReactElementFactory _ -> + return + Ty.( + let param_t = mk_tparam "T" in + let tparams = [param_t] in + let t = Bound (ALoc.none, "T") in + let params = + [ + (Some "name", generic_builtin_t "ReactClass" [t], non_opt_param); + (Some "config", t, non_opt_param); + (Some "children", explicit_any, opt_param); + ] + in + let reactElement = generic_builtin_t "React$Element" [t] in + let f1 = mk_fun ~tparams ~params reactElement in + let params = + [(Some "config", t, non_opt_param); (Some "context", explicit_any, non_opt_param)] + in + let sfc = mk_fun ~tparams ~params reactElement in + let params = + [ + (Some "fn", sfc, non_opt_param); + (Some "config", t, non_opt_param); + (Some "children", explicit_any, opt_param); + ] + in + let f2 = mk_fun ~tparams ~params reactElement in + mk_inter (f1, [f2])) + (* Fallback *) + | t -> custom_fun_short ~env t) and custom_fun_short ~env = - let open Type in - function - | ObjectAssign -> return (Ty.builtin_t "Object$Assign") - | ObjectGetPrototypeOf -> return (Ty.builtin_t "Object$GetPrototypeOf") - | ObjectSetPrototypeOf -> return (Ty.builtin_t "Object$SetPrototypeOf") - | Compose false -> return (Ty.builtin_t "$Compose") - | Compose true -> return (Ty.builtin_t "$ComposeReverse") - | ReactPropType t -> react_prop_type ~env t - | ReactCreateClass -> return (Ty.builtin_t "React$CreateClass") - | ReactCreateElement -> return (Ty.builtin_t "React$CreateElement") - | ReactCloneElement -> return (Ty.builtin_t "React$CloneElement") - | ReactElementFactory t -> - type__ ~env t >>| fun t -> - Ty.generic_builtin_t "React$ElementFactory" [t] - | Idx -> return (Ty.builtin_t "$Facebookism$Idx") - (* var TypeAssertIs: (value: mixed) => boolean *) - | TypeAssertIs -> - let tparams = [ mk_tparam "TypeAssertT" ] in - let params = [ (Some "value", Ty.Top, non_opt_param) ] in - return (mk_fun ~tparams ~params Ty.Bool) - - (* var TypeAssertThrows: (value: mixed) => TypeAssertT *) - | TypeAssertThrows -> - let tparams = [ mk_tparam "TypeAssertT" ] in - let params = [ (Some "value", Ty.Top, non_opt_param) ] in - let ret = Ty.builtin_t "TypeAssertT" in - return (mk_fun ~tparams ~params ret) - - (* Result = {success: true, value: T} | {success: false, error: string} + Type.( + function + | ObjectAssign -> return (builtin_t "Object$Assign") + | ObjectGetPrototypeOf -> return (builtin_t "Object$GetPrototypeOf") + | ObjectSetPrototypeOf -> return (builtin_t "Object$SetPrototypeOf") + | Compose false -> return (builtin_t "$Compose") + | Compose true -> return (builtin_t "$ComposeReverse") + | ReactPropType t -> react_prop_type ~env t + | ReactCreateClass -> return (builtin_t "React$CreateClass") + | ReactCreateElement -> return (builtin_t "React$CreateElement") + | ReactCloneElement -> return (builtin_t "React$CloneElement") + | ReactElementFactory t -> + let%map t = type__ ~env t in + 
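(* A note on these short forms: most queries surface the opaque builtins, e.g.
   `Object.assign` as `Object$Assign` and `idx` as `$Facebookism$Idx`; the
   expanded signatures above are produced only when `Env.expand_internal_types`
   is set, as dispatched by `custom_fun` below. *)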
generic_builtin_t "React$ElementFactory" [t] + | Idx -> return (builtin_t "$Facebookism$Idx") + (* var TypeAssertIs: (value: mixed) => boolean *) + | TypeAssertIs -> + let tparams = [mk_tparam "TypeAssertT"] in + let params = [(Some "value", Ty.Top, non_opt_param)] in + return (mk_fun ~tparams ~params (Ty.Bool None)) + (* var TypeAssertThrows: (value: mixed) => TypeAssertT *) + | TypeAssertThrows -> + let tparams = [mk_tparam "TypeAssertT"] in + let params = [(Some "value", Ty.Top, non_opt_param)] in + let ret = builtin_t "TypeAssertT" in + return (mk_fun ~tparams ~params ret) + (* Result = {success: true, value: T} | {success: false, error: string} var TypeAssertWraps: (value: mixed) => Result *) - | TypeAssertWraps -> - let tparams = [ mk_tparam "TypeAssertT" ] in - let params = [ (Some "value", Ty.Top, non_opt_param) ] in - let result_fail_ty = Ty.mk_object (Ty.mk_field_props [ - ("success", Ty.BoolLit false, false); ("error", Ty.Str, false) - ]) in - let result_succ_ty = Ty.mk_object (Ty.mk_field_props [ - ("success", Ty.BoolLit true, false); ("value", Ty.builtin_t "TypeAssertT", false) - ]) in - let ret = Ty.mk_union [result_fail_ty; result_succ_ty] in - return (mk_fun ~tparams ~params ret) - - | DebugPrint -> return (Ty.builtin_t "$Flow$DebugPrint") - | DebugThrow -> return (Ty.builtin_t "$Flow$DebugThrow") - | DebugSleep -> return (Ty.builtin_t "$Flow$DebugSleep") + | TypeAssertWraps -> + let tparams = [mk_tparam "TypeAssertT"] in + let params = [(Some "value", Ty.Top, non_opt_param)] in + let result_fail_ty = + Ty.mk_object + (Ty.mk_field_props + [("success", Ty.BoolLit false, false); ("error", Ty.Str None, false)]) + in + let result_succ_ty = + Ty.mk_object + (Ty.mk_field_props + [("success", Ty.BoolLit true, false); ("value", builtin_t "TypeAssertT", false)]) + in + let ret = Ty.mk_union (result_fail_ty, [result_succ_ty]) in + return (mk_fun ~tparams ~params ret) + | DebugPrint -> return (builtin_t "$Flow$DebugPrint") + | DebugThrow -> return (builtin_t "$Flow$DebugThrow") + | DebugSleep -> return (builtin_t "$Flow$DebugSleep")) and custom_fun ~env t = - if Env.expand_internal_types env - then custom_fun_expanded ~env t - else custom_fun_short ~env t + if Env.expand_internal_types env then + custom_fun_expanded ~env t + else + custom_fun_short ~env t and react_prop_type ~env = - let open T.React.PropType in - function - | Primitive (_, t) -> - type__ ~env t >>| fun t -> - Ty.generic_builtin_t "React$PropType$Primitive" [t] - | Complex ArrayOf -> return (Ty.builtin_t "React$PropType$ArrayOf") - | Complex InstanceOf -> return (Ty.builtin_t "React$PropType$ArrayOf") - | Complex ObjectOf -> return (Ty.builtin_t "React$PropType$dbjectOf") - | Complex OneOf -> return (Ty.builtin_t "React$PropType$OneOf") - | Complex OneOfType -> return (Ty.builtin_t "React$PropType$OneOfType") - | Complex Shape -> return (Ty.builtin_t "React$PropType$Shape") + T.React.PropType.( + function + | Primitive (is_req, t) -> + let%map t = type__ ~env t in + generic_builtin_t + ( if is_req then + "React$PropType$Primitive$Required" + else + "React$PropType$Primitive" ) + [t] + | Complex ArrayOf -> return (builtin_t "React$PropType$ArrayOf") + | Complex InstanceOf -> return (builtin_t "React$PropType$ArrayOf") + | Complex ObjectOf -> return (builtin_t "React$PropType$dbjectOf") + | Complex OneOf -> return (builtin_t "React$PropType$OneOf") + | Complex OneOfType -> return (builtin_t "React$PropType$OneOfType") + | Complex Shape -> return (builtin_t "React$PropType$Shape")) and internal_t ~env t = - let open Type 
in - function - | ChoiceKitT _ - | ExtendsT _ - | ReposUpperT _ -> - terr ~kind:BadInternalT (Some t) - | OptionalChainVoidT r -> type__ ~env (DefT (r, VoidT)); + Type.( + function + | ChoiceKitT _ + | ExtendsT _ + | ReposUpperT _ -> + terr ~kind:BadInternalT (Some t) + | OptionalChainVoidT r -> type__ ~env (DefT (r, bogus_trust (), VoidT))) and param_bound ~env = function - | T.DefT (_, T.MixedT _) -> return None - | bound -> type__ ~env bound >>= fun b -> return (Some b) + | T.DefT (_, _, T.MixedT _) -> return None + | bound -> + let%bind b = type__ ~env bound in + return (Some b) and default_t ~env = function - | Some d -> type__ ~env d >>= fun d -> return (Some d) + | Some d -> + let%bind d = type__ ~env d in + return (Some d) | _ -> return None - and type_param ~env { T.name; bound; polarity; default; _ } = + and type_param ~env tp = + let { T.name; bound; polarity; default; _ } = tp in let tp_polarity = type_polarity polarity in - param_bound ~env bound >>= fun tp_bound -> - default_t ~env default >>= fun tp_default -> - return { Ty.tp_name = name; tp_bound; tp_polarity; tp_default } + let%bind tp_bound = param_bound ~env bound in + let%map tp_default = default_t ~env default in + { Ty.tp_name = name; tp_bound; tp_polarity; tp_default } and opt_t ~env t = - let t, opt = match t with - | T.DefT (_, T.OptionalT t) -> (t, true) - | t -> (t, false) + let (t, opt) = + match t with + | T.OptionalT (_, t) -> (t, true) + | t -> (t, false) in - type__ ~env t >>| fun t -> (t, opt) + let%map t = type__ ~env t in + (t, opt) and type_polarity = function - | T.Positive -> Ty.Positive - | T.Negative -> Ty.Negative - | T.Neutral -> Ty.Neutral + | Polarity.Positive -> Ty.Positive + | Polarity.Negative -> Ty.Negative + | Polarity.Neutral -> Ty.Neutral (************) (* EvalT *) (************) - - and destructuring_t ~env t id r s = - let cx = Env.get_cx env in - let result = try - (* eval_selector may throw for BoundT. Catching here. 
*) - Ok (Flow_js.eval_selector cx r t s id) - with - exn -> Error (spf "Exception:%s" (Printexc.to_string exn)) + and spread = + let spread_of_ty = function + | Ty.Obj { Ty.obj_props; _ } -> obj_props + | t -> [Ty.SpreadProp t] in - match result with - | Ok tout -> type__ ~env tout - | Error msg -> terr ~kind:BadEvalT ~msg (Some t) - - and named_type_destructor_t ~env t d = - let open Type in - let from_name n ts = - mapM (type__ ~env) ts >>| fun tys -> - Ty.generic_builtin_t n tys + let obj_exact target = + match target with + | Type.Object.Spread.(Annot { make_exact }) -> return make_exact + | Type.Object.Spread.Value -> terr ~kind:BadEvalT ~msg:"spread-target-value" None in - match d with - | NonMaybeType -> from_name "$NonMaybeType" [t] - | ReadOnlyType -> from_name "$ReadOnlyType" [t] - | ValuesType -> from_name "$Values" [t] - | ElementType t' -> from_name "$ElementType" [t; t'] - | CallType ts -> from_name "$Call" (t::ts) - | ReactElementPropsType -> from_name "React$ElementProps" [t] - | ReactElementConfigType -> from_name "React$ElementConfig" [t] - | ReactElementRefType -> from_name "React$ElementRef" [t] - | PropertyType k -> - let r = mk_reason (RStringLit k) Loc.none in - from_name "$PropertyType" [t; DefT (r, SingletonStrT k)] - | TypeMap (ObjectMap t') -> from_name "$ObjMap" [t; t'] - | TypeMap (ObjectMapi t') -> from_name "$ObjMapi" [t; t'] - | TypeMap (TupleMap t') -> from_name "$TupleMap" [t; t'] - | RestType (Object.Rest.Sound, t') -> from_name "$Rest" [t; t'] - | RestType (Object.Rest.IgnoreExactAndOwn, t') -> from_name "$Diff" [t; t'] - | RestType (Object.Rest.ReactConfigMerge, _) | Bind _ | SpreadType _ -> - terr ~kind:BadEvalT ~msg:(Debug_js.string_of_destructor d) (Some t) - - and resolve_type_destructor_t ~env t id use_op reason d = - let cx = Env.get_cx env in - let trace = Trace.dummy_trace in - let result = - try - Ok (snd (Flow_js.mk_type_destructor cx ~trace use_op reason t d id)) - with - (* Allow bounds *) - | Flow_js.Not_expect_bound s -> Error s - (* But re-raise any other exception *) - | exn -> raise exn + let mk_spread ty target prefix_tys head_slice = + let obj_props = prefix_tys @ spread_of_ty ty in + let obj_props = + match head_slice with + | None -> obj_props + | Some obj -> obj_props @ spread_of_ty obj + in + let%map obj_exact = obj_exact target in + Ty.Obj { Ty.obj_props; obj_exact; obj_literal = false; obj_frozen = false (* default *) } + in + let spread_operand_slice ~env { T.Object.Spread.reason = _; prop_map; dict } = + Type.TypeTerm.( + let obj_exact = true in + let obj_frozen = false in + let obj_literal = false in + let props = SMap.fold (fun k p acc -> (k, p) :: acc) prop_map [] in + let%bind obj_props = concat_fold_m (obj_prop ~env) props in + let%bind obj_props = + match dict with + | Some { key; value; dict_name; dict_polarity } -> + let%bind dict_key = type__ ~env key in + let%bind dict_value = type__ ~env value in + return + ( Ty.IndexProp + { + Ty.dict_polarity = type_polarity dict_polarity; + dict_name; + dict_key; + dict_value; + } + :: obj_props ) + | None -> return obj_props + in + return (Ty.Obj { Ty.obj_exact; obj_frozen; obj_literal; obj_props })) + in + let spread_operand ~env = function + | T.Object.Spread.Type t -> type__ ~env t + | T.Object.Spread.Slice slice -> spread_operand_slice ~env slice in - match result with - | Ok t -> type__ ~env t - | Error _s -> named_type_destructor_t ~env t d + fun ~env ty target ts_rev head_slice -> + let%bind head_slice = + match head_slice with + | None -> return None + | Some s -> + 
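(* An informal sketch of the result assembled in this function: for a spread
   like `{ ...A, b: number }` the normalizer returns a Ty.Obj whose obj_props
   are the props of each normalized operand (or a Ty.SpreadProp when an operand
   does not normalize to an object), with obj_exact taken from the spread
   target as computed in `obj_exact` above. *)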
let%bind s = spread_operand_slice ~env s in + return (Some s) + in + let%bind tys_rev = mapM (spread_operand ~env) ts_rev in + let prefix_tys = + List.fold_left (fun acc t -> List.rev_append (spread_of_ty t) acc) [] tys_rev + in + mk_spread ty target prefix_tys head_slice + + and type_destructor_t ~env id t d = + if Env.evaluate_type_destructors env then + let cx = Env.get_cx env in + let evaluated = Context.evaluated cx in + match IMap.get id evaluated with + | Some t -> type__ ~env t + | None -> type_destructor_unevaluated ~env t d + (* fallback *) + else + type_destructor_unevaluated ~env t d - and evaluate_type_destructor ~env t id use_op reason d = + and type_destructor_unevaluated ~env t d = + let%bind ty = type__ ~env t in + match d with + | T.NonMaybeType -> return (Ty.Utility (Ty.NonMaybeType ty)) + | T.ReadOnlyType -> return (Ty.Utility (Ty.ReadOnly ty)) + | T.ValuesType -> return (Ty.Utility (Ty.Values ty)) + | T.ElementType t' -> + let%map ty' = type__ ~env t' in + Ty.Utility (Ty.ElementType (ty, ty')) + | T.CallType ts -> + let%map tys = mapM (type__ ~env) ts in + Ty.Utility (Ty.Call (ty, tys)) + | T.TypeMap (T.ObjectMap t') -> + let%map ty' = type__ ~env t' in + Ty.Utility (Ty.ObjMap (ty, ty')) + | T.TypeMap (T.ObjectMapi t') -> + let%map ty' = type__ ~env t' in + Ty.Utility (Ty.ObjMapi (ty, ty')) + | T.PropertyType k -> return (Ty.Utility (Ty.PropertyType (ty, Ty.StrLit k))) + | T.TypeMap (T.TupleMap t') -> + let%map ty' = type__ ~env t' in + Ty.Utility (Ty.TupleMap (ty, ty')) + | T.RestType (T.Object.Rest.Sound, t') -> + let%map ty' = type__ ~env t' in + Ty.Utility (Ty.Rest (ty, ty')) + | T.RestType (T.Object.Rest.IgnoreExactAndOwn, t') -> + let%map ty' = type__ ~env t' in + Ty.Utility (Ty.Diff (ty, ty')) + | T.SpreadType (target, operands, head_slice) -> spread ~env ty target operands head_slice + | T.ReactElementPropsType -> return (Ty.Utility (Ty.ReactElementPropsType ty)) + | T.ReactElementConfigType -> return (Ty.Utility (Ty.ReactElementConfigType ty)) + | T.ReactElementRefType -> return (Ty.Utility (Ty.ReactElementRefType ty)) + | T.ReactConfigType default_props -> + let%map default_props' = type__ ~env default_props in + Ty.Utility (Ty.ReactConfigType (ty, default_props')) + | (T.RestType (T.Object.Rest.ReactConfigMerge _, _) | T.Bind _) as d -> + terr ~kind:BadEvalT ~msg:(Debug_js.string_of_destructor d) None + + and latent_pred_t ~env id t = let cx = Env.get_cx env in let evaluated = Context.evaluated cx in - match IMap.get id evaluated with - | Some cached_t -> type__ ~env cached_t - | None -> resolve_type_destructor_t ~env t id use_op reason d - - and type_destructor_t ~env t id use_op reason d = - find_eval_t id >>= function - | Some (Ok t) -> return t - | Some (Error e) -> error e - | None -> - get >>= fun in_st -> - (* To store the complete result (including error case) we need to run - evaluation outside the monad and then update the state of the main monad. 
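(* For reference, an informal example of the unevaluated path above: a use of
   `$Diff<A, B>` arrives as `T.RestType (T.Object.Rest.IgnoreExactAndOwn, B)`
   applied to A and is kept as the utility type `Ty.Utility (Ty.Diff (A, B))`
   rather than being expanded. *)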
- *) - let result, out_st = run in_st ( - evaluate_type_destructor ~env t id use_op reason d - ) in - put out_st >>= fun _ -> - update_eval_t_cache id result >>= fun _ -> - begin match result with - | Ok ty -> return ty - | Error e -> error e end + let t' = + match IMap.get id evaluated with + | Some evaled_t -> evaled_t + | None -> t + in + type__ ~env t' and eval_t ~env t id = function - | Type.DestructuringT (r, s) -> - destructuring_t ~env t id r s - | Type.TypeDestructorT (use_op, reason, d) -> - type_destructor_t ~env t id use_op reason d - - and module_t env reason t = - match desc_of_reason reason with - | RModule name - | RCommonJSExports name - | RUntypedModule name -> - let symbol = symbol_from_reason env reason name in - return (Ty.Module symbol) - | _ -> - terr ~kind:UnsupportedTypeCtor (Some t) - - and use_t ~env = function - | T.UseT (_, t) -> type__ ~env t - | T.ReposLowerT (_, _, u) -> use_t ~env u - | u -> - let msg = spf "Use: %s" (Type.string_of_use_ctor u) in - terr ~kind:BadUse ~msg None + | Type.LatentPredT _ -> latent_pred_t ~env id t + | Type.TypeDestructorT (_, _, d) -> type_destructor_t ~env id t d + + and module_t = + let mk_module env symbol_opt exports = + let cjs_export = optM (type__ ~env) T.(exports.cjs_export) in + let exports = + Context.find_exports Env.(env.genv.cx) T.(exports.exports_tmap) + |> SMap.map snd + |> SMap.bindings + |> mapM (type__ ~env |> sndMapM) + in + let%bind exports = exports in + let%map cjs_export = cjs_export in + Ty.Module (symbol_opt, Ty.{ exports; cjs_export }) + in + fun env reason exports t -> + match desc_of_reason reason with + | RModule name + | RCommonJSExports name + | RUntypedModule name -> + let symbol = symbol_from_reason env reason name in + mk_module env (Some symbol) exports + | RExports -> mk_module env None exports + | _ -> terr ~kind:UnsupportedTypeCtor (Some t) + + and uses_t = + let rec uses_t_aux ~env acc uses = + match uses with + | [] -> + begin + match acc with + | [] -> return Ty.NoUpper + | hd :: tl -> return (Ty.SomeKnownUpper (Ty.mk_inter (hd, tl))) + end + | T.UseT (_, t) :: rest -> + let%bind t = type__ ~env t in + uses_t_aux ~env (t :: acc) rest + | T.ReposLowerT (_, _, u) :: rest -> uses_t_aux ~env acc (u :: rest) + (* skip these *) + | T.CJSExtractNamedExportsT _ :: rest -> uses_t_aux ~env acc rest + | u :: _ -> return (Ty.SomeUnknownUpper (T.string_of_use_ctor u)) + in + (fun ~env uses -> uses_t_aux ~env [] uses) and merged_t ~env uses = - if Env.fall_through_merged env - then return Ty.Top - else mapM (use_t ~env) uses >>| uniq_inter + match%bind uses_t ~env uses with + | Ty.SomeUnknownUpper _ -> + (* un-normalizable *) + terr ~kind:BadUse None + | Ty.NoUpper -> + (* shouldn't happen - MergedT has at least one use by construction *) + return (mk_empty (Ty.NoLowerWithUpper Ty.NoUpper)) + | Ty.SomeKnownUpper t -> + (* return the recorded use type *) + return t let run_type ~options ~genv ~imported_names ~tparams state t = let env = Env.init ~options ~genv ~tparams ~imported_names in - run state (type__ ~env t) + let (result, state) = run state (type__ ~env t) in + let result = + match result with + | Ok t when options.Env.optimize_types -> + let { Env.merge_bot_and_any_kinds = merge_kinds; _ } = options in + Ok (Ty_utils.simplify_type ~merge_kinds ~sort:false t) + | _ -> result + in + (result, state) (* Before we start normalizing the input type we populate our environment with aliases that are in scope due to typed imports. 
These appear inside @@ -1424,89 +1702,102 @@ end = struct module Imports = struct open File_sig - let from_imported_locs local imported_locs acc = - let { local_loc; _ } = imported_locs in - SMap.add local local_loc acc - - let from_imported_locs_map map acc = - SMap.fold (fun _remote remote_map acc -> - SMap.fold (fun local imported_locs_nel acc -> - Nel.fold_left (fun acc imported_locs -> - from_imported_locs local imported_locs acc - ) acc imported_locs_nel - ) remote_map acc - ) map acc + (* Collect the names and locations of types that are available as we scan + * the imports. Later we'll match them with some remote defining loc. *) + type acc_t = Ty.imported_ident list + + let from_imported_locs_map ~import_mode map (acc : acc_t) = + SMap.fold + (fun _remote remote_map acc -> + SMap.fold + (fun local imported_locs_nel acc -> + Nel.fold_left + (fun acc { local_loc; _ } -> (local_loc, local, import_mode) :: acc) + acc + imported_locs_nel) + remote_map + acc) + map + acc - let from_binding binding acc = + let rec from_binding ~import_mode binding (acc : acc_t) = match binding with - | BindIdent (loc, x) -> SMap.add x loc acc - | BindNamed map -> from_imported_locs_map map acc + | BindIdent (loc, name) -> (loc, name, import_mode) :: acc + | BindNamed map -> + List.fold_left (fun acc (_, binding) -> from_binding ~import_mode binding acc) acc map - let from_bindings bindings_opt acc = - Option.value_map ~default:acc ~f:(fun bs -> from_binding bs acc) - bindings_opt + let from_bindings ~import_mode bindings_opt acc = + match bindings_opt with + | Some bindings -> from_binding ~import_mode bindings acc + | None -> acc - let from_require require acc = + let from_require require (acc : acc_t) = match require with - | Require { source=_; require_loc=_; bindings } -> - from_bindings bindings acc - | Import { source=_; named; ns=_; types; typesof; typesof_ns=_; } -> + | Require { source = _; require_loc = _; bindings } -> + from_bindings ~import_mode:Ty.ValueMode bindings acc + | Import { import_loc = _; source = _; named; ns = _; types; typesof; typesof_ns = _ } -> (* TODO import namespaces (`ns`) as modules that might contain imported types *) acc - |> from_imported_locs_map named - |> from_imported_locs_map types - |> from_imported_locs_map typesof + |> from_imported_locs_map ~import_mode:Ty.ValueMode named + |> from_imported_locs_map ~import_mode:Ty.TypeMode types + |> from_imported_locs_map ~import_mode:Ty.TypeofMode typesof | ImportDynamic _ - | Import0 _ -> acc + | Import0 _ -> + acc - let from_requires requires = - List.fold_left (fun acc require -> - from_require require acc - ) SMap.empty requires + let extract_imported_idents requires = + List.fold_left (fun acc require -> from_require require acc) [] requires - let extract_schemes type_table imported_locs = - SMap.fold (fun x loc acc -> - match Type_table.find_type_info type_table loc with - | Some (_, e, _) -> SMap.add x e acc - | None -> acc - ) imported_locs SMap.empty + let extract_schemes typed_ast (imported_locs : acc_t) = + List.fold_left + (fun acc (loc, name, import_mode) -> + match Typed_ast_utils.find_exact_match_annotation typed_ast loc with + | Some scheme -> (name, loc, import_mode, scheme) :: acc + | None -> acc) + [] + imported_locs - let extract_ident ~options ~genv (x, scheme) = Ty.( + let extract_ident ~options ~genv scheme = let { Type.TypeScheme.tparams; type_ = t } = scheme in - let env = Env.init ~options ~genv ~tparams ~imported_names:SMap.empty in - type__ ~env t >>| fun ty -> - match ty with - | TypeAlias { 
ta_name = Symbol (p, _) ; _ } - | Class (Symbol (p, _), _, _) -> - Some (x, loc_of_provenance p) + let imported_names = ALocMap.empty in + let env = Env.init ~options ~genv ~tparams ~imported_names in + match%map type__ ~env t with + | Ty.TypeAlias { Ty.ta_name = { Ty.def_loc; _ }; _ } + | Ty.ClassDecl ({ Ty.def_loc; _ }, _) + | Ty.InterfaceDecl ({ Ty.def_loc; _ }, _) -> + Some def_loc + | Ty.Utility (Ty.Class (Ty.Generic ({ Ty.def_loc; _ }, _, None))) -> + (* This is an acceptable proxy only if the class is not polymorphic *) + Some def_loc | _ -> None - ) - - let extract_idents ~options ~genv imported_schemes = - mapM (extract_ident ~options ~genv) (SMap.bindings imported_schemes) >>| - List.fold_left (fun acc x -> - match x with - | Some (x, id) -> SMap.add x id acc - | None -> acc - ) SMap.empty + let normalize_imports ~options ~genv imported_schemes : Ty.imported_ident ALocMap.t = + let state = State.empty in + let (_, result) = + List.fold_left + (fun (st, acc) (name, loc, import_mode, scheme) -> + match run st (extract_ident ~options ~genv scheme) with + | (Ok (Some def_loc), st) -> (st, ALocMap.add def_loc (loc, name, import_mode) acc) + | (Ok None, st) -> + (* unrecognizable remote type *) + (st, acc) + | (Error _, st) -> + (* normalization error *) + (st, acc)) + (state, ALocMap.empty) + imported_schemes + in + result end - let run_imports ~options ~genv state = - let open Imports in - let file_sig = genv.Env.file_sig in - let requires = File_sig.(file_sig.module_sig.requires) in - let type_table = genv.Env.type_table in - let imported_locs = from_requires requires in - let imported_schemes = extract_schemes type_table imported_locs in - match run state (extract_idents ~options ~genv imported_schemes) with - | Ok x, state -> x, state - | Error _, state -> - (* Fall back to empty imports map. 
- * TODO provide more fine grained handling of errors - *) - SMap.empty, state - + let run_imports ~options ~genv = + Imports.( + let { Env.file_sig = { File_sig.module_sig = { File_sig.requires; _ }; _ }; typed_ast; _ } = + genv + in + extract_imported_idents requires + |> extract_schemes typed_ast + |> normalize_imports ~options ~genv) end open NormalizerMonad @@ -1514,40 +1805,52 @@ open NormalizerMonad (* Exposed API *) let from_schemes ~options ~genv schemes = - let imported_names, state = run_imports ~options ~genv State.empty in - let _, result = ListUtils.fold_map (fun state (a, scheme) -> - let { Type.TypeScheme.tparams; type_ = t } = scheme in - match run_type ~options ~genv ~imported_names ~tparams state t with - | Ok t, state -> state, (a, Ok t) - | Error s, state -> state, (a, Error s) - ) state schemes in + let imported_names = run_imports ~options ~genv in + let (_, result) = + ListUtils.fold_map + (fun state (a, scheme) -> + let { Type.TypeScheme.tparams; type_ = t } = scheme in + match run_type ~options ~genv ~imported_names ~tparams state t with + | (Ok t, state) -> (state, (a, Ok t)) + | (Error s, state) -> (state, (a, Error s))) + State.empty + schemes + in result let from_types ~options ~genv ts = - let imported_names, state = run_imports ~options ~genv State.empty in - let _, result = ListUtils.fold_map (fun state (a, t) -> - match run_type ~options ~genv ~imported_names ~tparams:[] state t with - | Ok t, state -> state, (a, Ok t) - | Error s, state -> state, (a, Error s) - ) state ts in + let imported_names = run_imports ~options ~genv in + let (_, result) = + ListUtils.fold_map + (fun state (a, t) -> + match run_type ~options ~genv ~imported_names ~tparams:[] state t with + | (Ok t, state) -> (state, (a, Ok t)) + | (Error s, state) -> (state, (a, Error s))) + State.empty + ts + in result let from_scheme ~options ~genv scheme = - let imported_names, state = run_imports ~options ~genv State.empty in + let imported_names = run_imports ~options ~genv in let { Type.TypeScheme.tparams; type_ = t } = scheme in - let result, _ = run_type ~options ~genv ~imported_names ~tparams state t in + let (result, _) = run_type ~options ~genv ~imported_names ~tparams State.empty t in result let from_type ~options ~genv t = - let imported_names, state = run_imports ~options ~genv State.empty in - let result, _ = run_type ~options ~genv ~imported_names ~tparams:[] state t in + let imported_names = run_imports ~options ~genv in + let (result, _) = run_type ~options ~genv ~imported_names ~tparams:[] State.empty t in result let fold_hashtbl ~options ~genv ~f ~g ~htbl init = - let imported_names, state = run_imports ~options ~genv State.empty in - let result, _ = Hashtbl.fold (fun loc x (acc, state) -> - let { Type.TypeScheme.tparams; type_ = t } = g x in - let result, state = run_type ~options ~genv ~imported_names ~tparams state t in - f acc (loc, result), state - ) htbl (init, state) in + let imported_names = run_imports ~options ~genv in + let (result, _) = + Hashtbl.fold + (fun loc x (acc, state) -> + let { Type.TypeScheme.tparams; type_ = t } = g x in + let (result, state) = run_type ~options ~genv ~imported_names ~tparams state t in + (f acc (loc, result), state)) + htbl + (init, State.empty) + in result diff --git a/src/typing/ty_normalizer.mli b/src/typing/ty_normalizer.mli index f8049e02998..22c9bc1d323 100644 --- a/src/typing/ty_normalizer.mli +++ b/src/typing/ty_normalizer.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. 
and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -12,9 +12,11 @@ type error_kind = | BadBoundT | BadCallProp | BadClassT + | BadThisClassT | BadPoly | BadTypeAlias | BadTypeApp + | BadInlineInterfaceExtends | BadInternalT | BadInstanceT | BadEvalT @@ -22,37 +24,30 @@ type error_kind = | ShadowTypeParam | UnsupportedTypeCtor | UnsupportedUseCtor + | TypeTooBig type error = error_kind * string -val error_to_string: error -> string +val error_to_string : error -> string -val from_type: - options:options -> genv:genv -> - Type.t -> - (Ty.t, error) result +val from_type : options:options -> genv:genv -> Type.t -> (Ty.t, error) result -val from_scheme: - options:options -> genv:genv -> - Type.TypeScheme.t -> - (Ty.t, error) result +val from_scheme : options:options -> genv:genv -> Type.TypeScheme.t -> (Ty.t, error) result (* The following differ from mapping `from_type` on each input as it folds over the input elements of the input propagating the state (caches) after each transformation to the next element. *) -val from_types: - options:options -> genv:genv -> - ('a * Type.t) list -> - ('a * (Ty.t, error) result) list - -val from_schemes: - options:options -> genv:genv -> - ('a * Type.TypeScheme.t) list -> - ('a * (Ty.t, error) result) list - -val fold_hashtbl: - options:options -> genv:genv -> - f:('a -> (Loc.t * (Ty.t, error) result) -> 'a) -> +val from_types : + options:options -> genv:genv -> ('a * Type.t) list -> ('a * (Ty.t, error) result) list + +val from_schemes : + options:options -> genv:genv -> ('a * Type.TypeScheme.t) list -> ('a * (Ty.t, error) result) list + +val fold_hashtbl : + options:options -> + genv:genv -> + f:('a -> 'loc * (Ty.t, error) result -> 'a) -> g:('b -> Type.TypeScheme.t) -> - htbl: (Loc.t, 'b) Hashtbl.t -> - 'a -> 'a + htbl:('loc, 'b) Hashtbl.t -> + 'a -> + 'a diff --git a/src/typing/ty_normalizer_env.ml b/src/typing/ty_normalizer_env.ml index 3fab2ef13b0..b87a7b36219 100644 --- a/src/typing/ty_normalizer_env.ml +++ b/src/typing/ty_normalizer_env.ml @@ -1,103 +1,102 @@ (** - * Copyright (c) 2018-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +module File_sig = File_sig.With_ALoc type options = { - (* MergedT is somewhat unconventional. It introduces UseT's that the - normalizer is not intended to handle. If this flag is set to true, all - instances of MergedT will fall through and return Top. Otherwise, we - attempt to convert the use_t's under the MergedT. This operation only - succeeds if the use is a UseT and the underlying type is successfully - normalized. - - Pick `true` if the result does not need to be "parseable", e.g. coverage. - *) + * normalizer is not intended to handle. If this flag is set to true, all + * instances of MergedT will fall through and return Top. Otherwise, we + * attempt to convert the use_t's under the MergedT. This operation only + * succeeds if the use is a UseT and the underlying type is successfully + * normalized. + * + * Pick `true` if the result does not need to be "parseable", e.g. coverage. 
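 * (A sketch of the `false` path, as implemented by `uses_t`/`merged_t` in
 *  ty_normalizer.ml: plain `UseT`s among the recorded uses are normalized and
 *  intersected, while most other use constructors make the MergedT
 *  un-normalizable and produce an error.)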
+ *)
 fall_through_merged: bool;
- (* Expand the signatures of built-in functions, such as:
- Function.prototype.apply: (thisArg: any, argArray?: any): any
- *)
+ * Function.prototype.apply: (thisArg: any, argArray?: any): any
+ *)
 expand_internal_types: bool;
-
- (* AnnotT is used to hide information of the lower bound flowing in and
- provides instead a type interface that then flows to the upper bounds.
- For the normalizer this is good point to cut down on the recursion to
- AnnotT's lower bounds and instead return a type constructed from the name
- associated with the annotation. Typically this coincides with types used as
- annotations, so this is a natural type type to return for type queries.
- *)
+ (* If set to `true` type alias names will be expanded to the types they represent.
+ *
+ * WARNING: This can cause a blow-up in the size of the produced types.
+ *)
 expand_type_aliases: bool;
- (* The normalizer keeps a stack of type parameters that are in scope. This stack
- may contain the same name twice (but with different associated locations).
- This is a case of shadowing. For certain uses of normalized types (e.g. suggest)
- we do not wish to allow the generation of type parameters that are shadowed by
- another definition. For example the inferred type for `z` in:
-
- function outer<T>(y: T) {
- function inner<T>(x: T, z) { inner(x, y); }
- }
-
- is the _outer_ T. Adding the annotation ": T" for `z` would not be correct.
- This flags toggles this behavior.
- *)
+ * may contain the same name twice (but with different associated locations).
+ * This is a case of shadowing. For certain uses of normalized types (e.g. suggest)
+ * we do not wish to allow the generation of type parameters that are shadowed by
+ * another definition. For example the inferred type for `z` in:
+ *
+ * function outer<T>(y: T) {
+ * function inner<T>(x: T, z) { inner(x, y); }
+ * }
+ *
+ * is the _outer_ T. Adding the annotation ": T" for `z` would not be correct.
+ * This flag toggles this behavior.
+ *)
 flag_shadowed_type_params: bool;
-}
-
-let default_opts = {
- fall_through_merged = false;
- expand_internal_types = false;
- expand_type_aliases = false;
- flag_shadowed_type_params = false;
-}
-
-let mk_opts
- ~fall_through_merged
- ~expand_internal_types
- ~expand_type_aliases
- ~flag_shadowed_type_params =
-{ fall_through_merged;
- expand_internal_types;
- expand_type_aliases;
- flag_shadowed_type_params;
+ (* Makes the normalizer more aggressive in preserving inferred literal types *)
+ preserve_inferred_literal_types: bool;
+ (* If this flag is set to `true` then the normalizer will attempt to reuse the
+ cached results of evaluated type-destructors. If this is set to `false`, then
+ instead it will try to use:
+ - a potentially attendant type-alias annotation, or
+ - reuse the utility type that corresponds to the specific type-destructor.
+
+ Choosing 'false' will typically result in smaller produced types, which makes
+ it a more appropriate option for codemods.
+ *)
+ evaluate_type_destructors: bool;
+ (* Run an optimization pass that removes duplicates from unions and intersections.
+ *
+ * WARNING May be slow for large types
+ *)
+ optimize_types: bool;
+ (* Omits type params if they match the defaults, e.g:
+ *
+ * Given `type Foo<A, B = Baz>`, `Foo<Bar, Baz>` is reduced to `Foo<Bar>`
+ *
+ * WARNING: May be slow due to the structural equality checks that this necessitates.
+ *)
+ omit_targ_defaults: bool;
+ (* Consider all kinds of Bot and Any the same when simplifying types.
+ * + * The normalized type Ty.Bot may correspond to either the `Empty` type, not + * lower-bounds or the internal types MatchingPropT or TypeDestructorTriggerT. + * These types are not easy to normalize, but may still encode some constraint. + * When using normalized types for codemods we might want to know if there might + * be some constraints that we missing in the normalized type. + * + * Any can be due to an annotation or implicitly arising from inference. + *) + merge_bot_and_any_kinds: bool; } (* This is a global environment that should not change during normalization *) type genv = { - (* File the query originated from *) file: File_key.t; - (* Full (merged) context *) cx: Context.t; - - (* Type table of the current file *) - type_table: Type_table.t; - + (* Typed AST of the current file *) + typed_ast: (ALoc.t, ALoc.t * Type.t) Flow_ast.program; (* The file_sig of the current file *) file_sig: File_sig.t; } -let mk_genv ~full_cx ~file ~type_table ~file_sig = { - file; - cx = full_cx; - type_table; - file_sig; -} +let mk_genv ~full_cx ~file ~typed_ast ~file_sig = { file; cx = full_cx; typed_ast; file_sig } type t = { - (* Does not change. Set once in the beginning. *) genv: genv; - (* Normalization parameters *) options: options; - (* Type parameters in scope The parameter environment is useful in handling inferred type parameters. @@ -122,16 +121,13 @@ type t = { the actual bounds and return those instead. So the normalized type here would be: Empty | Mixed, which simplifies to Mixed. *) tparams: Type.typeparam list; - (* In determining whether a symbol is Local, Imported, Remote, etc, it is - useful to keep the list of imported names and the corresponding + useful to keep a map of imported names and the corresponding location available. We can then make this decision by comparing the source file with the current context's file information. *) - imported_names: Loc.t SMap.t; - + imported_names: Ty.imported_ident Loc_collections.ALocMap.t; (* For debugging purposes mostly *) depth: int; - (* The default behavior with type aliases is to return the name of the alias instead of the expansion of the type. 
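(* Illustrative sketch, not part of the upstream diff: this hunk removes the old
 * [default_opts]/[mk_opts] helpers, so callers of ty_normalizer_env.ml now spell
 * out every flag. A record a coverage-style consumer might plausibly build,
 * following the field comments above; the binding name is hypothetical. *)
let coverage_like_options : Ty_normalizer_env.options =
  {
    Ty_normalizer_env.fall_through_merged = true; (* output need not be parseable *)
    expand_internal_types = false;
    expand_type_aliases = false; (* avoid the expansion blow-up noted above *)
    flag_shadowed_type_params = false;
    preserve_inferred_literal_types = false;
    evaluate_type_destructors = false; (* keeps produced types smaller *)
    optimize_types = false; (* the dedup pass may be slow on large types *)
    omit_targ_defaults = false;
    merge_bot_and_any_kinds = true;
  }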
When normalizing type aliases `TypeT t`, however, we proceed by recovering the name of the alias (say A) and then @@ -152,24 +148,29 @@ type t = { under_type_alias: string option; } -let init ~options ~genv ~tparams ~imported_names = { - options; - genv; - depth = 0; - tparams; - imported_names; - under_type_alias = None; -} +let init ~options ~genv ~tparams ~imported_names = + { options; genv; depth = 0; tparams; imported_names; under_type_alias = None } let descend e = { e with depth = e.depth + 1 } let get_cx e = e.genv.cx let fall_through_merged e = e.options.fall_through_merged + let expand_internal_types e = e.options.expand_internal_types + let expand_type_aliases e = e.options.expand_type_aliases + +let evaluate_type_destructors e = e.options.evaluate_type_destructors + let flag_shadowed_type_params e = e.options.flag_shadowed_type_params + +let preserve_inferred_literal_types e = e.options.preserve_inferred_literal_types + +let omit_targ_defaults e = e.options.omit_targ_defaults + +let merge_bot_and_any_kinds e = e.options.merge_bot_and_any_kinds + let current_file e = e.genv.file -let add_typeparam env typeparam = - { env with tparams = typeparam :: env.tparams } +let add_typeparam env typeparam = { env with tparams = typeparam :: env.tparams } diff --git a/src/typing/type.ml b/src/typing/type.ml index 2d9c7f11a44..0f4e9861b49 100644 --- a/src/typing/type.ml +++ b/src/typing/type.ml @@ -1,10 +1,11 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) +open Polarity open Reason open Utils_js @@ -33,13 +34,14 @@ open Utils_js proof of the typing derivation based on these reasons as axioms. *) type ident = int + type name = string + type index = int type tvar = reason * ident module rec TypeTerm : sig - type t = (* open type variable *) (* A type variable (tvar) is an OpenT(reason, id) where id is an int index @@ -50,13 +52,10 @@ module rec TypeTerm : sig but that context and its tvars may later be merged into other contexts. *) | OpenT of tvar - (*************) (* def types *) (*************) - - | DefT of reason * def_t - + | DefT of reason * Trust.trust_rep * def_t (* type expression whose evaluation is deferred *) (* Usually a type expression is evaluated by splitting it into a def type and a use type, and flowing the former to the latter: the def type is the @@ -69,47 +68,43 @@ module rec TypeTerm : sig of evaluation. The explicit form simplifies other tasks, like substitution, but otherwise works in much the same way as usual. *) | EvalT of t * defer_use_t * int - (* bound type variable *) - | BoundT of reason * string * polarity + | BoundT of reason * string * Polarity.t (* existential type variable *) | ExistsT of reason - (* this-abstracted class *) | ThisClassT of reason * t (* this instantiation *) | ThisTypeAppT of reason * t * t * t list option - + (* type application *) + | TypeAppT of reason * use_op * t * t list (* exact *) | ExactT of reason * t - - | FunProtoT of reason (* Function.prototype *) - | ObjProtoT of reason (* Object.prototype *) - + | FunProtoT of reason (* Function.prototype *) + | ObjProtoT of reason (* Object.prototype *) (* Signifies the end of the prototype chain. Distinct from NullT when it appears as an upper bound of an object type, otherwise the same. 
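(* Illustrative sketch, not part of the upstream diff: matching on the reshaped
 * [TypeTerm.t] head constructors above. Note that [DefT] now carries a
 * [Trust.trust_rep] as its second argument, so older patterns of the form
 * [DefT (r, def)] become [DefT (r, trust, def)]. The helper name is
 * hypothetical; constructors are qualified as they would be from outside the
 * recursive module. *)
let describe_head : TypeTerm.t -> string = function
  | TypeTerm.OpenT (_, id) -> Printf.sprintf "tvar #%d" id
  | TypeTerm.DefT (_, _trust, _def) -> "definition type"
  | TypeTerm.EvalT (_, _, _) -> "deferred type expression"
  | TypeTerm.ThisClassT (_, _) -> "this-abstracted class"
  | _ -> "something else"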
*) | NullProtoT of reason - - | FunProtoApplyT of reason (* Function.prototype.apply *) - | FunProtoBindT of reason (* Function.prototype.bind *) - | FunProtoCallT of reason (* Function.prototype.call *) - - (* generalizations of AnyT *) - | AnyWithLowerBoundT of t (* any supertype of t *) - | AnyWithUpperBoundT of t (* any subtype of t *) - + | FunProtoApplyT of reason (* Function.prototype.apply *) + | FunProtoBindT of reason (* Function.prototype.bind *) + | FunProtoCallT of reason (* Function.prototype.call *) (* a merged tvar that had no lowers *) | MergedT of reason * use_t list - (* constrains some properties of an object *) | ShapeT of t | MatchingPropT of reason * string * t - + (* & types *) + | IntersectionT of reason * InterRep.t + (* | types *) + | UnionT of reason * UnionRep.t + (* ? types *) + | MaybeT of reason * t + (* type of an optional parameter *) + | OptionalT of reason * t (* collects the keys of an object *) | KeysT of reason * t - (* annotations *) - (** A type that annotates a storage location performs two functions: + (* A type that annotates a storage location performs two functions: * it constrains the types of values stored into the location @@ -150,7 +145,6 @@ module rec TypeTerm : sig wrapped tvar are T1 and T2, then the current rules would flow T1 | T2 to upper bounds, and would flow lower bounds to T1 & T2. **) | AnnotT of reason * t * bool (* use_desc *) - (* Opaque type aliases. The opaquetype.opaque_id is its unique id, opaquetype.underlying_t is * the underlying type, which we only allow access to when inside the file the opaque type * was defined, and opaquetype.super_t is the super type, which we use when an OpaqueT is @@ -160,24 +154,19 @@ module rec TypeTerm : sig * type is defined in a libdef. We also keep track of the name of the opaque type in * opaquetype.name for pretty printing. *) | OpaqueT of reason * opaquetype - (* Stores exports (and potentially other metadata) for a module *) | ModuleT of reason * exporttypes * bool (* is_strict *) - - (** Here's to the crazy ones. The misfits. The rebels. The troublemakers. + (* Here's to the crazy ones. The misfits. The rebels. The troublemakers. The round pegs in the square holes. **) (* types that should never appear in signatures *) | InternalT of internal_t - (* upper bound trigger for type destructors *) | TypeDestructorTriggerT of use_op * reason * (reason * bool) option * destructor * t - (* Sigil representing functions that the type system is not expressive enough to annotate, so we customize their behavior internally. *) | CustomFunT of reason * custom_fun_kind - - (** Predicate types **) + (* Predicate types **) (* `OpenPredT (reason, base_t, m_pos, m_neg)` wraps around a base type `base_t` and encodes additional information that hold in conditional @@ -187,14 +176,14 @@ module rec TypeTerm : sig instances, which are the keys to the two maps. *) | OpenPredT of reason * t * predicate Key_map.t * predicate Key_map.t - | ReposT of reason * t + | AnyT of reason * any_source and def_t = | NumT of number_literal literal | StrT of string literal | BoolT of bool option - | EmptyT + | EmptyT of empty_flavor | MixedT of mixed_flavor | NullT | VoidT @@ -217,12 +206,6 @@ module rec TypeTerm : sig | CharSetT of String_utils.CharSet.t (* type aliases *) | TypeT of type_t_kind * t - - | AnyT - - (* type of an optional parameter *) - | OptionalT of t - (* A polymorphic type is like a type-level "function" that, when applied to lists of type arguments, generates types. 
Just like a function, a polymorphic type has a list of type parameters, represented as bound @@ -237,35 +220,18 @@ module rec TypeTerm : sig it is forced only when polymorphic types are applied. *) (* polymorphic type *) - | PolyT of typeparam list * t * int - (* type application *) - | TypeAppT of use_op * t * t list - - (* ? types *) - | MaybeT of t - - (* & types *) - | IntersectionT of InterRep.t - - (* | types *) - | UnionT of UnionRep.t - - (* specializations of AnyT *) - | AnyObjT (* any object *) - | AnyFunT (* any function *) - + | PolyT of ALoc.t * typeparam Nel.t * t * int (* Type that wraps object types for the CustomFunT(Idx) function *) | IdxWrapper of t + (* React$AbstractComponent *) + | ReactAbstractComponentT of { + config: t; + instance: t; + } and defer_use_t = - (* type of a variable / parameter / property extracted from a pattern *) - | DestructuringT of reason * selector + | LatentPredT of reason * predicate (* destructors that extract parts of various kinds of types *) - (* TODO: in principle it should be possible to encode destructors as - selectors (see above), but currently we don't because some selectors are - programmed to do more than just destruct types---e.g., they handle - defaults---and these additional behaviors cannot be covered by a simple - implementation of destructors. *) | TypeDestructorT of use_op * reason * destructor and internal_t = @@ -282,73 +248,165 @@ module rec TypeTerm : sig | Refinement | WidenEnv - and root_use_op = - | Addition of { op: reason; left: reason; right: reason } - | AssignVar of { var: reason option; init: reason } - | Cast of { lower: reason; upper: reason } - | ClassExtendsCheck of { def: reason; name: reason; extends: reason } - | ClassImplementsCheck of { def: reason; name: reason; implements: reason } - | ClassOwnProtoCheck of { prop: string; own_loc: Loc.t option; proto_loc: Loc.t option } - | Coercion of { from: reason; target: reason } + and 'loc virtual_root_use_op = + | ObjectSpread of { op: 'loc virtual_reason } + | ObjectChain of { op: 'loc virtual_reason } + | Addition of { + op: 'loc virtual_reason; + left: 'loc virtual_reason; + right: 'loc virtual_reason; + } + | AssignVar of { + var: 'loc virtual_reason option; + init: 'loc virtual_reason; + } + | Cast of { + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | ClassExtendsCheck of { + def: 'loc virtual_reason; + name: 'loc virtual_reason; + extends: 'loc virtual_reason; + } + | ClassImplementsCheck of { + def: 'loc virtual_reason; + name: 'loc virtual_reason; + implements: 'loc virtual_reason; + } + | ClassOwnProtoCheck of { + prop: string; + own_loc: 'loc option; + proto_loc: 'loc option; + } + | Coercion of { + from: 'loc virtual_reason; + target: 'loc virtual_reason; + } + | DeleteProperty of { + lhs: 'loc virtual_reason; + prop: 'loc virtual_reason; + } + | DeleteVar of { var: 'loc virtual_reason } | FunCall of { - op: reason; - fn: reason; - args: reason list; + op: 'loc virtual_reason; + fn: 'loc virtual_reason; + args: 'loc virtual_reason list; + local: bool; (* Whether we can blame back to the function def *) } | FunCallMethod of { - op: reason; - fn: reason; - prop: reason; - args: reason list; + op: 'loc virtual_reason; + fn: 'loc virtual_reason; + prop: 'loc virtual_reason; + args: 'loc virtual_reason list; + local: bool; (* Whether we can blame back to the function def *) + } + | FunReturnStatement of { value: 'loc virtual_reason } + | FunImplicitReturn of { + fn: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | 
GeneratorYield of { value: 'loc virtual_reason } + | GetProperty of 'loc virtual_reason + | InitField of { + op: 'loc virtual_reason; + body: 'loc virtual_reason; } - | FunReturnStatement of { value: reason } - | FunImplicitReturn of { fn: reason; upper: reason } - | GeneratorYield of { value: reason } - | GetProperty of reason | Internal of internal_use_op - | JSXCreateElement of { op: reason; component: reason } - | ReactCreateElementCall of { op: reason; component: reason; children: Loc.t } - | ReactGetIntrinsic of { literal: reason } - | Speculation of use_op - | TypeApplication of { type': reason } - | SetProperty of { lhs: reason; prop: reason; value: reason } + | JSXCreateElement of { + op: 'loc virtual_reason; + component: 'loc virtual_reason; + } + | ReactCreateElementCall of { + op: 'loc virtual_reason; + component: 'loc virtual_reason; + children: 'loc; + } + | ReactGetIntrinsic of { literal: 'loc virtual_reason } + | Speculation of 'loc virtual_use_op + | TypeApplication of { type': 'loc virtual_reason } + | SetProperty of { + lhs: 'loc virtual_reason; + prop: 'loc virtual_reason; + value: 'loc virtual_reason; + } | UnknownUse - and frame_use_op = - | ArrayElementCompatibility of { lower: reason; upper: reason } - | FunCompatibility of { lower: reason; upper: reason } - | FunMissingArg of { n: int; op: reason; def: reason } - | FunParam of { n: int; name: string option; lower: reason; upper: reason } - | FunRestParam of { lower: reason; upper: reason } - | FunReturn of { lower: reason; upper: reason } - | ImplicitTypeParam of Loc.t - | IndexerKeyCompatibility of { lower: reason; upper: reason } + and 'loc virtual_frame_use_op = + | ArrayElementCompatibility of { + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | FunCompatibility of { + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | FunMissingArg of { + n: int; + op: 'loc virtual_reason; + def: 'loc virtual_reason; + } + | FunParam of { + n: int; + name: string option; + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | FunRestParam of { + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | FunReturn of { + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | ImplicitTypeParam + | IndexerKeyCompatibility of { + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } + | CallFunCompatibility of { n: int } + | TupleMapFunCompatibility of { value: 'loc virtual_reason } + | ObjMapFunCompatibility of { value: 'loc virtual_reason } + | ObjMapiFunCompatibility of { + key: 'loc virtual_reason; + value: 'loc virtual_reason; + } | PropertyCompatibility of { prop: string option; - lower: reason; - upper: reason; - is_sentinel: bool; + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; } | ReactConfigCheck - | TupleElementCompatibility of { n: int; lower: reason; upper: reason } + | ReactGetConfig of { polarity: Polarity.t } + | TupleElementCompatibility of { + n: int; + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + } | TypeArgCompatibility of { name: string; - targ: reason; - lower: reason; - upper: reason; - polarity: polarity; + targ: 'loc virtual_reason; + lower: 'loc virtual_reason; + upper: 'loc virtual_reason; + polarity: Polarity.t; } | TypeParamBound of { name: string } | UnifyFlip - and use_op = - | Op of root_use_op - | Frame of frame_use_op * use_op + and 'loc virtual_use_op = + | Op of 'loc virtual_root_use_op + | Frame of 'loc virtual_frame_use_op * 'loc virtual_use_op + + and use_op = ALoc.t virtual_use_op + + and 
root_use_op = ALoc.t virtual_root_use_op + + and frame_use_op = ALoc.t virtual_frame_use_op and use_t = (* def types can be used as upper bounds *) | UseT of use_op * t - (*************) (* use types *) (*************) @@ -358,14 +416,16 @@ module rec TypeTerm : sig | CallT of use_op * reason * funcalltype (* The last position is an optional type that probes into the type of the method called. This will be primarily used for type-table bookkeeping. *) - | MethodT of use_op * (* call *) reason * (* lookup *) reason * propref * funcalltype * t option + | MethodT of + use_op * (* call *) reason * (* lookup *) reason * propref * funcalltype * t option (* Similar to the last element of the MethodT *) - | SetPropT of use_op * reason * propref * write_ctx * t * t option + | SetPropT of use_op * reason * propref * set_mode * write_ctx * t * t option (* The boolean flag indicates whether or not it is a static lookup. We cannot know this when * we generate the constraint, since the lower bound may be an unresolved OpenT. If it * resolves to a ClassT, we flip the flag to true, which causes us to check the private static * fields when the InstanceT ~> SetPrivatePropT constraint is processsed *) - | SetPrivatePropT of use_op * reason * string * class_binding list * bool * t * t option + | SetPrivatePropT of + use_op * reason * string * set_mode * class_binding list * bool * t * t option | GetPropT of use_op * reason * propref * t (* For shapes *) | MatchPropT of use_op * reason * propref * t @@ -377,25 +437,21 @@ module rec TypeTerm : sig In particular, a computed property in the object initializer users SetElemT to initialize the property value, but in order to avoid race conditions we need to ensure that reads happen after writes. *) - | SetElemT of use_op * reason * t * t * t option (*tout *) + | SetElemT of use_op * reason * t * set_mode * t * t option (*tout *) | GetElemT of use_op * reason * t * t | CallElemT of (* call *) reason * (* lookup *) reason * t * funcalltype | GetStaticsT of reason * t_out - | GetProtoT of reason * t_out | SetProtoT of reason * t - (* repositioning *) | ReposLowerT of reason * bool (* use_desc *) * use_t | ReposUseT of reason * bool (* use_desc *) * use_op * t - (* operations on runtime types, such as classes and functions *) - | ConstructorT of use_op * reason * t list option * call_arg list * t + | ConstructorT of use_op * reason * targ list option * call_arg list * t | SuperT of use_op * reason * derived_type | ImplementsT of use_op * t | MixinT of reason * t | ToStringT of reason * use_t - (* overloaded +, could be subsumed by general overloading *) | AdderT of use_op * reason * bool * t * t (* overloaded relational operator, could be subsumed by general @@ -403,33 +459,26 @@ module rec TypeTerm : sig | ComparatorT of reason * bool * t (* unary minus operator on numbers, allows negative number literals *) | UnaryMinusT of reason * t - | AssertArithmeticOperandT of reason | AssertBinaryInLHST of reason | AssertBinaryInRHST of reason | AssertForInRHST of reason - | AssertRestParamT of reason - (* operation specifying a type refinement via a predicate *) | PredicateT of predicate * t - (* like PredicateT, GuardT guards a subsequent flow with a predicate on an incoming type. 
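(* Illustrative sketch, not part of the upstream diff: [SetPropT] above now
 * threads a [set_mode] (Assign | Delete, defined later in this file) between
 * the propref and the write context, so `a.x = e` and `delete a.x` can share
 * one constructor. The helper names are hypothetical; TypeTerm's constructors
 * are assumed to be in scope. *)
let assignment_use ~use_op ~reason ~propref ~tin =
  SetPropT (use_op, reason, propref, Assign, Normal, tin, None)

let deletion_use ~use_op ~reason ~propref ~void_t =
  (* a deletion behaves like writing void, except into indexers; see set_mode *)
  SetPropT (use_op, reason, propref, Delete, Normal, void_t, None)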
Unlike PredicateT, the subsequent flow (if any) uses an arbitrary LB specified in the GuardT value, rather than the filtered result of the predicate itself *) | GuardT of predicate * t * t - (* == *) | EqT of reason * bool * t - (* logical operators *) | AndT of reason * t * t | OrT of reason * t * t | NullishCoalesceT of reason * t * t | NotT of reason * t - (* operation on polymorphic types *) - (** SpecializeT(_, _, _, cache, targs, tresult) instantiates a polymorphic type + (* SpecializeT(_, _, _, cache, targs, tresult) instantiates a polymorphic type with type arguments targs, and flows the result into tresult. If cache is set, it looks up a cache of existing instantiations for the type parameters of the polymorphic type, unifying the type arguments with @@ -442,30 +491,27 @@ module rec TypeTerm : sig (* operation on this-abstracted classes *) | ThisSpecializeT of reason * t * cont (* variance check on polymorphic types *) - | VarianceCheckT of reason * t list * polarity - + | VarianceCheckT of reason * t list * Polarity.t | TypeAppVarianceCheckT of use_op * reason * reason * (t * t) list - (* In TypeAppT (c, ts) ~> TypeAppT (c, ts) we need to check both cs against * each other which means that we must concretize them first. *) | ConcretizeTypeAppsT of (* The use_op from our original TypeAppT ~> TypeAppT *) - use_op * - (* The type args and reason for the TypeAppT that is currently the - * lower bound *) - (t list * use_op * reason) * - (* The polymorphic type, its type args, and reason for the TypeAppT that - * is currently the upper bound. *) - (t * t list * use_op * reason) * - (* A boolean which answers the question: Is the TypeAppT that is - * currently our lower bound in fact our upper bound in the original - * TypeAppT ~> TypeAppT? If the answer is yes then we need to flip our - * tuples and flow the polymorphic type currently in our upper bound as - * the lower bound. See the implementation of flow_js for more clarity. *) - bool - + use_op + * (* The type args and reason for the TypeAppT that is currently the + * lower bound *) + (t list * use_op * reason) + * (* The polymorphic type, its type args, and reason for the TypeAppT that + * is currently the upper bound. *) + (t * t list * use_op * reason) + * (* A boolean which answers the question: Is the TypeAppT that is + * currently our lower bound in fact our upper bound in the original + * TypeAppT ~> TypeAppT? If the answer is yes then we need to flip our + * tuples and flow the polymorphic type currently in our upper bound as + * the lower bound. See the implementation of flow_js for more clarity. *) + bool (* operation on prototypes *) - (** LookupT(_, strict, try_ts_on_failure, x, lookup_action) looks for + (* LookupT(_, strict, try_ts_on_failure, x, lookup_action) looks for property x in an object type and emits a constraint according to the provided lookup_action. @@ -479,13 +525,12 @@ module rec TypeTerm : sig (3) strict = Some reason, so the position in reason is blamed. 
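(* Illustrative sketch, not part of the upstream diff: the strictness cases
 * (1)-(3) above correspond to the [lookup_kind] constructors that appear
 * further down in this diff. A minimal dispatch a failure path might use;
 * [report_missing_prop] is hypothetical and TypeTerm's constructors are
 * assumed to be in scope. *)
let on_lookup_failure ~report_missing_prop = function
  | Strict blame_reason ->
    (* case (3): the position recorded in the reason is blamed *)
    report_missing_prop blame_reason
  | NonstrictReturning (_defaults, _test_info) ->
    (* case (1): not strict; fall back instead of erroring *)
    ()
  | ShadowRead (_, _)
  | ShadowWrite _ ->
    (* case (2): the object is still unresolved; record a shadow property *)
    ()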
**) | LookupT of reason * lookup_kind * t list * propref * lookup_action - (* operations on objects *) (* Resolves the object into which the properties are assigned *) - | ObjAssignToT of reason * t * t * obj_assign_kind + | ObjAssignToT of use_op * reason * t * t * obj_assign_kind (* Resolves the object from which the properties are assigned *) - | ObjAssignFromT of reason * t * t * obj_assign_kind + | ObjAssignFromT of use_op * reason * t * t * obj_assign_kind | ObjFreezeT of reason * t | ObjRestT of reason * string list * t | ObjSealT of reason * t @@ -493,30 +538,25 @@ module rec TypeTerm : sig | ObjTestProtoT of reason * t_out (* test that something is object-like, returning a default type otherwise *) | ObjTestT of reason * t * t - + (* Assign properties to module.exports. The only interesting case is when module.exports is a + function type, where we set the statics field of the function type. *) + | ModuleExportsAssignT of reason * t * t (* assignment rest element in array pattern *) | ArrRestT of use_op * reason * int * t - (* Guarded unification *) | UnifyT of t * t (* bidirectional *) - (* unifies with incoming concrete lower bound *) | BecomeT of reason * t - (* Keys *) | GetKeysT of reason * use_t | HasOwnPropT of use_op * reason * string literal - (* Values *) | GetValuesT of reason * t - (* Element access *) | ElemT of use_op * reason * t * elem_action - (* exact ops *) | MakeExactT of reason * cont - - (** + (* * Module import handling * * Why do the following have a is_strict flag, when that's already present in the context @@ -530,18 +570,21 @@ module rec TypeTerm : sig | ImportTypeT of reason * string * t | ImportTypeofT of reason * string * t | AssertImportIsValueT of reason * string - (* Module export handling *) | CJSExtractNamedExportsT of reason - * (* local ModuleT *) (reason * exporttypes * bool (* is_strict *)) - * (* 't_out' to receive the resolved ModuleT *) t_out + * (* local ModuleT *) + (reason * exporttypes * bool) + * (* is_strict *) + (* 't_out' to receive the resolved ModuleT *) t_out | CopyNamedExportsT of reason * t * t_out | CopyTypeExportsT of reason * t * t_out | ExportNamedT of reason * bool (* skip_duplicates *) - * (Loc.t option * t) SMap.t (* exports_tmap *) + * (ALoc.t option * t) SMap.t + (* exports_tmap *) + * export_kind * t_out | ExportTypeT of reason @@ -549,39 +592,29 @@ module rec TypeTerm : sig * string (* export_name *) * t (* target_module_t *) * t_out - + | AssertExportIsTypeT of reason * string (* export name *) * t_out (* Map a FunT over a structure *) - | MapTypeT of reason * type_map * t_out - + | MapTypeT of use_op * reason * type_map * t_out | ObjKitT of use_op * reason * Object.resolve_tool * Object.tool * t_out - | ReactKitT of use_op * reason * React.tool - | ChoiceKitUseT of reason * choice_use_tool - (* tools for preprocessing intersections *) | IntersectionPreprocessKitT of reason * intersection_preprocess_tool - | DebugPrintT of reason | DebugSleepT of reason - - | SentinelPropTestT of reason * t * string * bool * Enum.star * t_out - + | SentinelPropTestT of reason * t * string * bool * UnionEnum.star * t_out | IdxUnwrap of reason * t_out | IdxUnMaybeifyT of reason * t_out - | OptionalChainT of reason * reason * (opt_use_t * t_out) Nel.t - | InvariantT of reason - (* Function predicate uses *) - (** + (* * The following two uses are used when a predicate function is called to * establish a predicate over one of its arguments. 
*) - (** + (* * The intended use for CallLatentPredT is to flow a predicated function * type to it. This function will refine the unrefined argument of * CallLatentPredT and use the second of the two type arguments as the @@ -597,8 +630,7 @@ module rec TypeTerm : sig * The boolean part is the sense of the conditional check. *) | CallLatentPredT of reason * bool * index * t * t - - (** + (* * CallOpenPredT is fired subsequently, after processing the flow * described above. This flow is necessary since the return type of the * predicate function (which determines the predicate it expresses) @@ -611,8 +643,7 @@ module rec TypeTerm : sig * parameter. *) | CallOpenPredT of reason * bool * Key.t * t * t - - (** + (* * Even for the limited use of function predicates that is currently * allowed, we still have to build machinery to handle subtyping for * predicated function types. @@ -651,14 +682,12 @@ module rec TypeTerm : sig * useful later on. *) | SubstOnPredT of reason * substitution * t - - (** + (* * `RefineT (reason, pred, tvar)` is an instruction to refine an incoming * flow using the predicate `pred`. The result will be stored in `tvar`, * which is expected to be a type variable. *) | RefineT of reason * predicate * t - (* Spread elements show up in a bunch of places: array literals, function * parameters, function call arguments, method arguments. constructor * arguments, etc. Often we have logic that depends on what the spread @@ -666,28 +695,48 @@ module rec TypeTerm : sig * of spread and non-spread elements to resolve, and then constructs * whatever type it resolves to *) | ResolveSpreadT of use_op * reason * resolve_spread_type - (* CondT (_, then_t_opt, else_t, tout) is a branch, which flows `else_t` * into `tout` when the resolved lower bound is `empty`. If the resolved * lower bound is non-empty, it will flow either `Some then_t` or the lower * bound itself into `tout`. *) | CondT of reason * t option * t * t_out - (* util for deciding subclassing relations *) | ExtendsUseT of use_op * reason * t list * t * t + (* Models the GetProps React functionality *) + | ReactPropsToOut of reason * t + | ReactInToProps of reason * t + (* Used to calculate a destructured binding. If annot is true, the lower + * bound is an annotation (0->1), and t_out will be unified with the + * destructured type. The caller should wrap the tvar with an AnnotT. *) + | DestructuringT of reason * destruct_kind * selector * t_out + + (* Bindings created from destructuring annotations should themselves act like + * annotations. That is, `var {p}: {p: string}; p = 0` should be an error, + * because `p` should behave like a `string` annotation. + * + * We accomplish this by wrapping the binding itself in an AnnotT type. The + * wrapped type must be 0->1, which is enforced with BecomeT. + * + * Since DestructuringT uses with the DestructAnnot kind should only encounter + * annotations, the set of lower bounds will be a subset of all possible + * types. The only important cases to handle are disjunctive types that would + * violate the 0->1 property, like UnionT and MaybeT. 
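(* Illustrative sketch, not part of the upstream diff: choosing the
 * [destruct_kind] introduced just below. Per the comment above, destructuring
 * against an annotation must stay 0->1 (DestructAnnot), while destructuring an
 * inferred type may keep accumulating lower bounds (DestructInfer). The helper
 * name and the [has_annotation] flag are hypothetical; TypeTerm's constructors
 * are assumed to be in scope. *)
let mk_destructuring_use ~reason ~has_annotation selector tout =
  let kind =
    if has_annotation then
      DestructAnnot
    else
      DestructInfer
  in
  DestructuringT (reason, kind, selector, tout)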
*) + and destruct_kind = + | DestructAnnot + | DestructInfer (* use_ts which can be part of an optional chain, with t_out factored out *) and opt_use_t = - | OptCallT of use_op * reason * opt_funcalltype - | OptGetPropT of use_op * reason * propref - | OptGetPrivatePropT of use_op * reason * string * class_binding list * bool - | OptTestPropT of reason * ident * propref - | OptGetElemT of use_op * reason * t + | OptCallT of use_op * reason * opt_funcalltype + | OptGetPropT of use_op * reason * propref + | OptGetPrivatePropT of use_op * reason * string * class_binding list * bool + | OptTestPropT of reason * ident * propref + | OptGetElemT of use_op * reason * t and opt_state = - | NonOptional - | NewChain - | ContinueChain + | NonOptional + | NewChain + | ContinueChain and specialize_cache = reason list option @@ -695,32 +744,26 @@ module rec TypeTerm : sig | AndP of predicate * predicate | OrP of predicate * predicate | NotP of predicate - (* mechanism to handle binary tests where both sides need to be evaluated *) | LeftP of binary_test * t | RightP of binary_test * t - (* Only track locations of existence checks created when walking the AST *) - | ExistsP (* truthy *) of Loc.t option (* Location of the existence check *) + | ExistsP (* truthy *) of ALoc.t option (* Location of the existence check *) | NullP (* null *) | MaybeP (* null or undefined *) - - | SingletonBoolP of bool (* true or false *) - | SingletonStrP of Loc.t * bool * string (* string literal *) - | SingletonNumP of Loc.t * bool * number_literal - + | SingletonBoolP of ALoc.t * bool (* true or false *) + | SingletonStrP of ALoc.t * bool * string (* string literal *) + | SingletonNumP of ALoc.t * bool * number_literal | BoolP (* boolean *) | FunP (* function *) | NumP (* number *) | ObjP (* object *) | StrP (* string *) + | SymbolP (* symbol *) | VoidP (* undefined *) - | ArrP (* Array.isArray *) - - (* `if (a.b)` yields `flow (a, PredicateT(PropExistsP (reason, "b", loc), tout))` *) - | PropExistsP of reason * string * Loc.t option (* Location of the property in the existence check *) - + (* `if (a.b)` yields `flow (a, PredicateT(PropExistsP ("b", loc), tout))` *) + | PropExistsP of string * ALoc.t option (* Location of the property in the existence check *) (* Encondes the latent predicate associated with the i-th parameter of a function, whose type is the second element of the triplet. *) | LatentP of t * index @@ -738,7 +781,7 @@ module rec TypeTerm : sig | Truthy | AnyLiteral - and number_literal = (float * string) + and number_literal = float * string and mixed_flavor = | Mixed_everything @@ -746,13 +789,47 @@ module rec TypeTerm : sig | Mixed_non_maybe | Mixed_non_null | Mixed_non_void - | Empty_intersection + | Mixed_function + | Mixed_symbol + + and empty_flavor = + | Bottom + | Zeroed + + and any_source = + | Annotated + | AnyError + | Unsound of unsoundness_kind + | Untyped + + (* Tracks the kinds of unsoundness inherent in Flow. 
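(* Illustrative sketch, not part of the upstream diff: composing the
 * [predicate] constructors shown above. A loose `x != null` check rules out
 * both null and undefined, which is exactly [NotP MaybeP]; a bare truthiness
 * test without a recorded location is [ExistsP None]. The binding names are
 * hypothetical and TypeTerm's constructors are assumed to be in scope. *)
let non_maybe_pred = NotP MaybeP

let truthy_pred = ExistsP None

let string_or_number_pred = OrP (StrP, NumP)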
If you can't find a kind that matches + your use case, make one *) + and unsoundness_kind = + | BoundFunctionThis + | ComputedNonLiteralKey + | Constructor + | DummyStatic + | Existential + | Exports + | FunctionPrototype + | InferenceHooks + | InstanceOfRefinement + | Merged + | ResolveSpread + | Unchecked + | Unimplemented + | UnresolvedType + | WeakContext + + and fun_param = string option * t + + and fun_rest_param = string option * ALoc.t * t (* used by FunT *) and funtype = { this_t: t; - params: (string option * t) list; - rest_param: (string option * Loc.t * t) option; + params: fun_param list; + rest_param: fun_rest_param option; return_t: t; closure_t: int; is_predicate: bool; @@ -763,31 +840,36 @@ module rec TypeTerm : sig (* Used by CallT and similar constructors *) and funcalltype = { call_this_t: t; - call_targs: t list option; + call_targs: targ list option; call_args_tlist: call_arg list; call_tout: t; call_closure_t: int; call_strict_arity: bool; } - and opt_funcalltype = t * t list option * call_arg list * int * bool + and targ = + (* This tvar gets lower bounds from the instantiations of _. It is used to power type-services + * like type-at-pos and should not be used for type checking + *) + | ImplicitArg of tvar + | ExplicitArg of t + + and opt_funcalltype = t * targ list option * call_arg list * int * bool and call_arg = - | Arg of t - | SpreadArg of t + | Arg of t + | SpreadArg of t and arrtype = - | ArrayAT of t * t list option - (* TupleAT of elemt * tuple_types. Why do tuples carry around elemt? Well, so - * that they don't need to recompute their general type when you do - * myTuple[expr] - *) - | TupleAT of t * t list - (* ROArrayAT(elemt) is the super type for all tuples and arrays for which - * elemt is a supertype of every element type *) - | ROArrayAT of t - (* EmptyAT is the bottom type for all arrays and tuples *) - | EmptyAT + | ArrayAT of t * t list option + (* TupleAT of elemt * tuple_types. Why do tuples carry around elemt? Well, so + * that they don't need to recompute their general type when you do + * myTuple[expr] + *) + | TupleAT of t * t list + (* ROArrayAT(elemt) is the super type for all tuples and arrays for which + * elemt is a supertype of every element type *) + | ROArrayAT of t and objtype = { flags: flags; @@ -800,10 +882,10 @@ module rec TypeTerm : sig (* Object.assign(target, source1, ...source2) first resolves target then the sources. *) and obj_assign_kind = - (* Obj.assign(target, source) with flag indicating whether source must be exact *) - | ObjAssign of { assert_exact: bool } - (* Obj.assign(target, ...source) *) - | ObjSpreadAssign + (* Obj.assign(target, source) with flag indicating whether source must be exact *) + | ObjAssign of { assert_exact: bool } + (* Obj.assign(target, ...source) *) + | ObjSpreadAssign and cont = | Lower of use_op * t @@ -812,11 +894,12 @@ module rec TypeTerm : sig (* Instance types are represented as an InstanceT while statics are ObjT. For superclass compatibility checking, it suffices to just check the properties instead of creating the full InstanceT/ObjT. *) - and derived_type = Derived of { - own: property SMap.t; - proto: property SMap.t; - static: property SMap.t; - } + and derived_type = + | Derived of { + own: property SMap.t; + proto: property SMap.t; + static: property SMap.t; + } (* LookupT is a general-purpose tool for traversing prototype chains in search of properties. 
In all cases, if the property is found somewhere along the @@ -852,22 +935,55 @@ module rec TypeTerm : sig along the prototype chain, to ensure that the entire proto chain is subtype compatible. *) and lookup_kind = - | Strict of reason - | NonstrictReturning of (t * t) option * (ident * (reason * reason)) option - | ShadowRead of reason option * Properties.id Nel.t - | ShadowWrite of Properties.id Nel.t + | Strict of reason + | NonstrictReturning of (t * t) option * (ident * (reason * reason)) option + | ShadowRead of reason option * Properties.id Nel.t + | ShadowWrite of Properties.id Nel.t and lookup_action = - | RWProp of use_op * t (* original target *) * t (* in/out type *) * rw - | LookupProp of use_op * Property.t - | SuperProp of use_op * Property.t - | MatchProp of use_op * t - - and rw = - | Read - | Write of write_ctx * t option (* original type of field *) - - and write_ctx = ThisInCtor | Normal + | ReadProp of { + use_op: use_op; + obj_t: t; + tout: t; + } + | WriteProp of { + use_op: use_op; + obj_t: t; + prop_tout: t option; + tin: t; + write_ctx: write_ctx; + mode: set_mode; + } + | LookupProp of use_op * Property.t + | SuperProp of use_op * Property.t + | MatchProp of use_op * t + + and write_ctx = + | ThisInCtor + | Normal + + (* Property writes can either be assignments (from `a.x = e`) or deletions + (from `delete a.x`). For the most part, we can treat these the same + (flowing void into `a.x` in a deletion), but if the property being deleted + originates in an indexer, we need to know not to flow `void` into the + indexer's type, which would cause an error. The `set_mode` type records + whether a property write is an assignment or a deletion, to help handle + this special case. *) + and set_mode = + | Delete + | Assign + + (* See the above comment on `set_mode`--this type is the other half, which + records whether a property originates in an indexer or from a property + map. This is relevant when the property is being deleted--we should flow + `void` to the property's type if it originates in a property map (to ensure + that its type is nullable and raise an error if it's not) but not if it's + from an indexer, where our current semantics are intentionally unsound with + respect to undefined anyways. *) + and property_source = + | DynamicProperty + | PropertyMapProperty + | IndexerProperty (* WriteElem has a `tout` parameter to serve as a trigger for ordering operations. We only need this in one place: object literal initialization. @@ -876,7 +992,7 @@ module rec TypeTerm : sig need to ensure that reads happen after writes. 
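(* Illustrative sketch, not part of the upstream diff: building the reshaped
 * [WriteProp] record above for an assignment versus a deletion. Per the
 * [set_mode] comment, a deletion behaves like writing void unless the property
 * comes from an indexer, so the caller supplies the appropriate [tin] and only
 * [mode] differs. The helper name is hypothetical; TypeTerm's constructors are
 * assumed to be in scope. *)
let mk_write_action ~use_op ~obj_t ~prop_tout ~tin ~deleting =
  WriteProp
    {
      use_op;
      obj_t;
      prop_tout;
      tin;
      write_ctx = Normal;
      mode =
        ( if deleting then
          Delete
        else
          Assign );
    }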
*) and elem_action = | ReadElem of t - | WriteElem of t * t option (* tout *) + | WriteElem of t * t option (* tout *) * set_mode | CallElem of reason (* call *) * funcalltype and propref = @@ -897,48 +1013,50 @@ module rec TypeTerm : sig dict_name: string option; key: t; value: t; - dict_polarity: polarity; + dict_polarity: Polarity.t; } - and polarity = Negative | Neutral | Positive - (* Locations refer to the location of the identifier, if one exists *) and property = - | Field of Loc.t option * t * polarity - | Get of Loc.t option * t - | Set of Loc.t option * t - | GetSet of Loc.t option * t * Loc.t option * t - | Method of Loc.t option * t + | Field of ALoc.t option * t * Polarity.t + | Get of ALoc.t option * t + | Set of ALoc.t option * t + | GetSet of ALoc.t option * t * ALoc.t option * t + | Method of ALoc.t option * t (* This has to go here so that Type doesn't depend on Scope *) and class_binding = { - class_binding_id: ident; + class_binding_id: ALoc.t; class_private_fields: Properties.id; class_private_static_fields: Properties.id; } and insttype = { - class_id: ident; - type_args: (string * reason * t * polarity) list; + class_id: ALoc.t; + type_args: (string * reason * t * Polarity.t) list; own_props: Properties.id; proto_props: Properties.id; inst_call_t: int option; initialized_fields: SSet.t; initialized_static_fields: SSet.t; has_unknown_react_mixins: bool; - structural: bool; + inst_kind: instance_kind; } + and instance_kind = + | ClassKind + | InterfaceKind of { inline: bool } + and opaquetype = { - opaque_id: int; + opaque_id: ALoc.t; underlying_t: t option; super_t: t option; - opaque_type_args: (string * reason * t * polarity) list; + opaque_type_args: (string * reason * t * Polarity.t) list; opaque_name: string; } and exporttypes = { - (** + (* * tmap used to store individual, named ES exports as generated by `export` * statements in a module. Note that this includes `export type` as well. * @@ -947,15 +1065,13 @@ module rec TypeTerm : sig * it has any "type" exports via `export type ...`. *) exports_tmap: Exports.id; - - (** + (* * This stores the CommonJS export type when applicable and is used as the * exact return type for calls to require(). This slot doesn't apply to pure * ES modules. 
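(* Illustrative sketch, not part of the upstream diff: consuming the
 * [exporttypes] record described above. Only [cjs_export] is inspected here;
 * the named exports behind [exports_tmap] must be resolved through the
 * context, which is elided. The helper name is hypothetical. *)
let require_return_type (e : TypeTerm.exporttypes) : TypeTerm.t option =
  (* [Some t] means a CommonJS module whose require() returns [t];
   * [None] means a pure ES module, so callers use the named exports instead. *)
  e.TypeTerm.cjs_export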
*) cjs_export: t option; - - (** + (* * Sometimes we claim the module exports any or Object, implying that it * has every named export *) @@ -967,42 +1083,51 @@ module rec TypeTerm : sig | ImportTypeof | ImportValue + and export_kind = + | ExportType + | ExportValue + | ReExport + and typeparam = { reason: reason; name: string; bound: t; - polarity: polarity; + polarity: Polarity.t; default: t option; } + and typeparams_nonempty = ALoc.t * typeparam Nel.t + + and typeparams = typeparams_nonempty option + and selector = - | Prop of string - | Elem of t - | ObjRest of string list - | ArrRest of int - | Default - | Become - | Refine of predicate + | Prop of string * bool + | Elem of t + | ObjRest of string list + | ArrRest of int + | Default and destructor = - | NonMaybeType - | PropertyType of string - | ElementType of t - | Bind of t - | ReadOnlyType - | SpreadType of Object.Spread.target * t list - | RestType of Object.Rest.merge_mode * t - | ValuesType - | CallType of t list - | TypeMap of type_map - | ReactElementPropsType - | ReactElementConfigType - | ReactElementRefType + | NonMaybeType + | PropertyType of string + | ElementType of t + | Bind of t + | ReadOnlyType + | SpreadType of + Object.Spread.target * Object.Spread.operand list * Object.Spread.operand_slice option + | RestType of Object.Rest.merge_mode * t + | ValuesType + | CallType of t list + | TypeMap of type_map + | ReactElementPropsType + | ReactElementConfigType + | ReactElementRefType + | ReactConfigType of t and type_map = - | TupleMap of t - | ObjectMap of t - | ObjectMapi of t + | TupleMap of t + | ObjectMap of t + | ObjectMapi of t and prototype = t @@ -1015,47 +1140,42 @@ module rec TypeTerm : sig and t_out = t and custom_fun_kind = - (* builtins *) - | ObjectAssign - | ObjectGetPrototypeOf - | ObjectSetPrototypeOf - - (* common community functions *) - | Compose of bool - - (* 3rd party libs *) - | ReactPropType of React.PropType.t - | ReactCreateClass - | ReactCreateElement - | ReactCloneElement - | ReactElementFactory of t - - (* Facebookisms *) - | Idx - | TypeAssertIs - | TypeAssertThrows - | TypeAssertWraps - - (* Internal tools *) - | DebugPrint - | DebugThrow - | DebugSleep - - and choice_tool = - | Trigger + (* builtins *) + | ObjectAssign + | ObjectGetPrototypeOf + | ObjectSetPrototypeOf + (* common community functions *) + | Compose of bool + (* 3rd party libs *) + | ReactPropType of React.PropType.t + | ReactCreateClass + | ReactCreateElement + | ReactCloneElement + | ReactElementFactory of t + (* Facebookisms *) + | Idx + | TypeAssertIs + | TypeAssertThrows + | TypeAssertWraps + (* Internal tools *) + | DebugPrint + | DebugThrow + | DebugSleep + + and choice_tool = Trigger and choice_use_tool = - | FullyResolveType of ident - | TryFlow of int * spec + | FullyResolveType of ident + | TryFlow of int * spec and intersection_preprocess_tool = - | ConcretizeTypes of t list * t list * t * use_t - | SentinelPropTest of bool * string * t * t * t - | PropExistsTest of bool * string * t * t + | ConcretizeTypes of t list * t list * t * use_t + | SentinelPropTest of bool * string * t * t * t + | PropExistsTest of bool * string * t * t and spec = - | UnionCases of use_op * t * UnionRep.t * t list - | IntersectionCases of t list * use_t + | UnionCases of use_op * t * UnionRep.t * t list + | IntersectionCases of t list * use_t (* A dependent predicate type consisting of: - the result type of the test (not always bool) @@ -1063,8 +1183,7 @@ module rec TypeTerm : sig test is true - a map of refinements which hold if the 
test is false *) - and dep_preds = - t * predicate Key_map.t * predicate Key_map.t + and dep_preds = t * predicate Key_map.t * predicate Key_map.t and resolve_spread_type = { (* This is the list of elements that are already resolved (that is have no @@ -1078,60 +1197,58 @@ module rec TypeTerm : sig } and unresolved_param = - | UnresolvedArg of t - | UnresolvedSpreadArg of t + | UnresolvedArg of t + | UnresolvedSpreadArg of t and resolved_param = - | ResolvedArg of t - | ResolvedSpreadArg of reason * arrtype - | ResolvedAnySpreadArg of reason + | ResolvedArg of t + | ResolvedSpreadArg of reason * arrtype + | ResolvedAnySpreadArg of reason and spread_resolve = - (* Once we've finished resolving spreads, try to construct a tuple *) - | ResolveSpreadsToTuple of int * t * t (* elem type, array type *) - (* Once we've finished resolving spreads, try to construct an array with known element types *) - | ResolveSpreadsToArrayLiteral of int * t * t (* elem type, array type *) - (* Once we've finished resolving spreads, try to construct a non-tuple array *) - | ResolveSpreadsToArray of t * t (* elem type, array type *) - - (* Once we've finished resolving spreads for a function's arguments, call the - * function with those arguments *) - | ResolveSpreadsToMultiflowCallFull of int * funtype - | ResolveSpreadsToMultiflowSubtypeFull of int * funtype - (* We can also call custom functions. *) - | ResolveSpreadsToCustomFunCall of int * custom_fun_kind * t - - (* Once we've finished resolving spreads for a function's arguments, - * partially apply the arguments to the function and return the resulting - * function (basically what func.bind(that, ...args) does) *) - | ResolveSpreadsToMultiflowPartial of int * funtype * reason * t - - | ResolveSpreadsToCallT of funcalltype * t + (* Once we've finished resolving spreads, try to construct a tuple *) + | ResolveSpreadsToTuple of int * t * t (* elem type, array type *) + (* Once we've finished resolving spreads, try to construct an array with known element types *) + | ResolveSpreadsToArrayLiteral of int * t * t (* elem type, array type *) + (* Once we've finished resolving spreads, try to construct a non-tuple array *) + | ResolveSpreadsToArray of t * t (* elem type, array type *) + (* Once we've finished resolving spreads for a function's arguments, call the + * function with those arguments *) + | ResolveSpreadsToMultiflowCallFull of int * funtype + | ResolveSpreadsToMultiflowSubtypeFull of int * funtype + (* We can also call custom functions. *) + | ResolveSpreadsToCustomFunCall of int * custom_fun_kind * t + (* Once we've finished resolving spreads for a function's arguments, + * partially apply the arguments to the function and return the resulting + * function (basically what func.bind(that, ...args) does) *) + | ResolveSpreadsToMultiflowPartial of int * funtype * reason * t + | ResolveSpreadsToCallT of funcalltype * t (* Add some flavor to the TypeT constructor. For now this information is only * used by the type normalizer. *) and type_t_kind = - | TypeAliasKind (* type A = T *) - | TypeParamKind - | OpaqueKind (* opaque type O [: T] = T' *) - | ImportTypeofKind (* import typeof *) - | ImportClassKind (* import type { SomeClass } from ... *) - | ImportFunKind (* import type { SomeFunction } from ... *) - | InstanceKind - -end = TypeTerm - -and Enum : sig + | TypeAliasKind (* type A = T *) + | TypeParamKind + | OpaqueKind (* opaque type O [: T] = T' *) + | ImportTypeofKind (* import typeof *) + | ImportClassKind (* import type { SomeClass } from ... 
*) + | InstanceKind +end = + TypeTerm + +and UnionEnum : sig type t = | Str of string | Num of TypeTerm.number_literal | Bool of bool | Void | Null - val compare: t -> t -> int + + val compare : t -> t -> int + type star = | One of t - | Many of EnumSet.t + | Many of UnionEnumSet.t end = struct type t = | Str of string @@ -1139,85 +1256,42 @@ end = struct | Bool of bool | Void | Null + let compare = Pervasives.compare + type star = | One of t - | Many of EnumSet.t + | Many of UnionEnumSet.t end -and EnumSet: Set.S with type elt = Enum.t = Set.Make(Enum) +and UnionEnumSet : (Set.S with type elt = UnionEnum.t) = Set.Make (UnionEnum) -and Polarity : sig - type t = TypeTerm.polarity +and Property : sig + type t = TypeTerm.property - val compat: t * t -> bool - val inv: t -> t - val mult: t * t -> t - val of_rw: TypeTerm.rw -> t + val polarity : t -> Polarity.t - val string: t -> string - val sigil: t -> string -end = struct - open TypeTerm + val read_t : t -> TypeTerm.t option - type t = polarity + val write_t : ?ctx:TypeTerm.write_ctx -> t -> TypeTerm.t option - (* Subtype relation for polarities, interpreting neutral as positive & - negative: whenever compat(p1,p2) holds, things that have polarity p1 can - appear in positions that have polarity p2. *) - let compat = function - | Positive, Positive - | Negative, Negative - | Neutral, _ -> true - | _ -> false + val read_loc : t -> ALoc.t option - let inv = function - | Positive -> Negative - | Negative -> Positive - | Neutral -> Neutral - - let mult = function - | Positive, Positive -> Positive - | Negative, Negative -> Positive - | Neutral, _ | _, Neutral -> Neutral - | _ -> Negative - - let of_rw = function - | Read -> Positive - | Write _ -> Negative - - (* printer *) - let string = function - | Positive -> "covariant" - | Negative -> "contravariant" - | Neutral -> "invariant" - - let sigil = function - | Positive -> "+" - | Negative -> "-" - | Neutral -> "" -end + val write_loc : t -> ALoc.t option -and Property : sig - type t = TypeTerm.property + val first_loc : t -> ALoc.t option - val polarity: t -> Polarity.t + val iter_t : (TypeTerm.t -> unit) -> t -> unit - val read_t: t -> TypeTerm.t option - val write_t: ?ctx:TypeTerm.write_ctx -> t -> TypeTerm.t option - val access: TypeTerm.rw -> t -> TypeTerm.t option + val fold_t : ('a -> TypeTerm.t -> 'a) -> 'a -> t -> 'a - val read_loc: t -> Loc.t option - val write_loc: t -> Loc.t option - val first_loc: t -> Loc.t option + val map_t : (TypeTerm.t -> TypeTerm.t) -> t -> t - val iter_t: (TypeTerm.t -> unit) -> t -> unit - val fold_t: ('a -> TypeTerm.t -> 'a) -> 'a -> t -> 'a - val map_t: (TypeTerm.t -> TypeTerm.t) -> t -> t - val ident_map_t: (TypeTerm.t -> TypeTerm.t) -> t -> t - val forall_t: (TypeTerm.t -> bool) -> t -> bool + val ident_map_t : (TypeTerm.t -> TypeTerm.t) -> t -> t - val assert_field: t -> TypeTerm.t + val forall_t : (TypeTerm.t -> bool) -> t -> bool + + val assert_field : t -> TypeTerm.t end = struct open TypeTerm @@ -1232,20 +1306,22 @@ end = struct let read_t = function | Field (_, t, polarity) -> - if Polarity.compat (polarity, Positive) - then Some t - else None + if Polarity.compat (polarity, Positive) then + Some t + else + None | Get (_, t) -> Some t | Set _ -> None | GetSet (_, t, _, _) -> Some t | Method (_, t) -> Some t - let write_t ?(ctx=Normal) = function + let write_t ?(ctx = Normal) = function | Field (_, t, _) when ctx = ThisInCtor -> Some t | Field (_, t, polarity) -> - if Polarity.compat (polarity, Negative) - then Some t - else None + if Polarity.compat 
(polarity, Negative) then + Some t + else + None | Get _ -> None | Set (_, t) -> Some t | GetSet (_, _, _, t) -> Some t @@ -1256,36 +1332,39 @@ end = struct | Get (loc, _) | GetSet (loc, _, _, _) | Method (loc, _) -> - loc + loc | Set _ -> None let write_loc = function | Field (loc, _, _) | Set (loc, _) | GetSet (_, _, loc, _) -> - loc + loc | Method _ | Get _ -> - None + None let first_loc = function | Field (loc, _, _) | Get (loc, _) | Set (loc, _) | Method (loc, _) -> - loc + loc | GetSet (loc1_opt, _, loc2_opt, _) -> - match loc1_opt, loc2_opt with - | None, None -> None - | Some loc, None | None, Some loc -> Some loc - | Some loc1, Some loc2 -> - let k = Loc.compare loc1 loc2 in - let loc = if k <= 0 then loc1 else loc2 in - Some loc - - let access = function - | Read -> read_t - | Write (ctx, _) -> write_t ~ctx + (match (loc1_opt, loc2_opt) with + | (None, None) -> None + | (Some loc, None) + | (None, Some loc) -> + Some loc + | (Some loc1, Some loc2) -> + let k = ALoc.compare loc1 loc2 in + let loc = + if k <= 0 then + loc1 + else + loc2 + in + Some loc) let iter_t f = function | Field (_, t, _) @@ -1303,8 +1382,7 @@ end = struct | Set (_, t) | Method (_, t) -> f acc t - | GetSet (_, t1, _, t2) -> - f (f acc t1) t2 + | GetSet (_, t1, _, t2) -> f (f acc t1) t2 let map_t f = function | Field (loc, t, polarity) -> Field (loc, f t, polarity) @@ -1317,20 +1395,35 @@ end = struct match p with | Field (loc, t, polarity) -> let t_ = f t in - if t_ == t then p else Field (loc, t_, polarity) + if t_ == t then + p + else + Field (loc, t_, polarity) | Get (loc, t) -> let t_ = f t in - if t_ == t then p else Get (loc, t_) + if t_ == t then + p + else + Get (loc, t_) | Set (loc, t) -> let t_ = f t in - if t_ == t then p else Set (loc, t_) + if t_ == t then + p + else + Set (loc, t_) | GetSet (loc1, t1, loc2, t2) -> let t1_ = f t1 in let t2_ = f t2 in - if t1_ == t1 && t2_ == t2 then p else GetSet (loc1, t1_, loc2, t2_) + if t1_ == t1 && t2_ == t2 then + p + else + GetSet (loc1, t1_, loc2, t2_) | Method (loc, t) -> let t_ = f t in - if t_ == t then p else Method (loc, t_) + if t_ == t then + p + else + Method (loc, t_) let forall_t f = fold_t (fun acc t -> acc && f t) true @@ -1342,110 +1435,168 @@ end and Properties : sig type t = Property.t SMap.t - type id = private int + type id + module Map : MyMap.S with type key = id + module Set : Set.S with type elt = id + type map = t Map.t - val add_field: string -> Polarity.t -> Loc.t option -> TypeTerm.t -> t -> t - val add_getter: string -> Loc.t option -> TypeTerm.t -> t -> t - val add_setter: string -> Loc.t option -> TypeTerm.t -> t -> t - val add_method: string -> Loc.t option -> TypeTerm.t -> t -> t + val add_field : string -> Polarity.t -> ALoc.t option -> TypeTerm.t -> t -> t + + val add_getter : string -> ALoc.t option -> TypeTerm.t -> t -> t + + val add_setter : string -> ALoc.t option -> TypeTerm.t -> t -> t + + val add_method : string -> ALoc.t option -> TypeTerm.t -> t -> t + + val generate_id : unit -> id + + val id_of_int : int -> id + + val id_as_int : id -> int option - val mk_id: unit -> id - val fake_id: id - val string_of_id: id -> string - val extract_named_exports: t -> Exports.t + val id_of_aloc : ALoc.t -> id - val iter_t: (TypeTerm.t -> unit) -> t -> unit + val fake_id : id - val map_t: (TypeTerm.t -> TypeTerm.t) -> t -> t - val map_fields: (TypeTerm.t -> TypeTerm.t) -> t -> t - val mapi_fields: (string -> TypeTerm.t -> TypeTerm.t) -> t -> t + val string_of_id : id -> string + + val extract_named_exports : t -> Exports.t + + 
val iter_t : (TypeTerm.t -> unit) -> t -> unit + + val map_t : (TypeTerm.t -> TypeTerm.t) -> t -> t + + val map_fields : (TypeTerm.t -> TypeTerm.t) -> t -> t + + val mapi_fields : (string -> TypeTerm.t -> TypeTerm.t) -> t -> t end = struct open TypeTerm type t = Property.t SMap.t - type id = int - module Map : MyMap.S with type key = id = MyMap.Make(struct + (* In order to minimize the frequency with which we unnecessarily compare + equivalent objects, we assign all objects created at the top level of a + source program an id of their location instead of an int. This way, if we + see the object twice between the merge and check phases, we still hit + the object to object fast path when checking *) + type id = + | Source of ALoc.t + | Generated of int + + let compare_id id1 id2 = + match (id1, id2) with + | (Source loc1, Source loc2) -> ALoc.quick_compare loc1 loc2 + | (Generated i1, Generated i2) -> i1 - i2 + | (Source _, Generated _) -> -1 + | (Generated _, Source _) -> 1 + + module Map : MyMap.S with type key = id = MyMap.Make (struct type t = id - let compare = Pervasives.compare + + let compare = compare_id end) - module Set : Set.S with type elt = id = Set.Make(struct + + module Set : Set.S with type elt = id = Set.Make (struct type t = id - let compare = Pervasives.compare + + let compare = compare_id end) type map = t Map.t - let add_field x polarity loc t = - SMap.add x (Field (loc, t, polarity)) + let add_field x polarity loc t = SMap.add x (Field (loc, t, polarity)) let add_getter x loc get_t map = - let p = match SMap.get x map with - | Some (Set (set_loc, set_t)) -> GetSet (loc, get_t, set_loc, set_t) - | _ -> Get (loc, get_t) + let p = + match SMap.get x map with + | Some (Set (set_loc, set_t)) -> GetSet (loc, get_t, set_loc, set_t) + | _ -> Get (loc, get_t) in SMap.add x p map let add_setter x loc set_t map = - let p = match SMap.get x map with - | Some (Get (get_loc, get_t)) -> GetSet (get_loc, get_t, loc, set_t) - | _ -> Set (loc, set_t) + let p = + match SMap.get x map with + | Some (Get (get_loc, get_t)) -> GetSet (get_loc, get_t, loc, set_t) + | _ -> Set (loc, set_t) in SMap.add x p map - let add_method x loc t = - SMap.add x (Method (loc, t)) + let add_method x loc t = SMap.add x (Method (loc, t)) - let mk_id = Reason.mk_id - let fake_id = 0 - let string_of_id = string_of_int + let id_of_int i = Generated i + + let id_as_int = function + | Generated i -> Some i + | _ -> None + + let generate_id = Reason.mk_id %> id_of_int + + let id_of_aloc loc = Source loc + + let fake_id = Generated 0 + + let string_of_id = function + | Generated id -> string_of_int id + | Source id -> string_of_aloc id let extract_named_exports pmap = - SMap.fold (fun x p tmap -> - match Property.read_t p with - | Some t -> SMap.add x (Property.read_loc p, t) tmap - | None -> tmap - ) pmap SMap.empty + SMap.fold + (fun x p tmap -> + match Property.read_t p with + | Some t -> SMap.add x (Property.read_loc p, t) tmap + | None -> tmap) + pmap + SMap.empty let iter_t f = SMap.iter (fun _ -> Property.iter_t f) let map_t f = SMap.map (Property.map_t f) - let map_fields f = SMap.map (function - | Field (loc, t, polarity) -> Field (loc, f t, polarity) - | p -> p - ) + let map_fields f = + SMap.map (function + | Field (loc, t, polarity) -> Field (loc, f t, polarity) + | p -> p) - let mapi_fields f = SMap.mapi (fun k -> function - | Field (loc, t, polarity) -> Field (loc, f k t, polarity) - | p -> p - ) + let mapi_fields f = + SMap.mapi (fun k -> + function + | Field (loc, t, polarity) -> Field (loc, f k t, 
polarity) + | p -> p) end and Exports : sig - type t = (Loc.t option * TypeTerm.t) SMap.t + type t = (ALoc.t option * TypeTerm.t) SMap.t type id + module Map : MyMap.S with type key = id + type map = t Map.t - val mk_id: unit -> id - val string_of_id: id -> string + val mk_id : unit -> id + + val string_of_id : id -> string end = struct - type t = (Loc.t option * TypeTerm.t) SMap.t + type t = (ALoc.t option * TypeTerm.t) SMap.t type id = int - module Map : MyMap.S with type key = id = MyMap.Make(struct + + module Map : MyMap.S with type key = id = MyMap.Make (struct type key = id + type t = key + let compare = Pervasives.compare end) + type map = t Map.t let mk_id = Reason.mk_id + let string_of_id = string_of_int end @@ -1460,34 +1611,36 @@ end can do so via `members`, which provides access via the standard list representation. *) - and UnionRep : sig type t + val make : TypeTerm.t -> TypeTerm.t -> TypeTerm.t list -> t (** build a rep from list of members *) - val make: TypeTerm.t -> TypeTerm.t -> TypeTerm.t list -> t + val specialized_reason : reason -> t -> reason (** replace reason with specialized desc, if any *) - val specialized_reason: reason -> t -> reason + val members : t -> TypeTerm.t list (** members in declaration order *) - val members: t -> TypeTerm.t list - val members_nel: t -> TypeTerm.t * TypeTerm.t Nel.t - val cons: TypeTerm.t -> t -> t + val members_nel : t -> TypeTerm.t * TypeTerm.t Nel.t - val rev_append: t -> t -> t + val cons : TypeTerm.t -> t -> t + val rev_append : t -> t -> t + + val ident_map : (TypeTerm.t -> TypeTerm.t) -> t -> t (** map rep r to rep r' along type mapping f. if nothing would be changed, returns the physically-identical rep. *) - val ident_map: (TypeTerm.t -> TypeTerm.t) -> t -> t - val optimize: t -> + val optimize : + t -> flatten:(TypeTerm.t list -> TypeTerm.t list) -> find_resolved:(TypeTerm.t -> TypeTerm.t option) -> find_props:(Properties.id -> TypeTerm.property SMap.t) -> unit - val is_optimized_finally: t -> bool + + val is_optimized_finally : t -> bool (** quick membership tests for enums and disjoint unions *) type quick_mem_result = @@ -1496,90 +1649,95 @@ and UnionRep : sig | Conditional of TypeTerm.t | Unknown - val join_quick_mem_results: quick_mem_result * quick_mem_result -> quick_mem_result + val join_quick_mem_results : quick_mem_result * quick_mem_result -> quick_mem_result - val quick_mem_enum: - TypeTerm.t -> - t -> quick_mem_result + val quick_mem_enum : bool -> TypeTerm.t -> t -> quick_mem_result - val quick_mem_disjoint_union: + val quick_mem_disjoint_union : find_resolved:(TypeTerm.t -> TypeTerm.t option) -> find_props:(Properties.id -> TypeTerm.property SMap.t) -> + bool -> TypeTerm.t -> - t -> quick_mem_result + t -> + quick_mem_result - val check_enum: t -> EnumSet.t option + val check_enum : t -> UnionEnumSet.t option end = struct - (* canonicalize a type w.r.t. 
enum membership *) - let canon = TypeTerm.(function - | DefT (_, SingletonStrT lit) - | DefT (_, StrT (Literal (_, lit))) -> Some (Enum.Str lit) - | DefT (_, SingletonNumT lit) - | DefT (_, NumT (Literal (_, lit))) -> Some (Enum.Num lit) - | DefT (_, SingletonBoolT lit) - | DefT (_, BoolT (Some lit)) -> Some (Enum.Bool lit) - | DefT (_, VoidT) -> Some (Enum.Void) - | DefT (_, NullT) -> Some (Enum.Null) - | _ -> None - ) - - let is_base = TypeTerm.(function - | DefT (_, SingletonStrT _) - | DefT (_, SingletonNumT _) - | DefT (_, SingletonBoolT _) - | DefT (_, VoidT) - | DefT (_, NullT) - -> true - | _ -> false - ) + let canon = + TypeTerm.( + function + | DefT (_, _, SingletonStrT lit) + | DefT (_, _, StrT (Literal (_, lit))) -> + Some (UnionEnum.Str lit) + | DefT (_, _, SingletonNumT lit) + | DefT (_, _, NumT (Literal (_, lit))) -> + Some (UnionEnum.Num lit) + | DefT (_, _, SingletonBoolT lit) + | DefT (_, _, BoolT (Some lit)) -> + Some (UnionEnum.Bool lit) + | DefT (_, _, VoidT) -> Some UnionEnum.Void + | DefT (_, _, NullT) -> Some UnionEnum.Null + | _ -> None) + + let is_base = + TypeTerm.( + function + | DefT (_, _, SingletonStrT _) + | DefT (_, _, SingletonNumT _) + | DefT (_, _, SingletonBoolT _) + | DefT (_, _, VoidT) + | DefT (_, _, NullT) -> + true + | _ -> false) (* disjoint unions are stored as singleton type maps *) - module EnumMap = MyMap.Make(Enum) + module UnionEnumMap = MyMap.Make (UnionEnum) type finally_optimized_rep = - | Enum of EnumSet.t - | PartiallyOptimizedEnum of EnumSet.t * TypeTerm.t Nel.t - | DisjointUnion of TypeTerm.t EnumMap.t SMap.t - | PartiallyOptimizedDisjointUnion of TypeTerm.t EnumMap.t SMap.t * TypeTerm.t Nel.t + | UnionEnum of UnionEnumSet.t + | PartiallyOptimizedUnionEnum of UnionEnumSet.t * TypeTerm.t Nel.t + | DisjointUnion of TypeTerm.t UnionEnumMap.t SMap.t + | PartiallyOptimizedDisjointUnion of TypeTerm.t UnionEnumMap.t SMap.t * TypeTerm.t Nel.t | Empty | Singleton of TypeTerm.t | Unoptimized + type t = TypeTerm.t * TypeTerm.t * TypeTerm.t list * finally_optimized_rep option ref (** union rep is: - list of members in declaration order, with at least 2 elements - if union is an enum (set of singletons over a common base) then Some (base, set) (additional specializations probably to come) *) - type t = - TypeTerm.t * TypeTerm.t * TypeTerm.t list * - finally_optimized_rep option ref (** given a list of members, build a rep. 
specialized reps are used on compatible type lists *) let make = let rec mk_enum tset = function | [] -> Some tset - | t::ts -> - begin match canon t with - | Some tcanon when is_base t -> mk_enum (EnumSet.add tcanon tset) ts + | t :: ts -> + begin + match canon t with + | Some tcanon when is_base t -> mk_enum (UnionEnumSet.add tcanon tset) ts | _ -> None - end in - + end + in fun t0 t1 ts -> - let enum = Option.(mk_enum EnumSet.empty (t0::t1::ts) >>| fun tset -> Enum tset) in - t0, t1, ts, ref enum + let enum = + Option.(mk_enum UnionEnumSet.empty (t0 :: t1 :: ts) >>| (fun tset -> UnionEnum tset)) + in + (t0, t1, ts, ref enum) + + let members (t0, t1, ts, _) = t0 :: t1 :: ts - let members (t0, t1, ts, _) = t0::t1::ts - let members_nel (t0, t1, ts, _) = t0, (t1, ts) + let members_nel (t0, t1, ts, _) = (t0, (t1, ts)) - let cons t0 (t1, t2, ts, _) = - make t0 t1 (t2::ts) + let cons t0 (t1, t2, ts, _) = make t0 t1 (t2 :: ts) let rev_append rep1 rep2 = match List.rev_append (members rep1) (members rep2) with - | t0::t1::ts -> make t0 t1 ts + | t0 :: t1 :: ts -> make t0 t1 ts | _ -> failwith "impossible" let ident_map f ((t0, t1, ts, _) as rep) = @@ -1587,145 +1745,179 @@ end = struct let t1_ = f t1 in let ts_ = ListUtils.ident_map f ts in let changed = t0_ != t0 || t1_ != t1 || ts_ != ts in - if changed then make t0_ t1_ ts_ - else rep + if changed then + make t0_ t1_ ts_ + else + rep let specialized_reason r (_, _, _, specialization) = match !specialization with - | Some Empty -> replace_reason_const REmpty r + | Some Empty -> replace_desc_reason REmpty r | Some (Singleton t) -> TypeUtil.reason_of_t t - | Some (Enum _) -> replace_reason_const REnum r + | Some (UnionEnum _) -> replace_desc_reason RUnionEnum r | _ -> r (********** Optimizations **********) - let is_optimized_finally (_, _, _, specialization) = - !specialization <> None + let is_optimized_finally (_, _, _, specialization) = !specialization <> None (* Private helper, must be called after full resolution. Ideally would be returned as a bit by TypeTerm.union_flatten, and kept in sync there. 
*) let contains_only_flattened_types = - List.for_all TypeTerm.(function - (* the only unresolved tvars at this point are those that instantiate polymorphic types *) - | OpenT _ - (* some types may not be evaluated yet; TODO *) - | EvalT _ - | DefT (_, TypeAppT _) - | KeysT _ - | DefT (_, IntersectionT _) - (* other types might wrap parts that are accessible directly *) - | OpaqueT _ - | DefT (_, InstanceT _) - | DefT (_, PolyT _) - -> false - | _ -> true - ) + List.for_all + TypeTerm.( + function + (* the only unresolved tvars at this point are those that instantiate polymorphic types *) + | OpenT _ + (* some types may not be evaluated yet; TODO *) + + | EvalT _ + | TypeAppT _ + | KeysT _ + | IntersectionT _ + (* other types might wrap parts that are accessible directly *) + + | OpaqueT _ + | DefT (_, _, InstanceT _) + | DefT (_, _, PolyT _) -> + false + | _ -> true) let enum_optimize = let split_enum = - List.fold_left (fun (tset, others) t -> - match canon t with - | Some tcanon when is_base t -> - EnumSet.add tcanon tset, others - | _ -> tset, t::others - ) (EnumSet.empty, []) in - + List.fold_left + (fun (tset, others) t -> + match canon t with + | Some tcanon when is_base t -> (UnionEnumSet.add tcanon tset, others) + | _ -> (tset, t :: others)) + (UnionEnumSet.empty, []) + in function - | [] -> Empty - | [t] -> Singleton t - | ts -> - let tset, others = split_enum ts in - match others with - | [] -> Enum tset - | x::xs -> - if EnumSet.is_empty tset then Unoptimized - else PartiallyOptimizedEnum (tset, Nel.rev (x, xs)) - - let canon_prop find_resolved p = - Option.(Property.read_t p >>= find_resolved >>= canon) + | [] -> Empty + | [t] -> Singleton t + | ts -> + let (tset, others) = split_enum ts in + (match others with + | [] -> UnionEnum tset + | x :: xs -> + if UnionEnumSet.is_empty tset then + Unoptimized + else + PartiallyOptimizedUnionEnum (tset, Nel.rev (x, xs))) + + let canon_prop find_resolved p = Option.(Property.read_t p >>= find_resolved >>= canon) let base_prop find_resolved p = match Option.(Property.read_t p >>= find_resolved) with - | Some t when is_base t -> canon t - | _ -> None + | Some t when is_base t -> canon t + | _ -> None let props_of find_props t = - let open TypeTerm in - match t with - | DefT (_, ObjT { props_tmap; _ }) | ExactT (_, DefT (_, ObjT { props_tmap; _ })) -> + TypeTerm.( + match t with + | DefT (_, _, ObjT { props_tmap; _ }) + | ExactT (_, DefT (_, _, ObjT { props_tmap; _ })) -> Some (find_props props_tmap) - | _ -> None + | _ -> None) let disjoint_union_optimize = let base_props_of find_resolved find_props t = - Option.(props_of find_props t >>| fun prop_map -> - SMap.fold (fun key p acc -> - match base_prop find_resolved p with - | Some enum -> SMap.add key (enum, t) acc - | _ -> acc - ) prop_map SMap.empty) in + Option.( + props_of find_props t + >>| fun prop_map -> + SMap.fold + (fun key p acc -> + match base_prop find_resolved p with + | Some enum -> SMap.add key (enum, t) acc + | _ -> acc) + prop_map + SMap.empty) + in let split_disjoint_union find_resolved find_props ts = - List.fold_left (fun (candidates, others) t -> - match base_props_of find_resolved find_props t with - | None -> candidates, t::others - | Some base_props -> base_props::candidates, others - ) ([], []) ts in + List.fold_left + (fun (candidates, others) t -> + match base_props_of find_resolved find_props t with + | None -> (candidates, t :: others) + | Some base_props -> (base_props :: candidates, others)) + ([], []) + ts + in let unique_values = let rec unique_values idx 
= function - | [] -> Some idx - | (enum, t)::values -> - begin match EnumMap.get enum idx with - | None -> unique_values (EnumMap.add enum t idx) values - | Some t' -> - if TypeUtil.reasonless_eq t t' - then unique_values idx values - else None - end - in fun values -> - unique_values EnumMap.empty values in + | [] -> Some idx + | (enum, t) :: values -> + begin + match UnionEnumMap.get enum idx with + | None -> unique_values (UnionEnumMap.add enum t idx) values + | Some t' -> + if TypeUtil.reasonless_eq t t' then + unique_values idx values + else + None + end + in + (fun values -> unique_values UnionEnumMap.empty values) + in let unique idx = - SMap.fold (fun key values acc -> - match unique_values values with - | None -> acc - | Some idx -> SMap.add key idx acc - ) idx SMap.empty in + SMap.fold + (fun key values acc -> + match unique_values values with + | None -> acc + | Some idx -> SMap.add key idx acc) + idx + SMap.empty + in let index candidates = match candidates with - | [] -> SMap.empty - | base_props::candidates -> - (* Compute the intersection of properties of objects that have singleton types *) - let init = SMap.map (fun enum_t -> [enum_t]) base_props in - let idx = List.fold_left (fun acc base_props -> - SMap.merge (fun _key enum_t_opt values_opt -> - Option.(both enum_t_opt values_opt >>| (fun (enum_t, values) -> - List.cons enum_t values - )) - ) base_props acc - ) init candidates in - (* Ensure that enums map to unique types *) - unique idx in - + | [] -> SMap.empty + | base_props :: candidates -> + (* Compute the intersection of properties of objects that have singleton types *) + let init = SMap.map (fun enum_t -> [enum_t]) base_props in + let idx = + List.fold_left + (fun acc base_props -> + SMap.merge + (fun _key enum_t_opt values_opt -> + Option.( + both enum_t_opt values_opt >>| (fun (enum_t, values) -> List.cons enum_t values))) + base_props + acc) + init + candidates + in + (* Ensure that enums map to unique types *) + unique idx + in fun ~find_resolved ~find_props -> function | [] -> Empty | [t] -> Singleton t | ts -> - let candidates, others = split_disjoint_union find_resolved find_props ts in + let (candidates, others) = split_disjoint_union find_resolved find_props ts in let map = index candidates in - let others = if SMap.is_empty map then ts else List.rev others in - match others with - | [] -> DisjointUnion map - | x::xs -> - if SMap.is_empty map then Unoptimized - else PartiallyOptimizedDisjointUnion (map, (x, xs)) + let others = + if SMap.is_empty map then + ts + else + List.rev others + in + (match others with + | [] -> DisjointUnion map + | x :: xs -> + if SMap.is_empty map then + Unoptimized + else + PartiallyOptimizedDisjointUnion (map, (x, xs))) let optimize rep ~flatten ~find_resolved ~find_props = let ts = flatten (members rep) in if contains_only_flattened_types ts then let opt = enum_optimize ts in - let opt = match opt with + let opt = + match opt with | Unoptimized -> disjoint_union_optimize ~find_resolved ~find_props ts - | _ -> opt in - let _, _, _, specialization = rep in + | _ -> opt + in + let (_, _, _, specialization) = rep in specialization := Some opt (********** Quick matching **********) @@ -1737,86 +1929,120 @@ end = struct | Unknown let join_quick_mem_results = function - | Yes, _ | _, Yes -> Yes - | Unknown, _ | _, Unknown -> Unknown - | Conditional _, _ | _, Conditional _ -> Unknown (* TODO *) - | No, No -> No + | (Yes, _) + | (_, Yes) -> + Yes + | (Unknown, _) + | (_, Unknown) -> + Unknown + | (Conditional _, _) + | (_, 
Conditional _) -> + Unknown (* TODO *) + | (No, No) -> No (* assume we know that l is a canonizable type *) - let quick_mem_enum l (_t0, _t1, _ts, specialization) = + let quick_mem_enum trust_checked l (_t0, _t1, _ts, specialization) = match canon l with | Some tcanon -> - begin match !specialization with + begin + match !specialization with | None -> Unknown | Some Unoptimized -> Unknown | Some Empty -> No | Some (Singleton t) -> - if TypeUtil.quick_subtype l t then Yes - else Conditional t + if TypeUtil.quick_subtype trust_checked l t then + Yes + else + Conditional t | Some (DisjointUnion _) -> No | Some (PartiallyOptimizedDisjointUnion (_, others)) -> - if Nel.exists (TypeUtil.quick_subtype l) others - then Yes - else Unknown - | Some (Enum tset) -> - if EnumSet.mem tcanon tset - then Yes - else No - | Some (PartiallyOptimizedEnum (tset, others)) -> - if EnumSet.mem tcanon tset - then Yes - else if Nel.exists (TypeUtil.quick_subtype l) others - then Yes - else Unknown + if Nel.exists (TypeUtil.quick_subtype trust_checked l) others then + Yes + else + Unknown + | Some (UnionEnum tset) -> + if UnionEnumSet.mem tcanon tset then + Yes + else + No + | Some (PartiallyOptimizedUnionEnum (tset, others)) -> + if UnionEnumSet.mem tcanon tset then + Yes + else if Nel.exists (TypeUtil.quick_subtype trust_checked l) others then + Yes + else + Unknown end | None -> failwith "quick_mem_enum is defined only for canonizable type" let lookup_disjoint_union find_resolved prop_map ~partial map = - SMap.fold (fun key idx acc -> - if acc <> Unknown then acc - else match SMap.get key prop_map with - | Some p -> - begin match canon_prop find_resolved p with - | Some enum -> - begin match EnumMap.get enum idx with - | Some t' -> Conditional t' - | None -> if partial then Unknown else No - end - | None -> Unknown - end - | None -> if partial then Unknown else No - ) map Unknown + SMap.fold + (fun key idx acc -> + if acc <> Unknown then + acc + else + match SMap.get key prop_map with + | Some p -> + begin + match canon_prop find_resolved p with + | Some enum -> + begin + match UnionEnumMap.get enum idx with + | Some t' -> Conditional t' + | None -> + if partial then + Unknown + else + No + end + | None -> Unknown + end + | None -> + if partial then + Unknown + else + No) + map + Unknown (* we know that l is an object type or exact object type *) - let quick_mem_disjoint_union ~find_resolved ~find_props l (_t0, _t1, _ts, specialization) = + let quick_mem_disjoint_union + ~find_resolved ~find_props trust_checked l (_t0, _t1, _ts, specialization) = match props_of find_props l with - | Some prop_map -> - begin match !specialization with - | None -> Unknown - | Some Unoptimized -> Unknown - | Some Empty -> No - | Some (Singleton t) -> - if TypeUtil.quick_subtype l t then Yes - else Conditional t - | Some (DisjointUnion map) -> - lookup_disjoint_union find_resolved prop_map ~partial:false map - | Some (PartiallyOptimizedDisjointUnion (map, others)) -> - let result = lookup_disjoint_union find_resolved prop_map ~partial:true map in - if result <> Unknown then result - else if Nel.exists (TypeUtil.quick_subtype l) others then Yes - else Unknown - | Some (Enum _) -> No - | Some (PartiallyOptimizedEnum (_, others)) -> - if Nel.exists (TypeUtil.quick_subtype l) others then Yes - else Unknown - end - | _ -> failwith "quick_mem_disjoint_union is defined only on object / exact object types" + | Some prop_map -> + begin + match !specialization with + | None -> Unknown + | Some Unoptimized -> Unknown + | Some Empty -> No + | 
Some (Singleton t) -> + if TypeUtil.quick_subtype trust_checked l t then + Yes + else + Conditional t + | Some (DisjointUnion map) -> + lookup_disjoint_union find_resolved prop_map ~partial:false map + | Some (PartiallyOptimizedDisjointUnion (map, others)) -> + let result = lookup_disjoint_union find_resolved prop_map ~partial:true map in + if result <> Unknown then + result + else if Nel.exists (TypeUtil.quick_subtype trust_checked l) others then + Yes + else + Unknown + | Some (UnionEnum _) -> No + | Some (PartiallyOptimizedUnionEnum (_, others)) -> + if Nel.exists (TypeUtil.quick_subtype trust_checked l) others then + Yes + else + Unknown + end + | _ -> failwith "quick_mem_disjoint_union is defined only on object / exact object types" let check_enum (_, _, _, specialization) = match !specialization with - | Some Enum enums -> Some enums - | _ -> None - + | Some (UnionEnum enums) -> Some enums + | _ -> None end (* We encapsulate IntersectionT's internal structure. @@ -1827,39 +2053,38 @@ end can do so via `members`, which provides access via the standard list representation. *) - and InterRep : sig type t + val make : TypeTerm.t -> TypeTerm.t -> TypeTerm.t list -> t (** build rep from list of members *) - val make: TypeTerm.t -> TypeTerm.t -> TypeTerm.t list -> t + val members : t -> TypeTerm.t list (** member list in declaration order *) - val members: t -> TypeTerm.t list - val members_nel: t -> TypeTerm.t * TypeTerm.t Nel.t + val members_nel : t -> TypeTerm.t * TypeTerm.t Nel.t + + val map : (TypeTerm.t -> TypeTerm.t) -> t -> t (** map rep r to rep r' along type mapping f. drops history *) - val map: (TypeTerm.t -> TypeTerm.t) -> t -> t - val append: TypeTerm.t list -> t -> t + val append : TypeTerm.t list -> t -> t + val ident_map : (TypeTerm.t -> TypeTerm.t) -> t -> t (** map rep r to rep r' along type mapping f. drops history. if nothing would be changed, returns the physically-identical rep. *) - val ident_map: (TypeTerm.t -> TypeTerm.t) -> t -> t - end = struct + type t = TypeTerm.t * TypeTerm.t * TypeTerm.t list (** intersection rep is: - member list in declaration order *) - type t = - TypeTerm.t * TypeTerm.t * TypeTerm.t list let make t0 t1 ts = (t0, t1, ts) - let members (t0, t1, ts) = t0::t1::ts - let members_nel (t0, t1, ts) = t0, (t1, ts) + let members (t0, t1, ts) = t0 :: t1 :: ts - let map f (t0, t1, ts) = make (f t0) (f t1) (List.map f ts) + let members_nel (t0, t1, ts) = (t0, (t1, ts)) + + let map f (t0, t1, ts) = make (f t0) (f t1) (Core_list.map ~f ts) let append ts2 (t0, t1, ts1) = make t0 t1 (ts1 @ ts2) @@ -1867,12 +2092,18 @@ end = struct let t0_ = f t0 in let t1_ = f t1 in let changed = t0_ != t0 || t1_ != t1 in - let rev_ts, changed = List.fold_left (fun (rev_ts, changed) member -> - let member_ = f member in - member_::rev_ts, changed || member_ != member - ) ([], changed) ts in - if changed then make t0_ t1_ (List.rev rev_ts) else rep - + let (rev_ts, changed) = + List.fold_left + (fun (rev_ts, changed) member -> + let member_ = f member in + (member_ :: rev_ts, changed || member_ != member)) + ([], changed) + ts + in + if changed then + make t0_ t1_ (List.rev rev_ts) + else + rep end (* The typechecking algorithm often needs to maintain sets of types, or more @@ -1880,28 +2111,35 @@ end information to types). Type terms may also contain internal sets or maps. 
*) - -and TypeSet : Set.S with type elt = TypeTerm.t = Set.Make(struct +and TypeSet : (Set.S with type elt = TypeTerm.t) = Set.Make (struct type elt = TypeTerm.t + type t = elt + let compare = Pervasives.compare end) -and TypeMap : MyMap.S with type key = TypeTerm.t = MyMap.Make (struct +and TypeMap : (MyMap.S with type key = TypeTerm.t) = MyMap.Make (struct type key = TypeTerm.t + type t = key + let compare = Pervasives.compare end) -and UseTypeSet : Set.S with type elt = TypeTerm.use_t = Set.Make (struct +and UseTypeSet : (Set.S with type elt = TypeTerm.use_t) = Set.Make (struct type elt = TypeTerm.use_t + type t = elt + let compare = Pervasives.compare end) -and UseTypeMap : MyMap.S with type key = TypeTerm.use_t = MyMap.Make (struct +and UseTypeMap : (MyMap.S with type key = TypeTerm.use_t) = MyMap.Make (struct type key = TypeTerm.use_t + type t = key + let compare = Pervasives.compare end) @@ -1919,22 +2157,51 @@ and Object : sig | List0 of TypeTerm.t Nel.t * join | List of TypeTerm.t list * resolved Nel.t * join - and join = And | Or + and join' = + | And + | Or + + (* This location is that of the entire intersection/union, not just the location of the &/| symbol *) + and join = ALoc.t * join' (* A union type resolves to a resolved spread with more than one element *) and resolved = slice Nel.t - and slice = reason * props * dict * TypeTerm.flags + and slice = { + reason: reason; + props: props; + dict: dict; + flags: TypeTerm.flags; + } and props = prop SMap.t - and prop = TypeTerm.t * bool (* own *) + and prop = TypeTerm.t * bool + + (* own *) and dict = TypeTerm.dicttype option module Spread : sig + (* This is the type we feed into SpreadType to be processed by object_kit. It's different + * than slice because object_kit processes the properties in ways that do not need to + * be exposed to other files. 
*) + type operand_slice = { + reason: reason; + prop_map: Properties.t; + dict: dict; + } + + type operand = + | Slice of operand_slice + | Type of TypeTerm.t + + type acc_element = + | ResolvedSlice of resolved + | InlineSlice of operand_slice + type state = { - todo_rev: TypeTerm.t list; - acc: resolved list; + todo_rev: operand list; + acc: acc_element list; } and target = @@ -1955,13 +2222,19 @@ and Object : sig type merge_mode = | Sound | IgnoreExactAndOwn - | ReactConfigMerge + | ReactConfigMerge of Polarity.t end module ReactConfig : sig type state = - | Config of { defaults: TypeTerm.t option; children: TypeTerm.t option } - | Defaults of { config: resolved; children: TypeTerm.t option } + | Config of { + defaults: TypeTerm.t option; + children: TypeTerm.t option; + } + | Defaults of { + config: resolved; + children: TypeTerm.t option; + } end type tool = @@ -1969,45 +2242,46 @@ and Object : sig | Spread of Spread.target * Spread.state | Rest of Rest.merge_mode * Rest.state | ReactConfig of ReactConfig.state -end = Object + | ObjectRep +end = + Object and React : sig module PropType : sig type t = - | Primitive of (is_required * TypeTerm.t) - | Complex of complex + | Primitive of (is_required * TypeTerm.t) + | Complex of complex and is_required = bool and complex = - | ArrayOf - | InstanceOf - | ObjectOf - | OneOf - | OneOfType - | Shape + | ArrayOf + | InstanceOf + | ObjectOf + | OneOf + | OneOfType + | Shape end - type resolved_object = - reason * Properties.t * TypeTerm.dicttype option * TypeTerm.flags + type resolved_object = reason * Properties.t * TypeTerm.dicttype option * TypeTerm.flags type resolve_object = - | ResolveObject - | ResolveDict of (TypeTerm.dicttype * Properties.t * resolved_object) - | ResolveProp of (string * Properties.t * resolved_object) + | ResolveObject + | ResolveDict of (TypeTerm.dicttype * Properties.t * resolved_object) + | ResolveProp of (string * Properties.t * resolved_object) type resolve_array = - | ResolveArray - | ResolveElem of TypeTerm.t list * TypeTerm.t list + | ResolveArray + | ResolveElem of TypeTerm.t list * TypeTerm.t list module SimplifyPropType : sig type tool = - | ArrayOf - | InstanceOf - | ObjectOf - | OneOf of resolve_array - | OneOfType of resolve_array - | Shape of resolve_object + | ArrayOf + | InstanceOf + | ObjectOf + | OneOf of resolve_array + | OneOfType of resolve_array + | Shape of resolve_object end module CreateClass : sig @@ -2016,18 +2290,20 @@ and React : sig * this in order, accumulating the resolved information until we have enough * to compute the instance type. *) type tool = - | Spec of stack_tail - | Mixins of stack - | Statics of stack - | PropTypes of stack * resolve_object - | DefaultProps of TypeTerm.t list * default_props option - | InitialState of TypeTerm.t list * initial_state option + | Spec of stack_tail + | Mixins of stack + | Statics of stack + | PropTypes of stack * resolve_object + | DefaultProps of TypeTerm.t list * default_props option + | InitialState of TypeTerm.t list * initial_state option (* When we encounter mixins, we push the current spec's props into a stack, * then resolve each mixin in turn. This is recursive, as mixins can have * mixins. 
*) and stack = stack_head * stack_tail + and stack_head = resolved_object * spec + and stack_tail = (stack_head * TypeTerm.t list * spec maybe_known list) list and spec = { @@ -2040,12 +2316,20 @@ and React : sig } and statics = resolved_object maybe_known + and prop_types = resolved_object maybe_known + and default_props = resolved_object maybe_known + and initial_state = resolved_object or_null maybe_known - and 'a maybe_known = Known of 'a | Unknown of reason - and 'a or_null = NotNull of 'a | Null of reason + and 'a maybe_known = + | Known of 'a + | Unknown of reason + + and 'a or_null = + | NotNull of 'a + | Null of reason (* Components have some recursive dependencies. For example, the instance * type depends on the return value of its methods, but those methods also @@ -2059,43 +2343,68 @@ and React : sig end type tool = - | CreateElement0 of bool * TypeTerm.t * (TypeTerm.t list * TypeTerm.t option) * TypeTerm.t_out - | CreateElement of bool * TypeTerm.t * TypeTerm.t * (TypeTerm.t list * TypeTerm.t option) * TypeTerm.t_out - | GetProps of TypeTerm.t_out - | GetConfig of TypeTerm.t_out - | GetRef of TypeTerm.t_out - | SimplifyPropType of SimplifyPropType.tool * TypeTerm.t_out - | CreateClass of CreateClass.tool * CreateClass.knot * TypeTerm.t_out -end = React + | CreateElement0 of bool * TypeTerm.t * (TypeTerm.t list * TypeTerm.t option) * TypeTerm.t_out + | CreateElement of + bool * TypeTerm.t * TypeTerm.t * (TypeTerm.t list * TypeTerm.t option) * TypeTerm.t_out + | ConfigCheck of TypeTerm.t + | GetProps of TypeTerm.t_out + | GetConfig of TypeTerm.t_out + | GetConfigType of TypeTerm.t * TypeTerm.t_out + | GetRef of TypeTerm.t_out + | SimplifyPropType of SimplifyPropType.tool * TypeTerm.t_out + | CreateClass of CreateClass.tool * CreateClass.knot * TypeTerm.t_out +end = + React and TypeUtil : sig - val reason_of_t: TypeTerm.t -> reason - val reason_of_defer_use_t: TypeTerm.defer_use_t -> reason - val reason_of_use_t: TypeTerm.use_t -> reason + val reason_of_t : TypeTerm.t -> reason + + val reason_of_defer_use_t : TypeTerm.defer_use_t -> reason + + val reason_of_use_t : TypeTerm.use_t -> reason + + val reason_of_t_add_id : TypeTerm.t -> reason + + val reason_of_use_t_add_id : TypeTerm.use_t -> reason + + val desc_of_t : TypeTerm.t -> reason_desc + + val loc_of_t : TypeTerm.t -> ALoc.t + + val def_loc_of_t : TypeTerm.t -> ALoc.t + + val mod_reason_of_t : (reason -> reason) -> TypeTerm.t -> TypeTerm.t + + val mod_reason_of_defer_use_t : + (reason -> reason) -> TypeTerm.defer_use_t -> TypeTerm.defer_use_t - val reason_of_t_add_id: TypeTerm.t -> reason - val reason_of_use_t_add_id: TypeTerm.use_t -> reason + val mod_reason_of_use_t : (reason -> reason) -> TypeTerm.use_t -> TypeTerm.use_t - val desc_of_t: TypeTerm.t -> reason_desc - val loc_of_t: TypeTerm.t -> Loc.t - val def_loc_of_t: TypeTerm.t -> Loc.t + val mod_reason_of_opt_use_t : (reason -> reason) -> TypeTerm.opt_use_t -> TypeTerm.opt_use_t - val mod_reason_of_t: (reason -> reason) -> TypeTerm.t -> TypeTerm.t - val mod_reason_of_defer_use_t: (reason -> reason) -> TypeTerm.defer_use_t -> TypeTerm.defer_use_t - val mod_reason_of_use_t: (reason -> reason) -> TypeTerm.use_t -> TypeTerm.use_t - val mod_reason_of_opt_use_t: (reason -> reason) -> TypeTerm.opt_use_t -> TypeTerm.opt_use_t + val use_op_of_use_t : TypeTerm.use_t -> TypeTerm.use_op option - val use_op_of_use_t: TypeTerm.use_t -> TypeTerm.use_op option - val mod_use_op_of_use_t: (TypeTerm.use_op -> TypeTerm.use_op) -> TypeTerm.use_t -> TypeTerm.use_t + val mod_use_op_of_use_t : 
+ (TypeTerm.use_op -> TypeTerm.use_op) -> TypeTerm.use_t -> TypeTerm.use_t - val reasonless_compare: TypeTerm.t -> TypeTerm.t -> int - val reasonless_eq: TypeTerm.t -> TypeTerm.t -> bool + val mod_root_of_use_op : + (TypeTerm.root_use_op -> TypeTerm.root_use_op) -> TypeTerm.use_op -> TypeTerm.use_op - val literal_eq: string -> string TypeTerm.literal -> bool - val number_literal_eq: TypeTerm.number_literal -> TypeTerm.number_literal TypeTerm.literal -> bool - val boolean_literal_eq: bool -> bool option -> bool + val mod_loc_of_virtual_use_op : + ('a -> 'b) -> 'a TypeTerm.virtual_use_op -> 'b TypeTerm.virtual_use_op - val quick_subtype: TypeTerm.t -> TypeTerm.t -> bool + val reasonless_compare : TypeTerm.t -> TypeTerm.t -> int + + val reasonless_eq : TypeTerm.t -> TypeTerm.t -> bool + + val literal_eq : string -> string TypeTerm.literal -> bool + + val number_literal_eq : + TypeTerm.number_literal -> TypeTerm.number_literal TypeTerm.literal -> bool + + val boolean_literal_eq : bool -> bool option -> bool + + val quick_subtype : bool -> TypeTerm.t -> TypeTerm.t -> bool end = struct open TypeTerm @@ -2105,16 +2414,14 @@ end = struct type means to the programmer. *) let rec reason_of_t = function - | OpenT (reason,_) -> reason + | OpenT (reason, _) -> reason | AnnotT (reason, _, _) -> reason - | AnyWithLowerBoundT (t) -> reason_of_t t - | AnyWithUpperBoundT (t) -> reason_of_t t | MergedT (reason, _) -> reason | BoundT (reason, _, _) -> reason | InternalT (ChoiceKitT (reason, _)) -> reason | TypeDestructorTriggerT (_, reason, _, _, _) -> reason | CustomFunT (reason, _) -> reason - | DefT (reason, _) -> reason + | DefT (reason, _, _) -> reason | EvalT (_, defer_use_t, _) -> reason_of_defer_use_t defer_use_t | ExactT (reason, _) -> reason | ExistsT reason -> reason @@ -2133,25 +2440,30 @@ end = struct | InternalT (OptionalChainVoidT reason) -> reason | ReposT (reason, _) -> reason | InternalT (ReposUpperT (reason, _)) -> reason (* HUH? cf. 
mod_reason below *) - | ShapeT (t) -> reason_of_t t + | ShapeT t -> reason_of_t t | ThisClassT (reason, _) -> reason | ThisTypeAppT (reason, _, _, _) -> reason + | TypeAppT (reason, _, _, _) -> reason + | AnyT (reason, _) -> reason + | UnionT (reason, _) -> reason + | IntersectionT (reason, _) -> reason + | MaybeT (reason, _) -> reason + | OptionalT (reason, _) -> reason and reason_of_defer_use_t = function - | DestructuringT (reason, _) + | LatentPredT (reason, _) | TypeDestructorT (_, reason, _) -> - reason + reason and reason_of_use_t = function | UseT (_, t) -> reason_of_t t - | AdderT (_,reason,_,_,_) -> reason + | AdderT (_, reason, _, _, _) -> reason | AndT (reason, _, _) -> reason | ArrRestT (_, reason, _, _) -> reason | AssertArithmeticOperandT reason -> reason | AssertBinaryInLHST reason -> reason | AssertBinaryInRHST reason -> reason | AssertForInRHST reason -> reason - | AssertRestParamT reason -> reason | AssertImportIsValueT (reason, _) -> reason | BecomeT (reason, _) -> reason | BindT (_, reason, _, _) -> reason @@ -2162,24 +2474,25 @@ end = struct | ChoiceKitUseT (reason, _) -> reason | CJSExtractNamedExportsT (reason, _, _) -> reason | CJSRequireT (reason, _, _) -> reason - | ComparatorT (reason,_,_) -> reason - | ConstructorT (_,reason,_,_,_) -> reason + | ComparatorT (reason, _, _) -> reason + | ConstructorT (_, reason, _, _, _) -> reason | CopyNamedExportsT (reason, _, _) -> reason | CopyTypeExportsT (reason, _, _) -> reason | DebugPrintT reason -> reason | DebugSleepT reason -> reason | ElemT (_, reason, _, _) -> reason | EqT (reason, _, _) -> reason - | ExportNamedT (reason, _, _, _) -> reason + | ExportNamedT (reason, _, _, _, _) -> reason | ExportTypeT (reason, _, _, _, _) -> reason + | AssertExportIsTypeT (reason, _, _) -> reason | ExtendsUseT (_, reason, _, _, _) -> reason - | GetElemT (_,reason,_,_) -> reason + | GetElemT (_, reason, _, _) -> reason | GetKeysT (reason, _) -> reason | GetValuesT (reason, _) -> reason - | GetPropT (_,reason,_,_) -> reason - | GetPrivatePropT (_,reason,_,_,_, _) -> reason - | GetProtoT (reason,_) -> reason - | GetStaticsT (reason,_) -> reason + | GetPropT (_, reason, _, _) -> reason + | GetPrivatePropT (_, reason, _, _, _, _) -> reason + | GetProtoT (reason, _) -> reason + | GetStaticsT (reason, _) -> reason | GuardT (_, _, t) -> reason_of_t t | HasOwnPropT (_, reason, _) -> reason | IdxUnMaybeifyT (reason, _) -> reason @@ -2192,15 +2505,15 @@ end = struct | ImportTypeT (reason, _, _) -> reason | IntersectionPreprocessKitT (reason, _) -> reason | InvariantT reason -> reason - | LookupT(reason, _, _, _, _) -> reason + | LookupT (reason, _, _, _, _) -> reason | MakeExactT (reason, _) -> reason - | MapTypeT (reason, _, _) -> reason - | MethodT (_,reason,_,_,_,_) -> reason + | MapTypeT (_, reason, _, _) -> reason + | MethodT (_, reason, _, _, _, _) -> reason | MixinT (reason, _) -> reason | NotT (reason, _) -> reason | NullishCoalesceT (reason, _, _) -> reason - | ObjAssignToT (reason, _, _, _) -> reason - | ObjAssignFromT (reason, _, _, _) -> reason + | ObjAssignToT (_, reason, _, _, _) -> reason + | ObjAssignFromT (_, reason, _, _, _) -> reason | ObjFreezeT (reason, _) -> reason | ObjRestT (reason, _, _) -> reason | ObjSealT (reason, _) -> reason @@ -2215,67 +2528,75 @@ end = struct | ReposUseT (reason, _, _, _) -> reason | ResolveSpreadT (_, reason, _) -> reason | SentinelPropTestT (_, _, _, _, _, result) -> reason_of_t result - | SetElemT (_,reason,_,_,_) -> reason - | SetPropT (_,reason,_,_,_,_) -> reason - | SetPrivatePropT 
(_,reason,_,_,_,_,_) -> reason - | SetProtoT (reason,_) -> reason - | SpecializeT(_,_,reason,_,_,_) -> reason + | SetElemT (_, reason, _, _, _, _) -> reason + | SetPropT (_, reason, _, _, _, _, _) -> reason + | SetPrivatePropT (_, reason, _, _, _, _, _, _) -> reason + | SetProtoT (reason, _) -> reason + | SpecializeT (_, _, reason, _, _, _) -> reason | ObjKitT (_, reason, _, _, _) -> reason + | ModuleExportsAssignT (reason, _, _) -> reason | SubstOnPredT (reason, _, _) -> reason - | SuperT (_,reason,_) -> reason + | SuperT (_, reason, _) -> reason | TestPropT (reason, _, _, _) -> reason - | ThisSpecializeT(reason,_,_) -> reason + | ThisSpecializeT (reason, _, _) -> reason | ToStringT (reason, _) -> reason | UnaryMinusT (reason, _) -> reason - | UnifyT (_,t) -> reason_of_t t - | VarianceCheckT(reason,_,_) -> reason + | UnifyT (_, t) -> reason_of_t t + | VarianceCheckT (reason, _, _) -> reason | TypeAppVarianceCheckT (_, reason, _, _) -> reason | ConcretizeTypeAppsT (_, _, (_, _, _, reason), _) -> reason | CondT (reason, _, _, _) -> reason | MatchPropT (_, reason, _, _) -> reason + | ReactPropsToOut (reason, _) + | ReactInToProps (reason, _) -> + reason + | DestructuringT (reason, _, _, _) -> reason (* helper: we want the tvar id as well *) (* NOTE: uncalled for now, because ids are nondetermistic due to parallelism, which messes up test diffs. Should add a config, but for now must uncomment impl to use *) let reason_of_t_add_id = reason_of_t + (* function | OpenT (r, id) -> prefix_reason (spf "%d: " id) r | t -> reason_of_t t *) let reason_of_use_t_add_id = reason_of_use_t - let desc_of_t t = desc_of_reason (reason_of_t t) + let desc_of_t = reason_of_t %> desc_of_reason - let loc_of_t t = aloc_of_reason (reason_of_t t) |> ALoc.to_loc + let loc_of_t = reason_of_t %> aloc_of_reason - let def_loc_of_t t = def_loc_of_reason (reason_of_t t) + let def_loc_of_t = reason_of_t %> def_aloc_of_reason (* TODO make a type visitor *) let rec mod_reason_of_t f = function | OpenT (reason, id) -> OpenT (f reason, id) | AnnotT (reason, t, use_desc) -> AnnotT (f reason, t, use_desc) - | AnyWithLowerBoundT t -> AnyWithLowerBoundT (mod_reason_of_t f t) - | AnyWithUpperBoundT t -> AnyWithUpperBoundT (mod_reason_of_t f t) | MergedT (reason, uses) -> MergedT (f reason, uses) | BoundT (reason, name, polarity) -> BoundT (f reason, name, polarity) | InternalT (ChoiceKitT (reason, tool)) -> InternalT (ChoiceKitT (f reason, tool)) | TypeDestructorTriggerT (use_op, reason, repos, d, t) -> - TypeDestructorTriggerT (use_op, f reason, repos, d, t) + TypeDestructorTriggerT (use_op, f reason, repos, d, t) | CustomFunT (reason, kind) -> CustomFunT (f reason, kind) - | DefT (reason, t) -> DefT (f reason, t) - | EvalT (t, defer_use_t, id) -> - EvalT (t, mod_reason_of_defer_use_t f defer_use_t, id) + | DefT (reason, trust, t) -> DefT (f reason, trust, t) + | AnyT (reason, src) -> AnyT (f reason, src) + | UnionT (reason, src) -> UnionT (f reason, src) + | IntersectionT (reason, src) -> IntersectionT (f reason, src) + | MaybeT (reason, src) -> MaybeT (f reason, src) + | OptionalT (reason, src) -> OptionalT (f reason, src) + | EvalT (t, defer_use_t, id) -> EvalT (t, mod_reason_of_defer_use_t f defer_use_t, id) | ExactT (reason, t) -> ExactT (f reason, t) | ExistsT reason -> ExistsT (f reason) | InternalT (ExtendsT (reason, t1, t2)) -> InternalT (ExtendsT (f reason, t1, t2)) - | FunProtoApplyT (reason) -> FunProtoApplyT (f reason) - | FunProtoT (reason) -> FunProtoT (f reason) - | FunProtoBindT (reason) -> FunProtoBindT (f reason) - 
| FunProtoCallT (reason) -> FunProtoCallT (f reason) + | FunProtoApplyT reason -> FunProtoApplyT (f reason) + | FunProtoT reason -> FunProtoT (f reason) + | FunProtoBindT reason -> FunProtoBindT (f reason) + | FunProtoCallT reason -> FunProtoCallT (f reason) | KeysT (reason, t) -> KeysT (f reason, t) | ModuleT (reason, exports, is_strict) -> ModuleT (f reason, exports, is_strict) | NullProtoT reason -> NullProtoT (f reason) - | ObjProtoT (reason) -> ObjProtoT (f reason) + | ObjProtoT reason -> ObjProtoT (f reason) | MatchingPropT (reason, k, v) -> MatchingPropT (f reason, k, v) | OpaqueT (reason, opaquetype) -> OpaqueT (f reason, opaquetype) | OpenPredT (reason, t, p, n) -> OpenPredT (f reason, t, p, n) @@ -2285,9 +2606,10 @@ end = struct | ShapeT t -> ShapeT (mod_reason_of_t f t) | ThisClassT (reason, t) -> ThisClassT (f reason, t) | ThisTypeAppT (reason, t1, t2, t3) -> ThisTypeAppT (f reason, t1, t2, t3) + | TypeAppT (reason, t1, t2, t3) -> TypeAppT (f reason, t1, t2, t3) and mod_reason_of_defer_use_t f = function - | DestructuringT (reason, s) -> DestructuringT (f reason, s) + | LatentPredT (reason, p) -> LatentPredT (f reason, p) | TypeDestructorT (use_op, reason, s) -> TypeDestructorT (use_op, f reason, s) and mod_reason_of_use_t f = function @@ -2299,44 +2621,42 @@ end = struct | AssertBinaryInLHST reason -> AssertBinaryInLHST (f reason) | AssertBinaryInRHST reason -> AssertBinaryInRHST (f reason) | AssertForInRHST reason -> AssertForInRHST (f reason) - | AssertRestParamT reason -> AssertRestParamT (f reason) | AssertImportIsValueT (reason, name) -> AssertImportIsValueT (f reason, name) | BecomeT (reason, t) -> BecomeT (f reason, t) | BindT (use_op, reason, ft, pass) -> BindT (use_op, f reason, ft, pass) | CallElemT (reason_call, reason_lookup, t, ft) -> - CallElemT (f reason_call, reason_lookup, t, ft) - | CallLatentPredT (reason, b, k, l, t) -> - CallLatentPredT (f reason, b, k, l, t) - | CallOpenPredT (reason, sense, key, l, t) -> - CallOpenPredT (f reason, sense, key, l, t) + CallElemT (f reason_call, reason_lookup, t, ft) + | CallLatentPredT (reason, b, k, l, t) -> CallLatentPredT (f reason, b, k, l, t) + | CallOpenPredT (reason, sense, key, l, t) -> CallOpenPredT (f reason, sense, key, l, t) | CallT (use_op, reason, ft) -> CallT (use_op, f reason, ft) | ChoiceKitUseT (reason, tool) -> ChoiceKitUseT (f reason, tool) | CJSExtractNamedExportsT (reason, exports, t2) -> - CJSExtractNamedExportsT (f reason, exports, t2) + CJSExtractNamedExportsT (f reason, exports, t2) | CJSRequireT (reason, t, is_strict) -> CJSRequireT (f reason, t, is_strict) | ComparatorT (reason, flip, t) -> ComparatorT (f reason, flip, t) | ConstructorT (use_op, reason, targs, args, tout) -> - ConstructorT (use_op, f reason, targs, args, tout) + ConstructorT (use_op, f reason, targs, args, tout) | CopyNamedExportsT (reason, target_module_t, t_out) -> - CopyNamedExportsT(f reason, target_module_t, t_out) + CopyNamedExportsT (f reason, target_module_t, t_out) | CopyTypeExportsT (reason, target_module_t, t_out) -> - CopyTypeExportsT(f reason, target_module_t, t_out) + CopyTypeExportsT (f reason, target_module_t, t_out) | DebugPrintT reason -> DebugPrintT (f reason) | DebugSleepT reason -> DebugSleepT (f reason) | ElemT (use_op, reason, t, action) -> ElemT (use_op, f reason, t, action) | EqT (reason, flip, t) -> EqT (f reason, flip, t) - | ExportNamedT (reason, skip_dupes, tmap, t_out) -> - ExportNamedT(f reason, skip_dupes, tmap, t_out) + | ExportNamedT (reason, skip_dupes, tmap, export_kind, t_out) -> + 
ExportNamedT (f reason, skip_dupes, tmap, export_kind, t_out) | ExportTypeT (reason, skip_dupes, name, t, t_out) -> - ExportTypeT(f reason, skip_dupes, name, t, t_out) - | ExtendsUseT (use_op, reason, ts, t1, t2) -> - ExtendsUseT(use_op, f reason, ts, t1, t2) + ExportTypeT (f reason, skip_dupes, name, t, t_out) + | AssertExportIsTypeT (reason, export_name, t_out) -> + AssertExportIsTypeT (f reason, export_name, t_out) + | ExtendsUseT (use_op, reason, ts, t1, t2) -> ExtendsUseT (use_op, f reason, ts, t1, t2) | GetElemT (use_op, reason, it, et) -> GetElemT (use_op, f reason, it, et) | GetKeysT (reason, t) -> GetKeysT (f reason, t) | GetValuesT (reason, t) -> GetValuesT (f reason, t) | GetPropT (use_op, reason, n, t) -> GetPropT (use_op, f reason, n, t) | GetPrivatePropT (use_op, reason, name, bindings, static, t) -> - GetPrivatePropT (use_op, f reason, name, bindings, static, t) + GetPrivatePropT (use_op, f reason, name, bindings, static, t) | GetProtoT (reason, t) -> GetProtoT (f reason, t) | GetStaticsT (reason, t) -> GetStaticsT (f reason, t) | GuardT (pred, result, t) -> GuardT (pred, result, mod_reason_of_t f t) @@ -2345,27 +2665,24 @@ end = struct | IdxUnwrap (reason, t_out) -> IdxUnwrap (f reason, t_out) | ImplementsT (use_op, t) -> ImplementsT (use_op, mod_reason_of_t f t) | ImportDefaultT (reason, import_kind, name, t, is_strict) -> - ImportDefaultT (f reason, import_kind, name, t, is_strict) + ImportDefaultT (f reason, import_kind, name, t, is_strict) | ImportModuleNsT (reason, t, is_strict) -> ImportModuleNsT (f reason, t, is_strict) | ImportNamedT (reason, import_kind, name, t, module_name, is_strict) -> - ImportNamedT (f reason, import_kind, name, t, module_name, is_strict) + ImportNamedT (f reason, import_kind, name, t, module_name, is_strict) | ImportTypeofT (reason, name, t) -> ImportTypeofT (f reason, name, t) | ImportTypeT (reason, name, t) -> ImportTypeT (f reason, name, t) - | IntersectionPreprocessKitT (reason, tool) -> - IntersectionPreprocessKitT (f reason, tool) + | IntersectionPreprocessKitT (reason, tool) -> IntersectionPreprocessKitT (f reason, tool) | InvariantT reason -> InvariantT (f reason) | LookupT (reason, r2, ts, x, t) -> LookupT (f reason, r2, ts, x, t) | MakeExactT (reason, t) -> MakeExactT (f reason, t) - | MapTypeT (reason, kind, t) -> MapTypeT (f reason, kind, t) + | MapTypeT (use_op, reason, kind, t) -> MapTypeT (use_op, f reason, kind, t) | MethodT (use_op, reason_call, reason_lookup, name, ft, tm) -> - MethodT (use_op, f reason_call, reason_lookup, name, ft, tm) + MethodT (use_op, f reason_call, reason_lookup, name, ft, tm) | MixinT (reason, inst) -> MixinT (f reason, inst) | NotT (reason, t) -> NotT (f reason, t) | NullishCoalesceT (reason, t1, t2) -> NullishCoalesceT (f reason, t1, t2) - | ObjAssignToT (reason, t, t2, kind) -> - ObjAssignToT (f reason, t, t2, kind) - | ObjAssignFromT (reason, t, t2, kind) -> - ObjAssignFromT (f reason, t, t2, kind) + | ObjAssignToT (op, reason, t, t2, kind) -> ObjAssignToT (op, f reason, t, t2, kind) + | ObjAssignFromT (op, reason, t, t2, kind) -> ObjAssignFromT (op, f reason, t, t2, kind) | ObjFreezeT (reason, t) -> ObjFreezeT (f reason, t) | ObjRestT (reason, t, t2) -> ObjRestT (f reason, t, t2) | ObjSealT (reason, t) -> ObjSealT (f reason, t) @@ -2381,178 +2698,293 @@ end = struct | ResolveSpreadT (use_op, reason_op, resolve) -> ResolveSpreadT (use_op, f reason_op, resolve) | SentinelPropTestT (reason_op, l, key, sense, sentinel, result) -> SentinelPropTestT (reason_op, l, key, sense, sentinel, mod_reason_of_t 
f result) - | SetElemT (use_op, reason, it, et, t) -> SetElemT (use_op, f reason, it, et, t) - | SetPropT (use_op, reason, n, i, t, tp) -> SetPropT (use_op, f reason, n, i, t, tp) - | SetPrivatePropT (use_op, reason, n, scopes, static, t, tp) -> - SetPrivatePropT (use_op, f reason, n, scopes, static, t, tp) + | SetElemT (use_op, reason, it, mode, et, t) -> SetElemT (use_op, f reason, it, mode, et, t) + | SetPropT (use_op, reason, n, mode, i, t, tp) -> SetPropT (use_op, f reason, n, mode, i, t, tp) + | SetPrivatePropT (use_op, reason, n, mode, scopes, static, t, tp) -> + SetPrivatePropT (use_op, f reason, n, mode, scopes, static, t, tp) | SetProtoT (reason, t) -> SetProtoT (f reason, t) | SpecializeT (use_op, reason_op, reason_tapp, cache, ts, t) -> - SpecializeT (use_op, f reason_op, reason_tapp, cache, ts, t) + SpecializeT (use_op, f reason_op, reason_tapp, cache, ts, t) | ObjKitT (use_op, reason, resolve_tool, tool, tout) -> - ObjKitT (use_op, f reason, resolve_tool, tool, tout) + ObjKitT (use_op, f reason, resolve_tool, tool, tout) + | ModuleExportsAssignT (reason, ts, t) -> ModuleExportsAssignT (f reason, ts, t) | SubstOnPredT (reason, subst, t) -> SubstOnPredT (f reason, subst, t) | SuperT (op, reason, inst) -> SuperT (op, f reason, inst) | TestPropT (reason, id, n, t) -> TestPropT (f reason, id, n, t) - | ThisSpecializeT(reason, this, k) -> ThisSpecializeT (f reason, this, k) + | ThisSpecializeT (reason, this, k) -> ThisSpecializeT (f reason, this, k) | ToStringT (reason, t) -> ToStringT (f reason, t) | UnaryMinusT (reason, t) -> UnaryMinusT (f reason, t) | UnifyT (t, t2) -> UnifyT (mod_reason_of_t f t, mod_reason_of_t f t2) - | VarianceCheckT(reason, ts, polarity) -> - VarianceCheckT (f reason, ts, polarity) + | VarianceCheckT (reason, ts, polarity) -> VarianceCheckT (f reason, ts, polarity) | TypeAppVarianceCheckT (use_op, reason_op, reason_tapp, targs) -> - TypeAppVarianceCheckT (use_op, f reason_op, reason_tapp, targs) + TypeAppVarianceCheckT (use_op, f reason_op, reason_tapp, targs) | ConcretizeTypeAppsT (use_op, t1, (t2, ts2, op2, r2), targs) -> - ConcretizeTypeAppsT (use_op, t1, (t2, ts2, op2, f r2), targs) + ConcretizeTypeAppsT (use_op, t1, (t2, ts2, op2, f r2), targs) | CondT (reason, then_t, else_t, tout) -> CondT (f reason, then_t, else_t, tout) | MatchPropT (op, reason, prop, t) -> MatchPropT (op, f reason, prop, t) + | ReactPropsToOut (reason, t) -> ReactPropsToOut (f reason, t) + | ReactInToProps (reason, t) -> ReactInToProps (f reason, t) + | DestructuringT (reason, a, s, t) -> DestructuringT (f reason, a, s, t) and mod_reason_of_opt_use_t f = function - | OptCallT (use_op, reason, ft) -> OptCallT (use_op, reason, ft) - | OptGetPropT (use_op, reason, n) -> OptGetPropT (use_op, f reason, n) - | OptGetPrivatePropT (use_op, reason, name, bindings, static) -> - OptGetPrivatePropT (use_op, f reason, name, bindings, static) - | OptTestPropT (reason, id, n) -> OptTestPropT (f reason, id, n) - | OptGetElemT (use_op, reason, it) -> OptGetElemT (use_op, f reason, it) - let rec util_use_op_of_use_t: 'a. 
(use_t -> 'a) -> (use_t -> use_op -> (use_op -> use_t) -> 'a) -> use_t -> 'a = - fun nope util u -> - let util = util u in - let nested_util u2 make2 = - let result = util_use_op_of_use_t - (fun _ -> None) - (fun _ op make -> Some (op, make)) - u2 in - (match result with - | None -> nope u - | Some (op, make) -> util op (fun op -> make2 (make op)) - ) - in - match u with - | UseT (op, t) -> util op (fun op -> UseT (op, t)) - | BindT (op, r, f, b) -> util op (fun op -> BindT (op, r, f, b)) - | CallT (op, r, f) -> util op (fun op -> CallT (op, r, f)) - | MethodT (op, r1, r2, p, f, tm) -> util op (fun op -> MethodT (op, r1, r2, p, f, tm)) - | SetPropT (op, r, p, w, t, tp) -> util op (fun op -> SetPropT (op, r, p, w, t, tp)) - | SetPrivatePropT (op, r, s, c, b, t, tp) -> - util op (fun op -> SetPrivatePropT (op, r, s, c, b, t, tp)) - | GetPropT (op, r, p, t) -> util op (fun op -> GetPropT (op, r, p, t)) - | MatchPropT (op, r, p, t) -> util op (fun op -> MatchPropT (op, r, p, t)) - | GetPrivatePropT (op, r, s, c, b, t) -> util op (fun op -> GetPrivatePropT (op, r, s, c, b, t)) - | SetElemT (op, r, t1, t2, t3) -> util op (fun op -> SetElemT (op, r, t1, t2, t3)) - | GetElemT (op, r, t1, t2) -> util op (fun op -> GetElemT (op, r, t1, t2)) - | ReposLowerT (r, d, u2) -> nested_util u2 (fun u2 -> ReposLowerT (r, d, u2)) - | ReposUseT (r, d, op, t) -> util op (fun op -> ReposUseT (r, d, op, t)) - | ConstructorT (op, r, targs, args, t) -> util op (fun op -> ConstructorT (op, r, targs, args, t)) - | SuperT (op, r, i) -> util op (fun op -> SuperT (op, r, i)) - | AdderT (op, d, f, l, r) -> util op (fun op -> AdderT (op, d, f, l, r)) - | ImplementsT (op, t) -> util op (fun op -> ImplementsT (op, t)) - | ToStringT (r, u2) -> nested_util u2 (fun u2 -> ToStringT (r, u2)) - | SpecializeT (op, r1, r2, c, ts, t) -> util op (fun op -> SpecializeT (op, r1, r2, c, ts, t)) - | TypeAppVarianceCheckT (op, r1, r2, ts) -> - util op (fun op -> TypeAppVarianceCheckT (op, r1, r2, ts)) - | ConcretizeTypeAppsT (u, (ts1, op, r1), x2, b) -> - util op (fun op -> ConcretizeTypeAppsT (u, (ts1, op, r1), x2, b)) - | ArrRestT (op, r, i, t) -> util op (fun op -> ArrRestT (op, r, i, t)) - | HasOwnPropT (op, r, p) -> util op (fun op -> HasOwnPropT (op, r, p)) - | GetKeysT (r, u2) -> nested_util u2 (fun u2 -> GetKeysT (r, u2)) - | ElemT (op, r, t, a) -> util op (fun op -> ElemT (op, r, t, a)) - | ObjKitT (op, r, x, y, t) -> util op (fun op -> ObjKitT (op, r, x, y, t)) - | ReactKitT (op, r, t) -> util op (fun op -> ReactKitT (op, r, t)) - | ResolveSpreadT (op, r, s) -> util op (fun op -> ResolveSpreadT (op, r, s)) - | ExtendsUseT (op, r, ts, a, b) -> util op (fun op -> ExtendsUseT (op, r, ts, a, b)) - | TestPropT (_, _, _, _) - | CallElemT (_, _, _, _) - | GetStaticsT (_, _) - | GetProtoT (_, _) - | SetProtoT (_, _) - | MixinT (_, _) - | ComparatorT (_, _, _) - | UnaryMinusT (_, _) - | AssertArithmeticOperandT (_) - | AssertBinaryInLHST (_) - | AssertBinaryInRHST (_) - | AssertForInRHST (_) - | AssertRestParamT (_) - | PredicateT (_, _) - | GuardT (_, _, _) - | EqT (_, _, _) - | AndT (_, _, _) - | OrT (_, _, _) - | NullishCoalesceT (_, _, _) - | NotT (_, _) - | ThisSpecializeT (_, _, _) - | VarianceCheckT (_, _, _) - | LookupT (_, _, _, _, _) - | ObjAssignToT (_, _, _, _) - | ObjAssignFromT (_, _, _, _) - | ObjFreezeT (_, _) - | ObjRestT (_, _, _) - | ObjSealT (_, _) - | ObjTestProtoT (_, _) - | ObjTestT (_, _, _) - | UnifyT (_, _) - | BecomeT (_, _) - | GetValuesT (_, _) - | MakeExactT (_, _) - | CJSRequireT (_, _, _) - | 
ImportModuleNsT (_, _, _) - | ImportDefaultT (_, _, _, _, _) - | ImportNamedT (_, _, _, _, _, _) - | ImportTypeT (_, _, _) - | ImportTypeofT (_, _, _) - | AssertImportIsValueT (_, _) - | CJSExtractNamedExportsT (_, _, _) - | CopyNamedExportsT (_, _, _) - | CopyTypeExportsT (_, _, _) - | ExportNamedT (_, _, _, _) - | ExportTypeT (_, _, _, _, _) - | MapTypeT (_, _, _) - | ChoiceKitUseT (_, _) - | IntersectionPreprocessKitT (_, _) - | DebugPrintT (_) - | DebugSleepT (_) - | SentinelPropTestT (_, _, _, _, _, _) - | IdxUnwrap (_, _) - | IdxUnMaybeifyT (_, _) - | OptionalChainT (_, _, _) - | InvariantT _ - | CallLatentPredT (_, _, _, _, _) - | CallOpenPredT (_, _, _, _, _) - | SubstOnPredT (_, _, _) - | RefineT (_, _, _) - | CondT (_, _, _, _) - -> nope u - - let use_op_of_use_t = - util_use_op_of_use_t - (fun _ -> None) - (fun _ op _ -> Some op) + | OptCallT (use_op, reason, ft) -> OptCallT (use_op, reason, ft) + | OptGetPropT (use_op, reason, n) -> OptGetPropT (use_op, f reason, n) + | OptGetPrivatePropT (use_op, reason, name, bindings, static) -> + OptGetPrivatePropT (use_op, f reason, name, bindings, static) + | OptTestPropT (reason, id, n) -> OptTestPropT (f reason, id, n) + | OptGetElemT (use_op, reason, it) -> OptGetElemT (use_op, f reason, it) + + let rec util_use_op_of_use_t : + 'a. (use_t -> 'a) -> (use_t -> use_op -> (use_op -> use_t) -> 'a) -> use_t -> 'a = + fun nope util u -> + let util = util u in + let nested_util u2 make2 = + let result = util_use_op_of_use_t (fun _ -> None) (fun _ op make -> Some (op, make)) u2 in + match result with + | None -> nope u + | Some (op, make) -> util op (fun op -> make2 (make op)) + in + match u with + | UseT (op, t) -> util op (fun op -> UseT (op, t)) + | BindT (op, r, f, b) -> util op (fun op -> BindT (op, r, f, b)) + | CallT (op, r, f) -> util op (fun op -> CallT (op, r, f)) + | MethodT (op, r1, r2, p, f, tm) -> util op (fun op -> MethodT (op, r1, r2, p, f, tm)) + | SetPropT (op, r, p, m, w, t, tp) -> util op (fun op -> SetPropT (op, r, p, m, w, t, tp)) + | SetPrivatePropT (op, r, s, m, c, b, t, tp) -> + util op (fun op -> SetPrivatePropT (op, r, s, m, c, b, t, tp)) + | GetPropT (op, r, p, t) -> util op (fun op -> GetPropT (op, r, p, t)) + | MatchPropT (op, r, p, t) -> util op (fun op -> MatchPropT (op, r, p, t)) + | GetPrivatePropT (op, r, s, c, b, t) -> + util op (fun op -> GetPrivatePropT (op, r, s, c, b, t)) + | SetElemT (op, r, t1, m, t2, t3) -> util op (fun op -> SetElemT (op, r, t1, m, t2, t3)) + | GetElemT (op, r, t1, t2) -> util op (fun op -> GetElemT (op, r, t1, t2)) + | ReposLowerT (r, d, u2) -> nested_util u2 (fun u2 -> ReposLowerT (r, d, u2)) + | ReposUseT (r, d, op, t) -> util op (fun op -> ReposUseT (r, d, op, t)) + | ConstructorT (op, r, targs, args, t) -> + util op (fun op -> ConstructorT (op, r, targs, args, t)) + | SuperT (op, r, i) -> util op (fun op -> SuperT (op, r, i)) + | AdderT (op, d, f, l, r) -> util op (fun op -> AdderT (op, d, f, l, r)) + | ImplementsT (op, t) -> util op (fun op -> ImplementsT (op, t)) + | ToStringT (r, u2) -> nested_util u2 (fun u2 -> ToStringT (r, u2)) + | SpecializeT (op, r1, r2, c, ts, t) -> util op (fun op -> SpecializeT (op, r1, r2, c, ts, t)) + | TypeAppVarianceCheckT (op, r1, r2, ts) -> + util op (fun op -> TypeAppVarianceCheckT (op, r1, r2, ts)) + | ConcretizeTypeAppsT (u, (ts1, op, r1), x2, b) -> + util op (fun op -> ConcretizeTypeAppsT (u, (ts1, op, r1), x2, b)) + | ArrRestT (op, r, i, t) -> util op (fun op -> ArrRestT (op, r, i, t)) + | HasOwnPropT (op, r, p) -> util op (fun op -> 
HasOwnPropT (op, r, p)) + | GetKeysT (r, u2) -> nested_util u2 (fun u2 -> GetKeysT (r, u2)) + | ElemT (op, r, t, a) -> util op (fun op -> ElemT (op, r, t, a)) + | ObjKitT (op, r, x, y, t) -> util op (fun op -> ObjKitT (op, r, x, y, t)) + | ReactKitT (op, r, t) -> util op (fun op -> ReactKitT (op, r, t)) + | ResolveSpreadT (op, r, s) -> util op (fun op -> ResolveSpreadT (op, r, s)) + | ExtendsUseT (op, r, ts, a, b) -> util op (fun op -> ExtendsUseT (op, r, ts, a, b)) + | MapTypeT (op, r, k, t) -> util op (fun op -> MapTypeT (op, r, k, t)) + | ObjAssignToT (op, r, t1, t2, k) -> util op (fun op -> ObjAssignToT (op, r, t1, t2, k)) + | ObjAssignFromT (op, r, t1, t2, k) -> util op (fun op -> ObjAssignFromT (op, r, t1, t2, k)) + | MakeExactT (r, Lower (op, t)) -> util op (fun op -> MakeExactT (r, Lower (op, t))) + | MakeExactT (_, _) + | TestPropT (_, _, _, _) + | CallElemT (_, _, _, _) + | GetStaticsT (_, _) + | GetProtoT (_, _) + | SetProtoT (_, _) + | MixinT (_, _) + | ComparatorT (_, _, _) + | UnaryMinusT (_, _) + | AssertArithmeticOperandT _ + | AssertBinaryInLHST _ + | AssertBinaryInRHST _ + | AssertForInRHST _ + | PredicateT (_, _) + | GuardT (_, _, _) + | EqT (_, _, _) + | AndT (_, _, _) + | OrT (_, _, _) + | NullishCoalesceT (_, _, _) + | NotT (_, _) + | ThisSpecializeT (_, _, _) + | VarianceCheckT (_, _, _) + | LookupT (_, _, _, _, _) + | ObjFreezeT (_, _) + | ObjRestT (_, _, _) + | ObjSealT (_, _) + | ObjTestProtoT (_, _) + | ObjTestT (_, _, _) + | UnifyT (_, _) + | BecomeT (_, _) + | GetValuesT (_, _) + | CJSRequireT (_, _, _) + | ImportModuleNsT (_, _, _) + | ImportDefaultT (_, _, _, _, _) + | ImportNamedT (_, _, _, _, _, _) + | ImportTypeT (_, _, _) + | ImportTypeofT (_, _, _) + | AssertImportIsValueT (_, _) + | CJSExtractNamedExportsT (_, _, _) + | CopyNamedExportsT (_, _, _) + | CopyTypeExportsT (_, _, _) + | ExportNamedT (_, _, _, _, _) + | ExportTypeT (_, _, _, _, _) + | AssertExportIsTypeT (_, _, _) + | ChoiceKitUseT (_, _) + | IntersectionPreprocessKitT (_, _) + | DebugPrintT _ + | DebugSleepT _ + | SentinelPropTestT (_, _, _, _, _, _) + | IdxUnwrap (_, _) + | IdxUnMaybeifyT (_, _) + | OptionalChainT (_, _, _) + | InvariantT _ + | CallLatentPredT (_, _, _, _, _) + | CallOpenPredT (_, _, _, _, _) + | SubstOnPredT (_, _, _) + | RefineT (_, _, _) + | CondT (_, _, _, _) + | ReactPropsToOut _ + | ReactInToProps _ + | DestructuringT _ + | ModuleExportsAssignT _ -> + nope u + + let use_op_of_use_t = util_use_op_of_use_t (fun _ -> None) (fun _ op _ -> Some op) let mod_use_op_of_use_t f = util_use_op_of_use_t (fun u -> u) (fun u op make -> let op' = f op in - if op' == op then u else make op') + if op' == op then + u + else + make op') + + let rec mod_root_of_use_op f = function + | Op op -> Op (f op) + | Frame (fr, o) -> Frame (fr, mod_root_of_use_op f o) + + let rec mod_loc_of_virtual_use_op f = + let mod_reason = Reason.map_reason_locs f in + let mod_loc_of_root_use_op f = function + | InitField { op; body } -> InitField { op = mod_reason op; body = mod_reason body } + | ObjectSpread { op } -> ObjectSpread { op = mod_reason op } + | ObjectChain { op } -> ObjectChain { op = mod_reason op } + | Addition { op; left; right } -> + Addition { op = mod_reason op; left = mod_reason left; right = mod_reason right } + | AssignVar { var; init } -> + AssignVar { var = Option.map ~f:mod_reason var; init = mod_reason init } + | Cast { lower; upper } -> Cast { lower = mod_reason lower; upper = mod_reason upper } + | ClassExtendsCheck { def; name; extends } -> + ClassExtendsCheck + { def = 
mod_reason def; name = mod_reason name; extends = mod_reason extends } + | ClassImplementsCheck { def; name; implements } -> + ClassImplementsCheck + { def = mod_reason def; name = mod_reason name; implements = mod_reason implements } + | ClassOwnProtoCheck { own_loc; proto_loc; prop } -> + ClassOwnProtoCheck + { prop; own_loc = Option.map ~f own_loc; proto_loc = Option.map ~f proto_loc } + | Coercion { from; target } -> + Coercion { from = mod_reason from; target = mod_reason target } + | DeleteProperty { lhs; prop } -> + DeleteProperty { lhs = mod_reason lhs; prop = mod_reason prop } + | DeleteVar { var } -> DeleteVar { var = mod_reason var } + | FunCall { op; fn; args; local } -> + FunCall + { + local; + op = mod_reason op; + fn = mod_reason fn; + args = Core_list.map ~f:mod_reason args; + } + | FunCallMethod { op; fn; args; prop; local } -> + FunCallMethod + { + local; + op = mod_reason op; + fn = mod_reason fn; + prop = mod_reason prop; + args = Core_list.map ~f:mod_reason args; + } + | FunReturnStatement { value } -> FunReturnStatement { value = mod_reason value } + | FunImplicitReturn { fn; upper } -> + FunImplicitReturn { fn = mod_reason fn; upper = mod_reason upper } + | GeneratorYield { value } -> GeneratorYield { value = mod_reason value } + | GetProperty reason -> GetProperty (mod_reason reason) + | Internal o -> Internal o + | JSXCreateElement { op; component } -> + JSXCreateElement { op = mod_reason op; component = mod_reason component } + | ReactCreateElementCall { op; component; children } -> + ReactCreateElementCall + { op = mod_reason op; component = mod_reason component; children = f children } + | ReactGetIntrinsic { literal } -> ReactGetIntrinsic { literal = mod_reason literal } + | Speculation op -> Speculation (mod_loc_of_virtual_use_op f op) + | TypeApplication { type' } -> TypeApplication { type' = mod_reason type' } + | SetProperty { lhs; prop; value } -> + SetProperty { lhs = mod_reason lhs; prop = mod_reason prop; value = mod_reason value } + | UnknownUse -> UnknownUse + in + let mod_loc_of_frame_use_op = function + | ArrayElementCompatibility { lower; upper } -> + ArrayElementCompatibility { lower = mod_reason lower; upper = mod_reason upper } + | FunCompatibility { lower; upper } -> + FunCompatibility { lower = mod_reason lower; upper = mod_reason upper } + | FunMissingArg { n; op; def } -> + FunMissingArg { n; op = mod_reason op; def = mod_reason def } + | FunParam { n; name; lower; upper } -> + FunParam { n; name; lower = mod_reason lower; upper = mod_reason upper } + | FunRestParam { lower; upper } -> + FunRestParam { lower = mod_reason lower; upper = mod_reason upper } + | FunReturn { lower; upper } -> + FunReturn { lower = mod_reason lower; upper = mod_reason upper } + | ImplicitTypeParam -> ImplicitTypeParam + | IndexerKeyCompatibility { lower; upper } -> + IndexerKeyCompatibility { lower = mod_reason lower; upper = mod_reason upper } + | CallFunCompatibility { n } -> CallFunCompatibility { n } + | TupleMapFunCompatibility { value } -> TupleMapFunCompatibility { value = mod_reason value } + | ObjMapFunCompatibility { value } -> ObjMapFunCompatibility { value = mod_reason value } + | ObjMapiFunCompatibility { key; value } -> + ObjMapiFunCompatibility { key = mod_reason key; value = mod_reason value } + | PropertyCompatibility { prop; lower; upper } -> + PropertyCompatibility { prop; lower = mod_reason lower; upper = mod_reason upper } + | ReactConfigCheck -> ReactConfigCheck + | ReactGetConfig o -> ReactGetConfig o + | TupleElementCompatibility { n; 
lower; upper } -> + TupleElementCompatibility { n; lower = mod_reason lower; upper = mod_reason upper } + | TypeArgCompatibility { name; targ; lower; upper; polarity } -> + TypeArgCompatibility + { + name; + polarity; + targ = mod_reason targ; + lower = mod_reason lower; + upper = mod_reason upper; + } + | TypeParamBound o -> TypeParamBound o + | UnifyFlip -> UnifyFlip + in + function + | Op op -> Op (mod_loc_of_root_use_op f op) + | Frame (fr, o) -> Frame (mod_loc_of_frame_use_op fr, mod_loc_of_virtual_use_op f o) (* type comparison mod reason *) let reasonless_compare = let rec swap_reason t2 t1 = - match t2, t1 with + match (t2, t1) with (* In reposition we also recurse and reposition some nested types. We need * to make sure we swap the types for these reasons as well. Otherwise our * optimized union ~> union check will not pass. *) - | DefT (_, MaybeT t2), DefT (r, MaybeT t1) -> DefT (r, MaybeT (swap_reason t2 t1)) - | DefT (_, OptionalT t2), DefT (r, OptionalT t1) -> DefT (r, OptionalT (swap_reason t2 t1)) - | ExactT (_, t2), ExactT (r, t1) -> ExactT (r, swap_reason t2 t1) - + | (MaybeT (_, t2), MaybeT (r, t1)) -> MaybeT (r, swap_reason t2 t1) + | (OptionalT (_, t2), OptionalT (r, t1)) -> OptionalT (r, swap_reason t2 t1) + | (ExactT (_, t2), ExactT (r, t1)) -> ExactT (r, swap_reason t2 t1) | _ -> mod_reason_of_t (fun _ -> reason_of_t t1) t2 in fun t1 t2 -> - if t1 == t2 then 0 else - compare t1 (swap_reason t2 t1) + if t1 == t2 then + 0 + else + compare t1 (swap_reason t2 t1) - let reasonless_eq t1 t2 = - reasonless_compare t1 t2 = 0 + let reasonless_eq t1 t2 = reasonless_compare t1 t2 = 0 let literal_eq x = function | Literal (_, y) -> x = y @@ -2568,24 +3000,38 @@ end = struct | Some y -> x = y | None -> false - let quick_subtype t1 t2 = - match t1, t2 with - | DefT (_, NumT _), DefT (_, NumT _) - | DefT (_, SingletonNumT _), DefT (_, NumT _) - | DefT (_, StrT _), DefT (_, StrT _) - | DefT (_, SingletonStrT _), DefT (_, StrT _) - | DefT (_, BoolT _), DefT (_, BoolT _) - | DefT (_, SingletonBoolT _), DefT (_, BoolT _) - | DefT (_, NullT), DefT (_, NullT) - | DefT (_, VoidT), DefT (_, VoidT) - | DefT (_, EmptyT), _ - | _, DefT (_, MixedT _) - -> true - | DefT (_, StrT actual), DefT (_, SingletonStrT expected) -> literal_eq expected actual - | DefT (_, NumT actual), DefT (_, SingletonNumT expected) -> number_literal_eq expected actual - | DefT (_, BoolT actual), DefT (_, SingletonBoolT expected) -> boolean_literal_eq expected actual - | _ -> reasonless_eq t1 t2 + let trust_subtype_fixed tr1 tr2 = + match (Trust.expand tr1, Trust.expand tr2) with + | (Trust.Qualifier trust1, Trust.Qualifier trust2) -> Trust.subtype_trust trust1 trust2 + | _ -> false + let quick_subtype trust_checked t1 t2 = + Trust.( + match (t1, t2) with + | (DefT (_, ltrust, NumT _), DefT (_, rtrust, NumT _)) + | (DefT (_, ltrust, SingletonNumT _), DefT (_, rtrust, NumT _)) + | (DefT (_, ltrust, StrT _), DefT (_, rtrust, StrT _)) + | (DefT (_, ltrust, SingletonStrT _), DefT (_, rtrust, StrT _)) + | (DefT (_, ltrust, BoolT _), DefT (_, rtrust, BoolT _)) + | (DefT (_, ltrust, SingletonBoolT _), DefT (_, rtrust, BoolT _)) + | (DefT (_, ltrust, NullT), DefT (_, rtrust, NullT)) + | (DefT (_, ltrust, VoidT), DefT (_, rtrust, VoidT)) + | (DefT (_, ltrust, EmptyT _), DefT (_, rtrust, _)) + | (DefT (_, ltrust, _), DefT (_, rtrust, MixedT _)) -> + (not trust_checked) || trust_subtype_fixed ltrust rtrust + | (DefT (_, ltrust, EmptyT _), _) -> + (not trust_checked) || trust_value_map ~f:is_public ~default:false ltrust + | (_, 
DefT (_, rtrust, MixedT _)) -> + (not trust_checked) || trust_value_map ~f:is_tainted ~default:false rtrust + | (DefT (_, ltrust, StrT actual), DefT (_, rtrust, SingletonStrT expected)) -> + ((not trust_checked) || trust_subtype_fixed ltrust rtrust) && literal_eq expected actual + | (DefT (_, ltrust, NumT actual), DefT (_, rtrust, SingletonNumT expected)) -> + ((not trust_checked) || trust_subtype_fixed ltrust rtrust) + && number_literal_eq expected actual + | (DefT (_, ltrust, BoolT actual), DefT (_, rtrust, SingletonBoolT expected)) -> + ((not trust_checked) || trust_subtype_fixed ltrust rtrust) + && boolean_literal_eq expected actual + | _ -> reasonless_eq t1 t2) end (* Type scheme: a type and an attendant environment of type parameters. @@ -2599,6 +3045,12 @@ end include TypeTerm include TypeUtil +include Trust + +(**** Trust utilities ****) + +let with_trust (trust_constructor : unit -> trust_rep) (type_constructor : trust_rep -> t) : t = + trust_constructor () |> type_constructor (*********************************************************) @@ -2606,79 +3058,169 @@ let compare = Pervasives.compare let open_tvar tvar = match tvar with - | OpenT(reason,id) -> (reason,id) + | OpenT (reason, id) -> (reason, id) | _ -> assert false module type PrimitiveType = sig - val desc: reason_desc - val make: reason -> t + val desc : reason_desc + + val make : reason -> trust_rep -> t end -module Primitive (P: PrimitiveType) = struct +module Primitive (P : PrimitiveType) = struct let desc = P.desc + let at tok = P.make (annot_reason (mk_reason desc tok)) - let why reason = P.make (replace_reason_const ~keep_def_loc:true desc reason) + + let why reason = P.make (replace_desc_reason desc reason) + let make = P.make end module NumT = Primitive (struct let desc = RNumber - let make r = DefT (r, NumT AnyLiteral) + + let make r trust = DefT (r, trust, NumT AnyLiteral) end) module StrT = Primitive (struct let desc = RString - let make r = DefT (r, StrT AnyLiteral) + + let make r trust = DefT (r, trust, StrT AnyLiteral) end) module BoolT = Primitive (struct let desc = RBoolean - let make r = DefT (r, BoolT None) + + let make r trust = DefT (r, trust, BoolT None) end) module MixedT = Primitive (struct let desc = RMixed - let make r = DefT (r, MixedT Mixed_everything) + + let make r trust = DefT (r, trust, MixedT Mixed_everything) end) module EmptyT = Primitive (struct let desc = REmpty - let make r = DefT (r, EmptyT) -end) -module AnyT = Primitive (struct - let desc = RAny - let make r = DefT (r, AnyT) + let make r trust = DefT (r, trust, EmptyT Bottom) end) +module AnyT = struct + let desc = function + | Annotated -> RAnyExplicit + | _ -> RAnyImplicit + + let make source r = AnyT (r, source) + + let at source = mk_reason (desc source) %> annot_reason %> make source + + let why source = replace_desc_reason (desc source) %> make source + + let annot = why Annotated + + let error = why AnyError + + let untyped = why Untyped + + let locationless source = desc source |> locationless_reason |> make source + + let source = function + | AnyT (_, s) -> s + | _ -> failwith "not an any type" +end + +module Unsoundness = struct + let constructor = Unsound Constructor + + let computed_nonlit_key = Unsound ComputedNonLiteralKey + + let function_proto = Unsound FunctionPrototype + + let merged = Unsound Merged + + let instance_of_refi = Unsound InstanceOfRefinement + + let unresolved = Unsound UnresolvedType + + let resolve_spread = Unsound ResolveSpread + + let unimplemented = Unsound Unimplemented + + let weak_context = 
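(* quick_subtype above now takes a trust_checked flag: the structural cases are
   the same as before, but each one additionally requires the two trust
   annotations to be compatible, and only when trust checking is enabled. A
   standalone sketch of that shape over a two-point toy lattice; the lattice
   and its ordering are illustrative, not Flow's Trust representation. *)
type trust = Trusted | Tainted

type ty =
  | Num of trust
  | Str of trust
  | Mixed of trust

let trust_subtype tr1 tr2 =
  match (tr1, tr2) with
  | (Trusted, _) -> true
  | (_, Tainted) -> true
  | (Tainted, Trusted) -> false

let quick_subtype trust_checked t1 t2 =
  (* The trust comparison only constrains the answer when the flag is set. *)
  let trust_ok tr1 tr2 = (not trust_checked) || trust_subtype tr1 tr2 in
  match (t1, t2) with
  | (Num tr1, Num tr2)
  | (Str tr1, Str tr2)
  | ((Num tr1 | Str tr1 | Mixed tr1), Mixed tr2) ->
    trust_ok tr1 tr2
  | _ -> false

let () =
  assert (quick_subtype false (Num Tainted) (Num Trusted));
  assert (not (quick_subtype true (Num Tainted) (Num Trusted)));
  assert (quick_subtype true (Str Trusted) (Mixed Tainted))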
Unsound WeakContext + + let inference_hooks = Unsound InferenceHooks + + let exports = Unsound Exports + + let existential = Unsound Existential + + let bound_fn_this = Unsound BoundFunctionThis + + let dummy_static = Unsound DummyStatic + + let merged_any = AnyT.make merged + + let instance_of_refi_any = AnyT.make instance_of_refi + + let unresolved_any = AnyT.make unresolved + + let resolve_spread_any = AnyT.make resolve_spread + + let constructor_any = AnyT.make constructor + + let function_proto_any = AnyT.make function_proto + + let computed_nonlit_key_any = AnyT.make computed_nonlit_key + + let unimplemented_any = AnyT.make unimplemented + + let weak_context_any = AnyT.make weak_context + + let inference_hooks_any = AnyT.make inference_hooks + + let exports_any = AnyT.make exports + + let existential_any = AnyT.make existential + + let bound_fn_this_any = AnyT.make bound_fn_this + + let dummy_static_any = AnyT.make dummy_static + + let why kind = Unsound kind |> AnyT.why + + let at kind = Unsound kind |> AnyT.at + + (* Unsound types would be too noisy if we banned them in exports, + so we allow them for now *) + let banned_in_exports = function + | Unsound _ -> false + | _ -> true +end + module VoidT = Primitive (struct let desc = RVoid - let make r = DefT (r, VoidT) + + let make r trust = DefT (r, trust, VoidT) end) module NullT = Primitive (struct let desc = RNull - let make r = DefT (r, NullT) + + let make r trust = DefT (r, trust, NullT) end) module ObjProtoT = Primitive (struct let desc = RDummyPrototype - let make r = ObjProtoT r + + let make r _ = ObjProtoT r end) module NullProtoT = Primitive (struct let desc = RNull - let make r = NullProtoT r -end) - -module AnyObjT = Primitive (struct - let desc = RAnyObject - let make r = DefT (r, AnyObjT) -end) -module AnyFunT = Primitive (struct - let desc = RAnyFunction - let make r = DefT (r, AnyFunT) + let make r _ = NullProtoT r end) (* USE WITH CAUTION!!! Locationless types should not leak to errors, otherwise @@ -2688,15 +3230,15 @@ end) doesn't cause errors, locationless `AnyT` is OK. *) module Locationless = struct - module LocationLess (P: PrimitiveType) = struct + module LocationLess (P : PrimitiveType) = struct let t = P.make (locationless_reason P.desc) end + module NumT = LocationLess (NumT) module StrT = LocationLess (StrT) module BoolT = LocationLess (BoolT) module MixedT = LocationLess (MixedT) module EmptyT = LocationLess (EmptyT) - module AnyT = LocationLess (AnyT) module VoidT = LocationLess (VoidT) module NullT = LocationLess (NullT) end @@ -2713,21 +3255,32 @@ let is_use = function (* not all so-called def types can appear as use types *) let is_proper_def = function - | InternalT _ -> false + | InternalT _ + | MatchingPropT _ -> + false + | _ -> true + +(* not all use types should appear in "merged" types *) +let is_proper_use = function + (* Speculation should be completed by the end of merge. This does not hold + today because non-0->1 things are erroneously considered 0->1, specifically + type parameters and sometimes eval types. Until this situation is fixed, we + can at least avoid these things leaking into dependent merge steps. 
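(* The primitive modules above (NumT, StrT, VoidT, NullT, ...) are all produced
   by the Primitive functor: each one supplies only a reason description and a
   constructor, and the functor derives the shared helpers. A standalone sketch
   of the pattern with stand-in reason and type definitions; the trust argument
   threaded through the real version is omitted here. *)
type loc = int

type reason = { desc : string; loc : loc }

type ty =
  | Num of reason
  | Str of reason

module type PrimitiveType = sig
  val desc : string

  val make : reason -> ty
end

module Primitive (P : PrimitiveType) = struct
  (* [at] builds a fresh reason at a location; [why] reuses an existing reason
     but replaces its description. *)
  let at loc = P.make { desc = P.desc; loc }

  let why reason = P.make { reason with desc = P.desc }

  let make = P.make
end

module NumT = Primitive (struct
  let desc = "number"

  let make r = Num r
end)

module StrT = Primitive (struct
  let desc = "string"

  let make r = Str r
end)

let () =
  assert (NumT.at 7 = Num { desc = "number"; loc = 7 });
  assert (StrT.why { desc = "string literal"; loc = 3 } = Str { desc = "string"; loc = 3 })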
*) + | ChoiceKitUseT _ -> false | _ -> true (* convenience *) let is_bot = function -| DefT (_, EmptyT) -> true -| _ -> false + | DefT (_, _, EmptyT _) -> true + | _ -> false let is_top = function -| DefT (_, MixedT _) -> true -| _ -> false + | DefT (_, _, MixedT _) -> true + | _ -> false let is_any = function -| DefT (_, AnyT) -> true -| _ -> false + | AnyT _ -> true + | _ -> false (* Primitives, like string, will be promoted to their wrapper object types for * certain operations, like GetPropT, but not for others, like `UseT _`. *) @@ -2738,176 +3291,73 @@ let primitive_promoting_use_t = function | GetPrivatePropT _ | GetProtoT _ | MethodT _ - | TestPropT _ - -> true - + | TestPropT _ -> + true (* "internal" use types, which should not be called directly on primitives, * but it's OK if they are in practice. TODO: consider making this an internal * error *) - | LookupT _ - -> true - + | LookupT _ -> true (* TODO: enumerate all use types *) | _ -> false -(* Use types trapped for any propagation *) -let any_propagating_use_t = function - | AdderT _ - | AndT _ - | ArrRestT _ - | BecomeT _ - | BindT _ - | CJSExtractNamedExportsT _ - | CJSRequireT _ - | CallElemT _ - | CallLatentPredT _ - | CallOpenPredT _ - | CallT _ - | ChoiceKitUseT _ - | ConstructorT _ - | CopyNamedExportsT _ - | CopyTypeExportsT _ - | ElemT _ - | ExportNamedT _ - | ExportTypeT _ - | GetElemT _ - | GetKeysT _ - | GetValuesT _ - | GetPropT _ - | MatchPropT _ - | GetPrivatePropT _ - | GetProtoT _ - | GetStaticsT _ - | GuardT _ - | IdxUnMaybeifyT _ - | IdxUnwrap _ - | ImportDefaultT _ - | ImportModuleNsT _ - | ImportNamedT _ - | ImportTypeT _ - | ImportTypeofT _ - | IntersectionPreprocessKitT _ - | LookupT _ - | MakeExactT _ - | MapTypeT _ - | MethodT _ - | MixinT _ - | NotT _ - | NullishCoalesceT _ - | ObjFreezeT _ - | ObjRestT _ - | ObjSealT _ - | ObjKitT _ - | ObjTestProtoT _ - | ObjTestT _ - | OptionalChainT _ - | OrT _ - | PredicateT _ - | ReactKitT _ - | RefineT _ - | ReposLowerT _ - | ReposUseT _ - | ResolveSpreadT _ - | SentinelPropTestT _ - | SetElemT _ - | SetPropT _ - | SpecializeT _ - | TestPropT _ - | ThisSpecializeT _ - | ToStringT _ - | UnaryMinusT _ - | UnifyT _ - | UseT (_, DefT (_, ClassT _)) (* mk_instance ~for_type:false *) - | UseT (_, DefT (_, MaybeT _)) (* used to filter maybe *) - | UseT (_, DefT (_, OptionalT _)) (* used to filter optional *) - | UseT (_, DefT (_, TypeT _)) (* import type *) - | CondT _ - -> true - - (* These types have no t_out, so can't propagate anything *) - | AssertArithmeticOperandT _ - | AssertBinaryInLHST _ - | AssertBinaryInRHST _ - | AssertForInRHST _ - | AssertImportIsValueT _ - | AssertRestParamT _ - | ComparatorT _ - | DebugPrintT _ - | DebugSleepT _ - | EqT _ - | HasOwnPropT _ - | ImplementsT _ - | InvariantT _ - | SetPrivatePropT _ - | SetProtoT _ - | SuperT _ - | TypeAppVarianceCheckT _ - | VarianceCheckT _ - | ConcretizeTypeAppsT _ - | ExtendsUseT _ - -> false - - (* TODO: Figure out if these should be true or false *) - | ObjAssignFromT _ - | ObjAssignToT _ - | SubstOnPredT _ - | UseT _ - -> false - let rec fold_use_op f1 f2 = function -| Op root -> f1 root -| Frame (frame, use_op) -> - let acc = fold_use_op f1 f2 use_op in - f2 acc frame + | Op root -> f1 root + | Frame (frame, use_op) -> + let acc = fold_use_op f1 f2 use_op in + f2 acc frame let rec root_of_use_op = function -| Op use_op -> use_op -| Frame (_, use_op) -> root_of_use_op use_op + | Op use_op -> use_op + | Frame (_, use_op) -> root_of_use_op use_op let replace_speculation_root_use_op = let rec loop 
new_parent_use_op = function - | Op (Speculation _) -> Ok new_parent_use_op - | Op _ -> Error new_parent_use_op - | (Frame (frame, parent_use_op)) as use_op -> - let parent_use_op' = loop new_parent_use_op parent_use_op in - (match parent_use_op' with - | (Error _) as error -> error - | Ok parent_use_op' -> - if parent_use_op' == parent_use_op then - Ok use_op - else - Ok (Frame (frame, parent_use_op')) - ) + | Op (Speculation _) -> Ok new_parent_use_op + | Op _ -> Error new_parent_use_op + | Frame (frame, parent_use_op) as use_op -> + let parent_use_op' = loop new_parent_use_op parent_use_op in + (match parent_use_op' with + | Error _ as error -> error + | Ok parent_use_op' -> + if parent_use_op' == parent_use_op then + Ok use_op + else + Ok (Frame (frame, parent_use_op'))) in fun new_parent_use_op use_op -> match loop new_parent_use_op use_op with | Ok use_op -> use_op | Error use_op -> use_op -let loc_of_root_use_op = function -| Addition {op; _} -| AssignVar {init=op; _} -| Cast {lower=op; _} -| ClassExtendsCheck {def=op; _} -| ClassImplementsCheck {def=op; _} -| Coercion {from=op; _} -| FunCall {op; _} -| FunCallMethod {op; _} -| FunReturnStatement {value=op} -| FunImplicitReturn {upper=op; _} -| GeneratorYield {value=op} -| GetProperty op -| JSXCreateElement {op; _} -| ReactCreateElementCall {op; _} -| TypeApplication {type'=op} -| SetProperty {value=op; _} - -> aloc_of_reason op |> ALoc.to_loc -| ReactGetIntrinsic _ -| Speculation _ -| Internal _ -| UnknownUse -| ClassOwnProtoCheck _ - -> Loc.none +let aloc_of_root_use_op : root_use_op -> ALoc.t = function + | InitField { op; _ } + | ObjectSpread { op } + | ObjectChain { op } + | Addition { op; _ } + | AssignVar { init = op; _ } + | Cast { lower = op; _ } + | ClassExtendsCheck { def = op; _ } + | ClassImplementsCheck { def = op; _ } + | Coercion { from = op; _ } + | DeleteProperty { lhs = op; _ } + | DeleteVar { var = op; _ } + | FunCall { op; _ } + | FunCallMethod { op; _ } + | FunReturnStatement { value = op } + | FunImplicitReturn { upper = op; _ } + | GeneratorYield { value = op } + | GetProperty op + | JSXCreateElement { op; _ } + | ReactCreateElementCall { op; _ } + | TypeApplication { type' = op } + | SetProperty { value = op; _ } -> + aloc_of_reason op + | ReactGetIntrinsic _ + | Speculation _ + | Internal _ + | UnknownUse + | ClassOwnProtoCheck _ -> + ALoc.none (* Printing some types in parseable form relies on particular formats in corresponding reason descriptions. 
The following module formalizes the @@ -2917,8 +3367,7 @@ let loc_of_root_use_op = function module DescFormat = struct (* InstanceT reasons have desc = name *) - let instance_reason name loc = - mk_reason (RType name) loc + let instance_reason name loc = mk_reason (RType name) loc let name_of_instance_reason r = match desc_of_reason r with @@ -2926,64 +3375,57 @@ module DescFormat = struct | desc -> string_of_desc desc (* TypeT reasons have desc = type `name` *) - let type_reason name loc = - mk_reason (RType name) loc + let type_reason name loc = mk_reason (RType name) loc let name_of_type_reason r = match desc_of_reason r with | RType name -> name | _ -> failwith "not a type reason" - end (* printing *) let string_of_defer_use_ctor = function - | DestructuringT _ -> "DestructuringT" + | LatentPredT _ -> "LatentPredT" | TypeDestructorT _ -> "TypeDestructorT" let string_of_def_ctor = function | ArrT _ -> "ArrT" - | AnyT -> "AnyT" - | AnyObjT -> "AnyObjT" - | AnyFunT -> "AnyFunT" | BoolT _ -> "BoolT" | CharSetT _ -> "CharSetT" | ClassT _ -> "ClassT" - | EmptyT -> "EmptyT" + | EmptyT _ -> "EmptyT" | FunT _ -> "FunT" | IdxWrapper _ -> "IdxWrapper" | InstanceT _ -> "InstanceT" - | IntersectionT _ -> "IntersectionT" - | MaybeT _ -> "MaybeT" | MixedT _ -> "MixedT" | NullT -> "NullT" | NumT _ -> "NumT" | ObjT _ -> "ObjT" - | OptionalT _ -> "OptionalT" | PolyT _ -> "PolyT" + | ReactAbstractComponentT _ -> "ReactAbstractComponentT" | SingletonBoolT _ -> "SingletonBoolT" | SingletonNumT _ -> "SingletonNumT" | SingletonStrT _ -> "SingletonStrT" | StrT _ -> "StrT" | TypeT _ -> "TypeT" - | TypeAppT _ -> "TypeAppT" - | UnionT _ -> "UnionT" | VoidT -> "VoidT" let string_of_ctor = function | OpenT _ -> "OpenT" + | AnyT _ -> "AnyT" | AnnotT _ -> "AnnotT" - | AnyWithLowerBoundT _ -> "AnyWithLowerBoundT" - | AnyWithUpperBoundT _ -> "AnyWithUpperBoundT" | MergedT _ -> "MergedT" | BoundT _ -> "BoundT" | InternalT (ChoiceKitT (_, tool)) -> - spf "ChoiceKitT %s" begin match tool with - | Trigger -> "Trigger" - end + spf + "ChoiceKitT %s" + begin + match tool with + | Trigger -> "Trigger" + end | TypeDestructorTriggerT _ -> "TypeDestructorTriggerT" | CustomFunT _ -> "CustomFunT" - | DefT (_, t) -> string_of_def_ctor t + | DefT (_, _, t) -> string_of_def_ctor t | EvalT _ -> "EvalT" | ExactT _ -> "ExactT" | ExistsT _ -> "ExistsT" @@ -3005,6 +3447,11 @@ let string_of_ctor = function | ShapeT _ -> "ShapeT" | ThisClassT _ -> "ThisClassT" | ThisTypeAppT _ -> "ThisTypeAppT" + | TypeAppT _ -> "TypeAppT" + | UnionT _ -> "UnionT" + | IntersectionT _ -> "IntersectionT" + | OptionalT _ -> "OptionalT" + | MaybeT _ -> "MaybeT" let string_of_internal_use_op = function | CopyEnv -> "CopyEnv" @@ -3012,57 +3459,65 @@ let string_of_internal_use_op = function | Refinement -> "Refinement" | WidenEnv -> "WidenEnv" -let string_of_root_use_op = function -| Addition _ -> "Addition" -| AssignVar _ -> "AssignVar" -| Cast _ -> "Cast" -| ClassExtendsCheck _ -> "ClassExtendsCheck" -| ClassImplementsCheck _ -> "ClassImplementsCheck" -| ClassOwnProtoCheck _ -> "ClassOwnProtoCheck" -| Coercion _ -> "Coercion" -| FunCall _ -> "FunCall" -| FunCallMethod _ -> "FunCallMethod" -| FunImplicitReturn _ -> "FunImplicitReturn" -| FunReturnStatement _ -> "FunReturnStatement" -| GeneratorYield _ -> "GeneratorYield" -| GetProperty _ -> "GetProperty" -| Internal op -> spf "Internal(%s)" (string_of_internal_use_op op) -| JSXCreateElement _ -> "JSXCreateElement" -| ReactCreateElementCall _ -> "ReactCreateElementCall" -| ReactGetIntrinsic _ -> 
"ReactGetIntrinsic" -| Speculation _ -> "Speculation" -| TypeApplication _ -> "TypeApplication" -| SetProperty _ -> "SetProperty" -| UnknownUse -> "UnknownUse" - -let string_of_frame_use_op = function -| ArrayElementCompatibility _ -> "ArrayElementCompatibility" -| FunCompatibility _ -> "FunCompatibility" -| FunMissingArg _ -> "FunMissingArg" -| FunParam _ -> "FunParam" -| FunRestParam _ -> "FunRestParam" -| FunReturn _ -> "FunReturn" -| ImplicitTypeParam _ -> "ImplicitTypeParam" -| IndexerKeyCompatibility _ -> "IndexerKeyCompatibility" -| PropertyCompatibility _ -> "PropertyCompatibility" -| ReactConfigCheck -> "ReactConfigCheck" -| TupleElementCompatibility _ -> "TupleElementCompatibility" -| TypeArgCompatibility _ -> "TypeArgCompatibility" -| TypeParamBound _ -> "TypeParamBound" -| UnifyFlip -> "UnifyFlip" - -let string_of_use_op = function -| Op root -> string_of_root_use_op root -| Frame (frame, _) -> string_of_frame_use_op frame - -let string_of_use_op_rec = - fold_use_op - (string_of_root_use_op) - (fun acc use_op -> spf "%s(%s)" (string_of_frame_use_op use_op) acc) +let string_of_root_use_op (type a) : a virtual_root_use_op -> string = function + | InitField _ -> "InitField" + | ObjectSpread _ -> "ObjectSpread" + | ObjectChain _ -> "ObjectChain" + | Addition _ -> "Addition" + | AssignVar _ -> "AssignVar" + | Cast _ -> "Cast" + | ClassExtendsCheck _ -> "ClassExtendsCheck" + | ClassImplementsCheck _ -> "ClassImplementsCheck" + | ClassOwnProtoCheck _ -> "ClassOwnProtoCheck" + | Coercion _ -> "Coercion" + | DeleteProperty _ -> "DeleteProperty" + | DeleteVar _ -> "DeleteVar" + | FunCall _ -> "FunCall" + | FunCallMethod _ -> "FunCallMethod" + | FunImplicitReturn _ -> "FunImplicitReturn" + | FunReturnStatement _ -> "FunReturnStatement" + | GeneratorYield _ -> "GeneratorYield" + | GetProperty _ -> "GetProperty" + | Internal op -> spf "Internal(%s)" (string_of_internal_use_op op) + | JSXCreateElement _ -> "JSXCreateElement" + | ReactCreateElementCall _ -> "ReactCreateElementCall" + | ReactGetIntrinsic _ -> "ReactGetIntrinsic" + | Speculation _ -> "Speculation" + | TypeApplication _ -> "TypeApplication" + | SetProperty _ -> "SetProperty" + | UnknownUse -> "UnknownUse" + +let string_of_frame_use_op (type a) : a virtual_frame_use_op -> string = function + | ArrayElementCompatibility _ -> "ArrayElementCompatibility" + | FunCompatibility _ -> "FunCompatibility" + | FunMissingArg _ -> "FunMissingArg" + | FunParam _ -> "FunParam" + | FunRestParam _ -> "FunRestParam" + | FunReturn _ -> "FunReturn" + | ImplicitTypeParam -> "ImplicitTypeParam" + | IndexerKeyCompatibility _ -> "IndexerKeyCompatibility" + | CallFunCompatibility _ -> "CallFunCompatibility" + | TupleMapFunCompatibility _ -> "TupleMapFunCompatibility" + | ObjMapFunCompatibility _ -> "ObjMapFunCompatibility" + | ObjMapiFunCompatibility _ -> "ObjMapiFunCompatibility" + | PropertyCompatibility _ -> "PropertyCompatibility" + | ReactConfigCheck -> "ReactConfigCheck" + | ReactGetConfig _ -> "ReactGetConfig" + | TupleElementCompatibility _ -> "TupleElementCompatibility" + | TypeArgCompatibility _ -> "TypeArgCompatibility" + | TypeParamBound _ -> "TypeParamBound" + | UnifyFlip -> "UnifyFlip" + +let string_of_use_op (type a) : a virtual_use_op -> string = function + | Op root -> string_of_root_use_op root + | Frame (frame, _) -> string_of_frame_use_op frame + +let string_of_use_op_rec : use_op -> string = + fold_use_op string_of_root_use_op (fun acc use_op -> + spf "%s(%s)" (string_of_frame_use_op use_op) acc) let string_of_use_ctor = function | 
UseT (op, t) -> spf "UseT(%s, %s)" (string_of_use_op op) (string_of_ctor t) - | AdderT _ -> "AdderT" | AndT _ -> "AndT" | ArrRestT _ -> "ArrRestT" @@ -3071,7 +3526,6 @@ let string_of_use_ctor = function | AssertBinaryInRHST _ -> "AssertBinaryInRHST" | AssertForInRHST _ -> "AssertForInRHST" | AssertImportIsValueT _ -> "AssertImportIsValueT" - | AssertRestParamT _ -> "AssertRestParamT" | BecomeT _ -> "BecomeT" | BindT _ -> "BindT" | CallElemT _ -> "CallElemT" @@ -3079,10 +3533,13 @@ let string_of_use_ctor = function | CallOpenPredT _ -> "CallOpenPredT" | CallT _ -> "CallT" | ChoiceKitUseT (_, tool) -> - spf "ChoiceKitUseT %s" begin match tool with - | FullyResolveType _ -> "FullyResolveType" - | TryFlow _ -> "TryFlow" - end + spf + "ChoiceKitUseT %s" + begin + match tool with + | FullyResolveType _ -> "FullyResolveType" + | TryFlow _ -> "TryFlow" + end | CJSExtractNamedExportsT _ -> "CJSExtractNamedExportsT" | CJSRequireT _ -> "CJSRequireT" | ComparatorT _ -> "ComparatorT" @@ -3095,6 +3552,7 @@ let string_of_use_ctor = function | EqT _ -> "EqT" | ExportNamedT _ -> "ExportNamedT" | ExportTypeT _ -> "ExportTypeT" + | AssertExportIsTypeT _ -> "AssertExportIsTypeT" | ExtendsUseT _ -> "ExtendsUseT" | GetElemT _ -> "GetElemT" | GetKeysT _ -> "GetKeysT" @@ -3114,11 +3572,14 @@ let string_of_use_ctor = function | ImportTypeofT _ -> "ImportTypeofT" | ImportTypeT _ -> "ImportTypeT" | IntersectionPreprocessKitT (_, tool) -> - spf "IntersectionPreprocessKitT %s" begin match tool with - | ConcretizeTypes _ -> "ConcretizeTypes" - | SentinelPropTest _ -> "SentinelPropTest" - | PropExistsTest _ -> "PropExistsTest" - end + spf + "IntersectionPreprocessKitT %s" + begin + match tool with + | ConcretizeTypes _ -> "ConcretizeTypes" + | SentinelPropTest _ -> "SentinelPropTest" + | PropExistsTest _ -> "PropExistsTest" + end | InvariantT _ -> "InvariantT" | LookupT _ -> "LookupT" | MakeExactT _ -> "MakeExactT" @@ -3141,18 +3602,20 @@ let string_of_use_ctor = function | RefineT _ -> "RefineT" | ReposLowerT _ -> "ReposLowerT" | ReposUseT _ -> "ReposUseT" - | ResolveSpreadT (_, _, {rrt_resolve_to; _;})-> - spf "ResolveSpreadT(%s)" begin match rrt_resolve_to with - | ResolveSpreadsToTuple _ -> "ResolveSpreadsToTuple" - | ResolveSpreadsToArray _ -> "ResolveSpreadsToArray" - | ResolveSpreadsToArrayLiteral _ -> "ResolveSpreadsToArrayLiteral" - | ResolveSpreadsToMultiflowCallFull _ -> "ResolveSpreadsToMultiflowCallFull" - | ResolveSpreadsToMultiflowSubtypeFull _ -> - "ResolveSpreadsToMultiflowSubtypeFull" - | ResolveSpreadsToCustomFunCall _ -> "ResolveSpreadsToCustomFunCall" - | ResolveSpreadsToMultiflowPartial _ -> "ResolveSpreadsToMultiflowPartial" - | ResolveSpreadsToCallT _ -> "ResolveSpreadsToCallT" - end + | ResolveSpreadT (_, _, { rrt_resolve_to; _ }) -> + spf + "ResolveSpreadT(%s)" + begin + match rrt_resolve_to with + | ResolveSpreadsToTuple _ -> "ResolveSpreadsToTuple" + | ResolveSpreadsToArray _ -> "ResolveSpreadsToArray" + | ResolveSpreadsToArrayLiteral (id, _, _) -> spf "ResolveSpreadsToArrayLiteral (%d)" id + | ResolveSpreadsToMultiflowCallFull _ -> "ResolveSpreadsToMultiflowCallFull" + | ResolveSpreadsToMultiflowSubtypeFull _ -> "ResolveSpreadsToMultiflowSubtypeFull" + | ResolveSpreadsToCustomFunCall _ -> "ResolveSpreadsToCustomFunCall" + | ResolveSpreadsToMultiflowPartial _ -> "ResolveSpreadsToMultiflowPartial" + | ResolveSpreadsToCallT _ -> "ResolveSpreadsToCallT" + end | SentinelPropTestT _ -> "SentinelPropTestT" | SetElemT _ -> "SetElemT" | SetPropT _ -> "SetPropT" @@ -3172,33 +3635,36 @@ let 
string_of_use_ctor = function | TypeAppVarianceCheckT _ -> "TypeAppVarianceCheck" | ConcretizeTypeAppsT _ -> "ConcretizeTypeAppsT" | CondT _ -> "CondT" + | ReactPropsToOut _ -> "ReactPropsToOut" + | ReactInToProps _ -> "ReactInToProps" + | DestructuringT _ -> "DestructuringT" + | ModuleExportsAssignT _ -> "ModuleExportsAssignT" let string_of_binary_test = function | InstanceofTest -> "instanceof" | SentinelProp key -> "sentinel prop " ^ key - let rec string_of_predicate = function - | AndP (p1,p2) -> - (string_of_predicate p1) ^ " && " ^ (string_of_predicate p2) - | OrP (p1,p2) -> - (string_of_predicate p1) ^ " || " ^ (string_of_predicate p2) - | NotP p -> "not " ^ (string_of_predicate p) + | AndP (p1, p2) -> string_of_predicate p1 ^ " && " ^ string_of_predicate p2 + | OrP (p1, p2) -> string_of_predicate p1 ^ " || " ^ string_of_predicate p2 + | NotP p -> "not " ^ string_of_predicate p | LeftP (b, t) -> - spf "left operand of %s with right operand = %s" - (string_of_binary_test b) (string_of_desc (desc_of_t t)) + spf + "left operand of %s with right operand = %s" + (string_of_binary_test b) + (string_of_desc (desc_of_t t)) | RightP (b, t) -> - spf "right operand of %s with left operand = %s" - (string_of_binary_test b) (string_of_desc (desc_of_t t)) + spf + "right operand of %s with left operand = %s" + (string_of_binary_test b) + (string_of_desc (desc_of_t t)) | ExistsP _ -> "truthy" | NullP -> "null" | MaybeP -> "null or undefined" - - | SingletonBoolP false -> "false" - | SingletonBoolP true -> "true" + | SingletonBoolP (_, false) -> "false" + | SingletonBoolP (_, true) -> "true" | SingletonStrP (_, _, str) -> spf "string `%s`" str - | SingletonNumP (_, _, (_,raw)) -> spf "number `%s`" raw - + | SingletonNumP (_, _, (_, raw)) -> spf "number `%s`" raw (* typeof *) | VoidP -> "undefined" | BoolP -> "boolean" @@ -3206,14 +3672,12 @@ let rec string_of_predicate = function | NumP -> "number" | FunP -> "function" | ObjP -> "object" - + | SymbolP -> "symbol" (* Array.isArray *) | ArrP -> "array" - - | PropExistsP (_, key, _) -> spf "prop `%s` is truthy" key - - | LatentP (OpenT (_, id),i) -> spf "LatentPred(TYPE_%d, %d)" id i - | LatentP (t,i) -> spf "LatentPred(%s, %d)" (string_of_ctor t) i + | PropExistsP (key, _) -> spf "prop `%s` is truthy" key + | LatentP (OpenT (_, id), i) -> spf "LatentPred(TYPE_%d, %d)" id i + | LatentP (t, i) -> spf "LatentPred(%s, %d)" (string_of_ctor t) i let name_of_propref = function | Named (_, x) -> Some x @@ -3224,73 +3688,107 @@ let reason_of_propref = function | Computed t -> reason_of_t t and extract_setter_type = function - | DefT (_, FunT (_, _, { params = [_, param_t]; _; })) -> param_t - | _ -> failwith "Setter property with unexpected type" + | DefT (_, _, FunT (_, _, { params = [(_, param_t)]; _ })) -> param_t + | _ -> failwith "Setter property with unexpected type" and extract_getter_type = function - | DefT (_, FunT (_, _, { return_t; _; })) -> return_t + | DefT (_, _, FunT (_, _, { return_t; _ })) -> return_t | _ -> failwith "Getter property with unexpected type" -and elemt_of_arrtype reason = function -| ArrayAT (elemt, _) -| ROArrayAT (elemt) -| TupleAT (elemt, _) -> elemt -| EmptyAT -> DefT (reason, EmptyT) +and elemt_of_arrtype = function + | ArrayAT (elemt, _) + | ROArrayAT elemt + | TupleAT (elemt, _) -> + elemt -let optional t = - let reason = replace_reason (fun desc -> ROptional desc) (reason_of_t t) in - DefT (reason, OptionalT t) +let optional ?annot_loc t = + let reason = update_desc_new_reason (fun desc -> ROptional desc) (reason_of_t t) 
in + let reason = + match annot_loc with + | Some loc -> repos_reason loc ~annot_loc:loc reason + | None -> reason + in + OptionalT (reason, t) let maybe t = - let reason = replace_reason (fun desc -> RMaybe desc) (reason_of_t t) in - DefT (reason, MaybeT t) + let reason = update_desc_new_reason (fun desc -> RMaybe desc) (reason_of_t t) in + MaybeT (reason, t) -let exact t = - ExactT (reason_of_t t, t) +let exact t = ExactT (reason_of_t t, t) -let class_type ?(structural=false) t = +let class_type ?(structural = false) ?annot_loc t = let reason = - if structural then reason_of_t t - else replace_reason (fun desc -> RClass desc) (reason_of_t t) + if structural then + reason_of_t t + else + update_desc_new_reason (fun desc -> RClass desc) (reason_of_t t) in - DefT (reason, ClassT t) + let reason = + match annot_loc with + | Some loc -> repos_reason loc ~annot_loc:loc reason + | None -> reason + in + DefT (reason, bogus_trust (), ClassT t) let this_class_type t = - let reason = replace_reason (fun desc -> RClass desc) (reason_of_t t) in + let reason = update_desc_new_reason (fun desc -> RClass desc) (reason_of_t t) in ThisClassT (reason, t) let extends_type r l u = - let reason = replace_reason (fun desc -> RExtends desc) r in + let reason = update_desc_reason (fun desc -> RExtends desc) r in InternalT (ExtendsT (reason, l, u)) let extends_use_type use_op l u = - let reason = replace_reason (fun desc -> RExtends desc) (reason_of_t u) in + let reason = update_desc_new_reason (fun desc -> RExtends desc) (reason_of_t u) in ExtendsUseT (use_op, reason, [], l, u) -let poly_type id tparams t = - if tparams = [] - then t - else - let reason = replace_reason (fun desc -> RPolyType desc) (reason_of_t t) in - DefT (reason, PolyT (tparams, t, id)) - -let typeapp ?annot_loc t targs = - let reason = replace_reason (fun desc -> RTypeApp desc) (reason_of_t t) in - let reason = match annot_loc with - | Some loc -> annot_reason (repos_reason loc reason) - | None -> reason +let poly_type id tparams_loc (tparams : typeparam Nel.t) t = + let reason = update_desc_new_reason (fun desc -> RPolyType desc) (reason_of_t t) in + DefT (reason, bogus_trust (), PolyT (tparams_loc, tparams, t, id)) + +let poly_type_of_tparam_list id tparams_loc tparams t = + match tparams with + | [] -> t + | hd :: tl -> + let tparams_nel = (hd, tl) in + poly_type id tparams_loc tparams_nel t + +let poly_type_of_tparams id (tparams : typeparams) t = + match tparams with + | None -> t + | Some (tparams_loc, tparams_nel) -> poly_type id tparams_loc tparams_nel t + +(* The implicit parameter specifies that the application is not a product of some + * source level type application, but merely a tool for some other functionality, + * e.g. canonicalize_imported_type in flow_js.ml. 
*) +let typeapp ?(implicit = false) ?annot_loc t targs = + let reason = + update_desc_new_reason + (fun desc -> + if implicit then + RTypeAppImplicit desc + else + RTypeApp desc) + (reason_of_t t) + in + let reason = + match annot_loc with + | Some loc -> repos_reason loc ~annot_loc:loc reason + | None -> reason in let use_op = Op (TypeApplication { type' = reason }) in - DefT (reason, TypeAppT (use_op, t, targs)) + TypeAppT (reason, use_op, t, targs) let this_typeapp ?annot_loc t this targs = - let reason = match targs with - | Some _ -> replace_reason (fun desc -> RTypeApp desc) (reason_of_t t) - | None -> reason_of_t t + let reason = + match targs with + | Some _ -> update_desc_new_reason (fun desc -> RTypeApp desc) (reason_of_t t) + | None -> reason_of_t t in - let reason = match annot_loc with - | Some loc -> annot_reason (repos_reason loc reason) - | None -> reason + let reason = + match annot_loc with + | Some loc -> repos_reason loc ~annot_loc:loc reason + | None -> reason in ThisTypeAppT (reason, t, this, targs) @@ -3308,56 +3806,52 @@ let unknown_use = Op UnknownUse (* Methods may use a dummy statics object type to carry properties. We do not want to encourage this pattern, but we also don't want to block uses of this pattern. Thus, we compromise by not tracking the property types. *) -let dummy_static reason = - DefT (replace_reason (fun desc -> RStatics desc) reason, AnyFunT) +let dummy_static = update_desc_reason (fun desc -> RStatics desc) %> Unsoundness.dummy_static_any -let dummy_prototype = - ObjProtoT (locationless_reason RDummyPrototype) +let dummy_prototype = ObjProtoT (locationless_reason RDummyPrototype) -let dummy_this = - let reason = locationless_reason RDummyThis in - DefT (reason, AnyT) +let bound_function_dummy_this = locationless_reason RDummyThis |> Unsoundness.bound_fn_this_any + +let dummy_this = locationless_reason RDummyThis |> MixedT.make |> with_trust bogus_trust let global_this reason = - let reason = replace_reason_const (RCustom "global object") reason in + let reason = replace_desc_reason (RCustom "global object") reason in ObjProtoT reason -let default_obj_assign_kind = - ObjAssign { assert_exact = false } +let default_obj_assign_kind = ObjAssign { assert_exact = false } (* A method type is a function type with `this` specified. 
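(* poly_type above now takes its type parameters as a non-empty list (a head
   paired with a tail), and the zero-parameter case is handled up front by
   poly_type_of_tparam_list / poly_type_of_tparams rather than by checking for
   an empty list inside. A standalone sketch of that encoding with stand-in
   location and parameter types: *)
type 'a nel = 'a * 'a list

let nel_to_list (x, xs) = x :: xs

(* None means no type parameters at all; Some carries the params location plus
   a provably non-empty list. *)
type tparams = (int * string nel) option

let of_list loc = function
  | [] -> None
  | hd :: tl -> Some (loc, (hd, tl))

let to_list : tparams -> string list = function
  | None -> []
  | Some (_loc, params) -> nel_to_list params

let () =
  assert (to_list (of_list 0 []) = []);
  assert (to_list (of_list 0 ["T"; "U"]) = ["T"; "U"])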
*) let mk_methodtype - this tins ~rest_param ~def_reason - ?(frame=0) ?params_names ?(is_predicate=false) tout = { - this_t = this; - params = ( - match params_names with - | None -> List.map (fun t -> None, t) tins - | Some xs -> List.map2 (fun x t -> (x, t)) xs tins - ); - rest_param; - return_t = tout; - is_predicate; - closure_t = frame; - changeset = Changeset.empty; - def_reason; -} - -let mk_methodcalltype - this targs args ?(frame=0) ?(call_strict_arity=true) tout = { - call_this_t = this; - call_targs = targs; - call_args_tlist = args; - call_tout = tout; - call_closure_t = frame; - call_strict_arity; -} + this tins ~rest_param ~def_reason ?(frame = 0) ?params_names ?(is_predicate = false) tout = + { + this_t = this; + params = + (match params_names with + | None -> Core_list.map ~f:(fun t -> (None, t)) tins + | Some xs -> List.map2 (fun x t -> (x, t)) xs tins); + rest_param; + return_t = tout; + is_predicate; + closure_t = frame; + changeset = Changeset.empty; + def_reason; + } + +let mk_methodcalltype this targs args ?(frame = 0) ?(call_strict_arity = true) tout = + { + call_this_t = this; + call_targs = targs; + call_args_tlist = args; + call_tout = tout; + call_closure_t = frame; + call_strict_arity; + } (* A bound function type is a function type with `this` = `any`. Typically, such a type is given to a method when it can be considered bound: in other words, when calling that method through any object would be fine, since the object would be ignored. *) -let mk_boundfunctiontype = mk_methodtype dummy_this +let mk_boundfunctiontype = mk_methodtype bound_function_dummy_this (* A function type has `this` = `mixed`. Such a type can be given to functions that are meant to be called directly. On the other hand, it deliberately @@ -3365,6 +3859,7 @@ let mk_boundfunctiontype = mk_methodtype dummy_this non-trivially: indeed, calling them directly would cause `this` to be bound to the global object, which is typically unintended. *) let mk_functiontype reason = mk_methodtype (global_this reason) + let mk_functioncalltype reason = mk_methodcalltype (global_this reason) let mk_opt_functioncalltype reason targs args clos strict = @@ -3380,37 +3875,49 @@ let mk_opt_functioncalltype reason targs args clos strict = Types of object literals are exact, but can be sealed or unsealed. Object type annotations are sealed but not exact. 
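(* dummy_static and the AnyT helpers above are written as pipelines with the
   [%>] left-to-right composition operator. A minimal standalone definition and
   use; the reason record and the mk_reason / annot_reason / make functions
   below are toy stand-ins, not Flow's. *)
let ( %> ) f g x = g (f x)

type reason = { desc : string; loc : int; annot : bool }

type ty = Any of reason

let mk_reason desc loc = { desc; loc; annot = false }

let annot_reason r = { r with annot = true }

let make r = Any r

(* Reads left to right: build the reason, mark it as an annotation, wrap it. *)
let any_at = mk_reason "any" %> annot_reason %> make

let () = assert (any_at 5 = Any { desc = "any"; loc = 5; annot = true })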
*) -let default_flags = { - sealed = UnsealedInFile None; - exact = true; - frozen = false; -} - -let mk_objecttype ?(flags=default_flags) ~dict ~call pmap proto = { - flags; - proto_t = proto; - props_tmap = pmap; - dict_t = dict; - call_t = call; -} - -let mk_object_def_type ~reason ?(flags=default_flags) ~dict ~call pmap proto = - let reason = replace_reason invalidate_rtype_alias reason in - DefT (reason, ObjT (mk_objecttype ~flags ~dict ~call pmap proto)) - -let apply_opt_funcalltype (this, targs, args, clos, strict) t_out = { - call_this_t = this; - call_targs = targs; - call_args_tlist = args; - call_tout = t_out; - call_closure_t = clos; - call_strict_arity = strict; -} - -let apply_opt_use opt_use t_out = match opt_use with -| OptCallT (u, r, f) -> - CallT (u, r, apply_opt_funcalltype f t_out) -| OptGetPropT (u, r, p) -> GetPropT (u, r, p, t_out) -| OptGetPrivatePropT (u, r, s, cbs, b) -> GetPrivatePropT (u, r, s, cbs, b, t_out) -| OptTestPropT (r, i, p) -> TestPropT (r, i, p, t_out) -| OptGetElemT (u, r, t) -> GetElemT (u, r, t, t_out) +let default_flags = { sealed = UnsealedInFile None; exact = true; frozen = false } + +let mk_objecttype ?(flags = default_flags) ~dict ~call pmap proto = + { flags; proto_t = proto; props_tmap = pmap; dict_t = dict; call_t = call } + +let mk_object_def_type ~reason ?(flags = default_flags) ~dict ~call pmap proto = + let reason = update_desc_reason invalidate_rtype_alias reason in + DefT (reason, bogus_trust (), ObjT (mk_objecttype ~flags ~dict ~call pmap proto)) + +let apply_opt_funcalltype (this, targs, args, clos, strict) t_out = + { + call_this_t = this; + call_targs = targs; + call_args_tlist = args; + call_tout = t_out; + call_closure_t = clos; + call_strict_arity = strict; + } + +let create_intersection rep = IntersectionT (locationless_reason (RCustom "intersection"), rep) + +let apply_opt_use opt_use t_out = + match opt_use with + | OptCallT (u, r, f) -> CallT (u, r, apply_opt_funcalltype f t_out) + | OptGetPropT (u, r, p) -> GetPropT (u, r, p, t_out) + | OptGetPrivatePropT (u, r, s, cbs, b) -> GetPrivatePropT (u, r, s, cbs, b, t_out) + | OptTestPropT (r, i, p) -> TestPropT (r, i, p, t_out) + | OptGetElemT (u, r, t) -> GetElemT (u, r, t, t_out) + +module TypeParams : sig + val to_list : typeparams -> typeparam list + + val of_list : ALoc.t -> typeparam list -> typeparams + + val map : (typeparam -> typeparam) -> typeparams -> typeparams +end = struct + let to_list tparams = + Option.value_map tparams ~default:[] ~f:(fun (_loc, tparam_nel) -> Nel.to_list tparam_nel) + + let of_list tparams_loc tparams = + match tparams with + | [] -> None + | hd :: tl -> Some (tparams_loc, (hd, tl)) + + let map f tparams = Option.map ~f:(fun (loc, params) -> (loc, Nel.map f params)) tparams +end diff --git a/src/typing/type_annotation.ml b/src/typing/type_annotation.ml index 4c14d5f43eb..e210b337319 100644 --- a/src/typing/type_annotation.ml +++ b/src/typing/type_annotation.ml @@ -1,154 +1,204 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
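(* apply_opt_use above reattaches an output to a use that was stored without
   one (the Opt* constructors, a use minus its output so the output can be
   supplied later). A toy model of the two representations and of the plug-in
   step; the constructors here are illustrative, not Flow's. *)
type tvar = int

type use =
  | GetProp of string * tvar (* property name, output *)
  | Call of tvar list * tvar (* arguments, output *)

type opt_use =
  | OptGetProp of string
  | OptCall of tvar list

let apply_opt_use opt_use t_out =
  match opt_use with
  | OptGetProp name -> GetProp (name, t_out)
  | OptCall args -> Call (args, t_out)

let () =
  assert (apply_opt_use (OptGetProp "length") 42 = GetProp ("length", 42));
  assert (apply_opt_use (OptCall [1; 2]) 3 = Call ([1; 2], 3))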
*) module Ast = Flow_ast - +module Tast_utils = Typed_ast_utils open Utils_js open Reason open Type open Env.LookupMode - -module FlowError = Flow_error +open Trust_helpers module Flow = Flow_js +module T = Ast.Type + +module Func_type_params = Func_params.Make (struct + type 'T ast = (ALoc.t, 'T) Ast.Type.Function.Params.t + + type 'T param_ast = (ALoc.t, 'T) Ast.Type.Function.Param.t + + type 'T rest_ast = (ALoc.t, 'T) Ast.Type.Function.RestParam.t + + type param = Type.t * (ALoc.t * Type.t) param_ast + + type rest = Type.t * (ALoc.t * Type.t) rest_ast + + let id_name (_, { Ast.Identifier.name; _ }) = name + + let param_type (t, (_, { Ast.Type.Function.Param.name; optional; _ })) = + let name = Option.map name ~f:id_name in + let t = + if optional then + Type.optional t + else + t + in + (name, t) + + let rest_type (t, (loc, { Ast.Type.Function.RestParam.argument })) = + let (_, { Ast.Type.Function.Param.name; _ }) = argument in + let name = Option.map name ~f:id_name in + (name, loc, t) + + let subst_param cx map (t, tast) = + let t = Flow.subst cx map t in + (t, tast) + + let subst_rest cx map (t, tast) = + let t = Flow.subst cx map t in + (t, tast) + + let eval_param _cx (_, tast) = tast + + let eval_rest _cx (_, tast) = tast +end) + +module Func_type_sig = Func_sig.Make (Func_type_params) +module Class_type_sig = Class_sig.Make (Func_type_sig) + +module Object_freeze = struct + let freeze_object cx loc t = + let reason_arg = mk_reason (RFrozen RObjectLit) loc in + Tvar.mk_derivable_where cx reason_arg (fun tvar -> + Flow.flow cx (t, ObjFreezeT (reason_arg, tvar))) +end (* AST helpers *) let qualified_name = - let rec loop acc = Ast.Type.Generic.Identifier.(function - | Unqualified (_, name) -> - let parts = name::acc in - String.concat "." parts - | Qualified (_, { qualification; id = (_, name) }) -> - loop (name::acc) qualification - ) in + let rec loop acc = + Ast.Type.Generic.Identifier.( + function + | Unqualified (_, { Ast.Identifier.name; comments = _ }) -> + let parts = name :: acc in + String.concat "." 
parts + | Qualified (_, { qualification; id = (_, { Ast.Identifier.name; comments = _ }) }) -> + loop (name :: acc) qualification) + in loop [] -let ident_name (_, name) = name +let ident_name (_, { Ast.Identifier.name; comments = _ }) = name -let error_type cx loc msg = +let error_type cx loc msg t_in = Flow.add_output cx msg; - (loc, AnyT.at loc), Typed_ast.Type.error + let t_out = Tast_utils.error_mapper#type_ t_in |> snd in + ((loc, AnyT.at AnyError loc), t_out) -let is_suppress_type cx type_name = - SSet.mem type_name (Context.suppress_types cx) +let is_suppress_type cx type_name = SSet.mem type_name (Context.suppress_types cx) -let check_type_arg_arity cx loc params n f = +let check_type_arg_arity cx loc t_ast params n f = match params with | None -> if n = 0 then f () else - error_type cx loc (FlowError.ETypeParamArity (loc, n)) + error_type cx loc (Error_message.ETypeParamArity (loc, n)) t_ast | Some (_, l) -> if n = List.length l && n <> 0 then f () else - error_type cx loc (FlowError.ETypeParamArity (loc, n)) - -let mk_custom_fun cx loc targs (id_loc, name) kind = - check_type_arg_arity cx loc targs 0 (fun () -> - let reason = mk_reason RFunctionType loc in - let t = CustomFunT (reason, kind) in - (loc, t), - Ast.Type.(Generic { - Generic.id = Generic.Identifier.Unqualified ((id_loc, t), name); - targs = None - }) - ) - -let mk_react_prop_type cx loc targs id kind = - mk_custom_fun cx loc targs id - (ReactPropType (React.PropType.Complex kind)) - -let add_unclear_type_error_if_not_lib_file cx loc = Loc.( - match loc.source with - | Some file when not @@ File_key.is_lib_file file -> - Flow_js.add_output cx (FlowError.EUnclearType loc) - | _ -> () -) - -let add_deprecated_type_error_if_not_lib_file cx loc = Loc.( - match loc.source with - | Some file when not @@ File_key.is_lib_file file -> - Flow_js.add_output cx (FlowError.EDeprecatedType loc) - | _ -> () -) + error_type cx loc (Error_message.ETypeParamArity (loc, n)) t_ast + +let mk_custom_fun cx loc t_ast targs (id_loc, name, comments) kind = + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let reason = mk_reason RFunctionType loc in + let t = CustomFunT (reason, kind) in + ( (loc, t), + Ast.Type.( + Generic + { + Generic.id = + Generic.Identifier.Unqualified ((id_loc, t), { Ast.Identifier.name; comments }); + targs = None; + }) )) + +let mk_react_prop_type cx loc t_ast targs id kind = + mk_custom_fun cx loc t_ast targs id (ReactPropType (React.PropType.Complex kind)) + +let add_unclear_type_error_if_not_lib_file cx loc = + match ALoc.source loc with + | Some file when not @@ File_key.is_lib_file file -> + Flow_js.add_output cx (Error_message.EUnclearType loc) + | _ -> () + +let add_deprecated_type_error_if_not_lib_file cx loc = + match ALoc.source loc with + | Some file when not @@ File_key.is_lib_file file -> + Flow_js.add_output cx (Error_message.EDeprecatedType loc) + | _ -> () + +let polarity = function + | Some (_, Ast.Variance.Plus) -> Polarity.Positive + | Some (_, Ast.Variance.Minus) -> Polarity.Negative + | None -> Polarity.Neutral (**********************************) (* Transform annotations to types *) (**********************************) -(* converter *) -let rec convert cx tparams_map = Ast.Type.(function - -| loc, (Any as t_ast) -> - add_unclear_type_error_if_not_lib_file cx loc; - (loc, AnyT.at loc), t_ast - -| loc, (Mixed as t_ast) -> (loc, MixedT.at loc), t_ast - -| loc, (Empty as t_ast) -> (loc, EmptyT.at loc), t_ast - -| loc, (Void as t_ast) -> (loc, VoidT.at loc), t_ast - -| loc, (Null as t_ast) -> 
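(* check_type_arg_arity above runs its continuation only when the expected
   number of type arguments was supplied, and otherwise reports an error and
   falls back to an error type. A standalone sketch of that control flow;
   reporting goes to stderr here instead of through Flow's context, and the
   types are stand-ins. *)
type ty =
  | Arr of ty
  | Num
  | AnyErr

let check_type_arg_arity loc targs n f =
  if List.length targs = n then
    f ()
  else begin
    (* stand-in for add_output with an ETypeParamArity-style error *)
    Printf.eprintf "Expected %d type argument(s) at location %d\n" n loc;
    AnyErr
  end

(* Array<T> takes exactly one type argument. *)
let array_type loc targs = check_type_arg_arity loc targs 1 (fun () -> Arr (List.hd targs))

let () =
  assert (array_type 10 [Num] = Arr Num);
  assert (array_type 11 [] = AnyErr)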
(loc, NullT.at loc), t_ast - -| loc, (Number as t_ast) -> (loc, NumT.at loc), t_ast +exception UnexpectedTemporaryObject -| loc, (String as t_ast) -> (loc, StrT.at loc), t_ast - -| loc, (Boolean as t_ast) -> (loc, BoolT.at loc), t_ast - -| loc, Nullable t -> - let (_, t), _ as t_ast = convert cx tparams_map t in - let reason = annot_reason (mk_reason (RMaybe (desc_of_t t)) loc) in - (loc, DefT (reason, MaybeT t)), Nullable t_ast - -| loc, Union (t0, t1, ts) -> - let (_, t0), _ as t0_ast = convert cx tparams_map t0 in - let (_, t1), _ as t1_ast = convert cx tparams_map t1 in - let ts, ts_ast = convert_list cx tparams_map ts in - let rep = UnionRep.make t0 t1 (ts) in - (loc, DefT (mk_reason RUnionType loc, UnionT rep)), - Union (t0_ast, t1_ast, ts_ast) - -| loc, Intersection (t0, t1, ts) -> - let (_, t0), _ as t0_ast = convert cx tparams_map t0 in - let (_, t1), _ as t1_ast = convert cx tparams_map t1 in - let ts, ts_ast = convert_list cx tparams_map ts in - let rep = InterRep.make t0 t1 ts in - (loc, DefT (mk_reason RIntersectionType loc, IntersectionT rep)), - Intersection (t0_ast, t1_ast, ts_ast) - -| loc, Typeof x -> - begin match x with - | q_loc, Generic { - Generic.id = qualification; - targs = None - } -> - let valtype, qualification_ast = convert_qualification - ~lookup_mode:ForTypeof cx "typeof-annotation" qualification in - let desc = RTypeof (qualified_name qualification) in - let reason = mk_reason desc loc in - (loc, Flow.mk_typeof_annotation cx reason valtype), - Typeof ((q_loc, valtype), Generic { Generic.id = qualification_ast; targs = None }) - | loc, _ -> - error_type cx loc (FlowError.EUnexpectedTypeof loc) - end - -| loc, Tuple ts -> - let tuple_types, ts_ast = convert_list cx tparams_map ts in - let reason = annot_reason (mk_reason RTupleType loc) in - let element_reason = mk_reason RTupleElement loc in - let elemt = match tuple_types with - | [] -> EmptyT.why element_reason - | [t] -> t - | t0::t1::ts -> - (* If a tuple should be viewed as an array, what would the element type of +(* converter *) +let rec convert cx tparams_map = + Ast.Type.( + function + | (loc, (Any as t_ast)) -> + add_unclear_type_error_if_not_lib_file cx loc; + ((loc, AnyT.at Annotated loc), t_ast) + | (loc, (Mixed as t_ast)) -> ((loc, MixedT.at loc |> with_trust_inference cx), t_ast) + | (loc, (Empty as t_ast)) -> ((loc, EmptyT.at loc |> with_trust_inference cx), t_ast) + | (loc, (Void as t_ast)) -> ((loc, VoidT.at loc |> with_trust_inference cx), t_ast) + | (loc, (Null as t_ast)) -> ((loc, NullT.at loc |> with_trust_inference cx), t_ast) + | (loc, (Number as t_ast)) -> ((loc, NumT.at loc |> with_trust_inference cx), t_ast) + | (loc, (BigInt as t_ast)) -> + let reason = annot_reason (mk_reason RBigInt loc) in + Flow.add_output cx (Error_message.EBigIntNotYetSupported reason); + ((loc, AnyT.why AnyError reason), t_ast) + | (loc, (String as t_ast)) -> ((loc, StrT.at loc |> with_trust_inference cx), t_ast) + | (loc, (Boolean as t_ast)) -> ((loc, BoolT.at loc |> with_trust_inference cx), t_ast) + | (loc, Nullable t) -> + let (((_, t), _) as t_ast) = convert cx tparams_map t in + let reason = annot_reason (mk_reason (RMaybe (desc_of_t t)) loc) in + ((loc, MaybeT (reason, t)), Nullable t_ast) + | (loc, Union (t0, t1, ts)) -> + let (((_, t0), _) as t0_ast) = convert cx tparams_map t0 in + let (((_, t1), _) as t1_ast) = convert cx tparams_map t1 in + let (ts, ts_ast) = convert_list cx tparams_map ts in + let rep = UnionRep.make t0 t1 ts in + ((loc, UnionT (mk_reason RUnionType loc, rep)), Union (t0_ast, 
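(* Every case of convert above returns the node location paired with the
   computed type together with a rebuilt, fully annotated AST, and compound
   forms (Nullable, Union, Intersection, ...) convert their children first so
   the child types can be reused. A standalone miniature of that shape for a
   three-constructor toy type language: *)
type loc = int

type ty =
  | TNum
  | TStr
  | TUnion of ty list

(* The AST is parameterized by its annotation, so the same constructors serve
   for the input (located) and output (located and typed) trees. *)
type 'loc ast =
  | Num of 'loc
  | Str of 'loc
  | Union of 'loc * 'loc ast list

let rec convert : loc ast -> (loc * ty) ast * ty = function
  | Num loc -> (Num (loc, TNum), TNum)
  | Str loc -> (Str (loc, TStr), TStr)
  | Union (loc, members) ->
    let (members_ast, member_tys) = List.split (List.map convert members) in
    let t = TUnion member_tys in
    (Union ((loc, t), members_ast), t)

let () =
  match convert (Union (0, [Num 1; Str 2])) with
  | (Union ((0, TUnion [TNum; TStr]), _), TUnion [TNum; TStr]) -> ()
  | _ -> assert false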
t1_ast, ts_ast)) + | (loc, Intersection (t0, t1, ts)) -> + let (((_, t0), _) as t0_ast) = convert cx tparams_map t0 in + let (((_, t1), _) as t1_ast) = convert cx tparams_map t1 in + let (ts, ts_ast) = convert_list cx tparams_map ts in + let rep = InterRep.make t0 t1 ts in + ( (loc, IntersectionT (mk_reason RIntersectionType loc, rep)), + Intersection (t0_ast, t1_ast, ts_ast) ) + | (loc, Typeof x) as t_ast -> + begin + match x with + | (q_loc, Generic { Generic.id = qualification; targs = None }) -> + let (valtype, qualification_ast) = + convert_qualification ~lookup_mode:ForTypeof cx "typeof-annotation" qualification + in + let desc = RTypeof (qualified_name qualification) in + let reason = mk_reason desc loc in + ( (loc, Flow.mk_typeof_annotation cx reason valtype), + Typeof ((q_loc, valtype), Generic { Generic.id = qualification_ast; targs = None }) ) + | (q_loc, _) -> error_type cx loc (Error_message.EUnexpectedTypeof q_loc) t_ast + end + | (loc, Tuple ts) -> + let (tuple_types, ts_ast) = convert_list cx tparams_map ts in + let reason = annot_reason (mk_reason RTupleType loc) in + let element_reason = mk_reason RTupleElement loc in + let elemt = + match tuple_types with + | [] -> EmptyT.why element_reason |> with_trust bogus_trust + | [t] -> t + | t0 :: t1 :: ts -> + (* If a tuple should be viewed as an array, what would the element type of the array be? Using a union here seems appealing but is wrong: setting elements @@ -160,1492 +210,1852 @@ let rec convert cx tparams_map = Ast.Type.(function unsound reads. The correct solution is to safely case a tuple type to a covariant - array interface whose element type would be a union. Until we have - that, we use the following closest approximation, that behaves like a - union as a lower bound but `any` as an upper bound. + array interface whose element type would be a union. 
*) - AnyWithLowerBoundT (DefT (element_reason, UnionT (UnionRep.make t0 t1 ts))) - in - (loc, DefT (reason, ArrT (TupleAT (elemt, tuple_types)))), Tuple ts_ast - -| loc, Array t -> - let r = mk_reason RArrayType loc in - let (_, elemt), _ as t_ast = convert cx tparams_map t in - (loc, DefT (r, ArrT (ArrayAT (elemt, None)))), Array t_ast - -| loc, (StringLiteral { Ast.StringLiteral.value; _ } as t_ast) -> - (loc, mk_singleton_string loc value), t_ast - -| loc, (NumberLiteral { Ast.NumberLiteral.value; raw } as t_ast) -> - (loc, mk_singleton_number loc value raw), t_ast - -| loc, (BooleanLiteral value as t_ast) -> - (loc, mk_singleton_boolean loc value), t_ast - -(* TODO *) -| loc, Generic { Generic.id = (Generic.Identifier.Qualified (qid_loc, - { Generic.Identifier.qualification; id; }) as qid); targs } -> - let m, qualification_ast = - convert_qualification cx "type-annotation" qualification in - let id_loc, name = id in - let reason = mk_reason (RType name) loc in - let id_reason = mk_reason (RType name) id_loc in - let qid_reason = mk_reason (RType (qualified_name qid)) qid_loc in - let t_unapplied = Tvar.mk_where cx qid_reason (fun t -> - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - let use_op = Op (GetProperty qid_reason) in - Flow.flow cx (m, GetPropT (use_op, qid_reason, Named (id_reason, name), t)); - ) in - let t, targs = mk_nominal_type cx reason tparams_map (t_unapplied, targs) in - (loc, t), - Generic { - Generic.id = Generic.Identifier.Qualified (qid_loc, { - Generic.Identifier.qualification = qualification_ast; - id = (id_loc, t_unapplied), name; - }); - targs - } - -(* type applications: name < params > *) -| loc, Generic { - Generic.id = Generic.Identifier.Unqualified (name_loc, name as ident); - targs - } -> - - let convert_type_params () = - match targs with - | None -> [], None - | Some (loc, targs) -> - let elemts, targs = convert_list cx tparams_map targs in - elemts, Some (loc, targs) - in - - let reconstruct_ast t ?id_t targs = - (loc, t), Generic { Generic. 
- id = Generic.Identifier.Unqualified ((name_loc, Option.value id_t ~default:t), name); - targs; - } in - - let use_op reason = - Op (TypeApplication { type' = reason }) in - - begin match name with - - (* Array *) - | "Array" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let elemts, targs = convert_type_params () in - let elemt = List.hd elemts in - reconstruct_ast - (DefT (mk_reason RArrayType loc, ArrT (ArrayAT (elemt, None)))) - targs - ) - - (* $ReadOnlyArray is the supertype of all tuples and all arrays *) - | "$ReadOnlyArray" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let elemts, targs = convert_type_params () in - let elemt = List.hd elemts in - reconstruct_ast - (DefT (annot_reason (mk_reason RROArrayType loc), ArrT (ROArrayAT (elemt)))) - targs - ) - - (* $Supertype acts as any over supertypes of T *) - | "$Supertype" -> - add_unclear_type_error_if_not_lib_file cx loc; - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - reconstruct_ast (AnyWithLowerBoundT t) targs - ) - - (* $Subtype acts as any over subtypes of T *) - | "$Subtype" -> - add_unclear_type_error_if_not_lib_file cx loc; - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - reconstruct_ast (AnyWithUpperBoundT t) targs - ) - - (* $PropertyType acts as the type of 'x' in object type T *) - | "$PropertyType" -> - check_type_arg_arity cx loc targs 2 (fun () -> - match convert_type_params () with - | ([t; DefT (_, SingletonStrT key)], targs) -> - let reason = mk_reason (RType "$PropertyType") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT - (use_op reason, reason, PropertyType key), mk_id())) - targs - | _ -> - error_type cx loc (FlowError.EPropertyTypeAnnot loc) - ) - - (* $ElementType acts as the type of the string elements in object - type T *) - | "$ElementType" -> - check_type_arg_arity cx loc targs 2 (fun () -> - match convert_type_params () with - | ([t; e], targs) -> - let reason = mk_reason (RType "$ElementType") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT - (use_op reason, reason, ElementType e), mk_id())) - targs - | _ -> assert false - ) - - (* $NonMaybeType acts as the type T without null and void *) - | "$NonMaybeType" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let reason = mk_reason (RType "$NonMaybeType") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT - (use_op reason, reason, NonMaybeType), mk_id())) - targs - ) - - (* $Shape matches the shape of T *) - | "$Shape" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - reconstruct_ast (ShapeT t) targs - ) - - (* $Diff *) - | "$Diff" -> - check_type_arg_arity cx loc targs 2 (fun () -> - let t1, t2, targs = match convert_type_params () with - | [t1; t2], targs -> t1, t2, targs - | _ -> assert false in - let reason = mk_reason (RType "$Diff") loc in - reconstruct_ast - (EvalT (t1, TypeDestructorT (use_op reason, reason, - RestType (Type.Object.Rest.IgnoreExactAndOwn, t2)), mk_id ())) - targs - ) - - (* $ReadOnly *) - | "$ReadOnly" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let reason = mk_reason (RType "$ReadOnly") loc in - reconstruct_ast - (EvalT ( - t, - TypeDestructorT ( - use_op reason, - reason, - ReadOnlyType - ), - mk_id () - )) - targs - ) - - (* $Keys is the 
set of keys of T *) - (** TODO: remove $Enum **) - | "$Keys" | "$Enum" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - reconstruct_ast - (KeysT (mk_reason RKeySet loc, t)) - targs - ) - - (* $Values is a union of all the own enumerable value types of T *) - | "$Values" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let reason = mk_reason (RType "$Values") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT - (use_op reason, reason, ValuesType), mk_id())) - targs - ) - - | "$Exact" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let desc = RExactType (desc_of_t t) in - reconstruct_ast (ExactT (mk_reason desc loc, t)) targs - ) - - | "$Rest" -> - check_type_arg_arity cx loc targs 2 (fun () -> - let t1, t2, targs = match convert_type_params () with - | [t1; t2], targs -> t1, t2, targs - | _ -> assert false in - let reason = mk_reason (RType "$Rest") loc in - reconstruct_ast - (EvalT (t1, TypeDestructorT (use_op reason, reason, - RestType (Type.Object.Rest.Sound, t2)), mk_id ())) - targs - ) - - (* $Exports<'M'> is the type of the exports of module 'M' *) - (** TODO: use `import typeof` instead when that lands **) - | "$Exports" -> - check_type_arg_arity cx loc targs 1 (fun () -> - match targs with - | Some (targs_loc, (str_loc, StringLiteral { Ast.StringLiteral.value; raw })::_) -> - let desc = RModule value in - let reason = mk_reason desc loc in - let remote_module_t = - Env.get_var_declared_type cx (internal_module_name value) loc + UnionT (element_reason, UnionRep.make t0 t1 ts) + in + ((loc, DefT (reason, infer_trust cx, ArrT (TupleAT (elemt, tuple_types)))), Tuple ts_ast) + | (loc, Array t) -> + let r = mk_reason RArrayType loc in + let (((_, elemt), _) as t_ast) = convert cx tparams_map t in + ((loc, DefT (r, infer_trust cx, ArrT (ArrayAT (elemt, None)))), Array t_ast) + | (loc, (StringLiteral { Ast.StringLiteral.value; _ } as t_ast)) -> + ((loc, mk_singleton_string cx loc value), t_ast) + | (loc, (NumberLiteral { Ast.NumberLiteral.value; raw } as t_ast)) -> + ((loc, mk_singleton_number cx loc value raw), t_ast) + | (loc, (BigIntLiteral { Ast.BigIntLiteral.bigint; _ } as t_ast)) -> + let reason = annot_reason (mk_reason (RBigIntLit bigint) loc) in + Flow.add_output cx (Error_message.EBigIntNotYetSupported reason); + ((loc, AnyT.why AnyError reason), t_ast) + | (loc, (BooleanLiteral value as t_ast)) -> ((loc, mk_singleton_boolean cx loc value), t_ast) + (* TODO *) + | ( loc, + Generic + { + Generic.id = + Generic.Identifier.Qualified (qid_loc, { Generic.Identifier.qualification; id }) as + qid; + targs; + } ) -> + let (m, qualification_ast) = convert_qualification cx "type-annotation" qualification in + let (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)) = id in + let reason = mk_reason (RType name) loc in + let id_reason = mk_reason (RType name) id_loc in + let qid_reason = mk_reason (RType (qualified_name qid)) qid_loc in + let t_unapplied = + Tvar.mk_where cx qid_reason (fun t -> + let use_op = Op (GetProperty qid_reason) in + Flow.flow cx (m, GetPropT (use_op, qid_reason, Named (id_reason, name), t))) + in + let (t, targs) = mk_nominal_type cx reason tparams_map (t_unapplied, targs) in + ( (loc, t), + Generic + { + Generic.id = + Generic.Identifier.Qualified + ( qid_loc, + { + Generic.Identifier.qualification = qualification_ast; + id = ((id_loc, 
t_unapplied), id_name); + } ); + targs; + } ) + (* type applications: name < params > *) + | ( loc, + Generic + { + Generic.id = + Generic.Identifier.Unqualified + (name_loc, ({ Ast.Identifier.name; comments } as id_name)); + targs; + } ) as t_ast -> + (* Comments are innecessary, so they can be stripped to meet the generic requirements *) + let ident = (name_loc, name, comments) in + let convert_type_params () = + match targs with + | None -> ([], None) + | Some (loc, targs) -> + let (elemts, targs) = convert_list cx tparams_map targs in + (elemts, Some (loc, targs)) + in + let reconstruct_ast t ?id_t targs = + ( (loc, t), + Generic + { + Generic.id = + Generic.Identifier.Unqualified ((name_loc, Option.value id_t ~default:t), id_name); + targs; + } ) + in + let use_op reason = Op (TypeApplication { type' = reason }) in + (* NOTE: The following two functions implement the currently broken "value spread" logic in the + `Statement` module, adapted to operate on object literal types instead of object literal + values. This code is used in the implementation of `$TEMPORARY$object`, which in turn wraps an + encoding of object literal values as object literal types created by the signature generator + (or, regrettably, the inadvertent user). + + TODO: When the value spread logic in the `Statement` module is fixed to match the "type spread" + logic, this code should be updated. *) + + (************ (begin) adaptation of code in statement.ml *****************) + let object_prop cx map prop = + Ast.Type.Object.( + match prop with + (* named prop or method *) + | { + Property.key = + ( Ast.Expression.Object.Property.Identifier + (loc, { Ast.Identifier.name; comments = _ }) + | Ast.Expression.Object.Property.Literal + (loc, { Ast.Literal.value = Ast.Literal.String name; _ }) ); + value = Property.Init v; + optional; + _; + } -> + let ((_, t), _) = convert cx tparams_map v in + let t = + if optional then + Type.optional t + else + t + in + Properties.add_field name Polarity.Neutral (Some loc) t map + (* We enable some unsafe support for getters and setters. The main unsafe bit + * is that we don't properly havok refinements when getter and setter methods + * are called. 
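+             * That is, refinements that ought to be invalidated by the (possibly side-effecting) getter or setter call are left in place.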
*) + + (* unsafe getter property *) + | { + Property.key = + ( Ast.Expression.Object.Property.Identifier + (id_loc, { Ast.Identifier.name; comments = _ }) + | Ast.Expression.Object.Property.Literal + (id_loc, { Ast.Literal.value = Ast.Literal.String name; _ }) ); + value = Property.Get (func_loc, func); + _; + } -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters func_loc); + let ((_, function_type), _) = + convert cx tparams_map (func_loc, Ast.Type.Function func) + in + let return_t = Type.extract_getter_type function_type in + Properties.add_getter name (Some id_loc) return_t map + (* unsafe setter property *) + | { + Property.key = + ( Ast.Expression.Object.Property.Identifier + (id_loc, { Ast.Identifier.name; comments = _ }) + | Ast.Expression.Object.Property.Literal + (id_loc, { Ast.Literal.value = Ast.Literal.String name; _ }) ); + value = Property.Set (func_loc, func); + _; + } -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters func_loc); + let ((_, function_type), _) = + convert cx tparams_map (func_loc, Ast.Type.Function func) + in + let param_t = Type.extract_setter_type function_type in + Properties.add_setter name (Some id_loc) param_t map + | _ -> raise UnexpectedTemporaryObject) + in + let object_ cx reason ?(allow_sealed = true) props = + Ast.Type.Object.( + (* Use the same reason for proto and the ObjT so we can walk the proto chain + and use the root proto reason to build an error. *) + let obj_proto = ObjProtoT reason in + (* Return an object with specified sealing. *) + let mk_object ?(proto = obj_proto) ?(sealed = false) props = + Obj_type.mk_with_proto cx reason ~sealed ~props proto + in + (* Copy properties from from_obj to to_obj. We should ensure that to_obj is + not sealed. *) + let mk_spread from_obj to_obj ~assert_exact = + let use_op = Op (ObjectSpread { op = reason_of_t from_obj }) in + Tvar.mk_where cx reason (fun t -> + Flow.flow + cx + (to_obj, ObjAssignToT (use_op, reason, from_obj, t, ObjAssign { assert_exact }))) + in + (* When there's no result, return a new object with specified sealing. When + there's result, copy a new object into it, sealing the result when + necessary. + + When building an object incrementally, only the final call to this function + may be with sealed=true, so we will always have an unsealed object to copy + properties to. 
*) + let eval_object ?(proto = obj_proto) ?(sealed = false) (map, result) = + match result with + | None -> mk_object ~proto ~sealed map + | Some result -> + let result = + if not (SMap.is_empty map) then + mk_spread (mk_object ~proto map) result ~assert_exact:false + else + result + in + if not sealed then + result + else + Tvar.mk_where cx reason (fun t -> Flow.flow cx (result, ObjSealT (reason, t))) in - let str_t = mk_singleton_string str_loc value in - reconstruct_ast - (Tvar.mk_where cx reason (fun t -> - Flow.flow cx (remote_module_t, CJSRequireT(reason, t, Context.is_strict cx)) - )) - (Some ( - targs_loc, - [ (str_loc, str_t), StringLiteral { Ast.StringLiteral.value; raw } ] - )) - | _ -> - error_type cx loc (FlowError.EExportsAnnot loc) - ) - - | "$Call" -> - (match convert_type_params () with - | fn::args, targs -> - let reason = mk_reason RFunctionCallType loc in - reconstruct_ast - (EvalT (fn, TypeDestructorT (use_op reason, reason, CallType args), mk_id ())) - targs - | _ -> - error_type cx loc (FlowError.ETypeParamMinArity (loc, 1))) - - | "$TupleMap" -> - check_type_arg_arity cx loc targs 2 (fun () -> - let t1, t2, targs = match convert_type_params () with - | [t1; t2], targs -> t1, t2, targs - | _ -> assert false in - let reason = mk_reason RTupleMap loc in - reconstruct_ast - (EvalT (t1, TypeDestructorT (use_op reason, reason, TypeMap (TupleMap t2)), mk_id ())) - targs - ) - - | "$ObjMap" -> - check_type_arg_arity cx loc targs 2 (fun () -> - let t1, t2, targs = match convert_type_params () with - | [t1; t2], targs -> t1, t2, targs - | _ -> assert false in - let reason = mk_reason RObjectMap loc in - reconstruct_ast - (EvalT (t1, TypeDestructorT (use_op reason, reason, TypeMap (ObjectMap t2)), mk_id ())) - targs - ) - - | "$ObjMapi" -> - check_type_arg_arity cx loc targs 2 (fun () -> - let t1, t2, targs = match convert_type_params () with - | [t1; t2], targs -> t1, t2, targs - | _ -> assert false in - let reason = mk_reason RObjectMapi loc in - reconstruct_ast - (EvalT (t1, TypeDestructorT (use_op reason, reason, TypeMap (ObjectMapi t2)), mk_id ())) - targs - ) - - | "$CharSet" -> - check_type_arg_arity cx loc targs 1 (fun () -> - match targs with - | Some (targs_loc, [ str_loc, StringLiteral { Ast.StringLiteral.value; raw } ]) -> - let str_t = mk_singleton_string str_loc value in - let chars = String_utils.CharSet.of_string value in - let char_str = String_utils.CharSet.to_string chars in (* sorts them *) - let reason = mk_reason (RCustom (spf "character set `%s`" char_str)) loc in - reconstruct_ast - (DefT (reason, CharSetT chars)) - (Some ( - targs_loc, - [ (str_loc, str_t), StringLiteral { Ast.StringLiteral.value; raw } ] - )) - | _ -> - error_type cx loc (FlowError.ECharSetAnnot loc) - ) - - | "this" -> - if SMap.mem "this" tparams_map then - (* We model a this type like a type parameter. The bound on a this + let (sealed, map, proto, result) = + List.fold_left + (fun (sealed, map, proto, result) -> function + (* Enforce that the only way to make unsealed object literals is ...{} (spreading empty object + literals). Otherwise, spreading always returns sealed object literals. + + Also enforce that a spread of an inexact object can only appear as the first element of an + object literal, because otherwise we cannot determine the type of the object literal without + significantly losing precision about elements preceding that spread. + + Finally, the exactness of an object literal type is determined solely by its sealedness. 
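+         For instance, in a literal type like { a: number, ...R } where R is inexact, R may or may not redefine `a`, so the resulting type of `a` cannot be computed precisely; this is why an inexact spread must come first.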
+ + TODO: This treatment of spreads is oblivious to issues that arise when spreading expressions + of union type. + *) + | SpreadProperty (_prop_loc, { SpreadProperty.argument }) -> + let ((_, spread), _) = convert cx tparams_map argument in + let not_empty_object_literal_argument = + match spread with + | DefT (_, _, ObjT { flags; _ }) -> Obj_type.sealed_in_op reason flags.sealed + | _ -> true + in + let obj = eval_object (map, result) in + let result = + mk_spread spread obj ~assert_exact:(not (SMap.is_empty map && result = None)) + in + (sealed && not_empty_object_literal_argument, SMap.empty, proto, Some result) + | Property (_prop_loc, prop) -> + let map = object_prop cx map prop in + (sealed, map, proto, result) + | _ -> raise UnexpectedTemporaryObject) + (allow_sealed, SMap.empty, None, None) + props + in + let sealed = + match result with + | Some _ -> sealed + | None -> sealed && not (SMap.is_empty map) + in + eval_object ?proto ~sealed (map, result)) + in + (************ (end) adaptation of code in statement.ml *****************) + begin + match name with + (* Temporary base types with literal information *) + | "$TEMPORARY$number" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (elemts, targs) = convert_type_params () in + match List.hd elemts with + | DefT (r, trust, SingletonNumT num_lit) -> + reconstruct_ast + (DefT (replace_desc_reason RNumber r, trust, NumT (Literal (None, num_lit)))) + targs + | _ -> error_type cx loc (Error_message.EUnexpectedTemporaryBaseType loc) t_ast) + | "$TEMPORARY$string" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (elemts, targs) = convert_type_params () in + match List.hd elemts with + | DefT (r, trust, SingletonStrT str_lit) -> + reconstruct_ast + (DefT (replace_desc_reason RString r, trust, StrT (Literal (None, str_lit)))) + targs + | _ -> error_type cx loc (Error_message.EUnexpectedTemporaryBaseType loc) t_ast) + | "$TEMPORARY$boolean" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (elemts, targs) = convert_type_params () in + match List.hd elemts with + | DefT (r, trust, SingletonBoolT bool) -> + reconstruct_ast + (DefT (replace_desc_reason RBoolean r, trust, BoolT (Some bool))) + targs + | _ -> error_type cx loc (Error_message.EUnexpectedTemporaryBaseType loc) t_ast) + | "$TEMPORARY$Object$freeze" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let t = Object_freeze.freeze_object cx loc t in + (* TODO fix targs *) + reconstruct_ast t targs) + | "$TEMPORARY$module$exports$assign" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (ts, targs) = convert_type_params () in + match ts with + | [annot; assign] -> + let reason = reason_of_t annot in + let tout = + Tvar.mk_where cx reason (fun tvar -> + Flow.flow cx (annot, ModuleExportsAssignT (reason, assign, tvar))) + in + reconstruct_ast tout targs + | _ -> assert false) + | "$TEMPORARY$function" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (ts, targs) = convert_type_params () in + match ts with + | [annot; assign] -> + begin + match (annot, assign) with + | (DefT (r, trust, FunT (statics, proto, ft)), DefT (_, objtrust, ObjT objtype)) + -> + let reason = reason_of_t statics in + let statics' = + DefT (reason, objtrust, ObjT { objtype with proto_t = FunProtoT reason }) + in + let t = DefT (r, trust, FunT (statics', proto, ft)) in + reconstruct_ast t targs + | ( DefT + ( poly_r, + poly_trust, + PolyT + (tparams_loc, tparams, DefT (r, 
trust, FunT (statics, proto, ft)), id) + ), + DefT (_, objtrust, ObjT objtype) ) -> + let reason = reason_of_t statics in + let statics' = + DefT (reason, objtrust, ObjT { objtype with proto_t = FunProtoT reason }) + in + let t = + DefT + ( poly_r, + poly_trust, + PolyT + (tparams_loc, tparams, DefT (r, trust, FunT (statics', proto, ft)), id) + ) + in + reconstruct_ast t targs + | _ -> + (* fall back *) + reconstruct_ast annot targs + end + | _ -> assert false) + | "$TEMPORARY$object" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (fake_ts, fake_targs) = convert_type_params () in + let t_object = + try + match targs with + | Some (_, [(loc, Ast.Type.Object { Ast.Type.Object.properties; _ })]) -> + let reason = mk_reason RObjectLit loc in + object_ cx reason properties + | _ -> raise UnexpectedTemporaryObject + with UnexpectedTemporaryObject -> (* TODO: lint error *) + List.hd fake_ts + in + reconstruct_ast t_object fake_targs) + | "$TEMPORARY$array" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (elemts, targs) = convert_type_params () in + let elemt = List.hd elemts in + reconstruct_ast + (DefT (mk_reason RArrayLit loc, infer_trust cx, ArrT (ArrayAT (elemt, None)))) + targs) + (* Array *) + | "Array" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (elemts, targs) = convert_type_params () in + let elemt = List.hd elemts in + reconstruct_ast + (DefT (mk_reason RArrayType loc, infer_trust cx, ArrT (ArrayAT (elemt, None)))) + targs) + (* $ReadOnlyArray is the supertype of all tuples and all arrays *) + | "$ReadOnlyArray" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (elemts, targs) = convert_type_params () in + let elemt = List.hd elemts in + reconstruct_ast + (DefT + ( annot_reason (mk_reason RROArrayType loc), + infer_trust cx, + ArrT (ROArrayAT elemt) )) + targs) + (* These utilities are no longer supported *) + (* $Supertype acts as any over supertypes of T *) + | "$Supertype" -> + Error_message.EDeprecatedUtility (loc, name) |> Flow_js.add_output cx; + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + reconstruct_ast (reason_of_t t |> AnyT.annot) targs) + (* $Subtype acts as any over subtypes of T *) + | "$Subtype" -> + Error_message.EDeprecatedUtility (loc, name) |> Flow_js.add_output cx; + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + reconstruct_ast (reason_of_t t |> AnyT.annot) targs) + (* $PropertyType acts as the type of 'x' in object type T *) + | "$PropertyType" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + match convert_type_params () with + | ([t; DefT (_, _, SingletonStrT key)], targs) -> + let reason = mk_reason (RType "$PropertyType") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, PropertyType key), mk_id ())) + targs + | _ -> error_type cx loc (Error_message.EPropertyTypeAnnot loc) t_ast) + (* $ElementType acts as the type of the string elements in object + type T *) + | "$ElementType" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + match convert_type_params () with + | ([t; e], targs) -> + let reason = mk_reason (RType "$ElementType") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, ElementType e), mk_id ())) + targs + | _ -> assert false) + (* $NonMaybeType acts as the type T without null and void *) + | "$NonMaybeType" -> + check_type_arg_arity cx loc t_ast targs 1 
(fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RType "$NonMaybeType") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, NonMaybeType), mk_id ())) + targs) + (* $Shape matches the shape of T *) + | "$Shape" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + reconstruct_ast (ShapeT t) targs) + (* $Diff *) + | "$Diff" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (t1, t2, targs) = + match convert_type_params () with + | ([t1; t2], targs) -> (t1, t2, targs) + | _ -> assert false + in + let reason = mk_reason (RType "$Diff") loc in + reconstruct_ast + (EvalT + ( t1, + TypeDestructorT + (use_op reason, reason, RestType (Type.Object.Rest.IgnoreExactAndOwn, t2)), + mk_id () )) + targs) + (* $ReadOnly *) + | "$ReadOnly" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RType "$ReadOnly") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, ReadOnlyType), mk_id ())) + targs) + (* $Keys is the set of keys of T *) + | "$Keys" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + reconstruct_ast (KeysT (mk_reason RKeySet loc, t)) targs) + (* $Values is a union of all the own enumerable value types of T *) + | "$Values" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RType "$Values") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, ValuesType), mk_id ())) + targs) + | "$Exact" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let desc = RExactType (desc_of_t t) in + reconstruct_ast (ExactT (mk_reason desc loc, t)) targs) + | "$Rest" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (t1, t2, targs) = + match convert_type_params () with + | ([t1; t2], targs) -> (t1, t2, targs) + | _ -> assert false + in + let reason = mk_reason (RType "$Rest") loc in + reconstruct_ast + (EvalT + ( t1, + TypeDestructorT (use_op reason, reason, RestType (Type.Object.Rest.Sound, t2)), + mk_id () )) + targs) + (* $Exports<'M'> is the type of the exports of module 'M' *) + (* TODO: use `import typeof` instead when that lands **) + | "$Exports" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + match targs with + | Some (targs_loc, (str_loc, StringLiteral { Ast.StringLiteral.value; raw }) :: _) -> + let desc = RModule value in + let reason = mk_reason desc loc in + let remote_module_t = + Env.get_var_declared_type cx (internal_module_name value) loc + in + let str_t = mk_singleton_string cx str_loc value in + reconstruct_ast + (Tvar.mk_where cx reason (fun t -> + Flow.flow cx (remote_module_t, CJSRequireT (reason, t, Context.is_strict cx)))) + (Some + ( targs_loc, + [((str_loc, str_t), StringLiteral { Ast.StringLiteral.value; raw })] )) + | _ -> error_type cx loc (Error_message.EExportsAnnot loc) t_ast) + | "$Call" -> + (match convert_type_params () with + | (fn :: args, targs) -> + let reason = mk_reason RFunctionCallType loc in + reconstruct_ast + (EvalT (fn, TypeDestructorT (use_op reason, reason, CallType args), mk_id ())) + targs + | _ -> error_type cx loc (Error_message.ETypeParamMinArity (loc, 1)) t_ast) 
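+      (* $TupleMap<T, F> maps the function type F over each element of the tuple or array type T, e.g. $TupleMap<[number, string], <V>(V) => Array<V>> evaluates to [Array<number>, Array<string>] *)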
+ | "$TupleMap" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (t1, t2, targs) = + match convert_type_params () with + | ([t1; t2], targs) -> (t1, t2, targs) + | _ -> assert false + in + let reason = mk_reason RTupleMap loc in + reconstruct_ast + (EvalT + (t1, TypeDestructorT (use_op reason, reason, TypeMap (TupleMap t2)), mk_id ())) + targs) + | "$ObjMap" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (t1, t2, targs) = + match convert_type_params () with + | ([t1; t2], targs) -> (t1, t2, targs) + | _ -> assert false + in + let reason = mk_reason RObjectMap loc in + reconstruct_ast + (EvalT + (t1, TypeDestructorT (use_op reason, reason, TypeMap (ObjectMap t2)), mk_id ())) + targs) + | "$ObjMapi" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (t1, t2, targs) = + match convert_type_params () with + | ([t1; t2], targs) -> (t1, t2, targs) + | _ -> assert false + in + let reason = mk_reason RObjectMapi loc in + reconstruct_ast + (EvalT + (t1, TypeDestructorT (use_op reason, reason, TypeMap (ObjectMapi t2)), mk_id ())) + targs) + | "$CharSet" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + match targs with + | Some (targs_loc, [(str_loc, StringLiteral { Ast.StringLiteral.value; raw })]) -> + let str_t = mk_singleton_string cx str_loc value in + let chars = String_utils.CharSet.of_string value in + let char_str = String_utils.CharSet.to_string chars in + (* sorts them *) + let reason = mk_reason (RCustom (spf "character set `%s`" char_str)) loc in + reconstruct_ast + (DefT (reason, infer_trust cx, CharSetT chars)) + (Some + ( targs_loc, + [((str_loc, str_t), StringLiteral { Ast.StringLiteral.value; raw })] )) + | _ -> error_type cx loc (Error_message.ECharSetAnnot loc) t_ast) + | "this" -> + if SMap.mem "this" tparams_map then + (* We model a this type like a type parameter. The bound on a this type reflects the interface of `this` exposed in the current environment. Currently, we only support this types in a class environment: a this type in class C is bounded by C. *) - check_type_arg_arity cx loc targs 0 (fun () -> - reconstruct_ast (Flow.reposition cx loc (SMap.find_unsafe "this" tparams_map)) None - ) - else ( - Flow.add_output cx (FlowError.EUnexpectedThisType loc); - (loc, Locationless.AnyT.t), Any (* why locationless? 
*) - ) - - (* Class is the type of the class whose instances are of type T *) - | "Class" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let reason = mk_reason (RStatics (desc_of_t t)) loc in - reconstruct_ast (DefT (reason, ClassT t)) targs - ) - - | "Function" | "function" -> - add_unclear_type_error_if_not_lib_file cx loc; - check_type_arg_arity cx loc targs 0 (fun () -> - let reason = mk_reason RFunctionType loc in - reconstruct_ast (DefT (reason, AnyFunT)) None - ) - - | "Object" -> - add_unclear_type_error_if_not_lib_file cx loc; - check_type_arg_arity cx loc targs 0 (fun () -> - let reason = mk_reason RObjectType loc in - reconstruct_ast (DefT (reason, AnyObjT)) None - ) - - | "Function$Prototype$Apply" -> - check_type_arg_arity cx loc targs 0 (fun () -> - let reason = mk_reason RFunctionType loc in - reconstruct_ast (FunProtoApplyT reason) None - ) - - | "Function$Prototype$Bind" -> - check_type_arg_arity cx loc targs 0 (fun () -> - let reason = mk_reason RFunctionType loc in - reconstruct_ast (FunProtoBindT reason) None - ) - - | "Function$Prototype$Call" -> - check_type_arg_arity cx loc targs 0 (fun () -> - let reason = mk_reason RFunctionType loc in - reconstruct_ast (FunProtoCallT reason) None - ) - - | "Object$Assign" -> - mk_custom_fun cx loc targs ident ObjectAssign - | "Object$GetPrototypeOf" -> - mk_custom_fun cx loc targs ident ObjectGetPrototypeOf - | "Object$SetPrototypeOf" -> - mk_custom_fun cx loc targs ident ObjectSetPrototypeOf - - | "$Compose" -> - mk_custom_fun cx loc targs ident (Compose false) - | "$ComposeReverse" -> - mk_custom_fun cx loc targs ident (Compose true) - - | "React$PropType$Primitive" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let prop_type = (ReactPropType (React.PropType.Primitive (false, t))) in - let (_, prop_t), _ = mk_custom_fun cx loc None ident prop_type in - reconstruct_ast prop_t targs - ) - | "React$PropType$ArrayOf" -> - mk_react_prop_type cx loc targs ident React.PropType.ArrayOf - | "React$PropType$InstanceOf" -> - mk_react_prop_type cx loc targs ident React.PropType.InstanceOf - | "React$PropType$ObjectOf" -> - mk_react_prop_type cx loc targs ident React.PropType.ObjectOf - | "React$PropType$OneOf" -> - mk_react_prop_type cx loc targs ident React.PropType.OneOf - | "React$PropType$OneOfType" -> - mk_react_prop_type cx loc targs ident React.PropType.OneOfType - | "React$PropType$Shape" -> - mk_react_prop_type cx loc targs ident React.PropType.Shape - | "React$CreateClass" -> - mk_custom_fun cx loc targs ident ReactCreateClass - | "React$CreateElement" -> - mk_custom_fun cx loc targs ident ReactCreateElement - | "React$CloneElement" -> - mk_custom_fun cx loc targs ident ReactCloneElement - | "React$ElementFactory" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let t = match convert_type_params () with - | [t], _ -> t - | _ -> assert false in - mk_custom_fun cx loc None ident (ReactElementFactory t) - ) - | "React$ElementProps" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let reason = mk_reason (RType "React$ElementProps") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT - (use_op reason, reason, - ReactElementPropsType), mk_id ())) - targs - ) - | "React$ElementConfig" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let 
reason = mk_reason (RType "React$ElementConfig") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT ( - use_op reason, reason, ReactElementConfigType), mk_id () - )) - targs - ) - | "React$ElementRef" -> - check_type_arg_arity cx loc targs 1 (fun () -> - let ts, targs = convert_type_params () in - let t = List.hd ts in - let reason = mk_reason (RType "React$ElementRef") loc in - reconstruct_ast - (EvalT (t, TypeDestructorT ( - use_op reason, reason, ReactElementRefType), mk_id () - )) - targs - ) - - | "$Facebookism$Idx" -> - mk_custom_fun cx loc targs ident Idx - | "$Facebookism$TypeAssertIs" -> - mk_custom_fun cx loc targs ident TypeAssertIs - | "$Facebookism$TypeAssertThrows" -> - mk_custom_fun cx loc targs ident TypeAssertThrows - | "$Facebookism$TypeAssertWraps" -> - mk_custom_fun cx loc targs ident TypeAssertWraps - - | "$Flow$DebugPrint" -> - mk_custom_fun cx loc targs ident DebugPrint - | "$Flow$DebugThrow" -> - mk_custom_fun cx loc targs ident DebugThrow - | "$Flow$DebugSleep" -> - mk_custom_fun cx loc targs ident DebugSleep - - (* You can specify in the .flowconfig the names of types that should be - * treated like any. So if you have - * suppress_type=$FlowFixMe - * - * Then you can do - * - * var x: $FlowFixMe = 123; - *) - (* TODO move these to type aliases once optional type args + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + reconstruct_ast (Flow.reposition cx loc (SMap.find_unsafe "this" tparams_map)) None) + else ( + Flow.add_output cx (Error_message.EUnexpectedThisType loc); + Tast_utils.error_mapper#type_ t_ast + ) + (* Class is the type of the class whose instances are of type T *) + | "Class" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RStatics (desc_of_t t)) loc in + reconstruct_ast (DefT (reason, infer_trust cx, ClassT t)) targs) + | "Function" + | "function" -> + add_unclear_type_error_if_not_lib_file cx loc; + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let reason = mk_reason RFunctionType loc in + reconstruct_ast (AnyT.make Annotated reason) None) + | "Object" -> + add_unclear_type_error_if_not_lib_file cx loc; + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let reason = mk_reason RObjectType loc in + reconstruct_ast (AnyT.make Annotated reason) None) + | "Function$Prototype$Apply" -> + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let reason = mk_reason RFunctionType loc in + reconstruct_ast (FunProtoApplyT reason) None) + | "Function$Prototype$Bind" -> + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let reason = mk_reason RFunctionType loc in + reconstruct_ast (FunProtoBindT reason) None) + | "Function$Prototype$Call" -> + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let reason = mk_reason RFunctionType loc in + reconstruct_ast (FunProtoCallT reason) None) + | "Object$Assign" -> mk_custom_fun cx loc t_ast targs ident ObjectAssign + | "Object$GetPrototypeOf" -> mk_custom_fun cx loc t_ast targs ident ObjectGetPrototypeOf + | "Object$SetPrototypeOf" -> mk_custom_fun cx loc t_ast targs ident ObjectSetPrototypeOf + | "$Compose" -> mk_custom_fun cx loc t_ast targs ident (Compose false) + | "$ComposeReverse" -> mk_custom_fun cx loc t_ast targs ident (Compose true) + | "React$AbstractComponent" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (ts, targs) = convert_type_params () in + let config = List.nth ts 0 in + let instance = List.nth ts 1 in + reconstruct_ast + (DefT + ( mk_reason 
(RCustom "AbstractComponent") loc, + infer_trust cx, + ReactAbstractComponentT { config; instance } )) + targs) + | "React$Config" -> + check_type_arg_arity cx loc t_ast targs 2 (fun () -> + let (ts, targs) = convert_type_params () in + let props = List.nth ts 0 in + let default_props = List.nth ts 1 in + let reason = mk_reason RReactConfig loc in + reconstruct_ast + (EvalT + ( props, + TypeDestructorT (use_op reason, reason, ReactConfigType default_props), + mk_id () )) + targs) + | "React$PropType$Primitive" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, typed_targs) = convert_type_params () in + let t = List.hd ts in + let prop_type = ReactPropType (React.PropType.Primitive (false, t)) in + let targ = + match targs with + | Some (_, [t]) -> t + | Some _ + | None -> + assert false + in + let ((_, prop_t), _) = mk_custom_fun cx loc targ None ident prop_type in + reconstruct_ast prop_t typed_targs) + | "React$PropType$Primitive$Required" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, typed_targs) = convert_type_params () in + let t = List.hd ts in + let prop_type = ReactPropType (React.PropType.Primitive (true, t)) in + let targ = + match targs with + | Some (_, [t]) -> t + | Some _ + | None -> + assert false + in + let ((_, prop_t), _) = mk_custom_fun cx loc targ None ident prop_type in + reconstruct_ast prop_t typed_targs) + | "React$PropType$ArrayOf" -> + mk_react_prop_type cx loc t_ast targs ident React.PropType.ArrayOf + | "React$PropType$InstanceOf" -> + mk_react_prop_type cx loc t_ast targs ident React.PropType.InstanceOf + | "React$PropType$ObjectOf" -> + mk_react_prop_type cx loc t_ast targs ident React.PropType.ObjectOf + | "React$PropType$OneOf" -> + mk_react_prop_type cx loc t_ast targs ident React.PropType.OneOf + | "React$PropType$OneOfType" -> + mk_react_prop_type cx loc t_ast targs ident React.PropType.OneOfType + | "React$PropType$Shape" -> + mk_react_prop_type cx loc t_ast targs ident React.PropType.Shape + | "React$CreateClass" -> mk_custom_fun cx loc t_ast targs ident ReactCreateClass + | "React$CreateElement" -> mk_custom_fun cx loc t_ast targs ident ReactCreateElement + | "React$CloneElement" -> mk_custom_fun cx loc t_ast targs ident ReactCloneElement + | "React$ElementFactory" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let t = + match convert_type_params () with + | ([t], _) -> t + | _ -> assert false + in + let targ = + match targs with + | Some (_, [t]) -> t + | Some _ + | None -> + assert false + in + mk_custom_fun cx loc targ None ident (ReactElementFactory t)) + | "React$ElementProps" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RType "React$ElementProps") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, ReactElementPropsType), mk_id ())) + targs) + | "React$ElementConfig" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RType "React$ElementConfig") loc in + reconstruct_ast + (EvalT + (t, TypeDestructorT (use_op reason, reason, ReactElementConfigType), mk_id ())) + targs) + | "React$ElementRef" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + let (ts, targs) = convert_type_params () in + let t = List.hd ts in + let reason = mk_reason (RType "React$ElementRef") loc in + reconstruct_ast + (EvalT (t, TypeDestructorT (use_op reason, reason, 
ReactElementRefType), mk_id ())) + targs) + | "$Facebookism$Idx" -> mk_custom_fun cx loc t_ast targs ident Idx + | "$Facebookism$TypeAssertIs" when Context.type_asserts cx -> + mk_custom_fun cx loc t_ast targs ident TypeAssertIs + | "$Facebookism$TypeAssertThrows" when Context.type_asserts cx -> + mk_custom_fun cx loc t_ast targs ident TypeAssertThrows + | "$Facebookism$TypeAssertWraps" when Context.type_asserts cx -> + mk_custom_fun cx loc t_ast targs ident TypeAssertWraps + | "$Flow$DebugPrint" -> mk_custom_fun cx loc t_ast targs ident DebugPrint + | "$Flow$DebugThrow" -> mk_custom_fun cx loc t_ast targs ident DebugThrow + | "$Flow$DebugSleep" -> mk_custom_fun cx loc t_ast targs ident DebugSleep + (* You can specify in the .flowconfig the names of types that should be + * treated like any. So if you have + * suppress_type=$FlowFixMe + * + * Then you can do + * + * var x: $FlowFixMe = 123; + *) + (* TODO move these to type aliases once optional type args work properly in type aliases: #7007731 *) - | type_name when is_suppress_type cx type_name -> - (* Optional type params are info-only, validated then forgotten. *) - let _, targs = convert_type_params () in - reconstruct_ast (AnyT.at loc) targs - - (* TODO: presumably some existing uses of AnyT can benefit from AnyObjT - as well: e.g., where AnyT is used to model prototypes and statics we - don't care about; but then again, some of these uses may be internal, - so while using AnyObjT may offer some sanity checking it might not - reveal user-facing errors. *) - - (* in-scope type vars *) - | _ when SMap.mem name tparams_map -> - check_type_arg_arity cx loc targs 0 (fun () -> - let t = Flow.reposition cx loc (SMap.find_unsafe name tparams_map) in - let id_info = name, t, Type_table.Other in - Type_table.set_info name_loc id_info (Context.type_table cx); - reconstruct_ast t None - ) - - | "$Pred" -> - let fun_reason = mk_reason (RCustom "abstract predicate function") loc in - let static_reason = mk_reason (RCustom "abstract predicate static") loc in - let out_reason = mk_reason (RCustom "open predicate") loc in - - check_type_arg_arity cx loc targs 1 (fun () -> - match convert_type_params () with - | [DefT (_, SingletonNumT (f, _))], targs -> - let n = Pervasives.int_of_float f in - let key_strs = - ListUtils.range 0 n |> - List.map (fun i -> Some ("x_" ^ Pervasives.string_of_int i)) in - let emp = Key_map.empty in - let tins = ListUtils.repeat n (AnyT.at loc) in - let tout = OpenPredT (out_reason, MixedT.at loc, emp, emp) in - reconstruct_ast - (DefT (fun_reason, FunT ( - dummy_static static_reason, - DefT (mk_reason RPrototype loc, AnyT), - mk_functiontype fun_reason tins tout - ~rest_param:None ~def_reason:fun_reason - ~params_names:key_strs ~is_predicate:true - ))) - targs - - | _ -> - error_type cx loc (FlowError.EPredAnnot loc) - ) - - | "$Refine" -> - check_type_arg_arity cx loc targs 3 (fun () -> - match convert_type_params () with - | [base_t; fun_pred_t; DefT (_, SingletonNumT (f, _))], targs -> - let idx = Pervasives.int_of_float f in - let reason = mk_reason (RCustom "refined type") loc in - let pred = LatentP (fun_pred_t, idx) in - reconstruct_ast - (EvalT (base_t, DestructuringT (reason, Refine pred), mk_id())) - targs - | _ -> - error_type cx loc (FlowError.ERefineAnnot loc) - ) - - (* other applications with id as head expr *) - | _ -> - let reason = mk_reason (RType name) loc in - let c = type_identifier cx name name_loc in - let id_info = name, c, Type_table.Other in - Type_table.set_info name_loc id_info 
(Context.type_table cx); - let t, targs = mk_nominal_type cx reason tparams_map (c, targs) in - reconstruct_ast t ~id_t:c targs - - end - -| loc, Function { Function. - params = (params_loc, { Function.Params.params; rest }); - return; - tparams; - } -> - let tparams, tparams_map, tparams_ast = - mk_type_param_declarations cx ~tparams_map tparams in - - let rev_params, rev_param_asts = List.fold_left (fun (params_acc, asts_acc) (param_loc, param) -> - let { Function.Param.name; annot; optional } = param in - let (_, t), _ as annot_ast = convert cx tparams_map annot in - let t = if optional then Type.optional t else t in - let name = Option.map ~f:(fun (loc, name) -> - let id_info = name, t, Type_table.Other in - Type_table.set_info ~extra_tparams:tparams loc id_info (Context.type_table cx); - (loc, t), name - ) name in - (Option.map ~f:ident_name name, t) :: params_acc, - (param_loc, { - Function.Param.name; - annot = annot_ast; - optional - }) :: asts_acc - ) ([], []) params in - - let reason = mk_reason RFunctionType loc in - - let rest_param, rest_param_ast = match rest with - | Some (rest_loc, { Function.RestParam.argument = (param_loc, param) }) -> - let { Function.Param.name; annot; optional } = param in - let (_, rest), _ as annot_ast = convert cx tparams_map annot in - (* TODO - Use AssertRestParamT here. The big problem is that, at this - * point, there might be some unsubstituted type parameters in the rest - * type. Unlike expressions, which know all type parameters have been - * substituted thanks to generate_tests, we visit types outside of - * generate_tests. - * - * One solution might be to build a type visitor that runs during - * generate_tests and does the various subst and tests then - *) - Some (Option.map ~f:ident_name name, loc_of_t rest, rest), - Some (rest_loc, { - Function.RestParam.argument = (param_loc, { - Function.Param.name = Option.map ~f:(fun (loc, name) -> (loc, rest), name) name; - annot = annot_ast; - optional - }); - }) - | None -> None, None in - - let (_, return_t), _ as return_ast = convert cx tparams_map return in - let ft = - DefT (reason, FunT ( - dummy_static reason, - DefT (mk_reason RPrototype loc, AnyT), - { - this_t = DefT (mk_reason RThis loc, AnyT); - params = List.rev rev_params; - rest_param; - return_t; - is_predicate = false; - closure_t = 0; - changeset = Changeset.empty; - def_reason = reason; - })) - in - let id = Context.make_nominal cx in - (loc, poly_type id tparams ft), - Function { - Function.params = (params_loc, { - Function.Params.params = List.rev rev_param_asts; - rest = rest_param_ast; - }); - return = return_ast; - tparams = tparams_ast; - } - -| loc, Object { Object.exact; properties } -> - let reason_desc = RObjectType in - let callable = List.exists (function - | Object.CallProperty (_, { Object.CallProperty.static; _ }) -> not static - | _ -> false - ) properties in - let mk_object ~exact (call_props, dict, props_map, proto, call_deprecated) = - let call = match List.rev call_props with - | [] -> - (* Note that call properties using the call property syntax always override - $call properties. Previously, if both were present, the $call property - was ignored, but is now left as a named property. *) - call_deprecated - | [t] -> Some t - | t0::t1::ts -> - let callable_reason = mk_reason (RCustom "callable object type") loc in - let rep = InterRep.make t0 t1 ts in - let t = DefT (callable_reason, IntersectionT rep) in - Some t - in - (* Previously, call properties were stored in the props map under the key - $call. 
Unfortunately, this made it possible to specify call properties - using this syntax in object types, and some libraries adopted this - syntax. - - Note that call properties using the call property syntax always override - $call properties. Previously, if both were present, the $call property - was ignored, but is now left as a named property. *) - let props_map, call = - if call <> None then props_map, call - else match SMap.get "$call" props_map with - | Some (Field (_, t, (Positive | Neutral))) -> - SMap.remove "$call" props_map, Some t - | _ -> props_map, call - in - (* Use the same reason for proto and the ObjT so we can walk the proto chain - and use the root proto reason to build an error. *) - let props_map, proto = match proto with - | Some t -> - (* The existence of a callable property already implies that - * __proto__ = Function.prototype. Treat __proto__ as a property *) - if callable - then - SMap.add "__proto__" (Field (None, t, Neutral)) props_map, - FunProtoT (locationless_reason RFunctionPrototype) - else - props_map, t - | None -> - props_map, - if callable - then FunProtoT (locationless_reason RFunctionPrototype) - else ObjProtoT (locationless_reason RObjectPrototype) - in - let call = Option.map call ~f:(Context.make_call_prop cx) in - let pmap = Context.make_property_map cx props_map in - let flags = { - sealed = Sealed; - exact; - frozen = false - } in - DefT (mk_reason reason_desc loc, - ObjT (mk_objecttype ~flags ~dict ~call pmap proto)) - in - let property loc prop props proto call_deprecated = - match prop with - | { Object.Property. - key; value = Object.Property.Init value; optional; variance; _method; _ - } -> - begin match key with - (* Previously, call properties were stored in the props map under the key - $call. Unfortunately, this made it possible to specify call properties - using this syntax in object types, and some libraries adopted this - syntax. - - Note that call properties using the call property syntax always override - $call properties. Previously, if both were present, the $call property - was ignored, but is now left as a named property. *) - | Ast.Expression.Object.Property.Identifier (loc, "$call") -> - Flow.add_output cx Flow_error.(EDeprecatedCallSyntax loc); - let (_, t), _ as value_ast = convert cx tparams_map value in - let t = if optional then Type.optional t else t in - let key = Ast.Expression.Object.Property.Identifier ((loc, t), "$call") in - props, proto, Some t, - { prop with Object.Property.key; value = Object.Property.Init value_ast } - | Ast.Expression.Object.Property.Literal - (loc, { Ast.Literal.value = Ast.Literal.String name; _ }) - | Ast.Expression.Object.Property.Identifier (loc, name) -> - Type_inference_hooks_js.dispatch_obj_prop_decl_hook cx name loc; - let (_, t), _ as value_ast = convert cx tparams_map value in - let prop_ast t = { prop with Object.Property. 
- key = begin match key with - | Ast.Expression.Object.Property.Literal (_, lit) -> - Ast.Expression.Object.Property.Literal ((loc, t), lit) - | Ast.Expression.Object.Property.Identifier _ -> - Ast.Expression.Object.Property.Identifier ((loc, t), name) - | _ -> assert_false "branch invariant" - end; - value = Object.Property.Init value_ast; - } in - if name = "__proto__" && not (_method || optional) && variance = None - then - let reason = mk_reason RPrototype (fst value) in - let proto = Tvar.mk_where cx reason (fun tout -> - Flow.flow cx (t, ObjTestProtoT (reason, tout)) - ) in - let prop_ast = prop_ast proto in - let proto = Some (Flow.mk_typeof_annotation cx reason proto) in - props, proto, call_deprecated, prop_ast - else - let t = if optional then Type.optional t else t in - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - let polarity = if _method then Positive else polarity variance in - let props = SMap.add name (Field (Some loc, t, polarity)) props in - props, proto, call_deprecated, (prop_ast t) - | Ast.Expression.Object.Property.Literal (loc, _) - | Ast.Expression.Object.Property.PrivateName (loc, _) - | Ast.Expression.Object.Property.Computed (loc, _) - -> - Flow.add_output cx (FlowError.EUnsupportedKeyInObjectType loc); - props, proto, call_deprecated, Typed_ast.Type.Object.Property.error + | type_name when is_suppress_type cx type_name -> + (* Optional type params are info-only, validated then forgotten. *) + let (_, targs) = convert_type_params () in + reconstruct_ast (AnyT.at Annotated loc) targs + (* in-scope type vars *) + | _ when SMap.mem name tparams_map -> + check_type_arg_arity cx loc t_ast targs 0 (fun () -> + let t = Flow.reposition cx loc (SMap.find_unsafe name tparams_map) in + reconstruct_ast t None) + | "$Pred" -> + let fun_reason = mk_reason (RCustom "abstract predicate function") loc in + let static_reason = mk_reason (RCustom "abstract predicate static") loc in + let out_reason = mk_reason (RCustom "open predicate") loc in + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + match convert_type_params () with + | ([DefT (_, _, SingletonNumT (f, _))], targs) -> + let n = Pervasives.int_of_float f in + let key_strs = + ListUtils.range 0 n + |> Core_list.map ~f:(fun i -> Some ("x_" ^ Pervasives.string_of_int i)) + in + let emp = Key_map.empty in + let tins = Unsoundness.at FunctionPrototype loc |> ListUtils.repeat n in + let tout = + OpenPredT (out_reason, MixedT.at loc |> with_trust bogus_trust, emp, emp) + in + reconstruct_ast + (DefT + ( fun_reason, + infer_trust cx, + FunT + ( dummy_static static_reason, + mk_reason RPrototype loc |> Unsoundness.function_proto_any, + mk_functiontype + fun_reason + tins + tout + ~rest_param:None + ~def_reason:fun_reason + ~params_names:key_strs + ~is_predicate:true ) )) + targs + | _ -> error_type cx loc (Error_message.EPredAnnot loc) t_ast) + | "$Refine" -> + check_type_arg_arity cx loc t_ast targs 3 (fun () -> + match convert_type_params () with + | ([base_t; fun_pred_t; DefT (_, _, SingletonNumT (f, _))], targs) -> + let idx = Pervasives.int_of_float f in + let reason = mk_reason (RCustom "refined type") loc in + let pred = LatentP (fun_pred_t, idx) in + reconstruct_ast (EvalT (base_t, LatentPredT (reason, pred), mk_id ())) targs + | _ -> error_type cx loc (Error_message.ERefineAnnot loc) t_ast) + | "$Trusted" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + match convert_type_params () with + | ([AnyT _], _) -> error_type cx loc 
(Error_message.ETrustedAnnot loc) t_ast + | ([DefT (rs, trust, ty)], targs) -> + let trust = make_trusted cx trust (Error_message.ETrustedAnnot loc) in + reconstruct_ast + (DefT (annot_reason (mk_reason (RTrusted (desc_of_reason rs)) loc), trust, ty)) + targs + | _ -> error_type cx loc (Error_message.ETrustedAnnot loc) t_ast) + | "$Private" -> + check_type_arg_arity cx loc t_ast targs 1 (fun () -> + match convert_type_params () with + | ([AnyT _], _) -> error_type cx loc (Error_message.EPrivateAnnot loc) t_ast + | ([DefT (rs, trust, ty)], targs) -> + let trust = make_private cx trust (Error_message.EPrivateAnnot loc) in + reconstruct_ast + (DefT (annot_reason (mk_reason (RPrivate (desc_of_reason rs)) loc), trust, ty)) + targs + | _ -> error_type cx loc (Error_message.EPrivateAnnot loc) t_ast) + (* other applications with id as head expr *) + | _ -> + let reason = mk_reason (RType name) loc in + let c = type_identifier cx name name_loc in + let (t, targs) = mk_nominal_type cx reason tparams_map (c, targs) in + reconstruct_ast t ~id_t:c targs end - - (* unsafe getter property *) - | { Object.Property. - key = Ast.Expression.Object.Property.Identifier (id_loc, name); - value = Object.Property.Get (loc, f); - _method; _ } -> - Flow_js.add_output cx (FlowError.EUnsafeGettersSetters loc); - let function_type, f_ast = - match convert cx tparams_map (loc, Ast.Type.Function f) with - | (_, function_type), Ast.Type.Function f_ast -> function_type, f_ast - | _ -> assert false + | ( loc, + Function + { Function.params = (params_loc, { Function.Params.params; rest }); return; tparams } ) + -> + let (tparams, tparams_map, tparams_ast) = + mk_type_param_declarations cx ~tparams_map tparams in - let return_t = Type.extract_getter_type function_type in - let id_info = name, return_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - let props = Properties.add_getter name (Some id_loc) return_t props in - props, proto, call_deprecated, - { prop with Object.Property. - key = Ast.Expression.Object.Property.Identifier ((id_loc, return_t), name); - value = Object.Property.Get (loc, f_ast); - } - (* unsafe setter property *) - | { Object.Property. - key = Ast.Expression.Object.Property.Identifier (id_loc, name); - value = Object.Property.Set (loc, f); - _method; _ } -> - Flow_js.add_output cx (FlowError.EUnsafeGettersSetters loc); - let function_type, f_ast = - match convert cx tparams_map (loc, Ast.Type.Function f) with - | (_, function_type), Ast.Type.Function f_ast -> function_type, f_ast - | _ -> assert false + let (rev_params, rev_param_asts) = + List.fold_left + (fun (params_acc, asts_acc) (param_loc, param) -> + let { Function.Param.name; annot; optional } = param in + let (((_, t), _) as annot_ast) = convert cx tparams_map annot in + let t = + if optional then + Type.optional t + else + t + in + let name = Option.map ~f:(fun (loc, id_name) -> ((loc, t), id_name)) name in + ( (Option.map ~f:ident_name name, t) :: params_acc, + (param_loc, { Function.Param.name; annot = annot_ast; optional }) :: asts_acc )) + ([], []) + params in - let param_t = Type.extract_setter_type function_type in - let id_info = name, param_t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - let props = Properties.add_setter name (Some id_loc) param_t props in - props, proto, call_deprecated, - { prop with Object.Property. 
- key = Ast.Expression.Object.Property.Identifier ((id_loc, param_t), name); - value = Object.Property.Set (loc, f_ast); - } - | { Object.Property. - value = Object.Property.Get _ | Object.Property.Set _; _ } -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, ObjectPropertyGetSet)); - props, proto, call_deprecated, Typed_ast.Type.Object.Property.error - in - let add_call c = function - | None -> Some ([c], None, SMap.empty, None, None) - | Some (cs, d, pmap, proto, _) -> - (* Note that call properties using the call property syntax always override - $call properties. Previously, if both were present, the $call property - was ignored, but is now left as a named property. *) - Some (c::cs, d, pmap, proto, None) - in - let make_dict ({ Object.Indexer.id; key; value; variance; _ } as indexer) = - let (_, key), _ as key_ast = convert cx tparams_map key in - let (_, value), _ as value_ast = convert cx tparams_map value in - Some { Type. - dict_name = Option.map ~f:snd id; - key; - value; - dict_polarity = polarity variance; - }, - { indexer with Object.Indexer.key = key_ast; value = value_ast; } - in - let add_dict loc indexer = function - | None -> - let dict, indexer_ast = make_dict indexer in - Some ([], dict, SMap.empty, None, None), indexer_ast - | Some (cs, None, pmap, proto, call_deprecated) -> - let dict, indexer_ast = make_dict indexer in - Some (cs, dict, pmap, proto, call_deprecated), indexer_ast - | Some (_, Some _, _, _, _) as o -> - Flow.add_output cx - FlowError.(EUnsupportedSyntax (loc, MultipleIndexers)); - o, Typed_ast.Type.Object.Indexer.error - in - let add_prop loc p = function - | None -> - let pmap, proto, call_deprecated, p_ast = property loc p SMap.empty None None in - Some ([], None, pmap, proto, call_deprecated), p_ast - | Some (cs, d, pmap, proto, call_deprecated) -> - let pmap, proto, call_deprecated, p_ast = property loc p pmap proto call_deprecated in - Some (cs, d, pmap, proto, call_deprecated), p_ast - in - let o, ts, spread, rev_prop_asts = List.fold_left Object.( - fun (o, ts, spread, rev_prop_asts) -> function - | CallProperty (loc, { CallProperty.value = (value_loc, ft); static }) -> - let t, ft_ast = match convert cx tparams_map (loc, Ast.Type.Function ft) with - | (_, t), Ast.Type.Function ft_ast -> t, ft_ast - | _ -> assert false + let reason = mk_reason RFunctionType loc in + let (rest_param, rest_param_ast) = + match rest with + | Some (rest_loc, { Function.RestParam.argument = (param_loc, param) }) -> + let { Function.Param.name; annot; optional } = param in + let (((_, rest), _) as annot_ast) = convert cx tparams_map annot in + ( Some (Option.map ~f:ident_name name, loc_of_t rest, rest), + Some + ( rest_loc, + { + Function.RestParam.argument = + ( param_loc, + { + Function.Param.name = + Option.map ~f:(fun (loc, id_name) -> ((loc, rest), id_name)) name; + annot = annot_ast; + optional; + } ); + } ) ) + | None -> (None, None) in - let prop_ast = CallProperty (loc, { CallProperty.value = value_loc, ft_ast; static }) in - add_call t o, ts, spread, prop_ast::rev_prop_asts - | Indexer (loc, i) -> - let o, i_ast = add_dict loc i o in - o, ts, spread, Indexer (loc, i_ast)::rev_prop_asts - | Property (loc, p) -> - let o, p_ast = add_prop loc p o in - o, ts, spread, Property (loc, p_ast)::rev_prop_asts - | InternalSlot (loc, slot) -> - let { Object.InternalSlot. 
- id = (_, name); - value; - static=_; (* object props are never static *) - optional; - _method=_; - } = slot in - if name = "call" then - let (_, t), _ as value_ast = convert cx tparams_map value in - let t = if optional then Type.optional t else t in - add_call t o, ts, spread, - InternalSlot (loc, { slot with Object.InternalSlot.value = value_ast })::rev_prop_asts - else ( - Flow.add_output cx FlowError.( - EUnsupportedSyntax (loc, UnsupportedInternalSlot { - name; - static = false; - })); - o, ts, spread, InternalSlot (loc, Typed_ast.Type.Object.InternalSlot.error)::rev_prop_asts - ) - | SpreadProperty (loc, { Object.SpreadProperty.argument }) -> - let ts = match o with - | None -> ts - | Some o -> (mk_object ~exact:true o)::ts + let (((_, return_t), _) as return_ast) = convert cx tparams_map return in + let statics_t = + let reason = update_desc_reason (fun d -> RStatics d) reason in + Obj_type.mk_with_proto cx reason (FunProtoT reason) ~sealed:true ~exact:false ?call:None in - let (_, o), _ as argument_ast = convert cx tparams_map argument in - None, o::ts, true, - SpreadProperty (loc, { SpreadProperty.argument = argument_ast })::rev_prop_asts - ) (None, [], false, []) properties in - let ts = match o with - | None -> ts - | Some o -> mk_object ~exact:spread o::ts - in ( - loc, - match ts with - | [] -> - let t = mk_object ~exact ([], None, SMap.empty, None, None) in - if exact - then ExactT (mk_reason (RExactType reason_desc) loc, t) - else t - | [t] when not spread -> - if exact - then ExactT (mk_reason (RExactType reason_desc) loc, t) - else t - | t::ts -> - let open Type.Object.Spread in - let reason = mk_reason RObjectType loc in - let target = Annot {make_exact = exact} in - EvalT (t, TypeDestructorT (unknown_use, reason, SpreadType (target, ts)), mk_id ()) - ), Object { Object.exact; properties = List.rev rev_prop_asts } - -| loc, Interface {Interface.extends; body} -> - let body_loc, {Ast.Type.Object.properties; exact} = body in - let reason = mk_reason RInterfaceType loc in - let iface_sig, extend_asts = - let id = Context.make_nominal cx in - let extends, extend_asts = extends - |> List.map (mk_interface_super cx tparams_map) - |> List.split - in - let super = - let callable = List.exists Ast.Type.Object.(function - | CallProperty (_, { CallProperty.static; _ }) -> not static - | _ -> false - ) properties in - Class_sig.Interface { extends; callable } - in - Class_sig.empty id reason [] tparams_map super, extend_asts - in - let iface_sig, property_asts = - add_interface_properties cx tparams_map properties iface_sig in - Class_sig.generate_tests cx (fun iface_sig -> - Class_sig.check_super cx reason iface_sig; - Class_sig.check_implements cx reason iface_sig - ) iface_sig |> ignore; - (loc, Class_sig.thistype cx iface_sig), - Interface { Interface. - body = body_loc, { Object. - exact; - properties = property_asts; - }; - extends = extend_asts; - } - -| loc, Exists -> - add_deprecated_type_error_if_not_lib_file cx loc; - (* Do not evaluate existential type variables when map is non-empty. 
This + let ft = + DefT + ( reason, + infer_trust cx, + FunT + ( statics_t, + mk_reason RPrototype loc |> Unsoundness.function_proto_any, + { + this_t = bound_function_dummy_this; + params = List.rev rev_params; + rest_param; + return_t; + is_predicate = false; + closure_t = 0; + changeset = Changeset.empty; + def_reason = reason; + } ) ) + in + let t = poly_type_of_tparams (Context.make_nominal cx) tparams ft in + ( (loc, t), + Function + { + Function.params = + ( params_loc, + { Function.Params.params = List.rev rev_param_asts; rest = rest_param_ast } ); + return = return_ast; + tparams = tparams_ast; + } ) + | (loc, Object { Object.exact; properties; inexact }) -> + let exact_by_default = Context.exact_by_default cx in + let exact_type = exact || ((not inexact) && exact_by_default) in + let (t, properties) = convert_object cx tparams_map loc ~exact:exact_type properties in + if (not exact) && (not inexact) && not exact_by_default then + Flow.add_output cx Error_message.(EImplicitInexactObject loc); + ((loc, t), Object { Object.exact; properties; inexact }) + | (loc, Interface { Interface.extends; body }) -> + let (body_loc, { Ast.Type.Object.properties; exact; inexact = _inexact }) = body in + let reason = mk_reason RInterfaceType loc in + let (iface_sig, extend_asts) = + let id = ALoc.none in + let (extends, extend_asts) = + extends |> Core_list.map ~f:(mk_interface_super cx tparams_map) |> List.split + in + let super = + let callable = + List.exists + Ast.Type.Object.( + function + | CallProperty (_, { CallProperty.static; _ }) -> not static + | _ -> false) + properties + in + Class_type_sig.Interface { inline = true; extends; callable } + in + (Class_type_sig.empty id reason None tparams_map super, extend_asts) + in + let (iface_sig, property_asts) = + add_interface_properties cx tparams_map properties iface_sig + in + Class_type_sig.generate_tests + cx + (fun iface_sig -> + Class_type_sig.check_super cx reason iface_sig; + Class_type_sig.check_implements cx reason iface_sig) + iface_sig + |> ignore; + ( (loc, Class_type_sig.thistype cx iface_sig), + Interface + { + Interface.body = + (body_loc, { Object.exact; inexact = false; properties = property_asts }); + extends = extend_asts; + } ) + | (loc, Exists) -> + add_deprecated_type_error_if_not_lib_file cx loc; + + (* Do not evaluate existential type variables when map is non-empty. This ensures that existential type variables under a polymorphic type remain unevaluated until the polymorphic type is applied. 
*) - let force = SMap.is_empty tparams_map in - let reason = derivable_reason (mk_reason RExistential loc) in - if force then begin - let tvar = Tvar.mk cx reason in - Type_table.set_info loc ("Star", tvar, Type_table.Exists) (Context.type_table cx); - (loc, tvar), Exists - end - else (loc, ExistsT reason), Exists -) + let force = SMap.is_empty tparams_map in + let reason = derivable_reason (mk_reason RExistential loc) in + if force then + let tvar = Tvar.mk cx reason in + ((loc, tvar), Exists) + else + ((loc, ExistsT reason), Exists)) and convert_list = let rec loop (ts, tasts) cx tparams_map = function - | [] -> (List.rev ts, List.rev tasts) - | ast::asts -> - let (_, t), _ as tast = convert cx tparams_map ast in - loop (t::ts, tast::tasts) cx tparams_map asts + | [] -> (List.rev ts, List.rev tasts) + | ast :: asts -> + let (((_, t), _) as tast) = convert cx tparams_map ast in + loop (t :: ts, tast :: tasts) cx tparams_map asts in - fun cx tparams_map asts -> - loop ([], []) cx tparams_map asts + (fun cx tparams_map asts -> loop ([], []) cx tparams_map asts) and convert_opt cx tparams_map ast_opt = let tast_opt = Option.map ~f:(convert cx tparams_map) ast_opt in let t_opt = Option.map ~f:(fun ((_, x), _) -> x) tast_opt in - t_opt, tast_opt - -and convert_qualification ?(lookup_mode=ForType) cx reason_prefix - = Ast.Type.Generic.Identifier.(function - | Qualified (loc, { qualification; id; }) as qualified -> - let m, qualification = - convert_qualification ~lookup_mode cx reason_prefix qualification in - let id_loc, name = id in - let desc = RCustom (spf "%s `%s`" reason_prefix (qualified_name qualified)) in - let reason = mk_reason desc loc in - let id_reason = mk_reason desc id_loc in - let t = Tvar.mk_where cx reason (fun t -> - let id_info = name, t, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - let use_op = Op (GetProperty (mk_reason (RType (qualified_name qualified)) loc)) in - Flow.flow cx (m, GetPropT (use_op, id_reason, Named (id_reason, name), t)); - ) in - t, Qualified (loc, { qualification; id = (id_loc, t), name; }) - - | Unqualified (loc, name) -> - let t = Env.get_var ~lookup_mode cx name loc in - let id_info = name, t, Type_table.Other in - Type_table.set_info loc id_info (Context.type_table cx); - t, Unqualified ((loc, t), name) -) + (t_opt, tast_opt) + +and convert_qualification ?(lookup_mode = ForType) cx reason_prefix = + Ast.Type.Generic.Identifier.( + function + | Qualified (loc, { qualification; id }) as qualified -> + let (m, qualification) = convert_qualification ~lookup_mode cx reason_prefix qualification in + let (id_loc, id_name) = id in + let { Ast.Identifier.name; comments = _ } = id_name in + let desc = RCustom (spf "%s `%s`" reason_prefix (qualified_name qualified)) in + let reason = mk_reason desc loc in + let id_reason = mk_reason desc id_loc in + let t = + Tvar.mk_where cx reason (fun t -> + let use_op = Op (GetProperty (mk_reason (RType (qualified_name qualified)) loc)) in + Flow.flow cx (m, GetPropT (use_op, id_reason, Named (id_reason, name), t))) + in + (t, Qualified (loc, { qualification; id = ((id_loc, t), id_name) })) + | Unqualified (loc, ({ Ast.Identifier.name; comments = _ } as id_name)) -> + let t = Env.get_var ~lookup_mode cx name loc in + (t, Unqualified ((loc, t), id_name))) + +and convert_object = + let obj_proto_t = ObjProtoT (locationless_reason RObjectPrototype) in + let fun_proto_t = FunProtoT (locationless_reason RFunctionPrototype) in + let module Acc = struct + type element = + | Spread of 
Type.t + | Slice of { + dict: Type.dicttype option; + pmap: Type.Properties.t; + } -and mk_func_sig = - let open Ast.Type.Function in - let add_param cx tparams_map (x, rev_param_asts) (loc, param) = - let { Param.name = id; annot; optional } = param in - let (_, t), _ as annot = convert cx tparams_map annot in - Func_params.add_simple cx ~optional loc id t x, - (loc, { Param. - name = Option.map ~f:(fun (loc, name) -> (loc, t), name) id; - annot; - optional - })::rev_param_asts - in - let add_rest cx tparams_map (loc, param) x = - let { Param.name = id; annot; optional } = param in - let (_, t), _ as annot = convert cx tparams_map annot in - let () = - let name = Option.map id ~f:snd in - let reason = mk_reason (RRestParameter name) (loc_of_t t) in - Flow.flow cx (t, AssertRestParamT reason) + type t = { + dict: Type.dicttype option; + pmap: Type.Properties.t; + tail: element list; + proto: Type.t option; + calls: Type.t list; + } + + let empty = { dict = None; pmap = SMap.empty; tail = []; proto = None; calls = [] } + + let empty_slice = Slice { dict = None; pmap = SMap.empty } + + let head_slice { dict; pmap; _ } = + if dict = None && SMap.is_empty pmap then + None + else + Some (Slice { dict; pmap }) + + let add_call c = function + | { proto = Some _; _ } -> Error Error_message.ExplicitCallAfterProto + | acc -> Ok { acc with calls = c :: acc.calls } + + let add_dict d = function + | { dict = Some _; _ } -> Error Error_message.MultipleIndexers + | acc -> Ok { acc with dict = Some d } + + let add_prop f acc = { acc with pmap = f acc.pmap } + + let add_proto p = function + | { proto = Some _; _ } -> Error Error_message.MultipleProtos + | { calls = _ :: _; _ } -> Error Error_message.ExplicitProtoAfterCall + | acc -> Ok { acc with proto = Some p } + + let add_spread t acc = + let tail = + match head_slice acc with + | None -> acc.tail + | Some slice -> slice :: acc.tail + in + { acc with dict = None; pmap = SMap.empty; tail = Spread t :: tail } + + let elements_rev acc = + match head_slice acc with + | Some slice -> (slice, acc.tail) + | None -> + (match acc.tail with + | [] -> (empty_slice, []) + | x :: xs -> (x, xs)) + + let proto = function + | { proto = Some t; _ } -> t + | { calls = _ :: _; _ } -> fun_proto_t + | _ -> obj_proto_t + + let calls_rev acc = acc.calls + end in + let mk_object cx loc ~src_loc ~exact call dict pmap proto = + let pmap = + if src_loc && Env.peek_scope () |> Scope.is_toplevel then + Context.make_source_property_map cx pmap loc + else + Context.generate_property_map cx pmap in - Func_params.add_rest cx loc id t x, - (loc, { Param. - name = Option.map ~f:(fun (loc, name) -> (loc, t), name) id; - annot; - optional - }) + let call = Option.map ~f:(Context.make_call_prop cx) call in + let flags = { sealed = Sealed; exact; frozen = false } in + DefT + ( mk_reason RObjectType loc, + infer_trust cx, + ObjT (mk_objecttype ~flags ~dict ~call pmap proto) ) in - let convert_params cx tparams_map (loc, {Params.params; rest}) = - let params, rev_param_asts = - List.fold_left (add_param cx tparams_map) (Func_params.empty, []) params in - match rest with - | Some (rest_loc, { RestParam.argument }) -> - let params, argument = add_rest cx tparams_map argument params in - params, ( - loc, - { Params. 
- params = List.rev rev_param_asts; - rest = Some (rest_loc, { RestParam.argument; }) - } - ) - | None -> - params, (loc, { Params.params = List.rev rev_param_asts; rest = None; }) + let mk_object_annot cx loc ~exact call dict pmap proto = + let t = mk_object cx loc ~src_loc:true ~exact call dict pmap proto in + if exact then + ExactT (mk_reason (RExactType RObjectType) loc, t) + else + t in - fun cx tparams_map loc func -> - let tparams, tparams_map, tparams_ast = - mk_type_param_declarations cx ~tparams_map func.tparams in - Type_table.with_typeparams tparams (Context.type_table cx) @@ fun _ -> - let fparams, params_ast = convert_params cx tparams_map func.Ast.Type.Function.params in - let (_, return_t), _ as return_ast = convert cx tparams_map func.return in - { Func_sig. - reason = mk_reason RFunctionType loc; - kind = Func_sig.Ordinary; - tparams; - tparams_map; - fparams; - body = None; - return_t; - }, { Ast.Type.Function. - tparams = tparams_ast; - params = params_ast; - return = return_ast; - } - -and mk_type cx tparams_map reason = function - | None -> + Ast.Type.( + let named_property cx tparams_map loc acc prop = + match prop with + | { Object.Property.key; value = Object.Property.Init value; optional; variance; _method; _ } + -> + begin + match key with + | Ast.Expression.Object.Property.Literal + (loc, { Ast.Literal.value = Ast.Literal.String name; _ }) + | Ast.Expression.Object.Property.Identifier (loc, { Ast.Identifier.name; comments = _ }) + -> + Type_inference_hooks_js.dispatch_obj_type_prop_decl_hook cx name loc; + let (((_, t), _) as value_ast) = convert cx tparams_map value in + let prop_ast t = + { + prop with + Object.Property.key = + begin + match key with + | Ast.Expression.Object.Property.Literal (_, lit) -> + Ast.Expression.Object.Property.Literal ((loc, t), lit) + | Ast.Expression.Object.Property.Identifier + (_loc, { Ast.Identifier.name = _; comments = comments_inner }) -> + Ast.Expression.Object.Property.Identifier + ((loc, t), { Ast.Identifier.name; comments = comments_inner }) + | _ -> assert_false "branch invariant" + end; + value = Object.Property.Init value_ast; + } + in + if name = "__proto__" && (not (_method || optional)) && variance = None then + let reason = mk_reason RPrototype (fst value) in + let proto = + Tvar.mk_where cx reason (fun tout -> + Flow.flow cx (t, ObjTestProtoT (reason, tout))) + in + let acc = + match Acc.add_proto (Flow.mk_typeof_annotation cx reason proto) acc with + | Ok acc -> acc + | Error err -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, err)); + acc + in + (acc, prop_ast proto) + else + let t = + if optional then + Type.optional t + else + t + in + let polarity = + if _method then + Polarity.Positive + else + polarity variance + in + (Acc.add_prop (Properties.add_field name polarity (Some loc) t) acc, prop_ast t) + | Ast.Expression.Object.Property.Literal (loc, _) + | Ast.Expression.Object.Property.PrivateName (loc, _) + | Ast.Expression.Object.Property.Computed (loc, _) -> + Flow.add_output cx (Error_message.EUnsupportedKeyInObjectType loc); + let (_, prop_ast) = Tast_utils.error_mapper#object_property_type (loc, prop) in + (acc, prop_ast) + end + (* unsafe getter property *) + | { + Object.Property.key = + Ast.Expression.Object.Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)); + value = Object.Property.Get (loc, f); + _method; + _; + } -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc); + let (function_type, f_ast) = + match convert cx tparams_map (loc, 
Ast.Type.Function f) with + | ((_, function_type), Ast.Type.Function f_ast) -> (function_type, f_ast) + | _ -> assert false + in + let return_t = Type.extract_getter_type function_type in + ( Acc.add_prop (Properties.add_getter name (Some id_loc) return_t) acc, + { + prop with + Object.Property.key = + Ast.Expression.Object.Property.Identifier ((id_loc, return_t), id_name); + value = Object.Property.Get (loc, f_ast); + } ) + (* unsafe setter property *) + | { + Object.Property.key = + Ast.Expression.Object.Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)); + value = Object.Property.Set (loc, f); + _method; + _; + } -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc); + let (function_type, f_ast) = + match convert cx tparams_map (loc, Ast.Type.Function f) with + | ((_, function_type), Ast.Type.Function f_ast) -> (function_type, f_ast) + | _ -> assert false + in + let param_t = Type.extract_setter_type function_type in + ( Acc.add_prop (Properties.add_setter name (Some id_loc) param_t) acc, + { + prop with + Object.Property.key = + Ast.Expression.Object.Property.Identifier ((id_loc, param_t), id_name); + value = Object.Property.Set (loc, f_ast); + } ) + | { Object.Property.value = Object.Property.Get _ | Object.Property.Set _; _ } -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, ObjectPropertyGetSet)); + let (_, prop_ast) = Tast_utils.error_mapper#object_property_type (loc, prop) in + (acc, prop_ast) + in + let make_call cx tparams_map loc call = + let { Object.CallProperty.value = (fn_loc, fn); static } = call in + let (t, fn) = + match convert cx tparams_map (loc, Ast.Type.Function fn) with + | ((_, t), Ast.Type.Function fn) -> (t, fn) + | _ -> assert false + in + (t, { Object.CallProperty.value = (fn_loc, fn); static }) + in + let make_dict cx tparams_map indexer = + let { Object.Indexer.id; key; value; static; variance } = indexer in + let (((_, key), _) as key_ast) = convert cx tparams_map key in + let (((_, value), _) as value_ast) = convert cx tparams_map value in + ( { + Type.dict_name = Option.map ~f:ident_name id; + key; + value; + dict_polarity = polarity variance; + }, + { Object.Indexer.id; key = key_ast; value = value_ast; static; variance } ) + in + let property cx tparams_map acc = + Object.( + function + | CallProperty (loc, call) -> + let (t, call) = make_call cx tparams_map loc call in + let acc = + match Acc.add_call t acc with + | Ok acc -> acc + | Error err -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, err)); + acc + in + (acc, CallProperty (loc, call)) + | Indexer (loc, i) -> + let (d, i) = make_dict cx tparams_map i in + let acc = + match Acc.add_dict d acc with + | Ok acc -> acc + | Error err -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, err)); + acc + in + (acc, Indexer (loc, i)) + | Property (loc, p) -> + let (acc, p) = named_property cx tparams_map loc acc p in + (acc, Property (loc, p)) + | InternalSlot (loc, slot) as prop -> + let { + Object.InternalSlot.id = (_, { Ast.Identifier.name; comments = _ }); + value; + static = _; + (* object props are never static *) + optional; + _method = _; + } = + slot + in + if name = "call" then + let (((_, t), _) as value_ast) = convert cx tparams_map value in + let t = + if optional then + Type.optional t + else + t + in + let acc = + match Acc.add_call t acc with + | Ok acc -> acc + | Error err -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, err)); + acc + in + (acc, InternalSlot (loc, { slot with 
Object.InternalSlot.value = value_ast })) + else ( + Flow.add_output + cx + Error_message.( + EUnsupportedSyntax (loc, UnsupportedInternalSlot { name; static = false })); + (acc, Tast_utils.error_mapper#object_type_property prop) + ) + | SpreadProperty (loc, { Object.SpreadProperty.argument }) -> + let (((_, t), _) as argument_ast) = convert cx tparams_map argument in + (Acc.add_spread t acc, SpreadProperty (loc, { SpreadProperty.argument = argument_ast }))) + in + fun cx tparams_map loc ~exact properties -> + let (acc, rev_prop_asts) = + List.fold_left + (fun (acc, rev_prop_asts) p -> + let (acc, prop_ast) = property cx tparams_map acc p in + (acc, prop_ast :: rev_prop_asts)) + (Acc.empty, []) + properties + in + let proto = Acc.proto acc in + let calls_rev = Acc.calls_rev acc in let t = - if Context.is_weak cx - then AnyT.why reason - else Tvar.mk cx reason + match Acc.elements_rev acc with + | (Acc.Slice { dict; pmap }, []) -> + let ts = + List.rev_map + (fun call -> mk_object_annot cx loc ~exact (Some call) dict pmap proto) + calls_rev + in + (match ts with + | [] -> mk_object_annot cx loc ~exact None dict pmap proto + | [t] -> t + | t0 :: t1 :: ts -> + let callable_reason = mk_reason (RCustom "callable object type") loc in + let rep = InterRep.make t0 t1 ts in + IntersectionT (callable_reason, rep)) + | os -> + Type.Object.Spread.( + let reason = mk_reason RObjectType loc in + let target = Annot { make_exact = exact } in + let (t, ts, head_slice) = + let (t, ts) = os in + (* We don't need to do this recursively because every pair of slices must be separated + * by a spread *) + match (t, ts) with + | (Acc.Spread t, ts) -> + let ts = + List.map + (function + | Acc.Spread t -> Type t + | Acc.Slice { dict; pmap } -> + Slice { Type.Object.Spread.reason; prop_map = pmap; dict }) + ts + in + (t, ts, None) + | (Acc.Slice { dict; pmap = prop_map }, Acc.Spread t :: ts) -> + let head_slice = { Type.Object.Spread.reason; prop_map; dict } in + let ts = + List.map + (function + | Acc.Spread t -> Type t + | Acc.Slice { dict; pmap } -> + Slice { Type.Object.Spread.reason; prop_map = pmap; dict }) + ts + in + (t, ts, Some head_slice) + | _ -> failwith "Invariant Violation: spread list has two slices in a row" + in + EvalT + ( t, + TypeDestructorT (unknown_use, reason, SpreadType (target, ts, head_slice)), + mk_id () )) in - Hashtbl.replace (Context.annot_table cx) (aloc_of_reason reason |> ALoc.to_loc) t; - t, None + (t, List.rev rev_prop_asts)) +and mk_func_sig = + Ast.Type.Function.( + let add_param cx tparams_map x param = + let (loc, { Param.name; annot; optional }) = param in + let (((_, t), _) as annot) = convert cx tparams_map annot in + let name = Option.map ~f:(fun (loc, id_name) -> ((loc, t), id_name)) name in + let param = (t, (loc, { Param.name; annot; optional })) in + Func_type_params.add_param param x + in + let add_rest cx tparams_map x rest_param = + let (rest_loc, { RestParam.argument = (loc, { Param.name; annot; optional }) }) = + rest_param + in + let (((_, t), _) as annot) = convert cx tparams_map annot in + let name = Option.map ~f:(fun (loc, id_name) -> ((loc, t), id_name)) name in + let rest = + (t, (rest_loc, { RestParam.argument = (loc, { Param.name; annot; optional }) })) + in + Func_type_params.add_rest rest x + in + let convert_params cx tparams_map (loc, { Params.params; rest }) = + let fparams = + Func_type_params.empty (fun params rest -> Some (loc, { Params.params; rest })) + in + let fparams = List.fold_left (add_param cx tparams_map) fparams params in + let fparams 
= Option.fold ~f:(add_rest cx tparams_map) ~init:fparams rest in + let params_ast = Func_type_params.eval cx fparams in + (fparams, Option.value_exn params_ast) + in + fun cx tparams_map loc func -> + let (tparams, tparams_map, tparams_ast) = + mk_type_param_declarations cx ~tparams_map func.tparams + in + let (fparams, params_ast) = convert_params cx tparams_map func.Ast.Type.Function.params in + let (((_, return_t), _) as return_ast) = convert cx tparams_map func.return in + let reason = mk_reason RFunctionType loc in + let knot = Tvar.mk cx reason in + ( { + Func_type_sig.reason; + kind = Func_sig.Ordinary; + tparams; + tparams_map; + fparams; + body = None; + return_t; + knot; + }, + { Ast.Type.Function.tparams = tparams_ast; params = params_ast; return = return_ast } )) + +and mk_type cx tparams_map reason = function + | None -> + let t = + if Context.is_weak cx then + Unsoundness.why WeakContext reason + else + Tvar.mk cx reason + in + (t, None) | Some annot -> - let (_, t), _ as annot_ast = convert cx tparams_map annot in - t, Some annot_ast + let (((_, t), _) as annot_ast) = convert cx tparams_map annot in + (t, Some annot_ast) and mk_type_annotation cx tparams_map reason = function -| None -> - fst (mk_type cx tparams_map reason None), None -| Some (loc, annot) -> - let t, annot_ast = mk_type cx tparams_map reason (Some annot) in - t, Option.map ~f:(fun ast -> loc, ast) annot_ast - -and mk_singleton_string loc key = + | T.Missing loc -> + let (t, _) = mk_type cx tparams_map reason None in + (t, T.Missing (loc, t)) + | T.Available annot -> + let (t, ast_annot) = mk_type_available_annotation cx tparams_map annot in + (t, T.Available ast_annot) + +and mk_return_type_annotation cx tparams_map reason ~definitely_returns_void annot = + match annot with + | T.Missing loc when definitely_returns_void -> + let t = VoidT.why reason |> with_trust literal_trust in + (t, T.Missing (loc, t)) + (* TODO we could probably take the same shortcut for functions with an explicit `void` annotation + and no explicit returns *) + | _ -> mk_type_annotation cx tparams_map reason annot + +and mk_type_available_annotation cx tparams_map (loc, annot) = + let (((_, t), _) as annot_ast) = convert cx tparams_map annot in + (t, (loc, annot_ast)) + +and mk_singleton_string cx loc key = let reason = mk_reason (RStringLit key) loc in - DefT (reason, SingletonStrT key) + DefT (reason, infer_trust cx, SingletonStrT key) -and mk_singleton_number loc num raw = +and mk_singleton_number cx loc num raw = let reason = mk_reason (RNumberLit raw) loc in - DefT (reason, SingletonNumT (num, raw)) + DefT (reason, infer_trust cx, SingletonNumT (num, raw)) -and mk_singleton_boolean loc b = +and mk_singleton_boolean cx loc b = let reason = mk_reason (RBooleanLit b) loc in - DefT (reason, SingletonBoolT b) + DefT (reason, infer_trust cx, SingletonBoolT b) (* Given the type of expression C and type arguments T1...Tn, return the type of values described by C, or C when there are no type arguments. 
*) and mk_nominal_type cx reason tparams_map (c, targs) = let reason = annot_reason reason in match targs with - | None -> - Flow.mk_instance cx reason c, None + | None -> (Flow.mk_instance cx reason c, None) | Some (loc, targs) -> - let annot_loc = aloc_of_reason reason |> ALoc.to_loc in - let targs, targs_ast = convert_list cx tparams_map targs in - typeapp ~annot_loc c targs, Some (loc, targs_ast) + let annot_loc = aloc_of_reason reason in + let (targs, targs_ast) = convert_list cx tparams_map targs in + (typeapp ~annot_loc c targs, Some (loc, targs_ast)) (* take a list of AST type param declarations, do semantic checking and create types for them. *) -and mk_type_param_declarations cx ?(tparams_map=SMap.empty) tparams = - let open Ast.Type.ParameterDeclaration in - let add_type_param (tparams, tparams_map, bounds_map, rev_asts) (loc, type_param) = - let { TypeParam.name = name_loc, name as id; bound; variance; default; } = type_param in - let reason = mk_reason (RType name) name_loc in - let bound, bound_ast = match bound with - | None -> DefT (reason, MixedT Mixed_everything), None - | Some (bound_loc, u) -> - let bound, bound_ast = mk_type cx tparams_map reason (Some u) in - bound, Option.map ~f:(fun ast -> bound_loc, ast) bound_ast - in - let default, default_ast = match default with - | None -> None, None - | Some default -> - let t, default_ast = mk_type cx tparams_map reason (Some default) in - Flow.flow_t cx (Flow.subst cx bounds_map t, - Flow.subst cx bounds_map bound); - Some t, default_ast in - let polarity = polarity variance in - let tparam = { reason; name; bound; polarity; default; } in - let t = BoundT (reason, name, polarity) in - let id_info = name, t, Type_table.Other in - - let name_ast = - let loc, ident = id in - (loc, t), ident +and mk_type_param_declarations cx ?(tparams_map = SMap.empty) tparams = + Ast.Type.ParameterDeclaration.( + let add_type_param (tparams, tparams_map, bounds_map, rev_asts) (loc, type_param) = + let { + TypeParam.name = (name_loc, { Ast.Identifier.name; comments = _ }) as id; + bound; + variance; + default; + } = + type_param + in + let reason = mk_reason (RType name) name_loc in + let (bound, bound_ast) = + match bound with + | Ast.Type.Missing loc -> + let t = DefT (reason, infer_trust cx, MixedT Mixed_everything) in + (t, Ast.Type.Missing (loc, t)) + | Ast.Type.Available (bound_loc, u) -> + let (bound, bound_ast) = mk_type cx tparams_map reason (Some u) in + let bound_ast = + match bound_ast with + | Some ast -> Ast.Type.Available (bound_loc, ast) + | None -> Ast.Type.Missing (bound_loc, bound) + in + (bound, bound_ast) + in + let (default, default_ast) = + match default with + | None -> (None, None) + | Some default -> + let (t, default_ast) = mk_type cx tparams_map reason (Some default) in + Flow.flow_t cx (Flow.subst cx bounds_map t, Flow.subst cx bounds_map bound); + (Some t, default_ast) + in + let polarity = polarity variance in + let tparam = { reason; name; bound; polarity; default } in + let t = BoundT (reason, name, polarity) in + let name_ast = + let (loc, id_name) = id in + ((loc, t), id_name) + in + let ast = + ( (loc, t), + { TypeParam.name = name_ast; bound = bound_ast; variance; default = default_ast } ) + in + let tparams = tparam :: tparams in + ( tparams, + SMap.add name t tparams_map, + SMap.add name (Flow.subst cx bounds_map bound) bounds_map, + ast :: rev_asts ) in - - let ast = (loc, t), { - TypeParam.name = name_ast; - bound = bound_ast; - variance; - default = default_ast - } in - let tparams = tparam :: tparams 
in - Type_table.set_info ~extra_tparams:tparams name_loc id_info (Context.type_table cx); - tparams, - SMap.add name t tparams_map, - SMap.add name (Flow.subst cx bounds_map bound) bounds_map, - ast :: rev_asts - in - let rev_tparams, tparams_map, _, rev_asts = - tparams - |> Option.value_map ~f:snd ~default:[] - |> List.fold_left add_type_param ([], tparams_map, SMap.empty, []) - in - let tparams_ast = - Option.map ~f:(fun (tparams_loc, _) -> tparams_loc, List.rev rev_asts) tparams in - List.rev rev_tparams, tparams_map, tparams_ast + match tparams with + | None -> (None, tparams_map, None) + | Some (tparams_loc, tparams) -> + let (rev_tparams, tparams_map, _, rev_asts) = + List.fold_left add_type_param ([], tparams_map, SMap.empty, []) tparams + in + let tparams_ast = Some (tparams_loc, List.rev rev_asts) in + let tparams = + match List.rev rev_tparams with + | [] -> None + | hd :: tl -> Some (tparams_loc, (hd, tl)) + in + (tparams, tparams_map, tparams_ast)) and type_identifier cx name loc = - if Type_inference_hooks_js.dispatch_id_hook cx name loc - then AnyT.at loc - else if name = "undefined" - then VoidT.at loc - else Env.var_ref ~lookup_mode:ForType cx name loc - -and polarity = Ast.Variance.(function - | Some (_, Plus) -> Positive - | Some (_, Minus) -> Negative - | None -> Neutral -) - -and mk_interface_super cx tparams_map (loc, {Ast.Type.Generic.id; targs}) = + if Type_inference_hooks_js.dispatch_id_hook cx name loc then + Unsoundness.at InferenceHooks loc + else if name = "undefined" then + VoidT.at loc |> with_trust_inference cx + else + Env.var_ref ~lookup_mode:ForType cx name loc + +and mk_interface_super cx tparams_map (loc, { Ast.Type.Generic.id; targs }) = let lookup_mode = Env.LookupMode.ForType in - let c, id = convert_qualification ~lookup_mode cx "extends" id in - let typeapp, targs = match targs with - | None -> (loc, c, None), None - | Some (targs_loc, targs) -> - let ts, targs_ast = convert_list cx tparams_map targs in - (loc, c, Some ts), Some (targs_loc, targs_ast) + let (c, id) = convert_qualification ~lookup_mode cx "extends" id in + let (typeapp, targs) = + match targs with + | None -> ((loc, c, None), None) + | Some (targs_loc, targs) -> + let (ts, targs_ast) = convert_list cx tparams_map targs in + ((loc, c, Some ts), Some (targs_loc, targs_ast)) in - typeapp, (loc, { Ast.Type.Generic.id; targs }) + (typeapp, (loc, { Ast.Type.Generic.id; targs })) and add_interface_properties cx tparams_map properties s = - let open Class_sig in - let x, rev_prop_asts = - List.fold_left Ast.Type.Object.(fun (x, rev_prop_asts) -> function - | CallProperty (loc, { CallProperty.value = value_loc, ft; static }) -> - let (_, t), ft = convert cx tparams_map (loc, Ast.Type.Function ft) in - let ft = match ft with Ast.Type.Function ft -> ft | _ -> assert false in - append_call ~static t x, - CallProperty (loc, { CallProperty. 
- value = value_loc, ft; - static; - })::rev_prop_asts - | Indexer (loc, { Indexer.static; _ }) - when mem_field ~static "$key" x -> - Flow.add_output cx - Flow_error.(EUnsupportedSyntax (loc, MultipleIndexers)); - x, Indexer (loc, Typed_ast.Type.Object.Indexer.error)::rev_prop_asts - | Indexer (loc, indexer) -> - let { Indexer.key; value; static; variance; _ } = indexer in - let k, _ as key = convert cx tparams_map key in - let v, _ as value = convert cx tparams_map value in - let polarity = polarity variance in - add_indexer ~static polarity ~key:k ~value:v x, - Indexer (loc, { indexer with Indexer.key; value; })::rev_prop_asts - | Property (loc, ({ Property. - key; value; static; proto; optional; _method; variance; - } as prop)) -> - if optional && _method - then Flow.add_output cx Flow_error.(EInternal (loc, OptionalMethod)); - let polarity = polarity variance in - let x, prop = Ast.Expression.Object.( - match _method, key, value with - | _, Property.Literal (loc, _), _ - | _, Property.PrivateName (loc, _), _ - | _, Property.Computed (loc, _), _ -> - Flow.add_output cx (Flow_error.EUnsupportedSyntax (loc, Flow_error.IllegalName)); - x, (loc, Typed_ast.Type.Object.Property.error) - - (* Previously, call properties were stored in the props map under the key - $call. Unfortunately, this made it possible to specify call properties - using this syntax in interfaces, declared classes, and even normal classes. - - Note that $call properties always override the call property syntax. - As before, if both are present, the $call property is used and the call - property is ignored. *) - | _, (Property.Identifier (id_loc, "$call")), - Ast.Type.Object.Property.Init value when not proto -> - Flow.add_output cx Flow_error.(EDeprecatedCallSyntax id_loc); - let (_, t), _ as value_ast = convert cx tparams_map value in - let t = if optional then Type.optional t else t in - add_call_deprecated ~static t x, - Ast.Type.(loc, { prop with Object.Property. - key = Property.Identifier ((id_loc, t), "$call"); - value = Object.Property.Init value_ast; - }) - - | true, (Property.Identifier (id_loc, name)), - Ast.Type.Object.Property.Init (func_loc, Ast.Type.Function func) -> - let fsig, func_ast = mk_func_sig cx tparams_map loc func in - let ft = Func_sig.methodtype cx fsig in - let append_method = match static, name with - | false, "constructor" -> append_constructor (Some id_loc) - | _ -> append_method ~static name id_loc - in - append_method fsig x, - Ast.Type.(loc, { prop with Object.Property. - key = Property.Identifier ((id_loc, ft), name); - value = Object.Property.Init ((func_loc, ft), Function func_ast); - }) - - | true, Property.Identifier _, _ -> - Flow.add_output cx - Flow_error.(EInternal (loc, MethodNotAFunction)); - x, (loc, Typed_ast.Type.Object.Property.error) - - | false, (Property.Identifier (id_loc, name)), - Ast.Type.Object.Property.Init value -> - let (_, t), _ as value_ast = convert cx tparams_map value in - let t = if optional then Type.optional t else t in - let add = if proto then add_proto_field else add_field ~static in - add name id_loc polarity (Annot t) x, - Ast.Type.(loc, { prop with Object.Property. 
- key = Property.Identifier ((id_loc, t), name); - value = Object.Property.Init value_ast; - }) - - (* unsafe getter property *) - | _, (Property.Identifier (id_loc, name)), - Ast.Type.Object.Property.Get (get_loc, func) -> - Flow_js.add_output cx (Flow_error.EUnsafeGettersSetters loc); - let fsig, func_ast = mk_func_sig cx tparams_map loc func in - let prop_t = fsig.Func_sig.return_t in - add_getter ~static name id_loc fsig x, - Ast.Type.(loc, { prop with Object.Property. - key = Property.Identifier ((id_loc, prop_t), name); - value = Object.Property.Get (get_loc, func_ast); - }) - - (* unsafe setter property *) - | _, (Property.Identifier (id_loc, name)), - Ast.Type.Object.Property.Set (set_loc, func) -> - Flow_js.add_output cx (Flow_error.EUnsafeGettersSetters loc); - let fsig, func_ast = mk_func_sig cx tparams_map loc func in - let prop_t = match fsig with - | { Func_sig.tparams=[]; fparams; _ } -> - (match Func_params.value fparams with - | [_, t] -> t - | _ -> AnyT.at id_loc (* error case: report any ok *)) - | _ -> AnyT.at id_loc (* error case: report any ok *) in - add_setter ~static name id_loc fsig x, - Ast.Type.(loc, { prop with Object.Property. - key = Property.Identifier ((id_loc, prop_t), name); - value = Object.Property.Set (set_loc, func_ast); - }) - ) - in - x, Ast.Type.Object.Property prop :: rev_prop_asts - - | InternalSlot (loc, slot) -> - let { InternalSlot. - id = _, name; - value; - optional; - static; - _method; - } = slot in - if name = "call" then - let (_, t), _ as value = convert cx tparams_map value in - let t = if optional then Type.optional t else t in - append_call ~static t x, - InternalSlot (loc, { slot with InternalSlot.value })::rev_prop_asts - else ( - Flow.add_output cx Flow_error.( - EUnsupportedSyntax (loc, UnsupportedInternalSlot { - name; - static; - })); - x, InternalSlot (loc, Typed_ast.Type.Object.InternalSlot.error)::rev_prop_asts - ) - - | SpreadProperty (loc, _) -> - Flow.add_output cx Flow_error.(EInternal (loc, InterfaceTypeSpread)); - x, - SpreadProperty (loc, Typed_ast.Type.Object.SpreadProperty.error)::rev_prop_asts - ) (s, []) properties - in - x, List.rev rev_prop_asts + Class_type_sig.( + let (x, rev_prop_asts) = + List.fold_left + Ast.Type.Object.( + fun (x, rev_prop_asts) -> function + | CallProperty (loc, { CallProperty.value = (value_loc, ft); static }) -> + let ((_, t), ft) = convert cx tparams_map (loc, Ast.Type.Function ft) in + let ft = + match ft with + | Ast.Type.Function ft -> ft + | _ -> assert false + in + ( append_call ~static t x, + CallProperty (loc, { CallProperty.value = (value_loc, ft); static }) + :: rev_prop_asts ) + | Indexer (loc, { Indexer.static; _ }) as indexer_prop when mem_field ~static "$key" x + -> + Flow.add_output cx Error_message.(EUnsupportedSyntax (loc, MultipleIndexers)); + (x, Tast_utils.error_mapper#object_type_property indexer_prop :: rev_prop_asts) + | Indexer (loc, indexer) -> + let { Indexer.key; value; static; variance; _ } = indexer in + let (((_, k), _) as key) = convert cx tparams_map key in + let (((_, v), _) as value) = convert cx tparams_map value in + let polarity = polarity variance in + ( add_indexer ~static polarity ~key:k ~value:v x, + Indexer (loc, { indexer with Indexer.key; value }) :: rev_prop_asts ) + | Property + (loc, ({ Property.key; value; static; proto; optional; _method; variance } as prop)) + -> + if optional && _method then + Flow.add_output cx Error_message.(EInternal (loc, OptionalMethod)); + let polarity = polarity variance in + let (x, prop) = + 
Ast.Expression.Object.( + match (_method, key, value) with + | (_, Property.Literal (loc, _), _) + | (_, Property.PrivateName (loc, _), _) + | (_, Property.Computed (loc, _), _) -> + Flow.add_output + cx + (Error_message.EUnsupportedSyntax (loc, Error_message.IllegalName)); + (x, Tast_utils.error_mapper#object_property_type (loc, prop)) + | ( true, + Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)), + Ast.Type.Object.Property.Init (func_loc, Ast.Type.Function func) ) -> + let (fsig, func_ast) = mk_func_sig cx tparams_map loc func in + let ft = Func_type_sig.methodtype cx fsig in + let append_method = + match (static, name) with + | (false, "constructor") -> append_constructor (Some id_loc) + | _ -> append_method ~static name id_loc + in + ( append_method fsig x, + Ast.Type. + ( loc, + { + prop with + Object.Property.key = Property.Identifier ((id_loc, ft), id_name); + value = Object.Property.Init ((func_loc, ft), Function func_ast); + } ) ) + | (true, Property.Identifier _, _) -> + Flow.add_output cx Error_message.(EInternal (loc, MethodNotAFunction)); + (x, Tast_utils.error_mapper#object_property_type (loc, prop)) + | ( false, + Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)), + Ast.Type.Object.Property.Init value ) -> + let (((_, t), _) as value_ast) = convert cx tparams_map value in + let t = + if optional then + Type.optional t + else + t + in + let add = + if proto then + add_proto_field + else + add_field ~static + in + ( add name id_loc polarity (Annot t) x, + Ast.Type. + ( loc, + { + prop with + Object.Property.key = Property.Identifier ((id_loc, t), id_name); + value = Object.Property.Init value_ast; + } ) ) + (* unsafe getter property *) + | ( _, + Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)), + Ast.Type.Object.Property.Get (get_loc, func) ) -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc); + let (fsig, func_ast) = mk_func_sig cx tparams_map loc func in + let prop_t = fsig.Func_type_sig.return_t in + ( add_getter ~static name id_loc fsig x, + Ast.Type. + ( loc, + { + prop with + Object.Property.key = Property.Identifier ((id_loc, prop_t), id_name); + value = Object.Property.Get (get_loc, func_ast); + } ) ) + (* unsafe setter property *) + | ( _, + Property.Identifier + (id_loc, ({ Ast.Identifier.name; comments = _ } as id_name)), + Ast.Type.Object.Property.Set (set_loc, func) ) -> + Flow_js.add_output cx (Error_message.EUnsafeGettersSetters loc); + let (fsig, func_ast) = mk_func_sig cx tparams_map loc func in + let prop_t = + match fsig with + | { Func_type_sig.tparams = None; fparams; _ } -> + (match Func_type_params.value fparams with + | [(_, t)] -> t + | _ -> AnyT.at AnyError id_loc) + (* error case: report any ok *) + | _ -> AnyT.at AnyError id_loc + (* error case: report any ok *) + in + ( add_setter ~static name id_loc fsig x, + Ast.Type. 
+ ( loc, + { + prop with + Object.Property.key = Property.Identifier ((id_loc, prop_t), id_name); + value = Object.Property.Set (set_loc, func_ast); + } ) )) + in + (x, Ast.Type.Object.Property prop :: rev_prop_asts) + | InternalSlot (loc, slot) as prop -> + let { + InternalSlot.id = (_, { Ast.Identifier.name; comments = _ }); + value; + optional; + static; + _method; + } = + slot + in + if name = "call" then + let (((_, t), _) as value) = convert cx tparams_map value in + let t = + if optional then + Type.optional t + else + t + in + ( append_call ~static t x, + InternalSlot (loc, { slot with InternalSlot.value }) :: rev_prop_asts ) + else ( + Flow.add_output + cx + Error_message.( + EUnsupportedSyntax (loc, UnsupportedInternalSlot { name; static })); + (x, Tast_utils.error_mapper#object_type_property prop :: rev_prop_asts) + ) + | SpreadProperty (loc, _) as prop -> + Flow.add_output cx Error_message.(EInternal (loc, InterfaceTypeSpread)); + (x, Tast_utils.error_mapper#object_type_property prop :: rev_prop_asts)) + (s, []) + properties + in + (x, List.rev rev_prop_asts)) let mk_super cx tparams_map loc c targs = match targs with - | None -> (loc, c, None), None + | None -> ((loc, c, None), None) | Some (targs_loc, targs) -> - let ts, targs_ast = convert_list cx tparams_map targs in - (loc, c, Some ts), Some (targs_loc, targs_ast) + let (ts, targs_ast) = convert_list cx tparams_map targs in + ((loc, c, Some ts), Some (targs_loc, targs_ast)) let mk_interface_sig cx reason decl = - let open Class_sig in - let { Ast.Statement.Interface. - id = id_loc, id_name; - tparams; - body = (body_loc, { Ast.Type.Object.properties; exact }); - extends; - _; - } = decl in - - let self = Tvar.mk cx reason in - - let tparams, tparams_map, tparams_ast = - mk_type_param_declarations cx tparams in - - let id_info = id_name, self, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - - let iface_sig, extends_ast = - let id = Context.make_nominal cx in - let extends, extends_ast = - extends - |> List.map (mk_interface_super cx tparams_map) - |> List.split in - let super = - let callable = List.exists Ast.Type.Object.(function - | CallProperty (_, { CallProperty.static; _ }) -> not static - | _ -> false - ) properties in - Interface { extends; callable } - in - empty id reason tparams tparams_map super, extends_ast - in - - (* TODO: interfaces don't have a name field, or even statics *) - let iface_sig = add_name_field iface_sig in - - let iface_sig, properties = add_interface_properties cx tparams_map properties iface_sig in - - iface_sig, self, - { Ast.Statement.Interface. - id = (id_loc, self), id_name; - tparams = tparams_ast; - extends = extends_ast; - body = body_loc, { Ast.Type.Object.exact; properties }; - } - -let mk_declare_class_sig = - let open Class_sig in - - let mk_mixins cx tparams_map (loc, {Ast.Type.Generic.id; targs}) = - let name = qualified_name id in - let r = mk_reason (RType name) loc in - let i, id = - let lookup_mode = Env.LookupMode.ForValue in - convert_qualification ~lookup_mode cx "mixins" id - in - let props_bag = Tvar.mk_derivable_where cx r (fun tvar -> - Flow.flow cx (i, Type.MixinT (r, tvar)) - ) in - let t, targs = mk_super cx tparams_map loc props_bag targs in - t, (loc, { Ast.Type.Generic.id; targs }) - in - - let is_object_builtin_libdef (loc, name) = - name = "Object" && - match Loc.source loc with - | None -> false - | Some source -> File_key.is_lib_file source - in - - fun cx reason decl -> - let { Ast.Statement.DeclareClass. 
- id = (id_loc, id_name) as ident; + Class_type_sig.( + let { + Ast.Statement.Interface.id = (id_loc, id_name); tparams; - body = body_loc, { Ast.Type.Object.properties; exact }; + body = (body_loc, { Ast.Type.Object.properties; exact; inexact = _inexact }); extends; - mixins; - implements; - } = decl in - + _; + } = + decl + in let self = Tvar.mk cx reason in - - let tparams, tparams_map, tparam_asts = - mk_type_param_declarations cx tparams in - - let id_info = id_name, self, Type_table.Other in - Type_table.set_info id_loc id_info (Context.type_table cx); - - let _, tparams, tparams_map = Class_sig.add_this self cx reason tparams tparams_map in - - Type_table.with_typeparams tparams (Context.type_table cx) @@ fun _ -> - - let iface_sig, extends_ast, mixins_ast, implements_ast = - let id = Context.make_nominal cx in - let extends, extends_ast = - match extends with - | Some (loc, {Ast.Type.Generic.id; targs}) -> - let lookup_mode = Env.LookupMode.ForValue in - let i, id = - convert_qualification ~lookup_mode cx "mixins" id in - let t, targs = mk_super cx tparams_map loc i targs in - Some t, Some (loc, { Ast.Type.Generic.id; targs }) - | None -> - None, None - in - let mixins, mixins_ast = - mixins - |> List.map (mk_mixins cx tparams_map) - |> List.split + let (tparams, tparams_map, tparams_ast) = mk_type_param_declarations cx tparams in + let (iface_sig, extends_ast) = + let id = id_loc in + let (extends, extends_ast) = + extends |> Core_list.map ~f:(mk_interface_super cx tparams_map) |> List.split in - let implements, implements_ast = - implements - |> List.map (fun (loc, i) -> - let { Ast.Class.Implements.id = (id_loc, name); targs } = i in - let c = Env.get_var ~lookup_mode:Env.LookupMode.ForType cx name id_loc in - let typeapp, targs = match targs with - | None -> (loc, c, None), None - | Some (targs_loc, targs) -> - let ts, targs_ast = convert_list cx tparams_map targs in - (loc, c, Some ts), Some (targs_loc, targs_ast) - in - typeapp, (loc, { Ast.Class.Implements.id = (id_loc, c), name; targs }) - ) - |> List.split in let super = - let extends = match extends with - | None -> Implicit { null = is_object_builtin_libdef ident } - | Some extends -> Explicit extends + let callable = + List.exists + Ast.Type.Object.( + function + | CallProperty (_, { CallProperty.static; _ }) -> not static + | _ -> false) + properties in - Class { extends; mixins; implements } + Interface { inline = false; extends; callable } in - empty id reason tparams tparams_map super, - extends_ast, mixins_ast, implements_ast + (empty id reason tparams tparams_map super, extends_ast) in - - (* All classes have a static "name" property. 
*) + (* TODO: interfaces don't have a name field, or even statics *) let iface_sig = add_name_field iface_sig in + let (iface_sig, properties) = add_interface_properties cx tparams_map properties iface_sig in + ( iface_sig, + self, + { + Ast.Statement.Interface.id = ((id_loc, self), id_name); + tparams = tparams_ast; + extends = extends_ast; + body = (body_loc, { Ast.Type.Object.exact; properties; inexact = false }); + } )) - let iface_sig, properties = - add_interface_properties cx tparams_map properties iface_sig in - - (* Add a default ctor if we don't have a ctor and won't inherit one from a super *) - let iface_sig = - if mem_constructor iface_sig || extends <> None || mixins <> [] then - iface_sig - else - let reason = replace_reason_const RDefaultConstructor reason in - Class_sig.add_default_constructor reason iface_sig +let mk_declare_class_sig = + Class_type_sig.( + let mk_mixins cx tparams_map (loc, { Ast.Type.Generic.id; targs }) = + let name = qualified_name id in + let r = mk_reason (RType name) loc in + let (i, id) = + let lookup_mode = Env.LookupMode.ForValue in + convert_qualification ~lookup_mode cx "mixins" id + in + let props_bag = + Tvar.mk_derivable_where cx r (fun tvar -> Flow.flow cx (i, Type.MixinT (r, tvar))) + in + let (t, targs) = mk_super cx tparams_map loc props_bag targs in + (t, (loc, { Ast.Type.Generic.id; targs })) in - iface_sig, self, - { Ast.Statement.DeclareClass. - id = (id_loc, self), id_name; - tparams = tparam_asts; - body = body_loc, { Ast.Type.Object.properties; exact }; - extends = extends_ast; - mixins = mixins_ast; - implements = implements_ast; - } + let is_object_builtin_libdef (loc, { Ast.Identifier.name; comments = _ }) = + name = "Object" + && + match ALoc.source loc with + | None -> false + | Some source -> File_key.is_lib_file source + in + fun cx reason decl -> + let { + Ast.Statement.DeclareClass.id = (id_loc, id_name) as ident; + tparams; + body = (body_loc, { Ast.Type.Object.properties; exact; inexact = _inexact }); + extends; + mixins; + implements; + } = + decl + in + let self = Tvar.mk cx reason in + let (tparams, tparams_map, tparam_asts) = mk_type_param_declarations cx tparams in + let (_, tparams, tparams_map) = Class_type_sig.add_this self cx reason tparams tparams_map in + let (iface_sig, extends_ast, mixins_ast, implements_ast) = + let id = id_loc in + let (extends, extends_ast) = + match extends with + | Some (loc, { Ast.Type.Generic.id; targs }) -> + begin + match (id, targs) with + | ( Ast.Type.Generic.Identifier.Unqualified + ( id_loc, + ( { Ast.Identifier.name = "$TEMPORARY$Super$FlowFixMe"; comments = _ } as + id_name ) ), + None ) -> + let ty = AnyT.at Annotated id_loc in + let t = (loc, ty, None) in + let id = + let id_loc_ty = (id_loc, ty) in + Ast.Type.Generic.Identifier.Unqualified (id_loc_ty, id_name) + in + (Some t, Some (loc, { Ast.Type.Generic.id; targs = None })) + | _ -> + let lookup_mode = Env.LookupMode.ForValue in + let (i, id) = convert_qualification ~lookup_mode cx "mixins" id in + let (t, targs) = mk_super cx tparams_map loc i targs in + (Some t, Some (loc, { Ast.Type.Generic.id; targs })) + end + | None -> (None, None) + in + let (mixins, mixins_ast) = + mixins |> Core_list.map ~f:(mk_mixins cx tparams_map) |> List.split + in + let (implements, implements_ast) = + implements + |> Core_list.map ~f:(fun (loc, i) -> + let { Ast.Class.Implements.id = (id_loc, id_name_inner); targs } = i in + let { Ast.Identifier.name; comments = _ } = id_name_inner in + let c = Env.get_var 
~lookup_mode:Env.LookupMode.ForType cx name id_loc in + let (typeapp, targs) = + match targs with + | None -> ((loc, c, None), None) + | Some (targs_loc, targs) -> + let (ts, targs_ast) = convert_list cx tparams_map targs in + ((loc, c, Some ts), Some (targs_loc, targs_ast)) + in + (typeapp, (loc, { Ast.Class.Implements.id = ((id_loc, c), id_name_inner); targs }))) + |> List.split + in + let super = + let extends = + match extends with + | None -> Implicit { null = is_object_builtin_libdef ident } + | Some extends -> Explicit extends + in + Class { extends; mixins; implements } + in + (empty id reason tparams tparams_map super, extends_ast, mixins_ast, implements_ast) + in + (* All classes have a static "name" property. *) + let iface_sig = add_name_field iface_sig in + let (iface_sig, properties) = add_interface_properties cx tparams_map properties iface_sig in + (* Add a default ctor if we don't have a ctor and won't inherit one from a super *) + let iface_sig = + if mem_constructor iface_sig || extends <> None || mixins <> [] then + iface_sig + else + let reason = replace_desc_reason RDefaultConstructor reason in + add_default_constructor reason iface_sig + in + ( iface_sig, + self, + { + Ast.Statement.DeclareClass.id = ((id_loc, self), id_name); + tparams = tparam_asts; + body = (body_loc, { Ast.Type.Object.properties; exact; inexact = false }); + extends = extends_ast; + mixins = mixins_ast; + implements = implements_ast; + } )) diff --git a/src/typing/type_annotation.mli b/src/typing/type_annotation.mli index b540a524f1a..c490ad7c4e5 100644 --- a/src/typing/type_annotation.mli +++ b/src/typing/type_annotation.mli @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,67 +9,101 @@ called during AST traversal. 
*) -val convert: Context.t -> +module Class_type_sig : Class_sig.S + +module Object_freeze : sig + val freeze_object : Context.t -> ALoc.t -> Type.t -> Type.t +end + +val convert : + Context.t -> Type.t SMap.t -> - (Loc.t, Loc.t) Flow_ast.Type.t -> - (Loc.t, Loc.t * Type.t) Flow_ast.Type.t + (ALoc.t, ALoc.t) Flow_ast.Type.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.t val convert_list : Context.t -> Type.t SMap.t -> - (Loc.t, Loc.t) Flow_ast.Type.t list -> - Type.t list * - (Loc.t, Loc.t * Type.t) Flow_ast.Type.t list + (ALoc.t, ALoc.t) Flow_ast.Type.t list -> + Type.t list * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.t list val convert_opt : Context.t -> Type.t SMap.t -> - (Loc.t, Loc.t) Flow_ast.Type.t option -> - Type.t option * (Loc.t, Loc.t * Type.t) Flow_ast.Type.t option + (ALoc.t, ALoc.t) Flow_ast.Type.t option -> + Type.t option * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.t option -val convert_qualification: ?lookup_mode:Env.LookupMode.t -> +val convert_qualification : + ?lookup_mode:Env.LookupMode.t -> Context.t -> string -> - (Loc.t, Loc.t) Flow_ast.Type.Generic.Identifier.t -> - Type.t * (Loc.t, Loc.t * Type.t) Flow_ast.Type.Generic.Identifier.t + (ALoc.t, ALoc.t) Flow_ast.Type.Generic.Identifier.t -> + Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.Generic.Identifier.t -val mk_super: Context.t -> +val mk_super : + Context.t -> Type.t SMap.t -> - Loc.t -> + ALoc.t -> Type.t -> - (Loc.t, Loc.t) Flow_ast.Type.ParameterInstantiation.t option -> - (Loc.t * Type.t * Type.t list option) * (Loc.t, Loc.t * Type.t) Flow_ast.Type.ParameterInstantiation.t option + (ALoc.t, ALoc.t) Flow_ast.Type.ParameterInstantiation.t option -> + (ALoc.t * Type.t * Type.t list option) + * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.ParameterInstantiation.t option -val mk_type_annotation: Context.t -> +val mk_type_annotation : + Context.t -> Type.t SMap.t -> Reason.t -> - (Loc.t, Loc.t) Flow_ast.Type.annotation option -> - Type.t * (Loc.t, Loc.t * Type.t) Flow_ast.Type.annotation option + (ALoc.t, ALoc.t) Flow_ast.Type.annotation_or_hint -> + Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation_or_hint -val mk_nominal_type: +val mk_return_type_annotation : Context.t -> + Type.t SMap.t -> Reason.t -> + definitely_returns_void:bool -> + (ALoc.t, ALoc.t) Flow_ast.Type.annotation_or_hint -> + Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation_or_hint + +val mk_type_available_annotation : + Context.t -> Type.t SMap.t -> - (Type.t * (Loc.t, Loc.t) Flow_ast.Type.ParameterInstantiation.t option) -> - Type.t * (Loc.t, Loc.t * Type.t) Flow_ast.Type.ParameterInstantiation.t option + (ALoc.t, ALoc.t) Flow_ast.Type.annotation -> + Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.annotation -val mk_type_param_declarations: Context.t -> - ?tparams_map:(Type.t SMap.t) -> - (Loc.t, Loc.t) Flow_ast.Type.ParameterDeclaration.t option -> - Type.typeparam list * - Type.t SMap.t * - (Loc.t, Loc.t * Type.t) Flow_ast.Type.ParameterDeclaration.t option +val mk_nominal_type : + Context.t -> + Reason.t -> + Type.t SMap.t -> + Type.t * (ALoc.t, ALoc.t) Flow_ast.Type.ParameterInstantiation.t option -> + Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.ParameterInstantiation.t option + +val mk_type_param_declarations : + Context.t -> + ?tparams_map:Type.t SMap.t -> + (ALoc.t, ALoc.t) Flow_ast.Type.ParameterDeclaration.t option -> + Type.typeparams + * Type.t SMap.t + * (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.ParameterDeclaration.t option -val mk_interface_sig: Context.t -> +val mk_interface_sig : + Context.t -> Reason.t -> 
- (Loc.t, Loc.t) Flow_ast.Statement.Interface.t -> - Class_sig.t * Type.t * (Loc.t, Loc.t * Type.t) Flow_ast.Statement.Interface.t + (ALoc.t, ALoc.t) Flow_ast.Statement.Interface.t -> + Class_type_sig.t * Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Statement.Interface.t -val mk_declare_class_sig: Context.t -> +val mk_declare_class_sig : + Context.t -> Reason.t -> - (Loc.t, Loc.t) Flow_ast.Statement.DeclareClass.t -> - Class_sig.t * Type.t * (Loc.t, Loc.t * Type.t) Flow_ast.Statement.DeclareClass.t + (ALoc.t, ALoc.t) Flow_ast.Statement.DeclareClass.t -> + Class_type_sig.t * Type.t * (ALoc.t, ALoc.t * Type.t) Flow_ast.Statement.DeclareClass.t -val polarity: Loc.t Flow_ast.Variance.t option -> Type.polarity +val polarity : 'a Flow_ast.Variance.t option -> Polarity.t -val qualified_name: (Loc.t, Loc.t) Flow_ast.Type.Generic.Identifier.t -> string +val qualified_name : (ALoc.t, ALoc.t) Flow_ast.Type.Generic.Identifier.t -> string + +val error_type : + Context.t -> + ALoc.t -> + Error_message.t -> + (ALoc.t, ALoc.t) Flow_ast.Type.t -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.Type.t diff --git a/src/typing/type_asserts.ml b/src/typing/type_asserts.ml new file mode 100644 index 00000000000..82fca3d04bc --- /dev/null +++ b/src/typing/type_asserts.ml @@ -0,0 +1,101 @@ +(** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +module FilenameMap = Utils_js.FilenameMap + +let check_type_visitor wrap = + Ty.( + object (self) + inherit [_] iter_ty as super + + method! private on_prop env = + function + | NamedProp (_, p) -> self#on_named_prop env p + | IndexProp d -> self#on_dict env d + | CallProp _ -> wrap (Reason.RCustom "object Call Property") + | SpreadProp _ -> wrap (Reason.RCustom "object Spread Property") + + method! private on_named_prop env = + function + | Field (t, _) -> self#on_t env t + | Method _ -> wrap (Reason.RMethod None) + | Get _ + | Set _ -> + wrap Reason.RGetterSetterProperty + + method! 
on_t env = + function + | TVar _ -> wrap (Reason.RCustom "recursive type") + | Fun _ -> wrap Reason.RFunctionType + | Generic (_, _, Some _) -> wrap (Reason.RCustom "class with generics") + | Mu _ -> wrap (Reason.RCustom "recursive type") + | Any Annotated -> Reason.RAnyExplicit |> wrap + | Any _ -> Reason.RAnyImplicit |> wrap + | Bound (_, name) -> wrap (Reason.RCustom ("bound type var " ^ name)) + | Top -> wrap Reason.RMixed + | Bot _ -> wrap Reason.REmpty + | Module (Some { Ty.name; _ }, _) -> wrap (Reason.RModule name) + | TypeAlias { ta_tparams = None; ta_type = Some t; _ } -> self#on_t env t + | TypeAlias { ta_name = { Ty.name; _ }; _ } -> wrap (Reason.RCustom ("type alias " ^ name)) + | (Obj _ | Arr _ | Tup _ | Union _ | Inter _) as t -> super#on_t env t + | Void + | Null + | Num _ + | Str _ + | Bool _ + | NumLit _ + | StrLit _ + | BoolLit _ + | TypeOf _ + | Generic _ + | ClassDecl _ + | InterfaceDecl _ + | Utility _ + | Module _ + | InlineInterface _ -> + () + end) + +let detect_invalid_calls ~full_cx file_sigs cxs tasts = + let options = + { + Ty_normalizer_env.fall_through_merged = false; + expand_internal_types = false; + expand_type_aliases = true; + flag_shadowed_type_params = false; + evaluate_type_destructors = true; + preserve_inferred_literal_types = false; + optimize_types = true; + omit_targ_defaults = false; + merge_bot_and_any_kinds = true; + } + in + let check_valid_call ~genv (call_loc : ALoc.t) (_, targ_loc) = + let typed_ast = genv.Ty_normalizer_env.typed_ast in + let ty_opt = Typed_ast_utils.find_exact_match_annotation typed_ast targ_loc in + Option.iter ty_opt ~f:(fun scheme -> + let desc = Reason.RCustom "TypeAssert library function" in + let reason_main = Reason.mk_reason desc call_loc in + let wrap reason = + Flow_js.add_output + full_cx + (Error_message.EInvalidTypeArgs (reason_main, Reason.mk_reason reason call_loc)) + in + match Ty_normalizer.from_scheme ~options ~genv scheme with + | Ok ty -> (check_type_visitor wrap)#on_t () ty + | Error _ -> + let { Type.TypeScheme.type_ = t; _ } = scheme in + wrap (Type.desc_of_t t)) + in + Core_list.iter2_exn + ~f:(fun cx typed_ast -> + let file = Context.file cx in + let file_sig = FilenameMap.find_unsafe file file_sigs in + let genv = Ty_normalizer_env.mk_genv ~full_cx ~file ~typed_ast ~file_sig in + Loc_collections.ALocMap.iter (check_valid_call ~genv) (Context.type_asserts_map cx)) + cxs + tasts diff --git a/src/typing/type_filter.ml b/src/typing/type_filter.ml index 2733fa0f41b..d99b3dfc0f8 100644 --- a/src/typing/type_filter.ml +++ b/src/typing/type_filter.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
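(* A minimal standalone sketch of the shape used by check_type_visitor in the new
   type_asserts.ml above: an iterating visitor parameterized by a [wrap] callback,
   so the caller decides how an unsupported type is reported. The tiny [ty]
   variant and the string payload are invented for illustration; they are not
   Flow's real Ty.t or Reason.t. *)
type ty =
  | Num
  | Str
  | Fun of ty list * ty (* like Ty.Fun: has no runtime representation to check *)
  | Union of ty list

let check_visitor (wrap : string -> unit) =
  object (self)
    method on_ty =
      function
      | Num
      | Str ->
        () (* checkable at runtime: nothing to report *)
      | Fun _ -> wrap "function type" (* report, but keep traversing siblings *)
      | Union ts -> List.iter self#on_ty ts (* recurse into union members *)
  end

(* Usage: collect every reason a type argument cannot be asserted at runtime. *)
let unsupported_reasons (t : ty) : string list =
  let acc = ref [] in
  (check_visitor (fun msg -> acc := msg :: !acc))#on_ty t;
  List.rev !acc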
@@ -8,349 +8,399 @@ open Reason open Type -let recurse_into_union filter_fn ((r, ts): reason * Type.t list) = - let new_ts = List.fold_left (fun new_ts t -> - match filter_fn t with - | DefT (_, EmptyT) -> new_ts - | filtered_type -> filtered_type::new_ts - ) [] ts in +let recurse_into_union filter_fn ((r, ts) : reason * Type.t list) = + let new_ts = + List.fold_left + (fun new_ts t -> + match filter_fn t with + | DefT (_, _, EmptyT Bottom) -> new_ts + | filtered_type -> filtered_type :: new_ts) + [] + ts + in let new_ts = List.rev new_ts in match new_ts with - | [] -> DefT (r, EmptyT) + | [] -> DefT (r, bogus_trust (), EmptyT Bottom) | [t] -> t - | t0::t1::ts -> DefT (r, UnionT (UnionRep.make t0 t1 ts)) + | t0 :: t1 :: ts -> UnionT (r, UnionRep.make t0 t1 ts) let recurse_into_intersection = let rec helper filter_fn r acc = function - | [] -> List.rev acc - | t::ts -> - begin match filter_fn t with - | DefT (_, EmptyT) -> [] - | filtered_type -> helper filter_fn r (filtered_type::acc) ts - end + | [] -> List.rev acc + | t :: ts -> + begin + match filter_fn t with + | DefT (_, _, EmptyT Bottom) -> [] + | filtered_type -> helper filter_fn r (filtered_type :: acc) ts + end in - fun filter_fn ((r, ts): reason * Type.t list) -> + fun filter_fn ((r, ts) : reason * Type.t list) -> match helper filter_fn r [] ts with - | [] -> DefT (r, EmptyT) + | [] -> DefT (r, bogus_trust (), EmptyT Bottom) | [t] -> t - | t0::t1::ts -> DefT (r, IntersectionT (InterRep.make t0 t1 ts)) + | t0 :: t1 :: ts -> IntersectionT (r, InterRep.make t0 t1 ts) let rec exists = function (* falsy things get removed *) - | DefT (r, ( - NullT - | VoidT - | SingletonBoolT false - | BoolT (Some false) - | SingletonStrT "" - | StrT (Literal (_, "")) - | SingletonNumT (0., _) - | NumT (Literal (_, (0., _))) - )) -> DefT (r, EmptyT) - + | DefT + ( r, + trust, + ( NullT | VoidT + | SingletonBoolT false + | BoolT (Some false) + | SingletonStrT "" + | StrT (Literal (_, "")) + | SingletonNumT (0., _) + | NumT (Literal (_, (0., _))) ) ) -> + DefT (r, trust, EmptyT Bottom) (* unknown things become truthy *) - | DefT (_, MaybeT t) -> t - | DefT (_, OptionalT t) -> exists t - | DefT (r, BoolT None) -> DefT (r, BoolT (Some true)) - | DefT (r, StrT AnyLiteral) -> DefT (r, StrT Truthy) - | DefT (r, NumT AnyLiteral) -> DefT (r, NumT Truthy) - | DefT (r, MixedT _) -> DefT (r, MixedT Mixed_truthy) - + | MaybeT (_, t) -> t + | OptionalT (_, t) -> exists t + | DefT (r, trust, BoolT None) -> DefT (r, trust, BoolT (Some true)) + | DefT (r, trust, StrT AnyLiteral) -> DefT (r, trust, StrT Truthy) + | DefT (r, trust, NumT AnyLiteral) -> DefT (r, trust, NumT Truthy) + | DefT (r, trust, MixedT _) -> DefT (r, trust, MixedT Mixed_truthy) (* an intersection passes through iff all of its members pass through *) - | DefT (r, IntersectionT rep) -> - recurse_into_intersection exists (r, InterRep.members rep) - + | IntersectionT (r, rep) -> recurse_into_intersection exists (r, InterRep.members rep) (* truthy things pass through *) | t -> t -let rec not_exists t = match t with +let rec not_exists t = + match t with (* falsy things pass through *) - | DefT (_, ( - NullT - | VoidT - | SingletonBoolT false - | BoolT (Some false) - | SingletonStrT "" - | StrT (Literal (_, "")) - | SingletonNumT (0., _) - | NumT (Literal (_, (0., _))) - )) -> t - + | DefT + ( _, + _, + ( NullT | VoidT + | SingletonBoolT false + | BoolT (Some false) + | SingletonStrT "" + | StrT (Literal (_, "")) + | SingletonNumT (0., _) + | NumT (Literal (_, (0., _))) ) ) -> + t + | AnyT (r, _) -> DefT 
(r, Trust.bogus_trust (), EmptyT Bottom) (* truthy things get removed *) - | DefT (r, ( - SingletonBoolT _ - | BoolT (Some _) - | SingletonStrT _ - | StrT (Literal _ | Truthy) - | ArrT _ - | ObjT _ - | InstanceT _ - | AnyObjT - | FunT _ - | AnyFunT - | SingletonNumT _ - | NumT (Literal _ | Truthy) - | MixedT Mixed_truthy - )) -> DefT (r, EmptyT) - - | DefT (reason, ClassT _) -> DefT (reason, EmptyT) - + | DefT + ( r, + trust, + ( SingletonBoolT _ + | BoolT (Some _) + | SingletonStrT _ + | StrT (Literal _ | Truthy) + | ArrT _ | ObjT _ | InstanceT _ | FunT _ | SingletonNumT _ + | NumT (Literal _ | Truthy) + | MixedT Mixed_truthy ) ) -> + DefT (r, trust, EmptyT Bottom) + | DefT (reason, trust, ClassT _) -> DefT (reason, trust, EmptyT Bottom) (* unknown boolies become falsy *) - | DefT (r, MaybeT _) -> - DefT (r, UnionT (UnionRep.make (NullT.why r) (VoidT.why r) [])) - | DefT (r, BoolT None) -> DefT (r, BoolT (Some false)) - | DefT (r, StrT AnyLiteral) -> DefT (r, StrT (Literal (None, ""))) - | DefT (r, NumT AnyLiteral) -> DefT (r, NumT (Literal (None, (0., "0")))) - + | MaybeT (r, _) -> + UnionT + ( r, + UnionRep.make + (Trust.bogus_trust () |> NullT.why r) + (Trust.bogus_trust () |> VoidT.why r) + [] ) + | DefT (r, trust, BoolT None) -> DefT (r, trust, BoolT (Some false)) + | DefT (r, trust, StrT AnyLiteral) -> DefT (r, trust, StrT (Literal (None, ""))) + | DefT (r, trust, NumT AnyLiteral) -> DefT (r, trust, NumT (Literal (None, (0., "0")))) (* an intersection passes through iff all of its members pass through *) - | DefT (r, IntersectionT rep) -> - recurse_into_intersection not_exists (r, InterRep.members rep) - + | IntersectionT (r, rep) -> recurse_into_intersection not_exists (r, InterRep.members rep) (* things that don't track truthiness pass through *) | t -> t let maybe = function - | DefT (r, MaybeT _) -> - DefT (r, UnionT (UnionRep.make (NullT.why r) (VoidT.why r) [])) - | DefT (r, MixedT Mixed_everything) -> - DefT (r, UnionT (UnionRep.make (NullT.why r) (VoidT.why r) [])) - | DefT (r, MixedT Mixed_truthy) -> EmptyT.why r - | DefT (r, MixedT Mixed_non_maybe) -> EmptyT.why r - | DefT (r, MixedT Mixed_non_void) -> DefT (r, NullT) - | DefT (r, MixedT Mixed_non_null) -> DefT (r, VoidT) - | DefT (_, NullT) as t -> t - | DefT (_, VoidT) as t -> t - | DefT (r, OptionalT _) -> VoidT.why r - | DefT (_, AnyT) as t -> t + | MaybeT (r, _) -> + UnionT + ( r, + UnionRep.make + (Trust.bogus_trust () |> NullT.why r) + (Trust.bogus_trust () |> VoidT.why r) + [] ) + | DefT (r, trust, MixedT Mixed_everything) -> + UnionT (r, UnionRep.make (NullT.why r trust) (VoidT.why r trust) []) + | DefT (r, trust, MixedT Mixed_truthy) -> EmptyT.why r trust + | DefT (r, trust, MixedT Mixed_non_maybe) -> EmptyT.why r trust + | DefT (r, trust, MixedT Mixed_non_void) -> DefT (r, trust, NullT) + | DefT (r, trust, MixedT Mixed_non_null) -> DefT (r, trust, VoidT) + | DefT (_, _, NullT) as t -> t + | DefT (_, _, VoidT) as t -> t + | OptionalT (r, _) -> Trust.bogus_trust () |> VoidT.why r + | AnyT _ as t -> t + | DefT (r, trust, _) -> EmptyT.why r trust | t -> let reason = reason_of_t t in - EmptyT.why reason + EmptyT.why reason |> with_trust bogus_trust let rec not_maybe = function - | DefT (_, MaybeT t) -> t - | DefT (_, OptionalT t) -> not_maybe t - | DefT (r, (NullT | VoidT)) -> DefT (r, EmptyT) - | DefT (r, MixedT Mixed_truthy) -> DefT (r, MixedT Mixed_truthy) - | DefT (r, MixedT Mixed_non_maybe) -> DefT (r, MixedT Mixed_non_maybe) - | DefT (r, MixedT Mixed_everything) - | DefT (r, MixedT Mixed_non_void) - | DefT (r, 
MixedT Mixed_non_null) - -> DefT (r, MixedT Mixed_non_maybe) + | MaybeT (_, t) -> t + | OptionalT (_, t) -> not_maybe t + | DefT (r, trust, (NullT | VoidT)) -> DefT (r, trust, EmptyT Bottom) + | DefT (r, trust, MixedT Mixed_truthy) -> DefT (r, trust, MixedT Mixed_truthy) + | DefT (r, trust, MixedT Mixed_non_maybe) -> DefT (r, trust, MixedT Mixed_non_maybe) + | DefT (r, trust, MixedT Mixed_everything) + | DefT (r, trust, MixedT Mixed_non_void) + | DefT (r, trust, MixedT Mixed_non_null) -> + DefT (r, trust, MixedT Mixed_non_maybe) | t -> t let null = function - | DefT (_, OptionalT (DefT (r, MaybeT _))) - | DefT (r, MaybeT _) -> NullT.why r - | DefT (_, NullT) as t -> t - | DefT (r, MixedT Mixed_everything) - | DefT (r, MixedT Mixed_non_void) -> NullT.why r - | DefT (_, AnyT) as t -> t + | OptionalT (_, MaybeT (r, _)) + | MaybeT (r, _) -> + Trust.bogus_trust () |> NullT.why r + | DefT (_, _, NullT) as t -> t + | DefT (r, trust, MixedT Mixed_everything) + | DefT (r, trust, MixedT Mixed_non_void) -> + NullT.why r trust + | AnyT _ as t -> t + | DefT (r, trust, _) -> EmptyT.why r trust | t -> let reason = reason_of_t t in - EmptyT.why reason + EmptyT.why reason |> with_trust bogus_trust let rec not_null = function - | DefT (r, MaybeT t) -> - DefT (r, UnionT (UnionRep.make (VoidT.why r) t [])) - | DefT (r, OptionalT t) -> - DefT (r, OptionalT (not_null t)) - | DefT (r, UnionT rep) -> - recurse_into_union not_null (r, UnionRep.members rep) - | DefT (r, NullT) -> DefT (r, EmptyT) - | DefT (r, MixedT Mixed_everything) -> DefT (r, MixedT Mixed_non_null) - | DefT (r, MixedT Mixed_non_void) -> DefT (r, MixedT Mixed_non_maybe) + | MaybeT (r, t) -> UnionT (r, UnionRep.make (Trust.bogus_trust () |> VoidT.why r) t []) + | OptionalT (r, t) -> OptionalT (r, not_null t) + | UnionT (r, rep) -> recurse_into_union not_null (r, UnionRep.members rep) + | DefT (r, trust, NullT) -> DefT (r, trust, EmptyT Bottom) + | DefT (r, trust, MixedT Mixed_everything) -> DefT (r, trust, MixedT Mixed_non_null) + | DefT (r, trust, MixedT Mixed_non_void) -> DefT (r, trust, MixedT Mixed_non_maybe) | t -> t let undefined = function - | DefT (r, MaybeT _) -> VoidT.why r - | DefT (_, VoidT) as t -> t - | DefT (r, OptionalT _) -> VoidT.why r - | DefT (r, MixedT Mixed_everything) - | DefT (r, MixedT Mixed_non_null) -> VoidT.why r - | DefT (_, AnyT) as t -> t + | MaybeT (r, _) -> VoidT.why r |> with_trust bogus_trust + | DefT (_, _, VoidT) as t -> t + | OptionalT (r, _) -> VoidT.why r |> with_trust bogus_trust + | DefT (r, trust, MixedT Mixed_everything) + | DefT (r, trust, MixedT Mixed_non_null) -> + VoidT.why r trust + | AnyT _ as t -> t + | DefT (r, trust, _) -> EmptyT.why r trust | t -> let reason = reason_of_t t in - EmptyT.why reason + EmptyT.why reason |> with_trust bogus_trust let rec not_undefined = function - | DefT (r, MaybeT t) -> - DefT (r, UnionT (UnionRep.make (NullT.why r) t [])) - | DefT (_, OptionalT t) -> not_undefined t - | DefT (r, UnionT rep) -> - recurse_into_union not_undefined (r, UnionRep.members rep) - | DefT (r, VoidT) -> DefT (r, EmptyT) - | DefT (r, MixedT Mixed_everything) -> DefT (r, MixedT Mixed_non_void) - | DefT (r, MixedT Mixed_non_null) -> DefT (r, MixedT Mixed_non_maybe) + | MaybeT (r, t) -> UnionT (r, UnionRep.make (NullT.why r |> with_trust bogus_trust) t []) + | OptionalT (_, t) -> not_undefined t + | UnionT (r, rep) -> recurse_into_union not_undefined (r, UnionRep.members rep) + | DefT (r, trust, VoidT) -> DefT (r, trust, EmptyT Bottom) + | DefT (r, trust, MixedT Mixed_everything) -> DefT (r, 
trust, MixedT Mixed_non_void) + | DefT (r, trust, MixedT Mixed_non_null) -> DefT (r, trust, MixedT Mixed_non_maybe) | t -> t let string_literal expected_loc sense expected t = let expected_desc = RStringLit expected in - let lit_reason = replace_reason_const expected_desc in + let lit_reason = replace_desc_new_reason expected_desc in match t with - | DefT (_, StrT (Literal (_, actual))) -> - if actual = expected then t - else DefT (mk_reason expected_desc expected_loc, StrT (Literal (Some sense, expected))) - | DefT (r, StrT Truthy) when expected <> "" -> - DefT (lit_reason r, StrT (Literal (None, expected))) - | DefT (r, StrT AnyLiteral) -> - DefT (lit_reason r, StrT (Literal (None, expected))) - | DefT (r, MixedT _) -> - DefT (lit_reason r, StrT (Literal (None, expected))) - | DefT (_, AnyT) as t -> t - | _ -> DefT (reason_of_t t, EmptyT) + | DefT (_, trust, StrT (Literal (_, actual))) -> + if actual = expected then + t + else + DefT (mk_reason expected_desc expected_loc, trust, StrT (Literal (Some sense, expected))) + | DefT (r, trust, StrT Truthy) when expected <> "" -> + DefT (lit_reason r, trust, StrT (Literal (None, expected))) + | DefT (r, trust, StrT AnyLiteral) -> DefT (lit_reason r, trust, StrT (Literal (None, expected))) + | DefT (r, trust, MixedT _) -> DefT (lit_reason r, trust, StrT (Literal (None, expected))) + | AnyT _ as t -> t + | DefT (r, trust, _) -> DefT (r, trust, EmptyT Bottom) + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_string_literal expected = function - | DefT (r, StrT (Literal (_, actual))) when actual = expected -> DefT (r, EmptyT) + | DefT (r, trust, StrT (Literal (_, actual))) when actual = expected -> + DefT (r, trust, EmptyT Bottom) | t -> t let number_literal expected_loc sense expected t = - let _, expected_raw = expected in + let (_, expected_raw) = expected in let expected_desc = RNumberLit expected_raw in - let lit_reason = replace_reason_const expected_desc in + let lit_reason = replace_desc_new_reason expected_desc in match t with - | DefT (_, NumT (Literal (_, (_, actual_raw)))) -> - if actual_raw = expected_raw then t - else DefT (mk_reason expected_desc expected_loc, NumT (Literal (Some sense, expected))) - | DefT (r, NumT Truthy) when snd expected <> "0" -> - DefT (lit_reason r, NumT (Literal (None, expected))) - | DefT (r, NumT AnyLiteral) -> - DefT (lit_reason r, NumT (Literal (None, expected))) - | DefT (r, MixedT _) -> - DefT (lit_reason r, NumT (Literal (None, expected))) - | DefT (_, AnyT) as t -> t - | _ -> DefT (reason_of_t t, EmptyT) + | DefT (_, trust, NumT (Literal (_, (_, actual_raw)))) -> + if actual_raw = expected_raw then + t + else + DefT (mk_reason expected_desc expected_loc, trust, NumT (Literal (Some sense, expected))) + | DefT (r, trust, NumT Truthy) when snd expected <> "0" -> + DefT (lit_reason r, trust, NumT (Literal (None, expected))) + | DefT (r, trust, NumT AnyLiteral) -> DefT (lit_reason r, trust, NumT (Literal (None, expected))) + | DefT (r, trust, MixedT _) -> DefT (lit_reason r, trust, NumT (Literal (None, expected))) + | AnyT _ as t -> t + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_number_literal expected = function - | DefT (r, NumT (Literal (_, actual))) when snd actual = snd expected -> DefT (r, EmptyT) + | DefT (r, trust, NumT (Literal (_, actual))) when snd actual = snd expected -> + DefT (r, trust, EmptyT Bottom) | t -> t let true_ t = - let lit_reason = replace_reason_const (RBooleanLit true) in + let lit_reason = replace_desc_new_reason (RBooleanLit true) in 
match t with - | DefT (r, BoolT (Some true)) -> DefT (lit_reason r, BoolT (Some true)) - | DefT (r, BoolT None) -> DefT (lit_reason r, BoolT (Some true)) - | DefT (r, MixedT _) -> DefT (lit_reason r, BoolT (Some true)) - | DefT (_, AnyT) as t -> t - | t -> DefT (reason_of_t t, EmptyT) + | DefT (r, trust, BoolT (Some true)) -> DefT (lit_reason r, trust, BoolT (Some true)) + | DefT (r, trust, BoolT None) -> DefT (lit_reason r, trust, BoolT (Some true)) + | DefT (r, trust, MixedT _) -> DefT (lit_reason r, trust, BoolT (Some true)) + | AnyT _ as t -> t + | t -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_true t = - let lit_reason = replace_reason_const (RBooleanLit false) in + let lit_reason = replace_desc_new_reason (RBooleanLit false) in match t with - | DefT (r, BoolT (Some true)) -> DefT (r, EmptyT) - | DefT (r, BoolT None) -> DefT (lit_reason r, BoolT (Some false)) + | DefT (r, trust, BoolT (Some true)) -> DefT (r, trust, EmptyT Bottom) + | DefT (r, trust, BoolT None) -> DefT (lit_reason r, trust, BoolT (Some false)) | t -> t let false_ t = - let lit_reason = replace_reason_const (RBooleanLit false) in + let lit_reason = replace_desc_new_reason (RBooleanLit false) in match t with - | DefT (r, BoolT (Some false)) -> DefT (lit_reason r, BoolT (Some false)) - | DefT (r, BoolT None) -> DefT (lit_reason r, BoolT (Some false)) - | DefT (r, MixedT _) -> DefT (lit_reason r, BoolT (Some false)) - | DefT (_, AnyT) as t -> t - | t -> DefT (reason_of_t t, EmptyT) + | DefT (r, trust, BoolT (Some false)) -> DefT (lit_reason r, trust, BoolT (Some false)) + | DefT (r, trust, BoolT None) -> DefT (lit_reason r, trust, BoolT (Some false)) + | DefT (r, trust, MixedT _) -> DefT (lit_reason r, trust, BoolT (Some false)) + | AnyT _ as t -> t + | t -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_false t = - let lit_reason = replace_reason_const (RBooleanLit true) in + let lit_reason = replace_desc_new_reason (RBooleanLit true) in match t with - | DefT (r, BoolT (Some false)) -> DefT (r, EmptyT) - | DefT (r, BoolT None) -> DefT (lit_reason r, BoolT (Some true)) + | DefT (r, trust, BoolT (Some false)) -> DefT (r, trust, EmptyT Bottom) + | DefT (r, trust, BoolT None) -> DefT (lit_reason r, trust, BoolT (Some true)) | t -> t let boolean t = match t with - | DefT (r, MixedT Mixed_truthy) -> DefT (replace_reason_const BoolT.desc r, BoolT (Some true)) - | DefT (r, MixedT _) -> BoolT.why r - | DefT (_, (AnyT | BoolT _)) -> t - | _ -> DefT (reason_of_t t, EmptyT) + | DefT (r, trust, MixedT Mixed_truthy) -> + DefT (replace_desc_new_reason BoolT.desc r, trust, BoolT (Some true)) + | DefT (r, trust, MixedT _) -> BoolT.why r trust + | DefT (_, _, BoolT _) + | AnyT _ -> + t + | DefT (r, trust, _) -> DefT (r, trust, EmptyT Bottom) + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_boolean t = match t with (* TODO: this is wrong, AnyT can be a bool *) - | DefT (_, (AnyT | BoolT _)) -> DefT (reason_of_t t, EmptyT) + | DefT (_, trust, BoolT _) -> DefT (reason_of_t t, trust, EmptyT Bottom) + | AnyT _ -> DefT (reason_of_t t, Trust.bogus_trust (), EmptyT Bottom) | _ -> t let string t = match t with - | DefT (r, MixedT Mixed_truthy) -> DefT (replace_reason_const StrT.desc r, StrT Truthy) - | DefT (r, MixedT _) -> StrT.why r - | DefT (_, (AnyT | StrT _)) -> t - | _ -> DefT (reason_of_t t, EmptyT) + | DefT (r, trust, MixedT Mixed_truthy) -> + DefT (replace_desc_new_reason StrT.desc r, trust, StrT Truthy) + | DefT (r, trust, MixedT _) -> StrT.why r trust + | DefT (_, _, StrT _) + | AnyT _ 
-> + t + | DefT (r, trust, _) -> DefT (r, trust, EmptyT Bottom) + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_string t = match t with (* TODO: this is wrong, AnyT can be a string *) - | DefT (_, (AnyT | StrT _)) -> DefT (reason_of_t t, EmptyT) + | AnyT _ -> DefT (reason_of_t t, Trust.bogus_trust (), EmptyT Bottom) + | DefT (_, trust, StrT _) -> DefT (reason_of_t t, trust, EmptyT Bottom) | _ -> t +let symbol t = + match t with + | DefT (r, trust, MixedT _) -> + DefT (replace_desc_new_reason RSymbol r, trust, MixedT Mixed_symbol) + | _ -> + (* TODO: since symbols aren't supported, `t` is never a symbol so always empty *) + let reason = reason_of_t t in + DefT (replace_desc_new_reason RSymbol reason, bogus_trust (), EmptyT Bottom) + +let not_symbol t = + (* TODO: since symbols aren't supported, `t` is never a symbol so always pass it through *) + t + let number t = match t with - | DefT (r, MixedT Mixed_truthy) -> DefT (replace_reason_const NumT.desc r, NumT Truthy) - | DefT (r, MixedT _) -> NumT.why r - | DefT (_, (AnyT | NumT _)) -> t - | _ -> DefT (reason_of_t t, EmptyT) + | DefT (r, trust, MixedT Mixed_truthy) -> + DefT (replace_desc_new_reason NumT.desc r, trust, NumT Truthy) + | DefT (r, trust, MixedT _) -> NumT.why r trust + | DefT (_, _, NumT _) + | AnyT _ -> + t + | DefT (r, trust, _) -> DefT (r, trust, EmptyT Bottom) + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_number t = match t with (* TODO: this is wrong, AnyT can be a number *) - | DefT (_, (AnyT | NumT _)) -> DefT (reason_of_t t, EmptyT) + | AnyT _ -> DefT (reason_of_t t, Trust.bogus_trust (), EmptyT Bottom) + | DefT (_, trust, NumT _) -> DefT (reason_of_t t, trust, EmptyT Bottom) | _ -> t let object_ cx t = match t with - | DefT (r, MixedT flavor) -> - let reason = replace_reason_const RObject r in - let dict = Some { - key = StrT.why r; - value = DefT (replace_reason_const MixedT.desc r, MixedT Mixed_everything); - dict_name = None; - dict_polarity = Neutral; - } in + | DefT (r, trust, MixedT flavor) -> + let reason = replace_desc_new_reason RObject r in + let dict = + Some + { + key = StrT.why r |> with_trust bogus_trust; + value = + DefT (replace_desc_new_reason MixedT.desc r, bogus_trust (), MixedT Mixed_everything); + dict_name = None; + dict_polarity = Polarity.Positive; + } + in let proto = ObjProtoT reason in let obj = Obj_type.mk_with_proto cx reason ?dict proto in - begin match flavor with - | Mixed_truthy - | Mixed_non_maybe - | Mixed_non_null -> obj - | Mixed_everything - | Mixed_non_void -> - let reason = replace_reason_const RUnion (reason_of_t t) in - DefT (reason, UnionT (UnionRep.make (NullT.why r) obj [])) - | Empty_intersection -> DefT (r, EmptyT) + begin + match flavor with + | Mixed_symbol + | Mixed_truthy + | Mixed_non_maybe + | Mixed_non_null -> + obj + | Mixed_function + | Mixed_everything + | Mixed_non_void -> + let reason = replace_desc_new_reason RUnion (reason_of_t t) in + UnionT (reason, UnionRep.make (NullT.why r trust) obj []) end - | DefT (_, (AnyT | AnyObjT | ObjT _ | ArrT _ | NullT | InstanceT _)) -> t - | _ -> DefT (reason_of_t t, EmptyT) + | DefT (_, _, (ObjT _ | ArrT _ | NullT | InstanceT _)) + | AnyT _ -> + t + | DefT (r, trust, _) -> DefT (r, trust, EmptyT Bottom) + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_object t = match t with - | DefT (_, (AnyT | AnyObjT | ObjT _ | ArrT _ | NullT | InstanceT _)) -> - DefT (reason_of_t t, EmptyT) + | AnyT _ -> DefT (reason_of_t t, Trust.bogus_trust (), EmptyT Bottom) + | DefT (_, 
trust, (ObjT _ | ArrT _ | NullT | InstanceT _)) -> + DefT (reason_of_t t, trust, EmptyT Bottom) | _ -> t -let function_ t = - match t with - | DefT (r, MixedT _) -> - let desc = RFunction RNormal in - DefT (replace_reason_const desc r, AnyFunT) - | DefT (_, (AnyT | AnyFunT | FunT _ | ClassT _)) -> t - | _ -> DefT (reason_of_t t, EmptyT) +let function_ = function + | DefT (r, trust, MixedT _) -> + DefT (replace_desc_new_reason (RFunction RUnknown) r, trust, MixedT Mixed_function) + | (DefT (_, _, (FunT _ | ClassT _)) | AnyT _) as t -> t + | DefT (r, trust, _) -> DefT (r, trust, EmptyT Bottom) + | t -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_function t = match t with - | DefT (_, (AnyT | AnyFunT | FunT _ | ClassT _)) -> DefT (reason_of_t t, EmptyT) + | AnyT _ -> DefT (reason_of_t t, Trust.bogus_trust (), EmptyT Bottom) + | DefT (_, trust, (FunT _ | ClassT _)) -> DefT (reason_of_t t, trust, EmptyT Bottom) | _ -> t let array t = match t with - | DefT (r, MixedT _) -> - DefT (replace_reason_const RArray r, - ArrT (ArrayAT (DefT (r, MixedT Mixed_everything), None)) - ) - | DefT (_, (AnyT | ArrT _)) -> + | DefT (r, trust, MixedT _) -> + DefT + ( replace_desc_new_reason RROArrayType r, + trust, + ArrT (ROArrayAT (DefT (r, trust, MixedT Mixed_everything))) ) + | DefT (_, _, ArrT _) + | AnyT _ -> t - | _ -> - DefT (reason_of_t t, EmptyT) + | _ -> DefT (reason_of_t t, bogus_trust (), EmptyT Bottom) let not_array t = match t with - | DefT (_, (AnyT | ArrT _)) -> DefT (reason_of_t t, EmptyT) + | AnyT _ -> DefT (reason_of_t t, Trust.bogus_trust (), EmptyT Bottom) + | DefT (_, trust, ArrT _) -> DefT (reason_of_t t, trust, EmptyT Bottom) | _ -> t diff --git a/src/typing/type_inference_hooks_js.ml b/src/typing/type_inference_hooks_js.ml index fcb4f02d035..6bf59d13582 100644 --- a/src/typing/type_inference_hooks_js.ml +++ b/src/typing/type_inference_hooks_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -21,7 +21,9 @@ let ref_nop _ _ _ = () let class_member_decl_nop _ _ _ _ _ = () -let obj_prop_decl_nop _ _ _ = () +let obj_prop_decl_nop _ _ _ = false + +let obj_type_prop_decl_nop _ _ _ = () let require_pattern_nop _ = () @@ -33,7 +35,7 @@ let export_named_nop _ _ = () (* This type represents the possible definition-points for an lvalue. *) type def = - (** + (* * Given a variable declaration such as: * * var a = 42; // <-- this @@ -43,7 +45,6 @@ type def = * looking at its assignments, as the "definition" for the lvalue. *) | Val of Type.t - (* * Given a destructuring pattern for an initialization such as: * @@ -57,184 +58,113 @@ type def = * destructuring pattern. *) | Parent of Type.t - - (** + (* * For assignments, we consider lvalues to have the same "definition" as * corresponding rvalues: both kinds of references point to the declaration site. 
*) | Id type hook_state_t = { - id_hook: - (Context.t -> - string -> Loc.t -> - bool); - - lval_hook: - (Context.t -> - string -> Loc.t -> def -> - unit); - - member_hook: - (Context.t -> - string -> Loc.t -> Type.t -> - bool); - -(* TODO: This is inconsistent with the way the id/member hooks work, but we + id_hook: Context.t -> string -> ALoc.t -> bool; + lval_hook: Context.t -> string -> ALoc.t -> def -> unit; + member_hook: Context.t -> string -> ALoc.t -> Type.t -> bool; + (* TODO: This is inconsistent with the way the id/member hooks work, but we currently don't need a way to override call types, so it simplifies things a bit *) - call_hook: - (Context.t -> - string -> Loc.t -> Type.t -> - unit); - + call_hook: Context.t -> string -> ALoc.t -> Type.t -> unit; import_hook: - (Context.t -> - (* Location of the string identifiying the imported module, and the contents of that string. *) - (Loc.t * string) -> - (* Location of the entire import statement/require call *) - Loc.t -> - unit); - - jsx_hook: - (Context.t -> - string -> Loc.t -> Type.t -> - bool); - - ref_hook: - (Context.t -> - Loc.t -> - Loc.t -> - unit); - + Context.t -> + (* Location of the string identifiying the imported module, and the contents of that string. *) + ALoc.t * string -> + (* Location of the entire import statement/require call *) + ALoc.t -> + unit; + jsx_hook: Context.t -> string -> ALoc.t -> Type.t -> bool; class_member_decl_hook: - (Context.t -> - Type.t (* self *) -> - bool (* static *) -> - string -> Loc.t -> - unit); - - obj_prop_decl_hook: - (Context.t -> - string -> Loc.t -> - unit); - - require_pattern_hook: - Loc.t -> unit; - + Context.t -> Type.t (* self *) -> bool (* static *) -> string -> ALoc.t -> unit; + obj_prop_decl_hook: Context.t -> string -> ALoc.t -> bool; + obj_type_prop_decl_hook: Context.t -> string -> ALoc.t -> unit; + require_pattern_hook: ALoc.t -> unit; (* Called when ObjT 1 ~> ObjT 2 *) - obj_to_obj_hook: - (Context.t -> - Type.t (* ObjT 1 *) -> - Type.t (* ObjT 2 *) -> - unit); - + obj_to_obj_hook: Context.t -> Type.t (* ObjT 1 *) -> Type.t (* ObjT 2 *) -> unit; (* Called when InstanceT ~> ObjT *) - instance_to_obj_hook: - (Context.t -> - Type.t (* InstanceT *) -> - Type.t (* ObjT *) -> - unit); - + instance_to_obj_hook: Context.t -> Type.t (* InstanceT *) -> Type.t (* ObjT *) -> unit; (* Dispatched with "default" for default exports *) - export_named_hook: string (* name *) -> Loc.t -> unit; + export_named_hook: string (* name *) -> ALoc.t -> unit; } -let nop_hook_state = { - id_hook = id_nop; - lval_hook = lval_nop; - member_hook = member_nop; - call_hook = call_nop; - import_hook = import_nop; - jsx_hook = jsx_nop; - ref_hook = ref_nop; - class_member_decl_hook = class_member_decl_nop; - obj_prop_decl_hook = obj_prop_decl_nop; - require_pattern_hook = require_pattern_nop; - obj_to_obj_hook = obj_to_obj_nop; - instance_to_obj_hook = instance_to_obj_nop; - export_named_hook = export_named_nop; -} +let nop_hook_state = + { + id_hook = id_nop; + lval_hook = lval_nop; + member_hook = member_nop; + call_hook = call_nop; + import_hook = import_nop; + jsx_hook = jsx_nop; + class_member_decl_hook = class_member_decl_nop; + obj_prop_decl_hook = obj_prop_decl_nop; + obj_type_prop_decl_hook = obj_type_prop_decl_nop; + require_pattern_hook = require_pattern_nop; + obj_to_obj_hook = obj_to_obj_nop; + instance_to_obj_hook = instance_to_obj_nop; + export_named_hook = export_named_nop; + } let hook_state = ref nop_hook_state -let set_id_hook hook = - hook_state := { !hook_state with id_hook = 
hook } - -let set_lval_hook hook = - hook_state := { !hook_state with lval_hook = hook } +let set_id_hook hook = hook_state := { !hook_state with id_hook = hook } -let set_member_hook hook = - hook_state := { !hook_state with member_hook = hook } +let set_lval_hook hook = hook_state := { !hook_state with lval_hook = hook } -let set_call_hook hook = - hook_state := { !hook_state with call_hook = hook } +let set_member_hook hook = hook_state := { !hook_state with member_hook = hook } -let set_import_hook hook = - hook_state := { !hook_state with import_hook = hook } +let set_call_hook hook = hook_state := { !hook_state with call_hook = hook } -let set_jsx_hook hook = - hook_state := { !hook_state with jsx_hook = hook } +let set_import_hook hook = hook_state := { !hook_state with import_hook = hook } -let set_ref_hook hook = - hook_state := { !hook_state with ref_hook = hook } +let set_jsx_hook hook = hook_state := { !hook_state with jsx_hook = hook } let set_class_member_decl_hook hook = hook_state := { !hook_state with class_member_decl_hook = hook } -let set_obj_prop_decl_hook hook = - hook_state := { !hook_state with obj_prop_decl_hook = hook } +let set_obj_prop_decl_hook hook = hook_state := { !hook_state with obj_prop_decl_hook = hook } -let set_require_pattern_hook hook = - hook_state := { !hook_state with require_pattern_hook = hook } +let set_obj_type_prop_decl_hook hook = + hook_state := { !hook_state with obj_type_prop_decl_hook = hook } -let set_obj_to_obj_hook hook = - hook_state := { !hook_state with obj_to_obj_hook = hook } +let set_require_pattern_hook hook = hook_state := { !hook_state with require_pattern_hook = hook } -let set_instance_to_obj_hook hook = - hook_state := { !hook_state with instance_to_obj_hook = hook } +let set_obj_to_obj_hook hook = hook_state := { !hook_state with obj_to_obj_hook = hook } -let set_export_named_hook hook = - hook_state := { !hook_state with export_named_hook = hook } +let set_instance_to_obj_hook hook = hook_state := { !hook_state with instance_to_obj_hook = hook } -let reset_hooks () = - hook_state := nop_hook_state +let set_export_named_hook hook = hook_state := { !hook_state with export_named_hook = hook } -let dispatch_id_hook cx name loc = - !hook_state.id_hook cx name loc +let reset_hooks () = hook_state := nop_hook_state -let dispatch_lval_hook cx name lhs_loc rhs_loc = - !hook_state.lval_hook cx name lhs_loc rhs_loc +let dispatch_id_hook cx name loc = !hook_state.id_hook cx name loc -let dispatch_member_hook cx name loc this_t = - !hook_state.member_hook cx name loc this_t +let dispatch_lval_hook cx name lhs_loc rhs_loc = !hook_state.lval_hook cx name lhs_loc rhs_loc -let dispatch_call_hook cx name loc this_t = - !hook_state.call_hook cx name loc this_t +let dispatch_member_hook cx name loc this_t = !hook_state.member_hook cx name loc this_t -let dispatch_import_hook cx name loc = - !hook_state.import_hook cx name loc +let dispatch_call_hook cx name loc this_t = !hook_state.call_hook cx name loc this_t -let dispatch_jsx_hook cx name loc this_t = - !hook_state.jsx_hook cx name loc this_t +let dispatch_import_hook cx name loc = !hook_state.import_hook cx name loc -let dispatch_ref_hook cx loc = - !hook_state.ref_hook cx loc +let dispatch_jsx_hook cx name loc this_t = !hook_state.jsx_hook cx name loc this_t let dispatch_class_member_decl_hook cx self static name loc = !hook_state.class_member_decl_hook cx self static name loc -let dispatch_obj_prop_decl_hook cx name loc = - !hook_state.obj_prop_decl_hook cx name loc +let 
dispatch_obj_prop_decl_hook cx name loc = !hook_state.obj_prop_decl_hook cx name loc + +let dispatch_obj_type_prop_decl_hook cx name loc = !hook_state.obj_type_prop_decl_hook cx name loc -let dispatch_require_pattern_hook loc = - !hook_state.require_pattern_hook loc +let dispatch_require_pattern_hook loc = !hook_state.require_pattern_hook loc -let dispatch_obj_to_obj_hook cx t1 t2 = - !hook_state.obj_to_obj_hook cx t1 t2 +let dispatch_obj_to_obj_hook cx t1 t2 = !hook_state.obj_to_obj_hook cx t1 t2 -let dispatch_instance_to_obj_hook cx t1 t2 = - !hook_state.instance_to_obj_hook cx t1 t2 +let dispatch_instance_to_obj_hook cx t1 t2 = !hook_state.instance_to_obj_hook cx t1 t2 -let dispatch_export_named_hook loc = - !hook_state.export_named_hook loc +let dispatch_export_named_hook loc = !hook_state.export_named_hook loc diff --git a/src/typing/type_inference_js.ml b/src/typing/type_inference_js.ml index 4af8f801027..4fce3bd6e00 100644 --- a/src/typing/type_inference_js.ml +++ b/src/typing/type_inference_js.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -9,19 +9,12 @@ module Ast = Flow_ast (* infer phase services *) -module FlowError = Flow_error module ImpExp = Import_export -module Utils = Utils_js (**********) (* Driver *) (**********) -let force_annotations cx = - let m = Context.module_ref cx in - let tvar = Flow_js.lookup_module cx m in - Flow_js.enforce_strict cx tvar - (* core inference, assuming setup and teardown happens elsewhere *) let infer_core cx statements = try @@ -33,13 +26,11 @@ let infer_core cx statements = stmts | Abnormal.Exn (Abnormal.Stmts stmts, _) -> (* should never happen *) - let loc = Loc.({ none with source = Some (Context.file cx) }) in - Flow_js.add_output cx FlowError.(EInternal (loc, AbnormalControlFlow)); + let loc = Loc.{ none with source = Some (Context.file cx) } |> ALoc.of_loc in + Flow_js.add_output cx Error_message.(EInternal (loc, AbnormalControlFlow)); stmts - | Abnormal.Exn _ -> - failwith "Flow bug: Statement.toplevels threw with non-stmts payload" - | exc -> - raise exc + | Abnormal.Exn _ -> failwith "Flow bug: Statement.toplevels threw with non-stmts payload" + | exc -> raise exc (* There's a .flowconfig option to specify suppress_comments regexes. 
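(* A minimal standalone sketch of the hook-state pattern that the reformatted
   type_inference_hooks_js.ml above uses: a record of callbacks held in a ref,
   no-op defaults, [set_*] functions that replace a single field, and
   [dispatch_*] functions that read whatever is currently installed. The field
   names and types here are simplified placeholders, not Flow's actual hook
   signatures. *)
type hook_state_t = {
  id_hook: string -> int -> bool; (* identifier name and a stand-in location *)
  member_hook: string -> int -> bool;
}

let nop_hook_state = { id_hook = (fun _ _ -> false); member_hook = (fun _ _ -> false) }

let hook_state = ref nop_hook_state

let set_id_hook hook = hook_state := { !hook_state with id_hook = hook }

let dispatch_id_hook name loc = !hook_state.id_hook name loc

let reset_hooks () = hook_state := nop_hook_state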
Any * comments that match those regexes will suppress any errors on the next line @@ -47,18 +38,20 @@ let infer_core cx statements = let scan_for_error_suppressions = let should_suppress suppress_comments comment = List.exists (fun r -> Str.string_match r comment 0) suppress_comments - - in fun cx comments -> + in + fun cx comments -> let suppress_comments = Context.suppress_comments cx in let should_suppress = should_suppress suppress_comments in - (* Bail immediately if we're not using error suppressing comments *) - if suppress_comments <> [] - then List.iter (function - | loc, Ast.Comment.Block comment - | loc, Ast.Comment.Line comment when should_suppress comment -> - Context.add_error_suppression cx loc - | _ -> ()) comments + if suppress_comments <> [] then + List.iter + (function + | (loc, Ast.Comment.Block comment) + | (loc, Ast.Comment.Line comment) + when should_suppress comment -> + Context.add_error_suppression cx loc + | _ -> ()) + comments type 'a located = { value: 'a; @@ -68,142 +61,137 @@ type 'a located = { type range_keyword = | Unending (* Comment lasting until negated *) | Line (* covers current line *) - | Next_line (* covers next line *) + | Next_line + +(* covers next line *) let scan_for_lint_suppressions = let ignore_chars = " \t\n\r*" in - (* Get the position induced by reading the string str from the starting position pos *) let update_pos = (* Get the position induced by reading [the substring of str from index * onwards] from the starting position pos *) let rec update_pos' pos str index length = - let open Loc in - if index < length then - let new_loc, ind_diff = - match str.[index] with - | '\r' -> - if index + 1 < length && str.[index + 1] = '\n' then - {line = pos.line + 1; column = 0; offset = pos.offset + 2}, 2 - else {line = pos.line + 1; column = 0; offset = pos.offset + 1}, 1 - | '\n' -> {line = pos.line + 1; column = 0; offset = pos.offset + 1}, 1 - | _ -> {pos with column = pos.column + 1; offset = pos.offset + 1}, 1 - in update_pos' new_loc str (index + ind_diff) length - else - pos - in fun pos str -> - update_pos' pos str 0 (String.length str) + Loc.( + if index < length then + let (new_loc, ind_diff) = + match str.[index] with + | '\r' -> + if index + 1 < length && str.[index + 1] = '\n' then + ({ line = pos.line + 1; column = 0 }, 2) + else + ({ line = pos.line + 1; column = 0 }, 1) + | '\n' -> ({ line = pos.line + 1; column = 0 }, 1) + | _ -> ({ pos with column = pos.column + 1 }, 1) + in + update_pos' new_loc str (index + ind_diff) length + else + pos) + in + (fun pos str -> update_pos' pos str 0 (String.length str)) in - - (* Given a string like `"flowlint-line foo:bar"`, returns `Some (Line, Some "foo:bar")` *) let parse_keyword : string located -> (range_keyword located * string located option) option = - let keywords = [ - "flowlint-line", Line; - "flowlint-next-line", Next_line; - "flowlint", Unending; - ] in - + let keywords = + [("flowlint-line", Line); ("flowlint-next-line", Next_line); ("flowlint", Unending)] + in (* [prefix_length prefix str] returns the position of the first non-whitespace character in [str] after [prefix]. If [str] does not start with [prefix], or [prefix] is not followed by whitespace, returns [None]. 
*) let prefix_length prefix str = let sl = String.length prefix in - if not (String_utils.string_starts_with str prefix) then None - else if String.length str = sl then Some sl - else match String_utils.index_not_from_opt str sl ignore_chars with + if not (String_utils.string_starts_with str prefix) then + None + else if String.length str = sl then + Some sl + else + match String_utils.index_not_from_opt str sl ignore_chars with | Some i when i = sl -> None | Some i -> Some i | None -> None in - let rec try_keyword comment = function - | [] -> None - | (prefix, range)::todo -> - let { loc; value } = comment in - let value_len = String.length value in - begin match prefix_length prefix value with - | Some i when i = value_len -> - Some ({ loc; value = range }, None) - | Some i -> - let range_end = update_pos loc.Loc.start prefix in - let args_start = update_pos loc.Loc.start (String.sub value 0 i) in - let range = { - value = range; - loc = { loc with Loc._end = range_end }; - } in - let args = { - value = String.sub value i (String.length value - i); - loc = { loc with Loc.start = args_start } - } in - Some (range, Some args) - | None -> try_keyword comment todo - end + | [] -> None + | (prefix, range) :: todo -> + let { loc; value } = comment in + let value_len = String.length value in + begin + match prefix_length prefix value with + | Some i when i = value_len -> Some ({ loc; value = range }, None) + | Some i -> + let range_end = update_pos loc.Loc.start prefix in + let args_start = update_pos loc.Loc.start (String.sub value 0 i) in + let range = { value = range; loc = { loc with Loc._end = range_end } } in + let args = + { + value = String.sub value i (String.length value - i); + loc = { loc with Loc.start = args_start }; + } + in + Some (range, Some args) + | None -> try_keyword comment todo + end in - - fun comment -> try_keyword comment keywords + (fun comment -> try_keyword comment keywords) in - (* Trims whitespace and stars from the front and end of loc_str. 
*) let trim_and_stars_locational { value; loc } = - let open Loc in - let start_offset = String_utils.index_not_opt value ignore_chars in - let end_offset = String_utils.rindex_not_opt value ignore_chars in - let start = match start_offset with - | Some offset -> update_pos loc.start (String.sub value 0 offset) - | None -> loc.start - in - let value = match start_offset, end_offset with - | Some i, Some j -> String.sub value i (j - i + 1) - | Some i, None -> String.sub value i (String.length value - i) - | None, Some j -> String.sub value 0 (j + 1) - | None, None -> value - in - let _end = update_pos start value in - let loc = { loc with start; _end } in - { value; loc } + Loc.( + let start_offset = String_utils.index_not_opt value ignore_chars in + let end_offset = String_utils.rindex_not_opt value ignore_chars in + let start = + match start_offset with + | Some offset -> update_pos loc.start (String.sub value 0 offset) + | None -> loc.start + in + let value = + match (start_offset, end_offset) with + | (Some i, Some j) -> String.sub value i (j - i + 1) + | (Some i, None) -> String.sub value i (String.length value - i) + | (None, Some j) -> String.sub value 0 (j + 1) + | (None, None) -> value + in + let _end = update_pos start value in + let loc = { loc with start; _end } in + { value; loc }) in - let split_delim_locational delim { loc; value } = let delim_str = String.make 1 delim in let source = loc.Loc.source in - let parts = String_utils.split_on_char delim value in - let parts, _ = List.fold_left (fun (parts, start) value -> - let _end = update_pos start value in - let next_start = update_pos _end delim_str in - ({loc = {Loc.source; start; _end}; value}::parts, next_start) - ) ([], loc.Loc.start) parts in + let parts = String.split_on_char delim value in + let (parts, _) = + List.fold_left + (fun (parts, start) value -> + let _end = update_pos start value in + let next_start = update_pos _end delim_str in + ({ loc = { Loc.source; start; _end }; value } :: parts, next_start)) + ([], loc.Loc.start) + parts + in List.rev parts in - - let add_error cx (loc, kind) = - let err = FlowError.ELintSetting (loc, kind) in - FlowError.error_of_msg ~trace_reasons:[] ~source_file:(Context.file cx) err - |> Context.add_error cx - in - + let add_error cx (loc, kind) = Error_message.ELintSetting (loc, kind) |> Flow_js.add_output cx in let parse_kind loc_str = match Lints.kinds_of_string loc_str.value with | Some kinds -> Ok kinds | None -> Error (loc_str.loc, LintSettings.Nonexistent_rule) in - let parse_value loc_value = match Severity.severity_of_string loc_value.value with - | Some state -> Ok state - | None -> Error (loc_value.loc, LintSettings.Invalid_setting) + | Some state -> Ok state + | None -> Error (loc_value.loc, LintSettings.Invalid_setting) in - let get_kind_setting cx arg = let arg = trim_and_stars_locational arg in match split_delim_locational ':' arg with | [rule; setting] -> let rule = trim_and_stars_locational rule in let setting = trim_and_stars_locational setting in - begin match parse_kind rule, parse_value setting with - | Ok kinds, Ok setting -> - Some (List.map (fun kind -> ({value = kind; loc = arg.loc}, setting)) kinds) - | rule_result, setting_result -> + begin + match (parse_kind rule, parse_value setting) with + | (Ok kinds, Ok setting) -> + Some (Core_list.map ~f:(fun kind -> ({ value = kind; loc = arg.loc }, setting)) kinds) + | (rule_result, setting_result) -> Core_result.iter_error rule_result ~f:(add_error cx); Core_result.iter_error setting_result ~f:(add_error cx); 
None @@ -212,135 +200,134 @@ let scan_for_lint_suppressions = add_error cx (arg.loc, LintSettings.Malformed_argument); None in - (* parse arguments of the form lint1:setting1,lint2:setting2... *) let get_settings_list cx args = split_delim_locational ',' args - |> List.map (fun rule -> get_kind_setting cx rule |> Option.value ~default:[]) + |> Core_list.map ~f:(fun rule -> get_kind_setting cx rule |> Option.value ~default:[]) in - (* Doesn't preserve offset, but is only used in locations where offset isn't used, * so that's fine. *) let get_range = - let open Loc in - - let range_of_line source line = - let start = {line; column = 0; offset = 0} in - let _end = {line = line + 1; column = 0; offset = 0} in - {source; start; _end} - in - - let range_unending loc = - let new_end = {line = max_int / 2; column = max_int / 2; offset = max_int / 2} - in {loc with _end = new_end} - in - - fun {loc; value = keyword} -> - match keyword with - | Unending -> range_unending loc - | Line -> range_of_line loc.source loc._end.line - | Next_line -> range_of_line loc.source (loc._end.line + 1) + Loc.( + let range_of_line source line = + let start = { line; column = 0 } in + let _end = { line = line + 1; column = 0 } in + { source; start; _end } + in + let range_unending loc = + let new_end = { line = max_int / 2; column = max_int / 2 } in + { loc with _end = new_end } + in + fun { loc; value = keyword } -> + match keyword with + | Unending -> range_unending loc + | Line -> range_of_line loc.source loc._end.line + | Next_line -> range_of_line loc.source (loc._end.line + 1)) in - let convert_comment (loc, comment) = (* Comment locs contain the comment characters themselves. (//, /*, and */) * Trim the locs to line up with the contents of the comment. *) - let open Loc in - match comment with - | Ast.Comment.Block s -> - let new_start = {loc.start with - column = loc.start.column + 2; - offset = loc.start.offset + 2} in - let new_end = {loc._end with - column = loc._end.column - 2; - offset = loc._end.offset - 2} in - let new_loc = {loc with start = new_start; _end = new_end} in - {loc = new_loc; value = s} - | Ast.Comment.Line s -> - let new_start = {loc.start with - column = loc.start.column + 2; - offset = loc.start.offset + 2} in - let new_loc = {loc with start = new_start} in - {loc = new_loc; value = s} - in - - let nested_map f outer_list = - List.map (List.map f) outer_list + Loc.( + match comment with + | Ast.Comment.Block s -> + let new_start = { loc.start with column = loc.start.column + 2 } in + let new_end = { loc._end with column = loc._end.column - 2 } in + let new_loc = { loc with start = new_start; _end = new_end } in + { loc = new_loc; value = s } + | Ast.Comment.Line s -> + let new_start = { loc.start with column = loc.start.column + 2 } in + let new_loc = { loc with start = new_start } in + { loc = new_loc; value = s }) in - + let nested_map f outer_list = Core_list.map ~f:(Core_list.map ~f) outer_list in let process_comment - cx - ((severity_cover_builder, running_settings, suppression_locs) as acc) - comment = + cx ((severity_cover_builder, running_settings, suppression_locs) as acc) comment = let loc_comment = comment |> convert_comment |> trim_and_stars_locational in match parse_keyword loc_comment with | Some (keyword, Some args) -> - (* Case where we're changing certain lint settings *) - let settings_list = - get_settings_list cx args - |> nested_map (fun ({loc; value = kind}, state) -> (kind, (state, loc))) - in - let error_encountered = ref false in - let (new_builder, 
new_running_settings) = - let covered_range = get_range keyword in - ExactCover.update_settings_and_running running_settings - (fun err -> error_encountered := true; add_error cx err) - covered_range settings_list severity_cover_builder in - (* Only report overwritten arguments if there are no no-op arguments, - * to avoid error duplication *) - let () = if not !error_encountered then + (* Case where we're changing certain lint settings *) + let settings_list = + get_settings_list cx args + |> nested_map (fun ({ loc; value = kind }, state) -> (kind, (state, loc))) + in + let error_encountered = ref false in + let (new_builder, new_running_settings) = + let covered_range = get_range keyword in + ExactCover.update_settings_and_running + running_settings + (fun err -> + error_encountered := true; + add_error cx err) + covered_range + settings_list + severity_cover_builder + in + (* Only report overwritten arguments if there are no no-op arguments, + * to avoid error duplication *) + let () = + if not !error_encountered then (* Check for overwritten arguments *) - let used_locs = LintSettings.fold - (fun _ (_, loc) loc_set -> match loc with - | Some loc -> Utils.LocSet.add loc loc_set - | None -> loc_set) - new_running_settings Utils.LocSet.empty + let used_locs = + LintSettings.fold + (fun _ (_, loc) loc_set -> + match loc with + | Some loc -> Loc_collections.LocSet.add loc loc_set + | None -> loc_set) + new_running_settings + Loc_collections.LocSet.empty + in + let arg_locs = + List.map + (function + | (_, (_, loc)) :: _ -> Some loc + | [] -> None) + settings_list in - let arg_locs = List.map + List.iter (function - | (_,(_,loc))::_ -> Some loc - | [] -> None) + | Some arg_loc -> + if not (Loc_collections.LocSet.mem arg_loc used_locs) then ( + error_encountered := true; + add_error cx (arg_loc, LintSettings.Overwritten_argument) + ) + | None -> ()) + arg_locs + in + let suppression_locs = + (* Only report unused suppressions if there are no redundant settings, + * to avoid error duplication. (The suppression_locs are later used to detect + * unused suppressions; by never storing their locations we are effectively + * immediately using them.) *) + if not !error_encountered then + List.fold_left + (fun suppression_locs -> function + | (_, (Severity.Off, loc)) :: _ -> Loc_collections.LocSet.add loc suppression_locs + | _ -> suppression_locs) + suppression_locs settings_list - in - List.iter (function - | Some arg_loc -> - if not (Utils.LocSet.mem arg_loc used_locs) then begin - error_encountered := true; - add_error cx (arg_loc, LintSettings.Overwritten_argument) - end - | None -> ()) arg_locs - in - let suppression_locs = - (* Only report unused suppressions if there are no redundant settings, - * to avoid error duplication. (The suppression_locs are later used to detect - * unused suppressions; by never storing their locations we are effectively - * immediately using them.) 
*) - if not !error_encountered then - List.fold_left ( - fun suppression_locs -> function - | (_, (Severity.Off, loc))::_ -> Utils.LocSet.add loc suppression_locs - | _ -> suppression_locs - ) suppression_locs settings_list - else suppression_locs - in - begin match keyword.value with + else + suppression_locs + in + begin + match keyword.value with | Line | Next_line -> (new_builder, running_settings, suppression_locs) - | Unending -> - (new_builder, new_running_settings, suppression_locs) - end + | Unending -> (new_builder, new_running_settings, suppression_locs) + end | Some (keyword, None) -> - (* Case where we're wholly enabling/disabling linting *) - add_error cx (keyword.loc, LintSettings.Naked_comment); - acc (* TODO (rballard): regional lint disabling *) + (* Case where we're wholly enabling/disabling linting *) + add_error cx (keyword.loc, LintSettings.Naked_comment); + acc (* TODO (rballard): regional lint disabling *) | None -> acc in - fun cx base_settings comments -> let severity_cover_builder = ExactCover.new_builder (Context.file cx) base_settings in - let severity_cover_builder, _, suppression_locs = List.fold_left - (process_comment cx) (severity_cover_builder, base_settings, Utils.LocSet.empty) comments + let (severity_cover_builder, _, suppression_locs) = + List.fold_left + (process_comment cx) + (severity_cover_builder, base_settings, Loc_collections.LocSet.empty) + comments in let severity_cover = ExactCover.bake severity_cover_builder in Context.add_severity_cover cx (Context.file cx) severity_cover; @@ -348,9 +335,10 @@ let scan_for_lint_suppressions = let scan_for_suppressions cx lint_severities file_options comments = let filename = File_key.to_string (Context.file cx) in - let declaration = match file_options with - | Some file_options -> Files.is_declaration file_options filename - | None -> false + let declaration = + match file_options with + | Some file_options -> Files.is_declaration file_options filename + | None -> false in if declaration then (* Declaration mode. @@ -359,8 +347,8 @@ let scan_for_suppressions cx lint_severities file_options comments = else (* Scan comments for line suppressions. *) scan_for_error_suppressions cx comments; - scan_for_lint_suppressions cx lint_severities comments - ; + scan_for_lint_suppressions cx lint_severities comments; + () let add_require_tvars = @@ -378,105 +366,105 @@ let add_require_tvars = let t = Flow_js.get_builtin cx m_name reason in Context.add_require cx loc t in - fun cx file_sig -> - let open File_sig in - SMap.iter (fun mref locs -> - let desc = Reason.RCustom mref in - Nel.iter (add cx desc) locs - ) (require_loc_map file_sig.module_sig); - SMap.iter (fun _ (_, module_sig) -> - SMap.iter (fun mref locs -> - let m_name = Reason.internal_module_name mref in - let desc = Reason.RCustom mref in - Nel.iter (add_decl cx m_name desc) locs - ) (require_loc_map module_sig) - ) file_sig.declare_modules + fun cx (file_sig : File_sig.With_ALoc.t) -> + File_sig.With_ALoc.( + SMap.iter + (fun mref locs -> + let desc = Reason.RCustom mref in + Nel.iter (add cx desc) locs) + (require_loc_map file_sig.module_sig); + SMap.iter + (fun _ (_, module_sig) -> + SMap.iter + (fun mref locs -> + let m_name = Reason.internal_module_name mref in + let desc = Reason.RCustom mref in + Nel.iter (add_decl cx m_name desc) locs) + (require_loc_map module_sig)) + file_sig.declare_modules) (* build module graph *) (* Lint suppressions are handled iff lint_severities is Some. 
*) -let infer_ast ~lint_severities ~file_options ~file_sig cx filename ast = +let infer_ast ~lint_severities ~file_options ~file_sig cx filename comments aloc_ast = assert (Context.is_checked cx); - Flow_js.Cache.clear(); - - let prog_loc, statements, comments = ast in + Flow_js.Cache.clear (); + let (prog_aloc, aloc_statements, aloc_comments) = aloc_ast in add_require_tvars cx file_sig; let module_ref = Context.module_ref cx in - begin - try Context.set_use_def cx @@ Ssa_builder.program_with_scope ast - with _ -> () + try Context.set_use_def cx @@ Ssa_builder.With_ALoc.program_with_scope aloc_ast with _ -> () end; let reason_exports_module = let desc = Reason.RModule module_ref in - Reason.locationless_reason desc + Loc.{ none with source = Some (Context.file cx) } |> ALoc.of_loc |> Reason.mk_reason desc in - let local_exports_var = Tvar.mk cx reason_exports_module in - - let module_scope = Scope.( - let scope = fresh ~var_scope_kind:Module () in - - add_entry "exports" - (Entry.new_var ~loc:(Type.loc_of_t local_exports_var) local_exports_var) - scope; - - add_entry (Reason.internal_name "exports") - (Entry.new_var - ~loc:(Reason.aloc_of_reason reason_exports_module |> ALoc.to_loc) - ~specific:(Type.DefT ( - Reason.replace_reason_const - (Reason.RCustom "undefined exports") - reason_exports_module, - Type.EmptyT)) - (Type.DefT (reason_exports_module, Type.AnyT))) - scope; - - scope - ) in - + let module_scope = + Scope.( + let scope = fresh ~var_scope_kind:Module () in + add_entry + "exports" + (Entry.new_var ~loc:(Type.loc_of_t local_exports_var) local_exports_var) + scope; + + add_entry + (Reason.internal_name "exports") + (Entry.new_var + ~loc:(Reason.aloc_of_reason reason_exports_module) + ~specific: + (Type.DefT (reason_exports_module, Type.bogus_trust (), Type.EmptyT Type.Bottom)) + (Type.Unsoundness.exports_any reason_exports_module)) + scope; + + scope) + in Env.init_env cx module_scope; - let file_loc = Loc.({ none with source = Some filename }) in - let reason = Reason.mk_reason (Reason.RCustom "exports") file_loc in - - let initial_module_t = ImpExp.module_t_of_cx cx in + let file_loc = Loc.{ none with source = Some filename } |> ALoc.of_loc in + let reason = Reason.mk_reason Reason.RExports file_loc in let init_exports = Obj_type.mk cx reason in ImpExp.set_module_exports cx file_loc init_exports; (* infer *) Flow_js.flow_t cx (init_exports, local_exports_var); - let typed_statements = infer_core cx statements in - + let typed_statements = infer_core cx aloc_statements in scan_for_suppressions cx lint_severities file_options comments; - let module_t = Context.( - match Context.module_kind cx with - (* CommonJS with a clobbered module.exports *) - | CommonJSModule(Some(loc)) -> - let module_exports_t = ImpExp.get_module_exports cx file_loc in - let reason = Reason.mk_reason (Reason.RCustom "exports") loc in - ImpExp.mk_commonjs_module_t cx reason_exports_module - reason module_exports_t + let module_t = Import_export.mk_module_t cx reason in + Context.add_module cx module_ref module_t; - (* CommonJS with a mutated 'exports' object *) - | CommonJSModule(None) -> - ImpExp.mk_commonjs_module_t cx reason_exports_module - reason local_exports_var + (prog_aloc, typed_statements, aloc_comments) - (* Uses standard ES module exports *) - | ESModule -> ImpExp.mk_module_t cx reason_exports_module - ) in - Flow_js.flow_t cx (module_t, initial_module_t); +(* Because libdef parsing is overly permissive, a libdef file might include an + unexpected top-level statement like `export type` which 
mutates the module + map and overwrites the builtins object. - (* insist that whatever type flows into exports is fully annotated *) - force_annotations cx; + Since all libdefs share a sig_cx, this mutation will cause problems in later + lib files if not unwound. - prog_loc, typed_statements, comments + Until we can restrict libdef parsing to forbid unexpected behaviors like + this, we need this wrapper to preserve the existing behavior. However, none + of this should be necessary. +*) +let with_libdef_builtins cx f = + (* Store the original builtins and replace with a fresh tvar. *) + let orig_builtins = Flow_js.builtins cx in + Flow_js.mk_builtins cx; + (* This function call might replace the builtins we just installed. *) + f (); + + (* Connect the original builtins to the one we just calculated. *) + let () = + let builtins = Context.find_module cx Files.lib_module_ref in + Flow_js.flow_t cx (orig_builtins, builtins) + in + (* Restore the original builtins tvar for the next file. *) + Context.add_module cx Files.lib_module_ref orig_builtins (* infer a parsed library file. processing is similar to an ordinary module, except that @@ -484,23 +472,25 @@ let infer_ast ~lint_severities ~file_options ~file_sig cx filename ast = b) bindings are added as properties to the builtin object *) let infer_lib_file ~exclude_syms ~lint_severities ~file_options ~file_sig cx ast = - let _, statements, comments = ast in - Flow_js.Cache.clear(); + let aloc_ast = Ast_loc_utils.loc_to_aloc_mapper#program ast in + let (_, _, comments) = ast in + let (_, aloc_statements, _) = aloc_ast in + Flow_js.Cache.clear (); let () = (* TODO: Wait a minute, why do we bother with requires for lib files? Pretty confident that we don't support them in any sensible way. *) add_require_tvars cx file_sig in - let module_scope = Scope.fresh () in Env.init_env ~exclude_syms cx module_scope; - ignore (infer_core cx statements : (Loc.t, Loc.t * Type.t) Ast.Statement.t list); - scan_for_suppressions cx lint_severities file_options comments; + with_libdef_builtins cx (fun () -> + ignore (infer_core cx aloc_statements : (ALoc.t, ALoc.t * Type.t) Ast.Statement.t list); + scan_for_suppressions cx lint_severities file_options comments); - module_scope |> Scope.(iter_entries Entry.(fun name entry -> - Flow_js.set_builtin cx name (actual_type entry) - )); + ( module_scope + |> Scope.( + iter_entries Entry.((fun name entry -> Flow_js.set_builtin cx name (actual_type entry)))) ); SMap.keys Scope.(module_scope.entries) diff --git a/src/typing/type_inference_js.mli b/src/typing/type_inference_js.mli index 72fc205278b..7311a5d4450 100644 --- a/src/typing/type_inference_js.mli +++ b/src/typing/type_inference_js.mli @@ -1,25 +1,27 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) (* Lint suppressions are handled iff lint_severities is Some. 
*) -val infer_ast: - lint_severities: Severity.severity LintSettings.t -> - file_options: Files.options option -> - file_sig: File_sig.t -> +val infer_ast : + lint_severities:Severity.severity LintSettings.t -> + file_options:Files.options option -> + file_sig:File_sig.With_ALoc.t -> Context.t -> File_key.t -> - (Loc.t, Loc.t) Flow_ast.program -> - (Loc.t, Loc.t * Type.t) Flow_ast.program + Loc.t Flow_ast.Comment.t list -> + (ALoc.t, ALoc.t) Flow_ast.program -> + (ALoc.t, ALoc.t * Type.t) Flow_ast.program + (* Lint suppressions are handled iff lint_severities is Some. *) -val infer_lib_file: - exclude_syms: SSet.t -> - lint_severities: Severity.severity LintSettings.t -> - file_options: Files.options option -> - file_sig: File_sig.t -> +val infer_lib_file : + exclude_syms:SSet.t -> + lint_severities:Severity.severity LintSettings.t -> + file_options:Files.options option -> + file_sig:File_sig.With_ALoc.t -> Context.t -> (Loc.t, Loc.t) Flow_ast.program -> string list diff --git a/src/typing/type_mapper.ml b/src/typing/type_mapper.ml index bdec61e8e05..a1daf6c77d3 100644 --- a/src/typing/type_mapper.ml +++ b/src/typing/type_mapper.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,1511 +8,2166 @@ open Type let maybe_known f x = - let open React.CreateClass in - begin match x with + React.CreateClass.( + match x with | Known x' -> - let x'' = f x' in - if x'' == x' then x - else Known x'' - | Unknown x -> Unknown x - end + let x'' = f x' in + if x'' == x' then + x + else + Known x'' + | Unknown x -> Unknown x) + +let unwrap_type = + let rec unwrap seen cx t = + match t with + | OpenT (_, id) -> + if ISet.mem id !seen then + t + else ( + seen := ISet.add id !seen; + Constraint.( + match Context.find_graph cx id with + | Resolved (_, t') + | FullyResolved (_, t') -> + unwrap seen cx t' + | Unresolved _ -> t) + ) + | AnnotT (_, t, _) + | ReposT (_, t) -> + unwrap seen cx t + | t -> t + in + (fun cx -> unwrap (ref ISet.empty) cx) (* NOTE: While union flattening could be performed at any time, it is most effective when we know that all tvars have been resolved. 
*) let union_flatten = - let rec union_flatten cx seen ts = - List.flatten @@ List.map (flatten cx seen) ts - and flatten cx seen t = match t with + let rec union_flatten cx seen ts = Core_list.(ts >>= flatten cx seen) + and flatten cx seen t = + match t with | OpenT (_, id) -> - if ISet.mem id !seen then [] - else begin + if ISet.mem id !seen then + [] + else ( seen := ISet.add id !seen; - match Context.find_graph cx id with - | Constraint.Resolved t' -> flatten cx seen t' - | _ -> [t] - end + Constraint.( + match Context.find_graph cx id with + | Resolved (_, t') + | FullyResolved (_, t') -> + flatten cx seen t' + | Unresolved _ -> [t]) + ) | AnnotT (_, t, _) -> flatten cx seen t | ReposT (_, t) -> flatten cx seen t - | DefT (_, UnionT rep) -> union_flatten cx seen @@ UnionRep.members rep - | DefT (r, MaybeT t) -> (DefT (r, NullT))::(DefT (r, VoidT))::(flatten cx seen t) - | DefT (r, OptionalT t) -> (DefT (r, VoidT))::(flatten cx seen t) - | DefT (_, EmptyT) -> [] + | UnionT (_, rep) -> union_flatten cx seen @@ UnionRep.members rep + | MaybeT (r, t) -> + DefT (r, Trust.bogus_trust (), NullT) + :: DefT (r, Trust.bogus_trust (), VoidT) + :: flatten cx seen t + | OptionalT (r, t) -> DefT (r, Trust.bogus_trust (), VoidT) :: flatten cx seen t + | DefT (_, _, EmptyT _) -> [] | _ -> [t] in - fun cx ts -> union_flatten cx (ref ISet.empty) ts + (fun cx ts -> union_flatten cx (ref ISet.empty) ts) (* This class should be used when trying to perform some mapping function on * a type. It will recurse through the structure of the type, applying it to * each sub-part. *) -class virtual ['a] t = object(self) - method type_ cx (map_cx: 'a) t = - match t with +class virtual ['a] t = + object (self) + method type_ cx (map_cx : 'a) t = + match t with | OpenT (r, id) -> - let id' = self#tvar cx map_cx r id in - if id' == id then t else OpenT (r, id') - | DefT (r, t') -> - let t'' = self#def_type cx map_cx t' in - if t' == t'' then t else DefT (r, t'') + let id' = self#tvar cx map_cx r id in + if id' == id then + t + else + OpenT (r, id') + | DefT (r, trust, t') -> + let t'' = self#def_type cx map_cx t' in + if t' == t'' then + t + else + DefT (r, trust, t'') | EvalT (t', dt, id) -> - let t'' = self#type_ cx map_cx t' in - let dt' = self#defer_use_type cx map_cx dt in - let id' = self#eval_id cx map_cx id in - if t' == t'' && dt == dt' && id' == id then t - else EvalT (t'', dt', id') + let t'' = self#type_ cx map_cx t' in + let dt' = self#defer_use_type cx map_cx dt in + let id' = self#eval_id cx map_cx id in + if t' == t'' && dt == dt' && id' == id then + t + else + EvalT (t'', dt', id') | BoundT _ -> t | ExistsT _ -> t | ThisClassT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ThisClassT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ThisClassT (r, t'') | ThisTypeAppT (r, t1, t2, tlist_opt) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - let tlist_opt' = OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) tlist_opt in - if t1' == t1 && t2' == t2 && tlist_opt' == tlist_opt then t - else ThisTypeAppT(r, t1', t2', tlist_opt') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + let tlist_opt' = + OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) tlist_opt + in + if t1' == t1 && t2' == t2 && tlist_opt' == tlist_opt then + t + else + ThisTypeAppT (r, t1', t2', tlist_opt') + | TypeAppT (r, op, t', ts) -> + let t'' = self#type_ cx map_cx t' in + let ts' = 
ListUtils.ident_map (self#type_ cx map_cx) ts in + if t' == t'' && ts == ts' then + t + else + TypeAppT (r, op, t'', ts') | ExactT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ExactT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ExactT (r, t'') | FunProtoT _ | ObjProtoT _ | NullProtoT _ | FunProtoApplyT _ | FunProtoBindT _ - | FunProtoCallT _ -> t - | AnyWithLowerBoundT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else AnyWithLowerBoundT t'' - | AnyWithUpperBoundT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else AnyWithUpperBoundT t'' + | FunProtoCallT _ -> + t | MergedT (r, uses) -> - let uses' = ListUtils.ident_map (self#use_type cx map_cx) uses in - if uses == uses' then t - else MergedT (r, uses') + let uses' = ListUtils.ident_map (self#use_type cx map_cx) uses in + if uses == uses' then + t + else + MergedT (r, uses') | ShapeT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ShapeT t'' + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ShapeT t'' | MatchingPropT (r, x, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else MatchingPropT (r, x, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + MatchingPropT (r, x, t'') | KeysT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else KeysT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + KeysT (r, t'') | AnnotT (r, t', use_desc) -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else AnnotT (r, t'', use_desc) + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + AnnotT (r, t'', use_desc) | OpaqueT (r, opaquetype) -> - let underlying_t = OptionUtils.ident_map (self#type_ cx map_cx) opaquetype.underlying_t in - let super_t = OptionUtils.ident_map (self#type_ cx map_cx) opaquetype.super_t in - let opaque_type_args = ListUtils.ident_map (fun x -> - let (s, r, t, p) = x in - let t' = self#type_ cx map_cx t in - if t == t' then x else (s, r, t', p) - ) opaquetype.opaque_type_args in - if underlying_t == opaquetype.underlying_t && - super_t == opaquetype.super_t && - opaque_type_args == opaquetype.opaque_type_args - then t - else OpaqueT (r, {opaquetype with underlying_t; super_t; opaque_type_args}) + let underlying_t = OptionUtils.ident_map (self#type_ cx map_cx) opaquetype.underlying_t in + let super_t = OptionUtils.ident_map (self#type_ cx map_cx) opaquetype.super_t in + let opaque_type_args = + ListUtils.ident_map + (fun x -> + let (s, r, t, p) = x in + let t' = self#type_ cx map_cx t in + if t == t' then + x + else + (s, r, t', p)) + opaquetype.opaque_type_args + in + if + underlying_t == opaquetype.underlying_t + && super_t == opaquetype.super_t + && opaque_type_args == opaquetype.opaque_type_args + then + t + else + OpaqueT (r, { opaquetype with underlying_t; super_t; opaque_type_args }) | ModuleT (r, exporttypes, is_strict) -> - let exporttypes' = self#export_types cx map_cx exporttypes in - if exporttypes == exporttypes' then t - else ModuleT (r, exporttypes', is_strict) + let exporttypes' = self#export_types cx map_cx exporttypes in + if exporttypes == exporttypes' then + t + else + ModuleT (r, exporttypes', is_strict) | InternalT (ExtendsT (r, t1, t2)) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else InternalT (ExtendsT (r, t1', t2')) + let t1' = self#type_ cx map_cx 
t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + InternalT (ExtendsT (r, t1', t2')) | InternalT (ChoiceKitT _) -> t | TypeDestructorTriggerT (u, r, repos, d, x) -> - let d' = self#destructor cx map_cx d in - let x' = self#type_ cx map_cx x in - if d == d' && x == x' then t - else TypeDestructorTriggerT (u, r, repos, d', x') + let d' = self#destructor cx map_cx d in + let x' = self#type_ cx map_cx x in + if d == d' && x == x' then + t + else + TypeDestructorTriggerT (u, r, repos, d', x') | CustomFunT (r, kind) -> - let kind' = self#custom_fun_kind cx map_cx kind in - if kind' == kind then t - else CustomFunT (r, kind') + let kind' = self#custom_fun_kind cx map_cx kind in + if kind' == kind then + t + else + CustomFunT (r, kind') | OpenPredT (r, t', map1, map2) -> - let t'' = self#type_ cx map_cx t' in - let map1' = Key_map.map (self#predicate cx map_cx) map1 in - let map2' = Key_map.map (self#predicate cx map_cx) map2 in - if t'' == t' then t - else OpenPredT (r, t'', map1', map2') + let t'' = self#type_ cx map_cx t' in + let map1' = Key_map.map (self#predicate cx map_cx) map1 in + let map2' = Key_map.map (self#predicate cx map_cx) map2 in + if t'' == t' then + t + else + OpenPredT (r, t'', map1', map2') | ReposT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ReposT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ReposT (r, t'') | InternalT (ReposUpperT (r, t')) -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else InternalT (ReposUpperT (r, t'')) + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + InternalT (ReposUpperT (r, t'')) + | AnyT _ -> t | InternalT (OptionalChainVoidT _) -> t + | OptionalT (r, t') -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + OptionalT (r, t'') + | MaybeT (r, t') -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + MaybeT (r, t'') + | IntersectionT (r, irep) -> + let irep' = InterRep.ident_map (self#type_ cx map_cx) irep in + if irep == irep' then + t + else + IntersectionT (r, irep') + | UnionT (r, urep) -> + let urep' = UnionRep.ident_map (self#type_ cx map_cx) urep in + if urep' == urep then + t + else + UnionT (r, urep') + + method virtual tvar : Context.t -> 'a -> Reason.t -> Constraint.ident -> Constraint.ident + + method targ cx map_cx t = + match t with + | ImplicitArg _ -> t + | ExplicitArg t' -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ExplicitArg t'' - method virtual tvar: Context.t -> 'a -> Reason.t -> Constraint.ident -> Constraint.ident - - method def_type cx map_cx t = - match t with + method def_type cx map_cx t = + match t with | NumT _ | StrT _ | BoolT _ - | EmptyT + | EmptyT _ | MixedT _ | NullT - | VoidT -> t + | VoidT -> + t | FunT (s, p, f) -> - let s' = self#type_ cx map_cx s in - let p' = self#type_ cx map_cx p in - let f' = self#fun_type cx map_cx f in - if s == s' && p == p' && f == f' then t - else FunT (s', p', f') + let s' = self#type_ cx map_cx s in + let p' = self#type_ cx map_cx p in + let f' = self#fun_type cx map_cx f in + if s == s' && p == p' && f == f' then + t + else + FunT (s', p', f') | ObjT objtype -> - let objtype' = self#obj_type cx map_cx objtype in - if objtype' == objtype then t - else ObjT objtype' + let objtype' = self#obj_type cx map_cx objtype in + if objtype' == objtype then + t + else + ObjT objtype' | ArrT arrtype -> - let arrtype' = self#arr_type cx map_cx arrtype in - if arrtype == 
arrtype' then t - else ArrT arrtype' + let arrtype' = self#arr_type cx map_cx arrtype in + if arrtype == arrtype' then + t + else + ArrT arrtype' | CharSetT _ -> t | ClassT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ClassT t'' + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ClassT t'' | InstanceT (st, su, impl, instt) -> - let st' = self#type_ cx map_cx st in - let su' = self#type_ cx map_cx su in - let impl' = ListUtils.ident_map (self#type_ cx map_cx) impl in - let instt' = self#inst_type cx map_cx instt in - if st' == st && su' == su && impl' == impl && instt' == instt then t - else InstanceT (st', su', impl', instt') + let st' = self#type_ cx map_cx st in + let su' = self#type_ cx map_cx su in + let impl' = ListUtils.ident_map (self#type_ cx map_cx) impl in + let instt' = self#inst_type cx map_cx instt in + if st' == st && su' == su && impl' == impl && instt' == instt then + t + else + InstanceT (st', su', impl', instt') | SingletonStrT _ | SingletonNumT _ - | SingletonBoolT _ -> t + | SingletonBoolT _ -> + t | TypeT (s, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else TypeT (s, t'') - | AnyT -> t - | OptionalT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else OptionalT t'' - | PolyT (tparamlist, t', _) -> - let tparamlist' = ListUtils.ident_map (self#type_param cx map_cx) tparamlist in - let t'' = self#type_ cx map_cx t' in - if tparamlist == tparamlist' && t' == t'' then t - else PolyT (tparamlist', t'', Reason.mk_id ()) - | TypeAppT (op, t', ts) -> - let t'' = self#type_ cx map_cx t' in - let ts' = ListUtils.ident_map (self#type_ cx map_cx) ts in - if t' == t'' && ts == ts' then t - else TypeAppT (op, t'', ts') - | MaybeT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else MaybeT t'' - | IntersectionT irep -> - let irep' = InterRep.ident_map (self#type_ cx map_cx) irep in - if irep == irep' then t - else IntersectionT irep' - | UnionT urep -> - let urep' = UnionRep.ident_map (self#type_ cx map_cx) urep in - if urep' == urep then t - else UnionT urep' - | AnyObjT - | AnyFunT -> t + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + TypeT (s, t'') + | PolyT (tparams_loc, tparamlist, t', _) -> + let tparamlist' = Nel.ident_map (self#type_param cx map_cx) tparamlist in + let t'' = self#type_ cx map_cx t' in + if tparamlist == tparamlist' && t' == t'' then + t + else + PolyT (tparams_loc, tparamlist', t'', Context.make_nominal cx) | IdxWrapper t' -> - let t'' = self#type_ cx map_cx t' in - if t' == t'' then t - else IdxWrapper t'' - - method defer_use_type cx map_cx t = - match t with - | DestructuringT (r, s) -> - let s' = self#selector cx map_cx s in - if s' == s then t - else DestructuringT (r, s') - | TypeDestructorT (u, r, d) -> + let t'' = self#type_ cx map_cx t' in + if t' == t'' then + t + else + IdxWrapper t'' + | ReactAbstractComponentT { config; instance } -> + let config' = self#type_ cx map_cx config in + let instance' = self#type_ cx map_cx instance in + if config' == config && instance' == instance then + t + else + ReactAbstractComponentT { config = config'; instance = instance' } + + method defer_use_type cx map_cx t = + match t with + | LatentPredT (r, p) -> + let p' = self#predicate cx map_cx p in + if p' == p then + t + else + LatentPredT (r, p') + | TypeDestructorT (u, r, d) -> let d' = self#destructor cx map_cx d in - if d == d' then t - else TypeDestructorT (u, r, d') - - method export_types cx map_cx ({exports_tmap; 
cjs_export; has_every_named_export} as t) = - let exports_tmap' = self#exports cx map_cx exports_tmap in - let cjs_export' = OptionUtils.ident_map (self#type_ cx map_cx) cjs_export in - if exports_tmap == exports_tmap' && cjs_export == cjs_export' then t - else {exports_tmap = exports_tmap'; cjs_export = cjs_export'; has_every_named_export} - - method fun_type cx map_cx ({ this_t; - params; - rest_param; - return_t; - closure_t; - is_predicate; - changeset; - def_reason } as t) = - let this_t' = self#type_ cx map_cx this_t in - let params' = ListUtils.ident_map (fun ((name, t) as param) -> - let t' = self#type_ cx map_cx t in - if t' == t then param else (name, t') - ) params in - let rest_param' = match rest_param with - | None -> rest_param - | Some (name, loc, t) -> - let t' = self#type_ cx map_cx t in - if t' == t then rest_param else Some (name, loc, t') - in - let return_t' = self#type_ cx map_cx return_t in - if this_t' == this_t && - return_t' == return_t && - params' == params && - rest_param' == rest_param then t - else - let this_t = this_t' in - let return_t = return_t' in - let params = params' in - let rest_param = rest_param' in - {this_t; params; rest_param; return_t; - closure_t; is_predicate; changeset; def_reason} - - method inst_type cx map_cx i = - let { - class_id; - type_args; - own_props; - proto_props; - inst_call_t; - initialized_fields; - initialized_static_fields; - has_unknown_react_mixins; - structural - } = i in - let type_args' = ListUtils.ident_map (fun x -> - let (s, r, t, p) = x in - let t' = self#type_ cx map_cx t in - if t == t' then x else (s, r, t', p) - ) type_args in - let own_props' = self#props cx map_cx own_props in - let proto_props' = self#props cx map_cx proto_props in - let inst_call_t' = OptionUtils.ident_map (self#call_prop cx map_cx) inst_call_t in - if ( - type_args == type_args' && - own_props == own_props' && - proto_props == proto_props' && - inst_call_t == inst_call_t' - ) - then i - else { - class_id; - type_args = type_args'; - own_props = own_props'; - proto_props = proto_props'; - inst_call_t = inst_call_t'; - initialized_fields; - initialized_static_fields; - has_unknown_react_mixins; - structural; - } - - method type_param cx map_cx ({reason; name; bound; polarity; default} as t) = - let bound' = self#type_ cx map_cx bound in - let default' = OptionUtils.ident_map (self#type_ cx map_cx) default in - if bound == bound' && default == default' then t - else - let bound = bound' in - let default = default' in - {reason; name; bound; polarity; default} - - method selector cx map_cx t = - match t with + if d == d' then + t + else + TypeDestructorT (u, r, d') + + method export_types cx map_cx ({ exports_tmap; cjs_export; has_every_named_export } as t) = + let exports_tmap' = self#exports cx map_cx exports_tmap in + let cjs_export' = OptionUtils.ident_map (self#type_ cx map_cx) cjs_export in + if exports_tmap == exports_tmap' && cjs_export == cjs_export' then + t + else + { exports_tmap = exports_tmap'; cjs_export = cjs_export'; has_every_named_export } + + method fun_type + cx + map_cx + ( { this_t; params; rest_param; return_t; closure_t; is_predicate; changeset; def_reason } + as t ) = + let this_t' = self#type_ cx map_cx this_t in + let params' = + ListUtils.ident_map + (fun ((name, t) as param) -> + let t' = self#type_ cx map_cx t in + if t' == t then + param + else + (name, t')) + params + in + let rest_param' = + match rest_param with + | None -> rest_param + | Some (name, loc, t) -> + let t' = self#type_ cx map_cx t in + if t' 
== t then + rest_param + else + Some (name, loc, t') + in + let return_t' = self#type_ cx map_cx return_t in + if + this_t' == this_t + && return_t' == return_t + && params' == params + && rest_param' == rest_param + then + t + else + let this_t = this_t' in + let return_t = return_t' in + let params = params' in + let rest_param = rest_param' in + { this_t; params; rest_param; return_t; closure_t; is_predicate; changeset; def_reason } + + method inst_type cx map_cx i = + let { + class_id; + type_args; + own_props; + proto_props; + inst_call_t; + initialized_fields; + initialized_static_fields; + has_unknown_react_mixins; + inst_kind; + } = + i + in + let type_args' = + ListUtils.ident_map + (fun x -> + let (s, r, t, p) = x in + let t' = self#type_ cx map_cx t in + if t == t' then + x + else + (s, r, t', p)) + type_args + in + let own_props' = self#props cx map_cx own_props in + let proto_props' = self#props cx map_cx proto_props in + let inst_call_t' = OptionUtils.ident_map (self#call_prop cx map_cx) inst_call_t in + if + type_args == type_args' + && own_props == own_props' + && proto_props == proto_props' + && inst_call_t == inst_call_t' + then + i + else + { + class_id; + type_args = type_args'; + own_props = own_props'; + proto_props = proto_props'; + inst_call_t = inst_call_t'; + initialized_fields; + initialized_static_fields; + has_unknown_react_mixins; + inst_kind; + } + + method type_param cx map_cx ({ reason; name; bound; polarity; default } as t) = + let bound' = self#type_ cx map_cx bound in + let default' = OptionUtils.ident_map (self#type_ cx map_cx) default in + if bound == bound' && default == default' then + t + else + let bound = bound' in + let default = default' in + { reason; name; bound; polarity; default } + + method selector cx map_cx t = + match t with | Prop _ -> t | Elem t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else Elem t'' + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + Elem t'' | ObjRest _ | ArrRest _ - | Default - | Become -> t - | Refine p -> - let p' = self#predicate cx map_cx p in - if p' == p then t - else Refine p' - - method destructor cx map_cx t = - match t with + | Default -> + t + + method destructor cx map_cx t = + match t with | NonMaybeType - | PropertyType _ -> t + | PropertyType _ -> + t | ElementType t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ElementType t'' + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ElementType t'' | Bind t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else Bind t'' + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + Bind t'' | ReadOnlyType -> t - | SpreadType (options, tlist) -> - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - if tlist' == tlist then t - else SpreadType (options, tlist') + | SpreadType (options, tlist, acc) -> + let tlist' = ListUtils.ident_map (self#object_kit_spread_operand cx map_cx) tlist in + let acc' = OptionUtils.ident_map (self#object_kit_spread_operand_slice cx map_cx) acc in + if tlist' == tlist && acc == acc' then + t + else + SpreadType (options, tlist', acc') | RestType (options, x) -> - let x' = self#type_ cx map_cx x in - if x' == x then t - else RestType (options, x') + let x' = self#type_ cx map_cx x in + if x' == x then + t + else + RestType (options, x') | ValuesType -> t | CallType args -> - let args' = ListUtils.ident_map (self#type_ cx map_cx) args in - if args' == args then t - else CallType args' + let args' = 
ListUtils.ident_map (self#type_ cx map_cx) args in + if args' == args then + t + else + CallType args' | TypeMap tmap -> - let tmap' = self#type_map cx map_cx tmap in - if tmap' == tmap then t - else TypeMap tmap' + let tmap' = self#type_map cx map_cx tmap in + if tmap' == tmap then + t + else + TypeMap tmap' + | ReactConfigType default_props -> + let default_props' = self#type_ cx map_cx default_props in + if default_props' == default_props then + t + else + ReactConfigType default_props' | ReactElementPropsType | ReactElementConfigType - | ReactElementRefType - -> t + | ReactElementRefType -> + t - method private custom_fun_kind cx map_cx kind = - match kind with - | ReactPropType (React.PropType.Primitive (b, t)) -> - let t' = self#type_ cx map_cx t in - if t' == t then kind - else ReactPropType (React.PropType.Primitive (b, t')) - | ReactElementFactory t -> - let t' = self#type_ cx map_cx t in - if t' == t then kind - else ReactElementFactory t' - | ObjectAssign - | ObjectGetPrototypeOf - | ObjectSetPrototypeOf - | Compose _ - | ReactPropType _ - | ReactCreateClass - | ReactCreateElement - | ReactCloneElement - | Idx - | TypeAssertIs - | TypeAssertThrows - | TypeAssertWraps - | DebugPrint - | DebugThrow - | DebugSleep - -> kind - - method virtual exports: Context.t -> 'a -> Type.Exports.id -> Type.Exports.id - - method obj_type cx map_cx t = - let { flags; dict_t; props_tmap; proto_t; call_t } = t in - let dict_t' = OptionUtils.ident_map (self#dict_type cx map_cx) dict_t in - let props_tmap' = self#props cx map_cx props_tmap in - let proto_t' = self#type_ cx map_cx proto_t in - let call_t' = OptionUtils.ident_map (self#call_prop cx map_cx) call_t in - if dict_t' == dict_t && props_tmap' == props_tmap && proto_t' == proto_t && call_t' == call_t - then t - else { - flags; - dict_t = dict_t'; - props_tmap = props_tmap'; - proto_t = proto_t'; - call_t = call_t'; - } - - method virtual call_prop: Context.t -> 'a -> int -> int - - method dict_type cx map_cx ({dict_name; key; value; dict_polarity} as t) = - let key' = self#type_ cx map_cx key in - let value' = self#type_ cx map_cx value in - if key' == key && value' == value then t - else - let key = key' in - let value = value' in - {dict_name; key; value; dict_polarity} - - method arr_type cx map_cx t = - match t with - | ArrayAT (t', tlistopt) -> + method object_kit_spread_operand_slice + cx map_cx ({ Object.Spread.reason; prop_map; dict } as slice) = + let prop_map' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) prop_map in + let dict' = OptionUtils.ident_map (self#dict_type cx map_cx) dict in + if prop_map' == prop_map && dict' == dict then + slice + else + { Object.Spread.reason; prop_map = prop_map'; dict = dict' } + + method object_kit_spread_operand cx map_cx operand = + Object.Spread.( + match operand with + | Slice slice -> + let slice' = self#object_kit_spread_operand_slice cx map_cx slice in + if slice' == slice then + operand + else + Slice slice' + | Type t -> + let t' = self#type_ cx map_cx t in + if t' == t then + operand + else + Type t') + + method private custom_fun_kind cx map_cx kind = + match kind with + | ReactPropType (React.PropType.Primitive (b, t)) -> + let t' = self#type_ cx map_cx t in + if t' == t then + kind + else + ReactPropType (React.PropType.Primitive (b, t')) + | ReactElementFactory t -> + let t' = self#type_ cx map_cx t in + if t' == t then + kind + else + ReactElementFactory t' + | ObjectAssign + | ObjectGetPrototypeOf + | ObjectSetPrototypeOf + | Compose _ + | ReactPropType _ + | 
ReactCreateClass + | ReactCreateElement + | ReactCloneElement + | Idx + | TypeAssertIs + | TypeAssertThrows + | TypeAssertWraps + | DebugPrint + | DebugThrow + | DebugSleep -> + kind + + method virtual exports : Context.t -> 'a -> Type.Exports.id -> Type.Exports.id + + method obj_type cx map_cx t = + let { flags; dict_t; props_tmap; proto_t; call_t } = t in + let dict_t' = OptionUtils.ident_map (self#dict_type cx map_cx) dict_t in + let props_tmap' = self#props cx map_cx props_tmap in + let proto_t' = self#type_ cx map_cx proto_t in + let call_t' = OptionUtils.ident_map (self#call_prop cx map_cx) call_t in + if dict_t' == dict_t && props_tmap' == props_tmap && proto_t' == proto_t && call_t' == call_t + then + t + else + { flags; dict_t = dict_t'; props_tmap = props_tmap'; proto_t = proto_t'; call_t = call_t' } + + method virtual call_prop : Context.t -> 'a -> int -> int + + method dict_type cx map_cx ({ dict_name; key; value; dict_polarity } as t) = + let key' = self#type_ cx map_cx key in + let value' = self#type_ cx map_cx value in + if key' == key && value' == value then + t + else + let key = key' in + let value = value' in + { dict_name; key; value; dict_polarity } + + method arr_type cx map_cx t = + match t with + | ArrayAT (t', tlistopt) -> let t'' = self#type_ cx map_cx t' in let tlistopt' = - OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) tlistopt in - if t'' == t' && tlistopt' == tlistopt then t - else ArrayAT (t'', tlistopt') - | TupleAT (t', tlist) -> + OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) tlistopt + in + if t'' == t' && tlistopt' == tlistopt then + t + else + ArrayAT (t'', tlistopt') + | TupleAT (t', tlist) -> let t'' = self#type_ cx map_cx t' in let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - if t'' == t' && tlist' == tlist then t - else TupleAT(t'', tlist') - | ROArrayAT t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ROArrayAT t'' - | EmptyAT -> t - - method bounds cx map_cx t = - let open Constraint in - let lower' = TypeMap.ident_map_key (self#type_ cx map_cx) t.lower in - if lower' != t.lower then - t.lower <- lower'; - let upper' = UseTypeMap.ident_map_key (self#use_type cx map_cx) t.upper in - if upper' != t.upper then - t.upper <- upper'; - t - - method virtual use_type: Context.t -> 'a -> Type.UseTypeMap.key -> Type.UseTypeMap.key - - method predicate cx map_cx p = - match p with - | AndP (p1, p2) -> + if t'' == t' && tlist' == tlist then + t + else + TupleAT (t'', tlist') + | ROArrayAT t' -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ROArrayAT t'' + + method bounds cx map_cx t = + Constraint.( + let lower' = TypeMap.ident_map_key (self#type_ cx map_cx) t.lower in + if lower' != t.lower then t.lower <- lower'; + let upper' = UseTypeMap.ident_map_key (self#use_type cx map_cx) t.upper in + if upper' != t.upper then t.upper <- upper'; + t) + + method virtual use_type : Context.t -> 'a -> Type.use_t -> Type.use_t + + method predicate cx map_cx p = + match p with + | AndP (p1, p2) -> let p1' = self#predicate cx map_cx p1 in let p2' = self#predicate cx map_cx p2 in - if p1' == p1 && p2' == p2 then p - else AndP (p1', p2') - | OrP (p1, p2) -> + if p1' == p1 && p2' == p2 then + p + else + AndP (p1', p2') + | OrP (p1, p2) -> let p1' = self#predicate cx map_cx p1 in let p2' = self#predicate cx map_cx p2 in - if p1' == p1 && p2' == p2 then p - else OrP (p1', p2') - | NotP p' -> + if p1' == p1 && p2' == p2 then + p + else + OrP (p1', p2') + | NotP p' -> let 
p'' = self#predicate cx map_cx p' in - if p'' == p' then p - else NotP p'' - | LeftP (test, t) -> + if p'' == p' then + p + else + NotP p'' + | LeftP (test, t) -> let t' = self#type_ cx map_cx t in - if t' == t then p - else LeftP (test, t') - | RightP (test, t) -> + if t' == t then + p + else + LeftP (test, t') + | RightP (test, t) -> let t' = self#type_ cx map_cx t in - if t' == t then p - else RightP (test, t') - | ExistsP _ - | NullP - | MaybeP - | SingletonBoolP _ - | SingletonStrP _ - | SingletonNumP _ - | BoolP - | FunP - | NumP - | ObjP - | StrP - | VoidP - | ArrP - | PropExistsP _ -> p - | LatentP (t, i) -> + if t' == t then + p + else + RightP (test, t') + | ExistsP _ + | NullP + | MaybeP + | SingletonBoolP _ + | SingletonStrP _ + | SingletonNumP _ + | BoolP + | FunP + | NumP + | ObjP + | StrP + | SymbolP + | VoidP + | ArrP + | PropExistsP _ -> + p + | LatentP (t, i) -> let t' = self#type_ cx map_cx t in - if t' == t then p - else LatentP (t', i) - - method private read_write cx map_cx rw = - match rw with - | Read -> rw - | Write (wr_ctx, prop_t) -> - let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in - if prop_t' == prop_t then rw - else Write (wr_ctx, prop_t') - - method type_map cx map_cx t = - match t with - | TupleMap t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else TupleMap t'' - | ObjectMap t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ObjectMap t'' - | ObjectMapi t' -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ObjectMapi t'' - - method virtual props: Context.t -> 'a -> Properties.id -> Properties.id - - method virtual eval_id: Context.t -> 'a -> IMap.key -> IMap.key - - method prop cx map_cx prop = - match prop with - | Field (l, t, p) -> + if t' == t then + p + else + LatentP (t', i) + + method type_map cx map_cx t = + match t with + | TupleMap t' -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + TupleMap t'' + | ObjectMap t' -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ObjectMap t'' + | ObjectMapi t' -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ObjectMapi t'' + + method virtual props : Context.t -> 'a -> Properties.id -> Properties.id + + method virtual eval_id : Context.t -> 'a -> int -> int + + method prop cx map_cx prop = + match prop with + | Field (l, t, p) -> let t' = self#type_ cx map_cx t in - if t == t' then prop - else Field (l, t', p) - | Method (l, t) -> + if t == t' then + prop + else + Field (l, t', p) + | Method (l, t) -> let t' = self#type_ cx map_cx t in - if t == t' then prop - else Method (l, t') - | Get (l, t) -> + if t == t' then + prop + else + Method (l, t') + | Get (l, t) -> let t' = self#type_ cx map_cx t in - if t == t' then prop - else Get (l, t') - | Set (l, t) -> + if t == t' then + prop + else + Get (l, t') + | Set (l, t) -> let t' = self#type_ cx map_cx t in - if t == t' then prop - else Set (l, t') - | GetSet (l1, t1, l2, t2) -> + if t == t' then + prop + else + Set (l, t') + | GetSet (l1, t1, l2, t2) -> let t1' = self#type_ cx map_cx t1 in let t2' = self#type_ cx map_cx t2 in - if t1 == t1' && t2 == t2' then prop - else GetSet(l1, t1', l2, t2') -end + if t1 == t1' && t2 == t2' then + prop + else + GetSet (l1, t1', l2, t2') + end + +class virtual ['a] t_with_uses = + object (self) + inherit ['a] t as _super -class virtual ['a] t_with_uses = object(self) - inherit ['a] t as _super method use_type cx map_cx t = match t with | UseT (u, t') -> - let t'' = 
self#type_ cx map_cx t'; in - if t'' == t' then t - else UseT (u, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + UseT (u, t'') | BindT (op, r, funcall, passthrough) -> - let funcall' = self#fun_call_type cx map_cx funcall in - if funcall == funcall' then t - else BindT (op, r, funcall', passthrough) + let funcall' = self#fun_call_type cx map_cx funcall in + if funcall == funcall' then + t + else + BindT (op, r, funcall', passthrough) | CallT (op, r, funcall) -> - let funcall' = self#fun_call_type cx map_cx funcall in - if funcall == funcall' then t - else CallT (op, r, funcall') + let funcall' = self#fun_call_type cx map_cx funcall in + if funcall == funcall' then + t + else + CallT (op, r, funcall') | MethodT (op, r1, r2, prop, funcall, prop_t) -> - let prop' = self#prop_ref cx map_cx prop in - let funcall' = self#fun_call_type cx map_cx funcall in - let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in - if prop' == prop && funcall' == funcall && prop_t' == prop_t then t - else MethodT (op, r1, r2, prop', funcall', prop_t') - | SetPropT (use_op, r, prop, i, t', prop_t) -> - let prop' = self#prop_ref cx map_cx prop in - let t'' = self#type_ cx map_cx t' in - let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in - if prop' == prop && t'' == t' && prop_t' == prop_t then t - else SetPropT (use_op, r, prop', i, t'', prop_t') - | SetPrivatePropT (use_op, r, prop, scopes, static, t', prop_t) -> - let t'' = self#type_ cx map_cx t' in - let scopes' = ListUtils.ident_map (self#class_binding cx map_cx) scopes in - let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in - if t'' == t' && scopes' == scopes && prop_t' == prop_t then t - else SetPrivatePropT (use_op, r, prop, scopes', static, t'', prop_t') + let prop' = self#prop_ref cx map_cx prop in + let funcall' = self#fun_call_type cx map_cx funcall in + let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in + if prop' == prop && funcall' == funcall && prop_t' == prop_t then + t + else + MethodT (op, r1, r2, prop', funcall', prop_t') + | SetPropT (use_op, r, prop, mode, i, t', prop_t) -> + let prop' = self#prop_ref cx map_cx prop in + let t'' = self#type_ cx map_cx t' in + let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in + if prop' == prop && t'' == t' && prop_t' == prop_t then + t + else + SetPropT (use_op, r, prop', mode, i, t'', prop_t') + | SetPrivatePropT (use_op, r, prop, mode, scopes, static, t', prop_t) -> + let t'' = self#type_ cx map_cx t' in + let scopes' = ListUtils.ident_map (self#class_binding cx map_cx) scopes in + let prop_t' = OptionUtils.ident_map (self#type_ cx map_cx) prop_t in + if t'' == t' && scopes' == scopes && prop_t' == prop_t then + t + else + SetPrivatePropT (use_op, r, prop, mode, scopes', static, t'', prop_t') | GetPropT (use_op, r, prop, t') -> - let prop' = self#prop_ref cx map_cx prop in - let t'' = self#type_ cx map_cx t' in - if prop' == prop && t'' == t' then t - else GetPropT (use_op, r, prop', t'') + let prop' = self#prop_ref cx map_cx prop in + let t'' = self#type_ cx map_cx t' in + if prop' == prop && t'' == t' then + t + else + GetPropT (use_op, r, prop', t'') | MatchPropT (use_op, r, prop, t') -> - let prop' = self#prop_ref cx map_cx prop in - let t'' = self#type_ cx map_cx t' in - if prop' == prop && t'' == t' then t - else MatchPropT (use_op, r, prop', t'') + let prop' = self#prop_ref cx map_cx prop in + let t'' = self#type_ cx map_cx t' in + if prop' == prop && t'' == t' then + t + else + 
MatchPropT (use_op, r, prop', t'') | GetPrivatePropT (use_op, r, prop, scopes, static, t') -> - let t'' = self#type_ cx map_cx t' in - let scopes' = ListUtils.ident_map (self#class_binding cx map_cx) scopes in - if t'' == t' && scopes' == scopes then t - else GetPrivatePropT (use_op, r, prop, scopes', static, t'') + let t'' = self#type_ cx map_cx t' in + let scopes' = ListUtils.ident_map (self#class_binding cx map_cx) scopes in + if t'' == t' && scopes' == scopes then + t + else + GetPrivatePropT (use_op, r, prop, scopes', static, t'') | TestPropT (r, id, prop, t') -> - let prop' = self#prop_ref cx map_cx prop in - let t'' = self#type_ cx map_cx t' in - if prop' == prop && t'' == t' then t - else TestPropT (r, id, prop', t'') - | SetElemT (use_op, r, t1, t2, t3) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - let t3' = OptionUtils.ident_map (self#type_ cx map_cx) t3 in - if t1' == t1 && t2' == t2 && t3' == t3 then t - else SetElemT (use_op, r, t1', t2', t3') + let prop' = self#prop_ref cx map_cx prop in + let t'' = self#type_ cx map_cx t' in + if prop' == prop && t'' == t' then + t + else + TestPropT (r, id, prop', t'') + | SetElemT (use_op, r, t1, m, t2, t3) -> + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + let t3' = OptionUtils.ident_map (self#type_ cx map_cx) t3 in + if t1' == t1 && t2' == t2 && t3' == t3 then + t + else + SetElemT (use_op, r, t1', m, t2', t3') | GetElemT (use_op, r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else GetElemT (use_op, r, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + GetElemT (use_op, r, t1', t2') | CallElemT (r1, r2, t', funcall) -> - let t'' = self#type_ cx map_cx t' in - let funcall' = self#fun_call_type cx map_cx funcall in - if t' == t'' && funcall' == funcall then t - else CallElemT (r1, r2, t'', funcall') + let t'' = self#type_ cx map_cx t' in + let funcall' = self#fun_call_type cx map_cx funcall in + if t' == t'' && funcall' == funcall then + t + else + CallElemT (r1, r2, t'', funcall') | GetStaticsT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else GetStaticsT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + GetStaticsT (r, t'') | GetProtoT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else GetProtoT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + GetProtoT (r, t'') | SetProtoT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else SetProtoT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + SetProtoT (r, t'') | ReposLowerT (r, use_desc, use) -> - let use' = self#use_type cx map_cx use in - if use' == use then t - else ReposLowerT (r, use_desc, use') + let use' = self#use_type cx map_cx use in + if use' == use then + t + else + ReposLowerT (r, use_desc, use') | ReposUseT (r, use_desc, use_op, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ReposUseT (r, use_desc, use_op, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ReposUseT (r, use_desc, use_op, t'') | ConstructorT (op, r, targs, args, t') -> - let targs' = OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) targs in - let args' = ListUtils.ident_map (self#call_arg cx map_cx) args in - let t'' = self#type_ cx map_cx 
t' in - if targs' == targs && args' == args && t'' == t' then t - else ConstructorT (op, r, targs', args', t'') - | SuperT (op, r, Derived {own=o; proto=p; static=s}) -> - let o' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) o in - let p' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) p in - let s' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) s in - if o' == o && p' == p && s' == s then t - else SuperT (op, r, Derived {own=o'; proto=p'; static=s'}) + let targs' = OptionUtils.ident_map (ListUtils.ident_map (self#targ cx map_cx)) targs in + let args' = ListUtils.ident_map (self#call_arg cx map_cx) args in + let t'' = self#type_ cx map_cx t' in + if targs' == targs && args' == args && t'' == t' then + t + else + ConstructorT (op, r, targs', args', t'') + | SuperT (op, r, Derived { own = o; proto = p; static = s }) -> + let o' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) o in + let p' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) p in + let s' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) s in + if o' == o && p' == p && s' == s then + t + else + SuperT (op, r, Derived { own = o'; proto = p'; static = s' }) | ImplementsT (use_op, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ImplementsT (use_op, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ImplementsT (use_op, t'') | MixinT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else MixinT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + MixinT (r, t'') | ToStringT (r, t') -> - let t'' = self#use_type cx map_cx t' in - if t'' == t' then t - else ToStringT (r, t'') + let t'' = self#use_type cx map_cx t' in + if t'' == t' then + t + else + ToStringT (r, t'') | AdderT (op, r, flip, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else AdderT (op, r, flip, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + AdderT (op, r, flip, t1', t2') | ComparatorT (r, flip, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ComparatorT (r, flip, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ComparatorT (r, flip, t'') | UnaryMinusT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else UnaryMinusT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + UnaryMinusT (r, t'') | AssertArithmeticOperandT _ | AssertBinaryInLHST _ | AssertBinaryInRHST _ - | AssertForInRHST _ - | AssertRestParamT _ -> t + | AssertForInRHST _ -> + t | PredicateT (p, t') -> - let p' = self#predicate cx map_cx p in - let t'' = self#type_ cx map_cx t' in - if p' == p && t'' == t' then t - else PredicateT (p', t'') + let p' = self#predicate cx map_cx p in + let t'' = self#type_ cx map_cx t' in + if p' == p && t'' == t' then + t + else + PredicateT (p', t'') | GuardT (p, t1, t2) -> - let p' = self#predicate cx map_cx p in - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if p' == p && t1' == t1 && t2' == t2 then t - else GuardT (p', t1', t2') + let p' = self#predicate cx map_cx p in + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if p' == p && t1' == t1 && t2' == t2 then + t + else + GuardT (p', t1', t2') | EqT (r, flip, t') -> - let t'' = self#type_ cx map_cx t' 
in - if t'' == t' then t - else EqT (r, flip, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + EqT (r, flip, t'') | AndT (r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else AndT (r, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + AndT (r, t1', t2') | OrT (r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else OrT (r, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + OrT (r, t1', t2') | NullishCoalesceT (r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else NullishCoalesceT (r, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + NullishCoalesceT (r, t1', t2') | NotT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else NotT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + NotT (r, t'') | SpecializeT (u, r1, r2, cache, tlist_opt, t') -> - let tlist_opt' = - OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) tlist_opt in - let t'' = self#type_ cx map_cx t' in - if tlist_opt' == tlist_opt && t'' == t' then t - else SpecializeT (u, r1, r2, cache, tlist_opt', t'') + let tlist_opt' = + OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) tlist_opt + in + let t'' = self#type_ cx map_cx t' in + if tlist_opt' == tlist_opt && t'' == t' then + t + else + SpecializeT (u, r1, r2, cache, tlist_opt', t'') | ThisSpecializeT (r, this, k) -> - let this' = self#type_ cx map_cx this in - let k' = self#cont cx map_cx k in - if this' == this && k' == k then t - else ThisSpecializeT (r, this', k') + let this' = self#type_ cx map_cx this in + let k' = self#cont cx map_cx k in + if this' == this && k' == k then + t + else + ThisSpecializeT (r, this', k') | VarianceCheckT (r, tlist, p) -> - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - if tlist' == tlist then t - else VarianceCheckT (r, tlist', p) + let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in + if tlist' == tlist then + t + else + VarianceCheckT (r, tlist', p) | TypeAppVarianceCheckT (use_op, r1, r2, tpairlist) -> - let tpairlist' = ListUtils.ident_map (fun ((x,y) as z) -> - let x' = self#type_ cx map_cx x in - let y' = self#type_ cx map_cx y in - if x' == x && y' == y then z - else (x', y')) - tpairlist in - if tpairlist' == tpairlist then t - else TypeAppVarianceCheckT (use_op, r1, r2, tpairlist') + let tpairlist' = + ListUtils.ident_map + (fun ((x, y) as z) -> + let x' = self#type_ cx map_cx x in + let y' = self#type_ cx map_cx y in + if x' == x && y' == y then + z + else + (x', y')) + tpairlist + in + if tpairlist' == tpairlist then + t + else + TypeAppVarianceCheckT (use_op, r1, r2, tpairlist') | ConcretizeTypeAppsT (use_op, (ts1, op1, r1), (t2, ts2, op2, r2), flip) -> - let ts1' = ListUtils.ident_map (self#type_ cx map_cx) ts1 in - let t2' = self#type_ cx map_cx t2 in - let ts2' = ListUtils.ident_map (self#type_ cx map_cx) ts2 in - if ts1' == ts1 && t2' == t2 && ts2' == ts2 then t - else ConcretizeTypeAppsT (use_op, (ts1', op1, r1), (t2', ts2', op2, r2), flip) + let ts1' = ListUtils.ident_map (self#type_ cx map_cx) ts1 in 
+ let t2' = self#type_ cx map_cx t2 in + let ts2' = ListUtils.ident_map (self#type_ cx map_cx) ts2 in + if ts1' == ts1 && t2' == t2 && ts2' == ts2 then + t + else + ConcretizeTypeAppsT (use_op, (ts1', op1, r1), (t2', ts2', op2, r2), flip) | LookupT (r, lookup, tlist, prop, action) -> - let lookup' = self#lookup_kind cx map_cx lookup in - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - let prop' = self#prop_ref cx map_cx prop in - let action' = self#lookup_action cx map_cx action in - if lookup' == lookup && tlist' == tlist && prop' == prop && action' == action then t - else LookupT (r, lookup', tlist', prop', action') - | ObjAssignToT (r, t1, t2, obj_assign) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else ObjAssignToT (r, t1', t2', obj_assign) - | ObjAssignFromT (r, t1, t2, obj_assign) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else ObjAssignFromT (r, t1', t2', obj_assign) + let lookup' = self#lookup_kind cx map_cx lookup in + let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in + let prop' = self#prop_ref cx map_cx prop in + let action' = self#lookup_action cx map_cx action in + if lookup' == lookup && tlist' == tlist && prop' == prop && action' == action then + t + else + LookupT (r, lookup', tlist', prop', action') + | ObjAssignToT (op, r, t1, t2, obj_assign) -> + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + ObjAssignToT (op, r, t1', t2', obj_assign) + | ObjAssignFromT (op, r, t1, t2, obj_assign) -> + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + ObjAssignFromT (op, r, t1', t2', obj_assign) | ObjFreezeT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ObjFreezeT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ObjFreezeT (r, t'') | ObjRestT (r, strings, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ObjRestT (r, strings, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ObjRestT (r, strings, t'') | ObjSealT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ObjSealT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ObjSealT (r, t'') | ObjTestT (r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else ObjTestT (r, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + ObjTestT (r, t1', t2') | ObjTestProtoT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ObjTestProtoT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ObjTestProtoT (r, t'') | ArrRestT (op, r, i, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ArrRestT (op, r, i, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ArrRestT (op, r, i, t'') | UnifyT (t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else UnifyT (t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + UnifyT (t1', t2') | BecomeT (r, t') -> - 
let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else BecomeT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + BecomeT (r, t'') | GetKeysT (r, t') -> - let t'' = self#use_type cx map_cx t' in - if t'' == t' then t - else GetKeysT (r, t'') + let t'' = self#use_type cx map_cx t' in + if t'' == t' then + t + else + GetKeysT (r, t'') | HasOwnPropT _ -> t | GetValuesT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else GetValuesT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + GetValuesT (r, t'') + | ReactPropsToOut (r, t') -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ReactPropsToOut (r, t'') + | ReactInToProps (r, t') -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ReactInToProps (r, t'') | ElemT (use_op, r, t', action) -> - let t'' = self#type_ cx map_cx t' in - let action' = self#elem_action cx map_cx action in - if t'' == t' && action' == action then t - else ElemT (use_op, r, t'', action') + let t'' = self#type_ cx map_cx t' in + let action' = self#elem_action cx map_cx action in + if t'' == t' && action' == action then + t + else + ElemT (use_op, r, t'', action') | MakeExactT (r, cont) -> - let cont' = self#cont cx map_cx cont in - if cont' == cont then t - else MakeExactT (r, cont') + let cont' = self#cont cx map_cx cont in + if cont' == cont then + t + else + MakeExactT (r, cont') | CJSRequireT (r, t', is_strict) -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else CJSRequireT (r, t'', is_strict) + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + CJSRequireT (r, t'', is_strict) | ImportModuleNsT (r, t', is_strict) -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ImportModuleNsT (r, t'', is_strict) + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ImportModuleNsT (r, t'', is_strict) | ImportDefaultT (r, import, s, t', is_strict) -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ImportDefaultT (r, import, s, t'', is_strict) + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ImportDefaultT (r, import, s, t'', is_strict) | ImportNamedT (r, import, s, m, t', is_strict) -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ImportNamedT (r, import, s, m, t'', is_strict) + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ImportNamedT (r, import, s, m, t'', is_strict) | ImportTypeT (r, s, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ImportTypeT (r, s, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ImportTypeT (r, s, t'') | ImportTypeofT (r, s, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ImportTypeofT (r, s, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + ImportTypeofT (r, s, t'') | AssertImportIsValueT _ -> t | CJSExtractNamedExportsT (r1, (r2, exports, is_strict), t') -> - let exports' = self#export_types cx map_cx exports in - let t'' = self#type_ cx map_cx t' in - if exports' == exports && t'' == t' then t - else CJSExtractNamedExportsT (r1, (r2, exports', is_strict), t'') + let exports' = self#export_types cx map_cx exports in + let t'' = self#type_ cx map_cx t' in + if exports' == exports && t'' == t' then + t + else + CJSExtractNamedExportsT (r1, (r2, exports', is_strict), t'') | CopyNamedExportsT (r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - 
let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else CopyNamedExportsT (r, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + CopyNamedExportsT (r, t1', t2') | CopyTypeExportsT (r, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else CopyTypeExportsT (r, t1', t2') - | ExportNamedT (r, skip, tmap, t') -> - let map_loc_type_pair ((loc, t) as orig) = - let t' = self#type_ cx map_cx t in - if t == t' then orig else (loc, t') - in - let tmap' = SMap.ident_map map_loc_type_pair tmap in - let t'' = self#type_ cx map_cx t' in - if tmap' == tmap && t'' == t' then t - else ExportNamedT (r, skip, tmap', t'') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + CopyTypeExportsT (r, t1', t2') + | ExportNamedT (r, skip, tmap, export_kind, t') -> + let map_loc_type_pair ((loc, t) as orig) = + let t' = self#type_ cx map_cx t in + if t == t' then + orig + else + (loc, t') + in + let tmap' = SMap.ident_map map_loc_type_pair tmap in + let t'' = self#type_ cx map_cx t' in + if tmap' == tmap && t'' == t' then + t + else + ExportNamedT (r, skip, tmap', export_kind, t'') | ExportTypeT (r, skip, name, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else ExportTypeT (r, skip, name, t1', t2') - | MapTypeT (r, tmap, t') -> - let tmap' = self#type_map cx map_cx tmap in - let t'' = self#type_ cx map_cx t' in - if tmap' == tmap && t'' == t' then t - else MapTypeT (r, tmap', t'') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + ExportTypeT (r, skip, name, t1', t2') + | AssertExportIsTypeT (r, name, t1) -> + let t1' = self#type_ cx map_cx t1 in + if t1' == t1 then + t + else + AssertExportIsTypeT (r, name, t1') + | MapTypeT (use_op, r, tmap, t') -> + let tmap' = self#type_map cx map_cx tmap in + let t'' = self#type_ cx map_cx t' in + if tmap' == tmap && t'' == t' then + t + else + MapTypeT (use_op, r, tmap', t'') | ReactKitT (use_op, r, react_tool) -> - let react_tool' = self#react_tool cx map_cx react_tool in - if react_tool' == react_tool then t - else ReactKitT (use_op, r, react_tool') + let react_tool' = self#react_tool cx map_cx react_tool in + if react_tool' == react_tool then + t + else + ReactKitT (use_op, r, react_tool') | ObjKitT (use_op, r, resolve_tool, tool, tout) -> - let resolve_tool' = self#object_kit_resolve_tool cx map_cx resolve_tool in - let tool' = self#object_kit_tool cx map_cx tool in - let tout' = self#type_ cx map_cx tout in - if resolve_tool' == resolve_tool && tool' == tool && tout' == tout then t - else ObjKitT (use_op, r, resolve_tool', tool', tout') + let resolve_tool' = self#object_kit_resolve_tool cx map_cx resolve_tool in + let tool' = self#object_kit_tool cx map_cx tool in + let tout' = self#type_ cx map_cx tout in + if resolve_tool' == resolve_tool && tool' == tool && tout' == tout then + t + else + ObjKitT (use_op, r, resolve_tool', tool', tout') | ChoiceKitUseT (r, choice_use_tool) -> - let choice_use_tool' = self#choice_use_tool cx map_cx choice_use_tool in - if choice_use_tool' == choice_use_tool then t - else ChoiceKitUseT (r, choice_use_tool') + let choice_use_tool' = self#choice_use_tool cx map_cx choice_use_tool in + if choice_use_tool' == choice_use_tool then + t + else + 
ChoiceKitUseT (r, choice_use_tool') | IntersectionPreprocessKitT (r, ipt) -> - let ipt' = self#intersection_preprocess_tool cx map_cx ipt in - if ipt' == ipt then t - else IntersectionPreprocessKitT (r, ipt') + let ipt' = self#intersection_preprocess_tool cx map_cx ipt in + if ipt' == ipt then + t + else + IntersectionPreprocessKitT (r, ipt') | DebugPrintT _ -> t | DebugSleepT _ -> t | SentinelPropTestT (r, t1, key, b, sentinel, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else SentinelPropTestT (r, t1', key, b, sentinel, t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + SentinelPropTestT (r, t1', key, b, sentinel, t2') | IdxUnwrap (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else IdxUnwrap (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + IdxUnwrap (r, t'') | IdxUnMaybeifyT (r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else IdxUnMaybeifyT (r, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + IdxUnMaybeifyT (r, t'') | OptionalChainT (r, lhs_r, uses) -> - let uses' = Nel.map (fun (use, tout) -> - self#opt_use_type cx map_cx use, - self#type_ cx map_cx tout - ) uses in - if uses' == uses then t - else OptionalChainT (r, lhs_r, uses') + let uses' = + Nel.map + (fun (use, tout) -> (self#opt_use_type cx map_cx use, self#type_ cx map_cx tout)) + uses + in + if uses' == uses then + t + else + OptionalChainT (r, lhs_r, uses') | InvariantT _ -> t | CallLatentPredT (r, b, i, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else CallLatentPredT (r, b, i, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + CallLatentPredT (r, b, i, t1', t2') | CallOpenPredT (r, b, key, t1, t2) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else CallOpenPredT (r, b, key, t1', t2') + let t1' = self#type_ cx map_cx t1 in + let t2' = self#type_ cx map_cx t2 in + if t1' == t1 && t2' == t2 then + t + else + CallOpenPredT (r, b, key, t1', t2') | SubstOnPredT (r, sub, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else SubstOnPredT (r, sub, t'') + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + SubstOnPredT (r, sub, t'') | RefineT (r, p, t') -> - let p' = self#predicate cx map_cx p in - let t'' = self#type_ cx map_cx t' in - if p' == p && t'' == t' then t - else RefineT (r, p', t'') + let p' = self#predicate cx map_cx p in + let t'' = self#type_ cx map_cx t' in + if p' == p && t'' == t' then + t + else + RefineT (r, p', t'') | ResolveSpreadT (op, r, resolve_spread) -> - let resolve_spread' = self#resolve_spread cx map_cx resolve_spread in - if resolve_spread' == resolve_spread then t - else ResolveSpreadT (op, r, resolve_spread') + let resolve_spread' = self#resolve_spread cx map_cx resolve_spread in + if resolve_spread' == resolve_spread then + t + else + ResolveSpreadT (op, r, resolve_spread') | CondT (r, then_t, else_t, tout) -> - let then_t' = OptionUtils.ident_map (self#type_ cx map_cx) then_t in - let else_t' = self#type_ cx map_cx else_t in - let tout' = self#type_ cx map_cx tout in - if then_t' == then_t && else_t' == else_t && tout' == tout then t - else CondT (r, then_t', else_t', 
tout') + let then_t' = OptionUtils.ident_map (self#type_ cx map_cx) then_t in + let else_t' = self#type_ cx map_cx else_t in + let tout' = self#type_ cx map_cx tout in + if then_t' == then_t && else_t' == else_t && tout' == tout then + t + else + CondT (r, then_t', else_t', tout') | ExtendsUseT (use_op, r, tlist, t1, t2) -> let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in let t1' = self#type_ cx map_cx t1 in let t2' = self#type_ cx map_cx t2 in - if tlist' == tlist && t1' == t1 && t2' == t2 then t - else ExtendsUseT (use_op, r, tlist', t1', t2') - - method private opt_use_type cx map_cx t = match t with - | OptCallT (op, r, funcall) -> - let funcall' = self#opt_fun_call_type cx map_cx funcall in - if funcall == funcall' then t - else OptCallT (op, r, funcall') - | OptGetPropT (use_op, r, prop) -> - let prop' = self#prop_ref cx map_cx prop in - if prop' == prop then t - else OptGetPropT (use_op, r, prop') - | OptGetPrivatePropT (use_op, r, prop, scopes, static) -> - let scopes' = ListUtils.ident_map (self#class_binding cx map_cx) scopes in - if scopes' == scopes then t - else OptGetPrivatePropT (use_op, r, prop, scopes', static) - | OptTestPropT (r, id, prop) -> - let prop' = self#prop_ref cx map_cx prop in - if prop' == prop then t - else OptTestPropT (r, id, prop') - | OptGetElemT (use_op, r, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else OptGetElemT (use_op, r, t'') + if tlist' == tlist && t1' == t1 && t2' == t2 then + t + else + ExtendsUseT (use_op, r, tlist', t1', t2') + | ModuleExportsAssignT (r, t', t_out) -> + let t'' = self#type_ cx map_cx t' in + let t_out' = self#type_ cx map_cx t_out in + if t' == t'' && t_out == t_out' then + t + else + ModuleExportsAssignT (r, t'', t_out') + | DestructuringT (r, k, s, t') -> + let s' = self#selector cx map_cx s in + let t'' = self#type_ cx map_cx t' in + if s' == s && t'' == t' then + t + else + DestructuringT (r, k, s', t'') + + method private opt_use_type cx map_cx t = + match t with + | OptCallT (op, r, funcall) -> + let funcall' = self#opt_fun_call_type cx map_cx funcall in + if funcall == funcall' then + t + else + OptCallT (op, r, funcall') + | OptGetPropT (use_op, r, prop) -> + let prop' = self#prop_ref cx map_cx prop in + if prop' == prop then + t + else + OptGetPropT (use_op, r, prop') + | OptGetPrivatePropT (use_op, r, prop, scopes, static) -> + let scopes' = ListUtils.ident_map (self#class_binding cx map_cx) scopes in + if scopes' == scopes then + t + else + OptGetPrivatePropT (use_op, r, prop, scopes', static) + | OptTestPropT (r, id, prop) -> + let prop' = self#prop_ref cx map_cx prop in + if prop' == prop then + t + else + OptTestPropT (r, id, prop') + | OptGetElemT (use_op, r, t') -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + OptGetElemT (use_op, r, t'') method private opt_fun_call_type cx map_cx ((this, targs, args, clos, strict) as t) = let this' = self#type_ cx map_cx this in - let targs' = OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) targs in + let targs' = OptionUtils.ident_map (ListUtils.ident_map (self#targ cx map_cx)) targs in let args' = ListUtils.ident_map (self#call_arg cx map_cx) args in - if this' == this && targs' == targs && args' == args - then t - else (this', targs', args', clos, strict) + if this' == this && targs' == targs && args' == args then + t + else + (this', targs', args', clos, strict) - method prop_ref cx map_cx t = - match t with - | Named _ -> t - | Computed t' -> + method prop_ref cx map_cx t = + match t 
with + | Named _ -> t + | Computed t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else Computed t'' - - method class_binding cx map_cx binding = - let class_private_fields = self#props cx map_cx binding.class_private_fields in - let class_private_static_fields = self#props cx map_cx binding.class_private_static_fields in - if class_private_fields == binding.class_private_fields && - class_private_static_fields == binding.class_private_static_fields - then binding - else {binding with class_private_fields; class_private_static_fields} - - method elem_action cx map_cx t = - match t with - | ReadElem t' -> + if t'' == t' then + t + else + Computed t'' + + method class_binding cx map_cx binding = + let class_private_fields = self#props cx map_cx binding.class_private_fields in + let class_private_static_fields = self#props cx map_cx binding.class_private_static_fields in + if + class_private_fields == binding.class_private_fields + && class_private_static_fields == binding.class_private_static_fields + then + binding + else + { binding with class_private_fields; class_private_static_fields } + + method elem_action cx map_cx t = + match t with + | ReadElem t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ReadElem t'' - | WriteElem (tin, tout) -> + if t'' == t' then + t + else + ReadElem t'' + | WriteElem (tin, tout, mode) -> let tin' = self#type_ cx map_cx tin in let tout' = OptionUtils.ident_map (self#type_ cx map_cx) tout in - if tin' == tin && tout' == tout then t - else WriteElem (tin', tout') - | CallElem (r, funcall) -> + if tin' == tin && tout' == tout then + t + else + WriteElem (tin', tout', mode) + | CallElem (r, funcall) -> let funcall' = self#fun_call_type cx map_cx funcall in - if funcall' == funcall then t - else CallElem (r, funcall') - - method resolve_spread cx map_cx ({rrt_resolved; rrt_unresolved; rrt_resolve_to} as t)= - let rrt_resolved' = ListUtils.ident_map (self#resolved_param cx map_cx) rrt_resolved in - let rrt_unresolved' = ListUtils.ident_map (self#unresolved_param cx map_cx) rrt_unresolved in - let rrt_resolve_to' = self#spread_resolve cx map_cx rrt_resolve_to in - if rrt_resolved' == rrt_resolved && rrt_unresolved' == rrt_unresolved - && rrt_resolve_to' == rrt_resolve_to - then t - else {rrt_resolved = rrt_resolved'; rrt_unresolved = rrt_unresolved'; - rrt_resolve_to = rrt_resolve_to'} - - method spread_resolve cx map_cx t = - match t with - | ResolveSpreadsToTuple (i, t1', t2') -> + if funcall' == funcall then + t + else + CallElem (r, funcall') + + method resolve_spread cx map_cx ({ rrt_resolved; rrt_unresolved; rrt_resolve_to } as t) = + let rrt_resolved' = ListUtils.ident_map (self#resolved_param cx map_cx) rrt_resolved in + let rrt_unresolved' = ListUtils.ident_map (self#unresolved_param cx map_cx) rrt_unresolved in + let rrt_resolve_to' = self#spread_resolve cx map_cx rrt_resolve_to in + if + rrt_resolved' == rrt_resolved + && rrt_unresolved' == rrt_unresolved + && rrt_resolve_to' == rrt_resolve_to + then + t + else + { + rrt_resolved = rrt_resolved'; + rrt_unresolved = rrt_unresolved'; + rrt_resolve_to = rrt_resolve_to'; + } + + method spread_resolve cx map_cx t = + match t with + | ResolveSpreadsToTuple (i, t1', t2') -> let t1'' = self#type_ cx map_cx t1' in let t2'' = self#type_ cx map_cx t2' in - if t1'' == t1' && t2'' == t2' then t - else ResolveSpreadsToTuple (i, t1'', t2'') - | ResolveSpreadsToArrayLiteral (i, t1', t2') -> + if t1'' == t1' && t2'' == t2' then + t + else + ResolveSpreadsToTuple (i, t1'', t2'') 
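(* ListUtils.ident_map and OptionUtils.ident_map are Flow helpers; the
   standalone versions below are an assumption about their behaviour, not
   their actual source: map a container but return the original value when
   no element changed, which is what keeps the == checks above meaningful. *)
let list_ident_map f lst =
  let changed = ref false in
  let mapped =
    List.map
      (fun x ->
        let x' = f x in
        if x' != x then changed := true;
        x')
      lst
  in
  if !changed then mapped else lst

let option_ident_map f opt =
  match opt with
  | None -> opt
  | Some x ->
    let x' = f x in
    if x' == x then opt else Some x'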
+ | ResolveSpreadsToArrayLiteral (i, t1', t2') -> let t1'' = self#type_ cx map_cx t1' in let t2'' = self#type_ cx map_cx t2' in - if t1'' == t1' && t2'' == t2' then t - else ResolveSpreadsToArrayLiteral (i, t1'', t2'') - | ResolveSpreadsToArray (t1', t2') -> + if t1'' == t1' && t2'' == t2' then + t + else + ResolveSpreadsToArrayLiteral (i, t1'', t2'') + | ResolveSpreadsToArray (t1', t2') -> let t1'' = self#type_ cx map_cx t1' in let t2'' = self#type_ cx map_cx t2' in - if t1'' == t1' && t2'' == t2' then t - else ResolveSpreadsToArray (t1'', t2'') - | ResolveSpreadsToMultiflowCallFull (i, funtype) -> + if t1'' == t1' && t2'' == t2' then + t + else + ResolveSpreadsToArray (t1'', t2'') + | ResolveSpreadsToMultiflowCallFull (i, funtype) -> let funtype' = self#fun_type cx map_cx funtype in - if funtype' == funtype then t - else ResolveSpreadsToMultiflowCallFull (i, funtype') - | ResolveSpreadsToMultiflowSubtypeFull (i, funtype) -> + if funtype' == funtype then + t + else + ResolveSpreadsToMultiflowCallFull (i, funtype') + | ResolveSpreadsToMultiflowSubtypeFull (i, funtype) -> let funtype' = self#fun_type cx map_cx funtype in - if funtype' == funtype then t - else ResolveSpreadsToMultiflowSubtypeFull (i, funtype') - | ResolveSpreadsToCustomFunCall (i, kind, tout) -> + if funtype' == funtype then + t + else + ResolveSpreadsToMultiflowSubtypeFull (i, funtype') + | ResolveSpreadsToCustomFunCall (i, kind, tout) -> let tout' = self#type_ cx map_cx tout in - if tout' == tout then t - else ResolveSpreadsToCustomFunCall (i, kind, tout') - | ResolveSpreadsToMultiflowPartial (i, funtype, r, t') -> + if tout' == tout then + t + else + ResolveSpreadsToCustomFunCall (i, kind, tout') + | ResolveSpreadsToMultiflowPartial (i, funtype, r, t') -> let funtype' = self#fun_type cx map_cx funtype in let t'' = self#type_ cx map_cx t' in - if funtype' == funtype && t'' == t' then t - else ResolveSpreadsToMultiflowPartial (i, funtype', r, t'') - | ResolveSpreadsToCallT (funcalltype, t') -> + if funtype' == funtype && t'' == t' then + t + else + ResolveSpreadsToMultiflowPartial (i, funtype', r, t'') + | ResolveSpreadsToCallT (funcalltype, t') -> let funcalltype' = self#fun_call_type cx map_cx funcalltype in let t'' = self#type_ cx map_cx t' in - if funcalltype' == funcalltype && t'' == t' then t - else ResolveSpreadsToCallT (funcalltype', t'') - - method fun_call_type cx map_cx t = - let { - call_this_t; - call_targs; - call_args_tlist; - call_tout; - call_closure_t; - call_strict_arity; - } = t in - let call_this_t' = self#type_ cx map_cx call_this_t in - let call_targs' = OptionUtils.ident_map (ListUtils.ident_map (self#type_ cx map_cx)) call_targs in - let call_args_tlist' = ListUtils.ident_map (self#call_arg cx map_cx) call_args_tlist in - let call_tout' = self#type_ cx map_cx call_tout in - if ( - call_this_t' == call_this_t && - call_targs' == call_targs && - call_args_tlist' == call_args_tlist && - call_tout' == call_tout - ) - then t - else { - call_this_t = call_this_t'; - call_targs = call_targs'; - call_args_tlist = call_args_tlist'; - call_tout = call_tout'; - call_closure_t; - call_strict_arity; - } - - method call_arg cx map_cx t = - match t with - | Arg t' -> + if funcalltype' == funcalltype && t'' == t' then + t + else + ResolveSpreadsToCallT (funcalltype', t'') + + method fun_call_type cx map_cx t = + let { + call_this_t; + call_targs; + call_args_tlist; + call_tout; + call_closure_t; + call_strict_arity; + } = + t + in + let call_this_t' = self#type_ cx map_cx call_this_t in + let call_targs' = + 
OptionUtils.ident_map (ListUtils.ident_map (self#targ cx map_cx)) call_targs + in + let call_args_tlist' = ListUtils.ident_map (self#call_arg cx map_cx) call_args_tlist in + let call_tout' = self#type_ cx map_cx call_tout in + if + call_this_t' == call_this_t + && call_targs' == call_targs + && call_args_tlist' == call_args_tlist + && call_tout' == call_tout + then + t + else + { + call_this_t = call_this_t'; + call_targs = call_targs'; + call_args_tlist = call_args_tlist'; + call_tout = call_tout'; + call_closure_t; + call_strict_arity; + } + + method call_arg cx map_cx t = + match t with + | Arg t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else Arg t'' - | SpreadArg t' -> + if t'' == t' then + t + else + Arg t'' + | SpreadArg t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else SpreadArg t'' + if t'' == t' then + t + else + SpreadArg t'' - method lookup_kind cx map_cx t = - match t with - | Strict _ -> t - | NonstrictReturning (tpairopt, testopt) -> - begin match tpairopt with - | Some (t1, t2) -> + method lookup_kind cx map_cx t = + match t with + | Strict _ -> t + | NonstrictReturning (tpairopt, testopt) -> + begin + match tpairopt with + | Some (t1, t2) -> let t1' = self#type_ cx map_cx t1 in let t2' = self#type_ cx map_cx t2 in - if t1' == t1 && t2' == t2 then t - else NonstrictReturning (Some (t1', t2'), testopt) - | None -> t + if t1' == t1 && t2' == t2 then + t + else + NonstrictReturning (Some (t1', t2'), testopt) + | None -> t end - | ShadowRead (r, pidlist) -> - let pidlist' = Nel.ident_map (fun property_id -> - self#props cx map_cx property_id - ) pidlist in - if pidlist == pidlist' then t - else ShadowRead (r, pidlist') - | ShadowWrite pidlist -> - let pidlist' = Nel.ident_map (fun property_id -> - self#props cx map_cx property_id - ) pidlist in - if pidlist == pidlist' then t - else ShadowWrite pidlist' - - method lookup_action cx map_cx t = - match t with - | RWProp (use_op, t1, t2, rw) -> - let t1' = self#type_ cx map_cx t1 in - let t2' = self#type_ cx map_cx t2 in - let rw' = self#read_write cx map_cx rw in - if t1' == t1 && t2' == t2 && rw' == rw then t - else RWProp (use_op, t1', t2', rw') - | LookupProp (use, prop) -> + | ShadowRead (r, pidlist) -> + let pidlist' = + Nel.ident_map (fun property_id -> self#props cx map_cx property_id) pidlist + in + if pidlist == pidlist' then + t + else + ShadowRead (r, pidlist') + | ShadowWrite pidlist -> + let pidlist' = + Nel.ident_map (fun property_id -> self#props cx map_cx property_id) pidlist + in + if pidlist == pidlist' then + t + else + ShadowWrite pidlist' + + method lookup_action cx map_cx t = + match t with + | ReadProp { use_op; obj_t; tout } -> + let obj_t' = self#type_ cx map_cx obj_t in + let tout' = self#type_ cx map_cx tout in + if obj_t' == obj_t && tout' == tout then + t + else + ReadProp { use_op; obj_t = obj_t'; tout = tout' } + | WriteProp { use_op; obj_t; prop_tout; tin; write_ctx; mode } -> + let obj_t' = self#type_ cx map_cx obj_t in + let tin' = self#type_ cx map_cx tin in + let prop_tout' = OptionUtils.ident_map (self#type_ cx map_cx) prop_tout in + if obj_t' == obj_t && tin' == tin && prop_tout' == prop_tout then + t + else + WriteProp { use_op; obj_t = obj_t'; prop_tout = prop_tout'; tin = tin'; write_ctx; mode } + | LookupProp (use, prop) -> let prop' = Property.ident_map_t (self#type_ cx map_cx) prop in - if prop == prop' then t - else LookupProp (use, prop') - | SuperProp (op, prop) -> + if prop == prop' then + t + else + LookupProp (use, prop') + | SuperProp 
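(* Sketch of the record-shaped actions this hunk introduces (ReadProp and
   WriteProp now carry records rather than bare tuples): map the type-valued
   fields, keep the rest, and only rebuild when something actually moved.
   The field names below are invented for illustration. *)
type 'a write = { obj_t : 'a; tin : 'a; ctx : string }

let map_write f ({ obj_t; tin; _ } as w) =
  let obj_t' = f obj_t in
  let tin' = f tin in
  if obj_t' == obj_t && tin' == tin then
    w
  else
    { w with obj_t = obj_t'; tin = tin' }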
(op, prop) -> let prop' = Property.ident_map_t (self#type_ cx map_cx) prop in - if prop == prop' then t - else SuperProp (op, prop') - | MatchProp (use, t') -> - let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else MatchProp (use, t') - - method cont cx map_cx t = - match t with - | Lower (use_op, t') -> + if prop == prop' then + t + else + SuperProp (op, prop') + | MatchProp (use, t') -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else Lower (use_op, t'') - | Upper use_t -> - let use_t' = self#use_type cx map_cx use_t in - if use_t' == use_t then t - else Upper use_t' + if t'' == t' then + t + else + MatchProp (use, t') - method react_tool cx map_cx t = - let open React in - match t with - | CreateElement0 (clone, config, (children, children_spread), tout) -> - let config' = self#type_ cx map_cx config in - let children' = ListUtils.ident_map (self#type_ cx map_cx) children in - let children_spread' = OptionUtils.ident_map (self#type_ cx map_cx) children_spread in - let tout' = self#type_ cx map_cx tout in - if ( - config' == config && - children' == children && - children_spread' == children_spread && - tout' == tout - ) then t else CreateElement0 (clone, config', (children', children_spread'), tout') - | CreateElement (clone, component, config, (children, children_spread), tout) -> - let component' = self#type_ cx map_cx component in - let config' = self#type_ cx map_cx config in - let children' = ListUtils.ident_map (self#type_ cx map_cx) children in - let children_spread' = OptionUtils.ident_map (self#type_ cx map_cx) children_spread in - let tout' = self#type_ cx map_cx tout in - if ( - component' == component && - config' == config && - children' == children && - children_spread' == children_spread && - tout' == tout - ) then t else CreateElement (clone, component', config', (children', children_spread'), tout') - | GetProps tout -> - let tout' = self#type_ cx map_cx tout in - if tout' == tout then t - else GetProps tout' - | GetConfig tout -> - let tout' = self#type_ cx map_cx tout in - if tout' == tout then t - else GetConfig tout' - | GetRef tout -> - let tout' = self#type_ cx map_cx tout in - if tout' == tout then t - else GetRef tout' - | SimplifyPropType (tool, t') -> - let tool' = self#simplify_prop_type_tool cx map_cx tool in - let t'' = self#type_ cx map_cx t' in - if tool' == tool && t'' == t' then t - else SimplifyPropType (tool', t'') - | CreateClass (tool, knot, t') -> - let tool' = self#create_class_tool cx map_cx tool in - let knot' = self#create_class_knot cx map_cx knot in - let t'' = self#type_ cx map_cx t' in - if tool' == tool && knot' == knot && t'' == t' then t - else CreateClass (tool', knot', t'') - - method object_kit_resolve_tool cx map_cx t = - let open Object in - match t with - | Resolve r -> - let r' = self#resolve cx map_cx r in - if r' == r then t - else Resolve r' - | Super ((reason, props, dict, flags), r) -> - let props' = SMap.ident_map (fun (t, b) -> (self#type_ cx map_cx t, b)) props in - let dict' = OptionUtils.ident_map (self#dict_type cx map_cx) dict in - let r' = self#resolve cx map_cx r in - if r' == r && props' == props then t - else Super ((reason, props', dict', flags), r') - - method object_kit_tool cx map_cx tool = - let open Object in - match tool with - | ReadOnly -> tool - | Spread (options, state) -> - let open Object.Spread in - let todo_rev' = ListUtils.ident_map (self#type_ cx map_cx) state.todo_rev in - let acc' = ListUtils.ident_map (self#resolved cx map_cx) state.acc in - if todo_rev' == 
state.todo_rev && acc' == state.acc then tool - else Spread (options, {todo_rev = todo_rev'; acc = acc'}) - | Rest (options, state) -> - let open Object.Rest in - let state' = match state with - | One t -> - let t' = self#type_ cx map_cx t in - if t == t' then state - else One t' - | Done o -> - let o' = self#resolved cx map_cx o in - if o == o' then state - else Done o' - in - if state == state' then tool - else Rest (options, state') - | ReactConfig state -> - let open Object.ReactConfig in - let state' = match state with - | Config { defaults; children } -> - let defaults' = OptionUtils.ident_map (self#type_ cx map_cx) defaults in - let children' = OptionUtils.ident_map (self#type_ cx map_cx) children in - if defaults == defaults' && children == children' then state - else Config { defaults = defaults'; children = children' } - | Defaults { config; children } -> - let config' = self#resolved cx map_cx config in - let children' = OptionUtils.ident_map (self#type_ cx map_cx) children in - if config == config' && children == children' then state - else Defaults { config = config'; children = children' } - in - if state == state' then tool - else ReactConfig state' - - method choice_use_tool cx map_cx t = - match t with - | FullyResolveType _ -> t - | TryFlow (i, spec) -> + method cont cx map_cx t = + match t with + | Lower (use_op, t') -> + let t'' = self#type_ cx map_cx t' in + if t'' == t' then + t + else + Lower (use_op, t'') + | Upper use_t -> + let use_t' = self#use_type cx map_cx use_t in + if use_t' == use_t then + t + else + Upper use_t' + + method react_tool cx map_cx t = + React.( + match t with + | CreateElement0 (clone, config, (children, children_spread), tout) -> + let config' = self#type_ cx map_cx config in + let children' = ListUtils.ident_map (self#type_ cx map_cx) children in + let children_spread' = OptionUtils.ident_map (self#type_ cx map_cx) children_spread in + let tout' = self#type_ cx map_cx tout in + if + config' == config + && children' == children + && children_spread' == children_spread + && tout' == tout + then + t + else + CreateElement0 (clone, config', (children', children_spread'), tout') + | CreateElement (clone, component, config, (children, children_spread), tout) -> + let component' = self#type_ cx map_cx component in + let config' = self#type_ cx map_cx config in + let children' = ListUtils.ident_map (self#type_ cx map_cx) children in + let children_spread' = OptionUtils.ident_map (self#type_ cx map_cx) children_spread in + let tout' = self#type_ cx map_cx tout in + if + component' == component + && config' == config + && children' == children + && children_spread' == children_spread + && tout' == tout + then + t + else + CreateElement (clone, component', config', (children', children_spread'), tout') + | ConfigCheck config -> + let config' = self#type_ cx map_cx config in + if config' == config then + t + else + ConfigCheck config' + | GetProps tout -> + let tout' = self#type_ cx map_cx tout in + if tout' == tout then + t + else + GetProps tout' + | GetConfig tout -> + let tout' = self#type_ cx map_cx tout in + if tout' == tout then + t + else + GetConfig tout' + | GetConfigType (default_props, tout) -> + let default_props' = self#type_ cx map_cx default_props in + let tout' = self#type_ cx map_cx tout in + if tout' == tout && default_props' == default_props then + t + else + GetConfigType (default_props', tout') + | GetRef tout -> + let tout' = self#type_ cx map_cx tout in + if tout' == tout then + t + else + GetRef tout' + | SimplifyPropType (tool, 
t') -> + let tool' = self#simplify_prop_type_tool cx map_cx tool in + let t'' = self#type_ cx map_cx t' in + if tool' == tool && t'' == t' then + t + else + SimplifyPropType (tool', t'') + | CreateClass (tool, knot, t') -> + let tool' = self#create_class_tool cx map_cx tool in + let knot' = self#create_class_knot cx map_cx knot in + let t'' = self#type_ cx map_cx t' in + if tool' == tool && knot' == knot && t'' == t' then + t + else + CreateClass (tool', knot', t'')) + + method object_kit_resolve_tool cx map_cx t = + Object.( + match t with + | Resolve r -> + let r' = self#resolve cx map_cx r in + if r' == r then + t + else + Resolve r' + | Super ({ Object.reason; props; dict; flags }, r) -> + let props' = SMap.ident_map (fun (t, b) -> (self#type_ cx map_cx t, b)) props in + let dict' = OptionUtils.ident_map (self#dict_type cx map_cx) dict in + let r' = self#resolve cx map_cx r in + if r' == r && props' == props then + t + else + Super ({ reason; Object.props = props'; dict = dict'; flags }, r')) + + method object_kit_tool cx map_cx tool = + Object.( + match tool with + | ReadOnly -> tool + | ObjectRep -> tool + | Spread (options, state) -> + Object.Spread.( + let todo_rev' = + ListUtils.ident_map (self#object_kit_spread_operand cx map_cx) state.todo_rev + in + let acc' = ListUtils.ident_map (self#object_kit_acc_element cx map_cx) state.acc in + if todo_rev' == state.todo_rev && acc' == state.acc then + tool + else + Spread (options, { todo_rev = todo_rev'; acc = acc' })) + | Rest (options, state) -> + Object.Rest.( + let state' = + match state with + | One t -> + let t' = self#type_ cx map_cx t in + if t == t' then + state + else + One t' + | Done o -> + let o' = self#resolved cx map_cx o in + if o == o' then + state + else + Done o' + in + if state == state' then + tool + else + Rest (options, state')) + | ReactConfig state -> + Object.ReactConfig.( + let state' = + match state with + | Config { defaults; children } -> + let defaults' = OptionUtils.ident_map (self#type_ cx map_cx) defaults in + let children' = OptionUtils.ident_map (self#type_ cx map_cx) children in + if defaults == defaults' && children == children' then + state + else + Config { defaults = defaults'; children = children' } + | Defaults { config; children } -> + let config' = self#resolved cx map_cx config in + let children' = OptionUtils.ident_map (self#type_ cx map_cx) children in + if config == config' && children == children' then + state + else + Defaults { config = config'; children = children' } + in + if state == state' then + tool + else + ReactConfig state')) + + method choice_use_tool cx map_cx t = + match t with + | FullyResolveType _ -> t + | TryFlow (i, spec) -> let spec' = self#spec cx map_cx spec in - if spec' == spec then t - else TryFlow (i, spec') + if spec' == spec then + t + else + TryFlow (i, spec') - method intersection_preprocess_tool cx map_cx t = - match t with - | ConcretizeTypes (tlist1, tlist2, t', use_t) -> + method intersection_preprocess_tool cx map_cx t = + match t with + | ConcretizeTypes (tlist1, tlist2, t', use_t) -> let tlist1' = ListUtils.ident_map (self#type_ cx map_cx) tlist1 in let tlist2' = ListUtils.ident_map (self#type_ cx map_cx) tlist2 in let t'' = self#type_ cx map_cx t' in let use_t' = self#use_type cx map_cx use_t in - if tlist1' == tlist1 && tlist2' == tlist2 && t'' == t' && use_t' == use_t then t - else ConcretizeTypes (tlist1', tlist2', t'', use_t') - | SentinelPropTest (b, s, t1, t2, t3) -> + if tlist1' == tlist1 && tlist2' == tlist2 && t'' == t' && use_t' == use_t 
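(* The patch rewrites `let open React in ...` bodies into the delimited
   local open `React.( ... )`.  Both forms scope the module's names over a
   single expression; a stand-in module (unrelated to Flow) to show the
   equivalence: *)
module Dir = struct
  type t = Up | Down

  let flip = function
    | Up -> Down
    | Down -> Up
end

let with_let_open = let open Dir in flip Up

let with_delimited = Dir.(flip Up)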
then + t + else + ConcretizeTypes (tlist1', tlist2', t'', use_t') + | SentinelPropTest (b, s, t1, t2, t3) -> let t1' = self#type_ cx map_cx t1 in let t2' = self#type_ cx map_cx t2 in let t3' = self#type_ cx map_cx t3 in - if t1' == t1 && t2' == t2 && t3' == t3 then t - else SentinelPropTest (b, s, t1', t2', t3') - | PropExistsTest (b, s, t1, t2) -> + if t1' == t1 && t2' == t2 && t3' == t3 then + t + else + SentinelPropTest (b, s, t1', t2', t3') + | PropExistsTest (b, s, t1, t2) -> let t1' = self#type_ cx map_cx t1 in let t2' = self#type_ cx map_cx t2 in - if t1' == t2 && t2' == t2 then t - else PropExistsTest (b, s, t1', t2') - - method simplify_prop_type_tool cx map_cx tool = - let open React.SimplifyPropType in - match tool with - | ArrayOf - | InstanceOf - | ObjectOf -> tool - | OneOf resolve_array -> - let resolve_array' = self#resolve_array cx map_cx resolve_array in - if resolve_array' == resolve_array then tool - else OneOf resolve_array' - | OneOfType resolve_array -> - let resolve_array' = self#resolve_array cx map_cx resolve_array in - if resolve_array' == resolve_array then tool - else OneOfType resolve_array' - | Shape resolve_object -> - let resolve_object' = self#resolve_object cx map_cx resolve_object in - if resolve_object' == resolve_object then tool - else Shape resolve_object' - - method create_class_tool cx map_cx tool = - let open React.CreateClass in - match tool with - | Spec tail -> - let tail' = self#stack_tail cx map_cx tail in - if tail' == tail then tool - else Spec tail' - | Mixins (head, tail) -> - let head' = self#stack_head cx map_cx head in - let tail' = self#stack_tail cx map_cx tail in - if head' == head && tail' == tail then tool - else Mixins (head', tail') - | Statics (head, tail) -> - let head' = self#stack_head cx map_cx head in - let tail' = self#stack_tail cx map_cx tail in - if head' == head && tail' == tail then tool - else Statics (head', tail') - | PropTypes ((head, tail), resolve_object) -> - let head' = self#stack_head cx map_cx head in - let tail' = self#stack_tail cx map_cx tail in - let resolve_object' = self#resolve_object cx map_cx resolve_object in - if head' == head && tail' == tail && resolve_object' == resolve_object then tool - else PropTypes ((head', tail'), resolve_object') - | DefaultProps (tlist, default_props) -> - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - let default_props' = OptionUtils.ident_map (self#default_props cx map_cx) default_props in - if tlist' == tlist && default_props' == default_props then tool - else DefaultProps (tlist', default_props') - | InitialState (tlist, initial_state) -> - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - let initial_state' = OptionUtils.ident_map (self#initial_state cx map_cx) initial_state in - if tlist' == tlist && initial_state' == initial_state then tool - else InitialState (tlist', initial_state') + if t1' == t2 && t2' == t2 then + t + else + PropExistsTest (b, s, t1', t2') + + method simplify_prop_type_tool cx map_cx tool = + React.SimplifyPropType.( + match tool with + | ArrayOf + | InstanceOf + | ObjectOf -> + tool + | OneOf resolve_array -> + let resolve_array' = self#resolve_array cx map_cx resolve_array in + if resolve_array' == resolve_array then + tool + else + OneOf resolve_array' + | OneOfType resolve_array -> + let resolve_array' = self#resolve_array cx map_cx resolve_array in + if resolve_array' == resolve_array then + tool + else + OneOfType resolve_array' + | Shape resolve_object -> + let resolve_object' = 
self#resolve_object cx map_cx resolve_object in + if resolve_object' == resolve_object then + tool + else + Shape resolve_object') + + method create_class_tool cx map_cx tool = + React.CreateClass.( + match tool with + | Spec tail -> + let tail' = self#stack_tail cx map_cx tail in + if tail' == tail then + tool + else + Spec tail' + | Mixins (head, tail) -> + let head' = self#stack_head cx map_cx head in + let tail' = self#stack_tail cx map_cx tail in + if head' == head && tail' == tail then + tool + else + Mixins (head', tail') + | Statics (head, tail) -> + let head' = self#stack_head cx map_cx head in + let tail' = self#stack_tail cx map_cx tail in + if head' == head && tail' == tail then + tool + else + Statics (head', tail') + | PropTypes ((head, tail), resolve_object) -> + let head' = self#stack_head cx map_cx head in + let tail' = self#stack_tail cx map_cx tail in + let resolve_object' = self#resolve_object cx map_cx resolve_object in + if head' == head && tail' == tail && resolve_object' == resolve_object then + tool + else + PropTypes ((head', tail'), resolve_object') + | DefaultProps (tlist, default_props) -> + let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in + let default_props' = + OptionUtils.ident_map (self#default_props cx map_cx) default_props + in + if tlist' == tlist && default_props' == default_props then + tool + else + DefaultProps (tlist', default_props') + | InitialState (tlist, initial_state) -> + let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in + let initial_state' = + OptionUtils.ident_map (self#initial_state cx map_cx) initial_state + in + if tlist' == tlist && initial_state' == initial_state then + tool + else + InitialState (tlist', initial_state')) - method resolved_param cx map_cx t = - match t with - | ResolvedArg t' -> + method resolved_param cx map_cx t = + match t with + | ResolvedArg t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else ResolvedArg t'' - | ResolvedSpreadArg (r, arrtype) -> + if t'' == t' then + t + else + ResolvedArg t'' + | ResolvedSpreadArg (r, arrtype) -> let arrtype' = self#arr_type cx map_cx arrtype in - if arrtype' == arrtype then t - else ResolvedSpreadArg (r, arrtype') - | ResolvedAnySpreadArg _ -> t - + if arrtype' == arrtype then + t + else + ResolvedSpreadArg (r, arrtype') + | ResolvedAnySpreadArg _ -> t - method unresolved_param cx map_cx t = - match t with - | UnresolvedArg t' -> + method unresolved_param cx map_cx t = + match t with + | UnresolvedArg t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else UnresolvedArg t'' - | UnresolvedSpreadArg t' -> + if t'' == t' then + t + else + UnresolvedArg t'' + | UnresolvedSpreadArg t' -> let t'' = self#type_ cx map_cx t' in - if t'' == t' then t - else UnresolvedSpreadArg t'' - - method resolve_array cx map_cx t = - let open React in - match t with - | ResolveArray -> t - | ResolveElem (tlist1, tlist2) -> - let tlist1' = ListUtils.ident_map (self#type_ cx map_cx) tlist1 in - let tlist2' = ListUtils.ident_map (self#type_ cx map_cx) tlist2 in - if tlist1' == tlist1 && tlist2' == tlist2 then t - else ResolveElem (tlist1', tlist2') - - method resolve_object cx map_cx t = - let open React in - match t with - | ResolveObject -> t - | ResolveDict (dict, props, obj) -> - let dict' = self#dict_type cx map_cx dict in - let props' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props in - let obj' = self#resolved_object cx map_cx obj in - if dict' == dict && props' == props && obj' == obj then t - else 
ResolveDict (dict', props', obj') - | ResolveProp (s, props, obj) -> - let props' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props in - let obj' = self#resolved_object cx map_cx obj in - if props' == props && obj' == obj then t - else ResolveProp (s, props', obj') - - method create_class_knot cx map_cx t = - let open React.CreateClass in - let this' = self#type_ cx map_cx t.this in - let static' = self#type_ cx map_cx t.static in - let state_t' = self#type_ cx map_cx t.state_t in - let default_t' = self#type_ cx map_cx t.default_t in - if this' == t.this && static' == t.static && state_t' == t.state_t - && default_t' == t.default_t - then t - else {this = this'; static = static'; state_t = state_t'; default_t = default_t'} - - method resolve cx map_cx t = - let open Object in - match t with - | Next -> t - | List0 (tnelist, join) -> - let tnelist' = Nel.ident_map (self#type_ cx map_cx) tnelist in - if tnelist' == tnelist then t - else List0 (tnelist', join) - | List (tlist, resolvednelist, join) -> - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - let resolvednelist' = Nel.ident_map (self#resolved cx map_cx) resolvednelist in - if tlist' == tlist && resolvednelist' == resolvednelist then t - else List (tlist', resolvednelist', join) + if t'' == t' then + t + else + UnresolvedSpreadArg t'' + + method resolve_array cx map_cx t = + React.( + match t with + | ResolveArray -> t + | ResolveElem (tlist1, tlist2) -> + let tlist1' = ListUtils.ident_map (self#type_ cx map_cx) tlist1 in + let tlist2' = ListUtils.ident_map (self#type_ cx map_cx) tlist2 in + if tlist1' == tlist1 && tlist2' == tlist2 then + t + else + ResolveElem (tlist1', tlist2')) + + method resolve_object cx map_cx t = + React.( + match t with + | ResolveObject -> t + | ResolveDict (dict, props, obj) -> + let dict' = self#dict_type cx map_cx dict in + let props' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props in + let obj' = self#resolved_object cx map_cx obj in + if dict' == dict && props' == props && obj' == obj then + t + else + ResolveDict (dict', props', obj') + | ResolveProp (s, props, obj) -> + let props' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props in + let obj' = self#resolved_object cx map_cx obj in + if props' == props && obj' == obj then + t + else + ResolveProp (s, props', obj')) + + method create_class_knot cx map_cx t = + React.CreateClass.( + let this' = self#type_ cx map_cx t.this in + let static' = self#type_ cx map_cx t.static in + let state_t' = self#type_ cx map_cx t.state_t in + let default_t' = self#type_ cx map_cx t.default_t in + if + this' == t.this + && static' == t.static + && state_t' == t.state_t + && default_t' == t.default_t + then + t + else + { this = this'; static = static'; state_t = state_t'; default_t = default_t' }) + + method resolve cx map_cx t = + Object.( + match t with + | Next -> t + | List0 (tnelist, join) -> + let tnelist' = Nel.ident_map (self#type_ cx map_cx) tnelist in + if tnelist' == tnelist then + t + else + List0 (tnelist', join) + | List (tlist, resolvednelist, join) -> + let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in + let resolvednelist' = Nel.ident_map (self#resolved cx map_cx) resolvednelist in + if tlist' == tlist && resolvednelist' == resolvednelist then + t + else + List (tlist', resolvednelist', join)) - method resolved_prop cx map_cx ((t, own) as prop) = - let t' = self#type_ cx map_cx t in - if t' == t then prop - else (t', own) + method resolved_prop cx map_cx ((t, 
own) as prop) = + let t' = self#type_ cx map_cx t in + if t' == t then + prop + else + (t', own) - method resolved cx map_cx t = - let t' = Nel.ident_map (fun ((r, props, dict, flags) as slice) -> + method object_kit_slice cx map_cx ({ Object.reason = _; props; dict; flags = _ } as slice) = let props' = SMap.ident_map (self#resolved_prop cx map_cx) props in let dict' = OptionUtils.ident_map (self#dict_type cx map_cx) dict in - if props' == props && dict' == dict then slice - else (r, props', dict', flags)) t in - if t' == t then t - else t' - - method spec cx map_cx t = - match t with - | UnionCases (use_op, t', rep, tlist) -> + if props' == props && dict' == dict then + slice + else + { slice with Object.props = props'; dict = dict' } + + method object_kit_acc_element cx map_cx el = + Object.Spread.( + match el with + | InlineSlice slice -> + let slice' = self#object_kit_spread_operand_slice cx map_cx slice in + if slice' == slice then + el + else + InlineSlice slice' + | ResolvedSlice resolved -> + let resolved' = self#resolved cx map_cx resolved in + if resolved' == resolved then + el + else + ResolvedSlice resolved') + + method resolved cx map_cx t = + let t' = Nel.ident_map (self#object_kit_slice cx map_cx) t in + if t' == t then + t + else + t' + + method spec cx map_cx t = + match t with + | UnionCases (use_op, t', rep, tlist) -> let t'' = self#type_ cx map_cx t' in let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - if t'' == t' && tlist' == tlist then t - else UnionCases (use_op, t'', rep, tlist') - | IntersectionCases (tlist, use_t) -> + if t'' == t' && tlist' == tlist then + t + else + UnionCases (use_op, t'', rep, tlist') + | IntersectionCases (tlist, use_t) -> let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in let use_t' = self#use_type cx map_cx use_t in - if tlist' == tlist && use_t' == use_t then t - else IntersectionCases (tlist', use_t') - - method stack_tail cx map_cx tail = ListUtils.ident_map (self#stack_tail_elem cx map_cx) tail - - method stack_tail_elem cx map_cx ((head, tlist, maybespeclist) as t) = - let head' = self#stack_head cx map_cx head in - let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in - let maybespeclist' = - ListUtils.ident_map (maybe_known (self#create_class_spec cx map_cx)) maybespeclist in - if head' == head && tlist' == tlist && maybespeclist' == maybespeclist then t - else (head', tlist', maybespeclist') - - method create_class_spec cx map_cx t = - let open React.CreateClass in - let obj = self#resolved_object cx map_cx t.obj in - let statics = OptionUtils.ident_map (maybe_known (self#resolved_object cx map_cx)) t.statics in - let prop_types = - OptionUtils.ident_map (maybe_known (self#resolved_object cx map_cx)) t.prop_types in - let get_default_props = ListUtils.ident_map (self#type_ cx map_cx) t.get_default_props in - let get_initial_state = ListUtils.ident_map (self#type_ cx map_cx) t.get_initial_state in - if obj == t.obj && statics == t.statics && prop_types == t.prop_types - && get_default_props == t.get_default_props && get_initial_state == t.get_initial_state - then t - else {obj; statics; prop_types; get_default_props; get_initial_state; - unknown_mixins = t.unknown_mixins} - - method stack_head cx map_cx ((obj, spec) as t) = - let obj' = self#resolved_object cx map_cx obj in - let spec' = self#create_class_spec cx map_cx spec in - if obj' == obj && spec' == spec then t - else (obj', spec') - - method default_props cx map_cx default_props = - maybe_known (self#resolved_object cx map_cx) 
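(* The object-kit "slice" moves from a bare tuple to a record in this hunk;
   the traversal itself is unchanged.  A toy version with invented field
   names, showing the same rebuild-only-on-change discipline: *)
type slice = { reason : string; props : int list; flags : bool }

let map_slice_props f ({ props; _ } as slice) =
  let changed = ref false in
  let props' =
    List.map
      (fun p ->
        let p' = f p in
        if p' != p then changed := true;
        p')
      props
  in
  if !changed then { slice with props = props' } else slice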
default_props - - method resolved_object cx map_cx ((r, props, dictopt, flags) as t) = - let props' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props in - let dictopt' = OptionUtils.ident_map (self#dict_type cx map_cx) dictopt in - if props' == props && dictopt' == dictopt then t - else (r, props', dictopt', flags) - - method initial_state cx map_cx t = - let open React.CreateClass in - maybe_known (fun x -> match x with - | NotNull obj -> - let obj' = self#resolved_object cx map_cx obj in - if obj' == obj then x - else NotNull obj' - | Null _ -> x) t - -end + if tlist' == tlist && use_t' == use_t then + t + else + IntersectionCases (tlist', use_t') + + method stack_tail cx map_cx tail = ListUtils.ident_map (self#stack_tail_elem cx map_cx) tail + + method stack_tail_elem cx map_cx ((head, tlist, maybespeclist) as t) = + let head' = self#stack_head cx map_cx head in + let tlist' = ListUtils.ident_map (self#type_ cx map_cx) tlist in + let maybespeclist' = + ListUtils.ident_map (maybe_known (self#create_class_spec cx map_cx)) maybespeclist + in + if head' == head && tlist' == tlist && maybespeclist' == maybespeclist then + t + else + (head', tlist', maybespeclist') + + method create_class_spec cx map_cx t = + React.CreateClass.( + let obj = self#resolved_object cx map_cx t.obj in + let statics = + OptionUtils.ident_map (maybe_known (self#resolved_object cx map_cx)) t.statics + in + let prop_types = + OptionUtils.ident_map (maybe_known (self#resolved_object cx map_cx)) t.prop_types + in + let get_default_props = ListUtils.ident_map (self#type_ cx map_cx) t.get_default_props in + let get_initial_state = ListUtils.ident_map (self#type_ cx map_cx) t.get_initial_state in + if + obj == t.obj + && statics == t.statics + && prop_types == t.prop_types + && get_default_props == t.get_default_props + && get_initial_state == t.get_initial_state + then + t + else + { + obj; + statics; + prop_types; + get_default_props; + get_initial_state; + unknown_mixins = t.unknown_mixins; + }) + + method stack_head cx map_cx ((obj, spec) as t) = + let obj' = self#resolved_object cx map_cx obj in + let spec' = self#create_class_spec cx map_cx spec in + if obj' == obj && spec' == spec then + t + else + (obj', spec') + + method default_props cx map_cx default_props = + maybe_known (self#resolved_object cx map_cx) default_props + + method resolved_object cx map_cx ((r, props, dictopt, flags) as t) = + let props' = SMap.ident_map (Property.ident_map_t (self#type_ cx map_cx)) props in + let dictopt' = OptionUtils.ident_map (self#dict_type cx map_cx) dictopt in + if props' == props && dictopt' == dictopt then + t + else + (r, props', dictopt', flags) + + method initial_state cx map_cx t = + React.CreateClass.( + maybe_known + (fun x -> + match x with + | NotNull obj -> + let obj' = self#resolved_object cx map_cx obj in + if obj' == obj then + x + else + NotNull obj' + | Null _ -> x) + t) + end diff --git a/src/typing/type_mapper.mli b/src/typing/type_mapper.mli index ed5e52b3f82..8352c07be19 100644 --- a/src/typing/type_mapper.mli +++ b/src/typing/type_mapper.mli @@ -1,194 +1,168 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) class virtual ['a] t : object - method arr_type : - Context.t -> 'a -> Type.arrtype -> Type.arrtype - method bounds : - Context.t -> 'a -> Constraint.bounds -> Constraint.bounds + method arr_type : Context.t -> 'a -> Type.arrtype -> Type.arrtype + + method bounds : Context.t -> 'a -> Constraint.bounds -> Constraint.bounds + method virtual call_prop : Context.t -> 'a -> int -> int + method def_type : Context.t -> 'a -> Type.def_t -> Type.def_t - method defer_use_type : - Context.t -> 'a -> Type.defer_use_t -> Type.defer_use_t - method destructor : - Context.t -> 'a -> Type.destructor -> Type.destructor - method dict_type : - Context.t -> 'a -> Type.dicttype -> Type.dicttype - method virtual eval_id : Context.t -> 'a -> IMap.key -> IMap.key - method export_types : - Context.t -> 'a -> Type.exporttypes -> Type.exporttypes - method virtual exports : - Context.t -> 'a -> Type.Exports.id -> Type.Exports.id - method fun_type : - Context.t -> 'a -> Type.funtype -> Type.funtype - method inst_type : - Context.t -> 'a -> Type.insttype -> Type.insttype - method obj_type : - Context.t -> 'a -> Type.objtype -> Type.objtype - method predicate : - Context.t -> 'a -> Type.predicate -> Type.predicate + + method defer_use_type : Context.t -> 'a -> Type.defer_use_t -> Type.defer_use_t + + method destructor : Context.t -> 'a -> Type.destructor -> Type.destructor + + method dict_type : Context.t -> 'a -> Type.dicttype -> Type.dicttype + + method virtual eval_id : Context.t -> 'a -> int -> int + + method export_types : Context.t -> 'a -> Type.exporttypes -> Type.exporttypes + + method virtual exports : Context.t -> 'a -> Type.Exports.id -> Type.Exports.id + + method fun_type : Context.t -> 'a -> Type.funtype -> Type.funtype + + method inst_type : Context.t -> 'a -> Type.insttype -> Type.insttype + + method object_kit_spread_operand : + Context.t -> 'a -> Type.Object.Spread.operand -> Type.Object.Spread.operand + + method object_kit_spread_operand_slice : + Context.t -> 'a -> Type.Object.Spread.operand_slice -> Type.Object.Spread.operand_slice + + method obj_type : Context.t -> 'a -> Type.objtype -> Type.objtype + + method predicate : Context.t -> 'a -> Type.predicate -> Type.predicate + method prop : Context.t -> 'a -> Type.Property.t -> Type.Property.t + method virtual props : Context.t -> 'a -> Type.Properties.id -> Type.Properties.id - method selector : - Context.t -> 'a -> Type.selector -> Type.selector - method virtual tvar : - Context.t -> 'a -> Reason.t -> Constraint.ident -> Constraint.ident + + method selector : Context.t -> 'a -> Type.selector -> Type.selector + + method targ : Context.t -> 'a -> Type.targ -> Type.targ + + method virtual tvar : Context.t -> 'a -> Reason.t -> Constraint.ident -> Constraint.ident + method type_ : Context.t -> 'a -> Type.t -> Type.t - method type_param : - Context.t -> 'a -> Type.typeparam -> Type.typeparam + + method type_param : Context.t -> 'a -> Type.typeparam -> Type.typeparam + method type_map : Context.t -> 'a -> Type.type_map -> Type.type_map - method virtual use_type : - Context.t -> 'a -> Type.UseTypeMap.key -> Type.UseTypeMap.key -end + + method virtual use_type : Context.t -> 'a -> Type.use_t -> Type.use_t + end class virtual ['a] t_with_uses : object - method arr_type : - Context.t -> 'a -> Type.arrtype -> Type.arrtype - method bounds : - Context.t -> 'a -> Constraint.bounds -> Constraint.bounds - method call_arg : - Context.t -> 'a -> Type.call_arg -> Type.call_arg - method virtual call_prop : Context.t -> 'a -> int -> int - method choice_use_tool : - 
Context.t -> - 'a -> Type.choice_use_tool -> Type.choice_use_tool + inherit ['a] t + + method call_arg : Context.t -> 'a -> Type.call_arg -> Type.call_arg + + method choice_use_tool : Context.t -> 'a -> Type.choice_use_tool -> Type.choice_use_tool + method class_binding : Context.t -> 'a -> Type.class_binding -> Type.class_binding + method cont : Context.t -> 'a -> Type.cont -> Type.cont + method create_class_knot : - Context.t -> - 'a -> - Type.React.CreateClass.knot -> Type.React.CreateClass.knot + Context.t -> 'a -> Type.React.CreateClass.knot -> Type.React.CreateClass.knot + method create_class_spec : - Context.t -> - 'a -> - Type.React.CreateClass.spec -> Type.React.CreateClass.spec + Context.t -> 'a -> Type.React.CreateClass.spec -> Type.React.CreateClass.spec + method create_class_tool : - Context.t -> - 'a -> - Type.React.CreateClass.tool -> Type.React.CreateClass.tool - method def_type : Context.t -> 'a -> Type.def_t -> Type.def_t + Context.t -> 'a -> Type.React.CreateClass.tool -> Type.React.CreateClass.tool + method default_props : Context.t -> 'a -> Type.React.CreateClass.default_props -> Type.React.CreateClass.default_props - method defer_use_type : - Context.t -> 'a -> Type.defer_use_t -> Type.defer_use_t - method destructor : - Context.t -> 'a -> Type.destructor -> Type.destructor - method dict_type : - Context.t -> 'a -> Type.dicttype -> Type.dicttype - method elem_action : - Context.t -> 'a -> Type.elem_action -> Type.elem_action - method virtual eval_id : Context.t -> 'a -> IMap.key -> IMap.key - method export_types : - Context.t -> 'a -> Type.exporttypes -> Type.exporttypes - method virtual exports : - Context.t -> 'a -> Type.Exports.id -> Type.Exports.id - method fun_call_type : - Context.t -> 'a -> Type.funcalltype -> Type.funcalltype - method fun_type : - Context.t -> 'a -> Type.funtype -> Type.funtype + + method elem_action : Context.t -> 'a -> Type.elem_action -> Type.elem_action + + method fun_call_type : Context.t -> 'a -> Type.funcalltype -> Type.funcalltype + method initial_state : Context.t -> 'a -> Type.React.CreateClass.initial_state -> Type.React.CreateClass.initial_state - method inst_type : - Context.t -> 'a -> Type.insttype -> Type.insttype + method intersection_preprocess_tool : - Context.t -> - 'a -> - Type.intersection_preprocess_tool -> - Type.intersection_preprocess_tool - method lookup_action : - Context.t -> 'a -> Type.lookup_action -> Type.lookup_action - method lookup_kind : - Context.t -> 'a -> Type.lookup_kind -> Type.lookup_kind - method obj_type : - Context.t -> 'a -> Type.objtype -> Type.objtype + Context.t -> 'a -> Type.intersection_preprocess_tool -> Type.intersection_preprocess_tool + + method lookup_action : Context.t -> 'a -> Type.lookup_action -> Type.lookup_action + + method lookup_kind : Context.t -> 'a -> Type.lookup_kind -> Type.lookup_kind + + method object_kit_acc_element : + Context.t -> 'a -> Type.Object.Spread.acc_element -> Type.Object.Spread.acc_element + method object_kit_resolve_tool : - Context.t -> - 'a -> Type.Object.resolve_tool -> Type.Object.resolve_tool - method object_kit_tool : - Context.t -> - 'a -> Type.Object.tool -> Type.Object.tool - method predicate : - Context.t -> 'a -> Type.predicate -> Type.predicate - method prop : Context.t -> 'a -> Type.Property.t -> Type.Property.t - method prop_ref : - Context.t -> 'a -> Type.propref -> Type.propref - method virtual props : Context.t -> 'a -> Type.Properties.id -> Type.Properties.id - method react_tool : - Context.t -> 'a -> Type.React.tool -> Type.React.tool - 
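(* Sketch of how the virtual classes this .mli declares are meant to be
   used: a concrete mapper inherits, supplies the virtual methods, and gets
   the identity-preserving traversal for free everywhere else.  Toy
   stand-in types, not Flow's actual signatures: *)
class virtual ['acc] base_mapper =
  object (self)
    method virtual leaf : 'acc -> int -> int

    method pair : 'acc -> int * int -> int * int =
      fun acc ((a, b) as p) ->
        let a' = self#leaf acc a in
        let b' = self#leaf acc b in
        if a' == a && b' == b then p else (a', b')
  end

class shift_mapper =
  object
    inherit [int] base_mapper

    method leaf acc x = x + acc
  end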
method resolve : - Context.t -> - 'a -> Type.Object.resolve -> Type.Object.resolve - method resolve_array : - Context.t -> - 'a -> Type.React.resolve_array -> Type.React.resolve_array + Context.t -> 'a -> Type.Object.resolve_tool -> Type.Object.resolve_tool + + method object_kit_slice : Context.t -> 'a -> Type.Object.slice -> Type.Object.slice + + method object_kit_tool : Context.t -> 'a -> Type.Object.tool -> Type.Object.tool + + method prop_ref : Context.t -> 'a -> Type.propref -> Type.propref + + method react_tool : Context.t -> 'a -> Type.React.tool -> Type.React.tool + + method resolve : Context.t -> 'a -> Type.Object.resolve -> Type.Object.resolve + + method resolved_prop : Context.t -> 'a -> Type.Object.prop -> Type.Object.prop + + method resolve_array : Context.t -> 'a -> Type.React.resolve_array -> Type.React.resolve_array + method resolve_object : - Context.t -> - 'a -> Type.React.resolve_object -> Type.React.resolve_object - method resolve_spread : - Context.t -> - 'a -> Type.resolve_spread_type -> Type.resolve_spread_type - method resolved_prop : - Context.t -> - 'a -> Type.Object.prop -> Type.Object.prop - method resolved : - Context.t -> - 'a -> Type.Object.resolved -> Type.Object.resolved + Context.t -> 'a -> Type.React.resolve_object -> Type.React.resolve_object + + method resolve_spread : Context.t -> 'a -> Type.resolve_spread_type -> Type.resolve_spread_type + + method resolved : Context.t -> 'a -> Type.Object.resolved -> Type.Object.resolved + method resolved_object : - Context.t -> - 'a -> Type.React.resolved_object -> Type.React.resolved_object - method resolved_param : - Context.t -> 'a -> Type.resolved_param -> Type.resolved_param - method selector : - Context.t -> 'a -> Type.selector -> Type.selector + Context.t -> 'a -> Type.React.resolved_object -> Type.React.resolved_object + + method resolved_param : Context.t -> 'a -> Type.resolved_param -> Type.resolved_param + method simplify_prop_type_tool : - Context.t -> - 'a -> - Type.React.SimplifyPropType.tool -> - Type.React.SimplifyPropType.tool + Context.t -> 'a -> Type.React.SimplifyPropType.tool -> Type.React.SimplifyPropType.tool + method spec : Context.t -> 'a -> Type.spec -> Type.spec - method spread_resolve : - Context.t -> 'a -> Type.spread_resolve -> Type.spread_resolve + + method spread_resolve : Context.t -> 'a -> Type.spread_resolve -> Type.spread_resolve + method stack_head : - Context.t -> - 'a -> - Type.React.CreateClass.stack_head -> - Type.React.CreateClass.stack_head + Context.t -> 'a -> Type.React.CreateClass.stack_head -> Type.React.CreateClass.stack_head + method stack_tail : - Context.t -> - 'a -> - Type.React.CreateClass.stack_tail -> - Type.React.CreateClass.stack_tail + Context.t -> 'a -> Type.React.CreateClass.stack_tail -> Type.React.CreateClass.stack_tail + method stack_tail_elem : Context.t -> 'a -> - Type.React.CreateClass.stack_head * Type.t list * - Type.React.CreateClass.spec Type.React.CreateClass.maybe_known - list -> - Type.React.CreateClass.stack_head * Type.t list * - Type.React.CreateClass.spec Type.React.CreateClass.maybe_known - list - method virtual tvar : - Context.t -> 'a -> Reason.t -> Constraint.ident -> Constraint.ident - method type_ : Context.t -> 'a -> Type.t -> Type.t - method type_param : - Context.t -> 'a -> Type.typeparam -> Type.typeparam - method type_map : Context.t -> 'a -> Type.type_map -> Type.type_map - method unresolved_param : - Context.t -> - 'a -> Type.unresolved_param -> Type.unresolved_param - method use_type : - Context.t -> 'a -> Type.UseTypeMap.key 
-> Type.UseTypeMap.key -end + Type.React.CreateClass.stack_head + * Type.t list + * Type.React.CreateClass.spec Type.React.CreateClass.maybe_known list -> + Type.React.CreateClass.stack_head + * Type.t list + * Type.React.CreateClass.spec Type.React.CreateClass.maybe_known list + + method unresolved_param : Context.t -> 'a -> Type.unresolved_param -> Type.unresolved_param + + method use_type : Context.t -> 'a -> Type.use_t -> Type.use_t + end + +val union_flatten : Context.t -> Type.t list -> Type.t list -val union_flatten: Context.t -> Type.t list -> Type.t list +val unwrap_type : Context.t -> Type.t -> Type.t diff --git a/src/typing/type_table.ml b/src/typing/type_table.ml deleted file mode 100644 index 2534c3dd0dc..00000000000 --- a/src/typing/type_table.ml +++ /dev/null @@ -1,139 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -type id_kind = - | PropertyAccess of Type.t (* receiver type *) - | Import of string (* remote name *) * Type.t (* module type *) - | Exists - | Other - -type name = string - -type 'a entry = name * 'a * id_kind - - -type type_entry = Type.t entry -type scheme_entry = Type.TypeScheme.t entry - -type t = { - (* This stores type information about expressions. Every expression in the program should have a - * type here. This means that there is some nesting, e.g. in the expression `5 + 4` there will be - * a type for `5`, a type for `4`, and a type for the entire addition expression. *) - coverage: (Loc.t, Type.TypeScheme.t) Hashtbl.t; - (* This stores type information about identifiers only. There should be no overlap or nesting of - * locations here. *) - type_info: (Loc.t, Type.TypeScheme.t entry) Hashtbl.t; - (* Keep a stack of the type parameters in scope and use it to create type schemes. *) - tparams: Type.typeparam list ref; - (* This stores type information for explicit type arguments to polymorphic - * functions. TODO once typed AST is available, this won't be necessary anymore. *) - targs: (Loc.t, Type.TypeScheme.t ) Hashtbl.t; -} - -let create () = { - coverage = Hashtbl.create 0; - type_info = Hashtbl.create 0; - tparams = ref []; - targs = Hashtbl.create 0; -} - -let set {coverage; tparams; _} loc type_ = - let scheme = { Type.TypeScheme.tparams = !tparams; type_ } in - Hashtbl.replace coverage loc scheme - -(* Insert a located tuple into the type_info hashtable (intended for type-at-pos). - * In certain contexts it is useful to allow the caller to provide some additional - * type parameters that should be in scope when reconstructing this type. See for - * example the case of generic functions. `extra_tparams` can be used to pass this - * additional environment. 
*) -let set_info ?extra_tparams loc (name, t, i) x = - let {type_info; tparams; _} = x in - let extra_tparams = Option.value ~default:[] extra_tparams in - let tparams = extra_tparams @ !tparams in - let scheme = { Type.TypeScheme.tparams; type_ = t } in - Hashtbl.replace type_info loc (name, scheme, i) - -let set_targ {targs; tparams; _} loc t = - let scheme = { Type.TypeScheme.tparams = !tparams; type_ = t } in - Hashtbl.replace targs loc scheme - -let fold_coverage f t init = Hashtbl.fold f t.coverage init - -let find_unsafe_coverage t k = Hashtbl.find t.coverage k - -let find_unsafe_coverage_type t k = - let s = Hashtbl.find t.coverage k in - s.Type.TypeScheme.type_ - -let find_unsafe_targ t k = Hashtbl.find t.targs k - -let reset {coverage; type_info; tparams; targs} = - Hashtbl.reset coverage; - Hashtbl.reset type_info; - tparams := []; - Hashtbl.reset targs - -let copy {coverage; type_info; tparams; targs} = { - coverage = Hashtbl.copy coverage; - type_info = Hashtbl.copy type_info; - tparams = ref !tparams; - targs = Hashtbl.copy targs; -} - -let with_typeparams new_tparams x f = - let old_tparams = !(x.tparams) in - x.tparams := new_tparams @ old_tparams; - let r = f () in - x.tparams := old_tparams; - r - -let find_type_info t loc = - match Hashtbl.find t.type_info loc with - | exception Not_found -> None - | x -> Some x - -let find_type_info_with_pred t pred = - Hashtbl.fold (fun k v a -> - if pred k then Some (k, v) else a - ) t.type_info None - -let type_info_hashtbl t = - t.type_info - -let coverage_to_list t = - let r = ref [] in - Hashtbl.iter (fun l t -> r := (l, t) :: !r) t.coverage; - !r - -let targs_to_list t = - let r = ref [] in - Hashtbl.iter (fun l t -> r := (l, t) :: !r) t.targs; - !r - -let targs_hashtbl t = - t.targs - -let coverage_hashtbl t = - t.coverage - -(** - * Use the loc for the function name in the types table. When the function - * has no name (i.e. for `export default function() ...`), generate a loc - * that will span the `function` keyword as a next-best-thing location. - *) -let function_decl_loc id loc = - match id with - | Some (loc, _) -> loc - | None -> - Loc.({ loc with - _end = { - line = loc.start.line; - (* len('function') is 8 *) - column = loc.start.column + 8; - offset = loc.start.offset + 8; - }; - }) diff --git a/src/typing/type_table.mli b/src/typing/type_table.mli deleted file mode 100644 index 303a61dbaaf..00000000000 --- a/src/typing/type_table.mli +++ /dev/null @@ -1,38 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -type id_kind = - | PropertyAccess of Type.t (* receiver type *) - | Import of string (* remote name *) * Type.t (* module type *) - | Exists - | Other - -type name = string -type scheme_entry = name * Type.TypeScheme.t * id_kind -type type_entry = name * Type.t * id_kind - -type t - -val create: unit -> t -val set: t -> Loc.t -> Type.t -> unit -val set_targ: t -> Loc.t -> Type.t -> unit -val set_info: ?extra_tparams:Type.typeparam list -> Loc.t -> type_entry -> t -> unit -val fold_coverage: (Loc.t -> Type.TypeScheme.t -> 'a -> 'a) -> t -> 'a -> 'a -val find_unsafe_coverage: t -> Loc.t -> Type.TypeScheme.t -val find_unsafe_coverage_type: t -> Loc.t -> Type.t -val find_unsafe_targ: t -> Loc.t -> Type.TypeScheme.t -val reset: t -> unit -val copy: t -> t -val with_typeparams: Type.typeparam list -> t -> (unit -> 'a) -> 'a -val find_type_info: t -> Loc.t -> scheme_entry option -val find_type_info_with_pred: t -> (Loc.t -> bool) -> (Loc.t * scheme_entry) option -val function_decl_loc : (Loc.t * 'a) option -> Loc.t -> Loc.t -val targs_hashtbl: t -> (Loc.t, Type.TypeScheme.t) Hashtbl.t -val targs_to_list: t -> (Loc.t * Type.TypeScheme.t) list -val coverage_to_list: t -> (Loc.t * Type.TypeScheme.t) list -val coverage_hashtbl: t -> (Loc.t, Type.TypeScheme.t) Hashtbl.t -val type_info_hashtbl: t -> (Loc.t, scheme_entry) Hashtbl.t diff --git a/src/typing/type_visitor.ml b/src/typing/type_visitor.ml index ffb7fa6c795..1151591490a 100644 --- a/src/typing/type_visitor.ml +++ b/src/typing/type_visitor.ml @@ -1,14 +1,14 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open Type -module P = Type.Polarity +module P = Polarity -let pole_TODO = Neutral +let pole_TODO = Polarity.Neutral (* We walk types in a lot of places for all kinds of things, but often most of the code is boilerplate. The following visitor class for types aims to @@ -18,985 +18,948 @@ let pole_TODO = Neutral WARNING: This is only a partial implementation, sufficient for current purposes but intended to be completed in a later diff. 
*) -class ['a] t = object(self) - method type_ cx pole (acc: 'a) = function - | OpenT (r, id) -> self#tvar cx pole acc r id - - | DefT (_, t) -> self#def_type cx pole acc t - - | InternalT (ChoiceKitT (_, Trigger)) -> acc - - | TypeDestructorTriggerT (_, _, _, d, t) -> - let acc = self#destructor cx acc d in - let acc = self#type_ cx pole acc t in - acc - - | FunProtoT _ - | FunProtoApplyT _ - | FunProtoBindT _ - | FunProtoCallT _ - | ObjProtoT _ - | NullProtoT _ - -> acc - - | CustomFunT (_, kind) -> self#custom_fun_kind cx acc kind - - | EvalT (t, defer_use_t, id) -> - let acc = self#type_ cx Positive acc t in - let acc = self#defer_use_type cx acc defer_use_t in - let acc = - let pole = match defer_use_t, t with - | DestructuringT _, _ -> pole - | TypeDestructorT _, OpenT _ -> Neutral - | TypeDestructorT _, _ -> Positive - in - self#eval_id cx pole acc id - in - acc - - | BoundT _ -> acc - - | ExistsT _ -> acc - - | ExactT (_, t) -> self#type_ cx pole acc t - - | AnyWithLowerBoundT t - | AnyWithUpperBoundT t -> self#type_ cx pole acc t - - | MergedT (_, uses) -> - List.fold_left (self#use_type_ cx) acc uses - - | ShapeT t -> self#type_ cx pole acc t - - | MatchingPropT (_, _, t) -> self#type_ cx pole_TODO acc t - - | KeysT (_, t) -> self#type_ cx Positive acc t - - | AnnotT (_, t, _) -> self#type_ cx Positive acc t - - | OpaqueT (_, ot) -> - let { - opaque_id = _; - underlying_t; - super_t; - opaque_type_args; - opaque_name = _; - } = ot in - let acc = self#list (fun acc (_, _, t, pole') -> - self#type_ cx (P.mult (pole, pole')) acc t - ) acc opaque_type_args in - let acc = self#opt (self#type_ cx pole) acc underlying_t in - let acc = self#opt (self#type_ cx pole) acc super_t in - acc - - | ModuleT (_, exporttypes, _) -> - self#export_types cx pole acc exporttypes - - | InternalT (ExtendsT (_, t1, t2)) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc +class ['a] t = + object (self) + method type_ cx pole (acc : 'a) = + function + | OpenT (r, id) -> self#tvar cx pole acc r id + | DefT (_, _, t) -> self#def_type cx pole acc t + | InternalT (ChoiceKitT (_, Trigger)) -> acc + | TypeDestructorTriggerT (_, _, _, d, t) -> + let acc = self#destructor cx acc d in + let acc = self#type_ cx pole acc t in + acc + | FunProtoT _ + | FunProtoApplyT _ + | FunProtoBindT _ + | FunProtoCallT _ + | ObjProtoT _ + | NullProtoT _ -> + acc + | CustomFunT (_, kind) -> self#custom_fun_kind cx acc kind + | EvalT (t, defer_use_t, id) -> + let acc = self#type_ cx P.Positive acc t in + let acc = self#defer_use_type cx acc defer_use_t in + let acc = + let pole = + match (defer_use_t, t) with + | (LatentPredT _, _) -> pole + | (TypeDestructorT _, OpenT _) -> P.Neutral + | (TypeDestructorT _, _) -> P.Positive + in + self#eval_id cx pole acc id + in + acc + | BoundT _ -> acc + | ExistsT _ -> acc + | ExactT (_, t) -> self#type_ cx pole acc t + | MergedT (_, uses) -> List.fold_left (self#use_type_ cx) acc uses + | ShapeT t -> self#type_ cx pole acc t + | MatchingPropT (_, _, t) -> self#type_ cx pole_TODO acc t + | KeysT (_, t) -> self#type_ cx P.Positive acc t + | AnnotT (_, t, _) -> self#type_ cx P.Positive acc t + | OpaqueT (_, ot) -> + let { opaque_id = _; underlying_t; super_t; opaque_type_args; opaque_name = _ } = ot in + let acc = + self#list + (fun acc (_, _, t, pole') -> self#type_ cx (P.mult (pole, pole')) acc t) + acc + opaque_type_args + in + let acc = self#opt (self#type_ cx pole) acc underlying_t in + let acc = self#opt (self#type_ cx pole) acc super_t in + 
acc + | ModuleT (_, exporttypes, _) -> self#export_types cx pole acc exporttypes + | InternalT (ExtendsT (_, t1, t2)) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | OpenPredT (_, t, p_map, n_map) -> + let acc = self#type_ cx pole acc t in + let acc = self#list (self#predicate cx) acc (Key_map.values p_map) in + let acc = self#list (self#predicate cx) acc (Key_map.values n_map) in + acc + | ThisClassT (_, t) -> self#type_ cx pole acc t + | ThisTypeAppT (_, t, this, ts_opt) -> + let acc = self#type_ cx P.Positive acc t in + let acc = self#type_ cx pole acc this in + (* If we knew what `t` resolved to, we could determine the polarities for + `ts`, but in general `t` might be unresolved. Subclasses which have more + information should override this to be more specific. *) + let acc = self#opt (self#list (self#type_ cx pole_TODO)) acc ts_opt in + acc + | TypeAppT (_, _, t, ts) -> + let acc = self#type_ cx P.Positive acc t in + (* If we knew what `t` resolved to, we could determine the polarities for + `ts`, but in general `t` might be unresolved. Subclasses which have more + information should override this to be more specific. *) + let acc = self#list (self#type_ cx pole_TODO) acc ts in + acc + | ReposT (_, t) + | InternalT (ReposUpperT (_, t)) -> + self#type_ cx pole acc t + | AnyT _ + | InternalT (OptionalChainVoidT _) -> + acc + | OptionalT (_, t) + | MaybeT (_, t) -> + self#type_ cx pole acc t + | IntersectionT (_, rep) -> self#list (self#type_ cx pole) acc (InterRep.members rep) + | UnionT (_, rep) -> self#list (self#type_ cx pole) acc (UnionRep.members rep) + + method def_type cx pole acc = + function + | NumT _ + | StrT _ + | BoolT _ + | EmptyT _ + | MixedT _ + | NullT + | VoidT -> + acc + | FunT (static, prototype, funtype) -> + let acc = self#type_ cx pole acc static in + let acc = self#type_ cx pole_TODO acc prototype in + let acc = self#fun_type cx pole acc funtype in + acc + | ObjT objtype -> self#obj_type cx pole acc objtype + | ArrT arrtype -> self#arr_type cx pole acc arrtype + | CharSetT _ -> acc + | ClassT t -> self#type_ cx pole acc t + | InstanceT (static, super, implements, insttype) -> + let acc = self#type_ cx pole acc static in + let acc = self#type_ cx pole acc super in + let acc = self#list (self#type_ cx pole_TODO) acc implements in + let acc = self#inst_type cx pole acc insttype in + acc + | SingletonStrT _ + | SingletonNumT _ + | SingletonBoolT _ -> + acc + | TypeT (_, t) -> self#type_ cx pole acc t + | PolyT (_, xs, t, _) -> + let acc = self#nel (self#type_param cx pole) acc xs in + let acc = self#type_ cx pole acc t in + acc + | IdxWrapper t -> self#type_ cx pole acc t + | ReactAbstractComponentT { config; instance } -> + let acc = self#type_ cx (P.inv pole) acc config in + self#type_ cx pole acc instance + + method targ cx pole acc = + function + | ImplicitArg _ -> acc + | ExplicitArg t -> self#type_ cx pole acc t + + method private defer_use_type cx acc = + function + | LatentPredT (_, p) -> self#predicate cx acc p + | TypeDestructorT (_, _, d) -> self#destructor cx acc d + + method private selector cx acc = + function + | Prop _ -> acc + | Elem key -> self#type_ cx pole_TODO acc key + | ObjRest _ -> acc + | ArrRest _ -> acc + | Default -> acc + + method private predicate cx acc = + function + | AndP (p1, p2) -> self#list (self#predicate cx) acc [p1; p2] + | OrP (p1, p2) -> self#list (self#predicate cx) acc [p1; p2] + | NotP p -> self#predicate cx acc p + | LeftP (_, t) -> self#type_ cx P.Positive acc t + 
| RightP (_, t) -> self#type_ cx P.Positive acc t + | ExistsP _ -> acc + | NullP -> acc + | MaybeP -> acc + | SingletonBoolP _ -> acc + | SingletonStrP _ -> acc + | SingletonNumP _ -> acc + | BoolP -> acc + | FunP -> acc + | NumP -> acc + | ObjP -> acc + | StrP -> acc + | SymbolP -> acc + | VoidP -> acc + | ArrP -> acc + | PropExistsP _ -> acc + | LatentP (t, _) -> self#type_ cx P.Positive acc t + + method destructor cx acc = + function + | NonMaybeType + | PropertyType _ + | ValuesType + | ReadOnlyType + | ReactElementPropsType + | ReactElementConfigType + | ReactElementRefType -> + acc + | ReactConfigType default_props -> self#type_ cx pole_TODO acc default_props + | ElementType t -> self#type_ cx pole_TODO acc t + | Bind t -> self#type_ cx pole_TODO acc t + | SpreadType (_, ts, head_slice) -> + let acc = self#list (self#object_kit_spread_operand cx) acc ts in + self#opt (self#object_kit_spread_operand_slice cx) acc head_slice + | RestType (_, t) -> self#type_ cx pole_TODO acc t + | CallType args -> self#list (self#type_ cx pole_TODO) acc args + | TypeMap map -> self#type_map cx acc map + + method private custom_fun_kind cx acc = + function + | ReactPropType (React.PropType.Primitive (_, t)) + | ReactElementFactory t -> + self#type_ cx pole_TODO acc t + | ObjectAssign + | ObjectGetPrototypeOf + | ObjectSetPrototypeOf + | Compose _ + | ReactPropType _ + | ReactCreateClass + | ReactCreateElement + | ReactCloneElement + | Idx + | TypeAssertIs + | TypeAssertThrows + | TypeAssertWraps + | DebugPrint + | DebugThrow + | DebugSleep -> + acc - | OpenPredT (_ , t, p_map, n_map) -> - let acc = self#type_ cx pole acc t in - let acc = self#list (self#predicate cx) acc (Key_map.values p_map) in - let acc = self#list (self#predicate cx) acc (Key_map.values n_map) in - acc + method use_type_ cx (acc : 'a) = + function + | UseT (_, t) -> self#type_ cx P.Negative acc t + | BindT (_, _, fn, _) + | CallT (_, _, fn) -> + self#fun_call_type cx acc fn + | MethodT (_, _, _, p, fn, prop_t) -> + let acc = self#propref cx acc p in + let acc = self#fun_call_type cx acc fn in + let acc = self#opt (self#type_ cx pole_TODO) acc prop_t in + acc + | SetPropT (_, _, p, _, _, t, prop_t) -> + let acc = self#propref cx acc p in + let acc = self#type_ cx pole_TODO acc t in + let acc = self#opt (self#type_ cx pole_TODO) acc prop_t in + acc + | GetPropT (_, _, p, t) + | MatchPropT (_, _, p, t) + | TestPropT (_, _, p, t) -> + let acc = self#propref cx acc p in + let acc = self#type_ cx pole_TODO acc t in + acc + | SetPrivatePropT (_, _, _, _, scopes, _, t, prop_t) -> + let acc = List.fold_left (self#class_binding cx) acc scopes in + let acc = self#type_ cx pole_TODO acc t in + let acc = self#opt (self#type_ cx pole_TODO) acc prop_t in + acc + | GetPrivatePropT (_, _, _, scopes, _, t) -> + let acc = List.fold_left (self#class_binding cx) acc scopes in + let acc = self#type_ cx pole_TODO acc t in + acc + | SetElemT (_, _, e, _, tin, tout) -> + let acc = self#type_ cx pole_TODO acc e in + let acc = self#type_ cx pole_TODO acc tin in + let acc = self#opt (self#type_ cx pole_TODO) acc tout in + acc + | GetElemT (_, _, e, t) -> + let acc = self#type_ cx pole_TODO acc e in + let acc = self#type_ cx pole_TODO acc t in + acc + | CallElemT (_, _, t, fn) -> + let acc = self#type_ cx pole_TODO acc t in + let acc = self#fun_call_type cx acc fn in + acc + | GetStaticsT (_, t) + | GetProtoT (_, t) + | SetProtoT (_, t) -> + self#type_ cx pole_TODO acc t + | ReposLowerT (_, _, u) -> self#use_type_ cx acc u + | ReposUseT (_, _, _, t) -> 
self#type_ cx pole_TODO acc t + | ConstructorT (_, _, targs, args, t) -> + let acc = Option.fold ~init:acc ~f:(List.fold_left (self#targ cx pole_TODO)) targs in + let acc = List.fold_left (self#call_arg cx) acc args in + let acc = self#type_ cx pole_TODO acc t in + acc + | SuperT (_, _, Derived { own; proto; static }) -> + let acc = self#smap (self#prop cx pole_TODO) acc own in + let acc = self#smap (self#prop cx pole_TODO) acc proto in + let acc = self#smap (self#prop cx pole_TODO) acc static in + acc + | ImplementsT (_, t) -> self#type_ cx pole_TODO acc t + | MixinT (_, t) -> self#type_ cx pole_TODO acc t + | ToStringT (_, t) -> self#use_type_ cx acc t + | AdderT (_, _, _, a, b) -> + let acc = self#type_ cx pole_TODO acc a in + let acc = self#type_ cx pole_TODO acc b in + acc + | ComparatorT (_, _, t) -> self#type_ cx pole_TODO acc t + | UnaryMinusT (_, t) -> self#type_ cx pole_TODO acc t + | AssertArithmeticOperandT _ + | AssertBinaryInLHST _ + | AssertBinaryInRHST _ + | AssertForInRHST _ -> + acc + | PredicateT (predicate, t) -> + let acc = self#predicate cx acc predicate in + let acc = self#type_ cx pole_TODO acc t in + acc + | GuardT (predicate, t1, t2) -> + let acc = self#predicate cx acc predicate in + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | EqT (_, _, t) + | NotT (_, t) -> + self#type_ cx pole_TODO acc t + | AndT (_, a, b) + | OrT (_, a, b) + | NullishCoalesceT (_, a, b) -> + let acc = self#type_ cx pole_TODO acc a in + let acc = self#type_ cx pole_TODO acc b in + acc + | SpecializeT (_, _, _, _, ts, t) -> + let acc = self#opt (List.fold_left (self#type_ cx pole_TODO)) acc ts in + let acc = self#type_ cx pole_TODO acc t in + acc + | ThisSpecializeT (_, this, k) -> + let acc = self#type_ cx pole_TODO acc this in + let acc = self#cont cx acc k in + acc + | VarianceCheckT (_, ts, _) -> List.fold_left (self#type_ cx pole_TODO) acc ts + | TypeAppVarianceCheckT (_, _, _, ts) -> + List.fold_left + (fun acc (a, b) -> + let acc = self#type_ cx pole_TODO acc a in + let acc = self#type_ cx pole_TODO acc b in + acc) + acc + ts + | ConcretizeTypeAppsT (_, (ts1, _, _), (t2, ts2, _, _), _) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts1 in + let acc = self#type_ cx pole_TODO acc t2 in + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts2 in + acc + | LookupT (_, kind, ts, prop, action) -> + let acc = self#lookup_kind cx acc kind in + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in + let acc = self#propref cx acc prop in + let acc = self#lookup_action cx acc action in + acc + | ObjAssignToT (_, _, t1, t2, _) + | ObjAssignFromT (_, _, t1, t2, _) + | ObjTestT (_, t1, t2) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | ObjTestProtoT (_, t) -> self#type_ cx pole_TODO acc t + | ObjFreezeT (_, t) + | ObjRestT (_, _, t) + | ObjSealT (_, t) + | ArrRestT (_, _, _, t) -> + self#type_ cx pole_TODO acc t + | UnifyT (t1, t2) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | BecomeT (_, t) -> self#type_ cx pole_TODO acc t + | GetKeysT (_, t) -> self#use_type_ cx acc t + | GetValuesT (_, t) -> self#type_ cx pole_TODO acc t + | HasOwnPropT _ -> acc + | ElemT (_, _, t, action) -> + let acc = self#type_ cx pole_TODO acc t in + let acc = self#elem_action cx acc action in + acc + | MakeExactT (_, cont) -> self#cont cx acc cont + | CJSRequireT (_, t, _) + | ImportModuleNsT (_, t, _) + | ImportDefaultT (_, _, _, 
t, _) + | ImportNamedT (_, _, _, _, t, _) + | ImportTypeT (_, _, t) + | ImportTypeofT (_, _, t) -> + self#type_ cx P.Negative acc t + | AssertImportIsValueT _ -> acc + | CJSExtractNamedExportsT (_, (_, ts, _), t) -> + let acc = self#export_types cx pole_TODO acc ts in + let acc = self#type_ cx pole_TODO acc t in + acc + | CopyNamedExportsT (_, t, tout) + | CopyTypeExportsT (_, t, tout) + | ExportTypeT (_, _, _, t, tout) -> + let acc = self#type_ cx pole_TODO acc t in + let acc = self#type_ cx pole_TODO acc tout in + acc + | AssertExportIsTypeT (_, _, tout) -> + let acc = self#type_ cx pole_TODO acc tout in + acc + | ExportNamedT (_, _, ts, _, tout) -> + let visit_pair acc (_loc, t) = self#type_ cx pole_TODO acc t in + let acc = self#smap visit_pair acc ts in + let acc = self#type_ cx pole_TODO acc tout in + acc + | MapTypeT (_, _, map, tout) -> + let acc = self#type_map cx acc map in + let acc = self#type_ cx pole_TODO acc tout in + acc + | ReactKitT (_, _, tool) -> + (match tool with + | React.GetProps t + | React.GetConfig t + | React.GetRef t -> + self#type_ cx pole_TODO acc t + | React.GetConfigType (default_props, t) -> + let acc = self#type_ cx pole_TODO acc default_props in + self#type_ cx pole_TODO acc t + | React.CreateElement0 (_, config, (children, children_spread), tout) -> + let acc = self#type_ cx pole_TODO acc config in + let acc = List.fold_left (self#type_ cx pole_TODO) acc children in + let acc = self#opt (self#type_ cx pole_TODO) acc children_spread in + let acc = self#type_ cx pole_TODO acc tout in + acc + | React.CreateElement (_, component, config, (children, children_spread), tout) -> + let acc = self#type_ cx pole_TODO acc component in + let acc = self#type_ cx pole_TODO acc config in + let acc = List.fold_left (self#type_ cx pole_TODO) acc children in + let acc = self#opt (self#type_ cx pole_TODO) acc children_spread in + let acc = self#type_ cx pole_TODO acc tout in + acc + | React.ConfigCheck config -> self#type_ cx pole_TODO acc config + | React.SimplifyPropType (tool, t) -> + React.( + React.SimplifyPropType.( + let acc = + match tool with + | ArrayOf + | InstanceOf + | ObjectOf -> + acc + | OneOf r + | OneOfType r -> + (match r with + | ResolveArray -> acc + | ResolveElem (ts1, ts2) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts1 in + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts2 in + acc) + | Shape o -> self#react_resolve_object cx acc o + in + let acc = self#type_ cx pole_TODO acc t in + acc)) + | React.CreateClass (tool, knot, tout) -> + let acc = self#react_create_class_tool cx acc tool in + let acc = self#react_create_class_knot cx acc knot in + let acc = self#type_ cx pole_TODO acc tout in + acc) + | ObjKitT (_, _, resolve_tool, tool, tout) -> + Object.( + let acc = + match resolve_tool with + | Resolve r -> self#object_kit_resolve cx acc r + | Super (s, r) -> + let acc = self#object_kit_slice cx acc s in + let acc = self#object_kit_resolve cx acc r in + acc + in + let acc = + match tool with + | ReadOnly -> acc + | ObjectRep -> acc + | Spread (_, state) -> + Object.Spread.( + let { todo_rev; acc = object_spread_acc } = state in + let acc = List.fold_left (self#object_kit_spread_operand cx) acc todo_rev in + let acc = List.fold_left (self#object_kit_acc_element cx) acc object_spread_acc in + acc) + | Rest (_, state) -> + Object.Rest.( + (match state with + | One t -> self#type_ cx pole_TODO acc t + | Done o -> Nel.fold_left (self#object_kit_slice cx) acc o)) + | ReactConfig state -> + Object.ReactConfig.( + (match state with 
+ | Config { defaults; children } -> + let acc = self#opt (self#type_ cx pole_TODO) acc defaults in + let acc = self#opt (self#type_ cx pole_TODO) acc children in + acc + | Defaults { config; children } -> + let acc = Nel.fold_left (self#object_kit_slice cx) acc config in + let acc = self#opt (self#type_ cx pole_TODO) acc children in + acc)) + in + let acc = self#type_ cx pole_TODO acc tout in + acc) + | DebugPrintT _ -> acc + | DebugSleepT _ -> acc + | SentinelPropTestT (_, t, _, _, _, tout) -> + let acc = self#type_ cx pole_TODO acc t in + let acc = self#type_ cx pole_TODO acc tout in + acc + | IdxUnwrap (_, tout) + | IdxUnMaybeifyT (_, tout) -> + self#type_ cx pole_TODO acc tout + | OptionalChainT (_, _, uses) -> + Nel.fold_left + (fun acc (use, tout) -> self#use_type_ cx acc (apply_opt_use use tout)) + acc + uses + | InvariantT _ -> acc + | CallLatentPredT (_, _, _, t1, t2) + | CallOpenPredT (_, _, _, t1, t2) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | SubstOnPredT (_, _, t) -> self#type_ cx pole_TODO acc t + | RefineT (_, predicate, t) -> + let acc = self#predicate cx acc predicate in + let acc = self#type_ cx pole_TODO acc t in + acc + | ReactPropsToOut (_, t) + | ReactInToProps (_, t) -> + self#type_ cx pole_TODO acc t + | ResolveSpreadT (_, _, { rrt_resolved; rrt_unresolved; rrt_resolve_to }) -> + let acc = + List.fold_left + (fun (acc : 'a) -> function + | ResolvedArg t -> self#type_ cx pole_TODO acc t + | ResolvedAnySpreadArg _ -> acc + | ResolvedSpreadArg (_, arr) -> self#arr_type cx pole_TODO acc arr) + acc + rrt_resolved + in + let acc = + List.fold_left + (fun acc -> function + | UnresolvedArg t + | UnresolvedSpreadArg t -> + self#type_ cx pole_TODO acc t) + acc + rrt_unresolved + in + let acc = + match rrt_resolve_to with + | ResolveSpreadsToTuple (_, t1, t2) + | ResolveSpreadsToArrayLiteral (_, t1, t2) + | ResolveSpreadsToArray (t1, t2) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | ResolveSpreadsToMultiflowCallFull (_, fn) + | ResolveSpreadsToMultiflowSubtypeFull (_, fn) -> + self#fun_type cx pole_TODO acc fn + | ResolveSpreadsToCustomFunCall (_, kind, t) -> + let acc = self#custom_fun_kind cx acc kind in + let acc = self#type_ cx pole_TODO acc t in + acc + | ResolveSpreadsToMultiflowPartial (_, fn, _, t) -> + let acc = self#fun_type cx pole_TODO acc fn in + let acc = self#type_ cx pole_TODO acc t in + acc + | ResolveSpreadsToCallT (fn, t) -> + let acc = self#fun_call_type cx acc fn in + let acc = self#type_ cx pole_TODO acc t in + acc + in + acc + | CondT (_, then_t_opt, else_t, tout) -> + let acc = self#opt (self#type_ cx pole_TODO) acc then_t_opt in + let acc = self#type_ cx pole_TODO acc else_t in + let acc = self#type_ cx pole_TODO acc tout in + acc + | ChoiceKitUseT (_, tool) -> self#choice_use_tool cx acc tool + | ExtendsUseT (_, _, ts, t1, t2) -> + let acc = self#list (self#type_ cx pole_TODO) acc ts in + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc + | IntersectionPreprocessKitT (_, tool) -> + (match tool with + | ConcretizeTypes (ts1, ts2, t, use) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts1 in + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts2 in + let acc = self#type_ cx pole_TODO acc t in + let acc = self#use_type_ cx acc use in + acc + | SentinelPropTest (_, _, t1, t2, t3) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 
in + let acc = self#type_ cx pole_TODO acc t3 in + acc + | PropExistsTest (_, _, t1, t2) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in + acc) + | DestructuringT (_, _, s, tout) -> + let acc = self#selector cx acc s in + let acc = self#type_ cx pole_TODO acc tout in + acc + | ModuleExportsAssignT (_, t, tout) -> + let acc = self#type_ cx pole_TODO acc t in + let acc = self#type_ cx pole_TODO acc tout in + acc - | ThisClassT (_, t) -> self#type_ cx pole acc t + (* The default behavior here could be fleshed out a bit, to look up the graph, + handle Resolved and Unresolved cases, etc. *) + method tvar _cx _pole acc _r _id = acc - | ThisTypeAppT (_, t, this, ts_opt) -> - let acc = self#type_ cx pole acc t in - let acc = self#type_ cx pole acc this in - let acc = self#opt (self#list (self#type_ cx pole_TODO)) acc ts_opt in - acc + method dict_type cx pole acc d = + let { dict_name = _; key; value; dict_polarity = p } = d in + let acc = self#type_ cx pole_TODO acc key in + let acc = self#type_ cx (P.mult (pole, p)) acc value in + acc - | ReposT (_, t) - | InternalT (ReposUpperT (_, t)) -> - self#type_ cx pole acc t + method props cx pole acc id = Context.find_props cx id |> self#smap (self#prop cx pole) acc + + method prop cx pole acc = + function + | Field (_, t, p) -> self#type_ cx (P.mult (pole, p)) acc t + | Method (_, t) -> self#type_ cx pole acc t + | Get (_, t) -> self#type_ cx pole acc t + | Set (_, t) -> self#type_ cx (P.inv pole) acc t + | GetSet (_, t1, _, t2) -> + let acc = self#type_ cx pole acc t1 in + let acc = self#type_ cx (P.inv pole) acc t2 in + acc - | InternalT (OptionalChainVoidT _) -> acc + method call_prop cx pole acc id = + let t = Context.find_call cx id in + self#type_ cx pole acc t - method def_type cx pole acc = function - | AnyT - | NumT _ - | StrT _ - | BoolT _ - | EmptyT - | MixedT _ - | NullT - | VoidT - | AnyObjT - | AnyFunT - -> acc + method exports cx pole acc id = + let visit_pair acc (_loc, t) = self#type_ cx pole acc t in + Context.find_exports cx id |> self#smap visit_pair acc - | FunT (static, prototype, funtype) -> - let acc = self#type_ cx pole acc static in - let acc = self#type_ cx pole_TODO acc prototype in - let acc = self#fun_type cx pole acc funtype in - acc + method eval_id cx pole acc id = + match IMap.get id (Context.evaluated cx) with + | None -> acc + | Some t -> self#type_ cx pole acc t + + method private type_param cx pole acc tp = + let { reason = _; name = _; bound; default; polarity = p } = tp in + let pole = P.mult (pole, p) in + let acc = self#type_ cx pole acc bound in + self#opt (self#type_ cx pole) acc default + + method fun_type cx pole acc ft = + let { + this_t; + params; + rest_param; + return_t; + closure_t = _; + is_predicate = _; + changeset = _; + def_reason = _; + } = + ft + in + let acc = self#type_ cx pole acc this_t in + let acc = self#list (fun acc (_, t) -> self#type_ cx (P.inv pole) acc t) acc params in + let acc = self#opt (fun acc (_, _, t) -> self#type_ cx (P.inv pole) acc t) acc rest_param in + let acc = self#type_ cx pole acc return_t in + acc - | ObjT objtype -> self#obj_type cx pole acc objtype + method private obj_type cx pole acc o = + let { dict_t; props_tmap; proto_t; call_t; flags = _ } = o in + let acc = self#opt (self#dict_type cx pole) acc dict_t in + let acc = self#props cx pole acc props_tmap in + let acc = self#type_ cx pole acc proto_t in + let acc = self#opt (self#call_prop cx pole) acc call_t in + acc - | ArrT (arrtype) -> self#arr_type cx pole acc 
arrtype + method private arr_type cx pole acc = + function + | ArrayAT (t, None) -> self#type_ cx P.Neutral acc t + | ArrayAT (t, Some ts) + | TupleAT (t, ts) -> + let acc = self#type_ cx P.Neutral acc t in + let acc = self#list (self#type_ cx P.Neutral) acc ts in + acc + | ROArrayAT t -> self#type_ cx pole acc t + + method private inst_type cx pole acc i = + let { + class_id = _; + type_args; + own_props; + proto_props; + inst_call_t; + initialized_fields = _; + initialized_static_fields = _; + has_unknown_react_mixins = _; + inst_kind = _; + } = + i + in + let acc = + self#list + (fun acc (_, _, t, pole') -> self#type_ cx (P.mult (pole, pole')) acc t) + acc + type_args + in + let acc = self#props cx pole acc own_props in + let acc = self#props cx pole acc proto_props in + let acc = self#opt (self#call_prop cx pole) acc inst_call_t in + acc - | CharSetT _ -> acc + method private export_types cx pole acc e = + let { exports_tmap; cjs_export; has_every_named_export = _ } = e in + let acc = self#exports cx pole acc exports_tmap in + let acc = self#opt (self#type_ cx pole) acc cjs_export in + acc - | ClassT t -> self#type_ cx pole acc t + method private fun_call_type cx acc call = + let { + call_this_t; + call_targs; + call_args_tlist; + call_tout; + call_closure_t = _; + call_strict_arity = _; + } = + call + in + let acc = self#type_ cx pole_TODO acc call_this_t in + let acc = self#opt (self#list (self#targ cx pole_TODO)) acc call_targs in + let acc = self#list (self#call_arg cx) acc call_args_tlist in + let acc = self#type_ cx pole_TODO acc call_tout in + acc - | InstanceT (static, super, implements, insttype) -> - let acc = self#type_ cx pole acc static in - let acc = self#type_ cx pole acc super in - let acc = self#list (self#type_ cx pole_TODO) acc implements in - let acc = self#inst_type cx pole acc insttype in - acc + method private propref cx acc = + function + | Named _ -> acc + | Computed t -> self#type_ cx pole_TODO acc t - | SingletonStrT _ - | SingletonNumT _ - | SingletonBoolT _ -> acc + method private class_binding cx acc { class_private_fields; class_private_static_fields; _ } = + let acc = self#props cx pole_TODO acc class_private_fields in + let acc = self#props cx pole_TODO acc class_private_static_fields in + acc - | TypeT (_, t) -> self#type_ cx pole acc t + method private call_arg cx acc = + function + | Arg t -> self#type_ cx pole_TODO acc t + | SpreadArg t -> self#type_ cx pole_TODO acc t - | OptionalT t -> self#type_ cx pole acc t - - | PolyT (xs, t, _) -> - let acc = self#list (self#type_param cx pole) acc xs in - let acc = self#type_ cx pole acc t in - acc - - | TypeAppT (_, t, ts) -> - let acc = self#type_ cx Positive acc t in - (* If we knew what `t` resolved to, we could determine the polarities for - `ts`, but in general `t` might be unresolved. Subclasses which have more - information should override this to be more specific. 
*) - let acc = self#list (self#type_ cx pole_TODO) acc ts in - acc - - | MaybeT t -> self#type_ cx pole acc t - - | IntersectionT rep -> - self#list (self#type_ cx pole) acc (InterRep.members rep) - - | UnionT rep -> - self#list (self#type_ cx pole) acc (UnionRep.members rep) - - | IdxWrapper t -> - self#type_ cx pole acc t - - - method private defer_use_type cx acc = function - | DestructuringT (_, s) -> self#selector cx acc s - | TypeDestructorT (_, _, d) -> self#destructor cx acc d - - method private selector cx acc = function - | Prop _ -> acc - | Elem key -> self#type_ cx pole_TODO acc key - | ObjRest _ -> acc - | ArrRest _ -> acc - | Default -> acc - | Become -> acc - | Refine p -> self#predicate cx acc p - - method private predicate cx acc = function - | AndP (p1, p2) -> self#list (self#predicate cx) acc [p1;p2] - | OrP (p1, p2) -> self#list (self#predicate cx) acc [p1;p2] - | NotP p -> self#predicate cx acc p - | LeftP (_, t) -> self#type_ cx Positive acc t - | RightP (_, t) -> self#type_ cx Positive acc t - | ExistsP _ -> acc - | NullP -> acc - | MaybeP -> acc - | SingletonBoolP _ -> acc - | SingletonStrP _ -> acc - | SingletonNumP _ -> acc - | BoolP -> acc - | FunP -> acc - | NumP -> acc - | ObjP -> acc - | StrP -> acc - | VoidP -> acc - | ArrP -> acc - | PropExistsP _ -> acc - | LatentP (t, _) -> self#type_ cx Positive acc t - - method destructor cx acc = function - | NonMaybeType - | PropertyType _ - | ValuesType - | ReadOnlyType - | ReactElementPropsType - | ReactElementConfigType - | ReactElementRefType - -> acc - | ElementType t -> self#type_ cx pole_TODO acc t - | Bind t -> self#type_ cx pole_TODO acc t - | SpreadType (_, ts) -> self#list (self#type_ cx pole_TODO) acc ts - | RestType (_,t) -> self#type_ cx pole_TODO acc t - | CallType args -> self#list (self#type_ cx pole_TODO) acc args - | TypeMap map -> self#type_map cx acc map - - method private custom_fun_kind cx acc = function - | ReactPropType (React.PropType.Primitive (_, t)) - | ReactElementFactory t - -> self#type_ cx pole_TODO acc t - | ObjectAssign - | ObjectGetPrototypeOf - | ObjectSetPrototypeOf - | Compose _ - | ReactPropType _ - | ReactCreateClass - | ReactCreateElement - | ReactCloneElement - | Idx - | TypeAssertIs - | TypeAssertThrows - | TypeAssertWraps - | DebugPrint - | DebugThrow - | DebugSleep - -> acc - - method use_type_ cx (acc: 'a) = function - | UseT (_, t) -> - self#type_ cx Negative acc t - - | BindT (_, _, fn, _) - | CallT (_, _, fn) -> - self#fun_call_type cx acc fn - - | MethodT (_, _, _, p, fn, prop_t) -> - let acc = self#propref cx acc p in - let acc = self#fun_call_type cx acc fn in - let acc = self#opt (self#type_ cx pole_TODO) acc prop_t in - acc - - | SetPropT (_, _, p, _, t, prop_t) -> - let acc = self#propref cx acc p in - let acc = self#type_ cx pole_TODO acc t in - let acc = self#opt (self#type_ cx pole_TODO) acc prop_t in - acc - - | GetPropT (_, _, p, t) - | MatchPropT(_, _, p, t) - | TestPropT (_, _, p, t) -> - let acc = self#propref cx acc p in - let acc = self#type_ cx pole_TODO acc t in - acc - - | SetPrivatePropT (_, _, _, scopes, _, t, prop_t) -> - let acc = List.fold_left (self#class_binding cx) acc scopes in - let acc = self#type_ cx pole_TODO acc t in - let acc = self#opt (self#type_ cx pole_TODO) acc prop_t in - acc - - | GetPrivatePropT (_, _, _, scopes, _, t) -> - let acc = List.fold_left (self#class_binding cx) acc scopes in - let acc = self#type_ cx pole_TODO acc t in - acc - - | SetElemT (_, _, e, tin, tout) -> - let acc = self#type_ cx pole_TODO acc e in - let acc 
= self#type_ cx pole_TODO acc tin in - let acc = self#opt (self#type_ cx pole_TODO) acc tout in - acc - - | GetElemT (_, _, e, t) -> - let acc = self#type_ cx pole_TODO acc e in - let acc = self#type_ cx pole_TODO acc t in - acc - - | CallElemT (_, _, t, fn) -> - let acc = self#type_ cx pole_TODO acc t in - let acc = self#fun_call_type cx acc fn in - acc - - | GetStaticsT (_, t) - | GetProtoT (_, t) - | SetProtoT (_, t) -> - self#type_ cx pole_TODO acc t - - | ReposLowerT (_, _, u) -> self#use_type_ cx acc u - | ReposUseT (_, _, _, t) -> self#type_ cx pole_TODO acc t - - | ConstructorT (_, _, targs, args, t) -> - let acc = Option.fold ~init:acc ~f:(List.fold_left (self#type_ cx pole_TODO)) targs in - let acc = List.fold_left (self#call_arg cx) acc args in - let acc = self#type_ cx pole_TODO acc t in - acc - - | SuperT (_, _, Derived {own; proto; static}) -> - let acc = self#smap (self#prop cx pole_TODO) acc own in - let acc = self#smap (self#prop cx pole_TODO) acc proto in - let acc = self#smap (self#prop cx pole_TODO) acc static in - acc - - | ImplementsT (_, t) -> self#type_ cx pole_TODO acc t - | MixinT (_, t) -> self#type_ cx pole_TODO acc t - | ToStringT (_, t) -> self#use_type_ cx acc t - - | AdderT (_, _, _, a, b) -> - let acc = self#type_ cx pole_TODO acc a in - let acc = self#type_ cx pole_TODO acc b in - acc - - | ComparatorT (_, _, t) -> self#type_ cx pole_TODO acc t - | UnaryMinusT (_, t) -> self#type_ cx pole_TODO acc t - - | AssertArithmeticOperandT _ - | AssertBinaryInLHST _ - | AssertBinaryInRHST _ - | AssertForInRHST _ - | AssertRestParamT _ -> acc - - | PredicateT (predicate, t) -> - let acc = self#predicate cx acc predicate in - let acc = self#type_ cx pole_TODO acc t in - acc - - | GuardT (predicate, t1, t2) -> - let acc = self#predicate cx acc predicate in - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc - - | EqT (_, _, t) - | NotT (_, t) -> - self#type_ cx pole_TODO acc t - - | AndT (_, a, b) - | OrT (_, a, b) - | NullishCoalesceT (_, a, b) -> - let acc = self#type_ cx pole_TODO acc a in - let acc = self#type_ cx pole_TODO acc b in - acc - - | SpecializeT (_, _, _, _, ts, t) -> - let acc = self#opt (List.fold_left (self#type_ cx pole_TODO)) acc ts in - let acc = self#type_ cx pole_TODO acc t in - acc - - | ThisSpecializeT (_, this, k) -> - let acc = self#type_ cx pole_TODO acc this in - let acc = self#cont cx acc k in - acc - - | VarianceCheckT (_, ts, _) -> List.fold_left (self#type_ cx pole_TODO) acc ts - - | TypeAppVarianceCheckT (_, _, _, ts) -> - List.fold_left (fun acc (a, b) -> - let acc = self#type_ cx pole_TODO acc a in - let acc = self#type_ cx pole_TODO acc b in - acc - ) acc ts - - | ConcretizeTypeAppsT (_, (ts1, _, _), (t2, ts2, _, _), _) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts1 in - let acc = self#type_ cx pole_TODO acc t2 in - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts2 in - acc - - | LookupT (_, kind, ts, prop, action) -> - let acc = self#lookup_kind cx acc kind in - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - let acc = self#propref cx acc prop in - let acc = self#lookup_action cx acc action in - acc - - | ObjAssignToT (_, t1, t2, _) - | ObjAssignFromT (_, t1, t2, _) - | ObjTestT (_, t1, t2) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc - - | ObjTestProtoT (_, t) -> self#type_ cx pole_TODO acc t - - | ObjFreezeT (_, t) - | ObjRestT (_, _, t) - | ObjSealT (_, t) - | ArrRestT (_, _, _, t) -> - 
self#type_ cx pole_TODO acc t - - | UnifyT (t1, t2) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc - - | BecomeT (_, t) -> self#type_ cx pole_TODO acc t - - | GetKeysT (_, t) -> self#use_type_ cx acc t - | GetValuesT (_, t) -> self#type_ cx pole_TODO acc t - - | HasOwnPropT _ -> acc - - | ElemT (_, _, t, action) -> - let acc = self#type_ cx pole_TODO acc t in - let acc = self#elem_action cx acc action in - acc - - | MakeExactT (_, cont) -> self#cont cx acc cont - - | CJSRequireT (_, t, _) - | ImportModuleNsT (_, t, _) - | ImportDefaultT (_, _, _, t, _) - | ImportNamedT (_, _, _, _, t, _) - | ImportTypeT (_, _, t) - | ImportTypeofT (_, _, t) - -> self#type_ cx Negative acc t - - | AssertImportIsValueT _ -> acc - - | CJSExtractNamedExportsT (_, (_, ts, _), t) -> - let acc = self#export_types cx pole_TODO acc ts in - let acc = self#type_ cx pole_TODO acc t in - acc - - | CopyNamedExportsT (_, t, tout) - | CopyTypeExportsT (_, t, tout) - | ExportTypeT (_, _, _, t, tout) -> - let acc = self#type_ cx pole_TODO acc t in - let acc = self#type_ cx pole_TODO acc tout in - acc - - | ExportNamedT (_, _, ts, tout) -> - let visit_pair acc (_loc, t) = self#type_ cx pole_TODO acc t in - let acc = self#smap visit_pair acc ts in - let acc = self#type_ cx pole_TODO acc tout in - acc - - | MapTypeT (_, map, tout) -> - let acc = self#type_map cx acc map in - let acc = self#type_ cx pole_TODO acc tout in - acc - - | ReactKitT (_, _, tool) -> (match tool with - | React.GetProps t | React.GetConfig t | React.GetRef t - -> self#type_ cx pole_TODO acc t - | React.CreateElement0 (_, config, (children, children_spread), tout) -> - let acc = self#type_ cx pole_TODO acc config in - let acc = List.fold_left (self#type_ cx pole_TODO) acc children in - let acc = self#opt (self#type_ cx pole_TODO) acc children_spread in - let acc = self#type_ cx pole_TODO acc tout in - acc - | React.CreateElement (_, component, config, (children, children_spread), tout) -> - let acc = self#type_ cx pole_TODO acc component in - let acc = self#type_ cx pole_TODO acc config in - let acc = List.fold_left (self#type_ cx pole_TODO) acc children in - let acc = self#opt (self#type_ cx pole_TODO) acc children_spread in - let acc = self#type_ cx pole_TODO acc tout in - acc - | React.SimplifyPropType (tool, t) -> - let open React in - let open React.SimplifyPropType in - let acc = match tool with - | ArrayOf | InstanceOf | ObjectOf -> acc - | OneOf r | OneOfType r -> (match r with - | ResolveArray -> acc - | ResolveElem (ts1, ts2) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts1 in - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts2 in - acc) - | Shape o -> self#react_resolve_object cx acc o - in - let acc = self#type_ cx pole_TODO acc t in - acc - | React.CreateClass (tool, knot, tout) -> - let acc = self#react_create_class_tool cx acc tool in - let acc = self#react_create_class_knot cx acc knot in - let acc = self#type_ cx pole_TODO acc tout in - acc) - - | ObjKitT (_, _, resolve_tool, tool, tout) -> - let open Object in - let acc = - match resolve_tool with - | Resolve r -> self#object_kit_resolve cx acc r - | Super (s, r) -> - let acc = self#object_kit_slice cx acc s in - let acc = self#object_kit_resolve cx acc r in - acc - in - let acc = match tool with - | ReadOnly -> acc - | Spread (_, state) -> - let open Object.Spread in - let { todo_rev; acc = object_spread_acc } = state in - let acc = List.fold_left (self#type_ cx pole_TODO) acc todo_rev in - let acc = 
List.fold_left - (Nel.fold_left (self#object_kit_slice cx)) - acc object_spread_acc - in + method private lookup_kind cx acc = + function + | Strict _ -> acc + | NonstrictReturning (Some (t1, t2), _) -> + let acc = self#type_ cx pole_TODO acc t1 in + let acc = self#type_ cx pole_TODO acc t2 in acc - | Rest (_, state) -> - let open Object.Rest in - (match state with - | One t -> self#type_ cx pole_TODO acc t - | Done o -> Nel.fold_left (self#object_kit_slice cx) acc o) - | ReactConfig state -> - let open Object.ReactConfig in - (match state with - | Config { defaults; children } -> - let acc = self#opt (self#type_ cx pole_TODO) acc defaults in - let acc = self#opt (self#type_ cx pole_TODO) acc children in - acc - | Defaults { config; children } -> - let acc = Nel.fold_left (self#object_kit_slice cx) acc config in - let acc = self#opt (self#type_ cx pole_TODO) acc children in - acc) - in - let acc = self#type_ cx pole_TODO acc tout in - acc - - | DebugPrintT _ -> acc - | DebugSleepT _ -> acc - - | SentinelPropTestT (_, t, _, _, _, tout) -> - let acc = self#type_ cx pole_TODO acc t in - let acc = self#type_ cx pole_TODO acc tout in - acc - - | IdxUnwrap (_, tout) - | IdxUnMaybeifyT (_, tout) -> - self#type_ cx pole_TODO acc tout - - | OptionalChainT (_, _, uses) -> - Nel.fold_left (fun acc (use, tout) -> - self#use_type_ cx acc (apply_opt_use use tout) - ) acc uses - - | InvariantT _ -> acc - - | CallLatentPredT (_, _, _, t1, t2) - | CallOpenPredT (_, _, _, t1, t2) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc - - | SubstOnPredT (_, _, t) -> self#type_ cx pole_TODO acc t - - | RefineT (_, predicate, t) -> - let acc = self#predicate cx acc predicate in - let acc = self#type_ cx pole_TODO acc t in - acc - - | ResolveSpreadT (_, _, { rrt_resolved; rrt_unresolved; rrt_resolve_to }) -> - let acc = List.fold_left (fun (acc: 'a) -> function - | ResolvedArg t -> self#type_ cx pole_TODO acc t - | ResolvedAnySpreadArg _ -> acc - | ResolvedSpreadArg (_, arr) -> self#arr_type cx pole_TODO acc arr - ) acc rrt_resolved in - let acc = List.fold_left (fun acc -> function - | UnresolvedArg t - | UnresolvedSpreadArg t -> - self#type_ cx pole_TODO acc t - ) acc rrt_unresolved in - let acc = match rrt_resolve_to with - | ResolveSpreadsToTuple (_, t1, t2) - | ResolveSpreadsToArrayLiteral (_, t1, t2) - | ResolveSpreadsToArray (t1, t2) - -> + | NonstrictReturning (None, _) -> acc + | ShadowRead (_, props) + | ShadowWrite props -> + Nel.fold_left (self#props cx pole_TODO) acc props + + method private lookup_action cx acc = + function + | ReadProp { use_op = _; obj_t = t1; tout = t2 } -> let acc = self#type_ cx pole_TODO acc t1 in let acc = self#type_ cx pole_TODO acc t2 in acc - | ResolveSpreadsToMultiflowCallFull (_, fn) - | ResolveSpreadsToMultiflowSubtypeFull (_, fn) - -> self#fun_type cx pole_TODO acc fn - | ResolveSpreadsToCustomFunCall (_, kind, t) -> - let acc = self#custom_fun_kind cx acc kind in - let acc = self#type_ cx pole_TODO acc t in + | WriteProp { use_op = _; obj_t; prop_tout; tin; write_ctx = _; mode = _ } -> + let acc = self#type_ cx pole_TODO acc obj_t in + let acc = self#opt (self#type_ cx pole_TODO) acc prop_tout in + let acc = self#type_ cx pole_TODO acc tin in acc - | ResolveSpreadsToMultiflowPartial (_, fn, _, t) -> - let acc = self#fun_type cx pole_TODO acc fn in - let acc = self#type_ cx pole_TODO acc t in + | LookupProp (_, prop) + | SuperProp (_, prop) -> + self#prop cx pole_TODO acc prop + | MatchProp (_, t) -> self#type_ cx 
pole_TODO acc t + + method private elem_action cx acc = + function + | ReadElem t -> self#type_ cx pole_TODO acc t + | WriteElem (tin, tout, _) -> + let acc = self#type_ cx pole_TODO acc tin in + let acc = self#opt (self#type_ cx pole_TODO) acc tout in acc - | ResolveSpreadsToCallT (fn, t) -> - let acc = self#fun_call_type cx acc fn in + | CallElem (_, fn) -> self#fun_call_type cx acc fn + + method private cont cx acc = + function + | Lower (_, l) -> self#type_ cx pole_TODO acc l + | Upper u -> self#use_type_ cx acc u + + method private type_map cx acc = + function + | TupleMap t + | ObjectMap t + | ObjectMapi t -> + self#type_ cx pole_TODO acc t + + method private choice_use_tool cx acc = + function + | FullyResolveType id -> + let (_, acc) = self#type_graph cx (ISet.empty, acc) id in + acc + | TryFlow (_, spec) -> self#try_flow_spec cx acc spec + + method private type_graph cx (seen, acc) id = + Graph_explorer.( + let seen' = ISet.add id seen in + if seen' == seen then + (seen, acc) + else + let graph = Context.type_graph cx in + let acc = (seen', self#eval_id cx pole_TODO acc id) in + match Tbl.find graph id with + | exception Not_found -> acc (* shouldn't happen *) + | Unexplored { rev_deps = deps } + | Explored { deps } -> + ISet.fold (fun id acc -> self#type_graph cx acc id) deps acc) + + method private try_flow_spec cx acc = + function + | UnionCases (_, t, _rep, ts) -> let acc = self#type_ cx pole_TODO acc t in + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in acc - in - acc - - | CondT (_, then_t_opt, else_t, tout) -> - let acc = self#opt (self#type_ cx pole_TODO) acc then_t_opt in - let acc = self#type_ cx pole_TODO acc else_t in - let acc = self#type_ cx pole_TODO acc tout in - acc - - | ChoiceKitUseT (_, tool) -> - self#choice_use_tool cx acc tool - - | ExtendsUseT (_, _, ts, t1, t2) -> - let acc = self#list (self#type_ cx pole_TODO) acc ts in - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc - - | IntersectionPreprocessKitT (_, tool) -> (match tool with - | ConcretizeTypes (ts1, ts2, t, use) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts1 in - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts2 in - let acc = self#type_ cx pole_TODO acc t in - let acc = self#use_type_ cx acc use in - acc - | SentinelPropTest (_, _, t1, t2, t3) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - let acc = self#type_ cx pole_TODO acc t3 in + | IntersectionCases (ts, use) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in + let acc = self#use_type_ cx acc use in + acc + + method private object_kit_resolve cx acc = + Object.( + function + | Next -> acc + | List0 (ts, _) -> Nel.fold_left (self#type_ cx pole_TODO) acc ts + | List (ts, rs, _) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in + let acc = Nel.fold_left (Nel.fold_left (self#object_kit_slice cx)) acc rs in + acc) + + method private object_kit_slice cx acc { Object.reason = _; props; dict; flags = _ } = + let acc = self#smap (fun acc (t, _) -> self#type_ cx pole_TODO acc t) acc props in + let acc = self#opt (self#dict_type cx pole_TODO) acc dict in acc - | PropExistsTest (_, _, t1, t2) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc) - (* The default behavior here could be fleshed out a bit, to look up the graph, - handle Resolved and Unresolved cases, etc. 
*) - method tvar _cx _pole acc _r _id = acc - - method dict_type cx pole acc d = - let { - dict_name = _; - key; - value; - dict_polarity = p; - } = d in - let acc = self#type_ cx pole_TODO acc key in - let acc = self#type_ cx (P.mult (pole, p)) acc value in - acc - - method props cx pole acc id = - Context.find_props cx id - |> self#smap (self#prop cx pole) acc - - method prop cx pole acc = function - | Field (_, t, p) -> self#type_ cx (P.mult (pole, p)) acc t - | Method (_, t) -> self#type_ cx pole acc t - | Get (_, t) -> self#type_ cx pole acc t - | Set (_, t) -> self#type_ cx (P.inv pole) acc t - | GetSet (_, t1, _, t2) -> - let acc = self#type_ cx pole acc t1 in - let acc = self#type_ cx (P.inv pole) acc t2 in + method private object_kit_spread_operand_slice + cx acc { Object.Spread.reason = _; prop_map; dict } = + let acc = self#smap (Property.fold_t (self#type_ cx pole_TODO)) acc prop_map in + let acc = self#opt (self#dict_type cx pole_TODO) acc dict in acc - method call_prop cx pole acc id = - let t = Context.find_call cx id in - self#type_ cx pole acc t - - method exports cx pole acc id = - let visit_pair acc (_loc, t) = self#type_ cx pole acc t in - Context.find_exports cx id - |> self#smap visit_pair acc - - method eval_id cx pole acc id = - match IMap.get id (Context.evaluated cx) with - | None -> acc - | Some t -> self#type_ cx pole acc t - - method private type_param cx pole acc tp = - let { - reason = _; - name = _; - bound; - default; - polarity = p; - } = tp in - let pole = P.mult (pole, p) in - let acc = self#type_ cx pole acc bound in - self#opt (self#type_ cx pole) acc default - - method fun_type cx pole acc ft = - let { - this_t; - params; - rest_param; - return_t; - closure_t = _; - is_predicate = _; - changeset = _; - def_reason = _; - } = ft in - let acc = self#type_ cx pole acc this_t in - let acc = self#list (fun acc (_, t) -> self#type_ cx (P.inv pole) acc t) acc params in - let acc = self#opt (fun acc (_, _, t) -> self#type_ cx (P.inv pole) acc t) acc rest_param in - let acc = self#type_ cx pole acc return_t in - acc - - method private obj_type cx pole acc o = - let { - dict_t; - props_tmap; - proto_t; - call_t; - flags = _; - } = o in - let acc = self#opt (self#dict_type cx pole) acc dict_t in - let acc = self#props cx pole acc props_tmap in - let acc = self#type_ cx pole acc proto_t in - let acc = self#opt (self#call_prop cx pole) acc call_t in - acc - - method private arr_type cx pole acc = function - | ArrayAT (t, None) -> - self#type_ cx Neutral acc t - | ArrayAT (t, Some ts) - | TupleAT (t, ts) -> - let acc = self#type_ cx Neutral acc t in - let acc = self#list (self#type_ cx Neutral) acc ts in - acc - | ROArrayAT t -> - self#type_ cx pole acc t - | EmptyAT -> acc - - method private inst_type cx pole acc i = - let { - class_id = _; - type_args; - own_props; - proto_props; - inst_call_t; - initialized_fields = _; - initialized_static_fields = _; - has_unknown_react_mixins = _; - structural = _; - } = i in - let acc = self#list (fun acc (_, _, t, pole') -> - self#type_ cx (P.mult (pole, pole')) acc t - ) acc type_args in - let acc = self#props cx pole acc own_props in - let acc = self#props cx pole acc proto_props in - let acc = self#opt (self#call_prop cx pole) acc inst_call_t in - acc - - method private export_types cx pole acc e = - let { - exports_tmap; - cjs_export; - has_every_named_export = _; - } = e in - let acc = self#exports cx pole acc exports_tmap in - let acc = self#opt (self#type_ cx pole) acc cjs_export in - acc - - method private 
fun_call_type cx acc call = - let { - call_this_t; - call_targs; - call_args_tlist; - call_tout; - call_closure_t = _; - call_strict_arity = _; - } = call in - let acc = self#type_ cx pole_TODO acc call_this_t in - let acc = self#opt (self#list (self#type_ cx pole_TODO)) acc call_targs in - let acc = self#list (self#call_arg cx) acc call_args_tlist in - let acc = self#type_ cx pole_TODO acc call_tout in - acc - - method private propref cx acc = function - | Named _ -> acc - | Computed t -> self#type_ cx pole_TODO acc t - - method private class_binding cx acc { class_private_fields; class_private_static_fields; _ } = - let acc = self#props cx pole_TODO acc class_private_fields in - let acc = self#props cx pole_TODO acc class_private_static_fields in - acc - - method private call_arg cx acc = function - | Arg t -> self#type_ cx pole_TODO acc t - | SpreadArg t -> self#type_ cx pole_TODO acc t - - method private lookup_kind cx acc = function - | Strict _ -> acc - | NonstrictReturning (Some (t1, t2), _) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - acc - | NonstrictReturning (None, _) -> acc - | ShadowRead (_, props) - | ShadowWrite props -> - Nel.fold_left (self#props cx pole_TODO) acc props - - method private lookup_action cx acc = function - | RWProp (_, t1, t2, rw) -> - let acc = self#type_ cx pole_TODO acc t1 in - let acc = self#type_ cx pole_TODO acc t2 in - let acc = self#read_write cx acc rw in - acc - | LookupProp (_, prop) - | SuperProp (_, prop) -> - self#prop cx pole_TODO acc prop - | MatchProp (_, t) -> - self#type_ cx pole_TODO acc t - - method private read_write cx acc = function - | Read -> acc - | Write (_, prop_t) -> self#opt (self#type_ cx pole_TODO) acc prop_t - - method private elem_action cx acc = function - | ReadElem t -> - self#type_ cx pole_TODO acc t - | WriteElem (tin, tout) -> - let acc = self#type_ cx pole_TODO acc tin in - let acc = self#opt (self#type_ cx pole_TODO) acc tout in - acc - | CallElem (_, fn) -> - self#fun_call_type cx acc fn - - method private cont cx acc = function - | Lower (_, l) -> self#type_ cx pole_TODO acc l - | Upper u -> self#use_type_ cx acc u - - method private type_map cx acc = function - | TupleMap t - | ObjectMap t - | ObjectMapi t -> self#type_ cx pole_TODO acc t - - method private choice_use_tool cx acc = function - | FullyResolveType id -> - let _, acc = self#type_graph cx (ISet.empty, acc) id in - acc - | TryFlow (_, spec) -> - self#try_flow_spec cx acc spec - - method private type_graph cx (seen, acc) id = - let open Graph_explorer in - let seen' = ISet.add id seen in - if seen' == seen then seen, acc else - let graph = Context.type_graph cx in - let acc = seen', self#eval_id cx pole_TODO acc id in - let acc = - match IMap.get id graph.explored_nodes with - | None -> acc - | Some {deps} -> - ISet.fold (fun id acc -> self#type_graph cx acc id) deps acc - in - let acc = - match IMap.get id graph.unexplored_nodes with - | None -> acc - | Some {rev_deps} -> - ISet.fold (fun id acc -> self#type_graph cx acc id) rev_deps acc - in - acc - - method private try_flow_spec cx acc = function - | UnionCases (_, t, _rep, ts) -> - let acc = self#type_ cx pole_TODO acc t in - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - acc - | IntersectionCases (ts, use) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - let acc = self#use_type_ cx acc use in - acc - - method private object_kit_resolve cx acc = - let open Object in - function - | Next -> acc - | List0 (ts, _) -> 
Nel.fold_left (self#type_ cx pole_TODO) acc ts - | List (ts, rs, _) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - let acc = Nel.fold_left (Nel.fold_left (self#object_kit_slice cx)) acc rs in + method private object_kit_acc_element cx acc = + Object.Spread.( + function + | InlineSlice slice -> self#object_kit_spread_operand_slice cx acc slice + | ResolvedSlice resolved -> Nel.fold_left (self#object_kit_slice cx) acc resolved) + + method private object_kit_spread_operand cx acc = + Object.Spread.( + function + | Slice operand_slice -> self#object_kit_spread_operand_slice cx acc operand_slice + | Type t -> self#type_ cx pole_TODO acc t) + + method private react_resolved_object cx acc (_, props, dict, _) = + let acc = self#smap (self#prop cx pole_TODO) acc props in + let acc = self#opt (self#dict_type cx pole_TODO) acc dict in acc - method private object_kit_slice cx acc (_, props, dict, _) = - let acc = self#smap (fun acc (t, _) -> self#type_ cx pole_TODO acc t) acc props in - let acc = self#opt (self#dict_type cx pole_TODO) acc dict in - acc - - method private react_resolved_object cx acc (_, props, dict, _) = - let acc = self#smap (self#prop cx pole_TODO) acc props in - let acc = self#opt (self#dict_type cx pole_TODO) acc dict in - acc - - method private react_resolve_object cx acc o = - let open React in - match o with - | ResolveObject -> acc - | ResolveDict (dict, props, o) -> - let acc = self#dict_type cx pole_TODO acc dict in - let acc = self#smap (self#prop cx pole_TODO) acc props in - let acc = self#react_resolved_object cx acc o in - acc - | ResolveProp (_, props, o) -> - let acc = self#smap (self#prop cx pole_TODO) acc props in - let acc = self#react_resolved_object cx acc o in - acc - - method private react_create_class_tool cx acc tool = - let open React.CreateClass in - match tool with - | Spec tail -> self#react_create_class_stack_tail cx acc tail - | Mixins stack -> self#react_create_class_stack cx acc stack - | Statics stack -> self#react_create_class_stack cx acc stack - | PropTypes (stack, o) -> - let acc = self#react_create_class_stack cx acc stack in - let acc = self#react_resolve_object cx acc o in - acc - | DefaultProps (ts, dp) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - let acc = self#opt (self#maybe_known (self#react_resolved_object cx)) acc dp in - acc - | InitialState (ts, s) -> - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - let acc = self#opt (self#maybe_known (self#or_null - (self#react_resolved_object cx))) acc s in - acc + method private react_resolve_object cx acc o = + React.( + match o with + | ResolveObject -> acc + | ResolveDict (dict, props, o) -> + let acc = self#dict_type cx pole_TODO acc dict in + let acc = self#smap (self#prop cx pole_TODO) acc props in + let acc = self#react_resolved_object cx acc o in + acc + | ResolveProp (_, props, o) -> + let acc = self#smap (self#prop cx pole_TODO) acc props in + let acc = self#react_resolved_object cx acc o in + acc) + + method private react_create_class_tool cx acc tool = + React.CreateClass.( + match tool with + | Spec tail -> self#react_create_class_stack_tail cx acc tail + | Mixins stack -> self#react_create_class_stack cx acc stack + | Statics stack -> self#react_create_class_stack cx acc stack + | PropTypes (stack, o) -> + let acc = self#react_create_class_stack cx acc stack in + let acc = self#react_resolve_object cx acc o in + acc + | DefaultProps (ts, dp) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in + let acc = 
self#opt (self#maybe_known (self#react_resolved_object cx)) acc dp in + acc + | InitialState (ts, s) -> + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in + let acc = + self#opt (self#maybe_known (self#or_null (self#react_resolved_object cx))) acc s + in + acc) + + method private react_create_class_stack cx acc (head, tail) = + let acc = self#react_create_class_stack_head cx acc head in + let acc = self#react_create_class_stack_tail cx acc tail in + acc - method private react_create_class_stack cx acc (head, tail) = - let acc = self#react_create_class_stack_head cx acc head in - let acc = self#react_create_class_stack_tail cx acc tail in - acc + method private react_create_class_stack_head cx acc (o, spec) = + let acc = self#react_resolved_object cx acc o in + let acc = self#react_create_class_spec cx acc spec in + acc - method private react_create_class_stack_head cx acc (o, spec) = - let acc = self#react_resolved_object cx acc o in - let acc = self#react_create_class_spec cx acc spec in - acc + method private react_create_class_stack_tail cx acc = + List.fold_left + (fun acc (head, ts, specs) -> + let acc = self#react_create_class_stack_head cx acc head in + let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in + let acc = + List.fold_left (self#maybe_known (self#react_create_class_spec cx)) acc specs + in + acc) + acc - method private react_create_class_stack_tail cx acc = - List.fold_left (fun acc (head, ts, specs) -> - let acc = self#react_create_class_stack_head cx acc head in - let acc = List.fold_left (self#type_ cx pole_TODO) acc ts in - let acc = List.fold_left (self#maybe_known - (self#react_create_class_spec cx)) acc specs in - acc - ) acc - - method private react_create_class_spec cx acc spec = - let open React.CreateClass in - let { obj; statics; prop_types; get_default_props; get_initial_state; _ } = spec in - let acc = self#react_resolved_object cx acc obj in - let acc = self#opt (self#maybe_known (self#react_resolved_object cx)) acc statics in - let acc = self#opt (self#maybe_known (self#react_resolved_object cx)) acc prop_types in - let acc = List.fold_left (self#type_ cx pole_TODO) acc get_default_props in - let acc = List.fold_left (self#type_ cx pole_TODO) acc get_initial_state in - acc - - method private maybe_known: 't. ('a -> 't -> 'a) -> 'a -> 't React.CreateClass.maybe_known -> 'a = - let open React.CreateClass in - fun f acc x -> match x with - | Known a -> f acc a - | Unknown _ -> acc - - method private or_null: 't. ('a -> 't -> 'a) -> 'a -> 't React.CreateClass.or_null -> 'a = - let open React.CreateClass in - fun f acc x -> match x with - | NotNull a -> f acc a - | Null _ -> acc - - method private react_create_class_knot cx acc knot = - let open React.CreateClass in - let { this; static; state_t; default_t } = knot in - let acc = self#type_ cx pole_TODO acc this in - let acc = self#type_ cx pole_TODO acc static in - let acc = self#type_ cx pole_TODO acc state_t in - let acc = self#type_ cx pole_TODO acc default_t in - acc - - method private list: 't. ('a -> 't -> 'a) -> 'a -> 't list -> 'a = - List.fold_left - - method private opt: 't. ('a -> 't -> 'a) -> 'a -> 't option -> 'a = - fun f acc opt -> Option.fold opt ~init:acc ~f - - method private smap: 't. 
('a -> 't -> 'a) -> 'a -> 't SMap.t -> 'a = - fun f acc smap -> SMap.fold (fun _ t acc -> f acc t) smap acc -end + method private react_create_class_spec cx acc spec = + React.CreateClass.( + let { obj; statics; prop_types; get_default_props; get_initial_state; _ } = spec in + let acc = self#react_resolved_object cx acc obj in + let acc = self#opt (self#maybe_known (self#react_resolved_object cx)) acc statics in + let acc = self#opt (self#maybe_known (self#react_resolved_object cx)) acc prop_types in + let acc = List.fold_left (self#type_ cx pole_TODO) acc get_default_props in + let acc = List.fold_left (self#type_ cx pole_TODO) acc get_initial_state in + acc) + + method private maybe_known + : 't. ('a -> 't -> 'a) -> 'a -> 't React.CreateClass.maybe_known -> 'a = + React.CreateClass.( + fun f acc x -> + match x with + | Known a -> f acc a + | Unknown _ -> acc) + + method private or_null : 't. ('a -> 't -> 'a) -> 'a -> 't React.CreateClass.or_null -> 'a = + React.CreateClass.( + fun f acc x -> + match x with + | NotNull a -> f acc a + | Null _ -> acc) + + method private react_create_class_knot cx acc knot = + React.CreateClass.( + let { this; static; state_t; default_t } = knot in + let acc = self#type_ cx pole_TODO acc this in + let acc = self#type_ cx pole_TODO acc static in + let acc = self#type_ cx pole_TODO acc state_t in + let acc = self#type_ cx pole_TODO acc default_t in + acc) + + method private list : 't. ('a -> 't -> 'a) -> 'a -> 't list -> 'a = List.fold_left + + method private nel : 't. ('a -> 't -> 'a) -> 'a -> 't Nel.t -> 'a = Nel.fold_left + + method private opt : 't. ('a -> 't -> 'a) -> 'a -> 't option -> 'a = + (fun f acc opt -> Option.fold opt ~init:acc ~f) + + method private smap : 't. ('a -> 't -> 'a) -> 'a -> 't SMap.t -> 'a = + (fun f acc smap -> SMap.fold (fun _ t acc -> f acc t) smap acc) + end diff --git a/src/typing/type_visitor.mli b/src/typing/type_visitor.mli index 2690d565216..64c9c1839b5 100644 --- a/src/typing/type_visitor.mli +++ b/src/typing/type_visitor.mli @@ -1,22 +1,36 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -class ['a] t: object - (* Only exposing a few methods for now. *) - method type_ : Context.t -> Type.polarity -> 'a -> Type.t -> 'a - method def_type : Context.t -> Type.polarity -> 'a -> Type.def_t -> 'a - method use_type_ : Context.t -> 'a -> Type.use_t -> 'a - method tvar : Context.t -> Type.polarity -> 'a -> Reason.reason -> Constraint.ident -> 'a - method props : Context.t -> Type.polarity -> 'a -> Type.Properties.id -> 'a - method prop : Context.t -> Type.polarity -> 'a -> Type.property -> 'a - method call_prop : Context.t -> Type.polarity -> 'a -> int -> 'a - method exports : Context.t -> Type.polarity -> 'a -> Type.Exports.id -> 'a - method eval_id : Context.t -> Type.polarity -> 'a -> int -> 'a - method fun_type : Context.t -> Type.polarity -> 'a -> Type.funtype -> 'a - method dict_type : Context.t -> Type.polarity -> 'a -> Type.dicttype -> 'a - method destructor: Context.t -> 'a -> Type.destructor -> 'a -end +class ['a] t : + object + (* Only exposing a few methods for now. 
*) + method type_ : Context.t -> Polarity.t -> 'a -> Type.t -> 'a + + method def_type : Context.t -> Polarity.t -> 'a -> Type.def_t -> 'a + + method targ : Context.t -> Polarity.t -> 'a -> Type.targ -> 'a + + method use_type_ : Context.t -> 'a -> Type.use_t -> 'a + + method tvar : Context.t -> Polarity.t -> 'a -> Reason.reason -> Constraint.ident -> 'a + + method props : Context.t -> Polarity.t -> 'a -> Type.Properties.id -> 'a + + method prop : Context.t -> Polarity.t -> 'a -> Type.property -> 'a + + method call_prop : Context.t -> Polarity.t -> 'a -> int -> 'a + + method exports : Context.t -> Polarity.t -> 'a -> Type.Exports.id -> 'a + + method eval_id : Context.t -> Polarity.t -> 'a -> int -> 'a + + method fun_type : Context.t -> Polarity.t -> 'a -> Type.funtype -> 'a + + method dict_type : Context.t -> Polarity.t -> 'a -> Type.dicttype -> 'a + + method destructor : Context.t -> 'a -> Type.destructor -> 'a + end diff --git a/src/typing/typed_ast.ml b/src/typing/typed_ast.ml deleted file mode 100644 index f3f393335e5..00000000000 --- a/src/typing/typed_ast.ml +++ /dev/null @@ -1,285 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - *) - -module Ast = Flow_ast - -(** Error AST nodes. These are used when, while generating the typed AST, - errors prevent some part of the original AST from being translated. - These are all chosen arbitrarily, and currently nothing relies on them - being these specific values. - TODO(vijayramamurthy): redo error nodes in a safer way (e.g. add error - constructors to the datatype) - - This module also contains "unimplemented" AST nodes; these will be deleted once - we've finished implementing the typed AST translation - *) -module T = Type - -type program = (Loc.t, Loc.t * Type.t) Ast.program - -let error_annot = Loc.none, Type.Locationless.AnyT.t -let unimplemented_annot = Loc.none, Type.Locationless.AnyT.t - -module Type = struct - open Ast.Type - let error = - Generic { Ast.Type.Generic. - id = (Ast.Type.Generic.Identifier.Unqualified (error_annot, "Error")); - targs = None - } - - module Function = struct - open Function - - module Params = struct - let error = { Params. - params = []; - rest = None; - } - end - - let error = { - tparams = None; - params = Loc.none, Params.error; - return = error_annot, error; - } - end - - module Object = struct - open Object - - module Property = struct - open Property - let error = { - key = Ast.Expression.Object.Property.Literal (error_annot, { Ast.Literal. 
- value = Ast.Literal.Null; - raw = "Error"; - }); - value = Init (error_annot, error); - optional = false; - static = false; - proto = false; - _method = false; - variance = None - } - end - - module Indexer = struct - open Indexer - let error = { - id = Some (Loc.none, "Error"); - key = error_annot, error; - value = error_annot, error; - static = false; - variance = None; - } - end - - module InternalSlot = struct - open InternalSlot - let error = { - id = Loc.none, "Error"; - value = error_annot, error; - optional = false; - static = false; - _method = false; - } - end - - module SpreadProperty = struct - open SpreadProperty - let error = { - argument = error_annot, error - } - end - end -end - -module Expression = struct - open Ast.Expression - let error = Identifier (error_annot, "Error") - let expression_or_spread_list_error = [ Expression (error_annot, error) ] - let unimplemented = Identifier (unimplemented_annot, "Unimplemented") - let targs_unimplemented = None - let expression_or_spread_list_unimplemented = [ Expression (error_annot, unimplemented) ] - - module Object = struct - open Object - module Property = struct - open Property - let key_error = Property.Identifier (error_annot, "Error") - let error = Init { - key = key_error; - value = error_annot, error; - shorthand = false; - } - end - let property_error = - Property (Loc.none, Property.error) - end -end - -module Pattern = struct - open Ast.Pattern - let error = Expression (error_annot, Expression.error) - let unimplemented = Expression (error_annot, Expression.unimplemented) -end - -module Statement = struct - open Ast.Statement - let error = Loc.none, Labeled { Labeled. - label = Loc.none, "Error"; - body = Loc.none, Empty; - } - module Try = struct - open Try - module CatchClause = struct - open CatchClause - let error = { - param = None; - body = Loc.none, { Ast.Statement.Block.body = [error] } - } - end - end - module ForIn = struct - open ForIn - let left_error = LeftPattern (error_annot, Pattern.error) - end - module ForOf = struct - open ForOf - let left_error = LeftPattern (error_annot, Pattern.error) - end - module DeclareFunction = struct - open DeclareFunction - let error = { - id = Loc.none, "Error"; - annot = Loc.none, (error_annot, Ast.Type.Function Type.Function.error); - predicate = None; - } - end -end - -module Function = struct - open Ast.Function - let body_error = BodyExpression (error_annot, Expression.error) - let body_unimplemented = BodyExpression (error_annot, Expression.unimplemented) - let unimplemented = { - id = Some (error_annot, "Unimplemented"); - params = Loc.none, { Params.params = []; rest = None; }; - body = body_unimplemented; - async = false; - generator = false; - predicate = None; - expression = false; - return = Missing error_annot; - tparams = None; - } - - module RestElement = struct - open RestElement - let error = { - argument = error_annot, Pattern.error - } - end - - module Params = struct - (* open Params *) - let error = { Params. 
- params = []; - rest = None; - } - end - - let error = { - id = Some (error_annot, "Error"); - params = Loc.none, Params.error; - body = BodyExpression (error_annot, Expression.error); - async = false; - generator = false; - predicate = None; - expression = false; - return = Missing error_annot; - tparams = None; - } - -end - -module Class = struct - open Ast.Class - let unimplemented = { - id = Some (error_annot, "Unimplemented"); - body = error_annot, { Ast.Class.Body.body = [] }; - tparams = None; - extends = None; - implements = []; - classDecorators = []; - } - - module Body = struct - open Body - let element_error = Method (error_annot, { Method. - kind = Method.Method; - key = Expression.Object.Property.key_error; - value = Loc.none, Function.error; - static = false; - decorators = []; - }) - end -end - -module JSX = struct - module Identifier = struct - open Ast.JSX.Identifier - let error = error_annot, { name = "Error" } - end - - module Attribute = struct - let error_name = - Ast.JSX.Attribute.Identifier Identifier.error - - let error = Loc.none, { - Ast.JSX.Attribute.name = error_name; - value = None; - } - end - - module SpreadAttribute = struct - let error = Loc.none, { - Ast.JSX.SpreadAttribute.argument = error_annot, Expression.error - } - end - - module MemberExpression = struct - let error = Loc.none, { - Ast.JSX.MemberExpression._object = - Ast.JSX.MemberExpression.Identifier Identifier.error; - Ast.JSX.MemberExpression.property = Identifier.error; - } - let error_object = Ast.JSX.MemberExpression.Identifier ( - Identifier.error - ) - end - - let error_name = - Ast.JSX.Identifier Identifier.error - - module Opening = struct - open Ast.JSX.Opening - let error_attribute_list attributes = - List.map (function - | Attribute _ -> Attribute Attribute.error - | SpreadAttribute _ -> SpreadAttribute SpreadAttribute.error - ) attributes - end - - module Closing = struct - open Ast.JSX.Closing - let error = Loc.none, { - name = error_name; - } - end -end diff --git a/src/typing/typed_ast.mli b/src/typing/typed_ast.mli deleted file mode 100644 index 23d88af09bb..00000000000 --- a/src/typing/typed_ast.mli +++ /dev/null @@ -1,119 +0,0 @@ -(** - * Copyright (c) 2013-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- *) - -module T = Type - -type program = (Loc.t, Loc.t * Type.t) Flow_ast.program - -val error_annot : Loc.t * Type.t - -module Type : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Type.t' - - module Object : sig - module Property : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Type.Object.Property.t' - end - - module Indexer : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Type.Object.Indexer.t' - end - - module InternalSlot : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Type.Object.InternalSlot.t' - end - - module SpreadProperty : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Type.Object.SpreadProperty.t' - end - end -end - -module Statement : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Statement.t - module Try : sig - module CatchClause : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Statement.Try.CatchClause.t' - end - end - module ForIn : sig - val left_error : (Loc.t, Loc.t * T.t) Flow_ast.Statement.ForIn.left - end - module ForOf : sig - val left_error : (Loc.t, Loc.t * T.t) Flow_ast.Statement.ForOf.left - end - module DeclareFunction : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Statement.DeclareFunction.t - end -end - -module Expression : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Expression.t' - val expression_or_spread_list_error - : (Loc.t, Loc.t * T.t) Flow_ast.Expression.expression_or_spread list - val unimplemented : (Loc.t, Loc.t * T.t) Flow_ast.Expression.t' - val targs_unimplemented : (Loc.t, Loc.t * T.t) Flow_ast.Type.ParameterInstantiation.t option - val expression_or_spread_list_unimplemented - : (Loc.t, Loc.t * T.t) Flow_ast.Expression.expression_or_spread list - module Object : sig - val property_error : (Loc.t, Loc.t * T.t) Flow_ast.Expression.Object.property - module Property : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Expression.Object.Property.t' - val key_error : (Loc.t, Loc.t * T.t) Flow_ast.Expression.Object.Property.key - end - end -end - -module Pattern : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Pattern.t' - val unimplemented : (Loc.t, Loc.t * T.t) Flow_ast.Pattern.t' -end - -module Function : sig - val body_error : (Loc.t, Loc.t * T.t) Flow_ast.Function.body - val error : (Loc.t, Loc.t * T.t) Flow_ast.Function.t - val body_unimplemented : (Loc.t, Loc.t * T.t) Flow_ast.Function.body - val unimplemented : (Loc.t, Loc.t * T.t) Flow_ast.Function.t - module RestElement : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Function.RestElement.t' - end - module Params : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.Function.Params.t' - end -end - -module Class : sig - val unimplemented : (Loc.t, Loc.t * T.t) Flow_ast.Class.t - module Body : sig - val element_error : (Loc.t, Loc.t * T.t) Flow_ast.Class.Body.element - end -end - -module JSX : sig - module Identifier : sig - val error : (Loc.t * T.t) Flow_ast.JSX.Identifier.t - end - module Attribute : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.JSX.Attribute.t - end - module SpreadAttribute : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.JSX.SpreadAttribute.t - end - module MemberExpression : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.JSX.MemberExpression.t - val error_object : (Loc.t, Loc.t * T.t) Flow_ast.JSX.MemberExpression._object - end - val error_name : (Loc.t, Loc.t * T.t) Flow_ast.JSX.name - module Opening : sig - val error_attribute_list : - (Loc.t, Loc.t) Flow_ast.JSX.Opening.attribute list -> - (Loc.t, Loc.t * T.t) Flow_ast.JSX.Opening.attribute list - end - module Closing : sig - val error : (Loc.t, Loc.t * T.t) Flow_ast.JSX.Closing.t - end -end diff 
--git a/src/typing/typed_ast_utils.ml b/src/typing/typed_ast_utils.ml index e503a912af6..8698c67bd87 100644 --- a/src/typing/typed_ast_utils.ml +++ b/src/typing/typed_ast_utils.ml @@ -1,167 +1,479 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast -module LocMap = Utils_js.LocMap +module ALocMap = Loc_collections.ALocMap -class type_parameter_mapper = object(_) - inherit [ - Loc.t, Loc.t * Type.t, - Loc.t, Loc.t * Type.t - ] Flow_polymorphic_ast_mapper.mapper as super +(* TODO(nmote) come up with a consistent story for abstract/concrete locations in this module *) - method on_loc_annot (x: Loc.t) = x - method on_type_annot (x: Loc.t * Type.t) = x +class type_parameter_mapper = + object + inherit + [ALoc.t, ALoc.t * Type.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper as super - (* Since the mapper wasn't originally written to pass an accumulator value + method on_loc_annot (x : ALoc.t) = x + + method on_type_annot (x : ALoc.t * Type.t) = x + + (* Since the mapper wasn't originally written to pass an accumulator value through the calls, we're maintaining this accumulator imperatively. *) - val mutable bound_tparams : Type.typeparam list = [] - - method annot_with_tparams : 'a . (Type.typeparam list -> 'a) -> 'a = - fun f -> f bound_tparams - - (* Imperatively adds type parameter to bound_tparams environment. *) - method! type_parameter_declaration_type_param tparam = - let res = super#type_parameter_declaration_type_param tparam in - (* Recover the Type.typeparams corresponding to AST type parameters *) - let tparam = Ast.Type.ParameterDeclaration.( - let _, { TypeParam.name = (_, t), name; bound; variance; default; } = tparam in - let reason = Type.reason_of_t t in - let bound = match bound with - | None -> Type.MixedT.make reason - | Some (_, ((_, t), _)) -> t - in - let polarity = Type_annotation.polarity variance in - let default = Option.map default ~f:(fun ((_, t), _) -> t) + val mutable bound_tparams : Type.typeparam list = [] + + method annot_with_tparams : 'a. (Type.typeparam list -> 'a) -> 'a = (fun f -> f bound_tparams) + + (* Imperatively adds type parameter to bound_tparams environment. *) + method! 
type_parameter_declaration_type_param tparam = + let res = super#type_parameter_declaration_type_param tparam in + (* Recover the Type.typeparams corresponding to AST type parameters *) + let tparam = + Ast.Type.ParameterDeclaration.( + let ( _, + { + TypeParam.name = ((_, t), { Ast.Identifier.name; comments = _ }); + bound; + variance; + default; + } ) = + tparam + in + let reason = Type.reason_of_t t in + let bound = + match bound with + | Ast.Type.Missing _ -> Type.MixedT.make reason |> Type.with_trust Trust.bogus_trust + | Ast.Type.Available (_, ((_, t), _)) -> t + in + let polarity = + Ast.Variance.( + match variance with + | Some (_, Plus) -> Polarity.Positive + | Some (_, Minus) -> Polarity.Negative + | None -> Polarity.Neutral) + in + let default = Option.map default ~f:(fun ((_, t), _) -> t) in + { Type.reason; name; bound; polarity; default }) in - { Type.reason; name; bound; polarity; default; } - ) in - bound_tparams <- tparam :: bound_tparams; - res + bound_tparams <- tparam :: bound_tparams; + res - (* Record and restore the parameter environment around nodes that might + (* Record and restore the parameter environment around nodes that might update it. *) - method! type_parameter_declaration_opt pd f = - let originally_bound_tparams = bound_tparams in - let res = super#type_parameter_declaration_opt pd f in - bound_tparams <- originally_bound_tparams; - res + method! type_parameter_declaration_opt pd f = + let originally_bound_tparams = bound_tparams in + let res = super#type_parameter_declaration_opt pd f in + bound_tparams <- originally_bound_tparams; + res - (* Classes assume an additional "this" type parameter, which needs to be + (* Classes assume an additional "this" type parameter, which needs to be explicitly added to bound_tparams *) - method! class_ cls = - let this_tparam = Ast.Class.( - let { body = ((body_loc, self_t), _); id; _ } = cls in - let name = Option.value_map ~f:snd id ~default:"<>" in - let name_loc = Option.value_map ~f:(fun ((loc, _), _) -> loc) id ~default:body_loc in - { Type. - name = "this"; - reason = Reason.mk_reason (Reason.RType name) name_loc; - bound = self_t; - polarity = Type.Positive; - default = None; - } - ) in - let originally_bound_tparams = bound_tparams in - bound_tparams <- this_tparam :: bound_tparams; - let cls = super#class_ cls in - bound_tparams <- originally_bound_tparams; - cls + method! class_ cls = + let this_tparam = + Ast.Class.( + let { body = ((body_loc, self_t), _); id; _ } = cls in + let name = + Option.value_map ~f:Flow_ast_utils.name_of_ident id ~default:"<>" + in + let name_loc = Option.value_map ~f:(fun ((loc, _), _) -> loc) id ~default:body_loc in + { + Type.name = "this"; + reason = Reason.mk_reason (Reason.RType name) name_loc; + bound = self_t; + polarity = Polarity.Positive; + default = None; + }) + in + let originally_bound_tparams = bound_tparams in + bound_tparams <- this_tparam :: bound_tparams; + let cls = super#class_ cls in + bound_tparams <- originally_bound_tparams; + cls + end + +(* Find exact location match *) +module ExactMatchQuery = struct + exception Found of Type.TypeScheme.t + + let found t tparams = raise (Found { Type.TypeScheme.tparams; type_ = t }) + + class exact_match_searcher (target_loc : ALoc.t) = + object (self) + inherit type_parameter_mapper as super + method! 
on_type_annot annot = + let (loc, t) = annot in + if target_loc = loc then + self#annot_with_tparams (found t) + else + super#on_type_annot annot + end + + let find typed_ast aloc = + let searcher = new exact_match_searcher aloc in + try + ignore (searcher#program typed_ast); + None + with Found scheme -> Some scheme end +let find_exact_match_annotation = ExactMatchQuery.find (* Find identifier under location *) +module Type_at_pos = struct + exception Found of ALoc.t * Type.TypeScheme.t -exception Found of Loc.t * Type.TypeScheme.t + (* Kinds of nodes that "type-at-pos" is interested in: + * - identifiers (handled in t_identifier) + * - literal object keys (handled in object_key) + * - `this`, `super` (handled in expression) + * - private property names (handled in expression) + *) + class type_at_pos_searcher (target_loc : Loc.t) = + object (self) + inherit type_parameter_mapper as super -(* Kinds of nodes that "type-at-pos" is interested in: - * - identifiers (handled in t_identifier) - * - literal object keys (handled in object_key) - * - `this`, `super` (handled in expression) - * - private property names (handled in expression) - *) -class type_at_pos_searcher target_loc = object(self) - inherit type_parameter_mapper as super - - method covers_target loc = - Reason.in_range target_loc loc - - method find_loc: 'a . Loc.t -> Type.t -> Type.typeparam list -> 'a = - fun loc t tparams -> - raise (Found (loc, { Type.TypeScheme.tparams; type_ = t})) - - method! t_identifier (((loc, t), _) as id) = - if self#covers_target loc - then self#annot_with_tparams (self#find_loc loc t) - else super#t_identifier id - - method! jsx_identifier (((loc, t), _) as id) = - if self#covers_target loc - then self#annot_with_tparams (self#find_loc loc t) - else super#jsx_identifier id - - method! object_key key = - let open Ast.Expression.Object.Property in - match key with - | Literal ((loc, t), _) when self#covers_target loc -> - self#annot_with_tparams (self#find_loc loc t) - | _ -> super#object_key key - - method! expression expr = - let open Ast.Expression in - match expr with - | (loc, t), (This | Super) - | (_, t), Member { Member.property = Member.PropertyPrivateName (loc, _); _ } - | (_, t), OptionalMember { OptionalMember.member = { Member.property = - Member.PropertyPrivateName (loc, _); _ - }; _} - when self#covers_target loc -> - self#annot_with_tparams (fun tparams -> self#find_loc loc t tparams) - | _ -> super#expression expr + method covers_target loc = Reason.in_range target_loc (ALoc.to_loc_exn loc) -end + method find_loc : 'a. ALoc.t -> Type.t -> Type.typeparam list -> 'a = + (fun loc t tparams -> raise (Found (loc, { Type.TypeScheme.tparams; type_ = t }))) -class type_at_loc_map_folder = object(_) - inherit type_parameter_mapper - val mutable map = LocMap.empty - method! on_type_annot x = - let loc, type_ = x in - let scheme = Type.TypeScheme.{ type_; tparams = bound_tparams; } in - map <- LocMap.add loc scheme map; - x - method to_map = map -end + method! t_identifier (((loc, t), _) as id) = + if self#covers_target loc then + self#annot_with_tparams (self#find_loc loc t) + else + super#t_identifier id + + method! jsx_identifier (((loc, t), _) as id) = + if self#covers_target loc then + self#annot_with_tparams (self#find_loc loc t) + else + super#jsx_identifier id -class type_at_loc_list_folder = object(_) - inherit type_parameter_mapper - val mutable l = [] - method! 
on_type_annot x = - let loc, type_ = x in - l <- (loc, Type.TypeScheme.{ type_; tparams = bound_tparams; }) :: l; - x - method to_list = l + method! object_key key = + Ast.Expression.Object.Property.( + match key with + | Literal ((loc, t), _) when self#covers_target loc -> + self#annot_with_tparams (self#find_loc loc t) + | _ -> super#object_key key) + + method! expression expr = + Ast.Expression.( + match expr with + | ((loc, t), (This | Super)) + | ((_, t), Member { Member.property = Member.PropertyPrivateName (loc, _); _ }) + | ( (_, t), + OptionalMember + { + OptionalMember.member = + { Member.property = Member.PropertyPrivateName (loc, _); _ }; + _; + } ) + when self#covers_target loc -> + self#annot_with_tparams (fun tparams -> self#find_loc loc t tparams) + | _ -> super#expression expr) + + method! implicit (loc, t) = + if self#covers_target loc then + self#annot_with_tparams (self#find_loc loc t) + else + super#implicit (loc, t) + end + + let find typed_ast loc = + let searcher = new type_at_pos_searcher loc in + try + ignore (searcher#program typed_ast); + None + with Found (loc, scheme) -> Some (ALoc.to_loc_exn loc, scheme) end -let find_type_at_pos_annotation typed_ast loc = - let searcher = new type_at_pos_searcher loc in - try - let _ = searcher#program typed_ast in - None - with - | Found (loc, scheme) -> Some (loc, scheme) - | exc -> raise exc - -let typed_ast_to_map typed_ast = - let folder = new type_at_loc_map_folder in +let find_type_at_pos_annotation = Type_at_pos.find + +class type_at_aloc_map_folder = + object + inherit type_parameter_mapper + + val mutable map = ALocMap.empty + + method! on_type_annot x = + let (loc, type_) = x in + let scheme = Type.TypeScheme.{ type_; tparams = bound_tparams } in + map <- ALocMap.add loc scheme map; + x + + method to_map = map + end + +class type_at_aloc_list_folder = + object + inherit type_parameter_mapper + + val mutable l = [] + + method! on_type_annot x = + let (loc, type_) = x in + l <- (loc, Type.TypeScheme.{ type_; tparams = bound_tparams }) :: l; + x + + method to_list = l + end + +let typed_ast_to_map typed_ast : Type.TypeScheme.t ALocMap.t = + let folder = new type_at_aloc_map_folder in ignore (folder#program typed_ast); folder#to_map -let typed_ast_to_list typed_ast: (Loc.t * Type.TypeScheme.t) list = - let folder = new type_at_loc_list_folder in +let typed_ast_to_list typed_ast : (ALoc.t * Type.TypeScheme.t) list = + let folder = new type_at_aloc_list_folder in ignore (folder#program typed_ast); folder#to_list + +(* Get-def *) + +type get_def_object_source = + | GetDefType of Type.t + | GetDefRequireLoc of ALoc.t + +(* source loc *) + +type get_def_member_info = { + get_def_prop_name: string; + get_def_object_source: get_def_object_source; +} + +module Get_def = struct + exception Found of get_def_member_info + + class searcher (target_loc : Loc.t) = + object (this) + inherit + [ALoc.t, ALoc.t * Type.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper as super + + method on_loc_annot (x : ALoc.t) = x + + method on_type_annot (x : ALoc.t * Type.t) = x + + method covers_target loc = Reason.in_range target_loc (ALoc.to_loc_exn loc) + + method find_loc x = raise (Found x) + + method! 
import_declaration import_loc decl = + Ast.Statement.ImportDeclaration.( + let { importKind = _; source = (source_loc, _); specifiers; default } = decl in + Option.iter ~f:(this#import_specifier_with_loc ~source_loc) specifiers; + Option.iter ~f:(this#import_default_specifier_with_loc ~source_loc) default; + super#import_declaration import_loc decl) + + method import_specifier_with_loc ~source_loc specifier = + Ast.Statement.ImportDeclaration.( + match specifier with + | ImportNamedSpecifiers named_specifiers -> + Core_list.iter ~f:(this#import_named_specifier_with_loc ~source_loc) named_specifiers + | ImportNamespaceSpecifier _ -> ()) + + method import_named_specifier_with_loc ~source_loc specifier = + Ast.Statement.ImportDeclaration.( + let { kind = _; local; remote } = specifier in + let ((remote_name_loc, _), { Ast.Identifier.name = remote_name; _ }) = remote in + let member_info = + { + get_def_prop_name = remote_name; + get_def_object_source = GetDefRequireLoc source_loc; + } + in + if this#covers_target remote_name_loc then this#find_loc member_info; + Option.iter + ~f:(fun local -> + let ((local_name_loc, _), _) = local in + if this#covers_target local_name_loc then + let member_info = + { + get_def_prop_name = remote_name; + get_def_object_source = GetDefRequireLoc source_loc; + } + in + this#find_loc member_info) + local) + + method! member expr = + let expr = super#member expr in + Ast.Expression.Member.( + let { _object; property } = expr in + begin + match property with + | PropertyIdentifier ((loc, _), { Ast.Identifier.name; _ }) when this#covers_target loc + -> + let ((_, t), _) = _object in + let member_info = + { get_def_prop_name = name; get_def_object_source = GetDefType t } + in + this#find_loc member_info + | _ -> () + end; + expr) + + method import_default_specifier_with_loc ~source_loc default = + let ((remote_name_loc, _), _) = default in + if this#covers_target remote_name_loc then + let member_info = + { + get_def_prop_name = "default"; + (* see members.ml *) + get_def_object_source = GetDefRequireLoc source_loc; + } + in + this#find_loc member_info + end + + let find_get_def_info typed_ast loc = + let searcher = new searcher loc in + try + ignore (searcher#program typed_ast); + None + with Found info -> Some info +end + +let find_get_def_info = Get_def.find_get_def_info + +(* Coverage *) + +class ['a, 'l, 't] coverage_folder ~(f : 'l -> 't -> 'a -> 'a) ~(init : 'a) = + object (this) + inherit ['l, 'l * 't, 'l, 'l * 't] Flow_polymorphic_ast_mapper.mapper as super + + val mutable acc : 'a = init + + method on_loc_annot x = x + + method on_type_annot x = x + + method! expression exp = + let ((loc, t), _) = exp in + acc <- f loc t acc; + super#expression exp + + method! object_property prop = + let prop = super#object_property prop in + Ast.Expression.Object.Property.( + match prop with + | (loc, Method { key = Literal ((_, t), _) | Identifier ((_, t), _); _ }) -> + acc <- f loc t acc; + prop + | _ -> prop) + + method! 
statement stmt = + let stmt = super#statement stmt in + match stmt with + | (loc, Ast.Statement.ClassDeclaration { Ast.Class.id = Some ((_, t), _); _ }) + | (loc, Ast.Statement.DeclareClass { Ast.Statement.DeclareClass.id = ((_, t), _); _ }) + | ( _, + Ast.Statement.DeclareExportDeclaration + { + Ast.Statement.DeclareExportDeclaration.declaration = + Some + ( Ast.Statement.DeclareExportDeclaration.NamedOpaqueType + (loc, { Ast.Statement.OpaqueType.id = ((_, t), _); _ }) + | Ast.Statement.DeclareExportDeclaration.Class + (loc, { Ast.Statement.DeclareClass.id = ((_, t), _); _ }) ); + _; + } ) + | (loc, Ast.Statement.DeclareInterface { Ast.Statement.Interface.id = ((_, t), _); _ }) + | ( loc, + Ast.Statement.DeclareModule + { + Ast.Statement.DeclareModule.id = + ( Ast.Statement.DeclareModule.Identifier ((_, t), _) + | Ast.Statement.DeclareModule.Literal ((_, t), _) ); + _; + } ) + | (loc, Ast.Statement.DeclareTypeAlias { Ast.Statement.TypeAlias.id = ((_, t), _); _ }) + | (loc, Ast.Statement.DeclareOpaqueType { Ast.Statement.OpaqueType.id = ((_, t), _); _ }) + | (loc, Ast.Statement.InterfaceDeclaration { Ast.Statement.Interface.id = ((_, t), _); _ }) + | (loc, Ast.Statement.OpaqueType { Ast.Statement.OpaqueType.id = ((_, t), _); _ }) + | (loc, Ast.Statement.TypeAlias { Ast.Statement.TypeAlias.id = ((_, t), _); _ }) -> + acc <- f loc t acc; + stmt + | _ -> stmt + + method! class_identifier i = i + + (* skip this *) + method! jsx_name name = + Ast.JSX.( + let name = super#jsx_name name in + match name with + | MemberExpression (loc, { MemberExpression.property = ((_, t), _); _ }) -> + acc <- f loc t acc; + name + | Identifier _ + | NamespacedName _ -> + name) + + method! jsx_member_expression_object _object = + Ast.JSX.MemberExpression.( + match _object with + | Identifier ((loc, t), _) -> + acc <- f loc t acc; + _object + | MemberExpression _ -> super#jsx_member_expression_object _object) + + method! t_pattern_identifier ?kind i = + let ((loc, t), _) = i in + acc <- f loc t acc; + super#t_pattern_identifier ?kind i + + method top_level_program prog = + acc <- init; + ignore (this#program prog); + acc + end + +let coverage_fold_tast ~(f : 'l -> 't -> 'acc -> 'acc) ~(init : 'acc) tast = + let folder = new coverage_folder ~f ~init in + folder#top_level_program tast + +(** Mappers + * Used to construct error nodes during type checking. + *) + +(* Error nodes are typed at `any`. Do not change this type as it might change + * current behavior. 
*) +let error_mapper = + object + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot loc = loc + + method on_type_annot loc = (loc, Type.AnyT.at Type.AnyError loc) + end + +(* Used in unimplemented cases or unsupported nodes *) +let unimplemented_mapper = + object + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot loc = loc + + method on_type_annot loc = (loc, Type.(AnyT.at (Unsound Unimplemented)) loc) + end + +(* Code is not checked at all *) +let unchecked_mapper = + object + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot loc = loc + + method on_type_annot loc = (loc, Type.(AnyT.at (Unsound Unchecked)) loc) + end + +let unreachable_mapper = + object + inherit [ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t] Flow_polymorphic_ast_mapper.mapper + + method on_loc_annot loc = loc + + method on_type_annot loc = (loc, Type.(EmptyT.at loc |> with_trust bogus_trust)) + end diff --git a/src/typing/typed_ast_utils.mli b/src/typing/typed_ast_utils.mli index 94ab4b232eb..5ecad881c1c 100644 --- a/src/typing/typed_ast_utils.mli +++ b/src/typing/typed_ast_utils.mli @@ -1,21 +1,67 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module LocMap = Utils_js.LocMap +val find_exact_match_annotation : + (ALoc.t, ALoc.t * Type.t) Flow_ast.program -> ALoc.t -> Type.TypeScheme.t option +(** + * Return the first typed AST entry that exactly matches the (abstract) location + * passed as input. + * + *) val find_type_at_pos_annotation : - (Loc.t, Loc.t * Type.t) Flow_ast.program -> - Loc.t -> - (Loc.t * Type.TypeScheme.t) option + (ALoc.t, ALoc.t * Type.t) Flow_ast.program -> Loc.t -> (Loc.t * Type.TypeScheme.t) option +(** + * Find the first typed AST entry for "type-at-pos" related queries. A query + * succeeds if the location is within the range of a symbol in the AST. The kinds + * of symbols handled here are: + * - identifiers + * - literal object keys + * - `this`, `super` + * - private property names + * + * The first part of the return is the full span of the matching symbol. + * + * It's convenient to use Loc.t as the input query, since this is usually called + * in direct response to a client query, which are typically concrete locations. 
+ *) + +type get_def_object_source = + | GetDefType of Type.t + | GetDefRequireLoc of ALoc.t + +type get_def_member_info = { + get_def_prop_name: string; + get_def_object_source: get_def_object_source; +} + +val find_get_def_info : + (ALoc.t, ALoc.t * Type.t) Flow_ast.program -> Loc.t -> get_def_member_info option val typed_ast_to_map : - (Loc.t, Loc.t * Type.t) Flow_polymorphic_ast_mapper.Ast.program -> - Type.TypeScheme.t LocMap.t + (ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.Ast.program -> + Type.TypeScheme.t Loc_collections.ALocMap.t val typed_ast_to_list : - (Loc.t, Loc.t * Type.t) Flow_polymorphic_ast_mapper.Ast.program -> - (Loc.t * Type.TypeScheme.t) list + (ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.Ast.program -> + (ALoc.t * Type.TypeScheme.t) list + +val coverage_fold_tast : + f:('l -> 't -> 'acc -> 'acc) -> + init:'acc -> + ('l, 'l * 't) Flow_polymorphic_ast_mapper.Ast.program -> + 'acc + +val error_mapper : (ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.mapper + +val unimplemented_mapper : + (ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.mapper + +val unchecked_mapper : (ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.mapper + +val unreachable_mapper : + (ALoc.t, ALoc.t, ALoc.t, ALoc.t * Type.t) Flow_polymorphic_ast_mapper.mapper diff --git a/testgen/Makefile b/testgen/Makefile index ddce268090e..1fc09385991 100644 --- a/testgen/Makefile +++ b/testgen/Makefile @@ -1,4 +1,4 @@ -# Copyright (c) 2013-present, Facebook, Inc. +# Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. DIR:=$(dir $(abspath $(lastword $(MAKEFILE_LIST)))) diff --git a/testgen/code.ml b/testgen/code.ml index ed709b598ca..18e3ef43d8a 100644 --- a/testgen/code.ml +++ b/testgen/code.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -7,15 +7,15 @@ (* Main type for code *) type t = { - stmt : (Loc.t, Loc.t) Flow_ast.Statement.t; - stmt_deps : t list -};; + stmt: (Loc.t, Loc.t) Flow_ast.Statement.t; + stmt_deps: t list; +} (* This is mainly used for expressions. Eventually this will be turned * into Code.t. The purpose for this type is to carry dependecies for * expressions. -*) + *) type t' = { - expr : (Loc.t, Loc.t) Flow_ast.Expression.t'; - expr_deps : t list -};; + expr: (Loc.t, Loc.t) Flow_ast.Expression.t'; + expr_deps: t list; +} diff --git a/testgen/engine.ml b/testgen/engine.ml index 7a7dcd5424f..b596b845e9d 100644 --- a/testgen/engine.ml +++ b/testgen/engine.ml @@ -1,12 +1,13 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module Utils = Flowtestgen_utils;; -module Logging = Flowtestgen_logging;; -module Config = Flowtestgen_config;; + +module Utils = Flowtestgen_utils +module Logging = Flowtestgen_logging +module Config = Flowtestgen_config (* A virtual class that defines the framework for ocaml-stype rules. 
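
Editorial aside, not part of the patch: the src/typing/typed_ast_utils.mli hunk above declares the typed-AST query helpers that this refactor moves onto abstract locations. A minimal usage sketch follows, assuming a typed AST `tast` and a concrete client location `cursor` are already in hand; the wrapper names are hypothetical and only the signatures shown in that interface are relied on.

```ocaml
(* Hypothetical callers of the interface declared in typed_ast_utils.mli;
   `tast` is an (ALoc.t, ALoc.t * Type.t) typed AST and `cursor` a concrete
   Loc.t taken from a client request. *)

(* Look up the type scheme of the symbol under the cursor, dropping its span. *)
let type_scheme_at_cursor tast cursor : Type.TypeScheme.t option =
  match Typed_ast_utils.find_type_at_pos_annotation tast cursor with
  | Some (_symbol_span, scheme) -> Some scheme
  | None -> None

(* Count how many typed nodes the new coverage fold visits; the fold function
   here is a placeholder, not Flow's real coverage computation. *)
let count_typed_nodes tast : int =
  Typed_ast_utils.coverage_fold_tast ~f:(fun _loc _type acc -> acc + 1) ~init:0 tast
```

As the interface's doc comments note, the map/list folds and get-def queries now key their results by `ALoc.t`, while type-at-pos still accepts a concrete `Loc.t` because it answers client queries directly.
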
@@ -97,10 +98,9 @@ exception Fail (* 'a - type of environment element 'b - type of the environment 'c - type of the syntax *) -class virtual ['a, 'b, 'c] engine = object(self) - - - (* The backtracking is implemented using a hash table +class virtual ['a, 'b, 'c] engine = + object (self) + (* The backtracking is implemented using a hash table with a "size" integer which simulates a stack. Whenever a user writes a require function or any other functions that returns multiple values and backtracking is desired, @@ -121,41 +121,44 @@ class virtual ['a, 'b, 'c] engine = object(self) to access the values other than the top. The "size" value keeps track of the size of the stack. *) - val stack = Array.init 100 (fun _ -> []) - val mutable size = 0 + val stack = Array.init 100 (fun _ -> []) - (* This is used to keep track of the most recent used stack - level in order to do the correct forward at the right level *) - val mutable last_stack_lvl = -1 + val mutable size = 0 + (* This is used to keep track of the most recent used stack + level in order to do the correct forward at the right level *) + val mutable last_stack_lvl = -1 - (* Main methods for getting all the rules for generating programs *) - method virtual get_all_rules : unit -> ('b -> ('c * 'b)) array + (* Main methods for getting all the rules for generating programs *) + method virtual get_all_rules : unit -> ('b -> 'c * 'b) array - (* The assert function that provides backtracking. A strong + (* The assert function that provides backtracking. A strong assertion is guaranteed to be satisfied using backtracking. *) - method backtrack_on_false (b : bool) : unit = - if not b then raise Backtrack + method backtrack_on_false (b : bool) : unit = if not b then raise Backtrack - (* The assert function that will abort a rule *) - method virtual weak_assert : bool -> unit + (* The assert function that will abort a rule *) + method virtual weak_assert : bool -> unit - (* Shuffle a list. This is from + (* Shuffle a list. This is from https://stackoverflow.com/questions/15095541/how-to-shuffle-list-in-on-in-ocaml *) - method shuffle (d : 'a list) : 'a list = - let nd = List.map (fun c -> (Random.bits (), c)) d in - let sond = List.sort compare nd in - List.map snd sond + method shuffle (d : 'a list) : 'a list = + let nd = Core_list.map ~f:(fun c -> (Random.bits (), c)) d in + let sond = List.sort compare nd in + Core_list.map ~f:snd sond - (* A method for printing the stack *) - method virtual print_stack : unit -> unit - method virtual print_env : 'b -> unit - method virtual print_syntax : 'c -> unit - method virtual combine_syntax : 'c list -> string - (* A mehod for getting the name of an engine *) - method virtual get_name : unit -> string + (* A method for printing the stack *) + method virtual print_stack : unit -> unit - (* Choose a element from a list using combinatorial search. + method virtual print_env : 'b -> unit + + method virtual print_syntax : 'c -> unit + + method virtual combine_syntax : 'c list -> string + + (* A mehod for getting the name of an engine *) + method virtual get_name : unit -> string + + (* Choose a element from a list using combinatorial search. id : This is index in the stack func : This is the function that produces the data @@ -172,146 +175,135 @@ class virtual ['a, 'b, 'c] engine = object(self) param ensures that we don't return the same value for these identical require functions. *) - - method choose - (id : int) - (func : unit -> 'a list) : 'a = - (* The depth is larger than the stack right now. 
We need to + method choose (id : int) (func : unit -> 'a list) : 'a = + (* The depth is larger than the stack right now. We need to push the candidate values onto the stack *) - if id >= size then begin - (* push the data onto the stack *) - stack.(id) <- (func ()); - size <- size + 1; - end; - - (* Get the current value from the stack *) - match stack.(id) with - | [] -> raise Fail - | hd :: _ -> last_stack_lvl <- id; hd - - (* We move the pointer forward so that next time "choose" is + if id >= size then ( + (* push the data onto the stack *) + stack.(id) <- func (); + size <- size + 1 + ); + + (* Get the current value from the stack *) + match stack.(id) with + | [] -> raise Fail + | hd :: _ -> + last_stack_lvl <- id; + hd + + (* We move the pointer forward so that next time "choose" is called, we give a different result *) - method forward () = - (* The stack is empty. Abort the rule *) - if size = 0 || last_stack_lvl = -1 then - raise Fail - else begin - size <- min (last_stack_lvl + 1) size; - - (* remove the old value *) - let all_vals = stack.(size - 1) in - stack.(size - 1) <- (List.tl all_vals); - - (* If there's no more new candidate value, + method forward () = + (* The stack is empty. Abort the rule *) + if size = 0 || last_stack_lvl = -1 then + raise Fail + else ( + size <- min (last_stack_lvl + 1) size; + + (* remove the old value *) + let all_vals = stack.(size - 1) in + stack.(size - 1) <- List.tl all_vals; + + (* If there's no more new candidate value, we pop the function and move the pointer for the next level *) - if stack.(size - 1) = [] then begin - - (* pop the empty candidate value list *) - size <- size - 1; - - (* Move the pointer for the next candidate value list *) - self#forward () - end; - end - - (* Clear the stack *) - method clear () = - size <- 0; - last_stack_lvl <- -1 - - (* method for running a single rule *) - method run - (rule : 'b -> ('c * 'b)) - (env : 'b) : ('c * 'b) = - - (* run the rule *) - try rule env with - | Backtrack -> - self#forward (); - self#run rule env - | Fail -> raise Fail + if stack.(size - 1) = [] then ( + (* pop the empty candidate value list *) + size <- size - 1; + + (* Move the pointer for the next candidate value list *) + self#forward () + ) + ) + + (* Clear the stack *) + method clear () = + size <- 0; + last_stack_lvl <- -1 + + (* method for running a single rule *) + method run (rule : 'b -> 'c * 'b) (env : 'b) : 'c * 'b = + (* run the rule *) + try rule env with + | Backtrack -> + self#forward (); + self#run rule env + | Fail -> raise Fail (* Run the rule until we run out of choices *) - method run_exhaustive - (rule : 'b -> ('c * 'b)) - (env : 'b) : ('c * 'b) list = - self#clear (); - let rec helper all_result = - let r = try Some (self#run rule env) with - | Fail -> None in - match r with - | None -> all_result - | Some r -> - if size > 0 then begin - try - self#forward (); - helper (r :: all_result) - with - | Fail -> r :: all_result - end - else r :: all_result in - - helper [] - - (* Main function for generating programs exhaustively. + method run_exhaustive (rule : 'b -> 'c * 'b) (env : 'b) : ('c * 'b) list = + self#clear (); + let rec helper all_result = + let r = (try Some (self#run rule env) with Fail -> None) in + match r with + | None -> all_result + | Some r -> + if size > 0 then + try + self#forward (); + helper (r :: all_result) + with Fail -> r :: all_result + else + r :: all_result + in + helper [] + + (* Main function for generating programs exhaustively. 
Limit is an integer used to limit the number of programs at the end. *) - method gen_prog (limit : int) : ('c list * 'b) list = - let rules = self#get_all_rules () in - (* This is the main queue for storying environments and corresponding syntax *) - let queue = Queue.create () in - - (* This is used to store temporary results *) - let tmp_queue = Queue.create () in - - (* We start with empty syntax and empty environment *) - Queue.push ([], []) queue; - - (* Run a rule through all the results in the queue *) - let helper (rule : 'b -> ('c * 'b)) : unit = - (* + method gen_prog (limit : int) : ('c list * 'b) list = + let rules = self#get_all_rules () in + (* This is the main queue for storying environments and corresponding syntax *) + let queue = Queue.create () in + (* This is used to store temporary results *) + let tmp_queue = Queue.create () in + (* We start with empty syntax and empty environment *) + Queue.push ([], []) queue; + + (* Run a rule through all the results in the queue *) + let helper (rule : 'b -> 'c * 'b) : unit = + (* Printf.printf "Queue size : %d\n" (Queue.length queue); Queue.iter (fun (slist, env) -> self#print_env env; Printf.printf "Syntax:\n"; List.iter (fun s -> self#print_syntax s) slist) queue; *) - - Queue.iter (fun (slist, env) -> - (* type check the program *) - let prog = self#combine_syntax slist in - let type_check_result = - if Utils.is_typecheck (self#get_name ()) || Config.(config.random) then - Utils.type_check prog - else - None in - match type_check_result with - | Some msg -> Logging.log_early_type_error prog msg - | None -> - let result = self#run_exhaustive rule env in - if result = [] then - (* We failed. Put the old syntax and env back into the queue *) - Queue.push (slist, env) tmp_queue - else - List.iter (fun (s, e) -> Queue.push (s :: slist, e) tmp_queue) result) - queue; - - (* transfer the run results into the queue *) - Queue.clear queue; - Queue.transfer tmp_queue queue in - - let rec limit_result count acc all_result = match all_result with - | [] -> acc - | _ when count >= limit -> acc - | hd :: tl -> limit_result (count + 1) (hd :: acc) tl in - - (* Run all the rules *) - Array.iter (fun rule -> helper rule) rules; - - (* We limit the number of results at the end *) - Queue.fold (fun acc elt -> elt :: acc) [] queue - |> limit_result 0 [] - -end;; + Queue.iter + (fun (slist, env) -> + (* type check the program *) + let prog = self#combine_syntax slist in + let type_check_result = + if Utils.is_typecheck (self#get_name ()) || Config.(config.random) then + Utils.type_check prog + else + None + in + match type_check_result with + | Some msg -> Logging.log_early_type_error prog msg + | None -> + let result = self#run_exhaustive rule env in + if result = [] then + (* We failed. 
Put the old syntax and env back into the queue *) + Queue.push (slist, env) tmp_queue + else + List.iter (fun (s, e) -> Queue.push (s :: slist, e) tmp_queue) result) + queue; + + (* transfer the run results into the queue *) + Queue.clear queue; + Queue.transfer tmp_queue queue + in + let rec limit_result count acc all_result = + match all_result with + | [] -> acc + | _ when count >= limit -> acc + | hd :: tl -> limit_result (count + 1) (hd :: acc) tl + in + (* Run all the rules *) + Array.iter (fun rule -> helper rule) rules; + + (* We limit the number of results at the end *) + Queue.fold (fun acc elt -> elt :: acc) [] queue |> limit_result 0 [] + end diff --git a/testgen/flowtestgen.ml b/testgen/flowtestgen.ml index 907e84d8c5c..39c0c3318c1 100644 --- a/testgen/flowtestgen.ml +++ b/testgen/flowtestgen.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,28 +8,26 @@ (* This program generates programs that can type check but have * runtime error *) -module Utils = Flowtestgen_utils;; -module Config = Flowtestgen_config;; -module Logging = Flowtestgen_logging;; -module Syntax = Syntax_base;; -open Printf;; +module Utils = Flowtestgen_utils +module Config = Flowtestgen_config +module Logging = Flowtestgen_logging +module Syntax = Syntax_base +open Printf let check_file (filename : string) (cmd : string) = - if not (Sys.file_exists filename) then begin + if not (Sys.file_exists filename) then ( printf "%s does not exist in the directory.\n" filename; printf "Creating %s ...\n" filename; - if Sys.command cmd > 0 then - failwith ("Failed to create " ^ filename) - end;; + if Sys.command cmd > 0 then failwith ("Failed to create " ^ filename) + ) let sys_init () = printf "Checking required libraries...\n"; check_file ".flowconfig" "flow init"; check_file "package.json" "npm init -f"; - check_file - "./node_modules/.bin/flow-remove-types" - "npm install flow-remove-types";; - (* + check_file "./node_modules/.bin/flow-remove-types" "npm install flow-remove-types" + +(* check_file "./node_modules/.bin/babel" "npm install babel-cli babel-preset-flow"; @@ -39,10 +37,11 @@ let sys_init () = *) let move_func (prog : Syntax.t list) = - let is_func s = match s with - | Syntax.Stmt (Flow_ast.Statement.FunctionDeclaration _) -> true - | _ -> false in - + let is_func s = + match s with + | Syntax.Stmt (Flow_ast.Statement.FunctionDeclaration _) -> true + | _ -> false + in let all_func = List.filter is_func prog in let all_non_func = List.filter (fun p -> not (is_func p)) prog in all_func @ all_non_func @@ -50,10 +49,10 @@ let move_func (prog : Syntax.t list) = (* Main entry functions for generating code *) let mk_code engine prog_num = engine#gen_prog prog_num - |> (List.map (fun (slist, env) -> - (* We add type assertions at the end *) - let prog = slist |> move_func in - Printf.sprintf "%s\n%!" ((Syntax.combine_syntax prog) ^ (Ruleset_base.str_of_env env)))) + |> Core_list.map ~f:(fun (slist, env) -> + (* We add type assertions at the end *) + let prog = slist |> move_func in + Printf.sprintf "%s\n%!" (Syntax.combine_syntax prog ^ Ruleset_base.str_of_env env)) (* Generate some ASTs from scratch and then type check them. 
*) let main () = @@ -61,29 +60,41 @@ let main () = Random.self_init (); printf "Generating programs...\n%!"; let base_engine = - if Config.(config.random) - then new Ruleset_base.ruleset_random_base - else new Ruleset_base.ruleset_base in + if Config.(config.random) then + new Ruleset_base.ruleset_random_base + else + new Ruleset_base.ruleset_base + in let depth_engine = - if Config.(config.random) - then new Ruleset_depth.ruleset_random_depth - else new Ruleset_depth.ruleset_depth in + if Config.(config.random) then + new Ruleset_depth.ruleset_random_depth + else + new Ruleset_depth.ruleset_depth + in let func_engine = - if Config.(config.random) - then new Ruleset_func.ruleset_random_func - else new Ruleset_func.ruleset_func in + if Config.(config.random) then + new Ruleset_func.ruleset_random_func + else + new Ruleset_func.ruleset_func + in let optional_engine = - if Config.(config.random) - then new Ruleset_optional.ruleset_random_optional - else new Ruleset_optional.ruleset_optional in + if Config.(config.random) then + new Ruleset_optional.ruleset_random_optional + else + new Ruleset_optional.ruleset_optional + in let exact_engine = - if Config.(config.random) - then new Ruleset_exact.ruleset_random_exact - else new Ruleset_exact.ruleset_exact in + if Config.(config.random) then + new Ruleset_exact.ruleset_random_exact + else + new Ruleset_exact.ruleset_exact + in let union_engine = - if Config.(config.random) - then new Ruleset_union.ruleset_random_union - else new Ruleset_union.ruleset_union in + if Config.(config.random) then + new Ruleset_union.ruleset_random_union + else + new Ruleset_union.ruleset_union + in ignore base_engine; ignore depth_engine; ignore func_engine; @@ -95,25 +106,34 @@ let main () = printf "Generated %d programs.\n%!" (List.length all_prog); (* Filter out all the programs that don't type check *) - let type_check_progs = List.filter (fun content -> - let result = - if Utils.is_typecheck (engine#get_name ()) then - Utils.type_check content - else - None in - match result with - | None -> true - | Some msg -> - Logging.log_type_error content msg; - false) all_prog in - + let type_check_progs = + List.filter + (fun content -> + let result = + if Utils.is_typecheck (engine#get_name ()) then + Utils.type_check content + else + None + in + match result with + | None -> true + | Some msg -> + Logging.log_type_error content msg; + false) + all_prog + in (* run all the type check programs *) let batch_result = Utils.batch_run type_check_progs in - List.iter2 (fun content msg -> match msg with + List.iter2 + (fun content msg -> + match msg with | None -> Logging.log_no_error content - | Some msg -> Logging.log_runtime_error content msg) type_check_progs batch_result; + | Some msg -> Logging.log_runtime_error content msg) + type_check_progs + batch_result; printf "Done!\n%!"; Logging.print_stats (); - Logging.close ();; + Logging.close () -main ();; +;; +main () diff --git a/testgen/flowtestgen_config.ml b/testgen/flowtestgen_config.ml index 756d63ce778..949025c1dc2 100644 --- a/testgen/flowtestgen_config.ml +++ b/testgen/flowtestgen_config.ml @@ -1,85 +1,63 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module Config_utils = Flowtestgen_utils.Config;; -open Printf;; +module Config_utils = Flowtestgen_utils.Config +open Printf (* Config module *) type t = { (* the number of programs we generate *) - num_prog : int; + num_prog: int; (* Whether we log to the console *) - log_to_console : bool; + log_to_console: bool; (* Whether we want to type check using Flow *) - type_check : bool; - + type_check: bool; (* Whether we want to generate programs randomly *) - random : bool; - + random: bool; (* which engine to use *) - engine : string + engine: string; } (* Default config value *) -let default_config : t = { - num_prog = 10; - log_to_console = false; - type_check = false; - random = true; - engine = "base"; -};; +let default_config : t = + { num_prog = 10; log_to_console = false; type_check = false; random = true; engine = "base" } (* Default filename for the config file *) -let config_filename = "flowtestgen.json";; +let config_filename = "flowtestgen.json" let string_of_config (conf : t) : string = - let plist = Config_utils.( - [("num_prog", Int conf.num_prog); - ("type_check", Bool conf.type_check); - ("random", Bool conf.random); - ("engine", Str conf.engine); - ("log_to_console", Bool conf.log_to_console)]) in - Config_utils.string_of_config plist;; + let plist = + Config_utils. + [ + ("num_prog", Int conf.num_prog); + ("type_check", Bool conf.type_check); + ("random", Bool conf.random); + ("engine", Str conf.engine); + ("log_to_console", Bool conf.log_to_console); + ] + in + Config_utils.string_of_config plist let load_config () : t = - if not (Sys.file_exists config_filename) then begin + if not (Sys.file_exists config_filename) then ( printf "No config file detected. Creating one with default values...\n"; let out = open_out config_filename in let config_str = string_of_config default_config in fprintf out "%s\n" config_str; - close_out out; - end; + close_out out + ); let conf = Config_utils.load_json_config config_filename in - let dc = default_config in { - num_prog = - (Config_utils.get_int - ~default:dc.num_prog - conf - "num_prog"); - engine = - (Config_utils.get_str - ~default:dc.engine - conf - "engine"); - type_check = - (Config_utils.get_bool - ~default:dc.type_check - conf - "type_check"); - random = - (Config_utils.get_bool - ~default:dc.random - conf - "random"); - log_to_console = - (Config_utils.get_bool - ~default:dc.log_to_console - conf - "log_to_console") + let dc = default_config in + { + num_prog = Config_utils.get_int ~default:dc.num_prog conf "num_prog"; + engine = Config_utils.get_str ~default:dc.engine conf "engine"; + type_check = Config_utils.get_bool ~default:dc.type_check conf "type_check"; + random = Config_utils.get_bool ~default:dc.random conf "random"; + log_to_console = Config_utils.get_bool ~default:dc.log_to_console conf "log_to_console"; } -let config = load_config ();; +let config = load_config () diff --git a/testgen/flowtestgen_logging.ml b/testgen/flowtestgen_logging.ml index 008c1e9863f..2e99f325efc 100644 --- a/testgen/flowtestgen_logging.ml +++ b/testgen/flowtestgen_logging.ml @@ -1,39 +1,47 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module Config = Flowtestgen_config;; -open Printf;; +module Config = Flowtestgen_config +open Printf +let runtime_error_file = "runtime_error.txt" -let runtime_error_file = "runtime_error.txt";; -let type_error_file = "type_error.txt";; -let early_type_error_file = "early_type_error.txt";; -let no_error_file = "no_error.txt";; +let type_error_file = "type_error.txt" -let no_error_count = ref 0;; -let type_error_count = ref 0;; -let early_type_error_count = ref 0;; -let runtime_error_count = ref 0;; +let early_type_error_file = "early_type_error.txt" + +let no_error_file = "no_error.txt" + +let no_error_count = ref 0 + +let type_error_count = ref 0 + +let early_type_error_count = ref 0 + +let runtime_error_count = ref 0 let runtime_error_out = if Config.(config.log_to_console) then stdout else open_out runtime_error_file + let type_error_out = if Config.(config.log_to_console) then stdout else open_out type_error_file + let early_type_error_out = if Config.(config.log_to_console) then stdout else open_out early_type_error_file + let no_error_out = if Config.(config.log_to_console) then stdout @@ -43,69 +51,60 @@ let no_error_out = let reset () = no_error_count := 0; type_error_count := 0; - runtime_error_count := 0;; + runtime_error_count := 0 let log_no_error code = no_error_count := !no_error_count + 1; - fprintf no_error_out "// Good program ==========\n%s\n%!" code;; + fprintf no_error_out "// Good program ==========\n%s\n%!" code let log_type_error code msg = type_error_count := !type_error_count + 1; printf "TYPE ERROR.\n%!"; fprintf type_error_out "//====================\n%s\n%!" code; - fprintf type_error_out "/*\nType Error: \n%s\n*/\n%!" msg;; + fprintf type_error_out "/*\nType Error: \n%s\n*/\n%!" msg -let log_early_type_error code msg = +let log_early_type_error code msg = early_type_error_count := !early_type_error_count + 1; printf "EARLY TYPE ERROR.\n%!"; fprintf early_type_error_out "//====================\n%s\n%!" code; - fprintf early_type_error_out "/*\nType Error: \n%s\n*/\n%!" msg;; + fprintf early_type_error_out "/*\nType Error: \n%s\n*/\n%!" msg -let log_runtime_error code msg = +let log_runtime_error code msg = runtime_error_count := !runtime_error_count + 1; printf "RUNTIME ERROR.\n%!"; fprintf runtime_error_out "//====================\n%s\n%!" code; - fprintf runtime_error_out "/*\nRuntime Error: \n%s\n*/\n%!" msg;; - + fprintf runtime_error_out "/*\nRuntime Error: \n%s\n*/\n%!" msg let print_stats () = let early_type_count_str = - sprintf - "%d early type errors written to %s\n%!" - !early_type_error_count - early_type_error_file in + sprintf "%d early type errors written to %s\n%!" !early_type_error_count early_type_error_file + in fprintf early_type_error_out "// %s\n%!" early_type_count_str; printf "%s%!" early_type_count_str; (* print type error message *) let type_count_str = - sprintf - "%d type errors written to %s\n%!" - !type_error_count - type_error_file in + sprintf "%d type errors written to %s\n%!" !type_error_count type_error_file + in fprintf type_error_out "// %s\n%!" type_count_str; printf "%s%!" type_count_str; (* print runtime error message *) let runtime_count_str = - sprintf - "%d runtime errors written to %s\n%!" - !runtime_error_count - runtime_error_file in + sprintf "%d runtime errors written to %s\n%!" !runtime_error_count runtime_error_file + in fprintf runtime_error_out "// %s\n%!" runtime_count_str; printf "%s%!" 
runtime_count_str; (* Print no error message *) let noerror_count_str = - sprintf - "%d programs without error written to %s.\n%!" - !no_error_count - no_error_file in + sprintf "%d programs without error written to %s.\n%!" !no_error_count no_error_file + in fprintf no_error_out "// %s\n%!" noerror_count_str; - printf "%s%!" noerror_count_str;; - + printf "%s%!" noerror_count_str + let close () = if not Config.(config.log_to_console) then close_out early_type_error_out; if not Config.(config.log_to_console) then close_out type_error_out; if not Config.(config.log_to_console) then close_out runtime_error_out; - if not Config.(config.log_to_console) then close_out no_error_out;; + if not Config.(config.log_to_console) then close_out no_error_out diff --git a/testgen/flowtestgen_types.ml b/testgen/flowtestgen_types.ml index 846a34befe5..03cb4060d97 100644 --- a/testgen/flowtestgen_types.ml +++ b/testgen/flowtestgen_types.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,152 +8,166 @@ module Ast = Flow_ast (* Main module for generating code *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; -module Config = Flowtestgen_config;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils +module Config = Flowtestgen_config (* Set for types *) -module TypeSet' = Set.Make( - struct - type t = T.t' - let compare = Pervasives.compare - end) +module TypeSet' = Set.Make (struct + type t = T.t' + + let compare = Pervasives.compare +end) + module TypeSet = struct include TypeSet' - let choose choose_func set = - set - |> TypeSet'.elements - |> Array.of_list - |> choose_func -end;; + let choose choose_func set = set |> TypeSet'.elements |> Array.of_list |> choose_func +end (* This is a set of supported types so far *) -let primitive_types = TypeSet.of_list [T.Number; T.String; T.Boolean];; -let literal_types = - TypeSet.of_list [T.StringLiteral T.StringLiteral.({value = "strlit"; - raw = "\"strlit\"";}); - T.NumberLiteral T.NumberLiteral.({value = 2.2; - raw = "2.2";}); - T.BooleanLiteral T.BooleanLiteral.({value = true; - raw = "true";}); - ];; +let primitive_types = TypeSet.of_list [T.Number; T.String; T.Boolean] +let literal_types = + TypeSet.of_list + [ + T.StringLiteral T.StringLiteral.{ value = "strlit"; raw = "\"strlit\"" }; + T.NumberLiteral T.NumberLiteral.{ value = 2.2; raw = "2.2" }; + T.BooleanLiteral T.BooleanLiteral.{ value = true; raw = "true" }; + ] (* Make a union type out of an array of types *) let mk_union_type (tarray : T.t' array) : T.t' = - let open Array in - match length tarray with - | 0 | 1 -> failwith "Must provide at least two types" - | _ -> T.Union - ((Loc.none, get tarray 0), - (Loc.none, get tarray 1), - (List.map - (fun (s) -> (Loc.none, s)) - (to_list (sub tarray 2 ((length tarray) - 2))))) + Array.( + match length tarray with + | 0 + | 1 -> + failwith "Must provide at least two types" + | _ -> + T.Union + ( (Loc.none, get tarray 0), + (Loc.none, get tarray 1), + List.map (fun s -> (Loc.none, s)) (to_list (sub tarray 2 (length tarray - 2))) )) (* Make an object type output of a list of properties *) let mk_obj_type (props : (string * T.t') list) : T.t' = - let open 
T.Object in - let plist = List.map (fun (p, t) -> - let key = E.Object.Property.(Identifier (Loc.none, p)) in - let value = T.Object.Property.(Init (Loc.none, t)) in - let open T.Object.Property in - let variance = match Random.int 3 with - | 0 -> None - | 1 -> Some (Loc.none, Ast.Variance.Plus) - | _ -> Some (Loc.none, Ast.Variance.Minus) in - Property (Loc.none, {key; - value; - optional = false; - static = false; - _method = false; - variance})) props in - T.Object {exact = Random.bool (); - properties = plist} + T.Object.( + let plist = + Core_list.map + ~f:(fun (p, t) -> + let key = E.Object.Property.(Identifier (Loc.none, p)) in + let value = T.Object.Property.(Init (Loc.none, t)) in + T.Object.Property.( + let variance = + match Random.int 3 with + | 0 -> None + | 1 -> Some (Loc.none, Ast.Variance.Plus) + | _ -> Some (Loc.none, Ast.Variance.Minus) + in + Property + ( Loc.none, + { key; value; optional = false; static = false; _method = false; variance } ))) + props + in + T.Object { exact = Random.bool (); properties = plist }) let mk_tuple_type (tlist : T.t' list) : T.t' = - T.Tuple (List.map (fun t -> (Loc.none, t)) tlist) + T.Tuple (Core_list.map ~f:(fun t -> (Loc.none, t)) tlist) (* Return a string literal of a given type *) let strlit_of_type (t : T.t') : E.t' = let mk_type_lit (tstring : string) = - let open Ast.Literal in - {value = String tstring; raw = "\"" ^ tstring ^ "\""} in - E.Literal (match t with - | T.String -> mk_type_lit "string" - | T.Number -> mk_type_lit "number" - | T.Boolean -> mk_type_lit "boolean" - | _ -> mk_type_lit "null") + Ast.Literal.{ value = String tstring; raw = "\"" ^ tstring ^ "\"" } + in + E.Literal + (match t with + | T.String -> mk_type_lit "string" + | T.Number -> mk_type_lit "number" + | T.Boolean -> mk_type_lit "boolean" + | _ -> mk_type_lit "null") (* Make a literal expression.*) let rec mk_literal_expr (t : T.t') : Code.t' = - let open Code in - match t with - | T.Number -> - let lit = Ast.Literal.({value = Number 1.1; raw = "1.1"}) in - {expr = E.Literal lit; expr_deps = []} - | T.String -> - let lit = Ast.Literal.({value = String "foo"; raw = "\"foo\""}) in - {expr = E.Literal lit; expr_deps = []} - | T.Boolean -> - let lit = Ast.Literal.({value = Boolean false; raw = "false"}) in - {expr = E.Literal lit; expr_deps = []} - | T.Union (t1, t2, rest) -> - let all_types = (t1 :: t2 :: rest) |> (List.map snd) in - let t = Utils.random_choice (Array.of_list all_types) in - mk_literal_expr t - | T.Object obj_t -> mk_obj_literal_expr obj_t - | T.StringLiteral lit -> - let value = T.StringLiteral.(lit.value) in - let raw = T.StringLiteral.(lit.raw) in - let lit = Ast.Literal.({value = String value; raw}) in - {expr = E.Literal lit; expr_deps = []} - | T.NumberLiteral lit -> - let value = T.NumberLiteral.(lit.value) in - let raw = T.NumberLiteral.(lit.raw) in - let lit = Ast.Literal.({value = Number value; raw}) in - {expr = E.Literal lit; expr_deps = []} - | T.BooleanLiteral lit -> - let value = T.BooleanLiteral.(lit.value) in - let raw = T.BooleanLiteral.(lit.raw) in - let lit = Ast.Literal.({value = Boolean value; raw}) in - {expr = E.Literal lit; expr_deps = []} - | T.Tuple tlist -> - let elements = List.map (fun (_, tt) -> - let e = mk_literal_expr tt in - Some (E.Expression (Loc.none, e.expr))) tlist in - {expr = E.Array.(E.Array {elements}); - expr_deps = []} - | T.Array t -> mk_literal_expr (T.Tuple [t; t; t; t; t;]) - | _ -> - let lit = Ast.Literal.({value = Null; raw = "null"}) in - {expr = E.Literal lit; expr_deps = []} + Code.( + 
match t with + | T.Number -> + let lit = Ast.Literal.{ value = Number 1.1; raw = "1.1" } in + { expr = E.Literal lit; expr_deps = [] } + | T.String -> + let lit = Ast.Literal.{ value = String "foo"; raw = "\"foo\"" } in + { expr = E.Literal lit; expr_deps = [] } + | T.Boolean -> + let lit = Ast.Literal.{ value = Boolean false; raw = "false" } in + { expr = E.Literal lit; expr_deps = [] } + | T.Union (t1, t2, rest) -> + let all_types = t1 :: t2 :: rest |> Core_list.map ~f:snd in + let t = Utils.random_choice (Array.of_list all_types) in + mk_literal_expr t + | T.Object obj_t -> mk_obj_literal_expr obj_t + | T.StringLiteral lit -> + let value = T.StringLiteral.(lit.value) in + let raw = T.StringLiteral.(lit.raw) in + let lit = Ast.Literal.{ value = String value; raw } in + { expr = E.Literal lit; expr_deps = [] } + | T.NumberLiteral lit -> + let value = T.NumberLiteral.(lit.value) in + let raw = T.NumberLiteral.(lit.raw) in + let lit = Ast.Literal.{ value = Number value; raw } in + { expr = E.Literal lit; expr_deps = [] } + | T.BooleanLiteral lit -> + let value = T.BooleanLiteral.(lit.value) in + let raw = T.BooleanLiteral.(lit.raw) in + let lit = Ast.Literal.{ value = Boolean value; raw } in + { expr = E.Literal lit; expr_deps = [] } + | T.Tuple tlist -> + let elements = + Core_list.map + ~f:(fun (_, tt) -> + let e = mk_literal_expr tt in + Some (E.Expression (Loc.none, e.expr))) + tlist + in + { expr = E.Array.(E.Array { elements }); expr_deps = [] } + | T.Array t -> mk_literal_expr (T.Tuple [t; t; t; t; t]) + | _ -> + let lit = Ast.Literal.{ value = Null; raw = "null" } in + { expr = E.Literal lit; expr_deps = [] }) (* Make an object literal based on its type *) and mk_obj_literal_expr (t : T.Object.t) : Code.t' = - let open Code in - let prop_init_list = - List.map (fun p -> - let open T.Object.Property in - match p with - | T.Object.Property (_, {key = k; - value = Init (_, ptype); - optional = o; - static = _; - _method = _; - variance = _}) -> (k, o, mk_literal_expr ptype) - | _ -> failwith "Unsupported property") T.Object.(t.properties) - (* Randomly remove some optional properties *) - |> List.filter (fun (_, o, _) -> not o || (Random.bool ())) - |> List.map (fun (key, _, expr_t) -> - let open E.Object.Property in - E.Object.Property (Loc.none, {key; - value = Init (Loc.none, expr_t.expr); - _method = false; - shorthand = false})) in - E.Object.({expr = E.Object {properties = prop_init_list}; - expr_deps = []}) + Code.( + let prop_init_list = + Core_list.map + ~f:(fun p -> + T.Object.Property.( + match p with + | T.Object.Property + ( _, + { + key = k; + value = Init (_, ptype); + optional = o; + static = _; + _method = _; + variance = _; + } ) -> + (k, o, mk_literal_expr ptype) + | _ -> failwith "Unsupported property")) + T.Object.(t.properties) + (* Randomly remove some optional properties *) + |> List.filter (fun (_, o, _) -> (not o) || Random.bool ()) + |> Core_list.map ~f:(fun (key, _, expr_t) -> + E.Object.Property.( + E.Object.Property + ( Loc.none, + { + key; + value = Init (Loc.none, expr_t.expr); + _method = false; + shorthand = false; + } ))) + in + E.Object.{ expr = E.Object { properties = prop_init_list }; expr_deps = [] }) diff --git a/testgen/flowtestgen_utils.ml b/testgen/flowtestgen_utils.ml index 3ce7c2d7c17..d6d4c606298 100644 --- a/testgen/flowtestgen_utils.ml +++ b/testgen/flowtestgen_utils.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. 
* * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -8,32 +8,38 @@ module Ast = Flow_ast (* This file contains util functions*) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module F = Flow_ast.Function;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module F = Flow_ast.Function -module StrSet = Set.Make(struct - type t = string - let compare = Pervasives.compare - end) +module StrSet = Set.Make (struct + type t = string + + let compare = Pervasives.compare +end) let random_choice (arr : 'a array) : 'a = arr.(Random.int (Array.length arr)) (* Generate a sequence of numbers *) let sequence i j = let rec helper n acc = - if n < i then acc else helper (n - 1) (n :: acc) in - helper j [];; + if n < i then + acc + else + helper (n - 1) (n :: acc) + in + helper j [] (* Read all lines from the in_channel *) let read_all ic : string list = let lines = ref [] in try - while true; do + while true do lines := input_line ic :: !lines - done; !lines + done; + !lines with End_of_file -> close_in ic; List.rev !lines @@ -45,7 +51,7 @@ let read_file filename = String.concat "\n" lines (* convert an AST into a string for printing *) - (* +(* let string_of_ast endline func = fun (ast) -> let layout = func (Loc.none, ast) in @@ -61,150 +67,167 @@ let string_of_type = string_of_ast false Js_layout_generator.type_;; *) - (* A hacky version of string_of function for AST nodes. This will be replaced once we have a better solution. *) let rec string_of_pattern (pattern : (Loc.t, Loc.t) P.t') = match pattern with | P.Identifier id -> - let open P.Identifier in - (snd id.name) ^ - (if id.optional then "?" else "") ^ " " ^ - (match id.annot with - | Some (_, (_, t)) -> " : " ^ (string_of_type t) - | None -> "") + P.Identifier.( + Flow_ast_utils.name_of_ident id.name + ^ ( if id.optional then + "?" 
+ else + "" ) + ^ " " + ^ + (match id.annot with + | Ast.Type.Available (_, (_, t)) -> " : " ^ string_of_type t + | Ast.Type.Missing _ -> "")) | P.Expression (_, e) -> string_of_expr e - | P.Assignment assign -> - let open P.Assignment in - (string_of_pattern (snd assign.left)) ^ - " = " ^ - (string_of_expr (snd assign.right)) | _ -> failwith "[string_of_pattern] unsupported pattern" and string_of_expr (expr : (Loc.t, Loc.t) E.t') = let string_of_proplist plist = - let helper prop = match prop with + let helper prop = + match prop with | E.Object.Property (_, E.Object.Property.Init p) -> - let open E.Object.Property in - (match p.key, p.value with - | Identifier (_, name), (_, e) -> name ^ " : " ^ (string_of_expr e) - | _ -> failwith "Unsupported expression") - | _ -> failwith "Unsupported property" in - String.concat ", " (List.map helper plist) in - + E.Object.Property.( + (match (p.key, p.value) with + | (Identifier (_, { Ast.Identifier.name; comments = _ }), (_, e)) -> + name ^ " : " ^ string_of_expr e + | _ -> failwith "Unsupported expression")) + | _ -> failwith "Unsupported property" + in + String.concat ", " (Core_list.map ~f:helper plist) + in let string_of_assign_op op = - let open E.Assignment in - match op with - | Assign -> "=" - | PlusAssign -> "+=" - | MinusAssign -> "-=" - | MultAssign -> "*=" - | ExpAssign -> "^=" - | DivAssign -> "/=" - | _ -> failwith "unsupported assign" in - + E.Assignment.( + match op with + | None -> "=" + | Some PlusAssign -> "+=" + | Some MinusAssign -> "-=" + | Some MultAssign -> "*=" + | Some ExpAssign -> "^=" + | Some DivAssign -> "/=" + | _ -> failwith "unsupported assign") + in match expr with - | E.Object o -> - "{" ^ (string_of_proplist E.Object.(o.properties)) ^ "}" + | E.Object o -> "{" ^ string_of_proplist E.Object.(o.properties) ^ "}" | E.Literal lit -> Ast.Literal.(lit.raw) | E.Assignment assign -> - let open E.Assignment in - [(string_of_pattern (snd assign.left)); - (string_of_assign_op assign.operator); - (string_of_expr (snd assign.right))] - |> String.concat " " + E.Assignment.( + [ + string_of_pattern (snd assign.left); + string_of_assign_op assign.operator; + string_of_expr (snd assign.right); + ] + |> String.concat " ") | E.Call call -> - let open E.Call in - let callee_str = string_of_expr (snd call.callee) in - let arglist_str = - call.arguments - |> List.map (fun a -> match a with - | E.Expression (_, e) -> e - | E.Spread _ -> failwith "[string_of_expr] call does not support spread argument.") - |> List.map string_of_expr - |> String.concat ", " in - callee_str ^ "(" ^ arglist_str ^ ")" - | E.Identifier (_, id) -> id + E.Call.( + let callee_str = string_of_expr (snd call.callee) in + let arglist_str = + call.arguments + |> Core_list.map ~f:(fun a -> + match a with + | E.Expression (_, e) -> e + | E.Spread _ -> failwith "[string_of_expr] call does not support spread argument.") + |> Core_list.map ~f:string_of_expr + |> String.concat ", " + in + callee_str ^ "(" ^ arglist_str ^ ")") + | E.Identifier (_, { Ast.Identifier.name; comments = _ }) -> name | E.Member mem -> - let open E.Member in - let obj_str = string_of_expr (snd mem._object) in - let prop_str = match mem.property with - | PropertyIdentifier (_, id) -> id - | PropertyExpression (_, e) -> string_of_expr e - | PropertyPrivateName (_, (_, id)) -> id in - obj_str ^ "." 
^ prop_str + E.Member.( + let obj_str = string_of_expr (snd mem._object) in + let prop_str = + match mem.property with + | PropertyIdentifier (_, { Ast.Identifier.name; comments = _ }) -> name + | PropertyExpression (_, e) -> string_of_expr e + | PropertyPrivateName (_, (_, { Ast.Identifier.name; comments = _ })) -> name + in + obj_str ^ "." ^ prop_str) | E.TypeCast cast -> - let open E.TypeCast in - (string_of_expr (snd cast.expression)) ^ - " : " ^ - (string_of_type (snd (snd cast.annot))) + E.TypeCast.( + string_of_expr (snd cast.expression) ^ " : " ^ string_of_type (snd (snd cast.annot))) | E.Array array -> - let open E.Array in - "[" ^ - (List.map (fun elt -> match elt with - | Some (E.Expression (_, e)) -> string_of_expr e - | Some (E.Spread (_, e)) -> string_of_expr (E.SpreadElement.((snd e.argument))) - | None -> "") array.elements - |> (String.concat ", ")) ^ - "]" + E.Array.( + "[" + ^ ( Core_list.map + ~f:(fun elt -> + match elt with + | Some (E.Expression (_, e)) -> string_of_expr e + | Some (E.Spread (_, e)) -> string_of_expr E.SpreadElement.(snd e.argument) + | None -> "") + array.elements + |> String.concat ", " ) + ^ "]") | _ -> failwith "unknown expr" and string_of_stmt (stmt : (Loc.t, Loc.t) S.t') = + let string_of_function_param = function + | (_, { Ast.Function.Param.argument = (_, patt); default = None }) -> string_of_pattern patt + | (_, { Ast.Function.Param.argument = (_, patt); default = Some (_, expr) }) -> + string_of_pattern patt ^ " = " ^ string_of_expr expr + in match stmt with | S.Block b -> S.Block.(b.body) - |> List.map snd - |> List.map string_of_stmt + |> Core_list.map ~f:snd + |> Core_list.map ~f:string_of_stmt |> String.concat "\n" | S.Empty -> "\n" | S.FunctionDeclaration func -> - let open Ast.Function in - let fname = match func.id with - | Some (_, n) -> n - | None -> "" in - let params_str = - let (_, { Ast.Function.Params.params; rest = _ }) = func.params in - params - |> List.map snd - |> List.map string_of_pattern - |> String.concat ", " in - let body_str = match func.body with - | BodyBlock (_, s) -> string_of_stmt (S.Block s) - | BodyExpression (_, e) -> string_of_expr e in - let ret_type_str = match func.return with - | F.Available (_, (_, t)) -> ": " ^ string_of_type t - | F.Missing _ -> "" in - "function " ^ fname ^ "(" ^ params_str ^ ") " ^ ret_type_str ^ " {\n" ^ - body_str ^ - "}\n" + Ast.Function.( + let fname = + match func.id with + | Some (_, { Ast.Identifier.name; comments = _ }) -> name + | None -> "" + in + let params_str = + let (_, { Ast.Function.Params.params; rest = _ }) = func.params in + params |> Core_list.map ~f:string_of_function_param |> String.concat ", " + in + let body_str = + match func.body with + | BodyBlock (_, s) -> string_of_stmt (S.Block s) + | BodyExpression (_, e) -> string_of_expr e + in + let ret_type_str = + match func.return with + | T.Available (_, (_, t)) -> ": " ^ string_of_type t + | T.Missing _ -> "" + in + "function " ^ fname ^ "(" ^ params_str ^ ") " ^ ret_type_str ^ " {\n" ^ body_str ^ "}\n") | S.Return r -> - let open S.Return in - (match r.argument with - | Some (_, e) -> "return " ^ (string_of_expr e) ^ "\n;" - | None -> "return;\n") + S.Return.( + (match r.argument with + | Some (_, e) -> "return " ^ string_of_expr e ^ "\n;" + | None -> "return;\n")) | S.VariableDeclaration decl -> - let open S.VariableDeclaration in - let string_of_dtor dtor = - let open S.VariableDeclaration.Declarator in - let init_str = match dtor.init with - | Some (_, e) -> "= " ^ (string_of_expr e) - | None -> "" in - 
(string_of_pattern (snd dtor.id)) ^ init_str in - let kind_str = match decl.kind with - | Var -> "var" - | Let -> "let" - | Const -> "const" in - let dlist = List.map snd decl.declarations in - let dlist_str = String.concat ", " (List.map string_of_dtor dlist) in - kind_str ^ " " ^ dlist_str ^ "\n" - | S.Expression e -> - let open S.Expression in - (string_of_expr (snd e.expression)) ^ ";\n" + S.VariableDeclaration.( + let string_of_dtor dtor = + S.VariableDeclaration.Declarator.( + let init_str = + match dtor.init with + | Some (_, e) -> "= " ^ string_of_expr e + | None -> "" + in + string_of_pattern (snd dtor.id) ^ init_str) + in + let kind_str = + match decl.kind with + | Var -> "var" + | Let -> "let" + | Const -> "const" + in + let dlist = Core_list.map ~f:snd decl.declarations in + let dlist_str = String.concat ", " (Core_list.map ~f:string_of_dtor dlist) in + kind_str ^ " " ^ dlist_str ^ "\n") + | S.Expression e -> S.Expression.(string_of_expr (snd e.expression) ^ ";\n") | _ -> failwith "[string_of_stmt] Unspported stmt" and string_of_type (t : (Loc.t, Loc.t) T.t') = - match t with | T.Any -> "any" | T.Mixed -> "mixed" @@ -215,150 +238,176 @@ and string_of_type (t : (Loc.t, Loc.t) T.t') = | T.String -> "string" | T.Boolean -> "boolean" | T.Object ot -> - let open T.Object in - let string_of_prop prop = match prop with - | T.Object.Property (_, p) -> - let open T.Object.Property in - let key_str = match p.key with - | E.Object.Property.Literal (_, lit) -> Ast.Literal.(lit.raw) - | E.Object.Property.Identifier (_, name) -> name - | E.Object.Property.PrivateName (_, (_, name)) -> name - | E.Object.Property.Computed (_, e) -> string_of_expr e in - let t_str = match p.value with - | Init (_, init_t) -> string_of_type init_t - | Get (_, ft) -> string_of_type (T.Function ft) - | Set (_, ft) -> string_of_type (T.Function ft) in - let opt = if p.optional then "?" else "" in - key_str ^ opt ^ " : " ^ t_str - | _ -> failwith "[string_of_prop] unsupported property" in - let prop_str_list = ot.properties - |> List.map string_of_prop - |> String.concat ", " in - if ot.exact then "{|" ^ prop_str_list ^ "|}" - else "{" ^ prop_str_list ^ "}" + T.Object.( + let string_of_prop prop = + match prop with + | T.Object.Property (_, p) -> + T.Object.Property.( + let key_str = + match p.key with + | E.Object.Property.Literal (_, lit) -> Ast.Literal.(lit.raw) + | E.Object.Property.Identifier (_, { Ast.Identifier.name; comments = _ }) -> name + | E.Object.Property.PrivateName (_, (_, { Ast.Identifier.name; comments = _ })) -> + name + | E.Object.Property.Computed (_, e) -> string_of_expr e + in + let t_str = + match p.value with + | Init (_, init_t) -> string_of_type init_t + | Get (_, ft) -> string_of_type (T.Function ft) + | Set (_, ft) -> string_of_type (T.Function ft) + in + let opt = + if p.optional then + "?" 
+ else + "" + in + key_str ^ opt ^ " : " ^ t_str) + | _ -> failwith "[string_of_prop] unsupported property" + in + let prop_str_list = ot.properties |> Core_list.map ~f:string_of_prop |> String.concat ", " in + if ot.exact then + "{|" ^ prop_str_list ^ "|}" + else + "{" ^ prop_str_list ^ "}") | T.Union ((_, t1), (_, t2), trest) -> let t_strlist = - [(string_of_type t1); (string_of_type t2)] - @ (trest |> (List.map snd) |> (List.map string_of_type)) in + [string_of_type t1; string_of_type t2] + @ (trest |> Core_list.map ~f:snd |> Core_list.map ~f:string_of_type) + in String.concat " | " t_strlist | T.StringLiteral st -> Ast.StringLiteral.(st.raw) | T.NumberLiteral nt -> Ast.NumberLiteral.(nt.raw) - | T.BooleanLiteral bt -> if bt then "true" else "false" + | T.BooleanLiteral bt -> + if bt then + "true" + else + "false" | T.Function func -> - let open T.Function in - let string_of_param param = - let open T.Function.Param in - let opt_str = if param.optional then "?" else "" in - let name_str = match param.name with - | Some (_, id) -> id ^ opt_str ^ " : " - | None -> "" in - name_str ^ (string_of_type (snd param.annot)) in - let params_str = - let (_, { T.Function.Params.params; rest = _ }) = func.params in - params - |> List.map snd - |> List.map string_of_param - |> String.concat ", " in - let ret_type_str = (string_of_type (snd func.return)) in - "(" ^ params_str ^ ") => " ^ ret_type_str + T.Function.( + let string_of_param param = + T.Function.Param.( + let opt_str = + if param.optional then + "?" + else + "" + in + let name_str = + match param.name with + | Some (_, { Ast.Identifier.name; comments = _ }) -> name ^ opt_str ^ " : " + | None -> "" + in + name_str ^ string_of_type (snd param.annot)) + in + let params_str = + let (_, { T.Function.Params.params; rest = _ }) = func.params in + params |> Core_list.map ~f:snd |> Core_list.map ~f:string_of_param |> String.concat ", " + in + let ret_type_str = string_of_type (snd func.return) in + "(" ^ params_str ^ ") => " ^ ret_type_str) | _ -> failwith "[string_of_type] unsupported type" - (* A generator function for creating functions that makes variables and * properties *) let mk_gen (prefix : string) = let count = ref 0 in fun () -> - let vname : string = prefix ^ (string_of_int !count) in + let vname : string = prefix ^ string_of_int !count in count := !count + 1; - vname;; + vname (* A function that makes unique names. *) -let mk_var = mk_gen "v_";; -let mk_prop = mk_gen "p_";; -let mk_func = mk_gen "f";; -let mk_obj_cons = mk_gen "Obj";; +let mk_var = mk_gen "v_" + +let mk_prop = mk_gen "p_" + +let mk_func = mk_gen "f" + +let mk_obj_cons = mk_gen "Obj" (* Convert a code and its dependencies into a list CAUTION: This function will lose some independencies between codes *) let list_of_code (code : Code.t) : (Loc.t, Loc.t) Ast.Statement.t list = - let open Code in - let rec helper acc lst = match lst with - | [] -> acc - | hd :: tl -> - hd.stmt :: (helper (helper acc hd.stmt_deps) tl) in - (code.stmt :: (helper [] code.stmt_deps)) |> List.rev - + Code.( + let rec helper acc lst = + match lst with + | [] -> acc + | hd :: tl -> hd.stmt :: helper (helper acc hd.stmt_deps) tl + in + code.stmt :: helper [] code.stmt_deps |> List.rev) (* Convert a list of statements into a code object. Dependencies are based on the order of the statements. Thus, it will create unnecessary dependnecies. USE THIS WITH CAUTION. 
*) let code_of_stmt_list (slist : (Loc.t, Loc.t) Ast.Statement.t list) : Code.t option = - let open Code in - - let rec helper lst = match lst with - | [] -> failwith "List is empty, but this cannot happen" - | hd :: [] -> {stmt = hd; stmt_deps = []} - | hd :: tl -> {stmt = hd; stmt_deps = [helper tl]} in - - if (List.length slist) = 0 then None - else - let rev_slist = List.rev slist in - Some (helper rev_slist) + Code.( + let rec helper lst = + match lst with + | [] -> failwith "List is empty, but this cannot happen" + | [hd] -> { stmt = hd; stmt_deps = [] } + | hd :: tl -> { stmt = hd; stmt_deps = [helper tl] } + in + if List.length slist = 0 then + None + else + let rev_slist = List.rev slist in + Some (helper rev_slist)) (* We also remove redundant assignment as well. Redundant assignments will appear after empty object init. *) -let rm_prop_write - (prop : (Loc.t, Loc.t) E.Member.t) - (clist : Code.t list) : Code.t list = - let open S.Expression in - let open Code in - let is_target (code : Code.t) : bool = - match code.stmt with - | (_, S.Expression {expression = (_, E.Assignment assign); - directive = _}) -> - (match E.Assignment.(assign.left) with - | (_, P.Expression (_, E.Member p)) when p = prop -> false - | _ -> true) - | _ -> true in - List.filter is_target clist;; +let rm_prop_write (prop : (Loc.t, Loc.t) E.Member.t) (clist : Code.t list) : Code.t list = + S.Expression.( + Code.( + let is_target (code : Code.t) : bool = + match code.stmt with + | (_, S.Expression { expression = (_, E.Assignment assign); directive = _ }) -> + (match E.Assignment.(assign.left) with + | (_, P.Expression (_, E.Member p)) when p = prop -> false + | _ -> true) + | _ -> true + in + List.filter is_target clist)) (* Remove variable declaration from a list of code where vname is * defined *) -let rm_vardecl - (vname : string) - (clist : Code.t list) : Code.t list = - let open S.VariableDeclaration.Declarator in - let open S.VariableDeclaration in - - (* Check whether this declaration defines the target variable *) - let is_target (decl : (Loc.t, Loc.t) S.VariableDeclaration.Declarator.t) = - let decl' = (snd decl) in - match decl'.id with - | (_, P.Identifier { P.Identifier.name = (_, name); _;}) - -> name != vname - | _ -> true in - - let open Code in - List.fold_left (fun acc code -> match code.stmt with - | (loc, S.VariableDeclaration {declarations = decls; kind = k;}) -> - (* Remove vname's decls *) - (match List.filter is_target decls with - (* No declarators. We remove this statement *) - | [] -> acc - (* Create a new var decl statement *) - | lst -> - let new_stmt = {declarations = lst; kind = k} in - let new_code = - {stmt = (loc, (S.VariableDeclaration new_stmt)); - stmt_deps = code.stmt_deps} in - new_code :: acc) - | _ -> code :: acc) [] clist |> List.rev - +let rm_vardecl (vname : string) (clist : Code.t list) : Code.t list = + S.VariableDeclaration.Declarator.( + S.VariableDeclaration.( + (* Check whether this declaration defines the target variable *) + let is_target (decl : (Loc.t, Loc.t) S.VariableDeclaration.Declarator.t) = + let decl' = snd decl in + match decl'.id with + | (_, P.Identifier { P.Identifier.name = (_, { Ast.Identifier.name; comments = _ }); _ }) + -> + name != vname + | _ -> true + in + Code.( + List.fold_left + (fun acc code -> + match code.stmt with + | (loc, S.VariableDeclaration { declarations = decls; kind = k }) -> + (* Remove vname's decls *) + (match List.filter is_target decls with + (* No declarators. 
We remove this statement *) + | [] -> acc + (* Create a new var decl statement *) + | lst -> + let new_stmt = { declarations = lst; kind = k } in + let new_code = + { stmt = (loc, S.VariableDeclaration new_stmt); stmt_deps = code.stmt_deps } + in + new_code :: acc) + | _ -> code :: acc) + [] + clist + |> List.rev))) (* This is the JSON config library. It loads a JSON file into a list of (name, value) pairs. Simply provide load_config a JSON filename @@ -381,76 +430,98 @@ let rm_vardecl *) module Config = struct - (* config type *) type value = - Int of int + | Int of int | Str of string | Bool of bool | Obj of t - and t = (string * value) list;; + + and t = (string * value) list (* Convert a JSON ast into a config *) let rec to_config (ast : (Loc.t, Loc.t) E.Object.t) : t = - (* get config value from an expression *) - let get_value (expr : (Loc.t, Loc.t) E.t') : value = match expr with + let get_value (expr : (Loc.t, Loc.t) E.t') : value = + match expr with | E.Object o -> Obj (to_config o) - | E.Literal lit -> let open Ast.Literal in - (match lit.value with - | String s -> Str s - | Boolean b -> Bool b - | Number n -> Int (int_of_float n) - | _ -> failwith "We only support string, bool, and int as config vals.") - | _ -> failwith "Unknown AST type for config" in - - let open E.Object in - - (* get all the properties *) - let prop_list = (List.map (fun p -> match p with - | Property (_, E.Object.Property.Init { key = k; value = (_, e); _ }) -> - let k = (match k with - | E.Object.Property.Literal (_, id) -> Ast.Literal.(match id.value with - | String s -> - if String.contains s '.' then - failwith ("Config key '" ^ - s ^ - "' contains dots which are not allowed"); - s - | _ -> failwith "Config key can only be a string") - | _ -> failwith "Config key can only be a string literal.") in - let v = get_value e in - (k, v) - | Property (_, E.Object.Property.Get _) -> - failwith "Getter properties are not allowed" - | Property (_, E.Object.Property.Set _) -> - failwith "Setter properties are not allowed" - | _ -> - failwith "Spread properties are not allowed" - ) ast.properties) in - prop_list + | E.Literal lit -> + Ast.Literal.( + (match lit.value with + | String s -> Str s + | Boolean b -> Bool b + | Number n -> Int (int_of_float n) + | _ -> failwith "We only support string, bool, and int as config vals.")) + | _ -> failwith "Unknown AST type for config" + in + E.Object.( + (* get all the properties *) + let prop_list = + Core_list.map + ~f:(fun p -> + match p with + | Property (_, E.Object.Property.Init { key = k; value = (_, e); _ }) -> + let k = + match k with + | E.Object.Property.Literal (_, id) -> + Ast.Literal.( + (match id.value with + | String s -> + if String.contains s '.' then + failwith ("Config key '" ^ s ^ "' contains dots which are not allowed"); + s + | _ -> failwith "Config key can only be a string")) + | _ -> failwith "Config key can only be a string literal." + in + let v = get_value e in + (k, v) + | Property (_, E.Object.Property.Get _) -> failwith "Getter properties are not allowed" + | Property (_, E.Object.Property.Set _) -> failwith "Setter properties are not allowed" + | _ -> failwith "Spread properties are not allowed") + ast.properties + in + prop_list) (* Convert a config into an expression ast. 
Mainly used for printing *) let rec ast_of_config (c : t) : (Loc.t, Loc.t) E.Object.t = - - let expr_of_value (v : value) : (Loc.t, Loc.t) E.t' = let open Ast.Literal in - match v with - | Int i -> E.Literal {value = Number (float_of_int i); raw = string_of_int i} - | Str s -> E.Literal {value = String s; raw = "\"" ^ s ^ "\""} - | Bool b -> E.Literal {value = Boolean b; raw = string_of_bool b} - | Obj o -> E.Object (ast_of_config o) in - + let expr_of_value (v : value) : (Loc.t, Loc.t) E.t' = + Ast.Literal.( + match v with + | Int i -> + E.Literal + { + value = Number (float_of_int i); + raw = string_of_int i; + comments = Flow_ast_utils.mk_comments_opt (); + } + | Str s -> + E.Literal + { + value = String s; + raw = "\"" ^ s ^ "\""; + comments = Flow_ast_utils.mk_comments_opt (); + } + | Bool b -> + E.Literal + { + value = Boolean b; + raw = string_of_bool b; + comments = Flow_ast_utils.mk_comments_opt (); + } + | Obj o -> E.Object (ast_of_config o)) + in (* Convert all properties into object properties *) - let open E.Object in - let prop_list = - let open E.Object.Property in - List.map (fun (k, v) -> - let key = Identifier (Loc.none, "\"" ^ k ^ "\"") in - let value = Loc.none, expr_of_value v in - Property (Loc.none, Init {key; - value; - shorthand = false})) c in - {properties = prop_list};; + E.Object.( + let prop_list = + E.Object.Property.( + Core_list.map + ~f:(fun (k, v) -> + let key = Identifier (Flow_ast_utils.ident_of_source (Loc.none, "\"" ^ k ^ "\"")) in + let value = (Loc.none, expr_of_value v) in + Property (Loc.none, Init { key; value; shorthand = false })) + c) + in + { properties = prop_list; comments = Flow_ast_utils.mk_comments_opt () }) (* Convert a config into string for printing *) let string_of_config (c : t) : string = @@ -459,23 +530,21 @@ module Config = struct (* Return an empty config *) let empty () : t = - let open E.Object in - to_config {properties = []};; + E.Object.(to_config { properties = []; comments = Flow_ast_utils.mk_comments_opt () }) (* Get a value from the config given a string.*) let get (conf : t) (prop_name : string) : value = let name_list = Str.split (Str.regexp "\\.") prop_name in - - let rec helper (c : t) (slist : string list) = match slist with + let rec helper (c : t) (slist : string list) = + match slist with | [] -> failwith "Config is empty" - | hd :: [] -> List.assoc hd c - | hd :: tl -> (match List.assoc hd c with - | Obj o -> helper o tl - | _ -> failwith "It has to be a config type") in - try - helper conf name_list - with - Not_found -> failwith ("No config value for '" ^ prop_name) + | [hd] -> List.assoc hd c + | hd :: tl -> + (match List.assoc hd c with + | Obj o -> helper o tl + | _ -> failwith "It has to be a config type") + in + (try helper conf name_list with Not_found -> failwith ("No config value for '" ^ prop_name)) (* Normal get function requires users to do type conversion. That's why we are creating these functions. It checks types as well. 
@@ -483,71 +552,88 @@ module Config = struct let get_int ?default (conf : t) (prop_name : string) : int = match get conf prop_name with | Int i -> i - | _ -> (match default with - | None -> failwith ("Config '" ^ prop_name ^ "' is not an int.") - | Some i -> i);; + | _ -> + (match default with + | None -> failwith ("Config '" ^ prop_name ^ "' is not an int.") + | Some i -> i) + let get_str ?default (conf : t) (prop_name : string) : string = match get conf prop_name with | Str s -> s - | _ -> (match default with - | None -> failwith ("Config '" ^ prop_name ^ "' is not a string.") - | Some s -> s);; + | _ -> + (match default with + | None -> failwith ("Config '" ^ prop_name ^ "' is not a string.") + | Some s -> s) + let get_bool ?default (conf : t) (prop_name : string) : bool = match get conf prop_name with | Bool b -> b - | _ -> (match default with - | None -> failwith ("Config '" ^ prop_name ^ "' is not a boolean.") - | Some b -> b);; + | _ -> + (match default with + | None -> failwith ("Config '" ^ prop_name ^ "' is not a boolean.") + | Some b -> b) (* load a config from a string *) let load_json_config_string ?filename json_str : t = - let expr_ast = Parser_flow.json_file + let expr_ast = + Parser_flow.json_file json_str (match filename with - | None -> None - | Some f -> (Some (File_key.JsonFile f))) in - to_config (match (fst expr_ast) with - | (_, E.Object o) -> o - | _ -> failwith "Can only be an object") + | None -> None + | Some f -> Some (File_key.JsonFile f)) + in + to_config + (match fst expr_ast with + | (_, E.Object o) -> o + | _ -> failwith "Can only be an object") (* Load a config into _config *) let load_json_config (filename : string) : t = let content = read_file filename in - load_json_config_string ~filename content;; + load_json_config_string ~filename content end (* Metadata for involing flow type checking *) -let stub_metadata ~root ~checked = { Context. 
- (* local *) - checked; - munge_underscores = false; - (* +let stub_metadata ~root ~checked = + { + Context.checked (* local *); + munge_underscores = false; + (* verbose = Some { Verbose.depth = 2; indent = 2 }; *) - verbose = None; - weak = false; - jsx = Options.Jsx_react; - strict = true; - strict_local = false; - (* global *) - max_literal_length = 100; - enable_const_params = false; - enforce_strict_call_arity = true; - esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; - esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; - esproposal_decorators = Options.ESPROPOSAL_ENABLE; - esproposal_export_star_as = Options.ESPROPOSAL_ENABLE; - esproposal_optional_chaining = Options.ESPROPOSAL_ENABLE; - esproposal_nullish_coalescing = Options.ESPROPOSAL_ENABLE; - facebook_fbt = None; - ignore_non_literal_requires = false; - max_trace_depth = 0; - max_workers = 0; - root; - strip_root = true; - suppress_comments = []; - suppress_types = SSet.empty; -} + verbose = None; + weak = false; + jsx = Options.Jsx_react; + strict = true; + strict_local = false; + include_suppressions = false; + (* global *) + max_literal_length = 100; + enable_const_params = false; + enable_enums = true; + enforce_strict_call_arity = true; + esproposal_class_static_fields = Options.ESPROPOSAL_ENABLE; + esproposal_class_instance_fields = Options.ESPROPOSAL_ENABLE; + esproposal_decorators = Options.ESPROPOSAL_ENABLE; + esproposal_export_star_as = Options.ESPROPOSAL_ENABLE; + esproposal_optional_chaining = Options.ESPROPOSAL_ENABLE; + esproposal_nullish_coalescing = Options.ESPROPOSAL_ENABLE; + exact_by_default = false; + facebook_fbs = None; + facebook_fbt = None; + haste_module_ref_prefix = None; + ignore_non_literal_requires = false; + max_trace_depth = 0; + max_workers = 0; + recursion_limit = 10000; + root; + strip_root = true; + suppress_comments = []; + suppress_types = SSet.empty; + default_lib_dir = None; + trust_mode = Options.NoTrust; + type_asserts = false; + } (* Invoke flow for type checking *) let flow_check (code : string) : string option = @@ -557,24 +643,47 @@ let flow_check (code : string) : string option = try let root = Path.dummy_path in let master_sig_cx = Context.make_sig () in - let master_cx = Context.make master_sig_cx - (stub_metadata ~root ~checked:false) - File_key.Builtins - Files.lib_module_ref in - + let aloc_table = Utils_js.FilenameMap.empty in + let rev_table = lazy (ALoc.make_empty_reverse_table ()) in + let master_cx = + Context.make + master_sig_cx + (stub_metadata ~root ~checked:false) + File_key.Builtins + aloc_table + rev_table + Files.lib_module_ref + Context.Checking + in (* Merge builtins *) let builtin_metadata = stub_metadata ~root ~checked:true in let lint_severities = LintSettings.empty_severities in - let builtins_ast, _ = Parser_flow.program (read_file "lib/core.js") in - let builtins_file_sig = match File_sig.program ~ast:builtins_ast with - | Ok file_sig -> file_sig - | Error _ -> failwith "error calculating builtins file sig" + let (builtins_ast, _) = Parser_flow.program (read_file "lib/core.js") in + let builtins_file_sig = + match File_sig.With_Loc.program ~ast:builtins_ast ~module_ref_prefix:None with + | Ok file_sig -> file_sig + | Error _ -> failwith "error calculating builtins file sig" in let builtins_sig_cx = Context.make_sig () in - let builtins_cx = Context.make builtins_sig_cx builtin_metadata - File_key.Builtins Files.lib_module_ref in - let _ = Type_inference_js.infer_lib_file builtins_cx builtins_ast - ~exclude_syms:SSet.empty 
~lint_severities ~file_options:None ~file_sig:builtins_file_sig in + let builtins_cx = + Context.make + builtins_sig_cx + builtin_metadata + File_key.Builtins + aloc_table + rev_table + Files.lib_module_ref + Context.Checking + in + let _ = + Type_inference_js.infer_lib_file + builtins_cx + builtins_ast + ~exclude_syms:SSet.empty + ~lint_severities + ~file_options:None + ~file_sig:(File_sig.abstractify_locs builtins_file_sig) + in let () = let from_t = Context.find_module master_cx Files.lib_module_ref in let to_t = Context.find_module builtins_cx Files.lib_module_ref in @@ -588,93 +697,152 @@ let flow_check (code : string) : string option = (* Merge the input program into the context *) let strict_mode = StrictModeSettings.empty in - let stub_docblock = { Docblock. - flow = Docblock.(Some OptIn); - typeAssert = false; - preventMunge = None; - providesModule = None; - isDeclarationFile = false; - jsx = None; - } in - let input_ast, _ = Parser_flow.program code in + let stub_docblock = + { + Docblock.flow = Docblock.(Some OptIn); + typeAssert = false; + preventMunge = None; + providesModule = None; + isDeclarationFile = false; + jsx = None; + } + in + let (input_ast, _) = Parser_flow.program code in + let (_, _, comments) = input_ast in + let aloc_ast = Ast_loc_utils.loc_to_aloc_mapper#program input_ast in let filename = File_key.SourceFile "/tmp/foo.js" in - let file_sig = match File_sig.program ~ast:input_ast with - | Ok file_sig -> file_sig - | Error _ -> failwith "error calculating implementation file sig" + let file_sig = + match File_sig.With_Loc.program ~ast:input_ast ~module_ref_prefix:None with + | Ok file_sig -> file_sig + | Error _ -> failwith "error calculating implementation file sig" + in + let file_sigs = + Utils_js.FilenameMap.singleton filename (File_sig.abstractify_locs file_sig) in - let file_sigs = Utils_js.FilenameMap.singleton filename file_sig in let reqs = Merge_js.Reqs.empty in (* WARNING: This line might crash. 
That's why we put the entire block into a try catch *) - let (final_cx, _), _other_cxs = Merge_js.merge_component_strict - ~metadata:builtin_metadata ~lint_severities ~file_options:None ~strict_mode ~file_sigs - ~get_ast_unsafe:(fun _ -> input_ast) + let ((final_cx, _, _), _other_cxs) = + Merge_js.merge_component + ~metadata:builtin_metadata + ~lint_severities + ~file_options:None + ~strict_mode + ~file_sigs + ~get_ast_unsafe:(fun _ -> (comments, aloc_ast)) + ~get_aloc_table_unsafe:(fun _ -> + failwith "Did not expect to need an ALoc table in testgen") ~get_docblock_unsafe:(fun _ -> stub_docblock) - (Nel.one filename) reqs [] master_sig_cx in + ~phase:Context.Checking + (Nel.one filename) + reqs + [] + master_sig_cx + in let suppressions = Error_suppressions.empty in - let severity_cover = Utils_js.FilenameMap.singleton filename (ExactCover.default_file_cover filename) in - let errors, warnings, _, _ = Error_suppressions.filter_suppressed_errors - suppressions severity_cover (Context.errors final_cx) + let severity_cover = + Utils_js.FilenameMap.singleton filename (ExactCover.default_file_cover filename) + in + let errors = Context.errors final_cx in + let include_suppressions = Context.include_suppressions final_cx in + let aloc_tables = Utils_js.FilenameMap.empty in + let (errors, warnings, suppressions) = + Error_suppressions.filter_lints + ~include_suppressions + suppressions + errors + aloc_tables + severity_cover + in + let lazy_table_of_aloc _ = + lazy (failwith "Did not expect to encounter an abstract location in flowtestgen") + in + let errors = Flow_error.make_errors_printable lazy_table_of_aloc errors in + let warnings = Flow_error.make_errors_printable lazy_table_of_aloc warnings in + let (errors, _, suppressions) = + Error_suppressions.filter_suppressed_errors + ~root + ~file_options:None + suppressions + errors ~unused:suppressions in - let error_num = Errors.ErrorSet.cardinal errors in + let (warnings, _, _) = + Error_suppressions.filter_suppressed_errors + ~root + ~file_options:None + suppressions + warnings + ~unused:suppressions + in + let error_num = Errors.ConcreteLocPrintableErrorSet.cardinal errors in if error_num = 0 then None - else begin - + else (* This is used for pringing errors *) let string_of_errors (json : Hh_json.json) : string = - let open Hh_json in - let string_of_error (error_json : json) : string = - let error = get_object_exn error_json in - let msg_helper (msg_json : json) : string = - let msg = get_object_exn msg_json in - let desc = get_string_exn (List.assoc "descr" msg) in - let loc = get_object_exn (List.assoc "loc" msg) in - let start = - let start_json = get_object_exn (List.assoc "start" loc) in - (Printf.sprintf - "line %d\tcolumn %d\toffset %d" - (int_of_string (get_number_exn (List.assoc "line" start_json))) - (int_of_string (get_number_exn (List.assoc "column" start_json))) - (int_of_string (get_number_exn (List.assoc "offset" start_json)))) in - let eend = - let end_json = get_object_exn (List.assoc "end" loc) in - (Printf.sprintf - "line %d\tcolumn %d\toffset %d" - (int_of_string (get_number_exn (List.assoc "line" end_json))) - (int_of_string (get_number_exn (List.assoc "column" end_json))) - (int_of_string (get_number_exn (List.assoc "offset" end_json)))) in - Printf.sprintf "Error: %sStart: %s\nEnd: %s\n" desc start eend + Hh_json.( + let string_of_error (error_json : json) : string = + let error = get_object_exn error_json in + let msg_helper (msg_json : json) : string = + let msg = get_object_exn msg_json in + let desc = 
get_string_exn (List.assoc "descr" msg) in + let loc = get_object_exn (List.assoc "loc" msg) in + let start = + let start_json = get_object_exn (List.assoc "start" loc) in + Printf.sprintf + "line %d\tcolumn %d\toffset %d" + (int_of_string (get_number_exn (List.assoc "line" start_json))) + (int_of_string (get_number_exn (List.assoc "column" start_json))) + (int_of_string (get_number_exn (List.assoc "offset" start_json))) + in + let eend = + let end_json = get_object_exn (List.assoc "end" loc) in + Printf.sprintf + "line %d\tcolumn %d\toffset %d" + (int_of_string (get_number_exn (List.assoc "line" end_json))) + (int_of_string (get_number_exn (List.assoc "column" end_json))) + (int_of_string (get_number_exn (List.assoc "offset" end_json))) + in + Printf.sprintf "Error: %sStart: %s\nEnd: %s\n" desc start eend + in + String.concat + "" + (Core_list.map ~f:msg_helper (get_array_exn (List.assoc "message" error))) in - String.concat "" (List.map msg_helper (get_array_exn (List.assoc "message" error))) - in - (List.assoc "errors" (get_object_exn json)) - |> get_array_exn - |> List.map string_of_error - |> String.concat "\n" + List.assoc "errors" (get_object_exn json) + |> get_array_exn + |> Core_list.map ~f:string_of_error + |> String.concat "\n") in - (* Return error message *) let error_msg = let stdin_file = None in let strip_root = None in - let profiling = None in let suppressed_errors = [] in - let res = Errors.Json_output.full_status_json_of_errors ~strip_root ~profiling ~stdin_file - ~suppressed_errors ~errors ~warnings () in - (* + let res = + Errors.Json_output.full_status_json_of_errors + ~strip_root + ~stdin_file + ~suppressed_errors + ~errors + ~warnings + ~profiling_props:[] + () + in + (* Printf.printf "%s\n" (Hh_json.json_to_string ~pretty:false res); *) - string_of_errors res in - (* + string_of_errors res + in + (* Printf.printf "'%s'\n" error_msg; Errors.Cli_output.print_errors stdout Errors.Cli_output.default_error_flags None errors warnings (); *) Some error_msg - end with _ -> Some "Failed to type check." -let assert_func = " +let assert_func = + " // from http://tinyurl.com/y93dykzv const util = require('util'); function assert_type(actual: any, expected: any) { @@ -742,50 +910,57 @@ function check_opt_prop(obj_list : any, actual : any, expected : any) { assert_type(actual, expected); } \n\n -";; +" (* type check a piece of code. * Return true if this code doesn't have type error. 
*) let type_check (code : string) : string option = - (Printf.sprintf "/* @flow */\n%s\n" (assert_func ^ code)) - |> flow_check + Printf.sprintf "/* @flow */\n%s\n" (assert_func ^ code) |> flow_check let is_typecheck engine_name = engine_name = "union" (* Run Javascript programs in batch mode *) -let batch_run (code_list : string list) : (string option) list = +let batch_run (code_list : string list) : string option list = (* Wrap the input program into a stand alont scope with try-catch block *) let to_stmt (code : string) : string = Printf.sprintf -"(function () { + "(function () { try { %s console.log('Done'); } catch (_err_) { console.log(_err_.message); } -})();\n" code in - +})();\n" + code + in (* Split the batch run output into a list of single-program outputs *) - let to_msg_list (output : string) : (string option) list = + let to_msg_list (output : string) : string option list = let msg_list = Str.split (Str.regexp "====\n") output in - List.map (fun m -> if (String.trim m) = "Done" then None else Some m) msg_list in - + Core_list.map + ~f:(fun m -> + if String.trim m = "Done" then + None + else + Some m) + msg_list + in (* Convert all programs into a string for batch run *) - let progs = List.map to_stmt code_list in + let progs = Core_list.map ~f:to_stmt code_list in let progs_string = String.concat "console.log('====');\n" progs in - (* run all the programs *) let cmd = "./node_modules/.bin/flow-remove-types" ^ " -a -p | node" in let content = progs_string in - let ic, oc = Unix.open_process cmd in + let (ic, oc) = Unix.open_process cmd in let out_str = Printf.sprintf "/* @flow */\n%s\n" (assert_func ^ content) in Printf.fprintf oc "%s" out_str; close_out oc; let lines = read_all ic in close_in ic; - let _ = match (Unix.close_process (ic, oc)) with + let _ = + match Unix.close_process (ic, oc) with | Unix.WEXITED code -> code - | _ -> failwith "Command exited abnormally." in + | _ -> failwith "Command exited abnormally." + in String.concat "\n" lines |> to_msg_list diff --git a/testgen/old/codegen.ml b/testgen/old/codegen.ml index 37744dc960b..910c96f8ce1 100644 --- a/testgen/old/codegen.ml +++ b/testgen/old/codegen.ml @@ -1,20 +1,19 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) (* Main module for generating code *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; -module Config = Flowtestgen_config;; -module FTypes = Flowtestgen_types;; -module FRandom = Utils.FRandom;; - +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils +module Config = Flowtestgen_config +module FTypes = Flowtestgen_types +module FRandom = Utils.FRandom open Code (* Check the expression is of the given type *) @@ -24,55 +23,45 @@ let mk_type_assertion (etype : T.t') (expr : t') : t = let final_decl = Mutator.mk_vardecl_code "t" (Some etype) (Some expr) in let callee = E.Identifier (Loc.none, "assert_type") in let expected = FTypes.mk_literal_expr etype in - let arguments = - [E.Expression (Loc.none, final_id); - E.Expression (Loc.none, expected.expr)] in - let call = let open E.Call in - E.Call {callee = (Loc.none, callee); arguments} in - {expr = call; expr_deps = [final_decl]} - |> Mutator.mk_expr_code - + let arguments = [E.Expression (Loc.none, final_id); E.Expression (Loc.none, expected.expr)] in + let call = E.Call.(E.Call { callee = (Loc.none, callee); arguments }) in + { expr = call; expr_deps = [final_decl] } |> Mutator.mk_expr_code (* We move function definitions to the end at random *) let shuffle_fun_defs (code : t) : t = - let rec move_fun_defs stmt_acc fun_acc slist = match slist with - | [] -> (List.rev stmt_acc) @ (List.rev fun_acc) - | hd :: tl -> match hd with - | (_, S.FunctionDeclaration _) when (FRandom.rbool ()) -> + let rec move_fun_defs stmt_acc fun_acc slist = + match slist with + | [] -> List.rev stmt_acc @ List.rev fun_acc + | hd :: tl -> + (match hd with + | (_, S.FunctionDeclaration _) when FRandom.rbool () -> (* We move function defs to the end *) move_fun_defs stmt_acc (hd :: fun_acc) tl - | _ -> move_fun_defs (hd :: stmt_acc) fun_acc tl in - - (* Move some functions to the end *) - let result = Utils.list_of_code code - |> move_fun_defs [] [] - |> Utils.code_of_stmt_list + | _ -> move_fun_defs (hd :: stmt_acc) fun_acc tl) in + (* Move some functions to the end *) + let result = Utils.list_of_code code |> move_fun_defs [] [] |> Utils.code_of_stmt_list in match result with | None -> failwith "This cannot be None." 
| Some s -> s - (* Widen the type and mutate the value *) let rec mk_widen_and_mutation - (obj_name : string) - (prop_name : string) - (etype : T.t') - (prev_stmt : t) : T.t' * t = - + (obj_name : string) (prop_name : string) (etype : T.t') (prev_stmt : t) : T.t' * t = (* Widen the type *) let new_t = Widener.widen_type etype in if new_t = etype then - etype, prev_stmt + (etype, prev_stmt) else (* mutate the value *) - let f = match new_t with + let f = + match new_t with | T.Array _ -> Mutator.mk_array_mutation - | _ -> FRandom.choice [|Mutator.mk_assignment_mutation; - Mutator.mk_func_mutation|] in + | _ -> FRandom.choice [|Mutator.mk_assignment_mutation; Mutator.mk_func_mutation|] + in let new_stmt = f new_t obj_name prop_name prev_stmt in if FRandom.rbool () then - new_t, new_stmt + (new_t, new_stmt) else (* We continue to widen & mutate at random *) mk_widen_and_mutation obj_name prop_name new_t new_stmt @@ -86,11 +75,8 @@ let mk_random_code () : t = let prop_name = Utils.mk_prop () in let prop_read = Mutator.mk_prop_read obj_name prop_name in let obj_decl = Mutator.mk_objdecl_code prop_read etype in - (* Widen the type and mutate the property *) - let _, widening = mk_widen_and_mutation obj_name prop_name etype obj_decl in - + let (_, widening) = mk_widen_and_mutation obj_name prop_name etype obj_decl in (* Check the type *) - let read_expr = {expr = E.Member prop_read; expr_deps = [widening]} in - mk_type_assertion etype read_expr - |> shuffle_fun_defs;; + let read_expr = { expr = E.Member prop_read; expr_deps = [widening] } in + mk_type_assertion etype read_expr |> shuffle_fun_defs diff --git a/testgen/old/gen_rule.ml b/testgen/old/gen_rule.ml index 004ad855c43..18e5a732e20 100644 --- a/testgen/old/gen_rule.ml +++ b/testgen/old/gen_rule.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -47,7 +47,6 @@ (* Module type for code generation rules *) module type Gen_rule_t = sig - (* condition/constraint type *) type cond_t @@ -60,9 +59,9 @@ module type Gen_rule_t = sig (* The rule itself *) type rule_t = { - grammar : grammar_t; - premises : cond_t list; - cons : cond_t list; + grammar: grammar_t; + premises: cond_t list; + cons: cond_t list; } (* Make a rule *) @@ -79,7 +78,7 @@ module type Gen_rule_t = sig (* Exercise a rule and return the syntactic element and the consequent condition. *) - val exercise : rule_t -> (code_t * cond_t list) + val exercise : rule_t -> code_t * cond_t list (* Combine two syntactic elements together *) val combine_code : code_t list -> code_t -> code_t @@ -88,15 +87,18 @@ module type Gen_rule_t = sig might need some inferences here to discover new conditions until we reach a fixed point *) val merge_cond : cond_t list -> cond_t list -> cond_t list -end;; +end (* This functor makes a code generator with a given generation rule *) (* module *) -module Mk_Generator(Gen_rule : Gen_rule_t) = struct +module Mk_Generator (Gen_rule : Gen_rule_t) = struct (* Types for basic stuffs *) type cond_t = Gen_rule.cond_t + type grammar_t = Gen_rule.grammar_t + type code_t = Gen_rule.code_t + type rule_t = Gen_rule.rule_t (* make a rule *) @@ -106,33 +108,33 @@ module Mk_Generator(Gen_rule : Gen_rule_t) = struct (* to give it all the rules it can use during code generation, a *) (* state containing all the conditions that holds already and the *) (* rule we want to execute. 
*) - let rec gen_prog - (all_rules : rule_t list) - (state : cond_t list) - (rule : rule_t) : (code_t * cond_t list) = - + let rec gen_prog (all_rules : rule_t list) (state : cond_t list) (rule : rule_t) : + code_t * cond_t list = (* get a list of premise conditions we need to satisfy *) let to_sat = List.filter (fun p -> not (Gen_rule.is_valid state p)) Gen_rule.(rule.premises) in - (* all the code and conditions necessary to exercise the input rule *) - let pre_code, pre_cond = + let (pre_code, pre_cond) = if to_sat = [] then - [], [] + ([], []) else (* get their corresponding rules *) - let new_rules = List.map (fun cond -> Gen_rule.cond_to_rule all_rules cond) to_sat in - + let new_rules = + Core_list.map ~f:(fun cond -> Gen_rule.cond_to_rule all_rules cond) to_sat + in (* exercise necessary rules *) - let result = List.fold_left (fun acc r -> - let new_code, new_cond = gen_prog all_rules (snd acc) r in - new_code :: (fst acc), Gen_rule.merge_cond (snd acc) new_cond) + let result = + List.fold_left + (fun acc r -> + let (new_code, new_cond) = gen_prog all_rules (snd acc) r in + (new_code :: fst acc, Gen_rule.merge_cond (snd acc) new_cond)) ([], state) - new_rules in - (fst result |> List.rev, snd result) in - + new_rules + in + (fst result |> List.rev, snd result) + in (* We exercise the rule and return everything generated during this function call *) - let new_code, new_cond = Gen_rule.exercise rule in - Gen_rule.combine_code pre_code new_code, pre_cond @ new_cond -end;; + let (new_code, new_cond) = Gen_rule.exercise rule in + (Gen_rule.combine_code pre_code new_code, pre_cond @ new_cond) +end diff --git a/testgen/old/gen_rule_main.ml b/testgen/old/gen_rule_main.ml index 8d167be96b0..676965c3165 100644 --- a/testgen/old/gen_rule_main.ml +++ b/testgen/old/gen_rule_main.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
@@ -12,96 +12,95 @@ condition type or code type to have more complicated conditions such as type judgements *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils module Obj_rule = struct (* A condition is an assertion of an expression being of a type *) type cond_t = E.t' * T.t' + type code_t = string + type grammar_t = string type rule_t = { - grammar : grammar_t; - premises : cond_t list; - cons : cond_t list; + grammar: grammar_t; + premises: cond_t list; + cons: cond_t list; } let string_of_cond (cond : cond_t) : string = - (Utils.string_of_expr (Loc.none, (fst cond))) ^ - " : " ^ - (Utils.string_of_type (Loc.none, (snd cond))) + Utils.string_of_expr (Loc.none, fst cond) ^ " : " ^ Utils.string_of_type (Loc.none, snd cond) |> Str.global_replace (Str.regexp "\n") "" (* We don't do fix-point calculation yet *) - let merge_cond (facts : cond_t list) (new_cond : cond_t list) : cond_t list = - facts @ new_cond + let merge_cond (facts : cond_t list) (new_cond : cond_t list) : cond_t list = facts @ new_cond let mk_rule (grm : grammar_t) (pre : cond_t list) (post : cond_t list) : rule_t = - {grammar = grm; premises = pre; cons = post} + { grammar = grm; premises = pre; cons = post } (* we simply check whether the state has the given condition. No *) (* inference or implication here *) - let is_valid (facts : cond_t list) (cond : cond_t) : bool = - List.mem cond facts + let is_valid (facts : cond_t list) (cond : cond_t) : bool = List.mem cond facts (* Simply check whether the consequence of a rule has the given *) (* condition *) let cond_to_rule (all_rules : rule_t list) (cond : cond_t) : rule_t = - List.filter (fun r -> List.mem cond r.cons) all_rules - |> List.hd + List.filter (fun r -> List.mem cond r.cons) all_rules |> List.hd - let exercise (rule : rule_t) : (code_t * cond_t list) = - (rule.grammar, rule.cons) + let exercise (rule : rule_t) : code_t * cond_t list = (rule.grammar, rule.cons) let combine_code (clist : code_t list) (new_code : code_t) : code_t = String.concat "\n" (clist @ [new_code]) -end;; +end (* This is the module used for generating code *) -module Gen = Gen_rule.Mk_Generator(Obj_rule);; +module Gen = Gen_rule.Mk_Generator (Obj_rule) (* functions for parsing type rules *) -module Type_parser = Type_parser.Type (Parser_flow.Parse);; +module Type_parser = Type_parser.Type (Parser_flow.Parse) + let parse_helper f s = let env = Parser_env.init_env ~token_sink:None ~parse_options:None None s in - let (_, out), _ = Parser_flow.do_parse env f true in - out;; + let ((_, out), _) = Parser_flow.do_parse env f true in + out -let parse_type_rule cons : Gen.cond_t = - let expr, etype = cons in - parse_helper Parser_flow.Parse.expression expr, - parse_helper Type_parser._type etype +let parse_type_rule cons : Gen.cond_t = + let (expr, etype) = cons in + (parse_helper Parser_flow.Parse.expression expr, parse_helper Type_parser._type etype) (* Make rules from strings *) let mk_rule_from_string - (grm : string) - (pre : (string * string) list) - (post : (string * string) list) : Gen.rule_t = + (grm : string) (pre : (string * string) list) (post : (string * string) list) : Gen.rule_t = let grammar = grm in - let premises = List.map parse_type_rule pre in - let cons = List.map parse_type_rule post in - Obj_rule.({grammar; premises; 
cons});; + let premises = Core_list.map ~f:parse_type_rule pre in + let cons = Core_list.map ~f:parse_type_rule post in + Obj_rule.{ grammar; premises; cons } (* We set up three simple rules: object decl, prop read and prop write *) let rules = - [mk_rule_from_string "var o = {};" [] [("o", "{}")]; - mk_rule_from_string "o.p = 1;" [("o", "{}")] [("o", "{p : number}")]; - mk_rule_from_string "o.p;" [("o", "{p : number}")] [("o.p", "number")] - ];; - + [ + mk_rule_from_string "var o = {};" [] [("o", "{}")]; + mk_rule_from_string "o.p = 1;" [("o", "{}")] [("o", "{p : number}")]; + mk_rule_from_string "o.p;" [("o", "{p : number}")] [("o.p", "number")]; + ] let main rule_index = - let code, facts = Gen.gen_prog rules [] (List.nth rules rule_index) in + let (code, facts) = Gen.gen_prog rules [] (List.nth rules rule_index) in Printf.printf "Exercising rule %d =========\n" rule_index; Printf.printf "Code ==============\n%s\n" code; Printf.printf "New conditions =============\n"; - List.iter (fun f -> (print_string ((Obj_rule.string_of_cond f) ^ "\n"))) facts; - Printf.printf "\n";; + List.iter (fun f -> print_string (Obj_rule.string_of_cond f ^ "\n")) facts; + Printf.printf "\n" + +;; +main 0 + +;; +main 1 -main 0;; -main 1;; -main 2;; +;; +main 2 diff --git a/testgen/old/mutator.ml b/testgen/old/mutator.ml index c61eff7e8ec..c958cbba1dc 100644 --- a/testgen/old/mutator.ml +++ b/testgen/old/mutator.ml @@ -1,437 +1,438 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; -module Config = Flowtestgen_config;; -module FTypes = Flowtestgen_types;; -module FRandom = Utils.FRandom;; - +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils +module Config = Flowtestgen_config +module FTypes = Flowtestgen_types +module FRandom = Utils.FRandom open Code (* Put a statement into a dead if branch *) let dead_branch (s : S.t') : S.t' = - let test = - let open Ast.Literal in - (Loc.none, E.Literal {value = Boolean false; - raw = "false"}) in - S.If.(S.If {test; consequent = (Loc.none, s); alternate = None}) + let test = Ast.Literal.(Loc.none, E.Literal { value = Boolean false; raw = "false" }) in + S.If.(S.If { test; consequent = (Loc.none, s); alternate = None }) let mk_prop_read (obj_name : string) (prop_name : string) : E.Member.t = - let open E.Member in - {_object = (Loc.none, E.Identifier (Loc.none, obj_name)); - property = PropertyIdentifier (Loc.none, prop_name); - computed = false} + E.Member. 
+ { + _object = (Loc.none, E.Identifier (Loc.none, obj_name)); + property = PropertyIdentifier (Loc.none, prop_name); + computed = false; + } (* Make a chain of property read *) let rec prop_read_of_list (plist : string list) : E.Member.t = - let open E.Member in - match plist with - | [] -> failwith "prop_read_of_list: Cannot accept empty list" - | _ :: [] -> failwith "prop_read_of_list: Must have at least two elements" - | hd1 :: hd2 :: [] -> mk_prop_read hd1 hd2 - | hd :: tl -> - {_object = (Loc.none, E.Identifier (Loc.none, hd)); - property = PropertyExpression (Loc.none, E.Member (prop_read_of_list tl)); - computed = false} + E.Member.( + match plist with + | [] -> failwith "prop_read_of_list: Cannot accept empty list" + | [_] -> failwith "prop_read_of_list: Must have at least two elements" + | [hd1; hd2] -> mk_prop_read hd1 hd2 + | hd :: tl -> + { + _object = (Loc.none, E.Identifier (Loc.none, hd)); + property = PropertyExpression (Loc.none, E.Member (prop_read_of_list tl)); + computed = false; + }) (* Make an expression code into a statement code *) let mk_expr_code (e : t') : t = - let stmt = let open S.Expression in - S.Expression {expression = (Loc.none, e.expr); directive = None} in - {stmt = (Loc.none, stmt); stmt_deps = e.expr_deps} + let stmt = S.Expression.(S.Expression { expression = (Loc.none, e.expr); directive = None }) in + { stmt = (Loc.none, stmt); stmt_deps = e.expr_deps } (* Make a variable . It only makes a literal at this point. We will support * other expressions like property read. *) -let mk_vardecl_code - (vname : string) - (vtype : T.t' option) - (expr : t' option) : t = - +let mk_vardecl_code (vname : string) (vtype : T.t' option) (expr : t' option) : t = (* We add type annotation based on random choice *) - let type_annot = match vtype with + let type_annot = + match vtype with | None -> None - | Some t -> Some (Loc.none, (Loc.none, t)) in - + | Some t -> Some (Loc.none, (Loc.none, t)) + in (* Make an identifier *) - let id = let open P.Identifier in - (Loc.none, P.Identifier - { name = (Loc.none, vname); - typeAnnotation = type_annot; - optional = false}) in - + let id = + P.Identifier. + ( Loc.none, + P.Identifier { name = (Loc.none, vname); typeAnnotation = type_annot; optional = false } ) + in (* get the expression and its dependencies *) - let (init, deps) = match expr with + let (init, deps) = + match expr with | Some e -> (Some (Loc.none, e.expr), e.expr_deps) - | None -> (None, []) in - + | None -> (None, []) + in (* Make a var declaration *) let decl : S.VariableDeclaration.Declarator.t = - let open S.VariableDeclaration.Declarator in - (Loc.none, {id; init}) in + S.VariableDeclaration.Declarator.(Loc.none, { id; init }) + in let var_decl : S.VariableDeclaration.t = - let open S.VariableDeclaration in - {declarations = [decl]; kind = Var} in - let decl_stmt = let open S in - (Loc.none, VariableDeclaration var_decl) in - {stmt = decl_stmt; stmt_deps = deps} + S.VariableDeclaration.{ declarations = [decl]; kind = Var } + in + let decl_stmt = S.(Loc.none, VariableDeclaration var_decl) in + { stmt = decl_stmt; stmt_deps = deps } (* Make an object initialization expression according to etype. * This only creates an object with a single property. Maybe * we could have more properties in the future. 
*) -let mk_obj_init_expr - ?init_expr - (prop_key : E.Object.Property.key) - (etype : T.t') : t' = +let mk_obj_init_expr ?init_expr (prop_key : E.Object.Property.key) (etype : T.t') : t' = (* Make an init expression of the given type *) - let init_expr = match init_expr with + let init_expr = + match init_expr with | None -> FTypes.mk_literal_expr etype - | Some e -> e in - + | Some e -> e + in (* Make the property *) - let prop = let open E.Object.Property in - {key = prop_key; - value = Init (Loc.none, init_expr.expr); - _method = false; - shorthand = false} in - - let content = let open E.Object in - E.Object {properties = [(Property (Loc.none, prop))]} in - {expr = content; expr_deps = init_expr.expr_deps} + let prop = + E.Object.Property. + { + key = prop_key; + value = Init (Loc.none, init_expr.expr); + _method = false; + shorthand = false; + } + in + let content = E.Object.(E.Object { properties = [Property (Loc.none, prop)] }) in + { expr = content; expr_deps = init_expr.expr_deps } (* Make an object constructor. Return the constructor and its name *) -let mk_obj_constructor - (prop_name : string) - (t : T.t') : (t * string) = +let mk_obj_constructor (prop_name : string) (t : T.t') : t * string = let fname = Utils.mk_obj_cons () in - - let param = let open P.Identifier in - (Loc.none, P.Identifier {name = (Loc.none, prop_name); - typeAnnotation = Some (Loc.none, (Loc.none, t)); - optional = false}) in - + let param = + P.Identifier. + ( Loc.none, + P.Identifier + { + name = (Loc.none, prop_name); + typeAnnotation = Some (Loc.none, (Loc.none, t)); + optional = false; + } ) + in let assign_expr = - let open E.Assignment in - let left = P.Expression (Loc.none, E.Member (mk_prop_read "this" prop_name)) in - let right = E.Identifier (Loc.none, prop_name) in - E.Assignment {operator = Assign; - left = (Loc.none, left); - right = (Loc.none, right)} in - + E.Assignment.( + let left = P.Expression (Loc.none, E.Member (mk_prop_read "this" prop_name)) in + let right = E.Identifier (Loc.none, prop_name) in + E.Assignment { operator = Assign; left = (Loc.none, left); right = (Loc.none, right) }) + in (* Randomly put the assignment into a dead if statement *) let assign_stmt = - S.Expression.(S.Expression {expression = (Loc.none, assign_expr); - directive = None}) - |> (FRandom.rchoice [|(fun id -> id); dead_branch|]) in - - let body = let open S.Block in {body = [(Loc.none, assign_stmt)]} in - - let func = let open Ast.Function in - {id = Some (Loc.none, fname); - params = ([param], None); - body = Ast.Function.BodyBlock (Loc.none, body); - async = false; - generator = false; - predicate = None; - expression = false; - returnType = None; - typeParameters = None} in - ({stmt = (Loc.none, S.FunctionDeclaration func); - stmt_deps = []}, fname) + S.Expression.(S.Expression { expression = (Loc.none, assign_expr); directive = None }) + |> FRandom.rchoice [|(fun id -> id); dead_branch|] + in + let body = S.Block.{ body = [(Loc.none, assign_stmt)] } in + let func = + Ast.Function. 
+ { + id = Some (Loc.none, fname); + params = ([param], None); + body = Ast.Function.BodyBlock (Loc.none, body); + async = false; + generator = false; + predicate = None; + expression = false; + returnType = None; + typeParameters = None; + } + in + ({ stmt = (Loc.none, S.FunctionDeclaration func); stmt_deps = [] }, fname) (* Generate the code for object declaration given a property read.*) -let rec mk_objdecl_code - ?init_expr - (read : E.Member.t) - (t : T.t') : t = - - let open E.Member in - (* Make a property *) - let prop_id = match read.property with - | PropertyIdentifier (_, id) -> id - | _ -> failwith ("Property name has to be an identifier.") in - let prop_key = - let open E.Object.Property in Identifier (Loc.none, prop_id) in - - (* Get the object name according to the property read *) - let obj_name = match read._object with - | (_, E.Identifier id) -> id - | _ -> failwith ("Object name has to be an identifier.") in - - (* Get the object type *) - let obj_type = FTypes.mk_obj_type [(prop_id, t)] in - - let obj_init = match init_expr with - | None -> mk_obj_init_expr prop_key t - | Some e -> e in - - match FRandom.int 4 with - | 0 -> - (* We create normal object decl with init expr *) - mk_vardecl_code - (snd obj_name) - Config.(match config.type_annot with +let rec mk_objdecl_code ?init_expr (read : E.Member.t) (t : T.t') : t = + E.Member.( + (* Make a property *) + let prop_id = + match read.property with + | PropertyIdentifier (_, id) -> id + | _ -> failwith "Property name has to be an identifier." + in + let prop_key = E.Object.Property.(Identifier (Loc.none, prop_id)) in + (* Get the object name according to the property read *) + let obj_name = + match read._object with + | (_, E.Identifier id) -> id + | _ -> failwith "Object name has to be an identifier." + in + (* Get the object type *) + let obj_type = FTypes.mk_obj_type [(prop_id, t)] in + let obj_init = + match init_expr with + | None -> mk_obj_init_expr prop_key t + | Some e -> e + in + match FRandom.int 4 with + | 0 -> + (* We create normal object decl with init expr *) + mk_vardecl_code + (snd obj_name) + Config.( + match config.type_annot with | Force -> Some obj_type | Random -> FRandom.rchoice [|None; Some obj_type|] | No -> None) - (Some obj_init) - | 1 -> - (* We create an empty object first and then assign + (Some obj_init) + | 1 -> + (* We create an empty object first and then assign the property to it *) - let empty_init = let open E.Object in - {expr = E.Object {properties = []}; - expr_deps = []} in - let empty_decl = - if FRandom.rbool () then + let empty_init = E.Object.{ expr = E.Object { properties = [] }; expr_deps = [] } in + let empty_decl = + if FRandom.rbool () then + mk_vardecl_code (snd obj_name) None (Some empty_init) + else + let empty_body = S.Block.{ body = [] } in + let empty_func = + Ast.Function. 
+ { + id = Some obj_name; + params = ([], None); + body = Ast.Function.BodyBlock (Loc.none, empty_body); + async = false; + generator = false; + predicate = None; + expression = false; + returnType = None; + typeParameters = None; + } + in + { stmt = (Loc.none, S.FunctionDeclaration empty_func); stmt_deps = [] } + in + let left = P.Expression (Loc.none, E.Member read) in + let right = FTypes.mk_literal_expr t in + let assign = + E.Assignment.( + E.Assignment + { operator = Assign; left = (Loc.none, left); right = (Loc.none, right.expr) }) + in + let assign_stmt = + S.Expression.(S.Expression { expression = (Loc.none, assign); directive = None }) + |> FRandom.rchoice [|(fun id -> id); dead_branch|] + in + { stmt = (Loc.none, assign_stmt); stmt_deps = empty_decl :: right.expr_deps } + | 2 -> + (* create an object with a constructor *) + let (con, fname) = mk_obj_constructor prop_id t in + let new_call = + let callee = (Loc.none, E.Identifier (Loc.none, fname)) in + let init_expr = FTypes.mk_literal_expr t in + let arguments = [E.Expression (Loc.none, init_expr.expr)] in + let call_expr = E.New.(E.New { callee; arguments }) in + { expr = call_expr; expr_deps = [con] } + in mk_vardecl_code (snd obj_name) - None - (Some empty_init) - else - let empty_body = S.Block.({body = []}) in - let empty_func = - let open Ast.Function in - {id = Some obj_name; - params = ([], None); - body = Ast.Function.BodyBlock (Loc.none, empty_body); - async = false; - generator = false; - predicate = None; - expression = false; - returnType = None; - typeParameters = None} in - {stmt = (Loc.none, S.FunctionDeclaration empty_func); - stmt_deps = []} in - let left = P.Expression (Loc.none, E.Member read) in - let right = FTypes.mk_literal_expr t in - let assign = let open E.Assignment in - E.Assignment {operator = Assign; - left = (Loc.none, left); - right = (Loc.none, right.expr)} in - let assign_stmt = - S.Expression.(S.Expression {expression = (Loc.none, assign); - directive = None}) - |> (FRandom.rchoice [|(fun id -> id); dead_branch|]) in - {stmt = (Loc.none, assign_stmt); - stmt_deps = empty_decl :: right.expr_deps} - | 2 -> - (* create an object with a constructor *) - let con, fname = mk_obj_constructor prop_id t in - let new_call = - let callee = (Loc.none, E.Identifier (Loc.none, fname)) in - let init_expr = FTypes.mk_literal_expr t in - let arguments = [E.Expression (Loc.none, init_expr.expr)] in - let call_expr = E.New.(E.New {callee; arguments}) in - {expr = call_expr; expr_deps = [con]} in - mk_vardecl_code - (snd obj_name) - (if FRandom.rbool () then (Some obj_type) else None) - (Some new_call) - | _ -> - (* Create an object using Object.create() *) - let create_call = - let callee = (Loc.none, E.Member (mk_prop_read "Object" "create")) in - let proto_name = Utils.mk_var () in - let proto_id = E.Identifier (Loc.none, proto_name) in - let proto_decl = mk_objdecl_code (mk_prop_read proto_name prop_id) t in - let arguments = [E.Expression (Loc.none, proto_id)] in - let call_expr = E.Call.(E.Call {callee; arguments}) in - {expr = call_expr; expr_deps = [proto_decl]} in - mk_vardecl_code - (snd obj_name) - (if FRandom.rbool () then (Some obj_type) else None) - (Some create_call) + ( if FRandom.rbool () then + Some obj_type + else + None ) + (Some new_call) + | _ -> + (* Create an object using Object.create() *) + let create_call = + let callee = (Loc.none, E.Member (mk_prop_read "Object" "create")) in + let proto_name = Utils.mk_var () in + let proto_id = E.Identifier (Loc.none, proto_name) in + let 
proto_decl = mk_objdecl_code (mk_prop_read proto_name prop_id) t in + let arguments = [E.Expression (Loc.none, proto_id)] in + let call_expr = E.Call.(E.Call { callee; arguments }) in + { expr = call_expr; expr_deps = [proto_decl] } + in + mk_vardecl_code + (snd obj_name) + ( if FRandom.rbool () then + Some obj_type + else + None ) + (Some create_call)) (* Make an object mutatation statement which assigns * a new property to an object *) -let mk_mutation_assignment_code - ?rhs - (read : E.Member.t) - (t : T.t') - (deps : t list): t = +let mk_mutation_assignment_code ?rhs (read : E.Member.t) (t : T.t') (deps : t list) : t = let left = P.Expression (Loc.none, E.Member read) in - let right = match rhs with + let right = + match rhs with | None -> FTypes.mk_literal_expr t - | Some e -> e in - let assign = let open E.Assignment in - E.Assignment {operator = Assign; - left = (Loc.none, left); - right = (Loc.none, right.expr)} in + | Some e -> e + in + let assign = + E.Assignment.( + E.Assignment { operator = Assign; left = (Loc.none, left); right = (Loc.none, right.expr) }) + in (* Randomly put the assignment into a dead if statement *) let assign_stmt = - S.Expression.(S.Expression {expression = (Loc.none, assign); - directive = None}) - |> (FRandom.rchoice [|(fun id -> id); dead_branch|]) in - {stmt = (Loc.none, assign_stmt); stmt_deps = deps @ right.expr_deps} + S.Expression.(S.Expression { expression = (Loc.none, assign); directive = None }) + |> FRandom.rchoice [|(fun id -> id); dead_branch|] + in + { stmt = (Loc.none, assign_stmt); stmt_deps = deps @ right.expr_deps } (* Define a function for function calls *) -let mk_obj_mutation_funcdecl_code - (fname : Ast.Identifier.t) - (prop_name : string) - (t : T.t') : t = - +let mk_obj_mutation_funcdecl_code (fname : Ast.Identifier.t) (prop_name : string) (t : T.t') : t = let obj_type = (Loc.none, (Loc.none, FTypes.mk_obj_type [(prop_name, t)])) in - (* We add type annotation based on random choice *) let type_annot = - Config.(match config.type_annot with - | Force -> Some obj_type - | Random -> FRandom.rchoice [|None; Some obj_type|] - | No -> None) in - + Config.( + match config.type_annot with + | Force -> Some obj_type + | Random -> FRandom.rchoice [|None; Some obj_type|] + | No -> None) + in let param_name = Utils.mk_var () in - let param = let open P.Identifier in - [(Loc.none, P.Identifier {name = (Loc.none, param_name); - typeAnnotation = type_annot; - optional = false})] in - + let param = + P.Identifier. + [ + ( Loc.none, + P.Identifier + { name = (Loc.none, param_name); typeAnnotation = type_annot; optional = false } ); + ] + in let prop_read = - if FRandom.rbool() then + if FRandom.rbool () then (* We use "arguments" *) - let prop = E.Literal Ast.Literal.({value = Number 0.0; raw = "0"}) in - let args = let open E.Member in - {_object = (Loc.none, E.Identifier (Loc.none, "arguments")); - property = PropertyExpression (Loc.none, prop); - computed = true} in - let open E.Member in - {_object = (Loc.none, E.Member args); - property = PropertyIdentifier (Loc.none, prop_name); - computed = false} + let prop = E.Literal Ast.Literal.{ value = Number 0.0; raw = "0" } in + let args = + E.Member. + { + _object = (Loc.none, E.Identifier (Loc.none, "arguments")); + property = PropertyExpression (Loc.none, prop); + computed = true; + } + in + E.Member. 
+ { + _object = (Loc.none, E.Member args); + property = PropertyIdentifier (Loc.none, prop_name); + computed = false; + } else - let open E.Member in - {_object = (Loc.none, E.Identifier (Loc.none, param_name)); - property = PropertyIdentifier (Loc.none, prop_name); - computed = false} in - + E.Member. + { + _object = (Loc.none, E.Identifier (Loc.none, param_name)); + property = PropertyIdentifier (Loc.none, prop_name); + computed = false; + } + in let assign_stmt = mk_mutation_assignment_code prop_read t [] in - (* Generate the body *) - let body = let open S.Block in - {body = assign_stmt |> Utils.list_of_code} in - let func = let open Ast.Function in - {id = Some fname; - params = (param, None); - body = Ast.Function.BodyBlock (Loc.none, body); - async = false; - generator = false; - predicate = None; - expression = false; - returnType = None; - typeParameters = None} in - {stmt = (Loc.none, S.FunctionDeclaration func); - stmt_deps = []} - + let body = S.Block.{ body = assign_stmt |> Utils.list_of_code } in + let func = + Ast.Function. + { + id = Some fname; + params = (param, None); + body = Ast.Function.BodyBlock (Loc.none, body); + async = false; + generator = false; + predicate = None; + expression = false; + returnType = None; + typeParameters = None; + } + in + { stmt = (Loc.none, S.FunctionDeclaration func); stmt_deps = [] } (* Mutate the value using a single assignment *) -let mk_assignment_mutation - (etype : T.t') - (obj_name : string) - (prop_name : string) - (obj_decl : t) : t = +let mk_assignment_mutation (etype : T.t') (obj_name : string) (prop_name : string) (obj_decl : t) : + t = (* Create a new object with the given type *) let new_obj = Utils.mk_var () in let prop_read = mk_prop_read new_obj prop_name in - let init_expr = {expr = E.Identifier (Loc.none, obj_name); - expr_deps = [obj_decl]} in + let init_expr = { expr = E.Identifier (Loc.none, obj_name); expr_deps = [obj_decl] } in let new_obj_decl = let obj_type = FTypes.mk_obj_type [(prop_name, etype)] in mk_vardecl_code new_obj - Config.(match config.type_annot with - | Force -> Some obj_type - | Random -> FRandom.rchoice [|None; Some obj_type|] - | No -> None) - (Some init_expr) in - + Config.( + match config.type_annot with + | Force -> Some obj_type + | Random -> FRandom.rchoice [|None; Some obj_type|] + | No -> None) + (Some init_expr) + in mk_mutation_assignment_code prop_read etype [new_obj_decl] - (* Make an object mutation statement using function calls *) -let mk_func_mutation - (etype : T.t') - (obj_name : string) - (prop_name : string) - (obj_decl : t) : t = +let mk_func_mutation (etype : T.t') (obj_name : string) (prop_name : string) (obj_decl : t) : t = (* Get the object name according to the property read *) - let funcname = Loc.none, Utils.mk_func () in + let funcname = (Loc.none, Utils.mk_func ()) in let obj_id = E.Identifier (Loc.none, obj_name) in - let call = let open E.Call in - E.Call {callee = (Loc.none, E.Identifier funcname); - arguments = [E.Expression (Loc.none, obj_id)]} in - let func_def = - mk_obj_mutation_funcdecl_code - funcname - prop_name - etype in - mk_expr_code {expr = call; expr_deps = [obj_decl; func_def]} - -let mk_array_mutation - (etype : T.t') - (obj_name : string) - (prop_name : string) - (prev_stmt : t) : t = + let call = + E.Call.( + E.Call + { + callee = (Loc.none, E.Identifier funcname); + arguments = [E.Expression (Loc.none, obj_id)]; + }) + in + let func_def = mk_obj_mutation_funcdecl_code funcname prop_name etype in + mk_expr_code { expr = call; expr_deps = [obj_decl; 
func_def] } + +let mk_array_mutation (etype : T.t') (obj_name : string) (prop_name : string) (prev_stmt : t) : t = (* Create a new object with the given type *) let new_obj = Utils.mk_var () in let prop_read = mk_prop_read new_obj prop_name in - let init_expr = {expr = E.Identifier (Loc.none, obj_name); - expr_deps = [prev_stmt]} in + let init_expr = { expr = E.Identifier (Loc.none, obj_name); expr_deps = [prev_stmt] } in let new_obj_decl = let obj_type = FTypes.mk_obj_type [(prop_name, etype)] in mk_vardecl_code new_obj - Config.(match config.type_annot with - | Force -> Some obj_type - | Random -> FRandom.rchoice [|None; Some obj_type|] - | No -> None) - (Some init_expr) in - + Config.( + match config.type_annot with + | Force -> Some obj_type + | Random -> FRandom.rchoice [|None; Some obj_type|] + | No -> None) + (Some init_expr) + in match etype with | T.Array (_, atype) -> let fname = FRandom.choice [|"push"; "pop"; "shift"; "length"|] in - let callee = let open E.Member in - E.Member {_object = (Loc.none, E.Member prop_read); - property = PropertyIdentifier (Loc.none, fname); - computed = false} in - let expr = match fname with - | "push" | "pop" | "shift" -> + let callee = + E.Member.( + E.Member + { + _object = (Loc.none, E.Member prop_read); + property = PropertyIdentifier (Loc.none, fname); + computed = false; + }) + in + let expr = + match fname with + | "push" + | "pop" + | "shift" -> let arg = FTypes.mk_literal_expr atype in let arguments = - if fname = "push" - then [E.Expression (Loc.none, arg.expr)] - else [] in + if fname = "push" then + [E.Expression (Loc.none, arg.expr)] + else + [] + in if FRandom.rbool () then - E.Call.(E.Call {callee = (Loc.none, callee); arguments}) + E.Call.(E.Call { callee = (Loc.none, callee); arguments }) else (* Call the function from prototype *) - let proto_call = prop_read_of_list ["Array"; - "prototype"; - fname; - "apply"] in + let proto_call = prop_read_of_list ["Array"; "prototype"; fname; "apply"] in let proto_args = E.Expression (Loc.none, E.Member prop_read) :: arguments in - E.Call.(E.Call {callee = (Loc.none, E.Member proto_call); - arguments = proto_args}) + E.Call.(E.Call { callee = (Loc.none, E.Member proto_call); arguments = proto_args }) | "length" -> let left = callee in - let right = E.Literal Ast.Literal.({value = Number 1.0; raw = "1"}) in - let open E.Assignment in - E.Assignment {operator = Assign; - left = (Loc.none, P.Expression (Loc.none, left)); - right = (Loc.none, right)} - - | _ -> failwith ("array mutation: '" ^ fname ^ "' is unsupported") in - - {expr; expr_deps = [new_obj_decl]} |> mk_expr_code + let right = E.Literal Ast.Literal.{ value = Number 1.0; raw = "1" } in + E.Assignment.( + E.Assignment + { + operator = Assign; + left = (Loc.none, P.Expression (Loc.none, left)); + right = (Loc.none, right); + }) + | _ -> failwith ("array mutation: '" ^ fname ^ "' is unsupported") + in + { expr; expr_deps = [new_obj_decl] } |> mk_expr_code | _ -> failwith "Can only accept array type here" diff --git a/testgen/old/widener.ml b/testgen/old/widener.ml index bf7c4a79e38..211e042e935 100644 --- a/testgen/old/widener.ml +++ b/testgen/old/widener.ml @@ -1,80 +1,94 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
*) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; -module Config = Flowtestgen_config;; -module FTypes = Flowtestgen_types;; -module FRandom = Utils.FRandom;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils +module Config = Flowtestgen_config +module FTypes = Flowtestgen_types +module FRandom = Utils.FRandom (* Do a single widening step up. If we are at the top, return t *) let rec widen_type (t : T.t') : T.t' = - let open FTypes.TypeSet in - let new_type = match t with - (* Add a type to make a union type *) - | T.Number | T.String | T.Boolean -> - let available_set = diff FTypes.primitive_types (singleton t) in - FTypes.mk_union_type [|t; FTypes.TypeSet.choose FRandom.rchoice available_set|] - - (* Add a type into the existing union type *) - | T.Union ((l1, t1), (l2, t2), trest) -> - let tlist = List.map snd trest in - let used_set = of_list (t1 :: t2 :: tlist) in - let available_set = diff FTypes.primitive_types used_set in - if is_empty available_set then - t - else - let new_t = FTypes.TypeSet.choose FRandom.rchoice available_set in - T.Union ((l1, t1), (l2, t2), (Loc.none, new_t) :: trest) - | T.Object obj_t -> widen_obj_type obj_t - | T.StringLiteral _ -> T.String - | T.NumberLiteral _ -> T.Number - | T.BooleanLiteral _ -> T.Boolean - | T.Tuple tlist -> - (* Randomly select a type and widen that *) - let tarray = Array.of_list (List.map snd tlist) in - let old_t_index = FRandom.rint (Array.length tarray) in - let old_t = (Array.get tarray old_t_index) in - let new_t = widen_type old_t in - if new_t = old_t then - T.Array (Loc.none, new_t) + FTypes.TypeSet.( + let new_type = + match t with + (* Add a type to make a union type *) + | T.Number + | T.String + | T.Boolean -> + let available_set = diff FTypes.primitive_types (singleton t) in + FTypes.mk_union_type [|t; FTypes.TypeSet.choose FRandom.rchoice available_set|] + (* Add a type into the existing union type *) + | T.Union ((l1, t1), (l2, t2), trest) -> + let tlist = Core_list.map ~f:snd trest in + let used_set = of_list (t1 :: t2 :: tlist) in + let available_set = diff FTypes.primitive_types used_set in + if is_empty available_set then + t + else + let new_t = FTypes.TypeSet.choose FRandom.rchoice available_set in + T.Union ((l1, t1), (l2, t2), (Loc.none, new_t) :: trest) + | T.Object obj_t -> widen_obj_type obj_t + | T.StringLiteral _ -> T.String + | T.NumberLiteral _ -> T.Number + | T.BooleanLiteral _ -> T.Boolean + | T.Tuple tlist -> + (* Randomly select a type and widen that *) + let tarray = Array.of_list (Core_list.map ~f:snd tlist) in + let old_t_index = FRandom.rint (Array.length tarray) in + let old_t = tarray.(old_t_index) in + let new_t = widen_type old_t in + if new_t = old_t then + T.Array (Loc.none, new_t) + else + T.Tuple + (List.mapi + (fun i t -> + if i = old_t_index then + (Loc.none, new_t) + else + t) + tlist) + | T.Array (_, array_type) -> T.Array (Loc.none, widen_type array_type) + | _ -> failwith "Widen: unsupported type\n" + in + (* Randomly widen the type again *) + if new_type = t || FRandom.rbool () then + new_type else - T.Tuple - (List.mapi - (fun i t -> if i = old_t_index then (Loc.none, new_t) else t) - tlist) - | T.Array (_, array_type) -> T.Array (Loc.none, widen_type array_type) - | _ -> failwith "Widen: unsupported type\n" in - - (* Randomly widen the type again *) - if new_type = 
t || (FRandom.rbool ()) - then new_type - else widen_type new_type + widen_type new_type) (* Widen an object property type *) -and widen_obj_prop - (prop : T.Object.Property.t') : T.Object.Property.t' = - let open T.Object.Property in - let old_t = match prop.value with - | Init (_, t) -> t - | _ -> failwith "widen_obj_prop: Unsupported prop value" in - let new_t = widen_type old_t in - - (* We make it optional if we cannot widen the type anymore *) - let optional = if new_t = old_t then true else false in - {key = prop.key; - value = Init (Loc.none, new_t); - optional; - static = prop.static; - _method = prop._method; - variance = prop.variance} +and widen_obj_prop (prop : T.Object.Property.t') : T.Object.Property.t' = + T.Object.Property.( + let old_t = + match prop.value with + | Init (_, t) -> t + | _ -> failwith "widen_obj_prop: Unsupported prop value" + in + let new_t = widen_type old_t in + (* We make it optional if we cannot widen the type anymore *) + let optional = + if new_t = old_t then + true + else + false + in + { + key = prop.key; + value = Init (Loc.none, new_t); + optional; + static = prop.static; + _method = prop._method; + variance = prop.variance; + }) (* Widen an object type *) and widen_obj_type (t : T.Object.t) : T.t' = @@ -82,10 +96,9 @@ and widen_obj_type (t : T.Object.t) : T.t' = let count = List.length T.Object.(t.properties) in if count < 2 then T.Object t - else let sel = FRandom.rint count in - let rec helper - (plist : T.Object.property list) - (i : int) : T.Object.property list = + else + let sel = FRandom.rint count in + let rec helper (plist : T.Object.property list) (i : int) : T.Object.property list = match plist with | [] -> [] | hd :: tl when i != sel -> hd :: helper tl (1 + i) @@ -94,7 +107,7 @@ and widen_obj_type (t : T.Object.t) : T.t' = if new_prop = prop then tl else - (T.Object.Property (loc, new_prop)) :: tl - | _ -> failwith "Unsupported property type" in - T.Object.(T.Object {exact = false; - properties = (helper t.properties 0)}) + T.Object.Property (loc, new_prop) :: tl + | _ -> failwith "Unsupported property type" + in + T.Object.(T.Object { exact = false; properties = helper t.properties 0 }) diff --git a/testgen/ruleset_base.ml b/testgen/ruleset_base.ml index ec4ea5890d4..91982b1c428 100644 --- a/testgen/ruleset_base.ml +++ b/testgen/ruleset_base.ml @@ -1,5 +1,5 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. @@ -11,43 +11,34 @@ programs that exposes type rule unsoundness *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; - +module Syntax = Syntax_base (* ESSENTIAL: environment type and its element type. 
*) type env_elt_t = | Expr of (Loc.t, Loc.t) E.t' * (Loc.t, Loc.t) T.t' | Type of (Loc.t, Loc.t) T.t' | Int of int + type env_t = env_elt_t list (* string of functions *) -let str_of_env_elt (elt : env_elt_t) : string = match elt with - | Expr (e, t) -> - Printf.sprintf - "%s : %s" - (Utils.string_of_expr e) - (Utils.string_of_type t) - | Type t -> - Printf.sprintf - "%s" - (Utils.string_of_type t) +let str_of_env_elt (elt : env_elt_t) : string = + match elt with + | Expr (e, t) -> Printf.sprintf "%s : %s" (Utils.string_of_expr e) (Utils.string_of_type t) + | Type t -> Printf.sprintf "%s" (Utils.string_of_type t) | Int i -> string_of_int i let str_of_env (env : env_t) : string = - "\n/*\nEnv:\n" ^ - (List.fold_left (fun acc e -> (str_of_env_elt e) ^ "\n" ^ acc) "" env) ^ - "*/\n\n" + "\n/*\nEnv:\n" ^ List.fold_left (fun acc e -> str_of_env_elt e ^ "\n" ^ acc) "" env ^ "*/\n\n" -let print_env (env : env_t) : unit = - Printf.printf "%s\n" (str_of_env env) +let print_env (env : env_t) : unit = Printf.printf "%s\n" (str_of_env env) (* This is a sample ruleset that has unsound type rules. This also serves as an example to use the engine for generating @@ -57,830 +48,955 @@ let print_env (env : env_t) : unit = body for inner statements such as function definitions. This might change in the future when we have better strategy. *) -class ruleset_base = object(self) - - - (* ESSENTIAL: Users have to inherit from the engine type and +class ruleset_base = + object (self) + (* ESSENTIAL: Users have to inherit from the engine type and implement the get_all_rules method *) - inherit [env_elt_t, env_t, Syntax.t] Engine.engine - - method get_name () : string = "base" - - method print_stack () : unit = - Printf.printf "Stack: ============\n"; - for i = size - 1 downto 0 do - List.iter (fun elt -> Printf.printf "%s\t" (str_of_env_elt elt)) stack.(i); - Printf.printf "\n----------------\n"; - done - - method print_env (env : env_t) : unit = print_env env - - method print_syntax (s : Syntax.t) : unit = Printf.printf "%s\n" (Syntax.str_of_syntax s) - method combine_syntax (slist : Syntax.t list) : string = Syntax.combine_syntax slist - - (* We have a small chance to bypass this assertion *) - method weak_assert b = - if (not b) && ((Random.int 5) > 0) then raise Engine.Backtrack - - (* check t1 <: t2 *) - method is_subtype (t1 : (Loc.t, Loc.t) T.t') (t2 : (Loc.t, Loc.t) T.t') : bool = - match t1, t2 with - | (T.Union ((_, tu1), (_, tu2), tlist), t) -> - List.mem t (tu1 :: tu2 :: (List.map snd tlist)) - | T.Object o1, T.Object o2 -> self#is_subtype_obj o1 o2 - | T.Function f1, T.Function f2 -> self#is_subtype_func f1 f2 - | _ when t1 = t2 -> true - | _ -> false - - method is_subtype_func - (f1 : (Loc.t, Loc.t) T.Function.t) - (f2 : (Loc.t, Loc.t) T.Function.t) : bool = - let open T.Function in - let get_type_list (f : (Loc.t, Loc.t) T.Function.t) : (Loc.t, Loc.t) T.t' list = - let open T.Function.Param in - let (_, { T.Function.Params.params; rest = _ }) = f.params in - List.map - (fun param -> (snd param).annot |> snd) - params @ [f.return |> snd] in - - let rec func_subtype_helper l1 l2 = match l1, l2 with - | [], [] -> true - (* checking the return type *) - | hd1 :: [], hd2 :: [] -> self#is_subtype hd1 hd2 - (* checking the param type *) - | hd1 :: tl1, hd2 :: tl2 -> - if self#is_subtype hd2 hd1 then - func_subtype_helper tl1 tl2 - else - false - | _ -> false in - - let p1_list = get_type_list f1 in - let p2_list = get_type_list f2 in - if (not ((List.length p1_list) = (List.length p2_list))) then false - 
else func_subtype_helper p1_list p2_list - - method is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = - let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = - let tbl = Hashtbl.create 1000 in - - (* hash table for storing optional properties *) - let opt_tbl = Hashtbl.create 1000 in - let open T.Object.Property in - List.iter (fun p -> match p with - | T.Object.Property (_, {key = E.Object.Property.Identifier (_, name); - value = Init (_, t); - optional = o; - static = _; - proto = _; - _method = _; - variance = _;}) -> - if o then Hashtbl.add opt_tbl name t - else Hashtbl.add tbl name t - | _ -> ()) T.Object.(o.properties); - tbl, opt_tbl in - let s1, opt1 = get_prop_set o1 in - let s2, opt2 = get_prop_set o2 in - let subtype = ref true in - (* check non optional properties *) - Hashtbl.iter (fun n t -> - if (not (Hashtbl.mem s1 n)) || (not ((Hashtbl.find s1 n) = t)) then - subtype := false) s2; - - (* check optional properties *) - Hashtbl.iter (fun n t -> - if (((Hashtbl.mem s1 n) && ((Hashtbl.find s1 n) != t)) || - ((Hashtbl.mem opt1 n) && ((Hashtbl.find opt1 n) != t))) - then subtype := false) opt2; - !subtype - - (* A user custom function for populating the env. *) - method add_binding - (env : env_t) - (elt : env_elt_t) : env_t = - - let rec helper lst acc = match lst with - | [] -> List.rev (elt :: acc) - | hd :: tl -> (match elt, hd with - | Type t1, Type t2 when t1 = t2 -> lst @ acc - | Expr (e1, t1), Expr (e2, _) when e1 = e2 -> - ((Expr (e1, t1) :: tl)) @ acc - | _ -> helper tl (hd :: acc)) in - helper env [] - - (* get the type of an expression from the environment assuming + inherit [env_elt_t, env_t, Syntax.t] Engine.engine + + method get_name () : string = "base" + + method print_stack () : unit = + Printf.printf "Stack: ============\n"; + for i = size - 1 downto 0 do + List.iter (fun elt -> Printf.printf "%s\t" (str_of_env_elt elt)) stack.(i); + Printf.printf "\n----------------\n" + done + + method print_env (env : env_t) : unit = print_env env + + method print_syntax (s : Syntax.t) : unit = Printf.printf "%s\n" (Syntax.str_of_syntax s) + + method combine_syntax (slist : Syntax.t list) : string = Syntax.combine_syntax slist + + (* We have a small chance to bypass this assertion *) + method weak_assert b = if (not b) && Random.int 5 > 0 then raise Engine.Backtrack + + (* check t1 <: t2 *) + method is_subtype (t1 : (Loc.t, Loc.t) T.t') (t2 : (Loc.t, Loc.t) T.t') : bool = + match (t1, t2) with + | (T.Union ((_, tu1), (_, tu2), tlist), t) -> + List.mem t (tu1 :: tu2 :: Core_list.map ~f:snd tlist) + | (T.Object o1, T.Object o2) -> self#is_subtype_obj o1 o2 + | (T.Function f1, T.Function f2) -> self#is_subtype_func f1 f2 + | _ when t1 = t2 -> true + | _ -> false + + method is_subtype_func (f1 : (Loc.t, Loc.t) T.Function.t) (f2 : (Loc.t, Loc.t) T.Function.t) + : bool = + T.Function.( + let get_type_list (f : (Loc.t, Loc.t) T.Function.t) : (Loc.t, Loc.t) T.t' list = + T.Function.Param.( + let (_, { T.Function.Params.params; rest = _ }) = f.params in + List.map (fun param -> (snd param).annot |> snd) params @ [f.return |> snd]) + in + let rec func_subtype_helper l1 l2 = + match (l1, l2) with + | ([], []) -> true + (* checking the return type *) + | ([hd1], [hd2]) -> self#is_subtype hd1 hd2 + (* checking the param type *) + | (hd1 :: tl1, hd2 :: tl2) -> + if self#is_subtype hd2 hd1 then + func_subtype_helper tl1 tl2 + else + false + | _ -> false + in + let p1_list = get_type_list f1 in + let p2_list = get_type_list f2 in + if not (List.length p1_list 
= List.length p2_list) then + false + else + func_subtype_helper p1_list p2_list) + + method is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = + let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = + let tbl = Hashtbl.create 1000 in + (* hash table for storing optional properties *) + let opt_tbl = Hashtbl.create 1000 in + T.Object.Property.( + List.iter + (fun p -> + match p with + | T.Object.Property + ( _, + { + key = E.Object.Property.Identifier (_, name); + value = Init (_, t); + optional = o; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + if o then + Hashtbl.add opt_tbl name t + else + Hashtbl.add tbl name t + | _ -> ()) + T.Object.(o.properties); + (tbl, opt_tbl)) + in + let (s1, opt1) = get_prop_set o1 in + let (s2, opt2) = get_prop_set o2 in + let subtype = ref true in + (* check non optional properties *) + Hashtbl.iter + (fun n t -> + if (not (Hashtbl.mem s1 n)) || not (Hashtbl.find s1 n = t) then subtype := false) + s2; + + (* check optional properties *) + Hashtbl.iter + (fun n t -> + if + (Hashtbl.mem s1 n && Hashtbl.find s1 n != t) + || (Hashtbl.mem opt1 n && Hashtbl.find opt1 n != t) + then + subtype := false) + opt2; + !subtype + + (* A user custom function for populating the env. *) + method add_binding (env : env_t) (elt : env_elt_t) : env_t = + let rec helper lst acc = + match lst with + | [] -> List.rev (elt :: acc) + | hd :: tl -> + (match (elt, hd) with + | (Type t1, Type t2) when t1 = t2 -> lst @ acc + | (Expr (e1, t1), Expr (e2, _)) when e1 = e2 -> (Expr (e1, t1) :: tl) @ acc + | _ -> helper tl (hd :: acc)) + in + helper env [] + + (* get the type of an expression from the environment assuming we have the expression *) - method get_type_from_expr - (expr : (Loc.t, Loc.t) E.t') - (env : env_t) : (Loc.t, Loc.t) T.t' = - let rec helper lst = match lst with - | [] -> raise Not_found - | Expr (e, t) :: _ when expr = e -> t - | _ :: tl -> helper tl in - helper env - - (* Some require functions for checking preconditions + method get_type_from_expr (expr : (Loc.t, Loc.t) E.t') (env : env_t) : (Loc.t, Loc.t) T.t' = + let rec helper lst = + match lst with + | [] -> raise Not_found + | Expr (e, t) :: _ when expr = e -> t + | _ :: tl -> helper tl + in + helper env + + (* Some require functions for checking preconditions and getting things from the environment *) - method require_expr (env : env_t) : env_elt_t list = - List.fold_right (fun elt acc -> match elt with - | Expr _ -> elt :: acc - | _ -> acc) env [] - - method require_var (env : env_t) : env_elt_t list = - List.fold_right (fun elt acc -> match elt with - | Expr (E.Identifier _, _) -> elt :: acc - | _ -> acc) env [] - - method require_type (env : env_t) : env_elt_t list = - List.fold_right (fun elt acc -> match elt with - | Type _ -> elt :: acc - | _ -> acc) env [] - - (* Requiring the object has some properties *) - method require_prop (ot : (Loc.t, Loc.t) T.t') (take_opt : bool): env_elt_t list = - let open T.Object.Property in - let props = match ot with - | T.Object o -> - List.fold_right (fun p acc -> match p with - | T.Object.Property (_, {key = E.Object.Property.Identifier (_, name); - value = Init (_, t); - optional = o; - static = _; - proto = _; - _method = _; - variance = _;}) -> - if take_opt || (not o) then - Expr (E.Identifier (Loc.none, name), t) :: acc - else - acc - | _ -> failwith "Unsupported property") T.Object.(o.properties) [] - | _ -> failwith "Input type is not an object type" in - props - - (* Getting only optional properties *) - method 
require_optional_prop (ot : (Loc.t, Loc.t) T.t') : env_elt_t list = - let open T.Object.Property in - let props = match ot with - | T.Object o -> - List.fold_right (fun p acc -> match p with - | T.Object.Property (_, {key = E.Object.Property.Identifier (_, name); - value = Init (_, t); - optional = true; - static = _; - proto = _; - _method = _; - variance = _;}) -> - Expr (E.Identifier (Loc.none, name), t) :: acc - | _ -> acc) T.Object.(o.properties) [] - | _ -> failwith "Input type is not an object type" in - props - - (* generate a list of env elements. Start should be provided if choose function is previously + method require_expr (env : env_t) : env_elt_t list = + List.fold_right + (fun elt acc -> + match elt with + | Expr _ -> elt :: acc + | _ -> acc) + env + [] + + method require_var (env : env_t) : env_elt_t list = + List.fold_right + (fun elt acc -> + match elt with + | Expr (E.Identifier _, _) -> elt :: acc + | _ -> acc) + env + [] + + method require_type (env : env_t) : env_elt_t list = + List.fold_right + (fun elt acc -> + match elt with + | Type _ -> elt :: acc + | _ -> acc) + env + [] + + (* Requiring the object has some properties *) + method require_prop (ot : (Loc.t, Loc.t) T.t') (take_opt : bool) : env_elt_t list = + T.Object.Property.( + let props = + match ot with + | T.Object o -> + List.fold_right + (fun p acc -> + match p with + | T.Object.Property + ( _, + { + key = E.Object.Property.Identifier (_, name); + value = Init (_, t); + optional = o; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + if take_opt || not o then + Expr (E.Identifier (Loc.none, name), t) :: acc + else + acc + | _ -> failwith "Unsupported property") + T.Object.(o.properties) + [] + | _ -> failwith "Input type is not an object type" + in + props) + + (* Getting only optional properties *) + method require_optional_prop (ot : (Loc.t, Loc.t) T.t') : env_elt_t list = + T.Object.Property.( + let props = + match ot with + | T.Object o -> + List.fold_right + (fun p acc -> + match p with + | T.Object.Property + ( _, + { + key = E.Object.Property.Identifier (_, name); + value = Init (_, t); + optional = true; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + Expr (E.Identifier (Loc.none, name), t) :: acc + | _ -> acc) + T.Object.(o.properties) + [] + | _ -> failwith "Input type is not an object type" + in + props) + + (* generate a list of env elements. 
Start should be provided if choose function is previously used in a rule *) - method gen_elt_list - (start : int) - (require_func : env_t -> env_elt_t list) - (cons : env_elt_t -> bool) - (num : int) - (env : env_t) : env_elt_t list = - let rec helper count limit result = - if count = limit then result - else - let elt = self#choose (count + start) (fun () -> require_func env) in - self#backtrack_on_false (cons elt); - helper (count + 1) limit (elt :: result) in - helper start num [] - - (* A function for generating literal expressions and types *) - method gen_obj_lit - ?(start = 0) - ?(cons = (fun _ -> true)) - (prop_num : int) - (option_num : int) - (env : env_t) : (Syntax.t * (Loc.t, Loc.t) E.t' * (Loc.t, Loc.t) T.t') = - - (* We are getting 1 property *) - let elist = self#gen_elt_list start self#require_expr cons (prop_num + option_num) env in - let props = - let count = ref 0 in - let mk_prop () = - let r = "p_" ^ (string_of_int !count) in - let index = !count in - count := !count + 1; - r, index in - List.map (fun elt -> match elt with - | Expr (e, t) -> let pname, index = mk_prop () in pname, (e, t), index - | _ -> failwith "This has to be an expression.") elist in - - (* get the literal syntax and its type *) - let lit = Syntax.mk_obj_lit (List.map (fun (n, e, _) -> n, e) props) in - let lit_expr = (match lit with + method gen_elt_list + (start : int) + (require_func : env_t -> env_elt_t list) + (cons : env_elt_t -> bool) + (num : int) + (env : env_t) : env_elt_t list = + let rec helper count limit result = + if count = limit then + result + else + let elt = self#choose (count + start) (fun () -> require_func env) in + self#backtrack_on_false (cons elt); + helper (count + 1) limit (elt :: result) + in + helper start num [] + + (* A function for generating literal expressions and types *) + method gen_obj_lit + ?(start = 0) ?(cons = (fun _ -> true)) (prop_num : int) (option_num : int) (env : env_t) + : Syntax.t * (Loc.t, Loc.t) E.t' * (Loc.t, Loc.t) T.t' = + (* We are getting 1 property *) + let elist = self#gen_elt_list start self#require_expr cons (prop_num + option_num) env in + let props = + let count = ref 0 in + let mk_prop () = + let r = "p_" ^ string_of_int !count in + let index = !count in + count := !count + 1; + (r, index) + in + Core_list.map + ~f:(fun elt -> + match elt with + | Expr (e, t) -> + let (pname, index) = mk_prop () in + (pname, (e, t), index) + | _ -> failwith "This has to be an expression.") + elist + in + (* get the literal syntax and its type *) + let lit = Syntax.mk_obj_lit (Core_list.map ~f:(fun (n, e, _) -> (n, e)) props) in + let lit_expr = + match lit with | Syntax.Expr e -> e - | _ -> failwith "[rule_obj_lit] Literal has to be an expr") in - let ret_type = - let prop_types = - List.map (fun (name, (_, e), index) -> - let open T.Object.Property in - T.Object.Property (Loc.none, {key = E.Object.Property.Identifier (Loc.none, name); - value = Init (Loc.none, e); - optional = if index >= prop_num then true else false; - static = false; - proto = false; - _method = false; - variance = None})) props in - let open T.Object in - T.Object {exact = false; properties = prop_types} in - lit, lit_expr, ret_type - - (* A function for generating literal expressions and types *) - method gen_obj_type - ?(start = 0) - ?(cons = (fun _ -> true)) - (prop_num : int) - (option_num : int) - (env : env_t) : (Loc.t, Loc.t) T.t' = - - (* We are getting 1 property *) - let tlist = self#gen_elt_list start self#require_type cons (prop_num + option_num) env in - let props = - 
let count = ref 0 in - let mk_prop () = - let r = "p_" ^ (string_of_int !count) in - let index = !count in - count := !count + 1; - r, index in - List.map (fun elt -> match elt with - | Type t -> let pname, index = mk_prop () in pname, t, index - | _ -> failwith "This has to be an expression.") tlist in - - (* get the literal syntax and its type *) - let ret_type = - let prop_types = - List.map (fun (name, t, index) -> - let open T.Object.Property in - T.Object.Property (Loc.none, {key = E.Object.Property.Identifier (Loc.none, name); - value = Init (Loc.none, t); - optional = if index >= prop_num then true else false; - static = false; - proto = false; - _method = false; - variance = None})) props in - let open T.Object in - T.Object {exact = false; properties = prop_types} in - ret_type - - (* ESSENTIAL: rules *) - (* Property read rule *) - method rule_prop_read (env : env_t) : (Syntax.t * env_t) = - (* we require we have an object *) - let obj = self#choose 0 (fun () -> self#require_expr env) in - self#backtrack_on_false (match obj with + | _ -> failwith "[rule_obj_lit] Literal has to be an expr" + in + let ret_type = + let prop_types = + Core_list.map + ~f:(fun (name, (_, e), index) -> + T.Object.Property.( + T.Object.Property + ( Loc.none, + { + key = + E.Object.Property.Identifier + (Flow_ast_utils.ident_of_source (Loc.none, name)); + value = Init (Loc.none, e); + optional = + ( if index >= prop_num then + true + else + false ); + static = false; + proto = false; + _method = false; + variance = None; + } ))) + props + in + T.Object.(T.Object { exact = false; properties = prop_types; inexact = true }) + in + (lit, lit_expr, ret_type) + + (* A function for generating literal expressions and types *) + method gen_obj_type + ?(start = 0) ?(cons = (fun _ -> true)) (prop_num : int) (option_num : int) (env : env_t) + : (Loc.t, Loc.t) T.t' = + (* We are getting 1 property *) + let tlist = self#gen_elt_list start self#require_type cons (prop_num + option_num) env in + let props = + let count = ref 0 in + let mk_prop () = + let r = "p_" ^ string_of_int !count in + let index = !count in + count := !count + 1; + (r, index) + in + Core_list.map + ~f:(fun elt -> + match elt with + | Type t -> + let (pname, index) = mk_prop () in + (pname, t, index) + | _ -> failwith "This has to be an expression.") + tlist + in + (* get the literal syntax and its type *) + let ret_type = + let prop_types = + Core_list.map + ~f:(fun (name, t, index) -> + T.Object.Property.( + T.Object.Property + ( Loc.none, + { + key = + E.Object.Property.Identifier + (Flow_ast_utils.ident_of_source (Loc.none, name)); + value = Init (Loc.none, t); + optional = + ( if index >= prop_num then + true + else + false ); + static = false; + proto = false; + _method = false; + variance = None; + } ))) + props + in + T.Object.(T.Object { exact = false; properties = prop_types; inexact = true }) + in + ret_type + + (* ESSENTIAL: rules *) + (* Property read rule *) + method rule_prop_read (env : env_t) : Syntax.t * env_t = + (* we require we have an object *) + let obj = self#choose 0 (fun () -> self#require_expr env) in + self#backtrack_on_false + (match obj with (* we ensure we are getting an object variable *) | Expr (E.Identifier _, T.Object _) -> true | _ -> false); - let oexpr, otype = match obj with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - let prop = self#choose 1 (fun () -> self#require_prop otype false) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has 
to be an expression" in - - let read = - Syntax.mk_prop_read (Utils.string_of_expr oexpr) (Utils.string_of_expr pexpr) in - - let ret_type = ptype in - let new_env = - self#add_binding - env - (match read with - | Syntax.Expr e -> Expr (e, ret_type) - | _ -> failwith "has to be an expr") in - let new_env = - self#add_binding new_env (Type ret_type) in - (read, new_env) - - (* property update rule *) - method rule_prop_update (env : env_t) : (Syntax.t * env_t) = - (* get an object variable *) - let obj = self#choose 0 (fun () -> self#require_expr env) in - self#backtrack_on_false (match obj with + let (oexpr, otype) = + match obj with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let prop = self#choose 1 (fun () -> self#require_prop otype false) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let read = Syntax.mk_prop_read (Utils.string_of_expr oexpr) (Utils.string_of_expr pexpr) in + let ret_type = ptype in + let new_env = + self#add_binding + env + (match read with + | Syntax.Expr e -> Expr (e, ret_type) + | _ -> failwith "has to be an expr") + in + let new_env = self#add_binding new_env (Type ret_type) in + (read, new_env) + + (* property update rule *) + method rule_prop_update (env : env_t) : Syntax.t * env_t = + (* get an object variable *) + let obj = self#choose 0 (fun () -> self#require_expr env) in + self#backtrack_on_false + (match obj with | Expr (E.Identifier _, T.Object _) -> true | _ -> false); - let oexpr, otype = match obj with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - let prop = self#choose 1 (fun () -> self#require_prop otype true) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* get the expression on the rhs of the update *) - let rhs = self#choose 2 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* assert that type(rhs) <: type(prop) *) - self#weak_assert (self#is_subtype rhs_type ptype); - - (* produce a write syntax *) - let write = - Syntax.mk_prop_write - (Utils.string_of_expr oexpr) - (Utils.string_of_expr pexpr) - rhs_expr in - - (* update the type of the object *) - let ret_type = - let o_type = match otype with - | T.Object o -> o - | _ -> failwith "Has to be an object type" in - if pexpr = E.Identifier (Loc.none, "_number_prop_") then - let new_prop = let open T.Object.Property in - {key = E.Object.Property.Identifier (Loc.none, (Utils.string_of_expr pexpr)); - value = Init (Loc.none, T.Number); - optional = false; - static = false; - proto = false; - _method = false; - variance = None} in - let open T.Object in - T.Object {exact = o_type.exact; - properties = Property (Loc.none, new_prop) :: o_type.properties} - else - T.Object o_type in - - let new_env = self#add_binding env (Expr (oexpr, ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - (write, new_env) - - (* rule for variable declaration with initialization *) - method rule_vardecl (env : env_t) : (Syntax.t * env_t) = - (* get the init expression *) - let init = self#choose 0 (fun () -> self#require_expr env) in - let init_expr, init_type = match init with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - let vname = Utils.mk_var () in - let var_decl = Syntax.mk_vardecl vname init_expr in - let new_env = - 
self#add_binding - env - (Expr ((E.Identifier (Loc.none, vname)), init_type)) in - let new_env = self#add_binding new_env (Type init_type) in - var_decl, new_env - - - (* Rule for declaring a variable with init and type annotation *) - method rule_vardecl_with_type (env : env_t) : (Syntax.t * env_t) = - (* require an expression from the environment *) - let rhs = self#choose 0 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* require a type from the environment.*) - let vtype = self#choose 1 (fun () -> self#require_type env) in - let vtype = match vtype with - | Type t -> t - | _ -> failwith "This has to a type" in - - (* assert the subtyping relationhips between the rhs and lhs *) - self#weak_assert (self#is_subtype rhs_type vtype); - let vname = Utils.mk_var () in - let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, vname)), vtype)) in - let new_env = self#add_binding new_env (Type vtype) in - var_decl, new_env - - (* A rule for generating object literals *) - method rule_obj_lit (prop_num : int) (opt_num : int) (env : env_t) : (Syntax.t * env_t) = - - let lit, lit_expr, ret_type = self#gen_obj_lit prop_num opt_num env in - let new_env = - self#add_binding - env - (Expr (lit_expr, ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - lit, new_env - - (* A rule for generating number literals *) - method rule_num_lit (env : env_t) : (Syntax.t * env_t) = - let lit = Syntax.mk_literal T.Number in - let ret_type = T.Number in - let new_env = - self#add_binding - env - (Expr ((match lit with - | Syntax.Expr e -> e - | _ -> failwith "[rule_num_list] Literal has to be an expr"), - ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - Syntax.Empty, new_env - - (* A rule for generating number literals *) - method rule_str_lit (env : env_t) : (Syntax.t * env_t) = - let lit = Syntax.mk_literal T.String in - let ret_type = T.String in - let new_env = - self#add_binding - env - (Expr ((match lit with - | Syntax.Expr e -> e - | _ -> failwith "Literal has to be an expr"), - ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - Syntax.Empty, new_env - - (* A rule for generating number literals *) - method rule_bool_lit (env : env_t) : (Syntax.t * env_t) = - let lit = Syntax.mk_literal T.Boolean in - let ret_type = T.Boolean in - let new_env = - self#add_binding - env - (Expr ((match lit with - | Syntax.Expr e -> e - | _ -> failwith "[rule_num_list] Literal has to be an expr"), - ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - Syntax.Empty, new_env - - (* A rule for generating function definitions *) - method rule_funcdef (env : env_t) : (Syntax.t * env_t) = - let mk_func_type (ptype : (Loc.t, Loc.t) T.t') (rtype : (Loc.t, Loc.t) T.t') : (Loc.t, Loc.t) T.t' = + let (oexpr, otype) = + match obj with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let prop = self#choose 1 (fun () -> self#require_prop otype true) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* get the expression on the rhs of the update *) + let rhs = self#choose 2 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* assert 
that type(rhs) <: type(prop) *) + self#weak_assert (self#is_subtype rhs_type ptype); + + (* produce a write syntax *) + let write = + Syntax.mk_prop_write (Utils.string_of_expr oexpr) (Utils.string_of_expr pexpr) rhs_expr + in + (* update the type of the object *) + let ret_type = + let o_type = + match otype with + | T.Object o -> o + | _ -> failwith "Has to be an object type" + in + if pexpr = E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, "_number_prop_")) then + let new_prop = + T.Object.Property. + { + key = + E.Object.Property.Identifier + (Flow_ast_utils.ident_of_source (Loc.none, Utils.string_of_expr pexpr)); + value = Init (Loc.none, T.Number); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } + in + T.Object.( + T.Object + { + exact = o_type.exact; + properties = Property (Loc.none, new_prop) :: o_type.properties; + inexact = not o_type.exact; + }) + else + T.Object o_type + in + let new_env = self#add_binding env (Expr (oexpr, ret_type)) in + let new_env = self#add_binding new_env (Type ret_type) in + (write, new_env) + + (* rule for variable declaration with initialization *) + method rule_vardecl (env : env_t) : Syntax.t * env_t = + (* get the init expression *) + let init = self#choose 0 (fun () -> self#require_expr env) in + let (init_expr, init_type) = + match init with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let vname = Utils.mk_var () in + let var_decl = Syntax.mk_vardecl vname init_expr in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, vname)), init_type)) + in + let new_env = self#add_binding new_env (Type init_type) in + (var_decl, new_env) + + (* Rule for declaring a variable with init and type annotation *) + method rule_vardecl_with_type (env : env_t) : Syntax.t * env_t = + (* require an expression from the environment *) + let rhs = self#choose 0 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* require a type from the environment.*) + let vtype = self#choose 1 (fun () -> self#require_type env) in + let vtype = + match vtype with + | Type t -> t + | _ -> failwith "This has to a type" + in + (* assert the subtyping relationhips between the rhs and lhs *) + self#weak_assert (self#is_subtype rhs_type vtype); + let vname = Utils.mk_var () in + let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, vname)), vtype)) + in + let new_env = self#add_binding new_env (Type vtype) in + (var_decl, new_env) + + (* A rule for generating object literals *) + method rule_obj_lit (prop_num : int) (opt_num : int) (env : env_t) : Syntax.t * env_t = + let (lit, lit_expr, ret_type) = self#gen_obj_lit prop_num opt_num env in + let new_env = self#add_binding env (Expr (lit_expr, ret_type)) in + let new_env = self#add_binding new_env (Type ret_type) in + (lit, new_env) + + (* A rule for generating number literals *) + method rule_num_lit (env : env_t) : Syntax.t * env_t = + let lit = Syntax.mk_literal T.Number in + let ret_type = T.Number in + let new_env = + self#add_binding + env + (Expr + ( (match lit with + | Syntax.Expr e -> e + | _ -> failwith "[rule_num_list] Literal has to be an expr"), + ret_type )) + in + let new_env = self#add_binding new_env (Type ret_type) in + (Syntax.Empty, new_env) + + (* A 
rule for generating number literals *) + method rule_str_lit (env : env_t) : Syntax.t * env_t = + let lit = Syntax.mk_literal T.String in + let ret_type = T.String in + let new_env = + self#add_binding + env + (Expr + ( (match lit with + | Syntax.Expr e -> e + | _ -> failwith "Literal has to be an expr"), + ret_type )) + in + let new_env = self#add_binding new_env (Type ret_type) in + (Syntax.Empty, new_env) + + (* A rule for generating number literals *) + method rule_bool_lit (env : env_t) : Syntax.t * env_t = + let lit = Syntax.mk_literal T.Boolean in + let ret_type = T.Boolean in + let new_env = + self#add_binding + env + (Expr + ( (match lit with + | Syntax.Expr e -> e + | _ -> failwith "[rule_num_list] Literal has to be an expr"), + ret_type )) + in + let new_env = self#add_binding new_env (Type ret_type) in + (Syntax.Empty, new_env) + + (* A rule for generating function definitions *) + method rule_funcdef (env : env_t) : Syntax.t * env_t = + let mk_func_type (ptype : (Loc.t, Loc.t) T.t') (rtype : (Loc.t, Loc.t) T.t') : + (Loc.t, Loc.t) T.t' = + let param_type = + (Loc.none, T.Function.Param.{ name = None; annot = (Loc.none, ptype); optional = false }) + in + let ret_type = (Loc.none, rtype) in + T.Function.( + T.Function + { + params = (Loc.none, { Params.params = [param_type]; rest = None }); + return = ret_type; + tparams = None; + }) + in + (* parameter type *) let param_type = - (Loc.none, T.Function.Param.({name = None; - annot = (Loc.none, ptype); - optional = false})) in - let ret_type = (Loc.none, rtype) in - - T.Function.(T.Function {params = (Loc.none, { Params.params = [param_type]; rest = None }); - return = ret_type; - tparams = None}) in - - (* parameter type *) - let param_type = - match self#choose 0 (fun () -> self#require_type env) with - | Type t -> t - | _ -> failwith "has to be a type" in - - (* We are assuming we only have one parameter for now *) - let pname = "param" in - - (* We don't support recursion at this point, since in the syntax + match self#choose 0 (fun () -> self#require_type env) with + | Type t -> t + | _ -> failwith "has to be a type" + in + (* We are assuming we only have one parameter for now *) + let pname = "param" in + (* We don't support recursion at this point, since in the syntax there's no way to stop recursion *) - let fenv = (Expr (E.Identifier (Loc.none, pname), param_type)) :: env in - - (* return expression and its type *) - let func_return_type = - match self#choose 1 (fun () -> self#require_type fenv) with - | Type t -> t - | _ -> failwith "Has to be a type" in - self#backtrack_on_false (match func_return_type with + let fenv = + Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname)), param_type) :: env + in + (* return expression and its type *) + let func_return_type = + match self#choose 1 (fun () -> self#require_type fenv) with + | Type t -> t + | _ -> failwith "Has to be a type" + in + self#backtrack_on_false + (match func_return_type with | T.Object _ -> true | _ -> false); - let fname = Utils.mk_func () in - - (* return expression and its type *) - let ret_expr = self#choose 2 (fun () -> self#require_expr fenv) in - let ret_expr_expr, ret_expr_type = match ret_expr with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (self#is_subtype ret_expr_type func_return_type); - let ret_stmt = Syntax.mk_ret_stmt ret_expr_expr in - - let func_def = - Syntax.mk_func_def - fname - pname - param_type - [ret_stmt] - func_return_type in - - let ret_type = mk_func_type 
param_type func_return_type in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, fname)), ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - func_def, new_env - - (* A rule for generating function definitions *) - method rule_func_mutate (env : env_t) : (Syntax.t * env_t) = - let mk_func_type (ptype : (Loc.t, Loc.t) T.t') (rtype : (Loc.t, Loc.t) T.t') : (Loc.t, Loc.t) T.t' = + let fname = Utils.mk_func () in + (* return expression and its type *) + let ret_expr = self#choose 2 (fun () -> self#require_expr fenv) in + let (ret_expr_expr, ret_expr_type) = + match ret_expr with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false (self#is_subtype ret_expr_type func_return_type); + let ret_stmt = Syntax.mk_ret_stmt ret_expr_expr in + let func_def = Syntax.mk_func_def fname pname param_type [ret_stmt] func_return_type in + let ret_type = mk_func_type param_type func_return_type in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, fname)), ret_type)) + in + let new_env = self#add_binding new_env (Type ret_type) in + (func_def, new_env) + + (* A rule for generating function definitions *) + method rule_func_mutate (env : env_t) : Syntax.t * env_t = + let mk_func_type (ptype : (Loc.t, Loc.t) T.t') (rtype : (Loc.t, Loc.t) T.t') : + (Loc.t, Loc.t) T.t' = + let param_type = + (Loc.none, T.Function.Param.{ name = None; annot = (Loc.none, ptype); optional = false }) + in + let ret_type = (Loc.none, rtype) in + T.Function.( + T.Function + { + params = (Loc.none, { Params.params = [param_type]; rest = None }); + return = ret_type; + tparams = None; + }) + in + (* parameter type *) let param_type = - (Loc.none, T.Function.Param.({name = None; - annot = (Loc.none, ptype); - optional = false})) in - let ret_type = (Loc.none, rtype) in - - T.Function.(T.Function {params = (Loc.none, { Params.params = [param_type]; rest = None }); - return = ret_type; - tparams = None}) in - - (* parameter type *) - let param_type = - match self#choose 0 (fun () -> self#require_type env) with - | Type t -> t - | _ -> failwith "has to be a type" in - - (* We need to ensure the parameter is an object for mutation *) - self#backtrack_on_false (match param_type with + match self#choose 0 (fun () -> self#require_type env) with + | Type t -> t + | _ -> failwith "has to be a type" + in + (* We need to ensure the parameter is an object for mutation *) + self#backtrack_on_false + (match param_type with | T.Object _ -> true | _ -> false); - (* We are assuming we only have one parameter for now *) - let pname = "param" in - - let prop = self#choose 1 (fun () -> self#require_prop param_type true) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* get the expression on the rhs of the update *) - let rhs = self#choose 2 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* assert that type(rhs) <: type(prop) *) - self#weak_assert (self#is_subtype rhs_type ptype); - - (* produce a write syntax *) - let write = - Syntax.mk_prop_write - (Utils.string_of_expr (E.Identifier (Loc.none, pname))) - (Utils.string_of_expr pexpr) - rhs_expr in - - (* return expression and its type *) - let func_return_type = T.Void in - - let fname = Utils.mk_func () in - - let func_def = - Syntax.mk_func_def - fname - pname - 
param_type - [write] - func_return_type in - - let ret_type = mk_func_type param_type func_return_type in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, fname)), ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - func_def, new_env - - (* A rule for generating function calls *) - method rule_func_call (env : env_t) : (Syntax.t * env_t) = - (* require a function from the environment.*) - let func = self#choose 0 (fun () -> self#require_expr env) in - let func_expr, func_type = match func with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match func_type with + (* We are assuming we only have one parameter for now *) + let pname = "param" in + let prop = self#choose 1 (fun () -> self#require_prop param_type true) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* get the expression on the rhs of the update *) + let rhs = self#choose 2 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* assert that type(rhs) <: type(prop) *) + self#weak_assert (self#is_subtype rhs_type ptype); + + (* produce a write syntax *) + let write = + Syntax.mk_prop_write + (Utils.string_of_expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname)))) + (Utils.string_of_expr pexpr) + rhs_expr + in + (* return expression and its type *) + let func_return_type = T.Void in + let fname = Utils.mk_func () in + let func_def = Syntax.mk_func_def fname pname param_type [write] func_return_type in + let ret_type = mk_func_type param_type func_return_type in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, fname)), ret_type)) + in + let new_env = self#add_binding new_env (Type ret_type) in + (func_def, new_env) + + (* A rule for generating function calls *) + method rule_func_call (env : env_t) : Syntax.t * env_t = + (* require a function from the environment.*) + let func = self#choose 0 (fun () -> self#require_expr env) in + let (func_expr, func_type) = + match func with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match func_type with | T.Function _ -> true | _ -> false); - (* get the type of the parameter assuming we only have one param *) - let f_ptype = - let open T.Function in - match func_type with - | T.Function {params = (_, { Params.params = plist; rest = _ }); - return = _; - tparams = _} -> - T.Function.Param.((plist |> List.hd |> snd).annot) - | _ -> failwith "This has to a function type" in - - (* parameter *) - let param = self#choose 1 (fun () -> self#require_expr env) in - let param_expr, param_type = match param with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#weak_assert (self#is_subtype param_type (snd f_ptype)); - - let func_call = Syntax.mk_func_call func_expr param_expr in - - let ret_type = T.Function.(match func_type with - | T.Function {params = _; - return = (_, rt); - tparams =_} -> rt - | _ -> failwith "This has to be a function type") in - let new_env = - self#add_binding - env - (Expr ((match func_call with - | Syntax.Expr e -> e - | _ -> failwith "This has to be an expression"), - ret_type)) in - - let new_env = self#add_binding new_env (Type ret_type) in - func_call, new_env - - (* A rule for adding primitive types *) - method 
rule_prim_type (env : env_t) : (Syntax.t * env_t) = - let new_env = - self#add_binding - (self#add_binding env (Type T.Number)) - (Type T.String) in - Syntax.Empty, new_env - - method gen_type_list - (cons : ((Loc.t, Loc.t) T.t') -> bool) - (num : int) - (env : env_t) : ((Loc.t, Loc.t) T.t') list = - let rec helper count limit result = - if count = limit then result - else - let expr = self#choose count (fun () -> self#require_expr env) in - let t = match expr with - | Type t -> t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (cons t); - helper (count + 1) limit (t :: result) in - helper 0 num [] - - (* A rule for adding object types *) - method rule_obj_type (prop_num : int) (opt_num : int) (env : env_t) : (Syntax.t * env_t) = - let ret_type = self#gen_obj_type prop_num opt_num env in - let new_env = - self#add_binding env (Type ret_type) in - Syntax.Empty, new_env - - (* A rule for adding function types *) - method rule_func_type (env : env_t) : (Syntax.t * env_t) = - (* parameter type *) - let param_type = - match self#choose 0 (fun () -> self#require_type env) with - | Type t -> t - | _ -> failwith "has to be a type" in - - (* return expression and its type *) - let func_ret_type = - match self#choose 1 (fun () -> self#require_type env) with - | Type t -> t - | _ -> failwith "Has to be a type" in - - let ret_type = - let param = T.Function.Param.({name = None; - annot = (Loc.none, param_type); - optional = false}) in - T.Function.(T.Function { - params = (Loc.none, { Params. - params = [(Loc.none, param)]; - rest = None; - }); - return = (Loc.none, func_ret_type); - tparams = None; - }) in - let new_env = - self#add_binding env (Type ret_type) in - Syntax.Empty, new_env - - (* A rule for adding primitive types *) - method rule_union_type (tnum : int) (env : env_t) : (Syntax.t * env_t) = - (* a helper function for generating object property types *) - let rec gen_type_list - (count : int) - (limit : int) - (result : (Loc.t, Loc.t) T.t' list) : (Loc.t, Loc.t) T.t' list = - if count = limit then result - else - let ptype = self#choose count (fun () -> self#require_type env) in - let ptype = match ptype with - | Type t -> t - | _ -> failwith "This has to be a type" in - (* Do not pick the same type again! 
*) - self#backtrack_on_false (not (List.mem ptype result)); - gen_type_list (count + 1) limit (ptype :: result) in - - let ret_type = - let open Array in - let tarray = (gen_type_list 0 tnum []) |> of_list in - T.Union ((Loc.none, get tarray 0), - (Loc.none, get tarray 1), - (List.map - (fun (s) -> (Loc.none, s)) - (to_list (sub tarray 2 ((length tarray) - 2))))) in - let new_env = - self#add_binding env (Type ret_type) in - Syntax.Empty, new_env - - (* A rule for adding runtime checks *) - method rule_runtime_check (env : env_t) : (Syntax.t * env_t) = - let mk_prop_read (obj : (Loc.t, Loc.t) E.t') (prop : (Loc.t, Loc.t) E.t') : (Loc.t, Loc.t) E.t' = - let open E.Member in - E.Member {_object = (Loc.none, obj); - property = PropertyExpression (Loc.none, prop); - computed = false} in - - let rec get_prop (oname : (Loc.t, Loc.t) E.t') (ot : (Loc.t, Loc.t) T.Object.t) (depth : int) : env_elt_t = - let prop = self#choose depth (fun () -> self#require_prop (T.Object ot) true) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - let prop_elt = match ptype with - | T.Object t -> get_prop pexpr t (depth + 1) - | _ -> Expr (pexpr, ptype) in - match prop_elt with - | Expr (e, t) -> Expr (mk_prop_read oname e, t) - | _ -> failwith "This has to be an expression." in - - let var = self#choose 0 (fun () -> self#require_var env) in - let vexpr, vtype = match var with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression." in - self#backtrack_on_false (match vtype with + (* get the type of the parameter assuming we only have one param *) + let f_ptype = + T.Function.( + match func_type with + | T.Function + { params = (_, { Params.params = plist; rest = _ }); return = _; tparams = _ } -> + T.Function.Param.((plist |> List.hd |> snd).annot) + | _ -> failwith "This has to a function type") + in + (* parameter *) + let param = self#choose 1 (fun () -> self#require_expr env) in + let (param_expr, param_type) = + match param with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#weak_assert (self#is_subtype param_type (snd f_ptype)); + + let func_call = Syntax.mk_func_call func_expr param_expr in + let ret_type = + T.Function.( + match func_type with + | T.Function { params = _; return = (_, rt); tparams = _ } -> rt + | _ -> failwith "This has to be a function type") + in + let new_env = + self#add_binding + env + (Expr + ( (match func_call with + | Syntax.Expr e -> e + | _ -> failwith "This has to be an expression"), + ret_type )) + in + let new_env = self#add_binding new_env (Type ret_type) in + (func_call, new_env) + + (* A rule for adding primitive types *) + method rule_prim_type (env : env_t) : Syntax.t * env_t = + let new_env = self#add_binding (self#add_binding env (Type T.Number)) (Type T.String) in + (Syntax.Empty, new_env) + + method gen_type_list (cons : (Loc.t, Loc.t) T.t' -> bool) (num : int) (env : env_t) + : (Loc.t, Loc.t) T.t' list = + let rec helper count limit result = + if count = limit then + result + else + let expr = self#choose count (fun () -> self#require_expr env) in + let t = + match expr with + | Type t -> t + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false (cons t); + helper (count + 1) limit (t :: result) + in + helper 0 num [] + + (* A rule for adding object types *) + method rule_obj_type (prop_num : int) (opt_num : int) (env : env_t) : Syntax.t * env_t = + let ret_type = self#gen_obj_type prop_num opt_num env in + let new_env = 
self#add_binding env (Type ret_type) in + (Syntax.Empty, new_env) + + (* A rule for adding function types *) + method rule_func_type (env : env_t) : Syntax.t * env_t = + (* parameter type *) + let param_type = + match self#choose 0 (fun () -> self#require_type env) with + | Type t -> t + | _ -> failwith "has to be a type" + in + (* return expression and its type *) + let func_ret_type = + match self#choose 1 (fun () -> self#require_type env) with + | Type t -> t + | _ -> failwith "Has to be a type" + in + let ret_type = + let param = + T.Function.Param.{ name = None; annot = (Loc.none, param_type); optional = false } + in + T.Function.( + T.Function + { + params = (Loc.none, { Params.params = [(Loc.none, param)]; rest = None }); + return = (Loc.none, func_ret_type); + tparams = None; + }) + in + let new_env = self#add_binding env (Type ret_type) in + (Syntax.Empty, new_env) + + (* A rule for adding primitive types *) + method rule_union_type (tnum : int) (env : env_t) : Syntax.t * env_t = + (* a helper function for generating object property types *) + let rec gen_type_list (count : int) (limit : int) (result : (Loc.t, Loc.t) T.t' list) : + (Loc.t, Loc.t) T.t' list = + if count = limit then + result + else + let ptype = self#choose count (fun () -> self#require_type env) in + let ptype = + match ptype with + | Type t -> t + | _ -> failwith "This has to be a type" + in + (* Do not pick the same type again! *) + self#backtrack_on_false (not (List.mem ptype result)); + gen_type_list (count + 1) limit (ptype :: result) + in + let ret_type = + Array.( + let tarray = gen_type_list 0 tnum [] |> of_list in + T.Union + ( (Loc.none, get tarray 0), + (Loc.none, get tarray 1), + List.map (fun s -> (Loc.none, s)) (to_list (sub tarray 2 (length tarray - 2))) )) + in + let new_env = self#add_binding env (Type ret_type) in + (Syntax.Empty, new_env) + + (* A rule for adding runtime checks *) + method rule_runtime_check (env : env_t) : Syntax.t * env_t = + let mk_prop_read (obj : (Loc.t, Loc.t) E.t') (prop : (Loc.t, Loc.t) E.t') : + (Loc.t, Loc.t) E.t' = + E.Member.( + E.Member { _object = (Loc.none, obj); property = PropertyExpression (Loc.none, prop) }) + in + let rec get_prop (oname : (Loc.t, Loc.t) E.t') (ot : (Loc.t, Loc.t) T.Object.t) (depth : int) + : env_elt_t = + let prop = self#choose depth (fun () -> self#require_prop (T.Object ot) true) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let prop_elt = + match ptype with + | T.Object t -> get_prop pexpr t (depth + 1) + | _ -> Expr (pexpr, ptype) + in + match prop_elt with + | Expr (e, t) -> Expr (mk_prop_read oname e, t) + | _ -> failwith "This has to be an expression." + in + let var = self#choose 0 (fun () -> self#require_var env) in + let (vexpr, vtype) = + match var with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression." + in + self#backtrack_on_false + (match vtype with | T.Function _ -> false | T.Union _ -> false | _ -> true); - let final_expr = match vtype with - | T.Object ot -> get_prop vexpr ot 1 - | _ -> var in - let fexpr, ftype = match final_expr with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression." in - self#backtrack_on_false (match ftype with + let final_expr = + match vtype with + | T.Object ot -> get_prop vexpr ot 1 + | _ -> var + in + let (fexpr, ftype) = + match final_expr with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression." 
+ in + self#backtrack_on_false + (match ftype with | T.Function _ -> false | T.Union _ -> false | _ -> true); - Syntax.mk_runtime_check fexpr ftype, env - - (* A rule for adding runtime checks *) - method rule_check_optional_prop (env : env_t) : (Syntax.t * env_t) = - let mk_prop_read (obj : (Loc.t, Loc.t) E.t') (prop : (Loc.t, Loc.t) E.t') : (Loc.t, Loc.t) E.t' = - let open E.Member in - E.Member {_object = (Loc.none, obj); - property = PropertyExpression (Loc.none, prop); - computed = false} in - - let rec get_prop (oname : (Loc.t, Loc.t) E.t') (ot : (Loc.t, Loc.t) T.Object.t) (depth : int) : env_elt_t = - let prop = self#choose depth (fun () -> self#require_optional_prop (T.Object ot)) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - let prop_elt = match ptype with - | T.Object t -> get_prop pexpr t (depth + 1) - | _ -> Expr (pexpr, ptype) in - match prop_elt with - | Expr (e, t) -> Expr (mk_prop_read oname e, t) - | _ -> failwith "This has to be an expression." in - - let var = self#choose 0 (fun () -> self#require_var env) in - let vexpr, vtype = match var with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression." in - self#backtrack_on_false (match vtype with + (Syntax.mk_runtime_check fexpr ftype, env) + + (* A rule for adding runtime checks *) + method rule_check_optional_prop (env : env_t) : Syntax.t * env_t = + let mk_prop_read (obj : (Loc.t, Loc.t) E.t') (prop : (Loc.t, Loc.t) E.t') : + (Loc.t, Loc.t) E.t' = + E.Member.( + E.Member { _object = (Loc.none, obj); property = PropertyExpression (Loc.none, prop) }) + in + let rec get_prop (oname : (Loc.t, Loc.t) E.t') (ot : (Loc.t, Loc.t) T.Object.t) (depth : int) + : env_elt_t = + let prop = self#choose depth (fun () -> self#require_optional_prop (T.Object ot)) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let prop_elt = + match ptype with + | T.Object t -> get_prop pexpr t (depth + 1) + | _ -> Expr (pexpr, ptype) + in + match prop_elt with + | Expr (e, t) -> Expr (mk_prop_read oname e, t) + | _ -> failwith "This has to be an expression." + in + let var = self#choose 0 (fun () -> self#require_var env) in + let (vexpr, vtype) = + match var with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression." + in + self#backtrack_on_false + (match vtype with | T.Function _ -> false | T.Union _ -> false | _ -> true); - let final_expr = match vtype with - | T.Object ot -> get_prop vexpr ot 1 - | _ -> var in - let fexpr, ftype = match final_expr with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression." in - self#backtrack_on_false (match ftype with + let final_expr = + match vtype with + | T.Object ot -> get_prop vexpr ot 1 + | _ -> var + in + let (fexpr, ftype) = + match final_expr with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression." 
+ in + self#backtrack_on_false + (match ftype with | T.Function _ -> false | T.Union _ -> false | _ -> true); - Syntax.mk_check_opt_prop fexpr ftype, env - - method get_all_rules () = - let all_rules = [|self#rule_num_lit; - self#rule_str_lit; - self#rule_obj_lit 1 0; - self#rule_obj_type 1 0; - self#rule_vardecl; - self#rule_vardecl_with_type; - self#rule_func_type; - self#rule_union_type 2; - self#rule_prim_type; - self#rule_funcdef; - self#rule_func_call; - self#rule_prop_read; - self#rule_prop_update;|] in - all_rules -end;; - -class ruleset_random_base = object - inherit ruleset_base - method! weak_assert b = - if (not b) && ((Random.int 5) > 0) then raise Engine.Backtrack -end + (Syntax.mk_check_opt_prop fexpr ftype, env) + + method get_all_rules () = + let all_rules = + [| + self#rule_num_lit; + self#rule_str_lit; + self#rule_obj_lit 1 0; + self#rule_obj_type 1 0; + self#rule_vardecl; + self#rule_vardecl_with_type; + self#rule_func_type; + self#rule_union_type 2; + self#rule_prim_type; + self#rule_funcdef; + self#rule_func_call; + self#rule_prop_read; + self#rule_prop_update; + |] + in + all_rules + end + +class ruleset_random_base = + object + inherit ruleset_base + + method! weak_assert b = if (not b) && Random.int 5 > 0 then raise Engine.Backtrack + end diff --git a/testgen/ruleset_depth.ml b/testgen/ruleset_depth.ml index a034f6a4134..1b3ee5fef15 100644 --- a/testgen/ruleset_depth.ml +++ b/testgen/ruleset_depth.ml @@ -1,188 +1,223 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; -open Ruleset_base;; - -class ruleset_depth = object(self) - inherit Ruleset_base.ruleset_base - - method! get_name () : string = "depth" - - method! weak_assert b = self#backtrack_on_false b - - method! is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = - let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = - let tbl = Hashtbl.create 1000 in - let open T.Object.Property in - List.iter (fun p -> match p with - | T.Object.Property (_, {key = E.Object.Property.Identifier (_, name); - value = Init (_, t); - optional = _; - static = _; - proto = _; - _method = _; - variance = _;}) -> Hashtbl.add tbl name t - | _ -> ()) T.Object.(o.properties); - tbl in - let s1 = get_prop_set o1 in - let s2 = get_prop_set o2 in - let subtype = ref true in - (* check non optional properties *) - Hashtbl.iter (fun n t -> - (* Shouldn't use call is_subtyping recursivingly. We should +module Syntax = Syntax_base +open Ruleset_base + +class ruleset_depth = + object (self) + inherit Ruleset_base.ruleset_base + + method! get_name () : string = "depth" + + method! weak_assert b = self#backtrack_on_false b + + method! 
is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = + let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = + let tbl = Hashtbl.create 1000 in + T.Object.Property.( + List.iter + (fun p -> + match p with + | T.Object.Property + ( _, + { + key = E.Object.Property.Identifier (_, name); + value = Init (_, t); + optional = _; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + Hashtbl.add tbl name t + | _ -> ()) + T.Object.(o.properties); + tbl) + in + let s1 = get_prop_set o1 in + let s2 = get_prop_set o2 in + let subtype = ref true in + (* check non optional properties *) + Hashtbl.iter + (fun n t -> + (* Shouldn't use call is_subtyping recursivingly. We should use equality to limit depth subtyping *) - if (not (Hashtbl.mem s1 n)) || (not (self#is_subtype (Hashtbl.find s1 n) t)) then - subtype := false) s2; - !subtype + if (not (Hashtbl.mem s1 n)) || not (self#is_subtype (Hashtbl.find s1 n) t) then + subtype := false) + s2; + !subtype - (* A helper funtions for wrapping an expression and a type + (* A helper funtions for wrapping an expression and a type into an object for mutation and expose type errors. *) - method wrap_in_obj (expr : (Loc.t, Loc.t) E.t') (etype : (Loc.t, Loc.t) T.t') : ((Loc.t, Loc.t) E.t' * (Loc.t, Loc.t) T.t') = - let pname = "p_0" in - let obj_expr = - let prop = - let open E.Object.Property in - E.Object.Property (Loc.none, Init { - key = Identifier (Loc.none, pname); - value = Loc.none, expr; - shorthand = false - }) in - let properties = [prop] in - E.Object.(E.Object {properties}) in - let obj_type = - let open T.Object.Property in - let prop_type = - T.Object.Property (Loc.none, {key = E.Object.Property.Identifier (Loc.none, pname); - value = Init (Loc.none, etype); - optional = false; - static = false; - proto = false; - _method = false; - variance = None;}) in - T.Object.(T.Object {exact = false; properties = [prop_type]}) in - obj_expr, obj_type - - (* property update rule *) - method! rule_prop_update (env : env_t) : (Syntax.t * env_t) = - (* get an object variable *) - let obj = self#choose 0 (fun () -> self#require_expr env) in - self#backtrack_on_false (match obj with + method wrap_in_obj (expr : (Loc.t, Loc.t) E.t') (etype : (Loc.t, Loc.t) T.t') + : (Loc.t, Loc.t) E.t' * (Loc.t, Loc.t) T.t' = + let pname = "p_0" in + let obj_expr = + let prop = + E.Object.Property.( + E.Object.Property + ( Loc.none, + Init + { + key = Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname)); + value = (Loc.none, expr); + shorthand = false; + } )) + in + let properties = [prop] in + E.Object.(E.Object { properties; comments = Flow_ast_utils.mk_comments_opt () }) + in + let obj_type = + T.Object.Property.( + let prop_type = + T.Object.Property + ( Loc.none, + { + key = + E.Object.Property.Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname)); + value = Init (Loc.none, etype); + optional = false; + static = false; + proto = false; + _method = false; + variance = None; + } ) + in + T.Object.(T.Object { exact = false; properties = [prop_type]; inexact = true })) + in + (obj_expr, obj_type) + + (* property update rule *) + method! 
rule_prop_update (env : env_t) : Syntax.t * env_t = + (* get an object variable *) + let obj = self#choose 0 (fun () -> self#require_expr env) in + self#backtrack_on_false + (match obj with | Expr (E.Identifier _, T.Object _) -> true | _ -> false); - let oexpr, otype = match obj with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - let prop = self#choose 1 (fun () -> self#require_prop otype true) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match ptype with + let (oexpr, otype) = + match obj with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + let prop = self#choose 1 (fun () -> self#require_prop otype true) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match ptype with | T.Object _ -> true | _ -> false); - (* get the expression on the rhs of the update *) - let rhs = self#choose 2 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match rhs_expr with + (* get the expression on the rhs of the update *) + let rhs = self#choose 2 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match rhs_expr with | E.Object _ -> true | _ -> false); - (* assert that type(rhs) <: type(prop) *) - self#weak_assert (self#is_subtype rhs_type ptype); - - (* produce a write syntax *) - let write = - Syntax.mk_prop_write - (Utils.string_of_expr oexpr) - (Utils.string_of_expr pexpr) - rhs_expr in - - (* update the type of the object *) - let ret_type = - let o_type = match otype with - | T.Object o -> o - | _ -> failwith "Has to be an object type" in - T.Object o_type in - - let new_env = self#add_binding env (Expr (oexpr, ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - (write, new_env) - - (* A rule for generating object literals *) - method! rule_obj_lit (prop_num : int) (opt_num : int) (env : env_t) : (Syntax.t * env_t) = - - let lit, lit_expr, ret_type = self#gen_obj_lit prop_num opt_num env in - - let wrap_expr, wrap_ret_type = self#wrap_in_obj lit_expr ret_type in - let new_env = - self#add_binding - (self#add_binding - env - (Expr (lit_expr, ret_type))) - (Expr (wrap_expr, wrap_ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - let new_env = self#add_binding new_env (Type wrap_ret_type) in - lit, new_env - - (* Rule for declaring a variable with init and type annotation *) - method! 
rule_vardecl_with_type (env : env_t) : (Syntax.t * env_t) = - (* require an expression from the environment *) - let rhs = self#choose 0 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* For fast search on depth-subtyping *) - self#backtrack_on_false (match rhs_expr, rhs_type with - | E.Identifier _, _ -> true - | _, T.Object _ -> true + (* assert that type(rhs) <: type(prop) *) + self#weak_assert (self#is_subtype rhs_type ptype); + + (* produce a write syntax *) + let write = + Syntax.mk_prop_write (Utils.string_of_expr oexpr) (Utils.string_of_expr pexpr) rhs_expr + in + (* update the type of the object *) + let ret_type = + let o_type = + match otype with + | T.Object o -> o + | _ -> failwith "Has to be an object type" + in + T.Object o_type + in + let new_env = self#add_binding env (Expr (oexpr, ret_type)) in + let new_env = self#add_binding new_env (Type ret_type) in + (write, new_env) + + (* A rule for generating object literals *) + method! rule_obj_lit (prop_num : int) (opt_num : int) (env : env_t) : Syntax.t * env_t = + let (lit, lit_expr, ret_type) = self#gen_obj_lit prop_num opt_num env in + let (wrap_expr, wrap_ret_type) = self#wrap_in_obj lit_expr ret_type in + let new_env = + self#add_binding + (self#add_binding env (Expr (lit_expr, ret_type))) + (Expr (wrap_expr, wrap_ret_type)) + in + let new_env = self#add_binding new_env (Type ret_type) in + let new_env = self#add_binding new_env (Type wrap_ret_type) in + (lit, new_env) + + (* Rule for declaring a variable with init and type annotation *) + method! rule_vardecl_with_type (env : env_t) : Syntax.t * env_t = + (* require an expression from the environment *) + let rhs = self#choose 0 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* For fast search on depth-subtyping *) + self#backtrack_on_false + (match (rhs_expr, rhs_type) with + | (E.Identifier _, _) -> true + | (_, T.Object _) -> true | _ -> false); - (* require a type from the environment.*) - let vtype = self#choose 1 (fun () -> self#require_type env) in - let vtype = match vtype with - | Type t -> t - | _ -> failwith "This has to a type" in - - (* assert the subtyping relationhips between the rhs and lhs *) - self#weak_assert (self#is_subtype rhs_type vtype); - let vname = Utils.mk_var () in - let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, vname)), vtype)) in - let new_env = self#add_binding new_env (Type vtype) in - var_decl, new_env - - method! get_all_rules () = - [|self#rule_num_lit; - self#rule_obj_lit 2 0; - self#rule_vardecl_with_type; - self#rule_obj_lit 1 0; - self#rule_vardecl_with_type; - self#rule_prop_update; - self#rule_runtime_check; - |] -end - -class ruleset_random_depth = object - inherit ruleset_depth - method! 
weak_assert b = - if (not b) && ((Random.int 3) > 0) then raise Engine.Backtrack -end + (* require a type from the environment.*) + let vtype = self#choose 1 (fun () -> self#require_type env) in + let vtype = + match vtype with + | Type t -> t + | _ -> failwith "This has to a type" + in + (* assert the subtyping relationhips between the rhs and lhs *) + self#weak_assert (self#is_subtype rhs_type vtype); + let vname = Utils.mk_var () in + let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, vname)), vtype)) + in + let new_env = self#add_binding new_env (Type vtype) in + (var_decl, new_env) + + method! get_all_rules () = + [| + self#rule_num_lit; + self#rule_obj_lit 2 0; + self#rule_vardecl_with_type; + self#rule_obj_lit 1 0; + self#rule_vardecl_with_type; + self#rule_prop_update; + self#rule_runtime_check; + |] + end + +class ruleset_random_depth = + object + inherit ruleset_depth + + method! weak_assert b = if (not b) && Random.int 3 > 0 then raise Engine.Backtrack + end diff --git a/testgen/ruleset_exact.ml b/testgen/ruleset_exact.ml index e524e37b810..409f8bef849 100644 --- a/testgen/ruleset_exact.ml +++ b/testgen/ruleset_exact.ml @@ -1,48 +1,52 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* Show how to use exact types. *) -open Ruleset_base;; +open Ruleset_base (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; - -class ruleset_exact = object(self) - inherit ruleset_base as super - - method! get_name () : string = "exact" - - method! weak_assert b = self#backtrack_on_false b - - method! is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = - let open T.Object in - if (o1.exact && o2.exact) then - o1 = o2 - else if (o1.exact || o2.exact) then - false - else - super#is_subtype_obj o1 o2 - - method! get_all_rules () = - [|self#rule_num_lit; - self#rule_obj_lit 1 0; - self#rule_vardecl_with_type; - self#rule_prop_read; - self#rule_prop_update;|] +module Syntax = Syntax_base + +class ruleset_exact = + object (self) + inherit ruleset_base as super + + method! get_name () : string = "exact" + + method! weak_assert b = self#backtrack_on_false b + + method! is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = + T.Object.( + if o1.exact && o2.exact then + o1 = o2 + else if o1.exact || o2.exact then + false + else + super#is_subtype_obj o1 o2) + + method! get_all_rules () = + [| + self#rule_num_lit; + self#rule_obj_lit 1 0; + self#rule_vardecl_with_type; + self#rule_prop_read; + self#rule_prop_update; + |] end - class ruleset_random_exact = object +class ruleset_random_exact = + object inherit ruleset_exact - method! weak_assert b = - if (not b) && ((Random.int 3) > 0) then raise Engine.Backtrack + + method! 
weak_assert b = if (not b) && Random.int 3 > 0 then raise Engine.Backtrack end diff --git a/testgen/ruleset_func.ml b/testgen/ruleset_func.ml index de6baf5f80a..b0d3993ae86 100644 --- a/testgen/ruleset_func.ml +++ b/testgen/ruleset_func.ml @@ -1,228 +1,260 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; -open Ruleset_base;; - -class ruleset_func = object(self) - inherit Ruleset_base.ruleset_base - - method! get_name () : string = "func" - - method! weak_assert b = self#backtrack_on_false b - - method! is_subtype_func - (f1 : (Loc.t, Loc.t) T.Function.t) - (f2 : (Loc.t, Loc.t) T.Function.t) : bool = - let open T.Function in - let get_type_list (f : (Loc.t, Loc.t) T.Function.t) : (Loc.t, Loc.t) T.t' list = - let open T.Function.Param in - let (_, { T.Function.Params.params; rest = _ }) = f.params in - List.map - (fun param -> (snd param).annot |> snd) - params @ [f.return |> snd] in - - let rec func_subtype_helper l1 l2 = match l1, l2 with - | [], [] -> true - (* checking the return type *) - | hd1 :: [], hd2 :: [] -> self#is_subtype hd1 hd2 - (* checking the param type *) - | hd1 :: tl1, hd2 :: tl2 -> - (* BAD subtyping check. Please look at ruleset_base +module Syntax = Syntax_base +open Ruleset_base + +class ruleset_func = + object (self) + inherit Ruleset_base.ruleset_base + + method! get_name () : string = "func" + + method! weak_assert b = self#backtrack_on_false b + + method! is_subtype_func (f1 : (Loc.t, Loc.t) T.Function.t) (f2 : (Loc.t, Loc.t) T.Function.t) + : bool = + T.Function.( + let get_type_list (f : (Loc.t, Loc.t) T.Function.t) : (Loc.t, Loc.t) T.t' list = + T.Function.Param.( + let (_, { T.Function.Params.params; rest = _ }) = f.params in + List.map (fun param -> (snd param).annot |> snd) params @ [f.return |> snd]) + in + let rec func_subtype_helper l1 l2 = + match (l1, l2) with + | ([], []) -> true + (* checking the return type *) + | ([hd1], [hd2]) -> self#is_subtype hd1 hd2 + (* checking the param type *) + | (hd1 :: tl1, hd2 :: tl2) -> + (* BAD subtyping check. Please look at ruleset_base for the correct subtyping check *) - (match hd1, hd2 with - | T.Object _, T.Object _ - | T.Number, T.Number - | T.String, T.String - | T.Function _, T.Function _ -> - func_subtype_helper tl1 tl2 - | _ -> false) - | _ -> false in - - let p1_list = get_type_list f1 in - let p2_list = get_type_list f2 in - if (not ((List.length p1_list) = (List.length p2_list))) then false - else func_subtype_helper p1_list p2_list - - (* A rule for generating function definitions *) - method! 
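(* A sketch of the deliberately coarse function-subtyping check in ruleset_func:
   each signature is flattened into its parameter types followed by its return type,
   and the two lists are compared position by position, only requiring matching head
   constructors. The tag type is an illustrative stand-in for (Loc.t, Loc.t) T.t'. *)
type tag = Num | Str | Obj

let is_subtype_func (f1 : tag list * tag) (f2 : tag list * tag) : bool =
  let flatten (params, ret) = params @ [ret] in
  let rec helper l1 l2 =
    match (l1, l2) with
    | ([], []) -> true
    (* return position: the real code calls is_subtype here *)
    | ([r1], [r2]) -> r1 = r2
    (* parameter positions: only the constructors are compared *)
    | (h1 :: t1, h2 :: t2) -> h1 = h2 && helper t1 t2
    | _ -> false
  in
  let l1 = flatten f1 and l2 = flatten f2 in
  List.length l1 = List.length l2 && helper l1 l2

let () =
  assert (is_subtype_func ([Obj], Num) ([Obj], Num));
  assert (not (is_subtype_func ([Obj; Str], Num) ([Obj], Num)))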
rule_funcdef (env : env_t) : (Syntax.t * env_t) = - let mk_func_type (ptype : (Loc.t, Loc.t) T.t') (rtype : (Loc.t, Loc.t) T.t') : (Loc.t, Loc.t) T.t' = + (match (hd1, hd2) with + | (T.Object _, T.Object _) + | (T.Number, T.Number) + | (T.String, T.String) + | (T.Function _, T.Function _) -> + func_subtype_helper tl1 tl2 + | _ -> false) + | _ -> false + in + let p1_list = get_type_list f1 in + let p2_list = get_type_list f2 in + if not (List.length p1_list = List.length p2_list) then + false + else + func_subtype_helper p1_list p2_list) + + (* A rule for generating function definitions *) + method! rule_funcdef (env : env_t) : Syntax.t * env_t = + let mk_func_type (ptype : (Loc.t, Loc.t) T.t') (rtype : (Loc.t, Loc.t) T.t') : + (Loc.t, Loc.t) T.t' = + let param_type = + (Loc.none, T.Function.Param.{ name = None; annot = (Loc.none, ptype); optional = false }) + in + let ret_type = (Loc.none, rtype) in + T.Function.( + T.Function + { + params = (Loc.none, { Params.params = [param_type]; rest = None }); + return = ret_type; + tparams = None; + }) + in + (* parameter type *) let param_type = - (Loc.none, T.Function.Param.({name = None; - annot = (Loc.none, ptype); - optional = false})) in - let ret_type = (Loc.none, rtype) in - - T.Function.(T.Function {params = (Loc.none, { Params.params = [param_type]; rest = None }); - return = ret_type; - tparams = None}) in - - (* parameter type *) - let param_type = - match self#choose 0 (fun () -> self#require_type env) with - | Type t -> t - | _ -> failwith "has to be a type" in - self#backtrack_on_false (match param_type with - | T.Object _ | T.Function _ -> true + match self#choose 0 (fun () -> self#require_type env) with + | Type t -> t + | _ -> failwith "has to be a type" + in + self#backtrack_on_false + (match param_type with + | T.Object _ + | T.Function _ -> + true | _ -> false); - (* We are assuming we only have one parameter for now *) - let pname = "param" in - - (* make a new environment to account for parameters + (* We are assuming we only have one parameter for now *) + let pname = "param" in + (* make a new environment to account for parameters TODO: This is a hacky way to account for parameters. 
The correct way to do this is to change every expression that has the variable occurrences whose type is the super type of the parameter *) - let fenv = (Expr (E.Identifier (Loc.none, pname), param_type)) :: - (let open T.Function in - match param_type with - (* If the parameter is a function, we create new function calls *) - | T.Function {params = _; - return = _, rt; - tparams = _;} -> - let open E.Call in - List.fold_right (fun elt acc -> - match elt with - | Expr (E.Call {callee = _, fid; - targs; - arguments = args}, _) -> - let ftype = self#get_type_from_expr fid env in - if self#is_subtype param_type ftype then begin - (Expr (E.Call {callee = (Loc.none, E.Identifier (Loc.none, pname)); - targs; - arguments = args}, rt)) :: elt :: acc - end else elt :: acc - | _ -> elt :: acc) env [] - (* If the parameter is an object, we create new property read *) - | T.Object _ -> - let open E.Member in - List.fold_right (fun elt acc -> - match elt with - | Expr (E.Member {_object = _, obj; - property = prop; - computed = c}, t) -> - let otype = self#get_type_from_expr obj env in - if self#is_subtype param_type otype then begin - (Expr (E.Member {_object = (Loc.none, E.Identifier (Loc.none, pname)); - property = prop; - computed = c}, t)) :: elt :: acc - end else elt :: acc - | _ -> elt :: acc) env [] - | _ -> env) in - - (* return expression and its type *) - let func_return_type = - match self#choose 1 (fun () -> self#require_type fenv) with - | Type t -> t - | _ -> failwith "Has to be a type" in - self#backtrack_on_false (match func_return_type with + let fenv = + Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname)), param_type) + :: T.Function.( + match param_type with + (* If the parameter is a function, we create new function calls *) + | T.Function { params = _; return = (_, rt); tparams = _ } -> + E.Call.( + List.fold_right + (fun elt acc -> + match elt with + | Expr (E.Call { callee = (_, fid); targs; arguments = args }, _) -> + let ftype = self#get_type_from_expr fid env in + if self#is_subtype param_type ftype then + Expr + ( E.Call + { + callee = + ( Loc.none, + E.Identifier + (Flow_ast_utils.ident_of_source (Loc.none, pname)) ); + targs; + arguments = args; + }, + rt ) + :: elt + :: acc + else + elt :: acc + | _ -> elt :: acc) + env + []) + (* If the parameter is an object, we create new property read *) + | T.Object _ -> + E.Member.( + List.fold_right + (fun elt acc -> + match elt with + | Expr (E.Member { _object = (_, obj); property = prop }, t) -> + let otype = self#get_type_from_expr obj env in + if self#is_subtype param_type otype then + Expr + ( E.Member + { + _object = + ( Loc.none, + E.Identifier + (Flow_ast_utils.ident_of_source (Loc.none, pname)) ); + property = prop; + }, + t ) + :: elt + :: acc + else + elt :: acc + | _ -> elt :: acc) + env + []) + | _ -> env) + in + (* return expression and its type *) + let func_return_type = + match self#choose 1 (fun () -> self#require_type fenv) with + | Type t -> t + | _ -> failwith "Has to be a type" + in + self#backtrack_on_false + (match func_return_type with | T.Object _ -> true | _ -> false); - let fname = Utils.mk_func () in - - (* return expression and its type *) - let ret_expr = self#choose 2 (fun () -> self#require_expr fenv) in - let ret_expr_expr, ret_expr_type = match ret_expr with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (self#is_subtype ret_expr_type func_return_type); - let ret_stmt = Syntax.mk_ret_stmt ret_expr_expr in - - let func_def = 
- Syntax.mk_func_def - fname - pname - param_type - [ret_stmt] - func_return_type in - - let ret_type = mk_func_type param_type func_return_type in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, fname)), ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - func_def, new_env - - (* A rule for generating function calls *) - method! rule_func_call (env : env_t) : (Syntax.t * env_t) = - (* require a function from the environment.*) - let func = self#choose 0 (fun () -> self#require_expr env) in - let func_expr, func_type = match func with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match func_type with + let fname = Utils.mk_func () in + (* return expression and its type *) + let ret_expr = self#choose 2 (fun () -> self#require_expr fenv) in + let (ret_expr_expr, ret_expr_type) = + match ret_expr with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false (self#is_subtype ret_expr_type func_return_type); + let ret_stmt = Syntax.mk_ret_stmt ret_expr_expr in + let func_def = Syntax.mk_func_def fname pname param_type [ret_stmt] func_return_type in + let ret_type = mk_func_type param_type func_return_type in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, fname)), ret_type)) + in + let new_env = self#add_binding new_env (Type ret_type) in + (func_def, new_env) + + (* A rule for generating function calls *) + method! rule_func_call (env : env_t) : Syntax.t * env_t = + (* require a function from the environment.*) + let func = self#choose 0 (fun () -> self#require_expr env) in + let (func_expr, func_type) = + match func with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match func_type with | T.Function _ -> true | _ -> false); - (* get the type of the parameter assuming we only have one param *) - let f_ptype = - let open T.Function in - match func_type with - | T.Function {params = (_, { Params.params = plist; rest = _ }); - return = _; - tparams = _} -> - T.Function.Param.((plist |> List.hd |> snd).annot) - | _ -> failwith "This has to a function type" in - - (* parameter *) - let param = self#choose 1 (fun () -> self#require_expr env) in - let param_expr, param_type = match param with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match param_type with + (* get the type of the parameter assuming we only have one param *) + let f_ptype = + T.Function.( + match func_type with + | T.Function + { params = (_, { Params.params = plist; rest = _ }); return = _; tparams = _ } -> + T.Function.Param.((plist |> List.hd |> snd).annot) + | _ -> failwith "This has to a function type") + in + (* parameter *) + let param = self#choose 1 (fun () -> self#require_expr env) in + let (param_expr, param_type) = + match param with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match param_type with | T.Function _ -> true | T.Object _ -> true | _ -> false); - self#weak_assert (self#is_subtype param_type (snd f_ptype)); - - let func_call = Syntax.mk_func_call func_expr param_expr in - - let ret_type = T.Function.(match func_type with - | T.Function {params = _; - return = (_, rt); - tparams =_} -> rt - | _ -> failwith "This has to be a function type") in - let new_env = - self#add_binding - env - (Expr ((match func_call with - 
| Syntax.Expr e -> e - | _ -> failwith "This has to be an expression"), - ret_type)) in - - let new_env = self#add_binding new_env (Type ret_type) in - func_call, new_env - - method! get_all_rules () = - [|self#rule_num_lit; - self#rule_obj_lit 1 0; - self#rule_obj_lit 2 0; - self#rule_funcdef; - self#rule_funcdef; - self#rule_func_call; - self#rule_funcdef; - self#rule_func_call; - |] -end - - -class ruleset_random_func = object - inherit ruleset_func - method! weak_assert b = - if (not b) && ((Random.int 3) > 0) then raise Engine.Backtrack -end + self#weak_assert (self#is_subtype param_type (snd f_ptype)); + + let func_call = Syntax.mk_func_call func_expr param_expr in + let ret_type = + T.Function.( + match func_type with + | T.Function { params = _; return = (_, rt); tparams = _ } -> rt + | _ -> failwith "This has to be a function type") + in + let new_env = + self#add_binding + env + (Expr + ( (match func_call with + | Syntax.Expr e -> e + | _ -> failwith "This has to be an expression"), + ret_type )) + in + let new_env = self#add_binding new_env (Type ret_type) in + (func_call, new_env) + + method! get_all_rules () = + [| + self#rule_num_lit; + self#rule_obj_lit 1 0; + self#rule_obj_lit 2 0; + self#rule_funcdef; + self#rule_funcdef; + self#rule_func_call; + self#rule_funcdef; + self#rule_func_call; + |] + end + +class ruleset_random_func = + object + inherit ruleset_func + + method! weak_assert b = if (not b) && Random.int 3 > 0 then raise Engine.Backtrack + end diff --git a/testgen/ruleset_optional.ml b/testgen/ruleset_optional.ml index e9fe2416ce0..610eedbeaf7 100644 --- a/testgen/ruleset_optional.ml +++ b/testgen/ruleset_optional.ml @@ -1,181 +1,213 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; -open Ruleset_base;; - -class ruleset_optional = object(self) - inherit Ruleset_base.ruleset_base - - method! get_name () : string = "optional" - - method! weak_assert b = self#backtrack_on_false b - - method! 
is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = - let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = - let tbl = Hashtbl.create 1000 in - - (* hash table for storing optional properties *) - let opt_tbl = Hashtbl.create 1000 in - let open T.Object.Property in - List.iter (fun p -> match p with - | T.Object.Property (_, {key = E.Object.Property.Identifier (_, name); - value = Init (_, t); - optional = o; - static = _; - proto = _; - _method = _; - variance = _;}) -> - if o then Hashtbl.add opt_tbl name t - else Hashtbl.add tbl name t - | _ -> ()) T.Object.(o.properties); - tbl, opt_tbl in - let s1, opt1 = get_prop_set o1 in - let s2, opt2 = get_prop_set o2 in - let subtype = ref true in - (* check non optional properties *) - Hashtbl.iter (fun n t -> - if (not (Hashtbl.mem s1 n)) || (not ((Hashtbl.find s1 n) = t)) then - subtype := false) s2; - - (* check optional properties *) - (* This is bad subtyping *) - Hashtbl.iter (fun n t -> - if (((Hashtbl.mem s1 n) && ((Hashtbl.find s1 n) != t)) || - ((Hashtbl.mem opt1 n) && ((Hashtbl.find opt1 n) != t))) - then subtype := false) opt2; - !subtype - - (* rule for variable declaration with initialization *) - method! rule_vardecl (env : env_t) : (Syntax.t * env_t) = - (* get the init expression *) - let init = self#choose 0 (fun () -> self#require_expr env) in - let init_expr, init_type = match init with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* For fast search on depth-subtyping *) - self#backtrack_on_false (match init_expr, init_type with - | E.Object _, T.Object _ -> true +module Syntax = Syntax_base +open Ruleset_base + +class ruleset_optional = + object (self) + inherit Ruleset_base.ruleset_base + + method! get_name () : string = "optional" + + method! weak_assert b = self#backtrack_on_false b + + method! is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = + let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = + let tbl = Hashtbl.create 1000 in + (* hash table for storing optional properties *) + let opt_tbl = Hashtbl.create 1000 in + T.Object.Property.( + List.iter + (fun p -> + match p with + | T.Object.Property + ( _, + { + key = E.Object.Property.Identifier (_, name); + value = Init (_, t); + optional = o; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + if o then + Hashtbl.add opt_tbl name t + else + Hashtbl.add tbl name t + | _ -> ()) + T.Object.(o.properties); + (tbl, opt_tbl)) + in + let (s1, opt1) = get_prop_set o1 in + let (s2, opt2) = get_prop_set o2 in + let subtype = ref true in + (* check non optional properties *) + Hashtbl.iter + (fun n t -> + if (not (Hashtbl.mem s1 n)) || not (Hashtbl.find s1 n = t) then subtype := false) + s2; + + (* check optional properties *) + (* This is bad subtyping *) + Hashtbl.iter + (fun n t -> + if + (Hashtbl.mem s1 n && Hashtbl.find s1 n != t) + || (Hashtbl.mem opt1 n && Hashtbl.find opt1 n != t) + then + subtype := false) + opt2; + !subtype + + (* rule for variable declaration with initialization *) + method! 
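(* A sketch of the optional-property check above, over plain string maps: every
   required property of o2 must appear among o1's required properties with an equal
   type, while an optional property of o2 only has to agree when o1 happens to
   declare it. As the comment in the rule itself says, this is intentionally loose
   ("bad") subtyping; the map-based model here is a simplifying assumption, not the
   real hash-table code. *)
module SMap = Map.Make (String)

type obj = { required : string SMap.t; optional : string SMap.t }

let is_subtype_obj (o1 : obj) (o2 : obj) : bool =
  let required_ok =
    SMap.for_all (fun n t -> SMap.find_opt n o1.required = Some t) o2.required
  in
  let optional_ok =
    SMap.for_all
      (fun n t ->
        (match SMap.find_opt n o1.required with Some t' -> t' = t | None -> true)
        && (match SMap.find_opt n o1.optional with Some t' -> t' = t | None -> true))
      o2.optional
  in
  required_ok && optional_ok

let of_list l = List.fold_left (fun m (k, v) -> SMap.add k v m) SMap.empty l

let () =
  let o1 = { required = of_list [("p", "number")]; optional = SMap.empty } in
  let o2 = { required = SMap.empty; optional = of_list [("p", "string")] } in
  assert (not (is_subtype_obj o1 o2))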
rule_vardecl (env : env_t) : Syntax.t * env_t = + (* get the init expression *) + let init = self#choose 0 (fun () -> self#require_expr env) in + let (init_expr, init_type) = + match init with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* For fast search on depth-subtyping *) + self#backtrack_on_false + (match (init_expr, init_type) with + | (E.Object _, T.Object _) -> true | _ -> false); - let vname = Utils.mk_var () in - let var_decl = Syntax.mk_vardecl vname init_expr in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, vname)), init_type)) in - let new_env = self#add_binding new_env (Type init_type) in - var_decl, new_env - - (* Rule for declaring a variable with init and type annotation *) - method! rule_vardecl_with_type (env : env_t) : (Syntax.t * env_t) = - (* require an expression from the environment *) - let rhs = self#choose 0 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* For fast search *) - self#backtrack_on_false (match rhs_expr, rhs_type with - | E.Identifier _, _ -> true + let vname = Utils.mk_var () in + let var_decl = Syntax.mk_vardecl vname init_expr in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, vname)), init_type)) + in + let new_env = self#add_binding new_env (Type init_type) in + (var_decl, new_env) + + (* Rule for declaring a variable with init and type annotation *) + method! rule_vardecl_with_type (env : env_t) : Syntax.t * env_t = + (* require an expression from the environment *) + let rhs = self#choose 0 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* For fast search *) + self#backtrack_on_false + (match (rhs_expr, rhs_type) with + | (E.Identifier _, _) -> true | _ -> false); - (* require a type from the environment.*) - let vtype = self#choose 1 (fun () -> self#require_type env) in - let vtype = match vtype with - | Type t -> t - | _ -> failwith "This has to a type" in - - (* assert the subtyping relationhips between the rhs and lhs *) - self#weak_assert (self#is_subtype rhs_type vtype); - let vname = Utils.mk_var () in - let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, vname)), vtype)) in - let new_env = self#add_binding new_env (Type vtype) in - var_decl, new_env - - (* property update rule *) - method! rule_prop_update (env : env_t) : (Syntax.t * env_t) = - (* get an object variable *) - let obj = self#choose 0 (fun () -> self#require_expr env) in - self#backtrack_on_false (match obj with + (* require a type from the environment.*) + let vtype = self#choose 1 (fun () -> self#require_type env) in + let vtype = + match vtype with + | Type t -> t + | _ -> failwith "This has to a type" + in + (* assert the subtyping relationhips between the rhs and lhs *) + self#weak_assert (self#is_subtype rhs_type vtype); + let vname = Utils.mk_var () in + let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, vname)), vtype)) + in + let new_env = self#add_binding new_env (Type vtype) in + (var_decl, new_env) + + (* property update rule *) + method! 
rule_prop_update (env : env_t) : Syntax.t * env_t = + (* get an object variable *) + let obj = self#choose 0 (fun () -> self#require_expr env) in + self#backtrack_on_false + (match obj with | Expr (E.Identifier _, T.Object _) -> true | _ -> false); - let oexpr, otype = match obj with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - self#backtrack_on_false (match otype with + let (oexpr, otype) = + match obj with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match otype with | T.Object ot -> T.Object.(List.length ot.properties) = 2 | _ -> true); - let prop = self#choose 1 (fun () -> self#require_prop otype true) in - let pexpr, ptype = match prop with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* get the expression on the rhs of the update *) - let rhs = self#choose 2 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match rhs_expr with - | E.Identifier _ | E.Member _ -> false + let prop = self#choose 1 (fun () -> self#require_prop otype true) in + let (pexpr, ptype) = + match prop with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* get the expression on the rhs of the update *) + let rhs = self#choose 2 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match rhs_expr with + | E.Identifier _ + | E.Member _ -> + false | _ -> true); - (* assert that type(rhs) <: type(prop) *) - self#weak_assert (self#is_subtype rhs_type ptype); - - (* produce a write syntax *) - let write = - Syntax.mk_prop_write - (Utils.string_of_expr oexpr) - (Utils.string_of_expr pexpr) - rhs_expr in - - (* update the type of the object *) - let ret_type = - let o_type = match otype with - | T.Object o -> o - | _ -> failwith "Has to be an object type" in - T.Object o_type in - - let new_env = self#add_binding env (Expr (oexpr, ret_type)) in - let new_env = self#add_binding new_env (Type ret_type) in - (write, new_env) - - method! get_all_rules () = - [|self#rule_num_lit; - self#rule_str_lit; - self#rule_obj_lit 1 0; - self#rule_vardecl; - self#rule_obj_type 1 1; - self#rule_vardecl_with_type; - self#rule_prop_update; - self#rule_obj_type 1 1; - self#rule_vardecl_with_type; - self#rule_prop_update; - self#rule_check_optional_prop; - |] -end - -class ruleset_random_optional = object - inherit ruleset_optional - method! weak_assert b = - if (not b) && ((Random.int 3) > 0) then raise Engine.Backtrack -end + (* assert that type(rhs) <: type(prop) *) + self#weak_assert (self#is_subtype rhs_type ptype); + + (* produce a write syntax *) + let write = + Syntax.mk_prop_write (Utils.string_of_expr oexpr) (Utils.string_of_expr pexpr) rhs_expr + in + (* update the type of the object *) + let ret_type = + let o_type = + match otype with + | T.Object o -> o + | _ -> failwith "Has to be an object type" + in + T.Object o_type + in + let new_env = self#add_binding env (Expr (oexpr, ret_type)) in + let new_env = self#add_binding new_env (Type ret_type) in + (write, new_env) + + method! 
get_all_rules () = + [| + self#rule_num_lit; + self#rule_str_lit; + self#rule_obj_lit 1 0; + self#rule_vardecl; + self#rule_obj_type 1 1; + self#rule_vardecl_with_type; + self#rule_prop_update; + self#rule_obj_type 1 1; + self#rule_vardecl_with_type; + self#rule_prop_update; + self#rule_check_optional_prop; + |] + end + +class ruleset_random_optional = + object + inherit ruleset_optional + + method! weak_assert b = if (not b) && Random.int 3 > 0 then raise Engine.Backtrack + end diff --git a/testgen/ruleset_rtest.ml b/testgen/ruleset_rtest.ml index 89b24eedfc7..bf871505bc3 100644 --- a/testgen/ruleset_rtest.ml +++ b/testgen/ruleset_rtest.ml @@ -1,59 +1,63 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; -open Ruleset_base;; - -class ruleset_rtest = object(self) - inherit Ruleset_base.ruleset_base - - method! get_name () : string = "rtest" - - method! weak_assert b = - if (not b) && ((Random.int 3) > 0) then raise Engine.Backtrack - - (* Rule for declaring a variable with init and type annotation *) - method! rule_vardecl_with_type (env : env_t) : (Syntax.t * env_t) = - (* require an expression from the environment *) - let rhs = self#choose 0 (fun () -> self#require_expr env) in - let rhs_expr, rhs_type = match rhs with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - - (* require a type from the environment.*) - let vtype = self#choose 1 (fun () -> self#require_type env) in - let vtype = match vtype with - | Type t -> t - | _ -> failwith "This has to a type" in - - (* assert the subtyping relationhips between the rhs and lhs *) - self#weak_assert (self#is_subtype rhs_type vtype); - let vname = Utils.mk_var () in - let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in - let new_env = - self#add_binding - env - (Expr ((E.Identifier (Loc.none, vname)), vtype)) in - let new_env = self#add_binding new_env (Type vtype) in - var_decl, new_env - - method! get_all_rules () = - [|self#rule_num_lit; - self#rule_str_lit; - self#rule_obj_lit 2 0; - self#rule_vardecl_with_type; - self#rule_vardecl_with_type; - self#rule_runtime_check; - |] -end +module Syntax = Syntax_base +open Ruleset_base + +class ruleset_rtest = + object (self) + inherit Ruleset_base.ruleset_base + + method! get_name () : string = "rtest" + + method! weak_assert b = if (not b) && Random.int 3 > 0 then raise Engine.Backtrack + + (* Rule for declaring a variable with init and type annotation *) + method! 
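(* A sketch of the vardecl-with-type rule that several rulesets repeat: pick a typed
   expression and a type that are already in the environment, weakly assert that the
   expression's type is a subtype of the annotation, then emit "var x: T = e;" and
   record the new binding. The binding type, is_subtype, and fresh_var below are
   simplified stand-ins for the real env_t and engine, not the testgen API. *)
type typ = Number | String

type binding =
  | Expr of string * typ
  | Type of typ

let is_subtype t1 t2 = t1 = t2

let counter = ref 0

let fresh_var () =
  incr counter;
  Printf.sprintf "v_%d" !counter

let string_of_typ = function
  | Number -> "number"
  | String -> "string"

let rule_vardecl_with_type (env : binding list) : string * binding list =
  (* the real engine searches over these choices and backtracks; we take the first *)
  let (rhs, rhs_t) =
    match List.find (function Expr _ -> true | _ -> false) env with
    | Expr (e, t) -> (e, t)
    | _ -> assert false
  in
  let annot =
    match List.find (function Type _ -> true | _ -> false) env with
    | Type t -> t
    | _ -> assert false
  in
  (* weak_assert point: a failed subtype check would normally trigger backtracking *)
  if not (is_subtype rhs_t annot) then failwith "backtrack";
  let name = fresh_var () in
  let decl = Printf.sprintf "var %s: %s = %s;" name (string_of_typ annot) rhs in
  (decl, Expr (name, annot) :: Type annot :: env)

let () =
  let (decl, _) = rule_vardecl_with_type [Expr ("1.1", Number); Type Number] in
  print_endline decl (* var v_1: number = 1.1; *)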
rule_vardecl_with_type (env : env_t) : Syntax.t * env_t = + (* require an expression from the environment *) + let rhs = self#choose 0 (fun () -> self#require_expr env) in + let (rhs_expr, rhs_type) = + match rhs with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + (* require a type from the environment.*) + let vtype = self#choose 1 (fun () -> self#require_type env) in + let vtype = + match vtype with + | Type t -> t + | _ -> failwith "This has to a type" + in + (* assert the subtyping relationhips between the rhs and lhs *) + self#weak_assert (self#is_subtype rhs_type vtype); + let vname = Utils.mk_var () in + let var_decl = Syntax.mk_vardecl ~etype:vtype vname rhs_expr in + let new_env = + self#add_binding + env + (Expr (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, vname)), vtype)) + in + let new_env = self#add_binding new_env (Type vtype) in + (var_decl, new_env) + + method! get_all_rules () = + [| + self#rule_num_lit; + self#rule_str_lit; + self#rule_obj_lit 2 0; + self#rule_vardecl_with_type; + self#rule_vardecl_with_type; + self#rule_runtime_check; + |] + end diff --git a/testgen/ruleset_union.ml b/testgen/ruleset_union.ml index 5e0cdd8a283..dec1a659dea 100644 --- a/testgen/ruleset_union.ml +++ b/testgen/ruleset_union.ml @@ -1,138 +1,156 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) -module Syntax = Syntax_base;; -open Ruleset_base;; +module Syntax = Syntax_base +open Ruleset_base -class ruleset_union = object(self) - inherit Ruleset_base.ruleset_base +class ruleset_union = + object (self) + inherit Ruleset_base.ruleset_base - method! get_name () : string = "union" + method! get_name () : string = "union" - method! weak_assert b = self#backtrack_on_false b + method! weak_assert b = self#backtrack_on_false b - (* check t1 <: t2 *) - method! is_subtype (t1 : (Loc.t, Loc.t) T.t') (t2 : (Loc.t, Loc.t) T.t') : bool = - match t1, t2 with - | (t, T.Union ((_, tu1), (_, tu2), tlist)) -> (* t should be one of the branches of Union *) - List.mem t (tu1 :: tu2 :: (List.map snd tlist)) - | T.Object o1, T.Object o2 -> self#is_subtype_obj o1 o2 - | T.Function f1, T.Function f2 -> self#is_subtype_func f1 f2 - | _ when t1 = t2 -> true - | _ -> false + (* check t1 <: t2 *) + method! is_subtype (t1 : (Loc.t, Loc.t) T.t') (t2 : (Loc.t, Loc.t) T.t') : bool = + match (t1, t2) with + | (t, T.Union ((_, tu1), (_, tu2), tlist)) -> + (* t should be one of the branches of Union *) + List.mem t (tu1 :: tu2 :: Core_list.map ~f:snd tlist) + | (T.Object o1, T.Object o2) -> self#is_subtype_obj o1 o2 + | (T.Function f1, T.Function f2) -> self#is_subtype_func f1 f2 + | _ when t1 = t2 -> true + | _ -> false - (* Using a loose form of subtyping from ruleset_depth, so we can allow + (* Using a loose form of subtyping from ruleset_depth, so we can allow passing { p : 3 } to a function that expects { p : number | string }. In general this is unsound, and Flow allows this only in certain situations. *) - method! 
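(* A sketch of the union rule above: a type is a subtype of a union exactly when it
   is (structurally) one of the union's branches, and everything else falls back to
   the object and equality cases. The small closed ty type is an illustrative
   stand-in for (Loc.t, Loc.t) T.t'. *)
type ty =
  | Num
  | Str
  | Obj of (string * ty) list
  | Union of ty * ty * ty list

let rec is_subtype (t1 : ty) (t2 : ty) : bool =
  match (t1, t2) with
  | (t, Union (a, b, rest)) -> List.mem t (a :: b :: rest)
  | (Obj p1, Obj p2) ->
    (* width subtyping on objects, mirroring is_subtype_obj *)
    List.for_all (fun (n, t) -> List.mem_assoc n p1 && is_subtype (List.assoc n p1) t) p2
  | _ -> t1 = t2

let () =
  assert (is_subtype Num (Union (Num, Str, [])));
  assert (not (is_subtype (Obj [("p", Num)]) (Union (Num, Str, []))))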
is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = - let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = - let tbl = Hashtbl.create 1000 in - let open T.Object.Property in - List.iter (fun p -> match p with - | T.Object.Property (_, {key = E.Object.Property.Identifier (_, name); - value = Init (_, t); - optional = _; - static = _; - proto = _; - _method = _; - variance = _;}) -> Hashtbl.add tbl name t - | _ -> ()) T.Object.(o.properties); - tbl in - let s1 = get_prop_set o1 in - let s2 = get_prop_set o2 in - let subtype = ref true in - (* check non optional properties *) - Hashtbl.iter (fun n t -> - (* Shouldn't use call is_subtyping recursivingly. We should + method! is_subtype_obj (o1 : (Loc.t, Loc.t) T.Object.t) (o2 : (Loc.t, Loc.t) T.Object.t) = + let get_prop_set (o : (Loc.t, Loc.t) T.Object.t) = + let tbl = Hashtbl.create 1000 in + T.Object.Property.( + List.iter + (fun p -> + match p with + | T.Object.Property + ( _, + { + key = E.Object.Property.Identifier (_, name); + value = Init (_, t); + optional = _; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + Hashtbl.add tbl name t + | _ -> ()) + T.Object.(o.properties); + tbl) + in + let s1 = get_prop_set o1 in + let s2 = get_prop_set o2 in + let subtype = ref true in + (* check non optional properties *) + Hashtbl.iter + (fun n t -> + (* Shouldn't use call is_subtyping recursivingly. We should use equality to limit depth subtyping *) - if (not (Hashtbl.mem s1 n)) || (not (self#is_subtype (Hashtbl.find s1 n) t)) then - subtype := false) s2; - !subtype - - method! rule_func_call (env : env_t) : (Syntax.t * env_t) = - (* require a function from the environment.*) - let func = self#choose 0 (fun () -> self#require_expr env) in - let func_expr, func_type = match func with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match func_type with + if (not (Hashtbl.mem s1 n)) || not (self#is_subtype (Hashtbl.find s1 n) t) then + subtype := false) + s2; + !subtype + + method! 
rule_func_call (env : env_t) : Syntax.t * env_t = + (* require a function from the environment.*) + let func = self#choose 0 (fun () -> self#require_expr env) in + let (func_expr, func_type) = + match func with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match func_type with | T.Function _ -> true | _ -> false); - (* get the type of the parameter assuming we only have one param *) - let f_ptype = - match func_type with - | T.Function ft -> - let ft_param = T.Function.(ft.params) |> snd in - let params = T.Function.Params.(ft_param.params) |> List.hd |> snd in - T.Function.Param.(params.annot) - | _ -> failwith "This has to a function type" in - - (* parameter *) - let param = self#choose 1 (fun () -> self#require_expr env) in - let param_expr, param_type = match param with - | Expr (e, t) -> e, t - | _ -> failwith "This has to be an expression" in - self#backtrack_on_false (match param_expr with + (* get the type of the parameter assuming we only have one param *) + let f_ptype = + match func_type with + | T.Function ft -> + let ft_param = T.Function.(ft.params) |> snd in + let params = T.Function.Params.(ft_param.params) |> List.hd |> snd in + T.Function.Param.(params.annot) + | _ -> failwith "This has to a function type" + in + (* parameter *) + let param = self#choose 1 (fun () -> self#require_expr env) in + let (param_expr, param_type) = + match param with + | Expr (e, t) -> (e, t) + | _ -> failwith "This has to be an expression" + in + self#backtrack_on_false + (match param_expr with | E.Identifier _ -> true | _ -> false); - self#weak_assert (self#is_subtype param_type (snd f_ptype)); - - let func_call = Syntax.mk_func_call func_expr param_expr in - - let ret_type = T.Function.(match func_type with - | T.Function {params = _; - return = (_, rt); - tparams =_} -> rt - | _ -> failwith "This has to be a function type") in - let new_env = - self#add_binding - env - (Expr ((match func_call with - | Syntax.Expr e -> e - | _ -> failwith "This has to be an expression"), - ret_type)) in - - let new_env = self#add_binding new_env (Type ret_type) in - func_call, new_env - - method! rule_obj_type (prop_num : int) (opt_num : int) (env : env_t) : (Syntax.t * env_t) = - let ret_type = self#gen_obj_type - prop_num - opt_num - env - ~cons:(fun elt -> - (match elt with - | Type (T.Union _) -> true - | _ -> false)) in - let new_env = - self#add_binding env (Type ret_type) in - Syntax.Empty, new_env - - method! get_all_rules () = - [|self#rule_num_lit; - self#rule_str_lit; - self#rule_obj_lit 1 0; - self#rule_vardecl; - self#rule_union_type 2; - self#rule_obj_type 1 0; - self#rule_func_mutate; - self#rule_func_call; - self#rule_runtime_check; - (* + self#weak_assert (self#is_subtype param_type (snd f_ptype)); + + let func_call = Syntax.mk_func_call func_expr param_expr in + let ret_type = + T.Function.( + match func_type with + | T.Function { params = _; return = (_, rt); tparams = _ } -> rt + | _ -> failwith "This has to be a function type") + in + let new_env = + self#add_binding + env + (Expr + ( (match func_call with + | Syntax.Expr e -> e + | _ -> failwith "This has to be an expression"), + ret_type )) + in + let new_env = self#add_binding new_env (Type ret_type) in + (func_call, new_env) + + method! 
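(* A sketch of the function-call rule above: find a function-typed expression in the
   environment, pull out its (single) parameter type, pick an argument whose type is
   a subtype of it, and bind the resulting call at the function's return type. The
   tiny ty/env model is an illustrative assumption, not the real testgen types. *)
type ty = Number | Str | Func of ty * ty (* Func (param, return) *)

let is_subtype t1 t2 = t1 = t2

let rule_func_call (env : (string * ty) list) : (string * ty) option =
  match List.find_opt (fun (_, t) -> match t with Func _ -> true | _ -> false) env with
  | Some (fname, Func (pt, rt)) ->
    (* a real run chooses among all candidate arguments and backtracks on failure *)
    (match List.find_opt (fun (_, t) -> is_subtype t pt) env with
    | Some (arg, _) -> Some (Printf.sprintf "%s(%s)" fname arg, rt)
    | None -> None)
  | _ -> None

let () =
  match rule_func_call [("f", Func (Number, Str)); ("x", Number)] with
  | Some (call, _) -> print_endline call (* prints f(x) *)
  | None -> print_endline "no call generated"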
rule_obj_type (prop_num : int) (opt_num : int) (env : env_t) : Syntax.t * env_t = + let ret_type = + self#gen_obj_type prop_num opt_num env ~cons:(fun elt -> + match elt with + | Type (T.Union _) -> true + | _ -> false) + in + let new_env = self#add_binding env (Type ret_type) in + (Syntax.Empty, new_env) + + method! get_all_rules () = + [| + self#rule_num_lit; + self#rule_str_lit; + self#rule_obj_lit 1 0; + self#rule_vardecl; + self#rule_union_type 2; + self#rule_obj_type 1 0; + self#rule_func_mutate; + self#rule_func_call; + self#rule_runtime_check + (* self#rule_vardecl_with_type; (*make it challenging*) self#rule_prop_update; self#rule_vardecl_with_type; @@ -140,12 +158,13 @@ class ruleset_union = object(self) self#rule_func_mutate; self#rule_func_call; self#rule_prop_read; - *) + *); |] -end + end + +class ruleset_random_union = + object + inherit ruleset_union -class ruleset_random_union = object - inherit ruleset_union - method! weak_assert b = - if (not b) && ((Random.int 5) > 0) then raise Engine.Backtrack -end + method! weak_assert b = if (not b) && Random.int 5 > 0 then raise Engine.Backtrack + end diff --git a/testgen/syntax_base.ml b/testgen/syntax_base.ml index 6236011ac43..15c7e5f9378 100644 --- a/testgen/syntax_base.ml +++ b/testgen/syntax_base.ml @@ -1,17 +1,16 @@ (** - * Copyright (c) 2013-present, Facebook, Inc. + * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) module Ast = Flow_ast - -module S = Flow_ast.Statement;; -module E = Flow_ast.Expression;; -module T = Flow_ast.Type;; -module P = Flow_ast.Pattern;; -module Utils = Flowtestgen_utils;; +module S = Flow_ast.Statement +module E = Flow_ast.Expression +module T = Flow_ast.Type +module P = Flow_ast.Pattern +module Utils = Flowtestgen_utils (* ESSENTIAL: Syntax type and related functions *) type t = @@ -29,112 +28,133 @@ let str_of_syntax (s : t) : string = let rec mk_literal_expr (t : (Loc.t, Loc.t) T.t') : (Loc.t, Loc.t) E.t' = match t with | T.Number -> - E.Literal (Ast.Literal.({value = Number 1.1; raw = "1.1"})) + E.Literal + Ast.Literal.{ value = Number 1.1; raw = "1.1"; comments = Flow_ast_utils.mk_comments_opt () } | T.String -> - E.Literal (Ast.Literal.({value = String "foo"; raw = "\"foo\""})) + E.Literal + Ast.Literal. + { value = String "foo"; raw = "\"foo\""; comments = Flow_ast_utils.mk_comments_opt () } | T.Boolean -> - E.Literal (Ast.Literal.({value = Boolean false; raw = "false"})) + E.Literal + Ast.Literal. 
+ { value = Boolean false; raw = "false"; comments = Flow_ast_utils.mk_comments_opt () } | T.Union (t1, t2, rest) -> - let elements = (t1 :: t2 :: rest) - |> List.map snd - |> List.map mk_literal_expr - |> List.map (fun e -> Some (E.Expression (Loc.none, e))) in - E.Array.(E.Array {elements}) + let elements = + t1 :: t2 :: rest + |> Core_list.map ~f:snd + |> Core_list.map ~f:mk_literal_expr + |> Core_list.map ~f:(fun e -> Some (E.Expression (Loc.none, e))) + in + E.Array.(E.Array { elements; comments = Flow_ast_utils.mk_comments_opt () }) | T.Object obj_t -> mk_obj_literal_expr obj_t | T.StringLiteral lit -> let value = Ast.StringLiteral.(lit.value) in let raw = Ast.StringLiteral.(lit.raw) in - E.Literal (Ast.Literal.({value = String value; raw})) + E.Literal + Ast.Literal.{ value = String value; raw; comments = Flow_ast_utils.mk_comments_opt () } | T.NumberLiteral lit -> let value = Ast.NumberLiteral.(lit.value) in let raw = Ast.NumberLiteral.(lit.raw) in - E.Literal (Ast.Literal.({value = Number value; raw})) + E.Literal + Ast.Literal.{ value = Number value; raw; comments = Flow_ast_utils.mk_comments_opt () } | T.BooleanLiteral value -> - let raw = if value then "true" else "false" in - E.Literal (Ast.Literal.({value = Boolean value; raw})) + let raw = + if value then + "true" + else + "false" + in + E.Literal + Ast.Literal.{ value = Boolean value; raw; comments = Flow_ast_utils.mk_comments_opt () } | T.Tuple tlist -> - let elements = List.map (fun (_, tt) -> - let e = mk_literal_expr tt in - Some (E.Expression (Loc.none, e))) tlist in - E.Array.(E.Array {elements}) - | T.Array t -> mk_literal_expr (T.Tuple [t; t; t; t; t;]) + let elements = + Core_list.map + ~f:(fun (_, tt) -> + let e = mk_literal_expr tt in + Some (E.Expression (Loc.none, e))) + tlist + in + E.Array.(E.Array { elements; comments = Flow_ast_utils.mk_comments_opt () }) + | T.Array t -> mk_literal_expr (T.Tuple [t; t; t; t; t]) | _ -> - E.Literal (Ast.Literal.({value = Null; raw = "null"})) + E.Literal + Ast.Literal.{ value = Null; raw = "null"; comments = Flow_ast_utils.mk_comments_opt () } (* Make an object literal based on its type *) and mk_obj_literal_expr (t : (Loc.t, Loc.t) T.Object.t) : (Loc.t, Loc.t) E.t' = let prop_init_list = - List.map (fun p -> - let open T.Object.Property in - match p with - | T.Object.Property (_, {key = k; - value = Init (_, ptype); - optional = o; - static = _; - proto = _; - _method = _; - variance = _}) -> (k, o, mk_literal_expr ptype) - | _ -> failwith "Unsupported property") T.Object.(t.properties) + Core_list.map + ~f:(fun p -> + T.Object.Property.( + match p with + | T.Object.Property + ( _, + { + key = k; + value = Init (_, ptype); + optional = o; + static = _; + proto = _; + _method = _; + variance = _; + } ) -> + (k, o, mk_literal_expr ptype) + | _ -> failwith "Unsupported property")) + T.Object.(t.properties) (* Randomly remove some optional properties *) (* |> List.filter (fun (_, o, _) -> (not o) || Random.bool ()) *) - |> List.map (fun (key, _, expr_t) -> - let open E.Object.Property in - E.Object.Property (Loc.none, Init { - key; - value = Loc.none, expr_t; - shorthand = false - }) - ) + |> Core_list.map ~f:(fun (key, _, expr_t) -> + E.Object.Property.( + E.Object.Property + (Loc.none, Init { key; value = (Loc.none, expr_t); shorthand = false }))) in - E.Object.(E.Object {properties = prop_init_list}) + E.Object.(E.Object { properties = prop_init_list; comments = Flow_ast_utils.mk_comments_opt () }) (* Check the expression is of the given type *) let mk_runtime_check (expr : 
(Loc.t, Loc.t) E.t') (etype : (Loc.t, Loc.t) T.t') : t = (* Make a variable decalration first *) - let callee = E.Identifier (Loc.none, "assert_type") in + let callee = E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, "assert_type")) in let arguments = - [E.Expression (Loc.none, expr); - E.Expression (Loc.none, (mk_literal_expr etype))] in - let call = let open E.Call in - E.Call {callee = (Loc.none, callee); targs = None; arguments} in - Stmt (S.Expression.(S.Expression {expression = (Loc.none, call); - directive = None})) + [E.Expression (Loc.none, expr); E.Expression (Loc.none, mk_literal_expr etype)] + in + let call = E.Call.(E.Call { callee = (Loc.none, callee); targs = None; arguments }) in + Stmt S.Expression.(S.Expression { expression = (Loc.none, call); directive = None }) (* Check the expression is of the given type *) let mk_check_opt_prop (expr : (Loc.t, Loc.t) E.t') (etype : (Loc.t, Loc.t) T.t') : t = (* Make a variable decalration first *) - let callee = E.Identifier (Loc.none, "check_opt_prop") in - + let callee = E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, "check_opt_prop")) in let rec get_obj (read : (Loc.t, Loc.t) E.t') (acc : (Loc.t, Loc.t) E.t' list) = - let open E.Member in - match read with - | E.Member {_object = (_, obj); - property = _; - computed = _} -> get_obj obj (obj :: acc) - | _ -> List.rev acc in - + E.Member.( + match read with + | E.Member { _object = (_, obj); property = _ } -> get_obj obj (obj :: acc) + | _ -> List.rev acc) + in (* We want to make sure the parent is not undefined *) let parent_array = let elements = - (get_obj expr []) - |> List.map (fun e -> Some (E.Expression (Loc.none, e))) in - E.Array.(E.Array {elements}) in + get_obj expr [] |> Core_list.map ~f:(fun e -> Some (E.Expression (Loc.none, e))) + in + E.Array.(E.Array { elements; comments = Flow_ast_utils.mk_comments_opt () }) + in let arguments = - [E.Expression (Loc.none, parent_array); - E.Expression (Loc.none, expr); - E.Expression (Loc.none, (mk_literal_expr etype))] in - let call = let open E.Call in - E.Call {callee = (Loc.none, callee); targs = None; arguments} in - Stmt (S.Expression.(S.Expression {expression = (Loc.none, call); - directive = None})) + [ + E.Expression (Loc.none, parent_array); + E.Expression (Loc.none, expr); + E.Expression (Loc.none, mk_literal_expr etype); + ] + in + let call = E.Call.(E.Call { callee = (Loc.none, callee); targs = None; arguments }) in + Stmt S.Expression.(S.Expression { expression = (Loc.none, call); directive = None }) (* ESSENTIAL: functions for making syntax *) let mk_expr_stmt (expr : (Loc.t, Loc.t) E.t') : (Loc.t, Loc.t) S.t' = - S.Expression.(S.Expression {expression = (Loc.none, expr); - directive = None}) + S.Expression.(S.Expression { expression = (Loc.none, expr); directive = None }) let mk_ret_stmt (expr : (Loc.t, Loc.t) E.t') : t = - Stmt (S.Return.(S.Return {argument = Some (Loc.none, expr)})) + Stmt + (S.Return + { S.Return.argument = Some (Loc.none, expr); comments = Flow_ast_utils.mk_comments_opt () }) let mk_func_def (fname : string) @@ -142,128 +162,164 @@ let mk_func_def (ptype : (Loc.t, Loc.t) T.t') (body : t list) (rtype : (Loc.t, Loc.t) T.t') : t = - (* Add a runtime check for the parameter *) - let body = body @ (match ptype with - | T.Function _ -> [] - | _ -> [(mk_runtime_check (E.Identifier (Loc.none, pname)) ptype)]) in - let body = - let open S.Block in - let stmt_list = List.fold_left (fun acc s -> match s with - | Stmt st -> (Loc.none, st) :: acc - | Expr e -> (Loc.none, (mk_expr_stmt e)) :: 
acc - | Empty -> acc) [] body in - {body = stmt_list} in - - let param = let open P.Identifier in - (Loc.none, P.Identifier {name = (Loc.none, pname); - annot = Some (Loc.none, (Loc.none, ptype)); - optional = false}) in - - let func = let open Ast.Function in - {id = Some (Loc.none, fname); - params = (Loc.none, { Params.params = [param]; rest = None }); - body = Ast.Function.BodyBlock (Loc.none, body); - async = false; - generator = false; - predicate = None; - expression = false; - return = Ast.Function.Available (Loc.none, (Loc.none, rtype)); - tparams = None} in + body + @ + match ptype with + | T.Function _ -> [] + | _ -> + [mk_runtime_check (E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname))) ptype] + in + let body = + S.Block.( + let stmt_list = + List.fold_left + (fun acc s -> + match s with + | Stmt st -> (Loc.none, st) :: acc + | Expr e -> (Loc.none, mk_expr_stmt e) :: acc + | Empty -> acc) + [] + body + in + { body = stmt_list }) + in + let param = + ( Loc.none, + { + Ast.Function.Param.argument = + ( Loc.none, + P.Identifier + { + P.Identifier.name = Flow_ast_utils.ident_of_source (Loc.none, pname); + annot = T.Available (Loc.none, (Loc.none, ptype)); + optional = false; + } ); + default = None; + } ) + in + let func = + Ast.Function. + { + id = Some (Flow_ast_utils.ident_of_source (Loc.none, fname)); + params = (Loc.none, { Params.params = [param]; rest = None }); + body = Ast.Function.BodyBlock (Loc.none, body); + async = false; + generator = false; + predicate = None; + return = T.Available (Loc.none, (Loc.none, rtype)); + tparams = None; + sig_loc = Loc.none; + } + in Stmt (S.FunctionDeclaration func) let mk_func_call (fid : (Loc.t, Loc.t) E.t') (param : (Loc.t, Loc.t) E.t') : t = - Expr (E.Call.(E.Call {callee = (Loc.none, fid); - targs = None; - arguments = [E.Expression (Loc.none, param)]})) + Expr + E.Call.( + E.Call + { callee = (Loc.none, fid); targs = None; arguments = [E.Expression (Loc.none, param)] }) -let mk_literal (t : (Loc.t, Loc.t) T.t') : t = match t with +let mk_literal (t : (Loc.t, Loc.t) T.t') : t = + match t with | T.Number -> - let lit = Ast.Literal.({value = Number 1.1; raw = "1.1"}) in + let lit = + Ast.Literal.{ value = Number 1.1; raw = "1.1"; comments = Flow_ast_utils.mk_comments_opt () } + in Expr (E.Literal lit) | T.String -> - let lit = Ast.Literal.({value = String "foo"; raw = "\"foo\""}) in + let lit = + Ast.Literal. + { value = String "foo"; raw = "\"foo\""; comments = Flow_ast_utils.mk_comments_opt () } + in Expr (E.Literal lit) | T.Boolean -> - let lit = Ast.Literal.({value = Boolean false; raw = "false"}) in + let lit = + Ast.Literal. 
+ { value = Boolean false; raw = "false"; comments = Flow_ast_utils.mk_comments_opt () } + in Expr (E.Literal lit) | _ -> failwith "Unsupported" -let mk_prop_read - (obj_name : string) - (prop_name : string) : t = - let open E.Member in - Expr (E.Member {_object = (Loc.none, E.Identifier (Loc.none, obj_name)); - property = PropertyIdentifier (Loc.none, prop_name); - computed = false}) +let mk_prop_read (obj_name : string) (prop_name : string) : t = + E.Member.( + Expr + (E.Member + { + _object = (Loc.none, E.Identifier (Flow_ast_utils.ident_of_source (Loc.none, obj_name))); + property = PropertyIdentifier (Flow_ast_utils.ident_of_source (Loc.none, prop_name)); + })) -let mk_prop_write - (oname : string) - (pname : string) - (expr : (Loc.t, Loc.t) E.t') : t = - let read = match mk_prop_read oname pname with +let mk_prop_write (oname : string) (pname : string) (expr : (Loc.t, Loc.t) E.t') : t = + let read = + match mk_prop_read oname pname with | Expr e -> e - | _ -> failwith "This has to be an expression" in + | _ -> failwith "This has to be an expression" + in let left = P.Expression (Loc.none, read) in let right = expr in let assign = - let open E.Assignment in - E.Assignment {operator = Assign; - left = (Loc.none, left); - right = (Loc.none, right)} in + E.Assignment.( + E.Assignment { operator = None; left = (Loc.none, left); right = (Loc.none, right) }) + in Stmt (mk_expr_stmt assign) let mk_vardecl ?etype (vname : string) (expr : (Loc.t, Loc.t) E.t') : t = (* Make an identifier *) - let t = match etype with - | None -> None - | Some t -> Some (Loc.none, (Loc.none, t)) in - - let id = let open P.Identifier in - (Loc.none, P.Identifier - { name = (Loc.none, vname); - annot = t; - optional = false}) in - + let t = + match etype with + | None -> T.Missing Loc.none + | Some t -> T.Available (Loc.none, (Loc.none, t)) + in + let id = + P.Identifier. 
+ ( Loc.none, + P.Identifier + { name = Flow_ast_utils.ident_of_source (Loc.none, vname); annot = t; optional = false } + ) + in (* get the expression and its dependencies *) let init = expr in - (* Make a var declaration *) - let decl = let open S.VariableDeclaration.Declarator in - (Loc.none, {id; init = Some (Loc.none, init)}) in - let var_decl = let open S.VariableDeclaration in - {declarations = [decl]; kind = Var} in - + let decl = S.VariableDeclaration.Declarator.(Loc.none, { id; init = Some (Loc.none, init) }) in + let var_decl = S.VariableDeclaration.{ declarations = [decl]; kind = Var } in Stmt (S.VariableDeclaration var_decl) let mk_obj_lit (plist : (string * ((Loc.t, Loc.t) E.t' * (Loc.t, Loc.t) T.t')) list) : t = - let props = List.map (fun p -> - let pname = fst p in - let expr = fst (snd p) in - let open E.Object.Property in - E.Object.Property (Loc.none, Init { - key = Identifier (Loc.none, pname); - value = Loc.none, expr; - shorthand = false - }) - ) plist in - let open E.Object in - Expr (E.Object {properties = props}) + let props = + Core_list.map + ~f:(fun p -> + let pname = fst p in + let expr = fst (snd p) in + E.Object.Property.( + E.Object.Property + ( Loc.none, + Init + { + key = Identifier (Flow_ast_utils.ident_of_source (Loc.none, pname)); + value = (Loc.none, expr); + shorthand = false; + } ))) + plist + in + E.Object.(Expr (E.Object { properties = props; comments = Flow_ast_utils.mk_comments_opt () })) let combine_syntax (prog : t list) : string = String.concat "" - ((List.filter (fun c -> match c with - | Stmt _ -> true - | Expr (E.Call _) -> true - | _ -> false) prog) - |> (List.map (fun c -> match c with - | Empty -> failwith "This cannot be empty" - | Stmt _ -> c - | Expr e -> - let open S.Expression in - Stmt - (S.Expression {expression = (Loc.none, e); - directive = None}))) - |> List.rev |> (List.map str_of_syntax)) + ( List.filter + (fun c -> + match c with + | Stmt _ -> true + | Expr (E.Call _) -> true + | _ -> false) + prog + |> Core_list.map ~f:(fun c -> + match c with + | Empty -> failwith "This cannot be empty" + | Stmt _ -> c + | Expr e -> + S.Expression.(Stmt (S.Expression { expression = (Loc.none, e); directive = None }))) + |> List.rev + |> Core_list.map ~f:str_of_syntax ) diff --git a/tests/abstract-locations-cycle/.flowconfig b/tests/abstract-locations-cycle/.flowconfig new file mode 100644 index 00000000000..5f7fd4cf99b --- /dev/null +++ b/tests/abstract-locations-cycle/.flowconfig @@ -0,0 +1,6 @@ +[options] +experimental.types_first=true +experimental.abstract_locations=true + +[lints] +all=error diff --git a/tests/abstract-locations-cycle/abstract-locations-cycle.exp b/tests/abstract-locations-cycle/abstract-locations-cycle.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ b/tests/abstract-locations-cycle/abstract-locations-cycle.exp @@ -0,0 +1 @@ +Found 0 errors diff --git a/tests/abstract-locations-cycle/cycle1.js b/tests/abstract-locations-cycle/cycle1.js new file mode 100644 index 00000000000..f31e133e987 --- /dev/null +++ b/tests/abstract-locations-cycle/cycle1.js @@ -0,0 +1,10 @@ +// @flow + +import {foo} from './cycle2'; +import type {Bar} from './cycle2'; + +export class Foo {} + +const z: Foo = foo; + +(z: Bar); diff --git a/tests/abstract-locations-cycle/cycle2.js b/tests/abstract-locations-cycle/cycle2.js new file mode 100644 index 00000000000..febf2bd23ba --- /dev/null +++ b/tests/abstract-locations-cycle/cycle2.js @@ -0,0 +1,7 @@ +// @flow + +import {Foo} from './cycle1'; + +export const foo = new 
Foo(); + +export type Bar = Foo; diff --git a/tests/abstract-locations-ide-services/.flowconfig b/tests/abstract-locations-ide-services/.flowconfig new file mode 100644 index 00000000000..7b3b367b107 --- /dev/null +++ b/tests/abstract-locations-ide-services/.flowconfig @@ -0,0 +1,5 @@ +[options] + +experimental.well_formed_exports=true +experimental.types_first=true +experimental.abstract_locations=true diff --git a/tests/type-at-pos/.testconfig b/tests/abstract-locations-ide-services/.testconfig similarity index 100% rename from tests/type-at-pos/.testconfig rename to tests/abstract-locations-ide-services/.testconfig diff --git a/tests/abstract-locations-ide-services/abstract-locations-ide-services.exp b/tests/abstract-locations-ide-services/abstract-locations-ide-services.exp new file mode 100644 index 00000000000..caf1d2857f2 --- /dev/null +++ b/tests/abstract-locations-ide-services/abstract-locations-ide-services.exp @@ -0,0 +1,252 @@ +Get-def imported class: +{"path":"foo.js","line":3,"endline":3,"start":14,"end":16} +Get-def class member: +{"path":"foo.js","line":4,"endline":4,"start":3,"end":5} +Find-refs class property: +{ + "kind":"symbol-found", + "name":"foo", + "locs":[ + { + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":5,"offset":71}, + "end":{"line":7,"column":7,"offset":74} + }, + { + "source":"foo.js", + "type":"SourceFile", + "start":{"line":4,"column":3,"offset":31}, + "end":{"line":4,"column":5,"offset":34} + } + ] +} +Autocomplete class property: +{ + "result":[ + { + "name":"foo", + "type":"() => void", + "func_details":{"return_type":"void","params":[]}, + "path":"foo.js", + "line":4, + "endline":4, + "start":3, + "end":16 + } + ] +} +dump types: +[ + { + "type":"class Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":3,"column":9,"offset":18}, + "end":{"line":3,"column":11,"offset":21} + }, + "path":"bar.js", + "line":3, + "endline":3, + "start":9, + "end":11 + }, + { + "type":"Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":7,"offset":44}, + "end":{"line":5,"column":9,"offset":47} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":7, + "end":9 + }, + { + "type":"Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":7,"offset":44}, + "end":{"line":5,"column":14,"offset":52} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":7, + "end":14 + }, + { + "type":"Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":12,"offset":49}, + "end":{"line":5,"column":14,"offset":52} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":12, + "end":14 + }, + { + "type":"class Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":12,"offset":49}, + "end":{"line":5,"column":14,"offset":52} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":12, + "end":14 + }, + { + "type":"Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":18,"offset":55}, + "end":{"line":5,"column":26,"offset":64} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":18, + "end":26 + }, + { + "type":"class Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":22,"offset":59}, + "end":{"line":5,"column":24,"offset":62} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":22, + "end":24 + 
}, + { + "type":"class Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":5,"column":22,"offset":59}, + "end":{"line":5,"column":24,"offset":62} + }, + "path":"bar.js", + "line":5, + "endline":5, + "start":22, + "end":24 + }, + { + "type":"Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":67}, + "end":{"line":7,"column":3,"offset":70} + }, + "path":"bar.js", + "line":7, + "endline":7, + "start":1, + "end":3 + }, + { + "type":"Foo", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":67}, + "end":{"line":7,"column":3,"offset":70} + }, + "path":"bar.js", + "line":7, + "endline":7, + "start":1, + "end":3 + }, + { + "type":"() => void", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":67}, + "end":{"line":7,"column":7,"offset":74} + }, + "path":"bar.js", + "line":7, + "endline":7, + "start":1, + "end":7 + }, + { + "type":"void", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":67}, + "end":{"line":7,"column":9,"offset":76} + }, + "path":"bar.js", + "line":7, + "endline":7, + "start":1, + "end":9 + }, + { + "type":"() => void", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":5,"offset":71}, + "end":{"line":7,"column":7,"offset":74} + }, + "path":"bar.js", + "line":7, + "endline":7, + "start":5, + "end":7 + } +] +type-at-pos: +{ + "type":"() => void", + "reasons":[], + "loc":{ + "source":"bar.js", + "type":"SourceFile", + "start":{"line":7,"column":5,"offset":71}, + "end":{"line":7,"column":7,"offset":74} + }, + "path":"bar.js", + "line":7, + "endline":7, + "start":5, + "end":7 +} diff --git a/tests/abstract-locations-ide-services/bar.js b/tests/abstract-locations-ide-services/bar.js new file mode 100644 index 00000000000..f71f882c2d5 --- /dev/null +++ b/tests/abstract-locations-ide-services/bar.js @@ -0,0 +1,7 @@ +// @flow + +import {Foo} from './foo'; + +const foo: Foo = new Foo(); + +foo.foo(); diff --git a/tests/abstract-locations-ide-services/bar.js.txt b/tests/abstract-locations-ide-services/bar.js.txt new file mode 100644 index 00000000000..043b5cb53d8 --- /dev/null +++ b/tests/abstract-locations-ide-services/bar.js.txt @@ -0,0 +1,7 @@ +// @flow + +import {Foo} from './foo'; + +const foo: Foo = new Foo(); + +foo. 
diff --git a/tests/abstract-locations-ide-services/foo.js b/tests/abstract-locations-ide-services/foo.js new file mode 100644 index 00000000000..779ea82bd77 --- /dev/null +++ b/tests/abstract-locations-ide-services/foo.js @@ -0,0 +1,5 @@ +// @flow + +export class Foo { + foo(): void {} +} diff --git a/tests/abstract-locations-ide-services/test.sh b/tests/abstract-locations-ide-services/test.sh new file mode 100755 index 00000000000..a25da84d4d3 --- /dev/null +++ b/tests/abstract-locations-ide-services/test.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +printf "Get-def imported class:\n" +assert_ok "$FLOW" get-def bar.js 5 23 --strip-root --pretty + +printf "Get-def class member:\n" +assert_ok "$FLOW" get-def bar.js 7 6 --strip-root --pretty + +printf "Find-refs class property:\n" +assert_ok "$FLOW" find-refs --global foo.js 4 4 --strip-root --pretty + +printf "Autocomplete class property:\n" +assert_ok "$FLOW" autocomplete bar.js 7 5 --strip-root --pretty < bar.js.txt + +printf "dump types:\n" +assert_ok "$FLOW" dump-types bar.js --strip-root --pretty + +printf "type-at-pos:\n" +assert_ok "$FLOW" type-at-pos bar.js 7 6 --strip-root --pretty diff --git a/tests/abstract-locations-minimal-merge/.flowconfig b/tests/abstract-locations-minimal-merge/.flowconfig new file mode 100644 index 00000000000..d618201c728 --- /dev/null +++ b/tests/abstract-locations-minimal-merge/.flowconfig @@ -0,0 +1,3 @@ +[options] +experimental.types_first=true +experimental.abstract_locations=true diff --git a/tests/abstract-locations-minimal-merge/.testconfig b/tests/abstract-locations-minimal-merge/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/abstract-locations-minimal-merge/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/abstract-locations-minimal-merge/abstract-locations-minimal-merge.exp b/tests/abstract-locations-minimal-merge/abstract-locations-minimal-merge.exp new file mode 100644 index 00000000000..5996fdf35c1 --- /dev/null +++ b/tests/abstract-locations-minimal-merge/abstract-locations-minimal-merge.exp @@ -0,0 +1,38 @@ +No errors! + +Changing the exported type should produce downstream errors. +We should recheck all files, but should not remerge files that are not recursive sig dependents. +Error ------------------------------------------------------------------------------------------- impl-dependent.js:3:30 + +Cannot return empty string because string [1] is incompatible with number [2]. + + impl-dependent.js:3:30 + 3| function bar(): Foo { return ''; } + ^^ [1] + +References: + impl-dependent.js:3:17 + 3| function bar(): Foo { return ''; } + ^^^ [2] + + +Error -------------------------------------------------------------------------------------------- sig-dependent.js:3:37 + +Cannot return empty string because string [1] is incompatible with number [2]. 
+ + sig-dependent.js:3:37 + 3| export function bar(): Foo { return ''; } + ^^ [1] + +References: + sig-dependent.js:3:24 + 3| export function bar(): Foo { return ''; } + ^^^ [2] + + + +Found 2 errors + +========Skipping stats======== +Merge skipped 1 of 5 modules +Check will skip 1 of 5 files diff --git a/tests/abstract-locations-minimal-merge/foo.js b/tests/abstract-locations-minimal-merge/foo.js new file mode 100644 index 00000000000..33b3f31aba5 --- /dev/null +++ b/tests/abstract-locations-minimal-merge/foo.js @@ -0,0 +1 @@ +export type Foo = string; diff --git a/tests/abstract-locations-minimal-merge/impl-dep-of-sig-dep.js b/tests/abstract-locations-minimal-merge/impl-dep-of-sig-dep.js new file mode 100644 index 00000000000..1b7876059c6 --- /dev/null +++ b/tests/abstract-locations-minimal-merge/impl-dep-of-sig-dep.js @@ -0,0 +1,3 @@ +import {bar} from './sig-dependent'; + +bar(); diff --git a/tests/abstract-locations-minimal-merge/impl-dependent.js b/tests/abstract-locations-minimal-merge/impl-dependent.js new file mode 100644 index 00000000000..9d9459b52ac --- /dev/null +++ b/tests/abstract-locations-minimal-merge/impl-dependent.js @@ -0,0 +1,7 @@ +import type {Foo} from './foo'; + +function bar(): Foo { return ''; } + +export type Bar = string; + +export function f(): string { return ''; } diff --git a/tests/abstract-locations-minimal-merge/sig-dep-of-impl-dep.js b/tests/abstract-locations-minimal-merge/sig-dep-of-impl-dep.js new file mode 100644 index 00000000000..9ae4b976031 --- /dev/null +++ b/tests/abstract-locations-minimal-merge/sig-dep-of-impl-dep.js @@ -0,0 +1,3 @@ +import type {Bar} from './impl-dependent'; + +export function f(): Bar { return ''; } diff --git a/tests/abstract-locations-minimal-merge/sig-dependent.js b/tests/abstract-locations-minimal-merge/sig-dependent.js new file mode 100644 index 00000000000..2b29b737b8f --- /dev/null +++ b/tests/abstract-locations-minimal-merge/sig-dependent.js @@ -0,0 +1,3 @@ +import type {Foo} from './foo'; + +export function bar(): Foo { return ''; } diff --git a/tests/abstract-locations-minimal-merge/test.sh b/tests/abstract-locations-minimal-merge/test.sh new file mode 100644 index 00000000000..f75465584dc --- /dev/null +++ b/tests/abstract-locations-minimal-merge/test.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +assert_ok "$FLOW" status --strip-root + +# We should drive merge based on the sig dependency graph, not the implementation dependency graph. +# Therefore, only `foo.js` and `sig-dependent.js` should be considered for merge. At the time that +# this test is being written, we consider all five files for merge. This results in unnecessary +# work. + +printf "\\nChanging the exported type should produce downstream errors.\\n" +printf "We should recheck all files, but should not remerge files that are not recursive sig " +printf "dependents.\\n" +sed -i -e 's/string/number/' foo.js +assert_ok "$FLOW" force-recheck foo.js +assert_errors "$FLOW" status --strip-root +show_skipping_stats_types_first "$FLOW_LOG_FILE" + +assert_ok "$FLOW" stop diff --git a/tests/abstract-locations-saved-state/.flowconfig b/tests/abstract-locations-saved-state/.flowconfig new file mode 100644 index 00000000000..598f4bb9600 --- /dev/null +++ b/tests/abstract-locations-saved-state/.flowconfig @@ -0,0 +1,8 @@ +[options] + +experimental.well_formed_exports=true +experimental.types_first=true +experimental.abstract_locations=true + +; Since we run all tests with saved state as well, we don't need any specific +; config to exercise saved state with this test. 
diff --git a/tests/abstract-locations-saved-state/abstract-locations-saved-state.exp b/tests/abstract-locations-saved-state/abstract-locations-saved-state.exp new file mode 100644 index 00000000000..fe0f8cf2a99 --- /dev/null +++ b/tests/abstract-locations-saved-state/abstract-locations-saved-state.exp @@ -0,0 +1,28 @@ +Error ------------------------------------------------------------------------------------------------------ foo.js:3:21 + +Missing type annotation for `x`. + + 3| export function foo(x) { return x; } + ^ + + +Error ------------------------------------------------------------------------------------------------------ foo.js:3:21 + +Failed to build a typed interface for this module. The exports of this module must be annotated with types. Missing type +annotation at array pattern: (`signature-verification-failure`) + + 3| export function foo(x) { return x; } + ^ + + +Error ------------------------------------------------------------------------------------------------------ foo.js:3:23 + +Failed to build a typed interface for this module. The exports of this module must be annotated with types. Missing type +annotation at function return: (`signature-verification-failure`) + + 3| export function foo(x) { return x; } + + + + +Found 3 errors diff --git a/tests/abstract-locations-saved-state/foo.js b/tests/abstract-locations-saved-state/foo.js new file mode 100644 index 00000000000..801c0f9210a --- /dev/null +++ b/tests/abstract-locations-saved-state/foo.js @@ -0,0 +1,3 @@ +// @flow + +export function foo(x) { return x; } diff --git a/tests/abstract-locations-skip-recheck/.flowconfig b/tests/abstract-locations-skip-recheck/.flowconfig new file mode 100644 index 00000000000..14540c0b6bd --- /dev/null +++ b/tests/abstract-locations-skip-recheck/.flowconfig @@ -0,0 +1,7 @@ +[ignore] +/tmp* + +[options] +experimental.types_first=true +experimental.abstract_locations=true +experimental.allow_skip_direct_dependents=true diff --git a/tests/abstract-locations-skip-recheck/.testconfig b/tests/abstract-locations-skip-recheck/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/abstract-locations-skip-recheck/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/abstract-locations-skip-recheck/abstract-locations-skip-recheck.exp b/tests/abstract-locations-skip-recheck/abstract-locations-skip-recheck.exp new file mode 100644 index 00000000000..9a02c4805bc --- /dev/null +++ b/tests/abstract-locations-skip-recheck/abstract-locations-skip-recheck.exp @@ -0,0 +1,113 @@ + +Server should start in types-first mode +Error ----------------------------------------------------------------------------------------------------- test2.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + test2.js:3:2 + 3| (require('./test1'): string); + ^^^^^^^^^^^^^^^^^^ + +References: + test1.js:5:18 + 5| module.exports = 0; + ^ [1] + test2.js:3:22 + 3| (require('./test1'): string); + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test3.js:5:2 + +Cannot cast `0` to string because number [1] is incompatible with string [2]. 
+ + test3.js:5:2 + 5| (0: string); + ^ [1] + +References: + test3.js:5:5 + 5| (0: string); + ^^^^^^ [2] + + + +Found 2 errors + +Adding a line should cause minimal rechecking (but preserve errors) +Error ----------------------------------------------------------------------------------------------------- test2.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + test2.js:3:2 + 3| (require('./test1'): string); + ^^^^^^^^^^^^^^^^^^ + +References: + test1.js:6:18 + 6| module.exports = 0; + ^ [1] + test2.js:3:22 + 3| (require('./test1'): string); + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test3.js:5:2 + +Cannot cast `0` to string because number [1] is incompatible with string [2]. + + test3.js:5:2 + 5| (0: string); + ^ [1] + +References: + test3.js:5:5 + 5| (0: string); + ^^^^^^ [2] + + + +Found 2 errors + +========Skipping stats======== +Merge skipped 2 of 3 modules +Check will skip 2 of 3 files + +Adding a comment should cause minimal rechecking (but preserve errors) +Error ----------------------------------------------------------------------------------------------------- test2.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + test2.js:3:2 + 3| (require('./test1'): string); + ^^^^^^^^^^^^^^^^^^ + +References: + test1.js:6:18 + 6| module.exports = 0; + ^ [1] + test2.js:3:22 + 3| (require('./test1'): string); + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test3.js:5:2 + +Cannot cast `0` to string because number [1] is incompatible with string [2]. + + test3.js:5:2 + 5| (0: string); + ^ [1] + +References: + test3.js:5:5 + 5| (0: string); + ^^^^^^ [2] + + + +Found 2 errors + +========Skipping stats======== +Merge skipped 2 of 3 modules +Check will skip 2 of 3 files diff --git a/tests/abstract-locations-skip-recheck/test.sh b/tests/abstract-locations-skip-recheck/test.sh new file mode 100644 index 00000000000..854f9305228 --- /dev/null +++ b/tests/abstract-locations-skip-recheck/test.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +mkdir tmp +cp test1.js tmp/ + +printf "\\nServer should start in types-first mode\\n" +start_flow . 
+assert_errors "$FLOW" status --strip-root + +printf "\\nAdding a line should cause minimal rechecking (but preserve errors)\\n" +cp tmp1/test1.js test1.js +assert_ok "$FLOW" force-recheck --focus test1.js +assert_errors "$FLOW" status --strip-root +show_skipping_stats_types_first "$FLOW_LOG_FILE" + +printf "\\nAdding a comment should cause minimal rechecking (but preserve errors)\\n" +cp tmp2/test1.js test1.js +assert_ok "$FLOW" force-recheck --focus test1.js +assert_errors "$FLOW" status --strip-root +show_skipping_stats_types_first "$FLOW_LOG_FILE" + +assert_ok "$FLOW" stop + +cp tmp/test1.js test1.js +rm -rf tmp diff --git a/tests/abstract-locations-skip-recheck/test1.js b/tests/abstract-locations-skip-recheck/test1.js new file mode 100644 index 00000000000..df6f0124243 --- /dev/null +++ b/tests/abstract-locations-skip-recheck/test1.js @@ -0,0 +1,5 @@ +// @flow + +export type T = string; + +module.exports = 0; diff --git a/tests/abstract-locations-skip-recheck/test2.js b/tests/abstract-locations-skip-recheck/test2.js new file mode 100644 index 00000000000..f260d69f12f --- /dev/null +++ b/tests/abstract-locations-skip-recheck/test2.js @@ -0,0 +1,5 @@ +// @flow + +(require('./test1'): string); +import type { T } from './test1'; +export type S = T; diff --git a/tests/abstract-locations-skip-recheck/test3.js b/tests/abstract-locations-skip-recheck/test3.js new file mode 100644 index 00000000000..37f308270f2 --- /dev/null +++ b/tests/abstract-locations-skip-recheck/test3.js @@ -0,0 +1,7 @@ +// @flow + +import type { S } from './test2'; + +(0: string); + +export type R = S; diff --git a/tests/abstract-locations-skip-recheck/tmp1/test1.js b/tests/abstract-locations-skip-recheck/tmp1/test1.js new file mode 100644 index 00000000000..76e71cf8a99 --- /dev/null +++ b/tests/abstract-locations-skip-recheck/tmp1/test1.js @@ -0,0 +1,6 @@ +// @flow + +export type T = string; + + +module.exports = 0; diff --git a/tests/abstract-locations-skip-recheck/tmp2/test1.js b/tests/abstract-locations-skip-recheck/tmp2/test1.js new file mode 100644 index 00000000000..3e8be60d701 --- /dev/null +++ b/tests/abstract-locations-skip-recheck/tmp2/test1.js @@ -0,0 +1,6 @@ +// @flow + +export type T = string; // + + +module.exports = 0; diff --git a/tests/abstract_locations_object_deopt/.flowconfig b/tests/abstract_locations_object_deopt/.flowconfig new file mode 100644 index 00000000000..a951ff372b7 --- /dev/null +++ b/tests/abstract_locations_object_deopt/.flowconfig @@ -0,0 +1,15 @@ +[ignore] + +[libs] + +[options] + +experimental.well_formed_exports=true +experimental.types_first=true +experimental.abstract_locations=true + +[lints] + +[strict] + +[version] diff --git a/tests/abstract_locations_object_deopt/.testconfig b/tests/abstract_locations_object_deopt/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/abstract_locations_object_deopt/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/abstract_locations_object_deopt/a.js b/tests/abstract_locations_object_deopt/a.js new file mode 100644 index 00000000000..9569208c04a --- /dev/null +++ b/tests/abstract_locations_object_deopt/a.js @@ -0,0 +1,19 @@ +// @flow + +const bar = require('./b.js'); + +type Foo = {}; + +// Checked naively, we will compute two different object types for Bar in the +// merge and check phases of Flow, and so will not hit the objT -> objT fast +// path when checking. 
This regression test ensures that whatever scheme we +// use to assign ids to object property maps assigns Bar the same id in both +// merge and check, rather than computing a new one unnecessarily each time. + +export type Bar = {| + bar: () => { + foo?: Foo => Foo, + }, +|}; + +(bar: Bar); diff --git a/tests/abstract_locations_object_deopt/abstract_locations_object_deopt.exp b/tests/abstract_locations_object_deopt/abstract_locations_object_deopt.exp new file mode 100644 index 00000000000..6be85edd1d0 --- /dev/null +++ b/tests/abstract_locations_object_deopt/abstract_locations_object_deopt.exp @@ -0,0 +1,4 @@ + +Server should start in verbose mode +No errors! +ObjT ~> ObjT fast path: yes diff --git a/tests/abstract_locations_object_deopt/b.js b/tests/abstract_locations_object_deopt/b.js new file mode 100644 index 00000000000..a83f6459ffa --- /dev/null +++ b/tests/abstract_locations_object_deopt/b.js @@ -0,0 +1,7 @@ +// @flow + +import type {Bar} from './a.js'; + +declare var bar: Bar; + +module.exports = bar; diff --git a/tests/abstract_locations_object_deopt/test.sh b/tests/abstract_locations_object_deopt/test.sh new file mode 100644 index 00000000000..a0de5b6d63f --- /dev/null +++ b/tests/abstract_locations_object_deopt/test.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +show_obj_obj_fast_path() { + grep "ObjT ~> ObjT fast path" $1 | tail -n 1 +} + +printf "\\nServer should start in verbose mode\\n" +start_flow . --verbose +assert_ok "$FLOW" status --strip-root +show_obj_obj_fast_path "$FLOW_LOG_FILE" diff --git a/tests/any_propagation/.flowconfig b/tests/any_propagation/.flowconfig new file mode 100644 index 00000000000..de38d19537d --- /dev/null +++ b/tests/any_propagation/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false diff --git a/tests/any_propagation/any_propagation.exp b/tests/any_propagation/any_propagation.exp new file mode 100644 index 00000000000..bca2b91a792 --- /dev/null +++ b/tests/any_propagation/any_propagation.exp @@ -0,0 +1,48 @@ +Error --------------------------------------------------------------------------------------------------- arrays.js:8:14 + +Cannot call `noop` with `new_arr` bound to `arr` because null or undefined [1] is incompatible with string [2] in array +element of array element. + + arrays.js:8:14 + 8| noop(new_arr); + ^^^^^^^ + +References: + arrays.js:4:31 + 4| declare var arr : Array>; + ^^^^^^^ [1] + arrays.js:8:6 + 8| noop(new_arr); + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------ function.js:10:10 + +Cannot assign `42` to `o[x]` because number [1] is incompatible with string [2]. + + function.js:10:10 + 10| o[x] = 42; + ^^ [1] + +References: + function.js:2:27 + 2| declare var o: {[string]: string}; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------ function.js:16:10 + +Cannot assign `42` to `o[x]` because number [1] is incompatible with string [2]. 
+ + function.js:16:10 + 16| o[x] = 42; + ^^ [1] + +References: + function.js:2:27 + 2| declare var o: {[string]: string}; + ^^^^^^ [2] + + + +Found 3 errors diff --git a/tests/any_propagation/arrays.js b/tests/any_propagation/arrays.js new file mode 100644 index 00000000000..fa3f693fb0b --- /dev/null +++ b/tests/any_propagation/arrays.js @@ -0,0 +1,8 @@ +//@flow + +declare var noop : (arr: Array>) => void; +declare var arr : Array>; +let new_arr = []; +arr.forEach(x => new_arr.push(x)); +new_arr = new_arr.filter(Boolean); +noop(new_arr); diff --git a/tests/any_propagation/function.js b/tests/any_propagation/function.js new file mode 100644 index 00000000000..9b16af2c7ba --- /dev/null +++ b/tests/any_propagation/function.js @@ -0,0 +1,18 @@ +// @flow +declare var o: {[string]: string}; +function f1(x) { + // no errors, f1 is never called so `x` is never constrained + o[x] = 42; +}; + +function f2(x) { + // error, cast below constrains `x`, so we see the set-elem error + o[x] = 42; +}; +(f2: (string) => void); + +function f3(x) { + // error, case to any also constrains `x`, so we see the set-elem error + o[x] = 42; +}; +(f3: any); diff --git a/tests/arith/arith.exp b/tests/arith/arith.exp index ada61fb7e06..d3431b182fb 100644 --- a/tests/arith/arith.exp +++ b/tests/arith/arith.exp @@ -120,9 +120,9 @@ Cannot add `"foo"` and `undefined` because undefined [1] is incompatible with st ^^^^^^^^^^^^^^^^^ References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [1] + Arith.js:60:13 + 60| str("foo" + undefined); // error + ^^^^^^^^^ [1] Arith.js:60:5 60| str("foo" + undefined); // error ^^^^^ [2] @@ -137,9 +137,9 @@ Cannot add `undefined` and `"foo"` because undefined [1] is incompatible with st ^^^^^^^^^^^^^^^^^ References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [1] + Arith.js:61:5 + 61| str(undefined + "foo"); // error + ^^^^^^^^^ [1] Arith.js:61:17 61| str(undefined + "foo"); // error ^^^^^ [2] @@ -411,7 +411,9 @@ References: Error -------------------------------------------------------------------------------------------------- generic.js:4:40 -Cannot add `a` and `b` because `A` [1] could either behave like a string or like a number. +Cannot add `a` and `b` because: + - `A` [1] could either behave like a string or like a number. + - `B` [2] could either behave like a string or like a number. generic.js:4:40 4| function f(a: A, b: B): A {return a + b; } // error @@ -421,11 +423,16 @@ References: generic.js:4:20 4| function f(a: A, b: B): A {return a + b; } // error ^ [1] + generic.js:4:26 + 4| function f(a: A, b: B): A {return a + b; } // error + ^ [2] Error -------------------------------------------------------------------------------------------------- generic.js:5:40 -Cannot add `b` and `a` because `B` [1] could either behave like a string or like a number. +Cannot add `b` and `a` because: + - `B` [1] could either behave like a string or like a number. + - `A` [2] could either behave like a string or like a number. generic.js:5:40 5| function f(a: A, b: B): A {return b + a; } // error @@ -435,11 +442,16 @@ References: generic.js:5:26 5| function f(a: A, b: B): A {return b + a; } // error ^ [1] + generic.js:5:20 + 5| function f(a: A, b: B): A {return b + a; } // error + ^ [2] Error -------------------------------------------------------------------------------------------------- generic.js:6:40 -Cannot add `a` and `b` because `A` [1] could either behave like a string or like a number. 
+Cannot add `a` and `b` because: + - `A` [1] could either behave like a string or like a number. + - `B` [2] could either behave like a string or like a number. generic.js:6:40 6| function f(a: A, b: B): B {return a + b; } // error @@ -449,11 +461,16 @@ References: generic.js:6:20 6| function f(a: A, b: B): B {return a + b; } // error ^ [1] + generic.js:6:26 + 6| function f(a: A, b: B): B {return a + b; } // error + ^ [2] Error -------------------------------------------------------------------------------------------------- generic.js:7:40 -Cannot add `b` and `a` because `B` [1] could either behave like a string or like a number. +Cannot add `b` and `a` because: + - `B` [1] could either behave like a string or like a number. + - `A` [2] could either behave like a string or like a number. generic.js:7:40 7| function f(a: A, b: B): B {return b + a; } // error @@ -463,6 +480,9 @@ References: generic.js:7:26 7| function f(a: A, b: B): B {return b + a; } // error ^ [1] + generic.js:7:20 + 7| function f(a: A, b: B): B {return b + a; } // error + ^ [2] Error ------------------------------------------------------------------------------------------------------ mult.js:5:5 @@ -658,12 +678,9 @@ Cannot compare undefined [1] to null [2]. relational.js:18:2 18| (undefined < null); // error - ^^^^^^^^^ + ^^^^^^^^^ [1] References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [1] relational.js:18:14 18| (undefined < null); // error ^^^^ [2] @@ -678,24 +695,24 @@ Cannot compare null [1] to undefined [2]. ^^^^ [1] References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [2] + relational.js:19:9 + 19| (null < undefined); // error + ^^^^^^^^^ [2] Error ----------------------------------------------------------------------------------------------- relational.js:20:2 -Cannot compare undefined [1] to undefined [1]. +Cannot compare undefined [1] to undefined [2]. relational.js:20:2 20| (undefined < undefined); // error - ^^^^^^^^^ + ^^^^^^^^^ [1] References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [1] + relational.js:20:14 + 20| (undefined < undefined); // error + ^^^^^^^^^ [2] -Found 48 errors +Found 52 errors diff --git a/tests/arraylib/array_lib.js b/tests/arraylib/array_lib.js index c04f301f999..381b1933e34 100644 --- a/tests/arraylib/array_lib.js +++ b/tests/arraylib/array_lib.js @@ -62,3 +62,54 @@ function of_test() { var upcastOkay: Array = Array.of("hello", "world"); var incompatibleTypeNotOkay: Array = Array.of(1, 2); } + +function flatMap_test() { + /* Adapted from the following source: + * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/flatMap + */ + function case1() { + let arr1 = [1, 2, 3, 4]; + + let arr2 = arr1.map(x => [x * 2]); // [[2], [4], [6], [8]] + + let arr3: Array = arr1.flatMap(x => [x * 2]); // [2, 4, 6, 8] + + // only one level is flattened + let arr4: Array> = arr1.flatMap(x => [[x * 2]]); // [[2], [4], [6], [8]] + } + function case2() { + let arr1 = ["it's Sunny in", "", "California"]; + + let arr2 = arr1.map(x => x.split(" ")); + // [["it's","Sunny","in"],[""],["California"]] + + let arr3: Array = arr1.flatMap(x => x.split(" ")); + // ["it's","Sunny","in", "", "California"] + } + function case3() { + // Let's say we want to remove all the negative numbers and split the odd numbers into an even number and a 1 + let arr1 = [5, 4, -3, 20, 17, -33, -4, 18]; + // |\ \ x | | \ x x | + // [4,1, 4, 20, 16, 1, 18] + + let arr2: Array = arr1.flatMap(n => + n < 0 ? [] : n % 2 == 0 ? 
[n] : [n - 1, 1] + ); + + // expected output: [4, 1, 4, 20, 16, 1, 18] + } + function case4() { + let arr1 = [5, 2, 3, 4]; + let arr2: Array = arr1.flatMap(n => (n < 0 ? [1, 2, 3] : "ok")); + + let arr3: $ReadOnlyArray = [5, 2, 3, 4]; + let arr4: Array = arr3.flatMap(n => (n < 0 ? [1, 2, 3] : "ok")); + } + function case5() { + let arr1: $ReadOnlyArray = [5, 2, 3, 4]; + let arr2: Array = arr1.flatMap(n => { + const r: $ReadOnlyArray = [1, 2, 3]; + return r; + }); + } +} diff --git a/tests/arraylib/issue_5182.js b/tests/arraylib/issue_5182.js new file mode 100644 index 00000000000..3169bad0d60 --- /dev/null +++ b/tests/arraylib/issue_5182.js @@ -0,0 +1,26 @@ +// @flow + +type ReturnType = {| + a: string +|}; + +// works +const a = [].reduce( + (p: ReturnType, e: string): ReturnType => { + // annotate return type + return { a: "" }; + }, + { a: "" } +); + +// works +const b = [].reduce( + (p: ReturnType, e: string) => { + // without annotation + return { a: "" }; + }, + { a: "" } +); + +(a: ReturnType); // ok +(b: ReturnType); // ok diff --git a/tests/arraylib/issue_6656.js b/tests/arraylib/issue_6656.js new file mode 100644 index 00000000000..bdf679742a3 --- /dev/null +++ b/tests/arraylib/issue_6656.js @@ -0,0 +1,4 @@ +// @flow + +const a = ["a", "b"].reduce(acc => acc * 2, 1.0); // works +const b = ["a", "b"].reduce(acc => acc * 2, (1.0: any)); // works diff --git a/tests/arraylib/issue_7680.js b/tests/arraylib/issue_7680.js new file mode 100644 index 00000000000..e3aeeb961a4 --- /dev/null +++ b/tests/arraylib/issue_7680.js @@ -0,0 +1,21 @@ +// @flow + +function workingGenericReduce( + obj: { array: Array }, + accumulator: V, + mapper: (V, T) => V +): V { + return obj.array.reduce(mapper, accumulator); +} + +function brokenGenericReduce( + obj: { array: Array }, + accumulator: V, + mapper: (V, T) => V +): V { + return obj.array.reduce( + // works + (a, v) => mapper(a, v), + accumulator + ); +} diff --git a/tests/arraylib/issue_7901.js b/tests/arraylib/issue_7901.js new file mode 100644 index 00000000000..1c7f9bac4d2 --- /dev/null +++ b/tests/arraylib/issue_7901.js @@ -0,0 +1,4 @@ +// @flow + +var und1: void = [0].reduce(acc => acc, undefined); // ok +var und2: void = [0].reduceRight(acc => acc, undefined); // ok diff --git a/tests/arrays/arrays.exp b/tests/arrays/arrays.exp index 960f1ba9fcf..eca5fd19204 100644 --- a/tests/arrays/arrays.exp +++ b/tests/arrays/arrays.exp @@ -15,22 +15,98 @@ References: ^^^^^^ [2] +Error -------------------------------------------------------------------------------------------------- isarray.js:4:31 + +Cannot assign `array` to `problem` because read-only array type [1] is incompatible with array type [2]. + + isarray.js:4:31 + 4| const problem: Array = array; // error + ^^^^^ + +References: + isarray.js:2:24 + 2| function hmm(array: Z) { + ^ [1] + isarray.js:4:20 + 4| const problem: Array = array; // error + ^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------- isarray.js:11:31 + +Cannot assign `array` to `problem` because read-only array type [1] is incompatible with array type [2]. 
+ + isarray.js:11:31 + 11| const problem: Array = array; // error + ^^^^^ + +References: + isarray.js:9:26 + 9| function hmm(array: Z): Z { + ^ [1] + isarray.js:11:20 + 11| const problem: Array = array; // error + ^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------- isarray.js:24:35 + +Cannot assign `array` to `problem` because read-only array type [1] is incompatible with array type [2]. + + isarray.js:24:35 + 24| const problem: Array = array; // error + ^^^^^ + +References: + isarray.js:22:21 + 22| function hmm(array: mixed) { + ^^^^^ [1] + isarray.js:24:20 + 24| const problem: Array = array; // error + ^^^^^^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------- numeric_elem.js:6:1 + +Cannot assign `0` to `arr[day]` because `Date` [1] is not an array index. + + numeric_elem.js:6:1 + 6| arr[day] = 0; + ^^^^^^^^ + +References: + numeric_elem.js:2:11 + 2| var day = new Date; + ^^^^^^^^ [1] + + Error ---------------------------------------------------------------------------------------------- numeric_elem.js:7:2 -Cannot cast `arr[day]` to string because number [1] is incompatible with string [2]. +Cannot get `arr[day]` because `Date` [1] is not an array index. numeric_elem.js:7:2 7| (arr[day]: string); // error: number ~> string ^^^^^^^^ References: - numeric_elem.js:6:12 - 6| arr[day] = 0; - ^ [1] - numeric_elem.js:7:12 - 7| (arr[day]: string); // error: number ~> string - ^^^^^^ [2] + numeric_elem.js:2:11 + 2| var day = new Date; + ^^^^^^^^ [1] + + +Error -------------------------------------------------------------------------------------------------- spread.js:64:22 + +Cannot assign array literal to `t2s` because array literal [1] has an arity of 3 but tuple type [2] has an arity of 4. 
+ + spread.js:64:22 + 64| let t2s: [1,2,3,4] = [...[t, t, t]]; // error tuple is only 3 long + ^^^^^^^^^^^^^^ [1] + +References: + spread.js:64:10 + 64| let t2s: [1,2,3,4] = [...[t, t, t]]; // error tuple is only 3 long + ^^^^^^^^^ [2] -Found 2 errors +Found 7 errors diff --git a/tests/arrays/isarray.js b/tests/arrays/isarray.js new file mode 100644 index 00000000000..e29025da50a --- /dev/null +++ b/tests/arrays/isarray.js @@ -0,0 +1,27 @@ +// @flow +function hmm(array: Z) { + if (Array.isArray(array)) { + const problem: Array = array; // error + } +} + +function coerce(t: T): U { + function hmm(array: Z): Z { + if (!Array.isArray(array)) throw new Error("Unreachable."); + const problem: Array = array; // error + if (array.length === 0) throw new Error("Unreachable."); + return problem[0]; + } + const result: Array<{ value: T }> = hmm([{ value: t }]); + if (Array.isArray(result)) throw new Error("Unreachable."); + return ((result: empty).value: U); +} +const twelve: number = coerce("twelve"); +twelve.toFixed(); + +function hmm(array: mixed) { + if (Array.isArray(array)) { + const problem: Array = array; // error + problem[1] = 0; + } +} diff --git a/tests/arrays/predicates.js b/tests/arrays/predicates.js new file mode 100644 index 00000000000..fc9decf578c --- /dev/null +++ b/tests/arrays/predicates.js @@ -0,0 +1,4 @@ +// @flow +declare function foo(x : number) : boolean; + +[1,2,3].filter(foo); diff --git a/tests/arrays/spread.js b/tests/arrays/spread.js new file mode 100644 index 00000000000..c46b7f797f3 --- /dev/null +++ b/tests/arrays/spread.js @@ -0,0 +1,72 @@ +// @flow + +declare function foo, TReturn>( + implementation?: (...args: TArguments) => TReturn, + ): (...args: TArguments) => TReturn + +type Config = {| + +control: () => {foo: 'a'}, + +areEqual: (control: Return, test: Return) => boolean, +|}; + +declare function bar(config: Config<{foo: 'a'}>) : void + +bar({ + control: () => ({foo: 'a'}), + areEqual : foo((control, test) => control.foo === test.foo), +}); + +let x = [3, 4]; +let y = [3, "a"]; +let z = ["a"]; + +([...x][0] : 3); +([...x][1] : 4); +([...x, ...z][0] : 3); +([...x, ...z][1] : 4); +([...x, ...z][2] : "a"); +([...x, ...y][0] : 3); +([...x, ...y][1] : 4); +([...x, ...y][2] : 3); +([...x, ...y][3] : "a"); +([...x, ...y, ...z][0] : 3); +([...x, ...y, ...z][1] : 4); +([...x, ...y, ...z][2] : 3); +([...x, ...y, ...z][3] : "a"); +([...x, ...y, ...z][4] : "a"); + +([...z, ...z][0] : "a"); +([...z, ...z][1] : "a"); + +([...x, ...[3, 4]][0] : 3); +([...x, ...[3, 4]][1] : 4); + +([...x, ...x][0] : 3); // error +([...x, ...x][1] : 4); // error + +([...x, ...x, ...y][0] : 3); // error +([...x, ...x, ...y][1] : 4); // error +([...x, ...x, 1][0] : 3); // error + +let three = 3; +let a = [three, three]; +let b = [three, 4]; +([...a, ...b][0] : 3); +([...a, ...b][1] : 3); +([...a, ...b][2] : 3); +([...a, ...b][3] : 4); + +declare function makeTvar(): T; + +let t = makeTvar(); +let ts: [1,2,3,4] = [...[t, t, t, t]]; + +let t2s: [1,2,3,4] = [...[t, t, t]]; // error tuple is only 3 long + +function foo(a: [1,2,3], b: [4,5,6]): [1,2,3,4,5,6] { + return [...a, ...b]; +} + +function bar(a: [1,2,3], b: [4,5,6]): [1,2,3,1,2,3] { + return [...a, ...a]; +} diff --git a/tests/arrays/spread_perf.js b/tests/arrays/spread_perf.js new file mode 100644 index 00000000000..5d0d5a2ecfc --- /dev/null +++ b/tests/arrays/spread_perf.js @@ -0,0 +1,12 @@ +// @flow strict + +// this file should not time out + +declare function useState(initialState: S): [S, (S) => void]; + +const [array, update] = 
useState([]); + +const add1 = x => update([...array, x]); +const add2 = x => update([...array, x]); +update([...array]); +update([...array]); diff --git a/tests/arrows/arrows.exp b/tests/arrows/arrows.exp index 48a86018a83..55f49460b32 100644 --- a/tests/arrows/arrows.exp +++ b/tests/arrows/arrows.exp @@ -45,8 +45,8 @@ References: arrows.js:7:36 7| images = images.sort((a, b) => (a.width - b.width) + ""); ^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /core.js:269:38 - 269| sort(compareFn?: (a: T, b: T) => number): Array; + /core.js:288:38 + 288| sort(compareFn?: (a: T, b: T) => number): Array; ^^^^^^ [2] diff --git a/tests/ast_tokens/ast_tokens.exp b/tests/ast_tokens/ast_tokens.exp index d61077c6838..b9048376020 100644 --- a/tests/ast_tokens/ast_tokens.exp +++ b/tests/ast_tokens/ast_tokens.exp @@ -2,7 +2,7 @@ "errors":[ { "loc":{"source":null,"start":{"line":14,"column":4},"end":{"line":14,"column":7}}, - "message":"Unexpected token var" + "message":"Unexpected token `var`" } ], "tokens":[ diff --git a/tests/async/async.exp b/tests/async/async.exp index 0f0821b1935..f826033462e 100644 --- a/tests/async/async.exp +++ b/tests/async/async.exp @@ -10,8 +10,8 @@ References: async.js:11:30 11| async function f1(): Promise { ^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -30,8 +30,8 @@ References: async.js:30:48 30| async function f4(p: Promise): Promise { ^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -100,8 +100,8 @@ References: async2.js:57:13 57| : Promise { // error, number != void ^^^^^^ [1] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [2] @@ -134,8 +134,8 @@ References: async_return_void.js:3:32 3| async function foo1(): Promise { ^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -151,8 +151,8 @@ References: async_return_void.js:7:32 7| async function foo2(): Promise { ^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -171,14 +171,14 @@ References: async_return_void.js:11:32 11| async function foo3(): Promise { ^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] Error ---------------------------------------------------------------------------------------- await_not_in_async.js:5:9 -Unexpected number +Unexpected number, expected the end of an expression statement (`;`) 5| await 1; ^ @@ -186,7 +186,7 @@ Unexpected number Error --------------------------------------------------------------------------------------- await_not_in_async2.js:6:9 -Unexpected number +Unexpected number, expected the token `,` 6| f(await 1); ^ @@ -194,7 +194,7 @@ Unexpected number Error --------------------------------------------------------------------------------------- await_not_in_async3.js:6:9 -Unexpected number +Unexpected number, expected the token `,` 6| f(await 1); ^ diff --git a/tests/async_iteration/async_iteration.exp b/tests/async_iteration/async_iteration.exp index 2b766252309..e76bc4bff9b 100644 --- a/tests/async_iteration/async_iteration.exp +++ b/tests/async_iteration/async_iteration.exp @@ -99,9 +99,9 @@ Cannot cast `result.value` to string because: ^^^^^^^^^^^^ References: - /core.js:484:28 - 484| | { done: true, +value?: Return } - ^^^^^^ [1] + /core.js:528:14 + 528| +value?: 
Return, + ^^^^^^ [1] return.js:20:20 20| (result.value: string); // error: number | void ~> string ^^^^^^ [2] diff --git a/tests/autocomplete/autocomplete.exp b/tests/autocomplete/autocomplete.exp index 2340eac1133..4d60c43ca70 100644 --- a/tests/autocomplete/autocomplete.exp +++ b/tests/autocomplete/autocomplete.exp @@ -1,7 +1,7 @@ -foo_parse_fail.js = hasOwnProperty (prop: any) => boolean -isPrototypeOf (o: any) => boolean +foo_parse_fail.js = hasOwnProperty (prop: mixed) => boolean +isPrototypeOf (o: mixed) => boolean num number -propertyIsEnumerable (prop: any) => boolean +propertyIsEnumerable (prop: mixed) => boolean str string toLocaleString () => string toString () => string @@ -10,23 +10,23 @@ foo.js = { "result":[ { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 + "end":36 }, { "name":"num", @@ -40,13 +40,13 @@ foo.js = { }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 }, { "name":"str", @@ -63,8 +63,8 @@ foo.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -73,8 +73,8 @@ foo.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -83,8 +83,8 @@ foo.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 } @@ -112,8 +112,8 @@ str.js = { "type":"() => Iterator", "func_details":{"return_type":"Iterator","params":[]}, "path":"[LIB] core.js", - "line":291, - "endline":291, + "line":321, + "endline":321, "start":5, "end":34 }, @@ -122,8 +122,8 @@ str.js = { "type":"(name: string) => string", "func_details":{"return_type":"string","params":[{"name":"name","type":"string"}]}, "path":"[LIB] core.js", - "line":292, - "endline":292, + "line":322, + "endline":322, "start":5, "end":32 }, @@ -132,8 +132,8 @@ str.js = { "type":"(pos: number) => string", "func_details":{"return_type":"string","params":[{"name":"pos","type":"number"}]}, "path":"[LIB] core.js", - "line":293, - "endline":293, + "line":323, + "endline":323, "start":5, "end":31 }, @@ -142,8 +142,8 @@ str.js = { "type":"(index: number) => number", "func_details":{"return_type":"number","params":[{"name":"index","type":"number"}]}, "path":"[LIB] core.js", - "line":294, - "endline":294, + "line":324, + "endline":324, 
"start":5, "end":37 }, @@ -152,8 +152,8 @@ str.js = { "type":"(index: number) => number", "func_details":{"return_type":"number","params":[{"name":"index","type":"number"}]}, "path":"[LIB] core.js", - "line":295, - "endline":295, + "line":325, + "endline":325, "start":5, "end":38 }, @@ -165,8 +165,8 @@ str.js = { "params":[{"name":"...strings","type":"Array"}] }, "path":"[LIB] core.js", - "line":296, - "endline":296, + "line":326, + "endline":326, "start":5, "end":45 }, @@ -178,8 +178,8 @@ str.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":298, - "endline":298, + "line":328, + "endline":328, "start":5, "end":62 }, @@ -191,8 +191,8 @@ str.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":299, - "endline":299, + "line":329, + "endline":329, "start":5, "end":62 }, @@ -204,8 +204,8 @@ str.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":300, - "endline":300, + "line":330, + "endline":330, "start":5, "end":60 }, @@ -217,8 +217,8 @@ str.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":301, - "endline":301, + "line":331, + "endline":331, "start":5, "end":64 }, @@ -227,8 +227,8 @@ str.js = { "type":"number", "func_details":null, "path":"[LIB] core.js", - "line":325, - "endline":325, + "line":358, + "endline":358, "start":13, "end":18 }, @@ -237,8 +237,8 @@ str.js = { "type":"(href: string) => string", "func_details":{"return_type":"string","params":[{"name":"href","type":"string"}]}, "path":"[LIB] core.js", - "line":302, - "endline":302, + "line":332, + "endline":332, "start":5, "end":30 }, @@ -254,8 +254,8 @@ str.js = { ] }, "path":"[LIB] core.js", - "line":303, - "endline":303, + "line":333, + "endline":333, "start":5, "end":105 }, @@ -267,18 +267,31 @@ str.js = { "params":[{"name":"regexp","type":"string | RegExp"}] }, "path":"[LIB] core.js", - "line":304, - "endline":304, + "line":334, + "endline":334, "start":5, "end":61 }, + { + "name":"matchAll", + "type":"(regexp: (string | RegExp)) => Iterator", + "func_details":{ + "return_type":"Iterator", + "params":[{"name":"regexp","type":"string | RegExp"}] + }, + "path":"[LIB] core.js", + "line":335, + "endline":335, + "start":5, + "end":67 + }, { "name":"normalize", "type":"(format?: string) => string", "func_details":{"return_type":"string","params":[{"name":"format?","type":"string"}]}, "path":"[LIB] core.js", - "line":305, - "endline":305, + "line":336, + "endline":336, "start":5, "end":38 }, @@ -290,8 +303,8 @@ str.js = { "params":[{"name":"targetLength","type":"number"},{"name":"padString?","type":"string"}] }, "path":"[LIB] core.js", - "line":306, - "endline":306, + "line":337, + "endline":337, "start":5, "end":60 }, @@ -303,8 +316,8 @@ str.js = { "params":[{"name":"targetLength","type":"number"},{"name":"padString?","type":"string"}] }, "path":"[LIB] core.js", - "line":307, - "endline":307, + "line":338, + "endline":338, "start":5, "end":62 }, @@ -313,8 +326,8 @@ str.js = { "type":"(count: number) => string", "func_details":{"return_type":"string","params":[{"name":"count","type":"number"}]}, "path":"[LIB] core.js", - "line":308, - "endline":308, + "line":339, + "endline":339, "start":5, "end":33 }, @@ -332,8 +345,8 @@ str.js = { ] }, "path":"[LIB] core.js", - "line":309, - "endline":309, + "line":340, + 
"endline":340, "start":5, "end":124 }, @@ -342,8 +355,8 @@ str.js = { "type":"(regexp: (string | RegExp)) => number", "func_details":{"return_type":"number","params":[{"name":"regexp","type":"string | RegExp"}]}, "path":"[LIB] core.js", - "line":310, - "endline":310, + "line":341, + "endline":341, "start":5, "end":43 }, @@ -355,8 +368,8 @@ str.js = { "params":[{"name":"start?","type":"number"},{"name":"end?","type":"number"}] }, "path":"[LIB] core.js", - "line":311, - "endline":311, + "line":342, + "endline":342, "start":5, "end":47 }, @@ -371,8 +384,8 @@ str.js = { ] }, "path":"[LIB] core.js", - "line":312, - "endline":312, + "line":343, + "endline":343, "start":5, "end":69 }, @@ -384,8 +397,8 @@ str.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":313, - "endline":313, + "line":344, + "endline":344, "start":5, "end":64 }, @@ -397,8 +410,8 @@ str.js = { "params":[{"name":"from","type":"number"},{"name":"length?","type":"number"}] }, "path":"[LIB] core.js", - "line":314, - "endline":314, + "line":345, + "endline":345, "start":5, "end":49 }, @@ -410,8 +423,8 @@ str.js = { "params":[{"name":"start","type":"number"},{"name":"end?","type":"number"}] }, "path":"[LIB] core.js", - "line":315, - "endline":315, + "line":346, + "endline":346, "start":5, "end":50 }, @@ -423,8 +436,8 @@ str.js = { "params":[{"name":"locale?","type":"string | Array"}] }, "path":"[LIB] core.js", - "line":316, - "endline":316, + "line":347, + "endline":347, "start":5, "end":62 }, @@ -436,8 +449,8 @@ str.js = { "params":[{"name":"locale?","type":"string | Array"}] }, "path":"[LIB] core.js", - "line":317, - "endline":317, + "line":348, + "endline":348, "start":5, "end":62 }, @@ -446,8 +459,8 @@ str.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":318, - "endline":318, + "line":349, + "endline":349, "start":5, "end":25 }, @@ -456,8 +469,8 @@ str.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":324, - "endline":324, + "line":357, + "endline":357, "start":5, "end":22 }, @@ -466,8 +479,8 @@ str.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":319, - "endline":319, + "line":350, + "endline":350, "start":5, "end":25 }, @@ -476,18 +489,28 @@ str.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":320, - "endline":320, + "line":351, + "endline":351, "start":5, "end":18 }, + { + "name":"trimEnd", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":352, + "endline":352, + "start":5, + "end":21 + }, { "name":"trimLeft", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":321, - "endline":321, + "line":353, + "endline":353, "start":5, "end":22 }, @@ -496,8 +519,18 @@ str.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":322, - "endline":322, + "line":354, + "endline":354, + "start":5, + "end":23 + }, + { + "name":"trimStart", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":355, + "endline":355, "start":5, "end":23 }, @@ -506,8 +539,8 @@ str.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", 
- "line":323, - "endline":323, + "line":356, + "endline":356, "start":5, "end":21 } @@ -520,8 +553,8 @@ num.js = { "type":"(fractionDigits?: number) => string", "func_details":{"return_type":"string","params":[{"name":"fractionDigits?","type":"number"}]}, "path":"[LIB] core.js", - "line":140, - "endline":140, + "line":160, + "endline":160, "start":5, "end":50 }, @@ -530,8 +563,8 @@ num.js = { "type":"(fractionDigits?: number) => string", "func_details":{"return_type":"string","params":[{"name":"fractionDigits?","type":"number"}]}, "path":"[LIB] core.js", - "line":141, - "endline":141, + "line":161, + "endline":161, "start":5, "end":44 }, @@ -546,8 +579,8 @@ num.js = { ] }, "path":"[LIB] core.js", - "line":142, - "endline":142, + "line":162, + "endline":162, "start":5, "end":96 }, @@ -556,8 +589,8 @@ num.js = { "type":"(precision?: number) => string", "func_details":{"return_type":"string","params":[{"name":"precision?","type":"number"}]}, "path":"[LIB] core.js", - "line":143, - "endline":143, + "line":163, + "endline":163, "start":5, "end":43 }, @@ -566,8 +599,8 @@ num.js = { "type":"(radix?: number) => string", "func_details":{"return_type":"string","params":[{"name":"radix?","type":"number"}]}, "path":"[LIB] core.js", - "line":144, - "endline":144, + "line":164, + "endline":164, "start":5, "end":36 }, @@ -576,8 +609,8 @@ num.js = { "type":"() => number", "func_details":{"return_type":"number","params":[]}, "path":"[LIB] core.js", - "line":145, - "endline":145, + "line":165, + "endline":165, "start":5, "end":21 } @@ -590,8 +623,8 @@ bool.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":120, - "endline":120, + "line":140, + "endline":140, "start":5, "end":22 }, @@ -600,8 +633,8 @@ bool.js = { "type":"() => boolean", "func_details":{"return_type":"boolean","params":[]}, "path":"[LIB] core.js", - "line":119, - "endline":119, + "line":139, + "endline":139, "start":5, "end":22 } @@ -621,248 +654,41 @@ union.js = { }, { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":63, - "endline":63, - "start":5, - "end":38 - }, - { - "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, - "path":"[LIB] core.js", - "line":64, - "endline":64, - "start":5, - "end":34 - }, - { - "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":65, - "endline":65, - "start":5, - "end":44 - }, - { - "name":"toLocaleString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":66, - "endline":66, - "start":5, - "end":28 - }, - { - "name":"toString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":67, - "endline":67, - "start":5, - "end":22 - }, - { - "name":"valueOf", - "type":"() => mixed", - "func_details":{"return_type":"mixed","params":[]}, - "path":"[LIB] core.js", - "line":68, - "endline":68, - "start":5, - "end":20 - } - ] -} -object_builtins.js = { - "result":[ - { - "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":63, - "endline":63, - "start":5, - "end":38 - 
}, - { - "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, - "path":"[LIB] core.js", - "line":64, - "endline":64, - "start":5, - "end":34 - }, - { - "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":65, - "endline":65, - "start":5, - "end":44 - }, - { - "name":"toLocaleString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":66, - "endline":66, - "start":5, - "end":28 - }, - { - "name":"toString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":67, - "endline":67, - "start":5, - "end":22 - }, - { - "name":"valueOf", - "type":"() => mixed", - "func_details":{"return_type":"mixed","params":[]}, - "path":"[LIB] core.js", - "line":68, - "endline":68, - "start":5, - "end":20 - } - ] -} -function_builtins.js = { - "result":[ - { - "name":"apply", - "type":"(thisArg: any, argArray?: any) => any", - "func_details":{ - "return_type":"any", - "params":[{"name":"thisArg","type":"any"},{"name":"argArray?","type":"any"}] - }, - "path":"[LIB] core.js", - "line":106, - "endline":106, - "start":18, - "end":41 - }, - { - "name":"arguments", - "type":"any", - "func_details":null, - "path":"[LIB] core.js", - "line":110, - "endline":110, - "start":16, - "end":18 - }, - { - "name":"bind", - "type":"(thisArg: any, ...argArray: Array) => any", - "func_details":{ - "return_type":"any", - "params":[{"name":"thisArg","type":"any"},{"name":"...argArray","type":"Array"}] - }, - "path":"[LIB] core.js", - "line":107, - "endline":107, - "start":17, - "end":39 - }, - { - "name":"call", - "type":"(thisArg: any, ...argArray: Array) => any", - "func_details":{ - "return_type":"any", - "params":[{"name":"thisArg","type":"any"},{"name":"...argArray","type":"Array"}] - }, - "path":"[LIB] core.js", - "line":108, - "endline":108, - "start":17, - "end":39 - }, - { - "name":"caller", - "type":"Function | null", - "func_details":null, - "path":"[LIB] core.js", - "line":111, - "endline":111, - "start":13, - "end":27 - }, - { - "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 - }, - { - "name":"length", - "type":"number", - "func_details":null, - "path":"[LIB] core.js", - "line":112, - "endline":112, - "start":13, - "end":18 - }, - { - "name":"name", - "type":"string", - "func_details":null, - "path":"[LIB] core.js", - "line":113, - "endline":113, - "start":11, - "end":16 + "end":36 }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + 
"func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 }, { "name":"toLocaleString", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -871,8 +697,8 @@ function_builtins.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -881,131 +707,54 @@ function_builtins.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 } ] } +object_builtins.js = {"error":"not enough type information to autocomplete","result":[]} +function_builtins.js = {"error":"not enough type information to autocomplete","result":[]} fun.js = { "result":[ - { - "name":"apply", - "type":"(thisArg: any, argArray?: any) => any", - "func_details":{ - "return_type":"any", - "params":[{"name":"thisArg","type":"any"},{"name":"argArray?","type":"any"}] - }, - "path":"[LIB] core.js", - "line":106, - "endline":106, - "start":18, - "end":41 - }, - { - "name":"arguments", - "type":"any", - "func_details":null, - "path":"[LIB] core.js", - "line":110, - "endline":110, - "start":16, - "end":18 - }, - { - "name":"bind", - "type":"(thisArg: any, ...argArray: Array) => any", - "func_details":{ - "return_type":"any", - "params":[{"name":"thisArg","type":"any"},{"name":"...argArray","type":"Array"}] - }, - "path":"[LIB] core.js", - "line":107, - "endline":107, - "start":17, - "end":39 - }, - { - "name":"call", - "type":"(thisArg: any, ...argArray: Array) => any", - "func_details":{ - "return_type":"any", - "params":[{"name":"thisArg","type":"any"},{"name":"...argArray","type":"Array"}] - }, - "path":"[LIB] core.js", - "line":108, - "endline":108, - "start":17, - "end":39 - }, - { - "name":"caller", - "type":"Function | null", - "func_details":null, - "path":"[LIB] core.js", - "line":111, - "endline":111, - "start":13, - "end":27 - }, { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 - }, - { - "name":"length", - "type":"number", - "func_details":null, - "path":"[LIB] core.js", - "line":112, - "endline":112, - "start":13, - "end":18 - }, - { - "name":"name", - "type":"string", - "func_details":null, - "path":"[LIB] core.js", - "line":113, - "endline":113, - "start":11, - "end":16 + "end":36 }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + 
"func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 }, { "name":"toLocaleString", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -1014,8 +763,8 @@ fun.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -1024,8 +773,8 @@ fun.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 } @@ -1072,8 +821,8 @@ typeparams.js = { "type":"(fractionDigits?: number) => string", "func_details":{"return_type":"string","params":[{"name":"fractionDigits?","type":"number"}]}, "path":"[LIB] core.js", - "line":140, - "endline":140, + "line":160, + "endline":160, "start":5, "end":50 }, @@ -1082,8 +831,8 @@ typeparams.js = { "type":"(fractionDigits?: number) => string", "func_details":{"return_type":"string","params":[{"name":"fractionDigits?","type":"number"}]}, "path":"[LIB] core.js", - "line":141, - "endline":141, + "line":161, + "endline":161, "start":5, "end":44 }, @@ -1098,8 +847,8 @@ typeparams.js = { ] }, "path":"[LIB] core.js", - "line":142, - "endline":142, + "line":162, + "endline":162, "start":5, "end":96 }, @@ -1108,8 +857,8 @@ typeparams.js = { "type":"(precision?: number) => string", "func_details":{"return_type":"string","params":[{"name":"precision?","type":"number"}]}, "path":"[LIB] core.js", - "line":143, - "endline":143, + "line":163, + "endline":163, "start":5, "end":43 }, @@ -1118,8 +867,8 @@ typeparams.js = { "type":"(radix?: number) => string", "func_details":{"return_type":"string","params":[{"name":"radix?","type":"number"}]}, "path":"[LIB] core.js", - "line":144, - "endline":144, + "line":164, + "endline":164, "start":5, "end":36 }, @@ -1128,8 +877,8 @@ typeparams.js = { "type":"() => number", "func_details":{"return_type":"number","params":[]}, "path":"[LIB] core.js", - "line":145, - "endline":145, + "line":165, + "endline":165, "start":5, "end":21 } @@ -1149,41 +898,41 @@ generics.js = { }, { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 + "end":36 }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 }, { 
"name":"toLocaleString", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -1192,8 +941,8 @@ generics.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -1202,8 +951,8 @@ generics.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 } @@ -1223,23 +972,23 @@ optional.js = { }, { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 + "end":36 }, { "name":"o", @@ -1253,21 +1002,21 @@ optional.js = { }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 }, { "name":"toLocaleString", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -1276,8 +1025,8 @@ optional.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -1286,8 +1035,8 @@ optional.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 }, @@ -1305,66 +1054,6 @@ optional.js = { } jsx1.js = { "result":[ - { - "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":63, - "endline":63, - "start":5, - "end":38 - }, - { - "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, - "path":"[LIB] core.js", - "line":64, - "endline":64, - "start":5, - "end":34 - }, - { - "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":65, - "endline":65, - "start":5, - "end":44 - }, - { - "name":"toLocaleString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":66, - "endline":66, - "start":5, - "end":28 - }, - { - "name":"toString", - "type":"() => string", - 
"func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":67, - "endline":67, - "start":5, - "end":22 - }, - { - "name":"valueOf", - "type":"() => mixed", - "func_details":{"return_type":"mixed","params":[]}, - "path":"[LIB] core.js", - "line":68, - "endline":68, - "start":5, - "end":20 - }, { "name":"x", "type":"number", @@ -1379,66 +1068,6 @@ jsx1.js = { } jsx2.js = { "result":[ - { - "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":63, - "endline":63, - "start":5, - "end":38 - }, - { - "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, - "path":"[LIB] core.js", - "line":64, - "endline":64, - "start":5, - "end":34 - }, - { - "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, - "path":"[LIB] core.js", - "line":65, - "endline":65, - "start":5, - "end":44 - }, - { - "name":"toLocaleString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":66, - "endline":66, - "start":5, - "end":28 - }, - { - "name":"toString", - "type":"() => string", - "func_details":{"return_type":"string","params":[]}, - "path":"[LIB] core.js", - "line":67, - "endline":67, - "start":5, - "end":22 - }, - { - "name":"valueOf", - "type":"() => mixed", - "func_details":{"return_type":"mixed","params":[]}, - "path":"[LIB] core.js", - "line":68, - "endline":68, - "start":5, - "end":20 - }, { "name":"x", "type":"number", @@ -1461,6 +1090,30 @@ jsx2.js = { } ] } +jsx3.js = { + "result":[ + { + "name":"x", + "type":"number", + "func_details":null, + "path":"jsx3.js", + "line":5, + "endline":5, + "start":29, + "end":34 + }, + { + "name":"y", + "type":"string", + "func_details":null, + "path":"jsx3.js", + "line":5, + "endline":5, + "start":40, + "end":45 + } + ] +} customfun.js = { "result":[ { @@ -1488,7 +1141,7 @@ customfun.js = { }, { "name":"idx", - "type":"(obj: IdxObject, pathCallback: (demaybefiedObj: IdxObject) => IdxResult) => ?IdxResult", + "type":"(obj: IdxObject, pathCallback: (demaybefiedObj: IdxObject) => IdxResult) => ?IdxResult", "func_details":{ "return_type":"?IdxResult", "params":[ @@ -1506,7 +1159,7 @@ customfun.js = { "name":"exports", "type":"{||}", "func_details":null, - "path":"", + "path":"customfun.js", "line":0, "endline":0, "start":1, @@ -1555,8 +1208,8 @@ if.js = { "type":"() => Iterator", "func_details":{"return_type":"Iterator","params":[]}, "path":"[LIB] core.js", - "line":291, - "endline":291, + "line":321, + "endline":321, "start":5, "end":34 }, @@ -1565,8 +1218,8 @@ if.js = { "type":"(name: string) => string", "func_details":{"return_type":"string","params":[{"name":"name","type":"string"}]}, "path":"[LIB] core.js", - "line":292, - "endline":292, + "line":322, + "endline":322, "start":5, "end":32 }, @@ -1575,8 +1228,8 @@ if.js = { "type":"(pos: number) => string", "func_details":{"return_type":"string","params":[{"name":"pos","type":"number"}]}, "path":"[LIB] core.js", - "line":293, - "endline":293, + "line":323, + "endline":323, "start":5, "end":31 }, @@ -1585,8 +1238,8 @@ if.js = { "type":"(index: number) => number", "func_details":{"return_type":"number","params":[{"name":"index","type":"number"}]}, "path":"[LIB] core.js", - "line":294, - "endline":294, + "line":324, + "endline":324, 
"start":5, "end":37 }, @@ -1595,8 +1248,8 @@ if.js = { "type":"(index: number) => number", "func_details":{"return_type":"number","params":[{"name":"index","type":"number"}]}, "path":"[LIB] core.js", - "line":295, - "endline":295, + "line":325, + "endline":325, "start":5, "end":38 }, @@ -1608,8 +1261,8 @@ if.js = { "params":[{"name":"...strings","type":"Array"}] }, "path":"[LIB] core.js", - "line":296, - "endline":296, + "line":326, + "endline":326, "start":5, "end":45 }, @@ -1621,8 +1274,8 @@ if.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":298, - "endline":298, + "line":328, + "endline":328, "start":5, "end":62 }, @@ -1634,8 +1287,8 @@ if.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":299, - "endline":299, + "line":329, + "endline":329, "start":5, "end":62 }, @@ -1647,8 +1300,8 @@ if.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":300, - "endline":300, + "line":330, + "endline":330, "start":5, "end":60 }, @@ -1660,8 +1313,8 @@ if.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":301, - "endline":301, + "line":331, + "endline":331, "start":5, "end":64 }, @@ -1670,8 +1323,8 @@ if.js = { "type":"number", "func_details":null, "path":"[LIB] core.js", - "line":325, - "endline":325, + "line":358, + "endline":358, "start":13, "end":18 }, @@ -1680,8 +1333,8 @@ if.js = { "type":"(href: string) => string", "func_details":{"return_type":"string","params":[{"name":"href","type":"string"}]}, "path":"[LIB] core.js", - "line":302, - "endline":302, + "line":332, + "endline":332, "start":5, "end":30 }, @@ -1697,8 +1350,8 @@ if.js = { ] }, "path":"[LIB] core.js", - "line":303, - "endline":303, + "line":333, + "endline":333, "start":5, "end":105 }, @@ -1710,18 +1363,31 @@ if.js = { "params":[{"name":"regexp","type":"string | RegExp"}] }, "path":"[LIB] core.js", - "line":304, - "endline":304, + "line":334, + "endline":334, "start":5, "end":61 }, + { + "name":"matchAll", + "type":"(regexp: (string | RegExp)) => Iterator", + "func_details":{ + "return_type":"Iterator", + "params":[{"name":"regexp","type":"string | RegExp"}] + }, + "path":"[LIB] core.js", + "line":335, + "endline":335, + "start":5, + "end":67 + }, { "name":"normalize", "type":"(format?: string) => string", "func_details":{"return_type":"string","params":[{"name":"format?","type":"string"}]}, "path":"[LIB] core.js", - "line":305, - "endline":305, + "line":336, + "endline":336, "start":5, "end":38 }, @@ -1733,8 +1399,8 @@ if.js = { "params":[{"name":"targetLength","type":"number"},{"name":"padString?","type":"string"}] }, "path":"[LIB] core.js", - "line":306, - "endline":306, + "line":337, + "endline":337, "start":5, "end":60 }, @@ -1746,8 +1412,8 @@ if.js = { "params":[{"name":"targetLength","type":"number"},{"name":"padString?","type":"string"}] }, "path":"[LIB] core.js", - "line":307, - "endline":307, + "line":338, + "endline":338, "start":5, "end":62 }, @@ -1756,8 +1422,8 @@ if.js = { "type":"(count: number) => string", "func_details":{"return_type":"string","params":[{"name":"count","type":"number"}]}, "path":"[LIB] core.js", - "line":308, - "endline":308, + "line":339, + "endline":339, "start":5, "end":33 }, @@ -1775,8 +1441,8 @@ if.js = { ] }, "path":"[LIB] core.js", - "line":309, - "endline":309, + "line":340, + 
"endline":340, "start":5, "end":124 }, @@ -1785,8 +1451,8 @@ if.js = { "type":"(regexp: (string | RegExp)) => number", "func_details":{"return_type":"number","params":[{"name":"regexp","type":"string | RegExp"}]}, "path":"[LIB] core.js", - "line":310, - "endline":310, + "line":341, + "endline":341, "start":5, "end":43 }, @@ -1798,8 +1464,8 @@ if.js = { "params":[{"name":"start?","type":"number"},{"name":"end?","type":"number"}] }, "path":"[LIB] core.js", - "line":311, - "endline":311, + "line":342, + "endline":342, "start":5, "end":47 }, @@ -1814,8 +1480,8 @@ if.js = { ] }, "path":"[LIB] core.js", - "line":312, - "endline":312, + "line":343, + "endline":343, "start":5, "end":69 }, @@ -1827,8 +1493,8 @@ if.js = { "params":[{"name":"searchString","type":"string"},{"name":"position?","type":"number"}] }, "path":"[LIB] core.js", - "line":313, - "endline":313, + "line":344, + "endline":344, "start":5, "end":64 }, @@ -1840,8 +1506,8 @@ if.js = { "params":[{"name":"from","type":"number"},{"name":"length?","type":"number"}] }, "path":"[LIB] core.js", - "line":314, - "endline":314, + "line":345, + "endline":345, "start":5, "end":49 }, @@ -1853,8 +1519,8 @@ if.js = { "params":[{"name":"start","type":"number"},{"name":"end?","type":"number"}] }, "path":"[LIB] core.js", - "line":315, - "endline":315, + "line":346, + "endline":346, "start":5, "end":50 }, @@ -1866,8 +1532,8 @@ if.js = { "params":[{"name":"locale?","type":"string | Array"}] }, "path":"[LIB] core.js", - "line":316, - "endline":316, + "line":347, + "endline":347, "start":5, "end":62 }, @@ -1879,8 +1545,8 @@ if.js = { "params":[{"name":"locale?","type":"string | Array"}] }, "path":"[LIB] core.js", - "line":317, - "endline":317, + "line":348, + "endline":348, "start":5, "end":62 }, @@ -1889,8 +1555,8 @@ if.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":318, - "endline":318, + "line":349, + "endline":349, "start":5, "end":25 }, @@ -1899,8 +1565,8 @@ if.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":324, - "endline":324, + "line":357, + "endline":357, "start":5, "end":22 }, @@ -1909,8 +1575,8 @@ if.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":319, - "endline":319, + "line":350, + "endline":350, "start":5, "end":25 }, @@ -1919,18 +1585,28 @@ if.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":320, - "endline":320, + "line":351, + "endline":351, "start":5, "end":18 }, + { + "name":"trimEnd", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":352, + "endline":352, + "start":5, + "end":21 + }, { "name":"trimLeft", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":321, - "endline":321, + "line":353, + "endline":353, "start":5, "end":22 }, @@ -1939,8 +1615,18 @@ if.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":322, - "endline":322, + "line":354, + "endline":354, + "start":5, + "end":23 + }, + { + "name":"trimStart", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":355, + "endline":355, "start":5, "end":23 }, @@ -1949,8 +1635,8 @@ if.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, 
"path":"[LIB] core.js", - "line":323, - "endline":323, + "line":356, + "endline":356, "start":5, "end":21 } @@ -2004,23 +1690,23 @@ class.js = { }, { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 + "end":36 }, { "name":"name", @@ -2034,21 +1720,21 @@ class.js = { }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 }, { "name":"toLocaleString", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -2057,8 +1743,8 @@ class.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -2067,8 +1753,8 @@ class.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 } @@ -2116,41 +1802,181 @@ idx.js = { }, { "name":"hasOwnProperty", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":63, - "endline":63, + "line":80, + "endline":80, "start":5, - "end":38 + "end":40 }, { "name":"isPrototypeOf", - "type":"(o: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"o","type":"any"}]}, + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, "path":"[LIB] core.js", - "line":64, - "endline":64, + "line":81, + "endline":81, "start":5, - "end":34 + "end":36 }, { "name":"propertyIsEnumerable", - "type":"(prop: any) => boolean", - "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"any"}]}, + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, "path":"[LIB] core.js", - "line":65, - "endline":65, + "line":82, + "endline":82, "start":5, - "end":44 + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + 
"start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + "func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + } + ] +} +generic_alias.js = { + "result":[ + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"name", + "type":"string", + "func_details":null, + "path":"generic_alias.js", + "line":5, + "endline":5, + "start":25, + "end":30 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + "start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + "func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + } + ] +} +object_literal.js:5:16 = {"result":[]} +object_literal.js:7:17 = {"result":[]} +optional_object.js = { + "result":[ + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 }, { "name":"toLocaleString", "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":66, - "endline":66, + "line":83, + "endline":83, "start":5, "end":28 }, @@ -2159,8 +1985,8 @@ idx.js = { "type":"() => string", "func_details":{"return_type":"string","params":[]}, "path":"[LIB] core.js", - "line":67, - "endline":67, + "line":84, + "endline":84, "start":5, "end":22 }, @@ -2169,10 +1995,1232 @@ idx.js = { "type":"() => mixed", "func_details":{"return_type":"mixed","params":[]}, "path":"[LIB] core.js", - "line":68, - "endline":68, + "line":85, + "endline":85, "start":5, "end":20 + }, + { + "name":"z", + "type":"number", + "func_details":null, + "path":"optional_object.js", + "line":2, + "endline":2, + "start":25, + "end":30 + } + ] +} +indirect_array.js:5:3 = { + "result":[ + { + "name":"@@iterator", + "type":"() => Iterator", + "func_details":{"return_type":"Iterator","params":[]}, + "path":"[LIB] core.js", + "line":218, + "endline":218, + "start":5, + "end":29 + }, + { 
+ "name":"concat", + "type":" | S)>(...items: Array) => Array<(number | S)>", + "func_details":{ + "return_type":"Array<(number | S)>", + "params":[{"name":"...items","type":"Array"}] + }, + "path":"[LIB] core.js", + "line":221, + "endline":221, + "start":5, + "end":79 + }, + { + "name":"copyWithin", + "type":"(target: number, start: number, end?: number) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + {"name":"target","type":"number"}, + {"name":"start","type":"number"}, + {"name":"end?","type":"number"} + ] + }, + "path":"[LIB] core.js", + "line":259, + "endline":259, + "start":5, + "end":64 + }, + { + "name":"entries", + "type":"() => Iterator<[number, number]>", + "func_details":{"return_type":"Iterator<[number, number]>","params":[]}, + "path":"[LIB] core.js", + "line":222, + "endline":222, + "start":5, + "end":36 + }, + { + "name":"every", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => boolean", + "func_details":{ + "return_type":"boolean", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":260, + "endline":260, + "start":5, + "end":98 + }, + { + "name":"fill", + "type":"(value: number, begin?: number, end?: number) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + {"name":"value","type":"number"}, + {"name":"begin?","type":"number"}, + {"name":"end?","type":"number"} + ] + }, + "path":"[LIB] core.js", + "line":261, + "endline":261, + "start":5, + "end":58 + }, + { + "name":"filter", + "type":"((callbackfn: class Boolean) => Array<$NonMaybeType>) & ((callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => Array)", + "func_details":null, + "path":"[LIB] core.js", + "line":262, + "endline":262, + "start":5, + "end":63 + }, + { + "name":"find", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => (number | void)", + "func_details":{ + "return_type":"number | void", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":264, + "endline":264, + "start":5, + "end":98 + }, + { + "name":"findIndex", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => number", + "func_details":{ + "return_type":"number", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":265, + "endline":265, + "start":5, + "end":101 + }, + { + "name":"flatMap", + "type":"(callbackfn: (value: number, index: number, array: Array) => ($ReadOnlyArray | U), thisArg?: any) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => ($ReadOnlyArray | U)" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":268, + "endline":268, + "start":5, + "end":120 + }, + { + "name":"forEach", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => void", + "func_details":{ + "return_type":"void", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + 
"line":266, + "endline":266, + "start":5, + "end":97 + }, + { + "name":"includes", + "type":"(searchElement: mixed, fromIndex?: number) => boolean", + "func_details":{ + "return_type":"boolean", + "params":[{"name":"searchElement","type":"mixed"},{"name":"fromIndex?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":229, + "endline":229, + "start":5, + "end":63 + }, + { + "name":"indexOf", + "type":"(searchElement: mixed, fromIndex?: number) => number", + "func_details":{ + "return_type":"number", + "params":[{"name":"searchElement","type":"mixed"},{"name":"fromIndex?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":230, + "endline":230, + "start":5, + "end":61 + }, + { + "name":"join", + "type":"(separator?: string) => string", + "func_details":{"return_type":"string","params":[{"name":"separator?","type":"string"}]}, + "path":"[LIB] core.js", + "line":231, + "endline":231, + "start":5, + "end":36 + }, + { + "name":"keys", + "type":"() => Iterator", + "func_details":{"return_type":"Iterator","params":[]}, + "path":"[LIB] core.js", + "line":232, + "endline":232, + "start":5, + "end":28 + }, + { + "name":"lastIndexOf", + "type":"(searchElement: mixed, fromIndex?: number) => number", + "func_details":{ + "return_type":"number", + "params":[{"name":"searchElement","type":"mixed"},{"name":"fromIndex?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":233, + "endline":233, + "start":5, + "end":65 + }, + { + "name":"length", + "type":"number", + "func_details":null, + "path":"[LIB] core.js", + "line":294, + "endline":294, + "start":13, + "end":18 + }, + { + "name":"map", + "type":"(callbackfn: (value: number, index: number, array: Array) => U, thisArg?: any) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => U" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":267, + "endline":267, + "start":5, + "end":96 + }, + { + "name":"pop", + "type":"() => number", + "func_details":{"return_type":"number","params":[]}, + "path":"[LIB] core.js", + "line":269, + "endline":269, + "start":5, + "end":12 + }, + { + "name":"push", + "type":"(...items: Array) => number", + "func_details":{"return_type":"number","params":[{"name":"...items","type":"Array"}]}, + "path":"[LIB] core.js", + "line":270, + "endline":270, + "start":5, + "end":36 + }, + { + "name":"reduce", + "type":"((callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: Array) => number) => number) & ((callbackfn: (previousValue: U, currentValue: number, currentIndex: number, array: Array) => U, initialValue: U) => U)", + "func_details":null, + "path":"[LIB] core.js", + "line":271, + "endline":273, + "start":5, + "end":8 + }, + { + "name":"reduceRight", + "type":"((callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: Array) => number) => number) & ((callbackfn: (previousValue: U, currentValue: number, currentIndex: number, array: Array) => U, initialValue: U) => U)", + "func_details":null, + "path":"[LIB] core.js", + "line":278, + "endline":280, + "start":5, + "end":8 + }, + { + "name":"reverse", + "type":"() => Array", + "func_details":{"return_type":"Array","params":[]}, + "path":"[LIB] core.js", + "line":285, + "endline":285, + "start":5, + "end":23 + }, + { + "name":"shift", + "type":"() => number", + "func_details":{"return_type":"number","params":[]}, + "path":"[LIB] core.js", + "line":286, + "endline":286, + "start":5, + 
"end":14 + }, + { + "name":"slice", + "type":"(start?: number, end?: number) => Array", + "func_details":{ + "return_type":"Array", + "params":[{"name":"start?","type":"number"},{"name":"end?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":251, + "endline":251, + "start":5, + "end":49 + }, + { + "name":"some", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => boolean", + "func_details":{ + "return_type":"boolean", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":287, + "endline":287, + "start":5, + "end":97 + }, + { + "name":"sort", + "type":"(compareFn?: (a: number, b: number) => number) => Array", + "func_details":{ + "return_type":"Array", + "params":[{"name":"compareFn?","type":"(a: number, b: number) => number"}] + }, + "path":"[LIB] core.js", + "line":288, + "endline":288, + "start":5, + "end":54 + }, + { + "name":"splice", + "type":"(start: number, deleteCount?: number, ...items: Array) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + {"name":"start","type":"number"}, + {"name":"deleteCount?","type":"number"}, + {"name":"...items","type":"Array"} + ] + }, + "path":"[LIB] core.js", + "line":289, + "endline":289, + "start":5, + "end":77 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":219, + "endline":219, + "start":5, + "end":28 + }, + { + "name":"unshift", + "type":"(...items: Array) => number", + "func_details":{"return_type":"number","params":[{"name":"...items","type":"Array"}]}, + "path":"[LIB] core.js", + "line":290, + "endline":290, + "start":5, + "end":39 + }, + { + "name":"values", + "type":"() => Iterator", + "func_details":{"return_type":"Iterator","params":[]}, + "path":"[LIB] core.js", + "line":253, + "endline":253, + "start":5, + "end":25 + } + ] +} +indirect_array.js:10:3 = { + "result":[ + { + "name":"@@iterator", + "type":"() => Iterator", + "func_details":{"return_type":"Iterator","params":[]}, + "path":"[LIB] core.js", + "line":218, + "endline":218, + "start":5, + "end":29 + }, + { + "name":"concat", + "type":" | S)>(...items: Array) => Array<(number | S)>", + "func_details":{ + "return_type":"Array<(number | S)>", + "params":[{"name":"...items","type":"Array"}] + }, + "path":"[LIB] core.js", + "line":221, + "endline":221, + "start":5, + "end":79 + }, + { + "name":"copyWithin", + "type":"(target: number, start: number, end?: number) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + {"name":"target","type":"number"}, + {"name":"start","type":"number"}, + {"name":"end?","type":"number"} + ] + }, + "path":"[LIB] core.js", + "line":259, + "endline":259, + "start":5, + "end":64 + }, + { + "name":"entries", + "type":"() => Iterator<[number, number]>", + "func_details":{"return_type":"Iterator<[number, number]>","params":[]}, + "path":"[LIB] core.js", + "line":222, + "endline":222, + "start":5, + "end":36 + }, + { + "name":"every", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => boolean", + "func_details":{ + "return_type":"boolean", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":260, + "endline":260, + "start":5, + "end":98 + }, + { + "name":"fill", + 
"type":"(value: number, begin?: number, end?: number) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + {"name":"value","type":"number"}, + {"name":"begin?","type":"number"}, + {"name":"end?","type":"number"} + ] + }, + "path":"[LIB] core.js", + "line":261, + "endline":261, + "start":5, + "end":58 + }, + { + "name":"filter", + "type":"((callbackfn: class Boolean) => Array<$NonMaybeType>) & ((callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => Array)", + "func_details":null, + "path":"[LIB] core.js", + "line":262, + "endline":262, + "start":5, + "end":63 + }, + { + "name":"find", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => (number | void)", + "func_details":{ + "return_type":"number | void", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":264, + "endline":264, + "start":5, + "end":98 + }, + { + "name":"findIndex", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => number", + "func_details":{ + "return_type":"number", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":265, + "endline":265, + "start":5, + "end":101 + }, + { + "name":"flatMap", + "type":"(callbackfn: (value: number, index: number, array: Array) => ($ReadOnlyArray | U), thisArg?: any) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => ($ReadOnlyArray | U)" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":268, + "endline":268, + "start":5, + "end":120 + }, + { + "name":"forEach", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => void", + "func_details":{ + "return_type":"void", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":266, + "endline":266, + "start":5, + "end":97 + }, + { + "name":"includes", + "type":"(searchElement: mixed, fromIndex?: number) => boolean", + "func_details":{ + "return_type":"boolean", + "params":[{"name":"searchElement","type":"mixed"},{"name":"fromIndex?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":229, + "endline":229, + "start":5, + "end":63 + }, + { + "name":"indexOf", + "type":"(searchElement: mixed, fromIndex?: number) => number", + "func_details":{ + "return_type":"number", + "params":[{"name":"searchElement","type":"mixed"},{"name":"fromIndex?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":230, + "endline":230, + "start":5, + "end":61 + }, + { + "name":"join", + "type":"(separator?: string) => string", + "func_details":{"return_type":"string","params":[{"name":"separator?","type":"string"}]}, + "path":"[LIB] core.js", + "line":231, + "endline":231, + "start":5, + "end":36 + }, + { + "name":"keys", + "type":"() => Iterator", + "func_details":{"return_type":"Iterator","params":[]}, + "path":"[LIB] core.js", + "line":232, + "endline":232, + "start":5, + "end":28 + }, + { + "name":"lastIndexOf", + "type":"(searchElement: mixed, fromIndex?: number) => number", + "func_details":{ + "return_type":"number", + 
"params":[{"name":"searchElement","type":"mixed"},{"name":"fromIndex?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":233, + "endline":233, + "start":5, + "end":65 + }, + { + "name":"length", + "type":"number", + "func_details":null, + "path":"[LIB] core.js", + "line":294, + "endline":294, + "start":13, + "end":18 + }, + { + "name":"map", + "type":"(callbackfn: (value: number, index: number, array: Array) => U, thisArg?: any) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => U" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":267, + "endline":267, + "start":5, + "end":96 + }, + { + "name":"pop", + "type":"() => number", + "func_details":{"return_type":"number","params":[]}, + "path":"[LIB] core.js", + "line":269, + "endline":269, + "start":5, + "end":12 + }, + { + "name":"push", + "type":"(...items: Array) => number", + "func_details":{"return_type":"number","params":[{"name":"...items","type":"Array"}]}, + "path":"[LIB] core.js", + "line":270, + "endline":270, + "start":5, + "end":36 + }, + { + "name":"reduce", + "type":"((callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: Array) => number) => number) & ((callbackfn: (previousValue: U, currentValue: number, currentIndex: number, array: Array) => U, initialValue: U) => U)", + "func_details":null, + "path":"[LIB] core.js", + "line":271, + "endline":273, + "start":5, + "end":8 + }, + { + "name":"reduceRight", + "type":"((callbackfn: (previousValue: number, currentValue: number, currentIndex: number, array: Array) => number) => number) & ((callbackfn: (previousValue: U, currentValue: number, currentIndex: number, array: Array) => U, initialValue: U) => U)", + "func_details":null, + "path":"[LIB] core.js", + "line":278, + "endline":280, + "start":5, + "end":8 + }, + { + "name":"reverse", + "type":"() => Array", + "func_details":{"return_type":"Array","params":[]}, + "path":"[LIB] core.js", + "line":285, + "endline":285, + "start":5, + "end":23 + }, + { + "name":"shift", + "type":"() => number", + "func_details":{"return_type":"number","params":[]}, + "path":"[LIB] core.js", + "line":286, + "endline":286, + "start":5, + "end":14 + }, + { + "name":"slice", + "type":"(start?: number, end?: number) => Array", + "func_details":{ + "return_type":"Array", + "params":[{"name":"start?","type":"number"},{"name":"end?","type":"number"}] + }, + "path":"[LIB] core.js", + "line":251, + "endline":251, + "start":5, + "end":49 + }, + { + "name":"some", + "type":"(callbackfn: (value: number, index: number, array: Array) => mixed, thisArg?: any) => boolean", + "func_details":{ + "return_type":"boolean", + "params":[ + { + "name":"callbackfn", + "type":"(value: number, index: number, array: Array) => mixed" + }, + {"name":"thisArg?","type":"any"} + ] + }, + "path":"[LIB] core.js", + "line":287, + "endline":287, + "start":5, + "end":97 + }, + { + "name":"sort", + "type":"(compareFn?: (a: number, b: number) => number) => Array", + "func_details":{ + "return_type":"Array", + "params":[{"name":"compareFn?","type":"(a: number, b: number) => number"}] + }, + "path":"[LIB] core.js", + "line":288, + "endline":288, + "start":5, + "end":54 + }, + { + "name":"splice", + "type":"(start: number, deleteCount?: number, ...items: Array) => Array", + "func_details":{ + "return_type":"Array", + "params":[ + {"name":"start","type":"number"}, + {"name":"deleteCount?","type":"number"}, + 
{"name":"...items","type":"Array"} + ] + }, + "path":"[LIB] core.js", + "line":289, + "endline":289, + "start":5, + "end":77 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":219, + "endline":219, + "start":5, + "end":28 + }, + { + "name":"unshift", + "type":"(...items: Array) => number", + "func_details":{"return_type":"number","params":[{"name":"...items","type":"Array"}]}, + "path":"[LIB] core.js", + "line":290, + "endline":290, + "start":5, + "end":39 + }, + { + "name":"values", + "type":"() => Iterator", + "func_details":{"return_type":"Iterator","params":[]}, + "path":"[LIB] core.js", + "line":253, + "endline":253, + "start":5, + "end":25 + } + ] +} +infer.js = { + "result":[ + { + "name":"b", + "type":"number", + "func_details":null, + "path":"infer.js", + "line":3, + "endline":3, + "start":26, + "end":27 + }, + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + "start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + "func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + } + ] +} +eval_predicate.js = { + "result":[ + { + "name":"a", + "type":"number", + "func_details":null, + "path":"eval_predicate.js", + "line":3, + "endline":3, + "start":23, + "end":28 + }, + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + "start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + 
"func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + } + ] +} +eval_destructor.js = { + "result":[ + { + "name":"a", + "type":"number", + "func_details":null, + "path":"eval_destructor.js", + "line":3, + "endline":3, + "start":24, + "end":29 + }, + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + "start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + "func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + } + ] +} +poly.js = { + "result":[ + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + "start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + "func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + }, + { + "name":"x", + "type":"number", + "func_details":null, + "path":"poly.js", + "line":4, + "endline":4, + "start":18, + "end":23 + }, + { + "name":"y", + "type":"number", + "func_details":null, + "path":"poly.js", + "line":4, + "endline":4, + "start":18, + "end":23 + } + ] +} +poly_no_args.js = { + "result":[ + { + "name":"hasOwnProperty", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":80, + "endline":80, + "start":5, + "end":40 + }, + { + "name":"isPrototypeOf", + "type":"(o: mixed) => boolean", + 
"func_details":{"return_type":"boolean","params":[{"name":"o","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":81, + "endline":81, + "start":5, + "end":36 + }, + { + "name":"propertyIsEnumerable", + "type":"(prop: mixed) => boolean", + "func_details":{"return_type":"boolean","params":[{"name":"prop","type":"mixed"}]}, + "path":"[LIB] core.js", + "line":82, + "endline":82, + "start":5, + "end":46 + }, + { + "name":"toLocaleString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":83, + "endline":83, + "start":5, + "end":28 + }, + { + "name":"toString", + "type":"() => string", + "func_details":{"return_type":"string","params":[]}, + "path":"[LIB] core.js", + "line":84, + "endline":84, + "start":5, + "end":22 + }, + { + "name":"valueOf", + "type":"() => mixed", + "func_details":{"return_type":"mixed","params":[]}, + "path":"[LIB] core.js", + "line":85, + "endline":85, + "start":5, + "end":20 + }, + { + "name":"x", + "type":"number", + "func_details":null, + "path":"poly_no_args.js", + "line":3, + "endline":3, + "start":10, + "end":15 } ] } diff --git a/tests/autocomplete/eval_destructor.js b/tests/autocomplete/eval_destructor.js new file mode 100644 index 00000000000..a942791e6e6 --- /dev/null +++ b/tests/autocomplete/eval_destructor.js @@ -0,0 +1,5 @@ +//@flow + +type t = $ReadOnly<{a: number}>; +declare var x: t; +x. diff --git a/tests/autocomplete/eval_predicate.js b/tests/autocomplete/eval_predicate.js new file mode 100644 index 00000000000..a6009ec8a40 --- /dev/null +++ b/tests/autocomplete/eval_predicate.js @@ -0,0 +1,5 @@ +//@flow + +type s = $Refine<{|a: number|}, $Pred<1>, 1>; +declare var y: s +y. diff --git a/tests/autocomplete/generic_alias.js b/tests/autocomplete/generic_alias.js new file mode 100644 index 00000000000..375384b2e89 --- /dev/null +++ b/tests/autocomplete/generic_alias.js @@ -0,0 +1,7 @@ +/** + * + */ +type Person = { name: A }; +declare var obj: Person; + +obj. diff --git a/tests/autocomplete/indirect_array.js b/tests/autocomplete/indirect_array.js new file mode 100644 index 00000000000..ab95ba7a3b9 --- /dev/null +++ b/tests/autocomplete/indirect_array.js @@ -0,0 +1,10 @@ +//@flow + +declare var y: ?Array; + +y. + +opaque type t: Array = Array; +declare var z: t; + +z. diff --git a/tests/autocomplete/infer.js b/tests/autocomplete/infer.js new file mode 100644 index 00000000000..b945d0b0f21 --- /dev/null +++ b/tests/autocomplete/infer.js @@ -0,0 +1,4 @@ +//@flow + +var x: {a: ?*} = {a: {b: 42}} +x.a. diff --git a/tests/autocomplete/jsx3.js b/tests/autocomplete/jsx3.js new file mode 100644 index 00000000000..eeaab6e561f --- /dev/null +++ b/tests/autocomplete/jsx3.js @@ -0,0 +1,10 @@ +// @flow + +var React = require('react'); + +type Props = $ReadOnly<{ x: number, y: string }>; + +class C extends React.Component { + props: Props; +} + = {x: S, y: T}; +declare var a: T; +a. diff --git a/tests/autocomplete/poly_no_args.js b/tests/autocomplete/poly_no_args.js new file mode 100644 index 00000000000..747569e6752 --- /dev/null +++ b/tests/autocomplete/poly_no_args.js @@ -0,0 +1,5 @@ +//@flow + +type T = {x: S}; +declare var a: T<>; +a. 
diff --git a/tests/autocomplete/test.sh b/tests/autocomplete/test.sh index df48dda4aa3..4a46db962f5 100755 --- a/tests/autocomplete/test.sh +++ b/tests/autocomplete/test.sh @@ -1,4 +1,6 @@ #!/bin/bash +# shellcheck disable=SC2094 + printf "foo_parse_fail.js = " assert_ok \ "$FLOW" autocomplete --strip-root foo_parse_fail.js 10 17 < foo_parse_fail.js @@ -67,6 +69,10 @@ printf "jsx2.js = " assert_ok \ "$FLOW" autocomplete --strip-root --pretty jsx2.js 8 11 < jsx2.js +printf "jsx3.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty jsx3.js 10 4 < jsx3.js + printf "customfun.js = " assert_ok \ "$FLOW" autocomplete --strip-root --pretty customfun.js 6 1 < customfun.js @@ -98,3 +104,47 @@ assert_ok \ printf "idx.js = " assert_ok \ "$FLOW" autocomplete --strip-root --pretty idx.js 12 28 < idx.js + +printf "generic_alias.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty generic_alias.js 7 5 < generic_alias.js + +printf "object_literal.js:5:16 = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty object_literal.js 5 16 < object_literal.js + +printf "object_literal.js:7:17 = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty object_literal.js 7 17 < object_literal.js + +printf "optional_object.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty optional_object.js 3 5 < optional_object.js + +printf "indirect_array.js:5:3 = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty indirect_array.js 5 3 < indirect_array.js + +printf "indirect_array.js:10:3 = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty indirect_array.js 10 3 < indirect_array.js + +printf "infer.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty infer.js 4 5 < infer.js + +printf "eval_predicate.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty eval_predicate.js 5 3 < eval_predicate.js + +printf "eval_destructor.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty eval_destructor.js 5 3 < eval_destructor.js + +printf "poly.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty poly.js 5 3 < poly.js + +printf "poly_no_args.js = " +assert_ok \ + "$FLOW" autocomplete --strip-root --pretty poly_no_args.js 5 3 < poly_no_args.js diff --git a/tests/autofix-array-literal/.flowconfig b/tests/autofix-array-literal/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-array-literal/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-array-literal/.testconfig b/tests/autofix-array-literal/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-array-literal/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-array-literal/a.js b/tests/autofix-array-literal/a.js new file mode 100644 index 00000000000..8be6f3d1f88 --- /dev/null +++ b/tests/autofix-array-literal/a.js @@ -0,0 +1,16 @@ +// @flow + +type Inexact = { data: string; type: string; } +type Exact = {| data: string; type: string; |} + +function foo() { + return [ + {data: "", type: ""}, + {data: "", type: ""}, + ]; +} + +(foo(): Array); +(foo(): Array); + +module.exports = foo; diff --git a/tests/autofix-array-literal/autofix-array-literal.exp b/tests/autofix-array-literal/autofix-array-literal.exp new file mode 100644 index 00000000000..3546764e233 --- /dev/null +++ b/tests/autofix-array-literal/autofix-array-literal.exp @@ -0,0 
+1,54 @@ +> insert-type a.js 6 15 --strategy=temporary +> cat a.js +// @flow + +type Inexact = { data: string; type: string; } +type Exact = {| data: string; type: string; |} + +function foo(): $TEMPORARY$array<$TEMPORARY$object<{data: string, type: string}>> { + return [ + {data: "", type: ""}, + {data: "", type: ""}, + ]; +} + +(foo(): Array); +(foo(): Array); + +module.exports = foo; +> insert-type b.js 6 15 --strategy=generalize +> cat b.js +// @flow + +type Inexact = { data: string; type: string; } +type Exact = {| data: string; type: string; |} + +function foo(): Array<{data: string, type: string}> { + return [ + {data: "", type: ""}, + {data: "", type: ""}, + ]; +} + +(foo(): Array); + +module.exports = foo; +> insert-type c.js 6 15 --strategy=specialize +> cat c.js +// @flow + +type Inexact = { data: string; type: string; } +type Exact = {| data: string; type: string; |} + +function foo(): Array<{|data: string, type: string|}> { + return [ + {data: "", type: ""}, + {data: "", type: ""}, + ]; +} + +(foo(): Array); + +module.exports = foo; +> flow status +No errors! diff --git a/tests/autofix-array-literal/b.js b/tests/autofix-array-literal/b.js new file mode 100644 index 00000000000..38051068416 --- /dev/null +++ b/tests/autofix-array-literal/b.js @@ -0,0 +1,15 @@ +// @flow + +type Inexact = { data: string; type: string; } +type Exact = {| data: string; type: string; |} + +function foo() { + return [ + {data: "", type: ""}, + {data: "", type: ""}, + ]; +} + +(foo(): Array); + +module.exports = foo; diff --git a/tests/autofix-array-literal/c.js b/tests/autofix-array-literal/c.js new file mode 100644 index 00000000000..6071ea0548f --- /dev/null +++ b/tests/autofix-array-literal/c.js @@ -0,0 +1,15 @@ +// @flow + +type Inexact = { data: string; type: string; } +type Exact = {| data: string; type: string; |} + +function foo() { + return [ + {data: "", type: ""}, + {data: "", type: ""}, + ]; +} + +(foo(): Array); + +module.exports = foo; diff --git a/tests/autofix-array-literal/test.sh b/tests/autofix-array-literal/test.sh new file mode 100755 index 00000000000..41ddd7cdded --- /dev/null +++ b/tests/autofix-array-literal/test.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +test_file(){ + FILE=$1 + echo "> insert-type" "$@" + assert_ok "$FLOW" autofix insert-type --in-place "$@" + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" +} + +test_file a.js 6 15 --strategy=temporary +test_file b.js 6 15 --strategy=generalize +test_file c.js 6 15 --strategy=specialize + +echo "> flow status" +assert_ok "$FLOW" status --strip-root diff --git a/tests/autofix-boolean-literals/.flowconfig b/tests/autofix-boolean-literals/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-boolean-literals/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-boolean-literals/.testconfig b/tests/autofix-boolean-literals/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-boolean-literals/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-boolean-literals/a.js b/tests/autofix-boolean-literals/a.js new file mode 100644 index 00000000000..eeee964e4ab --- /dev/null +++ b/tests/autofix-boolean-literals/a.js @@ -0,0 +1,5 @@ +// @flow + +declare var cond: boolean; + +module.exports = () => (cond ? 
true : false); diff --git a/tests/autofix-boolean-literals/autofix-boolean-literals.exp b/tests/autofix-boolean-literals/autofix-boolean-literals.exp new file mode 100644 index 00000000000..55a33f62570 --- /dev/null +++ b/tests/autofix-boolean-literals/autofix-boolean-literals.exp @@ -0,0 +1,32 @@ +> autofix insert-type a.js 5 20 --strategy=generalize +> cat a.js +// @flow + +declare var cond: boolean; + +module.exports = (): boolean => (cond ? true : false); +> autofix exports tmp/a.js +> cat tmp/a.js +// @flow + +declare var cond: boolean; + +module.exports = (): boolean => (cond ? true : false); +> autofix insert-type b.js 5 20 --strategy=specialize +> cat b.js +// @flow + +declare var cond: boolean; + +module.exports = (): boolean => (cond ? true : false); +> autofix exports tmp/b.js +> cat tmp/b.js +// @flow + +declare var cond: boolean; + +module.exports = (): boolean => (cond ? true : false); +> flow status +No errors! +> flow status tmp +No errors! diff --git a/tests/autofix-boolean-literals/b.js b/tests/autofix-boolean-literals/b.js new file mode 100644 index 00000000000..eeee964e4ab --- /dev/null +++ b/tests/autofix-boolean-literals/b.js @@ -0,0 +1,5 @@ +// @flow + +declare var cond: boolean; + +module.exports = () => (cond ? true : false); diff --git a/tests/autofix-boolean-literals/test.sh b/tests/autofix-boolean-literals/test.sh new file mode 100755 index 00000000000..4a326179480 --- /dev/null +++ b/tests/autofix-boolean-literals/test.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +mkdir tmp || rm tmp/* +cp .flowconfig tmp/.flowconfig +start_flow tmp + +test_file () { + local FILE=$1 + cp "$FILE" "tmp/$FILE" + + echo "> autofix insert-type" "$@" + assert_ok "$FLOW" autofix insert-type --in-place "$@" + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" + + echo "> autofix exports tmp/$FILE" + assert_ok "$FLOW" autofix exports --in-place "tmp/$FILE" + assert_ok "$FLOW" force-recheck "tmp/$FILE" + echo "> cat tmp/$FILE" + cat "tmp/$FILE" +} + +test_file a.js 5 20 --strategy=generalize +test_file b.js 5 20 --strategy=specialize + +echo "> flow status" +assert_ok "$FLOW" status + +echo "> flow status tmp" +assert_ok "$FLOW" status tmp + +assert_ok "$FLOW" stop tmp diff --git a/tests/autofix-class/.flowconfig b/tests/autofix-class/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-class/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-class/.testconfig b/tests/autofix-class/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-class/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-class/a.js b/tests/autofix-class/a.js new file mode 100644 index 00000000000..be24009def1 --- /dev/null +++ b/tests/autofix-class/a.js @@ -0,0 +1,8 @@ +// @flow + +class C {} +// XYZXYZ +export default class { + array = []; + num = 1; +} diff --git a/tests/autofix-class/autofix-class.exp b/tests/autofix-class/autofix-class.exp new file mode 100644 index 00000000000..9a7306322f8 --- /dev/null +++ b/tests/autofix-class/autofix-class.exp @@ -0,0 +1,70 @@ +> flow autofix insert-type a.js 6 3 6 14 +> flow autofix insert-type a.js 7 3 7 11 +> cat a.js +// @flow + +class C {} +// XYZXYZ +export default class { + array: Array = []; + num: number = 1; +} +> flow autofix exports tmp/a.js +> cat tmp/a.js +// @flow + +class C {} +// XYZXYZ +export 
default class { + array: Array = []; + num: number = 1; +} +> flow autofix insert-type b.js 4 6 4 6 +> cat b.js +// @flow + +class C { + m(): number { + return 1; + } +} +module.exports = C; +> flow autofix exports tmp/b.js +> cat tmp/b.js +// @flow + +class C { + m(): number { + return 1; + } +} +module.exports = C; +> flow autofix insert-type c.js 4 3 4 24 +> flow autofix insert-type c.js 9 17 9 17 +> cat c.js +// @flow + +class A { + f: ((x: string) => string) = (x: string) => x; +} + +module.exports = { + a: A, + b: (x: string): string => x, +}; +> flow autofix exports tmp/c.js +> cat tmp/c.js +// @flow + +class A { + f: ((x: string) => string) = (x: string) => x; +} + +module.exports = { + a: A, + b: (x: string): string => x, +}; +> flow status +No errors! +> flow status tmp +No errors! diff --git a/tests/autofix-class/b.js b/tests/autofix-class/b.js new file mode 100644 index 00000000000..ba7cf883a07 --- /dev/null +++ b/tests/autofix-class/b.js @@ -0,0 +1,8 @@ +// @flow + +class C { + m() { + return 1; + } +} +module.exports = C; diff --git a/tests/autofix-class/c.js b/tests/autofix-class/c.js new file mode 100644 index 00000000000..db26813556c --- /dev/null +++ b/tests/autofix-class/c.js @@ -0,0 +1,10 @@ +// @flow + +class A { + f = (x: string) => x; +} + +module.exports = { + a: A, + b: (x: string) => x, +}; diff --git a/tests/autofix-class/test.sh b/tests/autofix-class/test.sh new file mode 100755 index 00000000000..54605885289 --- /dev/null +++ b/tests/autofix-class/test.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +mkdir tmp || rm tmp/* +cp .flowconfig tmp/.flowconfig +start_flow tmp + +do_file () { + FILE=$1; shift + + cp "$FILE" "tmp/$FILE" + + while [[ $# -ge 4 ]]; do + local a1=$1; shift; + local a2=$1; shift; + local a3=$1; shift; + local a4=$1; shift; + echo "> flow autofix insert-type $FILE $a1 $a2 $a3 $a4" + assert_ok "$FLOW" autofix insert-type --in-place "$FILE" "$a1" "$a2" "$a3" "$a4" + done + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" + + echo "> flow autofix exports tmp/$FILE" + assert_ok "$FLOW" autofix exports --in-place "tmp/$FILE" + assert_ok "$FLOW" force-recheck "tmp/$FILE" + echo "> cat tmp/$FILE" + cat "tmp/$FILE" +} + +do_file a.js 6 3 6 14 7 3 7 11 +do_file b.js 4 6 4 6 +do_file c.js 4 3 4 24 9 17 9 17 + +echo "> flow status" +assert_ok "$FLOW" status + +echo "> flow status tmp" +assert_ok "$FLOW" status tmp + +assert_ok "$FLOW" stop tmp diff --git a/tests/autofix-dotFlow/.flowconfig b/tests/autofix-dotFlow/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-dotFlow/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-dotFlow/.testconfig b/tests/autofix-dotFlow/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-dotFlow/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-dotFlow/a.js b/tests/autofix-dotFlow/a.js new file mode 100644 index 00000000000..a171b962dc9 --- /dev/null +++ b/tests/autofix-dotFlow/a.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = () => 1; diff --git a/tests/autofix-dotFlow/a.js.flow b/tests/autofix-dotFlow/a.js.flow new file mode 100644 index 00000000000..a171b962dc9 --- /dev/null +++ b/tests/autofix-dotFlow/a.js.flow @@ -0,0 +1,3 @@ +// @flow + +module.exports = () => 1; diff --git a/tests/autofix-dotFlow/autofix-dotFlow.exp 
b/tests/autofix-dotFlow/autofix-dotFlow.exp new file mode 100644 index 00000000000..4ace44e4e37 --- /dev/null +++ b/tests/autofix-dotFlow/autofix-dotFlow.exp @@ -0,0 +1,10 @@ +> cat a.js +// @flow + +module.exports = (): number => 1; +> cat a.js.flow +// @flow + +module.exports = (): number => 1; +> flow status +No errors! diff --git a/tests/autofix-dotFlow/dotFlow.exp b/tests/autofix-dotFlow/dotFlow.exp new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/autofix-dotFlow/test.sh b/tests/autofix-dotFlow/test.sh new file mode 100755 index 00000000000..bd4ee65be91 --- /dev/null +++ b/tests/autofix-dotFlow/test.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +assert_ok "$FLOW" autofix insert-type --in-place a.js 3 20 +echo "> cat a.js" +cat a.js + +assert_ok "$FLOW" autofix insert-type --in-place a.js.flow 3 20 +echo "> cat a.js.flow" +cat a.js.flow + +assert_ok "$FLOW" force-recheck a.js a.js.flow +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-empty-array/.flowconfig b/tests/autofix-empty-array/.flowconfig new file mode 100644 index 00000000000..56042694641 --- /dev/null +++ b/tests/autofix-empty-array/.flowconfig @@ -0,0 +1,14 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +no_flowlib=false +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-empty-array/.testconfig b/tests/autofix-empty-array/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-empty-array/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-empty-array/a.js b/tests/autofix-empty-array/a.js new file mode 100644 index 00000000000..67eb3d480a8 --- /dev/null +++ b/tests/autofix-empty-array/a.js @@ -0,0 +1,11 @@ +// @flow + +const a0 = []; + +const a1 = []; +a1.push(1); + +const a2 = []; +(a2[0]: number); + +module.exports = { a0, a1, a2}; diff --git a/tests/autofix-empty-array/autofix-empty-array.exp b/tests/autofix-empty-array/autofix-empty-array.exp new file mode 100644 index 00000000000..7bd35f9be1a --- /dev/null +++ b/tests/autofix-empty-array/autofix-empty-array.exp @@ -0,0 +1,12 @@ +// @flow + +const a0: Array = []; + +const a1: Array = []; +a1.push(1); + +const a2: Array = []; +(a2[0]: number); + +module.exports = { a0, a1, a2}; +No errors! 
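The expected output above is produced by pointing insert-type at each empty-array initializer in a.js; the test script in the next hunk drives the same commands through the shared assert_ok harness. A minimal sketch of that run outside the harness, assuming a plain flow binary on PATH instead of the harness's "$FLOW" (positions are the ones the test uses):

    flow autofix insert-type --in-place a.js 3 12 3 14   # the [] initializer of a0 on line 3
    flow autofix insert-type --in-place a.js 5 12 5 14   # the [] initializer of a1 on line 5
    flow autofix insert-type --in-place a.js 8 12 8 14   # the [] initializer of a2 on line 8
    flow force-recheck a.js
    flow status --strip-root                             # expected to end with "No errors!"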
diff --git a/tests/autofix-empty-array/test.sh b/tests/autofix-empty-array/test.sh new file mode 100755 index 00000000000..b51321209e5 --- /dev/null +++ b/tests/autofix-empty-array/test.sh @@ -0,0 +1,8 @@ +#!/bin/bash +assert_ok "$FLOW" autofix insert-type --in-place a.js 3 12 3 14 +assert_ok "$FLOW" autofix insert-type --in-place a.js 5 12 5 14 +assert_ok "$FLOW" autofix insert-type --in-place a.js 8 12 8 14 + +cat a.js +assert_ok "$FLOW" force-recheck a.js +assert_ok "$FLOW" status --strip-root diff --git a/tests/autofix-empty-object/.flowconfig b/tests/autofix-empty-object/.flowconfig new file mode 100644 index 00000000000..7bb071dadbd --- /dev/null +++ b/tests/autofix-empty-object/.flowconfig @@ -0,0 +1,12 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-empty-object/.testconfig b/tests/autofix-empty-object/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-empty-object/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-empty-object/a.js b/tests/autofix-empty-object/a.js new file mode 100644 index 00000000000..6fe209d6bc4 --- /dev/null +++ b/tests/autofix-empty-object/a.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = {}; diff --git a/tests/autofix-empty-object/autofix-empty-object.exp b/tests/autofix-empty-object/autofix-empty-object.exp new file mode 100644 index 00000000000..ed3adee67c2 --- /dev/null +++ b/tests/autofix-empty-object/autofix-empty-object.exp @@ -0,0 +1,65 @@ +> insert-type a.js 3 18 3 20 +cat a.js +// @flow + +module.exports = ({}: {}); +> insert-type b.js 3 23 3 25 +cat b.js +// @flow + +module.exports = { f: ({}: {}) }; +> insert-type c.js 3 18 3 20 +cat c.js +// @flow + +const obj = { f: ({}: {}) }; + +obj.f = { x: 1 }; +obj.f = { x: "a" }; + +module.exports = obj; +> insert-type d.js 3 13 3 16 +cat d.js +// @flow + +const obj: {f: {x: number}, g: {x: string}} = { }; + +obj.f = { x: 1 }; +obj["g"] = { x: "a" }; + +module.exports = obj; +> insert-type e.js 3 13 3 15 +cat e.js +// @flow + +const obj: {a: number} = {}; +declare function foo(x: { a: number }): void; +foo(obj); +module.exports = obj; +> insert-type f.js 3 13 3 15 +cat f.js +// @flow + +const obj: {f: number} = {}; +(obj.f: number); +module.exports = obj; +> insert-type g.js 5 13 5 15 +cat g.js +// @flow + +declare function foo(x: X): void; + +const obj: {a: 1} = {}; +foo(obj); +module.exports = obj; +> insert-type h.js 4 5 4 8 +cat h.js +// @flow + + +let obj: {x: number} = {}; +obj.x = 5; + +module.exports = obj; +> flow status +No errors! 
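Each empty-object case above relies on a later use of the object (an assignment, a cast, or a call) to pin down its type before insert-type annotates the declaration. The per-file positions are driven by the update_in_place helper in the suite's test.sh, added later in this diff; a standalone sketch of that helper, with an illustrative name and a plain flow binary instead of the assert_ok/"$FLOW" wrappers, might look like:

    annotate_in_place () {
      local FILE=$1
      # annotate the given position in place, e.g. annotate_in_place d.js 3 13 3 16
      flow autofix insert-type --in-place "$@"
      flow force-recheck "$FILE"
      cat "$FILE"   # show the annotated source
    }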
diff --git a/tests/autofix-empty-object/b.js b/tests/autofix-empty-object/b.js new file mode 100644 index 00000000000..de4ef449d8d --- /dev/null +++ b/tests/autofix-empty-object/b.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = { f: {} }; diff --git a/tests/autofix-empty-object/c.js b/tests/autofix-empty-object/c.js new file mode 100644 index 00000000000..e4c4e844ca5 --- /dev/null +++ b/tests/autofix-empty-object/c.js @@ -0,0 +1,8 @@ +// @flow + +const obj = { f: {} }; + +obj.f = { x: 1 }; +obj.f = { x: "a" }; + +module.exports = obj; diff --git a/tests/autofix-empty-object/d.js b/tests/autofix-empty-object/d.js new file mode 100644 index 00000000000..c02d9e29a5d --- /dev/null +++ b/tests/autofix-empty-object/d.js @@ -0,0 +1,8 @@ +// @flow + +const obj = { }; + +obj.f = { x: 1 }; +obj["g"] = { x: "a" }; + +module.exports = obj; diff --git a/tests/autofix-empty-object/e.js b/tests/autofix-empty-object/e.js new file mode 100644 index 00000000000..fdb20527cd8 --- /dev/null +++ b/tests/autofix-empty-object/e.js @@ -0,0 +1,6 @@ +// @flow + +const obj = {}; +declare function foo(x: { a: number }): void; +foo(obj); +module.exports = obj; diff --git a/tests/autofix-empty-object/f.js b/tests/autofix-empty-object/f.js new file mode 100644 index 00000000000..fa11a3ac9de --- /dev/null +++ b/tests/autofix-empty-object/f.js @@ -0,0 +1,5 @@ +// @flow + +const obj = {}; +(obj.f: number); +module.exports = obj; diff --git a/tests/autofix-empty-object/g.js b/tests/autofix-empty-object/g.js new file mode 100644 index 00000000000..279c01b6b9b --- /dev/null +++ b/tests/autofix-empty-object/g.js @@ -0,0 +1,7 @@ +// @flow + +declare function foo(x: X): void; + +const obj = {}; +foo(obj); +module.exports = obj; diff --git a/tests/autofix-empty-object/h.js b/tests/autofix-empty-object/h.js new file mode 100644 index 00000000000..848be9a45ad --- /dev/null +++ b/tests/autofix-empty-object/h.js @@ -0,0 +1,7 @@ +// @flow + + +let obj = {}; +obj.x = 5; + +module.exports = obj; diff --git a/tests/autofix-empty-object/test.sh b/tests/autofix-empty-object/test.sh new file mode 100755 index 00000000000..c7daef4655f --- /dev/null +++ b/tests/autofix-empty-object/test.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +update_in_place(){ + local FILE=$1; + echo "> insert-type" "$@" + assert_ok "$FLOW" autofix insert-type --in-place "$@" + assert_ok "$FLOW" force-recheck "$FILE" + echo "cat $FILE" + cat "$FILE" +} + +update_in_place a.js 3 18 3 20 +update_in_place b.js 3 23 3 25 +update_in_place c.js 3 18 3 20 +update_in_place d.js 3 13 3 16 +update_in_place e.js 3 13 3 15 +update_in_place f.js 3 13 3 15 +update_in_place g.js 5 13 5 15 +update_in_place h.js 4 5 4 8 + +echo "> flow status" +assert_ok "$FLOW" status --strip-root diff --git a/tests/autofix-exports/.flowconfig b/tests/autofix-exports/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-exports/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-exports/.testconfig b/tests/autofix-exports/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-exports/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-exports/autofix-exports.exp b/tests/autofix-exports/autofix-exports.exp new file mode 100644 index 00000000000..f04ca27ccb7 --- /dev/null +++ b/tests/autofix-exports/autofix-exports.exp @@ -0,0 +1,26 @@ +> autofix exports f.js +> cat f.js 
+// @flow + +declare var cond : bool + +const foo = 42; +let foo2: number = foo; +function bar(x: any): number { + return x + x +}; +bar(foo2); +bar(foo); + +module.exports = [{ + f1: (): string => (cond ? "0" : "1"), + f2: (): string => (cond ? "A0" : "A1"), + f3: (): string => (cond ? "Aa" : "Bb"), + f4: (): string => (cond ? "A_" : "B_"), +}, + (): string => "string", + bar, + foo2, + (bar(foo2): number)] +> flow status +No errors! diff --git a/tests/autofix-exports/f.js b/tests/autofix-exports/f.js new file mode 100644 index 00000000000..1ae6607afea --- /dev/null +++ b/tests/autofix-exports/f.js @@ -0,0 +1,22 @@ +// @flow + +declare var cond : bool + +const foo = 42; +let foo2 = foo; +function bar(x): number { + return x + x +}; +bar(foo2); +bar(foo); + +module.exports = [{ + f1: () => (cond ? "0" : "1"), + f2: () => (cond ? "A0" : "A1"), + f3: () => (cond ? "Aa" : "Bb"), + f4: () => (cond ? "A_" : "B_"), +}, + () => "string", + bar, + foo2, + bar(foo2)] diff --git a/tests/autofix-exports/test.sh b/tests/autofix-exports/test.sh new file mode 100755 index 00000000000..9d3d848929f --- /dev/null +++ b/tests/autofix-exports/test.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +do_file(){ + local FILE="$1" + echo "> autofix exports" "$@" + assert_ok "$FLOW" autofix exports --in-place "$@" + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" +} + +do_file f.js + +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-fbt/.flowconfig b/tests/autofix-fbt/.flowconfig new file mode 100644 index 00000000000..90df229f066 --- /dev/null +++ b/tests/autofix-fbt/.flowconfig @@ -0,0 +1,16 @@ +[ignore] + +[include] + +[libs] +lib + +[lints] + +[options] +no_flowlib=false +facebook.fbt=FbtElement +experimental.well_formed_exports=true +experimental.types_first=true + +[strict] diff --git a/tests/autofix-fbt/.testconfig b/tests/autofix-fbt/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-fbt/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-fbt/a.js b/tests/autofix-fbt/a.js new file mode 100644 index 00000000000..8f0988c0864 --- /dev/null +++ b/tests/autofix-fbt/a.js @@ -0,0 +1,6 @@ +// @flow + +const React = require("react"); + +const foo = () => ; +module.exports = foo(); diff --git a/tests/autofix-fbt/autofix-fbt.exp b/tests/autofix-fbt/autofix-fbt.exp new file mode 100644 index 00000000000..448525c35e3 --- /dev/null +++ b/tests/autofix-fbt/autofix-fbt.exp @@ -0,0 +1,18 @@ +> cat a.js +// @flow + +const React = require("react"); + +const foo = () => ; +module.exports = (foo(): FbtResultBase); +> cat tmp/a.js +// @flow + +const React = require("react"); + +const foo = () => ; +module.exports = (foo(): FbtResultBase); +> flow status +No errors! +> flow status tmp +No errors! 
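The fbt suite depends on the facebook.fbt=FbtElement option in its .flowconfig and on the small lib/fbt.js declaration added below; with those in place, both commands print the result of foo() as FbtResultBase, as the expected output above shows. A rough sketch of the two invocations the test script makes (positions as in test.sh; flow stands in for "$FLOW"):

    flow autofix insert-type --in-place a.js 6 18 6 23   # annotate the foo() call in module.exports
    flow autofix exports --in-place tmp/a.js             # annotate the exports of the untouched copy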
diff --git a/tests/autofix-fbt/lib/fbt.js b/tests/autofix-fbt/lib/fbt.js new file mode 100644 index 00000000000..5bf2237ebed --- /dev/null +++ b/tests/autofix-fbt/lib/fbt.js @@ -0,0 +1,3 @@ +// @flow +declare type FbtElement = FbtResultBase; +declare class FbtResultBase {} diff --git a/tests/autofix-fbt/test.sh b/tests/autofix-fbt/test.sh new file mode 100755 index 00000000000..46b4a621069 --- /dev/null +++ b/tests/autofix-fbt/test.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +mkdir tmp || rm -r tmp/* +cp -r .flowconfig lib tmp +start_flow tmp + +cp a.js tmp/a.js +assert_ok "$FLOW" autofix insert-type --in-place a.js 6 18 6 23 +assert_ok "$FLOW" force-recheck a.js +echo "> cat a.js" +cat a.js + +assert_ok "$FLOW" autofix exports --in-place tmp/a.js +assert_ok "$FLOW" force-recheck tmp/a.js +echo "> cat tmp/a.js" +cat tmp/a.js + +echo "> flow status" +assert_ok "$FLOW" status + +echo "> flow status tmp" +assert_ok "$FLOW" status tmp + +assert_ok "$FLOW" stop tmp diff --git a/tests/autofix-function/.flowconfig b/tests/autofix-function/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-function/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-function/.testconfig b/tests/autofix-function/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-function/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-function/a.js b/tests/autofix-function/a.js new file mode 100644 index 00000000000..6185c20e6a8 --- /dev/null +++ b/tests/autofix-function/a.js @@ -0,0 +1,5 @@ +// @flow + +export function f() { + return 1; +} diff --git a/tests/autofix-function/autofix-function.exp b/tests/autofix-function/autofix-function.exp new file mode 100644 index 00000000000..6c923cdd642 --- /dev/null +++ b/tests/autofix-function/autofix-function.exp @@ -0,0 +1,32 @@ +> flow autofix insert-type a.js 3 20 +> cat a.js +// @flow + +export function f(): number { + return 1; +} +> flow autofix exports tmp/a.js +> cat tmp/a.js +// @flow + +export function f(): number { + return 1; +} +> flow autofix insert-type b.js 3 25 +> cat b.js +// @flow + +const functionArray = (): Array<(x: number) => number> => ([(x: number) => x]) + +module.exports = functionArray; +> flow autofix exports tmp/b.js +> cat tmp/b.js +// @flow + +const functionArray = (): Array<(x: number) => number> => ([(x: number) => x]) + +module.exports = functionArray; +> flow status +No errors! +> flow status tmp +No errors! 
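As in the other autofix suites, the script added below exercises both commands on each file: insert-type at a single point adds the missing return annotation, and autofix exports on an untouched copy is expected to produce the same annotated source. Roughly, for a.js (point 3 20 sits just after the parameter list of f), assuming flow on PATH:

    flow autofix insert-type --in-place a.js 3 20   # adds ": number" to the exported function f
    flow autofix exports --in-place tmp/a.js        # annotates everything exported from the copy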
diff --git a/tests/autofix-function/b.js b/tests/autofix-function/b.js new file mode 100644 index 00000000000..d47b1979490 --- /dev/null +++ b/tests/autofix-function/b.js @@ -0,0 +1,5 @@ +// @flow + +const functionArray = () => ([(x: number) => x]) + +module.exports = functionArray; diff --git a/tests/autofix-function/test.sh b/tests/autofix-function/test.sh new file mode 100755 index 00000000000..6814414d15d --- /dev/null +++ b/tests/autofix-function/test.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +mkdir tmp || rm tmp/* +cp .flowconfig tmp/.flowconfig +start_flow tmp + +do_file () { + FILE=$1; shift + + cp "$FILE" "tmp/$FILE" + + while [[ $# -ge 2 ]]; do + local a1=$1; shift; + local a2=$1; shift; + echo "> flow autofix insert-type $FILE $a1 $a2" + assert_ok "$FLOW" autofix insert-type --in-place "$FILE" "$a1" "$a2" + done + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" + + echo "> flow autofix exports tmp/$FILE" + assert_ok "$FLOW" autofix exports --in-place "tmp/$FILE" + assert_ok "$FLOW" force-recheck "tmp/$FILE" + echo "> cat tmp/$FILE" + cat "tmp/$FILE" +} + +do_file a.js 3 20 +do_file b.js 3 25 + +echo "> flow status" +assert_ok "$FLOW" status +echo "> flow status tmp" +assert_ok "$FLOW" status tmp + +assert_ok "$FLOW" stop tmp diff --git a/tests/autofix-hard-coded-fixes/.flowconfig b/tests/autofix-hard-coded-fixes/.flowconfig new file mode 100644 index 00000000000..56042694641 --- /dev/null +++ b/tests/autofix-hard-coded-fixes/.flowconfig @@ -0,0 +1,14 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +no_flowlib=false +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-hard-coded-fixes/.testconfig b/tests/autofix-hard-coded-fixes/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-hard-coded-fixes/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-hard-coded-fixes/any-in-union.js b/tests/autofix-hard-coded-fixes/any-in-union.js new file mode 100644 index 00000000000..3ad09c615f2 --- /dev/null +++ b/tests/autofix-hard-coded-fixes/any-in-union.js @@ -0,0 +1,17 @@ +// @flow + +const React = require('react'); + +declare var x1: { f: number } | any; +declare var x2: any | { f: number }; +declare var x3: any | { f: number } | { node: React.Node }; +declare var x4: { f: number } | any | { node: React.Node }; +declare var x5: { f: number } | { node: React.Node } | any; + +module.exports = [ + () => x1, + () => x2, + () => x3, + () => x4, + () => x5, +]; diff --git a/tests/autofix-hard-coded-fixes/any-to-flowfixme-no-strict.js b/tests/autofix-hard-coded-fixes/any-to-flowfixme-no-strict.js new file mode 100644 index 00000000000..f3fb1c05117 --- /dev/null +++ b/tests/autofix-hard-coded-fixes/any-to-flowfixme-no-strict.js @@ -0,0 +1,4 @@ +// @flow + +declare var x1: { f: number } | any; +module.exports = () => x1; diff --git a/tests/autofix-hard-coded-fixes/any-to-flowfixme-strict-local.js b/tests/autofix-hard-coded-fixes/any-to-flowfixme-strict-local.js new file mode 100644 index 00000000000..e481666d141 --- /dev/null +++ b/tests/autofix-hard-coded-fixes/any-to-flowfixme-strict-local.js @@ -0,0 +1,4 @@ +// @flow strict-local + +declare var x1: { f: number } | any; +module.exports = () => x1; diff --git a/tests/autofix-hard-coded-fixes/any-to-flowfixme-strict.js b/tests/autofix-hard-coded-fixes/any-to-flowfixme-strict.js new file mode 100644 index 00000000000..a9c99cc5412 --- /dev/null +++ 
b/tests/autofix-hard-coded-fixes/any-to-flowfixme-strict.js @@ -0,0 +1,4 @@ +// @flow strict + +declare var x1: { f: number } | any; +module.exports = () => x1; diff --git a/tests/autofix-hard-coded-fixes/autofix-hard-coded-fixes.exp b/tests/autofix-hard-coded-fixes/autofix-hard-coded-fixes.exp new file mode 100644 index 00000000000..8f133035f2c --- /dev/null +++ b/tests/autofix-hard-coded-fixes/autofix-hard-coded-fixes.exp @@ -0,0 +1,107 @@ +> cat any-in-union.js +// @flow + +const React = require('react'); + +declare var x1: { f: number } | any; +declare var x2: any | { f: number }; +declare var x3: any | { f: number } | { node: React.Node }; +declare var x4: { f: number } | any | { node: React.Node }; +declare var x5: { f: number } | { node: React.Node } | any; + +module.exports = [ + (): any | {f: number} => x1, + () => x2, + () => x3, + () => x4, + () => x5, +]; +> cat any-in-union.js +// @flow + +const React = require('react'); + +declare var x1: { f: number } | any; +declare var x2: any | { f: number }; +declare var x3: any | { f: number } | { node: React.Node }; +declare var x4: { f: number } | any | { node: React.Node }; +declare var x5: { f: number } | { node: React.Node } | any; + +module.exports = [ + (): any | {f: number} => x1, + (): any | {f: number} => x2, + () => x3, + () => x4, + () => x5, +]; +> cat any-in-union.js +// @flow + +const React = require('react'); + +declare var x1: { f: number } | any; +declare var x2: any | { f: number }; +declare var x3: any | { f: number } | { node: React.Node }; +declare var x4: { f: number } | any | { node: React.Node }; +declare var x5: { f: number } | { node: React.Node } | any; + +module.exports = [ + (): any | {f: number} => x1, + (): any | {f: number} => x2, + (): any | {f: number} | {node: React.Node} => x3, + () => x4, + () => x5, +]; +> cat any-in-union.js +// @flow + +const React = require('react'); + +declare var x1: { f: number } | any; +declare var x2: any | { f: number }; +declare var x3: any | { f: number } | { node: React.Node }; +declare var x4: { f: number } | any | { node: React.Node }; +declare var x5: { f: number } | { node: React.Node } | any; + +module.exports = [ + (): any | {f: number} => x1, + (): any | {f: number} => x2, + (): any | {f: number} | {node: React.Node} => x3, + (): any | {f: number} | {node: React.Node} => x4, + () => x5, +]; +> cat any-in-union.js +// @flow + +const React = require('react'); + +declare var x1: { f: number } | any; +declare var x2: any | { f: number }; +declare var x3: any | { f: number } | { node: React.Node }; +declare var x4: { f: number } | any | { node: React.Node }; +declare var x5: { f: number } | { node: React.Node } | any; + +module.exports = [ + (): any | {f: number} => x1, + (): any | {f: number} => x2, + (): any | {f: number} | {node: React.Node} => x3, + (): any | {f: number} | {node: React.Node} => x4, + (): any | {f: number} | {node: React.Node} => x5, +]; +> cat any-to-flowfixme-no-strict.js +// @flow + +declare var x1: { f: number } | any; +module.exports = (): any | {f: number} => x1; +> cat any-to-flowfixme-strict-local.js +// @flow strict-local + +declare var x1: { f: number } | any; +module.exports = (): any | {f: number} => x1; +> cat any-to-flowfixme-strict.js +// @flow strict + +declare var x1: { f: number } | any; +module.exports = (): any | {f: number} => x1; +> flow status +No errors! 
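In the expected output above, a union that contains any keeps its any member in all three strictness modes, and, as the comment in the test script that follows notes, autofix does not try to control the order of union members. A minimal sketch of one of the invocations behind that output (flow stands in for "$FLOW"):

    flow autofix insert-type --in-place any-in-union.js 12 5   # first arrow in the exported array
    flow force-recheck any-in-union.js && flow status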
diff --git a/tests/autofix-hard-coded-fixes/test.sh b/tests/autofix-hard-coded-fixes/test.sh new file mode 100755 index 00000000000..127b1da1c0b --- /dev/null +++ b/tests/autofix-hard-coded-fixes/test.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# autofix doesn't try to control the order of unions +update_in_place(){ + local FILE=$1; + assert_ok "$FLOW" autofix insert-type --in-place "$@" + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" +} + +update_in_place any-in-union.js 12 5 +update_in_place any-in-union.js 13 5 +update_in_place any-in-union.js 14 5 +update_in_place any-in-union.js 15 5 +update_in_place any-in-union.js 16 5 +update_in_place any-to-flowfixme-no-strict.js 4 20 +update_in_place any-to-flowfixme-strict-local.js 4 20 +update_in_place any-to-flowfixme-strict.js 4 20 + +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-insert-type-error-messages/.flowconfig b/tests/autofix-insert-type-error-messages/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-insert-type-error-messages/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-insert-type-error-messages/.testconfig b/tests/autofix-insert-type-error-messages/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-insert-type-error-messages/annotated.js b/tests/autofix-insert-type-error-messages/annotated.js new file mode 100644 index 00000000000..4fcb6a6cc0a --- /dev/null +++ b/tests/autofix-insert-type-error-messages/annotated.js @@ -0,0 +1,3 @@ +// @flow + +let x : number = 5; diff --git a/tests/autofix-insert-type-error-messages/anonymous.js b/tests/autofix-insert-type-error-messages/anonymous.js new file mode 100644 index 00000000000..fc61f3849cf --- /dev/null +++ b/tests/autofix-insert-type-error-messages/anonymous.js @@ -0,0 +1,5 @@ +// @flow + +function f() { + return class {}; +} diff --git a/tests/autofix-insert-type-error-messages/autofix-insert-type-error-messages.exp b/tests/autofix-insert-type-error-messages/autofix-insert-type-error-messages.exp new file mode 100644 index 00000000000..64a9b1362c8 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/autofix-insert-type-error-messages.exp @@ -0,0 +1,18 @@ +empty.js +flow autofix insert-type: Invalid position given +empty.js +flow autofix insert-type: Did not find an annotation at 5:1-0 +annotated.js +flow autofix insert-type: Preexisiting type annotation at 3:7-14: number +big-type.js +flow autofix insert-type: The type that would be generated (size: 121) exceeds the size limit (30) +object.js +flow autofix insert-type: Multiple types possible at point: +empty-array.js +flow autofix insert-type: Failed to validate type: Empty_SomeUnknownUpper (use: GetPropT) +type-shadowing.js +flow autofix insert-type: couldn't print type: [Shadowed type parameters] Raised on type: MixedT +recursive.js +flow autofix insert-type: Failed to validate type: Recursive +anonymous.js +flow autofix insert-type: Failed to validate type: Anonymous (def: 4:10-17) diff --git a/tests/autofix-insert-type-error-messages/big-type.js b/tests/autofix-insert-type-error-messages/big-type.js new file mode 100644 index 00000000000..a26de378820 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/big-type.js @@ -0,0 +1,29 
@@ +// @flow + +let x = {a:{b:{c:{d:{e:{f:{g:{h:{i:{j: + {k:{l:{m:{n:{o:{p:{q:{r:{s:{t: + {a:{b:{c:{d:{e:{f:{g:{h:{i:{j: + {k:{l:{m:{n:{o:{p:{q:{r:{s:{t: + {a:{b:{c:{d:{e:{f:{g:{h:{i:{j: + {k:{l:{m:{n:{o:{p:{q:{r:{s:{t: + {a:{b:{c:{d:{e:{f:{g:{h:{i:{j: + {k:{l:{m:{n:{o:{p:{q:{r:{s:{t: + {a:{b:{c:{d:{e:{f:{g:{h:{i:{j: + {k:{l:{m:{n:{o:{p:{q:{r:{s:{t: + {a:{b:{c:{d:{e:{f:{g:{h:{i:{j: + {k:{l:{m:{n:{o:{p:{q:{r:{s:{t: + + "foo" + + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} + }}}}}}}}}} diff --git a/tests/autofix-insert-type-error-messages/empty-array.js b/tests/autofix-insert-type-error-messages/empty-array.js new file mode 100644 index 00000000000..504e8504e11 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/empty-array.js @@ -0,0 +1,5 @@ +// @flow + +const a3 = []; +(a3[0].f: number); +module.exports = {a3}; diff --git a/tests/autofix-insert-type-error-messages/empty.js b/tests/autofix-insert-type-error-messages/empty.js new file mode 100644 index 00000000000..46e7f7c0456 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/empty.js @@ -0,0 +1 @@ +// @flow diff --git a/tests/autofix-insert-type-error-messages/object.js b/tests/autofix-insert-type-error-messages/object.js new file mode 100644 index 00000000000..946625c90de --- /dev/null +++ b/tests/autofix-insert-type-error-messages/object.js @@ -0,0 +1,11 @@ +// @flow + +module.exports = { + a: () => { + return { + x: + // comment + "blah" + }; + } +} diff --git a/tests/autofix-insert-type-error-messages/recursive.js b/tests/autofix-insert-type-error-messages/recursive.js new file mode 100644 index 00000000000..07cbea082f5 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/recursive.js @@ -0,0 +1,6 @@ +// @flow + +function f(x:number) { + if (x == 0) return "foo" + else return {x:f(x-1)} +}; diff --git a/tests/autofix-insert-type-error-messages/test.sh b/tests/autofix-insert-type-error-messages/test.sh new file mode 100644 index 00000000000..54b26096089 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/test.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +UNKNOWN=110 + +# Note when the grep fails error output is suppressed +# I usually just remove the error output greping and writing to the .err file (using 2 > &1 instead) +# The error grepping is useful to filter out spurious warnings +test_file () { + local EXIT="$1"; shift; + local FILE="$1"; shift; + echo "$FILE" + assert_exit "$EXIT" "$FLOW" autofix insert-type "$FILE" "$@" 2> "$FILE.err" + # If grep fails I don't want the test to fail + grep "flow autofix insert-type:" "$FILE.err" || true +} + +# User Errors +test_file "$UNKNOWN" empty.js +test_file "$UNKNOWN" empty.js 5 0 +test_file "$UNKNOWN" annotated.js 3 6 + +# No perfect solution errors +# Type size bigger than expected +test_file "$UNKNOWN" big-type.js 3 6 +# Fails because the user could want an exact object or the general object +test_file "$UNKNOWN" object.js 4 8 --strategy=fail + +test_file "$UNKNOWN" empty-array.js 3 12 3 14 + +# Type isn't well scoped +test_file "$UNKNOWN" type-shadowing.js 4 41 4 42 + +# Type isn't representatable in contrete syntax +test_file "$UNKNOWN" recursive.js 3 21 +test_file "$UNKNOWN" anonymous.js 3 13 diff --git a/tests/autofix-insert-type-error-messages/type-shadowing.js b/tests/autofix-insert-type-error-messages/type-shadowing.js new file mode 100644 index 00000000000..39a53add265 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/type-shadowing.js @@ 
-0,0 +1,5 @@ +// @flow + +function outer(y: T) { + function inner(x: T, z) { inner(x, y); } +} diff --git a/tests/autofix-insert-type-error-messages/unparsable.js b/tests/autofix-insert-type-error-messages/unparsable.js new file mode 100644 index 00000000000..03bf80a23b5 --- /dev/null +++ b/tests/autofix-insert-type-error-messages/unparsable.js @@ -0,0 +1,5 @@ +// @flow + + + +var x : number = 5 diff --git a/tests/autofix-insert-type/.flowconfig b/tests/autofix-insert-type/.flowconfig new file mode 100644 index 00000000000..740261edd07 --- /dev/null +++ b/tests/autofix-insert-type/.flowconfig @@ -0,0 +1,12 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +no_flowlib=false + +[strict] diff --git a/tests/autofix-insert-type/.testconfig b/tests/autofix-insert-type/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-insert-type/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-insert-type/alias-0.js b/tests/autofix-insert-type/alias-0.js new file mode 100644 index 00000000000..82871e6c0b9 --- /dev/null +++ b/tests/autofix-insert-type/alias-0.js @@ -0,0 +1,13 @@ +// @flow + +type MyObject = { + foo: number, + bar: boolean, + baz: string, +}; + +function f(x: MyObject) { + return x; +}; + +f({foo:1, bar:true, baz:"baz"}); diff --git a/tests/autofix-insert-type/alias.js b/tests/autofix-insert-type/alias.js new file mode 100644 index 00000000000..229e7654a9f --- /dev/null +++ b/tests/autofix-insert-type/alias.js @@ -0,0 +1,9 @@ +// @flow + +type MyNumber = number; + +function f(x: MyNumber) { + return x; +}; + +f(1); diff --git a/tests/autofix-insert-type/array.js b/tests/autofix-insert-type/array.js new file mode 100644 index 00000000000..3ded78ac679 --- /dev/null +++ b/tests/autofix-insert-type/array.js @@ -0,0 +1,5 @@ +// @flow + +function foo() { + return [1, 2]; +} diff --git a/tests/autofix-insert-type/arrow-0.js b/tests/autofix-insert-type/arrow-0.js new file mode 100644 index 00000000000..6d29cd7e13d --- /dev/null +++ b/tests/autofix-insert-type/arrow-0.js @@ -0,0 +1,4 @@ +// @flow + +const a = (x) => x + 1; +a(1); diff --git a/tests/autofix-insert-type/arrow-1.js b/tests/autofix-insert-type/arrow-1.js new file mode 100644 index 00000000000..3020f038dd4 --- /dev/null +++ b/tests/autofix-insert-type/arrow-1.js @@ -0,0 +1,4 @@ +// @flow + +const b = (x, y, z) => x(y, z.f); +b((k, l) => (l + k), "a", { f: 2 }); diff --git a/tests/autofix-insert-type/arrow-2.js b/tests/autofix-insert-type/arrow-2.js new file mode 100644 index 00000000000..86155001137 --- /dev/null +++ b/tests/autofix-insert-type/arrow-2.js @@ -0,0 +1,3 @@ +// @flow + +((x) => x + 1)(4); diff --git a/tests/autofix-insert-type/autofix-insert-type.exp b/tests/autofix-insert-type/autofix-insert-type.exp new file mode 100644 index 00000000000..8245f6ae766 --- /dev/null +++ b/tests/autofix-insert-type/autofix-insert-type.exp @@ -0,0 +1,685 @@ +array.js 3 15 --strategy=specialize +// @flow + +function foo(): Array<1 | 2> { + return [1, 2]; +} +No errors! +arrow-0.js 3 8 +// @flow + +const a: ((x: number) => number) = (x) => x + 1; +a(1); +No errors! +arrow-0.js 3 13 +// @flow + +const a = (x: number) => x + 1; +a(1); +No errors! +arrow-0.js 3 14 +// @flow + +const a = (x): number => x + 1; +a(1); +No errors! +arrow-1.js 3 16 +// @flow + +const b = (x, y: string, z) => x(y, z.f); +b((k, l) => (l + k), "a", { f: 2 }); +No errors! +arrow-2.js 3 5 +// @flow + +((x): number => x + 1)(4); +No errors! 
+class-0.js 4 3 +// @flow + +class A { + f: number = 1; + m(x) { + return this.f + 1; + } +} + +(new A).m(""); +No errors! +class-0.js 4 4 +// @flow + +class A { + f: number = 1; + m(x) { + return this.f + 1; + } +} + +(new A).m(""); +No errors! +class-0.js 4 3 4 4 +// @flow + +class A { + f: number = 1; + m(x) { + return this.f + 1; + } +} + +(new A).m(""); +No errors! +class-1.js 5 6 +// @flow + +class A { + f = 1; + m(): string { return "a"; } +} + +var a = new A; +function foo(x) { return x; } +foo(A); +No errors! +class-2.js 4 6 +// @flow + +class A { + m(): this { + return this; + } +} +No errors! +class-3.js 9 15 +// @flow + +class A { + m() { + return this; + } +} + +function foo(x: (() => A)) {} +foo(new A().m); +No errors! +comments-0.js 4 33 +// @flow + +// BEFORE FUNCTION +export function test (x: number): string { + // ONE + const a = 4 + x; // END OF a = 4... + // TWO + return /* BETWEEEN */ `${a}` // END OF return ... +} // END +No errors! +dictionary.js 12 12 +// @flow + +() => ({ + ab: null, + "a b": null, + "a'b": null, + "1": null, + "'": null, + " ": null, + "_": null, + "": null, + get "x"(): null { + return null; + }, + set "y"(z: string) {} +}); + +() => ({ + ab: null, + 'a b': null, + 'a"b': null, + '1': null, + '"': null, + ' ': null, + '_': null, + '': null, + get 'x'() { + return null; + }, + set 'y'(z: string) {} +}); +No errors! +func-0.js 3 26 +// @flow + +function foo(x: number, y: string) { + return x + y; +} + +foo(1, ""); +No errors! +func-1.js 3 18 +// @flow + +function foo(x, y: ((z: number) => number)) { + return x + y(x); +} + +foo(1, (z) => z + 1); +No errors! +func-2.js 4 17 +// @flow + +function foo(x, y) { + function bar(z: number, w) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); +No errors! +func-3.js 2 22 +// @flow +function throws_arg(): empty { + throw 42; +} +No errors! +func-4.js 2 22 +// @flow +function throws_arg(): empty { + if (Math.random() < 0.5) { + throw 42; + } else { + throw 42; + } +} +No errors! +func-5.js 2 22 +// @flow +function throws_arg(): number { + if (Math.random() < 0.5) { + return 42; + } else { + throw 42; + } +} +No errors! +func-poly-0.js 3 24 +// @flow + +function outer(y: T): void { + function inner(x: T, z) { + inner(x, x); + } +} +No errors! +object-0.js 7 6 +// @flow + +var obj = { + n(y) { + return this.m(y); + }, + m(x: number) { + return ""; + } +} + +obj.n(0) +No errors! +object-1.js 7 8 +// @flow + +var obj = { + 'n'(y) { + return this.m(y); + }, + 'm'(x: number) { + return ""; + } +} + +obj['n'](0) +No errors! +object-2.js 6 6 +// @flow + +function foo(x) {} + +foo({ + m(): number { return 1; }, + n: () => 2, + l: function bar() {} +}); +No errors! +poly-0.js 3 22 --strategy=specialize +// @flow + +function foo(x: T): {|f: T|} { + return { + f: x + } +} +No errors! +poly-0.js 3 15 +// @flow + +function foo(x: T) { + return { + f: x + } +} +No errors! +react-0.js 6 21 +// @flow + +import React from "react"; +import ReactDOM from "react-dom"; + +function Clock(props: {date: Date}) { + return ( +
+    <div>
+      <h1>Hello, world!</h1>
+      <h2>It is {props.date.toLocaleTimeString()}.</h2>
+    </div>
+ ); +} + +function tick() { + const element = document.getElementById('root'); + if (element) { + ReactDOM.render( + <Clock date={new Date()} />, + element + ); + } +} + +setInterval(tick, 1000); +No errors! +string-literal.js 11 14 +// @flow + +declare var a: "a"; +declare var single_quote: "'"; +declare var double_quote: '""'; +declare var backslash: '\\'; + +function f1() { return a; } +function f2() { return single_quote; } +function f3() { return double_quote; } +function f4(): "\\" { return backslash; } +No errors! +type-utils.js 6 3 +// @flow + +declare class C<T>
{}; + +declare var typeof_C: typeof C; +(): typeof C => typeof_C; + +declare var class_of_C_number: Class>; +() => class_of_C_number; +No errors! +union-0.js 3 15 +// @flow + +function foo(x: void | number | string | (() => void)) { } + +foo(1); +foo("1"); +foo(1); +foo(2); +foo(() => foo()); +foo(); +No errors! +replacement-array.js 5 15 --strategy=generalize +// @flow + +// preserve spacing + +function foo(): Array {return [1, 2];} +No errors! +replacement-arrow.js 3 13 +// @flow + +const a = (x: number) => + x + 1; + + +a(1); +No errors! +replacement-class.js 5 7 +// @flow + +class A { // end of line comment + f = 1; + m(x): number { return this.f + 1; } +} (new A).m(""); // weird spacing +No errors! +replacement-dictionary.js 3 3 --strategy=generalize +// @flow + +(): { + "": null, + " ": null, + "'": null, + "1": null, + _: null, + "a b": null, + "a'b": null, + ab: null, + get x(): null, + set y(string): void, +} => ({ + ab: null, "a b": null, "a'b": null, + "1": null, "'": null, " ": null, + "_": null, "": null, + get "x"() {return null; }, + set "y"(z: string) {}}); + +() => ({ + ab: null, 'a b': null, 'a"b': null, '1': null, '"': null, ' ': null, '_': null, '': null, + get 'x'() {return null;}, set 'y'(z: string) {} } ); +No errors! +replacement-function.js 7 47 +// @flow + +// Spaced out comments /* non-comments + +// BEFORE FUNCTION +export //too many comments +function /*Random*/ test /*weird*/ (x: number): string /*Comments*/ { +// ONE +/* Two */ const a = 4 + x; // END OF a = 4... + + + + // Three + + return /* BETWEEEN */ `${a}` // END OF return ... +} // END */ +No errors! +replacement-object.js 2 16 +// @flow +var obj = {n(y): string {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace +No errors! +func-0.js 4 14 4 15 +// @flow + +function foo(x: number, y) { + return x + (y: string); +} + +foo(1, ""); +No errors! +func-0.js 4 10 4 15 +// @flow + +function foo(x: number, y) { + return (x + y: string); +} + +foo(1, ""); +No errors! +arrow-0.js 3 11 3 23 +// @flow + +const a: ((x: number) => number) = (x) => x + 1; +a(1); +No errors! +arrow-0.js 4 1 4 5 +// @flow + +const a = (x) => x + 1; +(a(1): number); +No errors! +arrow-0.js 3 7 3 8 +// @flow + +const a: ((x: number) => number) = (x) => x + 1; +a(1); +No errors! +alias.js 5 24 +// @flow + +type MyNumber = number; + +function f(x: MyNumber): MyNumber { + return x; +}; + +f(1); +No errors! +alias.js 5 24 --expand-type-aliases +// @flow + +type MyNumber = number; + +function f(x: MyNumber): number { + return x; +}; + +f(1); +No errors! +alias.js 6 10 6 11 +// @flow + +type MyNumber = number; + +function f(x: MyNumber) { + return (x: MyNumber); +}; + +f(1); +No errors! +alias.js 9 1 9 5 --expand-type-aliases +// @flow + +type MyNumber = number; + +function f(x: MyNumber) { + return x; +}; + +(f(1): number); +No errors! +alias-0.js 9 24 +// @flow + +type MyObject = { + foo: number, + bar: boolean, + baz: string, +}; + +function f(x: MyObject): MyObject { + return x; +}; + +f({foo:1, bar:true, baz:"baz"}); +No errors! +alias-0.js 9 24 --expand-type-aliases +// @flow + +type MyObject = { + foo: number, + bar: boolean, + baz: string, +}; + +function f(x: MyObject): {bar: boolean, baz: string, foo: number} { + return x; +}; + +f({foo:1, bar:true, baz:"baz"}); +No errors! +alias-0.js 10 10 10 11 +// @flow + +type MyObject = { + foo: number, + bar: boolean, + baz: string, +}; + +function f(x: MyObject) { + return (x: MyObject); +}; + +f({foo:1, bar:true, baz:"baz"}); +No errors! 
+alias-0.js 13 1 13 32 --expand-type-aliases +// @flow + +type MyObject = { + foo: number, + bar: boolean, + baz: string, +}; + +function f(x: MyObject) { + return x; +}; + +(f({foo:1, bar:true, baz:"baz"}): {bar: boolean, baz: string, foo: number}); +No errors! +replacement-object.js 2 5 --strategy=generalize +// @flow +var obj: {m: (x: number) => string, n: (y: number) => string} = {n(y) {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace +No errors! +replacement-object.js 2 6 --strategy=specialize +// @flow +var obj: {|m: (x: 0) => "", n: (y: 0) => ""|} = {n(y) {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace +No errors! +replacement-object.js 2 7 --strategy=temporary +// @flow +var obj: $TEMPORARY$object<{m: (x: number) => string, n: (y: number) => string}> = {n(y) {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace +No errors! +replacement-object.js 2 8 --strategy=fixme +// @flow +var obj: $FlowFixMe = {n(y) {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace +No errors! +replacement-object.js 2 5 2 8 --strategy=generalize +// @flow +var obj: {m: (x: number) => string, n: (y: number) => string} = {n(y) {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace +No errors! +func-2.js 3 14 +// @flow + +function foo(x: number, y) { + function bar(z, w) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); +No errors! +func-2.js 3 17 3 18 +// @flow + +function foo(x, y: number) { + function bar(z, w) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); +No errors! +func-2.js 4 16 +// @flow + +function foo(x, y) { + function bar(z: number, w) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); +No errors! +func-2.js 4 19 4 20 +// @flow + +function foo(x, y) { + function bar(z, w: number) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); +No errors! 
+insert-type array.js 3:15 +cat array.js 3:15 +// @flow + +function foo(): Array<1 | 2> { + return [1, 2]; +} +insert-type arrow-0.js 3:11-3:23 +cat arrow-0-copy.js 3:11-3:23 +// @flow + +const a: ((x: number) => number) = (x) => x + 1; +a(1); +cat arrow-0.js 3:11-3:23 +// @flow + +const a = (x) => x + 1; +a(1); diff --git a/tests/autofix-insert-type/class-0.js b/tests/autofix-insert-type/class-0.js new file mode 100644 index 00000000000..e71bba512f2 --- /dev/null +++ b/tests/autofix-insert-type/class-0.js @@ -0,0 +1,10 @@ +// @flow + +class A { + f = 1; + m(x) { + return this.f + 1; + } +} + +(new A).m(""); diff --git a/tests/autofix-insert-type/class-1.js b/tests/autofix-insert-type/class-1.js new file mode 100644 index 00000000000..82e37980588 --- /dev/null +++ b/tests/autofix-insert-type/class-1.js @@ -0,0 +1,10 @@ +// @flow + +class A { + f = 1; + m() { return "a"; } +} + +var a = new A; +function foo(x) { return x; } +foo(A); diff --git a/tests/autofix-insert-type/class-2.js b/tests/autofix-insert-type/class-2.js new file mode 100644 index 00000000000..b520b3c7174 --- /dev/null +++ b/tests/autofix-insert-type/class-2.js @@ -0,0 +1,7 @@ +// @flow + +class A { + m() { + return this; + } +} diff --git a/tests/autofix-insert-type/class-3.js b/tests/autofix-insert-type/class-3.js new file mode 100644 index 00000000000..ca7ee8c5513 --- /dev/null +++ b/tests/autofix-insert-type/class-3.js @@ -0,0 +1,10 @@ +// @flow + +class A { + m() { + return this; + } +} + +function foo(x) {} +foo(new A().m); diff --git a/tests/autofix-insert-type/comments-0.js b/tests/autofix-insert-type/comments-0.js new file mode 100644 index 00000000000..28080c95f0d --- /dev/null +++ b/tests/autofix-insert-type/comments-0.js @@ -0,0 +1,9 @@ +// @flow + +// BEFORE FUNCTION +export function test (x: number) { + // ONE + const a = 4 + x; // END OF a = 4... + // TWO + return /* BETWEEEN */ `${a}` // END OF return ... 
+} // END diff --git a/tests/autofix-insert-type/dictionary.js b/tests/autofix-insert-type/dictionary.js new file mode 100644 index 00000000000..9cae072216d --- /dev/null +++ b/tests/autofix-insert-type/dictionary.js @@ -0,0 +1,31 @@ +// @flow + +() => ({ + ab: null, + "a b": null, + "a'b": null, + "1": null, + "'": null, + " ": null, + "_": null, + "": null, + get "x"() { + return null; + }, + set "y"(z: string) {} +}); + +() => ({ + ab: null, + 'a b': null, + 'a"b': null, + '1': null, + '"': null, + ' ': null, + '_': null, + '': null, + get 'x'() { + return null; + }, + set 'y'(z: string) {} +}); diff --git a/tests/autofix-insert-type/func-0.js b/tests/autofix-insert-type/func-0.js new file mode 100644 index 00000000000..7986fc419f5 --- /dev/null +++ b/tests/autofix-insert-type/func-0.js @@ -0,0 +1,7 @@ +// @flow + +function foo(x: number, y) { + return x + y; +} + +foo(1, ""); diff --git a/tests/autofix-insert-type/func-1.js b/tests/autofix-insert-type/func-1.js new file mode 100644 index 00000000000..7239ea94916 --- /dev/null +++ b/tests/autofix-insert-type/func-1.js @@ -0,0 +1,7 @@ +// @flow + +function foo(x, y) { + return x + y(x); +} + +foo(1, (z) => z + 1); diff --git a/tests/autofix-insert-type/func-2.js b/tests/autofix-insert-type/func-2.js new file mode 100644 index 00000000000..27abe8a3a45 --- /dev/null +++ b/tests/autofix-insert-type/func-2.js @@ -0,0 +1,10 @@ +// @flow + +function foo(x, y) { + function bar(z, w) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); diff --git a/tests/autofix-insert-type/func-3.js b/tests/autofix-insert-type/func-3.js new file mode 100644 index 00000000000..bf3765622de --- /dev/null +++ b/tests/autofix-insert-type/func-3.js @@ -0,0 +1,4 @@ +// @flow +function throws_arg() { + throw 42; +} diff --git a/tests/autofix-insert-type/func-4.js b/tests/autofix-insert-type/func-4.js new file mode 100644 index 00000000000..0a9e98453d4 --- /dev/null +++ b/tests/autofix-insert-type/func-4.js @@ -0,0 +1,8 @@ +// @flow +function throws_arg() { + if (Math.random() < 0.5) { + throw 42; + } else { + throw 42; + } +} diff --git a/tests/autofix-insert-type/func-5.js b/tests/autofix-insert-type/func-5.js new file mode 100644 index 00000000000..e786c4406a3 --- /dev/null +++ b/tests/autofix-insert-type/func-5.js @@ -0,0 +1,8 @@ +// @flow +function throws_arg() { + if (Math.random() < 0.5) { + return 42; + } else { + throw 42; + } +} diff --git a/tests/autofix-insert-type/func-poly-0.js b/tests/autofix-insert-type/func-poly-0.js new file mode 100644 index 00000000000..52034c2b37d --- /dev/null +++ b/tests/autofix-insert-type/func-poly-0.js @@ -0,0 +1,7 @@ +// @flow + +function outer(y: T) { + function inner(x: T, z) { + inner(x, x); + } +} diff --git a/tests/autofix-insert-type/object-0.js b/tests/autofix-insert-type/object-0.js new file mode 100644 index 00000000000..c7cc23d3e54 --- /dev/null +++ b/tests/autofix-insert-type/object-0.js @@ -0,0 +1,12 @@ +// @flow + +var obj = { + n(y) { + return this.m(y); + }, + m(x) { + return ""; + } +} + +obj.n(0) diff --git a/tests/autofix-insert-type/object-1.js b/tests/autofix-insert-type/object-1.js new file mode 100644 index 00000000000..b4b36a5eb5b --- /dev/null +++ b/tests/autofix-insert-type/object-1.js @@ -0,0 +1,12 @@ +// @flow + +var obj = { + 'n'(y) { + return this.m(y); + }, + 'm'(x) { + return ""; + } +} + +obj['n'](0) diff --git a/tests/autofix-insert-type/object-2.js b/tests/autofix-insert-type/object-2.js new file mode 100644 index 00000000000..01eac82e141 --- /dev/null +++ 
b/tests/autofix-insert-type/object-2.js @@ -0,0 +1,9 @@ +// @flow + +function foo(x) {} + +foo({ + m() { return 1; }, + n: () => 2, + l: function bar() {} +}); diff --git a/tests/autofix-insert-type/poly-0.js b/tests/autofix-insert-type/poly-0.js new file mode 100644 index 00000000000..e8b10ff1294 --- /dev/null +++ b/tests/autofix-insert-type/poly-0.js @@ -0,0 +1,7 @@ +// @flow + +function foo(x: T) { + return { + f: x + } +} diff --git a/tests/autofix-insert-type/react-0.js b/tests/autofix-insert-type/react-0.js new file mode 100644 index 00000000000..358ca2dce09 --- /dev/null +++ b/tests/autofix-insert-type/react-0.js @@ -0,0 +1,25 @@ +// @flow + +import React from "react"; +import ReactDOM from "react-dom"; + +function Clock(props) { + return ( +
<div> + <h1>Hello, world!</h1> + <h2>It is {props.date.toLocaleTimeString()}.</h2> + </div>
+ ); +} + +function tick() { + const element = document.getElementById('root'); + if (element) { + ReactDOM.render( + , + element + ); + } +} + +setInterval(tick, 1000); diff --git a/tests/autofix-insert-type/replacement-array.js b/tests/autofix-insert-type/replacement-array.js new file mode 100644 index 00000000000..6da04d6df07 --- /dev/null +++ b/tests/autofix-insert-type/replacement-array.js @@ -0,0 +1,5 @@ +// @flow + +// preserve spacing + +function foo() {return [1, 2];} diff --git a/tests/autofix-insert-type/replacement-arrow.js b/tests/autofix-insert-type/replacement-arrow.js new file mode 100644 index 00000000000..a7de3ab9747 --- /dev/null +++ b/tests/autofix-insert-type/replacement-arrow.js @@ -0,0 +1,7 @@ +// @flow + +const a = (x) => + x + 1; + + +a(1); diff --git a/tests/autofix-insert-type/replacement-class.js b/tests/autofix-insert-type/replacement-class.js new file mode 100644 index 00000000000..2363af83ed0 --- /dev/null +++ b/tests/autofix-insert-type/replacement-class.js @@ -0,0 +1,6 @@ +// @flow + +class A { // end of line comment + f = 1; + m(x) { return this.f + 1; } +} (new A).m(""); // weird spacing diff --git a/tests/autofix-insert-type/replacement-dictionary.js b/tests/autofix-insert-type/replacement-dictionary.js new file mode 100644 index 00000000000..03257e659d7 --- /dev/null +++ b/tests/autofix-insert-type/replacement-dictionary.js @@ -0,0 +1,12 @@ +// @flow + +() => ({ + ab: null, "a b": null, "a'b": null, + "1": null, "'": null, " ": null, + "_": null, "": null, + get "x"() {return null; }, + set "y"(z: string) {}}); + +() => ({ + ab: null, 'a b': null, 'a"b': null, '1': null, '"': null, ' ': null, '_': null, '': null, + get 'x'() {return null;}, set 'y'(z: string) {} } ); diff --git a/tests/autofix-insert-type/replacement-function.js b/tests/autofix-insert-type/replacement-function.js new file mode 100644 index 00000000000..bb7d4d81f14 --- /dev/null +++ b/tests/autofix-insert-type/replacement-function.js @@ -0,0 +1,16 @@ +// @flow + +// Spaced out comments /* non-comments + +// BEFORE FUNCTION +export //too many comments +function /*Random*/ test /*weird*/ (x: number) /*Comments*/ { +// ONE +/* Two */ const a = 4 + x; // END OF a = 4... + + + + // Three + + return /* BETWEEEN */ `${a}` // END OF return ... 
+} // END */ diff --git a/tests/autofix-insert-type/replacement-object.js b/tests/autofix-insert-type/replacement-object.js new file mode 100644 index 00000000000..fd60245d384 --- /dev/null +++ b/tests/autofix-insert-type/replacement-object.js @@ -0,0 +1,11 @@ +// @flow +var obj = {n(y) {return this.m(y);},m(x) {return "";}} +obj.n(0) + + + + + + + +// This is kinda trailing whitespace diff --git a/tests/autofix-insert-type/spread.js b/tests/autofix-insert-type/spread.js new file mode 100644 index 00000000000..a886177e2d0 --- /dev/null +++ b/tests/autofix-insert-type/spread.js @@ -0,0 +1,12 @@ +// @flow + +declare opaque type O; +class C { f: O; } +class D { g: O; } + +() => { declare var x: {w: O, ...{| x: O, y: O |}, z: O}; return x; }; +() => { declare var x: {|w: O, ...{| x: O, y: O |}, z: O|}; return x; }; +() => { declare var x: {...C, o: O}; return x; }; +() => { declare var x: {...C} & {...D}; return x; }; +() => { declare var x: {...T, ...S, o: O}; return x; }; +() => { declare var x: {...T, o: O}; return x; }; diff --git a/tests/autofix-insert-type/string-literal.js b/tests/autofix-insert-type/string-literal.js new file mode 100644 index 00000000000..8d9d673b127 --- /dev/null +++ b/tests/autofix-insert-type/string-literal.js @@ -0,0 +1,11 @@ +// @flow + +declare var a: "a"; +declare var single_quote: "'"; +declare var double_quote: '""'; +declare var backslash: '\\'; + +function f1() { return a; } +function f2() { return single_quote; } +function f3() { return double_quote; } +function f4() { return backslash; } diff --git a/tests/autofix-insert-type/test.sh b/tests/autofix-insert-type/test.sh new file mode 100755 index 00000000000..9287a0412e2 --- /dev/null +++ b/tests/autofix-insert-type/test.sh @@ -0,0 +1,110 @@ +#!/bin/bash + +## Correct tests + +TEMP_DIR=tmp +mkdir $TEMP_DIR + +do_file() { + FILE="$1"; shift; + echo "$FILE" "$@" + cp "$FILE" "$TEMP_DIR/$FILE" + assert_ok "$FLOW" autofix insert-type --strip-root --quiet --in-place --path="$TEMP_DIR/out.js" \ + "$TEMP_DIR/$FILE" "$@" + cat "$TEMP_DIR/out.js" + rm "$TEMP_DIR/$FILE" + assert_ok "$FLOW" force-recheck "$TEMP_DIR/out.js" + assert_ok "$FLOW" status + rm "$TEMP_DIR/out.js" +} + +do_file "array.js" 3 15 --strategy=specialize +do_file "arrow-0.js" 3 8 +do_file "arrow-0.js" 3 13 +do_file "arrow-0.js" 3 14 +do_file "arrow-1.js" 3 16 +do_file "arrow-2.js" 3 5 +do_file "class-0.js" 4 3 +do_file "class-0.js" 4 4 +do_file "class-0.js" 4 3 4 4 +do_file "class-1.js" 5 6 +do_file "class-2.js" 4 6 +do_file "class-3.js" 9 15 +do_file "comments-0.js" 4 33 +do_file "dictionary.js" 12 12 +do_file "func-0.js" 3 26 +do_file "func-1.js" 3 18 +do_file "func-2.js" 4 17 +do_file "func-3.js" 2 22 +do_file "func-4.js" 2 22 +do_file "func-5.js" 2 22 +do_file "func-poly-0.js" 3 24 +do_file "object-0.js" 7 6 +do_file "object-1.js" 7 8 +do_file "object-2.js" 6 6 +do_file "poly-0.js" 3 22 --strategy=specialize +do_file "poly-0.js" 3 15 +do_file "react-0.js" 6 21 +do_file "string-literal.js" 11 14 +do_file "type-utils.js" 6 3 +do_file "union-0.js" 3 15 +do_file "replacement-array.js" 5 15 --strategy=generalize +do_file "replacement-arrow.js" 3 13 +do_file "replacement-class.js" 5 7 +do_file "replacement-dictionary.js" 3 3 --strategy=generalize +do_file "replacement-function.js" 7 47 +do_file "replacement-object.js" 2 16 + +do_file "func-0.js" 4 14 4 15 +do_file "func-0.js" 4 10 4 15 +do_file "arrow-0.js" 3 11 3 23 +do_file "arrow-0.js" 4 1 4 5 +do_file "arrow-0.js" 3 7 3 8 + +do_file "alias.js" 5 24 +do_file "alias.js" 5 24 --expand-type-aliases 
+do_file "alias.js" 6 10 6 11 +do_file "alias.js" 9 1 9 5 --expand-type-aliases + +do_file "alias-0.js" 9 24 +do_file "alias-0.js" 9 24 --expand-type-aliases +do_file "alias-0.js" 10 10 10 11 +do_file "alias-0.js" 13 1 13 32 --expand-type-aliases + +# Test pointing to identifiers +do_file "replacement-object.js" 2 5 --strategy=generalize +do_file "replacement-object.js" 2 6 --strategy=specialize +do_file "replacement-object.js" 2 7 --strategy=temporary +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet \ + "replacement-object.js" 2 7 --strategy=fail +do_file "replacement-object.js" 2 8 --strategy=fixme +do_file "replacement-object.js" 2 5 2 8 --strategy=generalize +do_file "func-2.js" 3 14 +do_file "func-2.js" 3 17 3 18 +do_file "func-2.js" 4 16 +do_file "func-2.js" 4 19 4 20 +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet --strict-location "func-2.js" 3 14 +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet --strict-location "func-2.js" 3 17 3 18 +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet --strict-location "func-2.js" 4 16 +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet --strict-location "func-2.js" 4 19 4 20 + + +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet "object-0.js" 4 4 +assert_exit 110 "$FLOW" autofix insert-type --strip-root --quiet "poly-0.js" 3 21 + +# Test File IO +echo "insert-type array.js 3:15" +cp "array.js" "$TEMP_DIR/array.js" +assert_ok "$FLOW" autofix insert-type --strip-root --quiet --in-place \ + --strategy=specialize "$TEMP_DIR/array.js" 3 15 +echo "cat array.js 3:15" +cat "$TEMP_DIR/array.js" + +echo "insert-type arrow-0.js 3:11-3:23" +cp "arrow-0.js" "$TEMP_DIR/arrow-0.js" +assert_ok "$FLOW" autofix insert-type --strip-root --quiet \ + --in-place --path "$TEMP_DIR/arrow-0-copy.js" "$TEMP_DIR/arrow-0.js" 3 11 3 23 +echo "cat arrow-0-copy.js 3:11-3:23" +cat "$TEMP_DIR/arrow-0-copy.js" +echo "cat arrow-0.js 3:11-3:23" +cat "$TEMP_DIR/arrow-0.js" diff --git a/tests/autofix-insert-type/type-utils.js b/tests/autofix-insert-type/type-utils.js new file mode 100644 index 00000000000..8aee9d8fd72 --- /dev/null +++ b/tests/autofix-insert-type/type-utils.js @@ -0,0 +1,9 @@ +// @flow + +declare class C

{}; + +declare var typeof_C: typeof C; +() => typeof_C; + +declare var class_of_C_number: Class>; +() => class_of_C_number; diff --git a/tests/autofix-insert-type/union-0.js b/tests/autofix-insert-type/union-0.js new file mode 100644 index 00000000000..d578034fa04 --- /dev/null +++ b/tests/autofix-insert-type/union-0.js @@ -0,0 +1,10 @@ +// @flow + +function foo(x) { } + +foo(1); +foo("1"); +foo(1); +foo(2); +foo(() => foo()); +foo(); diff --git a/tests/autofix-multi-run/.flowconfig b/tests/autofix-multi-run/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-multi-run/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-multi-run/.testconfig b/tests/autofix-multi-run/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-multi-run/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-multi-run/a.js b/tests/autofix-multi-run/a.js new file mode 100644 index 00000000000..4584c20a4ba --- /dev/null +++ b/tests/autofix-multi-run/a.js @@ -0,0 +1,9 @@ +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2() { return C1; } } +class C3 { m3() { return C2; } } +class C4 { m4() { return C3; } } + +module.exports = C4; diff --git a/tests/autofix-multi-run/autofix-multi-run.exp b/tests/autofix-multi-run/autofix-multi-run.exp new file mode 100644 index 00000000000..3272944b85f --- /dev/null +++ b/tests/autofix-multi-run/autofix-multi-run.exp @@ -0,0 +1,92 @@ +> cat a.js +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2() { return C1; } } +class C3 { m3() { return C2; } } +class C4 { m4(): typeof C3 { return C3; } } + +module.exports = C4; +> cat b.js +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2() { return C1; } } +class C3 { m3(): typeof C2 { return C2; } } + +module.exports = C3; +> cat c.js +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2(): typeof C1 { return C1; } } + +module.exports = C2; +> cat d.js +// @flow + +class C0 { } +class C1 { m1(): typeof C0 { return C0; } } + +module.exports = C1; +> cat a.js +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2() { return C1; } } +class C3 { m3(): typeof C2 { return C2; } } +class C4 { m4(): typeof C3 { return C3; } } + +module.exports = C4; +> cat b.js +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2(): typeof C1 { return C1; } } +class C3 { m3(): typeof C2 { return C2; } } + +module.exports = C3; +> cat c.js +// @flow + +class C0 { } +class C1 { m1(): typeof C0 { return C0; } } +class C2 { m2(): typeof C1 { return C1; } } + +module.exports = C2; +> cat a.js +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2(): typeof C1 { return C1; } } +class C3 { m3(): typeof C2 { return C2; } } +class C4 { m4(): typeof C3 { return C3; } } + +module.exports = C4; +> cat b.js +// @flow + +class C0 { } +class C1 { m1(): typeof C0 { return C0; } } +class C2 { m2(): typeof C1 { return C1; } } +class C3 { m3(): typeof C2 { return C2; } } + +module.exports = C3; +> cat a.js +// @flow + +class C0 { } +class C1 { m1(): typeof C0 { return C0; } } +class C2 { m2(): typeof C1 { return C1; } } +class C3 { m3(): typeof C2 { return C2; } } +class C4 { m4(): typeof C3 { return C3; } } + +module.exports = C4; +> flow status +No errors! 
diff --git a/tests/autofix-multi-run/b.js b/tests/autofix-multi-run/b.js new file mode 100644 index 00000000000..7fbe85f95b8 --- /dev/null +++ b/tests/autofix-multi-run/b.js @@ -0,0 +1,8 @@ +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2() { return C1; } } +class C3 { m3() { return C2; } } + +module.exports = C3; diff --git a/tests/autofix-multi-run/c.js b/tests/autofix-multi-run/c.js new file mode 100644 index 00000000000..41a5a1294e1 --- /dev/null +++ b/tests/autofix-multi-run/c.js @@ -0,0 +1,7 @@ +// @flow + +class C0 { } +class C1 { m1() { return C0; } } +class C2 { m2() { return C1; } } + +module.exports = C2; diff --git a/tests/autofix-multi-run/d.js b/tests/autofix-multi-run/d.js new file mode 100644 index 00000000000..ceb46d65ac5 --- /dev/null +++ b/tests/autofix-multi-run/d.js @@ -0,0 +1,6 @@ +// @flow + +class C0 { } +class C1 { m1() { return C0; } } + +module.exports = C1; diff --git a/tests/autofix-multi-run/e.js b/tests/autofix-multi-run/e.js new file mode 100644 index 00000000000..d0b11b991fc --- /dev/null +++ b/tests/autofix-multi-run/e.js @@ -0,0 +1,5 @@ +// @flow + +class C0 { } + +module.exports = C0; diff --git a/tests/autofix-multi-run/test.sh b/tests/autofix-multi-run/test.sh new file mode 100755 index 00000000000..e14bea89769 --- /dev/null +++ b/tests/autofix-multi-run/test.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +update_in_place(){ + local FILE="$1" + assert_ok "$FLOW" autofix insert-type --in-place "$@" + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" +} + +update_in_place a.js 7 16 +update_in_place b.js 6 16 +update_in_place c.js 5 16 +update_in_place d.js 4 16 +# assert_errors "$FLOW" status --strip-root +update_in_place a.js 6 16 +update_in_place b.js 5 16 +update_in_place c.js 4 16 +#assert_errors "$FLOW" status --strip-root +update_in_place a.js 5 16 +update_in_place b.js 4 16 +#assert_errors "$FLOW" status --strip-root +update_in_place a.js 4 16 + +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-numeric-literals/.flowconfig b/tests/autofix-numeric-literals/.flowconfig new file mode 100644 index 00000000000..7bb071dadbd --- /dev/null +++ b/tests/autofix-numeric-literals/.flowconfig @@ -0,0 +1,12 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-numeric-literals/.testconfig b/tests/autofix-numeric-literals/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-numeric-literals/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-numeric-literals/a.js b/tests/autofix-numeric-literals/a.js new file mode 100644 index 00000000000..97e809cbfd3 --- /dev/null +++ b/tests/autofix-numeric-literals/a.js @@ -0,0 +1,5 @@ +// @flow + +declare var cond: boolean; + +module.exports = () => (cond ? 0 : 1); diff --git a/tests/autofix-numeric-literals/autofix-numeric-literals.exp b/tests/autofix-numeric-literals/autofix-numeric-literals.exp new file mode 100644 index 00000000000..c2eaac38267 --- /dev/null +++ b/tests/autofix-numeric-literals/autofix-numeric-literals.exp @@ -0,0 +1,8 @@ +> cat a.js +// @flow + +declare var cond: boolean; + +module.exports = (): number => (cond ? 0 : 1); +> flow status +No errors! 
diff --git a/tests/autofix-numeric-literals/test.sh b/tests/autofix-numeric-literals/test.sh new file mode 100755 index 00000000000..2e48c24110e --- /dev/null +++ b/tests/autofix-numeric-literals/test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +assert_ok "$FLOW" autofix exports --in-place a.js +assert_ok "$FLOW" force-recheck a.js +echo "> cat a.js" +cat a.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-object-dynamic-props/.flowconfig b/tests/autofix-object-dynamic-props/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-object-dynamic-props/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-object-dynamic-props/.testconfig b/tests/autofix-object-dynamic-props/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-object-dynamic-props/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-object-dynamic-props/a.js b/tests/autofix-object-dynamic-props/a.js new file mode 100644 index 00000000000..79c5ffaf11a --- /dev/null +++ b/tests/autofix-object-dynamic-props/a.js @@ -0,0 +1,19 @@ +// @flow + +function keyA() { return 'keyA' }; +function keyB() { return 'keyB' }; +function keyAny(): any { return 'keyAny' }; + +const FIELDS = { + A: keyA(), + B: keyB(), + keyAny: keyAny(), +} + +const dict = { + [FIELDS.A]: 1, + [FIELDS.B]: 2, + [FIELDS.keyAny]: 3, +}; + +module.exports = dict; diff --git a/tests/autofix-object-dynamic-props/autofix-object-dynamic-props.exp b/tests/autofix-object-dynamic-props/autofix-object-dynamic-props.exp new file mode 100644 index 00000000000..4e866b55403 --- /dev/null +++ b/tests/autofix-object-dynamic-props/autofix-object-dynamic-props.exp @@ -0,0 +1,22 @@ +> cat a.js +// @flow + +function keyA() { return 'keyA' }; +function keyB() { return 'keyB' }; +function keyAny(): any { return 'keyAny' }; + +const FIELDS = { + A: keyA(), + B: keyB(), + keyAny: keyAny(), +} + +const dict: {keyA: number, keyB: number} = { + [FIELDS.A]: 1, + [FIELDS.B]: 2, + [FIELDS.keyAny]: 3, +}; + +module.exports = dict; +> flow status +No errors! 
diff --git a/tests/autofix-object-dynamic-props/test.sh b/tests/autofix-object-dynamic-props/test.sh new file mode 100755 index 00000000000..76660413e30 --- /dev/null +++ b/tests/autofix-object-dynamic-props/test.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +assert_ok "$FLOW" autofix exports --in-place a.js +echo "> cat a.js" +cat a.js +assert_ok "$FLOW" force-recheck a.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-object-key/.flowconfig b/tests/autofix-object-key/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-object-key/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-object-key/.testconfig b/tests/autofix-object-key/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-object-key/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-object-key/a.js b/tests/autofix-object-key/a.js new file mode 100644 index 00000000000..f988d4d73e6 --- /dev/null +++ b/tests/autofix-object-key/a.js @@ -0,0 +1,9 @@ +// @flow + +type Key = 'A' | 'B'; +declare opaque type Val; + +declare var key: Key; +declare var val: Val + +module.exports = { [key]: val }; diff --git a/tests/autofix-object-key/autofix-object-key.exp b/tests/autofix-object-key/autofix-object-key.exp new file mode 100644 index 00000000000..7eb5804133e --- /dev/null +++ b/tests/autofix-object-key/autofix-object-key.exp @@ -0,0 +1,12 @@ +> cat a.js +// @flow + +type Key = 'A' | 'B'; +declare opaque type Val; + +declare var key: Key; +declare var val: Val + +module.exports = ({ [key]: val }: {A: Val, B: Val}); +> flow status +No errors! diff --git a/tests/autofix-object-key/test.sh b/tests/autofix-object-key/test.sh new file mode 100755 index 00000000000..ed0ab885492 --- /dev/null +++ b/tests/autofix-object-key/test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +assert_ok "$FLOW" autofix exports --in-place a.js +echo "> cat a.js" +cat a.js +assert_ok "$FLOW" force-recheck a.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-object-spread/.flowconfig b/tests/autofix-object-spread/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-object-spread/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-object-spread/.testconfig b/tests/autofix-object-spread/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-object-spread/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-object-spread/a.js b/tests/autofix-object-spread/a.js new file mode 100644 index 00000000000..6624aac57e6 --- /dev/null +++ b/tests/autofix-object-spread/a.js @@ -0,0 +1,6 @@ +// @flow + +declare opaque type Val; +declare var obj: { a: Val, b: Val }; + +module.exports = { ...obj }; diff --git a/tests/autofix-object-spread/autofix-object-spread.exp b/tests/autofix-object-spread/autofix-object-spread.exp new file mode 100644 index 00000000000..6a6ea0cb7e5 --- /dev/null +++ b/tests/autofix-object-spread/autofix-object-spread.exp @@ -0,0 +1,9 @@ +> cat a.js +// @flow + +declare opaque type Val; +declare var obj: { a: Val, b: Val }; + +module.exports = { ...obj }; +> flow status +No errors! 
diff --git a/tests/autofix-object-spread/test.sh b/tests/autofix-object-spread/test.sh new file mode 100755 index 00000000000..ed0ab885492 --- /dev/null +++ b/tests/autofix-object-spread/test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +assert_ok "$FLOW" autofix exports --in-place a.js +echo "> cat a.js" +cat a.js +assert_ok "$FLOW" force-recheck a.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-object/.flowconfig b/tests/autofix-object/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-object/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-object/.testconfig b/tests/autofix-object/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-object/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-object/a.js b/tests/autofix-object/a.js new file mode 100644 index 00000000000..946625c90de --- /dev/null +++ b/tests/autofix-object/a.js @@ -0,0 +1,11 @@ +// @flow + +module.exports = { + a: () => { + return { + x: + // comment + "blah" + }; + } +} diff --git a/tests/autofix-object/autofix-object.exp b/tests/autofix-object/autofix-object.exp new file mode 100644 index 00000000000..abc5ea0e17f --- /dev/null +++ b/tests/autofix-object/autofix-object.exp @@ -0,0 +1,41 @@ +> cat a.js +// @flow + +module.exports = { + a: (): {x: string} => { + return { + x: + // comment + "blah" + }; + } +} +> cat b.js +// @flow + +function foo(): {f: number, g: number, h: string} { + // Comment 0 + const x = { + // Comment 1 + f: 1, + // Comment 2 + g: 2, + // Comment 3 + h: + // Comment 4 + "blah", + } + // Comment 5 + return x; +} +module.exports = { foo }; +> cat c.js +// @flow + +module.exports = { + m(): number { + return 1; + }, +}; +> flow status +No errors! 
diff --git a/tests/autofix-object/b.js b/tests/autofix-object/b.js new file mode 100644 index 00000000000..aa9924e739b --- /dev/null +++ b/tests/autofix-object/b.js @@ -0,0 +1,18 @@ +// @flow + +function foo() { + // Comment 0 + const x = { + // Comment 1 + f: 1, + // Comment 2 + g: 2, + // Comment 3 + h: + // Comment 4 + "blah", + } + // Comment 5 + return x; +} +module.exports = { foo }; diff --git a/tests/autofix-object/c.js b/tests/autofix-object/c.js new file mode 100644 index 00000000000..e0a0a3c6bb5 --- /dev/null +++ b/tests/autofix-object/c.js @@ -0,0 +1,7 @@ +// @flow + +module.exports = { + m() { + return 1; + }, +}; diff --git a/tests/autofix-object/test.sh b/tests/autofix-object/test.sh new file mode 100755 index 00000000000..4e50a55dd7e --- /dev/null +++ b/tests/autofix-object/test.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +run_test(){ + local FILE="$1" + assert_ok "$FLOW" autofix exports --in-place "$FILE" + assert_ok "$FLOW" force-recheck "$FILE" + echo "> cat $FILE" + cat "$FILE" +} + +run_test a.js +run_test b.js +run_test c.js + +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-omit-targ-defaults/.flowconfig b/tests/autofix-omit-targ-defaults/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-omit-targ-defaults/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-omit-targ-defaults/.testconfig b/tests/autofix-omit-targ-defaults/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-omit-targ-defaults/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-omit-targ-defaults/autofix-omit-targ-defaults.exp b/tests/autofix-omit-targ-defaults/autofix-omit-targ-defaults.exp new file mode 100644 index 00000000000..094fedaf31d --- /dev/null +++ b/tests/autofix-omit-targ-defaults/autofix-omit-targ-defaults.exp @@ -0,0 +1,11 @@ +> cat defaults.js +// @flow + + +type Foo = {}; + +function foo(): Foo { return {}; } + +module.exports = (foo(): Foo); +> flow status +No errors! 
diff --git a/tests/autofix-omit-targ-defaults/defaults.js b/tests/autofix-omit-targ-defaults/defaults.js new file mode 100644 index 00000000000..4a0b8b42abc --- /dev/null +++ b/tests/autofix-omit-targ-defaults/defaults.js @@ -0,0 +1,8 @@ +// @flow + + +type Foo = {}; + +function foo(): Foo { return {}; } + +module.exports = foo(); diff --git a/tests/autofix-omit-targ-defaults/test.sh b/tests/autofix-omit-targ-defaults/test.sh new file mode 100755 index 00000000000..68594bb3e15 --- /dev/null +++ b/tests/autofix-omit-targ-defaults/test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +assert_ok "$FLOW" autofix exports --in-place defaults.js +echo "> cat defaults.js" +cat defaults.js +assert_ok "$FLOW" force-recheck defaults.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-simplify-hardcoded-fixes/.flowconfig b/tests/autofix-simplify-hardcoded-fixes/.flowconfig new file mode 100644 index 00000000000..4974098d34c --- /dev/null +++ b/tests/autofix-simplify-hardcoded-fixes/.flowconfig @@ -0,0 +1,14 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +no_flowlib=false +experimental.well_formed_exports=true +experimental.types_first=true + +[strict] diff --git a/tests/autofix-simplify-hardcoded-fixes/.testconfig b/tests/autofix-simplify-hardcoded-fixes/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-simplify-hardcoded-fixes/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-simplify-hardcoded-fixes/a.js b/tests/autofix-simplify-hardcoded-fixes/a.js new file mode 100644 index 00000000000..25c79a97de1 --- /dev/null +++ b/tests/autofix-simplify-hardcoded-fixes/a.js @@ -0,0 +1,5 @@ +// @flow + +declare function f(): React$Element<'a'> | React$Element<'b'>; + +module.exports = f(); diff --git a/tests/autofix-simplify-hardcoded-fixes/autofix-simplify-hardcoded-fixes.exp b/tests/autofix-simplify-hardcoded-fixes/autofix-simplify-hardcoded-fixes.exp new file mode 100644 index 00000000000..c899002c5ae --- /dev/null +++ b/tests/autofix-simplify-hardcoded-fixes/autofix-simplify-hardcoded-fixes.exp @@ -0,0 +1,8 @@ +> cat a.js +// @flow + +declare function f(): React$Element<'a'> | React$Element<'b'>; + +module.exports = (f(): React$Element<"a"> | React$Element<"b">); +> flow status +No errors! 
diff --git a/tests/autofix-simplify-hardcoded-fixes/test.sh b/tests/autofix-simplify-hardcoded-fixes/test.sh new file mode 100755 index 00000000000..d3243361d53 --- /dev/null +++ b/tests/autofix-simplify-hardcoded-fixes/test.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +assert_ok "$FLOW" autofix insert-type --in-place a.js 5 18 5 21 +echo "> cat a.js" +cat a.js +assert_ok "$FLOW" force-recheck a.js +echo "> flow status" +assert_ok "$FLOW" status --strip-root diff --git a/tests/autofix-string-literals/.flowconfig b/tests/autofix-string-literals/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-string-literals/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-string-literals/.testconfig b/tests/autofix-string-literals/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-string-literals/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-string-literals/a.js b/tests/autofix-string-literals/a.js new file mode 100644 index 00000000000..4394ba471a2 --- /dev/null +++ b/tests/autofix-string-literals/a.js @@ -0,0 +1,10 @@ +// @flow + +declare var cond: boolean; + +module.exports = { + f1: () => (cond ? "0" : "1"), + f2: () => (cond ? "A0" : "A1"), + f3: () => (cond ? "Aa" : "Bb"), + f4: () => (cond ? "A_" : "B_"), +} diff --git a/tests/autofix-string-literals/autofix-string-literals.exp b/tests/autofix-string-literals/autofix-string-literals.exp new file mode 100644 index 00000000000..657aac61522 --- /dev/null +++ b/tests/autofix-string-literals/autofix-string-literals.exp @@ -0,0 +1,13 @@ +> cat a.js +// @flow + +declare var cond: boolean; + +module.exports = { + f1: (): string => (cond ? "0" : "1"), + f2: (): string => (cond ? "A0" : "A1"), + f3: (): string => (cond ? "Aa" : "Bb"), + f4: (): string => (cond ? "A_" : "B_"), +} +> flow status +No errors! 
diff --git a/tests/autofix-string-literals/test.sh b/tests/autofix-string-literals/test.sh new file mode 100755 index 00000000000..9b916abcf73 --- /dev/null +++ b/tests/autofix-string-literals/test.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +assert_ok "$FLOW" autofix insert-type --in-place a.js 6 9 +assert_ok "$FLOW" autofix insert-type --in-place a.js 7 9 +assert_ok "$FLOW" autofix insert-type --in-place a.js 8 9 +assert_ok "$FLOW" autofix insert-type --in-place a.js 9 9 +echo "> cat a.js" +cat a.js +assert_ok "$FLOW" force-recheck a.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-suggest/.flowconfig b/tests/autofix-suggest/.flowconfig new file mode 100644 index 00000000000..15589e0b83e --- /dev/null +++ b/tests/autofix-suggest/.flowconfig @@ -0,0 +1,2 @@ +[options] +module.system=haste diff --git a/tests/autofix-suggest/.testconfig b/tests/autofix-suggest/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-suggest/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-suggest/autofix-suggest.exp b/tests/autofix-suggest/autofix-suggest.exp new file mode 100644 index 00000000000..06e3fb128bf --- /dev/null +++ b/tests/autofix-suggest/autofix-suggest.exp @@ -0,0 +1,33 @@ +func-0.js +// @flow + +function foo(x: number, y: string): string { + return x + y; +} + +foo(1, ""); +func-1.js +// @flow + +function foo(x: number, y: ((z: number) => number)): number { + return x + y(x); +} + +foo(1, (z: number): number => z + 1); +func-2.js +// @flow + +function foo(x: number, y: number): void { + function bar(z: number, w: number): number { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); +func-3.js +// @flow +function throws_arg(): empty { + throw 42; +} +Found 0 errors diff --git a/tests/autofix-suggest/func-0.js b/tests/autofix-suggest/func-0.js new file mode 100644 index 00000000000..7986fc419f5 --- /dev/null +++ b/tests/autofix-suggest/func-0.js @@ -0,0 +1,7 @@ +// @flow + +function foo(x: number, y) { + return x + y; +} + +foo(1, ""); diff --git a/tests/autofix-suggest/func-1.js b/tests/autofix-suggest/func-1.js new file mode 100644 index 00000000000..7239ea94916 --- /dev/null +++ b/tests/autofix-suggest/func-1.js @@ -0,0 +1,7 @@ +// @flow + +function foo(x, y) { + return x + y(x); +} + +foo(1, (z) => z + 1); diff --git a/tests/autofix-suggest/func-2.js b/tests/autofix-suggest/func-2.js new file mode 100644 index 00000000000..27abe8a3a45 --- /dev/null +++ b/tests/autofix-suggest/func-2.js @@ -0,0 +1,10 @@ +// @flow + +function foo(x, y) { + function bar(z, w) { + return x + z + y + w; + } + bar(x, y); +} + +foo(1, 1); diff --git a/tests/autofix-suggest/func-3.js b/tests/autofix-suggest/func-3.js new file mode 100644 index 00000000000..bf3765622de --- /dev/null +++ b/tests/autofix-suggest/func-3.js @@ -0,0 +1,4 @@ +// @flow +function throws_arg() { + throw 42; +} diff --git a/tests/autofix-suggest/test.sh b/tests/autofix-suggest/test.sh new file mode 100755 index 00000000000..be8156cf0a6 --- /dev/null +++ b/tests/autofix-suggest/test.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# Dump the generated file in a temporary folder and in the end Flow check them +TEMP_DIR=tmp + +do_file() { + file=$1 + echo "$file" + assert_ok "$FLOW" autofix suggest --strip-root --quiet "$file" | tee $TEMP_DIR/"$file" +} + +mkdir $TEMP_DIR +do_file "func-0.js" +do_file "func-1.js" +do_file "func-2.js" +do_file "func-3.js" + +"$FLOW" init $TEMP_DIR +assert_ok "$FLOW" check $TEMP_DIR diff --git a/tests/autofix-union-style/.flowconfig 
b/tests/autofix-union-style/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-union-style/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-union-style/.testconfig b/tests/autofix-union-style/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-union-style/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-union-style/a.js b/tests/autofix-union-style/a.js new file mode 100644 index 00000000000..323f06e81d1 --- /dev/null +++ b/tests/autofix-union-style/a.js @@ -0,0 +1,46 @@ +// @flow + +declare var cond: boolean; +declare var cond2: boolean; +declare var cond3: boolean; + +module.exports = { + b1: () => + (cond ? true : true), + b2: () => + (cond ? false : false), + b3: () => + (cond ? true : false), + b4: () => + (cond ? false : true), + b5: (b: bool) => + (cond ? b : true), + b6: (b: bool) => + (cond ? false : b), + b7: (t: mixed) => + (cond ? false: t), + b8: (e: empty) => + (cond ? false : e), + b9: (a: any) => + (cond ? false : a), + n1: () => + (cond ? 0 : 1), + n2: () => + (cond ? 0 : (cond ? 1 : (cond ? 0 : 1))), + n3: (n : number) => + (cond ? 0 : (cond2 ? 1 : (cond3 ? 0 : n))), + s1: () => + (cond ? "0" : "1"), + s2: () => + (cond ? "0" : (cond ? "1" : (cond ? "0" : "1"))), + s3: (s : string) => + (cond ? "0" : (cond2 ? "1" : (cond3 ? s : "1"))), + a1: (x:any) => + (cond ? x.f : x), + a2: (n : number, s : string) => + cond ? (cond2 ? "0" : 0) : (cond3 ? n : s), + + + o2: () => + cond ? {x: true} : {x: false}, +} diff --git a/tests/autofix-union-style/autofix-union-style.exp b/tests/autofix-union-style/autofix-union-style.exp new file mode 100644 index 00000000000..347e9e2b83e --- /dev/null +++ b/tests/autofix-union-style/autofix-union-style.exp @@ -0,0 +1,49 @@ +> cat a.js +// @flow + +declare var cond: boolean; +declare var cond2: boolean; +declare var cond3: boolean; + +module.exports = { + b1: (): true => + (cond ? true : true), + b2: (): false => + (cond ? false : false), + b3: (): boolean => + (cond ? true : false), + b4: (): boolean => + (cond ? false : true), + b5: (b: bool): boolean => + (cond ? b : true), + b6: (b: bool): boolean => + (cond ? false : b), + b7: (t: mixed): mixed => + (cond ? false: t), + b8: (e: empty): false => + (cond ? false : e), + b9: (a: any): any | false => + (cond ? false : a), + n1: (): 0 | 1 => + (cond ? 0 : 1), + n2: (): 0 | 1 => + (cond ? 0 : (cond ? 1 : (cond ? 0 : 1))), + n3: (n : number): number => + (cond ? 0 : (cond2 ? 1 : (cond3 ? 0 : n))), + s1: (): "0" | "1" => + (cond ? "0" : "1"), + s2: (): "0" | "1" => + (cond ? "0" : (cond ? "1" : (cond ? "0" : "1"))), + s3: (s : string): string => + (cond ? "0" : (cond2 ? "1" : (cond3 ? s : "1"))), + a1: (x:any): any => + (cond ? x.f : x), + a2: (n : number, s : string): number | string => + cond ? (cond2 ? "0" : 0) : (cond3 ? n : s), + + + o2: (): {|x: false|} | {|x: true|} => + cond ? {x: true} : {x: false}, +} +> flow status +No errors! 
diff --git a/tests/autofix-union-style/test.sh b/tests/autofix-union-style/test.sh new file mode 100755 index 00000000000..993bc94ac74 --- /dev/null +++ b/tests/autofix-union-style/test.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +spec_it () { + assert_ok "$FLOW" autofix insert-type --in-place --strategy=specialize "$@" +} + +spec_it a.js 8 9 +spec_it a.js 10 9 +spec_it a.js 12 9 +spec_it a.js 14 9 +spec_it a.js 16 16 +spec_it a.js 18 16 +spec_it a.js 20 17 +spec_it a.js 22 17 +spec_it a.js 24 15 +spec_it a.js 26 9 +spec_it a.js 28 9 +spec_it a.js 30 19 +spec_it a.js 32 9 +spec_it a.js 34 9 +spec_it a.js 36 19 +spec_it a.js 38 14 +spec_it a.js 40 31 +spec_it a.js 44 9 + + +echo "> cat a.js" +cat a.js +assert_ok "$FLOW" force-recheck a.js + +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/autofix-validator/.flowconfig b/tests/autofix-validator/.flowconfig new file mode 100644 index 00000000000..4b3fa759b2e --- /dev/null +++ b/tests/autofix-validator/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +experimental.types_first=true +experimental.well_formed_exports=true + +[strict] diff --git a/tests/autofix-validator/.testconfig b/tests/autofix-validator/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/autofix-validator/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/autofix-validator/autofix-validator.exp b/tests/autofix-validator/autofix-validator.exp new file mode 100644 index 00000000000..883e0106bd7 --- /dev/null +++ b/tests/autofix-validator/autofix-validator.exp @@ -0,0 +1,11 @@ +> cat simplify-empty.js +// @flow + +export function foo(x: any): any { + if (x && x.bar) { + return x.bar(); + } + return x; +} +> flow status +No errors! diff --git a/tests/autofix-validator/simplify-empty.js b/tests/autofix-validator/simplify-empty.js new file mode 100644 index 00000000000..c6dde518b1d --- /dev/null +++ b/tests/autofix-validator/simplify-empty.js @@ -0,0 +1,8 @@ +// @flow + +export function foo(x: any) { + if (x && x.bar) { + return x.bar(); + } + return x; +} diff --git a/tests/autofix-validator/test.sh b/tests/autofix-validator/test.sh new file mode 100755 index 00000000000..9c28fc56d50 --- /dev/null +++ b/tests/autofix-validator/test.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +assert_ok "$FLOW" autofix insert-type --in-place simplify-empty.js 3 28 +echo "> cat simplify-empty.js" +cat simplify-empty.js +assert_ok "$FLOW" force-recheck simplify-empty.js +echo "> flow status" +assert_ok "$FLOW" status diff --git a/tests/badly_positioned_flow_use_op/.flowconfig b/tests/badly_positioned_flow_use_op/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/badly_positioned_flow_use_op/b.js b/tests/badly_positioned_flow_use_op/b.js new file mode 100644 index 00000000000..cb763ce540a --- /dev/null +++ b/tests/badly_positioned_flow_use_op/b.js @@ -0,0 +1,23 @@ +//@flow + +type Data = {| x: number |} + +declare function foo(data: Data): void; + +const o = { + fun: foo, +} +/* +The error position for this one is bad, +as it points to line 5. 
+*/ +function test1(b: boolean) { + var data = { x: 0 }; + if (b) data = { z: 0 }; + o['fun'](data); +} +/*The error position for this one is ok.*/ +function test2(b: boolean) { + var data = { z: 0 }; + o['fun'](data); +} diff --git a/tests/badly_positioned_flow_use_op/badly_positioned_flow_use_op.exp b/tests/badly_positioned_flow_use_op/badly_positioned_flow_use_op.exp new file mode 100644 index 00000000000..57a290d7f7e --- /dev/null +++ b/tests/badly_positioned_flow_use_op/badly_positioned_flow_use_op.exp @@ -0,0 +1,84 @@ +Error ------------------------------------------------------------------------------------------------------- b.js:17:12 + +Property `x` is missing in object literal [1] but exists in `Data` [2]. + + b.js:17:12 + 17| o['fun'](data); + ^^^^ + +References: + b.js:16:17 + 16| if (b) data = { z: 0 }; + ^^^^^^^^ [1] + b.js:5:28 + 5| declare function foo(data: Data): void; + ^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------- b.js:17:12 + +Property `z` is missing in `Data` [1] but exists in object literal [2] in the first argument. + + b.js:17:12 + 17| o['fun'](data); + ^^^^ + +References: + b.js:5:28 + 5| declare function foo(data: Data): void; + ^^^^ [1] + b.js:16:17 + 16| if (b) data = { z: 0 }; + ^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------- b.js:22:12 + +Property `x` is missing in object literal [1] but exists in `Data` [2]. + + b.js:22:12 + 22| o['fun'](data); + ^^^^ + +References: + b.js:21:14 + 21| var data = { z: 0 }; + ^^^^^^^^ [1] + b.js:5:28 + 5| declare function foo(data: Data): void; + ^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------- b.js:22:12 + +Property `z` is missing in `Data` [1] but exists in object literal [2] in the first argument. + + b.js:22:12 + 22| o['fun'](data); + ^^^^ + +References: + b.js:5:28 + 5| declare function foo(data: Data): void; + ^^^^ [1] + b.js:21:14 + 21| var data = { z: 0 }; + ^^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- test2.js:6:16 + +Cannot assign `5` to `t` because number [1] is incompatible with string [2]. 
+ + test2.js:6:16 + 6| const t: arg = 5; + ^ [1] + +References: + test2.js:4:17 + 4| type fn = (arg: string) => number; + ^^^^^^ [2] + + + +Found 5 errors diff --git a/tests/badly_positioned_flow_use_op/test2.js b/tests/badly_positioned_flow_use_op/test2.js new file mode 100644 index 00000000000..1315f64fa83 --- /dev/null +++ b/tests/badly_positioned_flow_use_op/test2.js @@ -0,0 +1,6 @@ +// @flow + +type T = ((A) => mixed) => (A & A); +type fn = (arg: string) => number; +type arg = $Call; +const t: arg = 5; diff --git a/tests/badly_positioned_func_sig/.flowconfig b/tests/badly_positioned_func_sig/.flowconfig new file mode 100644 index 00000000000..1a9d7ab1244 --- /dev/null +++ b/tests/badly_positioned_func_sig/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=true \ No newline at end of file diff --git a/tests/badly_positioned_func_sig/badly_positioned_func_sig.exp b/tests/badly_positioned_func_sig/badly_positioned_func_sig.exp new file mode 100644 index 00000000000..c0c5102eedf --- /dev/null +++ b/tests/badly_positioned_func_sig/badly_positioned_func_sig.exp @@ -0,0 +1,20 @@ +Error ----------------------------------------------------------------------------------------------------- test.js:11:3 + +Cannot initialize property `foo` with object literal because string [1] is incompatible with number [2] in property +`bar`. + + test.js:11:3 + 11| foo: {|bar: Bar|} = {bar: 'cat'}; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + test.js:11:29 + 11| foo: {|bar: Bar|} = {bar: 'cat'}; + ^^^^^ [1] + test.js:11:15 + 11| foo: {|bar: Bar|} = {bar: 'cat'}; + ^^^ [2] + + + +Found 1 error diff --git a/tests/badly_positioned_func_sig/test.js b/tests/badly_positioned_func_sig/test.js new file mode 100644 index 00000000000..1b3218f2c03 --- /dev/null +++ b/tests/badly_positioned_func_sig/test.js @@ -0,0 +1,12 @@ +// @flow + +export type Bar = $Call< + ({ + +blah: T, + }) => T, + {+blah: number}, +>; + +class X { + foo: {|bar: Bar|} = {bar: 'cat'}; +} diff --git a/tests/badly_positioned_make_exactt/.flowconfig b/tests/badly_positioned_make_exactt/.flowconfig new file mode 100644 index 00000000000..3ba1c048e9f --- /dev/null +++ b/tests/badly_positioned_make_exactt/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false \ No newline at end of file diff --git a/tests/badly_positioned_make_exactt/badly_positioned_make_exactt.exp b/tests/badly_positioned_make_exactt/badly_positioned_make_exactt.exp new file mode 100644 index 00000000000..f7d79b83b4c --- /dev/null +++ b/tests/badly_positioned_make_exactt/badly_positioned_make_exactt.exp @@ -0,0 +1,21 @@ +Error ----------------------------------------------------------------------------------------------------- test.js:17:1 + +Cannot create `X` element because: + - Either property `x` is missing in props [1] but exists in `RequiredProps` [2]. + - Or property `y` is missing in props [1] but exists in `RequiredProps1` [3]. 
+ + test.js:17:1 + 17| ; + ^^^^^ [1] + +References: + test.js:11:28 + 11| export type Props = RequiredProps & RequiredProps1; + ^^^^^^^^^^^^^^^^^^^^ [2] + test.js:11:51 + 11| export type Props = RequiredProps & RequiredProps1; + ^^^^^^^^^^^^^^ [3] + + + +Found 1 error diff --git a/tests/badly_positioned_make_exactt/test.js b/tests/badly_positioned_make_exactt/test.js new file mode 100644 index 00000000000..36f457fcb21 --- /dev/null +++ b/tests/badly_positioned_make_exactt/test.js @@ -0,0 +1,17 @@ +// @flow +import * as React from 'react'; + +type RequiredProps = { + x : any, +}; +type RequiredProps1 = { + y : any, +}; + +export type Props = RequiredProps & RequiredProps1; + +class FlatList extends React.Component> {} + +type Props1 = $Exact>; +class X extends React.Component> {} +; diff --git a/tests/badly_positioned_objassign/.flowconfig b/tests/badly_positioned_objassign/.flowconfig new file mode 100644 index 00000000000..3ba1c048e9f --- /dev/null +++ b/tests/badly_positioned_objassign/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false \ No newline at end of file diff --git a/tests/badly_positioned_objassign/badly_positioned_objassign.exp b/tests/badly_positioned_objassign/badly_positioned_objassign.exp new file mode 100644 index 00000000000..e3188fa0ccf --- /dev/null +++ b/tests/badly_positioned_objassign/badly_positioned_objassign.exp @@ -0,0 +1,44 @@ +Error ----------------------------------------------------------------------------------------------------- test1.js:2:1 + +Incorrect arguments passed to call of method `assign` because string [1] is incompatible with number [2]. + + test1.js:2:1 + 2| Object.assign({ p: 0 }, new C); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + test1.js:1:14 + 1| class C { p: string }; + ^^^^^^ [1] + test1.js:2:20 + 2| Object.assign({ p: 0 }, new C); + ^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test2.js:3:7 + +Cannot spread `i` because property `p` is not readable. + + 3| ({ ...i }); + ^ + + +Error ---------------------------------------------------------------------------------------------------- test3.js:5:14 + +Cannot assign `x` to `a` because rest of object pattern [1] is incompatible with string [2]. 
+ + test3.js:5:14 + 5| var {...a} = x; + ^ + +References: + test3.js:5:6 + 5| var {...a} = x; + ^^^^ [1] + test3.js:4:9 + 4| var a : string; + ^^^^^^ [2] + + + +Found 3 errors diff --git a/tests/badly_positioned_objassign/test1.js b/tests/badly_positioned_objassign/test1.js new file mode 100644 index 00000000000..f68956d8edb --- /dev/null +++ b/tests/badly_positioned_objassign/test1.js @@ -0,0 +1,2 @@ +class C { p: string }; +Object.assign({ p: 0 }, new C); diff --git a/tests/badly_positioned_objassign/test2.js b/tests/badly_positioned_objassign/test2.js new file mode 100644 index 00000000000..3c96df4cef8 --- /dev/null +++ b/tests/badly_positioned_objassign/test2.js @@ -0,0 +1,3 @@ +class C { -p: string } +declare var i: C; +({ ...i }); diff --git a/tests/badly_positioned_objassign/test3.js b/tests/badly_positioned_objassign/test3.js new file mode 100644 index 00000000000..481151e41c1 --- /dev/null +++ b/tests/badly_positioned_objassign/test3.js @@ -0,0 +1,5 @@ +// @flow +class X {foo: number;}; +const x = new X; +var a : string; +var {...a} = x; diff --git a/tests/badly_positioned_objmap/.flowconfig b/tests/badly_positioned_objmap/.flowconfig new file mode 100644 index 00000000000..3cd895fbd2f --- /dev/null +++ b/tests/badly_positioned_objmap/.flowconfig @@ -0,0 +1,2 @@ +[options] +module.system=haste \ No newline at end of file diff --git a/tests/badly_positioned_objmap/badly_positioned_objmap.exp b/tests/badly_positioned_objmap/badly_positioned_objmap.exp new file mode 100644 index 00000000000..df6d25efdde --- /dev/null +++ b/tests/badly_positioned_objmap/badly_positioned_objmap.exp @@ -0,0 +1,53 @@ +Error ---------------------------------------------------------------------------------------------------- objmap.js:5:1 + +Cannot call `c` because string [1] is not a valid argument of `$ObjMap` [2]. + + objmap.js:5:1 + 5| c('string')(); + ^^^^^^^^^^^ + +References: + objmap.js:5:3 + 5| c('string')(); + ^^^^^^^^ [1] + objmap.js:3:20 + 3| type C = (H) => $ObjMap(X)=>X>; + ^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------- objmapi.js:22:12 + +Cannot instantiate `PickKeysFromObject` because property `q` is missing in `MySpec` [1] in the first argument. + + objmapi.js:22:12 + 22| type A = PickKeysFromObject; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + objmapi.js:22:31 + 22| type A = PickKeysFromObject; + ^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------- objmapi.js:25:17 + +Cannot assign `_a` to `_b` because property `q` is missing in object type [1] but exists in object type [2]. + + objmapi.js:25:17 + 25| const _b: A = _a; + ^^ + +References: + objmapi.js:23:19 + 23| declare var _a: {|x: number|}; + ^^^^^^^^^^^^^ [1] + objmapi.js:25:13 + 25| const _b: A = _a; + ^ [2] + + + +Found 3 errors + +Only showing the most relevant union/intersection branches. 
+To see all branches, re-run Flow with --show-all-branches diff --git a/tests/badly_positioned_objmap/objmap.js b/tests/badly_positioned_objmap/objmap.js new file mode 100644 index 00000000000..f9a02e395d9 --- /dev/null +++ b/tests/badly_positioned_objmap/objmap.js @@ -0,0 +1,5 @@ +// @flow + +type C = (H) => $ObjMap(X)=>X>; +declare var c: C; +c('string')(); diff --git a/tests/badly_positioned_objmap/objmapi.js b/tests/badly_positioned_objmap/objmapi.js new file mode 100644 index 00000000000..0ee8c2761ee --- /dev/null +++ b/tests/badly_positioned_objmap/objmapi.js @@ -0,0 +1,27 @@ +// @flow + +type MapKeyValue = + (( + Key, + Value, + ) => PickKeysFromObject<$ElementType, Value>) & + ((Key) => $ElementType); + +export type PickKeysFromObject = $ObjMapi< + KeyMap, + MapKeyValue, +>; + +type MySpec = {| + x: number, +|}; + +(function() { + const keys = {x: null, q: null}; + // $FlowExpectedError: `q` is missing in `MySpec` + type A = PickKeysFromObject; + declare var _a: {|x: number|}; + // $FlowExpectedError: `q` is missing in object type + const _b: A = _a; +})(); + diff --git a/tests/badly_positioned_polyt/.flowconfig b/tests/badly_positioned_polyt/.flowconfig new file mode 100644 index 00000000000..3ba1c048e9f --- /dev/null +++ b/tests/badly_positioned_polyt/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false \ No newline at end of file diff --git a/tests/badly_positioned_polyt/badly_positioned_polyt.exp b/tests/badly_positioned_polyt/badly_positioned_polyt.exp new file mode 100644 index 00000000000..724dbe6b85c --- /dev/null +++ b/tests/badly_positioned_polyt/badly_positioned_polyt.exp @@ -0,0 +1,68 @@ +Error ----------------------------------------------------------------------------------------------- concretize.js:10:1 + +Cannot create `C1` element because: + - `EventTarget` [1] is incompatible with `C1` [2] in type argument `T` [3] of the first argument of property + `onKeyDown`. + - `EventTarget` [4] is incompatible with `C1` [2] in type argument `T` of the first argument of property `onKeyDown`. + + concretize.js:10:1 + 10| ; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react-dom.js:181:21 + 181| +T: EventTarget = EventTarget, + ^^^^^^^^^^^ [1] + concretize.js:9:47 + 9| function _onKeyDown(e: SyntheticKeyboardEvent): void {}; + ^^ [2] + /react-dom.js:181:4 + 181| +T: EventTarget = EventTarget, + ^ [3] + /react-dom.js:181:7 + 181| +T: EventTarget = EventTarget, + ^^^^^^^^^^^ [4] + + +Error ----------------------------------------------------------------------------------------------------- test.js:23:1 + +Cannot create `Foo` element because undefined property `context` [1] is incompatible with object type [2] in type +argument `Context`. + + test.js:23:1 + 23| ; + ^^^^^^^^^^^^^^^^^^^ + +References: + test.js:17:10 + 17| Props: {}, + ^^ [1] + test.js:18:12 + 18| Context: {}, + ^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:35:1 + +Cannot create `Bar` element because: + - undefined property `context` [1] is incompatible with object type [2] in type argument `Context`. + - `T` [3] is incompatible with object type [1] in type argument `Props`. 
+ + test.js:35:1 + 35| ; + ^^^^^^^^^^^^^^^^^^^ + +References: + test.js:29:10 + 29| Props: {}, + ^^ [1] + test.js:30:12 + 30| Context: {}, + ^^ [2] + test.js:6:18 + 6| type Props = {t: T}; + ^ [3] + + + +Found 5 errors diff --git a/tests/badly_positioned_polyt/concretize.js b/tests/badly_positioned_polyt/concretize.js new file mode 100644 index 00000000000..459b492438f --- /dev/null +++ b/tests/badly_positioned_polyt/concretize.js @@ -0,0 +1,10 @@ +// @flow + +const React = require('React'); + +type Props = { + onKeyDown?: ?(e: SyntheticKeyboardEvent<>) => mixed, +} +class C1 extends React.Component {}; +function _onKeyDown(e: SyntheticKeyboardEvent): void {}; +; diff --git a/tests/badly_positioned_polyt/test.js b/tests/badly_positioned_polyt/test.js new file mode 100644 index 00000000000..b690a31c715 --- /dev/null +++ b/tests/badly_positioned_polyt/test.js @@ -0,0 +1,35 @@ +// @flow + +import * as React from 'react'; + +declare opaque type T; +type Props = {t: T}; +declare var props: Props; + +type D = $Diff< + Props, + {context: Context} +>; +type X = { + x: D, +}; +class Foo< + Props: {}, + Context: {}, +> extends React.Component< + X +> {} +//Error: cannot create Foo +; + +type Y = { + y: $Diff; +}; +class Bar< + Props: {}, + Context: {}, +> extends React.Component< + Y +> {} +//Error: cannot create Bar +; diff --git a/tests/badly_positioned_react/.flowconfig b/tests/badly_positioned_react/.flowconfig new file mode 100644 index 00000000000..3ba1c048e9f --- /dev/null +++ b/tests/badly_positioned_react/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false \ No newline at end of file diff --git a/tests/badly_positioned_react/badly_positioned_react.exp b/tests/badly_positioned_react/badly_positioned_react.exp new file mode 100644 index 00000000000..8bc56d03afa --- /dev/null +++ b/tests/badly_positioned_react/badly_positioned_react.exp @@ -0,0 +1,19 @@ +Error ----------------------------------------------------------------------------------------------------- test.js:24:1 + +Cannot create `Foo` element because property `x` is missing in object type [1] but exists in `Props` [2]. + + test.js:24:1 + 24| ; + ^^^^^^^ + +References: + test.js:6:34 + 6| Component: React.ComponentType<{...P, ...{}}>, + ^^^^^^^^^^^^^ [1] + test.js:21:35 + 21| class Foo extends React.Component {} + ^^^^^ [2] + + + +Found 1 error diff --git a/tests/badly_positioned_react/test.js b/tests/badly_positioned_react/test.js new file mode 100644 index 00000000000..a18b28241f3 --- /dev/null +++ b/tests/badly_positioned_react/test.js @@ -0,0 +1,24 @@ +// @flow + +import * as React from 'react'; + +function create( + Component: React.ComponentType<{...P, ...{}}>, +): React.ComponentType
<P>
{ + return Component; +} + +export type Props = { + x: {}, +}; + +function create1( + Component: React.ComponentType

, +): React.ComponentType

{ + return Component; +} + +class Foo extends React.Component {} + +Foo = create(create1(Foo)); +; diff --git a/tests/badly_positioned_spread/.flowconfig b/tests/badly_positioned_spread/.flowconfig new file mode 100644 index 00000000000..61ad9d5f8f5 --- /dev/null +++ b/tests/badly_positioned_spread/.flowconfig @@ -0,0 +1,4 @@ +[options] +module.system=haste +experimental.well_formed_exports=true +experimental.types_first=true \ No newline at end of file diff --git a/tests/badly_positioned_spread/bad.js b/tests/badly_positioned_spread/bad.js new file mode 100644 index 00000000000..438cfbeb06f --- /dev/null +++ b/tests/badly_positioned_spread/bad.js @@ -0,0 +1,11 @@ +// @flow + +const Foo = require('./foo'); + +const y : {foo: {color?: string}} = {foo: {color: "cat"}}; +const x = { + Bar: { + foo: Foo, + }, + ...{Bar: y} +}; diff --git a/tests/badly_positioned_spread/badly_positioned_spread.exp b/tests/badly_positioned_spread/badly_positioned_spread.exp new file mode 100644 index 00000000000..e50808806d7 --- /dev/null +++ b/tests/badly_positioned_spread/badly_positioned_spread.exp @@ -0,0 +1,58 @@ +Error ------------------------------------------------------------------------------------------------------ bad.js:10:8 + +Cannot spread object literal because property `color` is missing in statics of function type [1] but exists in object +type [2] in property `foo`. + + bad.js:10:8 + 10| ...{Bar: y} + ^^^^^^^^ + +References: + foo.js:3:1 + 3| function Foo(props: {||}): void {} + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + bad.js:5:17 + 5| const y : {foo: {color?: string}} = {foo: {color: "cat"}}; + ^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ good.js:8:8 + +Cannot spread object literal because property `color` is missing in statics of function type [1] but exists in object +type [2] in property `foo`. + + good.js:8:8 + v------ + 8| ...{Bar: { + 9| foo: Foo, + 10| }} + -^ + +References: + foo.js:3:1 + 3| function Foo(props: {||}): void {} + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + good.js:5:17 + 5| const y : {foo: {color?: string}} = {foo: {color: "cat"}}; + ^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ test.js:7:6 + +Cannot spread `V` because number [1] is incompatible with string [2]. 
+ + test.js:7:6 + 7| ...V + ^ + +References: + test.js:3:25 + 3| declare var V : { name: number }; + ^^^^^^ [1] + test.js:6:10 + 6| name : 'good', + ^^^^^^ [2] + + + +Found 3 errors diff --git a/tests/badly_positioned_spread/foo.js b/tests/badly_positioned_spread/foo.js new file mode 100644 index 00000000000..c665ed9f773 --- /dev/null +++ b/tests/badly_positioned_spread/foo.js @@ -0,0 +1,4 @@ +// @flow + +function Foo(props: {||}): void {} +module.exports = Foo; diff --git a/tests/badly_positioned_spread/good.js b/tests/badly_positioned_spread/good.js new file mode 100644 index 00000000000..b01dba25880 --- /dev/null +++ b/tests/badly_positioned_spread/good.js @@ -0,0 +1,11 @@ +// @flow + +const Foo = require('./foo'); + +const y : {foo: {color?: string}} = {foo: {color: "cat"}}; +const x = { + Bar: y, + ...{Bar: { + foo: Foo, + }} +}; diff --git a/tests/badly_positioned_spread/test.js b/tests/badly_positioned_spread/test.js new file mode 100644 index 00000000000..2511fc5ecb7 --- /dev/null +++ b/tests/badly_positioned_spread/test.js @@ -0,0 +1,8 @@ +// @flow + +declare var V : { name: number }; + +const x = { + name : 'good', + ...V +} diff --git a/tests/badly_positioned_unknown_use/.flowconfig b/tests/badly_positioned_unknown_use/.flowconfig new file mode 100644 index 00000000000..de38d19537d --- /dev/null +++ b/tests/badly_positioned_unknown_use/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false diff --git a/tests/badly_positioned_unknown_use/badly_positioned_unknown_use.exp b/tests/badly_positioned_unknown_use/badly_positioned_unknown_use.exp new file mode 100644 index 00000000000..0b9f2571800 --- /dev/null +++ b/tests/badly_positioned_unknown_use/badly_positioned_unknown_use.exp @@ -0,0 +1,19 @@ +Error --------------------------------------------------------------------------------------------------- test1.js:22:13 + +Cannot call `foo` because inexact object type [1] is incompatible with exact object type [2]. + + test1.js:22:13 + 22| var x = f()(foo(Comp)); + ^^^^^^^^^ + +References: + test1.js:12:29 + 12| Comp: React.ComponentType<{}>, + ^^ [1] + test1.js:6:34 + 6| Component: React$ComponentType<{|...P|}>, + ^^^^^^^^ [2] + + + +Found 1 error diff --git a/tests/badly_positioned_unknown_use/test1.js b/tests/badly_positioned_unknown_use/test1.js new file mode 100644 index 00000000000..2929a238a61 --- /dev/null +++ b/tests/badly_positioned_unknown_use/test1.js @@ -0,0 +1,22 @@ +// @flow + +import * as React from 'react'; + +declare export function foo
<P>
( + Component: React$ComponentType<{|...P|}>, +): React$ComponentType
<P>
; + +class Comp extends React.Component<{}, {}> {} + +function f< + Comp: React.ComponentType<{}>, + Props: $Diff, {}>, +>(): Comp => { +} { + return function() { + return {} + }; +} + +// Error: inexact from foo vs exact from f +var x = f()(foo(Comp)); diff --git a/tests/bom/bom.exp b/tests/bom/bom.exp index c47140d6610..2675be617ee 100644 --- a/tests/bom/bom.exp +++ b/tests/bom/bom.exp @@ -7,8 +7,8 @@ Cannot call `FormData` with empty string bound to `form` because string [1] is i ^^ [1] References: - /bom.js:330:24 - 330| constructor(form?: HTMLFormElement): void; + /bom.js:527:24 + 527| constructor(form?: HTMLFormElement): void; ^^^^^^^^^^^^^^^ [2] @@ -22,11 +22,11 @@ with `HTMLFormElement` [2]. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:654:36 - 654| createElement(tagName: 'input'): HTMLInputElement; + /dom.js:768:36 + 768| createElement(tagName: 'input'): HTMLInputElement; ^^^^^^^^^^^^^^^^ [1] - /bom.js:330:24 - 330| constructor(form?: HTMLFormElement): void; + /bom.js:527:24 + 527| constructor(form?: HTMLFormElement): void; ^^^^^^^^^^^^^^^ [2] @@ -41,14 +41,14 @@ Cannot assign `a.get(...)` to `d` because: ^^^^^^^^^^^^ References: - /bom.js:333:24 - 333| get(name: string): ?FormDataEntryValue; + /bom.js:530:24 + 530| get(name: string): ?FormDataEntryValue; ^^^^^^^^^^^^^^^^^^^ [1] FormData.js:14:10 14| const d: string = a.get('foo'); // incorrect ^^^^^^ [2] - /bom.js:333:25 - 333| get(name: string): ?FormDataEntryValue; + /bom.js:530:25 + 530| get(name: string): ?FormDataEntryValue; ^^^^^^^^^^^^^^^^^^ [3] @@ -63,14 +63,14 @@ Cannot assign `a.get(...)` to `e` because: ^^^^^^^^^^^^ References: - /bom.js:333:24 - 333| get(name: string): ?FormDataEntryValue; + /bom.js:530:24 + 530| get(name: string): ?FormDataEntryValue; ^^^^^^^^^^^^^^^^^^^ [1] FormData.js:15:10 15| const e: Blob = a.get('foo'); // incorrect ^^^^ [2] - /bom.js:333:25 - 333| get(name: string): ?FormDataEntryValue; + /bom.js:530:25 + 530| get(name: string): ?FormDataEntryValue; ^^^^^^^^^^^^^^^^^^ [3] @@ -83,8 +83,8 @@ Cannot call `a.get` with `2` bound to `name` because number [1] is incompatible ^ [1] References: - /bom.js:333:15 - 333| get(name: string): ?FormDataEntryValue; + /bom.js:530:15 + 530| get(name: string): ?FormDataEntryValue; ^^^^^^ [2] @@ -100,8 +100,8 @@ References: FormData.js:21:33 21| const a2: Array = a.getAll('foo'); // incorrect ^^^^^^ [1] - /bom.js:327:27 - 327| type FormDataEntryValue = string | File + /bom.js:524:27 + 524| type FormDataEntryValue = string | File ^^^^^^ [2] @@ -117,8 +117,8 @@ References: FormData.js:22:26 22| const a3: Array = a.getAll('foo'); // incorrect ^^^^ [1] - /bom.js:327:36 - 327| type FormDataEntryValue = string | File + /bom.js:524:36 + 524| type FormDataEntryValue = string | File ^^^^ [2] @@ -131,8 +131,8 @@ Cannot call `a.getAll` with `23` bound to `name` because number [1] is incompati ^^ [1] References: - /bom.js:334:18 - 334| getAll(name: string): Array; + /bom.js:531:18 + 531| getAll(name: string): Array; ^^^^^^ [2] @@ -150,11 +150,11 @@ References: FormData.js:27:14 27| a.set('foo', {}); // incorrect ^^ [1] - /bom.js:337:30 - 337| set(name: string, value: Blob, filename?: string): void; + /bom.js:534:30 + 534| set(name: string, value: Blob, filename?: string): void; ^^^^ [2] - /bom.js:338:30 - 338| set(name: string, value: File, filename?: string): void; + /bom.js:535:30 + 535| set(name: string, value: File, filename?: string): void; ^^^^ [3] @@ -173,14 +173,14 @@ References: FormData.js:28:7 28| a.set(2, 'bar'); // incorrect ^ [1] - /bom.js:336:15 - 336| 
set(name: string, value: string): void; + /bom.js:533:15 + 533| set(name: string, value: string): void; ^^^^^^ [2] - /bom.js:337:15 - 337| set(name: string, value: Blob, filename?: string): void; + /bom.js:534:15 + 534| set(name: string, value: Blob, filename?: string): void; ^^^^^^ [3] - /bom.js:338:15 - 338| set(name: string, value: File, filename?: string): void; + /bom.js:535:15 + 535| set(name: string, value: File, filename?: string): void; ^^^^^^ [4] @@ -198,11 +198,11 @@ References: FormData.js:29:14 29| a.set('foo', 'bar', 'baz'); // incorrect ^^^^^ [1] - /bom.js:337:30 - 337| set(name: string, value: Blob, filename?: string): void; + /bom.js:534:30 + 534| set(name: string, value: Blob, filename?: string): void; ^^^^ [2] - /bom.js:338:30 - 338| set(name: string, value: File, filename?: string): void; + /bom.js:535:30 + 535| set(name: string, value: File, filename?: string): void; ^^^^ [3] @@ -220,11 +220,11 @@ References: FormData.js:32:33 32| a.set('bar', new File([], 'q'), 2) // incorrect ^ [1] - /bom.js:337:47 - 337| set(name: string, value: Blob, filename?: string): void; + /bom.js:534:47 + 534| set(name: string, value: Blob, filename?: string): void; ^^^^^^ [2] - /bom.js:338:47 - 338| set(name: string, value: File, filename?: string): void; + /bom.js:535:47 + 535| set(name: string, value: File, filename?: string): void; ^^^^^^ [3] @@ -240,8 +240,8 @@ References: FormData.js:35:24 35| a.set('bar', new Blob, 2) // incorrect ^ [1] - /bom.js:337:47 - 337| set(name: string, value: Blob, filename?: string): void; + /bom.js:534:47 + 534| set(name: string, value: Blob, filename?: string): void; ^^^^^^ [2] @@ -259,11 +259,11 @@ References: FormData.js:39:17 39| a.append('foo', {}); // incorrect ^^ [1] - /bom.js:341:33 - 341| append(name: string, value: Blob, filename?: string): void; + /bom.js:538:33 + 538| append(name: string, value: Blob, filename?: string): void; ^^^^ [2] - /bom.js:342:33 - 342| append(name: string, value: File, filename?: string): void; + /bom.js:539:33 + 539| append(name: string, value: File, filename?: string): void; ^^^^ [3] @@ -282,14 +282,14 @@ References: FormData.js:40:10 40| a.append(2, 'bar'); // incorrect ^ [1] - /bom.js:340:18 - 340| append(name: string, value: string): void; + /bom.js:537:18 + 537| append(name: string, value: string): void; ^^^^^^ [2] - /bom.js:341:18 - 341| append(name: string, value: Blob, filename?: string): void; + /bom.js:538:18 + 538| append(name: string, value: Blob, filename?: string): void; ^^^^^^ [3] - /bom.js:342:18 - 342| append(name: string, value: File, filename?: string): void; + /bom.js:539:18 + 539| append(name: string, value: File, filename?: string): void; ^^^^^^ [4] @@ -307,11 +307,11 @@ References: FormData.js:41:17 41| a.append('foo', 'bar', 'baz'); // incorrect ^^^^^ [1] - /bom.js:341:33 - 341| append(name: string, value: Blob, filename?: string): void; + /bom.js:538:33 + 538| append(name: string, value: Blob, filename?: string): void; ^^^^ [2] - /bom.js:342:33 - 342| append(name: string, value: File, filename?: string): void; + /bom.js:539:33 + 539| append(name: string, value: File, filename?: string): void; ^^^^ [3] @@ -329,11 +329,11 @@ References: FormData.js:45:36 45| a.append('bar', new File([], 'q'), 2) // incorrect ^ [1] - /bom.js:341:50 - 341| append(name: string, value: Blob, filename?: string): void; + /bom.js:538:50 + 538| append(name: string, value: Blob, filename?: string): void; ^^^^^^ [2] - /bom.js:342:50 - 342| append(name: string, value: File, filename?: string): void; + /bom.js:539:50 + 539| 
append(name: string, value: File, filename?: string): void; ^^^^^^ [3] @@ -349,8 +349,8 @@ References: FormData.js:48:27 48| a.append('bar', new Blob, 2) // incorrect ^ [1] - /bom.js:341:50 - 341| append(name: string, value: Blob, filename?: string): void; + /bom.js:538:50 + 538| append(name: string, value: Blob, filename?: string): void; ^^^^^^ [2] @@ -363,8 +363,8 @@ Cannot call `a.delete` with `3` bound to `name` because number [1] is incompatib ^ [1] References: - /bom.js:344:18 - 344| delete(name: string): void; + /bom.js:541:18 + 541| delete(name: string): void; ^^^^^^ [2] @@ -377,8 +377,8 @@ Cannot assign `x` to `x` because string [1] is incompatible with number [2]. ^^^^^^^^ References: - /bom.js:346:22 - 346| keys(): Iterator; + /bom.js:543:22 + 543| keys(): Iterator; ^^^^^^ [1] FormData.js:56:13 56| for (let x: number of a.keys()) {} // incorrect @@ -387,7 +387,7 @@ References: Error ------------------------------------------------------------------------------------------------ FormData.js:64:52 -Cannot assign for-of element to variable because `Blob` [1] is incompatible with `File` [2] in index 1. +Cannot assign for-of element to destructuring because `Blob` [1] is incompatible with `File` [2] in index 1. FormData.js:64:52 64| for (let [x, y]: [string, string | File | Blob] of a.entries()) {} // incorrect @@ -397,48 +397,14 @@ References: FormData.js:64:43 64| for (let [x, y]: [string, string | File | Blob] of a.entries()) {} // incorrect ^^^^ [1] - /bom.js:327:36 - 327| type FormDataEntryValue = string | File + /bom.js:524:36 + 524| type FormDataEntryValue = string | File ^^^^ [2] -Error ------------------------------------------------------------------------------------------------ FormData.js:65:11 - -Cannot assign element 0 to `x` because string [1] is incompatible with number [2]. - - FormData.js:65:11 - 65| for (let [x, y]: [number, string] of a.entries()) {} // incorrect - ^ - -References: - /bom.js:348:26 - 348| entries(): Iterator<[string, FormDataEntryValue]>; - ^^^^^^ [1] - FormData.js:65:19 - 65| for (let [x, y]: [number, string] of a.entries()) {} // incorrect - ^^^^^^ [2] - - -Error ------------------------------------------------------------------------------------------------ FormData.js:65:14 - -Cannot assign element 1 to `y` because `File` [1] is incompatible with string [2]. - - FormData.js:65:14 - 65| for (let [x, y]: [number, string] of a.entries()) {} // incorrect - ^ - -References: - /bom.js:348:34 - 348| entries(): Iterator<[string, FormDataEntryValue]>; - ^^^^^^^^^^^^^^^^^^ [1] - FormData.js:65:27 - 65| for (let [x, y]: [number, string] of a.entries()) {} // incorrect - ^^^^^^ [2] - - Error ------------------------------------------------------------------------------------------------ FormData.js:65:38 -Cannot assign for-of element to variable because: +Cannot assign for-of element to destructuring because: - string [1] is incompatible with number [2] in index 0. - `File` [3] is incompatible with string [4] in index 1. 
@@ -447,42 +413,23 @@ Cannot assign for-of element to variable because: ^^^^^^^^^^^ References: - /bom.js:348:26 - 348| entries(): Iterator<[string, FormDataEntryValue]>; + /bom.js:545:26 + 545| entries(): Iterator<[string, FormDataEntryValue]>; ^^^^^^ [1] FormData.js:65:19 65| for (let [x, y]: [number, string] of a.entries()) {} // incorrect ^^^^^^ [2] - /bom.js:348:34 - 348| entries(): Iterator<[string, FormDataEntryValue]>; + /bom.js:545:34 + 545| entries(): Iterator<[string, FormDataEntryValue]>; ^^^^^^^^^^^^^^^^^^ [3] FormData.js:65:27 65| for (let [x, y]: [number, string] of a.entries()) {} // incorrect ^^^^^^ [4] -Error ------------------------------------------------------------------------------------------------ FormData.js:66:14 - -Cannot assign element 1 to `y` because: - - `File` [1] is incompatible with number [2]. - - string [1] is incompatible with number [2]. - - FormData.js:66:14 - 66| for (let [x, y]: [string, number] of a.entries()) {} // incorrect - ^ - -References: - /bom.js:348:34 - 348| entries(): Iterator<[string, FormDataEntryValue]>; - ^^^^^^^^^^^^^^^^^^ [1] - FormData.js:66:27 - 66| for (let [x, y]: [string, number] of a.entries()) {} // incorrect - ^^^^^^ [2] - - Error ------------------------------------------------------------------------------------------------ FormData.js:66:38 -Cannot assign for-of element to variable because: +Cannot assign for-of element to destructuring because: - `File` [1] is incompatible with number [2] in index 1. - string [1] is incompatible with number [2] in index 1. - number [2] is incompatible with string [3] in index 1. @@ -492,56 +439,20 @@ Cannot assign for-of element to variable because: ^^^^^^^^^^^ References: - /bom.js:348:34 - 348| entries(): Iterator<[string, FormDataEntryValue]>; + /bom.js:545:34 + 545| entries(): Iterator<[string, FormDataEntryValue]>; ^^^^^^^^^^^^^^^^^^ [1] FormData.js:66:27 66| for (let [x, y]: [string, number] of a.entries()) {} // incorrect ^^^^^^ [2] - /bom.js:327:27 - 327| type FormDataEntryValue = string | File + /bom.js:524:27 + 524| type FormDataEntryValue = string | File ^^^^^^ [3] -Error ------------------------------------------------------------------------------------------------ FormData.js:67:11 - -Cannot assign element 0 to `x` because string [1] is incompatible with number [2]. - - FormData.js:67:11 - 67| for (let [x, y]: [number, number] of a.entries()) {} // incorrect - ^ - -References: - /bom.js:348:26 - 348| entries(): Iterator<[string, FormDataEntryValue]>; - ^^^^^^ [1] - FormData.js:67:19 - 67| for (let [x, y]: [number, number] of a.entries()) {} // incorrect - ^^^^^^ [2] - - -Error ------------------------------------------------------------------------------------------------ FormData.js:67:14 - -Cannot assign element 1 to `y` because: - - `File` [1] is incompatible with number [2]. - - string [1] is incompatible with number [2]. - - FormData.js:67:14 - 67| for (let [x, y]: [number, number] of a.entries()) {} // incorrect - ^ - -References: - /bom.js:348:34 - 348| entries(): Iterator<[string, FormDataEntryValue]>; - ^^^^^^^^^^^^^^^^^^ [1] - FormData.js:67:27 - 67| for (let [x, y]: [number, number] of a.entries()) {} // incorrect - ^^^^^^ [2] - - Error ------------------------------------------------------------------------------------------------ FormData.js:67:38 -Cannot assign for-of element to variable because: +Cannot assign for-of element to destructuring because: - string [1] is incompatible with number [2] in index 0. 
- `File` [3] is incompatible with number [4] in index 1. - string [3] is incompatible with number [4] in index 1. @@ -552,20 +463,20 @@ Cannot assign for-of element to variable because: ^^^^^^^^^^^ References: - /bom.js:348:26 - 348| entries(): Iterator<[string, FormDataEntryValue]>; + /bom.js:545:26 + 545| entries(): Iterator<[string, FormDataEntryValue]>; ^^^^^^ [1] FormData.js:67:19 67| for (let [x, y]: [number, number] of a.entries()) {} // incorrect ^^^^^^ [2] - /bom.js:348:34 - 348| entries(): Iterator<[string, FormDataEntryValue]>; + /bom.js:545:34 + 545| entries(): Iterator<[string, FormDataEntryValue]>; ^^^^^^^^^^^^^^^^^^ [3] FormData.js:67:27 67| for (let [x, y]: [number, number] of a.entries()) {} // incorrect ^^^^^^ [4] - /bom.js:327:27 - 327| type FormDataEntryValue = string | File + /bom.js:524:27 + 524| type FormDataEntryValue = string | File ^^^^^^ [5] @@ -574,16 +485,16 @@ Error -------------------------------------------------------------------------- Cannot assign `headers.get(...)` to `b` because null [1] is incompatible with string [2]. Headers.js:8:19 - 8| const b: string = headers.get('foo'); // incorrect - ^^^^^^^^^^^^^^^^^^ + 8| const b: string = headers.get('foo'); // incorrect + ^^^^^^^^^^^^^^^^^^ References: - /bom.js:921:24 - 921| get(name: string): null | string; - ^^^^ [1] + /bom.js:1365:24 + 1365| get(name: string): null | string; + ^^^^ [1] Headers.js:8:10 - 8| const b: string = headers.get('foo'); // incorrect - ^^^^^^ [2] + 8| const b: string = headers.get('foo'); // incorrect + ^^^^^^ [2] Error ----------------------------------------------------------------------------------------- MutationObserver.js:10:1 @@ -595,9 +506,9 @@ Cannot call `MutationObserver` because function [1] requires another argument. ^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:376:5 - 376| constructor(callback: (arr: Array, observer: MutationObserver) => any): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:574:5 + 574| constructor(callback: (arr: Array, observer: MutationObserver) => mixed): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error ---------------------------------------------------------------------------------------- MutationObserver.js:11:22 @@ -609,9 +520,9 @@ Cannot call `MutationObserver` with `42` bound to `callback` because number [1] ^^ [1] References: - /bom.js:376:27 - 376| constructor(callback: (arr: Array, observer: MutationObserver) => any): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /bom.js:574:27 + 574| constructor(callback: (arr: Array, observer: MutationObserver) => mixed): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error ---------------------------------------------------------------------------------------- MutationObserver.js:12:22 @@ -627,8 +538,8 @@ References: MutationObserver.js:12:26 12| new MutationObserver((n: number) => {}); // incorrect ^^^^^^ [1] - /bom.js:376:33 - 376| constructor(callback: (arr: Array, observer: MutationObserver) => any): void; + /bom.js:574:33 + 574| constructor(callback: (arr: Array, observer: MutationObserver) => mixed): void; ^^^^^^^^^^^^^^^^^^^^^ [2] @@ -646,18 +557,18 @@ Cannot call `o.observe` because: ^^^^^^^^^^^ [2] References: - /bom.js:377:5 - 377| observe(target: Node, options: MutationObserverInit): void; + /bom.js:575:5 + 575| observe(target: Node, options: MutationObserverInit): void; 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /bom.js:364:7 - 364| | { childList: true } - ^^^^^^^^^^^^^^^^^^^ [3] - /bom.js:365:7 - 365| | { attributes: true } - ^^^^^^^^^^^^^^^^^^^^ [4] - /bom.js:366:7 - 366| | { characterData: true } - ^^^^^^^^^^^^^^^^^^^^^^^ [5] + /bom.js:561:7 + 561| | { childList: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:562:7 + 562| | { attributes: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^ [4] + /bom.js:563:7 + 563| | { characterData: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [5] Error ----------------------------------------------------------------------------------------- MutationObserver.js:19:1 @@ -674,18 +585,18 @@ Cannot call `o.observe` because: ^^^^^^^^^^^^^^^^^^^^ [2] References: - /bom.js:377:5 - 377| observe(target: Node, options: MutationObserverInit): void; + /bom.js:575:5 + 575| observe(target: Node, options: MutationObserverInit): void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /bom.js:364:7 - 364| | { childList: true } - ^^^^^^^^^^^^^^^^^^^ [3] - /bom.js:365:7 - 365| | { attributes: true } - ^^^^^^^^^^^^^^^^^^^^ [4] - /bom.js:366:7 - 366| | { characterData: true } - ^^^^^^^^^^^^^^^^^^^^^^^ [5] + /bom.js:561:7 + 561| | { childList: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:562:7 + 562| | { attributes: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^ [4] + /bom.js:563:7 + 563| | { characterData: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [5] Error ---------------------------------------------------------------------------------------- MutationObserver.js:19:11 @@ -697,8 +608,8 @@ Cannot call `o.observe` with `'invalid'` bound to `target` because string [1] is ^^^^^^^^^ [1] References: - /bom.js:377:21 - 377| observe(target: Node, options: MutationObserverInit): void; + /bom.js:575:21 + 575| observe(target: Node, options: MutationObserverInit): void; ^^^^ [2] @@ -716,18 +627,18 @@ Cannot call `o.observe` because: ^^^^^^^^^^^^^^ [2] References: - /bom.js:377:5 - 377| observe(target: Node, options: MutationObserverInit): void; + /bom.js:575:5 + 575| observe(target: Node, options: MutationObserverInit): void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /bom.js:364:7 - 364| | { childList: true } - ^^^^^^^^^^^^^^^^^^^ [3] - /bom.js:365:7 - 365| | { attributes: true } - ^^^^^^^^^^^^^^^^^^^^ [4] - /bom.js:366:7 - 366| | { characterData: true } - ^^^^^^^^^^^^^^^^^^^^^^^ [5] + /bom.js:561:7 + 561| | { childList: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:562:7 + 562| | { attributes: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^ [4] + /bom.js:563:7 + 563| | { characterData: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [5] Error ---------------------------------------------------------------------------------------- MutationObserver.js:21:16 @@ -742,15 +653,15 @@ Cannot call `o.observe` with object literal bound to `options` because: ^^ [1] References: - /bom.js:364:7 - 364| | { childList: true } - ^^^^^^^^^^^^^^^^^^^ [2] - /bom.js:365:7 - 365| | { attributes: true } - ^^^^^^^^^^^^^^^^^^^^ [3] - /bom.js:366:7 - 366| | { characterData: true } - ^^^^^^^^^^^^^^^^^^^^^^^ [4] + /bom.js:561:7 + 561| | { childList: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /bom.js:562:7 + 562| | { attributes: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:563:7 + 563| | { characterData: true, ... 
} + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] Error ---------------------------------------------------------------------------------------- MutationObserver.js:22:16 @@ -765,15 +676,15 @@ Cannot call `o.observe` with object literal bound to `options` because: ^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:364:7 - 364| | { childList: true } - ^^^^^^^^^^^^^^^^^^^ [2] - /bom.js:365:7 - 365| | { attributes: true } - ^^^^^^^^^^^^^^^^^^^^ [3] - /bom.js:366:7 - 366| | { characterData: true } - ^^^^^^^^^^^^^^^^^^^^^^^ [4] + /bom.js:561:7 + 561| | { childList: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /bom.js:562:7 + 562| | { attributes: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:563:7 + 563| | { characterData: true, ... } + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] Error ---------------------------------------------------------------------------------------- MutationObserver.js:23:16 @@ -789,9 +700,9 @@ References: MutationObserver.js:23:53 23| o.observe(div, { attributes: true, attributeFilter: true }); // incorrect ^^^^ [1] - /bom.js:372:23 - 372| attributeFilter?: Array; - ^^^^^^^^^^^^^ [2] + /bom.js:569:21 + 569| attributeFilter?: Array, + ^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------ URLSearchParams.js:8:19 @@ -799,20 +710,20 @@ Error -------------------------------------------------------------------------- Cannot assign `params.get(...)` to `b` because null [1] is incompatible with string [2]. URLSearchParams.js:8:19 - 8| const b: string = params.get('foo'); // incorrect - ^^^^^^^^^^^^^^^^^ + 8| const b: string = params.get('foo'); // incorrect + ^^^^^^^^^^^^^^^^^ References: - /bom.js:935:24 - 935| get(name: string): null | string; - ^^^^ [1] + /bom.js:1379:24 + 1379| get(name: string): null | string; + ^^^^ [1] URLSearchParams.js:8:10 - 8| const b: string = params.get('foo'); // incorrect - ^^^^^^ [2] + 8| const b: string = params.get('foo'); // incorrect + ^^^^^^ [2] -Found 54 errors +Found 47 errors Only showing the most relevant union/intersection branches. To see all branches, re-run Flow with --show-all-branches diff --git a/tests/call_caching2/lib/immutable.js b/tests/call_caching2/lib/immutable.js index 8331ab63712..28e881a078f 100644 --- a/tests/call_caching2/lib/immutable.js +++ b/tests/call_caching2/lib/immutable.js @@ -1,4 +1,4 @@ -// Copyright 2004-present Facebook. All Rights Reserved. +// Copyright (c) Facebook, Inc. and its affiliates. declare class Array { } diff --git a/tests/call_properties/call_properties.exp b/tests/call_properties/call_properties.exp index 79b8d99ac22..eb2b7ec4d25 100644 --- a/tests/call_properties/call_properties.exp +++ b/tests/call_properties/call_properties.exp @@ -45,7 +45,7 @@ References: Error ------------------------------------------------------------------------------------------------------- A.js:23:10 -Cannot call `f` because a callable signature is missing in object type [1]. +Cannot call `f` because a call signature declaring the expected parameter / return type is missing in object type [1]. A.js:23:10 23| return f(); @@ -59,7 +59,8 @@ References: Error ------------------------------------------------------------------------------------------------------- A.js:29:10 -Cannot call `x` because a callable signature is missing in object literal [1]. +Cannot call `x` because a call signature declaring the expected parameter / return type is missing in object +literal [1]. 
A.js:29:10 29| return x(); @@ -175,7 +176,8 @@ References: Error ------------------------------------------------------------------------------------------------------- C.js:23:10 -Cannot return `x` because a callable signature is missing in object type [1] but exists in function type [2]. +Cannot return `x` because a call signature declaring the expected parameter / return type is missing in object type [1] +but exists in function type [2]. C.js:23:10 23| return x; @@ -190,23 +192,6 @@ References: ^^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------------------- C.js:33:10 - -Cannot return `x` because a callable signature is missing in object type [1] but exists in function type [2]. - - C.js:33:10 - 33| return x; // error - ^ - -References: - C.js:32:15 - 32| function g(x: {}): Function { - ^^ [1] - C.js:32:20 - 32| function g(x: {}): Function { - ^^^^^^^^ [2] - - Error -------------------------------------------------------------------------------------------------------- D.js:12:3 Cannot assign function to `c` because function [1] requires another argument from function type [2]. @@ -226,7 +211,7 @@ References: Error ------------------------------------------------------------------------------------------------------- D.js:21:10 -Cannot return `x` because string [1] is incompatible with number [2] in the return value of the callable signature. +Cannot return `x` because string [1] is incompatible with number [2] in the return value. D.js:21:10 21| return x; @@ -291,7 +276,8 @@ References: Error --------------------------------------------------------------------------------------------------------- G.js:5:1 -Cannot call `o` because a callable signature is missing in `Object.create` [1]. +Cannot call `o` because a call signature declaring the expected parameter / return type is missing in +`Object.create` [1]. G.js:5:1 5| o(); // error: o is not callable @@ -303,33 +289,12 @@ References: ^^^^^^^^^^^^^^^^ [1] -Error ------------------------------------------------------------------------------------------------ deprecated.js:2:3 - -Deprecated $call syntax. Use callable property syntax instead. (`deprecated-call-syntax`) - - 2| $call: () => void; - ^^^^^ - - -Error ------------------------------------------------------------------------------------------------ deprecated.js:6:3 - -Deprecated $call syntax. Use callable property syntax instead. (`deprecated-call-syntax`) - - 6| $call: () => void; - ^^^^^ - - -Error ----------------------------------------------------------------------------------------------- deprecated.js:10:3 - -Deprecated $call syntax. Use callable property syntax instead. (`deprecated-call-syntax`) - - 10| $call: () => void; - ^^^^^ - - Error --------------------------------------------------------------------------------------------- internal_slot.js:5:2 -Cannot cast object literal to `O` because a callable signature is missing in object literal [1] but exists in `O` [2]. +Cannot cast object literal to `O` because: + - object literal [1] is incompatible with function prototype [2]. + - a call signature declaring the expected parameter / return type is missing in object literal [1] but exists in + `O` [2]. 
internal_slot.js:5:2 5| ({}: O); // err: no callable property @@ -360,7 +325,8 @@ References: Error -------------------------------------------------------------------------------------------- internal_slot.js:13:2 -Cannot cast object literal to `I` because a callable signature is missing in object literal [1] but exists in `I` [2]. +Cannot cast object literal to `I` because a call signature declaring the expected parameter / return type is missing in +object literal [1] but exists in `I` [2]. internal_slot.js:13:2 13| ({}: I); // err: no callable property @@ -453,8 +419,8 @@ References: Error --------------------------------------------------------------------------------------------------- use_ops.js:4:2 -Cannot cast `a` to `B` because a callable signature is missing in object type [1] but exists in function type [2] in -property `p`. +Cannot cast `a` to `B` because a call signature declaring the expected parameter / return type is missing in object +type [1] but exists in function type [2] in property `p`. use_ops.js:4:2 4| (a: B); // error HERE and preserve use ops @@ -470,7 +436,7 @@ References: -Found 31 errors +Found 28 errors Only showing the most relevant union/intersection branches. To see all branches, re-run Flow with --show-all-branches diff --git a/tests/call_properties/deprecated.js b/tests/call_properties/deprecated.js deleted file mode 100644 index 64439d3024a..00000000000 --- a/tests/call_properties/deprecated.js +++ /dev/null @@ -1,11 +0,0 @@ -type O = { - $call: () => void; -} - -interface I { - $call: () => void; -} - -declare class C { - $call: () => void; -} diff --git a/tests/call_type/call_type.exp b/tests/call_type/call_type.exp index ebcaf0b5e71..2dfb0d0d262 100644 --- a/tests/call_type/call_type.exp +++ b/tests/call_type/call_type.exp @@ -167,8 +167,8 @@ References: Error ----------------------------------------------------------------------------------------------------- test.js:38:3 Cannot cast `null` to `G` because: - - Either mixed [1] is incompatible with number [2]. - - Or mixed [1] is incompatible with string [3]. + - Either cannot instantiate `$Call` because mixed [1] is incompatible with number [2] in the first argument. + - Or cannot instantiate `$Call` because mixed [1] is incompatible with string [3] in the second argument. test.js:38:3 38| ((null: mixed): G); // Error: mixed ~> number | string diff --git a/tests/callable/callable.exp b/tests/callable/callable.exp index fc0a119acc2..f5145833868 100644 --- a/tests/callable/callable.exp +++ b/tests/callable/callable.exp @@ -1,6 +1,7 @@ Error ------------------------------------------------------------------------------------------------ primitives.js:6:1 -Cannot call `dict` because a callable signature is missing in object type [1]. +Cannot call `dict` because a call signature declaring the expected parameter / return type is missing in object +type [1]. 
primitives.js:6:1 6| dict(); // error, callable signature not found diff --git a/tests/cancelable_rechecks/.flowconfig b/tests/cancelable_rechecks/.flowconfig index 1116bf877c4..e69de29bb2d 100644 --- a/tests/cancelable_rechecks/.flowconfig +++ b/tests/cancelable_rechecks/.flowconfig @@ -1,2 +0,0 @@ -[options] -experimental.cancelable_rechecks=true diff --git a/tests/cancelable_workloads/.flowconfig b/tests/cancelable_workloads/.flowconfig index 1116bf877c4..e69de29bb2d 100644 --- a/tests/cancelable_workloads/.flowconfig +++ b/tests/cancelable_workloads/.flowconfig @@ -1,2 +0,0 @@ -[options] -experimental.cancelable_rechecks=true diff --git a/tests/cancelable_workloads/test.sh b/tests/cancelable_workloads/test.sh index 16aa9d4b2c4..35714f1a433 100755 --- a/tests/cancelable_workloads/test.sh +++ b/tests/cancelable_workloads/test.sh @@ -1,5 +1,9 @@ #!/bin/bash +# Workloads can't be canceled anymore by files changing. But in this test +# find-refs starts a recheck which is cancelable. So this test still works. +# So I'm leaving it here. + assert_ok "$FLOW" stop # Introduce a file which will cause stuff to hang diff --git a/tests/class_fields/class_fields.exp b/tests/class_fields/class_fields.exp index 65de7a297b4..e302f323da5 100644 --- a/tests/class_fields/class_fields.exp +++ b/tests/class_fields/class_fields.exp @@ -1,26 +1,34 @@ -Error ----------------------------------------------------------------------------------------------- base_class.js:9:46 +Error ------------------------------------------------------------------------------------------------ base_class.js:9:3 -string [1] is incompatible with number [2]. +Cannot initialize property `annotatedInitializedFieldInvalid` with `'asdf'` because string [1] is incompatible with +number [2]. - base_class.js:9:46 + base_class.js:9:3 9| annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + base_class.js:9:46 + 9| annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number + ^^^^^^ [1] base_class.js:9:37 9| annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------- base_class.js:16:53 +Error ----------------------------------------------------------------------------------------------- base_class.js:16:3 -string [1] is incompatible with number [2]. +Cannot initialize property `annotatedInitializedFieldInvalid` with `'asdf'` because string [1] is incompatible with +number [2]. - base_class.js:16:53 + base_class.js:16:3 16| static annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + base_class.js:16:53 + 16| static annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number + ^^^^^^ [1] base_class.js:16:44 16| static annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number ^^^^^^ [2] @@ -202,7 +210,7 @@ Error -------------------------------------------------------------------------- Classes may not have fields named `constructor`. 8| constructor: T = e; // error - ^^^^^^^^^^^^^^^^^^^ + ^^^^^^^^^^^ Error ----------------------------------------------------------------------- declared_class_constructor_property.js:8:3 @@ -213,57 +221,73 @@ Classes may not have fields named `constructor`. 
^^^^^^^^^^^ -Error -------------------------------------------------------------------------------------------- derived_class.js:9:51 +Error --------------------------------------------------------------------------------------------- derived_class.js:9:3 -string [1] is incompatible with number [2]. +Cannot initialize property `base_annotatedInitializedFieldInvalid` with `'asdf'` because string [1] is incompatible with +number [2]. - derived_class.js:9:51 + derived_class.js:9:3 9| base_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + derived_class.js:9:51 + 9| base_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number + ^^^^^^ [1] derived_class.js:9:42 9| base_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- derived_class.js:16:58 +Error -------------------------------------------------------------------------------------------- derived_class.js:16:3 -string [1] is incompatible with number [2]. +Cannot initialize property `base_annotatedInitializedFieldInvalid` with `'asdf'` because string [1] is incompatible with +number [2]. - derived_class.js:16:58 + derived_class.js:16:3 16| static base_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + derived_class.js:16:58 + 16| static base_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number + ^^^^^^ [1] derived_class.js:16:49 16| static base_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- derived_class.js:28:52 +Error -------------------------------------------------------------------------------------------- derived_class.js:28:3 -string [1] is incompatible with number [2]. +Cannot initialize property `child_annotatedInitializedFieldInvalid` with `'asdf'` because string [1] is incompatible +with number [2]. - derived_class.js:28:52 + derived_class.js:28:3 28| child_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + derived_class.js:28:52 + 28| child_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number + ^^^^^^ [1] derived_class.js:28:43 28| child_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- derived_class.js:35:59 +Error -------------------------------------------------------------------------------------------- derived_class.js:35:3 -string [1] is incompatible with number [2]. +Cannot initialize property `child_annotatedInitializedFieldInvalid` with `'asdf'` because string [1] is incompatible +with number [2]. 
- derived_class.js:35:59 + derived_class.js:35:3 35| static child_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + derived_class.js:35:59 + 35| static child_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number + ^^^^^^ [1] derived_class.js:35:50 35| static child_annotatedInitializedFieldInvalid: number = 'asdf'; // Error: string ~> number ^^^^^^ [2] @@ -681,29 +705,35 @@ References: ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- generic_class.js:26:16 +Error -------------------------------------------------------------------------------------------- generic_class.js:26:3 -number [1] is incompatible with `T` [2]. +Cannot initialize property `invalid` with `42` because number [1] is incompatible with `T` [2]. - generic_class.js:26:16 + generic_class.js:26:3 26| invalid: T = 42; // Error: number ~> Generic - ^^ [1] + ^^^^^^^^^^^^^^^^ References: + generic_class.js:26:16 + 26| invalid: T = 42; // Error: number ~> Generic + ^^ [1] generic_class.js:26:12 26| invalid: T = 42; // Error: number ~> Generic ^ [2] -Error ------------------------------------------------------------------------------------------- generic_class.js:29:23 +Error -------------------------------------------------------------------------------------------- generic_class.js:29:3 -number [1] is incompatible with `T` [2]. +Cannot initialize property `invalid` with `42` because number [1] is incompatible with `T` [2]. - generic_class.js:29:23 + generic_class.js:29:3 29| static invalid: T = 42; // Error: number ~> Generic - ^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^ References: + generic_class.js:29:23 + 29| static invalid: T = 42; // Error: number ~> Generic + ^^ [1] generic_class.js:29:19 29| static invalid: T = 42; // Error: number ~> Generic ^ [2] diff --git a/tests/class_statics/class_statics.exp b/tests/class_statics/class_statics.exp index 23357ee9ae6..0681a235a84 100644 --- a/tests/class_statics/class_statics.exp +++ b/tests/class_statics/class_statics.exp @@ -1,17 +1,17 @@ -Error ---------------------------------------------------------------------------------- class_static_constructor.js:4:3 +Error --------------------------------------------------------------------------------- class_static_constructor.js:4:10 -Classes may not have fields named `constructor`. +Classes may not have static fields named `constructor`. 4| static constructor: Object; // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ^^^^^^^^^^^ -Error ------------------------------------------------------------------------------------ class_static_prototype.js:4:3 +Error ----------------------------------------------------------------------------------- class_static_prototype.js:4:10 Classes may not have static fields named `prototype`. 
4| static prototype: Object; // error - ^^^^^^^^^^^^^^^^^^^^^^^^^ + ^^^^^^^^^ Error ------------------------------------------------------------------------ declared_class_static_constructor.js:4:10 diff --git a/tests/classic-dep-graph-change/.flowconfig b/tests/classic-dep-graph-change/.flowconfig new file mode 100644 index 00000000000..c9688f58e78 --- /dev/null +++ b/tests/classic-dep-graph-change/.flowconfig @@ -0,0 +1,6 @@ +[ignore] +/tmp + +[options] +experimental.types_first=false +experimental.allow_skip_direct_dependents=true diff --git a/tests/classic-dep-graph-change/.testconfig b/tests/classic-dep-graph-change/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/classic-dep-graph-change/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/classic-dep-graph-change/classic-dep-graph-change.exp b/tests/classic-dep-graph-change/classic-dep-graph-change.exp new file mode 100644 index 00000000000..a2ed1829a1f --- /dev/null +++ b/tests/classic-dep-graph-change/classic-dep-graph-change.exp @@ -0,0 +1,65 @@ + +Server should start in classic mode + +Error should refer to test/node_modules/foo.js +Error ------------------------------------------------------------------------------------------------- test/test.js:5:2 + +Cannot cast `foo()` to empty because number [1] is incompatible with empty [2]. + + test/test.js:5:2 + 5| (foo(): empty); + ^^^^^ + +References: + test/node_modules/foo.js:3:24 + 3| export function foo(): number { return 0; } + ^^^^^^ [1] + test/test.js:5:9 + 5| (foo(): empty); + ^^^^^ [2] + + + +Found 1 error + +Removing test/node_modules/foo.js should make error refer to node_modules/foo.js +Error ------------------------------------------------------------------------------------------------- test/test.js:5:2 + +Cannot cast `foo()` to empty because string [1] is incompatible with empty [2]. + + test/test.js:5:2 + 5| (foo(): empty); + ^^^^^ + +References: + node_modules/foo.js:3:24 + 3| export function foo(): string { return ''; } + ^^^^^^ [1] + test/test.js:5:9 + 5| (foo(): empty); + ^^^^^ [2] + + + +Found 1 error + +Adding test/node_modules/foo.js should make error refer to test/node_modules/foo.js +Error ------------------------------------------------------------------------------------------------- test/test.js:5:2 + +Cannot cast `foo()` to empty because number [1] is incompatible with empty [2]. + + test/test.js:5:2 + 5| (foo(): empty); + ^^^^^ + +References: + test/node_modules/foo.js:3:24 + 3| export function foo(): number { return 0; } + ^^^^^^ [1] + test/test.js:5:9 + 5| (foo(): empty); + ^^^^^ [2] + + + +Found 1 error diff --git a/tests/classic-dep-graph-change/node_modules/foo.js b/tests/classic-dep-graph-change/node_modules/foo.js new file mode 100644 index 00000000000..209980d7463 --- /dev/null +++ b/tests/classic-dep-graph-change/node_modules/foo.js @@ -0,0 +1,3 @@ +// @flow + +export function foo(): string { return ''; } diff --git a/tests/classic-dep-graph-change/test.sh b/tests/classic-dep-graph-change/test.sh new file mode 100644 index 00000000000..8937cd3f71b --- /dev/null +++ b/tests/classic-dep-graph-change/test.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +mkdir tmp + +printf "\\nServer should start in classic mode\\n" +start_flow . 
+ +printf "\\nError should refer to test/node_modules/foo.js\\n" +assert_errors "$FLOW" status --strip-root + +printf "\\nRemoving test/node_modules/foo.js should make error refer to node_modules/foo.js\\n" +mv test/node_modules/foo.js tmp/foo.js +assert_ok "$FLOW" force-recheck test/node_modules/foo.js +assert_errors "$FLOW" status --strip-root + +printf "\\nAdding test/node_modules/foo.js should make error refer to test/node_modules/foo.js\\n" +mv tmp/foo.js test/node_modules/foo.js +assert_ok "$FLOW" force-recheck test/node_modules/foo.js +assert_errors "$FLOW" status --strip-root + +assert_ok "$FLOW" stop + +rm -rf tmp diff --git a/tests/classic-dep-graph-change/test/node_modules/foo.js b/tests/classic-dep-graph-change/test/node_modules/foo.js new file mode 100644 index 00000000000..206c6c079d9 --- /dev/null +++ b/tests/classic-dep-graph-change/test/node_modules/foo.js @@ -0,0 +1,3 @@ +// @flow + +export function foo(): number { return 0; } diff --git a/tests/classic-dep-graph-change/test/test.js b/tests/classic-dep-graph-change/test/test.js new file mode 100644 index 00000000000..0cb3f329a56 --- /dev/null +++ b/tests/classic-dep-graph-change/test/test.js @@ -0,0 +1,5 @@ +// @flow + +import {foo} from 'foo'; + +(foo(): empty); diff --git a/tests/classic-haste-dep-graph-change/.flowconfig b/tests/classic-haste-dep-graph-change/.flowconfig new file mode 100644 index 00000000000..3eb1f9c971a --- /dev/null +++ b/tests/classic-haste-dep-graph-change/.flowconfig @@ -0,0 +1,7 @@ +[ignore] +/tmp + +[options] +experimental.types_first=false +module.system=haste +experimental.allow_skip_direct_dependents=true diff --git a/tests/classic-haste-dep-graph-change/.testconfig b/tests/classic-haste-dep-graph-change/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/classic-haste-dep-graph-change/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/classic-haste-dep-graph-change/classic-haste-dep-graph-change.exp b/tests/classic-haste-dep-graph-change/classic-haste-dep-graph-change.exp new file mode 100644 index 00000000000..cf144c45abb --- /dev/null +++ b/tests/classic-haste-dep-graph-change/classic-haste-dep-graph-change.exp @@ -0,0 +1,93 @@ + +Server should start in classic mode + +Error should refer to test/node_modules/foo.js +Error ------------------------------------------------------------------------------------------------------ foo2.js:1:1 + +Duplicate module provider for `foo`. Change either this module provider or the current module provider [1]. + + foo2.js:1:1 + 1| // @flow + + +References: + foo1.js:1:1 + 1| // @flow + [1] + + +Error ------------------------------------------------------------------------------------------------------ test.js:5:2 + +Cannot cast `foo()` to empty because number [1] is incompatible with empty [2]. + + test.js:5:2 + 5| (foo(): empty); + ^^^^^ + +References: + foo1.js:4:24 + 4| export function foo(): number { return 0; } + ^^^^^^ [1] + test.js:5:9 + 5| (foo(): empty); + ^^^^^ [2] + + + +Found 2 errors + +Removing foo1.js should make error refer to foo2.js +Error ------------------------------------------------------------------------------------------------------ test.js:5:2 + +Cannot cast `foo()` to empty because string [1] is incompatible with empty [2]. 
+ + test.js:5:2 + 5| (foo(): empty); + ^^^^^ + +References: + foo2.js:4:24 + 4| export function foo(): string { return ''; } + ^^^^^^ [1] + test.js:5:9 + 5| (foo(): empty); + ^^^^^ [2] + + + +Found 1 error + +Adding foo1.js should make error refer to foo1.js +Error ------------------------------------------------------------------------------------------------------ foo2.js:1:1 + +Duplicate module provider for `foo`. Change either this module provider or the current module provider [1]. + + foo2.js:1:1 + 1| // @flow + + +References: + foo1.js:1:1 + 1| // @flow + [1] + + +Error ------------------------------------------------------------------------------------------------------ test.js:5:2 + +Cannot cast `foo()` to empty because number [1] is incompatible with empty [2]. + + test.js:5:2 + 5| (foo(): empty); + ^^^^^ + +References: + foo1.js:4:24 + 4| export function foo(): number { return 0; } + ^^^^^^ [1] + test.js:5:9 + 5| (foo(): empty); + ^^^^^ [2] + + + +Found 2 errors diff --git a/tests/classic-haste-dep-graph-change/foo1.js b/tests/classic-haste-dep-graph-change/foo1.js new file mode 100644 index 00000000000..afe2e3750ce --- /dev/null +++ b/tests/classic-haste-dep-graph-change/foo1.js @@ -0,0 +1,4 @@ +// @flow +// @providesModule foo + +export function foo(): number { return 0; } diff --git a/tests/classic-haste-dep-graph-change/foo2.js b/tests/classic-haste-dep-graph-change/foo2.js new file mode 100644 index 00000000000..851c81b0d42 --- /dev/null +++ b/tests/classic-haste-dep-graph-change/foo2.js @@ -0,0 +1,4 @@ +// @flow +// @providesModule foo + +export function foo(): string { return ''; } diff --git a/tests/classic-haste-dep-graph-change/test.js b/tests/classic-haste-dep-graph-change/test.js new file mode 100644 index 00000000000..0cb3f329a56 --- /dev/null +++ b/tests/classic-haste-dep-graph-change/test.js @@ -0,0 +1,5 @@ +// @flow + +import {foo} from 'foo'; + +(foo(): empty); diff --git a/tests/classic-haste-dep-graph-change/test.sh b/tests/classic-haste-dep-graph-change/test.sh new file mode 100644 index 00000000000..f3dbb7a547c --- /dev/null +++ b/tests/classic-haste-dep-graph-change/test.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +mkdir tmp + +printf "\\nServer should start in classic mode\\n" +start_flow . 
+ +printf "\\nError should refer to test/node_modules/foo.js\\n" +assert_errors "$FLOW" status --strip-root + +printf "\\nRemoving foo1.js should make error refer to foo2.js\\n" +mv foo1.js tmp/foo1.js +assert_ok "$FLOW" force-recheck foo1.js +assert_errors "$FLOW" status --strip-root + +printf "\\nAdding foo1.js should make error refer to foo1.js\\n" +mv tmp/foo1.js foo1.js +assert_ok "$FLOW" force-recheck foo1.js +assert_errors "$FLOW" status --strip-root + +assert_ok "$FLOW" stop + +rm -rf tmp diff --git a/tests/cli_renderer_traces/cli_renderer_traces.exp b/tests/cli_renderer_traces/cli_renderer_traces.exp index f1d742ca935..19a07a45e8d 100644 --- a/tests/cli_renderer_traces/cli_renderer_traces.exp +++ b/tests/cli_renderer_traces/cli_renderer_traces.exp @@ -19,7 +19,12 @@ Trace: 2: ({p: 42}: {p: empty}); ^^ NumT [number] 2: ({p: 42}: {p: empty}); - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + 2: ({p: 42}: {p: empty}); + ^^^^^^^ ObjT [object literal] + 2: ({p: 42}: {p: empty}); + ^^^^^^^^^^ ~> UseT(Cast, ObjT) [object type] Error ------------------------------------------------------------------------------------------------ code_frame.js:4:2 @@ -43,7 +48,12 @@ Trace: 4: ({p: 42}: {p: empty}); ^^ NumT [number] 4: ({p: 42}: {p: empty}); - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + 4: ({p: 42}: {p: empty}); + ^^^^^^^ ObjT [object literal] + 4: ({p: 42}: {p: empty}); + ^^^^^^^^^^ ~> UseT(Cast, ObjT) [object type] Error ------------------------------------------------------------------------------------------------ code_frame.js:6:2 @@ -70,7 +80,18 @@ Trace: 7: p: 42, ^^ NumT [number] 9: p: empty, - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + v + 6: ({ + 7: p: 42, + 8: }: { + ^ ObjT [object literal] + v + 8: }: { + 9: p: empty, + 10: }); + ^ ~> UseT(Cast, ObjT) [object type] Error ----------------------------------------------------------------------------------------------- code_frame.js:12:2 @@ -94,7 +115,12 @@ Trace: 12: ({p: 42}: {p: empty}); ^^ NumT [number] 12: ({p: 42}: {p: empty}); - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + 12: ({p: 42}: {p: empty}); + ^^^^^^^ ObjT [object literal] + 12: ({p: 42}: {p: empty}); + ^^^^^^^^^^ ~> UseT(Cast, ObjT) [object type] Error ------------------------------------------------------------------------------------------------------ test.js:6:2 @@ -121,7 +147,18 @@ Trace: 7: p: 42, // Error: number ~> empty ^^ NumT [number] 9: p: empty, - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + v + 6: ({ + 7: p: 42, // Error: number ~> empty + 8: }: { + ^ ObjT [object literal] + v + 8: }: { + 9: p: empty, + 10: }); + ^ ~> UseT(Cast, ObjT) [object type] Error ----------------------------------------------------------------------------------------------------- test.js:12:2 @@ -150,7 +187,22 @@ Trace: 13: a: 1, // Error: number ~> empty ^ NumT [number] 17: a: empty, - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + v + 12: 
({ + 13: a: 1, // Error: number ~> empty + 14: b: 2, // Error: number ~> empty + 15: c: 3, // Error: number ~> empty + 16: }: { + ^ ObjT [object literal] + v + 16: }: { + 17: a: empty, + 18: b: empty, + 19: c: empty, + 20: }); + ^ ~> UseT(Cast, ObjT) [object type] Error ----------------------------------------------------------------------------------------------------- test.js:12:2 @@ -179,7 +231,22 @@ Trace: 14: b: 2, // Error: number ~> empty ^ NumT [number] 18: b: empty, - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + v + 12: ({ + 13: a: 1, // Error: number ~> empty + 14: b: 2, // Error: number ~> empty + 15: c: 3, // Error: number ~> empty + 16: }: { + ^ ObjT [object literal] + v + 16: }: { + 17: a: empty, + 18: b: empty, + 19: c: empty, + 20: }); + ^ ~> UseT(Cast, ObjT) [object type] Error ----------------------------------------------------------------------------------------------------- test.js:12:2 @@ -208,7 +275,22 @@ Trace: 15: c: 3, // Error: number ~> empty ^ NumT [number] 19: c: empty, - ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from [not shown]) + ^^^^^ ~> UseT(PropertyCompatibility, EmptyT) [empty] (from path 2) +* path 2: + v + 12: ({ + 13: a: 1, // Error: number ~> empty + 14: b: 2, // Error: number ~> empty + 15: c: 3, // Error: number ~> empty + 16: }: { + ^ ObjT [object literal] + v + 16: }: { + 17: a: empty, + 18: b: empty, + 19: c: empty, + 20: }); + ^ ~> UseT(Cast, ObjT) [object type] diff --git a/tests/compose/compose.exp b/tests/compose/compose.exp index a741ba443e0..8d0acef35d9 100644 --- a/tests/compose/compose.exp +++ b/tests/compose/compose.exp @@ -7,8 +7,8 @@ Cannot cast `compose(...)(...)` to empty because string [1] is incompatible with ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:144:31 - 144| toString(radix?: number): string; + /core.js:164:31 + 164| toString(radix?: number): string; ^^^^^^ [1] basic.js:6:34 6| (compose(n => n.toString())(42): empty); // Error: string ~> empty @@ -24,8 +24,8 @@ Cannot cast `composeReverse(...)(...)` to empty because string [1] is incompatib ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:144:31 - 144| toString(radix?: number): string; + /core.js:164:31 + 164| toString(radix?: number): string; ^^^^^^ [1] basic.js:8:41 8| (composeReverse(n => n.toString())(42): empty); // Error: string ~> empty @@ -62,8 +62,8 @@ Cannot perform arithmetic operation because string [1] is not a number. ^ References: - /core.js:144:31 - 144| toString(radix?: number): string; + /core.js:164:31 + 164| toString(radix?: number): string; ^^^^^^ [1] @@ -80,8 +80,8 @@ Cannot cast `composeReverse(...)(...)` to empty because string [1] is incompatib ----^ References: - /core.js:144:31 - 144| toString(radix?: number): string; + /core.js:164:31 + 164| toString(radix?: number): string; ^^^^^^ [1] basic.js:18:8 18| )(42): empty); // Error: string ~> empty @@ -100,8 +100,8 @@ References: recompose.js:20:8 20| p: `${props.p * 3}`, ^^^^^^^^^^^^^^^^ [1] - /core.js:184:14 - 184| round(x: number): number; + /core.js:204:14 + 204| round(x: number): number, ^^^^^^ [2] @@ -130,6 +130,27 @@ References: ^^ [3] +Error --------------------------------------------------------------------------------------------------- spread.js:12:2 + +Cannot call `compose` with compose intermediate value bound to the first parameter because string [1] is incompatible +with number [2]. 
+ + spread.js:12:2 + v------- + 12| (compose( + 13| ...fns1, // Error: string ~> number + 14| )('foo'): empty); // Error: string ~> empty and number ~> empty + ^ + +References: + spread.js:14:3 + 14| )('foo'): empty); // Error: string ~> empty and number ~> empty + ^^^^^ [1] + spread.js:4:26 + 4| declare var fns1: Array<(number) => number>; + ^^^^^^ [2] + + Error --------------------------------------------------------------------------------------------------- spread.js:12:2 Cannot cast `compose(...)(...)` to empty because: @@ -155,21 +176,24 @@ References: ^^^^^ [3] -Error --------------------------------------------------------------------------------------------------- spread.js:13:6 +Error --------------------------------------------------------------------------------------------------- spread.js:16:2 Cannot call `compose` with compose intermediate value bound to the first parameter because string [1] is incompatible with number [2]. - spread.js:13:6 - 13| ...fns1, // Error: string ~> number - ^^^^ + spread.js:16:2 + v------- + 16| (compose( + 17| ...fns2, // Error: string ~> number + 18| )(42): empty); // Error: number ~> empty and string ~> empty + ^ References: - spread.js:14:3 - 14| )('foo'): empty); // Error: string ~> empty and number ~> empty - ^^^^^ [1] - spread.js:4:26 - 4| declare var fns1: Array<(number) => number>; + spread.js:5:37 + 5| declare var fns2: Array<(number) => string>; + ^^^^^^ [1] + spread.js:5:26 + 5| declare var fns2: Array<(number) => string>; ^^^^^^ [2] @@ -198,24 +222,6 @@ References: ^^ [3] -Error --------------------------------------------------------------------------------------------------- spread.js:17:6 - -Cannot call `compose` with compose intermediate value bound to the first parameter because string [1] is incompatible -with number [2]. - - spread.js:17:6 - 17| ...fns2, // Error: string ~> number - ^^^^ - -References: - spread.js:5:37 - 5| declare var fns2: Array<(number) => string>; - ^^^^^^ [1] - spread.js:5:26 - 5| declare var fns2: Array<(number) => string>; - ^^^^^^ [2] - - Error --------------------------------------------------------------------------------------------------- spread.js:21:2 Cannot call `compose` because property `p` is missing in `Number` [1]. 
diff --git a/tests/computed_props/computed_props.exp b/tests/computed_props/computed_props.exp index 1ecd368a022..420d7111819 100644 --- a/tests/computed_props/computed_props.exp +++ b/tests/computed_props/computed_props.exp @@ -114,8 +114,8 @@ Cannot assign `arr[0]()` to `y` because number [1] is incompatible with string [ ^^^^^^^^ References: - /core.js:275:13 - 275| length: number; + /core.js:294:13 + 294| length: number; ^^^^^^ [1] test7.js:5:8 5| var y: string = arr[0](); // error: number ~> string diff --git a/tests/config_check_version/.flowconfig b/tests/config_check_version/.flowconfig new file mode 100644 index 00000000000..026d82d291c --- /dev/null +++ b/tests/config_check_version/.flowconfig @@ -0,0 +1,2 @@ +[version] +^0.1 diff --git a/tests/config_check_version/.testconfig b/tests/config_check_version/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/config_check_version/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/config_check_version/config_check_version.exp b/tests/config_check_version/config_check_version.exp new file mode 100644 index 00000000000..c9d44d255f5 --- /dev/null +++ b/tests/config_check_version/config_check_version.exp @@ -0,0 +1,11 @@ +flow config check: +{ + "flowVersion":"", + "exit":{ + "code":8, + "reason":"Invalid_flowconfig", + "msg":"Wrong version of Flow. The config specifies version ^0.1 but this is version " + } +} + +flow config check --ignore-version: diff --git a/tests/config_check_version/test.sh b/tests/config_check_version/test.sh new file mode 100644 index 00000000000..7b4acbe6713 --- /dev/null +++ b/tests/config_check_version/test.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +echo "flow config check:" +assert_exit 8 "$FLOW" config check --pretty + +echo +echo "flow config check --ignore-version:" +assert_ok "$FLOW" config check --ignore-version --pretty diff --git a/tests/config_invalid_option/.flowconfig b/tests/config_invalid_option/.flowconfig new file mode 100644 index 00000000000..2e640bff9fa --- /dev/null +++ b/tests/config_invalid_option/.flowconfig @@ -0,0 +1,4 @@ +[options] +invalid_option1=foo +invalid_option2=bar +invalid_option2=baz diff --git a/tests/config_invalid_option/.testconfig b/tests/config_invalid_option/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/config_invalid_option/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/config_invalid_option/config_invalid_option.exp b/tests/config_invalid_option/config_invalid_option.exp new file mode 100644 index 00000000000..b9e657dc11c --- /dev/null +++ b/tests/config_invalid_option/config_invalid_option.exp @@ -0,0 +1,36 @@ +Invalid flowconfig should fail +.flowconfig:2 Unsupported option specified! (invalid_option1) +.flowconfig:3 Unsupported option specified! (invalid_option2) +.flowconfig:4 Unsupported option specified! (invalid_option2) + +Invalid flowconfig should pass +.flowconfig:2 Unsupported option specified! (invalid_option1) +.flowconfig:3 Unsupported option specified! (invalid_option2) +.flowconfig:4 Unsupported option specified! (invalid_option2) +Found 0 errors + +flow config check: +{ + "errors":[ + { + "line":2, + "message":"Unsupported option specified! (invalid_option1)", + "level":"warning" + }, + { + "line":3, + "message":"Unsupported option specified! (invalid_option2)", + "level":"warning" + }, + { + "line":4, + "message":"Unsupported option specified! 
(invalid_option2)", + "level":"warning" + } + ], + "flowVersion":"", + "exit":{"code":8,"reason":"Invalid_flowconfig"} +} + +flow config check --ignore-version: +{} diff --git a/tests/config_invalid_option/test.sh b/tests/config_invalid_option/test.sh new file mode 100644 index 00000000000..c85f607464e --- /dev/null +++ b/tests/config_invalid_option/test.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +echo "Invalid flowconfig should fail" +assert_exit 8 "$FLOW" check 2>&1 + +echo +echo "Invalid flowconfig should pass" +assert_ok "$FLOW" check --quiet --ignore-version 2>&1 + +echo +echo "flow config check:" +assert_exit 8 "$FLOW" config check --pretty + +echo +echo "flow config check --ignore-version:" +assert_ok "$FLOW" config check --ignore-version --pretty diff --git a/tests/const_params/const_params.exp b/tests/const_params/const_params.exp index c21911ca549..ce24e112a48 100644 --- a/tests/const_params/const_params.exp +++ b/tests/const_params/const_params.exp @@ -9,8 +9,78 @@ Cannot reassign constant `x` [1]. References: test.js:16:26 16| function cannot_reassign(x: string) { - ^^^^^^^^^ [1] + ^ [1] +Error ----------------------------------------------------------------------------------------------------- test.js:32:3 -Found 1 error +Cannot reassign constant `x` [1]. + + test.js:32:3 + 32| x = 0; // error, const param cannot be reeassigned + ^ + +References: + test.js:31:33 + 31| function const_rest_reassign(...x) { + ^ [1] + + +Error ----------------------------------------------------------------------------------------------------- test.js:36:3 + +Cannot reassign constant `x` [1]. + + test.js:36:3 + 36| x = 0; // error, const param cannot be reeassigned + ^ + +References: + test.js:35:35 + 35| function const_obj_patt_reassign({x, ...o}) { + ^ [1] + + +Error ----------------------------------------------------------------------------------------------------- test.js:37:3 + +Cannot reassign constant `o` [1]. + + test.js:37:3 + 37| o = 0; // error, const param cannot be reeassigned + ^ + +References: + test.js:35:41 + 35| function const_obj_patt_reassign({x, ...o}) { + ^ [1] + + +Error ----------------------------------------------------------------------------------------------------- test.js:41:3 + +Cannot reassign constant `x` [1]. + + test.js:41:3 + 41| x = 0; // error, const param cannot be reeassigned + ^ + +References: + test.js:40:35 + 40| function const_arr_patt_reassign([x, ...a]) { + ^ [1] + + +Error ----------------------------------------------------------------------------------------------------- test.js:42:3 + +Cannot reassign constant `a` [1]. 
+ + test.js:42:3 + 42| a = 0; // error, const param cannot be reeassigned + ^ + +References: + test.js:40:41 + 40| function const_arr_patt_reassign([x, ...a]) { + ^ [1] + + + +Found 6 errors diff --git a/tests/const_params/test.js b/tests/const_params/test.js index 0f7838c3c1f..fd9c30e7570 100644 --- a/tests/const_params/test.js +++ b/tests/const_params/test.js @@ -27,3 +27,17 @@ function durable_refi(x: ?number) { return () => { var y:number = x; }; } } + +function const_rest_reassign(...x) { + x = 0; // error, const param cannot be reeassigned +} + +function const_obj_patt_reassign({x, ...o}) { + x = 0; // error, const param cannot be reeassigned + o = 0; // error, const param cannot be reeassigned +} + +function const_arr_patt_reassign([x, ...a]) { + x = 0; // error, const param cannot be reeassigned + a = 0; // error, const param cannot be reeassigned +} diff --git a/tests/core_tests/core_tests.exp b/tests/core_tests/core_tests.exp index 6cf0e20c63f..c7ca5e03db2 100644 --- a/tests/core_tests/core_tests.exp +++ b/tests/core_tests/core_tests.exp @@ -1,3 +1,20 @@ +Error -------------------------------------------------------------------------------------------- json_stringify.js:9:2 + +Cannot cast `JSON.stringify(...)` to string because undefined [1] is incompatible with string [2]. + + json_stringify.js:9:2 + 9| (JSON.stringify(bad1): string); + ^^^^^^^^^^^^^^^^^^^^ + +References: + /core.js:520:17 + 520| ): string | void; + ^^^^ [1] + json_stringify.js:9:24 + 9| (JSON.stringify(bad1): string); + ^^^^^^ [2] + + Error ----------------------------------------------------------------------------------------------------- map.js:23:21 Cannot call `Map` with array literal bound to `iterable` because: @@ -14,11 +31,11 @@ References: map.js:23:22 23| let x = new Map(['foo', 123]); // error ^^^^^ [1] - /core.js:533:37 - 533| constructor(iterable: ?Iterable<[K, V]>): void; + /core.js:594:37 + 594| constructor(iterable: ?Iterable<[K, V]>): void; ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] map.js:23:29 23| let x = new Map(['foo', 123]); // error @@ -42,8 +59,8 @@ References: map.js:24:16 24| let y: Map = new Map([['foo', 123]]); // error ^^^^^^ [2] - /core.js:531:19 - 531| declare class Map { + /core.js:592:19 + 592| declare class Map extends $ReadOnlyMap { ^ [3] map.js:24:51 24| let y: Map = new Map([['foo', 123]]); // error @@ -51,8 +68,8 @@ References: map.js:24:24 24| let y: Map = new Map([['foo', 123]]); // error ^^^^^^ [5] - /core.js:531:22 - 531| declare class Map { + /core.js:592:22 + 592| declare class Map extends $ReadOnlyMap { ^ [6] @@ -67,8 +84,8 @@ Cannot cast `x.get(...)` to boolean because: ^^^^^^^^^^^^ References: - /core.js:538:22 - 538| get(key: K): V | void; + /core.js:599:22 + 599| get(key: K): V | void; ^^^^ [1] map.js:29:20 29| (x.get('foo'): boolean); // error, string | void @@ -102,8 +119,8 @@ since `z` is not a member of the set. ^^^ [1] References: - /core.js:287:21 - 287| type RegExp$flags = $CharSet<"gimsuy">; + /core.js:312:21 + 312| type RegExp$flags = $CharSet<"gimsuy">; ^^^^^^^^^^^^^^^^^^ [2] @@ -117,63 +134,73 @@ since `z` is not a member of the set. 
^^^ [1] References: - /core.js:287:21 - 287| type RegExp$flags = $CharSet<"gimsuy">; + /core.js:312:21 + 312| type RegExp$flags = $CharSet<"gimsuy">; ^^^^^^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- weakset.js:19:23 Cannot call `WeakSet` with array literal bound to `iterable` because: - - number [1] is incompatible with object type [2] in type argument `Yield` [3] of the return value of property - `@@iterator`. - - number [4] is incompatible with object type [2] in type argument `Yield` [3] of the return value of property - `@@iterator`. - - number [5] is incompatible with object type [2] in type argument `Yield` [3] of the return value of property - `@@iterator`. + - in type argument `Yield` [1] of the return value of property `@@iterator`: + - Either number [2] is incompatible with object type [3]. + - Or number [2] is incompatible with read-only array type [4]. + - in type argument `Yield` [1] of the return value of property `@@iterator`: + - Either number [5] is incompatible with object type [3]. + - Or number [5] is incompatible with read-only array type [4]. + - in type argument `Yield` [1] of the return value of property `@@iterator`: + - Either number [6] is incompatible with object type [3]. + - Or number [6] is incompatible with read-only array type [4]. weakset.js:19:23 19| let ws3 = new WeakSet([1, 2, 3]); // error, must be objects ^^^^^^^^^ References: + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { + ^^^^^ [1] weakset.js:19:24 19| let ws3 = new WeakSet([1, 2, 3]); // error, must be objects - ^ [1] - /core.js:571:26 - 571| declare class WeakSet { - ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { - ^^^^^ [3] + ^ [2] + /core.js:653:26 + 653| declare class WeakSet> extends $ReadOnlyWeakSet { + ^^^^^ [3] + /core.js:653:34 + 653| declare class WeakSet> extends $ReadOnlyWeakSet { + ^^^^^^^^^^^^^^^^^^^ [4] weakset.js:19:27 19| let ws3 = new WeakSet([1, 2, 3]); // error, must be objects - ^ [4] + ^ [5] weakset.js:19:30 19| let ws3 = new WeakSet([1, 2, 3]); // error, must be objects - ^ [5] + ^ [6] Error ------------------------------------------------------------------------------------------------- weakset.js:36:23 -Cannot call `WeakSet` with `numbers()` bound to `iterable` because number [1] is incompatible with object type [2] in -type argument `Yield` [3]. +Cannot call `WeakSet` with `numbers()` bound to `iterable` because in type argument `Yield` [1]: + - Either number [2] is incompatible with object type [3]. + - Or number [2] is incompatible with read-only array type [4]. 
weakset.js:36:23 36| let ws5 = new WeakSet(numbers()); // error, must be objects ^^^^^^^^^ References: + /core.js:543:22 + 543| interface $Iterable<+Yield,+Return,-Next> { + ^^^^^ [1] weakset.js:29:31 29| function* numbers(): Iterable { - ^^^^^^ [1] - /core.js:571:26 - 571| declare class WeakSet { - ^^^^^^ [2] - /core.js:493:22 - 493| interface $Iterable<+Yield,+Return,-Next> { - ^^^^^ [3] + ^^^^^^ [2] + /core.js:653:26 + 653| declare class WeakSet> extends $ReadOnlyWeakSet { + ^^^^^ [3] + /core.js:653:34 + 653| declare class WeakSet> extends $ReadOnlyWeakSet { + ^^^^^^^^^^^^^^^^^^^ [4] -Found 13 errors +Found 14 errors diff --git a/tests/core_tests/json_stringify.js b/tests/core_tests/json_stringify.js new file mode 100644 index 00000000000..6579a19235f --- /dev/null +++ b/tests/core_tests/json_stringify.js @@ -0,0 +1,15 @@ +// @flow + +declare var bad1: mixed; +declare var bad2: () => {}; +declare var good1: number; +declare var good2: {foo: string}; + +// error: undefined is not compatible with string +(JSON.stringify(bad1): string); + +// TODO should error, but currently does not. We allow functions to be coerced to objects +(JSON.stringify(bad2): string); + +(JSON.stringify(good1): string); // ok +(JSON.stringify(good2): string); // ok diff --git a/tests/coverage/coverage.exp b/tests/coverage/coverage.exp index 4dda8fb2494..00133899a34 100644 --- a/tests/coverage/coverage.exp +++ b/tests/coverage/coverage.exp @@ -1,35 +1,35 @@ -// check coverage of declare module - -declare module foo { -} +// check coverage of declare module +declare module foo { +} + Covered: 100.00% (1 of 1 expressions) -// This file triggers a violation of the "disjoint-or-nested ranges invariant" +// This file triggers a violation of the "disjoint-or-nested ranges invariant" // that we implicitly assume in type-at-pos and coverage implementations. In // particular, when unchecked it causes a crash with coverage --color. -declare module foo { -} - -declare module bar { -} +declare module foo { +} +declare module bar { +} + Covered: 100.00% (2 of 2 expressions) -// This file triggers a violation of the "disjoint-or-nested ranges invariant" +// This file triggers a violation of the "disjoint-or-nested ranges invariant" // that we implicitly assume in type-at-pos and coverage implementations. In // particular, when unchecked it causes non-termination with coverage --color. 
-declare module foo { -} +declare module foo { +} -declare module bar { -} - -declare class qux { -} +declare module bar { +} +declare class qux { +} + Covered: 100.00% (3 of 3 expressions) Covered: 75.00% (3 of 4 expressions) @@ -40,3 +40,667 @@ Covered: 0.00% (0 of 4 expressions) Covered: 75.00% (3 of 4 expressions) +----------------------------- +coverage.js +----------------------------- + +{ + "expressions":{ + "covered_count":10, + "covered_locs":[ + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":3,"column":7,"offset":16}, + "end":{"line":3,"column":8,"offset":18} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":3,"column":12,"offset":21}, + "end":{"line":3,"column":12,"offset":22} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":4,"column":17,"offset":40}, + "end":{"line":4,"column":18,"offset":42} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":4,"column":17,"offset":40}, + "end":{"line":4,"column":22,"offset":46} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":4,"column":22,"offset":45}, + "end":{"line":4,"column":22,"offset":46} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":5,"column":17,"offset":64}, + "end":{"line":5,"column":17,"offset":65} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":11,"column":1,"offset":134}, + "end":{"line":11,"column":25,"offset":159} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":12,"column":1,"offset":160}, + "end":{"line":12,"column":25,"offset":185} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":15,"column":1,"offset":232}, + "end":{"line":15,"column":26,"offset":258} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":23,"column":1,"offset":406}, + "end":{"line":23,"column":26,"offset":432} + } + ], + "uncovered_count":31, + "uncovered_locs":[ + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":4,"column":7,"offset":30}, + "end":{"line":4,"column":8,"offset":32} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":5,"column":7,"offset":54}, + "end":{"line":5,"column":8,"offset":56} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":5,"column":12,"offset":59}, + "end":{"line":5,"column":13,"offset":61} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":5,"column":12,"offset":59}, + "end":{"line":5,"column":17,"offset":65} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":68}, + "end":{"line":7,"column":16,"offset":84} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":9,"column":1,"offset":86}, + "end":{"line":9,"column":23,"offset":109} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":10,"column":1,"offset":110}, + "end":{"line":10,"column":23,"offset":133} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":13,"column":1,"offset":186}, + "end":{"line":13,"column":22,"offset":208} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":14,"column":1,"offset":209}, + "end":{"line":14,"column":22,"offset":231} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":17,"column":1,"offset":260}, + "end":{"line":17,"column":23,"offset":283} + }, + { + "source":"coverage.js", + "type":"SourceFile", + 
"start":{"line":18,"column":1,"offset":284}, + "end":{"line":18,"column":23,"offset":307} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":19,"column":1,"offset":308}, + "end":{"line":19,"column":25,"offset":333} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":20,"column":1,"offset":334}, + "end":{"line":20,"column":25,"offset":359} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":21,"column":1,"offset":360}, + "end":{"line":21,"column":22,"offset":382} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":22,"column":1,"offset":383}, + "end":{"line":22,"column":22,"offset":405} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":25,"column":1,"offset":434}, + "end":{"line":25,"column":11,"offset":445} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":27,"column":1,"offset":447}, + "end":{"line":27,"column":21,"offset":468} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":28,"column":1,"offset":469}, + "end":{"line":28,"column":23,"offset":492} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":29,"column":1,"offset":493}, + "end":{"line":29,"column":21,"offset":514} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":30,"column":1,"offset":515}, + "end":{"line":30,"column":23,"offset":538} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":31,"column":1,"offset":539}, + "end":{"line":31,"column":15,"offset":554} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":32,"column":1,"offset":555}, + "end":{"line":32,"column":24,"offset":579} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":33,"column":1,"offset":580}, + "end":{"line":33,"column":24,"offset":604} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":34,"column":1,"offset":605}, + "end":{"line":34,"column":21,"offset":626} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":36,"column":1,"offset":628}, + "end":{"line":36,"column":21,"offset":649} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":37,"column":1,"offset":650}, + "end":{"line":37,"column":23,"offset":673} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":38,"column":1,"offset":674}, + "end":{"line":38,"column":21,"offset":695} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":39,"column":1,"offset":696}, + "end":{"line":39,"column":23,"offset":719} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":40,"column":1,"offset":720}, + "end":{"line":40,"column":24,"offset":744} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":41,"column":1,"offset":745}, + "end":{"line":41,"column":24,"offset":769} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":42,"column":1,"offset":770}, + "end":{"line":42,"column":21,"offset":791} + } + ], + "empty_count":3, + "empty_locs":[ + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":68}, + "end":{"line":7,"column":16,"offset":84} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":19,"column":1,"offset":308}, + "end":{"line":19,"column":25,"offset":333} + }, + { + "source":"coverage.js", + "type":"SourceFile", + "start":{"line":20,"column":1,"offset":334}, + 
"end":{"line":20,"column":25,"offset":359} + } + ] + } +} +----------------------------- +unicode.js +----------------------------- + +{ + "expressions":{ + "covered_count":5, + "covered_locs":[ + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":3,"column":2,"offset":11}, + "end":{"line":3,"column":5,"offset":15} + }, + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":5,"column":7,"offset":30}, + "end":{"line":5,"column":11,"offset":35} + }, + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":5,"column":15,"offset":38}, + "end":{"line":5,"column":17,"offset":44} + }, + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":5,"column":21,"offset":47}, + "end":{"line":5,"column":24,"offset":51} + }, + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":7,"column":2,"offset":61}, + "end":{"line":7,"column":5,"offset":65} + } + ], + "uncovered_count":3, + "uncovered_locs":[ + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":3,"column":2,"offset":11}, + "end":{"line":3,"column":10,"offset":20} + }, + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":5,"column":21,"offset":47}, + "end":{"line":5,"column":29,"offset":56} + }, + { + "source":"unicode.js", + "type":"SourceFile", + "start":{"line":7,"column":2,"offset":61}, + "end":{"line":7,"column":10,"offset":70} + } + ], + "empty_count":0, + "empty_locs":[] + } +} +// @flow + +(null: any); + +const emoji = "💩"; (null: any); + +(null: any); + +Covered: 62.50% (5 of 8 expressions) + +----------------------------- +trust.js +----------------------------- + +{ + "expressions":{ + "untainted_count":22, + "untainted_locs":[ + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":3,"column":1,"offset":9}, + "end":{"line":3,"column":16,"offset":25} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":4,"column":1,"offset":39}, + "end":{"line":4,"column":26,"offset":65} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":9,"column":1,"offset":160}, + "end":{"line":9,"column":16,"offset":176} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":10,"column":1,"offset":190}, + "end":{"line":10,"column":16,"offset":206} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":12,"column":1,"offset":244}, + "end":{"line":12,"column":16,"offset":260} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":15,"column":1,"offset":325}, + "end":{"line":15,"column":17,"offset":342} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":17,"column":1,"offset":379}, + "end":{"line":17,"column":17,"offset":396} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":19,"column":1,"offset":411}, + "end":{"line":19,"column":16,"offset":427} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":29,"column":1,"offset":650}, + "end":{"line":29,"column":27,"offset":677} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":30,"column":1,"offset":678}, + "end":{"line":30,"column":27,"offset":705} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":31,"column":1,"offset":706}, + "end":{"line":31,"column":27,"offset":733} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":32,"column":1,"offset":734}, + "end":{"line":32,"column":27,"offset":761} + }, + { + "source":"trust.js", + "type":"SourceFile", + 
"start":{"line":34,"column":13,"offset":775}, + "end":{"line":34,"column":14,"offset":777} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":35,"column":13,"offset":796}, + "end":{"line":35,"column":14,"offset":798} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":36,"column":13,"offset":817}, + "end":{"line":36,"column":14,"offset":819} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":37,"column":13,"offset":838}, + "end":{"line":37,"column":14,"offset":840} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":39,"column":1,"offset":848}, + "end":{"line":39,"column":2,"offset":850} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":39,"column":1,"offset":848}, + "end":{"line":39,"column":4,"offset":852} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":40,"column":1,"offset":854}, + "end":{"line":40,"column":2,"offset":856} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":41,"column":1,"offset":860}, + "end":{"line":41,"column":2,"offset":862} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":41,"column":1,"offset":860}, + "end":{"line":41,"column":4,"offset":864} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":42,"column":1,"offset":866}, + "end":{"line":42,"column":2,"offset":868} + } + ], + "tainted_count":0, + "tainted_locs":[], + "uncovered_count":17, + "uncovered_locs":[ + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":5,"column":1,"offset":77}, + "end":{"line":5,"column":26,"offset":103} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":6,"column":1,"offset":113}, + "end":{"line":6,"column":15,"offset":128} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":138}, + "end":{"line":7,"column":13,"offset":151} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":11,"column":1,"offset":220}, + "end":{"line":11,"column":16,"offset":236} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":13,"column":1,"offset":274}, + "end":{"line":13,"column":16,"offset":290} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":14,"column":1,"offset":298}, + "end":{"line":14,"column":17,"offset":315} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":16,"column":1,"offset":354}, + "end":{"line":16,"column":17,"offset":371} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":20,"column":1,"offset":439}, + "end":{"line":20,"column":16,"offset":455} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":21,"column":1,"offset":465}, + "end":{"line":21,"column":16,"offset":481} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":22,"column":1,"offset":489}, + "end":{"line":22,"column":16,"offset":505} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":23,"column":1,"offset":519}, + "end":{"line":23,"column":16,"offset":535} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":24,"column":1,"offset":543}, + "end":{"line":24,"column":17,"offset":560} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":25,"column":1,"offset":570}, + "end":{"line":25,"column":17,"offset":587} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":26,"column":1,"offset":597}, + "end":{"line":26,"column":17,"offset":614} + 
}, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":27,"column":1,"offset":622}, + "end":{"line":27,"column":17,"offset":639} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":40,"column":1,"offset":854}, + "end":{"line":40,"column":4,"offset":858} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":42,"column":1,"offset":866}, + "end":{"line":42,"column":4,"offset":870} + } + ], + "empty_count":9, + "empty_locs":[ + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":5,"column":1,"offset":77}, + "end":{"line":5,"column":26,"offset":103} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":6,"column":1,"offset":113}, + "end":{"line":6,"column":15,"offset":128} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":14,"column":1,"offset":298}, + "end":{"line":14,"column":17,"offset":315} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":20,"column":1,"offset":439}, + "end":{"line":20,"column":16,"offset":455} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":22,"column":1,"offset":489}, + "end":{"line":22,"column":16,"offset":505} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":24,"column":1,"offset":543}, + "end":{"line":24,"column":17,"offset":560} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":25,"column":1,"offset":570}, + "end":{"line":25,"column":17,"offset":587} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":27,"column":1,"offset":622}, + "end":{"line":27,"column":17,"offset":639} + }, + { + "source":"trust.js", + "type":"SourceFile", + "start":{"line":42,"column":1,"offset":866}, + "end":{"line":42,"column":4,"offset":870} + } + ] + } +} +Covered: 56.41% (22 of 39 expressions) + diff --git a/tests/coverage/coverage.js b/tests/coverage/coverage.js new file mode 100644 index 00000000000..07fd00f817b --- /dev/null +++ b/tests/coverage/coverage.js @@ -0,0 +1,42 @@ +// @flow + +const a1 = 5; +const a2: any = a1 - 2; +const a3 = a2 + 1; + +type E = empty; + +type U1 = any | number; +type U2 = number | any; +type U3 = empty | number; +type U4 = number | empty; +type U5 = empty | any; +type U6 = any | empty; +type U7 = number | number; + +type I1 = any & number; +type I2 = number & any; +type I3 = empty & number; +type I4 = number & empty; +type I5 = empty & any; +type I6 = any & empty; +type I7 = number & number; + +type R = R; + +type RU1 = RU1 | any; +type RU2 = RU2 | empty; +type RU3 = any | RU3; +type RU4 = empty | RU4; +type RU5 = RU5; +type RU6 = RU6 | number; +type RU7 = number | RU7; +type RU8 = RU8 | RU8; + +type RI1 = RI1 & any; +type RI2 = RI2 & empty; +type RI3 = any & RI3; +type RI4 = empty & RI4; +type RI5 = RI5 & number; +type RI6 = number & RI6; +type RI7 = RI7 & RI7; diff --git a/tests/coverage/test.sh b/tests/coverage/test.sh index 47a4d6fbefb..c067343ef46 100755 --- a/tests/coverage/test.sh +++ b/tests/coverage/test.sh @@ -19,3 +19,27 @@ assert_ok "$FLOW" coverage --respect-pragma no_pragma.js # --all wins (and assumes @flow weak) assert_ok "$FLOW" coverage --respect-pragma --all no_pragma.js + +echo "-----------------------------" +echo "coverage.js" +echo "-----------------------------" +echo +# some more detailed tests: +assert_ok "$FLOW" coverage --strip-root --pretty coverage.js + +echo "-----------------------------" +echo "unicode.js" +echo "-----------------------------" +echo +# tests for +assert_ok "$FLOW" coverage --strip-root 
--pretty unicode.js +assert_ok "$FLOW" coverage --color unicode.js + +"$FLOW" stop +"$FLOW" start --trust-mode=check +echo "-----------------------------" +echo "trust.js" +echo "-----------------------------" +echo +assert_ok "$FLOW" coverage --show-trust --strip-root --pretty trust.js +assert_ok "$FLOW" coverage --show-trust trust.js diff --git a/tests/coverage/trust.js b/tests/coverage/trust.js new file mode 100644 index 00000000000..7398a1aff37 --- /dev/null +++ b/tests/coverage/trust.js @@ -0,0 +1,42 @@ +//@flow + +type N = number; // untrusted +type T = $Trusted<number>; // trusted +type TE = $Trusted<empty>; // empty +type E = empty; // empty +type A = any; // any + +type U1 = N | T; // untrusted +type U2 = E | T; // untrusted +type U3 = A | T; // any +type U4 = E | N; // untrusted +type U5 = A | N; // any +type U6 = TE | E; // empty +type U7 = TE | T; // trusted +type U8 = TE | A; // any +type U9 = TE | N; // untrusted + +type I1 = N & T; // trusted +type I2 = E & T; // empty +type I3 = A & T; // any +type I4 = E & N; // untrusted +type I5 = A & N; // any +type I6 = TE & E; // empty +type I7 = TE & T; // empty +type I8 = TE & A; // any +type I9 = TE & N; // empty + +type O1 = $Trusted<{x : N}> +type O2 = $Trusted<{x : A}> +type O3 = $Trusted<{x : T}> +type O4 = $Trusted<{x : E}> + +declare var o1 : O1; +declare var o2 : O2; +declare var o3 : O3; +declare var o4 : O4; + +o1.x; +o2.x; +o3.x; +o4.x; diff --git a/tests/coverage/unicode.js b/tests/coverage/unicode.js new file mode 100644 index 00000000000..b0311ad9174 --- /dev/null +++ b/tests/coverage/unicode.js @@ -0,0 +1,7 @@ +// @flow + +(null: any); + +const emoji = "💩"; (null: any); + +(null: any); diff --git a/tests/cycle-command/.flowconfig b/tests/cycle-command/.flowconfig new file mode 100644 index 00000000000..4771c059106 --- /dev/null +++ b/tests/cycle-command/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] + +experimental.well_formed_exports=true + +[strict] diff --git a/tests/cycle-command/.testconfig b/tests/cycle-command/.testconfig new file mode 100644 index 00000000000..d1ea190969c --- /dev/null +++ b/tests/cycle-command/.testconfig @@ -0,0 +1,2 @@ +shell: test.sh +skip_saved_state: true diff --git a/tests/cycle-command/cycle-command.exp b/tests/cycle-command/cycle-command.exp new file mode 100644 index 00000000000..1e357ad70c4 --- /dev/null +++ b/tests/cycle-command/cycle-command.exp @@ -0,0 +1,23 @@ + +Start server in classic mode + +Value cycle should include both files +digraph { + "fileB.js" -> "fileA.js" + "fileA.js" -> "fileB.js" +} +Type cycle should include both files +digraph { + "fileB.js" -> "fileA.js" + "fileA.js" -> "fileB.js" +} +Start server in types-first mode + +Value cycle should include both files +digraph { + "fileB.js" -> "fileA.js" + "fileA.js" -> "fileB.js" +} +Type cycle should be empty +digraph { +} diff --git a/tests/cycle-command/fileA.js b/tests/cycle-command/fileA.js new file mode 100644 index 00000000000..a10c035ff44 --- /dev/null +++ b/tests/cycle-command/fileA.js @@ -0,0 +1,7 @@ +// @flow + +const B = require('./fileB'); + +class A {} + +module.exports = A; diff --git a/tests/cycle-command/fileB.js b/tests/cycle-command/fileB.js new file mode 100644 index 00000000000..c82ec251956 --- /dev/null +++ b/tests/cycle-command/fileB.js @@ -0,0 +1,7 @@ +// @flow + +const A = require("./fileA"); + +class B extends A {} + +module.exports = B; diff --git a/tests/cycle-command/test.sh b/tests/cycle-command/test.sh new file mode 100644 index 00000000000..72f82fdf9ba ---
/dev/null +++ b/tests/cycle-command/test.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +assert_ok "$FLOW" stop + +printf "\\nStart server in classic mode\\n" +start_flow . + +printf "\\nValue cycle should include both files\\n" +assert_ok "$FLOW" cycle --strip-root fileA.js + +printf "\\nType cycle should include both files\\n" +assert_ok "$FLOW" cycle --strip-root --types fileA.js + +assert_ok "$FLOW" stop + +printf "\\nStart server in types-first mode\\n" +start_flow . --types-first + +printf "\\nValue cycle should include both files\\n" +assert_ok "$FLOW" cycle --strip-root fileA.js + +printf "\\nType cycle should be empty\\n" +assert_ok "$FLOW" cycle --strip-root --types fileA.js + +assert_ok "$FLOW" stop + +printf "\\n" diff --git a/tests/date/date.exp b/tests/date/date.exp index e48e4cf5f50..8f52f368045 100644 --- a/tests/date/date.exp +++ b/tests/date/date.exp @@ -3,24 +3,38 @@ Error -------------------------------------------------------------------------- Cannot assign `d.getTime()` to `x` because number [1] is incompatible with string [2]. date.js:2:16 - 2| var x:string = d.getTime(); + 2| var x:string = d.getTime(); // expect error ^^^^^^^^^^^ References: - /core.js:363:16 - 363| getTime(): number; + /core.js:400:16 + 400| getTime(): number; ^^^^^^ [1] date.js:2:7 - 2| var x:string = d.getTime(); + 2| var x:string = d.getTime(); // expect error ^^^^^^ [2] +Error ----------------------------------------------------------------------------------------------------- date.js:4:16 + +Cannot assign `d` to `y` because `Date` [1] is incompatible with number [2]. + + date.js:4:16 + 4| var y:number = d; // expect error + ^ + +References: + date.js:1:9 + 1| var d = new Date(0); + ^^^^^^^^^^^ [1] + date.js:4:7 + 4| var y:number = d; // expect error + ^^^^^^ [2] + + Error ----------------------------------------------------------------------------------------------------- date.js:18:1 -Cannot call `Date` because: - - Either object literal [1] is incompatible with number [2]. - - Or object literal [1] is incompatible with string [3]. - - Or object literal [1] is incompatible with number [4]. +Cannot call `Date` because object literal [1] is incompatible with `Date` [2]. 
date.js:18:1 18| new Date({}); @@ -30,15 +44,9 @@ References: date.js:18:10 18| new Date({}); ^^ [1] - /core.js:352:28 - 352| constructor(timestamp: number): void; - ^^^^^^ [2] - /core.js:353:29 - 353| constructor(dateString: string): void; - ^^^^^^ [3] - /core.js:354:23 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; - ^^^^^^ [4] + /core.js:389:23 + 389| constructor(date: Date): void; + ^^^^ [2] Error ----------------------------------------------------------------------------------------------------- date.js:19:1 @@ -53,8 +61,8 @@ References: date.js:19:16 19| new Date(2015, '6'); ^^^ [1] - /core.js:354:38 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:38 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] @@ -70,8 +78,8 @@ References: date.js:20:19 20| new Date(2015, 6, '18'); ^^^^ [1] - /core.js:354:52 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:52 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] @@ -87,8 +95,8 @@ References: date.js:21:23 21| new Date(2015, 6, 18, '11'); ^^^^ [1] - /core.js:354:67 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:67 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] @@ -104,8 +112,8 @@ References: date.js:22:27 22| new Date(2015, 6, 18, 11, '55'); ^^^^ [1] - /core.js:354:84 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:84 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] @@ -121,8 +129,8 @@ References: date.js:23:31 23| new Date(2015, 6, 18, 11, 55, '42'); ^^^^ [1] - /core.js:354:101 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:101 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] @@ -138,8 +146,8 @@ References: date.js:24:35 24| new Date(2015, 6, 18, 11, 55, 42, '999'); ^^^^^ [1] - /core.js:354:123 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:123 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] @@ -149,25 +157,29 @@ Cannot call `Date` because: - Either no arguments are expected by function type [1]. - Or no more than 1 argument is expected by function type [2]. - Or no more than 1 argument is expected by function type [3]. - - Or no more than 7 arguments are expected by function type [4]. + - Or no more than 1 argument is expected by function type [4]. + - Or no more than 7 arguments are expected by function type [5]. 
date.js:25:1 25| new Date(2015, 6, 18, 11, 55, 42, 999, 'hahaha'); ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:351:5 - 351| constructor(): void; + /core.js:387:5 + 387| constructor(): void; ^^^^^^^^^^^^^^^^^^^ [1] - /core.js:352:5 - 352| constructor(timestamp: number): void; + /core.js:388:5 + 388| constructor(timestamp: number): void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] - /core.js:353:5 - 353| constructor(dateString: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] - /core.js:354:5 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] + /core.js:389:5 + 389| constructor(date: Date): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] + /core.js:390:5 + 390| constructor(dateString: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] + /core.js:391:5 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [5] Error ----------------------------------------------------------------------------------------------------- date.js:26:1 @@ -182,13 +194,61 @@ References: date.js:26:10 26| new Date('2015', 6); ^^^^^^ [1] - /core.js:354:23 - 354| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; + /core.js:391:23 + 391| constructor(year: number, month: number, day?: number, hour?: number, minute?: number, second?: number, millisecond?: number): void; ^^^^^^ [2] +Error ---------------------------------------------------------------------------------------------------- date.js:31:17 + +Cannot add `d` and `12` because `Date` [1] is incompatible with number [2]. + + date.js:31:17 + 31| var n3:number = d + 12; // expect error + ^^^^^^ + +References: + date.js:1:9 + 1| var d = new Date(0); + ^^^^^^^^^^^ [1] + date.js:31:21 + 31| var n3:number = d + 12; // expect error + ^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- date.js:32:16 + +Cannot add `d` and `12` because `Date` [1] is incompatible with number [2]. + + date.js:32:16 + 32| var s:string = d + 12; // fixme? in js this coerces both to string and concats them + ^^^^^^ + +References: + date.js:1:9 + 1| var d = new Date(0); + ^^^^^^^^^^^ [1] + date.js:32:20 + 32| var s:string = d + 12; // fixme? in js this coerces both to string and concats them + ^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- date.js:32:16 + +Cannot assign `d + 12` to `s` because number [1] is incompatible with string [2]. + + date.js:32:16 + 32| var s:string = d + 12; // fixme? in js this coerces both to string and concats them + ^^^^^^ [1] + +References: + date.js:32:7 + 32| var s:string = d + 12; // fixme? in js this coerces both to string and concats them + ^^^^^^ [2] + + -Found 10 errors +Found 14 errors Only showing the most relevant union/intersection branches. 
To see all branches, re-run Flow with --show-all-branches diff --git a/tests/date/date.js b/tests/date/date.js index 0edf1189de3..dd83ce85fa4 100644 --- a/tests/date/date.js +++ b/tests/date/date.js @@ -1,7 +1,7 @@ var d = new Date(0); -var x:string = d.getTime(); +var x:string = d.getTime(); // expect error -var y:number = d; +var y:number = d; // expect error // valid constructors new Date(); @@ -24,3 +24,9 @@ new Date(2015, 6, 18, 11, 55, '42'); new Date(2015, 6, 18, 11, 55, 42, '999'); new Date(2015, 6, 18, 11, 55, 42, 999, 'hahaha'); new Date('2015', 6); + +var b:boolean = d > 0; +var n1:number = d - 12; +var n2:number = d & 255; +var n3:number = d + 12; // expect error +var s:string = d + 12; // fixme? in js this coerces both to string and concats them diff --git a/tests/declaration_files_haste/ImplicitProvidesModule.js.flow b/tests/declaration_files_haste/ImplicitProvidesModule.js.flow index 8d851c0d260..2f8f07cf251 100644 --- a/tests/declaration_files_haste/ImplicitProvidesModule.js.flow +++ b/tests/declaration_files_haste/ImplicitProvidesModule.js.flow @@ -1 +1,2 @@ +// @providesModule ImplicitProvidesModule declare export function fun(): number; diff --git a/tests/declaration_files_haste/declaration_files_haste.exp b/tests/declaration_files_haste/declaration_files_haste.exp index f4f1f303257..109d8a235d2 100644 --- a/tests/declaration_files_haste/declaration_files_haste.exp +++ b/tests/declaration_files_haste/declaration_files_haste.exp @@ -83,8 +83,8 @@ Cannot cast `Implicit.fun()` to string because number [1] is incompatible with s ^^^^^^^^^^^^^^ References: - ImplicitProvidesModule.js.flow:1:32 - 1| declare export function fun(): number; + ImplicitProvidesModule.js.flow:2:32 + 2| declare export function fun(): number; ^^^^^^ [1] test.js:4:18 4| (Implicit.fun(): string); diff --git a/tests/declaration_files_incremental_haste/ImplicitProvidesModule.js.flow.ignored b/tests/declaration_files_incremental_haste/ImplicitProvidesModule.js.flow.ignored index 745530fb92b..2a0de9bf2f4 100644 --- a/tests/declaration_files_incremental_haste/ImplicitProvidesModule.js.flow.ignored +++ b/tests/declaration_files_incremental_haste/ImplicitProvidesModule.js.flow.ignored @@ -1,2 +1,3 @@ +// @providesModule ImplicitProvidesModule declare class Declaration {} declare export function fun(): Declaration; diff --git a/tests/declaration_files_incremental_haste/declaration_files_incremental_haste.exp b/tests/declaration_files_incremental_haste/declaration_files_incremental_haste.exp index 366bb7d13f9..8dd116e2b90 100644 --- a/tests/declaration_files_incremental_haste/declaration_files_incremental_haste.exp +++ b/tests/declaration_files_incremental_haste/declaration_files_incremental_haste.exp @@ -163,8 +163,8 @@ Cannot cast `Implicit.fun()` to boolean because `Declaration` [1] is incompatibl ^^^^^^^^^^^^^^ References: - ImplicitProvidesModule.js.flow:2:32 - 2| declare export function fun(): Declaration; + ImplicitProvidesModule.js.flow:3:32 + 3| declare export function fun(): Declaration; ^^^^^^^^^^^ [1] test.js:4:18 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean @@ -374,8 +374,8 @@ Cannot cast `Implicit.fun()` to boolean because `Declaration` [1] is incompatibl ^^^^^^^^^^^^^^ References: - ImplicitProvidesModule.js.flow:2:32 - 2| declare export function fun(): Declaration; + ImplicitProvidesModule.js.flow:3:32 + 3| declare export function fun(): Declaration; ^^^^^^^^^^^ [1] test.js:4:18 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> 
boolean or Declaration ~> boolean @@ -582,8 +582,8 @@ Cannot cast `Implicit.fun()` to boolean because `Declaration` [1] is incompatibl ^^^^^^^^^^^^^^ References: - ImplicitProvidesModule.js.flow:2:32 - 2| declare export function fun(): Declaration; + ImplicitProvidesModule.js.flow:3:32 + 3| declare export function fun(): Declaration; ^^^^^^^^^^^ [1] test.js:4:18 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean @@ -680,12 +680,21 @@ References: ^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- test.js:3:24 +Error ------------------------------------------------------------------------------------------------------ test.js:4:2 -Cannot resolve module `ImplicitProvidesModule`. +Cannot cast `Implicit.fun()` to boolean because `Declaration` [1] is incompatible with boolean [2]. - 3| var Implicit = require('ImplicitProvidesModule'); - ^^^^^^^^^^^^^^^^^^^^^^^^ + test.js:4:2 + 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean + ^^^^^^^^^^^^^^ + +References: + ImplicitProvidesModule.js.flow:3:32 + 3| declare export function fun(): Declaration; + ^^^^^^^^^^^ [1] + test.js:4:18 + 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean + ^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------------ test.js:7:2 @@ -784,8 +793,8 @@ Cannot cast `Implicit.fun()` to boolean because `Declaration` [1] is incompatibl ^^^^^^^^^^^^^^ References: - ImplicitProvidesModule.js.flow:2:32 - 2| declare export function fun(): Declaration; + ImplicitProvidesModule.js.flow:3:32 + 3| declare export function fun(): Declaration; ^^^^^^^^^^^ [1] test.js:4:18 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean @@ -879,12 +888,21 @@ References: ^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- test.js:3:24 +Error ------------------------------------------------------------------------------------------------------ test.js:4:2 -Cannot resolve module `ImplicitProvidesModule`. +Cannot cast `Implicit.fun()` to boolean because `Declaration` [1] is incompatible with boolean [2]. - 3| var Implicit = require('ImplicitProvidesModule'); - ^^^^^^^^^^^^^^^^^^^^^^^^ + test.js:4:2 + 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean + ^^^^^^^^^^^^^^ + +References: + ImplicitProvidesModule.js.flow:3:32 + 3| declare export function fun(): Declaration; + ^^^^^^^^^^^ [1] + test.js:4:18 + 4| (Implicit.fun(): boolean); // Error: Either Implementation ~> boolean or Declaration ~> boolean + ^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------------ test.js:7:2 diff --git a/tests/declare_class/declare_class.exp b/tests/declare_class/declare_class.exp index be85fe3857c..19397df1f5b 100644 --- a/tests/declare_class/declare_class.exp +++ b/tests/declare_class/declare_class.exp @@ -22,62 +22,62 @@ References: ^^^^^^ [3] -Error --------------------------------------------------------------------------------------------- declare_class.js:8:7 +Error -------------------------------------------------------------------------------------------- declare_class.js:10:7 Cannot assign empty string to `C.x` because string [1] is incompatible with number [2]. 
- declare_class.js:8:7 - 8| C.x = ""; - ^^ [1] + declare_class.js:10:7 + 10| C.x = ""; + ^^ [1] References: - declare_class.js:2:15 - 2| static x: number; - ^^^^^^ [2] + declare_class.js:4:15 + 4| static x: number; + ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------- declare_class.js:9:7 +Error -------------------------------------------------------------------------------------------- declare_class.js:11:7 Cannot call `C.foo` with empty string bound to `x` because string [1] is incompatible with number [2]. - declare_class.js:9:7 - 9| C.foo(""); - ^^ [1] + declare_class.js:11:7 + 11| C.foo(""); + ^^ [1] References: - declare_class.js:3:19 - 3| static foo(x: number): void; - ^^^^^^ [2] + declare_class.js:5:19 + 5| static foo(x: number): void; + ^^^^^^ [2] -Error -------------------------------------------------------------------------------------------- declare_class.js:12:2 +Error -------------------------------------------------------------------------------------------- declare_class.js:14:2 Cannot cast `C.name` to number because string [1] is incompatible with number [2]. - declare_class.js:12:2 - 12| (C.name: number); // error, it's a string + declare_class.js:14:2 + 14| (C.name: number); // error, it's a string ^^^^^^ References: - declare_class.js:1:15 - 1| declare class C { + declare_class.js:3:15 + 3| declare class C { ^ [1] - declare_class.js:12:10 - 12| (C.name: number); // error, it's a string + declare_class.js:14:10 + 14| (C.name: number); // error, it's a string ^^^^^^ [2] -Error -------------------------------------------------------------------------------------------- declare_class.js:15:7 +Error -------------------------------------------------------------------------------------------- declare_class.js:17:7 Cannot call `D` with `123` bound to `x` because number [1] is incompatible with string [2]. - declare_class.js:15:7 - 15| new D(123); // error, number ~> string + declare_class.js:17:7 + 17| new D(123); // error, number ~> string ^^^ [1] References: - declare_class.js:5:20 - 5| constructor(x: string): void; + declare_class.js:7:20 + 7| constructor(x: string): void; ^^^^^^ [2] diff --git a/tests/declare_class/declare_class.js b/tests/declare_class/declare_class.js index bd1867d22f4..727687e61fb 100644 --- a/tests/declare_class/declare_class.js +++ b/tests/declare_class/declare_class.js @@ -1,3 +1,5 @@ +// @flow + declare class C { static x: number; static foo(x: number): void; @@ -13,3 +15,8 @@ C.foo(""); declare class D extends C { } new D(123); // error, number ~> string + +declare class E { + +[key: string]: number; +} +;(new E()['a']: number) // no error diff --git a/tests/declare_fun/declare_fun.exp b/tests/declare_fun/declare_fun.exp index 384abc256b3..39012043726 100644 --- a/tests/declare_fun/declare_fun.exp +++ b/tests/declare_fun/declare_fun.exp @@ -1,3 +1,17 @@ +Error ----------------------------------------------------------------------------------------------------- scope.js:3:5 + +Cannot call `bar` with `0` bound to `y` because number [1] is incompatible with string [2]. + + scope.js:3:5 + 3| bar(0); + ^ [1] + +References: + scope.js:1:25 + 1| declare function bar(y: T): string; + ^ [2] + + Error ------------------------------------------------------------------------------------------------------ test.js:7:2 Cannot cast `foo(...)` to undefined because boolean [1] is incompatible with undefined [2]. 
@@ -16,4 +30,4 @@ References: -Found 1 error +Found 2 errors diff --git a/tests/declare_fun/scope.js b/tests/declare_fun/scope.js new file mode 100644 index 00000000000..a0796c789a5 --- /dev/null +++ b/tests/declare_fun/scope.js @@ -0,0 +1,4 @@ +declare function bar(y: T): string; +type T = string; +bar(0); +bar(""); diff --git a/tests/declare_module_exports/declare_module_exports.exp b/tests/declare_module_exports/declare_module_exports.exp index 831ccb07288..cc4faaf80e5 100644 --- a/tests/declare_module_exports/declare_module_exports.exp +++ b/tests/declare_module_exports/declare_module_exports.exp @@ -72,5 +72,39 @@ References: ^^^^^^ [2] +Error ----------------------------------------------------------------------------------------------------- main.js:29:2 -Found 5 errors +Cannot cast `foo(...)` to string because number [1] is incompatible with string [2]. + + main.js:29:2 + 29| (foo(0): string); // Error: number ~> string + ^^^^^^ + +References: + flow-typed/libs.js:37:36 + 37| declare function foo(x: number): number; + ^^^^^^ [1] + main.js:29:10 + 29| (foo(0): string); // Error: number ~> string + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- main.js:31:2 + +Cannot cast `foo(...)` to number because string [1] is incompatible with number [2]. + + main.js:31:2 + 31| (foo(""): number); // Error: string ~> number + ^^^^^^^ + +References: + flow-typed/libs.js:36:36 + 36| declare function foo(x: string): string; + ^^^^^^ [1] + main.js:31:11 + 31| (foo(""): number); // Error: string ~> number + ^^^^^^ [2] + + + +Found 7 errors diff --git a/tests/declare_module_exports/flow-typed/libs.js b/tests/declare_module_exports/flow-typed/libs.js index 816e6d71146..1b79983ea70 100644 --- a/tests/declare_module_exports/flow-typed/libs.js +++ b/tests/declare_module_exports/flow-typed/libs.js @@ -28,3 +28,11 @@ declare module "declare_m_e_with_declare_var_e" { declare module.exports: number; declare var exports: string; } + +/** + * Ensure that the intersection of the two declarations is exported. + */ +declare module "declare_overloaded_function" { + declare function foo(x: string): string; + declare function foo(x: number): number; +} diff --git a/tests/declare_module_exports/main.js b/tests/declare_module_exports/main.js index a8b92866a60..1a419048863 100644 --- a/tests/declare_module_exports/main.js +++ b/tests/declare_module_exports/main.js @@ -23,3 +23,9 @@ import declare_var_exports from "declare_var_exports"; import declare_m_e_with_declare_var_e from "declare_m_e_with_declare_var_e"; (declare_m_e_with_declare_var_e: number); (declare_m_e_with_declare_var_e: string); // Error: number ~> string + +import { foo } from "declare_overloaded_function"; +(foo(0): number); +(foo(0): string); // Error: number ~> string +(foo(""): string); +(foo(""): number); // Error: string ~> number diff --git a/tests/delete/.flowconfig b/tests/delete/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/delete/delete.exp b/tests/delete/delete.exp new file mode 100644 index 00000000000..1f6113ed5f0 --- /dev/null +++ b/tests/delete/delete.exp @@ -0,0 +1,235 @@ +Error ---------------------------------------------------------------------------------------------------- delete.js:4:8 + +Cannot delete `obj1.f` because undefined [1] is incompatible with number [2]. 
+ + delete.js:4:8 + 4| delete obj1.f; // error, f is required + ^^^^^^ + +References: + delete.js:4:1 + 4| delete obj1.f; // error, f is required + ^^^^^^^^^^^^^ [1] + delete.js:3:24 + 3| declare var obj1: { f: number }; + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:12:2 + +Cannot cast `x` to number because undefined [1] is incompatible with number [2]. + + delete.js:12:2 + 12| (x: number); + ^ + +References: + delete.js:11:1 + 11| delete x; + ^^^^^^^^ [1] + delete.js:12:5 + 12| (x: number); + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:16:8 + +Cannot delete `y` because undefined [1] is incompatible with number [2]. + + delete.js:16:8 + 16| delete y; + ^ + +References: + delete.js:16:1 + 16| delete y; + ^^^^^^^^ [1] + delete.js:15:8 + 15| var y: number = 42; + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:27:8 + +Cannot delete `b[0]` because read-only arrays cannot be written to. + + 27| delete b[0]; + ^^^^ + + +Error --------------------------------------------------------------------------------------------------- delete.js:30:8 + +Cannot delete `c[1]` because undefined [1] is incompatible with number [2]. + + delete.js:30:8 + 30| delete c[1]; + ^^^^ + +References: + delete.js:30:1 + 30| delete c[1]; + ^^^^^^^^^^^ [1] + delete.js:29:25 + 29| declare var c: [number, number]; + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:31:8 + +Cannot delete `c[2]` because tuple type [1] only has 2 elements, so index 2 is out of bounds. + + delete.js:31:8 + 31| delete c[2]; + ^^^^ + +References: + delete.js:29:16 + 29| declare var c: [number, number]; + ^^^^^^^^^^^^^^^^ [1] + + +Error --------------------------------------------------------------------------------------------------- delete.js:34:2 + +Cannot cast `w.a` to undefined because number [1] is incompatible with undefined [2]. + + delete.js:34:2 + 34| (w.a: void); + ^^^ + +References: + delete.js:33:12 + 33| var w: {a: number | void} = {a: 42}; + ^^^^^^ [1] + delete.js:34:7 + 34| (w.a: void); + ^^^^ [2] + + +Error -------------------------------------------------------------------------------------------------- delete.js:40:14 + +Cannot call method `freeze` because property `freeze` is missing in statics of `Object` [1]. + + delete.js:40:14 + 40| const obj4 = Object.freeze({ f: 1 }); + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /prelude.js:5:15 + 5| declare class Object {} + ^^^^^^ [1] + + +Error --------------------------------------------------------------------------------------------------- delete.js:44:8 + +Cannot delete `obj5.f` because property `f` is not writable. + + 44| delete obj5.f; // error, just like when writing to read-only object + ^^^^^^ + + +Error --------------------------------------------------------------------------------------------------- delete.js:49:8 + +Cannot delete `obj6.m` because property `m` is not writable. + + 49| delete obj6.m; // warn, m is not own (delete only has effect + ^^^^^^ + + +Error --------------------------------------------------------------------------------------------------- delete.js:52:1 + +Cannot delete number [1] because only member expressions and variables can be deleted. 
+ + delete.js:52:1 + 52| delete 1; + ^^^^^^^^ + +References: + delete.js:52:8 + 52| delete 1; + ^ [1] + + +Error --------------------------------------------------------------------------------------------------- delete.js:54:1 + +Cannot delete function [1] because only member expressions and variables can be deleted. + + delete.js:54:1 + 54| delete (() => 42); + ^^^^^^^^^^^^^^^^^ + +References: + delete.js:54:9 + 54| delete (() => 42); + ^^^^^^^^ [1] + + +Error --------------------------------------------------------------------------------------------------- delete.js:61:8 + +Cannot delete `index.a` because undefined [1] is incompatible with boolean [2]. + + delete.js:61:8 + 61| delete index.a; + ^^^^^^^ + +References: + delete.js:61:1 + 61| delete index.a; + ^^^^^^^^^^^^^^ [1] + delete.js:60:43 + 60| declare var index: { [string]: number, a: boolean }; + ^^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:62:8 + +Cannot delete `index['a']` because undefined [1] is incompatible with boolean [2]. + + delete.js:62:8 + 62| delete index['a']; + ^^^^^^^^^^ + +References: + delete.js:62:1 + 62| delete index['a']; + ^^^^^^^^^^^^^^^^^ [1] + delete.js:60:43 + 60| declare var index: { [string]: number, a: boolean }; + ^^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:71:2 + +Cannot cast `dct['a']` to string because undefined [1] is incompatible with string [2]. + + delete.js:71:2 + 71| (dct['a']: string); + ^^^^^^^^ + +References: + delete.js:69:1 + 69| delete dct['a']; + ^^^^^^^^^^^^^^^ [1] + delete.js:71:12 + 71| (dct['a']: string); + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------------- delete.js:72:2 + +Cannot cast `dct['a']` to undefined because string [1] is incompatible with undefined [2]. 
+ + delete.js:72:2 + 72| (dct['a']: void); + ^^^^^^^^ + +References: + delete.js:67:12 + 67| dct['a'] = 'hello'; + ^^^^^^^ [1] + delete.js:72:12 + 72| (dct['a']: void); + ^^^^ [2] + + + +Found 16 errors diff --git a/tests/delete/delete.js b/tests/delete/delete.js new file mode 100644 index 00000000000..c6ddde7a44f --- /dev/null +++ b/tests/delete/delete.js @@ -0,0 +1,80 @@ +//@flow + +declare var obj1: { f: number }; +delete obj1.f; // error, f is required + + +declare var obj2: { f?: number }; +delete obj2.f; //fine + +var x = 42; +delete x; +(x: number); +(x: number | void); + +var y: number = 42; +delete y; + +type A = { -a?: string }; + +declare var a: A; + + +const z = delete a.a; + + +declare var b: $ReadOnlyArray; +delete b[0]; + +declare var c: [number, number]; +delete c[1]; +delete c[2]; + +var w: {a: number | void} = {a: 42}; +(w.a: void); + +var w2: {a: number | void} = {a: 42}; +delete w2.a; +(w2.a: void); + +const obj4 = Object.freeze({ f: 1 }); +delete obj4.f; // error, just like when writing to frozen object + +declare var obj5: { +f?: number }; +delete obj5.f; // error, just like when writing to read-only object + +class C { x; m() {} } +declare var obj6: C; +delete obj6.x; +delete obj6.m; // warn, m is not own (delete only has effect + // on own-properties) + +delete 1; +delete C; +delete (() => 42); + + +declare var f: Array; +delete f[0]; + +declare var index: { [string]: number, a: boolean }; +delete index.a; +delete index['a']; +delete index.b; +delete index['foo']; + +var dct = {}; +dct['a'] = 'hello'; +dct['c'] = 'bye'; +delete dct['a']; +delete dct['b']; +(dct['a']: string); +(dct['a']: void); +(dct['c']: string); + +var dct2 = {}; +dct2['a'] = 'hello'; +dct2['c'] = 'bye'; +declare var key: string; +delete dct2[key]; +(dct2['a']: string) diff --git a/tests/dep_graph/.flowconfig b/tests/dep_graph/.flowconfig new file mode 100644 index 00000000000..4771c059106 --- /dev/null +++ b/tests/dep_graph/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] + +experimental.well_formed_exports=true + +[strict] diff --git a/tests/dep_graph/.testconfig b/tests/dep_graph/.testconfig new file mode 100644 index 00000000000..d1ea190969c --- /dev/null +++ b/tests/dep_graph/.testconfig @@ -0,0 +1,2 @@ +shell: test.sh +skip_saved_state: true diff --git a/tests/dep_graph/dep_graph.exp b/tests/dep_graph/dep_graph.exp new file mode 100644 index 00000000000..9ec90f0504f --- /dev/null +++ b/tests/dep_graph/dep_graph.exp @@ -0,0 +1,27 @@ + +Start server in classic mode + +Value dep-graph should include both edges +digraph { + "fileB.js" -> "fileA.js" + "fileA.js" -> "fileB.js" +} + +Type dep-graph should include both edges +digraph { + "fileB.js" -> "fileA.js" + "fileA.js" -> "fileB.js" +} + +Start server in types-first mode + +Value dep-graph should include both edges +digraph { + "fileB.js" -> "fileA.js" + "fileA.js" -> "fileB.js" +} + +Type dep-graph should include one edge +digraph { + "fileB.js" -> "fileA.js" +} diff --git a/tests/dep_graph/fileA.js b/tests/dep_graph/fileA.js new file mode 100644 index 00000000000..a10c035ff44 --- /dev/null +++ b/tests/dep_graph/fileA.js @@ -0,0 +1,7 @@ +// @flow + +const B = require('./fileB'); + +class A {} + +module.exports = A; diff --git a/tests/dep_graph/fileB.js b/tests/dep_graph/fileB.js new file mode 100644 index 00000000000..c82ec251956 --- /dev/null +++ b/tests/dep_graph/fileB.js @@ -0,0 +1,7 @@ +// @flow + +const A = require("./fileA"); + +class B extends A {} + +module.exports = B; diff --git 
a/tests/dep_graph/test.sh b/tests/dep_graph/test.sh new file mode 100644 index 00000000000..80fe8cae586 --- /dev/null +++ b/tests/dep_graph/test.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +assert_ok "$FLOW" stop + +printf "\\nStart server in classic mode\\n" +start_flow . + +printf "\\nValue dep-graph should include both edges\\n" +assert_ok "$FLOW" graph dep-graph --strip-root --out classic-values.log +cat classic-values.log + +printf "\\n\\nType dep-graph should include both edges\\n" +assert_ok "$FLOW" graph dep-graph --strip-root --types --out classic-types.log +cat classic-types.log + +assert_ok "$FLOW" stop + +printf "\\n\\nStart server in types-first mode\\n" +start_flow . --types-first + +printf "\\nValue dep-graph should include both edges\\n" +assert_ok "$FLOW" graph dep-graph --strip-root --out types-first-values.log +cat types-first-values.log + +printf "\\n\\nType dep-graph should include one edge\\n" +assert_ok "$FLOW" graph dep-graph --strip-root --types --out types-first-types.log +cat types-first-types.log + +assert_ok "$FLOW" stop + +printf "\\n" diff --git a/tests/destructuring/.flowconfig b/tests/destructuring/.flowconfig index 15589e0b83e..2e4a2ec92b8 100644 --- a/tests/destructuring/.flowconfig +++ b/tests/destructuring/.flowconfig @@ -1,2 +1,3 @@ [options] module.system=haste +no_flowlib=false diff --git a/tests/destructuring/annot.js b/tests/destructuring/annot.js new file mode 100644 index 00000000000..62c37125cd3 --- /dev/null +++ b/tests/destructuring/annot.js @@ -0,0 +1,4 @@ +// @flow +var {p}: T = {p: "foo"}; +p = 42; // error: number ~> string +type T = {p: string}; diff --git a/tests/destructuring/annot_loop.js b/tests/destructuring/annot_loop.js new file mode 100644 index 00000000000..bfbc44d9fc0 --- /dev/null +++ b/tests/destructuring/annot_loop.js @@ -0,0 +1,20 @@ +// @flow + +// Destructuring self-referential annotations should terminate. + +type T = T; +function f({p}: T) { + p = 0; +} + + +// Including tricky cases + +type A = B; +type B = A; +function g({p}: A) { + p = 0; +} +function h({p}: B) { + p = 0; +} diff --git a/tests/destructuring/bad_annot.js b/tests/destructuring/bad_annot.js new file mode 100644 index 00000000000..579cd482386 --- /dev/null +++ b/tests/destructuring/bad_annot.js @@ -0,0 +1,31 @@ +// @flow + +/* The code in this test has a confusing error and should be improved. + * Bindings that result from destructuring an annotation should themselves + * behave like annotations. In some cases, annotations are not recursively + * annotations, like the class example below. + * + * For now, we use some sketchy unification logic to pin things down, but it + * does not behave sensibly for tvars with incompatible lower bounds. + * + * Ideally annotations would be recursively annotations, instead of shallowly. + * Another possibility would be to forego the annotation behavior for these + * kinds of destructurings. + */ + +class C { + p; + m(cond: boolean) { + if (cond) { + this.p = 0; + } else { + this.p = ""; + } + } +} + +function f({ + p // weird: string ~/~> number. C#p is inferred, with both number and string inflows +}: C) { + p = null; // weird: null ~/~> number. 
we pinned `p` to `number` +} diff --git a/tests/destructuring/defaults.js b/tests/destructuring/defaults.js index 0c5fcfb358b..374123c0758 100644 --- a/tests/destructuring/defaults.js +++ b/tests/destructuring/defaults.js @@ -76,7 +76,7 @@ function obj_prop_opt({p}:{p?:string}={p:0}) {} function obj_prop_maybe({p}:{p:?string}={p:0}) {} function obj_prop_union({p}:{p:number|string}={p:true}) {} -// TODO: union-of-objects upper bounds preserved through destructuring +// union-of-objects upper bounds preserved through destructuring function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} function default_expr_scope({a, b = a}) {} diff --git a/tests/destructuring/destructuring.exp b/tests/destructuring/destructuring.exp index 4f1a973e070..50651403055 100644 --- a/tests/destructuring/destructuring.exp +++ b/tests/destructuring/destructuring.exp @@ -1,3 +1,17 @@ +Error ----------------------------------------------------------------------------------------------------- annot.js:3:5 + +Cannot assign `42` to `p` because number [1] is incompatible with string [2]. + + annot.js:3:5 + 3| p = 42; // error: number ~> string + ^^ [1] + +References: + annot.js:2:6 + 2| var {p}: T = {p: "foo"}; + ^ [2] + + Error ------------------------------------------------------------------------------------------------ array_rest.js:7:2 Cannot cast `a` to undefined because number [1] is incompatible with undefined [2]. @@ -89,6 +103,37 @@ References: ^ [1] +Error ------------------------------------------------------------------------------------------------ bad_annot.js:28:3 + +string [1] is incompatible with number [2]. + + bad_annot.js:28:3 + 28| p // weird: string ~/~> number. C#p is inferred, with both number and string inflows + ^ + +References: + bad_annot.js:22:16 + 22| this.p = ""; + ^^ [1] + bad_annot.js:20:16 + 20| this.p = 0; + ^ [2] + + +Error ------------------------------------------------------------------------------------------------ bad_annot.js:30:7 + +Cannot assign `null` to `p` because null [1] is incompatible with number [2]. + + bad_annot.js:30:7 + 30| p = null; // weird: null ~/~> number. we pinned `p` to `number` + ^^^^ [1] + +References: + bad_annot.js:28:3 + 28| p // weird: string ~/~> number. C#p is inferred, with both number and string inflows + ^ [2] + + Error -------------------------------------------------------------------------------------------------- computed.js:2:2 Cannot cast `val1` to undefined because string [1] is incompatible with undefined [2]. @@ -223,15 +268,12 @@ number [1] is incompatible with string [2]. defaults.js:43:3 43| p = true // error: boolean ~> string - ^ + ^ [2] References: defaults.js:47:6 47| p: 0 // error: number ~> string ^ [1] - defaults.js:45:6 - 45| p: string - ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- defaults.js:43:7 @@ -243,9 +285,9 @@ boolean [1] is incompatible with string [2]. 
^^^^ [1] References: - defaults.js:45:6 - 45| p: string - ^^^^^^ [2] + defaults.js:43:3 + 43| p = true // error: boolean ~> string + ^ [2] Error ------------------------------------------------------------------------------------------------- defaults.js:49:4 @@ -257,9 +299,9 @@ Cannot cast `p` to undefined because string [1] is incompatible with undefined [ ^ References: - defaults.js:45:6 - 45| p: string - ^^^^^^ [1] + defaults.js:43:3 + 43| p = true // error: boolean ~> string + ^ [1] defaults.js:49:6 49| (p:void); // error: string ~> void ^^^^ [2] @@ -271,20 +313,17 @@ Cannot assign `{...}.p` to `p` because boolean [1] is incompatible with string [ defaults.js:53:3 53| p = true // error: boolean ~> string - ^ + ^ [2] References: defaults.js:53:7 53| p = true // error: boolean ~> string ^^^^ [1] - defaults.js:55:6 - 55| p: string - ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- defaults.js:56:5 -Cannot assign object literal to variable because number [1] is incompatible with string [2] in property `p`. +Cannot assign object literal to destructuring because number [1] is incompatible with string [2] in property `p`. defaults.js:56:5 v @@ -311,9 +350,9 @@ Cannot cast `p` to undefined because string [1] is incompatible with undefined [ ^ References: - defaults.js:55:6 - 55| p: string - ^^^^^^ [1] + defaults.js:53:3 + 53| p = true // error: boolean ~> string + ^ [1] defaults.js:59:4 59| (p:void); // error: string ~> void ^^^^ [2] @@ -349,7 +388,7 @@ References: Error ------------------------------------------------------------------------------------------------ defaults.js:63:24 -An indexer property is missing in null [1]. +An index signature declaring the expected key / value type is missing in null [1]. defaults.js:63:24 63| function arr_elem_err([x]=null) {} // error: element 0 cannot be accessed on null @@ -433,24 +472,9 @@ References: Error ------------------------------------------------------------------------------------------------ defaults.js:80:27 -string [1] is incompatible with number [2]. - - defaults.js:80:27 - 80| function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} - ^ - -References: - defaults.js:80:44 - 80| function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} - ^^^^^^ [1] - defaults.js:80:33 - 80| function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} - ^^^^^^ [2] - - -Error ------------------------------------------------------------------------------------------------ defaults.js:80:27 - -boolean [1] is incompatible with number [2]. +All branches are incompatible: + - Either boolean [1] is incompatible with number [2]. + - Or boolean [1] is incompatible with string [3]. defaults.js:80:27 80| function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} @@ -463,6 +487,9 @@ References: defaults.js:80:33 80| function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} ^^^^^^ [2] + defaults.js:80:44 + 80| function obj_prop_union2({p}:{p:number}|{p:string}={p:true}) {} + ^^^^^^ [3] Error --------------------------------------------------------------------------------------------- destructuring.js:4:3 @@ -610,7 +637,7 @@ References: Error ------------------------------------------------------------------------------------------- destructuring.js:20:15 -Cannot assign array literal to `c` because `c` [1] is incompatible with string [2]. +Cannot assign array literal to `c` because rest of array pattern [1] is incompatible with string [2]. 
destructuring.js:20:15 20| [a,,b,...c] = [0,1,true,3]; @@ -627,7 +654,7 @@ References: Error ------------------------------------------------------------------------------------------- destructuring.js:23:37 -Cannot assign `z` to `o` because property `x` is missing in `z` [1] but exists in object type [2]. +Cannot assign `z` to `o` because property `x` is missing in rest of object pattern [1] but exists in object type [2]. destructuring.js:23:37 23| var o:{x: string; y: number;} = z; @@ -694,7 +721,7 @@ References: Error ------------------------------------------------------------------------------------------- destructuring.js:35:23 -Cannot assign object literal to variable because string [1] is incompatible with number [2] in property `n`. +Cannot assign object literal to destructuring because string [1] is incompatible with number [2] in property `n`. destructuring.js:35:23 35| var {n}:{n: number} = {n: ""} @@ -753,7 +780,7 @@ References: Error -------------------------------------------------------------------------------------------- destructuring.js:46:4 -Cannot get `rest.baz` because property `baz` is missing in `rest` [1]. +Cannot get `rest.baz` because property `baz` is missing in rest of object pattern [1]. destructuring.js:46:4 46| (rest.baz: string); // error, rest is sealed @@ -841,6 +868,124 @@ Destructuring assignment must be initialized ^^^^^ +Error ------------------------------------------------------------------------------- destructuring_missing_props.js:3:6 + +Property `nonExistent` is missing in object type [1]. + + destructuring_missing_props.js:3:6 + 3| var {nonExistent, defaults = "hi", foo = 3} = x; // Error, missing nonExistent + ^^^^^^^^^^^ + +References: + destructuring_missing_props.js:2:16 + 2| declare var x: {| foo: string |}; + ^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------- destructuring_missing_props.js:5:2 + +Cannot cast `foo` to number because string [1] is incompatible with number [2]. + + destructuring_missing_props.js:5:2 + 5| (foo: number); // Error, number | string + ^^^ + +References: + destructuring_missing_props.js:2:24 + 2| declare var x: {| foo: string |}; + ^^^^^^ [1] + destructuring_missing_props.js:5:7 + 5| (foo: number); // Error, number | string + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------- destructuring_missing_props.js:9:6 + +Property `nonExistent2` is missing in object type [1]. + + destructuring_missing_props.js:9:6 + 9| var {nonExistent2, defaults2 = "hi", bar = 3} = y; // Error, missing nonExistent2 + ^^^^^^^^^^^^ + +References: + destructuring_missing_props.js:8:16 + 8| declare var y: {| bar: string |}; + ^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------ destructuring_missing_props.js:11:2 + +Cannot cast `bar` to string because number [1] is incompatible with string [2]. + + destructuring_missing_props.js:11:2 + 11| (bar: string); // Error, number | string + ^^^ + +References: + destructuring_missing_props.js:9:44 + 9| var {nonExistent2, defaults2 = "hi", bar = 3} = y; // Error, missing nonExistent2 + ^ [1] + destructuring_missing_props.js:11:7 + 11| (bar: string); // Error, number | string + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------ destructuring_missing_props.js:14:7 + +Property `baz` is missing in null [1]. 
+ + destructuring_missing_props.js:14:7 + 14| var { baz = 15150 } = null // Error, baz is missing in null (you can't destructure null) + ^^^ + +References: + destructuring_missing_props.js:14:23 + 14| var { baz = 15150 } = null // Error, baz is missing in null (you can't destructure null) + ^^^^ [1] + + +Error ------------------------------------------------------------------------------ destructuring_missing_props.js:17:7 + +Property `grunt` is missing in object type [1]. + + destructuring_missing_props.js:17:7 + 17| var { grunt = 15210 } = z; // Error, grunt missing in inexact object type + ^^^^^ + +References: + destructuring_missing_props.js:16:17 + 16| declare var z : { thud : string }; + ^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------ destructuring_missing_props.js:21:7 + +Property `qux` is missing in object literal [1]. + + destructuring_missing_props.js:21:7 + 21| var { qux = "string" } = obj; // Error, qux missing + ^^^ + +References: + destructuring_missing_props.js:20:13 + 20| const obj = { __proto__ : proto, baz : "string" }; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------- destructuring_missing_props.js:26:47 + +Property `regularProp` is missing in props [1]. + + destructuring_missing_props.js:26:47 + 26| function Component({defaultProps = "default", regularProp}) { // Error, missing regularProp + ^^^^^^^^^^^ + +References: + destructuring_missing_props.js:33:12 + 33| const _b = ; + ^^^^^^^^^^^^^ [1] + + Error -------------------------------------------------------------------------------------- destructuring_param.js:5:17 Strict mode function may not have duplicate parameter names @@ -865,7 +1010,7 @@ References: Error ----------------------------------------------------------------------------------------------- object_rest.js:5:1 -Cannot assign `0` to `o1_rest.x` because property `x` is missing in `o1_rest` [1]. +Cannot assign `0` to `o1_rest.x` because property `x` is missing in rest of object pattern [1]. object_rest.js:5:1 5| o1_rest.x = 0; // error: rest result is sealed @@ -879,7 +1024,7 @@ References: Error ---------------------------------------------------------------------------------------------- object_rest.js:11:1 -Cannot assign `0` to `o2_rest.x` because property `x` is missing in `o2_rest` [1]. +Cannot assign `0` to `o2_rest.x` because property `x` is missing in rest of object pattern [1]. object_rest.js:11:1 11| o2_rest.x = 0; // error: rest result is sealed @@ -893,7 +1038,7 @@ References: Error ---------------------------------------------------------------------------------------------- object_rest.js:16:1 -Cannot assign `0` to `o3_rest.x` because property `x` is missing in `o3_rest` [1]. +Cannot assign `0` to `o3_rest.x` because property `x` is missing in rest of object pattern [1]. 
object_rest.js:16:1 16| o3_rest.x = 0; // error: rest result is sealed @@ -937,4 +1082,4 @@ References: -Found 69 errors +Found 79 errors diff --git a/tests/destructuring/destructuring_missing_props.js b/tests/destructuring/destructuring_missing_props.js new file mode 100644 index 00000000000..035f22db0c5 --- /dev/null +++ b/tests/destructuring/destructuring_missing_props.js @@ -0,0 +1,40 @@ +//@flow +declare var x: {| foo: string |}; +var {nonExistent, defaults = "hi", foo = 3} = x; // Error, missing nonExistent +(defaults: string); +(foo: number); // Error, number | string +(foo: number | string); + +declare var y: {| bar: string |}; +var {nonExistent2, defaults2 = "hi", bar = 3} = y; // Error, missing nonExistent2 +(defaults2: string); +(bar: string); // Error, number | string +(bar: number | string); + +var { baz = 15150 } = null // Error, baz is missing in null (you can't destructure null) + +declare var z : { thud : string }; +var { grunt = 15210 } = z; // Error, grunt missing in inexact object type + +const proto : {| foo : number |} = { foo : 3 }; +const obj = { __proto__ : proto, baz : "string" }; +var { qux = "string" } = obj; // Error, qux missing + +// Begin React examples + +const React = require('react'); +function Component({defaultProps = "default", regularProp}) { // Error, missing regularProp + (defaultProps: string); + (regularProp: number); + return null; +} + +const _a = <Component regularProp={3} />; +const _b = <Component />; + +class A { + prop: boolean; + // No err! prop will always be initialized to a boolean + constructor({prop = false}: {| prop: boolean |} = {}) { + } +} diff --git a/tests/dictionary/dictionary.exp b/tests/dictionary/dictionary.exp index 883238be57b..a6b9aa810f6 100644 --- a/tests/dictionary/dictionary.exp +++ b/tests/dictionary/dictionary.exp @@ -69,8 +69,8 @@ Cannot cast `o.toString()` to boolean because string [1] is incompatible with bo ^^^^^^^^^^^^ References: - /core.js:67:17 - 67| toString(): string; + /core.js:84:17 + 84| toString(): string; ^^^^^^ [1] dictionary.js:94:18 94| (o.toString(): boolean); // error: string ~> boolean @@ -502,8 +502,8 @@ Error --------------------------------------------------------------------------------------------- dictionary.js:227:10 -Cannot return `x` because an indexer property is missing in object type [1] but exists in object type [2] in array -element. +Cannot return `x` because an index signature declaring the expected key / value type is missing in object type [1] but +exists in object type [2] in array element. dictionary.js:227:10 227| return x; // error: if allowed, could write {p:X,q:Y} into `x` @@ -520,8 +520,8 @@ Error --------------------------------------------------------------------------------------------- dictionary.js:233:10 -Cannot return `x` because an indexer property is missing in object type [1] but exists in object type [2] in array -element. +Cannot return `x` because an index signature declaring the expected key / value type is missing in object type [1] but +exists in object type [2] in array element. dictionary.js:233:10 233| return x; // error: if allowed, could write {p:X,q:Y} into returned array @@ -946,8 +946,8 @@ References: ^^^^^^^ [2] Error -------------------------------------------------------------------------------------------- incompatible.js:42:10 -Cannot return `x` because an indexer property is missing in object type [1] but exists in object type [2] in array -element.
+Cannot return `x` because an index signature declaring the expected key / value type is missing in object type [1] but +exists in object type [2] in array element. incompatible.js:42:10 42| return x; @@ -964,8 +964,8 @@ References: Error -------------------------------------------------------------------------------------------- incompatible.js:47:10 -Cannot return `x` because an indexer property is missing in object type [1] but exists in object type [2] in array -element. +Cannot return `x` because an index signature declaring the expected key / value type is missing in object type [1] but +exists in object type [2] in array element. incompatible.js:47:10 47| return x; diff --git a/tests/dictionary/union_disambiguate.js b/tests/dictionary/union_disambiguate.js new file mode 100644 index 00000000000..76b4c099b6d --- /dev/null +++ b/tests/dictionary/union_disambiguate.js @@ -0,0 +1,6 @@ +/* A function is arguably compatible with both branches of the union below, but + * we should choose the first branch, even without annotations on the function + * parameter and return types. */ +type T = (X => X) | X; +type D = { [string]: number }; +(x => x: T); diff --git a/tests/dir-coverage/.flowconfig b/tests/dir-coverage/.flowconfig new file mode 100644 index 00000000000..8b137891791 --- /dev/null +++ b/tests/dir-coverage/.flowconfig @@ -0,0 +1 @@ + diff --git a/tests/dir-coverage/.testconfig b/tests/dir-coverage/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/dir-coverage/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/dir-coverage/a.js b/tests/dir-coverage/a.js new file mode 100644 index 00000000000..83252737e12 --- /dev/null +++ b/tests/dir-coverage/a.js @@ -0,0 +1,4 @@ +// @flow + +let x : $Trusted = 3; +module.exports = (x : any); diff --git a/tests/dir-coverage/a.js.ignored b/tests/dir-coverage/a.js.ignored new file mode 100644 index 00000000000..e37f1a72d4d --- /dev/null +++ b/tests/dir-coverage/a.js.ignored @@ -0,0 +1,4 @@ +// @flow + +let x : $Trusted = 4; +module.exports = (x : any); diff --git a/tests/dir-coverage/b.js b/tests/dir-coverage/b.js new file mode 100644 index 00000000000..2dbf6b9db4b --- /dev/null +++ b/tests/dir-coverage/b.js @@ -0,0 +1,4 @@ +// @flow + +const c = require('./folder/c') +let x : string = c; diff --git a/tests/dir-coverage/cycle/cycle1.js b/tests/dir-coverage/cycle/cycle1.js new file mode 100644 index 00000000000..ab03ce7be21 --- /dev/null +++ b/tests/dir-coverage/cycle/cycle1.js @@ -0,0 +1,8 @@ +//@flow + +const cycle3 = require('cycle3'); + +let x : number = cycle3.f; +let y : string = cycle3.g; + +module.exports = {x : number, y : string}; diff --git a/tests/dir-coverage/cycle/cycle2.js b/tests/dir-coverage/cycle/cycle2.js new file mode 100644 index 00000000000..596eb5203fa --- /dev/null +++ b/tests/dir-coverage/cycle/cycle2.js @@ -0,0 +1,9 @@ +//@flow + +const cycle1 = require('cycle1'); + +let a : number = cycle1.x; +let b : string = cycle1.y; +let c : number = 4; + +module.exports = {a : number, b : string, c : number}; diff --git a/tests/dir-coverage/cycle/cycle3.js b/tests/dir-coverage/cycle/cycle3.js new file mode 100644 index 00000000000..3c7105440d4 --- /dev/null +++ b/tests/dir-coverage/cycle/cycle3.js @@ -0,0 +1,8 @@ +//@flow + +const cycle2 = require('cycle2'); + +let f : number = cycle2.a + cycle2.c; +let g : string = cycle2.b; + +module.exports = {x : number, y : string}; diff --git a/tests/dir-coverage/dir-coverage.exp b/tests/dir-coverage/dir-coverage.exp new file mode 100644 index 
00000000000..3aba8afedbe --- /dev/null +++ b/tests/dir-coverage/dir-coverage.exp @@ -0,0 +1,386 @@ +----------------------------- +restart server +----------------------------- + +----------------------------- +root +----------------------------- + + +Coverage results from 13 file(s): + +other_folder/i.js: 33.33% (4 of 12 expressions) +other_folder/h.js: 100.00% (9 of 9 expressions) +match_coverage/coverage.js: 24.39% (10 of 41 expressions) +folder/subfolder/j.js: 100.00% (6 of 6 expressions) +folder/e.js: 37.50% (3 of 8 expressions) +folder/d.js: 50.00% (3 of 6 expressions) +folder/c.js: 100.00% (6 of 6 expressions) +f.js: 37.50% (3 of 8 expressions) +cycle/cycle3.js: 42.11% (8 of 19 expressions) +cycle/cycle2.js: 52.63% (10 of 19 expressions) +cycle/cycle1.js: 50.00% (8 of 16 expressions) +b.js: 100.00% (6 of 6 expressions) +a.js: 57.14% (4 of 7 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 13 +Expressions : + Covered : 80 + Total : 163 + Covered Percentage : 49.08% + +----------------------------- +folder +----------------------------- + + +Coverage results from 4 file(s): + +folder/subfolder/j.js: 100.00% (6 of 6 expressions) +folder/e.js: 37.50% (3 of 8 expressions) +folder/d.js: 50.00% (3 of 6 expressions) +folder/c.js: 100.00% (6 of 6 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 4 +Expressions : + Covered : 18 + Total : 26 + Covered Percentage : 69.23% + +----------------------------- +cycle +----------------------------- + + +Coverage results from 3 file(s): + +cycle/cycle3.js: 42.11% (8 of 19 expressions) +cycle/cycle2.js: 52.63% (10 of 19 expressions) +cycle/cycle1.js: 50.00% (8 of 16 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 3 +Expressions : + Covered : 26 + Total : 54 + Covered Percentage : 48.15% + +----------------------------- +match_coverage +----------------------------- + + +Coverage results from 1 file(s): + +match_coverage/coverage.js: 24.39% (10 of 41 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 1 +Expressions : + Covered : 10 + Total : 41 + Covered Percentage : 24.39% + +----------------------------- +other_folder +----------------------------- + + +Coverage results from 2 file(s): + +other_folder/i.js: 33.33% (4 of 12 expressions) +other_folder/h.js: 100.00% (9 of 9 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 2 +Expressions : + Covered : 13 + Total : 21 + Covered Percentage : 61.90% + +----------------------------- +folder/subfolder +----------------------------- + + +Coverage results from 1 file(s): + +folder/subfolder/j.js: 100.00% (6 of 6 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 1 +Expressions : + Covered : 6 + Total : 6 + Covered Percentage : 100.00% + +----------------------------- +file list +----------------------------- + + +Coverage results from 3 file(s): + +folder/subfolder/j.js: 100.00% (6 of 6 expressions) +folder/d.js: 50.00% (3 of 6 expressions) +a.js: 57.14% (4 of 7 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 3 +Expressions : + Covered : 13 + Total : 19 + Covered 
Percentage : 68.42% + +----------------------------- +file and dir list +----------------------------- + + +Coverage results from 5 file(s): + +folder/subfolder/j.js: 100.00% (6 of 6 expressions) +folder/e.js: 37.50% (3 of 8 expressions) +folder/d.js: 50.00% (3 of 6 expressions) +folder/c.js: 100.00% (6 of 6 expressions) +a.js: 57.14% (4 of 7 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 5 +Expressions : + Covered : 22 + Total : 33 + Covered Percentage : 66.67% + +----------------------------- +files +----------------------------- + + +Coverage results from 3 file(s): + +folder/subfolder/j.js: 100.00% (6 of 6 expressions) +folder/d.js: 50.00% (3 of 6 expressions) +a.js: 57.14% (4 of 7 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 3 +Expressions : + Covered : 13 + Total : 19 + Covered Percentage : 68.42% + +----------------------------- +json +----------------------------- + +{ + "files":[ + {"file":"a.js","percentage":57.14,"covered":4,"total":7}, + {"file":"folder/d.js","percentage":50.00,"covered":3,"total":6}, + {"file":"folder/subfolder/j.js","percentage":100.00,"covered":6,"total":6} + ], + "statistics":{ + "files_in_directory":3, + "covered_expressions":13, + "total_expressions":19, + "percentage":68.42 + } +} +----------------------------- +root info survives recheck +----------------------------- + +Coverage results from 13 file(s): + +other_folder/i.js: 33.33% (4 of 12 expressions) +other_folder/h.js: 100.00% (9 of 9 expressions) +match_coverage/coverage.js: 24.39% (10 of 41 expressions) +folder/subfolder/j.js: 100.00% (6 of 6 expressions) +folder/e.js: 37.50% (3 of 8 expressions) +folder/d.js: 50.00% (3 of 6 expressions) +folder/c.js: 100.00% (6 of 6 expressions) +f.js: 37.50% (3 of 8 expressions) +cycle/cycle3.js: 42.11% (8 of 19 expressions) +cycle/cycle2.js: 52.63% (10 of 19 expressions) +cycle/cycle1.js: 50.00% (8 of 16 expressions) +b.js: 100.00% (6 of 6 expressions) +a.js: 57.14% (4 of 7 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 13 +Expressions : + Covered : 80 + Total : 163 + Covered Percentage : 49.08% + +----------------------------- +trust +----------------------------- + + +Coverage results from 13 file(s): + +other_folder/i.js: 16.67% trusted (2 of 12 expressions), 33.33% covered (4 of 12 expressions) +other_folder/h.js: 77.78% trusted (7 of 9 expressions), 100.00% covered (9 of 9 expressions) +match_coverage/coverage.js: 24.39% trusted (10 of 41 expressions), 24.39% covered (10 of 41 expressions) +folder/subfolder/j.js: 16.67% trusted (1 of 6 expressions), 100.00% covered (6 of 6 expressions) +folder/e.js: 12.50% trusted (1 of 8 expressions), 37.50% covered (3 of 8 expressions) +folder/d.js: 16.67% trusted (1 of 6 expressions), 50.00% covered (3 of 6 expressions) +folder/c.js: 83.33% trusted (5 of 6 expressions), 100.00% covered (6 of 6 expressions) +f.js: 12.50% trusted (1 of 8 expressions), 37.50% covered (3 of 8 expressions) +cycle/cycle3.js: 5.26% trusted (1 of 19 expressions), 42.11% covered (8 of 19 expressions) +cycle/cycle2.js: 15.79% trusted (3 of 19 expressions), 52.63% covered (10 of 19 expressions) +cycle/cycle1.js: 6.25% trusted (1 of 16 expressions), 50.00% covered (8 of 16 expressions) +b.js: 83.33% trusted (5 of 6 expressions), 100.00% covered (6 of 6 expressions) +a.js: 42.86% trusted (3 of 7 
expressions), 57.14% covered (4 of 7 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 13 +Expressions : + Trusted : 41 + Covered : 80 + Total : 163 + Trust Percentage : 25.15% + Covered Percentage : 49.08% + +{ + "files":[ + { + "file":"a.js", + "trusted_percentage":42.86, + "percentage":57.14, + "trusted":3, + "covered":4, + "total":7 + }, + { + "file":"b.js", + "trusted_percentage":83.33, + "percentage":100.00, + "trusted":5, + "covered":6, + "total":6 + }, + { + "file":"cycle/cycle1.js", + "trusted_percentage":6.25, + "percentage":50.00, + "trusted":1, + "covered":8, + "total":16 + }, + { + "file":"cycle/cycle2.js", + "trusted_percentage":15.79, + "percentage":52.63, + "trusted":3, + "covered":10, + "total":19 + }, + { + "file":"cycle/cycle3.js", + "trusted_percentage":5.26, + "percentage":42.11, + "trusted":1, + "covered":8, + "total":19 + }, + { + "file":"f.js", + "trusted_percentage":12.50, + "percentage":37.50, + "trusted":1, + "covered":3, + "total":8 + }, + { + "file":"folder/c.js", + "trusted_percentage":83.33, + "percentage":100.00, + "trusted":5, + "covered":6, + "total":6 + }, + { + "file":"folder/d.js", + "trusted_percentage":16.67, + "percentage":50.00, + "trusted":1, + "covered":3, + "total":6 + }, + { + "file":"folder/e.js", + "trusted_percentage":12.50, + "percentage":37.50, + "trusted":1, + "covered":3, + "total":8 + }, + { + "file":"folder/subfolder/j.js", + "trusted_percentage":16.67, + "percentage":100.00, + "trusted":1, + "covered":6, + "total":6 + }, + { + "file":"match_coverage/coverage.js", + "trusted_percentage":24.39, + "percentage":24.39, + "trusted":10, + "covered":10, + "total":41 + }, + { + "file":"other_folder/h.js", + "trusted_percentage":77.78, + "percentage":100.00, + "trusted":7, + "covered":9, + "total":9 + }, + { + "file":"other_folder/i.js", + "trusted_percentage":16.67, + "percentage":33.33, + "trusted":2, + "covered":4, + "total":12 + } + ], + "statistics":{ + "files_in_directory":13, + "trusted_expressions":41, + "covered_expressions":80, + "total_expressions":163, + "trusted_percentage":25.15, + "percentage":49.08 + } +} diff --git a/tests/dir-coverage/f.js b/tests/dir-coverage/f.js new file mode 100644 index 00000000000..0f2fdb10e48 --- /dev/null +++ b/tests/dir-coverage/f.js @@ -0,0 +1,4 @@ +// @flow + +const e = require('./folder/e'); +module.exports = e; diff --git a/tests/dir-coverage/files.txt b/tests/dir-coverage/files.txt new file mode 100644 index 00000000000..083288221bb --- /dev/null +++ b/tests/dir-coverage/files.txt @@ -0,0 +1,3 @@ +a.js +folder/d.js +folder/subfolder/j.js diff --git a/tests/dir-coverage/folder/c.js b/tests/dir-coverage/folder/c.js new file mode 100644 index 00000000000..a82837c7109 --- /dev/null +++ b/tests/dir-coverage/folder/c.js @@ -0,0 +1,4 @@ +// @flow + +let x : $Trusted = "hello"; +module.exports = x; diff --git a/tests/dir-coverage/folder/d.js b/tests/dir-coverage/folder/d.js new file mode 100644 index 00000000000..d6b73e4ade8 --- /dev/null +++ b/tests/dir-coverage/folder/d.js @@ -0,0 +1,4 @@ +//@flow + +const a = require('../a') +let x : number = a; diff --git a/tests/dir-coverage/folder/e.js b/tests/dir-coverage/folder/e.js new file mode 100644 index 00000000000..cdca903c3ec --- /dev/null +++ b/tests/dir-coverage/folder/e.js @@ -0,0 +1,4 @@ +// @flow + +const f = require('../f'); +module.exports = f; diff --git a/tests/dir-coverage/folder/g.js b/tests/dir-coverage/folder/g.js new file mode 100644 index 00000000000..fe2250feef2 
--- /dev/null +++ b/tests/dir-coverage/folder/g.js @@ -0,0 +1,2 @@ +const h = require('../other_folder/h'); +(h : string); diff --git a/tests/dir-coverage/folder/subfolder/j.js b/tests/dir-coverage/folder/subfolder/j.js new file mode 100644 index 00000000000..0c67655922a --- /dev/null +++ b/tests/dir-coverage/folder/subfolder/j.js @@ -0,0 +1,4 @@ +// @flow + +const h = require('../../other_folder/h'); +let a = h; diff --git a/tests/dir-coverage/match_coverage/coverage.js b/tests/dir-coverage/match_coverage/coverage.js new file mode 100644 index 00000000000..07fd00f817b --- /dev/null +++ b/tests/dir-coverage/match_coverage/coverage.js @@ -0,0 +1,42 @@ +// @flow + +const a1 = 5; +const a2: any = a1 - 2; +const a3 = a2 + 1; + +type E = empty; + +type U1 = any | number; +type U2 = number | any; +type U3 = empty | number; +type U4 = number | empty; +type U5 = empty | any; +type U6 = any | empty; +type U7 = number | number; + +type I1 = any & number; +type I2 = number & any; +type I3 = empty & number; +type I4 = number & empty; +type I5 = empty & any; +type I6 = any & empty; +type I7 = number & number; + +type R = R; + +type RU1 = RU1 | any; +type RU2 = RU2 | empty; +type RU3 = any | RU3; +type RU4 = empty | RU4; +type RU5 = RU5; +type RU6 = RU6 | number; +type RU7 = number | RU7; +type RU8 = RU8 | RU8; + +type RI1 = RI1 & any; +type RI2 = RI2 & empty; +type RI3 = any & RI3; +type RI4 = empty & RI4; +type RI5 = RI5 & number; +type RI6 = number & RI6; +type RI7 = RI7 & RI7; diff --git a/tests/dir-coverage/other_folder/h.js b/tests/dir-coverage/other_folder/h.js new file mode 100644 index 00000000000..5dc1f9d5484 --- /dev/null +++ b/tests/dir-coverage/other_folder/h.js @@ -0,0 +1,4 @@ +// @flow + +const c = require('../folder/c'); +module.exports = (c : string); diff --git a/tests/dir-coverage/other_folder/i.js b/tests/dir-coverage/other_folder/i.js new file mode 100644 index 00000000000..5c7948e8a58 --- /dev/null +++ b/tests/dir-coverage/other_folder/i.js @@ -0,0 +1,5 @@ +// @flow + +const a = require('../a'); +const g = require ('../folder/g') +let m = a + g; diff --git a/tests/dir-coverage/test.sh b/tests/dir-coverage/test.sh new file mode 100644 index 00000000000..c947d87331c --- /dev/null +++ b/tests/dir-coverage/test.sh @@ -0,0 +1,72 @@ +#!/bin/bash +echo "-----------------------------" +echo "restart server" +echo "-----------------------------" +echo +assert_ok "$FLOW" stop +assert_ok "$FLOW" start +echo "-----------------------------" +echo "root" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root . 
+echo "-----------------------------" +echo "folder" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root folder +echo "-----------------------------" +echo "cycle" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root cycle +echo "-----------------------------" +echo "match_coverage" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root match_coverage +echo "-----------------------------" +echo "other_folder" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root other_folder +echo "-----------------------------" +echo "folder/subfolder" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root folder/subfolder +echo "-----------------------------" +echo "file list" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root a.js folder/d.js folder/subfolder/j.js +echo "-----------------------------" +echo "file and dir list" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root a.js folder folder/d.js +echo "-----------------------------" +echo "files" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root --input-file files.txt +echo "-----------------------------" +echo "json" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root --json --pretty --input-file files.txt +echo "-----------------------------" +echo "root info survives recheck" +echo "-----------------------------" +assert_ok mv a.js.ignored a.js +assert_ok "$FLOW" force-recheck a.js +assert_ok "$FLOW" batch-coverage --strip-root --wait-for-recheck true . + +"$FLOW" stop +"$FLOW" start --trust-mode=check +echo "-----------------------------" +echo "trust" +echo "-----------------------------" +echo +assert_ok "$FLOW" batch-coverage --strip-root --show-trust . +assert_ok "$FLOW" batch-coverage --strip-root --json --pretty --show-trust . diff --git a/tests/direct_dependent_files_cache/.testconfig b/tests/direct_dependent_files_cache/.testconfig new file mode 100644 index 00000000000..7e0f5b794ef --- /dev/null +++ b/tests/direct_dependent_files_cache/.testconfig @@ -0,0 +1,2 @@ +shell: test.sh +auto_start: false diff --git a/tests/direct_dependent_files_cache/direct_dependent_files_cache.exp b/tests/direct_dependent_files_cache/direct_dependent_files_cache.exp new file mode 100644 index 00000000000..4dd4396f7f0 --- /dev/null +++ b/tests/direct_dependent_files_cache/direct_dependent_files_cache.exp @@ -0,0 +1,283 @@ +== Initial there should be 2 errors == +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because boolean [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + src/node_modules/dependency.js:3:16 + 3| export default true; + ^^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. 
+ + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 2 errors + +== Delete unrelated.js and now there is 1 error == + Resolved requires are unchanged + Resolved requires are unchanged + +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because boolean [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + src/node_modules/dependency.js:3:16 + 3| export default true; + ^^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + + +Found 1 error + +== Restore unrelated.js and back to 2 errors == + Resolved requires are unchanged + Resolved requires are unchanged + +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because boolean [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + src/node_modules/dependency.js:3:16 + 3| export default true; + ^^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 2 errors + +== Delete src/node_modules/dependency.js changes an error == + Resolved requires are unchanged + Resolved requires changed + +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because number [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + node_modules/dependency.js:3:16 + 3| export default 123; + ^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 2 errors + +== Restore src/node_modules/dependency.js change it back == + Resolved requires are unchanged + Resolved requires changed + +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because boolean [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + src/node_modules/dependency.js:3:16 + 3| export default true; + ^^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. 
+ + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 2 errors + +== Remove the import from dependent.js == + Resolved requires changed + Resolved requires are unchanged + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 1 error + +== Add the import back to dependent.js == + Resolved requires changed + Resolved requires are unchanged + +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because boolean [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + src/node_modules/dependency.js:3:16 + 3| export default true; + ^^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 2 errors + +== Adding code that doesn't import has no effect on dep graph == + Resolved requires are unchanged + Resolved requires are unchanged + +Error --------------------------------------------------------------------------------------------- src/dependent.js:4:2 + +Cannot cast `dependency` to string because boolean [1] is incompatible with string [2]. + + src/dependent.js:4:2 + 4| (dependency: string); + ^^^^^^^^^^ + +References: + src/node_modules/dependency.js:3:16 + 3| export default true; + ^^^^ [1] + src/dependent.js:4:14 + 4| (dependency: string); + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------ src/node_modules/dependency.js:4:24 + +Cannot assign `123` to `foo` because number [1] is incompatible with boolean [2]. + + src/node_modules/dependency.js:4:24 + 4| export var foo: bool = 123 + ^^^ [1] + +References: + src/node_modules/dependency.js:4:17 + 4| export var foo: bool = 123 + ^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- src/unrelated.js:3:19 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. 
+ + src/unrelated.js:3:19 + 3| const x: string = 123; + ^^^ [1] + +References: + src/unrelated.js:3:10 + 3| const x: string = 123; + ^^^^^^ [2] + + + +Found 3 errors diff --git a/tests/direct_dependent_files_cache/node_modules/dependency.js b/tests/direct_dependent_files_cache/node_modules/dependency.js new file mode 100644 index 00000000000..507fca71a47 --- /dev/null +++ b/tests/direct_dependent_files_cache/node_modules/dependency.js @@ -0,0 +1,3 @@ +// @flow + +export default 123; diff --git a/tests/direct_dependent_files_cache/src/.flowconfig b/tests/direct_dependent_files_cache/src/.flowconfig new file mode 100644 index 00000000000..c69a323fad5 --- /dev/null +++ b/tests/direct_dependent_files_cache/src/.flowconfig @@ -0,0 +1,2 @@ +[include] +../node_modules diff --git a/tests/direct_dependent_files_cache/src/dependent.js b/tests/direct_dependent_files_cache/src/dependent.js new file mode 100644 index 00000000000..4b32c696f54 --- /dev/null +++ b/tests/direct_dependent_files_cache/src/dependent.js @@ -0,0 +1,4 @@ +// @flow + +import dependency from "dependency"; +(dependency: string); diff --git a/tests/direct_dependent_files_cache/src/node_modules/dependency.js b/tests/direct_dependent_files_cache/src/node_modules/dependency.js new file mode 100644 index 00000000000..f6d9a053c7e --- /dev/null +++ b/tests/direct_dependent_files_cache/src/node_modules/dependency.js @@ -0,0 +1,3 @@ +// @flow + +export default true; diff --git a/tests/direct_dependent_files_cache/src/unrelated.js b/tests/direct_dependent_files_cache/src/unrelated.js new file mode 100644 index 00000000000..ae376f18b05 --- /dev/null +++ b/tests/direct_dependent_files_cache/src/unrelated.js @@ -0,0 +1,3 @@ +// @flow + +const x: string = 123; diff --git a/tests/direct_dependent_files_cache/test.sh b/tests/direct_dependent_files_cache/test.sh new file mode 100644 index 00000000000..adb4ac27c4e --- /dev/null +++ b/tests/direct_dependent_files_cache/test.sh @@ -0,0 +1,86 @@ +#!/bin/bash + +log_file="$FLOW_TEMP_DIR/direct_dependent_files_cache.log" + +start_flow src + +printf "== Initial there should be 2 errors ==\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Delete unrelated.js and now there is 1 error ==\n" +# Unchanged during `ResolvedRequires` (we fail to delete them) +# Unchanged during `ReresolveDirectDependents` +assert_ok mv src/unrelated{.js,.js.ignored} +assert_ok "$FLOW" force-recheck --profile src/unrelated.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Restore unrelated.js and back to 2 errors ==\n" +# Unchanged during `ResolvedRequires` (we forget to delete them so still there) +# Unchanged during `ReresolveDirectDependents` +assert_ok mv src/unrelated{.js.ignored,.js} +assert_ok "$FLOW" force-recheck --profile src/unrelated.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Delete src/node_modules/dependency.js changes an error ==\n" +# Unchanged during `ResolvedRequires` (we fail to delete them) +# Changed during `ReresolveDirectDependents` +assert_ok mv src/node_modules/dependency{.js,.js.ignored} +assert_ok "$FLOW" force-recheck --profile src/node_modules/dependency.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Restore 
src/node_modules/dependency.js change it back ==\n" +# Unchanged during `ResolvedRequires` (we forget to delete them so still there) +# Changed during `ReresolveDirectDependents` +assert_ok mv src/node_modules/dependency{.js.ignored,.js} +assert_ok "$FLOW" force-recheck --profile src/node_modules/dependency.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Remove the import from dependent.js ==\n" +# Changed during `ResolvedRequires` +# Unchanged during `ReresolveDirectDependents` +assert_ok mv src/dependent{.js,.js.ignored} +assert_ok echo "// @flow" > src/dependent.js +assert_ok "$FLOW" force-recheck --profile src/dependent.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Add the import back to dependent.js ==\n" +# Changed during `ResolvedRequires` +# Unchanged during `ReresolveDirectDependents` +assert_ok mv src/dependent{.js.ignored,.js} +assert_ok "$FLOW" force-recheck --profile src/dependent.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src + +printf "\n== Adding code that doesn't import has no effect on dep graph ==\n" +# Unchanged during `ResolvedRequires` +# Unchanged during `ReresolveDirectDependents` +assert_ok echo "export var foo: bool = 123" >> src/node_modules/dependency.js +assert_ok "$FLOW" force-recheck --profile src/node_modules/dependency.js \ + > /dev/null + +grep "Resolved requires" "$log_file" | tail -n 2 | cut -d"]" -f 2 +printf "\n" +assert_errors "$FLOW" status --no-auto-start src diff --git a/tests/dom/Document.js b/tests/dom/Document.js index fa37d38f93d..f7e315de89e 100644 --- a/tests/dom/Document.js +++ b/tests/dom/Document.js @@ -9,5 +9,8 @@ let tests = [ (document.createElement('select'): HTMLSelectElement); (document.querySelector('select'): HTMLSelectElement | null); (document.createElement('hr'): HTMLElement); // GH #3752 + }, + function (document: Document) { + (document.head: HTMLHeadElement | null); } ]; diff --git a/tests/dom/HTMLCollection.js b/tests/dom/HTMLCollection.js index fcac55eabf2..a27d821a3f6 100644 --- a/tests/dom/HTMLCollection.js +++ b/tests/dom/HTMLCollection.js @@ -26,3 +26,7 @@ if (el) el.className // valid for (var field of collection) { field.className // valid } + +// covariance +declare var Anchors: HTMLCollection; +(Anchors: HTMLCollection); diff --git a/tests/dom/HTMLElement.js b/tests/dom/HTMLElement.js index d3942a27571..cc9805e027c 100644 --- a/tests/dom/HTMLElement.js +++ b/tests/dom/HTMLElement.js @@ -14,5 +14,17 @@ let tests = [ element.scrollIntoView({ behavior: 'invalid' }); element.scrollIntoView({ block: 'invalid' }); element.scrollIntoView(1); + }, + + // focus + function(element: HTMLElement) { + element.focus(); + element.focus({}); + element.focus({ preventScroll: true }); + element.focus({ preventScroll: false }); + + // fails + element.focus({ preventScroll: 'invalid' }); + element.focus(1); } ]; diff --git a/tests/dom/dom.exp b/tests/dom/dom.exp index db33d8cb2c1..528859bd9f8 100644 --- a/tests/dom/dom.exp +++ b/tests/dom/dom.exp @@ -7,8 +7,8 @@ Cannot call `ctx.moveTo` with `'0'` bound to `x` because string [1] is incompati ^^^ [1] References: - /dom.js:1896:13 - 1896| moveTo(x: number, y: number): void; + /dom.js:2039:13 + 2039| moveTo(x: number, y: number): void; ^^^^^^ [2] @@ 
-21,8 +21,8 @@ Cannot call `ctx.moveTo` with `'1'` bound to `y` because string [1] is incompati ^^^ [1] References: - /dom.js:1896:24 - 1896| moveTo(x: number, y: number): void; + /dom.js:2039:24 + 2039| moveTo(x: number, y: number): void; ^^^^^^ [2] @@ -35,8 +35,8 @@ Cannot call `ClipboardEvent` with `'invalid'` bound to `type` because string [1] ^^^^^^^^^ [1] References: - /dom.js:478:21 - 478| constructor(type: ClipboardEventTypes, eventInit?: ClipboardEvent$Init): void; + /dom.js:563:21 + 563| constructor(type: ClipboardEventTypes, eventInit?: ClipboardEvent$Init): void; ^^^^^^^^^^^^^^^^^^^ [2] @@ -50,12 +50,9 @@ object literal [1] but exists in object type [2]. ^^ [1] References: - /dom.js:473:41 - v - 473| type ClipboardEvent$Init = Event$Init & { - 474| clipboardData: DataTransfer | null; - 475| }; - ^ [2] + /dom.js:560:41 + 560| type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... }; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------ ClipboardEvent.js:27:48 @@ -75,9 +72,9 @@ References: 27| const invalid2 = new ClipboardEvent('cut', {clipboardData: { 28| 'text/plain': 'thats not how you do it'}}); // invalid ---------------------------------------^ [1] - /dom.js:474:18 - 474| clipboardData: DataTransfer | null; - ^^^^^^^^^^^^ [2] + /dom.js:560:58 + 560| type ClipboardEvent$Init = Event$Init & { clipboardData: DataTransfer | null, ... }; + ^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------- ClipboardEvent.js:32:7 @@ -89,8 +86,8 @@ Cannot call `e.clipboardData.getData` because property `getData` is missing in n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:479:19 - 479| +clipboardData: ?DataTransfer; // readonly + /dom.js:564:19 + 564| +clipboardData: ?DataTransfer; // readonly ^^^^^^^^^^^^^ [1] @@ -107,9 +104,9 @@ References: Element.js:14:40 14| element.scrollIntoView({ behavior: 'invalid' }); ^^^^^^^^^ [1] - /dom.js:1368:49 - 1368| scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /dom.js:1495:22 + 1495| behavior?: ('auto' | 'instant' | 'smooth'), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- Element.js:15:28 @@ -125,9 +122,9 @@ References: Element.js:15:37 15| element.scrollIntoView({ block: 'invalid' }); ^^^^^^^^^ [1] - /dom.js:1368:90 - 1368| scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /dom.js:1496:19 + 1496| block?: ('start' | 'center' | 'end' | 'nearest'), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- Element.js:16:28 @@ -139,8 +136,8 @@ Cannot call `element.scrollIntoView` with `1` bound to `arg` because number [1] ^ [1] References: - /dom.js:1368:25 - 1368| scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; + /dom.js:1494:25 + 1494| scrollIntoView(arg?: (boolean | { ^^^^^^^ [2] @@ 
-153,8 +150,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. ^^^^^^^^^^^^ References: - /dom.js:576:56 - 576| item(nameOrIndex?: any, optionalIndex?: any): Elem | null; + /dom.js:684:56 + 684| item(nameOrIndex?: any, optionalIndex?: any): Elem | null; ^^^^ [1] @@ -167,8 +164,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. ^^^^^^^^^^^^ References: - /dom.js:577:35 - 577| namedItem(name: string): Elem | null; + /dom.js:685:35 + 685| namedItem(name: string): Elem | null; ^^^^ [1] @@ -185,9 +182,9 @@ References: HTMLElement.js:14:40 14| element.scrollIntoView({ behavior: 'invalid' }); ^^^^^^^^^ [1] - /dom.js:1368:49 - 1368| scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /dom.js:1495:22 + 1495| behavior?: ('auto' | 'instant' | 'smooth'), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error --------------------------------------------------------------------------------------------- HTMLElement.js:15:28 @@ -203,9 +200,9 @@ References: HTMLElement.js:15:37 15| element.scrollIntoView({ block: 'invalid' }); ^^^^^^^^^ [1] - /dom.js:1368:90 - 1368| scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /dom.js:1496:19 + 1496| block?: ('start' | 'center' | 'end' | 'nearest'), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error --------------------------------------------------------------------------------------------- HTMLElement.js:16:28 @@ -217,11 +214,43 @@ Cannot call `element.scrollIntoView` with `1` bound to `arg` because number [1] ^ [1] References: - /dom.js:1368:25 - 1368| scrollIntoView(arg?: (boolean | { behavior?: ('auto' | 'instant' | 'smooth'), block?: ('start' | 'center' | 'end' | 'nearest'), inline?: ('start' | 'center' | 'end' | 'nearest') })): void; + /dom.js:1494:25 + 1494| scrollIntoView(arg?: (boolean | { ^^^^^^^ [2] +Error --------------------------------------------------------------------------------------------- HTMLElement.js:27:19 + +Cannot call `element.focus` with object literal bound to `options` because string [1] is incompatible with boolean [2] +in property `preventScroll`. + + HTMLElement.js:27:19 + 27| element.focus({ preventScroll: 'invalid' }); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + HTMLElement.js:27:36 + 27| element.focus({ preventScroll: 'invalid' }); + ^^^^^^^^^ [1] + /dom.js:1318:39 + 1318| type FocusOptions = { preventScroll?: boolean, ... } + ^^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- HTMLElement.js:28:19 + +Cannot call `element.focus` with `1` bound to `options` because number [1] is incompatible with `FocusOptions` [2]. + + HTMLElement.js:28:19 + 28| element.focus(1); + ^ [1] + +References: + /dom.js:1661:19 + 1661| focus(options?: FocusOptions): void; + ^^^^^^^^^^^^ [2] + + Error ------------------------------------------------------------------------------------------ HTMLFormElement.js:23:1 Cannot get `el.className` because property `className` is missing in null [1]. @@ -231,8 +260,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. 
^^^^^^^^^^^^ References: - /dom.js:2824:43 - 2824| [index: number | string]: HTMLElement | null; + /dom.js:2968:43 + 2968| [index: number | string]: HTMLElement | null; ^^^^ [1] @@ -245,8 +274,8 @@ Cannot get `el.className` because property `className` is missing in null [1]. ^^^^^^^^^^^^ References: - /dom.js:2824:43 - 2824| [index: number | string]: HTMLElement | null; + /dom.js:2968:43 + 2968| [index: number | string]: HTMLElement | null; ^^^^ [1] @@ -262,8 +291,8 @@ References: HTMLInputElement.js:7:28 7| el.setRangeText('foo', 123); // end is required ^^^ [1] - /dom.js:3164:45 - 3164| setRangeText(replacement: string, start?: void, end?: void, selectMode?: void): void; + /dom.js:3349:45 + 3349| setRangeText(replacement: string, start?: void, end?: void, selectMode?: void): void; ^^^^ [2] @@ -279,8 +308,8 @@ References: HTMLInputElement.js:10:38 10| el.setRangeText('foo', 123, 234, 'bogus'); // invalid value ^^^^^^^ [1] - /dom.js:3165:78 - 3165| setRangeText(replacement: string, start: number, end: number, selectMode?: SelectionMode): void; + /dom.js:3350:78 + 3350| setRangeText(replacement: string, start: number, end: number, selectMode?: SelectionMode): void; ^^^^^^^^^^^^^ [2] @@ -293,8 +322,8 @@ Cannot get `form.action` because property `action` is missing in null [1]. ^^^^^^^^^^^ References: - /dom.js:3225:27 - 3225| form: HTMLFormElement | null; + /dom.js:3410:27 + 3410| form: HTMLFormElement | null; ^^^^ [1] @@ -307,8 +336,8 @@ Cannot get `item.value` because property `value` is missing in null [1]. ^^^^^^^^^^ References: - /dom.js:3243:44 - 3243| item(index: number): HTMLOptionElement | null; + /dom.js:3428:44 + 3428| item(index: number): HTMLOptionElement | null; ^^^^ [1] @@ -321,8 +350,8 @@ Cannot get `item.value` because property `value` is missing in null [1]. ^^^^^^^^^^ References: - /dom.js:3244:48 - 3244| namedItem(name: string): HTMLOptionElement | null; + /dom.js:3429:48 + 3429| namedItem(name: string): HTMLOptionElement | null; ^^^^ [1] @@ -349,8 +378,8 @@ Cannot call `target.attachEvent` because undefined [1] is not a function. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:196:17 - 196| attachEvent?: (type: string, listener: EventListener) => void; + /dom.js:244:17 + 244| attachEvent?: (type: string, listener: EventListener) => void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] @@ -363,25 +392,25 @@ Cannot call `target.detachEvent` because undefined [1] is not a function. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:208:17 - 208| detachEvent?: (type: string, listener: EventListener) => void; + /dom.js:261:17 + 261| detachEvent?: (type: string, listener: EventListener) => void; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] -Error ---------------------------------------------------------------------------------------------------- path2d.js:9:6 +Error --------------------------------------------------------------------------------------------------- path2d.js:16:6 Cannot call `path.arcTo` because string [1] is incompatible with number [2]. 
- path2d.js:9:6 - 9| (path.arcTo(0, 0, 0, 0, 10, '20', 5): void); // invalid + path2d.js:16:6 + 16| (path.arcTo(0, 0, 0, 0, 10, '20', 5): void); // invalid ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - path2d.js:9:33 - 9| (path.arcTo(0, 0, 0, 0, 10, '20', 5): void); // invalid + path2d.js:16:33 + 16| (path.arcTo(0, 0, 0, 0, 10, '20', 5): void); // invalid ^^^^ [1] - /dom.js:1761:83 - 1761| arcTo(x1: number, y1: number, x2: number, y2: number, radiusX: number, radiusY: number, rotation: number): void; + /dom.js:1904:83 + 1904| arcTo(x1: number, y1: number, x2: number, y2: number, radiusX: number, radiusY: number, rotation: number): void; ^^^^^^ [2] @@ -408,61 +437,61 @@ References: registerElement.js:52:19 52| oldVal: string, // Error: This might be null ^^^^^^ [1] - /dom.js:592:26 - 592| oldAttributeValue: null, - ^^^^ [2] + /dom.js:702:36 + 702| oldAttributeValue: null, + ^^^^ [2] registerElement.js:53:19 53| newVal: string, // Error: This might be null ^^^^^^ [3] - /dom.js:607:26 - 607| newAttributeValue: null, - ^^^^ [4] + /dom.js:717:36 + 717| newAttributeValue: null, + ^^^^ [4] Error ------------------------------------------------------------------------------------------------ traversal.js:29:5 Cannot call `document.createNodeIterator` because: - - Either object literal [1] is incompatible with `Attr` [2] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [3] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [4] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [5] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [6] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [7] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [8] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [9] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [10] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [11] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [12] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [13] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [14] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [15] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [16] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [17] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [18] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [19] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [20] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [21] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [22] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [23] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [24] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [25] in type argument `RootNodeT`. 
- - Or object literal [1] is incompatible with `Document` [26] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [27] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [28] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [29] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [30] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [31] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [32] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [33] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [34] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [35] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [36] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [37] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [38] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [39] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [40] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [41] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [42] in type argument `RootNodeT`. + - Either object literal [1] is incompatible with `Attr` [2]. + - Or object literal [1] is incompatible with `Document` [3]. + - Or object literal [1] is incompatible with `Document` [4]. + - Or object literal [1] is incompatible with `Document` [5]. + - Or object literal [1] is incompatible with `Document` [6]. + - Or object literal [1] is incompatible with `Document` [7]. + - Or object literal [1] is incompatible with `Document` [8]. + - Or object literal [1] is incompatible with `Document` [9]. + - Or object literal [1] is incompatible with `Document` [10]. + - Or object literal [1] is incompatible with `Document` [11]. + - Or object literal [1] is incompatible with `Document` [12]. + - Or object literal [1] is incompatible with `Document` [13]. + - Or object literal [1] is incompatible with `Document` [14]. + - Or object literal [1] is incompatible with `Document` [15]. + - Or object literal [1] is incompatible with `Document` [16]. + - Or object literal [1] is incompatible with `Document` [17]. + - Or object literal [1] is incompatible with `Document` [18]. + - Or object literal [1] is incompatible with `Document` [19]. + - Or object literal [1] is incompatible with `Document` [20]. + - Or object literal [1] is incompatible with `Document` [21]. + - Or object literal [1] is incompatible with `Document` [22]. + - Or object literal [1] is incompatible with `Document` [23]. + - Or object literal [1] is incompatible with `Document` [24]. + - Or object literal [1] is incompatible with `Document` [25]. + - Or object literal [1] is incompatible with `Document` [26]. + - Or object literal [1] is incompatible with `DocumentFragment` [27]. + - Or object literal [1] is incompatible with `DocumentFragment` [28]. + - Or object literal [1] is incompatible with `DocumentFragment` [29]. + - Or object literal [1] is incompatible with `DocumentFragment` [30]. 
+ - Or object literal [1] is incompatible with `DocumentFragment` [31]. + - Or object literal [1] is incompatible with `DocumentFragment` [32]. + - Or object literal [1] is incompatible with `DocumentFragment` [33]. + - Or object literal [1] is incompatible with `DocumentFragment` [34]. + - Or object literal [1] is incompatible with `Node` [35]. + - Or object literal [1] is incompatible with `Node` [36]. + - Or object literal [1] is incompatible with `Node` [37]. + - Or object literal [1] is incompatible with `Node` [38]. + - Or object literal [1] is incompatible with `Node` [39]. + - Or object literal [1] is incompatible with `Node` [40]. + - Or object literal [1] is incompatible with `Node` [41]. + - Or object literal [1] is incompatible with `Node` [42]. traversal.js:29:5 29| document.createNodeIterator({}); // invalid @@ -472,175 +501,175 @@ References: traversal.js:29:33 29| document.createNodeIterator({}); // invalid ^^ [1] - /dom.js:1020:33 - 1020| createNodeIterator(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1143:33 + 1143| createNodeIterator(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface): NodeIterator; ^^^^ [2] - /dom.js:1028:33 - 1028| createNodeIterator(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1151:33 + 1151| createNodeIterator(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [3] - /dom.js:1029:33 - 1029| createNodeIterator(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1152:33 + 1152| createNodeIterator(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [4] - /dom.js:1030:33 - 1030| createNodeIterator(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1153:33 + 1153| createNodeIterator(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [5] - /dom.js:1031:33 - 1031| createNodeIterator(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1154:33 + 1154| createNodeIterator(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [6] - /dom.js:1032:33 - 1032| createNodeIterator(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1155:33 + 1155| createNodeIterator(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [7] - /dom.js:1033:33 - 1033| createNodeIterator(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1156:33 + 1156| createNodeIterator(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [8] - /dom.js:1034:33 - 1034| createNodeIterator(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1157:33 + 1157| createNodeIterator(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [9] - /dom.js:1035:33 - 1035| createNodeIterator(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1158:33 + 1158| createNodeIterator(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [10] - /dom.js:1036:33 - 1036| createNodeIterator(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1159:33 + 1159| createNodeIterator(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [11] - 
/dom.js:1037:33 - 1037| createNodeIterator(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1160:33 + 1160| createNodeIterator(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [12] - /dom.js:1038:33 - 1038| createNodeIterator(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1161:33 + 1161| createNodeIterator(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [13] - /dom.js:1039:33 - 1039| createNodeIterator(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1162:33 + 1162| createNodeIterator(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [14] - /dom.js:1040:33 - 1040| createNodeIterator(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1163:33 + 1163| createNodeIterator(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [15] - /dom.js:1041:33 - 1041| createNodeIterator(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1164:33 + 1164| createNodeIterator(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [16] - /dom.js:1042:33 - 1042| createNodeIterator(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1165:33 + 1165| createNodeIterator(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [17] - /dom.js:1043:33 - 1043| createNodeIterator(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1166:33 + 1166| createNodeIterator(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [18] - /dom.js:1044:33 - 1044| createNodeIterator(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1167:33 + 1167| createNodeIterator(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [19] - /dom.js:1045:33 - 1045| createNodeIterator(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1168:33 + 1168| createNodeIterator(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [20] - /dom.js:1046:33 - 1046| createNodeIterator(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1169:33 + 1169| createNodeIterator(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [21] - /dom.js:1047:33 - 1047| createNodeIterator(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1170:33 + 1170| createNodeIterator(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [22] - /dom.js:1048:33 - 1048| createNodeIterator(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1171:33 + 1171| createNodeIterator(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [23] - /dom.js:1049:33 - 1049| createNodeIterator(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1172:33 + 1172| createNodeIterator(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [24] - /dom.js:1050:33 - 1050| createNodeIterator(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1173:33 + 1173| 
createNodeIterator(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [25] - /dom.js:1051:33 - 1051| createNodeIterator(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1174:33 + 1174| createNodeIterator(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^ [26] - /dom.js:1079:33 - 1079| createNodeIterator(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1202:33 + 1202| createNodeIterator(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [27] - /dom.js:1080:33 - 1080| createNodeIterator(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1203:33 + 1203| createNodeIterator(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [28] - /dom.js:1081:33 - 1081| createNodeIterator(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1204:33 + 1204| createNodeIterator(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [29] - /dom.js:1082:33 - 1082| createNodeIterator(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1205:33 + 1205| createNodeIterator(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [30] - /dom.js:1083:33 - 1083| createNodeIterator(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1206:33 + 1206| createNodeIterator(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [31] - /dom.js:1084:33 - 1084| createNodeIterator(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1207:33 + 1207| createNodeIterator(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [32] - /dom.js:1085:33 - 1085| createNodeIterator(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1208:33 + 1208| createNodeIterator(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [33] - /dom.js:1086:33 - 1086| createNodeIterator(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1209:33 + 1209| createNodeIterator(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface): NodeIterator; ^^^^^^^^^^^^^^^^ [34] - /dom.js:1099:33 - 1099| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1222:33 + 1222| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; ^^^^ [35] - /dom.js:1100:33 - 1100| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1223:33 + 1223| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; ^^^^ [36] - /dom.js:1101:33 - 1101| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1224:33 + 1224| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; ^^^^ [37] - /dom.js:1102:33 - 1102| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1225:33 + 1225| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): 
NodeIterator; ^^^^ [38] - /dom.js:1103:33 - 1103| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1226:33 + 1226| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; ^^^^ [39] - /dom.js:1104:33 - 1104| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1227:33 + 1227| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; ^^^^ [40] - /dom.js:1105:33 - 1105| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1228:33 + 1228| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; ^^^^ [41] - /dom.js:1116:33 - 1116| createNodeIterator(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1239:33 + 1239| createNodeIterator(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface): NodeIterator; ^^^^ [42] Error ------------------------------------------------------------------------------------------------ traversal.js:33:5 Cannot call `document.createTreeWalker` because: - - Either object literal [1] is incompatible with `Attr` [2] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [3] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [4] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [5] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [6] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [7] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [8] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [9] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [10] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [11] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [12] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [13] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [14] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [15] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [16] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [17] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [18] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [19] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [20] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [21] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [22] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [23] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [24] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [25] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Document` [26] in type argument `RootNodeT`. 
- - Or object literal [1] is incompatible with `DocumentFragment` [27] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [28] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [29] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [30] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [31] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [32] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [33] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `DocumentFragment` [34] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [35] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [36] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [37] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [38] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [39] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [40] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [41] in type argument `RootNodeT`. - - Or object literal [1] is incompatible with `Node` [42] in type argument `RootNodeT`. + - Either object literal [1] is incompatible with `Attr` [2]. + - Or object literal [1] is incompatible with `Document` [3]. + - Or object literal [1] is incompatible with `Document` [4]. + - Or object literal [1] is incompatible with `Document` [5]. + - Or object literal [1] is incompatible with `Document` [6]. + - Or object literal [1] is incompatible with `Document` [7]. + - Or object literal [1] is incompatible with `Document` [8]. + - Or object literal [1] is incompatible with `Document` [9]. + - Or object literal [1] is incompatible with `Document` [10]. + - Or object literal [1] is incompatible with `Document` [11]. + - Or object literal [1] is incompatible with `Document` [12]. + - Or object literal [1] is incompatible with `Document` [13]. + - Or object literal [1] is incompatible with `Document` [14]. + - Or object literal [1] is incompatible with `Document` [15]. + - Or object literal [1] is incompatible with `Document` [16]. + - Or object literal [1] is incompatible with `Document` [17]. + - Or object literal [1] is incompatible with `Document` [18]. + - Or object literal [1] is incompatible with `Document` [19]. + - Or object literal [1] is incompatible with `Document` [20]. + - Or object literal [1] is incompatible with `Document` [21]. + - Or object literal [1] is incompatible with `Document` [22]. + - Or object literal [1] is incompatible with `Document` [23]. + - Or object literal [1] is incompatible with `Document` [24]. + - Or object literal [1] is incompatible with `Document` [25]. + - Or object literal [1] is incompatible with `Document` [26]. + - Or object literal [1] is incompatible with `DocumentFragment` [27]. + - Or object literal [1] is incompatible with `DocumentFragment` [28]. + - Or object literal [1] is incompatible with `DocumentFragment` [29]. + - Or object literal [1] is incompatible with `DocumentFragment` [30]. + - Or object literal [1] is incompatible with `DocumentFragment` [31]. + - Or object literal [1] is incompatible with `DocumentFragment` [32]. 
+ - Or object literal [1] is incompatible with `DocumentFragment` [33]. + - Or object literal [1] is incompatible with `DocumentFragment` [34]. + - Or object literal [1] is incompatible with `Node` [35]. + - Or object literal [1] is incompatible with `Node` [36]. + - Or object literal [1] is incompatible with `Node` [37]. + - Or object literal [1] is incompatible with `Node` [38]. + - Or object literal [1] is incompatible with `Node` [39]. + - Or object literal [1] is incompatible with `Node` [40]. + - Or object literal [1] is incompatible with `Node` [41]. + - Or object literal [1] is incompatible with `Node` [42]. traversal.js:33:5 33| document.createTreeWalker({}); // invalid @@ -650,128 +679,128 @@ References: traversal.js:33:31 33| document.createTreeWalker({}); // invalid ^^ [1] - /dom.js:1021:31 - 1021| createTreeWalker(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1144:31 + 1144| createTreeWalker(root: RootNodeT, whatToShow: 2, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [2] - /dom.js:1052:31 - 1052| createTreeWalker(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1175:31 + 1175| createTreeWalker(root: RootNodeT, whatToShow: 256, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [3] - /dom.js:1053:31 - 1053| createTreeWalker(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1176:31 + 1176| createTreeWalker(root: RootNodeT, whatToShow: 257, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [4] - /dom.js:1054:31 - 1054| createTreeWalker(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1177:31 + 1177| createTreeWalker(root: RootNodeT, whatToShow: 260, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [5] - /dom.js:1055:31 - 1055| createTreeWalker(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1178:31 + 1178| createTreeWalker(root: RootNodeT, whatToShow: 261, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [6] - /dom.js:1056:31 - 1056| createTreeWalker(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1179:31 + 1179| createTreeWalker(root: RootNodeT, whatToShow: 384, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [7] - /dom.js:1057:31 - 1057| createTreeWalker(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1180:31 + 1180| createTreeWalker(root: RootNodeT, whatToShow: 385, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [8] - /dom.js:1058:31 - 1058| createTreeWalker(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1181:31 + 1181| createTreeWalker(root: RootNodeT, whatToShow: 388, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [9] - /dom.js:1059:31 - 1059| createTreeWalker(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + 
/dom.js:1182:31 + 1182| createTreeWalker(root: RootNodeT, whatToShow: 389, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [10] - /dom.js:1060:31 - 1060| createTreeWalker(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1183:31 + 1183| createTreeWalker(root: RootNodeT, whatToShow: 512, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [11] - /dom.js:1061:31 - 1061| createTreeWalker(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1184:31 + 1184| createTreeWalker(root: RootNodeT, whatToShow: 513, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [12] - /dom.js:1062:31 - 1062| createTreeWalker(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1185:31 + 1185| createTreeWalker(root: RootNodeT, whatToShow: 516, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [13] - /dom.js:1063:31 - 1063| createTreeWalker(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1186:31 + 1186| createTreeWalker(root: RootNodeT, whatToShow: 517, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [14] - /dom.js:1064:31 - 1064| createTreeWalker(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1187:31 + 1187| createTreeWalker(root: RootNodeT, whatToShow: 640, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [15] - /dom.js:1065:31 - 1065| createTreeWalker(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1188:31 + 1188| createTreeWalker(root: RootNodeT, whatToShow: 641, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [16] - /dom.js:1066:31 - 1066| createTreeWalker(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1189:31 + 1189| createTreeWalker(root: RootNodeT, whatToShow: 644, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [17] - /dom.js:1067:31 - 1067| createTreeWalker(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1190:31 + 1190| createTreeWalker(root: RootNodeT, whatToShow: 645, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [18] - /dom.js:1068:31 - 1068| createTreeWalker(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1191:31 + 1191| createTreeWalker(root: RootNodeT, whatToShow: 768, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [19] - /dom.js:1069:31 - 1069| createTreeWalker(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1192:31 + 1192| createTreeWalker(root: RootNodeT, whatToShow: 769, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [20] - /dom.js:1070:31 - 1070| createTreeWalker(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface, 
entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1193:31 + 1193| createTreeWalker(root: RootNodeT, whatToShow: 772, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [21] - /dom.js:1071:31 - 1071| createTreeWalker(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1194:31 + 1194| createTreeWalker(root: RootNodeT, whatToShow: 773, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [22] - /dom.js:1072:31 - 1072| createTreeWalker(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1195:31 + 1195| createTreeWalker(root: RootNodeT, whatToShow: 896, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [23] - /dom.js:1073:31 - 1073| createTreeWalker(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1196:31 + 1196| createTreeWalker(root: RootNodeT, whatToShow: 897, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [24] - /dom.js:1074:31 - 1074| createTreeWalker(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1197:31 + 1197| createTreeWalker(root: RootNodeT, whatToShow: 900, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [25] - /dom.js:1075:31 - 1075| createTreeWalker(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1198:31 + 1198| createTreeWalker(root: RootNodeT, whatToShow: 901, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^ [26] - /dom.js:1087:31 - 1087| createTreeWalker(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1210:31 + 1210| createTreeWalker(root: RootNodeT, whatToShow: 1024, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [27] - /dom.js:1088:31 - 1088| createTreeWalker(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1211:31 + 1211| createTreeWalker(root: RootNodeT, whatToShow: 1025, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [28] - /dom.js:1089:31 - 1089| createTreeWalker(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1212:31 + 1212| createTreeWalker(root: RootNodeT, whatToShow: 1028, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [29] - /dom.js:1090:31 - 1090| createTreeWalker(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1213:31 + 1213| createTreeWalker(root: RootNodeT, whatToShow: 1029, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [30] - /dom.js:1091:31 - 1091| createTreeWalker(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1214:31 + 1214| createTreeWalker(root: RootNodeT, whatToShow: 1152, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [31] - /dom.js:1092:31 - 1092| 
createTreeWalker(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1215:31 + 1215| createTreeWalker(root: RootNodeT, whatToShow: 1153, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [32] - /dom.js:1093:31 - 1093| createTreeWalker(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1216:31 + 1216| createTreeWalker(root: RootNodeT, whatToShow: 1156, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [33] - /dom.js:1094:31 - 1094| createTreeWalker(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1217:31 + 1217| createTreeWalker(root: RootNodeT, whatToShow: 1157, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^^^^^^^^^^^^^ [34] - /dom.js:1106:31 - 1106| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1229:31 + 1229| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [35] - /dom.js:1107:31 - 1107| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1230:31 + 1230| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [36] - /dom.js:1108:31 - 1108| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1231:31 + 1231| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [37] - /dom.js:1109:31 - 1109| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1232:31 + 1232| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [38] - /dom.js:1110:31 - 1110| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1233:31 + 1233| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [39] - /dom.js:1111:31 - 1111| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1234:31 + 1234| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [40] - /dom.js:1112:31 - 1112| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1235:31 + 1235| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [41] - /dom.js:1117:31 - 1117| createTreeWalker(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1240:31 + 1240| createTreeWalker(root: RootNodeT, whatToShow?: number, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^^ [42] 
@@ -788,11 +817,11 @@ References: traversal.js:186:60 186| document.createNodeIterator(document_body, -1, node => 'accept'); // invalid ^^^^^^^^ [1] - /dom.js:3707:1 + /dom.js:3892:1 v-------------------------------- - 3707| typeof NodeFilter.FILTER_ACCEPT | - 3708| typeof NodeFilter.FILTER_REJECT | - 3709| typeof NodeFilter.FILTER_SKIP; + 3892| typeof NodeFilter.FILTER_ACCEPT | + 3893| typeof NodeFilter.FILTER_REJECT | + 3894| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -809,11 +838,11 @@ References: traversal.js:188:74 188| document.createNodeIterator(document_body, -1, { acceptNode: node => 'accept' }); // invalid ^^^^^^^^ [1] - /dom.js:3707:1 + /dom.js:3892:1 v-------------------------------- - 3707| typeof NodeFilter.FILTER_ACCEPT | - 3708| typeof NodeFilter.FILTER_REJECT | - 3709| typeof NodeFilter.FILTER_SKIP; + 3892| typeof NodeFilter.FILTER_ACCEPT | + 3893| typeof NodeFilter.FILTER_REJECT | + 3894| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -836,26 +865,26 @@ References: traversal.js:189:48 189| document.createNodeIterator(document_body, -1, {}); // invalid ^^ [1] - /dom.js:1099:68 - 1099| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1222:68 + 1222| createNodeIterator(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface): NodeIterator; ^ [2] - /dom.js:1100:68 - 1100| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1223:68 + 1223| createNodeIterator(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface): NodeIterator; ^ [3] - /dom.js:1101:68 - 1101| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1224:68 + 1224| createNodeIterator(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface): NodeIterator; ^ [4] - /dom.js:1102:68 - 1102| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1225:68 + 1225| createNodeIterator(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface): NodeIterator; ^^^ [5] - /dom.js:1103:68 - 1103| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1226:68 + 1226| createNodeIterator(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface): NodeIterator; ^^^ [6] - /dom.js:1104:68 - 1104| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1227:68 + 1227| createNodeIterator(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface): NodeIterator; ^^^ [7] - /dom.js:1105:68 - 1105| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; + /dom.js:1228:68 + 1228| createNodeIterator(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface): NodeIterator; ^^^ [8] @@ -872,11 +901,11 @@ References: traversal.js:193:58 193| document.createTreeWalker(document_body, -1, node => 'accept'); // invalid ^^^^^^^^ [1] - /dom.js:3707:1 + /dom.js:3892:1 v-------------------------------- - 3707| typeof NodeFilter.FILTER_ACCEPT | - 3708| typeof NodeFilter.FILTER_REJECT | - 3709| typeof NodeFilter.FILTER_SKIP; + 3892| typeof NodeFilter.FILTER_ACCEPT | + 3893| typeof NodeFilter.FILTER_REJECT | + 3894| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -893,11 +922,11 @@ References: traversal.js:195:72 195| document.createTreeWalker(document_body, -1, { acceptNode: node => 'accept' }); // 
invalid ^^^^^^^^ [1] - /dom.js:3707:1 + /dom.js:3892:1 v-------------------------------- - 3707| typeof NodeFilter.FILTER_ACCEPT | - 3708| typeof NodeFilter.FILTER_REJECT | - 3709| typeof NodeFilter.FILTER_SKIP; + 3892| typeof NodeFilter.FILTER_ACCEPT | + 3893| typeof NodeFilter.FILTER_REJECT | + 3894| typeof NodeFilter.FILTER_SKIP; ----------------------------^ [2] @@ -920,31 +949,31 @@ References: traversal.js:196:46 196| document.createTreeWalker(document_body, -1, {}); // invalid ^^ [1] - /dom.js:1106:66 - 1106| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1229:66 + 1229| createTreeWalker(root: RootNodeT, whatToShow: 1, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^ [2] - /dom.js:1107:66 - 1107| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1230:66 + 1230| createTreeWalker(root: RootNodeT, whatToShow: 4, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^ [3] - /dom.js:1108:66 - 1108| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1231:66 + 1231| createTreeWalker(root: RootNodeT, whatToShow: 5, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^ [4] - /dom.js:1109:66 - 1109| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1232:66 + 1232| createTreeWalker(root: RootNodeT, whatToShow: 128, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [5] - /dom.js:1110:66 - 1110| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1233:66 + 1233| createTreeWalker(root: RootNodeT, whatToShow: 129, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [6] - /dom.js:1111:66 - 1111| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1234:66 + 1234| createTreeWalker(root: RootNodeT, whatToShow: 132, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [7] - /dom.js:1112:66 - 1112| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; + /dom.js:1235:66 + 1235| createTreeWalker(root: RootNodeT, whatToShow: 133, filter?: NodeFilterInterface, entityReferenceExpansion?: boolean): TreeWalker; ^^^ [8] -Found 35 errors +Found 37 errors Only showing the most relevant union/intersection branches. 
To see all branches, re-run Flow with --show-all-branches diff --git a/tests/dom/path2d.js b/tests/dom/path2d.js index 5610e88c534..893b3c9f1cb 100644 --- a/tests/dom/path2d.js +++ b/tests/dom/path2d.js @@ -1,6 +1,13 @@ // @flow let tests = [ + // constructors + function() { + let path1 = new Path2D(); // valid + let path2 = new Path2D(path1); // valid + let path3 = new Path2D('M10 10 h 80 v 80 h -80 Z'); // valid + }, + // arcTo function() { let path = new Path2D(); diff --git a/tests/dump-types/dump-types.exp b/tests/dump-types/dump-types.exp index cbff9e29862..e75558738d9 100644 --- a/tests/dump-types/dump-types.exp +++ b/tests/dump-types/dump-types.exp @@ -14,6 +14,66 @@ "start":5, "end":7 }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":2,"column":5,"offset":13}, + "end":{"line":2,"column":7,"offset":16} + }, + "path":"test.js", + "line":2, + "endline":2, + "start":5, + "end":7 + }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":2,"column":8,"offset":16}, + "end":{"line":2,"column":7,"offset":16} + }, + "path":"test.js", + "line":2, + "endline":2, + "start":8, + "end":7 + }, + { + "type":"mixed", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":2,"column":11,"offset":19}, + "end":{"line":2,"column":17,"offset":26} + }, + "path":"test.js", + "line":2, + "endline":2, + "start":11, + "end":17 + }, + { + "type":"mixed", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":2,"column":11,"offset":19}, + "end":{"line":2,"column":17,"offset":26} + }, + "path":"test.js", + "line":2, + "endline":2, + "start":11, + "end":17 + }, { "type":"number", "reasons":[], @@ -74,6 +134,66 @@ "start":14, "end":14 }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":3,"column":14,"offset":53}, + "end":{"line":3,"column":14,"offset":54} + }, + "path":"test.js", + "line":3, + "endline":3, + "start":14, + "end":14 + }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":3,"column":15,"offset":54}, + "end":{"line":3,"column":14,"offset":54} + }, + "path":"test.js", + "line":3, + "endline":3, + "start":15, + "end":14 + }, + { + "type":"void", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":3,"column":16,"offset":55}, + "end":{"line":3,"column":15,"offset":55} + }, + "path":"test.js", + "line":3, + "endline":3, + "start":16, + "end":15 + }, + { + "type":"(x: number) => void", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":4,"column":1,"offset":60}, + "end":{"line":4,"column":3,"offset":63} + }, + "path":"test.js", + "line":4, + "endline":4, + "start":1, + "end":3 + }, { "type":"(x: number) => void", "reasons":[], @@ -119,6 +239,21 @@ "start":5, "end":5 }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":5,"column":5,"offset":72}, + "end":{"line":5,"column":5,"offset":73} + }, + "path":"test.js", + "line":5, + "endline":5, + "start":5, + "end":5 + }, { "type":"string", "reasons":[], @@ -134,6 +269,36 @@ "start":5, "end":12 }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":5,"column":7,"offset":74}, + "end":{"line":5,"column":12,"offset":80} + }, + "path":"test.js", + "line":5, + 
"endline":5, + "start":7, + "end":12 + }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":5,"column":16,"offset":83}, + "end":{"line":5,"column":18,"offset":86} + }, + "path":"test.js", + "line":5, + "endline":5, + "start":16, + "end":18 + }, { "type":"number", "reasons":[], @@ -185,479 +350,1319 @@ "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":8,"column":10,"offset":124}, - "end":{"line":8,"column":10,"offset":125} + "start":{"line":7,"column":22,"offset":110}, + "end":{"line":7,"column":22,"offset":111} }, "path":"test.js", - "line":8, - "endline":8, - "start":10, - "end":10 + "line":7, + "endline":7, + "start":22, + "end":22 }, { - "type":"(val: ?(string | Object)) => (string | void | Object)", + "type":"empty", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":7,"offset":171}, - "end":{"line":12,"column":21,"offset":186} + "start":{"line":7,"column":23,"offset":111}, + "end":{"line":7,"column":22,"offset":111} }, "path":"test.js", - "line":12, - "endline":12, - "start":7, - "end":21 + "line":7, + "endline":7, + "start":23, + "end":22 }, { - "type":"?string | Object", + "type":"empty", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":25,"offset":189}, - "end":{"line":12,"column":27,"offset":192} + "start":{"line":7,"column":24,"offset":112}, + "end":{"line":7,"column":23,"offset":112} }, "path":"test.js", - "line":12, - "endline":12, - "start":25, - "end":27 + "line":7, + "endline":7, + "start":24, + "end":23 }, { - "type":"(val: ?(string | Object)) => (string | void | Object)", + "type":"empty", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":25,"offset":189}, - "end":{"line":12,"column":61,"offset":226} + "start":{"line":8,"column":10,"offset":124}, + "end":{"line":8,"column":10,"offset":125} }, "path":"test.js", - "line":12, - "endline":12, - "start":25, - "end":61 + "line":8, + "endline":8, + "start":10, + "end":10 }, { - "type":"?string | Object", + "type":"empty", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":32,"offset":196}, - "end":{"line":12,"column":34,"offset":199} + "start":{"line":8,"column":10,"offset":124}, + "end":{"line":8,"column":10,"offset":125} }, "path":"test.js", - "line":12, - "endline":12, - "start":32, - "end":34 + "line":8, + "endline":8, + "start":10, + "end":10 }, { - "type":"string | void | Object", + "type":"(val: ?(any | string)) => (any | void | string)", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":32,"offset":196}, - "end":{"line":12,"column":61,"offset":226} + "start":{"line":12,"column":7,"offset":171}, + "end":{"line":12,"column":21,"offset":186} }, "path":"test.js", "line":12, "endline":12, - "start":32, - "end":61 + "start":7, + "end":21 }, { - "type":"null", + "type":"(val: ?(any | string)) => (any | void | string)", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":40,"offset":204}, - "end":{"line":12,"column":43,"offset":208} + "start":{"line":12,"column":7,"offset":171}, + "end":{"line":12,"column":21,"offset":186} }, "path":"test.js", "line":12, "endline":12, - "start":40, - "end":43 + "start":7, + "end":21 }, { - "type":"void", + "type":"(val: ?(any | string)) => (any | void | string)", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - 
"start":{"line":12,"column":47,"offset":211}, - "end":{"line":12,"column":55,"offset":220} + "start":{"line":12,"column":22,"offset":186}, + "end":{"line":12,"column":21,"offset":186} }, "path":"test.js", "line":12, "endline":12, - "start":47, - "end":55 + "start":22, + "end":21 }, { - "type":"string | void | Object", + "type":"?any | string", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":12,"column":59,"offset":223}, - "end":{"line":12,"column":61,"offset":226} + "start":{"line":12,"column":25,"offset":189}, + "end":{"line":12,"column":27,"offset":192} }, "path":"test.js", "line":12, "endline":12, - "start":59, - "end":61 + "start":25, + "end":27 }, { - "type":"(x: ?Object) => (string | void | Object)", + "type":"?any | string", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":14,"column":10,"offset":238}, - "end":{"line":14,"column":11,"offset":240} + "start":{"line":12,"column":25,"offset":189}, + "end":{"line":12,"column":27,"offset":192} }, "path":"test.js", - "line":14, - "endline":14, - "start":10, - "end":11 - }, + "line":12, + "endline":12, + "start":25, + "end":27 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":25,"offset":189}, + "end":{"line":12,"column":61,"offset":226} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":25, + "end":61 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":28,"offset":192}, + "end":{"line":12,"column":27,"offset":192} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":28, + "end":27 + }, + { + "type":"?any | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":28,"offset":192}, + "end":{"line":12,"column":27,"offset":192} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":28, + "end":27 + }, + { + "type":"?any | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":32,"offset":196}, + "end":{"line":12,"column":34,"offset":199} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":32, + "end":34 + }, + { + "type":"?any | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":32,"offset":196}, + "end":{"line":12,"column":34,"offset":199} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":32, + "end":34 + }, + { + "type":"boolean", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":32,"offset":196}, + "end":{"line":12,"column":43,"offset":208} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":32, + "end":43 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":32,"offset":196}, + "end":{"line":12,"column":61,"offset":226} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":32, + "end":61 + }, + { + "type":"null", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":40,"offset":204}, + "end":{"line":12,"column":43,"offset":208} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":40, + "end":43 + }, + { + "type":"void", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + 
"start":{"line":12,"column":47,"offset":211}, + "end":{"line":12,"column":55,"offset":220} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":47, + "end":55 + }, + { + "type":"void", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":47,"offset":211}, + "end":{"line":12,"column":55,"offset":220} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":47, + "end":55 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":59,"offset":223}, + "end":{"line":12,"column":61,"offset":226} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":59, + "end":61 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":12,"column":59,"offset":223}, + "end":{"line":12,"column":61,"offset":226} + }, + "path":"test.js", + "line":12, + "endline":12, + "start":59, + "end":61 + }, + { + "type":"(x: ?any) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":10,"offset":238}, + "end":{"line":14,"column":11,"offset":240} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":10, + "end":11 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":13,"offset":241}, + "end":{"line":14,"column":13,"offset":242} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":13, + "end":13 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":13,"offset":241}, + "end":{"line":14,"column":22,"offset":251} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":13, + "end":22 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":16,"offset":244}, + "end":{"line":14,"column":22,"offset":251} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":16, + "end":22 + }, + { + "type":"any (explicit)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":17,"offset":245}, + "end":{"line":14,"column":22,"offset":251} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":17, + "end":22 + }, + { + "type":"any (explicit)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":17,"offset":245}, + "end":{"line":14,"column":22,"offset":251} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":17, + "end":22 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":24,"offset":252}, + "end":{"line":14,"column":23,"offset":252} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":24, + "end":23 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":34,"offset":262}, + "end":{"line":14,"column":48,"offset":277} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":34, + "end":48 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":34,"offset":262}, + "end":{"line":14,"column":48,"offset":277} + }, + "path":"test.js", + "line":14, + "endline":14, 
+ "start":34, + "end":48 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":34,"offset":262}, + "end":{"line":14,"column":51,"offset":280} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":34, + "end":51 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":50,"offset":278}, + "end":{"line":14,"column":50,"offset":279} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":50, + "end":50 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":14,"column":50,"offset":278}, + "end":{"line":14,"column":50,"offset":279} + }, + "path":"test.js", + "line":14, + "endline":14, + "start":50, + "end":50 + }, + { + "type":"(x: ?any) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":10,"offset":293}, + "end":{"line":15,"column":11,"offset":295} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":10, + "end":11 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":13,"offset":296}, + "end":{"line":15,"column":13,"offset":297} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":13, + "end":13 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":13,"offset":296}, + "end":{"line":15,"column":22,"offset":306} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":13, + "end":22 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":16,"offset":299}, + "end":{"line":15,"column":22,"offset":306} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":16, + "end":22 + }, + { + "type":"any (explicit)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":17,"offset":300}, + "end":{"line":15,"column":22,"offset":306} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":17, + "end":22 + }, + { + "type":"any (explicit)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":17,"offset":300}, + "end":{"line":15,"column":22,"offset":306} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":17, + "end":22 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":24,"offset":307}, + "end":{"line":15,"column":23,"offset":307} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":24, + "end":23 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":34,"offset":317}, + "end":{"line":15,"column":48,"offset":332} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":34, + "end":48 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":34,"offset":317}, + "end":{"line":15,"column":48,"offset":332} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":34, + "end":48 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + 
"start":{"line":15,"column":34,"offset":317}, + "end":{"line":15,"column":51,"offset":335} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":34, + "end":51 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":50,"offset":333}, + "end":{"line":15,"column":50,"offset":334} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":50, + "end":50 + }, + { + "type":"?any", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":15,"column":50,"offset":333}, + "end":{"line":15,"column":50,"offset":334} + }, + "path":"test.js", + "line":15, + "endline":15, + "start":50, + "end":50 + }, + { + "type":"(x: ?string) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":10,"offset":348}, + "end":{"line":16,"column":11,"offset":350} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":10, + "end":11 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":13,"offset":351}, + "end":{"line":16,"column":13,"offset":352} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":13, + "end":13 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":13,"offset":351}, + "end":{"line":16,"column":22,"offset":361} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":13, + "end":22 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":16,"offset":354}, + "end":{"line":16,"column":22,"offset":361} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":16, + "end":22 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":17,"offset":355}, + "end":{"line":16,"column":22,"offset":361} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":17, + "end":22 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":24,"offset":362}, + "end":{"line":16,"column":23,"offset":362} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":24, + "end":23 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":34,"offset":372}, + "end":{"line":16,"column":48,"offset":387} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":34, + "end":48 + }, + { + "type":"(val: ?(any | string)) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":34,"offset":372}, + "end":{"line":16,"column":48,"offset":387} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":34, + "end":48 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":34,"offset":372}, + "end":{"line":16,"column":51,"offset":390} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":34, + "end":51 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":50,"offset":388}, + "end":{"line":16,"column":50,"offset":389} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":50, + 
"end":50 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":16,"column":50,"offset":388}, + "end":{"line":16,"column":50,"offset":389} + }, + "path":"test.js", + "line":16, + "endline":16, + "start":50, + "end":50 + }, + { + "type":"(x: ?string) => (any | void | string)", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":17,"column":10,"offset":403}, + "end":{"line":17,"column":11,"offset":405} + }, + "path":"test.js", + "line":17, + "endline":17, + "start":10, + "end":11 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":17,"column":13,"offset":406}, + "end":{"line":17,"column":13,"offset":407} + }, + "path":"test.js", + "line":17, + "endline":17, + "start":13, + "end":13 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":17,"column":13,"offset":406}, + "end":{"line":17,"column":22,"offset":416} + }, + "path":"test.js", + "line":17, + "endline":17, + "start":13, + "end":22 + }, + { + "type":"?string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":17,"column":16,"offset":409}, + "end":{"line":17,"column":22,"offset":416} + }, + "path":"test.js", + "line":17, + "endline":17, + "start":16, + "end":22 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":17,"column":17,"offset":410}, + "end":{"line":17,"column":22,"offset":416} + }, + "path":"test.js", + "line":17, + "endline":17, + "start":17, + "end":22 + }, { - "type":"?Object", + "type":"any | void | string", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":14,"column":13,"offset":241}, - "end":{"line":14,"column":22,"offset":251} + "start":{"line":17,"column":24,"offset":417}, + "end":{"line":17,"column":23,"offset":417} }, "path":"test.js", - "line":14, - "endline":14, - "start":13, - "end":22 + "line":17, + "endline":17, + "start":24, + "end":23 }, { - "type":"(val: ?(string | Object)) => (string | void | Object)", + "type":"(val: ?(any | string)) => (any | void | string)", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":14,"column":34,"offset":262}, - "end":{"line":14,"column":48,"offset":277} + "start":{"line":17,"column":34,"offset":427}, + "end":{"line":17,"column":48,"offset":442} }, "path":"test.js", - "line":14, - "endline":14, + "line":17, + "endline":17, "start":34, "end":48 }, { - "type":"string | void | Object", + "type":"(val: ?(any | string)) => (any | void | string)", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":14,"column":34,"offset":262}, - "end":{"line":14,"column":51,"offset":280} + "start":{"line":17,"column":34,"offset":427}, + "end":{"line":17,"column":48,"offset":442} }, "path":"test.js", - "line":14, - "endline":14, + "line":17, + "endline":17, + "start":34, + "end":48 + }, + { + "type":"any | void | string", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":17,"column":34,"offset":427}, + "end":{"line":17,"column":51,"offset":445} + }, + "path":"test.js", + "line":17, + "endline":17, "start":34, "end":51 }, { - "type":"?Object", + "type":"?string", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":14,"column":50,"offset":278}, - "end":{"line":14,"column":50,"offset":279} + 
"start":{"line":17,"column":50,"offset":443}, + "end":{"line":17,"column":50,"offset":444} }, "path":"test.js", - "line":14, - "endline":14, + "line":17, + "endline":17, "start":50, "end":50 }, { - "type":"(x: ?Object) => (string | void | Object)", + "type":"?string", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":15,"column":10,"offset":293}, - "end":{"line":15,"column":11,"offset":295} + "start":{"line":17,"column":50,"offset":443}, + "end":{"line":17,"column":50,"offset":444} }, "path":"test.js", - "line":15, - "endline":15, - "start":10, - "end":11 + "line":17, + "endline":17, + "start":50, + "end":50 }, { - "type":"?Object", + "type":"$Facebookism$Idx", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":15,"column":13,"offset":296}, - "end":{"line":15,"column":22,"offset":306} + "start":{"line":19,"column":13,"offset":462}, + "end":{"line":19,"column":15,"offset":465} }, "path":"test.js", - "line":15, - "endline":15, + "line":19, + "endline":19, "start":13, - "end":22 + "end":15 }, { - "type":"(val: ?(string | Object)) => (string | void | Object)", + "type":"$Facebookism$Idx", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":15,"column":34,"offset":317}, - "end":{"line":15,"column":48,"offset":332} + "start":{"line":19,"column":18,"offset":467}, + "end":{"line":19,"column":33,"offset":483} }, "path":"test.js", - "line":15, - "endline":15, - "start":34, - "end":48 + "line":19, + "endline":19, + "start":18, + "end":33 }, { - "type":"string | void | Object", + "type":"$Facebookism$Idx", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":15,"column":34,"offset":317}, - "end":{"line":15,"column":51,"offset":335} + "start":{"line":19,"column":18,"offset":467}, + "end":{"line":19,"column":33,"offset":483} }, "path":"test.js", - "line":15, - "endline":15, - "start":34, - "end":51 + "line":19, + "endline":19, + "start":18, + "end":33 }, { - "type":"?Object", + "type":"{a?: {b: ?{c: (null | {d: number})}}}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":15,"column":50,"offset":333}, - "end":{"line":15,"column":50,"offset":334} + "start":{"line":20,"column":13,"offset":497}, + "end":{"line":20,"column":15,"offset":500} }, "path":"test.js", - "line":15, - "endline":15, - "start":50, - "end":50 + "line":20, + "endline":20, + "start":13, + "end":15 }, { - "type":"(x: ?string) => (string | void | Object)", + "type":"{a?: {b: ?{c: (null | {d: number})}}}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":16,"column":10,"offset":348}, - "end":{"line":16,"column":11,"offset":350} + "start":{"line":20,"column":18,"offset":502}, + "end":{"line":20,"column":52,"offset":537} }, "path":"test.js", - "line":16, - "endline":16, - "start":10, - "end":11 + "line":20, + "endline":20, + "start":18, + "end":52 }, { - "type":"?string", + "type":"void | {b: ?{c: (null | {d: number})}}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":16,"column":13,"offset":351}, - "end":{"line":16,"column":22,"offset":361} + "start":{"line":20,"column":19,"offset":503}, + "end":{"line":20,"column":19,"offset":504} }, "path":"test.js", - "line":16, - "endline":16, - "start":13, - "end":22 + "line":20, + "endline":20, + "start":19, + "end":19 }, { - "type":"(val: ?(string | Object)) => (string | void | Object)", + "type":"{b: ?{c: (null | {d: number})}}", "reasons":[], "loc":{ "source":"test.js", 
"type":"SourceFile", - "start":{"line":16,"column":34,"offset":372}, - "end":{"line":16,"column":48,"offset":387} + "start":{"line":20,"column":23,"offset":507}, + "end":{"line":20,"column":51,"offset":536} }, "path":"test.js", - "line":16, - "endline":16, - "start":34, - "end":48 + "line":20, + "endline":20, + "start":23, + "end":51 }, { - "type":"string | void | Object", + "type":"?{c: (null | {d: number})}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":16,"column":34,"offset":372}, - "end":{"line":16,"column":51,"offset":390} + "start":{"line":20,"column":24,"offset":508}, + "end":{"line":20,"column":24,"offset":509} }, "path":"test.js", - "line":16, - "endline":16, - "start":34, - "end":51 + "line":20, + "endline":20, + "start":24, + "end":24 }, { - "type":"?string", + "type":"?{c: (null | {d: number})}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":16,"column":50,"offset":388}, - "end":{"line":16,"column":50,"offset":389} + "start":{"line":20,"column":27,"offset":511}, + "end":{"line":20,"column":50,"offset":535} }, "path":"test.js", - "line":16, - "endline":16, - "start":50, + "line":20, + "endline":20, + "start":27, "end":50 }, { - "type":"(x: ?string) => (string | void | Object)", + "type":"{c: (null | {d: number})}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":17,"column":10,"offset":403}, - "end":{"line":17,"column":11,"offset":405} + "start":{"line":20,"column":28,"offset":512}, + "end":{"line":20,"column":50,"offset":535} }, "path":"test.js", - "line":17, - "endline":17, - "start":10, - "end":11 + "line":20, + "endline":20, + "start":28, + "end":50 }, { - "type":"?string", + "type":"null | {d: number}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":17,"column":13,"offset":406}, - "end":{"line":17,"column":22,"offset":416} + "start":{"line":20,"column":29,"offset":513}, + "end":{"line":20,"column":29,"offset":514} }, "path":"test.js", - "line":17, - "endline":17, - "start":13, - "end":22 + "line":20, + "endline":20, + "start":29, + "end":29 }, { - "type":"(val: ?(string | Object)) => (string | void | Object)", + "type":"null", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":17,"column":34,"offset":427}, - "end":{"line":17,"column":48,"offset":442} + "start":{"line":20,"column":32,"offset":516}, + "end":{"line":20,"column":35,"offset":520} }, "path":"test.js", - "line":17, - "endline":17, - "start":34, - "end":48 + "line":20, + "endline":20, + "start":32, + "end":35 }, { - "type":"string | void | Object", + "type":"null | {d: number}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":17,"column":34,"offset":427}, - "end":{"line":17,"column":51,"offset":445} + "start":{"line":20,"column":32,"offset":516}, + "end":{"line":20,"column":49,"offset":534} }, "path":"test.js", - "line":17, - "endline":17, - "start":34, - "end":51 + "line":20, + "endline":20, + "start":32, + "end":49 }, { - "type":"?string", + "type":"{d: number}", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":17,"column":50,"offset":443}, - "end":{"line":17,"column":50,"offset":444} + "start":{"line":20,"column":39,"offset":523}, + "end":{"line":20,"column":49,"offset":534} }, "path":"test.js", - "line":17, - "endline":17, - "start":50, - "end":50 + "line":20, + "endline":20, + "start":39, + "end":49 }, { - "type":"$Facebookism$Idx", + "type":"number", "reasons":[], "loc":{ 
"source":"test.js", "type":"SourceFile", - "start":{"line":19,"column":13,"offset":462}, - "end":{"line":19,"column":15,"offset":465} + "start":{"line":20,"column":40,"offset":524}, + "end":{"line":20,"column":40,"offset":525} }, "path":"test.js", - "line":19, - "endline":19, - "start":13, + "line":20, + "endline":20, + "start":40, + "end":40 + }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":20,"column":43,"offset":527}, + "end":{"line":20,"column":48,"offset":533} + }, + "path":"test.js", + "line":20, + "endline":20, + "start":43, + "end":48 + }, + { + "type":"?number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":7,"offset":545}, + "end":{"line":21,"column":15,"offset":554} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":7, + "end":15 + }, + { + "type":"?number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":7,"offset":545}, + "end":{"line":21,"column":15,"offset":554} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":7, "end":15 }, { - "type":"{a?: {b: ?{c: (null | {d: number})}}}", + "type":"?number", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":20,"column":13,"offset":497}, - "end":{"line":20,"column":15,"offset":500} + "start":{"line":21,"column":16,"offset":554}, + "end":{"line":21,"column":15,"offset":554} }, "path":"test.js", - "line":20, - "endline":20, - "start":13, + "line":21, + "endline":21, + "start":16, "end":15 }, { - "type":"?number", + "type":"$Facebookism$Idx", "reasons":[], "loc":{ "source":"test.js", "type":"SourceFile", - "start":{"line":21,"column":7,"offset":545}, - "end":{"line":21,"column":15,"offset":554} + "start":{"line":21,"column":19,"offset":557}, + "end":{"line":21,"column":21,"offset":560} }, "path":"test.js", "line":21, "endline":21, - "start":7, - "end":15 + "start":19, + "end":21 }, { "type":"$Facebookism$Idx", @@ -704,6 +1709,36 @@ "start":23, "end":25 }, + { + "type":"{a?: {b: ?{c: (null | {d: number})}}}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":23,"offset":561}, + "end":{"line":21,"column":25,"offset":564} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":23, + "end":25 + }, + { + "type":"{a?: {b: ?{c: (null | {d: number})}}}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":28,"offset":566}, + "end":{"line":21,"column":30,"offset":569} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":28, + "end":30 + }, { "type":"{a?: {b: ?{c: (null | {d: number})}}}", "reasons":[], @@ -734,6 +1769,51 @@ "start":28, "end":45 }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":31,"offset":569}, + "end":{"line":21,"column":30,"offset":569} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":31, + "end":30 + }, + { + "type":"{a?: {b: ?{c: (null | {d: number})}}}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":31,"offset":569}, + "end":{"line":21,"column":30,"offset":569} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":31, + "end":30 + }, + { + "type":"{a?: {b: ?{c: (null | {d: number})}}}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":35,"offset":573}, + 
"end":{"line":21,"column":37,"offset":576} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":35, + "end":37 + }, { "type":"{a?: {b: ?{c: (null | {d: number})}}}", "reasons":[], @@ -808,6 +1888,66 @@ "endline":21, "start":35, "end":45 + }, + { + "type":"void | {b: ?{c: (null | {d: number})}}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":39,"offset":577}, + "end":{"line":21,"column":39,"offset":578} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":39, + "end":39 + }, + { + "type":"?{c: (null | {d: number})}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":41,"offset":579}, + "end":{"line":21,"column":41,"offset":580} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":41, + "end":41 + }, + { + "type":"null | {d: number}", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":43,"offset":581}, + "end":{"line":21,"column":43,"offset":582} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":43, + "end":43 + }, + { + "type":"number", + "reasons":[], + "loc":{ + "source":"test.js", + "type":"SourceFile", + "start":{"line":21,"column":45,"offset":583}, + "end":{"line":21,"column":45,"offset":584} + }, + "path":"test.js", + "line":21, + "endline":21, + "start":45, + "end":45 } ] === predicates.js === @@ -827,6 +1967,36 @@ "start":5, "end":5 }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":3,"column":5,"offset":14}, + "end":{"line":3,"column":5,"offset":15} + }, + "path":"predicates.js", + "line":3, + "endline":3, + "start":5, + "end":5 + }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":3,"column":6,"offset":15}, + "end":{"line":3,"column":5,"offset":15} + }, + "path":"predicates.js", + "line":3, + "endline":3, + "start":6, + "end":5 + }, { "type":"{|FOO: string|}", "reasons":[], @@ -843,19 +2013,109 @@ "end":22 }, { - "type":"string", + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":3,"column":11,"offset":20}, + "end":{"line":3,"column":13,"offset":23} + }, + "path":"predicates.js", + "line":3, + "endline":3, + "start":11, + "end":13 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":3,"column":16,"offset":25}, + "end":{"line":3,"column":20,"offset":30} + }, + "path":"predicates.js", + "line":3, + "endline":3, + "start":16, + "end":20 + }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":4,"column":5,"offset":38}, + "end":{"line":4,"column":5,"offset":39} + }, + "path":"predicates.js", + "line":4, + "endline":4, + "start":5, + "end":5 + }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":4,"column":5,"offset":38}, + "end":{"line":4,"column":5,"offset":39} + }, + "path":"predicates.js", + "line":4, + "endline":4, + "start":5, + "end":5 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":4,"column":5,"offset":38}, + "end":{"line":4,"column":9,"offset":43} + }, + "path":"predicates.js", + "line":4, + "endline":4, + "start":5, + "end":9 + }, + { + "type":"string", + "reasons":[], 
+ "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":4,"column":7,"offset":40}, + "end":{"line":4,"column":9,"offset":43} + }, + "path":"predicates.js", + "line":4, + "endline":4, + "start":7, + "end":9 + }, + { + "type":"{|FOO: string|}", "reasons":[], "loc":{ "source":"predicates.js", "type":"SourceFile", - "start":{"line":3,"column":16,"offset":25}, - "end":{"line":3,"column":20,"offset":30} + "start":{"line":5,"column":5,"offset":52}, + "end":{"line":5,"column":5,"offset":53} }, "path":"predicates.js", - "line":3, - "endline":3, - "start":16, - "end":20 + "line":5, + "endline":5, + "start":5, + "end":5 }, { "type":"{|FOO: string|}", @@ -863,12 +2123,12 @@ "loc":{ "source":"predicates.js", "type":"SourceFile", - "start":{"line":4,"column":5,"offset":38}, - "end":{"line":4,"column":5,"offset":39} + "start":{"line":5,"column":5,"offset":52}, + "end":{"line":5,"column":5,"offset":53} }, "path":"predicates.js", - "line":4, - "endline":4, + "line":5, + "endline":5, "start":5, "end":5 }, @@ -878,29 +2138,29 @@ "loc":{ "source":"predicates.js", "type":"SourceFile", - "start":{"line":4,"column":5,"offset":38}, - "end":{"line":4,"column":9,"offset":43} + "start":{"line":5,"column":5,"offset":52}, + "end":{"line":5,"column":9,"offset":57} }, "path":"predicates.js", - "line":4, - "endline":4, + "line":5, + "endline":5, "start":5, "end":9 }, { - "type":"{|FOO: string|}", + "type":"boolean", "reasons":[], "loc":{ "source":"predicates.js", "type":"SourceFile", "start":{"line":5,"column":5,"offset":52}, - "end":{"line":5,"column":5,"offset":53} + "end":{"line":5,"column":15,"offset":63} }, "path":"predicates.js", "line":5, "endline":5, "start":5, - "end":5 + "end":15 }, { "type":"string", @@ -908,13 +2168,13 @@ "loc":{ "source":"predicates.js", "type":"SourceFile", - "start":{"line":5,"column":5,"offset":52}, + "start":{"line":5,"column":7,"offset":54}, "end":{"line":5,"column":9,"offset":57} }, "path":"predicates.js", "line":5, "endline":5, - "start":5, + "start":7, "end":9 }, { @@ -947,6 +2207,21 @@ "start":5, "end":5 }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":6,"column":5,"offset":72}, + "end":{"line":6,"column":5,"offset":73} + }, + "path":"predicates.js", + "line":6, + "endline":6, + "start":5, + "end":5 + }, { "type":"string", "reasons":[], @@ -962,6 +2237,36 @@ "start":5, "end":9 }, + { + "type":"boolean", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":6,"column":5,"offset":72}, + "end":{"line":6,"column":16,"offset":84} + }, + "path":"predicates.js", + "line":6, + "endline":6, + "start":5, + "end":16 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":6,"column":7,"offset":74}, + "end":{"line":6,"column":9,"offset":77} + }, + "path":"predicates.js", + "line":6, + "endline":6, + "start":7, + "end":9 + }, { "type":"string", "reasons":[], @@ -992,6 +2297,21 @@ "start":5, "end":5 }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":7,"column":5,"offset":93}, + "end":{"line":7,"column":5,"offset":94} + }, + "path":"predicates.js", + "line":7, + "endline":7, + "start":5, + "end":5 + }, { "type":"string", "reasons":[], @@ -1007,6 +2327,36 @@ "start":5, "end":9 }, + { + "type":"boolean", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + 
"start":{"line":7,"column":5,"offset":93}, + "end":{"line":7,"column":17,"offset":106} + }, + "path":"predicates.js", + "line":7, + "endline":7, + "start":5, + "end":17 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":7,"column":7,"offset":95}, + "end":{"line":7,"column":9,"offset":98} + }, + "path":"predicates.js", + "line":7, + "endline":7, + "start":7, + "end":9 + }, { "type":"null", "reasons":[], @@ -1037,6 +2387,21 @@ "start":5, "end":5 }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":8,"column":5,"offset":115}, + "end":{"line":8,"column":5,"offset":116} + }, + "path":"predicates.js", + "line":8, + "endline":8, + "start":5, + "end":5 + }, { "type":"string", "reasons":[], @@ -1052,6 +2417,51 @@ "start":5, "end":9 }, + { + "type":"boolean", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":8,"column":5,"offset":115}, + "end":{"line":8,"column":22,"offset":133} + }, + "path":"predicates.js", + "line":8, + "endline":8, + "start":5, + "end":22 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":8,"column":7,"offset":117}, + "end":{"line":8,"column":9,"offset":120} + }, + "path":"predicates.js", + "line":8, + "endline":8, + "start":7, + "end":9 + }, + { + "type":"void", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":8,"column":14,"offset":124}, + "end":{"line":8,"column":22,"offset":133} + }, + "path":"predicates.js", + "line":8, + "endline":8, + "start":14, + "end":22 + }, { "type":"void", "reasons":[], @@ -1083,7 +2493,52 @@ "end":9 }, { - "type":"(obj: any) => boolean", + "type":"class Array", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":9,"column":5,"offset":142}, + "end":{"line":9,"column":9,"offset":147} + }, + "path":"predicates.js", + "line":9, + "endline":9, + "start":5, + "end":9 + }, + { + "type":"(obj: mixed) => boolean", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":9,"column":5,"offset":142}, + "end":{"line":9,"column":17,"offset":155} + }, + "path":"predicates.js", + "line":9, + "endline":9, + "start":5, + "end":17 + }, + { + "type":"boolean", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":9,"column":5,"offset":142}, + "end":{"line":9,"column":24,"offset":162} + }, + "path":"predicates.js", + "line":9, + "endline":9, + "start":5, + "end":24 + }, + { + "type":"(obj: mixed) => boolean", "reasons":[], "loc":{ "source":"predicates.js", @@ -1112,6 +2567,21 @@ "start":19, "end":19 }, + { + "type":"{|FOO: string|}", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":9,"column":19,"offset":156}, + "end":{"line":9,"column":19,"offset":157} + }, + "path":"predicates.js", + "line":9, + "endline":9, + "start":19, + "end":19 + }, { "type":"string", "reasons":[], @@ -1126,5 +2596,20 @@ "endline":9, "start":19, "end":23 + }, + { + "type":"string", + "reasons":[], + "loc":{ + "source":"predicates.js", + "type":"SourceFile", + "start":{"line":9,"column":21,"offset":158}, + "end":{"line":9,"column":23,"offset":161} + }, + "path":"predicates.js", + "line":9, + "endline":9, + "start":21, + "end":23 } ] diff --git a/tests/duplicate_name/.flowconfig b/tests/duplicate_name/.flowconfig new file mode 
100644 index 00000000000..e69de29bb2d diff --git a/tests/duplicate_name/duplicate_name.exp b/tests/duplicate_name/duplicate_name.exp new file mode 100644 index 00000000000..82c78f49386 --- /dev/null +++ b/tests/duplicate_name/duplicate_name.exp @@ -0,0 +1,58 @@ +Error --------------------------------------------------------------------------------------------- type_then_var.js:4:5 + +Cannot declare `A` [1] because the name is already bound. + + type_then_var.js:4:5 + 4| var A: string + ^ + +References: + type_then_var.js:2:11 + 2| interface A {} + ^ [1] + + +Error --------------------------------------------------------------------------------------------- type_then_var.js:8:5 + +Cannot declare `B` [1] because the name is already bound. + + type_then_var.js:8:5 + 8| var B : string; + ^ + +References: + type_then_var.js:6:6 + 6| type B = number; + ^ [1] + + +Error -------------------------------------------------------------------------------------------- var_then_type.js:4:11 + +Cannot declare `A` [1] because the name is already bound. + + var_then_type.js:4:11 + 4| interface A {} + ^ + +References: + var_then_type.js:2:5 + 2| var A: string + ^ [1] + + +Error --------------------------------------------------------------------------------------------- var_then_type.js:8:6 + +Cannot declare `B` [1] because the name is already bound. + + var_then_type.js:8:6 + 8| type B = number; + ^ + +References: + var_then_type.js:6:5 + 6| var B : string; + ^ [1] + + + +Found 4 errors diff --git a/tests/duplicate_name/type_then_var.js b/tests/duplicate_name/type_then_var.js new file mode 100644 index 00000000000..ca93af5a7c3 --- /dev/null +++ b/tests/duplicate_name/type_then_var.js @@ -0,0 +1,8 @@ +// @flow +interface A {} + +var A: string + +type B = number; + +var B : string; diff --git a/tests/duplicate_name/var_then_type.js b/tests/duplicate_name/var_then_type.js new file mode 100644 index 00000000000..42213baf55f --- /dev/null +++ b/tests/duplicate_name/var_then_type.js @@ -0,0 +1,8 @@ +// @flow +var A: string + +interface A {} + +var B : string; + +type B = number; diff --git a/tests/dynamic_export/.flowconfig b/tests/dynamic_export/.flowconfig new file mode 100644 index 00000000000..d45c5d1fc4a --- /dev/null +++ b/tests/dynamic_export/.flowconfig @@ -0,0 +1,13 @@ +[lints] +dynamic-export=error + +[libs] +lib/ + +[options] +no_flowlib=false +suppress_comment=.*\\$FlowFixMe +module.system=haste +module.system.haste.use_name_reducers=true +module.system.haste.name_reducers='^.*/\([a-zA-Z0-9$_.-]+\.js\(\.flow\)?\)$' -> '\1' +module.system.haste.name_reducers='^\(.*\)\.js\(\.flow\)?$' -> '\1' diff --git a/tests/dynamic_export/.testconfig b/tests/dynamic_export/.testconfig new file mode 100644 index 00000000000..d31f7922cbe --- /dev/null +++ b/tests/dynamic_export/.testconfig @@ -0,0 +1 @@ +all: false diff --git a/tests/dynamic_export/basic.js b/tests/dynamic_export/basic.js new file mode 100644 index 00000000000..8ba8f992154 --- /dev/null +++ b/tests/dynamic_export/basic.js @@ -0,0 +1,4 @@ +// @flow +let x : any = 3; // error +let y : Function = () => {}; // error +module.exports = {any: x, function: y}; diff --git a/tests/dynamic_export/crash.js b/tests/dynamic_export/crash.js new file mode 100644 index 00000000000..490d40ab009 --- /dev/null +++ b/tests/dynamic_export/crash.js @@ -0,0 +1,3 @@ +// @flow +// This previously caused a crash because of locationless any type +Object.assign(module.exports, require('other')); diff --git a/tests/dynamic_export/cycle1.js b/tests/dynamic_export/cycle1.js new file 
mode 100644 index 00000000000..0dfece25348 --- /dev/null +++ b/tests/dynamic_export/cycle1.js @@ -0,0 +1,5 @@ +//@flow + +let c = require('cycle2'); +module.exports.f = c.f; +module.exports.g = (3 : any) diff --git a/tests/dynamic_export/cycle2.js b/tests/dynamic_export/cycle2.js new file mode 100644 index 00000000000..e97a20e561e --- /dev/null +++ b/tests/dynamic_export/cycle2.js @@ -0,0 +1,5 @@ +// @flow + +let c = require('cycle1'); +module.exports.f = (3 : any); +module.exports.g = c.g; diff --git a/tests/dynamic_export/dynamic_export.exp b/tests/dynamic_export/dynamic_export.exp new file mode 100644 index 00000000000..ab6bf6a16a4 --- /dev/null +++ b/tests/dynamic_export/dynamic_export.exp @@ -0,0 +1,298 @@ +Error ---------------------------------------------------------------------------------------------------- basic.js:4:24 + +Dynamic explicit 'any' [1] unsafely appears in exported `x` [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + basic.js:4:24 + 4| module.exports = {any: x, function: y}; + ^ + +References: + basic.js:2:9 + 2| let x : any = 3; // error + ^^^ [1] + basic.js:2:5 + 2| let x : any = 3; // error + ^ [2] + + +Error ---------------------------------------------------------------------------------------------------- basic.js:4:37 + +Dynamic function type [1] unsafely appears in exported `y` [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + basic.js:4:37 + 4| module.exports = {any: x, function: y}; + ^ + +References: + basic.js:3:9 + 3| let y : Function = () => {}; // error + ^^^^^^^^ [1] + basic.js:3:5 + 3| let y : Function = () => {}; // error + ^ [2] + + +Error ---------------------------------------------------------------------------------------------------- cycle1.js:4:1 + +Dynamic explicit 'any' [1] unsafely appears in exported .f [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + cycle1.js:4:1 + 4| module.exports.f = c.f; + ^^^^^^^^^^^^^^^^ [2] + +References: + cycle2.js:4:25 + 4| module.exports.f = (3 : any); + ^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- cycle1.js:5:1 + +Dynamic explicit 'any' [1] unsafely appears in exported .g [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + cycle1.js:5:1 + 5| module.exports.g = (3 : any) + ^^^^^^^^^^^^^^^^ [2] + +References: + cycle1.js:5:25 + 5| module.exports.g = (3 : any) + ^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- cycle2.js:4:1 + +Dynamic explicit 'any' [1] unsafely appears in exported .f [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + cycle2.js:4:1 + 4| module.exports.f = (3 : any); + ^^^^^^^^^^^^^^^^ [2] + +References: + cycle2.js:4:25 + 4| module.exports.f = (3 : any); + ^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- cycle2.js:5:1 + +Dynamic explicit 'any' [1] unsafely appears in exported .g [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + cycle2.js:5:1 + 5| module.exports.g = c.g; + ^^^^^^^^^^^^^^^^ [2] + +References: + cycle1.js:5:25 + 5| module.exports.g = (3 : any) + ^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- error.js:3:21 + +Cannot resolve module `./does_not_exist.js`. 
+ + 3| const err = require('./does_not_exist.js') + ^^^^^^^^^^^^^^^^^^^^^ + + +Error ---------------------------------------------------------------------------------------------------- error.js:5:18 + +Dynamic implicit 'any' [1] unsafely appears in exported `err` [2]. This can cause importing modules to lose type +coverage! (`dynamic-export`) + + error.js:5:18 + 5| module.exports = err; + ^^^ [2] + +References: + error.js:3:13 + 3| const err = require('./does_not_exist.js') + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------- esmodule.js:3:17 + +Dynamic explicit 'any' [1] unsafely appears in exported function [2]. This can cause importing modules to lose type +coverage! (`dynamic-export`) + + esmodule.js:3:17 + 3| export function foo (x : any) {} + ^^^ + +References: + esmodule.js:3:26 + 3| export function foo (x : any) {} + ^^^ [1] + esmodule.js:3:8 + 3| export function foo (x : any) {} + ^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------------- esmodule.js:5:9 + +Dynamic explicit 'any' [1] unsafely appears in exported `x` [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + esmodule.js:5:9 + 5| export {x as field} + ^^^^^^^^^^ + +References: + esmodule.js:4:9 + 4| let x : any = 3; + ^^^ [1] + esmodule.js:4:5 + 4| let x : any = 3; + ^ [2] + + +Error ----------------------------------------------------------------------------------------------- not_object.js:3:18 + +Dynamic explicit 'any' [1] unsafely appears in exported `x` [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + not_object.js:3:18 + 3| module.exports = x; + ^ + +References: + not_object.js:2:9 + 2| let x : any = 3; // error + ^^^ [1] + not_object.js:2:5 + 2| let x : any = 3; // error + ^ [2] + + +Error ------------------------------------------------------------------------------------------------ transitive.js:9:1 + +Dynamic explicit 'any' [1] unsafely appears in exported .explicit_any [2]. This can cause importing modules to lose type +coverage! (`dynamic-export`) + + transitive.js:9:1 + 9| module.exports.explicit_any = e.any; // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + basic.js:2:9 + 2| let x : any = 3; // error + ^^^ [1] + + +Error ----------------------------------------------------------------------------------------------- transitive.js:10:1 + +Dynamic function type [1] unsafely appears in exported .explicit_function [2]. This can cause importing modules to lose +type coverage! (`dynamic-export`) + + transitive.js:10:1 + 10| module.exports.explicit_function = e.function; // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + basic.js:3:9 + 3| let y : Function = () => {}; // error + ^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------- transitive.js:12:1 + +Dynamic implicit 'any' [1] unsafely appears in exported .untyped [2]. This can cause importing modules to lose type +coverage! (`dynamic-export`) + + transitive.js:12:1 + 12| module.exports.untyped = u; // error + ^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + transitive.js:4:11 + 4| const u = require('untyped'); + ^^^^^^^^^^^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------- transitive.js:13:1 + +Dynamic explicit 'any' [1] unsafely appears in exported .lib [2]. 
This can cause importing modules to lose type +coverage! (`dynamic-export`) + + transitive.js:13:1 + 13| module.exports.lib = L.v; // error + ^^^^^^^^^^^^^^^^^^ [2] + +References: + lib/libdef.js:3:19 + 3| declare var v : any; + ^^^ [1] + + +Error ----------------------------------------------------------------------------------------------- transitive.js:14:1 + +Dynamic `Type` [1] unsafely appears in exported .field [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + transitive.js:14:1 + 14| module.exports.field = f; // error + ^^^^^^^^^^^^^^^^^^^^ [2] + +References: + type.js.flow:4:30 + 4| declare module.exports: {f : Type}; + ^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------- transitive.js:16:1 + +Dynamic `Type` [1] unsafely appears in exported .x [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + transitive.js:16:1 + 16| module.exports.x = x; + ^^^^^^^^^^^^^^^^ [2] + +References: + type.js.flow:2:20 + 2| export type Type = any; + ^^^ [1] + + +Error ------------------------------------------------------------------------------------------------- type.js.flow:2:1 + +Dynamic `Type` [1] unsafely appears in exported `Type` [2]. This can cause importing modules to lose type coverage! +(`dynamic-export`) + + type.js.flow:2:1 + 2| export type Type = any; + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + type.js.flow:2:20 + 2| export type Type = any; + ^^^ [1] + type.js.flow:2:13 + 2| export type Type = any; + ^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------ type.js.flow:4:30 + +Dynamic `Type` [1] unsafely appears in exported `Type` [2]. This can cause importing modules to lose type coverage! 
+(`dynamic-export`) + + type.js.flow:4:30 + 4| declare module.exports: {f : Type}; + ^^^^ [2] + +References: + type.js.flow:2:20 + 2| export type Type = any; + ^^^ [1] + + + +Found 19 errors diff --git a/tests/dynamic_export/error.js b/tests/dynamic_export/error.js new file mode 100644 index 00000000000..6c3cfefe0d8 --- /dev/null +++ b/tests/dynamic_export/error.js @@ -0,0 +1,5 @@ +//@flow + +const err = require('./does_not_exist.js') + +module.exports = err; diff --git a/tests/dynamic_export/esmodule.js b/tests/dynamic_export/esmodule.js new file mode 100644 index 00000000000..e82442906aa --- /dev/null +++ b/tests/dynamic_export/esmodule.js @@ -0,0 +1,8 @@ +//@flow + +export function foo (x : any) {} +let x : any = 3; +export {x as field} + +export function a () {} +export class A {} diff --git a/tests/dynamic_export/implicit_ignored.js b/tests/dynamic_export/implicit_ignored.js new file mode 100644 index 00000000000..c473068a93a --- /dev/null +++ b/tests/dynamic_export/implicit_ignored.js @@ -0,0 +1,2 @@ +// @flow +module.exports = (() => {}).constructor; diff --git a/tests/dynamic_export/lib/libdef.js b/tests/dynamic_export/lib/libdef.js new file mode 100644 index 00000000000..28989dc12c2 --- /dev/null +++ b/tests/dynamic_export/lib/libdef.js @@ -0,0 +1,4 @@ +// @flow +declare module libdef { + declare var v : any; +} diff --git a/tests/dynamic_export/not_object.js b/tests/dynamic_export/not_object.js new file mode 100644 index 00000000000..b6caabede2a --- /dev/null +++ b/tests/dynamic_export/not_object.js @@ -0,0 +1,3 @@ +// @flow +let x : any = 3; // error +module.exports = x; diff --git a/tests/dynamic_export/other.js b/tests/dynamic_export/other.js new file mode 100644 index 00000000000..19c7f716266 --- /dev/null +++ b/tests/dynamic_export/other.js @@ -0,0 +1,3 @@ +//@flow +//$FlowFixMe can't report this error because the message depends on the machine running the test +module.exports = (3 : any); diff --git a/tests/dynamic_export/transitive.js b/tests/dynamic_export/transitive.js new file mode 100644 index 00000000000..5ca38babcc6 --- /dev/null +++ b/tests/dynamic_export/transitive.js @@ -0,0 +1,16 @@ +// @flow +const e = require('basic'); +const i = require('implicit_ignored'); +const u = require('untyped'); +const L = require('libdef'); +import type { Type } from 'type'; +import { f } from 'type'; + +module.exports.explicit_any = e.any; // error +module.exports.explicit_function = e.function; // error +module.exports.implicit_ignore = i; +module.exports.untyped = u; // error +module.exports.lib = L.v; // error +module.exports.field = f; // error +let x : Type = 3; +module.exports.x = x; diff --git a/tests/dynamic_export/type.js.flow b/tests/dynamic_export/type.js.flow new file mode 100644 index 00000000000..a6cb867b424 --- /dev/null +++ b/tests/dynamic_export/type.js.flow @@ -0,0 +1,4 @@ +//@flow +export type Type = any; + +declare module.exports: {f : Type}; diff --git a/tests/dynamic_export/untyped.js b/tests/dynamic_export/untyped.js new file mode 100644 index 00000000000..690aad34a46 --- /dev/null +++ b/tests/dynamic_export/untyped.js @@ -0,0 +1 @@ +module.exports = 3; diff --git a/tests/ensure_parsed_no_saved_state/ensure_parsed_no_saved_state.exp b/tests/ensure_parsed_no_saved_state/ensure_parsed_no_saved_state.exp index 919122fe92d..33e5bc79f40 100644 --- a/tests/ensure_parsed_no_saved_state/ensure_parsed_no_saved_state.exp +++ b/tests/ensure_parsed_no_saved_state/ensure_parsed_no_saved_state.exp @@ -15,7 +15,7 @@ To learn more, visit flow.org/en/docs/lang/lazy-modes ==== 
Now we see the parse error ==== Error --------------------------------------------------------------------------------------------------------- B.js:4:1 -Unexpected end of input +Unexpected end of input, expected the token `(` 4| diff --git a/tests/ensure_parsed_saved_state/ensure_parsed_saved_state.exp b/tests/ensure_parsed_saved_state/ensure_parsed_saved_state.exp index 49090e849c9..99128de6d10 100644 --- a/tests/ensure_parsed_saved_state/ensure_parsed_saved_state.exp +++ b/tests/ensure_parsed_saved_state/ensure_parsed_saved_state.exp @@ -8,7 +8,7 @@ To learn more, visit flow.org/en/docs/lang/lazy-modes ==== ensure_parsed notices that B has changed ==== Error --------------------------------------------------------------------------------------------------------- B.js:4:1 -Unexpected end of input +Unexpected end of input, expected the token `(` 4| diff --git a/tests/enumerror/enum-crash.js b/tests/enumerror/enum-crash.js index 19868ba5acb..cbb17a22b76 100644 --- a/tests/enumerror/enum-crash.js +++ b/tests/enumerror/enum-crash.js @@ -1,14 +1,12 @@ /** - * Copyright 2004-present Facebook. All Rights Reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * @flow * @format */ import type {Enum} from 'somewhere'; -function bar( - e: Enum, -) { +function bar(e: Enum) { switch (e) { case Enum.FOO: return 0; diff --git a/tests/enumerror/enumerror.exp b/tests/enumerror/enumerror.exp index 3bffd6fda57..62a468cce6f 100644 --- a/tests/enumerror/enumerror.exp +++ b/tests/enumerror/enumerror.exp @@ -6,22 +6,22 @@ Cannot resolve module `somewhere`. ^^^^^^^^^^^ -Error ----------------------------------------------------------------------------------------------- enum-crash.js:13:5 +Error ----------------------------------------------------------------------------------------------- enum-crash.js:11:5 Cannot refine `Enum` as a value. v------------- - 13| case Enum.FOO: - 14| return 0; + 11| case Enum.FOO: + 12| return 0; --------^ -Error ---------------------------------------------------------------------------------------------- enum-crash.js:13:10 +Error ---------------------------------------------------------------------------------------------- enum-crash.js:11:10 Cannot reference type `Enum` [1] from a value position. - enum-crash.js:13:10 - 13| case Enum.FOO: + enum-crash.js:11:10 + 11| case Enum.FOO: ^^^^ References: diff --git a/tests/enums/.flowconfig b/tests/enums/.flowconfig new file mode 100644 index 00000000000..a424026f73e --- /dev/null +++ b/tests/enums/.flowconfig @@ -0,0 +1,8 @@ +[ignore] + +[include] + +[libs] + +[options] +experimental.enums=true diff --git a/tests/enums/enums.exp b/tests/enums/enums.exp new file mode 100644 index 00000000000..5131ad2fc3c --- /dev/null +++ b/tests/enums/enums.exp @@ -0,0 +1,16 @@ +Error -------------------------------------------------------------------------------------------- error-reassign.js:4:1 + +Cannot reassign enum `E` [1]. 
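The new enums suite is gated behind `experimental.enums=true` in its .flowconfig; its fixtures (error-reassign.js and valid.js, just below) check that an enum binding cannot be reassigned and that an enum's name can be used as a type ahead of its declaration. A minimal sketch with an illustrative enum name:

    // @flow
    // requires experimental.enums=true, as in the suite's .flowconfig
    enum Status {
      Active,
      Inactive,
    }
    const s: Status = Status.Active; // members are accessed through the enum name
    Status = 0;                      // error: cannot reassign an enum binding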
+ + error-reassign.js:4:1 + 4| E = 0; + ^ + +References: + error-reassign.js:3:6 + 3| enum E {} + ^ [1] + + + +Found 1 error diff --git a/tests/enums/error-reassign.js b/tests/enums/error-reassign.js new file mode 100644 index 00000000000..cb24a097c98 --- /dev/null +++ b/tests/enums/error-reassign.js @@ -0,0 +1,4 @@ +// @flow + +enum E {} +E = 0; diff --git a/tests/enums/valid.js b/tests/enums/valid.js new file mode 100644 index 00000000000..a509d748c27 --- /dev/null +++ b/tests/enums/valid.js @@ -0,0 +1,12 @@ +// @flow + +enum E { + A, + B, +} + +const a: E = E.A; + +// Use an enum name as a type before its declaration +declare var x: E2; +enum E2 {} diff --git a/tests/equality/.flowconfig b/tests/equality/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/equality/equality.exp b/tests/equality/equality.exp new file mode 100644 index 00000000000..02b8ae492d8 --- /dev/null +++ b/tests/equality/equality.exp @@ -0,0 +1,155 @@ +Error ------------------------------------------------------------------------------------------------------ test.js:7:7 + +Cannot compare number literal `3` [1] to string literal `A` [2]. + + test.js:7:7 + 7| if (o1 == o2) { + ^^ + +References: + test.js:6:20 + 6| function foo (o1 : T, o2 : U) { + ^ [1] + test.js:6:28 + 6| function foo (o1 : T, o2 : U) { + ^ [2] + + +Error ------------------------------------------------------------------------------------------------------ test.js:7:7 + +Cannot compare number literal `3` [1] to string literal `B` [2]. + + test.js:7:7 + 7| if (o1 == o2) { + ^^ + +References: + test.js:6:20 + 6| function foo (o1 : T, o2 : U) { + ^ [1] + test.js:6:28 + 6| function foo (o1 : T, o2 : U) { + ^ [2] + + +Error ------------------------------------------------------------------------------------------------------ test.js:7:7 + +Cannot compare number literal `4` [1] to string literal `A` [2]. + + test.js:7:7 + 7| if (o1 == o2) { + ^^ + +References: + test.js:6:20 + 6| function foo (o1 : T, o2 : U) { + ^ [1] + test.js:6:28 + 6| function foo (o1 : T, o2 : U) { + ^ [2] + + +Error ------------------------------------------------------------------------------------------------------ test.js:7:7 + +Cannot compare number literal `4` [1] to string literal `B` [2]. + + test.js:7:7 + 7| if (o1 == o2) { + ^^ + +References: + test.js:6:20 + 6| function foo (o1 : T, o2 : U) { + ^ [1] + test.js:6:28 + 6| function foo (o1 : T, o2 : U) { + ^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:16:7 + +Cannot compare number literal `1` [1] to string literal `B` [2]. + + test.js:16:7 + 16| if (o1 == o2) { + ^^ + +References: + test.js:15:20 + 15| function foo (o1 : A, o2 : B) { + ^ [1] + test.js:15:28 + 15| function foo (o1 : A, o2 : B) { + ^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:16:7 + +Cannot compare number literal `1` [1] to string literal `C` [2]. + + test.js:16:7 + 16| if (o1 == o2) { + ^^ + +References: + test.js:15:20 + 15| function foo (o1 : A, o2 : B) { + ^ [1] + test.js:15:28 + 15| function foo (o1 : A, o2 : B) { + ^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:16:7 + +Cannot compare number literal `2` [1] to string literal `B` [2]. 
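The equality suite introduced here checks that `==` between values whose types are disjoint unions of literals is reported member-by-member: each number literal on one side is compared against each string literal on the other. A small sketch with illustrative type names:

    // @flow
    type Flag = 'on' | 'off';
    type Code = 1 | 2;
    declare var flag: Flag;
    declare var code: Code;
    if (flag == code) {} // errors: a string literal can never loosely equal a number literal

The last fixture in test.js (`const i = cond ? 1 : -1; if (i >= 0) {}`) confirms that ordinary numeric comparisons remain unaffected.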
+ + test.js:16:7 + 16| if (o1 == o2) { + ^^ + +References: + test.js:15:20 + 15| function foo (o1 : A, o2 : B) { + ^ [1] + test.js:15:28 + 15| function foo (o1 : A, o2 : B) { + ^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:16:7 + +Cannot compare number literal `2` [1] to string literal `C` [2]. + + test.js:16:7 + 16| if (o1 == o2) { + ^^ + +References: + test.js:15:20 + 15| function foo (o1 : A, o2 : B) { + ^ [1] + test.js:15:28 + 15| function foo (o1 : A, o2 : B) { + ^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:16:7 + +Cannot compare string literal `A` [1] to number literal `3` [2]. + + test.js:16:7 + 16| if (o1 == o2) { + ^^ + +References: + test.js:15:20 + 15| function foo (o1 : A, o2 : B) { + ^ [1] + test.js:15:28 + 15| function foo (o1 : A, o2 : B) { + ^ [2] + + + +Found 9 errors diff --git a/tests/equality/test.js b/tests/equality/test.js new file mode 100644 index 00000000000..442f2ca00eb --- /dev/null +++ b/tests/equality/test.js @@ -0,0 +1,23 @@ +// @flow + +type T = 3 | 4; +type U = "A" | "B"; + +function foo (o1 : T, o2 : U) { + if (o1 == o2) { + + } +} + +type A = "A" | 1 | 2; +type B = "B" | 3 | "C"; + +function foo (o1 : A, o2 : B) { + if (o1 == o2) { + + } +} + +declare var cond : boolean; +const i = cond ? 1 : -1; +if (i >= 0) {} diff --git a/tests/error_messages/error_messages.exp b/tests/error_messages/error_messages.exp index 843fda4b8ca..5217bc4be5d 100644 --- a/tests/error_messages/error_messages.exp +++ b/tests/error_messages/error_messages.exp @@ -6,5 +6,21 @@ Cannot resolve name `define`. ^^^^^^ +Error ------------------------------------------------------------------------------------------- long_string_lit.js:7:2 -Found 1 error +Cannot assign `'aaaaaaaaaa...'` to `long_lit` because string [1] is incompatible with string literal +`aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa` [2] because strings longer than `100` characters are not treated as literals. 
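The long_string_lit fixture pins down the point at which Flow stops tracking singleton string literal types: a literal longer than 100 characters is widened to plain `string`, so it no longer satisfies an annotation naming that same literal. A schematic sketch (no 100+ character literal reproduced here):

    // @flow
    const short: 'abc' = 'abc'; // ok: short literals keep their singleton type
    // A string literal longer than 100 characters is typed as `string` rather than as a
    // literal type, so the assignment in long_string_lit.js above is reported as
    // "string is incompatible with string literal ...".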
+ + long_string_lit.js:7:2 + 7| 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + long_string_lit.js:5:2 + 5| 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + + +Found 2 errors diff --git a/tests/error_messages/long_string_lit.js b/tests/error_messages/long_string_lit.js new file mode 100644 index 00000000000..04f016fd3bf --- /dev/null +++ b/tests/error_messages/long_string_lit.js @@ -0,0 +1,7 @@ +//@flow + + +const long_lit: + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' += + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' diff --git a/tests/es_declare_module/es_declare_module.exp b/tests/es_declare_module/es_declare_module.exp index 30149ecbc30..ca82d100ec6 100644 --- a/tests/es_declare_module/es_declare_module.exp +++ b/tests/es_declare_module/es_declare_module.exp @@ -215,5 +215,13 @@ Cannot import `exports` because there is no `exports` export in `ES`. ^^^^^^^ +Error --------------------------------------------------------------------------------------- es_declare_module.js:43:14 -Found 16 errors +Cannot import `Foo` because there is no `Foo` export in `re-export`. 
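The es_declare_module addition checks that a type imported inside one `declare module` block is not implicitly re-exported from it. A minimal libdef-style sketch (module names are illustrative; this mirrors flow-typed/declares.js and imported_declares.js above):

    // contents of a flow-typed libdef file
    declare module 'base-lib' {
      declare type Width = number;
    }
    declare module 'wrapper-lib' {
      import type {Width} from 'base-lib';
      // Width is usable inside this block, but it is not an export of 'wrapper-lib',
      // so `import type {Width} from 'wrapper-lib'` in user code is rejected.
    }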
+ + 43| import type {Foo} from 're-export'; // Error: imports are not explicitly exported + ^^^ + + + +Found 17 errors diff --git a/tests/es_declare_module/es_declare_module.js b/tests/es_declare_module/es_declare_module.js index 6395bceca7d..22408e01edd 100644 --- a/tests/es_declare_module/es_declare_module.js +++ b/tests/es_declare_module/es_declare_module.js @@ -39,3 +39,5 @@ import type {T as T2} from "ES"; ('asdf': T2); // Error: string ~> number import {exports as nope} from "ES"; // Error: Not an export + +import type {Foo} from 're-export'; // Error: imports are not explicitly exported diff --git a/tests/es_declare_module/flow-typed/declares.js b/tests/es_declare_module/flow-typed/declares.js index ae3e2fba1c1..946f66b595d 100644 --- a/tests/es_declare_module/flow-typed/declares.js +++ b/tests/es_declare_module/flow-typed/declares.js @@ -19,3 +19,7 @@ declare module "ES" { declare export type T = number; declare var exports: number; } + +declare module "re-export" { + import type {Foo} from 'to-import'; +} diff --git a/tests/es_declare_module/flow-typed/imported_declares.js b/tests/es_declare_module/flow-typed/imported_declares.js new file mode 100644 index 00000000000..b2134dc3483 --- /dev/null +++ b/tests/es_declare_module/flow-typed/imported_declares.js @@ -0,0 +1,3 @@ +declare module 'to-import' { + declare type Foo = number; +} diff --git a/tests/esproposal_decorators.warn/esproposal_decorators.warn.exp b/tests/esproposal_decorators.warn/esproposal_decorators.warn.exp index fb1ea6754b3..2dbbbfe1e50 100644 --- a/tests/esproposal_decorators.warn/esproposal_decorators.warn.exp +++ b/tests/esproposal_decorators.warn/esproposal_decorators.warn.exp @@ -21,11 +21,18 @@ Error -------------------------------------------------------------------------- Experimental decorator usage. Decorators are an early stage proposal that may change. Additionally, Flow does not account for the type implications of decorators at this time. - v---------- 8| @decorator2 + ^^^^^^^^^^^ + + +Error ------------------------------------------------------------------------------------------------------ test.js:9:3 + +Experimental decorator usage. Decorators are an early stage proposal that may change. Additionally, Flow does not +account for the type implications of decorators at this time. + 9| @decorator3 - ----------^ + ^^^^^^^^^^^ -Found 3 errors +Found 4 errors diff --git a/tests/exact/exact.exp b/tests/exact/exact.exp index 99ca6e2b1e8..949615f1ce6 100644 --- a/tests/exact/exact.exp +++ b/tests/exact/exact.exp @@ -38,7 +38,8 @@ References: Error ------------------------------------------------------------------------------------------------ callable.js:17:26 -Cannot assign `f` to `g` because a callable signature is missing in object type [1] but exists in object type [2]. +Cannot assign `f` to `g` because a call signature declaring the expected parameter / return type is missing in object +type [1] but exists in object type [2]. 
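The reworded exact.exp message refers to call signatures on object types, i.e. a function-typed entry with no property name. The shape that callable.js relies on looks roughly like the following; the alias name is illustrative and `f`'s real definition sits outside this hunk:

    // @flow
    type Callable = { (x: number): string, x: string };
    declare var f: Callable;
    const g: {| x: string |} = f; // error, mirroring callable.js:17: the exact target
                                  // declares no call signature, but f's type does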
callable.js:17:26 17| var g: {| x: string |} = f; // error: callable signature in f missing in g diff --git a/tests/exact_by_default/.flowconfig b/tests/exact_by_default/.flowconfig new file mode 100644 index 00000000000..ef82d27b939 --- /dev/null +++ b/tests/exact_by_default/.flowconfig @@ -0,0 +1,5 @@ +[options] +exact_by_default=true + +[lints] +implicit-inexact-object=error diff --git a/tests/exact_by_default/exact_by_default.exp b/tests/exact_by_default/exact_by_default.exp new file mode 100644 index 00000000000..7c50cdc70e0 --- /dev/null +++ b/tests/exact_by_default/exact_by_default.exp @@ -0,0 +1,35 @@ +Error ----------------------------------------------------------------------------------------------------- test.js:4:26 + +Cannot assign object literal to `x` because property `bar` is missing in object type [1] but exists in object +literal [2]. + + test.js:4:26 + 4| const x: {foo: number} = {foo: 3, bar: 3}; // Error, {foo: number} is exact so can't include bar + ^^^^^^^^^^^^^^^^ [2] + +References: + test.js:4:10 + 4| const x: {foo: number} = {foo: 3, bar: 3}; // Error, {foo: number} is exact so can't include bar + ^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ test.js:9:6 + +Cannot call `test` with `inexact` bound to `x` because inexact object type [1] is incompatible with exact object +type [2]. + + test.js:9:6 + 9| test(inexact); // Error inexact ~> exact + ^^^^^^^ + +References: + test.js:8:16 + 8| const inexact: {foo: number, ...} = {foo: 3, bar: 3}; + ^^^^^^^^^^^^^^^^^^ [1] + test.js:6:18 + 6| function test(x: {foo: number}) {} + ^^^^^^^^^^^^^ [2] + + + +Found 2 errors diff --git a/tests/exact_by_default/test.js b/tests/exact_by_default/test.js new file mode 100644 index 00000000000..8b045eb5ab7 --- /dev/null +++ b/tests/exact_by_default/test.js @@ -0,0 +1,15 @@ +//@flow + +// Note, no lint error because {foo: number} is now exact! +const x: {foo: number} = {foo: 3, bar: 3}; // Error, {foo: number} is exact so can't include bar + +function test(x: {foo: number}) {} + +const inexact: {foo: number, ...} = {foo: 3, bar: 3}; +test(inexact); // Error inexact ~> exact + +const exact: {foo: number} = {foo: 3}; + +const alsoExact: {| foo: number |} = exact; + +const inexact2: {foo: number, ...} = alsoExact; diff --git a/tests/explicit_inexact/.flowconfig b/tests/explicit_inexact/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/explicit_inexact/explicit_inexact.exp b/tests/explicit_inexact/explicit_inexact.exp new file mode 100644 index 00000000000..613376a8723 --- /dev/null +++ b/tests/explicit_inexact/explicit_inexact.exp @@ -0,0 +1,21 @@ +Error ------------------------------------------------------------------------------------------------------ test.js:8:2 + +Cannot cast `x` to object type because: + - inexact `U` [1] is incompatible with exact object type [2]. + - property `foo` is missing in object type [2] but exists in `U` [1]. + + test.js:8:2 + 8| (x: {||}); // Error, inexact vs. exact + ^ + +References: + test.js:5:16 + 5| declare var x: U; + ^ [1] + test.js:8:5 + 8| (x: {||}); // Error, inexact vs. 
exact + ^^^^ [2] + + + +Found 2 errors diff --git a/tests/explicit_inexact/test.js b/tests/explicit_inexact/test.js new file mode 100644 index 00000000000..ec7c8b273a1 --- /dev/null +++ b/tests/explicit_inexact/test.js @@ -0,0 +1,8 @@ +//@flow +type T = {...}; +type U = {foo: number, ...}; + +declare var x: U; +(x: T); // Ok, by width subtyping + +(x: {||}); // Error, inexact vs. exact diff --git a/tests/export_type/export_type.exp b/tests/export_type/export_type.exp index 2e7f413c171..a4f622bc191 100644 --- a/tests/export_type/export_type.exp +++ b/tests/export_type/export_type.exp @@ -102,6 +102,34 @@ References: ^^^^^^ [2] +Error ------------------------------------------------------------------------------------------------- importer.js:41:5 + +Cannot reference type `ClsType` [1] from a value position. + + importer.js:41:5 + 41| new ClsType(); + ^^^^^^^ + +References: + importer.js:40:21 + 40| import type {Cls as ClsType} from './values_as_type_exports'; + ^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------- importer.js:43:2 + +Cannot cast `5` to `ClsType` because number [1] is incompatible with `Cls` [2]. + + importer.js:43:2 + 43| (5: ClsType); + ^ [1] + +References: + importer.js:43:5 + 43| (5: ClsType); + ^^^^^^^ [2] + + Error ----------------------------------------------------------------------------------------------- types_only.js:5:23 Cannot assign `'asdf'` to `b` because string [1] is incompatible with number [2]. @@ -130,5 +158,21 @@ References: ^^^^^^^^^^^^^^^ [1] +Error ----------------------------------------------------------------------------------- values_as_type_exports.js:4:14 + +Cannot export the value `num` as a type. + + 4| export type {num}; + ^^^ + + +Error ----------------------------------------------------------------------------------- values_as_type_exports.js:7:14 + +Cannot export the value `fun` as a type. + + 7| export type {fun} + ^^^ + + -Found 9 errors +Found 13 errors diff --git a/tests/export_type/importer.js b/tests/export_type/importer.js index cb35f910304..8a65db7924e 100644 --- a/tests/export_type/importer.js +++ b/tests/export_type/importer.js @@ -30,3 +30,14 @@ var l: IFoo = {prop: 'asdf'}; // Error: {prop:string} ~> {prop:number} var m: IFoo2 = {prop: 'asdf'}; var n: IFoo2 = {prop: 42}; // Error: {prop:number} ~> {prop:string} + +import {clsInstance} from './values_as_type_exports'; + +// Should be an error, but currently isn't +import {Cls as ClsValue} from './values_as_type_exports'; +new ClsValue(); + +import type {Cls as ClsType} from './values_as_type_exports'; +new ClsType(); +(clsInstance: ClsType); +(5: ClsType); diff --git a/tests/export_type/values_as_type_exports.js b/tests/export_type/values_as_type_exports.js new file mode 100644 index 00000000000..b9338e074fd --- /dev/null +++ b/tests/export_type/values_as_type_exports.js @@ -0,0 +1,16 @@ +// @flow + +const num = 42; +export type {num}; + +function fun() {} +export type {fun} + +// This one is actually okay, since classes can be used as both values and +// types. However, we should make sure that importers only end up with the type. 
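The values_as_type_exports fixture (continuing below) distinguishes what `export type {...}` may name: a plain value cannot be exported as a type, while a class can, because a class declaration creates both a value and a type, and importers of the type-export receive only the type side. A sketch with illustrative names:

    // @flow
    const limit = 10;
    export type {limit}; // error: cannot export the value `limit` as a type
    class Model {}
    export type {Model}; // ok: importers get the type `Model`, not the constructor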
+class Cls {} +export type {Cls} + +// Exported for testing the imported type of Cls +const clsInstance = new Cls(); +export {clsInstance} diff --git a/tests/extends/.flowconfig b/tests/extends/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/extends/extends.exp b/tests/extends/extends.exp new file mode 100644 index 00000000000..fcb2f82f0d8 --- /dev/null +++ b/tests/extends/extends.exp @@ -0,0 +1,36 @@ +Error ------------------------------------------------------------------------------------------------------ test.js:8:1 + +Cannot extend `C` [1] with `B` because string [1] is not inheritable. + + test.js:8:1 + 8| class B extends C {} + ^^^^^^^^^^^^^^^^^^^^ + +References: + test.js:8:17 + 8| class B extends C {} + ^ [1] + + +Error ---------------------------------------------------------------------------------------------------- test.js:13:30 + +`B` [1] is incompatible with `A` [2]. + + test.js:13:30 + 13| invariant(value instanceof B); + ^ + +References: + test.js:8:7 + 8| class B extends C {} + ^ [1] + test.js:5:18 + 5| type AOrString = A | string; + ^ [2] + + + +Found 2 errors + +Only showing the most relevant union/intersection branches. +To see all branches, re-run Flow with --show-all-branches diff --git a/tests/extends/test.js b/tests/extends/test.js new file mode 100644 index 00000000000..18e007ddfd5 --- /dev/null +++ b/tests/extends/test.js @@ -0,0 +1,14 @@ +// @flow + +declare class A {} + +type AOrString = A | string; + +declare var C: Class; +class B extends C {} + +function invariant(x) {} + +function foo(value: AOrString) { + invariant(value instanceof B); +} diff --git a/tests/facebook_fbs_none/.flowconfig b/tests/facebook_fbs_none/.flowconfig new file mode 100644 index 00000000000..de38d19537d --- /dev/null +++ b/tests/facebook_fbs_none/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false diff --git a/tests/facebook_fbs_none/facebook_fbs_none.exp b/tests/facebook_fbs_none/facebook_fbs_none.exp new file mode 100644 index 00000000000..804158ea941 --- /dev/null +++ b/tests/facebook_fbs_none/facebook_fbs_none.exp @@ -0,0 +1,16 @@ +Error ------------------------------------------------------------------------------------------------------ main.js:4:2 + +Cannot cast `` to number because `React.Element` [1] is incompatible with number [2]. + + main.js:4:2 + 4| (: number); // Error: ReactElement ~> number + ^^^^^^^ [1] + +References: + main.js:4:11 + 4| (: number); // Error: ReactElement ~> number + ^^^^^^ [2] + + + +Found 1 error diff --git a/tests/facebook_fbs_none/main.js b/tests/facebook_fbs_none/main.js new file mode 100644 index 00000000000..97869939e96 --- /dev/null +++ b/tests/facebook_fbs_none/main.js @@ -0,0 +1,4 @@ +// @flow +var React = require('react'); +(: React$Element<*>); +(: number); // Error: ReactElement ~> number diff --git a/tests/facebook_fbs_some/.flowconfig b/tests/facebook_fbs_some/.flowconfig new file mode 100644 index 00000000000..2dae1ba07e5 --- /dev/null +++ b/tests/facebook_fbs_some/.flowconfig @@ -0,0 +1,2 @@ +[options] +facebook.fbs=Fbs diff --git a/tests/facebook_fbs_some/facebook_fbs_some.exp b/tests/facebook_fbs_some/facebook_fbs_some.exp new file mode 100644 index 00000000000..fd28163bc60 --- /dev/null +++ b/tests/facebook_fbs_some/facebook_fbs_some.exp @@ -0,0 +1,19 @@ +Error ------------------------------------------------------------------------------------------------------ main.js:4:2 + +Cannot cast `` to string because number [1] is incompatible with string [2]. 
+ + main.js:4:2 + 4| (: string); // Error (the libdef in this test marks fbs as number) + ^^^^^^^ + +References: + flow-typed/fbs.js:1:12 + 1| type Fbs = number + ^^^^^^ [1] + main.js:4:11 + 4| (: string); // Error (the libdef in this test marks fbs as number) + ^^^^^^ [2] + + + +Found 1 error diff --git a/tests/facebook_fbs_some/flow-typed/fbs.js b/tests/facebook_fbs_some/flow-typed/fbs.js new file mode 100644 index 00000000000..a0c51559a5e --- /dev/null +++ b/tests/facebook_fbs_some/flow-typed/fbs.js @@ -0,0 +1 @@ +type Fbs = number diff --git a/tests/facebook_fbs_some/main.js b/tests/facebook_fbs_some/main.js new file mode 100644 index 00000000000..cc84229762d --- /dev/null +++ b/tests/facebook_fbs_some/main.js @@ -0,0 +1,4 @@ +// @flow + +(: number); +(: string); // Error (the libdef in this test marks fbs as number) diff --git a/tests/fetch/fetch.exp b/tests/fetch/fetch.exp index 7e4942c49f6..08596b15cfa 100644 --- a/tests/fetch/fetch.exp +++ b/tests/fetch/fetch.exp @@ -7,14 +7,14 @@ Cannot assign `fetch(...)` to `b` because `Response` [1] is incompatible with st ^^^^^^^^^^^^^^^^ References: - /bom.js:1032:76 - 1032| declare function fetch(input: RequestInfo, init?: RequestOptions): Promise; + /bom.js:1489:76 + 1489| declare function fetch(input: RequestInfo, init?: RequestOptions): Promise; ^^^^^^^^ [1] fetch.js:12:18 12| const b: Promise = fetch(myRequest); // incorrect ^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -30,11 +30,11 @@ References: fetch.js:25:18 25| const d: Promise = fetch('image.png'); // incorrect ^^^^ [1] - /bom.js:1032:76 - 1032| declare function fetch(input: RequestInfo, init?: RequestOptions): Promise; + /bom.js:1489:76 + 1489| declare function fetch(input: RequestInfo, init?: RequestOptions): Promise; ^^^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -45,16 +45,16 @@ Cannot call `Headers` with `''Content-T...'` bound to `init` because: - Or string [1] is incompatible with object type [3]. headers.js:3:23 - 3| const a = new Headers("'Content-Type': 'image/jpeg'"); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + 3| const a = new Headers("'Content-Type': 'image/jpeg'"); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:909:20 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^ [2] - /bom.js:909:30 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:1353:20 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^ [2] + /bom.js:1353:30 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] Error -------------------------------------------------------------------------------------------------- headers.js:4:23 @@ -64,16 +64,16 @@ Cannot call `Headers` with array literal bound to `init` because: - Or array literal [1] is incompatible with object type [3]. 
headers.js:4:23 - 4| const b = new Headers(['Content-Type', 'image/jpeg']); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + 4| const b = new Headers(['Content-Type', 'image/jpeg']); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:909:20 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^ [2] - /bom.js:909:30 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^^^^^^^^^^^^^^^^^ [3] + /bom.js:1353:20 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^ [2] + /bom.js:1353:30 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] Error --------------------------------------------------------------------------------------------------- headers.js:9:1 @@ -81,13 +81,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.append` because function [1] requires another argument. headers.js:9:1 - 9| e.append('Content-Type'); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^ + 9| e.append('Content-Type'); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:917:5 - 917| append(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1361:5 + 1361| append(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error -------------------------------------------------------------------------------------------------- headers.js:10:1 @@ -95,13 +95,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.append` because function [1] requires another argument. headers.js:10:1 - 10| e.append({'Content-Type': 'image/jpeg'}); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 10| e.append({'Content-Type': 'image/jpeg'}); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:917:5 - 917| append(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1361:5 + 1361| append(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error ------------------------------------------------------------------------------------------------- headers.js:10:10 @@ -109,13 +109,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.append` with object literal bound to `name` because object literal [1] is incompatible with string [2]. headers.js:10:10 - 10| e.append({'Content-Type': 'image/jpeg'}); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + 10| e.append({'Content-Type': 'image/jpeg'}); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:917:18 - 917| append(name: string, value: string): void; - ^^^^^^ [2] + /bom.js:1361:18 + 1361| append(name: string, value: string): void; + ^^^^^^ [2] Error -------------------------------------------------------------------------------------------------- headers.js:12:1 @@ -123,13 +123,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.set` because function [1] requires another argument. 
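Much of the fetch.exp churn comes from the built-in libdefs being renumbered and rewritten with explicit inexact-object syntax: `HeadersInit`, for example, is now `Headers | { [key: string]: string, ... }`, where the trailing `...` marks the object type as inexact. A small sketch of that syntax with an illustrative alias:

    // @flow
    type StringDict = { [key: string]: string, ... }; // string indexer, explicitly inexact
    const ok: StringDict = { 'Content-Type': 'image/jpeg' }; // fine
    const bad: StringDict = { 'Content-Type': 42 };          // error: number ~> string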
headers.js:12:1 - 12| e.set('Content-Type'); // not correct - ^^^^^^^^^^^^^^^^^^^^^ + 12| e.set('Content-Type'); // not correct + ^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:924:5 - 924| set(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1368:5 + 1368| set(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error -------------------------------------------------------------------------------------------------- headers.js:13:1 @@ -137,13 +137,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.set` because function [1] requires another argument. headers.js:13:1 - 13| e.set({'Content-Type': 'image/jpeg'}); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 13| e.set({'Content-Type': 'image/jpeg'}); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:924:5 - 924| set(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1368:5 + 1368| set(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error -------------------------------------------------------------------------------------------------- headers.js:13:7 @@ -151,13 +151,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.set` with object literal bound to `name` because object literal [1] is incompatible with string [2]. headers.js:13:7 - 13| e.set({'Content-Type': 'image/jpeg'}); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + 13| e.set({'Content-Type': 'image/jpeg'}); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:924:15 - 924| set(name: string, value: string): void; - ^^^^^^ [2] + /bom.js:1368:15 + 1368| set(name: string, value: string): void; + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- headers.js:15:20 @@ -165,16 +165,16 @@ Error -------------------------------------------------------------------------- Cannot assign `e.append(...)` to `f` because undefined [1] is incompatible with `Headers` [2]. headers.js:15:20 - 15| const f: Headers = e.append('Content-Type', 'image/jpeg'); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 15| const f: Headers = e.append('Content-Type', 'image/jpeg'); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:917:42 - 917| append(name: string, value: string): void; - ^^^^ [1] + /bom.js:1361:42 + 1361| append(name: string, value: string): void; + ^^^^ [1] headers.js:15:10 - 15| const f: Headers = e.append('Content-Type', 'image/jpeg'); // not correct - ^^^^^^^ [2] + 15| const f: Headers = e.append('Content-Type', 'image/jpeg'); // not correct + ^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- headers.js:17:19 @@ -182,16 +182,16 @@ Error -------------------------------------------------------------------------- Cannot assign `e.get(...)` to `g` because null [1] is incompatible with string [2]. 
headers.js:17:19 - 17| const g: string = e.get('Content-Type'); // correct - ^^^^^^^^^^^^^^^^^^^^^ + 17| const g: string = e.get('Content-Type'); // correct + ^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:921:24 - 921| get(name: string): null | string; - ^^^^ [1] + /bom.js:1365:24 + 1365| get(name: string): null | string; + ^^^^ [1] headers.js:17:10 - 17| const g: string = e.get('Content-Type'); // correct - ^^^^^^ [2] + 17| const g: string = e.get('Content-Type'); // correct + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- headers.js:18:19 @@ -201,19 +201,19 @@ Cannot assign `e.get(...)` to `h` because: - string [3] is incompatible with number [2]. headers.js:18:19 - 18| const h: number = e.get('Content-Type'); // not correct - ^^^^^^^^^^^^^^^^^^^^^ + 18| const h: number = e.get('Content-Type'); // not correct + ^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:921:24 - 921| get(name: string): null | string; - ^^^^ [1] + /bom.js:1365:24 + 1365| get(name: string): null | string; + ^^^^ [1] headers.js:18:10 - 18| const h: number = e.get('Content-Type'); // not correct - ^^^^^^ [2] - /bom.js:921:31 - 921| get(name: string): null | string; - ^^^^^^ [3] + 18| const h: number = e.get('Content-Type'); // not correct + ^^^^^^ [2] + /bom.js:1365:31 + 1365| get(name: string): null | string; + ^^^^^^ [3] Error -------------------------------------------------------------------------------------------------- headers.js:28:1 @@ -238,19 +238,19 @@ Cannot call `Request` because: - Or undefined [1] is incompatible with string [4]. request.js:2:20 - 2| const a: Request = new Request(); // incorrect - ^^^^^^^^^^^^^ [1] + 2| const a: Request = new Request(); // incorrect + ^^^^^^^^^^^^^ [1] References: - /bom.js:956:20 - 956| type RequestInfo = Request | URL | string; - ^^^^^^^ [2] - /bom.js:956:30 - 956| type RequestInfo = Request | URL | string; - ^^^ [3] - /bom.js:956:36 - 956| type RequestInfo = Request | URL | string; - ^^^^^^ [4] + /bom.js:1400:20 + 1400| type RequestInfo = Request | URL | string; + ^^^^^^^ [2] + /bom.js:1400:30 + 1400| type RequestInfo = Request | URL | string; + ^^^ [3] + /bom.js:1400:36 + 1400| type RequestInfo = Request | URL | string; + ^^^^^^ [4] Error -------------------------------------------------------------------------------------------------- request.js:6:35 @@ -272,60 +272,60 @@ Cannot call `Request` with `c` bound to `init` because: ^ References: - /bom.js:1012:12 - 1012| cache: CacheType; + /bom.js:1458:12 + 1458| cache: CacheType; ^^^^^^^^^ [1] - /bom.js:961:13 - 961| cache?: CacheType; - ^^^^^^^^^ [2] - /bom.js:1013:18 - 1013| credentials: CredentialsType; + /bom.js:1404:11 + 1404| cache?: CacheType, + ^^^^^^^^^ [2] + /bom.js:1459:18 + 1459| credentials: CredentialsType; ^^^^^^^^^^^^^^^ [3] - /bom.js:962:19 - 962| credentials?: CredentialsType; - ^^^^^^^^^^^^^^^ [4] - /bom.js:1014:14 - 1014| headers: Headers; + /bom.js:1405:17 + 1405| credentials?: CredentialsType, + ^^^^^^^^^^^^^^^ [4] + /bom.js:1460:14 + 1460| headers: Headers; ^^^^^^^ [5] - /bom.js:963:15 - 963| headers?: HeadersInit; - ^^^^^^^^^^^ [6] - /bom.js:1015:16 - 1015| integrity: string; + /bom.js:1406:13 + 1406| headers?: HeadersInit, + ^^^^^^^^^^^ [6] + /bom.js:1461:16 + 1461| integrity: string; ^^^^^^ [7] - /bom.js:964:17 - 964| integrity?: string; - ^^^^^^ [8] - /bom.js:1016:13 - 1016| method: string; + /bom.js:1407:15 + 1407| integrity?: string, + ^^^^^^ [8] + /bom.js:1462:13 + 1462| method: string; ^^^^^^ [9] - /bom.js:966:14 - 966| 
method?: string; - ^^^^^^ [10] - /bom.js:1017:11 - 1017| mode: ModeType; + /bom.js:1409:12 + 1409| method?: string, + ^^^^^^ [10] + /bom.js:1463:11 + 1463| mode: ModeType; ^^^^^^^^ [11] - /bom.js:967:12 - 967| mode?: ModeType; - ^^^^^^^^ [12] - /bom.js:1018:15 - 1018| redirect: RedirectType; + /bom.js:1410:10 + 1410| mode?: ModeType, + ^^^^^^^^ [12] + /bom.js:1464:15 + 1464| redirect: RedirectType; ^^^^^^^^^^^^ [13] - /bom.js:968:16 - 968| redirect?: RedirectType; - ^^^^^^^^^^^^ [14] - /bom.js:1019:15 - 1019| referrer: string; + /bom.js:1411:14 + 1411| redirect?: RedirectType, + ^^^^^^^^^^^^ [14] + /bom.js:1465:15 + 1465| referrer: string; ^^^^^^ [15] - /bom.js:969:16 - 969| referrer?: string; - ^^^^^^ [16] - /bom.js:1020:21 - 1020| referrerPolicy: ReferrerPolicyType; + /bom.js:1412:14 + 1412| referrer?: string, + ^^^^^^ [16] + /bom.js:1466:21 + 1466| referrerPolicy: ReferrerPolicyType; ^^^^^^^^^^^^^^^^^^ [17] - /bom.js:970:22 - 970| referrerPolicy?: ReferrerPolicyType; - ^^^^^^^^^^^^^^^^^^ [18] + /bom.js:1413:20 + 1413| referrerPolicy?: ReferrerPolicyType, + ^^^^^^^^^^^^^^^^^^ [18] Error -------------------------------------------------------------------------------------------------- request.js:8:32 @@ -335,16 +335,16 @@ Cannot call `Request` with object literal bound to `input` because: - Or object literal [1] is incompatible with `URL` [3]. request.js:8:32 - 8| const f: Request = new Request({}) // incorrect - ^^ [1] + 8| const f: Request = new Request({}) // incorrect + ^^ [1] References: - /bom.js:956:20 - 956| type RequestInfo = Request | URL | string; - ^^^^^^^ [2] - /bom.js:956:30 - 956| type RequestInfo = Request | URL | string; - ^^^ [3] + /bom.js:1400:20 + 1400| type RequestInfo = Request | URL | string; + ^^^^^^^ [2] + /bom.js:1400:30 + 1400| type RequestInfo = Request | URL | string; + ^^^ [3] Error -------------------------------------------------------------------------------------------------- request.js:24:1 @@ -359,8 +359,8 @@ References: request.js:24:19 24| h.text().then((t: Buffer) => t); // incorrect ^^^^^^ [1] - /bom.js:1029:21 - 1029| text(): Promise; + /bom.js:1475:21 + 1475| text(): Promise; ^^^^^^ [2] @@ -373,8 +373,8 @@ Cannot call `h.arrayBuffer().then` because `ArrayBuffer` [1] is incompatible wit ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:1025:28 - 1025| arrayBuffer(): Promise; + /bom.js:1471:28 + 1471| arrayBuffer(): Promise; ^^^^^^^^^^^ [1] request.js:26:27 26| h.arrayBuffer().then((ab: Buffer) => ab); // incorrect @@ -388,25 +388,25 @@ Cannot call `Request` with object literal bound to `init` because in property `h - Or string [1] is incompatible with object type [3]. 
request.js:54:54 - v - 54| const l: Request = new Request('http://example.org', { - 55| method: 'GET', - 56| headers: 'Content-Type: image/jpeg', - 57| mode: 'cors', - 58| cache: 'default' - 59| }) // incorrect - headers is string - ^ + v + 54| const l: Request = new Request('http://example.org', { + 55| method: 'GET', + 56| headers: 'Content-Type: image/jpeg', + 57| mode: 'cors', + 58| cache: 'default' + 59| }) // incorrect - headers is string + ^ References: request.js:56:12 - 56| headers: 'Content-Type: image/jpeg', - ^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /bom.js:909:20 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^ [2] - /bom.js:909:30 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^^^^^^^^^^^^^^^^^ [3] + 56| headers: 'Content-Type: image/jpeg', + ^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1353:20 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^ [2] + /bom.js:1353:30 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] Error ------------------------------------------------------------------------------------------------- request.js:63:18 @@ -415,16 +415,16 @@ Cannot call `Request` with object literal bound to `init` because null [1] is in `method`. request.js:63:18 - 63| new Request('/', { method: null }); // incorrect - ^^^^^^^^^^^^^^^^ + 63| new Request('/', { method: null }); // incorrect + ^^^^^^^^^^^^^^^^ References: request.js:63:28 - 63| new Request('/', { method: null }); // incorrect - ^^^^ [1] - /bom.js:966:14 - 966| method?: string; - ^^^^^^ [2] + 63| new Request('/', { method: null }); // incorrect + ^^^^ [1] + /bom.js:1409:12 + 1409| method?: string, + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- response.js:8:18 @@ -433,16 +433,16 @@ Cannot call `Response` with object literal bound to `init` because string [1] is property `status`. response.js:8:18 - 8| new Response("", { status: "404" }); // incorrect - ^^^^^^^^^^^^^^^^^ + 8| new Response("", { status: "404" }); // incorrect + ^^^^^^^^^^^^^^^^^ References: response.js:8:28 - 8| new Response("", { status: "404" }); // incorrect - ^^^^^ [1] - /bom.js:975:14 - 975| status?: number; - ^^^^^^ [2] + 8| new Response("", { status: "404" }); // incorrect + ^^^^^ [1] + /bom.js:1420:12 + 1420| status?: number, + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------- response.js:9:18 @@ -451,16 +451,16 @@ Cannot call `Response` with object literal bound to `init` because null [1] is i `status`. response.js:9:18 - 9| new Response("", { status: null }); // incorrect - ^^^^^^^^^^^^^^^^ + 9| new Response("", { status: null }); // incorrect + ^^^^^^^^^^^^^^^^ References: response.js:9:28 - 9| new Response("", { status: null }); // incorrect - ^^^^ [1] - /bom.js:975:14 - 975| status?: number; - ^^^^^^ [2] + 9| new Response("", { status: null }); // incorrect + ^^^^ [1] + /bom.js:1420:12 + 1420| status?: number, + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------ response.js:11:50 @@ -470,23 +470,23 @@ Cannot call `Response` with object literal bound to `init` because in property ` - Or string [1] is incompatible with object type [3]. 
response.js:11:50 - v - 11| const f: Response = new Response("responsebody", { - 12| status: 404, - 13| headers: "'Content-Type': 'image/jpeg'" - 14| }); // incorrect - ^ + v + 11| const f: Response = new Response("responsebody", { + 12| status: 404, + 13| headers: "'Content-Type': 'image/jpeg'" + 14| }); // incorrect + ^ References: response.js:13:14 - 13| headers: "'Content-Type': 'image/jpeg'" - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /bom.js:909:20 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^ [2] - /bom.js:909:30 - 909| type HeadersInit = Headers | {[key: string]: string}; - ^^^^^^^^^^^^^^^^^^^^^^^ [3] + 13| headers: "'Content-Type': 'image/jpeg'" + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1353:20 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^ [2] + /bom.js:1353:30 + 1353| type HeadersInit = Headers | { [key: string]: string, ... }; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] Error ------------------------------------------------------------------------------------------------ response.js:30:34 @@ -501,37 +501,37 @@ Cannot call `Response` with object literal bound to `input` because: - Or object literal [1] is incompatible with `ReadableStream` [8]. response.js:30:34 - v - 30| const i: Response = new Response({ - 31| status: 404, - 32| headers: new Headers({ - 33| 'Content-Type': 'image/jpeg' - 34| }) - 35| }); // incorrect - ^ [1] - -References: - /bom.js:954:26 - 954| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; - ^^^^^^^^^^^^^^^ [2] - /bom.js:954:44 - 954| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; - ^^^^^^^^ [3] - /bom.js:954:55 - 954| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; - ^^^^ [4] - /bom.js:954:62 - 954| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; - ^^^^^^^^^^^ [5] - /core.js:627:25 - 627| type $ArrayBufferView = $TypedArray | DataView; - ^^^^^^^^^^^ [6] - /core.js:627:39 - 627| type $ArrayBufferView = $TypedArray | DataView; - ^^^^^^^^ [7] - /bom.js:954:95 - 954| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; - ^^^^^^^^^^^^^^ [8] + v + 30| const i: Response = new Response({ + 31| status: 404, + 32| headers: new Headers({ + 33| 'Content-Type': 'image/jpeg' + 34| }) + 35| }); // incorrect + ^ [1] + +References: + /bom.js:1398:26 + 1398| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; + ^^^^^^^^^^^^^^^ [2] + /bom.js:1398:44 + 1398| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; + ^^^^^^^^ [3] + /bom.js:1398:55 + 1398| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; + ^^^^ [4] + /bom.js:1398:62 + 1398| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; + ^^^^^^^^^^^ [5] + /core.js:709:25 + 709| type $ArrayBufferView = $TypedArray | DataView; + ^^^^^^^^^^^ [6] + /core.js:709:39 + 709| type $ArrayBufferView = $TypedArray | DataView; + ^^^^^^^^ [7] + /bom.js:1398:95 + 1398| type BodyInit = string | URLSearchParams | FormData | Blob | ArrayBuffer | $ArrayBufferView | ReadableStream; + ^^^^^^^^^^^^^^ [8] Error 
------------------------------------------------------------------------------------------------- response.js:42:1 @@ -546,8 +546,8 @@ References: response.js:42:19 42| h.text().then((t: Buffer) => t); // incorrect ^^^^^^ [1] - /bom.js:1003:21 - 1003| text(): Promise; + /bom.js:1449:21 + 1449| text(): Promise; ^^^^^^ [2] @@ -556,16 +556,16 @@ Error -------------------------------------------------------------------------- Cannot call `h.arrayBuffer().then` because `ArrayBuffer` [1] is incompatible with `Buffer` [2] in the first argument. response.js:44:1 - 44| h.arrayBuffer().then((ab: Buffer) => ab); // incorrect - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 44| h.arrayBuffer().then((ab: Buffer) => ab); // incorrect + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:999:28 - 999| arrayBuffer(): Promise; - ^^^^^^^^^^^ [1] + /bom.js:1445:28 + 1445| arrayBuffer(): Promise; + ^^^^^^^^^^^ [1] response.js:44:27 - 44| h.arrayBuffer().then((ab: Buffer) => ab); // incorrect - ^^^^^^ [2] + 44| h.arrayBuffer().then((ab: Buffer) => ab); // incorrect + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------ urlsearchparams.js:4:31 @@ -575,19 +575,19 @@ Cannot call `URLSearchParams` with array literal bound to `query` because: - string [3] is incompatible with tuple type [2] in array element. urlsearchparams.js:4:31 - 4| const b = new URLSearchParams(['key1', 'value1']); // not correct - ^^^^^^^^^^^^^^^^^^ + 4| const b = new URLSearchParams(['key1', 'value1']); // not correct + ^^^^^^^^^^^^^^^^^^ References: urlsearchparams.js:4:32 - 4| const b = new URLSearchParams(['key1', 'value1']); // not correct - ^^^^^^ [1] - /bom.js:930:58 - 930| constructor(query?: string | URLSearchParams | Array<[string, string]> | {[string]: string} ): void; - ^^^^^^^^^^^^^^^^ [2] + 4| const b = new URLSearchParams(['key1', 'value1']); // not correct + ^^^^^^ [1] + /bom.js:1374:58 + 1374| constructor(query?: string | URLSearchParams | Array<[string, string]> | { [string]: string, ... } ): void; + ^^^^^^^^^^^^^^^^ [2] urlsearchparams.js:4:40 - 4| const b = new URLSearchParams(['key1', 'value1']); // not correct - ^^^^^^^^ [3] + 4| const b = new URLSearchParams(['key1', 'value1']); // not correct + ^^^^^^^^ [3] Error ------------------------------------------------------------------------------------------- urlsearchparams.js:9:1 @@ -595,13 +595,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.append` because function [1] requires another argument. urlsearchparams.js:9:1 - 9| e.append('key1'); // not correct - ^^^^^^^^^^^^^^^^ + 9| e.append('key1'); // not correct + ^^^^^^^^^^^^^^^^ References: - /bom.js:931:5 - 931| append(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1375:5 + 1375| append(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error ------------------------------------------------------------------------------------------ urlsearchparams.js:10:1 @@ -609,13 +609,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.append` because function [1] requires another argument. 
urlsearchparams.js:10:1 - 10| e.append({'key1': 'value1'}); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 10| e.append({'key1': 'value1'}); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:931:5 - 931| append(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1375:5 + 1375| append(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error ----------------------------------------------------------------------------------------- urlsearchparams.js:10:10 @@ -623,13 +623,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.append` with object literal bound to `name` because object literal [1] is incompatible with string [2]. urlsearchparams.js:10:10 - 10| e.append({'key1': 'value1'}); // not correct - ^^^^^^^^^^^^^^^^^^ [1] + 10| e.append({'key1': 'value1'}); // not correct + ^^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:931:18 - 931| append(name: string, value: string): void; - ^^^^^^ [2] + /bom.js:1375:18 + 1375| append(name: string, value: string): void; + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------ urlsearchparams.js:12:1 @@ -637,13 +637,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.set` because function [1] requires another argument. urlsearchparams.js:12:1 - 12| e.set('key1'); // not correct - ^^^^^^^^^^^^^ + 12| e.set('key1'); // not correct + ^^^^^^^^^^^^^ References: - /bom.js:939:5 - 939| set(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1383:5 + 1383| set(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error ------------------------------------------------------------------------------------------ urlsearchparams.js:13:1 @@ -651,13 +651,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.set` because function [1] requires another argument. urlsearchparams.js:13:1 - 13| e.set({'key1': 'value1'}); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^ + 13| e.set({'key1': 'value1'}); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:939:5 - 939| set(name: string, value: string): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /bom.js:1383:5 + 1383| set(name: string, value: string): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error ------------------------------------------------------------------------------------------ urlsearchparams.js:13:7 @@ -665,13 +665,13 @@ Error -------------------------------------------------------------------------- Cannot call `e.set` with object literal bound to `name` because object literal [1] is incompatible with string [2]. urlsearchparams.js:13:7 - 13| e.set({'key1': 'value1'}); // not correct - ^^^^^^^^^^^^^^^^^^ [1] + 13| e.set({'key1': 'value1'}); // not correct + ^^^^^^^^^^^^^^^^^^ [1] References: - /bom.js:939:15 - 939| set(name: string, value: string): void; - ^^^^^^ [2] + /bom.js:1383:15 + 1383| set(name: string, value: string): void; + ^^^^^^ [2] Error ----------------------------------------------------------------------------------------- urlsearchparams.js:15:28 @@ -679,16 +679,16 @@ Error -------------------------------------------------------------------------- Cannot assign `e.append(...)` to `f` because undefined [1] is incompatible with `URLSearchParams` [2]. 
urlsearchparams.js:15:28 - 15| const f: URLSearchParams = e.append('key1', 'value1'); // not correct - ^^^^^^^^^^^^^^^^^^^^^^^^^^ + 15| const f: URLSearchParams = e.append('key1', 'value1'); // not correct + ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /bom.js:931:42 - 931| append(name: string, value: string): void; - ^^^^ [1] + /bom.js:1375:42 + 1375| append(name: string, value: string): void; + ^^^^ [1] urlsearchparams.js:15:10 - 15| const f: URLSearchParams = e.append('key1', 'value1'); // not correct - ^^^^^^^^^^^^^^^ [2] + 15| const f: URLSearchParams = e.append('key1', 'value1'); // not correct + ^^^^^^^^^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- urlsearchparams.js:17:19 @@ -696,16 +696,16 @@ Error -------------------------------------------------------------------------- Cannot assign `e.get(...)` to `g` because null [1] is incompatible with string [2]. urlsearchparams.js:17:19 - 17| const g: string = e.get('key1'); // correct - ^^^^^^^^^^^^^ + 17| const g: string = e.get('key1'); // correct + ^^^^^^^^^^^^^ References: - /bom.js:935:24 - 935| get(name: string): null | string; - ^^^^ [1] + /bom.js:1379:24 + 1379| get(name: string): null | string; + ^^^^ [1] urlsearchparams.js:17:10 - 17| const g: string = e.get('key1'); // correct - ^^^^^^ [2] + 17| const g: string = e.get('key1'); // correct + ^^^^^^ [2] Error ----------------------------------------------------------------------------------------- urlsearchparams.js:18:19 @@ -715,19 +715,19 @@ Cannot assign `e.get(...)` to `h` because: - string [3] is incompatible with number [2]. urlsearchparams.js:18:19 - 18| const h: number = e.get('key1'); // not correct - ^^^^^^^^^^^^^ + 18| const h: number = e.get('key1'); // not correct + ^^^^^^^^^^^^^ References: - /bom.js:935:24 - 935| get(name: string): null | string; - ^^^^ [1] + /bom.js:1379:24 + 1379| get(name: string): null | string; + ^^^^ [1] urlsearchparams.js:18:10 - 18| const h: number = e.get('key1'); // not correct - ^^^^^^ [2] - /bom.js:935:31 - 935| get(name: string): null | string; - ^^^^^^ [3] + 18| const h: number = e.get('key1'); // not correct + ^^^^^^ [2] + /bom.js:1379:31 + 1379| get(name: string): null | string; + ^^^^^^ [3] diff --git a/tests/find-refs-global/declare.js b/tests/find-refs-global/declare.js new file mode 100644 index 00000000000..3fb893ec994 --- /dev/null +++ b/tests/find-refs-global/declare.js @@ -0,0 +1,13 @@ +// @flow + +declare var foo: number; +foo; + +declare export var bar; +bar; + +declare function baz(): void; +baz(); + +declare class Foo {}; +new Foo(); diff --git a/tests/find-refs-global/find-refs-global.exp b/tests/find-refs-global/find-refs-global.exp index 9e88c7097d1..405cd5fc6cf 100644 --- a/tests/find-refs-global/find-refs-global.exp +++ b/tests/find-refs-global/find-refs-global.exp @@ -1212,20 +1212,14 @@ CJS object exporting shorthand: { "source":"cjs-2.js", "type":"SourceFile", - "start":{"line":4,"column":37,"offset":77}, - "end":{"line":4,"column":40,"offset":81} - }, - { - "source":"cjs-4.js", - "type":"SourceFile", - "start":{"line":3,"column":22,"offset":70}, - "end":{"line":3,"column":25,"offset":74} + "start":{"line":3,"column":7,"offset":28}, + "end":{"line":3,"column":10,"offset":32} }, { - "source":"cjs-4.js", + "source":"cjs-2.js", "type":"SourceFile", - "start":{"line":9,"column":1,"offset":150}, - "end":{"line":9,"column":4,"offset":154} + "start":{"line":4,"column":37,"offset":77}, + "end":{"line":4,"column":40,"offset":81} } ] } @@ -1341,3 +1335,79 @@ CJS 
default imports bound to a local: } ] } +declare var: +{ + "kind":"symbol-found", + "name":"foo", + "locs":[ + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":3,"column":13,"offset":22}, + "end":{"line":3,"column":15,"offset":25} + }, + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":4,"column":1,"offset":35}, + "end":{"line":4,"column":3,"offset":38} + } + ] +} +declare export var: +{ + "kind":"symbol-found", + "name":"bar", + "locs":[ + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":6,"column":20,"offset":60}, + "end":{"line":6,"column":22,"offset":63} + }, + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":7,"column":1,"offset":65}, + "end":{"line":7,"column":3,"offset":68} + } + ] +} +declare function: +{ + "kind":"symbol-found", + "name":"baz", + "locs":[ + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":9,"column":18,"offset":88}, + "end":{"line":9,"column":20,"offset":91} + }, + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":10,"column":1,"offset":101}, + "end":{"line":10,"column":3,"offset":104} + } + ] +} +declare class: +{ + "kind":"symbol-found", + "name":"Foo", + "locs":[ + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":12,"column":15,"offset":123}, + "end":{"line":12,"column":17,"offset":126} + }, + { + "source":"declare.js", + "type":"SourceFile", + "start":{"line":13,"column":5,"offset":135}, + "end":{"line":13,"column":7,"offset":138} + } + ] +} diff --git a/tests/find-refs-global/test.sh b/tests/find-refs-global/test.sh index e1a5de526df..5ca91bcabcd 100755 --- a/tests/find-refs-global/test.sh +++ b/tests/find-refs-global/test.sh @@ -209,3 +209,23 @@ echo "CJS default imports bound to a local:" # ^ # Should have the same results as above assert_ok "$FLOW" find-refs --global --json --pretty --strip-root cjs-4.js 4 7 + +echo "declare var:" +# declare var foo: number; +# ^ +assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 3 14 + +echo "declare export var:" +# declare export var bar; +# ^ +assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 6 21 + +echo "declare function:" +# declare function baz(): void; +# ^ +assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 9 19 + +echo "declare class:" +# declare class Foo {}; +# ^ +assert_ok "$FLOW" find-refs --global --json --pretty --strip-root declare.js 12 16 diff --git a/tests/find-refs-local/find-refs-local.exp b/tests/find-refs-local/find-refs-local.exp index 43b416dca58..9e63b905842 100644 --- a/tests/find-refs-local/find-refs-local.exp +++ b/tests/find-refs-local/find-refs-local.exp @@ -357,14 +357,20 @@ Destructuring: { "source":"locals.js", "type":"SourceFile", - "start":{"line":37,"column":7,"offset":476}, - "end":{"line":37,"column":7,"offset":477} + "start":{"line":36,"column":7,"offset":445}, + "end":{"line":36,"column":7,"offset":446} }, { "source":"locals.js", "type":"SourceFile", "start":{"line":37,"column":26,"offset":495}, "end":{"line":37,"column":26,"offset":496} + }, + { + "source":"locals.js", + "type":"SourceFile", + "start":{"line":40,"column":8,"offset":531}, + "end":{"line":40,"column":8,"offset":532} } ] } diff --git a/tests/find-refs-with-mergedT/.flowconfig b/tests/find-refs-with-mergedT/.flowconfig new file mode 100644 index 00000000000..b009741f3a0 --- /dev/null +++ b/tests/find-refs-with-mergedT/.flowconfig @@ -0,0 +1,2 @@ +[libs] +libs diff --git 
a/tests/find-refs-with-mergedT/.testconfig b/tests/find-refs-with-mergedT/.testconfig new file mode 100644 index 00000000000..70d0dd9761a --- /dev/null +++ b/tests/find-refs-with-mergedT/.testconfig @@ -0,0 +1 @@ +shell: test.sh \ No newline at end of file diff --git a/tests/find-refs-with-mergedT/find-refs-with-mergedT.exp b/tests/find-refs-with-mergedT/find-refs-with-mergedT.exp new file mode 100644 index 00000000000..b92ecb69140 --- /dev/null +++ b/tests/find-refs-with-mergedT/find-refs-with-mergedT.exp @@ -0,0 +1,13 @@ +Don't crash on MergedT: +{ + "kind":"symbol-found", + "name":"arr", + "locs":[ + { + "source":"test.js", + "type":"SourceFile", + "start":{"line":7,"column":3,"offset":173}, + "end":{"line":7,"column":5,"offset":176} + } + ] +} diff --git a/tests/find-refs-with-mergedT/libs/libdefs.js b/tests/find-refs-with-mergedT/libs/libdefs.js new file mode 100644 index 00000000000..efa1ed4a8d7 --- /dev/null +++ b/tests/find-refs-with-mergedT/libs/libdefs.js @@ -0,0 +1,6 @@ +//@flow + +declare class Super {} +declare module lib { + declare export var Super: typeof Super; +} diff --git a/tests/find-refs-with-mergedT/test.js b/tests/find-refs-with-mergedT/test.js new file mode 100644 index 00000000000..06e9e1cfc8f --- /dev/null +++ b/tests/find-refs-with-mergedT/test.js @@ -0,0 +1,10 @@ +//@flow +// This test makes resolve_type encounter a MergedT. Previously, this would +// cause a crash. +const Lib = require('lib'); + +class Component extends Lib.Super<{}> { + arr(): Array { + return []; + } +} diff --git a/tests/find-refs-with-mergedT/test.sh b/tests/find-refs-with-mergedT/test.sh new file mode 100755 index 00000000000..cc8a1af7cd8 --- /dev/null +++ b/tests/find-refs-with-mergedT/test.sh @@ -0,0 +1,3 @@ +#!/bin/bash +echo "Don't crash on MergedT:" +assert_ok "$FLOW" find-refs --global --json --pretty --strip-root test.js 7 3 diff --git a/tests/flowconfig_ignore/.flowconfig b/tests/flowconfig_ignore/.flowconfig new file mode 100644 index 00000000000..a0c7186edc8 --- /dev/null +++ b/tests/flowconfig_ignore/.flowconfig @@ -0,0 +1,23 @@ +[declarations] +.*/my_declarations/.* +!.*/my_declarations/typecheck/.* +!.*/my_declarations/actually_typecheck\.js +!/my_declarations/typecheck_if_in_root\.js +!/my_declarations/root_typecheck/.* + +[ignore] +.*/my_ignores/.* +!.*/my_ignores/typecheck/.* +!.*/my_ignores/actually_typecheck\.js +!/my_ignores/typecheck_if_in_root\.js +!/my_ignores/root_typecheck/.* + +[untyped] +.*/my_untyped/.* +!.*/my_untyped/typecheck/.* +!.*/my_untyped/actually_typecheck\.js +!/my_untyped/typecheck_if_in_root\.js +!/my_untyped/root_typecheck/.* + +[options] +all=false diff --git a/tests/flowconfig_ignore/.testconfig b/tests/flowconfig_ignore/.testconfig new file mode 100644 index 00000000000..d31f7922cbe --- /dev/null +++ b/tests/flowconfig_ignore/.testconfig @@ -0,0 +1 @@ +all: false diff --git a/tests/flowconfig_ignore/control/control.js b/tests/flowconfig_ignore/control/control.js new file mode 100644 index 00000000000..436c1758e69 --- /dev/null +++ b/tests/flowconfig_ignore/control/control.js @@ -0,0 +1,4 @@ +//@flow + +var x = 42; +x() diff --git a/tests/flowconfig_ignore/flowconfig_ignore.exp b/tests/flowconfig_ignore/flowconfig_ignore.exp new file mode 100644 index 00000000000..84e1ea7f737 --- /dev/null +++ b/tests/flowconfig_ignore/flowconfig_ignore.exp @@ -0,0 +1,234 @@ +Error ------------------------------------------------------------------------------------------- control/control.js:4:1 + +Cannot call `x` because number [1] is not a function. 
+ + control/control.js:4:1 + 4| x() + ^^^ + +References: + control/control.js:3:9 + 3| var x = 42; + ^^ [1] + + +Error ------------------------------------------------------------------------ my_declarations/actually_typecheck.js:4:1 + +Cannot call `x` because number [1] is not a function. + + my_declarations/actually_typecheck.js:4:1 + 4| x(); + ^^^ + +References: + my_declarations/actually_typecheck.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error -------------------------------------------------------------------- my_declarations/root_typecheck/checked.js:4:1 + +Cannot call `x` because number [1] is not a function. + + my_declarations/root_typecheck/checked.js:4:1 + 4| x(); + ^^^ + +References: + my_declarations/root_typecheck/checked.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ---------------------------------------------------------------------- my_declarations/typecheck_if_in_root.js:4:1 + +Cannot call `x` because mixed [1] is not a function. + + my_declarations/typecheck_if_in_root.js:4:1 + 4| x(); + ^^^ + +References: + my_declarations/typecheck_if_in_root.js:3:8 + 3| var x: mixed = 42; + ^^^^^ [1] + + +Error ----------------------------------------------------------------------------- my_ignores/actually_typecheck.js:4:1 + +Cannot call `x` because number [1] is not a function. + + my_ignores/actually_typecheck.js:4:1 + 4| x(); + ^^^ + +References: + my_ignores/actually_typecheck.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ------------------------------------------------------------------------- my_ignores/root_typecheck/checked.js:4:1 + +Cannot call `x` because number [1] is not a function. + + my_ignores/root_typecheck/checked.js:4:1 + 4| x(); + ^^^ + +References: + my_ignores/root_typecheck/checked.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error --------------------------------------------------------------------------- my_ignores/typecheck_if_in_root.js:4:1 + +Cannot call `x` because mixed [1] is not a function. + + my_ignores/typecheck_if_in_root.js:4:1 + 4| x(); + ^^^ + +References: + my_ignores/typecheck_if_in_root.js:3:8 + 3| var x: mixed = 42; + ^^^^^ [1] + + +Error ----------------------------------------------------------------------------- my_untyped/actually_typecheck.js:4:1 + +Cannot call `x` because number [1] is not a function. + + my_untyped/actually_typecheck.js:4:1 + 4| x(); + ^^^ + +References: + my_untyped/actually_typecheck.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ------------------------------------------------------------------------- my_untyped/root_typecheck/checked.js:4:1 + +Cannot call `x` because number [1] is not a function. + + my_untyped/root_typecheck/checked.js:4:1 + 4| x(); + ^^^ + +References: + my_untyped/root_typecheck/checked.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error --------------------------------------------------------------------------- my_untyped/typecheck_if_in_root.js:4:1 + +Cannot call `x` because mixed [1] is not a function. + + my_untyped/typecheck_if_in_root.js:4:1 + 4| x(); + ^^^ + +References: + my_untyped/typecheck_if_in_root.js:3:8 + 3| var x: mixed = 42; + ^^^^^ [1] + + +Error ----------------------------------------------------------- subdirectory/my_declarations/actually_typecheck.js:4:1 + +Cannot call `x` because number [1] is not a function. 
+ + subdirectory/my_declarations/actually_typecheck.js:4:1 + 4| x(); + ^^^ + +References: + subdirectory/my_declarations/actually_typecheck.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ------------------------------------------------------------ subdirectory/my_declarations/typecheck/checked.js:4:1 + +Cannot call `x` because number [1] is not a function. + + subdirectory/my_declarations/typecheck/checked.js:4:1 + 4| x(); + ^^^ + +References: + subdirectory/my_declarations/typecheck/checked.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ---------------------------------------------------------------- subdirectory/my_ignores/actually_typecheck.js:4:1 + +Cannot call `x` because number [1] is not a function. + + subdirectory/my_ignores/actually_typecheck.js:4:1 + 4| x(); + ^^^ + +References: + subdirectory/my_ignores/actually_typecheck.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ----------------------------------------------------------------- subdirectory/my_ignores/typecheck/checked.js:4:1 + +Cannot call `x` because number [1] is not a function. + + subdirectory/my_ignores/typecheck/checked.js:4:1 + 4| x(); + ^^^ + +References: + subdirectory/my_ignores/typecheck/checked.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ---------------------------------------------------------------- subdirectory/my_untyped/actually_typecheck.js:4:1 + +Cannot call `x` because number [1] is not a function. + + subdirectory/my_untyped/actually_typecheck.js:4:1 + 4| x(); + ^^^ + +References: + subdirectory/my_untyped/actually_typecheck.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ----------------------------------------------------------------- subdirectory/my_untyped/typecheck/checked.js:4:1 + +Cannot call `x` because number [1] is not a function. + + subdirectory/my_untyped/typecheck/checked.js:4:1 + 4| x(); + ^^^ + +References: + subdirectory/my_untyped/typecheck/checked.js:3:8 + 3| var x: number = 42; + ^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------------- test.js:6:24 + +Cannot resolve module `./my_ignores/outer_bogus`. 
+ + 6| import type { K } from "./my_ignores/outer_bogus" //should error + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + + + +Found 17 errors diff --git a/tests/flowconfig_ignore/my_declarations/actually_typecheck.js b/tests/flowconfig_ignore/my_declarations/actually_typecheck.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/my_declarations/actually_typecheck.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/my_declarations/outer_decls.js b/tests/flowconfig_ignore/my_declarations/outer_decls.js new file mode 100644 index 00000000000..8294d0ec937 --- /dev/null +++ b/tests/flowconfig_ignore/my_declarations/outer_decls.js @@ -0,0 +1,5 @@ +//@flow +export type T = number; +export var t:T = 42; + +t(); diff --git a/tests/flowconfig_ignore/my_declarations/root_typecheck/checked.js b/tests/flowconfig_ignore/my_declarations/root_typecheck/checked.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/my_declarations/root_typecheck/checked.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/my_declarations/typecheck_if_in_root.js b/tests/flowconfig_ignore/my_declarations/typecheck_if_in_root.js new file mode 100644 index 00000000000..a59c1fd0b65 --- /dev/null +++ b/tests/flowconfig_ignore/my_declarations/typecheck_if_in_root.js @@ -0,0 +1,4 @@ +//@flow + +var x: mixed = 42; +x(); diff --git a/tests/flowconfig_ignore/my_ignores/actually_typecheck.js b/tests/flowconfig_ignore/my_ignores/actually_typecheck.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/my_ignores/actually_typecheck.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/my_ignores/outer_bogus.js b/tests/flowconfig_ignore/my_ignores/outer_bogus.js new file mode 100644 index 00000000000..2824171f46e --- /dev/null +++ b/tests/flowconfig_ignore/my_ignores/outer_bogus.js @@ -0,0 +1,5 @@ +//@flow +var x = 42; +x(); + +export type K = mixed; diff --git a/tests/flowconfig_ignore/my_ignores/root_typecheck/checked.js b/tests/flowconfig_ignore/my_ignores/root_typecheck/checked.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/my_ignores/root_typecheck/checked.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/my_ignores/typecheck_if_in_root.js b/tests/flowconfig_ignore/my_ignores/typecheck_if_in_root.js new file mode 100644 index 00000000000..a59c1fd0b65 --- /dev/null +++ b/tests/flowconfig_ignore/my_ignores/typecheck_if_in_root.js @@ -0,0 +1,4 @@ +//@flow + +var x: mixed = 42; +x(); diff --git a/tests/flowconfig_ignore/my_untyped/actually_typecheck.js b/tests/flowconfig_ignore/my_untyped/actually_typecheck.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/my_untyped/actually_typecheck.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/my_untyped/outer_untyped.js b/tests/flowconfig_ignore/my_untyped/outer_untyped.js new file mode 100644 index 00000000000..6af36f49f0a --- /dev/null +++ b/tests/flowconfig_ignore/my_untyped/outer_untyped.js @@ -0,0 +1,5 @@ +//@flow +var x = 42; +x(); + +export var y = 42; diff --git a/tests/flowconfig_ignore/my_untyped/root_typecheck/checked.js b/tests/flowconfig_ignore/my_untyped/root_typecheck/checked.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ 
b/tests/flowconfig_ignore/my_untyped/root_typecheck/checked.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/my_untyped/typecheck_if_in_root.js b/tests/flowconfig_ignore/my_untyped/typecheck_if_in_root.js new file mode 100644 index 00000000000..a59c1fd0b65 --- /dev/null +++ b/tests/flowconfig_ignore/my_untyped/typecheck_if_in_root.js @@ -0,0 +1,4 @@ +//@flow + +var x: mixed = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_declarations/actually_typecheck.js b/tests/flowconfig_ignore/subdirectory/my_declarations/actually_typecheck.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_declarations/actually_typecheck.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_declarations/inner_decls.js b/tests/flowconfig_ignore/subdirectory/my_declarations/inner_decls.js new file mode 100644 index 00000000000..63d0c12935b --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_declarations/inner_decls.js @@ -0,0 +1,3 @@ +//@flow +export type T = number; +export var t:T = 42; diff --git a/tests/flowconfig_ignore/subdirectory/my_declarations/typecheck/checked.js b/tests/flowconfig_ignore/subdirectory/my_declarations/typecheck/checked.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_declarations/typecheck/checked.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_declarations/typecheck_if_in_root.js b/tests/flowconfig_ignore/subdirectory/my_declarations/typecheck_if_in_root.js new file mode 100644 index 00000000000..a59c1fd0b65 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_declarations/typecheck_if_in_root.js @@ -0,0 +1,4 @@ +//@flow + +var x: mixed = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_ignores/actually_typecheck.js b/tests/flowconfig_ignore/subdirectory/my_ignores/actually_typecheck.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_ignores/actually_typecheck.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_ignores/inner_bogus.js b/tests/flowconfig_ignore/subdirectory/my_ignores/inner_bogus.js new file mode 100644 index 00000000000..cfb5938398b --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_ignores/inner_bogus.js @@ -0,0 +1,4 @@ +//@flow + +var x = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_ignores/typecheck/checked.js b/tests/flowconfig_ignore/subdirectory/my_ignores/typecheck/checked.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_ignores/typecheck/checked.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_ignores/typecheck_if_in_root.js b/tests/flowconfig_ignore/subdirectory/my_ignores/typecheck_if_in_root.js new file mode 100644 index 00000000000..a59c1fd0b65 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_ignores/typecheck_if_in_root.js @@ -0,0 +1,4 @@ +//@flow + +var x: mixed = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_untyped/actually_typecheck.js b/tests/flowconfig_ignore/subdirectory/my_untyped/actually_typecheck.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ 
b/tests/flowconfig_ignore/subdirectory/my_untyped/actually_typecheck.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_untyped/inner_untyped.js b/tests/flowconfig_ignore/subdirectory/my_untyped/inner_untyped.js new file mode 100644 index 00000000000..cfb5938398b --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_untyped/inner_untyped.js @@ -0,0 +1,4 @@ +//@flow + +var x = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_untyped/typecheck/checked.js b/tests/flowconfig_ignore/subdirectory/my_untyped/typecheck/checked.js new file mode 100644 index 00000000000..c7484e5a783 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_untyped/typecheck/checked.js @@ -0,0 +1,4 @@ +//@flow + +var x: number = 42; +x(); diff --git a/tests/flowconfig_ignore/subdirectory/my_untyped/typecheck_if_in_root.js b/tests/flowconfig_ignore/subdirectory/my_untyped/typecheck_if_in_root.js new file mode 100644 index 00000000000..a59c1fd0b65 --- /dev/null +++ b/tests/flowconfig_ignore/subdirectory/my_untyped/typecheck_if_in_root.js @@ -0,0 +1,4 @@ +//@flow + +var x: mixed = 42; +x(); diff --git a/tests/flowconfig_ignore/test.js b/tests/flowconfig_ignore/test.js new file mode 100644 index 00000000000..f69e5fba772 --- /dev/null +++ b/tests/flowconfig_ignore/test.js @@ -0,0 +1,7 @@ +//@flow + +import type { T } from "./my_declarations/outer_decls"; +import { t } from "./subdirectory/my_declarations/inner_decls"; + +import type { K } from "./my_ignores/outer_bogus" //should error +import { y } from "./my_untyped/outer_untyped" diff --git a/tests/flowconfig_rollouts/.flowconfig.0_pct_ignore b/tests/flowconfig_rollouts/.flowconfig.0_pct_ignore new file mode 100644 index 00000000000..468a452caac --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.0_pct_ignore @@ -0,0 +1,8 @@ +[ignore] +(ignore_everything=true).*\.js + +[rollouts] +ignore_everything=0% true, 100% false + +[options] +experimental.well_formed_exports=true \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.0_pct_well_formed_exports b/tests/flowconfig_rollouts/.flowconfig.0_pct_well_formed_exports new file mode 100644 index 00000000000..9b136e3c74a --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.0_pct_well_formed_exports @@ -0,0 +1,5 @@ +[rollouts] +verify_sig=0% on, 100% off + +[options] +(verify_sig=on) experimental.well_formed_exports=true \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.100_pct_well_formed_exports b/tests/flowconfig_rollouts/.flowconfig.100_pct_well_formed_exports new file mode 100644 index 00000000000..0c6985047a5 --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.100_pct_well_formed_exports @@ -0,0 +1,5 @@ +[rollouts] +verify_sig=100% on, 0% off + +[options] +(verify_sig=on) experimental.well_formed_exports=true \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.bad_group_name b/tests/flowconfig_rollouts/.flowconfig.bad_group_name new file mode 100644 index 00000000000..9997e0cb97d --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.bad_group_name @@ -0,0 +1,2 @@ +[rollouts] +rollout=100% $potato \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.bad_rollout_name b/tests/flowconfig_rollouts/.flowconfig.bad_rollout_name new file mode 100644 index 00000000000..9a0315f15e2 --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.bad_rollout_name @@ -0,0 +1,2 @@ +[rollouts] +$bad_rollout=100% on \ No newline at end of 
file diff --git a/tests/flowconfig_rollouts/.flowconfig.duplicate_group_names b/tests/flowconfig_rollouts/.flowconfig.duplicate_group_names new file mode 100644 index 00000000000..8023a34759c --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.duplicate_group_names @@ -0,0 +1,2 @@ +[rollouts] +foo=20% on, 80% on \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.duplicate_rollout_names b/tests/flowconfig_rollouts/.flowconfig.duplicate_rollout_names new file mode 100644 index 00000000000..872982ed0c6 --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.duplicate_rollout_names @@ -0,0 +1,3 @@ +[rollouts] +foo=20% on, 80% off +foo=20% on, 80% off \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.pct_sum_too_high b/tests/flowconfig_rollouts/.flowconfig.pct_sum_too_high new file mode 100644 index 00000000000..2daa8fd5f1c --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.pct_sum_too_high @@ -0,0 +1,2 @@ +[rollouts] +foo=100% on, 80% off \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.pct_sum_too_low b/tests/flowconfig_rollouts/.flowconfig.pct_sum_too_low new file mode 100644 index 00000000000..13d96b3593d --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.pct_sum_too_low @@ -0,0 +1,2 @@ +[rollouts] +foo=10% on, 80% off \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.unknown_group b/tests/flowconfig_rollouts/.flowconfig.unknown_group new file mode 100644 index 00000000000..0405d8c3eff --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.unknown_group @@ -0,0 +1,5 @@ +[rollouts] +verify_sig=0% on, 100% off + +[options] +(verify_sig=potato) experimental.well_formed_exports=true \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.flowconfig.unknown_rollout b/tests/flowconfig_rollouts/.flowconfig.unknown_rollout new file mode 100644 index 00000000000..5a4b466057b --- /dev/null +++ b/tests/flowconfig_rollouts/.flowconfig.unknown_rollout @@ -0,0 +1,2 @@ +[options] +(verify_sig=on) experimental.well_formed_exports=true \ No newline at end of file diff --git a/tests/flowconfig_rollouts/.testconfig b/tests/flowconfig_rollouts/.testconfig new file mode 100644 index 00000000000..28909efe920 --- /dev/null +++ b/tests/flowconfig_rollouts/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh \ No newline at end of file diff --git a/tests/flowconfig_rollouts/error_malformed_exports.js b/tests/flowconfig_rollouts/error_malformed_exports.js new file mode 100644 index 00000000000..f679d02cc5d --- /dev/null +++ b/tests/flowconfig_rollouts/error_malformed_exports.js @@ -0,0 +1,3 @@ +// @flow + +export function foo(x: number) { return x; } diff --git a/tests/flowconfig_rollouts/flowconfig_rollouts.exp b/tests/flowconfig_rollouts/flowconfig_rollouts.exp new file mode 100644 index 00000000000..f2c7862a36a --- /dev/null +++ b/tests/flowconfig_rollouts/flowconfig_rollouts.exp @@ -0,0 +1,53 @@ + +Should detect that rollout groups sum to less than 100%: +.flowconfig:2 Rollout groups must sum to 100%. "foo" sums to 90% + +Should detect that rollout groups sum to more than 100%: +.flowconfig:2 Rollout groups must sum to 100%. "foo" sums to 180% + +Duplicate rollout names are banned: +.flowconfig:3 Rollouts must have unique names. There already is a "foo" rollout + +Duplicate group names are banned: +.flowconfig:2 Groups must have unique names. 
There is more than one "on" group + +Rollout names may only contain [a-zA-Z0-9._]: +.flowconfig:2 Malformed rollout. A rollout should be an identifier followed by a list of groups, like `myRollout=10% on, 50% off` + +Group names may only contain [a-zA-Z0-9._]: +.flowconfig:2 Malformed rollout group. A group should be a percentage and an identifier, like `50% on` + +100% on should always be on: +Error ---------------------------------------------------------------------------------- error_malformed_exports.js:3:31 + +Failed to build a typed interface for this module. The exports of this module must be annotated with types. Missing type +annotation at function return: (`signature-verification-failure`) + + 3| export function foo(x: number) { return x; } + + + + +Found 1 error + +0% on should always be off: +Found 0 errors + +Unknown rollout: +.flowconfig:2 Unknown rollout "verify_sig" + +Unknown group: +.flowconfig:5 Unknown group "potato" in rollout "verify_sig" + +Disable a .*.js ignore via rollout: +Error ---------------------------------------------------------------------------------- error_malformed_exports.js:3:31 + +Failed to build a typed interface for this module. The exports of this module must be annotated with types. Missing type +annotation at function return: (`signature-verification-failure`) + + 3| export function foo(x: number) { return x; } + + + + +Found 1 error diff --git a/tests/flowconfig_rollouts/test.sh b/tests/flowconfig_rollouts/test.sh new file mode 100644 index 00000000000..cbd401a8baa --- /dev/null +++ b/tests/flowconfig_rollouts/test.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +printf "\nShould detect that rollout groups sum to less than 100%%:\n"; +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.pct_sum_too_low" . 2>&1 + +printf "\nShould detect that rollout groups sum to more than 100%%:\n"; +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.pct_sum_too_high" . 2>&1 + +printf "\nDuplicate rollout names are banned:\n"; +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.duplicate_rollout_names" . 2>&1 + +printf "\nDuplicate group names are banned:\n"; +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.duplicate_group_names" . 2>&1 + +printf "\nRollout names may only contain [a-zA-Z0-9._]:\n"; +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.bad_rollout_name" . 2>&1 + +printf "\nGroup names may only contain [a-zA-Z0-9._]:\n"; +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.bad_group_name" . 2>&1 + +printf "\n100%% on should always be on:\n"; +assert_errors "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.100_pct_well_formed_exports" . + +printf "\n0%% on should always be off:\n"; +assert_ok "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.0_pct_well_formed_exports" . + +printf "\nUnknown rollout:\n" +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.unknown_rollout" . 2>&1 + +printf "\nUnknown group:\n" +assert_exit 8 "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.unknown_group" . 2>&1 + +printf "\nDisable a .*.js ignore via rollout:\n" +assert_errors "$FLOW" check \ + --strip-root --no-flowlib --flowconfig-name ".flowconfig.0_pct_ignore" . 
\ No newline at end of file diff --git a/tests/focus_unparsed_file/.flowconfig b/tests/focus_unparsed_file/.flowconfig new file mode 100644 index 00000000000..1fed445333e --- /dev/null +++ b/tests/focus_unparsed_file/.flowconfig @@ -0,0 +1,11 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] + +[strict] diff --git a/tests/focus_unparsed_file/.testconfig b/tests/focus_unparsed_file/.testconfig new file mode 100644 index 00000000000..ee5a4faee71 --- /dev/null +++ b/tests/focus_unparsed_file/.testconfig @@ -0,0 +1,2 @@ +all: false +shell: test.sh diff --git a/tests/focus_unparsed_file/focus_unparsed_file.exp b/tests/focus_unparsed_file/focus_unparsed_file.exp new file mode 100644 index 00000000000..2deef3bb7d3 --- /dev/null +++ b/tests/focus_unparsed_file/focus_unparsed_file.exp @@ -0,0 +1,8 @@ + +No errors: +No errors! + +Focusing the unparsed file shouldn't blow up: + +No errors: +No errors! diff --git a/tests/focus_unparsed_file/test.sh b/tests/focus_unparsed_file/test.sh new file mode 100644 index 00000000000..2191de81d6c --- /dev/null +++ b/tests/focus_unparsed_file/test.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +printf "\nNo errors:\n" +assert_ok "$FLOW" status --no-auto-start --strip-root + +printf "\nFocusing the unparsed file shouldn't blow up:\n" +assert_ok "$FLOW" force-recheck --focus --no-auto-start unparsed.js + +printf "\nNo errors:\n" +assert_ok "$FLOW" status --no-auto-start --strip-root diff --git a/tests/focus_unparsed_file/unparsed.js b/tests/focus_unparsed_file/unparsed.js new file mode 100644 index 00000000000..2a1651ccdbb --- /dev/null +++ b/tests/focus_unparsed_file/unparsed.js @@ -0,0 +1 @@ +var x = 123; diff --git a/tests/for/for.exp b/tests/for/for.exp index 2829d581f51..e497de07327 100644 --- a/tests/for/for.exp +++ b/tests/for/for.exp @@ -1 +1,16 @@ -Found 0 errors +Error ---------------------------------------------------------------------------------------------------- scope.js:1:17 + +Cannot use variable `x` [1] because the declaration either comes later or was skipped. + + scope.js:1:17 + 1| for (const x in x) {} // error: can not reference undeclared x in right-hand expr + ^ + +References: + scope.js:1:12 + 1| for (const x in x) {} // error: can not reference undeclared x in right-hand expr + ^ [1] + + + +Found 1 error diff --git a/tests/for/scope.js b/tests/for/scope.js new file mode 100644 index 00000000000..022786df2f8 --- /dev/null +++ b/tests/for/scope.js @@ -0,0 +1 @@ +for (const x in x) {} // error: can not reference undeclared x in right-hand expr diff --git a/tests/forof/forof.exp b/tests/forof/forof.exp index a311ba39586..fb2ad1fb528 100644 --- a/tests/forof/forof.exp +++ b/tests/forof/forof.exp @@ -41,8 +41,8 @@ Cannot cast `x` to number because string [1] is incompatible with number [2]. 
^ References: - /core.js:291:28 - 291| @@iterator(): Iterator; + /core.js:321:28 + 321| @@iterator(): Iterator; ^^^^^^ [1] forof.js:25:9 25| (x: number); // Error - string ~> number @@ -58,8 +58,8 @@ Cannot cast `elem` to number because tuple type [1] is incompatible with number ^^^^ References: - /core.js:532:28 - 532| @@iterator(): Iterator<[K, V]>; + /core.js:593:28 + 593| @@iterator(): Iterator<[K, V]>; ^^^^^^ [1] forof.js:32:12 32| (elem: number); // Error - tuple ~> number @@ -75,8 +75,8 @@ Cannot cast `elem` to number because tuple type [1] is incompatible with number ^^^^ References: - /core.js:532:28 - 532| @@iterator(): Iterator<[K, V]>; + /core.js:593:28 + 593| @@iterator(): Iterator<[K, V]>; ^^^^^^ [1] forof.js:39:12 39| (elem: number); // Error - tuple ~> number @@ -100,5 +100,89 @@ References: ^^^^^^ [2] +Error ---------------------------------------------------------------------------------------------------- forof.js:66:7 -Found 6 errors +Cannot perform arithmetic operation because string [1] is not a number. + + forof.js:66:7 + 66| x = x * 3; // error + ^ + +References: + forof.js:55:48 + 55| declare var funky1 : { @@iterator(): $Iterator }; + ^^^^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- forof.js:85:4 + +Cannot cast `x` to number because boolean [1] is incompatible with number [2]. + + forof.js:85:4 + 85| (x : number); // error + ^ + +References: + forof.js:61:9 + 61| yield true; + ^^^^ [1] + forof.js:85:8 + 85| (x : number); // error + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- forof.js:86:4 + +Cannot cast `x` to boolean because number [1] is incompatible with boolean [2]. + + forof.js:86:4 + 86| (x : boolean); // error + ^ + +References: + forof.js:60:9 + 60| yield 0; + ^ [1] + forof.js:86:8 + 86| (x : boolean); // error + ^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- forof.js:87:4 + +Cannot cast `x` to string because: + - number [1] is incompatible with string [2]. + - boolean [3] is incompatible with string [2]. + + forof.js:87:4 + 87| (x : string) //error + ^ + +References: + forof.js:60:9 + 60| yield 0; + ^ [1] + forof.js:87:8 + 87| (x : string) //error + ^^^^^^ [2] + forof.js:61:9 + 61| yield true; + ^^^^ [3] + + +Error ---------------------------------------------------------------------------------------------------- scope.js:2:17 + +Cannot use variable `x` [1] because the declaration either comes later or was skipped. 
+ + scope.js:2:17 + 2| for (const x of x) { // error: can not reference undeclared x in right-hand expr + ^ + +References: + scope.js:2:12 + 2| for (const x of x) { // error: can not reference undeclared x in right-hand expr + ^ [1] + + + +Found 12 errors diff --git a/tests/forof/forof.js b/tests/forof/forof.js index ffcece1b14a..f1f1aff53bd 100644 --- a/tests/forof/forof.js +++ b/tests/forof/forof.js @@ -51,3 +51,38 @@ function testSet2(set: Set<*>): void { (x: number); // Anything goes } } + +declare var funky1 : { @@iterator(): $Iterator }; +declare var funky2 : { @@iterator(): $Iterator<() => {}, empty, mixed> }; +declare var funky3 : { @@iterator(): $Iterator }; +declare var funky4 : { @@iterator(): $Iterator }; +function *funky() { + yield 0; + yield true; + return ""; +} + +for (var x of funky1) { + x = x * 3; // error +} + +for (var x of funky2) { + x(); +} + +for (var x of funky3) { + if (!x) continue; + for (var y of x) { + y(); + } +} + +for (var x of funky4) { + (x : void) +} + +for (var x of funky()) { + (x : number); // error + (x : boolean); // error + (x : string) //error +} diff --git a/tests/forof/scope.js b/tests/forof/scope.js new file mode 100644 index 00000000000..daa15290a78 --- /dev/null +++ b/tests/forof/scope.js @@ -0,0 +1,4 @@ +const x: Array = []; +for (const x of x) { // error: can not reference undeclared x in right-hand expr + (x: empty); +} diff --git a/tests/function/.flowconfig b/tests/function/.flowconfig index 9fa81f9fed3..6d1fdb05736 100644 --- a/tests/function/.flowconfig +++ b/tests/function/.flowconfig @@ -6,3 +6,4 @@ [options] no_flowlib=false +esproposal.optional_chaining=enable diff --git a/tests/function/apply-array-like.js b/tests/function/apply-array-like.js new file mode 100644 index 00000000000..213d4f7ff51 --- /dev/null +++ b/tests/function/apply-array-like.js @@ -0,0 +1,20 @@ +function test(a: string, b: string): number { + return this.length; // expect []/"" this +} + +test.apply("", "foo"); // error: string ~> object +declare class MyArrayLike { + length: number; + [index: number]: T; +} +var x = new MyArrayLike(); +test.apply("", x); +var y = new MyArrayLike(); +test.apply("", y); // error: number ~> string + +function * gen() { + yield "foo"; + yield "bar"; +} + +test.apply([], gen()); // error: iterable ~> array-like diff --git a/tests/function/apply.js b/tests/function/apply.js index 3cf2bfb0b2e..68faa0b2faf 100644 --- a/tests/function/apply.js +++ b/tests/function/apply.js @@ -2,6 +2,9 @@ function test(a: string, b: number): number { return this.length; // expect []/"" this } +// arity is strictly two arguments +test.apply("", ["", 0], 'error') + // tuples flow correctly into params test.apply("", ["", 0]); @@ -21,8 +24,8 @@ f(["", 0]); // OK f(["", ""]); // error: string ~> number (2nd arg) f([0, 0]); // error: number ~> string (1st arg) -// expect array -test.apply("", "not array"); // error: expect array of args +// expect array-like +test.apply("", "not array"); // error: string ~> object // expect 4 errors: // - lookup length on Number (because 0 is used as `this`) diff --git a/tests/function/call_error_generic.js b/tests/function/call_error_generic.js new file mode 100644 index 00000000000..11191b4bc05 --- /dev/null +++ b/tests/function/call_error_generic.js @@ -0,0 +1,23 @@ +// @flow + +declare var some: ?{ + x: string; +} + +declare class Set { + add(x: T): void; +} +declare class ROArray<+T> { } +declare class RWArray extends ROArray { } + +declare function from(set: Set): RWArray; + +const foo = (() => { + const set = new 
Set(); + set.add(some?.x); + return from(set); +})(); + +function bar(x: ROArray) { } + +bar(foo); diff --git a/tests/function/call_error_generic2.js b/tests/function/call_error_generic2.js new file mode 100644 index 00000000000..a36e1173262 --- /dev/null +++ b/tests/function/call_error_generic2.js @@ -0,0 +1,12 @@ +// @flow + +declare function bar1(key: TKey): void; + +function bar2(fragmentRef) { + bar1(fragmentRef); +} + +function foo(props: { userRef?: {} }) { + const userRef = props.userRef; + bar2(userRef); +} diff --git a/tests/function/function.exp b/tests/function/function.exp index 6a6ee53728f..2ca1733dea7 100644 --- a/tests/function/function.exp +++ b/tests/function/function.exp @@ -1,3 +1,44 @@ +Error ----------------------------------------------------------------------------------------- apply-array-like.js:5:16 + +string [1] is incompatible with `$ArrayLike` [2]. + + apply-array-like.js:5:16 + 5| test.apply("", "foo"); // error: string ~> object + ^^^^^ [1] + +References: + /core.js:306:22 + v + 306| type $ArrayLike = { + 307| [indexer: number]: T, + 308| length: number, + 309| ... + 310| } + ^ [2] + + +Error ---------------------------------------------------------------------------------------- apply-array-like.js:20:16 + +Property `length` is missing in `Generator` [1] but exists in `$ArrayLike` [2]. + + apply-array-like.js:20:16 + 20| test.apply([], gen()); // error: iterable ~> array-like + ^^^^^ + +References: + apply-array-like.js:15:17 + 15| function * gen() { + ^ [1] + /core.js:306:22 + v + 306| type $ArrayLike = { + 307| [indexer: number]: T, + 308| length: number, + 309| ... + 310| } + ^ [2] + + Error ---------------------------------------------------------------------------------------------------- apply.js:2:10 Cannot get `this.length` because: @@ -10,23 +51,37 @@ Cannot get `this.length` because: ^^^^^^^^^^^ References: - apply.js:9:12 - 9| test.apply(0, ["", 0]); // error: lookup `length` on Number + apply.js:12:12 + 12| test.apply(0, ["", 0]); // error: lookup `length` on Number ^ [1] - apply.js:32:25 - 32| (test.call.apply(test, [0, 123, 'foo']): void); + apply.js:35:25 + 35| (test.call.apply(test, [0, 123, 'foo']): void); ^ [2] - apply.js:37:25 - 37| (test.bind.apply(test, [0, 123]): (b: number) => number); + apply.js:40:25 + 40| (test.bind.apply(test, [0, 123]): (b: number) => number); ^ [3] -Error ---------------------------------------------------------------------------------------------------- apply.js:12:1 +Error ----------------------------------------------------------------------------------------------------- apply.js:6:1 + +Cannot call `test.apply` because no more than 2 arguments are expected by function type [1]. + + apply.js:6:1 + 6| test.apply("", ["", 0], 'error') + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /core.js:126:18 + 126| proto apply: Function$Prototype$Apply; // (thisArg: any, argArray?: any) => any + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- apply.js:15:1 Cannot call `test.apply` because function [1] requires another argument. 
- apply.js:12:1 - 12| test.apply("", [""]); // error: void ~> number + apply.js:15:1 + 15| test.apply("", [""]); // error: void ~> number ^^^^^^^^^^^^^^^^^^^^ References: @@ -35,12 +90,12 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] -Error --------------------------------------------------------------------------------------------------- apply.js:15:21 +Error --------------------------------------------------------------------------------------------------- apply.js:18:21 Cannot call `test.apply` with string bound to `b` because string [1] is incompatible with number [2]. - apply.js:15:21 - 15| test.apply("", ["", ""]); // error: string ~> number (2nd arg) + apply.js:18:21 + 18| test.apply("", ["", ""]); // error: string ~> number (2nd arg) ^^ [1] References: @@ -49,12 +104,12 @@ References: ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------------- apply.js:16:17 +Error --------------------------------------------------------------------------------------------------- apply.js:19:17 Cannot call `test.apply` with number bound to `a` because number [1] is incompatible with string [2]. - apply.js:16:17 - 16| test.apply("", [0, 0]); // error: number ~> string (1st arg) + apply.js:19:17 + 19| test.apply("", [0, 0]); // error: number ~> string (1st arg) ^ [1] References: @@ -63,12 +118,12 @@ References: ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------------- apply.js:21:8 +Error ---------------------------------------------------------------------------------------------------- apply.js:24:8 Cannot call `test.apply` with string bound to `b` because string [1] is incompatible with number [2]. - apply.js:21:8 - 21| f(["", ""]); // error: string ~> number (2nd arg) + apply.js:24:8 + 24| f(["", ""]); // error: string ~> number (2nd arg) ^^ [1] References: @@ -77,12 +132,12 @@ References: ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------------- apply.js:22:4 +Error ---------------------------------------------------------------------------------------------------- apply.js:25:4 Cannot call `test.apply` with number bound to `a` because number [1] is incompatible with string [2]. - apply.js:22:4 - 22| f([0, 0]); // error: number ~> string (1st arg) + apply.js:25:4 + 25| f([0, 0]); // error: number ~> string (1st arg) ^ [1] References: @@ -91,46 +146,48 @@ References: ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------------- apply.js:25:1 +Error --------------------------------------------------------------------------------------------------- apply.js:28:16 -Cannot call `test.apply` because string [1] is incompatible with number [2]. +string [1] is incompatible with `$ArrayLike` [2]. - apply.js:25:1 - 25| test.apply("", "not array"); // error: expect array of args - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + apply.js:28:16 + 28| test.apply("", "not array"); // error: string ~> object + ^^^^^^^^^^^ [1] References: - /core.js:291:28 - 291| @@iterator(): Iterator; - ^^^^^^ [1] - apply.js:1:29 - 1| function test(a: string, b: number): number { - ^^^^^^ [2] + /core.js:306:22 + v + 306| type $ArrayLike = { + 307| [indexer: number]: T, + 308| length: number, + 309| ... 
+ 310| } + ^ [2] -Error ---------------------------------------------------------------------------------------------------- apply.js:32:2 +Error ---------------------------------------------------------------------------------------------------- apply.js:35:2 Cannot cast `test.call.apply(...)` to undefined because number [1] is incompatible with undefined [2]. - apply.js:32:2 - 32| (test.call.apply(test, [0, 123, 'foo']): void); + apply.js:35:2 + 35| (test.call.apply(test, [0, 123, 'foo']): void); ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: apply.js:1:38 1| function test(a: string, b: number): number { ^^^^^^ [1] - apply.js:32:42 - 32| (test.call.apply(test, [0, 123, 'foo']): void); + apply.js:35:42 + 35| (test.call.apply(test, [0, 123, 'foo']): void); ^^^^ [2] -Error --------------------------------------------------------------------------------------------------- apply.js:32:28 +Error --------------------------------------------------------------------------------------------------- apply.js:35:28 Cannot call `test.call.apply` with number bound to `a` because number [1] is incompatible with string [2]. - apply.js:32:28 - 32| (test.call.apply(test, [0, 123, 'foo']): void); + apply.js:35:28 + 35| (test.call.apply(test, [0, 123, 'foo']): void); ^^^ [1] References: @@ -139,12 +196,12 @@ References: ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------------- apply.js:32:33 +Error --------------------------------------------------------------------------------------------------- apply.js:35:33 Cannot call `test.call.apply` with string bound to `b` because string [1] is incompatible with number [2]. - apply.js:32:33 - 32| (test.call.apply(test, [0, 123, 'foo']): void); + apply.js:35:33 + 35| (test.call.apply(test, [0, 123, 'foo']): void); ^^^^^ [1] References: @@ -153,12 +210,12 @@ References: ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------------- apply.js:37:28 +Error --------------------------------------------------------------------------------------------------- apply.js:40:28 Cannot call `test.bind.apply` with number bound to `a` because number [1] is incompatible with string [2]. - apply.js:37:28 - 37| (test.bind.apply(test, [0, 123]): (b: number) => number); + apply.js:40:28 + 40| (test.bind.apply(test, [0, 123]): (b: number) => number); ^^^ [1] References: @@ -167,17 +224,17 @@ References: ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------------- apply.js:47:22 +Error --------------------------------------------------------------------------------------------------- apply.js:50:22 Cannot call `x.apply` with number bound to `b` because number [1] is incompatible with string [2]. - apply.js:47:22 - 47| x.apply(x, ['foo', 123]); // error, number !~> string + apply.js:50:22 + 50| x.apply(x, ['foo', 123]); // error, number !~> string ^^^ [1] References: - apply.js:45:36 - 45| function test3(x: { (a: string, b: string): void }) { + apply.js:48:36 + 48| function test3(x: { (a: string, b: string): void }) { ^^^^^^ [2] @@ -428,63 +485,54 @@ References: ^^^^^ [2] -Error ------------------------------------------------------------------------------------------------- function.js:21:3 - -Cannot resolve name `React`. 
- - 21| ; - ^^^^^^^^^^^ - +Error --------------------------------------------------------------------------------------- call_error_generic.js:23:5 -Error ------------------------------------------------------------------------------------------------ function.js:29:19 +Cannot call `bar` with `foo` bound to `x` because null or undefined [1] is incompatible with string [2] in type argument +`T` [3]. -Cannot assign `x` to `a` because function type [1] is incompatible with number [2]. - - function.js:29:19 - 29| var a: number = x; // Error - ^ + call_error_generic.js:23:5 + 23| bar(foo); + ^^^ References: - function.js:28:17 - 28| function bad(x: Function, y: Object): void { - ^^^^^^^^ [1] - function.js:29:10 - 29| var a: number = x; // Error - ^^^^^^ [2] + call_error_generic.js:3:19 + v- + 3| declare var some: ?{ + 4| x: string; + 5| } + ^ [1] + call_error_generic.js:21:25 + 21| function bar(x: ROArray) { } + ^^^^^^ [2] + call_error_generic.js:10:24 + 10| declare class ROArray<+T> { } + ^ [3] -Error ------------------------------------------------------------------------------------------------ function.js:30:19 +Error -------------------------------------------------------------------------------------- call_error_generic2.js:11:8 -Cannot assign `x` to `b` because function type [1] is incompatible with string [2]. +Cannot call `bar2` with `userRef` bound to `fragmentRef` because property `$data` is missing in object type [1] but +exists in object type [2]. - function.js:30:19 - 30| var b: string = x; // Error - ^ + call_error_generic2.js:11:8 + 11| bar2(userRef); + ^^^^^^^ References: - function.js:28:17 - 28| function bad(x: Function, y: Object): void { - ^^^^^^^^ [1] - function.js:30:10 - 30| var b: string = x; // Error - ^^^^^^ [2] - + call_error_generic2.js:9:33 + 9| function foo(props: { userRef?: {} }) { + ^^ [1] + call_error_generic2.js:3:30 + 3| declare function bar1(key: TKey): void; + ^^^^^^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------------ function.js:31:21 -Cannot assign `y` to `c` because object type [1] is incompatible with function type [2]. +Error ------------------------------------------------------------------------------------------------- function.js:21:3 - function.js:31:21 - 31| var c: Function = y; // Object is not a Function - ^ +Cannot resolve name `React`. -References: - function.js:28:30 - 28| function bad(x: Function, y: Object): void { - ^^^^^^ [1] - function.js:31:10 - 31| var c: Function = y; // Object is not a Function - ^^^^^^^^ [2] + 21| ; + ^^^^^^^^^^^ Error ------------------------------------------------------------------------------------------------- function.js:37:6 @@ -496,8 +544,8 @@ Cannot cast `x.length` to undefined because number [1] is incompatible with unde ^^^^^^^^ References: - /core.js:112:13 - 112| length: number; + /core.js:132:13 + 132| length: number; ^^^^^^ [1] function.js:37:16 37| (x.length: void); // error, it's a number @@ -513,31 +561,14 @@ Cannot cast `y.length` to undefined because number [1] is incompatible with unde ^^^^^^^^ References: - /core.js:112:13 - 112| length: number; + /core.js:132:13 + 132| length: number; ^^^^^^ [1] function.js:38:16 38| (y.length: void); // error, it's a number ^^^^ [2] -Error ------------------------------------------------------------------------------------------------- function.js:39:6 - -Cannot cast `z.length` to undefined because number [1] is incompatible with undefined [2]. 
- - function.js:39:6 - 39| (z.length: void); // error, it's a number - ^^^^^^^^ - -References: - /core.js:112:13 - 112| length: number; - ^^^^^^ [1] - function.js:39:16 - 39| (z.length: void); // error, it's a number - ^^^^ [2] - - Error ------------------------------------------------------------------------------------------------- function.js:41:6 Cannot cast `x.name` to undefined because string [1] is incompatible with undefined [2]. @@ -547,8 +578,8 @@ Cannot cast `x.name` to undefined because string [1] is incompatible with undefi ^^^^^^ References: - /core.js:113:11 - 113| name: string; + /core.js:133:11 + 133| name: string; ^^^^^^ [1] function.js:41:14 41| (x.name: void); // error, it's a string @@ -564,31 +595,14 @@ Cannot cast `y.name` to undefined because string [1] is incompatible with undefi ^^^^^^ References: - /core.js:113:11 - 113| name: string; + /core.js:133:11 + 133| name: string; ^^^^^^ [1] function.js:42:14 42| (y.name: void); // error, it's a string ^^^^ [2] -Error ------------------------------------------------------------------------------------------------- function.js:43:6 - -Cannot cast `z.name` to undefined because string [1] is incompatible with undefined [2]. - - function.js:43:6 - 43| (z.name: void); // error, it's a string - ^^^^^^ - -References: - /core.js:113:11 - 113| name: string; - ^^^^^^ [1] - function.js:43:14 - 43| (z.name: void); // error, it's a string - ^^^^ [2] - - Error ------------------------------------------------------------------------------------------------ function.js:48:16 Cannot assign `'foo'` to `x.length` because string [1] is incompatible with number [2]. @@ -598,8 +612,8 @@ Cannot assign `'foo'` to `x.length` because string [1] is incompatible with numb ^^^^^ [1] References: - /core.js:112:13 - 112| length: number; + /core.js:132:13 + 132| length: number; ^^^^^^ [2] @@ -612,22 +626,8 @@ Cannot assign `'foo'` to `y.length` because string [1] is incompatible with numb ^^^^^ [1] References: - /core.js:112:13 - 112| length: number; - ^^^^^^ [2] - - -Error ------------------------------------------------------------------------------------------------ function.js:50:16 - -Cannot assign `'foo'` to `z.length` because string [1] is incompatible with number [2]. - - function.js:50:16 - 50| z.length = 'foo'; // error, it's a number - ^^^^^ [1] - -References: - /core.js:112:13 - 112| length: number; + /core.js:132:13 + 132| length: number; ^^^^^^ [2] @@ -640,8 +640,8 @@ Cannot assign `123` to `x.name` because number [1] is incompatible with string [ ^^^ [1] References: - /core.js:113:11 - 113| name: string; + /core.js:133:11 + 133| name: string; ^^^^^^ [2] @@ -654,23 +654,23 @@ Cannot assign `123` to `y.name` because number [1] is incompatible with string [ ^^^ [1] References: - /core.js:113:11 - 113| name: string; + /core.js:133:11 + 133| name: string; ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------------ function.js:54:14 +Error ------------------------------------------------------------------------------------------------ issue-7529.js:4:7 -Cannot assign `123` to `z.name` because number [1] is incompatible with string [2]. +Cannot call `foo` with `123` bound to `x` because number [1] is incompatible with string [2]. 
- function.js:54:14 - 54| z.name = 123; // error, it's a string - ^^^ [1] + issue-7529.js:4:7 + 4| foo(123); + ^^^ [1] References: - /core.js:113:11 - 113| name: string; - ^^^^^^ [2] + issue-7529.js:3:29 + 3| const bar = function foo(x: string) { + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------------- rest.js:36:10 @@ -687,18 +687,32 @@ References: ^ [1] -Error ---------------------------------------------------------------------------------------------------- rest.js:38:38 +Error ---------------------------------------------------------------------------------------------------- rest.js:38:55 + +Cannot call `string_rest_t` because rest array [1] is incompatible with string [2]. + + rest.js:38:55 + 38| function string_rest_t(...xs: T): void {}; string_rest_t(); // Error - rest param can't be a string + ^^^^^^^^^^^^^^^ [1] + +References: + rest.js:38:27 + 38| function string_rest_t(...xs: T): void {}; string_rest_t(); // Error - rest param can't be a string + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- rest.js:39:53 -Rest params are always arrays [1] is incompatible with string [2]. +Cannot call `empty_rest_t` because rest array [1] is incompatible with empty [2]. - rest.js:38:38 - 38| function string_rest_t(...xs: T): void {} // Error - rest param can't be a string - ^^^^^ [1] + rest.js:39:53 + 39| function empty_rest_t(...xs: T): void {}; empty_rest_t(); // Error - rest param can't be empty + ^^^^^^^^^^^^^^ [1] References: - rest.js:38:42 - 38| function string_rest_t(...xs: T): void {} // Error - rest param can't be a string - ^ [2] + rest.js:29:26 + 29| function empty_rest_t(...xs: T): void {} + ^^^^^ [2] Error ----------------------------------------------------------------------------------------------------- rest.js:53:2 diff --git a/tests/function/issue-7529.js b/tests/function/issue-7529.js new file mode 100644 index 00000000000..ea3b7af58ee --- /dev/null +++ b/tests/function/issue-7529.js @@ -0,0 +1,5 @@ +// @flow + +const bar = function foo(x: string) { + foo(123); +} diff --git a/tests/function/rest.js b/tests/function/rest.js index 1b7e33fccba..6e3944a893b 100644 --- a/tests/function/rest.js +++ b/tests/function/rest.js @@ -35,8 +35,8 @@ function bounds_on_bounds() { function bad_unbound_rest_t(...xs: T): T { return xs.pop(); // Error - no bound on T } -function string_rest_t(...xs: T): void {} // Error - rest param can't be a string -function empty_rest_t(...xs: T): void {} // Error - rest param can't be empty +function string_rest_t(...xs: T): void {}; string_rest_t(); // Error - rest param can't be a string +function empty_rest_t(...xs: T): void {}; empty_rest_t(); // Error - rest param can't be empty type Rest = Array; function rest_alias(...xs: Rest): void {} // Ok diff --git a/tests/function_as_type/.flowconfig b/tests/function_as_type/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/function_as_type/fun.js b/tests/function_as_type/fun.js new file mode 100644 index 00000000000..0006cefe3aa --- /dev/null +++ b/tests/function_as_type/fun.js @@ -0,0 +1,3 @@ +// @flow + +export function fun() {} diff --git a/tests/function_as_type/function_as_type.exp b/tests/function_as_type/function_as_type.exp new file mode 100644 index 00000000000..f9f5c893c1e --- /dev/null +++ b/tests/function_as_type/function_as_type.exp @@ -0,0 +1,11 @@ +Error 
------------------------------------------------------------------------------------------------- uses_fun.js:3:14 + +Cannot import the value `fun` as a type. `import type` only works on type exports like type aliases, interfaces, and +classes. If you intended to import the type of a value use `import typeof` instead. + + 3| import type {fun} from './fun'; + ^^^ + + + +Found 1 error diff --git a/tests/function_as_type/uses_fun.js b/tests/function_as_type/uses_fun.js new file mode 100644 index 00000000000..d7b057839e6 --- /dev/null +++ b/tests/function_as_type/uses_fun.js @@ -0,0 +1,5 @@ +// @flow + +import type {fun} from './fun'; + +({}: fun) diff --git a/tests/generators/generators.exp b/tests/generators/generators.exp index d30ff3c0ee7..a484c19577a 100644 --- a/tests/generators/generators.exp +++ b/tests/generators/generators.exp @@ -41,8 +41,8 @@ References: class.js:23:39 23| *stmt_return_err(): Generator { ^^^^^^ [2] - /core.js:498:29 - 498| interface Generator<+Yield,+Return,-Next> { + /core.js:548:29 + 548| interface Generator<+Yield,+Return,-Next> { ^^^^^^ [3] @@ -107,14 +107,14 @@ of property `@@iterator`. ^^ References: - /core.js:491:38 - 491| type Iterator<+T> = $Iterator; + /core.js:541:38 + 541| type Iterator<+T> = $Iterator; ^^^^ [1] class.js:125:42 125| examples.delegate_next_iterable([]).next(""); // error: Iterator has no next value ^^ [2] - /core.js:487:37 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:37 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^ [3] @@ -280,8 +280,8 @@ References: generators.js:22:46 22| function *stmt_return_err(): Generator { ^^^^^^ [2] - /core.js:498:29 - 498| interface Generator<+Yield,+Return,-Next> { + /core.js:548:29 + 548| interface Generator<+Yield,+Return,-Next> { ^^^^^^ [3] @@ -397,14 +397,14 @@ of property `@@iterator`. ^^ References: - /core.js:491:38 - 491| type Iterator<+T> = $Iterator; + /core.js:541:38 + 541| type Iterator<+T> = $Iterator; ^^^^ [1] generators.js:94:33 94| delegate_next_iterable([]).next(""); // error: Iterator has no next value ^^ [2] - /core.js:487:37 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:37 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^ [3] @@ -514,9 +514,9 @@ Cannot cast `refuse_return_result.value` to string because: ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:484:28 - 484| | { done: true, +value?: Return } - ^^^^^^ [1] + /core.js:528:14 + 528| +value?: Return, + ^^^^^^ [1] return.js:20:32 20| (refuse_return_result.value: string); // error: number | void ~> string ^^^^^^ [2] diff --git a/tests/generic_zeroed/.flowconfig b/tests/generic_zeroed/.flowconfig new file mode 100644 index 00000000000..de38d19537d --- /dev/null +++ b/tests/generic_zeroed/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false diff --git a/tests/generic_zeroed/add1arg.js b/tests/generic_zeroed/add1arg.js new file mode 100644 index 00000000000..267d29c3af1 --- /dev/null +++ b/tests/generic_zeroed/add1arg.js @@ -0,0 +1,7 @@ +//@flow + +function corrupt(x: S): S { + return "A" + x; +} + +var x: "B" = corrupt<"B">("B") diff --git a/tests/generic_zeroed/generic_zeroed.exp b/tests/generic_zeroed/generic_zeroed.exp new file mode 100644 index 00000000000..d8f3a0c98de --- /dev/null +++ b/tests/generic_zeroed/generic_zeroed.exp @@ -0,0 +1,105 @@ +Error -------------------------------------------------------------------------------------------------- add1arg.js:4:10 + +Cannot return `"A" + x` because string [1] is incompatible with `S` [2]. 
+ + add1arg.js:4:10 + 4| return "A" + x; + ^^^^^^^ + +References: + add1arg.js:4:10 + 4| return "A" + x; + ^^^ [1] + add1arg.js:3:36 + 3| function corrupt(x: S): S { + ^ [2] + + +Error ---------------------------------------------------------------------------------------------------- logic.js:3:10 + +Cannot return `a && b` because string [1] is incompatible with `A` [2]. + + logic.js:3:10 + 3| return a && b + ^^^^^^ + +References: + logic.js:2:41 + 2| function f(a: A, b: B): A { + ^ [1] + logic.js:2:45 + 2| function f(a: A, b: B): A { + ^ [2] + + +Error --------------------------------------------------------------------------------------------------- logic2.js:3:10 + +Cannot return `a && b` because `B` [1] is incompatible with `A` [2]. + + logic2.js:3:10 + 3| return a && b + ^^^^^^ + +References: + logic2.js:2:27 + 2| function f(a: A, b: B): A { + ^ [1] + logic2.js:2:31 + 2| function f(a: A, b: B): A { + ^ [2] + + +Error ------------------------------------------------------------------------------------------------------ num.js:3:10 + +Cannot return `a + b` because number [1] is incompatible with `A` [2]. + + num.js:3:10 + 3| return a + b + ^^^^^ + +References: + num.js:2:41 + 2| function f(a: A, b: B): A { + ^ [1] + num.js:2:45 + 2| function f(a: A, b: B): A { + ^ [2] + + +Error ---------------------------------------------------------------------------------------------------- reduce.js:8:6 + +Cannot call `nums.reduce` with function bound to `callbackfn` because number [1] is incompatible with `T` [2] in the +return value. + + reduce.js:8:6 + 8| ((prevnum, curnum) => curnum + prevnum), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + reduce.js:5:18 + 5| ...nums: Array + ^^^^^^ [1] + reduce.js:7:22 + 7| return nums.reduce( + ^ [2] + + +Error ------------------------------------------------------------------------------------------------------- wc.js:7:12 + +Cannot return `"BAD_" + s` because string [1] is incompatible with `S` [2]. 
+ + wc.js:7:12 + 7| return "BAD_" + s; + ^^^^^^^^^^ + +References: + wc.js:7:12 + 7| return "BAD_" + s; + ^^^^^^ [1] + wc.js:6:38 + 6| function corrupt(s: S): S { + ^ [2] + + + +Found 6 errors diff --git a/tests/generic_zeroed/logic.js b/tests/generic_zeroed/logic.js new file mode 100644 index 00000000000..c35a9e7178d --- /dev/null +++ b/tests/generic_zeroed/logic.js @@ -0,0 +1,14 @@ +//@flow +function f(a: A, b: B): A { + return a && b +} +//var x: number = f(14, "broken"); +var y: "a" = f<"a", "b">("a", "b"); + +function compareGeneric(a: T, b: T): boolean { + return a < b; +} + +function compareGeneric2(a: T, b: S): boolean { + return a < b; +} diff --git a/tests/generic_zeroed/logic2.js b/tests/generic_zeroed/logic2.js new file mode 100644 index 00000000000..bb1d6ad41e6 --- /dev/null +++ b/tests/generic_zeroed/logic2.js @@ -0,0 +1,5 @@ +//@flow +function f(a: A, b: B): A { + return a && b +} +var x: number = f(14, "broken"); diff --git a/tests/generic_zeroed/num.js b/tests/generic_zeroed/num.js new file mode 100644 index 00000000000..205bac6c992 --- /dev/null +++ b/tests/generic_zeroed/num.js @@ -0,0 +1,6 @@ +//@flow +function f(a: A, b: B): A { + return a + b +} +//var x: number = f(14, "broken"); +var y: 42 = f<42, 9>(42, 9); diff --git a/tests/generic_zeroed/reduce.js b/tests/generic_zeroed/reduce.js new file mode 100644 index 00000000000..51520719d41 --- /dev/null +++ b/tests/generic_zeroed/reduce.js @@ -0,0 +1,13 @@ +//@flow + +function mergeNumsError( + defaultNumber: T, + ...nums: Array +): T { + return nums.reduce( + ((prevnum, curnum) => curnum + prevnum), + defaultNumber + ) +} + +var x: 42 = mergeNumsError(42, 90, 90); diff --git a/tests/generic_zeroed/wc.js b/tests/generic_zeroed/wc.js new file mode 100644 index 00000000000..15ec933ca5c --- /dev/null +++ b/tests/generic_zeroed/wc.js @@ -0,0 +1,17 @@ +// @flow +function coerce(t: T): U { + type Fruit = + | {| +type: "APPLE", +value: T |} + | {| +type: "BAD_APPLE", +value: empty |}; + function corrupt(s: S): S { + return "BAD_" + s; + } + const fruit: Fruit = { type: (corrupt("APPLE"): "APPLE"), value: t }; + if (fruit.type === "BAD_APPLE") { + return fruit.value; + } else { + throw new Error("Unreachable."); + } +} +const twelve: number = coerce("twelve"); // no type error! +twelve.toFixed(); // runtime error! 
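The generic_zeroed fixtures above (add1arg.js, logic.js, num.js, reduce.js and wc.js) all exercise the same unsoundness: a value is assembled from concrete parts ("A" + x, a && b, a + b, a reduce over numbers) and then returned at the caller-chosen generic type, which the new expectation file now rejects. A minimal sketch of the distinction, written in the same Flow style as the fixtures; the function names below are illustrative and are not part of the diff:

// @flow
// Unsound: `S` may be instantiated at a string literal type such as "B",
// but `"A" + x` is only known to be a `string`, so it cannot come back as `S`.
function corruptS<S: string>(x: S): S {
  return "A" + x; // error: string is incompatible with `S`
}

// Sound: widen the return annotation to the bound instead of the parameter.
function prefixS<S: string>(x: S): string {
  return "A" + x; // ok
}

const bad: "B" = corruptS<"B">("B");   // only safe if corruptS really returns a "B"
const ok: string = prefixS<"B">("B");  // callers just get a string back

Widening the return type to the bound is the sound fix: callers of prefixS never learn more than `string` about the result, whereas corruptS promises to hand back exactly the type the caller picked, which the body cannot guarantee.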
diff --git a/tests/get-def/get-def.exp b/tests/get-def/get-def.exp index 1f3e37e021b..37e73996ed8 100644 --- a/tests/get-def/get-def.exp +++ b/tests/get-def/get-def.exp @@ -36,7 +36,7 @@ optional chain subsequent property of null = {"path":"optional_chaining.js","lin shorthand destructuring = {"path":"objects.js","line":3,"endline":3,"start":13,"end":15} non-shorthand destructuring = {"path":"objects.js","line":3,"endline":3,"start":13,"end":15} destructuring without type alias = {"path":"objects.js","line":7,"endline":7,"start":12,"end":14} -destructuring a shadow prop = {"path":"","line":0,"endline":0,"start":1,"end":0} +destructuring a shadow prop = {"path":"objects.js","line":23,"endline":23,"start":10,"end":12} bogus array destructuring of an object = {"path":"","line":0,"endline":0,"start":1,"end":0} property access on the arg to the idx callback = {"path":"idx.js","line":6,"endline":6,"start":3,"end":5} nested property access on the arg to the idx callback = {"path":"idx.js","line":7,"endline":7,"start":5,"end":7} diff --git a/tests/get-def/test.sh b/tests/get-def/test.sh index 4c6bfb2b8e4..a252d3ee5a0 100755 --- a/tests/get-def/test.sh +++ b/tests/get-def/test.sh @@ -120,7 +120,6 @@ printf "non-shorthand destructuring = " assert_ok "$FLOW" get-def objects.js 20 11 --strip-root --pretty printf "destructuring without type alias = " assert_ok "$FLOW" get-def objects.js 22 11 --strip-root --pretty -# TODO this should return results printf "destructuring a shadow prop = " assert_ok "$FLOW" get-def objects.js 23 11 --strip-root --pretty # This one should return no results diff --git a/tests/getters_and_setters/getters_and_setters.exp b/tests/getters_and_setters/getters_and_setters.exp index 358921c55d0..2b140a0a3ef 100644 --- a/tests/getters_and_setters/getters_and_setters.exp +++ b/tests/getters_and_setters/getters_and_setters.exp @@ -465,8 +465,8 @@ References: react.js:17:13 17| (); // error: number ~> string ^^^^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] @@ -482,8 +482,8 @@ References: react.js:18:20 18| (); // error: number ~> string ^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] diff --git a/tests/guess-root/.testconfig b/tests/guess-root/.testconfig new file mode 100644 index 00000000000..7e0f5b794ef --- /dev/null +++ b/tests/guess-root/.testconfig @@ -0,0 +1,2 @@ +shell: test.sh +auto_start: false diff --git a/tests/guess-root/config/.flowconfig b/tests/guess-root/config/.flowconfig new file mode 100644 index 00000000000..1fed445333e --- /dev/null +++ b/tests/guess-root/config/.flowconfig @@ -0,0 +1,11 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] + +[strict] diff --git a/tests/guess-root/config/no-config/d2.js b/tests/guess-root/config/no-config/d2.js new file mode 100644 index 00000000000..9c657602e8c --- /dev/null +++ b/tests/guess-root/config/no-config/d2.js @@ -0,0 +1,5 @@ +// @flow + +let y = 0; +function f(x = y) {} +function g({x = y}) {} diff --git a/tests/guess-root/d0.js b/tests/guess-root/d0.js new file mode 100644 index 00000000000..9c657602e8c --- /dev/null +++ b/tests/guess-root/d0.js @@ -0,0 +1,5 @@ +// @flow + +let y = 0; +function f(x = y) {} +function g({x = y}) {} diff --git a/tests/guess-root/guess-root.exp b/tests/guess-root/guess-root.exp new file mode 100644 index 00000000000..4cba84dcfd8 --- /dev/null +++ b/tests/guess-root/guess-root.exp @@ -0,0 +1,30 @@ 
+nothing +with file +with file and config +number +no-config/d2.js:3:5,3:5 +with bad root directory +number +-:3:5,3:5 +with bad root directory, and file +number +no-config/d2.js:3:5,3:5 +with bad root directory, and file +number +no-config/d2.js:3:5,3:5 +with file batch coverage + +Coverage results from 1 file(s): + +no-config/d2.js: 88.89% (8 of 9 expressions) + +----------------------------------- +Aggregate coverage statistics +----------------------------------- +Files : 1 +Expressions : + Covered : 8 + Total : 9 + Covered Percentage : 88.89% + +with bad root directory, and file batch coverage diff --git a/tests/guess-root/no-config/d1.js b/tests/guess-root/no-config/d1.js new file mode 100644 index 00000000000..9c657602e8c --- /dev/null +++ b/tests/guess-root/no-config/d1.js @@ -0,0 +1,5 @@ +// @flow + +let y = 0; +function f(x = y) {} +function g({x = y}) {} diff --git a/tests/guess-root/test.sh b/tests/guess-root/test.sh new file mode 100644 index 00000000000..5fbf5d033d9 --- /dev/null +++ b/tests/guess-root/test.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +echo "nothing" +assert_exit "$EXIT_COULD_NOT_FIND_FLOWCONFIG" "$FLOW" type-at-pos 3 5 < d0.js + +echo "with file" +assert_exit "$EXIT_COULD_NOT_FIND_FLOWCONFIG" "$FLOW" type-at-pos no-config/d1.js 3 5 + +echo "with file and config" +assert_ok "$FLOW" type-at-pos --strip-root config/no-config/d2.js 3 5 + +echo "with bad root directory" +assert_ok "$FLOW" type-at-pos --root ./config/no-config 3 5 < ./config/no-config/d2.js + +echo "with bad root directory, and file" +assert_ok "$FLOW" type-at-pos --strip-root --root ./config/no-config ./config/no-config/d2.js 3 5 + +echo "with bad root directory, and file" +assert_ok "$FLOW" type-at-pos --strip-root --root ./config/no-config ./config/no-config/d2.js 3 5 + +echo "with file batch coverage" +assert_ok "$FLOW" batch-coverage --strip-root ./config/no-config/d2.js + +echo "with bad root directory, and file batch coverage" +assert_exit "$EXIT_COULD_NOT_FIND_FLOWCONFIG" "$FLOW" batch-coverage --root ./config/no-config ./config/no-config/d2.js diff --git a/tests/implicit_inexact/.flowconfig b/tests/implicit_inexact/.flowconfig new file mode 100644 index 00000000000..afc6071d1c5 --- /dev/null +++ b/tests/implicit_inexact/.flowconfig @@ -0,0 +1,5 @@ +[options] +no_flowlib=false + +[lints] +implicit-inexact-object=error diff --git a/tests/implicit_inexact/implicit_inexact.exp b/tests/implicit_inexact/implicit_inexact.exp new file mode 100644 index 00000000000..b1ce2059d91 --- /dev/null +++ b/tests/implicit_inexact/implicit_inexact.exp @@ -0,0 +1,74 @@ +Error ----------------------------------------------------------------------------------------------------- test.js:3:10 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 3| type T = {}; // need ... in type alias + ^^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:5:17 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 5| opaque type U = {}; // need ... in opaque type alias + ^^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:7:19 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 7| function test1(x: {}) {} // need ... 
in function param + ^^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:9:19 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 9| function test2(): {} { return {}; } // need ... in function return + ^^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:12:7 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 12| test3<{}>(); // need ... in generic function type parameter + ^^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:14:12 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 14| class A {} // need ... in upper bound of generic + ^^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:17:7 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 17| new B<{}>(); // need ... in generic class type parameter + ^^ + + +Error ----------------------------------------------------------------------------------------------------- test.js:19:6 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 19| ({}: {}); // need ... in cast + ^^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:21:10 + +Please add `...` to the end of the list of properties to express an inexact object type. (`implicit-inexact-object`) + + 21| const x: {} = {}; // need ... in annotation + ^^ + + + +Found 9 errors diff --git a/tests/implicit_inexact/test.js b/tests/implicit_inexact/test.js new file mode 100644 index 00000000000..da589fdb1bf --- /dev/null +++ b/tests/implicit_inexact/test.js @@ -0,0 +1,42 @@ +//@flow + +type T = {}; // need ... in type alias + +opaque type U = {}; // need ... in opaque type alias + +function test1(x: {}) {} // need ... in function param + +function test2(): {} { return {}; } // need ... in function return + +function test3() {} +test3<{}>(); // need ... in generic function type parameter + +class A {} // need ... in upper bound of generic + +class B {} +new B<{}>(); // need ... in generic class type parameter + +({}: {}); // need ... in cast + +const x: {} = {}; // need ... in annotation + +// No errors with ... +type V = {...}; + +opaque type W = {...}; + +function test4(x: {...}) {} + +function test5(): {...} { return {}; } + +function test6() {} +test6<{...}>(); + +class C {} + +class D {} +new D<{...}>(); // need ... 
in generic class type parameter + +({}: {...}); + +const y: {...} = {}; diff --git a/tests/implicit_instantiation/.flowconfig b/tests/implicit_instantiation/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/implicit_instantiation/implicit_instantiation.exp b/tests/implicit_instantiation/implicit_instantiation.exp new file mode 100644 index 00000000000..4233c3b11aa --- /dev/null +++ b/tests/implicit_instantiation/implicit_instantiation.exp @@ -0,0 +1,61 @@ +Error ----------------------------------------------------------------------------------------------------- test.js:5:19 + +Cannot assign `identity<...>(...)` to `y` because number [1] is incompatible with string [2]. + + test.js:5:19 + 5| const y: string = identity<_>(3); // Error, string incompatible with number. + ^^^^^^^^^^^^^^ + +References: + test.js:5:31 + 5| const y: string = identity<_>(3); // Error, string incompatible with number. + ^ [1] + test.js:5:10 + 5| const y: string = identity<_>(3); // Error, string incompatible with number. + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:14:2 + +Cannot cast `a` to object type because string [1] is incompatible with number [2] in property `x`. + + test.js:14:2 + 14| (a: {x: number}); // Not ok, number incompatible with string + ^ + +References: + test.js:12:12 + 12| var b: {x: string} = a; // Concretize to string. + ^^^^^^ [1] + test.js:14:9 + 14| (a: {x: number}); // Not ok, number incompatible with string + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- test.js:17:2 + +Cannot cast `z` to string because number [1] is incompatible with string [2]. + + test.js:17:2 + 17| (z: string); // Error, number lower bound string upper bound + ^ + +References: + test.js:16:23 + 16| const z = identity<_>(3); // Give z a lower bound. + ^ [1] + test.js:17:5 + 17| (z: string); // Error, number lower bound string upper bound + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- test.js:22:22 + +Please use a concrete type annotation instead of `_` in this position. + + 22| x: unimplementable<_>(), // Error, requires concrete annot + ^ + + + +Found 4 errors diff --git a/tests/implicit_instantiation/test.js b/tests/implicit_instantiation/test.js new file mode 100644 index 00000000000..6ae9011ddb6 --- /dev/null +++ b/tests/implicit_instantiation/test.js @@ -0,0 +1,24 @@ +//@flow + +function identity(x: T): T { return x; } +const x: string = identity<_>('string'); // Ok +const y: string = identity<_>(3); // Error, string incompatible with number. + +declare function unimplementable(): {x: T}; + + +const a = unimplementable<_>(); // Ok, not exported. Leaks a tvar. + +var b: {x: string} = a; // Concretize to string. +(a: {x: string}); // Ok +(a: {x: number}); // Not ok, number incompatible with string + +const z = identity<_>(3); // Give z a lower bound. 
+(z: string); // Error, number lower bound string upper bound + +declare function readOnly(): {+x :T}; + +module.exports = { + x: unimplementable<_>(), // Error, requires concrete annot + y: readOnly<_>(), // Ok, type var is an a positive position +}; diff --git a/tests/import_meta/import_meta.exp b/tests/import_meta/import_meta.exp index 1e6cb09e630..b6025194e8d 100644 --- a/tests/import_meta/import_meta.exp +++ b/tests/import_meta/import_meta.exp @@ -30,6 +30,14 @@ Invalid left-hand side in assignment ^^^^^^^^^^^ +Error ------------------------------------------------------------------------- no_nonidentifier_after_import_dot.js:4:8 + +Unexpected token `while`, expected identifier for `import` metaproperty + + 4| import.while; // parse error + ^^^^^ + + Error ----------------------------------------------------------------------------------- no_other_metaproperties.js:4:1 The only valid meta property for import is import.meta @@ -39,4 +47,4 @@ The only valid meta property for import is import.meta -Found 5 errors +Found 6 errors diff --git a/tests/import_type/ExportDefaultNonType.js b/tests/import_type/ExportDefaultNonType.js new file mode 100644 index 00000000000..c92839f23ad --- /dev/null +++ b/tests/import_type/ExportDefaultNonType.js @@ -0,0 +1,7 @@ +/** + * @flow + */ + +var x: number = 42; + +export default x; diff --git a/tests/import_type/ExportExactObject.js b/tests/import_type/ExportExactObject.js new file mode 100644 index 00000000000..0b881f9b878 --- /dev/null +++ b/tests/import_type/ExportExactObject.js @@ -0,0 +1,4 @@ +// @flow + +declare var obj: {| f: number |} +module.exports = obj; diff --git a/tests/import_type/ExportObject.js b/tests/import_type/ExportObject.js new file mode 100644 index 00000000000..cbccaa211cc --- /dev/null +++ b/tests/import_type/ExportObject.js @@ -0,0 +1,4 @@ +// @flow + +declare var obj: { f: number } +module.exports = obj; diff --git a/tests/import_type/import_type.exp b/tests/import_type/import_type.exp index aa106f731f3..71805f68257 100644 --- a/tests/import_type/import_type.exp +++ b/tests/import_type/import_type.exp @@ -152,7 +152,7 @@ References: Error --------------------------------------------------------------------------------------------- import_type.js:71:14 -Cannot import the value `numValue` as a type. `import type` only works on type exports. Like type aliases, interfaces, +Cannot import the value `numValue` as a type. `import type` only works on type exports like type aliases, interfaces, and classes. If you intended to import the type of a value use `import typeof` instead. 71| import type {numValue} from "./ExportsANumber"; // Error: Cannot import-type a number value @@ -173,5 +173,14 @@ References: ^^^^^^^^^ [1] +Error -------------------------------------------------------------------------------------------- import_type.js:100:13 -Found 12 errors +Cannot import the default export as a type. `import type` only works on type exports like type aliases, interfaces, and +classes. If you intended to import the type of a value use `import typeof` instead. 
+ + 100| import type x from './ExportDefaultNonType'; + ^ + + + +Found 13 errors diff --git a/tests/import_type/import_type.js b/tests/import_type/import_type.js index 96ec2be84ea..b0f0e429308 100644 --- a/tests/import_type/import_type.js +++ b/tests/import_type/import_type.js @@ -80,3 +80,21 @@ import type ClassFoo6 from "./issue-359"; function foo() { ClassFoo6; // Error: Not a value binding } + +////////////////////////////////////////////////// +// == Import Type of Object (Default Export) == // +////////////////////////////////////////////////// + +import type Obj from './ExportObject'; + +//////////////////////////////////////////////////////// +// == Import Type of Exact Object (Default Export) == // +//////////////////////////////////////////////////////// + +import type ExactObj from './ExportExactObject'; + +//////////////////////////////////////////////////// +// == Import Type of a Non-Type Default Export == // +//////////////////////////////////////////////////// + +import type x from './ExportDefaultNonType'; diff --git a/tests/import_typeof/ExportCJSDefault_Function.js b/tests/import_typeof/ExportCJSDefault_Function.js new file mode 100644 index 00000000000..ba55e65d953 --- /dev/null +++ b/tests/import_typeof/ExportCJSDefault_Function.js @@ -0,0 +1,7 @@ +/** + * @flow + */ + +function functionFoo3(x: number): number { return x; } + +module.exports = functionFoo3; diff --git a/tests/import_typeof/ExportCJSNamed_Function.js b/tests/import_typeof/ExportCJSNamed_Function.js new file mode 100644 index 00000000000..ba3263afc9f --- /dev/null +++ b/tests/import_typeof/ExportCJSNamed_Function.js @@ -0,0 +1,7 @@ +/** + * @flow + */ + +function functionFoo4(x: number): number { return x; } + +exports.functionFoo4 = functionFoo4; diff --git a/tests/import_typeof/ExportDefault_Function.js b/tests/import_typeof/ExportDefault_Function.js new file mode 100644 index 00000000000..5e1e661f021 --- /dev/null +++ b/tests/import_typeof/ExportDefault_Function.js @@ -0,0 +1,7 @@ +/** + * @flow + */ + +function functionFoo1(x: number): number { return x; } + +export default functionFoo1; diff --git a/tests/import_typeof/ExportNamed_Function.js b/tests/import_typeof/ExportNamed_Function.js new file mode 100644 index 00000000000..e5e93e47991 --- /dev/null +++ b/tests/import_typeof/ExportNamed_Function.js @@ -0,0 +1,7 @@ +/** + * @flow + */ + +function functionFoo2(x: number): number { return x; } + +export {functionFoo2}; diff --git a/tests/import_typeof/import_typeof.exp b/tests/import_typeof/import_typeof.exp index e8b800e62a6..c22781a5bca 100644 --- a/tests/import_typeof/import_typeof.exp +++ b/tests/import_typeof/import_typeof.exp @@ -49,9 +49,9 @@ Cannot reference type `ClassFoo2T` [1] from a value position. ^^^^^^^^^^ References: - import_typeof.js:20:16 + import_typeof.js:20:29 20| import typeof {ClassFoo2 as ClassFoo2T} from "./ExportNamed_Class"; - ^^^^^^^^^^^^^^^^^^^^^^^ [1] + ^^^^^^^^^^ [1] Error ------------------------------------------------------------------------------------------- import_typeof.js:35:22 @@ -82,82 +82,150 @@ References: ^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- import_typeof.js:51:16 +Error -------------------------------------------------------------------------------------------- import_typeof.js:54:2 -Cannot import the type `AliasFoo3` as a type. `import typeof` only works on value exports. Like variables, functions, -and classes. If you intended to import a type use `import type` instead. 
+Cannot cast function to `functionFoo1T` because undefined [1] is incompatible with number [2] in the return value. - 51| import typeof {AliasFoo3} from "./ExportNamed_Alias"; // Error: Can't `import typeof` type aliases! + import_typeof.js:54:2 + 54| (() => {}: functionFoo1T); // Error: return types are not compatible + ^^^^^^^^ + +References: + import_typeof.js:54:7 + 54| (() => {}: functionFoo1T); // Error: return types are not compatible + ^ [1] + ExportDefault_Function.js:5:35 + 5| function functionFoo1(x: number): number { return x; } + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- import_typeof.js:63:2 + +Cannot cast function to `functionFoo2T` because undefined [1] is incompatible with number [2] in the return value. + + import_typeof.js:63:2 + 63| (() => {}: functionFoo2T); // Error: return types are not compatible + ^^^^^^^^ + +References: + import_typeof.js:63:7 + 63| (() => {}: functionFoo2T); // Error: return types are not compatible + ^ [1] + ExportNamed_Function.js:5:35 + 5| function functionFoo2(x: number): number { return x; } + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- import_typeof.js:72:2 + +Cannot cast function to `functionFoo3T` because undefined [1] is incompatible with number [2] in the return value. + + import_typeof.js:72:2 + 72| (() => {}: functionFoo3T); // Error: return types are not compatible + ^^^^^^^^ + +References: + import_typeof.js:72:7 + 72| (() => {}: functionFoo3T); // Error: return types are not compatible + ^ [1] + ExportCJSDefault_Function.js:5:35 + 5| function functionFoo3(x: number): number { return x; } + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- import_typeof.js:81:2 + +Cannot cast function to `functionFoo4T` because undefined [1] is incompatible with number [2] in the return value. + + import_typeof.js:81:2 + 81| (() => {}: functionFoo4T); // Error: return types are not compatible + ^^^^^^^^ + +References: + import_typeof.js:81:7 + 81| (() => {}: functionFoo4T); // Error: return types are not compatible + ^ [1] + ExportCJSNamed_Function.js:5:35 + 5| function functionFoo4(x: number): number { return x; } + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- import_typeof.js:87:16 + +Cannot import the type `AliasFoo3` as a type. `import typeof` only works on value exports like variables, functions, and +classes. If you intended to import a type use `import type` instead. + + 87| import typeof {AliasFoo3} from "./ExportNamed_Alias"; // Error: Can't `import typeof` type aliases! ^^^^^^^^^ -Error ------------------------------------------------------------------------------------------- import_typeof.js:68:23 +Error ------------------------------------------------------------------------------------------ import_typeof.js:104:23 Cannot assign `'asdf'` to `f2` because string [1] is incompatible with number [2]. 
- import_typeof.js:68:23 - 68| var f2: num_default = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + import_typeof.js:104:23 + 104| var f2: num_default = 'asdf'; // Error: string ~> number + ^^^^^^ [1] References: - import_typeof.js:68:9 - 68| var f2: num_default = 'asdf'; // Error: string ~> number - ^^^^^^^^^^^ [2] + import_typeof.js:104:9 + 104| var f2: num_default = 'asdf'; // Error: string ~> number + ^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- import_typeof.js:77:21 +Error ------------------------------------------------------------------------------------------ import_typeof.js:113:21 Cannot assign `'asdf'` to `g2` because string [1] is incompatible with number [2]. - import_typeof.js:77:21 - 77| var g2: num_named = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + import_typeof.js:113:21 + 113| var g2: num_named = 'asdf'; // Error: string ~> number + ^^^^^^ [1] References: - import_typeof.js:77:9 - 77| var g2: num_named = 'asdf'; // Error: string ~> number - ^^^^^^^^^ [2] + import_typeof.js:113:9 + 113| var g2: num_named = 'asdf'; // Error: string ~> number + ^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- import_typeof.js:86:27 +Error ------------------------------------------------------------------------------------------ import_typeof.js:122:27 Cannot assign `'asdf'` to `h2` because string [1] is incompatible with number [2]. - import_typeof.js:86:27 - 86| var h2: num_cjs_default = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + import_typeof.js:122:27 + 122| var h2: num_cjs_default = 'asdf'; // Error: string ~> number + ^^^^^^ [1] References: - import_typeof.js:86:9 - 86| var h2: num_cjs_default = 'asdf'; // Error: string ~> number - ^^^^^^^^^^^^^^^ [2] + import_typeof.js:122:9 + 122| var h2: num_cjs_default = 'asdf'; // Error: string ~> number + ^^^^^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- import_typeof.js:95:25 +Error ------------------------------------------------------------------------------------------ import_typeof.js:131:25 Cannot assign `'asdf'` to `i2` because string [1] is incompatible with number [2]. - import_typeof.js:95:25 - 95| var i2: num_cjs_named = 'asdf'; // Error: string ~> number - ^^^^^^ [1] + import_typeof.js:131:25 + 131| var i2: num_cjs_named = 'asdf'; // Error: string ~> number + ^^^^^^ [1] References: - import_typeof.js:95:9 - 95| var i2: num_cjs_named = 'asdf'; // Error: string ~> number - ^^^^^^^^^^^^^ [2] + import_typeof.js:131:9 + 131| var i2: num_cjs_named = 'asdf'; // Error: string ~> number + ^^^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------ import_typeof.js:103:24 +Error ------------------------------------------------------------------------------------------ import_typeof.js:139:24 Cannot assign object literal to `j2` because number [1] is incompatible with string [2] in property `str`. 
- import_typeof.js:103:24 - 103| var j2: ModuleNSObjT = {num: 42, str: 42}; // Error: number ~> string + import_typeof.js:139:24 + 139| var j2: ModuleNSObjT = {num: 42, str: 42}; // Error: number ~> string ^^^^^^^^^^^^^^^^^^ References: - import_typeof.js:103:39 - 103| var j2: ModuleNSObjT = {num: 42, str: 42}; // Error: number ~> string + import_typeof.js:139:39 + 139| var j2: ModuleNSObjT = {num: 42, str: 42}; // Error: number ~> string ^^ [1] ExportNamed_Multi.js:4:18 4| export var str = 'asdf'; @@ -165,4 +233,4 @@ References: -Found 12 errors +Found 16 errors diff --git a/tests/import_typeof/import_typeof.js b/tests/import_typeof/import_typeof.js index 6916f800420..9d13171a39c 100644 --- a/tests/import_typeof/import_typeof.js +++ b/tests/import_typeof/import_typeof.js @@ -44,6 +44,42 @@ import {ClassFoo4} from "./ExportCJSNamed_Class"; var d1: ClassFoo4T = ClassFoo4; var d2: ClassFoo4T = new ClassFoo4(); // Error: ClassFoo4 (inst) ~> ClassFoo4 (class) +////////////////////////////////////////////////////// +// == Importing Function Typeof (Default Export) == // +////////////////////////////////////////////////////// + +import typeof functionFoo1T from "./ExportDefault_Function"; +import functionFoo1 from "./ExportDefault_Function"; +(functionFoo1: functionFoo1T); +(() => {}: functionFoo1T); // Error: return types are not compatible + +//////////////////////////////////////////////////// +// == Importing Function Typeof (Named Export) == // +//////////////////////////////////////////////////// + +import typeof {functionFoo2 as functionFoo2T} from "./ExportNamed_Function"; +import {functionFoo2} from "./ExportNamed_Function"; +(functionFoo2: functionFoo2T); +(() => {}: functionFoo2T); // Error: return types are not compatible + +////////////////////////////////////////////////////////// +// == Importing Function Typeof (CJS Default Export) == // +////////////////////////////////////////////////////////// + +import typeof functionFoo3T from "./ExportCJSDefault_Function"; +import functionFoo3 from "./ExportCJSDefault_Function"; +(functionFoo3: functionFoo3T); +(() => {}: functionFoo3T); // Error: return types are not compatible + +//////////////////////////////////////////////////////// +// == Importing Function Typeof (CJS Named Export) == // +//////////////////////////////////////////////////////// + +import typeof {functionFoo4 as functionFoo4T} from "./ExportCJSNamed_Function"; +import {functionFoo4} from "./ExportCJSNamed_Function"; +(functionFoo4: functionFoo4T); +(() => {}: functionFoo4T); // Error: return types are not compatible + ////////////////////////////////////////////// // == Import Typeof Alias (Named Export) == // ////////////////////////////////////////////// diff --git a/tests/include_suppressed/.testconfig b/tests/include_suppressed/.testconfig index 5a3e9f8a853..9bfaf8ba1d7 100644 --- a/tests/include_suppressed/.testconfig +++ b/tests/include_suppressed/.testconfig @@ -1 +1,2 @@ +auto_start: false shell: test.sh diff --git a/tests/include_suppressed/include_suppressed.exp b/tests/include_suppressed/include_suppressed.exp index eaa807edef1..3b0f37e3358 100644 --- a/tests/include_suppressed/include_suppressed.exp +++ b/tests/include_suppressed/include_suppressed.exp @@ -375,3 +375,71 @@ JSON with --include-suppressed "passed":false } + +Server without --include-suppressed +Error ------------------------------------------------------------------------------------------------------ test.js:3:2 + +Cannot cast `123` to string because number [1] is incompatible with string 
[2]. + + test.js:3:2 + 3| (123: string); // Normal error + ^^^ [1] + +References: + test.js:3:7 + 3| (123: string); // Normal error + ^^^^^^ [2] + + +Warning ---------------------------------------------------------------------------------------------------- test.js:8:1 + +Unused suppression comment. + + 8| // $FlowFixMe - unused suppression comment + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + + +Found 1 error and 1 warning + + +Server with --include-suppressed +Error ------------------------------------------------------------------------------------------------------ test.js:3:2 + +Cannot cast `123` to string because number [1] is incompatible with string [2]. + + test.js:3:2 + 3| (123: string); // Normal error + ^^^ [1] + +References: + test.js:3:7 + 3| (123: string); // Normal error + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ test.js:6:2 + +Cannot cast `123` to string because number [1] is incompatible with string [2]. + + test.js:6:2 + 6| (123: string); // Suppressed error + ^^^ [1] + +References: + test.js:6:7 + 6| (123: string); // Suppressed error + ^^^^^^ [2] + + +Warning ---------------------------------------------------------------------------------------------------- test.js:8:1 + +Unused suppression comment. + + 8| // $FlowFixMe - unused suppression comment + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + + +Found 2 errors and 1 warning + diff --git a/tests/include_suppressed/test.sh b/tests/include_suppressed/test.sh index b7554d62d30..f90b24ae665 100755 --- a/tests/include_suppressed/test.sh +++ b/tests/include_suppressed/test.sh @@ -15,4 +15,14 @@ assert_errors \ "$FLOW" check --all --strip-root --include-suppressed --json --pretty \ | grep -v '^ *"flowVersion":.*' +printf "\n\nServer without --include-suppressed\n" +"$FLOW" start --wait --all +assert_errors "$FLOW" status --no-auto-start +"$FLOW" stop + +printf "\n\nServer with --include-suppressed\n" +"$FLOW" start --wait --all --include-suppressed +assert_errors "$FLOW" status --no-auto-start +"$FLOW" stop + printf "\n" diff --git a/tests/incremental_cycle_unparsed/.flowconfig b/tests/incremental_cycle_unparsed/.flowconfig new file mode 100644 index 00000000000..c136f32d600 --- /dev/null +++ b/tests/incremental_cycle_unparsed/.flowconfig @@ -0,0 +1,2 @@ +[ignore] +/tmp* diff --git a/tests/incremental_cycle_unparsed/.testconfig b/tests/incremental_cycle_unparsed/.testconfig new file mode 100644 index 00000000000..5c099d3b687 --- /dev/null +++ b/tests/incremental_cycle_unparsed/.testconfig @@ -0,0 +1,2 @@ +shell: test.sh +all: false diff --git a/tests/incremental_cycle_unparsed/bar.js b/tests/incremental_cycle_unparsed/bar.js new file mode 100644 index 00000000000..d99eaf3f22e --- /dev/null +++ b/tests/incremental_cycle_unparsed/bar.js @@ -0,0 +1,12 @@ +// @flow + +import type {foo} from './foo'; + +function err(foo: foo): number { + return foo.y; +} + +export type bar = { + x: string; + foo: foo +} diff --git a/tests/incremental_cycle_unparsed/foo.js b/tests/incremental_cycle_unparsed/foo.js new file mode 100644 index 00000000000..8a927740c23 --- /dev/null +++ b/tests/incremental_cycle_unparsed/foo.js @@ -0,0 +1,12 @@ +// @flow + +import type {bar} from './bar'; + +function err(bar: bar): number { + return bar.x; +} + +export type foo = { + y: string; + bar: bar +} diff --git a/tests/incremental_cycle_unparsed/incremental_cycle_unparsed.exp b/tests/incremental_cycle_unparsed/incremental_cycle_unparsed.exp new file mode 100644 index 
00000000000..e30c85c4379 --- /dev/null +++ b/tests/incremental_cycle_unparsed/incremental_cycle_unparsed.exp @@ -0,0 +1,181 @@ + +Initial status...with type errors: +Error ------------------------------------------------------------------------------------------------------ bar.js:6:10 + +Cannot return `foo.y` because string [1] is incompatible with number [2]. + + bar.js:6:10 + 6| return foo.y; + ^^^^^ + +References: + foo.js:10:6 + 10| y: string; + ^^^^^^ [1] + bar.js:5:25 + 5| function err(foo: foo): number { + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ foo.js:6:10 + +Cannot return `bar.x` because string [1] is incompatible with number [2]. + + foo.js:6:10 + 6| return bar.x; + ^^^^^ + +References: + bar.js:10:6 + 10| x: string; + ^^^^^^ [1] + foo.js:5:25 + 5| function err(bar: bar): number { + ^^^^^^ [2] + + + +Found 2 errors + +Delete foo.js (cannot resolve module!): +Error ------------------------------------------------------------------------------------------------------ bar.js:3:24 + +Cannot resolve module `./foo`. + + 3| import type {foo} from './foo'; + ^^^^^^^ + + + +Found 1 error + +Restore foo.js (same as initial status): +Error ------------------------------------------------------------------------------------------------------ bar.js:6:10 + +Cannot return `foo.y` because string [1] is incompatible with number [2]. + + bar.js:6:10 + 6| return foo.y; + ^^^^^ + +References: + foo.js:10:6 + 10| y: string; + ^^^^^^ [1] + bar.js:5:25 + 5| function err(foo: foo): number { + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ foo.js:6:10 + +Cannot return `bar.x` because string [1] is incompatible with number [2]. + + foo.js:6:10 + 6| return bar.x; + ^^^^^ + +References: + bar.js:10:6 + 10| x: string; + ^^^^^^ [1] + foo.js:5:25 + 5| function err(bar: bar): number { + ^^^^^^ [2] + + + +Found 2 errors + +Remove @flow in foo.js (no errors!): +No errors! + +Restore foo.js (same as initial status): +Error ------------------------------------------------------------------------------------------------------ bar.js:6:10 + +Cannot return `foo.y` because string [1] is incompatible with number [2]. + + bar.js:6:10 + 6| return foo.y; + ^^^^^ + +References: + foo.js:10:6 + 10| y: string; + ^^^^^^ [1] + bar.js:5:25 + 5| function err(foo: foo): number { + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ foo.js:6:10 + +Cannot return `bar.x` because string [1] is incompatible with number [2]. + + foo.js:6:10 + 6| return bar.x; + ^^^^^ + +References: + bar.js:10:6 + 10| x: string; + ^^^^^^ [1] + foo.js:5:25 + 5| function err(bar: bar): number { + ^^^^^^ [2] + + + +Found 2 errors + +Introduce parse error in foo.js (unexpected identifier!): +Error ----------------------------------------------------------------------------------------------------- foo.js:14:11 + +Unexpected identifier, expected the token `;` + + 14| let parse error + ^^^^^ + + + +Found 1 error + +Restore foo.js (same as initial status): +Error ------------------------------------------------------------------------------------------------------ bar.js:6:10 + +Cannot return `foo.y` because string [1] is incompatible with number [2]. 
+ + bar.js:6:10 + 6| return foo.y; + ^^^^^ + +References: + foo.js:10:6 + 10| y: string; + ^^^^^^ [1] + bar.js:5:25 + 5| function err(foo: foo): number { + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ foo.js:6:10 + +Cannot return `bar.x` because string [1] is incompatible with number [2]. + + foo.js:6:10 + 6| return bar.x; + ^^^^^ + +References: + bar.js:10:6 + 10| x: string; + ^^^^^^ [1] + foo.js:5:25 + 5| function err(bar: bar): number { + ^^^^^^ [2] + + + +Found 2 errors + +Done! diff --git a/tests/incremental_cycle_unparsed/test.sh b/tests/incremental_cycle_unparsed/test.sh new file mode 100644 index 00000000000..da74f328621 --- /dev/null +++ b/tests/incremental_cycle_unparsed/test.sh @@ -0,0 +1,40 @@ +#!/bin/bash +mkdir tmp +cp foo.js tmp/ + +printf "\nInitial status...with type errors:\n" +assert_errors "$FLOW" status --no-auto-start . + +printf "\nDelete foo.js (cannot resolve module!):\n" +rm foo.js +assert_ok "$FLOW" force-recheck --no-auto-start foo.js +assert_errors "$FLOW" status --no-auto-start . + +printf "\nRestore foo.js (same as initial status):\n" +cp tmp/foo.js . +assert_ok "$FLOW" force-recheck --no-auto-start foo.js +assert_errors "$FLOW" status --no-auto-start . + +printf "\nRemove @flow in foo.js (no errors!):\n" +cp tmp1/foo.js . +assert_ok "$FLOW" force-recheck --no-auto-start foo.js +assert_ok "$FLOW" status --no-auto-start . + +printf "\nRestore foo.js (same as initial status):\n" +cp tmp/foo.js . +assert_ok "$FLOW" force-recheck --no-auto-start foo.js +assert_errors "$FLOW" status --no-auto-start . + +printf "\nIntroduce parse error in foo.js (unexpected identifier!):\n" +cp tmp2/foo.js . +assert_ok "$FLOW" force-recheck --no-auto-start foo.js +assert_errors "$FLOW" status --no-auto-start . + +printf "\nRestore foo.js (same as initial status):\n" +cp tmp/foo.js . +assert_ok "$FLOW" force-recheck --no-auto-start foo.js +assert_errors "$FLOW" status --no-auto-start . + +rm tmp/foo.js +rmdir tmp +printf "\nDone!\n" diff --git a/tests/incremental_cycle_unparsed/tmp1/foo.js b/tests/incremental_cycle_unparsed/tmp1/foo.js new file mode 100644 index 00000000000..b908d10308d --- /dev/null +++ b/tests/incremental_cycle_unparsed/tmp1/foo.js @@ -0,0 +1,12 @@ +// @noflow + +import type {bar} from './bar'; + +function err(bar: bar): number { + return bar.x; +} + +export type foo = { + y: string; + bar: bar +} diff --git a/tests/incremental_cycle_unparsed/tmp2/foo.js b/tests/incremental_cycle_unparsed/tmp2/foo.js new file mode 100644 index 00000000000..e74d28147cf --- /dev/null +++ b/tests/incremental_cycle_unparsed/tmp2/foo.js @@ -0,0 +1,14 @@ +// @flow + +import type {bar} from './bar'; + +function err(bar: bar): number { + return bar.x; +} + +export type foo = { + y: string; + bar: bar +} + +let parse error diff --git a/tests/indexer/indexer.exp b/tests/indexer/indexer.exp index 0a9cdd76d20..2cba114898d 100644 --- a/tests/indexer/indexer.exp +++ b/tests/indexer/indexer.exp @@ -72,7 +72,8 @@ Error -------------------------------------------------------------------------- Cannot cast `y` to object type because: - object type [1] is incompatible with function prototype [2]. - - a callable signature is missing in object type [1] but exists in object type [2]. + - a call signature declaring the expected parameter / return type is missing in object type [1] but exists in object + type [2]. 
call.js:4:2 4| (y: {(number): string}); diff --git a/tests/instanceof/instanceof.exp b/tests/instanceof/instanceof.exp index 0a3dbd2b6e0..1378828fd3f 100644 --- a/tests/instanceof/instanceof.exp +++ b/tests/instanceof/instanceof.exp @@ -56,7 +56,8 @@ References: Error ----------------------------------------------------------------------------------------------- instanceof.js:85:5 -Cannot assign `123` to `x[0]` because an indexer property is missing in number [1]. +Cannot assign `123` to `x[0]` because an index signature declaring the expected key / value type is missing in +number [1]. instanceof.js:85:5 85| x[0] = 123; // error @@ -68,6 +69,51 @@ References: ^^^^^^ [1] +Error ----------------------------------------------------------------------------------------------------- mixed.js:6:6 + +Cannot cast `x` to number because `C` [1] is incompatible with number [2]. + + mixed.js:6:6 + 6| (x: number); + ^ + +References: + mixed.js:5:20 + 5| if (x instanceof C) { + ^ [1] + mixed.js:6:9 + 6| (x: number); + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- mixed.js:40:5 + +Cannot get `x.p` because property `p` is missing in `Object` [1]. + + mixed.js:40:5 + 40| x.p; + ^^^ + +References: + mixed.js:39:20 + 39| if (x instanceof Object) { + ^^^^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- mixed.js:46:5 + +Cannot call `x` because a call signature declaring the expected parameter / return type is missing in `Function` [1]. + + mixed.js:46:5 + 46| x(); + ^^^ + +References: + mixed.js:45:20 + 45| if (x instanceof Function) { + ^^^^^^^^ [1] + + Error -------------------------------------------------------------------------------------------------- objproto.js:4:4 Cannot cast `c.p` to empty because string [1] is incompatible with empty [2]. 
@@ -86,4 +132,4 @@ References: -Found 6 errors +Found 9 errors diff --git a/tests/instanceof/mixed.js b/tests/instanceof/mixed.js new file mode 100644 index 00000000000..d3aa82d1b60 --- /dev/null +++ b/tests/instanceof/mixed.js @@ -0,0 +1,48 @@ +// @flow + +class C { } +function foo(x: mixed) { + if (x instanceof C) { + (x: number); + } +} + +class A { } +class B extends A { } +function bar(x: mixed) { + if (x instanceof B) { + (x: A); + } +} + +class PA<+X> { } +class PB extends PA { } +function baz(x: mixed) { + if (x instanceof PB) { + (x: PA); + } +} + +function qux_readonlyarray(x: mixed) { + if (x instanceof Array) { + (x: $ReadOnlyArray); + } +} + +function qux_array(x: mixed) { + if (x instanceof Array) { + (x: Array); + } +} + +function qux_object(x: mixed) { + if (x instanceof Object) { + x.p; + } +} + +function qux_function(x: mixed) { + if (x instanceof Function) { + x(); + } +} diff --git a/tests/intl/intl.exp b/tests/intl/intl.exp index 34e05144425..8c1f90de16f 100644 --- a/tests/intl/intl.exp +++ b/tests/intl/intl.exp @@ -52,38 +52,38 @@ References: collator.js:7:18 7| localeMatcher: 'look fit', ^^^^^^^^^^ [1] - /intl.js:47:19 - 47| localeMatcher?: 'lookup' | 'best fit', + /intl.js:48:19 + 48| localeMatcher?: 'lookup' | 'best fit', ^^^^^^^^^^^^^^^^^^^^^ [2] collator.js:8:10 8| usage: 'find', ^^^^^^ [3] - /intl.js:48:11 - 48| usage?: 'sort' | 'search', + /intl.js:49:11 + 49| usage?: 'sort' | 'search', ^^^^^^^^^^^^^^^^^ [4] collator.js:9:16 9| sensitivity: '', ^^ [5] - /intl.js:49:17 - 49| sensitivity?: 'base' | 'accent' | 'case' | 'variant', + /intl.js:50:17 + 50| sensitivity?: 'base' | 'accent' | 'case' | 'variant', ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [6] collator.js:10:22 10| ignorePunctuation: null, ^^^^ [7] - /intl.js:50:23 - 50| ignorePunctuation?: boolean, + /intl.js:51:23 + 51| ignorePunctuation?: boolean, ^^^^^^^ [8] collator.js:11:12 11| numeric: 1, ^ [9] - /intl.js:51:13 - 51| numeric?: boolean, + /intl.js:52:13 + 52| numeric?: boolean, ^^^^^^^ [10] collator.js:12:14 12| caseFirst: 'true' ^^^^^^ [11] - /intl.js:52:15 - 52| caseFirst?: 'upper' | 'lower' | 'false' + /intl.js:53:15 + 53| caseFirst?: 'upper' | 'lower' | 'false', ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [12] @@ -164,80 +164,80 @@ References: date_time_format.js:7:18 7| localeMatcher: 'look', ^^^^^^ [1] - /intl.js:97:19 - 97| localeMatcher?: 'lookup' | 'best fit', + /intl.js:101:19 + 101| localeMatcher?: 'lookup' | 'best fit', ^^^^^^^^^^^^^^^^^^^^^ [2] date_time_format.js:8:13 8| timeZone: 1, ^ [3] - /intl.js:98:14 - 98| timeZone?: string, + /intl.js:102:14 + 102| timeZone?: string, ^^^^^^ [4] date_time_format.js:9:11 9| hour12: '', ^^ [5] - /intl.js:99:12 - 99| hour12?: boolean, + /intl.js:103:12 + 103| hour12?: boolean, ^^^^^^^ [6] date_time_format.js:10:18 10| formatMatcher: 'basic fit', ^^^^^^^^^^^ [7] - /intl.js:100:19 - 100| formatMatcher?: 'basic' | 'best fit', + /intl.js:104:19 + 104| formatMatcher?: 'basic' | 'best fit', ^^^^^^^^^^^^^^^^^^^^ [8] date_time_format.js:11:12 11| weekday: '2-digit', ^^^^^^^^^ [9] - /intl.js:101:13 - 101| weekday?: 'narrow' | 'short' | 'long', + /intl.js:105:13 + 105| weekday?: 'narrow' | 'short' | 'long', ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [10] date_time_format.js:12:8 12| era: '', ^^ [11] - /intl.js:102:9 - 102| era?: 'narrow' | 'short' | 'long', + /intl.js:106:9 + 106| era?: 'narrow' | 'short' | 'long', ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [12] date_time_format.js:13:9 13| year: '', ^^ [13] - /intl.js:103:10 - 103| year?: 'numeric' | '2-digit', + /intl.js:107:10 + 107| year?: 'numeric' | 
'2-digit', ^^^^^^^^^^^^^^^^^^^^^ [14] date_time_format.js:14:10 14| month: '', ^^ [15] - /intl.js:104:11 - 104| month?: 'numeric' | '2-digit' | 'narrow' | 'short' | 'long', + /intl.js:108:11 + 108| month?: 'numeric' | '2-digit' | 'narrow' | 'short' | 'long', ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [16] date_time_format.js:15:8 15| day: '', ^^ [17] - /intl.js:105:9 - 105| day?: 'numeric' | '2-digit', + /intl.js:109:9 + 109| day?: 'numeric' | '2-digit', ^^^^^^^^^^^^^^^^^^^^^ [18] date_time_format.js:16:9 16| hour: '', ^^ [19] - /intl.js:106:10 - 106| hour?: 'numeric' | '2-digit', + /intl.js:110:10 + 110| hour?: 'numeric' | '2-digit', ^^^^^^^^^^^^^^^^^^^^^ [20] date_time_format.js:17:11 17| minute: 'long', ^^^^^^ [21] - /intl.js:107:12 - 107| minute?: 'numeric' | '2-digit', + /intl.js:111:12 + 111| minute?: 'numeric' | '2-digit', ^^^^^^^^^^^^^^^^^^^^^ [22] date_time_format.js:18:11 18| second: 'short', ^^^^^^^ [23] - /intl.js:108:12 - 108| second?: 'numeric' | '2-digit', + /intl.js:112:12 + 112| second?: 'numeric' | '2-digit', ^^^^^^^^^^^^^^^^^^^^^ [24] date_time_format.js:19:17 19| timeZoneName: 'narrow' ^^^^^^^^ [25] - /intl.js:109:18 - 109| timeZoneName?: 'short' | 'long' + /intl.js:113:18 + 113| timeZoneName?: 'short' | 'long', ^^^^^^^^^^^^^^^^ [26] @@ -258,8 +258,8 @@ Cannot assign `Intl.getCanonicalLocales()` to `a` because array type [1] is inco ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /intl.js:14:53 - 14| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[] + /intl.js:13:53 + 13| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[], ^^^^^^^^^^^^^ [1] intl.js:2:10 2| const a: string = Intl.getCanonicalLocales(); // incorrect @@ -275,8 +275,8 @@ Cannot call `Intl.getCanonicalLocales` because undefined [1] is not a function. ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /intl.js:14:25 - 14| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[] + /intl.js:13:25 + 13| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[], ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] @@ -289,8 +289,8 @@ Cannot assign `getCanonicalLocales()` to `b` because array type [1] is incompati ^^^^^^^^^^^^^^^^^^^^^ References: - /intl.js:14:53 - 14| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[] + /intl.js:13:53 + 13| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[], ^^^^^^^^^^^^^ [1] intl.js:5:12 5| const b: string = getCanonicalLocales(); // incorrect @@ -354,8 +354,8 @@ References: 10| DateTimeFormat: Class, 11| NumberFormat: Class, 12| PluralRules: ?Class, - 13| - 14| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[] + 13| getCanonicalLocales?: (locales?: Intl$Locales) => Intl$Locale[], + 14| ... 
15| } ^ [1] @@ -423,62 +423,62 @@ References: number_format.js:7:18 7| localeMatcher: 'best', ^^^^^^ [1] - /intl.js:143:19 - 143| localeMatcher?: 'lookup' | 'best fit', + /intl.js:149:19 + 149| localeMatcher?: 'lookup' | 'best fit', ^^^^^^^^^^^^^^^^^^^^^ [2] number_format.js:8:10 8| style: 'octal', ^^^^^^^ [3] - /intl.js:144:11 - 144| style?: 'decimal' | 'currency' | 'percent', + /intl.js:150:11 + 150| style?: 'decimal' | 'currency' | 'percent', ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] number_format.js:9:13 9| currency: 123, ^^^ [5] - /intl.js:145:14 - 145| currency?: string, + /intl.js:151:14 + 151| currency?: string, ^^^^^^ [6] number_format.js:10:20 10| currencyDisplay: 'sym', ^^^^^ [7] - /intl.js:146:21 - 146| currencyDisplay?: 'symbol' | 'code' | 'name', + /intl.js:152:21 + 152| currencyDisplay?: 'symbol' | 'code' | 'name', ^^^^^^^^^^^^^^^^^^^^^^^^^^ [8] number_format.js:11:16 11| useGrouping: 5, ^ [9] - /intl.js:147:17 - 147| useGrouping?: boolean, + /intl.js:153:17 + 153| useGrouping?: boolean, ^^^^^^^ [10] number_format.js:12:25 12| minimumIntegerDigits: {}, ^^ [11] - /intl.js:148:26 - 148| minimumIntegerDigits?: number, + /intl.js:154:26 + 154| minimumIntegerDigits?: number, ^^^^^^ [12] number_format.js:13:26 13| minimumFractionDigits: '', ^^ [13] - /intl.js:149:27 - 149| minimumFractionDigits?: number, + /intl.js:155:27 + 155| minimumFractionDigits?: number, ^^^^^^ [14] number_format.js:14:26 14| maximumFractionDigits: null, ^^^^ [15] - /intl.js:150:27 - 150| maximumFractionDigits?: number, + /intl.js:156:27 + 156| maximumFractionDigits?: number, ^^^^^^ [16] number_format.js:15:29 15| minimumSignificantDigits: '', ^^ [17] - /intl.js:151:30 - 151| minimumSignificantDigits?: number, + /intl.js:157:30 + 157| minimumSignificantDigits?: number, ^^^^^^ [18] number_format.js:16:29 16| maximumSignificantDigits: null ^^^^ [19] - /intl.js:152:30 - 152| maximumSignificantDigits?: number + /intl.js:158:30 + 158| maximumSignificantDigits?: number, ^^^^^^ [20] @@ -494,7 +494,7 @@ Error -------------------------------------------------------------------------- Cannot call `Intl.PluralRules` because: - null or undefined [1] is not a function. - - a callable signature is missing in statics of `Intl$PluralRules` [2]. + - a call signature declaring the expected parameter / return type is missing in statics of `Intl$PluralRules` [2]. plural_rules.js:3:11 3| const a = Intl.PluralRules(); // incorrect @@ -504,22 +504,23 @@ References: /intl.js:12:16 12| PluralRules: ?Class, ^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /intl.js:155:15 - 155| declare class Intl$PluralRules { + /intl.js:162:15 + 162| declare class Intl$PluralRules { ^^^^^^^^^^^^^^^^ [2] Error --------------------------------------------------------------------------------------------- plural_rules.js:6:13 -Cannot call `PluralRules` because a callable signature is missing in statics of `Intl$PluralRules` [1]. +Cannot call `PluralRules` because a call signature declaring the expected parameter / return type is missing in statics +of `Intl$PluralRules` [1]. 
plural_rules.js:6:13 6| const b = PluralRules(); // incorrect ^^^^^^^^^^^^^ References: - /intl.js:155:15 - 155| declare class Intl$PluralRules { + /intl.js:162:15 + 162| declare class Intl$PluralRules { ^^^^^^^^^^^^^^^^ [1] @@ -563,32 +564,32 @@ References: plural_rules.js:12:20 12| localeMatcher: 'best one', ^^^^^^^^^^ [1] - /intl.js:180:19 - 180| localeMatcher?: 'lookup' | 'best fit', + /intl.js:188:19 + 188| localeMatcher?: 'lookup' | 'best fit', ^^^^^^^^^^^^^^^^^^^^^ [2] plural_rules.js:13:11 13| type: 'count', ^^^^^^^ [3] - /intl.js:181:10 - 181| type?: 'cardinal' | 'ordinal', + /intl.js:189:10 + 189| type?: 'cardinal' | 'ordinal', ^^^^^^^^^^^^^^^^^^^^^^ [4] plural_rules.js:14:27 14| minimumIntegerDigits: '', ^^ [5] - /intl.js:182:26 - 182| minimumIntegerDigits?: number, + /intl.js:190:26 + 190| minimumIntegerDigits?: number, ^^^^^^ [6] plural_rules.js:7:13 7| const c = new PluralRules(); // correct ^^^^^^^^^^^^^^^^^ [7] - /intl.js:185:30 - 185| minimumSignificantDigits?: number, + /intl.js:193:30 + 193| minimumSignificantDigits?: number, ^^^^^^ [8] plural_rules.js:18:31 18| maximumSignificantDigits: '' ^^ [9] - /intl.js:186:30 - 186| maximumSignificantDigits?: number + /intl.js:194:30 + 194| maximumSignificantDigits?: number, ^^^^^^ [10] @@ -615,8 +616,8 @@ Cannot call `new PluralRules().select` because function [1] requires another arg ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /intl.js:161:3 - 161| select (number): Intl$PluralRule; + /intl.js:168:3 + 168| select (number): Intl$PluralRule; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] @@ -630,8 +631,8 @@ Cannot call `PluralRules.getCanonicalLocales` because property `getCanonicalLoca ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /intl.js:155:15 - 155| declare class Intl$PluralRules { + /intl.js:162:15 + 162| declare class Intl$PluralRules { ^^^^^^^^^^^^^^^^ [1] diff --git a/tests/invariant_reachability/.flowconfig b/tests/invariant_reachability/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/invariant_reachability/index.js b/tests/invariant_reachability/index.js new file mode 100644 index 00000000000..c1a19ef4183 --- /dev/null +++ b/tests/invariant_reachability/index.js @@ -0,0 +1,56 @@ +/* @flow */ + +declare function invariant(): empty; // raises + +function foo(c: bool): string { + const y = c ? 5 : invariant(); + return "default string"; +} + + +function foo(c: bool): string { + c ? 5 : invariant(false); + return "default string"; +} + + +function foo(c: bool): string { + const y = c ? invariant() : invariant(false); + return "default string"; +} + + +function foo(c: bool): string { + const y = false ? 5 : invariant(false); + return "default string"; +} + + +function foo(c: bool): string { + invariant() + return "default string"; +} + + +function foo(c: bool): string { + invariant(false) + return "default string"; +} + +function foo(c: bool): string { + invariant(c) + return "default string"; +} + +function foo(c: bool):string { + return c ? 'a' : invariant(); +} + +function foo(c: bool):string { + return c ? 1 : invariant(); +} + + +function foo(c: bool):string { + return c ? 
invariant() : invariant(); +} diff --git a/tests/invariant_reachability/invariant_reachability.exp b/tests/invariant_reachability/invariant_reachability.exp new file mode 100644 index 00000000000..7cb565151e8 --- /dev/null +++ b/tests/invariant_reachability/invariant_reachability.exp @@ -0,0 +1,43 @@ +Error ---------------------------------------------------------------------------------------------------- index.js:19:3 + +Unreachable code. + + 19| return "default string"; + ^^^^^^^^^^^^^^^^^^^^^^^^ + + +Error ---------------------------------------------------------------------------------------------------- index.js:31:3 + +Unreachable code. + + 31| return "default string"; + ^^^^^^^^^^^^^^^^^^^^^^^^ + + +Error ---------------------------------------------------------------------------------------------------- index.js:37:3 + +Unreachable code. + + 37| return "default string"; + ^^^^^^^^^^^^^^^^^^^^^^^^ + + +Error --------------------------------------------------------------------------------------------------- index.js:50:10 + +Cannot return `c ? 1 : invariant()` because number [1] is incompatible with string [2]. + + index.js:50:10 + 50| return c ? 1 : invariant(); + ^^^^^^^^^^^^^^^^^^^ + +References: + index.js:50:14 + 50| return c ? 1 : invariant(); + ^ [1] + index.js:49:23 + 49| function foo(c: bool):string { + ^^^^^^ [2] + + + +Found 4 errors diff --git a/tests/iterable/iterable.exp b/tests/iterable/iterable.exp index a601ea7ac7a..76afb1ddc02 100644 --- a/tests/iterable/iterable.exp +++ b/tests/iterable/iterable.exp @@ -14,8 +14,8 @@ References: array.js:7:19 7| (["hi"]: Iterable); // Error string ~> number ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] @@ -35,8 +35,8 @@ References: array.js:8:22 8| (["hi", 1]: Iterable); // Error number ~> string ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] @@ -56,8 +56,8 @@ References: caching_bug.js:21:62 21| function miss_the_cache(x: Array): Iterable { return x; } ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] @@ -73,9 +73,9 @@ References: iterator_result.js:17:40 17| function makeIterator(coin_flip: () => boolean ): Iterator { ^^^^^^^ [1] - /core.js:485:13 - 485| | { done: false, +value: Yield }; - ^^^^^ [2] + /core.js:532:11 + 532| done: false, + ^^^^^ [2] Error ----------------------------------------------------------------------------------------- iterator_result.js:25:16 @@ -90,9 +90,9 @@ References: iterator_result.js:17:40 17| function makeIterator(coin_flip: () => boolean ): Iterator { ^^^^^^^ [1] - /core.js:484:13 - 484| | { done: true, +value?: Return } - ^^^^ [2] + /core.js:527:11 + 527| done: true, + ^^^^ [2] Error ----------------------------------------------------------------------------------------------------- map.js:14:10 @@ -105,14 +105,14 @@ value of property `@@iterator`. 
^^^ References: - /core.js:532:28 - 532| @@iterator(): Iterator<[K, V]>; + /core.js:593:28 + 593| @@iterator(): Iterator<[K, V]>; ^^^^^^ [1] map.js:13:55 13| function mapTest4(map: Map): Iterable { ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] @@ -132,8 +132,8 @@ References: set.js:13:47 13| function setTest4(set: Set): Iterable { ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] @@ -147,14 +147,14 @@ return value of property `@@iterator`. ^^^^ References: - /core.js:291:28 - 291| @@iterator(): Iterator; + /core.js:321:28 + 321| @@iterator(): Iterator; ^^^^^^ [1] string.js:5:17 5| ("hi": Iterable); // Error - string is a Iterable ^^^^^^ [2] - /core.js:487:22 - 487| interface $Iterator<+Yield,+Return,-Next> { + /core.js:537:22 + 537| interface $Iterator<+Yield,+Return,-Next> { ^^^^^ [3] diff --git a/tests/json2_output/json2_output.exp b/tests/json2_output/json2_output.exp index d134ae8da1c..6886d67abb7 100644 --- a/tests/json2_output/json2_output.exp +++ b/tests/json2_output/json2_output.exp @@ -1,12 +1,12 @@ flow check --json-version=2 -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or 
"},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 
3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | 
empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow check --json-version=2 --json -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because 
"},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with 
"},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | 
empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 
10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow check --json-version=2 --pretty { - "flowVersion":"0.82.0", + "flowVersion":"", "jsonVersion":"2", "errors":[ { @@ -292,14 +292,14 @@ flow check --json-version=2 --pretty } flow focus-check test.js --json-version=2 -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 
0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = 
`Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | 
empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow focus-check test.js --json-version=2 --json -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because 
"},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with 
"},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | 
empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 
10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow focus-check test.js --json-version=2 --pretty { - "flowVersion":"0.82.0", + "flowVersion":"", "jsonVersion":"2", "errors":[ { @@ -585,14 +585,14 @@ flow focus-check test.js --json-version=2 --pretty } flow check-contents --json-version=2 < test.js -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 
0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"-","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 
10`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"-","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 
2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow check-contents --json-version=2 --json < test.js -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 
0."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"-","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 
5`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" 
is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"-","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"-","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with 
"},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"-","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"-","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow check-contents --json-version=2 --pretty < test.js { - "flowVersion":"0.82.0", + "flowVersion":"", "jsonVersion":"2", "errors":[ { @@ -878,14 +878,14 @@ flow check-contents --json-version=2 --pretty < test.js } flow status --json-version=2 -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 
42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} 
+{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 
5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow status --json-version=2 --json -{"flowVersion":"0.82.0","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 
0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 
5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} +{"flowVersion":"","jsonVersion":"2","errors":[{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":3,"offset":92},"end":{"line":9,"column":4,"offset":94},"context":{"9":"([o1]: [O]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":9,"column":2,"offset":91},"end":{"line":9,"column":5,"offset":95},"context":{"9":"([o1]: [O]);"}},"messageMarkup":[{"kind":"Text","text":"Cannot cast array literal to tuple type because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":7,"column":16,"offset":65},"end":{"line":7,"column":17,"offset":67},"context":{"7":"const o1 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":6,"column":14,"offset":42},"end":{"line":6,"column":18,"offset":47},"context":{"6":"type O = {p: empty};"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":2,"offset":104},"end":{"line":10,"column":5,"offset":108},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"messageMarkup":{"kind":"UnorderedList","message":[{"kind":"Text","text":"Cannot cast array literal to union type because:"}],"items":[[{"kind":"Text","text":"Either 
"},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}],[{"kind":"Text","text":"Or "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"3","message":[{"kind":"Text","text":"empty"}]},{"kind":"Text","text":" in property "},{"kind":"Code","text":"p"},{"kind":"Text","text":" of index 0."}]]},"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":8,"column":16,"offset":85},"end":{"line":8,"column":17,"offset":87},"context":{"8":"const o2 = {p: 42};"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":13,"offset":115},"end":{"line":10,"column":25,"offset":128},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}},"3":{"source":"test.js","type":"SourceFile","start":{"line":10,"column":36,"offset":138},"end":{"line":10,"column":48,"offset":151},"context":{"10":"([o2]: [{p: empty | empty}] | [{p: empty | empty}]);"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"five_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":33,"offset":189},"end":{"line":16,"column":7,"offset":225},"context":{"12":"const five_line_error: number = `Line 1","13":"Line 2","14":"Line 3","15":"Line 4","16":"Line 5`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":12,"column":24,"offset":180},"end":{"line":12,"column":29,"offset":186},"context":{"12":"const five_line_error: number = `Line 1"}}}},{"kind":"infer","level":"error","suppressions":[],"classic":false,"primaryLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"rootLoc":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"messageMarkup":[{"kind":"Text","text":"Cannot assign template string to "},{"kind":"Code","text":"ten_line_error"},{"kind":"Text","text":" because "},{"kind":"Reference","referenceId":"1","message":[{"kind":"Text","text":"string"}]},{"kind":"Text","text":" is incompatible 
with "},{"kind":"Reference","referenceId":"2","message":[{"kind":"Text","text":"number"}]},{"kind":"Text","text":"."}],"referenceLocs":{"1":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":32,"offset":259},"end":{"line":27,"column":8,"offset":331},"context":{"18":"const ten_line_error: number = `Line 1","19":"Line 2","20":"Line 3","26":"Line 9","27":"Line 10`;"}},"2":{"source":"test.js","type":"SourceFile","start":{"line":18,"column":23,"offset":250},"end":{"line":18,"column":28,"offset":256},"context":{"18":"const ten_line_error: number = `Line 1"}}}}],"passed":false} flow status --json-version=2 --pretty { - "flowVersion":"0.82.0", + "flowVersion":"", "jsonVersion":"2", "errors":[ { @@ -1169,4 +1169,3 @@ flow status --json-version=2 --pretty ], "passed":false } - diff --git a/tests/json2_output/test.sh b/tests/json2_output/test.sh index b3452696326..eec6f0cde8e 100755 --- a/tests/json2_output/test.sh +++ b/tests/json2_output/test.sh @@ -59,4 +59,3 @@ echo "" echo "flow status --json-version=2 --pretty" assert_errors "$FLOW" status . --strip-root --json-version=2 --pretty echo "" -echo "" diff --git a/tests/json_exit/json_exit.exp b/tests/json_exit/json_exit.exp index b51c5801f32..46524b163f0 100644 --- a/tests/json_exit/json_exit.exp +++ b/tests/json_exit/json_exit.exp @@ -1,18 +1,18 @@ -{"flowVersion":"0.82.0","exit":{"code":12,"reason":"Could_not_find_flowconfig","msg":"Could not find file or directory pants; canceling search for .flowconfig.\nSee \"flow init --help\" for more info"}} +{"flowVersion":"","exit":{"code":12,"reason":"Could_not_find_flowconfig","msg":"Could not find file or directory pants; canceling search for .flowconfig.\nSee \"flow init --help\" for more info"}} { - "flowVersion":"0.82.0", + "flowVersion":"", "exit":{ "code":12, "reason":"Could_not_find_flowconfig", "msg":"Could not find file or directory pants; canceling search for .flowconfig.\nSee \"flow init --help\" for more info" } } -{"flowVersion":"0.82.0","exit":{"code":64,"reason":"Commandline_usage_error","msg":"flow: --pants unknown option\nUsage: flow check [OPTION]... [ROOT]\n\nDoes a full Flow check and prints the results.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\n\n --all Typecheck all files, not just @flow\n --color Display terminal output in color. never, always, auto (default: auto)\n --debug Print debug info during typecheck\n --declaration Specify one or more patterns, comma separated, for files to treat as declarations\n --flowconfig-name Set the name of the flow configuration file. 
(default: .flowconfig)\n --from Specify client (for use by editor plugins)\n --help This list of options\n --ignore Specify one or more ignore patterns, comma separated\n --ignore-version Ignore the version constraint in .flowconfig\n --include Specify one or more include patterns, comma separated\n --include-suppressed Ignore any `suppress_comment` lines in .flowconfig\n --include-warnings Include warnings in the error output (warnings are excluded by default)\n --json Output results in JSON format\n --json-version The version of the JSON format (defaults to 1)\n --lib Specify one or more lib files/directories, comma separated\n --lints Specify one or more lint rules, comma separated\n --max-warnings Warnings above this number will cause a nonzero exit code (implies --include-warnings)\n --max-workers Maximum number of workers to create (capped by number of cores)\n --merge-timeout The maximum time in seconds to attempt to typecheck a file or cycle of files. 0 means no timeout (default: 100)\n --message-width Sets the width of messages but not code snippets (defaults to the smaller of 120 or the terminal width)\n --munge-underscore-members Treat any class member name with a leading underscore as private\n --no-flowlib Do not include embedded declarations\n --no-saved-state Do not load from a saved state even if one is available\n --one-line Escapes newlines so that each error prints on one line\n --pretty Pretty-print JSON output (implies --json)\n --profile Output profiling information\n --quiet Suppress output about server startup\n --saved-state-fetcher Which saved state fetcher Flow should use (none, local) (default: none)\n --saved-state-no-fallback If saved state fails to load, exit (normally fallback is to initialize from scratch)\n --sharedmemory-dep-table-pow The exponent for the size of the shared memory dependency table. The default is 17, implying a size of 2^17 bytes\n --sharedmemory-dirs Directory in which to store shared memory heap (default: /dev/shm/)\n --sharedmemory-hash-table-pow The exponent for the size of the shared memory hash table. The default is 19, implying a size of 2^19 bytes\n --sharedmemory-log-level The logging level for shared memory statistics. 0=none, 1=some\n --sharedmemory-minimum-available Flow will only use a filesystem for shared memory if it has at least these many bytes available (default: 536870912 - which is 512MB)\n --show-all-branches Print all branch errors (the default is to print the most relevant branches)\n --show-all-errors Print all errors (the default is to truncate after 50 errors)\n --strip-root Print paths without the root\n --temp-dir Directory in which to store temp files (default: FLOW_TEMP_DIR, or /tmp/flow/)\n --traces Outline an error path up to a specified level\n --unicode Display terminal output with unicode decoration. never, always, auto (default: auto)\n --untyped Specify one or more patterns, comma separated, for files to treat as untyped\n --verbose Print verbose info during typecheck\n --verbose-depth Recursively print types up to specified depth (default 1, implies --verbose)\n --verbose-flowlib Print verbose info while initializing the flowlib\n --verbose-indent Indent verbose info during typecheck (implies --verbose)\n --weak Typecheck with weak inference, assuming dynamic types by default"}} +{"flowVersion":"","exit":{"code":64,"reason":"Commandline_usage_error","msg":"flow: --pants unknown option\nUsage: flow check [OPTION]... 
[ROOT]\n\nDoes a full Flow check and prints the results.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\n\n --abstract-locations [EXPERIMENTAL] Use abstract locations to improve recheck times. Has no effect unless types-first is also enabled\n --all Typecheck all files, not just @flow\n --color Display terminal output in color. never, always, auto (default: auto)\n --debug Print debug info during typecheck\n --declaration Specify one or more patterns, comma separated, for files to treat as declarations\n --flowconfig-name Set the name of the flow configuration file. (default: .flowconfig)\n --from Specify who is calling this CLI command (used by logging)\n --help This list of options\n --ignore Specify one or more ignore patterns, comma separated\n --ignore-version Ignore the version constraint in .flowconfig\n --include Specify one or more include patterns, comma separated\n --include-suppressed Ignore any `suppress_comment` lines in .flowconfig\n --include-warnings Include warnings in the error output (warnings are excluded by default)\n --json Output results in JSON format\n --json-version The version of the JSON format (defaults to 1)\n --lib Specify one or more lib files/directories, comma separated\n --lints Specify one or more lint rules, comma separated\n --max-warnings Warnings above this number will cause a nonzero exit code (implies --include-warnings)\n --max-workers Maximum number of workers to create (capped by number of cores)\n --merge-timeout The maximum time in seconds to attempt to typecheck a file or cycle of files. 0 means no timeout (default: 100)\n --message-width Sets the width of messages but not code snippets (defaults to the smaller of 120 or the terminal width)\n --munge-underscore-members Treat any class member name with a leading underscore as private\n --no-cgroup Don't automatically run this command in a cgroup (if cgroups are available)\n --no-flowlib Do not include embedded declarations\n --no-saved-state Do not load from a saved state even if one is available\n --one-line Escapes newlines so that each error prints on one line\n --pretty Pretty-print JSON output (implies --json)\n --profile Output profiling information\n --quiet Suppress output about server startup\n --saved-state-fetcher Which saved state fetcher Flow should use (none, local) (default: none)\n --saved-state-force-recheck Force a lazy server to recheck the changes since the saved state was generated\n --saved-state-no-fallback If saved state fails to load, exit (normally fallback is to initialize from scratch)\n --sharedmemory-dep-table-pow The exponent for the size of the shared memory dependency table. The default is 17, implying a size of 2^17 bytes\n --sharedmemory-dirs Directory in which to store shared memory heap (default: /dev/shm/)\n --sharedmemory-hash-table-pow The exponent for the size of the shared memory hash table. The default is 19, implying a size of 2^19 bytes\n --sharedmemory-log-level The logging level for shared memory statistics. 
0=none, 1=some\n --sharedmemory-minimum-available Flow will only use a filesystem for shared memory if it has at least these many bytes available (default: 536870912 - which is 512MB)\n --show-all-branches Print all branch errors (the default is to print the most relevant branches)\n --show-all-errors Print all errors (the default is to truncate after 50 errors)\n --strip-root Print paths without the root\n --temp-dir Directory in which to store temp files (default: FLOW_TEMP_DIR, or /tmp/flow/)\n --traces Outline an error path up to a specified level\n --types-first [EXPERIMENTAL] types-first architecture\n --unicode Display terminal output with unicode decoration. never, always, auto (default: auto)\n --untyped Specify one or more patterns, comma separated, for files to treat as untyped\n --verbose Print verbose info during typecheck\n --verbose-depth Recursively print types up to specified depth (default 1, implies --verbose)\n --verbose-flowlib Print verbose info while initializing the flowlib\n --verbose-indent Indent verbose info during typecheck (implies --verbose)\n --wait-for-recheck If true, always wait for rechecks to finish before serving commands (default: false)\n --weak Typecheck with weak inference, assuming dynamic types by default"}} { - "flowVersion":"0.82.0", + "flowVersion":"", "exit":{ "code":64, "reason":"Commandline_usage_error", - "msg":"flow: --pants unknown option\nUsage: flow check [OPTION]... [ROOT]\n\nDoes a full Flow check and prints the results.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\n\n --all Typecheck all files, not just @flow\n --color Display terminal output in color. never, always, auto (default: auto)\n --debug Print debug info during typecheck\n --declaration Specify one or more patterns, comma separated, for files to treat as declarations\n --flowconfig-name Set the name of the flow configuration file. (default: .flowconfig)\n --from Specify client (for use by editor plugins)\n --help This list of options\n --ignore Specify one or more ignore patterns, comma separated\n --ignore-version Ignore the version constraint in .flowconfig\n --include Specify one or more include patterns, comma separated\n --include-suppressed Ignore any `suppress_comment` lines in .flowconfig\n --include-warnings Include warnings in the error output (warnings are excluded by default)\n --json Output results in JSON format\n --json-version The version of the JSON format (defaults to 1)\n --lib Specify one or more lib files/directories, comma separated\n --lints Specify one or more lint rules, comma separated\n --max-warnings Warnings above this number will cause a nonzero exit code (implies --include-warnings)\n --max-workers Maximum number of workers to create (capped by number of cores)\n --merge-timeout The maximum time in seconds to attempt to typecheck a file or cycle of files. 
0 means no timeout (default: 100)\n --message-width Sets the width of messages but not code snippets (defaults to the smaller of 120 or the terminal width)\n --munge-underscore-members Treat any class member name with a leading underscore as private\n --no-flowlib Do not include embedded declarations\n --no-saved-state Do not load from a saved state even if one is available\n --one-line Escapes newlines so that each error prints on one line\n --pretty Pretty-print JSON output (implies --json)\n --profile Output profiling information\n --quiet Suppress output about server startup\n --saved-state-fetcher Which saved state fetcher Flow should use (none, local) (default: none)\n --saved-state-no-fallback If saved state fails to load, exit (normally fallback is to initialize from scratch)\n --sharedmemory-dep-table-pow The exponent for the size of the shared memory dependency table. The default is 17, implying a size of 2^17 bytes\n --sharedmemory-dirs Directory in which to store shared memory heap (default: /dev/shm/)\n --sharedmemory-hash-table-pow The exponent for the size of the shared memory hash table. The default is 19, implying a size of 2^19 bytes\n --sharedmemory-log-level The logging level for shared memory statistics. 0=none, 1=some\n --sharedmemory-minimum-available Flow will only use a filesystem for shared memory if it has at least these many bytes available (default: 536870912 - which is 512MB)\n --show-all-branches Print all branch errors (the default is to print the most relevant branches)\n --show-all-errors Print all errors (the default is to truncate after 50 errors)\n --strip-root Print paths without the root\n --temp-dir Directory in which to store temp files (default: FLOW_TEMP_DIR, or /tmp/flow/)\n --traces Outline an error path up to a specified level\n --unicode Display terminal output with unicode decoration. never, always, auto (default: auto)\n --untyped Specify one or more patterns, comma separated, for files to treat as untyped\n --verbose Print verbose info during typecheck\n --verbose-depth Recursively print types up to specified depth (default 1, implies --verbose)\n --verbose-flowlib Print verbose info while initializing the flowlib\n --verbose-indent Indent verbose info during typecheck (implies --verbose)\n --weak Typecheck with weak inference, assuming dynamic types by default" + "msg":"flow: --pants unknown option\nUsage: flow check [OPTION]... [ROOT]\n\nDoes a full Flow check and prints the results.\n\nFlow will search upward for a .flowconfig file, beginning at ROOT.\nROOT is assumed to be the current directory if unspecified.\n\n --abstract-locations [EXPERIMENTAL] Use abstract locations to improve recheck times. Has no effect unless types-first is also enabled\n --all Typecheck all files, not just @flow\n --color Display terminal output in color. never, always, auto (default: auto)\n --debug Print debug info during typecheck\n --declaration Specify one or more patterns, comma separated, for files to treat as declarations\n --flowconfig-name Set the name of the flow configuration file. 
(default: .flowconfig)\n --from Specify who is calling this CLI command (used by logging)\n --help This list of options\n --ignore Specify one or more ignore patterns, comma separated\n --ignore-version Ignore the version constraint in .flowconfig\n --include Specify one or more include patterns, comma separated\n --include-suppressed Ignore any `suppress_comment` lines in .flowconfig\n --include-warnings Include warnings in the error output (warnings are excluded by default)\n --json Output results in JSON format\n --json-version The version of the JSON format (defaults to 1)\n --lib Specify one or more lib files/directories, comma separated\n --lints Specify one or more lint rules, comma separated\n --max-warnings Warnings above this number will cause a nonzero exit code (implies --include-warnings)\n --max-workers Maximum number of workers to create (capped by number of cores)\n --merge-timeout The maximum time in seconds to attempt to typecheck a file or cycle of files. 0 means no timeout (default: 100)\n --message-width Sets the width of messages but not code snippets (defaults to the smaller of 120 or the terminal width)\n --munge-underscore-members Treat any class member name with a leading underscore as private\n --no-cgroup Don't automatically run this command in a cgroup (if cgroups are available)\n --no-flowlib Do not include embedded declarations\n --no-saved-state Do not load from a saved state even if one is available\n --one-line Escapes newlines so that each error prints on one line\n --pretty Pretty-print JSON output (implies --json)\n --profile Output profiling information\n --quiet Suppress output about server startup\n --saved-state-fetcher Which saved state fetcher Flow should use (none, local) (default: none)\n --saved-state-force-recheck Force a lazy server to recheck the changes since the saved state was generated\n --saved-state-no-fallback If saved state fails to load, exit (normally fallback is to initialize from scratch)\n --sharedmemory-dep-table-pow The exponent for the size of the shared memory dependency table. The default is 17, implying a size of 2^17 bytes\n --sharedmemory-dirs Directory in which to store shared memory heap (default: /dev/shm/)\n --sharedmemory-hash-table-pow The exponent for the size of the shared memory hash table. The default is 19, implying a size of 2^19 bytes\n --sharedmemory-log-level The logging level for shared memory statistics. 0=none, 1=some\n --sharedmemory-minimum-available Flow will only use a filesystem for shared memory if it has at least these many bytes available (default: 536870912 - which is 512MB)\n --show-all-branches Print all branch errors (the default is to print the most relevant branches)\n --show-all-errors Print all errors (the default is to truncate after 50 errors)\n --strip-root Print paths without the root\n --temp-dir Directory in which to store temp files (default: FLOW_TEMP_DIR, or /tmp/flow/)\n --traces Outline an error path up to a specified level\n --types-first [EXPERIMENTAL] types-first architecture\n --unicode Display terminal output with unicode decoration. 
never, always, auto (default: auto)\n --untyped Specify one or more patterns, comma separated, for files to treat as untyped\n --verbose Print verbose info during typecheck\n --verbose-depth Recursively print types up to specified depth (default 1, implies --verbose)\n --verbose-flowlib Print verbose info while initializing the flowlib\n --verbose-indent Indent verbose info during typecheck (implies --verbose)\n --wait-for-recheck If true, always wait for rechecks to finish before serving commands (default: false)\n --weak Typecheck with weak inference, assuming dynamic types by default" } } diff --git a/tests/jsx_csx_member_expression/.flowconfig b/tests/jsx_csx_member_expression/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/jsx_csx_member_expression/csx.js b/tests/jsx_csx_member_expression/csx.js new file mode 100644 index 00000000000..1a9a4d92939 --- /dev/null +++ b/tests/jsx_csx_member_expression/csx.js @@ -0,0 +1,5 @@ +//@flow +//@csx + +const x = {a: 3}; +; diff --git a/tests/jsx_csx_member_expression/jsx.js b/tests/jsx_csx_member_expression/jsx.js new file mode 100644 index 00000000000..0fcdef6ac5f --- /dev/null +++ b/tests/jsx_csx_member_expression/jsx.js @@ -0,0 +1,5 @@ +//@flow +// @jsx Preact.h + +const x = {a: 3}; +; diff --git a/tests/jsx_csx_member_expression/jsx_csx_member_expression.exp b/tests/jsx_csx_member_expression/jsx_csx_member_expression.exp new file mode 100644 index 00000000000..7f0ed78703e --- /dev/null +++ b/tests/jsx_csx_member_expression/jsx_csx_member_expression.exp @@ -0,0 +1,30 @@ +Error ------------------------------------------------------------------------------------------------------ csx.js:5:17 + +Cannot get `x.b` because property `b` is missing in object literal [1]. + + csx.js:5:17 + 5| ; + ^^^ + +References: + csx.js:4:11 + 4| const x = {a: 3}; + ^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ jsx.js:5:17 + +Cannot get `x.b` because property `b` is missing in object literal [1]. + + jsx.js:5:17 + 5| ; + ^^^ + +References: + jsx.js:4:11 + 4| const x = {a: 3}; + ^^^^^^ [1] + + + +Found 2 errors diff --git a/tests/jsx_intrinsics.custom/jsx_intrinsics.custom.exp b/tests/jsx_intrinsics.custom/jsx_intrinsics.custom.exp index a66c1a26b06..f71701deda5 100644 --- a/tests/jsx_intrinsics.custom/jsx_intrinsics.custom.exp +++ b/tests/jsx_intrinsics.custom/jsx_intrinsics.custom.exp @@ -66,7 +66,8 @@ References: Error -------------------------------------------------------------------------------------------------- strings.js:11:2 -Cannot create string element because an indexer property is missing in `$JSXIntrinsics` [1]. +Cannot create string element because an index signature declaring the expected key / value type is missing in +`$JSXIntrinsics` [1]. strings.js:11:2 11| ; // Error: string ~> keys of JSXIntrinsics @@ -119,7 +120,8 @@ References: Error ------------------------------------------------------------------------------------------------- strings.js:15:21 -Cannot create string element because an indexer property is missing in `$JSXIntrinsics` [1]. +Cannot create string element because an index signature declaring the expected key / value type is missing in +`$JSXIntrinsics` [1]. 
strings.js:15:21 15| React.createElement(Str, {}); // Error: string ~> keys of JSXIntrinsics diff --git a/tests/jsx_namespaced_name/.flowconfig b/tests/jsx_namespaced_name/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/jsx_namespaced_name/jsx_namespaced_name.exp b/tests/jsx_namespaced_name/jsx_namespaced_name.exp new file mode 100644 index 00000000000..783c0aad0cb --- /dev/null +++ b/tests/jsx_namespaced_name/jsx_namespaced_name.exp @@ -0,0 +1,32 @@ +Error ---------------------------------------------------------------------------------------------------- test.js:10:12 + +Cannot resolve name `doesNotExist`. + + 10| ; // Error, doesNotExist does not exist + ^^^^^^^^^^^^ + + +Error ---------------------------------------------------------------------------------------------------- test.js:12:12 + +Cannot get `{...}.y` because property `y` is missing in object literal [1]. + + test.js:12:12 + 12| ; // Error, y does not exist on the object + ^^^^^^^^ + +References: + test.js:12:12 + 12| ; // Error, y does not exist on the object + ^^^^^^ [1] + + +Error ---------------------------------------------------------------------------------------------------- test.js:14:12 + +Cannot perform arithmetic operation because string [1] is not a number. + + 14| ; // Error, can't multiply string by number + ^^^^^^^^ [1] + + + +Found 3 errors diff --git a/tests/jsx_namespaced_name/test.js b/tests/jsx_namespaced_name/test.js new file mode 100644 index 00000000000..bece9fee0f0 --- /dev/null +++ b/tests/jsx_namespaced_name/test.js @@ -0,0 +1,14 @@ +//@flow +// We don't enforce that: +// 1. React is in scope +// 2. The properties match the config of the component +// 3. The component exists +// +// The only thing we check is that the properties don't cause errors. +; // Ok + +; // Error, doesNotExist does not exist + +; // Error, y does not exist on the object + +; // Error, can't multiply string by number diff --git a/tests/keys/enum.js b/tests/keys/enum.js new file mode 100644 index 00000000000..ccfae170ef8 --- /dev/null +++ b/tests/keys/enum.js @@ -0,0 +1,20 @@ +// @flow + +function foo(r: $Keys): boolean { + switch (r) { + case R.A: + return false; + case R.B: + return false; + default: + return true; + } +} + +const R: {| + A: 'A', + B: 'B', +|} = { + A: 'A', + B: 'B', +}; diff --git a/tests/keys/keys.exp b/tests/keys/keys.exp index cd8d394d27f..d1fa07b513f 100644 --- a/tests/keys/keys.exp +++ b/tests/keys/keys.exp @@ -114,7 +114,8 @@ References: Error ----------------------------------------------------------------------------------------------------- keys.js:38:4 -Cannot cast `str` to key set because an indexer property is missing in `ObjLit` [1]. +Cannot cast `str` to key set because an index signature declaring the expected key / value type is missing in +`ObjLit` [1]. keys.js:38:4 38| (str: $Keys); // Error: string -> keys of ObjLit @@ -128,7 +129,8 @@ References: Error ----------------------------------------------------------------------------------------------------- keys.js:40:6 -Cannot cast `str` to key set because an indexer property is missing in `ObjLit` [1]. +Cannot cast `str` to key set because an index signature declaring the expected key / value type is missing in +`ObjLit` [1]. 
keys.js:40:6 40| (str: $Keys); // Error: truthy string -> keys of ObjLit diff --git a/tests/lazy-mode-fs-dependency/.flowconfig b/tests/lazy-mode-fs-dependency/.flowconfig new file mode 100644 index 00000000000..c136f32d600 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/.flowconfig @@ -0,0 +1,2 @@ +[ignore] +/tmp* diff --git a/tests/lazy-mode-fs-dependency/.testconfig b/tests/lazy-mode-fs-dependency/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/lazy-mode-fs-dependency/foo.js b/tests/lazy-mode-fs-dependency/foo.js new file mode 100644 index 00000000000..1f965578d5e --- /dev/null +++ b/tests/lazy-mode-fs-dependency/foo.js @@ -0,0 +1,3 @@ +// @flow + +(require('bar'): string); diff --git a/tests/lazy-mode-fs-dependency/lazy-mode-fs-dependency.exp b/tests/lazy-mode-fs-dependency/lazy-mode-fs-dependency.exp new file mode 100644 index 00000000000..7a1feaa6a88 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/lazy-mode-fs-dependency.exp @@ -0,0 +1,60 @@ + +Server should start in fs lazy mode +No errors! + +The Flow server is currently in filesystem lazy mode and is only checking 0/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Focus a file +Error ------------------------------------------------------------------------------------------------------- foo.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + foo.js:3:2 + 3| (require('bar'): string); + ^^^^^^^^^^^^^^ + +References: + node_modules/bar/index.js:3:18 + 3| module.exports = 0; + ^ [1] + foo.js:3:18 + 3| (require('bar'): string); + ^^^^^^ [2] + + + +Found 1 error + +The Flow server is currently in filesystem lazy mode and is only checking 2/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Edit a dependency +No errors! + +The Flow server is currently in filesystem lazy mode and is only checking 2/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Revert edit +Error ------------------------------------------------------------------------------------------------------- foo.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + foo.js:3:2 + 3| (require('bar'): string); + ^^^^^^^^^^^^^^ + +References: + node_modules/bar/index.js:3:18 + 3| module.exports = 0; + ^ [1] + foo.js:3:18 + 3| (require('bar'): string); + ^^^^^^ [2] + + + +Found 1 error + +The Flow server is currently in filesystem lazy mode and is only checking 2/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes diff --git a/tests/lazy-mode-fs-dependency/node_modules/bar/index.js b/tests/lazy-mode-fs-dependency/node_modules/bar/index.js new file mode 100644 index 00000000000..570d4130fb9 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/node_modules/bar/index.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = 0; \ No newline at end of file diff --git a/tests/lazy-mode-fs-dependency/test.sh b/tests/lazy-mode-fs-dependency/test.sh new file mode 100644 index 00000000000..fd0bb0fed97 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/test.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +printf "\\nServer should start in fs lazy mode\\n" +start_flow . 
--lazy-mode fs + +assert_ok "$FLOW" status --strip-root + +printf "\\nFocus a file\\n" +assert_ok "$FLOW" force-recheck --focus foo.js +assert_errors "$FLOW" status --strip-root + +printf "\\nEdit a dependency\\n" +cp tmp1/node_modules_bar_index.js node_modules/bar/index.js +assert_ok "$FLOW" force-recheck node_modules/bar/index.js +assert_ok "$FLOW" status --strip-root + +printf "\\nRevert edit\\n" +cp tmp2/node_modules_bar_index.js node_modules/bar/index.js +assert_ok "$FLOW" force-recheck node_modules/bar/index.js +assert_errors "$FLOW" status --strip-root + +assert_ok "$FLOW" stop diff --git a/tests/lazy-mode-fs-dependency/tmp1/node_modules_bar_index.js b/tests/lazy-mode-fs-dependency/tmp1/node_modules_bar_index.js new file mode 100644 index 00000000000..5bd72ac0801 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/tmp1/node_modules_bar_index.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = ""; diff --git a/tests/lazy-mode-fs-dependency/tmp2/node_modules_bar_index.js b/tests/lazy-mode-fs-dependency/tmp2/node_modules_bar_index.js new file mode 100644 index 00000000000..05a78cd62e8 --- /dev/null +++ b/tests/lazy-mode-fs-dependency/tmp2/node_modules_bar_index.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = 0; diff --git a/tests/lazy-mode-ide-dependency/.flowconfig b/tests/lazy-mode-ide-dependency/.flowconfig new file mode 100644 index 00000000000..c136f32d600 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/.flowconfig @@ -0,0 +1,2 @@ +[ignore] +/tmp* diff --git a/tests/lazy-mode-ide-dependency/.testconfig b/tests/lazy-mode-ide-dependency/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/lazy-mode-ide-dependency/dependency.js b/tests/lazy-mode-ide-dependency/dependency.js new file mode 100644 index 00000000000..05a78cd62e8 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/dependency.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = 0; diff --git a/tests/lazy-mode-ide-dependency/focused.js b/tests/lazy-mode-ide-dependency/focused.js new file mode 100644 index 00000000000..cc8f8f56f29 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/focused.js @@ -0,0 +1,3 @@ +// @flow + +(require('./dependency'): string); diff --git a/tests/lazy-mode-ide-dependency/lazy-mode-ide-dependency.exp b/tests/lazy-mode-ide-dependency/lazy-mode-ide-dependency.exp new file mode 100644 index 00000000000..2a5768e1497 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/lazy-mode-ide-dependency.exp @@ -0,0 +1,60 @@ + +Server should start in fs lazy mode +No errors! + +The Flow server is currently in filesystem lazy mode and is only checking 0/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Focus a file +Error --------------------------------------------------------------------------------------------------- focused.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + focused.js:3:2 + 3| (require('./dependency'): string); + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + dependency.js:3:18 + 3| module.exports = 0; + ^ [1] + focused.js:3:27 + 3| (require('./dependency'): string); + ^^^^^^ [2] + + + +Found 1 error + +The Flow server is currently in filesystem lazy mode and is only checking 2/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Edit a dependency +No errors! + +The Flow server is currently in filesystem lazy mode and is only checking 2/2 files. 
+To learn more, visit flow.org/en/docs/lang/lazy-modes + +Revert edit +Error --------------------------------------------------------------------------------------------------- focused.js:3:2 + +Cannot cast `require(...)` to string because number [1] is incompatible with string [2]. + + focused.js:3:2 + 3| (require('./dependency'): string); + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + dependency.js:3:18 + 3| module.exports = 0; + ^ [1] + focused.js:3:27 + 3| (require('./dependency'): string); + ^^^^^^ [2] + + + +Found 1 error + +The Flow server is currently in filesystem lazy mode and is only checking 2/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes diff --git a/tests/lazy-mode-ide-dependency/test.sh b/tests/lazy-mode-ide-dependency/test.sh new file mode 100644 index 00000000000..48b8fae7f56 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/test.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +printf "\\nServer should start in fs lazy mode\\n" +start_flow . --lazy-mode fs + +assert_ok "$FLOW" status --strip-root + +printf "\\nFocus a file\\n" +assert_ok "$FLOW" force-recheck --focus focused.js +assert_errors "$FLOW" status --strip-root + +printf "\\nEdit a dependency\\n" +cp tmp1/dependency.js dependency.js +assert_ok "$FLOW" force-recheck dependency.js +assert_ok "$FLOW" status --strip-root + +printf "\\nRevert edit\\n" +cp tmp2/dependency.js dependency.js +assert_ok "$FLOW" force-recheck dependency.js +assert_errors "$FLOW" status --strip-root + +assert_ok "$FLOW" stop diff --git a/tests/lazy-mode-ide-dependency/tmp1/dependency.js b/tests/lazy-mode-ide-dependency/tmp1/dependency.js new file mode 100644 index 00000000000..5bd72ac0801 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/tmp1/dependency.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = ""; diff --git a/tests/lazy-mode-ide-dependency/tmp2/dependency.js b/tests/lazy-mode-ide-dependency/tmp2/dependency.js new file mode 100644 index 00000000000..05a78cd62e8 --- /dev/null +++ b/tests/lazy-mode-ide-dependency/tmp2/dependency.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = 0; diff --git a/tests/lazy_mode_flowconfig/.flowconfig b/tests/lazy_mode_flowconfig/.flowconfig new file mode 100644 index 00000000000..a74fd308199 --- /dev/null +++ b/tests/lazy_mode_flowconfig/.flowconfig @@ -0,0 +1,12 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +lazy_mode=fs + +[strict] diff --git a/tests/lazy_mode_flowconfig/.testconfig b/tests/lazy_mode_flowconfig/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/lazy_mode_flowconfig/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/lazy_mode_flowconfig/foo.js b/tests/lazy_mode_flowconfig/foo.js new file mode 100644 index 00000000000..0a254d61750 --- /dev/null +++ b/tests/lazy_mode_flowconfig/foo.js @@ -0,0 +1,3 @@ +// @flow + +var x: string = 123; diff --git a/tests/lazy_mode_flowconfig/lazy_mode_flowconfig.exp b/tests/lazy_mode_flowconfig/lazy_mode_flowconfig.exp new file mode 100644 index 00000000000..3ff34178bc9 --- /dev/null +++ b/tests/lazy_mode_flowconfig/lazy_mode_flowconfig.exp @@ -0,0 +1,75 @@ + +A full check should ignore the lazy mode in the .flowconfig +Error ------------------------------------------------------------------------------------------------------ foo.js:3:17 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. 
+ + foo.js:3:17 + 3| var x: string = 123; + ^^^ [1] + +References: + foo.js:3:8 + 3| var x: string = 123; + ^^^^^^ [2] + + + +Found 1 error + +Server should start in fs lazy mode +No errors! + +The Flow server is currently in filesystem lazy mode and is only checking 0/1 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Editing a file should cause fs lazy mode to focus on the file +Error ------------------------------------------------------------------------------------------------------ foo.js:3:17 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + foo.js:3:17 + 3| var x: string = 123; + ^^^ [1] + +References: + foo.js:3:8 + 3| var x: string = 123; + ^^^^^^ [2] + + + +Found 1 error + +The Flow server is currently in filesystem lazy mode and is only checking 1/1 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Server should start in non-lazy mode due to --lazy-mode none +Error ------------------------------------------------------------------------------------------------------ foo.js:3:17 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + foo.js:3:17 + 3| var x: string = 123; + ^^^ [1] + +References: + foo.js:3:8 + 3| var x: string = 123; + ^^^^^^ [2] + + + +Found 1 error + +Server should start in ide lazy mode due to --lazy-mode ide +No errors! + +The Flow server is currently in IDE lazy mode and is only checking 0/1 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Editing a file shouldn't cause ide lazy mode to focus on the file +No errors! + +The Flow server is currently in IDE lazy mode and is only checking 0/1 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes diff --git a/tests/lazy_mode_flowconfig/test.sh b/tests/lazy_mode_flowconfig/test.sh new file mode 100755 index 00000000000..1f8d009ac33 --- /dev/null +++ b/tests/lazy_mode_flowconfig/test.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +printf "\\nA full check should ignore the lazy mode in the .flowconfig\\n" +assert_errors "$FLOW" check --strip-root + +printf "\\nServer should start in fs lazy mode\\n" +start_flow . +assert_ok "$FLOW" status --strip-root + +printf "\\nEditing a file should cause fs lazy mode to focus on the file\\n" +echo " " >> foo.js +assert_ok "$FLOW" force-recheck foo.js +assert_errors "$FLOW" status --strip-root +assert_ok "$FLOW" stop + +printf "\\nServer should start in non-lazy mode due to --lazy-mode none\\n" +start_flow . --lazy-mode none +assert_errors "$FLOW" status --strip-root +assert_ok "$FLOW" stop + +printf "\\nServer should start in ide lazy mode due to --lazy-mode ide\\n" +start_flow . 
--lazy-mode ide +assert_ok "$FLOW" status --strip-root + +printf "\\nEditing a file shouldn't cause ide lazy mode to focus on the file\\n" +echo " " >> foo.js +assert_ok "$FLOW" force-recheck foo.js +assert_ok "$FLOW" status --strip-root +assert_ok "$FLOW" stop diff --git a/tests/lib/lib.exp b/tests/lib/lib.exp index 2004eecc2af..b0646dae3e8 100644 --- a/tests/lib/lib.exp +++ b/tests/lib/lib.exp @@ -24,8 +24,8 @@ Cannot assign `Number.MAX_VALUE` to `y` because number [1] is incompatible with ^^^^^^^^^^^^^^^^ References: - /core.js:126:23 - 126| static MAX_VALUE: number; + /core.js:146:23 + 146| static MAX_VALUE: number; ^^^^^^ [1] libtest.js:2:7 2| var y:string = Number.MAX_VALUE; @@ -41,8 +41,8 @@ Cannot assign `new TypeError().name` to `z` because string [1] is incompatible w ^^^^^^^^^^^^^^^^^^^^ References: - /core.js:427:11 - 427| name: string; + /core.js:464:11 + 464| name: string; ^^^^^^ [1] libtest.js:3:7 3| var z:number = new TypeError().name; diff --git a/tests/lib_ignore_json/.flowconfig b/tests/lib_ignore_json/.flowconfig new file mode 100644 index 00000000000..a5cb5758a09 --- /dev/null +++ b/tests/lib_ignore_json/.flowconfig @@ -0,0 +1,9 @@ +[ignore] + +[include] + +[lints] + +[options] + +[strict] diff --git a/tests/lib_ignore_json/flow-typed/foo/index.js b/tests/lib_ignore_json/flow-typed/foo/index.js new file mode 100644 index 00000000000..31f7e893f63 --- /dev/null +++ b/tests/lib_ignore_json/flow-typed/foo/index.js @@ -0,0 +1,3 @@ +declare module 'foo' { + declare module.exports: string; +} diff --git a/tests/lib_ignore_json/flow-typed/foo/package.json b/tests/lib_ignore_json/flow-typed/foo/package.json new file mode 100644 index 00000000000..4d44dc64dc9 --- /dev/null +++ b/tests/lib_ignore_json/flow-typed/foo/package.json @@ -0,0 +1,3 @@ +{ + "name": "@flowtyped/foo" +} diff --git a/tests/lib_ignore_json/lib_ignore_json.exp b/tests/lib_ignore_json/lib_ignore_json.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ b/tests/lib_ignore_json/lib_ignore_json.exp @@ -0,0 +1 @@ +Found 0 errors diff --git a/tests/lib_ignore_json/test.js b/tests/lib_ignore_json/test.js new file mode 100644 index 00000000000..8d68aaed8af --- /dev/null +++ b/tests/lib_ignore_json/test.js @@ -0,0 +1,5 @@ +// @flow + +import data from 'foo' + +(data: string); diff --git a/tests/lint_all_warn/dynamic-export.js b/tests/lint_all_warn/dynamic-export.js new file mode 100644 index 00000000000..9fc0bf9f7f2 --- /dev/null +++ b/tests/lint_all_warn/dynamic-export.js @@ -0,0 +1,3 @@ +//@flow + +module.exports = (3 : any); // should only report unclear type, not dynamic-export diff --git a/tests/lint_all_warn/implicit-inexact-object.js b/tests/lint_all_warn/implicit-inexact-object.js new file mode 100644 index 00000000000..2e2f79e8fca --- /dev/null +++ b/tests/lint_all_warn/implicit-inexact-object.js @@ -0,0 +1,6 @@ +//@flow + +// Implicit inexact object lints are not included in all until +// https://github.com/flow-typed/flow-typed/pull/3365 lands + +type X = {}; // No error diff --git a/tests/lint_all_warn/lint_all_warn.exp b/tests/lint_all_warn/lint_all_warn.exp index 7b68d059a05..8f704a35d1a 100644 --- a/tests/lint_all_warn/lint_all_warn.exp +++ b/tests/lint_all_warn/lint_all_warn.exp @@ -16,6 +16,14 @@ References: ^^^^^^^ [2] +Warning ----------------------------------------------------------------------------------------- dynamic-export.js:3:23 + +Unclear type. Using `any`, `Object`, or `Function` types is not safe! 
(`unclear-type`) + + 3| module.exports = (3 : any); // should only report unclear type, not dynamic-export + ^^^ + + Warning -------------------------------------------------------------------------------------------- sketchy_null.js:8:5 Sketchy null check on string [1] which is potentially an empty string. Perhaps you meant to check for null or @@ -35,4 +43,4 @@ References: -Found 1 error and 1 warning +Found 1 error and 2 warnings diff --git a/tests/lint_all_warn/uninitialized-instance-property.js b/tests/lint_all_warn/uninitialized-instance-property.js new file mode 100644 index 00000000000..c1c297ed933 --- /dev/null +++ b/tests/lint_all_warn/uninitialized-instance-property.js @@ -0,0 +1,8 @@ +//@flow + +// Uninitialized instance property lints are not included in all until the +// analysis is more complete + +class A { + p; +} diff --git a/tests/lint_cli_add_all/lint_cli_add_all.exp b/tests/lint_cli_add_all/lint_cli_add_all.exp index 0e975d1b7c0..fa4bd5af26b 100644 --- a/tests/lint_cli_add_all/lint_cli_add_all.exp +++ b/tests/lint_cli_add_all/lint_cli_add_all.exp @@ -135,7 +135,7 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:46:21 - 46| function l(o: { p?: number }) { + 46| function l(o: { p?: number, ... }) { ^^^^^^ [1] @@ -150,10 +150,10 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:51:21 - 51| function m(o: { p: ?number }) { + 51| function m(o: { p: ?number, ... }) { ^^^^^^ [1] simple_sketchies.js:51:20 - 51| function m(o: { p: ?number }) { + 51| function m(o: { p: ?number, ... }) { ^^^^^^^ [2] @@ -168,10 +168,10 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:56:20 - 56| function n(o: { p: number|null|void }) { + 56| function n(o: { p: number|null|void, ... }) { ^^^^^^ [1] simple_sketchies.js:56:32 - 56| function n(o: { p: number|null|void }) { + 56| function n(o: { p: number|null|void, ... }) { ^^^^ [2] diff --git a/tests/lint_cli_add_all/simple_sketchies.js b/tests/lint_cli_add_all/simple_sketchies.js index e0fb0ae55d3..225f911e9b9 100644 --- a/tests/lint_cli_add_all/simple_sketchies.js +++ b/tests/lint_cli_add_all/simple_sketchies.js @@ -43,21 +43,21 @@ s(unknown_str); // possibly falsey, sketchy // PropExistsP // optional prop -function l(o: { p?: number }) { +function l(o: { p?: number, ... }) { if (o.p) {/* sketchy */} } // maybe prop -function m(o: { p: ?number }) { +function m(o: { p: ?number, ... }) { if (o.p) {/* sketchy */} } // union -function n(o: { p: number|null|void }) { +function n(o: { p: number|null|void, ... }) { if (o.p) { /* sketchy */ } } -function q(o: { p: number }) { +function q(o: { p: number, ... }) { if (o.p) { /* NOT sketchy */ } } diff --git a/tests/lint_cli_no_override_all/lint_cli_no_override_all.exp b/tests/lint_cli_no_override_all/lint_cli_no_override_all.exp index 0e975d1b7c0..fa4bd5af26b 100644 --- a/tests/lint_cli_no_override_all/lint_cli_no_override_all.exp +++ b/tests/lint_cli_no_override_all/lint_cli_no_override_all.exp @@ -135,7 +135,7 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:46:21 - 46| function l(o: { p?: number }) { + 46| function l(o: { p?: number, ... }) { ^^^^^^ [1] @@ -150,10 +150,10 @@ Sketchy null check on number [1] which is potentially 0. 
Perhaps you meant to ch References: simple_sketchies.js:51:21 - 51| function m(o: { p: ?number }) { + 51| function m(o: { p: ?number, ... }) { ^^^^^^ [1] simple_sketchies.js:51:20 - 51| function m(o: { p: ?number }) { + 51| function m(o: { p: ?number, ... }) { ^^^^^^^ [2] @@ -168,10 +168,10 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:56:20 - 56| function n(o: { p: number|null|void }) { + 56| function n(o: { p: number|null|void, ... }) { ^^^^^^ [1] simple_sketchies.js:56:32 - 56| function n(o: { p: number|null|void }) { + 56| function n(o: { p: number|null|void, ... }) { ^^^^ [2] diff --git a/tests/lint_cli_no_override_all/simple_sketchies.js b/tests/lint_cli_no_override_all/simple_sketchies.js index e0fb0ae55d3..225f911e9b9 100644 --- a/tests/lint_cli_no_override_all/simple_sketchies.js +++ b/tests/lint_cli_no_override_all/simple_sketchies.js @@ -43,21 +43,21 @@ s(unknown_str); // possibly falsey, sketchy // PropExistsP // optional prop -function l(o: { p?: number }) { +function l(o: { p?: number, ... }) { if (o.p) {/* sketchy */} } // maybe prop -function m(o: { p: ?number }) { +function m(o: { p: ?number, ... }) { if (o.p) {/* sketchy */} } // union -function n(o: { p: number|null|void }) { +function n(o: { p: number|null|void, ... }) { if (o.p) { /* sketchy */ } } -function q(o: { p: number }) { +function q(o: { p: number, ... }) { if (o.p) { /* NOT sketchy */ } } diff --git a/tests/lint_cli_remove_single/simple_sketchies.js b/tests/lint_cli_remove_single/simple_sketchies.js index e0fb0ae55d3..225f911e9b9 100644 --- a/tests/lint_cli_remove_single/simple_sketchies.js +++ b/tests/lint_cli_remove_single/simple_sketchies.js @@ -43,21 +43,21 @@ s(unknown_str); // possibly falsey, sketchy // PropExistsP // optional prop -function l(o: { p?: number }) { +function l(o: { p?: number, ... }) { if (o.p) {/* sketchy */} } // maybe prop -function m(o: { p: ?number }) { +function m(o: { p: ?number, ... }) { if (o.p) {/* sketchy */} } // union -function n(o: { p: number|null|void }) { +function n(o: { p: number|null|void, ... }) { if (o.p) { /* sketchy */ } } -function q(o: { p: number }) { +function q(o: { p: number, ... }) { if (o.p) { /* NOT sketchy */ } } diff --git a/tests/lint_comments/lint_comments.exp b/tests/lint_comments/lint_comments.exp index 6cf5e73c5fc..910b6fb0f65 100644 --- a/tests/lint_comments/lint_comments.exp +++ b/tests/lint_comments/lint_comments.exp @@ -133,10 +133,10 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:111:20 - 111| function n(o: { p: number|null|void }) { + 111| function n(o: { p: number|null|void, ... }) { ^^^^^^ [1] simple_sketchies.js:111:32 - 111| function n(o: { p: number|null|void }) { + 111| function n(o: { p: number|null|void, ... }) { ^^^^ [2] diff --git a/tests/lint_comments/simple_sketchies.js b/tests/lint_comments/simple_sketchies.js index ac252c4e93b..35e7a359820 100644 --- a/tests/lint_comments/simple_sketchies.js +++ b/tests/lint_comments/simple_sketchies.js @@ -97,25 +97,25 @@ s(unknown_str); // possibly falsey, sketchy // PropExistsP sketchy checks // optional prop -function l(o: { p?: number }) { +function l(o: { p?: number, ... }) { if (o.p) {/* sketchy; suppressed */} //flowlint-line sketchy-null:off } // maybe prop -function m(o: { p: ?number }) { +function m(o: { p: ?number, ... 
}) { /* flowlint-next-line sketchy-null:off */ if (o.p) {/* sketchy; suppressed */} } // union -function n(o: { p: number|null|void }) { +function n(o: { p: number|null|void, ... }) { /*flowlint sketchy-null:off*/ //Unused suppression // flowlint-next-line sketchy-null:error if (o.p) { /* sketchy */ } // flowlint sketchy-null:error } -function q(o: { p: number }) { +function q(o: { p: number, ... }) { if (o.p) { /* NOT sketchy */ } } diff --git a/tests/lint_config_default/.flowconfig b/tests/lint_config_default/.flowconfig new file mode 100644 index 00000000000..d7b6fb84827 --- /dev/null +++ b/tests/lint_config_default/.flowconfig @@ -0,0 +1,2 @@ +[lints] +deprecated-utility=error diff --git a/tests/lint_config_default/lint_config_default.exp b/tests/lint_config_default/lint_config_default.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ b/tests/lint_config_default/lint_config_default.exp @@ -0,0 +1 @@ +Found 0 errors diff --git a/tests/lint_node_modules/.flowconfig b/tests/lint_node_modules/.flowconfig new file mode 100644 index 00000000000..46a26198710 --- /dev/null +++ b/tests/lint_node_modules/.flowconfig @@ -0,0 +1,15 @@ +[ignore] + +[include] + +[libs] + +[lints] +signature-verification-failure=error + +[options] +experimental.well_formed_exports=true +module.system.node.resolve_dirname=node_modules +module.system.node.resolve_dirname=foo_modules + +[strict] diff --git a/tests/lint_node_modules/foo_modules/b.js b/tests/lint_node_modules/foo_modules/b.js new file mode 100644 index 00000000000..aa713d3e626 --- /dev/null +++ b/tests/lint_node_modules/foo_modules/b.js @@ -0,0 +1,5 @@ +// @flow + +declare function foo(): void; +module.exports = foo(); // this is a signature verification error which will + // be ignored since the file is within node_modules diff --git a/tests/lint_node_modules/lint_node_modules.exp b/tests/lint_node_modules/lint_node_modules.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ b/tests/lint_node_modules/lint_node_modules.exp @@ -0,0 +1 @@ +Found 0 errors diff --git a/tests/lint_node_modules/node_modules/a.js b/tests/lint_node_modules/node_modules/a.js new file mode 100644 index 00000000000..aa713d3e626 --- /dev/null +++ b/tests/lint_node_modules/node_modules/a.js @@ -0,0 +1,5 @@ +// @flow + +declare function foo(): void; +module.exports = foo(); // this is a signature verification error which will + // be ignored since the file is within node_modules diff --git a/tests/lint_suppressions_all/lint_suppressions_all.exp b/tests/lint_suppressions_all/lint_suppressions_all.exp index 0e975d1b7c0..fa4bd5af26b 100644 --- a/tests/lint_suppressions_all/lint_suppressions_all.exp +++ b/tests/lint_suppressions_all/lint_suppressions_all.exp @@ -135,7 +135,7 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:46:21 - 46| function l(o: { p?: number }) { + 46| function l(o: { p?: number, ... }) { ^^^^^^ [1] @@ -150,10 +150,10 @@ Sketchy null check on number [1] which is potentially 0. Perhaps you meant to ch References: simple_sketchies.js:51:21 - 51| function m(o: { p: ?number }) { + 51| function m(o: { p: ?number, ... }) { ^^^^^^ [1] simple_sketchies.js:51:20 - 51| function m(o: { p: ?number }) { + 51| function m(o: { p: ?number, ... }) { ^^^^^^^ [2] @@ -168,10 +168,10 @@ Sketchy null check on number [1] which is potentially 0. 
Perhaps you meant to ch References: simple_sketchies.js:56:20 - 56| function n(o: { p: number|null|void }) { + 56| function n(o: { p: number|null|void, ... }) { ^^^^^^ [1] simple_sketchies.js:56:32 - 56| function n(o: { p: number|null|void }) { + 56| function n(o: { p: number|null|void, ... }) { ^^^^ [2] diff --git a/tests/lint_suppressions_all/simple_sketchies.js b/tests/lint_suppressions_all/simple_sketchies.js index e0fb0ae55d3..225f911e9b9 100644 --- a/tests/lint_suppressions_all/simple_sketchies.js +++ b/tests/lint_suppressions_all/simple_sketchies.js @@ -43,21 +43,21 @@ s(unknown_str); // possibly falsey, sketchy // PropExistsP // optional prop -function l(o: { p?: number }) { +function l(o: { p?: number, ... }) { if (o.p) {/* sketchy */} } // maybe prop -function m(o: { p: ?number }) { +function m(o: { p: ?number, ... }) { if (o.p) {/* sketchy */} } // union -function n(o: { p: number|null|void }) { +function n(o: { p: number|null|void, ... }) { if (o.p) { /* sketchy */ } } -function q(o: { p: number }) { +function q(o: { p: number, ... }) { if (o.p) { /* NOT sketchy */ } } diff --git a/tests/lint_suppressions_with_errors/.flowconfig b/tests/lint_suppressions_with_errors/.flowconfig new file mode 100644 index 00000000000..293762e51f8 --- /dev/null +++ b/tests/lint_suppressions_with_errors/.flowconfig @@ -0,0 +1,5 @@ +[lints] +sketchy-null-mixed=warn + +[options] +suppress_type=$FlowFixMe diff --git a/tests/lint_suppressions_with_errors/.testconfig b/tests/lint_suppressions_with_errors/.testconfig new file mode 100644 index 00000000000..374360f31b1 --- /dev/null +++ b/tests/lint_suppressions_with_errors/.testconfig @@ -0,0 +1 @@ +cmd: status --strip-root --show-all-errors --include-warnings diff --git a/tests/lint_suppressions_with_errors/lib.js b/tests/lint_suppressions_with_errors/lib.js new file mode 100644 index 00000000000..d8638af9b15 --- /dev/null +++ b/tests/lint_suppressions_with_errors/lib.js @@ -0,0 +1,11 @@ +//@flow + +type BaseProps = { + prop1?: ?boolean, + prop2?: ?number, +}; + +export type Props = BaseProps & { + prop3?: ?boolean, + prop4?: ?number, +}; diff --git a/tests/lint_suppressions_with_errors/lint.js b/tests/lint_suppressions_with_errors/lint.js new file mode 100644 index 00000000000..e9b0716b0a9 --- /dev/null +++ b/tests/lint_suppressions_with_errors/lint.js @@ -0,0 +1,20 @@ +//@flow + +'use strict'; + +import type {Props} from './lib.js'; + +class Class { + constructor(props: Props) { + const inputProps = { + foo: 'bar', + ...props, + }; + } +} + +declare var x : mixed; +//$FlowFixMe should suppress +if(x) { + +}; diff --git a/tests/lint_suppressions_with_errors/lint_suppressions_with_errors.exp b/tests/lint_suppressions_with_errors/lint_suppressions_with_errors.exp new file mode 100644 index 00000000000..c19fb38448d --- /dev/null +++ b/tests/lint_suppressions_with_errors/lint_suppressions_with_errors.exp @@ -0,0 +1 @@ +No errors! 
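For context on the `lint_suppressions_with_errors` test added above: with `sketchy-null-mixed=warn` in `[lints]`, a truthiness check on a `mixed` value is reported as a lint warning, and the `//$FlowFixMe should suppress` comment in `lint.js` suppresses it, which is why the expected output is `No errors!`. The following is a minimal sketch under those assumptions, not part of this diff; the variable name is hypothetical.

// @flow
// Sketch assuming the [lints] setting sketchy-null-mixed=warn from the
// .flowconfig added above; `maybeSet` is a hypothetical name.
declare var maybeSet: mixed;

if (maybeSet) {
  // reported as a sketchy-null-mixed warning: a `mixed` value may be
  // null/undefined but also 0, '', or false, so the truthiness check
  // is ambiguous
}

// $FlowFixMe should suppress
if (maybeSet) {
  // suppressed by the preceding comment, mirroring lint.js above
}

This mirrors `lint.js` in the test, where the suppression on the `if (x)` check keeps `flow status --include-warnings` at `No errors!`.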
diff --git a/tests/lint_with_include_suppressed/test.sh b/tests/lint_with_include_suppressed/test.sh index 2624c531934..abed0b679b5 100755 --- a/tests/lint_with_include_suppressed/test.sh +++ b/tests/lint_with_include_suppressed/test.sh @@ -2,4 +2,4 @@ printf "Without --include-suppressed:\n" assert_ok "$FLOW" check printf "With --include-suppressed:\n" -assert_ok "$FLOW" check --include-suppressed +assert_errors "$FLOW" check --include-suppressed diff --git a/tests/long_directory_names/.testconfig b/tests/long_directory_names/.testconfig new file mode 100644 index 00000000000..9bfaf8ba1d7 --- /dev/null +++ b/tests/long_directory_names/.testconfig @@ -0,0 +1,2 @@ +auto_start: false +shell: test.sh diff --git a/tests/long_directory_names/long_directory_names.exp b/tests/long_directory_names/long_directory_names.exp new file mode 100644 index 00000000000..4e52575c7c4 --- /dev/null +++ b/tests/long_directory_names/long_directory_names.exp @@ -0,0 +1,36 @@ + +Flow check: +Error ------------------------------------------------------------------------------------------------------ foo.js:3:17 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + foo.js:3:17 + 3| let x: string = 123 + ^^^ [1] + +References: + foo.js:3:8 + 3| let x: string = 123 + ^^^^^^ [2] + + + +Found 1 error + +Flow status: +Error ------------------------------------------------------------------------------------------------------ foo.js:3:17 + +Cannot assign `123` to `x` because number [1] is incompatible with string [2]. + + foo.js:3:17 + 3| let x: string = 123 + ^^^ [1] + +References: + foo.js:3:8 + 3| let x: string = 123 + ^^^^^^ [2] + + + +Found 1 error diff --git a/tests/long_directory_names/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/.flowconfig b/tests/long_directory_names/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/.flowconfig new file mode 100644 index 00000000000..1fed445333e --- /dev/null +++ b/tests/long_directory_names/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/.flowconfig @@ -0,0 +1,11 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] + +[strict] diff --git a/tests/long_directory_names/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/foo.js b/tests/long_directory_names/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/foo.js new file mode 100644 index 00000000000..439a3707014 --- /dev/null +++ b/tests/long_directory_names/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/foo.js @@ -0,0 +1,3 @@ +// @flow + +let x: string = 123 diff --git a/tests/long_directory_names/test.sh b/tests/long_directory_names/test.sh new file mode 100644 index 00000000000..828bd35101b --- /dev/null +++ b/tests/long_directory_names/test.sh @@ -0,0 +1,14 @@ +#!/bin/bash 
+ +# Hopefully this will be short enough not to upset windows CI +LONG_PATH="really_long_path/really_long_path/really_long_path/really_long_path/\ +really_long_path/really_long_path/really_long_path/really_long_path/\ +really_long_path/really_long_path/really_long_path" + +printf "\nFlow check:\n" +assert_errors "$FLOW" check --strip-root "$LONG_PATH" + +printf "\nFlow status:\n" +start_flow "$LONG_PATH" +assert_errors "$FLOW" status --strip-root --no-auto-start "$LONG_PATH" +assert_ok "$FLOW" stop "$LONG_PATH" diff --git a/tests/match_failure/match_failure.exp b/tests/match_failure/match_failure.exp index d9079e00ec3..965c6db6dfc 100644 --- a/tests/match_failure/match_failure.exp +++ b/tests/match_failure/match_failure.exp @@ -19,6 +19,25 @@ References: ^^^^^^^^ [3] +Error ------------------------------------------------------------------------------------------- disjoint_union.js:8:22 + +All branches are incompatible: + - Either string [1] is incompatible with string literal `rectangle` [2]. + - Or string [1] is incompatible with string literal `circle` [3]. + + disjoint_union.js:8:22 + 8| if (shape.type === 'square') { // error + ^^^^^^^^ [1] + +References: + disjoint_union.js:4:10 + 4| {type: 'rectangle', width: number, height: number} | + ^^^^^^^^^^^ [2] + disjoint_union.js:5:10 + 5| {type: 'circle', radius: number}; + ^^^^^^^^ [3] + + Error ------------------------------------------------------------------------------------------- disjoint_union.js:21:7 All branches are incompatible: @@ -40,6 +59,25 @@ References: ^^^^^^^^ [3] +Error ------------------------------------------------------------------------------------------ disjoint_union.js:21:22 + +All branches are incompatible: + - Either string [1] is incompatible with string literal `rectangle` [2]. + - Or string [1] is incompatible with string literal `circle` [3]. + + disjoint_union.js:21:22 + 21| if (shape.type === 'square') { // error + ^^^^^^^^ [1] + +References: + disjoint_union.js:17:11 + 17| {|type: 'rectangle', width: number, height: number|} | + ^^^^^^^^^^^ [2] + disjoint_union.js:18:11 + 18| {|type: 'circle', radius: number|}; + ^^^^^^^^ [3] + + Error ------------------------------------------------------------------------------------------- disjoint_union.js:34:7 All branches are incompatible: @@ -61,6 +99,25 @@ References: ^^^^^^^^ [3] +Error ------------------------------------------------------------------------------------------ disjoint_union.js:34:22 + +All branches are incompatible: + - Either string [1] is incompatible with string literal `rectangle` [2]. + - Or string [1] is incompatible with string literal `circle` [3]. + + disjoint_union.js:34:22 + 34| if (shape.type === 'square') { // error + ^^^^^^^^ [1] + +References: + disjoint_union.js:30:11 + 30| {+type: 'rectangle', width: number, height: number} | + ^^^^^^^^^^^ [2] + disjoint_union.js:31:11 + 31| {+type: 'circle', radius: number}; + ^^^^^^^^ [3] + + Error ----------------------------------------------------------------------------------------------------- enum.js:8:25 number literal `2` [1] is incompatible with enum [2]. @@ -76,4 +133,4 @@ References: -Found 4 errors +Found 7 errors diff --git a/tests/misc/misc.exp b/tests/misc/misc.exp index 10c7622eb56..c25588b0bb3 100644 --- a/tests/misc/misc.exp +++ b/tests/misc/misc.exp @@ -134,8 +134,8 @@ Cannot return `x.length` because number [1] is incompatible with string [2]. 
^^^^^^^^ References: - /core.js:275:13 - 275| length: number; + /core.js:294:13 + 294| length: number; ^^^^^^ [1] F.js:4:33 4| function foo(x: Array): string { @@ -179,8 +179,8 @@ Cannot assign `"duck"` to `b.length` because string [1] is incompatible with num ^^^^^^ [1] References: - /core.js:275:13 - 275| length: number; + /core.js:294:13 + 294| length: number; ^^^^^^ [2] diff --git a/tests/missing_annotation/invariant_this.js b/tests/missing_annotation/invariant_this.js new file mode 100644 index 00000000000..1c8f802466e --- /dev/null +++ b/tests/missing_annotation/invariant_this.js @@ -0,0 +1,15 @@ +/* This is a regression test. If `this` appears in an input position reachable + * from exports, we would raise a confusing error, because the assert ground + * visitor would reach the instantiating tvar. + * + * For example, the example below would give an assert ground error: + * Missing type annotation for new C(). + */ + +class C { + m(x: this) { } // error: this in contravariant position +} + +module.exports = { + foo: new C(), // no missing annot error here +} diff --git a/tests/missing_annotation/missing_annotation.exp b/tests/missing_annotation/missing_annotation.exp index 6b42032f26a..39d5c33d9bc 100644 --- a/tests/missing_annotation/missing_annotation.exp +++ b/tests/missing_annotation/missing_annotation.exp @@ -22,5 +22,19 @@ Missing type annotation for `x`. ^ +Error ------------------------------------------------------------------------------------------- invariant_this.js:10:8 -Found 3 errors +Cannot use `this` [1] in an input position because `this` [1] is expected to occur only in output positions. + + invariant_this.js:10:8 + 10| m(x: this) { } // error: this in contravariant position + ^^^^ + +References: + invariant_this.js:9:7 + 9| class C { + ^ [1] + + + +Found 4 errors diff --git a/tests/module_ref/.flowconfig b/tests/module_ref/.flowconfig new file mode 100644 index 00000000000..15f8184393b --- /dev/null +++ b/tests/module_ref/.flowconfig @@ -0,0 +1,13 @@ +[ignore] + +[include] + +[libs] + +[options] +no_flowlib=false +module.system=haste +module.system.haste.module_ref_prefix=m# + +[lints] +untyped-import=error diff --git a/tests/module_ref/.testconfig b/tests/module_ref/.testconfig new file mode 100644 index 00000000000..d31f7922cbe --- /dev/null +++ b/tests/module_ref/.testconfig @@ -0,0 +1 @@ +all: false diff --git a/tests/module_ref/A.js b/tests/module_ref/A.js new file mode 100644 index 00000000000..6f26582e8ce --- /dev/null +++ b/tests/module_ref/A.js @@ -0,0 +1,8 @@ +/* + * @providesModule A + * @flow + */ + +module.exports = { + FOO: 'bar', +}; diff --git a/tests/module_ref/B.js b/tests/module_ref/B.js new file mode 100644 index 00000000000..f26ac1a1e97 --- /dev/null +++ b/tests/module_ref/B.js @@ -0,0 +1,13 @@ +/* + * @providesModule B + * @flow + */ + +declare function ifRequired( + id: $Flow$ModuleRef, + cbYes: (module: TModule) => TYes, +): TYes | void; + +ifRequired('A', A => A.FOO); // Error - A is a plain string, not a module ref +ifRequired('m#A', A => A.FOO); // Error - FOO is not present in A's exports +ifRequired('m#A', A => A.BAR); // Ok diff --git a/tests/module_ref/C.js b/tests/module_ref/C.js new file mode 100644 index 00000000000..c382bcffd9c --- /dev/null +++ b/tests/module_ref/C.js @@ -0,0 +1,4 @@ +/** + * @providesModule C + * @noflow + */ diff --git a/tests/module_ref/D.js b/tests/module_ref/D.js new file mode 100644 index 00000000000..d34aedc5295 --- /dev/null +++ b/tests/module_ref/D.js @@ -0,0 +1,11 @@ +/** + * @flow + */ + +declare 
function myRequire( + id: $Flow$ModuleRef, +): TModule; + +const C = myRequire( + 'm#C', +); diff --git a/tests/module_ref/module_ref.exp b/tests/module_ref/module_ref.exp new file mode 100644 index 00000000000..38d5ac08170 --- /dev/null +++ b/tests/module_ref/module_ref.exp @@ -0,0 +1,42 @@ +Error ------------------------------------------------------------------------------------------------------- B.js:11:12 + +Cannot call `ifRequired` with `'A'` bound to `id` because string [1] is incompatible with `$Flow$ModuleRef` [2]. + + B.js:11:12 + 11| ifRequired('A', A => A.FOO); // Error - A is a plain string, not a module ref + ^^^ [1] + +References: + B.js:7:7 + 7| id: $Flow$ModuleRef, + ^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------- B.js:13:24 + +Cannot get `A.BAR` because property `BAR` is missing in object literal [1]. + + B.js:13:24 + 13| ifRequired('m#A', A => A.BAR); // Ok + ^^^^^ + +References: + A.js:6:18 + v + 6| module.exports = { + 7| FOO: 'bar', + 8| }; + ^ [1] + + +Error -------------------------------------------------------------------------------------------------------- D.js:10:3 + +Importing from an untyped module makes it `any` and is not safe! Did you mean to add `// @flow` to the top of `C`? +(`untyped-import`) + + 10| 'm#C', + ^^^^^ + + + +Found 3 errors diff --git a/tests/more_generics/.flowconfig b/tests/more_generics/.flowconfig index 1040ff3332f..2e4a2ec92b8 100644 --- a/tests/more_generics/.flowconfig +++ b/tests/more_generics/.flowconfig @@ -1,3 +1,3 @@ [options] module.system=haste - +no_flowlib=false diff --git a/tests/more_generics/class_generic.js b/tests/more_generics/class_generic.js new file mode 100644 index 00000000000..d132ea78a4c --- /dev/null +++ b/tests/more_generics/class_generic.js @@ -0,0 +1,10 @@ +// @flow + +class C { + arr: Array<{value: T}>; + + foo(value: T) { + var entry: {value: T} = {value}; + this.arr.push(entry); + } +} diff --git a/tests/more_react/inexact_config.js b/tests/more_react/inexact_config.js new file mode 100644 index 00000000000..43a42668961 --- /dev/null +++ b/tests/more_react/inexact_config.js @@ -0,0 +1,23 @@ +//@flow +const React = require('react'); +function Component(): React$Node { return null; } + +const _a = ; +const _b = {"foo"}; +const _b2 = ; + +type Props = {||} +function Component2(props : Props) : React$Node { return null; } + +const _c = ; // error +const _d = {"foo"}; //error + +function Component3(props : { }) : React$Node { return null; } + +const _e = ; +const _f = {"foo"}; + +function Component4(props : {| foo : number |}) : React$Node { return null; } + +const _g = ; //error +const _h = ; //error diff --git a/tests/more_react/inexact_props.js b/tests/more_react/inexact_props.js new file mode 100644 index 00000000000..11155a45498 --- /dev/null +++ b/tests/more_react/inexact_props.js @@ -0,0 +1,44 @@ +//@flow + +const React = require('react'); + +type Props1 = { + +x : number; + y : string; +} + +type Props2 = {| + +x : number; + y : string; +|} + +class A extends React$Component {} +class B extends React$Component {} + +; +; // error + + +type Props3 = { p1? : boolean, p2? : number } + +class XComponent extends React.PureComponent< + XProps, + XState +> {} + +declare var cond : boolean; +const BaseComponent = cond ? React.Component : XComponent; + +type Props4 = $Exact<{ + p1? : boolean, + p2? : number, + p3? 
: string, +}>; + +class YComponent extends BaseComponent { + props : Props4; +} + +declare var s : ?string; + + diff --git a/tests/more_react/more_react.exp b/tests/more_react/more_react.exp index 4c4bf1d5e92..0b2073ae35c 100644 --- a/tests/more_react/more_react.exp +++ b/tests/more_react/more_react.exp @@ -58,15 +58,15 @@ Cannot call `checkPropTypes` because function [1] requires another argument. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:214:41 + /react.js:227:41 v--- - 214| declare export function checkPropTypes( - 215| propTypes: $Subtype<{[_: $Keys]: ReactPropsCheckType}>, - 216| values: V, - 217| location: string, - 218| componentName: string, - 219| getStack: ?(() => ?string) - 220| ) : void; + 227| declare export function checkPropTypes( + 228| propTypes : any, + 229| values: V, + 230| location: string, + 231| componentName: string, + 232| getStack: ?(() => ?string) + 233| ) : void; -------^ [1] @@ -79,27 +79,18 @@ Cannot call `checkPropTypes` because function [1] requires another argument. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:214:41 + /react.js:227:41 v--- - 214| declare export function checkPropTypes( - 215| propTypes: $Subtype<{[_: $Keys]: ReactPropsCheckType}>, - 216| values: V, - 217| location: string, - 218| componentName: string, - 219| getStack: ?(() => ?string) - 220| ) : void; + 227| declare export function checkPropTypes( + 228| propTypes : any, + 229| values: V, + 230| location: string, + 231| componentName: string, + 232| getStack: ?(() => ?string) + 233| ) : void; -------^ [1] -Error ------------------------------------------------------------------------------------------ checkPropTypes.js:10:43 - -Cannot call `checkPropTypes` with object literal bound to `values` because property `bar` is missing in object -literal [1]. - - 10| checkPropTypes({ bar: PropTypes.string }, { foo: 'foo' }, 'value', 'TestComponent'); // error: property not found - ^^^^^^^^^^^^^^ [1] - - Error ------------------------------------------------------------------------------------------ checkPropTypes.js:12:85 Cannot call `checkPropTypes` with function bound to `getStack` because number [1] is incompatible with string [2] in the @@ -113,11 +104,100 @@ References: checkPropTypes.js:12:91 12| checkPropTypes({ foo: PropTypes.string }, { foo: 'foo' }, 'value', 'TestComponent', () => 123); // error: number ~> string ^^^ [1] - /react.js:219:24 - 219| getStack: ?(() => ?string) + /react.js:232:24 + 232| getStack: ?(() => ?string) ^^^^^^ [2] +Error ------------------------------------------------------------------------------------------ inexact_config.js:12:12 + +Cannot create `Component2` element because: + - property `bar` is missing in `Props` [1] but exists in props [2]. + - property `foo` is missing in `Props` [1] but exists in props [2]. + + inexact_config.js:12:12 + 12| const _c = ; // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + inexact_config.js:10:29 + 10| function Component2(props : Props) : React$Node { return null; } + ^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------ inexact_config.js:13:12 + +Cannot create `Component2` element because property `children` is missing in `Props` [1] but exists in props [2]. 
+ + inexact_config.js:13:12 + 13| const _d = {"foo"}; //error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + inexact_config.js:10:29 + 10| function Component2(props : Props) : React$Node { return null; } + ^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------ inexact_config.js:22:12 + +Cannot create `Component4` element because property `bar` is missing in object type [1] but exists in props [2]. + + inexact_config.js:22:12 + 22| const _g = ; //error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + inexact_config.js:20:29 + 20| function Component4(props : {| foo : number |}) : React$Node { return null; } + ^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------ inexact_config.js:23:12 + +Cannot create `Component4` element because property `foo` is missing in props [1] but exists in object type [2]. + + inexact_config.js:23:12 + 23| const _h = ; //error + ^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + inexact_config.js:20:29 + 20| function Component4(props : {| foo : number |}) : React$Node { return null; } + ^^^^^^^^^^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- inexact_props.js:19:1 + +Cannot create `B` element because property `z` is missing in `Props2` [1] but exists in props [2]. + + inexact_props.js:19:1 + 19| ; // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + inexact_props.js:16:33 + 16| class B extends React$Component {} + ^^^^^^ [1] + + +Error -------------------------------------------------------------------------------------------- inexact_props.js:44:1 + +Cannot create `YComponent` element because null or undefined [1] is incompatible with string [2] in property `p3`. + + inexact_props.js:44:1 + 44| + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + inexact_props.js:42:17 + 42| declare var s : ?string; + ^^^^^^^ [1] + inexact_props.js:35:9 + 35| p3? : string, + ^^^^^^ [2] + + Error ------------------------------------------------------------------------------------------------ propTypes.js:15:1 Cannot create `D` element because: @@ -138,5 +218,56 @@ References: ^ [2] +Error ---------------------------------------------------------------------------------------- subclassComponent.js:29:2 + +`D` [1] is incompatible with `React.Component` [2]. + + subclassComponent.js:29:2 + 29| ; // error + ^ + +References: + subclassComponent.js:21:15 + 21| declare class D extends React$Component2 {} + ^ [1] + /react.js:26:15 + 26| declare class React$Component { + ^^^^^^^^^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------- subclassComponent.js:30:2 + +`E` [1] is incompatible with `React.Component` [2]. + + subclassComponent.js:30:2 + 30| ; // error + ^ + +References: + subclassComponent.js:23:15 + 23| declare class E extends D {} + ^ [1] + /react.js:26:15 + 26| declare class React$Component { + ^^^^^^^^^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------- subclassComponent.js:31:2 + +`F` [1] is incompatible with `React.Component` [2]. 
+ + subclassComponent.js:31:2 + 31| ; // error + ^ + +References: + subclassComponent.js:25:15 + 25| declare class F { + ^ [1] + /react.js:26:15 + 26| declare class React$Component { + ^^^^^^^^^^^^^^^ [2] + + -Found 9 errors +Found 18 errors diff --git a/tests/more_react/react-copy-write.js b/tests/more_react/react-copy-write.js new file mode 100644 index 00000000000..b6c27b5545e --- /dev/null +++ b/tests/more_react/react-copy-write.js @@ -0,0 +1,42 @@ +//@flow +var React = require("react"); + +export type Recipe = (draft: T, state: $ReadOnly) => void; +export type Mutate = (recipe: Recipe) => void; + +type ConsumerRender = (...S) => React$Node; + +type ProviderProps = {| + children: React$Node, + initialState?: T, +|}; + +export type Provider = React$ComponentType>; + +type GetReturnType = ((T) => S) => S; + +type ConsumerProps mixed>> = {| + select?: TSelect, + children?: ConsumerRender<$TupleMap>, + render?: ConsumerRender<$TupleMap>, +|}; + +type Selector = T => R; + +export type Store = { + +Provider: Provider, + +Consumer: { + mixed>>( + ConsumerProps, + ): React$Node, + // Need the following to fake this as a functional component + displayName?: ?string, + propTypes?: any, + contextTypes?: any, + }, + +mutate: Mutate, + createSelector(Selector): Selector, +}; + +declare var store : Store; + diff --git a/tests/more_react/subclassComponent.js b/tests/more_react/subclassComponent.js new file mode 100644 index 00000000000..f9401717698 --- /dev/null +++ b/tests/more_react/subclassComponent.js @@ -0,0 +1,31 @@ +//@#flow + +var React = require("React"); + +type Props1 = {||} + +declare class A extends React$Component {} +declare class B extends A {} +declare class C extends B {} + +; +; +; + +declare class React$Component2 { + props : Props; +} + +type Props2 = {||} + +declare class D extends React$Component2 {} + +declare class E extends D {} + +declare class F { + props : {||} +} + +; // error +; // error +; // error diff --git a/tests/multiflow/multiflow.exp b/tests/multiflow/multiflow.exp index 24d97235871..8220379aaba 100644 --- a/tests/multiflow/multiflow.exp +++ b/tests/multiflow/multiflow.exp @@ -215,7 +215,7 @@ References: Error ------------------------------------------------------------------------------------------------------ jsx.js:34:2 Cannot create `ExpectsProps` element because property `name` is missing in object type [1] but exists in object type [2] -in the first argument of type argument `C`. +in the first argument. jsx.js:34:2 34| (); // Error - missing prop @@ -250,7 +250,7 @@ References: Error ------------------------------------------------------------------------------------------------------ jsx.js:38:2 Cannot create `ExpectsChildrenTuple` element because rest array [1] has an arity of 0 but tuple type [2] has an arity of -1 in the second argument of type argument `C`. +1. jsx.js:38:2 38| (); // Error - missing child @@ -264,26 +264,25 @@ References: Error ------------------------------------------------------------------------------------------------------ jsx.js:40:2 -Cannot create `ExpectsChildrenTuple` element because string [1] is incompatible with number [2] in index 0 of the second -argument of type argument `C`. +Cannot create `ExpectsChildrenTuple` element because number [1] is incompatible with string [2] in index 0. 
jsx.js:40:2 40| ({123}); // Error: number ~> string ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - jsx.js:37:62 - 37| declare function ExpectsChildrenTuple(props: any, children: [string]): string; - ^^^^^^ [1] jsx.js:40:25 40| ({123}); // Error: number ~> string - ^^^ [2] + ^^^ [1] + jsx.js:37:62 + 37| declare function ExpectsChildrenTuple(props: any, children: [string]): string; + ^^^^^^ [2] Error ------------------------------------------------------------------------------------------------------ jsx.js:41:2 Cannot create `ExpectsChildrenTuple` element because rest array [1] has an arity of 2 but tuple type [2] has an arity of -1 in the second argument of type argument `C`. +1. jsx.js:41:2 41| (Hi {"there"}); // Error: too many children @@ -297,20 +296,19 @@ References: Error ------------------------------------------------------------------------------------------------------ jsx.js:46:2 -Cannot create `ExpectsChildrenArray` element because string [1] is incompatible with number [2] in array element of the -second argument of type argument `C`. +Cannot create `ExpectsChildrenArray` element because number [1] is incompatible with string [2] in array element. jsx.js:46:2 46| ({123}); // Error: number ~> string ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - jsx.js:43:67 - 43| declare function ExpectsChildrenArray(props: any, children: Array): string; - ^^^^^^ [1] jsx.js:46:25 46| ({123}); // Error: number ~> string - ^^^ [2] + ^^^ [1] + jsx.js:43:67 + 43| declare function ExpectsChildrenArray(props: any, children: Array): string; + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------------- spread.js:7:1 diff --git a/tests/munge_underscores_assert_ground_cycle/.flowconfig b/tests/munge_underscores_assert_ground_cycle/.flowconfig new file mode 100644 index 00000000000..ddd542a43d2 --- /dev/null +++ b/tests/munge_underscores_assert_ground_cycle/.flowconfig @@ -0,0 +1,2 @@ +[options] +munge_underscores=true diff --git a/tests/munge_underscores_assert_ground_cycle/a.js b/tests/munge_underscores_assert_ground_cycle/a.js new file mode 100644 index 00000000000..af59ab7a650 --- /dev/null +++ b/tests/munge_underscores_assert_ground_cycle/a.js @@ -0,0 +1,4 @@ +//@flow +//@preventMunge +const b = require('./b'); +module.exports = b.x; diff --git a/tests/munge_underscores_assert_ground_cycle/b.js b/tests/munge_underscores_assert_ground_cycle/b.js new file mode 100644 index 00000000000..b783dbe874d --- /dev/null +++ b/tests/munge_underscores_assert_ground_cycle/b.js @@ -0,0 +1,8 @@ +//@flow +const a = require('./a'); +class A { + static x: number = 3; + _munged(arg) { return a; } +} + +module.exports = A; diff --git a/tests/munge_underscores_assert_ground_cycle/munge_underscores_assert_ground_cycle.exp b/tests/munge_underscores_assert_ground_cycle/munge_underscores_assert_ground_cycle.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ b/tests/munge_underscores_assert_ground_cycle/munge_underscores_assert_ground_cycle.exp @@ -0,0 +1 @@ +Found 0 errors diff --git a/tests/name_prop/function.js b/tests/name_prop/function.js index 304356168f1..20c1872d989 100644 --- a/tests/name_prop/function.js +++ b/tests/name_prop/function.js @@ -1,4 +1,4 @@ -/* TODO - we currently say that a function's statics are an AnyObjT and +/* TODO - we currently say that a function's statics are an AnyT and * anything goes. 
When we start enforcing the statics properly, we'll need to * know that .name exists */ diff --git a/tests/new_react/FeedUFI.react.js b/tests/new_react/FeedUFI.react.js index 1f2f35f2a3e..6e4fa0e188a 100644 --- a/tests/new_react/FeedUFI.react.js +++ b/tests/new_react/FeedUFI.react.js @@ -1,5 +1,5 @@ /** - * Copyright 2004-present Facebook. All Rights Reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * @providesModule FeedUFI.react * @flow diff --git a/tests/new_react/UFILikeCount.react.js b/tests/new_react/UFILikeCount.react.js index 3260acdea0b..39ab041cd72 100644 --- a/tests/new_react/UFILikeCount.react.js +++ b/tests/new_react/UFILikeCount.react.js @@ -1,5 +1,5 @@ /** - * Copyright 2004-present Facebook. All Rights Reserved. + * Copyright (c) Facebook, Inc. and its affiliates. * * @providesModule UFILikeCount.react * @flow diff --git a/tests/new_react/argless_function.js b/tests/new_react/argless_function.js new file mode 100644 index 00000000000..fc28aedd557 --- /dev/null +++ b/tests/new_react/argless_function.js @@ -0,0 +1,10 @@ +//@flow + +const React = require('react'); + +function Component(): React.Node { return null; } + +const element = ; + +var x: React.ElementProps = element.props; +x.foo = 3; // Error, the props type for Component is a sealed empty object. diff --git a/tests/new_react/new_react.exp b/tests/new_react/new_react.exp index 9e79a1c89dd..81d3f1d1069 100644 --- a/tests/new_react/new_react.exp +++ b/tests/new_react/new_react.exp @@ -1,37 +1,52 @@ +Error ----------------------------------------------------------------------------------------- argless_function.js:10:1 + +Cannot assign `3` to `x.foo` because property `foo` is missing in function [1]. + + argless_function.js:10:1 + 10| x.foo = 3; // Error, the props type for Component is a sealed empty object. + ^^^^^ + +References: + argless_function.js:9:8 + 9| var x: React.ElementProps = element.props; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + Error --------------------------------------------------------------------------------------- bad_default_props.js:15:18 -Cannot use property `Component` [1] with fewer than 1 type argument. +Cannot use `React.Component` [1] with fewer than 1 type argument. bad_default_props.js:15:18 15| class C3 extends React.Component { // error ^^^^^^^^^^^^^^^ References: - /react.js:26:31 + /react.js:26:30 26| declare class React$Component { - ^^^^^^^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^ [1] Error --------------------------------------------------------------------------------------- bad_default_props.js:20:18 -Cannot use property `Component` [1] with fewer than 1 type argument. +Cannot use `React.Component` [1] with fewer than 1 type argument. bad_default_props.js:20:18 20| class C4 extends React.Component { ^^^^^^^^^^^^^^^ References: - /react.js:26:31 + /react.js:26:30 26| declare class React$Component { - ^^^^^^^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^ [1] Error --------------------------------------------------------------------------------------------------- classes.js:7:1 -Cannot extend property `Component` [1] with `Foo` because: +Cannot extend `React.Component` [1] with `Foo` because: - property `y_` is missing in function type [2] but exists in object type [3] in the first argument of property `setState`. - property `y_` is missing in `State` [4] but exists in object type [3] in the first argument of property `setState`. + - object type [3] is incompatible with null or undefined [5] in the first argument of property `setState`. 
classes.js:7:1 v------------------------------------------------ @@ -71,15 +86,18 @@ References: classes.js:7:19 7| class Foo extends React.Component { ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /react.js:35:36 - 35| partialState: $Shape | ((State, Props) => $Shape | void), - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + /react.js:35:37 + 35| partialState: ?$Shape | ((State, Props) => ?$Shape), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] classes.js:23:15 23| setState(o: { y_: string }): void { } ^^^^^^^^^^^^^^ [3] classes.js:7:42 7| class Foo extends React.Component { ^^^^^ [4] + /react.js:35:19 + 35| partialState: ?$Shape | ((State, Props) => ?$Shape), + ^^^^^^^^^^^^^^ [5] Error ------------------------------------------------------------------------------------------------- classes.js:15:21 @@ -132,19 +150,25 @@ References: Error ------------------------------------------------------------------------------------------------- classes.js:39:25 -Cannot assign `` to `foo` because class `Foo` [1] is incompatible with number [2] in property `type`. +Cannot assign `` to `foo` because: + - number [1] is not a React component. + - number [1] is incompatible with string [2] in type argument `ElementType`. + - class `Foo` [3] is incompatible with number [1] in property `type`. classes.js:39:25 - 39| var foo: $jsx = ; - ^^^^^^ + 39| var foo: $jsx = ; + ^^^^^^ References: - classes.js:7:7 - 7| class Foo extends React.Component { - ^^^ [1] classes.js:39:15 - 39| var foo: $jsx = ; - ^^^^^^ [2] + 39| var foo: $jsx = ; + ^^^^^^ [1] + /react.js:170:5 + 170| | string + ^^^^^^ [2] + classes.js:7:7 + 7| class Foo extends React.Component { + ^^^ [3] Error ------------------------------------------------------------------------------------------------- classes.js:39:25 @@ -161,25 +185,6 @@ References: ^^^^^ [2] -Error ------------------------------------------------------------------------------------------------- classes.js:39:25 - -Cannot instantiate `React.Element` because: - - number [1] is not a React component. - - number [1] is incompatible with string [2] in type argument `ElementType`. - - classes.js:39:25 - 39| var foo: $jsx = ; - ^^^^^^ - -References: - classes.js:39:15 - 39| var foo: $jsx = ; - ^^^^^^ [1] - /react.js:165:5 - 165| | string - ^^^^^^ [2] - - Error ------------------------------------------------------------------------------------------------- classes.js:57:21 Cannot assign `this.props.x` to `_` because number [1] is incompatible with string [2]. @@ -189,8 +194,8 @@ Cannot assign `this.props.x` to `_` because number [1] is incompatible with stri ^^^^^^^^^^^^ References: - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [1] classes.js:57:12 57| var _: string = this.props.x; @@ -206,9 +211,9 @@ Property `y_` is missing in function type [1] but exists in object type [2]. ^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:35:36 - 35| partialState: $Shape | ((State, Props) => $Shape | void), - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /react.js:35:37 + 35| partialState: ?$Shape | ((State, Props) => ?$Shape), + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] classes.js:64:15 64| setState(o: { y_: string }): void { }, ^^^^^^^^^^^^^^ [2] @@ -231,53 +236,76 @@ References: ^^^^^^^^^^^^^^ [2] +Error ------------------------------------------------------------------------------------------------- classes.js:64:15 + +object type [1] is incompatible with null or undefined [2]. 
+ + classes.js:64:15 + 64| setState(o: { y_: string }): void { }, + ^^^^^^^^^^^^^^ [1] + +References: + /react.js:35:19 + 35| partialState: ?$Shape | ((State, Props) => ?$Shape), + ^^^^^^^^^^^^^^ [2] + + Error ------------------------------------------------------------------------------------------------- classes.js:79:32 -Cannot assign `` to `foo_legacy` because class React component [1] is incompatible with number [2] in -property `type`. +Cannot assign `` to `foo_legacy` because: + - number [1] is not a React component. + - number [1] is incompatible with string [2] in type argument `ElementType`. + - class React component [3] is incompatible with number [1] in property `type`. + - number [4] is not an object in type argument `ElementType`. classes.js:79:32 - 79| var foo_legacy: $jsx = ; - ^^^^^^^^^^^^ + 79| var foo_legacy: $jsx = ; + ^^^^^^^^^^^^ References: - classes.js:43:17 - v------------------ - 43| var FooLegacy = React.createClass({ - 44| is_mounted: (undefined: ?boolean), - 45| - 46| propTypes: { - 47| x: React.PropTypes.number.isRequired - 48| }, - 49| - 50| getDefaultProps(): DefaultProps { return {} }, - 51| - 52| statics: { - 53| bar(): void {} - 54| }, - 55| - 56| qux(): void { - 57| var _: string = this.props.x; - : - 62| }, - 63| - 64| setState(o: { y_: string }): void { }, - 65| - 66| componentDidMount(): void { - 67| this.is_mounted = true; - 68| }, - 69| - 70| componentWillReceiveProps( - 71| nextProps: Object, - 72| nextContext: any - 73| ): void { - 74| this.qux(); - 75| }, - 76| }); - -^ [1] classes.js:79:22 - 79| var foo_legacy: $jsx = ; - ^^^^^^ [2] + 79| var foo_legacy: $jsx = ; + ^^^^^^ [1] + /react.js:170:5 + 170| | string + ^^^^^^ [2] + classes.js:43:17 + v------------------ + 43| var FooLegacy = React.createClass({ + 44| is_mounted: (undefined: ?boolean), + 45| + 46| propTypes: { + 47| x: React.PropTypes.number.isRequired + 48| }, + 49| + 50| getDefaultProps(): DefaultProps { return {} }, + 51| + 52| statics: { + 53| bar(): void {} + 54| }, + 55| + 56| qux(): void { + 57| var _: string = this.props.x; + : + 62| }, + 63| + 64| setState(o: { y_: string }): void { }, + 65| + 66| componentDidMount(): void { + 67| this.is_mounted = true; + 68| }, + 69| + 70| componentWillReceiveProps( + 71| nextProps: Object, + 72| nextContext: any + 73| ): void { + 74| this.qux(); + 75| }, + 76| }); + -^ [3] + classes.js:78:26 + 78| FooLegacy.defaultProps = 0; + ^ [4] Error ------------------------------------------------------------------------------------------------- classes.js:79:32 @@ -302,25 +330,6 @@ References: ^ [3] -Error ------------------------------------------------------------------------------------------------- classes.js:79:32 - -Cannot instantiate `React.Element` because: - - number [1] is not a React component. - - number [1] is incompatible with string [2] in type argument `ElementType`. - - classes.js:79:32 - 79| var foo_legacy: $jsx = ; - ^^^^^^^^^^^^ - -References: - classes.js:79:22 - 79| var foo_legacy: $jsx = ; - ^^^^^^ [1] - /react.js:165:5 - 165| | string - ^^^^^^ [2] - - Error --------------------------------------------------------------------------------------------- import-react.js:16:1 Cannot create `HelloMessage` element because number [1] is incompatible with string [2] in property `name`. 
@@ -388,8 +397,8 @@ Cannot assign `this.props.z` to `qux` because: ^^^^^^^^^^^^ References: - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [1] new_react.js:19:18 19| var qux: string = this.props.z; @@ -405,8 +414,8 @@ Cannot assign `this.props.x` to `w` because string [1] is incompatible with numb ^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] new_react.js:20:15 20| var w:number = this.props.x; @@ -415,7 +424,8 @@ References: Error ------------------------------------------------------------------------------------------------ new_react.js:21:9 -Cannot get `this.props.y[0]` because an indexer property is missing in undefined [1]. +Cannot get `this.props.y[0]` because an index signature declaring the expected key / value type is missing in +undefined [1]. new_react.js:21:9 21| this.props.y[0]; @@ -439,8 +449,8 @@ References: new_react.js:29:23 29| var element = ; ^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] @@ -475,63 +485,87 @@ Cannot assign `C.displayName` to `x` because: ^^^^^^^^^^^^^ References: - /react.js:89:24 - 89| static displayName?: ?string; + /react.js:87:24 + 87| static displayName?: ?string; ^^^^^^^ [1] new_react.js:32:8 32| var x: number = C.displayName; ^^^^^^ [2] - /react.js:89:25 - 89| static displayName?: ?string; + /react.js:87:25 + 87| static displayName?: ?string; ^^^^^^ [3] -Error ------------------------------------------------------------------------------------------------ propTypes.js:13:1 +Error ---------------------------------------------------------------------------------------- object_component.js:14:12 -Cannot create `C` element because property `label` is missing in object literal [1] but exists in object type [2] in -array element of property `statistics`. +Cannot create `Component` element because property `bar` is missing in props [1] but exists in object type [2]. - propTypes.js:13:1 - v--------------- - 13| ; // error (label is required, value not required) - ---^ + object_component.js:14:12 + 14| const _a = ; // Error, missing bar + ^^^^^^^^^^^^^ [1] References: - propTypes.js:14:3 - 14| {}, - ^^ [1] - propTypes.js:6:35 - v---------------- - 6| statistics: PropTypes.arrayOf(PropTypes.shape({ - 7| label: PropTypes.string.isRequired, - 8| value: PropTypes.number, - 9| })).isRequired, - -^ [2] + object_component.js:8:11 + 8| (props: {| foo: number, bar: number |}): React.Node, + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------- propTypes.js:18:53 +Error ---------------------------------------------------------------------------------------- object_component.js:17:12 -Cannot assign array literal to `props` because property `label` is missing in object literal [1] but exists in object -type [2] in array element. +Cannot create `Component` element because property `baz` is missing in object type [1] but exists in props [2]. 
- propTypes.js:18:53 - v - 18| var props: Array<{label: string, value?: number}> = [ - 19| {}, - 20| {label:"",value:undefined}, - 21| ]; // error (same as ^) - ^ + object_component.js:17:12 + 17| const _d = ; // Error, baz is not in the config + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + +References: + object_component.js:8:11 + 8| (props: {| foo: number, bar: number |}): React.Node, + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ---------------------------------------------------------------------------------------- object_component.js:20:47 + +Cannot assign `null` to `_y` because null [1] is incompatible with undefined [2]. + + object_component.js:20:47 + 20| const _y: React.ElementRef = null; // Error, ref is undefined + ^^^^ [1] + +References: + object_component.js:20:11 + 20| const _y: React.ElementRef = null; // Error, ref is undefined + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------- object_component.js:23:56 + +Cannot assign object literal to `_badProps` because property `foo` is missing in object literal [1] but exists in object +type [2]. + + object_component.js:23:56 + 23| const _badProps: React.ElementProps = {bar: 3}; // Error missing foo + ^^^^^^^^ [1] + +References: + object_component.js:23:18 + 23| const _badProps: React.ElementProps = {bar: 3}; // Error missing foo + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------- object_component.js:24:57 + +Cannot assign object literal to `_badProps2` because property `baz` is missing in object type [1] but exists in object +literal [2]. + + object_component.js:24:57 + 24| const _badProps2: React.ElementProps = {bar: 3, foo: 3, baz: 3}; // Error extra baz + ^^^^^^^^^^^^^^^^^^^^^^^^ [2] References: - propTypes.js:19:3 - 19| {}, - ^^ [1] - propTypes.js:18:18 - 18| var props: Array<{label: string, value?: number}> = [ - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + object_component.js:24:19 + 24| const _badProps2: React.ElementProps = {bar: 3, foo: 3, baz: 3}; // Error extra baz + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] Error --------------------------------------------------------------------------------------------------- props.js:14:25 @@ -545,8 +579,8 @@ Cannot assign `this.props.x` to `a` because: ^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] props.js:14:16 14| var a: number = this.props.x; // error @@ -582,8 +616,8 @@ Cannot assign `this.props.z` to `c` because: ^^^^^^^^^^^^ References: - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [1] props.js:16:16 16| var c: string = this.props.z; // error @@ -604,14 +638,14 @@ References: props.js:20:29 20| var element = ; // 3 errors ^^^^^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] props.js:20:49 20| var element = ; // 3 errors ^^^^^ [3] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [4] @@ -668,8 +702,8 @@ References: props2.js:9:41 9| getInitialState: function(): { bar: number } { ^^^^^^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] props2.js:15:42 15| return ; @@ -685,8 +719,8 
@@ Cannot get `React.PropTypes.imaginaryType` because property `imaginaryType` is m ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:211:33 - 211| declare export var PropTypes: ReactPropTypes; + /react.js:224:33 + 224| declare export var PropTypes: ReactPropTypes; ^^^^^^^^^^^^^^ [1] @@ -700,12 +734,13 @@ Cannot get `React.PropTypes.string.inRequired` because property `inRequired` is ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:302:39 + /react.js:445:39 v - 302| type ReactPropsChainableTypeChecker = { - 303| isRequired: ReactPropsCheckType; - 304| (props: any, propName: string, componentName: string, href?: string): ?Error; - 305| }; + 445| type ReactPropsChainableTypeChecker = { + 446| (props: any, propName: string, componentName: string, href?: string): ?Error, + 447| isRequired: ReactPropsCheckType, + 448| ... + 449| }; ^ [1] @@ -750,8 +785,8 @@ Cannot call `ReactDOM.render` with `document.body` bound to `container` because ^^^^^^^^^^^^^ References: - /dom.js:619:27 - 619| body: HTMLBodyElement | null; + /dom.js:733:27 + 733| body: HTMLBodyElement | null; ^^^^ [1] /react-dom.js:18:16 18| container: Element, @@ -935,7 +970,7 @@ References: -Found 60 errors +Found 67 errors Only showing the most relevant union/intersection branches. To see all branches, re-run Flow with --show-all-branches diff --git a/tests/new_react/object_component.js b/tests/new_react/object_component.js new file mode 100644 index 00000000000..587d4ada696 --- /dev/null +++ b/tests/new_react/object_component.js @@ -0,0 +1,26 @@ +//@flow + +const React = require('react'); + +// Callable objects are valid react components with a void instance type. + +type ObjectComponent = { + (props: {| foo: number, bar: number |}): React.Node, + defaultProps: {| foo: number |}, +}; + +declare var Component: ObjectComponent; + +const _a = ; // Error, missing bar +const _b = ; // Ok +const _c = ; // Ok +const _d = ; // Error, baz is not in the config + +const _x: React.ElementRef = undefined; // ok +const _y: React.ElementRef = null; // Error, ref is undefined + +const _props: React.ElementProps = {foo: 3, bar: 3}; +const _badProps: React.ElementProps = {bar: 3}; // Error missing foo +const _badProps2: React.ElementProps = {bar: 3, foo: 3, baz: 3}; // Error extra baz + +const AC: React.AbstractComponent<{| +foo?: number, +bar: number |}, void> = Component; diff --git a/tests/new_refi/test3.js b/tests/new_refi/test3.js index a15a6aabc37..cd4b6b0303b 100644 --- a/tests/new_refi/test3.js +++ b/tests/new_refi/test3.js @@ -5,3 +5,15 @@ const x: (?number) => (() => number) = val => { } return () => val; // OK, since val cannot be null } + +function param_annot(x: ?string): ?(() => string) { + if (x != null) { + return () => x; + } +} + +function rest(...x: ?Array): ?(() => Array) { + if (x != null) { + return () => x; + } +} diff --git a/tests/new_spread/cross_module/app.js b/tests/new_spread/cross_module/app.js index 9b633b2d15c..8bc23283ee1 100644 --- a/tests/new_spread/cross_module/app.js +++ b/tests/new_spread/cross_module/app.js @@ -5,4 +5,4 @@ import fn from './fn'; const app = fn((o: {a: number, b: number}) => {}); app({a: 'foo', b: 2}); -export default fn((o: {a: number, b: number}) => {}); +export default (fn((o: {a: number, b: number}) => {}): {|a:number, b:number|} => void); diff --git a/tests/new_spread/error_messages.js b/tests/new_spread/error_messages.js new file mode 100644 index 00000000000..b6c86d435c8 --- /dev/null +++ b/tests/new_spread/error_messages.js @@ -0,0 +1,37 @@ +//@flow + +declare var x: {a: 
number, ...{| b: number |}, c: number, ...{d: number}}; // Error +(x: any); + + +declare var y: {...{a: number}, c: number, d: number, ...{| b?: number |}}; // Error +(y: any); + +type A = {| b: number |} +type B = {d: number}; + +declare var x2: {a: number, ...A, c: number, ...B}; // Error +(x2: any); + +type C = {a: number}; +type D = {| b?: number |}; + +declare var y2: {...C, c: number, d: number, ...D}; // Error +(y2: any); + + +declare var x3: { // Error, but message could use improvement. + ...{a: number}, + d: number, + ...{b: number}, + e: number, + ...{c: number}, + f: number, +}; +(x3: any); + +declare var x4: {...A, ...B, ...C, ...D}; // Error, representative of common case +(x4: any); + +declare var x5: {foo: number, bar: number, ...B}; // Error, representative of common case +(x5: any); diff --git a/tests/new_spread/new_spread.exp b/tests/new_spread/new_spread.exp index 7cc72eeb1c5..095930e9b16 100644 --- a/tests/new_spread/new_spread.exp +++ b/tests/new_spread/new_spread.exp @@ -16,39 +16,215 @@ References: ^^^^^ [2] -Error ----------------------------------------------------------------------------------------- cross_module/app.js:8:19 +Error ------------------------------------------------------------------------------------------ cross_module/use.js:5:5 -Cannot call `fn` with function bound to `arg` because number [1] is incompatible with string [2] in property `a` of the -first argument. +Cannot call `app` with object literal bound to the first parameter because string [1] is incompatible with number [2] in +property `a`. - cross_module/app.js:8:19 - 8| export default fn((o: {a: number, b: number}) => {}); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + cross_module/use.js:5:5 + 5| app({a: 'foo', b: 2}); + ^^^^^^^^^^^^^^^^ References: - cross_module/app.js:8:27 - 8| export default fn((o: {a: number, b: number}) => {}); - ^^^^^^ [1] cross_module/use.js:5:9 5| app({a: 'foo', b: 2}); - ^^^^^ [2] + ^^^^^ [1] + cross_module/app.js:8:60 + 8| export default (fn((o: {a: number, b: number}) => {}): {|a:number, b:number|} => void); + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- error_messages.js:3:62 + +Cannot determine a type for object type [1]. object type [2] is inexact, so it may contain `c` with a type that +conflicts with `c`'s definition in object type [1]. Can you make object type [2] exact? + + error_messages.js:3:62 + 3| declare var x: {a: number, ...{| b: number |}, c: number, ...{d: number}}; // Error + ^^^^^^^^^^^ [2] + +References: + error_messages.js:3:16 + 3| declare var x: {a: number, ...{| b: number |}, c: number, ...{d: number}}; // Error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- error_messages.js:7:16 + +Cannot determine a type for object type [1]. object type [1] is inexact, so it may contain `b` with a type that +conflicts with `b`'s definition in object type [2]. Can you make object type [1] exact? 
+ + error_messages.js:7:16 + 7| declare var y: {...{a: number}, c: number, d: number, ...{| b?: number |}}; // Error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + error_messages.js:7:58 + 7| declare var y: {...{a: number}, c: number, d: number, ...{| b?: number |}}; // Error + ^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------ error_messages.js:13:49 + +Cannot determine a type for object type [1]. `B` [2] is inexact, so it may contain `c` with a type that conflicts with +`c`'s definition in object type [1]. Can you make `B` [2] exact? + + error_messages.js:13:49 + 13| declare var x2: {a: number, ...A, c: number, ...B}; // Error + ^ [2] + +References: + error_messages.js:13:17 + 13| declare var x2: {a: number, ...A, c: number, ...B}; // Error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------ error_messages.js:19:17 + +Cannot determine a type for object type [1]. object type [1] is inexact, so it may contain `b` with a type that +conflicts with `b`'s definition in `D` [2]. Can you make object type [1] exact? + + error_messages.js:19:17 + 19| declare var y2: {...C, c: number, d: number, ...D}; // Error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + error_messages.js:19:49 + 19| declare var y2: {...C, c: number, d: number, ...D}; // Error + ^ [2] + + +Error ------------------------------------------------------------------------------------------- error_messages.js:26:6 + +Cannot determine a type for object type [1]. object type [2] is inexact, so it may contain `d` with a type that +conflicts with `d`'s definition in object type [1]. Can you make object type [2] exact? + + error_messages.js:26:6 + 26| ...{b: number}, + ^^^^^^^^^^^ [2] + +References: + error_messages.js:23:17 + v--------------------------------------------- + 23| declare var x3: { // Error, but message could use improvement. + 24| ...{a: number}, + 25| d: number, + 26| ...{b: number}, + 27| e: number, + 28| ...{c: number}, + 29| f: number, + 30| }; + ^ [1] + + +Error ------------------------------------------------------------------------------------------ error_messages.js:33:27 + +Cannot determine a type for object type [1]. `B` [2] is inexact, so it may contain `b` with a type that conflicts with +`b`'s definition in `A` [3]. Can you make `B` [2] exact? + + error_messages.js:33:27 + 33| declare var x4: {...A, ...B, ...C, ...D}; // Error, representative of common case + ^ [2] + +References: + error_messages.js:33:17 + 33| declare var x4: {...A, ...B, ...C, ...D}; // Error, representative of common case + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] + error_messages.js:33:21 + 33| declare var x4: {...A, ...B, ...C, ...D}; // Error, representative of common case + ^ [3] + + +Error ------------------------------------------------------------------------------------------ error_messages.js:36:47 + +Cannot determine a type for object type [1]. `B` [2] is inexact, so it may contain `foo` with a type that conflicts with +`foo`'s definition in object type [1]. Can you make `B` [2] exact? 
+ + error_messages.js:36:47 + 36| declare var x5: {foo: number, bar: number, ...B}; // Error, representative of common case + ^ [2] + +References: + error_messages.js:36:17 + 36| declare var x5: {foo: number, bar: number, ...B}; // Error, representative of common case + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- no_interfaces.js:16:11 + +Cannot determine a type for object type [1]. `B` [2] cannot be spread because interfaces do not track the own-ness of +their properties. Can you use an object type instead? + + no_interfaces.js:16:11 + 16| spread(a, b); // Error, can't spread interface + ^ [2] + +References: + no_interfaces.js:9:4 + 9| ): {...A, ...B} { + ^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- no_interfaces.js:18:20 + +Cannot determine a type for `X` [1]. `B` [2] cannot be spread because interfaces do not track the own-ness of their +properties. Can you use an object type instead? + + no_interfaces.js:18:20 + 18| type X = {...A, ...B}; // Error, can't spread interface + ^ [2] + +References: + no_interfaces.js:18:10 + 18| type X = {...A, ...B}; // Error, can't spread interface + ^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- no_interfaces.js:23:14 + +Cannot determine a type for `Y` [1]. `A` [2] cannot be spread because interfaces do not track the own-ness of their +properties. Can you use an object type instead? + + no_interfaces.js:23:14 + 23| type Y = {...A, foo: number}; // Error, can't spread interface + ^ [2] + +References: + no_interfaces.js:23:10 + 23| type Y = {...A, foo: number}; // Error, can't spread interface + ^^^^^^^^^^^^^^^^^^^ [1] + +Error ------------------------------------------------------------------------------------------- no_interfaces.js:27:27 -Error ----------------------------------------------------------------------------------------------------- type.js:13:2 +Cannot determine a type for `Z` [1]. `A` [2] cannot be spread because interfaces do not track the own-ness of their +properties. Can you use an object type instead? + + no_interfaces.js:27:27 + 27| type Z = {foo: number, ...A}; // Error, can't spread interface + ^ [2] + +References: + no_interfaces.js:27:10 + 27| type Z = {foo: number, ...A}; // Error, can't spread interface + ^^^^^^^^^^^^^^^^^^^ [1] -Cannot cast `o1` to object type because undefined [1] is incompatible with `T` [2] in property `p`. - type.js:13:2 - 13| (o1: {p:T}); // error: o1.p is optional +Error ----------------------------------------------------------------------------------------------------- type.js:12:2 + +Cannot cast `o1` to object type because `T` [1] is incompatible with undefined [2] in property `p`. 
+ + type.js:12:2 + 12| (o1: {p?:T}); // ok ^^ References: type.js:10:18 10| type O1 = {...{p:T}}; ^ [1] - type.js:13:9 - 13| (o1: {p:T}); // error: o1.p is optional - ^ [2] + type.js:12:10 + 12| (o1: {p?:T}); // ok + ^ [2] Error ----------------------------------------------------------------------------------------------------- type.js:16:2 @@ -133,18 +309,21 @@ References: ^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- type.js:37:2 +Error ----------------------------------------------------------------------------------------------------- type.js:37:3 -Cannot cast object literal to `O4` because property `p` is missing in object literal [1] but exists in object type [2]. +Cannot cast object literal to `O4` because property `p` is missing in object type [1] but exists in object type [2]. - type.js:37:2 - 37| ({}: O4); // error: property `p` not found - ^^ [1] + type.js:37:3 + 37| (({}:{}): O4); // error: property `p` not found + ^^^^^ References: type.js:37:6 - 37| ({}: O4); // error: property `p` not found - ^^ [2] + 37| (({}:{}): O4); // error: property `p` not found + ^^ [1] + type.js:37:11 + 37| (({}:{}): O4); // error: property `p` not found + ^^ [2] Error ----------------------------------------------------------------------------------------------------- type.js:39:2 @@ -212,18 +391,21 @@ References: ^^ [1] -Error ----------------------------------------------------------------------------------------------------- type.js:56:2 +Error ----------------------------------------------------------------------------------------------------- type.js:56:3 -Cannot cast object literal to `O6` because property `p` is missing in object literal [1] but exists in `O6` [2]. +Cannot cast object literal to `O6` because property `p` is missing in object type [1] but exists in `O6` [2]. - type.js:56:2 - 56| ({}: O6); // error: property `p` not found - ^^ [1] + type.js:56:3 + 56| (({}:{}): O6); // error: property `p` not found + ^^^^^ References: type.js:56:6 - 56| ({}: O6); // error: property `p` not found - ^^ [2] + 56| (({}:{}): O6); // error: property `p` not found + ^^ [1] + type.js:56:11 + 56| (({}:{}): O6); // error: property `p` not found + ^^ [2] Error ----------------------------------------------------------------------------------------------------- type.js:57:2 @@ -274,55 +456,74 @@ References: ^ [2] -Error ----------------------------------------------------------------------------------------------------- type.js:69:2 +Error ----------------------------------------------------------------------------------------------------- type.js:74:2 -Cannot cast `o8.p` to `U` because `T` [1] is incompatible with `U` [2]. +Cannot cast `o9` to object type because `T` [1] is incompatible with undefined [2] in property `p`. - type.js:69:2 - 69| (o8.p: U); // error: T ~> U - ^^^^ + type.js:74:2 + 74| (o9: {p?:T,q:U}); + ^^ References: - type.js:65:19 - 65| type O8 = {...{|p:T|},...{p:U}}; - ^ [1] - type.js:69:8 - 69| (o8.p: U); // error: T ~> U - ^ [2] + type.js:72:18 + 72| type O9 = {...{p:T},...{|q:U|}}; + ^ [1] + type.js:74:10 + 74| (o9: {p?:T,q:U}); + ^ [2] -Error ----------------------------------------------------------------------------------------------------- type.js:75:2 +Error ---------------------------------------------------------------------------------------------------- type.js:79:27 -Cannot cast `o9.p` to `T` because undefined [1] is incompatible with `T` [2]. +Cannot determine a type for `O10` [1]. 
object type [2] is inexact, so it may contain `p` with a type that conflicts with +`p`'s definition in object type [3]. Can you make object type [2] exact? - type.js:75:2 - 75| (o9.p: T); // error: o9.p is optional - ^^^^ + type.js:79:27 + 79| type O10 = {...{|p:T|},...{q:U}}; // Error, p may exist in second object + ^^^^^ [2] References: - type.js:72:18 - 72| type O9 = {...{p:T},...{|q:U|}}; - ^ [1] - type.js:75:8 - 75| (o9.p: T); // error: o9.p is optional - ^ [2] + type.js:79:12 + 79| type O10 = {...{|p:T|},...{q:U}}; // Error, p may exist in second object + ^^^^^^^^^^^^^^^^^^^^^ [1] + type.js:79:16 + 79| type O10 = {...{|p:T|},...{q:U}}; // Error, p may exist in second object + ^^^^^^^ [3] + + +Error ---------------------------------------------------------------------------------------------------- type.js:84:25 + +Cannot determine a type for `O11` [1]. object type [2] is inexact, so it may contain `p` with a type that conflicts with +`p`'s definition in object type [3]. Can you make object type [2] exact? + type.js:84:25 + 84| type O11 = {...{p:T},...{q:U}}; // Error, p may exist in second object + ^^^^^ [2] + +References: + type.js:84:12 + 84| type O11 = {...{p:T},...{q:U}}; // Error, p may exist in second object + ^^^^^^^^^^^^^^^^^^^ [1] + type.js:84:16 + 84| type O11 = {...{p:T},...{q:U}}; // Error, p may exist in second object + ^^^^^ [3] -Error ---------------------------------------------------------------------------------------------------- type.js:101:2 -Cannot cast `o14` to object type because undefined [1] is incompatible with `T` [2] in property `p`. +Error ---------------------------------------------------------------------------------------------------- type.js:102:2 - type.js:101:2 - 101| (o14: {p:T}); // error: `p` is optional +Cannot cast `o14` to object type because `T` [1] is incompatible with undefined [2] in property `p`. + + type.js:102:2 + 102| (o14: {p?:T}); // ok ^^^ References: type.js:99:24 99| type O14 = {...{...{|p:T|}}}; ^ [1] - type.js:101:10 - 101| (o14: {p:T}); // error: `p` is optional - ^ [2] + type.js:102:11 + 102| (o14: {p?:T}); // ok + ^ [2] Error ---------------------------------------------------------------------------------------------------- type.js:105:2 @@ -361,9 +562,7 @@ References: Error ----------------------------------------------------------------------------------------------- type_contra.js:7:2 -Cannot cast `o1.p` to `T` because: - - property `p` of unknown type [1] is incompatible with `T` [2]. - - undefined [1] is incompatible with `T` [2]. +Cannot cast `o1.p` to `T` because property `p` of unknown type [1] is incompatible with `T` [2]. type_contra.js:7:2 7| (o1.p: T); // errors: undefined ~> T, unknown ~> T @@ -413,6 +612,61 @@ References: ^ [2] +Error ---------------------------------------------------------------------------------------------- type_contra.js:18:2 + +Cannot cast `o3` to object type because property `p` of unknown type [1] is incompatible with `T` [2] in property `p`. + + type_contra.js:18:2 + 18| (o3: {p:T}); // error: unknown ~> T + ^^ + +References: + type_contra.js:15:11 + 15| type O3 = {...{||}, -p: T}; + ^^^^^^^^^^^^^^^^ [1] + type_contra.js:18:9 + 18| (o3: {p:T}); // error: unknown ~> T + ^ [2] + + +Error ---------------------------------------------------------------------------------------------- type_contra.js:19:2 + +Cannot cast `o3.p` to `T` because property `p` of unknown type [1] is incompatible with `T` [2]. 
+ + type_contra.js:19:2 + 19| (o3.p: T); // errors: unknown ~> T + ^^^^ + +References: + type_contra.js:15:11 + 15| type O3 = {...{||}, -p: T}; + ^^^^^^^^^^^^^^^^ [1] + type_contra.js:19:8 + 19| (o3.p: T); // errors: unknown ~> T + ^ [2] + + +Error ------------------------------------------------------------------------------------------------ type_dict.js:4:37 + +Cannot determine a type for object type [1]. object type [2] is inexact and may have a property key that conflicts with +string [3] or a property value that conflicts with `T` [4]. Can you make object type [2] exact? + + type_dict.js:4:37 + 4| declare var o1: {...{[string]:T},...{p:U}}; // Error, can't spread because inexact may clash with T + ^^^^^ [2] + +References: + type_dict.js:4:17 + 4| declare var o1: {...{[string]:T},...{p:U}}; // Error, can't spread because inexact may clash with T + ^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + type_dict.js:4:23 + 4| declare var o1: {...{[string]:T},...{p:U}}; // Error, can't spread because inexact may clash with T + ^^^^^^ [3] + type_dict.js:4:31 + 4| declare var o1: {...{[string]:T},...{p:U}}; // Error, can't spread because inexact may clash with T + ^ [4] + + Error ------------------------------------------------------------------------------------------------ type_empty.js:5:2 Cannot cast `42` to `O` because number [1] is incompatible with empty [2]. @@ -484,6 +738,80 @@ References: ^^^^^ [4] +Error --------------------------------------------------------------------------------------------- type_instance.js:6:2 + +Cannot cast `o1` to object type because: + - number [1] is incompatible with undefined [2] in property `p`. + - in property `p`: + - Either undefined [2] is incompatible with string [3]. + - Or undefined [2] is incompatible with number [4]. + + type_instance.js:6:2 + 6| (o1: {p?:number}); // Error + ^^ + +References: + type_instance.js:2:23 + 2| class B extends A {p: number} + ^^^^^^ [1] + type_instance.js:6:10 + 6| (o1: {p?:number}); // Error + ^^^^^^ [2] + type_instance.js:1:14 + 1| class A {+p: string|number} + ^^^^^^ [3] + type_instance.js:1:21 + 1| class A {+p: string|number} + ^^^^^^ [4] + + +Error ----------------------------------------------------------------------------------------- type_intersection.js:9:2 + +Cannot cast `o1` to object type because: + - `T` [1] is incompatible with undefined [2] in property `p`. + - `U` [3] is incompatible with undefined [4] in property `q`. + + type_intersection.js:9:2 + 9| (o1: {p?:T,q?:U}); // ok + ^^ + +References: + type_intersection.js:7:18 + 7| type O1 = {...{p:T}&{q:U}}; + ^ [1] + type_intersection.js:9:10 + 9| (o1: {p?:T,q?:U}); // ok + ^ [2] + type_intersection.js:7:24 + 7| type O1 = {...{p:T}&{q:U}}; + ^ [3] + type_intersection.js:9:15 + 9| (o1: {p?:T,q?:U}); // ok + ^ [4] + + +Error ---------------------------------------------------------------------------------------- type_intersection.js:13:2 + +Cannot cast `o2` to object type because: + - `A` [1] is incompatible with undefined [2] in property `p`. + - `B` [3] is incompatible with undefined [2] in property `p`. 
+ + type_intersection.js:13:2 + 13| (o2: {p?:B}); // ok + ^^ + +References: + type_intersection.js:11:18 + 11| type O2 = {...{p:A}&{p:B}}; + ^ [1] + type_intersection.js:13:10 + 13| (o2: {p?:B}); // ok + ^ [2] + type_intersection.js:11:24 + 11| type O2 = {...{p:A}&{p:B}}; + ^ [3] + + Error ---------------------------------------------------------------------------------------- type_intersection.js:15:2 Cannot cast object literal to `O2` because `A` [1] is incompatible with `B` [2] in property `p`. @@ -501,38 +829,174 @@ References: ^ [2] +Error -------------------------------------------------------------------------------- type_intersection_optional.js:5:2 + +Cannot cast `o1` to object type because: + - `T` [1] is incompatible with undefined [2] in property `p`. + - `U` [3] is incompatible with undefined [2] in property `p`. + + type_intersection_optional.js:5:2 + 5| (o1: {p?:T&U}); // ok + ^^ + +References: + type_intersection_optional.js:4:24 + 4| declare var o1: {...{p:T}&{p:U}}; + ^ [1] + type_intersection_optional.js:5:10 + 5| (o1: {p?:T&U}); // ok + ^^^ [2] + type_intersection_optional.js:4:30 + 4| declare var o1: {...{p:T}&{p:U}}; + ^ [3] + + +Error -------------------------------------------------------------------------------- type_intersection_optional.js:8:2 + +Cannot cast `o2` to object type because: + - `T` [1] is incompatible with undefined [2] in property `p`. + - `U` [3] is incompatible with undefined [2] in property `p`. + + type_intersection_optional.js:8:2 + 8| (o2: {p?:T&U}); // ok + ^^ + +References: + type_intersection_optional.js:7:25 + 7| declare var o2: {...{p?:T}&{p:U}}; + ^ [1] + type_intersection_optional.js:8:10 + 8| (o2: {p?:T&U}); // ok + ^^^ [2] + type_intersection_optional.js:7:31 + 7| declare var o2: {...{p?:T}&{p:U}}; + ^ [3] + + +Error ------------------------------------------------------------------------------- type_intersection_optional.js:11:2 + +Cannot cast `o3` to object type because: + - `T` [1] is incompatible with undefined [2] in property `p`. + - `U` [3] is incompatible with undefined [2] in property `p`. + + type_intersection_optional.js:11:2 + 11| (o3: {p?:T&U}); // ok + ^^ + +References: + type_intersection_optional.js:10:24 + 10| declare var o3: {...{p:T}&{p?:U}}; + ^ [1] + type_intersection_optional.js:11:10 + 11| (o3: {p?:T&U}); // ok + ^^^ [2] + type_intersection_optional.js:10:31 + 10| declare var o3: {...{p:T}&{p?:U}}; + ^ [3] + + +Error ------------------------------------------------------------------------------- type_intersection_optional.js:53:2 + +Cannot cast `o17` to object type because: + - `T` [1] is incompatible with undefined [2] in property `p`. + - `U` [3] is incompatible with undefined [4] in property `q`. + + type_intersection_optional.js:53:2 + 53| (o17: {p?:T,q?:U}); // ok + ^^^ + +References: + type_intersection_optional.js:52:25 + 52| declare var o17: {...{p:T}&{q:U}}; + ^ [1] + type_intersection_optional.js:53:11 + 53| (o17: {p?:T,q?:U}); // ok + ^ [2] + type_intersection_optional.js:52:31 + 52| declare var o17: {...{p:T}&{q:U}}; + ^ [3] + type_intersection_optional.js:53:16 + 53| (o17: {p?:T,q?:U}); // ok + ^ [4] + + +Error ------------------------------------------------------------------------------- type_intersection_optional.js:56:2 + +Cannot cast `o18` to object type because `U` [1] is incompatible with undefined [2] in property `q`. 
+ + type_intersection_optional.js:56:2 + 56| (o18: {p?:T,q?:U}); // ok + ^^^ + +References: + type_intersection_optional.js:55:32 + 55| declare var o18: {...{p?:T}&{q:U}}; + ^ [1] + type_intersection_optional.js:56:16 + 56| (o18: {p?:T,q?:U}); // ok + ^ [2] + + +Error ------------------------------------------------------------------------------- type_intersection_optional.js:59:2 + +Cannot cast `o19` to object type because `T` [1] is incompatible with undefined [2] in property `p`. + + type_intersection_optional.js:59:2 + 59| (o19: {p?:T,q?:U}); // ok + ^^^ + +References: + type_intersection_optional.js:58:25 + 58| declare var o19: {...{p:T}&{q?:U}}; + ^ [1] + type_intersection_optional.js:59:11 + 59| (o19: {p?:T,q?:U}); // ok + ^ [2] + + +Error ------------------------------------------------------------------------------- type_intersection_optional.js:65:2 + +Cannot cast `o21` to object type because `U` [1] is incompatible with undefined [2] in property `q`. + + type_intersection_optional.js:65:2 + 65| (o21: {p:T,q?:U}); // ok + ^^^ + +References: + type_intersection_optional.js:64:33 + 64| declare var o21: {...{|p:T|}&{q:U}}; + ^ [1] + type_intersection_optional.js:65:15 + 65| (o21: {p:T,q?:U}); // ok + ^ [2] + + Error ------------------------------------------------------------------------------------------------ type_mixed.js:5:2 -Cannot cast `o1.someProp` to empty because mixed [1] is incompatible with empty [2]. +Cannot get `o1.someProp` because property `someProp` is missing in `O1` [1]. type_mixed.js:5:2 - 5| (o1.someProp: empty); // Error: mixed ~> empty + 5| (o1.someProp: empty); // Error someProp does not exist ^^^^^^^^^^^ References: - type_mixed.js:3:15 - 3| type O1 = {...mixed}; - ^^^^^ [1] - type_mixed.js:5:15 - 5| (o1.someProp: empty); // Error: mixed ~> empty - ^^^^^ [2] + type_mixed.js:4:17 + 4| declare var o1: O1; + ^^ [1] Error ------------------------------------------------------------------------------------------------ type_mixed.js:9:2 -Cannot cast `o2.someProp` to empty because mixed [1] is incompatible with empty [2]. +Cannot get `o2.someProp` because property `someProp` is missing in `O2` [1]. type_mixed.js:9:2 - 9| (o2.someProp: empty); // Error: mixed ~> empty + 9| (o2.someProp: empty); // Error someProp does not exist ^^^^^^^^^^^ References: - type_mixed.js:7:16 - 7| type O2 = {|...mixed|}; - ^^^^^ [1] - type_mixed.js:9:15 - 9| (o2.someProp: empty); // Error: mixed ~> empty - ^^^^^ [2] + type_mixed.js:8:17 + 8| declare var o2: O2; + ^^ [1] Error ----------------------------------------------------------------------------------------------- type_mixed.js:13:2 @@ -555,61 +1019,65 @@ References: Error ----------------------------------------------------------------------------------------------- type_mixed.js:14:2 Cannot cast `o3.b` to empty because: + - number [1] is incompatible with empty [2]. - undefined [1] is incompatible with empty [2]. - - mixed [3] is incompatible with empty [2]. - - number [4] is incompatible with empty [2]. 
type_mixed.js:14:2 14| (o3.b: empty); // Error: mixed ~> empty, number ~> empty, and undefined ~> empty ^^^^ References: - type_mixed.js:11:11 + type_mixed.js:11:42 11| type O3 = {...mixed, ...{|a: number, b?: number|}}; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + ^^^^^^ [1] type_mixed.js:14:8 14| (o3.b: empty); // Error: mixed ~> empty, number ~> empty, and undefined ~> empty ^^^^^ [2] - type_mixed.js:11:15 - 11| type O3 = {...mixed, ...{|a: number, b?: number|}}; - ^^^^^ [3] - type_mixed.js:11:42 - 11| type O3 = {...mixed, ...{|a: number, b?: number|}}; - ^^^^^^ [4] Error ----------------------------------------------------------------------------------------------- type_mixed.js:15:2 -Cannot cast `o3.c` to empty because mixed [1] is incompatible with empty [2]. +Cannot get `o3.c` because property `c` is missing in `O3` [1]. type_mixed.js:15:2 - 15| (o3.c: empty); // Error: mixed ~> empty + 15| (o3.c: empty); // Error c does not exist ^^^^ References: - type_mixed.js:11:15 - 11| type O3 = {...mixed, ...{|a: number, b?: number|}}; - ^^^^^ [1] - type_mixed.js:15:8 - 15| (o3.c: empty); // Error: mixed ~> empty - ^^^^^ [2] + type_mixed.js:12:17 + 12| declare var o3: O3; + ^^ [1] Error ----------------------------------------------------------------------------------------------- type_mixed.js:18:4 -Cannot cast `obj.someProp` to empty because `O` [1] is incompatible with empty [2]. +Cannot get `obj.someProp` because property `someProp` is missing in object type [1]. type_mixed.js:18:4 - 18| (obj.someProp: empty); // Error: mixed ~> empty + 18| (obj.someProp: empty); // Error someProp does not exist ^^^^^^^^^^^^ References: - type_mixed.js:17:25 + type_mixed.js:17:21 17| function fn(obj: {...O}) { - ^ [1] - type_mixed.js:18:18 - 18| (obj.someProp: empty); // Error: mixed ~> empty - ^^^^^ [2] + ^^^^^^ [1] + + +Error ---------------------------------------------------------------------------------------------- type_statics.js:5:2 + +Cannot cast `o1` to object type because number [1] is incompatible with undefined [2] in property `p`. + + type_statics.js:5:2 + 5| (o1: {p?:number}); // ok + ^^ + +References: + type_statics.js:1:20 + 1| class A {static p: number} + ^^^^^^ [1] + type_statics.js:5:10 + 5| (o1: {p?:number}); // ok + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------- type_string.js:1:15 @@ -620,6 +1088,97 @@ string literal `foo` [1] is not an object. ^^^^^ [1] +Error ------------------------------------------------------------------------------------------------ type_union.js:9:2 + +Cannot cast `o1` to union type because: + - `T` [1] is incompatible with undefined [2] in property `p`. + - `U` [3] is incompatible with undefined [4] in property `q`. + + type_union.js:9:2 + 9| (o1: {p?:T}|{q?:U}); // ok + ^^ + +References: + type_union.js:7:18 + 7| type O1 = {...{p:T}|{q:U}}; + ^ [1] + type_union.js:9:10 + 9| (o1: {p?:T}|{q?:U}); // ok + ^ [2] + type_union.js:7:24 + 7| type O1 = {...{p:T}|{q:U}}; + ^ [3] + type_union.js:9:17 + 9| (o1: {p?:T}|{q?:U}); // ok + ^ [4] + + +Error ---------------------------------------------------------------------------------------------- type_union.js:14:32 + +Cannot determine a type for object type [1]. object type [2] cannot be spread because the indexer number [3] may +overwrite properties with explicit keys in a way that Flow cannot track. Can you spread object type [2] first or remove +the indexer? 
+ + type_union.js:14:32 + 14| declare var x1: {...Union1, ...Union2}; // Error, indexer on right + ^^^^^^ [2] + +References: + type_union.js:14:17 + 14| declare var x1: {...Union1, ...Union2}; // Error, indexer on right + ^^^^^^^^^^^^^^^^^^^^^^ [1] + type_union.js:12:39 + 12| type Union2 = {| bar: number |} | {| [number]: string |}; + ^^^^^^ [3] + + +Error ---------------------------------------------------------------------------------------------- type_union.js:17:32 + +Cannot determine a type for object type [1]. object type [2] is inexact, so it may contain `foo` with a type that +conflicts with `foo`'s definition in object type [3]. Can you make object type [2] exact? + + type_union.js:17:32 + 17| declare var x5: {...Union1, ...{}} // Error, spreading {} overwrites indexer + ^^ [2] + +References: + type_union.js:17:17 + 17| declare var x5: {...Union1, ...{}} // Error, spreading {} overwrites indexer + ^^^^^^^^^^^^^^^^^^ [1] + type_union.js:17:21 + 17| declare var x5: {...Union1, ...{}} // Error, spreading {} overwrites indexer + ^^^^^^ [3] + + +Error ----------------------------------------------------------------------------------------------- type_union.js:20:9 + +Cannot assign object literal to `y` because object literal [1] is incompatible with `U` [2]. + + type_union.js:20:9 + 20| var y = {}; // unsealed + ^^ [1] + +References: + type_union.js:5:16 + 5| declare var y: U; + ^ [2] + + +Error ---------------------------------------------------------------------------------------------- type_union.js:37:21 + +Cannot determine a type for object type [1]. `I1` [2] cannot be spread because interfaces do not track the own-ness of +their properties. Can you use an object type instead? + + type_union.js:37:21 + 37| declare var x4: {...Union5}; // Error, cannot spread interface + ^^^^^^ [2] + +References: + type_union.js:37:17 + 37| declare var x4: {...Union5}; // Error, cannot spread interface + ^^^^^^^^^^^ [1] + + Error ------------------------------------------------------------------------------------------------ type_void.js:12:2 Cannot cast `o1` to object type because inexact `O1` [1] is incompatible with exact object type [2]. @@ -778,21 +1337,21 @@ References: ^^ [1] -Error ------------------------------------------------------------------------------------------------ type_void.js:44:2 +Error ------------------------------------------------------------------------------------------------ type_void.js:43:2 -Cannot cast `o5` to object type because undefined [1] is incompatible with `T` [2] in property `p`. +Cannot cast `o5` to object type because `T` [1] is incompatible with undefined [2] in property `p`. - type_void.js:44:2 - 44| (o5: {p:T}); // error: o5.p is optional + type_void.js:43:2 + 43| (o5: {p?:T}); // ok ^^ References: type_void.js:41:27 41| type O5 = {...void, ...{p:T}}; ^ [1] - type_void.js:44:9 - 44| (o5: {p:T}); // error: o5.p is optional - ^ [2] + type_void.js:43:10 + 43| (o5: {p?:T}); // ok + ^ [2] Error ------------------------------------------------------------------------------------------------ type_void.js:47:2 @@ -812,21 +1371,21 @@ References: ^ [2] -Error ------------------------------------------------------------------------------------------------ type_void.js:53:2 +Error ------------------------------------------------------------------------------------------------ type_void.js:52:2 -Cannot cast `o6` to object type because undefined [1] is incompatible with `T` [2] in property `p`. 
+Cannot cast `o6` to object type because `T` [1] is incompatible with undefined [2] in property `p`. - type_void.js:53:2 - 53| (o6: {p:T}); // error: o6.p is optional + type_void.js:52:2 + 52| (o6: {p?:T}); // error, void doesn't overwrite p ^^ References: type_void.js:50:18 50| type O6 = {...{p:T}, ...void}; ^ [1] - type_void.js:53:9 - 53| (o6: {p:T}); // error: o6.p is optional - ^ [2] + type_void.js:52:10 + 52| (o6: {p?:T}); // error, void doesn't overwrite p + ^ [2] Error ------------------------------------------------------------------------------------------------ type_void.js:56:2 @@ -847,4 +1406,7 @@ References: -Found 55 errors +Found 92 errors + +Only showing the most relevant union/intersection branches. +To see all branches, re-run Flow with --show-all-branches diff --git a/tests/new_spread/no_interfaces.js b/tests/new_spread/no_interfaces.js new file mode 100644 index 00000000000..299f6a7a3c3 --- /dev/null +++ b/tests/new_spread/no_interfaces.js @@ -0,0 +1,35 @@ +//@flow + +interface A {} +interface B {} + +function spread( + x: A, + y: B, +): {...A, ...B} { + return (null: any); +} + +declare var a: A; +declare var b: B; + +spread(a, b); // Error, can't spread interface + +type X = {...A, ...B}; // Error, can't spread interface + +declare var x: X; +(x: any); + +type Y = {...A, foo: number}; // Error, can't spread interface +declare var y: Y; +(y: any); + +type Z = {foo: number, ...A}; // Error, can't spread interface +declare var z: Z; +(z: any); + +// Instances and classes can be spread: +class F {} +type G = {...F, ...Class}; // Ok +declare var g: G; +(g: any); diff --git a/tests/new_spread/type.js b/tests/new_spread/type.js index bd97f43e446..229b40d73f7 100644 --- a/tests/new_spread/type.js +++ b/tests/new_spread/type.js @@ -34,7 +34,7 @@ type O4 = {...{|p:T|}}; declare var o4: O4; (o4: {p:T}); // ok (o4: {|p:T|}); // error: not exact -({}: O4); // error: property `p` not found +(({}:{}): O4); // error: property `p` not found ({p:x}: O4); // ok ({p:y}: O4); // error: y ~> T ({p:x,q:y}: O4); // ok @@ -53,7 +53,7 @@ declare var o5: O5; type O6 = {...{p:T},...{|p:U|}}; declare var o6: O6; (o6: {p:U}); // ok -({}: O6); // error: property `p` not found +(({}:{}): O6); // error: property `p` not found ({p:x}: O6); // error: x ~> U ({p:y}: O6); // ok ({p:y,q:x}: O6); // ok @@ -64,9 +64,9 @@ type O7 = {|...{p:T},...{|p:U|}|}; ({p:y}: O7);// error: spread result is not ex // exact p + inexact p type O8 = {...{|p:T|},...{p:U}}; declare var o8: O8; -(o8: {p:T|U}); // ok +(o8: {p:U}); // ok (o8.p: T); // error: U ~> T -(o8.p: U); // error: T ~> U + // inexact p + exact q type O9 = {...{p:T},...{|q:U|}}; @@ -76,14 +76,14 @@ declare var o9: O9; (o9.q: U); // ok // exact p + inexact q -type O10 = {...{|p:T|},...{q:U}}; +type O10 = {...{|p:T|},...{q:U}}; // Error, p may exist in second object declare var o10: O10; -(o10: {p:mixed, q?: U}); // ok +(o10: {p:any, q: any}); // inexact p + inexact q -type O11 = {...{p:T},...{q:U}}; +type O11 = {...{p:T},...{q:U}}; // Error, p may exist in second object declare var o11: O11; -(o11: {p:mixed, q: mixed}); // ok +(o11: {p:any, q: any}); // Error // exact + exact type O12 = {...{|p:T|},...{|q:U|}}; diff --git a/tests/new_spread/type_contra.js b/tests/new_spread/type_contra.js index b6ffd4b6640..5bfaaf897d3 100644 --- a/tests/new_spread/type_contra.js +++ b/tests/new_spread/type_contra.js @@ -11,3 +11,9 @@ declare var o2: O2; (o2: {[string]:mixed}); // ok (o2: {[string]:T}); // error: unknown ~> T (o2.p: T); // errors: unknown ~> T + +type O3 = 
{...{||}, -p: T}; +declare var o3: O3; +(o3: {p:mixed}); // ok +(o3: {p:T}); // error: unknown ~> T +(o3.p: T); // errors: unknown ~> T diff --git a/tests/new_spread/type_dict.js b/tests/new_spread/type_dict.js index ccbfe0cc1c2..2f782ea3e14 100644 --- a/tests/new_spread/type_dict.js +++ b/tests/new_spread/type_dict.js @@ -1,38 +1,6 @@ declare class T {} declare class U {} -declare var o1: {...{[string]:T},...{p:U}}; -(o1: {p?:T|U,[string]:T}); // ok +declare var o1: {...{[string]:T},...{p:U}}; // Error, can't spread because inexact may clash with T +(o1: {p?:T|U,[string]:T}); -declare var o2: {...{p:T},...{[string]:U}}; -(o2: {p?:T|U,[string]:U}); // ok - -declare var o3: {...{[string]:T},...{[string]:U}}; -(o3: {[string]:T|U}); // ok - -declare var o4: {...{|[string]:T|},...{p:U}}; -(o4: {p?:T|U,[string]:T}); // ok - -declare var o5: {...{|p:T|},...{[string]:U}}; -(o5: {p:T|U,[string]:U}); // ok - -declare var o6: {...{|[string]:T|},...{[string]:U}}; -(o6: {[string]:T|U}); // ok - -declare var o7: {...{[string]:T},...{|p:U|}}; -(o7: {p:U,[string]:T}); // ok - -declare var o8: {...{p:T},...{|[string]:U|}}; -(o8: {p?:T|U,[string]:U}); // ok - -declare var o9: {...{[string]:T},...{|[string]:U|}}; -(o9: {[string]:T|U}); // ok - -declare var o10: {|...{|[string]:T|},...{|p:U|}|}; -(o10: {|p:U,[string]:T|}); // ok - -declare var o11: {|...{|p :T|},...{|[string]:U|}|}; -(o11: {|p:T|U,[string]:U|}); // ok - -declare var o12: {|...{|[string]:T|},...{|[string]:U|}|}; -(o12: {|[string]:T|U|}); // ok diff --git a/tests/new_spread/type_instance.js b/tests/new_spread/type_instance.js index dd8d4f37d1b..d15b8dca234 100644 --- a/tests/new_spread/type_instance.js +++ b/tests/new_spread/type_instance.js @@ -3,7 +3,7 @@ class B extends A {p: number} type O1 = {...B}; declare var o1: O1; -(o1: {p?:number}); // ok +(o1: {p?:number}); // Error declare class C {[string]:number} type O2 = {...C}; diff --git a/tests/new_spread/type_mixed.js b/tests/new_spread/type_mixed.js index 0513513cb3c..bfa7d4de7f9 100644 --- a/tests/new_spread/type_mixed.js +++ b/tests/new_spread/type_mixed.js @@ -2,18 +2,18 @@ type O1 = {...mixed}; declare var o1: O1; -(o1.someProp: empty); // Error: mixed ~> empty +(o1.someProp: empty); // Error someProp does not exist type O2 = {|...mixed|}; declare var o2: O2; -(o2.someProp: empty); // Error: mixed ~> empty +(o2.someProp: empty); // Error someProp does not exist type O3 = {...mixed, ...{|a: number, b?: number|}}; declare var o3: O3; (o3.a: empty); // Error: number ~> empty (o3.b: empty); // Error: mixed ~> empty, number ~> empty, and undefined ~> empty -(o3.c: empty); // Error: mixed ~> empty +(o3.c: empty); // Error c does not exist function fn(obj: {...O}) { - (obj.someProp: empty); // Error: mixed ~> empty + (obj.someProp: empty); // Error someProp does not exist } diff --git a/tests/new_spread/type_optional.js b/tests/new_spread/type_optional.js index d96da19500f..148d1966afa 100644 --- a/tests/new_spread/type_optional.js +++ b/tests/new_spread/type_optional.js @@ -1,19 +1,19 @@ declare class T {} declare class U {} -declare var a: {...{ p :T },...{ p :U }}; (a: { p?:T|U }); -declare var b: {...{ p?:T },...{ p :U }}; (b: { p?:T|U }); -declare var c: {...{ p :T },...{ p?:U }}; (c: { p?:T|U }); +declare var a: {...{ p :T },...{ p :U }}; (a: { p:U }); +declare var b: {...{ p?:T },...{ p :U }}; (b: { p:U }); +declare var c: {...{ p :T },...{ p?:U }}; (c: { p:T|U }); declare var d: {...{ p?:T },...{ p?:U }}; (d: { p?:T|U }); -declare var e: {...{|p :T|},...{ p :U }}; (e: { p :T|U }); 
-declare var f: {...{|p?:T|},...{ p :U }}; (f: { p?:T|U }); +declare var e: {...{|p :T|},...{ p :U }}; (e: { p :U }); +declare var f: {...{|p?:T|},...{ p :U }}; (f: { p:U }); declare var g: {...{|p :T|},...{ p?:U }}; (g: { p :T|U }); declare var h: {...{|p?:T|},...{ p?:U }}; (h: { p?:T|U }); declare var i: {...{ p :T },...{|p :U|}}; (i: { p : U }); declare var j: {...{ p?:T },...{|p :U|}}; (j: { p : U }); -declare var k: {...{ p :T },...{|p?:U|}}; (k: { p?:T|U }); +declare var k: {...{ p :T },...{|p?:U|}}; (k: { p:T|U }); declare var l: {...{ p?:T },...{|p?:U|}}; (l: { p?:T|U }); declare var m: {|...{|p :T|},...{|p :U|}|}; (m: {|p : U|}); diff --git a/tests/new_spread/type_union.js b/tests/new_spread/type_union.js index 21a8619e582..3bbd5fb92b2 100644 --- a/tests/new_spread/type_union.js +++ b/tests/new_spread/type_union.js @@ -7,3 +7,32 @@ declare var y: U; type O1 = {...{p:T}|{q:U}}; declare var o1: O1; (o1: {p?:T}|{q?:U}); // ok + +type Union1 = {| [string]: number |} | {| foo: number |} +type Union2 = {| bar: number |} | {| [number]: string |}; + +declare var x1: {...Union1, ...Union2}; // Error, indexer on right +(x1: {}); + +declare var x5: {...Union1, ...{}} // Error, spreading {} overwrites indexer +(x5: {}); + +var y = {}; // unsealed + +type UnsealedInUnion = Union1 | Union2 | typeof y; +declare var x2: {...UnsealedInUnion}; // Error, unsealed +(x2: {}); + +type Union3 = {| foo: number |} | {| bar: number |}; +type Union4 = {| baz: number |} | {| qux: number |}; +declare var x3: {| ...Union3, ...Union4 |}; +(x3: {| foo: number, baz: number|} // Should consider erroring instead of calculating combinatorial blowup + | {| foo: number, qux: number|} + | {| bar: number, baz: number|} + | {| bar: number, qux: number|} +); + +interface I1 {} +type Union5 = I1 | Union3 | Union4; +declare var x4: {...Union5}; // Error, cannot spread interface +(x4: {}); diff --git a/tests/new_spread/type_void.js b/tests/new_spread/type_void.js index 14b97aa45dd..aa364b7e7fb 100644 --- a/tests/new_spread/type_void.js +++ b/tests/new_spread/type_void.js @@ -49,8 +49,8 @@ declare var o5: O5; type O6 = {...{p:T}, ...void}; declare var o6: O6; -(o6: {p?:T}); // ok -(o6: {p:T}); // error: o6.p is optional +(o6: {p?:T}); // error, void doesn't overwrite p +(o6: {p:T}); // ok ({}: O6); // ok ({p:x}: O6); // ok ({p:y}: O6); // error: y ~> T diff --git a/tests/node_modules_with_bad_package_format/.flowconfig b/tests/node_modules_with_bad_package_format/.flowconfig new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/node_modules_with_bad_package_format/node_modules/bar/index.js b/tests/node_modules_with_bad_package_format/node_modules/bar/index.js new file mode 100644 index 00000000000..6fe209d6bc4 --- /dev/null +++ b/tests/node_modules_with_bad_package_format/node_modules/bar/index.js @@ -0,0 +1,3 @@ +// @flow + +module.exports = {}; diff --git a/tests/node_modules_with_bad_package_format/node_modules/bar/package.json b/tests/node_modules_with_bad_package_format/node_modules/bar/package.json new file mode 100644 index 00000000000..20b4bb04cc9 --- /dev/null +++ b/tests/node_modules_with_bad_package_format/node_modules/bar/package.json @@ -0,0 +1 @@ +["not an object"] diff --git a/tests/node_modules_with_bad_package_format/node_modules/foo/package.json b/tests/node_modules_with_bad_package_format/node_modules/foo/package.json new file mode 100644 index 00000000000..20b4bb04cc9 --- /dev/null +++ b/tests/node_modules_with_bad_package_format/node_modules/foo/package.json @@ -0,0 +1 @@ +["not an 
object"] diff --git a/tests/node_modules_with_bad_package_format/node_modules_with_bad_package_format.exp b/tests/node_modules_with_bad_package_format/node_modules_with_bad_package_format.exp new file mode 100644 index 00000000000..52578b75643 --- /dev/null +++ b/tests/node_modules_with_bad_package_format/node_modules_with_bad_package_format.exp @@ -0,0 +1,26 @@ +Error -------------------------------------------------------------------------------- node_modules/bar/package.json:1:1 + +Expected an object literal + + 1| ["not an object"] + ^^^^^^^^^^^^^^^^^ + + +Error -------------------------------------------------------------------------------- node_modules/foo/package.json:1:1 + +Expected an object literal + + 1| ["not an object"] + ^^^^^^^^^^^^^^^^^ + + +Error ------------------------------------------------------------------------------------------------------ test.js:8:9 + +Cannot resolve module `foo`. + + 8| require('foo'); + ^^^^^ + + + +Found 3 errors diff --git a/tests/node_modules_with_bad_package_format/test.js b/tests/node_modules_with_bad_package_format/test.js new file mode 100644 index 00000000000..4f384232a06 --- /dev/null +++ b/tests/node_modules_with_bad_package_format/test.js @@ -0,0 +1,8 @@ +// @flow + +// resolves to node_modules/bar/index.js even though bar/package.json is invalid +require('bar'); + +// can't be resolved because node_modules/foo/package.json is invalid and there +// is no default "main" file (index.js) +require('foo'); diff --git a/tests/node_modules_without_json/node_modules_without_json.exp b/tests/node_modules_without_json/node_modules_without_json.exp index a40891c958a..b3f39ec1116 100644 --- a/tests/node_modules_without_json/node_modules_without_json.exp +++ b/tests/node_modules_without_json/node_modules_without_json.exp @@ -1,6 +1,6 @@ Error -------------------------------------------------------------------------------- node_modules/bar/package.json:1:1 -Unexpected identifier +Unexpected identifier, expected a valid JSON value 1| This invalid JSON file should still be picked up even though .json isn't in ^^^^ diff --git a/tests/node_tests/dns/dns.js b/tests/node_tests/dns/dns.js new file mode 100644 index 00000000000..4428d973c04 --- /dev/null +++ b/tests/node_tests/dns/dns.js @@ -0,0 +1,33 @@ +var dns = require("dns"); + +/* lookup */ + +dns.lookup("test.com", (err, address, family) => { + (err: ?Error); + (address: string); + (family: number); +}); + +dns.lookup("test.com", 6, (err, address, family) => { + (err: ?Error); + (address: string); + (family: number); +}); + +dns.lookup("test.com", { family: 6 }, (err, address, family) => { + (err: ?Error); + (address: string); + (family: number); +}); + +dns.lookup(); // error + +dns.lookup("test.com"); // error + +dns.lookup("test.com", 4); // error + +dns.lookup("test.com", { family: 6 }); // error + +dns.lookup("test.com", null, (err, address, family) => {}); // error + +dns.lookup((err, address, family) => {}); // error diff --git a/tests/node_tests/events/events.js b/tests/node_tests/events/events.js new file mode 100644 index 00000000000..1b1148c2dc9 --- /dev/null +++ b/tests/node_tests/events/events.js @@ -0,0 +1,67 @@ +/* @flow */ + +const EventEmitter = require('events'); +const emitter = new EventEmitter(); +const noop = function() {} + +emitter.addListener('foo', noop); // ok +emitter.addListener('bar', noop).addListener('baz', noop); // ok: supports chaining +emitter.addListener(); // err: both args are required +emitter.addListener(123, {}); // err: `event` and `handler `type mismatch 
+ +emitter.emit('foo', 'bar', {}, [], noop); // ok: emits `foo` with any args +emitter.emit('foo'); // ok: emits `foo` with no event data +emitter.emit({}); // err: `event` must be a string + +emitter.eventNames().pop(); // ok: returns string[] +emitter.eventNames('foo') // err: does not process args + +emitter.listeners('foo').pop()(); // ok: returns Function[] +emitter.listeners(); // err: requires `event` + +emitter.listenerCount('foo').toFixed(); // ok: returns a number +emitter.listenerCount(); // err: requires `event` + +emitter.on('foo', noop); // ok +emitter.on('bar', noop).on('baz', noop); // ok: chaining +emitter.on(123, []); // err: `event` and `handler `type mismatch + +emitter.once('foo', noop); // ok +emitter.once('bar', noop).on('baz', noop); // ok: chaining +emitter.once(123, []); // err: `event` and `handler `type mismatch + +emitter.prependListener('foo', noop); // ok +emitter.prependListener('bar', noop).prependListener('baz', noop); // ok: supports chaining +emitter.prependListener(); // err: both args are required +emitter.prependListener(123, {}); // err: `event` and `handler `type mismatch + +emitter.prependOnceListener('foo', noop); // ok +emitter.prependOnceListener('bar', noop).prependOnceListener('baz', noop); // ok: supports chaining +emitter.prependOnceListener(); // err: both args are required +emitter.prependOnceListener(123, {}); // err: `event` and `handler `type mismatch + +emitter.removeAllListeners('foo'); // ok +emitter.removeAllListeners(); // ok +emitter.removeAllListeners().removeAllListeners(); // ok: supports chaining +emitter.removeAllListeners(123); // err: `event` must be a string + +emitter.removeListener('foo', noop); // ok +emitter.removeListener('foo', noop).removeListener('foo', noop); // ok: supports chaining +emitter.removeListener(); // err: both args are required +emitter.removeListener(123, {}); // `event` and `handler `type mismatch + +emitter.off('foo', noop); // ok +emitter.off('foo', noop).off('foo', noop); // ok: supports chaining +emitter.off(); // err: both args are required +emitter.off(123, {}); // `event` and `handler `type mismatch + +emitter.setMaxListeners(5); // ok +emitter.setMaxListeners('foo'); // err: numeric arg is required + +emitter.getMaxListeners().toFixed(); // ok +emitter.getMaxListeners('foo'); // err: does not process args + +emitter.rawListeners('foo').pop()(); // ok: returns Function[] +emitter.rawListeners(); // err: requires `event` + +EventEmitter.defaultMaxListeners.toFixed() // ok diff --git a/tests/node_tests/http/get.js b/tests/node_tests/http/get.js new file mode 100644 index 00000000000..6cf36ca0182 --- /dev/null +++ b/tests/node_tests/http/get.js @@ -0,0 +1,16 @@ +// @flow + +const http = require('http'); +const url = 'http://nodejs.org/dist/index.json'; + +http.get({}); +http.get({host: 'localhost'}); +http.get(url); +http.get(url, () => {}); +http.get(url, {}, () => {}); +http.get(url, {host: 'localhost'}, () => {}); + +http.get(-1); // error +http.get({port: 'expects number'}); // error +http.get(url, {}, -1); // error +http.get(url, {port: 'expects number'}, () => {}); // error diff --git a/tests/node_tests/http/request.js b/tests/node_tests/http/request.js new file mode 100644 index 00000000000..ea1db939840 --- /dev/null +++ b/tests/node_tests/http/request.js @@ -0,0 +1,16 @@ +// @flow + +const http = require('http'); +const url = 'http://nodejs.org/dist/index.json'; + +http.request({}); +http.request({host: 'localhost'}); +http.request(url); +http.request(url, () => {}); +http.request(url, 
{}, () => {}); +http.request(url, {host: 'localhost'}, () => {}); + +http.request(-1); // error +http.request({port: 'expects number'}); // error +http.request(url, {}, -1); // error +http.request(url, {port: 'expects number'}, () => {}); // error diff --git a/tests/node_tests/https/get.js b/tests/node_tests/https/get.js new file mode 100644 index 00000000000..f220968a972 --- /dev/null +++ b/tests/node_tests/https/get.js @@ -0,0 +1,16 @@ +// @flow + +const https = require('https'); +const url = 'http://nodejs.org/dist/index.json'; + +https.get({}); +https.get({host: 'localhost'}); +https.get(url); +https.get(url, () => {}); +https.get(url, {}, () => {}); +https.get(url, {host: 'localhost'}, () => {}); + +https.get(-1); // error +https.get({port: 'expects number'}); // error +https.get(url, {}, -1); // error +https.get(url, {port: 'expects number'}, () => {}); // error diff --git a/tests/node_tests/https/request.js b/tests/node_tests/https/request.js new file mode 100644 index 00000000000..bccec6294c5 --- /dev/null +++ b/tests/node_tests/https/request.js @@ -0,0 +1,16 @@ +// @flow + +const https = require('https'); +const url = 'http://nodejs.org/dist/index.json'; + +https.request({}); +https.request({host: 'localhost'}); +https.request(url); +https.request(url, () => {}); +https.request(url, {}, () => {}); +https.request(url, {host: 'localhost'}, () => {}); + +https.request(-1); // error +https.request({port: 'expects number'}); // error +https.request(url, {}, -1); // error +https.request(url, {port: 'expects number'}, () => {}); // error diff --git a/tests/node_tests/node_tests.exp b/tests/node_tests/node_tests.exp index f9666da084d..175333d2f54 100644 --- a/tests/node_tests/node_tests.exp +++ b/tests/node_tests/node_tests.exp @@ -10,8 +10,8 @@ References: buffer/buffer.js:57:22 57| buffer = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72], (a:number) => a + 1, {}); // error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /node.js:111:22 - 111| static from(value: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /node.js:119:22 + 119| static from(value: ArrayBuffer, byteOffset?: number, length?: number): Buffer; ^^^^^^^^^^^ [2] @@ -27,8 +27,8 @@ References: child_process/execSync.js:8:27 8| (execSync('ls', {timeout: '250'})); // error, no signatures match ^^^^^ [1] - /node.js:151:13 - 151| timeout?: number; + /node.js:167:13 + 167| timeout?: number, ^^^^^^ [2] @@ -44,32 +44,40 @@ Cannot cast `hmac.read()` to number because: ^^^^^^^^^^^ References: - /node.js:1606:24 - 1606| read(size?: number): ?(string | Buffer); + /node.js:1836:24 + 1836| read(size?: number): ?(string | Buffer); ^^^^^^^^^^^^^^^^^^ [1] crypto/crypto.js:12:21 12| (hmac.read(): number); // 4 errors: null, void, string, Buffer ^^^^^^ [2] - /node.js:1606:26 - 1606| read(size?: number): ?(string | Buffer); + /node.js:1836:26 + 1836| read(size?: number): ?(string | Buffer); ^^^^^^ [3] - /node.js:1606:35 - 1606| read(size?: number): ?(string | Buffer); + /node.js:1836:35 + 1836| read(size?: number): ?(string | Buffer); ^^^^^^ [4] -Error ------------------------------------------------------------------------------------------- crypto/crypto.js:16:16 +Error -------------------------------------------------------------------------------------------- crypto/crypto.js:16:5 -Cannot call `hmac.write` with `123` bound to `chunk` because number [1] is incompatible with string [2]. +Cannot call `hmac.write` because: + - Either number [1] is incompatible with string [2]. + - Or number [1] is incompatible with string [3]. 
- crypto/crypto.js:16:16 + crypto/crypto.js:16:5 16| hmac.write(123); // 2 errors: not a string or a Buffer - ^^^ [1] + ^^^^^^^^^^^^^^^ References: - /node.js:1657:21 - 1657| chunk: Buffer | string, - ^^^^^^ [2] + crypto/crypto.js:16:16 + 16| hmac.write(123); // 2 errors: not a string or a Buffer + ^^^ [1] + /node.js:1880:16 + 1880| write(chunk: string | Buffer | Uint8Array, callback?: (error?: Error) => void): boolean; + ^^^^^^ [2] + /node.js:1881:16 + 1881| write(chunk: string | Buffer | Uint8Array, encoding?: string, callback?: (error?: Error) => void): boolean; + ^^^^^^ [3] Error ------------------------------------------------------------------------------------------- crypto/crypto.js:26:24 @@ -81,10 +89,10 @@ Cannot call `hmac.update` with `'bogus'` bound to `input_encoding` because strin ^^^^^^^ [1] References: - /node.js:473:50 + /node.js:508:50 v---------------------------- - 473| update(data: string | Buffer, input_encoding?: 'utf8' | 'ascii' | 'latin1' | - 474| 'binary'): crypto$Hmac; + 508| update(data: string | Buffer, input_encoding?: 'utf8' | 'ascii' | 'latin1' | + 509| 'binary'): crypto$Hmac; -------^ [2] @@ -97,10 +105,10 @@ Cannot call `hmac.update` with `'bogus'` bound to `input_encoding` because strin ^^^^^^^ [1] References: - /node.js:473:50 + /node.js:508:50 v---------------------------- - 473| update(data: string | Buffer, input_encoding?: 'utf8' | 'ascii' | 'latin1' | - 474| 'binary'): crypto$Hmac; + 508| update(data: string | Buffer, input_encoding?: 'utf8' | 'ascii' | 'latin1' | + 509| 'binary'): crypto$Hmac; -------^ [2] @@ -113,8 +121,8 @@ Cannot cast `hmac.digest(...)` to undefined because string [1] is incompatible w ^^^^^^^^^^^^^^^^^^ References: - /node.js:470:61 - 470| digest(encoding: 'hex' | 'latin1' | 'binary' | 'base64'): string; + /node.js:505:61 + 505| digest(encoding: 'hex' | 'latin1' | 'binary' | 'base64'): string; ^^^^^^ [1] crypto/crypto.js:36:26 36| (hmac.digest('hex'): void); // 1 error @@ -130,49 +138,465 @@ Cannot cast `hmac.digest()` to undefined because `Buffer` [1] is incompatible wi ^^^^^^^^^^^^^ References: - /node.js:472:27 - 472| digest(encoding: void): Buffer; + /node.js:507:27 + 507| digest(encoding: void): Buffer; ^^^^^^ [1] crypto/crypto.js:37:21 37| (hmac.digest(): void); // 1 error ^^^^ [2] +Error -------------------------------------------------------------------------------------------------- dns/dns.js:23:1 + +Cannot call `dns.lookup` because: + - Either function type [1] requires another argument from call of method `lookup` [2]. + - Or function type [3] requires another argument from call of method `lookup` [2]. + + dns/dns.js:23:1 + 23| dns.lookup(); // error + ^^^^^^^^^^^^ [2] + +References: + /node.js:744:26 + v + 744| declare function lookup( + 745| domain: string, + 746| options: number | LookupOptions, + 747| callback: (err: ?Error, address: string, family: number) => void + 748| ): void; + ------^ [1] + /node.js:749:26 + v + 749| declare function lookup( + 750| domain: string, + 751| callback: (err: ?Error, address: string, family: number) => void + 752| ): void; + ------^ [3] + + +Error -------------------------------------------------------------------------------------------------- dns/dns.js:25:1 + +Cannot call `dns.lookup` because: + - Either undefined [1] is incompatible with number [2]. + - Or undefined [1] is incompatible with `LookupOptions` [3]. + - Or function type [4] requires another argument from call of method `lookup` [1]. 
+ + dns/dns.js:25:1 + 25| dns.lookup("test.com"); // error + ^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + /node.js:746:14 + 746| options: number | LookupOptions, + ^^^^^^ [2] + /node.js:746:23 + 746| options: number | LookupOptions, + ^^^^^^^^^^^^^ [3] + /node.js:749:26 + v + 749| declare function lookup( + 750| domain: string, + 751| callback: (err: ?Error, address: string, family: number) => void + 752| ): void; + ------^ [4] + + +Error -------------------------------------------------------------------------------------------------- dns/dns.js:27:1 + +Cannot call `dns.lookup` because number [1] is incompatible with function type [2]. + + dns/dns.js:27:1 + 27| dns.lookup("test.com", 4); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + dns/dns.js:27:24 + 27| dns.lookup("test.com", 4); // error + ^ [1] + /node.js:751:15 + 751| callback: (err: ?Error, address: string, family: number) => void + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------------- dns/dns.js:29:1 + +Cannot call `dns.lookup` because a call signature declaring the expected parameter / return type is missing in object +literal [1] but exists in function type [2]. + + dns/dns.js:29:1 + 29| dns.lookup("test.com", { family: 6 }); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + dns/dns.js:29:24 + 29| dns.lookup("test.com", { family: 6 }); // error + ^^^^^^^^^^^^^ [1] + /node.js:751:15 + 751| callback: (err: ?Error, address: string, family: number) => void + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------------- dns/dns.js:31:1 + +Cannot call `dns.lookup` because no more than 2 arguments are expected by function type [1]. + + dns/dns.js:31:1 + 31| dns.lookup("test.com", null, (err, address, family) => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:749:26 + v + 749| declare function lookup( + 750| domain: string, + 751| callback: (err: ?Error, address: string, family: number) => void + 752| ): void; + ------^ [1] + + +Error -------------------------------------------------------------------------------------------------- dns/dns.js:33:1 + +Cannot call `dns.lookup` because: + - Either function [1] is incompatible with string [2]. + - Or function [1] is incompatible with string [3]. + + dns/dns.js:33:1 + 33| dns.lookup((err, address, family) => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + dns/dns.js:33:12 + 33| dns.lookup((err, address, family) => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /node.js:745:13 + 745| domain: string, + ^^^^^^ [2] + /node.js:750:13 + 750| domain: string, + ^^^^^^ [3] + + +Error --------------------------------------------------------------------------------------------- events/events.js:9:1 + +Cannot call `emitter.addListener` because function [1] requires another argument. + + events/events.js:9:1 + 9| emitter.addListener(); // err: both args are required + ^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:810:3 + 810| addListener(event: string, listener: Function): this; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- events/events.js:10:21 + +Cannot call `emitter.addListener` with `123` bound to `event` because number [1] is incompatible with string [2]. 
+ + events/events.js:10:21 + 10| emitter.addListener(123, {}); // err: `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:810:22 + 810| addListener(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- events/events.js:14:14 + +Cannot call `emitter.emit` with object literal bound to `event` because object literal [1] is incompatible with +string [2]. + + events/events.js:14:14 + 14| emitter.emit({}); // err: `event` must be a string + ^^ [1] + +References: + /node.js:811:15 + 811| emit(event: string, ...args:Array): boolean; + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- events/events.js:17:1 + +Cannot call `emitter.eventNames` because no arguments are expected by function type [1]. + + events/events.js:17:1 + 17| emitter.eventNames('foo') // err: does not process args + ^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:812:3 + 812| eventNames(): Array; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error -------------------------------------------------------------------------------------------- events/events.js:20:1 + +Cannot call `emitter.listeners` because function [1] requires another argument. + + events/events.js:20:1 + 20| emitter.listeners(); // err: requires `event` + ^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:813:3 + 813| listeners(event: string): Array; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error -------------------------------------------------------------------------------------------- events/events.js:23:1 + +Cannot call `emitter.listenerCount` because function [1] requires another argument. + + events/events.js:23:1 + 23| emitter.listenerCount(); // err: requires `event` + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:814:3 + 814| listenerCount(event: string): number; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- events/events.js:27:12 + +Cannot call `emitter.on` with `123` bound to `event` because number [1] is incompatible with string [2]. + + events/events.js:27:12 + 27| emitter.on(123, []); // err: `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:815:13 + 815| on(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- events/events.js:31:14 + +Cannot call `emitter.once` with `123` bound to `event` because number [1] is incompatible with string [2]. + + events/events.js:31:14 + 31| emitter.once(123, []); // err: `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:816:15 + 816| once(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- events/events.js:35:1 + +Cannot call `emitter.prependListener` because function [1] requires another argument. 
+ + events/events.js:35:1 + 35| emitter.prependListener(); // err: both args are required + ^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:817:3 + 817| prependListener(event: string, listener: Function): this; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- events/events.js:36:25 + +Cannot call `emitter.prependListener` with `123` bound to `event` because number [1] is incompatible with string [2]. + + events/events.js:36:25 + 36| emitter.prependListener(123, {}); // err: `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:817:26 + 817| prependListener(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- events/events.js:40:1 + +Cannot call `emitter.prependOnceListener` because function [1] requires another argument. + + events/events.js:40:1 + 40| emitter.prependOnceListener(); // err: both args are required + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:818:3 + 818| prependOnceListener(event: string, listener: Function): this; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- events/events.js:41:29 + +Cannot call `emitter.prependOnceListener` with `123` bound to `event` because number [1] is incompatible with +string [2]. + + events/events.js:41:29 + 41| emitter.prependOnceListener(123, {}); // err: `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:818:30 + 818| prependOnceListener(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- events/events.js:46:28 + +Cannot call `emitter.removeAllListeners` with `123` bound to `event` because number [1] is incompatible with string [2]. + + events/events.js:46:28 + 46| emitter.removeAllListeners(123); // err: `event` must be a string + ^^^ [1] + +References: + /node.js:819:30 + 819| removeAllListeners(event?: string): this; + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- events/events.js:50:1 + +Cannot call `emitter.removeListener` because function [1] requires another argument. + + events/events.js:50:1 + 50| emitter.removeListener(); // err: both args are required + ^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:820:3 + 820| removeListener(event: string, listener: Function): this; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- events/events.js:51:24 + +Cannot call `emitter.removeListener` with `123` bound to `event` because number [1] is incompatible with string [2]. + + events/events.js:51:24 + 51| emitter.removeListener(123, {}); // `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:820:25 + 820| removeListener(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- events/events.js:55:1 + +Cannot call `emitter.off` because function [1] requires another argument. 
+ + events/events.js:55:1 + 55| emitter.off(); // err: both args are required + ^^^^^^^^^^^^^ + +References: + /node.js:821:3 + 821| off(event: string, listener: Function): this; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- events/events.js:56:13 + +Cannot call `emitter.off` with `123` bound to `event` because number [1] is incompatible with string [2]. + + events/events.js:56:13 + 56| emitter.off(123, {}); // `event` and `handler `type mismatch + ^^^ [1] + +References: + /node.js:821:14 + 821| off(event: string, listener: Function): this; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- events/events.js:59:25 + +Cannot call `emitter.setMaxListeners` with `'foo'` bound to `n` because string [1] is incompatible with number [2]. + + events/events.js:59:25 + 59| emitter.setMaxListeners('foo'); // err: numeric arg is required + ^^^^^ [1] + +References: + /node.js:822:22 + 822| setMaxListeners(n: number): this; + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- events/events.js:62:1 + +Cannot call `emitter.getMaxListeners` because no arguments are expected by function type [1]. + + events/events.js:62:1 + 62| emitter.getMaxListeners('foo'); // err: does not process args + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:823:3 + 823| getMaxListeners(): number; + ^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error -------------------------------------------------------------------------------------------- events/events.js:65:1 + +Cannot call `emitter.rawListeners` because function [1] requires another argument. + + events/events.js:65:1 + 65| emitter.rawListeners(); // err: requires `event` + ^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:824:3 + 824| rawListeners(event: string): Array; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + Error ---------------------------------------------------------------------------------------------------- fs/fs.js:13:1 -Could not decide which case to select. Since case 3 [1] may work but if it doesn't case 4 [2] looks promising too. To +Could not decide which case to select, since case 3 [1] may work but if it doesn't case 4 [2] looks promising too. To fix add a type annotation to `_` [3] or to `data` [4]. fs/fs.js:13:1 - v----------------------------------------------------------- - 13| fs.readFile("file.exp", { encoding: "blah" }, (_, data) => { - 14| (data : string); - 15| }); - -^ - -References: - /node.js:974:28 - v - 974| declare function readFile( - 975| path: string | Buffer | URL | number, - 976| options: { encoding: string; flag?: string }, - 977| callback: (err: ?ErrnoError, data: string) => void - 978| ): void; - ------^ [1] - /node.js:979:28 - v - 979| declare function readFile( - 980| path: string | Buffer | URL | number, - 981| options: { flag?: string }, - 982| callback: (err: ?ErrnoError, data: Buffer) => void - 983| ): void; - ------^ [2] + v----------------------------------------------------------- + 13| fs.readFile("file.exp", { encoding: "blah" }, (_, data) => { + 14| (data : string); + 15| }); + -^ + +References: + /node.js:1074:3 + v------------------------- + 1074| declare function readFile( + 1075| path: string | Buffer | URL | number, + 1076| options: { + 1077| encoding: string, + 1078| flag?: string, + 1079| ... 
+ 1080| }, + 1081| callback: (err: ?ErrnoError, data: string) => void + 1082| ): void; + -------^ [1] + /node.js:1083:3 + v------------------------- + 1083| declare function readFile( + 1084| path: string | Buffer | URL | number, + 1085| options: { flag?: string, ... }, + 1086| callback: (err: ?ErrnoError, data: Buffer) => void + 1087| ): void; + -------^ [2] fs/fs.js:13:48 - 13| fs.readFile("file.exp", { encoding: "blah" }, (_, data) => { - ^ [3] + 13| fs.readFile("file.exp", { encoding: "blah" }, (_, data) => { + ^ [3] fs/fs.js:13:51 - 13| fs.readFile("file.exp", { encoding: "blah" }, (_, data) => { - ^^^^ [4] + 13| fs.readFile("file.exp", { encoding: "blah" }, (_, data) => { + ^^^^ [4] Error ---------------------------------------------------------------------------------------------------- fs/fs.js:28:2 @@ -180,16 +604,16 @@ Error -------------------------------------------------------------------------- Cannot cast `fs.readFileSync(...)` to string because `Buffer` [1] is incompatible with string [2]. fs/fs.js:28:2 - 28| (fs.readFileSync("file.exp") : string); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 28| (fs.readFileSync("file.exp") : string); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /node.js:986:6 - 986| ): Buffer; - ^^^^^^ [1] + /node.js:1090:6 + 1090| ): Buffer; + ^^^^^^ [1] fs/fs.js:28:32 - 28| (fs.readFileSync("file.exp") : string); // error - ^^^^^^ [2] + 28| (fs.readFileSync("file.exp") : string); // error + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------------- fs/fs.js:31:2 @@ -197,16 +621,16 @@ Error -------------------------------------------------------------------------- Cannot cast `fs.readFileSync(...)` to `Buffer` because string [1] is incompatible with `Buffer` [2]. fs/fs.js:31:2 - 31| (fs.readFileSync("file.exp", "blah") : Buffer); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 31| (fs.readFileSync("file.exp", "blah") : Buffer); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /node.js:990:6 - 990| ): string; - ^^^^^^ [1] + /node.js:1094:6 + 1094| ): string; + ^^^^^^ [1] fs/fs.js:31:40 - 31| (fs.readFileSync("file.exp", "blah") : Buffer); // error - ^^^^^^ [2] + 31| (fs.readFileSync("file.exp", "blah") : Buffer); // error + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------------- fs/fs.js:34:2 @@ -214,16 +638,16 @@ Error -------------------------------------------------------------------------- Cannot cast `fs.readFileSync(...)` to `Buffer` because string [1] is incompatible with `Buffer` [2]. 
fs/fs.js:34:2 - 34| (fs.readFileSync("file.exp", { encoding: "blah" }) : Buffer); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 34| (fs.readFileSync("file.exp", { encoding: "blah" }) : Buffer); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /node.js:991:118 - 991| declare function readFileSync(path: string | Buffer | URL | number, options: { encoding: string, flag?: string }): string; - ^^^^^^ [1] + /node.js:1099:7 + 1099| }): string; + ^^^^^^ [1] fs/fs.js:34:54 - 34| (fs.readFileSync("file.exp", { encoding: "blah" }) : Buffer); // error - ^^^^^^ [2] + 34| (fs.readFileSync("file.exp", { encoding: "blah" }) : Buffer); // error + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------------- fs/fs.js:37:2 @@ -231,52 +655,152 @@ Error -------------------------------------------------------------------------- Cannot cast `fs.readFileSync(...)` to string because `Buffer` [1] is incompatible with string [2]. fs/fs.js:37:2 - 37| (fs.readFileSync("file.exp", {}) : string); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 37| (fs.readFileSync("file.exp", {}) : string); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /node.js:992:117 - 992| declare function readFileSync(path: string | Buffer | URL | number, options: { encoding?: void, flag?: string }): Buffer; - ^^^^^^ [1] + /node.js:1104:7 + 1104| }): Buffer; + ^^^^^^ [1] fs/fs.js:37:36 - 37| (fs.readFileSync("file.exp", {}) : string); // error - ^^^^^^ [2] + 37| (fs.readFileSync("file.exp", {}) : string); // error + ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------- http/server.js:67:1 +Error ------------------------------------------------------------------------------------------------- http/get.js:13:1 -Cannot call `server.listen` because a callable signature is missing in object literal [1] but exists in function -type [2]. +Cannot call `http.get` because number [1] is incompatible with string [2]. - http/server.js:67:1 - 67| server.listen(() => {}, {}); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + http/get.js:13:1 + 13| http.get(-1); // error + ^^^^^^^^^^^^ References: - http/server.js:67:25 - 67| server.listen(() => {}, {}); - ^^ [1] - /node.js:1272:39 - 1272| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + http/get.js:13:10 + 13| http.get(-1); // error + ^^ [1] + /node.js:1496:10 + 1496| url: string, + ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------- http/server.js:68:1 +Error ------------------------------------------------------------------------------------------------- http/get.js:14:1 -Cannot call `server.listen` because a callable signature is missing in object literal [1] but exists in function -type [2]. +Cannot call `http.get` because string [1] is incompatible with number [2] in property `port`. - http/server.js:68:1 - 68| server.listen(function() {}, {}); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + http/get.js:14:1 + 14| http.get({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + http/get.js:14:17 + 14| http.get({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------- http/get.js:15:1 + +Cannot call `http.get` because number [1] is incompatible with function type [2]. 
+ + http/get.js:15:1 + 15| http.get(url, {}, -1); // error + ^^^^^^^^^^^^^^^^^^^^^ + +References: + http/get.js:15:19 + 15| http.get(url, {}, -1); // error + ^^ [1] + /node.js:1498:16 + 1498| callback?: (response: IncomingMessage) => void + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------- http/get.js:16:1 + +Cannot call `http.get` because string [1] is incompatible with number [2] in property `port`. + + http/get.js:16:1 + 16| http.get(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - http/server.js:68:30 - 68| server.listen(function() {}, {}); - ^^ [1] - /node.js:1272:39 - 1272| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + http/get.js:16:22 + 16| http.get(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- http/request.js:13:1 + +Cannot call `http.request` because number [1] is incompatible with string [2]. + + http/request.js:13:1 + 13| http.request(-1); // error + ^^^^^^^^^^^^^^^^ + +References: + http/request.js:13:14 + 13| http.request(-1); // error + ^^ [1] + /node.js:1487:10 + 1487| url: string, + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- http/request.js:14:1 + +Cannot call `http.request` because string [1] is incompatible with number [2] in property `port`. + + http/request.js:14:1 + 14| http.request({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + http/request.js:14:21 + 14| http.request({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- http/request.js:15:1 + +Cannot call `http.request` because number [1] is incompatible with function type [2]. + + http/request.js:15:1 + 15| http.request(url, {}, -1); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + http/request.js:15:23 + 15| http.request(url, {}, -1); // error + ^^ [1] + /node.js:1489:16 + 1489| callback?: (response: IncomingMessage) => void + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- http/request.js:16:1 + +Cannot call `http.request` because string [1] is incompatible with number [2] in property `port`. 
+ + http/request.js:16:1 + 16| http.request(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + http/request.js:16:26 + 16| http.request(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------- http/server.js:69:1 @@ -291,9 +815,9 @@ References: http/server.js:69:15 69| server.listen({}, () => {}, 'localhost', 123); ^^ [1] - /node.js:1266:19 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1397:17 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------- http/server.js:70:1 @@ -308,9 +832,9 @@ References: http/server.js:70:15 70| server.listen({}, function() {}, 'localhost', 123); ^^ [1] - /node.js:1266:19 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1397:17 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------- http/server.js:71:1 @@ -328,15 +852,15 @@ References: http/server.js:71:15 71| server.listen({}, () => {}, 123); ^^ [1] - /node.js:1266:19 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:19 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:19 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1397:17 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:17 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:17 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------------------------- http/server.js:72:1 @@ -354,49 +878,15 @@ References: http/server.js:72:15 72| server.listen({}, function() {}, 123); ^^ [1] - /node.js:1266:19 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:19 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:19 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] - - -Error ---------------------------------------------------------------------------------------------- http/server.js:73:1 - -Cannot call `server.listen` because number [1] is incompatible with function type [2]. - - http/server.js:73:1 - 73| server.listen(() => {}, 123); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - http/server.js:73:25 - 73| server.listen(() => {}, 123); - ^^^ [1] - /node.js:1272:39 - 1272| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] - - -Error ---------------------------------------------------------------------------------------------- http/server.js:74:1 - -Cannot call `server.listen` because number [1] is incompatible with function type [2]. 
- - http/server.js:74:1 - 74| server.listen(function() {}, 123); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - http/server.js:74:30 - 74| server.listen(function() {}, 123); - ^^^ [1] - /node.js:1272:39 - 1272| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + /node.js:1397:17 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:17 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:17 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------------------------- http/server.js:75:1 @@ -414,15 +904,15 @@ References: http/server.js:75:15 75| server.listen(() => {}, 'localhost', 123); ^^^^^^^^ [1] - /node.js:1266:19 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:19 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:19 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1397:17 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:17 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:17 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------------------------- http/server.js:76:1 @@ -440,49 +930,15 @@ References: http/server.js:76:15 76| server.listen(function() {}, 'localhost', 123); ^^^^^^^^^^ [1] - /node.js:1266:19 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:19 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:19 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] - - -Error ---------------------------------------------------------------------------------------------- http/server.js:77:1 - -Cannot call `server.listen` because string [1] is incompatible with function type [2]. - - http/server.js:77:1 - 77| server.listen(() => {}, 'localhost'); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - http/server.js:77:25 - 77| server.listen(() => {}, 'localhost'); - ^^^^^^^^^^^ [1] - /node.js:1272:39 - 1272| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] - - -Error ---------------------------------------------------------------------------------------------- http/server.js:78:1 - -Cannot call `server.listen` because string [1] is incompatible with function type [2]. 
- - http/server.js:78:1 - 78| server.listen(function() {}, 'localhost'); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - http/server.js:78:30 - 78| server.listen(function() {}, 'localhost'); - ^^^^^^^^^^^ [1] - /node.js:1272:39 - 1272| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + /node.js:1397:17 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:17 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:17 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------------------------- http/server.js:79:1 @@ -497,9 +953,9 @@ References: http/server.js:79:21 79| server.listen(8080, () => {}, 'localhost', 123); ^^^^^^^^ [1] - /node.js:1266:38 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1397:36 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------- http/server.js:80:1 @@ -514,9 +970,9 @@ References: http/server.js:80:21 80| server.listen(8080, function() {}, 'localhost', 123); ^^^^^^^^^^ [1] - /node.js:1266:38 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1397:36 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------- http/server.js:81:1 @@ -534,15 +990,15 @@ References: http/server.js:81:21 81| server.listen(8080, () => {}, 123); ^^^^^^^^ [1] - /node.js:1266:38 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:37 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:38 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1397:36 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:35 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:36 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------------------------- http/server.js:82:1 @@ -560,15 +1016,15 @@ References: http/server.js:82:21 82| server.listen(8080, function() {}, 123); ^^^^^^^^^^ [1] - /node.js:1266:38 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:37 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:38 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1397:36 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:35 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:36 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error 
---------------------------------------------------------------------------------------------- http/server.js:83:1 @@ -586,15 +1042,15 @@ References: http/server.js:83:21 83| server.listen(8080, () => {}, 'localhost'); ^^^^^^^^ [1] - /node.js:1266:38 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:37 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:38 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1397:36 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:35 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:36 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------------------------- http/server.js:84:1 @@ -612,51 +1068,151 @@ References: http/server.js:84:21 84| server.listen(8080, function() {}, 'localhost'); ^^^^^^^^^^ [1] - /node.js:1266:38 - 1266| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1268:37 - 1268| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1269:38 - 1269| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1397:36 + 1397| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1399:35 + 1399| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1400:36 + 1400| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] -Error --------------------------------------------------------------------------------------------- https/server.js:67:1 +Error ------------------------------------------------------------------------------------------------ https/get.js:13:1 -Cannot call `server.listen` because a callable signature is missing in object literal [1] but exists in function -type [2]. +Cannot call `https.get` because number [1] is incompatible with string [2]. - https/server.js:67:1 - 67| server.listen(() => {}, {}); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + https/get.js:13:1 + 13| https.get(-1); // error + ^^^^^^^^^^^^^ References: - https/server.js:67:25 - 67| server.listen(() => {}, {}); - ^^ [1] - /node.js:1313:39 - 1313| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + https/get.js:13:11 + 13| https.get(-1); // error + ^^ [1] + /node.js:1543:10 + 1543| url: string, + ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------- https/server.js:68:1 +Error ------------------------------------------------------------------------------------------------ https/get.js:14:1 -Cannot call `server.listen` because a callable signature is missing in object literal [1] but exists in function -type [2]. +Cannot call `https.get` because string [1] is incompatible with number [2] in property `port`. 
- https/server.js:68:1 - 68| server.listen(function() {}, {}); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + https/get.js:14:1 + 14| https.get({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - https/server.js:68:30 - 68| server.listen(function() {}, {}); - ^^ [1] - /node.js:1313:39 - 1313| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + https/get.js:14:18 + 14| https.get({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------ https/get.js:15:1 + +Cannot call `https.get` because number [1] is incompatible with function type [2]. + + https/get.js:15:1 + 15| https.get(url, {}, -1); // error + ^^^^^^^^^^^^^^^^^^^^^^ + +References: + https/get.js:15:20 + 15| https.get(url, {}, -1); // error + ^^ [1] + /node.js:1545:16 + 1545| callback?: (response: IncomingMessage) => void + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------ https/get.js:16:1 + +Cannot call `https.get` because string [1] is incompatible with number [2] in property `port`. + + https/get.js:16:1 + 16| https.get(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + https/get.js:16:23 + 16| https.get(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- https/request.js:13:1 + +Cannot call `https.request` because number [1] is incompatible with string [2]. + + https/request.js:13:1 + 13| https.request(-1); // error + ^^^^^^^^^^^^^^^^^ + +References: + https/request.js:13:15 + 13| https.request(-1); // error + ^^ [1] + /node.js:1534:10 + 1534| url: string, + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- https/request.js:14:1 + +Cannot call `https.request` because string [1] is incompatible with number [2] in property `port`. + + https/request.js:14:1 + 14| https.request({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + https/request.js:14:22 + 14| https.request({port: 'expects number'}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- https/request.js:15:1 + +Cannot call `https.request` because number [1] is incompatible with function type [2]. + + https/request.js:15:1 + 15| https.request(url, {}, -1); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + https/request.js:15:24 + 15| https.request(url, {}, -1); // error + ^^ [1] + /node.js:1536:16 + 1536| callback?: (response: IncomingMessage) => void + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------------- https/request.js:16:1 + +Cannot call `https.request` because string [1] is incompatible with number [2] in property `port`. 
+ + https/request.js:16:1 + 16| https.request(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + https/request.js:16:27 + 16| https.request(url, {port: 'expects number'}, () => {}); // error + ^^^^^^^^^^^^^^^^ [1] + /node.js:1456:10 + 1456| port?: number, + ^^^^^^ [2] Error --------------------------------------------------------------------------------------------- https/server.js:69:1 @@ -671,9 +1227,9 @@ References: https/server.js:69:15 69| server.listen({}, () => {}, 'localhost', 123); ^^ [1] - /node.js:1307:19 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1423:17 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error --------------------------------------------------------------------------------------------- https/server.js:70:1 @@ -688,9 +1244,9 @@ References: https/server.js:70:15 70| server.listen({}, function() {}, 'localhost', 123); ^^ [1] - /node.js:1307:19 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1423:17 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error --------------------------------------------------------------------------------------------- https/server.js:71:1 @@ -708,15 +1264,15 @@ References: https/server.js:71:15 71| server.listen({}, () => {}, 123); ^^ [1] - /node.js:1307:19 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:19 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:19 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1423:17 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:17 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:17 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error --------------------------------------------------------------------------------------------- https/server.js:72:1 @@ -734,49 +1290,15 @@ References: https/server.js:72:15 72| server.listen({}, function() {}, 123); ^^ [1] - /node.js:1307:19 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:19 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:19 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] - - -Error --------------------------------------------------------------------------------------------- https/server.js:73:1 - -Cannot call `server.listen` because number [1] is incompatible with function type [2]. - - https/server.js:73:1 - 73| server.listen(() => {}, 123); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - https/server.js:73:25 - 73| server.listen(() => {}, 123); - ^^^ [1] - /node.js:1313:39 - 1313| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] - - -Error --------------------------------------------------------------------------------------------- https/server.js:74:1 - -Cannot call `server.listen` because number [1] is incompatible with function type [2]. 
- - https/server.js:74:1 - 74| server.listen(function() {}, 123); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - https/server.js:74:30 - 74| server.listen(function() {}, 123); - ^^^ [1] - /node.js:1313:39 - 1313| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + /node.js:1423:17 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:17 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:17 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error --------------------------------------------------------------------------------------------- https/server.js:75:1 @@ -794,15 +1316,15 @@ References: https/server.js:75:15 75| server.listen(() => {}, 'localhost', 123); ^^^^^^^^ [1] - /node.js:1307:19 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:19 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:19 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1423:17 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:17 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:17 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error --------------------------------------------------------------------------------------------- https/server.js:76:1 @@ -820,49 +1342,15 @@ References: https/server.js:76:15 76| server.listen(function() {}, 'localhost', 123); ^^^^^^^^^^ [1] - /node.js:1307:19 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:19 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:19 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] - - -Error --------------------------------------------------------------------------------------------- https/server.js:77:1 - -Cannot call `server.listen` because string [1] is incompatible with function type [2]. - - https/server.js:77:1 - 77| server.listen(() => {}, 'localhost'); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - https/server.js:77:25 - 77| server.listen(() => {}, 'localhost'); - ^^^^^^^^^^^ [1] - /node.js:1313:39 - 1313| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] - - -Error --------------------------------------------------------------------------------------------- https/server.js:78:1 - -Cannot call `server.listen` because string [1] is incompatible with function type [2]. 
- - https/server.js:78:1 - 78| server.listen(function() {}, 'localhost'); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - https/server.js:78:30 - 78| server.listen(function() {}, 'localhost'); - ^^^^^^^^^^^ [1] - /node.js:1313:39 - 1313| listen(handle: Object, callback?: Function): Server; - ^^^^^^^^ [2] + /node.js:1423:17 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:17 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:17 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error --------------------------------------------------------------------------------------------- https/server.js:79:1 @@ -877,9 +1365,9 @@ References: https/server.js:79:21 79| server.listen(8443, () => {}, 'localhost', 123); ^^^^^^^^ [1] - /node.js:1307:38 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1423:36 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error --------------------------------------------------------------------------------------------- https/server.js:80:1 @@ -894,9 +1382,9 @@ References: https/server.js:80:21 80| server.listen(8443, function() {}, 'localhost', 123); ^^^^^^^^^^ [1] - /node.js:1307:38 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] + /node.js:1423:36 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] Error --------------------------------------------------------------------------------------------- https/server.js:81:1 @@ -914,15 +1402,15 @@ References: https/server.js:81:21 81| server.listen(8443, () => {}, 123); ^^^^^^^^ [1] - /node.js:1307:38 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:37 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:38 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1423:36 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:35 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:36 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error --------------------------------------------------------------------------------------------- https/server.js:82:1 @@ -940,15 +1428,15 @@ References: https/server.js:82:21 82| server.listen(8443, function() {}, 123); ^^^^^^^^^^ [1] - /node.js:1307:38 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:37 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:38 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1423:36 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:35 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:36 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error 
--------------------------------------------------------------------------------------------- https/server.js:83:1 @@ -966,15 +1454,15 @@ References: https/server.js:83:21 83| server.listen(8443, () => {}, 'localhost'); ^^^^^^^^ [1] - /node.js:1307:38 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:37 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:38 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1423:36 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:35 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:36 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error --------------------------------------------------------------------------------------------- https/server.js:84:1 @@ -992,20 +1480,20 @@ References: https/server.js:84:21 84| server.listen(8443, function() {}, 'localhost'); ^^^^^^^^^^ [1] - /node.js:1307:38 - 1307| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): Server; - ^^^^^^ [2] - /node.js:1309:37 - 1309| listen(port?: number, backlog?: number, callback?: Function): Server; - ^^^^^^ [3] - /node.js:1310:38 - 1310| listen(port?: number, hostname?: string, callback?: Function): Server; - ^^^^^^ [4] + /node.js:1423:36 + 1423| listen(port?: number, hostname?: string, backlog?: number, callback?: Function): this; + ^^^^^^ [2] + /node.js:1425:35 + 1425| listen(port?: number, backlog?: number, callback?: Function): this; + ^^^^^^ [3] + /node.js:1426:36 + 1426| listen(port?: number, hostname?: string, callback?: Function): this; + ^^^^^^ [4] Error ---------------------------------------------------------------------------- invalid_package_file/package.json:1:1 -Unexpected end of input +Unexpected end of input, expected a valid JSON value 1| @@ -1013,7 +1501,7 @@ Unexpected end of input Error ---------------------------------------------------------------------------------- json_file/json_invalid.json:1:1 -Unexpected token : +Unexpected token `:`, expected a valid JSON value 1| :derp ^ @@ -1223,8 +1711,8 @@ Cannot cast `u1.username` to `Buffer` because string [1] is incompatible with `B ^^^^^^^^^^^ References: - /node.js:1473:13 - 1473| username: string, + /node.js:1686:13 + 1686| username: string, ^^^^^^ [1] os/userInfo.js:7:15 7| (u1.username: Buffer); // error @@ -1240,8 +1728,8 @@ Cannot cast `u2.username` to `Buffer` because string [1] is incompatible with `B ^^^^^^^^^^^ References: - /node.js:1473:13 - 1473| username: string, + /node.js:1686:13 + 1686| username: string, ^^^^^^ [1] os/userInfo.js:11:15 11| (u2.username: Buffer); // error @@ -1257,14 +1745,133 @@ Cannot cast `u3.username` to string because `Buffer` [1] is incompatible with st ^^^^^^^^^^^ References: - /node.js:1465:13 - 1465| username: Buffer, + /node.js:1677:13 + 1677| username: Buffer, ^^^^^^ [1] os/userInfo.js:14:15 14| (u3.username: string); // error ^^^^^^ [2] +Error -------------------------------------------------------------------------------------- process/emitWarning.js:10:1 + +Cannot call `process.emitWarning` because: + - Either undefined [1] is incompatible with string [2]. + - Or undefined [1] is incompatible with `Error` [3]. 
+ - Or function type [4] requires another argument from call of method `emitWarning` [1]. + - Or function type [5] requires another argument from call of method `emitWarning` [1]. + - Or function type [6] requires another argument from call of method `emitWarning` [1]. + + process/emitWarning.js:10:1 + 10| process.emitWarning(); // error + ^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + /node.js:2584:24 + 2584| emitWarning(warning: string | Error): void; + ^^^^^^ [2] + /node.js:2584:33 + 2584| emitWarning(warning: string | Error): void; + ^^^^^ [3] + /node.js:2585:3 + 2585| emitWarning(warning: string, typeOrCtor: string | (...empty) => mixed): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] + /node.js:2586:3 + 2586| emitWarning(warning: string, type: string, codeOrCtor: string | (...empty) => mixed): void; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [5] + /node.js:2587:3 + v----------- + 2587| emitWarning( + 2588| warning: string, + 2589| type: string, + 2590| code: string, + 2591| ctor?: (...empty) => mixed + 2592| ): void; + ------^ [6] + + +Error -------------------------------------------------------------------------------------- process/emitWarning.js:11:1 + +Cannot call `process.emitWarning` because: + - Either number [1] is incompatible with string [2]. + - Or number [1] is incompatible with string [3]. + - Or number [1] is incompatible with string [4]. + + process/emitWarning.js:11:1 + 11| process.emitWarning(42); // error + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + process/emitWarning.js:11:21 + 11| process.emitWarning(42); // error + ^^ [1] + /node.js:2585:24 + 2585| emitWarning(warning: string, typeOrCtor: string | (...empty) => mixed): void; + ^^^^^^ [2] + /node.js:2586:24 + 2586| emitWarning(warning: string, type: string, codeOrCtor: string | (...empty) => mixed): void; + ^^^^^^ [3] + /node.js:2588:14 + 2588| warning: string, + ^^^^^^ [4] + + +Error -------------------------------------------------------------------------------------- process/emitWarning.js:12:1 + +Cannot call `process.emitWarning` because: + - Either number [1] is incompatible with string [2]. + - Or number [1] is incompatible with string [3]. + + process/emitWarning.js:12:1 + 12| process.emitWarning("blah", 42); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + process/emitWarning.js:12:29 + 12| process.emitWarning("blah", 42); // error + ^^ [1] + /node.js:2586:38 + 2586| emitWarning(warning: string, type: string, codeOrCtor: string | (...empty) => mixed): void; + ^^^^^^ [2] + /node.js:2589:11 + 2589| type: string, + ^^^^^^ [3] + + +Error -------------------------------------------------------------------------------------- process/emitWarning.js:13:1 + +Cannot call `process.emitWarning` because number [1] is incompatible with string [2]. + + process/emitWarning.js:13:1 + 13| process.emitWarning("blah", "blah", 42); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + process/emitWarning.js:13:37 + 13| process.emitWarning("blah", "blah", 42); // error + ^^ [1] + /node.js:2590:11 + 2590| code: string, + ^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------- process/emitWarning.js:14:2 + +Cannot cast `process.emitWarning(...)` to string because undefined [1] is incompatible with string [2]. 
+ + process/emitWarning.js:14:2 + 14| (process.emitWarning("blah"): string); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /node.js:2584:41 + 2584| emitWarning(warning: string | Error): void; + ^^^^ [1] + process/emitWarning.js:14:31 + 14| (process.emitWarning("blah"): string); // error + ^^^^^^ [2] + + Error ----------------------------------------------------------------------------------------- process/nextTick.js:12:1 Cannot call `process.nextTick` because: @@ -1334,132 +1941,206 @@ References: process/nextTick.js:27:3 27| (a: string, b: number, c: boolean) => {} // Error: too few arguments ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - /node.js:2195:21 - 2195| nextTick: (cb: (...T) => mixed, ...T) => void; + /node.js:2613:21 + 2613| nextTick: (cb: (...T) => mixed, ...T) => void; ^^^^^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------ process/process.js:10:1 +Error ------------------------------------------------------------------------------------------- process/process.js:5:2 -Cannot call `process.emitWarning` because: - - Either undefined [1] is incompatible with string [2]. - - Or undefined [1] is incompatible with `Error` [3]. - - Or function type [4] requires another argument from call of method `emitWarning` [1]. - - Or function type [5] requires another argument from call of method `emitWarning` [1]. - - Or function type [6] requires another argument from call of method `emitWarning` [1]. +Cannot cast `process.allowedNodeEnvironmentFlags` to string because `Set` [1] is incompatible with string [2]. - process/process.js:10:1 - 10| process.emitWarning(); // error - ^^^^^^^^^^^^^^^^^^^^^ [1] + process/process.js:5:2 + 5| (process.allowedNodeEnvironmentFlags: string); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /node.js:2167:24 - 2167| emitWarning(warning: string | Error): void; - ^^^^^^ [2] - /node.js:2167:33 - 2167| emitWarning(warning: string | Error): void; - ^^^^^ [3] - /node.js:2168:3 - 2168| emitWarning(warning: string, typeOrCtor: string | Function): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [4] - /node.js:2169:3 - 2169| emitWarning(warning: string, type: string, codeOrCtor: string | Function): void; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [5] - /node.js:2170:3 - v----------- - 2170| emitWarning( - 2171| warning: string, - 2172| type: string, - 2173| code: string, - 2174| ctor?: Function - 2175| ): void; - ------^ [6] + /node.js:2573:32 + 2573| allowedNodeEnvironmentFlags: Set; + ^^^^^^^^^^^ [1] + process/process.js:5:39 + 5| (process.allowedNodeEnvironmentFlags: string); // error + ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------ process/process.js:11:1 +Error -------------------------------------------------------------------------------------------- stream/stream.js:36:2 -Cannot call `process.emitWarning` because: - - Either number [1] is incompatible with string [2]. - - Or number [1] is incompatible with string [3]. - - Or number [1] is incompatible with string [4]. +Cannot cast `pipe` to `MyDuplex` because `MyWriteStream` [1] is incompatible with `MyDuplex` [2]. 
- process/process.js:11:1 - 11| process.emitWarning(42); // error - ^^^^^^^^^^^^^^^^^^^^^^^ + stream/stream.js:36:2 + 36| (pipe: MyDuplex); // error + ^^^^ References: - process/process.js:11:21 - 11| process.emitWarning(42); // error - ^^ [1] - /node.js:2168:24 - 2168| emitWarning(warning: string, typeOrCtor: string | Function): void; - ^^^^^^ [2] - /node.js:2169:24 - 2169| emitWarning(warning: string, type: string, codeOrCtor: string | Function): void; - ^^^^^^ [3] - /node.js:2171:14 - 2171| warning: string, - ^^^^^^ [4] + stream/stream.js:32:9 + 32| .pipe(new MyWriteStream()); + ^^^^^^^^^^^^^^^^^^^ [1] + stream/stream.js:36:8 + 36| (pipe: MyDuplex); // error + ^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------ process/process.js:12:1 +Error -------------------------------------------------------------------------------------------- stream/stream.js:45:6 -Cannot call `process.emitWarning` because: - - Either number [1] is incompatible with string [2]. - - Or number [1] is incompatible with string [3]. +Cannot cast `error` to null because: + - `Error` [1] is incompatible with null [2]. + - undefined [1] is incompatible with null [2]. - process/process.js:12:1 - 12| process.emitWarning("blah", 42); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + stream/stream.js:45:6 + 45| (error: null); // error + ^^^^^ References: - process/process.js:12:29 - 12| process.emitWarning("blah", 42); // error - ^^ [1] - /node.js:2169:38 - 2169| emitWarning(warning: string, type: string, codeOrCtor: string | Function): void; - ^^^^^^ [2] - /node.js:2172:11 - 2172| type: string, - ^^^^^^ [3] + /node.js:1964:18 + 1964| cb: (error?: Error) => void, + ^^^^^ [1] + stream/stream.js:45:13 + 45| (error: null); // error + ^^^^ [2] -Error ------------------------------------------------------------------------------------------ process/process.js:13:1 +Error -------------------------------------------------------------------------------------------- stream/stream.js:50:2 -Cannot call `process.emitWarning` because number [1] is incompatible with string [2]. +Cannot cast `pipeline` to `MyDuplex` because `MyWriteStream` [1] is incompatible with `MyDuplex` [2]. - process/process.js:13:1 - 13| process.emitWarning("blah", "blah", 42); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + stream/stream.js:50:2 + 50| (pipeline: MyDuplex); // error + ^^^^^^^^ References: - process/process.js:13:37 - 13| process.emitWarning("blah", "blah", 42); // error - ^^ [1] - /node.js:2173:11 - 2173| code: string, - ^^^^^^ [2] + stream/stream.js:42:3 + 42| new MyWriteStream(), + ^^^^^^^^^^^^^^^^^^^ [1] + stream/stream.js:50:12 + 50| (pipeline: MyDuplex); // error + ^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------ process/process.js:14:2 +Error -------------------------------------------------------------------------------------------- stream/stream.js:52:1 -Cannot cast `process.emitWarning(...)` to string because undefined [1] is incompatible with string [2]. +Cannot call `stream.pipeline` because: + - Either `MyWriteStream` [1] is incompatible with `stream$Readable` [2]. + - Or `MyWriteStream` [1] is incompatible with `stream$Readable` [3]. + - Or `MyWriteStream` [1] is incompatible with `stream$Readable` [4]. + - Or `MyWriteStream` [1] is incompatible with `stream$Readable` [5]. + - Or `MyWriteStream` [1] is incompatible with `stream$Readable` [6]. + - Or `MyWriteStream` [1] is incompatible with `stream$Readable` [7]. 
- process/process.js:14:2 - 14| (process.emitWarning("blah"): string); // error - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + stream/stream.js:52:1 + v--------------- + 52| stream.pipeline( + 53| new MyWriteStream(), // error - first stream must be Readable + 54| new MyDuplex(), + 55| () => {}, + 56| ); + ^ References: - /node.js:2167:41 - 2167| emitWarning(warning: string | Error): void; - ^^^^ [1] - process/process.js:14:31 - 14| (process.emitWarning("blah"): string); // error - ^^^^^^ [2] - + stream/stream.js:53:3 + 53| new MyWriteStream(), // error - first stream must be Readable + ^^^^^^^^^^^^^^^^^^^ [1] + /node.js:1949:9 + 1949| s1: stream$Readable, + ^^^^^^^^^^^^^^^ [2] + /node.js:1954:9 + 1954| s1: stream$Readable, + ^^^^^^^^^^^^^^^ [3] + /node.js:1960:9 + 1960| s1: stream$Readable, + ^^^^^^^^^^^^^^^ [4] + /node.js:1967:9 + 1967| s1: stream$Readable, + ^^^^^^^^^^^^^^^ [5] + /node.js:1975:9 + 1975| s1: stream$Readable, + ^^^^^^^^^^^^^^^ [6] + /node.js:1984:9 + 1984| s1: stream$Readable, + ^^^^^^^^^^^^^^^ [7] + + +Error -------------------------------------------------------------------------------------------- stream/stream.js:58:1 + +Cannot call `stream.pipeline` because: + - Either `MyWriteStream` [1] is incompatible with `stream$Duplex` [2]. + - Or `MyWriteStream` [1] is incompatible with `stream$Duplex` [3]. + - Or `MyWriteStream` [1] is incompatible with `stream$Duplex` [4]. + - Or `MyWriteStream` [1] is incompatible with `stream$Duplex` [5]. + - Or `MyWriteStream` [1] is incompatible with `stream$Duplex` [6]. + + stream/stream.js:58:1 + v--------------- + 58| stream.pipeline( + 59| new MyDuplex(), + 60| new MyWriteStream(), // error - middle stream must be Duplex + 61| new MyDuplex(), + 62| () => {}, + 63| ); + ^ +References: + stream/stream.js:60:3 + 60| new MyWriteStream(), // error - middle stream must be Duplex + ^^^^^^^^^^^^^^^^^^^ [1] + /node.js:1955:9 + 1955| s2: stream$Duplex, + ^^^^^^^^^^^^^ [2] + /node.js:1961:9 + 1961| s2: stream$Duplex, + ^^^^^^^^^^^^^ [3] + /node.js:1968:9 + 1968| s2: stream$Duplex, + ^^^^^^^^^^^^^ [4] + /node.js:1976:9 + 1976| s2: stream$Duplex, + ^^^^^^^^^^^^^ [5] + /node.js:1985:9 + 1985| s2: stream$Duplex, + ^^^^^^^^^^^^^ [6] + + +Error -------------------------------------------------------------------------------------------- stream/stream.js:65:1 + +Cannot call `stream.pipeline` because: + - Either `MyReadStream` [1] is incompatible with `stream$Writable` [2]. + - Or `MyReadStream` [1] is incompatible with `stream$Duplex` [3]. + - Or `MyReadStream` [1] is incompatible with `stream$Duplex` [4]. + - Or `MyReadStream` [1] is incompatible with `stream$Duplex` [5]. + - Or `MyReadStream` [1] is incompatible with `stream$Duplex` [6]. + + stream/stream.js:65:1 + v--------------- + 65| stream.pipeline( + 66| new MyDuplex(), + 67| new MyDuplex(), + 68| new MyReadStream(), // error - last stream must be Writable + 69| () => {}, + 70| ); + ^ -Found 77 errors +References: + stream/stream.js:68:3 + 68| new MyReadStream(), // error - last stream must be Writable + ^^^^^^^^^^^^^^^^^^ [1] + /node.js:1953:32 + 1953| declare function pipeline( + ^^^^^^^^^^^^^^^ [2] + /node.js:1962:9 + 1962| s3: stream$Duplex, + ^^^^^^^^^^^^^ [3] + /node.js:1969:9 + 1969| s3: stream$Duplex, + ^^^^^^^^^^^^^ [4] + /node.js:1977:9 + 1977| s3: stream$Duplex, + ^^^^^^^^^^^^^ [5] + /node.js:1986:9 + 1986| s3: stream$Duplex, + ^^^^^^^^^^^^^ [6] + + + +Found 115 errors Only showing the most relevant union/intersection branches. 
To see all branches, re-run Flow with --show-all-branches diff --git a/tests/node_tests/process/emitWarning.js b/tests/node_tests/process/emitWarning.js new file mode 100644 index 00000000000..dffcdab7f3a --- /dev/null +++ b/tests/node_tests/process/emitWarning.js @@ -0,0 +1,14 @@ +/* @flow */ + +/* emitWarning */ + +process.emitWarning("blah"); +process.emitWarning(new Error("blah")); +process.emitWarning("blah", "blah"); +process.emitWarning("blah", "blah", () => {}); + +process.emitWarning(); // error +process.emitWarning(42); // error +process.emitWarning("blah", 42); // error +process.emitWarning("blah", "blah", 42); // error +(process.emitWarning("blah"): string); // error diff --git a/tests/node_tests/process/process.js b/tests/node_tests/process/process.js index dffcdab7f3a..e30fbc82f40 100644 --- a/tests/node_tests/process/process.js +++ b/tests/node_tests/process/process.js @@ -1,14 +1,5 @@ /* @flow */ -/* emitWarning */ +(process.allowedNodeEnvironmentFlags: Set); -process.emitWarning("blah"); -process.emitWarning(new Error("blah")); -process.emitWarning("blah", "blah"); -process.emitWarning("blah", "blah", () => {}); - -process.emitWarning(); // error -process.emitWarning(42); // error -process.emitWarning("blah", 42); // error -process.emitWarning("blah", "blah", 42); // error -(process.emitWarning("blah"): string); // error +(process.allowedNodeEnvironmentFlags: string); // error diff --git a/tests/node_tests/stream/stream.js b/tests/node_tests/stream/stream.js index 044a1da4e7e..9f888c206f7 100644 --- a/tests/node_tests/stream/stream.js +++ b/tests/node_tests/stream/stream.js @@ -26,11 +26,49 @@ class MyWriteStream extends stream.Writable {} class MyDuplex extends stream.Duplex {} class MyTransform extends stream.Duplex {} -new MyReadStream() +var pipe = new MyReadStream() .pipe(new MyDuplex()) .pipe(new MyTransform()) .pipe(new MyWriteStream()); +(pipe: stream.Writable); +(pipe: MyWriteStream); +(pipe: MyDuplex); // error + +var pipeline = stream.pipeline( + new MyReadStream(), + new MyDuplex(), + new MyTransform(), + new MyWriteStream(), + error => { + (error: ?Error); + (error: null); // error + }, +); + +(pipeline: MyWriteStream); +(pipeline: MyDuplex); // error + +stream.pipeline( + new MyWriteStream(), // error - first stream must be Readable + new MyDuplex(), + () => {}, +); + +stream.pipeline( + new MyDuplex(), + new MyWriteStream(), // error - middle stream must be Duplex + new MyDuplex(), + () => {}, +); + +stream.pipeline( + new MyDuplex(), + new MyDuplex(), + new MyReadStream(), // error - last stream must be Writable + () => {}, +); + new MyReadStream() .on('error', () => {}) .pipe(new MyDuplex()) diff --git a/tests/non_array_spread/.flowconfig b/tests/non_array_spread/.flowconfig new file mode 100644 index 00000000000..d1f9953accf --- /dev/null +++ b/tests/non_array_spread/.flowconfig @@ -0,0 +1,11 @@ +[ignore] + +[include] + +[libs] + +[options] +no_flowlib=false + +[lints] +non-array-spread=error diff --git a/tests/non_array_spread/apply.js b/tests/non_array_spread/apply.js new file mode 100644 index 00000000000..3d5a292a490 --- /dev/null +++ b/tests/non_array_spread/apply.js @@ -0,0 +1,13 @@ +f.apply(null, [1,2,3]); // No error + +const it: Iterable = [7,8,9]; +if (Array.isArray(it)) { + f.apply(null, it); // No error +} + +// NOTE: This is ALWAYS incorrect since Function.prototype.apply only accepts +// arrays and array-like objects - never iterables. When that is fixed we can +// update this test to expect the proper error instead of the lint. 
+f.apply(null, it); // Error + +function f(...args) {} diff --git a/tests/non_array_spread/arguments.js b/tests/non_array_spread/arguments.js new file mode 100644 index 00000000000..2ac7dd41f33 --- /dev/null +++ b/tests/non_array_spread/arguments.js @@ -0,0 +1,9 @@ +// No errors are expected in this file. + +function f() { + const a = [...arguments]; + g(...arguments); + g.apply(null, arguments); +} + +function g() {} diff --git a/tests/non_array_spread/compose.js b/tests/non_array_spread/compose.js new file mode 100644 index 00000000000..62f94e085bc --- /dev/null +++ b/tests/non_array_spread/compose.js @@ -0,0 +1,14 @@ +/* @flow */ + +declare var compose: $Compose; +declare var fns1: Iterable<(number) => number>; + +(compose( + ...fns1, // Error +)(42)); + +if (Array.isArray(fns1)) { + (compose( + ...fns1, // No error + )(42)); +} diff --git a/tests/non_array_spread/iterables.js b/tests/non_array_spread/iterables.js new file mode 100644 index 00000000000..611a3229aea --- /dev/null +++ b/tests/non_array_spread/iterables.js @@ -0,0 +1,11 @@ +const it: Iterable = [7,8,9]; +[...it]; // Error +f(...it); // Error +f.bind(null, ...it); // Error +if (Array.isArray(it)) { + [...it]; // No error + f(...it); // No error + f.bind(null, ...it); // No error +} + +function f(...args) {} diff --git a/tests/non_array_spread/maps.js b/tests/non_array_spread/maps.js new file mode 100644 index 00000000000..1d8b3528f52 --- /dev/null +++ b/tests/non_array_spread/maps.js @@ -0,0 +1,12 @@ +const map1 = new Map(); +const map2 = new Map(); +new Map([ + ...map1, // Error + ...map2 // Error +]); +new Map([...Array.from(map1), ...Array.from(map2)]); // No error +f( + ...map1, // Error + ...map2 // Error +); +function f(...args) {} diff --git a/tests/non_array_spread/non_array_spread.exp b/tests/non_array_spread/non_array_spread.exp new file mode 100644 index 00000000000..51be28fb703 --- /dev/null +++ b/tests/non_array_spread/non_array_spread.exp @@ -0,0 +1,136 @@ +Error --------------------------------------------------------------------------------------------------- apply.js:11:15 + +Property `length` is missing in `$Iterable` [1] but exists in `$ArrayLike` [2]. + + apply.js:11:15 + 11| f.apply(null, it); // Error + ^^ + +References: + apply.js:3:11 + 3| const it: Iterable = [7,8,9]; + ^^^^^^^^^^^^^^^^ [1] + /core.js:306:22 + v + 306| type $ArrayLike = { + 307| [indexer: number]: T, + 308| length: number, + 309| ... + 310| } + ^ [2] + + +Error --------------------------------------------------------------------------------------------------- compose.js:7:6 + +Cannot spread non-array iterable `$Iterable` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + compose.js:7:6 + 7| ...fns1, // Error + ^^^^ + +References: + compose.js:4:19 + 4| declare var fns1: Iterable<(number) => number>; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------- iterables.js:2:5 + +Cannot spread non-array iterable `$Iterable` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + iterables.js:2:5 + 2| [...it]; // Error + ^^ + +References: + iterables.js:1:11 + 1| const it: Iterable = [7,8,9]; + ^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------- iterables.js:3:6 + +Cannot spread non-array iterable `$Iterable` [1]. Use `...Array.from()` instead. 
(`non-array-spread`) + + iterables.js:3:6 + 3| f(...it); // Error + ^^ + +References: + iterables.js:1:11 + 1| const it: Iterable = [7,8,9]; + ^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------ iterables.js:4:17 + +Cannot spread non-array iterable `$Iterable` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + iterables.js:4:17 + 4| f.bind(null, ...it); // Error + ^^ + +References: + iterables.js:1:11 + 1| const it: Iterable = [7,8,9]; + ^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ maps.js:4:6 + +Cannot spread non-array iterable `Map` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + maps.js:4:6 + 4| ...map1, // Error + ^^^^ + +References: + maps.js:1:14 + 1| const map1 = new Map(); + ^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ maps.js:5:6 + +Cannot spread non-array iterable `Map` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + maps.js:5:6 + 5| ...map2 // Error + ^^^^ + +References: + maps.js:2:14 + 2| const map2 = new Map(); + ^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ maps.js:9:6 + +Cannot spread non-array iterable `Map` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + maps.js:9:6 + 9| ...map1, // Error + ^^^^ + +References: + maps.js:1:14 + 1| const map1 = new Map(); + ^^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------------- maps.js:10:6 + +Cannot spread non-array iterable `Map` [1]. Use `...Array.from()` instead. (`non-array-spread`) + + maps.js:10:6 + 10| ...map2 // Error + ^^^^ + +References: + maps.js:2:14 + 2| const map2 = new Map(); + ^^^^^^^^^ [1] + + + +Found 9 errors diff --git a/tests/non_array_spread/passing_cases.js b/tests/non_array_spread/passing_cases.js new file mode 100644 index 00000000000..af07bd4697c --- /dev/null +++ b/tests/non_array_spread/passing_cases.js @@ -0,0 +1,18 @@ +// No errors are expected in this file. + +[...[1,2,3]]; +const a: Array = [4,5,6]; +[...a]; +f(...a); +[...a.map(x => x + 1)]; +f(...a.map(x => x + 1)); +const b: [number, string] = [42, "foo"]; +[...b]; +f(...b); +f.apply(null, b); +f.bind(null, ...b); + +function f(...args) {} + +declare var compose: $Compose; +compose(...[x => x, x => x]); diff --git a/tests/non_array_spread/types.js b/tests/non_array_spread/types.js new file mode 100644 index 00000000000..7bfdd5533c0 --- /dev/null +++ b/tests/non_array_spread/types.js @@ -0,0 +1,10 @@ +// No errors are expected in this file. + +type GenericFnType = (...TArgs) => TReturn; +type T = GenericFnType, boolean>; +var t: T = function(x: string, y: string): boolean {return false;} +type U = (number, ...Iterable) => boolean; +var u: U = function(x: number, y: string): boolean {return false;} +type GenericFnType2 = (number, ...TArgs) => TReturn; +type V = (number, ...Iterable) => boolean; +var v: V = function(x: number, y: string): boolean {return false;} diff --git a/tests/nonstrict_import/libs/modules.js b/tests/nonstrict_import/libs/modules.js index 7d77ef3e6bc..d4018f5ee2c 100644 --- a/tests/nonstrict_import/libs/modules.js +++ b/tests/nonstrict_import/libs/modules.js @@ -1,4 +1,4 @@ -// Copyright 2004-present Facebook. All Rights Reserved. +// Copyright (c) Facebook, Inc. and its affiliates. 
declare module "CommonJSModule" { declare module.exports: any; diff --git a/tests/number_constants/number_constants.exp b/tests/number_constants/number_constants.exp index 908dca87d72..e21a74000a8 100644 --- a/tests/number_constants/number_constants.exp +++ b/tests/number_constants/number_constants.exp @@ -7,8 +7,8 @@ Cannot assign `Number.MAX_SAFE_INTEGER` to `b` because number [1] is incompatibl ^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:125:30 - 125| static MAX_SAFE_INTEGER: number; + /core.js:145:30 + 145| static MAX_SAFE_INTEGER: number; ^^^^^^ [1] number_constants.js:2:8 2| var b: string = Number.MAX_SAFE_INTEGER; @@ -24,8 +24,8 @@ Cannot assign `Number.MIN_SAFE_INTEGER` to `d` because number [1] is incompatibl ^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:127:30 - 127| static MIN_SAFE_INTEGER: number; + /core.js:147:30 + 147| static MIN_SAFE_INTEGER: number; ^^^^^^ [1] number_constants.js:4:8 4| var d: string = Number.MIN_SAFE_INTEGER; @@ -41,8 +41,8 @@ Cannot assign `Number.MAX_VALUE` to `f` because number [1] is incompatible with ^^^^^^^^^^^^^^^^ References: - /core.js:126:23 - 126| static MAX_VALUE: number; + /core.js:146:23 + 146| static MAX_VALUE: number; ^^^^^^ [1] number_constants.js:6:8 6| var f: string = Number.MAX_VALUE; @@ -58,8 +58,8 @@ Cannot assign `Number.MIN_VALUE` to `h` because number [1] is incompatible with ^^^^^^^^^^^^^^^^ References: - /core.js:128:23 - 128| static MIN_VALUE: number; + /core.js:148:23 + 148| static MIN_VALUE: number; ^^^^^^ [1] number_constants.js:8:8 8| var h: string = Number.MIN_VALUE; @@ -75,8 +75,8 @@ Cannot assign `Number.NaN` to `j` because number [1] is incompatible with string ^^^^^^^^^^ References: - /core.js:129:17 - 129| static NaN: number; + /core.js:149:17 + 149| static NaN: number; ^^^^^^ [1] number_constants.js:10:8 10| var j: string = Number.NaN; @@ -92,8 +92,8 @@ Cannot assign `Number.EPSILON` to `l` because number [1] is incompatible with st ^^^^^^^^^^^^^^ References: - /core.js:124:21 - 124| static EPSILON: number; + /core.js:144:21 + 144| static EPSILON: number; ^^^^^^ [1] number_constants.js:12:8 12| var l: string = Number.EPSILON; diff --git a/tests/object/.flowconfig b/tests/object/.flowconfig new file mode 100644 index 00000000000..de38d19537d --- /dev/null +++ b/tests/object/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false diff --git a/tests/object/obj.js b/tests/object/obj.js new file mode 100644 index 00000000000..3cb6d4a7885 --- /dev/null +++ b/tests/object/obj.js @@ -0,0 +1,45 @@ +//@flow + +type PropertyDescriptor = number; + +var x = {}; +Object.defineProperty(x, 'a', {get: () => 42}); +Object.defineProperty(x, 'b', {get: () => 20, value: 42}); +Object.defineProperty(x, 'c', {get: () => 'a', value: 42, set: (x: boolean) => {}}); +(x.b: number); +(x.a: number); +(x.c: number); + +declare var y: {|a: number, b: number|}; + +Object.defineProperty(y, 'a', {writable: false}); +Object.defineProperty(y, 'a', {value: "a"}); +Object.defineProperty(y, 'a', {writable: false}); +Object.defineProperty(y, 'b', {value: 42}) +Object.defineProperty(y, 'b', {value: "a"}) +Object.defineProperty(y, 'b', {value: "a"}) +Object.defineProperty(y, 'b', {value: "a"}) //targ arity mismatch +Object.defineProperty(y, 'c', {value: 42}); +Object.defineProperty(y, 'c', {writable:false}); + +Object.defineProperties(x, {d: {value: 42}, e: {writable:false}, f:{get: () => 42}}); +Object.defineProperties(y, {d: {value: 42}, a: {writable:false}, b:{get: () => "a"}}); +Object.defineProperties({}, {}); // targ arity mismatch + +var z = {}; 
+Object.create(z, {a: {get: () => 42}, b: {writable:true}}); +(z.a: number); +z.b = 10; +(z.b: string); + +Object.freeze<{}>({}); +Object.freeze<{a: number}>({a: 42, b: 42}); +Object.freeze<{a: number}>({a: 42, b: 42}).b; // b is hidden +Object.freeze<{c: number}>({a: 42, b: 42}); // c does not exist +Object.freeze({}); // targ arity misnatch + +function f(x: T): {x: T} { + var a = {} + Object.defineProperty(a, 'x', {get: () => x}); + return a; +} diff --git a/tests/object/object.exp b/tests/object/object.exp new file mode 100644 index 00000000000..24b70fd6aa2 --- /dev/null +++ b/tests/object/object.exp @@ -0,0 +1,212 @@ +Error ------------------------------------------------------------------------------------------------------- obj.js:8:1 + +Cannot call method `defineProperty` because: + - boolean [1] is incompatible with string [2] in the first argument of property `set`. + - boolean [1] is incompatible with number [3] in the first argument of property `set`. + + obj.js:8:1 + 8| Object.defineProperty(x, 'c', {get: () => 'a', value: 42, set: (x: boolean) => {}}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:8:68 + 8| Object.defineProperty(x, 'c', {get: () => 'a', value: 42, set: (x: boolean) => {}}); + ^^^^^^^ [1] + obj.js:8:43 + 8| Object.defineProperty(x, 'c', {get: () => 'a', value: 42, set: (x: boolean) => {}}); + ^^^ [2] + obj.js:8:55 + 8| Object.defineProperty(x, 'c', {get: () => 'a', value: 42, set: (x: boolean) => {}}); + ^^ [3] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:11:2 + +Cannot cast `x.c` to number because string [1] is incompatible with number [2]. + + obj.js:11:2 + 11| (x.c: number); + ^^^ + +References: + obj.js:8:43 + 8| Object.defineProperty(x, 'c', {get: () => 'a', value: 42, set: (x: boolean) => {}}); + ^^^ [1] + obj.js:11:7 + 11| (x.c: number); + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:16:1 + +Cannot call method `defineProperty` because string [1] is incompatible with number [2]. + + obj.js:16:1 + 16| Object.defineProperty(y, 'a', {value: "a"}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:16:39 + 16| Object.defineProperty(y, 'a', {value: "a"}); + ^^^ [1] + obj.js:13:21 + 13| declare var y: {|a: number, b: number|}; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:19:1 + +Cannot call method `defineProperty` because string [1] is incompatible with number [2] in property `value`. + + obj.js:19:1 + 19| Object.defineProperty(y, 'b', {value: "a"}) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:19:47 + 19| Object.defineProperty(y, 'b', {value: "a"}) + ^^^ [1] + obj.js:19:23 + 19| Object.defineProperty(y, 'b', {value: "a"}) + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:20:1 + +Cannot call method `defineProperty` because string [1] is incompatible with number [2]. 
+ + obj.js:20:1 + 20| Object.defineProperty(y, 'b', {value: "a"}) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:20:23 + 20| Object.defineProperty(y, 'b', {value: "a"}) + ^^^^^^ [1] + obj.js:13:32 + 13| declare var y: {|a: number, b: number|}; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:21:1 + +Cannot call function without exactly 1 type argument. + + 21| Object.defineProperty(y, 'b', {value: "a"}) //targ arity mismatch + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + +Error ------------------------------------------------------------------------------------------------------ obj.js:22:1 + +Cannot call method `defineProperty` because property `c` is missing in object type [1]. + + obj.js:22:1 + 22| Object.defineProperty(y, 'c', {value: 42}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:13:16 + 13| declare var y: {|a: number, b: number|}; + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:23:1 + +Cannot call method `defineProperty` because property `c` is missing in object type [1]. + + obj.js:23:1 + 23| Object.defineProperty(y, 'c', {writable:false}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:13:16 + 13| declare var y: {|a: number, b: number|}; + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:26:1 + +Cannot call method `defineProperties` because: + - property `d` is missing in object type [1]. + - string [2] is incompatible with number [3]. + + obj.js:26:1 + 26| Object.defineProperties(y, {d: {value: 42}, a: {writable:false}, b:{get: () => "a"}}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:13:16 + 13| declare var y: {|a: number, b: number|}; + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] + obj.js:26:80 + 26| Object.defineProperties(y, {d: {value: 42}, a: {writable:false}, b:{get: () => "a"}}); + ^^^ [2] + obj.js:13:32 + 13| declare var y: {|a: number, b: number|}; + ^^^^^^ [3] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:27:1 + +Cannot call non-polymorphic function with type arguments. + + 27| Object.defineProperties({}, {}); // targ arity mismatch + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + +Error ------------------------------------------------------------------------------------------------------ obj.js:33:2 + +Cannot cast `z.b` to string because number [1] is incompatible with string [2]. + + obj.js:33:2 + 33| (z.b: string); + ^^^ + +References: + obj.js:32:7 + 32| z.b = 10; + ^^ [1] + obj.js:33:7 + 33| (z.b: string); + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:37:1 + +Cannot get `Object.freeze<...>(...).b` because property `b` is missing in object type [1]. 
+ + obj.js:37:1 + 37| Object.freeze<{a: number}>({a: 42, b: 42}).b; // b is hidden + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + obj.js:37:15 + 37| Object.freeze<{a: number}>({a: 42, b: 42}).b; // b is hidden + ^^^^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------------- obj.js:38:28 + +Cannot call method `freeze` with object literal bound to `o` because property `c` is missing in frozen object +literal [1] but exists in object type [2]. + + obj.js:38:28 + 38| Object.freeze<{c: number}>({a: 42, b: 42}); // c does not exist + ^^^^^^^^^^^^^^ [1] + +References: + obj.js:38:15 + 38| Object.freeze<{c: number}>({a: 42, b: 42}); // c does not exist + ^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------------------ obj.js:39:1 + +Cannot call function without exactly 1 type argument. + + 39| Object.freeze({}); // targ arity misnatch + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + + +Found 16 errors diff --git a/tests/object_api/object_api.exp b/tests/object_api/object_api.exp index b37b8ad5730..b8b222eac65 100644 --- a/tests/object_api/object_api.exp +++ b/tests/object_api/object_api.exp @@ -12,24 +12,6 @@ References: ^^^^^^^^^^^^^^^^^^^ [1] -Error -------------------------------------------------------------------------------------------- object_assign.js:7:44 - -Cannot assign `Object.assign(...)` to `decl_export_` because property `bar` is missing in object literal [1] but exists -in object type [2]. - - object_assign.js:7:44 - 7| var decl_export_: { foo: any; bar: any } = Object.assign({}, export_); - ^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - object_assign.js:7:58 - 7| var decl_export_: { foo: any; bar: any } = Object.assign({}, export_); - ^^ [1] - object_assign.js:7:19 - 7| var decl_export_: { foo: any; bar: any } = Object.assign({}, export_); - ^^^^^^^^^^^^^^^^^^^^^^ [2] - - Error -------------------------------------------------------------------------------------------- object_create.js:12:2 Cannot cast object literal to `C` because object literal [1] is incompatible with `C` [2]. 
@@ -115,12 +97,9 @@ Cannot cast `Object.keys(...)` to array type because string [1] is incompatible object_keys.js:18:2 18| (Object.keys(any): Array); // error, Array - ^^^^^^^^^^^^^^^^ + ^^^^^^^^^^^^^^^^ [1] References: - object_keys.js:17:10 - 17| var any: Object = {}; - ^^^^^^ [1] object_keys.js:18:26 18| (Object.keys(any): Array); // error, Array ^^^^^^ [2] @@ -198,8 +177,8 @@ Cannot call method `doesNotExist` because property `doesNotExist` is missing in ^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:38:15 - 38| declare class Object { + /core.js:44:15 + 44| declare class Object { ^^^^^^ [1] @@ -220,8 +199,8 @@ Cannot assign `x.toString` to `xToString` because function type [1] is incompati ^^^^^^^^^^ References: - /core.js:67:5 - 67| toString(): string; + /core.js:84:5 + 84| toString(): string; ^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:38:17 38| var xToString : number = x.toString; // error @@ -237,8 +216,8 @@ Cannot assign `x.toString` to `xToString2` because string [1] is incompatible wi ^^^^^^^^^^ References: - /core.js:67:17 - 67| toString(): string; + /core.js:84:17 + 84| toString(): string; ^^^^^^ [1] object_prototype.js:39:24 39| var xToString2 : () => number = x.toString; // error @@ -254,8 +233,8 @@ Cannot assign `y.toString` to `yToString` because function type [1] is incompati ^^^^^^^^^^ References: - /core.js:67:5 - 67| toString(): string; + /core.js:84:5 + 84| toString(): string; ^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:43:17 43| var yToString : number = y.toString; // error @@ -285,8 +264,8 @@ Cannot call `123.toString` with `'foo'` bound to `radix` because string [1] is i ^^^^^ [1] References: - /core.js:144:22 - 144| toString(radix?: number): string; + /core.js:164:22 + 164| toString(radix?: number): string; ^^^^^^ [2] @@ -299,8 +278,8 @@ Cannot call `123.toString` with `null` bound to `radix` because null [1] is inco ^^^^ [1] References: - /core.js:144:22 - 144| toString(radix?: number): string; + /core.js:164:22 + 164| toString(radix?: number): string; ^^^^^^ [2] @@ -321,9 +300,9 @@ Cannot assign `x.hasOwnProperty` to `xHasOwnProperty` because function type [1] ^^^^^^^^^^^^^^^^ References: - /core.js:63:5 - 63| hasOwnProperty(prop: any): boolean; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /core.js:80:5 + 80| hasOwnProperty(prop: mixed): boolean; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:71:23 71| var xHasOwnProperty : number = x.hasOwnProperty; // error ^^^^^^ [2] @@ -339,9 +318,9 @@ value. ^^^^^^^^^^^^^^^^ References: - /core.js:63:32 - 63| hasOwnProperty(prop: any): boolean; - ^^^^^^^ [1] + /core.js:80:34 + 80| hasOwnProperty(prop: mixed): boolean; + ^^^^^^^ [1] object_prototype.js:72:42 72| var xHasOwnProperty2 : (prop: string) => number = x.hasOwnProperty; // error ^^^^^^ [2] @@ -356,9 +335,9 @@ Cannot assign `y.hasOwnProperty` to `yHasOwnProperty` because function type [1] ^^^^^^^^^^^^^^^^ References: - /core.js:63:5 - 63| hasOwnProperty(prop: any): boolean; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /core.js:80:5 + 80| hasOwnProperty(prop: mixed): boolean; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:76:23 76| var yHasOwnProperty : number = y.hasOwnProperty; // error ^^^^^^ [2] @@ -382,9 +361,9 @@ number [2]. 
^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:65:5 - 65| propertyIsEnumerable(prop: any): boolean; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /core.js:82:5 + 82| propertyIsEnumerable(prop: mixed): boolean; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:96:29 96| var xPropertyIsEnumerable : number = x.propertyIsEnumerable; // error ^^^^^^ [2] @@ -400,9 +379,9 @@ in the return value. ^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:65:38 - 65| propertyIsEnumerable(prop: any): boolean; - ^^^^^^^ [1] + /core.js:82:40 + 82| propertyIsEnumerable(prop: mixed): boolean; + ^^^^^^^ [1] object_prototype.js:97:48 97| var xPropertyIsEnumerable2 : (prop: string) => number = ^^^^^^ [2] @@ -418,9 +397,9 @@ number [2]. ^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:65:5 - 65| propertyIsEnumerable(prop: any): boolean; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /core.js:82:5 + 82| propertyIsEnumerable(prop: mixed): boolean; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:102:29 102| var yPropertyIsEnumerable : number = y.propertyIsEnumerable; // error ^^^^^^ [2] @@ -443,8 +422,8 @@ Cannot assign `x.valueOf` to `xValueOf` because function type [1] is incompatibl ^^^^^^^^^ References: - /core.js:396:5 - 396| valueOf(): number; + /core.js:433:5 + 433| valueOf(): number; ^^^^^^^^^^^^^^^^^ [1] object_prototype.js:122:16 122| var xValueOf : number = x.valueOf; // error @@ -460,8 +439,8 @@ Cannot assign `y.valueOf` to `yValueOf` because function type [1] is incompatibl ^^^^^^^^^ References: - /core.js:68:5 - 68| valueOf(): mixed; + /core.js:85:5 + 85| valueOf(): mixed; ^^^^^^^^^^^^^^^^ [1] object_prototype.js:126:16 126| var yValueOf : number = y.valueOf; // error @@ -485,8 +464,8 @@ Cannot assign `x.toLocaleString` to `xToLocaleString` because function type [1] ^^^^^^^^^^^^^^^^ References: - /core.js:392:5 - 392| toLocaleString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; + /core.js:429:5 + 429| toLocaleString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:150:23 150| var xToLocaleString : number = x.toLocaleString; // error @@ -503,8 +482,8 @@ value. 
^^^^^^^^^^^^^^^^ References: - /core.js:392:93 - 392| toLocaleString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; + /core.js:429:93 + 429| toLocaleString(locales?: string | Array, options?: Intl$DateTimeFormatOptions): string; ^^^^^^ [1] object_prototype.js:151:30 151| var xToLocaleString2 : () => number = x.toLocaleString; // error @@ -520,8 +499,8 @@ Cannot assign `y.toLocaleString` to `yToLocaleString` because function type [1] ^^^^^^^^^^^^^^^^ References: - /core.js:66:5 - 66| toLocaleString(): string; + /core.js:83:5 + 83| toLocaleString(): string; ^^^^^^^^^^^^^^^^^^^^^^^^ [1] object_prototype.js:155:23 155| var yToLocaleString : number = y.toLocaleString; // error @@ -537,8 +516,8 @@ Cannot cast `o1_proto.toString` to empty because function type [1] is incompatib ^^^^^^^^^^^^^^^^^ References: - /core.js:67:5 - 67| toString(): string; + /core.js:84:5 + 84| toString(): string; ^^^^^^^^^^^^^^^^^^ [1] proto.js:3:21 3| (o1_proto.toString: empty); // error: function ~> empty @@ -591,4 +570,4 @@ References: -Found 39 errors +Found 38 errors diff --git a/tests/object_api/object_create.js b/tests/object_api/object_create.js index e15effcb588..b31e4b41f9e 100644 --- a/tests/object_api/object_create.js +++ b/tests/object_api/object_create.js @@ -16,4 +16,4 @@ type O = { foo: string; } declare var o: O; (o: C); -(Object.create(({}: Object)): C); // OK: AnyObjT might be C, who knows +(Object.create(({}: Object)): C); // OK: AnyT might be C, who knows diff --git a/tests/object_assign/B.js b/tests/object_assign/B.js index 01cf2921a7a..49a64942da8 100644 --- a/tests/object_assign/B.js +++ b/tests/object_assign/B.js @@ -7,6 +7,6 @@ var A = require('./A.js'); var good: number = A.Good.foo(); // string ~> number var f = A.Bad.foo; // Property access is fine -var bad_: number = f(); // errors: string ~> number, global object incompatible with `this` +var bad_: number = f(); // error: string ~> number var bad: number = A.Bad.foo(); // error: string, number (but `this` types are compatible) diff --git a/tests/object_assign/apply.js b/tests/object_assign/apply.js index 6583510c8ad..edea151f561 100644 --- a/tests/object_assign/apply.js +++ b/tests/object_assign/apply.js @@ -1,3 +1,15 @@ // @flow (Object.assign.apply(null, [{}, {a: 1}, {b: 'foo'}]): {a: number, b: string}); +(Object.assign.apply(null, [{}, {a: 1}, {b: 2}]): {a: number, b: string}); // error +(Object.assign.apply({}, {a: 1}, {b: 'foo'}): {a: number, b: string}); // error + +(Object.assign.call({}, [{a: 1}, {b: 'foo'}]): {a: number, b: string}); // error +(Object.assign.call({}, {a: 1}, {b: 'foo'}): {a: number, b: string}); +(Object.assign.call({}, {a: 1}, {b: 2}): {a: number, b: string}); // error + +(Object.assign.length : number); +(Object.assign.length : string); // error + +(Object.assign.name : number); // error +(Object.assign.name : string); diff --git a/tests/object_assign/object_assign.exp b/tests/object_assign/object_assign.exp index 3400aa8c0a1..b4f7b1a2eb8 100644 --- a/tests/object_assign/object_assign.exp +++ b/tests/object_assign/object_assign.exp @@ -54,7 +54,7 @@ Error -------------------------------------------------------------------------- Cannot assign `f()` to `bad_` because string [1] is incompatible with number [2]. 
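The `apply.js` additions above also pin down how `Object.assign.apply`, `Object.assign.call`, and the function's own statics are checked. A minimal sketch under the same shapes, with hypothetical `viaApply`/`viaCall` bindings:

// @flow
// Hypothetical sketch: `apply` takes the sources as one array, `call` takes them
// as separate arguments after the thisArg, and the result is checked like a
// direct Object.assign call.
const viaApply = Object.assign.apply(null, [{}, {a: 1}, {b: 'two'}]);
(viaApply: {a: number, b: string}); // ok, mirrors the passing apply.js case
const viaCall = Object.assign.call({}, {a: 1}, {b: 'two'});
(viaCall: {a: number, b: string});  // ok, mirrors the passing call case
(Object.assign.length: number);     // ok: the statics `length` and `name` are typed too
(Object.assign.name: string);       // ok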
B.js:10:20 - 10| var bad_: number = f(); // errors: string ~> number, global object incompatible with `this` + 10| var bad_: number = f(); // error: string ~> number ^^^ References: @@ -62,24 +62,10 @@ References: 9| foo: function(): string { return 'hi'; } ^^^^^^ [1] B.js:10:11 - 10| var bad_: number = f(); // errors: string ~> number, global object incompatible with `this` + 10| var bad_: number = f(); // error: string ~> number ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------------------- B.js:10:20 - -Cannot call `f` because property `foo` is missing in global object [1] but exists in object literal [2]. - - B.js:10:20 - 10| var bad_: number = f(); // errors: string ~> number, global object incompatible with `this` - ^^^ [1] - -References: - A.js:8:25 - 8| var Bad = Object.assign({}, EventEmitter.prototype, { - ^^ [2] - - Error ------------------------------------------------------------------------------------------------------- B.js:12:19 Cannot assign `A.Bad.foo()` to `bad` because string [1] is incompatible with number [2]. @@ -97,9 +83,106 @@ References: ^^^^^^ [2] +Error ----------------------------------------------------------------------------------------------------- apply.js:4:2 + +Cannot cast `Object.assign.apply(...)` to object type because number [1] is incompatible with string [2] in property +`b`. + + apply.js:4:2 + 4| (Object.assign.apply(null, [{}, {a: 1}, {b: 2}]): {a: number, b: string}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + apply.js:4:45 + 4| (Object.assign.apply(null, [{}, {a: 1}, {b: 2}]): {a: number, b: string}); // error + ^ [1] + apply.js:4:66 + 4| (Object.assign.apply(null, [{}, {a: 1}, {b: 2}]): {a: number, b: string}); // error + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------------------- apply.js:5:2 + +Cannot call `Object.assign.apply` because no more than 2 arguments are expected by function type [1]. + + apply.js:5:2 + 5| (Object.assign.apply({}, {a: 1}, {b: 'foo'}): {a: number, b: string}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /core.js:126:18 + 126| proto apply: Function$Prototype$Apply; // (thisArg: any, argArray?: any) => any + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------------- apply.js:7:2 + +Incorrect arguments passed to call of method `call` because array literal [1] is not an object. + + apply.js:7:2 + 7| (Object.assign.call({}, [{a: 1}, {b: 'foo'}]): {a: number, b: string}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + apply.js:7:25 + 7| (Object.assign.call({}, [{a: 1}, {b: 'foo'}]): {a: number, b: string}); // error + ^^^^^^^^^^^^^^^^^^^^ [1] + + +Error ----------------------------------------------------------------------------------------------------- apply.js:9:2 + +Cannot cast `Object.assign.call(...)` to object type because number [1] is incompatible with string [2] in property `b`. 
+ + apply.js:9:2 + 9| (Object.assign.call({}, {a: 1}, {b: 2}): {a: number, b: string}); // error + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + apply.js:9:37 + 9| (Object.assign.call({}, {a: 1}, {b: 2}): {a: number, b: string}); // error + ^ [1] + apply.js:9:57 + 9| (Object.assign.call({}, {a: 1}, {b: 2}): {a: number, b: string}); // error + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- apply.js:12:2 + +Cannot cast `Object.assign.length` to string because number [1] is incompatible with string [2]. + + apply.js:12:2 + 12| (Object.assign.length : string); // error + ^^^^^^^^^^^^^^^^^^^^ + +References: + /core.js:132:13 + 132| length: number; + ^^^^^^ [1] + apply.js:12:25 + 12| (Object.assign.length : string); // error + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- apply.js:14:2 + +Cannot cast `Object.assign.name` to number because string [1] is incompatible with number [2]. + + apply.js:14:2 + 14| (Object.assign.name : number); // error + ^^^^^^^^^^^^^^^^^^ + +References: + /core.js:133:11 + 133| name: string; + ^^^^^^ [1] + apply.js:14:23 + 14| (Object.assign.name : number); // error + ^^^^^^ [2] + + Error ----------------------------------------------------------------------------------------------- non_objects.js:3:1 -Property `a` is missing in string [1]. +Incorrect arguments passed to call of method `assign` because property `a` is missing in string [1]. non_objects.js:3:1 3| Object.assign("123", {a: "foo"}); @@ -113,7 +196,7 @@ References: Error ----------------------------------------------------------------------------------------------- non_objects.js:4:1 -Property `a` is missing in number [1]. +Incorrect arguments passed to call of method `assign` because property `a` is missing in number [1]. non_objects.js:4:1 4| Object.assign(123, {a: "foo"}); @@ -127,7 +210,7 @@ References: Error ----------------------------------------------------------------------------------------------- non_objects.js:5:1 -number [1] is not an object. +Incorrect arguments passed to call of method `assign` because number [1] is not an object. non_objects.js:5:1 5| Object.assign({a: "foo"}, 123); @@ -139,6 +222,23 @@ References: ^^^ [1] +Error ---------------------------------------------------------------------------------------------------- scope.js:33:4 + +Cannot cast `obj.attr` to number because string [1] is incompatible with number [2]. + + scope.js:33:4 + 33| (obj.attr: number); + ^^^^^^^^ + +References: + scope.js:31:16 + 31| obj.attr = "hello"; + ^^^^^^^ [1] + scope.js:33:14 + 33| (obj.attr: number); + ^^^^^^ [2] + + Error ---------------------------------------------------------------------------------------------------- spread.js:7:2 Cannot cast `Object.assign(...)` to object type because string [1] is incompatible with number [2] in property `foo`. 
@@ -214,4 +314,4 @@ References: -Found 13 errors +Found 19 errors diff --git a/tests/object_assign/scope.js b/tests/object_assign/scope.js new file mode 100644 index 00000000000..6c5d1b51b73 --- /dev/null +++ b/tests/object_assign/scope.js @@ -0,0 +1,34 @@ +//@flow +type Obj = { attr: number | string } + +function f(obj: Obj, b: boolean) { + obj.attr = 42; + if (b) { + obj.attr = "hello"; + } else { + obj.attr = "hello"; + } +} + +function g(obj: Obj, b: boolean) { + obj.attr = 42; + if (b) { + obj.attr = "hello"; + } +} + + +function h(obj: Obj, b: boolean) { + obj.attr = 42; + if (obj.attr) { + obj.attr = "hello"; + } +} + +function i(obj: Obj, b: boolean) { + obj.attr = 42; + if (b) { + obj.attr = "hello"; + } + (obj.attr: number); +} diff --git a/tests/object_freeze/object_freeze.exp b/tests/object_freeze/object_freeze.exp index 86696718b16..9c8809eb4b4 100644 --- a/tests/object_freeze/object_freeze.exp +++ b/tests/object_freeze/object_freeze.exp @@ -8,7 +8,7 @@ Cannot assign `'23456'` to `foo.bar` because property `bar` is not writable. Error --------------------------------------------------------------------------------------------- object_freeze.js:6:1 -Property `bar` is not writable. +Incorrect arguments passed to call of method `assign` because property `bar` is not writable. 6| Object.assign(foo, {bar: '12345'}); // error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/object_freeze/object_freeze.js b/tests/object_freeze/object_freeze.js index 3ae97e5530c..5a828c42271 100644 --- a/tests/object_freeze/object_freeze.js +++ b/tests/object_freeze/object_freeze.js @@ -23,5 +23,5 @@ function f(x: Object) { (Object.freeze({...x}): Object); // ok let y = Object.freeze({...x}); - y.foo = "bar"; // there is no frozen form of AnyObjT so this is "allowed" + y.foo = "bar"; // there is no frozen form of AnyT so this is "allowed" } diff --git a/tests/object_freeze2/.flowconfig b/tests/object_freeze2/.flowconfig new file mode 100644 index 00000000000..b5cff59a8d2 --- /dev/null +++ b/tests/object_freeze2/.flowconfig @@ -0,0 +1,3 @@ +[options] +no_flowlib=false +experimental.types_first=true diff --git a/tests/object_freeze2/downstream.js b/tests/object_freeze2/downstream.js new file mode 100644 index 00000000000..19b6bf0039e --- /dev/null +++ b/tests/object_freeze2/downstream.js @@ -0,0 +1,8 @@ +// @flow + +import typeof T from './nested_frozen_object'; +declare var o: T; + +(o.a: number); + +module.exports = o; diff --git a/tests/object_freeze2/nested_frozen_object.js b/tests/object_freeze2/nested_frozen_object.js new file mode 100644 index 00000000000..797b1b42718 --- /dev/null +++ b/tests/object_freeze2/nested_frozen_object.js @@ -0,0 +1,7 @@ +// @flow + +module.exports = Object.freeze({ + a: Object.freeze({ + b: 'c', + }), +}); diff --git a/tests/object_freeze2/object_freeze2.exp b/tests/object_freeze2/object_freeze2.exp new file mode 100644 index 00000000000..78d87c97a61 --- /dev/null +++ b/tests/object_freeze2/object_freeze2.exp @@ -0,0 +1,22 @@ +Error ------------------------------------------------------------------------------------------------ downstream.js:6:2 + +Cannot cast `o.a` to number because frozen object type [1] is incompatible with number [2]. 
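The `object_freeze2` files above check that a nested `Object.freeze` keeps its frozen object type across a module boundary. A minimal in-file sketch of the same shape, with a hypothetical `config` value:

// @flow
// Hypothetical sketch of the nested-freeze shape tested above: the frozen
// member keeps its object type, so it cannot be treated as a number.
const config = Object.freeze({inner: Object.freeze({label: 'on'})});
(config.inner.label: string); // ok
(config.inner: number);       // error: frozen object type is incompatible with number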
+ + downstream.js:6:2 + 6| (o.a: number); + ^^^ + +References: + nested_frozen_object.js:4:20 + v + 4| a: Object.freeze({ + 5| b: 'c', + 6| }), + ^ [1] + downstream.js:6:7 + 6| (o.a: number); + ^^^^^^ [2] + + + +Found 1 error diff --git a/tests/object_is/object_is.exp b/tests/object_is/object_is.exp index 9437c7d0ff4..29e0c2380e4 100644 --- a/tests/object_is/object_is.exp +++ b/tests/object_is/object_is.exp @@ -7,9 +7,9 @@ Cannot assign `Object.is(...)` to `b` because boolean [1] is incompatible with s ^^^^^^^^^^^^^^^^^^^ References: - /core.js:54:32 - 54| static is(a: any, b: any): boolean; - ^^^^^^^ [1] + /core.js:69:31 + 69| static is(a: T, b: T): boolean; + ^^^^^^^ [1] object_is.js:20:8 20| var b: string = Object.is('a', 'a'); ^^^^^^ [2] @@ -24,9 +24,9 @@ Cannot call method `is` because no more than 2 arguments are expected by functio ^^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:54:5 - 54| static is(a: any, b: any): boolean; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /core.js:69:5 + 69| static is(a: T, b: T): boolean; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] diff --git a/tests/object_prelude/.flowconfig b/tests/object_prelude/.flowconfig new file mode 100644 index 00000000000..8108079446a --- /dev/null +++ b/tests/object_prelude/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=true diff --git a/tests/object_prelude/obj.js b/tests/object_prelude/obj.js new file mode 100644 index 00000000000..27587355591 --- /dev/null +++ b/tests/object_prelude/obj.js @@ -0,0 +1,9 @@ +//@flow + +var x = {}; +Object.defineProperty(x, 'a', {get: () => 42}); +Object.defineProperty(x, 'b', {value: "a"}) +Object.defineProperties(x, {d: {value: 42}, e: {writable:false}, f:{get: () => 42}}); + +var z = {}; +Object.create(z, {a: {get: () => 42}, b: {writable:true}}); diff --git a/tests/object_prelude/object_prelude.exp b/tests/object_prelude/object_prelude.exp new file mode 100644 index 00000000000..2829d581f51 --- /dev/null +++ b/tests/object_prelude/object_prelude.exp @@ -0,0 +1 @@ +Found 0 errors diff --git a/tests/objects/objects.exp b/tests/objects/objects.exp index 4973f74e266..f38f4826df5 100644 --- a/tests/objects/objects.exp +++ b/tests/objects/objects.exp @@ -128,9 +128,9 @@ Cannot cast `Object(...)` to `Number` because object type [1] is incompatible wi ^^^^^^^^^^^^^^^^^ References: - /core.js:39:24 - 39| static (o: ?void): {[key: any]: any}; - ^^^^^^^^^^^^^^^^^ [1] + /core.js:45:24 + 45| static (o: ?void): { [key: any]: any, ... }; + ^^^^^^^^^^^^^^^^^^^^^^^^ [1] conversion.js:10:21 10| (Object(undefined): Number); // error ^^^^^^ [2] @@ -145,8 +145,8 @@ Cannot call `z.charAt` because property `charAt` is missing in `Number` [1]. 
^^^^^^^^^^^ References: - /core.js:41:25 - 41| static (o: number): Number; + /core.js:47:25 + 47| static (o: number): Number; ^^^^^^ [1] @@ -277,9 +277,9 @@ Cannot cast `y['hasOwnProperty']` to string because function type [1] is incompa ^^^^^^^^^^^^^^^^^^^ References: - /core.js:63:5 - 63| hasOwnProperty(prop: any): boolean; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /core.js:80:5 + 80| hasOwnProperty(prop: mixed): boolean; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] objects.js:18:23 18| (y['hasOwnProperty']: string); // error, prototype method is not a string ^^^^^^ [2] diff --git a/tests/objmap/arity.js b/tests/objmap/arity.js new file mode 100644 index 00000000000..28ab0b4e22c --- /dev/null +++ b/tests/objmap/arity.js @@ -0,0 +1,32 @@ +// @flow + +function g(o: T): $ObjMap { + return o; +} + +function f1(o: Columns): Columns { + return g(o); +} + +function f2(o: Columns): Columns { + return g(o); +} + +function f3(o: T): T { + return g(o); +} + +function f4(o: T): T { + return g(o); +} + +function h(o: Columns): $ObjMap { + return o; +} + +type Columns = {[string]: number}; + +declare function makeEditedColumn( + a: number, + b: string, +): number; diff --git a/tests/objmap/arity2.js b/tests/objmap/arity2.js new file mode 100644 index 00000000000..7c9bbf4a14b --- /dev/null +++ b/tests/objmap/arity2.js @@ -0,0 +1,33 @@ +// @flow + +function g(o: T): $ObjMapi { + return o; +} + +function f1(o: Columns): Columns { + return g(o); +} + +function f2(o: Columns): Columns { + return g(o); +} + +function f3(o: T): T { + return g(o); +} + +function f4(o: T): T { + return g(o); +} + +function h(o: Columns): $ObjMapi { + return o; +} + +type Columns = {[string]: number}; + +declare function makeEditedColumn( + a: string, + b: number, + c: boolean, +): number; diff --git a/tests/objmap/arity3.js b/tests/objmap/arity3.js new file mode 100644 index 00000000000..de577d6eb08 --- /dev/null +++ b/tests/objmap/arity3.js @@ -0,0 +1,24 @@ +// @flow + +function g>(o: T): $TupleMap { + return o; +} + +function f1(o: Columns): Columns { + return g(o); +} + +function f2(o: Columns): Columns { + return g(o); +} + +function h(o: Columns): $TupleMap { + return o; +} + +type Columns = Array; + +declare function makeEditedColumn( + a: number, + b: string, +): number; diff --git a/tests/objmap/arity4.js b/tests/objmap/arity4.js new file mode 100644 index 00000000000..169e0c4d668 --- /dev/null +++ b/tests/objmap/arity4.js @@ -0,0 +1,24 @@ +// @flow + +function g(o: T): $Call { + return o; +} + +function f1(o: Columns): Columns { + return g(o); +} + +function f2(o: Columns): Columns { + return g(o); +} + +function h(o: Columns): $Call { + return o; +} + +type Columns = number; + +declare function makeEditedColumn( + a: number, + b: string, +): number; diff --git a/tests/objmap/identity.js b/tests/objmap/identity.js index 9e6a7fb406a..1e9cd409cbf 100644 --- a/tests/objmap/identity.js +++ b/tests/objmap/identity.js @@ -26,7 +26,7 @@ declare var foo: Foo; b: string, // Error: string ~> number }): Foo); -({}: Foo); // Error: `a` and `b` are not defined. +(({}: {}): Foo); // Error: `a` and `b` are not defined. ((any: {}): Foo); // Error: `a` and `b` are not defined. 
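The `arity*.js` files above feed multi-parameter function types to `$ObjMap`, `$ObjMapi`, `$TupleMap`, and `$Call`; the expectations that follow spell out the new arity errors. For contrast, a minimal sketch of a mapper that satisfies the `$ObjMap` rule, using a hypothetical `toLabel` function:

// @flow
// Hypothetical mapper with exactly one parameter (the value type), which is
// what the $ObjMap arity errors below require.
type Columns = {[string]: number};
declare function toLabel(value: number): string;
type LabelledColumns = $ObjMap<Columns, typeof toLabel>; // ok: one-argument mapper
declare var labels: LabelledColumns;
(labels['width']: string); // values are mapped from number to string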
diff --git a/tests/objmap/objmap.exp b/tests/objmap/objmap.exp index 7ebcefde35c..67cb7de3c95 100644 --- a/tests/objmap/objmap.exp +++ b/tests/objmap/objmap.exp @@ -1,3 +1,457 @@ +Error ---------------------------------------------------------------------------------------------------- arity.js:8:10 + +Cannot call `g` because `$ObjMap` [1] expects the provided function type to take only one argument, the value type +number [2], but function type [3] takes more than one argument. See https://flow.org/en/docs/types/utilities/#toc-objmap +for documentation. + + arity.js:8:10 + 8| return g(o); + ^^^^ + +References: + arity.js:3:26 + 3| function g(o: T): $ObjMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: number, + 31| b: string, + 32| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity.js:12:10 + +Cannot call `g` because `$ObjMap` [1] expects the provided function type to take only one argument, the value type +number [2], but function type [3] takes more than one argument. See https://flow.org/en/docs/types/utilities/#toc-objmap +for documentation. + + arity.js:12:10 + 12| return g(o); + ^^^^^^^^^^^^^ + +References: + arity.js:3:26 + 3| function g(o: T): $ObjMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: number, + 31| b: string, + 32| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity.js:16:10 + +Cannot call `g` because `$ObjMap` [1] expects the provided function type to take only one argument, the value type +number [2], but function type [3] takes more than one argument. See https://flow.org/en/docs/types/utilities/#toc-objmap +for documentation. + + arity.js:16:10 + 16| return g(o); + ^^^^ + +References: + arity.js:3:26 + 3| function g(o: T): $ObjMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: number, + 31| b: string, + 32| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity.js:20:10 + +Cannot call `g` because `$ObjMap` [1] expects the provided function type to take only one argument, the value type +number [2], but function type [3] takes more than one argument. See https://flow.org/en/docs/types/utilities/#toc-objmap +for documentation. + + arity.js:20:10 + 20| return g(o); + ^^^^^^^ + +References: + arity.js:3:26 + 3| function g(o: T): $ObjMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: number, + 31| b: string, + 32| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity.js:23:25 + +Cannot instantiate `$ObjMap` because `$ObjMap` [1] expects the provided function type to take only one argument, the +value type number [2], but function type [3] takes more than one argument. See +https://flow.org/en/docs/types/utilities/#toc-objmap for documentation. 
+ + arity.js:23:25 + 23| function h(o: Columns): $ObjMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + arity.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: number, + 31| b: string, + 32| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity2.js:8:10 + +Cannot call `g` because `$ObjMapi` [1] expects the provided function type to take only two arguments, the key type +string [2] and the value type number [3], but function type [4] takes more than two arguments. See +https://flow.org/en/docs/types/utilities/#toc-objmapi for documentation. + + arity2.js:8:10 + 8| return g(o); + ^^^^ + +References: + arity2.js:3:26 + 3| function g(o: T): $ObjMapi { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity2.js:27:18 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity2.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [3] + arity2.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: string, + 31| b: number, + 32| c: boolean, + 33| ): number; + --------^ [4] + + +Error -------------------------------------------------------------------------------------------------- arity2.js:12:10 + +Cannot call `g` because `$ObjMapi` [1] expects the provided function type to take only two arguments, the key type +string [2] and the value type number [3], but function type [4] takes more than two arguments. See +https://flow.org/en/docs/types/utilities/#toc-objmapi for documentation. + + arity2.js:12:10 + 12| return g(o); + ^^^^^^^^^^^^^ + +References: + arity2.js:3:26 + 3| function g(o: T): $ObjMapi { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity2.js:27:18 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity2.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [3] + arity2.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: string, + 31| b: number, + 32| c: boolean, + 33| ): number; + --------^ [4] + + +Error -------------------------------------------------------------------------------------------------- arity2.js:16:10 + +Cannot call `g` because `$ObjMapi` [1] expects the provided function type to take only two arguments, the key type +string [2] and the value type number [3], but function type [4] takes more than two arguments. See +https://flow.org/en/docs/types/utilities/#toc-objmapi for documentation. + + arity2.js:16:10 + 16| return g(o); + ^^^^ + +References: + arity2.js:3:26 + 3| function g(o: T): $ObjMapi { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity2.js:27:18 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity2.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [3] + arity2.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: string, + 31| b: number, + 32| c: boolean, + 33| ): number; + --------^ [4] + + +Error -------------------------------------------------------------------------------------------------- arity2.js:20:10 + +Cannot call `g` because `$ObjMapi` [1] expects the provided function type to take only two arguments, the key type +string [2] and the value type number [3], but function type [4] takes more than two arguments. See +https://flow.org/en/docs/types/utilities/#toc-objmapi for documentation. 
+ + arity2.js:20:10 + 20| return g(o); + ^^^^^^^ + +References: + arity2.js:3:26 + 3| function g(o: T): $ObjMapi { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity2.js:27:18 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity2.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [3] + arity2.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: string, + 31| b: number, + 32| c: boolean, + 33| ): number; + --------^ [4] + + +Error -------------------------------------------------------------------------------------------------- arity2.js:23:25 + +Cannot instantiate `$ObjMapi` because `$ObjMapi` [1] expects the provided function type to take only two arguments, the +key type string [2] and the value type number [3], but function type [4] takes more than two arguments. See +https://flow.org/en/docs/types/utilities/#toc-objmapi for documentation. + + arity2.js:23:25 + 23| function h(o: Columns): $ObjMapi { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + arity2.js:27:18 + 27| type Columns = {[string]: number}; + ^^^^^^ [2] + arity2.js:27:27 + 27| type Columns = {[string]: number}; + ^^^^^^ [3] + arity2.js:29:34 + v + 29| declare function makeEditedColumn( + 30| a: string, + 31| b: number, + 32| c: boolean, + 33| ): number; + --------^ [4] + + +Error --------------------------------------------------------------------------------------------------- arity3.js:3:46 + +Cannot instantiate `$TupleMap` because `$TupleMap` [1] expects the provided function type to take only one argument, the +value type number [2], but function type [3] takes more than one argument. See +https://flow.org/en/docs/types/utilities/#toc-tuplemap for documentation. + + arity3.js:3:46 + 3| function g>(o: T): $TupleMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + arity3.js:3:30 + 3| function g>(o: T): $TupleMap { + ^^^^^^ [2] + arity3.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity3.js:8:10 + +Cannot call `g` because `$TupleMap` [1] expects the provided function type to take only one argument, the value type +number [2], but function type [3] takes more than one argument. See +https://flow.org/en/docs/types/utilities/#toc-tuplemap for documentation. + + arity3.js:8:10 + 8| return g(o); + ^^^^ + +References: + arity3.js:3:46 + 3| function g>(o: T): $TupleMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity3.js:3:30 + 3| function g>(o: T): $TupleMap { + ^^^^^^ [2] + arity3.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [3] + + +Error -------------------------------------------------------------------------------------------------- arity3.js:12:10 + +Cannot call `g` because `$TupleMap` [1] expects the provided function type to take only one argument, the value type +number [2], but function type [3] takes more than one argument. See +https://flow.org/en/docs/types/utilities/#toc-tuplemap for documentation. 
+ + arity3.js:12:10 + 12| return g(o); + ^^^^^^^^^^^^^ + +References: + arity3.js:3:46 + 3| function g>(o: T): $TupleMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity3.js:19:22 + 19| type Columns = Array; + ^^^^^^ [2] + arity3.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [3] + + +Error -------------------------------------------------------------------------------------------------- arity3.js:15:25 + +Cannot instantiate `$TupleMap` because `$TupleMap` [1] expects the provided function type to take only one argument, the +value type number [2], but function type [3] takes more than one argument. See +https://flow.org/en/docs/types/utilities/#toc-tuplemap for documentation. + + arity3.js:15:25 + 15| function h(o: Columns): $TupleMap { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + arity3.js:19:22 + 19| type Columns = Array; + ^^^^^^ [2] + arity3.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [3] + + +Error --------------------------------------------------------------------------------------------------- arity4.js:3:30 + +Cannot instantiate `$Call` because `$Call` [1] passes only one argument to the provided function type, but function +type [2] expects more than one argument. See https://flow.org/en/docs/types/utilities/#toc-call for documentation. + + arity4.js:3:30 + 3| function g(o: T): $Call { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + arity4.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [2] + + +Error --------------------------------------------------------------------------------------------------- arity4.js:8:10 + +Cannot call `g` because `$Call` [1] passes only one argument to the provided function type, but function type [2] +expects more than one argument. See https://flow.org/en/docs/types/utilities/#toc-call for documentation. + + arity4.js:8:10 + 8| return g(o); + ^^^^ + +References: + arity4.js:3:30 + 3| function g(o: T): $Call { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity4.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [2] + + +Error -------------------------------------------------------------------------------------------------- arity4.js:12:10 + +Cannot call `g` because `$Call` [1] passes only one argument to the provided function type, but function type [2] +expects more than one argument. See https://flow.org/en/docs/types/utilities/#toc-call for documentation. + + arity4.js:12:10 + 12| return g(o); + ^^^^^^^^^^^^^ + +References: + arity4.js:3:30 + 3| function g(o: T): $Call { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + arity4.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [2] + + +Error -------------------------------------------------------------------------------------------------- arity4.js:15:25 + +Cannot instantiate `$Call` because `$Call` [1] passes only one argument to the provided function type, but function +type [2] expects more than one argument. See https://flow.org/en/docs/types/utilities/#toc-call for documentation. 
+ + arity4.js:15:25 + 15| function h(o: Columns): $Call { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + +References: + arity4.js:21:34 + v + 21| declare function makeEditedColumn( + 22| a: number, + 23| b: string, + 24| ): number; + --------^ [2] + + Error ------------------------------------------------------------------------------------------------- identity.js:19:2 Cannot cast object literal to `Foo` because: @@ -56,20 +510,23 @@ References: ^^^^^^ [4] -Error ------------------------------------------------------------------------------------------------- identity.js:29:2 +Error ------------------------------------------------------------------------------------------------- identity.js:29:3 Cannot cast object literal to `Foo` because: - - property `a` is missing in object literal [1] but exists in object type [2]. - - property `b` is missing in object literal [1] but exists in object type [2]. + - property `a` is missing in object type [1] but exists in object type [2]. + - property `b` is missing in object type [1] but exists in object type [2]. - identity.js:29:2 - 29| ({}: Foo); // Error: `a` and `b` are not defined. - ^^ [1] + identity.js:29:3 + 29| (({}: {}): Foo); // Error: `a` and `b` are not defined. + ^^^^^^ References: - identity.js:29:6 - 29| ({}: Foo); // Error: `a` and `b` are not defined. - ^^^ [2] + identity.js:29:7 + 29| (({}: {}): Foo); // Error: `a` and `b` are not defined. + ^^ [1] + identity.js:29:12 + 29| (({}: {}): Foo); // Error: `a` and `b` are not defined. + ^^^ [2] Error ------------------------------------------------------------------------------------------------- identity.js:31:3 @@ -314,6 +771,24 @@ References: ^^^^^^ [2] +Error --------------------------------------------------------------------------------------------------- objmap.js:23:5 + +Cannot assign object literal to `foo` because inexact object literal [1] is incompatible with exact object type [2]. + + objmap.js:23:5 + 23| > = {} // error, {| a: number |} ~> {} + ^^ [1] + +References: + objmap.js:20:10 + v------- + 20| var foo: $ObjMap< + 21| {|a: number|}, + 22| (t:T) => T + 23| > = {} // error, {| a: number |} ~> {} + ^ [2] + + Error ------------------------------------------------------------------------------------------------- optional.js:13:2 Cannot cast `o3.b` to array type because undefined [1] is incompatible with array type [2]. @@ -349,4 +824,4 @@ References: -Found 23 errors +Found 42 errors diff --git a/tests/objmap/objmap.js b/tests/objmap/objmap.js index 2bdd9fd2ddc..778bb7ff77c 100644 --- a/tests/objmap/objmap.js +++ b/tests/objmap/objmap.js @@ -16,3 +16,13 @@ promiseAllByKey({ (o.foo: string); // error, number ~> string (o.bar: 'bar'); // ok }); + +var foo: $ObjMap< + {|a: number|}, + (t:T) => T +> = {} // error, {| a: number |} ~> {} + +var bar: $ObjMap< + {a: number}, + (t:T) => T +> = {} // ok diff --git a/tests/oopsla2017_paper_examples/oopsla2017_paper_examples.exp b/tests/oopsla2017_paper_examples/oopsla2017_paper_examples.exp index b5759c83928..700c62bf358 100644 --- a/tests/oopsla2017_paper_examples/oopsla2017_paper_examples.exp +++ b/tests/oopsla2017_paper_examples/oopsla2017_paper_examples.exp @@ -28,7 +28,7 @@ References: Error ------------------------------------------------------------------------------------------------- example6.js:9:10 -Could not decide which case to select. Since case 1 [1] may work but if it doesn't case 2 [2] looks promising too. 
To +Could not decide which case to select, since case 1 [1] may work but if it doesn't case 2 [2] looks promising too. To fix add a type annotation to return [3] or to `x` [4]. example6.js:9:10 diff --git a/tests/opaque_subtype/opaque_subtype.exp b/tests/opaque_subtype/opaque_subtype.exp index 18c367c4bcd..8c8f6d87790 100644 --- a/tests/opaque_subtype/opaque_subtype.exp +++ b/tests/opaque_subtype/opaque_subtype.exp @@ -176,9 +176,9 @@ Cannot use `PolyFoo` [1] with fewer than 1 type argument. ^^^^^^^ References: - test.js:14:22 + test.js:14:21 14| export class PolyFoo {} - ^ [1] + ^^^ [1] Error ---------------------------------------------------------------------------------------------------- test.js:21:47 diff --git a/tests/optional/optional.exp b/tests/optional/optional.exp index 02ef7cae26e..fc67be11763 100644 --- a/tests/optional/optional.exp +++ b/tests/optional/optional.exp @@ -280,13 +280,13 @@ Error -------------------------------------------------------------------------- Cannot get `x.duck` because property `duck` is missing in undefined [1]. undefined.js:8:12 - 8| return x.duck; - ^^^^^^ + 8| return x.duck; + ^^^^^^ References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [1] + undefined.js:3:21 + 3| function foo(bar? = undefined) { + ^^^^^^^^^ [1] Error ----------------------------------------------------------------------------------------------- undefined2.js:8:12 diff --git a/tests/optional_chaining/optional_chaining.exp b/tests/optional_chaining/optional_chaining.exp index 25766d9d8ba..0f5a193af44 100644 --- a/tests/optional_chaining/optional_chaining.exp +++ b/tests/optional_chaining/optional_chaining.exp @@ -277,7 +277,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:22:2 -Cannot get `y1?.["baz"]["foo"]` because an indexer property is missing in null or undefined [1]. +Cannot get `y1?.["baz"]["foo"]` because an index signature declaring the expected key / value type is missing in null or +undefined [1]. computed_properties.js:22:2 22| (y1?.["baz"]["foo"]: empty); @@ -323,7 +324,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:23:2 -Cannot get `y2?.["baz"]["foo"]` because an indexer property is missing in null or undefined [1]. +Cannot get `y2?.["baz"]["foo"]` because an index signature declaring the expected key / value type is missing in null or +undefined [1]. computed_properties.js:23:2 23| (y2?.["baz"]["foo"]: empty); @@ -337,7 +339,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:25:2 -Cannot get `y1["bar"]` because an indexer property is missing in null or undefined [1]. +Cannot get `y1["bar"]` because an index signature declaring the expected key / value type is missing in null or +undefined [1]. computed_properties.js:25:2 25| (y1["bar"]?.["foo"]: empty); @@ -415,7 +418,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:27:2 -Cannot get `y1["baz"]` because an indexer property is missing in null or undefined [1]. +Cannot get `y1["baz"]` because an index signature declaring the expected key / value type is missing in null or +undefined [1]. 
computed_properties.js:27:2 27| (y1["baz"]?.["foo"]: empty); @@ -490,7 +494,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:30:2 -Cannot get `y1?.["bar"]["foo"]` because an indexer property is missing in null or undefined [1]. +Cannot get `y1?.["bar"]["foo"]` because an index signature declaring the expected key / value type is missing in null or +undefined [1]. computed_properties.js:30:2 30| ((y1?.["bar"])["foo"]: empty); @@ -554,8 +559,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:32:2 Cannot get `y1?.["baz"]["foo"]` because: - - an indexer property is missing in null or undefined [1]. - - an indexer property is missing in null or undefined [2]. + - an index signature declaring the expected key / value type is missing in null or undefined [1]. + - an index signature declaring the expected key / value type is missing in null or undefined [2]. computed_properties.js:32:2 32| ((y1?.["baz"])["foo"]: empty); @@ -589,7 +594,8 @@ References: Error -------------------------------------------------------------------------------------- computed_properties.js:33:2 -Cannot get `y2?.["baz"]["foo"]` because an indexer property is missing in null or undefined [1]. +Cannot get `y2?.["baz"]["foo"]` because an index signature declaring the expected key / value type is missing in null or +undefined [1]. computed_properties.js:33:2 33| ((y2?.["baz"])["foo"]: empty); @@ -1584,5 +1590,22 @@ References: ^^ [1] +Error --------------------------------------------------------------------------------------------------- typeof.js:6:25 -Found 121 errors +undefined [1] is incompatible with read-only array type [2]. + + typeof.js:6:25 + 6| type T = $ElementType; + ^^^^^^^^ + +References: + typeof.js:4:17 + 4| const x: {a?: {b: $ReadOnlyArray<{c: number}>}} = {}; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + typeof.js:4:21 + 4| const x: {a?: {b: $ReadOnlyArray<{c: number}>}} = {}; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + + + +Found 122 errors diff --git a/tests/optional_chaining/typeof.js b/tests/optional_chaining/typeof.js new file mode 100644 index 00000000000..49ea8ed7c2a --- /dev/null +++ b/tests/optional_chaining/typeof.js @@ -0,0 +1,7 @@ +// @flow + +function foo() { + const x: {a?: {b: $ReadOnlyArray<{c: number}>}} = {}; + const y = x.a?.b; + type T = $ElementType; +} diff --git a/tests/overload/overload.exp b/tests/overload/overload.exp index 30521be4c32..85f0b3ddc16 100644 --- a/tests/overload/overload.exp +++ b/tests/overload/overload.exp @@ -29,8 +29,8 @@ Cannot assign `"".match(...)[0]` to `x1` because string [1] is incompatible with ^^^^^^^^^^^^^^ References: - /core.js:288:33 - 288| type RegExp$matchResult = Array & {index: number, input: string, groups: ?{[name: string]: string}}; + /core.js:313:33 + 313| type RegExp$matchResult = Array & { ^^^^^^ [1] overload.js:7:9 7| var x1: number = "".match(0)[0]; @@ -39,15 +39,15 @@ References: Error ------------------------------------------------------------------------------------------------- overload.js:7:18 -Cannot get `"".match(...)[0]` because an indexer property is missing in null [1]. +Cannot get `"".match(...)[0]` because an index signature declaring the expected key / value type is missing in null [1]. 
overload.js:7:18 7| var x1: number = "".match(0)[0]; ^^^^^^^^^^^^^^ References: - /core.js:304:58 - 304| match(regexp: string | RegExp): RegExp$matchResult | null; + /core.js:334:58 + 334| match(regexp: string | RegExp): RegExp$matchResult | null; ^^^^ [1] @@ -60,8 +60,8 @@ Cannot call `"".match` with `0` bound to `regexp` because number [1] is incompat ^ [1] References: - /core.js:304:19 - 304| match(regexp: string | RegExp): RegExp$matchResult | null; + /core.js:334:19 + 334| match(regexp: string | RegExp): RegExp$matchResult | null; ^^^^^^ [2] @@ -74,8 +74,8 @@ Cannot assign `"".match(...)[0]` to `x2` because string [1] is incompatible with ^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:288:33 - 288| type RegExp$matchResult = Array & {index: number, input: string, groups: ?{[name: string]: string}}; + /core.js:313:33 + 313| type RegExp$matchResult = Array & { ^^^^^^ [1] overload.js:8:9 8| var x2: number = "".match(/pattern/)[0]; @@ -84,15 +84,15 @@ References: Error ------------------------------------------------------------------------------------------------- overload.js:8:18 -Cannot get `"".match(...)[0]` because an indexer property is missing in null [1]. +Cannot get `"".match(...)[0]` because an index signature declaring the expected key / value type is missing in null [1]. overload.js:8:18 8| var x2: number = "".match(/pattern/)[0]; ^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:304:58 - 304| match(regexp: string | RegExp): RegExp$matchResult | null; + /core.js:334:58 + 334| match(regexp: string | RegExp): RegExp$matchResult | null; ^^^^ [1] @@ -105,8 +105,8 @@ Cannot assign `"".split(...)[0]` to `x4` because string [1] is incompatible with ^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:312:63 - 312| split(separator?: string | RegExp, limit?: number): Array; + /core.js:343:63 + 343| split(separator?: string | RegExp, limit?: number): Array; ^^^^^^ [1] overload.js:10:9 10| var x4: number = "".split(/pattern/)[0]; diff --git a/tests/parallelizable_command_cancel_recheck/.flowconfig b/tests/parallelizable_command_cancel_recheck/.flowconfig new file mode 100644 index 00000000000..a74fd308199 --- /dev/null +++ b/tests/parallelizable_command_cancel_recheck/.flowconfig @@ -0,0 +1,12 @@ +[ignore] + +[include] + +[libs] + +[lints] + +[options] +lazy_mode=fs + +[strict] diff --git a/tests/parallelizable_command_cancel_recheck/.testconfig b/tests/parallelizable_command_cancel_recheck/.testconfig new file mode 100644 index 00000000000..5a3e9f8a853 --- /dev/null +++ b/tests/parallelizable_command_cancel_recheck/.testconfig @@ -0,0 +1 @@ +shell: test.sh diff --git a/tests/parallelizable_command_cancel_recheck/dependency.js b/tests/parallelizable_command_cancel_recheck/dependency.js new file mode 100644 index 00000000000..5c4d9653fed --- /dev/null +++ b/tests/parallelizable_command_cancel_recheck/dependency.js @@ -0,0 +1,5 @@ +// @flow + +let mybug: string = 123; + +export var x = { propA: 456, propB: "hello" }; diff --git a/tests/parallelizable_command_cancel_recheck/focused.js b/tests/parallelizable_command_cancel_recheck/focused.js new file mode 100644 index 00000000000..66301b0c46f --- /dev/null +++ b/tests/parallelizable_command_cancel_recheck/focused.js @@ -0,0 +1,5 @@ +// @flow + +import {x} from './dependency'; + +x.toString(); diff --git a/tests/parallelizable_command_cancel_recheck/focused.js.stdin b/tests/parallelizable_command_cancel_recheck/focused.js.stdin new file mode 100644 index 00000000000..315abfa4303 --- /dev/null +++ 
b/tests/parallelizable_command_cancel_recheck/focused.js.stdin @@ -0,0 +1,5 @@ +// @flow + +import {x} from './dependency'; + +x. diff --git a/tests/parallelizable_command_cancel_recheck/parallelizable_command_cancel_recheck.exp b/tests/parallelizable_command_cancel_recheck/parallelizable_command_cancel_recheck.exp new file mode 100644 index 00000000000..b2e5171c4c4 --- /dev/null +++ b/tests/parallelizable_command_cancel_recheck/parallelizable_command_cancel_recheck.exp @@ -0,0 +1,31 @@ + +No errors thanks to lazy mode: +No errors! + +The Flow server is currently in filesystem lazy mode and is only checking 0/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes + +Autocomplete should kick off a recheck: +propA number +propB string + +Now we should see the errors: +Error ----------------------------------------------------------------------------------------------- dependency.js:3:21 + +Cannot assign `123` to `mybug` because number [1] is incompatible with string [2]. + + dependency.js:3:21 + 3| let mybug: string = 123; + ^^^ [1] + +References: + dependency.js:3:12 + 3| let mybug: string = 123; + ^^^^^^ [2] + + + +Found 1 error + +The Flow server is currently in filesystem lazy mode and is only checking 1/2 files. +To learn more, visit flow.org/en/docs/lang/lazy-modes diff --git a/tests/parallelizable_command_cancel_recheck/test.sh b/tests/parallelizable_command_cancel_recheck/test.sh new file mode 100644 index 00000000000..6c9eb71fdfc --- /dev/null +++ b/tests/parallelizable_command_cancel_recheck/test.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +printf "\nNo errors thanks to lazy mode:\n" +assert_ok "$FLOW" status --no-auto-start --strip-root + +printf "\nAutocomplete should kick off a recheck:\n" +assert_ok "$FLOW" autocomplete --strip-root --wait-for-recheck false \ + focused.js 5 3 < focused.js.stdin + +printf "\nNow we should see the errors:\n" +assert_errors "$FLOW" status --no-auto-start --strip-root diff --git a/tests/parse/parse.exp b/tests/parse/parse.exp index 0451758b8e2..500fe340a6a 100644 --- a/tests/parse/parse.exp +++ b/tests/parse/parse.exp @@ -1,6 +1,6 @@ Error ----------------------------------------------------------------------------------------------- fail-flow-2.js:4:1 -Unexpected token . +Unexpected token `.`, expected the start of a statement 4| . ^ @@ -8,7 +8,7 @@ Unexpected token . Error ------------------------------------------------------------------------------------------------- fail-flow.js:3:1 -Unexpected token . +Unexpected token `.`, expected the start of a statement 3| . ^ @@ -16,7 +16,7 @@ Unexpected token . Error ------------------------------------------------------------------------------------------------------ fail.js:1:1 -Unexpected token . +Unexpected token `.`, expected the start of a statement 1| . 
^ diff --git a/tests/parse_error_haste/parse_error_haste.exp b/tests/parse_error_haste/parse_error_haste.exp index acae9185d4d..56d4fa15776 100644 --- a/tests/parse_error_haste/parse_error_haste.exp +++ b/tests/parse_error_haste/parse_error_haste.exp @@ -1,6 +1,6 @@ Error ----------------------------------------------------------------------------------------------- ParseError.js:3:18 -Unexpected token # +Unexpected token `#`, expected an identifier 3| function foo() { ### // invalid token ^ diff --git a/tests/parse_error_node/parse_error_node.exp b/tests/parse_error_node/parse_error_node.exp index acae9185d4d..56d4fa15776 100644 --- a/tests/parse_error_node/parse_error_node.exp +++ b/tests/parse_error_node/parse_error_node.exp @@ -1,6 +1,6 @@ Error ----------------------------------------------------------------------------------------------- ParseError.js:3:18 -Unexpected token # +Unexpected token `#`, expected an identifier 3| function foo() { ### // invalid token ^ diff --git a/tests/poly/annot1.js b/tests/poly/annot1.js new file mode 100644 index 00000000000..200fc8bd81b --- /dev/null +++ b/tests/poly/annot1.js @@ -0,0 +1,2 @@ +export const f = (x: T) => (y: T) => y; +export const g = (x: T): (T => T) => (y: T) => y; diff --git a/tests/poly/annot2.js b/tests/poly/annot2.js new file mode 100644 index 00000000000..59badef9ea0 --- /dev/null +++ b/tests/poly/annot2.js @@ -0,0 +1,4 @@ +import {f, g} from "./annot1"; + +(f(0)(1): number); // errors: number ~> T (empty), T (mixed) ~> number +(g(0)(1): number); // ok diff --git a/tests/poly/error.js b/tests/poly/error.js index 6a9ff43af8e..91db54bd82f 100644 --- a/tests/poly/error.js +++ b/tests/poly/error.js @@ -9,7 +9,7 @@ newFn1('string'); // The error should point here. declare function fn2(x: T): ((T) => void) => void; const newFn2 = fn2(42); -newFn2((x: string) => {}); // The error should point here. +newFn2((x: string) => {}); declare function fn3(x: T, y: (T) => void): void; fn3(42, (x: string) => {}); // The error should point to 42 and not string. diff --git a/tests/poly/inout.js b/tests/poly/inout.js new file mode 100644 index 00000000000..b9fd86bd33e --- /dev/null +++ b/tests/poly/inout.js @@ -0,0 +1,7 @@ +declare opaque type A; +declare opaque type B: A; + +class C<+Out, -In: Out = Out> {} + +declare var x: C; +(x: C); // error: A ~> B in default-expanded type diff --git a/tests/poly/poly.exp b/tests/poly/poly.exp index d9bba9f9204..730d729917e 100644 --- a/tests/poly/poly.exp +++ b/tests/poly/poly.exp @@ -7,9 +7,9 @@ Cannot use `A` [1] with fewer than 1 type argument. ^ References: - annot.js:1:9 + annot.js:1:8 1| class A { } - ^ [1] + ^^^ [1] Error --------------------------------------------------------------------------------------------------- annot.js:10:10 @@ -33,6 +33,37 @@ References: ^ [3] +Error ---------------------------------------------------------------------------------------------------- annot2.js:3:2 + +Cannot cast `f(...)(...)` to number because `T` [1] is incompatible with number [2]. + + annot2.js:3:2 + 3| (f(0)(1): number); // errors: number ~> T (empty), T (mixed) ~> number + ^^^^^^^ + +References: + annot1.js:1:35 + 1| export const f = (x: T) => (y: T) => y; + ^ [1] + annot2.js:3:11 + 3| (f(0)(1): number); // errors: number ~> T (empty), T (mixed) ~> number + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------------- annot2.js:3:7 + +Cannot call `f(...)` with `1` bound to `y` because number [1] is incompatible with `T` [2]. 
+ + annot2.js:3:7 + 3| (f(0)(1): number); // errors: number ~> T (empty), T (mixed) ~> number + ^ [1] + +References: + annot1.js:1:35 + 1| export const f = (x: T) => (y: T) => y; + ^ [2] + + Error ----------------------------------------------------------------------------------------------------- error.js:8:8 Cannot call `newFn1` with `'string'` bound to the first parameter because string [1] is incompatible with number [2]. @@ -47,22 +78,18 @@ References: ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------------------- error.js:12:8 +Error --------------------------------------------------------------------------------------------------- error.js:11:20 -Cannot call `newFn2` with function bound to the first parameter because string [1] is incompatible with number [2] in -the first argument. +Cannot call `fn2` with `42` bound to `x` because number [1] is incompatible with string [2]. - error.js:12:8 - 12| newFn2((x: string) => {}); // The error should point here. - ^^^^^^^^^^^^^^^^^ + error.js:11:20 + 11| const newFn2 = fn2(42); + ^^ [1] References: error.js:12:12 - 12| newFn2((x: string) => {}); // The error should point here. - ^^^^^^ [1] - error.js:11:20 - 11| const newFn2 = fn2(42); - ^^ [2] + 12| newFn2((x: string) => {}); + ^^^^^^ [2] Error ---------------------------------------------------------------------------------------------------- error.js:15:5 @@ -146,6 +173,26 @@ References: ^^^^^^ [2] +Error ----------------------------------------------------------------------------------------------------- inout.js:7:2 + +Cannot cast `x` to `C` because `B` [1] is incompatible with `A` [2] in type argument `In` [3]. + + inout.js:7:2 + 7| (x: C); // error: A ~> B in default-expanded type + ^ + +References: + inout.js:6:18 + 6| declare var x: C; + ^ [1] + inout.js:7:7 + 7| (x: C); // error: A ~> B in default-expanded type + ^ [2] + inout.js:4:16 + 4| class C<+Out, -In: Out = Out> {} + ^^ [3] + + Error --------------------------------------------------------------------------------------------------- phantom.js:7:2 Cannot cast `a` to `B` because number [1] is incompatible with string [2] in type argument `Phantom` [3]. @@ -263,4 +310,4 @@ References: -Found 17 errors +Found 20 errors diff --git a/tests/poorly_formed_exports/poorly_formed_exports.exp b/tests/poorly_formed_exports/poorly_formed_exports.exp index 67eea002951..13f3f1a785e 100644 --- a/tests/poorly_formed_exports/poorly_formed_exports.exp +++ b/tests/poorly_formed_exports/poorly_formed_exports.exp @@ -1,20 +1,4 @@ FLOW STATUS: -Error ------------------------------------------------------------------------------------------------- libs/libs.js:4:3 - -Cannot resolve name `exports`. - - 4| exports.foo = 5; - ^^^^^^^ - - -Error ------------------------------------------------------------------------------------------------- libs/libs.js:4:3 - -Exports can only appear at the top level - - 4| exports.foo = 5; - ^^^^^^^ - - Error ---------------------------------------------------------------------------------------- bad-default-export.js:4:3 Exports can only appear at the top level @@ -176,16 +160,8 @@ Error -------------------------------------------------------------------------- -Found 22 errors +Found 20 errors FLOW CHECK-CONTENTS: -Error ------------------------------------------------------------------------------------------------- libs/libs.js:4:3 - -Cannot resolve name `exports`. 
- - 4| exports.foo = 5; - ^^^^^^^ - - Error ------------------------------------------------------------------------------------------ bad-named-export.js:4:3 Exports can only appear at the top level @@ -203,4 +179,4 @@ Exports can only appear at the top level -Found 3 errors +Found 2 errors diff --git a/tests/poorly_formed_exports_config_false/poorly_formed_exports_config_false.exp b/tests/poorly_formed_exports_config_false/poorly_formed_exports_config_false.exp index 028369f7b11..06ef34c63ac 100644 --- a/tests/poorly_formed_exports_config_false/poorly_formed_exports_config_false.exp +++ b/tests/poorly_formed_exports_config_false/poorly_formed_exports_config_false.exp @@ -1,22 +1,4 @@ FLOW STATUS: -Error ------------------------------------------------------------------------------------------------- libs/libs.js:4:3 - -Cannot resolve name `exports`. - - 4| exports.foo = 5; - ^^^^^^^ - - - -Found 1 error +No errors! FLOW CHECK-CONTENTS: -Error ------------------------------------------------------------------------------------------------- libs/libs.js:4:3 - -Cannot resolve name `exports`. - - 4| exports.foo = 5; - ^^^^^^^ - - - -Found 1 error +No errors! diff --git a/tests/poorly_formed_exports_config_false/test.sh b/tests/poorly_formed_exports_config_false/test.sh index 05d0493a9d8..b3af7bad320 100755 --- a/tests/poorly_formed_exports_config_false/test.sh +++ b/tests/poorly_formed_exports_config_false/test.sh @@ -2,10 +2,8 @@ echo "FLOW STATUS:" -# Has an unrelated error in the lib file -assert_errors $FLOW status +assert_ok $FLOW status echo "FLOW CHECK-CONTENTS:" -# Has an unrelated error in the lib file -assert_errors $FLOW check-contents bad-default-export.js < bad-default-export.js +assert_ok $FLOW check-contents bad-default-export.js < bad-default-export.js diff --git a/tests/predicates-abstract/predicates-abstract.exp b/tests/predicates-abstract/predicates-abstract.exp index c9cbcaf6dca..d2bce9a06dc 100644 --- a/tests/predicates-abstract/predicates-abstract.exp +++ b/tests/predicates-abstract/predicates-abstract.exp @@ -1,40 +1,31 @@ -Error -------------------------------------------------------------------------------------- sanity-filter-union.js:6:71 - -object with property `kind` that matches string literal `A` [1] is incompatible with string literal `B` [2]. - - sanity-filter-union.js:6:71 - 6| declare function my_filter>(v: Array, cb: P): Array<$Refine>; - ^^^^^^^^^^^^^^ [1] - -References: - sanity-filter-union.js:9:18 - 9| type B = { kind: 'B', v: string } - ^^^ [2] - - Error -------------------------------------------------------------------------------------- sanity-filter-union.js:16:2 Cannot cast `my_filter(...)` to array type because: - - property `v` is missing in `A` [1] but exists in `B` [2] in array element. - - string literal `A` [3] is incompatible with string literal `B` [4] in property `kind` of array element. + - object with property `kind` that matches string literal `A` [1] is incompatible with string literal `B` [2] in array + element. + - property `v` is missing in `A` [3] but exists in `B` [4] in array element. + - string literal `A` [5] is incompatible with string literal `B` [2] in property `kind` of array element. 
sanity-filter-union.js:16:2 16| (my_filter(ab, (x): %checks => x.kind === 'A'): Array); // ERROR ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + sanity-filter-union.js:6:71 + 6| declare function my_filter>(v: Array, cb: P): Array<$Refine>; + ^^^^^^^^^^^^^^ [1] + sanity-filter-union.js:9:18 + 9| type B = { kind: 'B', v: string } + ^^^ [2] sanity-filter-union.js:14:23 14| declare var ab: Array; - ^ [1] + ^ [3] sanity-filter-union.js:16:55 16| (my_filter(ab, (x): %checks => x.kind === 'A'): Array); // ERROR - ^ [2] + ^ [4] sanity-filter-union.js:8:18 8| type A = { kind: 'A', u: number } - ^^^ [3] - sanity-filter-union.js:9:18 - 9| type B = { kind: 'B', v: string } - ^^^ [4] + ^^^ [5] Error -------------------------------------------------------------------------------------- sanity-filter-union.js:17:2 diff --git a/tests/predicates-declared/predicates-declared.exp b/tests/predicates-declared/predicates-declared.exp index 96096420154..9e4e433f7bc 100644 --- a/tests/predicates-declared/predicates-declared.exp +++ b/tests/predicates-declared/predicates-declared.exp @@ -1,6 +1,6 @@ Error --------------------------------------------------------------------------------------- sanity-conditional.js:5:50 -Unexpected token = +Unexpected token `=`, expected the token `)` 5| declare function foo(x: string): mixed %checks(x = "1"); ^ diff --git a/tests/predicates-inferred/predicates-inferred.exp b/tests/predicates-inferred/predicates-inferred.exp index eaf3f0bcff1..51f2a4b5ca9 100644 --- a/tests/predicates-inferred/predicates-inferred.exp +++ b/tests/predicates-inferred/predicates-inferred.exp @@ -29,51 +29,6 @@ References: ^^^^^^ [1] -Error ------------------------------------------------------------------------------------------- sanity-ordering.js:7:4 - -Cannot cast `obj.page` to object type because null or undefined [1] is incompatible with object type [2]. - - sanity-ordering.js:7:4 - 7| (obj.page: Object); - ^^^^^^^^ - -References: - sanity-ordering.js:4:26 - 4| declare var obj: { page: ?Object; }; - ^^^^^^^ [1] - sanity-ordering.js:7:14 - 7| (obj.page: Object); - ^^^^^^ [2] - - -Error ----------------------------------------------------------------------------------------- sanity-ordering.js:15:12 - -Cannot get `head[key]` because an indexer property is missing in undefined [1]. - - sanity-ordering.js:15:12 - 15| head = head[key] || create && (head[key] = {}); - ^^^^^^^^^ - -References: - sanity-ordering.js:6:5 - 6| if (dotAccess(obj)) { - ^^^^^^^^^^^^^^ [1] - - -Error ----------------------------------------------------------------------------------------- sanity-ordering.js:15:36 - -Cannot assign object literal to `head[key]` because an indexer property is missing in undefined [1]. - - sanity-ordering.js:15:36 - 15| head = head[key] || create && (head[key] = {}); - ^^^^^^^^^ - -References: - sanity-ordering.js:6:5 - 6| if (dotAccess(obj)) { - ^^^^^^^^^^^^^^ [1] - - Error -------------------------------------------------------------------------------------- sanity-unbound-var.js:13:12 Cannot return `x` because array type [1] is incompatible with string [2]. @@ -125,5 +80,45 @@ Invalid body for predicate function. Expected a simple return statement as body. ^ +Error -------------------------------------------------------------------------------------------------- wf-eval.js:5:42 + +The return type of a predicate function cannot contain a generic type. The function predicate will be ignored here. 
+ + 5| function f3(x: V): $Call<(V) => V, V> %checks { return x; } // error + ^ + + +Error ------------------------------------------------------------------------------------------- wf-polymorphic.js:3:23 + +The return type of a predicate function cannot contain a generic type. The function predicate will be ignored here. + + 3| function f1(x: X): X %checks { return x; } // error: poly return in %checks + ^ + + +Error ------------------------------------------------------------------------------------------- wf-polymorphic.js:4:24 + +The return type of a predicate function cannot contain a generic type. The function predicate will be ignored here. + + 4| function f2(x: X): [X, X] %checks { return [x, x]; } // error: poly return in %checks + ^ + + +Error ------------------------------------------------------------------------------------------- wf-polymorphic.js:4:27 + +The return type of a predicate function cannot contain a generic type. The function predicate will be ignored here. + + 4| function f2(x: X): [X, X] %checks { return [x, x]; } // error: poly return in %checks + ^ + + +Error ------------------------------------------------------------------------------------------- wf-polymorphic.js:8:30 + +The return type of a predicate function cannot contain a generic type. The function predicate will be ignored here. + + 8| declare function g(x: X): X %checks(x); // error: poly return in %checks + ^ + + -Found 9 errors +Found 11 errors diff --git a/tests/predicates-inferred/wf-eval.js b/tests/predicates-inferred/wf-eval.js new file mode 100644 index 00000000000..989019bb0ec --- /dev/null +++ b/tests/predicates-inferred/wf-eval.js @@ -0,0 +1,5 @@ +// @flow + +function f1(x: string): $Call<(V) => V, string> %checks { return x; } // okay +function f2(x: string): $Call<(V) => V, $Call<(V) => V, string>> %checks { return x; } // okay +function f3(x: V): $Call<(V) => V, V> %checks { return x; } // error diff --git a/tests/predicates-inferred/wf-polymorphic.js b/tests/predicates-inferred/wf-polymorphic.js new file mode 100644 index 00000000000..f7dc60ba98d --- /dev/null +++ b/tests/predicates-inferred/wf-polymorphic.js @@ -0,0 +1,8 @@ +// @flow + +function f1(x: X): X %checks { return x; } // error: poly return in %checks +function f2(x: X): [X, X] %checks { return [x, x]; } // error: poly return in %checks +function f3(x: X): mixed %checks { return x; } // okay +function f4(x: X): %checks { return x; } // okay + +declare function g(x: X): X %checks(x); // error: poly return in %checks diff --git a/tests/predicates-parsing/predicates-parsing.exp b/tests/predicates-parsing/predicates-parsing.exp index cc63c29f82c..ca749cc9efd 100644 --- a/tests/predicates-parsing/predicates-parsing.exp +++ b/tests/predicates-parsing/predicates-parsing.exp @@ -67,7 +67,7 @@ Cannot declare predicate when a function body is present. 
Error -------------------------------------------------------------------------------------------- unsupported-0.js:5:31 -Unexpected token % +Unexpected token `%`, expected the token `;` 5| var a3: (x: mixed) => boolean %checks (x !== null); ^ @@ -75,7 +75,7 @@ Unexpected token % Error -------------------------------------------------------------------------------------------- unsupported-1.js:5:31 -Unexpected token % +Unexpected token `%`, expected the token `;` 5| var a4: (x: mixed) => boolean %checks = (x: mixed) => x !== null; ^ @@ -83,7 +83,7 @@ Unexpected token % Error -------------------------------------------------------------------------------------------- unsupported-2.js:5:31 -Unexpected token % +Unexpected token `%`, expected the token `;` 5| var a5: (x: mixed) => boolean %checks(x !== null) = ^ diff --git a/tests/private_class_fields/private_class_fields.exp b/tests/private_class_fields/private_class_fields.exp index 2b75b538ff1..960831f17e7 100644 --- a/tests/private_class_fields/private_class_fields.exp +++ b/tests/private_class_fields/private_class_fields.exp @@ -189,29 +189,35 @@ References: ^^^^^^ [1] -Error --------------------------------------------------------------------------------------------------- test.js:107:16 +Error ---------------------------------------------------------------------------------------------------- test.js:107:3 -number [1] is incompatible with string [2]. +Cannot initialize property `s` with `0` because number [1] is incompatible with string [2]. - test.js:107:16 + test.js:107:3 107| #s: string = 0; // Error, number ~> string - ^ [1] + ^^^^^^^^^^^^^^^ References: + test.js:107:16 + 107| #s: string = 0; // Error, number ~> string + ^ [1] test.js:107:7 107| #s: string = 0; // Error, number ~> string ^^^^^^ [2] -Error --------------------------------------------------------------------------------------------------- test.js:112:24 +Error ---------------------------------------------------------------------------------------------------- test.js:112:3 -number [1] is incompatible with string [2]. +Cannot initialize property `ss` with `0` because number [1] is incompatible with string [2]. - test.js:112:24 + test.js:112:3 112| static #ss: string = 0; // Error, number ~> string - ^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^ References: + test.js:112:24 + 112| static #ss: string = 0; // Error, number ~> string + ^ [1] test.js:112:15 112| static #ss: string = 0; // Error, number ~> string ^^^^^^ [2] @@ -304,7 +310,9 @@ References: Error ---------------------------------------------------------------------------------------------------- test.js:159:7 -Cannot cast `this.#p` to number literal `4` because number literal `3` [1] is incompatible with number literal `4` [2]. +Cannot cast `this.#p` to number literal `4` because: + - number literal `3` [1] is incompatible with number literal `4` [2]. + - number literal `3` [3] is incompatible with number literal `4` [2]. 
test.js:159:7 159| (this.#p: 4); // Error, this.p doesnt refine this.#p @@ -317,6 +325,9 @@ References: test.js:159:16 159| (this.#p: 4); // Error, this.p doesnt refine this.#p ^ [2] + test.js:154:20 + 154| if (this.#p === 3) { + ^ [3] Error ---------------------------------------------------------------------------------------------------- test.js:162:7 @@ -372,8 +383,9 @@ References: Error ---------------------------------------------------------------------------------------------------- test.js:173:7 -Cannot cast `RefinementClashes.#q` to number literal `4` because number literal `3` [1] is incompatible with number -literal `4` [2]. +Cannot cast `RefinementClashes.#q` to number literal `4` because: + - number literal `3` [1] is incompatible with number literal `4` [2]. + - number literal `3` [3] is incompatible with number literal `4` [2]. test.js:173:7 173| (RefinementClashes.#q: 4); // Error, RefinementClashes.q doesnt refine RefinementClashes.#q @@ -386,6 +398,9 @@ References: test.js:173:29 173| (RefinementClashes.#q: 4); // Error, RefinementClashes.q doesnt refine RefinementClashes.#q ^ [2] + test.js:168:33 + 168| if (RefinementClashes.#q === 3) { + ^ [3] Error ---------------------------------------------------------------------------------------------------- test.js:176:7 @@ -440,4 +455,4 @@ References: -Found 28 errors +Found 30 errors diff --git a/tests/promises/promises.exp b/tests/promises/promises.exp index 7d7f9982f52..ce56a5034fd 100644 --- a/tests/promises/promises.exp +++ b/tests/promises/promises.exp @@ -84,8 +84,8 @@ Cannot call `Promise.all` because property `@@iterator` is missing in undefined ^^^^^^^^^^^^^ [1] References: - /core.js:607:19 - 607| static all>(promises: T): Promise<$TupleMap>; + /core.js:689:19 + 689| static all>(promises: T): Promise<$TupleMap>; ^^^^^^^^^^^^^^^ [2] @@ -99,8 +99,8 @@ in `$Iterable` [2]. 
^ [1] References: - /core.js:607:19 - 607| static all>(promises: T): Promise<$TupleMap>; + /core.js:689:19 + 689| static all>(promises: T): Promise<$TupleMap>; ^^^^^^^^^^^^^^^ [2] @@ -405,8 +405,8 @@ References: resolve_void.js:3:29 3| (Promise.resolve(): Promise); // error ^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] @@ -426,8 +426,8 @@ References: resolve_void.js:5:38 5| (Promise.resolve(undefined): Promise); // error ^^^^^^ [2] - /core.js:582:24 - 582| declare class Promise<+R> { + /core.js:664:24 + 664| declare class Promise<+R> { ^ [3] diff --git a/tests/proto/annot.js b/tests/proto/annot.js index ca8ea78bbd3..278f61332ab 100644 --- a/tests/proto/annot.js +++ b/tests/proto/annot.js @@ -21,11 +21,11 @@ declare var o_variance: O_variance; (o_variance.q: empty); // error: property `q` not found (o_variance.__proto__: empty); // error: object type ~> empty -// __proto__ for callable objects treated like a normal property -type O_callable = { (): void, __proto__: {} }; +// __proto__ for callable objects is an error +type O_callable = { (): void, __proto__: {} }; // error: unexpected proto after call declare var o_callable: O_callable; (o_callable.q: empty); // error: property `q` not found -(o_callable.__proto__: empty); // error: object type ~> empty +(o_callable.__proto__: empty); // error: function proto ~> empty // __proto__() treated like a normal (function-valued) property type O_method = { __proto__(): void }; @@ -38,3 +38,8 @@ declare var o_loop: O_loop; (o_loop.q: empty); // TODO: error (pruned at constraint cache) type O_invalid = { __proto__: number }; // error: number is not a valid proto + +type O_multi = { + __proto__: {}, + __proto__: {}, // error: multiple protos +} diff --git a/tests/proto/proto.exp b/tests/proto/proto.exp index 947f3b8dcac..3b44dacafac 100644 --- a/tests/proto/proto.exp +++ b/tests/proto/proto.exp @@ -144,6 +144,14 @@ References: ^^^^^ [2] +Error --------------------------------------------------------------------------------------------------- annot.js:25:31 + +Unexpected prototype after call property. + + 25| type O_callable = { (): void, __proto__: {} }; // error: unexpected proto after call + ^^^^^^^^^ + + Error ---------------------------------------------------------------------------------------------------- annot.js:27:2 Cannot get `o_callable.q` because property `q` is missing in `O_callable` [1]. @@ -160,18 +168,15 @@ References: Error ---------------------------------------------------------------------------------------------------- annot.js:28:2 -Cannot cast `o_callable.__proto__` to empty because object type [1] is incompatible with empty [2]. +Cannot cast `o_callable.__proto__` to empty because function prototype [1] is incompatible with empty [2]. annot.js:28:2 - 28| (o_callable.__proto__: empty); // error: object type ~> empty - ^^^^^^^^^^^^^^^^^^^^ + 28| (o_callable.__proto__: empty); // error: function proto ~> empty + ^^^^^^^^^^^^^^^^^^^^ [1] References: - annot.js:25:42 - 25| type O_callable = { (): void, __proto__: {} }; - ^^ [1] annot.js:28:24 - 28| (o_callable.__proto__: empty); // error: object type ~> empty + 28| (o_callable.__proto__: empty); // error: function proto ~> empty ^^^^^ [2] @@ -217,6 +222,14 @@ Cannot use number [1] as a prototype. Expected an object or null. ^^^^^^ [1] +Error ---------------------------------------------------------------------------------------------------- annot.js:44:3 + +Multiple prototypes specified. 
+ + 44| __proto__: {}, // error: multiple protos + ^^^^^^^^^ + + Error --------------------------------------------------------------------------------------------------- literal.js:2:2 Cannot cast `o.p` to empty because number [1] is incompatible with empty [2]. @@ -445,4 +458,4 @@ References: -Found 31 errors +Found 33 errors diff --git a/tests/react/ConcurrentMode.js b/tests/react/ConcurrentMode.js new file mode 100644 index 00000000000..3d48559f289 --- /dev/null +++ b/tests/react/ConcurrentMode.js @@ -0,0 +1,37 @@ +// @flow + +import React from 'react'; + +{ + const {ConcurrentMode} = React; + + +

+ +} + +{ + const {Component, ConcurrentMode} = React; + + class ClassExample extends Component<{||}> { + render() { + return null; + } + } + + + + +} + +{ + const {ConcurrentMode} = React; + + function FunctionExample() { + return null; + } + + + + +} diff --git a/tests/react/StrictMode.js b/tests/react/StrictMode.js new file mode 100644 index 00000000000..b56cc9c4c46 --- /dev/null +++ b/tests/react/StrictMode.js @@ -0,0 +1,37 @@ +// @flow + +import React from 'react'; + +{ + const {StrictMode} = React; + + +
+ +} + +{ + const {Component, StrictMode} = React; + + class ClassExample extends Component<{||}> { + render() { + return null; + } + } + + + + +} + +{ + const {StrictMode} = React; + + function FunctionExample() { + return null; + } + + + + +} diff --git a/tests/react/abstractelement.js b/tests/react/abstractelement.js new file mode 100644 index 00000000000..f0df5fe6745 --- /dev/null +++ b/tests/react/abstractelement.js @@ -0,0 +1,27 @@ +//@flow + +const React = require('react'); + +type Props1 = { a : number } +type Props2 = {| b : string |} + +class Component1 extends React.Component{} +class Component2 extends React.Component{} + +function takesTop(e : React.MixedElement) { + (e.props : mixed); + e.props.a; // error +} + +takesTop(); +takesTop(); +takesTop(
) + +function takesAny(e : React$Element) { + (e.props : mixed); + e.props.a; +} + +takesAny(); +takesAny(); +takesAny(
) diff --git a/tests/react/creatRef.js b/tests/react/creatRef.js new file mode 100644 index 00000000000..cfd68a5a336 --- /dev/null +++ b/tests/react/creatRef.js @@ -0,0 +1,13 @@ +// @flow + +import React from 'react'; + +{ + class MyComponent extends React.Component {} + + const ref: {current: null | React$ComponentType} = React.createRef(); // Ok +} + +{ + const ref: {|current: null | number|} = React.createRef(); // Ok +} diff --git a/tests/react/createContext.js b/tests/react/createContext.js new file mode 100644 index 00000000000..46127556530 --- /dev/null +++ b/tests/react/createContext.js @@ -0,0 +1,57 @@ +// @flow + +import React from 'react'; + +{ + const Context = React.createContext('div'); + const {Consumer, Provider} = Context; + + class Foo extends React.Component<{}> { + divRef: {current: null | HTMLDivElement} = React.createRef(); + + render() { + return ( + + +
+ + {(Tag: 'div' | 'span' | 'img') => } + +
+
+ {/* Error: enum is incompatible with string */} + + {(Tag: 'div' | 'span' | 'img') => } + + +
+ ); + } + + componentDidMount() { + var div: null | HTMLDivElement = this.divRef.current; // Ok + var image: null | HTMLImageElement = this.divRef.current; // Error: HTMLDivElement is incompatible with HTMLImageElement + } + } +} + +{ + const Context = React.createContext( + {foo: 0, bar: 0, baz: 0}, + (a, b) => { + let result = 0; + if (a.foo !== b.foo) { + result |= 0b001; + } + if (a.bar !== b.bar) { + result |= 0b010; + } + return result; + }, + ); +} + +{ + const ThemeContext = createContext("light"); + ThemeContext.displayName = "ThemeContext"; +} diff --git a/tests/react/create_class.js b/tests/react/create_class.js index 699a8fe0e97..7d99e97193e 100644 --- a/tests/react/create_class.js +++ b/tests/react/create_class.js @@ -172,3 +172,10 @@ const L = React.createClass({ React.createClass({}); // error: spec must be [x] exact and [ ] sealed React.createClass(({}: {})); // error: spec must be [ ] exact and [x] sealed + +const M = React.createClass({ + propTypes: { + foo: React.PropTypes.string, + }, +}); +M.defaultProps.bar = 1; // error cannot update void property diff --git a/tests/react/dotvsdollar.js b/tests/react/dotvsdollar.js new file mode 100644 index 00000000000..1b0a59b6dd9 --- /dev/null +++ b/tests/react/dotvsdollar.js @@ -0,0 +1,31 @@ +//@flow + +// React.Element(Type) was behaving differently from React$Element(Type) due to a mishandled +// type destructor case. This tests that the logic stays correct, as all 8 of these should correctly +// typecheck. +const React = require('react'); +class Component extends React.Component<{}> {}; + +declare var a : React.Element>; +(a : React.Element); + +const b = ; +(b: React.Element); + +declare var c : React.Element>; +(c : React.Element); + +const d = ; +(d: React.Element); + +declare var e : React.Element>; +(d : React$Element); + +const f = ; +(f: React$Element); + +declare var g : React.Element>; +(g : React$Element); + +const h = ; +(h: React$Element); diff --git a/tests/react/element_config.js b/tests/react/element_config.js index 9391acc0611..9a86f5655b1 100644 --- a/tests/react/element_config.js +++ b/tests/react/element_config.js @@ -55,11 +55,11 @@ class J extends React.Component<{p?: ?number}> { ({p: 42}: React.ElementConfig); // OK ({p: 'foo'}: React.ElementConfig); // Error: string ~> number -({}: React.ElementConfig); // Error: missing property `p` +(({}: {}): React.ElementConfig); // Error: missing property `p` ({p: 42}: React.ElementConfig); // OK ({p: 'foo'}: React.ElementConfig); // Error: string ~> number -({}: React.ElementConfig); // Error: missing property `p` +(({}: {}): React.ElementConfig); // Error: missing property `p` ({p: 42}: React.ElementConfig); // OK ({p: 'foo'}: React.ElementConfig); // Error: string ~> number diff --git a/tests/react/hoc.js b/tests/react/hoc.js index a8065334a8a..f29cfe701e2 100644 --- a/tests/react/hoc.js +++ b/tests/react/hoc.js @@ -35,10 +35,10 @@ function UnwrappedFun(props: {foo: number, bar: number}) { myHOC(class Empty extends React.Component<{foo: string}, void> {}); // Error myHOC(function Empty(props: {foo: string}) {}); // Error -const Wrapped = myHOC(Unwrapped); +const Wrapped: React$ComponentType<{foo: number}> = myHOC(Unwrapped); const WrappedFun = myHOC(UnwrappedFun); -; // Error: `foo` is required. +; // Error: `foo` is required. ; // OK ; // Error: `foo` is required. 
; // OK diff --git a/tests/react/hoc5.js b/tests/react/hoc5.js index 745e223ae1b..d0252de6759 100644 --- a/tests/react/hoc5.js +++ b/tests/react/hoc5.js @@ -13,10 +13,14 @@ function hoc>( class MyComponent1 extends React.Component<{foo: string, bar: number}> { static defaultProps = {foo: 'qux'}; - render() { return null } + render() { + return null; + } } -function MyComponent2(props: {foo: string, bar: number}) { return null } +function MyComponent2(props: {foo: string, bar: number}) { + return null; +} MyComponent2.defaultProps = {foo: 'qux'}; ; // Error diff --git a/tests/react/profiler.js b/tests/react/profiler.js new file mode 100644 index 00000000000..4ff4ff4eacf --- /dev/null +++ b/tests/react/profiler.js @@ -0,0 +1,37 @@ +// @flow + +import React from 'react'; + +{ + const {Profiler} = React; + + function onRender( + id: string, + phase: "mount" | "update", + actualDuration: number, + baseDuration: number, + startTime: number, + commitTime: number, + interactions: Set, + ) { + // Dummy callback + } + + + +
+ +} + +{ + const {Profiler} = React; + + // Error: no "id" or "onRender" prop + // Error: no "onRender" prop + // Error: no "id" prop + // Error: invalid "onRender" prop + + function badOnRender(foo: number) {} + + // Error: invalid "onRender" prop +} diff --git a/tests/react/react.exp b/tests/react/react.exp index 98c22721a48..24b77c88487 100644 --- a/tests/react/react.exp +++ b/tests/react/react.exp @@ -1,3 +1,84 @@ +Error ------------------------------------------------------------------------------------------ abstractelement.js:13:3 + +Cannot get `e.props.a` because property `a` is missing in mixed [1]. + + abstractelement.js:13:3 + 13| e.props.a; // error + ^^^^^^^^^ + +References: + /react.js:184:49 + 184| declare type React$MixedElement = React$Element; + ^^^^^^^^^^^^^^^^^ [1] + + +Error ------------------------------------------------------------------------------------------- createContext.js:17:15 + +Cannot create `Consumer` element because enum [1] is incompatible with string [2] in the first argument of property +`children`. + + createContext.js:17:15 + v--------- + 17| + 18| {(Tag: 'div' | 'span' | 'img') => } + 19| + ----------^ + +References: + createContext.js:18:24 + 18| {(Tag: 'div' | 'span' | 'img') => } + ^^^^^^^^^^^^^^^^^^^^^^ [1] + createContext.js:22:27 + 22| {/* Error: enum is incompatible with string */} + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- createContext.js:23:13 + +Cannot create `Consumer` element because enum [1] is incompatible with string [2] in the first argument of property +`children`. + + createContext.js:23:13 + v--------- + 23| + 24| {(Tag: 'div' | 'span' | 'img') => } + 25| + ----------^ + +References: + createContext.js:24:22 + 24| {(Tag: 'div' | 'span' | 'img') => } + ^^^^^^^^^^^^^^^^^^^^^^ [1] + createContext.js:22:27 + 22| {/* Error: enum is incompatible with string */} + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- createContext.js:33:44 + +Cannot assign `this.divRef.current` to `image` because `HTMLDivElement` [1] is incompatible with `HTMLImageElement` [2]. + + createContext.js:33:44 + 33| var image: null | HTMLImageElement = this.divRef.current; // Error: HTMLDivElement is incompatible with HTMLImageElement + ^^^^^^^^^^^^^^^^^^^ + +References: + createContext.js:10:30 + 10| divRef: {current: null | HTMLDivElement} = React.createRef(); + ^^^^^^^^^^^^^^ [1] + createContext.js:33:25 + 33| var image: null | HTMLImageElement = this.divRef.current; // Error: HTMLDivElement is incompatible with HTMLImageElement + ^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- createContext.js:55:24 + +Cannot resolve name `createContext`. + + 55| const ThemeContext = createContext("light"); + ^^^^^^^^^^^^^ + + Error ------------------------------------------------------------------------ createElementRequiredProp_string.js:17:13 Cannot create `Cmp` element because property `test` is missing in props [1] but exists in object type [2]. 
@@ -21,8 +102,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:7:22 7| (this.props.foo: empty); // error: string ~> empty @@ -38,8 +119,8 @@ Cannot cast `this.props.bar` to empty because number [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [1] create_class.js:8:22 8| (this.props.bar: empty); // error: number ~> empty @@ -192,8 +273,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:117:22 117| (this.props.foo: empty); // string ~> empty @@ -209,8 +290,8 @@ Cannot cast `this.state.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:118:22 118| (this.state.foo: empty); // string ~> empty @@ -226,8 +307,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:133:22 133| (this.props.foo: empty); // string ~> empty @@ -243,8 +324,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:137:22 137| (this.props.foo: empty); // string ~> empty @@ -260,8 +341,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:141:22 141| (this.props.foo: empty); // string ~> empty @@ -277,8 +358,8 @@ Cannot cast `nextProps.foo` to empty because string [1] is incompatible with emp ^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:142:21 142| (nextProps.foo: empty); // string ~> empty @@ -294,8 +375,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:146:22 146| (this.props.foo: empty); // string ~> empty @@ -328,8 +409,8 @@ Cannot cast `nextProps.foo` to empty because string [1] is incompatible with emp ^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:148:21 148| (nextProps.foo: empty); // string ~> empty @@ -376,8 +457,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: 
React$PropType$Primitive, ^^^^^^ [1] create_class.js:153:22 153| (this.props.foo: empty); // string ~> empty @@ -410,8 +491,8 @@ Cannot cast `nextProps.foo` to empty because string [1] is incompatible with emp ^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:155:21 155| (nextProps.foo: empty); // string ~> empty @@ -444,8 +525,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:160:22 160| (this.props.foo: empty); // string ~> empty @@ -478,8 +559,8 @@ Cannot cast `nextProps.foo` to empty because string [1] is incompatible with emp ^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:162:21 162| (nextProps.foo: empty); // string ~> empty @@ -512,8 +593,8 @@ Cannot cast `this.props.foo` to empty because string [1] is incompatible with em ^^^^^^^^^^^^^^ References: - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [1] create_class.js:167:22 167| (this.props.foo: empty); // string ~> empty @@ -565,6 +646,25 @@ References: ^^ [1] +Error -------------------------------------------------------------------------------------------- create_class.js:181:1 + +Cannot assign `1` to `M.defaultProps.bar` because property `bar` is missing in default props of React component [1]. + + create_class.js:181:1 + 181| M.defaultProps.bar = 1; // error cannot update void property + ^^^^^^^^^^^^^^^^^^ + +References: + create_class.js:176:11 + v------------------ + 176| const M = React.createClass({ + 177| propTypes: { + 178| foo: React.PropTypes.string, + 179| }, + 180| }); + -^ [1] + + Error ------------------------------------------------------------------------- create_class_initial_state_sealed.js:9:6 Cannot cast `this.state.q` to empty because number [1] is incompatible with empty [2]. @@ -1041,19 +1141,22 @@ References: ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- element_config.js:58:2 +Error ------------------------------------------------------------------------------------------- element_config.js:58:3 -Cannot cast object literal to `React.ElementConfig` because property `p` is missing in object literal [1] but exists in +Cannot cast object literal to `React.ElementConfig` because property `p` is missing in object type [1] but exists in object type [2]. 
- element_config.js:58:2 - 58| ({}: React.ElementConfig); // Error: missing property `p` - ^^ [1] + element_config.js:58:3 + 58| (({}: {}): React.ElementConfig); // Error: missing property `p` + ^^^^^^ References: - element_config.js:58:6 - 58| ({}: React.ElementConfig); // Error: missing property `p` - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + element_config.js:58:7 + 58| (({}: {}): React.ElementConfig); // Error: missing property `p` + ^^ [1] + element_config.js:58:12 + 58| (({}: {}): React.ElementConfig); // Error: missing property `p` + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------- element_config.js:60:2 @@ -1073,19 +1176,22 @@ References: ^^^^^^ [2] -Error ------------------------------------------------------------------------------------------- element_config.js:62:2 +Error ------------------------------------------------------------------------------------------- element_config.js:62:3 -Cannot cast object literal to `React.ElementConfig` because property `p` is missing in object literal [1] but exists in +Cannot cast object literal to `React.ElementConfig` because property `p` is missing in object type [1] but exists in object type [2]. - element_config.js:62:2 - 62| ({}: React.ElementConfig); // Error: missing property `p` - ^^ [1] + element_config.js:62:3 + 62| (({}: {}): React.ElementConfig); // Error: missing property `p` + ^^^^^^ References: - element_config.js:62:6 - 62| ({}: React.ElementConfig); // Error: missing property `p` - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] + element_config.js:62:7 + 62| (({}: {}): React.ElementConfig); // Error: missing property `p` + ^^ [1] + element_config.js:62:12 + 62| (({}: {}): React.ElementConfig); // Error: missing property `p` + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------- element_config.js:64:2 @@ -1205,23 +1311,20 @@ References: Error ------------------------------------------------------------------------------- element_with_component_type.js:8:2 -Cannot cast `` to `React.Element` because property `b` is missing in object type [1] but exists in object -type [2] in type argument `Props` [3] of property `type`. +Cannot cast `` to `React.Element` because property `a` is missing in object type [1] but exists in object +type [2] in property `type`. element_with_component_type.js:8:2 - 8| (: React.Element>); // Error - ^^^^^^^^^^^^^ + 8| (: React.Element>); // Error + ^^^^^^^^^^^^^ References: - element_with_component_type.js:5:35 - 5| class Foo extends React.Component<{a: number}> {} - ^^^^^^^^^^^ [1] element_with_component_type.js:8:51 - 8| (: React.Element>); // Error - ^^^^^^^^^^^ [2] - /react.js:26:31 - 26| declare class React$Component { - ^^^^^ [3] + 8| (: React.Element>); // Error + ^^^^^^^^^^^ [1] + element_with_component_type.js:5:35 + 5| class Foo extends React.Component<{a: number}> {} + ^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------------ hoc.js:11:7 @@ -1257,7 +1360,7 @@ References: Error ------------------------------------------------------------------------------------------------------ hoc.js:35:7 Cannot call `myHOC` with `class { ... }` bound to `Component` because string [1] is incompatible with number [2] in -property `foo` of type argument `Props` [3]. +property `foo`. 
hoc.js:35:7 35| myHOC(class Empty extends React.Component<{foo: string}, void> {}); // Error @@ -1270,27 +1373,55 @@ References: hoc.js:6:40 6| Component: React$ComponentType<{foo: number, bar: number}>, ^^^^^^ [2] - /react.js:26:31 - 26| declare class React$Component { - ^^^^^ [3] Error ------------------------------------------------------------------------------------------------------ hoc.js:36:7 -Cannot call `myHOC` with function bound to `Component` because string [1] is incompatible with number [2] in property -`foo` of the first argument. +Cannot call `myHOC` with function bound to `Component` because: + - number [1] is incompatible with string [2] in property `foo`. + - all branches are incompatible: + - Either undefined [3] is incompatible with null [4]. + - Or property `@@iterator` is missing in undefined [3] but exists in `$Iterable` [5]. hoc.js:36:7 36| myHOC(function Empty(props: {foo: string}) {}); // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc.js:36:35 - 36| myHOC(function Empty(props: {foo: string}) {}); // Error - ^^^^^^ [1] hoc.js:6:40 6| Component: React$ComponentType<{foo: number, bar: number}>, - ^^^^^^ [2] + ^^^^^^ [1] + hoc.js:36:35 + 36| myHOC(function Empty(props: {foo: string}) {}); // Error + ^^^^^^ [2] + hoc.js:36:43 + 36| myHOC(function Empty(props: {foo: string}) {}); // Error + ^ [3] + /react.js:14:5 + 14| | null + ^^^^ [4] + /react.js:20:5 + 20| | Iterable; + ^^^^^^^^^^^^^^^^^^^^^ [5] + + +Error ----------------------------------------------------------------------------------------------------- hoc.js:36:43 + +All branches are incompatible: + - Either undefined [1] is incompatible with null [2]. + - Or property `@@iterator` is missing in undefined [1] but exists in `$Iterable` [3]. + + hoc.js:36:43 + 36| myHOC(function Empty(props: {foo: string}) {}); // Error + ^ [1] + +References: + /react.js:14:5 + 14| | null + ^^^^ [2] + /react.js:20:5 + 20| | Iterable; + ^^^^^^^^^^^^^^^^^^^^^ [3] Error ------------------------------------------------------------------------------------------------------ hoc.js:41:1 @@ -1298,13 +1429,13 @@ Error -------------------------------------------------------------------------- Cannot create `Wrapped` element because property `foo` is missing in props [1] but exists in object type [2]. hoc.js:41:1 - 41| ; // Error: `foo` is required. - ^^^^^^^^^^^ [1] + 41| ; // Error: `foo` is required. + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: - hoc.js:7:24 - 7| ): React$ComponentType<{foo: number}> { - ^^^^^^^^^^^^^ [2] + hoc.js:38:36 + 38| const Wrapped: React$ComponentType<{foo: number}> = myHOC(Unwrapped); + ^^^^^^^^^^^^^ [2] Error ------------------------------------------------------------------------------------------------------ hoc.js:43:1 @@ -1321,77 +1452,12 @@ References: ^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc2.js:17:1 - -Property `a` is missing in object type [1] but exists in object type [2] in the first argument. 
- - hoc2.js:17:1 - v------------------------------------- - 17| function MyFunctionComponent(props: {| - 18| a: number, - 19| b: number, - 20| prop: number, - 21| |}) { return null } - --^ - -References: - hoc2.js:6:34 - 6| Component: React.ComponentType<{|...TProps, prop: number|}>, - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - hoc2.js:17:37 - v- - 17| function MyFunctionComponent(props: {| - 18| a: number, - 19| b: number, - 20| prop: number, - 21| |}) { return null } - -^ [2] - - -Error ----------------------------------------------------------------------------------------------------- hoc2.js:17:1 - -Property `b` is missing in object type [1] but exists in object type [2] in the first argument. - - hoc2.js:17:1 - v------------------------------------- - 17| function MyFunctionComponent(props: {| - 18| a: number, - 19| b: number, - 20| prop: number, - 21| |}) { return null } - --^ - -References: - hoc2.js:6:34 - 6| Component: React.ComponentType<{|...TProps, prop: number|}>, - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - hoc2.js:17:37 - v- - 17| function MyFunctionComponent(props: {| - 18| a: number, - 19| b: number, - 20| prop: number, - 21| |}) { return null } - -^ [2] - - -Error ----------------------------------------------------------------------------------------------------- hoc2.js:18:6 - -number [1] is incompatible with string [2] in property `a` of the first argument. - - hoc2.js:18:6 - 18| a: number, - ^^^^^^ [1] - -References: - hoc2.js:32:32 - 32| ; // Error: string ~> number - ^^^^^ [2] - - Error ---------------------------------------------------------------------------------------------------- hoc2.js:23:37 -Property `a` is missing in object type [1] but exists in object type [2] in type argument `Props` [3]. +Cannot call `connect` with `MyComponent` bound to `Component` because: + - property `a` is missing in object type [1] but exists in object type [2]. + - property `b` is missing in object type [1] but exists in object type [2]. + - number [3] is incompatible with string [4] in property `a`. hoc2.js:23:37 23| const MyEnhancedComponent = connect(MyComponent); @@ -1409,54 +1475,43 @@ References: 14| prop: number, 15| |}> { render() { return null }} -^ [2] - /react.js:26:31 - 26| declare class React$Component { - ^^^^^ [3] + hoc2.js:12:6 + 12| a: number, + ^^^^^^ [3] + hoc2.js:28:24 + 28| ; // Error: string ~> number + ^^^^^ [4] -Error ---------------------------------------------------------------------------------------------------- hoc2.js:23:37 +Error ---------------------------------------------------------------------------------------------------- hoc2.js:24:45 -Property `b` is missing in object type [1] but exists in object type [2] in type argument `Props` [3]. +Cannot call `connect` with `MyFunctionComponent` bound to `Component` because: + - property `a` is missing in props [1] but exists in object type [2]. + - property `b` is missing in props [1] but exists in object type [2]. + - string [3] is incompatible with number [4] in property `a`. 
- hoc2.js:23:37 - 23| const MyEnhancedComponent = connect(MyComponent); - ^^^^^^^^^^^ + hoc2.js:24:45 + 24| const MyEnhancedFunctionComponent = connect(MyFunctionComponent); + ^^^^^^^^^^^^^^^^^^^ References: hoc2.js:6:34 6| Component: React.ComponentType<{|...TProps, prop: number|}>, ^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - hoc2.js:11:43 - v- - 11| class MyComponent extends React.Component<{| - 12| a: number, - 13| b: number, - 14| prop: number, - 15| |}> { render() { return null }} + hoc2.js:17:37 + v- + 17| function MyFunctionComponent(props: {| + 18| a: number, + 19| b: number, + 20| prop: number, + 21| |}) { return null } -^ [2] - /react.js:26:31 - 26| declare class React$Component { - ^^^^^ [3] - - -Error ---------------------------------------------------------------------------------------------------- hoc2.js:23:37 - -number [1] is incompatible with string [2] in property `a` of type argument `Props` [3]. - - hoc2.js:23:37 - 23| const MyEnhancedComponent = connect(MyComponent); - ^^^^^^^^^^^ - -References: - hoc2.js:12:6 - 12| a: number, - ^^^^^^ [1] - hoc2.js:28:24 - 28| ; // Error: string ~> number - ^^^^^ [2] - /react.js:26:31 - 26| declare class React$Component { - ^^^^^ [3] + hoc2.js:32:32 + 32| ; // Error: string ~> number + ^^^^^ [3] + hoc2.js:18:6 + 18| a: number, + ^^^^^^ [4] Error ----------------------------------------------------------------------------------------------------- hoc3.js:26:1 @@ -1505,9 +1560,12 @@ Cannot create `MyEnhancedFunctionComponent` element because: hoc3.js:30:1 30| ; // Error: Needs `a` and `b`. - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: + hoc3.js:6:34 + 6| Component: React.ComponentType<{prop: number} & TProps>, + ^^^^^^^^^^^^^^^^^^^^^^^ [1] hoc3.js:17:37 v 17| function MyFunctionComponent(props: { @@ -1573,50 +1631,12 @@ References: ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc4.js:30:1 - -Cannot create `MyEnhancedFunctionComponent` element because: - - property `a` is missing in props [1] but exists in object type [2]. - - property `b` is missing in props [1] but exists in object type [2]. - - hoc4.js:30:1 - 30| ; // Error: Needs `a` and `b`. - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] - -References: - hoc4.js:17:37 - v - 17| function MyFunctionComponent(props: { - 18| a: number, - 19| b: number, - 20| prop: number, - 21| }) { return null } - ^ [2] - - -Error ----------------------------------------------------------------------------------------------------- hoc4.js:32:1 - -Cannot create `MyEnhancedFunctionComponent` element because string [1] is incompatible with number [2] in property `a`. - - hoc4.js:32:1 - 32| ; // Error: string ~> number - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -References: - hoc4.js:32:32 - 32| ; // Error: string ~> number - ^^^^^ [1] - hoc4.js:18:6 - 18| a: number, - ^^^^^^ [2] - - -Error ----------------------------------------------------------------------------------------------------- hoc5.js:22:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:26:1 Cannot create `MyComponent1` element because property `bar` is missing in props [1] but exists in object type [2]. 
- hoc5.js:22:1 - 22| ; // Error + hoc5.js:26:1 + 26| ; // Error ^^^^^^^^^^^^^^^^ [1] References: @@ -1625,95 +1645,95 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:24:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:28:1 Cannot create `MyComponent1` element because string [1] is incompatible with number [2] in property `bar`. - hoc5.js:24:1 - 24| ; // Error + hoc5.js:28:1 + 28| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:24:19 - 24| ; // Error + hoc5.js:28:19 + 28| ; // Error ^^^^^^ [1] hoc5.js:14:63 14| class MyComponent1 extends React.Component<{foo: string, bar: number}> { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:26:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:30:1 Cannot create `MyComponent1` element because number [1] is incompatible with string [2] in property `foo`. - hoc5.js:26:1 - 26| ; // Error + hoc5.js:30:1 + 30| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:26:29 - 26| ; // Error + hoc5.js:30:29 + 30| ; // Error ^^^ [1] hoc5.js:14:50 14| class MyComponent1 extends React.Component<{foo: string, bar: number}> { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:29:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:33:1 Cannot create `MyComponent2` element because property `bar` is missing in props [1] but exists in object type [2]. - hoc5.js:29:1 - 29| ; // Error + hoc5.js:33:1 + 33| ; // Error ^^^^^^^^^^^^^^^^ [1] References: - hoc5.js:19:30 - 19| function MyComponent2(props: {foo: string, bar: number}) { return null } + hoc5.js:21:30 + 21| function MyComponent2(props: {foo: string, bar: number}) { ^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:31:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:35:1 Cannot create `MyComponent2` element because string [1] is incompatible with number [2] in property `bar`. - hoc5.js:31:1 - 31| ; // Error + hoc5.js:35:1 + 35| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:31:19 - 31| ; // Error + hoc5.js:35:19 + 35| ; // Error ^^^^^^ [1] - hoc5.js:19:49 - 19| function MyComponent2(props: {foo: string, bar: number}) { return null } + hoc5.js:21:49 + 21| function MyComponent2(props: {foo: string, bar: number}) { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:33:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:37:1 Cannot create `MyComponent2` element because number [1] is incompatible with string [2] in property `foo`. 
- hoc5.js:33:1 - 33| ; // Error + hoc5.js:37:1 + 37| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:33:29 - 33| ; // Error + hoc5.js:37:29 + 37| ; // Error ^^^ [1] - hoc5.js:19:36 - 19| function MyComponent2(props: {foo: string, bar: number}) { return null } + hoc5.js:21:36 + 21| function MyComponent2(props: {foo: string, bar: number}) { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:39:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:43:1 Cannot create `MyEnhancedComponent1` element because property `bar` is missing in props [1] but exists in object type [2]. - hoc5.js:39:1 - 39| ; // Error + hoc5.js:43:1 + 43| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: @@ -1722,47 +1742,47 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:41:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:45:1 Cannot create `MyEnhancedComponent1` element because string [1] is incompatible with number [2] in property `bar`. - hoc5.js:41:1 - 41| ; // Error + hoc5.js:45:1 + 45| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:41:27 - 41| ; // Error + hoc5.js:45:27 + 45| ; // Error ^^^^^^ [1] hoc5.js:14:63 14| class MyComponent1 extends React.Component<{foo: string, bar: number}> { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:43:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:47:1 Cannot create `MyEnhancedComponent1` element because number [1] is incompatible with string [2] in property `foo`. - hoc5.js:43:1 - 43| ; // Error + hoc5.js:47:1 + 47| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:43:37 - 43| ; // Error + hoc5.js:47:37 + 47| ; // Error ^^^ [1] hoc5.js:14:50 14| class MyComponent1 extends React.Component<{foo: string, bar: number}> { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:46:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:50:1 Cannot create `MyEnhancedComponent2` element because property `bar` is missing in props [1] but exists in object type [2]. - hoc5.js:46:1 - 46| ; // Error + hoc5.js:50:1 + 50| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^ [1] References: @@ -1771,37 +1791,37 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:48:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:52:1 Cannot create `MyEnhancedComponent2` element because string [1] is incompatible with number [2] in property `bar`. 
- hoc5.js:48:1 - 48| ; // Error + hoc5.js:52:1 + 52| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:48:27 - 48| ; // Error + hoc5.js:52:27 + 52| ; // Error ^^^^^^ [1] - hoc5.js:19:49 - 19| function MyComponent2(props: {foo: string, bar: number}) { return null } + hoc5.js:21:49 + 21| function MyComponent2(props: {foo: string, bar: number}) { ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------------------- hoc5.js:50:1 +Error ----------------------------------------------------------------------------------------------------- hoc5.js:54:1 Cannot create `MyEnhancedComponent2` element because number [1] is incompatible with string [2] in property `foo`. - hoc5.js:50:1 - 50| ; // Error + hoc5.js:54:1 + 54| ; // Error ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - hoc5.js:50:37 - 50| ; // Error + hoc5.js:54:37 + 54| ; // Error ^^^ [1] - hoc5.js:19:36 - 19| function MyComponent2(props: {foo: string, bar: number}) { return null } + hoc5.js:21:36 + 21| function MyComponent2(props: {foo: string, bar: number}) { ^^^^^^ [2] @@ -1845,8 +1865,8 @@ References: jsx_spread.js:10:19 10| var props = {bar: 42}; ^^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] @@ -1864,11 +1884,11 @@ References: key.js:12:11 12| ; // Error ^^^^ [1] - /react.js:184:26 - 184| declare type React$Key = string | number; + /react.js:190:26 + 190| declare type React$Key = string | number; ^^^^^^ [2] - /react.js:184:35 - 184| declare type React$Key = string | number; + /react.js:190:35 + 190| declare type React$Key = string | number; ^^^^^^ [3] @@ -1886,11 +1906,11 @@ References: key.js:21:16 21| ; // Error ^^^^ [1] - /react.js:184:26 - 184| declare type React$Key = string | number; + /react.js:190:26 + 190| declare type React$Key = string | number; ^^^^^^ [2] - /react.js:184:35 - 184| declare type React$Key = string | number; + /react.js:190:35 + 190| declare type React$Key = string | number; ^^^^^^ [3] @@ -1904,14 +1924,22 @@ incompatible with `Element` [2]. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:701:52 - 701| getElementById(elementId: string): HTMLElement | null; + /dom.js:819:52 + 819| getElementById(elementId: string): HTMLElement | null; ^^^^ [1] /react-dom.js:30:16 30| container: Element, ^^^^^^^ [2] +Error ------------------------------------------------------------------------------------------------- profiler.js:21:3 + +Unexpected token <. 
Remember, adjacent JSX elements must be wrapped in an enclosing parent tag + + 21| + ^ + + Error ---------------------------------------------------------------------------------------- proptype_arrayOf.js:13:20 Cannot create `Example` element because property `arr` is missing in props [1] but exists in propTypes of React @@ -1959,8 +1987,8 @@ References: proptype_arrayOf.js:15:45 15| var fail_mistyped_elems = ^^^^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] @@ -1976,8 +2004,8 @@ References: proptype_arrayOf.js:20:36 20| var todo_required = ^^^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] @@ -1994,32 +2022,21 @@ References: proptype_arrayOf.js:30:25 30| (); // error: string ~> number ^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] -Error ----------------------------------------------------------------------------------------- proptype_arrayOf.js:38:2 +Error ---------------------------------------------------------------------------------------- proptype_arrayOf.js:43:10 -Cannot create `AnyExample` element because number [1] is incompatible with array type [2] in property `arr`. +Cannot call `React.PropTypes.arrayOf` because number [1] is not a React propType. - proptype_arrayOf.js:38:2 - 38| (); // error: still needs to be an array - ^^^^^^^^^^^^^^^^^^^^^^ + proptype_arrayOf.js:43:10 + 43| arr: React.PropTypes.arrayOf(0), // error: number not a prop type + ^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - proptype_arrayOf.js:38:19 - 38| (); // error: still needs to be an array - ^ [1] - proptype_arrayOf.js:34:10 - 34| arr: React.PropTypes.arrayOf((0:any)), // OK - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] - - -Error ---------------------------------------------------------------------------------------- proptype_arrayOf.js:43:34 - -number [1] is not a React propType. 
- + proptype_arrayOf.js:43:34 43| arr: React.PropTypes.arrayOf(0), // error: number not a prop type ^ [1] @@ -2033,8 +2050,8 @@ Cannot cast `propName` to empty because string [1] is incompatible with empty [2 ^^^^^^^^ References: - /react.js:298:13 - 298| propName: string, + /react.js:441:13 + 441| propName: string, ^^^^^^ [1] proptype_custom_validator.js:8:18 8| (propName: empty); // error: propName is a string @@ -2050,8 +2067,8 @@ Cannot cast `componentName` to empty because string [1] is incompatible with emp ^^^^^^^^^^^^^ References: - /react.js:299:18 - 299| componentName: string, + /react.js:442:18 + 442| componentName: string, ^^^^^^ [1] proptype_custom_validator.js:9:23 9| (componentName: empty); // error: componentName is a string @@ -2069,8 +2086,8 @@ Cannot cast `href` to empty because: ^^^^ References: - /react.js:300:10 - 300| href?: string) => ?Error; + /react.js:443:10 + 443| href?: string) => ?Error; ^^^^^^ [1] proptype_custom_validator.js:10:14 10| (href: empty); // error: href is an optional string @@ -2089,28 +2106,11 @@ References: proptype_custom_validator.js:11:18 11| return (0: mixed); // error: should return ?Error ^^^^^ [1] - /react.js:300:22 - 300| href?: string) => ?Error; + /react.js:443:22 + 443| href?: string) => ?Error; ^^^^^ [2] -Error ------------------------------------------------------------------------------------------- proptype_func.js:14:21 - -Cannot create `Example` element because number [1] is incompatible with function type [2] in property `func`. - - proptype_func.js:14:21 - 14| var fail_mistyped = - ^^^^^^^^^^^^^^^^^^^^ - -References: - proptype_func.js:14:36 - 14| var fail_mistyped = - ^ [1] - /react.js:325:34 - 325| func: React$PropType$Primitive; - ^^^^^^^^ [2] - - Error ------------------------------------------------------------------------------------ proptype_incompatible.js:5:10 number [1] is not a React propType. @@ -2153,23 +2153,6 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ----------------------------------------------------------------------------------------- proptype_object.js:13:21 - -Cannot create `Example` element because number [1] is incompatible with object type [2] in property `object`. 
- - proptype_object.js:13:21 - 13| var fail_mistyped = - ^^^^^^^^^^^^^^^^^^^^^^ - -References: - proptype_object.js:13:38 - 13| var fail_mistyped = - ^ [1] - /react.js:327:36 - 327| object: React$PropType$Primitive; - ^^^^^^ [2] - - Error --------------------------------------------------------------------------------------- proptype_objectOf.js:13:20 Cannot create `Example` element because property `obj` is missing in props [1] but exists in propTypes of React @@ -2217,8 +2200,8 @@ References: proptype_objectOf.js:15:47 15| var fail_mistyped_props = ^^^^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] @@ -2234,8 +2217,8 @@ References: proptype_objectOf.js:20:38 20| var todo_required = ^^^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] @@ -2251,32 +2234,21 @@ References: proptype_objectOf.js:30:27 30| (); // error: string ~> number ^^ [1] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [2] -Error ---------------------------------------------------------------------------------------- proptype_objectOf.js:38:2 +Error --------------------------------------------------------------------------------------- proptype_objectOf.js:43:10 -Cannot create `AnyExample` element because number [1] is incompatible with object type [2] in property `obj`. +Cannot call `React.PropTypes.objectOf` because number [1] is not a React propType. - proptype_objectOf.js:38:2 - 38| (); // error: still needs to be an object - ^^^^^^^^^^^^^^^^^^^^^^ + proptype_objectOf.js:43:10 + 43| obj: React.PropTypes.objectOf(0), // error: number not a prop type + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - proptype_objectOf.js:38:19 - 38| (); // error: still needs to be an object - ^ [1] - proptype_objectOf.js:34:10 - 34| obj: React.PropTypes.objectOf((0:any)), // OK - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] - - -Error --------------------------------------------------------------------------------------- proptype_objectOf.js:43:35 - -number [1] is not a React propType. - + proptype_objectOf.js:43:35 43| obj: React.PropTypes.objectOf(0), // error: number not a prop type ^ [1] @@ -2384,10 +2356,16 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error ------------------------------------------------------------------------------------------ proptype_oneOf.js:76:30 +Error ------------------------------------------------------------------------------------------- proptype_oneOf.js:76:8 -number [1] is not an array. +Cannot call `React.PropTypes.oneOf` because number [1] is not an array. 
+ proptype_oneOf.js:76:8 + 76| p: React.PropTypes.oneOf(0), // error: expected array, got 0 + ^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + proptype_oneOf.js:76:30 76| p: React.PropTypes.oneOf(0), // error: expected array, got 0 ^ [1] @@ -2427,11 +2405,11 @@ References: proptype_oneOfType.js:24:32 24| var fail_bool = ; ^^^^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [3] @@ -2449,11 +2427,11 @@ References: proptype_oneOfType.js:29:36 29| var todo_required = ; ^^^^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] - /react.js:326:36 - 326| number: React$PropType$Primitive; + /react.js:470:36 + 470| number: React$PropType$Primitive, ^^^^^^ [3] @@ -2469,8 +2447,8 @@ References: proptype_oneOfType.js:41:22 41| (); // error: number ~> string ^ [1] - /react.js:328:36 - 328| string: React$PropType$Primitive; + /react.js:472:36 + 472| string: React$PropType$Primitive, ^^^^^^ [2] @@ -2491,10 +2469,16 @@ References: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [2] -Error -------------------------------------------------------------------------------------- proptype_oneOfType.js:91:34 +Error --------------------------------------------------------------------------------------- proptype_oneOfType.js:91:8 + +Cannot call `React.PropTypes.oneOfType` because number [1] is not an array. -number [1] is not an array. + proptype_oneOfType.js:91:8 + 91| p: React.PropTypes.oneOfType(0), // error: expected array, got 0 + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +References: + proptype_oneOfType.js:91:34 91| p: React.PropTypes.oneOfType(0), // error: expected array, got 0 ^ [1] @@ -2552,14 +2536,14 @@ Cannot cast `React.PropTypes.arrayOf` to `NoFun` because: ^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:308:17 - 308| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; + /react.js:452:17 + 452| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^ [1] proptypes_builtins.js:3:14 3| type NoFun = mixed => empty; ^^^^^ [2] - /react.js:308:41 - 308| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; + /react.js:452:41 + 452| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] proptypes_builtins.js:3:23 3| type NoFun = mixed => empty; @@ -2576,8 +2560,8 @@ empty [2] in the return value. 
^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:310:27 - 310| (expectedClass: any) => ReactPropsChainableTypeChecker; + /react.js:454:27 + 454| (expectedClass: any) => ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] proptypes_builtins.js:3:23 3| type NoFun = mixed => empty; @@ -2595,14 +2579,14 @@ Cannot cast `React.PropTypes.objectOf` to `NoFun` because: ^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:312:17 - 312| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; + /react.js:456:17 + 456| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^ [1] proptypes_builtins.js:3:14 3| type NoFun = mixed => empty; ^^^^^ [2] - /react.js:312:41 - 312| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; + /react.js:456:41 + 456| (typeChecker: ReactPropsCheckType) => ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] proptypes_builtins.js:3:23 3| type NoFun = mixed => empty; @@ -2620,14 +2604,14 @@ Cannot cast `React.PropTypes.oneOf` to `NoFun` because: ^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:314:20 - 314| (expectedValues: Array) => ReactPropsChainableTypeChecker; + /react.js:458:20 + 458| (expectedValues: Array) => ReactPropsChainableTypeChecker; ^^^^^^^^^^ [1] proptypes_builtins.js:3:14 3| type NoFun = mixed => empty; ^^^^^ [2] - /react.js:314:35 - 314| (expectedValues: Array) => ReactPropsChainableTypeChecker; + /react.js:458:35 + 458| (expectedValues: Array) => ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] proptypes_builtins.js:3:23 3| type NoFun = mixed => empty; @@ -2645,14 +2629,14 @@ Cannot cast `React.PropTypes.oneOfType` to `NoFun` because: ^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:316:25 - 316| (arrayOfTypeCheckers: Array) => + /react.js:460:25 + 460| (arrayOfTypeCheckers: Array) => ^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] proptypes_builtins.js:3:14 3| type NoFun = mixed => empty; ^^^^^ [2] - /react.js:317:5 - 317| ReactPropsChainableTypeChecker; + /react.js:461:5 + 461| ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] proptypes_builtins.js:3:23 3| type NoFun = mixed => empty; @@ -2670,14 +2654,14 @@ Cannot cast `React.PropTypes.shape` to `NoFun` because: ^^^^^^^^^^^^^^^^^^^^^ References: - /react.js:319:16 - 319| (shapeTypes: { [key: string]: ReactPropsCheckType }) => - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] + /react.js:463:16 + 463| (shapeTypes: { [key: string]: ReactPropsCheckType, ... }) => + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [1] proptypes_builtins.js:3:14 3| type NoFun = mixed => empty; ^^^^^ [2] - /react.js:320:5 - 320| ReactPropsChainableTypeChecker; + /react.js:464:5 + 464| ReactPropsChainableTypeChecker; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ [3] proptypes_builtins.js:3:23 3| type NoFun = mixed => empty; @@ -2744,8 +2728,8 @@ Cannot cast `foo` to `Foo` because null [1] is incompatible with `Foo` [2]. ^^^ References: - /react.js:191:39 - 191| | ((React$ElementRef | null) => mixed) + /react.js:197:39 + 197| | ((React$ElementRef | null) => mixed) ^^^^ [1] ref.js:13:24 13| (foo: Foo)} />; // Error: `Foo` may be null. @@ -2796,8 +2780,8 @@ Cannot cast `foo` to `FooExact` because null [1] is incompatible with `FooExact` ^^^ References: - /react.js:191:39 - 191| | ((React$ElementRef | null) => mixed) + /react.js:197:39 + 197| | ((React$ElementRef | null) => mixed) ^^^^ [1] ref.js:24:29 24| (foo: FooExact)} />; // Error: `FooExact` may be null. 
@@ -2823,7 +2807,7 @@ References: Error ---------------------------------------------------------------------------------------------------- render.js:8:1 -Cannot extend property `Component` [1] with `A` because in the return value of property `render`: +Cannot extend `React.Component` [1] with `A` because in the return value of property `render`: - Either undefined [2] is incompatible with null [3]. - Or property `@@iterator` is missing in undefined [2] but exists in `$Iterable` [4]. @@ -2898,7 +2882,7 @@ References: Error --------------------------------------------------------------------------------------------------- render.js:46:1 -Cannot extend property `Component` [1] with `F` because in the return value of property `render`: +Cannot extend `React.Component` [1] with `F` because in the return value of property `render`: - Either undefined [2] is incompatible with null [3]. - Or property `@@iterator` is missing in undefined [2] but exists in `$Iterable` [4]. @@ -2955,16 +2939,16 @@ Cannot call `ReactDOM.render` with `document.querySelector(...)` bound to `conta with `Element` [2]. render_callback.js:13:29 - 13| ReactDOM.render(, document.querySelector('#site'), () => { - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 13| ReactDOM.render(, document.querySelector('#site'), () => { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:939:50 - 939| querySelector(selector: string): HTMLElement | null; - ^^^^ [1] + /dom.js:1062:50 + 1062| querySelector(selector: string): HTMLElement | null; + ^^^^ [1] /react-dom.js:18:16 - 18| container: Element, - ^^^^^^^ [2] + 18| container: Element, + ^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- render_callback.js:17:29 @@ -2973,16 +2957,16 @@ Cannot call `ReactDOM.render` with `document.querySelector(...)` bound to `conta with `Element` [2]. render_callback.js:17:29 - 17| ReactDOM.render(, document.querySelector('#site'), function() { - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 17| ReactDOM.render(, document.querySelector('#site'), function() { + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:939:50 - 939| querySelector(selector: string): HTMLElement | null; - ^^^^ [1] + /dom.js:1062:50 + 1062| querySelector(selector: string): HTMLElement | null; + ^^^^ [1] /react-dom.js:18:16 - 18| container: Element, - ^^^^^^^ [2] + 18| container: Element, + ^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- render_callback.js:22:29 @@ -2991,16 +2975,16 @@ Cannot call `ReactDOM.render` with `document.querySelector(...)` bound to `conta with `Element` [2]. render_callback.js:22:29 - 22| ReactDOM.render(, document.querySelector('#site'), 1); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 22| ReactDOM.render(, document.querySelector('#site'), 1); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:939:50 - 939| querySelector(selector: string): HTMLElement | null; - ^^^^ [1] + /dom.js:1062:50 + 1062| querySelector(selector: string): HTMLElement | null; + ^^^^ [1] /react-dom.js:18:16 - 18| container: Element, - ^^^^^^^ [2] + 18| container: Element, + ^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- render_callback.js:22:62 @@ -3023,22 +3007,22 @@ Cannot call `ReactDOM.render` with `document.querySelector(...)` bound to `conta with `Element` [2]. 
render_callback.js:23:29 - 23| ReactDOM.render(, document.querySelector('#site'), {}); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 23| ReactDOM.render(, document.querySelector('#site'), {}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:939:50 - 939| querySelector(selector: string): HTMLElement | null; - ^^^^ [1] + /dom.js:1062:50 + 1062| querySelector(selector: string): HTMLElement | null; + ^^^^ [1] /react-dom.js:18:16 - 18| container: Element, - ^^^^^^^ [2] + 18| container: Element, + ^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- render_callback.js:23:62 -Cannot call `ReactDOM.render` with object literal bound to `callback` because a callable signature is missing in object -literal [1] but exists in function type [2]. +Cannot call `ReactDOM.render` with object literal bound to `callback` because a call signature declaring the expected +parameter / return type is missing in object literal [1] but exists in function type [2]. render_callback.js:23:62 23| ReactDOM.render(, document.querySelector('#site'), {}); @@ -3056,16 +3040,16 @@ Cannot call `ReactDOM.render` with `document.querySelector(...)` bound to `conta with `Element` [2]. render_callback.js:24:29 - 24| ReactDOM.render(, document.querySelector('#site'), ''); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 24| ReactDOM.render(, document.querySelector('#site'), ''); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:939:50 - 939| querySelector(selector: string): HTMLElement | null; - ^^^^ [1] + /dom.js:1062:50 + 1062| querySelector(selector: string): HTMLElement | null; + ^^^^ [1] /react-dom.js:18:16 - 18| container: Element, - ^^^^^^^ [2] + 18| container: Element, + ^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- render_callback.js:24:62 @@ -3089,16 +3073,16 @@ Cannot call `ReactDOM.render` with `document.querySelector(...)` bound to `conta with `Element` [2]. render_callback.js:25:29 - 25| ReactDOM.render(, document.querySelector('#site'), null); - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 25| ReactDOM.render(, document.querySelector('#site'), null); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /dom.js:939:50 - 939| querySelector(selector: string): HTMLElement | null; - ^^^^ [1] + /dom.js:1062:50 + 1062| querySelector(selector: string): HTMLElement | null; + ^^^^ [1] /react-dom.js:18:16 - 18| container: Element, - ^^^^^^^ [2] + 18| container: Element, + ^^^^^^^ [2] Error ----------------------------------------------------------------------------------------- render_callback.js:25:62 @@ -3124,30 +3108,1144 @@ Cannot get `child.tagName` because property `tagName` is missing in `React.Compo ^^^^^^^^^^^^^ References: - /react-dom.js:88:19 - 88| test: (child: React$Component) => boolean, + /react-dom.js:90:19 + 90| test: (child: React$Component) => boolean, ^^^^^^^^^^^^^^^^^^^^^^^^^ [1] +Error ---------------------------------------------------------------------------------------------- test-utils.js:58:15 + +Cannot call `TestUtils.act` with function bound to `callback` because property `then` is missing in object literal [1] +but exists in `Thenable` [2] in the return value. 
+ + test-utils.js:58:15 + 58| TestUtils.act(() => ({count: 123})); // error + ^^^^^^^^^^^^^^^^^^^^ + +References: + test-utils.js:58:22 + 58| TestUtils.act(() => ({count: 123})); // error + ^^^^^^^^^^^^ [1] + /react-dom.js:116:47 + 116| declare function act(callback: () => void | Thenable): Thenable; + ^^^^^^^^ [2] + + Error -------------------------------------------------------------------------------------------- undefined_prop.js:7:1 Cannot create `MyComponent` element because undefined [1] is incompatible with string [2] in property `title`. undefined_prop.js:7:1 - 7| ; - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 7| ; + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ References: - /core.js:13:24 - 13| declare var undefined: void; - ^^^^ [1] + undefined_prop.js:7:21 + 7| ; + ^^^^^^^^^ [1] undefined_prop.js:5:51 - 5| class MyComponent extends React.Component<{title: string}> {} - ^^^^^^ [2] + 5| class MyComponent extends React.Component<{title: string}> {} + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------ useCallback_hook.js:6:3 + +Cannot call `React.useCallback` because function [1] requires another argument. + + useCallback_hook.js:6:3 + 6| React.useCallback(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:360:38 + v---------------------------------------------- + 360| declare export function useCallback) => mixed>( + 361| callback: T, + 362| inputs: ?$ReadOnlyArray, + 363| ): T; + ---^ [1] + + +Error ---------------------------------------------------------------------------------------- useCallback_hook.js:12:23 + +Cannot assign `callback()` to `str` because number [1] is incompatible with string [2]. + + useCallback_hook.js:12:23 + 12| const str: string = callback();// Error: number is incompatible with string. + ^^^^^^^^^^ + +References: + useCallback_hook.js:10:44 + 10| const callback = React.useCallback(() => 123); + ^^^ [1] + useCallback_hook.js:12:14 + 12| const str: string = callback();// Error: number is incompatible with string. + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------- useCallback_hook.js:21:3 + +Cannot call `callback` because function [1] requires another argument. + + useCallback_hook.js:21:3 + 21| callback(true); // Error: function requires another argument. + ^^^^^^^^^^^^^^ + +References: + useCallback_hook.js:16:38 + v------------------------------ + 16| const callback = React.useCallback((num: number, str: string) => { + 17| (num: number); + 18| (str: string); + 19| }); + ^ [1] + + +Error ---------------------------------------------------------------------------------------- useCallback_hook.js:21:12 + +Cannot call `callback` with `true` bound to `num` because boolean [1] is incompatible with number [2]. + + useCallback_hook.js:21:12 + 21| callback(true); // Error: function requires another argument. + ^^^^ [1] + +References: + useCallback_hook.js:16:44 + 16| const callback = React.useCallback((num: number, str: string) => { + ^^^^^^ [2] + + +Error ---------------------------------------------------------------------------------------- useCallback_hook.js:22:12 + +Cannot call `callback` with `'123'` bound to `num` because string [1] is incompatible with number [2]. + + useCallback_hook.js:22:12 + 22| callback('123', 'abc'); // Error: string is incompatible with number. 
+ ^^^^^ [1] + +References: + useCallback_hook.js:16:44 + 16| const callback = React.useCallback((num: number, str: string) => { + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- useContext_hook.js:6:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. + + useContext_hook.js:6:3 + 6| React.useMutationEffect(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------------- useContext_hook.js:21:18 + +Cannot assign `React.useContext(...)` to `numericValue` because string [1] is incompatible with number [2]. + + useContext_hook.js:21:18 + 21| numericValue = React.useContext(StringContext); // Error: string is incompatible with number + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + useContext_hook.js:19:45 + 19| const StringContext = React.createContext('hello'); + ^^^^^^^ [1] + useContext_hook.js:16:21 + 16| let numericValue: number; + ^^^^^^ [2] + + +Error ----------------------------------------------------------------------------------------- useContext_hook.js:23:53 + +Cannot assign `React.createContext(...)` to `InvalidContext` because inexact string [1] is incompatible with exact +`CustomType` [2] in type argument `T` [3]. + + useContext_hook.js:23:53 + 23| const InvalidContext: React$Context = React.createContext('hello'); // Error: inexact string is incompatible with exact CustomType + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + useContext_hook.js:23:73 + 23| const InvalidContext: React$Context = React.createContext('hello'); // Error: inexact string is incompatible with exact CustomType + ^^^^^^^ [1] + useContext_hook.js:23:39 + 23| const InvalidContext: React$Context = React.createContext('hello'); // Error: inexact string is incompatible with exact CustomType + ^^^^^^^^^^ [2] + /react.js:204:28 + 204| declare type React$Context = { + ^ [3] + + +Error ----------------------------------------------------------------------------------------- useContext_hook.js:29:17 + +Cannot assign `React.useContext(...)` to `stringValue` because `CustomType` [1] is incompatible with string [2]. 
+ + useContext_hook.js:29:17 + 29| stringValue = React.useContext(CustomContext); // Error: CustomType is incompatible with string + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + useContext_hook.js:25:38 + 25| const CustomContext: React$Context = React.createContext({ + ^^^^^^^^^^ [1] + useContext_hook.js:15:20 + 15| let stringValue: string; + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------ useContext_hook.js:54:4 + +Cannot cast `bar` to string because number [1] is incompatible with string [2]. + + useContext_hook.js:54:4 + 54| (bar: string); // Error: number is incompatible with string + ^^^ + +References: + useContext_hook.js:35:19 + 35| {foo: 0, bar: 0, baz: 0}, + ^ [1] + useContext_hook.js:54:9 + 54| (bar: string); // Error: number is incompatible with string + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------- useDebugValue_hook.js:5:24 + +Cannot call `React.useDebugValue` because property `useDebugValue` is missing in object type [1]. + + useDebugValue_hook.js:5:24 + 5| const undefinedValue = React.useDebugValue(123); + ^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error -------------------------------------------------------------------------------------------- useEffect_hook.js:6:3 + +Cannot call `React.useEffect` because function [1] requires another argument. + + useEffect_hook.js:6:3 + 6| React.useEffect(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^^^ + +References: + /react.js:350:36 + v + 350| declare export function useEffect( + 351| create: () => MaybeCleanUpFn, + 352| inputs: ?$ReadOnlyArray, + 353| ): void; + ------^ [1] + + +Error ------------------------------------------------------------------------------------------ useEffect_hook.js:22:19 + +Cannot call `React.useEffect` with `1` bound to `create` because number [1] is incompatible with function type [2]. 
+ + useEffect_hook.js:22:19 + 22| React.useEffect(1); // Error: number is incompatible with function type + ^ [1] + +References: + /react.js:351:13 + 351| create: () => MaybeCleanUpFn, + ^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------ useEffect_hook.js:23:29 + +Cannot call `React.useEffect` with `1` bound to `inputs` because number [1] is incompatible with read-only array +type [2]. + + useEffect_hook.js:23:29 + 23| React.useEffect(() => {}, 1); // Error: number is incompatible with function react-only array + ^ [1] + +References: + /react.js:352:14 + 352| inputs: ?$ReadOnlyArray, + ^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------ useEffect_hook.js:24:19 + +Cannot call `React.useEffect` with async function bound to `create` because a call signature declaring the expected +parameter / return type is missing in `Promise` [1] but exists in function type [2] in the return value. + + useEffect_hook.js:24:19 + 24| React.useEffect(async () => {}) // Error: promise is incompatible with function return type + ^^^^^^^^^^^^^^ + +References: + useEffect_hook.js:24:30 + 24| React.useEffect(async () => {}) // Error: promise is incompatible with function return type + ^ [1] + /react.js:316:41 + 316| declare type MaybeCleanUpFn = void | (() => void); + ^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------ useEffect_hook.js:25:19 + +Cannot call `React.useEffect` with function bound to `create` because number [1] is incompatible with undefined [2] in +the return value of the return value. + + useEffect_hook.js:25:19 + 25| React.useEffect(() => () => 123) // Error: cleanup function should not return a value + ^^^^^^^^^^^^^^^ + +References: + useEffect_hook.js:25:31 + 25| React.useEffect(() => () => 123) // Error: cleanup function should not return a value + ^^^ [1] + /react.js:316:47 + 316| declare type MaybeCleanUpFn = void | (() => void); + ^^^^ [2] + + +Error ---------------------------------------------------------------------------------- useImperativeHandle_hook.js:6:3 + +Cannot call `React.useImperativeHandle` because function [1] requires another argument. + + useImperativeHandle_hook.js:6:3 + 6| React.useImperativeHandle(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:370:46 + v--- + 370| declare export function useImperativeHandle( + 371| ref: { current: T | null, ... } | ((inst: T | null) => mixed) | null | void, + 372| create: () => T, + 373| inputs: ?$ReadOnlyArray, + 374| ): void; + ------^ [1] + + +Error -------------------------------------------------------------------------------- useImperativeHandle_hook.js:31:34 + +Cannot call `React.useImperativeHandle` with function bound to `create` because inexact object literal [1] is +incompatible with exact `Interface` [2] in the return value. 
+ + useImperativeHandle_hook.js:31:34 + 31| React.useImperativeHandle(ref, () => ({})); // Error: inexact object literal is incompatible with exact Interface + ^^^^^^^^^^ + +References: + useImperativeHandle_hook.js:31:41 + 31| React.useImperativeHandle(ref, () => ({})); // Error: inexact object literal is incompatible with exact Interface + ^^ [1] + useImperativeHandle_hook.js:30:31 + 30| const ref: {current: null | Interface } = React.createRef(); + ^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------- useImperativeHandle_hook.js:34:40 + +Cannot call `React.useImperativeHandle` with function bound to `create` because inexact object literal [1] is +incompatible with exact `Interface` [2] in the return value. + + useImperativeHandle_hook.js:34:40 + 34| React.useImperativeHandle(refSetter, () => ({})); // Error: inexact object literal is incompatible with exact Interface + ^^^^^^^^^^ + +References: + useImperativeHandle_hook.js:34:47 + 34| React.useImperativeHandle(refSetter, () => ({})); // Error: inexact object literal is incompatible with exact Interface + ^^ [1] + useImperativeHandle_hook.js:33:39 + 33| const refSetter = (instance: null | Interface) => {}; + ^^^^^^^^^ [2] + + +Error -------------------------------------------------------------------------------------- useLayoutEffect_hook.js:6:3 + +Cannot call `React.useLayoutEffect` because function [1] requires another argument. + + useLayoutEffect_hook.js:6:3 + 6| React.useLayoutEffect(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:355:42 + v + 355| declare export function useLayoutEffect( + 356| create: () => MaybeCleanUpFn, + 357| inputs: ?$ReadOnlyArray, + 358| ): void; + ------^ [1] + + +Error ------------------------------------------------------------------------------------ useLayoutEffect_hook.js:22:25 + +Cannot call `React.useLayoutEffect` with `1` bound to `create` because number [1] is incompatible with function +type [2]. + + useLayoutEffect_hook.js:22:25 + 22| React.useLayoutEffect(1); // Error: number is incompatible with function type + ^ [1] + +References: + /react.js:356:13 + 356| create: () => MaybeCleanUpFn, + ^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------ useLayoutEffect_hook.js:23:35 + +Cannot call `React.useLayoutEffect` with `1` bound to `inputs` because number [1] is incompatible with read-only array +type [2]. + + useLayoutEffect_hook.js:23:35 + 23| React.useLayoutEffect(() => {}, 1); // Error: number is incompatible with function react-only array + ^ [1] + +References: + /react.js:357:14 + 357| inputs: ?$ReadOnlyArray, + ^^^^^^^^^^^^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------ useLayoutEffect_hook.js:24:25 + +Cannot call `React.useLayoutEffect` with async function bound to `create` because a call signature declaring the +expected parameter / return type is missing in `Promise` [1] but exists in function type [2] in the return value. 
+ + useLayoutEffect_hook.js:24:25 + 24| React.useLayoutEffect(async () => {}) // Error: promise is incompatible with function return type + ^^^^^^^^^^^^^^ + +References: + useLayoutEffect_hook.js:24:36 + 24| React.useLayoutEffect(async () => {}) // Error: promise is incompatible with function return type + ^ [1] + /react.js:316:41 + 316| declare type MaybeCleanUpFn = void | (() => void); + ^^^^^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------ useLayoutEffect_hook.js:25:25 + +Cannot call `React.useLayoutEffect` with function bound to `create` because number [1] is incompatible with +undefined [2] in the return value of the return value. + + useLayoutEffect_hook.js:25:25 + 25| React.useLayoutEffect(() => () => 123) // Error: cleanup function should not return a value + ^^^^^^^^^^^^^^^ + +References: + useLayoutEffect_hook.js:25:37 + 25| React.useLayoutEffect(() => () => 123) // Error: cleanup function should not return a value + ^^^ [1] + /react.js:316:47 + 316| declare type MaybeCleanUpFn = void | (() => void); + ^^^^ [2] + + +Error ---------------------------------------------------------------------------------------------- useMemo_hook.js:6:3 + +Cannot call `React.useMemo` because function [1] requires another argument. + + useMemo_hook.js:6:3 + 6| React.useMemo(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^ + +References: + /react.js:365:34 + v--- + 365| declare export function useMemo( + 366| create: () => T, + 367| inputs: ?$ReadOnlyArray, + 368| ): T; + ---^ [1] + + +Error -------------------------------------------------------------------------------------------- useMemo_hook.js:17:27 + +Cannot assign `React.useMemo(...)` to `invalid` because string [1] is incompatible with number [2]. + + useMemo_hook.js:17:27 + 17| const invalid: number = React.useMemo(() => "abc"); // Error: string is incompatible with number + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + useMemo_hook.js:17:47 + 17| const invalid: number = React.useMemo(() => "abc"); // Error: string is incompatible with number + ^^^^^ [1] + useMemo_hook.js:17:18 + 17| const invalid: number = React.useMemo(() => "abc"); // Error: string is incompatible with number + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------ useMutationEffect_hook.js:6:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. + + useMutationEffect_hook.js:6:3 + 6| React.useMutationEffect(); // Error: function requires another argument. 
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:11:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. + + useMutationEffect_hook.js:11:3 + 11| React.useMutationEffect(() => {}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:12:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. 
+ + useMutationEffect_hook.js:12:3 + 12| React.useMutationEffect(() => {}, []); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:13:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. + + useMutationEffect_hook.js:13:3 + 13| React.useMutationEffect(() => {}, [1, 2, 3]); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:16:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. 
+ + useMutationEffect_hook.js:16:3 + 16| React.useMutationEffect(() => () => {}); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:17:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. + + useMutationEffect_hook.js:17:3 + 17| React.useMutationEffect(() => () => {}, []); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:18:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. 
+ + useMutationEffect_hook.js:18:3 + 18| React.useMutationEffect(() => () => {}, [1, 2, 3]); + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:22:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. + + useMutationEffect_hook.js:22:3 + 22| React.useMutationEffect(1); // Error: number is incompatible with function type + ^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ----------------------------------------------------------------------------------- useMutationEffect_hook.js:23:3 + +Cannot call `React.useMutationEffect` because property `useMutationEffect` is missing in object type [1]. 
+ + useMutationEffect_hook.js:23:3 + 23| React.useMutationEffect(() => {}, 1); // Error: number is incompatible with function react-only array + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +References: + /react.js:398:26 + v- + 398| declare export default {| + 399| +DOM: typeof DOM, + 400| +PropTypes: typeof PropTypes, + 401| +version: typeof version, + 402| +checkPropTypes: typeof checkPropTypes, + 403| +memo: typeof memo, + 404| +lazy: typeof lazy, + 405| +createClass: typeof createClass, + 406| +createContext: typeof createContext, + 407| +createElement: typeof createElement, + 408| +cloneElement: typeof cloneElement, + 409| +createFactory: typeof createFactory, + 410| +createRef: typeof createRef, + 411| +forwardRef: typeof forwardRef, + 412| +isValidElement: typeof isValidElement, + : + 416| +Children: typeof Children, + 417| +ConcurrentMode: typeof ConcurrentMode, + 418| +StrictMode: typeof StrictMode, + 419| +Profiler: typeof Profiler, + 420| +Suspense: typeof Suspense, + 421| +useContext: typeof useContext, + 422| +useState: typeof useState, + 423| +useReducer: typeof useReducer, + 424| +useRef: typeof useRef, + 425| +useEffect: typeof useEffect, + 426| +useLayoutEffect: typeof useLayoutEffect, + 427| +useCallback: typeof useCallback, + 428| +useMemo: typeof useMemo, + 429| +useImperativeHandle: typeof useImperativeHandle, + 430| |}; + -^ [1] + + +Error ------------------------------------------------------------------------------------------- useReducer_hook.js:6:3 + +Cannot call `React.useReducer` because: + - Either function type [1] requires another argument from call of method `useReducer` [2]. + - Or function type [3] requires another argument from call of method `useReducer` [2]. + - Or function type [4] requires another argument from call of method `useReducer` [2]. + + useReducer_hook.js:6:3 + 6| React.useReducer(); // Error: function requires another argument. + ^^^^^^^^^^^^^^^^^^ [2] + +References: + /react.js:329:37 + v------ + 329| declare export function useReducer( + 330| reducer: (S, A) => S, + 331| initialState: S, + 332| ): [S, Dispatch]; + ------------------^ [1] + /react.js:334:37 + v------ + 334| declare export function useReducer( + 335| reducer: (S, A) => S, + 336| initialState: S, + 337| init: void, + 338| ): [S, Dispatch]; + ------------------^ [3] + /react.js:340:37 + v--------- + 340| declare export function useReducer( + 341| reducer: (S, A) => S, + 342| initialArg: I, + 343| init: (I) => S, + 344| ): [S, Dispatch]; + ------------------^ [4] + + +Error ------------------------------------------------------------------------------------------ useReducer_hook.js:27:4 + +Cannot cast `state.count` to string because: + - number [1] is incompatible with string [2]. + - number [3] is incompatible with string [2]. + - number [4] is incompatible with string [2]. + - number [5] is incompatible with string [2]. + - number [6] is incompatible with string [2]. + - number [7] is incompatible with string [2]. 
+ + useReducer_hook.js:27:4 + 27| (state.count: string); // Error: number is incompatible with string + ^^^^^^^^^^^ + +References: + useReducer_hook.js:14:23 + 14| return { count: state.count + 1 }; + ^^^^^^^^^^^^^^^ [1] + useReducer_hook.js:27:17 + 27| (state.count: string); // Error: number is incompatible with string + ^^^^^^ [2] + useReducer_hook.js:16:23 + 16| return { count: state.count - 1 }; + ^^^^^^^^^^^^^^^ [3] + useReducer_hook.js:22:31 + 22| const initialState = { count: 0 }; + ^ [4] + useReducer_hook.js:29:38 + 29| dispatch({ type: "reset", payload: 123 }); + ^^^ [5] + useReducer_hook.js:41:66 + 41| const [state, dispatch] = React.useReducer(reducer, { initial: 123 }, init); + ^^^ [6] + useReducer_hook.js:44:38 + 44| dispatch({ type: "reset", payload: 123 }); + ^^^ [7] + + +Error --------------------------------------------------------------------------------------------- useRef_hook.js:16:49 + +Cannot assign `React.useRef(...)` to `stringValue` because number [1] is incompatible with string [2] in property +`current`. + + useRef_hook.js:16:49 + 16| const stringValue: {current: string | null} = React.useRef(123); // Error: number is incompatible with string in property current + ^^^^^^^^^^^^^^^^^ + +References: + useRef_hook.js:16:62 + 16| const stringValue: {current: string | null} = React.useRef(123); // Error: number is incompatible with string in property current + ^^^ [1] + useRef_hook.js:16:32 + 16| const stringValue: {current: string | null} = React.useRef(123); // Error: number is incompatible with string in property current + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- useRef_hook.js:17:49 + +Cannot assign `React.useRef(...)` to `numberValue` because string [1] is incompatible with number [2] in property +`current`. + + useRef_hook.js:17:49 + 17| const numberValue: {current: number | null} = React.useRef("abc"); // Error: string is incompatible with number in property current + ^^^^^^^^^^^^^^^^^^^ + +References: + useRef_hook.js:17:62 + 17| const numberValue: {current: number | null} = React.useRef("abc"); // Error: string is incompatible with number in property current + ^^^^^ [1] + useRef_hook.js:17:32 + 17| const numberValue: {current: number | null} = React.useRef("abc"); // Error: string is incompatible with number in property current + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- useRef_hook.js:18:38 + +Cannot assign `React.useRef(...)` to `nullValue` because boolean [1] is incompatible with null [2] in property +`current`. + + useRef_hook.js:18:38 + 18| const nullValue: {current: null} = React.useRef(true); // Error: boolean is incompatible with null in property current + ^^^^^^^^^^^^^^^^^^ + +References: + useRef_hook.js:18:51 + 18| const nullValue: {current: null} = React.useRef(true); // Error: boolean is incompatible with null in property current + ^^^^ [1] + useRef_hook.js:18:30 + 18| const nullValue: {current: null} = React.useRef(true); // Error: boolean is incompatible with null in property current + ^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- useRef_hook.js:24:25 + +Cannot assign `123` to `stringValue.current` because number [1] is incompatible with string [2]. 
+ + useRef_hook.js:24:25 + 24| stringValue.current = 123; // Error: number is incompatible with string in property current + ^^^ [1] + +References: + useRef_hook.js:22:32 + 22| const stringValue: {current: string | null} = React.useRef(null); + ^^^^^^ [2] + + +Error --------------------------------------------------------------------------------------------- useRef_hook.js:32:38 + +Cannot assign `React.useRef(...)` to `foo` because `Bar` [1] is incompatible with `Foo` [2] in property `current`. + + useRef_hook.js:32:38 + 32| const foo: {current: Foo | null} = React.useRef(new Bar()); // Error: Bar is incompatible with Foo in property current + ^^^^^^^^^^^^^^^^^^^^^^^ + +References: + useRef_hook.js:32:51 + 32| const foo: {current: Foo | null} = React.useRef(new Bar()); // Error: Bar is incompatible with Foo in property current + ^^^^^^^^^ [1] + useRef_hook.js:32:24 + 32| const foo: {current: Foo | null} = React.useRef(new Bar()); // Error: Bar is incompatible with Foo in property current + ^^^ [2] + + +Error -------------------------------------------------------------------------------------------- useState_hook.js:10:2 + +Cannot cast `count` to string because number [1] is incompatible with string [2]. + + useState_hook.js:10:2 + 10| (count: string); // Error: number is incompatible with string + ^^^^^ + +References: + useState_hook.js:7:42 + 7| const [count, setCount] = React.useState(1); + ^^^^^^ [1] + useState_hook.js:10:9 + 10| (count: string); // Error: number is incompatible with string + ^^^^^^ [2] + + +Error ------------------------------------------------------------------------------------------- useState_hook.js:13:10 + +Cannot call `setCount` with `true` bound to the first parameter because boolean [1] is incompatible with number [2]. + + useState_hook.js:13:10 + 13| setCount(true); // Error: boolean is incompatible with number + ^^^^ [1] + +References: + useState_hook.js:7:42 + 7| const [count, setCount] = React.useState(1); + ^^^^^^ [2] -Found 193 errors +Found 245 errors Only showing the most relevant union/intersection branches. To see all branches, re-run Flow with --show-all-branches diff --git a/tests/react/ref.js b/tests/react/ref.js index 48eddd55eb3..e12769a598b 100644 --- a/tests/react/ref.js +++ b/tests/react/ref.js @@ -24,3 +24,16 @@ class FooExact extends React.Component<{||}, void> {} (foo: FooExact)} />; // Error: `FooExact` may be null. (foo: FooExact | null)} />; // OK (foo: Bar | null)} />; // Error: `FooExact` is not `Bar`. + +class NumRefs extends React.Component<{}> { + getChild(i: number) { + return this.refs[i]; + } + render() { + var children = []; + for (var i = 0; i < 10; i++) { + children.push(
);
+    }
+    return children;
+  }
+}
diff --git a/tests/react/test-utils.js b/tests/react/test-utils.js
index c6fa6e84cca..53a0f2ea24f 100644
--- a/tests/react/test-utils.js
+++ b/tests/react/test-utils.js
@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2013-present, Facebook, Inc.
+ * Copyright (c) Facebook, Inc. and its affiliates.
  *
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree.
@@ -33,9 +33,10 @@ TestUtils.mockComponent(MyTestingComponent, 'span');
   tree,
   child => child.tagName === 'BUTTON',
 ): Array>);
-(TestUtils.scryRenderedDOMComponentsWithClass(tree, 'my-button'): Array<
-  Element,
->);
+(TestUtils.scryRenderedDOMComponentsWithClass(
+  tree,
+  'my-button',
+): Array<Element>);
 
 const buttonEl = TestUtils.findRenderedDOMComponentWithClass(tree, 'my-button');
 if (buttonEl != null) {
@@ -51,3 +52,21 @@ if (buttonEl != null) {
   tree,
   MyTestingComponent,
 ): ?React.Component);
+TestUtils.act(() => {
+  Math.random();
+});
+TestUtils.act(() => ({count: 123})); // error
+async function runTest() {
+  await TestUtils.act(async () => {
+    // .. some test code
+    await Promise.resolve();
+  });
+  /* // wishlist -
+  act(async () => {
+    // some test code
+  }); // ideally this should error
+  await act(() => {
+    // ...
+  }); // ideally this should error
+  */
+}
diff --git a/tests/react/useCallback_hook.js b/tests/react/useCallback_hook.js
new file mode 100644
index 00000000000..0e552b4df29
--- /dev/null
+++ b/tests/react/useCallback_hook.js
@@ -0,0 +1,23 @@
+// @flow
+
+import React from 'react';
+
+{
+  React.useCallback(); // Error: function requires another argument.
+}
+
+{
+  const callback = React.useCallback(() => 123);
+  const num: number = callback();
+  const str: string = callback(); // Error: number is incompatible with string.
+}
+
+{
+  const callback = React.useCallback((num: number, str: string) => {
+    (num: number);
+    (str: string);
+  });
+  callback(123, 'abc'); // Ok
+  callback(true); // Error: function requires another argument.
+  callback('123', 'abc'); // Error: string is incompatible with number.
+}
diff --git a/tests/react/useContext_hook.js b/tests/react/useContext_hook.js
new file mode 100644
index 00000000000..60512ed6931
--- /dev/null
+++ b/tests/react/useContext_hook.js
@@ -0,0 +1,55 @@
+// @flow
+
+import React from 'react';
+
+{
+  React.useContext(); // Error: function requires another argument.
+}
+
+type CustomType = {|
+  foo: string,
+  bar: number,
+|};
+
+{
+  let stringValue: string;
+  let numericValue: number;
+  let customValue: CustomType;
+
+  const StringContext = React.createContext('hello');
+  stringValue = React.useContext(StringContext); // Ok
+  numericValue = React.useContext(StringContext); // Error: string is incompatible with number
+
+  const InvalidContext: React$Context<CustomType> = React.createContext('hello'); // Error: inexact string is incompatible with exact CustomType
+
+  const CustomContext: React$Context<CustomType> = React.createContext({
+    foo: 'abc',
+    bar: 123,
+  });
+  stringValue = React.useContext(CustomContext); // Error: CustomType is incompatible with string
+  customValue = React.useContext(CustomContext); // Ok
+}
+
+{
+  const Context = React.createContext(
+    {foo: 0, bar: 0, baz: 0},
+    (a, b) => {
+      let result = 0;
+      if (a.foo !== b.foo) {
+        result |= 0b001;
+      }
+      if (a.bar !== b.bar) {
+        result |= 0b010;
+      }
+      if (a.baz !== b.baz) {
+        result |= 0b100;
+      }
+      return result;
+    },
+  );
+  const {foo} = React.useContext(Context, 0b001);
+  (foo: number); // Ok
+  const {bar} = React.useContext(Context, 0b010);
+  (bar: number); // Ok
+  (bar: string); // Error: number is incompatible with string
+}
diff --git a/tests/react/useDebugValue_hook.js b/tests/react/useDebugValue_hook.js
new file mode 100644
index 00000000000..d799832a49c
--- /dev/null
+++ b/tests/react/useDebugValue_hook.js
@@ -0,0 +1,8 @@
+// @flow
+
+import React from 'react';
+
+const undefinedValue = React.useDebugValue(123);
+
+(undefinedValue: typeof undefined); // Ok
+(undefinedValue: string); // Error: undefined is incompatible with string
diff --git a/tests/react/useEffect_hook.js b/tests/react/useEffect_hook.js
new file mode 100644
index 00000000000..5e06e7f7cbb
--- /dev/null
+++ b/tests/react/useEffect_hook.js
@@ -0,0 +1,26 @@
+// @flow
+
+import React from 'react';
+
+{
+  React.useEffect(); // Error: function requires another argument.
+}
+
+{
+  // Ok variants without cleanup functions
+  React.useEffect(() => {});
+  React.useEffect(() => {}, []);
+  React.useEffect(() => {}, [1, 2, 3]);
+
+  // Ok variants with cleanup functions
+  React.useEffect(() => () => {});
+  React.useEffect(() => () => {}, []);
+  React.useEffect(() => () => {}, [1, 2, 3]);
+}
+
+{
+  React.useEffect(1); // Error: number is incompatible with function type
+  React.useEffect(() => {}, 1); // Error: number is incompatible with function react-only array
+  React.useEffect(async () => {}) // Error: promise is incompatible with function return type
+  React.useEffect(() => () => 123) // Error: cleanup function should not return a value
+}
diff --git a/tests/react/useImperativeHandle_hook.js b/tests/react/useImperativeHandle_hook.js
new file mode 100644
index 00000000000..c2d99065c4f
--- /dev/null
+++ b/tests/react/useImperativeHandle_hook.js
@@ -0,0 +1,35 @@
+// @flow
+
+import React from 'react';
+
+{
+  React.useImperativeHandle(); // Error: function requires another argument.
+} + +type Interface = {| + focus: () => void +|}; + +{ + const api: Interface = { + focus: () => {} + }; + + const ref: {current: null | Interface } = React.createRef(); + React.useImperativeHandle(ref, () => api); // Ok + + const refSetter = (instance: null | Interface) => {}; + React.useImperativeHandle(refSetter, () => api); // Ok +} + +{ + const api: Interface = { + focus: () => {} + }; + + const ref: {current: null | Interface } = React.createRef(); + React.useImperativeHandle(ref, () => ({})); // Error: inexact object literal is incompatible with exact Interface + + const refSetter = (instance: null | Interface) => {}; + React.useImperativeHandle(refSetter, () => ({})); // Error: inexact object literal is incompatible with exact Interface +} diff --git a/tests/react/useLayoutEffect_hook.js b/tests/react/useLayoutEffect_hook.js new file mode 100644 index 00000000000..2e788afbfdc --- /dev/null +++ b/tests/react/useLayoutEffect_hook.js @@ -0,0 +1,26 @@ +// @flow + +import React from 'react'; + +{ + React.useLayoutEffect(); // Error: function requires another argument. +} + +{ + // Ok variants without cleanup functions + React.useLayoutEffect(() => {}); + React.useLayoutEffect(() => {}, []); + React.useLayoutEffect(() => {}, [1, 2, 3]); + + // Ok variants with cleanup functions + React.useLayoutEffect(() => () => {}); + React.useLayoutEffect(() => () => {}, []); + React.useLayoutEffect(() => () => {}, [1, 2, 3]); +} + +{ + React.useLayoutEffect(1); // Error: number is incompatible with function type + React.useLayoutEffect(() => {}, 1); // Error: number is incompatible with function react-only array + React.useLayoutEffect(async () => {}) // Error: promise is incompatible with function return type + React.useLayoutEffect(() => () => 123) // Error: cleanup function should not return a value +} diff --git a/tests/react/useMemo_hook.js b/tests/react/useMemo_hook.js new file mode 100644 index 00000000000..e9f42ce1278 --- /dev/null +++ b/tests/react/useMemo_hook.js @@ -0,0 +1,18 @@ +// @flow + +import React from 'react'; + +{ + React.useMemo(); // Error: function requires another argument. +} + +{ + let numeric: number; + numeric = React.useMemo(() => 1); // Ok + numeric = React.useMemo(() => 1, []); // Ok + numeric = React.useMemo(() => 1, [1, 2, 3]); // Ok +} + +{ + const invalid: number = React.useMemo(() => "abc"); // Error: string is incompatible with number +} diff --git a/tests/react/useMutationEffect_hook.js b/tests/react/useMutationEffect_hook.js new file mode 100644 index 00000000000..90ac0c58a07 --- /dev/null +++ b/tests/react/useMutationEffect_hook.js @@ -0,0 +1,24 @@ +// @flow + +import React from 'react'; + +{ + React.useMutationEffect(); // Error: function requires another argument. 
+} + +{ + // Ok variants without cleanup functions + React.useMutationEffect(() => {}); + React.useMutationEffect(() => {}, []); + React.useMutationEffect(() => {}, [1, 2, 3]); + + // Ok variants with cleanup functions + React.useMutationEffect(() => () => {}); + React.useMutationEffect(() => () => {}, []); + React.useMutationEffect(() => () => {}, [1, 2, 3]); +} + +{ + React.useMutationEffect(1); // Error: number is incompatible with function type + React.useMutationEffect(() => {}, 1); // Error: number is incompatible with function react-only array +} diff --git a/tests/react/useReducer_hook.js b/tests/react/useReducer_hook.js new file mode 100644 index 00000000000..ce2cea7219e --- /dev/null +++ b/tests/react/useReducer_hook.js @@ -0,0 +1,47 @@ +// @flow + +import React from 'react'; + +{ + React.useReducer(); // Error: function requires another argument. +} + +function reducer(state, action) { + switch (action.type) { + case "reset": + return { count: action.payload }; + case "increment": + return { count: state.count + 1 }; + case "decrement": + return { count: state.count - 1 }; + default: + return state; + } +} + +const initialState = { count: 0 }; + +{ + const [state, dispatch] = React.useReducer(reducer, initialState); + (state.count: number); // Ok + (state.count: string); // Error: number is incompatible with string + + dispatch({ type: "reset", payload: 123 }); + dispatch({ type: "increment" }); + dispatch({ type: "decrement" }); +} + +{ + function init(initialArg){ + return { + count: initialArg.initial + } + } + + const [state, dispatch] = React.useReducer(reducer, { initial: 123 }, init); + (state.count: number); // Ok + + dispatch({ type: "reset", payload: 123 }); + dispatch({ type: "increment" }); + dispatch({ type: "decrement" }); +} diff --git a/tests/react/useRef_hook.js b/tests/react/useRef_hook.js new file mode 100644 index 00000000000..1fe9a1d56a9 --- /dev/null +++ b/tests/react/useRef_hook.js @@ -0,0 +1,33 @@ +// @flow + +import React from 'react'; + +class Foo extends React.Component<{}, void> {} +class Bar extends React.Component<{}, void> {} + +{ + const stringValue: {current: string} = React.useRef("abc"); // Ok + const numberValue: {current: number} = React.useRef(123); // Ok + const booleanValue: {current: boolean} = React.useRef(true); // Ok + const nullValue: {current: null} = React.useRef(null); // Ok +} + +{ + const stringValue: {current: string | null} = React.useRef(123); // Error: number is incompatible with string in property current + const numberValue: {current: number | null} = React.useRef("abc"); // Error: string is incompatible with number in property current + const nullValue: {current: null} = React.useRef(true); // Error: boolean is incompatible with null in property current +} + +{ + const stringValue: {current: string | null} = React.useRef(null); + stringValue.current = "foo"; // Ok + stringValue.current = 123; // Error: number is incompatible with string in property current +} + +{ + const foo: {current: Foo | null} = React.useRef(new Foo()); // Ok +} + +{ + const foo: {current: Foo | null} = React.useRef(new Bar()); // Error: Bar is incompatible with Foo in property current +} diff --git a/tests/react/useState_hook.js b/tests/react/useState_hook.js new file mode 100644 index 00000000000..bc9336b3aa4 --- /dev/null +++ b/tests/react/useState_hook.js @@ -0,0 +1,13 @@ +// @flow + +import React from 'react'; + +React.useState(); // Error: function requires another argument. 
+ +const [count, setCount] = React.useState(1); + +(count: number); // Ok +(count: string); // Error: number is incompatible with string + +setCount(2); // Okay +setCount(true); // Error: boolean is incompatible with number diff --git a/tests/react_16_3/.flowconfig b/tests/react_16_3/.flowconfig new file mode 100644 index 00000000000..de38d19537d --- /dev/null +++ b/tests/react_16_3/.flowconfig @@ -0,0 +1,2 @@ +[options] +no_flowlib=false diff --git a/tests/react_16_3/forwardRef.js b/tests/react_16_3/forwardRef.js new file mode 100644 index 00000000000..e232eb77479 --- /dev/null +++ b/tests/react_16_3/forwardRef.js @@ -0,0 +1,42 @@ +//@flow +const React = require('react'); + +type Props = {| foo: number |}; +const FancyButton = React.forwardRef((props, ref) => ( + +)); + +(FancyButton: React.AbstractComponent); + +const _a = ; // Error, missing foo +const _b = ; +const _c = ; // Error bar, not allowed in exact props + +const goodRef = React.createRef(); +const _d = ; + +const badRef = React.createRef(); +const _e = ; // Incorrect ref type + +const _f = x} />; +const _g = x} />; // Incorrect ref type + +type FooProps = {|foo: number|}; + +const UnionRef = React.forwardRef( + (props, ref): React.Element<'button' | 'a'> => { + if (props.foo === 0) { + return ; + } + + return